diff --git a/gradle/validation/spotless.gradle b/gradle/validation/spotless.gradle index fd945dbb81d..7b7a82f4e35 100644 --- a/gradle/validation/spotless.gradle +++ b/gradle/validation/spotless.gradle @@ -45,7 +45,7 @@ configure(project(":solr").subprojects) { prj -> // Exclude certain files (generated ones, mostly). switch (project.path) { case ":solr:core": - targetExclude "src/**/*.java" + targetExclude "src/java/**/*.java" break case ":solr:solrj": targetExclude "src/test/org/apache/solr/client/solrj/io/stream/*.java" diff --git a/solr/core/src/test/org/apache/solr/AnalysisAfterCoreReloadTest.java b/solr/core/src/test/org/apache/solr/AnalysisAfterCoreReloadTest.java index 49c6351d725..f701968eaee 100644 --- a/solr/core/src/test/org/apache/solr/AnalysisAfterCoreReloadTest.java +++ b/solr/core/src/test/org/apache/solr/AnalysisAfterCoreReloadTest.java @@ -16,6 +16,13 @@ */ package org.apache.solr; +import static java.nio.file.StandardCopyOption.REPLACE_EXISTING; + +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; import org.apache.commons.io.FileUtils; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; @@ -28,22 +35,14 @@ import org.junit.AfterClass; import org.junit.BeforeClass; -import java.io.File; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; - -import static java.nio.file.StandardCopyOption.REPLACE_EXISTING; - public class AnalysisAfterCoreReloadTest extends SolrTestCaseJ4 { - + private static String tmpSolrHome; int port = 0; static final String context = "/solr"; static final String collection = "collection1"; - + @BeforeClass public static void beforeClass() throws Exception { tmpSolrHome = createTempDir().toFile().getAbsolutePath(); @@ -52,72 +51,70 @@ public static void beforeClass() throws Exception { } @AfterClass - public static void AfterClass() throws Exception { - - } - + public static void AfterClass() throws Exception {} + public void testStopwordsAfterCoreReload() throws Exception { SolrInputDocument doc = new SolrInputDocument(); - doc.setField( "id", "42" ); - doc.setField( "teststop", "terma stopworda stopwordb stopwordc" ); - + doc.setField("id", "42"); + doc.setField("teststop", "terma stopworda stopwordb stopwordc"); + // default stopwords - stopworda and stopwordb - + UpdateRequest up = new UpdateRequest(); up.setAction(ACTION.COMMIT, true, true); - up.add( doc ); - up.process( getSolrCore() ); + up.add(doc); + up.process(getSolrCore()); SolrQuery q = new SolrQuery(); - QueryRequest r = new QueryRequest( q ); - q.setQuery( "teststop:terma" ); - assertEquals( 1, r.process( getSolrCore() ).getResults().size() ); + QueryRequest r = new QueryRequest(q); + q.setQuery("teststop:terma"); + assertEquals(1, r.process(getSolrCore()).getResults().size()); q = new SolrQuery(); - r = new QueryRequest( q ); - q.setQuery( "teststop:stopworda" ); - assertEquals( 0, r.process( getSolrCore() ).getResults().size() ); + r = new QueryRequest(q); + q.setQuery("teststop:stopworda"); + assertEquals(0, r.process(getSolrCore()).getResults().size()); q = new SolrQuery(); - r = new QueryRequest( q ); - q.setQuery( "teststop:stopwordb" ); - assertEquals( 0, r.process( getSolrCore() ).getResults().size() ); + r = new QueryRequest(q); + q.setQuery("teststop:stopwordb"); + assertEquals(0, r.process(getSolrCore()).getResults().size()); q = new SolrQuery(); - r = new 
QueryRequest( q ); - q.setQuery( "teststop:stopwordc" ); - assertEquals( 1, r.process( getSolrCore() ).getResults().size() ); + r = new QueryRequest(q); + q.setQuery("teststop:stopwordc"); + assertEquals(1, r.process(getSolrCore()).getResults().size()); // overwrite stopwords file with stopword list ["stopwordc"] and reload the core overwriteStopwords("stopwordc\n"); h.getCoreContainer().reload(collection); - up.process( getSolrCore() ); + up.process(getSolrCore()); q = new SolrQuery(); - r = new QueryRequest( q ); - q.setQuery( "teststop:terma" ); - assertEquals( 1, r.process( getSolrCore() ).getResults().size() ); + r = new QueryRequest(q); + q.setQuery("teststop:terma"); + assertEquals(1, r.process(getSolrCore()).getResults().size()); q = new SolrQuery(); - r = new QueryRequest( q ); - q.setQuery( "teststop:stopworda" ); + r = new QueryRequest(q); + q.setQuery("teststop:stopworda"); // stopworda is no longer a stopword - assertEquals( 1, r.process( getSolrCore() ).getResults().size() ); + assertEquals(1, r.process(getSolrCore()).getResults().size()); q = new SolrQuery(); - r = new QueryRequest( q ); - q.setQuery( "teststop:stopwordb" ); + r = new QueryRequest(q); + q.setQuery("teststop:stopwordb"); // stopwordb is no longer a stopword - assertEquals( 1, r.process( getSolrCore() ).getResults().size() ); + assertEquals(1, r.process(getSolrCore()).getResults().size()); q = new SolrQuery(); - r = new QueryRequest( q ); - q.setQuery( "teststop:stopwordc" ); + r = new QueryRequest(q); + q.setQuery("teststop:stopwordc"); // stopwordc should be a stopword - assertEquals( 0, r.process( getSolrCore() ).getResults().size() ); + assertEquals(0, r.process(getSolrCore()).getResults().size()); } - + private void overwriteStopwords(String stopwords) throws IOException { try (SolrCore core = h.getCoreContainer().getCore(collection)) { Path configPath = core.getResourceLoader().getConfigPath(); @@ -125,7 +122,7 @@ private void overwriteStopwords(String stopwords) throws IOException { Files.write(configPath.resolve("stopwords.txt"), stopwords.getBytes(StandardCharsets.UTF_8)); } } - + @Override public void tearDown() throws Exception { Path configPath; @@ -142,5 +139,4 @@ public void tearDown() throws Exception { protected SolrClient getSolrCore() { return new EmbeddedSolrServer(h.getCoreContainer(), collection); } - } diff --git a/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java b/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java index 55946f354a7..5b80656ae6d 100644 --- a/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java +++ b/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java @@ -16,8 +16,8 @@ */ package org.apache.solr; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; +import com.codahale.metrics.Gauge; +import com.codahale.metrics.Metric; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.StringWriter; @@ -27,13 +27,12 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; - -import com.codahale.metrics.Gauge; -import com.codahale.metrics.Metric; +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; -import org.apache.lucene.misc.document.LazyDocument; import org.apache.lucene.index.IndexableField; +import org.apache.lucene.misc.document.LazyDocument; import org.apache.solr.common.SolrException; import 
org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.MapSolrParams; @@ -59,17 +58,18 @@ import org.junit.Test; /** - * Tests some basic functionality of Solr while demonstrating good - * Best Practices for using SolrTestCaseJ4 + * Tests some basic functionality of Solr while demonstrating good Best Practices for using + * SolrTestCaseJ4 */ public class BasicFunctionalityTest extends SolrTestCaseJ4 { - - public String getCoreName() { return "basic"; } + public String getCoreName() { + return "basic"; + } @BeforeClass public static void beforeTests() throws Exception { - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); } // tests the performance of dynamic field creation and // field property testing. @@ -101,23 +101,22 @@ public void testFieldPerf() { System.out.println("ret=" + ret + " time="+ (end-start)); } ***/ - + @Test public void testIgnoredFields() throws Exception { - lrf.args.put(CommonParams.VERSION,"2.2"); - assertU("adding doc with ignored field", - adoc("id", "42", "foo_ignored", "blah blah")); - assertU("commit", - commit()); - - // :TODO: the behavior of querying on an unindexed field should be better specified in the future. - assertQ("query with ignored field", - req("bar_ignored:yo id:42") - ,"//*[@numFound='1']" - ,"//str[@name='id'][.='42']" - ); + lrf.args.put(CommonParams.VERSION, "2.2"); + assertU("adding doc with ignored field", adoc("id", "42", "foo_ignored", "blah blah")); + assertU("commit", commit()); + + // :TODO: the behavior of querying on an unindexed field should be better specified in the + // future. + assertQ( + "query with ignored field", + req("bar_ignored:yo id:42"), + "//*[@numFound='1']", + "//str[@name='id'][.='42']"); } - + @Test public void testSomeStuff() throws Exception { clearIndex(); @@ -134,215 +133,161 @@ public void testSomeStuff() throws Exception { assertTrue(metrics.containsKey("CORE.coreName")); assertTrue(metrics.containsKey("CORE.refCount")); @SuppressWarnings({"unchecked"}) - Gauge<Number> g = (Gauge<Number>)metrics.get("CORE.refCount"); + Gauge<Number> g = (Gauge<Number>) metrics.get("CORE.refCount"); assertTrue(g.getValue().intValue() > 0); - lrf.args.put(CommonParams.VERSION,"2.2"); - assertQ("test query on empty index", - req("qlkciyopsbgzyvkylsjhchghjrdf") - ,"//result[@numFound='0']" - ); + lrf.args.put(CommonParams.VERSION, "2.2"); + assertQ( + "test query on empty index", + req("qlkciyopsbgzyvkylsjhchghjrdf"), + "//result[@numFound='0']"); // test escaping of ";" - assertU("deleting 42 for no reason at all", - delI("42")); - assertU("adding doc#42", - adoc("id", "42", "val_s", "aa;bb")); - assertU("does commit work?", - commit()); - - assertQ("backslash escaping semicolon", - req("id:42 AND val_s:aa\\;bb") - ,"//*[@numFound='1']" - ,"//str[@name='id'][.='42']" - ); - - assertQ("quote escaping semicolon", - req("id:42 AND val_s:\"aa;bb\"") - ,"//*[@numFound='1']" - ,"//str[@name='id'][.='42']" - ); - - assertQ("no escaping semicolon", - req("id:42 AND val_s:aa") - ,"//*[@numFound='0']" - ); + assertU("deleting 42 for no reason at all", delI("42")); + assertU("adding doc#42", adoc("id", "42", "val_s", "aa;bb")); + assertU("does commit work?", commit()); + + assertQ( + "backslash escaping semicolon", + req("id:42 AND val_s:aa\\;bb"), + "//*[@numFound='1']", + "//str[@name='id'][.='42']"); + + assertQ( + "quote escaping semicolon", + req("id:42 AND val_s:\"aa;bb\""), + "//*[@numFound='1']", + "//str[@name='id'][.='42']"); + + assertQ("no escaping semicolon", req("id:42 AND val_s:aa"), 
"//*[@numFound='0']"); assertU(delI("42")); assertU(commit()); - assertQ(req("id:42") - ,"//*[@numFound='0']" - ); + assertQ(req("id:42"), "//*[@numFound='0']"); // test overwrite default of true assertU(adoc("id", "42", "val_s", "AAA")); assertU(adoc("id", "42", "val_s", "BBB")); assertU(commit()); - assertQ(req("id:42") - ,"//*[@numFound='1']" - ,"//str[.='BBB']" - ); + assertQ(req("id:42"), "//*[@numFound='1']", "//str[.='BBB']"); assertU(adoc("id", "42", "val_s", "CCC")); assertU(adoc("id", "42", "val_s", "DDD")); assertU(commit()); - assertQ(req("id:42") - ,"//*[@numFound='1']" - ,"//str[.='DDD']" - ); + assertQ(req("id:42"), "//*[@numFound='1']", "//str[.='DDD']"); // test deletes - String [] adds = new String[] { - add( doc("id","101"), "overwrite", "true" ), - add( doc("id","101"), "overwrite", "true" ), - add( doc("id","105"), "overwrite", "false" ), - add( doc("id","102"), "overwrite", "true" ), - add( doc("id","103"), "overwrite", "false" ), - add( doc("id","101"), "overwrite", "true" ), - }; + String[] adds = + new String[] { + add(doc("id", "101"), "overwrite", "true"), + add(doc("id", "101"), "overwrite", "true"), + add(doc("id", "105"), "overwrite", "false"), + add(doc("id", "102"), "overwrite", "true"), + add(doc("id", "103"), "overwrite", "false"), + add(doc("id", "101"), "overwrite", "true"), + }; for (String a : adds) { assertU(a, a); } assertU(commit()); // test maxint - assertQ(req("q","id:[100 TO 110]", "rows","2147483647") - ,"//*[@numFound='4']" - ); + assertQ(req("q", "id:[100 TO 110]", "rows", "2147483647"), "//*[@numFound='4']"); // test big limit - assertQ(req("q","id:[100 TO 111]", "rows","1147483647") - ,"//*[@numFound='4']" - ); + assertQ(req("q", "id:[100 TO 111]", "rows", "1147483647"), "//*[@numFound='4']"); - assertQ(req("id:[100 TO 110]") - ,"//*[@numFound='4']" - ); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='4']"); assertU(delI("102")); assertU(commit()); - assertQ(req("id:[100 TO 110]") - ,"//*[@numFound='3']" - ); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='3']"); assertU(delI("105")); assertU(commit()); - assertQ(req("id:[100 TO 110]") - ,"//*[@numFound='2']" - ); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='2']"); assertU(delQ("id:[100 TO 110]")); assertU(commit()); - assertQ(req("id:[100 TO 110]") - ,"//*[@numFound='0']" - ); - + assertQ(req("id:[100 TO 110]"), "//*[@numFound='0']"); + assertU(BaseTestHarness.simpleTag("rollback")); assertU(commit()); } - - /** - * verify that delete by query works with the QParser framework and - * pure negative queries - */ + /** verify that delete by query works with the QParser framework and pure negative queries */ public void testNonTrivialDeleteByQuery() throws Exception { clearIndex(); - + // setup - assertU( add(doc("id","101", "text", "red apple" )) ); - assertU( add(doc("id","102", "text", "purple grape" )) ); - assertU( add(doc("id","103", "text", "green grape" )) ); - assertU( add(doc("id","104", "text", "green pear" )) ); - assertU( add(doc("id","105", "text", "yellow banana" )) ); - assertU( add(doc("id","106", "text", "red cherry" )) ); + assertU(add(doc("id", "101", "text", "red apple"))); + assertU(add(doc("id", "102", "text", "purple grape"))); + assertU(add(doc("id", "103", "text", "green grape"))); + assertU(add(doc("id", "104", "text", "green pear"))); + assertU(add(doc("id", "105", "text", "yellow banana"))); + assertU(add(doc("id", "106", "text", "red cherry"))); // sanity checks assertU(commit()); - assertQ(req("id:[100 TO 110]") - ,"//*[@numFound='6']" - ); - 
assertQ(req("*:*") - ,"//*[@numFound='6']" - ); - assertQ(req("text:red") - ,"//*[@numFound='2']" - ); - assertQ(req("-text:red") - ,"//*[@numFound='4']" - ); - assertQ(req("text:grape") - ,"//*[@numFound='2']" - ); - assertQ(req("-text:grape") - ,"//*[@numFound='4']" - ); - assertQ(req("-text:red -text:grape") - ,"//*[@numFound='2']" - ); - assertQ(req("{!lucene q.op=AND df=text}grape green") - ,"//*[@numFound='1']" - ,"//str[@name='id'][.='103']" - ); - assertQ(req("-_val_:\"{!lucene q.op=AND df=text}grape green\"") - ,"//*[@numFound='5']" - ,"//str[@name='id'][.='101']" - ,"//str[@name='id'][.='102']" - ,"//str[@name='id'][.='104']" - ,"//str[@name='id'][.='105']" - ,"//str[@name='id'][.='106']" - ); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='6']"); + assertQ(req("*:*"), "//*[@numFound='6']"); + assertQ(req("text:red"), "//*[@numFound='2']"); + assertQ(req("-text:red"), "//*[@numFound='4']"); + assertQ(req("text:grape"), "//*[@numFound='2']"); + assertQ(req("-text:grape"), "//*[@numFound='4']"); + assertQ(req("-text:red -text:grape"), "//*[@numFound='2']"); + assertQ( + req("{!lucene q.op=AND df=text}grape green"), + "//*[@numFound='1']", + "//str[@name='id'][.='103']"); + assertQ( + req("-_val_:\"{!lucene q.op=AND df=text}grape green\""), + "//*[@numFound='5']", + "//str[@name='id'][.='101']", + "//str[@name='id'][.='102']", + "//str[@name='id'][.='104']", + "//str[@name='id'][.='105']", + "//str[@name='id'][.='106']"); // tests assertU(delQ("-*:*")); // NOOP assertU(commit()); - assertQ(req("*:*") ,"//*[@numFound='6']" ); + assertQ(req("*:*"), "//*[@numFound='6']"); assertU(delQ("-text:grape -text:red")); assertU(commit()); - assertQ(req("*:*") ,"//*[@numFound='4']" ,"//str[@name='id'][.='101']" ,"//str[@name='id'][.='102']" ,"//str[@name='id'][.='103']" ,"//str[@name='id'][.='106']" ); + assertQ( + req("*:*"), + "//*[@numFound='4']", + "//str[@name='id'][.='101']", + "//str[@name='id'][.='102']", + "//str[@name='id'][.='103']", + "//str[@name='id'][.='106']"); assertU(delQ("{!term f=id}106")); assertU(commit()); - assertQ(req("*:*") ,"//*[@numFound='3']" ,"//str[@name='id'][.='101']" ,"//str[@name='id'][.='102']" ,"//str[@name='id'][.='103']" ); + assertQ( + req("*:*"), + "//*[@numFound='3']", + "//str[@name='id'][.='101']", + "//str[@name='id'][.='102']", + "//str[@name='id'][.='103']"); assertU(delQ("-_val_:\"{!lucene q.op=AND df=text}grape green\"")); assertU(commit()); - assertQ(req("*:*") ,"//*[@numFound='1']" ,"//str[@name='id'][.='103']" ); + assertQ(req("*:*"), "//*[@numFound='1']", "//str[@name='id'][.='103']"); assertU(delQ("-text:doesnotexist")); assertU(commit()); - assertQ(req("*:*") ,"//*[@numFound='0']" ); - + assertQ(req("*:*"), "//*[@numFound='0']"); } @Test public void testHTMLStrip() { - assertU(add(doc("id","200", "HTMLwhitetok","&#65;&#66;&#67;"))); - assertU(add(doc("id","201", "HTMLwhitetok","&#65;&#66;&#67;"))); // do it again to make sure reuse is working + assertU(add(doc("id", "200", "HTMLwhitetok", "&#65;&#66;&#67;"))); + // do it again to make sure reuse is working + assertU(add(doc("id", "201", "HTMLwhitetok", "&#65;&#66;&#67;"))); assertU(commit()); - assertQ(req("q","HTMLwhitetok:ABC") ,"//*[@numFound='2']" ); - assertQ(req("q","HTMLwhitetok:ABC") ,"//*[@numFound='2']" ); + assertQ(req("q", "HTMLwhitetok:ABC"), "//*[@numFound='2']"); + assertQ(req("q", "HTMLwhitetok:ABC"), "//*[@numFound='2']"); } @Test @@ -351,48 +296,52 @@ public void testClientErrorOnMalformedDate() throws Exception { ignoreException(BAD_VALUE); final List<String> FIELDS = new LinkedList<>(); - for (String type : new String[] { - "tdt", "tdt1", "tdtdv", "tdtdv1", - "dt_dv", "dt_dvo", "dt", "dt1", "dt_os" - }) { + for (String type : + new String[] {"tdt", "tdt1", "tdtdv", "tdtdv1", "dt_dv", "dt_dvo", "dt", "dt1", "dt_os"}) { FIELDS.add("malformed_" + type); } // test that malformed numerics cause client error not server error for (String field : FIELDS) { - SolrException e1 = expectThrows(SolrException.class, - "Didn't encounter an error trying to add a bad date: " + field, - () -> h.update(add( doc("id","100", field, BAD_VALUE)))); + SolrException e1 = + expectThrows( + SolrException.class, + "Didn't encounter an error trying to add a bad date: " + field, + () -> h.update(add(doc("id", "100", field, BAD_VALUE)))); String msg1 = e1.getMessage(); - assertTrue("not an (update) client error on field: " + field +" : "+ msg1, + assertTrue( + "not an (update) client error on field: " + field + " : " + msg1, 400 <= e1.code() && e1.code() < 500); - assertTrue("(update) client error does not mention bad value: " + msg1, - msg1.contains(BAD_VALUE)); - assertTrue("client error does not mention document id: " + msg1, - msg1.contains("[doc=100]")); + assertTrue( + "(update) client error does not mention bad value: " + msg1, msg1.contains(BAD_VALUE)); + assertTrue("client error does not mention document id: " + msg1, msg1.contains("[doc=100]")); SchemaField sf = h.getCore().getLatestSchema().getField(field); if (!sf.hasDocValues() && !sf.indexed()) { continue; } - SolrException e2 = expectThrows(SolrException.class, - "Didn't encounter an error trying to add a bad date: " + field, - () -> h.query(req("q",field + ":" + BAD_VALUE)) - ); + SolrException e2 = + expectThrows( + SolrException.class, + "Didn't encounter an error trying to add a bad date: " + field, + () -> h.query(req("q", field + ":" + BAD_VALUE))); String msg2 = e2.toString(); - assertTrue("not a (search) client error on field: " + field +" : "+ msg2, + assertTrue( + "not a (search) client error on field: " + field + " : " + msg2, 400 <= e2.code() && e2.code() < 500); - assertTrue("(search) client error does not mention bad value: " + msg2, - msg2.contains(BAD_VALUE)); - - SolrException e3 = expectThrows(SolrException.class, - "Didn't encounter an error trying to add a bad date: " + field, - () -> h.query(req("q",field + ":[NOW TO " + BAD_VALUE + "]")) - ); + assertTrue( + "(search) client error does not mention bad value: " + msg2, msg2.contains(BAD_VALUE)); + + SolrException e3 = + expectThrows( + SolrException.class, + "Didn't encounter an error trying to add a bad date: " + field, + () -> h.query(req("q", field + ":[NOW TO " + BAD_VALUE + "]"))); String msg3 = e3.toString(); - assertTrue("not a (search) client error on field: " + field +" : "+ msg3, + assertTrue( + "not a (search) client error on field: " + field + " : " + msg3, 400 <= e3.code() && e3.code() < 500); - assertTrue("(search) client error does not mention bad value: " + msg3, - msg3.contains(BAD_VALUE)); + assertTrue( + "(search) client error does not mention bad value: " + msg3, msg3.contains(BAD_VALUE)); } } @@ -403,131 +352,140 @@ public void testClientErrorOnMalformedNumbers() throws Exception { ignoreException(BAD_VALUE); final List<String> FIELDS = new LinkedList<>(); - for (String type : new String[] { - "ti", "tf", "td", "tl", - "i", "f", "d", "l", - "i_dv", "f_dv", "d_dv", "l_dv", - "i_dvo", "f_dvo", "d_dvo", "l_dvo", - "i_os", "f_os", "d_os", "l_os" + for (String type : + new String[] { + "ti", "tf", "td", "tl", + "i", "f", "d", "l", + "i_dv", 
"f_dv", "d_dv", "l_dv", + "i_dvo", "f_dvo", "d_dvo", "l_dvo", + "i_os", "f_os", "d_os", "l_os" }) { FIELDS.add("malformed_" + type); } // test that malformed numerics cause client error not server error for (String field : FIELDS) { - SolrException e1 = expectThrows(SolrException.class, - "Didn't encounter an error trying to add a non-number: " + field, - () -> h.update(add( doc("id","100", field, BAD_VALUE)))); + SolrException e1 = + expectThrows( + SolrException.class, + "Didn't encounter an error trying to add a non-number: " + field, + () -> h.update(add(doc("id", "100", field, BAD_VALUE)))); String msg1 = e1.toString(); - assertTrue("not an (update) client error on field: " + field +" : "+ msg1, + assertTrue( + "not an (update) client error on field: " + field + " : " + msg1, 400 <= e1.code() && e1.code() < 500); - assertTrue("(update) client error does not mention bad value: " + msg1, - msg1.contains(BAD_VALUE)); - assertTrue("client error does not mention document id", - msg1.contains("[doc=100]")); - SchemaField sf = h.getCore().getLatestSchema().getField(field); + assertTrue( + "(update) client error does not mention bad value: " + msg1, msg1.contains(BAD_VALUE)); + assertTrue("client error does not mention document id", msg1.contains("[doc=100]")); + SchemaField sf = h.getCore().getLatestSchema().getField(field); if (!sf.hasDocValues() && !sf.indexed()) { continue; } - SolrException e2 = expectThrows(SolrException.class, - "Didn't encounter an error trying to add a non-number: " + field, - () -> h.query(req("q",field + ":" + BAD_VALUE)) - ); + SolrException e2 = + expectThrows( + SolrException.class, + "Didn't encounter an error trying to add a non-number: " + field, + () -> h.query(req("q", field + ":" + BAD_VALUE))); String msg2 = e2.toString(); - assertTrue("not a (search) client error on field: " + field +" : "+ msg2, + assertTrue( + "not a (search) client error on field: " + field + " : " + msg2, 400 <= e2.code() && e2.code() < 500); - assertTrue("(search) client error does not mention bad value: " + msg2, - msg2.contains(BAD_VALUE)); - - SolrException e3 = expectThrows(SolrException.class, - "Didn't encounter an error trying to add a non-number: " + field, - () -> h.query(req("q",field + ":[10 TO " + BAD_VALUE + "]")) - ); + assertTrue( + "(search) client error does not mention bad value: " + msg2, msg2.contains(BAD_VALUE)); + + SolrException e3 = + expectThrows( + SolrException.class, + "Didn't encounter an error trying to add a non-number: " + field, + () -> h.query(req("q", field + ":[10 TO " + BAD_VALUE + "]"))); String msg3 = e3.toString(); - assertTrue("not a (search) client error on field: " + field +" : "+ msg3, + assertTrue( + "not a (search) client error on field: " + field + " : " + msg3, 400 <= e3.code() && e3.code() < 500); - assertTrue("(search) client error does not mention bad value: " + msg3, - msg3.contains(BAD_VALUE)); + assertTrue( + "(search) client error does not mention bad value: " + msg3, msg3.contains(BAD_VALUE)); } } - + @Test public void testRequestHandlerBaseException() { final String tmp = "BOO! 
ignore_exception"; - SolrRequestHandler handler = new RequestHandlerBase() { - @Override - public Name getPermissionName(AuthorizationContext request) { - return Name.ALL; - } - - @Override - public String getDescription() { return tmp; } - @Override - public void handleRequestBody - ( SolrQueryRequest req, SolrQueryResponse rsp ) { - throw new RuntimeException(tmp); - } - }; + SolrRequestHandler handler = + new RequestHandlerBase() { + @Override + public Name getPermissionName(AuthorizationContext request) { + return Name.ALL; + } + + @Override + public String getDescription() { + return tmp; + } + + @Override + public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) { + throw new RuntimeException(tmp); + } + }; handler.init(new NamedList<>()); SolrQueryResponse rsp = new SolrQueryResponse(); SolrQueryRequest req = req(); - h.getCore().execute(handler, - req, - rsp); + h.getCore().execute(handler, req, rsp); assertNotNull("should have found an exception", rsp.getException()); - req.close(); + req.close(); } @Test public void testMultipleUpdatesPerAdd() { clearIndex(); // big freaking kludge since the response is currently not well formed. - String res = h.update("<add><doc><field name=\"id\">1</field></doc><doc><field name=\"id\">2</field></doc></add>"); + String res = + h.update( + "<add><doc><field name=\"id\">1</field></doc><doc><field name=\"id\">2</field></doc></add>"); // assertEquals("<result status=\"0\"></result>", res); assertU("<commit/>"); - assertQ(req("id:[0 TO 99]") ,"//*[@numFound='2']" ); - + assertQ(req("id:[0 TO 99]"), "//*[@numFound='2']"); } @Test public void testDocBoost() throws Exception { - String res = h.update("<add>" + "<doc><field name=\"id\">1</field>"+ - "<field name=\"text\">hello</field></doc>" + - "<doc boost=\"2.0\"><field name=\"id\">2</field>" + - "<field name=\"text\">hello</field></doc>" + - "</add>"); + String res = + h.update( + "<add>" + + "<doc><field name=\"id\">1</field>" + + "<field name=\"text\">hello</field></doc>" + + "<doc boost=\"2.0\"><field name=\"id\">2</field>" + + "<field name=\"text\">hello</field></doc>" + + "</add>"); // assertEquals("<result status=\"0\"></result>", res); assertU("<commit/>"); - assertQ(req("text:hello") ,"//*[@numFound='2']" ); + assertQ(req("text:hello"), "//*[@numFound='2']"); String resp = h.query(lrf.makeRequest("q", "text:hello", CommonParams.DEBUG_QUERY, "true")); //System.out.println(resp); // second doc ranked first - assertTrue( resp.indexOf("\"2\"") < resp.indexOf("\"1\"") ); + assertTrue(resp.indexOf("\"2\"") < resp.indexOf("\"1\"")); } @Test public void testFieldBoost() throws Exception { - String res = h.update("<add>" + "<doc><field name=\"id\">1</field>"+ - "<field name=\"text\">hello</field></doc>" + - "<doc><field name=\"id\">2</field>" + - "<field name=\"text\" boost=\"2.0\">hello</field></doc>" + - "</add>"); + String res = + h.update( + "<add>" + + "<doc><field name=\"id\">1</field>" + + "<field name=\"text\">hello</field></doc>" + + "<doc><field name=\"id\">2</field>" + + "<field name=\"text\" boost=\"2.0\">hello</field></doc>" + + "</add>"); // assertEquals("<result status=\"0\"></result>", res); assertU("<commit/>"); - assertQ(req("text:hello"), - "//*[@numFound='2']" - ); + assertQ(req("text:hello"), "//*[@numFound='2']"); String resp = h.query(lrf.makeRequest("q", "text:hello", CommonParams.DEBUG_QUERY, "true")); - //System.out.println(resp); + // System.out.println(resp); // second doc ranked first - assertTrue( resp.indexOf("\"2\"") < resp.indexOf("\"1\"") ); + assertTrue(resp.indexOf("\"2\"") < resp.indexOf("\"1\"")); } @Test @@ -538,11 +496,10 @@ public void testXMLWriter() throws Exception { StringWriter writer = new StringWriter(32000); SolrQueryRequest req = req("foo"); - XMLWriter.writeResponse(writer,req,rsp); + XMLWriter.writeResponse(writer, req, rsp); DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder(); - builder.parse(new ByteArrayInputStream - (writer.toString().getBytes(StandardCharsets.UTF_8))); + builder.parse(new ByteArrayInputStream(writer.toString().getBytes(StandardCharsets.UTF_8))); req.close(); } @@ -569,22 +526,21 @@ public void testLocalSolrQueryRequestParams() { @Test public void testKeywordTokenizerFactory() { - assertU(adoc("id", "42", - "keywordtok", "How nOw broWn-ish C.o.w. ?")); + assertU( + adoc( + "id", "42", + "keywordtok", "How nOw broWn-ish C.o.w. 
?")); assertU(commit()); - assertQ("stored value matches?", - req("id:42") - ,"//str[.='How nOw broWn-ish C.o.w. ?']" - ); - assertQ("query on exact matches?", - req("keywordtok:\"How nOw broWn-ish C.o.w. ?\"") - ,"//str[.='How nOw broWn-ish C.o.w. ?']" - ); + assertQ("stored value matches?", req("id:42"), "//str[.='How nOw broWn-ish C.o.w. ?']"); + assertQ( + "query on exact matches?", + req("keywordtok:\"How nOw broWn-ish C.o.w. ?\""), + "//str[.='How nOw broWn-ish C.o.w. ?']"); } @Test public void testTermVectorFields() { - + IndexSchema ischema = IndexSchemaFactory.buildIndexSchema(getSchemaFile(), solrConfig); SchemaField f; // Solr field type IndexableField luf; // Lucene field @@ -612,59 +568,66 @@ public void testTermVectorFields() { f = ischema.getField("test_posofftv"); luf = f.createField("test"); assertTrue(f.storeTermVector() && f.storeTermPositions() && f.storeTermOffsets()); - assertTrue(luf.fieldType().storeTermVectorOffsets() && luf.fieldType().storeTermVectorPositions()); + assertTrue( + luf.fieldType().storeTermVectorOffsets() && luf.fieldType().storeTermVectorPositions()); f = ischema.getField("test_posoffpaytv"); luf = f.createField("test"); - assertTrue(f.storeTermVector() && f.storeTermPositions() && f.storeTermOffsets() && f.storeTermPayloads()); - assertTrue(luf.fieldType().storeTermVectorOffsets() && luf.fieldType().storeTermVectorPositions() && luf.fieldType().storeTermVectorPayloads()); - + assertTrue( + f.storeTermVector() + && f.storeTermPositions() + && f.storeTermOffsets() + && f.storeTermPayloads()); + assertTrue( + luf.fieldType().storeTermVectorOffsets() + && luf.fieldType().storeTermVectorPositions() + && luf.fieldType().storeTermVectorPayloads()); } @Test public void testSolrParams() throws Exception { NamedList nl = new NamedList<>(); - nl.add("i",555); - nl.add("s","bbb"); - nl.add("bt","true"); - nl.add("bf","false"); + nl.add("i", 555); + nl.add("s", "bbb"); + nl.add("bt", "true"); + nl.add("bf", "false"); - Map m = new HashMap<>(); + Map m = new HashMap<>(); m.put("f.field1.i", "1000"); m.put("s", "BBB"); m.put("ss", "SSS"); - LocalSolrQueryRequest req = new LocalSolrQueryRequest(null,nl); + LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, nl); SolrParams p = req.getParams(); assertEquals(p.get("i"), "555"); assertEquals(p.getInt("i").intValue(), 555); - assertEquals(p.getInt("i",5), 555); - assertEquals(p.getInt("iii",5), 5); - assertEquals(p.getFieldParam("field1","i"), "555"); + assertEquals(p.getInt("i", 5), 555); + assertEquals(p.getInt("iii", 5), 5); + assertEquals(p.getFieldParam("field1", "i"), "555"); req.setParams(SolrParams.wrapDefaults(p, new MapSolrParams(m))); p = req.getParams(); assertEquals(req.getOriginalParams().get("s"), "bbb"); assertEquals(p.get("i"), "555"); assertEquals(p.getInt("i").intValue(), 555); - assertEquals(p.getInt("i",5), 555); - assertEquals(p.getInt("iii",5), 5); + assertEquals(p.getInt("i", 5), 555); + assertEquals(p.getInt("iii", 5), 5); - assertEquals(p.getFieldParam("field1","i"), "1000"); + assertEquals(p.getFieldParam("field1", "i"), "1000"); assertEquals(p.get("s"), "bbb"); assertEquals(p.get("ss"), "SSS"); assertEquals(!!p.getBool("bt"), !p.getBool("bf")); - assertEquals(p.getBool("foo",true), true); - assertEquals(p.getBool("foo",false), false); + assertEquals(p.getBool("foo", true), true); + assertEquals(p.getBool("foo", false), false); assertEquals(!!p.getBool("bt"), !p.getBool("bf")); NamedList more = new NamedList<>(); more.add("s", "aaa"); more.add("s", "ccc"); - more.add("ss","YYY"); - 
more.add("xx","XXX"); + more.add("ss", "YYY"); + more.add("xx", "XXX"); p = SolrParams.wrapAppended(p, more.toSolrParams()); assertEquals(3, p.getParams("s").length); assertEquals("bbb", p.getParams("s")[0]); @@ -680,115 +643,120 @@ public void testSolrParams() throws Exception { @Test public void testDefaultFieldValues() { clearIndex(); - lrf.args.put(CommonParams.VERSION,"2.2"); - assertU(adoc("id", "4055", - "subject", "Hoss the Hoss man Hostetter")); - assertU(adoc("id", "4056", - "intDefault", "4", - "subject", "Some Other Guy")); - assertU(adoc("id", "4057", - "multiDefault", "a", - "multiDefault", "b", - "subject", "The Dude")); + lrf.args.put(CommonParams.VERSION, "2.2"); + assertU( + adoc( + "id", "4055", + "subject", "Hoss the Hoss man Hostetter")); + assertU( + adoc( + "id", "4056", + "intDefault", "4", + "subject", "Some Other Guy")); + assertU( + adoc( + "id", "4057", + "multiDefault", "a", + "multiDefault", "b", + "subject", "The Dude")); assertU(commit()); - assertQ("everthing should have recent timestamp", - req("timestamp:[NOW-10MINUTES TO NOW]") - ,"*[count(//doc)=3]" - ,"//date[@name='timestamp']" - ); - - assertQ("2 docs should have the default for multiDefault", - req("multiDefault:muLti-Default") - ,"*[count(//doc)=2]" - ,"//arr[@name='multiDefault']" - ); - assertQ("1 doc should have its explicit multiDefault", - req("multiDefault:a") - ,"*[count(//doc)=1]" - ); - - assertQ("2 docs should have the default for intDefault", - req("intDefault:42") - ,"*[count(//doc)=2]" - ); - assertQ("1 doc should have its explicit intDefault", - req("intDefault:[3 TO 5]") - ,"*[count(//doc)=1]" - ); - + assertQ( + "everthing should have recent timestamp", + req("timestamp:[NOW-10MINUTES TO NOW]"), + "*[count(//doc)=3]", + "//date[@name='timestamp']"); + + assertQ( + "2 docs should have the default for multiDefault", + req("multiDefault:muLti-Default"), + "*[count(//doc)=2]", + "//arr[@name='multiDefault']"); + assertQ( + "1 doc should have its explicit multiDefault", req("multiDefault:a"), "*[count(//doc)=1]"); + + assertQ( + "2 docs should have the default for intDefault", req("intDefault:42"), "*[count(//doc)=2]"); + assertQ( + "1 doc should have its explicit intDefault", + req("intDefault:[3 TO 5]"), + "*[count(//doc)=1]"); } @Test public void testTokenizer() { - assertU(adoc("id", "4055", - "patterntok", "Hello,There")); - assertU(adoc("id", "4056", - "patterntok", "Goodbye,Now")); + assertU(adoc("id", "4055", "patterntok", "Hello,There")); + assertU(adoc("id", "4056", "patterntok", "Goodbye,Now")); assertU(commit()); - assertQ("make sure it split ok", - req("patterntok:Hello") - ,"*[count(//doc)=1]" - ); - assertQ("make sure it split ok", - req("patterntok:Goodbye") - ,"*[count(//doc)=1]" - ); + assertQ("make sure it split ok", req("patterntok:Hello"), "*[count(//doc)=1]"); + assertQ("make sure it split ok", req("patterntok:Goodbye"), "*[count(//doc)=1]"); } @Test public void testConfigDefaults() { - assertU(adoc("id", "42", - "name", "Zapp Brannigan")); - assertU(adoc("id", "43", - "title", "Democratic Order of Planets")); - assertU(adoc("id", "44", - "name", "The Zapper")); - assertU(adoc("id", "45", - "title", "25 star General")); - assertU(adoc("id", "46", - "subject", "Defeated the pacifists of the Gandhi nebula")); - assertU(adoc("id", "47", - "text", "line up and fly directly at the enemy death cannons, clogging them with wreckage!")); + assertU( + adoc( + "id", "42", + "name", "Zapp Brannigan")); + assertU( + adoc( + "id", "43", + "title", "Democratic Order of Planets")); 
+ assertU( + adoc( + "id", "44", + "name", "The Zapper")); + assertU( + adoc( + "id", "45", + "title", "25 star General")); + assertU( + adoc( + "id", "46", + "subject", "Defeated the pacifists of the Gandhi nebula")); + assertU( + adoc( + "id", "47", + "text", + "line up and fly directly at the enemy death cannons, clogging them with wreckage!")); assertU(commit()); - assertQ("standard request handler returns all matches", - req("id:[42 TO 47]"), - "*[count(//doc)=6]" - ); - - assertQ("defaults handler returns fewer matches", - req("q", "id:[42 TO 47]", "qt","/defaults"), - "*[count(//doc)=4]" - ); + assertQ( + "standard request handler returns all matches", req("id:[42 TO 47]"), "*[count(//doc)=6]"); - assertQ("defaults handler includes highlighting", - req("q", "name:Zapp OR title:General", "qt","/defaults"), - "//lst[@name='highlighting']" - ); + assertQ( + "defaults handler returns fewer matches", + req("q", "id:[42 TO 47]", "qt", "/defaults"), + "*[count(//doc)=4]"); + assertQ( + "defaults handler includes highlighting", + req("q", "name:Zapp OR title:General", "qt", "/defaults"), + "//lst[@name='highlighting']"); } private String mkstr(int len) { StringBuilder sb = new StringBuilder(len); for (int i = 0; i < len; i++) { - sb.append((char)(65 + i%26)); + sb.append((char) (65 + i % 26)); } return new String(sb); - } + } @Test public void testNotLazyField() throws IOException { - assertU(adoc("id", "7777", - "title", "keyword", - "test_hlt", mkstr(20000))); + assertU( + adoc( + "id", "7777", + "title", "keyword", + "test_hlt", mkstr(20000))); assertU(commit()); SolrCore core = h.getCore(); - + SolrQueryRequest req = req("q", "id:7777", "fl", "id,title,test_hlt"); SolrQueryResponse rsp = new SolrQueryResponse(); core.execute(core.getRequestHandler(req.getParams().get(CommonParams.QT)), req, rsp); @@ -796,42 +764,44 @@ public void testNotLazyField() throws IOException { DocList dl = ((ResultContext) rsp.getResponse()).getDocList(); Document d = req.getSearcher().doc(dl.iterator().nextDoc()); // ensure field in fl is not lazy - assertFalse( ((Field) d.getField("test_hlt")).getClass().getSimpleName().equals("LazyField")); - assertFalse( ((Field) d.getField("title")).getClass().getSimpleName().equals("LazyField")); + assertFalse(((Field) d.getField("test_hlt")).getClass().getSimpleName().equals("LazyField")); + assertFalse(((Field) d.getField("title")).getClass().getSimpleName().equals("LazyField")); req.close(); } @Test public void testLazyField() throws IOException { - assertU(adoc("id", "7777", - "title", "keyword", - "test_hlt", mkstr(10000), - "test_hlt", mkstr(20000), - "test_hlt", mkstr(30000), - "test_hlt", mkstr(40000))); + assertU( + adoc( + "id", "7777", + "title", "keyword", + "test_hlt", mkstr(10000), + "test_hlt", mkstr(20000), + "test_hlt", mkstr(30000), + "test_hlt", mkstr(40000))); assertU(commit()); SolrCore core = h.getCore(); - + // initial request SolrQueryRequest req = req("q", "id:7777", "fl", "id,title"); SolrQueryResponse rsp = new SolrQueryResponse(); core.execute(core.getRequestHandler(req.getParams().get(CommonParams.QT)), req, rsp); DocList dl = ((ResultContext) rsp.getResponse()).getDocList(); - DocIterator di = dl.iterator(); + DocIterator di = dl.iterator(); Document d1 = req.getSearcher().doc(di.nextDoc()); IndexableField[] values1 = null; // ensure fl field is non lazy, and non-fl field is lazy - assertFalse( d1.getField("title") instanceof LazyDocument.LazyField); - assertFalse( d1.getField("id") instanceof LazyDocument.LazyField); + 
assertFalse(d1.getField("title") instanceof LazyDocument.LazyField); + assertFalse(d1.getField("id") instanceof LazyDocument.LazyField); values1 = d1.getFields("test_hlt"); assertEquals(4, values1.length); for (int i = 0; i < values1.length; i++) { - assertTrue( values1[i] instanceof LazyDocument.LazyField ); + assertTrue(values1[i] instanceof LazyDocument.LazyField); LazyDocument.LazyField f = (LazyDocument.LazyField) values1[i]; - assertFalse( f.hasBeenLoaded() ); + assertFalse(f.hasBeenLoaded()); } req.close(); @@ -841,18 +811,17 @@ public void testLazyField() throws IOException { core.execute(core.getRequestHandler(req.getParams().get(CommonParams.QT)), req, rsp); dl = ((ResultContext) rsp.getResponse()).getDocList(); - di = dl.iterator(); + di = dl.iterator(); Document d2 = req.getSearcher().doc(di.nextDoc()); // ensure same doc, same lazy field now assertTrue("Doc was not cached", d1 == d2); IndexableField[] values2 = d2.getFields("test_hlt"); assertEquals(values1.length, values2.length); for (int i = 0; i < values1.length; i++) { - assertSame("LazyField wasn't reused", - values1[i], values2[i]); + assertSame("LazyField wasn't reused", values1[i], values2[i]); LazyDocument.LazyField f = (LazyDocument.LazyField) values1[i]; // still not a real boy, no response writer in play - assertFalse(f.hasBeenLoaded()); + assertFalse(f.hasBeenLoaded()); } assertNotNull(values2[0].stringValue()); // actuallize one value @@ -863,10 +832,11 @@ public void testLazyField() throws IOException { } req.close(); - } - + } - /** @see org.apache.solr.util.DateMathParserTest */ + /** + * @see org.apache.solr.util.DateMathParserTest + */ @Test public void testDateMath() { clearIndex(); @@ -879,14 +849,14 @@ public void testDateMath() { // assuming the test doesn't take too long to run... 
final String july4 = "1976-07-04T12:08:56.235Z"; - assertU(adoc("id", "1", "bday", july4)); - assertU(adoc("id", "2", "bday", "NOW")); - assertU(adoc("id", "3", "bday", "NOW/HOUR")); - assertU(adoc("id", "4", "bday", "NOW-30MINUTES")); - assertU(adoc("id", "5", "bday", "NOW+30MINUTES")); - assertU(adoc("id", "6", "bday", "NOW+2YEARS")); + assertU(adoc("id", "1", "bday", july4)); + assertU(adoc("id", "2", "bday", "NOW")); + assertU(adoc("id", "3", "bday", "NOW/HOUR")); + assertU(adoc("id", "4", "bday", "NOW-30MINUTES")); + assertU(adoc("id", "5", "bday", "NOW+30MINUTES")); + assertU(adoc("id", "6", "bday", "NOW+2YEARS")); assertU(commit()); - + // a ridiculoulsy long date math expression that's still equivalent to july4 final StringBuilder july4Long = new StringBuilder(july4); final int iters = atLeast(10); @@ -896,153 +866,166 @@ public void testDateMath() { } // term queries using date math (all of these should match doc#1) - for (String q : - new String[] { - "bday:1976-07-04T12\\:08\\:56.45Z/SECOND+235MILLIS", - "bday:1976-07-04T12\\:08\\:56.123Z/MINUTE+56SECONDS+235MILLIS", - "bday:\"1976-07-04T12:08:56.45Z/SECOND+235MILLIS\"", - "bday:\"1976-07-04T12:08:56.123Z/MINUTE+56SECONDS+235MILLIS\"", - "{!term f=bday}1976-07-04T12:08:56.45Z/SECOND+235MILLIS", - "{!term f=bday}1976-07-04T12:08:56.123Z/MINUTE+56SECONDS+235MILLIS", - "{!term f=bday}"+july4, - "{!term f=bday}"+july4Long, - "bday:\"" + july4Long + "\"" - }) { - assertQ("check math on field query: " + q, - req("q", q), - "*[count(//doc)=1]", - "//str[@name='id'][.='1']"); + for (String q : + new String[] { + "bday:1976-07-04T12\\:08\\:56.45Z/SECOND+235MILLIS", + "bday:1976-07-04T12\\:08\\:56.123Z/MINUTE+56SECONDS+235MILLIS", + "bday:\"1976-07-04T12:08:56.45Z/SECOND+235MILLIS\"", + "bday:\"1976-07-04T12:08:56.123Z/MINUTE+56SECONDS+235MILLIS\"", + "{!term f=bday}1976-07-04T12:08:56.45Z/SECOND+235MILLIS", + "{!term f=bday}1976-07-04T12:08:56.123Z/MINUTE+56SECONDS+235MILLIS", + "{!term f=bday}" + july4, + "{!term f=bday}" + july4Long, + "bday:\"" + july4Long + "\"" + }) { + assertQ( + "check math on field query: " + q, + req("q", q), + "*[count(//doc)=1]", + "//str[@name='id'][.='1']"); } // range queries using date math - assertQ("check math on absolute date#1", - req("q", "bday:[* TO "+july4+"/SECOND]"), - "*[count(//doc)=0]"); - assertQ("check math on absolute date#2", - req("q", "bday:[* TO "+july4+"/SECOND+1SECOND]"), - "*[count(//doc)=1]"); - assertQ("check math on absolute date#3", - req("q", "bday:["+july4+"/SECOND TO "+july4+"/SECOND+1SECOND]"), - "*[count(//doc)=1]"); - assertQ("check math on absolute date#4", - req("q", "bday:["+july4+"/MINUTE+1MINUTE TO *]"), - "*[count(//doc)=5]"); - - assertQ("check count for before now", - req("q", "bday:[* TO NOW]"), "*[count(//doc)=4]"); - - assertQ("check count for after now", - req("q", "bday:[NOW TO *]"), "*[count(//doc)=2]"); - - assertQ("check count for old stuff", - req("q", "bday:[* TO NOW-2YEARS]"), "*[count(//doc)=1]"); - - assertQ("check count for future stuff", - req("q", "bday:[NOW+1MONTH TO *]"), "*[count(//doc)=1]"); - - assertQ("check count for near stuff", - req("q", "bday:[NOW-1MONTH TO NOW+2HOURS]"), "*[count(//doc)=4]"); - - assertQ("check counts using fixed NOW", - req("q", "bday:[NOW/DAY TO NOW/DAY+1DAY]", - "NOW", "205369736000" // 1976-07-04T23:08:56.235Z - ), - "*[count(//doc)=1]"); - - assertQ("check counts using fixed NOW and TZ rounding", - req("q", "bday:[NOW/DAY TO NOW/DAY+1DAY]", - "TZ", "GMT+01", - "NOW", "205369736000" // 1976-07-04T23:08:56.235Z - ), - 
"*[count(//doc)=0]"); - + assertQ( + "check math on absolute date#1", + req("q", "bday:[* TO " + july4 + "/SECOND]"), + "*[count(//doc)=0]"); + assertQ( + "check math on absolute date#2", + req("q", "bday:[* TO " + july4 + "/SECOND+1SECOND]"), + "*[count(//doc)=1]"); + assertQ( + "check math on absolute date#3", + req("q", "bday:[" + july4 + "/SECOND TO " + july4 + "/SECOND+1SECOND]"), + "*[count(//doc)=1]"); + assertQ( + "check math on absolute date#4", + req("q", "bday:[" + july4 + "/MINUTE+1MINUTE TO *]"), + "*[count(//doc)=5]"); + + assertQ("check count for before now", req("q", "bday:[* TO NOW]"), "*[count(//doc)=4]"); + + assertQ("check count for after now", req("q", "bday:[NOW TO *]"), "*[count(//doc)=2]"); + + assertQ("check count for old stuff", req("q", "bday:[* TO NOW-2YEARS]"), "*[count(//doc)=1]"); + + assertQ( + "check count for future stuff", req("q", "bday:[NOW+1MONTH TO *]"), "*[count(//doc)=1]"); + + assertQ( + "check count for near stuff", + req("q", "bday:[NOW-1MONTH TO NOW+2HOURS]"), + "*[count(//doc)=4]"); + + assertQ( + "check counts using fixed NOW", + req( + "q", "bday:[NOW/DAY TO NOW/DAY+1DAY]", + "NOW", "205369736000" // 1976-07-04T23:08:56.235Z + ), + "*[count(//doc)=1]"); + + assertQ( + "check counts using fixed NOW and TZ rounding", + req( + "q", "bday:[NOW/DAY TO NOW/DAY+1DAY]", + "TZ", "GMT+01", + "NOW", "205369736000" // 1976-07-04T23:08:56.235Z + ), + "*[count(//doc)=0]"); } // commented after SOLR-8904; both are false -// public void testDateRoundtrip() { -// assertU(adoc("id", "99", "bday", "99-01-01T12:34:56.789Z")); -// assertU(commit()); -// assertQ("year should be canonicallized to 4 digits", -// req("q", "id:99"), -// "//date[@name='bday'][.='0099-01-01T12:34:56.789Z']"); -// assertU(adoc("id", "99", "bday", "1999-01-01T12:34:56.900Z")); -// assertU(commit()); -// assertQ("millis should be canonicallized to no trailing zeros", -// req("q", "id:99"), -// "//date[@name='bday'][.='1999-01-01T12:34:56.9Z']"); -// } - + // public void testDateRoundtrip() { + // assertU(adoc("id", "99", "bday", "99-01-01T12:34:56.789Z")); + // assertU(commit()); + // assertQ("year should be canonicallized to 4 digits", + // req("q", "id:99"), + // "//date[@name='bday'][.='0099-01-01T12:34:56.789Z']"); + // assertU(adoc("id", "99", "bday", "1999-01-01T12:34:56.900Z")); + // assertU(commit()); + // assertQ("millis should be canonicallized to no trailing zeros", + // req("q", "id:99"), + // "//date[@name='bday'][.='1999-01-01T12:34:56.9Z']"); + // } + @Test public void testPatternReplaceFilter() { - assertU(adoc("id", "1", - "patternreplacefilt", "My fine-feathered friend!")); - assertU(adoc("id", "2", - "patternreplacefilt", " What's Up Doc?")); + assertU( + adoc( + "id", "1", + "patternreplacefilt", "My fine-feathered friend!")); + assertU( + adoc( + "id", "2", + "patternreplacefilt", " What's Up Doc?")); assertU(commit()); - - assertQ("don't find Up", - req("q", "patternreplacefilt:Up"), - "*[count(//doc)=0]"); - - assertQ("find doc", - req("q", "patternreplacefilt:__What_s_Up_Doc_"), - "*[count(//doc)=1]"); - - assertQ("find birds", - req("q", "patternreplacefilt:My__fine_feathered_friend_"), - "*[count(//doc)=1]"); + + assertQ("don't find Up", req("q", "patternreplacefilt:Up"), "*[count(//doc)=0]"); + + assertQ("find doc", req("q", "patternreplacefilt:__What_s_Up_Doc_"), "*[count(//doc)=1]"); + + assertQ( + "find birds", + req("q", "patternreplacefilt:My__fine_feathered_friend_"), + "*[count(//doc)=1]"); } @Test public void testAbuseOfSort() { - assertU(adoc("id", 
"9999991", - "sortabuse_not_uninvertible", "xxx", - "sortabuse_t", "zzz xxx ccc vvv bbb nnn aaa sss ddd fff ggg")); - assertU(adoc("id", "9999992", - "sortabuse_not_uninvertible", "yyy", - "sortabuse_t", "zzz xxx ccc vvv bbb nnn qqq www eee rrr ttt")); + assertU( + adoc( + "id", "9999991", + "sortabuse_not_uninvertible", "xxx", + "sortabuse_t", "zzz xxx ccc vvv bbb nnn aaa sss ddd fff ggg")); + assertU( + adoc( + "id", "9999992", + "sortabuse_not_uninvertible", "yyy", + "sortabuse_t", "zzz xxx ccc vvv bbb nnn qqq www eee rrr ttt")); assertU(commit()); for (String f : Arrays.asList("sortabuse_not_uninvertible", "sortabuse_t")) { - RuntimeException outerEx = expectThrows(RuntimeException.class, () -> { - ignoreException("sortabuse"); - assertQ("sort on something that shouldn't work", - req("q", "*:*", - "sort", f+ " asc"), - "*[count(//doc)=2]"); - }); + RuntimeException outerEx = + expectThrows( + RuntimeException.class, + () -> { + ignoreException("sortabuse"); + assertQ( + "sort on something that shouldn't work", + req("q", "*:*", "sort", f + " asc"), + "*[count(//doc)=2]"); + }); Throwable root = getRootCause(outerEx); - assertEquals("sort exception root cause", - SolrException.class, root.getClass()); + assertEquals("sort exception root cause", SolrException.class, root.getClass()); SolrException e = (SolrException) root; - assertEquals("incorrect error type", - SolrException.ErrorCode.BAD_REQUEST, - SolrException.ErrorCode.getErrorCode(e.code())); - assertTrue("exception doesn't contain field name", - e.getMessage().contains(f)); + assertEquals( + "incorrect error type", + SolrException.ErrorCode.BAD_REQUEST, + SolrException.ErrorCode.getErrorCode(e.code())); + assertTrue("exception doesn't contain field name", e.getMessage().contains(f)); } } - -// /** this doesn't work, but if it did, this is how we'd test it. */ -// public void testOverwriteFalse() { - -// assertU(adoc("id", "overwrite", "val_s", "AAA")); -// assertU(commit()); - -// assertU(add(doc("id", "overwrite", "val_s", "BBB") -// ,"allowDups", "false" -// ,"overwriteCommitted","false" -// ,"overwritePending","false" -// )); -// assertU(commit()); -// assertQ(req("id:overwrite") -// ,"//*[@numFound='1']" -// ,"//str[.='AAA']" -// ); -// } + // /** this doesn't work, but if it did, this is how we'd test it. */ + // public void testOverwriteFalse() { + + // assertU(adoc("id", "overwrite", "val_s", "AAA")); + // assertU(commit()); + + // assertU(add(doc("id", "overwrite", "val_s", "BBB") + // ,"allowDups", "false" + // ,"overwriteCommitted","false" + // ,"overwritePending","false" + // )); + // assertU(commit()); + // assertQ(req("id:overwrite") + // ,"//*[@numFound='1']" + // ,"//str[.='AAA']" + // ); + // } } diff --git a/solr/core/src/test/org/apache/solr/ConvertedLegacyTest.java b/solr/core/src/test/org/apache/solr/ConvertedLegacyTest.java index 1ba28ee0d6e..c439a7cdc5f 100644 --- a/solr/core/src/test/org/apache/solr/ConvertedLegacyTest.java +++ b/solr/core/src/test/org/apache/solr/ConvertedLegacyTest.java @@ -16,34 +16,31 @@ */ package org.apache.solr; +import java.util.*; import org.apache.solr.common.params.CommonParams; import org.apache.solr.request.*; import org.apache.solr.util.ErrorLogMuter; import org.junit.BeforeClass; import org.junit.Test; -import java.util.*; - - /** * This tests was converted from a legacy testing system. * - * it does not represent the best practices that should be used when - * writing Solr JUnit tests + *
<p>it does not represent the best practices that should be used when writing Solr JUnit tests */ public class ConvertedLegacyTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeTests() throws Exception { - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); } @Test public void testABunchOfConvertedStuff() { // these may be reused by things that need a special query SolrQueryRequest req = null; - Map<String,String> args = new HashMap<>(); - lrf.args.put(CommonParams.VERSION,"2.2"); + Map<String, String> args = new HashMap<>(); + lrf.args.put(CommonParams.VERSION, "2.2"); // compact the index, keep things from getting out of hand @@ -51,26 +48,17 @@ public void testABunchOfConvertedStuff() { // test query - assertQ(req("qlkciyopsbgzyvkylsjhchghjrdf") ,"//result[@numFound='0']" ); + assertQ(req("qlkciyopsbgzyvkylsjhchghjrdf"), "//result[@numFound='0']"); // test escaping of ";" assertU("<delete><id>42</id></delete>"); - assertU("<add><doc><field name=\"id\">42</field><field name=\"val_s\">aa;bb</field></doc></add>"); + assertU( + "<add><doc><field name=\"id\">42</field><field name=\"val_s\">aa;bb</field></doc></add>"); assertU("<commit/>"); - assertQ(req("id:42 AND val_s:aa\\;bb") ,"//*[@numFound='1']" ); - assertQ(req("id:42 AND val_s:\"aa;bb\"") ,"//*[@numFound='1']" ); - assertQ(req("id:42 AND val_s:\"aa\"") ,"//*[@numFound='0']" ); - - + assertQ(req("id:42 AND val_s:aa\\;bb"), "//*[@numFound='1']"); + assertQ(req("id:42 AND val_s:\"aa;bb\""), "//*[@numFound='1']"); + assertQ(req("id:42 AND val_s:\"aa\""), "//*[@numFound='0']"); // test allowDups default of false @@ -78,17 +66,11 @@ public void testABunchOfConvertedStuff() { assertU("<add><doc><field name=\"id\">42</field><field name=\"val_s\">AAA</field></doc></add>"); assertU("<add><doc><field name=\"id\">42</field><field name=\"val_s\">BBB</field></doc></add>"); assertU("<commit/>"); - assertQ(req("id:42") ,"//*[@numFound='1'] " ,"//str[.='BBB']" ); + assertQ(req("id:42"), "//*[@numFound='1'] ", "//str[.='BBB']"); assertU("<add><doc><field name=\"id\">42</field><field name=\"val_s\">CCC</field></doc></add>"); assertU("<add><doc><field name=\"id\">42</field><field name=\"val_s\">DDD</field></doc></add>"); assertU("<commit/>"); - assertQ(req("id:42") ,"//*[@numFound='1'] " ,"//str[.='DDD']" ); + assertQ(req("id:42"), "//*[@numFound='1'] ", "//str[.='DDD']"); assertU("<delete><id>42</id></delete>"); // test deletes @@ -101,910 +83,663 @@ public void testABunchOfConvertedStuff() { assertU("<add><doc><field name=\"id\">103</field></doc></add>"); assertU("<add><doc><field name=\"id\">101</field></doc></add>"); assertU("<commit/>"); - assertQ(req("id:[100 TO 110]") ,"//*[@numFound='4']" ); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='4']"); assertU("<delete><id>102</id></delete>"); assertU("<commit/>"); - assertQ(req("id:[100 TO 110]") ,"//*[@numFound='3']" ); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='3']"); assertU("<delete><query>id:105</query></delete>"); assertU("<commit/>"); - assertQ(req("id:[100 TO 110]") ,"//*[@numFound='2']" ); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='2']"); assertU("<delete><query>id:[100 TO 110]</query></delete>"); assertU("<commit/>"); - assertQ(req("id:[100 TO 110]") ,"//*[@numFound='0']" ); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='0']"); // test range assertU("<delete><id>44</id></delete>"); - assertU("<add overwrite=\"false\"><doc><field name=\"id\">44</field><field name=\"val_s\">apple</field><field name=\"val_s1\">apple</field></doc></add>"); - assertU("<add overwrite=\"false\"><doc><field name=\"id\">44</field><field name=\"val_s\">banana</field><field name=\"val_s1\">banana</field></doc></add>"); - assertU("<add overwrite=\"false\"><doc><field name=\"id\">44</field><field name=\"val_s\">pear</field><field name=\"val_s1\">pear</field></doc></add>"); - assertU("<commit/>"); - assertQ(req("val_s:[a TO z]") ,"//*[@numFound='3'] " ,"*[count(//doc)=3] " ,"//*[@start='0']" ); + assertU( + "<add overwrite=\"false\"><doc><field name=\"id\">44</field><field name=\"val_s\">apple</field><field name=\"val_s1\">apple</field></doc></add>"); + assertU( + "<add overwrite=\"false\"><doc><field name=\"id\">44</field><field name=\"val_s\">banana</field><field name=\"val_s1\">banana</field></doc></add>"); + assertU( + "<add overwrite=\"false\"><doc><field name=\"id\">44</field><field name=\"val_s\">pear</field><field name=\"val_s1\">pear</field></doc></add>"); + assertU("<commit/>"); + assertQ(req("val_s:[a TO z]"), "//*[@numFound='3'] ", "*[count(//doc)=3] ", "//*[@start='0']"); args = new HashMap<>(); - req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", - "/select", 2, 5 , args); - assertQ(req - ,"//*[@numFound='3'] " - ,"*[count(//doc)=1] " - ,"*//doc[1]/str[.='pear'] " - ,"//*[@start='2']" - ); + req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", "/select", 2, 5, args); + assertQ( + req, + "//*[@numFound='3'] ", + "*[count(//doc)=1] ", + "*//doc[1]/str[.='pear'] ", + "//*[@start='2']"); args = new HashMap<>(); - req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", 
- "/select", 3, 5 , args); - assertQ(req - ,"//*[@numFound='3'] " - ,"*[count(//doc)=0]" - ); + req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", "/select", 3, 5, args); + assertQ(req, "//*[@numFound='3'] ", "*[count(//doc)=0]"); args = new HashMap<>(); - req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", - "/select", 4, 5 , args); - assertQ(req - ,"//*[@numFound='3'] " - ,"*[count(//doc)=0]" - ); + req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", "/select", 4, 5, args); + assertQ(req, "//*[@numFound='3'] ", "*[count(//doc)=0]"); args = new HashMap<>(); - req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", - "/select", 25, 5 , args); - assertQ(req - ,"//*[@numFound='3'] " - ,"*[count(//doc)=0]" - ); + req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", "/select", 25, 5, args); + assertQ(req, "//*[@numFound='3'] ", "*[count(//doc)=0]"); args = new HashMap<>(); - req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", - "/select", 0, 1 , args); - assertQ(req - ,"//*[@numFound='3'] " - ,"*[count(//doc)=1] " - ,"*//doc[1]/str[.='apple']" - ); + req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", "/select", 0, 1, args); + assertQ(req, "//*[@numFound='3'] ", "*[count(//doc)=1] ", "*//doc[1]/str[.='apple']"); args = new HashMap<>(); - req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", - "/select", 0, 2 , args); - assertQ(req - ,"//*[@numFound='3'] " - ,"*[count(//doc)=2] " - ,"*//doc[2]/str[.='banana']" - ); + req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", "/select", 0, 2, args); + assertQ(req, "//*[@numFound='3'] ", "*[count(//doc)=2] ", "*//doc[2]/str[.='banana']"); args = new HashMap<>(); - req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", - "/select", 1, 1 , args); - assertQ(req - ,"//*[@numFound='3'] " - ,"*[count(//doc)=1] " - ,"*//doc[1]/str[.='banana']" - ); + req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", "/select", 1, 1, args); + assertQ(req, "//*[@numFound='3'] ", "*[count(//doc)=1] ", "*//doc[1]/str[.='banana']"); args = new HashMap<>(); - req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", - "/select", 3, 1 , args); - assertQ(req - ,"//*[@numFound='3'] " - ,"*[count(//doc)=0]" - ); + req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", "/select", 3, 1, args); + assertQ(req, "//*[@numFound='3'] ", "*[count(//doc)=0]"); args = new HashMap<>(); - req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", - "/select", 4, 1 , args); - assertQ(req - ,"//*[@numFound='3'] " - ,"*[count(//doc)=0]" - ); + req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", "/select", 4, 1, args); + assertQ(req, "//*[@numFound='3'] ", "*[count(//doc)=0]"); args = new HashMap<>(); - req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", - "/select", 1, 0 , args); - assertQ(req - ,"//*[@numFound='3'] " - ,"*[count(//doc)=0]" - ); + req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", "/select", 1, 0, args); + assertQ(req, "//*[@numFound='3'] ", "*[count(//doc)=0]"); args = new HashMap<>(); - req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", - "/select", 0, 0 , args); - assertQ(req - ,"//*[@numFound='3'] " - ,"*[count(//doc)=0]" - ); + req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", "/select", 0, 0, args); + assertQ(req, "//*[@numFound='3'] ", "*[count(//doc)=0]"); args = new HashMap<>(); - args.put("sort","val_s1 asc"); - req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", - "/select", 0, 
0 , args); - assertQ(req - ,"//*[@numFound='3'] " - ,"*[count(//doc)=0]" - ); + args.put("sort", "val_s1 asc"); + req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", "/select", 0, 0, args); + assertQ(req, "//*[@numFound='3'] ", "*[count(//doc)=0]"); args = new HashMap<>(); - args.put("sort","val_s1 desc"); - req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", - "/select", 0, 0 , args); - assertQ(req - ,"//*[@numFound='3'] " - ,"*[count(//doc)=0]" - ); - assertQ(req("val_s:[a TO b]") - ,"//*[@numFound='1']" - ); - assertQ(req("val_s:[a TO cat]") - ,"//*[@numFound='2']" - ); - assertQ(req("val_s:[a TO *]") - ,"//*[@numFound='3']" - ); - assertQ(req("val_s:[* TO z]") - ,"//*[@numFound='3']" - ); - assertQ(req("val_s:[* TO *]") - ,"//*[@numFound='3']" - ); - assertQ(req("val_s:[apple TO pear]") - ,"//*[@numFound='3']" - ); - assertQ(req("val_s:[bear TO boar]") - ,"//*[@numFound='0']" - ); - assertQ(req("val_s:[a TO a]") - ,"//*[@numFound='0']" - ); - assertQ(req("val_s:[apple TO apple]") - ,"//*[@numFound='1']" - ); - assertQ(req("val_s:{apple TO pear}") - ,"//*[@numFound='1']" - ); - assertQ(req("val_s:{a TO z}") - ,"//*[@numFound='3']" - ); - assertQ(req("val_s:{* TO *}") - ,"//*[@numFound='3']" - ); + args.put("sort", "val_s1 desc"); + req = new LocalSolrQueryRequest(h.getCore(), "val_s:[a TO z]", "/select", 0, 0, args); + assertQ(req, "//*[@numFound='3'] ", "*[count(//doc)=0]"); + assertQ(req("val_s:[a TO b]"), "//*[@numFound='1']"); + assertQ(req("val_s:[a TO cat]"), "//*[@numFound='2']"); + assertQ(req("val_s:[a TO *]"), "//*[@numFound='3']"); + assertQ(req("val_s:[* TO z]"), "//*[@numFound='3']"); + assertQ(req("val_s:[* TO *]"), "//*[@numFound='3']"); + assertQ(req("val_s:[apple TO pear]"), "//*[@numFound='3']"); + assertQ(req("val_s:[bear TO boar]"), "//*[@numFound='0']"); + assertQ(req("val_s:[a TO a]"), "//*[@numFound='0']"); + assertQ(req("val_s:[apple TO apple]"), "//*[@numFound='1']"); + assertQ(req("val_s:{apple TO pear}"), "//*[@numFound='1']"); + assertQ(req("val_s:{a TO z}"), "//*[@numFound='3']"); + assertQ(req("val_s:{* TO *}"), "//*[@numFound='3']"); // test rangequery within a boolean query - assertQ(req("id:44 AND val_s:[a TO z]") - ,"//*[@numFound='3']" - ); - assertQ(req("id:44 OR val_s:[a TO z]") - ,"//*[@numFound='3']" - ); - assertQ(req("val_s:[a TO b] OR val_s:[b TO z]") - ,"//*[@numFound='3']" - ); - assertQ(req("+val_s:[a TO b] -val_s:[b TO z]") - ,"//*[@numFound='1']" - ); - assertQ(req("-val_s:[a TO b] +val_s:[b TO z]") - ,"//*[@numFound='2']" - ); - assertQ(req("val_s:[a TO c] AND val_s:[apple TO z]") - ,"//*[@numFound='2']" - ); - assertQ(req("val_s:[a TO c] AND val_s:[a TO apple]") - ,"//*[@numFound='1']" - ); - assertQ(req("id:44 AND (val_s:[a TO c] AND val_s:[a TO apple])") - ,"//*[@numFound='1']" - ); - assertQ(req("(val_s:[apple TO apple] OR val_s:[a TO c]) AND (val_s:[b TO c] OR val_s:[b TO b])") - ,"//*[@numFound='1'] " - ,"//str[.='banana']" - ); - assertQ(req("(val_s:[apple TO apple] AND val_s:[a TO c]) OR (val_s:[p TO z] AND val_s:[a TO z])") - ,"//*[@numFound='2'] " - ,"//str[.='apple'] " - ,"//str[.='pear']" - ); + assertQ(req("id:44 AND val_s:[a TO z]"), "//*[@numFound='3']"); + assertQ(req("id:44 OR val_s:[a TO z]"), "//*[@numFound='3']"); + assertQ(req("val_s:[a TO b] OR val_s:[b TO z]"), "//*[@numFound='3']"); + assertQ(req("+val_s:[a TO b] -val_s:[b TO z]"), "//*[@numFound='1']"); + assertQ(req("-val_s:[a TO b] +val_s:[b TO z]"), "//*[@numFound='2']"); + assertQ(req("val_s:[a TO c] AND val_s:[apple TO z]"), 
"//*[@numFound='2']"); + assertQ(req("val_s:[a TO c] AND val_s:[a TO apple]"), "//*[@numFound='1']"); + assertQ(req("id:44 AND (val_s:[a TO c] AND val_s:[a TO apple])"), "//*[@numFound='1']"); + assertQ( + req("(val_s:[apple TO apple] OR val_s:[a TO c]) AND (val_s:[b TO c] OR val_s:[b TO b])"), + "//*[@numFound='1'] ", + "//str[.='banana']"); + assertQ( + req("(val_s:[apple TO apple] AND val_s:[a TO c]) OR (val_s:[p TO z] AND val_s:[a TO z])"), + "//*[@numFound='2'] ", + "//str[.='apple'] ", + "//str[.='pear']"); // check for docs that appear more than once in a range - assertU("44applebanana"); + assertU( + "44applebanana"); assertU(""); - assertQ(req("val_s:[* TO *] OR val_s:[* TO *]") - ,"//*[@numFound='4']" - ); - assertQ(req("val_s:[* TO *] AND val_s:[* TO *]") - ,"//*[@numFound='4']" - ); - assertQ(req("val_s:[* TO *]") - ,"//*[@numFound='4']" - ); - + assertQ(req("val_s:[* TO *] OR val_s:[* TO *]"), "//*[@numFound='4']"); + assertQ(req("val_s:[* TO *] AND val_s:[* TO *]"), "//*[@numFound='4']"); + assertQ(req("val_s:[* TO *]"), "//*[@numFound='4']"); // 44 - assertU("44red riding hood"); + assertU( + "44red riding hood"); assertU(""); - assertQ(req("id:44 AND red") - ,"//@numFound[.='1'] " - ,"*[count(//doc)=1]" - ); - assertQ(req("id:44 AND ride") - ,"//@numFound[.='1']" - ); - assertQ(req("id:44 AND blue") - ,"//@numFound[.='0']" - ); + assertQ(req("id:44 AND red"), "//@numFound[.='1'] ", "*[count(//doc)=1]"); + assertQ(req("id:44 AND ride"), "//@numFound[.='1']"); + assertQ(req("id:44 AND blue"), "//@numFound[.='0']"); // allow duplicates assertU("44"); - assertU("44red riding hood"); - assertU("44big bad wolf"); + assertU( + "44red riding hood"); + assertU( + "44big bad wolf"); assertU(""); - assertQ(req("id:44") - ,"//@numFound[.='2']" - ); - assertQ(req("id:44 AND red") - ,"//@numFound[.='1'] " - ,"*[count(//doc)=1]" - ); - assertQ(req("id:44 AND wolf") - ,"//@numFound[.='1'] " - ,"*[count(//doc)=1]" - ); - assertQ(req("+id:44 red wolf") - ,"//@numFound[.='2']" - ); + assertQ(req("id:44"), "//@numFound[.='2']"); + assertQ(req("id:44 AND red"), "//@numFound[.='1'] ", "*[count(//doc)=1]"); + assertQ(req("id:44 AND wolf"), "//@numFound[.='1'] ", "*[count(//doc)=1]"); + assertQ(req("+id:44 red wolf"), "//@numFound[.='2']"); // test removal of multiples w/o adding anything else assertU("44"); assertU(""); - assertQ(req("id:44") - ,"//@numFound[.='0']" - ); + assertQ(req("id:44"), "//@numFound[.='0']"); // untokenized string type assertU("44"); - assertU("44and a 10.4 ?"); + assertU( + "44and a 10.4 ?"); assertU(""); - assertQ(req("id:44") - ,"//str[.='and a 10.4 ?']" - ); + assertQ(req("id:44"), "//str[.='and a 10.4 ?']"); assertU("44"); - assertU("44abc123"); + assertU( + "44abc123"); assertU(""); // TODO: how to search for something with spaces.... - assertQ(req("sind:abc123") - ,"//@numFound[.='1'] " - ,"*[count(//@name[.='sind'])=0] " - ,"*[count(//@name[.='id'])=1]" - ); + assertQ( + req("sind:abc123"), + "//@numFound[.='1'] ", + "*[count(//@name[.='sind'])=0] ", + "*[count(//@name[.='id'])=1]"); assertU("44"); assertU("44"); - assertU("44abc123"); + assertU( + "44abc123"); assertU(""); // TODO: how to search for something with spaces.... 
- assertQ(req("sindsto:abc123") - ,"//str[.='abc123']" - ); + assertQ(req("sindsto:abc123"), "//str[.='abc123']"); // test output of multivalued fields assertU("44"); - assertU("44yonik3yonik4"); + assertU( + "44yonik3yonik4"); assertU(""); - assertQ(req("id:44") - ,"//arr[@name='title'][./str='yonik3' and ./str='yonik4'] " - ,"*[count(//@name[.='title'])=1]" - ); - assertQ(req("title:yonik3") - ,"//@numFound[.>'0']" - ); - assertQ(req("title:yonik4") - ,"//@numFound[.>'0']" - ); - assertQ(req("title:yonik5") - ,"//@numFound[.='0']" - ); + assertQ( + req("id:44"), + "//arr[@name='title'][./str='yonik3' and ./str='yonik4'] ", + "*[count(//@name[.='title'])=1]"); + assertQ(req("title:yonik3"), "//@numFound[.>'0']"); + assertQ(req("title:yonik4"), "//@numFound[.>'0']"); + assertQ(req("title:yonik5"), "//@numFound[.='0']"); assertU("title:yonik4"); assertU(""); - assertQ(req("id:44") - ,"//@numFound[.='0']" - ); - + assertQ(req("id:44"), "//@numFound[.='0']"); // not visible until commit assertU("44"); assertU(""); assertU("44"); - assertQ(req("id:44") - ,"//@numFound[.='0']" - ); + assertQ(req("id:44"), "//@numFound[.='0']"); assertU(""); - assertQ(req("id:44") - ,"//@numFound[.='1']" - ); + assertQ(req("id:44"), "//@numFound[.='1']"); // test configurable stop words assertU("44"); - assertU("44world stopworda view"); + assertU( + "44world stopworda view"); assertU(""); - assertQ(req("+id:44 +teststop:world") - ,"//@numFound[.='1']" - ); - assertQ(req("teststop:stopworda") - ,"//@numFound[.='0']" - ); + assertQ(req("+id:44 +teststop:world"), "//@numFound[.='1']"); + assertQ(req("teststop:stopworda"), "//@numFound[.='0']"); // test ignoreCase stop words assertU("44"); - assertU("44world AnD view"); + assertU( + "44world AnD view"); assertU(""); - assertQ(req("+id:44 +stopfilt:world") - ,"//@numFound[.='1']" - ); - assertQ(req("stopfilt:\"and\"") - ,"//@numFound[.='0']" - ); - assertQ(req("stopfilt:\"AND\"") - ,"//@numFound[.='0']" - ); - assertQ(req("stopfilt:\"AnD\"") - ,"//@numFound[.='0']" - ); + assertQ(req("+id:44 +stopfilt:world"), "//@numFound[.='1']"); + assertQ(req("stopfilt:\"and\""), "//@numFound[.='0']"); + assertQ(req("stopfilt:\"AND\""), "//@numFound[.='0']"); + assertQ(req("stopfilt:\"AnD\""), "//@numFound[.='0']"); // test dynamic field types assertU("44"); - assertU("4451778cats"); + assertU( + "4451778cats"); assertU(""); // test if the dyn fields got added - assertQ(req("id:44") - ,"*[count(//doc/*)>=3] " - ,"//arr[@name='gack_i']/int[.='51778'] " - ,"//arr[@name='t_name']/str[.='cats']" - ); + assertQ( + req("id:44"), + "*[count(//doc/*)>=3] ", + "//arr[@name='gack_i']/int[.='51778'] ", + "//arr[@name='t_name']/str[.='cats']"); // now test if we can query by a dynamic field (requires analyzer support) - assertQ(req("t_name:cat") - ,"//arr[@name='t_name' and .='cats']/str" - ); + assertQ(req("t_name:cat"), "//arr[@name='t_name' and .='cats']/str"); // check that deleteByQuery works for dynamic fields assertU("t_name:cat"); assertU(""); - assertQ(req("t_name:cat") - ,"//@numFound[.='0']" - ); + assertQ(req("t_name:cat"), "//@numFound[.='0']"); // test that longest dynamic field match happens first - assertU("44mystr12321"); + assertU( + "44mystr12321"); assertU(""); - assertQ(req("id:44") - ,"//arr[@name='xaa'][.='mystr']/str " - ,"//arr[@name='xaaa'][.='12321']/int" - ); - + assertQ( + req("id:44"), "//arr[@name='xaa'][.='mystr']/str ", "//arr[@name='xaaa'][.='12321']/int"); // test integer ranges and sorting assertU("44"); - assertU("441234567890"); - assertU("4410"); - 
assertU("441"); - assertU("442"); - assertU("4415"); - assertU("44-1"); - assertU("44-987654321"); - assertU("442147483647"); - assertU("44-2147483648"); - assertU("440"); - assertU(""); - assertQ(req("id:44") - ,"*[count(//doc)=10]" - ); - assertQ(req("num_i1:2147483647") - ,"//@numFound[.='1'] " - ,"//int[.='2147483647']" - ); - assertQ(req("num_i1:\"-2147483648\"") - ,"//@numFound[.='1'] " - ,"//int[.='-2147483648']" - ); - assertQ(req("q", "id:44", "sort","num_i1 asc") - ,"//doc[1]/int[.='-2147483648'] " - ,"//doc[last()]/int[.='2147483647']" - ); - assertQ(req("q","id:44","sort","num_i1 desc") - ,"//doc[1]/int[.='2147483647'] " - ,"//doc[last()]/int[.='-2147483648']" - ); - assertQ(req("num_i1:[0 TO 9]") - ,"*[count(//doc)=3]" - ); - assertQ(req("num_i1:[-2147483648 TO 2147483647]") - ,"*[count(//doc)=10]" - ); - assertQ(req("num_i1:[-10 TO -1]") - ,"*[count(//doc)=1]" - ); + assertU( + "441234567890"); + assertU( + "4410"); + assertU( + "441"); + assertU( + "442"); + assertU( + "4415"); + assertU( + "44-1"); + assertU( + "44-987654321"); + assertU( + "442147483647"); + assertU( + "44-2147483648"); + assertU( + "440"); + assertU(""); + assertQ(req("id:44"), "*[count(//doc)=10]"); + assertQ(req("num_i1:2147483647"), "//@numFound[.='1'] ", "//int[.='2147483647']"); + assertQ(req("num_i1:\"-2147483648\""), "//@numFound[.='1'] ", "//int[.='-2147483648']"); + assertQ( + req("q", "id:44", "sort", "num_i1 asc"), + "//doc[1]/int[.='-2147483648'] ", + "//doc[last()]/int[.='2147483647']"); + assertQ( + req("q", "id:44", "sort", "num_i1 desc"), + "//doc[1]/int[.='2147483647'] ", + "//doc[last()]/int[.='-2147483648']"); + assertQ(req("num_i1:[0 TO 9]"), "*[count(//doc)=3]"); + assertQ(req("num_i1:[-2147483648 TO 2147483647]"), "*[count(//doc)=10]"); + assertQ(req("num_i1:[-10 TO -1]"), "*[count(//doc)=1]"); // test long ranges and sorting assertU("44"); - assertU("441234567890"); - assertU("4410"); - assertU("441"); - assertU("442"); - assertU("4415"); - assertU("44-1"); - assertU("44-987654321"); - assertU("449223372036854775807"); - assertU("44-9223372036854775808"); - assertU("440"); - assertU(""); - assertQ(req("id:44") - ,"*[count(//doc)=10]" - ); - assertQ(req("num_l1:9223372036854775807") - ,"//@numFound[.='1'] " - ,"//long[.='9223372036854775807']" - ); - assertQ(req("num_l1:\"-9223372036854775808\"") - ,"//@numFound[.='1'] " - ,"//long[.='-9223372036854775808']" - ); - assertQ(req("q","id:44","sort","num_l1 asc") - ,"//doc[1]/long[.='-9223372036854775808'] " - ,"//doc[last()]/long[.='9223372036854775807']" - ); - assertQ(req("q","id:44", "sort", "num_l1 desc") - ,"//doc[1]/long[.='9223372036854775807'] " - ,"//doc[last()]/long[.='-9223372036854775808']" - ); - assertQ(req("num_l1:[-1 TO 9]") - ,"*[count(//doc)=4]" - ); - assertQ(req("num_l1:[-9223372036854775808 TO 9223372036854775807]") - ,"*[count(//doc)=10]" - ); - assertQ(req("num_l1:[-10 TO -1]") - ,"*[count(//doc)=1]" - ); + assertU( + "441234567890"); + assertU( + "4410"); + assertU( + "441"); + assertU( + "442"); + assertU( + "4415"); + assertU( + "44-1"); + assertU( + "44-987654321"); + assertU( + "449223372036854775807"); + assertU( + "44-9223372036854775808"); + assertU( + "440"); + assertU(""); + assertQ(req("id:44"), "*[count(//doc)=10]"); + assertQ( + req("num_l1:9223372036854775807"), + "//@numFound[.='1'] ", + "//long[.='9223372036854775807']"); + assertQ( + req("num_l1:\"-9223372036854775808\""), + "//@numFound[.='1'] ", + "//long[.='-9223372036854775808']"); + assertQ( + req("q", "id:44", "sort", "num_l1 asc"), + 
"//doc[1]/long[.='-9223372036854775808'] ", + "//doc[last()]/long[.='9223372036854775807']"); + assertQ( + req("q", "id:44", "sort", "num_l1 desc"), + "//doc[1]/long[.='9223372036854775807'] ", + "//doc[last()]/long[.='-9223372036854775808']"); + assertQ(req("num_l1:[-1 TO 9]"), "*[count(//doc)=4]"); + assertQ(req("num_l1:[-9223372036854775808 TO 9223372036854775807]"), "*[count(//doc)=10]"); + assertQ(req("num_l1:[-10 TO -1]"), "*[count(//doc)=1]"); // test binary float ranges and sorting assertU("44"); - assertU("441.4142135"); - assertU("44Infinity"); - assertU("44-Infinity"); - assertU("44NaN"); - assertU("442"); - assertU("44-1"); - assertU("44-987654321"); - assertU("44-999999.99"); - assertU("44-1e20"); - assertU("440"); - assertU(""); - assertQ(req("id:44") - ,"*[count(//doc)=10]" - ); - assertQ(req("num_f1:Infinity") - ,"//@numFound[.='1'] " - ,"//float[.='Infinity']" - ); - assertQ(req("num_f1:\"-Infinity\"") - ,"//@numFound[.='1'] " - ,"//float[.='-Infinity']" - ); - assertQ(req("num_f1:\"NaN\"") - ,"//@numFound[.='1'] " - ,"//float[.='NaN']" - ); - assertQ(req("num_f1:\"-1e20\"") - ,"//@numFound[.='1']" - ); - assertQ(req("q", "id:44", "sort", "num_f1 asc") - ,"//doc[1]/float[.='-Infinity'] " - ,"//doc[last()]/float[.='NaN']" - ); - assertQ(req("q", "id:44", "sort","num_f1 desc") - ,"//doc[1]/float[.='NaN'] " - ,"//doc[last()]/float[.='-Infinity']" - ); - assertQ(req("num_f1:[-1 TO 2]") - ,"*[count(//doc)=4]" - ); - assertQ(req("num_f1:[-Infinity TO Infinity]") - ,"*[count(//doc)=9]" - ); - - + assertU( + "441.4142135"); + assertU( + "44Infinity"); + assertU( + "44-Infinity"); + assertU( + "44NaN"); + assertU( + "442"); + assertU( + "44-1"); + assertU( + "44-987654321"); + assertU( + "44-999999.99"); + assertU( + "44-1e20"); + assertU( + "440"); + assertU(""); + assertQ(req("id:44"), "*[count(//doc)=10]"); + assertQ(req("num_f1:Infinity"), "//@numFound[.='1'] ", "//float[.='Infinity']"); + assertQ(req("num_f1:\"-Infinity\""), "//@numFound[.='1'] ", "//float[.='-Infinity']"); + assertQ(req("num_f1:\"NaN\""), "//@numFound[.='1'] ", "//float[.='NaN']"); + assertQ(req("num_f1:\"-1e20\""), "//@numFound[.='1']"); + assertQ( + req("q", "id:44", "sort", "num_f1 asc"), + "//doc[1]/float[.='-Infinity'] ", + "//doc[last()]/float[.='NaN']"); + assertQ( + req("q", "id:44", "sort", "num_f1 desc"), + "//doc[1]/float[.='NaN'] ", + "//doc[last()]/float[.='-Infinity']"); + assertQ(req("num_f1:[-1 TO 2]"), "*[count(//doc)=4]"); + assertQ(req("num_f1:[-Infinity TO Infinity]"), "*[count(//doc)=9]"); // test binary double ranges and sorting assertU("44"); - assertU("441.4142135"); - assertU("44Infinity"); - assertU("44-Infinity"); - assertU("44NaN"); - assertU("442"); - assertU("44-1"); - assertU("441e-100"); - assertU("44-999999.99"); - assertU("44-1e100"); - assertU("440"); - assertU(""); - assertQ(req("id:44") - ,"*[count(//doc)=10]" - ); - assertQ(req("num_d1:Infinity") - ,"//@numFound[.='1'] " - ,"//double[.='Infinity']" - ); - assertQ(req("num_d1:\"-Infinity\"") - ,"//@numFound[.='1'] " - ,"//double[.='-Infinity']" - ); - assertQ(req("num_d1:\"NaN\"") - ,"//@numFound[.='1'] " - ,"//double[.='NaN']" - ); - assertQ(req("num_d1:\"-1e100\"") - ,"//@numFound[.='1']" - ); - assertQ(req("num_d1:\"1e-100\"") - ,"//@numFound[.='1']" - ); - assertQ(req("q", "id:44", "sort", "num_d1 asc") - ,"//doc[1]/double[.='-Infinity'] " - ,"//doc[last()]/double[.='NaN']" - ); - assertQ(req("q","id:44","sort","num_d1 desc") - ,"//doc[1]/double[.='NaN'] " - ,"//doc[last()]/double[.='-Infinity']" - ); - 
assertQ(req("num_d1:[-1 TO 2]") - ,"*[count(//doc)=5]" - ); - assertQ(req("num_d1:[-Infinity TO Infinity]") - ,"*[count(//doc)=9]" - ); - + assertU( + "441.4142135"); + assertU( + "44Infinity"); + assertU( + "44-Infinity"); + assertU( + "44NaN"); + assertU( + "442"); + assertU( + "44-1"); + assertU( + "441e-100"); + assertU( + "44-999999.99"); + assertU( + "44-1e100"); + assertU( + "440"); + assertU(""); + assertQ(req("id:44"), "*[count(//doc)=10]"); + assertQ(req("num_d1:Infinity"), "//@numFound[.='1'] ", "//double[.='Infinity']"); + assertQ(req("num_d1:\"-Infinity\""), "//@numFound[.='1'] ", "//double[.='-Infinity']"); + assertQ(req("num_d1:\"NaN\""), "//@numFound[.='1'] ", "//double[.='NaN']"); + assertQ(req("num_d1:\"-1e100\""), "//@numFound[.='1']"); + assertQ(req("num_d1:\"1e-100\""), "//@numFound[.='1']"); + assertQ( + req("q", "id:44", "sort", "num_d1 asc"), + "//doc[1]/double[.='-Infinity'] ", + "//doc[last()]/double[.='NaN']"); + assertQ( + req("q", "id:44", "sort", "num_d1 desc"), + "//doc[1]/double[.='NaN'] ", + "//doc[last()]/double[.='-Infinity']"); + assertQ(req("num_d1:[-1 TO 2]"), "*[count(//doc)=5]"); + assertQ(req("num_d1:[-Infinity TO Infinity]"), "*[count(//doc)=9]"); // test sorting on multiple fields assertU("44"); - assertU("4410"); - assertU("441100"); - assertU("44-1"); - assertU("4415"); - assertU("44150"); - assertU("440"); - assertU(""); - assertQ(req("id:44") - ,"*[count(//doc)=6]" - ); - - assertQ(req("q","id:44", "sort", "a_i1 asc,b_i1 desc") - ,"*[count(//doc)=6] " - ,"//doc[3]/int[.='100'] " - ,"//doc[4]/int[.='50']" - ); - assertQ(req("q","id:44", "sort", "a_i1 asc , b_i1 asc") - ,"*[count(//doc)=6] " - ,"//doc[3]/int[.='50'] " - ,"//doc[4]/int[.='100']" - ); - assertQ(req("q", "id:44", "sort", "a_i1 asc") - ,"*[count(//doc)=6] " - ,"//doc[1]/int[.='-1'] " - ,"//doc[last()]/int[.='15']" - ); - assertQ(req("q","id:44","sort","a_i1 asc , score top") - ,"*[count(//doc)=6] " - ,"//doc[1]/int[.='-1'] " - ,"//doc[last()]/int[.='15']" - ); - assertQ(req("q","id:44","sort","score top , a_i1 top, b_i1 bottom ") - ,"*[count(//doc)=6] " - ,"//doc[last()]/int[.='-1'] " - ,"//doc[1]/int[.='15'] " - ,"//doc[3]/int[.='50'] " - ,"//doc[4]/int[.='100']" - ); - + assertU( + "4410"); + assertU( + "441100"); + assertU( + "44-1"); + assertU( + "4415"); + assertU( + "44150"); + assertU( + "440"); + assertU(""); + assertQ(req("id:44"), "*[count(//doc)=6]"); + + assertQ( + req("q", "id:44", "sort", "a_i1 asc,b_i1 desc"), + "*[count(//doc)=6] ", + "//doc[3]/int[.='100'] ", + "//doc[4]/int[.='50']"); + assertQ( + req("q", "id:44", "sort", "a_i1 asc , b_i1 asc"), + "*[count(//doc)=6] ", + "//doc[3]/int[.='50'] ", + "//doc[4]/int[.='100']"); + assertQ( + req("q", "id:44", "sort", "a_i1 asc"), + "*[count(//doc)=6] ", + "//doc[1]/int[.='-1'] ", + "//doc[last()]/int[.='15']"); + assertQ( + req("q", "id:44", "sort", "a_i1 asc , score top"), + "*[count(//doc)=6] ", + "//doc[1]/int[.='-1'] ", + "//doc[last()]/int[.='15']"); + assertQ( + req("q", "id:44", "sort", "score top , a_i1 top, b_i1 bottom "), + "*[count(//doc)=6] ", + "//doc[last()]/int[.='-1'] ", + "//doc[1]/int[.='15'] ", + "//doc[3]/int[.='50'] ", + "//doc[4]/int[.='100']"); // test sorting with some docs missing the sort field assertU("id_i:[1000 TO 1010]"); - assertU("10001Z"); - assertU("100110A"); - assertU("10021100"); - assertU("1003-1"); - assertU("100415"); - assertU("1005150"); - assertU("10060"); - assertU(""); - assertQ(req("id_i:[1000 TO 1010]") - ,"*[count(//doc)=7]" - ); - assertQ(req("q","id_i:[1000 TO 
1010]","sort","b_i1 asc") - ,"*[count(//doc)=7] " - ,"//doc[1]/int[.='50'] " - ,"//doc[2]/int[.='100']" - ); - assertQ(req("q","id_i:[1000 TO 1010]","sort"," b_i1 desc") - ,"*[count(//doc)=7] " - ,"//doc[1]/int[.='100'] " - ,"//doc[2]/int[.='50']" - ); - assertQ(req("q","id_i:[1000 TO 1010]","sort"," a_i1 asc,b_i1 desc") - ,"*[count(//doc)=7] " - ,"//doc[3]/int[@name='b_i1' and .='100'] " - ,"//doc[4]/int[@name='b_i1' and .='50'] " - ,"//doc[5]/arr[@name='id_i' and .='1000']" - ); - assertQ(req("q","id_i:[1000 TO 1010]","sort"," a_i1 asc,b_i1 asc") - ,"*[count(//doc)=7] " - ,"//doc[3]/int[@name='b_i1' and .='50'] " - ,"//doc[4]/int[@name='b_i1' and .='100'] " - ,"//doc[5]/arr[@name='id_i' and .='1000']" - ); + assertU( + "10001Z"); + assertU( + "100110A"); + assertU( + "10021100"); + assertU( + "1003-1"); + assertU( + "100415"); + assertU( + "1005150"); + assertU( + "10060"); + assertU(""); + assertQ(req("id_i:[1000 TO 1010]"), "*[count(//doc)=7]"); + assertQ( + req("q", "id_i:[1000 TO 1010]", "sort", "b_i1 asc"), + "*[count(//doc)=7] ", + "//doc[1]/int[.='50'] ", + "//doc[2]/int[.='100']"); + assertQ( + req("q", "id_i:[1000 TO 1010]", "sort", " b_i1 desc"), + "*[count(//doc)=7] ", + "//doc[1]/int[.='100'] ", + "//doc[2]/int[.='50']"); + assertQ( + req("q", "id_i:[1000 TO 1010]", "sort", " a_i1 asc,b_i1 desc"), + "*[count(//doc)=7] ", + "//doc[3]/int[@name='b_i1' and .='100'] ", + "//doc[4]/int[@name='b_i1' and .='50'] ", + "//doc[5]/arr[@name='id_i' and .='1000']"); + assertQ( + req("q", "id_i:[1000 TO 1010]", "sort", " a_i1 asc,b_i1 asc"), + "*[count(//doc)=7] ", + "//doc[3]/int[@name='b_i1' and .='50'] ", + "//doc[4]/int[@name='b_i1' and .='100'] ", + "//doc[5]/arr[@name='id_i' and .='1000']"); // nullfirst tests - assertQ(req("q","id_i:[1000 TO 1002]","sort"," nullfirst asc") - ,"*[count(//doc)=3] " - ,"//doc[1]/arr[@name='id_i' and .='1002']" - ,"//doc[2]/arr[@name='id_i' and .='1001'] " - ,"//doc[3]/arr[@name='id_i' and .='1000']" - ); - assertQ(req("q","id_i:[1000 TO 1002]","sort"," nullfirst desc") - ,"*[count(//doc)=3] " - ,"//doc[1]/arr[@name='id_i' and .='1002']" - ,"//doc[2]/arr[@name='id_i' and .='1000'] " - ,"//doc[3]/arr[@name='id_i' and .='1001']" - ); + assertQ( + req("q", "id_i:[1000 TO 1002]", "sort", " nullfirst asc"), + "*[count(//doc)=3] ", + "//doc[1]/arr[@name='id_i' and .='1002']", + "//doc[2]/arr[@name='id_i' and .='1001'] ", + "//doc[3]/arr[@name='id_i' and .='1000']"); + assertQ( + req("q", "id_i:[1000 TO 1002]", "sort", " nullfirst desc"), + "*[count(//doc)=3] ", + "//doc[1]/arr[@name='id_i' and .='1002']", + "//doc[2]/arr[@name='id_i' and .='1000'] ", + "//doc[3]/arr[@name='id_i' and .='1001']"); // Sort parsing exception tests. 
(SOLR-6, SOLR-99) try (ErrorLogMuter errors = ErrorLogMuter.substring("shouldbeunindexed")) { - assertQEx( "can not sort unindexed fields", - req( "q","id_i:1000", "sort", "shouldbeunindexed asc" ), 400 ); + assertQEx( + "can not sort unindexed fields", + req("q", "id_i:1000", "sort", "shouldbeunindexed asc"), + 400); assertEquals(1, errors.getCount()); } - + try (ErrorLogMuter errors = ErrorLogMuter.substring("nullfirst")) { - assertQEx( "invalid query format", - req( "q","id_i:1000", "sort", "nullfirst" ), 400 ); + assertQEx("invalid query format", req("q", "id_i:1000", "sort", "nullfirst"), 400); assertEquals(1, errors.getCount()); } try (ErrorLogMuter abc = ErrorLogMuter.substring("abcde12345"); - ErrorLogMuter aaa = ErrorLogMuter.substring("aaa")) { - assertQEx( "unknown sort field", - req( "q","id_i:1000", "sort", "abcde12345 asc" ), 400 ); + ErrorLogMuter aaa = ErrorLogMuter.substring("aaa")) { + assertQEx("unknown sort field", req("q", "id_i:1000", "sort", "abcde12345 asc"), 400); + + assertQEx("unknown sort order", req("q", "id_i:1000", "sort", "nullfirst aaa"), 400); - assertQEx( "unknown sort order", - req( "q","id_i:1000", "sort", "nullfirst aaa" ), 400 ); - assertEquals(1, abc.getCount()); assertEquals(1, aaa.getCount()); } - + // test prefix query assertU("val_s:[* TO *]"); - assertU("100apple"); - assertU("101banana"); - assertU("102apple"); - assertU("103pearing"); - assertU("104pear"); - assertU("105appalling"); - assertU("106pearson"); - assertU("107port"); - assertU(""); - - assertQ(req("val_s:a*") - ,"//*[@numFound='3']" - ); - assertQ(req("val_s:p*") - ,"//*[@numFound='4']" - ); + assertU( + "100apple"); + assertU( + "101banana"); + assertU( + "102apple"); + assertU( + "103pearing"); + assertU( + "104pear"); + assertU( + "105appalling"); + assertU( + "106pearson"); + assertU( + "107port"); + assertU(""); + + assertQ(req("val_s:a*"), "//*[@numFound='3']"); + assertQ(req("val_s:p*"), "//*[@numFound='4']"); // val_s:* %//*[@numFound="8"] // test wildcard query - assertQ(req("val_s:a*p*") ,"//*[@numFound='3']"); - assertQ(req("val_s:p?a*") ,"//*[@numFound='3']"); + assertQ(req("val_s:a*p*"), "//*[@numFound='3']"); + assertQ(req("val_s:p?a*"), "//*[@numFound='3']"); assertU("id:[100 TO 110]"); // test copyField functionality - assertU("42How Now4 brown Cows"); + assertU( + "42How Now4 brown Cows"); assertU(""); - assertQ(req("id:42 AND title:Now") - ,"*[count(//doc)=0]" - ); - assertQ(req("id:42 AND title_lettertok:Now") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND title:cow") - ,"*[count(//doc)=0]" - ); - assertQ(req("id:42 AND title_stemmed:cow") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND text:cow") - ,"*[count(//doc)=1]" - ); + assertQ(req("id:42 AND title:Now"), "*[count(//doc)=0]"); + assertQ(req("id:42 AND title_lettertok:Now"), "*[count(//doc)=1]"); + assertQ(req("id:42 AND title:cow"), "*[count(//doc)=0]"); + assertQ(req("id:42 AND title_stemmed:cow"), "*[count(//doc)=1]"); + assertQ(req("id:42 AND text:cow"), "*[count(//doc)=1]"); // test copyField functionality with a pattern. 
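(Background for the copyField assertions above, before the pattern-based test that follows: the schema copies the raw "title" value into differently analyzed fields, which is why title:cow finds nothing while title_stemmed:cow matches "Cows". The standalone sketch below illustrates the effect with Lucene's stock EnglishAnalyzer; the test's actual "title_stemmed" chain is defined in its schema and may differ.)

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.en.EnglishAnalyzer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

public class StemmingSketch {
  public static void main(String[] args) throws Exception {
    // Lowercasing plus stemming folds "Cows" down to "cow", so the query term
    // "cow" matches the indexed token even though the source text never
    // contained that exact word.
    try (Analyzer analyzer = new EnglishAnalyzer();
        TokenStream ts = analyzer.tokenStream("title_stemmed", "How Now4 brown Cows")) {
      CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
      ts.reset();
      while (ts.incrementToken()) {
        System.out.println(term.toString()); // how, now4, brown, cow
      }
      ts.end();
    }
  }
}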
- assertU("42Copy me to the text field pretty please."); + assertU( + "42Copy me to the text field pretty please."); assertU(""); - assertQ(req("id:42 AND text:pretty") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND copy_t:pretty") - ,"*[count(//doc)=1]" - ); - + assertQ(req("id:42 AND text:pretty"), "*[count(//doc)=1]"); + assertQ(req("id:42 AND copy_t:pretty"), "*[count(//doc)=1]"); + // test slop - assertU("42foo bar"); + assertU( + "42foo bar"); assertU(""); - assertQ(req("id:42 AND text:\"foo bar\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND text:\"foo\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND text:\"bar\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND text:\"bar foo\"") - ,"*[count(//doc)=0]" - ); - assertQ(req("id:42 AND text:\"bar foo\"~2") - ,"*[count(//doc)=1]" - ); - + assertQ(req("id:42 AND text:\"foo bar\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND text:\"foo\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND text:\"bar\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND text:\"bar foo\""), "*[count(//doc)=0]"); + assertQ(req("id:42 AND text:\"bar foo\"~2"), "*[count(//doc)=1]"); // intra-word delimiter testing (WordDelimiterGraphFilter) - assertU("42foo bar"); - assertU(""); - assertQ(req("id:42 AND subword:\"foo bar\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"foo\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"bar\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"bar foo\"") - ,"*[count(//doc)=0]" - ); - assertQ(req("id:42 AND subword:\"bar foo\"~2") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"foo/bar\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:foobar") - ,"*[count(//doc)=0]" - ); - - assertU("42foo-bar"); - assertU(""); - assertQ(req("id:42 AND subword:\"foo bar\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"foo\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"bar\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"bar foo\"") - ,"*[count(//doc)=0]" - ); - assertQ(req("id:42 AND subword:\"bar foo\"~2") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"foo/bar\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:foobar") - ,"*[count(//doc)=1]" - ); - - assertU("42Canon PowerShot SD500 7MP"); - assertU(""); - assertQ(req("id:42 AND subword:\"power-shot\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"power shot sd 500\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"powershot\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"SD-500\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"SD500\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"SD500-7MP\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"PowerShotSD500-7MP\"") - ,"*[count(//doc)=1]" - ); - - assertU("42Wi-Fi"); - assertU(""); - assertQ(req("id:42 AND subword:wifi") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:wi+=fi") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:wi+=fi") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:WiFi") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"wi fi\"") - ,"*[count(//doc)=1]" - ); - - assertU("42'I.B.M' A's,B's,C's"); - assertU(""); - assertQ(req("id:42 AND subword:\"'I.B.M.'\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:I.B.M") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND 
subword:IBM") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:I--B--M") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"I B M\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:IBM's") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"IBM'sx\"") - ,"*[count(//doc)=0]" - ); + assertU( + "42foo bar"); + assertU(""); + assertQ(req("id:42 AND subword:\"foo bar\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"foo\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"bar\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"bar foo\""), "*[count(//doc)=0]"); + assertQ(req("id:42 AND subword:\"bar foo\"~2"), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"foo/bar\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:foobar"), "*[count(//doc)=0]"); + + assertU( + "42foo-bar"); + assertU(""); + assertQ(req("id:42 AND subword:\"foo bar\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"foo\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"bar\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"bar foo\""), "*[count(//doc)=0]"); + assertQ(req("id:42 AND subword:\"bar foo\"~2"), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"foo/bar\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:foobar"), "*[count(//doc)=1]"); + + assertU( + "42Canon PowerShot SD500 7MP"); + assertU(""); + assertQ(req("id:42 AND subword:\"power-shot\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"power shot sd 500\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"powershot\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"SD-500\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"SD500\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"SD500-7MP\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"PowerShotSD500-7MP\""), "*[count(//doc)=1]"); + + assertU( + "42Wi-Fi"); + assertU(""); + assertQ(req("id:42 AND subword:wifi"), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:wi+=fi"), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:wi+=fi"), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:WiFi"), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"wi fi\""), "*[count(//doc)=1]"); + + assertU( + "42'I.B.M' A's,B's,C's"); + assertU(""); + assertQ(req("id:42 AND subword:\"'I.B.M.'\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:I.B.M"), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:IBM"), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:I--B--M"), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"I B M\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:IBM's"), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"IBM'sx\""), "*[count(//doc)=0]"); // this one fails since IBM and ABC are separated by two tokens // id:42 AND subword:IBM's-ABC's %*[count(//doc)=1] - assertQ(req("id:42 AND subword:\"IBM's-ABC's\"~2") - ,"*[count(//doc)=1]" - ); + assertQ(req("id:42 AND subword:\"IBM's-ABC's\"~2"), "*[count(//doc)=1]"); - assertQ(req("id:42 AND subword:\"A's B's-C's\"") - ,"*[count(//doc)=1]" - ); + assertQ(req("id:42 AND subword:\"A's B's-C's\""), "*[count(//doc)=1]"); - assertU("42Sony KDF-E50A10"); + assertU( + "42Sony KDF-E50A10"); assertU(""); // check for exact match: @@ -1013,239 +748,163 @@ public void testABunchOfConvertedStuff() { // Sony KDF E 50 A 10 (and how it's queried) - assertQ(req("id:42 AND subword:\"Sony KDF-E50A10\"") - 
,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:10") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:Sony") - ,"*[count(//doc)=1]" - ); + assertQ(req("id:42 AND subword:\"Sony KDF-E50A10\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:10"), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:Sony"), "*[count(//doc)=1]"); // this one fails without slop since Sony and KDFE have a token inbetween // id:42 AND subword:SonyKDFE50A10 %*[count(//doc)=1] - assertQ(req("id:42 AND subword:\"SonyKDFE50A10\"~10") - ,"*[count(//doc)=1]" - ); - - assertQ(req("id:42 AND subword:\"Sony KDF E-50-A-10\"") - ,"*[count(//doc)=1]" - ); - - assertU("42http://www.yahoo.com"); - assertU(""); - assertQ(req("id:42 AND subword:yahoo") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:www.yahoo.com") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:http\\://www.yahoo.com") - ,"*[count(//doc)=1]" - ); - - assertU("42--Q 1-- W2 E-3 Ok xY 4R 5-T *6-Y- 7-8-- 10A-B"); - assertU(""); - assertQ(req("id:42 AND subword:Q") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:1") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"w 2\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"e 3\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"o k\"") - ,"*[count(//doc)=0]" - ); - assertQ(req("id:42 AND subword:\"ok\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"x y\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"xy\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"4 r\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"5 t\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"5 t\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"6 y\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"7 8\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"78\"") - ,"*[count(//doc)=1]" - ); - assertQ(req("id:42 AND subword:\"10 A+B\"") - ,"*[count(//doc)=1]" - ); - - assertU("42FooBarBaz"); - assertU("42FooBar10"); - assertU("4210FooBar"); - assertU("42BAZ"); - assertU("4210"); - assertU("42Mark, I found what's the problem! It turns to be from the latest schema. I found tons of exceptions in the resin.stdout that prevented the builder from performing. 
It's all coming from the WordDelimiterFilter which was just added to the latest schema: [2005-08-29 15:11:38.375] java.lang.IndexOutOfBoundsException: Index: 3, Size: 3 673804 [2005-08-29 15:11:38.375] at java.util.ArrayList.RangeCheck(ArrayList.java:547) 673805 [2005-08-29 15:11:38.375] at java.util.ArrayList.get(ArrayList.java:322) 673806 [2005-08-29 15:11:38.375] at solr.analysis.WordDelimiterFilter.addCombos(WordDelimiterFilter.java:349) 673807 [2005-08-29 15:11:38.375] at solr.analysis.WordDelimiterFilter.next(WordDelimiterFilter.java:325) 673808 [2005-08-29 15:11:38.375] at org.apache.lucene.analysis.LowerCaseFilter.next(LowerCaseFilter.java:32) 673809 [2005-08-29 15:11:38.375] at org.apache.lucene.analysis.StopFilter.next(StopFilter.java:98) 673810 [2005-08-29 15:11:38.375] at solr.EnglishPorterFilter.next(TokenizerFactory.java:163) 673811 [2005-08-29 15:11:38.375] at org.apache.lucene.index.DocumentWriter.invertDocument(DocumentWriter.java:143) 673812 [2005-08-29 15:11:38.375] at org.apache.lucene.index.DocumentWriter.addDocument(DocumentWriter.java:81) 673813 [2005-08-29 15:11:38.375] at org.apache.lucene.index.IndexWriter.addDocument(IndexWriter.java:307) 673814 [2005-08-29 15:11:38.375] at org.apache.lucene.index.IndexWriter.addDocument(IndexWriter.java:294) 673815 [2005-08-29 15:11:38.375] at solr.DirectUpdateHandler2.doAdd(DirectUpdateHandler2.java:170) 673816 [2005-08-29 15:11:38.375] at solr.DirectUpdateHandler2.overwriteBoth(DirectUpdateHandler2.java:317) 673817 [2005-08-29 15:11:38.375] at solr.DirectUpdateHandler2.addDoc(DirectUpdateHandler2.java:191) 673818 [2005-08-29 15:11:38.375] at solr.SolrCore.update(SolrCore.java:795) 673819 [2005-08-29 15:11:38.375] at solrserver.SolrServlet.doPost(SolrServlet.java:71) 673820 [2005-08-29 15:11:38.375] at javax.servlet.http.HttpServlet.service(HttpServlet.java:154) 673821 [2005-08-29 15:11:38.375] at javax.servlet.http.HttpServlet.service(HttpServlet.java:92) 673822 [2005-08-29 15:11:38.375] at com.caucho.server.dispatch.ServletFilterChain.doFilter(ServletFilterChain.java:99) 673823 [2005-08-29 15:11:38.375] at com.caucho.server.cache.CacheFilterChain.doFilter(CacheFilterChain.java:188) 673824 [2005-08-29 15:11:38.375] at com.caucho.server.webapp.WebAppFilterChain.doFilter(WebAppFilterChain.java:163) 673825 [2005-08-29 15:11:38.375] at com.caucho.server.dispatch.ServletInvocation.service(ServletInvocation.java:208) 673826 [2005-08-29 15:11:38.375] at com.caucho.server.http.HttpRequest.handleRequest(HttpRequest.java:259) 673827 [2005-08-29 15:11:38.375] at com.caucho.server.port.TcpConnection.run(TcpConnection.java:363) 673828 [2005-08-29 15:11:38.375] at com.caucho.util.ThreadPool.runTasks(ThreadPool.java:490) 673829 [2005-08-29 15:11:38.375] at com.caucho.util.ThreadPool.run(ThreadPool.java:423) 673830 [2005-08-29 15:11:38.375] at java.lang.Thread.run(Thread.java:595) With the previous schema I'm able to perform a successful full build: http://c12-ssa-dev40-so-mas1.cnet.com:5078/select/?stylesheet=q=docTypeversion=2.0start=0rows=10indent=on Do you want to rollback to the previous schema version"); - - - // + assertQ(req("id:42 AND subword:\"SonyKDFE50A10\"~10"), "*[count(//doc)=1]"); + + assertQ(req("id:42 AND subword:\"Sony KDF E-50-A-10\""), "*[count(//doc)=1]"); + + assertU( + "42http://www.yahoo.com"); + assertU(""); + assertQ(req("id:42 AND subword:yahoo"), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:www.yahoo.com"), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:http\\://www.yahoo.com"), 
"*[count(//doc)=1]"); + + assertU( + "42--Q 1-- W2 E-3 Ok xY 4R 5-T *6-Y- 7-8-- 10A-B"); + assertU(""); + assertQ(req("id:42 AND subword:Q"), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:1"), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"w 2\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"e 3\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"o k\""), "*[count(//doc)=0]"); + assertQ(req("id:42 AND subword:\"ok\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"x y\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"xy\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"4 r\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"5 t\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"5 t\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"6 y\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"7 8\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"78\""), "*[count(//doc)=1]"); + assertQ(req("id:42 AND subword:\"10 A+B\""), "*[count(//doc)=1]"); + + assertU( + "42FooBarBaz"); + assertU( + "42FooBar10"); + assertU( + "4210FooBar"); + assertU( + "42BAZ"); + assertU( + "4210"); + assertU( + "42Mark, I found what's the problem! It turns to be from the latest schema. I found tons of exceptions in the resin.stdout that prevented the builder from performing. It's all coming from the WordDelimiterFilter which was just added to the latest schema: [2005-08-29 15:11:38.375] java.lang.IndexOutOfBoundsException: Index: 3, Size: 3 673804 [2005-08-29 15:11:38.375] at java.util.ArrayList.RangeCheck(ArrayList.java:547) 673805 [2005-08-29 15:11:38.375] at java.util.ArrayList.get(ArrayList.java:322) 673806 [2005-08-29 15:11:38.375] at solr.analysis.WordDelimiterFilter.addCombos(WordDelimiterFilter.java:349) 673807 [2005-08-29 15:11:38.375] at solr.analysis.WordDelimiterFilter.next(WordDelimiterFilter.java:325) 673808 [2005-08-29 15:11:38.375] at org.apache.lucene.analysis.LowerCaseFilter.next(LowerCaseFilter.java:32) 673809 [2005-08-29 15:11:38.375] at org.apache.lucene.analysis.StopFilter.next(StopFilter.java:98) 673810 [2005-08-29 15:11:38.375] at solr.EnglishPorterFilter.next(TokenizerFactory.java:163) 673811 [2005-08-29 15:11:38.375] at org.apache.lucene.index.DocumentWriter.invertDocument(DocumentWriter.java:143) 673812 [2005-08-29 15:11:38.375] at org.apache.lucene.index.DocumentWriter.addDocument(DocumentWriter.java:81) 673813 [2005-08-29 15:11:38.375] at org.apache.lucene.index.IndexWriter.addDocument(IndexWriter.java:307) 673814 [2005-08-29 15:11:38.375] at org.apache.lucene.index.IndexWriter.addDocument(IndexWriter.java:294) 673815 [2005-08-29 15:11:38.375] at solr.DirectUpdateHandler2.doAdd(DirectUpdateHandler2.java:170) 673816 [2005-08-29 15:11:38.375] at solr.DirectUpdateHandler2.overwriteBoth(DirectUpdateHandler2.java:317) 673817 [2005-08-29 15:11:38.375] at solr.DirectUpdateHandler2.addDoc(DirectUpdateHandler2.java:191) 673818 [2005-08-29 15:11:38.375] at solr.SolrCore.update(SolrCore.java:795) 673819 [2005-08-29 15:11:38.375] at solrserver.SolrServlet.doPost(SolrServlet.java:71) 673820 [2005-08-29 15:11:38.375] at javax.servlet.http.HttpServlet.service(HttpServlet.java:154) 673821 [2005-08-29 15:11:38.375] at javax.servlet.http.HttpServlet.service(HttpServlet.java:92) 673822 [2005-08-29 15:11:38.375] at com.caucho.server.dispatch.ServletFilterChain.doFilter(ServletFilterChain.java:99) 673823 [2005-08-29 15:11:38.375] at 
com.caucho.server.cache.CacheFilterChain.doFilter(CacheFilterChain.java:188) 673824 [2005-08-29 15:11:38.375] at com.caucho.server.webapp.WebAppFilterChain.doFilter(WebAppFilterChain.java:163) 673825 [2005-08-29 15:11:38.375] at com.caucho.server.dispatch.ServletInvocation.service(ServletInvocation.java:208) 673826 [2005-08-29 15:11:38.375] at com.caucho.server.http.HttpRequest.handleRequest(HttpRequest.java:259) 673827 [2005-08-29 15:11:38.375] at com.caucho.server.port.TcpConnection.run(TcpConnection.java:363) 673828 [2005-08-29 15:11:38.375] at com.caucho.util.ThreadPool.runTasks(ThreadPool.java:490) 673829 [2005-08-29 15:11:38.375] at com.caucho.util.ThreadPool.run(ThreadPool.java:423) 673830 [2005-08-29 15:11:38.375] at java.lang.Thread.run(Thread.java:595) With the previous schema I'm able to perform a successful full build: http://c12-ssa-dev40-so-mas1.cnet.com:5078/select/?stylesheet=q=docTypeversion=2.0start=0rows=10indent=on Do you want to rollback to the previous schema version"); + + // assertU("44"); - assertU("44Yoniktrue10000000000software engineer1e1003.14159622005-03-18T01:14:34Z1.414213562.999"); + assertU( + "44Yoniktrue10000000000software engineer1e1003.14159622005-03-18T01:14:34Z1.414213562.999"); assertU(""); - assertQ(req("id:44") - ); + assertQ(req("id:44")); args = new HashMap<>(); - args.put("fl","fname_s,arr_f "); - req = new LocalSolrQueryRequest(h.getCore(), "id:44", - "/select", 0, 10, args); - assertQ(req - ,"//str[.='Yonik'] " - ,"//float[.='1.4142135']" - ); + args.put("fl", "fname_s,arr_f "); + req = new LocalSolrQueryRequest(h.getCore(), "id:44", "/select", 0, 10, args); + assertQ(req, "//str[.='Yonik'] ", "//float[.='1.4142135']"); args = new HashMap<>(); - args.put("fl","fname_s,score"); - req = new LocalSolrQueryRequest(h.getCore(), "id:44", - "/select", 0, 10, args); - assertQ(req - ,"//str[.='Yonik']" - ,"//float[@name='score' and . > 0]" - ); + args.put("fl", "fname_s,score"); + req = new LocalSolrQueryRequest(h.getCore(), "id:44", "/select", 0, 10, args); + assertQ(req, "//str[.='Yonik']", "//float[@name='score' and . 
> 0]"); // test addition of score field args = new HashMap<>(); - args.put("fl","score,* "); - req = new LocalSolrQueryRequest(h.getCore(), "id:44", - "/select", 0, 10, args); - assertQ(req - ,"//str[.='Yonik'] " - ,"//float[.='1.4142135'] " - ,"//float[@name='score'] " - ,"*[count(//doc/*)>=13]" - ); + args.put("fl", "score,* "); + req = new LocalSolrQueryRequest(h.getCore(), "id:44", "/select", 0, 10, args); + assertQ( + req, + "//str[.='Yonik'] ", + "//float[.='1.4142135'] ", + "//float[@name='score'] ", + "*[count(//doc/*)>=13]"); args = new HashMap<>(); - args.put("fl","*,score "); - req = new LocalSolrQueryRequest(h.getCore(), "id:44", - "/select", 0, 10, args); - assertQ(req - ,"//str[.='Yonik'] " - ,"//float[.='1.4142135'] " - ,"//float[@name='score'] " - ,"*[count(//doc/*)>=13]" - ); + args.put("fl", "*,score "); + req = new LocalSolrQueryRequest(h.getCore(), "id:44", "/select", 0, 10, args); + assertQ( + req, + "//str[.='Yonik'] ", + "//float[.='1.4142135'] ", + "//float[@name='score'] ", + "*[count(//doc/*)>=13]"); args = new HashMap<>(); - args.put("fl","* "); - req = new LocalSolrQueryRequest(h.getCore(), "id:44", - "/select", 0, 10, args); - assertQ(req - ,"//str[.='Yonik'] " - ,"//float[.='1.4142135'] " - ,"*[count(//doc/*)>=12]" - ); + args.put("fl", "* "); + req = new LocalSolrQueryRequest(h.getCore(), "id:44", "/select", 0, 10, args); + assertQ(req, "//str[.='Yonik'] ", "//float[.='1.4142135'] ", "*[count(//doc/*)>=12]"); // test maxScore args = new HashMap<>(); - args.put("fl","score "); - req = new LocalSolrQueryRequest(h.getCore(), "id:44", - "/select", 0, 10, args); - assertQ(req - ,"//result[@maxScore>0]" - ); + args.put("fl", "score "); + req = new LocalSolrQueryRequest(h.getCore(), "id:44", "/select", 0, 10, args); + assertQ(req, "//result[@maxScore>0]"); args = new HashMap<>(); - args.put("fl","score "); - args.put("sort","id desc"); - req = new LocalSolrQueryRequest(h.getCore(), "id:44", - "/select", 0, 10, args); - assertQ(req - ,"//result[@maxScore>0]" - ); + args.put("fl", "score "); + args.put("sort", "id desc"); + req = new LocalSolrQueryRequest(h.getCore(), "id:44", "/select", 0, 10, args); + assertQ(req, "//result[@maxScore>0]"); args = new HashMap<>(); - args.put("fl","score "); - req = new LocalSolrQueryRequest(h.getCore(), "id:44", - "/select", 0, 10, args); - assertQ(req - ,"//@maxScore = //doc/float[@name='score']" - ); + args.put("fl", "score "); + req = new LocalSolrQueryRequest(h.getCore(), "id:44", "/select", 0, 10, args); + assertQ(req, "//@maxScore = //doc/float[@name='score']"); args = new HashMap<>(); - args.put("fl","score "); - args.put("sort","id desc"); - req = new LocalSolrQueryRequest(h.getCore(), "id:44", - "/select", 0, 10, args); - assertQ(req - ,"//@maxScore = //doc/float[@name='score']" - ); + args.put("fl", "score "); + args.put("sort", "id desc"); + req = new LocalSolrQueryRequest(h.getCore(), "id:44", "/select", 0, 10, args); + assertQ(req, "//@maxScore = //doc/float[@name='score']"); args = new HashMap<>(); - args.put("fl","*,score"); - args.put("sort","id desc"); - req = new LocalSolrQueryRequest(h.getCore(), "id:44", - "/select", 0, 0 , args); - assertQ(req - ,"//result[@maxScore>0]" - ); - + args.put("fl", "*,score"); + args.put("sort", "id desc"); + req = new LocalSolrQueryRequest(h.getCore(), "id:44", "/select", 0, 0, args); + assertQ(req, "//result[@maxScore>0]"); // test schema field attribute inheritance and overriding assertU("44"); - assertU("44hi"); + assertU( + "44hi"); assertU(""); - assertQ(req("id:44") - 
,"//*[@name='shouldbestored']" - ); - assertQ(req("+id:44 +shouldbestored:hi") - ,"//*[@numFound='1']" - ); + assertQ(req("id:44"), "//*[@name='shouldbestored']"); + assertQ(req("+id:44 +shouldbestored:hi"), "//*[@numFound='1']"); assertU("44"); - assertU("44hi"); + assertU( + "44hi"); assertU(""); - assertQ(req("id:44") - ,"not(//*[@name='shouldbeunstored'])" - ); - assertQ(req("+id:44 +shouldbeunstored:hi") - ,"//*[@numFound='1']" - ); + assertQ(req("id:44"), "not(//*[@name='shouldbeunstored'])"); + assertQ(req("+id:44 +shouldbeunstored:hi"), "//*[@numFound='1']"); assertU("44"); - assertU("44hi"); + assertU( + "44hi"); assertU(""); - assertQ(req("id:44") - ,"//*[@name='shouldbeunindexed']" - ); + assertQ(req("id:44"), "//*[@name='shouldbeunindexed']"); // this should result in an error... how to check for that? // +id:44 +shouldbeunindexed:hi %//*[@numFound="0"] - // test spaces between XML elements because that can introduce extra XML events that // can mess up parsing (and it has in the past) assertU(" 44 "); - assertU(" 44 hi "); + assertU( + " 44 hi "); assertU(""); // test adding multiple docs per add command // assertU("id:[0 TO 99]"); - // assertU("12"); + // assertU("12"); // assertU(""); // assertQ(req("id:[0 TO 99]") // ,"//*[@numFound='2']" @@ -1259,68 +918,33 @@ public void testABunchOfConvertedStuff() { assertU("12c"); assertU("13foo"); assertU(""); - assertQ(req("id:10 AND syn:a") - ,"//*[@numFound='1']" - ); - assertQ(req("id:10 AND syn:aa") - ,"//*[@numFound='1']" - ); - assertQ(req("id:11 AND syn:b") - ,"//*[@numFound='1']" - ); - assertQ(req("id:11 AND syn:b1") - ,"//*[@numFound='1']" - ); - assertQ(req("id:11 AND syn:b2") - ,"//*[@numFound='1']" - ); - assertQ(req("id:12 AND syn:c") - ,"//*[@numFound='1']" - ); - assertQ(req("id:12 AND syn:c1") - ,"//*[@numFound='1']" - ); - assertQ(req("id:12 AND syn:c2") - ,"//*[@numFound='1']" - ); - assertQ(req("id:13 AND syn:foo") - ,"//*[@numFound='1']" - ); - assertQ(req("id:13 AND syn:bar") - ,"//*[@numFound='1']" - ); - assertQ(req("id:13 AND syn:baz") - ,"//*[@numFound='1']" - ); - + assertQ(req("id:10 AND syn:a"), "//*[@numFound='1']"); + assertQ(req("id:10 AND syn:aa"), "//*[@numFound='1']"); + assertQ(req("id:11 AND syn:b"), "//*[@numFound='1']"); + assertQ(req("id:11 AND syn:b1"), "//*[@numFound='1']"); + assertQ(req("id:11 AND syn:b2"), "//*[@numFound='1']"); + assertQ(req("id:12 AND syn:c"), "//*[@numFound='1']"); + assertQ(req("id:12 AND syn:c1"), "//*[@numFound='1']"); + assertQ(req("id:12 AND syn:c2"), "//*[@numFound='1']"); + assertQ(req("id:13 AND syn:foo"), "//*[@numFound='1']"); + assertQ(req("id:13 AND syn:bar"), "//*[@numFound='1']"); + assertQ(req("id:13 AND syn:baz"), "//*[@numFound='1']"); // test position increment gaps between field values assertU("44"); assertU("45"); - assertU("44aa bb ccdd ee ff"); - assertU("45aa bb ccdd ee ff"); - assertU(""); - assertQ(req("+id:44 +textgap:\"aa bb cc\"") - ,"//*[@numFound='1']" - ); - assertQ(req("+id:44 +textgap:\"dd ee ff\"") - ,"//*[@numFound='1']" - ); - assertQ(req("+id:44 +textgap:\"cc dd\"") - ,"//*[@numFound='0']" - ); - assertQ(req("+id:44 +textgap:\"cc dd\"~100") - ,"//*[@numFound='1']" - ); - assertQ(req("+id:44 +textgap:\"bb cc dd ee\"~90") - ,"//*[@numFound='0']" - ); - assertQ(req("+id:44 +textgap:\"bb cc dd ee\"~100") - ,"//*[@numFound='1']" - ); - assertQ(req("+id:45 +text:\"cc dd\"") - ,"//*[@numFound='1']" - ); + assertU( + "44aa bb ccdd ee ff"); + assertU( + "45aa bb ccdd ee ff"); + assertU(""); + assertQ(req("+id:44 +textgap:\"aa bb cc\""), 
"//*[@numFound='1']"); + assertQ(req("+id:44 +textgap:\"dd ee ff\""), "//*[@numFound='1']"); + assertQ(req("+id:44 +textgap:\"cc dd\""), "//*[@numFound='0']"); + assertQ(req("+id:44 +textgap:\"cc dd\"~100"), "//*[@numFound='1']"); + assertQ(req("+id:44 +textgap:\"bb cc dd ee\"~90"), "//*[@numFound='0']"); + assertQ(req("+id:44 +textgap:\"bb cc dd ee\"~100"), "//*[@numFound='1']"); + assertQ(req("+id:45 +text:\"cc dd\""), "//*[@numFound='1']"); } } diff --git a/solr/core/src/test/org/apache/solr/CursorPagingTest.java b/solr/core/src/test/org/apache/solr/CursorPagingTest.java index 378574c5282..e3b1b28aead 100644 --- a/solr/core/src/test/org/apache/solr/CursorPagingTest.java +++ b/solr/core/src/test/org/apache/solr/CursorPagingTest.java @@ -16,6 +16,13 @@ */ package org.apache.solr; +import static org.apache.solr.common.params.CommonParams.TIME_ALLOWED; +import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_NEXT; +import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_PARAM; +import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START; +import static org.apache.solr.common.params.SolrParams.wrapDefaults; +import static org.apache.solr.common.util.Utils.fromJSONString; + import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; @@ -28,7 +35,7 @@ import java.util.function.Consumer; import java.util.stream.Collectors; import java.util.stream.IntStream; - +import org.apache.commons.lang3.StringUtils; import org.apache.lucene.util.SentinelIntSet; import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.mutable.MutableValueInt; @@ -47,36 +54,28 @@ import org.junit.After; import org.junit.BeforeClass; -import org.apache.commons.lang3.StringUtils; - -import static org.apache.solr.common.params.SolrParams.wrapDefaults; -import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_NEXT; -import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_PARAM; -import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START; -import static org.apache.solr.common.params.CommonParams.TIME_ALLOWED; -import static org.apache.solr.common.util.Utils.fromJSONString; - -/** - * Tests of deep paging using {@link CursorMark} and {@link CursorMarkParams#CURSOR_MARK_PARAM}. - */ +/** Tests of deep paging using {@link CursorMark} and {@link CursorMarkParams#CURSOR_MARK_PARAM}. 
*/ public class CursorPagingTest extends SolrTestCaseJ4 { /** solrconfig.xml file name, shared with other cursor related tests */ - - public final static String TEST_SOLRCONFIG_NAME = "solrconfig-deeppaging.xml"; + public static final String TEST_SOLRCONFIG_NAME = "solrconfig-deeppaging.xml"; /** schema.xml file name, shared with other cursor related tests */ - public final static String TEST_SCHEMAXML_NAME = "schema-sorts.xml"; + public static final String TEST_SCHEMAXML_NAME = "schema-sorts.xml"; /** values from enumConfig.xml */ - public static final String[] SEVERITY_ENUM_VALUES = - { "Not Available", "Low", "Medium", "High", "Critical" }; + public static final String[] SEVERITY_ENUM_VALUES = { + "Not Available", "Low", "Medium", "High", "Critical" + }; @BeforeClass public static void beforeTests() throws Exception { // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); - System.setProperty("solr.test.useFilterForSortedQuery", Boolean.toString(random().nextBoolean())); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); + System.setProperty( + "solr.test.useFilterForSortedQuery", Boolean.toString(random().nextBoolean())); initCore(TEST_SOLRCONFIG_NAME, TEST_SCHEMAXML_NAME); } + @After public void cleanup() throws Exception { assertU(delQ("*:*")); @@ -95,80 +94,85 @@ public void testBadInputs() throws Exception { assertU(commit()); // empty, blank, or bogus cursor - for (String c : new String[] { "", " ", "all the docs please!"}) { - assertFail(params("q", "*:*", - "sort", "id desc", - CURSOR_MARK_PARAM, c), - ErrorCode.BAD_REQUEST, "Unable to parse"); + for (String c : new String[] {"", " ", "all the docs please!"}) { + assertFail( + params("q", "*:*", "sort", "id desc", CURSOR_MARK_PARAM, c), + ErrorCode.BAD_REQUEST, + "Unable to parse"); } // no id in sort - assertFail(params("q", "*:*", - "sort", "score desc", - CURSOR_MARK_PARAM, CURSOR_MARK_START), - ErrorCode.BAD_REQUEST, "uniqueKey field"); + assertFail( + params("q", "*:*", "sort", "score desc", CURSOR_MARK_PARAM, CURSOR_MARK_START), + ErrorCode.BAD_REQUEST, + "uniqueKey field"); // _docid_ - assertFail(params("q", "*:*", - "sort", "_docid_ asc, id desc", - CURSOR_MARK_PARAM, CURSOR_MARK_START), - ErrorCode.BAD_REQUEST, "_docid_"); + assertFail( + params("q", "*:*", "sort", "_docid_ asc, id desc", CURSOR_MARK_PARAM, CURSOR_MARK_START), + ErrorCode.BAD_REQUEST, + "_docid_"); // using cursor w/ grouping - assertFail(params("q", "*:*", - "sort", "id desc", - GroupParams.GROUP, "true", - GroupParams.GROUP_FIELD, "str", - CURSOR_MARK_PARAM, CURSOR_MARK_START), - ErrorCode.BAD_REQUEST, "Grouping"); - - // if a user specifies a 'bogus' cursorMark param, this should error *only* if some other component - // cares about (and parses) a SortSpec in it's prepare() method. - // (the existence of a 'sort' param shouldn't make a diff ... unless it makes a diff to a component being used, + assertFail( + params( + "q", + "*:*", + "sort", + "id desc", + GroupParams.GROUP, + "true", + GroupParams.GROUP_FIELD, + "str", + CURSOR_MARK_PARAM, + CURSOR_MARK_START), + ErrorCode.BAD_REQUEST, + "Grouping"); + + // if a user specifies a 'bogus' cursorMark param, this should error *only* if some other + // component cares about (and parses) a SortSpec in its prepare() method. (the existence of a + // 'sort' param shouldn't make a diff ... 
unless it makes a diff to a component being used, // which it doesn't for RTG) assertU(adoc("id", "yyy", "str", "y", "float", "3", "int", "-3")); if (random().nextBoolean()) { assertU(commit()); } - for (SolrParams p : Arrays.asList(params(), - params(CURSOR_MARK_PARAM, "gibberish"), - params(CURSOR_MARK_PARAM, "gibberish", - "sort", "id asc"))) { - assertJQ(req(p, - "qt","/get", - "fl", "id", - "id","yyy") - , "=={'doc':{'id':'yyy'}}"); - assertJQ(req(p, - "qt","/get", - "fl", "id", - "id","xxx") // doesn't exist in our collection - , "=={'doc':null}"); + for (SolrParams p : + Arrays.asList( + params(), + params(CURSOR_MARK_PARAM, "gibberish"), + params(CURSOR_MARK_PARAM, "gibberish", "sort", "id asc"))) { + assertJQ(req(p, "qt", "/get", "fl", "id", "id", "yyy"), "=={'doc':{'id':'yyy'}}"); + assertJQ( + req(p, "qt", "/get", "fl", "id", "id", "xxx") // doesn't exist in our collection + , + "=={'doc':null}"); } } - /** simple static test of some carefully crafted docs */ public void testSimple() throws Exception { String cursorMark; SolrParams params = null; - + final String intsort = "int" + (random().nextBoolean() ? "" : "_dv"); final String intmissingsort = intsort; // trivial base case: ensure cursorMark against an empty index doesn't blow up cursorMark = CURSOR_MARK_START; - params = params("q", "*:*", - "rows","4", - "fl", "id", - "sort", "id desc"); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==0" - ,"/response/start==0" - ,"/response/docs==[]" - ); + params = + params( + "q", "*:*", + "rows", "4", + "fl", "id", + "sort", "id desc"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==0", + "/response/start==0", + "/response/docs==[]"); assertEquals(CURSOR_MARK_START, cursorMark); - // don't add in order of any field to ensure we aren't inadvertently // counting on internal docid ordering assertU(adoc("id", "9", "str", "c", "float", "-3.2", "int", "42")); @@ -185,289 +189,357 @@ public void testSimple() throws Exception { // base case: ensure cursorMark that matches no docs doesn't blow up cursorMark = CURSOR_MARK_START; - params = params("q", "id:9999999", - "rows","4", - "fl", "id", - "sort", "id desc"); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==0" - ,"/response/start==0" - ,"/response/docs==[]" - ); + params = + params( + "q", "id:9999999", + "rows", "4", + "fl", "id", + "sort", "id desc"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==0", + "/response/start==0", + "/response/docs==[]"); assertEquals(CURSOR_MARK_START, cursorMark); // edge case: ensure rows=0 doesn't blow up and gives back same cursor for next cursorMark = CURSOR_MARK_START; - params = params("q", "*:*", - "rows","0", - "fl", "id", - "sort", "id desc"); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==10" - ,"/response/start==0" - ,"/response/docs==[]" - ); + params = + params( + "q", "*:*", + "rows", "0", + "fl", "id", + "sort", "id desc"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==10", + "/response/start==0", + "/response/docs==[]"); assertEquals(CURSOR_MARK_START, cursorMark); // simple id sort w/some faceting cursorMark = CURSOR_MARK_START; - params = params("q", "-int:6", - "rows","4", - "fl", "id", - "sort", "id desc"); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - 
,"/response/numFound==9" - ,"/response/start==0" - ,"/response/docs==[{'id':'9'},{'id':'8'},{'id':'7'},{'id':'6'}]" - ); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==9" - ,"/response/start==0" - ,"/response/docs==[{'id':'5'},{'id':'3'},{'id':'2'},{'id':'1'}]" - ); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==9" - ,"/response/start==0" - ,"/response/docs==[{'id':'0'}]" - ); + params = + params( + "q", "-int:6", + "rows", "4", + "fl", "id", + "sort", "id desc"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==9", + "/response/start==0", + "/response/docs==[{'id':'9'},{'id':'8'},{'id':'7'},{'id':'6'}]"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==9", + "/response/start==0", + "/response/docs==[{'id':'5'},{'id':'3'},{'id':'2'},{'id':'1'}]"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==9", + "/response/start==0", + "/response/docs==[{'id':'0'}]"); // no more, so no change to cursorMark, and no new docs - assertEquals(cursorMark, - assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==9" - ,"/response/start==0" - ,"/response/docs==[]" - )); + assertEquals( + cursorMark, + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==9", + "/response/start==0", + "/response/docs==[]")); // simple score sort w/some faceting cursorMark = CURSOR_MARK_START; - params = params("q", "float:[0 TO *] int:7 id:6", - "rows","4", - "fl", "id", - "facet", "true", - "facet.field", "str", - "json.nl", "map", - "sort", "score desc, id desc"); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==7" - ,"/response/start==0" - ,"/response/docs==[{'id':'6'},{'id':'1'},{'id':'8'},{'id':'5'}]" - ,"/facet_counts/facet_fields/str=={'a':4,'b':3,'c':0}" - ); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==7" - ,"/response/start==0" - ,"/response/docs==[{'id':'4'},{'id':'3'},{'id':'0'}]" - ,"/facet_counts/facet_fields/str=={'a':4,'b':3,'c':0}" - ); + params = + params( + "q", "float:[0 TO *] int:7 id:6", + "rows", "4", + "fl", "id", + "facet", "true", + "facet.field", "str", + "json.nl", "map", + "sort", "score desc, id desc"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==7", + "/response/start==0", + "/response/docs==[{'id':'6'},{'id':'1'},{'id':'8'},{'id':'5'}]", + "/facet_counts/facet_fields/str=={'a':4,'b':3,'c':0}"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==7", + "/response/start==0", + "/response/docs==[{'id':'4'},{'id':'3'},{'id':'0'}]", + "/facet_counts/facet_fields/str=={'a':4,'b':3,'c':0}"); // no more, so no change to cursorMark, and no new docs - assertEquals(cursorMark, - assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==7" - ,"/response/start==0" - ,"/response/docs==[]" - ,"/facet_counts/facet_fields/str=={'a':4,'b':3,'c':0}" - )); + assertEquals( + cursorMark, + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==7", + "/response/start==0", + "/response/docs==[]", + "/facet_counts/facet_fields/str=={'a':4,'b':3,'c':0}")); // int sort with dups, id tie breaker ... 
and some faceting cursorMark = CURSOR_MARK_START; - params = params("q", "-int:2001 -int:4055", - "rows","3", - "fl", "id", - "facet", "true", - "facet.field", "str", - "json.nl", "map", - "sort", intsort + " asc, id asc"); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==8" - ,"/response/start==0" - ,"/response/docs==[{'id':'7'},{'id':'0'},{'id':'3'}]" - ,"/facet_counts/facet_fields/str=={'a':4,'b':1,'c':3}" - ); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==8" - ,"/response/start==0" - ,"/response/docs==[{'id':'4'},{'id':'1'},{'id':'6'}]" - ,"/facet_counts/facet_fields/str=={'a':4,'b':1,'c':3}" - ); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==8" - ,"/response/start==0" - ,"/response/docs==[{'id':'9'},{'id':'2'}]" - ,"/facet_counts/facet_fields/str=={'a':4,'b':1,'c':3}" - ); + params = + params( + "q", "-int:2001 -int:4055", + "rows", "3", + "fl", "id", + "facet", "true", + "facet.field", "str", + "json.nl", "map", + "sort", intsort + " asc, id asc"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==8", + "/response/start==0", + "/response/docs==[{'id':'7'},{'id':'0'},{'id':'3'}]", + "/facet_counts/facet_fields/str=={'a':4,'b':1,'c':3}"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==8", + "/response/start==0", + "/response/docs==[{'id':'4'},{'id':'1'},{'id':'6'}]", + "/facet_counts/facet_fields/str=={'a':4,'b':1,'c':3}"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==8", + "/response/start==0", + "/response/docs==[{'id':'9'},{'id':'2'}]", + "/facet_counts/facet_fields/str=={'a':4,'b':1,'c':3}"); // no more, so no change to cursorMark, and no new docs - assertEquals(cursorMark, - assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==8" - ,"/response/start==0" - ,"/response/docs==[]" - ,"/facet_counts/facet_fields/str=={'a':4,'b':1,'c':3}" - )); + assertEquals( + cursorMark, + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==8", + "/response/start==0", + "/response/docs==[]", + "/facet_counts/facet_fields/str=={'a':4,'b':1,'c':3}")); // int missing first sort with dups, id tie breaker cursorMark = CURSOR_MARK_START; - params = params("q", "-int:2001 -int:4055", - "rows","3", - "fl", "id", - "json.nl", "map", - "sort", intmissingsort + "_first asc, id asc"); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==8" - ,"/response/start==0" - ,"/response/docs==[{'id':'3'},{'id':'7'},{'id':'0'}]" - ); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==8" - ,"/response/start==0" - ,"/response/docs==[{'id':'4'},{'id':'1'},{'id':'6'}]" - ); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==8" - ,"/response/start==0" - ,"/response/docs==[{'id':'9'},{'id':'2'}]" - ); + params = + params( + "q", "-int:2001 -int:4055", + "rows", "3", + "fl", "id", + "json.nl", "map", + "sort", intmissingsort + "_first asc, id asc"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==8", + "/response/start==0", + "/response/docs==[{'id':'3'},{'id':'7'},{'id':'0'}]"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==8", + 
"/response/start==0", + "/response/docs==[{'id':'4'},{'id':'1'},{'id':'6'}]"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==8", + "/response/start==0", + "/response/docs==[{'id':'9'},{'id':'2'}]"); // no more, so no change to cursorMark, and no new docs - assertEquals(cursorMark, - assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==8" - ,"/response/start==0" - ,"/response/docs==[]" - )); + assertEquals( + cursorMark, + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==8", + "/response/start==0", + "/response/docs==[]")); // int missing last sort with dups, id tie breaker cursorMark = CURSOR_MARK_START; - params = params("q", "-int:2001 -int:4055", - "rows","3", - "fl", "id", - "json.nl", "map", - "sort", intmissingsort + "_last asc, id asc"); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==8" - ,"/response/start==0" - ,"/response/docs==[{'id':'7'},{'id':'0'},{'id':'4'}]" - ); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==8" - ,"/response/start==0" - ,"/response/docs==[{'id':'1'},{'id':'6'},{'id':'9'}]" - ); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==8" - ,"/response/start==0" - ,"/response/docs==[{'id':'2'},{'id':'3'}]" - ); + params = + params( + "q", "-int:2001 -int:4055", + "rows", "3", + "fl", "id", + "json.nl", "map", + "sort", intmissingsort + "_last asc, id asc"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==8", + "/response/start==0", + "/response/docs==[{'id':'7'},{'id':'0'},{'id':'4'}]"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==8", + "/response/start==0", + "/response/docs==[{'id':'1'},{'id':'6'},{'id':'9'}]"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==8", + "/response/start==0", + "/response/docs==[{'id':'2'},{'id':'3'}]"); // no more, so no change to cursorMark, and no new docs - assertEquals(cursorMark, - assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==8" - ,"/response/start==0" - ,"/response/docs==[]" - )); + assertEquals( + cursorMark, + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==8", + "/response/start==0", + "/response/docs==[]")); // string sort with dups, id tie breaker cursorMark = CURSOR_MARK_START; - params = params("q", "*:*", - "rows","6", - "fl", "id", - "sort", "str asc, id desc"); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==10" - ,"/response/start==0" - ,"/response/docs==[{'id':'6'},{'id':'4'},{'id':'3'},{'id':'1'},{'id':'8'},{'id':'5'}]" - ); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==10" - ,"/response/start==0" - ,"/response/docs==[{'id':'0'},{'id':'9'},{'id':'7'},{'id':'2'}]" - ); + params = + params( + "q", "*:*", + "rows", "6", + "fl", "id", + "sort", "str asc, id desc"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==10", + "/response/start==0", + "/response/docs==[{'id':'6'},{'id':'4'},{'id':'3'},{'id':'1'},{'id':'8'},{'id':'5'}]"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==10", + "/response/start==0", + 
"/response/docs==[{'id':'0'},{'id':'9'},{'id':'7'},{'id':'2'}]"); // no more, so no change to cursorMark, and no new docs - assertEquals(cursorMark, - assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==10" - ,"/response/start==0" - ,"/response/docs==[]" - )); + assertEquals( + cursorMark, + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==10", + "/response/start==0", + "/response/docs==[]")); // tri-level sort with more dups of primary then fit on a page cursorMark = CURSOR_MARK_START; - params = params("q", "*:*", - "rows","2", - "fl", "id", - "sort", "float asc, "+intsort+" desc, id desc"); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==10" - ,"/response/start==0" - ,"/response/docs==[{'id':'2'},{'id':'9'}]" - ); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==10" - ,"/response/start==0" - ,"/response/docs==[{'id':'7'},{'id':'4'}]" - ); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==10" - ,"/response/start==0" - ,"/response/docs==[{'id':'3'},{'id':'8'}]" - ); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==10" - ,"/response/start==0" - ,"/response/docs==[{'id':'5'},{'id':'6'}]" - ); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==10" - ,"/response/start==0" - ,"/response/docs==[{'id':'1'},{'id':'0'}]" - ); + params = + params( + "q", "*:*", + "rows", "2", + "fl", "id", + "sort", "float asc, " + intsort + " desc, id desc"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==10", + "/response/start==0", + "/response/docs==[{'id':'2'},{'id':'9'}]"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==10", + "/response/start==0", + "/response/docs==[{'id':'7'},{'id':'4'}]"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==10", + "/response/start==0", + "/response/docs==[{'id':'3'},{'id':'8'}]"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==10", + "/response/start==0", + "/response/docs==[{'id':'5'},{'id':'6'}]"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==10", + "/response/start==0", + "/response/docs==[{'id':'1'},{'id':'0'}]"); // we've exactly exhausted all the results, but solr had no way of know that // no more, so no change to cursorMark, and no new docs - assertEquals(cursorMark, - assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==10" - ,"/response/start==0" - ,"/response/docs==[]" - )); + assertEquals( + cursorMark, + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==10", + "/response/start==0", + "/response/docs==[]")); // trivial base case: rows bigger then number of matches cursorMark = CURSOR_MARK_START; - params = params("q", "id:3 id:7", - "rows","111", - "fl", "id", - "sort", intsort + " asc, id asc"); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==2" - ,"/response/start==0" - ,"/response/docs==[{'id':'7'},{'id':'3'}]" - ); + params = + params( + "q", "id:3 id:7", + "rows", "111", + "fl", "id", + "sort", intsort + " asc, id asc"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + 
"/response/numFound==2", + "/response/start==0", + "/response/docs==[{'id':'7'},{'id':'3'}]"); // no more, so no change to cursorMark, and no new docs - assertEquals(cursorMark, - assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==2" - ,"/response/start==0" - ,"/response/docs==[]" - )); + assertEquals( + cursorMark, + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==2", + "/response/start==0", + "/response/docs==[]")); // sanity check our full walk method SentinelIntSet ids; - ids = assertFullWalkNoDups(10, params("q", "*:*", - "rows", "4", - "sort", "id desc")); + ids = + assertFullWalkNoDups( + 10, + params( + "q", "*:*", + "rows", "4", + "sort", "id desc")); assertEquals(10, ids.size()); - ids = assertFullWalkNoDups(9, params("q", "*:*", - "rows", "1", - "fq", "-id:4", - "sort", "id asc")); + ids = + assertFullWalkNoDups( + 9, + params( + "q", "*:*", + "rows", "1", + "fq", "-id:4", + "sort", "id asc")); assertEquals(9, ids.size()); assertFalse("matched on id:4 unexpectedly", ids.exists(4)); - ids = assertFullWalkNoDups(9, params("q", "*:*", - "rows", "3", - "fq", "-id:6", - "sort", "float desc, id asc, "+intsort+" asc")); + ids = + assertFullWalkNoDups( + 9, + params( + "q", "*:*", + "rows", "3", + "fq", "-id:6", + "sort", "float desc, id asc, " + intsort + " asc")); assertEquals(9, ids.size()); assertFalse("matched on id:6 unexpectedly", ids.exists(6)); - ids = assertFullWalkNoDups(9, params("q", "float:[0 TO *] int:7 id:6", - "rows", "3", - "sort", "score desc, id desc")); + ids = + assertFullWalkNoDups( + 9, + params( + "q", "float:[0 TO *] int:7 id:6", + "rows", "3", + "sort", "score desc, id desc")); assertEquals(7, ids.size()); assertFalse("matched on id:9 unexpectedly", ids.exists(9)); assertFalse("matched on id:7 unexpectedly", ids.exists(7)); @@ -475,71 +547,81 @@ public void testSimple() throws Exception { // strategically delete/add some docs in the middle of walking the cursor cursorMark = CURSOR_MARK_START; - params = params("q", "*:*", - "rows","2", - "fl", "id", - "sort", "str asc, id asc"); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==10" - ,"/response/start==0" - ,"/response/docs==[{'id':'1'},{'id':'3'}]" - ); + params = + params( + "q", "*:*", + "rows", "2", + "fl", "id", + "sort", "str asc, id asc"); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==10", + "/response/start==0", + "/response/docs==[{'id':'1'},{'id':'3'}]"); // delete the last guy we got - assertU(delI("3")); + assertU(delI("3")); assertU(commit()); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==9" - ,"/response/start==0" - ,"/response/docs==[{'id':'4'},{'id':'6'}]" - ); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==9", + "/response/start==0", + "/response/docs==[{'id':'4'},{'id':'6'}]"); // delete the next guy we expect - assertU(delI("0")); + assertU(delI("0")); assertU(commit()); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==8" - ,"/response/start==0" - ,"/response/docs==[{'id':'5'},{'id':'8'}]" - ); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==8", + "/response/start==0", + "/response/docs==[{'id':'5'},{'id':'8'}]"); // update a doc we've already seen so it repeats assertU(adoc("id", "5", "str", "c")); assertU(commit()); - cursorMark = 
assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==8" - ,"/response/start==0" - ,"/response/docs==[{'id':'2'},{'id':'5'}]" - ); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==8", + "/response/start==0", + "/response/docs==[{'id':'2'},{'id':'5'}]"); // update the next doc we expect so it's now in the past assertU(adoc("id", "7", "str", "a")); assertU(commit()); - cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==8" - ,"/response/start==0" - ,"/response/docs==[{'id':'9'}]" - ); + cursorMark = + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==8", + "/response/start==0", + "/response/docs==[{'id':'9'}]"); // no more, so no change to cursorMark, and no new docs - assertEquals(cursorMark, - assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark) - ,"/response/numFound==8" - ,"/response/start==0" - ,"/response/docs==[]" - )); + assertEquals( + cursorMark, + assertCursor( + req(params, CURSOR_MARK_PARAM, cursorMark), + "/response/numFound==8", + "/response/start==0", + "/response/docs==[]")); } /** - * test that timeAllowed parameter can be used with cursors - * uses DelayingSearchComponent in solrconfig-deeppaging.xml + * test that timeAllowed parameter can be used with cursors uses DelayingSearchComponent in + * solrconfig-deeppaging.xml */ - @LogLevel("org.apache.solr.search.SolrIndexSearcher=ERROR;org.apache.solr.handler.component.SearchHandler=ERROR") + @LogLevel( + "org.apache.solr.search.SolrIndexSearcher=ERROR;org.apache.solr.handler.component.SearchHandler=ERROR") public void testTimeAllowed() throws Exception { String wontExceedTimeout = "10000"; int numDocs = 1000; - List ids = IntStream.range(0, 1000).mapToObj(String::valueOf).collect(Collectors.toList()); + List ids = + IntStream.range(0, 1000).mapToObj(String::valueOf).collect(Collectors.toList()); // Shuffle to test ordering Collections.shuffle(ids, random()); for (String id : ids) { assertU(adoc("id", id, "name", "a" + id)); if (random().nextInt(numDocs) == 0) { - assertU(commit()); // sometimes make multiple segments + assertU(commit()); // sometimes make multiple segments } } assertU(commit()); @@ -548,12 +630,20 @@ public void testTimeAllowed() throws Exception { String cursorMark, nextCursorMark = CURSOR_MARK_START; - SolrParams params = params("q", "name:a*", - "fl", "id", - "sort", "id asc", - "rows", "50", - "qt", "/delayed", - "sleep", "10"); + SolrParams params = + params( + "q", + "name:a*", + "fl", + "id", + "sort", + "id asc", + "rows", + "50", + "qt", + "/delayed", + "sleep", + "10"); List foundDocIds = new ArrayList<>(); String[] timeAllowedVariants = {"1", "50", wontExceedTimeout}; @@ -563,7 +653,8 @@ public void testTimeAllowed() throws Exception { for (String timeAllowed : timeAllowedVariants) { // execute the query - String json = assertJQ(req(params, CURSOR_MARK_PARAM, cursorMark, TIME_ALLOWED, timeAllowed)); + String json = + assertJQ(req(params, CURSOR_MARK_PARAM, cursorMark, TIME_ALLOWED, timeAllowed)); Map response = (Map) fromJSONString(json); Map responseHeader = (Map) response.get("responseHeader"); @@ -594,15 +685,18 @@ public void testTimeAllowed() throws Exception { // Note: it is not guaranteed that all docs will be found, because a query may time out // before reaching all segments, this causes documents in the skipped segments to be skipped // in the overall result set as the cursor pages through. 
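  // A minimal sketch (illustrative only, not part of this patch) of the client-side walk that
  // the assertions below verify; `client` is an assumed SolrClient for this collection:
  //
  //   String mark = CursorMarkParams.CURSOR_MARK_START;
  //   while (true) {
  //     SolrQuery q = new SolrQuery("name:a*");
  //     q.setSort("id", SolrQuery.ORDER.asc);
  //     q.setRows(50);
  //     q.set(CursorMarkParams.CURSOR_MARK_PARAM, mark);
  //     q.setTimeAllowed(50); // a page may stop early; docs in skipped segments never reappear
  //     QueryResponse rsp = client.query(q);
  //     String next = rsp.getNextCursorMark();
  //     if (mark.equals(next)) break; // an unchanged mark means the walk is complete
  //     mark = next;
  //   }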
- assertEquals("Should have found last doc id eventually", ids.get(ids.size() -1), foundDocIds.get(foundDocIds.size() -1)); - assertEquals("Documents arrived in sorted order within and between pages", sortedFoundDocIds, foundDocIds); + assertEquals( + "Should have found last doc id eventually", + ids.get(ids.size() - 1), + foundDocIds.get(foundDocIds.size() - 1)); + assertEquals( + "Documents arrived in sorted order within and between pages", + sortedFoundDocIds, + foundDocIds); assertTrue("Should have experienced at least one partialResult", partialCount > 0); } - - /** - * test that our assumptions about how caches are affected hold true - */ + /** test that our assumptions about how caches are affected hold true */ public void testCacheImpacts() throws Exception { // cursor queries can't live in the queryResultCache, but independent filters // should still be cached & reused @@ -624,46 +718,61 @@ public void testCacheImpacts() throws Exception { final Collection allFieldNames = getAllSortFieldNames(); final MetricsMap filterCacheStats = - (MetricsMap)((SolrMetricManager.GaugeWrapper)h.getCore().getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.filterCache")).getGauge(); + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) + h.getCore() + .getCoreMetricManager() + .getRegistry() + .getMetrics() + .get("CACHE.searcher.filterCache")) + .getGauge(); assertNotNull(filterCacheStats); final MetricsMap queryCacheStats = - (MetricsMap)((SolrMetricManager.GaugeWrapper)h.getCore().getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.queryResultCache")).getGauge(); + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) + h.getCore() + .getCoreMetricManager() + .getRegistry() + .getMetrics() + .get("CACHE.searcher.queryResultCache")) + .getGauge(); assertNotNull(queryCacheStats); final long preQcIn = (Long) queryCacheStats.getValue().get("inserts"); final long preFcIn = (Long) filterCacheStats.getValue().get("inserts"); final long preFcHits = (Long) filterCacheStats.getValue().get("hits"); - SentinelIntSet ids = assertFullWalkNoDups - (10, params("q", "*:*", - "rows",""+ TestUtil.nextInt(random(), 1, 11), - "fq", "-id:[1 TO 2]", - "fq", "-id:[6 TO 7]", - "fl", "id", - "sort", buildRandomSort(allFieldNames))); - + SentinelIntSet ids = + assertFullWalkNoDups( + 10, + params( + "q", "*:*", + "rows", "" + TestUtil.nextInt(random(), 1, 11), + "fq", "-id:[1 TO 2]", + "fq", "-id:[6 TO 7]", + "fl", "id", + "sort", buildRandomSort(allFieldNames))); + assertEquals(6, ids.size()); final long postQcIn = (Long) queryCacheStats.getValue().get("inserts"); final long postFcIn = (Long) filterCacheStats.getValue().get("inserts"); final long postFcHits = (Long) filterCacheStats.getValue().get("hits"); - + assertEquals("query cache inserts changed", preQcIn, postQcIn); // NOTE: use of pure negative filters clauses "*:* to be tracked in filterCache - assertEquals("filter cache did not grow correctly", 3, postFcIn-preFcIn); - assertTrue("filter cache did not have any new cache hits", 0 < postFcHits-preFcHits); - + assertEquals("filter cache did not grow correctly", 3, postFcIn - preFcIn); + assertTrue("filter cache did not have any new cache hits", 0 < postFcHits - preFcHits); } - /** randomized testing of a non-trivial number of docs using assertFullWalkNoDups - */ + /** randomized testing of a non-trivial number of docs using assertFullWalkNoDups */ public void testRandomSortsOnLargeIndex() throws Exception { final Collection allFieldNames = getAllSortFieldNames(); final int initialDocs = 
TestUtil.nextInt(random(), 100, 200); final int totalDocs = atLeast(500); - // start with a smallish number of documents, and test that we can do a full walk using a + // start with a smallish number of documents, and test that we can do a full walk using a // sort on *every* field in the schema... for (int i = 1; i <= initialDocs; i++) { @@ -676,20 +785,22 @@ public void testRandomSortsOnLargeIndex() throws Exception { for (String order : new String[] {" asc", " desc"}) { String sort = f + order + ("id".equals(f) ? "" : ", id" + order); String rows = "" + TestUtil.nextInt(random(), 13, 50); - final SolrParams main = params("q", "*:*", - "fl","id", - "rows",rows, - "sort",sort); + final SolrParams main = params("q", "*:*", "fl", "id", "rows", rows, "sort", sort); final SentinelIntSet ids = assertFullWalkNoDups(totalDocs, main); assertEquals(initialDocs, ids.size()); - // same query, now with QEC ... verify we get all the same docs, but the (expected) elevated docs are first... - final SentinelIntSet elevated = assertFullWalkNoDupsElevated(wrapDefaults(params("qt", "/elevate", - "fl","id,[elevated]", - "forceElevation","true", - "elevateIds", "50,20,80"), - main), - ids); + // same query, now with QEC ... verify we get all the same docs, but the (expected) elevated + // docs are first... + final SentinelIntSet elevated = + assertFullWalkNoDupsElevated( + wrapDefaults( + params( + "qt", "/elevate", + "fl", "id,[elevated]", + "forceElevation", "true", + "elevateIds", "50,20,80"), + main), + ids); assertTrue(elevated.exists(50)); assertTrue(elevated.exists(20)); assertTrue(elevated.exists(80)); @@ -698,7 +809,7 @@ public void testRandomSortsOnLargeIndex() throws Exception { } // now add a lot more docs, and test a handful of randomized sorts - for (int i = initialDocs+1; i <= totalDocs; i++) { + for (int i = initialDocs + 1; i <= totalDocs; i++) { SolrInputDocument doc = buildRandomDocument(i); assertU(adoc(doc)); } @@ -711,59 +822,70 @@ public void testRandomSortsOnLargeIndex() throws Exception { final String fl = random().nextBoolean() ? "id" : "id,score"; final boolean matchAll = random().nextBoolean(); final String q = matchAll ? "*:*" : buildRandomQuery(); - final SolrParams main = params("q", q, - "fl",fl, - "rows",rows, - "sort",sort); + final SolrParams main = + params( + "q", q, + "fl", fl, + "rows", rows, + "sort", sort); final SentinelIntSet ids = assertFullWalkNoDups(totalDocs, main); if (matchAll) { assertEquals(totalDocs, ids.size()); } - // same query, now with QEC ... verify we get all the same docs, but the (expected) elevated docs are first... + // same query, now with QEC ... verify we get all the same docs, but the (expected) elevated + // docs are first... // first we have to build a set of ids to elevate, from the set of ids known to match query... final int[] expectedElevated = pickElevations(TestUtil.nextInt(random(), 3, 33), ids); - final SentinelIntSet elevated = assertFullWalkNoDupsElevated - (wrapDefaults(params("qt", "/elevate", - "fl", fl + ",[elevated]", - // HACK: work around SOLR-15307... same results should match, just not same order - "sort", (sort.startsWith("score asc") ? "score desc, " + sort : sort), - "forceElevation","true", - "elevateIds", StringUtils.join(expectedElevated,',')), - main), - ids); + final SentinelIntSet elevated = + assertFullWalkNoDupsElevated( + wrapDefaults( + params( + "qt", + "/elevate", + "fl", + fl + ",[elevated]", + // HACK: work around SOLR-15307... 
same results should match, just not same + // order + "sort", + (sort.startsWith("score asc") ? "score desc, " + sort : sort), + "forceElevation", + "true", + "elevateIds", + StringUtils.join(expectedElevated, ',')), + main), + ids); for (int expected : expectedElevated) { - assertTrue(expected + " wasn't elevated even though it should have been", - elevated.exists(expected)); + assertTrue( + expected + " wasn't elevated even though it should have been", + elevated.exists(expected)); } assertEquals(expectedElevated.length, elevated.size()); } } - /** Similar to usually() but we want it to happen just as often regardless - * of test multiplier and nightly status + /** + * Similar to usually() but we want it to happen just as often regardless of test multiplier and + * nightly status */ private static boolean useField() { return 0 != TestUtil.nextInt(random(), 0, 30); } - + /** - * An immutable list of the fields in the schema that can be used for sorting, - * deterministically random order. + * An immutable list of the fields in the schema that can be used for sorting, deterministically + * random order. */ private List getAllSortFieldNames() { - return pruneAndDeterministicallySort - (h.getCore().getLatestSchema().getFields().keySet()); + return pruneAndDeterministicallySort(h.getCore().getLatestSchema().getFields().keySet()); } - /** - *

-   * Given a list of field names in the schema, returns an immutable list in
-   * deterministically random order with the following things removed:
-   * <ul>
-   *  <li><code>_version_</code> is removed</li>
-   * </ul>
+   * Given a list of field names in the schema, returns an immutable list in deterministically
+   * random order with the following things removed:
+   *
+   * <ul>
+   *   <li><code>_version_</code> is removed
+   * </ul>
   */
  public static List<String> pruneAndDeterministicallySort(Collection<String> raw) {
@@ -777,119 +899,119 @@ public static List<String> pruneAndDeterministicallySort(Collection<String> raw)
     }
     Collections.sort(names);
-    Collections.shuffle(names,random());
+    Collections.shuffle(names, random());
     return Collections.unmodifiableList(names);
   }

   /**
    * Given a set of params, executes a cursor query using {@link CursorMarkParams#CURSOR_MARK_START}
-   * and then continuously walks the results using {@link CursorMarkParams#CURSOR_MARK_START} as long
-   * as a non-0 number of docs ar returned. This method records the the set of all id's
-   * (must be positive ints) encountered and throws an assertion failure if any id is
-   * encountered more than once, or if an id is encountered which is not expected,
-   * or if an id is [elevated] and comes "after" any ids which were not [elevated]
-   *
+   * and then continuously walks the results using {@link CursorMarkParams#CURSOR_MARK_START} as
+   * long as a non-0 number of docs are returned. This method records the set of all id's (must
+   * be positive ints) encountered and throws an assertion failure if any id is encountered more
+   * than once, or if an id is encountered which is not expected, or if an id is [elevated]
+   * and comes "after" any ids which were not [elevated]
    *
    * @return set of all elevated ids encountered in the walk
    * @see #assertFullWalkNoDups(SolrParams,Consumer)
    */
-  public SentinelIntSet assertFullWalkNoDupsElevated(final SolrParams params, final SentinelIntSet allExpected)
-    throws Exception {
+  public SentinelIntSet assertFullWalkNoDupsElevated(
+      final SolrParams params, final SentinelIntSet allExpected) throws Exception {
     final SentinelIntSet ids = new SentinelIntSet(allExpected.size(), -1);
     final SentinelIntSet idsElevated = new SentinelIntSet(32, -1);
-    assertFullWalkNoDups(params, (doc) -> {
-      final int id = Integer.parseInt(doc.get("id").toString());
-      final boolean elevated = Boolean.parseBoolean(doc.getOrDefault("[elevated]","false").toString());
-      assertTrue(id + " is not expected to match query",
-                 allExpected.exists(id));
-      assertFalse("walk already seen: " + id,
-                  ids.exists(id));
-      if (elevated) {
-        assertEquals("id is elevated, but we've already seen non elevated ids: " + id,
-                     idsElevated.size(), ids.size());
-        idsElevated.put(id);
-      }
-      ids.put(id);
-    });
-    assertEquals("total number of ids seen did not match expected",
-                 allExpected.size(), ids.size());
-
+    assertFullWalkNoDups(
+        params,
+        (doc) -> {
+          final int id = Integer.parseInt(doc.get("id").toString());
+          final boolean elevated =
+              Boolean.parseBoolean(doc.getOrDefault("[elevated]", "false").toString());
+          assertTrue(id + " is not expected to match query", allExpected.exists(id));
+          assertFalse("walk already seen: " + id, ids.exists(id));
+          if (elevated) {
+            assertEquals(
+                "id is elevated, but we've already seen non elevated ids: " + id,
+                idsElevated.size(),
+                ids.size());
+            idsElevated.put(id);
+          }
+          ids.put(id);
+        });
+    assertEquals("total number of ids seen did not match expected", allExpected.size(), ids.size());
+
     return idsElevated;
   }

-
   /**
    * Given a set of params, executes a cursor query using {@link CursorMarkParams#CURSOR_MARK_START}
-   * and then continuously walks the results using {@link CursorMarkParams#CURSOR_MARK_START} as long
-   * as a non-0 number of docs ar returned. This method records the the set of all id's
-   * (must be positive ints) encountered and throws an assertion failure if any id is
-   * encountered more than once, or if the set grows above maxSize
+   * and then continuously walks the results using {@link CursorMarkParams#CURSOR_MARK_START} as
+   * long as a non-0 number of docs are returned. This method records the set of all id's (must
+   * be positive ints) encountered and throws an assertion failure if any id is encountered more
+   * than once, or if the set grows above maxSize
    *
    * @return set of all ids encountered in the walk
    * @see #assertFullWalkNoDups(SolrParams,Consumer)
    */
-  public SentinelIntSet assertFullWalkNoDups(int maxSize, SolrParams params)
-    throws Exception {
+  public SentinelIntSet assertFullWalkNoDups(int maxSize, SolrParams params) throws Exception {
     final SentinelIntSet ids = new SentinelIntSet(maxSize, -1);
-    assertFullWalkNoDups(params, (doc) -> {
-      int id = Integer.parseInt(doc.get("id").toString());
-      assertFalse("walk already seen: " + id, ids.exists(id));
-      ids.put(id);
-      assertFalse("id set bigger then max allowed ("+maxSize+"): " + ids.size(),
-                  maxSize < ids.size());
-
-    });
+    assertFullWalkNoDups(
+        params,
+        (doc) -> {
+          int id = Integer.parseInt(doc.get("id").toString());
+          assertFalse("walk already seen: " + id, ids.exists(id));
+          ids.put(id);
+          assertFalse(
+              "id set bigger than max allowed (" + maxSize + "): " + ids.size(),
+              maxSize < ids.size());
+        });
     return ids;
   }

   /**
    * Given a set of params, executes a cursor query using {@link CursorMarkParams#CURSOR_MARK_START}
-   * and then continuously walks the results using {@link CursorMarkParams#CURSOR_MARK_START} as long
-   * as a non-0 number of docs ar returned. This method does some basic validation of each response, and then
-   * passes each doc encountered (in order returned) to the specified Consumer, which may throw an assertion if
-   * there is a problem.
+   * and then continuously walks the results using {@link CursorMarkParams#CURSOR_MARK_START} as
+   * long as a non-0 number of docs are returned. This method does some basic validation of each
+   * response, and then passes each doc encountered (in order returned) to the specified Consumer,
+   * which may throw an assertion if there is a problem.
   */
-  public void assertFullWalkNoDups(SolrParams params, Consumer<Map<Object,Object>> consumer)
-    throws Exception {
-
+  public void assertFullWalkNoDups(SolrParams params, Consumer<Map<Object, Object>> consumer)
+      throws Exception {
+
     String cursorMark = CURSOR_MARK_START;
     int docsOnThisPage = Integer.MAX_VALUE;
     while (0 < docsOnThisPage) {
-      String json = assertJQ(req(params,
-                                 CURSOR_MARK_PARAM, cursorMark));
+      String json = assertJQ(req(params, CURSOR_MARK_PARAM, cursorMark));
       Map rsp = (Map) fromJSONString(json);
-      assertTrue("response doesn't contain " + CURSOR_MARK_NEXT + ": " + json,
-                 rsp.containsKey(CURSOR_MARK_NEXT));
-      String nextCursorMark = (String)rsp.get(CURSOR_MARK_NEXT);
+      assertTrue(
+          "response doesn't contain " + CURSOR_MARK_NEXT + ": " + json,
+          rsp.containsKey(CURSOR_MARK_NEXT));
+      String nextCursorMark = (String) rsp.get(CURSOR_MARK_NEXT);
       assertNotNull(CURSOR_MARK_NEXT + " is null", nextCursorMark);
       @SuppressWarnings("unchecked")
-      List<Map<Object,Object>> docs = (List<Map<Object,Object>>) (((Map)rsp.get("response")).get("docs"));
+      List<Map<Object, Object>> docs =
+          (List<Map<Object, Object>>) (((Map) rsp.get("response")).get("docs"));
       docsOnThisPage = docs.size();
       if (null != params.getInt(CommonParams.ROWS)) {
         int rows = params.getInt(CommonParams.ROWS);
-        assertTrue("Too many docs on this page: " + rows + " < " + docsOnThisPage,
-                   docsOnThisPage <= rows);
+        assertTrue(
+            "Too many docs on this page: " + rows + " < " + docsOnThisPage, docsOnThisPage <= rows);
       }
       if (0 == docsOnThisPage) {
-        assertEquals("no more docs, but "+CURSOR_MARK_NEXT+" isn't same",
-                     cursorMark, nextCursorMark);
+        assertEquals(
+            "no more docs, but " + CURSOR_MARK_NEXT + " isn't same", cursorMark, nextCursorMark);
       }
-      for (Map<Object,Object> doc : docs) {
+      for (Map<Object, Object> doc : docs) {
        consumer.accept(doc);
      }
      cursorMark = nextCursorMark;
    }
  }

-  /**
-   * test faceting with deep paging
-   */
+  /** test faceting with deep paging */
  public void testFacetingWithRandomSorts() throws Exception {
    final int numDocs = TestUtil.nextInt(random(), 1000, 3000);
-    String[] fieldsToFacetOn = { "int", "long", "str" };
-    String[] facetMethods = { "enum", "fc", "fcs" };
+    String[] fieldsToFacetOn = {"int", "long", "str"};
+    String[] facetMethods = {"enum", "fc", "fcs"};

    for (int i = 1; i <= numDocs; i++) {
      SolrInputDocument doc = buildRandomDocument(i);
@@ -904,31 +1026,41 @@ public void testFacetingWithRandomSorts() throws Exception {
    String order = 0 == TestUtil.nextInt(random(), 0, 1) ? " asc" : " desc";
    String sort = f + order + (f.equals("id") ? "" : ", id" + order);
    String rows = "" + TestUtil.nextInt(random(), 13, 50);
-    String facetField = fieldsToFacetOn
-      [TestUtil.nextInt(random(), 0, fieldsToFacetOn.length - 1)];
-    String facetMethod = facetMethods
-      [TestUtil.nextInt(random(), 0, facetMethods.length - 1)];
-    SentinelIntSet ids = assertFullWalkNoDupsWithFacets
-      (numDocs, params("q", "*:*",
-                       "fl", "id," + facetField,
-                       "facet", "true",
-                       "facet.field", facetField,
-                       "facet.method", facetMethod,
-                       "facet.missing", "true",
-                       "facet.limit", "-1", // unlimited
-                       "rows", rows,
-                       "sort", sort));
+    String facetField = fieldsToFacetOn[TestUtil.nextInt(random(), 0, fieldsToFacetOn.length - 1)];
+    String facetMethod = facetMethods[TestUtil.nextInt(random(), 0, facetMethods.length - 1)];
+    SentinelIntSet ids =
+        assertFullWalkNoDupsWithFacets(
+            numDocs,
+            params(
+                "q",
+                "*:*",
+                "fl",
+                "id," + facetField,
+                "facet",
+                "true",
+                "facet.field",
+                facetField,
+                "facet.method",
+                facetMethod,
+                "facet.missing",
+                "true",
+                "facet.limit",
+                "-1", // unlimited
+                "rows",
+                rows,
+                "sort",
+                sort));
    assertEquals(numDocs, ids.size());
  }

  /**
   * Given a set of params, executes a cursor query using {@link CursorMarkParams#CURSOR_MARK_START}
-   * and then continuously walks the results using {@link CursorMarkParams#CURSOR_MARK_START} as long
-   * as a non-0 number of docs ar returned. This method records the the set of all id's
-   * (must be positive ints) encountered and throws an assertion failure if any id is
-   * encountered more than once, or if the set grows above maxSize.
+   * and then continuously walks the results using {@link CursorMarkParams#CURSOR_MARK_START} as
+   * long as a non-0 number of docs are returned. This method records the set of all id's (must
+   * be positive ints) encountered and throws an assertion failure if any id is encountered more
+   * than once, or if the set grows above maxSize.
   *
-   * Also checks that facets are the same with each page, and that they are correct.
+   * <p>

Also checks that facets are the same with each page, and that they are correct. */ public SentinelIntSet assertFullWalkNoDupsWithFacets(int maxSize, SolrParams params) throws Exception { @@ -937,7 +1069,7 @@ public SentinelIntSet assertFullWalkNoDupsWithFacets(int maxSize, SolrParams par assertNotNull("facet.field param not specified", facetField); assertFalse("facet.field param contains multiple values", facetField.contains(",")); assertEquals("facet.limit param not set to -1", "-1", params.get("facet.limit")); - final Map facetCounts = new HashMap<>(); + final Map facetCounts = new HashMap<>(); SentinelIntSet ids = new SentinelIntSet(maxSize, -1); String cursorMark = CURSOR_MARK_START; int docsOnThisPage = Integer.MAX_VALUE; @@ -945,28 +1077,31 @@ public SentinelIntSet assertFullWalkNoDupsWithFacets(int maxSize, SolrParams par while (0 < docsOnThisPage) { String json = assertJQ(req(params, CURSOR_MARK_PARAM, cursorMark)); Map rsp = (Map) fromJSONString(json); - assertTrue("response doesn't contain " + CURSOR_MARK_NEXT + ": " + json, - rsp.containsKey(CURSOR_MARK_NEXT)); - String nextCursorMark = (String)rsp.get(CURSOR_MARK_NEXT); + assertTrue( + "response doesn't contain " + CURSOR_MARK_NEXT + ": " + json, + rsp.containsKey(CURSOR_MARK_NEXT)); + String nextCursorMark = (String) rsp.get(CURSOR_MARK_NEXT); assertNotNull(CURSOR_MARK_NEXT + " is null", nextCursorMark); @SuppressWarnings({"unchecked"}) - List> docs = (List>)(((Map)rsp.get("response")).get("docs")); + List> docs = + (List>) (((Map) rsp.get("response")).get("docs")); docsOnThisPage = docs.size(); if (null != params.getInt(CommonParams.ROWS)) { int rows = params.getInt(CommonParams.ROWS); - assertTrue("Too many docs on this page: " + rows + " < " + docsOnThisPage, - docsOnThisPage <= rows); + assertTrue( + "Too many docs on this page: " + rows + " < " + docsOnThisPage, docsOnThisPage <= rows); } if (0 == docsOnThisPage) { - assertEquals("no more docs, but "+CURSOR_MARK_NEXT+" isn't same", - cursorMark, nextCursorMark); + assertEquals( + "no more docs, but " + CURSOR_MARK_NEXT + " isn't same", cursorMark, nextCursorMark); } - for (Map doc : docs) { + for (Map doc : docs) { int id = Integer.parseInt(doc.get("id").toString()); assertFalse("walk already seen: " + id, ids.exists(id)); ids.put(id); - assertFalse("id set bigger then max allowed ("+maxSize+"): " + ids.size(), - maxSize < ids.size()); + assertFalse( + "id set bigger then max allowed (" + maxSize + "): " + ids.size(), + maxSize < ids.size()); Object facet = doc.get(facetField); String facetString = null == facet ? 
null : facet.toString(); // null: missing facet value MutableValueInt count = facetCounts.get(facetString); @@ -978,14 +1113,16 @@ public SentinelIntSet assertFullWalkNoDupsWithFacets(int maxSize, SolrParams par } cursorMark = nextCursorMark; - Map facetFields = (Map)((Map)rsp.get("facet_counts")).get("facet_fields"); - List facets = (List)facetFields.get(facetField); + Map facetFields = (Map) ((Map) rsp.get("facet_counts")).get("facet_fields"); + List facets = (List) facetFields.get(facetField); if (null != previousFacets) { - assertEquals("Facets not the same as on previous page:\nprevious page facets: " - + Arrays.toString(facets.toArray(new Object[facets.size()])) - + "\ncurrent page facets: " - + Arrays.toString(previousFacets.toArray(new Object[previousFacets.size()])), - previousFacets, facets); + assertEquals( + "Facets not the same as on previous page:\nprevious page facets: " + + Arrays.toString(facets.toArray(new Object[facets.size()])) + + "\ncurrent page facets: " + + Arrays.toString(previousFacets.toArray(new Object[previousFacets.size()])), + previousFacets, + facets); } previousFacets = facets; } @@ -993,47 +1130,51 @@ public SentinelIntSet assertFullWalkNoDupsWithFacets(int maxSize, SolrParams par assertNotNull("previousFacets is null", previousFacets); assertEquals("Mismatch in number of facets: ", facetCounts.size(), previousFacets.size() / 2); int pos; - for (pos = 0 ; pos < previousFacets.size() ; pos += 2) { - String label = (String)previousFacets.get(pos); - int expectedCount = ((Number)previousFacets.get(pos + 1)).intValue(); + for (pos = 0; pos < previousFacets.size(); pos += 2) { + String label = (String) previousFacets.get(pos); + int expectedCount = ((Number) previousFacets.get(pos + 1)).intValue(); MutableValueInt count = facetCounts.get(label); assertNotNull("Expected facet label #" + (pos / 2) + " not found: '" + label + "'", count); - assertEquals("Facet count mismatch for label #" + (pos / 2) + " '" + label + "'", expectedCount, - facetCounts.get(label).value); + assertEquals( + "Facet count mismatch for label #" + (pos / 2) + " '" + label + "'", + expectedCount, + facetCounts.get(label).value); pos += 2; } return ids; } /** - * Asserts that the query matches the specified JSON patterns and then returns the - * {@link CursorMarkParams#CURSOR_MARK_NEXT} value from the response + * Asserts that the query matches the specified JSON patterns and then returns the {@link + * CursorMarkParams#CURSOR_MARK_NEXT} value from the response * * @see #assertJQ */ public String assertCursor(SolrQueryRequest req, String... 
tests) throws Exception { String json = assertJQ(req, tests); Map rsp = (Map) fromJSONString(json); - assertTrue("response doesn't contain "+CURSOR_MARK_NEXT + ": " + json, - rsp.containsKey(CURSOR_MARK_NEXT)); - String next = (String)rsp.get(CURSOR_MARK_NEXT); + assertTrue( + "response doesn't contain " + CURSOR_MARK_NEXT + ": " + json, + rsp.containsKey(CURSOR_MARK_NEXT)); + String next = (String) rsp.get(CURSOR_MARK_NEXT); assertNotNull(CURSOR_MARK_NEXT + " is null", next); return next; } - /** - * execute a local request, verify that we get an expected error - */ - public void assertFail(SolrParams p, ErrorCode expCode, String expSubstr) - throws Exception { + /** execute a local request, verify that we get an expected error */ + public void assertFail(SolrParams p, ErrorCode expCode, String expSubstr) throws Exception { try { - SolrException e = expectThrows(SolrException.class, () -> { - ignoreException(expSubstr); - assertJQ(req(p)); - }); + SolrException e = + expectThrows( + SolrException.class, + () -> { + ignoreException(expSubstr); + assertJQ(req(p)); + }); assertEquals(expCode.code, e.code()); - assertTrue("Expected substr not found: " + expSubstr + " numericFields = Arrays.asList("int","long","float","double"); + List numericFields = Arrays.asList("int", "long", "float", "double"); Collections.shuffle(numericFields, random()); if (random().nextBoolean()) { // simple function query across one field. @@ -1112,14 +1256,20 @@ public static String buildRandomQuery() { // several SHOULD clauses on range queries int low = TestUtil.nextInt(random(), -2379, 2); int high = TestUtil.nextInt(random(), 4, 5713); - return - numericFields.get(0) + ":[* TO 0] " + - numericFields.get(1) + ":[0 TO *] " + - numericFields.get(2) + ":[" + low + " TO " + high + "]"; + return numericFields.get(0) + + ":[* TO 0] " + + numericFields.get(1) + + ":[0 TO *] " + + numericFields.get(2) + + ":[" + + low + + " TO " + + high + + "]"; } } - private static final String[] currencies = { "USD", "EUR", "NOK" }; + private static final String[] currencies = {"USD", "EUR", "NOK"}; public static String randomCurrency() { return currencies[random().nextInt(currencies.length)]; @@ -1130,8 +1280,8 @@ private static String randomEnumValue() { } /** - * Given a list of fieldNames, builds up a random sort string which is guaranteed to - * have at least 3 clauses, ending with the "id" field for tie breaking + * Given a list of fieldNames, builds up a random sort string which is guaranteed to have at least + * 3 clauses, ending with the "id" field for tie breaking */ public static String buildRandomSort(final Collection fieldNames) { @@ -1146,11 +1296,13 @@ public static String buildRandomSort(final Collection fieldNames) { String field = shuffledNames.get(i); // wrap in a function sometimes - if ( ! "score".equals(field) && 0 == TestUtil.nextInt(random(), 0, 7)) { + if (!"score".equals(field) && 0 == TestUtil.nextInt(random(), 0, 7)) { // specific function doesn't matter, just proving that we can handle the concept. 
// but we do have to be careful with non numeric fields - if (field.contains("float") || field.contains("double") - || field.contains("int") || field.contains("long")) { + if (field.contains("float") + || field.contains("double") + || field.contains("int") + || field.contains("long")) { field = "abs(" + field + ")"; } else { field = "if(exists(" + field + "),47,83)"; @@ -1162,9 +1314,7 @@ public static String buildRandomSort(final Collection fieldNames) { return result.toString(); } - /** - * Given a set of id, picks some, semi-randomly, to use for elevation - */ + /** Given a set of id, picks some, semi-randomly, to use for elevation */ public static int[] pickElevations(final int numToElevate, final SentinelIntSet ids) { assert numToElevate < ids.size(); final int[] results = new int[numToElevate]; @@ -1188,5 +1338,4 @@ public static int[] pickElevations(final int numToElevate, final SentinelIntSet } return results; } - } diff --git a/solr/core/src/test/org/apache/solr/DisMaxRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/DisMaxRequestHandlerTest.java index dbd4c64c46a..3c16be19704 100644 --- a/solr/core/src/test/org/apache/solr/DisMaxRequestHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/DisMaxRequestHandlerTest.java @@ -16,54 +16,90 @@ */ package org.apache.solr; +import java.util.regex.Pattern; import org.apache.solr.common.params.CommonParams; import org.junit.BeforeClass; import org.junit.Test; -import java.util.regex.Pattern; - -/** - * Tests some basic functionality of the DisMaxRequestHandler - */ +/** Tests some basic functionality of the DisMaxRequestHandler */ public class DisMaxRequestHandlerTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema.xml"); - lrf = h.getRequestFactory - ("/dismax", 0, 20, - CommonParams.VERSION,"2.2", - "facet", "true", - "facet.field","t_s" - ); - /** Add some documents to the index */ - assertNull(h.validateUpdate(adoc("id", "666", - "features_t", "cool and scary stuff", - "subject", "traveling in hell", - "t_s", "movie", - "title", "The Omen", - "weight", "87.9", - "iind", "666"))); - assertNull(h.validateUpdate(adoc("id", "42", - "features_t", "cool stuff", - "subject", "traveling the galaxy", - "t_s", "movie", "t_s", "book", - "title", "Hitch Hiker's Guide to the Galaxy", - "weight", "99.45", - "iind", "42"))); - assertNull(h.validateUpdate(adoc("id", "1", - "features_t", "nothing", - "subject", "garbage", - "t_s", "book", - "title", "Most Boring Guide Ever", - "weight", "77", - "iind", "4"))); - assertNull(h.validateUpdate(adoc("id", "8675309", - "features_t", "Wikedly memorable chorus and stuff", - "subject", "One Cool Hot Chick", - "t_s", "song", - "title", "Jenny", - "weight", "97.3", - "iind", "8675309"))); + initCore("solrconfig.xml", "schema.xml"); + lrf = + h.getRequestFactory( + "/dismax", 0, 20, CommonParams.VERSION, "2.2", "facet", "true", "facet.field", "t_s"); + /** Add some documents to the index */ + assertNull( + h.validateUpdate( + adoc( + "id", + "666", + "features_t", + "cool and scary stuff", + "subject", + "traveling in hell", + "t_s", + "movie", + "title", + "The Omen", + "weight", + "87.9", + "iind", + "666"))); + assertNull( + h.validateUpdate( + adoc( + "id", + "42", + "features_t", + "cool stuff", + "subject", + "traveling the galaxy", + "t_s", + "movie", + "t_s", + "book", + "title", + "Hitch Hiker's Guide to the Galaxy", + "weight", + "99.45", + "iind", + "42"))); + assertNull( + h.validateUpdate( + adoc( + "id", + 
"1", + "features_t", + "nothing", + "subject", + "garbage", + "t_s", + "book", + "title", + "Most Boring Guide Ever", + "weight", + "77", + "iind", + "4"))); + assertNull( + h.validateUpdate( + adoc( + "id", + "8675309", + "features_t", + "Wikedly memorable chorus and stuff", + "subject", + "One Cool Hot Chick", + "t_s", + "song", + "title", + "Jenny", + "weight", + "97.3", + "iind", + "8675309"))); assertNull(h.validateUpdate(commit())); } @@ -71,124 +107,130 @@ public static void beforeClass() throws Exception { public void testSomeStuff() throws Exception { doTestSomeStuff("/dismax"); } + public void doTestSomeStuff(final String qt) throws Exception { - assertQ("basic match", - req("guide") - ,"//*[@numFound='2']" - ,"//lst[@name='facet_fields']/lst[@name='t_s']" - ,"*[count(//lst[@name='t_s']/int)=3]" - ,"//lst[@name='t_s']/int[@name='book'][.='2']" - ,"//lst[@name='t_s']/int[@name='movie'][.='1']" - ); - - assertQ("basic cross field matching, boost on same field matching", - req("cool stuff") - ,"//*[@numFound='3']" - ,"//result/doc[1]/str[@name='id'][.='42']" - ,"//result/doc[2]/str[@name='id'][.='8675309']" - ,"//result/doc[3]/str[@name='id'][.='666']" - ); - - assertQ("multi qf", - req("q", "cool" - ,"qt", qt - ,CommonParams.VERSION, "2.2" - ,"qf", "subject" - ,"qf", "features_t" - ) - ,"//*[@numFound='3']" - ); - - assertQ("multi qf as local params", - req("q", "{!dismax qf=subject qf=features_t}cool") - ,"//*[@numFound='3']" - ); - - assertQ("boost query", - req("q", "cool stuff" - ,"qt", qt - ,CommonParams.VERSION, "2.2" - ,"bq", "subject:hell^400" - ) - ,"//*[@numFound='3']" - ,"//result/doc[1]/str[@name='id'][.='666']" - ,"//result/doc[2]/str[@name='id'][.='42']" - ,"//result/doc[3]/str[@name='id'][.='8675309']" - ); - - assertQ("multi boost query", - req("q", "cool stuff" - ,"qt", qt - ,CommonParams.VERSION, "2.2" - ,"bq", "subject:hell^400" - ,"bq", "subject:cool^4" - , CommonParams.DEBUG_QUERY, "true" - ) - ,"//*[@numFound='3']" - ,"//result/doc[1]/str[@name='id'][.='666']" - ,"//result/doc[2]/str[@name='id'][.='8675309']" - ,"//result/doc[3]/str[@name='id'][.='42']" - ); - - assertQ("minimum mm is three", - req("cool stuff traveling") - ,"//*[@numFound='2']" - ,"//result/doc[1]/str[@name='id'][. ='42']" - ,"//result/doc[2]/str[@name='id'][. 
='666']" - ); - - assertQ("at 4 mm allows one missing ", - req("cool stuff traveling jenny") - ,"//*[@numFound='3']" - ); - - assertQ("relying on ALTQ from config", - req( "qt", qt, - "fq", "id:666", - "facet", "false" ) - ,"//*[@numFound='1']" - ); - - assertQ("explicit ALTQ", - req( "qt", qt, - "q.alt", "id:9999", - "fq", "id:666", - "facet", "false" ) - ,"//*[@numFound='0']" - ); - - assertQ("no query slop == no match", - req( "qt", qt, - "q", "\"cool chick\"" ) - ,"//*[@numFound='0']" - ); - assertQ("query slop == match", - req( "qt", qt, - "qs", "2", - "q", "\"cool chick\"" ) - ,"//*[@numFound='1']" - ); + assertQ( + "basic match", + req("guide"), + "//*[@numFound='2']", + "//lst[@name='facet_fields']/lst[@name='t_s']", + "*[count(//lst[@name='t_s']/int)=3]", + "//lst[@name='t_s']/int[@name='book'][.='2']", + "//lst[@name='t_s']/int[@name='movie'][.='1']"); + + assertQ( + "basic cross field matching, boost on same field matching", + req("cool stuff"), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='42']", + "//result/doc[2]/str[@name='id'][.='8675309']", + "//result/doc[3]/str[@name='id'][.='666']"); + + assertQ( + "multi qf", + req( + "q", + "cool", + "qt", + qt, + CommonParams.VERSION, + "2.2", + "qf", + "subject", + "qf", + "features_t"), + "//*[@numFound='3']"); + + assertQ( + "multi qf as local params", + req("q", "{!dismax qf=subject qf=features_t}cool"), + "//*[@numFound='3']"); + assertQ( + "boost query", + req("q", "cool stuff", "qt", qt, CommonParams.VERSION, "2.2", "bq", "subject:hell^400"), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='666']", + "//result/doc[2]/str[@name='id'][.='42']", + "//result/doc[3]/str[@name='id'][.='8675309']"); + + assertQ( + "multi boost query", + req( + "q", + "cool stuff", + "qt", + qt, + CommonParams.VERSION, + "2.2", + "bq", + "subject:hell^400", + "bq", + "subject:cool^4", + CommonParams.DEBUG_QUERY, + "true"), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='666']", + "//result/doc[2]/str[@name='id'][.='8675309']", + "//result/doc[3]/str[@name='id'][.='42']"); + + assertQ( + "minimum mm is three", + req("cool stuff traveling"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][. ='42']", + "//result/doc[2]/str[@name='id'][. 
='666']"); + + assertQ("at 4 mm allows one missing ", req("cool stuff traveling jenny"), "//*[@numFound='3']"); + + assertQ( + "relying on ALTQ from config", + req( + "qt", qt, + "fq", "id:666", + "facet", "false"), + "//*[@numFound='1']"); + + assertQ( + "explicit ALTQ", + req( + "qt", qt, + "q.alt", "id:9999", + "fq", "id:666", + "facet", "false"), + "//*[@numFound='0']"); + + assertQ( + "no query slop == no match", req("qt", qt, "q", "\"cool chick\""), "//*[@numFound='0']"); + assertQ( + "query slop == match", + req( + "qt", qt, + "qs", "2", + "q", "\"cool chick\""), + "//*[@numFound='1']"); } @Test public void testSubQueriesNotSupported() { // See org.apache.solr.search.TestSolrQueryParser.testNestedQueryModifiers() - assertQ("don't parse subqueries", - req("defType", "dismax", + assertQ( + "don't parse subqueries", + req( + "defType", "dismax", "df", "doesnotexist_s", "q", "_query_:\"{!v=$qq}\"", - "qq", "features_t:cool") - ,"//*[@numFound='0']" - ); - assertQ("don't parse subqueries", - req("defType", "dismax", + "qq", "features_t:cool"), + "//*[@numFound='0']"); + assertQ( + "don't parse subqueries", + req( + "defType", "dismax", "df", "doesnotexist_s", "q", "{!v=$qq}", - "qq", "features_t:cool") - ,"//*[@numFound='0']" - ); + "qq", "features_t:cool"), + "//*[@numFound='0']"); } @Test @@ -198,25 +240,38 @@ public void testExtraBlankBQ() throws Exception { // surrounded by ()'s in the debug output Pattern p = Pattern.compile("subject:hell\\s*subject:cool"); Pattern p_bool = Pattern.compile("\\(subject:hell\\s*subject:cool\\)"); - String resp = h.query(req("q", "cool stuff" - ,"qt", "/dismax" - ,CommonParams.VERSION, "2.2" - ,"bq", "subject:hell OR subject:cool" - ,CommonParams.DEBUG_QUERY, "true" - )); + String resp = + h.query( + req( + "q", + "cool stuff", + "qt", + "/dismax", + CommonParams.VERSION, + "2.2", + "bq", + "subject:hell OR subject:cool", + CommonParams.DEBUG_QUERY, + "true")); assertTrue(p.matcher(resp).find()); assertFalse(p_bool.matcher(resp).find()); - resp = h.query(req("q", "cool stuff" - ,"qt", "/dismax" - ,CommonParams.VERSION, "2.2" - ,"bq", "subject:hell OR subject:cool" - ,"bq","" - ,CommonParams.DEBUG_QUERY, "true" - )); + resp = + h.query( + req( + "q", + "cool stuff", + "qt", + "/dismax", + CommonParams.VERSION, + "2.2", + "bq", + "subject:hell OR subject:cool", + "bq", + "", + CommonParams.DEBUG_QUERY, + "true")); assertTrue(p.matcher(resp).find()); assertTrue(p_bool.matcher(resp).find()); - } - } diff --git a/solr/core/src/test/org/apache/solr/DistributedIntervalFacetingTest.java b/solr/core/src/test/org/apache/solr/DistributedIntervalFacetingTest.java index 7cbd372e015..ff52ff05368 100644 --- a/solr/core/src/test/org/apache/solr/DistributedIntervalFacetingTest.java +++ b/solr/core/src/test/org/apache/solr/DistributedIntervalFacetingTest.java @@ -18,7 +18,6 @@ import java.util.Arrays; import java.util.List; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.SolrTestCaseJ4.SuppressSSL; import org.apache.solr.client.solrj.SolrQuery; @@ -29,10 +28,10 @@ import org.junit.Test; @Slow -@SuppressSSL(bugUrl="https://issues.apache.org/jira/browse/SOLR-9182 - causes OOM") -// See: https://issues.apache.org/jira/browse/SOLR-12028 Tests cannot remove files on Windows machines occasionally -public class DistributedIntervalFacetingTest extends - BaseDistributedSearchTestCase { +@SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-9182 - causes OOM") +// See: https://issues.apache.org/jira/browse/SOLR-12028 Tests cannot remove 
files on Windows +// machines occasionally +public class DistributedIntervalFacetingTest extends BaseDistributedSearchTestCase { @BeforeClass public static void beforeSuperClass() throws Exception { @@ -56,12 +55,12 @@ private void testSolrJ() throws Exception { indexr("id", "2", "test_i_dv", "2", "test_s_dv", "AAA"); indexr("id", "3", "test_i_dv", "3", "test_s_dv", "CCC"); commit(); - + QueryResponse response = controlClient.query(new SolrQuery("*:*")); assertEquals(4, response.getResults().getNumFound()); - + SolrQuery q = new SolrQuery("*:*"); - String[] intervals = new String[]{"[0,1)","[1,2)", "[2,3)", "[3,*)"}; + String[] intervals = new String[] {"[0,1)", "[1,2)", "[2,3)", "[3,*)"}; q.addIntervalFacets("test_i_dv", intervals); response = controlClient.query(q); assertEquals(1, response.getIntervalFacets().size()); @@ -72,32 +71,38 @@ private void testSolrJ() throws Exception { assertEquals(intervals[i], count.getKey()); assertEquals(1, count.getCount()); } - + q = new SolrQuery("*:*"); q.addIntervalFacets("test_i_dv", intervals); - q.addIntervalFacets("test_s_dv", new String[]{"{!key='AAA'}[AAA,AAA]", "{!key='BBB'}[BBB,BBB]", "{!key='CCC'}[CCC,CCC]"}); + q.addIntervalFacets( + "test_s_dv", + new String[] {"{!key='AAA'}[AAA,AAA]", "{!key='BBB'}[BBB,BBB]", "{!key='CCC'}[CCC,CCC]"}); response = controlClient.query(q); assertEquals(2, response.getIntervalFacets().size()); - - int stringIntervalIndex = "test_s_dv".equals(response.getIntervalFacets().get(0).getField())?0:1; - - assertEquals("test_i_dv", response.getIntervalFacets().get(1-stringIntervalIndex).getField()); + + int stringIntervalIndex = + "test_s_dv".equals(response.getIntervalFacets().get(0).getField()) ? 0 : 1; + + assertEquals("test_i_dv", response.getIntervalFacets().get(1 - stringIntervalIndex).getField()); assertEquals("test_s_dv", response.getIntervalFacets().get(stringIntervalIndex).getField()); - - for (int i = 0; i < response.getIntervalFacets().get(1-stringIntervalIndex).getIntervals().size(); i++) { - Count count = response.getIntervalFacets().get(1-stringIntervalIndex).getIntervals().get(i); + + for (int i = 0; + i < response.getIntervalFacets().get(1 - stringIntervalIndex).getIntervals().size(); + i++) { + Count count = response.getIntervalFacets().get(1 - stringIntervalIndex).getIntervals().get(i); assertEquals(intervals[i], count.getKey()); assertEquals(1, count.getCount()); } - - List stringIntervals = response.getIntervalFacets().get(stringIntervalIndex).getIntervals(); + + List stringIntervals = + response.getIntervalFacets().get(stringIntervalIndex).getIntervals(); assertEquals(3, stringIntervals.size()); assertEquals("AAA", stringIntervals.get(0).getKey()); assertEquals(2, stringIntervals.get(0).getCount()); - + assertEquals("BBB", stringIntervals.get(1).getKey()); assertEquals(1, stringIntervals.get(1).getCount()); - + assertEquals("CCC", stringIntervals.get(2).getKey()); assertEquals(1, stringIntervals.get(2).getCount()); } @@ -106,17 +111,28 @@ private void testRandom() throws Exception { // All field values will be a number between 0 and cardinality int cardinality = 1000000; // Fields to use for interval faceting - String[] fields = new String[]{"test_s_dv", "test_i_dv", "test_l_dv", "test_f_dv", "test_d_dv", - "test_ss_dv", "test_is_dv", "test_fs_dv", "test_ls_dv", "test_ds_dv"}; + String[] fields = + new String[] { + "test_s_dv", + "test_i_dv", + "test_l_dv", + "test_f_dv", + "test_d_dv", + "test_ss_dv", + "test_is_dv", + "test_fs_dv", + "test_ls_dv", + "test_ds_dv" + }; for (int i = 0; i < 
atLeast(500); i++) { if (random().nextInt(50) == 0) { - //have some empty docs + // have some empty docs indexr("id", String.valueOf(i)); continue; } if (random().nextInt(100) == 0 && i > 0) { - //delete some docs + // delete some docs del("id:" + String.valueOf(i - 1)); } Object[] docFields = new Object[(random().nextInt(5)) * 10 + 12]; @@ -156,30 +172,28 @@ private void testRandom() throws Exception { handle.put("timestamp", SKIPVAL); handle.put("maxScore", SKIPVAL); - for (int i = 0; i < atLeast(100); i++) { doTestQuery(cardinality, fields); } - } /** - * Executes one query using interval faceting and compares with the same query using - * facet query with the same range + * Executes one query using interval faceting and compares with the same query using facet query + * with the same range */ private void doTestQuery(int cardinality, String[] fields) throws Exception { - String[] startOptions = new String[]{"(", "["}; - String[] endOptions = new String[]{")", "]"}; + String[] startOptions = new String[] {"(", "["}; + String[] endOptions = new String[] {")", "]"}; // the query should match some documents in most cases Integer[] qRange = getRandomRange(cardinality, "id"); ModifiableSolrParams params = new ModifiableSolrParams(); params.set("q", "id:[" + qRange[0] + " TO " + qRange[1] + "]"); params.set("facet", "true"); params.set("rows", "0"); - String field = fields[random().nextInt(fields.length)]; //choose from any of the fields + String field = fields[random().nextInt(fields.length)]; // choose from any of the fields if (random().nextBoolean()) { params.set("facet.interval", field); - } else { + } else { params.set("facet.interval", getFieldWithKey(field)); } // number of intervals @@ -187,10 +201,10 @@ private void doTestQuery(int cardinality, String[] fields) throws Exception { Integer[] interval = getRandomRange(cardinality, field); String open = startOptions[interval[0] % 2]; String close = endOptions[interval[1] % 2]; - params.add("f." + field + ".facet.interval.set", open + interval[0] + "," + interval[1] + close); + params.add( + "f." + field + ".facet.interval.set", open + interval[0] + "," + interval[1] + close); } query(params); - } private String getFieldWithKey(String field) { @@ -198,11 +212,9 @@ private String getFieldWithKey(String field) { } /** - * Returns a random range. It's guaranteed that the first - * number will be lower than the second, and both of them - * between 0 (inclusive) and max (exclusive). - * If the fieldName is "test_s_dv" or "test_ss_dv" (the - * two fields used for Strings), the comparison will be done + * Returns a random range. It's guaranteed that the first number will be lower than the second, + * and both of them between 0 (inclusive) and max (exclusive). 
If the fieldName is + * "test_s_dv" or "test_ss_dv" (the two fields used for Strings), the comparison will be done * alphabetically */ private Integer[] getRandomRange(int max, String fieldName) { diff --git a/solr/core/src/test/org/apache/solr/EchoParamsTest.java b/solr/core/src/test/org/apache/solr/EchoParamsTest.java index 44699e8948c..1ed909defc2 100644 --- a/solr/core/src/test/org/apache/solr/EchoParamsTest.java +++ b/solr/core/src/test/org/apache/solr/EchoParamsTest.java @@ -21,16 +21,15 @@ import org.junit.Test; /** Test SOLR-59, echo of query parameters */ - public class EchoParamsTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solr/crazy-path-to-config.xml","solr/crazy-path-to-schema.xml"); + initCore("solr/crazy-path-to-config.xml", "solr/crazy-path-to-schema.xml"); } private static final String HEADER_XPATH = "/response/lst[@name='responseHeader']"; - + @Test public void test() { defaultEchoParams(); @@ -39,42 +38,51 @@ public void test() { allEchoParams(); } - // the following test methods rely on their order, which is no longer guaranteed by Java 7, so call them directly above: - + // the following test methods rely on their order, which is no longer guaranteed by Java 7, so + // call them directly above: + private void defaultEchoParams() { lrf.args.put("wt", "xml"); - lrf.args.put(CommonParams.VERSION, "2.2"); - assertQ(req("foo"),HEADER_XPATH + "/int[@name='status']"); - assertQ(req("foo"),"not(//lst[@name='params'])"); + lrf.args.put(CommonParams.VERSION, "2.2"); + assertQ(req("foo"), HEADER_XPATH + "/int[@name='status']"); + assertQ(req("foo"), "not(//lst[@name='params'])"); } private void defaultEchoParamsDefaultVersion() { lrf.args.put("wt", "xml"); - lrf.args.remove(CommonParams.VERSION); - assertQ(req("foo"),HEADER_XPATH + "/int[@name='status']"); - assertQ(req("foo"),"not(//lst[@name='params'])"); + lrf.args.remove(CommonParams.VERSION); + assertQ(req("foo"), HEADER_XPATH + "/int[@name='status']"); + assertQ(req("foo"), "not(//lst[@name='params'])"); } private void explicitEchoParams() { lrf.args.put("wt", "xml"); lrf.args.put(CommonParams.VERSION, "2.2"); lrf.args.put("echoParams", "explicit"); - assertQ(req("foo"),HEADER_XPATH + "/int[@name='status']"); - assertQ(req("foo"),HEADER_XPATH + "/lst[@name='params']"); - assertQ(req("foo"),HEADER_XPATH + "/lst[@name='params']/str[@name='wt'][.='xml']"); + assertQ(req("foo"), HEADER_XPATH + "/int[@name='status']"); + assertQ(req("foo"), HEADER_XPATH + "/lst[@name='params']"); + assertQ(req("foo"), HEADER_XPATH + "/lst[@name='params']/str[@name='wt'][.='xml']"); } private void allEchoParams() { - lrf = h.getRequestFactory - ("/crazy_custom_qt", 0, 20, - CommonParams.VERSION,"2.2", - "wt","xml", - "echoParams", "all", - "echoHandler","true" - ); + lrf = + h.getRequestFactory( + "/crazy_custom_qt", + 0, + 20, + CommonParams.VERSION, + "2.2", + "wt", + "xml", + "echoParams", + "all", + "echoHandler", + "true"); - assertQ(req("foo"),HEADER_XPATH + "/lst[@name='params']/str[@name='fl'][.='implicit']"); - assertQ(req("foo"),HEADER_XPATH + "/str[@name='handler'][.='org.apache.solr.handler.component.SearchHandler']"); + assertQ(req("foo"), HEADER_XPATH + "/lst[@name='params']/str[@name='fl'][.='implicit']"); + assertQ( + req("foo"), + HEADER_XPATH + + "/str[@name='handler'][.='org.apache.solr.handler.component.SearchHandler']"); } - } diff --git a/solr/core/src/test/org/apache/solr/HelloWorldSolrCloudTestCase.java 
b/solr/core/src/test/org/apache/solr/HelloWorldSolrCloudTestCase.java index 0f4bf04bd91..270ab6cabe7 100644 --- a/solr/core/src/test/org/apache/solr/HelloWorldSolrCloudTestCase.java +++ b/solr/core/src/test/org/apache/solr/HelloWorldSolrCloudTestCase.java @@ -23,25 +23,21 @@ import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.cloud.SolrCloudTestCase; import org.apache.solr.common.SolrInputDocument; - import org.junit.BeforeClass; import org.junit.Test; /** - * How to use this test class: - * #1 Run the test, e.g. - * in Eclipse 'Run As JUnit Test' or - * on the command line: ./gradlew -p solr/core test --tests HelloWorldSolrCloudTestCase - * #2 Modify the test, e.g. - * in setupCluster add further documents and then re-run the test. + * How to use this test class: #1 Run the test, e.g. in Eclipse 'Run As JUnit Test' or on the + * command line: ./gradlew -p solr/core test --tests HelloWorldSolrCloudTestCase #2 Modify the test, + * e.g. in setupCluster add further documents and then re-run the test. */ public class HelloWorldSolrCloudTestCase extends SolrCloudTestCase { - private static final String COLLECTION = "hello_world" ; + private static final String COLLECTION = "hello_world"; private static final int numShards = 3; private static final int numReplicas = 2; - private static final int nodeCount = numShards*numReplicas; + private static final int nodeCount = numShards * numReplicas; private static final String id = "id"; @@ -49,22 +45,24 @@ public class HelloWorldSolrCloudTestCase extends SolrCloudTestCase { public static void setupCluster() throws Exception { // create and configure cluster - configureCluster(nodeCount) - .addConfig("conf", configset("cloud-dynamic")) - .configure(); + configureCluster(nodeCount).addConfig("conf", configset("cloud-dynamic")).configure(); // create an empty collection CollectionAdminRequest.createCollection(COLLECTION, "conf", numShards, numReplicas) .process(cluster.getSolrClient()); // add a document - final SolrInputDocument doc1 = sdoc(id, "1", - "title_s", "Here comes the sun", - "artist_s", "The Beatles", - "popularity_i", "123"); - new UpdateRequest() - .add(doc1) - .commit(cluster.getSolrClient(), COLLECTION); + final SolrInputDocument doc1 = + sdoc( + id, + "1", + "title_s", + "Here comes the sun", + "artist_s", + "The Beatles", + "popularity_i", + "123"); + new UpdateRequest().add(doc1).commit(cluster.getSolrClient(), COLLECTION); // add further document(s) here // TODO @@ -72,7 +70,9 @@ public static void setupCluster() throws Exception { @Test public void testHighestScoring() throws Exception { - final SolrQuery solrQuery = new SolrQuery("q", "*:*", "fl", "id,popularity_i", "sort", "popularity_i desc", "rows", "1"); + final SolrQuery solrQuery = + new SolrQuery( + "q", "*:*", "fl", "id,popularity_i", "sort", "popularity_i desc", "rows", "1"); final CloudSolrClient cloudSolrClient = cluster.getSolrClient(); final QueryResponse rsp = cloudSolrClient.query(COLLECTION, solrQuery); assertEquals(1, rsp.getResults().size()); @@ -81,12 +81,11 @@ public void testHighestScoring() throws Exception { @Test public void testLowestScoring() throws Exception { - final SolrQuery solrQuery = new SolrQuery("q", "*:*", "fl", "id,popularity_i", "sort", "popularity_i asc", "rows", "1"); + final SolrQuery solrQuery = + new SolrQuery("q", "*:*", "fl", "id,popularity_i", "sort", "popularity_i asc", "rows", "1"); final CloudSolrClient cloudSolrClient = cluster.getSolrClient(); final QueryResponse rsp = cloudSolrClient.query(COLLECTION, 
solrQuery); assertEquals(1, rsp.getResults().size()); assertEquals("1", rsp.getResults().get(0).getFieldValue(id)); } - } - diff --git a/solr/core/src/test/org/apache/solr/MinimalSchemaTest.java b/solr/core/src/test/org/apache/solr/MinimalSchemaTest.java index 8e0f8db6c12..a0bbd490f5f 100644 --- a/solr/core/src/test/org/apache/solr/MinimalSchemaTest.java +++ b/solr/core/src/test/org/apache/solr/MinimalSchemaTest.java @@ -16,87 +16,78 @@ */ package org.apache.solr; +import java.util.Set; import org.apache.solr.common.params.CommonParams; import org.junit.BeforeClass; import org.junit.Test; -import java.util.Set; - -/** - * A test of basic features using the minial legal solr schema. - */ +/** A test of basic features using the minimal legal solr schema. */ public class MinimalSchemaTest extends SolrTestCaseJ4 { /** - * NOTE: we explicitly use the general 'solrconfig.xml' file here, in - * an attempt to test as many broad features as possible. + * NOTE: we explicitly use the general 'solrconfig.xml' file here, in an attempt to test as many + * broad features as possible. * - * Do not change this to point at some other "simpler" solrconfig.xml - * just because you want to add a new test case using solrconfig.xml, - * but your new testcase adds a feature that breaks this test. + *
<p>
Do not change this to point at some other "simpler" solrconfig.xml just because you want to + * add a new test case using solrconfig.xml, but your new testcase adds a feature that breaks this + * test. */ @BeforeClass public static void beforeClass() throws Exception { - initCore("solr/collection1/conf/solrconfig.xml","solr/collection1/conf/schema-minimal.xml"); + initCore("solr/collection1/conf/solrconfig.xml", "solr/collection1/conf/schema-minimal.xml"); - /* make sure some misguided soul doesn't inadvertently give us + /* make sure some misguided soul doesn't inadvertently give us a uniqueKey field and defeat the point of the tests */ - assertNull("UniqueKey Field isn't null", - h.getCore().getLatestSchema().getUniqueKeyField()); - - lrf.args.put(CommonParams.VERSION,"2.2"); - - assertNull("Simple assertion that adding a document works", h.validateUpdate( - adoc("id", "4055", - "subject", "Hoss", - "project", "Solr"))); - assertNull(h.validateUpdate(adoc("id", "4056", - "subject", "Yonik", - "project", "Solr"))); + assertNull("UniqueKey Field isn't null", h.getCore().getLatestSchema().getUniqueKeyField()); + + lrf.args.put(CommonParams.VERSION, "2.2"); + + assertNull( + "Simple assertion that adding a document works", + h.validateUpdate( + adoc( + "id", "4055", + "subject", "Hoss", + "project", "Solr"))); + assertNull(h.validateUpdate(adoc("id", "4056", "subject", "Yonik", "project", "Solr"))); assertNull(h.validateUpdate(commit())); assertNull(h.validateUpdate(optimize())); - } @Test public void testSimpleQueries() { - assertQ("couldn't find subject hoss", - req("subject:Hoss") - ,"//result[@numFound=1]" - ,"//str[@name='id'][.='4055']" - ); - - assertQ("couldn't find subject Yonik", - req("subject:Yonik") - ,"//result[@numFound=1]" - ,"//str[@name='id'][.='4056']" - ); + assertQ( + "couldn't find subject hoss", + req("subject:Hoss"), + "//result[@numFound=1]", + "//str[@name='id'][.='4055']"); + + assertQ( + "couldn't find subject Yonik", + req("subject:Yonik"), + "//result[@numFound=1]", + "//str[@name='id'][.='4056']"); } /** SOLR-1371 */ @Test public void testLuke() { - - assertQ("basic luke request failed", - req("qt", "/admin/luke") - ,"//int[@name='numDocs'][.='2']" - ); - assertQ("luke show schema failed", - req("qt", "/admin/luke", - "show","schema") - ,"//int[@name='numDocs'][.='2']" - ,"//null[@name='uniqueKeyField']" - ); + assertQ("basic luke request failed", req("qt", "/admin/luke"), "//int[@name='numDocs'][.='2']"); + assertQ( + "luke show schema failed", + req( + "qt", "/admin/luke", + "show", "schema"), + "//int[@name='numDocs'][.='2']", + "//null[@name='uniqueKeyField']"); } - - /** - * Iterates over all (non "/update/*") handlers in the core and hits - * them with a request (using some simple params) to verify that they - * don't generate an error against the minimal schema + /** + * Iterates over all (non "/update/*") handlers in the core and hits them with a request (using + * some simple params) to verify that they don't generate an error against the minimal schema */ @Test public void testAllConfiguredHandlers() { @@ -104,39 +95,34 @@ public void testAllConfiguredHandlers() { for (String handler : handlerNames) { try { - - if (handler.startsWith("/update") || - handler.startsWith("/admin") || - handler.startsWith("/schema") || - handler.startsWith("/config") || - handler.startsWith("/mlt") || - handler.startsWith("/export") || - handler.startsWith("/graph") || - handler.startsWith("/sql") || - handler.startsWith("/stream") || - handler.startsWith("/terms") || - 
handler.startsWith("/analysis/")|| - handler.startsWith("/debug/") || - handler.startsWith("/replication") || - handler.startsWith("/tasks") - ) { + if (handler.startsWith("/update") + || handler.startsWith("/admin") + || handler.startsWith("/schema") + || handler.startsWith("/config") + || handler.startsWith("/mlt") + || handler.startsWith("/export") + || handler.startsWith("/graph") + || handler.startsWith("/sql") + || handler.startsWith("/stream") + || handler.startsWith("/terms") + || handler.startsWith("/analysis/") + || handler.startsWith("/debug/") + || handler.startsWith("/replication") + || handler.startsWith("/tasks")) { continue; } - assertQ("failure w/handler: '" + handler + "'", - req("qt", handler, - // this should be fairly innocuous for any type of query - "q", "foo:bar", - "omitHeader", "false" - ) - ,"//lst[@name='responseHeader']" - ); + assertQ( + "failure w/handler: '" + handler + "'", + req( + "qt", handler, + // this should be fairly innocuous for any type of query + "q", "foo:bar", + "omitHeader", "false"), + "//lst[@name='responseHeader']"); } catch (Exception e) { - throw new RuntimeException("exception w/handler: '" + handler + "'", - e); + throw new RuntimeException("exception w/handler: '" + handler + "'", e); } } } } - - diff --git a/solr/core/src/test/org/apache/solr/OutputWriterTest.java b/solr/core/src/test/org/apache/solr/OutputWriterTest.java index ff88fe03017..6760c84b6b3 100644 --- a/solr/core/src/test/org/apache/solr/OutputWriterTest.java +++ b/solr/core/src/test/org/apache/solr/OutputWriterTest.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.io.Writer; - import org.apache.solr.common.params.CommonParams; import org.apache.solr.core.PluginBag; import org.apache.solr.request.SolrQueryRequest; @@ -27,77 +26,70 @@ import org.junit.BeforeClass; import org.junit.Test; -/** Tests the ability to configure multiple query output writers, and select those - * at query time. - * - */ +/** Tests the ability to configure multiple query output writers, and select those at query time. */ public class OutputWriterTest extends SolrTestCaseJ4 { - /** The XML string that's output for testing purposes. 
*/ - public static final String USELESS_OUTPUT = "useless output"; - - @BeforeClass - public static void beforeClass() throws Exception { - initCore("solr/crazy-path-to-config.xml","solr/crazy-path-to-schema.xml"); - } - - - /** - * responseHeader has changed in SOLR-59, check old and new variants, - * In SOLR-2413, we removed support for the deprecated versions - */ - @Test - public void testSOLR59responseHeaderVersions() { - // default version is 2.2, with "new" responseHeader - lrf.args.remove(CommonParams.VERSION); - lrf.args.put("wt", "standard"); - assertQ(req("foo"), "/response/lst[@name='responseHeader']/int[@name='status'][.='0']"); - lrf.args.remove("wt"); - assertQ(req("foo"), "/response/lst[@name='responseHeader']/int[@name='QTime']"); - - // and explicit 2.2 works as default - //lrf.args.put("version", "2.2"); - lrf.args.put("wt", "standard"); - assertQ(req("foo"), "/response/lst[@name='responseHeader']/int[@name='status'][.='0']"); - lrf.args.remove("wt"); - assertQ(req("foo"), "/response/lst[@name='responseHeader']/int[@name='QTime']"); - } - - @Test - public void testUselessWriter() throws Exception { - lrf.args.put("wt", "useless"); - String out = h.query(req("foo")); - assertEquals(USELESS_OUTPUT, out); - } - - public void testLazy() { - PluginBag.PluginHolder qrw = h.getCore().getResponseWriters().getRegistry().get("useless"); - assertTrue("Should be a lazy class", qrw instanceof PluginBag.LazyPluginHolder); - - qrw = h.getCore().getResponseWriters().getRegistry().get("xml"); - assertTrue("Should not be a lazy class", qrw.isLoaded()); - assertTrue("Should not be a lazy class", qrw.getClass() == PluginBag.PluginHolder.class); - - } - - //////////////////////////////////////////////////////////////////////////// - /** An output writer that doesn't do anything useful. */ - - public static class UselessOutputWriter implements QueryResponseWriter { - - public UselessOutputWriter() {} - - @Override - public void write(Writer writer, SolrQueryRequest request, SolrQueryResponse response) + /** The XML string that's output for testing purposes. 
*/ + public static final String USELESS_OUTPUT = "useless output"; + + @BeforeClass + public static void beforeClass() throws Exception { + initCore("solr/crazy-path-to-config.xml", "solr/crazy-path-to-schema.xml"); + } + + /** + * responseHeader has changed in SOLR-59, check old and new variants, In SOLR-2413, we removed + * support for the deprecated versions + */ + @Test + public void testSOLR59responseHeaderVersions() { + // default version is 2.2, with "new" responseHeader + lrf.args.remove(CommonParams.VERSION); + lrf.args.put("wt", "standard"); + assertQ(req("foo"), "/response/lst[@name='responseHeader']/int[@name='status'][.='0']"); + lrf.args.remove("wt"); + assertQ(req("foo"), "/response/lst[@name='responseHeader']/int[@name='QTime']"); + + // and explicit 2.2 works as default + // lrf.args.put("version", "2.2"); + lrf.args.put("wt", "standard"); + assertQ(req("foo"), "/response/lst[@name='responseHeader']/int[@name='status'][.='0']"); + lrf.args.remove("wt"); + assertQ(req("foo"), "/response/lst[@name='responseHeader']/int[@name='QTime']"); + } + + @Test + public void testUselessWriter() throws Exception { + lrf.args.put("wt", "useless"); + String out = h.query(req("foo")); + assertEquals(USELESS_OUTPUT, out); + } + + public void testLazy() { + PluginBag.PluginHolder qrw = + h.getCore().getResponseWriters().getRegistry().get("useless"); + assertTrue("Should be a lazy class", qrw instanceof PluginBag.LazyPluginHolder); + + qrw = h.getCore().getResponseWriters().getRegistry().get("xml"); + assertTrue("Should not be a lazy class", qrw.isLoaded()); + assertTrue("Should not be a lazy class", qrw.getClass() == PluginBag.PluginHolder.class); + } + + //////////////////////////////////////////////////////////////////////////// + /** An output writer that doesn't do anything useful. */ + public static class UselessOutputWriter implements QueryResponseWriter { + + public UselessOutputWriter() {} + + @Override + public void write(Writer writer, SolrQueryRequest request, SolrQueryResponse response) throws IOException { - writer.write(USELESS_OUTPUT); - } - - @Override - public String getContentType(SolrQueryRequest request, SolrQueryResponse response) { - return CONTENT_TYPE_TEXT_UTF8; - } - + writer.write(USELESS_OUTPUT); } + @Override + public String getContentType(SolrQueryRequest request, SolrQueryResponse response) { + return CONTENT_TYPE_TEXT_UTF8; + } + } } diff --git a/solr/core/src/test/org/apache/solr/SampleTest.java b/solr/core/src/test/org/apache/solr/SampleTest.java index 407a4832ff4..e542d02868a 100644 --- a/solr/core/src/test/org/apache/solr/SampleTest.java +++ b/solr/core/src/test/org/apache/solr/SampleTest.java @@ -22,99 +22,92 @@ import org.junit.BeforeClass; import org.junit.Test; -/** - * This is an example of how to write a JUnit tests for Solr using the - * SolrTestCaseJ4 - */ +/** This is an example of how to write a JUnit tests for Solr using the SolrTestCaseJ4 */ public class SampleTest extends SolrTestCaseJ4 { /** * All subclasses of SolrTestCaseJ4 should initialize the core. * - *
<p>
- * Note that different tests can use different schemas/configs by referring - * to any crazy path they want (as long as it works). - *
</p>
+ * <p>
Note that different tests can use different schemas/configs by referring to any crazy path + * they want (as long as it works). */ @BeforeClass public static void beforeClass() throws Exception { - initCore("solr/crazy-path-to-config.xml","solr/crazy-path-to-schema.xml"); + initCore("solr/crazy-path-to-config.xml", "solr/crazy-path-to-schema.xml"); } - - /** - * Demonstration of some of the simple ways to use the base class - */ + + /** Demonstration of some of the simple ways to use the base class */ @Test public void testSimple() { - lrf.args.put(CommonParams.VERSION,"2.2"); - assertU("Simple assertion that adding a document works", - adoc("id", "4055", - "subject", "Hoss the Hoss man Hostetter")); + lrf.args.put(CommonParams.VERSION, "2.2"); + assertU( + "Simple assertion that adding a document works", + adoc( + "id", "4055", + "subject", "Hoss the Hoss man Hostetter")); /* alternate syntax, no label */ - assertU(adoc("id", "4056", - "subject", "Some Other Guy")); + assertU( + adoc( + "id", "4056", + "subject", "Some Other Guy")); assertU(commit()); assertU(optimize()); - assertQ("couldn't find subject hoss", - req("subject:Hoss") - ,"//result[@numFound=1]" - ,"//str[@name='id'][.='4055']" - ); + assertQ( + "couldn't find subject hoss", + req("subject:Hoss"), + "//result[@numFound=1]", + "//str[@name='id'][.='4055']"); } - /** - * Demonstration of some of the more complex ways to use the base class - */ + /** Demonstration of some of the more complex ways to use the base class */ @Test public void testAdvanced() throws Exception { - lrf.args.put(CommonParams.VERSION,"2.2"); - assertU("less common case, a complex addition with options", - add(doc("id", "4059", - "subject", "Who Me?"), - "overwrite", "false")); + lrf.args.put(CommonParams.VERSION, "2.2"); + assertU( + "less common case, a complex addition with options", + add( + doc( + "id", "4059", + "subject", "Who Me?"), + "overwrite", + "false")); - assertU("or just make the raw XML yourself", - "" + - doc("id", "4059", - "subject", "Who Me Again?") + ""); + assertU( + "or just make the raw XML yourself", + "" + + doc( + "id", "4059", + "subject", "Who Me Again?") + + ""); /* or really make the xml yourself */ - assertU("4055" - +"Hoss the Hoss man Hostetter" - +""); - + assertU( + "4055" + + "Hoss the Hoss man Hostetter" + + ""); + assertU(""); assertU(""); - + /* access the default LocalRequestFactory directly to make a request */ - SolrQueryRequest req = lrf.makeRequest( "subject:Hoss" ); - assertQ("couldn't find subject hoss", - req - ,"//result[@numFound=1]" - ,"//str[@name='id'][.='4055']" - ); + SolrQueryRequest req = lrf.makeRequest("subject:Hoss"); + assertQ( + "couldn't find subject hoss", req, "//result[@numFound=1]", "//str[@name='id'][.='4055']"); /* make your own LocalRequestFactory to build a request * * Note: the qt proves we are using our custom config... */ - TestHarness.LocalRequestFactory l = h.getRequestFactory - ("/crazy_custom_qt",100,200,CommonParams.VERSION,"2.2"); - assertQ("how did i find Mack Daddy? ", - l.makeRequest( "Mack Daddy" ) - ,"//result[@numFound=0]" - ); + TestHarness.LocalRequestFactory l = + h.getRequestFactory("/crazy_custom_qt", 100, 200, CommonParams.VERSION, "2.2"); + assertQ("how did i find Mack Daddy? ", l.makeRequest("Mack Daddy"), "//result[@numFound=0]"); /* you can access the harness directly as well*/ - assertNull("how did i find Mack Daddy? ", - h.validateQuery(l.makeRequest( "Mack Daddy" ) - ,"//result[@numFound=0]" - )); - + assertNull( + "how did i find Mack Daddy? 
", + h.validateQuery(l.makeRequest("Mack Daddy"), "//result[@numFound=0]")); } } - - diff --git a/solr/core/src/test/org/apache/solr/SolrInfoBeanTest.java b/solr/core/src/test/org/apache/solr/SolrInfoBeanTest.java index c0a219919da..a76dfaee738 100644 --- a/solr/core/src/test/org/apache/solr/SolrInfoBeanTest.java +++ b/solr/core/src/test/org/apache/solr/SolrInfoBeanTest.java @@ -16,6 +16,12 @@ */ package org.apache.solr; +import java.io.File; +import java.net.URI; +import java.net.URL; +import java.util.ArrayList; +import java.util.Enumeration; +import java.util.List; import org.apache.lucene.util.TestUtil; import org.apache.solr.core.SolrInfoBean; import org.apache.solr.handler.admin.LukeRequestHandler; @@ -26,26 +32,17 @@ import org.apache.solr.metrics.SolrMetricsContext; import org.apache.solr.search.CaffeineCache; import org.junit.BeforeClass; -import java.io.File; -import java.net.URI; -import java.net.URL; -import java.util.ArrayList; -import java.util.Enumeration; -import java.util.List; -/** - * A simple test used to increase code coverage for some standard things... - */ -public class SolrInfoBeanTest extends SolrTestCaseJ4 -{ +/** A simple test used to increase code coverage for some standard things... */ +public class SolrInfoBeanTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); } /** - * Gets a list of everything we can find in the classpath and makes sure it has - * a name, description, etc... + * Gets a list of everything we can find in the classpath and makes sure it has a name, + * description, etc... */ public void testCallMBeanInfo() throws Exception { List> classes = new ArrayList<>(); @@ -54,40 +51,42 @@ public void testCallMBeanInfo() throws Exception { classes.addAll(getClassesForPackage(LukeRequestHandler.class.getPackage().getName())); classes.addAll(getClassesForPackage(DefaultSolrHighlighter.class.getPackage().getName())); classes.addAll(getClassesForPackage(CaffeineCache.class.getPackage().getName())); - // System.out.println(classes); - + // System.out.println(classes); + int checked = 0; SolrMetricManager metricManager = h.getCoreContainer().getMetricManager(); String registry = h.getCore().getCoreMetricManager().getRegistryName(); SolrMetricsContext solrMetricsContext = new SolrMetricsContext(metricManager, registry, "foo"); String scope = TestUtil.randomSimpleString(random(), 2, 10); - for(Class clazz : classes ) { - if( SolrInfoBean.class.isAssignableFrom( clazz ) ) { + for (Class clazz : classes) { + if (SolrInfoBean.class.isAssignableFrom(clazz)) { try { SolrInfoBean info = clazz.asSubclass(SolrInfoBean.class).getConstructor().newInstance(); info.initializeMetrics(solrMetricsContext, scope); - //System.out.println( info.getClass() ); - assertNotNull( info.getClass().getCanonicalName(), info.getName() ); - assertNotNull( info.getClass().getCanonicalName(), info.getDescription() ); - assertNotNull( info.getClass().getCanonicalName(), info.getCategory() ); - - if( info instanceof CaffeineCache ) { + // System.out.println( info.getClass() ); + assertNotNull(info.getClass().getCanonicalName(), info.getName()); + assertNotNull(info.getClass().getCanonicalName(), info.getDescription()); + assertNotNull(info.getClass().getCanonicalName(), info.getCategory()); + + if (info instanceof CaffeineCache) { continue; } - - assertNotNull( info.toString() ); + + assertNotNull(info.toString()); checked++; - } - catch( 
ReflectiveOperationException ex ) { + } catch (ReflectiveOperationException ex) { // expected... - //System.out.println( "unable to initialize: "+clazz ); + // System.out.println( "unable to initialize: "+clazz ); } } } - assertTrue( "there are at least 10 SolrInfoBean that should be found in the classpath, found " + checked, checked > 10 ); + assertTrue( + "there are at least 10 SolrInfoBean that should be found in the classpath, found " + + checked, + checked > 10); } - + private static List> getClassesForPackage(String pckgname) throws Exception { ArrayList directories = new ArrayList<>(); ClassLoader cld = h.getCore().getResourceLoader().getClassLoader(); @@ -95,29 +94,32 @@ private static List> getClassesForPackage(String pckgname) throws Excep Enumeration resources = cld.getResources(path); while (resources.hasMoreElements()) { final URI uri = resources.nextElement().toURI(); - if (!"file".equalsIgnoreCase(uri.getScheme())) - continue; + if (!"file".equalsIgnoreCase(uri.getScheme())) continue; final File f = new File(uri); directories.add(f); } - + ArrayList> classes = new ArrayList<>(); for (File directory : directories) { if (directory.exists()) { String[] files = directory.list(); for (String file : files) { if (file.endsWith(".class")) { - String clazzName = file.substring(0, file.length() - 6); - // exclude Test classes that happen to be in these packages. - // class.ForName'ing some of them can cause trouble. - if (!clazzName.endsWith("Test") && !clazzName.startsWith("Test")) { - classes.add(Class.forName(pckgname + '.' + clazzName)); - } + String clazzName = file.substring(0, file.length() - 6); + // exclude Test classes that happen to be in these packages. + // class.ForName'ing some of them can cause trouble. + if (!clazzName.endsWith("Test") && !clazzName.startsWith("Test")) { + classes.add(Class.forName(pckgname + '.' 
+ clazzName)); + } } } } } - assertFalse("No classes found in package '"+pckgname+"'; maybe your test classes are packaged as JAR file?", classes.isEmpty()); + assertFalse( + "No classes found in package '" + + pckgname + + "'; maybe your test classes are packaged as JAR file?", + classes.isEmpty()); return classes; } } diff --git a/solr/core/src/test/org/apache/solr/SolrTestCaseJ4DeleteCoreTest.java b/solr/core/src/test/org/apache/solr/SolrTestCaseJ4DeleteCoreTest.java index 8c344f9efb0..3167204bebb 100644 --- a/solr/core/src/test/org/apache/solr/SolrTestCaseJ4DeleteCoreTest.java +++ b/solr/core/src/test/org/apache/solr/SolrTestCaseJ4DeleteCoreTest.java @@ -21,16 +21,13 @@ public class SolrTestCaseJ4DeleteCoreTest extends SolrTestCaseJ4 { public void testDeleteCore() throws Exception { - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); assertU(adoc("id", "1")); assertU(commit()); assertQ(req("q", "*:*"), "//*[@numFound='1']"); deleteCore(); - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); assertQ(req("q", "*:*"), "//*[@numFound='0']"); } - - - } diff --git a/solr/core/src/test/org/apache/solr/SolrTestCaseJ4Test.java b/solr/core/src/test/org/apache/solr/SolrTestCaseJ4Test.java index fc995e3d4ff..d281eb7c48c 100644 --- a/solr/core/src/test/org/apache/solr/SolrTestCaseJ4Test.java +++ b/solr/core/src/test/org/apache/solr/SolrTestCaseJ4Test.java @@ -17,34 +17,35 @@ package org.apache.solr; import java.io.File; - import org.apache.commons.io.FileUtils; import org.apache.solr.common.params.ModifiableSolrParams; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; - public class SolrTestCaseJ4Test extends SolrTestCaseJ4 { private static String tmpSolrHome; @BeforeClass public static void beforeClass() throws Exception { - // Create a temporary directory that holds a core NOT named "collection1". Use the smallest configuration sets - // we can so we don't copy that much junk around. + // Create a temporary directory that holds a core NOT named "collection1". Use the smallest + // configuration sets we can so we don't copy that much junk around. tmpSolrHome = createTempDir().toFile().getAbsolutePath(); File subHome = new File(new File(tmpSolrHome, "core0"), "conf"); assertTrue("Failed to make subdirectory ", subHome.mkdirs()); String top = SolrTestCaseJ4.TEST_HOME() + "/collection1/conf"; FileUtils.copyFile(new File(top, "schema-tiny.xml"), new File(subHome, "schema-tiny.xml")); - FileUtils.copyFile(new File(top, "solrconfig-minimal.xml"), new File(subHome, "solrconfig-minimal.xml")); - FileUtils.copyFile(new File(top, "solrconfig.snippet.randomindexconfig.xml"), new File(subHome, "solrconfig.snippet.randomindexconfig.xml")); + FileUtils.copyFile( + new File(top, "solrconfig-minimal.xml"), new File(subHome, "solrconfig-minimal.xml")); + FileUtils.copyFile( + new File(top, "solrconfig.snippet.randomindexconfig.xml"), + new File(subHome, "solrconfig.snippet.randomindexconfig.xml")); FileUtils.copyDirectory(new File(tmpSolrHome, "core0"), new File(tmpSolrHome, "core1")); - // Core discovery will default to the name of the dir the core.properties file is in. So if everything else is - // OK as defaults, just the _presence_ of this file is sufficient. + // Core discovery will default to the name of the dir the core.properties file is in. So if + // everything else is OK as defaults, just the _presence_ of this file is sufficient. 
FileUtils.touch(new File(tmpSolrHome, "core0/core.properties")); FileUtils.touch(new File(tmpSolrHome, "core1/core.properties")); @@ -54,9 +55,7 @@ public static void beforeClass() throws Exception { } @AfterClass - public static void AfterClass() throws Exception { - - } + public static void AfterClass() throws Exception {} @Test public void testCorrectCore() throws Exception { @@ -74,13 +73,16 @@ public void testParams() throws Exception { params.add("rows", "42"); assertEquals(params.toString(), params("q", "*:*", "rows", "42").toString()); - expectThrows(RuntimeException.class, () -> { - params("parameterWithoutValue"); - }); - - expectThrows(RuntimeException.class, () -> { - params("q", "*:*", "rows", "42", "parameterWithoutValue"); - }); + expectThrows( + RuntimeException.class, + () -> { + params("parameterWithoutValue"); + }); + + expectThrows( + RuntimeException.class, + () -> { + params("q", "*:*", "rows", "42", "parameterWithoutValue"); + }); } - } diff --git a/solr/core/src/test/org/apache/solr/TestCrossCoreJoin.java b/solr/core/src/test/org/apache/solr/TestCrossCoreJoin.java index d21b5dbbbb0..e819cf63de1 100644 --- a/solr/core/src/test/org/apache/solr/TestCrossCoreJoin.java +++ b/solr/core/src/test/org/apache/solr/TestCrossCoreJoin.java @@ -16,10 +16,9 @@ */ package org.apache.solr; +import com.google.common.collect.ImmutableMap; import java.io.StringWriter; import java.util.Collections; - -import com.google.common.collect.ImmutableMap; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.core.CoreContainer; @@ -44,7 +43,7 @@ public class TestCrossCoreJoin extends SolrTestCaseJ4 { public static void beforeTests() throws Exception { System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_ System.setProperty("solr.filterCache.async", "true"); -// initCore("solrconfig.xml","schema12.xml"); + // initCore("solrconfig.xml","schema12.xml"); // File testHome = createTempDir().toFile(); // FileUtils.copyDirectory(getFile("solrj/solr"), testHome); @@ -53,22 +52,106 @@ public static void beforeTests() throws Exception { fromCore = coreContainer.create("fromCore", ImmutableMap.of("configSet", "minimal")); - assertU(add(doc("id", "1", "id_s_dv", "1", "name", "john", "title", "Director", "dept_s", "Engineering"))); - assertU(add(doc("id", "2", "id_s_dv", "2", "name", "mark", "title", "VP", "dept_s", "Marketing"))); - assertU(add(doc("id", "3", "id_s_dv", "3", "name", "nancy", "title", "MTS", "dept_s", "Sales"))); - assertU(add(doc("id", "4", "id_s_dv", "4", "name", "dave", "title", "MTS", "dept_s", "Support", "dept_s", "Engineering"))); - assertU(add(doc("id", "5", "id_s_dv", "5", "name", "tina", "title", "VP", "dept_s", "Engineering"))); + assertU( + add( + doc( + "id", + "1", + "id_s_dv", + "1", + "name", + "john", + "title", + "Director", + "dept_s", + "Engineering"))); + assertU( + add(doc("id", "2", "id_s_dv", "2", "name", "mark", "title", "VP", "dept_s", "Marketing"))); + assertU( + add(doc("id", "3", "id_s_dv", "3", "name", "nancy", "title", "MTS", "dept_s", "Sales"))); + assertU( + add( + doc( + "id", + "4", + "id_s_dv", + "4", + "name", + "dave", + "title", + "MTS", + "dept_s", + "Support", + "dept_s", + "Engineering"))); + assertU( + add( + doc( + "id", + "5", + "id_s_dv", + "5", + "name", + "tina", + "title", + "VP", + "dept_s", + "Engineering"))); assertU(commit()); - update(fromCore, add(doc("id", "10", "id_s_dv", "10", "dept_id_s", "Engineering", "text", "These 
guys develop stuff", "cat", "dev"))); - update(fromCore, add(doc("id", "11", "id_s_dv", "11", "dept_id_s", "Marketing", "text", "These guys make you look good"))); - update(fromCore, add(doc("id", "12", "id_s_dv", "12", "dept_id_s", "Sales", "text", "These guys sell stuff"))); - update(fromCore, add(doc("id", "13", "id_s_dv", "13", "dept_id_s", "Support", "text", "These guys help customers"))); + update( + fromCore, + add( + doc( + "id", + "10", + "id_s_dv", + "10", + "dept_id_s", + "Engineering", + "text", + "These guys develop stuff", + "cat", + "dev"))); + update( + fromCore, + add( + doc( + "id", + "11", + "id_s_dv", + "11", + "dept_id_s", + "Marketing", + "text", + "These guys make you look good"))); + update( + fromCore, + add( + doc( + "id", + "12", + "id_s_dv", + "12", + "dept_id_s", + "Sales", + "text", + "These guys sell stuff"))); + update( + fromCore, + add( + doc( + "id", + "13", + "id_s_dv", + "13", + "dept_id_s", + "Support", + "text", + "These guys help customers"))); update(fromCore, commit()); - } - public static String update(SolrCore core, String xml) throws Exception { DirectSolrConnection connection = new DirectSolrConnection(core); SolrRequestHandler handler = core.getRequestHandler("/update"); @@ -86,36 +169,55 @@ public void testScoreJoin() throws Exception { } void doTestJoin(String joinPrefix) throws Exception { - assertJQ(req("q", joinPrefix + " from=dept_id_s to=dept_s fromIndex=fromCore}cat:dev", "fl", "id", - "debugQuery", random().nextBoolean() ? "true":"false") - , "/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}" - ); - - assertJQ(req( "qt", "/export", - "q", joinPrefix + " from=dept_id_s to=dept_s fromIndex=fromCore}cat:dev", "fl", "id_s_dv", - "sort", "id_s_dv asc", - "debugQuery", random().nextBoolean() ? "true":"false") - , "/response=={'numFound':3,'docs':[{'id_s_dv':'1'},{'id_s_dv':'4'},{'id_s_dv':'5'}]}" - ); + assertJQ( + req( + "q", + joinPrefix + " from=dept_id_s to=dept_s fromIndex=fromCore}cat:dev", + "fl", + "id", + "debugQuery", + random().nextBoolean() ? "true" : "false"), + "/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}"); + + assertJQ( + req( + "qt", + "/export", + "q", + joinPrefix + " from=dept_id_s to=dept_s fromIndex=fromCore}cat:dev", + "fl", + "id_s_dv", + "sort", + "id_s_dv asc", + "debugQuery", + random().nextBoolean() ? "true" : "false"), + "/response=={'numFound':3,'docs':[{'id_s_dv':'1'},{'id_s_dv':'4'},{'id_s_dv':'5'}]}"); assertFalse(fromCore.isClosed()); assertFalse(h.getCore().isClosed()); // find people that develop stuff - but limit via filter query to a name of "john" // this tests filters being pushed down to queries (SOLR-3062) - assertJQ(req("q", joinPrefix + " from=dept_id_s to=dept_s fromIndex=fromCore}cat:dev", "fl", "id", "fq", "name:john", - "debugQuery", random().nextBoolean() ? "true":"false") - , "/response=={'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'1'}]}" - ); + assertJQ( + req( + "q", + joinPrefix + " from=dept_id_s to=dept_s fromIndex=fromCore}cat:dev", + "fl", + "id", + "fq", + "name:john", + "debugQuery", + random().nextBoolean() ? 
"true" : "false"), + "/response=={'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'1'}]}"); } @Test public void testCoresAreDifferent() throws Exception { assertQEx("schema12.xml" + " has no \"cat\" field", req("cat:*"), ErrorCode.BAD_REQUEST); - final LocalSolrQueryRequest req = new LocalSolrQueryRequest(fromCore, "cat:*", "/select", 0, 100, Collections.emptyMap()); + final LocalSolrQueryRequest req = + new LocalSolrQueryRequest(fromCore, "cat:*", "/select", 0, 100, Collections.emptyMap()); final String resp = query(fromCore, req); assertTrue(resp, resp.contains("numFound=\"1\"")); assertTrue(resp, resp.contains("10")); - } public String query(SolrCore core, SolrQueryRequest req) throws Exception { @@ -123,7 +225,7 @@ public String query(SolrCore core, SolrQueryRequest req) throws Exception { if (req.getParams().get("qt") != null) { handler = req.getParams().get("qt"); } - if (req.getParams().get("wt") == null){ + if (req.getParams().get("wt") == null) { ModifiableSolrParams params = new ModifiableSolrParams(req.getParams()); params.set("wt", "xml"); req.setParams(params); diff --git a/solr/core/src/test/org/apache/solr/TestCursorMarkWithoutUniqueKey.java b/solr/core/src/test/org/apache/solr/TestCursorMarkWithoutUniqueKey.java index a98d44f3fbd..1c0638d3a9b 100644 --- a/solr/core/src/test/org/apache/solr/TestCursorMarkWithoutUniqueKey.java +++ b/solr/core/src/test/org/apache/solr/TestCursorMarkWithoutUniqueKey.java @@ -16,34 +16,36 @@ */ package org.apache.solr; -import org.apache.solr.schema.SchemaField; import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START; -import org.junit.Before; +import org.apache.solr.schema.SchemaField; import org.junit.After; +import org.junit.Before; -/** - * Tests that cursor requests fail unless the IndexSchema defines a uniqueKey. - */ +/** Tests that cursor requests fail unless the IndexSchema defines a uniqueKey. 
*/ public class TestCursorMarkWithoutUniqueKey extends SolrTestCaseJ4 { - public final static String TEST_SOLRCONFIG_NAME = "solrconfig-minimal.xml"; - public final static String TEST_SCHEMAXML_NAME = "schema-minimal.xml"; + public static final String TEST_SOLRCONFIG_NAME = "solrconfig-minimal.xml"; + public static final String TEST_SCHEMAXML_NAME = "schema-minimal.xml"; @Before public void beforeSetupCore() throws Exception { - System.setProperty("solr.test.useFilterForSortedQuery", Boolean.toString(random().nextBoolean())); - System.setProperty("solr.test.useFilterForSortedQuery", Boolean.toString(random().nextBoolean())); + System.setProperty( + "solr.test.useFilterForSortedQuery", Boolean.toString(random().nextBoolean())); + System.setProperty( + "solr.test.useFilterForSortedQuery", Boolean.toString(random().nextBoolean())); initCore(TEST_SOLRCONFIG_NAME, TEST_SCHEMAXML_NAME); SchemaField uniqueKeyField = h.getCore().getLatestSchema().getUniqueKeyField(); - assertNull("This test requires that the schema not have a uniquekey field -- someone violated that in " + TEST_SCHEMAXML_NAME, uniqueKeyField); + assertNull( + "This test requires that the schema not have a uniquekey field -- someone violated that in " + + TEST_SCHEMAXML_NAME, + uniqueKeyField); } @After public void afterDestroyCore() throws Exception { deleteCore(); } - public void test() throws Exception { @@ -51,13 +53,15 @@ public void test() throws Exception { assertU(commit()); try { - ignoreException("Cursor functionality is not available unless the IndexSchema defines a uniqueKey field"); - expectThrows(RuntimeException.class, + ignoreException( + "Cursor functionality is not available unless the IndexSchema defines a uniqueKey field"); + expectThrows( + RuntimeException.class, "No exception when querying with a cursorMark with no uniqueKey defined.", - () -> assertQ(req("q", "*:*", "sort", "fld desc", "cursorMark", CURSOR_MARK_START)) - ); + () -> assertQ(req("q", "*:*", "sort", "fld desc", "cursorMark", CURSOR_MARK_START))); } finally { - unIgnoreException("Cursor functionality is not available unless the IndexSchema defines a uniqueKey field"); + unIgnoreException( + "Cursor functionality is not available unless the IndexSchema defines a uniqueKey field"); } } } diff --git a/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java b/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java index 868a67ccb89..7b726466e1d 100644 --- a/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java +++ b/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java @@ -16,10 +16,12 @@ */ package org.apache.solr; +import static org.hamcrest.CoreMatchers.containsString; + import java.io.IOException; import java.util.List; - import org.apache.lucene.util.LuceneTestCase.Slow; +import org.apache.solr.SolrTestCaseJ4.SuppressPointFields; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.response.QueryResponse; @@ -29,37 +31,34 @@ import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; -import org.apache.solr.SolrTestCaseJ4.SuppressPointFields; import org.junit.Test; -import static org.hamcrest.CoreMatchers.containsString; - /** - * TODO? perhaps use: - * http://docs.codehaus.org/display/JETTY/ServletTester - * rather then open a real connection? + * TODO? 
perhaps use: http://docs.codehaus.org/display/JETTY/ServletTester rather then open a real + * connection? * * @since solr 4.0 */ @Slow -@SuppressPointFields(bugUrl="https://issues.apache.org/jira/browse/SOLR-10844") +@SuppressPointFields(bugUrl = "https://issues.apache.org/jira/browse/SOLR-10844") public class TestDistributedGrouping extends BaseDistributedSearchTestCase { public TestDistributedGrouping() { - // SOLR-10844: Even with points suppressed, this test breaks if we (randomize) docvalues="true" on trie fields?!?!?!!? - System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"false"); + // SOLR-10844: Even with points suppressed, this test breaks if we (randomize) docvalues="true" + // on trie fields?!?!?!!? + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "false"); } - - String t1="a_t"; - String i1dv="a_idv"; - String i1="a_i1"; - String s1="a_s"; + + String t1 = "a_t"; + String i1dv = "a_idv"; + String i1 = "a_i1"; + String s1 = "a_s"; String s1dv = "a_s_dvo"; String b1dv = "a_b_dvo"; String tlong = "other_tl1"; String tdate_a = "a_n_tdt1"; // use single-valued date field String tdate_b = "b_n_tdt1"; - String oddField="oddField_s1"; + String oddField = "oddField_s1"; @Test @SuppressWarnings({"unchecked"}) @@ -70,97 +69,368 @@ public void test() throws Exception { handle.clear(); handle.put("timestamp", SKIPVAL); handle.put("_version_", SKIP); - handle.put("grouped", UNORDERED); // distrib grouping doesn't guarantee order of top level group commands + // distrib grouping doesn't guarantee order of top level group commands + handle.put("grouped", UNORDERED); // Test distributed grouping with empty indices - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc"); - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "hl","true","hl.fl",t1); - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "facet", "true", "facet.field", t1); - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "stats", "true", "stats.field", i1); - query("q", "kings", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "spellcheck", "true", "spellcheck.build", "true", "qt", "spellCheckCompRH", "df", "subject"); - query("q", "*:*", "fq", s1 + ":a", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "group.truncate", "true", "facet", "true", "facet.field", t1); - - indexr(id,1, i1, 100, tlong, 100, i1dv, 100, t1,"now is the time for all good men", - tdate_a, "2010-04-20T11:00:00Z", b1dv, true, - tdate_b, "2009-08-20T11:00:00Z", s1dv, "Trillian", - "foo_f", 1.414f, "foo_b", "true", "foo_d", 1.414d); - indexr(id,2, i1, 50 , tlong, 50, i1dv, 50, t1,"to come to the aid of their country.", - tdate_a, "2010-05-02T11:00:00Z", b1dv, false, - tdate_b, "2009-11-02T11:00:00Z"); - indexr(id,3, i1, 2, tlong, 2,t1,"how now brown cow", - tdate_a, "2010-05-03T11:00:00Z"); - indexr(id,4, i1, -100 ,tlong, 101, i1dv, 101, - t1,"the quick fox jumped over the lazy dog", b1dv, true, s1dv, "Zaphod", - tdate_a, "2010-05-03T11:00:00Z", - tdate_b, "2010-05-03T11:00:00Z"); - indexr(id,5, i1, 500, tlong, 500 , i1dv, 500, - t1,"the quick fox jumped way over the lazy dog", - tdate_a, "2010-05-05T11:00:00Z"); - 
indexr(id,6, i1, -600, tlong, 600 , i1dv, 600, t1,"humpty dumpy sat on a wall"); - indexr(id,7, i1, 123, tlong, 123 ,i1dv, 123, t1,"humpty dumpy had a great fall"); - indexr(id,8, i1, 876, tlong, 876, - tdate_b, "2010-01-05T11:00:00Z", - t1,"all the kings horses and all the kings men"); - indexr(id,9, i1, 7, tlong, 7, i1dv, 7, t1,"couldn't put humpty together again"); - indexr(id,10, i1, 4321, tlong, 4321, i1dv, 4321, t1,"this too shall pass"); - indexr(id,11, i1, -987, tlong, 987, i1dv, 2015, - t1,"An eye for eye only ends up making the whole world blind."); - indexr(id,12, i1, 379, tlong, 379, i1dv, 379, - t1,"Great works are performed, not by strength, but by perseverance."); - - indexr(id, 14, "SubjectTerms_mfacet", new String[] {"mathematical models", "mathematical analysis"}); - indexr(id, 15, "SubjectTerms_mfacet", new String[] {"test 1", "test 2", "test3"}); - indexr(id, 16, "SubjectTerms_mfacet", new String[] {"test 1", "test 2", "test3"}); + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + i1 + " asc, id asc"); + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + i1 + " asc, id asc", + "hl", + "true", + "hl.fl", + t1); + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + i1 + " asc, id asc", + "facet", + "true", + "facet.field", + t1); + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + i1 + " asc, id asc", + "stats", + "true", + "stats.field", + i1); + query( + "q", + "kings", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + i1 + " asc, id asc", + "spellcheck", + "true", + "spellcheck.build", + "true", + "qt", + "spellCheckCompRH", + "df", + "subject"); + query( + "q", + "*:*", + "fq", + s1 + ":a", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + i1 + " asc, id asc", + "group.truncate", + "true", + "facet", + "true", + "facet.field", + t1); + + indexr( + id, + 1, + i1, + 100, + tlong, + 100, + i1dv, + 100, + t1, + "now is the time for all good men", + tdate_a, + "2010-04-20T11:00:00Z", + b1dv, + true, + tdate_b, + "2009-08-20T11:00:00Z", + s1dv, + "Trillian", + "foo_f", + 1.414f, + "foo_b", + "true", + "foo_d", + 1.414d); + indexr( + id, + 2, + i1, + 50, + tlong, + 50, + i1dv, + 50, + t1, + "to come to the aid of their country.", + tdate_a, + "2010-05-02T11:00:00Z", + b1dv, + false, + tdate_b, + "2009-11-02T11:00:00Z"); + indexr(id, 3, i1, 2, tlong, 2, t1, "how now brown cow", tdate_a, "2010-05-03T11:00:00Z"); + indexr( + id, + 4, + i1, + -100, + tlong, + 101, + i1dv, + 101, + t1, + "the quick fox jumped over the lazy dog", + b1dv, + true, + s1dv, + "Zaphod", + tdate_a, + "2010-05-03T11:00:00Z", + tdate_b, + "2010-05-03T11:00:00Z"); + indexr( + id, + 5, + i1, + 500, + tlong, + 500, + i1dv, + 500, + t1, + "the quick fox jumped way over the lazy dog", + tdate_a, + "2010-05-05T11:00:00Z"); + indexr(id, 6, i1, -600, tlong, 600, i1dv, 600, t1, "humpty dumpy sat on a wall"); + indexr(id, 7, i1, 123, tlong, 123, i1dv, 123, t1, "humpty dumpy had a great fall"); + indexr( + id, + 8, + i1, + 876, + tlong, + 876, + tdate_b, + "2010-01-05T11:00:00Z", + t1, + "all the kings horses and all the kings 
men"); + indexr(id, 9, i1, 7, tlong, 7, i1dv, 7, t1, "couldn't put humpty together again"); + indexr(id, 10, i1, 4321, tlong, 4321, i1dv, 4321, t1, "this too shall pass"); + indexr( + id, + 11, + i1, + -987, + tlong, + 987, + i1dv, + 2015, + t1, + "An eye for eye only ends up making the whole world blind."); + indexr( + id, + 12, + i1, + 379, + tlong, + 379, + i1dv, + 379, + t1, + "Great works are performed, not by strength, but by perseverance."); + + indexr( + id, + 14, + "SubjectTerms_mfacet", + new String[] {"mathematical models", "mathematical analysis"}); + indexr(id, 15, "SubjectTerms_mfacet", new String[] {"test 1", "test 2", "test3"}); + indexr(id, 16, "SubjectTerms_mfacet", new String[] {"test 1", "test 2", "test3"}); String[] vals = new String[100]; - for (int i=0; i<100; i++) { + for (int i = 0; i < 100; i++) { vals[i] = "test " + i; } indexr(id, 17, "SubjectTerms_mfacet", vals); indexr( - id, 18, i1, 232, tlong, 332, i1dv, 150, - t1,"no eggs on wall, lesson learned", b1dv, true, s1dv, "dent", - oddField, "odd man out" - ); + id, + 18, + i1, + 232, + tlong, + 332, + i1dv, + 150, + t1, + "no eggs on wall, lesson learned", + b1dv, + true, + s1dv, + "dent", + oddField, + "odd man out"); indexr( - id, 19, i1, 232, tlong, 432, i1dv, 300, - t1, "many eggs on wall", b1dv, false, s1dv, "dent", - oddField, "odd man in" - ); + id, + 19, + i1, + 232, + tlong, + 432, + i1dv, + 300, + t1, + "many eggs on wall", + b1dv, + false, + s1dv, + "dent", + oddField, + "odd man in"); indexr( - id, 20, i1, 232, tlong, 532, i1dv, 150, - t1, "some eggs on wall", b1dv, false, s1dv, "author", - oddField, "odd man between" - ); + id, + 20, + i1, + 232, + tlong, + 532, + i1dv, + 150, + t1, + "some eggs on wall", + b1dv, + false, + s1dv, + "author", + oddField, + "odd man between"); indexr( - id, 21, i1, 232, tlong, 632, i1dv, 120, - t1, "a few eggs on wall", b1dv, true, s1dv, "ford prefect", - oddField, "odd man under" - ); + id, + 21, + i1, + 232, + tlong, + 632, + i1dv, + 120, + t1, + "a few eggs on wall", + b1dv, + true, + s1dv, + "ford prefect", + oddField, + "odd man under"); indexr( - id, 22, i1, 232, tlong, 732, i1dv, 120, - t1, "any eggs on wall", b1dv, false, s1dv, "ford prefect", - oddField, "odd man above" - ); + id, + 22, + i1, + 232, + tlong, + 732, + i1dv, + 120, + t1, + "any eggs on wall", + b1dv, + false, + s1dv, + "ford prefect", + oddField, + "odd man above"); indexr( - id, 23, i1, 233, tlong, 734, i1dv, 120, - t1, "dirty eggs", b1dv, true, s1dv, "Marvin", - oddField, "odd eggs" - ); + id, + 23, + i1, + 233, + tlong, + 734, + i1dv, + 120, + t1, + "dirty eggs", + b1dv, + true, + s1dv, + "Marvin", + oddField, + "odd eggs"); for (int i = 100; i < 150; i++) { indexr(id, i); } - int[] values = new int[]{9999, 99999, 999999, 9999999}; + int[] values = new int[] {9999, 99999, 999999, 9999999}; for (int shard = 0; shard < clients.size(); shard++) { int groupValue = values[shard]; for (int i = 500; i < 600; i++) { - index_specific(shard, - i1, groupValue, - s1, "a", - id, i * (shard + 1), - t1, random().nextInt(7)); + index_specific( + shard, i1, groupValue, s1, "a", id, i * (shard + 1), t1, random().nextInt(7)); } } @@ -169,148 +439,857 @@ public void test() throws Exception { // test grouping // The second sort = id asc . The sorting behaviour is different in dist mode. 
See TopDocs#merge // The shard the result came from matters in the order if both document sortvalues are equal - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc"); - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 0, "sort", i1 + " asc, id asc"); - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", "id asc, _docid_ asc"); - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", "{!func}add(" + i1 + ",5) asc, id asc"); - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "facet", "true", "facet.field", t1); - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "stats", "true", "stats.field", tlong); - query("q", "kings", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "spellcheck", "true", "spellcheck.build", "true", "qt", "spellCheckCompRH", "df", "subject"); - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "facet", "true", "hl","true","hl.fl",t1); - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "group.sort", "id desc"); - - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.offset", 5, "group.limit", -1, "sort", i1 + " asc, id asc"); - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "offset", 5, "rows", 5, "group.offset", 5, "group.limit", -1, "sort", i1 + " asc, id asc"); - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "offset", 5, "rows", 5, "sort", i1 + " asc, id asc", "group.format", "simple"); - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "offset", 5, "rows", 5, "sort", i1 + " asc, id asc", "group.main", "true"); - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.offset", 5, "group.limit", 5, "sort", i1 + " asc, id asc", "group.format", "simple", "offset", 5, "rows", 5); - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.offset", 5, "group.limit", 5, "sort", i1 + " asc, id asc", "group.main", "true", "offset", 5, "rows", 5); - - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.query", t1 + ":kings OR " + t1 + ":eggs", "group.limit", -1, "sort", i1 + " asc, id asc"); - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.query", t1 + ":kings OR " + t1 + ":eggs", "group.limit", 10, "sort", i1 + " asc, id asc"); - - query("q", "*:*", "fl", "id," + i1dv, "group", "true", "group.field", i1dv, "group.limit", 10, "sort", i1 + " asc, id asc"); - - - // SOLR-4150: what if group.query has no matches, + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + i1 + " asc, id asc"); + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + 0, + "sort", + i1 + " asc, id asc"); + query( + "q", + "*:*", + 
"rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + "id asc, _docid_ asc"); + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + "{!func}add(" + i1 + ",5) asc, id asc"); + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + i1 + " asc, id asc", + "facet", + "true", + "facet.field", + t1); + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + i1 + " asc, id asc", + "stats", + "true", + "stats.field", + tlong); + query( + "q", + "kings", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + i1 + " asc, id asc", + "spellcheck", + "true", + "spellcheck.build", + "true", + "qt", + "spellCheckCompRH", + "df", + "subject"); + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + i1 + " asc, id asc", + "facet", + "true", + "hl", + "true", + "hl.fl", + t1); + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + i1 + " asc, id asc", + "group.sort", + "id desc"); + + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.offset", + 5, + "group.limit", + -1, + "sort", + i1 + " asc, id asc"); + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "offset", + 5, + "rows", + 5, + "group.offset", + 5, + "group.limit", + -1, + "sort", + i1 + " asc, id asc"); + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "offset", + 5, + "rows", + 5, + "sort", + i1 + " asc, id asc", + "group.format", + "simple"); + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "offset", + 5, + "rows", + 5, + "sort", + i1 + " asc, id asc", + "group.main", + "true"); + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.offset", + 5, + "group.limit", + 5, + "sort", + i1 + " asc, id asc", + "group.format", + "simple", + "offset", + 5, + "rows", + 5); + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.offset", + 5, + "group.limit", + 5, + "sort", + i1 + " asc, id asc", + "group.main", + "true", + "offset", + 5, + "rows", + 5); + + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.query", + t1 + ":kings OR " + t1 + ":eggs", + "group.limit", + -1, + "sort", + i1 + " asc, id asc"); + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.query", + t1 + ":kings OR " + t1 + ":eggs", + "group.limit", + 10, + "sort", + i1 + " asc, id asc"); + + query( + "q", + "*:*", + "fl", + "id," + i1dv, + "group", + "true", + "group.field", + i1dv, + "group.limit", + 10, + "sort", + i1 + " asc, id asc"); + + // SOLR-4150: what if group.query has no matches, // or only matches on one shard - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", - "group.query", t1 + ":kings OR " + t1 + ":eggs", - "group.query", "id:5", // single 
doc, so only one shard will have it - "group.limit", -1, "sort", i1 + " asc, id asc"); - query("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", - "group.query", t1 + ":kings OR " + t1 + ":eggs", - "group.query", t1 + ":this_will_never_match", - "group.limit", 10, "sort", i1 + " asc, id asc"); + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.query", + t1 + ":kings OR " + t1 + ":eggs", + "group.query", + "id:5", // single doc, so only one shard will have it + "group.limit", + -1, + "sort", + i1 + " asc, id asc"); + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.query", + t1 + ":kings OR " + t1 + ":eggs", + "group.query", + t1 + ":this_will_never_match", + "group.limit", + 10, + "sort", + i1 + " asc, id asc"); // SOLR-4164: main query matches nothing, or only matches on one shard - query("q", "bogus_s:nothing", // no docs match - "group", "true", - "group.query", t1 + ":this_will_never_match", - "group.field", i1, - "fl", "id", "group.limit", "2", "group.format", "simple"); - query("q", "id:5", // one doc matches, so only one shard - "rows", 100, "fl", "id," + i1, "group", "true", - "group.query", t1 + ":kings OR " + t1 + ":eggs", - "group.field", i1, - "group.limit", 10, "sort", i1 + " asc, id asc"); + query( + "q", + "bogus_s:nothing", // no docs match + "group", + "true", + "group.query", + t1 + ":this_will_never_match", + "group.field", + i1, + "fl", + "id", + "group.limit", + "2", + "group.format", + "simple"); + query( + "q", + "id:5", // one doc matches, so only one shard + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.query", + t1 + ":kings OR " + t1 + ":eggs", + "group.field", + i1, + "group.limit", + 10, + "sort", + i1 + " asc, id asc"); // SOLR-13404 - query("q", "*:*", - "group", "true", - "group.query", t1 + ":kings OR " + t1 + ":eggs", - "fl", "id", "group.format", "grouped", "group.limit", "2", "group.offset", "2", - "sort", i1 + " asc, id asc"); - query("q", "*:*", - "group", "true", - "group.query", t1 + ":kings OR " + t1 + ":eggs", - "fl", "id", "group.format", "grouped", "group.limit", "-12", - "sort", i1 + " asc, id asc"); + query( + "q", + "*:*", + "group", + "true", + "group.query", + t1 + ":kings OR " + t1 + ":eggs", + "fl", + "id", + "group.format", + "grouped", + "group.limit", + "2", + "group.offset", + "2", + "sort", + i1 + " asc, id asc"); + query( + "q", + "*:*", + "group", + "true", + "group.query", + t1 + ":kings OR " + t1 + ":eggs", + "fl", + "id", + "group.format", + "grouped", + "group.limit", + "-12", + "sort", + i1 + " asc, id asc"); ignoreException("'group.offset' parameter cannot be negative"); - SolrException exception = expectThrows(SolrException.class, () -> query("q", "*:*", - "group", "true", - "group.query", t1 + ":kings OR " + t1 + ":eggs", "group.offset", "-1") - ); + SolrException exception = + expectThrows( + SolrException.class, + () -> + query( + "q", + "*:*", + "group", + "true", + "group.query", + t1 + ":kings OR " + t1 + ":eggs", + "group.offset", + "-1")); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, exception.code()); - assertThat(exception.getMessage(), containsString("'group.offset' parameter cannot be negative")); + assertThat( + exception.getMessage(), containsString("'group.offset' parameter cannot be negative")); resetExceptionIgnores(); - query("q", "*:*", - "group", "true", - "group.query", t1 + ":kings OR " + t1 + ":eggs", "group.limit", "3", - "fl", "id", "group.format", "simple", "sort", i1 + " 
asc, id asc"); - query("q", "*:*", - "group", "true", - "group.query", t1 + ":kings OR " + t1 + ":eggs", - "fl", "id", "group.main", "true", "sort", i1 + " asc, id asc"); - query("q", "*:*", - "group", "true", - "group.query", t1 + ":kings OR " + t1 + ":eggs", "rows", "13", "start", "2", - "fl", "id", "group.main", "true", "sort", i1 + " asc, id asc"); + query( + "q", + "*:*", + "group", + "true", + "group.query", + t1 + ":kings OR " + t1 + ":eggs", + "group.limit", + "3", + "fl", + "id", + "group.format", + "simple", + "sort", + i1 + " asc, id asc"); + query( + "q", + "*:*", + "group", + "true", + "group.query", + t1 + ":kings OR " + t1 + ":eggs", + "fl", + "id", + "group.main", + "true", + "sort", + i1 + " asc, id asc"); + query( + "q", + "*:*", + "group", + "true", + "group.query", + t1 + ":kings OR " + t1 + ":eggs", + "rows", + "13", + "start", + "2", + "fl", + "id", + "group.main", + "true", + "sort", + i1 + " asc, id asc"); // SOLR-9802 - query("q", "*:*", "group", "true", "group.field", tdate_a, "sort", i1 + " asc, id asc", "fl", "id"); + query( + "q", + "*:*", + "group", + "true", + "group.field", + tdate_a, + "sort", + i1 + " asc, id asc", + "fl", + "id"); // SOLR-3109 - query("q", t1 + ":eggs", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", tlong + " asc, id asc"); - query("q", i1 + ":232", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", tlong + " asc, id asc"); + query( + "q", + t1 + ":eggs", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + 10, + "sort", + tlong + " asc, id asc"); + query( + "q", + i1 + ":232", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + 10, + "sort", + tlong + " asc, id asc"); // SOLR-12248 - query("q", "*:*", "rows", 100, "fl", "id," + s1dv, "group", "true", "group.field", s1dv, "group.limit", -1, "sort", b1dv + " asc, id asc", "group.sort", "id desc"); - query("q", "*:*", "fl", "id," + b1dv, "group", "true", "group.field", b1dv, "group.limit", 10, "sort", s1dv + " asc, id asc"); - query("q", s1dv + ":dent", "fl", "id," + b1dv, "group", "true", "group.field", b1dv, "group.limit", 10, "sort", i1 + " asc, id asc"); - - // In order to validate this we need to make sure that during indexing that all documents of one group only occur on the same shard - query("q", "*:*", "fq", s1 + ":a", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " asc, id asc", "group.ngroups", "true"); - query("q", "*:*", "fq", s1 + ":a", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " asc, id asc", "group.truncate", "true"); - query("q", "*:*", "fq", s1 + ":a", "rows", 100, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", 10, "sort", i1 + " asc, id asc", "group.truncate", "true", "facet", "true", "facet.field", t1); - for (String gfacet : new String[] { "true", "false" }) { - for (String flimit : new String[] { "-100","-1", "1", "2", "10000" }) { - for (String foffset : new String[] { "0","1", "2", "1000" }) { - query("q", "*:*", "fq", s1+":a", - "rows", 100, "fl", "id,"+i1, "sort", i1+" asc, id asc", - "group", "true", "group.field", i1, "group.limit", 10, - "facet", "true", "facet.field", t1, "group.facet", gfacet, - "facet.limit", flimit, "facet.offset", foffset); + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + s1dv, + "group", + "true", + "group.field", 
+ s1dv, + "group.limit", + -1, + "sort", + b1dv + " asc, id asc", + "group.sort", + "id desc"); + query( + "q", + "*:*", + "fl", + "id," + b1dv, + "group", + "true", + "group.field", + b1dv, + "group.limit", + 10, + "sort", + s1dv + " asc, id asc"); + query( + "q", + s1dv + ":dent", + "fl", + "id," + b1dv, + "group", + "true", + "group.field", + b1dv, + "group.limit", + 10, + "sort", + i1 + " asc, id asc"); + + // In order to validate this we need to make sure that during indexing that all documents of one + // group only occur on the same shard + query( + "q", + "*:*", + "fq", + s1 + ":a", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + 10, + "sort", + i1 + " asc, id asc", + "group.ngroups", + "true"); + query( + "q", + "*:*", + "fq", + s1 + ":a", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + 10, + "sort", + i1 + " asc, id asc", + "group.truncate", + "true"); + query( + "q", + "*:*", + "fq", + s1 + ":a", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + 10, + "sort", + i1 + " asc, id asc", + "group.truncate", + "true", + "facet", + "true", + "facet.field", + t1); + for (String gfacet : new String[] {"true", "false"}) { + for (String flimit : new String[] {"-100", "-1", "1", "2", "10000"}) { + for (String foffset : new String[] {"0", "1", "2", "1000"}) { + query( + "q", + "*:*", + "fq", + s1 + ":a", + "rows", + 100, + "fl", + "id," + i1, + "sort", + i1 + " asc, id asc", + "group", + "true", + "group.field", + i1, + "group.limit", + 10, + "facet", + "true", + "facet.field", + t1, + "group.facet", + gfacet, + "facet.limit", + flimit, + "facet.offset", + foffset); } } } // SOLR-3316 - query("q", "*:*", "fq", s1 + ":a", "rows", 0, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "facet", "true", "facet.field", t1); - query("q", "*:*", "fq", s1 + ":a", "rows", 0, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc", "group.truncate", "true", "facet", "true", "facet.field", t1); + query( + "q", + "*:*", + "fq", + s1 + ":a", + "rows", + 0, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + i1 + " asc, id asc", + "facet", + "true", + "facet.field", + t1); + query( + "q", + "*:*", + "fq", + s1 + ":a", + "rows", + 0, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + i1 + " asc, id asc", + "group.truncate", + "true", + "facet", + "true", + "facet.field", + t1); // SOLR-3436 - query("q", "*:*", "fq", s1 + ":a", "fl", "id," + i1, "group", "true", "group.field", i1, "sort", i1 + " asc, id asc", "group.ngroups", "true"); - query("q", "*:*", "fq", s1 + ":a", "rows", 0, "fl", "id," + i1, "group", "true", "group.field", i1, "sort", i1 + " asc, id asc", "group.ngroups", "true"); + query( + "q", + "*:*", + "fq", + s1 + ":a", + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "sort", + i1 + " asc, id asc", + "group.ngroups", + "true"); + query( + "q", + "*:*", + "fq", + s1 + ":a", + "rows", + 0, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "sort", + i1 + " asc, id asc", + "group.ngroups", + "true"); // SOLR-15273: if id was renamed we need to use the new name - query("q", "*:*", "rows", 100, "fl", "aliasId:id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " asc, id asc"); + query( + 
"q", + "*:*", + "rows", + 100, + "fl", + "aliasId:id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + i1 + " asc, id asc"); // SOLR-3960 - include a postfilter - for (String facet : new String[] { "false", "true"}) { - for (String fcache : new String[] { "", " cache=false cost=200"}) { - query("q", "*:*", "rows", 100, "fl", "id," + i1, - "group.limit", 10, "sort", i1 + " asc, id asc", - "group", "true", "group.field", i1, - "fq", "{!frange l=50 "+fcache+"}"+tlong, - "facet.field", t1, - "facet", facet - ); + for (String facet : new String[] {"false", "true"}) { + for (String fcache : new String[] {"", " cache=false cost=200"}) { + query( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group.limit", + 10, + "sort", + i1 + " asc, id asc", + "group", + "true", + "group.field", + i1, + "fq", + "{!frange l=50 " + fcache + "}" + tlong, + "facet.field", + t1, + "facet", + facet); } } // SOLR-6156: timeAllowed with rows>0 and rows==0 - for (String ngroups : new String[] { "false", "true" }) { - for (String rows : new String[] { "10", "0" }) { - simpleQuery("q", "*:*", "group", "true", "group.field", i1, "group.ngroups", ngroups, "rows", rows); - simpleQuery("q", "*:*", "group", "true", "group.field", i1, "group.ngroups", ngroups, "rows", rows, "timeAllowed", "123456"); + for (String ngroups : new String[] {"false", "true"}) { + for (String rows : new String[] {"10", "0"}) { + simpleQuery( + "q", "*:*", "group", "true", "group.field", i1, "group.ngroups", ngroups, "rows", rows); + simpleQuery( + "q", + "*:*", + "group", + "true", + "group.field", + i1, + "group.ngroups", + ngroups, + "rows", + rows, + "timeAllowed", + "123456"); } } ModifiableSolrParams params = new ModifiableSolrParams(); - Object[] q = {"q", "*:*", "fq", s1 + ":a", "rows", 1, "fl", "id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "group.ngroups", "true"}; + Object[] q = { + "q", + "*:*", + "fq", + s1 + ":a", + "rows", + 1, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "group.ngroups", + "true" + }; for (int i = 0; i < q.length; i += 2) { params.add(q[i].toString(), q[i + 1].toString()); @@ -328,131 +1307,351 @@ public void test() throws Exception { assertEquals(100 * shardsArr.length, matches); assertEquals(shardsArr.length, groupCount); - // We validate distributed grouping with scoring as first sort. - // note: this 'q' matches all docs and returns the 'id' as the score, which is unique and so our results should be deterministic. 
- handle.put("maxScore", SKIP);// TODO see SOLR-6612 - query("q", "{!func}id_i1", "rows", 100, "fl", "score,id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", i1 + " desc", "group.sort", "score desc"); // SOLR-2955 - query("q", "{!func}id_i1", "rows", 100, "fl", "score,id," + i1, "group", "true", "group.field", i1, "group.limit", -1, "sort", "score desc, _docid_ asc, id asc"); - query("q", "{!func}id_i1", "rows", 100, "fl", "score,id," + i1, "group", "true", "group.field", i1, "group.limit", -1); - - query("q", "*:*", - "group", "true", - "group.query", t1 + ":kings OR " + t1 + ":eggs", "group.limit", "3", - "fl", "id,score", "sort", i1 + " asc, id asc"); - query("q", "*:*", - "group", "true", - "group.query", t1 + ":kings OR " + t1 + ":eggs", "group.limit", "3", - "fl", "id,score", "group.format", "simple", "sort", i1 + " asc, id asc"); - query("q", "*:*", - "group", "true", - "group.query", t1 + ":kings OR " + t1 + ":eggs", "group.limit", "3", - "fl", "id,score", "group.main", "true", "sort", i1 + " asc, id asc"); - - // grouping shouldn't care if there are multiple fl params, or what order the fl field names are in - variantQuery(params("q", "*:*", - "group", "true", "group.field", i1dv, "group.limit", "10", - "sort", i1 + " asc, id asc") - , params("fl", "id," + i1dv) - , params("fl", i1dv + ",id") - , params("fl", "id", "fl", i1dv) - , params("fl", i1dv, "fl", "id") - ); - variantQuery(params("q", "*:*", "rows", "100", - "group", "true", "group.field", s1dv, "group.limit", "-1", - "sort", b1dv + " asc, id asc", - "group.sort", "id desc") - , params("fl", "id," + s1dv + "," + tdate_a) - , params("fl", "id", "fl", s1dv, "fl", tdate_a) - , params("fl", tdate_a, "fl", s1dv, "fl", "id") - ); - variantQuery(params("q", "*:*", "rows", "100", - "group", "true", "group.field", s1dv, "group.limit", "-1", - "sort", b1dv + " asc, id asc", - "group.sort", "id desc") - , params("fl", s1dv + "," + tdate_a) - , params("fl", s1dv, "fl", tdate_a) - , params("fl", tdate_a, "fl", s1dv) - ); - variantQuery(params("q", "{!func}id_i1", "rows", "100", - "group", "true", "group.field", i1, "group.limit", "-1", - "sort", tlong+" asc, id desc") - , params("fl", t1 + ",score," + i1dv) - , params("fl", t1, "fl", "score", "fl", i1dv) - , params("fl", "score", "fl", t1, "fl", i1dv) - ); - + // note: this 'q' matches all docs and returns the 'id' as the score, which is unique and so our + // results should be deterministic. 
+ handle.put("maxScore", SKIP); // TODO see SOLR-6612 + query( + "q", + "{!func}id_i1", + "rows", + 100, + "fl", + "score,id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + i1 + " desc", + "group.sort", + "score desc"); // SOLR-2955 + query( + "q", + "{!func}id_i1", + "rows", + 100, + "fl", + "score,id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + "score desc, _docid_ asc, id asc"); + query( + "q", + "{!func}id_i1", + "rows", + 100, + "fl", + "score,id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1); + + query( + "q", + "*:*", + "group", + "true", + "group.query", + t1 + ":kings OR " + t1 + ":eggs", + "group.limit", + "3", + "fl", + "id,score", + "sort", + i1 + " asc, id asc"); + query( + "q", + "*:*", + "group", + "true", + "group.query", + t1 + ":kings OR " + t1 + ":eggs", + "group.limit", + "3", + "fl", + "id,score", + "group.format", + "simple", + "sort", + i1 + " asc, id asc"); + query( + "q", + "*:*", + "group", + "true", + "group.query", + t1 + ":kings OR " + t1 + ":eggs", + "group.limit", + "3", + "fl", + "id,score", + "group.main", + "true", + "sort", + i1 + " asc, id asc"); + + // grouping shouldn't care if there are multiple fl params, or what order the fl field names are + // in + variantQuery( + params( + "q", + "*:*", + "group", + "true", + "group.field", + i1dv, + "group.limit", + "10", + "sort", + i1 + " asc, id asc"), + params("fl", "id," + i1dv), + params("fl", i1dv + ",id"), + params("fl", "id", "fl", i1dv), + params("fl", i1dv, "fl", "id")); + variantQuery( + params( + "q", + "*:*", + "rows", + "100", + "group", + "true", + "group.field", + s1dv, + "group.limit", + "-1", + "sort", + b1dv + " asc, id asc", + "group.sort", + "id desc"), + params("fl", "id," + s1dv + "," + tdate_a), + params("fl", "id", "fl", s1dv, "fl", tdate_a), + params("fl", tdate_a, "fl", s1dv, "fl", "id")); + variantQuery( + params( + "q", + "*:*", + "rows", + "100", + "group", + "true", + "group.field", + s1dv, + "group.limit", + "-1", + "sort", + b1dv + " asc, id asc", + "group.sort", + "id desc"), + params("fl", s1dv + "," + tdate_a), + params("fl", s1dv, "fl", tdate_a), + params("fl", tdate_a, "fl", s1dv)); + variantQuery( + params( + "q", + "{!func}id_i1", + "rows", + "100", + "group", + "true", + "group.field", + i1, + "group.limit", + "-1", + "sort", + tlong + " asc, id desc"), + params("fl", t1 + ",score," + i1dv), + params("fl", t1, "fl", "score", "fl", i1dv), + params("fl", "score", "fl", t1, "fl", i1dv)); + // some explicit checks of non default sorting, and sort/group.sort with diff clauses - query("q", "{!func}id_i1", "rows", 100, "fl", tlong + ",id," + i1, "group", "true", - "group.field", i1, "group.limit", -1, - "sort", tlong+" asc, id desc"); - query("q", "{!func}id_i1", "rows", 100, "fl", tlong + ",id," + i1, "group", "true", - "group.field", i1, "group.limit", -1, - "sort", "id asc", - "group.sort", tlong+" asc, id desc"); - query("q", "{!func}id_i1", "rows", 100, "fl", tlong + ",id," + i1, "group", "true", - "group.field", i1, "group.limit", -1, - "sort", tlong+" asc, id desc", - "group.sort", "id asc"); + query( + "q", + "{!func}id_i1", + "rows", + 100, + "fl", + tlong + ",id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + tlong + " asc, id desc"); + query( + "q", + "{!func}id_i1", + "rows", + 100, + "fl", + tlong + ",id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + "id asc", + "group.sort", + 
tlong + " asc, id desc"); + query( + "q", + "{!func}id_i1", + "rows", + 100, + "fl", + tlong + ",id," + i1, + "group", + "true", + "group.field", + i1, + "group.limit", + -1, + "sort", + tlong + " asc, id desc", + "group.sort", + "id asc"); for (boolean withFL : new boolean[] {true, false}) { if (withFL) { - rsp = variantQuery(params("q", "{!func}id_i1", "fq", oddField+":[* TO *]", - "rows", "100", - "group", "true", "group.field", i1, "group.limit", "-1", - "sort", tlong+" asc", "group.sort", oddField+" asc") - , params("fl", tlong + ",id," + i1) - , params("fl", tlong, "fl", "id", "fl", i1) - , params("fl", "id", "fl", i1, "fl", tlong) - ); + rsp = + variantQuery( + params( + "q", + "{!func}id_i1", + "fq", + oddField + ":[* TO *]", + "rows", + "100", + "group", + "true", + "group.field", + i1, + "group.limit", + "-1", + "sort", + tlong + " asc", + "group.sort", + oddField + " asc"), + params("fl", tlong + ",id," + i1), + params("fl", tlong, "fl", "id", "fl", i1), + params("fl", "id", "fl", i1, "fl", tlong)); } else { // special check: same query, but empty fl... - rsp = query("q", "{!func}id_i1", "fq", oddField+":[* TO *]", - "rows", "100", - "group", "true", "group.field", i1, "group.limit", "-1", - "sort", tlong+" asc", "group.sort", oddField+" asc"); + rsp = + query( + "q", + "{!func}id_i1", + "fq", + oddField + ":[* TO *]", + "rows", + "100", + "group", + "true", + "group.field", + i1, + "group.limit", + "-1", + "sort", + tlong + " asc", + "group.sort", + oddField + " asc"); } nl = (NamedList) rsp.getResponse().get("grouped"); nl = (NamedList) nl.get(i1); assertEquals(rsp.toString(), 6, nl.get("matches")); - assertEquals(rsp.toString(), 2, ((List>)nl.get("groups")).size()); - nl = ((List>)nl.get("groups")).get(0); + assertEquals(rsp.toString(), 2, ((List>) nl.get("groups")).size()); + nl = ((List>) nl.get("groups")).get(0); assertEquals(rsp.toString(), 232, nl.get("groupValue")); SolrDocumentList docs = (SolrDocumentList) nl.get("doclist"); assertEquals(docs.toString(), 5, docs.getNumFound()); // assertEquals(docs.toString(), "22", docs.get(0).getFirstValue("id")); assertEquals(docs.toString(), 732L, docs.get(0).getFirstValue(tlong)); - assertEquals(docs.toString(), 232, docs.get(0).getFirstValue(i1)); + assertEquals(docs.toString(), 232, docs.get(0).getFirstValue(i1)); // assertEquals(docs.toString(), "21", docs.get(4).getFirstValue("id")); assertEquals(docs.toString(), 632L, docs.get(4).getFirstValue(tlong)); - assertEquals(docs.toString(), 232, docs.get(4).getFirstValue(i1)); + assertEquals(docs.toString(), 232, docs.get(4).getFirstValue(i1)); // if (withFL == false) { - // exact number varies based on test randomization, but there should always be at least the 8 - // explicitly indexed in these 2 docs... + // exact number varies based on test randomization, but there should always be at least the + // 8 explicitly indexed in these 2 docs... 
assertTrue(docs.toString(), 8 <= docs.get(0).getFieldNames().size()); assertTrue(docs.toString(), 8 <= docs.get(4).getFieldNames().size()); } } - + // grouping on boolean non-stored docValued enabled field - rsp = query("q", b1dv + ":*", "fl", "id," + b1dv, "group", "true", "group.field", - b1dv, "group.limit", 10, "sort", b1dv + " asc, id asc"); + rsp = + query( + "q", + b1dv + ":*", + "fl", + "id," + b1dv, + "group", + "true", + "group.field", + b1dv, + "group.limit", + 10, + "sort", + b1dv + " asc, id asc"); nl = (NamedList) rsp.getResponse().get("grouped"); nl = (NamedList) nl.get(b1dv); assertEquals(rsp.toString(), 9, nl.get("matches")); - assertEquals(rsp.toString(), 2, ((List>)nl.get("groups")).size()); - nl = ((List>)nl.get("groups")).get(0); + assertEquals(rsp.toString(), 2, ((List>) nl.get("groups")).size()); + nl = ((List>) nl.get("groups")).get(0); assertEquals(rsp.toString(), false, nl.get("groupValue")); SolrDocumentList docs = (SolrDocumentList) nl.get("doclist"); assertEquals(docs.toString(), 4, docs.getNumFound()); - + // Can't validate the response, but can check if no errors occur. - simpleQuery("q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.query", t1 + ":kings OR " + t1 + ":eggs", "group.limit", 10, "sort", i1 + " asc, id asc", CommonParams.TIME_ALLOWED, 1); - - //Debug - simpleQuery("q", "*:*", "rows", 10, "fl", "id," + i1, "group", "true", "group.field", i1, "debug", "true"); + simpleQuery( + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.query", + t1 + ":kings OR " + t1 + ":eggs", + "group.limit", + 10, + "sort", + i1 + " asc, id asc", + CommonParams.TIME_ALLOWED, + 1); + + // Debug + simpleQuery( + "q", + "*:*", + "rows", + 10, + "fl", + "id," + i1, + "group", + "true", + "group.field", + i1, + "debug", + "true"); } private void simpleQuery(Object... queryParams) throws SolrServerException, IOException { @@ -465,18 +1664,19 @@ private void simpleQuery(Object... queryParams) throws SolrServerException, IOEx } /** - * Special helper method for verifying that multiple queries behave the same as each other, - * both in distributed and single node queries + * Special helper method for verifying that multiple queries behave the same as each other, both + * in distributed and single node queries * * @param commonParams params that are common to all queries - * @param variantParams params that will be appended to the common params to create a variant query + * @param variantParams params that will be appended to the common params to create a variant + * query * @return the last response returned by the last variant * @see #query * @see #compareResponses * @see SolrParams#wrapAppended */ - protected QueryResponse variantQuery(final SolrParams commonParams, - final SolrParams... variantParams) throws Exception { + protected QueryResponse variantQuery( + final SolrParams commonParams, final SolrParams... 
variantParams) throws Exception { QueryResponse lastResponse = null; for (SolrParams extra : variantParams) { final QueryResponse rsp = query(SolrParams.wrapAppended(commonParams, extra)); @@ -487,5 +1687,4 @@ protected QueryResponse variantQuery(final SolrParams commonParams, } return lastResponse; } - } diff --git a/solr/core/src/test/org/apache/solr/TestDistributedMissingSort.java b/solr/core/src/test/org/apache/solr/TestDistributedMissingSort.java index 416556a1f95..1b14bf89a2f 100644 --- a/solr/core/src/test/org/apache/solr/TestDistributedMissingSort.java +++ b/solr/core/src/test/org/apache/solr/TestDistributedMissingSort.java @@ -20,22 +20,20 @@ import org.apache.solr.client.solrj.response.QueryResponse; import org.junit.Test; -/** - * Tests sortMissingFirst and sortMissingLast in distributed sort - */ +/** Tests sortMissingFirst and sortMissingLast in distributed sort */ @Slow public class TestDistributedMissingSort extends BaseDistributedSearchTestCase { public TestDistributedMissingSort() { schemaString = "schema-distributed-missing-sort.xml"; } - - String sint1_ml = "one_i1_ml"; // int field, sortMissingLast=true, multiValued=false - String sint1_mf = "two_i1_mf"; // int field, sortMissingFirst=true, multiValued=false - String long1_ml = "three_l1_ml"; // long field, sortMissingLast=true, multiValued=false - String long1_mf = "four_l1_mf"; // long field, sortMissingFirst=true, multiValued=false + + String sint1_ml = "one_i1_ml"; // int field, sortMissingLast=true, multiValued=false + String sint1_mf = "two_i1_mf"; // int field, sortMissingFirst=true, multiValued=false + String long1_ml = "three_l1_ml"; // long field, sortMissingLast=true, multiValued=false + String long1_mf = "four_l1_mf"; // long field, sortMissingFirst=true, multiValued=false String string1_ml = "five_s1_ml"; // StringField, sortMissingLast=true, multiValued=false - String string1_mf = "six_s1_mf"; // StringField, sortMissingFirst=true, multiValued=false + String string1_mf = "six_s1_mf"; // StringField, sortMissingFirst=true, multiValued=false @Test public void test() throws Exception { @@ -46,47 +44,225 @@ public void test() throws Exception { private void index() throws Exception { del("*:*"); - indexr(id,1, sint1_ml, 100, sint1_mf, 100, long1_ml, 100, long1_mf, 100, - "foo_f", 1.414f, "foo_b", "true", "foo_d", 1.414d, - string1_ml, "DE", string1_mf, "DE"); - indexr(id,2, sint1_ml, 50, sint1_mf, 50, long1_ml, 50, long1_mf, 50, - string1_ml, "ABC", string1_mf, "ABC"); - indexr(id,3, sint1_ml, 2, sint1_mf, 2, long1_ml, 2, long1_mf, 2, - string1_ml, "HIJK", string1_mf, "HIJK"); - indexr(id,4, sint1_ml, -100, sint1_mf, -100, long1_ml, -101, long1_mf, -101, - string1_ml, "L M", string1_mf, "L M"); - indexr(id,5, sint1_ml, 500, sint1_mf, 500, long1_ml, 500, long1_mf, 500, - string1_ml, "YB", string1_mf, "YB"); - indexr(id,6, sint1_ml, -600, sint1_mf, -600, long1_ml, -600, long1_mf, -600, - string1_ml, "WX", string1_mf, "WX"); - indexr(id,7, sint1_ml, 123, sint1_mf, 123, long1_ml, 123, long1_mf, 123, - string1_ml, "N", string1_mf, "N"); - indexr(id,8, sint1_ml, 876, sint1_mf, 876, long1_ml, 876, long1_mf, 876, - string1_ml, "QRS", string1_mf, "QRS"); - indexr(id,9, sint1_ml, 7, sint1_mf, 7, long1_ml, 7, long1_mf, 7, - string1_ml, "P", string1_mf, "P"); - - commit(); // try to ensure there's more than one segment - - indexr(id,10, sint1_ml, 4321, sint1_mf, 4321, long1_ml, 4321, long1_mf, 4321, - string1_ml, "O", string1_mf, "O"); - indexr(id,11, sint1_ml, -987, sint1_mf, -987, long1_ml, -987, long1_mf, -987, - 
string1_ml, "YA", string1_mf, "YA"); - indexr(id,12, sint1_ml, 379, sint1_mf, 379, long1_ml, 379, long1_mf, 379, - string1_ml, "TUV", string1_mf, "TUV"); - indexr(id,13, sint1_ml, 232, sint1_mf, 232, long1_ml, 232, long1_mf, 232, - string1_ml, "F G", string1_mf, "F G"); - - indexr(id, 14, "SubjectTerms_mfacet", new String[] {"mathematical models", "mathematical analysis"}); - indexr(id, 15, "SubjectTerms_mfacet", new String[] {"test 1", "test 2", "test3"}); - indexr(id, 16, "SubjectTerms_mfacet", new String[] {"test 1", "test 2", "test3"}); + indexr( + id, + 1, + sint1_ml, + 100, + sint1_mf, + 100, + long1_ml, + 100, + long1_mf, + 100, + "foo_f", + 1.414f, + "foo_b", + "true", + "foo_d", + 1.414d, + string1_ml, + "DE", + string1_mf, + "DE"); + indexr( + id, + 2, + sint1_ml, + 50, + sint1_mf, + 50, + long1_ml, + 50, + long1_mf, + 50, + string1_ml, + "ABC", + string1_mf, + "ABC"); + indexr( + id, + 3, + sint1_ml, + 2, + sint1_mf, + 2, + long1_ml, + 2, + long1_mf, + 2, + string1_ml, + "HIJK", + string1_mf, + "HIJK"); + indexr( + id, + 4, + sint1_ml, + -100, + sint1_mf, + -100, + long1_ml, + -101, + long1_mf, + -101, + string1_ml, + "L M", + string1_mf, + "L M"); + indexr( + id, + 5, + sint1_ml, + 500, + sint1_mf, + 500, + long1_ml, + 500, + long1_mf, + 500, + string1_ml, + "YB", + string1_mf, + "YB"); + indexr( + id, + 6, + sint1_ml, + -600, + sint1_mf, + -600, + long1_ml, + -600, + long1_mf, + -600, + string1_ml, + "WX", + string1_mf, + "WX"); + indexr( + id, + 7, + sint1_ml, + 123, + sint1_mf, + 123, + long1_ml, + 123, + long1_mf, + 123, + string1_ml, + "N", + string1_mf, + "N"); + indexr( + id, + 8, + sint1_ml, + 876, + sint1_mf, + 876, + long1_ml, + 876, + long1_mf, + 876, + string1_ml, + "QRS", + string1_mf, + "QRS"); + indexr( + id, + 9, + sint1_ml, + 7, + sint1_mf, + 7, + long1_ml, + 7, + long1_mf, + 7, + string1_ml, + "P", + string1_mf, + "P"); + + commit(); // try to ensure there's more than one segment + + indexr( + id, + 10, + sint1_ml, + 4321, + sint1_mf, + 4321, + long1_ml, + 4321, + long1_mf, + 4321, + string1_ml, + "O", + string1_mf, + "O"); + indexr( + id, + 11, + sint1_ml, + -987, + sint1_mf, + -987, + long1_ml, + -987, + long1_mf, + -987, + string1_ml, + "YA", + string1_mf, + "YA"); + indexr( + id, + 12, + sint1_ml, + 379, + sint1_mf, + 379, + long1_ml, + 379, + long1_mf, + 379, + string1_ml, + "TUV", + string1_mf, + "TUV"); + indexr( + id, + 13, + sint1_ml, + 232, + sint1_mf, + 232, + long1_ml, + 232, + long1_mf, + 232, + string1_ml, + "F G", + string1_mf, + "F G"); + + indexr( + id, + 14, + "SubjectTerms_mfacet", + new String[] {"mathematical models", "mathematical analysis"}); + indexr(id, 15, "SubjectTerms_mfacet", new String[] {"test 1", "test 2", "test3"}); + indexr(id, 16, "SubjectTerms_mfacet", new String[] {"test 1", "test 2", "test3"}); String[] vals = new String[100]; - for (int i=0; i<100; i++) { + for (int i = 0; i < 100; i++) { vals[i] = "test " + i; } indexr(id, 17, "SubjectTerms_mfacet", vals); - for (int i=100; i<150; i++) { + for (int i = 100; i < 150; i++) { indexr(id, i); } @@ -96,183 +272,948 @@ private void index() throws Exception { handle.put("timestamp", SKIPVAL); handle.put("_version_", SKIPVAL); // not a cloud test, but may use updateLog } - + private void testSortMissingLast() throws Exception { - // id field values: 1 2 3 4 5 6 7 8 9 10 11 12 13 - // sint1_ml field values: 100 50 2 -100 500 -600 123 876 7 4321 -987 379 232 - // sint1_ml asc sort pos: 7 6 4 3 11 2 8 12 5 13 1 10 9 - // sint1_ml desc sort pos: 7 8 10 11 3 12 6 2 9 1 13 4 5 + // id 
field values: 1 2 3 4 5 6 7 8 9 10 11 12 + // 13 + // sint1_ml field values: 100 50 2 -100 500 -600 123 876 7 4321 -987 379 + // 232 + // sint1_ml asc sort pos: 7 6 4 3 11 2 8 12 5 13 1 10 + // 9 + // sint1_ml desc sort pos: 7 8 10 11 3 12 6 2 9 1 13 4 + // 5 - QueryResponse rsp = query("q","*:*", "sort", sint1_ml + " desc", "rows", "13"); + QueryResponse rsp = query("q", "*:*", "sort", sint1_ml + " desc", "rows", "13"); assertFieldValues(rsp.getResults(), "id_i", 10, 8, 5, 12, 13, 7, 1, 2, 9, 3, 4, 6, 11); - rsp = query("q","*:*", "sort", sint1_ml + " asc", "rows", "13"); + rsp = query("q", "*:*", "sort", sint1_ml + " asc", "rows", "13"); assertFieldValues(rsp.getResults(), "id_i", 11, 6, 4, 3, 9, 2, 1, 7, 13, 12, 5, 8, 10); - rsp = query("q","*:*", "sort", sint1_ml + " desc, id_i asc", "rows", "200"); - assertFieldValues(rsp.getResults(), "id_i", - 10, 8, 5, 12, 13, 7, 1, 2, 9, 3, 4, 6, 11, - 14, 15, 16, 17, - 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, - 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, - 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, - 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, - 140, 141, 142, 143, 144, 145, 146, 147, 148, 149); - - rsp = query("q","*:*", "sort", sint1_ml + " asc, id_i desc", "rows", "200"); - assertFieldValues(rsp.getResults(), "id_i", - 11, 6, 4, 3, 9, 2, 1, 7, 13, 12, 5, 8, 10, - 149, 148, 147, 146, 145, 144, 143, 142, 141, 140, - 139, 138, 137, 136, 135, 134, 133, 132, 131, 130, - 129, 128, 127, 126, 125, 124, 123, 122, 121, 120, - 119, 118, 117, 116, 115, 114, 113, 112, 111, 110, - 109, 108, 107, 106, 105, 104, 103, 102, 101, 100, - 17, 16, 15, 14); - - // id field values: 1 2 3 4 5 6 7 8 9 10 11 12 13 - // long1_ml field values: 100 50 2 -100 500 -600 123 876 7 4321 -987 379 232 - // long1_ml asc sort pos: 7 6 4 3 11 2 8 12 5 13 1 10 9 - // long1_ml desc sort pos: 7 8 10 11 3 12 6 2 9 1 13 4 5 - - rsp = query("q","*:*", "sort", long1_ml + " desc", "rows", "13"); + rsp = query("q", "*:*", "sort", sint1_ml + " desc, id_i asc", "rows", "200"); + assertFieldValues( + rsp.getResults(), + "id_i", + 10, + 8, + 5, + 12, + 13, + 7, + 1, + 2, + 9, + 3, + 4, + 6, + 11, + 14, + 15, + 16, + 17, + 100, + 101, + 102, + 103, + 104, + 105, + 106, + 107, + 108, + 109, + 110, + 111, + 112, + 113, + 114, + 115, + 116, + 117, + 118, + 119, + 120, + 121, + 122, + 123, + 124, + 125, + 126, + 127, + 128, + 129, + 130, + 131, + 132, + 133, + 134, + 135, + 136, + 137, + 138, + 139, + 140, + 141, + 142, + 143, + 144, + 145, + 146, + 147, + 148, + 149); + + rsp = query("q", "*:*", "sort", sint1_ml + " asc, id_i desc", "rows", "200"); + assertFieldValues( + rsp.getResults(), + "id_i", + 11, + 6, + 4, + 3, + 9, + 2, + 1, + 7, + 13, + 12, + 5, + 8, + 10, + 149, + 148, + 147, + 146, + 145, + 144, + 143, + 142, + 141, + 140, + 139, + 138, + 137, + 136, + 135, + 134, + 133, + 132, + 131, + 130, + 129, + 128, + 127, + 126, + 125, + 124, + 123, + 122, + 121, + 120, + 119, + 118, + 117, + 116, + 115, + 114, + 113, + 112, + 111, + 110, + 109, + 108, + 107, + 106, + 105, + 104, + 103, + 102, + 101, + 100, + 17, + 16, + 15, + 14); + + // id field values: 1 2 3 4 5 6 7 8 9 10 11 12 + // 13 + // long1_ml field values: 100 50 2 -100 500 -600 123 876 7 4321 -987 379 + // 232 + // long1_ml asc sort pos: 7 6 4 3 11 2 8 12 5 13 1 10 + // 9 + // long1_ml desc sort pos: 7 8 10 11 3 12 6 2 9 1 13 4 + // 5 + + rsp = query("q", "*:*", "sort", long1_ml + " desc", "rows", "13"); assertFieldValues(rsp.getResults(), "id_i", 10, 8, 5, 12, 13, 7, 1, 2, 9, 3, 4, 6, 11); - rsp = 
query("q","*:*", "sort", long1_ml + " asc", "rows", "13"); + rsp = query("q", "*:*", "sort", long1_ml + " asc", "rows", "13"); assertFieldValues(rsp.getResults(), "id_i", 11, 6, 4, 3, 9, 2, 1, 7, 13, 12, 5, 8, 10); - rsp = query("q","*:*", "sort", long1_ml + " desc, id_i asc", "rows", "200"); - assertFieldValues(rsp.getResults(), "id_i", - 10, 8, 5, 12, 13, 7, 1, 2, 9, 3, 4, 6, 11, - 14, 15, 16, 17, - 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, - 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, - 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, - 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, - 140, 141, 142, 143, 144, 145, 146, 147, 148, 149); - - rsp = query("q","*:*", "sort", long1_ml + " asc, id_i desc", "rows", "200"); - assertFieldValues(rsp.getResults(), "id_i", - 11, 6, 4, 3, 9, 2, 1, 7, 13, 12, 5, 8, 10, - 149, 148, 147, 146, 145, 144, 143, 142, 141, 140, - 139, 138, 137, 136, 135, 134, 133, 132, 131, 130, - 129, 128, 127, 126, 125, 124, 123, 122, 121, 120, - 119, 118, 117, 116, 115, 114, 113, 112, 111, 110, - 109, 108, 107, 106, 105, 104, 103, 102, 101, 100, - 17, 16, 15, 14); - - - // id field values: 1 2 3 4 5 6 7 8 9 10 11 12 13 - // string1_ml field values: DE ABC HIJK L M YB WX N QRS P O YA TUV F G - // string1_ml asc sort pos: 2 1 4 5 13 11 6 9 8 7 12 10 3 - // string1_ml desc sort pos: 12 13 10 9 1 3 8 5 6 7 2 4 11 - - rsp = query("q","*:*", "sort", string1_ml + " desc", "rows", "13"); + rsp = query("q", "*:*", "sort", long1_ml + " desc, id_i asc", "rows", "200"); + assertFieldValues( + rsp.getResults(), + "id_i", + 10, + 8, + 5, + 12, + 13, + 7, + 1, + 2, + 9, + 3, + 4, + 6, + 11, + 14, + 15, + 16, + 17, + 100, + 101, + 102, + 103, + 104, + 105, + 106, + 107, + 108, + 109, + 110, + 111, + 112, + 113, + 114, + 115, + 116, + 117, + 118, + 119, + 120, + 121, + 122, + 123, + 124, + 125, + 126, + 127, + 128, + 129, + 130, + 131, + 132, + 133, + 134, + 135, + 136, + 137, + 138, + 139, + 140, + 141, + 142, + 143, + 144, + 145, + 146, + 147, + 148, + 149); + + rsp = query("q", "*:*", "sort", long1_ml + " asc, id_i desc", "rows", "200"); + assertFieldValues( + rsp.getResults(), + "id_i", + 11, + 6, + 4, + 3, + 9, + 2, + 1, + 7, + 13, + 12, + 5, + 8, + 10, + 149, + 148, + 147, + 146, + 145, + 144, + 143, + 142, + 141, + 140, + 139, + 138, + 137, + 136, + 135, + 134, + 133, + 132, + 131, + 130, + 129, + 128, + 127, + 126, + 125, + 124, + 123, + 122, + 121, + 120, + 119, + 118, + 117, + 116, + 115, + 114, + 113, + 112, + 111, + 110, + 109, + 108, + 107, + 106, + 105, + 104, + 103, + 102, + 101, + 100, + 17, + 16, + 15, + 14); + + // id field values: 1 2 3 4 5 6 7 8 9 10 11 + // 12 13 + // string1_ml field values: DE ABC HIJK L M YB WX N QRS P O YA + // TUV F G + // string1_ml asc sort pos: 2 1 4 5 13 11 6 9 8 7 12 + // 10 3 + // string1_ml desc sort pos: 12 13 10 9 1 3 8 5 6 7 2 + // 4 11 + + rsp = query("q", "*:*", "sort", string1_ml + " desc", "rows", "13"); assertFieldValues(rsp.getResults(), "id_i", 5, 11, 6, 12, 8, 9, 10, 7, 4, 3, 13, 1, 2); - rsp = query("q","*:*", "sort", string1_ml + " asc", "rows", "13"); + rsp = query("q", "*:*", "sort", string1_ml + " asc", "rows", "13"); assertFieldValues(rsp.getResults(), "id_i", 2, 1, 13, 3, 4, 7, 10, 9, 8, 12, 6, 11, 5); - rsp = query("q","*:*", "sort", string1_ml + " desc, id_i asc", "rows", "200"); - assertFieldValues(rsp.getResults(), "id_i", - 5, 11, 6, 12, 8, 9, 10, 7, 4, 3, 13, 1, 2, + rsp = query("q", "*:*", "sort", string1_ml + " desc, id_i asc", "rows", "200"); + assertFieldValues( + rsp.getResults(), + "id_i", + 5, 
+ 11, + 6, + 12, + 8, + 9, + 10, + 7, + 4, + 3, + 13, + 1, + 2, // missing field string1_ml="a_s1", ascending id sort - 14, 15, 16, 17, - 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, - 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, - 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, - 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, - 140, 141, 142, 143, 144, 145, 146, 147, 148, 149); - - rsp = query("q","*:*", "sort", string1_ml + " asc, id_i desc", "rows", "200"); - assertFieldValues(rsp.getResults(), "id_i", - 2, 1, 13, 3, 4, 7, 10, 9, 8, 12, 6, 11, 5, + 14, + 15, + 16, + 17, + 100, + 101, + 102, + 103, + 104, + 105, + 106, + 107, + 108, + 109, + 110, + 111, + 112, + 113, + 114, + 115, + 116, + 117, + 118, + 119, + 120, + 121, + 122, + 123, + 124, + 125, + 126, + 127, + 128, + 129, + 130, + 131, + 132, + 133, + 134, + 135, + 136, + 137, + 138, + 139, + 140, + 141, + 142, + 143, + 144, + 145, + 146, + 147, + 148, + 149); + + rsp = query("q", "*:*", "sort", string1_ml + " asc, id_i desc", "rows", "200"); + assertFieldValues( + rsp.getResults(), + "id_i", + 2, + 1, + 13, + 3, + 4, + 7, + 10, + 9, + 8, + 12, + 6, + 11, + 5, // missing field string1_ml="a_s1", descending id sort - 149, 148, 147, 146, 145, 144, 143, 142, 141, 140, - 139, 138, 137, 136, 135, 134, 133, 132, 131, 130, - 129, 128, 127, 126, 125, 124, 123, 122, 121, 120, - 119, 118, 117, 116, 115, 114, 113, 112, 111, 110, - 109, 108, 107, 106, 105, 104, 103, 102, 101, 100, - 17, 16, 15, 14); + 149, + 148, + 147, + 146, + 145, + 144, + 143, + 142, + 141, + 140, + 139, + 138, + 137, + 136, + 135, + 134, + 133, + 132, + 131, + 130, + 129, + 128, + 127, + 126, + 125, + 124, + 123, + 122, + 121, + 120, + 119, + 118, + 117, + 116, + 115, + 114, + 113, + 112, + 111, + 110, + 109, + 108, + 107, + 106, + 105, + 104, + 103, + 102, + 101, + 100, + 17, + 16, + 15, + 14); } - + private void testSortMissingFirst() throws Exception { - // id field values: 1 2 3 4 5 6 7 8 9 10 11 12 13 - // sint1_mf field values: 100 50 2 -100 500 -600 123 876 7 4321 -987 379 232 - // sint1_mf asc sort pos: 7 6 4 3 11 2 8 12 5 13 1 10 9 - // sint1_mf desc sort pos: 7 8 10 11 3 12 6 2 9 1 13 4 5 - - QueryResponse rsp = query("q","*:*", "sort", sint1_mf + " desc, id_i asc", "rows", "200"); - assertFieldValues(rsp.getResults(), "id_i", - 14, 15, 16, 17, - 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, - 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, - 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, - 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, - 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, - 10, 8, 5, 12, 13, 7, 1, 2, 9, 3, 4, 6, 11); - - rsp = query("q","*:*", "sort", sint1_mf + " asc, id_i desc", "rows", "200"); - assertFieldValues(rsp.getResults(), "id_i", - 149, 148, 147, 146, 145, 144, 143, 142, 141, 140, - 139, 138, 137, 136, 135, 134, 133, 132, 131, 130, - 129, 128, 127, 126, 125, 124, 123, 122, 121, 120, - 119, 118, 117, 116, 115, 114, 113, 112, 111, 110, - 109, 108, 107, 106, 105, 104, 103, 102, 101, 100, - 17, 16, 15, 14, - 11, 6, 4, 3, 9, 2, 1, 7, 13, 12, 5, 8, 10); - - - // id field values: 1 2 3 4 5 6 7 8 9 10 11 12 13 - // long1_mf field values: 100 50 2 -100 500 -600 123 876 7 4321 -987 379 232 - // long1_mf asc sort pos: 7 6 4 3 11 2 8 12 5 13 1 10 9 - // long1_mf desc sort pos: 7 8 10 11 3 12 6 2 9 1 13 4 5 - - rsp = query("q","*:*", "sort", long1_mf + " desc, id_i asc", "rows", "200"); - assertFieldValues(rsp.getResults(), "id_i", - 14, 15, 16, 17, - 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, - 110, 111, 112, 113, 
114, 115, 116, 117, 118, 119, - 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, - 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, - 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, - 10, 8, 5, 12, 13, 7, 1, 2, 9, 3, 4, 6, 11); - - rsp = query("q","*:*", "sort", long1_mf + " asc, id_i desc", "rows", "200"); - assertFieldValues(rsp.getResults(), "id_i", - 149, 148, 147, 146, 145, 144, 143, 142, 141, 140, - 139, 138, 137, 136, 135, 134, 133, 132, 131, 130, - 129, 128, 127, 126, 125, 124, 123, 122, 121, 120, - 119, 118, 117, 116, 115, 114, 113, 112, 111, 110, - 109, 108, 107, 106, 105, 104, 103, 102, 101, 100, - 17, 16, 15, 14, - 11, 6, 4, 3, 9, 2, 1, 7, 13, 12, 5, 8, 10); - - - // id field values: 1 2 3 4 5 6 7 8 9 10 11 12 13 - // string1_mf field values: DE ABC HIJK L M YB WX N QRS P O YA TUV F G - // string1_mf asc sort pos: 2 1 4 5 13 11 6 9 8 7 12 10 3 - // string1_mf desc sort pos: 12 13 10 9 1 3 8 5 6 7 2 4 11 - - rsp = query("q","*:*", "sort", string1_mf + " desc, id_i asc", "rows", "200"); - assertFieldValues(rsp.getResults(), "id_i", + // id field values: 1 2 3 4 5 6 7 8 9 10 11 12 + // 13 + // sint1_mf field values: 100 50 2 -100 500 -600 123 876 7 4321 -987 379 + // 232 + // sint1_mf asc sort pos: 7 6 4 3 11 2 8 12 5 13 1 10 + // 9 + // sint1_mf desc sort pos: 7 8 10 11 3 12 6 2 9 1 13 4 + // 5 + + QueryResponse rsp = query("q", "*:*", "sort", sint1_mf + " desc, id_i asc", "rows", "200"); + assertFieldValues( + rsp.getResults(), + "id_i", + 14, + 15, + 16, + 17, + 100, + 101, + 102, + 103, + 104, + 105, + 106, + 107, + 108, + 109, + 110, + 111, + 112, + 113, + 114, + 115, + 116, + 117, + 118, + 119, + 120, + 121, + 122, + 123, + 124, + 125, + 126, + 127, + 128, + 129, + 130, + 131, + 132, + 133, + 134, + 135, + 136, + 137, + 138, + 139, + 140, + 141, + 142, + 143, + 144, + 145, + 146, + 147, + 148, + 149, + 10, + 8, + 5, + 12, + 13, + 7, + 1, + 2, + 9, + 3, + 4, + 6, + 11); + + rsp = query("q", "*:*", "sort", sint1_mf + " asc, id_i desc", "rows", "200"); + assertFieldValues( + rsp.getResults(), + "id_i", + 149, + 148, + 147, + 146, + 145, + 144, + 143, + 142, + 141, + 140, + 139, + 138, + 137, + 136, + 135, + 134, + 133, + 132, + 131, + 130, + 129, + 128, + 127, + 126, + 125, + 124, + 123, + 122, + 121, + 120, + 119, + 118, + 117, + 116, + 115, + 114, + 113, + 112, + 111, + 110, + 109, + 108, + 107, + 106, + 105, + 104, + 103, + 102, + 101, + 100, + 17, + 16, + 15, + 14, + 11, + 6, + 4, + 3, + 9, + 2, + 1, + 7, + 13, + 12, + 5, + 8, + 10); + + // id field values: 1 2 3 4 5 6 7 8 9 10 11 12 + // 13 + // long1_mf field values: 100 50 2 -100 500 -600 123 876 7 4321 -987 379 + // 232 + // long1_mf asc sort pos: 7 6 4 3 11 2 8 12 5 13 1 10 + // 9 + // long1_mf desc sort pos: 7 8 10 11 3 12 6 2 9 1 13 4 + // 5 + + rsp = query("q", "*:*", "sort", long1_mf + " desc, id_i asc", "rows", "200"); + assertFieldValues( + rsp.getResults(), + "id_i", + 14, + 15, + 16, + 17, + 100, + 101, + 102, + 103, + 104, + 105, + 106, + 107, + 108, + 109, + 110, + 111, + 112, + 113, + 114, + 115, + 116, + 117, + 118, + 119, + 120, + 121, + 122, + 123, + 124, + 125, + 126, + 127, + 128, + 129, + 130, + 131, + 132, + 133, + 134, + 135, + 136, + 137, + 138, + 139, + 140, + 141, + 142, + 143, + 144, + 145, + 146, + 147, + 148, + 149, + 10, + 8, + 5, + 12, + 13, + 7, + 1, + 2, + 9, + 3, + 4, + 6, + 11); + + rsp = query("q", "*:*", "sort", long1_mf + " asc, id_i desc", "rows", "200"); + assertFieldValues( + rsp.getResults(), + "id_i", + 149, + 148, + 147, + 146, + 145, + 144, + 143, + 142, + 141, + 140, + 139, 
+ 138, + 137, + 136, + 135, + 134, + 133, + 132, + 131, + 130, + 129, + 128, + 127, + 126, + 125, + 124, + 123, + 122, + 121, + 120, + 119, + 118, + 117, + 116, + 115, + 114, + 113, + 112, + 111, + 110, + 109, + 108, + 107, + 106, + 105, + 104, + 103, + 102, + 101, + 100, + 17, + 16, + 15, + 14, + 11, + 6, + 4, + 3, + 9, + 2, + 1, + 7, + 13, + 12, + 5, + 8, + 10); + + // id field values: 1 2 3 4 5 6 7 8 9 10 11 + // 12 13 + // string1_mf field values: DE ABC HIJK L M YB WX N QRS P O YA + // TUV F G + // string1_mf asc sort pos: 2 1 4 5 13 11 6 9 8 7 12 + // 10 3 + // string1_mf desc sort pos: 12 13 10 9 1 3 8 5 6 7 2 + // 4 11 + + rsp = query("q", "*:*", "sort", string1_mf + " desc, id_i asc", "rows", "200"); + assertFieldValues( + rsp.getResults(), + "id_i", // missing field string1_mf="a_s1_mf", ascending id sort - 14, 15, 16, 17, - 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, - 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, - 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, - 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, - 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, - 5, 11, 6, 12, 8, 9, 10, 7, 4, 3, 13, 1, 2); - - rsp = query("q","*:*", "sort", string1_mf + " asc, id_i desc", "rows", "200"); - assertFieldValues(rsp.getResults(), "id_i", + 14, + 15, + 16, + 17, + 100, + 101, + 102, + 103, + 104, + 105, + 106, + 107, + 108, + 109, + 110, + 111, + 112, + 113, + 114, + 115, + 116, + 117, + 118, + 119, + 120, + 121, + 122, + 123, + 124, + 125, + 126, + 127, + 128, + 129, + 130, + 131, + 132, + 133, + 134, + 135, + 136, + 137, + 138, + 139, + 140, + 141, + 142, + 143, + 144, + 145, + 146, + 147, + 148, + 149, + 5, + 11, + 6, + 12, + 8, + 9, + 10, + 7, + 4, + 3, + 13, + 1, + 2); + + rsp = query("q", "*:*", "sort", string1_mf + " asc, id_i desc", "rows", "200"); + assertFieldValues( + rsp.getResults(), + "id_i", // missing field string1_mf="a_s1_mf", descending id sort - 149, 148, 147, 146, 145, 144, 143, 142, 141, 140, - 139, 138, 137, 136, 135, 134, 133, 132, 131, 130, - 129, 128, 127, 126, 125, 124, 123, 122, 121, 120, - 119, 118, 117, 116, 115, 114, 113, 112, 111, 110, - 109, 108, 107, 106, 105, 104, 103, 102, 101, 100, - 17, 16, 15, 14, - 2, 1, 13, 3, 4, 7, 10, 9, 8, 12, 6, 11, 5); + 149, + 148, + 147, + 146, + 145, + 144, + 143, + 142, + 141, + 140, + 139, + 138, + 137, + 136, + 135, + 134, + 133, + 132, + 131, + 130, + 129, + 128, + 127, + 126, + 125, + 124, + 123, + 122, + 121, + 120, + 119, + 118, + 117, + 116, + 115, + 114, + 113, + 112, + 111, + 110, + 109, + 108, + 107, + 106, + 105, + 104, + 103, + 102, + 101, + 100, + 17, + 16, + 15, + 14, + 2, + 1, + 13, + 3, + 4, + 7, + 10, + 9, + 8, + 12, + 6, + 11, + 5); } } diff --git a/solr/core/src/test/org/apache/solr/TestDistributedSearch.java b/solr/core/src/test/org/apache/solr/TestDistributedSearch.java index 07cf1f03aff..75c966cfeb6 100644 --- a/solr/core/src/test/org/apache/solr/TestDistributedSearch.java +++ b/solr/core/src/test/org/apache/solr/TestDistributedSearch.java @@ -31,7 +31,6 @@ import java.util.concurrent.Callable; import java.util.concurrent.ExecutorCompletionService; import java.util.concurrent.Future; - import org.apache.commons.lang3.StringUtils; import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.SolrTestCaseJ4.SuppressSSL; @@ -67,10 +66,8 @@ import org.slf4j.LoggerFactory; /** - * TODO? perhaps use: - * http://docs.codehaus.org/display/JETTY/ServletTester - * rather then open a real connection? - * + * TODO? 
perhaps use: http://docs.codehaus.org/display/JETTY/ServletTester rather than open a real + * connection? * * @since solr 1.3 */ @@ -80,18 +77,18 @@ public class TestDistributedSearch extends BaseDistributedSearchTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - String t1="a_t"; + String t1 = "a_t"; String i1 = pickRandom("a_i1", "a_i_p", "a_i_ni_p"); String nint = pickRandom("n_i", "n_is_p", "n_is_ni_p"); String tint = "n_ti"; String tlong = "other_tl1"; String tdate_a = "a_n_tdt"; String tdate_b = "b_n_tdt"; - - String oddField="oddField_s"; - String s1="a_s"; - String missingField="ignore_exception__missing_but_valid_field_t"; - String invalidField="ignore_exception__invalid_field_not_in_schema"; + + String oddField = "oddField_s"; + String s1 = "a_s"; + String missingField = "ignore_exception__missing_but_valid_field_t"; + String invalidField = "ignore_exception__invalid_field_not_in_schema"; @Override protected String getSolrXml() { @@ -105,87 +102,189 @@ public static void beforeClass() { // we validate the connection before use on the restarted // server so that we don't use a bad one System.setProperty("validateAfterInactivity", "200"); - + System.setProperty("solr.httpclient.retries", "0"); System.setProperty("distribUpdateSoTimeout", "5000"); - - } public TestDistributedSearch() { // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); } - + @Test @SuppressWarnings({"unchecked"}) public void test() throws Exception { - + assertEquals(clients.size(), jettys.size()); - + QueryResponse rsp = null; int backupStress = stress; // make a copy so we can restore del("*:*"); - indexr(id,1, i1, 100, tlong, 100,t1,"now is the time for all good men", - "foo_sev_enum", "Medium", - tdate_a, "2010-04-20T11:00:00Z", - tdate_b, "2009-08-20T11:00:00Z", - "foo_f", 1.414f, "foo_b", "true", "foo_d", 1.414d, - s1, "z${foo}"); - indexr(id,2, i1, 50 , tlong, 50,t1,"to come to the aid of their country.", - "foo_sev_enum", "Medium", - "foo_sev_enum", "High", - tdate_a, "2010-05-02T11:00:00Z", - tdate_b, "2009-11-02T11:00:00Z", - s1, "z${foo}"); - indexr(id,3, i1, 2, tlong, 2,t1,"how now brown cow", - tdate_a, "2010-05-03T11:00:00Z", - s1, "z${foo}"); - indexr(id,4, i1, -100 ,tlong, 101, - t1,"the quick fox jumped over the lazy dog", - tdate_a, "2010-05-03T11:00:00Z", - tdate_b, "2010-05-03T11:00:00Z", - s1, "a"); - indexr(id,5, i1, 500, tlong, 500 , - t1,"the quick fox jumped way over the lazy dog", - tdate_a, "2010-05-05T11:00:00Z", - s1, "b"); - indexr(id,6, i1, -600, tlong, 600 ,t1,"humpty dumpy sat on a wall", s1, "c"); - indexr(id,7, i1, 123, tlong, 123 ,t1,"humpty dumpy had a great fall", s1, "d"); - indexr(id,8, i1, 876, tlong, 876, - tdate_b, "2010-01-05T11:00:00Z", - "foo_sev_enum", "High", - t1,"all the kings horses and all the kings men", s1, "e"); - indexr(id,9, i1, 7, tlong, 7,t1,"couldn't put humpty together again", s1, "f"); - - commit(); // try to ensure there's more than one segment - - indexr(id,10, i1, 4321, tlong, 4321,t1,"this too shall pass", s1, "g"); - indexr(id,11, i1, -987, tlong, 987, - "foo_sev_enum", "Medium", - t1,"An eye for eye only ends up making the whole world blind.", s1, "h"); - indexr(id,12, i1, 379, tlong, 379, - t1,"Great works are performed, not by strength, but by perseverance.", s1, "i"); -
indexr(id,13, i1, 232, tlong, 232, - t1,"no eggs on wall, lesson learned", - oddField, "odd man out", s1, "j"); + indexr( + id, + 1, + i1, + 100, + tlong, + 100, + t1, + "now is the time for all good men", + "foo_sev_enum", + "Medium", + tdate_a, + "2010-04-20T11:00:00Z", + tdate_b, + "2009-08-20T11:00:00Z", + "foo_f", + 1.414f, + "foo_b", + "true", + "foo_d", + 1.414d, + s1, + "z${foo}"); + indexr( + id, + 2, + i1, + 50, + tlong, + 50, + t1, + "to come to the aid of their country.", + "foo_sev_enum", + "Medium", + "foo_sev_enum", + "High", + tdate_a, + "2010-05-02T11:00:00Z", + tdate_b, + "2009-11-02T11:00:00Z", + s1, + "z${foo}"); + indexr( + id, + 3, + i1, + 2, + tlong, + 2, + t1, + "how now brown cow", + tdate_a, + "2010-05-03T11:00:00Z", + s1, + "z${foo}"); + indexr( + id, + 4, + i1, + -100, + tlong, + 101, + t1, + "the quick fox jumped over the lazy dog", + tdate_a, + "2010-05-03T11:00:00Z", + tdate_b, + "2010-05-03T11:00:00Z", + s1, + "a"); + indexr( + id, + 5, + i1, + 500, + tlong, + 500, + t1, + "the quick fox jumped way over the lazy dog", + tdate_a, + "2010-05-05T11:00:00Z", + s1, + "b"); + indexr(id, 6, i1, -600, tlong, 600, t1, "humpty dumpy sat on a wall", s1, "c"); + indexr(id, 7, i1, 123, tlong, 123, t1, "humpty dumpy had a great fall", s1, "d"); + indexr( + id, + 8, + i1, + 876, + tlong, + 876, + tdate_b, + "2010-01-05T11:00:00Z", + "foo_sev_enum", + "High", + t1, + "all the kings horses and all the kings men", + s1, + "e"); + indexr(id, 9, i1, 7, tlong, 7, t1, "couldn't put humpty together again", s1, "f"); + + commit(); // try to ensure there's more than one segment + + indexr(id, 10, i1, 4321, tlong, 4321, t1, "this too shall pass", s1, "g"); + indexr( + id, + 11, + i1, + -987, + tlong, + 987, + "foo_sev_enum", + "Medium", + t1, + "An eye for eye only ends up making the whole world blind.", + s1, + "h"); + indexr( + id, + 12, + i1, + 379, + tlong, + 379, + t1, + "Great works are performed, not by strength, but by perseverance.", + s1, + "i"); + indexr( + id, + 13, + i1, + 232, + tlong, + 232, + t1, + "no eggs on wall, lesson learned", + oddField, + "odd man out", + s1, + "j"); indexr(id, "1001", "lowerfilt", "toyota", s1, "k"); // for spellcheck - indexr(id, 14, "SubjectTerms_mfacet", new String[] {"mathematical models", "mathematical analysis"}, s1, "l"); - indexr(id, 15, "SubjectTerms_mfacet", new String[] {"test 1", "test 2", "test3"}); - indexr(id, 16, "SubjectTerms_mfacet", new String[] {"test 1", "test 2", "test3"}); + indexr( + id, + 14, + "SubjectTerms_mfacet", + new String[] {"mathematical models", "mathematical analysis"}, + s1, + "l"); + indexr(id, 15, "SubjectTerms_mfacet", new String[] {"test 1", "test 2", "test3"}); + indexr(id, 16, "SubjectTerms_mfacet", new String[] {"test 1", "test 2", "test3"}); String[] vals = new String[100]; - for (int i=0; i<100; i++) { + for (int i = 0; i < 100; i++) { vals[i] = "test " + i; } indexr(id, 17, "SubjectTerms_mfacet", vals); - - - for (int i=100; i<150; i++) { - indexr(id, i); + for (int i = 100; i < 150; i++) { + indexr(id, i); } commit(); @@ -194,126 +293,218 @@ public void test() throws Exception { handle.put("timestamp", SKIPVAL); handle.put("_version_", SKIPVAL); // not a cloud test, but may use updateLog - //Test common query parameters. + // Test common query parameters. 
validateCommonQueryParameters(); // random value sort for (String f : fieldNames) { - query("q","*:*", "sort",f+" desc"); - query("q","*:*", "sort",f+" asc"); + query("q", "*:*", "sort", f + " desc"); + query("q", "*:*", "sort", f + " asc"); } // these queries should be exactly ordered and scores should exactly match - query("q","*:*", "sort",i1+" desc"); - query("q","*:*", "sort","{!func}testfunc(add("+i1+",5))"+" desc"); - query("q",i1 + "[* TO *]", "sort",i1+" asc"); - query("q","*:*", "sort",i1+" asc, id desc"); - query("q","*:*", "sort",i1+" desc", "fl","*,score"); - query("q","*:*", "sort","n_tl1 asc", "fl","*,score"); - query("q","*:*", "sort","n_tl1 desc"); - + query("q", "*:*", "sort", i1 + " desc"); + query("q", "*:*", "sort", "{!func}testfunc(add(" + i1 + ",5))" + " desc"); + query("q", i1 + "[* TO *]", "sort", i1 + " asc"); + query("q", "*:*", "sort", i1 + " asc, id desc"); + query("q", "*:*", "sort", i1 + " desc", "fl", "*,score"); + query("q", "*:*", "sort", "n_tl1 asc", "fl", "*,score"); + query("q", "*:*", "sort", "n_tl1 desc"); + handle.put("maxScore", SKIPVAL); testMinExactCount(); - - query("q","{!func}"+i1);// does not expect maxScore. So if it comes ,ignore it. JavaBinCodec.writeSolrDocumentList() - //is agnostic of request params. + + query("q", "{!func}" + i1); // does not expect maxScore. So if it comes, ignore it. + // JavaBinCodec.writeSolrDocumentList() is agnostic of request params. handle.remove("maxScore"); - query("q","{!func}"+i1, "fl","*,score"); // even scores should match exactly here + query("q", "{!func}" + i1, "fl", "*,score"); // even scores should match exactly here handle.put("highlighting", UNORDERED); handle.put("response", UNORDERED); handle.put("maxScore", SKIPVAL); - query("q","quick"); - query("q","all","fl","id","start","0"); - query("q","all","fl","foofoofoo","start","0"); // no fields in returned docs - query("q","all","fl","id","start","100"); + query("q", "quick"); + query("q", "all", "fl", "id", "start", "0"); + query("q", "all", "fl", "foofoofoo", "start", "0"); // no fields in returned docs + query("q", "all", "fl", "id", "start", "100"); handle.put("score", SKIPVAL); - query("q","quick","fl","*,score"); - query("q","all","fl","*,score","start","1"); - query("q","all","fl","*,score","start","100"); + query("q", "quick", "fl", "*,score"); + query("q", "all", "fl", "*,score", "start", "1"); + query("q", "all", "fl", "*,score", "start", "100"); - query("q","now their fox sat had put","fl","*,score", - "hl","true","hl.fl",t1); + query("q", "now their fox sat had put", "fl", "*,score", "hl", "true", "hl.fl", t1); - query("q","now their fox sat had put","fl","foofoofoo", - "hl","true","hl.fl",t1); + query("q", "now their fox sat had put", "fl", "foofoofoo", "hl", "true", "hl.fl", t1); - query("q","matchesnothing","fl","*,score"); + query("q", "matchesnothing", "fl", "*,score"); // test that a single NOW value is propagated to all shards... 
if that is true // then the primary sort should always be a tie and then the secondary should always decide - query("q","{!func}ms(NOW)", "sort","score desc,"+i1+" desc","fl","id"); - - query("q","*:*", "rows",0, "facet","true", "facet.field",t1, "facet.field",t1); - query("q","*:*", "rows",0, "facet","true", "facet.field",t1,"facet.limit",1); - query("q","*:*", "rows",0, "facet","true", "facet.query","quick", "facet.query","quick", "facet.query","all", "facet.query","*:*"); - query("q","*:*", "rows",0, "facet","true", "facet.field",t1, "facet.mincount",2); + query("q", "{!func}ms(NOW)", "sort", "score desc," + i1 + " desc", "fl", "id"); + + query("q", "*:*", "rows", 0, "facet", "true", "facet.field", t1, "facet.field", t1); + query("q", "*:*", "rows", 0, "facet", "true", "facet.field", t1, "facet.limit", 1); + query( + "q", + "*:*", + "rows", + 0, + "facet", + "true", + "facet.query", + "quick", + "facet.query", + "quick", + "facet.query", + "all", + "facet.query", + "*:*"); + query("q", "*:*", "rows", 0, "facet", "true", "facet.field", t1, "facet.mincount", 2); // a facet query to test out chars out of the ascii range - query("q","*:*", "rows",0, "facet","true", "facet.query","{!term f=foo_s}international\u00ff\u01ff\u2222\u3333"); + query( + "q", + "*:*", + "rows", + 0, + "facet", + "true", + "facet.query", + "{!term f=foo_s}international\u00ff\u01ff\u2222\u3333"); // simple field facet on date fields - rsp = query("q","*:*", "rows", 0, - "facet","true", "facet.limit", 1, // TODO: limit shouldn't be needed: SOLR-6386 - "facet.field", tdate_a); + rsp = + query( + "q", + "*:*", + "rows", + 0, + "facet", + "true", + "facet.limit", + 1, // TODO: limit shouldn't be needed: SOLR-6386 + "facet.field", + tdate_a); assertEquals(1, rsp.getFacetFields().size()); - rsp = query("q","*:*", "rows", 0, - "facet","true", "facet.limit", 1, // TODO: limit shouldn't be needed: SOLR-6386 - "facet.field", tdate_b, "facet.field", tdate_a); + rsp = + query( + "q", + "*:*", + "rows", + 0, + "facet", + "true", + "facet.limit", + 1, // TODO: limit shouldn't be needed: SOLR-6386 + "facet.field", + tdate_b, + "facet.field", + tdate_a); assertEquals(2, rsp.getFacetFields().size()); - + String facetQuery = "id_i1:[1 TO 15]"; // simple range facet on one field - query("q",facetQuery, "rows",100, "facet","true", - "facet.range",tlong, - "facet.range",tlong, - "facet.range.start",200, - "facet.range.gap",100, - "facet.range.end",900, - "facet.range.method", FacetRangeMethod.FILTER); - + query( + "q", + facetQuery, + "rows", + 100, + "facet", + "true", + "facet.range", + tlong, + "facet.range", + tlong, + "facet.range.start", + 200, + "facet.range.gap", + 100, + "facet.range.end", + 900, + "facet.range.method", + FacetRangeMethod.FILTER); + // simple range facet on one field using dv method - query("q",facetQuery, "rows",100, "facet","true", - "facet.range",tlong, - "facet.range",tlong, - "facet.range.start",200, - "facet.range.gap",100, - "facet.range.end",900, - "facet.range.method", FacetRangeMethod.DV); + query( + "q", + facetQuery, + "rows", + 100, + "facet", + "true", + "facet.range", + tlong, + "facet.range", + tlong, + "facet.range.start", + 200, + "facet.range.gap", + 100, + "facet.range.end", + 900, + "facet.range.method", + FacetRangeMethod.DV); // range facet on multiple fields - query("q",facetQuery, "rows",100, "facet","true", - "facet.range",tlong, - "facet.range",i1, - "f."+i1+".facet.range.start",300, - "f."+i1+".facet.range.gap",87, - "facet.range.end",900, - "facet.range.start",200, - 
"facet.range.gap",100, - "f."+tlong+".facet.range.end",900, - "f."+i1+".facet.range.method", FacetRangeMethod.FILTER, - "f."+tlong+".facet.range.method", FacetRangeMethod.DV); - + query( + "q", + facetQuery, + "rows", + 100, + "facet", + "true", + "facet.range", + tlong, + "facet.range", + i1, + "f." + i1 + ".facet.range.start", + 300, + "f." + i1 + ".facet.range.gap", + 87, + "facet.range.end", + 900, + "facet.range.start", + 200, + "facet.range.gap", + 100, + "f." + tlong + ".facet.range.end", + 900, + "f." + i1 + ".facet.range.method", + FacetRangeMethod.FILTER, + "f." + tlong + ".facet.range.method", + FacetRangeMethod.DV); + // range facet with "other" param - QueryResponse response = query("q",facetQuery, "rows",100, "facet","true", - "facet.range",tlong, - "facet.range.start",200, - "facet.range.gap",100, - "facet.range.end",900, - "facet.range.other","all"); + QueryResponse response = + query( + "q", + facetQuery, + "rows", + 100, + "facet", + "true", + "facet.range", + tlong, + "facet.range.start", + 200, + "facet.range.gap", + 100, + "facet.range.end", + 900, + "facet.range.other", + "all"); assertEquals(tlong, response.getFacetRanges().get(0).getName()); assertEquals(6, response.getFacetRanges().get(0).getBefore()); assertEquals(5, response.getFacetRanges().get(0).getBetween()); assertEquals(2, response.getFacetRanges().get(0).getAfter()); - // Test mincounts. Do NOT want to go through all the stuff where with validateControlData in query() method - // Purposely packing a _bunch_ of stuff together here to insure that the proper level of mincount is used for - // each + // Test mincounts. Do NOT want to go through all the stuff where with validateControlData in + // query() method. Purposely packing a _bunch_ of stuff together here to insure that the proper + // level of mincount is used for each ModifiableSolrParams minParams = new ModifiableSolrParams(); - minParams.set("q","*:*"); + minParams.set("q", "*:*"); minParams.set("rows", 1); minParams.set("facet", "true"); minParams.set("facet.missing", "true"); @@ -328,7 +519,6 @@ public void test() throws Exception { minParams.set("f." + i1 + ".facet.range.end", 1200); minParams.set("f." + i1 + ".facet.mincount", 4); - // Return a separate section of ranges over tlong Should respect facet.mincount minParams.add("facet.range", tlong); minParams.set("f." + tlong + ".facet.range.start", 0); @@ -342,122 +532,271 @@ public void test() throws Exception { minParams.set("f." 
+ tdate_b + ".facet.mincount", 3); // Insure that global mincount is respected for facet queries - minParams.set("facet.query", tdate_a + ":[2010-01-01T00:00:00Z TO 2011-01-01T00:00:00Z]"); // Should return some counts - //minParams.set("facet.query", tdate_a + ":[* TO *]"); // Should be removed - minParams.add("facet.query", tdate_b + ":[2008-01-01T00:00:00Z TO 2009-09-01T00:00:00Z]"); // Should be removed from response - + minParams.set( + "facet.query", + tdate_a + ":[2010-01-01T00:00:00Z TO 2011-01-01T00:00:00Z]"); // Should return some counts + // minParams.set("facet.query", tdate_a + ":[* TO *]"); // Should be removed + minParams.add( + "facet.query", + tdate_b + + ":[2008-01-01T00:00:00Z TO 2009-09-01T00:00:00Z]"); // Should be removed from response setDistributedParams(minParams); QueryResponse minResp = queryServer(minParams); ModifiableSolrParams eParams = new ModifiableSolrParams(); - eParams.set("q",tdate_b + ":[* TO *]"); + eParams.set("q", tdate_b + ":[* TO *]"); eParams.set("rows", 1000); eParams.set("fl", tdate_b); setDistributedParams(eParams); QueryResponse eResp = queryServer(eParams); // Check that exactly the right numbers of counts came through - assertEquals("Should be exactly 2 range facets returned after minCounts taken into account ", 3, minResp.getFacetRanges().size()); - assertEquals("Should only be 1 query facets returned after minCounts taken into account ", 1, minResp.getFacetQuery().size()); - - checkMinCountsField(minResp.getFacetField(i1).getValues(), new Object[]{null, 55L}); // Should just be the null entries for field - - checkMinCountsRange(minResp.getFacetRanges().get(0).getCounts(), new Object[]{"0", 5L}); // range on i1 - checkMinCountsRange(minResp.getFacetRanges().get(1).getCounts(), new Object[]{"0", 3L, "100", 3L}); // range on tlong - checkMinCountsRange(minResp.getFacetRanges().get(2).getCounts(), new Object[]{"2009-02-01T00:00:00Z", 3L}); // date (range) on tvh - - assertTrue("Should have a facet for tdate_a", minResp.getFacetQuery().containsKey("a_n_tdt:[2010-01-01T00:00:00Z TO 2011-01-01T00:00:00Z]")); - int qCount = minResp.getFacetQuery().get("a_n_tdt:[2010-01-01T00:00:00Z TO 2011-01-01T00:00:00Z]"); + assertEquals( + "Should be exactly 2 range facets returned after minCounts taken into account ", + 3, + minResp.getFacetRanges().size()); + assertEquals( + "Should only be 1 query facets returned after minCounts taken into account ", + 1, + minResp.getFacetQuery().size()); + + checkMinCountsField( + minResp.getFacetField(i1).getValues(), + new Object[] {null, 55L}); // Should just be the null entries for field + + checkMinCountsRange( + minResp.getFacetRanges().get(0).getCounts(), new Object[] {"0", 5L}); // range on i1 + checkMinCountsRange( + minResp.getFacetRanges().get(1).getCounts(), + new Object[] {"0", 3L, "100", 3L}); // range on tlong + checkMinCountsRange( + minResp.getFacetRanges().get(2).getCounts(), + new Object[] {"2009-02-01T00:00:00Z", 3L}); // date (range) on tvh + + assertTrue( + "Should have a facet for tdate_a", + minResp + .getFacetQuery() + .containsKey("a_n_tdt:[2010-01-01T00:00:00Z TO 2011-01-01T00:00:00Z]")); + int qCount = + minResp.getFacetQuery().get("a_n_tdt:[2010-01-01T00:00:00Z TO 2011-01-01T00:00:00Z]"); assertEquals("tdate_a should be 5", qCount, 5); // Now let's do some queries, the above is getting too complex minParams = new ModifiableSolrParams(); - minParams.set("q","*:*"); + minParams.set("q", "*:*"); minParams.set("rows", 1); minParams.set("facet", "true"); minParams.set("facet.mincount", 3); 
minParams.set("facet.query", tdate_a + ":[2010-01-01T00:00:00Z TO 2010-05-04T00:00:00Z]"); - minParams.add("facet.query", tdate_b + ":[2009-01-01T00:00:00Z TO 2010-01-01T00:00:00Z]"); // Should be removed + minParams.add( + "facet.query", + tdate_b + ":[2009-01-01T00:00:00Z TO 2010-01-01T00:00:00Z]"); // Should be removed setDistributedParams(minParams); minResp = queryServer(minParams); - assertEquals("Should only be 1 query facets returned after minCounts taken into account ", 1, minResp.getFacetQuery().size()); - assertTrue("Should be an entry for a_n_tdt", minResp.getFacetQuery().containsKey("a_n_tdt:[2010-01-01T00:00:00Z TO 2010-05-04T00:00:00Z]")); + assertEquals( + "Should only be 1 query facets returned after minCounts taken into account ", + 1, + minResp.getFacetQuery().size()); + assertTrue( + "Should be an entry for a_n_tdt", + minResp + .getFacetQuery() + .containsKey("a_n_tdt:[2010-01-01T00:00:00Z TO 2010-05-04T00:00:00Z]")); qCount = minResp.getFacetQuery().get("a_n_tdt:[2010-01-01T00:00:00Z TO 2010-05-04T00:00:00Z]"); assertEquals("a_n_tdt should have a count of 4 ", qCount, 4); // variations of fl - query("q","*:*", "fl","score","sort",i1 + " desc"); - query("q","*:*", "fl",i1 + ",score","sort",i1 + " desc"); - query("q","*:*", "fl", i1, "fl","score","sort",i1 + " desc"); - query("q","*:*", "fl", "id," + i1,"sort",i1 + " desc"); - query("q","*:*", "fl", "id", "fl",i1,"sort",i1 + " desc"); - query("q","*:*", "fl",i1, "fl", "id","sort",i1 + " desc"); - query("q","*:*", "fl", "id", "fl",nint, "fl",tint,"sort",i1 + " desc"); - query("q","*:*", "fl",nint, "fl", "id", "fl",tint,"sort",i1 + " desc"); + query("q", "*:*", "fl", "score", "sort", i1 + " desc"); + query("q", "*:*", "fl", i1 + ",score", "sort", i1 + " desc"); + query("q", "*:*", "fl", i1, "fl", "score", "sort", i1 + " desc"); + query("q", "*:*", "fl", "id," + i1, "sort", i1 + " desc"); + query("q", "*:*", "fl", "id", "fl", i1, "sort", i1 + " desc"); + query("q", "*:*", "fl", i1, "fl", "id", "sort", i1 + " desc"); + query("q", "*:*", "fl", "id", "fl", nint, "fl", tint, "sort", i1 + " desc"); + query("q", "*:*", "fl", nint, "fl", "id", "fl", tint, "sort", i1 + " desc"); handle.put("did", SKIPVAL); - query("q","*:*", "fl","did:[docid]","sort",i1 + " desc"); + query("q", "*:*", "fl", "did:[docid]", "sort", i1 + " desc"); handle.remove("did"); - query("q","*:*", "fl","log(" + tlong + "),abs(" + tlong + "),score","sort",i1 + " desc"); - query("q","*:*", "fl","n_*","sort",i1 + " desc"); + query("q", "*:*", "fl", "log(" + tlong + "),abs(" + tlong + "),score", "sort", i1 + " desc"); + query("q", "*:*", "fl", "n_*", "sort", i1 + " desc"); // basic spellcheck testing - query("q", "toyata", "fl", "id,lowerfilt", "spellcheck", true, "spellcheck.q", "toyata", "qt", "/spellCheckCompRH_Direct", "shards.qt", "/spellCheckCompRH_Direct"); - - stress=0; // turn off stress... we want to tex max combos in min time - for (int i=0; i<25*RANDOM_MULTIPLIER; i++) { + query( + "q", + "toyata", + "fl", + "id,lowerfilt", + "spellcheck", + true, + "spellcheck.q", + "toyata", + "qt", + "/spellCheckCompRH_Direct", + "shards.qt", + "/spellCheckCompRH_Direct"); + + stress = 0; // turn off stress... 
we want to test max combos in min time + for (int i = 0; i < 25 * RANDOM_MULTIPLIER; i++) { String f = fieldNames[random().nextInt(fieldNames.length)]; - if (random().nextBoolean()) f = t1; // the text field is a really interesting one to facet on (and it's multi-valued too) + if (random().nextBoolean()) { + // the text field is an interesting one to facet on (and it's multi-valued too) + f = t1; + } // we want a random query and not just *:* so we'll get zero counts in facets also // TODO: do a better random query - String q = random().nextBoolean() ? "*:*" : "id:(1 3 5 7 9 11 13) OR id_i1:[100 TO " + random().nextInt(50) + "]"; + String q = + random().nextBoolean() + ? "*:*" + : "id:(1 3 5 7 9 11 13) OR id_i1:[100 TO " + random().nextInt(50) + "]"; - int nolimit = random().nextBoolean() ? -1 : 10000; // these should be equivalent + int nolimit = random().nextBoolean() ? -1 : 10000; // these should be equivalent // if limit==-1, we should always get exact matches - query("q",q, "rows",0, "facet","true", "facet.field",f, "facet.limit",nolimit, "facet.sort","count", "facet.mincount",random().nextInt(5), "facet.offset",random().nextInt(10)); - query("q",q, "rows",0, "facet","true", "facet.field",f, "facet.limit",nolimit, "facet.sort","index", "facet.mincount",random().nextInt(5), "facet.offset",random().nextInt(10)); + query( + "q", + q, + "rows", + 0, + "facet", + "true", + "facet.field", + f, + "facet.limit", + nolimit, + "facet.sort", + "count", + "facet.mincount", + random().nextInt(5), + "facet.offset", + random().nextInt(10)); + query( + "q", + q, + "rows", + 0, + "facet", + "true", + "facet.field", + f, + "facet.limit", + nolimit, + "facet.sort", + "index", + "facet.mincount", + random().nextInt(5), + "facet.offset", + random().nextInt(10)); // for index sort, we should get exact results for mincount <= 1 - query("q",q, "rows",0, "facet","true", "facet.field",f, "facet.sort","index", "facet.mincount",random().nextInt(2), "facet.offset",random().nextInt(10), "facet.limit",random().nextInt(11)-1); + query( + "q", + q, + "rows", + 0, + "facet", + "true", + "facet.field", + f, + "facet.sort", + "index", + "facet.mincount", + random().nextInt(2), + "facet.offset", + random().nextInt(10), + "facet.limit", + random().nextInt(11) - 1); } - stress = backupStress; // restore stress + stress = backupStress; // restore stress // test faceting multiple things at once - query("q","*:*", "rows",0, "facet","true", "facet.query","quick", "facet.query","all", "facet.query","*:*" - ,"facet.field",t1); + query( + "q", + "*:*", + "rows", + 0, + "facet", + "true", + "facet.query", + "quick", + "facet.query", + "all", + "facet.query", + "*:*", + "facet.field", + t1); // test filter tagging, facet exclusion, and naming (multi-select facet support) - queryAndCompareUIF("q","*:*", "rows",0, "facet","true", "facet.query","{!key=myquick}quick", "facet.query","{!key=myall ex=a}all", "facet.query","*:*" - ,"facet.field","{!key=mykey ex=a}"+t1 - ,"facet.field","{!key=other ex=b}"+t1 - ,"facet.field","{!key=again ex=a,b}"+t1 - ,"facet.field",t1 - ,"fq","{!tag=a}id_i1:[1 TO 7]", "fq","{!tag=b}id_i1:[3 TO 9]" - ); - queryAndCompareUIF("q", "*:*", "facet", "true", "facet.field", "{!ex=t1}SubjectTerms_mfacet", "fq", "{!tag=t1}SubjectTerms_mfacet:(test 1)", "facet.limit", "10", "facet.mincount", "1"); + queryAndCompareUIF( + "q", + "*:*", + "rows", + 0, + "facet", + "true", + "facet.query", + "{!key=myquick}quick", + "facet.query", + "{!key=myall ex=a}all", + "facet.query", + "*:*", + "facet.field", + "{!key=mykey ex=a}"
+ t1, + "facet.field", + "{!key=other ex=b}" + t1, + "facet.field", + "{!key=again ex=a,b}" + t1, + "facet.field", + t1, + "fq", + "{!tag=a}id_i1:[1 TO 7]", + "fq", + "{!tag=b}id_i1:[3 TO 9]"); + queryAndCompareUIF( + "q", + "*:*", + "facet", + "true", + "facet.field", + "{!ex=t1}SubjectTerms_mfacet", + "fq", + "{!tag=t1}SubjectTerms_mfacet:(test 1)", + "facet.limit", + "10", + "facet.mincount", + "1"); // test field that is valid in schema but missing in all shards - query("q","*:*", "rows",100, "facet","true", "facet.field",missingField, "facet.mincount",2); + query( + "q", "*:*", "rows", 100, "facet", "true", "facet.field", missingField, "facet.mincount", 2); // test field that is valid in schema and missing in some shards - query("q","*:*", "rows",100, "facet","true", "facet.field",oddField, "facet.mincount",2); - - query("q","*:*", "sort",i1+" desc", "stats", "true", "stats.field", "stats_dt"); - query("q","*:*", "sort",i1+" desc", "stats", "true", "stats.field", i1); - query("q","*:*", "sort",i1+" desc", "stats", "true", "stats.field", nint); + query("q", "*:*", "rows", 100, "facet", "true", "facet.field", oddField, "facet.mincount", 2); + + query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "stats_dt"); + query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", i1); + query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", nint); handle.put("stddev", FUZZY); handle.put("sumOfSquares", FUZZY); - query("q","*:*", "sort",i1+" desc", "stats", "true", "stats.field", tdate_a); - query("q","*:*", "sort",i1+" desc", "stats", "true", "stats.field", tdate_b); + query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", tdate_a); + query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", tdate_b); handle.remove("stddev"); handle.remove("sumOfSquares"); - - rsp = query("q", "*:*", "sort", i1 + " desc", "stats", "true", - "stats.field", "{!cardinality='true'}" + oddField, - "stats.field", "{!cardinality='true'}" + tlong); + rsp = + query( + "q", + "*:*", + "sort", + i1 + " desc", + "stats", + "true", + "stats.field", + "{!cardinality='true'}" + oddField, + "stats.field", + "{!cardinality='true'}" + tlong); { // don't leak variabls @@ -494,31 +833,53 @@ public void test() throws Exception { assertNull("expected null for percentiles", s.getSum()); } - query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", + query( + "q", + "*:*", + "sort", + i1 + " desc", + "stats", + "true", + "stats.field", "{!percentiles='1,2,3,4,5'}" + i1); - - query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", + + query( + "q", + "*:*", + "sort", + i1 + " desc", + "stats", + "true", + "stats.field", "{!percentiles='1,20,30,40,98,99,99.9'}" + i1); - - rsp = query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", - "{!percentiles='1.0,99.999,0.001'}" + tlong); + + rsp = + query( + "q", + "*:*", + "sort", + i1 + " desc", + "stats", + "true", + "stats.field", + "{!percentiles='1.0,99.999,0.001'}" + tlong); { // don't leak variabls - Double[] expectedKeys = new Double[] { 1.0D, 99.999D, 0.001D }; - Double[] expectedVals = new Double[] { 2.0D, 4320.0D, 2.0D }; + Double[] expectedKeys = new Double[] {1.0D, 99.999D, 0.001D}; + Double[] expectedVals = new Double[] {2.0D, 4320.0D, 2.0D}; FieldStatsInfo s = rsp.getFieldStatsInfo().get(tlong); assertNotNull("no stats for " + tlong, s); - Map p = s.getPercentiles(); + Map p = s.getPercentiles(); assertNotNull("no percentils", p); assertEquals("insufficient 
percentiles", expectedKeys.length, p.size()); Iterator actualKeys = p.keySet().iterator(); for (int i = 0; i < expectedKeys.length; i++) { Double expectedKey = expectedKeys[i]; - assertTrue("Ran out of actual keys as of : "+ i + "->" +expectedKey, - actualKeys.hasNext()); + assertTrue( + "Ran out of actual keys as of : " + i + "->" + expectedKey, actualKeys.hasNext()); assertEquals(expectedKey, actualKeys.next()); - assertEquals("percentiles are off: " + p.toString(), - expectedVals[i], p.get(expectedKey), 1.0D); + assertEquals( + "percentiles are off: " + p.toString(), expectedVals[i], p.get(expectedKey), 1.0D); } // @@ -532,48 +893,108 @@ public void test() throws Exception { assertNull("expected null for stddev", s.getStddev()); assertNull("expected null for sum", s.getSum()); } - - query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", + + query( + "q", + "*:*", + "sort", + i1 + " desc", + "stats", + "true", + "stats.field", "{!percentiles='1,20,50,80,99'}" + tdate_a); - query("q","*:*", "sort",i1+" desc", "stats", "true", - "fq", "{!tag=nothing}-*:*", - "stats.field", "{!key=special_key ex=nothing}stats_dt"); - query("q","*:*", "sort",i1+" desc", "stats", "true", - "f.stats_dt.stats.calcdistinct", "true", - "stats.field", "{!key=special_key}stats_dt"); - query("q","*:*", "sort",i1+" desc", "stats", "true", - "f.stats_dt.stats.calcdistinct", "true", - "fq", "{!tag=xxx}id_i1:[3 TO 9]", - "stats.field", "{!key=special_key}stats_dt", - "stats.field", "{!ex=xxx}stats_dt"); + query( + "q", + "*:*", + "sort", + i1 + " desc", + "stats", + "true", + "fq", + "{!tag=nothing}-*:*", + "stats.field", + "{!key=special_key ex=nothing}stats_dt"); + query( + "q", + "*:*", + "sort", + i1 + " desc", + "stats", + "true", + "f.stats_dt.stats.calcdistinct", + "true", + "stats.field", + "{!key=special_key}stats_dt"); + query( + "q", + "*:*", + "sort", + i1 + " desc", + "stats", + "true", + "f.stats_dt.stats.calcdistinct", + "true", + "fq", + "{!tag=xxx}id_i1:[3 TO 9]", + "stats.field", + "{!key=special_key}stats_dt", + "stats.field", + "{!ex=xxx}stats_dt"); handle.put("stddev", FUZZY); handle.put("sumOfSquares", FUZZY); - query("q","*:*", "sort",i1+" desc", "stats", "true", - // do a really simple query so distributed IDF doesn't cause problems - // when comparing with control collection - "stats.field", "{!lucene key=q_key}" + i1 + "foo_b:true", - "stats.field", "{!func key=f_key}sum(" + tlong +","+i1+")"); - - query("q","*:*", "sort",i1+" desc", "stats", "true", - "stats.field", "stats_dt", - "stats.field", i1, - "stats.field", tdate_a, - "stats.field", tdate_b); - - // only ask for "min" and "mean", explicitly exclude deps of mean, whitebox check shard responses + query( + "q", + "*:*", + "sort", + i1 + " desc", + "stats", + "true", + // do a really simple query so distributed IDF doesn't cause problems + // when comparing with control collection + "stats.field", + "{!lucene key=q_key}" + i1 + "foo_b:true", + "stats.field", + "{!func key=f_key}sum(" + tlong + "," + i1 + ")"); + + query( + "q", + "*:*", + "sort", + i1 + " desc", + "stats", + "true", + "stats.field", + "stats_dt", + "stats.field", + i1, + "stats.field", + tdate_a, + "stats.field", + tdate_b); + + // only ask for "min" and "mean", explicitly exclude deps of mean, whitebox check shard + // responses try { RequestTrackingQueue trackingQueue = new RequestTrackingQueue(); TrackingShardHandlerFactory.setTrackingQueue(jettys, trackingQueue); - rsp = query("q","*:*", "sort",i1+" desc", "stats", "true", - "stats.field", "{!min=true 
sum=false mean=true count=false}" + i1); + rsp = + query( + "q", + "*:*", + "sort", + i1 + " desc", + "stats", + "true", + "stats.field", + "{!min=true sum=false mean=true count=false}" + i1); FieldStatsInfo s = rsp.getFieldStatsInfo().get(i1); assertNotNull("no stats for " + i1, s); // - assertEquals("wrong min", -987.0D, (Double)s.getMin(), 0.0001D ); - assertEquals("wrong mean", 377.153846D, (Double)s.getMean(), 0.0001D ); + assertEquals("wrong min", -987.0D, (Double) s.getMin(), 0.0001D); + assertEquals("wrong mean", 377.153846D, (Double) s.getMean(), 0.0001D); // assertNull("expected null for count", s.getCount()); assertNull("expected null for calcDistinct", s.getCountDistinct()); @@ -587,13 +1008,16 @@ public void test() throws Exception { // sanity check deps relationship for (Stat dep : EnumSet.of(Stat.sum, Stat.count)) { - assertTrue("Purpose of this test is to ensure that asking for some stats works even when the deps " + - "of those stats are explicitly excluded -- but the expected dep relationshp is no longer valid. " + - "ie: who changed the code and didn't change this test?, expected: " + dep, - Stat.mean.getDistribDeps().contains(dep)); + assertTrue( + "Purpose of this test is to ensure that asking for some stats works even when the deps " + + "of those stats are explicitly excluded -- but the expected dep relationship is no longer valid. " + + "ie: who changed the code and didn't change this test?, expected: " + + dep, + Stat.mean.getDistribDeps().contains(dep)); } - // check our shard requests & responses - ensure we didn't get unneccessary stats from every shard + // check our shard requests & responses - ensure we didn't get unnecessary stats from every + // shard int numStatsShardRequests = 0; EnumSet shardStatsExpected = EnumSet.of(Stat.min, Stat.sum, Stat.count); for (List shard : trackingQueue.getAllRequests().values()) { @@ -602,16 +1026,19 @@ public void test() throws Exception { numStatsShardRequests++; for (ShardResponse shardRsp : shardReq.sreq.responses) { NamedList shardStats = - ((NamedList>>) - shardRsp.getSolrResponse().getResponse().get("stats")).get("stats_fields").get(i1); + ((NamedList>>) + shardRsp.getSolrResponse().getResponse().get("stats")) + .get("stats_fields") + .get(i1); assertNotNull("no stard stats for " + i1, shardStats); // - for (Map.Entry entry : shardStats) { + for (Map.Entry entry : shardStats) { Stat found = Stat.forName(entry.getKey()); assertNotNull("found shardRsp stat key we were not expecting: " + entry, found); - assertTrue("found stat we were not expecting: " + entry, shardStatsExpected.contains(found)); - + assertTrue( + "found stat we were not expecting: " + entry, + shardStatsExpected.contains(found)); } } } @@ -621,17 +1048,25 @@ public void test() throws Exception { } finally { TrackingShardHandlerFactory.setTrackingQueue(jettys, null); } - + // only ask for "min", "mean" and "stddev", - rsp = query("q","*:*", "sort",i1+" desc", "stats", "true", - "stats.field", "{!min=true mean=true stddev=true}" + i1); - { // don't leak variables + rsp = + query( + "q", + "*:*", + "sort", + i1 + " desc", + "stats", + "true", + "stats.field", + "{!min=true mean=true stddev=true}" + i1); + { // don't leak variables FieldStatsInfo s = rsp.getFieldStatsInfo().get(i1); assertNotNull("no stats for " + i1, s); // - assertEquals("wrong min", -987.0D, (Double)s.getMin(), 0.0001D ); - assertEquals("wrong mean", 377.153846D, (Double)s.getMean(), 0.0001D ); - assertEquals("wrong stddev", 1271.76215D, s.getStddev(), 0.0001D ); + assertEquals("wrong 
min", -987.0D, (Double) s.getMin(), 0.0001D); + assertEquals("wrong mean", 377.153846D, (Double) s.getMean(), 0.0001D); + assertEquals("wrong stddev", 1271.76215D, s.getStddev(), 0.0001D); // assertNull("expected null for count", s.getCount()); assertNull("expected null for calcDistinct", s.getCountDistinct()); @@ -644,16 +1079,26 @@ public void test() throws Exception { } // request stats, but disable them all via param refs - rsp = query("q","*:*", "sort",i1+" desc", "stats", "true", "doMin", "false", - "stats.field", "{!min=$doMin}" + i1); - { // don't leak variables + rsp = + query( + "q", + "*:*", + "sort", + i1 + " desc", + "stats", + "true", + "doMin", + "false", + "stats.field", + "{!min=$doMin}" + i1); + { // don't leak variables FieldStatsInfo s = rsp.getFieldStatsInfo().get(i1); // stats section should exist, even though stats should be null assertNotNull("no stats for " + i1, s); // - assertNull("expected null for min", s.getMin() ); - assertNull("expected null for mean", s.getMean() ); - assertNull("expected null for stddev", s.getStddev() ); + assertNull("expected null for min", s.getMin()); + assertNull("expected null for mean", s.getMean()); + assertNull("expected null for stddev", s.getStddev()); // assertNull("expected null for count", s.getCount()); assertNull("expected null for calcDistinct", s.getCountDistinct()); @@ -665,38 +1110,46 @@ public void test() throws Exception { assertNull("expected null for cardinality", s.getCardinality()); } - final String[] stats = new String[] { - "min", "max", "sum", "sumOfSquares", "stddev", "mean", "missing", "count" - }; - + final String[] stats = + new String[] {"min", "max", "sum", "sumOfSquares", "stddev", "mean", "missing", "count"}; + // ask for arbitrary pairs of stats for (String stat1 : stats) { for (String stat2 : stats) { // NOTE: stat1 might equal stat2 - good edge case to test for - rsp = query("q","*:*", "sort",i1+" desc", "stats", "true", - "stats.field", "{!" + stat1 + "=true " + stat2 + "=true}" + i1); + rsp = + query( + "q", + "*:*", + "sort", + i1 + " desc", + "stats", + "true", + "stats.field", + "{!" + stat1 + "=true " + stat2 + "=true}" + i1); final List statsExpected = new ArrayList(2); statsExpected.add(stat1); - if ( ! 
stat1.equals(stat2) ) { + if (!stat1.equals(stat2)) { statsExpected.add(stat2); } // ignore the FieldStatsInfo convinience class, and look directly at the NamedList // so we don't need any sort of crazy reflection NamedList svals = - ((NamedList>>) - rsp.getResponse().get("stats")).get("stats_fields").get(i1); + ((NamedList>>) rsp.getResponse().get("stats")) + .get("stats_fields") + .get(i1); assertNotNull("no stats for field " + i1, svals); assertEquals("wrong quantity of stats", statsExpected.size(), svals.size()); - for (String s : statsExpected) { assertNotNull("stat shouldn't be null: " + s, svals.get(s)); - assertTrue("stat should be a Number: " + s + " -> " + svals.get(s).getClass(), - svals.get(s) instanceof Number); + assertTrue( + "stat should be a Number: " + s + " -> " + svals.get(s).getClass(), + svals.get(s) instanceof Number); // some loose assertions since we're iterating over various stats if (svals.get(s) instanceof Double) { Double val = (Double) svals.get(s); @@ -705,110 +1158,148 @@ public void test() throws Exception { assertFalse("stat shouldn't be 0: " + s, val.equals(0.0D)); } else { // count or missing - assertTrue("stat should be count of missing: " + s, - ("count".equals(s) || "missing".equals(s))); - assertTrue("stat should be a Long: " + s + " -> " + svals.get(s).getClass(), - svals.get(s) instanceof Long); + assertTrue( + "stat should be count of missing: " + s, + ("count".equals(s) || "missing".equals(s))); + assertTrue( + "stat should be a Long: " + s + " -> " + svals.get(s).getClass(), + svals.get(s) instanceof Long); Long val = (Long) svals.get(s); assertFalse("stat shouldn't be 0: " + s, val.equals(0L)); } } } } - + // all of these diff ways of asking for min & calcdistinct should have the same result - for (SolrParams p : new SolrParams[] { - params("stats.field", "{!min=true calcdistinct=true}" + i1), - params("stats.calcdistinct", "true", - "stats.field", "{!min=true}" + i1), - params("f."+i1+".stats.calcdistinct", "true", - "stats.field", "{!min=true}" + i1), - params("stats.calcdistinct", "false", - "f."+i1+".stats.calcdistinct", "true", - "stats.field", "{!min=true}" + i1), - params("stats.calcdistinct", "false", - "f."+i1+".stats.calcdistinct", "false", - "stats.field", "{!min=true calcdistinct=true}" + i1), - params("stats.calcdistinct", "false", - "f."+i1+".stats.calcdistinct", "false", - "stats.field", "{!min=true countDistinct=true distinctValues=true}" + i1), - params("stats.field", "{!min=true countDistinct=true distinctValues=true}" + i1), - params("yes", "true", - "stats.field", "{!min=$yes countDistinct=$yes distinctValues=$yes}" + i1), - }) { - - rsp = query(SolrParams.wrapDefaults - (p, params("q","*:*", "sort",i1+" desc", "stats", "true"))); + for (SolrParams p : + new SolrParams[] { + params("stats.field", "{!min=true calcdistinct=true}" + i1), + params("stats.calcdistinct", "true", "stats.field", "{!min=true}" + i1), + params("f." + i1 + ".stats.calcdistinct", "true", "stats.field", "{!min=true}" + i1), + params( + "stats.calcdistinct", + "false", + "f." + i1 + ".stats.calcdistinct", + "true", + "stats.field", + "{!min=true}" + i1), + params( + "stats.calcdistinct", + "false", + "f." + i1 + ".stats.calcdistinct", + "false", + "stats.field", + "{!min=true calcdistinct=true}" + i1), + params( + "stats.calcdistinct", + "false", + "f." 
+ i1 + ".stats.calcdistinct", + "false", + "stats.field", + "{!min=true countDistinct=true distinctValues=true}" + i1), + params("stats.field", "{!min=true countDistinct=true distinctValues=true}" + i1), + params( + "yes", + "true", + "stats.field", + "{!min=$yes countDistinct=$yes distinctValues=$yes}" + i1), + }) { + + rsp = + query( + SolrParams.wrapDefaults( + p, params("q", "*:*", "sort", i1 + " desc", "stats", "true"))); FieldStatsInfo s = rsp.getFieldStatsInfo().get(i1); - assertNotNull(p+" no stats for " + i1, s); + assertNotNull(p + " no stats for " + i1, s); // - assertEquals(p+" wrong min", -987.0D, (Double)s.getMin(), 0.0001D ); - assertEquals(p+" wrong calcDistinct", Long.valueOf(13), s.getCountDistinct()); - assertNotNull(p+" expected non-null list for distinct vals", s.getDistinctValues()); - assertEquals(p+" expected list for distinct vals", 13, s.getDistinctValues().size()); + assertEquals(p + " wrong min", -987.0D, (Double) s.getMin(), 0.0001D); + assertEquals(p + " wrong calcDistinct", Long.valueOf(13), s.getCountDistinct()); + assertNotNull(p + " expected non-null list for distinct vals", s.getDistinctValues()); + assertEquals(p + " expected list for distinct vals", 13, s.getDistinctValues().size()); // - assertNull(p+" expected null for mean", s.getMean() ); - assertNull(p+" expected null for count", s.getCount()); - assertNull(p+" expected null for max", s.getMax()); - assertNull(p+" expected null for missing", s.getMissing()); - assertNull(p+" expected null for stddev", s.getStddev()); - assertNull(p+" expected null for sum", s.getSum()); - assertNull(p+" expected null for percentiles", s.getPercentiles()); - assertNull(p+" expected null for cardinality", s.getCardinality()); - + assertNull(p + " expected null for mean", s.getMean()); + assertNull(p + " expected null for count", s.getCount()); + assertNull(p + " expected null for max", s.getMax()); + assertNull(p + " expected null for missing", s.getMissing()); + assertNull(p + " expected null for stddev", s.getStddev()); + assertNull(p + " expected null for sum", s.getSum()); + assertNull(p + " expected null for percentiles", s.getPercentiles()); + assertNull(p + " expected null for cardinality", s.getCardinality()); } // all of these diff ways of excluding calcdistinct should have the same result - for (SolrParams p : new SolrParams[] { - params("stats.field", "{!min=true calcdistinct=false}" + i1), - params("stats.calcdistinct", "false", - "stats.field", "{!min=true}" + i1), - params("f."+i1+".stats.calcdistinct", "false", - "stats.field", "{!min=true}" + i1), - params("stats.calcdistinct", "true", - "f."+i1+".stats.calcdistinct", "false", - "stats.field", "{!min=true}" + i1), - params("stats.calcdistinct", "true", - "f."+i1+".stats.calcdistinct", "true", - "stats.field", "{!min=true calcdistinct=false}" + i1), - params("stats.calcdistinct", "true", - "f."+i1+".stats.calcdistinct", "true", - "stats.field", "{!min=true countDistinct=false distinctValues=false}" + i1), - }) { - - rsp = query(SolrParams.wrapDefaults - (p, params("q","*:*", "sort",i1+" desc", "stats", "true"))); + for (SolrParams p : + new SolrParams[] { + params("stats.field", "{!min=true calcdistinct=false}" + i1), + params("stats.calcdistinct", "false", "stats.field", "{!min=true}" + i1), + params("f." + i1 + ".stats.calcdistinct", "false", "stats.field", "{!min=true}" + i1), + params( + "stats.calcdistinct", + "true", + "f." 
+ i1 + ".stats.calcdistinct", + "false", + "stats.field", + "{!min=true}" + i1), + params( + "stats.calcdistinct", + "true", + "f." + i1 + ".stats.calcdistinct", + "true", + "stats.field", + "{!min=true calcdistinct=false}" + i1), + params( + "stats.calcdistinct", + "true", + "f." + i1 + ".stats.calcdistinct", + "true", + "stats.field", + "{!min=true countDistinct=false distinctValues=false}" + i1), + }) { + + rsp = + query( + SolrParams.wrapDefaults( + p, params("q", "*:*", "sort", i1 + " desc", "stats", "true"))); FieldStatsInfo s = rsp.getFieldStatsInfo().get(i1); - assertNotNull(p+" no stats for " + i1, s); + assertNotNull(p + " no stats for " + i1, s); // - assertEquals(p+" wrong min", -987.0D, (Double)s.getMin(), 0.0001D ); + assertEquals(p + " wrong min", -987.0D, (Double) s.getMin(), 0.0001D); // - assertNull(p+" expected null for calcDistinct", s.getCountDistinct()); - assertNull(p+" expected null for distinct vals", s.getDistinctValues()); + assertNull(p + " expected null for calcDistinct", s.getCountDistinct()); + assertNull(p + " expected null for distinct vals", s.getDistinctValues()); // - assertNull(p+" expected null for mean", s.getMean() ); - assertNull(p+" expected null for count", s.getCount()); - assertNull(p+" expected null for max", s.getMax()); - assertNull(p+" expected null for missing", s.getMissing()); - assertNull(p+" expected null for stddev", s.getStddev()); - assertNull(p+" expected null for sum", s.getSum()); - assertNull(p+" expected null for percentiles", s.getPercentiles()); - assertNull(p+" expected null for cardinality", s.getCardinality()); + assertNull(p + " expected null for mean", s.getMean()); + assertNull(p + " expected null for count", s.getCount()); + assertNull(p + " expected null for max", s.getMax()); + assertNull(p + " expected null for missing", s.getMissing()); + assertNull(p + " expected null for stddev", s.getStddev()); + assertNull(p + " expected null for sum", s.getSum()); + assertNull(p + " expected null for percentiles", s.getPercentiles()); + assertNull(p + " expected null for cardinality", s.getCardinality()); } // this field doesn't exist in any doc in the result set. 
// ensure we get expected values for the stats we ask for, but null for the stats - rsp = query("q","*:*", "sort",i1+" desc", "stats", "true", - "stats.field", "{!min=true mean=true stddev=true}does_not_exist_i"); - { // don't leak variables + rsp = + query( + "q", + "*:*", + "sort", + i1 + " desc", + "stats", + "true", + "stats.field", + "{!min=true mean=true stddev=true}does_not_exist_i"); + { // don't leak variables FieldStatsInfo s = rsp.getFieldStatsInfo().get("does_not_exist_i"); assertNotNull("no stats for bogus field", s); // things we explicit expect because we asked for them // NOTE: min is expected to be null even though requested because of no values - assertEquals("wrong min", null, s.getMin()); - assertTrue("mean should be NaN", ((Double)s.getMean()).isNaN()); - assertEquals("wrong stddev", 0.0D, s.getStddev(), 0.0D ); + assertEquals("wrong min", null, s.getMin()); + assertTrue("mean should be NaN", ((Double) s.getMean()).isNaN()); + assertEquals("wrong stddev", 0.0D, s.getStddev(), 0.0D); // things that we didn't ask for, so they better be null assertNull("expected null for count", s.getCount()); @@ -823,9 +1314,9 @@ public void test() throws Exception { // look at stats on non numeric fields // - // not all stats are supported on every field type, so some of these permutations will - // result in no stats being computed but this at least lets us sanity check that for each - // of these field+stats(s) combinations we get consistent results between the distribted + // not all stats are supported on every field type, so some of these permutations will + // result in no stats being computed but this at least lets us sanity check that for each + // of these field+stats(s) combinations we get consistent results between the distributed // request and the single node situation. // // NOTE: percentiles excluded because it doesn't support simple 'true/false' syntax @@ -834,45 +1325,55 @@ int numTotalStatQueries = 0; // don't go overboard, just do all permutations of 1 or 2 stat params, for each field & query - final int numStatParamsAtOnce = 2; + final int numStatParamsAtOnce = 2; for (int numParams = 1; numParams <= numStatParamsAtOnce; numParams++) { for (EnumSet set : new StatSetCombinations(numParams, allStats)) { - for (String field : new String[] { - "foo_f", i1, tlong, tdate_a, oddField, "foo_sev_enum", - // fields that no doc has any value in - "bogus___s", "bogus___f", "bogus___i", "bogus___tdt", "bogus___sev_enum" - }) { - - for ( String q : new String[] { - "*:*", // all docs - "bogus___s:bogus", // no docs - "id:" + random().nextInt(50 ), // 0 or 1 doc... - "id:" + random().nextInt(50 ), - "id:" + random().nextInt(100), - "id:" + random().nextInt(100), - "id:" + random().nextInt(200) + for (String field : + new String[] { + "foo_f", + i1, + tlong, + tdate_a, + oddField, + "foo_sev_enum", + // fields that no doc has any value in + "bogus___s", + "bogus___f", + "bogus___i", + "bogus___tdt", + "bogus___sev_enum" }) { + for (String q : + new String[] { + "*:*", // all docs + "bogus___s:bogus", // no docs + "id:" + random().nextInt(50), // 0 or 1 doc... 
+ "id:" + random().nextInt(50), + "id:" + random().nextInt(100), + "id:" + random().nextInt(100), + "id:" + random().nextInt(200) + }) { + // EnumSets use natural ordering, we want to randomize the order of the params List combo = new ArrayList(set); Collections.shuffle(combo, random()); - + StringBuilder paras = new StringBuilder("{!key=k "); - + for (Stat stat : combo) { paras.append(stat + "=true "); } - + paras.append("}").append(field); numTotalStatQueries++; - rsp = query("q", q, "rows", "0", "stats", "true", - "stats.field", paras.toString()); + rsp = query("q", q, "rows", "0", "stats", "true", "stats.field", paras.toString()); // simple assert, mostly relying on comparison with single shard FieldStatsInfo s = rsp.getFieldStatsInfo().get("k"); assertNotNull(s); - // TODO: if we had a programatic way to determine what stats are supported + // TODO: if we had a programatic way to determine what stats are supported // by what field types, we could make more confident asserts here. } } @@ -880,73 +1381,177 @@ public void test() throws Exception { } handle.remove("stddev"); handle.remove("sumOfSquares"); - assertEquals("Sanity check failed: either test broke, or test changed, or you adjusted Stat enum" + - " (adjust constant accordingly if intentional)", - 5082, numTotalStatQueries); + assertEquals( + "Sanity check failed: either test broke, or test changed, or you adjusted Stat enum" + + " (adjust constant accordingly if intentional)", + 5082, + numTotalStatQueries); /*** TODO: the failure may come back in "exception" - try { - // test error produced for field that is invalid for schema - query("q","*:*", "rows",100, "facet","true", "facet.field",invalidField, "facet.mincount",2); - TestCase.fail("SolrServerException expected for invalid field that is not in schema"); - } catch (SolrServerException ex) { - // expected - } - ***/ + * try { + * // test error produced for field that is invalid for schema + * query("q","*:*", "rows",100, "facet","true", "facet.field",invalidField, "facet.mincount",2); + * TestCase.fail("SolrServerException expected for invalid field that is not in schema"); + * } catch (SolrServerException ex) { + * // expected + * } + ***/ // Try to get better coverage for refinement queries by turning off over requesting. // This makes it much more likely that we may not get the top facet values and hence // we turn of that checking. 
handle.put("facet_fields", SKIPVAL); - query("q","*:*", "rows",0, "facet","true", "facet.field",t1,"facet.limit",5, "facet.shard.limit",5); + query( + "q", + "*:*", + "rows", + 0, + "facet", + "true", + "facet.field", + t1, + "facet.limit", + 5, + "facet.shard.limit", + 5); // check a complex key name - query("q","*:*", "rows",0, "facet","true", "facet.field","{!key='$a b/c \\' \\} foo'}"+t1,"facet.limit",5, "facet.shard.limit",5); - query("q","*:*", "rows",0, "facet","true", "facet.field","{!key='$a'}"+t1,"facet.limit",5, "facet.shard.limit",5); + query( + "q", + "*:*", + "rows", + 0, + "facet", + "true", + "facet.field", + "{!key='$a b/c \\' \\} foo'}" + t1, + "facet.limit", + 5, + "facet.shard.limit", + 5); + query( + "q", + "*:*", + "rows", + 0, + "facet", + "true", + "facet.field", + "{!key='$a'}" + t1, + "facet.limit", + 5, + "facet.shard.limit", + 5); handle.remove("facet_fields"); // Make sure there is no macro expansion for field values - query("q","*:*", "rows",0, "facet","true", "facet.field",s1,"facet.limit",5, "facet.shard.limit",5); - query("q","*:*", "rows",0, "facet","true", "facet.field",s1,"facet.limit",5, "facet.shard.limit",5, "expandMacros", "true"); - query("q","*:*", "rows",0, "facet","true", "facet.field",s1,"facet.limit",5, "facet.shard.limit",5, "expandMacros", "false"); + query( + "q", + "*:*", + "rows", + 0, + "facet", + "true", + "facet.field", + s1, + "facet.limit", + 5, + "facet.shard.limit", + 5); + query( + "q", + "*:*", + "rows", + 0, + "facet", + "true", + "facet.field", + s1, + "facet.limit", + 5, + "facet.shard.limit", + 5, + "expandMacros", + "true"); + query( + "q", + "*:*", + "rows", + 0, + "facet", + "true", + "facet.field", + s1, + "facet.limit", + 5, + "facet.shard.limit", + 5, + "expandMacros", + "false"); // Macro expansion should still work for the parameters - query("q","*:*", "rows",0, "facet","true", "facet.field","${foo}", "f.${foo}.mincount", 1, "foo", s1); - query("q","*:*", "rows",0, "facet","true", "facet.field","${foo}", "f.${foo}.mincount", 1, "foo", s1, "expandMacros", "true"); + query( + "q", + "*:*", + "rows", + 0, + "facet", + "true", + "facet.field", + "${foo}", + "f.${foo}.mincount", + 1, + "foo", + s1); + query( + "q", + "*:*", + "rows", + 0, + "facet", + "true", + "facet.field", + "${foo}", + "f.${foo}.mincount", + 1, + "foo", + s1, + "expandMacros", + "true"); // index the same document to two servers and make sure things // don't blow up. 
- if (clients.size()>=2) { - index(id,100, i1, 107 ,t1,"oh no, a duplicate!"); - for (int i=0; i<clients.size(); i++) { - index_specific(i, id,100, i1, 107 ,t1,"oh no, a duplicate!"); + if (clients.size() >= 2) { + index(id, 100, i1, 107, t1, "oh no, a duplicate!"); + for (int i = 0; i < clients.size(); i++) { + index_specific(i, id, 100, i1, 107, t1, "oh no, a duplicate!"); } commit(); - query("q","duplicate", "hl","true", "hl.fl", t1); - query("q","fox duplicate horses", "hl","true", "hl.fl", t1); - query("q","*:*", "rows",100); + query("q", "duplicate", "hl", "true", "hl.fl", t1); + query("q", "fox duplicate horses", "hl", "true", "hl.fl", t1); + query("q", "*:*", "rows", 100); } - //SOLR 3161 ensure shards.qt=/update fails (anything but search handler really) + // SOLR 3161 ensure shards.qt=/update fails (anything but search handler really) // Also see TestRemoteStreaming#testQtUpdateFails() - //SolrException e = expectThrows(SolrException.class, () -> { + // SolrException e = expectThrows(SolrException.class, () -> { // ignoreException("isShard is only acceptable"); // query("q","*:*","shards.qt","/update","stream.body","*:*"); - //}); + // }); unIgnoreException("isShard is only acceptable"); // test debugging // handle.put("explain", UNORDERED); - handle.put("explain", SKIPVAL); // internal docids differ, idf differs w/o global idf + handle.put("explain", SKIPVAL); // internal docids differ, idf differs w/o global idf handle.put("debug", UNORDERED); handle.put("time", SKIPVAL); - handle.put("track", SKIP); //track is not included in single node search - query("q","now their fox sat had put","fl","*,score",CommonParams.DEBUG_QUERY, "true"); + handle.put("track", SKIP); // track is not included in single node search + query("q", "now their fox sat had put", "fl", "*,score", CommonParams.DEBUG_QUERY, "true"); query("q", "id_i1:[1 TO 5]", CommonParams.DEBUG_QUERY, "true"); query("q", "id_i1:[1 TO 5]", CommonParams.DEBUG, CommonParams.TIMING); query("q", "id_i1:[1 TO 5]", CommonParams.DEBUG, CommonParams.RESULTS); query("q", "id_i1:[1 TO 5]", CommonParams.DEBUG, CommonParams.QUERY); // SOLR-6545, wild card field list - indexr(id, "19", "text", "d", "cat_a_sS", "1" ,t1, "2"); + indexr(id, "19", "text", "d", "cat_a_sS", "1", t1, "2"); commit(); rsp = query("q", "id:19", "fl", "id", "fl", "*a_sS"); @@ -963,10 +1568,11 @@ public void test() throws Exception { rsp = queryServer(q); NamedList<?> sinfo = (NamedList<?>) rsp.getResponse().get(ShardParams.SHARDS_INFO); String shards = getShardsString(); - int cnt = StringUtils.countMatches(shards, ",")+1; - + int cnt = StringUtils.countMatches(shards, ",") + 1; + assertNotNull("missing shard info", sinfo); - assertEquals("should have an entry for each shard ["+sinfo+"] "+shards, cnt, sinfo.size()); + assertEquals( + "should have an entry for each shard [" + sinfo + "] " + shards, cnt, sinfo.size()); // test shards.tolerant=true @@ -974,8 +1580,8 @@ public void test() throws Exception { List<SolrClient> upClients = Collections.synchronizedList(new ArrayList<>(clients)); List<JettySolrRunner> downJettys = Collections.synchronizedList(new ArrayList<>()); List<String> upShards = Collections.synchronizedList(new ArrayList<>(Arrays.asList(shardsArr))); - - int cap = Math.max(upJettys.size() - 1, 1); + + int cap = Math.max(upJettys.size() - 1, 1); int numDownServers = random().nextInt(cap); for (int i = 0; i < numDownServers; i++) { @@ -990,176 +1596,325 @@ public void test() throws Exception { downJetty.stop(); downJettys.add(downJetty); } - + Thread.sleep(100); - queryPartialResults(upShards, upClients, - "q", "*:*", - "facet", "true", - "facet.field", t1, - "facet.field", t1, - "facet.limit", 5, - 
ShardParams.SHARDS_INFO, "true", - ShardParams.SHARDS_TOLERANT, "true"); - - queryPartialResults(upShards, upClients, - "q", "*:*", - "facet", "true", - "facet.query", i1 + ":[1 TO 50]", - "facet.query", i1 + ":[1 TO 50]", - ShardParams.SHARDS_INFO, "true", - ShardParams.SHARDS_TOLERANT, "true"); + queryPartialResults( + upShards, + upClients, + "q", + "*:*", + "facet", + "true", + "facet.field", + t1, + "facet.field", + t1, + "facet.limit", + 5, + ShardParams.SHARDS_INFO, + "true", + ShardParams.SHARDS_TOLERANT, + "true"); + + queryPartialResults( + upShards, + upClients, + "q", + "*:*", + "facet", + "true", + "facet.query", + i1 + ":[1 TO 50]", + "facet.query", + i1 + ":[1 TO 50]", + ShardParams.SHARDS_INFO, + "true", + ShardParams.SHARDS_TOLERANT, + "true"); // test group query - queryPartialResults(upShards, upClients, - "q", "*:*", - "rows", 100, - "fl", "id," + i1, - "group", "true", - "group.query", t1 + ":kings OR " + t1 + ":eggs", - "group.limit", 10, - "sort", i1 + " asc, id asc", - CommonParams.TIME_ALLOWED, 10000, - ShardParams.SHARDS_INFO, "true", - ShardParams.SHARDS_TOLERANT, "true"); - - queryPartialResults(upShards, upClients, - "q", "*:*", - "stats", "true", - "stats.field", i1, - ShardParams.SHARDS_INFO, "true", - ShardParams.SHARDS_TOLERANT, "true"); - - queryPartialResults(upShards, upClients, - "q", "toyata", - "spellcheck", "true", - "spellcheck.q", "toyata", - "qt", "/spellCheckCompRH_Direct", - "shards.qt", "/spellCheckCompRH_Direct", - ShardParams.SHARDS_INFO, "true", - ShardParams.SHARDS_TOLERANT, "true"); + queryPartialResults( + upShards, + upClients, + "q", + "*:*", + "rows", + 100, + "fl", + "id," + i1, + "group", + "true", + "group.query", + t1 + ":kings OR " + t1 + ":eggs", + "group.limit", + 10, + "sort", + i1 + " asc, id asc", + CommonParams.TIME_ALLOWED, + 10000, + ShardParams.SHARDS_INFO, + "true", + ShardParams.SHARDS_TOLERANT, + "true"); + + queryPartialResults( + upShards, + upClients, + "q", + "*:*", + "stats", + "true", + "stats.field", + i1, + ShardParams.SHARDS_INFO, + "true", + ShardParams.SHARDS_TOLERANT, + "true"); + + queryPartialResults( + upShards, + upClients, + "q", + "toyata", + "spellcheck", + "true", + "spellcheck.q", + "toyata", + "qt", + "/spellCheckCompRH_Direct", + "shards.qt", + "/spellCheckCompRH_Direct", + ShardParams.SHARDS_INFO, + "true", + ShardParams.SHARDS_TOLERANT, + "true"); // restart the jettys for (JettySolrRunner downJetty : downJettys) { downJetty.start(); } - // This index has the same number for every field - + // TODO: This test currently fails because debug info is obtained only // on shards with matches. 
// query("q","matchesnothing","fl","*,score", "debugQuery", "true"); - + // Thread.sleep(10000000000L); del("*:*"); // delete all docs and test stats request commit(); try { - query("q", "*:*", "stats", "true", - "stats.field", "stats_dt", - "stats.field", i1, - "stats.field", tdate_a, - "stats.field", tdate_b, - "stats.calcdistinct", "true"); + query( + "q", + "*:*", + "stats", + "true", + "stats.field", + "stats_dt", + "stats.field", + i1, + "stats.field", + tdate_a, + "stats.field", + tdate_b, + "stats.calcdistinct", + "true"); } catch (BaseHttpSolrClient.RemoteSolrException e) { - if (e.getMessage().startsWith("java.lang.NullPointerException")) { + if (e.getMessage().startsWith("java.lang.NullPointerException")) { fail("NullPointerException with stats request on empty index"); - } else { + } else { throw e; } } - + String fieldName = "severity"; indexr("id", "1", fieldName, "Not Available"); indexr("id", "2", fieldName, "Low"); indexr("id", "3", fieldName, "Medium"); indexr("id", "4", fieldName, "High"); indexr("id", "5", fieldName, "Critical"); - + commit(); - + rsp = query("q", "*:*", "stats", "true", "stats.field", fieldName); - assertEquals(new EnumFieldValue(0, "Not Available"), - rsp.getFieldStatsInfo().get(fieldName).getMin()); - query("q", "*:*", "stats", "true", "stats.field", fieldName, - StatsParams.STATS_CALC_DISTINCT, "true"); - assertEquals(new EnumFieldValue(11, "Critical"), - rsp.getFieldStatsInfo().get(fieldName).getMax()); + assertEquals( + new EnumFieldValue(0, "Not Available"), rsp.getFieldStatsInfo().get(fieldName).getMin()); + query( + "q", + "*:*", + "stats", + "true", + "stats.field", + fieldName, + StatsParams.STATS_CALC_DISTINCT, + "true"); + assertEquals( + new EnumFieldValue(11, "Critical"), rsp.getFieldStatsInfo().get(fieldName).getMax()); handle.put("severity", UNORDERED); // this is stupid, but stats.facet doesn't guarantee order - query("q", "*:*", "stats", "true", "stats.field", fieldName, - "stats.facet", fieldName); + query("q", "*:*", "stats", "true", "stats.field", fieldName, "stats.facet", fieldName); } private void testMinExactCount() throws Exception { - assertIsExactHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", CommonParams.MIN_EXACT_COUNT, "200", CommonParams.ROWS, "2", CommonParams.SORT, "score desc, id asc"); - assertIsExactHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", CommonParams.MIN_EXACT_COUNT, "-1", CommonParams.ROWS, "2", CommonParams.SORT, "score desc, id asc"); - assertIsExactHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", CommonParams.MIN_EXACT_COUNT, "1", CommonParams.ROWS, "200", CommonParams.SORT, "score desc, id asc"); - assertIsExactHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", "facet", "true", "facet.field", s1, CommonParams.MIN_EXACT_COUNT,"1", CommonParams.ROWS, "200", CommonParams.SORT, "score desc, id asc"); - assertIsExactHitCount("q","{!cache=false}id:1", CommonParams.MIN_EXACT_COUNT,"1", CommonParams.ROWS, "1"); - assertApproximatedHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", CommonParams.MIN_EXACT_COUNT,"2", CommonParams.ROWS, "2", CommonParams.SORT, "score desc, id asc"); + assertIsExactHitCount( + "q", + "{!cache=false}dog OR men OR cow OR country OR dumpty", + CommonParams.MIN_EXACT_COUNT, + "200", + CommonParams.ROWS, + "2", + CommonParams.SORT, + "score desc, id asc"); + assertIsExactHitCount( + "q", + "{!cache=false}dog OR men OR cow OR country OR dumpty", + CommonParams.MIN_EXACT_COUNT, + "-1", + 
CommonParams.ROWS, + "2", + CommonParams.SORT, + "score desc, id asc"); + assertIsExactHitCount( + "q", + "{!cache=false}dog OR men OR cow OR country OR dumpty", + CommonParams.MIN_EXACT_COUNT, + "1", + CommonParams.ROWS, + "200", + CommonParams.SORT, + "score desc, id asc"); + assertIsExactHitCount( + "q", + "{!cache=false}dog OR men OR cow OR country OR dumpty", + "facet", + "true", + "facet.field", + s1, + CommonParams.MIN_EXACT_COUNT, + "1", + CommonParams.ROWS, + "200", + CommonParams.SORT, + "score desc, id asc"); + assertIsExactHitCount( + "q", "{!cache=false}id:1", CommonParams.MIN_EXACT_COUNT, "1", CommonParams.ROWS, "1"); + assertApproximatedHitCount( + "q", + "{!cache=false}dog OR men OR cow OR country OR dumpty", + CommonParams.MIN_EXACT_COUNT, + "2", + CommonParams.ROWS, + "2", + CommonParams.SORT, + "score desc, id asc"); } - + private void assertIsExactHitCount(Object... requestParams) throws Exception { QueryResponse response = query(requestParams); - assertNotNull("Expecting exact hit count in response: " + response.getResults().toString(), + assertNotNull( + "Expecting exact hit count in response: " + response.getResults().toString(), response.getResults().getNumFoundExact()); - assertTrue("Expecting exact hit count in response: " + response.getResults().toString(), + assertTrue( + "Expecting exact hit count in response: " + response.getResults().toString(), response.getResults().getNumFoundExact()); } - - private void assertApproximatedHitCount(Object...requestParams) throws Exception { + + private void assertApproximatedHitCount(Object... requestParams) throws Exception { handle.put("numFound", SKIPVAL); QueryResponse response = query(requestParams); - assertNotNull("Expecting numFoundExact in response: " + response.getResults().toString(), + assertNotNull( + "Expecting numFoundExact in response: " + response.getResults().toString(), response.getResults().getNumFoundExact()); - assertFalse("Expecting aproximated results in response: " + response.getResults().toString(), + assertFalse( + "Expecting approximated results in response: " + response.getResults().toString(), response.getResults().getNumFoundExact()); handle.remove("numFound", SKIPVAL); } /** comparing results with facet.method=uif */ - private void queryAndCompareUIF(Object ... params) throws Exception { + private void queryAndCompareUIF(Object... params) throws Exception { final QueryResponse expect = query(params); - - final Object[] newParams = Arrays.copyOf(params, params.length+2); - newParams[newParams.length-2] = "facet.method"; - newParams[newParams.length-1] = "uif"; + + final Object[] newParams = Arrays.copyOf(params, params.length + 2); + newParams[newParams.length - 2] = "facet.method"; + newParams[newParams.length - 1] = "uif"; final QueryResponse uifResult = query(newParams); compareResponses(expect, uifResult); } protected void checkMinCountsField(List<FacetField.Count> counts, Object[] pairs) { - assertEquals("There should be exactly " + pairs.length / 2 + " returned counts. There were: " + counts.size(), counts.size(), pairs.length / 2); - assertTrue("Variable len param must be an even number, it was: " + pairs.length, (pairs.length % 2) == 0); - for (int pairs_idx = 0, counts_idx = 0; pairs_idx < pairs.length; pairs_idx += 2, counts_idx++) { + assertEquals( + "There should be exactly " + + pairs.length / 2 + + " returned counts. 
There were: " + + counts.size(), + counts.size(), + pairs.length / 2); + assertTrue( + "Variable len param must be an even number, it was: " + pairs.length, + (pairs.length % 2) == 0); + for (int pairs_idx = 0, counts_idx = 0; + pairs_idx < pairs.length; + pairs_idx += 2, counts_idx++) { String act_name = counts.get(counts_idx).getName(); long act_count = counts.get(counts_idx).getCount(); String exp_name = (String) pairs[pairs_idx]; long exp_count = (long) pairs[pairs_idx + 1]; - assertEquals("Expected ordered entry " + exp_name + " at position " + counts_idx + " got " + act_name, act_name, exp_name); - assertEquals("Expected count for entry: " + exp_name + " at position " + counts_idx + " got " + act_count, act_count, exp_count); + assertEquals( + "Expected ordered entry " + exp_name + " at position " + counts_idx + " got " + act_name, + act_name, + exp_name); + assertEquals( + "Expected count for entry: " + + exp_name + + " at position " + + counts_idx + + " got " + + act_count, + act_count, + exp_count); } } protected void checkMinCountsRange(List counts, Object[] pairs) { - assertEquals("There should be exactly " + pairs.length / 2 + " returned counts. There were: " + counts.size(), counts.size(), pairs.length / 2); - assertTrue("Variable len param must be an even number, it was: " + pairs.length, (pairs.length % 2) == 0); - for (int pairs_idx = 0, counts_idx = 0; pairs_idx < pairs.length; pairs_idx += 2, counts_idx++) { + assertEquals( + "There should be exactly " + + pairs.length / 2 + + " returned counts. There were: " + + counts.size(), + counts.size(), + pairs.length / 2); + assertTrue( + "Variable len param must be an even number, it was: " + pairs.length, + (pairs.length % 2) == 0); + for (int pairs_idx = 0, counts_idx = 0; + pairs_idx < pairs.length; + pairs_idx += 2, counts_idx++) { String act_name = counts.get(counts_idx).getValue(); long act_count = counts.get(counts_idx).getCount(); String exp_name = (String) pairs[pairs_idx]; long exp_count = (long) pairs[pairs_idx + 1]; - assertEquals("Expected ordered entry " + exp_name + " at position " + counts_idx + " got " + act_name, act_name, exp_name); - assertEquals("Expected count for entry: " + exp_name + " at position " + counts_idx + " got " + act_count, act_count, exp_count); + assertEquals( + "Expected ordered entry " + exp_name + " at position " + counts_idx + " got " + act_name, + act_name, + exp_name); + assertEquals( + "Expected count for entry: " + + exp_name + + " at position " + + counts_idx + + " got " + + act_count, + act_count, + exp_count); } } - protected void queryPartialResults(final List upShards, - final List upClients, - Object... q) throws Exception { - + protected void queryPartialResults( + final List upShards, final List upClients, Object... 
q) throws Exception { + final ModifiableSolrParams params = new ModifiableSolrParams(); for (int i = 0; i < q.length; i += 2) { @@ -1169,7 +1924,7 @@ protected void queryPartialResults(final List<String> upShards, params.set("distrib", "false"); final QueryResponse controlRsp = controlClient.query(params); // if time.allowed is specified then even a control response can return a partialResults header - if (params.get(CommonParams.TIME_ALLOWED) == null) { + if (params.get(CommonParams.TIME_ALLOWED) == null) { validateControlData(controlRsp); } @@ -1185,42 +1940,44 @@ protected void queryPartialResults(final List<String> upShards, if (stress > 0) { log.info("starting stress..."); - Set<Future<Object>> pending = new HashSet<>();; + Set<Future<Object>> pending = new HashSet<>(); + ; ExecutorCompletionService<Object> cs = new ExecutorCompletionService<>(executor); @SuppressWarnings("unchecked") Callable<Object>[] threads = (Callable<Object>[]) Array.newInstance(Callable.class, nThreads); for (int i = 0; i < threads.length; i++) { - threads[i] = new Callable<>() { - @Override - public Object call() { - for (int j = 0; j < stress; j++) { - int which = r.nextInt(upClients.size()); - SolrClient client = upClients.get(which); - try { - QueryResponse rsp = client.query(new ModifiableSolrParams(params)); - if (verifyStress) { - comparePartialResponses(rsp, controlRsp, upShards); + threads[i] = + new Callable<>() { + @Override + public Object call() { + for (int j = 0; j < stress; j++) { + int which = r.nextInt(upClients.size()); + SolrClient client = upClients.get(which); + try { + QueryResponse rsp = client.query(new ModifiableSolrParams(params)); + if (verifyStress) { + comparePartialResponses(rsp, controlRsp, upShards); + } + } catch (SolrServerException | IOException e) { + throw new RuntimeException(e); + } } - } catch (SolrServerException | IOException e) { - throw new RuntimeException(e); + return null; } - } - return null; - } - }; + }; pending.add(cs.submit(threads[i])); } - + while (pending.size() > 0) { Future<Object> future = cs.take(); pending.remove(future); future.get(); } - } } - protected QueryResponse queryRandomUpServer(ModifiableSolrParams params, List<SolrClient> upClients) + protected QueryResponse queryRandomUpServer( + ModifiableSolrParams params, List<SolrClient> upClients) throws SolrServerException, IOException { // query a random "up" server SolrClient client; @@ -1235,80 +1992,118 @@ protected QueryResponse queryRandomUpServer(ModifiableSolrParams params, List<SolrClient> - protected void comparePartialResponses(QueryResponse rsp, QueryResponse controlRsp, List<String> upShards) - { + protected void comparePartialResponses( + QueryResponse rsp, QueryResponse controlRsp, List<String> upShards) { NamedList<?> sinfo = (NamedList<?>) rsp.getResponse().get(ShardParams.SHARDS_INFO); assertNotNull("missing shard info", sinfo); - assertEquals("should have an entry for each shard ["+sinfo+"] "+shards, shardsArr.length, sinfo.size()); + assertEquals( + "should have an entry for each shard [" + sinfo + "] " + shards, + shardsArr.length, + sinfo.size()); // identify each one - for (Map.Entry<String,?> entry : sinfo) { + for (Map.Entry<String, ?> entry : sinfo) { String shard = entry.getKey(); NamedList<?> info = (NamedList<?>) entry.getValue(); boolean found = false; - for(int i=0; i<shardsArr.length; i++) { - SolrException e1 = expectThrows(SolrException.class, () -> { - SolrQuery query = new SolrQuery(); - query.setParam("start", "non_numeric_value").setQuery("*"); - QueryResponse resp = query(query); - }); + SolrException e1 = + expectThrows( + SolrException.class, + () -> { + SolrQuery query = new SolrQuery(); + query.setParam("start", "non_numeric_value").setQuery("*"); + QueryResponse resp = query(query); + }); assertEquals(ErrorCode.BAD_REQUEST.code, e1.code()); - SolrException e2 =
expectThrows(SolrException.class, () -> { - SolrQuery query = new SolrQuery(); - query.setStart(-1).setQuery("*"); - QueryResponse resp = query(query); - }); + SolrException e2 = + expectThrows( + SolrException.class, + () -> { + SolrQuery query = new SolrQuery(); + query.setStart(-1).setQuery("*"); + QueryResponse resp = query(query); + }); assertEquals(ErrorCode.BAD_REQUEST.code, e2.code()); - SolrException e3 = expectThrows(SolrException.class, () -> { - SolrQuery query = new SolrQuery(); - query.setRows(-1).setStart(0).setQuery("*"); - QueryResponse resp = query(query); - }); + SolrException e3 = + expectThrows( + SolrException.class, + () -> { + SolrQuery query = new SolrQuery(); + query.setRows(-1).setStart(0).setQuery("*"); + QueryResponse resp = query(query); + }); assertEquals(ErrorCode.BAD_REQUEST.code, e3.code()); - SolrException e4 = expectThrows(SolrException.class, () -> { - SolrQuery query = new SolrQuery(); - query.setParam("rows", "non_numeric_value").setQuery("*"); - QueryResponse resp = query(query); - }); + SolrException e4 = + expectThrows( + SolrException.class, + () -> { + SolrQuery query = new SolrQuery(); + query.setParam("rows", "non_numeric_value").setQuery("*"); + QueryResponse resp = query(query); + }); assertEquals(ErrorCode.BAD_REQUEST.code, e4.code()); resetExceptionIgnores(); diff --git a/solr/core/src/test/org/apache/solr/TestDocumentBuilder.java b/solr/core/src/test/org/apache/solr/TestDocumentBuilder.java index 8da8fe747ae..a0cdf24de2e 100644 --- a/solr/core/src/test/org/apache/solr/TestDocumentBuilder.java +++ b/solr/core/src/test/org/apache/solr/TestDocumentBuilder.java @@ -21,11 +21,9 @@ import java.util.Collection; import java.util.Iterator; import java.util.List; - import org.apache.solr.common.SolrInputDocument; import org.junit.Test; - public class TestDocumentBuilder extends SolrTestCase { @Test @@ -40,22 +38,22 @@ public void testDeepCopy() throws IOException { list.add(33); list.add(20); doc.addField("field5", list); - + SolrInputDocument clone = doc.deepCopy(); - - System.out.println("doc1: "+ doc); - System.out.println("clone: "+ clone); - + + System.out.println("doc1: " + doc); + System.out.println("clone: " + clone); + assertNotSame(doc, clone); - + Collection<String> fieldNames = doc.getFieldNames(); for (String name : fieldNames) { Collection<Object> values = doc.getFieldValues(name); Collection<Object> cloneValues = clone.getFieldValues(name); - + assertEquals(values.size(), cloneValues.size()); assertNotSame(values, cloneValues); - + Iterator<Object> cloneIt = cloneValues.iterator(); for (Object value : values) { Object cloneValue = cloneIt.next(); @@ -63,5 +61,4 @@ public void testDeepCopy() throws IOException { } } } - } diff --git a/solr/core/src/test/org/apache/solr/TestGroupingSearch.java b/solr/core/src/test/org/apache/solr/TestGroupingSearch.java index fd12ec682fc..42ed7de1610 100644 --- a/solr/core/src/test/org/apache/solr/TestGroupingSearch.java +++ b/solr/core/src/test/org/apache/solr/TestGroupingSearch.java @@ -16,6 +16,19 @@ */ package org.apache.solr; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.lang.invoke.MethodHandles; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; import org.apache.solr.client.solrj.impl.BinaryResponseParser; import org.apache.solr.common.SolrException; import 
org.apache.solr.common.params.CommonParams; @@ -36,20 +49,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.lang.invoke.MethodHandles; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; - public class TestGroupingSearch extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -63,8 +62,9 @@ public class TestGroupingSearch extends SolrTestCaseJ4 { @BeforeClass public static void beforeTests() throws Exception { // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); - + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); + // force LogDocMergePolicy so that we get a predictable doc order // when doing unsorted group collection systemSetPropertySolrTestsMergePolicyFactory(LogDocMergePolicyFactory.class.getName()); @@ -86,103 +86,196 @@ public void cleanIndex() { @Test public void testGroupingGroupSortingScore_basic() { - assertU(add(doc("id", "1", "id_i", "1", "name", "author1", "title", "a book title", "group_i", "1"))); - assertU(add(doc("id", "2", "id_i", "2", "name", "author1", "title", "the title", "group_i", "2"))); - assertU(add(doc("id", "3", "id_i", "3", "name", "author2", "title", "a book title", "group_i", "1"))); + assertU( + add( + doc( + "id", + "1", + "id_i", + "1", + "name", + "author1", + "title", + "a book title", + "group_i", + "1"))); + assertU( + add(doc("id", "2", "id_i", "2", "name", "author1", "title", "the title", "group_i", "2"))); + assertU( + add( + doc( + "id", + "3", + "id_i", + "3", + "name", + "author2", + "title", + "a book title", + "group_i", + "1"))); assertU(add(doc("id", "4", "id_i", "4", "name", "author2", "title", "title", "group_i", "2"))); - assertU(add(doc("id", "5", "id_i", "5", "name", "author3", "title", "the title of a title", "group_i", "1"))); + assertU( + add( + doc( + "id", + "5", + "id_i", + "5", + "name", + "author3", + "title", + "the title of a title", + "group_i", + "1"))); assertU(commit()); // function based query for predictable scores not affect by similarity - assertQ(req("q","{!func}id_i", "group", "true", "group.field","name", "fl", "id, score") - ,"//lst[@name='grouped']/lst[@name='name']" - ,"*[count(//arr[@name='groups']/lst) = 3]" - - ,"//arr[@name='groups']/lst[1]/str[@name='groupValue'][.='author3']" - ,"//arr[@name='groups']/lst[1]/result[@numFound='1']" - ,"//arr[@name='groups']/lst[1]/result/doc/*[@name='id'][.='5']" - - ,"//arr[@name='groups']/lst[2]/str[@name='groupValue'][.='author2']" - ,"//arr[@name='groups']/lst[2]/result[@numFound='2']" - ,"//arr[@name='groups']/lst[2]/result/doc/*[@name='id'][.='4']" - - ,"//arr[@name='groups']/lst[3]/str[@name='groupValue'][.='author1']" - ,"//arr[@name='groups']/lst[3]/result[@numFound='2']" - ,"//arr[@name='groups']/lst[3]/result/doc/*[@name='id'][.='2']" - - ); - - assertQ(req("q", "title:title", "group", "true", "group.field", "group_i") - , "//lst[@name='grouped']/lst[@name='group_i']" - , "*[count(//arr[@name='groups']/lst) = 2]" - - , "//arr[@name='groups']/lst[1]/int[@name='groupValue'][.='2']" - , 
"//arr[@name='groups']/lst[1]/result[@numFound='2']" - , "//arr[@name='groups']/lst[1]/result/doc/*[@name='id'][.='4']" - - , "//arr[@name='groups']/lst[2]/int[@name='groupValue'][.='1']" - , "//arr[@name='groups']/lst[2]/result[@numFound='3']" - , "//arr[@name='groups']/lst[2]/result/doc/*[@name='id'][.='5']" - ); - - SolrException exception = expectThrows(SolrException.class, () -> { - h.query(req("q", "title:title", "group", "true", "group.field", "group_i", "group.offset", "-1")); - }); + assertQ( + req("q", "{!func}id_i", "group", "true", "group.field", "name", "fl", "id, score"), + "//lst[@name='grouped']/lst[@name='name']", + "*[count(//arr[@name='groups']/lst) = 3]", + "//arr[@name='groups']/lst[1]/str[@name='groupValue'][.='author3']", + "//arr[@name='groups']/lst[1]/result[@numFound='1']", + "//arr[@name='groups']/lst[1]/result/doc/*[@name='id'][.='5']", + "//arr[@name='groups']/lst[2]/str[@name='groupValue'][.='author2']", + "//arr[@name='groups']/lst[2]/result[@numFound='2']", + "//arr[@name='groups']/lst[2]/result/doc/*[@name='id'][.='4']", + "//arr[@name='groups']/lst[3]/str[@name='groupValue'][.='author1']", + "//arr[@name='groups']/lst[3]/result[@numFound='2']", + "//arr[@name='groups']/lst[3]/result/doc/*[@name='id'][.='2']"); + + assertQ( + req("q", "title:title", "group", "true", "group.field", "group_i"), + "//lst[@name='grouped']/lst[@name='group_i']", + "*[count(//arr[@name='groups']/lst) = 2]", + "//arr[@name='groups']/lst[1]/int[@name='groupValue'][.='2']", + "//arr[@name='groups']/lst[1]/result[@numFound='2']", + "//arr[@name='groups']/lst[1]/result/doc/*[@name='id'][.='4']", + "//arr[@name='groups']/lst[2]/int[@name='groupValue'][.='1']", + "//arr[@name='groups']/lst[2]/result[@numFound='3']", + "//arr[@name='groups']/lst[2]/result/doc/*[@name='id'][.='5']"); + + SolrException exception = + expectThrows( + SolrException.class, + () -> { + h.query( + req( + "q", + "title:title", + "group", + "true", + "group.field", + "group_i", + "group.offset", + "-1")); + }); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, exception.code()); assertEquals("'group.offset' parameter cannot be negative", exception.getMessage()); // for group.main=true and group.format=simple, group.offset is not consumed - assertQ(req("q", "title:title", "group", "true", "group.field", "group_i", - "group.offset", "-1", "group.format", "simple")); - assertQ(req("q", "title:title", "group", "true", "group.field", "group_i", - "group.offset", "-1", "group.main", "true")); + assertQ( + req( + "q", + "title:title", + "group", + "true", + "group.field", + "group_i", + "group.offset", + "-1", + "group.format", + "simple")); + assertQ( + req( + "q", + "title:title", + "group", + "true", + "group.field", + "group_i", + "group.offset", + "-1", + "group.main", + "true")); } @Test public void testGroupingGroupSortingScore_withTotalGroupCount() { - assertU(add(doc("id", "1", "id_i", "1", "name", "author1", "title", "a book title", "group_i", "1"))); - assertU(add(doc("id", "2", "id_i", "2", "name", "author1", "title", "the title", "group_i", "2"))); - assertU(add(doc("id", "3", "id_i", "3", "name", "author2", "title", "a book title", "group_i", "1"))); + assertU( + add( + doc( + "id", + "1", + "id_i", + "1", + "name", + "author1", + "title", + "a book title", + "group_i", + "1"))); + assertU( + add(doc("id", "2", "id_i", "2", "name", "author1", "title", "the title", "group_i", "2"))); + assertU( + add( + doc( + "id", + "3", + "id_i", + "3", + "name", + "author2", + "title", + "a book title", + "group_i", 
+ "1"))); assertU(add(doc("id", "4", "id_i", "4", "name", "author2", "title", "title", "group_i", "2"))); - assertU(add(doc("id", "5", "id_i", "5", "name", "author3", "title", "the title of a title", "group_i", "1"))); + assertU( + add( + doc( + "id", + "5", + "id_i", + "5", + "name", + "author3", + "title", + "the title of a title", + "group_i", + "1"))); assertU(commit()); // function based query for predictable scores not affect by similarity - assertQ(req("q","{!func}id_i", "group", "true", "group.field","name", "group.ngroups", "true") - ,"//lst[@name='grouped']/lst[@name='name']" - ,"//lst[@name='grouped']/lst[@name='name']/int[@name='matches'][.='5']" - ,"//lst[@name='grouped']/lst[@name='name']/int[@name='ngroups'][.='3']" - ,"*[count(//arr[@name='groups']/lst) = 3]" - - ,"//arr[@name='groups']/lst[1]/str[@name='groupValue'][.='author3']" - ,"//arr[@name='groups']/lst[1]/result[@numFound='1']" - ,"//arr[@name='groups']/lst[1]/result/doc/*[@name='id'][.='5']" - - ,"//arr[@name='groups']/lst[2]/str[@name='groupValue'][.='author2']" - ,"//arr[@name='groups']/lst[2]/result[@numFound='2']" - ,"//arr[@name='groups']/lst[2]/result/doc/*[@name='id'][.='4']" - - ,"//arr[@name='groups']/lst[3]/str[@name='groupValue'][.='author1']" - ,"//arr[@name='groups']/lst[3]/result[@numFound='2']" - ,"//arr[@name='groups']/lst[3]/result/doc/*[@name='id'][.='2']" - - ); + assertQ( + req("q", "{!func}id_i", "group", "true", "group.field", "name", "group.ngroups", "true"), + "//lst[@name='grouped']/lst[@name='name']", + "//lst[@name='grouped']/lst[@name='name']/int[@name='matches'][.='5']", + "//lst[@name='grouped']/lst[@name='name']/int[@name='ngroups'][.='3']", + "*[count(//arr[@name='groups']/lst) = 3]", + "//arr[@name='groups']/lst[1]/str[@name='groupValue'][.='author3']", + "//arr[@name='groups']/lst[1]/result[@numFound='1']", + "//arr[@name='groups']/lst[1]/result/doc/*[@name='id'][.='5']", + "//arr[@name='groups']/lst[2]/str[@name='groupValue'][.='author2']", + "//arr[@name='groups']/lst[2]/result[@numFound='2']", + "//arr[@name='groups']/lst[2]/result/doc/*[@name='id'][.='4']", + "//arr[@name='groups']/lst[3]/str[@name='groupValue'][.='author1']", + "//arr[@name='groups']/lst[3]/result[@numFound='2']", + "//arr[@name='groups']/lst[3]/result/doc/*[@name='id'][.='2']"); // function based query for predictable scores not affect by similarity - assertQ(req("q", "{!func}id_i", "group", "true", "group.field", "group_i", "group.ngroups", "true") - , "//lst[@name='grouped']/lst[@name='group_i']/int[@name='matches'][.='5']" - , "//lst[@name='grouped']/lst[@name='group_i']/int[@name='ngroups'][.='2']" - , "*[count(//arr[@name='groups']/lst) = 2]" - - , "//arr[@name='groups']/lst[1]/int[@name='groupValue'][.='1']" - , "//arr[@name='groups']/lst[1]/result[@numFound='3']" - , "//arr[@name='groups']/lst[1]/result/doc/*[@name='id'][.='5']" - - , "//arr[@name='groups']/lst[2]/int[@name='groupValue'][.='2']" - , "//arr[@name='groups']/lst[2]/result[@numFound='2']" - , "//arr[@name='groups']/lst[2]/result/doc/*[@name='id'][.='4']" - - ); + assertQ( + req("q", "{!func}id_i", "group", "true", "group.field", "group_i", "group.ngroups", "true"), + "//lst[@name='grouped']/lst[@name='group_i']/int[@name='matches'][.='5']", + "//lst[@name='grouped']/lst[@name='group_i']/int[@name='ngroups'][.='2']", + "*[count(//arr[@name='groups']/lst) = 2]", + "//arr[@name='groups']/lst[1]/int[@name='groupValue'][.='1']", + "//arr[@name='groups']/lst[1]/result[@numFound='3']", + 
"//arr[@name='groups']/lst[1]/result/doc/*[@name='id'][.='5']", + "//arr[@name='groups']/lst[2]/int[@name='groupValue'][.='2']", + "//arr[@name='groups']/lst[2]/result[@numFound='2']", + "//arr[@name='groups']/lst[2]/result/doc/*[@name='id'][.='4']"); } @Test @@ -195,54 +288,69 @@ public void testGroupingGroupSortingScore_basicWithGroupSortEqualToSort() { assertU(commit()); // function based query for predictable scores not affect by similarity - assertQ(req("q", "{!func}id_i", "group", "true", "group.field", "name", - "sort", "score desc", "group.sort", "score desc") - - , "//arr[@name='groups']/lst[1]/str[@name='groupValue'][.='author3']" - , "//arr[@name='groups']/lst[1]/result[@numFound='1']" - , "//arr[@name='groups']/lst[1]/result/doc/*[@name='id'][.='5']" - - , "//arr[@name='groups']/lst[2]/str[@name='groupValue'][.='author2']" - , "//arr[@name='groups']/lst[2]/result[@numFound='2']" - , "//arr[@name='groups']/lst[2]/result/doc/*[@name='id'][.='4']" - - , "//arr[@name='groups']/lst[3]/str[@name='groupValue'][.='author1']" - , "//arr[@name='groups']/lst[3]/result[@numFound='2']" - , "//arr[@name='groups']/lst[3]/result/doc/*[@name='id'][.='2']" - - ); + assertQ( + req( + "q", + "{!func}id_i", + "group", + "true", + "group.field", + "name", + "sort", + "score desc", + "group.sort", + "score desc"), + "//arr[@name='groups']/lst[1]/str[@name='groupValue'][.='author3']", + "//arr[@name='groups']/lst[1]/result[@numFound='1']", + "//arr[@name='groups']/lst[1]/result/doc/*[@name='id'][.='5']", + "//arr[@name='groups']/lst[2]/str[@name='groupValue'][.='author2']", + "//arr[@name='groups']/lst[2]/result[@numFound='2']", + "//arr[@name='groups']/lst[2]/result/doc/*[@name='id'][.='4']", + "//arr[@name='groups']/lst[3]/str[@name='groupValue'][.='author1']", + "//arr[@name='groups']/lst[3]/result[@numFound='2']", + "//arr[@name='groups']/lst[3]/result/doc/*[@name='id'][.='2']"); } @Test public void testGroupingGroupSortingWeight() { - assertU(add(doc("id", "1","name", "author1", "weight", "12.1"))); - assertU(add(doc("id", "2","name", "author1", "weight", "2.1"))); - assertU(add(doc("id", "3","name", "author2", "weight", "0.1"))); - assertU(add(doc("id", "4","name", "author2", "weight", "0.11"))); + assertU(add(doc("id", "1", "name", "author1", "weight", "12.1"))); + assertU(add(doc("id", "2", "name", "author1", "weight", "2.1"))); + assertU(add(doc("id", "3", "name", "author2", "weight", "0.1"))); + assertU(add(doc("id", "4", "name", "author2", "weight", "0.11"))); assertU(commit()); - assertQ(req("q", "*:*", "group", "true", "group.field", "name", "sort", "id asc", "group.sort", "weight desc") - , "*[count(//arr[@name='groups']/lst) = 2]" - , "//arr[@name='groups']/lst[1]/str[@name='groupValue'][.='author1']" + assertQ( + req( + "q", + "*:*", + "group", + "true", + "group.field", + "name", + "sort", + "id asc", + "group.sort", + "weight desc"), + "*[count(//arr[@name='groups']/lst) = 2]", + "//arr[@name='groups']/lst[1]/str[@name='groupValue'][.='author1']" // ,"//arr[@name='groups']/lst[1]/int[@name='matches'][.='2']" - , "//arr[@name='groups']/lst[1]/result[@numFound='2']" - , "//arr[@name='groups']/lst[1]/result/doc/*[@name='id'][.='1']" - - , "//arr[@name='groups']/lst[2]/str[@name='groupValue'][.='author2']" + , + "//arr[@name='groups']/lst[1]/result[@numFound='2']", + "//arr[@name='groups']/lst[1]/result/doc/*[@name='id'][.='1']", + "//arr[@name='groups']/lst[2]/str[@name='groupValue'][.='author2']" // ,"//arr[@name='groups']/lst[2]/int[@name='matches'][.='2']" - , 
"//arr[@name='groups']/lst[2]/result[@numFound='2']" - , "//arr[@name='groups']/lst[2]/result/doc/*[@name='id'][.='4']" - ); + , + "//arr[@name='groups']/lst[2]/result[@numFound='2']", + "//arr[@name='groups']/lst[2]/result/doc/*[@name='id'][.='4']"); } @Test public void testGroupingNoQuery() { - assertU(add(doc("id", "1","name", "author1", "weight", "12.1"))); + assertU(add(doc("id", "1", "name", "author1", "weight", "12.1"))); assertU(commit()); - assertQ(req( "group", "true", "group.query", "") - ,"//lst[2]/lst[@name='']/result[@numFound='0']" - ); + assertQ( + req("group", "true", "group.query", ""), "//lst[2]/lst[@name='']/result[@numFound='0']"); } @Test @@ -254,12 +362,22 @@ public void testGroupingSimpleFormatArrayIndexOutOfBoundsException() throws Exce assertJQ( req("q", "*:*", "start", "1", "group", "true", "group.field", "id", "group.main", "true"), - "/response=={'numFound':3,'start':1,'numFoundExact':true,'docs':[{'id':'2'},{'id':'3'}]}" - ); + "/response=={'numFound':3,'start':1,'numFoundExact':true,'docs':[{'id':'2'},{'id':'3'}]}"); assertJQ( - req("q", "*:*", "start", "1", "rows", "1", "group", "true", "group.field", "id", "group.main", "true"), - "/response=={'numFound':3,'start':1,'numFoundExact':true,'docs':[{'id':'2'}]}" - ); + req( + "q", + "*:*", + "start", + "1", + "rows", + "1", + "group", + "true", + "group.field", + "id", + "group.main", + "true"), + "/response=={'numFound':3,'start':1,'numFoundExact':true,'docs':[{'id':'2'}]}"); } @Test @@ -272,9 +390,20 @@ public void testGroupingSimpleFormatStartBiggerThanRows() throws Exception { assertU(commit()); assertJQ( - req("q", "*:*", "start", "2", "rows", "1", "group", "true", "group.field", "id", "group.main", "true"), - "/response=={'numFound':5,'start':2,'numFoundExact':true,'docs':[{'id':'3'}]}" - ); + req( + "q", + "*:*", + "start", + "2", + "rows", + "1", + "group", + "true", + "group.field", + "id", + "group.main", + "true"), + "/response=={'numFound':5,'start':2,'numFoundExact':true,'docs':[{'id':'3'}]}"); } @Test @@ -288,7 +417,21 @@ public void testGroupingSimpleFormatArrayIndexOutOfBoundsExceptionWithJavaBin() assertU(commit()); SolrQueryRequest request = - req("q", "*:*","group", "true", "group.field", "nullfirst", "group.main", "true", "wt", "javabin", "start", "4", "rows", "10"); + req( + "q", + "*:*", + "group", + "true", + "group.field", + "nullfirst", + "group.main", + "true", + "wt", + "javabin", + "start", + "4", + "rows", + "10"); SolrQueryResponse response = new SolrQueryResponse(); ByteArrayOutputStream out = new ByteArrayOutputStream(); @@ -318,7 +461,18 @@ public void testGroupingWithTimeAllowed() throws Exception { assertU(commit()); // Just checking if no errors occur - assertJQ(req("q", "*:*", "group", "true", "group.query", "id:1", "group.query", "id:2", "timeAllowed", "1")); + assertJQ( + req( + "q", + "*:*", + "group", + "true", + "group.query", + "id:1", + "group.query", + "id:2", + "timeAllowed", + "1")); } @Test @@ -331,88 +485,249 @@ public void testGroupingSortByFunction() throws Exception { assertU(commit()); assertJQ( - req("q", "*:*", "sort", "sum(value1_i, value2_i) desc", "rows", "1", "group", "true", "group.field", "id", "fl", "id"), - "/grouped=={'id':{'matches':5,'groups':[{'groupValue':'5','doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'5'}]}}]}}" - ); + req( + "q", + "*:*", + "sort", + "sum(value1_i, value2_i) desc", + "rows", + "1", + "group", + "true", + "group.field", + "id", + "fl", + "id"), + 
"/grouped=={'id':{'matches':5,'groups':[{'groupValue':'5','doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'5'}]}}]}}"); assertJQ( - req("q", "*:*", "sort", "geodist(45.18014,-93.87742) asc", "sfield", "store", "rows", "1", "group", "true", "group.field", "id", "fl", "id"), - "/grouped=={'id':{'matches':5,'groups':[{'groupValue':'1','doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'1'}]}}]}}" - ); + req( + "q", + "*:*", + "sort", + "geodist(45.18014,-93.87742) asc", + "sfield", + "store", + "rows", + "1", + "group", + "true", + "group.field", + "id", + "fl", + "id"), + "/grouped=={'id':{'matches':5,'groups':[{'groupValue':'1','doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'1'}]}}]}}"); } @Test public void testGroupingGroupedBasedFaceting() throws Exception { - assertU(add(doc("id", "1", "value1_s1", "1", "value2_i", "1", "value3_s1", "a", "value4_i", "1"))); - assertU(add(doc("id", "2", "value1_s1", "1", "value2_i", "2", "value3_s1", "a", "value4_i", "1"))); + assertU( + add(doc("id", "1", "value1_s1", "1", "value2_i", "1", "value3_s1", "a", "value4_i", "1"))); + assertU( + add(doc("id", "2", "value1_s1", "1", "value2_i", "2", "value3_s1", "a", "value4_i", "1"))); assertU(commit()); - assertU(add(doc("id", "3", "value1_s1", "2", "value2_i", "3", "value3_s1", "b", "value4_i", "2"))); - assertU(add(doc("id", "4", "value1_s1", "1", "value2_i", "4", "value3_s1", "a", "value4_i", "1"))); - assertU(add(doc("id", "5", "value1_s1", "2", "value2_i", "5", "value3_s1", "b", "value4_i", "2"))); + assertU( + add(doc("id", "3", "value1_s1", "2", "value2_i", "3", "value3_s1", "b", "value4_i", "2"))); + assertU( + add(doc("id", "4", "value1_s1", "1", "value2_i", "4", "value3_s1", "a", "value4_i", "1"))); + assertU( + add(doc("id", "5", "value1_s1", "2", "value2_i", "5", "value3_s1", "b", "value4_i", "2"))); assertU(commit()); // Facet counts based on documents - SolrQueryRequest req = req("q", "*:*", "sort", "value2_i asc", "rows", "1", "group", "true", "group.field", - "value1_s1", "fl", "id", "facet", "true", "facet.field", "value3_s1", "group.truncate", "false"); + SolrQueryRequest req = + req( + "q", + "*:*", + "sort", + "value2_i asc", + "rows", + "1", + "group", + "true", + "group.field", + "value1_s1", + "fl", + "id", + "facet", + "true", + "facet.field", + "value3_s1", + "group.truncate", + "false"); assertJQ( req, "/grouped=={'value1_s1':{'matches':5,'groups':[{'groupValue':'1','doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'1'}]}}]}}", - "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',3,'b',2]}," + EMPTY_FACETS + "}" - ); + "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',3,'b',2]}," + + EMPTY_FACETS + + "}"); // Facet counts based on groups - req = req("q", "*:*", "sort", "value2_i asc", "rows", "1", "group", "true", "group.field", - "value1_s1", "fl", "id", "facet", "true", "facet.field", "value3_s1", "group.truncate", "true"); + req = + req( + "q", + "*:*", + "sort", + "value2_i asc", + "rows", + "1", + "group", + "true", + "group.field", + "value1_s1", + "fl", + "id", + "facet", + "true", + "facet.field", + "value3_s1", + "group.truncate", + "true"); assertJQ( req, "/grouped=={'value1_s1':{'matches':5,'groups':[{'groupValue':'1','doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'1'}]}}]}}", - "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]}," + EMPTY_FACETS + "}" - ); - - // Facet counts based on groups and with group.func. 
This should trigger FunctionAllGroupHeadsCollector - req = req("q", "*:*", "sort", "value2_i asc", "rows", "1", "group", "true", "group.func", - "strdist(1,value1_s1,edit)", "fl", "id", "facet", "true", "facet.field", "value3_s1", "group.truncate", "true"); + "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]}," + + EMPTY_FACETS + + "}"); + + // Facet counts based on groups and with group.func. This should trigger + // FunctionAllGroupHeadsCollector + req = + req( + "q", + "*:*", + "sort", + "value2_i asc", + "rows", + "1", + "group", + "true", + "group.func", + "strdist(1,value1_s1,edit)", + "fl", + "id", + "facet", + "true", + "facet.field", + "value3_s1", + "group.truncate", + "true"); assertJQ( req, "/grouped=={'strdist(1,value1_s1,edit)':{'matches':5,'groups':[{'groupValue':1.0,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'1'}]}}]}}", - "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]}," + EMPTY_FACETS + "}" - ); + "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]}," + + EMPTY_FACETS + + "}"); // Facet counts based on groups without sort on an int field. - req = req("q", "*:*", "rows", "1", "group", "true", "group.field", "value4_i", "fl", "id", "facet", "true", - "facet.field", "value3_s1", "group.truncate", "true"); + req = + req( + "q", + "*:*", + "rows", + "1", + "group", + "true", + "group.field", + "value4_i", + "fl", + "id", + "facet", + "true", + "facet.field", + "value3_s1", + "group.truncate", + "true"); assertJQ( req, "/grouped=={'value4_i':{'matches':5,'groups':[{'groupValue':1,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'1'}]}}]}}", - "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]}," + EMPTY_FACETS + "}" - ); + "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]}," + + EMPTY_FACETS + + "}"); // Multi select facets AND group.truncate=true - req = req("q", "*:*", "rows", "1", "group", "true", "group.field", "value4_i", "fl", "id", "facet", "true", - "facet.field", "{!ex=v}value3_s1", "group.truncate", "true", "fq", "{!tag=v}value3_s1:b"); + req = + req( + "q", + "*:*", + "rows", + "1", + "group", + "true", + "group.field", + "value4_i", + "fl", + "id", + "facet", + "true", + "facet.field", + "{!ex=v}value3_s1", + "group.truncate", + "true", + "fq", + "{!tag=v}value3_s1:b"); assertJQ( req, "/grouped=={'value4_i':{'matches':2,'groups':[{'groupValue':2,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'}]}}]}}", - "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]}," + EMPTY_FACETS + "}" - ); + "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]}," + + EMPTY_FACETS + + "}"); // Multi select facets AND group.truncate=false - req = req("q", "*:*", "rows", "1", "group", "true", "group.field", "value4_i", "fl", "id", "facet", "true", - "facet.field", "{!ex=v}value3_s1", "group.truncate", "false", "fq", "{!tag=v}value3_s1:b"); + req = + req( + "q", + "*:*", + "rows", + "1", + "group", + "true", + "group.field", + "value4_i", + "fl", + "id", + "facet", + "true", + "facet.field", + "{!ex=v}value3_s1", + "group.truncate", + "false", + "fq", + "{!tag=v}value3_s1:b"); assertJQ( req, "/grouped=={'value4_i':{'matches':2,'groups':[{'groupValue':2,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'}]}}]}}", - "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',3,'b',2]}," + EMPTY_FACETS + "}" - ); + 
"/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',3,'b',2]}," + + EMPTY_FACETS + + "}"); // Multi select facets AND group.truncate=true - req = req("q", "*:*", "rows", "1", "group", "true", "group.func", "sub(value4_i,1)", "fl", "id", "facet", "true", - "facet.field", "{!ex=v}value3_s1", "group.truncate", "true", "fq", "{!tag=v}value3_s1:b"); + req = + req( + "q", + "*:*", + "rows", + "1", + "group", + "true", + "group.func", + "sub(value4_i,1)", + "fl", + "id", + "facet", + "true", + "facet.field", + "{!ex=v}value3_s1", + "group.truncate", + "true", + "fq", + "{!tag=v}value3_s1:b"); assertJQ( req, "/grouped=={'sub(value4_i,1)':{'matches':2,'groups':[{'groupValue':1.0,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'}]}}]}}", - "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]}," + EMPTY_FACETS + "}" - ); + "/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]}," + + EMPTY_FACETS + + "}"); } @Test @@ -425,36 +740,59 @@ public void testGroupingGroupedBasedFacetingWithTaggedFilter() throws Exception assertU(commit()); // Facet counts based on groups - SolrQueryRequest req = req("q", "*:*", "rows", "1", "group", "true", "group.field", "cat_sI", - "sort", "cat_sI asc", "fl", "id", "fq", "{!tag=chk}bday:[2012-12-18T00:00:00Z TO 2013-01-17T23:59:59Z]", - "facet", "true", "group.truncate", "true", "group.sort", "bday desc", - "facet.query", "{!ex=chk key=LW1}bday:[2013-01-11T00:00:00Z TO 2013-01-17T23:59:59Z]", - "facet.query", "{!ex=chk key=LM1}bday:[2012-12-18T00:00:00Z TO 2013-01-17T23:59:59Z]", - "facet.query", "{!ex=chk key=LM3}bday:[2012-10-18T00:00:00Z TO 2013-01-17T23:59:59Z]"); + SolrQueryRequest req = + req( + "q", + "*:*", + "rows", + "1", + "group", + "true", + "group.field", + "cat_sI", + "sort", + "cat_sI asc", + "fl", + "id", + "fq", + "{!tag=chk}bday:[2012-12-18T00:00:00Z TO 2013-01-17T23:59:59Z]", + "facet", + "true", + "group.truncate", + "true", + "group.sort", + "bday desc", + "facet.query", + "{!ex=chk key=LW1}bday:[2013-01-11T00:00:00Z TO 2013-01-17T23:59:59Z]", + "facet.query", + "{!ex=chk key=LM1}bday:[2012-12-18T00:00:00Z TO 2013-01-17T23:59:59Z]", + "facet.query", + "{!ex=chk key=LM3}bday:[2012-10-18T00:00:00Z TO 2013-01-17T23:59:59Z]"); assertJQ( req, "/grouped=={'cat_sI':{'matches':2,'groups':[{'groupValue':'a','doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'5'}]}}]}}", - "/facet_counts=={'facet_queries':{'LW1':2,'LM1':2,'LM3':2},'facet_fields':{}," + EMPTY_FACETS + "}" - ); + "/facet_counts=={'facet_queries':{'LW1':2,'LM1':2,'LM3':2},'facet_fields':{}," + + EMPTY_FACETS + + "}"); } static String f = "foo_i"; static String f2 = "foo2_i"; public static void createIndex() { - assertU(adoc("id","1", f,"5", f2,"4")); - assertU(adoc("id","2", f,"4", f2,"2")); - assertU(adoc("id","3", f,"3", f2,"7")); + assertU(adoc("id", "1", f, "5", f2, "4")); + assertU(adoc("id", "2", f, "4", f2, "2")); + assertU(adoc("id", "3", f, "3", f2, "7")); + assertU(commit()); + assertU(adoc("id", "4", f, "2", f2, "6")); + assertU(adoc("id", "5", f, "1", f2, "2")); + assertU(adoc("id", "6", f, "3", f2, "2")); + assertU(adoc("id", "7", f, "2", f2, "3")); assertU(commit()); - assertU(adoc("id","4", f,"2", f2,"6")); - assertU(adoc("id","5", f,"1", f2,"2")); - assertU(adoc("id","6", f,"3", f2,"2")); - assertU(adoc("id","7", f,"2", f2,"3")); + assertU(adoc("id", "8", f, "1", f2, "10")); + assertU(adoc("id", "9", f, "2", f2, "1")); assertU(commit()); - assertU(adoc("id","8", f,"1", f2,"10")); 
- assertU(adoc("id","9", f,"2", f2,"1")); - assertU(commit()); - assertU(adoc("id","10", f,"1", f2,"3")); + assertU(adoc("id", "10", f, "1", f2, "3")); assertU(commit()); } @@ -463,239 +801,704 @@ public void testGroupAPI() throws Exception { createIndex(); String filt = f + ":[* TO *]"; - assertQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f) - ,"/response/lst[@name='grouped']/lst[@name='"+f+"']/arr[@name='groups']" - ); - - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id") - ,"/responseHeader/status==0" // exact match - ,"/responseHeader=={'_SKIP_':'QTime', 'status':0}" // partial match by skipping some elements - ,"/responseHeader=={'_MATCH_':'status', 'status':0}" // partial match by only including some elements - ,"/grouped=={'"+f+"':{'matches':10,'groups':[\n" + - "{'groupValue':1,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'8'}]}}," + - "{'groupValue':3,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'}]}}," + - "{'groupValue':2,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'4'}]}}," + - "{'groupValue':5,'doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'1'}]}}," + - "{'groupValue':4,'doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'2'}]}}" + - "]}}" - ); + assertQ( + req("fq", filt, "q", "{!func}" + f2, "group", "true", "group.field", f), + "/response/lst[@name='grouped']/lst[@name='" + f + "']/arr[@name='groups']"); + + assertJQ( + req("fq", filt, "q", "{!func}" + f2, "group", "true", "group.field", f, "fl", "id"), + "/responseHeader/status==0" // exact match + , + "/responseHeader=={'_SKIP_':'QTime', 'status':0}" // partial match by skipping some elements + , + "/responseHeader=={'_MATCH_':'status', 'status':0}" // partial match by only including some + // elements + , + "/grouped=={'" + + f + + "':{'matches':10,'groups':[\n" + + "{'groupValue':1,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'8'}]}}," + + "{'groupValue':3,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'}]}}," + + "{'groupValue':2,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'4'}]}}," + + "{'groupValue':5,'doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'1'}]}}," + + "{'groupValue':4,'doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'2'}]}}" + + "]}}"); // test that filtering cuts down the result set - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "fq",f+":2") - ,"/grouped=={'"+f+"':{'matches':3,'groups':[" + - "{'groupValue':2,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'4'}]}}" + - "]}}" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.field", + f, + "fl", + "id", + "fq", + f + ":2"), + "/grouped=={'" + + f + + "':{'matches':3,'groups':[" + + "{'groupValue':2,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'4'}]}}" + + "]}}"); // test limiting the number of groups returned - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","2") - ,"/grouped=={'"+f+"':{'matches':10,'groups':[" + - "{'groupValue':1,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'8'}]}}," + - "{'groupValue':3,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'}]}}" + - "]}}" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.field", + f, + "fl", + 
"id", + "rows", + "2"), + "/grouped=={'" + + f + + "':{'matches':10,'groups':[" + + "{'groupValue':1,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'8'}]}}," + + "{'groupValue':3,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'}]}}" + + "]}}"); // test offset into group list - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","1", "start","1") - ,"/grouped=={'"+f+"':{'matches':10,'groups':[" + - "{'groupValue':3,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'}]}}" + - "]}}" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.field", + f, + "fl", + "id", + "rows", + "1", + "start", + "1"), + "/grouped=={'" + + f + + "':{'matches':10,'groups':[" + + "{'groupValue':3,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'}]}}" + + "]}}"); // test big offset into group list - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","1", "start","100") - ,"/grouped=={'"+f+"':{'matches':10,'groups':[" + - "]}}" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.field", + f, + "fl", + "id", + "rows", + "1", + "start", + "100"), + "/grouped=={'" + f + "':{'matches':10,'groups':[" + "]}}"); // test increasing the docs per group returned - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","2", "group.limit","3") - ,"/grouped=={'"+f+"':{'matches':10,'groups':[" + - "{'groupValue':1,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'8'},{'id':'10'},{'id':'5'}]}}," + - "{'groupValue':3,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'},{'id':'6'}]}}" + - "]}}" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.field", + f, + "fl", + "id", + "rows", + "2", + "group.limit", + "3"), + "/grouped=={'" + + f + + "':{'matches':10,'groups':[" + + "{'groupValue':1,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'8'},{'id':'10'},{'id':'5'}]}}," + + "{'groupValue':3,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'},{'id':'6'}]}}" + + "]}}"); // test offset into each group - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","2", "group.limit","3", "group.offset","1") - ,"/grouped=={'"+f+"':{'matches':10,'groups':[" + - "{'groupValue':1,'doclist':{'numFound':3,'start':1,'numFoundExact':true,'docs':[{'id':'10'},{'id':'5'}]}}," + - "{'groupValue':3,'doclist':{'numFound':2,'start':1,'numFoundExact':true,'docs':[{'id':'6'}]}}" + - "]}}" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.field", + f, + "fl", + "id", + "rows", + "2", + "group.limit", + "3", + "group.offset", + "1"), + "/grouped=={'" + + f + + "':{'matches':10,'groups':[" + + "{'groupValue':1,'doclist':{'numFound':3,'start':1,'numFoundExact':true,'docs':[{'id':'10'},{'id':'5'}]}}," + + "{'groupValue':3,'doclist':{'numFound':2,'start':1,'numFoundExact':true,'docs':[{'id':'6'}]}}" + + "]}}"); // test big offset into each group - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","2", "group.limit","3", "group.offset","10") - ,"/grouped=={'"+f+"':{'matches':10,'groups':[" + - "{'groupValue':1,'doclist':{'numFound':3,'start':10,'numFoundExact':true,'docs':[]}}," + - 
"{'groupValue':3,'doclist':{'numFound':2,'start':10,'numFoundExact':true,'docs':[]}}" + - "]}}" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.field", + f, + "fl", + "id", + "rows", + "2", + "group.limit", + "3", + "group.offset", + "10"), + "/grouped=={'" + + f + + "':{'matches':10,'groups':[" + + "{'groupValue':1,'doclist':{'numFound':3,'start':10,'numFoundExact':true,'docs':[]}}," + + "{'groupValue':3,'doclist':{'numFound':2,'start':10,'numFoundExact':true,'docs':[]}}" + + "]}}"); // test adding in scores - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id,score", "rows","2", "group.limit","2", "indent","off") - ,"/grouped/"+f+"/groups==" + - "[" + - "{'groupValue':1,'doclist':{'numFound':3,'start':0,numFoundExact:true,'maxScore':10.0,'docs':[{'id':'8','score':10.0},{'id':'10','score':3.0}]}}," + - "{'groupValue':3,'doclist':{'numFound':2,'start':0,numFoundExact:true,'maxScore':7.0,'docs':[{'id':'3','score':7.0},{'id':'6','score':2.0}]}}" + - "]" - - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.field", + f, + "fl", + "id,score", + "rows", + "2", + "group.limit", + "2", + "indent", + "off"), + "/grouped/" + + f + + "/groups==" + + "[" + + "{'groupValue':1,'doclist':{'numFound':3,'start':0,numFoundExact:true,'maxScore':10.0,'docs':[{'id':'8','score':10.0},{'id':'10','score':3.0}]}}," + + "{'groupValue':3,'doclist':{'numFound':2,'start':0,numFoundExact:true,'maxScore':7.0,'docs':[{'id':'3','score':7.0},{'id':'6','score':2.0}]}}" + + "]"); // test function (functions are currently all float - this may change) - String func = "add("+f+","+f+")"; - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.func", func , "fl","id", "rows","2") - ,"/grouped=={'"+func+"':{'matches':10,'groups':[" + - "{'groupValue':2.0,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'8'}]}}," + - "{'groupValue':6.0,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'}]}}" + - "]}}" - ); + String func = "add(" + f + "," + f + ")"; + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.func", + func, + "fl", + "id", + "rows", + "2"), + "/grouped=={'" + + func + + "':{'matches':10,'groups':[" + + "{'groupValue':2.0,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'8'}]}}," + + "{'groupValue':6.0,'doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'3'}]}}" + + "]}}"); // test that faceting works with grouping - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id" - ,"facet","true", "facet.field",f) - ,"/grouped/"+f+"/matches==10" - ,"/facet_counts/facet_fields/"+f+"==['1',3, '2',3, '3',2, '4',1, '5',1]" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.field", + f, + "fl", + "id", + "facet", + "true", + "facet.field", + f), + "/grouped/" + f + "/matches==10", + "/facet_counts/facet_fields/" + f + "==['1',3, '2',3, '3',2, '4',1, '5',1]"); // test that grouping works with debugging - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id" - ,"debugQuery","true") - ,"/grouped/"+f+"/matches==10" - ,"/debug/explain/8==" - ,"/debug/explain/2==" - ); - - ///////////////////////// group.query - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.query","id:[2 TO 5]", "fl","id", "group.limit","3") - ,"/grouped=={'id:[2 TO 5]':{'matches':10," + - 
"'doclist':{'numFound':4,'start':0,numFoundExact:true,'docs':[{'id':'3'},{'id':'4'},{'id':'2'}]}}}" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.field", + f, + "fl", + "id", + "debugQuery", + "true"), + "/grouped/" + f + "/matches==10", + "/debug/explain/8==", + "/debug/explain/2=="); + + ///////////////////////// group.query + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.query", + "id:[2 TO 5]", + "fl", + "id", + "group.limit", + "3"), + "/grouped=={'id:[2 TO 5]':{'matches':10," + + "'doclist':{'numFound':4,'start':0,numFoundExact:true,'docs':[{'id':'3'},{'id':'4'},{'id':'2'}]}}}"); // group.query that matches nothing - assertJQ(req("fq",filt, - "q","{!func}"+f2, - "group","true", - "group.query","id:[2 TO 5]", - "group.query","id:1000", - "fl","id", - "group.limit","3") - ,"/grouped/id:[2 TO 5]=={'matches':10,'doclist':{'numFound':4,'start':0,numFoundExact:true,'docs':[{'id':'3'},{'id':'4'},{'id':'2'}]}}" - ,"/grouped/id:1000=={'matches':10,'doclist':{'numFound':0,'start':0,numFoundExact:true,'docs':[]}}" - ); + assertJQ( + req( + "fq", filt, + "q", "{!func}" + f2, + "group", "true", + "group.query", "id:[2 TO 5]", + "group.query", "id:1000", + "fl", "id", + "group.limit", "3"), + "/grouped/id:[2 TO 5]=={'matches':10,'doclist':{'numFound':4,'start':0,numFoundExact:true,'docs':[{'id':'3'},{'id':'4'},{'id':'2'}]}}", + "/grouped/id:1000=={'matches':10,'doclist':{'numFound':0,'start':0,numFoundExact:true,'docs':[]}}"); // group.query and sort - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.query",f+":1", "fl","id,score", "rows","2", "group.limit","2", "sort",f+" desc, score desc", "indent","off") - ,"/grouped/"+f+":1==" + - "{'matches':10,'doclist':{'numFound':3,'start':0,numFoundExact:true,'maxScore':10.0,'docs':[{'id':'8','score':10.0},{'id':'10','score':3.0}]}}," - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.query", + f + ":1", + "fl", + "id,score", + "rows", + "2", + "group.limit", + "2", + "sort", + f + " desc, score desc", + "indent", + "off"), + "/grouped/" + + f + + ":1==" + + "{'matches':10,'doclist':{'numFound':3,'start':0,numFoundExact:true,'maxScore':10.0,'docs':[{'id':'8','score':10.0},{'id':'10','score':3.0}]}},"); // group.query with fl=score and default sort - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.query",f+":1", "fl","id,score", "rows","2", "group.limit","2", "sort", "score desc", "indent","off") - ,"/grouped/"+f+":1==" + - "{'matches':10,'doclist':{'numFound':3,'start':0,numFoundExact:true,'maxScore':10.0,'docs':[{'id':'8','score':10.0},{'id':'10','score':3.0}]}}," - ); - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.query",f+":1", "fl","id", "rows","2", "group.limit","2", "indent","off") - ,"/grouped/"+f+":1==" + - "{'matches':10,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'8'},{'id':'10'}]}}," - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.query", + f + ":1", + "fl", + "id,score", + "rows", + "2", + "group.limit", + "2", + "sort", + "score desc", + "indent", + "off"), + "/grouped/" + + f + + ":1==" + + "{'matches':10,'doclist':{'numFound':3,'start':0,numFoundExact:true,'maxScore':10.0,'docs':[{'id':'8','score':10.0},{'id':'10','score':3.0}]}},"); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.query", + f + ":1", + "fl", + "id", + "rows", + "2", + 
"group.limit", + "2", + "indent", + "off"), + "/grouped/" + + f + + ":1==" + + "{'matches':10,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'8'},{'id':'10'}]}},"); // group.query and offset - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.query","id:[2 TO 5]", "fl","id", "group.limit","3", "group.offset","2") - ,"/grouped=={'id:[2 TO 5]':{'matches':10," + - "'doclist':{'numFound':4,'start':2,'numFoundExact':true,'docs':[{'id':'2'},{'id':'5'}]}}}" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.query", + "id:[2 TO 5]", + "fl", + "id", + "group.limit", + "3", + "group.offset", + "2"), + "/grouped=={'id:[2 TO 5]':{'matches':10," + + "'doclist':{'numFound':4,'start':2,'numFoundExact':true,'docs':[{'id':'2'},{'id':'5'}]}}}"); // group.query and big offset - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.query","id:[2 TO 5]", "fl","id", "group.limit","3", "group.offset","10") - ,"/grouped=={'id:[2 TO 5]':{'matches':10," + - "'doclist':{'numFound':4,'start':10,'numFoundExact':true,'docs':[]}}}" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.query", + "id:[2 TO 5]", + "fl", + "id", + "group.limit", + "3", + "group.offset", + "10"), + "/grouped=={'id:[2 TO 5]':{'matches':10," + + "'doclist':{'numFound':4,'start':10,'numFoundExact':true,'docs':[]}}}"); ///////////////////////// group.query as main result - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.query","id:[2 TO 5]", "fl","id", "rows","3", "group.main","true") - ,"/response=={'numFound':4,'start':0,numFoundExact:true,'docs':[{'id':'3'},{'id':'4'},{'id':'2'}]}" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.query", + "id:[2 TO 5]", + "fl", + "id", + "rows", + "3", + "group.main", + "true"), + "/response=={'numFound':4,'start':0,numFoundExact:true,'docs':[{'id':'3'},{'id':'4'},{'id':'2'}]}"); // group.query and offset - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.query","id:[2 TO 5]", "fl","id", "rows","3", "start","2", "group.main","true") - ,"/response=={'numFound':4,'start':2,'numFoundExact':true,'docs':[{'id':'2'},{'id':'5'}]}" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.query", + "id:[2 TO 5]", + "fl", + "id", + "rows", + "3", + "start", + "2", + "group.main", + "true"), + "/response=={'numFound':4,'start':2,'numFoundExact':true,'docs':[{'id':'2'},{'id':'5'}]}"); // group.query and big offset - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.query","id:[2 TO 5]", "fl","id", "rows","3", "start","10", "group.main","true") - ,"/response=={'numFound':4,'start':10,'numFoundExact':true,'docs':[]}" - ); - + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.query", + "id:[2 TO 5]", + "fl", + "id", + "rows", + "3", + "start", + "10", + "group.main", + "true"), + "/response=={'numFound':4,'start':10,'numFoundExact':true,'docs':[]}"); // multiple at once - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", - "group.query","id:[2 TO 5]", - "group.query","id:[5 TO 5]", - "group.field",f, - "rows","1", - "fl","id", "group.limit","2") - ,"/grouped/id:[2 TO 5]=={'matches':10,'doclist':{'numFound':4,'start':0,numFoundExact:true,'docs':[{'id':'3'},{'id':'4'}]}}" - ,"/grouped/id:[5 TO 5]=={'matches':10,'doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'5'}]}}" - 
,"/grouped/"+f+"=={'matches':10,'groups':[{'groupValue':1,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'8'},{'id':'10'}]}}]}" - ); - + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.query", + "id:[2 TO 5]", + "group.query", + "id:[5 TO 5]", + "group.field", + f, + "rows", + "1", + "fl", + "id", + "group.limit", + "2"), + "/grouped/id:[2 TO 5]=={'matches':10,'doclist':{'numFound':4,'start':0,numFoundExact:true,'docs':[{'id':'3'},{'id':'4'}]}}", + "/grouped/id:[5 TO 5]=={'matches':10,'doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'5'}]}}", + "/grouped/" + + f + + "=={'matches':10,'groups':[{'groupValue':1,'doclist':{'numFound':3,'start':0,numFoundExact:true,'docs':[{'id':'8'},{'id':'10'}]}}]}"); ///////////////////////// group.field as main result - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "group.main","true") - ,"/response=={'numFound':10,'start':0,numFoundExact:true,'docs':[{'id':'8'},{'id':'3'},{'id':'4'},{'id':'1'},{'id':'2'}]}" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.field", + f, + "fl", + "id", + "group.main", + "true"), + "/response=={'numFound':10,'start':0,numFoundExact:true,'docs':[{'id':'8'},{'id':'3'},{'id':'4'},{'id':'1'},{'id':'2'}]}"); // test that rows limits #docs - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","3", "group.main","true") - ,"/response=={'numFound':10,'start':0,numFoundExact:true,'docs':[{'id':'8'},{'id':'3'},{'id':'4'}]}" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.field", + f, + "fl", + "id", + "rows", + "3", + "group.main", + "true"), + "/response=={'numFound':10,'start':0,numFoundExact:true,'docs':[{'id':'8'},{'id':'3'},{'id':'4'}]}"); // small offset - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","2", "start","1", "group.main","true") - ,"/response=={'numFound':10,'start':1,'numFoundExact':true,'docs':[{'id':'3'},{'id':'4'}]}" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.field", + f, + "fl", + "id", + "rows", + "2", + "start", + "1", + "group.main", + "true"), + "/response=={'numFound':10,'start':1,'numFoundExact':true,'docs':[{'id':'3'},{'id':'4'}]}"); // large offset - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","2", "start","20", "group.main","true") - ,"/response=={'numFound':10,'start':20,'numFoundExact':true,'docs':[]}" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.field", + f, + "fl", + "id", + "rows", + "2", + "start", + "20", + "group.main", + "true"), + "/response=={'numFound':10,'start':20,'numFoundExact':true,'docs':[]}"); // group.limit>1 - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id", "rows","3", "group.limit","2", "group.main","true") - ,"/response=={'numFound':10,'start':0,numFoundExact:true,'docs':[{'id':'8'},{'id':'10'},{'id':'3'}]}" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.field", + f, + "fl", + "id", + "rows", + "3", + "group.limit", + "2", + "group.main", + "true"), + "/response=={'numFound':10,'start':0,numFoundExact:true,'docs':[{'id':'8'},{'id':'10'},{'id':'3'}]}"); // group.limit>1 with start>0 - assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, 
"fl","id", "rows","3", "start","1", "group.limit","2", "group.main","true") - ,"/response=={'numFound':10,'start':1,'numFoundExact':true,'docs':[{'id':'10'},{'id':'3'},{'id':'6'}]}" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.field", + f, + "fl", + "id", + "rows", + "3", + "start", + "1", + "group.limit", + "2", + "group.main", + "true"), + "/response=={'numFound':10,'start':1,'numFoundExact':true,'docs':[{'id':'10'},{'id':'3'},{'id':'6'}]}"); ///////////////////////// group.format == simple - assertJQ(req("fq", filt, "q", "{!func}" + f2, "group", "true", "group.field", f, "fl", "id", "rows", "3", "start", "1", "group.limit", "2", "group.format", "simple") - , "/grouped/foo_i=={'matches':10,'doclist':" - + "{'numFound':10,'start':1,'numFoundExact':true,'docs':[{'id':'10'},{'id':'3'},{'id':'6'}]}}" - ); + assertJQ( + req( + "fq", + filt, + "q", + "{!func}" + f2, + "group", + "true", + "group.field", + f, + "fl", + "id", + "rows", + "3", + "start", + "1", + "group.limit", + "2", + "group.format", + "simple"), + "/grouped/foo_i=={'matches':10,'doclist':" + + "{'numFound':10,'start':1,'numFoundExact':true,'docs':[{'id':'10'},{'id':'3'},{'id':'6'}]}}"); //////////////////////// grouping where main query matches nothing - assertJQ(req("fq", filt, "q", "bogus_s:nothing", "group", "true", "group.field", f, "fl", "id", "group.limit", "2", "group.format", "simple") - , "/grouped/foo_i=={'matches':0,'doclist':{'numFound':0,'start':0,numFoundExact:true,'docs':[]}}" - ); - assertJQ(req("fq",filt, "q","bogus_s:nothing", "group","true", - "group.query","id:[2 TO 5]", - "group.query","id:[5 TO 5]", - "group.field",f, - "rows","1", - "fl","id", "group.limit","2") - ,"/grouped/id:[2 TO 5]=={'matches':0,'doclist':{'numFound':0,'start':0,numFoundExact:true,'docs':[]}}" - ,"/grouped/id:[5 TO 5]=={'matches':0,'doclist':{'numFound':0,'start':0,numFoundExact:true,'docs':[]}}" - ,"/grouped/"+f+"=={'matches':0,'groups':[]}" - ); - assertJQ(req("fq",filt, - "q","bogus_s:nothing", - "group","true", - "group.query","id:[2 TO 5]", - "group.query","id:1000", - "fl","id", - "group.limit","3") - ,"/grouped/id:[2 TO 5]=={'matches':0,'doclist':{'numFound':0,'start':0,numFoundExact:true,'docs':[]}}" - ,"/grouped/id:1000=={'matches':0,'doclist':{'numFound':0,'start':0,numFoundExact:true,'docs':[]}}" - ); - + assertJQ( + req( + "fq", + filt, + "q", + "bogus_s:nothing", + "group", + "true", + "group.field", + f, + "fl", + "id", + "group.limit", + "2", + "group.format", + "simple"), + "/grouped/foo_i=={'matches':0,'doclist':{'numFound':0,'start':0,numFoundExact:true,'docs':[]}}"); + assertJQ( + req( + "fq", + filt, + "q", + "bogus_s:nothing", + "group", + "true", + "group.query", + "id:[2 TO 5]", + "group.query", + "id:[5 TO 5]", + "group.field", + f, + "rows", + "1", + "fl", + "id", + "group.limit", + "2"), + "/grouped/id:[2 TO 5]=={'matches':0,'doclist':{'numFound':0,'start':0,numFoundExact:true,'docs':[]}}", + "/grouped/id:[5 TO 5]=={'matches':0,'doclist':{'numFound':0,'start':0,numFoundExact:true,'docs':[]}}", + "/grouped/" + f + "=={'matches':0,'groups':[]}"); + assertJQ( + req( + "fq", filt, + "q", "bogus_s:nothing", + "group", "true", + "group.query", "id:[2 TO 5]", + "group.query", "id:1000", + "fl", "id", + "group.limit", "3"), + "/grouped/id:[2 TO 5]=={'matches':0,'doclist':{'numFound':0,'start':0,numFoundExact:true,'docs':[]}}", + "/grouped/id:1000=={'matches':0,'doclist':{'numFound':0,'start':0,numFoundExact:true,'docs':[]}}"); } @Test @@ -709,137 +1512,174 @@ public 
void testGroupingNonIndexedOrStoredDocValues() throws Exception { assertU(commit()); // Facet counts based on groups - SolrQueryRequest req = req("q", "*:*", "rows", "1", "group", "true", "group.field", FOO_STRING_DOCVAL_FIELD, - "sort", FOO_STRING_DOCVAL_FIELD + " asc", "fl", "id", - "fq", "{!tag=chk}bday:[2012-12-18T00:00:00Z TO 2013-01-17T23:59:59Z]", - "facet", "true", "group.truncate", "true", "group.sort", "bday desc", - "facet.query", "{!ex=chk key=LW1}bday:[2013-01-11T00:00:00Z TO 2013-01-17T23:59:59Z]", - "facet.query", "{!ex=chk key=LM1}bday:[2012-12-18T00:00:00Z TO 2013-01-17T23:59:59Z]", - "facet.query", "{!ex=chk key=LM3}bday:[2012-10-18T00:00:00Z TO 2013-01-17T23:59:59Z]"); + SolrQueryRequest req = + req( + "q", + "*:*", + "rows", + "1", + "group", + "true", + "group.field", + FOO_STRING_DOCVAL_FIELD, + "sort", + FOO_STRING_DOCVAL_FIELD + " asc", + "fl", + "id", + "fq", + "{!tag=chk}bday:[2012-12-18T00:00:00Z TO 2013-01-17T23:59:59Z]", + "facet", + "true", + "group.truncate", + "true", + "group.sort", + "bday desc", + "facet.query", + "{!ex=chk key=LW1}bday:[2013-01-11T00:00:00Z TO 2013-01-17T23:59:59Z]", + "facet.query", + "{!ex=chk key=LM1}bday:[2012-12-18T00:00:00Z TO 2013-01-17T23:59:59Z]", + "facet.query", + "{!ex=chk key=LM3}bday:[2012-10-18T00:00:00Z TO 2013-01-17T23:59:59Z]"); assertJQ( req, - "/grouped=={'"+FOO_STRING_DOCVAL_FIELD+"':{'matches':2,'groups':[{'groupValue':'a','doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'5'}]}}]}}", - "/facet_counts=={'facet_queries':{'LW1':2,'LM1':2,'LM3':2},'facet_fields':{}," + EMPTY_FACETS + "}" - ); + "/grouped=={'" + + FOO_STRING_DOCVAL_FIELD + + "':{'matches':2,'groups':[{'groupValue':'a','doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'5'}]}}]}}", + "/facet_counts=={'facet_queries':{'LW1':2,'LM1':2,'LM3':2},'facet_fields':{}," + + EMPTY_FACETS + + "}"); } @Test public void testGroupingOnDateField() throws Exception { - assertU(add(doc("id", "1", "date_dt", "2012-11-20T00:00:00Z"))); - assertU(add(doc("id", "2", "date_dt", "2012-11-21T00:00:00Z"))); + assertU(add(doc("id", "1", "date_dt", "2012-11-20T00:00:00Z"))); + assertU(add(doc("id", "2", "date_dt", "2012-11-21T00:00:00Z"))); assertU(commit()); - assertU(add(doc("id", "3", "date_dt", "2012-11-20T00:00:00Z"))); - assertU(add(doc("id", "4", "date_dt", "2013-01-15T00:00:00Z"))); + assertU(add(doc("id", "3", "date_dt", "2012-11-20T00:00:00Z"))); + assertU(add(doc("id", "4", "date_dt", "2013-01-15T00:00:00Z"))); assertU(add(doc("id", "5"))); assertU(commit()); - ModifiableSolrParams params = params("q", "*:*", "group.limit", "10", - "group", "true", "fl", "id", "group.ngroups", "true"); - - assertJQ(req(params, "group.field", "date_dt", "sort", "id asc"), - "/grouped=={'date_dt':{'matches':5,'ngroups':4, 'groups':" + - "[{'groupValue':'2012-11-20T00:00:00Z','doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'1'},{'id':'3'}]}}," + - "{'groupValue':'2012-11-21T00:00:00Z','doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'2'}]}}," + - "{'groupValue':'2013-01-15T00:00:00Z','doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'4'}]}}," + - "{'groupValue':null,'doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'5'}]}}" + - "]}}" - ); + ModifiableSolrParams params = + params( + "q", "*:*", "group.limit", "10", "group", "true", "fl", "id", "group.ngroups", "true"); + + assertJQ( + req(params, "group.field", "date_dt", "sort", "id asc"), + 
"/grouped=={'date_dt':{'matches':5,'ngroups':4, 'groups':" + + "[{'groupValue':'2012-11-20T00:00:00Z','doclist':{'numFound':2,'start':0,numFoundExact:true,'docs':[{'id':'1'},{'id':'3'}]}}," + + "{'groupValue':'2012-11-21T00:00:00Z','doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'2'}]}}," + + "{'groupValue':'2013-01-15T00:00:00Z','doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'4'}]}}," + + "{'groupValue':null,'doclist':{'numFound':1,'start':0,numFoundExact:true,'docs':[{'id':'5'}]}}" + + "]}}"); } @Test public void testRandomGrouping() throws Exception { /** - updateJ("{\"add\":{\"doc\":{\"id\":\"77\"}}}", params("commit","true")); - assertJQ(req("q","id:77"), "/response/numFound==1"); - - Doc doc = createDocObj(types); - updateJ(toJSON(doc), params("commit","true")); - - assertJQ(req("q","id:"+doc.id), "/response/numFound==1"); - **/ - - int indexIter=atLeast(10); // make >0 to enable test - int queryIter=atLeast(50); + * updateJ("{\"add\":{\"doc\":{\"id\":\"77\"}}}", params("commit","true")); + * assertJQ(req("q","id:77"), "/response/numFound==1"); + * + *
<p>Doc doc = createDocObj(types); updateJ(toJSON(doc), params("commit","true")); + * + * <p>
assertJQ(req("q","id:"+doc.id), "/response/numFound==1"); + */ + int indexIter = atLeast(10); // make >0 to enable test + int queryIter = atLeast(50); while (--indexIter >= 0) { int indexSize = random().nextInt(25 * RANDOM_MULTIPLIER); -//indexSize=2; + // indexSize=2; List types = new ArrayList<>(); - types.add(new FldType("id",ONE_ONE, new SVal('A','Z',4,4))); - types.add(new FldType("score_f",ONE_ONE, new FVal(1,100))); // field used to score - types.add(new FldType("foo_i",ZERO_ONE, new IRange(0,indexSize))); - types.add(new FldType(FOO_STRING_FIELD,ONE_ONE, new SVal('a','z',1,2))); - types.add(new FldType(SMALL_STRING_FIELD,ZERO_ONE, new SVal('a',(char)('c'+indexSize/10),1,1))); - types.add(new FldType(SMALL_INT_FIELD,ZERO_ONE, new IRange(0,5+indexSize/10))); + types.add(new FldType("id", ONE_ONE, new SVal('A', 'Z', 4, 4))); + types.add(new FldType("score_f", ONE_ONE, new FVal(1, 100))); // field used to score + types.add(new FldType("foo_i", ZERO_ONE, new IRange(0, indexSize))); + types.add(new FldType(FOO_STRING_FIELD, ONE_ONE, new SVal('a', 'z', 1, 2))); + types.add( + new FldType( + SMALL_STRING_FIELD, ZERO_ONE, new SVal('a', (char) ('c' + indexSize / 10), 1, 1))); + types.add(new FldType(SMALL_INT_FIELD, ZERO_ONE, new IRange(0, 5 + indexSize / 10))); // non-stored non-indexed docValue enabled fields - types.add(new FldType("score_ff",ONE_ONE, new FVal(1,100))); - types.add(new FldType("foo_ii",ZERO_ONE, new IRange(0,indexSize))); - types.add(new FldType(FOO_STRING_DOCVAL_FIELD,ONE_ONE, new SVal('a','z',3,7))); + types.add(new FldType("score_ff", ONE_ONE, new FVal(1, 100))); + types.add(new FldType("foo_ii", ZERO_ONE, new IRange(0, indexSize))); + types.add(new FldType(FOO_STRING_DOCVAL_FIELD, ONE_ONE, new SVal('a', 'z', 3, 7))); types.add(new FldType("foo_bdv", ZERO_ONE, new BVal())); clearIndex(); @SuppressWarnings({"rawtypes"}) Map model = indexDocs(types, null, indexSize); - //System.out.println("############### model=" + model); + // System.out.println("############### model=" + model); // test with specific docs if (false) { clearIndex(); model.clear(); Doc d1 = createDoc(types); - d1.getValues(SMALL_STRING_FIELD).set(0,"c"); - d1.getValues(SMALL_INT_FIELD).set(0,5); + d1.getValues(SMALL_STRING_FIELD).set(0, "c"); + d1.getValues(SMALL_INT_FIELD).set(0, 5); d1.order = 0; - updateJ(toJSON(d1), params("commit","true")); + updateJ(toJSON(d1), params("commit", "true")); model.put(d1.id, d1); d1 = createDoc(types); - d1.getValues(SMALL_STRING_FIELD).set(0,"b"); - d1.getValues(SMALL_INT_FIELD).set(0,5); + d1.getValues(SMALL_STRING_FIELD).set(0, "b"); + d1.getValues(SMALL_INT_FIELD).set(0, 5); d1.order = 1; - updateJ(toJSON(d1), params("commit","false")); + updateJ(toJSON(d1), params("commit", "false")); model.put(d1.id, d1); d1 = createDoc(types); - d1.getValues(SMALL_STRING_FIELD).set(0,"c"); - d1.getValues(SMALL_INT_FIELD).set(0,5); + d1.getValues(SMALL_STRING_FIELD).set(0, "c"); + d1.getValues(SMALL_INT_FIELD).set(0, 5); d1.order = 2; - updateJ(toJSON(d1), params("commit","false")); + updateJ(toJSON(d1), params("commit", "false")); model.put(d1.id, d1); d1 = createDoc(types); - d1.getValues(SMALL_STRING_FIELD).set(0,"c"); - d1.getValues(SMALL_INT_FIELD).set(0,5); + d1.getValues(SMALL_STRING_FIELD).set(0, "c"); + d1.getValues(SMALL_INT_FIELD).set(0, 5); d1.order = 3; - updateJ(toJSON(d1), params("commit","false")); + updateJ(toJSON(d1), params("commit", "false")); model.put(d1.id, d1); d1 = createDoc(types); - d1.getValues(SMALL_STRING_FIELD).set(0,"b"); - 
d1.getValues(SMALL_INT_FIELD).set(0,2); + d1.getValues(SMALL_STRING_FIELD).set(0, "b"); + d1.getValues(SMALL_INT_FIELD).set(0, 2); d1.order = 4; - updateJ(toJSON(d1), params("commit","true")); + updateJ(toJSON(d1), params("commit", "true")); model.put(d1.id, d1); } - - for (int qiter=0; qiter sortComparator = createSort(schema, types, stringSortA); String sortStr = stringSortA[0]; - Comparator groupComparator = random().nextBoolean() ? sortComparator : createSort(schema, types, stringSortA); + Comparator groupComparator = + random().nextBoolean() ? sortComparator : createSort(schema, types, stringSortA); String groupSortStr = stringSortA[0]; // since groupSortStr defaults to sortStr, we need to normalize null to "score desc" if @@ -847,15 +1687,22 @@ public void testRandomGrouping() throws Exception { if (groupSortStr == null && groupSortStr != sortStr) { groupSortStr = "score desc"; } - - // Test specific case + + // Test specific case if (false) { - groupField=SMALL_INT_FIELD; - sortComparator=createComparator(Arrays.asList(createComparator(SMALL_STRING_FIELD, true, true, false, true))); + groupField = SMALL_INT_FIELD; + sortComparator = + createComparator( + Arrays.asList(createComparator(SMALL_STRING_FIELD, true, true, false, true))); sortStr = SMALL_STRING_FIELD + " asc"; - groupComparator = createComparator(Arrays.asList(createComparator(SMALL_STRING_FIELD, true, true, false, false))); + groupComparator = + createComparator( + Arrays.asList(createComparator(SMALL_STRING_FIELD, true, true, false, false))); groupSortStr = SMALL_STRING_FIELD + " asc"; - rows=1; start=0; group_offset=1; group_limit=1; + rows = 1; + start = 0; + group_offset = 1; + group_limit = 1; } @SuppressWarnings({"rawtypes"}) @@ -870,14 +1717,20 @@ public void testRandomGrouping() throws Exception { // if sort != group.sort, we need to find the max doc by "sort" if (groupComparator != sortComparator) { - for (Grp grp : groups.values()) grp.setMaxDoc(sortComparator); + for (Grp grp : groups.values()) grp.setMaxDoc(sortComparator); } List sortedGroups = new ArrayList<>(groups.values()); - Collections.sort(sortedGroups, groupComparator==sortComparator ? createFirstDocComparator(sortComparator) : createMaxDocComparator(sortComparator)); + Collections.sort( + sortedGroups, + groupComparator == sortComparator + ? createFirstDocComparator(sortComparator) + : createMaxDocComparator(sortComparator)); boolean includeNGroups = random().nextBoolean(); - Object modelResponse = buildGroupedResult(schema, sortedGroups, start, rows, group_offset, group_limit, includeNGroups); + Object modelResponse = + buildGroupedResult( + schema, sortedGroups, start, rows, group_offset, group_limit, includeNGroups); boolean truncateGroups = random().nextBoolean(); Map facetCounts = new TreeMap<>(); @@ -899,7 +1752,8 @@ public void testRandomGrouping() throws Exception { continue; } - for (@SuppressWarnings({"rawtypes"})Comparable field : doc.getValues(FOO_STRING_FIELD)) { + for (@SuppressWarnings({"rawtypes"}) + Comparable field : doc.getValues(FOO_STRING_FIELD)) { String key = field.toString(); boolean exists = facetCounts.containsKey(key); int count = exists ? facetCounts.get(key) : 0; @@ -916,22 +1770,69 @@ public void testRandomGrouping() throws Exception { int randomPercentage = random().nextInt(101); // TODO: create a random filter too - SolrQueryRequest req = req("group","true","wt","json","indent","true", "echoParams","all", "q","{!func}score_f", "group.field",groupField - ,sortStr==null ? "nosort":"sort", sortStr ==null ? 
"": sortStr, "fl", "*,score_ff,foo_ii,foo_bdv," + FOO_STRING_DOCVAL_FIELD // only docValued fields are not returned by default - ,(groupSortStr == null || groupSortStr == sortStr) ? "noGroupsort":"group.sort", groupSortStr==null ? "": groupSortStr - ,"rows",""+rows, "start",""+start, "group.offset",""+group_offset, "group.limit",""+group_limit, - GroupParams.GROUP_CACHE_PERCENTAGE, Integer.toString(randomPercentage), GroupParams.GROUP_TOTAL_COUNT, includeNGroups ? "true" : "false", - "facet", "true", "facet.sort", "index", "facet.limit", "-1", "facet.field", FOO_STRING_FIELD, - GroupParams.GROUP_TRUNCATE, truncateGroups ? "true" : "false", "facet.mincount", "1", "facet.method", "fcs" // to avoid FC insanity - ); + SolrQueryRequest req = + req( + "group", + "true", + "wt", + "json", + "indent", + "true", + "echoParams", + "all", + "q", + "{!func}score_f", + "group.field", + groupField, + sortStr == null ? "nosort" : "sort", + sortStr == null ? "" : sortStr, + "fl", + "*,score_ff,foo_ii,foo_bdv," + + FOO_STRING_DOCVAL_FIELD // only docValued fields are not returned by default + , + (groupSortStr == null || groupSortStr == sortStr) ? "noGroupsort" : "group.sort", + groupSortStr == null ? "" : groupSortStr, + "rows", + "" + rows, + "start", + "" + start, + "group.offset", + "" + group_offset, + "group.limit", + "" + group_limit, + GroupParams.GROUP_CACHE_PERCENTAGE, + Integer.toString(randomPercentage), + GroupParams.GROUP_TOTAL_COUNT, + includeNGroups ? "true" : "false", + "facet", + "true", + "facet.sort", + "index", + "facet.limit", + "-1", + "facet.field", + FOO_STRING_FIELD, + GroupParams.GROUP_TRUNCATE, + truncateGroups ? "true" : "false", + "facet.mincount", + "1", + "facet.method", + "fcs" // to avoid FC insanity + ); String strResponse = h.query(req); Object realResponse = Utils.fromJSONString(strResponse); String err = JSONTestUtil.matchObj("/grouped/" + groupField, realResponse, modelResponse); if (err != null) { - log.error("GROUPING MISMATCH ({}}): {}\n\trequest={}\n\tresult={}\n\texpected={}\n\tsorted_model={}" - , queryIter, err, req, strResponse, Utils.toJSONString(modelResponse), sortedGroups); + log.error( + "GROUPING MISMATCH ({}}): {}\n\trequest={}\n\tresult={}\n\texpected={}\n\tsorted_model={}", + queryIter, + err, + req, + strResponse, + Utils.toJSONString(modelResponse), + sortedGroups); // re-execute the request... good for putting a breakpoint here for debugging String rsp = h.query(req); @@ -940,46 +1841,63 @@ public void testRandomGrouping() throws Exception { } // assert post / pre grouping facets - err = JSONTestUtil.matchObj("/facet_counts/facet_fields/"+FOO_STRING_FIELD, realResponse, expectedFacetResponse); + err = + JSONTestUtil.matchObj( + "/facet_counts/facet_fields/" + FOO_STRING_FIELD, + realResponse, + expectedFacetResponse); if (err != null) { - log.error("GROUPING MISMATCH ({}): {}\n\trequest={}\n\tresult={}\n\texpected={}" - , queryIter, err, req, strResponse, Utils.toJSONString(expectedFacetResponse)); + log.error( + "GROUPING MISMATCH ({}): {}\n\trequest={}\n\tresult={}\n\texpected={}", + queryIter, + err, + req, + strResponse, + Utils.toJSONString(expectedFacetResponse)); // re-execute the request... 
good for putting a breakpoint here for debugging h.query(req); fail(err); } } // end query iter } // end index iter - } @Test public void testGroupWithMinExactHitCount() throws Exception { final int NUM_DOCS = 20; - for (int i = 0; i < NUM_DOCS ; i++) { + for (int i = 0; i < NUM_DOCS; i++) { assertU(adoc("id", String.valueOf(i), FOO_STRING_FIELD, "Book1")); assertU(commit()); } ModifiableSolrParams params = new ModifiableSolrParams(); params.set("q", FOO_STRING_FIELD + ":Book1"); - assertQ(req(params, CommonParams.MIN_EXACT_COUNT, "2", CommonParams.ROWS, "2") - ,"/response/result[@numFoundExact='false']" - ); + assertQ( + req(params, CommonParams.MIN_EXACT_COUNT, "2", CommonParams.ROWS, "2"), + "/response/result[@numFoundExact='false']"); params.set("group", true); params.set("group.field", FOO_STRING_FIELD); - assertQ(req(params) - ,"/response/lst[@name='grouped']/lst[@name='"+FOO_STRING_FIELD+"']/arr[@name='groups']/lst[1]/result[@numFoundExact='true']" - ); - - assertQ(req(params, CommonParams.MIN_EXACT_COUNT, "2", CommonParams.ROWS, "2") - ,"/response/lst[@name='grouped']/lst[@name='"+FOO_STRING_FIELD+"']/arr[@name='groups']/lst[1]/result[@numFoundExact='true']" - ); - - + assertQ( + req(params), + "/response/lst[@name='grouped']/lst[@name='" + + FOO_STRING_FIELD + + "']/arr[@name='groups']/lst[1]/result[@numFoundExact='true']"); + + assertQ( + req(params, CommonParams.MIN_EXACT_COUNT, "2", CommonParams.ROWS, "2"), + "/response/lst[@name='grouped']/lst[@name='" + + FOO_STRING_FIELD + + "']/arr[@name='groups']/lst[1]/result[@numFoundExact='true']"); } - public static Object buildGroupedResult(IndexSchema schema, List sortedGroups, int start, int rows, int group_offset, int group_limit, boolean includeNGroups) { - Map result = new LinkedHashMap<>(); + public static Object buildGroupedResult( + IndexSchema schema, + List sortedGroups, + int start, + int rows, + int group_offset, + int group_limit, + boolean includeNGroups) { + Map result = new LinkedHashMap<>(); long matches = 0; for (Grp grp : sortedGroups) { @@ -989,34 +1907,36 @@ public static Object buildGroupedResult(IndexSchema schema, List sortedGrou if (includeNGroups) { result.put("ngroups", sortedGroups.size()); } - List> groupList = new ArrayList<>(); + List> groupList = new ArrayList<>(); result.put("groups", groupList); - for (int i=start; i= rows) break; // directly test rather than calculating, so we can catch any calc errors in the real code - Map group = new LinkedHashMap<>(); + for (int i = start; i < sortedGroups.size(); i++) { + if (rows != -1 && groupList.size() >= rows) { + // directly test rather than calculating, so we can catch any calc errors in the real code + break; + } + Map group = new LinkedHashMap<>(); groupList.add(group); Grp grp = sortedGroups.get(i); group.put("groupValue", grp.groupValue); - Map resultSet = new LinkedHashMap<>(); + Map resultSet = new LinkedHashMap<>(); group.put("doclist", resultSet); resultSet.put("numFound", grp.docs.size()); resultSet.put("start", group_offset); resultSet.put("numFoundExact", true); - List> docs = new ArrayList<>(); + List> docs = new ArrayList<>(); resultSet.put("docs", docs); - for (int j=group_offset; j= group_limit) break; - docs.add( grp.docs.get(j).toObject(schema) ); + docs.add(grp.docs.get(j).toObject(schema)); } } return result; } - public static Comparator createMaxDocComparator(final Comparator docComparator) { return (o1, o2) -> { // all groups should have at least one doc @@ -1066,13 +1986,12 @@ public static Map groupBy(Collection docs, String field) 
{ return groups; } - public static class Grp { @SuppressWarnings({"rawtypes"}) public Comparable groupValue; - public List docs; - public Doc maxDoc; // the document highest according to the "sort" param + public List docs; + public Doc maxDoc; // the document highest according to the "sort" param public void setMaxDoc(Comparator comparator) { Doc[] arr = docs.toArray(new Doc[docs.size()]); @@ -1082,9 +2001,7 @@ public void setMaxDoc(Comparator comparator) { @Override public String toString() { - return "{groupValue="+groupValue+",docs="+docs+"}"; + return "{groupValue=" + groupValue + ",docs=" + docs + "}"; } } } - - diff --git a/solr/core/src/test/org/apache/solr/TestHighlightDedupGrouping.java b/solr/core/src/test/org/apache/solr/TestHighlightDedupGrouping.java index 1b707a5efd2..4dc4f2e0f18 100644 --- a/solr/core/src/test/org/apache/solr/TestHighlightDedupGrouping.java +++ b/solr/core/src/test/org/apache/solr/TestHighlightDedupGrouping.java @@ -17,7 +17,6 @@ package org.apache.solr; import java.io.IOException; - import org.apache.lucene.search.TimeLimitingCollector; import org.apache.lucene.util.TestUtil; import org.apache.solr.client.solrj.SolrServerException; @@ -27,8 +26,8 @@ import org.junit.Test; /** - * Tests that highlighting doesn't break on grouped documents - * with duplicate unique key fields stored on multiple shards. + * Tests that highlighting doesn't break on grouped documents with duplicate unique key fields + * stored on multiple shards. */ public class TestHighlightDedupGrouping extends BaseDistributedSearchTestCase { @@ -41,7 +40,7 @@ public static void afterClass() throws Exception { TimeLimitingCollector.getGlobalTimerThread().stopTimer(); TimeLimitingCollector.getGlobalTimerThread().join(); } - + @Test @ShardsFixed(num = 2) public void test() throws Exception { @@ -55,24 +54,33 @@ private void basicTest() throws Exception { handle.clear(); handle.put("timestamp", SKIPVAL); - handle.put("grouped", UNORDERED); // distrib grouping doesn't guarantee order of top level group commands + // distrib grouping doesn't guarantee order of top level group commands + handle.put("grouped", UNORDERED); int docid = 1; int group = 5; - for (int shard = 0 ; shard < getShardCount(); ++shard) { + for (int shard = 0; shard < getShardCount(); ++shard) { addDoc(docid, group, shard); // add the same doc to both shards clients.get(shard).commit(); } - QueryResponse rsp = queryServer(params - ("q", id_s1 + ":" + docid, - "shards", shards, - "group", "true", - "group.field", id_s1, - "group.limit", Integer.toString(getShardCount()), - "hl", "true", - "hl.fl", id_s1 - )); + QueryResponse rsp = + queryServer( + params( + "q", + id_s1 + ":" + docid, + "shards", + shards, + "group", + "true", + "group.field", + id_s1, + "group.limit", + Integer.toString(getShardCount()), + "hl", + "true", + "hl.fl", + id_s1)); // The number of highlit documents should be the same as the de-duplicated docs assertEquals(1, rsp.getHighlighting().values().size()); @@ -84,18 +92,19 @@ private void randomizedTest() throws Exception { handle.clear(); handle.put("timestamp", SKIPVAL); - handle.put("grouped", UNORDERED); // distrib grouping doesn't guarantee order of top level group commands + // distrib grouping doesn't guarantee order of top level group commands + handle.put("grouped", UNORDERED); int numDocs = TestUtil.nextInt(random(), 100, 1000); int numGroups = TestUtil.nextInt(random(), 1, numDocs / 50); int[] docsInGroup = new int[numGroups + 1]; int percentDuplicates = TestUtil.nextInt(random(), 1, 25); - for 
(int docid = 0 ; docid < numDocs ; ++docid) { + for (int docid = 0; docid < numDocs; ++docid) { int group = TestUtil.nextInt(random(), 1, numGroups); ++docsInGroup[group]; boolean makeDuplicate = 0 == TestUtil.nextInt(random(), 0, numDocs / percentDuplicates); if (makeDuplicate) { - for (int shard = 0 ; shard < getShardCount(); ++shard) { + for (int shard = 0; shard < getShardCount(); ++shard) { addDoc(docid, group, shard); } } else { @@ -103,18 +112,38 @@ private void randomizedTest() throws Exception { addDoc(docid, group, shard); } } - for (int shard = 0 ; shard < getShardCount(); ++shard) { + for (int shard = 0; shard < getShardCount(); ++shard) { clients.get(shard).commit(); } - for (int group = 1 ; group <= numGroups ; ++group) { - QueryResponse rsp = queryServer(params - ("q", group_ti1 + ":" + group + " AND " + id_s1 + ":[* TO *]", "start", "0", "rows", "" + numDocs, - "fl", id_s1 + "," + shard_i1, "sort", id_s1 + " asc", "shards", shards, - "group", "true", "group.field", id_s1 - ,"group.limit", "" + numDocs - ,"hl", "true", "hl.fl", "*", "hl.requireFieldMatch", "true" - )); + for (int group = 1; group <= numGroups; ++group) { + QueryResponse rsp = + queryServer( + params( + "q", + group_ti1 + ":" + group + " AND " + id_s1 + ":[* TO *]", + "start", + "0", + "rows", + "" + numDocs, + "fl", + id_s1 + "," + shard_i1, + "sort", + id_s1 + " asc", + "shards", + shards, + "group", + "true", + "group.field", + id_s1, + "group.limit", + "" + numDocs, + "hl", + "true", + "hl.fl", + "*", + "hl.requireFieldMatch", + "true")); // The number of highlit documents should be the same as the de-duplicated docs for this group assertEquals(docsInGroup[group], rsp.getHighlighting().values().size()); } @@ -123,7 +152,7 @@ private void randomizedTest() throws Exception { private void addDoc(int docid, int group, int shard) throws IOException, SolrServerException { SolrInputDocument doc = new SolrInputDocument(); doc.addField(id, docid); - doc.addField(id_s1, docid); // string copy of the id for highlighting + doc.addField(id_s1, docid); // string copy of the id for highlighting doc.addField(group_ti1, group); doc.addField(shard_i1, shard); clients.get(shard).add(doc); diff --git a/solr/core/src/test/org/apache/solr/TestJoin.java b/solr/core/src/test/org/apache/solr/TestJoin.java index 5a9b8c46e97..f966a69bb09 100644 --- a/solr/core/src/test/org/apache/solr/TestJoin.java +++ b/solr/core/src/test/org/apache/solr/TestJoin.java @@ -26,7 +26,6 @@ import java.util.List; import java.util.Map; import java.util.Set; - import org.apache.solr.common.SolrException; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.util.Utils; @@ -45,12 +44,12 @@ public static void beforeTests() throws Exception { System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_ System.setProperty("solr.filterCache.async", "true"); - if (System.getProperty("solr.tests.IntegerFieldType").contains("Point")) { // all points change at the same time - // point fields need docvalues + // all points change at the same time point fields need docvalues + if (System.getProperty("solr.tests.IntegerFieldType").contains("Point")) { System.setProperty("solr.tests.numeric.dv", "true"); } - initCore("solrconfig.xml","schema12.xml"); + initCore("solrconfig.xml", "schema12.xml"); } private static final String PRIMARY_DEPT_FIELD = "primary_dept_indexed_sdv"; @@ -58,16 +57,79 @@ public static void beforeTests() throws Exception { private static final String DEPT_ID_FIELD = 
"dept_id_indexed_sdv"; private void indexEmployeeDocs() { - assertU(add(doc("id", "1","name", "john", "title", "Director", PRIMARY_DEPT_FIELD, "Engineering", DEPT_FIELD,"Engineering"))); - assertU(add(doc("id", "2","name", "mark", "title", "VP", PRIMARY_DEPT_FIELD, "Marketing", DEPT_FIELD,"Marketing"))); - assertU(add(doc("id", "3","name", "nancy", "title", "MTS", PRIMARY_DEPT_FIELD, "Sales", DEPT_FIELD,"Sales"))); - assertU(add(doc("id", "4","name", "dave", "title", "MTS", PRIMARY_DEPT_FIELD, "Support", DEPT_FIELD,"Support", DEPT_FIELD,"Engineering"))); - assertU(add(doc("id", "5","name", "tina", "title", "VP", PRIMARY_DEPT_FIELD, "Engineering", DEPT_FIELD,"Engineering"))); - - assertU(add(doc("id","10", DEPT_ID_FIELD, "Engineering", "text","These guys develop stuff"))); - assertU(add(doc("id","11", DEPT_ID_FIELD, "Marketing", "text","These guys make you look good"))); - assertU(add(doc("id","12", DEPT_ID_FIELD, "Sales", "text","These guys sell stuff"))); - assertU(add(doc("id","13", DEPT_ID_FIELD, "Support", "text","These guys help customers"))); + assertU( + add( + doc( + "id", + "1", + "name", + "john", + "title", + "Director", + PRIMARY_DEPT_FIELD, + "Engineering", + DEPT_FIELD, + "Engineering"))); + assertU( + add( + doc( + "id", + "2", + "name", + "mark", + "title", + "VP", + PRIMARY_DEPT_FIELD, + "Marketing", + DEPT_FIELD, + "Marketing"))); + assertU( + add( + doc( + "id", + "3", + "name", + "nancy", + "title", + "MTS", + PRIMARY_DEPT_FIELD, + "Sales", + DEPT_FIELD, + "Sales"))); + assertU( + add( + doc( + "id", + "4", + "name", + "dave", + "title", + "MTS", + PRIMARY_DEPT_FIELD, + "Support", + DEPT_FIELD, + "Support", + DEPT_FIELD, + "Engineering"))); + assertU( + add( + doc( + "id", + "5", + "name", + "tina", + "title", + "VP", + PRIMARY_DEPT_FIELD, + "Engineering", + DEPT_FIELD, + "Engineering"))); + + assertU(add(doc("id", "10", DEPT_ID_FIELD, "Engineering", "text", "These guys develop stuff"))); + assertU( + add(doc("id", "11", DEPT_ID_FIELD, "Marketing", "text", "These guys make you look good"))); + assertU(add(doc("id", "12", DEPT_ID_FIELD, "Sales", "text", "These guys sell stuff"))); + assertU(add(doc("id", "13", DEPT_ID_FIELD, "Support", "text", "These guys help customers"))); assertU(commit()); } @@ -78,65 +140,109 @@ private void indexEmployeeDocs() { @Test public void testJoinAllMethods() throws Exception { indexEmployeeDocs(); - ModifiableSolrParams p = params("sort","id asc"); + ModifiableSolrParams p = params("sort", "id asc"); - assertJQ(req(p, "q", buildJoinRequest(DEPT_FIELD, DEPT_ID_FIELD, "title:MTS"), "fl","id") - ,"/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}" - ); + assertJQ( + req(p, "q", buildJoinRequest(DEPT_FIELD, DEPT_ID_FIELD, "title:MTS"), "fl", "id"), + "/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}"); // empty from - assertJQ(req(p, "q", buildJoinRequest("noexist_ss_dv", DEPT_ID_FIELD, "*:*", "fl","id")) - ,"/response=={'numFound':0,'start':0,'numFoundExact':true,'docs':[]}" - ); + assertJQ( + req(p, "q", buildJoinRequest("noexist_ss_dv", DEPT_ID_FIELD, "*:*", "fl", "id")), + "/response=={'numFound':0,'start':0,'numFoundExact':true,'docs':[]}"); // empty to - assertJQ(req(p, "q", buildJoinRequest(DEPT_FIELD, "noexist_ss_dv", "*:*"), "fl","id") - ,"/response=={'numFound':0,'start':0,'numFoundExact':true,'docs':[]}" - ); + assertJQ( + req(p, "q", buildJoinRequest(DEPT_FIELD, "noexist_ss_dv", "*:*"), "fl", "id"), + 
"/response=={'numFound':0,'start':0,'numFoundExact':true,'docs':[]}"); // self join... return everyone in same dept(s) as Dave - assertJQ(req(p, "q", buildJoinRequest(DEPT_FIELD, DEPT_FIELD, "name:dave"), "fl","id") - ,"/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}" - ); + assertJQ( + req(p, "q", buildJoinRequest(DEPT_FIELD, DEPT_FIELD, "name:dave"), "fl", "id"), + "/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}"); // from single-value to multi-value - assertJQ(req(p, "q", buildJoinRequest(DEPT_ID_FIELD, DEPT_FIELD, "text:develop"), "fl","id") - ,"/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}" - ); + assertJQ( + req(p, "q", buildJoinRequest(DEPT_ID_FIELD, DEPT_FIELD, "text:develop"), "fl", "id"), + "/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}"); // from multi-value to single-value - assertJQ(req(p, "q",buildJoinRequest(DEPT_FIELD, DEPT_ID_FIELD, "title:MTS"), "fl","id", "debugQuery","true") - ,"/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}" - ); + assertJQ( + req( + p, + "q", + buildJoinRequest(DEPT_FIELD, DEPT_ID_FIELD, "title:MTS"), + "fl", + "id", + "debugQuery", + "true"), + "/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}"); // expected outcome for a sub query matching dave joined against departments final String davesDepartments = "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'13'}]}"; // straight forward query - assertJQ(req(p, "q", buildJoinRequest(DEPT_FIELD, DEPT_ID_FIELD, "name:dave"), "fl","id"), + assertJQ( + req(p, "q", buildJoinRequest(DEPT_FIELD, DEPT_ID_FIELD, "name:dave"), "fl", "id"), davesDepartments); // variable deref in 'from' query - assertJQ(req(p, "q", buildJoinRequest(DEPT_FIELD, DEPT_ID_FIELD, "$qq"), "qq","{!dismax}dave", "qf","name", - "fl","id", "debugQuery","true"), + assertJQ( + req( + p, + "q", + buildJoinRequest(DEPT_FIELD, DEPT_ID_FIELD, "$qq"), + "qq", + "{!dismax}dave", + "qf", + "name", + "fl", + "id", + "debugQuery", + "true"), davesDepartments); // variable deref in 'from' query (w/ localparams) - assertJQ(req(p, "q", buildJoinRequest(DEPT_FIELD, DEPT_ID_FIELD, "$qq"), "qq","{!dismax qf=name}dave", - "fl","id", "debugQuery","true"), + assertJQ( + req( + p, + "q", + buildJoinRequest(DEPT_FIELD, DEPT_ID_FIELD, "$qq"), + "qq", + "{!dismax qf=name}dave", + "fl", + "id", + "debugQuery", + "true"), davesDepartments); // defType local param to control sub-query parsing - assertJQ(req(p, "q", buildJoinRequest(DEPT_FIELD, DEPT_ID_FIELD, "dave", "defType=dismax"), "qf","name", - "fl","id", "debugQuery","true"), + assertJQ( + req( + p, + "q", + buildJoinRequest(DEPT_FIELD, DEPT_ID_FIELD, "dave", "defType=dismax"), + "qf", + "name", + "fl", + "id", + "debugQuery", + "true"), davesDepartments); // find people that develop stuff - but limit via filter query to a name of "john" // this tests filters being pushed down to queries (SOLR-3062) - assertJQ(req(p, "q", buildJoinRequest(DEPT_ID_FIELD, DEPT_FIELD, "text:develop"), "fl","id", "fq", "name:john") - ,"/response=={'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'1'}]}" - ); + assertJQ( + req( + p, + "q", + buildJoinRequest(DEPT_ID_FIELD, DEPT_FIELD, "text:develop"), + "fl", + "id", + "fq", + "name:john"), + 
"/response=={'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'1'}]}"); } /* @@ -145,68 +251,92 @@ public void testJoinAllMethods() throws Exception { @Test public void testTopLevelDVJoin() throws Exception { indexEmployeeDocs(); - ModifiableSolrParams p = params("sort","id asc"); + ModifiableSolrParams p = params("sort", "id asc"); // "from" field missing docValues - expectThrows(SolrException.class, () -> { - h.query(req(p, "q", "{!join from=nodocvalues_s to=dept_ss_dv method=topLevelDV}*:*", "fl","id")); - }); + expectThrows( + SolrException.class, + () -> { + h.query( + req( + p, + "q", + "{!join from=nodocvalues_s to=dept_ss_dv method=topLevelDV}*:*", + "fl", + "id")); + }); // "to" field missing docValues - expectThrows(SolrException.class, () -> { - h.query(req(p, "q", "{!join from=dept_ss_dv to=nodocvalues_s method=topLevelDV}*:*", "fl","id")); - }); + expectThrows( + SolrException.class, + () -> { + h.query( + req( + p, + "q", + "{!join from=dept_ss_dv to=nodocvalues_s method=topLevelDV}*:*", + "fl", + "id")); + }); } - @Test public void testIndexJoin() throws Exception { indexEmployeeDocs(); - ModifiableSolrParams p = params("sort","id asc"); + ModifiableSolrParams p = params("sort", "id asc"); // Debugging information - assertJQ(req(p, "q", "{!join from=dept_ss_dv to=dept_id_indexed_sdv}title:MTS", "fl","id", "debugQuery","true") - ,"/debug/join/{!join from=dept_ss_dv to=dept_id_indexed_sdv}title:MTS=={'_MATCH_':'fromSetSize,toSetSize', 'fromSetSize':2, 'toSetSize':3}" - ); + assertJQ( + req( + p, + "q", + "{!join from=dept_ss_dv to=dept_id_indexed_sdv}title:MTS", + "fl", + "id", + "debugQuery", + "true"), + "/debug/join/{!join from=dept_ss_dv to=dept_id_indexed_sdv}title:MTS=={'_MATCH_':'fromSetSize,toSetSize', 'fromSetSize':2, 'toSetSize':3}"); // non-DV/text field. - assertJQ(req(p, "q","{!join from=title to=title}name:dave", "fl","id") - ,"/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'3'},{'id':'4'}]}" - ); + assertJQ( + req(p, "q", "{!join from=title to=title}name:dave", "fl", "id"), + "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'3'},{'id':'4'}]}"); } - @Test @SuppressWarnings({"unchecked"}) public void testRandomJoin() throws Exception { - int indexIter=50 * RANDOM_MULTIPLIER; - int queryIter=50 * RANDOM_MULTIPLIER; + int indexIter = 50 * RANDOM_MULTIPLIER; + int queryIter = 50 * RANDOM_MULTIPLIER; // groups of fields that have any chance of matching... used to // increase test effectiveness by avoiding 0 resultsets much of the time. 
- String[][] compat = new String[][] { - {"small_s","small2_s","small2_ss","small3_ss"}, - {"small_i","small2_i","small2_is","small3_is", "small_i_dv", "small_is_dv"} - }; - + String[][] compat = + new String[][] { + {"small_s", "small2_s", "small2_ss", "small3_ss"}, + {"small_i", "small2_i", "small2_is", "small3_is", "small_i_dv", "small_is_dv"} + }; while (--indexIter >= 0) { int indexSize = random().nextInt(20 * RANDOM_MULTIPLIER); List types = new ArrayList<>(); - types.add(new FldType("id",ONE_ONE, new SVal('A','Z',4,4))); - types.add(new FldType("score_f",ONE_ONE, new FVal(1,100))); // field used to score - types.add(new FldType("small_s",ZERO_ONE, new SVal('a',(char)('c'+indexSize/3),1,1))); - types.add(new FldType("small2_s",ZERO_ONE, new SVal('a',(char)('c'+indexSize/3),1,1))); - types.add(new FldType("small2_ss",ZERO_TWO, new SVal('a',(char)('c'+indexSize/3),1,1))); - types.add(new FldType("small3_ss",new IRange(0,25), new SVal('A','z',1,1))); - types.add(new FldType("small_i",ZERO_ONE, new IRange(0,5+indexSize/3))); - types.add(new FldType("small2_i",ZERO_ONE, new IRange(0,5+indexSize/3))); - types.add(new FldType("small2_is",ZERO_TWO, new IRange(0,5+indexSize/3))); - types.add(new FldType("small3_is",new IRange(0,25), new IRange(0,100))); - types.add(new FldType("small_i_dv",ZERO_ONE, new IRange(0,5+indexSize/3))); - types.add(new FldType("small_is_dv",ZERO_ONE, new IRange(0,5+indexSize/3))); + types.add(new FldType("id", ONE_ONE, new SVal('A', 'Z', 4, 4))); + types.add(new FldType("score_f", ONE_ONE, new FVal(1, 100))); // field used to score + types.add( + new FldType("small_s", ZERO_ONE, new SVal('a', (char) ('c' + indexSize / 3), 1, 1))); + types.add( + new FldType("small2_s", ZERO_ONE, new SVal('a', (char) ('c' + indexSize / 3), 1, 1))); + types.add( + new FldType("small2_ss", ZERO_TWO, new SVal('a', (char) ('c' + indexSize / 3), 1, 1))); + types.add(new FldType("small3_ss", new IRange(0, 25), new SVal('A', 'z', 1, 1))); + types.add(new FldType("small_i", ZERO_ONE, new IRange(0, 5 + indexSize / 3))); + types.add(new FldType("small2_i", ZERO_ONE, new IRange(0, 5 + indexSize / 3))); + types.add(new FldType("small2_is", ZERO_TWO, new IRange(0, 5 + indexSize / 3))); + types.add(new FldType("small3_is", new IRange(0, 25), new IRange(0, 100))); + types.add(new FldType("small_i_dv", ZERO_ONE, new IRange(0, 5 + indexSize / 3))); + types.add(new FldType("small_is_dv", ZERO_ONE, new IRange(0, 5 + indexSize / 3))); clearIndex(); @SuppressWarnings({"rawtypes"}) @@ -214,7 +344,7 @@ public void testRandomJoin() throws Exception { @SuppressWarnings({"rawtypes"}) Map>> pivots = new HashMap<>(); - for (int qiter=0; qiter> pivot = pivots.get(fromField+"/"+toField); + Map> pivot = pivots.get(fromField + "/" + toField); if (pivot == null) { pivot = createJoinMap(model, fromField, toField); - pivots.put(fromField+"/"+toField, pivot); + pivots.put(fromField + "/" + toField, pivot); } Collection fromDocs = model.values(); @SuppressWarnings({"rawtypes"}) Set docs = join(fromDocs, pivot); List docList = new ArrayList<>(docs.size()); - for (@SuppressWarnings({"rawtypes"})Comparable id : docs) docList.add(model.get(id)); - Collections.sort(docList, createComparator("_docid_",true,false,false,false)); + for (@SuppressWarnings({"rawtypes"}) Comparable id : docs) docList.add(model.get(id)); + Collections.sort(docList, createComparator("_docid_", true, false, false, false)); List sortedDocs = new ArrayList<>(); for (Doc doc : docList) { if (sortedDocs.size() >= 10) break; 
sortedDocs.add(doc.toObject(h.getCore().getLatestSchema())); } - Map resultSet = new LinkedHashMap<>(); + Map resultSet = new LinkedHashMap<>(); resultSet.put("numFound", docList.size()); resultSet.put("start", 0); resultSet.put("numFoundExact", true); @@ -259,34 +389,47 @@ public void testRandomJoin() throws Exception { // todo: use different join queries for better coverage - SolrQueryRequest req = req("wt","json","indent","true", "echoParams","all", - "q","{!join from="+fromField+" to="+toField - + (random().nextInt(4)==0 ? " fromIndex=collection1" : "") - +"}*:*" - ); + SolrQueryRequest req = + req( + "wt", + "json", + "indent", + "true", + "echoParams", + "all", + "q", + "{!join from=" + + fromField + + " to=" + + toField + + (random().nextInt(4) == 0 ? " fromIndex=collection1" : "") + + "}*:*"); String strResponse = h.query(req); Object realResponse = Utils.fromJSONString(strResponse); String err = JSONTestUtil.matchObj("/response", realResponse, resultSet); if (err != null) { - log.error("JOIN MISMATCH: {}\n\trequest={}\n\tresult={}\n\texpected={}\n\tmodel={}" - , err, req, strResponse, Utils.toJSONString(resultSet), model - ); + log.error( + "JOIN MISMATCH: {}\n\trequest={}\n\tresult={}\n\texpected={}\n\tmodel={}", + err, + req, + strResponse, + Utils.toJSONString(resultSet), + model); // re-execute the request... good for putting a breakpoint here for debugging String rsp = h.query(req); fail(err); } - } } } - @SuppressWarnings({"rawtypes"}) - Map> createJoinMap(Map model, String fromField, String toField) { + Map> createJoinMap( + Map model, String fromField, String toField) { Map> id_to_id = new HashMap<>(); Map> value_to_id = invertField(model, toField); @@ -303,15 +446,13 @@ Map> createJoinMap(Map model, Strin ids = new HashSet<>(); id_to_id.put(fromId, ids); } - for (Comparable toId : toIds) - ids.add(toId); + for (Comparable toId : toIds) ids.add(toId); } } return id_to_id; } - @SuppressWarnings({"rawtypes"}) Set join(Collection input, Map> joinMap) { @SuppressWarnings({"rawtypes"}) @@ -325,9 +466,13 @@ Set join(Collection input, Map> joi return ids; } - private static String buildJoinRequest(String fromField, String toField, String fromQuery, String... otherLocalParams) { + private static String buildJoinRequest( + String fromField, String toField, String fromQuery, String... otherLocalParams) { final String baseJoinParams = "from=" + fromField + " to=" + toField + " v=" + fromQuery; - final String optionalParamsJoined = (otherLocalParams != null && otherLocalParams.length > 0) ? String.join(" ", otherLocalParams) : " "; + final String optionalParamsJoined = + (otherLocalParams != null && otherLocalParams.length > 0) + ? 
String.join(" ", otherLocalParams) + : " "; final String allProvidedParams = baseJoinParams + " " + optionalParamsJoined; final int joinMethod = random().nextInt(4); @@ -338,7 +483,7 @@ private static String buildJoinRequest(String fromField, String toField, String return "{!join " + allProvidedParams + " method=index}"; case 2: // method=score return "{!join " + allProvidedParams + " method=dvWithScore score=none}"; - default: // method=toplevel + default: // method=toplevel return "{!join " + allProvidedParams + " method=topLevelDV}"; } } diff --git a/solr/core/src/test/org/apache/solr/TestRandomDVFaceting.java b/solr/core/src/test/org/apache/solr/TestRandomDVFaceting.java index 401b809e15d..e52b1e43de0 100644 --- a/solr/core/src/test/org/apache/solr/TestRandomDVFaceting.java +++ b/solr/core/src/test/org/apache/solr/TestRandomDVFaceting.java @@ -22,26 +22,26 @@ import java.util.List; import java.util.Map; import java.util.Random; - -import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.LuceneTestCase.Slow; +import org.apache.lucene.util.TestUtil; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.request.SolrQueryRequest; +import org.apache.solr.schema.IntPointField; import org.apache.solr.schema.SchemaField; import org.apache.solr.schema.TrieIntField; -import org.apache.solr.schema.IntPointField; import org.junit.BeforeClass; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * This is like TestRandomFaceting, except it does a copyField on each - * indexed field to field_dv, and compares the docvalues facet results - * to the indexed facet results as if it were just another faceting method. + * This is like TestRandomFaceting, except it does a copyField on each indexed field to field_dv, + * and compares the docvalues facet results to the indexed facet results as if it were just another + * faceting method. 
*/ @Slow -@SolrTestCaseJ4.SuppressPointFields(bugUrl="Test explicitly compares Trie to Points, randomization defeats the point") +@SolrTestCaseJ4.SuppressPointFields( + bugUrl = "Test explicitly compares Trie to Points, randomization defeats the point") @SolrTestCaseJ4.SuppressSSL public class TestRandomDVFaceting extends SolrTestCaseJ4 { @@ -52,32 +52,42 @@ public static void beforeTests() throws Exception { // This tests explicitly compares Trie DV with non-DV Trie with DV Points // so we don't want randomized DocValues on all Trie fields System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "false"); - - initCore("solrconfig-basic.xml","schema-docValuesFaceting.xml"); - - assertEquals("DocValues: Schema assumptions are broken", - false, h.getCore().getLatestSchema().getField("foo_i").hasDocValues()); - assertEquals("DocValues: Schema assumptions are broken", - true, h.getCore().getLatestSchema().getField("foo_i_dv").hasDocValues()); - assertEquals("DocValues: Schema assumptions are broken", - true, h.getCore().getLatestSchema().getField("foo_i_p").hasDocValues()); - - assertEquals("Type: Schema assumptions are broken", - TrieIntField.class, - h.getCore().getLatestSchema().getField("foo_i").getType().getClass()); - assertEquals("Type: Schema assumptions are broken", - TrieIntField.class, - h.getCore().getLatestSchema().getField("foo_i_dv").getType().getClass()); - assertEquals("Type: Schema assumptions are broken", - IntPointField.class, - h.getCore().getLatestSchema().getField("foo_i_p").getType().getClass()); - + + initCore("solrconfig-basic.xml", "schema-docValuesFaceting.xml"); + + assertEquals( + "DocValues: Schema assumptions are broken", + false, + h.getCore().getLatestSchema().getField("foo_i").hasDocValues()); + assertEquals( + "DocValues: Schema assumptions are broken", + true, + h.getCore().getLatestSchema().getField("foo_i_dv").hasDocValues()); + assertEquals( + "DocValues: Schema assumptions are broken", + true, + h.getCore().getLatestSchema().getField("foo_i_p").hasDocValues()); + + assertEquals( + "Type: Schema assumptions are broken", + TrieIntField.class, + h.getCore().getLatestSchema().getField("foo_i").getType().getClass()); + assertEquals( + "Type: Schema assumptions are broken", + TrieIntField.class, + h.getCore().getLatestSchema().getField("foo_i_dv").getType().getClass()); + assertEquals( + "Type: Schema assumptions are broken", + IntPointField.class, + h.getCore().getLatestSchema().getField("foo_i_p").getType().getClass()); } int indexSize; List types; + @SuppressWarnings({"rawtypes"}) Map model = null; + boolean validateResponses = true; void init() { @@ -87,31 +97,34 @@ void init() { indexSize = rand.nextBoolean() ? 
(rand.nextInt(10) + 1) : (rand.nextInt(100) + 10); types = new ArrayList<>(); - types.add(new FldType("id",ONE_ONE, new SVal('A','Z',4,4))); - types.add(new FldType("score_f",ONE_ONE, new FVal(1,100))); - types.add(new FldType("score_d",ONE_ONE, new FVal(1,100))); - types.add(new FldType("foo_i",ZERO_ONE, new IRange(0,indexSize))); - types.add(new FldType("foo_l",ZERO_ONE, new IRange(0,indexSize))); - types.add(new FldType("small_s",ZERO_ONE, new SVal('a',(char)('c'+indexSize/3),1,1))); - types.add(new FldType("small2_s",ZERO_ONE, new SVal('a',(char)('c'+indexSize/3),1,1))); - types.add(new FldType("small2_ss",ZERO_TWO, new SVal('a',(char)('c'+indexSize/3),1,1))); - types.add(new FldType("small3_ss",new IRange(0,25), new SVal('A','z',1,1))); - types.add(new FldType("small4_ss",ZERO_ONE, new SVal('a',(char)('c'+indexSize/3),1,1))); // to test specialization when a multi-valued field is actually single-valued - types.add(new FldType("small_i",ZERO_ONE, new IRange(0,5+indexSize/3))); - types.add(new FldType("small2_i",ZERO_ONE, new IRange(0,5+indexSize/3))); - types.add(new FldType("small2_is",ZERO_TWO, new IRange(0,5+indexSize/3))); - types.add(new FldType("small3_is",new IRange(0,25), new IRange(0,100))); - - types.add(new FldType("foo_fs", new IRange(0,25), new FVal(0,indexSize))); - types.add(new FldType("foo_f", ZERO_ONE, new FVal(0,indexSize))); - types.add(new FldType("foo_ds", new IRange(0,25), new FVal(0,indexSize))); - types.add(new FldType("foo_d", ZERO_ONE, new FVal(0,indexSize))); - types.add(new FldType("foo_ls", new IRange(0,25), new IRange(0,indexSize))); - - types.add(new FldType("missing_i",new IRange(0,0), new IRange(0,100))); - types.add(new FldType("missing_is",new IRange(0,0), new IRange(0,100))); - types.add(new FldType("missing_s",new IRange(0,0), new SVal('a','b',1,1))); - types.add(new FldType("missing_ss",new IRange(0,0), new SVal('a','b',1,1))); + types.add(new FldType("id", ONE_ONE, new SVal('A', 'Z', 4, 4))); + types.add(new FldType("score_f", ONE_ONE, new FVal(1, 100))); + types.add(new FldType("score_d", ONE_ONE, new FVal(1, 100))); + types.add(new FldType("foo_i", ZERO_ONE, new IRange(0, indexSize))); + types.add(new FldType("foo_l", ZERO_ONE, new IRange(0, indexSize))); + types.add(new FldType("small_s", ZERO_ONE, new SVal('a', (char) ('c' + indexSize / 3), 1, 1))); + types.add(new FldType("small2_s", ZERO_ONE, new SVal('a', (char) ('c' + indexSize / 3), 1, 1))); + types.add( + new FldType("small2_ss", ZERO_TWO, new SVal('a', (char) ('c' + indexSize / 3), 1, 1))); + types.add(new FldType("small3_ss", new IRange(0, 25), new SVal('A', 'z', 1, 1))); + // to test specialization when a multi-valued field is actually single-valued + types.add( + new FldType("small4_ss", ZERO_ONE, new SVal('a', (char) ('c' + indexSize / 3), 1, 1))); + types.add(new FldType("small_i", ZERO_ONE, new IRange(0, 5 + indexSize / 3))); + types.add(new FldType("small2_i", ZERO_ONE, new IRange(0, 5 + indexSize / 3))); + types.add(new FldType("small2_is", ZERO_TWO, new IRange(0, 5 + indexSize / 3))); + types.add(new FldType("small3_is", new IRange(0, 25), new IRange(0, 100))); + + types.add(new FldType("foo_fs", new IRange(0, 25), new FVal(0, indexSize))); + types.add(new FldType("foo_f", ZERO_ONE, new FVal(0, indexSize))); + types.add(new FldType("foo_ds", new IRange(0, 25), new FVal(0, indexSize))); + types.add(new FldType("foo_d", ZERO_ONE, new FVal(0, indexSize))); + types.add(new FldType("foo_ls", new IRange(0, 25), new IRange(0, indexSize))); + + types.add(new FldType("missing_i", new 
IRange(0, 0), new IRange(0, 100))); + types.add(new FldType("missing_is", new IRange(0, 0), new IRange(0, 100))); + types.add(new FldType("missing_s", new IRange(0, 0), new SVal('a', 'b', 1, 1))); + types.add(new FldType("missing_ss", new IRange(0, 0), new SVal('a', 'b', 1, 1))); // TODO: doubles, multi-floats, ints with precisionStep>0, booleans } @@ -141,10 +154,10 @@ void deleteSomeDocs() { assertU(delQ(sb.toString())); - if (rand.nextInt(10)==0) { + if (rand.nextInt(10) == 0) { assertU(optimize()); } else { - assertU(commit("softCommit",""+(rand.nextInt(10)!=0))); + assertU(commit("softCommit", "" + (rand.nextInt(10) != 0))); } } @@ -154,23 +167,22 @@ public void testRandomFaceting() throws Exception { int iter = atLeast(100); init(); addMoreDocs(0); - - for (int i=0; i multiValuedMethods = Arrays.asList(new String[]{"enum","fc","dv","uif"}); - List singleValuedMethods = Arrays.asList(new String[]{"enum","fc","fcs","dv","uif"}); - + List multiValuedMethods = Arrays.asList(new String[] {"enum", "fc", "dv", "uif"}); + List singleValuedMethods = Arrays.asList(new String[] {"enum", "fc", "fcs", "dv", "uif"}); void doFacetTests(FldType ftype) throws Exception { SolrQueryRequest req = req(); try { Random rand = random(); boolean validate = validateResponses; - ModifiableSolrParams params = params("facet","true", "wt","json", "indent","true", "omitHeader","true"); - params.add("q","*:*"); // TODO: select subsets - params.add("rows","0"); + ModifiableSolrParams params = + params("facet", "true", "wt", "json", "indent", "true", "omitHeader", "true"); + params.add("q", "*:*"); // TODO: select subsets + params.add("rows", "0"); SchemaField sf = req.getSchema().getField(ftype.fname); boolean multiValued = sf.getType().multiValuedFieldCache(); @@ -200,18 +212,24 @@ void doFacetTests(FldType ftype) throws Exception { int offset = 0; if (rand.nextInt(100) < 20) { if (rand.nextBoolean()) { - offset = rand.nextInt(100) < 10 ? rand.nextInt(indexSize*2) : rand.nextInt(indexSize/3+1); + offset = + rand.nextInt(100) < 10 + ? rand.nextInt(indexSize * 2) + : rand.nextInt(indexSize / 3 + 1); } params.add("facet.offset", Integer.toString(offset)); } if (rand.nextInt(100) < 20) { - if(rarely()) { + if (rarely()) { params.add("facet.limit", "-1"); } else { int limit = 100; if (rand.nextBoolean()) { - limit = rand.nextInt(100) < 10 ? rand.nextInt(indexSize/2+1) : rand.nextInt(indexSize*2); + limit = + rand.nextInt(100) < 10 + ? rand.nextInt(indexSize / 2 + 1) + : rand.nextInt(indexSize * 2); } params.add("facet.limit", Integer.toString(limit)); } @@ -225,20 +243,20 @@ void doFacetTests(FldType ftype) throws Exception { if (rand.nextBoolean()) { params.add("facet.sort", rand.nextBoolean() ? 
"index" : "count"); } - + if (rand.nextInt(100) < 10) { params.add("facet.mincount", Integer.toString(rand.nextInt(5))); } } else { params.add("facet.sort", "count"); - params.add("facet.mincount", Integer.toString(1+rand.nextInt(5))); + params.add("facet.mincount", Integer.toString(1 + rand.nextInt(5))); } if ((ftype.vals instanceof SVal) && rand.nextInt(100) < 20) { // validate = false; String prefix = ftype.createValue().toString(); - if (rand.nextInt(100) < 5) prefix = TestUtil.randomUnicodeString(rand); - else if (rand.nextInt(100) < 10) prefix = Character.toString((char)rand.nextInt(256)); + if (rand.nextInt(100) < 5) prefix = TestUtil.randomUnicodeString(rand); + else if (rand.nextInt(100) < 10) prefix = Character.toString((char) rand.nextInt(256)); else if (prefix.length() > 0) prefix = prefix.substring(0, rand.nextInt(prefix.length())); params.add("facet.prefix", prefix); } @@ -254,14 +272,15 @@ void doFacetTests(FldType ftype) throws Exception { List responses = new ArrayList<>(methods.size()); for (String method : methods) { if (method.equals("dv")) { - params.set("facet.field", "{!key="+facet_field+"}"+facet_field+"_dv"); - params.set("facet.method",(String) null); + params.set("facet.field", "{!key=" + facet_field + "}" + facet_field + "_dv"); + params.set("facet.method", (String) null); } else { params.set("facet.field", facet_field); params.set("facet.method", method); } - // if (random().nextBoolean()) params.set("facet.mincount", "1"); // uncomment to test that validation fails + // uncomment to test that validation fails + // if (random().nextBoolean()) params.set("facet.mincount", "1"); String strResponse = h.query(req(params)); // Object realResponse = ObjectBuilder.fromJSON(strResponse); @@ -274,31 +293,32 @@ void doFacetTests(FldType ftype) throws Exception { if (h.getCore().getLatestSchema().getFieldOrNull(facet_field + "_p") != null && params.get("facet.mincount") != null && params.getInt("facet.mincount").intValue() > 0) { - params.set("facet.field", "{!key="+facet_field+"}"+facet_field+"_p"); + params.set("facet.field", "{!key=" + facet_field + "}" + facet_field + "_p"); String strResponse = h.query(req(params)); responses.add(strResponse); } /** - String strResponse = h.query(req(params)); - Object realResponse = ObjectBuilder.fromJSON(strResponse); - **/ - + * String strResponse = h.query(req(params)); Object realResponse = + * ObjectBuilder.fromJSON(strResponse); + */ if (validate) { - for (int i=1; i types; + @SuppressWarnings({"rawtypes"}) Map model = null; + boolean validateResponses = true; void init() { @@ -77,45 +79,50 @@ void init() { indexSize = rand.nextBoolean() ? 
(rand.nextInt(10) + 1) : (rand.nextInt(100) + 10); types = new ArrayList<>(); - types.add(new FldType("id",ONE_ONE, new SVal('A','Z',4,4))); - types.add(new FldType("score_f",ONE_ONE, new FVal(1,100))); - types.add(new FldType("small_f",ONE_ONE, new FVal(-4,5))); - types.add(new FldType("small_d",ONE_ONE, new FVal(-4,5))); - types.add(new FldType("foo_i",ZERO_ONE, new IRange(-2,indexSize))); - types.add(new FldType("rare_s1",new IValsPercent(95,0,5,1), new SVal('a','b',1,5))); - types.add(new FldType("str_s1",ZERO_ONE, new SVal('a','z',1,2))); - types.add(new FldType("long_s1",ZERO_ONE, new SVal('a','b',1,5))); - types.add(new FldType("small_s1",ZERO_ONE, new SVal('a',(char)('c'+indexSize/3),1,1))); - types.add(new FldType("small2_s1",ZERO_ONE, new SVal('a',(char)('c'+indexSize/3),1,1))); - types.add(new FldType("small2_ss",ZERO_TWO, new SVal('a',(char)('c'+indexSize/3),1,1))); - types.add(new FldType("small3_ss",new IRange(0,25), new SVal('A','z',1,1))); - types.add(new FldType("small_i",ZERO_ONE, new IRange(-2,5+indexSize/3))); - types.add(new FldType("small2_i",ZERO_ONE, new IRange(-1,5+indexSize/3))); - types.add(new FldType("small2_is",ZERO_TWO, new IRange(-2,5+indexSize/3))); - types.add(new FldType("small3_is",new IRange(0,25), new IRange(-50,50))); - - types.add(new FldType("missing_i",new IRange(0,0), new IRange(0,100))); - types.add(new FldType("missing_is",new IRange(0,0), new IRange(0,100))); - types.add(new FldType("missing_s1",new IRange(0,0), new SVal('a','b',1,1))); - types.add(new FldType("missing_ss",new IRange(0,0), new SVal('a','b',1,1))); + types.add(new FldType("id", ONE_ONE, new SVal('A', 'Z', 4, 4))); + types.add(new FldType("score_f", ONE_ONE, new FVal(1, 100))); + types.add(new FldType("small_f", ONE_ONE, new FVal(-4, 5))); + types.add(new FldType("small_d", ONE_ONE, new FVal(-4, 5))); + types.add(new FldType("foo_i", ZERO_ONE, new IRange(-2, indexSize))); + types.add(new FldType("rare_s1", new IValsPercent(95, 0, 5, 1), new SVal('a', 'b', 1, 5))); + types.add(new FldType("str_s1", ZERO_ONE, new SVal('a', 'z', 1, 2))); + types.add(new FldType("long_s1", ZERO_ONE, new SVal('a', 'b', 1, 5))); + types.add(new FldType("small_s1", ZERO_ONE, new SVal('a', (char) ('c' + indexSize / 3), 1, 1))); + types.add( + new FldType("small2_s1", ZERO_ONE, new SVal('a', (char) ('c' + indexSize / 3), 1, 1))); + types.add( + new FldType("small2_ss", ZERO_TWO, new SVal('a', (char) ('c' + indexSize / 3), 1, 1))); + types.add(new FldType("small3_ss", new IRange(0, 25), new SVal('A', 'z', 1, 1))); + types.add(new FldType("small_i", ZERO_ONE, new IRange(-2, 5 + indexSize / 3))); + types.add(new FldType("small2_i", ZERO_ONE, new IRange(-1, 5 + indexSize / 3))); + types.add(new FldType("small2_is", ZERO_TWO, new IRange(-2, 5 + indexSize / 3))); + types.add(new FldType("small3_is", new IRange(0, 25), new IRange(-50, 50))); + + types.add(new FldType("missing_i", new IRange(0, 0), new IRange(0, 100))); + types.add(new FldType("missing_is", new IRange(0, 0), new IRange(0, 100))); + types.add(new FldType("missing_s1", new IRange(0, 0), new SVal('a', 'b', 1, 1))); + types.add(new FldType("missing_ss", new IRange(0, 0), new SVal('a', 'b', 1, 1))); // TODO: doubles, multi-floats, ints with precisionStep>0, booleans - types.add(new FldType("small_tf",ZERO_ONE, new FVal(-4,5))); + types.add(new FldType("small_tf", ZERO_ONE, new FVal(-4, 5))); assert trieFields.matcher("small_tf").matches(); assert !trieFields.matcher("small_f").matches(); - - types.add(new FldType("foo_ti",ZERO_ONE, new 
IRange(-2,indexSize))); + + types.add(new FldType("foo_ti", ZERO_ONE, new IRange(-2, indexSize))); assert trieFields.matcher("foo_ti").matches(); assert !trieFields.matcher("foo_i").matches(); - - types.add(new FldType("bool_b",ZERO_ONE, new Vals(){ - @Override - @SuppressWarnings({"rawtypes"}) - public Comparable get() { - return random().nextBoolean(); - } - - })); + + types.add( + new FldType( + "bool_b", + ZERO_ONE, + new Vals() { + @Override + @SuppressWarnings({"rawtypes"}) + public Comparable get() { + return random().nextBoolean(); + } + })); } void addMoreDocs(int ndocs) throws Exception { @@ -127,7 +134,7 @@ void deleteSomeDocs() { int percent = rand.nextInt(100); if (model == null) return; ArrayList ids = new ArrayList<>(model.size()); - for (@SuppressWarnings({"rawtypes"})Comparable id : model.keySet()) { + for (@SuppressWarnings({"rawtypes"}) Comparable id : model.keySet()) { if (rand.nextInt(100) < percent) { ids.add(id.toString()); } @@ -143,10 +150,10 @@ void deleteSomeDocs() { assertU(delQ(sb.toString())); - if (rand.nextInt(10)==0) { + if (rand.nextInt(10) == 0) { assertU(optimize()); } else { - assertU(commit("softCommit",""+(rand.nextInt(10)!=0))); + assertU(commit("softCommit", "" + (rand.nextInt(10) != 0))); } } @@ -156,41 +163,39 @@ public void testRandomFaceting() throws Exception { int iter = atLeast(100); init(); addMoreDocs(0); - - for (int i=0; i multiValuedMethods = Arrays.asList(new String[]{"enum","fc", null}); - List singleValuedMethods = Arrays.asList(new String[]{"enum","fc","fcs", null}); - + List multiValuedMethods = Arrays.asList(new String[] {"enum", "fc", null}); + List singleValuedMethods = Arrays.asList(new String[] {"enum", "fc", "fcs", null}); void doFacetTests(FldType ftype) throws Exception { SolrQueryRequest req = req(); try { Random rand = random(); - ModifiableSolrParams params = params("facet","true", "wt","json", "indent","true", "omitHeader","true"); - params.add("q","*:*"); // TODO: select subsets - params.add("rows","0"); + ModifiableSolrParams params = + params("facet", "true", "wt", "json", "indent", "true", "omitHeader", "true"); + params.add("q", "*:*"); // TODO: select subsets + params.add("rows", "0"); SchemaField sf = req.getSchema().getField(ftype.fname); boolean multiValued = sf.getType().multiValuedFieldCache(); @@ -198,7 +203,10 @@ void doFacetTests(FldType ftype) throws Exception { int offset = 0; if (rand.nextInt(100) < 20) { if (rand.nextBoolean()) { - offset = rand.nextInt(100) < 10 ? rand.nextInt(indexSize*2) : rand.nextInt(indexSize/3+1); + offset = + rand.nextInt(100) < 10 + ? rand.nextInt(indexSize * 2) + : rand.nextInt(indexSize / 3 + 1); } params.add("facet.offset", Integer.toString(offset)); } @@ -206,7 +214,10 @@ void doFacetTests(FldType ftype) throws Exception { int limit = 100; if (rand.nextInt(100) < 20) { if (rand.nextBoolean()) { - limit = rand.nextInt(100) < 10 ? rand.nextInt(indexSize/2+1) : rand.nextInt(indexSize*2); + limit = + rand.nextInt(100) < 10 + ? 
rand.nextInt(indexSize / 2 + 1) + : rand.nextInt(indexSize * 2); } params.add("facet.limit", Integer.toString(limit)); } @@ -218,8 +229,8 @@ void doFacetTests(FldType ftype) throws Exception { if ((ftype.vals instanceof SVal) && rand.nextInt(100) < 20) { // validate = false; String prefix = ftype.createValue().toString(); - if (rand.nextInt(100) < 5) prefix = TestUtil.randomUnicodeString(rand); - else if (rand.nextInt(100) < 10) prefix = Character.toString((char)rand.nextInt(256)); + if (rand.nextInt(100) < 5) prefix = TestUtil.randomUnicodeString(rand); + else if (rand.nextInt(100) < 10) prefix = Character.toString((char) rand.nextInt(256)); else if (prefix.length() > 0) prefix = prefix.substring(0, rand.nextInt(prefix.length())); params.add("facet.prefix", prefix); } @@ -233,165 +244,193 @@ void doFacetTests(FldType ftype) throws Exception { } if (rand.nextBoolean()) { - params.add("facet.enum.cache.minDf",""+ rand.nextInt(indexSize)); + params.add("facet.enum.cache.minDf", "" + rand.nextInt(indexSize)); } - + // TODO: randomly add other facet params String key = ftype.fname; String facet_field = ftype.fname; if (random().nextBoolean()) { key = "alternate_key"; - facet_field = "{!key="+key+"}"+ftype.fname; + facet_field = "{!key=" + key + "}" + ftype.fname; } params.set("facet.field", facet_field); List methods = multiValued ? multiValuedMethods : singleValuedMethods; List responses = new ArrayList<>(methods.size()); - + for (String method : methods) { - for (boolean exists : new boolean[]{false, true}) { + for (boolean exists : new boolean[] {false, true}) { // params.add("facet.field", "{!key="+method+"}" + ftype.fname); // TODO: allow method to be passed on local params? - if (method!=null) { + if (method != null) { params.set("facet.method", method); } else { params.remove("facet.method"); } - params.set("facet.exists", ""+exists); + params.set("facet.exists", "" + exists); if (!exists && rand.nextBoolean()) { params.remove("facet.exists"); } - - // if (random().nextBoolean()) params.set("facet.mincount", "1"); // uncomment to test that validation fails - if (!(params.getInt("facet.limit", 100) == 0 && - !params.getBool("facet.missing", false))) { + + // uncomment to test that validation fails + // if (random().nextBoolean()) params.set("facet.mincount", "1"); + + if (!(params.getInt("facet.limit", 100) == 0 + && !params.getBool("facet.missing", false))) { // it bypasses all processing, and we can go to empty validation - if (exists && params.getInt("facet.mincount", 0)>1) { - assertQEx("no mincount on facet.exists", - rand.nextBoolean() ? "facet.exists":"facet.mincount", - req(params), ErrorCode.BAD_REQUEST); + if (exists && params.getInt("facet.mincount", 0) > 1) { + assertQEx( + "no mincount on facet.exists", + rand.nextBoolean() ? 
"facet.exists" : "facet.mincount", + req(params), + ErrorCode.BAD_REQUEST); continue; } - // facet.exists can't be combined with non-enum nor with enum requested for tries, because it will be flipped to FC/FCS + // facet.exists can't be combined with non-enum nor with enum requested for tries, + // because it will be flipped to FC/FCS final boolean notEnum = method != null && !method.equals("enum"); final boolean trieField = trieFields.matcher(ftype.fname).matches(); if ((notEnum || trieField) && exists) { - assertQEx("facet.exists only when enum or ommitted", - "facet.exists", req(params), ErrorCode.BAD_REQUEST); + assertQEx( + "facet.exists only when enum or ommitted", + "facet.exists", + req(params), + ErrorCode.BAD_REQUEST); continue; } if (exists && sf.getType().isPointField()) { // PointFields don't yet support "enum" method or the "facet.exists" parameter - assertQEx("Expecting failure, since ", - "facet.exists=true is requested, but facet.method=enum can't be used with " + sf.getName(), - req(params), ErrorCode.BAD_REQUEST); + assertQEx( + "Expecting failure, since ", + "facet.exists=true is requested, but facet.method=enum can't be used with " + + sf.getName(), + req(params), + ErrorCode.BAD_REQUEST); continue; } } String strResponse = h.query(req(params)); responses.add(strResponse); - - if (responses.size()>1) { + + if (responses.size() > 1) { validateResponse(responses.get(0), strResponse, params, method, methods); } } - } - + /** - String strResponse = h.query(req(params)); - Object realResponse = ObjectBuilder.fromJSON(strResponse); - **/ + * String strResponse = h.query(req(params)); Object realResponse = + * ObjectBuilder.fromJSON(strResponse); + */ } finally { req.close(); } } - private void validateResponse(String expected, String actual, ModifiableSolrParams params, String method, - List methods) throws Exception { + + private void validateResponse( + String expected, + String actual, + ModifiableSolrParams params, + String method, + List methods) + throws Exception { if (params.getBool("facet.exists", false)) { - if (isSortByCount(params)) { // it's challenged with facet.sort=count - expected = getExpectationForSortByCount(params, methods);// that requires to recalculate expactation + if (isSortByCount(params)) { // it's challenged with facet.sort=count + // that requires to recalculate expactation + expected = getExpectationForSortByCount(params, methods); } else { // facet.sort=index expected = capFacetCountsTo1(expected); } } - + String err = JSONTestUtil.match("/", actual, expected, 0.0); if (err != null) { - log.error("ERROR: mismatch facet response: {}\n expected ={}\n response = {}\n request = {}" - , err, expected, actual, params); + log.error( + "ERROR: mismatch facet response: {}\n expected ={}\n response = {}\n request = {}", + err, + expected, + actual, + params); fail(err); } } - /** if facet.exists=true with facet.sort=counts, - * it should return all values with 1 hits ordered by label index - * then all vals with 0 , and then missing count with null label, - * in the implementation below they are called three stratas - * */ + /** + * if facet.exists=true with facet.sort=counts, it should return all values with 1 hits ordered by + * label index then all vals with 0 , and then missing count with null label, in the + * implementation below they are called three stratas + */ @SuppressWarnings({"unchecked"}) - private String getExpectationForSortByCount( ModifiableSolrParams params, List methods) throws Exception { + private String 
getExpectationForSortByCount(ModifiableSolrParams params, List methods) + throws Exception { String indexSortedResponse = getIndexSortedAllFacetValues(params, methods); - - return transformFacetFields(indexSortedResponse, e -> { - List facetSortedByIndex = (List) e.getValue(); - Map> stratas = new HashMap>(){ - @Override // poor man multimap, I won't do that anymore, I swear. - public List get(Object key) { - if (!containsKey(key)) { - put((Integer) key, new ArrayList<>()); - } - return super.get(key); - } - }; - - for (@SuppressWarnings({"rawtypes"})Iterator iterator = facetSortedByIndex.iterator(); iterator.hasNext();) { - Object label = iterator.next(); - Long count = (Long) iterator.next(); - final Integer strata; - if (label==null) { // missing (here "stratas" seems like overengineering ) - strata = null; - }else { - if (count>0) { - count = 1L; // capping here - strata = 1; // non-zero count become zero - } else { - strata = 0; // zero-count + + return transformFacetFields( + indexSortedResponse, + e -> { + List facetSortedByIndex = (List) e.getValue(); + Map> stratas = + new HashMap>() { + @Override // poor man multimap, I won't do that anymore, I swear. + public List get(Object key) { + if (!containsKey(key)) { + put((Integer) key, new ArrayList<>()); + } + return super.get(key); + } + }; + + for (@SuppressWarnings({"rawtypes"}) Iterator iterator = facetSortedByIndex.iterator(); + iterator.hasNext(); ) { + Object label = iterator.next(); + Long count = (Long) iterator.next(); + final Integer strata; + if (label == null) { // missing (here "stratas" seems like overengineering ) + strata = null; + } else { + if (count > 0) { + count = 1L; // capping here + strata = 1; // non-zero count become zero + } else { + strata = 0; // zero-count + } + } + final List facet = stratas.get(strata); + facet.add(label); + facet.add(count); } - } - final List facet = stratas.get(strata); - facet.add(label); - facet.add(count); - } - @SuppressWarnings({"rawtypes"}) - List stratified =new ArrayList<>(); - for(Integer s : new Integer[]{1, 0}) { // non-zero capped to one goes first, zeroes go then - stratified.addAll(stratas.get(s)); - }// cropping them now - int offset=params.getInt("facet.offset", 0) * 2; - int end = offset + params.getInt("facet.limit", 100) * 2 ; - int fromIndex = offset > stratified.size() ? stratified.size() : offset; - stratified = stratified.subList(fromIndex, - end > stratified.size() ? stratified.size() : end); - - stratified.addAll(stratas.get(null)); - - facetSortedByIndex.clear(); - facetSortedByIndex.addAll(stratified); - }); + @SuppressWarnings({"rawtypes"}) + List stratified = new ArrayList<>(); + // non-zero capped to one goes first, zeroes go then + for (Integer s : new Integer[] {1, 0}) { + stratified.addAll(stratas.get(s)); + } // cropping them now + int offset = params.getInt("facet.offset", 0) * 2; + int end = offset + params.getInt("facet.limit", 100) * 2; + int fromIndex = offset > stratified.size() ? stratified.size() : offset; + stratified = + stratified.subList(fromIndex, end > stratified.size() ? 
stratified.size() : end); + + stratified.addAll(stratas.get(null)); + + facetSortedByIndex.clear(); + facetSortedByIndex.addAll(stratified); + }); } - private String getIndexSortedAllFacetValues(ModifiableSolrParams in, List methods) throws Exception { + private String getIndexSortedAllFacetValues(ModifiableSolrParams in, List methods) + throws Exception { ModifiableSolrParams params = new ModifiableSolrParams(in); params.set("facet.sort", "index"); - String goodOldMethod = methods.get(random().nextInt( methods.size())); + String goodOldMethod = methods.get(random().nextInt(methods.size())); params.set("facet.method", goodOldMethod); params.set("facet.exists", "false"); if (random().nextBoolean()) { params.remove("facet.exists"); } - params.set("facet.limit",-1); - params.set("facet.offset",0); + params.set("facet.limit", -1); + params.set("facet.offset", 0); final String query; SolrQueryRequest req = null; try { @@ -406,7 +445,8 @@ private String getIndexSortedAllFacetValues(ModifiableSolrParams in, List0); + sortIsCount = + "count".equals(sortParam) || (sortParam == null && in.getInt("facet.limit", 100) > 0); return sortIsCount; } @@ -423,33 +463,35 @@ private boolean isSortByCount(ModifiableSolrParams in) { "3",1]}, "facet_ranges":{}, "facet_intervals":{}, - "facet_heatmaps":{}}} + "facet_heatmaps":{}}} * */ @SuppressWarnings({"rawtypes", "unchecked"}) private String capFacetCountsTo1(String expected) throws IOException { - return transformFacetFields(expected, e -> { - List facetValues = (List) e.getValue(); - for (ListIterator iterator = facetValues.listIterator(); iterator.hasNext();) { - Object value = iterator.next(); - Long count = (Long) iterator.next(); - if (value!=null && count > 1) { - iterator.set(1); - } - - } - }); + return transformFacetFields( + expected, + e -> { + List facetValues = (List) e.getValue(); + for (ListIterator iterator = facetValues.listIterator(); iterator.hasNext(); ) { + Object value = iterator.next(); + Long count = (Long) iterator.next(); + if (value != null && count > 1) { + iterator.set(1); + } + } + }); } - + @SuppressWarnings({"unchecked"}) - private String transformFacetFields(String expected, Consumer> consumer) throws IOException { + private String transformFacetFields(String expected, Consumer> consumer) + throws IOException { Object json = Utils.fromJSONString(expected); @SuppressWarnings({"rawtypes"}) Map facet_fields = getFacetFieldMap(json); @SuppressWarnings({"rawtypes"}) Set entries = facet_fields.entrySet(); - for (Object facetTuples : entries) { //despite there should be only one field + for (Object facetTuples : entries) { // despite there should be only one field @SuppressWarnings({"rawtypes"}) - Entry entry = (Entry)facetTuples; + Entry entry = (Entry) facetTuples; consumer.accept(entry); } return Utils.toJSONString(json); @@ -457,10 +499,8 @@ private String transformFacetFields(String expected, Consumer shard : trackingQueue.getAllRequests().values()) { diff --git a/solr/core/src/test/org/apache/solr/TestSolrCoreProperties.java b/solr/core/src/test/org/apache/solr/TestSolrCoreProperties.java index 5387f04ce23..6c65ad293f3 100644 --- a/solr/core/src/test/org/apache/solr/TestSolrCoreProperties.java +++ b/solr/core/src/test/org/apache/solr/TestSolrCoreProperties.java @@ -16,14 +16,6 @@ */ package org.apache.solr; -import org.apache.commons.io.FileUtils; -import org.apache.lucene.util.IOUtils; -import org.apache.solr.client.solrj.embedded.JettySolrRunner; -import org.apache.solr.client.solrj.response.QueryResponse; -import 
org.apache.solr.common.params.SolrParams; -import org.apache.solr.common.util.NamedList; -import org.junit.BeforeClass; - import java.io.File; import java.io.FileOutputStream; import java.io.OutputStreamWriter; @@ -31,10 +23,16 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.util.Properties; +import org.apache.commons.io.FileUtils; +import org.apache.lucene.util.IOUtils; +import org.apache.solr.client.solrj.embedded.JettySolrRunner; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.util.NamedList; +import org.junit.BeforeClass; /** - *
<p> - * Test for Loading core properties from a properties file </p>
- * + * Test for Loading core properties from a properties file * * @since solr 1.4 */ @@ -55,41 +53,46 @@ public static void beforeTest() throws Exception { dataDir.mkdirs(); confDir.mkdirs(); - FileUtils.copyFile(new File(SolrTestCaseJ4.TEST_HOME(), "solr.xml"), new File(homeDir, "solr.xml")); + FileUtils.copyFile( + new File(SolrTestCaseJ4.TEST_HOME(), "solr.xml"), new File(homeDir, "solr.xml")); String src_dir = TEST_HOME() + "/collection1/conf"; - FileUtils.copyFile(new File(src_dir, "schema-tiny.xml"), - new File(confDir, "schema.xml")); - FileUtils.copyFile(new File(src_dir, "solrconfig-solcoreproperties.xml"), - new File(confDir, "solrconfig.xml")); - FileUtils.copyFile(new File(src_dir, "solrconfig.snippet.randomindexconfig.xml"), - new File(confDir, "solrconfig.snippet.randomindexconfig.xml")); + FileUtils.copyFile(new File(src_dir, "schema-tiny.xml"), new File(confDir, "schema.xml")); + FileUtils.copyFile( + new File(src_dir, "solrconfig-solcoreproperties.xml"), new File(confDir, "solrconfig.xml")); + FileUtils.copyFile( + new File(src_dir, "solrconfig.snippet.randomindexconfig.xml"), + new File(confDir, "solrconfig.snippet.randomindexconfig.xml")); Properties p = new Properties(); p.setProperty("foo.foo1", "f1"); p.setProperty("foo.foo2", "f2"); - Writer fos = new OutputStreamWriter(new FileOutputStream(new File(confDir, "solrcore.properties")), StandardCharsets.UTF_8); + Writer fos = + new OutputStreamWriter( + new FileOutputStream(new File(confDir, "solrcore.properties")), StandardCharsets.UTF_8); p.store(fos, null); IOUtils.close(fos); Files.createFile(collDir.toPath().resolve("core.properties")); - Properties nodeProperties = new Properties(); // this sets the property for jetty starting SolrDispatchFilter if (System.getProperty("solr.data.dir") == null) { nodeProperties.setProperty("solr.data.dir", createTempDir().toFile().getCanonicalPath()); } - jetty = new JettySolrRunner(homeDir.getAbsolutePath(), nodeProperties, buildJettyConfig("/solr")); + jetty = + new JettySolrRunner(homeDir.getAbsolutePath(), nodeProperties, buildJettyConfig("/solr")); jetty.start(); port = jetty.getLocalPort(); - //createJetty(homeDir.getAbsolutePath(), null, null); + // createJetty(homeDir.getAbsolutePath(), null, null); } public void testSimple() throws Exception { - SolrParams params = params("q", "*:*", - "echoParams", "all"); + SolrParams params = + params( + "q", "*:*", + "echoParams", "all"); QueryResponse res = getSolrClient().query(params); assertEquals(0, res.getResults().getNumFound()); @@ -97,5 +100,4 @@ public void testSimple() throws Exception { assertEquals("f1", echoedParams.get("p1")); assertEquals("f2", echoedParams.get("p2")); } - } diff --git a/solr/core/src/test/org/apache/solr/TestTolerantSearch.java b/solr/core/src/test/org/apache/solr/TestTolerantSearch.java index a99b77401a7..fe4d77e69da 100644 --- a/solr/core/src/test/org/apache/solr/TestTolerantSearch.java +++ b/solr/core/src/test/org/apache/solr/TestTolerantSearch.java @@ -19,7 +19,6 @@ import java.io.File; import java.io.IOException; import java.io.OutputStream; - import org.apache.commons.io.FileUtils; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; @@ -38,22 +37,23 @@ import org.junit.BeforeClass; public class TestTolerantSearch extends SolrJettyTestBase { - + private static SolrClient collection1; private static SolrClient collection2; private static String shard1; private static String shard2; private static File solrHome; - + private static File createSolrHome() 
throws Exception { File workDir = createTempDir().toFile(); setupJettyTestHome(workDir, "collection1"); - FileUtils.copyFile(new File(SolrTestCaseJ4.TEST_HOME() + "/collection1/conf/solrconfig-tolerant-search.xml"), new File(workDir, "/collection1/conf/solrconfig.xml")); + FileUtils.copyFile( + new File(SolrTestCaseJ4.TEST_HOME() + "/collection1/conf/solrconfig-tolerant-search.xml"), + new File(workDir, "/collection1/conf/solrconfig.xml")); FileUtils.copyDirectory(new File(workDir, "collection1"), new File(workDir, "collection2")); return workDir; } - - + @BeforeClass public static void createThings() throws Exception { systemSetPropertySolrDisableUrlAllowList("true"); @@ -62,13 +62,13 @@ public static void createThings() throws Exception { String url = jetty.getBaseUrl().toString(); collection1 = getHttpSolrClient(url + "/collection1"); collection2 = getHttpSolrClient(url + "/collection2"); - + String urlCollection1 = jetty.getBaseUrl().toString() + "/" + "collection1"; String urlCollection2 = jetty.getBaseUrl().toString() + "/" + "collection2"; shard1 = urlCollection1.replaceAll("https?://", ""); shard2 = urlCollection2.replaceAll("https?://", ""); - - //create second core + + // create second core try (HttpSolrClient nodeClient = getHttpSolrClient(url)) { CoreAdminRequest.Create req = new CoreAdminRequest.Create(); req.setCoreName("collection2"); @@ -82,21 +82,20 @@ public static void createThings() throws Exception { doc.setField("title", "foo bar"); collection1.add(doc); collection1.commit(); - + doc.setField("id", "2"); doc.setField("subject", "superman"); collection2.add(doc); collection2.commit(); - + doc = new SolrInputDocument(); doc.setField("id", "3"); doc.setField("subject", "aquaman"); doc.setField("title", "foo bar"); collection1.add(doc); collection1.commit(); - } - + @AfterClass public static void destroyThings() throws Exception { if (null != collection1) { @@ -109,12 +108,12 @@ public static void destroyThings() throws Exception { } if (null != jetty) { jetty.stop(); - jetty=null; + jetty = null; } resetExceptionIgnores(); systemClearPropertySolrDisableUrlAllowList(); } - + @SuppressWarnings("unchecked") public void testGetFieldsPhaseError() throws SolrServerException, IOException { BadResponseWriter.failOnGetFields = true; @@ -137,19 +136,23 @@ public void testGetFieldsPhaseError() throws SolrServerException, IOException { query.setHighlight(true); query.addFacetField("id"); query.setFacet(true); - + ignoreException("Dummy exception in BadResponseWriter"); expectThrows(SolrException.class, () -> collection1.query(query)); query.set(ShardParams.SHARDS_TOLERANT, "true"); QueryResponse response = collection1.query(query); - assertTrue(response.getResponseHeader().getBooleanArg(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY)); - NamedList shardsInfo = ((NamedList)response.getResponse().get(ShardParams.SHARDS_INFO)); + assertTrue( + response + .getResponseHeader() + .getBooleanArg(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY)); + NamedList shardsInfo = + ((NamedList) response.getResponse().get(ShardParams.SHARDS_INFO)); boolean foundError = false; for (int i = 0; i < shardsInfo.size(); i++) { if (shardsInfo.getName(i).contains("collection2")) { - assertNotNull(((NamedList)shardsInfo.getVal(i)).get("error")); + assertNotNull(((NamedList) shardsInfo.getVal(i)).get("error")); foundError = true; break; } @@ -159,7 +162,7 @@ public void testGetFieldsPhaseError() throws SolrServerException, IOException { assertEquals("batman", 
response.getResults().get(0).getFirstValue("subject")); unIgnoreException("Dummy exception in BadResponseWriter"); } - + @SuppressWarnings("unchecked") public void testGetTopIdsPhaseError() throws SolrServerException, IOException { BadResponseWriter.failOnGetTopIds = true; @@ -182,64 +185,64 @@ public void testGetTopIdsPhaseError() throws SolrServerException, IOException { query.setHighlight(true); query.addFacetField("id"); query.setFacet(true); - + ignoreException("Dummy exception in BadResponseWriter"); expectThrows(Exception.class, () -> collection1.query(query)); query.set(ShardParams.SHARDS_TOLERANT, "true"); QueryResponse response = collection1.query(query); - assertTrue(response.getResponseHeader().getBooleanArg(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY)); - NamedList shardsInfo = ((NamedList)response.getResponse().get(ShardParams.SHARDS_INFO)); + assertTrue( + response + .getResponseHeader() + .getBooleanArg(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY)); + NamedList shardsInfo = + ((NamedList) response.getResponse().get(ShardParams.SHARDS_INFO)); boolean foundError = false; for (int i = 0; i < shardsInfo.size(); i++) { if (shardsInfo.getName(i).contains("collection2")) { - assertNotNull(((NamedList)shardsInfo.getVal(i)).get("error")); + assertNotNull(((NamedList) shardsInfo.getVal(i)).get("error")); foundError = true; break; } } assertTrue(foundError); - assertFalse(""+response, response.getResults().isEmpty()); + assertFalse("" + response, response.getResults().isEmpty()); assertEquals("1", response.getResults().get(0).getFieldValue("id")); assertEquals("batman", response.getResults().get(0).getFirstValue("subject")); unIgnoreException("Dummy exception in BadResponseWriter"); } - + public static class BadResponseWriter extends BinaryResponseWriter { - + private static boolean failOnGetFields = false; private static boolean failOnGetTopIds = false; - + public BadResponseWriter() { super(); } - + @Override - public void write(OutputStream out, SolrQueryRequest req, - SolrQueryResponse response) throws IOException { - - // I want to fail on the shard request, not the original user request, and only on the - // GET_FIELDS phase - if (failOnGetFields && - "collection2".equals(req.getCore().getName()) + public void write(OutputStream out, SolrQueryRequest req, SolrQueryResponse response) + throws IOException { + + // I want to fail on the shard request, not the original user request, and only on the + // GET_FIELDS phase + if (failOnGetFields + && "collection2".equals(req.getCore().getName()) && "subject:batman OR subject:superman".equals(req.getParams().get("q", "")) && req.getParams().get("ids") != null) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, - "Dummy exception in BadResponseWriter"); - } else if (failOnGetTopIds + throw new SolrException( + SolrException.ErrorCode.SERVER_ERROR, "Dummy exception in BadResponseWriter"); + } else if (failOnGetTopIds && "collection2".equals(req.getCore().getName()) && "subject:batman OR subject:superman".equals(req.getParams().get("q", "")) && req.getParams().get("ids") == null && req.getParams().getBool("isShard", false) == true) { - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, - "Dummy exception in BadResponseWriter"); + throw new SolrException( + SolrException.ErrorCode.SERVER_ERROR, "Dummy exception in BadResponseWriter"); } super.write(out, req, response); } - - } - - } diff --git a/solr/core/src/test/org/apache/solr/TestTrie.java b/solr/core/src/test/org/apache/solr/TestTrie.java 
index 4dcb0b07dc7..6e69532fb67 100644 --- a/solr/core/src/test/org/apache/solr/TestTrie.java +++ b/solr/core/src/test/org/apache/solr/TestTrie.java @@ -19,7 +19,6 @@ import java.text.SimpleDateFormat; import java.util.Locale; import java.util.TimeZone; - import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.schema.FieldType; import org.apache.solr.schema.TrieField; @@ -29,10 +28,9 @@ import org.junit.Test; /** - * Tests for numeric field functionality. The name originated from {@link TrieField}, but all tests + * Tests for numeric field functionality. The name originated from {@link TrieField}, but all tests * done in this class are also valid for any numeric field types. * - * * @since solr 1.4 * @deprecated Trie fields are deprecated as of Solr 7.0 */ @@ -40,9 +38,9 @@ public class TestTrie extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema-trie.xml"); + initCore("solrconfig.xml", "schema-trie.xml"); } - + @Override @After public void tearDown() throws Exception { @@ -56,48 +54,106 @@ public void testTrieIntRangeSearch() throws Exception { assertU(adoc("id", String.valueOf(i), "tint", String.valueOf(i))); } assertU(commit()); - assertQ("Range filter must match only 5 documents", req("q", "*:*", "fq", "tint:[2 TO 6]"), "//*[@numFound='5']"); + assertQ( + "Range filter must match only 5 documents", + req("q", "*:*", "fq", "tint:[2 TO 6]"), + "//*[@numFound='5']"); for (int i = 1; i < 11; i++) { assertU(adoc("id", String.valueOf(-i), "tint", String.valueOf(-i))); } assertU(commit()); - assertQ("Range filter must match only 5 documents", req("q", "*:*", "fq", "tint:[-6 TO -2]"), "//*[@numFound='5']"); + assertQ( + "Range filter must match only 5 documents", + req("q", "*:*", "fq", "tint:[-6 TO -2]"), + "//*[@numFound='5']"); // Test open ended range searches - assertQ("Range filter tint:[-9 to *] must match 20 documents", req("q", "*:*", "fq", "tint:[-10 TO *]"), "//*[@numFound='20']"); - assertQ("Range filter tint:[* to 9] must match 20 documents", req("q", "*:*", "fq", "tint:[* TO 10]"), "//*[@numFound='20']"); - assertQ("Range filter tint:[* to *] must match 20 documents", req("q", "*:*", "fq", "tint:[* TO *]"), "//*[@numFound='20']"); + assertQ( + "Range filter tint:[-9 to *] must match 20 documents", + req("q", "*:*", "fq", "tint:[-10 TO *]"), + "//*[@numFound='20']"); + assertQ( + "Range filter tint:[* to 9] must match 20 documents", + req("q", "*:*", "fq", "tint:[* TO 10]"), + "//*[@numFound='20']"); + assertQ( + "Range filter tint:[* to *] must match 20 documents", + req("q", "*:*", "fq", "tint:[* TO *]"), + "//*[@numFound='20']"); // Sorting - assertQ("Sort descending does not work correctly on tint fields", req("q", "*:*", "sort", "tint desc"), "//*[@numFound='20']", "//int[@name='tint'][.='9']"); - assertQ("Sort ascending does not work correctly on tint fields", req("q", "*:*", "sort", "tint asc"), "//*[@numFound='20']", "//int[@name='tint'][.='-10']"); + assertQ( + "Sort descending does not work correctly on tint fields", + req("q", "*:*", "sort", "tint desc"), + "//*[@numFound='20']", + "//int[@name='tint'][.='9']"); + assertQ( + "Sort ascending does not work correctly on tint fields", + req("q", "*:*", "sort", "tint asc"), + "//*[@numFound='20']", + "//int[@name='tint'][.='-10']"); // Function queries - assertQ("Function queries does not work correctly on tint fields", req("q", "_val_:\"sum(tint,1)\""), "//*[@numFound='20']", "//int[@name='tint'][.='9']"); + assertQ( + "Function queries 
does not work correctly on tint fields", + req("q", "_val_:\"sum(tint,1)\""), + "//*[@numFound='20']", + "//int[@name='tint'][.='9']"); } @Test public void testTrieTermQuery() throws Exception { for (int i = 0; i < 10; i++) { - assertU(adoc("id", String.valueOf(i), - "tint", String.valueOf(i), - "tfloat", String.valueOf(i * i * 31.11f), - "tlong", String.valueOf((long) Integer.MAX_VALUE + (long) i), - "tdouble", String.valueOf(i * 2.33d))); + assertU( + adoc( + "id", + String.valueOf(i), + "tint", + String.valueOf(i), + "tfloat", + String.valueOf(i * i * 31.11f), + "tlong", + String.valueOf((long) Integer.MAX_VALUE + (long) i), + "tdouble", + String.valueOf(i * 2.33d))); } assertU(commit()); // Use with q - assertQ("Term query on trie int field must match 1 document", req("q", "tint:2"), "//*[@numFound='1']"); - assertQ("Term query on trie float field must match 1 document", req("q", "tfloat:124.44"), "//*[@numFound='1']"); - assertQ("Term query on trie long field must match 1 document", req("q", "tlong:2147483648"), "//*[@numFound='1']"); - assertQ("Term query on trie double field must match 1 document", req("q", "tdouble:4.66"), "//*[@numFound='1']"); + assertQ( + "Term query on trie int field must match 1 document", + req("q", "tint:2"), + "//*[@numFound='1']"); + assertQ( + "Term query on trie float field must match 1 document", + req("q", "tfloat:124.44"), + "//*[@numFound='1']"); + assertQ( + "Term query on trie long field must match 1 document", + req("q", "tlong:2147483648"), + "//*[@numFound='1']"); + assertQ( + "Term query on trie double field must match 1 document", + req("q", "tdouble:4.66"), + "//*[@numFound='1']"); // Use with fq - assertQ("Term query on trie int field must match 1 document", req("q", "*:*", "fq", "tint:2"), "//*[@numFound='1']"); - assertQ("Term query on trie float field must match 1 document", req("q", "*:*", "fq", "tfloat:124.44"), "//*[@numFound='1']"); - assertQ("Term query on trie long field must match 1 document", req("q", "*:*", "fq", "tlong:2147483648"), "//*[@numFound='1']"); - assertQ("Term query on trie double field must match 1 document", req("q", "*:*", "fq", "tdouble:4.66"), "//*[@numFound='1']"); + assertQ( + "Term query on trie int field must match 1 document", + req("q", "*:*", "fq", "tint:2"), + "//*[@numFound='1']"); + assertQ( + "Term query on trie float field must match 1 document", + req("q", "*:*", "fq", "tfloat:124.44"), + "//*[@numFound='1']"); + assertQ( + "Term query on trie long field must match 1 document", + req("q", "*:*", "fq", "tlong:2147483648"), + "//*[@numFound='1']"); + assertQ( + "Term query on trie double field must match 1 document", + req("q", "*:*", "fq", "tdouble:4.66"), + "//*[@numFound='1']"); } @Test @@ -112,11 +168,23 @@ public void testTrieFloatRangeSearch() throws Exception { assertQ("Range filter must match 10 documents", req, "//*[@numFound='10']"); // Sorting - assertQ("Sort descending does not work correctly on tfloat fields", req("q", "*:*", "sort", "tfloat desc"), "//*[@numFound='10']", "//float[@name='tfloat'][.='2519.9102']"); - assertQ("Sort ascending does not work correctly on tfloat fields", req("q", "*:*", "sort", "tfloat asc"), "//*[@numFound='10']", "//float[@name='tfloat'][.='0.0']"); + assertQ( + "Sort descending does not work correctly on tfloat fields", + req("q", "*:*", "sort", "tfloat desc"), + "//*[@numFound='10']", + "//float[@name='tfloat'][.='2519.9102']"); + assertQ( + "Sort ascending does not work correctly on tfloat fields", + req("q", "*:*", "sort", "tfloat asc"), + 
"//*[@numFound='10']", + "//float[@name='tfloat'][.='0.0']"); // Function queries - assertQ("Function queries does not work correctly on tfloat fields", req("q", "_val_:\"sum(tfloat,1.0)\""), "//*[@numFound='10']", "//float[@name='tfloat'][.='2519.9102']"); + assertQ( + "Function queries does not work correctly on tfloat fields", + req("q", "_val_:\"sum(tfloat,1.0)\""), + "//*[@numFound='10']", + "//float[@name='tfloat'][.='2519.9102']"); } @Test @@ -128,14 +196,29 @@ public void testTrieLongRangeSearch() throws Exception { String fq = "tlong:[" + Integer.MAX_VALUE + " TO " + (5l + Integer.MAX_VALUE) + "]"; SolrQueryRequest req = req("q", "*:*", "fq", fq); assertQ("Range filter must match only 5 documents", req, "//*[@numFound='6']"); - assertQ("Range filter tlong:[* to *] must match 10 documents", req("q", "*:*", "fq", "tlong:[* TO *]"), "//*[@numFound='10']"); + assertQ( + "Range filter tlong:[* to *] must match 10 documents", + req("q", "*:*", "fq", "tlong:[* TO *]"), + "//*[@numFound='10']"); // Sorting - assertQ("Sort descending does not work correctly on tlong fields", req("q", "*:*", "sort", "tlong desc"), "//*[@numFound='10']", "//long[@name='tlong'][.='2147483656']"); - assertQ("Sort ascending does not work correctly on tlong fields", req("q", "*:*", "sort", "tlong asc"), "//*[@numFound='10']", "//long[@name='tlong'][.='2147483647']"); + assertQ( + "Sort descending does not work correctly on tlong fields", + req("q", "*:*", "sort", "tlong desc"), + "//*[@numFound='10']", + "//long[@name='tlong'][.='2147483656']"); + assertQ( + "Sort ascending does not work correctly on tlong fields", + req("q", "*:*", "sort", "tlong asc"), + "//*[@numFound='10']", + "//long[@name='tlong'][.='2147483647']"); // Function queries - assertQ("Function queries does not work correctly on tlong fields", req("q", "_val_:\"sum(tlong,1.0)\""), "//*[@numFound='10']", "//long[@name='tlong'][.='2147483656']"); + assertQ( + "Function queries does not work correctly on tlong fields", + req("q", "_val_:\"sum(tlong,1.0)\""), + "//*[@numFound='10']", + "//long[@name='tlong'][.='2147483656']"); } @Test @@ -144,31 +227,65 @@ public void testTrieDoubleRangeSearch() throws Exception { assertU(adoc("id", String.valueOf(c++), "tdouble", String.valueOf(i * 2.33d))); } assertU(commit()); - String fq = "tdouble:[" + Integer.MAX_VALUE * 2.33d + " TO " + (5l + Integer.MAX_VALUE) * 2.33d + "]"; - assertQ("Range filter must match only 5 documents", req("q", "*:*", "fq", fq), "//*[@numFound='6']"); - assertQ("Range filter tdouble:[* to *] must match 10 documents", req("q", "*:*", "fq", "tdouble:[* TO *]"), "//*[@numFound='10']"); + String fq = + "tdouble:[" + Integer.MAX_VALUE * 2.33d + " TO " + (5l + Integer.MAX_VALUE) * 2.33d + "]"; + assertQ( + "Range filter must match only 5 documents", + req("q", "*:*", "fq", fq), + "//*[@numFound='6']"); + assertQ( + "Range filter tdouble:[* to *] must match 10 documents", + req("q", "*:*", "fq", "tdouble:[* TO *]"), + "//*[@numFound='10']"); // Sorting - assertQ("Sort descending does not work correctly on tdouble fields", req("q", "*:*", "sort", "tdouble desc"), "//*[@numFound='10']", "//double[@name='tdouble'][.='5.0036369184800005E9']"); - assertQ("Sort ascending does not work correctly on tdouble fields", req("q", "*:*", "sort", "tdouble asc"), "//*[@numFound='10']", "//double[@name='tdouble'][.='5.00363689751E9']"); + assertQ( + "Sort descending does not work correctly on tdouble fields", + req("q", "*:*", "sort", "tdouble desc"), + "//*[@numFound='10']", + 
"//double[@name='tdouble'][.='5.0036369184800005E9']"); + assertQ( + "Sort ascending does not work correctly on tdouble fields", + req("q", "*:*", "sort", "tdouble asc"), + "//*[@numFound='10']", + "//double[@name='tdouble'][.='5.00363689751E9']"); // Function queries - assertQ("Function queries does not work correctly on tdouble fields", req("q", "_val_:\"sum(tdouble,1.0)\""), "//*[@numFound='10']", "//double[@name='tdouble'][.='5.0036369184800005E9']"); + assertQ( + "Function queries does not work correctly on tdouble fields", + req("q", "_val_:\"sum(tdouble,1.0)\""), + "//*[@numFound='10']", + "//double[@name='tdouble'][.='5.0036369184800005E9']"); } @Test public void testTrieDateRangeSearch() throws Exception { for (int i = 0; i < 10; i++) { - assertU(adoc("id", String.valueOf(i), "tdate", "1995-12-31T23:" + (i < 10 ? "0" + i : i) + ":59.999Z")); + assertU( + adoc( + "id", + String.valueOf(i), + "tdate", + "1995-12-31T23:" + (i < 10 ? "0" + i : i) + ":59.999Z")); } assertU(commit()); - SolrQueryRequest req = req("q", "*:*", "fq", "tdate:[1995-12-31T23:00:59.999Z TO 1995-12-31T23:04:59.999Z]"); + SolrQueryRequest req = + req("q", "*:*", "fq", "tdate:[1995-12-31T23:00:59.999Z TO 1995-12-31T23:04:59.999Z]"); assertQ("Range filter must match only 5 documents", req, "//*[@numFound='5']"); // Test open ended range searches - assertQ("Range filter tint:[1995-12-31T23:00:59.999Z to *] must match 10 documents", req("q", "*:*", "fq", "tdate:[1995-12-31T23:00:59.999Z TO *]"), "//*[@numFound='10']"); - assertQ("Range filter tint:[* to 1995-12-31T23:09:59.999Z] must match 10 documents", req("q", "*:*", "fq", "tdate:[* TO 1995-12-31T23:09:59.999Z]"), "//*[@numFound='10']"); - assertQ("Range filter tint:[* to *] must match 10 documents", req("q", "*:*", "fq", "tdate:[* TO *]"), "//*[@numFound='10']"); + assertQ( + "Range filter tint:[1995-12-31T23:00:59.999Z to *] must match 10 documents", + req("q", "*:*", "fq", "tdate:[1995-12-31T23:00:59.999Z TO *]"), + "//*[@numFound='10']"); + assertQ( + "Range filter tint:[* to 1995-12-31T23:09:59.999Z] must match 10 documents", + req("q", "*:*", "fq", "tdate:[* TO 1995-12-31T23:09:59.999Z]"), + "//*[@numFound='10']"); + assertQ( + "Range filter tint:[* to *] must match 10 documents", + req("q", "*:*", "fq", "tdate:[* TO *]"), + "//*[@numFound='10']"); // Test date math syntax SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.ROOT); @@ -179,27 +296,49 @@ public void testTrieDateRangeSearch() throws Exception { String largestDate = ""; for (int i = 0; i < 10; i++) { // index 10 days starting with today - String d = format.format(i == 0 ? dmp.parseMath("/DAY") : dmp.parseMath("/DAY+" + i + "DAYS")); + String d = + format.format(i == 0 ? 
dmp.parseMath("/DAY") : dmp.parseMath("/DAY+" + i + "DAYS")); assertU(adoc("id", String.valueOf(i), "tdate", d)); if (i == 9) largestDate = d; } assertU(commit()); - assertQ("Range filter must match only 10 documents", req("q", "*:*", "fq", "tdate:[* TO *]"), "//*[@numFound='10']"); + assertQ( + "Range filter must match only 10 documents", + req("q", "*:*", "fq", "tdate:[* TO *]"), + "//*[@numFound='10']"); req = req("q", "*:*", "fq", "tdate:[NOW/DAY TO NOW/DAY+5DAYS]"); assertQ("Range filter must match only 6 documents", req, "//*[@numFound='6']"); // Test Term Queries assertU(adoc("id", "11", "tdate", "1995-12-31T23:59:59.999Z")); assertU(commit()); - assertQ("Term query must match only 1 document", req("q", "tdate:1995-12-31T23\\:59\\:59.999Z"), "//*[@numFound='1']"); - assertQ("Term query must match only 1 document", req("q", "*:*", "fq", "tdate:1995-12-31T23\\:59\\:59.999Z"), "//*[@numFound='1']"); + assertQ( + "Term query must match only 1 document", + req("q", "tdate:1995-12-31T23\\:59\\:59.999Z"), + "//*[@numFound='1']"); + assertQ( + "Term query must match only 1 document", + req("q", "*:*", "fq", "tdate:1995-12-31T23\\:59\\:59.999Z"), + "//*[@numFound='1']"); // Sorting - assertQ("Sort descending does not work correctly on tdate fields", req("q", "*:*", "sort", "tdate desc"), "//*[@numFound='11']", "//date[@name='tdate'][.='" + largestDate + "']"); - assertQ("Sort ascending does not work correctly on tdate fields", req("q", "*:*", "sort", "tdate asc"), "//*[@numFound='11']", "//date[@name='tdate'][.='1995-12-31T23:59:59.999Z']"); + assertQ( + "Sort descending does not work correctly on tdate fields", + req("q", "*:*", "sort", "tdate desc"), + "//*[@numFound='11']", + "//date[@name='tdate'][.='" + largestDate + "']"); + assertQ( + "Sort ascending does not work correctly on tdate fields", + req("q", "*:*", "sort", "tdate asc"), + "//*[@numFound='11']", + "//date[@name='tdate'][.='1995-12-31T23:59:59.999Z']"); // Function queries - assertQ("Function queries does not work correctly on tdate fields", req("q", "_val_:\"sum(tdate,1.0)\""), "//*[@numFound='11']", "//date[@name='tdate'][.='" + largestDate + "']"); + assertQ( + "Function queries does not work correctly on tdate fields", + req("q", "_val_:\"sum(tdate,1.0)\""), + "//*[@numFound='11']", + "//date[@name='tdate'][.='" + largestDate + "']"); } @Test @@ -208,17 +347,22 @@ public void testTrieDoubleRangeSearch_CustomPrecisionStep() throws Exception { assertU(adoc("id", String.valueOf(c++), "tdouble4", String.valueOf(i * 2.33d))); } assertU(commit()); - String fq = "tdouble4:[" + Integer.MAX_VALUE * 2.33d + " TO " + (5l + Integer.MAX_VALUE) * 2.33d + "]"; - assertQ("Range filter must match only 5 documents", req("q", "*:*", "fq", fq), "//*[@numFound='6']"); + String fq = + "tdouble4:[" + Integer.MAX_VALUE * 2.33d + " TO " + (5l + Integer.MAX_VALUE) * 2.33d + "]"; + assertQ( + "Range filter must match only 5 documents", + req("q", "*:*", "fq", fq), + "//*[@numFound='6']"); } @Test public void testTrieFacet_PrecisionStep() throws Exception { if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) { - assumeTrue("Skipping test: Points+facets require docValues, but randomizer: points=true && DV=false", - Boolean.getBoolean(NUMERIC_DOCVALUES_SYSPROP)); + assumeTrue( + "Skipping test: Points+facets require docValues, but randomizer: points=true && DV=false", + Boolean.getBoolean(NUMERIC_DOCVALUES_SYSPROP)); } - + // Future protect - assert 0 args = new HashMap<>(); + Map args = new HashMap<>(); args.put("ignoreCase", "true"); - 
args.put("protected", "protected-1.txt,protected-2.txt"); // Protected: foobar, jaxfopbuz, golden, compote + // Protected: foobar, jaxfopbuz, golden, compote + args.put("protected", "protected-1.txt,protected-2.txt"); args.put("wrappedFilters", "lowercase"); ResourceLoader loader = new SolrResourceLoader(TEST_PATH().resolve("collection1")); @@ -47,38 +47,44 @@ public void testBasic() throws Exception { factory.inform(loader); TokenStream ts = factory.create(whitespaceMockTokenizer(text)); - BaseTokenStreamTestCase.assertTokenStreamContents(ts, - new String[] { "wuthering", "FooBar", "distant", "goldeN", "abc", "compote" }); + BaseTokenStreamTestCase.assertTokenStreamContents( + ts, new String[] {"wuthering", "FooBar", "distant", "goldeN", "abc", "compote"}); } public void testTwoWrappedFilters() { - // Index-time: Filters: truncate:4 & lowercase. Protected (ignoreCase:true): foobar, jaxfopbuz, golden, compote + // Index-time: Filters: truncate:4 & lowercase. Protected (ignoreCase:true): foobar, jaxfopbuz, + // golden, compote // Query-time: No filters assertU(adoc("id", "1", "prefix4_lower", "Wuthering FooBar distant goldeN ABC compote")); assertU(commit()); - assertQ(req("prefix4_lower:(+wuth +FooBar +dist +goldeN +abc +compote)") - , "//result[@numFound=1]" - ); + assertQ( + req("prefix4_lower:(+wuth +FooBar +dist +goldeN +abc +compote)"), "//result[@numFound=1]"); } public void testDuplicateFilters() { - // Index-time: Filters: truncate:3 & reversestring & truncate:2. Protected (ignoreCase:true): foobar, jaxfopbuz, golden, compote + // Index-time: Filters: truncate:3 & reversestring & truncate:2. Protected (ignoreCase:true): + // foobar, jaxfopbuz, golden, compote // Query-time: No filters - assertU(adoc("id", "1", - "prefix3_rev_prefix2", "Wuthering FooBar distant goldeN ABC compote", - "prefix3_rev_prefix2_mixed_IDs", "Wuthering FooBar distant goldeN ABC compote", - "prefix3_rev_prefix2_mixed_case", "Wuthering FooBar distant goldeN ABC compote")); + assertU( + adoc( + "id", + "1", + "prefix3_rev_prefix2", + "Wuthering FooBar distant goldeN ABC compote", + "prefix3_rev_prefix2_mixed_IDs", + "Wuthering FooBar distant goldeN ABC compote", + "prefix3_rev_prefix2_mixed_case", + "Wuthering FooBar distant goldeN ABC compote")); assertU(commit()); - assertQ(req("prefix3_rev_prefix2:(+tu +FooBar +si +goldeN +CB +compote)") - , "//result[@numFound=1]" - ); - assertQ(req("prefix3_rev_prefix2_mixed_IDs:(+tu +FooBar +si +goldeN +CB +compote)") - , "//result[@numFound=1]" - ); - assertQ(req("prefix3_rev_prefix2_mixed_case:(+tu +FooBar +si +goldeN +CB +compote)") - , "//result[@numFound=1]" - ); + assertQ( + req("prefix3_rev_prefix2:(+tu +FooBar +si +goldeN +CB +compote)"), "//result[@numFound=1]"); + assertQ( + req("prefix3_rev_prefix2_mixed_IDs:(+tu +FooBar +si +goldeN +CB +compote)"), + "//result[@numFound=1]"); + assertQ( + req("prefix3_rev_prefix2_mixed_case:(+tu +FooBar +si +goldeN +CB +compote)"), + "//result[@numFound=1]"); } } diff --git a/solr/core/src/test/org/apache/solr/analysis/TestCharFilters.java b/solr/core/src/test/org/apache/solr/analysis/TestCharFilters.java index 646bdbb11dc..26cdb3453e2 100644 --- a/solr/core/src/test/org/apache/solr/analysis/TestCharFilters.java +++ b/solr/core/src/test/org/apache/solr/analysis/TestCharFilters.java @@ -20,14 +20,13 @@ import org.junit.BeforeClass; /** - * Tests that charfilters are being applied properly - * (e.g. once and only once) with mockcharfilter. + * Tests that charfilters are being applied properly (e.g. 
once and only once) with mockcharfilter. */ public class TestCharFilters extends SolrTestCaseJ4 { - + @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-basic.xml","schema-charfilters.xml"); + initCore("solrconfig-basic.xml", "schema-charfilters.xml"); // add some docs assertU(adoc("id", "1", "content", "aab")); assertU(adoc("id", "2", "content", "aabaa")); @@ -35,42 +34,42 @@ public static void beforeClass() throws Exception { assertU(adoc("id", "4", "content2", "aba")); assertU(commit()); } - + /** - * Test query analysis: at querytime MockCharFilter will - * double the 'a', so ab -> aab, and aba -> aabaa - * - * We run the test twice to make sure reuse is working + * Test query analysis: at querytime MockCharFilter will double the 'a', so ab -> aab, and aba + * -> aabaa + * + *
<p>
We run the test twice to make sure reuse is working */ public void testQueryAnalysis() { - assertQ("Query analysis: ", - req("fl", "id", "q", "content:ab", "sort", "id asc"), - "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=1]" - ); - assertQ("Query analysis: ", + assertQ( + "Query analysis: ", + req("fl", "id", "q", "content:ab", "sort", "id asc"), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.=1]"); + assertQ( + "Query analysis: ", req("fl", "id", "q", "content:aba", "sort", "id asc"), - "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=2]" - ); + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.=2]"); } - + /** - * Test index analysis: at indextime MockCharFilter will - * double the 'a', so ab -> aab, and aba -> aabaa - * - * We run the test twice to make sure reuse is working + * Test index analysis: at indextime MockCharFilter will double the 'a', so ab -> aab, and aba + * -> aabaa + * + *
<p>
We run the test twice to make sure reuse is working */ public void testIndexAnalysis() { - assertQ("Index analysis: ", - req("fl", "id", "q", "content2:aab", "sort", "id asc"), - "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=3]" - ); - assertQ("Index analysis: ", + assertQ( + "Index analysis: ", + req("fl", "id", "q", "content2:aab", "sort", "id asc"), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.=3]"); + assertQ( + "Index analysis: ", req("fl", "id", "q", "content2:aabaa", "sort", "id asc"), - "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=4]" - ); + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.=4]"); } } diff --git a/solr/core/src/test/org/apache/solr/analysis/TestDeprecatedFilters.java b/solr/core/src/test/org/apache/solr/analysis/TestDeprecatedFilters.java index fea1ca894f8..c1f54fb7f76 100644 --- a/solr/core/src/test/org/apache/solr/analysis/TestDeprecatedFilters.java +++ b/solr/core/src/test/org/apache/solr/analysis/TestDeprecatedFilters.java @@ -24,7 +24,7 @@ public class TestDeprecatedFilters extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-leader.xml","schema-deprecations.xml"); + initCore("solrconfig-leader.xml", "schema-deprecations.xml"); } public void testLowerCaseTokenizer() { @@ -32,5 +32,4 @@ public void testLowerCaseTokenizer() { assertU(commit()); assertQ(req("lowertext:test"), "//result[@numFound=1]"); } - } diff --git a/solr/core/src/test/org/apache/solr/analysis/TestLuceneMatchVersion.java b/solr/core/src/test/org/apache/solr/analysis/TestLuceneMatchVersion.java index 2f6f8564e37..09e567d55fe 100644 --- a/solr/core/src/test/org/apache/solr/analysis/TestLuceneMatchVersion.java +++ b/solr/core/src/test/org/apache/solr/analysis/TestLuceneMatchVersion.java @@ -16,34 +16,32 @@ */ package org.apache.solr.analysis; -import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.core.SolrConfig; -import org.apache.solr.schema.IndexSchema; -import org.apache.solr.schema.FieldType; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.tr.TurkishAnalyzer; import org.apache.lucene.util.Version; +import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.core.SolrConfig; +import org.apache.solr.schema.FieldType; +import org.apache.solr.schema.IndexSchema; import org.junit.BeforeClass; -/** - * Tests for luceneMatchVersion property for analyzers - */ +/** Tests for luceneMatchVersion property for analyzers */ public class TestLuceneMatchVersion extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema-luceneMatchVersion.xml"); + initCore("solrconfig.xml", "schema-luceneMatchVersion.xml"); } - + // this must match the solrconfig.xml version for this test public static final Version DEFAULT_VERSION = - SolrConfig.parseLuceneVersionString(System.getProperty("tests.luceneMatchVersion", "LATEST")); + SolrConfig.parseLuceneVersionString(System.getProperty("tests.luceneMatchVersion", "LATEST")); public void testStandardTokenizerVersions() throws Exception { assertEquals(DEFAULT_VERSION, solrConfig.luceneMatchVersion); - + final IndexSchema schema = h.getCore().getLatestSchema(); - + FieldType type = schema.getFieldType("textDefault"); TokenizerChain ana = (TokenizerChain) type.getIndexAnalyzer(); assertEquals(DEFAULT_VERSION, (ana.getTokenizerFactory()).getLuceneMatchVersion()); diff --git a/solr/core/src/test/org/apache/solr/analysis/TestReversedWildcardFilterFactory.java 
b/solr/core/src/test/org/apache/solr/analysis/TestReversedWildcardFilterFactory.java index 19ac6fc6ac4..935d0ce6b9a 100644 --- a/solr/core/src/test/org/apache/solr/analysis/TestReversedWildcardFilterFactory.java +++ b/solr/core/src/test/org/apache/solr/analysis/TestReversedWildcardFilterFactory.java @@ -15,6 +15,7 @@ * limitations under the License. */ package org.apache.solr.analysis; + import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; @@ -23,7 +24,6 @@ import java.io.IOException; import java.util.HashMap; import java.util.Map; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.queryparser.charstream.CharStream; @@ -45,18 +45,17 @@ import org.junit.BeforeClass; import org.junit.Test; - public class TestReversedWildcardFilterFactory extends SolrTestCaseJ4 { - Map args = new HashMap<>(); + Map args = new HashMap<>(); IndexSchema schema; @BeforeClass public static void beforeClass() throws Exception { assumeWorkingMockito(); - initCore("solrconfig.xml","schema-reversed.xml"); + initCore("solrconfig.xml", "schema-reversed.xml"); } - + @Override @Before public void setUp() throws Exception { @@ -72,19 +71,18 @@ public void testReversedTokens() throws IOException { args.put("withOriginal", "true"); ReversedWildcardFilterFactory factory = new ReversedWildcardFilterFactory(args); TokenStream input = factory.create(whitespaceMockTokenizer(text)); - assertTokenStreamContents(input, - new String[] { "\u0001elpmis", "simple", "\u0001txet", "text" }, - new int[] { 1, 0, 1, 0 }); + assertTokenStreamContents( + input, + new String[] {"\u0001elpmis", "simple", "\u0001txet", "text"}, + new int[] {1, 0, 1, 0}); // now without original tokens args.put("withOriginal", "false"); factory = new ReversedWildcardFilterFactory(args); input = factory.create(whitespaceMockTokenizer(text)); - assertTokenStreamContents(input, - new String[] { "\u0001elpmis", "\u0001txet" }, - new int[] { 1, 1 }); + assertTokenStreamContents(input, new String[] {"\u0001elpmis", "\u0001txet"}, new int[] {1, 1}); } - + @Test public void testIndexingAnalysis() throws Exception { Analyzer a = schema.getIndexAnalyzer(); @@ -92,31 +90,38 @@ public void testIndexingAnalysis() throws Exception { // field one TokenStream input = a.tokenStream("one", text); - assertTokenStreamContents(input, - new String[] { "\u0001eno", "one", "\u0001owt", "two", - "\u0001eerht", "three", "\u0001x\uD834\uDD1Eis", "si\uD834\uDD1Ex" }, - new int[] { 0, 0, 4, 4, 8, 8, 14, 14 }, - new int[] { 3, 3, 7, 7, 13, 13, 19, 19 }, - new int[] { 1, 0, 1, 0, 1, 0, 1, 0 } - ); + assertTokenStreamContents( + input, + new String[] { + "\u0001eno", + "one", + "\u0001owt", + "two", + "\u0001eerht", + "three", + "\u0001x\uD834\uDD1Eis", + "si\uD834\uDD1Ex" + }, + new int[] {0, 0, 4, 4, 8, 8, 14, 14}, + new int[] {3, 3, 7, 7, 13, 13, 19, 19}, + new int[] {1, 0, 1, 0, 1, 0, 1, 0}); // field two input = a.tokenStream("two", text); - assertTokenStreamContents(input, - new String[] { "\u0001eno", "\u0001owt", - "\u0001eerht", "\u0001x\uD834\uDD1Eis" }, - new int[] { 0, 4, 8, 14 }, - new int[] { 3, 7, 13, 19 }, - new int[] { 1, 1, 1, 1 } - ); + assertTokenStreamContents( + input, + new String[] {"\u0001eno", "\u0001owt", "\u0001eerht", "\u0001x\uD834\uDD1Eis"}, + new int[] {0, 4, 8, 14}, + new int[] {3, 7, 13, 19}, + new int[] {1, 1, 1, 1}); // field three input = a.tokenStream("three", text); - 
assertTokenStreamContents(input, - new String[] { "one", "two", "three", "si\uD834\uDD1Ex" }, - new int[] { 0, 4, 8, 14 }, - new int[] { 3, 7, 13, 19 } - ); + assertTokenStreamContents( + input, + new String[] {"one", "two", "three", "si\uD834\uDD1Ex"}, + new int[] {0, 4, 8, 14}, + new int[] {3, 7, 13, 19}); } - + @Test public void testQueryParsing() throws Exception { @@ -128,18 +133,12 @@ public void testQueryParsing() throws Exception { assertU(adoc("id", "5", "two", "five")); assertU(adoc("id", "6", "three", "si\uD834\uDD1Ex")); assertU(commit()); - - assertQ("should have matched", - req("+id:1 +one:one"), - "//result[@numFound=1]"); - - assertQ("should have matched", - req("+id:4 +one:f*ur"), - "//result[@numFound=1]"); - - assertQ("should have matched", - req("+id:6 +three:*si\uD834\uDD1Ex"), - "//result[@numFound=1]"); + + assertQ("should have matched", req("+id:1 +one:one"), "//result[@numFound=1]"); + + assertQ("should have matched", req("+id:4 +one:f*ur"), "//result[@numFound=1]"); + + assertQ("should have matched", req("+id:6 +three:*si\uD834\uDD1Ex"), "//result[@numFound=1]"); SolrQueryRequest req = req(); QParser qparser = QParser.getParser("id:1", req); @@ -161,16 +160,17 @@ public void testQueryParsing() throws Exception { req.close(); } - - /** fragile assert: depends on our implementation, but cleanest way to check for now */ + + /** fragile assert: depends on our implementation, but cleanest way to check for now */ private boolean wasReversed(SolrQueryParser qp, String query) throws Exception { Query q = qp.parse(query); if (!(q instanceof AutomatonQuery)) { return false; } Automaton automaton = ((AutomatonQuery) q).getAutomaton(); - String prefix = Operations.getCommonPrefix(Operations.determinize(automaton, - Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)); + String prefix = + Operations.getCommonPrefix( + Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)); return prefix.length() > 0 && prefix.charAt(0) == '\u0001'; } @@ -179,34 +179,28 @@ public void testFalsePositives() throws Exception { // add a doc assertU(adoc("id", "1", "one", "gomez", "two", "gomez", "three", "gomez")); assertU(commit()); - - assertQ("false positive", - req("+id:1 +one:*zemog*"), - "//result[@numFound=0]"); - - assertQ("no reverse, no false positive", - req("q", "+id:1 +three:[* TO a]", + + assertQ("false positive", req("+id:1 +one:*zemog*"), "//result[@numFound=0]"); + + assertQ( + "no reverse, no false positive", + req( + "q", "+id:1 +three:[* TO a]", "debugQuery", "true"), "//result[@numFound=0]"); { - String reverseField = random().nextBoolean() ? "one":"two"; - assertQ("false positive", - req("q", "+id:1 +"+reverseField+":[* TO a]", - "debugQuery", "true"), + String reverseField = random().nextBoolean() ? 
"one" : "two"; + assertQ( + "false positive", + req("q", "+id:1 +" + reverseField + ":[* TO a]", "debugQuery", "true"), "//result[@numFound=0]"); } - assertQ("false positive", - req("+id:1 +two:*zemog*"), - "//result[@numFound=0]"); - assertQ("false positive", - req("+id:1 +three:*zemog*"), - "//result[@numFound=0]"); - - assertQ("should have matched", - req("+id:1 +one:*omez*"), - "//result[@numFound=1]"); + assertQ("false positive", req("+id:1 +two:*zemog*"), "//result[@numFound=0]"); + assertQ("false positive", req("+id:1 +three:*zemog*"), "//result[@numFound=0]"); + + assertQ("should have matched", req("+id:1 +one:*omez*"), "//result[@numFound=1]"); } - + private static final class SolrQParser extends SolrQueryParserBase { @Override public Query TopLevelQuery(String field) throws ParseException, SyntaxError { @@ -221,21 +215,21 @@ protected ReversedWildcardFilterFactory getReversedWildcardFilterFactory(FieldTy return super.getReversedWildcardFilterFactory(fieldType); } } - - @Test + + @Test public void testCachingInQueryParser() { SolrQParser parser = new SolrQParser(); - + SolrQueryRequest req = req(); - String[] fields = new String[]{"one", "two", "three"}; + String[] fields = new String[] {"one", "two", "three"}; String aField = fields[random().nextInt(fields.length)]; FieldType type = req.getSchema().getField(aField).getType(); - + FieldType typeSpy = spy(type); - // calling twice + // calling twice parser.getReversedWildcardFilterFactory(typeSpy); parser.getReversedWildcardFilterFactory(typeSpy); - // but it should reach only once + // but it should reach only once verify(typeSpy, times(1)).getIndexAnalyzer(); } } diff --git a/solr/core/src/test/org/apache/solr/analysis/TestWordDelimiterFilterFactory.java b/solr/core/src/test/org/apache/solr/analysis/TestWordDelimiterFilterFactory.java index 61d1bb5c4d7..0630bbe54bf 100644 --- a/solr/core/src/test/org/apache/solr/analysis/TestWordDelimiterFilterFactory.java +++ b/solr/core/src/test/org/apache/solr/analysis/TestWordDelimiterFilterFactory.java @@ -18,7 +18,6 @@ import java.util.HashMap; import java.util.Map; - import org.apache.lucene.analysis.BaseTokenStreamTestCase; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilterFactory; @@ -29,49 +28,49 @@ import org.junit.BeforeClass; import org.junit.Test; -/** - * New WordDelimiterFilter tests... most of the tests are in ConvertedLegacyTest - */ +/** New WordDelimiterFilter tests... most of the tests are in ConvertedLegacyTest */ // TODO: add a low-level test for this factory public class TestWordDelimiterFilterFactory extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); } public void posTst(String v1, String v2, String s1, String s2) { - assertU(adoc("id", "42", - "subword", v1, - "subword", v2)); + assertU( + adoc( + "id", "42", + "subword", v1, + "subword", v2)); assertU(commit()); // there is a positionIncrementGap of 100 between field values, so // we test if that was maintained. 
- assertQ("position increment lost", - req("+id:42 +subword:\"" + s1 + ' ' + s2 + "\"~90") - ,"//result[@numFound=0]" - ); - assertQ("position increment lost", - req("+id:42 +subword:\"" + s1 + ' ' + s2 + "\"~110") - ,"//result[@numFound=1]" - ); + assertQ( + "position increment lost", + req("+id:42 +subword:\"" + s1 + ' ' + s2 + "\"~90"), + "//result[@numFound=0]"); + assertQ( + "position increment lost", + req("+id:42 +subword:\"" + s1 + ' ' + s2 + "\"~110"), + "//result[@numFound=1]"); clearIndex(); } @Test public void testRetainPositionIncrement() { - posTst("foo","bar","foo","bar"); - posTst("-foo-","-bar-","foo","bar"); - posTst("foo","bar","-foo-","-bar-"); + posTst("foo", "bar", "foo", "bar"); + posTst("-foo-", "-bar-", "foo", "bar"); + posTst("foo", "bar", "-foo-", "-bar-"); - posTst("123","456","123","456"); - posTst("/123/","/456/","123","456"); + posTst("123", "456", "123", "456"); + posTst("/123/", "/456/", "123", "456"); - posTst("/123/abc","qwe/456/","abc","qwe"); + posTst("/123/abc", "qwe/456/", "abc", "qwe"); - posTst("zoo-foo","bar-baz","foo","bar"); - posTst("zoo-foo-123","456-bar-baz","foo","bar"); + posTst("zoo-foo", "bar-baz", "foo", "bar"); + posTst("zoo-foo-123", "456-bar-baz", "foo", "bar"); } @Test @@ -83,43 +82,32 @@ public void testNoGenerationEdgeCase() { @Test public void testIgnoreCaseChange() { - assertU(adoc("id", "43", - "wdf_nocase", "HellO WilliAM", - "subword", "GoodBye JonEs")); + assertU( + adoc( + "id", "43", + "wdf_nocase", "HellO WilliAM", + "subword", "GoodBye JonEs")); assertU(commit()); - - assertQ("no case change", - req("wdf_nocase:(hell o am)") - ,"//result[@numFound=0]" - ); - assertQ("case change", - req("subword:(good jon)") - ,"//result[@numFound=1]" - ); + + assertQ("no case change", req("wdf_nocase:(hell o am)"), "//result[@numFound=0]"); + assertQ("case change", req("subword:(good jon)"), "//result[@numFound=1]"); clearIndex(); } @Test public void testPreserveOrignalTrue() { - assertU(adoc("id", "144", - "wdf_preserve", "404-123")); + assertU( + adoc( + "id", "144", + "wdf_preserve", "404-123")); assertU(commit()); - - assertQ("preserving original word", - req("wdf_preserve:404") - ,"//result[@numFound=1]" - ); - - assertQ("preserving original word", - req("wdf_preserve:123") - ,"//result[@numFound=1]" - ); - - assertQ("preserving original word", - req("wdf_preserve:404-123*") - ,"//result[@numFound=1]" - ); + + assertQ("preserving original word", req("wdf_preserve:404"), "//result[@numFound=1]"); + + assertQ("preserving original word", req("wdf_preserve:123"), "//result[@numFound=1]"); + + assertQ("preserving original word", req("wdf_preserve:404-123*"), "//result[@numFound=1]"); clearIndex(); } @@ -142,62 +130,38 @@ public void testPerformance() throws IOException { ***/ @Test - public void testAlphaNumericWords(){ - assertU(adoc("id", "68","numericsubword","Java/J2SE")); - assertU(commit()); - - assertQ("j2se found", - req("numericsubword:(J2SE)") - ,"//result[@numFound=1]" - ); - assertQ("no j2 or se", - req("numericsubword:(J2 OR SE)") - ,"//result[@numFound=0]" - ); + public void testAlphaNumericWords() { + assertU(adoc("id", "68", "numericsubword", "Java/J2SE")); + assertU(commit()); + + assertQ("j2se found", req("numericsubword:(J2SE)"), "//result[@numFound=1]"); + assertQ("no j2 or se", req("numericsubword:(J2 OR SE)"), "//result[@numFound=0]"); clearIndex(); } @Test - public void testProtectedWords(){ - assertU(adoc("id", "70","protectedsubword","c# c++ .net Java/J2SE")); + public void testProtectedWords() { + 
assertU(adoc("id", "70", "protectedsubword", "c# c++ .net Java/J2SE")); assertU(commit()); - assertQ("java found", - req("protectedsubword:(java)") - ,"//result[@numFound=1]" - ); - - assertQ(".net found", - req("protectedsubword:(.net)") - ,"//result[@numFound=1]" - ); - - assertQ("c# found", - req("protectedsubword:(c#)") - ,"//result[@numFound=1]" - ); - - assertQ("c++ found", - req("protectedsubword:(c++)") - ,"//result[@numFound=1]" - ); - - assertQ("c found?", - req("protectedsubword:c") - ,"//result[@numFound=0]" - ); - assertQ("net found?", - req("protectedsubword:net") - ,"//result[@numFound=0]" - ); + assertQ("java found", req("protectedsubword:(java)"), "//result[@numFound=1]"); + + assertQ(".net found", req("protectedsubword:(.net)"), "//result[@numFound=1]"); + + assertQ("c# found", req("protectedsubword:(c#)"), "//result[@numFound=1]"); + + assertQ("c++ found", req("protectedsubword:(c++)"), "//result[@numFound=1]"); + + assertQ("c found?", req("protectedsubword:c"), "//result[@numFound=0]"); + assertQ("net found?", req("protectedsubword:net"), "//result[@numFound=0]"); clearIndex(); } - + @Test public void testCustomTypes() throws Exception { String testText = "I borrowed $5,400.00 at 25% interest-rate"; ResourceLoader loader = new SolrResourceLoader(TEST_PATH().resolve("collection1")); - Map args = new HashMap<>(); + Map args = new HashMap<>(); args.put("luceneMatchVersion", Version.LATEST.toString()); args.put("generateWordParts", "1"); args.put("generateNumberParts", "1"); @@ -205,20 +169,31 @@ public void testCustomTypes() throws Exception { args.put("catenateNumbers", "1"); args.put("catenateAll", "0"); args.put("splitOnCaseChange", "1"); - + /* default behavior */ WordDelimiterFilterFactory factoryDefault = new WordDelimiterFilterFactory(args); factoryDefault.inform(loader); - + TokenStream ts = factoryDefault.create(whitespaceMockTokenizer(testText)); - BaseTokenStreamTestCase.assertTokenStreamContents(ts, - new String[] { "I", "borrowed", "5", "540000", "400", "00", "at", "25", "interest", "interestrate", "rate" }); + BaseTokenStreamTestCase.assertTokenStreamContents( + ts, + new String[] { + "I", + "borrowed", + "5", + "540000", + "400", + "00", + "at", + "25", + "interest", + "interestrate", + "rate" + }); ts = factoryDefault.create(whitespaceMockTokenizer("foo\u200Dbar")); - BaseTokenStreamTestCase.assertTokenStreamContents(ts, - new String[] { "foo", "foobar", "bar" }); + BaseTokenStreamTestCase.assertTokenStreamContents(ts, new String[] {"foo", "foobar", "bar"}); - /* custom behavior */ args = new HashMap<>(); // use a custom type mapping @@ -232,14 +207,16 @@ public void testCustomTypes() throws Exception { args.put("types", "wdftypes.txt"); WordDelimiterFilterFactory factoryCustom = new WordDelimiterFilterFactory(args); factoryCustom.inform(loader); - + ts = factoryCustom.create(whitespaceMockTokenizer(testText)); - BaseTokenStreamTestCase.assertTokenStreamContents(ts, - new String[] { "I", "borrowed", "$5,400.00", "at", "25%", "interest", "interestrate", "rate" }); - + BaseTokenStreamTestCase.assertTokenStreamContents( + ts, + new String[] { + "I", "borrowed", "$5,400.00", "at", "25%", "interest", "interestrate", "rate" + }); + /* test custom behavior with a char > 0x7F, because we had to make a larger byte[] */ ts = factoryCustom.create(whitespaceMockTokenizer("foo\u200Dbar")); - BaseTokenStreamTestCase.assertTokenStreamContents(ts, - new String[] { "foo\u200Dbar" }); + BaseTokenStreamTestCase.assertTokenStreamContents(ts, new String[] {"foo\u200Dbar"}); } } 
diff --git a/solr/core/src/test/org/apache/solr/analysis/ThrowingMockTokenFilterFactory.java b/solr/core/src/test/org/apache/solr/analysis/ThrowingMockTokenFilterFactory.java index 0dc2a573b19..c0774b95556 100644 --- a/solr/core/src/test/org/apache/solr/analysis/ThrowingMockTokenFilterFactory.java +++ b/solr/core/src/test/org/apache/solr/analysis/ThrowingMockTokenFilterFactory.java @@ -16,17 +16,14 @@ */ package org.apache.solr.analysis; -import org.apache.lucene.analysis.TokenFilter; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.TokenFilterFactory; - import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.util.Map; +import org.apache.lucene.analysis.TokenFilter; +import org.apache.lucene.analysis.TokenFilterFactory; +import org.apache.lucene.analysis.TokenStream; -/** - * Token filter factory that misbehaves on command. - */ +/** Token filter factory that misbehaves on command. */ public class ThrowingMockTokenFilterFactory extends TokenFilterFactory { private Class exceptionClass; @@ -44,11 +41,10 @@ public ThrowingMockTokenFilterFactory(Map args) { throw new RuntimeException("Required parameter exceptionClassName is missing"); } try { - exceptionClass = (Class)Class.forName(exceptionClassName); + exceptionClass = (Class) Class.forName(exceptionClassName); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } - } @Override @@ -59,7 +55,10 @@ public boolean incrementToken() throws IOException { if (input.incrementToken()) { try { throw exceptionClass.getConstructor().newInstance(); - } catch (IllegalAccessException | InstantiationException | InvocationTargetException | NoSuchMethodException iae) { + } catch (IllegalAccessException + | InstantiationException + | InvocationTargetException + | NoSuchMethodException iae) { throw new RuntimeException(iae); } } diff --git a/solr/core/src/test/org/apache/solr/analysis/TokenizerChainTest.java b/solr/core/src/test/org/apache/solr/analysis/TokenizerChainTest.java index fe4d39c61db..5dad20e003b 100644 --- a/solr/core/src/test/org/apache/solr/analysis/TokenizerChainTest.java +++ b/solr/core/src/test/org/apache/solr/analysis/TokenizerChainTest.java @@ -17,15 +17,13 @@ package org.apache.solr.analysis; import java.util.Collections; - +import org.apache.lucene.analysis.TokenFilterFactory; import org.apache.lucene.analysis.core.LowerCaseFilterFactory; import org.apache.lucene.analysis.miscellaneous.ASCIIFoldingFilterFactory; -import org.apache.lucene.analysis.TokenFilterFactory; import org.apache.lucene.util.BytesRef; import org.apache.solr.SolrTestCaseJ4; import org.junit.Test; - public class TokenizerChainTest extends SolrTestCaseJ4 { @Test @@ -34,11 +32,9 @@ public void testNormalization() throws Exception { TokenFilterFactory[] tff = new TokenFilterFactory[2]; tff[0] = new LowerCaseFilterFactory(Collections.emptyMap()); tff[1] = new ASCIIFoldingFilterFactory(Collections.emptyMap()); - TokenizerChain tokenizerChain = new TokenizerChain( - new MockTokenizerFactory(Collections.emptyMap()), - tff); - assertEquals(new BytesRef("fooba"), - tokenizerChain.normalize(fieldName, "FOOB\u00c4")); + TokenizerChain tokenizerChain = + new TokenizerChain(new MockTokenizerFactory(Collections.emptyMap()), tff); + assertEquals(new BytesRef("fooba"), tokenizerChain.normalize(fieldName, "FOOB\u00c4")); tokenizerChain.close(); } } diff --git a/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerAdminHandler.java 
b/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerAdminHandler.java index 39f7675a897..757108693b3 100644 --- a/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerAdminHandler.java +++ b/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerAdminHandler.java @@ -17,7 +17,6 @@ package org.apache.solr.client.solrj.embedded; import java.io.IOException; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrRequest; @@ -31,40 +30,40 @@ public class TestEmbeddedSolrServerAdminHandler extends SolrTestCaseJ4 { - @Test - public void testPathIsAddedToContext() throws IOException, SolrServerException { + @Test + public void testPathIsAddedToContext() throws IOException, SolrServerException { - final NodeConfig config = new NodeConfig.NodeConfigBuilder("testnode", TEST_PATH()) - .setConfigSetBaseDirectory(TEST_PATH().resolve("configsets").toString()) - .build(); + final NodeConfig config = + new NodeConfig.NodeConfigBuilder("testnode", TEST_PATH()) + .setConfigSetBaseDirectory(TEST_PATH().resolve("configsets").toString()) + .build(); - try (final EmbeddedSolrServer server = new EmbeddedSolrServer(config, "collection1")) { - final SystemInfoRequest info = new SystemInfoRequest(); - final NamedList response = server.request(info); - assertTrue(response.size() > 0); - } + try (final EmbeddedSolrServer server = new EmbeddedSolrServer(config, "collection1")) { + final SystemInfoRequest info = new SystemInfoRequest(); + final NamedList response = server.request(info); + assertTrue(response.size() > 0); } + } - private static class SystemInfoRequest extends SolrRequest { - - public SystemInfoRequest() { - super(METHOD.GET, "/admin/info/system"); - } + private static class SystemInfoRequest extends SolrRequest { - @Override - public SolrParams getParams() { - return new ModifiableSolrParams(); - } + public SystemInfoRequest() { + super(METHOD.GET, "/admin/info/system"); + } - @Override - protected QueryResponse createResponse(final SolrClient client) { - return new QueryResponse(); - } + @Override + public SolrParams getParams() { + return new ModifiableSolrParams(); + } - @Override - public String getRequestType() { - return SolrRequest.SolrRequestType.ADMIN.toString(); - } + @Override + protected QueryResponse createResponse(final SolrClient client) { + return new QueryResponse(); } + @Override + public String getRequestType() { + return SolrRequest.SolrRequestType.ADMIN.toString(); + } + } } diff --git a/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerConstructors.java b/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerConstructors.java index 20aaa797e70..ec6d688fe17 100644 --- a/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerConstructors.java +++ b/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerConstructors.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.nio.file.Path; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.request.CoreAdminRequest; @@ -31,18 +30,17 @@ public class TestEmbeddedSolrServerConstructors extends SolrTestCaseJ4 { @Test @SuppressWarnings({"try"}) public void testPathConstructor() throws IOException { - try (EmbeddedSolrServer server = new EmbeddedSolrServer(TEST_PATH(), "collection1")) { - - } + try (EmbeddedSolrServer 
server = new EmbeddedSolrServer(TEST_PATH(), "collection1")) {} } @Test public void testNodeConfigConstructor() throws Exception { Path path = createTempDir(); - NodeConfig config = new NodeConfig.NodeConfigBuilder("testnode", path) - .setConfigSetBaseDirectory(TEST_PATH().resolve("configsets").toString()) - .build(); + NodeConfig config = + new NodeConfig.NodeConfigBuilder("testnode", path) + .setConfigSetBaseDirectory(TEST_PATH().resolve("configsets").toString()) + .build(); try (EmbeddedSolrServer server = new EmbeddedSolrServer(config, "newcore")) { @@ -58,8 +56,6 @@ public void testNodeConfigConstructor() throws Exception { assertEquals(1, server.query(new SolrQuery("*:*")).getResults().getNumFound()); assertEquals(1, server.query("newcore", new SolrQuery("*:*")).getResults().getNumFound()); - } } - } diff --git a/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerSchemaAPI.java b/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerSchemaAPI.java index 0edd0ea3d97..5eee81ee6c0 100644 --- a/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerSchemaAPI.java +++ b/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServerSchemaAPI.java @@ -21,7 +21,6 @@ import java.util.Collections; import java.util.LinkedHashMap; import java.util.Map; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.api.ApiBag; import org.apache.solr.client.solrj.request.schema.SchemaRequest; @@ -37,8 +36,9 @@ public class TestEmbeddedSolrServerSchemaAPI extends SolrTestCaseJ4 { private String fieldName = "VerificationTest"; private static EmbeddedSolrServer server; private final Map fieldAttributes; + { - Map field = new LinkedHashMap<>(); + Map field = new LinkedHashMap<>(); field.put("name", fieldName); field.put("type", "string"); field.put("stored", false); @@ -50,21 +50,20 @@ public class TestEmbeddedSolrServerSchemaAPI extends SolrTestCaseJ4 { @BeforeClass public static void initClass() throws Exception { assertNull("no system props clash please", System.getProperty("managed.schema.mutable")); - System.setProperty("managed.schema.mutable", ""+//true - random().nextBoolean() - ); + System.setProperty("managed.schema.mutable", "" + random().nextBoolean()); Path tmpHome = createTempDir("tmp-home"); Path coreDir = tmpHome.resolve(DEFAULT_TEST_CORENAME); copyMinConf(coreDir.toFile(), null, "solrconfig-managed-schema.xml"); - initCore("solrconfig.xml" /*it's renamed to to*/, "schema.xml", tmpHome.toAbsolutePath().toString()); - + initCore( + "solrconfig.xml" /*it's renamed to to*/, "schema.xml", tmpHome.toAbsolutePath().toString()); + server = new EmbeddedSolrServer(h.getCoreContainer(), DEFAULT_TEST_CORENAME); } @AfterClass public static void destroyClass() throws IOException { if (null != server) { - server.close(); + server.close(); server = null; } System.clearProperty("managed.schema.mutable"); @@ -72,36 +71,43 @@ public static void destroyClass() throws IOException { @Before public void thereIsNoFieldYet() { - SolrException ex = expectThrows(SolrException.class, () -> new SchemaRequest.Field(fieldName).process(server)); + SolrException ex = + expectThrows(SolrException.class, () -> new SchemaRequest.Field(fieldName).process(server)); assertTrue(ex.getMessage().contains("No") && ex.getMessage().contains("VerificationTest")); } - + @Test public void testSchemaAddFieldAndVerifyExistence() throws Exception { assumeTrue("it needs to ammend schema", Boolean.getBoolean("managed.schema.mutable")); - 
SchemaResponse.UpdateResponse addFieldResponse = new SchemaRequest.AddField(fieldAttributes).process(server); + SchemaResponse.UpdateResponse addFieldResponse = + new SchemaRequest.AddField(fieldAttributes).process(server); assertEquals(addFieldResponse.toString(), 0, addFieldResponse.getStatus()); // This asserts that the field was actually created // this is due to the fact that the response gave OK but actually never created the field. - Map foundFieldAttributes = new SchemaRequest.Field(fieldName).process(server).getField(); + Map foundFieldAttributes = + new SchemaRequest.Field(fieldName).process(server).getField(); assertEquals(fieldAttributes, foundFieldAttributes); - assertEquals("removing " + fieldName, 0, + assertEquals( + "removing " + fieldName, + 0, new SchemaRequest.DeleteField(fieldName).process(server).getStatus()); } - @Test + @Test public void testSchemaAddFieldAndFailOnImmutable() { assumeFalse("it needs a readonly schema", Boolean.getBoolean("managed.schema.mutable")); - SchemaRequest.AddField addFieldUpdateSchemaRequest = new SchemaRequest.AddField(fieldAttributes); - assertFailedSchemaResponse(() -> addFieldUpdateSchemaRequest.process(server), - "schema is not editable"); + SchemaRequest.AddField addFieldUpdateSchemaRequest = + new SchemaRequest.AddField(fieldAttributes); + assertFailedSchemaResponse( + () -> addFieldUpdateSchemaRequest.process(server), "schema is not editable"); } - private static void assertFailedSchemaResponse(ThrowingRunnable runnable, String expectedErrorMessage) { + private static void assertFailedSchemaResponse( + ThrowingRunnable runnable, String expectedErrorMessage) { ApiBag.ExceptionWithErrObject e = expectThrows(ApiBag.ExceptionWithErrObject.class, runnable); String msg = e.getErrs().get(0).get("errorMessages").toString(); assertTrue(msg.contains(expectedErrorMessage)); diff --git a/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestJettySolrRunner.java b/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestJettySolrRunner.java index ead3b0926e5..e5e0ee4716e 100644 --- a/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestJettySolrRunner.java +++ b/solr/core/src/test/org/apache/solr/client/solrj/embedded/TestJettySolrRunner.java @@ -16,12 +16,6 @@ */ package org.apache.solr.client.solrj.embedded; -import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.client.solrj.SolrClient; -import org.apache.solr.client.solrj.request.CoreAdminRequest; -import org.apache.solr.cloud.MiniSolrCloudCluster; -import org.junit.Test; - import java.io.IOException; import java.net.BindException; import java.nio.charset.Charset; @@ -29,6 +23,11 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Properties; +import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.client.solrj.SolrClient; +import org.apache.solr.client.solrj.request.CoreAdminRequest; +import org.apache.solr.cloud.MiniSolrCloudCluster; +import org.junit.Test; public class TestJettySolrRunner extends SolrTestCaseJ4 { @@ -43,15 +42,16 @@ public void testPassSolrHomeToRunner() throws Exception { Path configsets = TEST_PATH().resolve("configsets"); - String solrxml - = "CONFIGSETSCOREROOT" - .replace("CONFIGSETS", configsets.toString()) - .replace("COREROOT", coresDir.toString()); + String solrxml = + "CONFIGSETSCOREROOT" + .replace("CONFIGSETS", configsets.toString()) + .replace("COREROOT", coresDir.toString()); Files.write(solrHome.resolve("solr.xml"), solrxml.getBytes(StandardCharsets.UTF_8)); JettyConfig jettyConfig = 
buildJettyConfig("/solr"); - JettySolrRunner runner = new JettySolrRunner(solrHome.toString(), new Properties(), jettyConfig); + JettySolrRunner runner = + new JettySolrRunner(solrHome.toString(), new Properties(), jettyConfig); try { runner.start(); @@ -68,15 +68,15 @@ public void testPassSolrHomeToRunner() throws Exception { } finally { runner.stop(); } - } - @SuppressWarnings("ThrowableNotThrown") @Test public void testLookForBindException() throws IOException { Path solrHome = createTempDir(); - Files.write(solrHome.resolve("solr.xml"), MiniSolrCloudCluster.DEFAULT_CLOUD_SOLR_XML.getBytes(Charset.defaultCharset())); + Files.write( + solrHome.resolve("solr.xml"), + MiniSolrCloudCluster.DEFAULT_CLOUD_SOLR_XML.getBytes(Charset.defaultCharset())); JettyConfig config = JettyConfig.builder().build(); @@ -97,12 +97,13 @@ public void testLookForBindException() throws IOException { result = jetty.lookForBindException(test); assertEquals(result, test); - test = new IOException() { - @Override - public synchronized Throwable getCause() { - return this; - } - }; + test = + new IOException() { + @Override + public synchronized Throwable getCause() { + return this; + } + }; result = jetty.lookForBindException(test); assertEquals(result, test); @@ -113,8 +114,5 @@ public synchronized Throwable getCause() { test = new IOException(new RuntimeException(be)); result = jetty.lookForBindException(test); assertEquals(result, be); - } - - } diff --git a/solr/core/src/test/org/apache/solr/client/solrj/impl/ConnectionReuseTest.java b/solr/core/src/test/org/apache/solr/client/solrj/impl/ConnectionReuseTest.java index f0ae1269044..a4050e70769 100644 --- a/solr/core/src/test/org/apache/solr/client/solrj/impl/ConnectionReuseTest.java +++ b/solr/core/src/test/org/apache/solr/client/solrj/impl/ConnectionReuseTest.java @@ -21,7 +21,6 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; - import org.apache.http.HttpClientConnection; import org.apache.http.HttpConnectionMetrics; import org.apache.http.HttpException; @@ -36,10 +35,10 @@ import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; import org.apache.http.message.BasicHttpRequest; +import org.apache.solr.SolrTestCaseJ4.SuppressSSL; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.cloud.SolrCloudTestCase; -import org.apache.solr.SolrTestCaseJ4.SuppressSSL; import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.update.AddUpdateCommand; import org.apache.solr.util.TestInjection; @@ -48,7 +47,7 @@ @SuppressSSL public class ConnectionReuseTest extends SolrCloudTestCase { - + private AtomicInteger id = new AtomicInteger(); private HttpClientContext context = HttpClientContext.create(); @@ -58,14 +57,20 @@ public class ConnectionReuseTest extends SolrCloudTestCase { public static void setupCluster() throws Exception { TestInjection.failUpdateRequests = "true:100"; configureCluster(1) - .addConfig("config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .configure(); CollectionAdminRequest.createCollection(COLLECTION, "config", 1, 1) .processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT); - cluster.getSolrClient().waitForState(COLLECTION, DEFAULT_TIMEOUT, TimeUnit.SECONDS, - (n, 
c) -> DocCollection.isFullyActive(n, c, 1, 1)); + cluster + .getSolrClient() + .waitForState( + COLLECTION, + DEFAULT_TIMEOUT, + TimeUnit.SECONDS, + (n, c) -> DocCollection.isFullyActive(n, c, 1, 1)); } private SolrClient buildClient(CloseableHttpClient httpClient, URL url) { @@ -76,13 +81,19 @@ private SolrClient buildClient(CloseableHttpClient httpClient, URL url) { case 1: return getHttpSolrClient(url.toString() + "/" + COLLECTION, httpClient); case 2: - CloudSolrClient client = getCloudSolrClient(cluster.getZkServer().getZkAddress(), random().nextBoolean(), httpClient, 30000, 60000); + CloudSolrClient client = + getCloudSolrClient( + cluster.getZkServer().getZkAddress(), + random().nextBoolean(), + httpClient, + 30000, + 60000); client.setDefaultCollection(COLLECTION); return client; } throw new RuntimeException("impossible"); } - + @Test public void testConnectionReuse() throws Exception { @@ -115,7 +126,10 @@ public void testConnectionReuse() throws Exception { } catch (Exception e) { e.printStackTrace(); } - if (!done && i > 0 && i < cnt2 - 1 && client instanceof ConcurrentUpdateSolrClient + if (!done + && i > 0 + && i < cnt2 - 1 + && client instanceof ConcurrentUpdateSolrClient && random().nextInt(10) > 8) { queueBreaks++; done = true; @@ -127,7 +141,8 @@ && random().nextInt(10) > 8) { } } - route = new HttpRoute(new HttpHost(url.getHost(), url.getPort(), isSSLMode() ? "https" : "http")); + route = + new HttpRoute(new HttpHost(url.getHost(), url.getPort(), isSSLMode() ? "https" : "http")); mConn = cm.requestConnection(route, HttpSolrClient.cacheKey); @@ -138,25 +153,32 @@ && random().nextInt(10) > 8) { cm.releaseConnection(conn2, null, -1, TimeUnit.MILLISECONDS); - assertNotNull("No connection metrics found - is the connection getting aborted? server closing the connection? " - + client.getClass().getSimpleName(), metrics); + assertNotNull( + "No connection metrics found - is the connection getting aborted? server closing the connection? " + + client.getClass().getSimpleName(), + metrics); // we try and make sure the connection we get has handled all of the requests in this test if (client instanceof ConcurrentUpdateSolrClient) { // we can't fully control queue polling breaking up requests - allow a bit of leeway int exp = cnt1 + queueBreaks + 2; assertTrue( - "We expected all communication via streaming client to use one connection! expected=" + exp + " got=" + "We expected all communication via streaming client to use one connection! expected=" + + exp + + " got=" + metrics.getRequestCount(), - Math.max(exp, metrics.getRequestCount()) - Math.min(exp, metrics.getRequestCount()) < 3); + Math.max(exp, metrics.getRequestCount()) - Math.min(exp, metrics.getRequestCount()) + < 3); } else { - assertTrue("We expected all communication to use one connection! " + client.getClass().getSimpleName() + " " - + metrics.getRequestCount(), + assertTrue( + "We expected all communication to use one connection! 
" + + client.getClass().getSimpleName() + + " " + + metrics.getRequestCount(), cnt1 * cnt2 + 2 <= metrics.getRequestCount()); } - } - finally { + } finally { HttpClientUtil.close(httpClient); } } @@ -168,7 +190,11 @@ public HttpClientConnection getConn(ConnectionRequest mConn) return conn; } - public void headerRequest(HttpHost target, HttpRoute route, HttpClientConnection conn, PoolingHttpClientConnectionManager cm) + public void headerRequest( + HttpHost target, + HttpRoute route, + HttpClientConnection conn, + PoolingHttpClientConnectionManager cm) throws IOException, HttpException { HttpRequest req = new BasicHttpRequest("OPTIONS", "*", HttpVersion.HTTP_1_1); @@ -184,10 +210,9 @@ public void headerRequest(HttpHost target, HttpRoute route, HttpClientConnection conn.receiveResponseHeader(); } - public ConnectionRequest getClientConnectionRequest(HttpClient httpClient, HttpRoute route, PoolingHttpClientConnectionManager cm) { + public ConnectionRequest getClientConnectionRequest( + HttpClient httpClient, HttpRoute route, PoolingHttpClientConnectionManager cm) { ConnectionRequest mConn = cm.requestConnection(route, HttpSolrClient.cacheKey); return mConn; } - } - diff --git a/solr/core/src/test/org/apache/solr/cloud/ActionThrottleTest.java b/solr/core/src/test/org/apache/solr/cloud/ActionThrottleTest.java index d8fe78b8c0a..984d1b8e4a7 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ActionThrottleTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ActionThrottleTest.java @@ -19,7 +19,6 @@ import java.util.Arrays; import java.util.List; import java.util.concurrent.TimeUnit; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.util.TimeSource; import org.junit.Test; @@ -48,7 +47,7 @@ public long getEpochTimeNs() { @Override public long[] getTimeAndEpochNs() { long time = getTimeNs(); - return new long[]{time, time}; + return new long[] {time, time}; } @Override @@ -60,7 +59,6 @@ public void sleep(long ms) throws InterruptedException { public long convertDelay(TimeUnit fromUnit, long value, TimeUnit toUnit) { throw new UnsupportedOperationException(); } - } // use the same time source as ActionThrottle @@ -75,14 +73,16 @@ public void testBasics() throws Exception { at.minimumWaitBetweenActions(); // should be no wait - assertTrue(TimeUnit.MILLISECONDS.convert(timeSource.getTimeNs() - start, TimeUnit.NANOSECONDS) < 1000); + assertTrue( + TimeUnit.MILLISECONDS.convert(timeSource.getTimeNs() - start, TimeUnit.NANOSECONDS) < 1000); at.markAttemptingAction(); if (random().nextBoolean()) Thread.sleep(100); at.minimumWaitBetweenActions(); - long elaspsedTime = TimeUnit.MILLISECONDS.convert(timeSource.getTimeNs() - start, TimeUnit.NANOSECONDS); + long elaspsedTime = + TimeUnit.MILLISECONDS.convert(timeSource.getTimeNs() - start, TimeUnit.NANOSECONDS); assertTrue(elaspsedTime + "ms", elaspsedTime >= 995); @@ -93,32 +93,37 @@ public void testBasics() throws Exception { Thread.sleep(random().nextInt(1000)); - elaspsedTime = TimeUnit.MILLISECONDS.convert(timeSource.getTimeNs() - start, TimeUnit.NANOSECONDS); + elaspsedTime = + TimeUnit.MILLISECONDS.convert(timeSource.getTimeNs() - start, TimeUnit.NANOSECONDS); assertTrue(elaspsedTime + "ms", elaspsedTime >= 995); } - + @Test public void testAZeroNanoTimeReturnInWait() throws Exception { - ActionThrottle at = new ActionThrottle("test", 1000, new TestNanoTimeSource(Arrays.asList(new Long[]{0L, 10L}))); + ActionThrottle at = + new ActionThrottle( + "test", 1000, new TestNanoTimeSource(Arrays.asList(new Long[] {0L, 10L}))); long 
start = timeSource.getTimeNs(); - + at.markAttemptingAction(); - + at.minimumWaitBetweenActions(); - - long elaspsedTime = TimeUnit.MILLISECONDS.convert(timeSource.getTimeNs() - start, TimeUnit.NANOSECONDS); - - assertTrue(elaspsedTime + "ms", elaspsedTime >= 995); + long elaspsedTime = + TimeUnit.MILLISECONDS.convert(timeSource.getTimeNs() - start, TimeUnit.NANOSECONDS); + + assertTrue(elaspsedTime + "ms", elaspsedTime >= 995); } public void testCreateNewThrottleWithLastValue() throws Exception { - ActionThrottle throttle = new ActionThrottle("xyz", 1000, new TestNanoTimeSource(Arrays.asList(new Long[]{10L, 20L}))); + ActionThrottle throttle = + new ActionThrottle( + "xyz", 1000, new TestNanoTimeSource(Arrays.asList(new Long[] {10L, 20L}))); throttle.markAttemptingAction(); - assertEquals((Long)10L, throttle.getLastActionStartedAt()); + assertEquals((Long) 10L, throttle.getLastActionStartedAt()); throttle = new ActionThrottle("new_xyz", 1000, throttle.getLastActionStartedAt()); - assertEquals((Long)10L, throttle.getLastActionStartedAt()); + assertEquals((Long) 10L, throttle.getLastActionStartedAt()); } } diff --git a/solr/core/src/test/org/apache/solr/cloud/AddReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/AddReplicaTest.java index 84cdd74a814..9f45d94eb4d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/AddReplicaTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/AddReplicaTest.java @@ -22,7 +22,6 @@ import java.util.Collection; import java.util.EnumSet; import java.util.LinkedHashSet; - import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.response.RequestStatusState; @@ -35,52 +34,55 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * - */ +/** */ public class AddReplicaTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @BeforeClass public static void setupCluster() throws Exception { configureCluster(3) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .configure(); } @Before - public void setUp() throws Exception { + public void setUp() throws Exception { super.setUp(); cluster.deleteAllCollections(); } @Test - public void testAddMultipleReplicas() throws Exception { + public void testAddMultipleReplicas() throws Exception { String collection = "testAddMultipleReplicas"; CloudSolrClient cloudClient = cluster.getSolrClient(); - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collection, "conf1", 1, 1); + CollectionAdminRequest.Create create = + CollectionAdminRequest.createCollection(collection, "conf1", 1, 1); cloudClient.request(create); cluster.waitForActiveCollection(collection, 1, 1); - CollectionAdminRequest.AddReplica addReplica = CollectionAdminRequest.addReplicaToShard(collection, "shard1") - .setNrtReplicas(1) - .setTlogReplicas(1) - .setPullReplicas(1); + CollectionAdminRequest.AddReplica addReplica = + CollectionAdminRequest.addReplicaToShard(collection, "shard1") + .setNrtReplicas(1) + .setTlogReplicas(1) + .setPullReplicas(1); RequestStatusState status = addReplica.processAndWait(collection + "_xyz1", cloudClient, 120); assertEquals(COMPLETED, status); - + cluster.waitForActiveCollection(collection, 1, 4); - - DocCollection docCollection = 
cloudClient.getZkStateReader().getClusterState().getCollectionOrNull(collection); + + DocCollection docCollection = + cloudClient.getZkStateReader().getClusterState().getCollectionOrNull(collection); assertNotNull(docCollection); assertEquals(4, docCollection.getReplicas().size()); assertEquals(2, docCollection.getReplicas(EnumSet.of(Replica.Type.NRT)).size()); assertEquals(1, docCollection.getReplicas(EnumSet.of(Replica.Type.TLOG)).size()); assertEquals(1, docCollection.getReplicas(EnumSet.of(Replica.Type.PULL)).size()); - docCollection = cloudClient.getZkStateReader().getClusterState().getCollectionOrNull(collection); + docCollection = + cloudClient.getZkStateReader().getClusterState().getCollectionOrNull(collection); assertNotNull(docCollection); // sanity check that everything is as before assertEquals(4, docCollection.getReplicas().size()); @@ -92,19 +94,21 @@ public void testAddMultipleReplicas() throws Exception { // so test that as well LinkedHashSet createNodeSet = new LinkedHashSet<>(2); createNodeSet.add(cluster.getRandomJetty(random()).getNodeName()); - while (true) { + while (true) { String nodeName = cluster.getRandomJetty(random()).getNodeName(); - if (createNodeSet.add(nodeName)) break; + if (createNodeSet.add(nodeName)) break; } - addReplica = CollectionAdminRequest.addReplicaToShard(collection, "shard1") - .setNrtReplicas(3) - .setTlogReplicas(1) - .setPullReplicas(1) - .setCreateNodeSet(String.join(",", createNodeSet)); + addReplica = + CollectionAdminRequest.addReplicaToShard(collection, "shard1") + .setNrtReplicas(3) + .setTlogReplicas(1) + .setPullReplicas(1) + .setCreateNodeSet(String.join(",", createNodeSet)); status = addReplica.processAndWait(collection + "_xyz1", cloudClient, 120); assertEquals(COMPLETED, status); waitForState("Timedout wait for collection to be created", collection, clusterShape(1, 9)); - docCollection = cloudClient.getZkStateReader().getClusterState().getCollectionOrNull(collection); + docCollection = + cloudClient.getZkStateReader().getClusterState().getCollectionOrNull(collection); assertNotNull(docCollection); // sanity check that everything is as before assertEquals(9, docCollection.getReplicas().size()); @@ -115,26 +119,29 @@ public void testAddMultipleReplicas() throws Exception { @Test public void test() throws Exception { - + String collection = "addreplicatest_coll"; CloudSolrClient cloudClient = cluster.getSolrClient(); - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collection, "conf1", 2, 1); + CollectionAdminRequest.Create create = + CollectionAdminRequest.createCollection(collection, "conf1", 2, 1); cloudClient.request(create); - + cluster.waitForActiveCollection(collection, 2, 2); ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); DocCollection coll = clusterState.getCollection(collection); String sliceName = coll.getSlices().iterator().next().getName(); Collection replicas = coll.getSlice(sliceName).getReplicas(); - CollectionAdminRequest.AddReplica addReplica = CollectionAdminRequest.addReplicaToShard(collection, sliceName); + CollectionAdminRequest.AddReplica addReplica = + CollectionAdminRequest.addReplicaToShard(collection, sliceName); addReplica.processAsync("000", cloudClient); - CollectionAdminRequest.RequestStatus requestStatus = CollectionAdminRequest.requestStatus("000"); + CollectionAdminRequest.RequestStatus requestStatus = + CollectionAdminRequest.requestStatus("000"); CollectionAdminRequest.RequestStatusResponse rsp = requestStatus.process(cloudClient); 
assertNotSame(rsp.getRequestStatus(), COMPLETED); - + // wait for async request success boolean success = false; for (int i = 0; i < 200; i++) { @@ -147,8 +154,14 @@ public void test() throws Exception { Thread.sleep(500); } assertTrue(success); - - Collection replicas2 = cloudClient.getZkStateReader().getClusterState().getCollection(collection).getSlice(sliceName).getReplicas(); + + Collection replicas2 = + cloudClient + .getZkStateReader() + .getClusterState() + .getCollection(collection) + .getSlice(sliceName) + .getReplicas(); replicas2.removeAll(replicas); assertEquals(1, replicas2.size()); @@ -182,7 +195,8 @@ public void test() throws Exception { if (replica.getName().equals(replica2)) { continue; // may be still recovering } - assertSame(coll.toString() + "\n" + replica.toString(), replica.getState(), Replica.State.ACTIVE); + assertSame( + coll.toString() + "\n" + replica.toString(), replica.getState(), Replica.State.ACTIVE); } } } diff --git a/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java index 09d5969f399..8259fb9993c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java @@ -16,13 +16,14 @@ */ package org.apache.solr.cloud; +import static org.apache.solr.common.cloud.ZkStateReader.ALIASES; + import java.io.IOException; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.function.UnaryOperator; - import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; @@ -62,8 +63,6 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.apache.solr.common.cloud.ZkStateReader.ALIASES; - public class AliasIntegrationTest extends SolrCloudTestCase { private CloseableHttpClient httpClient; @@ -71,9 +70,7 @@ public class AliasIntegrationTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { - configureCluster(2) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(2).addConfig("conf", configset("cloud-minimal")).configure(); } @Before @@ -96,14 +93,22 @@ public void tearDown() throws Exception { @Test @SuppressWarnings({"unchecked"}) public void testProperties() throws Exception { - CollectionAdminRequest.createCollection("collection1meta", "conf", 2, 1).process(cluster.getSolrClient()); - CollectionAdminRequest.createCollection("collection2meta", "conf", 1, 1).process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection("collection1meta", "conf", 2, 1) + .process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection("collection2meta", "conf", 1, 1) + .process(cluster.getSolrClient()); cluster.waitForActiveCollection("collection1meta", 2, 2); cluster.waitForActiveCollection("collection2meta", 1, 1); - waitForState("Expected collection1 to be created with 2 shards and 1 replica", "collection1meta", clusterShape(2, 2)); - waitForState("Expected collection2 to be created with 1 shard and 1 replica", "collection2meta", clusterShape(1, 1)); + waitForState( + "Expected collection1 to be created with 2 shards and 1 replica", + "collection1meta", + clusterShape(2, 2)); + waitForState( + "Expected collection2 to be created with 1 shard and 1 replica", + "collection2meta", + clusterShape(1, 1)); ZkStateReader zkStateReader = 
cluster.getSolrClient().getZkStateReader(); zkStateReader.createClusterStateWatchersAndUpdate(); List aliases = zkStateReader.getAliases().resolveAliases("meta1"); @@ -111,26 +116,29 @@ public void testProperties() throws Exception { assertEquals("meta1", aliases.get(0)); final ZkStateReader.AliasesManager aliasesManager = zkStateReader.aliasesManager; - aliasesManager.applyModificationAndExportToZk(a -> a.cloneWithCollectionAlias("meta1", "collection1meta,collection2meta")); + aliasesManager.applyModificationAndExportToZk( + a -> a.cloneWithCollectionAlias("meta1", "collection1meta,collection2meta")); aliases = zkStateReader.getAliases().resolveAliases("meta1"); assertEquals(2, aliases.size()); assertEquals("collection1meta", aliases.get(0)); assertEquals("collection2meta", aliases.get(1)); - //ensure we have the back-compat format in ZK: + // ensure we have the back-compat format in ZK: final byte[] rawBytes = zkStateReader.getZkClient().getData(ALIASES, null, null, true); - assertTrue(((Map>)Utils.fromJSON(rawBytes)).get("collection").get("meta1") instanceof String); + assertTrue( + ((Map>) Utils.fromJSON(rawBytes)).get("collection").get("meta1") + instanceof String); // set properties - aliasesManager.applyModificationAndExportToZk(a1 -> - a1.cloneWithCollectionAliasProperties("meta1", "foo", "bar")); + aliasesManager.applyModificationAndExportToZk( + a1 -> a1.cloneWithCollectionAliasProperties("meta1", "foo", "bar")); Map meta = zkStateReader.getAliases().getCollectionAliasProperties("meta1"); assertNotNull(meta); assertTrue(meta.containsKey("foo")); assertEquals("bar", meta.get("foo")); // set more properties - aliasesManager.applyModificationAndExportToZk( a1 -> - a1.cloneWithCollectionAliasProperties("meta1", "foobar", "bazbam")); + aliasesManager.applyModificationAndExportToZk( + a1 -> a1.cloneWithCollectionAliasProperties("meta1", "foobar", "bazbam")); meta = zkStateReader.getAliases().getCollectionAliasProperties("meta1"); assertNotNull(meta); @@ -143,8 +151,8 @@ public void testProperties() throws Exception { assertEquals("bazbam", meta.get("foobar")); // remove properties - aliasesManager.applyModificationAndExportToZk(a1 -> - a1.cloneWithCollectionAliasProperties("meta1", "foo", null)); + aliasesManager.applyModificationAndExportToZk( + a1 -> a1.cloneWithCollectionAliasProperties("meta1", "foo", null)); meta = zkStateReader.getAliases().getCollectionAliasProperties("meta1"); assertNotNull(meta); @@ -156,20 +164,23 @@ public void testProperties() throws Exception { assertEquals("bazbam", meta.get("foobar")); // removal of non existent key should succeed. 
- aliasesManager.applyModificationAndExportToZk(a2 -> - a2.cloneWithCollectionAliasProperties("meta1", "foo", null)); + aliasesManager.applyModificationAndExportToZk( + a2 -> a2.cloneWithCollectionAliasProperties("meta1", "foo", null)); // chained invocations - aliasesManager.applyModificationAndExportToZk(a1 -> - a1.cloneWithCollectionAliasProperties("meta1", "foo2", "bazbam") - .cloneWithCollectionAliasProperties("meta1", "foo3", "bazbam2")); + aliasesManager.applyModificationAndExportToZk( + a1 -> + a1.cloneWithCollectionAliasProperties("meta1", "foo2", "bazbam") + .cloneWithCollectionAliasProperties("meta1", "foo3", "bazbam2")); // some other independent update (not overwritten) - aliasesManager.applyModificationAndExportToZk(a1 -> - a1.cloneWithCollectionAlias("meta3", "collection1meta,collection2meta")); + aliasesManager.applyModificationAndExportToZk( + a1 -> a1.cloneWithCollectionAlias("meta3", "collection1meta,collection2meta")); // competing went through - assertEquals("collection1meta,collection2meta", zkStateReader.getAliases().getCollectionAliasMap().get("meta3")); + assertEquals( + "collection1meta,collection2meta", + zkStateReader.getAliases().getCollectionAliasMap().get("meta3")); meta = zkStateReader.getAliases().getCollectionAliasProperties("meta1"); assertNotNull(meta); @@ -179,7 +190,9 @@ public void testProperties() throws Exception { assertEquals("bazbam", meta.get("foobar")); // competing update not overwritten - assertEquals("collection1meta,collection2meta", zkStateReader.getAliases().getCollectionAliasMap().get("meta3")); + assertEquals( + "collection1meta,collection2meta", + zkStateReader.getAliases().getCollectionAliasMap().get("meta3")); // new properties added assertTrue(meta.containsKey("foo2")); @@ -207,10 +220,13 @@ public void testProperties() throws Exception { } // check removal leaves no props behind - assertEquals(0, zkStateReader.getAliases() - .cloneWithCollectionAlias("meta1", null) // not persisted to zk on purpose - .getCollectionAliasProperties("meta1") - .size()); + assertEquals( + 0, + zkStateReader + .getAliases() + .cloneWithCollectionAlias("meta1", null) // not persisted to zk on purpose + .getCollectionAliasProperties("meta1") + .size()); } @Test @@ -218,18 +234,25 @@ public void testModifyPropertiesV2() throws Exception { final String aliasName = getSaferTestName(); ZkStateReader zkStateReader = createColectionsAndAlias(aliasName); final String baseUrl = cluster.getRandomJetty(random()).getBaseUrl().toString(); - //TODO fix Solr test infra so that this /____v2/ becomes /api/ + // TODO fix Solr test infra so that this /____v2/ becomes /api/ HttpPost post = new HttpPost(baseUrl + "/____v2/c"); - post.setEntity(new StringEntity("{\n" + - "\"set-alias-property\" : {\n" + - " \"name\": \"" + aliasName + "\",\n" + - " \"properties\" : {\n" + - " \"foo\": \"baz\",\n" + - " \"bar\": \"bam\"\n" + - " }\n" + - //TODO should we use "NOW=" param? Won't work with v2 and is kinda a hack any way since intended for distrib - " }\n" + - "}", ContentType.APPLICATION_JSON)); + post.setEntity( + new StringEntity( + "{\n" + + "\"set-alias-property\" : {\n" + + " \"name\": \"" + + aliasName + + "\",\n" + + " \"properties\" : {\n" + + " \"foo\": \"baz\",\n" + + " \"bar\": \"bam\"\n" + + " }\n" + + + // TODO should we use "NOW=" param? 
Won't work with v2 and is kinda a hack any way + // since intended for distrib + " }\n" + + "}", + ContentType.APPLICATION_JSON)); assertSuccess(post); checkFooAndBarMeta(aliasName, zkStateReader); } @@ -240,11 +263,15 @@ public void testModifyPropertiesV1() throws Exception { final String aliasName = getSaferTestName(); ZkStateReader zkStateReader = createColectionsAndAlias(aliasName); final String baseUrl = cluster.getRandomJetty(random()).getBaseUrl().toString(); - HttpGet get = new HttpGet(baseUrl + "/admin/collections?action=ALIASPROP" + - "&wt=xml" + - "&name=" + aliasName + - "&property.foo=baz" + - "&property.bar=bam"); + HttpGet get = + new HttpGet( + baseUrl + + "/admin/collections?action=ALIASPROP" + + "&wt=xml" + + "&name=" + + aliasName + + "&property.foo=baz" + + "&property.bar=bam"); assertSuccess(get); checkFooAndBarMeta(aliasName, zkStateReader); } @@ -254,36 +281,35 @@ public void testModifyPropertiesCAR() throws Exception { // note we don't use TZ in this test, thus it's UTC final String aliasName = getSaferTestName(); ZkStateReader zkStateReader = createColectionsAndAlias(aliasName); - CollectionAdminRequest.SetAliasProperty setAliasProperty = CollectionAdminRequest.setAliasProperty(aliasName); - setAliasProperty.addProperty("foo","baz"); - setAliasProperty.addProperty("bar","bam"); + CollectionAdminRequest.SetAliasProperty setAliasProperty = + CollectionAdminRequest.setAliasProperty(aliasName); + setAliasProperty.addProperty("foo", "baz"); + setAliasProperty.addProperty("bar", "bam"); setAliasProperty.process(cluster.getSolrClient()); checkFooAndBarMeta(aliasName, zkStateReader); // now verify we can delete setAliasProperty = CollectionAdminRequest.setAliasProperty(aliasName); - setAliasProperty.addProperty("foo",""); + setAliasProperty.addProperty("foo", ""); setAliasProperty.process(cluster.getSolrClient()); setAliasProperty = CollectionAdminRequest.setAliasProperty(aliasName); - setAliasProperty.addProperty("bar",null); + setAliasProperty.addProperty("bar", null); setAliasProperty.process(cluster.getSolrClient()); setAliasProperty = CollectionAdminRequest.setAliasProperty(aliasName); // whitespace value - setAliasProperty.addProperty("foo"," "); + setAliasProperty.addProperty("foo", " "); setAliasProperty.process(cluster.getSolrClient()); - - } @Test public void testClusterStateProviderAPI() throws Exception { final String aliasName = getSaferTestName(); - + // pick an arbitrary node, and use it's cloudManager to assert that (an instance of) // the ClusterStateProvider API reflects alias changes made by remote clients - final SolrCloudManager cloudManager = cluster.getRandomJetty(random()) - .getCoreContainer().getZkController().getSolrCloudManager(); + final SolrCloudManager cloudManager = + cluster.getRandomJetty(random()).getCoreContainer().getZkController().getSolrCloudManager(); // allthough the purpose of this test is to verify that the ClusterStateProvider API // works as a "black box" for inspecting alias information, we'll be doing some "grey box" @@ -306,9 +332,10 @@ public void testClusterStateProviderAPI() throws Exception { assertTrue(collections.toString(), collections.contains("collection2meta")); // modify the alias to have some properties - CollectionAdminRequest.SetAliasProperty setAliasProperty = CollectionAdminRequest.setAliasProperty(aliasName); - setAliasProperty.addProperty("foo","baz"); - setAliasProperty.addProperty("bar","bam"); + CollectionAdminRequest.SetAliasProperty setAliasProperty = + 
CollectionAdminRequest.setAliasProperty(aliasName); + setAliasProperty.addProperty("foo", "baz"); + setAliasProperty.addProperty("bar", "bam"); setAliasProperty.process(cluster.getSolrClient()); lastVersion = waitForAliasesUpdate(lastVersion, cloudManager.getClusterStateProvider()); @@ -324,38 +351,41 @@ public void testClusterStateProviderAPI() throws Exception { assertTrue(collections.toString(), collections.contains("collection2meta")); assertFalse("should not be a routed alias", stateProvider.isRoutedAlias(aliasName)); - + // now make it a routed alias, according to the criteria in the API setAliasProperty = CollectionAdminRequest.setAliasProperty(aliasName); - setAliasProperty.addProperty(CollectionAdminParams.ROUTER_PREFIX + "foo","baz"); + setAliasProperty.addProperty(CollectionAdminParams.ROUTER_PREFIX + "foo", "baz"); setAliasProperty.process(cluster.getSolrClient()); lastVersion = waitForAliasesUpdate(lastVersion, cloudManager.getClusterStateProvider()); - + // assert ClusterStateProvider sees it's routed... stateProvider = cloudManager.getClusterStateProvider(); assertTrue("should be a routed alias", stateProvider.isRoutedAlias(aliasName)); - expectThrows(SolrException.class, () -> { - String resolved = cloudManager.getClusterStateProvider().resolveSimpleAlias(aliasName); - fail("this is not a simple alias but it resolved to " + resolved); - }); + expectThrows( + SolrException.class, + () -> { + String resolved = cloudManager.getClusterStateProvider().resolveSimpleAlias(aliasName); + fail("this is not a simple alias but it resolved to " + resolved); + }); } - /** - * Does a "grey box" assertion that the ClusterStateProvider is a ZkClientClusterStateProvider - * and then waits for it's underlying ZkStateReader to see the updated aliases, - * returning the current ZNodeVersion for the aliases + /** + * Does a "grey box" assertion that the ClusterStateProvider is a ZkClientClusterStateProvider and + * then waits for it's underlying ZkStateReader to see the updated aliases, returning the current + * ZNodeVersion for the aliases */ private int waitForAliasesUpdate(int lastVersion, ClusterStateProvider stateProvider) - throws Exception { - - assertTrue("this method does grey box introspection which requires that " + - "the stateProvider be a ZkClientClusterStateProvider", - stateProvider instanceof ZkClientClusterStateProvider); - return waitForAliasesUpdate(lastVersion, - ((ZkClientClusterStateProvider)stateProvider).getZkStateReader()); + throws Exception { + + assertTrue( + "this method does grey box introspection which requires that " + + "the stateProvider be a ZkClientClusterStateProvider", + stateProvider instanceof ZkClientClusterStateProvider); + return waitForAliasesUpdate( + lastVersion, ((ZkClientClusterStateProvider) stateProvider).getZkStateReader()); } - + private int waitForAliasesUpdate(int lastVersion, ZkStateReader zkStateReader) throws Exception { TimeOut timeOut = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); while (!timeOut.hasTimedOut()) { @@ -364,7 +394,11 @@ private int waitForAliasesUpdate(int lastVersion, ZkStateReader zkStateReader) t if (aliases.getZNodeVersion() > lastVersion) { return aliases.getZNodeVersion(); } else if (aliases.getZNodeVersion() < lastVersion) { - fail("unexpected znode version, expected greater than " + lastVersion + " but was " + aliases.getZNodeVersion()); + fail( + "unexpected znode version, expected greater than " + + lastVersion + + " but was " + + aliases.getZNodeVersion()); } timeOut.sleep(1000); } @@ -384,21 +418,31 
@@ private void checkFooAndBarMeta(String aliasName, ZkStateReader zkStateReader) t
     assertEquals("bam", meta.get("bar"));
   }
 
-  private ZkStateReader createColectionsAndAlias(String aliasName) throws SolrServerException, IOException, KeeperException, InterruptedException {
-    CollectionAdminRequest.createCollection("collection1meta", "conf", 2, 1).process(cluster.getSolrClient());
-    CollectionAdminRequest.createCollection("collection2meta", "conf", 1, 1).process(cluster.getSolrClient());
+  private ZkStateReader createColectionsAndAlias(String aliasName)
+      throws SolrServerException, IOException, KeeperException, InterruptedException {
+    CollectionAdminRequest.createCollection("collection1meta", "conf", 2, 1)
+        .process(cluster.getSolrClient());
+    CollectionAdminRequest.createCollection("collection2meta", "conf", 1, 1)
+        .process(cluster.getSolrClient());
     cluster.waitForActiveCollection("collection1meta", 2, 2);
     cluster.waitForActiveCollection("collection2meta", 1, 1);
-    waitForState("Expected collection1 to be created with 2 shards and 1 replica", "collection1meta", clusterShape(2, 2));
-    waitForState("Expected collection2 to be created with 1 shard and 1 replica", "collection2meta", clusterShape(1, 1));
+    waitForState(
+        "Expected collection1 to be created with 2 shards and 1 replica",
+        "collection1meta",
+        clusterShape(2, 2));
+    waitForState(
+        "Expected collection2 to be created with 1 shard and 1 replica",
+        "collection2meta",
+        clusterShape(1, 1));
     ZkStateReader zkStateReader = cluster.getSolrClient().getZkStateReader();
     zkStateReader.createClusterStateWatchersAndUpdate();
     List aliases = zkStateReader.getAliases().resolveAliases(aliasName);
     assertEquals(1, aliases.size());
     assertEquals(aliasName, aliases.get(0));
-    UnaryOperator op6 = a -> a.cloneWithCollectionAlias(aliasName, "collection1meta,collection2meta");
+    UnaryOperator op6 =
+        a -> a.cloneWithCollectionAlias(aliasName, "collection1meta,collection2meta");
     final ZkStateReader.AliasesManager aliasesManager = zkStateReader.aliasesManager;
     aliasesManager.applyModificationAndExportToZk(op6);
@@ -417,10 +461,12 @@ private void assertSuccess(HttpUriRequest msg) throws IOException {
       }
     }
   }
-  // Rather a long title, but it's common to recommend when people need to re-index for any reason that they:
+  // Rather a long title, but it's common to recommend when people need to re-index for any reason
+  // that they:
   // 1> create a new collection
   // 2> index the corpus to the new collection and verify it
-  // 3> create an alias pointing to the new collection WITH THE SAME NAME as their original collection
+  // 3> create an alias pointing to the new collection WITH THE SAME NAME as their original
+  // collection
   // 4> delete the old collection.
   //
   // They may or may not have an alias already pointing to the old collection that's being replaced.
   //
   // What happens when they delete old_collection now?
   //
-  // Current behavior is that delete "does the right thing" and deletes old_collection rather than new_collection,
-  // but if this behavior changes it could be disastrous for users so this test insures that this behavior.
+  // Current behavior is that delete "does the right thing" and deletes old_collection rather than
+  // new_collection, but if this behavior changes it could be disastrous for users, so this test
+  // ensures this behavior.
// @Test public void testDeleteAliasWithExistingCollectionName() throws Exception { - CollectionAdminRequest.createCollection("collection_old", "conf", 2, 1).process(cluster.getSolrClient()); - CollectionAdminRequest.createCollection("collection_new", "conf", 1, 1).process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection("collection_old", "conf", 2, 1) + .process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection("collection_new", "conf", 1, 1) + .process(cluster.getSolrClient()); cluster.waitForActiveCollection("collection_old", 2, 2); cluster.waitForActiveCollection("collection_new", 1, 1); - waitForState("Expected collection_old to be created with 2 shards and 1 replica", "collection_old", clusterShape(2, 2)); - waitForState("Expected collection_new to be created with 1 shard and 1 replica", "collection_new", clusterShape(1, 1)); + waitForState( + "Expected collection_old to be created with 2 shards and 1 replica", + "collection_old", + clusterShape(2, 2)); + waitForState( + "Expected collection_new to be created with 1 shard and 1 replica", + "collection_new", + clusterShape(1, 1)); new UpdateRequest() .add("id", "6", "a_t", "humpty dumpy sat on a wall") @@ -462,14 +517,17 @@ public void testDeleteAliasWithExistingCollectionName() throws Exception { ZkStateReader zkStateReader = cluster.getSolrClient().getZkStateReader(); int lastVersion = zkStateReader.aliasesManager.getAliases().getZNodeVersion(); // Let's insure we have a "handle" to the old collection - CollectionAdminRequest.createAlias("collection_old_reserve", "collection_old").process(cluster.getSolrClient()); + CollectionAdminRequest.createAlias("collection_old_reserve", "collection_old") + .process(cluster.getSolrClient()); lastVersion = waitForAliasesUpdate(lastVersion, zkStateReader); // This is the critical bit. The alias uses the _old collection name. - CollectionAdminRequest.createAlias("collection_old", "collection_new").process(cluster.getSolrClient()); + CollectionAdminRequest.createAlias("collection_old", "collection_new") + .process(cluster.getSolrClient()); lastVersion = waitForAliasesUpdate(lastVersion, zkStateReader); - // aliases: collection_old->collection_new, collection_old_reserve -> collection_old -> collection_new + // aliases: collection_old->collection_new, collection_old_reserve -> collection_old -> + // collection_new // collections: collection_new and collection_old // Now we should only see the doc in collection_new through the collection_old alias @@ -480,60 +538,109 @@ public void testDeleteAliasWithExistingCollectionName() throws Exception { res = cluster.getSolrClient().query("collection_old_reserve", new SolrQuery("*:*")); assertEquals(1, res.getResults().getNumFound()); - // Now delete the old collection. This should fail since the collection_old_reserve points to collection_old - RequestStatusState delResp = CollectionAdminRequest.deleteCollection("collection_old").processAndWait(cluster.getSolrClient(), 60); + // Now delete the old collection. This should fail since the collection_old_reserve points to + // collection_old + RequestStatusState delResp = + CollectionAdminRequest.deleteCollection("collection_old") + .processAndWait(cluster.getSolrClient(), 60); assertEquals("Should have failed to delete collection: ", delResp, RequestStatusState.FAILED); // assure ourselves that the old colletion is, indeed, still there. 
- assertNotNull("collection_old should exist!", cluster.getSolrClient().getZkStateReader().getClusterState().getCollectionOrNull("collection_old")); + assertNotNull( + "collection_old should exist!", + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollectionOrNull("collection_old")); // Now we should still succeed using the alias collection_old which points to collection_new - // aliase: collection_old -> collection_new, collection_old_reserve -> collection_old -> collection_new + // aliase: collection_old -> collection_new, collection_old_reserve -> collection_old -> + // collection_new // collections: collection_old, collection_new res = cluster.getSolrClient().query("collection_old", new SolrQuery("*:*")); assertEquals(1, res.getResults().getNumFound()); Aliases aliases = cluster.getSolrClient().getZkStateReader().getAliases(); - assertTrue("collection_old should point to collection_new", aliases.resolveAliases("collection_old").contains("collection_new")); - assertTrue("collection_old_reserve should point to collection_new", aliases.resolveAliases("collection_old_reserve").contains("collection_new")); + assertTrue( + "collection_old should point to collection_new", + aliases.resolveAliases("collection_old").contains("collection_new")); + assertTrue( + "collection_old_reserve should point to collection_new", + aliases.resolveAliases("collection_old_reserve").contains("collection_new")); // Clean up - CollectionAdminRequest.deleteAlias("collection_old_reserve").processAndWait(cluster.getSolrClient(), 60); + CollectionAdminRequest.deleteAlias("collection_old_reserve") + .processAndWait(cluster.getSolrClient(), 60); lastVersion = waitForAliasesUpdate(lastVersion, zkStateReader); - CollectionAdminRequest.deleteAlias("collection_old").processAndWait(cluster.getSolrClient(), 60); + CollectionAdminRequest.deleteAlias("collection_old") + .processAndWait(cluster.getSolrClient(), 60); lastVersion = waitForAliasesUpdate(lastVersion, zkStateReader); - CollectionAdminRequest.deleteCollection("collection_new").processAndWait(cluster.getSolrClient(), 60); - CollectionAdminRequest.deleteCollection("collection_old").processAndWait(cluster.getSolrClient(), 60); + CollectionAdminRequest.deleteCollection("collection_new") + .processAndWait(cluster.getSolrClient(), 60); + CollectionAdminRequest.deleteCollection("collection_old") + .processAndWait(cluster.getSolrClient(), 60); // collection_old already deleted as well as collection_old_reserve - assertNull("collection_old_reserve should be gone", cluster.getSolrClient().getZkStateReader().getAliases().getCollectionAliasMap().get("collection_old_reserve")); - assertNull("collection_old should be gone", cluster.getSolrClient().getZkStateReader().getAliases().getCollectionAliasMap().get("collection_old")); - - assertFalse("collection_new should be gone", - cluster.getSolrClient().getZkStateReader().getClusterState().hasCollection("collection_new")); - - assertFalse("collection_old should be gone", - cluster.getSolrClient().getZkStateReader().getClusterState().hasCollection("collection_old")); + assertNull( + "collection_old_reserve should be gone", + cluster + .getSolrClient() + .getZkStateReader() + .getAliases() + .getCollectionAliasMap() + .get("collection_old_reserve")); + assertNull( + "collection_old should be gone", + cluster + .getSolrClient() + .getZkStateReader() + .getAliases() + .getCollectionAliasMap() + .get("collection_old")); + + assertFalse( + "collection_new should be gone", + cluster + .getSolrClient() + 
.getZkStateReader() + .getClusterState() + .hasCollection("collection_new")); + + assertFalse( + "collection_old should be gone", + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .hasCollection("collection_old")); } - // While writing the above test I wondered what happens when an alias points to two collections and one of them - // is deleted. + // While writing the above test I wondered what happens when an alias points to two collections + // and one of them is deleted. @Test public void testDeleteOneOfTwoCollectionsAliased() throws Exception { - CollectionAdminRequest.createCollection("collection_one", "conf", 2, 1).process(cluster.getSolrClient()); - CollectionAdminRequest.createCollection("collection_two", "conf", 1, 1).process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection("collection_one", "conf", 2, 1) + .process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection("collection_two", "conf", 1, 1) + .process(cluster.getSolrClient()); cluster.waitForActiveCollection("collection_one", 2, 2); cluster.waitForActiveCollection("collection_two", 1, 1); - waitForState("Expected collection_one to be created with 2 shards and 1 replica", "collection_one", clusterShape(2, 2)); - waitForState("Expected collection_two to be created with 1 shard and 1 replica", "collection_two", clusterShape(1, 1)); + waitForState( + "Expected collection_one to be created with 2 shards and 1 replica", + "collection_one", + clusterShape(2, 2)); + waitForState( + "Expected collection_two to be created with 1 shard and 1 replica", + "collection_two", + clusterShape(1, 1)); new UpdateRequest() .add("id", "1", "a_t", "humpty dumpy sat on a wall") .commit(cluster.getSolrClient(), "collection_one"); - new UpdateRequest() .add("id", "10", "a_t", "humpty dumpy sat on a high wall") .add("id", "11", "a_t", "humpty dumpy sat on a low wall") @@ -543,18 +650,23 @@ public void testDeleteOneOfTwoCollectionsAliased() throws Exception { int lastVersion = zkStateReader.aliasesManager.getAliases().getZNodeVersion(); // Create an alias pointing to both - CollectionAdminRequest.createAlias("collection_alias_pair", "collection_one,collection_two").process(cluster.getSolrClient()); + CollectionAdminRequest.createAlias("collection_alias_pair", "collection_one,collection_two") + .process(cluster.getSolrClient()); lastVersion = waitForAliasesUpdate(lastVersion, zkStateReader); - QueryResponse res = cluster.getSolrClient().query("collection_alias_pair", new SolrQuery("*:*")); + QueryResponse res = + cluster.getSolrClient().query("collection_alias_pair", new SolrQuery("*:*")); assertEquals(3, res.getResults().getNumFound()); // Now delete one of the collections, should fail since an alias points to it. 
- RequestStatusState delResp = CollectionAdminRequest.deleteCollection("collection_one").processAndWait(cluster.getSolrClient(), 60); + RequestStatusState delResp = + CollectionAdminRequest.deleteCollection("collection_one") + .processAndWait(cluster.getSolrClient(), 60); // failed because the collection is a part of a compound alias assertEquals("Should have failed to delete collection: ", RequestStatusState.FAILED, delResp); - CollectionAdminRequest.Delete delete = CollectionAdminRequest.deleteCollection("collection_alias_pair"); + CollectionAdminRequest.Delete delete = + CollectionAdminRequest.deleteCollection("collection_alias_pair"); delResp = delete.processAndWait(cluster.getSolrClient(), 60); // failed because we tried to delete an alias with followAliases=false assertEquals("Should have failed to delete alias: ", RequestStatusState.FAILED, delResp); @@ -564,7 +676,8 @@ public void testDeleteOneOfTwoCollectionsAliased() throws Exception { // failed because we tried to delete compound alias assertEquals("Should have failed to delete collection: ", RequestStatusState.FAILED, delResp); - CollectionAdminRequest.createAlias("collection_alias_one", "collection_one").process(cluster.getSolrClient()); + CollectionAdminRequest.createAlias("collection_alias_one", "collection_one") + .process(cluster.getSolrClient()); lastVersion = waitForAliasesUpdate(lastVersion, zkStateReader); delete = CollectionAdminRequest.deleteCollection("collection_one"); @@ -582,13 +695,19 @@ public void testDeleteOneOfTwoCollectionsAliased() throws Exception { lastVersion = waitForAliasesUpdate(lastVersion, zkStateReader); // Now redefine the alias to only point to collection two - CollectionAdminRequest.createAlias("collection_alias_pair", "collection_two").process(cluster.getSolrClient()); + CollectionAdminRequest.createAlias("collection_alias_pair", "collection_two") + .process(cluster.getSolrClient()); lastVersion = waitForAliasesUpdate(lastVersion, zkStateReader); - //Delete collection_one. - delResp = CollectionAdminRequest.deleteCollection("collection_one").processAndWait(cluster.getSolrClient(), 60); + // Delete collection_one. 
+ delResp = + CollectionAdminRequest.deleteCollection("collection_one") + .processAndWait(cluster.getSolrClient(), 60); - assertEquals("Should not have failed to delete collection, it was removed from the alias: ", RequestStatusState.COMPLETED, delResp); + assertEquals( + "Should not have failed to delete collection, it was removed from the alias: ", + RequestStatusState.COMPLETED, + delResp); // Should only see two docs now in second collection res = cluster.getSolrClient().query("collection_alias_pair", new SolrQuery("*:*")); @@ -604,33 +723,57 @@ public void testDeleteOneOfTwoCollectionsAliased() throws Exception { } // Clean up - CollectionAdminRequest.deleteAlias("collection_alias_pair").processAndWait(cluster.getSolrClient(), 60); - CollectionAdminRequest.deleteCollection("collection_two").processAndWait(cluster.getSolrClient(), 60); + CollectionAdminRequest.deleteAlias("collection_alias_pair") + .processAndWait(cluster.getSolrClient(), 60); + CollectionAdminRequest.deleteCollection("collection_two") + .processAndWait(cluster.getSolrClient(), 60); // collection_one already deleted lastVersion = waitForAliasesUpdate(lastVersion, zkStateReader); - assertNull("collection_alias_pair should be gone", - cluster.getSolrClient().getZkStateReader().getAliases().getCollectionAliasMap().get("collection_alias_pair")); - - assertFalse("collection_one should be gone", - cluster.getSolrClient().getZkStateReader().getClusterState().hasCollection("collection_one")); - - assertFalse("collection_two should be gone", - cluster.getSolrClient().getZkStateReader().getClusterState().hasCollection("collection_two")); - + assertNull( + "collection_alias_pair should be gone", + cluster + .getSolrClient() + .getZkStateReader() + .getAliases() + .getCollectionAliasMap() + .get("collection_alias_pair")); + + assertFalse( + "collection_one should be gone", + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .hasCollection("collection_one")); + + assertFalse( + "collection_two should be gone", + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .hasCollection("collection_two")); } - @Test public void test() throws Exception { - CollectionAdminRequest.createCollection("collection1", "conf", 2, 1).process(cluster.getSolrClient()); - CollectionAdminRequest.createCollection("collection2", "conf", 1, 1).process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection("collection1", "conf", 2, 1) + .process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection("collection2", "conf", 1, 1) + .process(cluster.getSolrClient()); cluster.waitForActiveCollection("collection1", 2, 2); cluster.waitForActiveCollection("collection2", 1, 1); - waitForState("Expected collection1 to be created with 2 shards and 1 replica", "collection1", clusterShape(2, 2)); - waitForState("Expected collection2 to be created with 1 shard and 1 replica", "collection2", clusterShape(1, 1)); + waitForState( + "Expected collection1 to be created with 2 shards and 1 replica", + "collection1", + clusterShape(2, 2)); + waitForState( + "Expected collection2 to be created with 1 shard and 1 replica", + "collection2", + clusterShape(1, 1)); new UpdateRequest() .add("id", "6", "a_t", "humpty dumpy sat on a wall") @@ -649,7 +792,8 @@ public void test() throws Exception { int lastVersion = zkStateReader.aliasesManager.getAliases().getZNodeVersion(); CollectionAdminRequest.deleteAlias("collection1").process(cluster.getSolrClient()); - CollectionAdminRequest.createAlias("testalias1", 
"collection1").process(cluster.getSolrClient()); + CollectionAdminRequest.createAlias("testalias1", "collection1") + .process(cluster.getSolrClient()); // verify proper resolution on the server-side lastVersion = waitForAliasesUpdate(lastVersion, zkStateReader); @@ -660,8 +804,12 @@ public void test() throws Exception { assertTrue(collections.contains("collection1")); // ensure that the alias is visible in the API - assertEquals("collection1", - new CollectionAdminRequest.ListAliases().process(cluster.getSolrClient()).getAliases().get("testalias1")); + assertEquals( + "collection1", + new CollectionAdminRequest.ListAliases() + .process(cluster.getSolrClient()) + .getAliases() + .get("testalias1")); // search for alias searchSeveralWays("testalias1", new SolrQuery("*:*"), 3); @@ -671,7 +819,8 @@ public void test() throws Exception { /////////////// // test alias pointing to two collections. collection2 first because it's not on every node - CollectionAdminRequest.createAlias("testalias2", "collection2,collection1").process(cluster.getSolrClient()); + CollectionAdminRequest.createAlias("testalias2", "collection2,collection1") + .process(cluster.getSolrClient()); lastVersion = waitForAliasesUpdate(lastVersion, zkStateReader); @@ -679,22 +828,26 @@ public void test() throws Exception { /////////////// // update alias - CollectionAdminRequest.createAlias("testalias2", "collection2").process(cluster.getSolrClient()); + CollectionAdminRequest.createAlias("testalias2", "collection2") + .process(cluster.getSolrClient()); lastVersion = waitForAliasesUpdate(lastVersion, zkStateReader); searchSeveralWays("testalias2", new SolrQuery("*:*"), 2); /////////////// - // alias pointing to alias. One level of indirection is supported; more than that is not (may or may not work) + // alias pointing to alias. 
One level of indirection is supported; more than that is not (may + // or may not work) CollectionAdminRequest.createAlias("testalias3", "testalias2").process(cluster.getSolrClient()); lastVersion = waitForAliasesUpdate(lastVersion, zkStateReader); searchSeveralWays("testalias3", new SolrQuery("*:*"), 2); /////////////// // Test 2 aliases pointing to the same collection - CollectionAdminRequest.createAlias("testalias4", "collection2").process(cluster.getSolrClient()); + CollectionAdminRequest.createAlias("testalias4", "collection2") + .process(cluster.getSolrClient()); lastVersion = waitForAliasesUpdate(lastVersion, zkStateReader); - CollectionAdminRequest.createAlias("testalias5", "collection2").process(cluster.getSolrClient()); + CollectionAdminRequest.createAlias("testalias5", "collection2") + .process(cluster.getSolrClient()); lastVersion = waitForAliasesUpdate(lastVersion, zkStateReader); // add one document to testalias4, thus to collection2 @@ -703,18 +856,21 @@ public void test() throws Exception { .commit(cluster.getSolrClient(), "testalias4"); // thus gets added to collection2 searchSeveralWays("testalias4", new SolrQuery("*:*"), 3); - //searchSeveralWays("testalias4,testalias5", new SolrQuery("*:*"), 3); + // searchSeveralWays("testalias4,testalias5", new SolrQuery("*:*"), 3); /////////////// // use v2 API new V2Request.Builder("/collections") .withMethod(SolrRequest.METHOD.POST) - .withPayload("{\"create-alias\": {\"name\": \"testalias6\", collections:[\"collection2\",\"collection1\"]}}") - .build().process(cluster.getSolrClient()); + .withPayload( + "{\"create-alias\": {\"name\": \"testalias6\", collections:[\"collection2\",\"collection1\"]}}") + .build() + .process(cluster.getSolrClient()); searchSeveralWays("testalias6", new SolrQuery("*:*"), 6); - // add one document to testalias6. this should fail because it's a multi-collection non-routed alias + // add one document to testalias6. 
this should fail because it's a multi-collection non-routed
+    // alias
     try {
       new UpdateRequest()
           .add("id", "12", "a_t", "humpty dumpy5 sat on a walls")
@@ -726,24 +882,35 @@ public void test() throws Exception {
     }
 
     ///////////////
-    for (int i = 1; i <= 6 ; i++) {
+    for (int i = 1; i <= 6; i++) {
       CollectionAdminRequest.deleteAlias("testalias" + i).process(cluster.getSolrClient());
       lastVersion = waitForAliasesUpdate(lastVersion, zkStateReader);
     }
 
-    SolrException e = expectThrows(SolrException.class, () -> {
-      SolrQuery q = new SolrQuery("*:*");
-      q.set("collection", "testalias1");
-      cluster.getSolrClient().query(q);
-    });
-    assertTrue("Unexpected exception message: " + e.getMessage(), e.getMessage().contains("Collection not found: testalias1"));
+    SolrException e =
+        expectThrows(
+            SolrException.class,
+            () -> {
+              SolrQuery q = new SolrQuery("*:*");
+              q.set("collection", "testalias1");
+              cluster.getSolrClient().query(q);
+            });
+    assertTrue(
+        "Unexpected exception message: " + e.getMessage(),
+        e.getMessage().contains("Collection not found: testalias1"));
   }
 
-  private void searchSeveralWays(String collectionList, SolrParams solrQuery, int expectedNumFound) throws IOException, SolrServerException {
-    searchSeveralWays(collectionList, solrQuery, res -> assertEquals(expectedNumFound, res.getResults().getNumFound()));
+  private void searchSeveralWays(String collectionList, SolrParams solrQuery, int expectedNumFound)
+      throws IOException, SolrServerException {
+    searchSeveralWays(
+        collectionList,
+        solrQuery,
+        res -> assertEquals(expectedNumFound, res.getResults().getNumFound()));
   }
 
-  private void searchSeveralWays(String collectionList, SolrParams solrQuery, Consumer responseConsumer) throws IOException, SolrServerException {
+  private void searchSeveralWays(
+      String collectionList, SolrParams solrQuery, Consumer responseConsumer)
+      throws IOException, SolrServerException {
     if (random().nextBoolean()) {
       // cluster's CloudSolrClient
       responseConsumer.accept(cluster.getSolrClient().query(collectionList, solrQuery));
@@ -759,13 +926,15 @@ private void searchSeveralWays(String collectionList, SolrParams solrQuery, Cons
       }
     }
 
-    // note: collectionList could be null when we randomly recurse and put the actual collection list into the
-    // "collection" param and some bugs value into collectionList (including null). Only CloudSolrClient supports null.
+    // note: collectionList could be null when we randomly recurse and put the actual collection
+    // list into the "collection" param and some bogus value into collectionList (including null).
+    // Only CloudSolrClient supports null.
if (collectionList != null) { // HttpSolrClient JettySolrRunner jetty = cluster.getRandomJetty(random()); if (random().nextBoolean()) { - try (HttpSolrClient client = getHttpSolrClient(jetty.getBaseUrl().toString() + "/" + collectionList)) { + try (HttpSolrClient client = + getHttpSolrClient(jetty.getBaseUrl().toString() + "/" + collectionList)) { responseConsumer.accept(client.query(null, solrQuery)); } } else { @@ -779,7 +948,8 @@ private void searchSeveralWays(String collectionList, SolrParams solrQuery, Cons // put in "collection" param ModifiableSolrParams newParams = new ModifiableSolrParams(solrQuery); newParams.set("collection", collectionList); - String maskedColl = new String[]{null, "bogus", "collection2", "collection1"}[random().nextInt(4)]; + String maskedColl = + new String[] {null, "bogus", "collection2", "collection1"}[random().nextInt(4)]; searchSeveralWays(maskedColl, newParams, responseConsumer); } } @@ -787,44 +957,75 @@ private void searchSeveralWays(String collectionList, SolrParams solrQuery, Cons @Test public void testErrorChecks() throws Exception { - CollectionAdminRequest.createCollection("testErrorChecks-collection", "conf", 2, 1).process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection("testErrorChecks-collection", "conf", 2, 1) + .process(cluster.getSolrClient()); cluster.waitForActiveCollection("testErrorChecks-collection", 2, 2); - waitForState("Expected testErrorChecks-collection to be created with 2 shards and 1 replica", "testErrorChecks-collection", clusterShape(2, 2)); + waitForState( + "Expected testErrorChecks-collection to be created with 2 shards and 1 replica", + "testErrorChecks-collection", + clusterShape(2, 2)); ignoreException("."); // Invalid Alias name - SolrException e = expectThrows(SolrException.class, () -> - CollectionAdminRequest.createAlias("test:alias", "testErrorChecks-collection").process(cluster.getSolrClient())); - assertEquals(SolrException.ErrorCode.BAD_REQUEST, SolrException.ErrorCode.getErrorCode(e.code())); + SolrException e = + expectThrows( + SolrException.class, + () -> + CollectionAdminRequest.createAlias("test:alias", "testErrorChecks-collection") + .process(cluster.getSolrClient())); + assertEquals( + SolrException.ErrorCode.BAD_REQUEST, SolrException.ErrorCode.getErrorCode(e.code())); // Target collection doesn't exists - e = expectThrows(SolrException.class, () -> - CollectionAdminRequest.createAlias("testalias", "doesnotexist").process(cluster.getSolrClient())); - assertEquals(SolrException.ErrorCode.BAD_REQUEST, SolrException.ErrorCode.getErrorCode(e.code())); - assertTrue(e.getMessage().contains("Can't create collection alias for collections='doesnotexist', 'doesnotexist' is not an existing collection or alias")); + e = + expectThrows( + SolrException.class, + () -> + CollectionAdminRequest.createAlias("testalias", "doesnotexist") + .process(cluster.getSolrClient())); + assertEquals( + SolrException.ErrorCode.BAD_REQUEST, SolrException.ErrorCode.getErrorCode(e.code())); + assertTrue( + e.getMessage() + .contains( + "Can't create collection alias for collections='doesnotexist', 'doesnotexist' is not an existing collection or alias")); // One of the target collections doesn't exist - e = expectThrows(SolrException.class, () -> - CollectionAdminRequest.createAlias("testalias", "testErrorChecks-collection,doesnotexist").process(cluster.getSolrClient())); - assertEquals(SolrException.ErrorCode.BAD_REQUEST, SolrException.ErrorCode.getErrorCode(e.code())); - assertTrue(e.getMessage().contains("Can't 
create collection alias for collections='testErrorChecks-collection,doesnotexist', 'doesnotexist' is not an existing collection or alias")); + e = + expectThrows( + SolrException.class, + () -> + CollectionAdminRequest.createAlias( + "testalias", "testErrorChecks-collection,doesnotexist") + .process(cluster.getSolrClient())); + assertEquals( + SolrException.ErrorCode.BAD_REQUEST, SolrException.ErrorCode.getErrorCode(e.code())); + assertTrue( + e.getMessage() + .contains( + "Can't create collection alias for collections='testErrorChecks-collection,doesnotexist', 'doesnotexist' is not an existing collection or alias")); // Valid - CollectionAdminRequest.createAlias("testalias", "testErrorChecks-collection").process(cluster.getSolrClient()); + CollectionAdminRequest.createAlias("testalias", "testErrorChecks-collection") + .process(cluster.getSolrClient()); // TODO dubious; remove? CollectionAdminRequest.createAlias("testalias2", "testalias").process(cluster.getSolrClient()); // Alias + invalid - e = expectThrows(SolrException.class, () -> - CollectionAdminRequest.createAlias("testalias3", "testalias2,doesnotexist").process(cluster.getSolrClient())); - assertEquals(SolrException.ErrorCode.BAD_REQUEST, SolrException.ErrorCode.getErrorCode(e.code())); + e = + expectThrows( + SolrException.class, + () -> + CollectionAdminRequest.createAlias("testalias3", "testalias2,doesnotexist") + .process(cluster.getSolrClient())); + assertEquals( + SolrException.ErrorCode.BAD_REQUEST, SolrException.ErrorCode.getErrorCode(e.code())); unIgnoreException("."); CollectionAdminRequest.deleteAlias("testalias").process(cluster.getSolrClient()); CollectionAdminRequest.deleteAlias("testalias2").process(cluster.getSolrClient()); CollectionAdminRequest.deleteCollection("testErrorChecks-collection"); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/AssignBackwardCompatibilityTest.java b/solr/core/src/test/org/apache/solr/cloud/AssignBackwardCompatibilityTest.java index 37062e63927..ab0f4be3c9c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/AssignBackwardCompatibilityTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/AssignBackwardCompatibilityTest.java @@ -21,7 +21,6 @@ import java.lang.invoke.MethodHandles; import java.util.HashSet; import java.util.Set; - import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.response.CollectionAdminResponse; @@ -35,10 +34,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +// TODO Remove in Solr 9.0 /** - * Test for backward compatibility when users update from 6.x or 7.0 to 7.1, - * then the counter of collection does not exist in Zk - * TODO Remove in Solr 9.0 + * Test for backward compatibility when users update from 6.x or 7.0 to 7.1, then the counter of + * collection does not exist in Zk */ public class AssignBackwardCompatibilityTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -48,15 +47,16 @@ public class AssignBackwardCompatibilityTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { configureCluster(4) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-dynamic").resolve("conf")) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-dynamic").resolve("conf")) .configure(); - CollectionAdminRequest.createCollection(COLLECTION, 1, 4) - .process(cluster.getSolrClient()); + 
CollectionAdminRequest.createCollection(COLLECTION, 1, 4).process(cluster.getSolrClient()); cluster.waitForActiveCollection(COLLECTION, 1, 4); } @Test - public void test() throws IOException, SolrServerException, KeeperException, InterruptedException { + public void test() + throws IOException, SolrServerException, KeeperException, InterruptedException { Set coreNames = new HashSet<>(); Set coreNodeNames = new HashSet<>(); @@ -73,30 +73,36 @@ public void test() throws IOException, SolrServerException, KeeperException, Int if (random().nextBoolean() && i > 5 && !clearedCounter) { log.info("Clear collection counter"); // clear counter - cluster.getZkClient().delete("/collections/"+COLLECTION+"/counter", -1, true); + cluster.getZkClient().delete("/collections/" + COLLECTION + "/counter", -1, true); clearedCounter = true; } if (deleteReplica) { cluster.waitForActiveCollection(COLLECTION, 1, numLiveReplicas); DocCollection dc = getCollectionState(COLLECTION); - Replica replica = getRandomReplica(dc.getSlice("shard1"), (r) -> r.getState() == Replica.State.ACTIVE); - CollectionAdminRequest.deleteReplica(COLLECTION, "shard1", replica.getName()).process(cluster.getSolrClient()); + Replica replica = + getRandomReplica(dc.getSlice("shard1"), (r) -> r.getState() == Replica.State.ACTIVE); + CollectionAdminRequest.deleteReplica(COLLECTION, "shard1", replica.getName()) + .process(cluster.getSolrClient()); coreNames.remove(replica.getCoreName()); numLiveReplicas--; } else { - CollectionAdminResponse response = CollectionAdminRequest.addReplicaToShard(COLLECTION, "shard1") - .process(cluster.getSolrClient()); + CollectionAdminResponse response = + CollectionAdminRequest.addReplicaToShard(COLLECTION, "shard1") + .process(cluster.getSolrClient()); assertTrue(response.isSuccess()); - String coreName = response.getCollectionCoresStatus() - .keySet().iterator().next(); - assertFalse("Core name is not unique coreName=" + coreName + " " + coreNames, coreNames.contains(coreName)); + String coreName = response.getCollectionCoresStatus().keySet().iterator().next(); + assertFalse( + "Core name is not unique coreName=" + coreName + " " + coreNames, + coreNames.contains(coreName)); coreNames.add(coreName); numLiveReplicas++; cluster.waitForActiveCollection(COLLECTION, 1, numLiveReplicas); - Replica newReplica = getCollectionState(COLLECTION).getReplicas().stream() - .filter(r -> r.getCoreName().equals(coreName)) - .findAny().get(); + Replica newReplica = + getCollectionState(COLLECTION).getReplicas().stream() + .filter(r -> r.getCoreName().equals(coreName)) + .findAny() + .get(); String coreNodeName = newReplica.getName(); assertFalse("Core node name is not unique", coreNodeNames.contains(coreName)); coreNodeNames.add(coreNodeName); @@ -106,7 +112,10 @@ public void test() throws IOException, SolrServerException, KeeperException, Int private int getCounter() throws KeeperException, InterruptedException { try { - byte[] data = cluster.getZkClient().getData("/collections/"+COLLECTION+"/counter", null, new Stat(), true); + byte[] data = + cluster + .getZkClient() + .getData("/collections/" + COLLECTION + "/counter", null, new Stat(), true); int count = NumberUtils.bytesToInt(data); if (count < 0) throw new AssertionError("Found negative collection counter " + count); return count; diff --git a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZk2Test.java b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZk2Test.java index f6b96a98f10..b99330645f1 100644 --- 
a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZk2Test.java +++ b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZk2Test.java @@ -20,9 +20,9 @@ import org.junit.Test; /** - * This test simply does a bunch of basic things in solrcloud mode and asserts things - * work as expected. - * Implementation moved to AbstractBasicDistributedZk2TestBase as it is used by HDFS contrib module tests. + * This test simply does a bunch of basic things in solrcloud mode and asserts things work as + * expected. Implementation moved to AbstractBasicDistributedZk2TestBase as it is used by HDFS + * contrib module tests. */ @SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776") public class BasicDistributedZk2Test extends AbstractBasicDistributedZk2TestBase { @@ -33,5 +33,4 @@ public class BasicDistributedZk2Test extends AbstractBasicDistributedZk2TestBase public void test() throws Exception { super.test(); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java index 6afabf39070..81ea1912edd 100644 --- a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java @@ -20,21 +20,18 @@ import org.apache.solr.SolrTestCaseJ4.SuppressSSL; import org.junit.Test; - /** - * This test simply does a bunch of basic things in solrcloud mode and asserts things - * work as expected. - * Implementation moved to AbstractBasicDistributedZkTestBase as it is used by many HDFS contrib tests. + * This test simply does a bunch of basic things in solrcloud mode and asserts things work as + * expected. Implementation moved to AbstractBasicDistributedZkTestBase as it is used by many HDFS + * contrib tests. */ -@Slow +@Slow @SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776") public class BasicDistributedZkTest extends AbstractBasicDistributedZkTestBase { - @Test @Override @ShardsFixed(num = 4) - // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // annotated on: 24-Dec-2018 public void test() throws Exception { super.test(); } diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java index 6fd58502705..ad93969a28a 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java @@ -16,16 +16,13 @@ */ package org.apache.solr.cloud; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.SolrTestCaseJ4.SuppressSSL; - /* * Implementation moved to AbstractChaosMonkeyNothingIsSafeTestBase.java as it is also * used by the HDFS contrib tests. 
*/ @Slow @SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776") -public class ChaosMonkeyNothingIsSafeTest extends AbstractChaosMonkeyNothingIsSafeTestBase { -} +public class ChaosMonkeyNothingIsSafeTest extends AbstractChaosMonkeyNothingIsSafeTestBase {} diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java index 1e6cc3fcfe7..cadf6e20a21 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java @@ -23,7 +23,6 @@ import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.SolrTestCaseJ4.SuppressSSL; import org.apache.solr.client.solrj.SolrQuery; @@ -48,46 +47,47 @@ public class ChaosMonkeyNothingIsSafeWithPullReplicasTest extends AbstractFullDi private static final int FAIL_TOLERANCE = 100; private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - private static final Integer RUN_LENGTH = Integer.parseInt(System.getProperty("solr.tests.cloud.cm.runlength", "-1")); + + private static final Integer RUN_LENGTH = + Integer.parseInt(System.getProperty("solr.tests.cloud.cm.runlength", "-1")); private final boolean useTlogReplicas = random().nextBoolean(); - + private final int numPullReplicas; private final int numRealtimeOrTlogReplicas; - + protected int getPullReplicaCount() { return numPullReplicas; } @BeforeClass public static void beforeSuperClass() { - schemaString = "schema15.xml"; // we need a string id + schemaString = "schema15.xml"; // we need a string id if (usually()) { System.setProperty("solr.autoCommit.maxTime", "15000"); } System.clearProperty("solr.httpclient.retries"); System.clearProperty("solr.retries.on.forward"); - System.clearProperty("solr.retries.to.followers"); + System.clearProperty("solr.retries.to.followers"); setErrorHook(); } - + @AfterClass public static void afterSuperClass() { System.clearProperty("solr.autoCommit.maxTime"); clearErrorHook(); TestInjection.reset(); } - - protected static final String[] fieldNames = new String[]{"f_i", "f_f", "f_d", "f_l", "f_dt"}; - protected static final RandVal[] randVals = new RandVal[]{rint, rfloat, rdouble, rlong, rdate}; + + protected static final String[] fieldNames = new String[] {"f_i", "f_f", "f_d", "f_l", "f_dt"}; + protected static final RandVal[] randVals = new RandVal[] {rint, rfloat, rdouble, rlong, rdate}; private int clientSoTimeout; private volatile FullThrottleStoppableIndexingThread ftIndexThread; private final boolean runFullThrottle; - + public String[] getFieldNames() { return fieldNames; } @@ -95,15 +95,15 @@ public String[] getFieldNames() { public RandVal[] getRandValues() { return randVals; } - + @Override public void distribSetUp() throws Exception { super.distribSetUp(); // can help to hide this when testing and looking at logs - //ignoreException("shard update error"); + // ignoreException("shard update error"); useFactory("solr.StandardDirectoryFactory"); } - + @Override public void distribTearDown() throws Exception { try { @@ -113,7 +113,7 @@ public void distribTearDown() throws Exception { } super.distribTearDown(); } - + public ChaosMonkeyNothingIsSafeWithPullReplicasTest() { super(); numPullReplicas = random().nextInt(TEST_NIGHTLY ? 
2 : 1) + 1; @@ -132,36 +132,49 @@ public ChaosMonkeyNothingIsSafeWithPullReplicasTest() { @Override protected boolean useTlogReplicas() { - return false; // TODO: tlog replicas makes commits take way to long due to what is likely a bug and it's TestInjection use + // TODO: tlog replicas makes commits take way to long due to what is likely a bug and it's + // TestInjection use + return false; } - + @Override protected CloudSolrClient createCloudClient(String defaultCollection) { return this.createCloudClient(defaultCollection, this.clientSoTimeout); } - + protected CloudSolrClient createCloudClient(String defaultCollection, int socketTimeout) { - CloudSolrClient client = getCloudSolrClient(zkServer.getZkAddress(), random().nextBoolean(), 30000, socketTimeout); + CloudSolrClient client = + getCloudSolrClient(zkServer.getZkAddress(), random().nextBoolean(), 30000, socketTimeout); if (defaultCollection != null) client.setDefaultCollection(defaultCollection); return client; } - @Test public void test() throws Exception { // None of the operations used here are particularly costly, so this should work. // Using this low timeout will also help us catch index stalling. clientSoTimeout = 8000; - DocCollection docCollection = cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION); + DocCollection docCollection = + cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION); assertEquals(this.sliceCount, docCollection.getSlices().size()); Slice s = docCollection.getSlice("shard1"); assertNotNull(s); - assertEquals("Unexpected number of replicas. Collection: " + docCollection, numRealtimeOrTlogReplicas + numPullReplicas, s.getReplicas().size()); - assertEquals("Unexpected number of pull replicas. Collection: " + docCollection, numPullReplicas, s.getReplicas(EnumSet.of(Replica.Type.PULL)).size()); - assertEquals(useTlogReplicas()?0:numRealtimeOrTlogReplicas, s.getReplicas(EnumSet.of(Replica.Type.NRT)).size()); - assertEquals(useTlogReplicas()?numRealtimeOrTlogReplicas:0, s.getReplicas(EnumSet.of(Replica.Type.TLOG)).size()); - + assertEquals( + "Unexpected number of replicas. Collection: " + docCollection, + numRealtimeOrTlogReplicas + numPullReplicas, + s.getReplicas().size()); + assertEquals( + "Unexpected number of pull replicas. Collection: " + docCollection, + numPullReplicas, + s.getReplicas(EnumSet.of(Replica.Type.PULL)).size()); + assertEquals( + useTlogReplicas() ? 0 : numRealtimeOrTlogReplicas, + s.getReplicas(EnumSet.of(Replica.Type.NRT)).size()); + assertEquals( + useTlogReplicas() ? numRealtimeOrTlogReplicas : 0, + s.getReplicas(EnumSet.of(Replica.Type.TLOG)).size()); + boolean testSuccessful = false; try { handle.clear(); @@ -170,23 +183,25 @@ public void test() throws Exception { // make sure we have leaders for each shard for (int j = 1; j < sliceCount; j++) { zkStateReader.getLeaderRetry(DEFAULT_COLLECTION, "shard" + j, 10000); - } // make sure we again have leaders for each shard - + } // make sure we again have leaders for each shard + waitForRecoveriesToFinish(false); del("*:*"); - + List threads = new ArrayList<>(); List indexTreads = new ArrayList<>(); int threadCount = TEST_NIGHTLY ? 
3 : 1; int i = 0; for (i = 0; i < threadCount; i++) { - StoppableIndexingThread indexThread = new StoppableIndexingThread(controlClient, cloudClient, Integer.toString(i), true, 35, 1, true); + StoppableIndexingThread indexThread = + new StoppableIndexingThread( + controlClient, cloudClient, Integer.toString(i), true, 35, 1, true); threads.add(indexThread); indexTreads.add(indexThread); indexThread.start(); } - + threadCount = 1; i = 0; for (i = 0; i < threadCount; i++) { @@ -194,19 +209,26 @@ public void test() throws Exception { threads.add(searchThread); searchThread.start(); } - + if (usually()) { StoppableCommitThread commitThread = new StoppableCommitThread(cloudClient, 1000, false); threads.add(commitThread); commitThread.start(); } - + if (runFullThrottle) { - ftIndexThread = - new FullThrottleStoppableIndexingThread(cloudClient.getHttpClient(), controlClient, cloudClient, clients, "ft1", true, this.clientSoTimeout); + ftIndexThread = + new FullThrottleStoppableIndexingThread( + cloudClient.getHttpClient(), + controlClient, + cloudClient, + clients, + "ft1", + true, + this.clientSoTimeout); ftIndexThread.start(); } - + chaosMonkey.startTheMonkey(true, 10000); try { long runLength; @@ -215,8 +237,8 @@ public void test() throws Exception { } else { int[] runTimes; if (TEST_NIGHTLY) { - runTimes = new int[] {5000, 6000, 10000, 15000, 25000, 30000, - 30000, 45000, 90000, 120000}; + runTimes = + new int[] {5000, 6000, 10000, 15000, 25000, 30000, 30000, 45000, 90000, 120000}; } else { runTimes = new int[] {5000, 7000, 10000}; } @@ -233,74 +255,83 @@ public void test() throws Exception { if (runFullThrottle) { ftIndexThread.safeStop(); } - + for (StoppableThread indexThread : threads) { indexThread.safeStop(); } - + // start any downed jetties to be sure we still will end up with a leader per shard... - + // wait for stop... for (StoppableThread indexThread : threads) { indexThread.join(); } - + // try and wait for any replications and what not to finish... - + ChaosMonkey.wait(2000, DEFAULT_COLLECTION, zkStateReader); - + // wait until there are no recoveries... 
waitForThingsToLevelOut(); - + // make sure we again have leaders for each shard for (int j = 1; j < sliceCount; j++) { zkStateReader.getLeaderRetry(DEFAULT_COLLECTION, "shard" + j, 30000); } - + commit(); - + // TODO: assert we didnt kill everyone - + zkStateReader.updateLiveNodes(); assertTrue(zkStateReader.getClusterState().getLiveNodes().size() > 0); - - + // we expect full throttle fails, but cloud client should not easily fail for (StoppableThread indexThread : threads) { - if (indexThread instanceof StoppableIndexingThread && !(indexThread instanceof FullThrottleStoppableIndexingThread)) { + if (indexThread instanceof StoppableIndexingThread + && !(indexThread instanceof FullThrottleStoppableIndexingThread)) { int failCount = ((StoppableIndexingThread) indexThread).getFailCount(); - assertFalse("There were too many update fails (" + failCount + " > " + FAIL_TOLERANCE - + ") - we expect it can happen, but shouldn't easily", failCount > FAIL_TOLERANCE); + assertFalse( + "There were too many update fails (" + + failCount + + " > " + + FAIL_TOLERANCE + + ") - we expect it can happen, but shouldn't easily", + failCount > FAIL_TOLERANCE); } } - - waitForReplicationFromReplicas(DEFAULT_COLLECTION, zkStateReader, new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME)); -// waitForAllWarmingSearchers(); - + + waitForReplicationFromReplicas( + DEFAULT_COLLECTION, + zkStateReader, + new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME)); + // waitForAllWarmingSearchers(); + Set addFails = getAddFails(indexTreads); Set deleteFails = getDeleteFails(indexTreads); // full throttle thread can // have request fails - checkShardConsistency(!runFullThrottle, true, addFails, deleteFails); - - long ctrlDocs = controlClient.query(new SolrQuery("*:*")).getResults() - .getNumFound(); - + checkShardConsistency(!runFullThrottle, true, addFails, deleteFails); + + long ctrlDocs = controlClient.query(new SolrQuery("*:*")).getResults().getNumFound(); + // ensure we have added more than 0 docs - long cloudClientDocs = cloudClient.query(new SolrQuery("*:*")) - .getResults().getNumFound(); - + long cloudClientDocs = cloudClient.query(new SolrQuery("*:*")).getResults().getNumFound(); + assertTrue("Found " + ctrlDocs + " control docs", cloudClientDocs > 0); if (log.isInfoEnabled()) { log.info("collection state: {}", printClusterStateInfo(DEFAULT_COLLECTION)); } - - if (VERBOSE) System.out.println("control docs:" - + controlClient.query(new SolrQuery("*:*")).getResults() - .getNumFound() + "\n\n"); - - // try and make a collection to make sure the overseer has survived the expiration and session loss + + if (VERBOSE) + System.out.println( + "control docs:" + + controlClient.query(new SolrQuery("*:*")).getResults().getNumFound() + + "\n\n"); + + // try and make a collection to make sure the overseer has survived the expiration and session + // loss // sometimes we restart zookeeper as well if (random().nextBoolean()) { @@ -308,14 +339,13 @@ public void test() throws Exception { } try (CloudSolrClient client = createCloudClient("collection1", 30000)) { - createCollection(null, "testcollection", - 1, 1, client, null, "conf1"); + createCollection(null, "testcollection", 1, 1, client, null, "conf1"); } List numShardsNumReplicas = new ArrayList<>(2); numShardsNumReplicas.add(1); numShardsNumReplicas.add(1 + getPullReplicaCount()); checkForCollection("testcollection", numShardsNumReplicas, null); - + testSuccessful = true; } finally { if (!testSuccessful) { @@ -327,18 +357,18 @@ public void test() throws Exception { 
private Set getAddFails(List threads) { Set addFails = new HashSet(); - for (StoppableIndexingThread thread : threads) { + for (StoppableIndexingThread thread : threads) { addFails.addAll(thread.getAddFails()); -// addFails.addAll(thread.getAddFailsMinRf()); + // addFails.addAll(thread.getAddFailsMinRf()); } return addFails; } - + private Set getDeleteFails(List threads) { Set deleteFails = new HashSet(); - for (StoppableIndexingThread thread : threads) { + for (StoppableIndexingThread thread : threads) { deleteFails.addAll(thread.getDeleteFails()); -// deleteFails.addAll(thread.getDeleteFailsMinRf()); + // deleteFails.addAll(thread.getDeleteFailsMinRf()); } return deleteFails; } @@ -349,5 +379,4 @@ protected void indexr(Object... fields) throws Exception { SolrInputDocument doc = getDoc(fields); indexDoc(doc); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderTest.java index 414e3a4e736..754324b1698 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderTest.java @@ -30,5 +30,4 @@ public class ChaosMonkeySafeLeaderTest extends AbstractChaosMonkeySafeLeaderTest protected String getDirectoryFactory() { return DIRECTORY_FACTORY; } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java index 33afa703898..e09db9c3cdf 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java @@ -21,7 +21,6 @@ import java.util.EnumSet; import java.util.List; import java.util.concurrent.TimeUnit; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; @@ -43,27 +42,30 @@ @Slow public class ChaosMonkeySafeLeaderWithPullReplicasTest extends AbstractFullDistribZkTestBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - private static final Integer RUN_LENGTH = Integer.parseInt(System.getProperty("solr.tests.cloud.cm.runlength", "-1")); - + + private static final Integer RUN_LENGTH = + Integer.parseInt(System.getProperty("solr.tests.cloud.cm.runlength", "-1")); + private final boolean useTlogReplicas = random().nextBoolean(); - + private final int numPullReplicas; private final int numRealtimeOrTlogReplicas; - + @Override protected int getPullReplicaCount() { return numPullReplicas; } - + @Override protected boolean useTlogReplicas() { - return false; // TODO: tlog replicas makes commits take way to long due to what is likely a bug and it's TestInjection use + // TODO: tlog replicas makes commits take way to long due to what is likely a bug and it's + // TestInjection use + return false; } @BeforeClass public static void beforeSuperClass() { - schemaString = "schema15.xml"; // we need a string id + schemaString = "schema15.xml"; // we need a string id if (usually()) { System.setProperty("solr.autoCommit.maxTime", "15000"); } @@ -72,7 +74,7 @@ public static void beforeSuperClass() { System.clearProperty("solr.retries.to.followers"); setErrorHook(); } - + @AfterClass public static void afterSuperClass() { System.clearProperty("solr.autoCommit.maxTime"); @@ -80,9 +82,9 @@ public static void 
afterSuperClass() { TestInjection.reset(); } - protected static final String[] fieldNames = new String[]{"f_i", "f_f", "f_d", "f_l", "f_dt"}; - protected static final RandVal[] randVals = new RandVal[]{rint, rfloat, rdouble, rlong, rdate}; - + protected static final String[] fieldNames = new String[] {"f_i", "f_f", "f_d", "f_l", "f_dt"}; + protected static final RandVal[] randVals = new RandVal[] {rint, rfloat, rdouble, rlong, rdate}; + public String[] getFieldNames() { return fieldNames; } @@ -90,13 +92,13 @@ public String[] getFieldNames() { public RandVal[] getRandValues() { return randVals; } - + @Override public void distribSetUp() throws Exception { useFactory("solr.StandardDirectoryFactory"); super.distribSetUp(); } - + public ChaosMonkeySafeLeaderWithPullReplicasTest() { super(); numPullReplicas = random().nextInt(TEST_NIGHTLY ? 3 : 2) + 1; @@ -110,33 +112,43 @@ public ChaosMonkeySafeLeaderWithPullReplicasTest() { fixShardCount(numNodes); log.info("Starting ChaosMonkey test with {} shards and {} nodes", sliceCount, numNodes); } - + @Test - //2018-06-18 (commented) @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") public void test() throws Exception { - DocCollection docCollection = cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION); + DocCollection docCollection = + cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION); assertEquals(this.sliceCount, docCollection.getSlices().size()); Slice s = docCollection.getSlice("shard1"); assertNotNull(s); - assertEquals("Unexpected number of replicas. Collection: " + docCollection, numRealtimeOrTlogReplicas + numPullReplicas, s.getReplicas().size()); - assertEquals("Unexpected number of pull replicas. Collection: " + docCollection, numPullReplicas, s.getReplicas(EnumSet.of(Replica.Type.PULL)).size()); - assertEquals(useTlogReplicas()?0:numRealtimeOrTlogReplicas, s.getReplicas(EnumSet.of(Replica.Type.NRT)).size()); - assertEquals(useTlogReplicas()?numRealtimeOrTlogReplicas:0, s.getReplicas(EnumSet.of(Replica.Type.TLOG)).size()); + assertEquals( + "Unexpected number of replicas. Collection: " + docCollection, + numRealtimeOrTlogReplicas + numPullReplicas, + s.getReplicas().size()); + assertEquals( + "Unexpected number of pull replicas. Collection: " + docCollection, + numPullReplicas, + s.getReplicas(EnumSet.of(Replica.Type.PULL)).size()); + assertEquals( + useTlogReplicas() ? 0 : numRealtimeOrTlogReplicas, + s.getReplicas(EnumSet.of(Replica.Type.NRT)).size()); + assertEquals( + useTlogReplicas() ? numRealtimeOrTlogReplicas : 0, + s.getReplicas(EnumSet.of(Replica.Type.TLOG)).size()); handle.clear(); handle.put("timestamp", SKIPVAL); - + // randomly turn on 1 seconds 'soft' commit randomlyEnableAutoSoftCommit(); tryDelete(); - + List threads = new ArrayList<>(); int threadCount = 2; int batchSize = 1; if (random().nextBoolean()) { batchSize = random().nextInt(98) + 2; } - + boolean pauseBetweenUpdates = TEST_NIGHTLY ? 
random().nextBoolean() : true; int maxUpdates = -1; if (!pauseBetweenUpdates) { @@ -144,17 +156,25 @@ public void test() throws Exception { } else { maxUpdates = 15000; } - + for (int i = 0; i < threadCount; i++) { - StoppableIndexingThread indexThread = new StoppableIndexingThread(controlClient, cloudClient, Integer.toString(i), true, maxUpdates, batchSize, pauseBetweenUpdates); // random().nextInt(999) + 1 + StoppableIndexingThread indexThread = + new StoppableIndexingThread( + controlClient, + cloudClient, + Integer.toString(i), + true, + maxUpdates, + batchSize, + pauseBetweenUpdates); // random().nextInt(999) + 1 threads.add(indexThread); indexThread.start(); } - + StoppableCommitThread commitThread = new StoppableCommitThread(cloudClient, 1000, false); threads.add(commitThread); commitThread.start(); - + chaosMonkey.startTheMonkey(false, 500); try { long runLength; @@ -163,58 +183,64 @@ public void test() throws Exception { } else { int[] runTimes; if (TEST_NIGHTLY) { - runTimes = new int[] {5000, 6000, 10000, 15000, 25000, 30000, - 30000, 45000, 90000, 120000}; + runTimes = + new int[] {5000, 6000, 10000, 15000, 25000, 30000, 30000, 45000, 90000, 120000}; } else { runTimes = new int[] {5000, 7000, 15000}; } runLength = runTimes[random().nextInt(runTimes.length - 1)]; } - + ChaosMonkey.wait(runLength, DEFAULT_COLLECTION, cloudClient.getZkStateReader()); } finally { chaosMonkey.stopTheMonkey(); } - + for (StoppableThread thread : threads) { thread.safeStop(); } - + // wait for stop... for (StoppableThread thread : threads) { thread.join(); } - + for (StoppableThread thread : threads) { if (thread instanceof StoppableIndexingThread) { - assertEquals(0, ((StoppableIndexingThread)thread).getFailCount()); + assertEquals(0, ((StoppableIndexingThread) thread).getFailCount()); } } - + // try and wait for any replications and what not to finish... 
Thread.sleep(2000); waitForThingsToLevelOut(3, TimeUnit.MINUTES); - + // even if things were leveled out, a jetty may have just been stopped or something // we wait again and wait to level out again to make sure the system is not still in flux - + Thread.sleep(3000); waitForThingsToLevelOut(3, TimeUnit.MINUTES); if (log.isInfoEnabled()) { - log.info("control docs:{}\n\n", controlClient.query(new SolrQuery("*:*")).getResults().getNumFound()); + log.info( + "control docs:{}\n\n", + controlClient.query(new SolrQuery("*:*")).getResults().getNumFound()); log.info("collection state: {}", printClusterStateInfo(DEFAULT_COLLECTION)); // nowarn } - - waitForReplicationFromReplicas(DEFAULT_COLLECTION, cloudClient.getZkStateReader(), new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME)); -// waitForAllWarmingSearchers(); + + waitForReplicationFromReplicas( + DEFAULT_COLLECTION, + cloudClient.getZkStateReader(), + new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME)); + // waitForAllWarmingSearchers(); checkShardConsistency(batchSize == 1, true); - - // try and make a collection to make sure the overseer has survived the expiration and session loss + + // try and make a collection to make sure the overseer has survived the expiration and session + // loss // sometimes we restart zookeeper as well if (random().nextBoolean()) { @@ -224,13 +250,12 @@ public void test() throws Exception { } try (CloudSolrClient client = createCloudClient("collection1")) { - createCollection(null, "testcollection", 1, 1, client, null, "conf1"); - + createCollection(null, "testcollection", 1, 1, client, null, "conf1"); } List numShardsNumReplicas = new ArrayList<>(2); numShardsNumReplicas.add(1); numShardsNumReplicas.add(1 + getPullReplicaCount()); - checkForCollection("testcollection",numShardsNumReplicas, null); + checkForCollection("testcollection", numShardsNumReplicas, null); } private void tryDelete() throws Exception { @@ -247,7 +272,7 @@ private void tryDelete() throws Exception { Thread.sleep(100); } } - + // skip the randoms - they can deadlock... @Override protected void indexr(Object... fields) throws Exception { @@ -256,5 +281,4 @@ protected void indexr(Object... fields) throws Exception { addFields(doc, "rnd_b", true); indexDoc(doc); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java index 5be91da63f1..1dba0e1adac 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java @@ -24,7 +24,6 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.cloud.api.collections.ShardSplitTest; @@ -48,9 +47,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * Test split phase that occurs when a Collection API split call is made. - */ +/** Test split phase that occurs when a Collection API split call is made. 
*/ @Slow @Ignore("SOLR-4944") public class ChaosMonkeyShardSplitTest extends ShardSplitTest { @@ -59,12 +56,12 @@ public class ChaosMonkeyShardSplitTest extends ShardSplitTest { static final int TIMEOUT = 10000; private AtomicInteger killCounter = new AtomicInteger(); - + @BeforeClass public static void beforeSuperClass() { System.clearProperty("solr.httpclient.retries"); System.clearProperty("solr.retries.on.forward"); - System.clearProperty("solr.retries.to.followers"); + System.clearProperty("solr.retries.to.followers"); } @Test @@ -72,9 +69,12 @@ public void test() throws Exception { waitForThingsToLevelOut(15, TimeUnit.SECONDS); ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); - final DocRouter router = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getRouter(); - Slice shard1 = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getSlice(SHARD1); - DocRouter.Range shard1Range = shard1.getRange() != null ? shard1.getRange() : router.fullRange(); + final DocRouter router = + clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getRouter(); + Slice shard1 = + clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getSlice(SHARD1); + DocRouter.Range shard1Range = + shard1.getRange() != null ? shard1.getRange() : router.fullRange(); final List ranges = router.partitionRange(2, shard1Range); final int[] docCounts = new int[ranges.size()]; int numReplicas = shard1.getReplicas().size(); @@ -92,20 +92,22 @@ public void test() throws Exception { } commit(); - indexThread = new Thread() { - @Override - public void run() { - int max = atLeast(401); - for (int id = 101; id < max; id++) { - try { - indexAndUpdateCount(router, ranges, docCounts, String.valueOf(id), id, documentIds); - Thread.sleep(atLeast(25)); - } catch (Exception e) { - log.error("Exception while adding doc", e); + indexThread = + new Thread() { + @Override + public void run() { + int max = atLeast(401); + for (int id = 101; id < max; id++) { + try { + indexAndUpdateCount( + router, ranges, docCounts, String.valueOf(id), id, documentIds); + Thread.sleep(atLeast(25)); + } catch (Exception e) { + log.error("Exception while adding doc", e); + } + } } - } - } - }; + }; indexThread.start(); // kill the leader @@ -150,10 +152,8 @@ public void run() { // distributed commit on all shards } finally { - if (indexThread != null) - indexThread.join(); - if (solrClient != null) - solrClient.commit(); + if (indexThread != null) indexThread.join(); + if (solrClient != null) solrClient.commit(); if (killer != null) { killer.run = false; if (killerThread != null) { @@ -227,7 +227,8 @@ private void waitTillRecovered() throws Exception { Collection replicas = slice.getReplicas(); boolean allActive = true; for (Replica replica : replicas) { - if (!clusterState.liveNodesContain(replica.getNodeName()) || replica.getState() != Replica.State.ACTIVE) { + if (!clusterState.liveNodesContain(replica.getNodeName()) + || replica.getState() != Replica.State.ACTIVE) { allActive = false; break; } @@ -254,23 +255,29 @@ protected void indexr(Object... 
fields) throws Exception { * * @return SolrZkClient */ - private SolrZkClient electNewOverseer(String address) throws KeeperException, - InterruptedException, IOException { + private SolrZkClient electNewOverseer(String address) + throws KeeperException, InterruptedException, IOException { SolrZkClient zkClient = new SolrZkClient(address, TIMEOUT); ZkStateReader reader = new ZkStateReader(zkClient); LeaderElector overseerElector = new LeaderElector(zkClient); - UpdateShardHandler updateShardHandler = new UpdateShardHandler(UpdateShardHandlerConfig.DEFAULT); + UpdateShardHandler updateShardHandler = + new UpdateShardHandler(UpdateShardHandlerConfig.DEFAULT); try (HttpShardHandlerFactory hshf = new HttpShardHandlerFactory()) { - Overseer overseer = new Overseer((HttpShardHandler) hshf.getShardHandler(), updateShardHandler, "/admin/cores", - reader, null, new CloudConfig.CloudConfigBuilder("127.0.0.1", 8983, "solr").build()); + Overseer overseer = + new Overseer( + (HttpShardHandler) hshf.getShardHandler(), + updateShardHandler, + "/admin/cores", + reader, + null, + new CloudConfig.CloudConfigBuilder("127.0.0.1", 8983, "solr").build()); overseer.close(); - ElectionContext ec = new OverseerElectionContext(zkClient, overseer, - address.replaceAll("/", "_")); + ElectionContext ec = + new OverseerElectionContext(zkClient, overseer, address.replaceAll("/", "_")); overseerElector.setup(ec); overseerElector.joinElection(ec, false); } reader.close(); return zkClient; } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/CleanupOldIndexTest.java b/solr/core/src/test/org/apache/solr/cloud/CleanupOldIndexTest.java index ff1660f9f90..52cf837fc5d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CleanupOldIndexTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/CleanupOldIndexTest.java @@ -21,7 +21,6 @@ import java.util.Date; import java.util.Locale; import java.util.concurrent.TimeUnit; - import org.apache.commons.io.FileUtils; import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.client.solrj.embedded.JettySolrRunner; @@ -42,7 +41,8 @@ public static void setupCluster() throws Exception { // we restart jetty and expect to find on disk data - need a local fs directory useFactory(null); configureCluster(2) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-dynamic").resolve("conf")) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-dynamic").resolve("conf")) .configure(); } @@ -52,7 +52,6 @@ public static void afterClass() throws Exception { if (null != cluster && suiteFailureMarker.wasSuccessful()) { zkClient().printLayoutToStream(System.out); } - } private static final String COLLECTION = "oldindextest"; @@ -62,12 +61,14 @@ public void test() throws Exception { CollectionAdminRequest.createCollection(COLLECTION, "conf1", 1, 2) .processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT); - cluster.getSolrClient().setDefaultCollection(COLLECTION); // TODO make this configurable on StoppableIndexingThread + // TODO make this configurable on StoppableIndexingThread + cluster.getSolrClient().setDefaultCollection(COLLECTION); int[] maxDocList = new int[] {300, 500, 700}; int maxDoc = maxDocList[random().nextInt(maxDocList.length - 1)]; - StoppableIndexingThread indexThread = new StoppableIndexingThread(null, cluster.getSolrClient(), "1", true, maxDoc, 1, true); + StoppableIndexingThread indexThread = + new StoppableIndexingThread(null, cluster.getSolrClient(), "1", true, maxDoc, 1, true); indexThread.start(); // give some time to index... 
@@ -78,17 +79,20 @@ public void test() throws Exception { JettySolrRunner jetty = cluster.getRandomJetty(random()); CoreContainer coreContainer = jetty.getCoreContainer(); File dataDir = null; - try (SolrCore solrCore = coreContainer.getCore(coreContainer.getCoreDescriptors().get(0).getName())) { + try (SolrCore solrCore = + coreContainer.getCore(coreContainer.getCoreDescriptors().get(0).getName())) { dataDir = new File(solrCore.getDataDir()); } assertTrue(dataDir.isDirectory()); - long msInDay = 60*60*24L; - String timestamp1 = new SimpleDateFormat(SnapShooter.DATE_FMT, Locale.ROOT).format(new Date(1*msInDay)); - String timestamp2 = new SimpleDateFormat(SnapShooter.DATE_FMT, Locale.ROOT).format(new Date(2*msInDay)); - File oldIndexDir1 = new File(dataDir, "index."+timestamp1); + long msInDay = 60 * 60 * 24L; + String timestamp1 = + new SimpleDateFormat(SnapShooter.DATE_FMT, Locale.ROOT).format(new Date(1 * msInDay)); + String timestamp2 = + new SimpleDateFormat(SnapShooter.DATE_FMT, Locale.ROOT).format(new Date(2 * msInDay)); + File oldIndexDir1 = new File(dataDir, "index." + timestamp1); FileUtils.forceMkdir(oldIndexDir1); - File oldIndexDir2 = new File(dataDir, "index."+timestamp2); + File oldIndexDir2 = new File(dataDir, "index." + timestamp2); FileUtils.forceMkdir(oldIndexDir2); // verify the "old" index directories exist @@ -111,12 +115,15 @@ public void test() throws Exception { indexThread.safeStop(); indexThread.join(); - cluster.getSolrClient().waitForState(COLLECTION, DEFAULT_TIMEOUT, TimeUnit.SECONDS, - (n, c) -> DocCollection.isFullyActive(n, c, 1, 2)); + cluster + .getSolrClient() + .waitForState( + COLLECTION, + DEFAULT_TIMEOUT, + TimeUnit.SECONDS, + (n, c) -> DocCollection.isFullyActive(n, c, 1, 2)); assertTrue(!oldIndexDir1.isDirectory()); assertTrue(!oldIndexDir2.isDirectory()); } - - } diff --git a/solr/core/src/test/org/apache/solr/cloud/CloudExitableDirectoryReaderTest.java b/solr/core/src/test/org/apache/solr/cloud/CloudExitableDirectoryReaderTest.java index 8fa115742ba..d48b6cb02b8 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CloudExitableDirectoryReaderTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/CloudExitableDirectoryReaderTest.java @@ -16,14 +16,19 @@ */ package org.apache.solr.cloud; -import java.lang.invoke.MethodHandles; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.concurrent.TimeUnit; +import static org.apache.solr.cloud.TrollingIndexReaderFactory.CheckMethodName; +import static org.apache.solr.cloud.TrollingIndexReaderFactory.Trap; +import static org.apache.solr.cloud.TrollingIndexReaderFactory.catchClass; +import static org.apache.solr.cloud.TrollingIndexReaderFactory.catchCount; +import static org.apache.solr.cloud.TrollingIndexReaderFactory.catchTrace; import com.carrotsearch.randomizedtesting.annotations.Repeat; import com.codahale.metrics.Metered; import com.codahale.metrics.MetricRegistry; +import java.lang.invoke.MethodHandles; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.concurrent.TimeUnit; import org.apache.lucene.util.TestUtil; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.embedded.JettySolrRunner; @@ -44,17 +49,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.cloud.TrollingIndexReaderFactory.CheckMethodName; -import static org.apache.solr.cloud.TrollingIndexReaderFactory.Trap; -import static org.apache.solr.cloud.TrollingIndexReaderFactory.catchClass; -import static 
org.apache.solr.cloud.TrollingIndexReaderFactory.catchCount; -import static org.apache.solr.cloud.TrollingIndexReaderFactory.catchTrace; - -/** -* Distributed test for {@link org.apache.lucene.index.ExitableDirectoryReader} -*/ +/** Distributed test for {@link org.apache.lucene.index.ExitableDirectoryReader} */ public class CloudExitableDirectoryReaderTest extends SolrCloudTestCase { - + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final int NUM_DOCS_PER_TYPE = 20; @@ -65,53 +62,66 @@ public class CloudExitableDirectoryReaderTest extends SolrCloudTestCase { /** * Client used for all test requests. - *
<p>
- * LBSolrClient (and by extension CloudSolrClient) has it's own enforcement of timeAllowed - * in an attempt to prevent "retrying" failed requests far longer then the client requested. - * Because of this client side logic, we do not want to use any LBSolrClient (derivative) in - * this test, in order to ensure that on a "slow" machine, the client doesn't pre-emptively - * abort any of our requests that use very low 'timeAllowed' values. - * </p>
- * <p>
- * ie: This test is not about testing the SolrClient, so keep the SOlrClient simple. - * </p>
+ * + * <p>
LBSolrClient (and by extension CloudSolrClient) has it's own enforcement of timeAllowed in + * an attempt to prevent "retrying" failed requests far longer then the client requested. Because + * of this client side logic, we do not want to use any LBSolrClient (derivative) in this test, in + * order to ensure that on a "slow" machine, the client doesn't pre-emptively abort any of our + * requests that use very low 'timeAllowed' values. + * + * <p>
ie: This test is not about testing the SolrClient, so keep the SOlrClient simple. */ private static SolrClient client; - + @BeforeClass public static void setupCluster() throws Exception { // create one more node then shard, so that we also test the case of proxied requests. - MiniSolrCloudCluster.Builder clusterBuilder = configureCluster(3) - .addConfig("conf", TEST_PATH().resolve("configsets").resolve("exitable-directory").resolve("conf")); + MiniSolrCloudCluster.Builder clusterBuilder = + configureCluster(3) + .addConfig( + "conf", + TEST_PATH().resolve("configsets").resolve("exitable-directory").resolve("conf")); clusterBuilder.withMetrics(true); - clusterBuilder - .configure(); + clusterBuilder.configure(); // pick an arbitrary node to use for our requests client = cluster.getRandomJetty(random()).newClient(); CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 1) .processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT); - cluster.getSolrClient().waitForState(COLLECTION, DEFAULT_TIMEOUT, TimeUnit.SECONDS, - (n, c) -> DocCollection.isFullyActive(n, c, 2, 1)); + cluster + .getSolrClient() + .waitForState( + COLLECTION, + DEFAULT_TIMEOUT, + TimeUnit.SECONDS, + (n, c) -> DocCollection.isFullyActive(n, c, 2, 1)); fiveHundredsByNode = new LinkedHashMap<>(); int httpOk = 0; - for (JettySolrRunner jetty: cluster.getJettySolrRunners()) { - MetricRegistry metricRegistry = ((JettySolrRunnerWithMetrics)jetty).getMetricRegistry(); - - httpOk += ((Metered) metricRegistry.getMetrics() - .get("org.eclipse.jetty.servlet.ServletContextHandler.2xx-responses")).getCount(); - - Metered old = fiveHundredsByNode.put(jetty.getNodeName(), - (Metered) metricRegistry.getMetrics() - .get("org.eclipse.jetty.servlet.ServletContextHandler.5xx-responses")); - assertNull("expecting uniq nodenames",old); + for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { + MetricRegistry metricRegistry = ((JettySolrRunnerWithMetrics) jetty).getMetricRegistry(); + + httpOk += + ((Metered) + metricRegistry + .getMetrics() + .get("org.eclipse.jetty.servlet.ServletContextHandler.2xx-responses")) + .getCount(); + + Metered old = + fiveHundredsByNode.put( + jetty.getNodeName(), + (Metered) + metricRegistry + .getMetrics() + .get("org.eclipse.jetty.servlet.ServletContextHandler.5xx-responses")); + assertNull("expecting uniq nodenames", old); } assertTrue("expecting some http activity during collection creation", httpOk > 0); indexDocs(); } - + @AfterClass public static void closeClient() throws Exception { if (null != client) { @@ -125,31 +135,60 @@ public static void indexDocs() throws Exception { counter = 1; UpdateRequest req = new UpdateRequest(); - for(; (counter % NUM_DOCS_PER_TYPE) != 0; counter++ ) { + for (; (counter % NUM_DOCS_PER_TYPE) != 0; counter++) { final String v = "a" + counter; - req.add(sdoc("id", Integer.toString(counter), "name", v, - "name_dv", v, - "name_dvs", v,"name_dvs", v+"1", - "num",""+counter)); + req.add( + sdoc( + "id", + Integer.toString(counter), + "name", + v, + "name_dv", + v, + "name_dvs", + v, + "name_dvs", + v + "1", + "num", + "" + counter)); } counter++; - for(; (counter % NUM_DOCS_PER_TYPE) != 0; counter++ ) { + for (; (counter % NUM_DOCS_PER_TYPE) != 0; counter++) { final String v = "b" + counter; - req.add(sdoc("id", Integer.toString(counter), "name", v, - "name_dv", v, - "name_dvs", v,"name_dvs", v+"1", - "num",""+counter)); + req.add( + sdoc( + "id", + Integer.toString(counter), + "name", + v, + "name_dv", + v, + "name_dvs", + v, + "name_dvs", + v + "1", + "num", + "" + 
counter)); } counter++; - for(; counter % NUM_DOCS_PER_TYPE != 0; counter++ ) { + for (; counter % NUM_DOCS_PER_TYPE != 0; counter++) { final String v = "dummy term doc" + counter; - req.add(sdoc("id", Integer.toString(counter), "name", - v, - "name_dv", v, - "name_dvs", v,"name_dvs", v+"1", - "num",""+counter)); + req.add( + sdoc( + "id", + Integer.toString(counter), + "name", + v, + "name_dv", + v, + "name_dvs", + v, + "name_dvs", + v + "1", + "num", + "" + counter)); } req.commit(client, COLLECTION); @@ -160,33 +199,33 @@ public void test() throws Exception { assertPartialResults(params("q", "name:a*", "timeAllowed", "1", "sleep", sleep)); /* - query rewriting for NUM_DOCS_PER_TYPE terms should take less - time than this. Keeping it at 5 because the delaying search component delays all requests + query rewriting for NUM_DOCS_PER_TYPE terms should take less + time than this. Keeping it at 5 because the delaying search component delays all requests by at 1 second. */ int fiveSeconds = 5000; - + Integer timeAllowed = TestUtil.nextInt(random(), fiveSeconds, Integer.MAX_VALUE); assertSuccess(params("q", "name:a*", "timeAllowed", timeAllowed.toString())); assertPartialResults(params("q", "name:a*", "timeAllowed", "1", "sleep", sleep)); timeAllowed = TestUtil.nextInt(random(), fiveSeconds, Integer.MAX_VALUE); - assertSuccess(params("q", "name:b*", "timeAllowed",timeAllowed.toString())); + assertSuccess(params("q", "name:b*", "timeAllowed", timeAllowed.toString())); // negative timeAllowed should disable timeouts - timeAllowed = TestUtil.nextInt(random(), Integer.MIN_VALUE, -1); - assertSuccess(params("q", "name:b*", "timeAllowed",timeAllowed.toString())); + timeAllowed = TestUtil.nextInt(random(), Integer.MIN_VALUE, -1); + assertSuccess(params("q", "name:b*", "timeAllowed", timeAllowed.toString())); - assertSuccess(params("q","name:b*")); // no time limitation + assertSuccess(params("q", "name:b*")); // no time limitation } @Test public void testWhitebox() throws Exception { - - try (Trap catchIds = catchTrace( - new CheckMethodName("doProcessSearchByIds"), () -> {})) { - assertPartialResults(params("q", "{!cache=false}name:a*", "sort", "query($q,1) asc"), + + try (Trap catchIds = catchTrace(new CheckMethodName("doProcessSearchByIds"), () -> {})) { + assertPartialResults( + params("q", "{!cache=false}name:a*", "sort", "query($q,1) asc"), () -> assertTrue(catchIds.hasCaught())); } catch (AssertionError ae) { Trap.dumpLastStackTraces(log); @@ -195,35 +234,46 @@ public void testWhitebox() throws Exception { // the point is to catch sort_values (fsv) timeout, between search and facet // I haven't find a way to encourage fsv to read index - try (Trap catchFSV = catchTrace( - new CheckMethodName("doFieldSortValues"), () -> {})) { - assertPartialResults(params("q", "{!cache=false}name:a*", "sort", "query($q,1) asc"), + try (Trap catchFSV = catchTrace(new CheckMethodName("doFieldSortValues"), () -> {})) { + assertPartialResults( + params("q", "{!cache=false}name:a*", "sort", "query($q,1) asc"), () -> assertTrue(catchFSV.hasCaught())); } catch (AssertionError ae) { Trap.dumpLastStackTraces(log); throw ae; } - - try (Trap catchClass = catchClass( - QueryComponent.class.getSimpleName(), () -> { })) { - assertPartialResults(params("q", "{!cache=false}name:a*"), - ()->assertTrue(catchClass.hasCaught())); - }catch(AssertionError ae) { + + try (Trap catchClass = catchClass(QueryComponent.class.getSimpleName(), () -> {})) { + assertPartialResults( + params("q", "{!cache=false}name:a*"), () -> 
assertTrue(catchClass.hasCaught())); + } catch (AssertionError ae) { Trap.dumpLastStackTraces(log); throw ae; } - try(Trap catchClass = catchClass(FacetComponent.class.getSimpleName())){ - assertPartialResults(params("q", "{!cache=false}name:a*", "facet","true", "facet.method", "enum", - "facet.field", "id"), - ()->assertTrue(catchClass.hasCaught())); - }catch(AssertionError ae) { + try (Trap catchClass = catchClass(FacetComponent.class.getSimpleName())) { + assertPartialResults( + params( + "q", + "{!cache=false}name:a*", + "facet", + "true", + "facet.method", + "enum", + "facet.field", + "id"), + () -> assertTrue(catchClass.hasCaught())); + } catch (AssertionError ae) { Trap.dumpLastStackTraces(log); throw ae; } try (Trap catchClass = catchClass(FacetModule.class.getSimpleName())) { - assertPartialResults(params("q", "{!cache=false}name:a*", "json.facet", "{ ids: {" - + " type: range, field : num, start : 0, end : 100, gap : 10 }}"), + assertPartialResults( + params( + "q", + "{!cache=false}name:a*", + "json.facet", + "{ ids: {" + " type: range, field : num, start : 0, end : 100, gap : 10 }}"), () -> assertTrue(catchClass.hasCaught())); } catch (AssertionError ae) { Trap.dumpLastStackTraces(log); @@ -231,59 +281,64 @@ public void testWhitebox() throws Exception { } } - @Test - @Repeat(iterations=5) + @Test + @Repeat(iterations = 5) public void testCreepThenBite() throws Exception { - int creep=100; + int creep = 100; ModifiableSolrParams params = params("q", "{!cache=false}name:a*"); - SolrParams cases[] = new SolrParams[] { - params( "sort","query($q,1) asc"), - params("rows","0", "facet","true", "facet.method", "enum", "facet.field", "name"), - params("rows","0", "json.facet","{ ids: { type: range, field : num, start : 1, end : 99, gap : 9 }}"), - params("q", "*:*", "rows","0", "json.facet","{ ids: { type: field, field : num}}"), - params("q", "*:*", "rows","0", "json.facet","{ ids: { type: field, field : name_dv}}"), - params("q", "*:*", "rows","0", "json.facet","{ ids: { type: field, field : name_dvs}}") - }; // add more cases here + SolrParams cases[] = + new SolrParams[] { + params("sort", "query($q,1) asc"), + params("rows", "0", "facet", "true", "facet.method", "enum", "facet.field", "name"), + params( + "rows", + "0", + "json.facet", + "{ ids: { type: range, field : num, start : 1, end : 99, gap : 9 }}"), + params("q", "*:*", "rows", "0", "json.facet", "{ ids: { type: field, field : num}}"), + params("q", "*:*", "rows", "0", "json.facet", "{ ids: { type: field, field : name_dv}}"), + params("q", "*:*", "rows", "0", "json.facet", "{ ids: { type: field, field : name_dvs}}") + }; // add more cases here params.add(cases[random().nextInt(cases.length)]); - for (; ; creep*=1.5) { + for (; ; creep *= 1.5) { final int boundary = creep; - try(Trap catchClass = catchCount(boundary)){ - + try (Trap catchClass = catchCount(boundary)) { + params.set("boundary", boundary); - QueryResponse rsp = client.query(COLLECTION, - params); - assertEquals(""+rsp, rsp.getStatus(), 0); - assertNo500s(""+rsp); + QueryResponse rsp = client.query(COLLECTION, params); + assertEquals("" + rsp, rsp.getStatus(), 0); + assertNo500s("" + rsp); if (!isPartial(rsp)) { assertFalse(catchClass.hasCaught()); break; } assertTrue(catchClass.hasCaught()); - }catch(AssertionError ae) { + } catch (AssertionError ae) { Trap.dumpLastStackTraces(log); throw ae; } } int numBites = atLeast(100); - for(int bite=0; bitem.getCount()==0)); + assertTrue(msg, fiveHundredsByNode.values().stream().allMatch((m) -> m.getCount() == 0)); } - 
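// Illustrative sketch (hypothetical, not from the patched test): the timeAllowed
// contract described in the class javadoc above, exercised through the plain
// non-LB SolrJ client held in the static `client` field. The query string and the
// 1ms budget are invented for illustration; `params`, `COLLECTION`, and the
// partialResults header key are the same helpers used elsewhere in this file. An
// exhausted budget should surface as a 200 response flagged partial, not as a 5xx.
ModifiableSolrParams lowBudget = params("q", "name:a*", "timeAllowed", "1");
QueryResponse rsp = client.query(COLLECTION, lowBudget);
assertEquals(0, rsp.getStatus()); // the request still "succeeds" over HTTP
assertEquals( // ...but the response header marks the result set as partial
    Boolean.TRUE,
    rsp.getHeader().getBooleanArg(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY));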
- /** - * execute a request, verify that we get an expected error - */ + + /** execute a request, verify that we get an expected error */ public void assertPartialResults(ModifiableSolrParams p) throws Exception { - assertPartialResults(p, ()->{}); + assertPartialResults(p, () -> {}); } - - public void assertPartialResults(ModifiableSolrParams p, Runnable postRequestCheck) throws Exception { - QueryResponse rsp = client.query(COLLECTION, p); - postRequestCheck.run(); - assertEquals(rsp.getStatus(), 0); - assertEquals(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY+" were expected at "+rsp, - true, rsp.getHeader().getBooleanArg(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY)); - assertNo500s(""+rsp); + + public void assertPartialResults(ModifiableSolrParams p, Runnable postRequestCheck) + throws Exception { + QueryResponse rsp = client.query(COLLECTION, p); + postRequestCheck.run(); + assertEquals(rsp.getStatus(), 0); + assertEquals( + SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY + " were expected at " + rsp, + true, + rsp.getHeader().getBooleanArg(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY)); + assertNo500s("" + rsp); } - + public void assertSuccess(ModifiableSolrParams p) throws Exception { QueryResponse rsp = client.query(COLLECTION, p); assertEquals(rsp.getStatus(), 0); assertEquals("Wrong #docs in response", NUM_DOCS_PER_TYPE - 1, rsp.getResults().getNumFound()); - assertNotEquals(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY+" weren't expected "+rsp, - true, rsp.getHeader().getBooleanArg(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY)); - assertNo500s(""+rsp); + assertNotEquals( + SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY + " weren't expected " + rsp, + true, + rsp.getHeader().getBooleanArg(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY)); + assertNo500s("" + rsp); } } - diff --git a/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java b/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java index 7b242e22739..3145737bf80 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java +++ b/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java @@ -25,7 +25,6 @@ import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; - import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.DocRouter; @@ -36,43 +35,43 @@ import org.apache.solr.handler.admin.ConfigSetsHandler; /** - * A utility class that can create mock ZkStateReader objects with custom ClusterState objects created - * using a simple string based description. See {@link #buildClusterState(String, int, String...)} for - * details on how the cluster state can be created. + * A utility class that can create mock ZkStateReader objects with custom ClusterState objects + * created using a simple string based description. See {@link #buildClusterState(String, int, + * String...)} for details on how the cluster state can be created. * * @lucene.experimental */ public class ClusterStateMockUtil { - private final static Pattern BLUEPRINT = Pattern.compile("([a-z])(\\d+)?(?:(['A','R','D','F']))?(\\*)?"); + private static final Pattern BLUEPRINT = + Pattern.compile("([a-z])(\\d+)?(?:(['A','R','D','F']))?(\\*)?"); - public static ZkStateReader buildClusterState(String clusterDescription, String ... liveNodes) { + public static ZkStateReader buildClusterState(String clusterDescription, String... 
liveNodes) { return buildClusterState(clusterDescription, 1, liveNodes); } /** - * This method lets you construct a complex ClusterState object by using simple strings of letters. + * This method lets you construct a complex ClusterState object by using simple strings of + * letters. * - * c = collection, s = slice, r = replica (nrt type, default), n = nrt replica, t = tlog replica, p = pull replica, \d = node number (r2 means the replica is on node 2), - * state = [A,R,D,F], * = replica to replace, binds to the left. + *
<p>c = collection, s = slice, r = replica (nrt type, default), n = nrt replica, t = tlog + * replica, p = pull replica, \d = node number (r2 means the replica is on node 2), state = + * [A,R,D,F], * = replica to replace, binds to the left. * - * Creates: + *
<p>For example: csrr2rD*sr2csr * - * 'csrr2rD*' - * A collection, a shard, a replica on node 1 (the default) that is active (the default), a replica on node 2, and a replica on node 1 - * that has a state of down and is the replica we will be looking to put somewhere else (the *). + *
<p>Creates: * - * 'sr2' - * Then, another shard that has a replica on node 2. + *
<p>'csrr2rD*' A collection, a shard, a replica on node 1 (the default) that is active (the + * default), a replica on node 2, and a replica on node 1 that has a state of down and is the + * replica we will be looking to put somewhere else (the *). + *
<p>'sr2' Then, another shard that has a replica on node 2. * - * 'csr' - * Then, another collection that has a shard with a single active replica on node 1. + *
<p>'csr' Then, another collection that has a shard with a single active replica on node 1. * - * Result: - * { + *
<p>
Result: + * { * "collection2":{ * "replicationFactor":"1", * "shards":{"slice1":{ @@ -105,15 +104,16 @@ public static ZkStateReader buildClusterState(String clusterDescription, String * "state":"active", * "node_name":"baseUrl2_", * "base_url":"http://baseUrl2"}}}}}} - * + * */ @SuppressWarnings("resource") - public static ZkStateReader buildClusterState(String clusterDescription, int replicationFactor, String ... liveNodes) { - Map slices = null; - Map replicas = null; - Map collectionProps = new HashMap<>(); + public static ZkStateReader buildClusterState( + String clusterDescription, int replicationFactor, String... liveNodes) { + Map slices = null; + Map replicas = null; + Map collectionProps = new HashMap<>(); collectionProps.put(ZkStateReader.REPLICATION_FACTOR, Integer.toString(replicationFactor)); - Map collectionStates = new HashMap<>(); + Map collectionStates = new HashMap<>(); DocCollection docCollection = null; String collName = null; String sliceName = null; @@ -126,20 +126,28 @@ public static ZkStateReader buildClusterState(String clusterDescription, int rep switch (m.group(1)) { case "c": slices = new HashMap<>(); - collectionProps.put(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME); - docCollection = new DocCollection(collName = "collection" + (collectionStates.size() + 1), slices, collectionProps, DocRouter.DEFAULT); + collectionProps.put( + ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME); + docCollection = + new DocCollection( + collName = "collection" + (collectionStates.size() + 1), + slices, + collectionProps, + DocRouter.DEFAULT); collectionStates.put(docCollection.getName(), docCollection); break; case "s": replicas = new HashMap<>(); - if(collName == null) collName = "collection" + (collectionStates.size() + 1); - slice = new Slice(sliceName = "slice" + (slices.size() + 1), replicas, null, collName); + if (collName == null) collName = "collection" + (collectionStates.size() + 1); + slice = new Slice(sliceName = "slice" + (slices.size() + 1), replicas, null, collName); slices.put(slice.getName(), slice); - // hack alert: the DocCollection constructor copies over active slices to its active slice map in the constructor - // but here we construct the DocCollection before creating the slices which breaks code that calls DocCollection.getActiveSlices - // so here we re-create doc collection with the latest slices map to workaround this problem - // todo: a better fix would be to have a builder class for DocCollection that builds the final object once all the slices and replicas have been created. + // hack alert: the DocCollection constructor copies over active slices to its active slice + // map in the constructor but here we construct the DocCollection before creating the + // slices which breaks code that calls DocCollection.getActiveSlices so here we re-create + // doc collection with the latest slices map to workaround this problem + // todo: a better fix would be to have a builder class for DocCollection that builds the + // final object once all the slices and replicas have been created. 
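// Illustrative sketch (hypothetical, not from the patch): expanding the blueprint
// string from the javadoc above. Node names follow the baseUrlN:portN_ convention
// this class generates (port = 8982 + node number); the assertions simply restate
// the DSL rules (c = collection, s = slice, r2 = replica on node 2, D = DOWN state).
try (ZkStateReader reader =
    ClusterStateMockUtil.buildClusterState("csrr2rD*sr2csr", "baseUrl1:8983_", "baseUrl2:8984_")) {
  ClusterState state = reader.getClusterState();
  DocCollection collection1 = state.getCollection("collection1");
  assertEquals(3, collection1.getSlice("slice1").getReplicas().size()); // r, r2, and rD*
  assertEquals(1, collection1.getSlice("slice2").getReplicas().size()); // the 'sr2' shard
  assertEquals(1, state.getCollection("collection2").getSlice("slice1").getReplicas().size());
}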
docCollection = docCollection.copyWithSlices(slices); collectionStates.put(docCollection.getName(), docCollection); break; @@ -151,15 +159,17 @@ public static ZkStateReader buildClusterState(String clusterDescription, int rep String replicaName = "replica" + replicaCount++; String stateCode = m.group(3); - Map replicaPropMap = makeReplicaProps(sliceName, node, replicaName, stateCode, m.group(1)); + Map replicaPropMap = + makeReplicaProps(sliceName, node, replicaName, stateCode, m.group(1)); if (collName == null) collName = "collection" + (collectionStates.size() + 1); if (sliceName == null) collName = "slice" + (slices.size() + 1); - // O(n^2) alert! but this is for mocks and testing so shouldn't be used for very large cluster states + // O(n^2) alert! but this is for mocks and testing so shouldn't be used for very large + // cluster states boolean leaderFound = false; for (Map.Entry entry : replicas.entrySet()) { Replica value = entry.getValue(); - if ("true".equals(value.get(Slice.LEADER))) { + if ("true".equals(value.get(Slice.LEADER))) { leaderFound = true; break; } @@ -170,17 +180,20 @@ public static ZkStateReader buildClusterState(String clusterDescription, int rep replica = new Replica(replicaName, replicaPropMap, collName, sliceName); replicas.put(replica.getName(), replica); - // hack alert: re-create slice with existing data and new replicas map so that it updates its internal leader attribute + // hack alert: re-create slice with existing data and new replicas map so that it updates + // its internal leader attribute slice = new Slice(slice.getName(), replicas, null, collName); slices.put(slice.getName(), slice); - // we don't need to update doc collection again because we aren't adding a new slice or changing its state + // we don't need to update doc collection again because we aren't adding a new slice or + // changing its state break; default: break; } } - ClusterState clusterState = new ClusterState(new HashSet<>(Arrays.asList(liveNodes)), collectionStates); + ClusterState clusterState = + new ClusterState(new HashSet<>(Arrays.asList(liveNodes)), collectionStates); MockZkStateReader reader = new MockZkStateReader(clusterState, collectionStates.keySet()); String json; @@ -190,7 +203,8 @@ public static ZkStateReader buildClusterState(String clusterDescription, int rep return reader; } - private static Map makeReplicaProps(String sliceName, String node, String replicaName, String stateCode, String replicaTypeCode) { + private static Map makeReplicaProps( + String sliceName, String node, String replicaName, String stateCode, String replicaTypeCode) { if (node == null || node.trim().length() == 0) { node = "1"; } @@ -211,13 +225,12 @@ private static Map makeReplicaProps(String sliceName, String nod state = Replica.State.RECOVERY_FAILED; break; default: - throw new IllegalArgumentException( - "Unexpected state for replica: " + stateCode); + throw new IllegalArgumentException("Unexpected state for replica: " + stateCode); } } Replica.Type replicaType = Replica.Type.NRT; - switch (replicaTypeCode) { + switch (replicaTypeCode) { case "t": replicaType = Replica.Type.TLOG; break; @@ -226,7 +239,7 @@ private static Map makeReplicaProps(String sliceName, String nod break; } - Map replicaPropMap = new HashMap<>(); + Map replicaPropMap = new HashMap<>(); int port = 8982 + Integer.parseInt(node); String nodeName = String.format(Locale.ROOT, "baseUrl%s:%d_", node, port); replicaPropMap.put(ZkStateReader.NODE_NAME_PROP, nodeName); @@ -236,6 +249,4 @@ private static Map 
makeReplicaProps(String sliceName, String nod replicaPropMap.put(ZkStateReader.REPLICA_TYPE, replicaType.name()); return replicaPropMap; } - - } diff --git a/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtilTest.java b/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtilTest.java index 9a72d4b9758..601e92e89b9 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtilTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtilTest.java @@ -26,15 +26,14 @@ import org.apache.solr.common.cloud.ZkStateReader; import org.junit.Test; -/** - * Tests for {@link ClusterStateMockUtil} - */ +/** Tests for {@link ClusterStateMockUtil} */ @SolrTestCaseJ4.SuppressSSL // tests expect http scheme public class ClusterStateMockUtilTest extends SolrTestCaseJ4 { @Test public void testBuildClusterState_Simple() { - try (ZkStateReader zkStateReader = ClusterStateMockUtil.buildClusterState("csr", "baseUrl1:8983_")) { + try (ZkStateReader zkStateReader = + ClusterStateMockUtil.buildClusterState("csr", "baseUrl1:8983_")) { ClusterState clusterState = zkStateReader.getClusterState(); assertNotNull(clusterState); assertEquals(1, clusterState.getCollectionStates().size()); @@ -59,7 +58,8 @@ public void testBuildClusterState_Simple() { @Test public void testBuildClusterState_ReplicaTypes() { - try (ZkStateReader zkStateReader = ClusterStateMockUtil.buildClusterState("csntp", "baseUrl1:8983_")) { + try (ZkStateReader zkStateReader = + ClusterStateMockUtil.buildClusterState("csntp", "baseUrl1:8983_")) { ClusterState clusterState = zkStateReader.getClusterState(); assertNotNull(clusterState); assertEquals(1, clusterState.getCollectionStates().size()); @@ -79,7 +79,8 @@ public void testBuildClusterState_ReplicaTypes() { @Test public void testBuildClusterState_ReplicaStateAndType() { - try (ZkStateReader zkStateReader = ClusterStateMockUtil.buildClusterState("csrStRpDnF", "baseUrl1:8983_")) { + try (ZkStateReader zkStateReader = + ClusterStateMockUtil.buildClusterState("csrStRpDnF", "baseUrl1:8983_")) { ClusterState clusterState = zkStateReader.getClusterState(); assertNotNull(clusterState); assertEquals(1, clusterState.getCollectionStates().size()); @@ -91,10 +92,38 @@ public void testBuildClusterState_ReplicaStateAndType() { Slice slice1 = collection1.getSlice("slice1"); assertNotNull(slice1); assertEquals(4, slice1.getReplicas().size()); - assertEquals(1, slice1.getReplicas(replica -> replica.getType() == Replica.Type.NRT && replica.getState() == Replica.State.ACTIVE).size()); - assertEquals(1, slice1.getReplicas(replica -> replica.getType() == Replica.Type.NRT && replica.getState() == Replica.State.RECOVERY_FAILED).size()); - assertEquals(1, slice1.getReplicas(replica -> replica.getType() == Replica.Type.TLOG && replica.getState() == Replica.State.RECOVERING).size()); - assertEquals(1, slice1.getReplicas(replica -> replica.getType() == Replica.Type.PULL && replica.getState() == Replica.State.DOWN).size()); + assertEquals( + 1, + slice1 + .getReplicas( + replica -> + replica.getType() == Replica.Type.NRT + && replica.getState() == Replica.State.ACTIVE) + .size()); + assertEquals( + 1, + slice1 + .getReplicas( + replica -> + replica.getType() == Replica.Type.NRT + && replica.getState() == Replica.State.RECOVERY_FAILED) + .size()); + assertEquals( + 1, + slice1 + .getReplicas( + replica -> + replica.getType() == Replica.Type.TLOG + && replica.getState() == Replica.State.RECOVERING) + .size()); + assertEquals( + 1, + slice1 + .getReplicas( + replica -> + 
replica.getType() == Replica.Type.PULL + && replica.getState() == Replica.State.DOWN) + .size()); } } -} \ No newline at end of file +} diff --git a/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java b/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java index 71f0007bdcc..ff9fb0747fd 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java @@ -20,7 +20,6 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; @@ -37,14 +36,14 @@ public class ClusterStateTest extends SolrTestCaseJ4 { @Test public void testStoreAndRead() throws Exception { - Map<String, DocCollection> collectionStates = new HashMap<>(); + Map<String, DocCollection> collectionStates = new HashMap<>(); Set<String> liveNodes = new HashSet<>(); liveNodes.add("node1"); liveNodes.add("node2"); - - Map<String, Slice> slices = new HashMap<>(); - Map<String, Replica> sliceToProps = new HashMap<>(); - Map<String, Object> props = new HashMap<>(); + + Map<String, Slice> slices = new HashMap<>(); + Map<String, Replica> sliceToProps = new HashMap<>(); + Map<String, Object> props = new HashMap<>(); String nodeName = "node1:10000_solr"; props.put(ZkStateReader.NODE_NAME_PROP, nodeName); props.put(ZkStateReader.BASE_URL_PROP, Utils.getBaseUrlForNodeName(nodeName, "http")); @@ -59,31 +58,48 @@ public void testStoreAndRead() throws Exception { slices.put("shard1", slice); Slice slice2 = new Slice("shard2", sliceToProps, null, "collection1"); slices.put("shard2", slice2); - collectionStates.put("collection1", new DocCollection("collection1", slices, props, DocRouter.DEFAULT)); - collectionStates.put("collection2", new DocCollection("collection2", slices, props, DocRouter.DEFAULT)); + collectionStates.put( + "collection1", new DocCollection("collection1", slices, props, DocRouter.DEFAULT)); + collectionStates.put( + "collection2", new DocCollection("collection2", slices, props, DocRouter.DEFAULT)); ClusterState clusterState = new ClusterState(liveNodes, collectionStates); byte[] bytes = Utils.toJSON(clusterState); // System.out.println("#################### " + new String(bytes)); ClusterState loadedClusterState = ClusterState.createFromJson(-1, bytes, liveNodes); - - assertEquals("Provided liveNodes not used properly", 2, loadedClusterState - .getLiveNodes().size()); + + assertEquals( + "Provided liveNodes not used properly", 2, loadedClusterState.getLiveNodes().size()); assertEquals("No collections found", 2, loadedClusterState.getCollectionsMap().size()); - assertEquals("Properties not copied properly", replica.getStr("prop1"), loadedClusterState.getCollection("collection1").getSlice("shard1").getReplicasMap().get("node1").getStr("prop1")); - assertEquals("Properties not copied properly", replica.getStr("prop2"), loadedClusterState.getCollection("collection1").getSlice("shard1").getReplicasMap().get("node1").getStr("prop2")); + assertEquals( + "Properties not copied properly", + replica.getStr("prop1"), + loadedClusterState + .getCollection("collection1") + .getSlice("shard1") + .getReplicasMap() + .get("node1") + .getStr("prop1")); + assertEquals( + "Properties not copied properly", + replica.getStr("prop2"), + loadedClusterState + .getCollection("collection1") + .getSlice("shard1") + .getReplicasMap() + .get("node1") + .getStr("prop2")); loadedClusterState = ClusterState.createFromJson(-1, new byte[0], liveNodes); - - assertEquals("Provided liveNodes not used properly", 2, loadedClusterState - .getLiveNodes().size()); + +
assertEquals( + "Provided liveNodes not used properly", 2, loadedClusterState.getLiveNodes().size()); assertEquals("Should not have collections", 0, loadedClusterState.getCollectionsMap().size()); - loadedClusterState = ClusterState.createFromJson(-1, (byte[])null, liveNodes); - - assertEquals("Provided liveNodes not used properly", 2, loadedClusterState - .getLiveNodes().size()); + loadedClusterState = ClusterState.createFromJson(-1, (byte[]) null, liveNodes); + + assertEquals( + "Provided liveNodes not used properly", 2, loadedClusterState.getLiveNodes().size()); assertEquals("Should not have collections", 0, loadedClusterState.getCollectionsMap().size()); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/ClusterStateUpdateTest.java b/solr/core/src/test/org/apache/solr/cloud/ClusterStateUpdateTest.java index 3ab04fa92fe..abc0e4f2343 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ClusterStateUpdateTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ClusterStateUpdateTest.java @@ -20,7 +20,6 @@ import java.lang.invoke.MethodHandles; import java.util.Map; import java.util.Set; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -36,15 +35,13 @@ import org.slf4j.LoggerFactory; @Slow -public class ClusterStateUpdateTest extends SolrCloudTestCase { +public class ClusterStateUpdateTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @Override public void setUp() throws Exception { super.setUp(); - configureCluster(3) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(3).addConfig("conf", configset("cloud-minimal")).configure(); } @BeforeClass @@ -57,29 +54,33 @@ public static void afterClass() throws InterruptedException, IOException { System.clearProperty("solrcloud.skip.autorecovery"); System.clearProperty("genericCoreNodeNames"); } - + @Test public void testCoreRegistration() throws Exception { System.setProperty("solrcloud.update.delay", "1"); - assertEquals(0, CollectionAdminRequest.createCollection("testcore", "conf", 1, 1) - .setCreateNodeSet(cluster.getJettySolrRunner(0).getNodeName()) - .process(cluster.getSolrClient()).getStatus()); + assertEquals( + 0, + CollectionAdminRequest.createCollection("testcore", "conf", 1, 1) + .setCreateNodeSet(cluster.getJettySolrRunner(0).getNodeName()) + .process(cluster.getSolrClient()) + .getStatus()); ZkController zkController2 = cluster.getJettySolrRunner(1).getCoreContainer().getZkController(); String host = zkController2.getHostName(); - + // slight pause - TODO: takes an oddly long amount of time to schedule tasks // with almost no delay ... ClusterState clusterState2 = null; - Map slices = null; + Map slices = null; for (int i = 75; i > 0; i--) { clusterState2 = zkController2.getClusterState(); DocCollection docCollection = clusterState2.getCollectionOrNull("testcore"); slices = docCollection == null ? 
null : docCollection.getSlicesMap(); - - if (slices != null && slices.containsKey("shard1") + + if (slices != null + && slices.containsKey("shard1") && slices.get("shard1").getReplicasMap().size() > 0) { break; } @@ -92,7 +93,7 @@ public void testCoreRegistration() throws Exception { Slice slice = slices.get("shard1"); assertEquals("shard1", slice.getName()); - Map shards = slice.getReplicasMap(); + Map shards = slice.getReplicasMap(); assertEquals(1, shards.size()); @@ -101,10 +102,23 @@ public void testCoreRegistration() throws Exception { assertNotNull(zkProps); - assertEquals(host + ":" +cluster.getJettySolrRunner(0).getLocalPort()+"_solr", zkProps.getStr(ZkStateReader.NODE_NAME_PROP)); - - assertTrue(zkProps.getStr(ZkStateReader.BASE_URL_PROP).contains("http://" + host + ":"+cluster.getJettySolrRunner(0).getLocalPort()+"/solr") - || zkProps.getStr(ZkStateReader.BASE_URL_PROP).contains("https://" + host + ":"+cluster.getJettySolrRunner(0).getLocalPort()+"/solr") ); + assertEquals( + host + ":" + cluster.getJettySolrRunner(0).getLocalPort() + "_solr", + zkProps.getStr(ZkStateReader.NODE_NAME_PROP)); + + assertTrue( + zkProps + .getStr(ZkStateReader.BASE_URL_PROP) + .contains( + "http://" + host + ":" + cluster.getJettySolrRunner(0).getLocalPort() + "/solr") + || zkProps + .getStr(ZkStateReader.BASE_URL_PROP) + .contains( + "https://" + + host + + ":" + + cluster.getJettySolrRunner(0).getLocalPort() + + "/solr")); // assert there are 3 live nodes Set liveNodes = clusterState2.getLiveNodes(); @@ -115,31 +129,42 @@ public void testCoreRegistration() throws Exception { JettySolrRunner j = cluster.stopJettySolrRunner(2); // slight pause (15s timeout) for watch to trigger - for(int i = 0; i < (5 * 15); i++) { - if(zkController2.getClusterState().getLiveNodes().size() == 2) { + for (int i = 0; i < (5 * 15); i++) { + if (zkController2.getClusterState().getLiveNodes().size() == 2) { break; } Thread.sleep(200); } - + cluster.waitForJettyToStop(j); assertEquals(2, zkController2.getClusterState().getLiveNodes().size()); cluster.getJettySolrRunner(1).stop(); cluster.getJettySolrRunner(1).start(); - + // pause for watch to trigger - for(int i = 0; i < 200; i++) { - if (cluster.getJettySolrRunner(0).getCoreContainer().getZkController().getClusterState().liveNodesContain( - cluster.getJettySolrRunner(1).getCoreContainer().getZkController().getNodeName())) { + for (int i = 0; i < 200; i++) { + if (cluster + .getJettySolrRunner(0) + .getCoreContainer() + .getZkController() + .getClusterState() + .liveNodesContain( + cluster.getJettySolrRunner(1).getCoreContainer().getZkController().getNodeName())) { break; } Thread.sleep(100); } - assertTrue(cluster.getJettySolrRunner(0).getCoreContainer().getZkController().getClusterState().liveNodesContain( - cluster.getJettySolrRunner(1).getCoreContainer().getZkController().getNodeName())); + assertTrue( + cluster + .getJettySolrRunner(0) + .getCoreContainer() + .getZkController() + .getClusterState() + .liveNodesContain( + cluster.getJettySolrRunner(1).getCoreContainer().getZkController().getNodeName())); // core.close(); // don't close - this core is managed by container1 now } diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionPropsTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionPropsTest.java index b8ddb3951e3..2dd7ef00872 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CollectionPropsTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/CollectionPropsTest.java @@ -20,14 +20,14 @@ import java.io.IOException; import 
java.lang.invoke.MethodHandles; import java.nio.charset.StandardCharsets; -import java.util.Locale; import java.util.Collections; import java.util.HashMap; +import java.util.Locale; import java.util.Map; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -50,9 +50,7 @@ public class CollectionPropsTest extends SolrCloudTestCase { @BeforeClass public static void setupClass() throws Exception { - configureCluster(4) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(4).addConfig("conf", configset("cloud-minimal")).configure(); } @Before @@ -62,7 +60,8 @@ public void setUp() throws Exception { collectionName = "CollectionPropsTest" + System.nanoTime(); - CollectionAdminRequest.Create request = CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2); + CollectionAdminRequest.Create request = + CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2); CollectionAdminResponse response = request.process(cluster.getSolrClient()); assertTrue("Unable to create collection: " + response.toString(), response.isSuccess()); } @@ -75,7 +74,7 @@ public void testReadWriteNoCache() throws InterruptedException, IOException { collectionProps.setCollectionProperty(collectionName, "property2", "value2"); checkValue("property1", "value1"); checkValue("property2", "value2"); - + collectionProps.setCollectionProperty(collectionName, "property1", "value1"); // no change checkValue("property1", "value1"); @@ -83,14 +82,14 @@ public void testReadWriteNoCache() throws InterruptedException, IOException { collectionProps.setCollectionProperty(collectionName, "property2", "newValue"); checkValue("property1", null); checkValue("property2", "newValue"); - + collectionProps.setCollectionProperty(collectionName, "property2", null); checkValue("property2", null); - + collectionProps.setCollectionProperty(collectionName, "property2", null); // no change checkValue("property2", null); } - + @Test public void testReadWriteCached() throws InterruptedException, IOException { CollectionProperties collectionProps = new CollectionProperties(zkClient()); @@ -98,24 +97,25 @@ public void testReadWriteCached() throws InterruptedException, IOException { // NOTE: Using a semaphore to ensure we wait for Watcher to fire before proceeding with // test logic, to prevent triggering SOLR-13678 final Semaphore sawExpectedProps = new Semaphore(0); - final AtomicReference<Map<String, String>> expectedProps - = new AtomicReference<Map<String, String>>(null); - - final CollectionPropsWatcher w = new CollectionPropsWatcher() { - @Override - public boolean onStateChanged(Map<String, String> collectionProperties) { - log.info("collection properties changed. Now: {}", collectionProperties); - final Map<String, String> expected = expectedProps.get(); - if (expected != null && expected.equals(collectionProperties)) { - log.info("...new props match expected"); - sawExpectedProps.release(); - } - return false; - } - }; - + final AtomicReference<Map<String, String>> expectedProps = + new AtomicReference<Map<String, String>>(null); + + final CollectionPropsWatcher w = + new CollectionPropsWatcher() { + @Override + public boolean onStateChanged(Map<String, String> collectionProperties) { + log.info("collection properties changed. 
Now: {}", collectionProperties); + final Map expected = expectedProps.get(); + if (expected != null && expected.equals(collectionProperties)) { + log.info("...new props match expected"); + sawExpectedProps.release(); + } + return false; + } + }; + cluster.getSolrClient().getZkStateReader().registerCollectionPropsWatcher(collectionName, w); - + collectionProps.setCollectionProperty(collectionName, "property1", "value1"); collectionProps.setCollectionProperty(collectionName, "property2", "value2"); waitForValue("property1", "value1", 5000); @@ -132,35 +132,42 @@ public boolean onStateChanged(Map collectionProperties) { collectionProps.setCollectionProperty(collectionName, "property2", "newValue"); waitForValue("property1", null, 5000); waitForValue("property2", "newValue", 5000); - + collectionProps.setCollectionProperty(collectionName, "property2", null); waitForValue("property2", null, 5000); - + collectionProps.setCollectionProperty(collectionName, "property2", null); // no change checkValue("property2", null); - assertTrue("Gave up waitng an excessive amount of time for watcher to see final expected props", - sawExpectedProps.tryAcquire(1, 120, TimeUnit.SECONDS)); + assertTrue( + "Gave up waitng an excessive amount of time for watcher to see final expected props", + sawExpectedProps.tryAcquire(1, 120, TimeUnit.SECONDS)); cluster.getSolrClient().getZkStateReader().removeCollectionPropsWatcher(collectionName, w); - + collectionProps.setCollectionProperty(collectionName, "property1", "value1"); - checkValue("property1", "value1"); //Should be no cache, so the change should take effect immediately - + // Should be no cache, so the change should take effect immediately + checkValue("property1", "value1"); } - + private void checkValue(String propertyName, String expectedValue) throws InterruptedException { - final Object value = cluster.getSolrClient().getZkStateReader().getCollectionProperties(collectionName).get(propertyName); + final Object value = + cluster + .getSolrClient() + .getZkStateReader() + .getCollectionProperties(collectionName) + .get(propertyName); assertEquals("Unexpected value for collection property: " + propertyName, expectedValue, value); } - private void waitForValue(String propertyName, String expectedValue, int timeout) throws InterruptedException { + private void waitForValue(String propertyName, String expectedValue, int timeout) + throws InterruptedException { final ZkStateReader zkStateReader = cluster.getSolrClient().getZkStateReader(); Object lastValueSeen = null; for (int i = 0; i < timeout; i += 10) { final Object value = zkStateReader.getCollectionProperties(collectionName).get(propertyName); - if ((expectedValue == null && value == null) || - (expectedValue != null && expectedValue.equals(value))) { + if ((expectedValue == null && value == null) + || (expectedValue != null && expectedValue.equals(value))) { return; } lastValueSeen = value; @@ -168,18 +175,36 @@ private void waitForValue(String propertyName, String expectedValue, int timeout } String collectionpropsInZk = null; try { - collectionpropsInZk = new String(cluster.getZkClient().getData("/collections/" + collectionName + "/collectionprops.json", null, null, true), StandardCharsets.UTF_8); + collectionpropsInZk = + new String( + cluster + .getZkClient() + .getData( + "/collections/" + collectionName + "/collectionprops.json", null, null, true), + StandardCharsets.UTF_8); } catch (Exception e) { collectionpropsInZk = "Could not get file from ZooKeeper: " + e.getMessage(); log.error("Could not get 
collectionprops from ZooKeeper for assertion message", e); } - - String propertiesInZkReader = cluster.getSolrClient().getZkStateReader().getCollectionProperties(collectionName).toString(); - fail(String.format(Locale.ROOT, "Could not see value change after setting collection property. Name: %s, current value: %s, expected value: %s. " + - "\ncollectionprops.json file in ZooKeeper: %s" + - "\nCollectionProperties in zkStateReader: %s", - propertyName, lastValueSeen, expectedValue, collectionpropsInZk, propertiesInZkReader)); + String propertiesInZkReader = + cluster + .getSolrClient() + .getZkStateReader() + .getCollectionProperties(collectionName) + .toString(); + + fail( + String.format( + Locale.ROOT, + "Could not see value change after setting collection property. Name: %s, current value: %s, expected value: %s. " + + "\ncollectionprops.json file in ZooKeeper: %s" + + "\nCollectionProperties in zkStateReader: %s", + propertyName, + lastValueSeen, + expectedValue, + collectionpropsInZk, + propertiesInZkReader)); } @Test @@ -190,7 +215,7 @@ public void testWatcher() throws KeeperException, InterruptedException, IOExcept // Add a watcher to collection props final Watcher watcher = new Watcher("Watcher", random().nextBoolean()); zkStateReader.registerCollectionPropsWatcher(collectionName, watcher); - assertEquals(0, watcher.waitForTrigger(TEST_NIGHTLY?2000:200)); + assertEquals(0, watcher.waitForTrigger(TEST_NIGHTLY ? 2000 : 200)); // Trigger a new znode event log.info("setting value1"); @@ -207,7 +232,9 @@ public void testWatcher() throws KeeperException, InterruptedException, IOExcept // Delete the properties znode log.info("deleting props"); - zkStateReader.getZkClient().delete("/collections/" + collectionName + "/collectionprops.json", -1, true); + zkStateReader + .getZkClient() + .delete("/collections/" + collectionName + "/collectionprops.json", -1, true); assertEquals(1, watcher.waitForTrigger()); final Map<String, String> props = watcher.getProps(); assertTrue(props.toString(), props.isEmpty()); @@ -217,7 +244,10 @@ public void testWatcher() throws KeeperException, InterruptedException, IOExcept zkStateReader.removeCollectionPropsWatcher(collectionName, watcher); log.info("setting value1 (again)"); collectionProps.setCollectionProperty(collectionName, "property", "value1"); - assertEquals("ZK watcher was triggered after it was removed for collection " + collectionName, 0, watcher.waitForTrigger()); + assertEquals( + "ZK watcher was triggered after it was removed for collection " + collectionName, + 0, + watcher.waitForTrigger()); } @Test @@ -234,7 +264,7 @@ public void testMultipleWatchers() throws InterruptedException, IOException { watcher1.waitForTrigger(); // this might still get triggered because of registerCore final Watcher watcher2 = new Watcher("Watcher2", random().nextBoolean()); zkStateReader.registerCollectionPropsWatcher(collectionName, watcher2); - assertEquals(0, watcher2.waitForTrigger(TEST_NIGHTLY?2000:200)); + assertEquals(0, watcher2.waitForTrigger(TEST_NIGHTLY ? 2000 : 200)); // Make sure a value change triggers both watchers log.info("setting value1"); @@ -267,7 +297,7 @@ public void testMultipleWatchers() throws InterruptedException, IOException { zkStateReader.removeCollectionPropsWatcher(collectionName, watcher1); log.info("setting value4"); collectionProps.setCollectionProperty(collectionName, "property", "value4"); - assertEquals(0, watcher1.waitForTrigger(TEST_NIGHTLY? 
2000 : 200)); } private class Watcher implements CollectionPropsWatcher { @@ -281,7 +311,7 @@ public Watcher(final String name, final boolean forceReadPropsFromZk) { this.forceReadPropsFromZk = forceReadPropsFromZk; log.info("Watcher '{}' initialized with forceReadPropsFromZk={}", name, forceReadPropsFromZk); } - + @Override public boolean onStateChanged(Map collectionProperties) { log.info("{}: state changed...", name); @@ -293,7 +323,7 @@ public boolean onStateChanged(Map collectionProperties) { props = Map.copyOf(collectionProperties); log.info("{}: Setting props from caller={}", name, props); } - + synchronized (this) { triggered.incrementAndGet(); log.info("{}: notifying", name); @@ -307,7 +337,7 @@ public boolean onStateChanged(Map collectionProperties) { private Map getProps() { return props; } - + private int waitForTrigger() throws InterruptedException { return waitForTrigger(1000); } diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionStateZnodeTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionStateZnodeTest.java index 6033e1ee0fc..3ff63d93d76 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CollectionStateZnodeTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/CollectionStateZnodeTest.java @@ -28,16 +28,14 @@ public class CollectionStateZnodeTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { - configureCluster(4) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(4).addConfig("conf", configset("cloud-minimal")).configure(); } - + @After public void afterTest() throws Exception { cluster.deleteAllCollections(); } - + @Test public void testZkNodeLocation() throws Exception { @@ -46,9 +44,13 @@ public void testZkNodeLocation() throws Exception { .process(cluster.getSolrClient()); cluster.waitForActiveCollection(collectionName, 2, 4); - - waitForState("Collection not created", collectionName, (n, c) -> DocCollection.isFullyActive(n, c, 2, 2)); - assertTrue("Collection path does not exist", + + waitForState( + "Collection not created", + collectionName, + (n, c) -> DocCollection.isFullyActive(n, c, 2, 2)); + assertTrue( + "Collection path does not exist", zkClient().exists(ZkStateReader.getCollectionPath(collectionName), true)); Stat stat = new Stat(); @@ -56,15 +58,17 @@ public void testZkNodeLocation() throws Exception { DocCollection c = getCollectionState(collectionName); - assertEquals("DocCollection version should equal the znode version", stat.getVersion(), c.getZNodeVersion() ); + assertEquals( + "DocCollection version should equal the znode version", + stat.getVersion(), + c.getZNodeVersion()); // remove collection CollectionAdminRequest.deleteCollection(collectionName).process(cluster.getSolrClient()); waitForState("Collection not deleted", collectionName, (n, coll) -> coll == null); - assertFalse("collection state should not exist", + assertFalse( + "collection state should not exist", zkClient().exists(ZkStateReader.getCollectionPath(collectionName), true)); - } } - diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java index 0653dc3c85f..810e6bc1010 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java @@ -16,6 +16,14 @@ */ package org.apache.solr.cloud; +import static java.util.Arrays.asList; +import static 
org.apache.solr.common.cloud.ZkStateReader.COLLECTION_DEF; +import static org.apache.solr.common.cloud.ZkStateReader.NRT_REPLICAS; +import static org.apache.solr.common.cloud.ZkStateReader.NUM_SHARDS_PROP; +import static org.apache.solr.common.params.CollectionAdminParams.COLLECTION; +import static org.apache.solr.common.params.CollectionAdminParams.DEFAULTS; + +import com.google.common.collect.ImmutableList; import java.io.IOException; import java.lang.invoke.MethodHandles; import java.nio.file.Path; @@ -29,8 +37,6 @@ import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; - -import com.google.common.collect.ImmutableList; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; import org.apache.solr.client.solrj.SolrClient; @@ -70,43 +76,35 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static java.util.Arrays.asList; -import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_DEF; -import static org.apache.solr.common.cloud.ZkStateReader.NRT_REPLICAS; -import static org.apache.solr.common.cloud.ZkStateReader.NUM_SHARDS_PROP; -import static org.apache.solr.common.params.CollectionAdminParams.COLLECTION; -import static org.apache.solr.common.params.CollectionAdminParams.DEFAULTS; - @LuceneTestCase.Slow public class CollectionsAPISolrJTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @Before public void beforeTest() throws Exception { - //System.setProperty("metricsEnabled", "true"); + // System.setProperty("metricsEnabled", "true"); configureCluster(4) - .addConfig("conf", configset("cloud-minimal")) - .addConfig("conf2", configset("cloud-dynamic")) - .configure(); - + .addConfig("conf", configset("cloud-minimal")) + .addConfig("conf2", configset("cloud-dynamic")) + .configure(); } - + @After public void afterTest() throws Exception { shutdownCluster(); } /** - * When a config name is not specified during collection creation, the _default should - * be used. + * When a config name is not specified during collection creation, the _default should be used. 
*/ @Test public void testCreateWithDefaultConfigSet() throws Exception { String collectionName = "solrj_default_configset"; - CollectionAdminResponse response = CollectionAdminRequest.createCollection(collectionName, 2, 2) - .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) - .process(cluster.getSolrClient()); - + CollectionAdminResponse response = + CollectionAdminRequest.createCollection(collectionName, 2, 2) + .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) + .process(cluster.getSolrClient()); + cluster.waitForActiveCollection(collectionName, 2, 4); assertEquals(0, response.getStatus()); @@ -115,49 +113,72 @@ public void testCreateWithDefaultConfigSet() throws Exception { assertEquals(4, coresStatus.size()); for (String coreName : coresStatus.keySet()) { NamedList<Integer> status = coresStatus.get(coreName); - assertEquals(0, (int)status.get("status")); + assertEquals(0, (int) status.get("status")); assertTrue(status.get("QTime") > 0); } - // Use of _default configset should generate a warning for data-driven functionality in production use - assertTrue(response.getWarning() != null && response.getWarning().contains("NOT RECOMMENDED for production use")); - - response = CollectionAdminRequest.deleteCollection(collectionName).process(cluster.getSolrClient()); + // Use of _default configset should generate a warning for data-driven functionality in + // production use + assertTrue( + response.getWarning() != null + && response.getWarning().contains("NOT RECOMMENDED for production use")); + + response = + CollectionAdminRequest.deleteCollection(collectionName).process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); assertTrue(response.isSuccess()); - Map<String, NamedList<Integer>> nodesStatus = response.getCollectionNodesStatus(); + Map<String, NamedList<Integer>> nodesStatus = response.getCollectionNodesStatus(); assertEquals(4, nodesStatus.size()); - waitForState("Expected " + collectionName + " to disappear from cluster state", collectionName, (n, c) -> c == null); + waitForState( + "Expected " + collectionName + " to disappear from cluster state", + collectionName, + (n, c) -> c == null); } @Test public void testCreateCollWithDefaultClusterPropertiesOldFormat() throws Exception { String COLL_NAME = "CollWithDefaultClusterProperties"; try { - V2Response rsp = new V2Request.Builder("/cluster") - .withMethod(SolrRequest.METHOD.POST) - .withPayload("{set-obj-property:{collectionDefaults:{numShards : 2 , nrtReplicas : 2}}}") - .build() - .process(cluster.getSolrClient()); + V2Response rsp = + new V2Request.Builder("/cluster") + .withMethod(SolrRequest.METHOD.POST) + .withPayload( + "{set-obj-property:{collectionDefaults:{numShards : 2 , nrtReplicas : 2}}}") + .build() + .process(cluster.getSolrClient()); for (int i = 0; i < 300; i++) { - Map<String, Object> m = cluster.getSolrClient().getZkStateReader().getClusterProperty(COLLECTION_DEF, null); + Map<String, Object> m = + cluster.getSolrClient().getZkStateReader().getClusterProperty(COLLECTION_DEF, null); if (m != null) break; Thread.sleep(10); } - Object clusterProperty = cluster.getSolrClient().getZkStateReader().getClusterProperty(ImmutableList.of(DEFAULTS, COLLECTION, NUM_SHARDS_PROP), null); + Object clusterProperty = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterProperty(ImmutableList.of(DEFAULTS, COLLECTION, NUM_SHARDS_PROP), null); assertEquals("2", String.valueOf(clusterProperty)); - clusterProperty = cluster.getSolrClient().getZkStateReader().getClusterProperty(ImmutableList.of(DEFAULTS, COLLECTION, NRT_REPLICAS), null); + clusterProperty = + cluster + 
.getSolrClient() + .getZkStateReader() + .getClusterProperty(ImmutableList.of(DEFAULTS, COLLECTION, NRT_REPLICAS), null); assertEquals("2", String.valueOf(clusterProperty)); - CollectionAdminResponse response = CollectionAdminRequest - .createCollection(COLL_NAME, "conf", null, null, null, null) - .process(cluster.getSolrClient()); + CollectionAdminResponse response = + CollectionAdminRequest.createCollection(COLL_NAME, "conf", null, null, null, null) + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); assertTrue(response.isSuccess()); - + cluster.waitForActiveCollection(COLL_NAME, 2, 4); - DocCollection coll = cluster.getSolrClient().getClusterStateProvider().getClusterState().getCollection(COLL_NAME); + DocCollection coll = + cluster + .getSolrClient() + .getClusterStateProvider() + .getClusterState() + .getCollection(COLL_NAME); Map slices = coll.getSlicesMap(); assertEquals(2, slices.size()); for (Slice slice : slices.values()) { @@ -166,82 +187,118 @@ public void testCreateCollWithDefaultClusterPropertiesOldFormat() throws Excepti CollectionAdminRequest.deleteCollection(COLL_NAME).process(cluster.getSolrClient()); // unset only a single value using old format - rsp = new V2Request.Builder("/cluster") - .withMethod(SolrRequest.METHOD.POST) - .withPayload("{\n" + - " \"set-obj-property\": {\n" + - " \"collectionDefaults\": {\n" + - " \"nrtReplicas\": null\n" + - " }\n" + - " }\n" + - "}") - .build() - .process(cluster.getSolrClient()); + rsp = + new V2Request.Builder("/cluster") + .withMethod(SolrRequest.METHOD.POST) + .withPayload( + "{\n" + + " \"set-obj-property\": {\n" + + " \"collectionDefaults\": {\n" + + " \"nrtReplicas\": null\n" + + " }\n" + + " }\n" + + "}") + .build() + .process(cluster.getSolrClient()); // assert that it is really gone in both old and new paths - // we use a timeout so that the change made in ZK is reflected in the watched copy inside ZkStateReader + // we use a timeout so that the change made in ZK is reflected in the watched copy inside + // ZkStateReader TimeOut timeOut = new TimeOut(5, TimeUnit.SECONDS, new TimeSource.NanoTimeSource()); - while (!timeOut.hasTimedOut()) { - clusterProperty = cluster.getSolrClient().getZkStateReader().getClusterProperty(ImmutableList.of(DEFAULTS, COLLECTION, NRT_REPLICAS), null); - if (clusterProperty == null) break; + while (!timeOut.hasTimedOut()) { + clusterProperty = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterProperty(ImmutableList.of(DEFAULTS, COLLECTION, NRT_REPLICAS), null); + if (clusterProperty == null) break; } assertNull(clusterProperty); - clusterProperty = cluster.getSolrClient().getZkStateReader().getClusterProperty(ImmutableList.of(COLLECTION_DEF, NRT_REPLICAS), null); + clusterProperty = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterProperty(ImmutableList.of(COLLECTION_DEF, NRT_REPLICAS), null); assertNull(clusterProperty); // delete all defaults the old way - rsp = new V2Request.Builder("/cluster") - .withMethod(SolrRequest.METHOD.POST) - .withPayload("{set-obj-property:{collectionDefaults:null}}") - .build() - .process(cluster.getSolrClient()); + rsp = + new V2Request.Builder("/cluster") + .withMethod(SolrRequest.METHOD.POST) + .withPayload("{set-obj-property:{collectionDefaults:null}}") + .build() + .process(cluster.getSolrClient()); // assert that it is really gone in both old and new paths timeOut = new TimeOut(5, TimeUnit.SECONDS, new TimeSource.NanoTimeSource()); while (!timeOut.hasTimedOut()) { - clusterProperty = 
cluster.getSolrClient().getZkStateReader().getClusterProperty(ImmutableList.of(DEFAULTS, COLLECTION, NUM_SHARDS_PROP), null); - if (clusterProperty == null) break; + clusterProperty = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterProperty(ImmutableList.of(DEFAULTS, COLLECTION, NUM_SHARDS_PROP), null); + if (clusterProperty == null) break; } assertNull(clusterProperty); - clusterProperty = cluster.getSolrClient().getZkStateReader().getClusterProperty(ImmutableList.of(COLLECTION_DEF, NUM_SHARDS_PROP), null); + clusterProperty = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterProperty(ImmutableList.of(COLLECTION_DEF, NUM_SHARDS_PROP), null); assertNull(clusterProperty); } finally { // clean up in case there was an exception during the test - V2Response rsp = new V2Request.Builder("/cluster") - .withMethod(SolrRequest.METHOD.POST) - .withPayload("{set-obj-property:{collectionDefaults: null}}") - .build() - .process(cluster.getSolrClient()); + V2Response rsp = + new V2Request.Builder("/cluster") + .withMethod(SolrRequest.METHOD.POST) + .withPayload("{set-obj-property:{collectionDefaults: null}}") + .build() + .process(cluster.getSolrClient()); } - } @Test public void testCreateCollWithDefaultClusterPropertiesNewFormat() throws Exception { String COLL_NAME = "CollWithDefaultClusterProperties"; try { - V2Response rsp = new V2Request.Builder("/cluster") - .withMethod(SolrRequest.METHOD.POST) - .withPayload("{set-obj-property:{defaults : {collection:{numShards : 2 , nrtReplicas : 2}}}}") - .build() - .process(cluster.getSolrClient()); + V2Response rsp = + new V2Request.Builder("/cluster") + .withMethod(SolrRequest.METHOD.POST) + .withPayload( + "{set-obj-property:{defaults : {collection:{numShards : 2 , nrtReplicas : 2}}}}") + .build() + .process(cluster.getSolrClient()); for (int i = 0; i < 300; i++) { - Map m = cluster.getSolrClient().getZkStateReader().getClusterProperty(COLLECTION_DEF, null); + Map m = + cluster.getSolrClient().getZkStateReader().getClusterProperty(COLLECTION_DEF, null); if (m != null) break; Thread.sleep(10); } - Object clusterProperty = cluster.getSolrClient().getZkStateReader().getClusterProperty(ImmutableList.of(DEFAULTS, COLLECTION, NUM_SHARDS_PROP), null); + Object clusterProperty = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterProperty(ImmutableList.of(DEFAULTS, COLLECTION, NUM_SHARDS_PROP), null); assertEquals("2", String.valueOf(clusterProperty)); - clusterProperty = cluster.getSolrClient().getZkStateReader().getClusterProperty(ImmutableList.of(DEFAULTS, COLLECTION, NRT_REPLICAS), null); + clusterProperty = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterProperty(ImmutableList.of(DEFAULTS, COLLECTION, NRT_REPLICAS), null); assertEquals("2", String.valueOf(clusterProperty)); - CollectionAdminResponse response = CollectionAdminRequest - .createCollection(COLL_NAME, "conf", null, null, null, null) - .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) - .process(cluster.getSolrClient()); + CollectionAdminResponse response = + CollectionAdminRequest.createCollection(COLL_NAME, "conf", null, null, null, null) + .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); assertTrue(response.isSuccess()); cluster.waitForActiveCollection(COLL_NAME, 2, 4); - DocCollection coll = cluster.getSolrClient().getClusterStateProvider().getClusterState().getCollection(COLL_NAME); + DocCollection coll = + cluster + .getSolrClient() + 
.getClusterStateProvider() + .getClusterState() + .getCollection(COLL_NAME); Map slices = coll.getSlicesMap(); assertEquals(2, slices.size()); for (Slice slice : slices.values()) { @@ -250,57 +307,73 @@ public void testCreateCollWithDefaultClusterPropertiesNewFormat() throws Excepti CollectionAdminRequest.deleteCollection(COLL_NAME).process(cluster.getSolrClient()); // unset only a single value - rsp = new V2Request.Builder("/cluster") - .withMethod(SolrRequest.METHOD.POST) - .withPayload("{\n" + - " \"set-obj-property\": {\n" + - " \"defaults\" : {\n" + - " \"collection\": {\n" + - " \"nrtReplicas\": null\n" + - " }\n" + - " }\n" + - " }\n" + - "}") - .build() - .process(cluster.getSolrClient()); - // we use a timeout so that the change made in ZK is reflected in the watched copy inside ZkStateReader + rsp = + new V2Request.Builder("/cluster") + .withMethod(SolrRequest.METHOD.POST) + .withPayload( + "{\n" + + " \"set-obj-property\": {\n" + + " \"defaults\" : {\n" + + " \"collection\": {\n" + + " \"nrtReplicas\": null\n" + + " }\n" + + " }\n" + + " }\n" + + "}") + .build() + .process(cluster.getSolrClient()); + // we use a timeout so that the change made in ZK is reflected in the watched copy inside + // ZkStateReader TimeOut timeOut = new TimeOut(5, TimeUnit.SECONDS, new TimeSource.NanoTimeSource()); - while (!timeOut.hasTimedOut()) { - clusterProperty = cluster.getSolrClient().getZkStateReader().getClusterProperty(ImmutableList.of(DEFAULTS, COLLECTION, NRT_REPLICAS), null); - if (clusterProperty == null) break; + while (!timeOut.hasTimedOut()) { + clusterProperty = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterProperty(ImmutableList.of(DEFAULTS, COLLECTION, NRT_REPLICAS), null); + if (clusterProperty == null) break; } assertNull(clusterProperty); - rsp = new V2Request.Builder("/cluster") - .withMethod(SolrRequest.METHOD.POST) - .withPayload("{set-obj-property:{defaults: {collection:null}}}") - .build() - .process(cluster.getSolrClient()); + rsp = + new V2Request.Builder("/cluster") + .withMethod(SolrRequest.METHOD.POST) + .withPayload("{set-obj-property:{defaults: {collection:null}}}") + .build() + .process(cluster.getSolrClient()); // assert that it is really gone in both old and new paths timeOut = new TimeOut(5, TimeUnit.SECONDS, new TimeSource.NanoTimeSource()); while (!timeOut.hasTimedOut()) { - clusterProperty = cluster.getSolrClient().getZkStateReader().getClusterProperty(ImmutableList.of(DEFAULTS, COLLECTION, NUM_SHARDS_PROP), null); - if (clusterProperty == null) break; + clusterProperty = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterProperty(ImmutableList.of(DEFAULTS, COLLECTION, NUM_SHARDS_PROP), null); + if (clusterProperty == null) break; } assertNull(clusterProperty); - clusterProperty = cluster.getSolrClient().getZkStateReader().getClusterProperty(ImmutableList.of(COLLECTION_DEF, NUM_SHARDS_PROP), null); + clusterProperty = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterProperty(ImmutableList.of(COLLECTION_DEF, NUM_SHARDS_PROP), null); assertNull(clusterProperty); } finally { - V2Response rsp = new V2Request.Builder("/cluster") - .withMethod(SolrRequest.METHOD.POST) - .withPayload("{set-obj-property:{defaults: null}}") - .build() - .process(cluster.getSolrClient()); - + V2Response rsp = + new V2Request.Builder("/cluster") + .withMethod(SolrRequest.METHOD.POST) + .withPayload("{set-obj-property:{defaults: null}}") + .build() + .process(cluster.getSolrClient()); } - } @Test public void testCreateAndDeleteCollection() 
throws Exception { String collectionName = "solrj_test"; - CollectionAdminResponse response = CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2) - .process(cluster.getSolrClient()); + CollectionAdminResponse response = + CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2) + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); assertTrue(response.isSuccess()); @@ -308,47 +381,59 @@ public void testCreateAndDeleteCollection() throws Exception { assertEquals(4, coresStatus.size()); for (String coreName : coresStatus.keySet()) { NamedList<Integer> status = coresStatus.get(coreName); - assertEquals(0, (int)status.get("status")); + assertEquals(0, (int) status.get("status")); assertTrue(status.get("QTime") > 0); } - response = CollectionAdminRequest.deleteCollection(collectionName).process(cluster.getSolrClient()); + response = + CollectionAdminRequest.deleteCollection(collectionName).process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); assertTrue(response.isSuccess()); - Map<String, NamedList<Integer>> nodesStatus = response.getCollectionNodesStatus(); + Map<String, NamedList<Integer>> nodesStatus = response.getCollectionNodesStatus(); assertEquals(4, nodesStatus.size()); - waitForState("Expected " + collectionName + " to disappear from cluster state", collectionName, (n, c) -> c == null); + waitForState( + "Expected " + collectionName + " to disappear from cluster state", + collectionName, + (n, c) -> c == null); // Test Creating a new collection. collectionName = "solrj_test2"; - response = CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2) - .process(cluster.getSolrClient()); + response = + CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2) + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); assertTrue(response.isSuccess()); - waitForState("Expected " + collectionName + " to appear in cluster state", collectionName, (n, c) -> c != null); + waitForState( + "Expected " + collectionName + " to appear in cluster state", + collectionName, + (n, c) -> c != null); } @Test public void testCloudInfoInCoreStatus() throws IOException, SolrServerException { String collectionName = "corestatus_test"; - CollectionAdminResponse response = CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2) - .process(cluster.getSolrClient()); + CollectionAdminResponse response = + CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2) + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); assertTrue(response.isSuccess()); - + cluster.waitForActiveCollection(collectionName, 2, 4); - + String nodeName = (String) response._get("success[0]/key", null); String corename = (String) response._get(asList("success", nodeName, "core"), null); - try (HttpSolrClient coreclient = getHttpSolrClient(cluster.getSolrClient().getZkStateReader().getBaseUrlForNodeName(nodeName))) { + try (HttpSolrClient coreclient = + getHttpSolrClient( + cluster.getSolrClient().getZkStateReader().getBaseUrlForNodeName(nodeName))) { CoreAdminResponse status = CoreAdminRequest.getStatus(corename, coreclient); - assertEquals(collectionName, status._get(asList("status", corename, "cloud", "collection"), null)); + assertEquals( + collectionName, status._get(asList("status", corename, "cloud", "collection"), null)); assertNotNull(status._get(asList("status", corename, "cloud", "shard"), null)); assertNotNull(status._get(asList("status", corename, "cloud", "replica"), null)); } @@ -359,26 +444,35 @@ public void 
testCloudInfoInCoreStatus() throws IOException, SolrServerException public void testCreateAndDeleteShard() throws Exception { // Create an implicit collection String collectionName = "solrj_implicit"; - CollectionAdminResponse response - = CollectionAdminRequest.createCollectionWithImplicitRouter(collectionName, "conf", "shardA,shardB", 1, 1, 1) - .process(cluster.getSolrClient()); + CollectionAdminResponse response = + CollectionAdminRequest.createCollectionWithImplicitRouter( + collectionName, "conf", "shardA,shardB", 1, 1, 1) + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); assertTrue(response.isSuccess()); - + cluster.waitForActiveCollection(collectionName, 2, 6); - + Map<String, NamedList<Integer>> coresStatus = response.getCollectionCoresStatus(); assertEquals(6, coresStatus.size()); // Add a shard to the implicit collection - response = CollectionAdminRequest.createShard(collectionName, "shardC").process(cluster.getSolrClient()); + response = + CollectionAdminRequest.createShard(collectionName, "shardC") + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); assertTrue(response.isSuccess()); - - cluster.getSolrClient().waitForState(collectionName, 30, TimeUnit.SECONDS, (l,c) -> c != null && c.getSlice("shardC") != null); - + + cluster + .getSolrClient() + .waitForState( + collectionName, + 30, + TimeUnit.SECONDS, + (l, c) -> c != null && c.getSlice("shardC") != null); + coresStatus = response.getCollectionCoresStatus(); assertEquals(3, coresStatus.size()); int replicaTlog = 0; @@ -394,7 +488,9 @@ public void testCreateAndDeleteShard() throws Exception { assertEquals(1, replicaTlog); assertEquals(1, replicaPull); - response = CollectionAdminRequest.deleteShard(collectionName, "shardC").process(cluster.getSolrClient()); + response = + CollectionAdminRequest.deleteShard(collectionName, "shardC") + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); assertTrue(response.isSuccess()); @@ -410,8 +506,9 @@ public void testCreateAndDeleteAlias() throws IOException, SolrServerException { .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) .process(cluster.getSolrClient()); - CollectionAdminResponse response - = CollectionAdminRequest.createAlias("solrj_alias", collection).process(cluster.getSolrClient()); + CollectionAdminResponse response = + CollectionAdminRequest.createAlias("solrj_alias", collection) + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); response = CollectionAdminRequest.deleteAlias("solrj_alias").process(cluster.getSolrClient()); @@ -427,10 +524,11 @@ public void testSplitShard() throws Exception { .process(cluster.getSolrClient()); cluster.waitForActiveCollection(collectionName, 2, 2); - - CollectionAdminResponse response = CollectionAdminRequest.splitShard(collectionName) - .setShardName("shard1") - .process(cluster.getSolrClient()); + + CollectionAdminResponse response = + CollectionAdminRequest.splitShard(collectionName) + .setShardName("shard1") + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); assertTrue(response.isSuccess()); @@ -445,26 +543,30 @@ public void testSplitShard() throws Exception { assertEquals(1, shard10); assertEquals(1, shard11); - waitForState("Expected all shards to be active and parent shard to be removed", collectionName, (n, c) -> { - if (c.getSlice("shard1").getState() == Slice.State.ACTIVE) - return false; - for (Replica r : c.getReplicas()) { - if (r.isActive(n) == false) - return false; - } - return true; - }); + waitForState( + "Expected all 
shards to be active and parent shard to be removed", + collectionName, + (n, c) -> { + if (c.getSlice("shard1").getState() == Slice.State.ACTIVE) return false; + for (Replica r : c.getReplicas()) { + if (r.isActive(n) == false) return false; + } + return true; + }); // Test splitting using split.key - response = CollectionAdminRequest.splitShard(collectionName) - .setSplitKey("b!") - .process(cluster.getSolrClient()); + response = + CollectionAdminRequest.splitShard(collectionName) + .setSplitKey("b!") + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); assertTrue(response.isSuccess()); - waitForState("Expected 5 slices to be active", collectionName, (n, c) -> c.getActiveSlices().size() == 5); - + waitForState( + "Expected 5 slices to be active", + collectionName, + (n, c) -> c.getActiveSlices().size() == 5); } @Test @@ -477,17 +579,18 @@ public void testCreateCollectionWithPropertyParam() throws Exception { Path ulogDir = tmpDir.resolve("ulogDir-" + TestUtil.randomSimpleString(random(), 1, 5)); cluster.getJettySolrRunners().forEach(j -> j.getCoreContainer().getAllowPaths().add(tmpDir)); - CollectionAdminResponse response = CollectionAdminRequest.createCollection(collectionName, "conf", 1, 1) - .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) - .withProperty(CoreAdminParams.DATA_DIR, dataDir.toString()) - .withProperty(CoreAdminParams.ULOG_DIR, ulogDir.toString()) - .process(cluster.getSolrClient()); + CollectionAdminResponse response = + CollectionAdminRequest.createCollection(collectionName, "conf", 1, 1) + .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) + .withProperty(CoreAdminParams.DATA_DIR, dataDir.toString()) + .withProperty(CoreAdminParams.ULOG_DIR, ulogDir.toString()) + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); assertTrue(response.isSuccess()); - + cluster.waitForActiveCollection(collectionName, 1, 1); - + Map<String, NamedList<Integer>> coresStatus = response.getCollectionCoresStatus(); assertEquals(1, coresStatus.size()); @@ -497,7 +600,6 @@ public void testCreateCollectionWithPropertyParam() throws Exception { CoreStatus coreStatus = getCoreStatus(replica1); assertEquals(Paths.get(coreStatus.getDataDirectory()).toString(), dataDir.toString()); - } @Test @@ -507,40 +609,45 @@ public void testAddAndDeleteReplica() throws Exception { CollectionAdminRequest.createCollection(collectionName, "conf", 1, 2) .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) .process(cluster.getSolrClient()); - + cluster.waitForActiveCollection(collectionName, 1, 2); - ArrayList<String> nodeList - = new ArrayList<>(cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes()); + ArrayList<String> nodeList = + new ArrayList<>( + cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes()); Collections.shuffle(nodeList, random()); final String node = nodeList.get(0); - CollectionAdminResponse response = CollectionAdminRequest.addReplicaToShard(collectionName, "shard1") - .setNode(node) - .process(cluster.getSolrClient()); - + CollectionAdminResponse response = + CollectionAdminRequest.addReplicaToShard(collectionName, "shard1") + .setNode(node) + .process(cluster.getSolrClient()); + cluster.waitForActiveCollection(collectionName, 1, 3); - + Replica newReplica = grabNewReplica(response, getCollectionState(collectionName)); assertEquals(0, response.getStatus()); assertTrue(response.isSuccess()); assertTrue(newReplica.getNodeName().equals(node)); // Test DELETEREPLICA - response = 
CollectionAdminRequest.deleteReplica(collectionName, "shard1", newReplica.getName()) - .process(cluster.getSolrClient()); + response = + CollectionAdminRequest.deleteReplica(collectionName, "shard1", newReplica.getName()) + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); - waitForState("Expected replica " + newReplica.getName() + " to vanish from cluster state", collectionName, + waitForState( + "Expected replica " + newReplica.getName() + " to vanish from cluster state", + collectionName, (n, c) -> c.getSlice("shard1").getReplica(newReplica.getName()) == null); - } private Replica grabNewReplica(CollectionAdminResponse response, DocCollection docCollection) { String replicaName = response.getCollectionCoresStatus().keySet().iterator().next(); - Optional optional = docCollection.getReplicas().stream() - .filter(replica -> replicaName.equals(replica.getCoreName())) - .findAny(); + Optional optional = + docCollection.getReplicas().stream() + .filter(replica -> replicaName.equals(replica.getCoreName())) + .findAny(); if (optional.isPresent()) { return optional.get(); } @@ -552,20 +659,32 @@ public void testClusterProp() throws InterruptedException, IOException, SolrServ // sanity check our expected default final ClusterProperties props = new ClusterProperties(zkClient()); - - CollectionAdminResponse response = CollectionAdminRequest.setClusterProperty(ZkStateReader.MAX_CORES_PER_NODE, "42") - .process(cluster.getSolrClient()); + + CollectionAdminResponse response = + CollectionAdminRequest.setClusterProperty(ZkStateReader.MAX_CORES_PER_NODE, "42") + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); - assertEquals("Cluster property was not set", props.getClusterProperty(ZkStateReader.MAX_CORES_PER_NODE, null), "42"); + assertEquals( + "Cluster property was not set", + props.getClusterProperty(ZkStateReader.MAX_CORES_PER_NODE, null), + "42"); // Unset ClusterProp that we set. 
- CollectionAdminRequest.setClusterProperty(ZkStateReader.MAX_CORES_PER_NODE, null).process(cluster.getSolrClient()); - assertEquals("Cluster property was not unset", props.getClusterProperty(ZkStateReader.MAX_CORES_PER_NODE, null), null); - - response = CollectionAdminRequest.setClusterProperty(ZkStateReader.MAX_CORES_PER_NODE, "1") + CollectionAdminRequest.setClusterProperty(ZkStateReader.MAX_CORES_PER_NODE, null) .process(cluster.getSolrClient()); + assertEquals( + "Cluster property was not unset", + props.getClusterProperty(ZkStateReader.MAX_CORES_PER_NODE, null), + null); + + response = + CollectionAdminRequest.setClusterProperty(ZkStateReader.MAX_CORES_PER_NODE, "1") + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); - assertEquals("Cluster property was not set", props.getClusterProperty(ZkStateReader.MAX_CORES_PER_NODE, null), "1"); + assertEquals( + "Cluster property was not set", + props.getClusterProperty(ZkStateReader.MAX_CORES_PER_NODE, null), + "1"); } @Test @@ -576,7 +695,7 @@ public void testCollectionProp() throws InterruptedException, IOException, SolrS CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2) .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) .process(cluster.getSolrClient()); - + cluster.waitForActiveCollection(collectionName, 2, 4); // Check for value change @@ -590,11 +709,19 @@ public void testCollectionProp() throws InterruptedException, IOException, SolrS checkCollectionProperty(collectionName, propName, null, 3000); } - private void checkCollectionProperty(String collection, String propertyName, String propertyValue, long timeoutMs) throws InterruptedException { + private void checkCollectionProperty( + String collection, String propertyName, String propertyValue, long timeoutMs) + throws InterruptedException { TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); - while (!timeout.hasTimedOut()){ + while (!timeout.hasTimedOut()) { Thread.sleep(10); - if (Objects.equals(cluster.getSolrClient().getZkStateReader().getCollectionProperties(collection).get(propertyName), propertyValue)) { + if (Objects.equals( + cluster + .getSolrClient() + .getZkStateReader() + .getCollectionProperties(collection) + .get(propertyName), + propertyValue)) { return; } } @@ -645,16 +772,26 @@ public void testColStatus() throws Exception { CollectionAdminResponse rsp = req.process(cluster.getSolrClient()); assertEquals(0, rsp.getStatus()); @SuppressWarnings({"unchecked"}) - List nonCompliant = (List)rsp.getResponse().findRecursive(collectionName, "schemaNonCompliant"); + List nonCompliant = + (List) rsp.getResponse().findRecursive(collectionName, "schemaNonCompliant"); assertEquals(nonCompliant.toString(), 1, nonCompliant.size()); assertTrue(nonCompliant.toString(), nonCompliant.contains("(NONE)")); @SuppressWarnings({"unchecked"}) - NamedList segInfos = (NamedList) rsp.getResponse().findRecursive(collectionName, "shards", "shard1", "leader", "segInfos"); + NamedList segInfos = + (NamedList) + rsp.getResponse() + .findRecursive(collectionName, "shards", "shard1", "leader", "segInfos"); assertNotNull(Utils.toJSONString(rsp), segInfos.findRecursive("info", "core", "startTime")); assertNotNull(Utils.toJSONString(rsp), segInfos.get("fieldInfoLegend")); - assertNotNull(Utils.toJSONString(rsp), segInfos.findRecursive("segments", "_0", "fields", "id", "flags")); + assertNotNull( + Utils.toJSONString(rsp), segInfos.findRecursive("segments", "_0", "fields", "id", "flags")); // test for replicas not 
active - SOLR-13882 - DocCollection coll = cluster.getSolrClient().getClusterStateProvider().getClusterState().getCollection(collectionName); + DocCollection coll = + cluster + .getSolrClient() + .getClusterStateProvider() + .getClusterState() + .getCollection(collectionName); Replica firstReplica = coll.getSlice("shard1").getReplicas().iterator().next(); String firstNode = firstReplica.getNodeName(); for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { @@ -664,8 +801,11 @@ public void testColStatus() throws Exception { } rsp = req.process(cluster.getSolrClient()); assertEquals(0, rsp.getStatus()); - Number down = (Number) rsp.getResponse().findRecursive(collectionName, "shards", "shard1", "replicas", "down"); - assertTrue("should be some down replicas, but there were none in shard1:" + rsp, down.intValue() > 0); + Number down = + (Number) + rsp.getResponse().findRecursive(collectionName, "shards", "shard1", "replicas", "down"); + assertTrue( + "should be some down replicas, but there were none in shard1:" + rsp, down.intValue() > 0); // test for a collection with implicit router String implicitColl = "implicitColl"; @@ -678,40 +818,44 @@ public void testColStatus() throws Exception { req = CollectionAdminRequest.collectionStatus(implicitColl); rsp = req.process(cluster.getSolrClient()); assertNotNull(rsp.getResponse().get(implicitColl)); - assertNotNull(rsp.toString(), rsp.getResponse().findRecursive(implicitColl, "shards", "shardA")); - assertNotNull(rsp.toString(), rsp.getResponse().findRecursive(implicitColl, "shards", "shardB")); + assertNotNull( + rsp.toString(), rsp.getResponse().findRecursive(implicitColl, "shards", "shardA")); + assertNotNull( + rsp.toString(), rsp.getResponse().findRecursive(implicitColl, "shards", "shardB")); } - + @Test public void testColStatusCollectionName() throws Exception { final String[] collectionNames = {"collectionStatusTest_1", "collectionStatusTest_2"}; for (String collectionName : collectionNames) { CollectionAdminRequest.createCollection(collectionName, "conf2", 1, 1) - .process(cluster.getSolrClient()); + .process(cluster.getSolrClient()); cluster.waitForActiveCollection(collectionName, 1, 1); } // assert only one collection is returned using the solrj colstatus interface - CollectionAdminRequest.ColStatus req = CollectionAdminRequest.collectionStatus(collectionNames[0]); + CollectionAdminRequest.ColStatus req = + CollectionAdminRequest.collectionStatus(collectionNames[0]); CollectionAdminResponse rsp = req.process(cluster.getSolrClient()); assertNotNull(rsp.getResponse().get(collectionNames[0])); assertNull(rsp.getResponse().get(collectionNames[1])); - + req = CollectionAdminRequest.collectionStatus(collectionNames[1]); rsp = req.process(cluster.getSolrClient()); assertNotNull(rsp.getResponse().get(collectionNames[1])); assertNull(rsp.getResponse().get(collectionNames[0])); - + // assert passing null collection fails - expectThrows(NullPointerException.class, + expectThrows( + NullPointerException.class, "Passing null to collectionStatus should result in an NPE", () -> CollectionAdminRequest.collectionStatus(null)); - + // assert passing non-existent collection returns no collections req = CollectionAdminRequest.collectionStatus("doesNotExist"); rsp = req.process(cluster.getSolrClient()); assertNull(rsp.getResponse().get(collectionNames[0])); assertNull(rsp.getResponse().get(collectionNames[1])); - + // assert collectionStatuses returns all collections req = CollectionAdminRequest.collectionStatuses(); rsp = 
req.process(cluster.getSolrClient()); @@ -726,8 +870,7 @@ public void testReadOnlyCollection() throws Exception { final String collectionName = "readOnlyTest"; CloudSolrClient solrClient = cluster.getSolrClient(); - CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2) - .process(solrClient); + CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2).process(solrClient); solrClient.setDefaultCollection(collectionName); @@ -751,38 +894,47 @@ public void testReadOnlyCollection() throws Exception { } solrClient.add(docs); - Replica leader - = solrClient.getZkStateReader().getLeaderRetry(collectionName, "shard1", DEFAULT_TIMEOUT); + Replica leader = + solrClient.getZkStateReader().getLeaderRetry(collectionName, "shard1", DEFAULT_TIMEOUT); - final AtomicReference<Long> coreStartTime = new AtomicReference<>(getCoreStatus(leader).getCoreStartTime().getTime()); + final AtomicReference<Long> coreStartTime = + new AtomicReference<>(getCoreStatus(leader).getCoreStartTime().getTime()); // Check for value change - CollectionAdminRequest.modifyCollection(collectionName, - Collections.singletonMap(ZkStateReader.READ_ONLY, "true")) + CollectionAdminRequest.modifyCollection( + collectionName, Collections.singletonMap(ZkStateReader.READ_ONLY, "true")) .process(solrClient); - DocCollection coll = solrClient.getZkStateReader().getClusterState().getCollection(collectionName); + DocCollection coll = + solrClient.getZkStateReader().getClusterState().getCollection(collectionName); assertNotNull(coll.toString(), coll.getProperties().get(ZkStateReader.READ_ONLY)); - assertEquals(coll.toString(), coll.getProperties().get(ZkStateReader.READ_ONLY).toString(), "true"); + assertEquals( + coll.toString(), "true", coll.getProperties().get(ZkStateReader.READ_ONLY).toString()); // wait for the expected collection reload - RetryUtil.retryUntil("Timed out waiting for core to reload", 30, 1000, TimeUnit.MILLISECONDS, () -> { - long restartTime = 0; - try { - restartTime = getCoreStatus(leader).getCoreStartTime().getTime(); - } catch (Exception e) { - log.warn("Exception getting core start time: ", e); - return false; - } - return restartTime > coreStartTime.get(); - }); + RetryUtil.retryUntil( + "Timed out waiting for core to reload", + 30, + 1000, + TimeUnit.MILLISECONDS, + () -> { + long restartTime = 0; + try { + restartTime = getCoreStatus(leader).getCoreStartTime().getTime(); + } catch (Exception e) { + log.warn("Exception getting core start time: ", e); + return false; + } + return restartTime > coreStartTime.get(); + }); coreStartTime.set(getCoreStatus(leader).getCoreStartTime().getTime()); // check for docs - reloading should have committed the new docs // this also verifies that searching works in read-only mode rsp = solrClient.query(params(CommonParams.Q, "*:*")); - assertEquals("num docs after turning on read-only", NUM_DOCS * 2, rsp.getResults().getNumFound()); + assertEquals( + "num docs after turning on read-only", NUM_DOCS * 2, rsp.getResults().getNumFound()); // try sending updates try { @@ -824,23 +976,29 @@ public void testReadOnlyCollection() throws Exception { // Check for removing value // setting to empty string is equivalent to removing the property, see SOLR-12507 - CollectionAdminRequest.modifyCollection(collectionName, - Collections.singletonMap(ZkStateReader.READ_ONLY, "")) + CollectionAdminRequest.modifyCollection( + collectionName, Collections.singletonMap(ZkStateReader.READ_ONLY, "")) .process(cluster.getSolrClient()); - coll =
cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(collectionName); + coll = + cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(collectionName); assertNull(coll.toString(), coll.getProperties().get(ZkStateReader.READ_ONLY)); // wait for the expected collection reload - RetryUtil.retryUntil("Timed out waiting for core to reload", 30, 1000, TimeUnit.MILLISECONDS, () -> { - long restartTime = 0; - try { - restartTime = getCoreStatus(leader).getCoreStartTime().getTime(); - } catch (Exception e) { - log.warn("Exception getting core start time: ", e); - return false; - } - return restartTime > coreStartTime.get(); - }); + RetryUtil.retryUntil( + "Timed out waiting for core to reload", + 30, + 1000, + TimeUnit.MILLISECONDS, + () -> { + long restartTime = 0; + try { + restartTime = getCoreStatus(leader).getCoreStartTime().getTime(); + } catch (Exception e) { + log.warn("Exception getting core start time: ", e); + return false; + } + return restartTime > coreStartTime.get(); + }); // check that updates are working now docs.clear(); @@ -850,7 +1008,8 @@ public void testReadOnlyCollection() throws Exception { solrClient.add(docs); solrClient.commit(); rsp = solrClient.query(params(CommonParams.Q, "*:*")); - assertEquals("num docs after turning off read-only", NUM_DOCS * 3, rsp.getResults().getNumFound()); + assertEquals( + "num docs after turning off read-only", NUM_DOCS * 3, rsp.getResults().getNumFound()); } @Test @@ -869,59 +1028,94 @@ public void testRenameCollection() throws Exception { private void doTestRenameCollection(boolean followAliases) throws Exception { String collectionName1 = "testRename1_" + followAliases; String collectionName2 = "testRename2_" + followAliases; - CollectionAdminRequest.createCollection(collectionName1, "conf", 1, 1).setAlias("col1").process(cluster.getSolrClient()); - CollectionAdminRequest.createCollection(collectionName2, "conf", 1, 1).setAlias("col2").process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection(collectionName1, "conf", 1, 1) + .setAlias("col1") + .process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection(collectionName2, "conf", 1, 1) + .setAlias("col2") + .process(cluster.getSolrClient()); cluster.waitForActiveCollection(collectionName1, 1, 1); cluster.waitForActiveCollection(collectionName2, 1, 1); - waitForState("Expected collection1 to be created with 1 shard and 1 replica", collectionName1, clusterShape(1, 1)); - waitForState("Expected collection2 to be created with 1 shard and 1 replica", collectionName2, clusterShape(1, 1)); + waitForState( + "Expected collection1 to be created with 1 shard and 1 replica", + collectionName1, + clusterShape(1, 1)); + waitForState( + "Expected collection2 to be created with 1 shard and 1 replica", + collectionName2, + clusterShape(1, 1)); - CollectionAdminRequest.createAlias("compoundAlias", "col1,col2").process(cluster.getSolrClient()); + CollectionAdminRequest.createAlias("compoundAlias", "col1,col2") + .process(cluster.getSolrClient()); CollectionAdminRequest.createAlias("simpleAlias", "col1").process(cluster.getSolrClient()); - CollectionAdminRequest.createCategoryRoutedAlias("catAlias", "field1", 100, - CollectionAdminRequest.createCollection("_unused_", "conf", 1, 1)).process(cluster.getSolrClient()); + CollectionAdminRequest.createCategoryRoutedAlias( + "catAlias", + "field1", + 100, + CollectionAdminRequest.createCollection("_unused_", "conf", 1, 1)) + .process(cluster.getSolrClient()); CollectionAdminRequest.Rename 
rename = CollectionAdminRequest.renameCollection("col1", "foo"); rename.setFollowAliases(followAliases); ZkStateReader zkStateReader = cluster.getSolrClient().getZkStateReader(); Aliases aliases; if (!followAliases) { - Exception e = assertThrows(Exception.class, () -> - rename.process(cluster.getSolrClient())); + Exception e = assertThrows(Exception.class, () -> rename.process(cluster.getSolrClient())); assertTrue(e.toString(), e.toString().contains("source collection 'col1' not found")); } else { rename.process(cluster.getSolrClient()); zkStateReader.aliasesManager.update(); aliases = zkStateReader.getAliases(); - assertEquals(aliases.getCollectionAliasListMap().toString(), collectionName1, aliases.resolveSimpleAlias("foo")); - assertEquals(aliases.getCollectionAliasListMap().toString(), collectionName1, aliases.resolveSimpleAlias("simpleAlias")); + assertEquals( + aliases.getCollectionAliasListMap().toString(), + collectionName1, + aliases.resolveSimpleAlias("foo")); + assertEquals( + aliases.getCollectionAliasListMap().toString(), + collectionName1, + aliases.resolveSimpleAlias("simpleAlias")); List<String> compoundAliases = aliases.resolveAliases("compoundAlias"); assertEquals(compoundAliases.toString(), 2, compoundAliases.size()); assertTrue(compoundAliases.toString(), compoundAliases.contains(collectionName1)); assertTrue(compoundAliases.toString(), compoundAliases.contains(collectionName2)); } - CollectionAdminRequest.renameCollection(collectionName1, collectionName2).process(cluster.getSolrClient()); + CollectionAdminRequest.renameCollection(collectionName1, collectionName2) + .process(cluster.getSolrClient()); zkStateReader.aliasesManager.update(); aliases = zkStateReader.getAliases(); if (followAliases) { - assertEquals(aliases.getCollectionAliasListMap().toString(), collectionName2, aliases.resolveSimpleAlias("foo")); + assertEquals( + aliases.getCollectionAliasListMap().toString(), + collectionName2, + aliases.resolveSimpleAlias("foo")); } - assertEquals(aliases.getCollectionAliasListMap().toString(), collectionName2, aliases.resolveSimpleAlias("simpleAlias")); - assertEquals(aliases.getCollectionAliasListMap().toString(), collectionName2, aliases.resolveSimpleAlias(collectionName1)); - // we renamed col1 -> col2 so the compound alias contains only "col2,col2" which is reduced to col2 + assertEquals( + aliases.getCollectionAliasListMap().toString(), + collectionName2, + aliases.resolveSimpleAlias("simpleAlias")); + assertEquals( + aliases.getCollectionAliasListMap().toString(), + collectionName2, + aliases.resolveSimpleAlias(collectionName1)); + // we renamed col1 -> col2 so the compound alias contains only "col2,col2" which is reduced to + // col2 List<String> compoundAliases = aliases.resolveAliases("compoundAlias"); assertEquals(compoundAliases.toString(), 1, compoundAliases.size()); assertTrue(compoundAliases.toString(), compoundAliases.contains(collectionName2)); - CollectionAdminRequest.Rename catRename = CollectionAdminRequest.renameCollection("catAlias", "bar"); + CollectionAdminRequest.Rename catRename = + CollectionAdminRequest.renameCollection("catAlias", "bar"); catRename.setFollowAliases(followAliases); - Exception e = assertThrows("category-based alias renaming should fail", Exception.class, - () -> catRename.process(cluster.getSolrClient())); + Exception e = + assertThrows( + "category-based alias renaming should fail", + Exception.class, + () -> catRename.process(cluster.getSolrClient())); if (followAliases) { assertTrue(e.toString(), e.toString().contains("is a routed
alias")); } else { @@ -930,8 +1124,11 @@ private void doTestRenameCollection(boolean followAliases) throws Exception { CollectionAdminRequest.Rename rename2 = CollectionAdminRequest.renameCollection("col2", "foo"); rename2.setFollowAliases(followAliases); - e = assertThrows("should fail because 'foo' already exists", Exception.class, - () -> rename2.process(cluster.getSolrClient())); + e = + assertThrows( + "should fail because 'foo' already exists", + Exception.class, + () -> rename2.process(cluster.getSolrClient())); if (followAliases) { assertTrue(e.toString(), e.toString().contains("exists")); } else { @@ -950,8 +1147,14 @@ public void testDeleteAliasedCollection() throws Exception { cluster.waitForActiveCollection(collectionName1, 1, 1); cluster.waitForActiveCollection(collectionName2, 1, 1); - waitForState("Expected collection1 to be created with 1 shard and 1 replica", collectionName1, clusterShape(1, 1)); - waitForState("Expected collection2 to be created with 1 shard and 1 replica", collectionName2, clusterShape(1, 1)); + waitForState( + "Expected collection1 to be created with 1 shard and 1 replica", + collectionName1, + clusterShape(1, 1)); + waitForState( + "Expected collection2 to be created with 1 shard and 1 replica", + collectionName2, + clusterShape(1, 1)); SolrInputDocument doc = new SolrInputDocument("id", "1"); solrClient.add(collectionName1, doc); @@ -965,16 +1168,24 @@ public void testDeleteAliasedCollection() throws Exception { CollectionAdminRequest.createAlias(collectionName1, collectionName2).process(solrClient); - RetryUtil.retryUntil("didn't get the new aliases", 10, 1000, TimeUnit.MILLISECONDS, () -> { - try { - solrClient.getZkStateReader().aliasesManager.update(); - return solrClient.getZkStateReader().getAliases() - .resolveSimpleAlias(collectionName1).equals(collectionName2); - } catch (Exception e) { - fail("exception caught refreshing aliases: " + e); - return false; - } - }); + RetryUtil.retryUntil( + "didn't get the new aliases", + 10, + 1000, + TimeUnit.MILLISECONDS, + () -> { + try { + solrClient.getZkStateReader().aliasesManager.update(); + return solrClient + .getZkStateReader() + .getAliases() + .resolveSimpleAlias(collectionName1) + .equals(collectionName2); + } catch (Exception e) { + fail("exception caught refreshing aliases: " + e); + return false; + } + }); // both results should come from collection 2 assertDoc(solrClient, collectionName1, "2"); // aliased @@ -1009,97 +1220,116 @@ public void testDeleteAliasedCollection() throws Exception { assertFalse(state.getCollectionsMap().toString(), state.hasCollection(collectionName2)); // and the alias is gone - RetryUtil.retryUntil("didn't get the new aliases", 10, 1000, TimeUnit.MILLISECONDS, () -> { - try { - solrClient.getZkStateReader().aliasesManager.update(); - return !solrClient.getZkStateReader().getAliases().hasAlias(collectionName1); - } catch (Exception e) { - fail("exception caught refreshing aliases: " + e); - return false; - } - }); + RetryUtil.retryUntil( + "didn't get the new aliases", + 10, + 1000, + TimeUnit.MILLISECONDS, + () -> { + try { + solrClient.getZkStateReader().aliasesManager.update(); + return !solrClient.getZkStateReader().getAliases().hasAlias(collectionName1); + } catch (Exception e) { + fail("exception caught refreshing aliases: " + e); + return false; + } + }); } - private void assertDoc(CloudSolrClient solrClient, String collection, String id) throws Exception { + private void assertDoc(CloudSolrClient solrClient, String collection, String id) + throws Exception { 
QueryResponse rsp = solrClient.query(collection, params(CommonParams.Q, "*:*")); assertEquals(rsp.toString(), 1, rsp.getResults().getNumFound()); SolrDocument sdoc = rsp.getResults().get(0); assertEquals(sdoc.toString(), id, sdoc.getFieldValue("id")); - } @Test public void testOverseerStatus() throws IOException, SolrServerException { - CollectionAdminResponse response = new CollectionAdminRequest.OverseerStatus().process(cluster.getSolrClient()); + CollectionAdminResponse response = + new CollectionAdminRequest.OverseerStatus().process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); - // When running with Distributed Collection API, no real data in Overseer status, but the Collection API call above shouldn't fail - if (new CollectionAdminRequest.RequestApiDistributedProcessing().process(cluster.getSolrClient()).getIsCollectionApiDistributed()) { + // When running with Distributed Collection API, no real data in Overseer status, but the + // Collection API call above shouldn't fail + if (new CollectionAdminRequest.RequestApiDistributedProcessing() + .process(cluster.getSolrClient()) + .getIsCollectionApiDistributed()) { return; } - assertNotNull("overseer_operations shouldn't be null", response.getResponse().get("overseer_operations")); + assertNotNull( + "overseer_operations shouldn't be null", response.getResponse().get("overseer_operations")); } @Test public void testList() throws IOException, SolrServerException { - CollectionAdminResponse response = new CollectionAdminRequest.List().process(cluster.getSolrClient()); + CollectionAdminResponse response = + new CollectionAdminRequest.List().process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); assertNotNull("collection list should not be null", response.getResponse().get("collections")); } @Test - public void testAddAndDeleteReplicaProp() throws InterruptedException, IOException, SolrServerException { + public void testAddAndDeleteReplicaProp() + throws InterruptedException, IOException, SolrServerException { final String collection = "replicaProperties"; CollectionAdminRequest.createCollection(collection, "conf", 2, 2) .process(cluster.getSolrClient()); - + cluster.waitForActiveCollection(collection, 2, 4); final Replica replica = getCollectionState(collection).getLeader("shard1"); - CollectionAdminResponse response - = CollectionAdminRequest.addReplicaProperty(collection, "shard1", replica.getName(), "preferredleader", "true") - .process(cluster.getSolrClient()); + CollectionAdminResponse response = + CollectionAdminRequest.addReplicaProperty( + collection, "shard1", replica.getName(), "preferredleader", "true") + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); - waitForState("Expecting property 'preferredleader' to appear on replica " + replica.getName(), collection, + waitForState( + "Expecting property 'preferredleader' to appear on replica " + replica.getName(), + collection, (n, c) -> "true".equals(c.getReplica(replica.getName()).getProperty("preferredleader"))); - response = CollectionAdminRequest.deleteReplicaProperty(collection, "shard1", replica.getName(), "property.preferredleader") - .process(cluster.getSolrClient()); + response = + CollectionAdminRequest.deleteReplicaProperty( + collection, "shard1", replica.getName(), "property.preferredleader") + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); - waitForState("Expecting property 'preferredleader' to be removed from replica " + replica.getName(), collection, + waitForState( + "Expecting 
property 'preferredleader' to be removed from replica " + replica.getName(), + collection, (n, c) -> c.getReplica(replica.getName()).getProperty("preferredleader") == null); - } @Test - public void testBalanceShardUnique() throws IOException, - SolrServerException, KeeperException, InterruptedException { + public void testBalanceShardUnique() + throws IOException, SolrServerException, KeeperException, InterruptedException { final String collection = "balancedProperties"; CollectionAdminRequest.createCollection(collection, "conf", 2, 2) .process(cluster.getSolrClient()); - - cluster.waitForActiveCollection(collection, 2, 4); - CollectionAdminResponse response = CollectionAdminRequest.balanceReplicaProperty(collection, "preferredLeader") - .process(cluster.getSolrClient()); - assertEquals(0, response.getStatus()); + cluster.waitForActiveCollection(collection, 2, 4); - waitForState("Expecting 'preferredleader' property to be balanced across all shards", collection, (n, c) -> { - for (Slice slice : c) { - int count = 0; - for (Replica replica : slice) { - if ("true".equals(replica.getProperty("preferredleader"))) - count += 1; - } - if (count != 1) - return false; - } - return true; - }); + CollectionAdminResponse response = + CollectionAdminRequest.balanceReplicaProperty(collection, "preferredLeader") + .process(cluster.getSolrClient()); + assertEquals(0, response.getStatus()); + waitForState( + "Expecting 'preferredleader' property to be balanced across all shards", + collection, + (n, c) -> { + for (Slice slice : c) { + int count = 0; + for (Replica replica : slice) { + if ("true".equals(replica.getProperty("preferredleader"))) count += 1; + } + if (count != 1) return false; + } + return true; + }); } @Test @@ -1107,35 +1337,40 @@ public void testModifyCollectionAttribute() throws IOException, SolrServerExcept final String collection = "testAddAndDeleteCollectionAttribute"; CollectionAdminRequest.createCollection(collection, "conf", 1, 1) .process(cluster.getSolrClient()); - + cluster.waitForActiveCollection(collection, 1, 1); CollectionAdminRequest.modifyCollection(collection, null) .setAttribute("replicationFactor", 25) .process(cluster.getSolrClient()); - waitForState("Expecting attribute 'replicationFactor' to be 25", collection, + waitForState( + "Expecting attribute 'replicationFactor' to be 25", + collection, (n, c) -> 25 == c.getReplicationFactor()); - expectThrows(IllegalArgumentException.class, + expectThrows( + IllegalArgumentException.class, "An attempt to set unknown collection attribute should have failed", - () -> CollectionAdminRequest.modifyCollection(collection, null) - .setAttribute("non_existent_attr", 25) - .process(cluster.getSolrClient()) - ); + () -> + CollectionAdminRequest.modifyCollection(collection, null) + .setAttribute("non_existent_attr", 25) + .process(cluster.getSolrClient())); - expectThrows(IllegalArgumentException.class, + expectThrows( + IllegalArgumentException.class, "An attempt to set null value should have failed", - () -> CollectionAdminRequest.modifyCollection(collection, null) - .setAttribute("non_existent_attr", null) - .process(cluster.getSolrClient()) - ); + () -> + CollectionAdminRequest.modifyCollection(collection, null) + .setAttribute("non_existent_attr", null) + .process(cluster.getSolrClient())); - expectThrows(IllegalArgumentException.class, + expectThrows( + IllegalArgumentException.class, "An attempt to unset unknown collection attribute should have failed", - () -> CollectionAdminRequest.modifyCollection(collection, null) - 
.unsetAttribute("non_existent_attr") - .process(cluster.getSolrClient()) - ); + () -> + CollectionAdminRequest.modifyCollection(collection, null) + .unsetAttribute("non_existent_attr") + .process(cluster.getSolrClient())); } } diff --git a/solr/core/src/test/org/apache/solr/cloud/ConcurrentCreateRoutedAliasTest.java b/solr/core/src/test/org/apache/solr/cloud/ConcurrentCreateRoutedAliasTest.java index 26c272bee45..f573dfa851f 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ConcurrentCreateRoutedAliasTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ConcurrentCreateRoutedAliasTest.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.lang.invoke.MethodHandles; import java.util.concurrent.atomic.AtomicReference; - import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrClient; @@ -34,7 +33,7 @@ import org.slf4j.LoggerFactory; @LuceneTestCase.Slow -@LuceneTestCase.AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-12386") // "Can't find resource" +@LuceneTestCase.AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-12386") public class ConcurrentCreateRoutedAliasTest extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -59,36 +58,39 @@ public void tearDown() throws Exception { } @Test - public void testConcurrentCreateRoutedAliasMinimal() throws IOException, KeeperException.NoNodeException { - // this is the test where be blow out a bunch of create commands all out at once. - // other tests are more functionality based, and just use a single thread. - - // Failure of this test very occasionally due to overseer overload would not be worrisome (just bothersome). - // Any use case creating large numbers of time routed aliases concurrently would be an EXTREMELY odd - // if not fundamentally broken use case. This test method is just here to guard against any race - // conditions in the code that could crop up rarely in lower volume usage. + public void testConcurrentCreateRoutedAliasMinimal() + throws IOException, KeeperException.NoNodeException { + // this is the test where be blow out a bunch of create commands all out at once. other tests + // are more functionality based, and just use a single thread. - // That said any failures involving about NPE's or missing parameters or oddities other than overwhelming - // the overseer queue with retry races emanating from this test should be investigated. Also if it fails - // frequently that needs to be investigated of course. + // Failure of this test very occasionally due to overseer overload would not be worrisome (just + // bothersome). Any use case creating large numbers of time routed aliases concurrently would be + // an EXTREMELY odd if not fundamentally broken use case. This test method is just here to guard + // against any race conditions in the code that could crop up rarely in lower volume usage. + // That said any failures involving about NPE's or missing parameters or oddities other than + // overwhelming the overseer queue with retry races emanating from this test should be + // investigated. Also if it fails frequently that needs to be investigated of course. 
final AtomicReference<Exception> failure = new AtomicReference<>(); - // Note: this number of threads seems to work regularly with the up-tweaked number of retries (50) in + // Note: this number of threads seems to work regularly with the up-tweaked number of retries + // (50) in // org.apache.solr.common.cloud.ZkStateReader.AliasesManager.applyModificationAndExportToZk() - with the original 5 retries this wouldn't reliably pass with 10 threads, but with 50 retries it seems - to handle 50 threads about a dozen times without any failure (on a 32 thread processor) - it also passed 3/3 at 150 threads and 2/3 with 250 threads on both 1 node and 4 nodes... - the failure mode seems to be overseer tasks that are not found. I suspect this happens when enough - threads get into retry races and the spam overwhelms the overseer. (that this can happen might imply - an issue over there, but I'm not sure, since there is an intentional hard limit on the overseer queue - and I haven't tried to count the retries up and figure out if the requests are actually exceeding that - limit or not, but the speed of retries might indicate an effectively hot loop, but again, a separate issue. - - The hope is that the level of concurrency supported by create routed alias and the code it uses is such - that this test wouldn't spuriously fail more than once a year. If that's true users should never see - an issue in the wild unless they are doing something we probably don't want to support anyway + with the original 5 retries this wouldn't reliably pass with 10 threads, but with 50 retries + it seems to handle 50 threads about a dozen times without any failure (on a 32 thread + processor). It also passed 3/3 at 150 threads and 2/3 with 250 threads on both 1 node and 4 + nodes. The failure mode seems to be overseer tasks that are not found. I suspect this + happens when enough threads get into retry races and the spam overwhelms the overseer. (That + this can happen might imply an issue over there, but I'm not sure, since there is an + intentional hard limit on the overseer queue and I haven't tried to count the retries up to + figure out whether the requests actually exceed that limit; the speed of retries might + indicate an effectively hot loop, but again, that's a separate issue.) + + The hope is that the level of concurrency supported by create routed alias and the code it + uses is such that this test wouldn't spuriously fail more than once a year. If that's true, + users should never see an issue in the wild unless they are doing something we probably don't + want to support anyway.
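The retry discussion above amounts to a bounded optimistic read-modify-write loop against a versioned store (here, a ZooKeeper znode). As a reviewer's aid, the following is a minimal, self-contained Java sketch of that general pattern only — not Solr's actual implementation, which lives in ZkStateReader.AliasesManager.applyModificationAndExportToZk(). The names VersionedStore, Snapshot, casWrite, and applyModification are invented for the sketch.

import java.util.function.UnaryOperator;

/** Illustrative only: a bounded optimistic read-modify-write loop, not Solr's actual code. */
class OptimisticRetrySketch {

  /** Immutable view of the store's value at one version. */
  static final class Snapshot {
    final String data;
    final int version;

    Snapshot(String data, int version) {
      this.data = data;
      this.version = version;
    }
  }

  /** Stand-in for a versioned store such as a ZooKeeper znode (invented for this sketch). */
  static final class VersionedStore {
    private String data = "";
    private int version = 0;

    /** Atomically read the current value together with its version. */
    synchronized Snapshot read() {
      return new Snapshot(data, version);
    }

    /** The write succeeds only if the caller still holds the latest version. */
    synchronized boolean casWrite(String newData, int expectedVersion) {
      if (version != expectedVersion) {
        return false; // another writer got in first; the caller must re-read and retry
      }
      data = newData;
      version++;
      return true;
    }
  }

  /** Apply op under optimistic concurrency, giving up after maxRetries conflicting writes. */
  static void applyModification(VersionedStore store, UnaryOperator<String> op, int maxRetries)
      throws InterruptedException {
    for (int attempt = 1; attempt <= maxRetries; attempt++) {
      Snapshot snapshot = store.read();
      if (store.casWrite(op.apply(snapshot.data), snapshot.version)) {
        return; // modification applied cleanly
      }
      // Sleep briefly, and longer on each attempt, so that many racing writers
      // back off instead of hammering the store in a hot loop.
      Thread.sleep(10L * attempt);
    }
    throw new IllegalStateException("gave up after " + maxRetries + " conflicting writes");
  }

  public static void main(String[] args) throws InterruptedException {
    VersionedStore store = new VersionedStore();
    applyModification(store, aliases -> aliases + "testAlias0;", 50);
    System.out.println(store.read().data);
  }
}

With only a handful of retries and many concurrent writers, most attempts lose the version race and the request is abandoned; raising the retry budget (the 5 -> 50 change the comment describes) trades a little latency for a much lower spurious-failure rate, at the cost of more traffic when writers do collide.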
final CreateRoutedAliasThread[] threads = new CreateRoutedAliasThread[4]; int numStart = num; @@ -97,10 +99,10 @@ public void testConcurrentCreateRoutedAliasMinimal() throws IOException, KeeperE final String baseUrl = solrCluster.getJettySolrRunners().get(0).getBaseUrl().toString(); final SolrClient solrClient = getHttpSolrClient(baseUrl); - int i = num - numStart; - threads[i] = new CreateRoutedAliasThread("create-delete-search-" + i, aliasName, "NOW/HOUR", - solrClient, failure, false); + threads[i] = + new CreateRoutedAliasThread( + "create-delete-search-" + i, aliasName, "NOW/HOUR", solrClient, failure, false); } startAll(threads); @@ -109,23 +111,27 @@ public void testConcurrentCreateRoutedAliasMinimal() throws IOException, KeeperE assertNull("concurrent alias creation failed " + failure.get(), failure.get()); } - @Test public void testConcurrentCreateRoutedAliasComplex() { final AtomicReference<Exception> failure = new AtomicReference<>(); final CreateRoutedAliasThread[] threads = new CreateRoutedAliasThread[1]; int numStart = num; - System.out.println("NUM ==> " +num); + System.out.println("NUM ==> " + num); for (; num < threads.length + numStart; num++) { final String aliasName = "testAliasCplx" + num; final String baseUrl = solrCluster.getJettySolrRunners().get(0).getBaseUrl().toString(); final SolrClient solrClient = getHttpSolrClient(baseUrl); int i = num - numStart; - threads[i] = new CreateRoutedAliasThread("create-routed-alias-cplx-" + i, - aliasName, "2017-12-25T23:24:25Z", - solrClient, failure, true); + threads[i] = + new CreateRoutedAliasThread( + "create-routed-alias-cplx-" + i, + aliasName, + "2017-12-25T23:24:25Z", + solrClient, + failure, + true); } startAll(threads); @@ -158,8 +164,12 @@ private static class CreateRoutedAliasThread extends Thread { protected final AtomicReference<Exception> failure; CreateRoutedAliasThread( - String name, String aliasName, String start, SolrClient solrClient, - AtomicReference<Exception> failure, boolean v2) { + String name, + String aliasName, + String start, + SolrClient solrClient, + AtomicReference<Exception> failure, + boolean v2) { super(name); this.aliasName = aliasName; this.start = start; @@ -169,7 +179,7 @@ private static class CreateRoutedAliasThread extends Thread { @Override public void run() { - doWork(); + doWork(); } void doWork() { @@ -189,14 +199,13 @@ void addFailure(Exception e) { private void createAlias() { try { - CollectionAdminRequest.CreateTimeRoutedAlias rq = CollectionAdminRequest - .createTimeRoutedAlias( + CollectionAdminRequest.CreateTimeRoutedAlias rq = + CollectionAdminRequest.createTimeRoutedAlias( aliasName, start, "+12HOUR", "routedFoo_dt", - CollectionAdminRequest.createCollection("_ignored_", "_default", 1, 1) - ); + CollectionAdminRequest.createCollection("_ignored_", "_default", 1, 1)); final CollectionAdminResponse response = rq.process(solrClient); if (response.getStatus() != 0) { @@ -205,10 +214,8 @@ private void createAlias() { } catch (Exception e) { addFailure(e); } - } - void joinAndClose() throws InterruptedException { try { super.join(60000); @@ -217,6 +224,4 @@ void joinAndClose() throws InterruptedException { } } } - - } diff --git a/solr/core/src/test/org/apache/solr/cloud/ConfigSetApiLockingTest.java b/solr/core/src/test/org/apache/solr/cloud/ConfigSetApiLockingTest.java index ef953aa75dc..c8e313839b5 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ConfigSetApiLockingTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ConfigSetApiLockingTest.java @@ -16,8 +16,8 @@ */ package org.apache.solr.cloud; -import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CountDownLatch; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.cloud.SolrZkClient; import org.junit.Test; @@ -40,7 +40,9 @@ public void monothreadedApiLockTests() throws Exception { try { server.run(); try (SolrZkClient zkClient = new SolrZkClient(server.getZkAddress(), TIMEOUT)) { - ConfigSetApiLockFactory apiLockFactory = new ConfigSetApiLockFactory(new ZkDistributedConfigSetLockFactory(zkClient, "/apiLockTestRoot")); + ConfigSetApiLockFactory apiLockFactory = + new ConfigSetApiLockFactory( + new ZkDistributedConfigSetLockFactory(zkClient, "/apiLockTestRoot")); monothreadedTests(apiLockFactory); multithreadedTests(apiLockFactory); @@ -56,25 +58,30 @@ private void monothreadedTests(ConfigSetApiLockFactory apiLockingHelper) throws assertTrue("cs1Lock should have been acquired", cs1Lock.isAcquired()); // This lock does have a base config set - DistributedMultiLock cs2Lock = apiLockingHelper.createConfigSetApiLock(CONFIG_SET_NAME_2, BASE_CONFIG_SET_NAME); + DistributedMultiLock cs2Lock = + apiLockingHelper.createConfigSetApiLock(CONFIG_SET_NAME_2, BASE_CONFIG_SET_NAME); assertTrue("cs2Lock should have been acquired", cs2Lock.isAcquired()); // This lock does has the same base config set, but that shouldn't prevent acquiring it - DistributedMultiLock cs3Lock = apiLockingHelper.createConfigSetApiLock(CONFIG_SET_NAME_3, BASE_CONFIG_SET_NAME); + DistributedMultiLock cs3Lock = + apiLockingHelper.createConfigSetApiLock(CONFIG_SET_NAME_3, BASE_CONFIG_SET_NAME); assertTrue("cs3Lock should have been acquired", cs3Lock.isAcquired()); // But we shouldn't be able to lock at this stage the base config set - DistributedMultiLock csBaseLock = apiLockingHelper.createConfigSetApiLock(BASE_CONFIG_SET_NAME, null); + DistributedMultiLock csBaseLock = + apiLockingHelper.createConfigSetApiLock(BASE_CONFIG_SET_NAME, null); assertFalse("csBaseLock should not have been acquired", csBaseLock.isAcquired()); cs2Lock.release(); - assertFalse("csBaseLock should not have been acquired, cs3Lock still there", csBaseLock.isAcquired()); + assertFalse( + "csBaseLock should not have been acquired, cs3Lock still there", csBaseLock.isAcquired()); cs3Lock.release(); assertTrue("csBaseLock should have been acquired", csBaseLock.isAcquired()); // Acquiring a lock with a locked base config set should not be possible cs2Lock = apiLockingHelper.createConfigSetApiLock(CONFIG_SET_NAME_2, BASE_CONFIG_SET_NAME); - assertFalse("cs2Lock should not have been acquired, csBaseLock still held", cs2Lock.isAcquired()); + assertFalse( + "cs2Lock should not have been acquired, csBaseLock still held", cs2Lock.isAcquired()); csBaseLock.release(); assertTrue("cs2Lock should now be acquired, csBaseLock was freed", cs2Lock.isAcquired()); @@ -84,41 +91,55 @@ private void monothreadedTests(ConfigSetApiLockFactory apiLockingHelper) throws private void multithreadedTests(ConfigSetApiLockFactory apiLockingHelper) throws Exception { // This lock does have a base config set - DistributedMultiLock cs2Lock = apiLockingHelper.createConfigSetApiLock(CONFIG_SET_NAME_2, BASE_CONFIG_SET_NAME); + DistributedMultiLock cs2Lock = + apiLockingHelper.createConfigSetApiLock(CONFIG_SET_NAME_2, BASE_CONFIG_SET_NAME); assertTrue("cs2Lock should have been acquired", cs2Lock.isAcquired()); // But we shouldn't be able to lock at this stage the base config set - 
DistributedMultiLock csBaseLock = apiLockingHelper.createConfigSetApiLock(BASE_CONFIG_SET_NAME, null); + DistributedMultiLock csBaseLock = + apiLockingHelper.createConfigSetApiLock(BASE_CONFIG_SET_NAME, null); assertFalse("csBaseLock should not have been acquired", csBaseLock.isAcquired()); - // Wait for acquisition of the base config set lock on another thread (and be notified via a latch) + // Wait for acquisition of the base config set lock on another thread (and be notified via a + // latch) final CountDownLatch latch = new CountDownLatch(1); - new Thread(() -> { - csBaseLock.waitUntilAcquired(); - // countDown() will not be called if waitUntilAcquired() threw exception of any kind - latch.countDown(); - }).start(); + new Thread( + () -> { + csBaseLock.waitUntilAcquired(); + // countDown() will not be called if waitUntilAcquired() threw exception of any kind + latch.countDown(); + }) + .start(); // Wait for the thread to start and to get blocked in waitUntilAcquired() - // (thread start could have been checked more reliably using another latch, and verifying the thread is in waitUntilAcquired - // done through that thread stacktrace, but that would be overkill compared to the very slight race condition of waiting 30ms, - // but a race that would not cause the test to fail since we're testing... that nothing happened yet). + // (thread start could have been checked more reliably using another latch, and verifying the + // thread is in waitUntilAcquired done through that thread stacktrace, but that would be + // overkill compared to the very slight race condition of waiting 30ms, but a race that would + // not cause the test to fail since we're testing... that nothing happened yet). Thread.sleep(30); - assertEquals("we should not have been notified that base config set lock was acquired", 1, latch.getCount()); + assertEquals( + "we should not have been notified that base config set lock was acquired", + 1, + latch.getCount()); assertFalse("base config set lock should not have been acquired", csBaseLock.isAcquired()); cs2Lock.release(); - assertTrue("basec config set lock should have been acquired now that other lock was released", csBaseLock.isAcquired()); + assertTrue( + "base config set lock should have been acquired now that other lock was released", + csBaseLock.isAcquired()); // Wait for the Zookeeper watch to fire + the thread to be unblocked and countdown the latch - // We'll wait up to 10 seconds here, so should be safe even if GC is extraordinarily high with a pause + // We'll wait up to 10 seconds here, so should be safe even if GC is extraordinarily high with a + // pause int i = 0; while (i < 1000 && latch.getCount() != 0) { Thread.sleep(10); i++; } - assertEquals("we should have been notified that the base config set lock was acquired", 0, latch.getCount()); + assertEquals( + "we should have been notified that the base config set lock was acquired", + 0, + latch.getCount()); } } - diff --git a/solr/core/src/test/org/apache/solr/cloud/ConnectionManagerTest.java b/solr/core/src/test/org/apache/solr/cloud/ConnectionManagerTest.java index 76f0c54e8f0..daf0e9585c5 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ConnectionManagerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ConnectionManagerTest.java @@ -21,7 +21,6 @@ import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeoutException; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.SolrTestCaseJ4; import
org.apache.solr.common.cloud.ConnectionManager; @@ -37,29 +36,29 @@ @Slow public class ConnectionManagerTest extends SolrTestCaseJ4 { - + static final int TIMEOUT = 3000; - + @Ignore public void testConnectionManager() throws Exception { - + // setup a SolrZkClient to do some getBaseUrlForNodeName testing Path zkDir = createTempDir("zkData"); ZkTestServer server = new ZkTestServer(zkDir); try { server.run(); - + SolrZkClient zkClient = new SolrZkClient(server.getZkAddress(), TIMEOUT); ConnectionManager cm = zkClient.getConnectionManager(); try { assertFalse(cm.isLikelyExpired()); zkClient.getSolrZooKeeper().closeCnxn(); - + long sessionId = zkClient.getSolrZooKeeper().getSessionId(); server.expire(sessionId); Thread.sleep(TIMEOUT); - + assertTrue(cm.isLikelyExpired()); } finally { cm.close(); @@ -89,7 +88,7 @@ public void testLikelyExpired() throws Exception { assertFalse(cm.isLikelyExpired()); // but it should after the timeout - Thread.sleep((long)(zkClient.getZkClientTimeout() * 1.5)); + Thread.sleep((long) (zkClient.getZkClientTimeout() * 1.5)); assertFalse(cm.isConnectedAndNotClosed()); assertTrue(cm.isLikelyExpired()); @@ -110,25 +109,27 @@ public void testLikelyExpired() throws Exception { server.shutdown(); } } - + @Test public void testReconnectWhenZkDisappeared() throws Exception { - ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor(new SolrNamedThreadFactory("connectionManagerTest")); - + ScheduledExecutorService executor = + Executors.newSingleThreadScheduledExecutor( + new SolrNamedThreadFactory("connectionManagerTest")); + // setup a SolrZkClient to do some getBaseUrlForNodeName testing Path zkDir = createTempDir("zkData"); ZkTestServer server = new ZkTestServer(zkDir); try { server.run(); - + MockZkClientConnectionStrategy strat = new MockZkClientConnectionStrategy(); - SolrZkClient zkClient = new SolrZkClient(server.getZkAddress(), TIMEOUT, strat , null); + SolrZkClient zkClient = new SolrZkClient(server.getZkAddress(), TIMEOUT, strat, null); ConnectionManager cm = zkClient.getConnectionManager(); - + try { assertFalse(cm.isLikelyExpired()); assertTrue(cm.isConnectedAndNotClosed()); - + // reconnect -- should no longer be likely expired cm.process(new WatchedEvent(EventType.None, KeeperState.Expired, "")); assertFalse(cm.isLikelyExpired()); @@ -143,23 +144,27 @@ public void testReconnectWhenZkDisappeared() throws Exception { server.shutdown(); } } - + private static class MockZkClientConnectionStrategy extends DefaultConnectionStrategy { int called = 0; boolean exceptionThrown = false; - + @Override - public void reconnect(final String serverAddress, final int zkClientTimeout, - final Watcher watcher, final ZkUpdate updater) throws IOException, InterruptedException, TimeoutException { - - if(called++ < 1) { + public void reconnect( + final String serverAddress, + final int zkClientTimeout, + final Watcher watcher, + final ZkUpdate updater) + throws IOException, InterruptedException, TimeoutException { + + if (called++ < 1) { exceptionThrown = true; throw new IOException("Testing"); } - + super.reconnect(serverAddress, zkClientTimeout, watcher, updater); } - + public boolean isExceptionThrow() { return exceptionThrown; } diff --git a/solr/core/src/test/org/apache/solr/cloud/CreateCollectionCleanupTest.java b/solr/core/src/test/org/apache/solr/cloud/CreateCollectionCleanupTest.java index bcdc867a0c4..7e41981b744 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CreateCollectionCleanupTest.java +++ 
b/solr/core/src/test/org/apache/solr/cloud/CreateCollectionCleanupTest.java @@ -34,38 +34,39 @@ public class CreateCollectionCleanupTest extends SolrCloudTestCase { - protected static final String CLOUD_SOLR_XML_WITH_10S_CREATE_COLL_WAIT = "\n" + - "\n" + - " ${shareSchema:false}\n" + - " ${configSetBaseDir:configsets}\n" + - " ${coreRootDirectory:.}\n" + - "\n" + - " \n" + - " ${urlScheme:}\n" + - " ${socketTimeout:90000}\n" + - " ${connTimeout:15000}\n" + - " \n" + - "\n" + - " \n" + - " 127.0.0.1\n" + - " ${hostPort:8983}\n" + - " ${hostContext:solr}\n" + - " ${solr.zkclienttimeout:30000}\n" + - " ${genericCoreNodeNames:true}\n" + - " 10000\n" + - " ${distribUpdateConnTimeout:45000}\n" + - " ${distribUpdateSoTimeout:340000}\n" + - " ${createCollectionWaitTimeTillActive:10}\n" + - " ${solr.distributedClusterStateUpdates:false} \n" + - " \n" + - " \n" + - "\n"; - + protected static final String CLOUD_SOLR_XML_WITH_10S_CREATE_COLL_WAIT = + "\n" + + "\n" + + " ${shareSchema:false}\n" + + " ${configSetBaseDir:configsets}\n" + + " ${coreRootDirectory:.}\n" + + "\n" + + " \n" + + " ${urlScheme:}\n" + + " ${socketTimeout:90000}\n" + + " ${connTimeout:15000}\n" + + " \n" + + "\n" + + " \n" + + " 127.0.0.1\n" + + " ${hostPort:8983}\n" + + " ${hostContext:solr}\n" + + " ${solr.zkclienttimeout:30000}\n" + + " ${genericCoreNodeNames:true}\n" + + " 10000\n" + + " ${distribUpdateConnTimeout:45000}\n" + + " ${distribUpdateSoTimeout:340000}\n" + + " ${createCollectionWaitTimeTillActive:10}\n" + + " ${solr.distributedClusterStateUpdates:false} \n" + + " \n" + + " \n" + + "\n"; @BeforeClass public static void createCluster() throws Exception { configureCluster(1) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .withSolrXml(CLOUD_SOLR_XML_WITH_10S_CREATE_COLL_WAIT) .useOtherCollectionConfigSetExecution() .configure(); @@ -77,7 +78,8 @@ public void testCreateCollectionCleanup() throws Exception { String collectionName = "foo"; assertThat(CollectionAdminRequest.listCollections(cloudClient), not(hasItem(collectionName))); // Create a collection that would fail - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName,"conf1",1,1); + CollectionAdminRequest.Create create = + CollectionAdminRequest.createCollection(collectionName, "conf1", 1, 1); Properties properties = new Properties(); Path tmpDir = createTempDir(); @@ -85,24 +87,34 @@ public void testCreateCollectionCleanup() throws Exception { Files.createFile(tmpDir); properties.put(CoreAdminParams.DATA_DIR, tmpDir.toString()); create.setProperties(properties); - expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> { - create.process(cloudClient); - }); + expectThrows( + BaseHttpSolrClient.RemoteSolrException.class, + () -> { + create.process(cloudClient); + }); // Confirm using LIST that the collection does not exist - assertThat("Failed collection is still in the clusterstate: " + cluster.getSolrClient().getClusterStateProvider().getClusterState().getCollectionOrNull(collectionName), - CollectionAdminRequest.listCollections(cloudClient), not(hasItem(collectionName))); - + assertThat( + "Failed collection is still in the clusterstate: " + + cluster + .getSolrClient() + .getClusterStateProvider() + .getClusterState() + .getCollectionOrNull(collectionName), + CollectionAdminRequest.listCollections(cloudClient), + not(hasItem(collectionName))); } - + @Test 
public void testAsyncCreateCollectionCleanup() throws Exception { final CloudSolrClient cloudClient = cluster.getSolrClient(); String collectionName = "foo2"; assertThat(CollectionAdminRequest.listCollections(cloudClient), not(hasItem(collectionName))); - + // Create a collection that would fail - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName,"conf1",1,1).setPerReplicaState(random().nextBoolean()); + CollectionAdminRequest.Create create = + CollectionAdminRequest.createCollection(collectionName, "conf1", 1, 1) + .setPerReplicaState(random().nextBoolean()); Properties properties = new Properties(); Path tmpDir = createTempDir(); @@ -112,13 +124,20 @@ public void testAsyncCreateCollectionCleanup() throws Exception { create.setProperties(properties); create.setAsyncId("testAsyncCreateCollectionCleanup"); create.process(cloudClient); - RequestStatusState state = AbstractFullDistribZkTestBase.getRequestStateAfterCompletion("testAsyncCreateCollectionCleanup", 30, cloudClient); + RequestStatusState state = + AbstractFullDistribZkTestBase.getRequestStateAfterCompletion( + "testAsyncCreateCollectionCleanup", 30, cloudClient); assertThat(state.getKey(), is("failed")); // Confirm using LIST that the collection does not exist - assertThat("Failed collection is still in the clusterstate: " + cluster.getSolrClient().getClusterStateProvider().getClusterState().getCollectionOrNull(collectionName), - CollectionAdminRequest.listCollections(cloudClient), not(hasItem(collectionName))); - + assertThat( + "Failed collection is still in the clusterstate: " + + cluster + .getSolrClient() + .getClusterStateProvider() + .getClusterState() + .getCollectionOrNull(collectionName), + CollectionAdminRequest.listCollections(cloudClient), + not(hasItem(collectionName))); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/CreateRoutedAliasTest.java b/solr/core/src/test/org/apache/solr/cloud/CreateRoutedAliasTest.java index cd6edd9006f..783c8e22711 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CreateRoutedAliasTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/CreateRoutedAliasTest.java @@ -17,13 +17,14 @@ package org.apache.solr.cloud; +import static org.apache.solr.client.solrj.RoutedAliasTypes.TIME; + import java.io.IOException; import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.Date; import java.util.Map; import java.util.TimeZone; - import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; @@ -50,11 +51,7 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.apache.solr.client.solrj.RoutedAliasTypes.TIME; - -/** - * Direct http tests of the CreateRoutedAlias functionality. - */ +/** Direct http tests of the CreateRoutedAlias functionality. 
*/ @SolrTestCaseJ4.SuppressSSL public class CreateRoutedAliasTest extends SolrCloudTestCase { @@ -62,13 +59,13 @@ public class CreateRoutedAliasTest extends SolrCloudTestCase { public static void setupCluster() throws Exception { configureCluster(2).configure(); -// final Properties properties = new Properties(); -// properties.setProperty("immutable", "true"); // we won't modify it in this test -// new ConfigSetAdminRequest.Create() -// .setConfigSetName(configName) -// .setBaseConfigSetName("_default") -// .setNewConfigSetProperties(properties) -// .process(cluster.getSolrClient()); + // final Properties properties = new Properties(); + // properties.setProperty("immutable", "true"); // we won't modify it in this test + // new ConfigSetAdminRequest.Create() + // .setConfigSetName(configName) + // .setBaseConfigSetName("_default") + // .setNewConfigSetProperties(properties) + // .process(cluster.getSolrClient()); } private CloudSolrClient solrClient; @@ -88,8 +85,8 @@ public void doAfter() throws Exception { } } - // This is a fairly complete test where we set many options and see that it both affected the created - // collection and that the alias metadata was saved accordingly + // This is a fairly complete test where we set many options and see that it both affected the + // created collection and that the alias metadata was saved accordingly @Test public void testV2() throws Exception { // note we don't use TZ in this test, thus it's UTC @@ -98,75 +95,94 @@ public void testV2() throws Exception { String createNode = cluster.getRandomJetty(random()).getNodeName(); final String baseUrl = cluster.getRandomJetty(random()).getBaseUrl().toString(); - //TODO fix Solr test infra so that this /____v2/ becomes /api/ + // TODO fix Solr test infra so that this /____v2/ becomes /api/ HttpPost post = new HttpPost(baseUrl + "/____v2/c"); - post.setEntity(new StringEntity("{\n" + - " \"create-alias\" : {\n" + - " \"name\": \"" + aliasName + "\",\n" + - " \"router\" : {\n" + - " \"name\": \"time\",\n" + - " \"field\": \"evt_dt\",\n" + - " \"start\":\"NOW/DAY\",\n" + // small window for test failure once a day. - " \"interval\":\"+2HOUR\",\n" + - " \"maxFutureMs\":\"14400000\"\n" + - " },\n" + - //TODO should we use "NOW=" param? Won't work with v2 and is kinda a hack any way since intended for distrib - " \"create-collection\" : {\n" + - " \"router\": {\n" + - " \"name\":\"implicit\",\n" + - " \"field\":\"foo_s\"\n" + - " },\n" + - " \"shards\":\"foo,bar\",\n" + - " \"config\":\"_default\",\n" + - " \"tlogReplicas\":1,\n" + - " \"pullReplicas\":1,\n" + - " \"nodeSet\": '" + createNode + "',\n" + - " \"properties\" : {\n" + - " \"foobar\":\"bazbam\",\n" + - " \"foobar2\":\"bazbam2\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}", ContentType.APPLICATION_JSON)); + post.setEntity( + new StringEntity( + "{\n" + + " \"create-alias\" : {\n" + + " \"name\": \"" + + aliasName + + "\",\n" + + " \"router\" : {\n" + + " \"name\": \"time\",\n" + + " \"field\": \"evt_dt\",\n" + + " \"start\":\"NOW/DAY\",\n" + + // small window for test failure once a day. + " \"interval\":\"+2HOUR\",\n" + + " \"maxFutureMs\":\"14400000\"\n" + + " },\n" + + + // TODO should we use "NOW=" param? 
Won't work with v2 and is kinda a hack any way + // since intended for distrib + " \"create-collection\" : {\n" + + " \"router\": {\n" + + " \"name\":\"implicit\",\n" + + " \"field\":\"foo_s\"\n" + + " },\n" + + " \"shards\":\"foo,bar\",\n" + + " \"config\":\"_default\",\n" + + " \"tlogReplicas\":1,\n" + + " \"pullReplicas\":1,\n" + + " \"nodeSet\": '" + + createNode + + "',\n" + + " \"properties\" : {\n" + + " \"foobar\":\"bazbam\",\n" + + " \"foobar2\":\"bazbam2\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}", + ContentType.APPLICATION_JSON)); assertSuccess(post); Date startDate = DateMathParser.parseMath(new Date(), "NOW/DAY"); - String initialCollectionName = TimeRoutedAlias.formatCollectionNameFromInstant(aliasName, startDate.toInstant()); + String initialCollectionName = + TimeRoutedAlias.formatCollectionNameFromInstant(aliasName, startDate.toInstant()); // small chance could fail due to "NOW"; see above assertCollectionExists(initialCollectionName); Thread.sleep(1000); // Test created collection: - final DocCollection coll = solrClient.getClusterStateProvider().getState(initialCollectionName).get(); - //System.err.println(coll); - //TODO how do we assert the configSet ? + final DocCollection coll = + solrClient.getClusterStateProvider().getState(initialCollectionName).get(); + // System.err.println(coll); + // TODO how do we assert the configSet ? assertEquals(ImplicitDocRouter.class, coll.getRouter().getClass()); - assertEquals("foo_s", ((Map)coll.get("router")).get("field")); + assertEquals("foo_s", ((Map) coll.get("router")).get("field")); assertEquals(2, coll.getSlices().size()); // numShards - assertEquals(4, coll.getSlices().stream() - .mapToInt(s -> s.getReplicas().size()).sum()); // num replicas + assertEquals( + 4, coll.getSlices().stream().mapToInt(s -> s.getReplicas().size()).sum()); // num replicas // we didn't ask for any NRT replicas - assertEquals(0, coll.getSlices().stream() - .mapToInt(s -> s.getReplicas(r -> r.getType() == Replica.Type.NRT).size()).sum()); - //assertEquals(1, coll.getNumNrtReplicas().intValue()); // TODO seems to be erroneous; I figured 'null' + assertEquals( + 0, + coll.getSlices().stream() + .mapToInt(s -> s.getReplicas(r -> r.getType() == Replica.Type.NRT).size()) + .sum()); + // assertEquals(1, coll.getNumNrtReplicas().intValue()); // TODO seems to be erroneous; I + // figured 'null' assertEquals(1, coll.getNumTlogReplicas().intValue()); // per-shard assertEquals(1, coll.getNumPullReplicas().intValue()); // per-shard - assertTrue("nodeSet didn't work?", - coll.getSlices().stream().flatMap(s -> s.getReplicas().stream()) - .map(Replica::getNodeName).allMatch(createNode::equals)); + assertTrue( + "nodeSet didn't work?", + coll.getSlices().stream() + .flatMap(s -> s.getReplicas().stream()) + .map(Replica::getNodeName) + .allMatch(createNode::equals)); // Test Alias metadata: Aliases aliases = cluster.getSolrClient().getZkStateReader().getAliases(); Map collectionAliasMap = aliases.getCollectionAliasMap(); assertEquals(initialCollectionName, collectionAliasMap.get(aliasName)); Map meta = aliases.getCollectionAliasProperties(aliasName); - //System.err.println(new TreeMap(meta)); - assertEquals("evt_dt",meta.get("router.field")); - assertEquals("_default",meta.get("create-collection.collection.configName")); - assertEquals("foo_s",meta.get("create-collection.router.field")); - assertEquals("bazbam",meta.get("create-collection.property.foobar")); - assertEquals("bazbam2",meta.get("create-collection.property.foobar2")); - 
assertEquals(createNode,meta.get("create-collection.createNodeSet")); + // System.err.println(new TreeMap(meta)); + assertEquals("evt_dt", meta.get("router.field")); + assertEquals("_default", meta.get("create-collection.collection.configName")); + assertEquals("foo_s", meta.get("create-collection.router.field")); + assertEquals("bazbam", meta.get("create-collection.property.foobar")); + assertEquals("bazbam2", meta.get("create-collection.property.foobar2")); + assertEquals(createNode, meta.get("create-collection.createNodeSet")); } @Test @@ -174,27 +190,34 @@ public void testV1() throws Exception { final String aliasName = getSaferTestName(); final String baseUrl = cluster.getRandomJetty(random()).getBaseUrl().toString(); Instant start = Instant.now().truncatedTo(ChronoUnit.HOURS); // mostly make sure no millis - HttpGet get = new HttpGet(baseUrl + "/admin/collections?action=CREATEALIAS" + - "&wt=xml" + - "&name=" + aliasName + - "&router.field=evt_dt" + - "&router.name=time" + - "&router.start=" + start + - "&router.interval=%2B30MINUTE" + - "&create-collection.collection.configName=_default" + - "&create-collection.router.field=foo_s" + - "&create-collection.numShards=1" + - "&create-collection.replicationFactor=2"); + HttpGet get = + new HttpGet( + baseUrl + + "/admin/collections?action=CREATEALIAS" + + "&wt=xml" + + "&name=" + + aliasName + + "&router.field=evt_dt" + + "&router.name=time" + + "&router.start=" + + start + + "&router.interval=%2B30MINUTE" + + "&create-collection.collection.configName=_default" + + "&create-collection.router.field=foo_s" + + "&create-collection.numShards=1" + + "&create-collection.replicationFactor=2"); assertSuccess(get); - String initialCollectionName = TimeRoutedAlias.formatCollectionNameFromInstant(aliasName, start); + String initialCollectionName = + TimeRoutedAlias.formatCollectionNameFromInstant(aliasName, start); assertCollectionExists(initialCollectionName); // Test created collection: - final DocCollection coll = solrClient.getClusterStateProvider().getState(initialCollectionName).get(); - //TODO how do we assert the configSet ? + final DocCollection coll = + solrClient.getClusterStateProvider().getState(initialCollectionName).get(); + // TODO how do we assert the configSet ? 
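// ---- Hedged editorial sketch, not part of the upstream patch: both testV2 and testV1 above
// derive the first collection name via TimeRoutedAlias.formatCollectionNameFromInstant(aliasName,
// instant). A minimal approximation of that naming scheme, assuming the "__TRA__" separator
// returned by TIME.getSeparatorPrefix() and a UTC date-only rendering for day-aligned starts;
// the real method may truncate finer-grained instants differently.
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;

class TraNameSketch {
  private static final DateTimeFormatter DAY =
      DateTimeFormatter.ofPattern("yyyy-MM-dd").withZone(ZoneOffset.UTC);

  // e.g. firstCollectionName("myAlias", Instant.parse("2018-01-15T00:00:00Z"))
  //      -> "myAlias__TRA__2018-01-15"
  static String firstCollectionName(String aliasName, Instant start) {
    return aliasName + "__TRA__" + DAY.format(start);
  }
}
// ---- end sketch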
assertEquals(CompositeIdRouter.class, coll.getRouter().getClass()); - assertEquals("foo_s", ((Map)coll.get("router")).get("field")); + assertEquals("foo_s", ((Map) coll.get("router")).get("field")); assertEquals(1, coll.getSlices().size()); // numShards assertEquals(2, coll.getReplicationFactor().intValue()); // num replicas @@ -205,9 +228,9 @@ public void testV1() throws Exception { assertNotNull(alias); Map meta = aliases.getCollectionAliasProperties(aliasName); assertNotNull(meta); - assertEquals("evt_dt",meta.get("router.field")); - assertEquals("_default",meta.get("create-collection.collection.configName")); - assertEquals(null,meta.get("start")); + assertEquals("evt_dt", meta.get("router.field")); + assertEquals("_default", meta.get("create-collection.collection.configName")); + assertEquals(null, meta.get("start")); } // TZ should not affect the first collection name if absolute date given for start @@ -216,58 +239,73 @@ public void testTimezoneAbsoluteDate() throws Exception { final String aliasName = getSaferTestName(); try (SolrClient client = getCloudSolrClient(cluster)) { CollectionAdminRequest.createTimeRoutedAlias( - aliasName, - "2018-01-15T00:00:00Z", - "+30MINUTE", - "evt_dt", - CollectionAdminRequest.createCollection("_ignored_", "_default", 1, 1) - ) + aliasName, + "2018-01-15T00:00:00Z", + "+30MINUTE", + "evt_dt", + CollectionAdminRequest.createCollection("_ignored_", "_default", 1, 1)) .setTimeZone(TimeZone.getTimeZone("GMT-10")) .process(client); } - assertCollectionExists(aliasName + TIME.getSeparatorPrefix() +"2018-01-15"); + assertCollectionExists(aliasName + TIME.getSeparatorPrefix() + "2018-01-15"); } @Test public void testCollectionNamesMustBeAbsent() throws Exception { - CollectionAdminRequest.createCollection("collection1meta", "_default", 2, 1).process(cluster.getSolrClient()); - CollectionAdminRequest.createCollection("collection2meta", "_default", 1, 1).process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection("collection1meta", "_default", 2, 1) + .process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection("collection2meta", "_default", 1, 1) + .process(cluster.getSolrClient()); cluster.waitForActiveCollection("collection1meta", 2, 2); cluster.waitForActiveCollection("collection2meta", 1, 1); - waitForState("Expected collection1 to be created with 2 shards and 1 replica", "collection1meta", clusterShape(2, 2)); - waitForState("Expected collection2 to be created with 1 shard and 1 replica", "collection2meta", clusterShape(1, 1)); + waitForState( + "Expected collection1 to be created with 2 shards and 1 replica", + "collection1meta", + clusterShape(2, 2)); + waitForState( + "Expected collection2 to be created with 1 shard and 1 replica", + "collection2meta", + clusterShape(1, 1)); ZkStateReader zkStateReader = cluster.getSolrClient().getZkStateReader(); zkStateReader.createClusterStateWatchersAndUpdate(); final String baseUrl = cluster.getRandomJetty(random()).getBaseUrl().toString(); - HttpGet get = new HttpGet(baseUrl + "/admin/collections?action=CREATEALIAS" + - "&wt=json" + - "&name=" + getTestName() + - "&collections=collection1meta,collection2meta" + - "&router.field=evt_dt" + - "&router.name=time" + - "&router.start=2018-01-15T00:00:00Z" + - "&router.interval=%2B30MINUTE" + - "&create-collection.collection.configName=_default" + - "&create-collection.numShards=1"); + HttpGet get = + new HttpGet( + baseUrl + + "/admin/collections?action=CREATEALIAS" + + "&wt=json" + + "&name=" + + getTestName() + + 
"&collections=collection1meta,collection2meta" + + "&router.field=evt_dt" + + "&router.name=time" + + "&router.start=2018-01-15T00:00:00Z" + + "&router.interval=%2B30MINUTE" + + "&create-collection.collection.configName=_default" + + "&create-collection.numShards=1"); assertFailure(get, "Collections cannot be specified"); } @Test public void testAliasNameMustBeValid() throws Exception { final String baseUrl = cluster.getRandomJetty(random()).getBaseUrl().toString(); - HttpGet get = new HttpGet(baseUrl + "/admin/collections?action=CREATEALIAS" + - "&wt=json" + - "&name=735741!45" + // ! not allowed - "&router.field=evt_dt" + - "&router.name=time" + - "&router.start=2018-01-15T00:00:00Z" + - "&router.interval=%2B30MINUTE" + - "&create-collection.collection.configName=_default" + - "&create-collection.numShards=1"); + HttpGet get = + new HttpGet( + baseUrl + + "/admin/collections?action=CREATEALIAS" + + "&wt=json" + + "&name=735741!45" + + // ! not allowed + "&router.field=evt_dt" + + "&router.name=time" + + "&router.start=2018-01-15T00:00:00Z" + + "&router.interval=%2B30MINUTE" + + "&create-collection.collection.configName=_default" + + "&create-collection.numShards=1"); assertFailure(get, "Invalid alias"); } @@ -275,15 +313,20 @@ public void testAliasNameMustBeValid() throws Exception { public void testRandomRouterNameFails() throws Exception { final String aliasName = getSaferTestName(); final String baseUrl = cluster.getRandomJetty(random()).getBaseUrl().toString(); - HttpGet get = new HttpGet(baseUrl + "/admin/collections?action=CREATEALIAS" + - "&wt=json" + - "&name=" + aliasName + - "&router.field=evt_dt" + - "&router.name=tiafasme" + //bad - "&router.start=2018-01-15T00:00:00Z" + - "&router.interval=%2B30MINUTE" + - "&create-collection.collection.configName=_default" + - "&create-collection.numShards=1"); + HttpGet get = + new HttpGet( + baseUrl + + "/admin/collections?action=CREATEALIAS" + + "&wt=json" + + "&name=" + + aliasName + + "&router.field=evt_dt" + + "&router.name=tiafasme" + + // bad + "&router.start=2018-01-15T00:00:00Z" + + "&router.interval=%2B30MINUTE" + + "&create-collection.collection.configName=_default" + + "&create-collection.numShards=1"); assertFailure(get, " is not in supported types, "); } @@ -291,15 +334,20 @@ public void testRandomRouterNameFails() throws Exception { public void testTimeStampWithMsFails() throws Exception { final String aliasName = getSaferTestName(); final String baseUrl = cluster.getRandomJetty(random()).getBaseUrl().toString(); - HttpGet get = new HttpGet(baseUrl + "/admin/collections?action=CREATEALIAS" + - "&wt=json" + - "&name=" + aliasName + - "&router.field=evt_dt" + - "&router.name=time" + - "&router.start=2018-01-15T00:00:00.001Z" + // bad: no milliseconds permitted - "&router.interval=%2B30MINUTE" + - "&create-collection.collection.configName=_default" + - "&create-collection.numShards=1"); + HttpGet get = + new HttpGet( + baseUrl + + "/admin/collections?action=CREATEALIAS" + + "&wt=json" + + "&name=" + + aliasName + + "&router.field=evt_dt" + + "&router.name=time" + + "&router.start=2018-01-15T00:00:00.001Z" + + // bad: no milliseconds permitted + "&router.interval=%2B30MINUTE" + + "&create-collection.collection.configName=_default" + + "&create-collection.numShards=1"); assertFailure(get, "Date or date math for start time includes milliseconds"); } @@ -307,16 +355,21 @@ public void testTimeStampWithMsFails() throws Exception { public void testBadDateMathIntervalFails() throws Exception { final String aliasName = 
getSaferTestName(); final String baseUrl = cluster.getRandomJetty(random()).getBaseUrl().toString(); - HttpGet get = new HttpGet(baseUrl + "/admin/collections?action=CREATEALIAS" + - "&wt=json" + - "&name=" + aliasName + - "&router.field=evt_dt" + - "&router.name=time" + - "&router.start=2018-01-15T00:00:00Z" + - "&router.interval=%2B30MINUTEx" + // bad; trailing 'x' - "&router.maxFutureMs=60000" + - "&create-collection.collection.configName=_default" + - "&create-collection.numShards=1"); + HttpGet get = + new HttpGet( + baseUrl + + "/admin/collections?action=CREATEALIAS" + + "&wt=json" + + "&name=" + + aliasName + + "&router.field=evt_dt" + + "&router.name=time" + + "&router.start=2018-01-15T00:00:00Z" + + "&router.interval=%2B30MINUTEx" + + // bad; trailing 'x' + "&router.maxFutureMs=60000" + + "&create-collection.collection.configName=_default" + + "&create-collection.numShards=1"); assertFailure(get, "Unit not recognized"); } @@ -324,16 +377,21 @@ public void testBadDateMathIntervalFails() throws Exception { public void testNegativeFutureFails() throws Exception { final String aliasName = getSaferTestName(); final String baseUrl = cluster.getRandomJetty(random()).getBaseUrl().toString(); - HttpGet get = new HttpGet(baseUrl + "/admin/collections?action=CREATEALIAS" + - "&wt=json" + - "&name=" + aliasName + - "&router.field=evt_dt" + - "&router.name=time" + - "&router.start=2018-01-15T00:00:00Z" + - "&router.interval=%2B30MINUTE" + - "&router.maxFutureMs=-60000" + // bad: negative - "&create-collection.collection.configName=_default" + - "&create-collection.numShards=1"); + HttpGet get = + new HttpGet( + baseUrl + + "/admin/collections?action=CREATEALIAS" + + "&wt=json" + + "&name=" + + aliasName + + "&router.field=evt_dt" + + "&router.name=time" + + "&router.start=2018-01-15T00:00:00Z" + + "&router.interval=%2B30MINUTE" + + "&router.maxFutureMs=-60000" + + // bad: negative + "&create-collection.collection.configName=_default" + + "&create-collection.numShards=1"); assertFailure(get, "must be >= 0"); } @@ -341,17 +399,22 @@ public void testNegativeFutureFails() throws Exception { public void testUnParseableFutureFails() throws Exception { final String aliasName = "testAlias"; final String baseUrl = cluster.getRandomJetty(random()).getBaseUrl().toString(); - HttpGet get = new HttpGet(baseUrl + "/admin/collections?action=CREATEALIAS" + - "&wt=json" + - "&name=" + aliasName + - "&router.field=evt_dt" + - "&router.name=time" + - "&router.start=2018-01-15T00:00:00Z" + - "&router.interval=%2B30MINUTE" + - "&router.maxFutureMs=SixtyThousandMilliseconds" + // bad - "&create-collection.collection.configName=_default" + - "&create-collection.numShards=1"); - assertFailure(get, "SixtyThousandMilliseconds"); //TODO improve SolrParams.getLong + HttpGet get = + new HttpGet( + baseUrl + + "/admin/collections?action=CREATEALIAS" + + "&wt=json" + + "&name=" + + aliasName + + "&router.field=evt_dt" + + "&router.name=time" + + "&router.start=2018-01-15T00:00:00Z" + + "&router.interval=%2B30MINUTE" + + "&router.maxFutureMs=SixtyThousandMilliseconds" + + // bad + "&create-collection.collection.configName=_default" + + "&create-collection.numShards=1"); + assertFailure(get, "SixtyThousandMilliseconds"); // TODO improve SolrParams.getLong } private void assertSuccess(HttpUriRequest msg) throws IOException { @@ -369,7 +432,8 @@ private void assertFailure(HttpUriRequest msg, String expectedErrorSubstring) th try (CloseableHttpResponse response = httpClient.execute(msg)) { assertEquals(400, 
response.getStatusLine().getStatusCode()); String entity = EntityUtils.toString(response.getEntity()); - assertTrue("Didn't find expected error string within response: " + entity, + assertTrue( + "Didn't find expected error string within response: " + entity, entity.contains(expectedErrorSubstring)); } } @@ -380,8 +444,9 @@ private void assertCollectionExists(String name) throws IOException, SolrServerE assertNotNull(name + " not found", solrClient.getClusterStateProvider().getState(name)); // note: could also do: - //List collections = CollectionAdminRequest.listCollections(solrClient); + // List collections = CollectionAdminRequest.listCollections(solrClient); } - // not testing collection parameters, those should inherit error checking from the collection creation code. + // not testing collection parameters, those should inherit error checking from the collection + // creation code. } diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java index 325c5fbf486..f869418473d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java @@ -19,7 +19,6 @@ import java.lang.invoke.MethodHandles; import java.nio.file.Files; import java.util.concurrent.TimeUnit; - import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -43,9 +42,7 @@ public class DeleteInactiveReplicaTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { - configureCluster(4) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(4).addConfig("conf", configset("cloud-minimal")).configure(); } @Test @@ -57,9 +54,12 @@ public void deleteInactiveReplicaTest() throws Exception { CollectionAdminRequest.createCollection(collectionName, "conf", numShards, replicationFactor) .process(cluster.getSolrClient()); - waitForState("Expected a cluster of 2 shards and 2 replicas", collectionName, (n, c) -> { - return DocCollection.isFullyActive(n, c, numShards, replicationFactor); - }); + waitForState( + "Expected a cluster of 2 shards and 2 replicas", + collectionName, + (n, c) -> { + return DocCollection.isFullyActive(n, c, numShards, replicationFactor); + }); DocCollection collectionState = getCollectionState(collectionName); @@ -72,38 +72,50 @@ public void deleteInactiveReplicaTest() throws Exception { } cluster.stopJettySolrRunner(jetty); - waitForState("Expected replica " + replica.getName() + " on down node to be removed from cluster state", collectionName, (n, c) -> { - Replica r = c.getReplica(replica.getCoreName()); - return r == null || r.getState() != Replica.State.ACTIVE; - }); + waitForState( + "Expected replica " + replica.getName() + " on down node to be removed from cluster state", + collectionName, + (n, c) -> { + Replica r = c.getReplica(replica.getCoreName()); + return r == null || r.getState() != Replica.State.ACTIVE; + }); if (log.isInfoEnabled()) { log.info("Removing replica {}/{} ", shard.getName(), replica.getName()); } CollectionAdminRequest.deleteReplica(collectionName, shard.getName(), replica.getName()) .process(cluster.getSolrClient()); - waitForState("Expected deleted replica " + replica.getName() + " to be removed from cluster state", collectionName, (n, c) -> { - return c.getReplica(replica.getCoreName()) == null; - }); 
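// ---- Hedged editorial sketch, not part of the upstream patch: the waitForState(...) reflows
// throughout these hunks all share one contract -- poll the collection's state until a
// (liveNodes, DocCollection) predicate returns true, or fail with the given message on timeout.
// A self-contained single-shot version of the predicate check (the real SolrCloudTestCase
// helper, assumed here, re-reads state from ZooKeeper and retries until a timeout):
import java.util.Set;
import java.util.function.BiPredicate;
import org.apache.solr.common.cloud.DocCollection;

class WaitForStateSketch {
  static void assertState(
      String message,
      Set<String> liveNodes,
      DocCollection coll, // freshly re-read on every poll in the real helper
      BiPredicate<Set<String>, DocCollection> predicate) {
    if (!predicate.test(liveNodes, coll)) {
      throw new AssertionError(message);
    }
  }
}
// ---- end sketch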
+ waitForState( + "Expected deleted replica " + replica.getName() + " to be removed from cluster state", + collectionName, + (n, c) -> { + return c.getReplica(replica.getCoreName()) == null; + }); cluster.startJettySolrRunner(jetty); log.info("restarted jetty"); TimeOut timeOut = new TimeOut(60, TimeUnit.SECONDS, TimeSource.NANO_TIME); - timeOut.waitFor("Expected data dir and instance dir of " + replica.getName() + " is deleted", () - -> !Files.exists(replicaCd.getInstanceDir()) && !FileUtils.fileExists(replicaCd.getDataDir())); + timeOut.waitFor( + "Expected data dir and instance dir of " + replica.getName() + " is deleted", + () -> + !Files.exists(replicaCd.getInstanceDir()) + && !FileUtils.fileExists(replicaCd.getDataDir())); // Check that we can't create a core with no coreNodeName try (SolrClient queryClient = getHttpSolrClient(jetty.getBaseUrl().toString())) { - Exception e = expectThrows(Exception.class, () -> { - CoreAdminRequest.Create createRequest = new CoreAdminRequest.Create(); - createRequest.setCoreName("testcore"); - createRequest.setCollection(collectionName); - createRequest.setShardId("shard2"); - queryClient.request(createRequest); - }); - assertTrue("Unexpected error message: " + e.getMessage(), e.getMessage().contains("coreNodeName missing")); - + Exception e = + expectThrows( + Exception.class, + () -> { + CoreAdminRequest.Create createRequest = new CoreAdminRequest.Create(); + createRequest.setCoreName("testcore"); + createRequest.setCollection(collectionName); + createRequest.setShardId("shard2"); + queryClient.request(createRequest); + }); + assertTrue( + "Unexpected error message: " + e.getMessage(), + e.getMessage().contains("coreNodeName missing")); } } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteLastCustomShardedReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteLastCustomShardedReplicaTest.java index 13d40e1dd6e..b6883a22af1 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DeleteLastCustomShardedReplicaTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DeleteLastCustomShardedReplicaTest.java @@ -26,9 +26,7 @@ public class DeleteLastCustomShardedReplicaTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { - configureCluster(2) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(2).addConfig("conf", configset("cloud-minimal")).configure(); } @Test @@ -45,11 +43,11 @@ public void test() throws Exception { CollectionAdminRequest.deleteReplica(collectionName, "a", replica.getName()) .process(cluster.getSolrClient()); - waitForState("Expected shard 'a' to have no replicas", collectionName, (n, c) -> { - return c.getSlice("a") == null || c.getSlice("a").getReplicas().size() == 0; - }); - + waitForState( + "Expected shard 'a' to have no replicas", + collectionName, + (n, c) -> { + return c.getSlice("a") == null || c.getSlice("a").getReplicas().size() == 0; + }); } - } - diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteNodeTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteNodeTest.java index 84a5eced095..fd41bdfef3e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DeleteNodeTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DeleteNodeTest.java @@ -17,13 +17,11 @@ package org.apache.solr.cloud; - import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Set; - import org.apache.solr.client.solrj.impl.CloudSolrClient; import 
org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.response.RequestStatusState; @@ -44,7 +42,8 @@ public class DeleteNodeTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { configureCluster(6) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-dynamic").resolve("conf")) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-dynamic").resolve("conf")) .configure(); } @@ -60,14 +59,14 @@ public void test() throws Exception { Set liveNodes = state.getLiveNodes(); ArrayList l = new ArrayList<>(liveNodes); Collections.shuffle(l, random()); - CollectionAdminRequest.Create create = pickRandom( - CollectionAdminRequest.createCollection(coll, "conf1", 5, 2, 0, 0), - CollectionAdminRequest.createCollection(coll, "conf1", 5, 1, 1, 0), - CollectionAdminRequest.createCollection(coll, "conf1", 5, 0, 1, 1), - // check RF=1 - CollectionAdminRequest.createCollection(coll, "conf1", 5, 1, 0, 0), - CollectionAdminRequest.createCollection(coll, "conf1", 5, 0, 1, 0) - ); + CollectionAdminRequest.Create create = + pickRandom( + CollectionAdminRequest.createCollection(coll, "conf1", 5, 2, 0, 0), + CollectionAdminRequest.createCollection(coll, "conf1", 5, 1, 1, 0), + CollectionAdminRequest.createCollection(coll, "conf1", 5, 0, 1, 1), + // check RF=1 + CollectionAdminRequest.createCollection(coll, "conf1", 5, 1, 0, 0), + CollectionAdminRequest.createCollection(coll, "conf1", 5, 0, 1, 0)); create.setCreateNodeSet(StrUtils.join(l, ',')); cloudClient.request(create); state = cloudClient.getZkStateReader().getClusterState(); @@ -79,13 +78,14 @@ public void test() throws Exception { List replicas = docColl.getReplicas(node2bdecommissioned); if (replicas != null) { for (Replica replica : replicas) { - String shard = docColl.getShardId(node2bdecommissioned, replica.getStr(ZkStateReader.CORE_NAME_PROP)); + String shard = + docColl.getShardId(node2bdecommissioned, replica.getStr(ZkStateReader.CORE_NAME_PROP)); Slice slice = docColl.getSlice(shard); boolean hasOtherNonPullReplicas = false; - for (Replica r: slice.getReplicas()) { - if (!r.getName().equals(replica.getName()) && - !r.getNodeName().equals(node2bdecommissioned) && - r.getType() != Replica.Type.PULL) { + for (Replica r : slice.getReplicas()) { + if (!r.getName().equals(replica.getName()) + && !r.getNodeName().equals(node2bdecommissioned) + && r.getType() != Replica.Type.PULL) { hasOtherNonPullReplicas = true; break; } @@ -97,17 +97,21 @@ public void test() throws Exception { } } new CollectionAdminRequest.DeleteNode(node2bdecommissioned).processAsync("003", cloudClient); - CollectionAdminRequest.RequestStatus requestStatus = CollectionAdminRequest.requestStatus("003"); + CollectionAdminRequest.RequestStatus requestStatus = + CollectionAdminRequest.requestStatus("003"); CollectionAdminRequest.RequestStatusResponse rsp = null; for (int i = 0; i < 200; i++) { rsp = requestStatus.process(cloudClient); - if (rsp.getRequestStatus() == RequestStatusState.FAILED || rsp.getRequestStatus() == RequestStatusState.COMPLETED) { + if (rsp.getRequestStatus() == RequestStatusState.FAILED + || rsp.getRequestStatus() == RequestStatusState.COMPLETED) { break; } Thread.sleep(50); } if (log.isInfoEnabled()) { - log.info("####### DocCollection after: {}", cloudClient.getZkStateReader().getClusterState().getCollection(coll)); + log.info( + "####### DocCollection after: {}", + cloudClient.getZkStateReader().getClusterState().getCollection(coll)); } if 
(shouldFail) { assertTrue(String.valueOf(rsp), rsp.getRequestStatus() == RequestStatusState.FAILED); diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java index 3255a3cde09..5c533c6108d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java @@ -16,6 +16,8 @@ */ package org.apache.solr.cloud; +import static org.apache.solr.common.cloud.Replica.State.DOWN; + import java.lang.invoke.MethodHandles; import java.nio.file.Files; import java.nio.file.Path; @@ -27,7 +29,6 @@ import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; - import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.CollectionAdminRequest.Create; @@ -53,9 +54,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.common.cloud.Replica.State.DOWN; - - public class DeleteReplicaTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -64,23 +62,24 @@ public class DeleteReplicaTest extends SolrCloudTestCase { public static void setupCluster() throws Exception { System.setProperty("solr.zkclienttimeout", "45000"); System.setProperty("distribUpdateSoTimeout", "15000"); - } - + @Before @Override public void setUp() throws Exception { super.setUp(); System.setProperty("solr.zkclienttimeout", "45000"); System.setProperty("distribUpdateSoTimeout", "15000"); - + // these tests need to be isolated, so we don't share the minicluster configureCluster(4) .addConfig("conf", configset("cloud-minimal")) - .useOtherCollectionConfigSetExecution() // Some tests (this one) use "the other" cluster Collection API execution strategy to increase coverage + .useOtherCollectionConfigSetExecution() + // Some tests (this one) use "the other" cluster Collection API execution strategy to + // increase coverage .configure(); } - + @After @Override public void tearDown() throws Exception { @@ -89,86 +88,112 @@ public void tearDown() throws Exception { } @Test - // commented out on: 01-Apr-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // annotated on: 24-Dec-2018 public void deleteLiveReplicaTest() throws Exception { final String collectionName = "delLiveColl"; Create req = CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2); req.process(cluster.getSolrClient()); - + cluster.waitForActiveCollection(collectionName, 2, 4); DocCollection state = getCollectionState(collectionName); Slice shard = getRandomShard(state); - + // don't choose the leader to shutdown, it just complicates things unnecessarily - Replica replica = getRandomReplica(shard, (r) -> - ( r.getState() == Replica.State.ACTIVE && - ! 
r.equals(shard.getLeader()))); + Replica replica = + getRandomReplica( + shard, (r) -> (r.getState() == Replica.State.ACTIVE && !r.equals(shard.getLeader()))); CoreStatus coreStatus = getCoreStatus(replica); Path dataDir = Paths.get(coreStatus.getDataDirectory()); - Exception e = expectThrows(Exception.class, () -> { - CollectionAdminRequest.deleteReplica(collectionName, shard.getName(), replica.getName()) - .setOnlyIfDown(true) - .process(cluster.getSolrClient()); - }); - assertTrue("Unexpected error message: " + e.getMessage(), e.getMessage().contains("state is 'active'")); - assertTrue("Data directory for " + replica.getName() + " should not have been deleted", Files.exists(dataDir)); + Exception e = + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.deleteReplica( + collectionName, shard.getName(), replica.getName()) + .setOnlyIfDown(true) + .process(cluster.getSolrClient()); + }); + assertTrue( + "Unexpected error message: " + e.getMessage(), + e.getMessage().contains("state is 'active'")); + assertTrue( + "Data directory for " + replica.getName() + " should not have been deleted", + Files.exists(dataDir)); JettySolrRunner replicaJetty = cluster.getReplicaJetty(replica); - ZkStateReaderAccessor accessor = new ZkStateReaderAccessor(replicaJetty.getCoreContainer().getZkController().getZkStateReader()); + ZkStateReaderAccessor accessor = + new ZkStateReaderAccessor( + replicaJetty.getCoreContainer().getZkController().getZkStateReader()); + + final long preDeleteWatcherCount = + countUnloadCoreOnDeletedWatchers(accessor.getStateWatchers(collectionName)); - final long preDeleteWatcherCount = countUnloadCoreOnDeletedWatchers - (accessor.getStateWatchers(collectionName)); - CollectionAdminRequest.deleteReplica(collectionName, shard.getName(), replica.getName()) .process(cluster.getSolrClient()); - waitForState("Expected replica " + replica.getName() + " to have been removed", collectionName, (n, c) -> { - Slice testShard = c.getSlice(shard.getName()); - return testShard.getReplica(replica.getName()) == null; - }); - + waitForState( + "Expected replica " + replica.getName() + " to have been removed", + collectionName, + (n, c) -> { + Slice testShard = c.getSlice(shard.getName()); + return testShard.getReplica(replica.getName()) == null; + }); + // the core should no longer have a watch collection state since it was removed TimeOut timeOut = new TimeOut(60, TimeUnit.SECONDS, TimeSource.NANO_TIME); - timeOut.waitFor("Waiting for core's watcher to be removed", () -> { - final long postDeleteWatcherCount = countUnloadCoreOnDeletedWatchers - (accessor.getStateWatchers(collectionName)); - log.info("preDeleteWatcherCount={} vs postDeleteWatcherCount={}", - preDeleteWatcherCount, postDeleteWatcherCount); - return (preDeleteWatcherCount - 1L == postDeleteWatcherCount); - }); - - assertFalse("Data directory for " + replica.getName() + " should have been removed", Files.exists(dataDir)); - + timeOut.waitFor( + "Waiting for core's watcher to be removed", + () -> { + final long postDeleteWatcherCount = + countUnloadCoreOnDeletedWatchers(accessor.getStateWatchers(collectionName)); + log.info( + "preDeleteWatcherCount={} vs postDeleteWatcherCount={}", + preDeleteWatcherCount, + postDeleteWatcherCount); + return (preDeleteWatcherCount - 1L == postDeleteWatcherCount); + }); + + assertFalse( + "Data directory for " + replica.getName() + " should have been removed", + Files.exists(dataDir)); } @Test public void 
deleteReplicaAndVerifyDirectoryCleanup() throws Exception { final String collectionName = "deletereplica_test"; - CollectionAdminRequest.createCollection(collectionName, "conf", 1, 2).process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection(collectionName, "conf", 1, 2) + .process(cluster.getSolrClient()); - Replica leader = cluster.getSolrClient().getZkStateReader().getLeaderRetry(collectionName, "shard1"); + Replica leader = + cluster.getSolrClient().getZkStateReader().getLeaderRetry(collectionName, "shard1"); - //Confirm that the instance and data directory exist + // Confirm that the instance and data directory exist CoreStatus coreStatus = getCoreStatus(leader); - assertTrue("Instance directory doesn't exist", Files.exists(Paths.get(coreStatus.getInstanceDirectory()))); - assertTrue("DataDirectory doesn't exist", Files.exists(Paths.get(coreStatus.getDataDirectory()))); + assertTrue( + "Instance directory doesn't exist", + Files.exists(Paths.get(coreStatus.getInstanceDirectory()))); + assertTrue( + "DataDirectory doesn't exist", Files.exists(Paths.get(coreStatus.getDataDirectory()))); - CollectionAdminRequest.deleteReplica(collectionName, "shard1",leader.getName()) + CollectionAdminRequest.deleteReplica(collectionName, "shard1", leader.getName()) .process(cluster.getSolrClient()); - Replica newLeader = cluster.getSolrClient().getZkStateReader().getLeaderRetry(collectionName, "shard1"); + Replica newLeader = + cluster.getSolrClient().getZkStateReader().getLeaderRetry(collectionName, "shard1"); assertFalse(leader.equals(newLeader)); - //Confirm that the instance and data directory were deleted by default - assertFalse("Instance directory still exists", Files.exists(Paths.get(coreStatus.getInstanceDirectory()))); - assertFalse("DataDirectory still exists", Files.exists(Paths.get(coreStatus.getDataDirectory()))); + // Confirm that the instance and data directory were deleted by default + assertFalse( + "Instance directory still exists", + Files.exists(Paths.get(coreStatus.getInstanceDirectory()))); + assertFalse( + "DataDirectory still exists", Files.exists(Paths.get(coreStatus.getDataDirectory()))); } @Test @@ -176,38 +201,44 @@ public void deleteReplicaByCount() throws Exception { final String collectionName = "deleteByCount"; - CollectionAdminRequest.createCollection(collectionName, "conf", 1, 3).process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection(collectionName, "conf", 1, 3) + .process(cluster.getSolrClient()); waitForState("Expected a single shard with three replicas", collectionName, clusterShape(1, 3)); - CollectionAdminRequest.deleteReplicasFromShard(collectionName, "shard1", 2).process(cluster.getSolrClient()); - waitForState("Expected a single shard with a single replica", collectionName, clusterShape(1, 1)); - - SolrException e = expectThrows(SolrException.class, - "Can't delete the last replica by count", - () -> CollectionAdminRequest.deleteReplicasFromShard(collectionName, "shard1", 1).process(cluster.getSolrClient()) - ); + CollectionAdminRequest.deleteReplicasFromShard(collectionName, "shard1", 2) + .process(cluster.getSolrClient()); + waitForState( + "Expected a single shard with a single replica", collectionName, clusterShape(1, 1)); + + SolrException e = + expectThrows( + SolrException.class, + "Can't delete the last replica by count", + () -> + CollectionAdminRequest.deleteReplicasFromShard(collectionName, "shard1", 1) + .process(cluster.getSolrClient())); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); 
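// ---- Hedged editorial sketch, not part of the upstream patch: the expectThrows(...) blocks in
// these tests all follow one shape -- run a request expected to fail, capture the exception, then
// assert on its error code and message. Distilled under the assumption of
// LuceneTestCase.expectThrows-style semantics; names here are illustrative only.
import org.apache.solr.common.SolrException;

class ExpectThrowsSketch {
  interface ThrowingRunnable {
    void run() throws Throwable;
  }

  static SolrException expectBadRequest(ThrowingRunnable request) {
    try {
      request.run();
    } catch (SolrException e) {
      if (e.code() != SolrException.ErrorCode.BAD_REQUEST.code) {
        throw new AssertionError("expected 400, got " + e.code());
      }
      return e; // caller goes on to assert on e.getMessage(), as the test above does
    } catch (Throwable t) {
      throw new AssertionError("expected a SolrException", t);
    }
    throw new AssertionError("request should have been rejected");
  }
}
// ---- end sketch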
assertTrue(e.getMessage().contains("There is only one replica available")); DocCollection docCollection = getCollectionState(collectionName); // We know that since leaders are preserved, PULL replicas should not be left alone in the shard - assertEquals(0, docCollection.getSlice("shard1").getReplicas(EnumSet.of(Replica.Type.PULL)).size()); + assertEquals( + 0, docCollection.getSlice("shard1").getReplicas(EnumSet.of(Replica.Type.PULL)).size()); } @Test - // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // annotated on: 24-Dec-2018 public void deleteReplicaByCountForAllShards() throws Exception { final String collectionName = "deleteByCountNew"; Create req = CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2); req.process(cluster.getSolrClient()); - + cluster.waitForActiveCollection(collectionName, 2, 4); - + waitForState("Expected two shards with two replicas each", collectionName, clusterShape(2, 4)); - CollectionAdminRequest.deleteReplicasFromAllShards(collectionName, 1).process(cluster.getSolrClient()); + CollectionAdminRequest.deleteReplicasFromAllShards(collectionName, 1) + .process(cluster.getSolrClient()); waitForState("Expected two shards with one replica each", collectionName, clusterShape(2, 2)); - } @Test @@ -215,9 +246,9 @@ public void deleteReplicaFromClusterState() throws Exception { final String collectionName = "deleteFromClusterStateCollection"; CollectionAdminRequest.createCollection(collectionName, "conf", 1, 3) .process(cluster.getSolrClient()); - + cluster.waitForActiveCollection(collectionName, 1, 3); - + cluster.getSolrClient().add(collectionName, new SolrInputDocument("id", "1")); cluster.getSolrClient().add(collectionName, new SolrInputDocument("id", "2")); cluster.getSolrClient().commit(collectionName); @@ -227,71 +258,85 @@ public void deleteReplicaFromClusterState() throws Exception { Slice shard = getCollectionState(collectionName).getSlice("shard1"); // don't choose the leader to shutdown, it just complicates things unnecessarily - Replica replica = getRandomReplica(shard, (r) -> - ( r.getState() == Replica.State.ACTIVE && - ! 
r.equals(shard.getLeader()))); - + Replica replica = + getRandomReplica( + shard, (r) -> (r.getState() == Replica.State.ACTIVE && !r.equals(shard.getLeader()))); + JettySolrRunner replicaJetty = cluster.getReplicaJetty(replica); ZkController replicaZkController = replicaJetty.getCoreContainer().getZkController(); - ZkStateReaderAccessor accessor = new ZkStateReaderAccessor(replicaZkController.getZkStateReader()); - - final long preDeleteWatcherCount = countUnloadCoreOnDeletedWatchers - (accessor.getStateWatchers(collectionName)); - - ZkNodeProps m = new ZkNodeProps( - Overseer.QUEUE_OPERATION, OverseerAction.DELETECORE.toLower(), - ZkStateReader.CORE_NAME_PROP, replica.getCoreName(), - ZkStateReader.NODE_NAME_PROP, replica.getNodeName(), - ZkStateReader.BASE_URL_PROP, replicaZkController.getZkStateReader().getBaseUrlForNodeName(replica.getNodeName()), - ZkStateReader.COLLECTION_PROP, collectionName, - ZkStateReader.CORE_NODE_NAME_PROP, replica.getName()); + ZkStateReaderAccessor accessor = + new ZkStateReaderAccessor(replicaZkController.getZkStateReader()); + + final long preDeleteWatcherCount = + countUnloadCoreOnDeletedWatchers(accessor.getStateWatchers(collectionName)); + + ZkNodeProps m = + new ZkNodeProps( + Overseer.QUEUE_OPERATION, OverseerAction.DELETECORE.toLower(), + ZkStateReader.CORE_NAME_PROP, replica.getCoreName(), + ZkStateReader.NODE_NAME_PROP, replica.getNodeName(), + ZkStateReader.BASE_URL_PROP, + replicaZkController.getZkStateReader().getBaseUrlForNodeName(replica.getNodeName()), + ZkStateReader.COLLECTION_PROP, collectionName, + ZkStateReader.CORE_NODE_NAME_PROP, replica.getName()); if (replicaZkController.getDistributedClusterStateUpdater().isDistributedStateUpdate()) { - cluster.getOpenOverseer().getDistributedClusterStateUpdater().doSingleStateUpdate( - DistributedClusterStateUpdater.MutatingCommand.SliceRemoveReplica, m, - cluster.getOpenOverseer().getSolrCloudManager(), - cluster.getOpenOverseer().getZkStateReader()); + cluster + .getOpenOverseer() + .getDistributedClusterStateUpdater() + .doSingleStateUpdate( + DistributedClusterStateUpdater.MutatingCommand.SliceRemoveReplica, + m, + cluster.getOpenOverseer().getSolrCloudManager(), + cluster.getOpenOverseer().getZkStateReader()); } else { cluster.getOpenOverseer().getStateUpdateQueue().offer(Utils.toJSON(m)); } - waitForState("Timeout waiting for replica get deleted", collectionName, - (liveNodes, collectionState) -> collectionState.getSlice("shard1").getReplicas().size() == 2); + waitForState( + "Timeout waiting for replica get deleted", + collectionName, + (liveNodes, collectionState) -> + collectionState.getSlice("shard1").getReplicas().size() == 2); TimeOut timeOut = new TimeOut(60, TimeUnit.SECONDS, TimeSource.NANO_TIME); - timeOut.waitFor("Waiting for replica get unloaded", () -> - replicaJetty.getCoreContainer().getCoreDescriptor(replica.getCoreName()) == null - ); - + timeOut.waitFor( + "Waiting for replica get unloaded", + () -> replicaJetty.getCoreContainer().getCoreDescriptor(replica.getCoreName()) == null); + // the core should no longer have a watch collection state since it was removed timeOut = new TimeOut(60, TimeUnit.SECONDS, TimeSource.NANO_TIME); - timeOut.waitFor("Waiting for core's watcher to be removed", () -> { - final long postDeleteWatcherCount = countUnloadCoreOnDeletedWatchers - (accessor.getStateWatchers(collectionName)); - log.info("preDeleteWatcherCount={} vs postDeleteWatcherCount={}", - preDeleteWatcherCount, postDeleteWatcherCount); - return (preDeleteWatcherCount - 1L == 
postDeleteWatcherCount); - }); - + timeOut.waitFor( + "Waiting for core's watcher to be removed", + () -> { + final long postDeleteWatcherCount = + countUnloadCoreOnDeletedWatchers(accessor.getStateWatchers(collectionName)); + log.info( + "preDeleteWatcherCount={} vs postDeleteWatcherCount={}", + preDeleteWatcherCount, + postDeleteWatcherCount); + return (preDeleteWatcherCount - 1L == postDeleteWatcherCount); + }); + CollectionAdminRequest.deleteCollection(collectionName).process(cluster.getSolrClient()); } @Test @Slow - // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // annotated on: 24-Dec-2018 public void raceConditionOnDeleteAndRegisterReplica() throws Exception { final String collectionName = "raceDeleteReplicaCollection"; CollectionAdminRequest.createCollection(collectionName, "conf", 1, 2) .process(cluster.getSolrClient()); - + cluster.waitForActiveCollection(collectionName, 1, 2); - + waitForState("Expected 1x2 collections", collectionName, clusterShape(1, 2)); Slice shard1 = getCollectionState(collectionName).getSlice("shard1"); Replica leader = shard1.getLeader(); JettySolrRunner leaderJetty = getJettyForReplica(leader); - Replica replica1 = shard1.getReplicas(replica -> !replica.getName().equals(leader.getName())).get(0); + Replica replica1 = + shard1.getReplicas(replica -> !replica.getName().equals(leader.getName())).get(0); assertFalse(replica1.getName().equals(leader.getName())); JettySolrRunner replica1Jetty = getJettyForReplica(replica1); @@ -301,77 +346,103 @@ public void raceConditionOnDeleteAndRegisterReplica() throws Exception { Semaphore waitingForReplicaGetDeleted = new Semaphore(0); // for safety, we only want this hook get triggered one time AtomicInteger times = new AtomicInteger(0); - ZkContainer.testing_beforeRegisterInZk = cd -> { - if (cd.getCloudDescriptor() == null) return false; - if (replica1.getName().equals(cd.getCloudDescriptor().getCoreNodeName()) - && collectionName.equals(cd.getCloudDescriptor().getCollectionName())) { - if (times.incrementAndGet() > 1) { - return false; - } - log.info("Running delete core {}",cd); - - try { - ZkController replica1ZkController = replica1Jetty.getCoreContainer().getZkController(); - ZkNodeProps m = new ZkNodeProps( - Overseer.QUEUE_OPERATION, OverseerAction.DELETECORE.toLower(), - ZkStateReader.CORE_NAME_PROP, replica1.getCoreName(), - ZkStateReader.NODE_NAME_PROP, replica1.getNodeName(), - ZkStateReader.BASE_URL_PROP, replica1ZkController.getZkStateReader().getBaseUrlForNodeName(replica1.getNodeName()), - ZkStateReader.COLLECTION_PROP, collectionName, - ZkStateReader.CORE_NODE_NAME_PROP, replica1.getName()); - - if (replica1ZkController.getDistributedClusterStateUpdater().isDistributedStateUpdate()) { - cluster.getOpenOverseer().getDistributedClusterStateUpdater().doSingleStateUpdate( - DistributedClusterStateUpdater.MutatingCommand.SliceRemoveReplica, m, - cluster.getOpenOverseer().getSolrCloudManager(), - cluster.getOpenOverseer().getZkStateReader()); - } else { - cluster.getOpenOverseer().getStateUpdateQueue().offer(Utils.toJSON(m)); - } + ZkContainer.testing_beforeRegisterInZk = + cd -> { + if (cd.getCloudDescriptor() == null) return false; + if (replica1.getName().equals(cd.getCloudDescriptor().getCoreNodeName()) + && collectionName.equals(cd.getCloudDescriptor().getCollectionName())) { + if (times.incrementAndGet() > 1) { + return false; + } + log.info("Running delete core {}", cd); - boolean replicaDeleted = false; - TimeOut timeOut = new TimeOut(20, 
TimeUnit.SECONDS, TimeSource.NANO_TIME); - while (!timeOut.hasTimedOut()) { try { - ZkStateReader stateReader = replica1Jetty.getCoreContainer().getZkController().getZkStateReader(); - stateReader.forceUpdateCollection(collectionName); - Slice shard = stateReader.getClusterState().getCollection(collectionName).getSlice("shard1"); - if (shard.getReplicas().size() == 1) { - replicaDeleted = true; - waitingForReplicaGetDeleted.release(); - break; + ZkController replica1ZkController = + replica1Jetty.getCoreContainer().getZkController(); + ZkNodeProps m = + new ZkNodeProps( + Overseer.QUEUE_OPERATION, OverseerAction.DELETECORE.toLower(), + ZkStateReader.CORE_NAME_PROP, replica1.getCoreName(), + ZkStateReader.NODE_NAME_PROP, replica1.getNodeName(), + ZkStateReader.BASE_URL_PROP, + replica1ZkController + .getZkStateReader() + .getBaseUrlForNodeName(replica1.getNodeName()), + ZkStateReader.COLLECTION_PROP, collectionName, + ZkStateReader.CORE_NODE_NAME_PROP, replica1.getName()); + + if (replica1ZkController + .getDistributedClusterStateUpdater() + .isDistributedStateUpdate()) { + cluster + .getOpenOverseer() + .getDistributedClusterStateUpdater() + .doSingleStateUpdate( + DistributedClusterStateUpdater.MutatingCommand.SliceRemoveReplica, + m, + cluster.getOpenOverseer().getSolrCloudManager(), + cluster.getOpenOverseer().getZkStateReader()); + } else { + cluster.getOpenOverseer().getStateUpdateQueue().offer(Utils.toJSON(m)); + } + + boolean replicaDeleted = false; + TimeOut timeOut = new TimeOut(20, TimeUnit.SECONDS, TimeSource.NANO_TIME); + while (!timeOut.hasTimedOut()) { + try { + ZkStateReader stateReader = + replica1Jetty.getCoreContainer().getZkController().getZkStateReader(); + stateReader.forceUpdateCollection(collectionName); + Slice shard = + stateReader + .getClusterState() + .getCollection(collectionName) + .getSlice("shard1"); + if (shard.getReplicas().size() == 1) { + replicaDeleted = true; + waitingForReplicaGetDeleted.release(); + break; + } + Thread.sleep(500); + } catch (NullPointerException | SolrException e) { + e.printStackTrace(); + Thread.sleep(500); + } + } + if (!replicaDeleted) { + fail("Timeout for waiting replica get deleted"); } - Thread.sleep(500); - } catch (NullPointerException | SolrException e) { + } catch (Exception e) { e.printStackTrace(); - Thread.sleep(500); + fail("Failed to delete replica"); + } finally { + // avoiding deadlock + waitingForReplicaGetDeleted.release(); } + return true; } - if (!replicaDeleted) { - fail("Timeout for waiting replica get deleted"); - } - } catch (Exception e) { - e.printStackTrace(); - fail("Failed to delete replica"); - } finally { - //avoiding deadlock - waitingForReplicaGetDeleted.release(); - } - return true; - } - return false; - }; + return false; + }; try { replica1Jetty.stop(); waitForNodeLeave(replica1JettyNodeName); - // There is a race condition: the replica might be marked down before we get here, in which case we never get notified - // So we check before waiting... Not eliminating but significantly reducing the race window - eliminating would require - // deeper changes in the code where the watcher is set. 
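// ---- Hedged editorial sketch, not part of the upstream patch: the comment above describes a
// check-then-wait idiom -- read the current state first and only block on a watch when the
// condition has not already fired. That narrows, but cannot close, the notify-before-watch race;
// shape only:
import java.util.function.BooleanSupplier;

class CheckThenWaitSketch {
  static void awaitUnlessAlreadyTrue(BooleanSupplier condition, Runnable blockingWait) {
    if (!condition.getAsBoolean()) {
      // The state can still flip between this check and the wait below; the window is merely
      // small. Closing it fully would require installing the watcher before the first check.
      blockingWait.run(); // e.g. waitForState(...) with the matching predicate
    }
  }
}
// ---- end sketch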
- if (getCollectionState(collectionName).getSlice("shard1").getReplica(replica1.getName()).getState() != DOWN) { - waitForState("Expected replica:" + replica1 + " get down", collectionName, (liveNodes, collectionState) - -> collectionState.getSlice("shard1").getReplica(replica1.getName()).getState() == DOWN); + // There is a race condition: the replica might be marked down before we get here, in which + // case we never get notified. So we check before waiting... Not eliminating but significantly + // reducing the race window - eliminating would require deeper changes in the code where the + // watcher is set. + if (getCollectionState(collectionName) + .getSlice("shard1") + .getReplica(replica1.getName()) + .getState() + != DOWN) { + waitForState( + "Expected replica:" + replica1 + " get down", + collectionName, + (liveNodes, collectionState) -> + collectionState.getSlice("shard1").getReplica(replica1.getName()).getState() + == DOWN); } replica1Jetty.start(); waitingForReplicaGetDeleted.acquire(); @@ -380,16 +451,18 @@ public void raceConditionOnDeleteAndRegisterReplica() throws Exception { } TimeOut timeOut = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); - timeOut.waitFor("Timeout adding replica to shard", () -> { - try { - CollectionAdminRequest.addReplicaToShard(collectionName, "shard1") - .process(cluster.getSolrClient()); - return true; - } catch (Exception e) { - // expected, when the node is not fully started - return false; - } - }); + timeOut.waitFor( + "Timeout adding replica to shard", + () -> { + try { + CollectionAdminRequest.addReplicaToShard(collectionName, "shard1") + .process(cluster.getSolrClient()); + return true; + } catch (Exception e) { + // expected, when the node is not fully started + return false; + } + }); waitForState("Expected 1x2 collections", collectionName, clusterShape(1, 2)); shard1 = getCollectionState(collectionName).getSlice("shard1"); @@ -399,11 +472,16 @@ public void raceConditionOnDeleteAndRegisterReplica() throws Exception { leaderJetty.stop(); waitForNodeLeave(leaderJettyNodeName); - waitForState("Expected new active leader", collectionName, (liveNodes, collectionState) -> { - Slice shard = collectionState.getSlice("shard1"); - Replica newLeader = shard.getLeader(); - return newLeader != null && newLeader.getState() == Replica.State.ACTIVE && !newLeader.getName().equals(latestLeader.getName()); - }); + waitForState( + "Expected new active leader", + collectionName, + (liveNodes, collectionState) -> { + Slice shard = collectionState.getSlice("shard1"); + Replica newLeader = shard.getLeader(); + return newLeader != null + && newLeader.getState() == Replica.State.ACTIVE + && !newLeader.getName().equals(latestLeader.getName()); + }); leaderJetty.start(); cluster.waitForActiveCollection(collectionName, 1, 2); @@ -416,10 +494,9 @@ private JettySolrRunner getJettyForReplica(Replica replica) { String nodeName = jetty.getNodeName(); if (nodeName != null && nodeName.equals(replica.getNodeName())) return jetty; } - throw new IllegalArgumentException("Can not find jetty for replica "+ replica); + throw new IllegalArgumentException("Can not find jetty for replica " + replica); } - private void waitForNodeLeave(String lostNodeName) throws InterruptedException { ZkStateReader reader = cluster.getSolrClient().getZkStateReader(); TimeOut timeOut = new TimeOut(20, TimeUnit.SECONDS, TimeSource.NANO_TIME); @@ -439,29 +516,41 @@ public void deleteReplicaOnIndexing() throws Exception { Thread[] threads = new Thread[100]; for (int i = 0; i < threads.length; 
i++) { int finalI = i; - threads[i] = new Thread(() -> { - int doc = finalI * 10000; - while (!closed.get()) { - try { - cluster.getSolrClient().add(collectionName, new SolrInputDocument("id", String.valueOf(doc++))); - } catch (Exception e) { - log.error("Failed on adding document to {}", collectionName, e); - } - } - }); + threads[i] = + new Thread( + () -> { + int doc = finalI * 10000; + while (!closed.get()) { + try { + cluster + .getSolrClient() + .add(collectionName, new SolrInputDocument("id", String.valueOf(doc++))); + } catch (Exception e) { + log.error("Failed on adding document to {}", collectionName, e); + } + } + }); threads[i].start(); } Slice shard1 = getCollectionState(collectionName).getSlice("shard1"); - Replica nonLeader = shard1.getReplicas(rep -> !rep.getName().equals(shard1.getLeader().getName())).get(0); - CollectionAdminRequest.deleteReplica(collectionName, "shard1", nonLeader.getName()).process(cluster.getSolrClient()); + Replica nonLeader = + shard1.getReplicas(rep -> !rep.getName().equals(shard1.getLeader().getName())).get(0); + CollectionAdminRequest.deleteReplica(collectionName, "shard1", nonLeader.getName()) + .process(cluster.getSolrClient()); closed.set(true); for (int i = 0; i < threads.length; i++) { threads[i].join(); } try { - cluster.getSolrClient().waitForState(collectionName, 20, TimeUnit.SECONDS, (liveNodes, collectionState) -> collectionState.getReplicas().size() == 1); + cluster + .getSolrClient() + .waitForState( + collectionName, + 20, + TimeUnit.SECONDS, + (liveNodes, collectionState) -> collectionState.getReplicas().size() == 1); } catch (TimeoutException e) { if (log.isInfoEnabled()) { log.info("Timeout wait for state {}", getCollectionState(collectionName)); @@ -470,18 +559,20 @@ public void deleteReplicaOnIndexing() throws Exception { } } - /** + /** * Helper method for counting the number of instances of UnloadCoreOnDeletedWatcher * that exist on a given node. * - * This is useful for verifying that deleting a replica correctly removed it's watchers. + *

<p>This is useful for verifying that deleting a replica correctly removed its watchers. * - *
<p>
(Note: tests should not assert specific values, since multiple replicas may exist on the + * same node. Instead tests should only assert that the number of watchers has decreased by 1 per + * known replica removed) */ - private static final long countUnloadCoreOnDeletedWatchers(final Set watchers) { - return watchers.stream().filter(w -> w instanceof ZkController.UnloadCoreOnDeletedWatcher).count(); + private static final long countUnloadCoreOnDeletedWatchers( + final Set watchers) { + return watchers.stream() + .filter(w -> w instanceof ZkController.UnloadCoreOnDeletedWatcher) + .count(); } } - diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteShardTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteShardTest.java index 5c3aa01bcad..9a7dd581d37 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DeleteShardTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DeleteShardTest.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.util.HashMap; import java.util.Map; - import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.cloud.DistributedQueue; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -44,11 +43,9 @@ public class DeleteShardTest extends SolrCloudTestCase { @Before public void setupCluster() throws Exception { - configureCluster(2) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(2).addConfig("conf", configset("cloud-minimal")).configure(); } - + @After public void teardownCluster() throws Exception { shutdownCluster(); } @@ -68,25 +65,32 @@ public void test() throws Exception { assertEquals(State.ACTIVE, state.getSlice("shard2").getState()); // Can't delete an ACTIVE shard - expectThrows(Exception.class, () -> { - CollectionAdminRequest.deleteShard(collection, "shard1").process(cluster.getSolrClient()); - }); + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.deleteShard(collection, "shard1").process(cluster.getSolrClient()); + }); setSliceState(collection, "shard1", Slice.State.INACTIVE); // Can delete an INACTIVE shard CollectionAdminRequest.deleteShard(collection, "shard1").process(cluster.getSolrClient()); - waitForState("Expected 'shard1' to be removed", collection, (n, c) -> { - return c.getSlice("shard1") == null; - }); + waitForState( + "Expected 'shard1' to be removed", + collection, + (n, c) -> { + return c.getSlice("shard1") == null; + }); // Can delete a shard under construction setSliceState(collection, "shard2", Slice.State.CONSTRUCTION); CollectionAdminRequest.deleteShard(collection, "shard2").process(cluster.getSolrClient()); - waitForState("Expected 'shard2' to be removed", collection, (n, c) -> { - return c.getSlice("shard2") == null; - }); - + waitForState( + "Expected 'shard2' to be removed", + collection, + (n, c) -> { + return c.getSlice("shard2") == null; + }); } protected void setSliceState(String collection, String slice, State state) throws Exception { @@ -102,49 +106,68 @@ protected void setSliceState(String collection, String slice, State state) throw final Overseer overseer = cluster.getOpenOverseer(); if (overseer.getDistributedClusterStateUpdater().isDistributedStateUpdate()) { - overseer.getDistributedClusterStateUpdater().doSingleStateUpdate( - DistributedClusterStateUpdater.MutatingCommand.SliceUpdateShardState, m, - cluster.getOpenOverseer().getSolrCloudManager(), - cluster.getOpenOverseer().getZkStateReader()); + overseer + .getDistributedClusterStateUpdater() + .doSingleStateUpdate( + 
DistributedClusterStateUpdater.MutatingCommand.SliceUpdateShardState, + m, + cluster.getOpenOverseer().getSolrCloudManager(), + cluster.getOpenOverseer().getZkStateReader()); } else { - DistributedQueue inQueue = cluster.getJettySolrRunner(0).getCoreContainer().getZkController().getOverseer().getStateUpdateQueue(); + DistributedQueue inQueue = + cluster + .getJettySolrRunner(0) + .getCoreContainer() + .getZkController() + .getOverseer() + .getStateUpdateQueue(); inQueue.offer(Utils.toJSON(m)); } - waitForState("Expected shard " + slice + " to be in state " + state.toString(), collection, (n, c) -> { - return c.getSlice(slice).getState() == state; - }); - + waitForState( + "Expected shard " + slice + " to be in state " + state.toString(), + collection, + (n, c) -> { + return c.getSlice(slice).getState() == state; + }); } @Test - // commented 4-Sep-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 09-Aug-2018 - public void testDirectoryCleanupAfterDeleteShard() throws InterruptedException, IOException, SolrServerException { + // commented 4-Sep-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // + // added 09-Aug-2018 + public void testDirectoryCleanupAfterDeleteShard() + throws InterruptedException, IOException, SolrServerException { final String collection = "deleteshard_test"; CollectionAdminRequest.createCollectionWithImplicitRouter(collection, "conf", "a,b,c", 1) .process(cluster.getSolrClient()); - + cluster.waitForActiveCollection(collection, 3, 3); // Get replica details Replica leader = getCollectionState(collection).getLeader("a"); CoreStatus coreStatus = getCoreStatus(leader); - assertTrue("Instance directory doesn't exist", FileUtils.fileExists(coreStatus.getInstanceDirectory())); + assertTrue( + "Instance directory doesn't exist", + FileUtils.fileExists(coreStatus.getInstanceDirectory())); assertTrue("Data directory doesn't exist", FileUtils.fileExists(coreStatus.getDataDirectory())); assertEquals(3, getCollectionState(collection).getActiveSlices().size()); // Delete shard 'a' CollectionAdminRequest.deleteShard(collection, "a").process(cluster.getSolrClient()); - - waitForState("Expected 'a' to be removed", collection, (n, c) -> { - return c.getSlice("a") == null; - }); + + waitForState( + "Expected 'a' to be removed", + collection, + (n, c) -> { + return c.getSlice("a") == null; + }); assertEquals(2, getCollectionState(collection).getActiveSlices().size()); - assertFalse("Instance directory still exists", FileUtils.fileExists(coreStatus.getInstanceDirectory())); + assertFalse( + "Instance directory still exists", FileUtils.fileExists(coreStatus.getInstanceDirectory())); assertFalse("Data directory still exists", FileUtils.fileExists(coreStatus.getDataDirectory())); leader = getCollectionState(collection).getLeader("b"); @@ -156,12 +179,16 @@ public void testDirectoryCleanupAfterDeleteShard() throws InterruptedException, .setDeleteInstanceDir(false) .process(cluster.getSolrClient()); - waitForState("Expected 'b' to be removed", collection, (n, c) -> { - return c.getSlice("b") == null; - }); - + waitForState( + "Expected 'b' to be removed", + collection, + (n, c) -> { + return c.getSlice("b") == null; + }); + assertEquals(1, getCollectionState(collection).getActiveSlices().size()); - assertTrue("Instance directory still exists", FileUtils.fileExists(coreStatus.getInstanceDirectory())); + assertTrue( + "Instance directory still exists", FileUtils.fileExists(coreStatus.getInstanceDirectory())); assertTrue("Data directory 
still exists", FileUtils.fileExists(coreStatus.getDataDirectory())); } } diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteStatusTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteStatusTest.java index bd14ef73a76..cfc5e794f20 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DeleteStatusTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DeleteStatusTest.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.util.concurrent.TimeUnit; - import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -35,7 +34,8 @@ public class DeleteStatusTest extends SolrCloudTestCase { @BeforeClass public static void createCluster() throws Exception { configureCluster(2) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .configure(); } @@ -46,12 +46,12 @@ private static RequestStatusState waitForRequestState(String id, SolrClient clie long endTime = System.nanoTime() + TimeUnit.SECONDS.toNanos(MAX_WAIT_TIMEOUT); while (System.nanoTime() < endTime) { state = CollectionAdminRequest.requestStatus(id).process(client).getRequestStatus(); - if (state == RequestStatusState.COMPLETED) - break; + if (state == RequestStatusState.COMPLETED) break; assumeTrue("Error creating collection - skipping test", state != RequestStatusState.FAILED); TimeUnit.SECONDS.sleep(1); } - assumeTrue("Timed out creating collection - skipping test", state == RequestStatusState.COMPLETED); + assumeTrue( + "Timed out creating collection - skipping test", state == RequestStatusState.COMPLETED); return state; } @@ -61,19 +61,23 @@ public void testAsyncIdsMayBeDeleted() throws Exception { final CloudSolrClient client = cluster.getSolrClient(); final String collection = "deletestatus"; - final String asyncId = CollectionAdminRequest.createCollection(collection, "conf1", 1, 1).processAsync(client); + final String asyncId = + CollectionAdminRequest.createCollection(collection, "conf1", 1, 1).processAsync(client); waitForRequestState(asyncId, client, MAX_WAIT_TIMEOUT); - assertEquals(RequestStatusState.COMPLETED, + assertEquals( + RequestStatusState.COMPLETED, CollectionAdminRequest.requestStatus(asyncId).process(client).getRequestStatus()); CollectionAdminResponse rsp = CollectionAdminRequest.deleteAsyncId(asyncId).process(client); - assertEquals("successfully removed stored response for [" + asyncId + "]", rsp.getResponse().get("status")); + assertEquals( + "successfully removed stored response for [" + asyncId + "]", + rsp.getResponse().get("status")); - assertEquals(RequestStatusState.NOT_FOUND, + assertEquals( + RequestStatusState.NOT_FOUND, CollectionAdminRequest.requestStatus(asyncId).process(client).getRequestStatus()); - } @Test @@ -83,22 +87,23 @@ public void testDeletingNonExistentRequests() throws Exception { CollectionAdminResponse rsp = CollectionAdminRequest.deleteAsyncId("foo").process(client); assertEquals("[foo] not found in stored responses", rsp.getResponse().get("status")); - } @Test - public void testProcessAndWaitDeletesAsyncIds() throws IOException, SolrServerException, InterruptedException { + public void testProcessAndWaitDeletesAsyncIds() + throws IOException, SolrServerException, InterruptedException { final CloudSolrClient client = cluster.getSolrClient(); - RequestStatusState state = 
CollectionAdminRequest.createCollection("requeststatus", "conf1", 1, 1) - .processAndWait("request1", client, MAX_WAIT_TIMEOUT); + RequestStatusState state = + CollectionAdminRequest.createCollection("requeststatus", "conf1", 1, 1) + .processAndWait("request1", client, MAX_WAIT_TIMEOUT); assertSame(RequestStatusState.COMPLETED, state); // using processAndWait deletes the requestid state = CollectionAdminRequest.requestStatus("request1").process(client).getRequestStatus(); - assertSame("Request id was not deleted by processAndWait call", RequestStatusState.NOT_FOUND, state); - + assertSame( + "Request id was not deleted by processAndWait call", RequestStatusState.NOT_FOUND, state); } @Test @@ -106,18 +111,21 @@ public void testDeleteStatusFlush() throws Exception { final CloudSolrClient client = cluster.getSolrClient(); - String id1 = CollectionAdminRequest.createCollection("flush1", "conf1", 1, 1).processAsync(client); - String id2 = CollectionAdminRequest.createCollection("flush2", "conf1", 1, 1).processAsync(client); + String id1 = + CollectionAdminRequest.createCollection("flush1", "conf1", 1, 1).processAsync(client); + String id2 = + CollectionAdminRequest.createCollection("flush2", "conf1", 1, 1).processAsync(client); assertEquals(RequestStatusState.COMPLETED, waitForRequestState(id1, client, MAX_WAIT_TIMEOUT)); assertEquals(RequestStatusState.COMPLETED, waitForRequestState(id2, client, MAX_WAIT_TIMEOUT)); CollectionAdminRequest.deleteAllAsyncIds().process(client); - assertEquals(RequestStatusState.NOT_FOUND, + assertEquals( + RequestStatusState.NOT_FOUND, CollectionAdminRequest.requestStatus(id1).process(client).getRequestStatus()); - assertEquals(RequestStatusState.NOT_FOUND, + assertEquals( + RequestStatusState.NOT_FOUND, CollectionAdminRequest.requestStatus(id2).process(client).getRequestStatus()); - } } diff --git a/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java b/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java index fb09152ef81..47ec9c34459 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java @@ -16,11 +16,24 @@ */ package org.apache.solr.cloud; +import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_NEXT; +import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_PARAM; +import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START; +import static org.apache.solr.common.params.SolrParams.wrapDefaults; + +import java.io.IOException; +import java.lang.invoke.MethodHandles; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; +import org.apache.commons.lang3.StringUtils; import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.lucene.util.SentinelIntSet; import org.apache.lucene.util.TestUtil; -import org.apache.solr.SolrTestCaseJ4.SuppressSSL; import org.apache.solr.CursorPagingTest; +import org.apache.solr.SolrTestCaseJ4.SuppressSSL; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.request.LukeRequest; import org.apache.solr.client.solrj.response.QueryResponse; @@ -35,41 +48,28 @@ import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.search.CursorMark; - -import org.apache.commons.lang3.StringUtils; - -import static org.apache.solr.common.params.SolrParams.wrapDefaults; 
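(The DeleteStatusTest hunks above all revolve around the async collection-API lifecycle: processAsync returns a request id, requestStatus polls the stored response, and deleteAsyncId / deleteAllAsyncIds clear the bookkeeping. A minimal client-side sketch of that lifecycle, assuming only a reachable SolrClient named `client` and a configset named "conf1" — both stand-ins for whatever the caller provides:

import java.util.concurrent.TimeUnit;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.response.RequestStatusState;

public class AsyncStatusSketch {
  // Poll the stored response for an async request until it completes, fails, or times out.
  static RequestStatusState pollUntilDone(String asyncId, SolrClient client, int maxSeconds)
      throws Exception {
    long deadline = System.nanoTime() + TimeUnit.SECONDS.toNanos(maxSeconds);
    RequestStatusState state = RequestStatusState.SUBMITTED;
    while (System.nanoTime() < deadline) {
      state = CollectionAdminRequest.requestStatus(asyncId).process(client).getRequestStatus();
      if (state == RequestStatusState.COMPLETED || state == RequestStatusState.FAILED) {
        break;
      }
      TimeUnit.SECONDS.sleep(1);
    }
    return state;
  }

  static void demo(SolrClient client) throws Exception {
    // processAsync returns immediately; the id is the handle for all later status lookups.
    String asyncId =
        CollectionAdminRequest.createCollection("sketch", "conf1", 1, 1).processAsync(client);
    pollUntilDone(asyncId, client, 90);
    // Stored responses accumulate until deleted; afterwards the id reports NOT_FOUND.
    CollectionAdminRequest.deleteAsyncId(asyncId).process(client);
  }
}

processAndWait bundles the poll-then-delete steps into one call, which is why testProcessAndWaitDeletesAsyncIds expects NOT_FOUND afterwards.)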
-import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_PARAM; -import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_NEXT; -import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START; - import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.lang.invoke.MethodHandles; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.function.Consumer; - /** - * Distributed tests of deep paging using {@link CursorMark} and {@link CursorMarkParams#CURSOR_MARK_PARAM}. - * - * NOTE: this class Reuses some utilities from {@link CursorPagingTest} that assume the same schema and configs. + * Distributed tests of deep paging using {@link CursorMark} and {@link + * CursorMarkParams#CURSOR_MARK_PARAM}. + * + *
<p>
NOTE: this class Reuses some utilities from {@link CursorPagingTest} that assume the same + * schema and configs. * - * @see CursorPagingTest + * @see CursorPagingTest */ @Slow -@SuppressSSL(bugUrl="https://issues.apache.org/jira/browse/SOLR-9182 - causes OOM") +@SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-9182 - causes OOM") public class DistribCursorPagingTest extends AbstractFullDistribZkTestBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); public DistribCursorPagingTest() { - System.setProperty("solr.test.useFilterForSortedQuery", Boolean.toString(random().nextBoolean())); + System.setProperty( + "solr.test.useFilterForSortedQuery", Boolean.toString(random().nextBoolean())); configString = CursorPagingTest.TEST_SOLRCONFIG_NAME; schemaString = CursorPagingTest.TEST_SCHEMAXML_NAME; } @@ -79,16 +79,16 @@ protected String getCloudSolrConfig() { return configString; } - /** - * A really obnoxious hack needed to get our elevate.xml into zk ... - * But simpler for now then re-writing the whole test case using SolrCloudTestCase. + /** + * A really obnoxious hack needed to get our elevate.xml into zk ... But simpler for now then + * re-writing the whole test case using SolrCloudTestCase. */ @Override public void distribSetUp() throws Exception { super.distribSetUp(); ZkTestServer.putConfig("conf1", zkServer.getZkClient(), ZkTestServer.SOLRHOME, "elevate.xml"); } - + @Test public void test() throws Exception { boolean testFinished = false; @@ -127,32 +127,39 @@ private void doBadInputTest() throws Exception { commit(); // empty, blank, or bogus cursor - for (String c : new String[] { "", " ", "all the docs please!"}) { - assertFail(params("q", "*:*", - "sort", "id desc", - CURSOR_MARK_PARAM, c), - ErrorCode.BAD_REQUEST, "Unable to parse"); + for (String c : new String[] {"", " ", "all the docs please!"}) { + assertFail( + params("q", "*:*", "sort", "id desc", CURSOR_MARK_PARAM, c), + ErrorCode.BAD_REQUEST, + "Unable to parse"); } // no id in sort - assertFail(params("q", "*:*", - "sort", "score desc", - CURSOR_MARK_PARAM, CURSOR_MARK_START), - ErrorCode.BAD_REQUEST, "uniqueKey field"); + assertFail( + params("q", "*:*", "sort", "score desc", CURSOR_MARK_PARAM, CURSOR_MARK_START), + ErrorCode.BAD_REQUEST, + "uniqueKey field"); // _docid_ - assertFail(params("q", "*:*", - "sort", "_docid_ asc, id desc", - CURSOR_MARK_PARAM, CURSOR_MARK_START), - ErrorCode.BAD_REQUEST, "_docid_"); + assertFail( + params("q", "*:*", "sort", "_docid_ asc, id desc", CURSOR_MARK_PARAM, CURSOR_MARK_START), + ErrorCode.BAD_REQUEST, + "_docid_"); // using cursor w/ grouping - assertFail(params("q", "*:*", - "sort", "id desc", - GroupParams.GROUP, "true", - GroupParams.GROUP_FIELD, "str", - CURSOR_MARK_PARAM, CURSOR_MARK_START), - ErrorCode.BAD_REQUEST, "Grouping"); - + assertFail( + params( + "q", + "*:*", + "sort", + "id desc", + GroupParams.GROUP, + "true", + GroupParams.GROUP_FIELD, + "str", + CURSOR_MARK_PARAM, + CURSOR_MARK_START), + ErrorCode.BAD_REQUEST, + "Grouping"); } private void doSimpleTest() throws Exception { @@ -165,17 +172,19 @@ private void doSimpleTest() throws Exception { // trivial base case: ensure cursorMark against an empty index doesn't blow up cursorMark = CURSOR_MARK_START; - params = params("q", "*:*", - "rows","4", - "fl", "id", - "sort", "id desc"); + params = + params( + "q", "*:*", + "rows", "4", + "fl", "id", + "sort", "id desc"); rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark)); assertNumFound(0, rsp); 
assertStartsAt(0, rsp); assertDocList(rsp); assertEquals(cursorMark, assertHashNextCursorMark(rsp)); - // don't add in order of either field to ensure we aren't inadvertantly + // don't add in order of either field to ensure we aren't inadvertantly // counting on internal docid ordering indexDoc(sdoc("id", "9", "str", "c", "float", "-3.2", "int", "42")); indexDoc(sdoc("id", "7", "str", "c", "float", "-3.2", "int", "-1976")); @@ -191,10 +200,12 @@ private void doSimpleTest() throws Exception { // base case: ensure cursorMark that matches no docs doesn't blow up cursorMark = CURSOR_MARK_START; - params = params("q", "id:9999999", - "rows","4", - "fl", "id", - "sort", "id desc"); + params = + params( + "q", "id:9999999", + "rows", "4", + "fl", "id", + "sort", "id desc"); rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark)); assertNumFound(0, rsp); assertStartsAt(0, rsp); @@ -203,10 +214,12 @@ private void doSimpleTest() throws Exception { // edge case: ensure rows=0 doesn't blow up and gives back same cursor for next cursorMark = CURSOR_MARK_START; - params = params("q", "*:*", - "rows","0", - "fl", "id", - "sort", "id desc"); + params = + params( + "q", "*:*", + "rows", "0", + "fl", "id", + "sort", "id desc"); rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark)); assertNumFound(10, rsp); assertStartsAt(0, rsp); @@ -215,10 +228,12 @@ private void doSimpleTest() throws Exception { // simple id sort cursorMark = CURSOR_MARK_START; - params = params("q", "-int:6", - "rows","4", - "fl", "id", - "sort", "id desc"); + params = + params( + "q", "-int:6", + "rows", "4", + "fl", "id", + "sort", "id desc"); rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark)); assertNumFound(9, rsp); assertStartsAt(0, rsp); @@ -232,7 +247,7 @@ private void doSimpleTest() throws Exception { cursorMark = assertHashNextCursorMark(rsp); // rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark)); - assertNumFound(9, rsp); + assertNumFound(9, rsp); assertStartsAt(0, rsp); assertDocList(rsp, 0); cursorMark = assertHashNextCursorMark(rsp); @@ -241,28 +256,30 @@ private void doSimpleTest() throws Exception { assertNumFound(9, rsp); assertStartsAt(0, rsp); assertDocList(rsp); - assertEquals("no more docs, but cursorMark has changed", - cursorMark, assertHashNextCursorMark(rsp)); - + assertEquals( + "no more docs, but cursorMark has changed", cursorMark, assertHashNextCursorMark(rsp)); + // NOTE: because field stats and queryNorms can vary amongst shards, // not all "obvious" score based sorts can be iterated cleanly. - // queries that seem like they should result in an obvious "tie" score + // queries that seem like they should result in an obvious "tie" score // between two documents (and would tie in a single node case) may actually // get diff scores for diff docs if they are on diff shards // - // so here, in this test, we can't assert a hardcoded score ordering -- we trust + // so here, in this test, we can't assert a hardcoded score ordering -- we trust // the full walk testing (below) // int sort with dups, id tie breaker ... 
and some faceting cursorMark = CURSOR_MARK_START; - params = params("q", "-int:2001 -int:4055", - "rows","3", - "fl", "id", - "facet", "true", - "facet.field", "str", - "facet.mincount", "1", - "json.nl", "map", - "sort", intsort + " asc, id asc"); + params = + params( + "q", "-int:2001 -int:4055", + "rows", "3", + "fl", "id", + "facet", "true", + "facet.field", "str", + "facet.mincount", "1", + "json.nl", "map", + "sort", intsort + " asc, id asc"); rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark)); assertNumFound(8, rsp); assertStartsAt(0, rsp); @@ -294,16 +311,18 @@ private void doSimpleTest() throws Exception { assertDocList(rsp); assertEquals("a", rsp.getFacetField("str").getValues().get(0).getName()); assertEquals(4, rsp.getFacetField("str").getValues().get(0).getCount()); - assertEquals("no more docs, but cursorMark has changed", - cursorMark, assertHashNextCursorMark(rsp)); - + assertEquals( + "no more docs, but cursorMark has changed", cursorMark, assertHashNextCursorMark(rsp)); + // int missing first sort with dups, id tie breaker cursorMark = CURSOR_MARK_START; - params = params("q", "-int:2001 -int:4055", - "rows","3", - "fl", "id", - "json.nl", "map", - "sort", intmissingsort + "_first asc, id asc"); + params = + params( + "q", "-int:2001 -int:4055", + "rows", "3", + "fl", "id", + "json.nl", "map", + "sort", intmissingsort + "_first asc, id asc"); rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark)); assertNumFound(8, rsp); assertStartsAt(0, rsp); @@ -326,16 +345,18 @@ private void doSimpleTest() throws Exception { assertNumFound(8, rsp); assertStartsAt(0, rsp); assertDocList(rsp); - assertEquals("no more docs, but cursorMark has changed", - cursorMark, assertHashNextCursorMark(rsp)); + assertEquals( + "no more docs, but cursorMark has changed", cursorMark, assertHashNextCursorMark(rsp)); // int missing last sort with dups, id tie breaker cursorMark = CURSOR_MARK_START; - params = params("q", "-int:2001 -int:4055", - "rows","3", - "fl", "id", - "json.nl", "map", - "sort", intmissingsort + "_last asc, id asc"); + params = + params( + "q", "-int:2001 -int:4055", + "rows", "3", + "fl", "id", + "json.nl", "map", + "sort", intmissingsort + "_last asc, id asc"); rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark)); assertNumFound(8, rsp); assertStartsAt(0, rsp); @@ -358,15 +379,17 @@ private void doSimpleTest() throws Exception { assertNumFound(8, rsp); assertStartsAt(0, rsp); assertDocList(rsp); - assertEquals("no more docs, but cursorMark has changed", - cursorMark, assertHashNextCursorMark(rsp)); + assertEquals( + "no more docs, but cursorMark has changed", cursorMark, assertHashNextCursorMark(rsp)); // string sort with dups, id tie breaker cursorMark = CURSOR_MARK_START; - params = params("q", "*:*", - "rows","6", - "fl", "id", - "sort", "str asc, id desc"); + params = + params( + "q", "*:*", + "rows", "6", + "fl", "id", + "sort", "str asc, id desc"); rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark)); assertNumFound(10, rsp); assertStartsAt(0, rsp); @@ -383,18 +406,20 @@ private void doSimpleTest() throws Exception { assertNumFound(10, rsp); assertStartsAt(0, rsp); assertDocList(rsp); - assertEquals("no more docs, but cursorMark has changed", - cursorMark, assertHashNextCursorMark(rsp)); + assertEquals( + "no more docs, but cursorMark has changed", cursorMark, assertHashNextCursorMark(rsp)); // tri-level sort with more dups of primary then fit on a page. 
// also a function based sort using a simple function(s) on same field // (order should be the same in all cases) - for (String primarysort : new String[] { "float", "field('float')", "sum(float,42)" }) { + for (String primarysort : new String[] {"float", "field('float')", "sum(float,42)"}) { cursorMark = CURSOR_MARK_START; - params = params("q", "*:*", - "rows","2", - "fl", "id", - "sort", primarysort + " asc, "+intsort+" desc, id desc"); + params = + params( + "q", "*:*", + "rows", "2", + "fl", "id", + "sort", primarysort + " asc, " + intsort + " desc, id desc"); rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark)); assertNumFound(10, rsp); assertStartsAt(0, rsp); @@ -402,19 +427,19 @@ private void doSimpleTest() throws Exception { cursorMark = assertHashNextCursorMark(rsp); // rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark)); - assertNumFound(10, rsp); + assertNumFound(10, rsp); assertStartsAt(0, rsp); assertDocList(rsp, 7, 4); cursorMark = assertHashNextCursorMark(rsp); // rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark)); - assertNumFound(10, rsp); + assertNumFound(10, rsp); assertStartsAt(0, rsp); assertDocList(rsp, 3, 8); cursorMark = assertHashNextCursorMark(rsp); // rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark)); - assertNumFound(10, rsp); + assertNumFound(10, rsp); assertStartsAt(0, rsp); assertDocList(rsp, 5, 6); cursorMark = assertHashNextCursorMark(rsp); @@ -427,19 +452,21 @@ private void doSimpleTest() throws Exception { // we've exactly exhausted all the results, but solr had no way of know that // rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark)); - assertNumFound(10, rsp); + assertNumFound(10, rsp); assertStartsAt(0, rsp); assertDocList(rsp); - assertEquals("no more docs, but cursorMark has changed", - cursorMark, assertHashNextCursorMark(rsp)); + assertEquals( + "no more docs, but cursorMark has changed", cursorMark, assertHashNextCursorMark(rsp)); } - + // trivial base case: rows bigger then number of matches cursorMark = CURSOR_MARK_START; - params = params("q", "id:3 id:7", - "rows","111", - "fl", "id", - "sort", intsort + " asc, id asc"); + params = + params( + "q", "id:3 id:7", + "rows", "111", + "fl", "id", + "sort", intsort + " asc, id asc"); rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark)); assertNumFound(2, rsp); assertStartsAt(0, rsp); @@ -450,30 +477,46 @@ private void doSimpleTest() throws Exception { assertNumFound(2, rsp); assertStartsAt(0, rsp); assertDocList(rsp); - assertEquals("no more docs, but cursorMark has changed", - cursorMark, assertHashNextCursorMark(rsp)); + assertEquals( + "no more docs, but cursorMark has changed", cursorMark, assertHashNextCursorMark(rsp)); // sanity check our full walk method SentinelIntSet ids; - ids = assertFullWalkNoDups(10, params("q", "*:*", - "rows", "4", - "sort", "id desc")); + ids = + assertFullWalkNoDups( + 10, + params( + "q", "*:*", + "rows", "4", + "sort", "id desc")); assertEquals(10, ids.size()); - ids = assertFullWalkNoDups(9, params("q", "*:*", - "rows", "1", - "fq", "-id:4", - "sort", "id asc")); + ids = + assertFullWalkNoDups( + 9, + params( + "q", "*:*", + "rows", "1", + "fq", "-id:4", + "sort", "id asc")); assertEquals(9, ids.size()); assertFalse("matched on id:4 unexpectedly", ids.exists(4)); - ids = assertFullWalkNoDups(9, params("q", "*:*", - "rows", "3", - "fq", "-id:6", - "sort", "float desc, id asc, int asc")); + ids = + assertFullWalkNoDups( + 9, + params( + "q", "*:*", + "rows", "3", + "fq", "-id:6", + "sort", "float desc, id asc, int asc")); assertEquals(9, ids.size()); 
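(The walk assertions in this file — assertFullWalkNoDups and friends — all reduce to the standard cursorMark loop on the client side. A minimal sketch of that loop, with a SolrClient `client` and the collection name "collection1" as assumed stand-ins; the sort must be deterministic and include the uniqueKey, which is exactly what doBadInputTest enforces:

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.params.CursorMarkParams;

public class CursorWalkSketch {
  static long walkAll(SolrClient client) throws Exception {
    SolrQuery q = new SolrQuery("*:*");
    q.setRows(50);
    q.setSort(SolrQuery.SortClause.asc("id")); // uniqueKey tie-break keeps pages stable
    String cursorMark = CursorMarkParams.CURSOR_MARK_START;
    long seen = 0;
    while (true) {
      q.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark);
      QueryResponse rsp = client.query("collection1", q);
      seen += rsp.getResults().size();
      String next = rsp.getNextCursorMark();
      // Finished when a page comes back empty and the mark stops changing; this is the
      // same "no more docs, but cursorMark has changed" condition asserted in the test.
      if (rsp.getResults().isEmpty() && cursorMark.equals(next)) {
        break;
      }
      cursorMark = next;
    }
    return seen;
  }
}
)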
assertFalse("matched on id:6 unexpectedly", ids.exists(6)); - ids = assertFullWalkNoDups(9, params("q", "float:[0 TO *] int:7 id:6", - "rows", "3", - "sort", "score desc, id desc")); + ids = + assertFullWalkNoDups( + 9, + params( + "q", "float:[0 TO *] int:7 id:6", + "rows", "3", + "sort", "score desc, id desc")); assertEquals(7, ids.size()); assertFalse("matched on id:9 unexpectedly", ids.exists(9)); assertFalse("matched on id:7 unexpectedly", ids.exists(7)); @@ -481,10 +524,12 @@ private void doSimpleTest() throws Exception { // strategically delete/add some docs in the middle of walking the cursor cursorMark = CURSOR_MARK_START; - params = params("q", "*:*", - "rows","2", - "fl", "id", - "sort", "str asc, id asc"); + params = + params( + "q", "*:*", + "rows", "2", + "fl", "id", + "sort", "str asc, id asc"); rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark)); assertNumFound(10, rsp); assertStartsAt(0, rsp); @@ -499,7 +544,7 @@ private void doSimpleTest() throws Exception { assertDocList(rsp, 4, 6); cursorMark = assertHashNextCursorMark(rsp); // delete the next guy we expect - del("id:0"); + del("id:0"); commit(); rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark)); assertNumFound(8, rsp); @@ -523,21 +568,18 @@ private void doSimpleTest() throws Exception { // rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark)); assertDocList(rsp); - assertEquals("no more docs, but cursorMark has changed", - cursorMark, assertHashNextCursorMark(rsp)); - - + assertEquals( + "no more docs, but cursorMark has changed", cursorMark, assertHashNextCursorMark(rsp)); } - /** randomized testing of a non-trivial number of docs using assertFullWalkNoDups - */ + /** randomized testing of a non-trivial number of docs using assertFullWalkNoDups */ public void doRandomSortsOnLargeIndex() throws Exception { final Collection allFieldNames = getAllSortFieldNames(); final int numInitialDocs = TestUtil.nextInt(random(), 100, 200); final int totalDocs = atLeast(500); - // start with a smallish number of documents, and test that we can do a full walk using a + // start with a smallish number of documents, and test that we can do a full walk using a // sort on *every* field in the schema... List initialDocs = new ArrayList<>(); @@ -552,30 +594,31 @@ public void doRandomSortsOnLargeIndex() throws Exception { for (String order : new String[] {" asc", " desc"}) { String sort = f + order + ("id".equals(f) ? "" : ", id" + order); String rows = "" + TestUtil.nextInt(random(), 13, 50); - final SolrParams main = params("q", "*:*", - "fl","id,"+f, - "rows",rows, - "sort",sort); + final SolrParams main = params("q", "*:*", "fl", "id," + f, "rows", rows, "sort", sort); final SentinelIntSet ids = assertFullWalkNoDups(numInitialDocs, main); assertEquals(numInitialDocs, ids.size()); - // same query, now with QEC ... verify we get all the same docs, but the (expected) elevated docs are first... - final SentinelIntSet elevated = assertFullWalkNoDupsElevated(wrapDefaults(params("qt", "/elevate", - "fl","id,[elevated]", - "forceElevation","true", - "elevateIds", "50,20,80"), - main), - ids); + // same query, now with QEC ... verify we get all the same docs, but the (expected) elevated + // docs are first... 
+ final SentinelIntSet elevated = + assertFullWalkNoDupsElevated( + wrapDefaults( + params( + "qt", "/elevate", + "fl", "id,[elevated]", + "forceElevation", "true", + "elevateIds", "50,20,80"), + main), + ids); assertTrue(elevated.exists(50)); assertTrue(elevated.exists(20)); assertTrue(elevated.exists(80)); assertEquals(3, elevated.size()); - } } // now add a lot more docs, and test a handful of randomized multi-level sorts - for (int i = numInitialDocs+1; i <= totalDocs; i++) { + for (int i = numInitialDocs + 1; i <= totalDocs; i++) { SolrInputDocument doc = CursorPagingTest.buildRandomDocument(i); indexDoc(doc); } @@ -588,65 +631,77 @@ public void doRandomSortsOnLargeIndex() throws Exception { final String fl = random().nextBoolean() ? "id" : "id,score"; final boolean matchAll = random().nextBoolean(); final String q = matchAll ? "*:*" : CursorPagingTest.buildRandomQuery(); - final SolrParams main = params("q", q, - "fl",fl, - "rows",rows, - "sort",sort); - final SentinelIntSet ids = assertFullWalkNoDups(totalDocs, - params("q", q, - "fl",fl, - "rows",rows, - "sort",sort)); + final SolrParams main = + params( + "q", q, + "fl", fl, + "rows", rows, + "sort", sort); + final SentinelIntSet ids = + assertFullWalkNoDups( + totalDocs, + params( + "q", q, + "fl", fl, + "rows", rows, + "sort", sort)); if (matchAll) { assertEquals(totalDocs, ids.size()); } - - // same query, now with QEC ... verify we get all the same docs, but the (expected) elevated docs are first... + + // same query, now with QEC ... verify we get all the same docs, but the (expected) elevated + // docs are first... // first we have to build a set of ids to elevate, from the set of ids known to match query... - final int[] expectedElevated = CursorPagingTest.pickElevations(TestUtil.nextInt(random(), 3, 33), ids); - final SentinelIntSet elevated = assertFullWalkNoDupsElevated - (wrapDefaults(params("qt", "/elevate", - "fl", fl + ",[elevated]", - // HACK: work around SOLR-15307... same results should match, just not same order - "sort", (sort.startsWith("score asc") ? "score desc, " + sort : sort), - "forceElevation","true", - "elevateIds", StringUtils.join(expectedElevated,',')), - main), - ids); + final int[] expectedElevated = + CursorPagingTest.pickElevations(TestUtil.nextInt(random(), 3, 33), ids); + final SentinelIntSet elevated = + assertFullWalkNoDupsElevated( + wrapDefaults( + params( + "qt", + "/elevate", + "fl", + fl + ",[elevated]", + // HACK: work around SOLR-15307... same results should match, just not same + // order + "sort", + (sort.startsWith("score asc") ? "score desc, " + sort : sort), + "forceElevation", + "true", + "elevateIds", + StringUtils.join(expectedElevated, ',')), + main), + ids); for (int expected : expectedElevated) { - assertTrue(expected + " wasn't elevated even though it should have been", - elevated.exists(expected)); + assertTrue( + expected + " wasn't elevated even though it should have been", + elevated.exists(expected)); } assertEquals(expectedElevated.length, elevated.size()); - } - } - + /** - * Asks the LukeRequestHandler on the control client for a list of the fields in the - * schema and then prunes that list down to just the fields that can be used for sorting, - * and returns them as an immutable list in a deterministically random order. 
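(The getAllSortFieldNames() javadoc being reflowed here describes asking the LukeRequestHandler for the schema's fields and pruning them to sortable ones. A sketch of that Luke round trip in isolation, with `client` as an assumed stand-in for the test's control client:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.request.LukeRequest;
import org.apache.solr.common.util.NamedList;

public class LukeSchemaFieldsSketch {
  @SuppressWarnings("unchecked")
  static List<String> schemaFieldNames(SolrClient client) throws Exception {
    LukeRequest req = new LukeRequest("/admin/luke");
    req.setShowSchema(true); // ask for full schema details, not just index stats
    NamedList<Object> rsp = client.request(req);
    // Response shape mirrors the casts in the test: schema -> fields -> one entry per field.
    NamedList<Object> fields =
        (NamedList<Object>) ((NamedList<Object>) rsp.get("schema")).get("fields");
    List<String> names = new ArrayList<>(fields.size());
    for (Map.Entry<String, Object> field : fields) {
      names.add(field.getKey());
    }
    return names;
  }
}
)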
+ * Asks the LukeRequestHandler on the control client for a list of the fields in the schema and + * then prunes that list down to just the fields that can be used for sorting, and returns them as + * an immutable list in a deterministically random order. */ private List getAllSortFieldNames() throws SolrServerException, IOException { LukeRequest req = new LukeRequest("/admin/luke"); - req.setShowSchema(true); + req.setShowSchema(true); NamedList rsp = controlClient.request(req); @SuppressWarnings({"unchecked"}) - NamedList fields = (NamedList) ((NamedList)rsp.get("schema")).get("fields"); + NamedList fields = (NamedList) ((NamedList) rsp.get("schema")).get("fields"); ArrayList names = new ArrayList<>(fields.size()); - for (Map.Entry item : fields) { + for (Map.Entry item : fields) { names.add(item.getKey()); } - + return CursorPagingTest.pruneAndDeterministicallySort(names); } - /** - * execute a request, verify that we get an expected error - */ - public void assertFail(SolrParams p, ErrorCode expCode, String expSubstr) - throws Exception { + /** execute a request, verify that we get an expected error */ + public void assertFail(SolrParams p, ErrorCode expCode, String expSubstr) throws Exception { try { ignoreException(expSubstr); @@ -654,17 +709,18 @@ public void assertFail(SolrParams p, ErrorCode expCode, String expSubstr) fail("no exception matching expected: " + expCode.code + ": " + expSubstr); } catch (SolrException e) { assertEquals(expCode.code, e.code()); - assertTrue("Expected substr not found: " + expSubstr + " [elevated] and comes "after" any ids which were not [elevated] - * + * and then continuously walks the results using {@link CursorMarkParams#CURSOR_MARK_START} as + * long as a non-0 number of docs ar returned. This method records the the set of all id's (must + * be positive ints) encountered and throws an assertion failure if any id is encountered more + * than once, or if an id is encountered which is not expected, or if an id is [elevated] + * and comes "after" any ids which were not [elevated] * * @returns set of all elevated ids encountered in the walk * @see #assertFullWalkNoDups(SolrParams,Consumer) */ - public SentinelIntSet assertFullWalkNoDupsElevated(final SolrParams params, final SentinelIntSet allExpected) - throws Exception { + public SentinelIntSet assertFullWalkNoDupsElevated( + final SolrParams params, final SentinelIntSet allExpected) throws Exception { final SentinelIntSet ids = new SentinelIntSet(allExpected.size(), -1); final SentinelIntSet idsElevated = new SentinelIntSet(32, -1); - assertFullWalkNoDups(params, (doc) -> { - final int id = Integer.parseInt(doc.get("id").toString()); - final boolean elevated = Boolean.parseBoolean(doc.getOrDefault("[elevated]","false").toString()); - assertTrue(id + " is not expected to match query", - allExpected.exists(id)); - - if (ids.exists(id)) { - String msg = "walk already seen: " + id; - try { + assertFullWalkNoDups( + params, + (doc) -> { + final int id = Integer.parseInt(doc.get("id").toString()); + final boolean elevated = + Boolean.parseBoolean(doc.getOrDefault("[elevated]", "false").toString()); + assertTrue(id + " is not expected to match query", allExpected.exists(id)); + + if (ids.exists(id)) { + String msg = "walk already seen: " + id; try { - queryAndCompareShards(params("distrib","false", - "q","id:"+id)); - } catch (AssertionError ae) { - throw new AssertionError(msg + ", found shard inconsistency that would explain it...", ae); + try { + queryAndCompareShards(params("distrib", "false", "q", 
"id:" + id)); + } catch (AssertionError ae) { + throw new AssertionError( + msg + ", found shard inconsistency that would explain it...", ae); + } + final QueryResponse rsp = cloudClient.query(params("q", "id:" + id)); + throw new AssertionError( + msg + ", don't know why; q=id:" + id + " gives: " + rsp.toString()); + } catch (Exception e) { + throw new AssertionError(msg + ", exception trying to fiture out why...", e); } - final QueryResponse rsp = cloudClient.query(params("q","id:"+id)); - throw new AssertionError(msg + ", don't know why; q=id:"+id+" gives: " + rsp.toString()); - } catch (Exception e) { - throw new AssertionError(msg + ", exception trying to fiture out why...", e); } - } - if (elevated) { - assertEquals("id is elevated, but we've already seen non elevated ids: " + id, - idsElevated.size(), ids.size()); - idsElevated.put(id); - } - ids.put(id); - }); - - assertEquals("total number of ids seen did not match expected", - allExpected.size(), ids.size()); - + if (elevated) { + assertEquals( + "id is elevated, but we've already seen non elevated ids: " + id, + idsElevated.size(), + ids.size()); + idsElevated.put(id); + } + ids.put(id); + }); + + assertEquals("total number of ids seen did not match expected", allExpected.size(), ids.size()); + return idsElevated; } - /** - *
<p>
- * Given a set of params, executes a cursor query using {@link CursorMarkParams#CURSOR_MARK_START} - * and then continuously walks the results using {@link CursorMarkParams#CURSOR_MARK_START} as long - * as a non-0 number of docs ar returned. This method records the the set of all id's - * (must be positive ints) encountered and throws an assertion failure if any id is - * encountered more then once, or if the set grows above maxSize - *
</p>
+ * Given a set of params, executes a cursor query using {@link CursorMarkParams#CURSOR_MARK_START} + * and then continuously walks the results using {@link CursorMarkParams#CURSOR_MARK_START} as + * long as a non-0 number of docs ar returned. This method records the the set of all id's (must + * be positive ints) encountered and throws an assertion failure if any id is encountered more + * then once, or if the set grows above maxSize * - *
<p>
- * Note that this method explicitly uses the "cloudClient" for executing the queries, - * instead of relying on the test infrastructure to execute the queries redundently - * against both the cloud client as well as a control client. This is because term stat - * differences in a sharded setup can result in different scores for documents compared - * to the control index -- which can affect the sorting in some cases and cause false - * negatives in the response comparisons (even if we don't include "score" in the "fl") - *
</p>
+ *
<p>
Note that this method explicitly uses the "cloudClient" for executing the queries, instead + * of relying on the test infrastructure to execute the queries redundently against both the cloud + * client as well as a control client. This is because term stat differences in a sharded setup + * can result in different scores for documents compared to the control index -- which can affect + * the sorting in some cases and cause false negatives in the response comparisons (even if we + * don't include "score" in the "fl") * * @returns set of all ids encountered in the walk * @see #assertFullWalkNoDups(SolrParams,Consumer) */ public SentinelIntSet assertFullWalkNoDups(int maxSize, SolrParams params) throws Exception { final SentinelIntSet ids = new SentinelIntSet(maxSize, -1); - assertFullWalkNoDups(params, (doc) -> { - int id = Integer.parseInt(doc.getFieldValue("id").toString()); - if (ids.exists(id)) { - String msg = "walk already seen: " + id; - try { + assertFullWalkNoDups( + params, + (doc) -> { + int id = Integer.parseInt(doc.getFieldValue("id").toString()); + if (ids.exists(id)) { + String msg = "walk already seen: " + id; try { - queryAndCompareShards(params("distrib","false", - "q","id:"+id)); - } catch (AssertionError ae) { - throw new AssertionError(msg + ", found shard inconsistency that would explain it...", ae); + try { + queryAndCompareShards(params("distrib", "false", "q", "id:" + id)); + } catch (AssertionError ae) { + throw new AssertionError( + msg + ", found shard inconsistency that would explain it...", ae); + } + final QueryResponse rsp = cloudClient.query(params("q", "id:" + id)); + throw new AssertionError( + msg + ", don't know why; q=id:" + id + " gives: " + rsp.toString()); + } catch (Exception e) { + throw new AssertionError(msg + ", exception trying to fiture out why...", e); } - final QueryResponse rsp = cloudClient.query(params("q","id:"+id)); - throw new AssertionError(msg + ", don't know why; q=id:"+id+" gives: " + rsp.toString()); - } catch (Exception e) { - throw new AssertionError(msg + ", exception trying to fiture out why...", e); } - } - ids.put(id); - assertFalse("id set bigger then max allowed ("+maxSize+"): " + ids.size(), - maxSize < ids.size()); - }); + ids.put(id); + assertFalse( + "id set bigger then max allowed (" + maxSize + "): " + ids.size(), + maxSize < ids.size()); + }); return ids; } - /** - *
<p>
- * Given a set of params, executes a cursor query using {@link CursorMarkParams#CURSOR_MARK_START} - * and then continuously walks the results using {@link CursorMarkParams#CURSOR_MARK_START} as long - * as a non-0 number of docs ar returned. This method does some basic validation of each response, and then - * passes each doc encountered (in order returned) to the specified Consumer, which may throw an assertion if - * there is a problem. - *
</p>
+ * Given a set of params, executes a cursor query using {@link CursorMarkParams#CURSOR_MARK_START} + * and then continuously walks the results using {@link CursorMarkParams#CURSOR_MARK_START} as + * long as a non-0 number of docs ar returned. This method does some basic validation of each + * response, and then passes each doc encountered (in order returned) to the specified Consumer, + * which may throw an assertion if there is a problem. * - *
<p>
- * Note that this method explicitly uses the "cloudClient" for executing the queries, - * instead of relying on the test infrastructure to execute the queries redundently - * against both the cloud client as well as a control client. This is because term stat - * differences in a sharded setup can result in different scores for documents compared - * to the control index -- which can affect the sorting in some cases and cause false - * negatives in the response comparisons (even if we don't include "score" in the "fl") - *
</p>
+ *
<p>
Note that this method explicitly uses the "cloudClient" for executing the queries, instead + * of relying on the test infrastructure to execute the queries redundently against both the cloud + * client as well as a control client. This is because term stat differences in a sharded setup + * can result in different scores for documents compared to the control index -- which can affect + * the sorting in some cases and cause false negatives in the response comparisons (even if we + * don't include "score" in the "fl") */ - public void assertFullWalkNoDups(SolrParams params, Consumer consumer) throws Exception { - + public void assertFullWalkNoDups(SolrParams params, Consumer consumer) + throws Exception { + String cursorMark = CURSOR_MARK_START; int docsOnThisPage = Integer.MAX_VALUE; while (0 < docsOnThisPage) { @@ -844,12 +901,12 @@ public void assertFullWalkNoDups(SolrParams params, Consumer consu docsOnThisPage = docs.size(); if (null != params.getInt(CommonParams.ROWS)) { int rows = params.getInt(CommonParams.ROWS); - assertTrue("Too many docs on this page: " + rows + " < " + docsOnThisPage, - docsOnThisPage <= rows); + assertTrue( + "Too many docs on this page: " + rows + " < " + docsOnThisPage, docsOnThisPage <= rows); } if (0 == docsOnThisPage) { - assertEquals("no more docs, but "+CURSOR_MARK_NEXT+" isn't same", - cursorMark, nextCursorMark); + assertEquals( + "no more docs, but " + CURSOR_MARK_NEXT + " isn't same", cursorMark, nextCursorMark); } for (SolrDocument doc : docs) { @@ -863,5 +920,4 @@ private SolrParams p(SolrParams params, String... other) { SolrParams extras = params(other); return SolrParams.wrapDefaults(params, extras); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/DistribDocExpirationUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/cloud/DistribDocExpirationUpdateProcessorTest.java index dad34efdd2d..01242f0c00a 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DistribDocExpirationUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DistribDocExpirationUpdateProcessorTest.java @@ -16,17 +16,17 @@ */ package org.apache.solr.cloud; +import static java.util.Collections.singletonList; +import static org.apache.solr.security.Sha256AuthenticationProvider.getSaltedHashedValue; + import java.io.IOException; import java.lang.invoke.MethodHandles; -import java.util.Objects; import java.util.HashMap; import java.util.HashSet; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.concurrent.TimeUnit; - -import static java.util.Collections.singletonList; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrServerException; @@ -49,9 +49,6 @@ import org.apache.solr.security.RuleBasedAuthorizationPlugin; import org.apache.solr.update.processor.DocExpirationUpdateProcessorFactory; import org.apache.solr.util.TimeOut; - -import static org.apache.solr.security.Sha256AuthenticationProvider.getSaltedHashedValue; - import org.junit.After; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -74,9 +71,7 @@ public void cleanup() throws Exception { PASS = null; } - /** - * Modifies the request to inlcude authentication params if needed, returns the request - */ + /** Modifies the request to inlcude authentication params if needed, returns the request */ private > T setAuthIfNeeded(T req) { if (null != USER) { assert null != PASS; @@ -84,37 +79,54 @@ private > T setAuthIfNeeded(T req) { } return req; } - + public 
void setupCluster(boolean security) throws Exception { - // we want at most one core per node to force lots of network traffic to try and tickle distributed bugs - final MiniSolrCloudCluster.Builder b = configureCluster(4) - .addConfig("conf", TEST_PATH().resolve("configsets").resolve("doc-expiry").resolve("conf")); + // we want at most one core per node to force lots of network traffic to try and tickle + // distributed bugs + final MiniSolrCloudCluster.Builder b = + configureCluster(4) + .addConfig( + "conf", TEST_PATH().resolve("configsets").resolve("doc-expiry").resolve("conf")); COLLECTION = "expiring"; if (security) { USER = "solr"; PASS = "SolrRocksAgain"; COLLECTION += "_secure"; - - final String SECURITY_JSON = Utils.toJSONString - (Map.of("authorization", - Map.of("class", RuleBasedAuthorizationPlugin.class.getName(), - "user-role", Map.of(USER,"admin"), - "permissions", singletonList(Map.of("name","all", - "role","admin"))), - "authentication", - Map.of("class", BasicAuthPlugin.class.getName(), - "blockUnknown",true, - "credentials", Map.of(USER, getSaltedHashedValue(PASS))))); + + final String SECURITY_JSON = + Utils.toJSONString( + Map.of( + "authorization", + Map.of( + "class", + RuleBasedAuthorizationPlugin.class.getName(), + "user-role", + Map.of(USER, "admin"), + "permissions", + singletonList(Map.of("name", "all", "role", "admin"))), + "authentication", + Map.of( + "class", + BasicAuthPlugin.class.getName(), + "blockUnknown", + true, + "credentials", + Map.of(USER, getSaltedHashedValue(PASS))))); b.withSecurityJson(SECURITY_JSON); } b.configure(); setAuthIfNeeded(CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 2)) - .process(cluster.getSolrClient()); - - cluster.getSolrClient().waitForState(COLLECTION, DEFAULT_TIMEOUT, TimeUnit.SECONDS, - (n, c) -> DocCollection.isFullyActive(n, c, 2, 2)); + .process(cluster.getSolrClient()); + + cluster + .getSolrClient() + .waitForState( + COLLECTION, + DEFAULT_TIMEOUT, + TimeUnit.SECONDS, + (n, c) -> DocCollection.isFullyActive(n, c, 2, 2)); } public void testNoAuth() throws Exception { @@ -122,27 +134,36 @@ public void testNoAuth() throws Exception { runTest(); } - public void testBasicAuth() throws Exception { setupCluster(true); // sanity check that our cluster really does require authentication - assertEquals("sanity check of non authenticated request", - 401, - expectThrows(SolrException.class, () -> { - final long ignored = cluster.getSolrClient().query - (COLLECTION, - params("q", "*:*", - "rows", "0", - "_trace", "no_auth_sanity_check")).getResults().getNumFound(); - }).code()); - + assertEquals( + "sanity check of non authenticated request", + 401, + expectThrows( + SolrException.class, + () -> { + final long ignored = + cluster + .getSolrClient() + .query( + COLLECTION, + params( + "q", "*:*", + "rows", "0", + "_trace", "no_auth_sanity_check")) + .getResults() + .getNumFound(); + }) + .code()); + runTest(); } - + private void runTest() throws Exception { final int totalNumDocs = atLeast(50); - + // Add a bunch of docs; some with extremely short expiration, some with no expiration // these should be randomly distributed to each shard long numDocsThatNeverExpire = 0; @@ -152,80 +173,85 @@ private void runTest() throws Exception { final SolrInputDocument doc = sdoc("id", i); if (random().nextBoolean()) { - doc.addField("should_expire_s","yup"); - doc.addField("tTl_s","+1SECONDS"); + doc.addField("should_expire_s", "yup"); + doc.addField("tTl_s", "+1SECONDS"); } else { numDocsThatNeverExpire++; } - + req.add(doc); 
} req.commit(cluster.getSolrClient(), COLLECTION); } - - // NOTE: don't assume we can find exactly totalNumDocs right now, some may have already been deleted... - - // it should not take long for us to get to the point where all 'should_expire_s:yup' docs are gone - waitForNoResults(30, params("q","should_expire_s:yup","rows","0","_trace","init_batch_check")); + + // NOTE: don't assume we can find exactly totalNumDocs right now, some may have already been + // deleted... + + // it should not take long for us to get to the point where all 'should_expire_s:yup' docs are + // gone + waitForNoResults( + 30, params("q", "should_expire_s:yup", "rows", "0", "_trace", "init_batch_check")); { // ...*NOW* we can assert that exactly numDocsThatNeverExpire should exist... - final QueryRequest req = setAuthIfNeeded(new QueryRequest - (params("q", "*:*", - "rows", "0", - "_trace", "count_non_expire_docs"))); + final QueryRequest req = + setAuthIfNeeded( + new QueryRequest( + params( + "q", "*:*", + "rows", "0", + "_trace", "count_non_expire_docs"))); // NOTE: it's possible that replicas could be out of sync but this query may get lucky and // only hit leaders. we'll compare the counts of every replica in every shard later on... - assertEquals(numDocsThatNeverExpire, - req.process(cluster.getSolrClient(), COLLECTION).getResults().getNumFound()); + assertEquals( + numDocsThatNeverExpire, + req.process(cluster.getSolrClient(), COLLECTION).getResults().getNumFound()); } - - // + // now that we've confrmed the basics work, let's check some fine grain stuff... - // - + // first off, sanity check that this special docId doesn't some how already exist - waitForNoResults(0, params("q","id:special99","rows","0","_trace","sanity_check99")); + waitForNoResults(0, params("q", "id:special99", "rows", "0", "_trace", "sanity_check99")); { - // force a hard commit on all shards (the prior auto-expire would have only done a soft commit) - // so we can ensure our indexVersion won't change uncessisarily on the un-affected + // force a hard commit on all shards (the prior auto-expire would have only done a soft + // commit) so we can ensure our indexVersion won't change uncessisarily on the un-affected // shard when we add & (hard) commit our special doc... final UpdateRequest req = setAuthIfNeeded(new UpdateRequest()); req.commit(cluster.getSolrClient(), COLLECTION); } - - - // record important data for each replica core so we can check later - // that it only changes for the replicas of a single shard after we add/expire a single special doc + + // record important data for each replica core so we can check later that it only changes for + // the replicas of a single shard after we add/expire a single special doc log.info("Fetching ReplicaData BEFORE special doc addition/expiration"); - final Map initReplicaData = getTestDataForAllReplicas(); + final Map initReplicaData = getTestDataForAllReplicas(); assertTrue("WTF? 
no replica data?", 0 < initReplicaData.size()); - // add & hard commit a special doc with a short TTL - setAuthIfNeeded(new UpdateRequest()).add(sdoc("id", "special99", "should_expire_s","yup","tTl_s","+30SECONDS")) - .commit(cluster.getSolrClient(), COLLECTION); + // add & hard commit a special doc with a short TTL + setAuthIfNeeded(new UpdateRequest()) + .add(sdoc("id", "special99", "should_expire_s", "yup", "tTl_s", "+30SECONDS")) + .commit(cluster.getSolrClient(), COLLECTION); // wait for our special docId to be deleted - waitForNoResults(180, params("q","id:special99","rows","0","_trace","did_special_doc_expire_yet")); + waitForNoResults( + 180, params("q", "id:special99", "rows", "0", "_trace", "did_special_doc_expire_yet")); // now check all of the replicas to verify a few things: // - only the replicas of one shard changed -- no unneccessary churn on other shards // - every replica of each single shard should have the same number of docs // - the total number of docs should match numDocsThatNeverExpire log.info("Fetching ReplicaData AFTER special doc addition/expiration"); - final Map finalReplicaData = getTestDataForAllReplicas(); - assertEquals("WTF? not same num replicas?", - initReplicaData.size(), - finalReplicaData.size()); + final Map finalReplicaData = getTestDataForAllReplicas(); + assertEquals("WTF? not same num replicas?", initReplicaData.size(), finalReplicaData.size()); final Set coresThatChange = new HashSet<>(); final Set shardsThatChange = new HashSet<>(); - + int coresCompared = 0; int totalDocsOnAllShards = 0; - final DocCollection collectionState = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION); + final DocCollection collectionState = + cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION); for (Slice shard : collectionState) { boolean firstReplica = true; for (Replica replica : shard) { @@ -239,10 +265,10 @@ private void runTest() throws Exception { if (!initData.equals(finalData)) { log.error("ReplicaData changed: {} != {}", initData, finalData); - coresThatChange.add(core + "("+shard.getName()+")"); + coresThatChange.add(core + "(" + shard.getName() + ")"); shardsThatChange.add(shard.getName()); } - + if (firstReplica) { totalDocsOnAllShards += finalData.numDocs; firstReplica = false; @@ -250,29 +276,38 @@ private void runTest() throws Exception { } } - assertEquals("Exactly one shard should have changed, instead: " + shardsThatChange - + " cores=(" + coresThatChange + ")", - 1, shardsThatChange.size()); - assertEquals("somehow we missed some cores?", - initReplicaData.size(), coresCompared); + assertEquals( + "Exactly one shard should have changed, instead: " + + shardsThatChange + + " cores=(" + + coresThatChange + + ")", + 1, + shardsThatChange.size()); + assertEquals("somehow we missed some cores?", initReplicaData.size(), coresCompared); + + assertEquals( + "Final tally has incorrect numDocsThatNeverExpire", + numDocsThatNeverExpire, + totalDocsOnAllShards); - assertEquals("Final tally has incorrect numDocsThatNeverExpire", - numDocsThatNeverExpire, totalDocsOnAllShards); - // TODO: above logic verifies that deleteByQuery happens on all nodes, and ... - // doesn't affect searcher re-open on shards w/o expired docs ... can we also verify + // doesn't affect searcher re-open on shards w/o expired docs ... can we also verify // that *only* one node is sending the deletes ? // (ie: no flood of redundant deletes?) 
} /** - * returns a map whose key is the coreNodeName and whose value is data about that core needed for the test + * returns a map whose key is the coreNodeName and whose value is data about that core needed for + * the test */ - private Map getTestDataForAllReplicas() throws IOException, SolrServerException { - Map results = new HashMap<>(); + private Map getTestDataForAllReplicas() + throws IOException, SolrServerException { + Map results = new HashMap<>(); - DocCollection collectionState = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION); + DocCollection collectionState = + cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION); for (Replica replica : collectionState.getReplicas()) { @@ -292,17 +327,22 @@ private Map getTestDataForAllReplicas() throws IOException, assertNotNull("null version from server: " + coreName, version); assertTrue("version isn't a long: " + coreName, version instanceof Long); - long numDocs = - setAuthIfNeeded(new QueryRequest - (params("q", "*:*", - "distrib", "false", - "rows", "0", - "_trace", "counting_docs"))).process(client).getResults().getNumFound(); - - final ReplicaData data = new ReplicaData(replica.getShard(),coreName,(Long)version,numDocs); + long numDocs = + setAuthIfNeeded( + new QueryRequest( + params( + "q", "*:*", + "distrib", "false", + "rows", "0", + "_trace", "counting_docs"))) + .process(client) + .getResults() + .getNumFound(); + + final ReplicaData data = + new ReplicaData(replica.getShard(), coreName, (Long) version, numDocs); log.info("{}", data); results.put(coreName, data); - } } @@ -310,26 +350,24 @@ private Map getTestDataForAllReplicas() throws IOException, } /** - * Executes a query over and over against the cloudClient every 5 seconds - * until the numFound is 0 or the maxTimeLimitSeconds is exceeded. - * Query is guaranteed to be executed at least once. + * Executes a query over and over against the cloudClient every 5 seconds until the numFound is 0 + * or the maxTimeLimitSeconds is exceeded. Query is guaranteed to be executed at least once. */ - private void waitForNoResults(int maxTimeLimitSeconds, - SolrParams params) + private void waitForNoResults(int maxTimeLimitSeconds, SolrParams params) throws SolrServerException, InterruptedException, IOException { final QueryRequest req = setAuthIfNeeded(new QueryRequest(params)); - final TimeOut timeout = new TimeOut(maxTimeLimitSeconds, TimeUnit.SECONDS, TimeSource.NANO_TIME); - + final TimeOut timeout = + new TimeOut(maxTimeLimitSeconds, TimeUnit.SECONDS, TimeSource.NANO_TIME); + long numFound = req.process(cluster.getSolrClient(), COLLECTION).getResults().getNumFound(); - while (0L < numFound && ! 
timeout.hasTimedOut()) { + while (0L < numFound && !timeout.hasTimedOut()) { Thread.sleep(Math.max(1, Math.min(5000, timeout.timeLeft(TimeUnit.MILLISECONDS)))); - + numFound = req.process(cluster.getSolrClient(), COLLECTION).getResults().getNumFound(); } - assertEquals("Give up waiting for no results: " + params, - 0L, numFound); + assertEquals("Give up waiting for no results: " + params, 0L, numFound); } private static class ReplicaData { @@ -337,34 +375,42 @@ private static class ReplicaData { public final String coreName; public final long indexVersion; public final long numDocs; - public ReplicaData(final String shardName, - final String coreName, - final long indexVersion, - final long numDocs) { + + public ReplicaData( + final String shardName, + final String coreName, + final long indexVersion, + final long numDocs) { assert null != shardName; assert null != coreName; - + this.shardName = shardName; this.coreName = coreName; this.indexVersion = indexVersion; this.numDocs = numDocs; } - + @Override public String toString() { - return "ReplicaData(shard="+shardName+",core="+coreName+ - ",indexVer="+indexVersion+",numDocs="+numDocs+")"; + return "ReplicaData(shard=" + + shardName + + ",core=" + + coreName + + ",indexVer=" + + indexVersion + + ",numDocs=" + + numDocs + + ")"; } - + @Override public boolean equals(Object other) { if (other instanceof ReplicaData) { - ReplicaData that = (ReplicaData)other; - return - this.shardName.equals(that.shardName) && - this.coreName.equals(that.coreName) && - (this.indexVersion == that.indexVersion) && - (this.numDocs == that.numDocs); + ReplicaData that = (ReplicaData) other; + return this.shardName.equals(that.shardName) + && this.coreName.equals(that.coreName) + && (this.indexVersion == that.indexVersion) + && (this.numDocs == that.numDocs); } return false; } @@ -374,5 +420,4 @@ public int hashCode() { return Objects.hash(this.shardName, this.coreName, this.indexVersion, this.numDocs); } } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/DistribJoinFromCollectionTest.java b/solr/core/src/test/org/apache/solr/cloud/DistribJoinFromCollectionTest.java index 9406594aa20..2fdb07911d8 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DistribJoinFromCollectionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DistribJoinFromCollectionTest.java @@ -16,6 +16,8 @@ */ package org.apache.solr.cloud; +import static org.hamcrest.CoreMatchers.not; + import java.io.IOException; import java.lang.invoke.MethodHandles; import java.nio.file.Path; @@ -23,7 +25,6 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; - import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.BaseHttpSolrClient; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -45,42 +46,35 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.hamcrest.CoreMatchers.not; - -/** - * Tests using fromIndex that points to a collection in SolrCloud mode. - */ -public class DistribJoinFromCollectionTest extends SolrCloudTestCase{ +/** Tests using fromIndex that points to a collection in SolrCloud mode. 
*/ +public class DistribJoinFromCollectionTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - final private static String[] scoreModes = {"avg","max","min","total"}; + private static final String[] scoreModes = {"avg", "max", "min", "total"}; -// resetExceptionIgnores(); + // resetExceptionIgnores(); private static String toColl = "to_2x2"; private static String fromColl = "from_1x4"; private static String toDocId; - + @BeforeClass public static void setupCluster() throws Exception { final Path configDir = TEST_COLL1_CONF(); String configName = "solrCloudCollectionConfig"; int nodeCount = 5; - configureCluster(nodeCount) - .addConfig(configName, configDir) - .configure(); - - + configureCluster(nodeCount).addConfig(configName, configDir).configure(); + Map collectionProperties = new HashMap<>(); - collectionProperties.put("config", "solrconfig-tlog.xml" ); - collectionProperties.put("schema", "schema.xml"); - + collectionProperties.put("config", "solrconfig-tlog.xml"); + collectionProperties.put("schema", "schema.xml"); + // create a collection holding data for the "to" side of the JOIN - + int shards = 2; - int replicas = 2 ; + int replicas = 2; CollectionAdminRequest.createCollection(toColl, configName, shards, replicas) .setProperties(collectionProperties) .process(cluster.getSolrClient()); @@ -90,8 +84,7 @@ public static void setupCluster() throws Exception { ZkStateReader zkStateReader = cluster.getSolrClient().getZkStateReader(); ClusterState cs = zkStateReader.getClusterState(); for (Slice slice : cs.getCollection(toColl).getActiveSlices()) - for (Replica replica : slice.getReplicas()) - nodeSet.add(replica.getNodeName()); + for (Replica replica : slice.getReplicas()) nodeSet.add(replica.getNodeName()); assertTrue(nodeSet.size() > 0); // deploy the "from" collection to all nodes where the "to" collection exists @@ -104,30 +97,31 @@ public static void setupCluster() throws Exception { indexDoc(fromColl, 2001, "a", "c", null); Thread.sleep(1000); // so the commits fire - } @Test public void testScore() throws Exception { - //without score + // with score testJoins(toColl, fromColl, toDocId, true); } - + @Test public void testNoScore() throws Exception { - //with score + // without score testJoins(toColl, fromColl, toDocId, false); - } - + @AfterClass public static void shutdown() { - log.info("DistribJoinFromCollectionTest logic complete ... deleting the {} and {} collections", toColl, fromColl); + log.info( + "DistribJoinFromCollectionTest logic complete ... 
deleting the {} and {} collections", + toColl, + fromColl); // try to clean up - for (String c : new String[]{ toColl, fromColl }) { + for (String c : new String[] {toColl, fromColl}) { try { - CollectionAdminRequest.Delete req = CollectionAdminRequest.deleteCollection(c); + CollectionAdminRequest.Delete req = CollectionAdminRequest.deleteCollection(c); req.process(cluster.getSolrClient()); } catch (Exception e) { // don't fail the test @@ -144,30 +138,41 @@ private void testJoins(String toColl, String fromColl, String toDocId, boolean i final String fromQ = "match_s:c^2"; CloudSolrClient client = cluster.getSolrClient(); { - final String joinQ = "{!join " + anyScoreMode(isScoresTest) - + "from=join_s fromIndex=" + fromColl + - " to=join_s}" + fromQ; - QueryRequest qr = new QueryRequest(params("collection", toColl, "q", joinQ, "fl", "id,get_s,score")); - QueryResponse rsp = new QueryResponse(client.request(qr), client); - SolrDocumentList hits = rsp.getResults(); - assertTrue("Expected 1 doc, got "+hits, hits.getNumFound() == 1); - SolrDocument doc = hits.get(0); - assertEquals(toDocId, doc.getFirstValue("id")); - assertEquals("b", doc.getFirstValue("get_s")); - assertScore(isScoresTest, doc); + final String joinQ = + "{!join " + + anyScoreMode(isScoresTest) + + "from=join_s fromIndex=" + + fromColl + + " to=join_s}" + + fromQ; + QueryRequest qr = + new QueryRequest(params("collection", toColl, "q", joinQ, "fl", "id,get_s,score")); + QueryResponse rsp = new QueryResponse(client.request(qr), client); + SolrDocumentList hits = rsp.getResults(); + assertTrue("Expected 1 doc, got " + hits, hits.getNumFound() == 1); + SolrDocument doc = hits.get(0); + assertEquals(toDocId, doc.getFirstValue("id")); + assertEquals("b", doc.getFirstValue("get_s")); + assertScore(isScoresTest, doc); } - //negative test before creating an alias + // negative test before creating an alias checkAbsentFromIndex(fromColl, toColl, isScoresTest); // create an alias for the fromIndex and then query through the alias - String alias = fromColl+"Alias"; + String alias = fromColl + "Alias"; CollectionAdminRequest.createAlias(alias, fromColl).process(client); { - final String joinQ = "{!join " + anyScoreMode(isScoresTest) - + "from=join_s fromIndex=" + alias + " to=join_s}"+fromQ; - final QueryRequest qr = new QueryRequest(params("collection", toColl, "q", joinQ, "fl", "id,get_s,score")); + final String joinQ = + "{!join " + + anyScoreMode(isScoresTest) + + "from=join_s fromIndex=" + + alias + + " to=join_s}" + + fromQ; + final QueryRequest qr = + new QueryRequest(params("collection", toColl, "q", joinQ, "fl", "id,get_s,score")); final QueryResponse rsp = new QueryResponse(client.request(qr), client); final SolrDocumentList hits = rsp.getResults(); assertTrue("Expected 1 doc", hits.getNumFound() == 1); @@ -177,15 +182,20 @@ private void testJoins(String toColl, String fromColl, String toDocId, boolean i assertScore(isScoresTest, doc); } - //negative test after creating an alias + // negative test after creating an alias checkAbsentFromIndex(fromColl, toColl, isScoresTest); { // verify join doesn't work if no match in the "from" index - final String joinQ = "{!join " + (anyScoreMode(isScoresTest)) - + "from=join_s fromIndex=" + fromColl + " to=join_s}match_s:d"; - final QueryRequest qr = new QueryRequest(params("collection", toColl, "q", joinQ, "fl", "id,get_s,score")); - final QueryResponse rsp = new QueryResponse(client.request(qr), client); + final String joinQ = + "{!join " + + (anyScoreMode(isScoresTest)) + + "from=join_s 
fromIndex=" + + fromColl + + " to=join_s}match_s:d"; + final QueryRequest qr = + new QueryRequest(params("collection", toColl, "q", joinQ, "fl", "id,get_s,score")); + final QueryResponse rsp = new QueryResponse(client.request(qr), client); final SolrDocumentList hits = rsp.getResults(); assertTrue("Expected no hits", hits.getNumFound() == 0); } @@ -193,7 +203,8 @@ private void testJoins(String toColl, String fromColl, String toDocId, boolean i private void assertScore(boolean isScoresTest, SolrDocument doc) { if (isScoresTest) { - assertThat("score join doesn't return 1.0",doc.getFirstValue("score").toString(), not("1.0")); + assertThat( + "score join doesn't return 1.0", doc.getFirstValue("score").toString(), not("1.0")); } else { assertEquals("Solr join has constant score", "1.0", doc.getFirstValue("score").toString()); } @@ -203,18 +214,28 @@ private String anyScoreMode(boolean isScoresTest) { return isScoresTest ? "score=" + (scoreModes[random().nextInt(scoreModes.length)]) + " " : ""; } - private void checkAbsentFromIndex(String fromColl, String toColl, boolean isScoresTest) throws SolrServerException, IOException { + private void checkAbsentFromIndex(String fromColl, String toColl, boolean isScoresTest) + throws SolrServerException, IOException { final String wrongName = fromColl + "WrongName"; - final String joinQ = "{!join " + (anyScoreMode(isScoresTest)) - + "from=join_s fromIndex=" + wrongName + " to=join_s}match_s:c"; - final QueryRequest qr = new QueryRequest(params("collection", toColl, "q", joinQ, "fl", "id,get_s,score")); - BaseHttpSolrClient.RemoteSolrException ex = assertThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> - cluster.getSolrClient().request(qr)); + final String joinQ = + "{!join " + + (anyScoreMode(isScoresTest)) + + "from=join_s fromIndex=" + + wrongName + + " to=join_s}match_s:c"; + final QueryRequest qr = + new QueryRequest(params("collection", toColl, "q", joinQ, "fl", "id,get_s,score")); + BaseHttpSolrClient.RemoteSolrException ex = + assertThrows( + BaseHttpSolrClient.RemoteSolrException.class, + () -> cluster.getSolrClient().request(qr)); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); assertTrue(ex.getMessage().contains(wrongName)); } - protected static String indexDoc(String collection, int id, String joinField, String matchField, String getField) throws Exception { + protected static String indexDoc( + String collection, int id, String joinField, String matchField, String getField) + throws Exception { UpdateRequest up = new UpdateRequest(); up.setCommitWithin(50); up.setParam("collection", collection); @@ -222,10 +243,8 @@ protected static String indexDoc(String collection, int id, String joinField, St String docId = "" + id; doc.addField("id", docId); doc.addField("join_s", joinField); - if (matchField != null) - doc.addField("match_s", matchField); - if (getField != null) - doc.addField("get_s", getField); + if (matchField != null) doc.addField("match_s", matchField); + if (getField != null) doc.addField("get_s", getField); up.add(doc); cluster.getSolrClient().request(up); return docId; diff --git a/solr/core/src/test/org/apache/solr/cloud/DistributedApiAsyncTrackerTest.java b/solr/core/src/test/org/apache/solr/cloud/DistributedApiAsyncTrackerTest.java index 1c7f9bf2c6b..63a23af304b 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DistributedApiAsyncTrackerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DistributedApiAsyncTrackerTest.java @@ -23,9 +23,7 @@ import org.apache.solr.common.util.NamedList; 
import org.junit.Test; -/** - * Test async id tracking scenarios as used when Collection API is distributed. - */ +/** Test async id tracking scenarios as used when Collection API is distributed. */ public class DistributedApiAsyncTrackerTest extends SolrTestCaseJ4 { protected ZkTestServer zkServer; @@ -54,17 +52,25 @@ public void testBasic() throws Exception { final String asyncId = "mario"; - assertTrue("Could not create async task " + asyncId ,daat.createNewAsyncJobTracker(asyncId)); - assertFalse("Should not have been able to create duplicate task " + asyncId ,daat.createNewAsyncJobTracker(asyncId)); + assertTrue("Could not create async task " + asyncId, daat.createNewAsyncJobTracker(asyncId)); + assertFalse( + "Should not have been able to create duplicate task " + asyncId, + daat.createNewAsyncJobTracker(asyncId)); daat.cancelAsyncId(asyncId); assertEquals(RequestStatusState.NOT_FOUND, daat.getAsyncTaskRequestStatus(asyncId).first()); - assertTrue("Could not create async task after cancel " + asyncId ,daat.createNewAsyncJobTracker(asyncId)); + assertTrue( + "Could not create async task after cancel " + asyncId, + daat.createNewAsyncJobTracker(asyncId)); assertEquals(RequestStatusState.SUBMITTED, daat.getAsyncTaskRequestStatus(asyncId).first()); - assertFalse("Can't delete a non completed/failed task " + asyncId, daat.deleteSingleAsyncId(asyncId)); + assertFalse( + "Can't delete a non-completed/failed task " + asyncId, daat.deleteSingleAsyncId(asyncId)); daat.deleteAllAsyncIds(); - assertEquals("Task should still be here because couldn't be deleted " + asyncId, RequestStatusState.SUBMITTED, daat.getAsyncTaskRequestStatus(asyncId).first()); + assertEquals( + "Task should still be here because it couldn't be deleted " + asyncId, + RequestStatusState.SUBMITTED, + daat.getAsyncTaskRequestStatus(asyncId).first()); daat.setTaskRunning(asyncId); assertEquals(RequestStatusState.RUNNING, daat.getAsyncTaskRequestStatus(asyncId).first()); @@ -73,42 +79,87 @@ public void testBasic() throws Exception { nl.add("MyList", "myValue"); daat.setTaskCompleted(asyncId, new OverseerSolrResponse(nl)); assertEquals(RequestStatusState.COMPLETED, daat.getAsyncTaskRequestStatus(asyncId).first()); - assertEquals("Did not retrieve correct completed OverseerSolrResponse " + asyncId, "myValue", daat.getAsyncTaskRequestStatus(asyncId).second().getResponse().get("MyList")); - - assertTrue("Should be able to delete a completed task " + asyncId, daat.deleteSingleAsyncId(asyncId)); - assertEquals("A completed task should not be found", RequestStatusState.NOT_FOUND, daat.getAsyncTaskRequestStatus(asyncId).first()); + assertEquals( + "Did not retrieve correct completed OverseerSolrResponse " + asyncId, + "myValue", + daat.getAsyncTaskRequestStatus(asyncId).second().getResponse().get("MyList")); + + assertTrue( + "Should be able to delete a completed task " + asyncId, daat.deleteSingleAsyncId(asyncId)); + assertEquals( + "A completed task should not be found", + RequestStatusState.NOT_FOUND, + daat.getAsyncTaskRequestStatus(asyncId).first()); } @Test public void testDisconnect() throws Exception { final String TRACKER_ROOT = "/disconnect"; - DistributedApiAsyncTracker permanentDaat = new DistributedApiAsyncTracker(zkClient, TRACKER_ROOT); - DistributedApiAsyncTracker permanentDaat2 = new DistributedApiAsyncTracker(zkClient, TRACKER_ROOT); + DistributedApiAsyncTracker permanentDaat = + new DistributedApiAsyncTracker(zkClient, TRACKER_ROOT); + DistributedApiAsyncTracker permanentDaat2 = + new 
DistributedApiAsyncTracker(zkClient, TRACKER_ROOT); final String asycPermanent = "permanentAsync"; final String asyncTransient = "transientAsync"; - assertTrue("Could not create async task " + asycPermanent ,permanentDaat.createNewAsyncJobTracker(asycPermanent)); - - try (SolrZkClient transientZkClient = new SolrZkClient(zkServer.getZkHost(), AbstractZkTestCase.TIMEOUT)) { - DistributedApiAsyncTracker transientDaat = new DistributedApiAsyncTracker(transientZkClient, TRACKER_ROOT); - assertTrue("Could not create async task " + asyncTransient ,transientDaat.createNewAsyncJobTracker(asyncTransient)); - - assertEquals("permanentDaat can't see " + asycPermanent, RequestStatusState.SUBMITTED, permanentDaat.getAsyncTaskRequestStatus(asycPermanent).first()); - assertEquals("permanentDaat2 can't see " + asycPermanent, RequestStatusState.SUBMITTED, permanentDaat2.getAsyncTaskRequestStatus(asycPermanent).first()); - assertEquals("transientDaat can't see " + asycPermanent, RequestStatusState.SUBMITTED, transientDaat.getAsyncTaskRequestStatus(asycPermanent).first()); - assertEquals("permanentDaat can't see " + asyncTransient, RequestStatusState.SUBMITTED, permanentDaat.getAsyncTaskRequestStatus(asyncTransient).first()); - assertEquals("permanentDaat2 can't see " + asyncTransient, RequestStatusState.SUBMITTED, permanentDaat2.getAsyncTaskRequestStatus(asyncTransient).first()); - assertEquals("transientDaat can't see " + asyncTransient, RequestStatusState.SUBMITTED, transientDaat.getAsyncTaskRequestStatus(asyncTransient).first()); + assertTrue( + "Could not create async task " + asycPermanent, + permanentDaat.createNewAsyncJobTracker(asycPermanent)); + + try (SolrZkClient transientZkClient = + new SolrZkClient(zkServer.getZkHost(), AbstractZkTestCase.TIMEOUT)) { + DistributedApiAsyncTracker transientDaat = + new DistributedApiAsyncTracker(transientZkClient, TRACKER_ROOT); + assertTrue( + "Could not create async task " + asyncTransient, + transientDaat.createNewAsyncJobTracker(asyncTransient)); + + assertEquals( + "permanentDaat can't see " + asycPermanent, + RequestStatusState.SUBMITTED, + permanentDaat.getAsyncTaskRequestStatus(asycPermanent).first()); + assertEquals( + "permanentDaat2 can't see " + asycPermanent, + RequestStatusState.SUBMITTED, + permanentDaat2.getAsyncTaskRequestStatus(asycPermanent).first()); + assertEquals( + "transientDaat can't see " + asycPermanent, + RequestStatusState.SUBMITTED, + transientDaat.getAsyncTaskRequestStatus(asycPermanent).first()); + assertEquals( + "permanentDaat can't see " + asyncTransient, + RequestStatusState.SUBMITTED, + permanentDaat.getAsyncTaskRequestStatus(asyncTransient).first()); + assertEquals( + "permanentDaat2 can't see " + asyncTransient, + RequestStatusState.SUBMITTED, + permanentDaat2.getAsyncTaskRequestStatus(asyncTransient).first()); + assertEquals( + "transientDaat can't see " + asyncTransient, + RequestStatusState.SUBMITTED, + transientDaat.getAsyncTaskRequestStatus(asyncTransient).first()); } // transientDaat connection closed, doesn't change a thing for asycPermanent... 
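+ // (the in-flight marker appears to be session-bound in ZooKeeper -- an ephemeral node -- which + // is why only the task created through the closed session shows up as FAILED below)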
- assertEquals("permanentDaat can't see " + asycPermanent, RequestStatusState.SUBMITTED, permanentDaat.getAsyncTaskRequestStatus(asycPermanent).first()); - assertEquals("permanentDaat2 can't see " + asycPermanent, RequestStatusState.SUBMITTED, permanentDaat2.getAsyncTaskRequestStatus(asycPermanent).first()); + assertEquals( + "permanentDaat can't see " + asycPermanent, + RequestStatusState.SUBMITTED, + permanentDaat.getAsyncTaskRequestStatus(asycPermanent).first()); + assertEquals( + "permanentDaat2 can't see " + asycPermanent, + RequestStatusState.SUBMITTED, + permanentDaat2.getAsyncTaskRequestStatus(asycPermanent).first()); // ...but asyncTransient is now failed. - assertEquals("permanentDaat can't see " + asyncTransient, RequestStatusState.FAILED, permanentDaat.getAsyncTaskRequestStatus(asyncTransient).first()); - assertEquals("permanentDaat2 can't see " + asyncTransient, RequestStatusState.FAILED, permanentDaat2.getAsyncTaskRequestStatus(asyncTransient).first()); + assertEquals( + "permanentDaat can't see " + asyncTransient, + RequestStatusState.FAILED, + permanentDaat.getAsyncTaskRequestStatus(asyncTransient).first()); + assertEquals( + "permanentDaat2 can't see " + asyncTransient, + RequestStatusState.FAILED, + permanentDaat2.getAsyncTaskRequestStatus(asyncTransient).first()); } @Test @@ -118,72 +169,135 @@ public void testDisconnectAfterCompletion() throws Exception { final String asyncId = "theId"; - try (SolrZkClient transientZkClient = new SolrZkClient(zkServer.getZkHost(), AbstractZkTestCase.TIMEOUT)) { - DistributedApiAsyncTracker transientDaat = new DistributedApiAsyncTracker(transientZkClient, TRACKER_ROOT); - assertTrue("Could not create async task " + asyncId ,transientDaat.createNewAsyncJobTracker(asyncId)); + try (SolrZkClient transientZkClient = + new SolrZkClient(zkServer.getZkHost(), AbstractZkTestCase.TIMEOUT)) { + DistributedApiAsyncTracker transientDaat = + new DistributedApiAsyncTracker(transientZkClient, TRACKER_ROOT); + assertTrue( + "Could not create async task " + asyncId, + transientDaat.createNewAsyncJobTracker(asyncId)); // The task completes, then connection lost NamedList nl = new NamedList<>(); nl.add("status", "I made it"); transientDaat.setTaskCompleted(asyncId, new OverseerSolrResponse(nl)); - assertEquals("transientDaat can't see " + asyncId, RequestStatusState.COMPLETED, transientDaat.getAsyncTaskRequestStatus(asyncId).first()); - assertEquals("transientDaat can't retrieve correct completed OverseerSolrResponse " + asyncId, "I made it", transientDaat.getAsyncTaskRequestStatus(asyncId).second().getResponse().get("status")); - assertEquals("daat can't retrieve correct completed OverseerSolrResponse " + asyncId, "I made it", daat.getAsyncTaskRequestStatus(asyncId).second().getResponse().get("status")); + assertEquals( + "transientDaat can't see " + asyncId, + RequestStatusState.COMPLETED, + transientDaat.getAsyncTaskRequestStatus(asyncId).first()); + assertEquals( + "transientDaat can't retrieve correct completed OverseerSolrResponse " + asyncId, + "I made it", + transientDaat.getAsyncTaskRequestStatus(asyncId).second().getResponse().get("status")); + assertEquals( + "daat can't retrieve correct completed OverseerSolrResponse " + asyncId, + "I made it", + daat.getAsyncTaskRequestStatus(asyncId).second().getResponse().get("status")); } - // Even though connection was closed, the task should still be completed, and we should be able to retrieve its response - assertEquals("daat can't see " + asyncId, RequestStatusState.COMPLETED, 
daat.getAsyncTaskRequestStatus(asyncId).first()); - assertEquals("Did not retrieve correct completed OverseerSolrResponse after other connection closes " + asyncId, "I made it", daat.getAsyncTaskRequestStatus(asyncId).second().getResponse().get("status")); + // Even though connection was closed, the task should still be completed, and we should be able + // to retrieve its response + assertEquals( + "daat can't see " + asyncId, + RequestStatusState.COMPLETED, + daat.getAsyncTaskRequestStatus(asyncId).first()); + assertEquals( + "Did not retrieve correct completed OverseerSolrResponse after other connection closes " + + asyncId, + "I made it", + daat.getAsyncTaskRequestStatus(asyncId).second().getResponse().get("status")); // And given it completed, it should be deleted daat.deleteAllAsyncIds(); - assertEquals("task was not deleted " + asyncId, RequestStatusState.NOT_FOUND, daat.getAsyncTaskRequestStatus(asyncId).first()); + assertEquals( + "task was not deleted " + asyncId, + RequestStatusState.NOT_FOUND, + daat.getAsyncTaskRequestStatus(asyncId).first()); } @Test public void testIdCleanup() throws Exception { final int maxTasks = 30; // When cleaning up, 3 async id's will be removed - DistributedApiAsyncTracker daat = new DistributedApiAsyncTracker(zkClient, "/manyIds", maxTasks); + DistributedApiAsyncTracker daat = + new DistributedApiAsyncTracker(zkClient, "/manyIds", maxTasks); for (int asyncId = 1; asyncId <= maxTasks; asyncId++) { assertTrue(daat.createNewAsyncJobTracker(Integer.toString(asyncId))); } // All ids should be tracked - assertEquals(RequestStatusState.SUBMITTED, daat.getAsyncTaskRequestStatus(Integer.toString(1)).first()); - assertEquals(RequestStatusState.SUBMITTED, daat.getAsyncTaskRequestStatus(Integer.toString(maxTasks)).first()); + assertEquals( + RequestStatusState.SUBMITTED, daat.getAsyncTaskRequestStatus(Integer.toString(1)).first()); + assertEquals( + RequestStatusState.SUBMITTED, + daat.getAsyncTaskRequestStatus(Integer.toString(maxTasks)).first()); // Adding one more should trigger cleanup of earlier id's assertTrue(daat.createNewAsyncJobTracker("straw")); String cleanedUpId1 = Integer.toString(1); - assertEquals("Cleanup should have been triggered and removed the first task", RequestStatusState.NOT_FOUND, daat.getAsyncTaskRequestStatus(cleanedUpId1).first()); - assertEquals("Last task should not have been cleaned up", RequestStatusState.SUBMITTED, daat.getAsyncTaskRequestStatus(Integer.toString(maxTasks)).first()); - assertEquals("Task having triggered cleanup should not have been cleaned up", RequestStatusState.SUBMITTED, daat.getAsyncTaskRequestStatus("straw").first()); + assertEquals( + "Cleanup should have been triggered and removed the first task", + RequestStatusState.NOT_FOUND, + daat.getAsyncTaskRequestStatus(cleanedUpId1).first()); + assertEquals( + "Last task should not have been cleaned up", + RequestStatusState.SUBMITTED, + daat.getAsyncTaskRequestStatus(Integer.toString(maxTasks)).first()); + assertEquals( + "Task having triggered cleanup should not have been cleaned up", + RequestStatusState.SUBMITTED, + daat.getAsyncTaskRequestStatus("straw").first()); // Identical to the test 3 lines above but repeated to be considered in context - assertEquals("Cleaned up ID (1) should no longer be visible", RequestStatusState.NOT_FOUND, daat.getAsyncTaskRequestStatus(cleanedUpId1).first()); + assertEquals( + "Cleaned up ID (1) should no longer be visible", + RequestStatusState.NOT_FOUND, + daat.getAsyncTaskRequestStatus(cleanedUpId1).first()); // Creating a 
new task with same id fails but "revives" the ID if the task is still in progress - assertFalse("Should not be able to create a task with same id as task in progress", daat.createNewAsyncJobTracker(cleanedUpId1)); - assertEquals("Cleaned up ID now visible again", RequestStatusState.SUBMITTED, daat.getAsyncTaskRequestStatus(cleanedUpId1).first()); - - // 2 is also in progress and was also cleaned up. It should be markable complete without issues (since that's what will happen when it completed). + assertFalse( + "Should not be able to create a task with same id as task in progress", + daat.createNewAsyncJobTracker(cleanedUpId1)); + assertEquals( + "Cleaned up ID now visible again", + RequestStatusState.SUBMITTED, + daat.getAsyncTaskRequestStatus(cleanedUpId1).first()); + + // 2 is also in progress and was also cleaned up. It should be markable complete without issues + // (since that's what will happen when it completes). String cleanedUpId2 = Integer.toString(2); - assertEquals("Another cleaned up ID (2) should not be visible", RequestStatusState.NOT_FOUND, daat.getAsyncTaskRequestStatus(cleanedUpId2).first()); + assertEquals( + "Another cleaned up ID (2) should not be visible", + RequestStatusState.NOT_FOUND, + daat.getAsyncTaskRequestStatus(cleanedUpId2).first()); NamedList nl = new NamedList<>(); nl.add("code", "da vinci"); daat.setTaskCompleted(cleanedUpId2, new OverseerSolrResponse(nl)); - assertEquals("task should now be completed " + cleanedUpId2, RequestStatusState.COMPLETED, daat.getAsyncTaskRequestStatus(cleanedUpId2).first()); - assertEquals("task should have correct OverseerSolrResponse " + cleanedUpId2, "da vinci", daat.getAsyncTaskRequestStatus(cleanedUpId2).second().getResponse().get("code")); - - // 3 is also in progress and was also cleaned up. It should be markable running without issues, but that doesn't revive it (still not found). - // (if we want setTaskRunning to revive a removed task, we'd have to check the persistent node existence each time, which is likely not worth it) + assertEquals( + "task should now be completed " + cleanedUpId2, + RequestStatusState.COMPLETED, + daat.getAsyncTaskRequestStatus(cleanedUpId2).first()); + assertEquals( + "task should have correct OverseerSolrResponse " + cleanedUpId2, + "da vinci", + daat.getAsyncTaskRequestStatus(cleanedUpId2).second().getResponse().get("code")); + + // 3 is also in progress and was also cleaned up. It should be markable running without issues, + // but that doesn't revive it (still not found). 
+ // (if we want setTaskRunning to revive a removed task, we'd have to check the persistent node + // existence each time, which is likely not worth it) String cleanedUpId3 = Integer.toString(3); - assertEquals("Another cleaned up ID (3) should not be visible", RequestStatusState.NOT_FOUND, daat.getAsyncTaskRequestStatus(cleanedUpId3).first()); + assertEquals( + "Another cleaned up ID (3) should not be visible", + RequestStatusState.NOT_FOUND, + daat.getAsyncTaskRequestStatus(cleanedUpId3).first()); daat.setTaskRunning(cleanedUpId3); - assertEquals("task should now be running " + cleanedUpId3, RequestStatusState.NOT_FOUND, daat.getAsyncTaskRequestStatus(cleanedUpId3).first()); + assertEquals( + "setTaskRunning should not revive the cleaned up task " + cleanedUpId3, + RequestStatusState.NOT_FOUND, + daat.getAsyncTaskRequestStatus(cleanedUpId3).first()); } } diff --git a/solr/core/src/test/org/apache/solr/cloud/DistributedQueueTest.java b/solr/core/src/test/org/apache/solr/cloud/DistributedQueueTest.java index 7929ed674c4..ac0afecb1e9 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DistributedQueueTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DistributedQueueTest.java @@ -23,7 +23,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.function.Predicate; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.cloud.DistributedQueue; import org.apache.solr.common.cloud.SolrZkClient; @@ -41,7 +40,8 @@ public class DistributedQueueTest extends SolrTestCaseJ4 { protected ZkTestServer zkServer; protected SolrZkClient zkClient; - protected ExecutorService executor = ExecutorUtil.newMDCAwareSingleThreadExecutor(new SolrNamedThreadFactory("dqtest-")); + protected ExecutorService executor = + ExecutorUtil.newMDCAwareSingleThreadExecutor(new SolrNamedThreadFactory("dqtest-")); @Before @Override @@ -145,16 +145,18 @@ public void testDistributedQueueBlocking() throws Exception { // After draining the queue, a watcher should be set. assertNull(dq.peek(100)); - + TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); - timeout.waitFor("Timeout waiting to see dirty=false", () -> { - try { - return !dq.isDirty(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }); - + timeout.waitFor( + "Timeout waiting to see dirty=false", + () -> { + try { + return !dq.isDirty(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); + assertFalse(dq.isDirty()); assertEquals(1, dq.watcherCount()); @@ -231,7 +233,6 @@ public void testLocallyOffer() throws Exception { } } - @Test public void testPeekElements() throws Exception { String dqZNode = "/distqueue/test"; @@ -261,21 +262,29 @@ public void testPeekElements() throws Exception { assertTrue(System.nanoTime() - start >= TimeUnit.MILLISECONDS.toNanos(500)); // If someone adds a new matching element while we're waiting, we should return immediately. - executor.submit(() -> { - try { - Thread.sleep(500); - dq.offer(data); - } catch (Exception e) { - // ignore - } - }); + executor.submit( + () -> { + try { + Thread.sleep(500); + dq.offer(data); + } catch (Exception e) { + // ignore + } + }); start = System.nanoTime(); - assertEquals(1, dq.peekElements(4, 2000, child -> { - // The 4th element in the queue will end with a "3". - return child.endsWith("3"); - }).size()); + assertEquals( + 1, + dq.peekElements( + 4, + 2000, + child -> { + // The 4th element in the queue will end with a "3". 
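+ // (queue entries are created as sequentially numbered znodes, so child names sort in + // insertion order and the fourth offered element's name ends in "3")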
+ return child.endsWith("3"); + }) + .size()); long timeTaken = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start); - assertTrue("Time was " + timeTaken + "ms, expected 250-1500ms", timeTaken > 250 && timeTaken < 1500); + assertTrue( + "Time was " + timeTaken + "ms, expected 250-1500ms", timeTaken > 250 && timeTaken < 1500); } private void forceSessionExpire() throws InterruptedException, TimeoutException { @@ -320,10 +329,8 @@ public void run() { } protected String setupNewDistributedQueueZNode(String znodePath) throws Exception { - if (!zkClient.exists("/", true)) - zkClient.makePath("/", false, true); - if (zkClient.exists(znodePath, true)) - zkClient.clean(znodePath); + if (!zkClient.exists("/", true)) zkClient.makePath("/", false, true); + if (zkClient.exists(znodePath, true)) zkClient.clean(znodePath); zkClient.makePath(znodePath, false, true); return znodePath; } diff --git a/solr/core/src/test/org/apache/solr/cloud/DistributedVersionInfoTest.java b/solr/core/src/test/org/apache/solr/cloud/DistributedVersionInfoTest.java index 1d68e60a45b..e0b9d4bad96 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DistributedVersionInfoTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DistributedVersionInfoTest.java @@ -16,6 +16,9 @@ */ package org.apache.solr.cloud; +import static org.apache.solr.update.processor.DistributedUpdateProcessor.DISTRIB_FROM; +import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM; + import java.io.IOException; import java.lang.invoke.MethodHandles; import java.util.ArrayList; @@ -27,7 +30,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.JSONTestUtil; import org.apache.solr.SolrTestCaseJ4.SuppressSSL; @@ -56,9 +58,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.update.processor.DistributedUpdateProcessor.DISTRIB_FROM; -import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM; - @Slow @SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776") public class DistributedVersionInfoTest extends SolrCloudTestCase { @@ -67,9 +66,7 @@ public class DistributedVersionInfoTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { - configureCluster(3) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(3).addConfig("conf", configset("cloud-minimal")).configure(); } private static final String COLLECTION = "c8n_vers_1x3"; @@ -83,7 +80,10 @@ public void testReplicaVersionHandling() throws Exception { .processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT); final ZkStateReader stateReader = cluster.getSolrClient().getZkStateReader(); - stateReader.waitForState(COLLECTION, DEFAULT_TIMEOUT, TimeUnit.SECONDS, + stateReader.waitForState( + COLLECTION, + DEFAULT_TIMEOUT, + TimeUnit.SECONDS, (n, c) -> DocCollection.isFullyActive(n, c, 1, 3)); final Replica leader = stateReader.getLeaderRetry(COLLECTION, shardId); @@ -95,17 +95,20 @@ public void testReplicaVersionHandling() throws Exception { cluster.getSolrClient().commit(COLLECTION); // verify doc is on the leader and replica - final List notLeaders = stateReader.getClusterState().getCollection(COLLECTION).getReplicas() - .stream() - .filter(r -> r.getCoreName().equals(leader.getCoreName()) == false) - .collect(Collectors.toList()); + final List 
notLeaders = + stateReader.getClusterState().getCollection(COLLECTION).getReplicas().stream() + .filter(r -> r.getCoreName().equals(leader.getCoreName()) == false) + .collect(Collectors.toList()); assertDocsExistInAllReplicas(leader, notLeaders, COLLECTION, 1, 1, null); // get max version from the leader and replica Replica replica = notLeaders.get(0); Long maxOnLeader = getMaxVersionFromIndex(leader); Long maxOnReplica = getMaxVersionFromIndex(replica); - assertEquals("leader and replica should have same max version: " + maxOnLeader, maxOnLeader, maxOnReplica); + assertEquals( + "leader and replica should have same max version: " + maxOnLeader, + maxOnLeader, + maxOnReplica); // send the same doc but with a lower version than the max in the index try (SolrClient client = getHttpSolrClient(replica.getCoreUrl())) { @@ -116,14 +119,17 @@ public void testReplicaVersionHandling() throws Exception { // simulate what the leader does when sending a doc to a replica ModifiableSolrParams params = new ModifiableSolrParams(); - params.set(DISTRIB_UPDATE_PARAM, DistributedUpdateProcessor.DistribPhase.FROMLEADER.toString()); + params.set( + DISTRIB_UPDATE_PARAM, DistributedUpdateProcessor.DistribPhase.FROMLEADER.toString()); params.set(DISTRIB_FROM, leader.getCoreUrl()); UpdateRequest req = new UpdateRequest(); req.setParams(params); req.add(doc); - log.info("Sending doc with out-of-date version ({}) document directly to replica", maxOnReplica -1); + log.info( + "Sending doc with out-of-date version ({}) directly to replica", + maxOnReplica - 1); client.request(req); client.commit(); @@ -136,7 +142,8 @@ public void testReplicaVersionHandling() throws Exception { maxOnLeader = getMaxVersionFromIndex(leader); maxOnReplica = getMaxVersionFromIndex(replica); - assertEquals("leader and replica should have same max version after reload", maxOnLeader, maxOnReplica); + assertEquals( + "leader and replica should have same max version after reload", maxOnLeader, maxOnReplica); // now start sending docs while collection is reloading @@ -146,91 +153,105 @@ public void testReplicaVersionHandling() throws Exception { final Set deletedDocs = new HashSet<>(); final AtomicInteger docsSent = new AtomicInteger(0); final Random rand = new Random(5150); - Thread docSenderThread = new Thread() { - public void run() { + Thread docSenderThread = + new Thread() { + public void run() { - // brief delay before sending docs - try { - Thread.sleep(rand.nextInt(30)+1); - } catch (InterruptedException e) {} + // brief delay before sending docs + try { + Thread.sleep(rand.nextInt(30) + 1); + } catch (InterruptedException e) { + } - for (int i=0; i < 1000; i++) { - if (i % (rand.nextInt(20)+1) == 0) { + for (int i = 0; i < 1000; i++) { + if (i % (rand.nextInt(20) + 1) == 0) { + try { + Thread.sleep(rand.nextInt(50) + 1); + } catch (InterruptedException e) { + } + } + + int docId = i + 1; + try { + sendDoc(docId); + docsSent.incrementAndGet(); + } catch (Exception e) { + } + } + } + }; + + Thread reloaderThread = + new Thread() { + public void run() { try { - Thread.sleep(rand.nextInt(50)+1); - } catch (InterruptedException e) {} + Thread.sleep(rand.nextInt(300) + 1); + } catch (InterruptedException e) { + } + + for (int i = 0; i < 3; i++) { + try { + reloadCollection(leader, COLLECTION); + } catch (Exception e) { + } + + try { + Thread.sleep(rand.nextInt(300) + 300); + } catch (InterruptedException e) { + } + } } + }; - int docId = i+1; - try { - sendDoc(docId); - docsSent.incrementAndGet(); - } catch (Exception e) {} - } - } 
- }; - - Thread reloaderThread = new Thread() { - public void run() { - try { - Thread.sleep(rand.nextInt(300)+1); - } catch (InterruptedException e) {} - - for (int i=0; i < 3; i++) { - try { - reloadCollection(leader, COLLECTION); - } catch (Exception e) {} - - try { - Thread.sleep(rand.nextInt(300)+300); - } catch (InterruptedException e) {} - } - } - }; - - Thread deleteThread = new Thread() { - public void run() { - - // brief delay before sending docs - try { - Thread.sleep(500); - } catch (InterruptedException e) {} - - for (int i=0; i < 200; i++) { - try { - Thread.sleep(rand.nextInt(50)+1); - } catch (InterruptedException e) {} - - int ds = docsSent.get(); - if (ds > 0) { - int docToDelete = rand.nextInt(ds) + 1; - if (!deletedDocs.contains(docToDelete)) { - delI(String.valueOf(docToDelete)); - deletedDocs.add(docToDelete); + Thread deleteThread = + new Thread() { + public void run() { + + // brief delay before sending docs + try { + Thread.sleep(500); + } catch (InterruptedException e) { + } + + for (int i = 0; i < 200; i++) { + try { + Thread.sleep(rand.nextInt(50) + 1); + } catch (InterruptedException e) { + } + + int ds = docsSent.get(); + if (ds > 0) { + int docToDelete = rand.nextInt(ds) + 1; + if (!deletedDocs.contains(docToDelete)) { + delI(String.valueOf(docToDelete)); + deletedDocs.add(docToDelete); + } + } } } - } - } - }; - - Thread committerThread = new Thread() { - public void run() { - try { - Thread.sleep(rand.nextInt(200)+1); - } catch (InterruptedException e) {} - - for (int i=0; i < 20; i++) { - try { - cluster.getSolrClient().commit(COLLECTION); - } catch (Exception e) {} - - try { - Thread.sleep(rand.nextInt(100)+100); - } catch (InterruptedException e) {} - } - } - }; + }; + Thread committerThread = + new Thread() { + public void run() { + try { + Thread.sleep(rand.nextInt(200) + 1); + } catch (InterruptedException e) { + } + + for (int i = 0; i < 20; i++) { + try { + cluster.getSolrClient().commit(COLLECTION); + } catch (Exception e) { + } + + try { + Thread.sleep(rand.nextInt(100) + 100); + } catch (InterruptedException e) { + } + } + } + }; docSenderThread.start(); reloaderThread.start(); @@ -250,23 +271,25 @@ public void run() { maxOnLeader = getMaxVersionFromIndex(leader); maxOnReplica = getMaxVersionFromIndex(replica); - assertEquals("leader and replica should have same max version before reload", maxOnLeader, maxOnReplica); + assertEquals( + "leader and replica should have same max version before reload", maxOnLeader, maxOnReplica); reloadCollection(leader, COLLECTION); maxOnLeader = getMaxVersionFromIndex(leader); maxOnReplica = getMaxVersionFromIndex(replica); - assertEquals("leader and replica should have same max version after reload", maxOnLeader, maxOnReplica); + assertEquals( + "leader and replica should have same max version after reload", maxOnLeader, maxOnReplica); assertDocsExistInAllReplicas(leader, notLeaders, COLLECTION, 1, 1000, deletedDocs); - } protected long getMaxVersionFromIndex(Replica replica) throws IOException, SolrServerException { return getVersionFromIndex(replica, null); } - protected long getVersionFromIndex(Replica replica, String docId) throws IOException, SolrServerException { + protected long getVersionFromIndex(Replica replica, String docId) + throws IOException, SolrServerException { Long vers = null; String queryStr = (docId != null) ? 
"id:" + docId : "_version_:[0 TO *]"; SolrQuery query = new SolrQuery(queryStr); @@ -278,8 +301,7 @@ protected long getVersionFromIndex(Replica replica, String docId) throws IOExcep try (SolrClient client = getHttpSolrClient(replica.getCoreUrl())) { QueryResponse qr = client.query(query); SolrDocumentList hits = qr.getResults(); - if (hits.isEmpty()) - fail("No results returned from query: "+query); + if (hits.isEmpty()) fail("No results returned from query: " + query); vers = (Long) hits.get(0).getFirstValue("_version_"); } @@ -290,22 +312,22 @@ protected long getVersionFromIndex(Replica replica, String docId) throws IOExcep return vers.longValue(); } - protected void assertDocsExistInAllReplicas(Replica leader, List notLeaders, - String testCollectionName, - int firstDocId, - int lastDocId, - Set deletedDocs) + protected void assertDocsExistInAllReplicas( + Replica leader, + List notLeaders, + String testCollectionName, + int firstDocId, + int lastDocId, + Set deletedDocs) throws Exception { HttpSolrClient leaderSolr = getHttpSolrClient(leader); List replicas = new ArrayList(notLeaders.size()); - for (Replica r : notLeaders) - replicas.add(getHttpSolrClient(r)); + for (Replica r : notLeaders) replicas.add(getHttpSolrClient(r)); try { for (int d = firstDocId; d <= lastDocId; d++) { - if (deletedDocs != null && deletedDocs.contains(d)) - continue; + if (deletedDocs != null && deletedDocs.contains(d)) continue; String docId = String.valueOf(d); Long leaderVers = assertDocExists(leaderSolr, testCollectionName, docId, null); @@ -330,25 +352,37 @@ protected void sendDoc(int docId) throws Exception { SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", String.valueOf(docId)); doc.addField("a_t", "hello" + docId); - AbstractFullDistribZkTestBase.sendDocsWithRetry(cluster.getSolrClient(), COLLECTION, Collections.singletonList(doc), 2, 3, 100); + AbstractFullDistribZkTestBase.sendDocsWithRetry( + cluster.getSolrClient(), COLLECTION, Collections.singletonList(doc), 2, 3, 100); } /** - * Query the real-time get handler for a specific doc by ID to verify it - * exists in the provided server, using distrib=false so it doesn't route to another replica. + * Query the real-time get handler for a specific doc by ID to verify it exists in the provided + * server, using distrib=false so it doesn't route to another replica. 
*/ - protected Long assertDocExists(HttpSolrClient solr, String coll, String docId, Long expVers) throws Exception { - QueryRequest qr = new QueryRequest(params("qt", "/get", "id", docId, "distrib", "false", "fl", "id,_version_")); + protected Long assertDocExists(HttpSolrClient solr, String coll, String docId, Long expVers) + throws Exception { + QueryRequest qr = + new QueryRequest( + params("qt", "/get", "id", docId, "distrib", "false", "fl", "id,_version_")); NamedList rsp = solr.request(qr); - SolrDocument doc = (SolrDocument)rsp.get("doc"); + SolrDocument doc = (SolrDocument) rsp.get("doc"); String match = JSONTestUtil.matchObj("/id", doc, docId); - assertTrue("Doc with id=" + docId + " not found in " + solr.getBaseURL() + - " due to: " + match + "; rsp=" + rsp, match == null); - - Long vers = (Long)doc.getFirstValue("_version_"); + assertTrue( + "Doc with id=" + + docId + + " not found in " + + solr.getBaseURL() + + " due to: " + + match + + "; rsp=" + + rsp, + match == null); + + Long vers = (Long) doc.getFirstValue("_version_"); assertNotNull(vers); if (expVers != null) - assertEquals("expected version of doc "+docId+" to be "+expVers, expVers, vers); + assertEquals("expected version of doc " + docId + " to be " + expVers, expVers, vers); return vers; } @@ -365,8 +399,7 @@ protected boolean reloadCollection(Replica replica, String testCollectionName) t // send reload command for the collection log.info("Sending RELOAD command for {}", testCollectionName); - CollectionAdminRequest.reloadCollection(testCollectionName) - .process(client); + CollectionAdminRequest.reloadCollection(testCollectionName).process(client); Thread.sleep(2000); // reload can take a short while // verify reload is done, waiting up to 30 seconds for slow test environments diff --git a/solr/core/src/test/org/apache/solr/cloud/DocValuesNotIndexedTest.java b/solr/core/src/test/org/apache/solr/cloud/DocValuesNotIndexedTest.java index 3caf6db8b06..1a0d68745a2 100644 --- a/solr/core/src/test/org/apache/solr/cloud/DocValuesNotIndexedTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/DocValuesNotIndexedTest.java @@ -19,6 +19,7 @@ import static org.apache.lucene.util.LuceneTestCase.random; +import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import java.io.IOException; import java.lang.invoke.MethodHandles; import java.time.Instant; @@ -30,7 +31,6 @@ import java.util.List; import java.util.Locale; import java.util.Map; - import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -60,98 +60,100 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; - public class DocValuesNotIndexedTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - @Rule - public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); - static final String COLLECTION = "dv_coll"; + @Rule public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); + static final String COLLECTION = "dv_coll"; - volatile static List fieldsToTestSingle = null; - volatile static List fieldsToTestMulti = null; - volatile static List fieldsToTestGroupSortFirst = null; - volatile static List fieldsToTestGroupSortLast = null; + static volatile List fieldsToTestSingle = null; + static volatile List fieldsToTestMulti = null; + 
static volatile List fieldsToTestGroupSortFirst = null; + static volatile List fieldsToTestGroupSortLast = null; @BeforeClass public static void createCluster() throws Exception { System.setProperty("managed.schema.mutable", "true"); configureCluster(2) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-managed").resolve("conf")) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-managed").resolve("conf")) .configure(); // Need enough shards that we have some shards that don't have any docs on them. CollectionAdminRequest.createCollection(COLLECTION, "conf1", 4, 1) .process(cluster.getSolrClient()); - + cluster.waitForActiveCollection(COLLECTION, 4, 4); fieldsToTestSingle = - Collections.unmodifiableList(Arrays.asList( - new FieldProps("intField", "int", 1), - new FieldProps("longField", "long", 1), - new FieldProps("doubleField", "double", 1), - new FieldProps("floatField", "float", 1), - new FieldProps("dateField", "date", 1), - new FieldProps("stringField", "string", 1), - new FieldProps("boolField", "boolean", 1), - new FieldProps("sortableText", "sortabletext", 1) - )); + Collections.unmodifiableList( + Arrays.asList( + new FieldProps("intField", "int", 1), + new FieldProps("longField", "long", 1), + new FieldProps("doubleField", "double", 1), + new FieldProps("floatField", "float", 1), + new FieldProps("dateField", "date", 1), + new FieldProps("stringField", "string", 1), + new FieldProps("boolField", "boolean", 1), + new FieldProps("sortableText", "sortabletext", 1))); fieldsToTestMulti = - Collections.unmodifiableList(Arrays.asList( - new FieldProps("intFieldMulti", "int", 5), - new FieldProps("longFieldMulti", "long", 5), - new FieldProps("doubleFieldMulti", "double", 5), - new FieldProps("floatFieldMulti", "float", 5), - new FieldProps("dateFieldMulti", "date", 5), - new FieldProps("stringFieldMulti", "string", 5), - new FieldProps("boolFieldMulti", "boolean", 2), - new FieldProps("sortableFieldMulti", "sortabletext", 5) - )); + Collections.unmodifiableList( + Arrays.asList( + new FieldProps("intFieldMulti", "int", 5), + new FieldProps("longFieldMulti", "long", 5), + new FieldProps("doubleFieldMulti", "double", 5), + new FieldProps("floatFieldMulti", "float", 5), + new FieldProps("dateFieldMulti", "date", 5), + new FieldProps("stringFieldMulti", "string", 5), + new FieldProps("boolFieldMulti", "boolean", 2), + new FieldProps("sortableFieldMulti", "sortabletext", 5))); // Fields to test for grouping and sorting with sortMissingFirst/Last. 
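+ // (the GSF/GSL suffixes on the field names below abbreviate "group/sort first" and + // "group/sort last", matching the two lists they belong to)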
fieldsToTestGroupSortFirst = - Collections.unmodifiableList(Arrays.asList( - new FieldProps("intGSF", "int"), - new FieldProps("longGSF", "long"), - new FieldProps("doubleGSF", "double"), - new FieldProps("floatGSF", "float"), - new FieldProps("dateGSF", "date"), - new FieldProps("stringGSF", "string"), - new FieldProps("boolGSF", "boolean"), - new FieldProps("sortableGSF", "sortabletext") - )); + Collections.unmodifiableList( + Arrays.asList( + new FieldProps("intGSF", "int"), + new FieldProps("longGSF", "long"), + new FieldProps("doubleGSF", "double"), + new FieldProps("floatGSF", "float"), + new FieldProps("dateGSF", "date"), + new FieldProps("stringGSF", "string"), + new FieldProps("boolGSF", "boolean"), + new FieldProps("sortableGSF", "sortabletext"))); fieldsToTestGroupSortLast = - Collections.unmodifiableList(Arrays.asList( - new FieldProps("intGSL", "int"), - new FieldProps("longGSL", "long"), - new FieldProps("doubleGSL", "double"), - new FieldProps("floatGSL", "float"), - new FieldProps("dateGSL", "date"), - new FieldProps("stringGSL", "string"), - new FieldProps("boolGSL", "boolean"), - new FieldProps("sortableGSL", "sortabletext") - )); - - List updateList = new ArrayList<>(fieldsToTestSingle.size() + - fieldsToTestMulti.size() + fieldsToTestGroupSortFirst.size() + fieldsToTestGroupSortLast.size() + - 4); - - updateList.add(getType("name", "float", "class", RANDOMIZED_NUMERIC_FIELDTYPES.get(Float.class))); - - updateList.add(getType("name", "double", "class", RANDOMIZED_NUMERIC_FIELDTYPES.get(Double.class))); + Collections.unmodifiableList( + Arrays.asList( + new FieldProps("intGSL", "int"), + new FieldProps("longGSL", "long"), + new FieldProps("doubleGSL", "double"), + new FieldProps("floatGSL", "float"), + new FieldProps("dateGSL", "date"), + new FieldProps("stringGSL", "string"), + new FieldProps("boolGSL", "boolean"), + new FieldProps("sortableGSL", "sortabletext"))); + + List updateList = + new ArrayList<>( + fieldsToTestSingle.size() + + fieldsToTestMulti.size() + + fieldsToTestGroupSortFirst.size() + + fieldsToTestGroupSortLast.size() + + 4); + + updateList.add( + getType("name", "float", "class", RANDOMIZED_NUMERIC_FIELDTYPES.get(Float.class))); + + updateList.add( + getType("name", "double", "class", RANDOMIZED_NUMERIC_FIELDTYPES.get(Double.class))); updateList.add(getType("name", "date", "class", RANDOMIZED_NUMERIC_FIELDTYPES.get(Date.class))); updateList.add(getType("name", "boolean", "class", "solr.BoolField")); - // Add a field for each of the types we want to the schema. 
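+ // (this relies on the mutable managed schema enabled above via managed.schema.mutable=true; + // the fields are added through a single Schema API MultiUpdate request)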
defineFields(updateList, fieldsToTestSingle, false); @@ -159,15 +161,14 @@ public static void createCluster() throws Exception { defineFields(updateList, fieldsToTestMulti, true); defineFields(updateList, fieldsToTestGroupSortFirst, false, "sorMissingFirst", "true"); defineFields(updateList, fieldsToTestGroupSortLast, false, "sorMissingLast", "true"); - MultiUpdate multiUpdateRequest = new MultiUpdate(updateList); - SchemaResponse.UpdateResponse multipleUpdatesResponse = multiUpdateRequest.process(cluster.getSolrClient(), COLLECTION); + SchemaResponse.UpdateResponse multipleUpdatesResponse = + multiUpdateRequest.process(cluster.getSolrClient(), COLLECTION); assertNull("Error adding fields", multipleUpdatesResponse.getResponse().get("errors")); cluster.getSolrClient().setDefaultCollection(COLLECTION); } - @AfterClass public static void shutdown() throws Exception { shutdownCluster(); @@ -182,7 +183,6 @@ public void clean() throws IOException, SolrServerException { resetFields(fieldsToTestMulti); resetFields(fieldsToTestGroupSortFirst); resetFields(fieldsToTestGroupSortLast); - } void resetFields(List fieldProps) { @@ -190,10 +190,11 @@ void resetFields(List fieldProps) { prop.resetBase(); } } + @Test public void testDistribFaceting() throws IOException, SolrServerException { - // For this test, I want to insure that there are shards that do _not_ have a doc with any of the DV_only - // fields, see SOLR-5260. So I'll add exactly 1 document to a 4 shard collection. + // For this test, I want to ensure that there are shards that do _not_ have a doc with any of + // the DV_only fields, see SOLR-5260. So I'll add exactly 1 document to a 4 shard collection. CloudSolrClient client = cluster.getSolrClient(); @@ -209,9 +210,7 @@ public void testDistribFaceting() throws IOException, SolrServerException { } } - new UpdateRequest() - .add(doc) - .commit(client, COLLECTION); + new UpdateRequest().add(doc).commit(client, COLLECTION); final SolrQuery solrQuery = new SolrQuery("q", "*:*", "rows", "0"); solrQuery.setFacet(true); @@ -234,12 +233,13 @@ } } - // We should be able to sort thing with missing first/last and that are _NOT_ present at all on one server. + // We should be able to sort things with missing first/last that are _NOT_ present at all on + // one server. @Test public void testGroupingSorting() throws IOException, SolrServerException { CloudSolrClient client = cluster.getSolrClient(); - // The point of these is to have at least one shard w/o the value. + // The point of these is to have at least one shard w/o the value. // While getting values for each of these fields starts _out_ random, each successive // _value_ increases. 
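+ // (with four shards and only a handful of docs, at least one shard should end up holding no + // value at all for these fields, which is the distributed case being exercised)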
List docs = new ArrayList<>(3); @@ -250,32 +250,61 @@ public void testGroupingSorting() throws IOException, SolrServerException { doc.addField("id", 4); docs.add(doc); - new UpdateRequest() - .add(docs) - .commit(client, COLLECTION); - - checkSortOrder(client, fieldsToTestGroupSortFirst, "asc", new String[]{"4", "2", "1", "3"}, new String[]{"4", "1", "2", "3"}); - checkSortOrder(client, fieldsToTestGroupSortFirst, "desc", new String[]{"3", "1", "2", "4"}, new String[]{"2", "3", "1", "4"}); - - checkSortOrder(client, fieldsToTestGroupSortLast, "asc", new String[]{"4", "2", "1", "3"}, new String[]{"4", "1", "2", "3"}); - checkSortOrder(client, fieldsToTestGroupSortLast, "desc", new String[]{"3", "1", "2", "4"}, new String[]{"2", "3", "1", "4"}); - + new UpdateRequest().add(docs).commit(client, COLLECTION); + + checkSortOrder( + client, + fieldsToTestGroupSortFirst, + "asc", + new String[] {"4", "2", "1", "3"}, + new String[] {"4", "1", "2", "3"}); + checkSortOrder( + client, + fieldsToTestGroupSortFirst, + "desc", + new String[] {"3", "1", "2", "4"}, + new String[] {"2", "3", "1", "4"}); + + checkSortOrder( + client, + fieldsToTestGroupSortLast, + "asc", + new String[] {"4", "2", "1", "3"}, + new String[] {"4", "1", "2", "3"}); + checkSortOrder( + client, + fieldsToTestGroupSortLast, + "desc", + new String[] {"3", "1", "2", "4"}, + new String[] {"2", "3", "1", "4"}); } - private void checkSortOrder(CloudSolrClient client, List props, String sortDir, String[] order, String[] orderBool) throws IOException, SolrServerException { + private void checkSortOrder( + CloudSolrClient client, + List props, + String sortDir, + String[] order, + String[] orderBool) + throws IOException, SolrServerException { for (FieldProps prop : props) { final SolrQuery solrQuery = new SolrQuery("q", "*:*", "rows", "100"); - solrQuery.setSort(prop.getName(), "asc".equals(sortDir) ? SolrQuery.ORDER.asc : SolrQuery.ORDER.desc); + solrQuery.setSort( + prop.getName(), "asc".equals(sortDir) ? SolrQuery.ORDER.asc : SolrQuery.ORDER.desc); solrQuery.addSort("id", SolrQuery.ORDER.asc); final QueryResponse rsp = client.query(COLLECTION, solrQuery); SolrDocumentList res = rsp.getResults(); - assertEquals("Should have exactly " + order.length + " documents returned", order.length, res.getNumFound()); + assertEquals( + "Should have exactly " + order.length + " documents returned", + order.length, + res.getNumFound()); String expected; for (int idx = 0; idx < res.size(); ++idx) { if (prop.getName().startsWith("bool")) expected = orderBool[idx]; else expected = order[idx]; - assertEquals("Documents in wrong order for field: " + prop.getName(), - expected, res.get(idx).get("id")); + assertEquals( + "Documents in wrong order for field: " + prop.getName(), + expected, + res.get(idx).get("id")); } } } @@ -291,19 +320,17 @@ public void testGroupingDocAbsent() throws IOException, SolrServerException { docs.add(doc); CloudSolrClient client = cluster.getSolrClient(); - new UpdateRequest() - .add(docs) - .commit(client, COLLECTION); + new UpdateRequest().add(docs).commit(client, COLLECTION); - // when grouping on any of these DV-only (not indexed) fields we expect exactly 4 groups except for Boolean. + // when grouping on any of these DV-only (not indexed) fields we expect exactly 4 groups except + // for Boolean. 
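+ // (the fields are docValues-only, so the grouping below is driven entirely by docValues + // rather than by the inverted index)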
for (FieldProps prop : fieldsToTestGroupSortFirst) { // Special handling until SOLR-9802 is fixed if (prop.getName().startsWith("date")) continue; // SOLR-9802 to here - final SolrQuery solrQuery = new SolrQuery("q", "*:*", - "group", "true", - "group.field", prop.getName()); + final SolrQuery solrQuery = + new SolrQuery("q", "*:*", "group", "true", "group.field", prop.getName()); final QueryResponse rsp = client.query(COLLECTION, solrQuery); @@ -311,17 +338,20 @@ public void testGroupingDocAbsent() throws IOException, SolrServerException { List commands = groupResponse.getValues(); GroupCommand fieldCommand = commands.get(0); int expected = 4; - if (prop.getName().startsWith("bool")) expected = 3; //true, false and null + if (prop.getName().startsWith("bool")) expected = 3; // true, false and null List fieldCommandGroups = fieldCommand.getValues(); - assertEquals("Did not find the expected number of groups for field " + prop.getName(), expected, fieldCommandGroups.size()); + assertEquals( + "Did not find the expected number of groups for field " + prop.getName(), + expected, + fieldCommandGroups.size()); } } @Test - // Verify that we actually form groups that are "expected". Most of the processing takes some care to - // make sure all the values for each field are unique. We need to have docs that have values that are _not_ - // unique. + // Verify that we actually form groups that are "expected". Most of the processing takes some care + // to make sure all the values for each field are unique. We need to have docs that have values + // that are _not_ unique. public void testGroupingDVOnlySortFirst() throws IOException, SolrServerException { doGroupingDvOnly(fieldsToTestGroupSortFirst, "boolGSF"); } @@ -331,7 +361,8 @@ public void testGroupingDVOnlySortLast() throws IOException, SolrServerException doGroupingDvOnly(fieldsToTestGroupSortLast, "boolGSL"); } - private void doGroupingDvOnly(List fieldProps, String boolName) throws IOException, SolrServerException { + private void doGroupingDvOnly(List fieldProps, String boolName) + throws IOException, SolrServerException { List docs = new ArrayList<>(50); for (int idx = 0; idx < 49; ++idx) { SolrInputDocument doc = new SolrInputDocument(); @@ -343,8 +374,8 @@ private void doGroupingDvOnly(List fieldProps, String boolName) thro } docs.add(doc); - // Every fifth time through we add a doc with no values in any of the "fields of interest", so there should be - // 10 docs with nulls + // Every fifth time through we add a doc with no values in any of the "fields of interest", so + // there should be 10 docs with nulls if ((idx % 5) == 0) { doc = new SolrInputDocument(); doc.addField("id", idx + 10_000); @@ -354,30 +385,28 @@ private void doGroupingDvOnly(List fieldProps, String boolName) thro CloudSolrClient client = cluster.getSolrClient(); - new UpdateRequest() - .add(docs) - .commit(client, COLLECTION); + new UpdateRequest().add(docs).commit(client, COLLECTION); - // OK, we should have one group with 10 entries for null, a group with 1 entry and 7 groups with 7 + // we should have one group with 10 entries for null, a group with 1 entry and 7 groups with 7 for (FieldProps prop : fieldProps) { // Solr 9802 if (prop.getName().startsWith("date")) continue; - final SolrQuery solrQuery = new SolrQuery( - "q", "*:*", - "rows", "100", - "group", "true", - "group.field", prop.getName(), - "group.limit", "100", - "group.sort", "id asc"); + final SolrQuery solrQuery = + new SolrQuery( + "q", "*:*", + "rows", "100", + "group", "true", + "group.field", 
prop.getName(), + "group.limit", "100", + "group.sort", "id asc"); final QueryResponse rsp = client.query(COLLECTION, solrQuery); GroupResponse groupResponse = rsp.getGroupResponse(); List commands = groupResponse.getValues(); - int nullCount = 0; int sevenCount = 0; int boolCount = 0; @@ -387,7 +416,8 @@ private void doGroupingDvOnly(List fieldProps, String boolName) thro switch (grp.getResult().size()) { case 7: ++sevenCount; - assertNotNull("Every group with 7 entries should have a group value.", grp.getGroupValue()); + assertNotNull( + "Every group with 7 entries should have a group value.", grp.getGroupValue()); break; case 10: ++nullCount; @@ -396,28 +426,47 @@ private void doGroupingDvOnly(List fieldProps, String boolName) thro case 25: case 24: ++boolCount; - assertEquals("We should have more counts for boolean fields!", boolName, prop.getName()); + assertEquals( + "We should have more counts for boolean fields!", boolName, prop.getName()); break; - + default: - fail("Unexpected number of elements in the group for '" + prop.getName() + "' size: '" + grp.getResult().size() - + "' GroupValue: '" + grp.getGroupValue() - + "' rsp: " + rsp); + fail( + "Unexpected number of elements in the group for '" + + prop.getName() + + "' size: '" + + grp.getResult().size() + + "' GroupValue: '" + + grp.getGroupValue() + + "' rsp: " + + rsp); } } } - assertEquals("Should be exactly one group with 1 entry of 10 for null for field " + prop.getName(), 1, nullCount); + assertEquals( + "Should be exactly one group with 1 entry of 10 for null for field " + prop.getName(), + 1, + nullCount); if (prop.getName().startsWith("bool")) { - assertEquals("Should be exactly 2 groups with non-null Boolean types " + prop.getName(), 2, boolCount); - assertEquals("Should be no seven count groups for Boolean types " + prop.getName(), 0, sevenCount); + assertEquals( + "Should be exactly 2 groups with non-null Boolean types " + prop.getName(), + 2, + boolCount); + assertEquals( + "Should be no seven count groups for Boolean types " + prop.getName(), 0, sevenCount); } else { - assertEquals("Should be exactly 7 groups with seven entries for field " + prop.getName(), 7, sevenCount); - assertEquals("Should be no gropus with 24 or 25 entries for field " + prop.getName(), 0, boolCount); + assertEquals( + "Should be exactly 7 groups with seven entries for field " + prop.getName(), + 7, + sevenCount); + assertEquals( + "Should be no groups with 24 or 25 entries for field " + prop.getName(), 0, boolCount); } } } - private SolrInputDocument makeGSDoc(int id, List p1, List p2, String... args) { + private SolrInputDocument makeGSDoc( + int id, List p1, List p2, String... args) { SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", id); for (FieldProps prop : p1) { @@ -436,8 +485,8 @@ private SolrInputDocument makeGSDoc(int id, List p1, List updateList, List props, boolean multi, String... extras) { + private static void defineFields( + List updateList, List props, boolean multi, String... extras) { for (FieldProps prop : props) { Map fieldAttributes = new LinkedHashMap<>(); fieldAttributes.put("name", prop.getName()); @@ -461,14 +510,13 @@ private static AddFieldType getType(String...
args) { return new SchemaRequest.AddFieldType(ftd); } - private void doTestFacet(FieldProps props, QueryResponse rsp) { String name = props.getName(); final List counts = rsp.getFacetField(name).getValues(); long expectedCount = props.getExpectedCount(); long foundCount = getCount(counts); - assertEquals("Field " + name + " should have a count of " + expectedCount, expectedCount, foundCount); - + assertEquals( + "Field " + name + " should have a count of " + expectedCount, expectedCount, foundCount); } private long getCount(final List counts) { @@ -491,7 +539,8 @@ class FieldProps { resetBase(); } - // There's a vague chance that counts will roll over, so let's insure we have some room to grow in a positive direction + // There's a vague chance that counts will roll over, so let's insure we have some room to grow in + // a positive direction void resetBase() { if (name.startsWith("int")) { base = (random().nextInt(Integer.MAX_VALUE)) / 2; @@ -533,7 +582,8 @@ int getExpectedCount() { public String getValue(boolean incrementCounter) { if (incrementCounter) { - counter += random().nextInt(10_000) + 1; // Must add something because nextInt could return zero + counter += + random().nextInt(10_000) + 1; // Must add something because nextInt could return zero } if (name.startsWith("int")) { return Integer.toString((int) base + counter); @@ -561,4 +611,3 @@ public String getValue(boolean incrementCounter) { throw new RuntimeException("Should have found a prefix for the field before now!"); } } - diff --git a/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java b/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java index 68957151d93..d3933b50fd7 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java @@ -16,13 +16,12 @@ */ package org.apache.solr.cloud; +import com.carrotsearch.randomizedtesting.annotations.Nightly; import java.io.IOException; import java.lang.invoke.MethodHandles; import java.util.Collections; import java.util.List; import java.util.concurrent.TimeUnit; - -import com.carrotsearch.randomizedtesting.annotations.Nightly; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.cloud.SocketProxy; @@ -53,37 +52,40 @@ public static void beforeClassSetup() { System.setProperty("distribUpdateSoTimeout", "15000"); System.setProperty("solr.httpclient.retries", "0"); System.setProperty("solr.retries.on.forward", "0"); - System.setProperty("solr.retries.to.followers", "0"); + System.setProperty("solr.retries.to.followers", "0"); } @Test @Override @Ignore - public void test() throws Exception { - - } + public void test() throws Exception {} /** - * Tests that FORCELEADER can get an active leader even only replicas with term lower than leader's term are live + * Tests that FORCELEADER can get an active leader even only replicas with term lower than + * leader's term are live */ @Test @Slow public void testReplicasInLowerTerms() throws Exception { handle.put("maxScore", SKIPVAL); handle.put("timestamp", SKIPVAL); - - String testCollectionName = "forceleader_lower_terms_collection"; createCollection(testCollectionName, "conf1", 1, 3); - try { cloudClient.setDefaultCollection(testCollectionName); - List notLeaders = ensureAllReplicasAreActive(testCollectionName, SHARD1, 1, 3, maxWaitSecsToSeeAllActive); - assertEquals("Expected 2 replicas for collection " + testCollectionName - + " but found " + 
notLeaders.size() + "; clusterState: " - + printClusterStateInfo(testCollectionName), 2, notLeaders.size()); + List notLeaders = + ensureAllReplicasAreActive(testCollectionName, SHARD1, 1, 3, maxWaitSecsToSeeAllActive); + assertEquals( + "Expected 2 replicas for collection " + + testCollectionName + + " but found " + + notLeaders.size() + + "; clusterState: " + + printClusterStateInfo(testCollectionName), + 2, + notLeaders.size()); Replica leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, SHARD1); JettySolrRunner notLeader0 = getJettyOnPort(getReplicaPort(notLeaders.get(0))); @@ -92,7 +94,8 @@ public void testReplicasInLowerTerms() throws Exception { if (log.isInfoEnabled()) { log.info("Before put non leaders into lower term: {}", printClusterStateInfo()); } - putNonLeadersIntoLowerTerm(testCollectionName, SHARD1, zkController, leader, notLeaders, cloudClient); + putNonLeadersIntoLowerTerm( + testCollectionName, SHARD1, zkController, leader, notLeaders, cloudClient); for (Replica replica : notLeaders) { waitForState(testCollectionName, replica.getName(), State.DOWN, 60000); @@ -101,11 +104,17 @@ public void testReplicasInLowerTerms() throws Exception { cloudClient.getZkStateReader().forceUpdateCollection(testCollectionName); ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); int numActiveReplicas = getNumberOfActiveReplicas(clusterState, testCollectionName, SHARD1); - assertEquals("Expected only 0 active replica but found " + numActiveReplicas + - "; clusterState: " + printClusterStateInfo(), 0, numActiveReplicas); + assertEquals( + "Expected only 0 active replica but found " + + numActiveReplicas + + "; clusterState: " + + printClusterStateInfo(), + 0, + numActiveReplicas); int numReplicasOnLiveNodes = 0; - for (Replica rep : clusterState.getCollection(testCollectionName).getSlice(SHARD1).getReplicas()) { + for (Replica rep : + clusterState.getCollection(testCollectionName).getSlice(SHARD1).getReplicas()) { if (clusterState.getLiveNodes().contains(rep.getNodeName())) { numReplicasOnLiveNodes++; } @@ -115,7 +124,9 @@ public void testReplicasInLowerTerms() throws Exception { log.info("Before forcing leader: {}", printClusterStateInfo()); } // Assert there is no leader yet - assertNull("Expected no leader right now. State: " + clusterState.getCollection(testCollectionName).getSlice(SHARD1), + assertNull( + "Expected no leader right now. 
State: " + + clusterState.getCollection(testCollectionName).getSlice(SHARD1), clusterState.getCollection(testCollectionName).getSlice(SHARD1).getLeader()); assertSendDocFails(3); @@ -129,10 +140,13 @@ public void testReplicasInLowerTerms() throws Exception { cloudClient.getZkStateReader().forceUpdateCollection(testCollectionName); clusterState = cloudClient.getZkStateReader().getClusterState(); if (log.isInfoEnabled()) { - log.info("After forcing leader: {}", clusterState.getCollection(testCollectionName).getSlice(SHARD1)); + log.info( + "After forcing leader: {}", + clusterState.getCollection(testCollectionName).getSlice(SHARD1)); } // we have a leader - Replica newLeader = clusterState.getCollectionOrNull(testCollectionName).getSlice(SHARD1).getLeader(); + Replica newLeader = + clusterState.getCollectionOrNull(testCollectionName).getSlice(SHARD1).getLeader(); assertNotNull(newLeader); // leader is active assertEquals(State.ACTIVE, newLeader.getState()); @@ -150,24 +164,27 @@ public void testReplicasInLowerTerms() throws Exception { assertDocsExistInAllReplicas(notLeaders, testCollectionName, 1, 1); assertDocsExistInAllReplicas(notLeaders, testCollectionName, 4, 4); - if (useTlogReplicas()) { - - } + if (useTlogReplicas()) {} // Docs 1 and 4 should be here. 2 was lost during the partition, 3 had failed to be indexed. log.info("Checking doc counts..."); ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); if (useTlogReplicas()) { TimeOut timeOut = new TimeOut(15, TimeUnit.SECONDS, TimeSource.NANO_TIME); - timeOut.waitFor("Expected only 2 documents in the index", () -> { - try { - return 2 == cloudClient.query(params).getResults().getNumFound(); - } catch (Exception e) { - return false; - } - }); + timeOut.waitFor( + "Expected only 2 documents in the index", + () -> { + try { + return 2 == cloudClient.query(params).getResults().getNumFound(); + } catch (Exception e) { + return false; + } + }); } else { - assertEquals("Expected only 2 documents in the index", 2, cloudClient.query(params).getResults().getNumFound()); + assertEquals( + "Expected only 2 documents in the index", + 2, + cloudClient.query(params).getResults().getNumFound()); } bringBackOldLeaderAndSendDoc(testCollectionName, leader, notLeaders, 5); @@ -178,7 +195,14 @@ public void testReplicasInLowerTerms() throws Exception { } } - private void putNonLeadersIntoLowerTerm(String collectionName, String shard, ZkController zkController, Replica leader, List notLeaders, SolrClient solrClient) throws Exception { + private void putNonLeadersIntoLowerTerm( + String collectionName, + String shard, + ZkController zkController, + Replica leader, + List notLeaders, + SolrClient solrClient) + throws Exception { SocketProxy[] nonLeaderProxies = new SocketProxy[notLeaders.size()]; for (int i = 0; i < notLeaders.size(); i++) nonLeaderProxies[i] = getProxyForReplica(notLeaders.get(i)); @@ -187,15 +211,14 @@ private void putNonLeadersIntoLowerTerm(String collectionName, String shard, ZkC // ok, now introduce a network partition between the leader and both replicas log.info("Closing proxies for the non-leader replicas..."); - for (SocketProxy proxy : nonLeaderProxies) - proxy.close(); + for (SocketProxy proxy : nonLeaderProxies) proxy.close(); getProxyForReplica(leader).close(); // indexing during a partition log.info("Sending a doc during the network partition..."); JettySolrRunner leaderJetty = getJettyOnPort(getReplicaPort(leader)); sendDoc(2, leaderJetty); - + for (Replica replica : notLeaders) { 
waitForState(collectionName, replica.getName(), State.DOWN, 60000); } @@ -211,12 +234,14 @@ private void putNonLeadersIntoLowerTerm(String collectionName, String shard, ZkC for (int i = 0; i < 20; i++) { ClusterState clusterState = zkController.getZkStateReader().getClusterState(); boolean allDown = true; - for (Replica replica : clusterState.getCollection(collectionName).getSlice(shard).getReplicas()) { + for (Replica replica : + clusterState.getCollection(collectionName).getSlice(shard).getReplicas()) { if (replica.getState() != State.DOWN) { allDown = false; } } - if (allDown && clusterState.getCollection(collectionName).getSlice(shard).getLeader() == null) { + if (allDown + && clusterState.getCollection(collectionName).getSlice(shard).getLeader() == null) { break; } Thread.sleep(1000); @@ -225,24 +250,27 @@ private void putNonLeadersIntoLowerTerm(String collectionName, String shard, ZkC // remove the network partition log.info("Reopening the proxies for the non-leader replicas..."); - for (SocketProxy proxy : nonLeaderProxies) - proxy.reopen(); + for (SocketProxy proxy : nonLeaderProxies) proxy.reopen(); - try (ZkShardTerms zkShardTerms = new ZkShardTerms(collectionName, shard, cloudClient.getZkStateReader().getZkClient())) { + try (ZkShardTerms zkShardTerms = + new ZkShardTerms(collectionName, shard, cloudClient.getZkStateReader().getZkClient())) { for (Replica notLeader : notLeaders) { - assertTrue(zkShardTerms.getTerm(leader.getName()) > zkShardTerms.getTerm(notLeader.getName())); + assertTrue( + zkShardTerms.getTerm(leader.getName()) > zkShardTerms.getTerm(notLeader.getName())); } } } private void assertSendDocFails(int docId) throws Exception { // sending a doc in this state fails - expectThrows(SolrException.class, + expectThrows( + SolrException.class, "Should've failed indexing during a down state.", () -> sendDoc(docId)); } - private void bringBackOldLeaderAndSendDoc(String collection, Replica leader, List notLeaders, int docid) throws Exception { + private void bringBackOldLeaderAndSendDoc( + String collection, Replica leader, List notLeaders, int docid) throws Exception { // Bring back the leader which was stopped log.info("Bringing back originally killed leader..."); JettySolrRunner leaderJetty = getJettyOnPort(getReplicaPort(leader)); @@ -252,10 +280,12 @@ private void bringBackOldLeaderAndSendDoc(String collection, Replica leader, Lis cloudClient.getZkStateReader().forceUpdateCollection(collection); ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); if (log.isInfoEnabled()) { - log.info("After bringing back leader: {}", clusterState.getCollection(collection).getSlice(SHARD1)); + log.info( + "After bringing back leader: {}", + clusterState.getCollection(collection).getSlice(SHARD1)); } int numActiveReplicas = getNumberOfActiveReplicas(clusterState, collection, SHARD1); - assertEquals(1+notLeaders.size(), numActiveReplicas); + assertEquals(1 + notLeaders.size(), numActiveReplicas); log.info("Sending doc {}...", docid); sendDoc(docid); log.info("Committing..."); @@ -274,14 +304,18 @@ protected int sendDoc(int docId) throws Exception { return sendDocsWithRetry(Collections.singletonList(doc), 1, 5, 1); } - private void doForceLeader(String collectionName, String shard) throws IOException, SolrServerException { - CollectionAdminRequest.ForceLeader forceLeader = CollectionAdminRequest.forceLeaderElection(collectionName, shard); - try(CloudSolrClient cloudClient = getCloudSolrClient(zkServer.getZkAddress(), random().nextBoolean(), 30000, 60000)) { + 
private void doForceLeader(String collectionName, String shard) + throws IOException, SolrServerException { + CollectionAdminRequest.ForceLeader forceLeader = + CollectionAdminRequest.forceLeaderElection(collectionName, shard); + try (CloudSolrClient cloudClient = + getCloudSolrClient(zkServer.getZkAddress(), random().nextBoolean(), 30000, 60000)) { cloudClient.request(forceLeader); } } - private int getNumberOfActiveReplicas(ClusterState clusterState, String collection, String sliceId) { + private int getNumberOfActiveReplicas( + ClusterState clusterState, String collection, String sliceId) { int numActiveReplicas = 0; // Assert all replicas are active for (Replica rep : clusterState.getCollection(collection).getSlice(sliceId).getReplicas()) { diff --git a/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java b/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java index 32591de37cf..7222111bef9 100644 --- a/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java @@ -17,9 +17,8 @@ package org.apache.solr.cloud; import java.lang.invoke.MethodHandles; - -import java.util.Arrays; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; @@ -28,9 +27,8 @@ import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; - -import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.LuceneTestCase.Slow; +import org.apache.lucene.util.TestUtil; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.cloud.SocketProxy; @@ -39,8 +37,8 @@ import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrClient; import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; -import org.apache.solr.client.solrj.response.RequestStatusState; import org.apache.solr.client.solrj.request.UpdateRequest; +import org.apache.solr.client.solrj.response.RequestStatusState; import org.apache.solr.client.solrj.response.UpdateResponse; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrException; @@ -53,13 +51,10 @@ import org.junit.After; import org.junit.BeforeClass; import org.junit.Test; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * Super basic testing, no shard restarting or anything. - */ +/** Super basic testing, no shard restarting or anything. */ @Slow public class FullSolrCloudDistribCmdsTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -79,113 +74,137 @@ public void purgeAllCollections() throws Exception { } /** - * Creates a new 2x2 collection using a unique name, blocking until it's state is fully active, + * Creates a new 2x2 collection using a unique name, blocking until it's state is fully active, * and sets that collection as the default on the cluster's default CloudSolrClient. 
- * + * * @return the name of the new collection */ public static String createAndSetNewDefaultCollection() throws Exception { final CloudSolrClient cloudClient = cluster.getSolrClient(); final String name = "test_collection_" + NAME_COUNTER.getAndIncrement(); - assertEquals(RequestStatusState.COMPLETED, - CollectionAdminRequest.createCollection(name, "_default", 2, 2) - .processAndWait(cloudClient, DEFAULT_TIMEOUT)); - cloudClient.waitForState(name, DEFAULT_TIMEOUT, TimeUnit.SECONDS, - (n, c) -> DocCollection.isFullyActive(n, c, 2, 2)); + assertEquals( + RequestStatusState.COMPLETED, + CollectionAdminRequest.createCollection(name, "_default", 2, 2) + .processAndWait(cloudClient, DEFAULT_TIMEOUT)); + cloudClient.waitForState( + name, DEFAULT_TIMEOUT, TimeUnit.SECONDS, (n, c) -> DocCollection.isFullyActive(n, c, 2, 2)); cloudClient.setDefaultCollection(name); return name; } - + @Test public void testBasicUpdates() throws Exception { final CloudSolrClient cloudClient = cluster.getSolrClient(); final String collectionName = createAndSetNewDefaultCollection(); - + // add a doc, update it, and delete it addUpdateDelete("doc1"); - assertEquals(0, cloudClient.query(params("q","*:*")).getResults().getNumFound()); - + assertEquals(0, cloudClient.query(params("q", "*:*")).getResults().getNumFound()); + // add 2 docs in a single request addTwoDocsInOneRequest("doc2", "doc3"); - assertEquals(2, cloudClient.query(params("q","*:*")).getResults().getNumFound()); + assertEquals(2, cloudClient.query(params("q", "*:*")).getResults().getNumFound()); // 2 deletes in a single request... - assertEquals(0, (new UpdateRequest().deleteById("doc2").deleteById("doc3")) - .process(cloudClient).getStatus()); + assertEquals( + 0, + (new UpdateRequest().deleteById("doc2").deleteById("doc3")) + .process(cloudClient) + .getStatus()); assertEquals(0, cloudClient.commit().getStatus()); - - assertEquals(0, cloudClient.query(params("q","*:*")).getResults().getNumFound()); - - // add a doc that we will then delete later after adding two other docs (all before next commit). - assertEquals(0, cloudClient.add(sdoc("id", "doc4", "content_s", "will_delete_later")).getStatus()); - assertEquals(0, cloudClient.add(sdocs(sdoc("id", "doc5"), - sdoc("id", "doc6"))).getStatus()); + + assertEquals(0, cloudClient.query(params("q", "*:*")).getResults().getNumFound()); + + // add a doc that we will then delete later after adding two other docs (all before next + // commit). + assertEquals( + 0, cloudClient.add(sdoc("id", "doc4", "content_s", "will_delete_later")).getStatus()); + assertEquals(0, cloudClient.add(sdocs(sdoc("id", "doc5"), sdoc("id", "doc6"))).getStatus()); assertEquals(0, cloudClient.deleteById("doc4").getStatus()); assertEquals(0, cloudClient.commit().getStatus()); assertEquals(0, cloudClient.query(params("q", "id:doc4")).getResults().getNumFound()); assertEquals(1, cloudClient.query(params("q", "id:doc5")).getResults().getNumFound()); assertEquals(1, cloudClient.query(params("q", "id:doc6")).getResults().getNumFound()); - assertEquals(2, cloudClient.query(params("q","*:*")).getResults().getNumFound()); - - checkShardConsistency(params("q","*:*", "rows", "9999","_trace","post_doc_5_6")); + assertEquals(2, cloudClient.query(params("q", "*:*")).getResults().getNumFound()); + + checkShardConsistency(params("q", "*:*", "rows", "9999", "_trace", "post_doc_5_6")); // delete everything.... 
assertEquals(0, cloudClient.deleteByQuery("*:*").getStatus()); assertEquals(0, cloudClient.commit().getStatus()); - assertEquals(0, cloudClient.query(params("q","*:*")).getResults().getNumFound()); + assertEquals(0, cloudClient.query(params("q", "*:*")).getResults().getNumFound()); - checkShardConsistency(params("q","*:*", "rows", "9999","_trace","delAll")); - + checkShardConsistency(params("q", "*:*", "rows", "9999", "_trace", "delAll")); } public void testDeleteByIdImplicitRouter() throws Exception { final CloudSolrClient cloudClient = cluster.getSolrClient(); final String name = "implicit_collection_without_routerfield_" + NAME_COUNTER.getAndIncrement(); - assertEquals(RequestStatusState.COMPLETED, - CollectionAdminRequest.createCollectionWithImplicitRouter(name, "_default", "shard1,shard2", 2) - .processAndWait(cloudClient, DEFAULT_TIMEOUT)); - cloudClient.waitForState(name, DEFAULT_TIMEOUT, TimeUnit.SECONDS, - (n, c) -> DocCollection.isFullyActive(n, c, 2, 2)); + assertEquals( + RequestStatusState.COMPLETED, + CollectionAdminRequest.createCollectionWithImplicitRouter( + name, "_default", "shard1,shard2", 2) + .processAndWait(cloudClient, DEFAULT_TIMEOUT)); + cloudClient.waitForState( + name, DEFAULT_TIMEOUT, TimeUnit.SECONDS, (n, c) -> DocCollection.isFullyActive(n, c, 2, 2)); cloudClient.setDefaultCollection(name); - final DocCollection docCol = cloudClient.getZkStateReader().getClusterState().getCollection(name); + final DocCollection docCol = + cloudClient.getZkStateReader().getClusterState().getCollection(name); try (SolrClient shard1 = getHttpSolrClient(docCol.getSlice("shard1").getLeader().getCoreUrl()); - SolrClient shard2 = getHttpSolrClient(docCol.getSlice("shard2").getLeader().getCoreUrl())) { - + SolrClient shard2 = getHttpSolrClient(docCol.getSlice("shard2").getLeader().getCoreUrl())) { + // Add three documents to shard1 shard1.add(sdoc("id", "1", "title", "s1 one")); shard1.add(sdoc("id", "2", "title", "s1 two")); shard1.add(sdoc("id", "3", "title", "s1 three")); shard1.commit(); final AtomicInteger docCounts1 = new AtomicInteger(3); - + // Add two documents to shard2 shard2.add(sdoc("id", "4", "title", "s2 four")); shard2.add(sdoc("id", "5", "title", "s2 five")); shard2.commit(); final AtomicInteger docCounts2 = new AtomicInteger(2); - // A re-usable helper to verify that the expected number of documents can be found on each shard... - Runnable checkShardCounts = () -> { - try { - // including cloudClient helps us test view from other nodes that aren't the leaders... - for (SolrClient c : Arrays.asList(cloudClient, shard1, shard2)) { - assertEquals(docCounts1.get() + docCounts2.get(), c.query(params("q", "*:*")).getResults().getNumFound()); - - assertEquals(docCounts1.get(), c.query(params("q", "*:*", "shards", "shard1")).getResults().getNumFound()); - assertEquals(docCounts2.get(), c.query(params("q", "*:*", "shards", "shard2")).getResults().getNumFound()); - - assertEquals(docCounts1.get() + docCounts2.get(), c.query(params("q", "*:*", "shards", "shard2,shard1")).getResults().getNumFound()); - } - - assertEquals(docCounts1.get(), shard1.query(params("q", "*:*", "distrib", "false")).getResults().getNumFound()); - assertEquals(docCounts2.get(), shard2.query(params("q", "*:*", "distrib", "false")).getResults().getNumFound()); - - } catch (Exception sse) { - throw new RuntimeException(sse); - } - }; + // A re-usable helper to verify that the expected number of documents can be found on each + // shard... 
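// ---- Illustrative sketch (not part of the patch) ----
// The two query shapes the checkShardCounts helper (below) relies on: "shards=..." to
// scope a distributed query to a named shard, and "distrib=false" to ask one core for
// only the documents in its own index. The shard name is an assumption.
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;

class ShardCountSketch {
  static long countOnShard(SolrClient client, String shardName) throws Exception {
    SolrQuery q = new SolrQuery("*:*");
    q.set("shards", shardName); // e.g. "shard1": routes the distributed query to one shard
    return client.query(q).getResults().getNumFound();
  }

  static long countLocalOnly(SolrClient coreClient) throws Exception {
    SolrQuery q = new SolrQuery("*:*");
    q.set("distrib", false); // the core answers from its own index only
    return coreClient.query(q).getResults().getNumFound();
  }
}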
+ Runnable checkShardCounts = + () -> { + try { + // including cloudClient helps us test view from other nodes that aren't the + // leaders... + for (SolrClient c : Arrays.asList(cloudClient, shard1, shard2)) { + assertEquals( + docCounts1.get() + docCounts2.get(), + c.query(params("q", "*:*")).getResults().getNumFound()); + + assertEquals( + docCounts1.get(), + c.query(params("q", "*:*", "shards", "shard1")).getResults().getNumFound()); + assertEquals( + docCounts2.get(), + c.query(params("q", "*:*", "shards", "shard2")).getResults().getNumFound()); + + assertEquals( + docCounts1.get() + docCounts2.get(), + c.query(params("q", "*:*", "shards", "shard2,shard1")) + .getResults() + .getNumFound()); + } + + assertEquals( + docCounts1.get(), + shard1.query(params("q", "*:*", "distrib", "false")).getResults().getNumFound()); + assertEquals( + docCounts2.get(), + shard2.query(params("q", "*:*", "distrib", "false")).getResults().getNumFound()); + + } catch (Exception sse) { + throw new RuntimeException(sse); + } + }; checkShardCounts.run(); { // Send a delete request for a doc on shard1 to core hosting shard1 with NO routing info @@ -197,8 +216,9 @@ public void testDeleteByIdImplicitRouter() throws Exception { docCounts1.decrementAndGet(); } checkShardCounts.run(); - - { // Send a delete request to core hosting shard1 with a route param for a document that is actually in shard2 + + { // Send a delete request to core hosting shard1 with a route param for a document that is + // actually in shard2 // Should delete. final UpdateRequest deleteRequest = new UpdateRequest(); deleteRequest.deleteById("4").withRoute("shard2"); @@ -208,7 +228,8 @@ public void testDeleteByIdImplicitRouter() throws Exception { } checkShardCounts.run(); - { // Send a delete request to core hosting shard1 with NO route param for a document that is actually in shard2 + { // Send a delete request to core hosting shard1 with NO route param for a document that is + // actually in shard2 // Shouldn't delete, since deleteById requests are not broadcast to all shard leaders. 
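// ---- Illustrative sketch (not part of the patch) ----
// The routing rule the surrounding comments describe: with the implicit router a
// deleteById is NOT broadcast, so deleting a document that lives on another shard only
// works if a _route_ is attached. The shard name and id are assumptions.
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.request.UpdateRequest;

class RoutedDeleteSketch {
  static void deleteViaWrongCore(SolrClient shard1Core, String id) throws Exception {
    UpdateRequest del = new UpdateRequest();
    del.deleteById(id).withRoute("shard2"); // without the route, shard1 silently misses
    shard1Core.request(del);
    shard1Core.commit();
  }
}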
// (This is effictively a request to delete "5" if an only if it is on shard1) final UpdateRequest deleteRequest = new UpdateRequest(); @@ -217,7 +238,7 @@ public void testDeleteByIdImplicitRouter() throws Exception { shard1.commit(); } checkShardCounts.run(); - + { // Multiple deleteById commands for different shards in a single request final UpdateRequest deleteRequest = new UpdateRequest(); deleteRequest.deleteById("2", "shard1"); @@ -229,25 +250,26 @@ public void testDeleteByIdImplicitRouter() throws Exception { } checkShardCounts.run(); } - } public void testDeleteByIdCompositeRouterWithRouterField() throws Exception { final CloudSolrClient cloudClient = cluster.getSolrClient(); final String name = "composite_collection_with_routerfield_" + NAME_COUNTER.getAndIncrement(); - assertEquals(RequestStatusState.COMPLETED, - CollectionAdminRequest.createCollection(name, "_default", 2, 2) - .setRouterName("compositeId") - .setRouterField("routefield_s") - .setShards("shard1,shard2") - .processAndWait(cloudClient, DEFAULT_TIMEOUT)); - cloudClient.waitForState(name, DEFAULT_TIMEOUT, TimeUnit.SECONDS, - (n, c) -> DocCollection.isFullyActive(n, c, 2, 2)); + assertEquals( + RequestStatusState.COMPLETED, + CollectionAdminRequest.createCollection(name, "_default", 2, 2) + .setRouterName("compositeId") + .setRouterField("routefield_s") + .setShards("shard1,shard2") + .processAndWait(cloudClient, DEFAULT_TIMEOUT)); + cloudClient.waitForState( + name, DEFAULT_TIMEOUT, TimeUnit.SECONDS, (n, c) -> DocCollection.isFullyActive(n, c, 2, 2)); cloudClient.setDefaultCollection(name); - - final DocCollection docCol = cloudClient.getZkStateReader().getClusterState().getCollection(name); + + final DocCollection docCol = + cloudClient.getZkStateReader().getClusterState().getCollection(name); try (SolrClient shard1 = getHttpSolrClient(docCol.getSlice("shard1").getLeader().getCoreUrl()); - SolrClient shard2 = getHttpSolrClient(docCol.getSlice("shard2").getLeader().getCoreUrl())) { + SolrClient shard2 = getHttpSolrClient(docCol.getSlice("shard2").getLeader().getCoreUrl())) { // Add six documents w/diff routes (all sent to shard1 leader's core) shard1.add(sdoc("id", "1", "routefield_s", "europe")); @@ -264,27 +286,37 @@ public void testDeleteByIdCompositeRouterWithRouterField() throws Exception { shard2.add(sdoc("id", "4", "routefield_s", "africa")); shard2.add(sdoc("id", "2", "routefield_s", "europe")); shard2.commit(); - + final AtomicInteger docCountsEurope = new AtomicInteger(6); final AtomicInteger docCountsAfrica = new AtomicInteger(4); - // A re-usable helper to verify that the expected number of documents can be found based on _route_ key... - Runnable checkShardCounts = () -> { - try { - // including cloudClient helps us test view from other nodes that aren't the leaders... - for (SolrClient c : Arrays.asList(cloudClient, shard1, shard2)) { - assertEquals(docCountsEurope.get() + docCountsAfrica.get(), c.query(params("q", "*:*")).getResults().getNumFound()); - - assertEquals(docCountsEurope.get(), c.query(params("q", "*:*", "_route_", "europe")).getResults().getNumFound()); - assertEquals(docCountsAfrica.get(), c.query(params("q", "*:*", "_route_", "africa")).getResults().getNumFound()); - } - } catch (Exception sse) { - throw new RuntimeException(sse); - } - }; + // A re-usable helper to verify that the expected number of documents can be found based on + // _route_ key... 
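// ---- Illustrative sketch (not part of the patch) ----
// Creating a collection whose compositeId router hashes a document field instead of the
// uniqueKey, as the test above does with routefield_s. The collection name and the
// 90-second wait are assumptions.
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;

class RouterFieldSketch {
  static void createRouteFieldCollection(CloudSolrClient client, String name) throws Exception {
    CollectionAdminRequest.createCollection(name, "_default", 2, 2)
        .setRouterName("compositeId")
        .setRouterField("routefield_s") // docs sharing a route value land on the same shard
        .setShards("shard1,shard2")
        .processAndWait(client, 90);
  }
}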
+ Runnable checkShardCounts = + () -> { + try { + // including cloudClient helps us test view from other nodes that aren't the + // leaders... + for (SolrClient c : Arrays.asList(cloudClient, shard1, shard2)) { + assertEquals( + docCountsEurope.get() + docCountsAfrica.get(), + c.query(params("q", "*:*")).getResults().getNumFound()); + + assertEquals( + docCountsEurope.get(), + c.query(params("q", "*:*", "_route_", "europe")).getResults().getNumFound()); + assertEquals( + docCountsAfrica.get(), + c.query(params("q", "*:*", "_route_", "africa")).getResults().getNumFound()); + } + } catch (Exception sse) { + throw new RuntimeException(sse); + } + }; checkShardCounts.run(); - - { // Send a delete request to core hosting shard1 with a route param for a document that was originally added via core on shard2 + + { // Send a delete request to core hosting shard1 with a route param for a document that was + // originally added via core on shard2 final UpdateRequest deleteRequest = new UpdateRequest(); deleteRequest.deleteById("4", "africa"); shard1.request(deleteRequest); @@ -292,7 +324,7 @@ public void testDeleteByIdCompositeRouterWithRouterField() throws Exception { docCountsAfrica.decrementAndGet(); } checkShardCounts.run(); - + { // Multiple deleteById commands with different routes in a single request final UpdateRequest deleteRequest = new UpdateRequest(); deleteRequest.deleteById("2", "europe"); @@ -305,7 +337,8 @@ public void testDeleteByIdCompositeRouterWithRouterField() throws Exception { checkShardCounts.run(); // Tests for distributing delete by id when route is missing from the request - { // Send a delete request with no route to shard1 for document on shard2, should be distributed + { // Send a delete request with no route to shard1 for document on shard2, should be + // distributed final UpdateRequest deleteRequest = new UpdateRequest(); deleteRequest.deleteById("8"); shard1.request(deleteRequest); @@ -314,7 +347,8 @@ public void testDeleteByIdCompositeRouterWithRouterField() throws Exception { } checkShardCounts.run(); - { // Multiple deleteById commands with missing route in a single request, should be distributed + { // Multiple deleteById commands with missing route in a single request, should be + // distributed final UpdateRequest deleteRequest = new UpdateRequest(); deleteRequest.deleteById("6"); deleteRequest.deleteById("11"); @@ -324,7 +358,6 @@ public void testDeleteByIdCompositeRouterWithRouterField() throws Exception { docCountsAfrica.decrementAndGet(); } checkShardCounts.run(); - } } @@ -332,20 +365,22 @@ public void testThatCantForwardToLeaderFails() throws Exception { final CloudSolrClient cloudClient = cluster.getSolrClient(); final String collectionName = "test_collection_" + NAME_COUNTER.getAndIncrement(); cloudClient.setDefaultCollection(collectionName); - + // get a random node for use in our collection before creating the one we'll partition.. final JettySolrRunner otherLeader = cluster.getRandomJetty(random()); // pick a (second) random node (which may be the same) for sending updates to // (if it's the same, we're testing routing from another shard, if diff we're testing routing // from a non-collection node) - final String indexingUrl = cluster.getRandomJetty(random()).getProxyBaseUrl() + "/" + collectionName; + final String indexingUrl = + cluster.getRandomJetty(random()).getProxyBaseUrl() + "/" + collectionName; // create a new node for the purpose of killing it... 
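// ---- Illustrative sketch (not part of the patch) ----
// The proxy dance used below: a SocketProxy is slotted in front of a restarted Jetty node
// so the test can later sever and restore that node's traffic without killing the
// process. Port/URI handling is simplified here.
import java.net.URI;
import org.apache.solr.client.solrj.cloud.SocketProxy;

class ProxyPartitionSketch {
  static SocketProxy frontWithProxy(URI nodeBaseUrl) throws Exception {
    SocketProxy proxy = new SocketProxy();
    proxy.open(nodeBaseUrl); // proxy now listens and forwards to the real node
    return proxy;
  }

  static void partitionAndHeal(SocketProxy proxy) throws Exception {
    proxy.close(); // drop all connections: the node looks partitioned
    // ... assertions while the node is unreachable ...
    proxy.reopen(); // heal the partition
  }
}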
final JettySolrRunner leaderToPartition = cluster.startJettySolrRunner(); try { cluster.waitForNode(leaderToPartition, DEFAULT_TIMEOUT); - // HACK: we have to stop the node in order to enable the proxy, in order to then restart the node + // HACK: we have to stop the node in order to enable the proxy, in order to then restart the + // node // (in order to then "partition it" later via the proxy) final SocketProxy proxy = new SocketProxy(); cluster.stopJettySolrRunner(leaderToPartition); @@ -355,54 +390,76 @@ public void testThatCantForwardToLeaderFails() throws Exception { proxy.open(leaderToPartition.getBaseUrl().toURI()); try { log.info("leaderToPartition's Proxy: {}", proxy); - + cluster.waitForNode(leaderToPartition, DEFAULT_TIMEOUT); // create a 2x1 collection using a nodeSet that includes our leaderToPartition... - assertEquals(RequestStatusState.COMPLETED, - CollectionAdminRequest.createCollection(collectionName, 2, 1) - .setCreateNodeSet(leaderToPartition.getNodeName() + "," + otherLeader.getNodeName()) - .processAndWait(cloudClient, DEFAULT_TIMEOUT)); - - cloudClient.waitForState(collectionName, DEFAULT_TIMEOUT, TimeUnit.SECONDS, - (n, c) -> DocCollection.isFullyActive(n, c, 2, 1)); - + assertEquals( + RequestStatusState.COMPLETED, + CollectionAdminRequest.createCollection(collectionName, 2, 1) + .setCreateNodeSet(leaderToPartition.getNodeName() + "," + otherLeader.getNodeName()) + .processAndWait(cloudClient, DEFAULT_TIMEOUT)); + + cloudClient.waitForState( + collectionName, + DEFAULT_TIMEOUT, + TimeUnit.SECONDS, + (n, c) -> DocCollection.isFullyActive(n, c, 2, 1)); + { // HACK: Check the leaderProps for the shard hosted on the node we're going to kill... - final Replica leaderProps = cloudClient.getZkStateReader() - .getClusterState().getCollection(collectionName) - .getLeaderReplicas(leaderToPartition.getNodeName()).get(0); - + final Replica leaderProps = + cloudClient + .getZkStateReader() + .getClusterState() + .getCollection(collectionName) + .getLeaderReplicas(leaderToPartition.getNodeName()) + .get(0); + // No point in this test if these aren't true... - assertNotNull("Sanity check: leaderProps isn't a leader?: " + leaderProps.toString(), - leaderProps.getStr(Slice.LEADER)); - assertTrue("Sanity check: leaderProps isn't using the proxy port?: " + leaderProps.toString(), - leaderProps.getCoreUrl().contains(""+proxy.getListenPort())); + assertNotNull( + "Sanity check: leaderProps isn't a leader?: " + leaderProps.toString(), + leaderProps.getStr(Slice.LEADER)); + assertTrue( + "Sanity check: leaderProps isn't using the proxy port?: " + leaderProps.toString(), + leaderProps.getCoreUrl().contains("" + proxy.getListenPort())); } - + // create client to send our updates to... try (HttpSolrClient indexClient = getHttpSolrClient(indexingUrl)) { - + // Sanity check: we should be able to send a bunch of updates that work right now... for (int i = 0; i < 100; i++) { - final UpdateResponse rsp = indexClient.add - (sdoc("id", i, "text_t", TestUtil.randomRealisticUnicodeString(random(), 200))); + final UpdateResponse rsp = + indexClient.add( + sdoc("id", i, "text_t", TestUtil.randomRealisticUnicodeString(random(), 200))); assertEquals(0, rsp.getStatus()); } log.info("Closing leaderToPartition's proxy: {}", proxy); proxy.close(); // NOTE: can't use halfClose, won't ensure a garunteed failure - - final SolrException e = expectThrows(SolrException.class, () -> { - // start at 50 so that we have some "updates" to previous docs and some "adds"... 
- for (int i = 50; i < 250; i++) { - // Pure random odds of all of these docs belonging to the live shard are 1 in 2**200... - // Except we know the hashing algorithm isn't purely random, - // So the actual odds are "0" unless the hashing algorithm is changed to suck badly... - final UpdateResponse rsp = indexClient.add - (sdoc("id", i, "text_t", TestUtil.randomRealisticUnicodeString(random(), 200))); - // if the update didn't throw an exception, it better be a success.. - assertEquals(0, rsp.getStatus()); - } - }); + + final SolrException e = + expectThrows( + SolrException.class, + () -> { + // start at 50 so that we have some "updates" to previous docs and some + // "adds"... + for (int i = 50; i < 250; i++) { + // Pure random odds of all of these docs belonging to the live shard are 1 in + // 2**200... + // Except we know the hashing algorithm isn't purely random, + // So the actual odds are "0" unless the hashing algorithm is changed to suck + // badly... + final UpdateResponse rsp = + indexClient.add( + sdoc( + "id", + i, + "text_t", + TestUtil.randomRealisticUnicodeString(random(), 200))); + // if the update didn't throw an exception, it better be a success.. + assertEquals(0, rsp.getStatus()); + } + }); assertEquals(500, e.code()); } } finally { @@ -413,68 +470,79 @@ public void testThatCantForwardToLeaderFails() throws Exception { cluster.waitForJettyToStop(leaderToPartition); } } - - /** NOTE: uses the cluster's CloudSolrClient and asumes default collection has been set */ + + /** NOTE: uses the cluster's CloudSolrClient and assumes default collection has been set */ private void addTwoDocsInOneRequest(String docIdA, String docIdB) throws Exception { final CloudSolrClient cloudClient = cluster.getSolrClient(); - assertEquals(0, cloudClient.add(sdocs(sdoc("id", docIdA), - sdoc("id", docIdB))).getStatus()); + assertEquals(0, cloudClient.add(sdocs(sdoc("id", docIdA), sdoc("id", docIdB))).getStatus()); assertEquals(0, cloudClient.commit().getStatus()); - - assertEquals(2, cloudClient.query(params("q","id:(" + docIdA + " OR " + docIdB + ")") - ).getResults().getNumFound()); - - checkShardConsistency(params("q","*:*", "rows", "99","_trace","two_docs")); + + assertEquals( + 2, + cloudClient + .query(params("q", "id:(" + docIdA + " OR " + docIdB + ")")) + .getResults() + .getNumFound()); + + checkShardConsistency(params("q", "*:*", "rows", "99", "_trace", "two_docs")); } - /** NOTE: uses the cluster's CloudSolrClient and asumes default collection has been set */ + /** NOTE: uses the cluster's CloudSolrClient and assumes default collection has been set */ private void addUpdateDelete(String docId) throws Exception { final CloudSolrClient cloudClient = cluster.getSolrClient(); // add the doc, confirm we can query it...
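// ---- Illustrative sketch (not part of the patch) ----
// The failure assertion made above with the test framework's expectThrows, rewritten
// with plain JUnit 4.13 assertThrows. The client handle and doc id are assumptions.
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;

class PartitionedIndexingSketch {
  static void assertIndexingFails(SolrClient clientBehindDeadProxy) {
    SolrException e =
        assertThrows(
            SolrException.class,
            () -> {
              SolrInputDocument doc = new SolrInputDocument();
              doc.addField("id", "9999");
              clientBehindDeadProxy.add(doc); // forward to the partitioned leader fails
            });
    assertEquals(500, e.code()); // surfaced as an HTTP 500 from the receiving node
  }
}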
assertEquals(0, cloudClient.add(sdoc("id", docId, "content_t", "originalcontent")).getStatus()); assertEquals(0, cloudClient.commit().getStatus()); - + assertEquals(1, cloudClient.query(params("q", "id:" + docId)).getResults().getNumFound()); - assertEquals(1, cloudClient.query(params("q", "content_t:originalcontent")).getResults().getNumFound()); - assertEquals(1, - cloudClient.query(params("q", "content_t:originalcontent AND id:" + docId)) - .getResults().getNumFound()); - - checkShardConsistency(params("q","id:" + docId, "rows", "99","_trace","original_doc")); - + assertEquals( + 1, cloudClient.query(params("q", "content_t:originalcontent")).getResults().getNumFound()); + assertEquals( + 1, + cloudClient + .query(params("q", "content_t:originalcontent AND id:" + docId)) + .getResults() + .getNumFound()); + + checkShardConsistency(params("q", "id:" + docId, "rows", "99", "_trace", "original_doc")); + // update doc assertEquals(0, cloudClient.add(sdoc("id", docId, "content_t", "updatedcontent")).getStatus()); assertEquals(0, cloudClient.commit().getStatus()); - + // confirm we can query the doc by updated content and not original... - assertEquals(0, cloudClient.query(params("q", "content_t:originalcontent")).getResults().getNumFound()); - assertEquals(1, cloudClient.query(params("q", "content_t:updatedcontent")).getResults().getNumFound()); - assertEquals(1, - cloudClient.query(params("q", "content_t:updatedcontent AND id:" + docId)) - .getResults().getNumFound()); - + assertEquals( + 0, cloudClient.query(params("q", "content_t:originalcontent")).getResults().getNumFound()); + assertEquals( + 1, cloudClient.query(params("q", "content_t:updatedcontent")).getResults().getNumFound()); + assertEquals( + 1, + cloudClient + .query(params("q", "content_t:updatedcontent AND id:" + docId)) + .getResults() + .getNumFound()); + // delete the doc, confim it no longer matches in queries... 
assertEquals(0, cloudClient.deleteById(docId).getStatus()); assertEquals(0, cloudClient.commit().getStatus()); - + assertEquals(0, cloudClient.query(params("q", "id:" + docId)).getResults().getNumFound()); - assertEquals(0, cloudClient.query(params("q", "content_t:updatedcontent")).getResults().getNumFound()); - - checkShardConsistency(params("q","id:" + docId, "rows", "99","_trace","del_updated_doc")); + assertEquals( + 0, cloudClient.query(params("q", "content_t:updatedcontent")).getResults().getNumFound()); + checkShardConsistency(params("q", "id:" + docId, "rows", "99", "_trace", "del_updated_doc")); } - public long testIndexQueryDeleteHierarchical() throws Exception { final CloudSolrClient cloudClient = cluster.getSolrClient(); final String collectionName = createAndSetNewDefaultCollection(); - + // index long docId = 42; int topDocsNum = atLeast(5); - int childsNum = 5+random().nextInt(5); + int childsNum = 5 + random().nextInt(5); for (int i = 0; i < topDocsNum; ++i) { UpdateRequest uReq = new UpdateRequest(); SolrInputDocument topDocument = new SolrInputDocument(); @@ -482,61 +550,59 @@ public long testIndexQueryDeleteHierarchical() throws Exception { topDocument.addField("type_s", "parent"); topDocument.addField(i + "parent_f1_s", "v1"); topDocument.addField(i + "parent_f2_s", "v2"); - - + for (int index = 0; index < childsNum; ++index) { docId = addChildren("child", topDocument, index, false, docId); } - + uReq.add(topDocument); - assertEquals(i + "/" + docId, - 0, uReq.process(cloudClient).getStatus()); + assertEquals(i + "/" + docId, 0, uReq.process(cloudClient).getStatus()); } assertEquals(0, cloudClient.commit().getStatus()); - checkShardConsistency(params("q","*:*", "rows", "9999","_trace","added_all_top_docs_with_kids")); - + checkShardConsistency( + params("q", "*:*", "rows", "9999", "_trace", "added_all_top_docs_with_kids")); + // query - + // parents - assertEquals(topDocsNum, - cloudClient.query(new SolrQuery("type_s:parent")).getResults().getNumFound()); - - // childs - assertEquals(topDocsNum * childsNum, - cloudClient.query(new SolrQuery("type_s:child")).getResults().getNumFound()); - - + assertEquals( + topDocsNum, cloudClient.query(new SolrQuery("type_s:parent")).getResults().getNumFound()); + + // childs + assertEquals( + topDocsNum * childsNum, + cloudClient.query(new SolrQuery("type_s:child")).getResults().getNumFound()); + // grandchilds // - //each topDoc has t childs where each child has x = 0 + 2 + 4 + ..(t-1)*2 grands - //x = 2 * (1 + 2 + 3 +.. (t-1)) => arithmetic summ of t-1 - //x = 2 * ((t-1) * t / 2) = t * (t - 1) - assertEquals(topDocsNum * childsNum * (childsNum - 1), - cloudClient.query(new SolrQuery("type_s:grand")).getResults().getNumFound()); - - //delete + // each topDoc has t childs where each child has x = 0 + 2 + 4 + ..(t-1)*2 grands + // x = 2 * (1 + 2 + 3 +.. 
(t-1)) => arithmetic sum of t-1 + // x = 2 * ((t-1) * t / 2) = t * (t - 1) + assertEquals( + topDocsNum * childsNum * (childsNum - 1), + cloudClient.query(new SolrQuery("type_s:grand")).getResults().getNumFound()); + + // delete assertEquals(0, cloudClient.deleteByQuery("*:*").getStatus()); assertEquals(0, cloudClient.commit().getStatus()); - assertEquals(0, cloudClient.query(params("q","*:*")).getResults().getNumFound()); + assertEquals(0, cloudClient.query(params("q", "*:*")).getResults().getNumFound()); + + checkShardConsistency(params("q", "*:*", "rows", "9999", "_trace", "delAll")); - checkShardConsistency(params("q","*:*", "rows", "9999","_trace","delAll")); - return docId; } - - /** - * Recursive helper function for building out child and grandchild docs - */ - private long addChildren(String prefix, SolrInputDocument topDocument, int childIndex, boolean lastLevel, long docId) { + /** Recursive helper function for building out child and grandchild docs */ + private long addChildren( + String prefix, SolrInputDocument topDocument, int childIndex, boolean lastLevel, long docId) { SolrInputDocument childDocument = new SolrInputDocument(); childDocument.addField("id", docId++); childDocument.addField("type_s", prefix); for (int index = 0; index < childIndex; ++index) { - childDocument.addField(childIndex + prefix + index + "_s", childIndex + "value"+ index); - } - + childDocument.addField(childIndex + prefix + index + "_s", childIndex + "value" + index); + } + if (!lastLevel) { for (int i = 0; i < childIndex * 2; ++i) { docId = addChildren("grand", childDocument, i, true, docId); @@ -545,51 +611,58 @@ private long addChildren(String prefix, SolrInputDocument topDocument, int child topDocument.addChildDocument(childDocument); return docId; } - - public void testIndexingOneDocPerRequestWithHttpSolrClient() throws Exception { final CloudSolrClient cloudClient = cluster.getSolrClient(); final String collectionName = createAndSetNewDefaultCollection(); - + final int numDocs = atLeast(50); for (int i = 0; i < numDocs; i++) { UpdateRequest uReq; uReq = new UpdateRequest(); - assertEquals(0, cloudClient.add - (sdoc("id", i, "text_t", TestUtil.randomRealisticUnicodeString(random(), 200))).getStatus()); + assertEquals( + 0, + cloudClient + .add(sdoc("id", i, "text_t", TestUtil.randomRealisticUnicodeString(random(), 200))) + .getStatus()); } assertEquals(0, cloudClient.commit().getStatus()); - assertEquals(numDocs, cloudClient.query(params("q","*:*")).getResults().getNumFound()); - - checkShardConsistency(params("q","*:*", "rows", ""+(1 + numDocs),"_trace","addAll")); + assertEquals(numDocs, cloudClient.query(params("q", "*:*")).getResults().getNumFound()); + + checkShardConsistency(params("q", "*:*", "rows", "" + (1 + numDocs), "_trace", "addAll")); } - + public void testIndexingBatchPerRequestWithHttpSolrClient() throws Exception { final CloudSolrClient cloudClient = cluster.getSolrClient(); final String collectionName = createAndSetNewDefaultCollection(); final int numDocsPerBatch = atLeast(5); final int numBatchesPerThread = atLeast(5); - + final CountDownLatch abort = new CountDownLatch(1); class BatchIndexer implements Runnable { private boolean keepGoing() { return 0 < abort.getCount(); } - + final int name; + public BatchIndexer(int name) { this.name = name; } - + @Override public void run() { try { for (int batchId = 0; batchId < numBatchesPerThread && keepGoing(); batchId++) { final UpdateRequest req = new UpdateRequest(); for (int docId = 0; docId < numDocsPerBatch && keepGoing();
docId++) { - req.add(sdoc("id", "indexer" + name + "_" + batchId + "_" + docId, - "test_t", TestUtil.randomRealisticUnicodeString(random(), 200))); + req.add( + sdoc( + "id", + "indexer" + name + "_" + batchId + "_" + docId, + "test_t", + TestUtil.randomRealisticUnicodeString(random(), 200))); } assertEquals(0, req.process(cloudClient).getStatus()); } @@ -598,7 +671,8 @@ public void run() { throw new RuntimeException(e); } } - }; + } + ; final ExecutorService executor = ExecutorUtil.newMDCAwareCachedThreadPool("batchIndexing"); final int numThreads = random().nextInt(TEST_NIGHTLY ? 4 : 2) + 1; final List> futures = new ArrayList<>(numThreads); @@ -614,10 +688,12 @@ public void run() { // all we care about is propagating any possible execution exception... final Object ignored = result.get(); } - + cloudClient.commit(); - assertEquals(totalDocsExpected, cloudClient.query(params("q","*:*")).getResults().getNumFound()); - checkShardConsistency(params("q","*:*", "rows", ""+totalDocsExpected, "_trace","batches_done")); + assertEquals( + totalDocsExpected, cloudClient.query(params("q", "*:*")).getResults().getNumFound()); + checkShardConsistency( + params("q", "*:*", "rows", "" + totalDocsExpected, "_trace", "batches_done")); } public void testConcurrentIndexing() throws Exception { @@ -626,40 +702,44 @@ public void testConcurrentIndexing() throws Exception { final int numDocs = atLeast(50); final JettySolrRunner nodeToUpdate = cluster.getRandomJetty(random()); - try (ConcurrentUpdateSolrClient indexClient - = getConcurrentUpdateSolrClient(nodeToUpdate.getProxyBaseUrl() + "/" + collectionName, 10, 2)) { - + try (ConcurrentUpdateSolrClient indexClient = + getConcurrentUpdateSolrClient( + nodeToUpdate.getProxyBaseUrl() + "/" + collectionName, 10, 2)) { + for (int i = 0; i < numDocs; i++) { - indexClient.add(sdoc("id", i, "text_t", - TestUtil.randomRealisticUnicodeString(random(), 200))); + indexClient.add( + sdoc("id", i, "text_t", TestUtil.randomRealisticUnicodeString(random(), 200))); } indexClient.blockUntilFinished(); - + assertEquals(0, indexClient.commit().getStatus()); - assertEquals(numDocs, cloudClient.query(params("q","*:*")).getResults().getNumFound()); + assertEquals(numDocs, cloudClient.query(params("q", "*:*")).getResults().getNumFound()); - checkShardConsistency(params("q","*:*", "rows", ""+(1 + numDocs),"_trace","addAll")); + checkShardConsistency(params("q", "*:*", "rows", "" + (1 + numDocs), "_trace", "addAll")); } } - + /** * Inspects the cluster to determine all active shards/replicas for the default collection then, - * executes a distrib=false query using the specified params, and compares the resulting - * {@link SolrDocumentList}, failing if any replica does not agree with it's leader. + * executes a distrib=false query using the specified params, and compares the + * resulting {@link SolrDocumentList}, failing if any replica does not agree with it's leader. * * @see #cluster - * @see CloudInspectUtil#showDiff + * @see CloudInspectUtil#showDiff */ private void checkShardConsistency(final SolrParams params) throws Exception { // TODO: refactor into static in CloudInspectUtil w/ DocCollection param? // TODO: refactor to take in a BiFunction ? 
- - final SolrParams perReplicaParams = SolrParams.wrapDefaults(params("distrib", "false"), - params); - final DocCollection collection = cluster.getSolrClient().getZkStateReader() - .getClusterState().getCollection(cluster.getSolrClient().getDefaultCollection()); + + final SolrParams perReplicaParams = SolrParams.wrapDefaults(params("distrib", "false"), params); + final DocCollection collection = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollection(cluster.getSolrClient().getDefaultCollection()); log.info("Checking shard consistency via: {}", perReplicaParams); - for (Map.Entry entry : collection.getActiveSlicesMap().entrySet()) { + for (Map.Entry entry : collection.getActiveSlicesMap().entrySet()) { final String shardName = entry.getKey(); final Slice slice = entry.getValue(); log.info("Checking: {} -> {}", shardName, slice); @@ -669,19 +749,26 @@ private void checkShardConsistency(final SolrParams params) throws Exception { log.debug("Shard {}: Leader results: {}", shardName, leaderResults); for (Replica replica : slice) { try (HttpSolrClient replicaClient = getHttpSolrClient(replica.getCoreUrl())) { - final SolrDocumentList replicaResults = replicaClient.query(perReplicaParams).getResults(); + final SolrDocumentList replicaResults = + replicaClient.query(perReplicaParams).getResults(); if (log.isDebugEnabled()) { - log.debug("Shard {}: Replica ({}) results: {}", shardName, replica.getCoreName(), replicaResults); + log.debug( + "Shard {}: Replica ({}) results: {}", + shardName, + replica.getCoreName(), + replicaResults); } - assertEquals("inconsistency w/leader: shard=" + shardName + "core=" + replica.getCoreName(), - Collections.emptySet(), - CloudInspectUtil.showDiff(leaderResults, replicaResults, - shardName + " leader: " + leader.getCoreUrl(), - shardName + ": " + replica.getCoreUrl())); + assertEquals( + "inconsistency w/leader: shard=" + shardName + "core=" + replica.getCoreName(), + Collections.emptySet(), + CloudInspectUtil.showDiff( + leaderResults, + replicaResults, + shardName + " leader: " + leader.getCoreUrl(), + shardName + ": " + replica.getCoreUrl())); } } } } } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionOnCommitTest.java b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionOnCommitTest.java index 2c4ac646276..2e7b7539560 100644 --- a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionOnCommitTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionOnCommitTest.java @@ -16,6 +16,9 @@ */ package org.apache.solr.cloud; +import java.io.File; +import java.lang.invoke.MethodHandles; +import java.util.List; import org.apache.http.NoHttpResponseException; import org.apache.solr.client.solrj.cloud.SocketProxy; import org.apache.solr.client.solrj.embedded.JettySolrRunner; @@ -28,10 +31,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.File; -import java.lang.invoke.MethodHandles; -import java.util.List; - public class HttpPartitionOnCommitTest extends BasicDistributedZkTest { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -46,9 +45,9 @@ public static void setupSysProps() { System.setProperty("distribUpdateSoTimeout", "5000"); System.setProperty("solr.httpclient.retries", "0"); System.setProperty("solr.retries.on.forward", "0"); - System.setProperty("solr.retries.to.followers", "0"); + System.setProperty("solr.retries.to.followers", "0"); } - + public HttpPartitionOnCommitTest() { super(); sliceCount = 1; @@ -57,7 +56,9 
@@ public HttpPartitionOnCommitTest() { @Override protected boolean useTlogReplicas() { - return false; // TODO: tlog replicas makes commits take way to long due to what is likely a bug and it's TestInjection use + // TODO: tlog replicas make commits take way too long due to what is likely a bug and its + // TestInjection use + return false; } @Override @@ -76,10 +77,13 @@ private void multiShardTest() throws Exception { createCollection(testCollectionName, "conf1", 2, 2); cloudClient.setDefaultCollection(testCollectionName); - List<Replica> notLeaders = - ensureAllReplicasAreActive(testCollectionName, "shard1", 2, 2, 30); - assertTrue("Expected 1 replicas for collection " + testCollectionName - + " but found " + notLeaders.size() + "; clusterState: " + List<Replica> notLeaders = ensureAllReplicasAreActive(testCollectionName, "shard1", 2, 2, 30); + assertTrue( + "Expected 1 replicas for collection " + + testCollectionName + + " but found " + + notLeaders.size() + + "; clusterState: " + printClusterStateInfo(), notLeaders.size() == 1); @@ -96,12 +100,15 @@ private void multiShardTest() throws Exception { leaderProxy.close(); // let's find the leader of shard2 and ask him to commit - Replica shard2Leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard2"); + Replica shard2Leader = + cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard2"); sendCommitWithRetry(shard2Leader); Thread.sleep(sleepMsBeforeHealPartition); - cloudClient.getZkStateReader().forceUpdateCollection(testCollectionName); // get the latest state + cloudClient + .getZkStateReader() + .forceUpdateCollection(testCollectionName); // get the latest state leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1"); assertSame("Leader was not active", Replica.State.ACTIVE, leader.getState()); @@ -125,10 +132,13 @@ private void oneShardTest() throws Exception { createCollection(testCollectionName, "conf1", 1, 3); cloudClient.setDefaultCollection(testCollectionName); - List<Replica> notLeaders = - ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 3, 30); - assertTrue("Expected 2 replicas for collection " + testCollectionName - + " but found " + notLeaders.size() + "; clusterState: " + List<Replica> notLeaders = ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 3, 30); + assertTrue( + "Expected 2 replicas for collection " + + testCollectionName + + " but found " + + notLeaders.size() + + "; clusterState: " + printClusterStateInfo(), notLeaders.size() == 2); @@ -147,7 +157,8 @@ private void oneShardTest() throws Exception { sendCommitWithRetry(replica); Thread.sleep(sleepMsBeforeHealPartition); - cloudClient.getZkStateReader().forceUpdateCollection(testCollectionName); // get the latest state + // get the latest state + cloudClient.getZkStateReader().forceUpdateCollection(testCollectionName); leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1"); assertSame("Leader was not active", Replica.State.ACTIVE, leader.getState()); @@ -163,14 +174,18 @@ private void oneShardTest() throws Exception { log.info("oneShardTest completed OK"); } - /** - * Overrides the parent implementation to install a SocketProxy in-front of the Jetty server. - */ + /** Overrides the parent implementation to install a SocketProxy in-front of the Jetty server.
*/ @Override - public JettySolrRunner createJetty(File solrHome, String dataDir, - String shardList, String solrConfigOverride, String schemaOverride, Replica.Type replicaType) + public JettySolrRunner createJetty( + File solrHome, + String dataDir, + String shardList, + String solrConfigOverride, + String schemaOverride, + Replica.Type replicaType) throws Exception { - return createProxiedJetty(solrHome, dataDir, shardList, solrConfigOverride, schemaOverride, replicaType); + return createProxiedJetty( + solrHome, dataDir, shardList, solrConfigOverride, schemaOverride, replicaType); } protected void sendCommitWithRetry(Replica replica) throws Exception { @@ -187,7 +202,9 @@ protected void sendCommitWithRetry(Replica replica) throws Exception { } catch (Exception exc) { Throwable rootCause = SolrException.getRootCause(exc); if (rootCause instanceof NoHttpResponseException) { - log.warn("No HTTP response from sending commit request to {}; will re-try after waiting 3 seconds", replicaCoreUrl); + log.warn( + "No HTTP response from sending commit request to {}; will re-try after waiting 3 seconds", + replicaCoreUrl); Thread.sleep(3000); client.commit(); log.info("Second attempt at sending commit to {} succeeded", replicaCoreUrl); @@ -197,5 +214,4 @@ protected void sendCommitWithRetry(Replica replica) throws Exception { } } } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java index c5dd82da2c8..da0051f6bf7 100644 --- a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java @@ -66,17 +66,15 @@ import org.slf4j.LoggerFactory; /** - * Simulates HTTP partitions between a leader and replica but the replica does - * not lose its ZooKeeper connection. + * Simulates HTTP partitions between a leader and replica but the replica does not lose its + * ZooKeeper connection. 
*/ - @Slow @SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776") -// commented out on: 24-Dec-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2018-06-18 public class HttpPartitionTest extends AbstractFullDistribZkTestBase { - + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - + // To prevent the test assertions firing too fast before cluster state // recognizes (and propagates) partitions protected static final long sleepMsBeforeHealPartition = 300; @@ -90,42 +88,44 @@ public static void setupSysProps() { System.setProperty("distribUpdateSoTimeout", "10000"); System.setProperty("solr.httpclient.retries", "0"); System.setProperty("solr.retries.on.forward", "0"); - System.setProperty("solr.retries.to.followers", "0"); + System.setProperty("solr.retries.to.followers", "0"); } - + public HttpPartitionTest() { super(); sliceCount = 2; fixShardCount(3); } - /** - * We need to turn off directUpdatesToLeadersOnly due to SOLR-9512 - */ + /** We need to turn off directUpdatesToLeadersOnly due to SOLR-9512 */ @Override protected CloudSolrClient createCloudClient(String defaultCollection) { - CloudSolrClient client = new CloudSolrClient.Builder(Collections.singletonList(zkServer.getZkAddress()), Optional.empty()) - .sendDirectUpdatesToAnyShardReplica() - .withConnectionTimeout(5000) - .withSocketTimeout(10000) - .build(); + CloudSolrClient client = + new CloudSolrClient.Builder( + Collections.singletonList(zkServer.getZkAddress()), Optional.empty()) + .sendDirectUpdatesToAnyShardReplica() + .withConnectionTimeout(5000) + .withSocketTimeout(10000) + .build(); if (defaultCollection != null) client.setDefaultCollection(defaultCollection); return client; } - /** - * Overrides the parent implementation to install a SocketProxy in-front of the Jetty server. - */ + /** Overrides the parent implementation to install a SocketProxy in-front of the Jetty server. 
*/ @Override - public JettySolrRunner createJetty(File solrHome, String dataDir, - String shardList, String solrConfigOverride, String schemaOverride, Replica.Type replicaType) - throws Exception - { - return createProxiedJetty(solrHome, dataDir, shardList, solrConfigOverride, schemaOverride, replicaType); + public JettySolrRunner createJetty( + File solrHome, + String dataDir, + String shardList, + String solrConfigOverride, + String schemaOverride, + Replica.Type replicaType) + throws Exception { + return createProxiedJetty( + solrHome, dataDir, shardList, solrConfigOverride, schemaOverride, replicaType); } @Test - // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") public void test() throws Exception { waitForThingsToLevelOut(30, TimeUnit.SECONDS); @@ -158,19 +158,21 @@ private void testDoRecoveryOnRestart() throws Exception { // Inject pausing in recovery op, hence the replica won't be able to finish recovery TestInjection.prepRecoveryOpPauseForever = "true:100"; - + createCollection(testCollectionName, "conf1", 1, 2); cloudClient.setDefaultCollection(testCollectionName); sendDoc(1); - JettySolrRunner leaderJetty = getJettyOnPort(getReplicaPort(getShardLeader(testCollectionName, "shard1", 1000))); + JettySolrRunner leaderJetty = + getJettyOnPort(getReplicaPort(getShardLeader(testCollectionName, "shard1", 1000))); List notLeaders = ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 2, maxWaitSecsToSeeAllActive); assertDocsExistInAllReplicas(notLeaders, testCollectionName, 1, 1); SocketProxy proxy0 = getProxyForReplica(notLeaders.get(0)); - SocketProxy leaderProxy = getProxyForReplica(getShardLeader(testCollectionName, "shard1", 1000)); + SocketProxy leaderProxy = + getProxyForReplica(getShardLeader(testCollectionName, "shard1", 1000)); proxy0.close(); leaderProxy.close(); @@ -178,7 +180,9 @@ private void testDoRecoveryOnRestart() throws Exception { // indexing during a partition int achievedRf = sendDoc(2, leaderJetty); assertEquals("Unexpected achieved replication factor", 1, achievedRf); - try (ZkShardTerms zkShardTerms = new ZkShardTerms(testCollectionName, "shard1", cloudClient.getZkStateReader().getZkClient())) { + try (ZkShardTerms zkShardTerms = + new ZkShardTerms( + testCollectionName, "shard1", cloudClient.getZkStateReader().getZkClient())) { assertFalse(zkShardTerms.canBecomeLeader(notLeaders.get(0).getName())); } waitForState(testCollectionName, notLeaders.get(0).getName(), DOWN, 10000); @@ -193,8 +197,11 @@ private void testDoRecoveryOnRestart() throws Exception { JettySolrRunner notLeaderJetty = getJettyOnPort(getReplicaPort(notLeaders.get(0))); String notLeaderNodeName = notLeaderJetty.getNodeName(); notLeaderJetty.stop(); - - cloudClient.getZkStateReader().waitForLiveNodes(15, TimeUnit.SECONDS, SolrCloudTestCase.missingLiveNode(notLeaderNodeName)); + + cloudClient + .getZkStateReader() + .waitForLiveNodes( + 15, TimeUnit.SECONDS, SolrCloudTestCase.missingLiveNode(notLeaderNodeName)); notLeaderJetty.start(); ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 2, 130); @@ -213,17 +220,20 @@ protected void testRf2() throws Exception { String testCollectionName = "c8n_1x2"; createCollectionRetry(testCollectionName, "conf1", 1, 2); cloudClient.setDefaultCollection(testCollectionName); - + sendDoc(1); - - Replica notLeader = - ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 2, maxWaitSecsToSeeAllActive).get(0); - JettySolrRunner leaderJetty = 
getJettyOnPort(getReplicaPort(getShardLeader(testCollectionName, "shard1", 1000))); + + Replica notLeader = + ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 2, maxWaitSecsToSeeAllActive) + .get(0); + JettySolrRunner leaderJetty = + getJettyOnPort(getReplicaPort(getShardLeader(testCollectionName, "shard1", 1000))); // ok, now introduce a network partition between the leader and the replica SocketProxy proxy = getProxyForReplica(notLeader); - SocketProxy leaderProxy = getProxyForReplica(getShardLeader(testCollectionName, "shard1", 1000)); - + SocketProxy leaderProxy = + getProxyForReplica(getShardLeader(testCollectionName, "shard1", 1000)); + proxy.close(); leaderProxy.close(); @@ -231,31 +241,33 @@ protected void testRf2() throws Exception { sendDoc(2, leaderJetty); // replica should publish itself as DOWN if the network is not healed after some amount time waitForState(testCollectionName, notLeader.getName(), DOWN, 10000); - proxy.reopen(); leaderProxy.reopen(); - - List<Replica> notLeaders = + + List<Replica> notLeaders = ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 2, maxWaitSecsToSeeAllActive); - + int achievedRf = sendDoc(3); if (achievedRf == 1) { - // this case can happen when leader reuse an connection get established before network partition + // this case can happen when the leader reuses a connection established before the network + // partition // TODO: Remove when SOLR-11776 get committed ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 2, maxWaitSecsToSeeAllActive); } - + // sent 3 docs in so far, verify they are on the leader and replica assertDocsExistInAllReplicas(notLeaders, testCollectionName, 1, 3); - // Get the max version from the replica core to make sure it gets updated after recovery (see SOLR-7625) + // Get the max version from the replica core to make sure it gets updated after recovery (see + // SOLR-7625) JettySolrRunner replicaJetty = getJettyOnPort(getReplicaPort(notLeader)); CoreContainer coreContainer = replicaJetty.getCoreContainer(); ZkCoreNodeProps replicaCoreNodeProps = new ZkCoreNodeProps(notLeader); String coreName = replicaCoreNodeProps.getCoreName(); Long maxVersionBefore = null; try (SolrCore core = coreContainer.getCore(coreName)) { - assertNotNull("Core '"+coreName+"' not found for replica: "+notLeader.getName(), core); + assertNotNull("Core '" + coreName + "' not found for replica: " + notLeader.getName(), core); UpdateLog ulog = core.getUpdateHandler().getUpdateLog(); maxVersionBefore = ulog.getCurrentMaxVersion(); } @@ -284,21 +296,27 @@ protected void testRf2() throws Exception { // always send doc directly to leader without going through proxy sendDoc(d + 4, leaderJetty); // 4 is offset as we've already indexed 1-3 } - + // restore connectivity if lost if (hasPartition) { proxy.reopen(); leaderProxy.reopen(); } - - notLeaders = ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 2, maxWaitSecsToSeeAllActive); + + notLeaders = + ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 2, maxWaitSecsToSeeAllActive); try (SolrCore core = coreContainer.getCore(coreName)) { assertNotNull("Core '" + coreName + "' not found for replica: " + notLeader.getName(), core); Long currentMaxVersion = core.getUpdateHandler().getUpdateLog().getCurrentMaxVersion(); - log.info("After recovery, looked up NEW max version bucket seed {} for core {}, was: {}" , currentMaxVersion, coreName, maxVersionBefore); - assertTrue("max version bucket seed not updated after recovery!", currentMaxVersion > maxVersionBefore); + log.info( + "After
recovery, looked up NEW max version bucket seed {} for core {}, was: {}", + currentMaxVersion, + coreName, + maxVersionBefore); + assertTrue( + "max version bucket seed not updated after recovery!", + currentMaxVersion > maxVersionBefore); } // verify all docs received @@ -310,7 +328,8 @@ protected void testRf2() throws Exception { attemptCollectionDelete(cloudClient, testCollectionName); } - protected void waitForState(String collection, String replicaName, Replica.State state, long ms) throws KeeperException, InterruptedException { + protected void waitForState(String collection, String replicaName, Replica.State state, long ms) + throws KeeperException, InterruptedException { TimeOut timeOut = new TimeOut(ms, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); Replica.State replicaState = Replica.State.ACTIVE; while (!timeOut.hasTimedOut()) { @@ -323,55 +342,69 @@ protected void waitForState(String collection, String replicaName, Replica.State replicaState = partitionedReplica.getState(); if (replicaState == state) return; } - assertEquals("Timeout waiting for state "+ state +" of replica " + replicaName + ", current state " + replicaState, - state, replicaState); + assertEquals( + "Timeout waiting for state " + + state + + " of replica " + + replicaName + + ", current state " + + replicaState, + state, + replicaState); } protected void testRf3() throws Exception { // create a collection that has 1 shard but 2 replicas String testCollectionName = "c8n_1x3"; createCollectionRetry(testCollectionName, "conf1", 1, 3); - + cloudClient.setDefaultCollection(testCollectionName); - + sendDoc(1); - List notLeaders = + List notLeaders = ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 3, maxWaitSecsToSeeAllActive); - assertTrue("Expected 2 replicas for collection " + testCollectionName - + " but found " + notLeaders.size() + "; clusterState: " - + printClusterStateInfo(testCollectionName), + assertTrue( + "Expected 2 replicas for collection " + + testCollectionName + + " but found " + + notLeaders.size() + + "; clusterState: " + + printClusterStateInfo(testCollectionName), notLeaders.size() == 2); - JettySolrRunner leaderJetty = getJettyOnPort(getReplicaPort(getShardLeader(testCollectionName, "shard1", 1000))); + JettySolrRunner leaderJetty = + getJettyOnPort(getReplicaPort(getShardLeader(testCollectionName, "shard1", 1000))); // ok, now introduce a network partition between the leader and the replica SocketProxy proxy0 = getProxyForReplica(notLeaders.get(0)); - SocketProxy leaderProxy = getProxyForReplica(getShardLeader(testCollectionName, "shard1", 1000)); - + SocketProxy leaderProxy = + getProxyForReplica(getShardLeader(testCollectionName, "shard1", 1000)); + proxy0.close(); leaderProxy.close(); - + // indexing during a partition sendDoc(2, leaderJetty); - + Thread.sleep(sleepMsBeforeHealPartition); proxy0.reopen(); - + SocketProxy proxy1 = getProxyForReplica(notLeaders.get(1)); proxy1.close(); - + sendDoc(3, leaderJetty); - + Thread.sleep(sleepMsBeforeHealPartition); proxy1.reopen(); leaderProxy.reopen(); - + // sent 4 docs in so far, verify they are on the leader and replica - notLeaders = ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 3, maxWaitSecsToSeeAllActive); - + notLeaders = + ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 3, maxWaitSecsToSeeAllActive); + sendDoc(4); - + assertDocsExistInAllReplicas(notLeaders, testCollectionName, 1, 4); log.info("testRf3 succeeded ... 
deleting the {} collection", testCollectionName); @@ -392,19 +425,25 @@ protected void testLeaderZkSessionLoss() throws Exception { List notLeaders = ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 2, maxWaitSecsToSeeAllActive); - assertTrue("Expected 1 replicas for collection " + testCollectionName - + " but found " + notLeaders.size() + "; clusterState: " + assertTrue( + "Expected 1 replicas for collection " + + testCollectionName + + " but found " + + notLeaders.size() + + "; clusterState: " + printClusterStateInfo(testCollectionName), notLeaders.size() == 1); - Replica leader = - cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1"); + Replica leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1"); String leaderNode = leader.getNodeName(); - assertNotNull("Could not find leader for shard1 of "+ - testCollectionName+"; clusterState: "+printClusterStateInfo(testCollectionName), leader); + assertNotNull( + "Could not find leader for shard1 of " + + testCollectionName + + "; clusterState: " + + printClusterStateInfo(testCollectionName), + leader); JettySolrRunner leaderJetty = getJettyOnPort(getReplicaPort(leader)); - SolrInputDocument doc = new SolrInputDocument(); doc.addField(id, String.valueOf(2)); doc.addField("a_t", "hello" + 2); @@ -420,7 +459,8 @@ protected void testLeaderZkSessionLoss() throws Exception { Replica currentLeader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1"); currentLeaderName = currentLeader.getName(); - } catch (Exception exc) {} + } catch (Exception exc) { + } if (expectedNewLeaderCoreNodeName.equals(currentLeaderName)) break; // new leader was elected after zk session expiration @@ -437,8 +477,8 @@ protected void testLeaderZkSessionLoss() throws Exception { if (log.isInfoEnabled()) { log.info("Sending doc 2 to old leader {}", leader.getName()); } - try ( HttpSolrClient leaderSolr = getHttpSolrClient(leader, testCollectionName)) { - + try (HttpSolrClient leaderSolr = getHttpSolrClient(leader, testCollectionName)) { + leaderSolr.add(doc); leaderSolr.close(); @@ -452,23 +492,25 @@ protected void testLeaderZkSessionLoss() throws Exception { try (HttpSolrClient client = getHttpSolrClient(currentLeader, testCollectionName)) { client.add(doc); // this should work } - } + } - List participatingReplicas = getActiveOrRecoveringReplicas(testCollectionName, "shard1"); + List participatingReplicas = + getActiveOrRecoveringReplicas(testCollectionName, "shard1"); Set replicasToCheck = new HashSet<>(); - for (Replica stillUp : participatingReplicas) - replicasToCheck.add(stillUp.getName()); + for (Replica stillUp : participatingReplicas) replicasToCheck.add(stillUp.getName()); waitToSeeReplicasActive(testCollectionName, "shard1", replicasToCheck, 30); assertDocsExistInAllReplicas(participatingReplicas, testCollectionName, 1, 2); - log.info("testLeaderZkSessionLoss succeeded ... deleting the {} collection", testCollectionName); + log.info( + "testLeaderZkSessionLoss succeeded ... 
deleting the {} collection", testCollectionName); // try to clean up attemptCollectionDelete(cloudClient, testCollectionName); } - protected List getActiveOrRecoveringReplicas(String testCollectionName, String shardId) throws Exception { - Map activeReplicas = new HashMap(); + protected List getActiveOrRecoveringReplicas(String testCollectionName, String shardId) + throws Exception { + Map activeReplicas = new HashMap(); ZkStateReader zkr = cloudClient.getZkStateReader(); ClusterState cs = zkr.getClusterState(); assertNotNull(cs); @@ -481,24 +523,24 @@ protected List getActiveOrRecoveringReplicas(String testCollectionName, } } } - } + } List replicas = new ArrayList(); replicas.addAll(activeReplicas.values()); return replicas; } /** - * Assert docs exists in {@code notLeaders} replicas, docs must also exist in the shard1 leader as well. - * This method uses RTG for validation therefore it must work for asserting both TLOG and NRT replicas. + * Assert docs exists in {@code notLeaders} replicas, docs must also exist in the shard1 leader as + * well. This method uses RTG for validation therefore it must work for asserting both TLOG and + * NRT replicas. */ - protected void assertDocsExistInAllReplicas(List notLeaders, - String testCollectionName, int firstDocId, int lastDocId) + protected void assertDocsExistInAllReplicas( + List notLeaders, String testCollectionName, int firstDocId, int lastDocId) throws Exception { Replica leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1", 10000); HttpSolrClient leaderSolr = getHttpSolrClient(leader, testCollectionName); - List replicas = - new ArrayList(notLeaders.size()); + List replicas = new ArrayList(notLeaders.size()); for (Replica r : notLeaders) { replicas.add(getHttpSolrClient(r, testCollectionName)); @@ -528,8 +570,10 @@ protected HttpSolrClient getHttpSolrClient(Replica replica, String coll) throws } // Send doc directly to a server (without going through proxy) - protected int sendDoc(int docId, JettySolrRunner leaderJetty) throws IOException, SolrServerException { - try (HttpSolrClient solrClient = new HttpSolrClient.Builder(leaderJetty.getBaseUrl().toString()).build()) { + protected int sendDoc(int docId, JettySolrRunner leaderJetty) + throws IOException, SolrServerException { + try (HttpSolrClient solrClient = + new HttpSolrClient.Builder(leaderJetty.getBaseUrl().toString()).build()) { return sendDoc(docId, solrClient, cloudClient.getDefaultCollection()); } } @@ -538,48 +582,69 @@ protected int sendDoc(int docId) throws Exception { return sendDoc(docId, cloudClient, cloudClient.getDefaultCollection()); } - protected int sendDoc(int docId, SolrClient solrClient, String collection) throws IOException, SolrServerException { + protected int sendDoc(int docId, SolrClient solrClient, String collection) + throws IOException, SolrServerException { SolrInputDocument doc = new SolrInputDocument(); doc.addField(id, String.valueOf(docId)); doc.addField("a_t", "hello" + docId); UpdateRequest up = new UpdateRequest(); up.add(doc); - return cloudClient.getMinAchievedReplicationFactor(collection, solrClient.request(up, collection)); + return cloudClient.getMinAchievedReplicationFactor( + collection, solrClient.request(up, collection)); } /** - * Query the real-time get handler for a specific doc by ID to verify it - * exists in the provided server, using distrib=false so it doesn't route to another replica. 
+ * Query the real-time get handler for a specific doc by ID to verify it exists in the provided + * server, using distrib=false so it doesn't route to another replica. */ protected void assertDocExists(HttpSolrClient solr, String coll, String docId) throws Exception { NamedList rsp = realTimeGetDocId(solr, docId); String match = JSONTestUtil.matchObj("/id", rsp.get("doc"), docId); - assertTrue("Doc with id=" + docId + " not found in " + solr.getBaseURL() - + " due to: " + match + "; rsp="+rsp, match == null); + assertTrue( + "Doc with id=" + + docId + + " not found in " + + solr.getBaseURL() + + " due to: " + + match + + "; rsp=" + + rsp, + match == null); } - protected void assertDocNotExists(HttpSolrClient solr, String coll, String docId) throws Exception { + protected void assertDocNotExists(HttpSolrClient solr, String coll, String docId) + throws Exception { NamedList rsp = realTimeGetDocId(solr, docId); String match = JSONTestUtil.matchObj("/id", rsp.get("doc"), Integer.valueOf(docId)); - assertTrue("Doc with id=" + docId + " is found in " + solr.getBaseURL() - + " due to: " + match + "; rsp="+rsp, match != null); + assertTrue( + "Doc with id=" + + docId + + " is found in " + + solr.getBaseURL() + + " due to: " + + match + + "; rsp=" + + rsp, + match != null); } - private NamedList realTimeGetDocId(HttpSolrClient solr, String docId) throws SolrServerException, IOException { + private NamedList realTimeGetDocId(HttpSolrClient solr, String docId) + throws SolrServerException, IOException { QueryRequest qr = new QueryRequest(params("qt", "/get", "id", docId, "distrib", "false")); return solr.request(qr); } protected int getReplicaPort(Replica replica) { String replicaNode = replica.getNodeName(); - String tmp = replicaNode.substring(replicaNode.indexOf(':')+1); - if (tmp.indexOf('_') != -1) - tmp = tmp.substring(0,tmp.indexOf('_')); - return Integer.parseInt(tmp); + String tmp = replicaNode.substring(replicaNode.indexOf(':') + 1); + if (tmp.indexOf('_') != -1) tmp = tmp.substring(0, tmp.indexOf('_')); + return Integer.parseInt(tmp); } - protected void waitToSeeReplicasActive(String testCollectionName, String shardId, Set replicasToCheck, int maxWaitSecs) throws Exception { + protected void waitToSeeReplicasActive( + String testCollectionName, String shardId, Set replicasToCheck, int maxWaitSecs) + throws Exception { final RTimer timer = new RTimer(); ZkStateReader zkr = cloudClient.getZkStateReader(); @@ -594,13 +659,12 @@ protected void waitToSeeReplicasActive(String testCollectionName, String shardId final DocCollection docCollection = cs.getCollectionOrNull(testCollectionName); assertNotNull(docCollection); Slice shard = docCollection.getSlice(shardId); - assertNotNull("No Slice for "+shardId, shard); + assertNotNull("No Slice for " + shardId, shard); allReplicasUp = true; // assume true // wait to see all replicas are "active" for (Replica replica : shard.getReplicas()) { - if (!replicasToCheck.contains(replica.getName())) - continue; + if (!replicasToCheck.contains(replica.getName())) continue; final Replica.State state = replica.getState(); if (state != Replica.State.ACTIVE) { @@ -614,18 +678,23 @@ protected void waitToSeeReplicasActive(String testCollectionName, String shardId if (!allReplicasUp) { try { Thread.sleep(200L); - } catch (Exception ignoreMe) {} + } catch (Exception ignoreMe) { + } waitMs += 200L; } } // end while if (!allReplicasUp) - fail("Didn't see replicas "+ replicasToCheck + - " come up within " + maxWaitMs + " ms! 
ClusterState: " + printClusterStateInfo(testCollectionName)); + fail( + "Didn't see replicas " + + replicasToCheck + + " come up within " + + maxWaitMs + + " ms! ClusterState: " + + printClusterStateInfo(testCollectionName)); if (log.isInfoEnabled()) { log.info("Took {} ms to see replicas [{}] become active.", timer.getTime(), replicasToCheck); } } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionWithTlogReplicasTest.java b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionWithTlogReplicasTest.java index f7bf65c7e9a..7df2c0fefb8 100644 --- a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionWithTlogReplicasTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionWithTlogReplicasTest.java @@ -30,5 +30,4 @@ public class HttpPartitionWithTlogReplicasTest extends HttpPartitionTest { protected boolean useTlogReplicas() { return true; } - -} \ No newline at end of file +} diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionContextKeyTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionContextKeyTest.java index ee74f680ee6..9b7d9cef558 100644 --- a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionContextKeyTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionContextKeyTest.java @@ -21,7 +21,6 @@ import java.util.List; import java.util.Locale; import java.util.concurrent.TimeUnit; - import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.HttpSolrClient; @@ -44,57 +43,63 @@ public class LeaderElectionContextKeyTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { configureCluster(1) - .addConfig("config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .configure(); for (int i = 1; i <= 2; i++) { // Create two collections with same order of requests, no parallel // therefore Assign.buildCoreNodeName will create same coreNodeName - CollectionAdminRequest - .createCollection("testCollection"+i, "config", 2, 1) + CollectionAdminRequest.createCollection("testCollection" + i, "config", 2, 1) .setCreateNodeSet("") .process(cluster.getSolrClient()); - CollectionAdminRequest - .addReplicaToShard("testCollection"+i, "shard1") + CollectionAdminRequest.addReplicaToShard("testCollection" + i, "shard1") .process(cluster.getSolrClient()); - CollectionAdminRequest - .addReplicaToShard("testCollection"+i, "shard2") + CollectionAdminRequest.addReplicaToShard("testCollection" + i, "shard2") .process(cluster.getSolrClient()); } - AbstractDistribZkTestBase.waitForRecoveriesToFinish("testCollection1", cluster.getSolrClient().getZkStateReader(), - false, true, 30); - AbstractDistribZkTestBase.waitForRecoveriesToFinish("testCollection2", cluster.getSolrClient().getZkStateReader(), - false, true, 30); + AbstractDistribZkTestBase.waitForRecoveriesToFinish( + "testCollection1", cluster.getSolrClient().getZkStateReader(), false, true, 30); + AbstractDistribZkTestBase.waitForRecoveriesToFinish( + "testCollection2", cluster.getSolrClient().getZkStateReader(), false, true, 30); } @Test - public void test() throws KeeperException, InterruptedException, IOException, SolrServerException { + public void test() + throws KeeperException, InterruptedException, IOException, SolrServerException { ZkStateReader stateReader = cluster.getSolrClient().getZkStateReader(); 
stateReader.forceUpdateCollection(TEST_COLLECTION_1); ClusterState clusterState = stateReader.getClusterState(); // The test assume that TEST_COLLECTION_1 and TEST_COLLECTION_2 will have identical layout // ( same replica's name on every shard ) for (int i = 1; i <= 2; i++) { - String coll1ShardiLeader = clusterState.getCollection(TEST_COLLECTION_1).getLeader("shard"+i).getName(); - String coll2ShardiLeader = clusterState.getCollection(TEST_COLLECTION_2).getLeader("shard"+i).getName(); - String assertMss = String.format(Locale.ROOT, "Expect %s and %s each have a replica with same name on shard %s", - coll1ShardiLeader, coll2ShardiLeader, "shard"+i); - assertEquals( - assertMss, - coll1ShardiLeader, - coll2ShardiLeader - ); + String coll1ShardiLeader = + clusterState.getCollection(TEST_COLLECTION_1).getLeader("shard" + i).getName(); + String coll2ShardiLeader = + clusterState.getCollection(TEST_COLLECTION_2).getLeader("shard" + i).getName(); + String assertMss = + String.format( + Locale.ROOT, + "Expect %s and %s each have a replica with same name on shard %s", + coll1ShardiLeader, + coll2ShardiLeader, + "shard" + i); + assertEquals(assertMss, coll1ShardiLeader, coll2ShardiLeader); } String shard = "shard" + String.valueOf(random().nextInt(2) + 1); Replica replica = clusterState.getCollection(TEST_COLLECTION_1).getLeader(shard); assertNotNull(replica); - try (SolrClient shardLeaderClient = new HttpSolrClient.Builder(replica.get("base_url").toString()).build()) { - assertEquals(1L, getElectionNodes(TEST_COLLECTION_1, shard, stateReader.getZkClient()).size()); - List collection2Shard1Nodes = getElectionNodes(TEST_COLLECTION_2, "shard1", stateReader.getZkClient()); - List collection2Shard2Nodes = getElectionNodes(TEST_COLLECTION_2, "shard2", stateReader.getZkClient()); + try (SolrClient shardLeaderClient = + new HttpSolrClient.Builder(replica.get("base_url").toString()).build()) { + assertEquals( + 1L, getElectionNodes(TEST_COLLECTION_1, shard, stateReader.getZkClient()).size()); + List collection2Shard1Nodes = + getElectionNodes(TEST_COLLECTION_2, "shard1", stateReader.getZkClient()); + List collection2Shard2Nodes = + getElectionNodes(TEST_COLLECTION_2, "shard2", stateReader.getZkClient()); CoreAdminRequest.unloadCore(replica.getCoreName(), shardLeaderClient); // Waiting for leader election being kicked off long timeout = System.nanoTime() + TimeUnit.NANOSECONDS.convert(60, TimeUnit.SECONDS); @@ -109,12 +114,22 @@ public void test() throws KeeperException, InterruptedException, IOException, So } assertTrue(found); // There are no leader election was kicked off on testCollection2 - assertThat(collection2Shard1Nodes, CoreMatchers.is(getElectionNodes(TEST_COLLECTION_2, "shard1", stateReader.getZkClient()))); - assertThat(collection2Shard2Nodes, CoreMatchers.is(getElectionNodes(TEST_COLLECTION_2, "shard2", stateReader.getZkClient()))); + assertThat( + collection2Shard1Nodes, + CoreMatchers.is( + getElectionNodes(TEST_COLLECTION_2, "shard1", stateReader.getZkClient()))); + assertThat( + collection2Shard2Nodes, + CoreMatchers.is( + getElectionNodes(TEST_COLLECTION_2, "shard2", stateReader.getZkClient()))); } } - private List getElectionNodes(String collection, String shard, SolrZkClient client) throws KeeperException, InterruptedException { - return client.getChildren("/collections/"+collection+"/leader_elect/"+shard+LeaderElector.ELECTION_NODE, null, true); + private List getElectionNodes(String collection, String shard, SolrZkClient client) + throws KeeperException, InterruptedException { + 
return client.getChildren( + "/collections/" + collection + "/leader_elect/" + shard + LeaderElector.ELECTION_NODE, + null, + true); } } diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionIntegrationTest.java index 35ffa6951ea..6e31054faa7 100644 --- a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionIntegrationTest.java @@ -20,7 +20,6 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.embedded.JettySolrRunner; @@ -33,7 +32,7 @@ @Slow public class LeaderElectionIntegrationTest extends SolrCloudTestCase { - private final static int NUM_REPLICAS_OF_SHARD1 = 5; + private static final int NUM_REPLICAS_OF_SHARD1 = 5; @BeforeClass public static void beforeClass() { @@ -43,26 +42,24 @@ public static void beforeClass() { @Override public void setUp() throws Exception { super.setUp(); - configureCluster(6) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(6).addConfig("conf", configset("cloud-minimal")).configure(); } - private void createCollection(String collection) throws IOException, SolrServerException { - assertEquals(0, CollectionAdminRequest.createCollection(collection, - "conf", 2, 1) - .process(cluster.getSolrClient()).getStatus()); + assertEquals( + 0, + CollectionAdminRequest.createCollection(collection, "conf", 2, 1) + .process(cluster.getSolrClient()) + .getStatus()); for (int i = 1; i < NUM_REPLICAS_OF_SHARD1; i++) { assertTrue( - CollectionAdminRequest.addReplicaToShard(collection, "shard1").process(cluster.getSolrClient()).isSuccess() - ); + CollectionAdminRequest.addReplicaToShard(collection, "shard1") + .process(cluster.getSolrClient()) + .isSuccess()); } } @Test - // 12-Jun-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 04-May-2018 - // commented 4-Sep-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018 public void testSimpleSliceLeaderElection() throws Exception { String collection = "collection1"; createCollection(collection); @@ -74,8 +71,16 @@ public void testSimpleSliceLeaderElection() throws Exception { String leader = getLeader(collection); JettySolrRunner jetty = getRunner(leader); assertNotNull(jetty); - assertEquals("shard1", jetty.getCoreContainer().getCores().iterator().next() - .getCoreDescriptor().getCloudDescriptor().getShardId()); + assertEquals( + "shard1", + jetty + .getCoreContainer() + .getCores() + .iterator() + .next() + .getCoreDescriptor() + .getCloudDescriptor() + .getShardId()); jetty.stop(); stoppedRunners.add(jetty); } @@ -83,8 +88,8 @@ public void testSimpleSliceLeaderElection() throws Exception { for (JettySolrRunner runner : stoppedRunners) { runner.start(); } - waitForState("Expected to see nodes come back " + collection, collection, - (n, c) -> n.size() == 6); + waitForState( + "Expected to see nodes come back " + collection, collection, (n, c) -> n.size() == 6); CollectionAdminRequest.deleteCollection(collection).process(cluster.getSolrClient()); // testLeaderElectionAfterClientTimeout @@ -133,15 +138,17 @@ public void testSimpleSliceLeaderElection() throws Exception { } private JettySolrRunner getRunner(String nodeName) { - for (JettySolrRunner jettySolrRunner : 
cluster.getJettySolrRunners()){ - if (!jettySolrRunner.isStopped() && nodeName.equals(jettySolrRunner.getNodeName())) return jettySolrRunner; + for (JettySolrRunner jettySolrRunner : cluster.getJettySolrRunners()) { + if (!jettySolrRunner.isStopped() && nodeName.equals(jettySolrRunner.getNodeName())) + return jettySolrRunner; } return null; } private String getLeader(String collection) throws InterruptedException { - ZkNodeProps props = cluster.getSolrClient().getZkStateReader().getLeaderRetry(collection, "shard1", 30000); + ZkNodeProps props = + cluster.getSolrClient().getZkStateReader().getLeaderRetry(collection, "shard1", 30000); String leader = props.getStr(ZkStateReader.NODE_NAME_PROP); return leader; diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java index d542edf15a7..1ef126083ba 100644 --- a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java @@ -16,6 +16,8 @@ */ package org.apache.solr.cloud; +import static org.apache.solr.common.cloud.ZkStateReader.URL_SCHEME; + import java.io.IOException; import java.lang.invoke.MethodHandles; import java.nio.file.Path; @@ -27,16 +29,15 @@ import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.cloud.OnReconnect; import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.util.Utils; import org.apache.solr.common.cloud.ZkCoreNodeProps; import org.apache.solr.common.cloud.ZkNodeProps; import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.util.SolrNamedThreadFactory; +import org.apache.solr.common.util.Utils; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException.NoNodeException; import org.apache.zookeeper.KeeperException.SessionExpiredException; @@ -46,8 +47,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.common.cloud.ZkStateReader.URL_SCHEME; - @Slow public class LeaderElectionTest extends SolrTestCaseJ4 { @@ -57,19 +56,15 @@ public class LeaderElectionTest extends SolrTestCaseJ4 { private ZkTestServer server; private SolrZkClient zkClient; private ZkStateReader zkStateReader; - private Map<Integer,Thread> seqToThread; + private Map<Integer, Thread> seqToThread; private volatile boolean stopStress = false; @BeforeClass - public static void beforeClass() { - - } + public static void beforeClass() {} @AfterClass - public static void afterClass() { - - } + public static void afterClass() {} @Override public void setUp() throws Exception { @@ -82,7 +77,7 @@ public void setUp() throws Exception { zkClient = new SolrZkClient(server.getZkAddress(), TIMEOUT); zkStateReader = new ZkStateReader(zkClient); - seqToThread = Collections.synchronizedMap(new HashMap<Integer,Thread>()); + seqToThread = Collections.synchronizedMap(new HashMap<Integer, Thread>()); zkClient.makePath("/collections/collection1", true); zkClient.makePath("/collections/collection2", true); } @@ -90,10 +85,15 @@ public void setUp() throws Exception { class TestLeaderElectionContext extends ShardLeaderElectionContextBase { private long runLeaderDelay = 0; - public TestLeaderElectionContext(LeaderElector leaderElector, - String shardId, String collection, String coreNodeName, ZkNodeProps props, - ZkController zkController, long runLeaderDelay) { - super
(leaderElector, shardId, collection, coreNodeName, props, zkController); + public TestLeaderElectionContext( + LeaderElector leaderElector, + String shardId, + String collection, + String coreNodeName, + ZkNodeProps props, + ZkController zkController, + long runLeaderDelay) { + super(leaderElector, shardId, collection, coreNodeName, props, zkController); this.runLeaderDelay = runLeaderDelay; } @@ -143,31 +143,40 @@ public ClientThread(String shard, int nodeNumber) throws Exception { this(null, shard, nodeNumber, 0); } - public ClientThread(ElectorSetup es, String shard, int nodeNumber, long runLeaderDelay) throws Exception { + public ClientThread(ElectorSetup es, String shard, int nodeNumber, long runLeaderDelay) + throws Exception { super("Thread-" + shard + nodeNumber); this.shard = shard; this.nodeName = shard + nodeNumber + ":80_solr"; this.runLeaderDelay = runLeaderDelay; - props = new ZkNodeProps(ZkStateReader.NODE_NAME_PROP, this.nodeName, ZkStateReader.BASE_URL_PROP, Integer.toString(nodeNumber), ZkStateReader.CORE_NAME_PROP, ""); + props = + new ZkNodeProps( + ZkStateReader.NODE_NAME_PROP, + this.nodeName, + ZkStateReader.BASE_URL_PROP, + Integer.toString(nodeNumber), + ZkStateReader.CORE_NAME_PROP, + ""); this.es = es; if (this.es == null) { - this.es = new ElectorSetup(() -> { - try { - setupOnConnect(); - } catch (Throwable t) { - } - }); + this.es = + new ElectorSetup( + () -> { + try { + setupOnConnect(); + } catch (Throwable t) { + } + }); } } - private void setupOnConnect() throws InterruptedException, KeeperException, - IOException { + private void setupOnConnect() throws InterruptedException, KeeperException, IOException { assertNotNull(es); - TestLeaderElectionContext context = new TestLeaderElectionContext( - es.elector, shard, "collection1", nodeName, - props, es.zkController, runLeaderDelay); + TestLeaderElectionContext context = + new TestLeaderElectionContext( + es.elector, shard, "collection1", nodeName, props, es.zkController, runLeaderDelay); es.elector.setup(context); seq = es.elector.joinElection(context, false); electionDone = true; @@ -191,7 +200,6 @@ public void run() { return; } } - } public void close() { @@ -209,11 +217,18 @@ public void testBasic() throws Exception { LeaderElector elector = new LeaderElector(zkClient); ZkController zkController = MockSolrSource.makeSimpleMock(null, null, zkClient); String nodeName = "127.0.0.1:80_solr"; - ZkNodeProps props = new ZkNodeProps(ZkStateReader.NODE_NAME_PROP, nodeName, ZkStateReader.BASE_URL_PROP, - zkStateReader.getBaseUrlForNodeName(nodeName), ZkStateReader.CORE_NAME_PROP, ""); - - ElectionContext context = new ShardLeaderElectionContextBase(elector, - "shard2", "collection1", "dummynode1", props, zkController); + ZkNodeProps props = + new ZkNodeProps( + ZkStateReader.NODE_NAME_PROP, + nodeName, + ZkStateReader.BASE_URL_PROP, + zkStateReader.getBaseUrlForNodeName(nodeName), + ZkStateReader.CORE_NAME_PROP, + ""); + + ElectionContext context = + new ShardLeaderElectionContextBase( + elector, "shard2", "collection1", "dummynode1", props, zkController); elector.setup(context); elector.joinElection(context, false); String urlScheme = zkStateReader.getClusterProperty(URL_SCHEME, "http"); @@ -225,11 +240,18 @@ public void testCancelElection() throws Exception { LeaderElector first = new LeaderElector(zkClient); String nodeName = "127.0.0.1:80_solr"; - ZkNodeProps props = new ZkNodeProps(ZkStateReader.NODE_NAME_PROP, nodeName, ZkStateReader.BASE_URL_PROP, - zkStateReader.getBaseUrlForNodeName(nodeName), 
ZkStateReader.CORE_NAME_PROP, "1"); + ZkNodeProps props = + new ZkNodeProps( + ZkStateReader.NODE_NAME_PROP, + nodeName, + ZkStateReader.BASE_URL_PROP, + zkStateReader.getBaseUrlForNodeName(nodeName), + ZkStateReader.CORE_NAME_PROP, + "1"); ZkController zkController = MockSolrSource.makeSimpleMock(null, null, zkClient); - ElectionContext firstContext = new ShardLeaderElectionContextBase(first, - "slice1", "collection2", "dummynode1", props, zkController); + ElectionContext firstContext = + new ShardLeaderElectionContextBase( + first, "slice1", "collection2", "dummynode1", props, zkController); first.setup(firstContext); first.joinElection(firstContext, false); @@ -242,15 +264,23 @@ public void testCancelElection() throws Exception { assertEquals("original leader was not registered", url1, getLeaderUrl("collection2", "slice1")); LeaderElector second = new LeaderElector(zkClient); - props = new ZkNodeProps(ZkStateReader.NODE_NAME_PROP, nodeName, ZkStateReader.BASE_URL_PROP, - zkStateReader.getBaseUrlForNodeName(nodeName), ZkStateReader.CORE_NAME_PROP, "2"); + props = + new ZkNodeProps( + ZkStateReader.NODE_NAME_PROP, + nodeName, + ZkStateReader.BASE_URL_PROP, + zkStateReader.getBaseUrlForNodeName(nodeName), + ZkStateReader.CORE_NAME_PROP, + "2"); zkController = MockSolrSource.makeSimpleMock(null, null, zkClient); - ElectionContext context = new ShardLeaderElectionContextBase(second, - "slice1", "collection2", "dummynode2", props, zkController); + ElectionContext context = + new ShardLeaderElectionContextBase( + second, "slice1", "collection2", "dummynode2", props, zkController); second.setup(context); second.joinElection(context, false); Thread.sleep(1000); - assertEquals("original leader should have stayed leader", url1, getLeaderUrl("collection2", "slice1")); + assertEquals( + "original leader should have stayed leader", url1, getLeaderUrl("collection2", "slice1")); firstContext.cancelElection(); Thread.sleep(1000); assertEquals("new leader was not registered", url2, getLeaderUrl("collection2", "slice1")); @@ -261,11 +291,10 @@ private String getLeaderUrl(final String collection, final String slice) int iterCount = 60; while (iterCount-- > 0) { try { - byte[] data = zkClient.getData( - ZkStateReader.getShardLeadersPath(collection, slice), null, null, - true); - ZkCoreNodeProps leaderProps = new ZkCoreNodeProps( - ZkNodeProps.load(data)); + byte[] data = + zkClient.getData( + ZkStateReader.getShardLeadersPath(collection, slice), null, null, true); + ZkCoreNodeProps leaderProps = new ZkCoreNodeProps(ZkNodeProps.load(data)); return leaderProps.getCoreUrl(); } catch (NoNodeException | SessionExpiredException e) { Thread.sleep(500); @@ -275,7 +304,7 @@ private String getLeaderUrl(final String collection, final String slice) throw new RuntimeException("Could not get leader props for " + collection + " " + slice); } - private static void startAndJoinElection (List threads) throws InterruptedException { + private static void startAndJoinElection(List threads) throws InterruptedException { for (Thread thread : threads) { thread.start(); } @@ -351,14 +380,12 @@ public void testElection() throws Exception { for (ClientThread thread : threads) { thread.close(); thread.interrupt(); - } for (Thread thread : threads) { thread.join(); } } - } @Test @@ -383,7 +410,12 @@ public void testParallelElection() throws Exception { List replica2s = new ArrayList<>(); ElectorSetup es2 = new ElectorSetup(null); for (int i = 1; i <= numShards; i++) { - ClientThread thread = new ClientThread(es2, "parshard" + i, 2, 
40000 / (numShards - 1) /* delay enough to timeout or expire */); + ClientThread thread = + new ClientThread( + es2, + "parshard" + i, + 2, + 40000 / (numShards - 1) /* delay enough to timeout or expire */); threads.add(thread); replica2s.add(thread); } @@ -393,7 +425,7 @@ public void testParallelElection() throws Exception { // disconnect the leaders es1.close(); - for (int i = 1; i <= numShards; i ++) { + for (int i = 1; i <= numShards; i++) { // if this test fails, getLeaderUrl will more likely throw an exception and fail the test, // but add an assertEquals as well for good measure String leaderUrl = getLeaderUrl("collection1", "parshard" + i); @@ -441,10 +473,9 @@ private int getLeaderThread() throws KeeperException, InterruptedException { @Test public void testStressElection() throws Exception { - final ScheduledExecutorService scheduler = Executors - .newScheduledThreadPool(15, new SolrNamedThreadFactory("stressElection")); - final List threads = Collections - .synchronizedList(new ArrayList()); + final ScheduledExecutorService scheduler = + Executors.newScheduledThreadPool(15, new SolrNamedThreadFactory("stressElection")); + final List threads = Collections.synchronizedList(new ArrayList()); // start with a leader ClientThread thread1 = null; @@ -452,79 +483,80 @@ public void testStressElection() throws Exception { threads.add(thread1); scheduler.schedule(thread1, 0, TimeUnit.MILLISECONDS); - - - Thread scheduleThread = new Thread() { - @Override - public void run() { - int count = atLeast(5); - for (int i = 1; i < count; i++) { - int launchIn = random().nextInt(500); - ClientThread thread = null; - try { - thread = new ClientThread("shard1", i); - } catch (Exception e) { - // - } - if (thread != null) { - threads.add(thread); - scheduler.schedule(thread, launchIn, TimeUnit.MILLISECONDS); - } - } - } - }; - - Thread killThread = new Thread() { - @Override - public void run() { - - while (!stopStress) { - try { - int j; - try { - // always 1 we won't kill... - j = random().nextInt(threads.size() - 2); - } catch(IllegalArgumentException e) { - continue; - } - try { - threads.get(j).close(); - } catch (Exception e) { + Thread scheduleThread = + new Thread() { + @Override + public void run() { + int count = atLeast(5); + for (int i = 1; i < count; i++) { + int launchIn = random().nextInt(500); + ClientThread thread = null; + try { + thread = new ClientThread("shard1", i); + } catch (Exception e) { + // + } + if (thread != null) { + threads.add(thread); + scheduler.schedule(thread, launchIn, TimeUnit.MILLISECONDS); + } } - - Thread.sleep(10); - } catch (Exception e) { } - } - } - }; - - Thread connLossThread = new Thread() { - @Override - public void run() { - - while (!stopStress) { - try { - Thread.sleep(50); - int j; - j = random().nextInt(threads.size()); - try { - threads.get(j).es.zkClient.getSolrZooKeeper().closeCnxn(); - if (random().nextBoolean()) { - long sessionId = zkClient.getSolrZooKeeper().getSessionId(); - server.expire(sessionId); + }; + + Thread killThread = + new Thread() { + @Override + public void run() { + + while (!stopStress) { + try { + int j; + try { + // always 1 we won't kill... 
+ j = random().nextInt(threads.size() - 2); + } catch (IllegalArgumentException e) { + continue; + } + try { + threads.get(j).close(); + } catch (Exception e) { + } + + Thread.sleep(10); + } catch (Exception e) { } - } catch (Exception e) { - e.printStackTrace(); } - Thread.sleep(500); - - } catch (Exception e) { + } + }; + + Thread connLossThread = + new Thread() { + @Override + public void run() { + + while (!stopStress) { + try { + Thread.sleep(50); + int j; + j = random().nextInt(threads.size()); + try { + threads.get(j).es.zkClient.getSolrZooKeeper().closeCnxn(); + if (random().nextBoolean()) { + long sessionId = zkClient.getSolrZooKeeper().getSessionId(); + server.expire(sessionId); + } + } catch (Exception e) { + e.printStackTrace(); + } + Thread.sleep(500); + + } catch (Exception e) { + } + } } - } - } - }; + }; scheduleThread.start(); connLossThread.start(); @@ -557,8 +589,6 @@ public void run() { for (Thread thread : threads) { thread.join(); } - - } @Override diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java index fc340e05f33..a09fca25a95 100644 --- a/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java @@ -16,6 +16,12 @@ */ package org.apache.solr.cloud; +import java.lang.invoke.MethodHandles; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.SolrTestCaseJ4.SuppressSSL; import org.apache.solr.client.solrj.cloud.SocketProxy; @@ -27,16 +33,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.lang.invoke.MethodHandles; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.concurrent.TimeUnit; - /** - * Tests leader-initiated recovery scenarios after a leader node fails - * and one of the replicas is out-of-sync. + * Tests leader-initiated recovery scenarios after a leader node fails and one of the replicas is + * out-of-sync. 
*/ @Slow @SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776") @@ -48,9 +47,7 @@ public LeaderFailoverAfterPartitionTest() { super(); } - @Test - //28-June-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") public void test() throws Exception { waitForThingsToLevelOut(30, TimeUnit.SECONDS); @@ -65,61 +62,70 @@ protected void testRf3WithLeaderFailover() throws Exception { String testCollectionName = "c8n_1x3_lf"; // _lf is leader fails createCollection(testCollectionName, "conf1", 1, 3); cloudClient.setDefaultCollection(testCollectionName); - + sendDoc(1); - - List notLeaders = + + List notLeaders = ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 3, maxWaitSecsToSeeAllActive); - assertTrue("Expected 2 replicas for collection " + testCollectionName - + " but found " + notLeaders.size() + "; clusterState: " - + printClusterStateInfo(testCollectionName), + assertTrue( + "Expected 2 replicas for collection " + + testCollectionName + + " but found " + + notLeaders.size() + + "; clusterState: " + + printClusterStateInfo(testCollectionName), notLeaders.size() == 2); - + // ok, now introduce a network partition between the leader and the replica SocketProxy proxy0 = null; proxy0 = getProxyForReplica(notLeaders.get(0)); - + proxy0.close(); - + // indexing during a partition sendDoc(2); - + Thread.sleep(sleepMsBeforeHealPartition); - + proxy0.reopen(); - + SocketProxy proxy1 = getProxyForReplica(notLeaders.get(1)); - + proxy1.close(); - + sendDoc(3); - + Thread.sleep(sleepMsBeforeHealPartition); proxy1.reopen(); - + // sent 4 docs in so far, verify they are on the leader and replica - notLeaders = ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 3, maxWaitSecsToSeeAllActive); - + notLeaders = + ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 3, maxWaitSecsToSeeAllActive); + sendDoc(4); - - assertDocsExistInAllReplicas(notLeaders, testCollectionName, 1, 4); - - Replica leader = - cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1"); + + assertDocsExistInAllReplicas(notLeaders, testCollectionName, 1, 4); + + Replica leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1"); String leaderNode = leader.getNodeName(); - assertNotNull("Could not find leader for shard1 of "+ - testCollectionName+"; clusterState: "+printClusterStateInfo(testCollectionName), leader); + assertNotNull( + "Could not find leader for shard1 of " + + testCollectionName + + "; clusterState: " + + printClusterStateInfo(testCollectionName), + leader); JettySolrRunner leaderJetty = getJettyOnPort(getReplicaPort(leader)); - + // since maxShardsPerNode is 1, we're safe to kill the leader - notLeaders = ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 3, maxWaitSecsToSeeAllActive); + notLeaders = + ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 3, maxWaitSecsToSeeAllActive); proxy0 = getProxyForReplica(notLeaders.get(0)); proxy0.close(); - + // indexing during a partition // doc should be on leader and 1 replica sendDoc(5); - + try (HttpSolrClient server = getHttpSolrClient(leader, testCollectionName)) { assertDocExists(server, testCollectionName, "5"); } @@ -127,39 +133,42 @@ protected void testRf3WithLeaderFailover() throws Exception { try (HttpSolrClient server = getHttpSolrClient(notLeaders.get(1), testCollectionName)) { assertDocExists(server, testCollectionName, "5"); } - + Thread.sleep(sleepMsBeforeHealPartition); - + String shouldNotBeNewLeaderNode = notLeaders.get(0).getNodeName(); 
- //chaosMonkey.expireSession(leaderJetty); + // chaosMonkey.expireSession(leaderJetty); // kill the leader leaderJetty.stop(); - if (leaderJetty.isRunning()) - fail("Failed to stop the leader on "+leaderNode); - + if (leaderJetty.isRunning()) fail("Failed to stop the leader on " + leaderNode); + SocketProxy oldLeaderProxy = getProxyForReplica(leader); if (oldLeaderProxy != null) { - oldLeaderProxy.close(); + oldLeaderProxy.close(); } else { - log.warn("No SocketProxy found for old leader node {}",leaderNode); + log.warn("No SocketProxy found for old leader node {}", leaderNode); } Thread.sleep(10000); // give chance for new leader to be elected. - - Replica newLeader = + + Replica newLeader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1", 60000); - - assertNotNull("No new leader was elected after 60 seconds; clusterState: "+ - printClusterStateInfo(testCollectionName),newLeader); - - assertTrue("Expected node "+shouldNotBeNewLeaderNode+ - " to NOT be the new leader b/c it was out-of-sync with the old leader! ClusterState: "+ - printClusterStateInfo(testCollectionName), + + assertNotNull( + "No new leader was elected after 60 seconds; clusterState: " + + printClusterStateInfo(testCollectionName), + newLeader); + + assertTrue( + "Expected node " + + shouldNotBeNewLeaderNode + + " to NOT be the new leader b/c it was out-of-sync with the old leader! ClusterState: " + + printClusterStateInfo(testCollectionName), !shouldNotBeNewLeaderNode.equals(newLeader.getNodeName())); - + proxy0.reopen(); - + long timeout = System.nanoTime() + TimeUnit.NANOSECONDS.convert(90, TimeUnit.SECONDS); while (System.nanoTime() < timeout) { List activeReps = getActiveOrRecoveringReplicas(testCollectionName, "shard1"); @@ -167,10 +176,15 @@ protected void testRf3WithLeaderFailover() throws Exception { Thread.sleep(1000); } - List participatingReplicas = getActiveOrRecoveringReplicas(testCollectionName, "shard1"); - assertTrue("Expected 2 of 3 replicas to be active but only found "+ - participatingReplicas.size()+"; "+participatingReplicas+"; clusterState: "+ - printClusterStateInfo(testCollectionName), + List participatingReplicas = + getActiveOrRecoveringReplicas(testCollectionName, "shard1"); + assertTrue( + "Expected 2 of 3 replicas to be active but only found " + + participatingReplicas.size() + + "; " + + participatingReplicas + + "; clusterState: " + + printClusterStateInfo(testCollectionName), participatingReplicas.size() >= 2); SolrInputDocument doc = new SolrInputDocument(); @@ -179,8 +193,7 @@ protected void testRf3WithLeaderFailover() throws Exception { sendDocsWithRetry(Collections.singletonList(doc), 1, 3, 1); Set replicasToCheck = new HashSet<>(); - for (Replica stillUp : participatingReplicas) - replicasToCheck.add(stillUp.getName()); + for (Replica stillUp : participatingReplicas) replicasToCheck.add(stillUp.getName()); waitToSeeReplicasActive(testCollectionName, "shard1", replicasToCheck, 90); assertDocsExistInAllReplicas(participatingReplicas, testCollectionName, 1, 6); diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java index 8cb40ddfbbf..71726706229 100644 --- a/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java @@ -17,6 +17,8 @@ package org.apache.solr.cloud; +import static java.util.Collections.singletonList; + import 
java.io.IOException; import java.lang.invoke.MethodHandles; import java.nio.file.Files; @@ -28,7 +30,6 @@ import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; - import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.lang3.RandomStringUtils; import org.apache.lucene.util.LuceneTestCase.Slow; @@ -48,13 +49,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static java.util.Collections.singletonList; - /** - * * Test for SOLR-9446 * - * This test is modeled after SyncSliceTest + *
<p>
This test is modeled after SyncSliceTest */ @Slow public class LeaderFailureAfterFreshStartTest extends AbstractFullDistribZkTestBase { @@ -105,15 +103,21 @@ public void test() throws Exception { try { CloudJettyRunner initialLeaderJetty = shardToLeaderJetty.get("shard1"); List otherJetties = getOtherAvailableJetties(initialLeaderJetty); - - log.info("Leader node_name: {}, url: {}", initialLeaderJetty.coreNodeName, initialLeaderJetty.url); + + log.info( + "Leader node_name: {}, url: {}", + initialLeaderJetty.coreNodeName, + initialLeaderJetty.url); for (CloudJettyRunner cloudJettyRunner : otherJetties) { - log.info("Nonleader node_name: {}, url: {}", cloudJettyRunner.coreNodeName, cloudJettyRunner.url); + log.info( + "Nonleader node_name: {}, url: {}", + cloudJettyRunner.coreNodeName, + cloudJettyRunner.url); } - + CloudJettyRunner secondNode = otherJetties.get(0); CloudJettyRunner freshNode = otherJetties.get(1); - + // shutdown a node to simulate fresh start otherJetties.remove(freshNode); forceNodeFailures(singletonList(freshNode)); @@ -125,47 +129,54 @@ public void test() throws Exception { // index a few docs and commit for (int i = 0; i < 100; i++) { - indexDoc(id, docId, i1, 50, tlong, 50, t1, - "document number " + docId++); + indexDoc(id, docId, i1, 50, tlong, 50, t1, "document number " + docId++); } commit(); waitForThingsToLevelOut(30, TimeUnit.SECONDS); checkShardConsistency(false, true); - - // bring down the other node and index a few docs; so the leader and other node segments diverge + + // bring down the other node and index a few docs; so the leader and other node segments + // diverge forceNodeFailures(singletonList(secondNode)); for (int i = 0; i < 10; i++) { - indexDoc(id, docId, i1, 50, tlong, 50, t1, - "document number " + docId++); - if(i % 2 == 0) { + indexDoc(id, docId, i1, 50, tlong, 50, t1, "document number " + docId++); + if (i % 2 == 0) { commit(); } } commit(); restartNodes(singletonList(secondNode)); - // start the freshNode + // start the freshNode restartNodes(singletonList(freshNode)); String coreName = freshNode.jetty.getCoreContainer().getCores().iterator().next().getName(); - String replicationProperties = freshNode.jetty.getSolrHome() + "/cores/" + coreName + "/data/replication.properties"; + String replicationProperties = + freshNode.jetty.getSolrHome() + "/cores/" + coreName + "/data/replication.properties"; String md5 = DigestUtils.md5Hex(Files.readAllBytes(Paths.get(replicationProperties))); - + // shutdown the original leader log.info("Now shutting down initial leader"); forceNodeFailures(singletonList(initialLeaderJetty)); - waitForNewLeader(cloudClient, "shard1", (Replica)initialLeaderJetty.client.info , new TimeOut(15, TimeUnit.SECONDS, TimeSource.NANO_TIME)); + waitForNewLeader( + cloudClient, + "shard1", + (Replica) initialLeaderJetty.client.info, + new TimeOut(15, TimeUnit.SECONDS, TimeSource.NANO_TIME)); waitTillNodesActive(); log.info("Updating mappings from zk"); updateMappingsFromZk(jettys, clients, true); - assertEquals("Node went into replication", md5, DigestUtils.md5Hex(Files.readAllBytes(Paths.get(replicationProperties)))); - + assertEquals( + "Node went into replication", + md5, + DigestUtils.md5Hex(Files.readAllBytes(Paths.get(replicationProperties)))); + success = true; } finally { System.clearProperty("solr.disableFingerprint"); } } - + private void restartNodes(List nodesToRestart) throws Exception { for (CloudJettyRunner node : nodesToRestart) { node.jetty.start(); @@ -175,7 +186,6 @@ private void restartNodes(List 
nodesToRestart) throws Exceptio checkShardConsistency(false, true); } - private void forceNodeFailures(List replicasToShutDown) throws Exception { for (CloudJettyRunner replicaToShutDown : replicasToShutDown) { replicaToShutDown.jetty.stop(); @@ -199,8 +209,6 @@ private void forceNodeFailures(List replicasToShutDown) throws nodesDown.addAll(replicasToShutDown); } - - private void waitTillNodesActive() throws Exception { for (int i = 0; i < 60; i++) { Thread.sleep(3000); @@ -211,17 +219,18 @@ private void waitTillNodesActive() throws Exception { Collection replicas = slice.getReplicas(); boolean allActive = true; - Collection nodesDownNames = nodesDown.stream() - .map(n -> n.coreNodeName) - .collect(Collectors.toList()); - + Collection nodesDownNames = + nodesDown.stream().map(n -> n.coreNodeName).collect(Collectors.toList()); + Collection replicasToCheck = null; - replicasToCheck = replicas.stream() - .filter(r -> !nodesDownNames.contains(r.getName())) - .collect(Collectors.toList()); + replicasToCheck = + replicas.stream() + .filter(r -> !nodesDownNames.contains(r.getName())) + .collect(Collectors.toList()); for (Replica replica : replicasToCheck) { - if (!clusterState.liveNodesContain(replica.getNodeName()) || replica.getState() != Replica.State.ACTIVE) { + if (!clusterState.liveNodesContain(replica.getNodeName()) + || replica.getState() != Replica.State.ACTIVE) { allActive = false; break; } @@ -234,7 +243,6 @@ private void waitTillNodesActive() throws Exception { fail("timeout waiting to see all nodes active"); } - private List getOtherAvailableJetties(CloudJettyRunner leader) { List candidates = new ArrayList<>(); candidates.addAll(shardToJetty.get("shard1")); @@ -248,8 +256,7 @@ private List getOtherAvailableJetties(CloudJettyRunner leader) return candidates; } - protected void indexDoc(Object... fields) throws IOException, - SolrServerException { + protected void indexDoc(Object... fields) throws IOException, SolrServerException { SolrInputDocument doc = new SolrInputDocument(); addFields(doc, fields); @@ -270,5 +277,4 @@ protected void indexr(Object... 
fields) throws Exception { addFields(doc, "rnd_b", true); indexDoc(doc); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java index f15ad47326f..249f4e2eaeb 100644 --- a/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/LeaderTragicEventTest.java @@ -17,6 +17,11 @@ package org.apache.solr.cloud; +import static org.hamcrest.CoreMatchers.anyOf; +import static org.hamcrest.CoreMatchers.is; + +import java.io.IOException; +import java.lang.invoke.MethodHandles; import org.apache.lucene.store.AlreadyClosedException; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; @@ -42,12 +47,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.lang.invoke.MethodHandles; - -import static org.hamcrest.CoreMatchers.anyOf; -import static org.hamcrest.CoreMatchers.is; - public class LeaderTragicEventTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -56,7 +55,8 @@ public class LeaderTragicEventTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { configureCluster(2) - .addConfig("config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .configure(); } @@ -75,36 +75,47 @@ public void tearDown() throws Exception { @Test public void testLeaderFailsOver() throws Exception { - CollectionAdminRequest - .createCollection(collection, "config", 1, 2) + CollectionAdminRequest.createCollection(collection, "config", 1, 2) .process(cluster.getSolrClient()); cluster.waitForActiveCollection(collection, 1, 2); - UpdateResponse updateResponse = new UpdateRequest().add("id", "1").commit(cluster.getSolrClient(), null); + UpdateResponse updateResponse = + new UpdateRequest().add("id", "1").commit(cluster.getSolrClient(), null); assertEquals(0, updateResponse.getStatus()); Replica oldLeader = corruptLeader(collection); - waitForState("Now waiting for new replica to become leader", collection, (liveNodes, collectionState) -> { - Slice slice = collectionState.getSlice("shard1"); + waitForState( + "Now waiting for new replica to become leader", + collection, + (liveNodes, collectionState) -> { + Slice slice = collectionState.getSlice("shard1"); - if (slice.getReplicas().size() != 2) return false; - if (slice.getLeader() == null) return false; - if (slice.getLeader().getName().equals(oldLeader.getName())) return false; + if (slice.getReplicas().size() != 2) return false; + if (slice.getLeader() == null) return false; + if (slice.getLeader().getName().equals(oldLeader.getName())) return false; - return true; - }); - ClusterStateUtil.waitForAllActiveAndLiveReplicas(cluster.getSolrClient().getZkStateReader(), collection, 120000); + return true; + }); + ClusterStateUtil.waitForAllActiveAndLiveReplicas( + cluster.getSolrClient().getZkStateReader(), collection, 120000); Slice shard = getCollectionState(collection).getSlice("shard1"); - assertNotEquals("Old leader should not be leader again", oldLeader.getNodeName(), shard.getLeader().getNodeName()); - assertEquals("Old leader should be a follower", oldLeader.getNodeName(), getNonLeader(shard).getNodeName()); + assertNotEquals( + "Old leader should not be leader again", + 
oldLeader.getNodeName(), + shard.getLeader().getNodeName()); + assertEquals( + "Old leader should be a follower", + oldLeader.getNodeName(), + getNonLeader(shard).getNodeName()); // Check that we can continue indexing after this updateResponse = new UpdateRequest().add("id", "2").commit(cluster.getSolrClient(), null); assertEquals(0, updateResponse.getStatus()); try (SolrClient followerClient = new HttpSolrClient.Builder(oldLeader.getCoreUrl()).build()) { QueryResponse queryResponse = new QueryRequest(new SolrQuery("*:*")).process(followerClient); - assertEquals(queryResponse.getResults().toString(), 2, queryResponse.getResults().getNumFound()); + assertEquals( + queryResponse.getResults().toString(), 2, queryResponse.getResults().getNumFound()); } } @@ -116,7 +127,8 @@ private Replica corruptLeader(String collection) throws IOException, SolrServerE Replica oldLeader = dc.getLeader("shard1"); log.info("Will crash leader : {}", oldLeader); - try (HttpSolrClient solrClient = new HttpSolrClient.Builder(dc.getLeader("shard1").getCoreUrl()).build()) { + try (HttpSolrClient solrClient = + new HttpSolrClient.Builder(dc.getLeader("shard1").getCoreUrl()).build()) { new UpdateRequest().add("id", "99").commit(solrClient, null); fail("Should have injected tragedy"); } catch (RemoteSolrException e) { @@ -142,33 +154,37 @@ private Replica getNonLeader(Slice slice) { public void testOtherReplicasAreNotActive() throws Exception { int numReplicas = random().nextInt(2) + 1; // won't do anything if leader is the only one active replica in the shard - CollectionAdminRequest - .createCollection(collection, "config", 1, numReplicas) + CollectionAdminRequest.createCollection(collection, "config", 1, numReplicas) .process(cluster.getSolrClient()); cluster.waitForActiveCollection(collection, 1, numReplicas); - JettySolrRunner otherReplicaJetty = null; - if (numReplicas == 2) { - Slice shard = getCollectionState(collection).getSlice("shard1"); - otherReplicaJetty = cluster.getReplicaJetty(getNonLeader(shard)); - if (log.isInfoEnabled()) { - log.info("Stop jetty node : {} state:{}", otherReplicaJetty.getBaseUrl(), getCollectionState(collection)); - } - otherReplicaJetty.stop(); - cluster.waitForJettyToStop(otherReplicaJetty); - waitForState("Timeout waiting for replica get down", collection, (liveNodes, collectionState) -> getNonLeader(collectionState.getSlice("shard1")).getState() != Replica.State.ACTIVE); + JettySolrRunner otherReplicaJetty = null; + if (numReplicas == 2) { + Slice shard = getCollectionState(collection).getSlice("shard1"); + otherReplicaJetty = cluster.getReplicaJetty(getNonLeader(shard)); + if (log.isInfoEnabled()) { + log.info( + "Stop jetty node : {} state:{}", + otherReplicaJetty.getBaseUrl(), + getCollectionState(collection)); } + otherReplicaJetty.stop(); + cluster.waitForJettyToStop(otherReplicaJetty); + waitForState( + "Timeout waiting for replica get down", + collection, + (liveNodes, collectionState) -> + getNonLeader(collectionState.getSlice("shard1")).getState() != Replica.State.ACTIVE); + } - Replica oldLeader = corruptLeader(collection); + Replica oldLeader = corruptLeader(collection); - if (otherReplicaJetty != null) { - otherReplicaJetty.start(); - cluster.waitForNode(otherReplicaJetty, 30); - } + if (otherReplicaJetty != null) { + otherReplicaJetty.start(); + cluster.waitForNode(otherReplicaJetty, 30); + } - Replica leader = getCollectionState(collection).getSlice("shard1").getLeader(); - assertEquals(leader.getName(), oldLeader.getName()); + Replica leader = 
getCollectionState(collection).getSlice("shard1").getLeader(); + assertEquals(leader.getName(), oldLeader.getName()); } - - } diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java index 3c3f48faff8..95cd25e920a 100644 --- a/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java @@ -17,6 +17,9 @@ package org.apache.solr.cloud; +import static org.apache.solr.common.cloud.ZkStateReader.HTTP; +import static org.apache.solr.common.cloud.ZkStateReader.URL_SCHEME; + import java.io.IOException; import java.lang.invoke.MethodHandles; import java.net.URI; @@ -26,7 +29,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; - import org.apache.solr.JSONTestUtil; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.cloud.SocketProxy; @@ -47,9 +49,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.common.cloud.ZkStateReader.HTTP; -import static org.apache.solr.common.cloud.ZkStateReader.URL_SCHEME; - public class LeaderVoteWaitTimeoutTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -94,7 +93,7 @@ public void setupTest() throws Exception { for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { SocketProxy proxy = new SocketProxy(); jetty.setProxyPort(proxy.getListenPort()); - cluster.stopJettySolrRunner(jetty);// TODO: Can we avoid this restart + cluster.stopJettySolrRunner(jetty); // TODO: Can we avoid this restart cluster.startJettySolrRunner(jetty); proxy.open(jetty.getBaseUrl().toURI()); if (log.isInfoEnabled()) { @@ -104,7 +103,7 @@ public void setupTest() throws Exception { jettys.put(proxy.getUrl(), jetty); } } - + @After public void tearDown() throws Exception { if (null != proxies) { @@ -127,39 +126,52 @@ public void basicTest() throws Exception { cluster.getSolrClient().add(collectionName, new SolrInputDocument("id", "2")); cluster.getSolrClient().commit(collectionName); - try (ZkShardTerms zkShardTerms = new ZkShardTerms(collectionName, "shard1", cluster.getZkClient())) { + try (ZkShardTerms zkShardTerms = + new ZkShardTerms(collectionName, "shard1", cluster.getZkClient())) { assertEquals(1, zkShardTerms.getTerms().size()); assertEquals(1L, zkShardTerms.getHighestTerm()); } - String nodeName = cluster.getJettySolrRunner(0).getCoreContainer().getZkController().getNodeName(); - + String nodeName = + cluster.getJettySolrRunner(0).getCoreContainer().getZkController().getNodeName(); + JettySolrRunner j = cluster.getJettySolrRunner(0); j.stop(); cluster.waitForJettyToStop(j); - - cluster.getSolrClient().getZkStateReader().waitForState(collectionName, 10, TimeUnit.SECONDS, (liveNodes, collectionState) -> !liveNodes.contains(nodeName)); + + cluster + .getSolrClient() + .getZkStateReader() + .waitForState( + collectionName, + 10, + TimeUnit.SECONDS, + (liveNodes, collectionState) -> !liveNodes.contains(nodeName)); CollectionAdminRequest.addReplicaToShard(collectionName, "shard1") .setNode(cluster.getJettySolrRunner(1).getNodeName()) .process(cluster.getSolrClient()); - waitForState("Timeout waiting for replica win the election", collectionName, (liveNodes, collectionState) -> { - Replica newLeader = collectionState.getSlice("shard1").getLeader(); - if (newLeader == null) { - return false; - } - return 
newLeader.getNodeName().equals(cluster.getJettySolrRunner(1).getNodeName()); - }); - - try (ZkShardTerms zkShardTerms = new ZkShardTerms(collectionName, "shard1", cluster.getZkClient())) { + waitForState( + "Timeout waiting for replica win the election", + collectionName, + (liveNodes, collectionState) -> { + Replica newLeader = collectionState.getSlice("shard1").getLeader(); + if (newLeader == null) { + return false; + } + return newLeader.getNodeName().equals(cluster.getJettySolrRunner(1).getNodeName()); + }); + + try (ZkShardTerms zkShardTerms = + new ZkShardTerms(collectionName, "shard1", cluster.getZkClient())) { Replica newLeader = getCollectionState(collectionName).getSlice("shard1").getLeader(); assertEquals(2, zkShardTerms.getTerms().size()); assertEquals(1L, zkShardTerms.getTerm(newLeader.getName())); } cluster.getJettySolrRunner(0).start(); - + cluster.waitForAllNodes(30); CollectionAdminRequest.deleteCollection(collectionName).process(cluster.getSolrClient()); } @@ -173,9 +185,9 @@ public void testMostInSyncReplicasCanWinElection() throws Exception { CollectionAdminRequest.addReplicaToShard(collectionName, "shard1") .setNode(cluster.getJettySolrRunner(0).getNodeName()) .process(cluster.getSolrClient()); - + cluster.waitForActiveCollection(collectionName, 1, 1); - + waitForState("Timeout waiting for shard leader", collectionName, clusterShape(1, 1)); Replica leader = getCollectionState(collectionName).getSlice("shard1").getLeader(); @@ -183,22 +195,32 @@ public void testMostInSyncReplicasCanWinElection() throws Exception { CollectionAdminRequest.addReplicaToShard(collectionName, "shard1") .setNode(cluster.getJettySolrRunner(1).getNodeName()) .process(cluster.getSolrClient()); - + cluster.waitForActiveCollection(collectionName, 1, 2); - + waitForState("Timeout waiting for 1x2 collection", collectionName, clusterShape(1, 2)); - Replica replica1 = getCollectionState(collectionName).getSlice("shard1") - .getReplicas(replica -> replica.getNodeName().equals(cluster.getJettySolrRunner(1).getNodeName())).get(0); + Replica replica1 = + getCollectionState(collectionName) + .getSlice("shard1") + .getReplicas( + replica -> + replica.getNodeName().equals(cluster.getJettySolrRunner(1).getNodeName())) + .get(0); CollectionAdminRequest.addReplicaToShard(collectionName, "shard1") .setNode(cluster.getJettySolrRunner(2).getNodeName()) .process(cluster.getSolrClient()); - + cluster.waitForActiveCollection(collectionName, 1, 3); - + waitForState("Timeout waiting for 1x3 collection", collectionName, clusterShape(1, 3)); - Replica replica2 = getCollectionState(collectionName).getSlice("shard1") - .getReplicas(replica -> replica.getNodeName().equals(cluster.getJettySolrRunner(2).getNodeName())).get(0); + Replica replica2 = + getCollectionState(collectionName) + .getSlice("shard1") + .getReplicas( + replica -> + replica.getNodeName().equals(cluster.getJettySolrRunner(2).getNodeName())) + .get(0); cluster.getSolrClient().add(collectionName, new SolrInputDocument("id", "1")); cluster.getSolrClient().add(collectionName, new SolrInputDocument("id", "2")); @@ -211,7 +233,8 @@ public void testMostInSyncReplicasCanWinElection() throws Exception { addDoc(collectionName, 3, cluster.getJettySolrRunner(0)); - try (ZkShardTerms zkShardTerms = new ZkShardTerms(collectionName, "shard1", cluster.getZkClient())) { + try (ZkShardTerms zkShardTerms = + new ZkShardTerms(collectionName, "shard1", cluster.getZkClient())) { assertEquals(3, zkShardTerms.getTerms().size()); assertEquals(zkShardTerms.getHighestTerm(), 
zkShardTerms.getTerm(leader.getName())); assertEquals(zkShardTerms.getHighestTerm(), zkShardTerms.getTerm(replica2.getName())); @@ -221,34 +244,40 @@ public void testMostInSyncReplicasCanWinElection() throws Exception { proxies.get(cluster.getJettySolrRunner(2)).close(); addDoc(collectionName, 4, cluster.getJettySolrRunner(0)); - try (ZkShardTerms zkShardTerms = new ZkShardTerms(collectionName, "shard1", cluster.getZkClient())) { + try (ZkShardTerms zkShardTerms = + new ZkShardTerms(collectionName, "shard1", cluster.getZkClient())) { assertEquals(3, zkShardTerms.getTerms().size()); assertEquals(zkShardTerms.getHighestTerm(), zkShardTerms.getTerm(leader.getName())); assertTrue(zkShardTerms.getHighestTerm() > zkShardTerms.getTerm(replica2.getName())); assertTrue(zkShardTerms.getHighestTerm() > zkShardTerms.getTerm(replica1.getName())); - assertTrue(zkShardTerms.getTerm(replica2.getName()) > zkShardTerms.getTerm(replica1.getName())); + assertTrue( + zkShardTerms.getTerm(replica2.getName()) > zkShardTerms.getTerm(replica1.getName())); } proxies.get(cluster.getJettySolrRunner(1)).reopen(); proxies.get(cluster.getJettySolrRunner(2)).reopen(); - - + JettySolrRunner j = cluster.getJettySolrRunner(0); j.stop(); cluster.waitForJettyToStop(j); try { // even replica2 joined election at the end of the queue, but it is the one with highest term - waitForState("Timeout waiting for new leader", collectionName, (liveNodes, collectionState) -> { - Replica newLeader = collectionState.getSlice("shard1").getLeader(); - if (newLeader == null) { - return false; - } - return newLeader.getName().equals(replica2.getName()); - }); + waitForState( + "Timeout waiting for new leader", + collectionName, + (liveNodes, collectionState) -> { + Replica newLeader = collectionState.getSlice("shard1").getLeader(); + if (newLeader == null) { + return false; + } + return newLeader.getName().equals(replica2.getName()); + }); } catch (Exception e) { - List children = zkClient().getChildren("/collections/"+collectionName+"/leader_elect/shard1/election", - null, true); + List children = + zkClient() + .getChildren( + "/collections/" + collectionName + "/leader_elect/shard1/election", null, true); log.info("{} election nodes:{}", collectionName, children); throw e; } @@ -260,21 +289,25 @@ public void testMostInSyncReplicasCanWinElection() throws Exception { CollectionAdminRequest.deleteCollection(collectionName).process(cluster.getSolrClient()); } - - private void addDoc(String collection, int docId, JettySolrRunner solrRunner) throws IOException, SolrServerException { - try (HttpSolrClient solrClient = new HttpSolrClient.Builder(solrRunner.getBaseUrl().toString()).build()) { + private void addDoc(String collection, int docId, JettySolrRunner solrRunner) + throws IOException, SolrServerException { + try (HttpSolrClient solrClient = + new HttpSolrClient.Builder(solrRunner.getBaseUrl().toString()).build()) { solrClient.add(collection, new SolrInputDocument("id", String.valueOf(docId))); solrClient.commit(collection); } } - private void assertDocsExistInAllReplicas(List notLeaders, - String testCollectionName, int firstDocId, int lastDocId) throws Exception { + private void assertDocsExistInAllReplicas( + List notLeaders, String testCollectionName, int firstDocId, int lastDocId) + throws Exception { Replica leader = - cluster.getSolrClient().getZkStateReader().getLeaderRetry(testCollectionName, "shard1", 10000); + cluster + .getSolrClient() + .getZkStateReader() + .getLeaderRetry(testCollectionName, "shard1", 10000); HttpSolrClient 
leaderSolr = getHttpSolrClient(leader, testCollectionName); - List replicas = - new ArrayList(notLeaders.size()); + List replicas = new ArrayList(notLeaders.size()); for (Replica r : notLeaders) { replicas.add(getHttpSolrClient(r, testCollectionName)); @@ -300,11 +333,20 @@ private void assertDocsExistInAllReplicas(List notLeaders, private void assertDocExists(HttpSolrClient solr, String coll, String docId) throws Exception { NamedList rsp = realTimeGetDocId(solr, docId); String match = JSONTestUtil.matchObj("/id", rsp.get("doc"), docId); - assertTrue("Doc with id=" + docId + " not found in " + solr.getBaseURL() - + " due to: " + match + "; rsp="+rsp, match == null); + assertTrue( + "Doc with id=" + + docId + + " not found in " + + solr.getBaseURL() + + " due to: " + + match + + "; rsp=" + + rsp, + match == null); } - private NamedList realTimeGetDocId(HttpSolrClient solr, String docId) throws SolrServerException, IOException { + private NamedList realTimeGetDocId(HttpSolrClient solr, String docId) + throws SolrServerException, IOException { QueryRequest qr = new QueryRequest(params("qt", "/get", "id", docId, "distrib", "false")); return solr.request(qr); } @@ -314,6 +356,4 @@ protected HttpSolrClient getHttpSolrClient(Replica replica, String coll) throws String url = zkProps.getBaseUrl() + "/" + coll; return getHttpSolrClient(url); } - - } diff --git a/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java b/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java index edd23b50e34..2a451a84ea4 100644 --- a/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java @@ -20,7 +20,6 @@ import java.lang.invoke.MethodHandles; import java.util.Map; import java.util.concurrent.TimeUnit; - import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; @@ -47,14 +46,14 @@ public class MigrateRouteKeyTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { - configureCluster(2) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(2).addConfig("conf", configset("cloud-minimal")).configure(); } private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private boolean waitForRuleToExpire(String collection, String shard, String splitKey, long finishTime) throws KeeperException, InterruptedException, SolrServerException, IOException { + private boolean waitForRuleToExpire( + String collection, String shard, String splitKey, long finishTime) + throws KeeperException, InterruptedException, SolrServerException, IOException { DocCollection state; Slice slice; boolean ruleRemoved = false; @@ -63,7 +62,7 @@ private boolean waitForRuleToExpire(String collection, String shard, String spli cluster.getSolrClient().getZkStateReader().forceUpdateCollection(collection); state = getCollectionState(collection); slice = state.getSlice(shard); - Map routingRules = slice.getRoutingRules(); + Map routingRules = slice.getRoutingRules(); if (routingRules == null || routingRules.isEmpty() || !routingRules.containsKey(splitKey)) { ruleRemoved = true; break; @@ -76,12 +75,14 @@ private boolean waitForRuleToExpire(String collection, String shard, String spli return ruleRemoved; } - protected void invokeCollectionMigration(CollectionAdminRequest.AsyncCollectionAdminRequest request) throws IOException, SolrServerException, 
InterruptedException { + protected void invokeCollectionMigration( + CollectionAdminRequest.AsyncCollectionAdminRequest request) + throws IOException, SolrServerException, InterruptedException { request.processAndWait(cluster.getSolrClient(), 60000); } @Test - public void testMissingSplitKey() throws Exception { + public void testMissingSplitKey() throws Exception { String sourceCollection = "testMissingSplitKey-source"; CollectionAdminRequest.createCollection(sourceCollection, "conf", 1, 1) .process(cluster.getSolrClient()); @@ -89,41 +90,46 @@ public void testMissingSplitKey() throws Exception { CollectionAdminRequest.createCollection(targetCollection, "conf", 1, 1) .process(cluster.getSolrClient()); - BaseHttpSolrClient.RemoteSolrException remoteSolrException = expectThrows(BaseHttpSolrClient.RemoteSolrException.class, - "Expected an exception in case split.key is not specified", () -> { - CollectionAdminRequest.migrateData(sourceCollection, targetCollection, "") - .setForwardTimeout(45) - .process(cluster.getSolrClient()); - }); + BaseHttpSolrClient.RemoteSolrException remoteSolrException = + expectThrows( + BaseHttpSolrClient.RemoteSolrException.class, + "Expected an exception in case split.key is not specified", + () -> { + CollectionAdminRequest.migrateData(sourceCollection, targetCollection, "") + .setForwardTimeout(45) + .process(cluster.getSolrClient()); + }); assertTrue(remoteSolrException.getMessage().contains("split.key cannot be null or empty")); } @Test - public void multipleShardMigrateTest() throws Exception { + public void multipleShardMigrateTest() throws Exception { - CollectionAdminRequest.createCollection("sourceCollection", "conf", 2, 1).process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection("sourceCollection", "conf", 2, 1) + .process(cluster.getSolrClient()); cluster.getSolrClient().setDefaultCollection("sourceCollection"); final String splitKey = "a"; final int BIT_SEP = 1; final int[] splitKeyCount = new int[1]; - for (int id = 0; id < 26*3; id++) { - String shardKey = "" + (char) ('a' + (id % 26)); // See comment in ShardRoutingTest for hash distribution + for (int id = 0; id < 26 * 3; id++) { + String shardKey = + "" + (char) ('a' + (id % 26)); // See comment in ShardRoutingTest for hash distribution String key = shardKey; - if (splitKey.equals(shardKey)) { - key += "/" + BIT_SEP; // spread it over half the collection + if (splitKey.equals(shardKey)) { + key += "/" + BIT_SEP; // spread it over half the collection } SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", key + "!" 
+ id); doc.addField("n_ti", id); cluster.getSolrClient().add("sourceCollection", doc); - if (splitKey.equals(shardKey)) - splitKeyCount[0]++; + if (splitKey.equals(shardKey)) splitKeyCount[0]++; } assertTrue(splitKeyCount[0] > 0); String targetCollection = "migrate_multipleshardtest_targetCollection"; - CollectionAdminRequest.createCollection(targetCollection, "conf", 1, 1).process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection(targetCollection, "conf", 1, 1) + .process(cluster.getSolrClient()); Indexer indexer = new Indexer(cluster.getSolrClient(), splitKey, 1, 30); indexer.start(); @@ -133,11 +139,15 @@ public void multipleShardMigrateTest() throws Exception { try (HttpSolrClient collectionClient = getHttpSolrClient(replica.getCoreUrl())) { SolrQuery solrQuery = new SolrQuery("*:*"); - assertEquals("DocCount on target collection does not match", 0, collectionClient.query(solrQuery).getResults().getNumFound()); + assertEquals( + "DocCount on target collection does not match", + 0, + collectionClient.query(solrQuery).getResults().getNumFound()); invokeCollectionMigration( - CollectionAdminRequest.migrateData("sourceCollection", targetCollection, splitKey + "/" + BIT_SEP + "!") - .setForwardTimeout(45)); + CollectionAdminRequest.migrateData( + "sourceCollection", targetCollection, splitKey + "/" + BIT_SEP + "!") + .setForwardTimeout(45)); long finishTime = System.nanoTime(); @@ -156,20 +166,22 @@ public void multipleShardMigrateTest() throws Exception { solrQuery = new SolrQuery("*:*").setRows(1000); QueryResponse response = collectionClient.query(solrQuery); log.info("Response from target collection: {}", response); - assertEquals("DocCount on target collection does not match", splitKeyCount[0], response.getResults().getNumFound()); - - waitForState("Expected to find routing rule for split key " + splitKey, "sourceCollection", (n, c) -> { - if (c == null) - return false; - Slice shard = c.getSlice("shard2"); - if (shard == null) - return false; - if (shard.getRoutingRules() == null || shard.getRoutingRules().isEmpty()) - return false; - if (shard.getRoutingRules().get(splitKey + "!") == null) - return false; - return true; - }); + assertEquals( + "DocCount on target collection does not match", + splitKeyCount[0], + response.getResults().getNumFound()); + + waitForState( + "Expected to find routing rule for split key " + splitKey, + "sourceCollection", + (n, c) -> { + if (c == null) return false; + Slice shard = c.getSlice("shard2"); + if (shard == null) return false; + if (shard.getRoutingRules() == null || shard.getRoutingRules().isEmpty()) return false; + if (shard.getRoutingRules().get(splitKey + "!") == null) return false; + return true; + }); boolean ruleRemoved = waitForRuleToExpire("sourceCollection", "shard2", splitKey, finishTime); assertTrue("Routing rule was not expired", ruleRemoved); @@ -193,15 +205,15 @@ public Indexer(CloudSolrClient cloudClient, String splitKey, int bitSep, int sec @Override public void run() { TimeOut timeout = new TimeOut(seconds, TimeUnit.SECONDS, TimeSource.NANO_TIME); - for (int id = 26*3; id < 500 && ! timeout.hasTimedOut(); id++) { - String shardKey = "" + (char) ('a' + (id % 26)); // See comment in ShardRoutingTest for hash distribution + for (int id = 26 * 3; id < 500 && !timeout.hasTimedOut(); id++) { + String shardKey = + "" + (char) ('a' + (id % 26)); // See comment in ShardRoutingTest for hash distribution SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", shardKey + (bitSep != -1 ? "/" + bitSep : "") + "!" 
+ id); doc.addField("n_ti", id); try { cloudClient.add(doc); - if (splitKey.equals(shardKey)) - splitKeyCount++; + if (splitKey.equals(shardKey)) splitKeyCount++; } catch (Exception e) { log.error("Exception while adding document id: {}", doc.getField("id"), e); } diff --git a/solr/core/src/test/org/apache/solr/cloud/MissingSegmentRecoveryTest.java b/solr/core/src/test/org/apache/solr/cloud/MissingSegmentRecoveryTest.java index 4584ca2b4c0..fd1de1b0f0c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/MissingSegmentRecoveryTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/MissingSegmentRecoveryTest.java @@ -22,7 +22,6 @@ import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.List; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; @@ -42,15 +41,13 @@ @Slow public class MissingSegmentRecoveryTest extends SolrCloudTestCase { final String collection = getClass().getSimpleName(); - + Replica leader; Replica replica; @BeforeClass public static void setupCluster() throws Exception { - configureCluster(2) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(2).addConfig("conf", configset("cloud-minimal")).configure(); useFactory("solr.StandardDirectoryFactory"); } @@ -58,7 +55,8 @@ public static void setupCluster() throws Exception { public void setup() throws SolrServerException, IOException { CollectionAdminRequest.createCollection(collection, "conf", 1, 2) .process(cluster.getSolrClient()); - waitForState("Expected a collection with one shard and two replicas", collection, clusterShape(1, 2)); + waitForState( + "Expected a collection with one shard and two replicas", collection, clusterShape(1, 2)); cluster.getSolrClient().setDefaultCollection(collection); List docs = new ArrayList<>(); @@ -70,12 +68,12 @@ public void setup() throws SolrServerException, IOException { cluster.getSolrClient().add(docs); cluster.getSolrClient().commit(); - + DocCollection state = getCollectionState(collection); leader = state.getLeader("shard1"); replica = getRandomReplica(state.getSlice("shard1"), (r) -> leader != r); } - + @After public void teardown() throws Exception { if (null == leader) { @@ -106,21 +104,24 @@ public void testLeaderRecovery() throws Exception { jetty.stop(); jetty.start(); - waitForState("Expected a collection with one shard and two replicas", collection, clusterShape(1, 2)); - + waitForState( + "Expected a collection with one shard and two replicas", collection, clusterShape(1, 2)); + QueryResponse resp = cluster.getSolrClient().query(collection, new SolrQuery("*:*")); assertEquals(10, resp.getResults().getNumFound()); } private File[] getSegmentFiles(Replica replica) { - try (SolrCore core = cluster.getReplicaJetty(replica).getCoreContainer().getCore(replica.getCoreName())) { + try (SolrCore core = + cluster.getReplicaJetty(replica).getCoreContainer().getCore(replica.getCoreName())) { File indexDir = new File(core.getDataDir(), "index"); - return indexDir.listFiles((File dir, String name) -> { - return name.startsWith("segments_"); - }); + return indexDir.listFiles( + (File dir, String name) -> { + return name.startsWith("segments_"); + }); } } - + private void truncate(File file) throws IOException { Files.write(file.toPath(), new byte[0], StandardOpenOption.TRUNCATE_EXISTING); } diff --git a/solr/core/src/test/org/apache/solr/cloud/MockScriptUpdateProcessorFactory.java 
b/solr/core/src/test/org/apache/solr/cloud/MockScriptUpdateProcessorFactory.java index e8d78298fd4..2e30679502a 100644 --- a/solr/core/src/test/org/apache/solr/cloud/MockScriptUpdateProcessorFactory.java +++ b/solr/core/src/test/org/apache/solr/cloud/MockScriptUpdateProcessorFactory.java @@ -17,7 +17,6 @@ package org.apache.solr.cloud; import java.io.IOException; - import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.SolrInputDocument; @@ -30,26 +29,28 @@ import org.apache.solr.util.plugin.SolrCoreAware; /** - *
* The scripting update processor capability is something that is only allowed by a trusted - * configSet. The actual code lives in the /modules/scripting project, however the test - * for trusted configsets lives in TestConfigSetsAPI. This class is meant to simulate the + * configSet. The actual code lives in the /modules/scripting project; however, the test for trusted + * configsets lives in TestConfigSetsAPI. This class is meant to simulate the * ScriptUpdateProcessorFactory for this test. - *
-*/ -public class MockScriptUpdateProcessorFactory extends UpdateRequestProcessorFactory implements SolrCoreAware { + */ +public class MockScriptUpdateProcessorFactory extends UpdateRequestProcessorFactory + implements SolrCoreAware { @Override public void inform(SolrCore core) { if (!core.getCoreDescriptor().isConfigSetTrusted()) { - throw new SolrException(ErrorCode.UNAUTHORIZED, "The configset for this collection was uploaded without any authentication in place," - + " and this operation is not available for collections with untrusted configsets. To use this component, re-upload the configset" - + " after enabling authentication and authorization."); + throw new SolrException( + ErrorCode.UNAUTHORIZED, + "The configset for this collection was uploaded without any authentication in place," + + " and this operation is not available for collections with untrusted configsets. To use this component, re-upload the configset" + + " after enabling authentication and authorization."); } } @Override - public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) { + public UpdateRequestProcessor getInstance( + SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) { return new MockScriptUpdateRequestProcessor(next); } @@ -63,14 +64,11 @@ public MockScriptUpdateRequestProcessor(UpdateRequestProcessor next) { * @throws IOException If there is a low-level I/O error */ @Override - public void processAdd(AddUpdateCommand cmd) - throws IOException { + public void processAdd(AddUpdateCommand cmd) throws IOException { SolrInputDocument doc = cmd.getSolrInputDocument(); doc.setField("script_added_i", "42"); super.processAdd(cmd); } - } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/MockSimpleZkController.java b/solr/core/src/test/org/apache/solr/cloud/MockSimpleZkController.java index c33ed011eb0..c6eb033603d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/MockSimpleZkController.java +++ b/solr/core/src/test/org/apache/solr/cloud/MockSimpleZkController.java @@ -20,15 +20,19 @@ import java.util.List; import java.util.concurrent.TimeoutException; import java.util.function.Supplier; - import org.apache.solr.core.CloudConfig; import org.apache.solr.core.CoreContainer; import org.apache.solr.core.CoreDescriptor; public class MockSimpleZkController extends ZkController { - public MockSimpleZkController(CoreContainer cc, String zkServerAddress, int zkClientConnectTimeout, CloudConfig cloudConfig, - Supplier> registerOnReconnect) throws InterruptedException, TimeoutException, IOException { + public MockSimpleZkController( + CoreContainer cc, + String zkServerAddress, + int zkClientConnectTimeout, + CloudConfig cloudConfig, + Supplier> registerOnReconnect) + throws InterruptedException, TimeoutException, IOException { super(cc, zkServerAddress, zkClientConnectTimeout, cloudConfig, registerOnReconnect); } diff --git a/solr/core/src/test/org/apache/solr/cloud/MockSolrSource.java b/solr/core/src/test/org/apache/solr/cloud/MockSolrSource.java index 093fe1ceec4..804983c71b4 100644 --- a/solr/core/src/test/org/apache/solr/cloud/MockSolrSource.java +++ b/solr/core/src/test/org/apache/solr/cloud/MockSolrSource.java @@ -22,14 +22,15 @@ import org.apache.solr.common.cloud.SolrZkClient; import org.apache.solr.common.cloud.ZkStateReader; -public class MockSolrSource { +public class MockSolrSource { - public static ZkController makeSimpleMock(Overseer overseer, ZkStateReader reader, SolrZkClient zkClient) { + public static ZkController 
makeSimpleMock( + Overseer overseer, ZkStateReader reader, SolrZkClient zkClient) { ZkController zkControllerMock = mock(ZkController.class); final DistributedClusterStateUpdater distributedClusterStateUpdater; if (overseer == null) { - // When no overseer is passed, the Overseer queue does nothing. Replicate this in how we handle distributed state - // updates by doing nothing as well... + // When no overseer is passed, the Overseer queue does nothing. Replicate this in how we + // handle distributed state updates by doing nothing as well... distributedClusterStateUpdater = mock(DistributedClusterStateUpdater.class); overseer = mock(Overseer.class); when(overseer.getDistributedClusterStateUpdater()).thenReturn(distributedClusterStateUpdater); @@ -41,17 +42,16 @@ public static ZkController makeSimpleMock(Overseer overseer, ZkStateReader reade if (reader != null && zkClient == null) { zkClient = reader.getZkClient(); } else { - if (zkClient == null) { - } reader = mock(ZkStateReader.class); when(reader.getZkClient()).thenReturn(zkClient); } - + when(zkControllerMock.getOverseer()).thenReturn(overseer); when(zkControllerMock.getZkStateReader()).thenReturn(reader); when(zkControllerMock.getZkClient()).thenReturn(zkClient); when(zkControllerMock.getOverseer()).thenReturn(overseer); - when(zkControllerMock.getDistributedClusterStateUpdater()).thenReturn(distributedClusterStateUpdater); + when(zkControllerMock.getDistributedClusterStateUpdater()) + .thenReturn(distributedClusterStateUpdater); return zkControllerMock; } } diff --git a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java index 1d4d34ca9f6..00b6d52c611 100644 --- a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java @@ -20,23 +20,17 @@ import org.junit.Test; /** - * Implementation moved to AbstractMoveReplicaTestBase as it is referenced by HDFS contrib module tests. + * Implementation moved to AbstractMoveReplicaTestBase as it is referenced by HDFS contrib module + * tests. 
*/ public class MoveReplicaTest extends AbstractMoveReplicaTestBase { - @Test - // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // annotated on: 24-Dec-2018 public void test() throws Exception { super.test(); } - //Commented out 5-Dec-2017 - // @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-11458") @Test - // 12-Jun-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 17-Mar-2018 This JIRA is fixed, but this test still fails - //17-Aug-2018 commented @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018 - // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // annotated on: 24-Dec-2018 public void testFailedMove() throws Exception { super.testFailedMove(); } diff --git a/solr/core/src/test/org/apache/solr/cloud/MultiSolrCloudTestCaseTest.java b/solr/core/src/test/org/apache/solr/cloud/MultiSolrCloudTestCaseTest.java index e4b7551aa64..4df1dc609e5 100644 --- a/solr/core/src/test/org/apache/solr/cloud/MultiSolrCloudTestCaseTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/MultiSolrCloudTestCaseTest.java @@ -34,19 +34,19 @@ public static void setupClusters() throws Exception { numClouds = random().nextInt(4); // 0..3 final String[] clusterIds = new String[numClouds]; - for (int ii=0; ii taskCollB); + // We didn't wait for the 3rd A_COLL task to complete (test can run quickly) but if it did, + // we expect the B_COLL to have finished first. + assertTrue( + "task2CollA: " + task2CollA + " taskCollB: " + taskCollB, + task2CollA == null || task2CollA > taskCollB); } } } /** * Verifies the status of an async task submitted to the Overseer Collection queue. - * @return null if the task has not completed, the completion timestamp if the task has completed - * (see mockOperation() in {@link org.apache.solr.cloud.api.collections.OverseerCollectionMessageHandler}). + * + * @return null if the task has not completed, the completion timestamp if the task + * has completed (see mockOperation() in {@link + * org.apache.solr.cloud.api.collections.OverseerCollectionMessageHandler}). */ - private Long checkTaskHasCompleted(SolrClient client, int requestId) throws IOException, SolrServerException { - return (Long) getStatusResponse(Integer.toString(requestId), client).getResponse().get("MOCK_FINISHED"); + private Long checkTaskHasCompleted(SolrClient client, int requestId) + throws IOException, SolrServerException { + return (Long) + getStatusResponse(Integer.toString(requestId), client).getResponse().get("MOCK_FINISHED"); } /** * Waits until the specified async task has completed or time ran out. 
- * @return null if the task has not completed, the completion timestamp if the task has completed + * + * @return null if the task has not completed, the completion timestamp if the task + * has completed */ private Long waitForTaskToCompleted(SolrClient client, int requestId) throws Exception { for (int i = 0; i < 500; i++) { @@ -135,13 +154,14 @@ private Long waitForTaskToCompleted(SolrClient client, int requestId) throws Exc private void testParallelCollectionAPICalls() throws IOException, SolrServerException { final int ASYNC_SHIFT = 10000; try (SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) { - for(int i = 1 + ASYNC_SHIFT; i <= NUM_COLLECTIONS + ASYNC_SHIFT; i++) { - CollectionAdminRequest.createCollection("ocptest" + i,"conf1",3,1).processAsync(String.valueOf(i), client); + for (int i = 1 + ASYNC_SHIFT; i <= NUM_COLLECTIONS + ASYNC_SHIFT; i++) { + CollectionAdminRequest.createCollection("ocptest" + i, "conf1", 3, 1) + .processAsync(String.valueOf(i), client); } - + boolean pass = false; int counter = 0; - while(true) { + while (true) { int numRunningTasks = 0; for (int i = 1 + ASYNC_SHIFT; i <= NUM_COLLECTIONS + ASYNC_SHIFT; i++) if (getRequestState(i + "", client) == RequestStatusState.RUNNING) { @@ -159,10 +179,15 @@ private void testParallelCollectionAPICalls() throws IOException, SolrServerExce Thread.currentThread().interrupt(); } } - assertTrue("More than one tasks were supposed to be running in parallel but they weren't.", pass); + assertTrue( + "More than one tasks were supposed to be running in parallel but they weren't.", pass); for (int i = 1 + ASYNC_SHIFT; i <= NUM_COLLECTIONS + ASYNC_SHIFT; i++) { - final RequestStatusState state = getRequestStateAfterCompletion(i + "", REQUEST_STATUS_TIMEOUT, client); - assertSame("Task " + i + " did not complete, final state: " + state, RequestStatusState.COMPLETED, state); + final RequestStatusState state = + getRequestStateAfterCompletion(i + "", REQUEST_STATUS_TIMEOUT, client); + assertSame( + "Task " + i + " did not complete, final state: " + state, + RequestStatusState.COMPLETED, + state); } } } @@ -170,10 +195,12 @@ private void testParallelCollectionAPICalls() throws IOException, SolrServerExce private void testTaskExclusivity() throws Exception, SolrServerException { try (SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) { - Create createCollectionRequest = CollectionAdminRequest.createCollection("ocptest_shardsplit","conf1",4,1); - createCollectionRequest.processAsync("1000",client); + Create createCollectionRequest = + CollectionAdminRequest.createCollection("ocptest_shardsplit", "conf1", 4, 1); + createCollectionRequest.processAsync("1000", client); - CollectionAdminRequest.MockCollTask mockTask = CollectionAdminRequest.mockCollTask("ocptest_shardsplit"); + CollectionAdminRequest.MockCollTask mockTask = + CollectionAdminRequest.mockCollTask("ocptest_shardsplit"); mockTask.setSleep("100"); mockTask.processAsync("1001", client); @@ -182,7 +209,7 @@ private void testTaskExclusivity() throws Exception, SolrServerException { mockTask.processAsync("1002", client); int iterations = 0; - while(true) { + while (true) { int runningTasks = 0; int completedTasks = 0; for (int i = 1001; i <= 1002; i++) { @@ -195,13 +222,15 @@ private void testTaskExclusivity() throws Exception, SolrServerException { assertNotSame("We have a failed SPLITSHARD task", RequestStatusState.FAILED, state); } // TODO: REQUESTSTATUS might come back with more than 1 running tasks 
over multiple calls. - // The only way to fix this is to support checking of multiple requestids in a single REQUESTSTATUS task. - - assertTrue("Mutual exclusion failed. Found more than one task running for the same collection", runningTasks < 2); - - if(completedTasks == 2 || iterations++ > REQUEST_STATUS_TIMEOUT) - break; - + // The only way to fix this is to support checking of multiple requestids in a single + // REQUESTSTATUS task. + + assertTrue( + "Mutual exclusion failed. Found more than one task running for the same collection", + runningTasks < 2); + + if (completedTasks == 2 || iterations++ > REQUEST_STATUS_TIMEOUT) break; + try { Thread.sleep(1000); } catch (InterruptedException e) { @@ -210,66 +239,89 @@ private void testTaskExclusivity() throws Exception, SolrServerException { } } for (int i = 1001; i <= 1002; i++) { - final RequestStatusState state = getRequestStateAfterCompletion(i + "", REQUEST_STATUS_TIMEOUT, client); - assertSame("Task " + i + " did not complete, final state: " + state, RequestStatusState.COMPLETED, state); + final RequestStatusState state = + getRequestStateAfterCompletion(i + "", REQUEST_STATUS_TIMEOUT, client); + assertSame( + "Task " + i + " did not complete, final state: " + state, + RequestStatusState.COMPLETED, + state); } } } private void testDeduplicationOfSubmittedTasks() throws IOException, SolrServerException { try (SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) { - CollectionAdminRequest.createCollection("ocptest_shardsplit2","conf1",3,1).processAsync("3000",client); - - SplitShard splitShardRequest = CollectionAdminRequest.splitShard("ocptest_shardsplit2").setShardName(SHARD1); - splitShardRequest.processAsync("3001",client); - - splitShardRequest = CollectionAdminRequest.splitShard("ocptest_shardsplit2").setShardName(SHARD2); - splitShardRequest.processAsync("3002",client); - - // Now submit another task with the same id. At this time, hopefully the previous 3002 should still be in the queue. - expectThrows(SolrServerException.class, () -> { - CollectionAdminRequest.splitShard("ocptest_shardsplit2").setShardName(SHARD1).processAsync("3002",client); - // more helpful assertion failure - fail("Duplicate request was supposed to exist but wasn't found. De-duplication of submitted task failed."); - }); - + CollectionAdminRequest.createCollection("ocptest_shardsplit2", "conf1", 3, 1) + .processAsync("3000", client); + + SplitShard splitShardRequest = + CollectionAdminRequest.splitShard("ocptest_shardsplit2").setShardName(SHARD1); + splitShardRequest.processAsync("3001", client); + + splitShardRequest = + CollectionAdminRequest.splitShard("ocptest_shardsplit2").setShardName(SHARD2); + splitShardRequest.processAsync("3002", client); + + // Now submit another task with the same id. At this time, hopefully the previous 3002 should + // still be in the queue. + expectThrows( + SolrServerException.class, + () -> { + CollectionAdminRequest.splitShard("ocptest_shardsplit2") + .setShardName(SHARD1) + .processAsync("3002", client); + // more helpful assertion failure + fail( + "Duplicate request was supposed to exist but wasn't found. 
De-duplication of submitted task failed."); + }); + for (int i = 3001; i <= 3002; i++) { - final RequestStatusState state = getRequestStateAfterCompletion(i + "", REQUEST_STATUS_TIMEOUT, client); - assertSame("Task " + i + " did not complete, final state: " + state, RequestStatusState.COMPLETED, state); + final RequestStatusState state = + getRequestStateAfterCompletion(i + "", REQUEST_STATUS_TIMEOUT, client); + assertSame( + "Task " + i + " did not complete, final state: " + state, + RequestStatusState.COMPLETED, + state); } } } - private void testLongAndShortRunningParallelApiCalls() throws InterruptedException, IOException, SolrServerException { - Thread indexThread = new Thread() { - @Override - public void run() { - Random random = random(); - int max = atLeast(random, 200); - for (int id = 101; id < max; id++) { - try { - doAddDoc(String.valueOf(id)); - } catch (Exception e) { - log.error("Exception while adding docs", e); + private void testLongAndShortRunningParallelApiCalls() + throws InterruptedException, IOException, SolrServerException { + Thread indexThread = + new Thread() { + @Override + public void run() { + Random random = random(); + int max = atLeast(random, 200); + for (int id = 101; id < max; id++) { + try { + doAddDoc(String.valueOf(id)); + } catch (Exception e) { + log.error("Exception while adding docs", e); + } + } } - } - } - }; + }; indexThread.start(); try (SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) { - SplitShard splitShardRequest = CollectionAdminRequest.splitShard("collection1").setShardName(SHARD1); - splitShardRequest.processAsync("2000",client); + SplitShard splitShardRequest = + CollectionAdminRequest.splitShard("collection1").setShardName(SHARD1); + splitShardRequest.processAsync("2000", client); RequestStatusState state = getRequestState("2000", client); - while (state == RequestStatusState.SUBMITTED) { + while (state == RequestStatusState.SUBMITTED) { state = getRequestState("2000", client); Thread.sleep(10); } - assertSame("SplitShard task [2000] was supposed to be in [running] but isn't. It is [" + state + "]", - RequestStatusState.RUNNING, state); + assertSame( + "SplitShard task [2000] was supposed to be in [running] but isn't. It is [" + state + "]", + RequestStatusState.RUNNING, + state); - // CLUSTERSTATE is always mutually exclusive, it should return with a response before the split completes + // CLUSTERSTATE is always mutually exclusive, it should return with a response before the + // split completes ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString()); @@ -281,8 +333,13 @@ public void run() { state = getRequestState("2000", client); - assertSame("After invoking OVERSEERSTATUS, SplitShard task [2000] was still supposed to be in [running] but " - + "isn't. It is [" + state + "]", RequestStatusState.RUNNING, state); + assertSame( + "After invoking OVERSEERSTATUS, SplitShard task [2000] was still supposed to be in [running] but " + + "isn't. 
It is [" + + state + + "]", + RequestStatusState.RUNNING, + state); } finally { try { @@ -298,6 +355,3 @@ void doAddDoc(String id) throws Exception { // todo - target diff servers and use cloud clients as well as non-cloud clients } } - - - diff --git a/solr/core/src/test/org/apache/solr/cloud/NestedShardedAtomicUpdateTest.java b/solr/core/src/test/org/apache/solr/cloud/NestedShardedAtomicUpdateTest.java index 7b7e089c860..e3857547976 100644 --- a/solr/core/src/test/org/apache/solr/cloud/NestedShardedAtomicUpdateTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/NestedShardedAtomicUpdateTest.java @@ -21,7 +21,6 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.List; - import org.apache.lucene.util.IOUtils; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; @@ -39,16 +38,14 @@ import org.junit.BeforeClass; import org.junit.Test; -public class NestedShardedAtomicUpdateTest extends SolrCloudTestCase { // used to extend AbstractFullDistribZkTestBase +public class NestedShardedAtomicUpdateTest extends SolrCloudTestCase { private static final String DEFAULT_COLLECTION = "col1"; private static CloudSolrClient cloudClient; private static List clients; // not CloudSolrClient @BeforeClass public static void beforeClass() throws Exception { - configureCluster(1) - .addConfig("_default", configset("cloud-minimal")) - .configure(); + configureCluster(1).addConfig("_default", configset("cloud-minimal")).configure(); // replace schema.xml with schema-test.xml Path schemaPath = TEST_COLL1_CONF().resolve("schema-nest.xml"); cluster.getZkClient().setData("/configs/_default/schema.xml", schemaPath, true); @@ -56,8 +53,7 @@ public static void beforeClass() throws Exception { cloudClient = cluster.getSolrClient(); cloudClient.setDefaultCollection(DEFAULT_COLLECTION); - CollectionAdminRequest.createCollection(DEFAULT_COLLECTION, 4, 1) - .process(cloudClient); + CollectionAdminRequest.createCollection(DEFAULT_COLLECTION, 4, 1).process(cloudClient); clients = new ArrayList<>(); ClusterState clusterState = cloudClient.getClusterStateProvider().getClusterState(); @@ -73,10 +69,20 @@ public static void afterClass() throws Exception { @Test public void doRootShardRoutingTest() throws Exception { - assertEquals(4, cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION).getSlices().size()); + assertEquals( + 4, + cloudClient + .getZkStateReader() + .getClusterState() + .getCollection(DEFAULT_COLLECTION) + .getSlices() + .size()); final String[] ids = {"3", "4", "5", "6"}; - assertEquals("size of ids to index should be the same as the number of clients", clients.size(), ids.length); + assertEquals( + "size of ids to index should be the same as the number of clients", + clients.size(), + ids.length); // for now, we know how ranges will be distributed to shards. // may have to look it up in clusterstate if that assumption changes. 
@@ -87,13 +93,27 @@ public void doRootShardRoutingTest() throws Exception { indexDoc(aClient, null, doc); - doc = sdoc("id", "1", "_root_", "1", "children", map("add", sdocs(sdoc("id", "2", "level_s", "child")))); + doc = + sdoc( + "id", + "1", + "_root_", + "1", + "children", + map("add", sdocs(sdoc("id", "2", "level_s", "child")))); indexDoc(aClient, null, doc); - for(int idIndex = 0; idIndex < ids.length; ++idIndex) { + for (int idIndex = 0; idIndex < ids.length; ++idIndex) { - doc = sdoc("id", "2", "_root_", "1", "grandChildren", map("add", sdocs(sdoc("id", ids[idIndex], "level_s", "grand_child")))); + doc = + sdoc( + "id", + "2", + "_root_", + "1", + "grandChildren", + map("add", sdocs(sdoc("id", ids[idIndex], "level_s", "grand_child")))); indexDocAndRandomlyCommit(getRandomSolrClient(), null, doc); @@ -102,9 +122,11 @@ public void doRootShardRoutingTest() throws Exception { indexDocAndRandomlyCommit(getRandomSolrClient(), null, doc); // assert RTG request respects _route_ param - QueryResponse routeRsp = getRandomSolrClient().query(params("qt","/get", "id","2", "_route_", "1")); + QueryResponse routeRsp = + getRandomSolrClient().query(params("qt", "/get", "id", "2", "_route_", "1")); SolrDocument results = (SolrDocument) routeRsp.getResponse().get("doc"); - assertNotNull("RTG should find doc because _route_ was set to the root documents' ID", results); + assertNotNull( + "RTG should find doc because _route_ was set to the root documents' ID", results); assertEquals("2", results.getFieldValue("id")); // assert all docs are indexed under the same root @@ -112,7 +134,8 @@ public void doRootShardRoutingTest() throws Exception { assertEquals(0, getRandomSolrClient().query(params("q", "-_root_:1")).getResults().size()); // assert all docs are indexed inside the same block - QueryResponse rsp = getRandomSolrClient().query(params("qt","/get", "id","1", "fl", "*, [child]")); + QueryResponse rsp = + getRandomSolrClient().query(params("qt", "/get", "id", "1", "fl", "*, [child]")); SolrDocument val = (SolrDocument) rsp.getResponse().get("doc"); assertEquals("1", val.getFieldValue("id")); @SuppressWarnings({"unchecked"}) @@ -131,10 +154,20 @@ public void doRootShardRoutingTest() throws Exception { @Test public void doNestedInplaceUpdateTest() throws Exception { - assertEquals(4, cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION).getSlices().size()); + assertEquals( + 4, + cloudClient + .getZkStateReader() + .getClusterState() + .getCollection(DEFAULT_COLLECTION) + .getSlices() + .size()); final String[] ids = {"3", "4", "5", "6"}; - assertEquals("size of ids to index should be the same as the number of clients", clients.size(), ids.length); + assertEquals( + "size of ids to index should be the same as the number of clients", + clients.size(), + ids.length); // for now, we know how ranges will be distributed to shards. // may have to look it up in clusterstate if that assumption changes. 
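// A minimal sketch, assuming the collection "col1" this test creates, of the nested
// atomic-update documents that the sdoc()/map()/sdocs() helpers above construct: an
// atomic "add" of one child document, with _root_ set to the root id so the update
// routes to the same shard (and index block) as the root document.
import java.util.Collections;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.common.SolrInputDocument;

public class NestedAtomicAddSketch {
  static void addChild(SolrClient client) throws Exception {
    SolrInputDocument child = new SolrInputDocument();
    child.setField("id", "2");
    child.setField("level_s", "child");

    SolrInputDocument update = new SolrInputDocument();
    update.setField("id", "1");     // existing root document
    update.setField("_root_", "1"); // route by the root id, not the child id
    // Atomic operation: add the child under the "children" pseudo-field.
    update.setField(
        "children", Collections.singletonMap("add", Collections.singletonList(child)));

    client.add("col1", update);
    client.commit("col1");
  }
}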
@@ -145,11 +178,25 @@ public void doNestedInplaceUpdateTest() throws Exception { indexDocAndRandomlyCommit(aClient, null, doc); - doc = sdoc("id", "1", "_root_", "1", "children", map("add", sdocs(sdoc("id", "2", "level_s", "child")))); + doc = + sdoc( + "id", + "1", + "_root_", + "1", + "children", + map("add", sdocs(sdoc("id", "2", "level_s", "child")))); indexDocAndRandomlyCommit(aClient, null, doc); - doc = sdoc("id", "2", "_root_", "1", "grandChildren", map("add", sdocs(sdoc("id", ids[0], "level_s", "grand_child")))); + doc = + sdoc( + "id", + "2", + "_root_", + "1", + "grandChildren", + map("add", sdocs(sdoc("id", ids[0], "level_s", "grand_child")))); indexDocAndRandomlyCommit(aClient, null, doc); @@ -185,9 +232,11 @@ public void doNestedInplaceUpdateTest() throws Exception { if (random().nextBoolean()) { // assert RTG request respects _route_ param - QueryResponse routeRsp = getRandomSolrClient().query(params("qt","/get", "id","2", "_route_", "1")); + QueryResponse routeRsp = + getRandomSolrClient().query(params("qt", "/get", "id", "2", "_route_", "1")); SolrDocument results = (SolrDocument) routeRsp.getResponse().get("doc"); - assertNotNull("RTG should find doc because _route_ was set to the root documents' ID", results); + assertNotNull( + "RTG should find doc because _route_ was set to the root documents' ID", results); assertEquals("2", results.getFieldValue("id")); } @@ -198,7 +247,8 @@ public void doNestedInplaceUpdateTest() throws Exception { if (random().nextBoolean()) { // assert all docs are indexed inside the same block - QueryResponse rsp = getRandomSolrClient().query(params("qt","/get", "id","1", "fl", "*, [child]")); + QueryResponse rsp = + getRandomSolrClient().query(params("qt", "/get", "id", "1", "fl", "*, [child]")); SolrDocument val = (SolrDocument) rsp.getResponse().get("doc"); assertEquals("1", val.getFieldValue("id")); assertInplaceCounter(id1InPlaceCounter, val); @@ -230,7 +280,14 @@ private void assertInplaceCounter(int expected, SolrDocument val) { @Test public void sendWrongRouteParam() throws Exception { - assertEquals(4, cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION).getSlices().size()); + assertEquals( + 4, + cloudClient + .getZkStateReader() + .getClusterState() + .getCollection(DEFAULT_COLLECTION) + .getSlices() + .size()); final String rootId = "1"; SolrInputDocument doc = sdoc("id", rootId, "level_s", "root"); @@ -243,27 +300,32 @@ public void sendWrongRouteParam() throws Exception { indexDocAndRandomlyCommit(aClient, null, doc); - final SolrInputDocument childDoc = sdoc("id", rootId, "children", map("add", sdocs(sdoc("id", "2", "level_s", "child")))); + final SolrInputDocument childDoc = + sdoc("id", rootId, "children", map("add", sdocs(sdoc("id", "2", "level_s", "child")))); indexDocAndRandomlyCommit(aClient, rightParams, childDoc); - final SolrInputDocument grandChildDoc = sdoc("id", "2", "_root_", rootId, - "grandChildren", - map("add", sdocs( - sdoc("id", "3", "level_s", "grandChild") - ) - ) - ); + final SolrInputDocument grandChildDoc = + sdoc( + "id", + "2", + "_root_", + rootId, + "grandChildren", + map("add", sdocs(sdoc("id", "3", "level_s", "grandChild")))); // despite the wrong param, it'll be routed correctly; we can find the doc after. // An error would have been okay too but routing correctly is also fine. 
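// A short sketch of the real-time-get lookups asserted in these tests: /get with a
// _route_ param carrying the root document's id, so the request reaches the shard
// holding the whole block even when fetching a descendant. Collection name and ids
// match this test; the helper name is illustrative.
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;

public class RouteAwareRtgSketch {
  static SolrDocument getChildViaRootRoute(SolrClient client) throws Exception {
    SolrQuery q = new SolrQuery();
    q.setRequestHandler("/get"); // real-time get handler
    q.set("id", "2");            // the child id being fetched
    q.set("_route_", "1");       // the root id decides the target shard
    QueryResponse rsp = client.query("col1", q);
    // Single-id RTG responses put the document under the "doc" key.
    return (SolrDocument) rsp.getResponse().get("doc");
  }
}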
indexDoc(aClient, wrongRouteParams, grandChildDoc); aClient.commit(); - assertEquals(1, aClient.query(params("_route_", rootId, "q", "id:3")).getResults().getNumFound()); + assertEquals( + 1, aClient.query(params("_route_", rootId, "q", "id:3")).getResults().getNumFound()); } - private void indexDocAndRandomlyCommit(SolrClient client, SolrParams params, SolrInputDocument sdoc) throws IOException, SolrServerException { + private void indexDocAndRandomlyCommit( + SolrClient client, SolrParams params, SolrInputDocument sdoc) + throws IOException, SolrServerException { indexDoc(client, params, sdoc); // randomly commit docs if (random().nextBoolean()) { @@ -271,7 +333,8 @@ private void indexDocAndRandomlyCommit(SolrClient client, SolrParams params, Sol } } - private void indexDoc(SolrClient client, SolrParams params, SolrInputDocument sdoc) throws IOException, SolrServerException { + private void indexDoc(SolrClient client, SolrParams params, SolrInputDocument sdoc) + throws IOException, SolrServerException { final UpdateRequest updateRequest = new UpdateRequest(); updateRequest.add(sdoc); updateRequest.setParams(new ModifiableSolrParams(params)); @@ -283,5 +346,4 @@ private SolrClient getRandomSolrClient() { final int index = random().nextInt(clients.size() + 1); return index == clients.size() ? cloudClient : clients.get(index); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/NodeMutatorTest.java b/solr/core/src/test/org/apache/solr/cloud/NodeMutatorTest.java index 8fa70d8ad5a..886f76934eb 100644 --- a/solr/core/src/test/org/apache/solr/cloud/NodeMutatorTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/NodeMutatorTest.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.util.List; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.SolrTestCaseJ4Test; import org.apache.solr.cloud.overseer.NodeMutator; @@ -45,13 +44,16 @@ public class NodeMutatorTest extends SolrTestCaseJ4Test { public void downNodeReportsAllImpactedCollectionsAndNothingElse() throws IOException { NodeMutator nm = new NodeMutator(null); - //Collection1: 2 shards X 1 replica = replica1 on node1 and replica2 on node2 - //Collection2: 1 shard X 1 replica = replica1 on node2 + // Collection1: 2 shards X 1 replica = replica1 on node1 and replica2 on node2 + // Collection2: 1 shard X 1 replica = replica1 on node2 ZkStateReader reader = ClusterStateMockUtil.buildClusterState("csrr2rDcsr2", 1, NODE1, NODE2); ClusterState clusterState = reader.getClusterState(); - assertEquals(clusterState.getCollection("collection1").getReplica("replica1").getBaseUrl(), NODE1_URL); - assertEquals(clusterState.getCollection("collection1").getReplica("replica2").getBaseUrl(), NODE2_URL); - assertEquals(clusterState.getCollection("collection2").getReplica("replica4").getBaseUrl(), NODE2_URL); + assertEquals( + clusterState.getCollection("collection1").getReplica("replica1").getBaseUrl(), NODE1_URL); + assertEquals( + clusterState.getCollection("collection1").getReplica("replica2").getBaseUrl(), NODE2_URL); + assertEquals( + clusterState.getCollection("collection2").getReplica("replica4").getBaseUrl(), NODE2_URL); ZkNodeProps props = new ZkNodeProps(ZkStateReader.NODE_NAME_PROP, NODE1); List writes = nm.downNode(clusterState, props); @@ -61,19 +63,25 @@ public void downNodeReportsAllImpactedCollectionsAndNothingElse() throws IOExcep assertEquals(writes.get(0).collection.getReplica("replica2").getState(), Replica.State.ACTIVE); reader.close(); - //Collection1: 2 shards X 1 replica = replica1 on node1 and replica2 on 
node2 - //Collection2: 1 shard X 1 replica = replica1 on node2 - //Collection3: 1 shard X 3 replica = replica1 on node1 , replica2 on node2, replica3 on node3 + // Collection1: 2 shards X 1 replica = replica1 on node1 and replica2 on node2 + // Collection2: 1 shard X 1 replica = replica1 on node2 + // Collection3: 1 shard X 3 replica = replica1 on node1 , replica2 on node2, replica3 on node3 reader = ClusterStateMockUtil.buildClusterState("csrr2rDcsr2csr1r2r3", 1, NODE1, NODE2, NODE3); clusterState = reader.getClusterState(); - assertEquals(clusterState.getCollection("collection1").getReplica("replica1").getBaseUrl(), NODE1_URL); - assertEquals(clusterState.getCollection("collection1").getReplica("replica2").getBaseUrl(), NODE2_URL); + assertEquals( + clusterState.getCollection("collection1").getReplica("replica1").getBaseUrl(), NODE1_URL); + assertEquals( + clusterState.getCollection("collection1").getReplica("replica2").getBaseUrl(), NODE2_URL); - assertEquals(clusterState.getCollection("collection2").getReplica("replica4").getBaseUrl(), NODE2_URL); + assertEquals( + clusterState.getCollection("collection2").getReplica("replica4").getBaseUrl(), NODE2_URL); - assertEquals(clusterState.getCollection("collection3").getReplica("replica5").getBaseUrl(), NODE1_URL); - assertEquals(clusterState.getCollection("collection3").getReplica("replica6").getBaseUrl(), NODE2_URL); - assertEquals(clusterState.getCollection("collection3").getReplica("replica7").getBaseUrl(), NODE3_URL); + assertEquals( + clusterState.getCollection("collection3").getReplica("replica5").getBaseUrl(), NODE1_URL); + assertEquals( + clusterState.getCollection("collection3").getReplica("replica6").getBaseUrl(), NODE2_URL); + assertEquals( + clusterState.getCollection("collection3").getReplica("replica7").getBaseUrl(), NODE3_URL); writes = nm.downNode(clusterState, props); assertEquals(writes.size(), 2); diff --git a/solr/core/src/test/org/apache/solr/cloud/NodeRolesTest.java b/solr/core/src/test/org/apache/solr/cloud/NodeRolesTest.java index 1a573d68935..4b3e0e7abe3 100644 --- a/solr/core/src/test/org/apache/solr/cloud/NodeRolesTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/NodeRolesTest.java @@ -20,9 +20,7 @@ import java.lang.invoke.MethodHandles; import java.util.Collection; import java.util.Collections; - import java.util.Map; - import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.V2Request; @@ -38,9 +36,7 @@ public class NodeRolesTest extends SolrCloudTestCase { @Before public void setupCluster() throws Exception { - configureCluster(1) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(1).addConfig("conf", configset("cloud-minimal")).configure(); } @After @@ -54,7 +50,8 @@ public void testRoleIntegration() throws Exception { // Start a dedicated overseer node JettySolrRunner j1 = startNodeWithRoles("overseer:preferred,data:off"); - validateNodeRoles(j1.getNodeName(), "node-roles/overseer/preferred", j1.getNodeName(), "node-roles/data/off"); + validateNodeRoles( + j1.getNodeName(), "node-roles/overseer/preferred", j1.getNodeName(), "node-roles/data/off"); V2Response rsp; OverseerRolesTest.waitForNewOverseer(20, j1.getNodeName(), true); @@ -62,21 +59,31 @@ public void testRoleIntegration() throws Exception { // Start another node that is allowed or preferred overseer but has data String overseerModeOnDataNode = random().nextBoolean() ? 
"preferred" : "allowed"; JettySolrRunner j2 = startNodeWithRoles("overseer:" + overseerModeOnDataNode + ",data:on"); - validateNodeRoles(j2.getNodeName(), "node-roles/overseer/" + overseerModeOnDataNode, j2.getNodeName(), "node-roles/data/on"); + validateNodeRoles( + j2.getNodeName(), + "node-roles/overseer/" + overseerModeOnDataNode, + j2.getNodeName(), + "node-roles/data/on"); // validate the preferred overseers - validateNodeRoles(j2.getNodeName(), "node-roles/overseer/" + overseerModeOnDataNode, j1.getNodeName(), "node-roles/overseer/preferred"); + validateNodeRoles( + j2.getNodeName(), + "node-roles/overseer/" + overseerModeOnDataNode, + j1.getNodeName(), + "node-roles/overseer/preferred"); String COLLECTION_NAME = "TEST_ROLES"; - CollectionAdminRequest - .createCollection(COLLECTION_NAME, "conf", 3, 1) - .process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection(COLLECTION_NAME, "conf", 3, 1) + .process(cluster.getSolrClient()); cluster.waitForActiveCollection(COLLECTION_NAME, 3, 3); // Assert that no replica was placed on the dedicated overseer node String dedicatedOverseer = j1.getNodeName(); - cluster.getSolrClient().getClusterStateProvider().getCollection(COLLECTION_NAME) - .forEachReplica((s, replica) -> assertNotEquals(replica.node, dedicatedOverseer)); + cluster + .getSolrClient() + .getClusterStateProvider() + .getCollection(COLLECTION_NAME) + .forEachReplica((s, replica) -> assertNotEquals(replica.node, dedicatedOverseer)); // Shutdown the dedicated overseer, make sure that node disappears from the roles output j1.stop(); @@ -85,25 +92,39 @@ public void testRoleIntegration() throws Exception { OverseerRolesTest.waitForNewOverseer(20, it -> !dedicatedOverseer.equals(it), false); // Make sure the stopped node no longer has the role assigned - rsp = new V2Request.Builder("/cluster/node-roles/role/overseer/" + overseerModeOnDataNode).GET().build().process(cluster.getSolrClient()); - assertFalse(((Collection) rsp._get("node-roles/overseer/" + overseerModeOnDataNode, null)).contains(j1.getNodeName())); + rsp = + new V2Request.Builder("/cluster/node-roles/role/overseer/" + overseerModeOnDataNode) + .GET() + .build() + .process(cluster.getSolrClient()); + assertFalse( + ((Collection) rsp._get("node-roles/overseer/" + overseerModeOnDataNode, null)) + .contains(j1.getNodeName())); } @SuppressWarnings("rawtypes") - private void validateNodeRoles(String... nodenamePaths) throws org.apache.solr.client.solrj.SolrServerException, java.io.IOException { - V2Response rsp = new V2Request.Builder("/cluster/node-roles").GET().build().process(cluster.getSolrClient()); + private void validateNodeRoles(String... nodenamePaths) + throws org.apache.solr.client.solrj.SolrServerException, java.io.IOException { + V2Response rsp = + new V2Request.Builder("/cluster/node-roles").GET().build().process(cluster.getSolrClient()); for (int i = 0; i < nodenamePaths.length; i += 2) { String nodename = nodenamePaths[i]; String path = nodenamePaths[i + 1]; - assertTrue("Didn't find " + nodename + " at " + path + ". Full response: " + rsp.jsonStr(), - ((Collection) rsp._get(path, Collections.emptyList())).contains(nodename)); + assertTrue( + "Didn't find " + nodename + " at " + path + ". 
Full response: " + rsp.jsonStr(), + ((Collection) rsp._get(path, Collections.emptyList())).contains(nodename)); } } @SuppressWarnings("unchecked") private void testSupportedRolesAPI() throws Exception { - V2Response rsp = new V2Request.Builder("/cluster/node-roles/supported").GET().build().process(cluster.getSolrClient()); - Map l = (Map) rsp._get("supported-roles", Collections.emptyMap()); + V2Response rsp = + new V2Request.Builder("/cluster/node-roles/supported") + .GET() + .build() + .process(cluster.getSolrClient()); + Map l = + (Map) rsp._get("supported-roles", Collections.emptyMap()); assertTrue(l.containsKey("data")); assertTrue(l.containsKey("overseer")); } @@ -118,5 +139,4 @@ private JettySolrRunner startNodeWithRoles(String roles) throws Exception { } return jetty; } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/OutOfBoxZkACLAndCredentialsProvidersTest.java b/solr/core/src/test/org/apache/solr/cloud/OutOfBoxZkACLAndCredentialsProvidersTest.java index 6a5187cdcc4..d17f6ec7c7d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OutOfBoxZkACLAndCredentialsProvidersTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OutOfBoxZkACLAndCredentialsProvidersTest.java @@ -21,7 +21,6 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.List; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.cloud.SecurityAwareZkACLProvider; import org.apache.solr.common.cloud.SolrZkClient; @@ -36,25 +35,25 @@ import org.slf4j.LoggerFactory; public class OutOfBoxZkACLAndCredentialsProvidersTest extends SolrTestCaseJ4 { - + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - + private static final Charset DATA_ENCODING = Charset.forName("UTF-8"); - + protected ZkTestServer zkServer; protected Path zkDir; - + @BeforeClass public static void beforeClass() { System.setProperty("solrcloud.skip.autorecovery", "true"); } - + @AfterClass public static void afterClass() throws InterruptedException { System.clearProperty("solrcloud.skip.autorecovery"); } - + @Override public void setUp() throws Exception { super.setUp(); @@ -67,19 +66,30 @@ public void setUp() throws Exception { log.info("ZooKeeper dataDir:{}", zkDir); zkServer = new ZkTestServer(zkDir); zkServer.run(); - + System.setProperty("zkHost", zkServer.getZkAddress()); - + SolrZkClient zkClient = new SolrZkClient(zkServer.getZkHost(), AbstractZkTestCase.TIMEOUT); zkClient.makePath("/solr", false, true); zkClient.close(); zkClient = new SolrZkClient(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); - zkClient.create("/protectedCreateNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); - zkClient.makePath("/protectedMakePathNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); - zkClient.create("/unprotectedCreateNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); - zkClient.makePath("/unprotectedMakePathNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); - zkClient.create(SecurityAwareZkACLProvider.SECURITY_ZNODE_PATH, "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); + zkClient.create( + "/protectedCreateNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); + zkClient.makePath( + "/protectedMakePathNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); + zkClient.create( + "/unprotectedCreateNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); + zkClient.makePath( + "/unprotectedMakePathNode", + 
"content".getBytes(DATA_ENCODING), + CreateMode.PERSISTENT, + false); + zkClient.create( + SecurityAwareZkACLProvider.SECURITY_ZNODE_PATH, + "content".getBytes(DATA_ENCODING), + CreateMode.PERSISTENT, + false); zkClient.close(); if (log.isInfoEnabled()) { @@ -90,7 +100,7 @@ public void setUp() throws Exception { @Override public void tearDown() throws Exception { zkServer.shutdown(); - + super.tearDown(); } @@ -98,14 +108,13 @@ public void tearDown() throws Exception { public void testOutOfBoxSolrZkClient() throws Exception { SolrZkClient zkClient = new SolrZkClient(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); try { - VMParamsZkACLAndCredentialsProvidersTest.doTest(zkClient, - true, true, true, true, true, - true, true, true, true, true); + VMParamsZkACLAndCredentialsProvidersTest.doTest( + zkClient, true, true, true, true, true, true, true, true, true, true); } finally { zkClient.close(); } } - + @Test public void testOpenACLUnsafeAllover() throws Exception { SolrZkClient zkClient = new SolrZkClient(zkServer.getZkHost(), AbstractZkTestCase.TIMEOUT); @@ -123,26 +132,29 @@ public void testOpenACLUnsafeAllover() throws Exception { } } - - protected void assertOpenACLUnsafeAllover(SolrZkClient zkClient, String path, List verifiedList) throws Exception { + protected void assertOpenACLUnsafeAllover( + SolrZkClient zkClient, String path, List verifiedList) throws Exception { List acls = zkClient.getSolrZooKeeper().getACL(path, new Stat()); if (log.isInfoEnabled()) { log.info("Verifying {}", path); } if (ZooDefs.CONFIG_NODE.equals(path)) { // Treat this node specially, from the ZK docs: - // The dynamic configuration is stored in a special znode ZooDefs.CONFIG_NODE = /zookeeper/config. + // The dynamic configuration is stored in a special znode ZooDefs.CONFIG_NODE = + // /zookeeper/config. // This node by default is read only for all users, except super user and // users that's explicitly configured for write access. - assertEquals("Path " + path + " does not have READ_ACL_UNSAFE", ZooDefs.Ids.READ_ACL_UNSAFE, acls); + assertEquals( + "Path " + path + " does not have READ_ACL_UNSAFE", ZooDefs.Ids.READ_ACL_UNSAFE, acls); } else { - assertEquals("Path " + path + " does not have OPEN_ACL_UNSAFE", ZooDefs.Ids.OPEN_ACL_UNSAFE, acls); + assertEquals( + "Path " + path + " does not have OPEN_ACL_UNSAFE", ZooDefs.Ids.OPEN_ACL_UNSAFE, acls); } verifiedList.add(path); List children = zkClient.getChildren(path, null, false); for (String child : children) { - assertOpenACLUnsafeAllover(zkClient, path + ((path.endsWith("/")) ? "" : "/") + child, verifiedList); + assertOpenACLUnsafeAllover( + zkClient, path + ((path.endsWith("/")) ? 
"" : "/") + child, verifiedList); } } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/OverriddenZkACLAndCredentialsProvidersTest.java b/solr/core/src/test/org/apache/solr/cloud/OverriddenZkACLAndCredentialsProvidersTest.java index 5f2112bac80..0a741c12afa 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverriddenZkACLAndCredentialsProvidersTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverriddenZkACLAndCredentialsProvidersTest.java @@ -16,6 +16,13 @@ */ package org.apache.solr.cloud; +import java.lang.invoke.MethodHandles; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.StringUtils; import org.apache.solr.common.cloud.DefaultZkCredentialsProvider; @@ -32,34 +39,27 @@ import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.lang.invoke.MethodHandles; -import java.nio.charset.Charset; -import java.nio.file.Path; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; public class OverriddenZkACLAndCredentialsProvidersTest extends SolrTestCaseJ4 { - + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - + private static final Charset DATA_ENCODING = Charset.forName("UTF-8"); - + protected ZkTestServer zkServer; - + protected Path zkDir; - + @BeforeClass public static void beforeClass() { System.setProperty("solrcloud.skip.autorecovery", "true"); } - + @AfterClass public static void afterClass() throws InterruptedException { System.clearProperty("solrcloud.skip.autorecovery"); } - + @Override public void setUp() throws Exception { super.setUp(); @@ -67,68 +67,96 @@ public void setUp() throws Exception { log.info("####SETUP_START {}", getTestName()); } createTempDir(); - - zkDir =createTempDir().resolve("zookeeper/server1/data"); + + zkDir = createTempDir().resolve("zookeeper/server1/data"); log.info("ZooKeeper dataDir:{}", zkDir); zkServer = new ZkTestServer(zkDir); zkServer.run(false); - + System.setProperty("zkHost", zkServer.getZkAddress()); - - SolrZkClient zkClient = new SolrZkClientFactoryUsingCompletelyNewProviders("connectAndAllACLUsername", "connectAndAllACLPassword", - "readonlyACLUsername", "readonlyACLPassword").getSolrZkClient(zkServer.getZkHost(), AbstractZkTestCase.TIMEOUT); + + SolrZkClient zkClient = + new SolrZkClientFactoryUsingCompletelyNewProviders( + "connectAndAllACLUsername", + "connectAndAllACLPassword", + "readonlyACLUsername", + "readonlyACLPassword") + .getSolrZkClient(zkServer.getZkHost(), AbstractZkTestCase.TIMEOUT); zkClient.makePath("/solr", false, true); zkClient.close(); - zkClient = new SolrZkClientFactoryUsingCompletelyNewProviders("connectAndAllACLUsername", "connectAndAllACLPassword", - "readonlyACLUsername", "readonlyACLPassword").getSolrZkClient(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); - zkClient.create("/protectedCreateNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); - zkClient.makePath("/protectedMakePathNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); - zkClient.create(SecurityAwareZkACLProvider.SECURITY_ZNODE_PATH, "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); + zkClient = + new SolrZkClientFactoryUsingCompletelyNewProviders( + "connectAndAllACLUsername", + "connectAndAllACLPassword", + 
"readonlyACLUsername", + "readonlyACLPassword") + .getSolrZkClient(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); + zkClient.create( + "/protectedCreateNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); + zkClient.makePath( + "/protectedMakePathNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); + zkClient.create( + SecurityAwareZkACLProvider.SECURITY_ZNODE_PATH, + "content".getBytes(DATA_ENCODING), + CreateMode.PERSISTENT, + false); zkClient.close(); - - zkClient = new SolrZkClientFactoryUsingCompletelyNewProviders(null, null, - null, null).getSolrZkClient(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); - zkClient.getSolrZooKeeper().addAuthInfo("digest", ("connectAndAllACLUsername:connectAndAllACLPassword").getBytes(DATA_ENCODING)); - zkClient.create("/unprotectedCreateNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); - zkClient.makePath("/unprotectedMakePathNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); + + zkClient = + new SolrZkClientFactoryUsingCompletelyNewProviders(null, null, null, null) + .getSolrZkClient(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); + zkClient + .getSolrZooKeeper() + .addAuthInfo( + "digest", + ("connectAndAllACLUsername:connectAndAllACLPassword").getBytes(DATA_ENCODING)); + zkClient.create( + "/unprotectedCreateNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false); + zkClient.makePath( + "/unprotectedMakePathNode", + "content".getBytes(DATA_ENCODING), + CreateMode.PERSISTENT, + false); zkClient.close(); if (log.isInfoEnabled()) { log.info("####SETUP_END {}", getTestName()); } } - + @Override public void tearDown() throws Exception { zkServer.shutdown(); - + clearSecuritySystemProperties(); - + super.tearDown(); } - + @Test public void testNoCredentialsSolrZkClientFactoryUsingCompletelyNewProviders() throws Exception { - SolrZkClient zkClient = new SolrZkClientFactoryUsingCompletelyNewProviders(null, null, - null, null).getSolrZkClient(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); + SolrZkClient zkClient = + new SolrZkClientFactoryUsingCompletelyNewProviders(null, null, null, null) + .getSolrZkClient(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); try { - VMParamsZkACLAndCredentialsProvidersTest.doTest(zkClient, - false, false, false, false, false, - false, false, false, false, false); + VMParamsZkACLAndCredentialsProvidersTest.doTest( + zkClient, false, false, false, false, false, false, false, false, false, false); } finally { zkClient.close(); } } @Test - public void testWrongCredentialsSolrZkClientFactoryUsingCompletelyNewProviders() throws Exception { - SolrZkClient zkClient = new SolrZkClientFactoryUsingCompletelyNewProviders("connectAndAllACLUsername", "connectAndAllACLPasswordWrong", - null, null).getSolrZkClient(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); + public void testWrongCredentialsSolrZkClientFactoryUsingCompletelyNewProviders() + throws Exception { + SolrZkClient zkClient = + new SolrZkClientFactoryUsingCompletelyNewProviders( + "connectAndAllACLUsername", "connectAndAllACLPasswordWrong", null, null) + .getSolrZkClient(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); try { - VMParamsZkACLAndCredentialsProvidersTest.doTest(zkClient, - false, false, false, false, false, - false, false, false, false, false); + VMParamsZkACLAndCredentialsProvidersTest.doTest( + zkClient, false, false, false, false, false, false, false, false, false, false); } finally { zkClient.close(); } @@ -136,104 +164,122 
@@ public void testWrongCredentialsSolrZkClientFactoryUsingCompletelyNewProviders() @Test public void testAllCredentialsSolrZkClientFactoryUsingCompletelyNewProviders() throws Exception { - SolrZkClient zkClient = new SolrZkClientFactoryUsingCompletelyNewProviders("connectAndAllACLUsername", "connectAndAllACLPassword", - null, null).getSolrZkClient(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); + SolrZkClient zkClient = + new SolrZkClientFactoryUsingCompletelyNewProviders( + "connectAndAllACLUsername", "connectAndAllACLPassword", null, null) + .getSolrZkClient(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); try { - VMParamsZkACLAndCredentialsProvidersTest.doTest(zkClient, - true, true, true, true, true, - true, true, true, true, true); + VMParamsZkACLAndCredentialsProvidersTest.doTest( + zkClient, true, true, true, true, true, true, true, true, true, true); } finally { zkClient.close(); } } - + @Test - public void testReadonlyCredentialsSolrZkClientFactoryUsingCompletelyNewProviders() throws Exception { - SolrZkClient zkClient = new SolrZkClientFactoryUsingCompletelyNewProviders("readonlyACLUsername", "readonlyACLPassword", - null, null).getSolrZkClient(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); + public void testReadonlyCredentialsSolrZkClientFactoryUsingCompletelyNewProviders() + throws Exception { + SolrZkClient zkClient = + new SolrZkClientFactoryUsingCompletelyNewProviders( + "readonlyACLUsername", "readonlyACLPassword", null, null) + .getSolrZkClient(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); try { - VMParamsZkACLAndCredentialsProvidersTest.doTest(zkClient, - true, true, false, false, false, - false, false, false, false, false); + VMParamsZkACLAndCredentialsProvidersTest.doTest( + zkClient, true, true, false, false, false, false, false, false, false, false); } finally { zkClient.close(); } } @Test - public void testNoCredentialsSolrZkClientFactoryUsingVMParamsProvidersButWithDifferentVMParamsNames() throws Exception { + public void + testNoCredentialsSolrZkClientFactoryUsingVMParamsProvidersButWithDifferentVMParamsNames() + throws Exception { useNoCredentials(); - - SolrZkClient zkClient = new SolrZkClientUsingVMParamsProvidersButWithDifferentVMParamsNames(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); + + SolrZkClient zkClient = + new SolrZkClientUsingVMParamsProvidersButWithDifferentVMParamsNames( + zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); try { - VMParamsZkACLAndCredentialsProvidersTest.doTest(zkClient, - false, false, false, false, false, - false, false, false, false, false); + VMParamsZkACLAndCredentialsProvidersTest.doTest( + zkClient, false, false, false, false, false, false, false, false, false, false); } finally { zkClient.close(); } } @Test - public void testWrongCredentialsSolrZkClientFactoryUsingVMParamsProvidersButWithDifferentVMParamsNames() throws Exception { + public void + testWrongCredentialsSolrZkClientFactoryUsingVMParamsProvidersButWithDifferentVMParamsNames() + throws Exception { useWrongCredentials(); - - SolrZkClient zkClient = new SolrZkClientUsingVMParamsProvidersButWithDifferentVMParamsNames(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); + + SolrZkClient zkClient = + new SolrZkClientUsingVMParamsProvidersButWithDifferentVMParamsNames( + zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); try { - VMParamsZkACLAndCredentialsProvidersTest.doTest(zkClient, - false, false, false, false, false, - false, false, false, false, false); + VMParamsZkACLAndCredentialsProvidersTest.doTest( + zkClient, false, 
false, false, false, false, false, false, false, false, false); } finally { zkClient.close(); } } @Test - public void testAllCredentialsSolrZkClientFactoryUsingVMParamsProvidersButWithDifferentVMParamsNames() throws Exception { + public void + testAllCredentialsSolrZkClientFactoryUsingVMParamsProvidersButWithDifferentVMParamsNames() + throws Exception { useAllCredentials(); - - SolrZkClient zkClient = new SolrZkClientUsingVMParamsProvidersButWithDifferentVMParamsNames(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); + + SolrZkClient zkClient = + new SolrZkClientUsingVMParamsProvidersButWithDifferentVMParamsNames( + zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); try { - VMParamsZkACLAndCredentialsProvidersTest.doTest(zkClient, - true, true, true, true, true, - true, true, true, true, true); + VMParamsZkACLAndCredentialsProvidersTest.doTest( + zkClient, true, true, true, true, true, true, true, true, true, true); } finally { zkClient.close(); } } - + @Test - public void testReadonlyCredentialsSolrZkClientFactoryUsingVMParamsProvidersButWithDifferentVMParamsNames() throws Exception { + public void + testReadonlyCredentialsSolrZkClientFactoryUsingVMParamsProvidersButWithDifferentVMParamsNames() + throws Exception { useReadonlyCredentials(); - - SolrZkClient zkClient = new SolrZkClientUsingVMParamsProvidersButWithDifferentVMParamsNames(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); + + SolrZkClient zkClient = + new SolrZkClientUsingVMParamsProvidersButWithDifferentVMParamsNames( + zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); try { - VMParamsZkACLAndCredentialsProvidersTest.doTest(zkClient, - true, true, false, false, false, - false, false, false, false, false); + VMParamsZkACLAndCredentialsProvidersTest.doTest( + zkClient, true, true, false, false, false, false, false, false, false, false); } finally { zkClient.close(); } } - + private static class SolrZkClientFactoryUsingCompletelyNewProviders { - + final String digestUsername; final String digestPassword; final String digestReadonlyUsername; final String digestReadonlyPassword; - public SolrZkClientFactoryUsingCompletelyNewProviders(final String digestUsername, final String digestPassword, - final String digestReadonlyUsername, final String digestReadonlyPassword) { + public SolrZkClientFactoryUsingCompletelyNewProviders( + final String digestUsername, + final String digestPassword, + final String digestReadonlyUsername, + final String digestReadonlyPassword) { this.digestUsername = digestUsername; this.digestPassword = digestPassword; this.digestReadonlyUsername = digestReadonlyUsername; this.digestReadonlyPassword = digestReadonlyPassword; } - + public SolrZkClient getSolrZkClient(String zkServerAddress, int zkClientTimeout) { return new SolrZkClient(zkServerAddress, zkClientTimeout) { - + @Override protected ZkCredentialsProvider createZkCredentialsToAddAutomatically() { return new DefaultZkCredentialsProvider() { @@ -241,12 +287,13 @@ protected ZkCredentialsProvider createZkCredentialsToAddAutomatically() { protected Collection createCredentials() { List result = new ArrayList<>(); if (!StringUtils.isEmpty(digestUsername) && !StringUtils.isEmpty(digestPassword)) { - result.add(new ZkCredentials("digest", - (digestUsername + ":" + digestPassword).getBytes(StandardCharsets.UTF_8))); + result.add( + new ZkCredentials( + "digest", + (digestUsername + ":" + digestPassword).getBytes(StandardCharsets.UTF_8))); } return result; } - }; } @@ -255,7 +302,12 @@ public ZkACLProvider createZkACLProvider() { return new 
VMParamsAllAndReadonlyDigestZkACLProvider() { @Override protected List createNonSecurityACLsToAdd() { - return createACLsToAdd(true, digestUsername, digestPassword, digestReadonlyUsername, digestReadonlyPassword); + return createACLsToAdd( + true, + digestUsername, + digestPassword, + digestReadonlyUsername, + digestReadonlyPassword); } /** @@ -263,77 +315,144 @@ protected List createNonSecurityACLsToAdd() { */ @Override protected List createSecurityACLsToAdd() { - return createACLsToAdd(false, digestUsername, digestPassword, digestReadonlyUsername, digestReadonlyPassword); + return createACLsToAdd( + false, + digestUsername, + digestPassword, + digestReadonlyUsername, + digestReadonlyPassword); } }; } - }; } - } - - private static class SolrZkClientUsingVMParamsProvidersButWithDifferentVMParamsNames extends SolrZkClient { - - public SolrZkClientUsingVMParamsProvidersButWithDifferentVMParamsNames(String zkServerAddress, int zkClientTimeout) { + + private static class SolrZkClientUsingVMParamsProvidersButWithDifferentVMParamsNames + extends SolrZkClient { + + public SolrZkClientUsingVMParamsProvidersButWithDifferentVMParamsNames( + String zkServerAddress, int zkClientTimeout) { super(zkServerAddress, zkClientTimeout); } @Override protected ZkCredentialsProvider createZkCredentialsToAddAutomatically() { return new VMParamsSingleSetCredentialsDigestZkCredentialsProvider( - "alternative" + VMParamsSingleSetCredentialsDigestZkCredentialsProvider.DEFAULT_DIGEST_USERNAME_VM_PARAM_NAME, - "alternative" + VMParamsSingleSetCredentialsDigestZkCredentialsProvider.DEFAULT_DIGEST_PASSWORD_VM_PARAM_NAME); + "alternative" + + VMParamsSingleSetCredentialsDigestZkCredentialsProvider + .DEFAULT_DIGEST_USERNAME_VM_PARAM_NAME, + "alternative" + + VMParamsSingleSetCredentialsDigestZkCredentialsProvider + .DEFAULT_DIGEST_PASSWORD_VM_PARAM_NAME); } @Override public ZkACLProvider createZkACLProvider() { return new VMParamsAllAndReadonlyDigestZkACLProvider( - "alternative" + VMParamsSingleSetCredentialsDigestZkCredentialsProvider.DEFAULT_DIGEST_USERNAME_VM_PARAM_NAME, - "alternative" + VMParamsSingleSetCredentialsDigestZkCredentialsProvider.DEFAULT_DIGEST_PASSWORD_VM_PARAM_NAME, - "alternative" + VMParamsAllAndReadonlyDigestZkACLProvider.DEFAULT_DIGEST_READONLY_USERNAME_VM_PARAM_NAME, - "alternative" + VMParamsAllAndReadonlyDigestZkACLProvider.DEFAULT_DIGEST_READONLY_PASSWORD_VM_PARAM_NAME); + "alternative" + + VMParamsSingleSetCredentialsDigestZkCredentialsProvider + .DEFAULT_DIGEST_USERNAME_VM_PARAM_NAME, + "alternative" + + VMParamsSingleSetCredentialsDigestZkCredentialsProvider + .DEFAULT_DIGEST_PASSWORD_VM_PARAM_NAME, + "alternative" + + VMParamsAllAndReadonlyDigestZkACLProvider + .DEFAULT_DIGEST_READONLY_USERNAME_VM_PARAM_NAME, + "alternative" + + VMParamsAllAndReadonlyDigestZkACLProvider + .DEFAULT_DIGEST_READONLY_PASSWORD_VM_PARAM_NAME); } } - + public void useNoCredentials() { clearSecuritySystemProperties(); } - + public void useWrongCredentials() { clearSecuritySystemProperties(); - - System.setProperty("alternative" + VMParamsSingleSetCredentialsDigestZkCredentialsProvider.DEFAULT_DIGEST_USERNAME_VM_PARAM_NAME, "connectAndAllACLUsername"); - System.setProperty("alternative" + VMParamsSingleSetCredentialsDigestZkCredentialsProvider.DEFAULT_DIGEST_PASSWORD_VM_PARAM_NAME, "connectAndAllACLPasswordWrong"); + + System.setProperty( + "alternative" + + VMParamsSingleSetCredentialsDigestZkCredentialsProvider + .DEFAULT_DIGEST_USERNAME_VM_PARAM_NAME, + "connectAndAllACLUsername"); + System.setProperty( + 
"alternative" + + VMParamsSingleSetCredentialsDigestZkCredentialsProvider + .DEFAULT_DIGEST_PASSWORD_VM_PARAM_NAME, + "connectAndAllACLPasswordWrong"); } - + public void useAllCredentials() { clearSecuritySystemProperties(); - - System.setProperty("alternative" + VMParamsSingleSetCredentialsDigestZkCredentialsProvider.DEFAULT_DIGEST_USERNAME_VM_PARAM_NAME, "connectAndAllACLUsername"); - System.setProperty("alternative" + VMParamsSingleSetCredentialsDigestZkCredentialsProvider.DEFAULT_DIGEST_PASSWORD_VM_PARAM_NAME, "connectAndAllACLPassword"); + + System.setProperty( + "alternative" + + VMParamsSingleSetCredentialsDigestZkCredentialsProvider + .DEFAULT_DIGEST_USERNAME_VM_PARAM_NAME, + "connectAndAllACLUsername"); + System.setProperty( + "alternative" + + VMParamsSingleSetCredentialsDigestZkCredentialsProvider + .DEFAULT_DIGEST_PASSWORD_VM_PARAM_NAME, + "connectAndAllACLPassword"); } - + public void useReadonlyCredentials() { clearSecuritySystemProperties(); - System.setProperty("alternative" + VMParamsSingleSetCredentialsDigestZkCredentialsProvider.DEFAULT_DIGEST_USERNAME_VM_PARAM_NAME, "readonlyACLUsername"); - System.setProperty("alternative" + VMParamsSingleSetCredentialsDigestZkCredentialsProvider.DEFAULT_DIGEST_PASSWORD_VM_PARAM_NAME, "readonlyACLPassword"); + System.setProperty( + "alternative" + + VMParamsSingleSetCredentialsDigestZkCredentialsProvider + .DEFAULT_DIGEST_USERNAME_VM_PARAM_NAME, + "readonlyACLUsername"); + System.setProperty( + "alternative" + + VMParamsSingleSetCredentialsDigestZkCredentialsProvider + .DEFAULT_DIGEST_PASSWORD_VM_PARAM_NAME, + "readonlyACLPassword"); } - + public void setSecuritySystemProperties() { - System.setProperty("alternative" + VMParamsSingleSetCredentialsDigestZkCredentialsProvider.DEFAULT_DIGEST_USERNAME_VM_PARAM_NAME, "connectAndAllACLUsername"); - System.setProperty("alternative" + VMParamsSingleSetCredentialsDigestZkCredentialsProvider.DEFAULT_DIGEST_PASSWORD_VM_PARAM_NAME, "connectAndAllACLPassword"); - System.setProperty("alternative" + VMParamsAllAndReadonlyDigestZkACLProvider.DEFAULT_DIGEST_READONLY_USERNAME_VM_PARAM_NAME, "readonlyACLUsername"); - System.setProperty("alternative" + VMParamsAllAndReadonlyDigestZkACLProvider.DEFAULT_DIGEST_READONLY_PASSWORD_VM_PARAM_NAME, "readonlyACLPassword"); + System.setProperty( + "alternative" + + VMParamsSingleSetCredentialsDigestZkCredentialsProvider + .DEFAULT_DIGEST_USERNAME_VM_PARAM_NAME, + "connectAndAllACLUsername"); + System.setProperty( + "alternative" + + VMParamsSingleSetCredentialsDigestZkCredentialsProvider + .DEFAULT_DIGEST_PASSWORD_VM_PARAM_NAME, + "connectAndAllACLPassword"); + System.setProperty( + "alternative" + + VMParamsAllAndReadonlyDigestZkACLProvider + .DEFAULT_DIGEST_READONLY_USERNAME_VM_PARAM_NAME, + "readonlyACLUsername"); + System.setProperty( + "alternative" + + VMParamsAllAndReadonlyDigestZkACLProvider + .DEFAULT_DIGEST_READONLY_PASSWORD_VM_PARAM_NAME, + "readonlyACLPassword"); } - + public void clearSecuritySystemProperties() { - System.clearProperty("alternative" + VMParamsSingleSetCredentialsDigestZkCredentialsProvider.DEFAULT_DIGEST_USERNAME_VM_PARAM_NAME); - System.clearProperty("alternative" + VMParamsSingleSetCredentialsDigestZkCredentialsProvider.DEFAULT_DIGEST_PASSWORD_VM_PARAM_NAME); - System.clearProperty("alternative" + VMParamsAllAndReadonlyDigestZkACLProvider.DEFAULT_DIGEST_READONLY_USERNAME_VM_PARAM_NAME); - System.clearProperty("alternative" + VMParamsAllAndReadonlyDigestZkACLProvider.DEFAULT_DIGEST_READONLY_PASSWORD_VM_PARAM_NAME); + 
System.clearProperty( + "alternative" + + VMParamsSingleSetCredentialsDigestZkCredentialsProvider + .DEFAULT_DIGEST_USERNAME_VM_PARAM_NAME); + System.clearProperty( + "alternative" + + VMParamsSingleSetCredentialsDigestZkCredentialsProvider + .DEFAULT_DIGEST_PASSWORD_VM_PARAM_NAME); + System.clearProperty( + "alternative" + + VMParamsAllAndReadonlyDigestZkACLProvider + .DEFAULT_DIGEST_READONLY_USERNAME_VM_PARAM_NAME); + System.clearProperty( + "alternative" + + VMParamsAllAndReadonlyDigestZkACLProvider + .DEFAULT_DIGEST_READONLY_PASSWORD_VM_PARAM_NAME); } - } - diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java index 0059504d853..74325c9922d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java @@ -16,6 +16,19 @@ */ package org.apache.solr.cloud; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.anyBoolean; +import static org.mockito.Mockito.anyInt; +import static org.mockito.Mockito.anyLong; +import static org.mockito.Mockito.anyString; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.isNull; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Arrays; @@ -32,7 +45,6 @@ import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.function.Predicate; - import org.apache.http.client.HttpClient; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrResponse; @@ -84,27 +96,14 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.anyBoolean; -import static org.mockito.Mockito.anyInt; -import static org.mockito.Mockito.anyLong; -import static org.mockito.Mockito.anyString; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.isNull; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - public class OverseerCollectionConfigSetProcessorTest extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - + private static final String ADMIN_PATH = "/admin/cores"; private static final String COLLECTION_NAME = "mycollection"; private static final String CONFIG_NAME = "myconfig"; - + private static OverseerTaskQueue workQueueMock; private static OverseerTaskQueue stateUpdateQueueMock; private static Overseer overseerMock; @@ -127,8 +126,10 @@ public class OverseerCollectionConfigSetProcessorTest extends SolrTestCaseJ4 { private static CoreContainer coreContainerMock; private static UpdateShardHandler updateShardHandlerMock; private static HttpClient httpClientMock; + @SuppressWarnings("rawtypes") private static PlacementPluginFactory placementPluginFactoryMock; + private static SolrMetricsContext solrMetricsContextMock; private static ObjectCache objectCache; @@ -137,38 +138,50 @@ public class OverseerCollectionConfigSetProcessorTest extends 
SolrTestCaseJ4 { private final List replicas = new ArrayList<>(); private SolrResponse lastProcessMessageResult; - private OverseerCollectionConfigSetProcessorToBeTested underTest; - + private Thread thread; private Queue queue = new ArrayBlockingQueue<>(10); - private static class OverseerCollectionConfigSetProcessorToBeTested extends - OverseerCollectionConfigSetProcessor { - + private static class OverseerCollectionConfigSetProcessorToBeTested + extends OverseerCollectionConfigSetProcessor { - public OverseerCollectionConfigSetProcessorToBeTested(ZkStateReader zkStateReader, - String myId, HttpShardHandlerFactory shardHandlerFactory, + public OverseerCollectionConfigSetProcessorToBeTested( + ZkStateReader zkStateReader, + String myId, + HttpShardHandlerFactory shardHandlerFactory, String adminPath, - OverseerTaskQueue workQueue, DistributedMap runningMap, + OverseerTaskQueue workQueue, + DistributedMap runningMap, Overseer overseer, DistributedMap completedMap, DistributedMap failureMap, SolrMetricsContext solrMetricsContext) { - super(zkStateReader, myId, shardHandlerFactory, adminPath, new Stats(), overseer, new OverseerNodePrioritizer(zkStateReader, overseer, adminPath, shardHandlerFactory), workQueue, runningMap, completedMap, failureMap, solrMetricsContext); + super( + zkStateReader, + myId, + shardHandlerFactory, + adminPath, + new Stats(), + overseer, + new OverseerNodePrioritizer(zkStateReader, overseer, adminPath, shardHandlerFactory), + workQueue, + runningMap, + completedMap, + failureMap, + solrMetricsContext); } - + @Override protected LeaderStatus amILeader() { return LeaderStatus.YES; } - } @BeforeClass public static void setUpOnce() throws Exception { assumeWorkingMockito(); - + workQueueMock = mock(OverseerTaskQueue.class); stateUpdateQueueMock = mock(OverseerTaskQueue.class); runningMapMock = mock(DistributedMap.class); @@ -195,7 +208,7 @@ public static void setUpOnce() throws Exception { placementPluginFactoryMock = mock(PlacementPluginFactory.class); solrMetricsContextMock = mock(SolrMetricsContext.class); } - + @AfterClass public static void tearDownOnce() { workQueueMock = null; @@ -214,7 +227,8 @@ public static void tearDownOnce() { zkControllerMock = null; cloudDataProviderMock = null; clusterStateProviderMock = null; - stateManagerMock = null;; + stateManagerMock = null; + ; cloudManagerMock = null; distribStateManagerMock = null; coreContainerMock = null; @@ -223,7 +237,7 @@ public static void tearDownOnce() { placementPluginFactoryMock = null; solrMetricsContextMock = null; } - + @Before public void setUp() throws Exception { super.setUp(); @@ -260,7 +274,7 @@ public void setUp() throws Exception { collectionsSet.clear(); replicas.clear(); } - + @After public void tearDown() throws Exception { stopComponentUnderTest(); @@ -268,133 +282,172 @@ public void tearDown() throws Exception { } @SuppressWarnings("unchecked") - protected Set commonMocks(int liveNodesCount, boolean distributedClusterStateUpdates) throws Exception { + protected Set commonMocks(int liveNodesCount, boolean distributedClusterStateUpdates) + throws Exception { when(shardHandlerFactoryMock.getShardHandler()).thenReturn(shardHandlerMock); - when(workQueueMock.peekTopN(anyInt(), any(), anyLong())).thenAnswer(invocation -> { - Object result; - int count = 0; - while ((result = queue.peek()) == null) { - Thread.sleep(1000); - count++; - if (count > 1) return null; - } - - return Arrays.asList(result); - }); - - when(workQueueMock.getTailId()).thenAnswer(invocation -> { - QueueEvent result = null; 
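// A minimal Mockito sketch of the thenAnswer(...) idiom commonMocks() relies on in
// the surrounding hunk: stubbing a mocked queue's peek() against a real in-memory
// queue so every call reflects the live state. QueueLike is a hypothetical stand-in
// here, not a Solr type.
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;

public class ThenAnswerSketch {
  interface QueueLike { Object peek(); } // hypothetical stand-in for OverseerTaskQueue

  public static void main(String[] args) {
    Queue<Object> backing = new ArrayBlockingQueue<>(10);
    QueueLike queueMock = mock(QueueLike.class);

    // The lambda runs on every invocation, so the stub tracks the backing queue.
    when(queueMock.peek()).thenAnswer(invocation -> backing.peek());

    backing.add("event-1");
    System.out.println(queueMock.peek()); // -> event-1
  }
}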
- Iterator iter = queue.iterator(); - while(iter.hasNext()) { - result = iter.next(); - } - return result==null ? null : result.getId(); - }); + when(workQueueMock.peekTopN(anyInt(), any(), anyLong())) + .thenAnswer( + invocation -> { + Object result; + int count = 0; + while ((result = queue.peek()) == null) { + Thread.sleep(1000); + count++; + if (count > 1) return null; + } - when(workQueueMock.peek(true)).thenAnswer(invocation -> { - Object result; - while ((result = queue.peek()) == null) { - Thread.sleep(1000); - } - return result; - }); + return Arrays.asList(result); + }); - doAnswer(invocation -> { - queue.remove(invocation.getArgument(0)); - return null; - }).when(workQueueMock).remove(any(QueueEvent.class)); + when(workQueueMock.getTailId()) + .thenAnswer( + invocation -> { + QueueEvent result = null; + Iterator iter = queue.iterator(); + while (iter.hasNext()) { + result = iter.next(); + } + return result == null ? null : result.getId(); + }); + + when(workQueueMock.peek(true)) + .thenAnswer( + invocation -> { + Object result; + while ((result = queue.peek()) == null) { + Thread.sleep(1000); + } + return result; + }); - when(workQueueMock.poll()).thenAnswer(invocation -> { - queue.poll(); - return null; - }); + doAnswer( + invocation -> { + queue.remove(invocation.getArgument(0)); + return null; + }) + .when(workQueueMock) + .remove(any(QueueEvent.class)); + + when(workQueueMock.poll()) + .thenAnswer( + invocation -> { + queue.poll(); + return null; + }); when(zkStateReaderMock.getZkClient()).thenReturn(solrZkClientMock); when(zkStateReaderMock.getClusterState()).thenReturn(clusterStateMock); when(zkStateReaderMock.getAliases()).thenReturn(Aliases.EMPTY); - doAnswer(invocation -> { - Predicate p = invocation.getArgument(3); - p.test(clusterStateMock.getCollection(invocation.getArgument(0))); - return null; - }).when(zkStateReaderMock).waitForState(anyString(), anyLong(), any(), any(Predicate.class)); + doAnswer( + invocation -> { + Predicate p = invocation.getArgument(3); + p.test(clusterStateMock.getCollection(invocation.getArgument(0))); + return null; + }) + .when(zkStateReaderMock) + .waitForState(anyString(), anyLong(), any(), any(Predicate.class)); when(clusterStateMock.getCollection(anyString())).thenCallRealMethod(); - when(clusterStateMock.getCollectionOrNull(anyString())).thenAnswer(invocation -> { - String key = invocation.getArgument(0); - if (!collectionsSet.containsKey(key)) return null; - DocCollection docCollection = collectionsSet.get(key).get(); - Map> slices = new HashMap<>(); - for (ZkNodeProps replica : replicas) { - if (!key.equals(replica.getStr(ZkStateReader.COLLECTION_PROP))) continue; - - String slice = replica.getStr(ZkStateReader.SHARD_ID_PROP); - if (!slices.containsKey(slice)) slices.put(slice, new HashMap<>()); - String replicaName = replica.getStr(ZkStateReader.CORE_NAME_PROP); - slices.get(slice).put(replicaName, new Replica(replicaName, replica.getProperties(), docCollection.getName(), slice)); - } + when(clusterStateMock.getCollectionOrNull(anyString())) + .thenAnswer( + invocation -> { + String key = invocation.getArgument(0); + if (!collectionsSet.containsKey(key)) return null; + DocCollection docCollection = collectionsSet.get(key).get(); + Map> slices = new HashMap<>(); + for (ZkNodeProps replica : replicas) { + if (!key.equals(replica.getStr(ZkStateReader.COLLECTION_PROP))) continue; + + String slice = replica.getStr(ZkStateReader.SHARD_ID_PROP); + if (!slices.containsKey(slice)) slices.put(slice, new HashMap<>()); + String replicaName = 
replica.getStr(ZkStateReader.CORE_NAME_PROP); + slices + .get(slice) + .put( + replicaName, + new Replica( + replicaName, replica.getProperties(), docCollection.getName(), slice)); + } - Map slicesMap = new HashMap<>(); - for (Map.Entry> entry : slices.entrySet()) { - slicesMap.put(entry.getKey(), new Slice(entry.getKey(), entry.getValue(), null,docCollection.getName())); - } + Map slicesMap = new HashMap<>(); + for (Map.Entry> entry : slices.entrySet()) { + slicesMap.put( + entry.getKey(), + new Slice(entry.getKey(), entry.getValue(), null, docCollection.getName())); + } - return docCollection.copyWithSlices(slicesMap); - }); + return docCollection.copyWithSlices(slicesMap); + }); final Set liveNodes = new HashSet<>(); for (int i = 0; i < liveNodesCount; i++) { final String address = "localhost:" + (8963 + i) + "_solr"; liveNodes.add(address); - - when(zkStateReaderMock.getBaseUrlForNodeName(address)).thenAnswer(invocation -> address.replaceAll("_", "/")); + + when(zkStateReaderMock.getBaseUrlForNodeName(address)) + .thenAnswer(invocation -> address.replaceAll("_", "/")); } when(solrZkClientMock.getZkClientTimeout()).thenReturn(30000); - - when(clusterStateMock.hasCollection(anyString())).thenAnswer(invocation -> { - String key = invocation.getArgument(0); - return collectionsSet.containsKey(key); - }); + + when(clusterStateMock.hasCollection(anyString())) + .thenAnswer( + invocation -> { + String key = invocation.getArgument(0); + return collectionsSet.containsKey(key); + }); when(clusterStateMock.getLiveNodes()).thenReturn(liveNodes); - when(solrZkClientMock.setData(anyString(), any(), anyInt(), anyBoolean())).then(invocation -> { - System.out.println("set data: " + invocation.getArgument(0) + " " + invocation.getArgument(1)); - if (invocation.getArgument(1) == null) { - zkClientData.put(invocation.getArgument(0), new byte[0]); - } else { - zkClientData.put(invocation.getArgument(0), invocation.getArgument(1)); - } - return null; - }); - - when(solrZkClientMock.getData(anyString(), any(), any(), anyBoolean())).thenAnswer(invocation -> { - byte[] data = zkClientData.get(invocation.getArgument(0)); - if (data == null || data.length == 0) { - return null; - } - return data; - }); - - when(solrZkClientMock.create(any(), any(), any(), anyBoolean())).thenAnswer(invocation -> { - zkClientData.put(invocation.getArgument(0), invocation.getArgument(1)); - return invocation.getArgument(0); - }); - - when(solrZkClientMock.exists(any(String.class), anyBoolean())).thenAnswer(invocation -> { - String key = invocation.getArgument(0); - return zkClientData.containsKey(key); - }); + when(solrZkClientMock.setData(anyString(), any(), anyInt(), anyBoolean())) + .then( + invocation -> { + System.out.println( + "set data: " + invocation.getArgument(0) + " " + invocation.getArgument(1)); + if (invocation.getArgument(1) == null) { + zkClientData.put(invocation.getArgument(0), new byte[0]); + } else { + zkClientData.put(invocation.getArgument(0), invocation.getArgument(1)); + } + return null; + }); + + when(solrZkClientMock.getData(anyString(), any(), any(), anyBoolean())) + .thenAnswer( + invocation -> { + byte[] data = zkClientData.get(invocation.getArgument(0)); + if (data == null || data.length == 0) { + return null; + } + return data; + }); + + when(solrZkClientMock.create(any(), any(), any(), anyBoolean())) + .thenAnswer( + invocation -> { + zkClientData.put(invocation.getArgument(0), invocation.getArgument(1)); + return invocation.getArgument(0); + }); + + when(solrZkClientMock.exists(any(String.class), 
anyBoolean())) + .thenAnswer( + invocation -> { + String key = invocation.getArgument(0); + return zkClientData.containsKey(key); + }); when(overseerMock.getZkController()).thenReturn(zkControllerMock); when(overseerMock.getSolrCloudManager()).thenReturn(cloudDataProviderMock); when(overseerMock.getCoreContainer()).thenReturn(coreContainerMock); - when(overseerMock.getDistributedClusterStateUpdater()).thenReturn(distributedClusterStateUpdater); - when(distributedClusterStateUpdater.createStateChangeRecorder(any(), anyBoolean())).thenReturn(stateChangeRecorder); + when(overseerMock.getDistributedClusterStateUpdater()) + .thenReturn(distributedClusterStateUpdater); + when(distributedClusterStateUpdater.createStateChangeRecorder(any(), anyBoolean())) + .thenReturn(stateChangeRecorder); when(coreContainerMock.getUpdateShardHandler()).thenReturn(updateShardHandlerMock); when(coreContainerMock.getPlacementPluginFactory()).thenReturn(placementPluginFactoryMock); - when(coreContainerMock.getConfigSetService()).thenReturn(new ZkConfigSetService(solrZkClientMock)); + when(coreContainerMock.getConfigSetService()) + .thenReturn(new ZkConfigSetService(solrZkClientMock)); when(updateShardHandlerMock.getDefaultHttpClient()).thenReturn(httpClientMock); - + when(zkControllerMock.getSolrCloudManager()).thenReturn(cloudDataProviderMock); when(cloudDataProviderMock.getClusterStateProvider()).thenReturn(clusterStateProviderMock); when(clusterStateProviderMock.getClusterState()).thenReturn(clusterStateMock); @@ -403,155 +456,197 @@ protected Set commonMocks(int liveNodesCount, boolean distributedCluster when(cloudManagerMock.getDistribStateManager()).thenReturn(distribStateManagerMock); Mockito.doAnswer( - new Answer() { - public Void answer(InvocationOnMock invocation) { - System.out.println("set data: " + invocation.getArgument(0) + " " + invocation.getArgument(1)); - if (invocation.getArgument(1) == null) { - zkClientData.put(invocation.getArgument(0), new byte[0]); - } else { - zkClientData.put(invocation.getArgument(0), invocation.getArgument(1)); - } - - return null; - }}).when(distribStateManagerMock).setData(anyString(), any(), anyInt()); - - when(distribStateManagerMock.getData(anyString(), any())).thenAnswer(invocation -> { - byte[] data = zkClientData.get(invocation.getArgument(0)); - if (data == null || data.length == 0) { - return null; - } - return new VersionedData(-1, data, CreateMode.PERSISTENT, ""); - - }); - - when(distribStateManagerMock.createData(any(), any(), any())).thenAnswer(invocation -> { - System.out.println("set data: " + invocation.getArgument(0) + " " + invocation.getArgument(1)); - if (invocation.getArgument(1) == null) { - zkClientData.put(invocation.getArgument(0), new byte[0]); - } else { - zkClientData.put(invocation.getArgument(0), invocation.getArgument(1)); - } - return null; - }); - + new Answer() { + public Void answer(InvocationOnMock invocation) { + System.out.println( + "set data: " + invocation.getArgument(0) + " " + invocation.getArgument(1)); + if (invocation.getArgument(1) == null) { + zkClientData.put(invocation.getArgument(0), new byte[0]); + } else { + zkClientData.put(invocation.getArgument(0), invocation.getArgument(1)); + } + + return null; + } + }) + .when(distribStateManagerMock) + .setData(anyString(), any(), anyInt()); + + when(distribStateManagerMock.getData(anyString(), any())) + .thenAnswer( + invocation -> { + byte[] data = zkClientData.get(invocation.getArgument(0)); + if (data == null || data.length == 0) { + return null; + } + return new 
VersionedData(-1, data, CreateMode.PERSISTENT, ""); + }); + + when(distribStateManagerMock.createData(any(), any(), any())) + .thenAnswer( + invocation -> { + System.out.println( + "set data: " + invocation.getArgument(0) + " " + invocation.getArgument(1)); + if (invocation.getArgument(1) == null) { + zkClientData.put(invocation.getArgument(0), new byte[0]); + } else { + zkClientData.put(invocation.getArgument(0), invocation.getArgument(1)); + } + return null; + }); + when(distribStateManagerMock.hasData(anyString())) - .then(invocation -> zkClientData.containsKey(invocation.getArgument(0)) && zkClientData.get(invocation.getArgument(0)).length > 0); - + .then( + invocation -> + zkClientData.containsKey(invocation.getArgument(0)) + && zkClientData.get(invocation.getArgument(0)).length > 0); + Mockito.doAnswer( - new Answer() { - public Void answer(InvocationOnMock invocation) { - System.out.println("set data: " + invocation.getArgument(0) + " " + new byte[0]); - zkClientData.put(invocation.getArgument(0), new byte[0]); - return null; - }}).when(distribStateManagerMock).makePath(anyString()); - - when(solrZkClientMock.exists(any(String.class), isNull(), anyBoolean())).thenAnswer(invocation -> { - String key = invocation.getArgument(0); - if (zkClientData.containsKey(key)) { - return new Stat(); - } else { - return null; - } - }); - + new Answer() { + public Void answer(InvocationOnMock invocation) { + System.out.println("set data: " + invocation.getArgument(0) + " " + new byte[0]); + zkClientData.put(invocation.getArgument(0), new byte[0]); + return null; + } + }) + .when(distribStateManagerMock) + .makePath(anyString()); + + when(solrZkClientMock.exists(any(String.class), isNull(), anyBoolean())) + .thenAnswer( + invocation -> { + String key = invocation.getArgument(0); + if (zkClientData.containsKey(key)) { + return new Stat(); + } else { + return null; + } + }); + when(cloudManagerMock.getClusterStateProvider()).thenReturn(clusterStateProviderMock); when(cloudManagerMock.getTimeSource()).thenReturn(new TimeSource.NanoTimeSource()); when(cloudManagerMock.getDistribStateManager()).thenReturn(distribStateManagerMock); - + when(overseerMock.getSolrCloudManager()).thenReturn(cloudManagerMock); - + when(overseerMock.getStateUpdateQueue(any())).thenReturn(stateUpdateQueueMock); when(overseerMock.getStateUpdateQueue()).thenReturn(stateUpdateQueueMock); - // Selecting the cluster state update strategy: Overseer when distributedClusterStateUpdates is false, otherwise distributed updates. - when(distributedClusterStateUpdater.isDistributedStateUpdate()).thenReturn(distributedClusterStateUpdates); + // Selecting the cluster state update strategy: Overseer when distributedClusterStateUpdates is + // false, otherwise distributed updates. + when(distributedClusterStateUpdater.isDistributedStateUpdate()) + .thenReturn(distributedClusterStateUpdates); if (distributedClusterStateUpdates) { - // Mocking for state change via distributed updates. There are two types of updates done in CreateCollectionCmd: + // Mocking for state change via distributed updates. There are two types of updates done in + // CreateCollectionCmd: // 1. 
Single line recording and executing a command Mockito.doAnswer( - new Answer() { - public Void answer(InvocationOnMock invocation) { - handleCreateCollMessageProps(invocation.getArgument(1)); - return null; - }}).when(distributedClusterStateUpdater).doSingleStateUpdate(any(), any(), any(), any()); + new Answer() { + public Void answer(InvocationOnMock invocation) { + handleCreateCollMessageProps(invocation.getArgument(1)); + return null; + } + }) + .when(distributedClusterStateUpdater) + .doSingleStateUpdate(any(), any(), any(), any()); // 2. Recording a command to be executed as part of a batch of commands Mockito.doAnswer( - new Answer() { - public Void answer(InvocationOnMock invocation) { - handleCreateCollMessageProps(invocation.getArgument(1)); - return null; - }}).when(stateChangeRecorder).record(any(), any()); + new Answer() { + public Void answer(InvocationOnMock invocation) { + handleCreateCollMessageProps(invocation.getArgument(1)); + return null; + } + }) + .when(stateChangeRecorder) + .record(any(), any()); } else { // Mocking for state change via the Overseer queue Mockito.doAnswer( - new Answer() { - public Void answer(InvocationOnMock invocation) { - try { - handleCreateCollMessage(invocation.getArgument(0)); - stateUpdateQueueMock.offer(invocation.getArgument(0)); - } catch (KeeperException e) { - throw new RuntimeException(e); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - return null; - } - }).when(overseerMock).offerStateUpdate(any()); + new Answer() { + public Void answer(InvocationOnMock invocation) { + try { + handleCreateCollMessage(invocation.getArgument(0)); + stateUpdateQueueMock.offer(invocation.getArgument(0)); + } catch (KeeperException e) { + throw new RuntimeException(e); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + return null; + } + }) + .when(overseerMock) + .offerStateUpdate(any()); } when(zkControllerMock.getZkClient()).thenReturn(solrZkClientMock); - + when(cloudManagerMock.getDistribStateManager()).thenReturn(distribStateManagerMock); Mockito.doAnswer( - new Answer() { - public Void answer(InvocationOnMock invocation) { - System.out.println("set data: " + invocation.getArgument(0) + " " + invocation.getArgument(1)); - if (invocation.getArgument(1) == null) { - zkClientData.put(invocation.getArgument(0), new byte[0]); - } else { - zkClientData.put(invocation.getArgument(0), invocation.getArgument(1)); - } - - return null; - }}).when(distribStateManagerMock).setData(anyString(), any(), anyInt()); - - when(distribStateManagerMock.getData(anyString(), any())).thenAnswer(invocation -> { - byte[] data = zkClientData.get(invocation.getArgument(0)); - if (data == null || data.length == 0) { - return null; - } - return new VersionedData(-1, data, CreateMode.PERSISTENT, ""); - - }); - - when(distribStateManagerMock.createData(any(), any(), any())).thenAnswer(invocation -> { - System.out.println("set data: " + invocation.getArgument(0) + " " + invocation.getArgument(1)); - if (invocation.getArgument(1) == null) { - zkClientData.put(invocation.getArgument(0), new byte[0]); - } else { - zkClientData.put(invocation.getArgument(0), invocation.getArgument(1)); - } - return null; - }); - + new Answer() { + public Void answer(InvocationOnMock invocation) { + System.out.println( + "set data: " + invocation.getArgument(0) + " " + invocation.getArgument(1)); + if (invocation.getArgument(1) == null) { + zkClientData.put(invocation.getArgument(0), new byte[0]); + } else { + 
zkClientData.put(invocation.getArgument(0), invocation.getArgument(1)); + } + + return null; + } + }) + .when(distribStateManagerMock) + .setData(anyString(), any(), anyInt()); + + when(distribStateManagerMock.getData(anyString(), any())) + .thenAnswer( + invocation -> { + byte[] data = zkClientData.get(invocation.getArgument(0)); + if (data == null || data.length == 0) { + return null; + } + return new VersionedData(-1, data, CreateMode.PERSISTENT, ""); + }); + + when(distribStateManagerMock.createData(any(), any(), any())) + .thenAnswer( + invocation -> { + System.out.println( + "set data: " + invocation.getArgument(0) + " " + invocation.getArgument(1)); + if (invocation.getArgument(1) == null) { + zkClientData.put(invocation.getArgument(0), new byte[0]); + } else { + zkClientData.put(invocation.getArgument(0), invocation.getArgument(1)); + } + return null; + }); + when(distribStateManagerMock.hasData(anyString())) - .then(invocation -> zkClientData.containsKey(invocation.getArgument(0)) && zkClientData.get(invocation.getArgument(0)).length > 0); - + .then( + invocation -> + zkClientData.containsKey(invocation.getArgument(0)) + && zkClientData.get(invocation.getArgument(0)).length > 0); + Mockito.doAnswer( - new Answer() { - public Void answer(InvocationOnMock invocation) { - System.out.println("set data: " + invocation.getArgument(0) + " " + new byte[0]); - zkClientData.put(invocation.getArgument(0), new byte[0]); - return null; - }}).when(distribStateManagerMock).makePath(anyString()); + new Answer() { + public Void answer(InvocationOnMock invocation) { + System.out.println("set data: " + invocation.getArgument(0) + " " + new byte[0]); + zkClientData.put(invocation.getArgument(0), new byte[0]); + return null; + } + }) + .when(distribStateManagerMock) + .makePath(anyString()); - zkClientData.put("/configs/"+CONFIG_NAME, new byte[1]); - zkClientData.put("/configs/"+CONFIG_NAME+"/solrconfig.xml", new byte[1]); + zkClientData.put("/configs/" + CONFIG_NAME, new byte[1]); + zkClientData.put("/configs/" + CONFIG_NAME + "/solrconfig.xml", new byte[1]); - when(solrMetricsContextMock.getChildContext(any(Object.class))).thenReturn(solrMetricsContextMock); + when(solrMetricsContextMock.getChildContext(any(Object.class))) + .thenReturn(solrMetricsContextMock); return liveNodes; } @@ -566,23 +661,29 @@ private void handleCreateCollMessageProps(ZkNodeProps props) { if (CollectionParams.CollectionAction.CREATE.isEqual(props.getStr("operation"))) { String collName = props.getStr("name"); if (props.containsKey(CollectionAdminParams.COLL_CONF)) { - String configName = (String) props.getProperties().remove(CollectionAdminParams.COLL_CONF); + String configName = + (String) props.getProperties().remove(CollectionAdminParams.COLL_CONF); props.getProperties().put(ZkStateReader.CONFIGNAME_PROP, configName); } - if (collName != null) collectionsSet.put(collName, new ClusterState.CollectionRef( - new DocCollection(collName, new HashMap<>(), props.getProperties(), DocRouter.DEFAULT))); + if (collName != null) + collectionsSet.put( + collName, + new ClusterState.CollectionRef( + new DocCollection( + collName, new HashMap<>(), props.getProperties(), DocRouter.DEFAULT))); } if (CollectionParams.CollectionAction.ADDREPLICA.isEqual(props.getStr("operation"))) { replicas.add(props); } - } catch (Exception e) {} + } catch (Exception e) { + } } protected void startComponentUnderTest() { thread = new Thread(underTest); thread.start(); } - + protected void stopComponentUnderTest() throws Exception { if (null != underTest) 
{ underTest.close(); @@ -595,139 +696,174 @@ protected void stopComponentUnderTest() throws Exception { } } - protected void issueCreateJob(Integer numberOfSlices, - Integer replicationFactor, List createNodeList, boolean sendCreateNodeList, boolean createNodeSetShuffle) { - Map propMap = Utils.makeMap( - (Object) Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(), - ZkStateReader.REPLICATION_FACTOR, replicationFactor.toString(), - "name", COLLECTION_NAME, - "collection.configName", CONFIG_NAME, - CollectionHandlingUtils.NUM_SLICES, numberOfSlices.toString() - ); + protected void issueCreateJob( + Integer numberOfSlices, + Integer replicationFactor, + List createNodeList, + boolean sendCreateNodeList, + boolean createNodeSetShuffle) { + Map propMap = + Utils.makeMap( + (Object) Overseer.QUEUE_OPERATION, + CollectionParams.CollectionAction.CREATE.toLower(), + ZkStateReader.REPLICATION_FACTOR, + replicationFactor.toString(), + "name", + COLLECTION_NAME, + "collection.configName", + CONFIG_NAME, + CollectionHandlingUtils.NUM_SLICES, + numberOfSlices.toString()); if (sendCreateNodeList) { - propMap.put(CollectionHandlingUtils.CREATE_NODE_SET, - (createNodeList != null)?StrUtils.join(createNodeList, ','):null); - if (CollectionHandlingUtils.CREATE_NODE_SET_SHUFFLE_DEFAULT != createNodeSetShuffle || random().nextBoolean()) { + propMap.put( + CollectionHandlingUtils.CREATE_NODE_SET, + (createNodeList != null) ? StrUtils.join(createNodeList, ',') : null); + if (CollectionHandlingUtils.CREATE_NODE_SET_SHUFFLE_DEFAULT != createNodeSetShuffle + || random().nextBoolean()) { propMap.put(CollectionHandlingUtils.CREATE_NODE_SET_SHUFFLE, createNodeSetShuffle); } } ZkNodeProps props = new ZkNodeProps(propMap); - QueueEvent qe = new QueueEvent("id", Utils.toJSON(props), null){ - @Override - public void setBytes(byte[] bytes) { - lastProcessMessageResult = OverseerSolrResponseSerializer.deserialize(bytes); - } - }; + QueueEvent qe = + new QueueEvent("id", Utils.toJSON(props), null) { + @Override + public void setBytes(byte[] bytes) { + lastProcessMessageResult = OverseerSolrResponseSerializer.deserialize(bytes); + } + }; queue.add(qe); } - + protected void verifySubmitCaptures( - Integer numberOfSlices, Integer numberOfReplica, Collection createNodes, boolean dontShuffleCreateNodeSet) { + Integer numberOfSlices, + Integer numberOfReplica, + Collection createNodes, + boolean dontShuffleCreateNodeSet) { List coreNames = new ArrayList<>(); - Map> sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMap = new HashMap<>(); - List nodeUrlWithoutProtocolPartForLiveNodes = new ArrayList<>( - createNodes.size()); + Map> + sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMap = new HashMap<>(); + List nodeUrlWithoutProtocolPartForLiveNodes = new ArrayList<>(createNodes.size()); for (String nodeName : createNodes) { String nodeUrlWithoutProtocolPart = nodeName.replaceAll("_", "/"); - if (nodeUrlWithoutProtocolPart.startsWith("http://")) nodeUrlWithoutProtocolPart = nodeUrlWithoutProtocolPart - .substring(7); + if (nodeUrlWithoutProtocolPart.startsWith("http://")) + nodeUrlWithoutProtocolPart = nodeUrlWithoutProtocolPart.substring(7); nodeUrlWithoutProtocolPartForLiveNodes.add(nodeUrlWithoutProtocolPart); } - final Map> shard_TO_coreNames_map = new HashMap<>(); - final Map coreName_TO_nodeUrlWithoutProtocolPartForLiveNodes_map = new HashMap<>(); + final Map> shard_TO_coreNames_map = new HashMap<>(); + final Map coreName_TO_nodeUrlWithoutProtocolPartForLiveNodes_map = + new 
HashMap<>(); ArgumentCaptor shardRequestCaptor = ArgumentCaptor.forClass(ShardRequest.class); - ArgumentCaptor nodeUrlsWithoutProtocolPartCaptor = ArgumentCaptor.forClass(String.class); - ArgumentCaptor paramsCaptor = ArgumentCaptor.forClass(ModifiableSolrParams.class); + ArgumentCaptor nodeUrlsWithoutProtocolPartCaptor = + ArgumentCaptor.forClass(String.class); + ArgumentCaptor paramsCaptor = + ArgumentCaptor.forClass(ModifiableSolrParams.class); verify(shardHandlerMock, times(numberOfReplica * numberOfSlices)) - .submit(shardRequestCaptor.capture(), nodeUrlsWithoutProtocolPartCaptor.capture(), paramsCaptor.capture()); + .submit( + shardRequestCaptor.capture(), + nodeUrlsWithoutProtocolPartCaptor.capture(), + paramsCaptor.capture()); for (int i = 0; i < shardRequestCaptor.getAllValues().size(); i++) { ShardRequest shardRequest = shardRequestCaptor.getAllValues().get(i); - String nodeUrlsWithoutProtocolPartCapture = nodeUrlsWithoutProtocolPartCaptor.getAllValues().get(i); + String nodeUrlsWithoutProtocolPartCapture = + nodeUrlsWithoutProtocolPartCaptor.getAllValues().get(i); ModifiableSolrParams params = paramsCaptor.getAllValues().get(i); - assertEquals(CoreAdminAction.CREATE.toString(), - shardRequest.params.get(CoreAdminParams.ACTION)); + assertEquals( + CoreAdminAction.CREATE.toString(), shardRequest.params.get(CoreAdminParams.ACTION)); // assertEquals(shardRequest.params, submitCapture.params); String coreName = shardRequest.params.get(CoreAdminParams.NAME); assertTrue("Core with name " + coreName + " created twice", coreNames.add(coreName)); - shard_TO_coreNames_map.computeIfAbsent(shardRequest.params.get(CoreAdminParams.SHARD), shard -> new HashSet<>()).add(coreName); - assertEquals(CONFIG_NAME, - shardRequest.params.get("collection.configName")); - assertEquals(COLLECTION_NAME, - shardRequest.params.get(CoreAdminParams.COLLECTION)); - assertEquals(numberOfSlices.toString(), - shardRequest.params.get(ZkStateReader.NUM_SHARDS_PROP)); + shard_TO_coreNames_map + .computeIfAbsent(shardRequest.params.get(CoreAdminParams.SHARD), shard -> new HashSet<>()) + .add(coreName); + assertEquals(CONFIG_NAME, shardRequest.params.get("collection.configName")); + assertEquals(COLLECTION_NAME, shardRequest.params.get(CoreAdminParams.COLLECTION)); + assertEquals( + numberOfSlices.toString(), shardRequest.params.get(ZkStateReader.NUM_SHARDS_PROP)); assertEquals(ADMIN_PATH, shardRequest.params.get("qt")); assertEquals(1, shardRequest.purpose); assertEquals(1, shardRequest.shards.length); - assertEquals(nodeUrlsWithoutProtocolPartCapture, - shardRequest.shards[0]); - assertTrue("Shard " + coreName + " created on wrong node " - + shardRequest.shards[0], - nodeUrlWithoutProtocolPartForLiveNodes - .contains(shardRequest.shards[0])); + assertEquals(nodeUrlsWithoutProtocolPartCapture, shardRequest.shards[0]); + assertTrue( + "Shard " + coreName + " created on wrong node " + shardRequest.shards[0], + nodeUrlWithoutProtocolPartForLiveNodes.contains(shardRequest.shards[0])); coreName_TO_nodeUrlWithoutProtocolPartForLiveNodes_map.put(coreName, shardRequest.shards[0]); assertEquals(shardRequest.shards, shardRequest.actualShards); - + String sliceName = shardRequest.params.get(CoreAdminParams.SHARD); - if (!sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMap - .containsKey(sliceName)) { + if (!sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMap.containsKey(sliceName)) { sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMap.put( sliceName, new HashMap<>()); } - Map 
<String, Integer> nodeUrlsWithoutProtocolPartToNumberOfShardsRunningMap = sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMap - .get(sliceName); + Map<String, Integer> nodeUrlsWithoutProtocolPartToNumberOfShardsRunningMap = + sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMap.get(sliceName); Integer existingCount; - nodeUrlsWithoutProtocolPartToNumberOfShardsRunningMap - .put( - shardRequest.shards[0], - ((existingCount = nodeUrlsWithoutProtocolPartToNumberOfShardsRunningMap - .get(shardRequest.shards[0])) == null) ? 1 - : (existingCount + 1)); + nodeUrlsWithoutProtocolPartToNumberOfShardsRunningMap.put( + shardRequest.shards[0], + ((existingCount = + nodeUrlsWithoutProtocolPartToNumberOfShardsRunningMap.get( + shardRequest.shards[0])) + == null) + ? 1 + : (existingCount + 1)); } - + assertEquals(numberOfSlices * numberOfReplica, coreNames.size()); - assertEquals("Wrong number of shards", numberOfSlices.intValue(), shard_TO_coreNames_map.size()); + assertEquals( + "Wrong number of shards", numberOfSlices.intValue(), shard_TO_coreNames_map.size()); for (Map.Entry<String, Set<String>> entry : shard_TO_coreNames_map.entrySet()) { - assertEquals("Wrong number of cores for shard " + entry.getKey(), numberOfReplica.intValue(), entry.getValue().size()); + assertEquals( + "Wrong number of cores for shard " + entry.getKey(), + numberOfReplica.intValue(), + entry.getValue().size()); Set<String> foundNodeNames = new HashSet<>(numberOfReplica); for (String coreName : entry.getValue()) { String foundNode = coreName_TO_nodeUrlWithoutProtocolPartForLiveNodes_map.get(coreName); - assertTrue("Multiple replicas scheduled for node: "+foundNode, foundNodeNames.add(foundNode)); - assertTrue("Assigned node name not in list of given nodes: "+foundNode, nodeUrlWithoutProtocolPartForLiveNodes.contains(foundNode)); + assertTrue( + "Multiple replicas scheduled for node: " + foundNode, foundNodeNames.add(foundNode)); + assertTrue( + "Assigned node name not in list of given nodes: " + foundNode, + nodeUrlWithoutProtocolPartForLiveNodes.contains(foundNode)); } } - - assertEquals(numberOfSlices.intValue(), + + assertEquals( + numberOfSlices.intValue(), sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMap.size()); for (int i = 1; i <= numberOfSlices; i++) { - sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMap.keySet() - .contains("shard" + i); + assertTrue( + sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMap + .keySet() + .contains("shard" + i)); } int minShardsPerSlicePerNode = numberOfReplica / createNodes.size(); int numberOfNodesSupposedToRunMaxShards = numberOfReplica - % createNodes.size(); int numberOfNodesSupposedToRunMinShards = createNodes.size() - - numberOfNodesSupposedToRunMaxShards; + int numberOfNodesSupposedToRunMaxShards = numberOfReplica % createNodes.size(); + int numberOfNodesSupposedToRunMinShards = + createNodes.size() - numberOfNodesSupposedToRunMaxShards; int maxShardsPerSlicePerNode = (minShardsPerSlicePerNode + 1); if (numberOfNodesSupposedToRunMaxShards == 0) { numberOfNodesSupposedToRunMaxShards = numberOfNodesSupposedToRunMinShards; maxShardsPerSlicePerNode = minShardsPerSlicePerNode; } - boolean diffBetweenMinAndMaxShardsPerSlicePerNode = (maxShardsPerSlicePerNode != minShardsPerSlicePerNode); - - for (Entry<String, Map<String, Integer>> sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMapEntry : sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMap - .entrySet()) { + boolean diffBetweenMinAndMaxShardsPerSlicePerNode = + (maxShardsPerSlicePerNode != minShardsPerSlicePerNode); + + for (Entry<String, Map<String, Integer>> 
sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMapEntry : + sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMap.entrySet()) { int numberOfShardsRunning = 0; int numberOfNodesRunningMinShards = 0; int numberOfNodesRunningMaxShards = 0; int numberOfNodesRunningAtLeastOneShard = 0; - for (String nodeUrlsWithoutProtocolPart : sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMapEntry - .getValue().keySet()) { - int numberOfShardsRunningOnThisNode = sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMapEntry - .getValue().get(nodeUrlsWithoutProtocolPart); + for (String nodeUrlsWithoutProtocolPart : + sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMapEntry + .getValue() + .keySet()) { + int numberOfShardsRunningOnThisNode = + sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMapEntry + .getValue() + .get(nodeUrlsWithoutProtocolPart); numberOfShardsRunning += numberOfShardsRunningOnThisNode; numberOfNodesRunningAtLeastOneShard++; assertTrue( @@ -735,46 +871,64 @@ protected void verifySubmitCaptures( + nodeUrlsWithoutProtocolPart + " is running wrong number of shards. Supposed to run " + minShardsPerSlicePerNode - + (diffBetweenMinAndMaxShardsPerSlicePerNode ? (" or " + maxShardsPerSlicePerNode) + + (diffBetweenMinAndMaxShardsPerSlicePerNode + ? (" or " + maxShardsPerSlicePerNode) : ""), (numberOfShardsRunningOnThisNode == minShardsPerSlicePerNode) || (numberOfShardsRunningOnThisNode == maxShardsPerSlicePerNode)); - if (numberOfShardsRunningOnThisNode == minShardsPerSlicePerNode) numberOfNodesRunningMinShards++; - if (numberOfShardsRunningOnThisNode == maxShardsPerSlicePerNode) numberOfNodesRunningMaxShards++; + if (numberOfShardsRunningOnThisNode == minShardsPerSlicePerNode) + numberOfNodesRunningMinShards++; + if (numberOfShardsRunningOnThisNode == maxShardsPerSlicePerNode) + numberOfNodesRunningMaxShards++; } - if (minShardsPerSlicePerNode == 0) numberOfNodesRunningMinShards = (createNodes - .size() - numberOfNodesRunningAtLeastOneShard); + if (minShardsPerSlicePerNode == 0) + numberOfNodesRunningMinShards = (createNodes.size() - numberOfNodesRunningAtLeastOneShard); assertEquals( "Too many shards are running under slice " - + sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMapEntry - .getKey(), - numberOfReplica.intValue(), numberOfShardsRunning); - assertEquals(numberOfNodesSupposedToRunMinShards, - numberOfNodesRunningMinShards); - assertEquals(numberOfNodesSupposedToRunMaxShards, - numberOfNodesRunningMaxShards); + + sliceToNodeUrlsWithoutProtocolPartToNumberOfShardsRunningMapMapEntry.getKey(), + numberOfReplica.intValue(), + numberOfShardsRunning); + assertEquals(numberOfNodesSupposedToRunMinShards, numberOfNodesRunningMinShards); + assertEquals(numberOfNodesSupposedToRunMaxShards, numberOfNodesRunningMaxShards); } } - + protected void waitForEmptyQueue(long maxWait) throws Exception { final TimeOut timeout = new TimeOut(maxWait, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); while (queue.peek() != null) { - if (timeout.hasTimedOut()) - fail("Queue not empty within " + maxWait + " ms"); + if (timeout.hasTimedOut()) fail("Queue not empty within " + maxWait + " ms"); Thread.sleep(100); } } - + protected enum CreateNodeListOptions { SEND, DONT_SEND, SEND_NULL } - protected void testTemplate(Integer numberOfNodes, Integer numberOfNodesToCreateOn, CreateNodeListOptions createNodeListOption, Integer replicationFactor, - Integer numberOfSlices, boolean collectionExceptedToBeCreated, boolean 
distributedClusterStateUpdates) throws Exception { - assertTrue("Wrong usage of testTemplate. numberOfNodesToCreateOn " + numberOfNodesToCreateOn + " is not allowed to be higher than numberOfNodes " + numberOfNodes, numberOfNodes.intValue() >= numberOfNodesToCreateOn.intValue()); - assertTrue("Wrong usage of testTemplage. createNodeListOption has to be " + CreateNodeListOptions.SEND + " when numberOfNodes and numberOfNodesToCreateOn are unequal", ((createNodeListOption == CreateNodeListOptions.SEND) || (numberOfNodes.intValue() == numberOfNodesToCreateOn.intValue()))); - + + protected void testTemplate( + Integer numberOfNodes, + Integer numberOfNodesToCreateOn, + CreateNodeListOptions createNodeListOption, + Integer replicationFactor, + Integer numberOfSlices, + boolean collectionExceptedToBeCreated, + boolean distributedClusterStateUpdates) + throws Exception { + assertTrue( + "Wrong usage of testTemplate. numberOfNodesToCreateOn " + + numberOfNodesToCreateOn + + " is not allowed to be higher than numberOfNodes " + + numberOfNodes, + numberOfNodes.intValue() >= numberOfNodesToCreateOn.intValue()); + assertTrue( + "Wrong usage of testTemplate. createNodeListOption has to be " + + CreateNodeListOptions.SEND + + " when numberOfNodes and numberOfNodesToCreateOn are unequal", + ((createNodeListOption == CreateNodeListOptions.SEND) + || (numberOfNodes.intValue() == numberOfNodesToCreateOn.intValue()))); + Set<String> liveNodes = commonMocks(numberOfNodes, distributedClusterStateUpdates); List<String> createNodeList = new ArrayList<>(); int i = 0; @@ -783,24 +937,39 @@ protected void testTemplate(Integer numberOfNodes, Integer numberOfNodesToCreate createNodeList.add(node); } } - - if (random().nextBoolean()) Collections.shuffle(createNodeList, random()); - underTest = new OverseerCollectionConfigSetProcessorToBeTested(zkStateReaderMock, - "1234", shardHandlerFactoryMock, ADMIN_PATH, workQueueMock, runningMapMock, - overseerMock, completedMapMock, failureMapMock, solrMetricsContextMock); + if (random().nextBoolean()) Collections.shuffle(createNodeList, random()); + underTest = + new OverseerCollectionConfigSetProcessorToBeTested( + zkStateReaderMock, + "1234", + shardHandlerFactoryMock, + ADMIN_PATH, + workQueueMock, + runningMapMock, + overseerMock, + completedMapMock, + failureMapMock, + solrMetricsContextMock); if (log.isInfoEnabled()) { log.info("clusterstate {}", clusterStateMock.hashCode()); } startComponentUnderTest(); - - final List<String> createNodeListToSend = ((createNodeListOption != CreateNodeListOptions.SEND_NULL) ? createNodeList : null); + + final List<String> createNodeListToSend = + ((createNodeListOption != CreateNodeListOptions.SEND_NULL) ? 
createNodeList : null); final boolean sendCreateNodeList = (createNodeListOption != CreateNodeListOptions.DONT_SEND); - final boolean dontShuffleCreateNodeSet = (createNodeListToSend != null) && sendCreateNodeList && random().nextBoolean(); - issueCreateJob(numberOfSlices, replicationFactor, createNodeListToSend, sendCreateNodeList, !dontShuffleCreateNodeSet); + final boolean dontShuffleCreateNodeSet = + (createNodeListToSend != null) && sendCreateNodeList && random().nextBoolean(); + issueCreateJob( + numberOfSlices, + replicationFactor, + createNodeListToSend, + sendCreateNodeList, + !dontShuffleCreateNodeSet); waitForEmptyQueue(10000); if (collectionExceptedToBeCreated) { @@ -808,16 +977,17 @@ protected void testTemplate(Integer numberOfNodes, Integer numberOfNodesToCreate } if (collectionExceptedToBeCreated) { - verifySubmitCaptures(numberOfSlices, replicationFactor, - createNodeList, dontShuffleCreateNodeSet); + verifySubmitCaptures( + numberOfSlices, replicationFactor, createNodeList, dontShuffleCreateNodeSet); } } - // Tests below are being run twice: once with Overseer based updates and once with distributed updates. - // This is done explicitly here because these tests use mocks than can be configured directly. - // Tests not using mocks (most other tests) but using the MiniSolrCloudCluster are randomized to sometimes use Overseer - // and sometimes distributed state updates (but not both for a given test and a given test seed). - // See the SolrCloudTestCase.Builder constructor and the rest of the Builder class. + // Tests below are being run twice: once with Overseer based updates and once with distributed + // updates. This is done explicitly here because these tests use mocks that can be configured + // directly. Tests not using mocks (most other tests) but using the MiniSolrCloudCluster are + // randomized to sometimes use Overseer and sometimes distributed state updates (but not both for + // a given test and a given test seed). See the SolrCloudTestCase.Builder constructor and the rest + // of the Builder class. 
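The comment above also describes the shape of every test that follows in this file: each scenario is written once as a private method taking the distributedClusterStateUpdates flag, then exposed as two thin public @Test methods, one per cluster state update strategy. A minimal, self-contained sketch of that pattern, with illustrative names only (DualModeSketch, runScenario, and the two test methods are not from this patch):

import org.junit.Test;

public class DualModeSketch {

  @Test
  public void testScenarioOverseer() throws Exception {
    runScenario(false); // cluster state updates flow through the Overseer queue
  }

  @Test
  public void testScenarioDistributedUpdates() throws Exception {
    runScenario(true); // cluster state updates are applied directly, bypassing the Overseer
  }

  // Shared body: configure the mocks for the chosen strategy, then run the
  // same assertions against both configurations.
  private void runScenario(boolean distributedClusterStateUpdates) throws Exception {}
}

Writing the flag into one shared private method keeps the two strategies byte-for-byte identical except for the mock configuration, which is why the file grows a *Overseer/*DistributedUpdates pair per scenario rather than randomizing the strategy per seed.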
@Test public void testNoReplicationEqualNumberOfSlicesPerNodeOverseer() throws Exception { @@ -829,113 +999,166 @@ public void testNoReplicationEqualNumberOfSlicesPerNodeDistributedUpdates() thro testNoReplicationEqualNumberOfSlicesPerNodeInternal(true); } - private void testNoReplicationEqualNumberOfSlicesPerNodeInternal(boolean distributedClusterStateUpdates) throws Exception { + private void testNoReplicationEqualNumberOfSlicesPerNodeInternal( + boolean distributedClusterStateUpdates) throws Exception { Integer numberOfNodes = 4; Integer numberOfNodesToCreateOn = 4; CreateNodeListOptions createNodeListOptions = CreateNodeListOptions.DONT_SEND; Integer replicationFactor = 1; Integer numberOfSlices = 8; - testTemplate(numberOfNodes, numberOfNodesToCreateOn, createNodeListOptions, replicationFactor, numberOfSlices, - true, distributedClusterStateUpdates); + testTemplate( + numberOfNodes, + numberOfNodesToCreateOn, + createNodeListOptions, + replicationFactor, + numberOfSlices, + true, + distributedClusterStateUpdates); } @Test public void testReplicationEqualNumberOfSlicesPerNodeOverseer() throws Exception { testReplicationEqualNumberOfSlicesPerNodeInternal(false); } + @Test public void testReplicationEqualNumberOfSlicesPerNodeDistributedUpdates() throws Exception { testReplicationEqualNumberOfSlicesPerNodeInternal(true); } - private void testReplicationEqualNumberOfSlicesPerNodeInternal(boolean distributedClusterStateUpdates) throws Exception { + private void testReplicationEqualNumberOfSlicesPerNodeInternal( + boolean distributedClusterStateUpdates) throws Exception { Integer numberOfNodes = 4; Integer numberOfNodesToCreateOn = 4; CreateNodeListOptions createNodeListOptions = CreateNodeListOptions.DONT_SEND; Integer replicationFactor = 2; Integer numberOfSlices = 4; - testTemplate(numberOfNodes, numberOfNodesToCreateOn, createNodeListOptions, replicationFactor, numberOfSlices, - true, distributedClusterStateUpdates); + testTemplate( + numberOfNodes, + numberOfNodesToCreateOn, + createNodeListOptions, + replicationFactor, + numberOfSlices, + true, + distributedClusterStateUpdates); } @Test - public void testNoReplicationEqualNumberOfSlicesPerNodeSendCreateNodesEqualToLiveNodesOverseer() throws Exception { + public void testNoReplicationEqualNumberOfSlicesPerNodeSendCreateNodesEqualToLiveNodesOverseer() + throws Exception { testNoReplicationEqualNumberOfSlicesPerNodeSendCreateNodesEqualToLiveNodesInternal(false); } @Test - public void testNoReplicationEqualNumberOfSlicesPerNodeSendCreateNodesEqualToLiveNodesDistributedUpdates() throws Exception { + public void + testNoReplicationEqualNumberOfSlicesPerNodeSendCreateNodesEqualToLiveNodesDistributedUpdates() + throws Exception { testNoReplicationEqualNumberOfSlicesPerNodeSendCreateNodesEqualToLiveNodesInternal(true); } - private void testNoReplicationEqualNumberOfSlicesPerNodeSendCreateNodesEqualToLiveNodesInternal(boolean distributedClusterStateUpdates) throws Exception { + private void testNoReplicationEqualNumberOfSlicesPerNodeSendCreateNodesEqualToLiveNodesInternal( + boolean distributedClusterStateUpdates) throws Exception { Integer numberOfNodes = 4; Integer numberOfNodesToCreateOn = 4; CreateNodeListOptions createNodeListOptions = CreateNodeListOptions.SEND; Integer replicationFactor = 1; Integer numberOfSlices = 8; - testTemplate(numberOfNodes, numberOfNodesToCreateOn, createNodeListOptions, replicationFactor, numberOfSlices, - true, distributedClusterStateUpdates); + testTemplate( + numberOfNodes, + numberOfNodesToCreateOn, + 
createNodeListOptions, + replicationFactor, + numberOfSlices, + true, + distributedClusterStateUpdates); } @Test - public void testReplicationEqualNumberOfSlicesPerNodeSendCreateNodesEqualToLiveNodesOverseer() throws Exception { + public void testReplicationEqualNumberOfSlicesPerNodeSendCreateNodesEqualToLiveNodesOverseer() + throws Exception { testReplicationEqualNumberOfSlicesPerNodeSendCreateNodesEqualToLiveNodesInternal(false); } @Test - public void testReplicationEqualNumberOfSlicesPerNodeSendCreateNodesEqualToLiveNodesDistributedUpdates() throws Exception { + public void + testReplicationEqualNumberOfSlicesPerNodeSendCreateNodesEqualToLiveNodesDistributedUpdates() + throws Exception { testReplicationEqualNumberOfSlicesPerNodeSendCreateNodesEqualToLiveNodesInternal(true); } - private void testReplicationEqualNumberOfSlicesPerNodeSendCreateNodesEqualToLiveNodesInternal(boolean distributedClusterStateUpdates) throws Exception { + private void testReplicationEqualNumberOfSlicesPerNodeSendCreateNodesEqualToLiveNodesInternal( + boolean distributedClusterStateUpdates) throws Exception { Integer numberOfNodes = 4; Integer numberOfNodesToCreateOn = 4; CreateNodeListOptions createNodeListOptions = CreateNodeListOptions.SEND; Integer replicationFactor = 2; Integer numberOfSlices = 4; - testTemplate(numberOfNodes, numberOfNodesToCreateOn, createNodeListOptions, replicationFactor, numberOfSlices, - true, distributedClusterStateUpdates); + testTemplate( + numberOfNodes, + numberOfNodesToCreateOn, + createNodeListOptions, + replicationFactor, + numberOfSlices, + true, + distributedClusterStateUpdates); } @Test - public void testNoReplicationEqualNumberOfSlicesPerNodeSendNullCreateNodesOverseer() throws Exception { + public void testNoReplicationEqualNumberOfSlicesPerNodeSendNullCreateNodesOverseer() + throws Exception { testNoReplicationEqualNumberOfSlicesPerNodeSendNullCreateNodesInternal(false); } @Test - public void testNoReplicationEqualNumberOfSlicesPerNodeSendNullCreateNodesDistributedUpdates() throws Exception { + public void testNoReplicationEqualNumberOfSlicesPerNodeSendNullCreateNodesDistributedUpdates() + throws Exception { testNoReplicationEqualNumberOfSlicesPerNodeSendNullCreateNodesInternal(true); } - private void testNoReplicationEqualNumberOfSlicesPerNodeSendNullCreateNodesInternal(boolean distributedClusterStateUpdates) throws Exception { + private void testNoReplicationEqualNumberOfSlicesPerNodeSendNullCreateNodesInternal( + boolean distributedClusterStateUpdates) throws Exception { Integer numberOfNodes = 4; Integer numberOfNodesToCreateOn = 4; CreateNodeListOptions createNodeListOptions = CreateNodeListOptions.SEND_NULL; Integer replicationFactor = 1; Integer numberOfSlices = 8; - testTemplate(numberOfNodes, numberOfNodesToCreateOn, createNodeListOptions, replicationFactor, numberOfSlices, - true, distributedClusterStateUpdates); + testTemplate( + numberOfNodes, + numberOfNodesToCreateOn, + createNodeListOptions, + replicationFactor, + numberOfSlices, + true, + distributedClusterStateUpdates); } @Test - public void testReplicationEqualNumberOfSlicesPerNodeSendNullCreateNodesOverseer() throws Exception { + public void testReplicationEqualNumberOfSlicesPerNodeSendNullCreateNodesOverseer() + throws Exception { testReplicationEqualNumberOfSlicesPerNodeSendNullCreateNodesInternal(false); } @Test - public void testReplicationEqualNumberOfSlicesPerNodeSendNullCreateNodesDistributedUpdates() throws Exception { + public void 
testReplicationEqualNumberOfSlicesPerNodeSendNullCreateNodesDistributedUpdates() + throws Exception { testReplicationEqualNumberOfSlicesPerNodeSendNullCreateNodesInternal(true); } - private void testReplicationEqualNumberOfSlicesPerNodeSendNullCreateNodesInternal(boolean distributedClusterStateUpdates) throws Exception { + private void testReplicationEqualNumberOfSlicesPerNodeSendNullCreateNodesInternal( + boolean distributedClusterStateUpdates) throws Exception { Integer numberOfNodes = 4; Integer numberOfNodesToCreateOn = 4; CreateNodeListOptions createNodeListOptions = CreateNodeListOptions.SEND_NULL; Integer replicationFactor = 2; Integer numberOfSlices = 4; - testTemplate(numberOfNodes, numberOfNodesToCreateOn, createNodeListOptions, replicationFactor, numberOfSlices, - true, distributedClusterStateUpdates); + testTemplate( + numberOfNodes, + numberOfNodesToCreateOn, + createNodeListOptions, + replicationFactor, + numberOfSlices, + true, + distributedClusterStateUpdates); } @Test @@ -948,14 +1171,21 @@ public void testNoReplicationUnequalNumberOfSlicesPerNodeDistributedUpdates() th testNoReplicationUnequalNumberOfSlicesPerNodeInternal(true); } - private void testNoReplicationUnequalNumberOfSlicesPerNodeInternal(boolean distributedClusterStateUpdates) throws Exception { + private void testNoReplicationUnequalNumberOfSlicesPerNodeInternal( + boolean distributedClusterStateUpdates) throws Exception { Integer numberOfNodes = 4; Integer numberOfNodesToCreateOn = 4; CreateNodeListOptions createNodeListOptions = CreateNodeListOptions.DONT_SEND; Integer replicationFactor = 1; Integer numberOfSlices = 6; - testTemplate(numberOfNodes, numberOfNodesToCreateOn, createNodeListOptions, replicationFactor, numberOfSlices, - true, distributedClusterStateUpdates); + testTemplate( + numberOfNodes, + numberOfNodesToCreateOn, + createNodeListOptions, + replicationFactor, + numberOfSlices, + true, + distributedClusterStateUpdates); } @Test @@ -968,14 +1198,21 @@ public void testReplicationUnequalNumberOfSlicesPerNodeDistributedUpdates() thro testReplicationUnequalNumberOfSlicesPerNodeInternal(true); } - private void testReplicationUnequalNumberOfSlicesPerNodeInternal(boolean distributedClusterStateUpdates) throws Exception { + private void testReplicationUnequalNumberOfSlicesPerNodeInternal( + boolean distributedClusterStateUpdates) throws Exception { Integer numberOfNodes = 4; Integer numberOfNodesToCreateOn = 4; CreateNodeListOptions createNodeListOptions = CreateNodeListOptions.DONT_SEND; Integer replicationFactor = 2; Integer numberOfSlices = 3; - testTemplate(numberOfNodes, numberOfNodesToCreateOn, createNodeListOptions, replicationFactor, numberOfSlices, - true, distributedClusterStateUpdates); + testTemplate( + numberOfNodes, + numberOfNodesToCreateOn, + createNodeListOptions, + replicationFactor, + numberOfSlices, + true, + distributedClusterStateUpdates); } @Test @@ -988,14 +1225,21 @@ public void testNoReplicationLimitedNodesToCreateOnDistributedUpdates() throws E testNoReplicationLimitedNodesToCreateOnInternal(true); } - private void testNoReplicationLimitedNodesToCreateOnInternal(boolean distributedClusterStateUpdates) throws Exception { + private void testNoReplicationLimitedNodesToCreateOnInternal( + boolean distributedClusterStateUpdates) throws Exception { Integer numberOfNodes = 4; Integer numberOfNodesToCreateOn = 2; CreateNodeListOptions createNodeListOptions = CreateNodeListOptions.SEND; Integer replicationFactor = 1; Integer numberOfSlices = 6; - testTemplate(numberOfNodes, 
numberOfNodesToCreateOn, createNodeListOptions, replicationFactor, numberOfSlices,
-        true, distributedClusterStateUpdates);
+    testTemplate(
+        numberOfNodes,
+        numberOfNodesToCreateOn,
+        createNodeListOptions,
+        replicationFactor,
+        numberOfSlices,
+        true,
+        distributedClusterStateUpdates);
   }
 
   @Test
@@ -1008,54 +1252,86 @@ public void testReplicationLimitedNodesToCreateOnDistributedUpdates() throws Exc
     testReplicationLimitedNodesToCreateOnInternal(true);
   }
 
-  private void testReplicationLimitedNodesToCreateOnInternal(boolean distributedClusterStateUpdates) throws Exception {
+  private void testReplicationLimitedNodesToCreateOnInternal(boolean distributedClusterStateUpdates)
+      throws Exception {
     Integer numberOfNodes = 4;
     Integer numberOfNodesToCreateOn = 2;
     CreateNodeListOptions createNodeListOptions = CreateNodeListOptions.SEND;
     Integer replicationFactor = 2;
     Integer numberOfSlices = 3;
-    testTemplate(numberOfNodes, numberOfNodesToCreateOn, createNodeListOptions, replicationFactor, numberOfSlices,
-        true, distributedClusterStateUpdates);
+    testTemplate(
+        numberOfNodes,
+        numberOfNodesToCreateOn,
+        createNodeListOptions,
+        replicationFactor,
+        numberOfSlices,
+        true,
+        distributedClusterStateUpdates);
   }
 
   @Test
-  public void testNoReplicationCollectionNotCreatedDueToMaxShardsPerNodeAndNodesToCreateOnLimitsOverseer() throws Exception {
-    testNoReplicationCollectionNotCreatedDueToMaxShardsPerNodeAndNodesToCreateOnLimitsInternal(false);
+  public void
+      testNoReplicationCollectionNotCreatedDueToMaxShardsPerNodeAndNodesToCreateOnLimitsOverseer()
+          throws Exception {
+    testNoReplicationCollectionNotCreatedDueToMaxShardsPerNodeAndNodesToCreateOnLimitsInternal(
+        false);
   }
 
   @Test
-  public void testNoReplicationCollectionNotCreatedDueToMaxShardsPerNodeAndNodesToCreateOnLimitsDistributedUpdates() throws Exception {
-    testNoReplicationCollectionNotCreatedDueToMaxShardsPerNodeAndNodesToCreateOnLimitsInternal(true);
+  public void
+      testNoReplicationCollectionNotCreatedDueToMaxShardsPerNodeAndNodesToCreateOnLimitsDistributedUpdates()
+          throws Exception {
+    testNoReplicationCollectionNotCreatedDueToMaxShardsPerNodeAndNodesToCreateOnLimitsInternal(
+        true);
   }
 
-  private void testNoReplicationCollectionNotCreatedDueToMaxShardsPerNodeAndNodesToCreateOnLimitsInternal(boolean distributedClusterStateUpdates) throws Exception {
+  private void
+      testNoReplicationCollectionNotCreatedDueToMaxShardsPerNodeAndNodesToCreateOnLimitsInternal(
+          boolean distributedClusterStateUpdates) throws Exception {
     Integer numberOfNodes = 4;
     Integer numberOfNodesToCreateOn = 3;
     CreateNodeListOptions createNodeListOptions = CreateNodeListOptions.SEND;
     Integer replicationFactor = 1;
     Integer numberOfSlices = 8;
-    testTemplate(numberOfNodes, numberOfNodesToCreateOn, createNodeListOptions, replicationFactor, numberOfSlices,
-        false, distributedClusterStateUpdates);
+    testTemplate(
+        numberOfNodes,
+        numberOfNodesToCreateOn,
+        createNodeListOptions,
+        replicationFactor,
+        numberOfSlices,
+        false,
+        distributedClusterStateUpdates);
   }
 
   @Test
-  public void testReplicationCollectionNotCreatedDueToMaxShardsPerNodeAndNodesToCreateOnLimitsOverseer() throws Exception {
+  public void
+      testReplicationCollectionNotCreatedDueToMaxShardsPerNodeAndNodesToCreateOnLimitsOverseer()
+          throws Exception {
     testReplicationCollectionNotCreatedDueToMaxShardsPerNodeAndNodesToCreateOnLimitsInternal(false);
   }
 
   @Test
-  public void testReplicationCollectionNotCreatedDueToMaxShardsPerNodeAndNodesToCreateOnLimitsDistributedUpdates() throws Exception {
+  public void
+      testReplicationCollectionNotCreatedDueToMaxShardsPerNodeAndNodesToCreateOnLimitsDistributedUpdates()
+          throws Exception {
     testReplicationCollectionNotCreatedDueToMaxShardsPerNodeAndNodesToCreateOnLimitsInternal(true);
   }
 
-  private void testReplicationCollectionNotCreatedDueToMaxShardsPerNodeAndNodesToCreateOnLimitsInternal(boolean distributedClusterStateUpdates) throws Exception {
+  private void
+      testReplicationCollectionNotCreatedDueToMaxShardsPerNodeAndNodesToCreateOnLimitsInternal(
+          boolean distributedClusterStateUpdates) throws Exception {
     Integer numberOfNodes = 4;
     Integer numberOfNodesToCreateOn = 3;
    CreateNodeListOptions createNodeListOptions = CreateNodeListOptions.SEND;
    Integer replicationFactor = 2;
    Integer numberOfSlices = 4;
-    testTemplate(numberOfNodes, numberOfNodesToCreateOn, createNodeListOptions, replicationFactor, numberOfSlices,
-        false, distributedClusterStateUpdates);
+    testTemplate(
+        numberOfNodes,
+        numberOfNodesToCreateOn,
+        createNodeListOptions,
+        replicationFactor,
+        numberOfSlices,
+        false,
+        distributedClusterStateUpdates);
   }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerModifyCollectionTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerModifyCollectionTest.java
index a1411947c98..e9cb4d775ba 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerModifyCollectionTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerModifyCollectionTest.java
@@ -18,14 +18,11 @@
 package org.apache.solr.cloud;
 
 import java.util.Collections;
-
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.response.RequestStatusState;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-
-
 public class OverseerModifyCollectionTest extends SolrCloudTestCase {
 
   @BeforeClass
@@ -45,20 +42,26 @@ public void testModifyColl() throws Exception {
         .process(cluster.getSolrClient());
 
     // Modify configSet
-    RequestStatusState requestStatusState = CollectionAdminRequest.modifyCollection(collName,
-        Collections.singletonMap("collection.configName", "conf2"))
+    RequestStatusState requestStatusState =
+        CollectionAdminRequest.modifyCollection(
+                collName, Collections.singletonMap("collection.configName", "conf2"))
             .processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT);
     assertEquals(requestStatusState, RequestStatusState.COMPLETED);
 
-    String configName = cluster.getSolrClient().getClusterStateProvider().getCollection(collName).getConfigName();
+    String configName =
+        cluster.getSolrClient().getClusterStateProvider().getCollection(collName).getConfigName();
     assertEquals("conf2", configName);
-
-    //Try an invalid config name
-    Exception e = expectThrows(Exception.class, () -> {
-      CollectionAdminRequest.modifyCollection(collName,
-          Collections.singletonMap("collection.configName", "notARealConfigName")
-      ).process(cluster.getSolrClient());
-    });
+
+    // Try an invalid config name
+    Exception e =
+        expectThrows(
+            Exception.class,
+            () -> {
+              CollectionAdminRequest.modifyCollection(
+                      collName,
+                      Collections.singletonMap("collection.configName", "notARealConfigName"))
+                  .process(cluster.getSolrClient());
+            });
 
     assertTrue(e.getMessage(), e.getMessage().contains("Can not find the specified config set"));
   }
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java
index 6e59f092882..4fc3c5fb986 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java
@@ -16,6 +16,9 @@
  */
 package org.apache.solr.cloud;
 
+import static org.apache.solr.cloud.OverseerCollectionConfigSetProcessor.getLeaderNode;
+import static org.apache.solr.cloud.OverseerTaskProcessor.getSortedElectionNodes;
+
 import java.lang.invoke.MethodHandles;
 import java.net.URL;
 import java.util.Collections;
@@ -23,7 +26,6 @@
 import java.util.Objects;
 import java.util.concurrent.TimeUnit;
 import java.util.function.Predicate;
-
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.common.util.TimeSource;
@@ -35,44 +37,44 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.apache.solr.cloud.OverseerCollectionConfigSetProcessor.getLeaderNode;
-import static org.apache.solr.cloud.OverseerTaskProcessor.getSortedElectionNodes;
-
 public class OverseerRolesTest extends SolrCloudTestCase {
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @Before
   public void setupCluster() throws Exception {
-    configureCluster(4)
-        .addConfig("conf", configset("cloud-minimal"))
-        .configure();
+    configureCluster(4).addConfig("conf", configset("cloud-minimal")).configure();
   }
 
   @After
   public void tearDownCluster() throws Exception {
     shutdownCluster();
   }
 
-
-  public static void waitForNewOverseer(int seconds, Predicate state, boolean failOnIntermediateTransition) throws Exception {
+
+  public static void waitForNewOverseer(
+      int seconds, Predicate state, boolean failOnIntermediateTransition) throws Exception {
     TimeOut timeout = new TimeOut(seconds, TimeUnit.SECONDS, TimeSource.NANO_TIME);
     String current = null;
     while (timeout.hasTimedOut() == false) {
       String prev = current;
       current = OverseerCollectionConfigSetProcessor.getLeaderNode(zkClient());
-      if (state.test(current))
-        return;
+      if (state.test(current)) return;
       else if (failOnIntermediateTransition) {
         if (prev != null && current != null && !current.equals(prev)) {
-          fail ("There was an intermediate transition, previous: "+prev+", intermediate transition: "+current);
+          fail(
+              "There was an intermediate transition, previous: "
+                  + prev
+                  + ", intermediate transition: "
+                  + current);
         }
       }
       Thread.sleep(100);
     }
-    fail("Timed out waiting for overseer state change. The current overseer is: "+current);
+    fail("Timed out waiting for overseer state change. The current overseer is: " + current);
   }
 
-  public static void waitForNewOverseer(int seconds, String expected, boolean failOnIntermediateTransition) throws Exception {
+  public static void waitForNewOverseer(
+      int seconds, String expected, boolean failOnIntermediateTransition) throws Exception {
     log.info("Expecting node: {}", expected);
     waitForNewOverseer(seconds, s -> Objects.equals(s, expected), failOnIntermediateTransition);
   }
@@ -83,10 +85,9 @@ private JettySolrRunner getOverseerJetty() throws Exception {
     int hostPort = overseerUrl.getPort();
     for (JettySolrRunner jetty : cluster.getJettySolrRunners()) {
       try {
-        if (jetty.getBaseUrl().getPort() == hostPort)
-          return jetty;
+        if (jetty.getBaseUrl().getPort() == hostPort) return jetty;
       } catch (IllegalStateException e) {
-
+
       }
     }
     fail("Couldn't find overseer node " + overseer);
@@ -96,21 +97,26 @@ private JettySolrRunner getOverseerJetty() throws Exception {
   private void logOverseerState() throws KeeperException, InterruptedException {
     if (log.isInfoEnabled()) {
       log.info("Overseer: {}", getLeaderNode(zkClient()));
-      log.info("Election queue: {}", getSortedElectionNodes(zkClient(), "/overseer_elect/election")); // nowarn
+      log.info(
+          "Election queue: {}",
+          getSortedElectionNodes(zkClient(), "/overseer_elect/election")); // nowarn
     }
   }
 
   @Test
   public void testOverseerRole() throws Exception {
-    if (new CollectionAdminRequest.RequestApiDistributedProcessing().process(cluster.getSolrClient()).getIsCollectionApiDistributed()) {
+    if (new CollectionAdminRequest.RequestApiDistributedProcessing()
+        .process(cluster.getSolrClient())
+        .getIsCollectionApiDistributed()) {
       log.info("Skipping test because Collection API is distributed");
       return;
     }
 
     logOverseerState();
-    List nodes = OverseerCollectionConfigSetProcessor.getSortedOverseerNodeNames(zkClient());
+    List nodes =
+        OverseerCollectionConfigSetProcessor.getSortedOverseerNodeNames(zkClient());
     // Remove the OVERSEER role, in case it was already assigned by another test in this suite
-    for (String node: nodes) {
+    for (String node : nodes) {
       CollectionAdminRequest.removeRole(node, "overseer").process(cluster.getSolrClient());
     }
     String overseer1 = OverseerCollectionConfigSetProcessor.getLeaderNode(zkClient());
@@ -124,7 +130,7 @@ public void testOverseerRole() throws Exception {
 
     waitForNewOverseer(15, overseer2, false);
 
-    //add another node as overseer
+    // add another node as overseer
     nodes.remove(overseer2);
     Collections.shuffle(nodes, random());
@@ -158,28 +164,37 @@ public void testOverseerRole() throws Exception {
     String leaderId = OverseerCollectionConfigSetProcessor.getLeaderId(zkClient());
     String leader = OverseerCollectionConfigSetProcessor.getLeaderNode(zkClient());
     log.info("### Sending QUIT to overseer {}", leader);
-    getOverseerJetty().getCoreContainer().getZkController().getOverseer().sendQuitToOverseer(leaderId);
+    getOverseerJetty()
+        .getCoreContainer()
+        .getZkController()
+        .getOverseer()
+        .sendQuitToOverseer(leaderId);
 
     waitForNewOverseer(15, s -> Objects.equals(leader, s) == false, false);
 
     Thread.sleep(1000);
-
+
     logOverseerState();
-    assertTrue("The old leader should have rejoined election",
-        OverseerCollectionConfigSetProcessor.getSortedOverseerNodeNames(zkClient()).contains(leader));
+    assertTrue(
+        "The old leader should have rejoined election",
+        OverseerCollectionConfigSetProcessor.getSortedOverseerNodeNames(zkClient())
+            .contains(leader));
 
     leaderJetty.start(); // starting this back, just for good measure
   }
 
   @Test
   public void testDesignatedOverseerRestarts() throws Exception {
-    if (new CollectionAdminRequest.RequestApiDistributedProcessing().process(cluster.getSolrClient()).getIsCollectionApiDistributed()) {
+    if (new CollectionAdminRequest.RequestApiDistributedProcessing()
+        .process(cluster.getSolrClient())
+        .getIsCollectionApiDistributed()) {
       log.info("Skipping test because Collection API is distributed");
       return;
     }
     logOverseerState();
     // Remove the OVERSEER role, in case it was already assigned by another test in this suite
-    for (String node: OverseerCollectionConfigSetProcessor.getSortedOverseerNodeNames(zkClient())) {
+    for (String node :
+        OverseerCollectionConfigSetProcessor.getSortedOverseerNodeNames(zkClient())) {
      CollectionAdminRequest.removeRole(node, "overseer").process(cluster.getSolrClient());
    }
    String overseer1 = OverseerCollectionConfigSetProcessor.getLeaderNode(zkClient());
@@ -194,21 +209,24 @@ public void testDesignatedOverseerRestarts() throws Exception {
     waitForNewOverseer(15, overseer1, false);
 
     JettySolrRunner leaderJetty = getOverseerJetty();
-    List nodes = OverseerCollectionConfigSetProcessor.getSortedOverseerNodeNames(zkClient());
+    List nodes =
+        OverseerCollectionConfigSetProcessor.getSortedOverseerNodeNames(zkClient());
     nodes.remove(overseer1); // remove the designated overseer
 
     logOverseerState();
-    // kill the current overseer, and check that the next node in the election queue assumes leadership
+    // kill the current overseer, and check that the next node in the election queue assumes
+    // leadership
     leaderJetty.stop();
     log.info("Killing designated overseer: {}", overseer1);
 
-    // after 5 seconds, bring back dead designated overseer and assert that it assumes leadership "right away",
-    // i.e. without any other node assuming leadership before this node becomes leader.
+    // after 5 seconds, bring back dead designated overseer and assert that it assumes leadership
+    // "right away", i.e. without any other node assuming leadership before this node becomes
+    // leader.
     Thread.sleep(5);
     logOverseerState();
     log.info("Starting back the prioritized overseer..");
     leaderJetty.start();
-    waitForNewOverseer(15, overseer1, true); // assert that there is just a single leadership transition
+    // assert that there is just a single leadership transition
+    waitForNewOverseer(15, overseer1, true);
   }
 }
-
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerSolrResponseTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerSolrResponseTest.java
index 3107ecdefe3..5caeeaef5e2 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerSolrResponseTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerSolrResponseTest.java
@@ -22,31 +22,31 @@
 import org.apache.solr.common.util.SimpleOrderedMap;
 
 public class OverseerSolrResponseTest extends SolrTestCaseJ4 {
-
+
   public void testEmpty() {
     assertSerializeDeserialize(new NamedList());
   }
-
+
   public void testWithSingleObject() {
     NamedList responseNl = new NamedList<>();
     responseNl.add("foo", "bar");
     assertSerializeDeserialize(responseNl);
   }
-
+
   public void testWithMultipleObject() {
     NamedList responseNl = new NamedList<>();
     responseNl.add("foo", "bar");
     responseNl.add("foobar", "foo");
     assertSerializeDeserialize(responseNl);
   }
-
+
   public void testRepeatedKeys() {
     NamedList responseNl = new NamedList<>();
     responseNl.add("foo", "bar");
     responseNl.add("foo", "zoo");
     assertSerializeDeserialize(responseNl);
   }
-
+
   public void testNested() {
     NamedList responseNl = new NamedList<>();
     NamedList response2 = new NamedList<>();
@@ -54,7 +54,7 @@ public void testNested() {
     responseNl.add("foo", response2);
     assertSerializeDeserialize(responseNl);
   }
-
+
   public void testException() {
     NamedList responseNl = new NamedList<>();
     SolrException e = new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Foo");
@@ -62,18 +62,31 @@ public void testException() {
     exceptionNl.add("msg", e.getMessage());
     exceptionNl.add("rspCode", e.code());
     responseNl.add("exception", exceptionNl);
-    OverseerSolrResponse deserialized = OverseerSolrResponseSerializer.deserialize(OverseerSolrResponseSerializer.serialize(new OverseerSolrResponse(responseNl)));
+    OverseerSolrResponse deserialized =
+        OverseerSolrResponseSerializer.deserialize(
+            OverseerSolrResponseSerializer.serialize(new OverseerSolrResponse(responseNl)));
     assertNotNull("Expecting an exception", deserialized.getException());
-    assertEquals("Unexpected exception type in deserialized response", SolrException.class, deserialized.getException().getClass());
-    assertEquals("Unexpected exception code in deserialized response", e.code(), ((SolrException)deserialized.getException()).code());
-    assertEquals("Unexpected exception message in deserialized response", e.getMessage(), deserialized.getException().getMessage());
+    assertEquals(
+        "Unexpected exception type in deserialized response",
+        SolrException.class,
+        deserialized.getException().getClass());
+    assertEquals(
+        "Unexpected exception code in deserialized response",
+        e.code(),
+        ((SolrException) deserialized.getException()).code());
+    assertEquals(
+        "Unexpected exception message in deserialized response",
+        e.getMessage(),
+        deserialized.getException().getMessage());
   }
-
+
   private void assertSerializeDeserialize(NamedList content) {
     OverseerSolrResponse response = new OverseerSolrResponse(content);
     byte[] serialized = OverseerSolrResponseSerializer.serialize(response);
     OverseerSolrResponse deserialized = OverseerSolrResponseSerializer.deserialize(serialized);
-    assertEquals("Deserialized response is different than original", response.getResponse(), deserialized.getResponse());
+    assertEquals(
+        "Deserialized response is different than original",
+        response.getResponse(),
+        deserialized.getResponse());
   }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerSolrResponseUnsafeSerializationTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerSolrResponseUnsafeSerializationTest.java
index 1d3d8e7cffd..73cba282ee3 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerSolrResponseUnsafeSerializationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerSolrResponseUnsafeSerializationTest.java
@@ -20,18 +20,17 @@
 import org.junit.BeforeClass;
 
 public class OverseerSolrResponseUnsafeSerializationTest extends OverseerSolrResponseTest {
-
+
   @BeforeClass
   public static void setUpClass() {
     System.setProperty("solr.useUnsafeOverseerResponse", "true");
   }
-
+
   @AfterClass
   public static void tearDownClass() {
     System.clearProperty("solr.useUnsafeOverseerResponse");
   }
-
-
+
   public void testUnsafeSerializartionToggles() {
     assertToggles("true", true, true);
     assertToggles("deserialization", false, true);
@@ -41,21 +40,27 @@
     assertToggles("serialization", false, false); // This is not an option
   }
 
-  private void assertToggles(String propertyValue, boolean serializationEnabled, boolean deserializationEnabled) {
+  private void assertToggles(
+      String propertyValue, boolean serializationEnabled, boolean deserializationEnabled) {
     String previousValue = System.getProperty("solr.useUnsafeOverseerResponse");
-    try {
+    try {
       if (propertyValue == null) {
         System.clearProperty("solr.useUnsafeOverseerResponse");
       } else {
         System.setProperty("solr.useUnsafeOverseerResponse", propertyValue);
       }
-      assertEquals("Unexpected serialization toggle for value: " + propertyValue, serializationEnabled, OverseerSolrResponseSerializer.useUnsafeSerialization());
-      assertEquals("Unexpected serialization toggle for value: " + propertyValue, deserializationEnabled, OverseerSolrResponseSerializer.useUnsafeDeserialization());
+      assertEquals(
+          "Unexpected serialization toggle for value: " + propertyValue,
+          serializationEnabled,
+          OverseerSolrResponseSerializer.useUnsafeSerialization());
+      assertEquals(
+          "Unexpected serialization toggle for value: " + propertyValue,
+          deserializationEnabled,
+          OverseerSolrResponseSerializer.useUnsafeDeserialization());
     } finally {
       if (previousValue != null) {
         System.setProperty("solr.useUnsafeOverseerResponse", previousValue);
       }
     }
   }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerStatusTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerStatusTest.java
index bf636e78f40..40d57eda3ee 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerStatusTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerStatusTest.java
@@ -28,9 +28,8 @@ public class OverseerStatusTest extends SolrCloudTestCase {
 
   @BeforeClass
   public static void setupCluster() throws Exception {
-    configureCluster(2)
-        .addConfig("conf", configset("cloud-minimal"))
-        .configure();;
+    configureCluster(2).addConfig("conf", configset("cloud-minimal")).configure();
+    ;
   }
 
   @Test
@@ -41,56 +40,81 @@ public void test() throws Exception {
     int numCollectionCreates = 0, numOverseerCreates = 0;
     String collectionName = "overseer_status_test";
-    CollectionAdminRequest.createCollection(collectionName, "conf", 1, 1).process(cluster.getSolrClient());
+    CollectionAdminRequest.createCollection(collectionName, "conf", 1, 1)
+        .process(cluster.getSolrClient());
 
-    NamedList resp = new CollectionAdminRequest.OverseerStatus().process(cluster.getSolrClient()).getResponse();
-    // When running with Distributed Collection API, no real data in Overseer status, but the Collection API call shouldn't fail
-    if (new CollectionAdminRequest.RequestApiDistributedProcessing().process(cluster.getSolrClient()).getIsCollectionApiDistributed()) {
+    NamedList resp =
+        new CollectionAdminRequest.OverseerStatus().process(cluster.getSolrClient()).getResponse();
+    // When running with Distributed Collection API, no real data in Overseer status, but the
+    // Collection API call shouldn't fail
+    if (new CollectionAdminRequest.RequestApiDistributedProcessing()
+        .process(cluster.getSolrClient())
+        .getIsCollectionApiDistributed()) {
       return;
     }
     NamedList collection_operations = (NamedList) resp.get("collection_operations");
     NamedList overseer_operations = (NamedList) resp.get("overseer_operations");
-    SimpleOrderedMap createcollection
-        = (SimpleOrderedMap) collection_operations.get(CollectionParams.CollectionAction.CREATE.toLower());
-    assertEquals("No stats for create in OverseerCollectionProcessor", numCollectionCreates + 1, createcollection.get("requests"));
-    // When cluster state updates are distributed, Overseer doesn't see them and doesn't report stats on them.
+    SimpleOrderedMap createcollection =
+        (SimpleOrderedMap)
+            collection_operations.get(CollectionParams.CollectionAction.CREATE.toLower());
+    assertEquals(
+        "No stats for create in OverseerCollectionProcessor",
+        numCollectionCreates + 1,
+        createcollection.get("requests"));
+    // When cluster state updates are distributed, Overseer doesn't see them and doesn't report
+    // stats on them.
    if (!cluster.getOpenOverseer().getDistributedClusterStateUpdater().isDistributedStateUpdate()) {
-      // Note the "create" key here is in a different map from the "create" key above. Above it's Collection creation in the
-      // Collection API, here it's the collection creation from the cluster state updater perspective.
-      createcollection = (SimpleOrderedMap) overseer_operations.get(CollectionParams.CollectionAction.CREATE.toLower());
-      assertEquals("No stats for create in Overseer", numOverseerCreates + 1, createcollection.get("requests"));
+      // Note the "create" key here is in a different map from the "create" key above. Above it's
+      // Collection creation in the Collection API, here it's the collection creation from the
+      // cluster state updater perspective.
+      createcollection =
+          (SimpleOrderedMap)
+              overseer_operations.get(CollectionParams.CollectionAction.CREATE.toLower());
+      assertEquals(
+          "No stats for create in Overseer",
+          numOverseerCreates + 1,
+          createcollection.get("requests"));
     }
 
     // Reload the collection
     CollectionAdminRequest.reloadCollection(collectionName).process(cluster.getSolrClient());
 
-    resp = new CollectionAdminRequest.OverseerStatus().process(cluster.getSolrClient()).getResponse();
+    resp =
+        new CollectionAdminRequest.OverseerStatus().process(cluster.getSolrClient()).getResponse();
     collection_operations = (NamedList) resp.get("collection_operations");
-    SimpleOrderedMap reload = (SimpleOrderedMap) collection_operations.get(CollectionParams.CollectionAction.RELOAD.toLower());
+    SimpleOrderedMap reload =
+        (SimpleOrderedMap)
+            collection_operations.get(CollectionParams.CollectionAction.RELOAD.toLower());
     assertEquals("No stats for reload in OverseerCollectionProcessor", 1, reload.get("requests"));
 
-    BaseHttpSolrClient.RemoteSolrException e = expectThrows(BaseHttpSolrClient.RemoteSolrException.class,
-        "Split shard for non existent collection should have failed",
-        () -> CollectionAdminRequest
-            .splitShard("non_existent_collection")
-            .setShardName("non_existent_shard")
-            .process(cluster.getSolrClient())
-    );
+    BaseHttpSolrClient.RemoteSolrException e =
+        expectThrows(
+            BaseHttpSolrClient.RemoteSolrException.class,
+            "Split shard for non existent collection should have failed",
+            () ->
+                CollectionAdminRequest.splitShard("non_existent_collection")
+                    .setShardName("non_existent_shard")
+                    .process(cluster.getSolrClient()));
 
-    resp = new CollectionAdminRequest.OverseerStatus().process(cluster.getSolrClient()).getResponse();
+    resp =
+        new CollectionAdminRequest.OverseerStatus().process(cluster.getSolrClient()).getResponse();
     collection_operations = (NamedList) resp.get("collection_operations");
-    SimpleOrderedMap split = (SimpleOrderedMap) collection_operations.get(CollectionParams.CollectionAction.SPLITSHARD.toLower());
+    SimpleOrderedMap split =
+        (SimpleOrderedMap)
+            collection_operations.get(CollectionParams.CollectionAction.SPLITSHARD.toLower());
     assertEquals("No stats for split in OverseerCollectionProcessor", 1, split.get("errors"));
     assertNotNull(split.get("recent_failures"));
 
-    SimpleOrderedMap amIleader = (SimpleOrderedMap) collection_operations.get("am_i_leader");
+    SimpleOrderedMap amIleader =
+        (SimpleOrderedMap) collection_operations.get("am_i_leader");
     assertNotNull("OverseerCollectionProcessor amILeader stats should not be null", amIleader);
     assertNotNull(amIleader.get("requests"));
     assertTrue(Integer.parseInt(amIleader.get("requests").toString()) > 0);
     assertNotNull(amIleader.get("errors"));
     assertNotNull(amIleader.get("avgTimePerRequest"));
 
-    // When cluster state updates are distributed, Overseer doesn't see the updates and doesn't report stats on them.
+    // When cluster state updates are distributed, Overseer doesn't see the updates and doesn't
+    // report stats on them.
     if (!cluster.getOpenOverseer().getDistributedClusterStateUpdater().isDistributedStateUpdate()) {
       amIleader = (SimpleOrderedMap) overseer_operations.get("am_i_leader");
       assertNotNull("Overseer amILeader stats should not be null", amIleader);
@@ -99,7 +123,8 @@ public void test() throws Exception {
       assertNotNull(amIleader.get("errors"));
       assertNotNull(amIleader.get("avgTimePerRequest"));
 
-      SimpleOrderedMap updateState = (SimpleOrderedMap) overseer_operations.get("update_state");
+      SimpleOrderedMap updateState =
+          (SimpleOrderedMap) overseer_operations.get("update_state");
       assertNotNull("Overseer update_state stats should not be null", updateState);
       assertNotNull(updateState.get("requests"));
       assertTrue(Integer.parseInt(updateState.get("requests").toString()) > 0);
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerTaskQueueTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerTaskQueueTest.java
index 3d6f7ef98f7..64f1c5d4c29 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerTaskQueueTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerTaskQueueTest.java
@@ -20,7 +20,6 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
 import org.apache.solr.cloud.api.collections.CollectionHandlingUtils;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.CollectionAdminParams;
@@ -31,7 +30,6 @@
 
 public class OverseerTaskQueueTest extends DistributedQueueTest {
 
-
   // TODO: OverseerTaskQueue specific tests.
 
   @Override
@@ -57,10 +55,12 @@ public void testContainsTaskWithRequestId() throws Exception {
     props.put(CommonAdminParams.ASYNC, requestId);
     tq.offer(Utils.toJSON(props));
 
-    assertTrue("Task queue should contain task with requestid " + requestId,
+    assertTrue(
+        "Task queue should contain task with requestid " + requestId,
         tq.containsTaskWithRequestId(CommonAdminParams.ASYNC, requestId));
 
-    assertFalse("Task queue should not contain task with requestid " + nonExistentRequestId,
+    assertFalse(
+        "Task queue should not contain task with requestid " + nonExistentRequestId,
         tq.containsTaskWithRequestId(CommonAdminParams.ASYNC, nonExistentRequestId));
 
     // Create a response node as if someone is waiting for a response from the Overseer; then,
@@ -74,7 +74,8 @@ public void testContainsTaskWithRequestId() throws Exception {
     props.put(CommonAdminParams.ASYNC, requestId2);
     tq.createRequestNode(Utils.toJSON(props), watchID);
 
-    // Set a SolrResponse as the response node by removing the QueueEvent, as done in OverseerTaskProcessor
+    // Set a SolrResponse as the response node by removing the QueueEvent, as done in
+    // OverseerTaskProcessor
     List queueEvents = tq.peekTopN(2, s -> false, 1000);
     OverseerTaskQueue.QueueEvent requestId2Event = null;
     for (OverseerTaskQueue.QueueEvent queueEvent : queueEvents) {
@@ -90,7 +91,8 @@ public void testContainsTaskWithRequestId() throws Exception {
     tq.remove(requestId2Event);
 
     // Make sure this call to check if requestId exists doesn't barf with Json parse exception
-    assertTrue("Task queue should contain task with requestid " + requestId,
+    assertTrue(
+        "Task queue should contain task with requestid " + requestId,
         tq.containsTaskWithRequestId(CommonAdminParams.ASYNC, requestId));
   }
 }
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
index dfaa9c8a671..c2a46d36cc4 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
@@ -27,6 +27,8 @@
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
+import com.codahale.metrics.Snapshot;
+import com.codahale.metrics.Timer;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.nio.file.Path;
@@ -45,9 +47,7 @@
 import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
-
 import javax.xml.parsers.ParserConfigurationException;
-
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.SolrClient;
@@ -108,9 +108,6 @@
 import org.slf4j.LoggerFactory;
 import org.xml.sax.SAXException;
 
-import com.codahale.metrics.Snapshot;
-import com.codahale.metrics.Timer;
-
 @Slow
 @SolrTestCaseJ4.SuppressSSL
 public class OverseerTest extends SolrTestCaseJ4 {
@@ -129,20 +126,24 @@ public class OverseerTest extends SolrTestCaseJ4 {
   private final List overseers = Collections.synchronizedList(new ArrayList<>());
   private final List readers = Collections.synchronizedList(new ArrayList<>());
   private final List zkClients = Collections.synchronizedList(new ArrayList<>());
-  private final List httpShardHandlerFactorys = Collections.synchronizedList(new ArrayList<>());
-  private final List updateShardHandlers = Collections.synchronizedList(new ArrayList<>());
+  private final List httpShardHandlerFactorys =
+      Collections.synchronizedList(new ArrayList<>());
+  private final List updateShardHandlers =
+      Collections.synchronizedList(new ArrayList<>());
 
   private final List solrClients = Collections.synchronizedList(new ArrayList<>());
   private static final String COLLECTION = SolrTestCaseJ4.DEFAULT_TEST_COLLECTION_NAME;
 
-  public static class MockZKController{
+  public static class MockZKController {
 
     private final SolrZkClient zkClient;
     private final ZkStateReader zkStateReader;
     private final String nodeName;
-    private final Map electionContext = Collections.synchronizedMap(new HashMap());
+    private final Map electionContext =
+        Collections.synchronizedMap(new HashMap());
     private List overseers;
 
-    public MockZKController(String zkAddress, String nodeName, List overseers) throws InterruptedException, TimeoutException, IOException, KeeperException {
+    public MockZKController(String zkAddress, String nodeName, List overseers)
+        throws InterruptedException, TimeoutException, IOException, KeeperException {
       this.overseers = overseers;
       this.nodeName = nodeName;
       zkClient = new SolrZkClient(zkAddress, TIMEOUT);
@@ -185,58 +186,97 @@ public void close() {
     }
 
     /**
-     * Create a collection.
-     * Note there's a similar but slightly different {@link OverseerTest#createCollection(String, int)}.
+     * Create a collection. Note there's a similar but slightly different {@link
+     * OverseerTest#createCollection(String, int)}.
     */
     public void createCollection(String collection, int numShards) throws Exception {
-      // Create collection znode before having ClusterStateUpdater create state.json below it or it will fail.
+      // Create collection znode before having ClusterStateUpdater create state.json below it or it
+      // will fail.
       zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection, true);
 
-      ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(),
-          "name", collection,
-          ZkStateReader.REPLICATION_FACTOR, "1",
-          ZkStateReader.NUM_SHARDS_PROP, Integer.toString(numShards),
-          "createNodeSet", "");
+      ZkNodeProps m =
+          new ZkNodeProps(
+              Overseer.QUEUE_OPERATION,
+              CollectionParams.CollectionAction.CREATE.toLower(),
+              "name",
+              collection,
+              ZkStateReader.REPLICATION_FACTOR,
+              "1",
+              ZkStateReader.NUM_SHARDS_PROP,
+              Integer.toString(numShards),
+              "createNodeSet",
+              "");
       final Overseer overseer = MiniSolrCloudCluster.getOpenOverseer(overseers);
-      // This being an Overseer test, we force it to use the Overseer based cluster state update. Look for "new Overseer" calls in this class.
+      // This being an Overseer test, we force it to use the Overseer based cluster state update.
+      // Look for "new Overseer" calls in this class.
       assertFalse(overseer.getDistributedClusterStateUpdater().isDistributedStateUpdate());
       ZkDistributedQueue q = overseer.getStateUpdateQueue();
       q.offer(Utils.toJSON(m));
     }
 
-    public String publishState(String collection, String coreName, String coreNodeName, String shard, Replica.State stateName, int numShards, boolean startElection, Overseer overseer)
+    public String publishState(
+        String collection,
+        String coreName,
+        String coreNodeName,
+        String shard,
+        Replica.State stateName,
+        int numShards,
+        boolean startElection,
+        Overseer overseer)
         throws Exception {
       if (stateName == null) {
         ElectionContext ec = electionContext.remove(coreName);
         if (ec != null) {
           ec.cancelElection();
         }
-        ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.DELETECORE.toLower(),
-            ZkStateReader.NODE_NAME_PROP, nodeName,
-            ZkStateReader.BASE_URL_PROP, zkStateReader.getBaseUrlForNodeName(nodeName),
-            ZkStateReader.CORE_NAME_PROP, coreName,
-            ZkStateReader.CORE_NODE_NAME_PROP, coreNodeName,
-            ZkStateReader.COLLECTION_PROP, collection);
+        ZkNodeProps m =
+            new ZkNodeProps(
+                Overseer.QUEUE_OPERATION,
+                OverseerAction.DELETECORE.toLower(),
+                ZkStateReader.NODE_NAME_PROP,
+                nodeName,
+                ZkStateReader.BASE_URL_PROP,
+                zkStateReader.getBaseUrlForNodeName(nodeName),
+                ZkStateReader.CORE_NAME_PROP,
+                coreName,
+                ZkStateReader.CORE_NODE_NAME_PROP,
+                coreNodeName,
+                ZkStateReader.COLLECTION_PROP,
+                collection);
         assertFalse(overseer.getDistributedClusterStateUpdater().isDistributedStateUpdate());
         ZkDistributedQueue q = overseer.getStateUpdateQueue();
         q.offer(Utils.toJSON(m));
         return null;
       } else {
-        ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(),
-            ZkStateReader.STATE_PROP, stateName.toString(),
-            ZkStateReader.NODE_NAME_PROP, nodeName,
-            ZkStateReader.BASE_URL_PROP, zkStateReader.getBaseUrlForNodeName(nodeName),
-            ZkStateReader.CORE_NAME_PROP, coreName,
-            ZkStateReader.CORE_NODE_NAME_PROP, coreNodeName,
-            ZkStateReader.COLLECTION_PROP, collection,
-            ZkStateReader.SHARD_ID_PROP, shard,
-            ZkStateReader.NUM_SHARDS_PROP, Integer.toString(numShards));
+        ZkNodeProps m =
+            new ZkNodeProps(
+                Overseer.QUEUE_OPERATION,
+                OverseerAction.STATE.toLower(),
+                ZkStateReader.STATE_PROP,
+                stateName.toString(),
+                ZkStateReader.NODE_NAME_PROP,
+                nodeName,
+                ZkStateReader.BASE_URL_PROP,
+                zkStateReader.getBaseUrlForNodeName(nodeName),
+                ZkStateReader.CORE_NAME_PROP,
+                coreName,
+                ZkStateReader.CORE_NODE_NAME_PROP,
+                coreNodeName,
+                ZkStateReader.COLLECTION_PROP,
+                collection,
+                ZkStateReader.SHARD_ID_PROP,
+                shard,
+                ZkStateReader.NUM_SHARDS_PROP,
+                Integer.toString(numShards));
         ZkDistributedQueue q = overseer.getStateUpdateQueue();
         q.offer(Utils.toJSON(m));
       }
 
       if (startElection && collection.length() > 0) {
-        zkStateReader.waitForState(collection, 45000, TimeUnit.MILLISECONDS,
+        zkStateReader.waitForState(
+            collection,
+            45000,
+            TimeUnit.MILLISECONDS,
            (liveNodes, collectionState) -> getShardId(collectionState, coreNodeName) != null);
         String shardId = getShardId(collection, coreNodeName);
         if (shardId != null) {
@@ -246,19 +286,33 @@ public String publishState(String collection, String coreName, String coreNodeNa
           }
 
           try {
-            zkClient.makePath("/collections/" + collection + "/leader_elect/"
-                + shardId + "/election", true);
-          } catch (NodeExistsException nee) {}
-          ZkNodeProps props = new ZkNodeProps(ZkStateReader.NODE_NAME_PROP, nodeName,
-              ZkStateReader.BASE_URL_PROP, zkStateReader.getBaseUrlForNodeName(nodeName),
-              ZkStateReader.CORE_NAME_PROP, coreName,
-              ZkStateReader.SHARD_ID_PROP, shardId,
-              ZkStateReader.COLLECTION_PROP, collection,
-              ZkStateReader.CORE_NODE_NAME_PROP, coreNodeName);
+            zkClient.makePath(
+                "/collections/" + collection + "/leader_elect/" + shardId + "/election", true);
+          } catch (NodeExistsException nee) {
+          }
+          ZkNodeProps props =
+              new ZkNodeProps(
+                  ZkStateReader.NODE_NAME_PROP,
+                  nodeName,
+                  ZkStateReader.BASE_URL_PROP,
+                  zkStateReader.getBaseUrlForNodeName(nodeName),
+                  ZkStateReader.CORE_NAME_PROP,
+                  coreName,
+                  ZkStateReader.SHARD_ID_PROP,
+                  shardId,
+                  ZkStateReader.COLLECTION_PROP,
+                  collection,
+                  ZkStateReader.CORE_NODE_NAME_PROP,
+                  coreNodeName);
           LeaderElector elector = new LeaderElector(zkClient);
-          ShardLeaderElectionContextBase ctx = new ShardLeaderElectionContextBase(
-              elector, shardId, collection, nodeName + coreName, props,
-              MockSolrSource.makeSimpleMock(overseer, zkStateReader, null));
+          ShardLeaderElectionContextBase ctx =
+              new ShardLeaderElectionContextBase(
+                  elector,
+                  shardId,
+                  collection,
+                  nodeName + coreName,
+                  props,
+                  MockSolrSource.makeSimpleMock(overseer, zkStateReader, null));
           elector.setup(ctx);
           electionContext.put(coreName, ctx);
           elector.joinElection(ctx, false);
@@ -275,7 +329,7 @@ private String getShardId(String collection, String coreNodeName) {
 
     private String getShardId(DocCollection collection, String coreNodeName) {
       if (collection == null) return null;
-      Map<String,Slice> slices = collection.getSlicesMap();
+      Map<String, Slice> slices = collection.getSlicesMap();
       if (slices != null) {
         for (Slice slice : slices.values()) {
           for (Replica replica : slice.getReplicas()) {
@@ -289,7 +343,6 @@ private String getShardId(DocCollection collection, String coreNodeName) {
       return null;
     }
 
-
     public ZkStateReader getZkReader() {
       return zkStateReader;
     }
@@ -311,7 +364,6 @@ public static void beforeClass() throws Exception {
     initCore();
   }
 
-
   @Before
   public void setUp() throws Exception {
     testDone = false;
@@ -332,14 +384,14 @@ public static void afterClass() throws Exception {
     }
 
     server = null;
-
   }
 
   @After
   public void tearDown() throws Exception {
     testDone = true;
 
-    ExecutorService customThreadPool = ExecutorUtil.newMDCAwareCachedThreadPool(new SolrNamedThreadFactory("closeThreadPool"));
+    ExecutorService customThreadPool =
+        ExecutorUtil.newMDCAwareCachedThreadPool(new SolrNamedThreadFactory("closeThreadPool"));
 
     for (ZkController zkController : zkControllers) {
       customThreadPool.submit(zkController::close);
@@ -354,7 +406,7 @@ public void tearDown() throws Exception {
     }
 
     for (SolrClient solrClient : solrClients) {
-      customThreadPool.submit( () -> IOUtils.closeQuietly(solrClient));
+      customThreadPool.submit(() -> IOUtils.closeQuietly(solrClient));
     }
 
     for (ZkStateReader reader : readers) {
@@ -362,13 +414,13 @@ public void tearDown() throws Exception {
     }
 
     for (SolrZkClient solrZkClient : zkClients) {
-      customThreadPool.submit( () -> IOUtils.closeQuietly(solrZkClient));
+      customThreadPool.submit(() -> IOUtils.closeQuietly(solrZkClient));
     }
 
     ExecutorUtil.shutdownAndAwaitTermination(customThreadPool);
 
-    customThreadPool = ExecutorUtil.newMDCAwareCachedThreadPool(new SolrNamedThreadFactory("closeThreadPool"));
-
+    customThreadPool =
+        ExecutorUtil.newMDCAwareCachedThreadPool(new SolrNamedThreadFactory("closeThreadPool"));
 
     for (Overseer overseer : overseers) {
       customThreadPool.submit(overseer::close);
@@ -391,18 +443,27 @@ public void tearDown() throws Exception {
   }
 
   /**
-   * This method creates a collection. It is different from {@link MockZKController#createCollection(String, int)} in
-   * the way the {@link ZkDistributedQueue} is obtained.
+   * This method creates a collection. It is different from {@link
+   * MockZKController#createCollection(String, int)} in the way the {@link ZkDistributedQueue} is
+   * obtained.
   */
   private void createCollection(String collection, int numShards) throws Exception {
-    // Create collection znode before having ClusterStateUpdater create state.json below it or it will fail.
+    // Create collection znode before having ClusterStateUpdater create state.json below it or it
+    // will fail.
     zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection, true);
 
-    ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(),
-        "name", collection,
-        ZkStateReader.REPLICATION_FACTOR, "1",
-        ZkStateReader.NUM_SHARDS_PROP, Integer.toString(numShards),
-        "createNodeSet", "");
+    ZkNodeProps m =
+        new ZkNodeProps(
+            Overseer.QUEUE_OPERATION,
+            CollectionParams.CollectionAction.CREATE.toLower(),
+            "name",
+            collection,
+            ZkStateReader.REPLICATION_FACTOR,
+            "1",
+            ZkStateReader.NUM_SHARDS_PROP,
+            Integer.toString(numShards),
+            "createNodeSet",
+            "");
     ZkDistributedQueue q = getOverseerZero().getStateUpdateQueue();
     q.offer(Utils.toJSON(m));
   }
@@ -421,24 +482,56 @@ public void testShardAssignment() throws Exception {
     try (ZkStateReader reader = new ZkStateReader(zkClient)) {
       reader.createClusterStateWatchersAndUpdate();
 
-      mockController = new MockZKController(server.getZkAddress(), "127.0.0.1:8983_solr", overseers);
+      mockController =
+          new MockZKController(server.getZkAddress(), "127.0.0.1:8983_solr", overseers);
 
       final int numShards = 6; // this is not the number of shards in the collection
 
       createCollection(COLLECTION, 3);
 
       for (int i = 0; i < numShards; i++) {
-        assertNotNull("shard got no id?", mockController.publishState(COLLECTION, "core" + (i + 1), "node" + (i + 1), "shard" + ((i % 3) + 1), Replica.State.ACTIVE, 3, true, overseers.get(0)));
+        assertNotNull(
+            "shard got no id?",
+            mockController.publishState(
+                COLLECTION,
+                "core" + (i + 1),
+                "node" + (i + 1),
+                "shard" + ((i % 3) + 1),
+                Replica.State.ACTIVE,
+                3,
+                true,
+                overseers.get(0)));
       }
 
-      reader.waitForState(COLLECTION, 30, TimeUnit.SECONDS, MiniSolrCloudCluster.expectedShardsAndActiveReplicas(3, 6));
+      reader.waitForState(
+          COLLECTION,
+          30,
+          TimeUnit.SECONDS,
+          MiniSolrCloudCluster.expectedShardsAndActiveReplicas(3, 6));
 
-      final Map rmap = reader.getClusterState().getCollection(COLLECTION).getSlice("shard1").getReplicasMap();
+      final Map rmap =
+          reader.getClusterState().getCollection(COLLECTION).getSlice("shard1").getReplicasMap();
       assertEquals(rmap.toString(), 2, rmap.size());
-      assertEquals(rmap.toString(), 2, reader.getClusterState().getCollection(COLLECTION).getSlice("shard2").getReplicasMap().size());
-      assertEquals(rmap.toString(), 2, reader.getClusterState().getCollection(COLLECTION).getSlice("shard3").getReplicasMap().size());
-
-      //make sure leaders are in cloud state
+      assertEquals(
+          rmap.toString(),
+          2,
+          reader
+              .getClusterState()
+              .getCollection(COLLECTION)
+              .getSlice("shard2")
+              .getReplicasMap()
+              .size());
+      assertEquals(
+          rmap.toString(),
+          2,
+          reader
+              .getClusterState()
+              .getCollection(COLLECTION)
+              .getSlice("shard3")
+              .getReplicasMap()
+              .size());
+
+      // make sure leaders are in cloud state
       assertNotNull(reader.getLeaderUrl(COLLECTION, "shard1", 15000));
      assertNotNull(reader.getLeaderUrl(COLLECTION, "shard2", 15000));
      assertNotNull(reader.getLeaderUrl(COLLECTION, "shard3", 15000));
@@ -465,45 +558,123 @@ public void testBadQueueItem() throws Exception {
     try (ZkStateReader reader = new ZkStateReader(zkClient)) {
       reader.createClusterStateWatchersAndUpdate();
 
-      mockController = new MockZKController(server.getZkAddress(), "127.0.0.1:8983_solr", overseers);
+      mockController =
+          new MockZKController(server.getZkAddress(), "127.0.0.1:8983_solr", overseers);
 
       final int numShards = 3;
       mockController.createCollection(COLLECTION, 3);
       for (int i = 0; i < numShards; i++) {
-        assertNotNull("shard got no id?", mockController.publishState(COLLECTION, "core" + (i + 1),
-            "node" + (i + 1), "shard" + ((i % 3) + 1), Replica.State.ACTIVE, 3, true, overseers.get(0)));
+        assertNotNull(
+            "shard got no id?",
+            mockController.publishState(
+                COLLECTION,
+                "core" + (i + 1),
+                "node" + (i + 1),
+                "shard" + ((i % 3) + 1),
+                Replica.State.ACTIVE,
+                3,
+                true,
+                overseers.get(0)));
       }
 
-      reader.waitForState(COLLECTION, 30, TimeUnit.SECONDS, MiniSolrCloudCluster.expectedShardsAndActiveReplicas(3, 3));
-
-      assertEquals(1, reader.getClusterState().getCollection(COLLECTION).getSlice("shard1").getReplicasMap().size());
-      assertEquals(1, reader.getClusterState().getCollection(COLLECTION).getSlice("shard2").getReplicasMap().size());
-      assertEquals(1, reader.getClusterState().getCollection(COLLECTION).getSlice("shard3").getReplicasMap().size());
-
-      //make sure leaders are in cloud state
+      reader.waitForState(
+          COLLECTION,
+          30,
+          TimeUnit.SECONDS,
+          MiniSolrCloudCluster.expectedShardsAndActiveReplicas(3, 3));
+
+      assertEquals(
+          1,
+          reader
+              .getClusterState()
+              .getCollection(COLLECTION)
+              .getSlice("shard1")
+              .getReplicasMap()
+              .size());
+      assertEquals(
+          1,
+          reader
+              .getClusterState()
+              .getCollection(COLLECTION)
+              .getSlice("shard2")
+              .getReplicasMap()
+              .size());
+      assertEquals(
+          1,
+          reader
+              .getClusterState()
+              .getCollection(COLLECTION)
+              .getSlice("shard3")
+              .getReplicasMap()
+              .size());
+
+      // make sure leaders are in cloud state
       assertNotNull(reader.getLeaderUrl(COLLECTION, "shard1", 15000));
       assertNotNull(reader.getLeaderUrl(COLLECTION, "shard2", 15000));
       assertNotNull(reader.getLeaderUrl(COLLECTION, "shard3", 15000));
 
       // publish a bad queue item
       String emptyCollectionName = "";
-      mockController.publishState(emptyCollectionName, "core0", "node0", "shard1", Replica.State.ACTIVE, 1, true, overseers.get(0));
-      mockController.publishState(emptyCollectionName, "core0", "node0", "shard1", null, 1, true, overseers.get(0));
+      mockController.publishState(
+          emptyCollectionName,
+          "core0",
+          "node0",
+          "shard1",
+          Replica.State.ACTIVE,
+          1,
+          true,
+          overseers.get(0));
+      mockController.publishState(
+          emptyCollectionName, "core0", "node0", "shard1", null, 1, true, overseers.get(0));
 
       mockController.createCollection("collection2", 3);
       // make sure the Overseer is still processing items
       for (int i = 0; i < numShards; i++) {
-        assertNotNull("shard got no id?", mockController.publishState("collection2",
-            "core" + (i + 1), "node" + (i + 1), "shard" + ((i % 3) + 1), Replica.State.ACTIVE, 3, true, overseers.get(0)));
+        assertNotNull(
+            "shard got no id?",
+            mockController.publishState(
+                "collection2",
+                "core" + (i + 1),
+                "node" + (i + 1),
+                "shard" + ((i % 3) + 1),
+                Replica.State.ACTIVE,
+                3,
+                true,
+                overseers.get(0)));
       }
 
-      reader.waitForState("collection2", 30, TimeUnit.SECONDS, MiniSolrCloudCluster.expectedShardsAndActiveReplicas(3, 3));
-
-      assertEquals(1, reader.getClusterState().getCollection("collection2").getSlice("shard1").getReplicasMap().size());
-      assertEquals(1, reader.getClusterState().getCollection("collection2").getSlice("shard2").getReplicasMap().size());
-      assertEquals(1, reader.getClusterState().getCollection("collection2").getSlice("shard3").getReplicasMap().size());
-
-      //make sure leaders are in cloud state
+      reader.waitForState(
+          "collection2",
+          30,
+          TimeUnit.SECONDS,
+          MiniSolrCloudCluster.expectedShardsAndActiveReplicas(3, 3));
+
+      assertEquals(
+          1,
+          reader
+              .getClusterState()
+              .getCollection("collection2")
+              .getSlice("shard1")
+              .getReplicasMap()
+              .size());
+      assertEquals(
+          1,
+          reader
+              .getClusterState()
+              .getCollection("collection2")
+              .getSlice("shard2")
+              .getReplicasMap()
+              .size());
+      assertEquals(
+          1,
+          reader
+              .getClusterState()
+              .getCollection("collection2")
+              .getSlice("shard3")
+              .getReplicasMap()
+              .size());
+
+      // make sure leaders are in cloud state
       assertNotNull(reader.getLeaderUrl("collection2", "shard1", 15000));
       assertNotNull(reader.getLeaderUrl("collection2", "shard2", 15000));
       assertNotNull(reader.getLeaderUrl("collection2", "shard3", 15000));
@@ -532,25 +703,41 @@ public void testDownNodeFailover() throws Exception {
     try (ZkStateReader reader = new ZkStateReader(zkClient)) {
       reader.createClusterStateWatchersAndUpdate();
 
-      mockController = new MockZKController(server.getZkAddress(), "127.0.0.1:8983_solr", overseers);
+      mockController =
+          new MockZKController(server.getZkAddress(), "127.0.0.1:8983_solr", overseers);
 
-      try (ZkController zkController = createMockZkController(server.getZkAddress(), zkClient, reader)) {
+      try (ZkController zkController =
+          createMockZkController(server.getZkAddress(), zkClient, reader)) {
 
         for (int i = 0; i < 5; i++) {
           mockController.createCollection("collection" + i, 1);
-          assertNotNull("shard got no id?", mockController.publishState("collection" + i, "core1",
-              "core_node1", "shard1", Replica.State.ACTIVE, 1, true, overseers.get(0)));
+          assertNotNull(
+              "shard got no id?",
+              mockController.publishState(
+                  "collection" + i,
+                  "core1",
+                  "core_node1",
+                  "shard1",
+                  Replica.State.ACTIVE,
+                  1,
+                  true,
+                  overseers.get(0)));
         }
       }
-      ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.DOWNNODE.toLower(),
-          ZkStateReader.NODE_NAME_PROP, "127.0.0.1:8983_solr");
+      ZkNodeProps m =
+          new ZkNodeProps(
+              Overseer.QUEUE_OPERATION,
+              OverseerAction.DOWNNODE.toLower(),
+              ZkStateReader.NODE_NAME_PROP,
+              "127.0.0.1:8983_solr");
       List commands = new NodeMutator(null).downNode(reader.getClusterState(), m);
 
       ZkDistributedQueue q = getOverseerZero().getStateUpdateQueue();
 
       q.offer(Utils.toJSON(m));
 
-      verifyReplicaStatus(reader, commands.get(0).name, "shard1", "core_node1", Replica.State.DOWN);
+      verifyReplicaStatus(
+          reader, commands.get(0).name, "shard1", "core_node1", Replica.State.DOWN);
       overseerClient.close();
 
       overseerClient = electNewOverseer(server.getZkAddress());
@@ -566,25 +753,31 @@
     }
   }
 
-  //wait until collections are available
-  private void waitForCollections(ZkStateReader stateReader, String... collections) throws InterruptedException, KeeperException, TimeoutException {
+  // wait until collections are available
+  private void waitForCollections(ZkStateReader stateReader, String... collections)
+      throws InterruptedException, KeeperException, TimeoutException {
     int maxIterations = 100;
     while (0 < maxIterations--) {
 
       final ClusterState state = stateReader.getClusterState();
       Set availableCollections = state.getCollectionsMap().keySet();
       int availableCount = 0;
-      for(String requiredCollection: collections) {
-        stateReader.waitForState(requiredCollection, 30000, TimeUnit.MILLISECONDS, (liveNodes, collectionState) -> collectionState != null);
-        if(availableCollections.contains(requiredCollection)) {
+      for (String requiredCollection : collections) {
+        stateReader.waitForState(
+            requiredCollection,
+            30000,
+            TimeUnit.MILLISECONDS,
+            (liveNodes, collectionState) -> collectionState != null);
+        if (availableCollections.contains(requiredCollection)) {
          availableCount++;
        }
-        if(availableCount == collections.length) return;
-
+        if (availableCount == collections.length) return;
      }
    }
-    log.warn("Timeout waiting for collections: {} state: {}"
-        , Arrays.asList(collections), stateReader.getClusterState());
+    log.warn(
+        "Timeout waiting for collections: {} state: {}",
+        Arrays.asList(collections),
+        stateReader.getClusterState());
   }
 
   @Test
@@ -606,28 +799,47 @@
 
       createCollection(COLLECTION, 1);
 
-      ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(),
-          ZkStateReader.NODE_NAME_PROP, "node1:8983_",
-          ZkStateReader.COLLECTION_PROP, COLLECTION,
-          ZkStateReader.SHARD_ID_PROP, "shard1",
-          ZkStateReader.CORE_NAME_PROP, "core1",
-          ZkStateReader.CORE_NODE_NAME_PROP, "core_node1",
-          ZkStateReader.ROLES_PROP, "",
-          ZkStateReader.STATE_PROP, Replica.State.RECOVERING.toString());
+      ZkNodeProps m =
+          new ZkNodeProps(
+              Overseer.QUEUE_OPERATION,
+              OverseerAction.STATE.toLower(),
+              ZkStateReader.NODE_NAME_PROP,
+              "node1:8983_",
+              ZkStateReader.COLLECTION_PROP,
+              COLLECTION,
+              ZkStateReader.SHARD_ID_PROP,
+              "shard1",
+              ZkStateReader.CORE_NAME_PROP,
+              "core1",
+              ZkStateReader.CORE_NODE_NAME_PROP,
+              "core_node1",
+              ZkStateReader.ROLES_PROP,
+              "",
+              ZkStateReader.STATE_PROP,
+              Replica.State.RECOVERING.toString());
 
       q.offer(Utils.toJSON(m));
 
       waitForCollections(reader, COLLECTION);
       verifyReplicaStatus(reader, "collection1", "shard1", "core_node1", Replica.State.RECOVERING);
 
-      //publish node state (active)
-      m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(),
-          ZkStateReader.NODE_NAME_PROP, "node1:8983_",
-          ZkStateReader.COLLECTION_PROP, COLLECTION,
-          ZkStateReader.SHARD_ID_PROP, "shard1",
-          ZkStateReader.CORE_NAME_PROP, "core1",
-          ZkStateReader.ROLES_PROP, "",
-          ZkStateReader.STATE_PROP, Replica.State.ACTIVE.toString());
+      // publish node state (active)
+      m =
+          new ZkNodeProps(
+              Overseer.QUEUE_OPERATION,
+              OverseerAction.STATE.toLower(),
+              ZkStateReader.NODE_NAME_PROP,
+              "node1:8983_",
+              ZkStateReader.COLLECTION_PROP,
+              COLLECTION,
+              ZkStateReader.SHARD_ID_PROP,
+              "shard1",
+              ZkStateReader.CORE_NAME_PROP,
+              "core1",
+              ZkStateReader.ROLES_PROP,
+              "",
+              ZkStateReader.STATE_PROP,
+              Replica.State.ACTIVE.toString());
 
       q.offer(Utils.toJSON(m));
 
@@ -641,17 +853,27 @@
     }
   }
 
-  private void verifyShardLeader(ZkStateReader reader, String collection, String shard, String expectedCore)
+  private void verifyShardLeader(
+      ZkStateReader reader, String collection, String shard, String expectedCore)
       throws InterruptedException, KeeperException, TimeoutException {
 
-    reader.waitForState(collection, 15000, TimeUnit.MILLISECONDS,
-        (liveNodes, collectionState) -> collectionState != null
-            && expectedCore.equals((collectionState.getLeader(shard) != null)
-            ? collectionState.getLeader(shard).getStr(ZkStateReader.CORE_NAME_PROP) : null));
+    reader.waitForState(
+        collection,
+        15000,
+        TimeUnit.MILLISECONDS,
+        (liveNodes, collectionState) ->
+            collectionState != null
+                && expectedCore.equals(
+                    (collectionState.getLeader(shard) != null)
+                        ? collectionState.getLeader(shard).getStr(ZkStateReader.CORE_NAME_PROP)
+                        : null));
 
     DocCollection docCollection = reader.getClusterState().getCollection(collection);
-    assertEquals("Unexpected shard leader coll:" + collection + " shard:" + shard, expectedCore,
-        (docCollection.getLeader(shard) != null) ? docCollection.getLeader(shard).getStr(ZkStateReader.CORE_NAME_PROP)
+    assertEquals(
+        "Unexpected shard leader coll:" + collection + " shard:" + shard,
+        expectedCore,
+        (docCollection.getLeader(shard) != null)
+            ? docCollection.getLeader(shard).getStr(ZkStateReader.CORE_NAME_PROP)
            : null);
   }
 
@@ -686,7 +908,8 @@ public void testOverseerFailure() throws Exception {
       reader = new ZkStateReader(zkClient);
       reader.createClusterStateWatchersAndUpdate();
 
-      mockController = new MockZKController(server.getZkAddress(), "127.0.0.1:8983_solr", overseers);
+      mockController =
+          new MockZKController(server.getZkAddress(), "127.0.0.1:8983_solr", overseers);
 
       overseerClient = electNewOverseer(server.getZkAddress());
 
@@ -694,40 +917,75 @@
 
       ZkController zkController = createMockZkController(server.getZkAddress(), zkClient, reader);
 
-      mockController.publishState(COLLECTION, core, core_node, "shard1",
-          Replica.State.RECOVERING, numShards, true, overseers.get(0));
+      mockController.publishState(
+          COLLECTION,
+          core,
+          core_node,
+          "shard1",
+          Replica.State.RECOVERING,
+          numShards,
+          true,
+          overseers.get(0));
 
       waitForCollections(reader, COLLECTION);
       verifyReplicaStatus(reader, COLLECTION, "shard1", "core_node1", Replica.State.RECOVERING);
 
-      mockController.publishState(COLLECTION, core, core_node, "shard1", Replica.State.ACTIVE,
-          numShards, true, overseers.get(0));
+      mockController.publishState(
+          COLLECTION,
+          core,
+          core_node,
+          "shard1",
+          Replica.State.ACTIVE,
+          numShards,
+          true,
+          overseers.get(0));
 
       verifyReplicaStatus(reader, COLLECTION, "shard1", "core_node1", Replica.State.ACTIVE);
 
-      mockController.publishState(COLLECTION, core, core_node, "shard1",
-          Replica.State.RECOVERING, numShards, true, overseers.get(0));
+      mockController.publishState(
+          COLLECTION,
+          core,
+          core_node,
+          "shard1",
+          Replica.State.RECOVERING,
+          numShards,
+          true,
+          overseers.get(0));
 
       overseerClient.close();
-
+
       overseerClient = electNewOverseer(server.getZkAddress());
 
       verifyReplicaStatus(reader, COLLECTION, "shard1", "core_node1", Replica.State.RECOVERING);
 
-      assertEquals("Live nodes count does not match", 1, reader
-          .getClusterState().getLiveNodes().size());
-      assertEquals(shard+" replica count does not match", 1, reader.getClusterState()
-          .getCollection(COLLECTION).getSlice(shard).getReplicasMap().size());
-      mockController.publishState(COLLECTION, core, core_node, "shard1", null, numShards, true, overseers.get(1));
-
-      reader.waitForState(COLLECTION, 5000,
-          TimeUnit.MILLISECONDS, (liveNodes, collectionState) -> collectionState != null && collectionState.getReplica(core_node) == null);
+      assertEquals(
+          "Live nodes count does not match", 1, reader.getClusterState().getLiveNodes().size());
+      assertEquals(
+          shard + " replica count does not match",
+          1,
+          reader
+              .getClusterState()
+              .getCollection(COLLECTION)
+              .getSlice(shard)
+              .getReplicasMap()
+              .size());
+      mockController.publishState(
+          COLLECTION, core, core_node, "shard1", null, numShards, true, overseers.get(1));
+
+      reader.waitForState(
+          COLLECTION,
+          5000,
+          TimeUnit.MILLISECONDS,
+          (liveNodes, collectionState) ->
+              collectionState != null && collectionState.getReplica(core_node) == null);
 
       reader.forceUpdateCollection(COLLECTION);
 
       // as of SOLR-5209 core removal does not cascade to remove the slice and collection
-      assertTrue(COLLECTION +" should remain after removal of the last core",
+      assertTrue(
+          COLLECTION + " should remain after removal of the last core",
           reader.getClusterState().hasCollection(COLLECTION));
-      assertTrue(core_node+" should be gone after publishing the null state",
+      assertTrue(
+          core_node + " should be gone after publishing the null state",
           null == reader.getClusterState().getCollection(COLLECTION).getReplica(core_node));
     } finally {
       close(mockController);
@@ -748,32 +1006,51 @@ public void testOverseerStatsReset() throws Exception {
      reader = new ZkStateReader(zkClient);
      reader.createClusterStateWatchersAndUpdate();
 
-      mockController = new MockZKController(server.getZkAddress(), "127.0.0.1:8983_solr", overseers);
+      mockController =
+          new MockZKController(server.getZkAddress(), "127.0.0.1:8983_solr", overseers);
 
       LeaderElector overseerElector = new LeaderElector(zkClient);
       if (overseers.size() > 0) {
-        overseers.get(overseers.size() -1).close();
-        overseers.get(overseers.size() -1).getZkStateReader().getZkClient().close();
+        overseers.get(overseers.size() - 1).close();
+        overseers.get(overseers.size() - 1).getZkStateReader().getZkClient().close();
       }
 
       ZkController zkController = createMockZkController(server.getZkAddress(), zkClient, reader);
 
-      UpdateShardHandler updateShardHandler = new UpdateShardHandler(UpdateShardHandlerConfig.DEFAULT);
+      UpdateShardHandler updateShardHandler =
+          new UpdateShardHandler(UpdateShardHandlerConfig.DEFAULT);
       updateShardHandlers.add(updateShardHandler);
       HttpShardHandlerFactory httpShardHandlerFactory = new HttpShardHandlerFactory();
       httpShardHandlerFactory.init(new PluginInfo("shardHandlerFactory", Collections.emptyMap()));
       httpShardHandlerFactorys.add(httpShardHandlerFactory);
-      Overseer overseer = new Overseer((HttpShardHandler) httpShardHandlerFactory.getShardHandler(), updateShardHandler, "/admin/cores", reader, zkController,
-          new CloudConfig.CloudConfigBuilder("127.0.0.1", 8983, "").setUseDistributedClusterStateUpdates(false).
-              setUseDistributedCollectionConfigSetExecution(false).build());
+      Overseer overseer =
+          new Overseer(
+              (HttpShardHandler) httpShardHandlerFactory.getShardHandler(),
+              updateShardHandler,
+              "/admin/cores",
+              reader,
+              zkController,
+              new CloudConfig.CloudConfigBuilder("127.0.0.1", 8983, "")
+                  .setUseDistributedClusterStateUpdates(false)
+                  .setUseDistributedCollectionConfigSetExecution(false)
+                  .build());
       overseers.add(overseer);
-      ElectionContext ec = new OverseerElectionContext(zkClient, overseer,
-          server.getZkAddress().replaceAll("/", "_"));
+      ElectionContext ec =
+          new OverseerElectionContext(
+              zkClient, overseer, server.getZkAddress().replaceAll("/", "_"));
       overseerElector.setup(ec);
       overseerElector.joinElection(ec, false);
 
       mockController.createCollection(COLLECTION, 1);
-      mockController.publishState(COLLECTION, "core1", "core_node1", "shard1", Replica.State.ACTIVE, 1, true, overseers.get(0));
+      mockController.publishState(
+          COLLECTION,
+          "core1",
+          "core_node1",
+          "shard1",
+          Replica.State.ACTIVE,
+          1,
+          true,
+          overseers.get(0));
 
       assertNotNull(overseer.getStats());
       assertTrue((overseer.getStats().getSuccessCount(OverseerAction.STATE.toLower())) > 0);
@@ -796,7 +1073,7 @@
 
   private AtomicInteger killCounter = new AtomicInteger();
 
-  private class OverseerRestarter implements Runnable{
+  private class OverseerRestarter implements Runnable {
     SolrZkClient overseerClient = null;
     public volatile boolean run = true;
     private final String zkAddress;
@@ -810,7 +1087,7 @@ public void run() {
       try {
         overseerClient = electNewOverseer(zkAddress);
         while (run) {
-          if (killCounter.get()>0) {
+          if (killCounter.get() > 0) {
            try {
              killCounter.decrementAndGet();
              log.info("Killing overseer.");
@@ -831,7 +1108,7 @@ public void run() {
      } finally {
        if (overseerClient != null) {
          try {
-            // overseerClient.close();
+            // overseerClient.close();
          } catch (Throwable t) {
            // ignore
          }
@@ -853,13 +1130,20 @@ public void testExceptionWhenFlushClusterState() throws Exception {
       reader = new ZkStateReader(zkClient);
       reader.createClusterStateWatchersAndUpdate();
 
-      // We did not create /collections/collection1 -> this message will cause exception when Overseer tries to flush
-      // the collection state
-      ZkNodeProps badMessage = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(),
-          "name", "collection1",
-          ZkStateReader.REPLICATION_FACTOR, "1",
-          ZkStateReader.NUM_SHARDS_PROP, "1",
-          "createNodeSet", "");
+      // We did not create /collections/collection1 -> this message will cause exception when
+      // Overseer tries to flush the collection state
+      ZkNodeProps badMessage =
+          new ZkNodeProps(
+              Overseer.QUEUE_OPERATION,
+              CollectionParams.CollectionAction.CREATE.toLower(),
+              "name",
+              "collection1",
+              ZkStateReader.REPLICATION_FACTOR,
+              "1",
+              ZkStateReader.NUM_SHARDS_PROP,
+              "1",
+              "createNodeSet",
+              "");
       ZkDistributedQueue workQueue = Overseer.getInternalWorkQueue(zkClient, new Stats());
       workQueue.offer(Utils.toJSON(badMessage));
       overseerClient = electNewOverseer(server.getZkAddress());
@@ -868,7 +1152,7 @@
       q.offer(Utils.toJSON(badMessage));
 
       TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME);
-      while(!timeOut.hasTimedOut()) {
+      while (!timeOut.hasTimedOut()) {
         if (q.peek() == null) {
           break;
         }
@@ -876,7 +1160,7 @@
       }
 
       assertTrue(showQpeek(workQueue), workQueue.peek() == null);
-      assertTrue(showQpeek(q), q.peek() == null);
+      assertTrue(showQpeek(q), q.peek() == null);
     } finally {
       close(overseerClient);
       close(reader);
@@ -896,7 +1180,6 @@ private String showQpeek(ZkDistributedQueue q) throws KeeperException, Interrupt
     return json.toString();
   }
 
-
   @Test
   public void testShardLeaderChange() throws Exception {
     ZkStateReader reader = null;
@@ -915,14 +1198,16 @@
       reader = new ZkStateReader(zkClient);
       reader.createClusterStateWatchersAndUpdate();
 
-      UpdateShardHandler updateShardHandler = new UpdateShardHandler(UpdateShardHandlerConfig.DEFAULT);
+      UpdateShardHandler updateShardHandler =
+          new UpdateShardHandler(UpdateShardHandlerConfig.DEFAULT);
       updateShardHandlers.add(updateShardHandler);
       HttpShardHandlerFactory httpShardHandlerFactory = new HttpShardHandlerFactory();
       httpShardHandlerFactorys.add(httpShardHandlerFactory);
-      electNewOverseer(server.getZkAddress());
+      electNewOverseer(server.getZkAddress());
 
-      // Create collection znode before repeatedly trying to enqueue the cluster state update message
+      // Create collection znode before repeatedly trying to enqueue the cluster state update
+      // message
       zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/" + COLLECTION, true);
 
       for (int i = 0; i < atLeast(4); i++) {
@@ -933,12 +1218,20 @@
         TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME);
         while (!timeout.hasTimedOut()) {
           try {
-            // We must only retry the enqueue to Overseer, not the collection znode creation (that doesn't depend on Overseer)
-            ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(),
-                "name", COLLECTION,
-                ZkStateReader.REPLICATION_FACTOR, "1",
-                ZkStateReader.NUM_SHARDS_PROP, "1",
-                "createNodeSet", "");
+            // We must only retry the enqueue to Overseer, not the collection znode creation (that
+            // doesn't depend on Overseer)
+            ZkNodeProps m =
+                new ZkNodeProps(
+                    Overseer.QUEUE_OPERATION,
+                    CollectionParams.CollectionAction.CREATE.toLower(),
+                    "name",
+                    COLLECTION,
+                    ZkStateReader.REPLICATION_FACTOR,
+                    "1",
+                    ZkStateReader.NUM_SHARDS_PROP,
+                    "1",
+                    "createNodeSet",
+                    "");
             ZkDistributedQueue q = getOpenOverseer().getStateUpdateQueue();
             q.offer(Utils.toJSON(m));
             break;
@@ -950,8 +1243,15 @@
         timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME);
         while (!timeout.hasTimedOut()) {
           try {
-            mockController.publishState(COLLECTION, "core1", "node1", "shard1", Replica.State.ACTIVE,
-                1, true, getOpenOverseer());
+            mockController.publishState(
+                COLLECTION,
+                "core1",
+                "node1",
+                "shard1",
+                Replica.State.ACTIVE,
+                1,
+                true,
+                getOpenOverseer());
             break;
           } catch (SolrException | KeeperException | AlreadyClosedException e) {
             e.printStackTrace();
@@ -968,21 +1268,35 @@
         timeout = new TimeOut(1, TimeUnit.SECONDS, TimeSource.NANO_TIME);
         while (!timeout.hasTimedOut()) {
          try {
-            mockController.publishState(COLLECTION, "core1", "node1", "shard1",
-                Replica.State.RECOVERING, 1, true, getOpenOverseer());
+            mockController.publishState(
+                COLLECTION,
+                "core1",
+                "node1",
+                "shard1",
+                Replica.State.RECOVERING,
+                1,
+                true,
+                getOpenOverseer());
            break;
          } catch (SolrException | AlreadyClosedException e) {
-            e.printStackTrace();
+            e.printStackTrace();
          }
        }
 
        mockController2 = new MockZKController(server.getZkAddress(), "node2:8984_", overseers);
 
-        timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME);
+        timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME);
        while (!timeout.hasTimedOut()) {
          try {
-            mockController.publishState(COLLECTION, "core1", "node1", "shard1", Replica.State.ACTIVE,
-                1, true, getOpenOverseer());
+            mockController.publishState(
+                COLLECTION,
+                "core1",
+                "node1",
+                "shard1",
+                Replica.State.ACTIVE,
+                1,
+                true,
+                getOpenOverseer());
            break;
          } catch (SolrException | AlreadyClosedException e) {
            e.printStackTrace();
@@ -991,19 +1305,24 @@
 
         verifyShardLeader(reader, COLLECTION, "shard1", "core1");
 
-
         timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME);
         while (!timeout.hasTimedOut()) {
           try {
-            mockController2.publishState(COLLECTION, "core4", "node2", "shard1", Replica.State.ACTIVE,
-                1, true, getOpenOverseer());
+            mockController2.publishState(
+                COLLECTION,
+                "core4",
+                "node2",
+                "shard1",
+                Replica.State.ACTIVE,
+                1,
+                true,
+                getOpenOverseer());
             break;
           } catch (SolrException | AlreadyClosedException e) {
             e.printStackTrace();
           }
         }
 
-
         mockController.close();
         mockController = null;
 
@@ -1011,25 +1330,24 @@
         ZkController zkController = createMockZkController(server.getZkAddress(), zkClient, reader);
         zkControllers.add(zkController);
 
         TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME);
-        timeOut.waitFor("Timed out waiting to see core4 as leader", () -> {
-
-          ZkCoreNodeProps leaderProps;
-          try {
-            leaderProps = zkController.getLeaderProps(COLLECTION, "shard1", 1000, false);
-          } catch (SolrException e) {
-            return false;
-          } catch (InterruptedException e) {
-            throw new RuntimeException(e);
-          } catch (SessionExpiredException e) {
-            return false;
-          }
-          if (leaderProps.getCoreName().equals("core4")) {
-            return true;
-          }
-          return false;
-
-        });
-
+        timeOut.waitFor(
+            "Timed out waiting to see core4 as leader",
+            () -> {
+              ZkCoreNodeProps leaderProps;
+              try {
+                leaderProps = zkController.getLeaderProps(COLLECTION, "shard1", 1000, false);
+              } catch (SolrException e) {
+                return false;
+              } catch (InterruptedException e) {
+                throw new RuntimeException(e);
+              } catch (SessionExpiredException e) {
+                return false;
+              }
+              if (leaderProps.getCoreName().equals("core4")) {
+                return true;
+              }
+              return false;
+            });
       }
 
    } finally {
@@ -1060,7 +1378,8 @@ public void testDoubleAssignment() throws Exception {
       reader = new ZkStateReader(zkClient);
       reader.createClusterStateWatchersAndUpdate();
 
-      mockController = new MockZKController(server.getZkAddress(), "127.0.0.1:8983_solr", overseers);
+      mockController =
+          new MockZKController(server.getZkAddress(), "127.0.0.1:8983_solr", overseers);
 
       overseerClient = electNewOverseer(server.getZkAddress());
 
@@ -1068,7 +1387,15 @@
 
       ZkController zkController = createMockZkController(server.getZkAddress(), zkClient, reader);
 
-      mockController.publishState(COLLECTION, "core1", "core_node1", "shard1", Replica.State.RECOVERING, 1, true, overseers.get(0));
+      mockController.publishState(
+          COLLECTION,
+          "core1",
+          "core_node1",
+          "shard1",
+          Replica.State.RECOVERING,
+          1,
+          true,
+          overseers.get(0));
 
       waitForCollections(reader, COLLECTION);
 
@@ -1076,9 +1403,18 @@
 
       mockController.close();
 
-      mockController = new MockZKController(server.getZkAddress(), "127.0.0.1:8983_solr", overseers);
+      mockController =
+          new MockZKController(server.getZkAddress(), "127.0.0.1:8983_solr", overseers);
 
-      mockController.publishState(COLLECTION, "core1", "core_node1","shard1",
Replica.State.RECOVERING, 1, true, overseers.get(0)); + mockController.publishState( + COLLECTION, + "core1", + "core_node1", + "shard1", + Replica.State.RECOVERING, + 1, + true, + overseers.get(0)); reader.forceUpdateCollection(COLLECTION); ClusterState state = reader.getClusterState(); @@ -1093,8 +1429,7 @@ public void testDoubleAssignment() throws Exception { } } } - assertEquals("Shard was found more than once in ClusterState", 1, - numFound); + assertEquals("Shard was found more than once in ClusterState", 1, numFound); } finally { close(overseerClient); close(mockController); @@ -1118,30 +1453,46 @@ public void testPerformance() throws Exception { reader = new ZkStateReader(zkClient); reader.createClusterStateWatchersAndUpdate(); - mockController = new MockZKController(server.getZkAddress(), "127.0.0.1:8983_solr", overseers); + mockController = + new MockZKController(server.getZkAddress(), "127.0.0.1:8983_solr", overseers); final int MAX_COLLECTIONS = 10, MAX_CORES = 10, MAX_STATE_CHANGES = 20000; - for (int i=0; i= MAX_COLLECTIONS - 1) j = 0; @@ -1149,12 +1500,17 @@ public void testPerformance() throws Exception { if (i > 0 && i % 100 == 0) log.info("Published {} items", i); } - // let's create a sentinel collection which we'll use to wait for overseer to complete operations + // let's create a sentinel collection which we'll use to wait for overseer to complete + // operations createCollection("perf_sentinel", 1); Timer t = new Timer(); Timer.Context context = t.time(); - reader.waitForState("perf_sentinel", 15000, TimeUnit.MILLISECONDS, (liveNodes, collectionState) -> collectionState != null); + reader.waitForState( + "perf_sentinel", + 15000, + TimeUnit.MILLISECONDS, + (liveNodes, collectionState) -> collectionState != null); context.stop(); log.info("Overseer loop finished processing: "); @@ -1167,8 +1523,7 @@ public void testPerformance() throws Exception { Arrays.sort(interestingOps); for (Map.Entry entry : stats.getStats().entrySet()) { String op = entry.getKey(); - if (Arrays.binarySearch(interestingOps, op) < 0) - continue; + if (Arrays.binarySearch(interestingOps, op) < 0) continue; Stats.Stat stat = entry.getValue(); if (log.isInfoEnabled()) { log.info("op: {}, success: {}, failure: {}", op, stat.success.get(), stat.errors.get()); @@ -1200,7 +1555,7 @@ private void printTimingStats(Timer timer) { } private static long nsToMs(double ns) { - return TimeUnit.MILLISECONDS.convert((long)ns, TimeUnit.NANOSECONDS); + return TimeUnit.MILLISECONDS.convert((long) ns, TimeUnit.NANOSECONDS); } private void close(MockZKController mockController) { @@ -1209,9 +1564,8 @@ private void close(MockZKController mockController) { } } - @Test - public void testReplay() throws Exception{ + public void testReplay() throws Exception { SolrZkClient overseerClient = null; ZkStateReader reader = null; @@ -1222,53 +1576,100 @@ public void testReplay() throws Exception{ reader = new ZkStateReader(zkClient); reader.createClusterStateWatchersAndUpdate(); - //prepopulate work queue with some items to emulate previous overseer died before persisting state + // prepopulate work queue with some items to emulate previous overseer died before persisting + // state DistributedQueue queue = Overseer.getInternalWorkQueue(zkClient, new Stats()); zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/" + COLLECTION, true); - ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(), - "name", COLLECTION, - ZkStateReader.REPLICATION_FACTOR, "1", - 
ZkStateReader.NUM_SHARDS_PROP, "1", - "createNodeSet", ""); + ZkNodeProps m = + new ZkNodeProps( + Overseer.QUEUE_OPERATION, + CollectionParams.CollectionAction.CREATE.toLower(), + "name", + COLLECTION, + ZkStateReader.REPLICATION_FACTOR, + "1", + ZkStateReader.NUM_SHARDS_PROP, + "1", + "createNodeSet", + ""); queue.offer(Utils.toJSON(m)); - m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(), - ZkStateReader.NODE_NAME_PROP, "127.0.0.1:8983_solr", - ZkStateReader.SHARD_ID_PROP, "shard1", - ZkStateReader.COLLECTION_PROP, COLLECTION, - ZkStateReader.CORE_NAME_PROP, "core1", - ZkStateReader.ROLES_PROP, "", - ZkStateReader.STATE_PROP, Replica.State.RECOVERING.toString()); + m = + new ZkNodeProps( + Overseer.QUEUE_OPERATION, + OverseerAction.STATE.toLower(), + ZkStateReader.NODE_NAME_PROP, + "127.0.0.1:8983_solr", + ZkStateReader.SHARD_ID_PROP, + "shard1", + ZkStateReader.COLLECTION_PROP, + COLLECTION, + ZkStateReader.CORE_NAME_PROP, + "core1", + ZkStateReader.ROLES_PROP, + "", + ZkStateReader.STATE_PROP, + Replica.State.RECOVERING.toString()); queue.offer(Utils.toJSON(m)); - m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(), - ZkStateReader.NODE_NAME_PROP, "node1:8983_", - ZkStateReader.SHARD_ID_PROP, "shard1", - ZkStateReader.COLLECTION_PROP, COLLECTION, - ZkStateReader.CORE_NAME_PROP, "core2", - ZkStateReader.ROLES_PROP, "", - ZkStateReader.STATE_PROP, Replica.State.RECOVERING.toString()); + m = + new ZkNodeProps( + Overseer.QUEUE_OPERATION, + OverseerAction.STATE.toLower(), + ZkStateReader.NODE_NAME_PROP, + "node1:8983_", + ZkStateReader.SHARD_ID_PROP, + "shard1", + ZkStateReader.COLLECTION_PROP, + COLLECTION, + ZkStateReader.CORE_NAME_PROP, + "core2", + ZkStateReader.ROLES_PROP, + "", + ZkStateReader.STATE_PROP, + Replica.State.RECOVERING.toString()); queue.offer(Utils.toJSON(m)); overseerClient = electNewOverseer(server.getZkAddress()); - //submit to proper queue + // submit to proper queue queue = getOverseerZero().getStateUpdateQueue(); - m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(), - ZkStateReader.NODE_NAME_PROP, "127.0.0.1:8983_solr", - ZkStateReader.SHARD_ID_PROP, "shard1", - ZkStateReader.COLLECTION_PROP, COLLECTION, - ZkStateReader.CORE_NAME_PROP, "core3", - ZkStateReader.ROLES_PROP, "", - ZkStateReader.STATE_PROP, Replica.State.RECOVERING.toString()); + m = + new ZkNodeProps( + Overseer.QUEUE_OPERATION, + OverseerAction.STATE.toLower(), + ZkStateReader.NODE_NAME_PROP, + "127.0.0.1:8983_solr", + ZkStateReader.SHARD_ID_PROP, + "shard1", + ZkStateReader.COLLECTION_PROP, + COLLECTION, + ZkStateReader.CORE_NAME_PROP, + "core3", + ZkStateReader.ROLES_PROP, + "", + ZkStateReader.STATE_PROP, + Replica.State.RECOVERING.toString()); queue.offer(Utils.toJSON(m)); - reader.waitForState(COLLECTION, 1000, TimeUnit.MILLISECONDS, - (liveNodes, collectionState) -> collectionState != null && collectionState.getSlice("shard1") != null - && collectionState.getSlice("shard1").getReplicas().size() == 3); + reader.waitForState( + COLLECTION, + 1000, + TimeUnit.MILLISECONDS, + (liveNodes, collectionState) -> + collectionState != null + && collectionState.getSlice("shard1") != null + && collectionState.getSlice("shard1").getReplicas().size() == 3); assertNotNull(reader.getClusterState().getCollection(COLLECTION).getSlice("shard1")); - assertEquals(3, reader.getClusterState().getCollection(COLLECTION).getSlice("shard1").getReplicasMap().size()); + assertEquals( + 3, + reader + .getClusterState() + 
.getCollection(COLLECTION) + .getSlice("shard1") + .getReplicasMap() + .size()); } finally { close(overseerClient); close(reader); @@ -1294,77 +1695,124 @@ public void testExternalClusterStateChangeBehavior() throws Exception { createCollection("c1", 1); - ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(), - ZkStateReader.SHARD_ID_PROP, "shard1", - ZkStateReader.NODE_NAME_PROP, "127.0.0.1:8983_solr", - ZkStateReader.COLLECTION_PROP, "c1", - ZkStateReader.CORE_NAME_PROP, "core1", - ZkStateReader.CORE_NODE_NAME_PROP, "core_node1", - ZkStateReader.ROLES_PROP, "", - ZkStateReader.STATE_PROP, Replica.State.DOWN.toString()); + ZkNodeProps m = + new ZkNodeProps( + Overseer.QUEUE_OPERATION, + OverseerAction.STATE.toLower(), + ZkStateReader.SHARD_ID_PROP, + "shard1", + ZkStateReader.NODE_NAME_PROP, + "127.0.0.1:8983_solr", + ZkStateReader.COLLECTION_PROP, + "c1", + ZkStateReader.CORE_NAME_PROP, + "core1", + ZkStateReader.CORE_NODE_NAME_PROP, + "core_node1", + ZkStateReader.ROLES_PROP, + "", + ZkStateReader.STATE_PROP, + Replica.State.DOWN.toString()); q.offer(Utils.toJSON(m)); waitForCollections(reader, "c1"); verifyReplicaStatus(reader, "c1", "shard1", "core_node1", Replica.State.DOWN); - m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(), - ZkStateReader.SHARD_ID_PROP, "shard1", - ZkStateReader.NODE_NAME_PROP, "127.0.0.1:8983_solr", - ZkStateReader.COLLECTION_PROP, "c1", - ZkStateReader.CORE_NAME_PROP, "core1", - ZkStateReader.ROLES_PROP, "", - ZkStateReader.STATE_PROP, Replica.State.RECOVERING.toString()); + m = + new ZkNodeProps( + Overseer.QUEUE_OPERATION, + OverseerAction.STATE.toLower(), + ZkStateReader.SHARD_ID_PROP, + "shard1", + ZkStateReader.NODE_NAME_PROP, + "127.0.0.1:8983_solr", + ZkStateReader.COLLECTION_PROP, + "c1", + ZkStateReader.CORE_NAME_PROP, + "core1", + ZkStateReader.ROLES_PROP, + "", + ZkStateReader.STATE_PROP, + Replica.State.RECOVERING.toString()); q.offer(Utils.toJSON(m)); - - m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(), - ZkStateReader.SHARD_ID_PROP, "shard1", - ZkStateReader.NODE_NAME_PROP, "127.0.0.1:8983_solr", - ZkStateReader.COLLECTION_PROP, "c1", - ZkStateReader.CORE_NAME_PROP, "core1", - ZkStateReader.ROLES_PROP, "", - ZkStateReader.STATE_PROP, Replica.State.ACTIVE.toString()); + m = + new ZkNodeProps( + Overseer.QUEUE_OPERATION, + OverseerAction.STATE.toLower(), + ZkStateReader.SHARD_ID_PROP, + "shard1", + ZkStateReader.NODE_NAME_PROP, + "127.0.0.1:8983_solr", + ZkStateReader.COLLECTION_PROP, + "c1", + ZkStateReader.CORE_NAME_PROP, + "core1", + ZkStateReader.ROLES_PROP, + "", + ZkStateReader.STATE_PROP, + Replica.State.ACTIVE.toString()); q.offer(Utils.toJSON(m)); final String testCollectionName = "test"; zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/" + testCollectionName, true); - m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(), - "name", testCollectionName, - ZkStateReader.NUM_SHARDS_PROP, "1", - ZkStateReader.REPLICATION_FACTOR, "1" - ); + m = + new ZkNodeProps( + Overseer.QUEUE_OPERATION, + CollectionParams.CollectionAction.CREATE.toLower(), + "name", + testCollectionName, + ZkStateReader.NUM_SHARDS_PROP, + "1", + ZkStateReader.REPLICATION_FACTOR, + "1"); q.offer(Utils.toJSON(m)); // Wait for the overseer to create state.json for the collection waitForCollections(reader, testCollectionName); - final String path = ZkStateReader.COLLECTIONS_ZKNODE + "/" + testCollectionName + "/state.json"; + final String 
path = + ZkStateReader.COLLECTIONS_ZKNODE + "/" + testCollectionName + "/state.json"; byte[] data = zkClient.getData(path, null, null, true); // Simulate an external modification of state.json zkClient.setData(path, data, true); - m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATESHARD.toLower(), - "collection", testCollectionName, - ZkStateReader.SHARD_ID_PROP, "x", - ZkStateReader.REPLICATION_FACTOR, "1" - ); + m = + new ZkNodeProps( + Overseer.QUEUE_OPERATION, + CollectionParams.CollectionAction.CREATESHARD.toLower(), + "collection", + testCollectionName, + ZkStateReader.SHARD_ID_PROP, + "x", + ZkStateReader.REPLICATION_FACTOR, + "1"); q.offer(Utils.toJSON(m)); - m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.ADDREPLICA.toLower(), - "collection", testCollectionName, - ZkStateReader.SHARD_ID_PROP, "x", - ZkStateReader.CORE_NODE_NAME_PROP, "core_node1", - ZkStateReader.NODE_NAME_PROP, "127.0.0.1:8983_solr", - ZkStateReader.CORE_NAME_PROP, "core1", - ZkStateReader.STATE_PROP, Replica.State.DOWN.toString() - ); + m = + new ZkNodeProps( + Overseer.QUEUE_OPERATION, + CollectionParams.CollectionAction.ADDREPLICA.toLower(), + "collection", + testCollectionName, + ZkStateReader.SHARD_ID_PROP, + "x", + ZkStateReader.CORE_NODE_NAME_PROP, + "core_node1", + ZkStateReader.NODE_NAME_PROP, + "127.0.0.1:8983_solr", + ZkStateReader.CORE_NAME_PROP, + "core1", + ZkStateReader.STATE_PROP, + Replica.State.DOWN.toString()); q.offer(Utils.toJSON(m)); - // Verify replica creation worked ok in spite of external update of state.json (although in theory such updates - // do not happen unless an old overseer is still updating ZK after a new Overseer got elected...). + // Verify replica creation worked ok in spite of external update of state.json (although in + // theory such updates do not happen unless an old overseer is still updating ZK after a new + // Overseer got elected...). 
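// For context on why a plain read-back write counts as an "external modification" here:
// writing the same bytes back still bumps the state.json znode version, which is what the
// Overseer's conditional writes key on. A minimal sketch of the idiom (hypothetical
// standalone use, reusing only the SolrZkClient `zkClient` and the `path` computed above):
//
//   byte[] data = zkClient.getData(path, null, null, true); // read current state.json
//   zkClient.setData(path, data, true);                     // identical bytes, version + 1
//
// A writer still holding the old version should then fail its conditional setData with a
// ZooKeeper BadVersion error and must re-read the znode before retrying.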
verifyReplicaStatus(reader, testCollectionName, "x", "core_node1", Replica.State.DOWN); waitForCollections(reader, "c1"); @@ -1389,8 +1837,9 @@ private void close(SolrZkClient client) throws InterruptedException { } private SolrZkClient electNewOverseer(String address) - throws InterruptedException, TimeoutException, IOException, - KeeperException, ParserConfigurationException, SAXException, NoSuchFieldException, SecurityException, IllegalAccessException { + throws InterruptedException, TimeoutException, IOException, KeeperException, + ParserConfigurationException, SAXException, NoSuchFieldException, SecurityException, + IllegalAccessException { SolrZkClient zkClient = new SolrZkClient(address, TIMEOUT); zkClients.add(zkClient); ZkStateReader reader = new ZkStateReader(zkClient); @@ -1400,7 +1849,8 @@ private SolrZkClient electNewOverseer(String address) overseers.get(0).close(); overseers.get(0).getZkStateReader().getZkClient().close(); } - UpdateShardHandler updateShardHandler = new UpdateShardHandler(UpdateShardHandlerConfig.DEFAULT); + UpdateShardHandler updateShardHandler = + new UpdateShardHandler(UpdateShardHandlerConfig.DEFAULT); updateShardHandlers.add(updateShardHandler); HttpShardHandlerFactory httpShardHandlerFactory = new HttpShardHandlerFactory(); httpShardHandlerFactory.init(new PluginInfo("shardHandlerFactory", Collections.emptyMap())); @@ -1408,68 +1858,101 @@ private SolrZkClient electNewOverseer(String address) ZkController zkController = createMockZkController(address, null, reader); zkControllers.add(zkController); - // Create an Overseer with associated configuration to NOT USE distributed state update. Tests in this class really test the Overseer. - Overseer overseer = new Overseer((HttpShardHandler) httpShardHandlerFactory.getShardHandler(), updateShardHandler, "/admin/cores", reader, zkController, - new CloudConfig.CloudConfigBuilder("127.0.0.1", 8983, "").setUseDistributedClusterStateUpdates(false).build()); + // Create an Overseer with associated configuration to NOT USE distributed state update. Tests + // in this class really test the Overseer. 
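// For contrast, the configuration these tests deliberately avoid would flip both distributed
// flags on (a sketch with the same builder; under such a config, nodes apply cluster state
// updates themselves and the Overseer queues exercised by this class are bypassed):
//
//   new CloudConfig.CloudConfigBuilder("127.0.0.1", 8983, "")
//       .setUseDistributedClusterStateUpdates(true)
//       .setUseDistributedCollectionConfigSetExecution(true)
//       .build();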
+ Overseer overseer = + new Overseer( + (HttpShardHandler) httpShardHandlerFactory.getShardHandler(), + updateShardHandler, + "/admin/cores", + reader, + zkController, + new CloudConfig.CloudConfigBuilder("127.0.0.1", 8983, "") + .setUseDistributedClusterStateUpdates(false) + .build()); overseers.add(overseer); - ElectionContext ec = new OverseerElectionContext(zkClient, overseer, - address.replaceAll("/", "_")); + ElectionContext ec = + new OverseerElectionContext(zkClient, overseer, address.replaceAll("/", "_")); overseerElector.setup(ec); overseerElector.joinElection(ec, false); return zkClient; } - private ZkController createMockZkController(String zkAddress, SolrZkClient zkClient, ZkStateReader reader) throws InterruptedException, NoSuchFieldException, SecurityException, SessionExpiredException, IllegalAccessException { + private ZkController createMockZkController( + String zkAddress, SolrZkClient zkClient, ZkStateReader reader) + throws InterruptedException, NoSuchFieldException, SecurityException, SessionExpiredException, + IllegalAccessException { ZkController zkController = mock(ZkController.class); if (zkClient == null) { - SolrZkClient newZkClient = new SolrZkClient(server.getZkAddress(), AbstractZkTestCase.TIMEOUT); + SolrZkClient newZkClient = + new SolrZkClient(server.getZkAddress(), AbstractZkTestCase.TIMEOUT); doAnswer( - new Answer() { - public Void answer(InvocationOnMock invocation) { - newZkClient.close(); - return null; - }}).when(zkController).close(); + new Answer() { + public Void answer(InvocationOnMock invocation) { + newZkClient.close(); + return null; + } + }) + .when(zkController) + .close(); zkClient = newZkClient; } else { doNothing().when(zkController).close(); } - CoreContainer mockAlwaysUpCoreContainer = mock(CoreContainer.class, - Mockito.withSettings().defaultAnswer(Mockito.CALLS_REAL_METHODS)); + CoreContainer mockAlwaysUpCoreContainer = + mock(CoreContainer.class, Mockito.withSettings().defaultAnswer(Mockito.CALLS_REAL_METHODS)); SolrMetricManager mockMetricManager = mock(SolrMetricManager.class); when(mockAlwaysUpCoreContainer.getMetricManager()).thenReturn(mockMetricManager); - when(mockAlwaysUpCoreContainer.isShutDown()).thenReturn(testDone); // Allow retry on session expiry - when(mockAlwaysUpCoreContainer.getResourceLoader()).thenReturn(new SolrResourceLoader(createTempDir())); + when(mockAlwaysUpCoreContainer.isShutDown()) + .thenReturn(testDone); // Allow retry on session expiry + when(mockAlwaysUpCoreContainer.getResourceLoader()) + .thenReturn(new SolrResourceLoader(createTempDir())); ClusterSingletons singletons = new ClusterSingletons(() -> true, r -> r.run()); // don't wait for all singletons singletons.setReady(); final MemberAccessor accessor = Plugins.getMemberAccessor(); - accessor.set(CoreContainer.class.getDeclaredField("clusterSingletons"), mockAlwaysUpCoreContainer, singletons); + accessor.set( + CoreContainer.class.getDeclaredField("clusterSingletons"), + mockAlwaysUpCoreContainer, + singletons); accessor.set(ZkController.class.getDeclaredField("zkClient"), zkController, zkClient); - accessor.set(ZkController.class.getDeclaredField("cc"), zkController, mockAlwaysUpCoreContainer); + accessor.set( + ZkController.class.getDeclaredField("cc"), zkController, mockAlwaysUpCoreContainer); when(zkController.getCoreContainer()).thenReturn(mockAlwaysUpCoreContainer); when(zkController.getZkClient()).thenReturn(zkClient); when(zkController.getZkStateReader()).thenReturn(reader); // primitive support for CC.runAsync - doAnswer(invocable -> { - 
Runnable r = invocable.getArgument(0); - Thread t = new Thread(r); - t.start(); - return null; - }).when(mockAlwaysUpCoreContainer).runAsync(any(Runnable.class)); + doAnswer( + invocable -> { + Runnable r = invocable.getArgument(0); + Thread t = new Thread(r); + t.start(); + return null; + }) + .when(mockAlwaysUpCoreContainer) + .runAsync(any(Runnable.class)); when(zkController.getLeaderProps(anyString(), anyString(), anyInt())).thenCallRealMethod(); - when(zkController.getLeaderProps(anyString(), anyString(), anyInt(), anyBoolean())).thenCallRealMethod(); + when(zkController.getLeaderProps(anyString(), anyString(), anyInt(), anyBoolean())) + .thenCallRealMethod(); doReturn(getCloudDataProvider(zkAddress, zkClient, reader)) - .when(zkController).getSolrCloudManager(); + .when(zkController) + .getSolrCloudManager(); return zkController; } - private SolrCloudManager getCloudDataProvider(String zkAddress, SolrZkClient zkClient, ZkStateReader reader) { - CloudSolrClient client = new CloudSolrClient.Builder(Collections.singletonList(zkAddress), Optional.empty()).withSocketTimeout(30000).withConnectionTimeout(15000).build(); + private SolrCloudManager getCloudDataProvider( + String zkAddress, SolrZkClient zkClient, ZkStateReader reader) { + CloudSolrClient client = + new CloudSolrClient.Builder(Collections.singletonList(zkAddress), Optional.empty()) + .withSocketTimeout(30000) + .withConnectionTimeout(15000) + .build(); solrClients.add(client); - SolrClientCloudManager sccm = new SolrClientCloudManager(new ZkDistributedQueueFactory(zkClient), client); + SolrClientCloudManager sccm = + new SolrClientCloudManager(new ZkDistributedQueueFactory(zkClient), client); sccm.getClusterStateProvider().connect(); return sccm; } @@ -1477,8 +1960,8 @@ private SolrCloudManager getCloudDataProvider(String zkAddress, SolrZkClient zkC @Test public void testRemovalOfLastReplica() throws Exception { - final Integer numReplicas = 1+random().nextInt(4); // between 1 and 4 replicas - final Integer numShards = 1+random().nextInt(4); // between 1 and 4 shards + final Integer numReplicas = 1 + random().nextInt(4); // between 1 and 4 replicas + final Integer numShards = 1 + random().nextInt(4); // between 1 and 4 shards ZkStateReader zkStateReader = null; SolrZkClient overseerClient = null; @@ -1496,11 +1979,16 @@ public void testRemovalOfLastReplica() throws Exception { // create collection { zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/" + COLLECTION, true); - ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(), - "name", COLLECTION, - ZkStateReader.NUM_SHARDS_PROP, numShards.toString(), - ZkStateReader.REPLICATION_FACTOR, "1" - ); + ZkNodeProps m = + new ZkNodeProps( + Overseer.QUEUE_OPERATION, + CollectionParams.CollectionAction.CREATE.toLower(), + "name", + COLLECTION, + ZkStateReader.NUM_SHARDS_PROP, + numShards.toString(), + ZkStateReader.REPLICATION_FACTOR, + "1"); q.offer(Utils.toJSON(m)); } waitForCollections(zkStateReader, COLLECTION); @@ -1508,15 +1996,25 @@ public void testRemovalOfLastReplica() throws Exception { // create nodes with state recovering for (int rr = 1; rr <= numReplicas; ++rr) { for (int ss = 1; ss <= numShards; ++ss) { - final int N = (numReplicas-rr)*numShards + ss; - ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(), - ZkStateReader.SHARD_ID_PROP, "shard"+ss, - ZkStateReader.NODE_NAME_PROP, "127.0.0.1:8983_solr", - ZkStateReader.COLLECTION_PROP, COLLECTION, - 
ZkStateReader.CORE_NAME_PROP, "core"+N, - ZkStateReader.CORE_NODE_NAME_PROP, "core_node"+N, - ZkStateReader.ROLES_PROP, "", - ZkStateReader.STATE_PROP, Replica.State.RECOVERING.toString()); + final int N = (numReplicas - rr) * numShards + ss; + ZkNodeProps m = + new ZkNodeProps( + Overseer.QUEUE_OPERATION, + OverseerAction.STATE.toLower(), + ZkStateReader.SHARD_ID_PROP, + "shard" + ss, + ZkStateReader.NODE_NAME_PROP, + "127.0.0.1:8983_solr", + ZkStateReader.COLLECTION_PROP, + COLLECTION, + ZkStateReader.CORE_NAME_PROP, + "core" + N, + ZkStateReader.CORE_NODE_NAME_PROP, + "core_node" + N, + ZkStateReader.ROLES_PROP, + "", + ZkStateReader.STATE_PROP, + Replica.State.RECOVERING.toString()); q.offer(Utils.toJSON(m)); } @@ -1524,22 +2022,32 @@ public void testRemovalOfLastReplica() throws Exception { // verify recovering for (int rr = 1; rr <= numReplicas; ++rr) { for (int ss = 1; ss <= numShards; ++ss) { - final int N = (numReplicas-rr)*numShards + ss; - verifyReplicaStatus(zkStateReader, COLLECTION, "shard"+ss, "core_node"+N, Replica.State.RECOVERING); + final int N = (numReplicas - rr) * numShards + ss; + verifyReplicaStatus( + zkStateReader, COLLECTION, "shard" + ss, "core_node" + N, Replica.State.RECOVERING); } } // publish node states (active) for (int rr = 1; rr <= numReplicas; ++rr) { for (int ss = 1; ss <= numShards; ++ss) { - final int N = (numReplicas-rr)*numShards + ss; - ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(), - ZkStateReader.SHARD_ID_PROP, "shard"+ss, - ZkStateReader.NODE_NAME_PROP, "127.0.0.1:8983_solr", - ZkStateReader.COLLECTION_PROP, COLLECTION, - ZkStateReader.CORE_NAME_PROP, "core"+N, - ZkStateReader.ROLES_PROP, "", - ZkStateReader.STATE_PROP, Replica.State.ACTIVE.toString()); + final int N = (numReplicas - rr) * numShards + ss; + ZkNodeProps m = + new ZkNodeProps( + Overseer.QUEUE_OPERATION, + OverseerAction.STATE.toLower(), + ZkStateReader.SHARD_ID_PROP, + "shard" + ss, + ZkStateReader.NODE_NAME_PROP, + "127.0.0.1:8983_solr", + ZkStateReader.COLLECTION_PROP, + COLLECTION, + ZkStateReader.CORE_NAME_PROP, + "core" + N, + ZkStateReader.ROLES_PROP, + "", + ZkStateReader.STATE_PROP, + Replica.State.ACTIVE.toString()); q.offer(Utils.toJSON(m)); } @@ -1547,34 +2055,66 @@ public void testRemovalOfLastReplica() throws Exception { // verify active for (int rr = 1; rr <= numReplicas; ++rr) { for (int ss = 1; ss <= numShards; ++ss) { - final int N = (numReplicas-rr)*numShards + ss; - verifyReplicaStatus(zkStateReader, COLLECTION, "shard"+ss, "core_node"+N, Replica.State.ACTIVE); + final int N = (numReplicas - rr) * numShards + ss; + verifyReplicaStatus( + zkStateReader, COLLECTION, "shard" + ss, "core_node" + N, Replica.State.ACTIVE); } } // delete node for (int rr = 1; rr <= numReplicas; ++rr) { for (int ss = 1; ss <= numShards; ++ss) { - final int N = (numReplicas-rr)*numShards + ss; - ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.DELETECORE.toLower(), - ZkStateReader.COLLECTION_PROP, COLLECTION, - ZkStateReader.CORE_NODE_NAME_PROP, "core_node"+N); + final int N = (numReplicas - rr) * numShards + ss; + ZkNodeProps m = + new ZkNodeProps( + Overseer.QUEUE_OPERATION, + OverseerAction.DELETECORE.toLower(), + ZkStateReader.COLLECTION_PROP, + COLLECTION, + ZkStateReader.CORE_NODE_NAME_PROP, + "core_node" + N); q.offer(Utils.toJSON(m)); { - String shard = "shard"+ss; - zkStateReader.waitForState(COLLECTION, 15000, TimeUnit.MILLISECONDS, (liveNodes, collectionState) -> collectionState != null && 
(collectionState.getSlice(shard) == null || collectionState.getSlice(shard).getReplicasMap().get("core_node"+N) == null)); + String shard = "shard" + ss; + zkStateReader.waitForState( + COLLECTION, + 15000, + TimeUnit.MILLISECONDS, + (liveNodes, collectionState) -> + collectionState != null + && (collectionState.getSlice(shard) == null + || collectionState.getSlice(shard).getReplicasMap().get("core_node" + N) + == null)); } - final DocCollection docCollection = zkStateReader.getClusterState().getCollection(COLLECTION); - assertTrue("found no "+ COLLECTION, (null != docCollection)); - - final Slice slice = docCollection.getSlice("shard"+ss); - assertTrue("found no "+ COLLECTION +" shard"+ss+" slice after removal of replica "+rr+" of "+numReplicas, (null != slice)); + final DocCollection docCollection = + zkStateReader.getClusterState().getCollection(COLLECTION); + assertTrue("found no " + COLLECTION, (null != docCollection)); + + final Slice slice = docCollection.getSlice("shard" + ss); + assertTrue( + "found no " + + COLLECTION + + " shard" + + ss + + " slice after removal of replica " + + rr + + " of " + + numReplicas, + (null != slice)); final Collection replicas = slice.getReplicas(); - assertEquals("wrong number of "+ COLLECTION +" shard"+ss+" replicas left, replicas="+replicas, numReplicas-rr, replicas.size()); + assertEquals( + "wrong number of " + + COLLECTION + + " shard" + + ss + + " replicas left, replicas=" + + replicas, + numReplicas - rr, + replicas.size()); } } @@ -1591,33 +2131,40 @@ public void testLatchWatcher() throws InterruptedException { long before = System.nanoTime(); latch1.await(100); long after = System.nanoTime(); - assertTrue(TimeUnit.NANOSECONDS.toMillis(after-before) > 50); - assertTrue(TimeUnit.NANOSECONDS.toMillis(after-before) < 500);// Mostly to make sure the millis->nanos->millis is not broken + assertTrue(TimeUnit.NANOSECONDS.toMillis(after - before) > 50); + // Mostly to make sure the millis->nanos->millis is not broken + assertTrue(TimeUnit.NANOSECONDS.toMillis(after - before) < 500); latch1.process(new WatchedEvent(new WatcherEvent(1, 1, "/foo/bar"))); before = System.nanoTime(); - latch1.await(10000);// Expecting no wait + latch1.await(10000); // Expecting no wait after = System.nanoTime(); - assertTrue(TimeUnit.NANOSECONDS.toMillis(after-before) < 1000); + assertTrue(TimeUnit.NANOSECONDS.toMillis(after - before) < 1000); final AtomicBoolean expectedEventProcessed = new AtomicBoolean(false); final AtomicBoolean doneWaiting = new AtomicBoolean(false); - final OverseerTaskQueue.LatchWatcher latch2 = new OverseerTaskQueue.LatchWatcher(Event.EventType.NodeCreated); - Thread t = new Thread(()->{ - //Process an event of a different type first, this shouldn't release the latch - latch2.process(new WatchedEvent(new WatcherEvent(Event.EventType.NodeDeleted.getIntValue(), 1, "/foo/bar"))); - - assertFalse("Latch shouldn't have been released", doneWaiting.get()); - // Now process the correct type of event - expectedEventProcessed.set(true); - latch2.process(new WatchedEvent(new WatcherEvent(Event.EventType.NodeCreated.getIntValue(), 1, "/foo/bar"))); - }); + final OverseerTaskQueue.LatchWatcher latch2 = + new OverseerTaskQueue.LatchWatcher(Event.EventType.NodeCreated); + Thread t = + new Thread( + () -> { + // Process an event of a different type first, this shouldn't release the latch + latch2.process( + new WatchedEvent( + new WatcherEvent(Event.EventType.NodeDeleted.getIntValue(), 1, "/foo/bar"))); + + assertFalse("Latch shouldn't have been released", 
doneWaiting.get()); + // Now process the correct type of event + expectedEventProcessed.set(true); + latch2.process( + new WatchedEvent( + new WatcherEvent(Event.EventType.NodeCreated.getIntValue(), 1, "/foo/bar"))); + }); t.start(); before = System.nanoTime(); latch2.await(10000); // It shouldn't wait this long, t should notify the lock after = System.nanoTime(); doneWaiting.set(true); assertTrue(expectedEventProcessed.get()); - assertTrue(TimeUnit.NANOSECONDS.toMillis(after-before) < 1000); + assertTrue(TimeUnit.NANOSECONDS.toMillis(after - before) < 1000); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/PackageManagerCLITest.java b/solr/core/src/test/org/apache/solr/cloud/PackageManagerCLITest.java index aad9286fd80..0403b48060d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/PackageManagerCLITest.java +++ b/solr/core/src/test/org/apache/solr/cloud/PackageManagerCLITest.java @@ -19,7 +19,6 @@ import java.lang.invoke.MethodHandles; import java.util.Arrays; - import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.core.TestSolrConfigHandler; import org.apache.solr.util.LogLevel; @@ -42,8 +41,11 @@ public class PackageManagerCLITest extends SolrCloudTestCase { // Note for those who want to modify the jar files used in the packages used in this test: // You need to re-sign the jars for install step, as follows: - // $ openssl dgst -sha1 -sign ./solr/core/src/test-files/solr/question-answer-repository-private-key.pem ./solr/core/src/test-files/solr/question-answer-repository/question-answer-request-handler-1.1.jar | openssl enc -base64 - // You can place the new signature thus obtained (removing any whitespaces) in the repository.json. + // $ openssl dgst -sha1 -sign + // ./solr/core/src/test-files/solr/question-answer-repository-private-key.pem + // ./solr/core/src/test-files/solr/question-answer-repository/question-answer-request-handler-1.1.jar | openssl enc -base64 + // You can place the new signature thus obtained (removing any whitespaces) in the + // repository.json. 
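// A freshly generated signature can be sanity-checked before pasting it into repository.json
// using plain JDK crypto; sketch below, where `signatureBase64`, `publicKey` (the repository's
// java.security.PublicKey) and `jarPath` are hypothetical placeholders, and "SHA1withRSA"
// assumes the signing key above is RSA (matching the `openssl dgst -sha1 -sign` step):
//
//   byte[] sig = java.util.Base64.getDecoder().decode(signatureBase64);
//   java.security.Signature verifier = java.security.Signature.getInstance("SHA1withRSA");
//   verifier.initVerify(publicKey);
//   verifier.update(java.nio.file.Files.readAllBytes(jarPath));
//   boolean ok = verifier.verify(sig); // true iff the jar matches the signature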
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -54,11 +56,14 @@ public static void setupCluster() throws Exception { System.setProperty("enable.packages", "true"); configureCluster(1) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) - .addConfig("conf2", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) - .configure(); - - repositoryServer = new LocalWebServer(TEST_PATH().resolve("question-answer-repository").toString()); + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "conf2", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .configure(); + + repositoryServer = + new LocalWebServer(TEST_PATH().resolve("question-answer-repository").toString()); repositoryServer.start(); } @@ -76,7 +81,15 @@ public void testPackageManager() throws Exception { run(tool, new String[] {"-solrUrl", solrUrl, "list-installed"}); - run(tool, new String[] {"-solrUrl", solrUrl, "add-repo", "fullstory", "http://localhost:" + repositoryServer.getPort()}); + run( + tool, + new String[] { + "-solrUrl", + solrUrl, + "add-repo", + "fullstory", + "http://localhost:" + repositoryServer.getPort() + }); run(tool, new String[] {"-solrUrl", solrUrl, "list-available"}); @@ -91,21 +104,38 @@ public void testPackageManager() throws Exception { run(tool, new String[] {"-solrUrl", solrUrl, "list-deployed", "question-answer"}); - run(tool, new String[] {"-solrUrl", solrUrl, "deploy", "question-answer", "-y", "-collections", "abc", "-p", "RH-HANDLER-PATH=" + rhPath}); + run( + tool, + new String[] { + "-solrUrl", + solrUrl, + "deploy", + "question-answer", + "-y", + "-collections", + "abc", + "-p", + "RH-HANDLER-PATH=" + rhPath + }); assertPackageVersion("abc", "question-answer", "1.0.0", rhPath, "1.0.0"); run(tool, new String[] {"-solrUrl", solrUrl, "list-deployed", "question-answer"}); run(tool, new String[] {"-solrUrl", solrUrl, "list-deployed", "-c", "abc"}); - // Should we test the "auto-update to latest" functionality or the default explicit deploy functionality + // Should we test the "auto-update to latest" functionality or the default explicit deploy + // functionality boolean autoUpdateToLatest = random().nextBoolean(); if (autoUpdateToLatest) { log.info("Testing auto-update to latest installed"); // This command pegs the version to the latest available - run(tool, new String[] {"-solrUrl", solrUrl, "deploy", "question-answer:latest", "-y", "-collections", "abc"}); + run( + tool, + new String[] { + "-solrUrl", solrUrl, "deploy", "question-answer:latest", "-y", "-collections", "abc" + }); assertPackageVersion("abc", "question-answer", "$LATEST", rhPath, "1.0.0"); run(tool, new String[] {"-solrUrl", solrUrl, "install", "question-answer"}); @@ -116,22 +146,50 @@ public void testPackageManager() throws Exception { run(tool, new String[] {"-solrUrl", solrUrl, "install", "question-answer"}); assertPackageVersion("abc", "question-answer", "1.0.0", rhPath, "1.0.0"); - if (random().nextBoolean()) { // even if parameters are not passed in, they should be picked up from previous deployment - run(tool, new String[] {"-solrUrl", solrUrl, "deploy", "--update", "-y", "question-answer", "-collections", "abc", "-p", "RH-HANDLER-PATH=" + rhPath}); + // even if parameters are not passed in, they should be picked up from previous deployment + if (random().nextBoolean()) { + run( + tool, + new String[] { + "-solrUrl", + 
solrUrl, + "deploy", + "--update", + "-y", + "question-answer", + "-collections", + "abc", + "-p", + "RH-HANDLER-PATH=" + rhPath + }); } else { - run(tool, new String[] {"-solrUrl", solrUrl, "deploy", "--update", "-y", "question-answer", "-collections", "abc"}); + run( + tool, + new String[] { + "-solrUrl", + solrUrl, + "deploy", + "--update", + "-y", + "question-answer", + "-collections", + "abc" + }); } assertPackageVersion("abc", "question-answer", "1.1.0", rhPath, "1.1.0"); } log.info("Running undeploy..."); - run(tool, new String[] {"-solrUrl", solrUrl, "undeploy", "question-answer", "-collections", "abc"}); + run( + tool, + new String[] {"-solrUrl", solrUrl, "undeploy", "question-answer", "-collections", "abc"}); run(tool, new String[] {"-solrUrl", solrUrl, "list-deployed", "question-answer"}); - } - void assertPackageVersion(String collection, String pkg, String version, String component, String componentVersion) throws Exception { + void assertPackageVersion( + String collection, String pkg, String version, String component, String componentVersion) + throws Exception { TestSolrConfigHandler.testForResponseElement( null, cluster.getJettySolrRunner(0).getBaseUrl().toString() + "/" + collection, @@ -152,13 +210,16 @@ void assertPackageVersion(String collection, String pkg, String version, String } private void run(PackageTool tool, String[] args) throws Exception { - int res = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); + int res = + tool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); assertEquals("Non-zero status returned for: " + Arrays.toString(args), 0, res); } static class LocalWebServer { private int port = 0; - final private String resourceDir; + private final String resourceDir; Server server; ServerConnector connector; @@ -167,7 +228,7 @@ public LocalWebServer(String resourceDir) { } public int getPort() { - return connector != null? connector.getLocalPort(): port; + return connector != null ? 
connector.getLocalPort() : port; } public void start() throws Exception { diff --git a/solr/core/src/test/org/apache/solr/cloud/ParallelCommitExecutionTest.java b/solr/core/src/test/org/apache/solr/cloud/ParallelCommitExecutionTest.java index b92a5c61104..55b67cf75e1 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ParallelCommitExecutionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ParallelCommitExecutionTest.java @@ -16,12 +16,6 @@ */ package org.apache.solr.cloud; -import org.apache.solr.request.SolrQueryRequest; -import org.apache.solr.response.SolrQueryResponse; -import org.apache.solr.update.CommitUpdateCommand; -import org.apache.solr.update.processor.UpdateRequestProcessor; -import org.apache.solr.update.processor.UpdateRequestProcessorFactory; - import java.io.IOException; import java.lang.invoke.MethodHandles; import java.nio.file.Path; @@ -31,10 +25,14 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; - import org.apache.lucene.util.TestUtil; import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; +import org.apache.solr.request.SolrQueryRequest; +import org.apache.solr.response.SolrQueryResponse; +import org.apache.solr.update.CommitUpdateCommand; +import org.apache.solr.update.processor.UpdateRequestProcessor; +import org.apache.solr.update.processor.UpdateRequestProcessorFactory; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -46,6 +44,7 @@ public class ParallelCommitExecutionTest extends SolrCloudTestCase { /** A basic client for operations at the cloud level, default collection will be set */ private static CloudSolrClient CLOUD_CLIENT; + private static int expectCount; private static volatile CountDownLatch countdown; @@ -53,8 +52,9 @@ public class ParallelCommitExecutionTest extends SolrCloudTestCase { @BeforeClass public static void beforeClass() throws Exception { - // multi replicas matters; for the initial parallel commit execution tests, only consider repFactor=1 - final int repFactor = 1;//random().nextBoolean() ? 1 : 2; + // multi replicas matters; for the initial parallel commit execution tests, only consider + // repFactor=1 + final int repFactor = 1; // random().nextBoolean() ? 
1 : 2; final int numShards = TestUtil.nextInt(random(), 1, 4); final int numNodes = (numShards * repFactor); expectCount = numNodes; @@ -102,14 +102,14 @@ public void testParallelOk() throws Exception { public static void waitForRecoveriesToFinish(CloudSolrClient client) throws Exception { assert null != client.getDefaultCollection(); - AbstractDistribZkTestBase.waitForRecoveriesToFinish(client.getDefaultCollection(), - client.getZkStateReader(), - true, true, 330); + AbstractDistribZkTestBase.waitForRecoveriesToFinish( + client.getDefaultCollection(), client.getZkStateReader(), true, true, 330); } public static class CheckFactory extends UpdateRequestProcessorFactory { @Override - public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) { + public UpdateRequestProcessor getInstance( + SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) { return new Check(next); } } @@ -125,8 +125,8 @@ public void processCommit(CommitUpdateCommand cmd) throws IOException { super.processCommit(cmd); countdown.countDown(); try { - // NOTE: this ensures that all commits are executed in parallel; no commit can complete successfully - // until all commits have entered the `processCommit(...)` method. + // NOTE: this ensures that all commits are executed in parallel; no commit can complete + // successfully until all commits have entered the `processCommit(...)` method. if (!countdown.await(5, TimeUnit.SECONDS)) { throw new RuntimeException("done waiting"); } diff --git a/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java b/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java index c20e3eb127a..e21aedd91ad 100644 --- a/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java @@ -15,9 +15,13 @@ * limitations under the License. */ - package org.apache.solr.cloud; +import static java.util.Collections.singletonList; + +import com.codahale.metrics.Counter; +import com.codahale.metrics.Metric; +import com.codahale.metrics.MetricRegistry; import java.io.IOException; import java.lang.invoke.MethodHandles; import java.nio.file.Files; @@ -31,10 +35,6 @@ import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; - -import com.codahale.metrics.Counter; -import com.codahale.metrics.Metric; -import com.codahale.metrics.MetricRegistry; import org.apache.commons.lang3.RandomStringUtils; import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.client.solrj.SolrQuery; @@ -56,12 +56,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static java.util.Collections.singletonList; - /** * Test PeerSync when a node restarts and documents are indexed when node was down. * - * This test is modeled after SyncSliceTest + *
<p>
This test is modeled after SyncSliceTest */ @Slow public class PeerSyncReplicationTest extends AbstractFullDistribZkTestBase { @@ -108,7 +106,7 @@ public void distribSetUp() throws Exception { } @Test - //commented 2-Aug-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") + // commented 2-Aug-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") public void test() throws Exception { handle.clear(); handle.put("timestamp", SKIPVAL); @@ -119,8 +117,7 @@ public void test() throws Exception { // index enough docs and commit to establish frame of reference for PeerSync for (int i = 0; i < 100; i++) { - indexDoc(id, docId, i1, 50, tlong, 50, t1, - "document number " + docId++); + indexDoc(id, docId, i1, 50, tlong, 50, t1, "document number " + docId++); } commit(); waitForThingsToLevelOut(30, TimeUnit.SECONDS); @@ -134,7 +131,7 @@ public void test() throws Exception { CloudJettyRunner initialLeaderJetty = shardToLeaderJetty.get("shard1"); List otherJetties = getOtherAvailableJetties(initialLeaderJetty); CloudJettyRunner neverLeader = otherJetties.get(otherJetties.size() - 1); - otherJetties.remove(neverLeader) ; + otherJetties.remove(neverLeader); // first shutdown a node that will never be a leader forceNodeFailures(singletonList(neverLeader)); @@ -144,10 +141,8 @@ public void test() throws Exception { CloudJettyRunner nodePeerSynced = forceNodeFailureAndDoPeerSync(false); // add a few more docs - indexDoc(id, docId, i1, 50, tlong, 50, t1, - "document number " + docId++); - indexDoc(id, docId, i1, 50, tlong, 50, t1, - "document number " + docId++); + indexDoc(id, docId, i1, 50, tlong, 50, t1, "document number " + docId++); + indexDoc(id, docId, i1, 50, tlong, 50, t1, "document number " + docId++); commit(); cloudClientDocs = cloudClient.query(new SolrQuery("*:*")).getResults().getNumFound(); @@ -163,19 +158,27 @@ public void test() throws Exception { log.info("Now shutting down initial leader"); forceNodeFailures(singletonList(initialLeaderJetty)); log.info("Updating mappings from zk"); - waitForNewLeader(cloudClient, "shard1", (Replica) initialLeaderJetty.client.info, new TimeOut(15, TimeUnit.SECONDS, TimeSource.NANO_TIME)); + waitForNewLeader( + cloudClient, + "shard1", + (Replica) initialLeaderJetty.client.info, + new TimeOut(15, TimeUnit.SECONDS, TimeSource.NANO_TIME)); updateMappingsFromZk(jettys, clients, true); - assertEquals("PeerSynced node did not become leader", nodePeerSynced, shardToLeaderJetty.get("shard1")); + assertEquals( + "PeerSynced node did not become leader", + nodePeerSynced, + shardToLeaderJetty.get("shard1")); - // bring up node that was down all along, and let it PeerSync from the node that was forced to PeerSynce + // bring up node that was down all along, and let it PeerSync from the node that was forced to + // PeerSynce bringUpDeadNodeAndEnsureNoReplication(neverLeader, false); waitTillNodesActive(); checkShardConsistency(false, true); - - // bring back all the nodes including initial leader - // (commented as reports Maximum concurrent create/delete watches above limit violation and reports thread leaks) + // bring back all the nodes including initial leader + // (commented as reports Maximum concurrent create/delete watches above limit violation and + // reports thread leaks) /*for(int i = 0 ; i < nodesDown.size(); i++) { bringUpDeadNodeAndEnsureNoReplication(shardToLeaderJetty.get("shard1"), neverLeader, false); } @@ -195,10 +198,13 @@ public void test() throws Exception { } assertNotNull(registry); Map metrics = 
registry.getMetrics(); - assertTrue("REPLICATION.peerSync.time present", metrics.containsKey("REPLICATION.peerSync.time")); - assertTrue("REPLICATION.peerSync.errors present", metrics.containsKey("REPLICATION.peerSync.errors")); + assertTrue( + "REPLICATION.peerSync.time present", metrics.containsKey("REPLICATION.peerSync.time")); + assertTrue( + "REPLICATION.peerSync.errors present", + metrics.containsKey("REPLICATION.peerSync.errors")); - Counter counter = (Counter)metrics.get("REPLICATION.peerSync.errors"); + Counter counter = (Counter) metrics.get("REPLICATION.peerSync.errors"); assertEquals(0L, counter.getCount()); success = true; } finally { @@ -215,7 +221,7 @@ public IndexInBackGround(int numDocs, CloudJettyRunner nodeToBringUp) { this.numDocs = numDocs; this.runner = nodeToBringUp; } - + public void run() { try { // If we don't wait for cores get loaded, the leader may put this replica into LIR state @@ -228,7 +234,7 @@ public void run() { } } catch (Exception e) { log.error("Error indexing doc in background", e); - //Throwing an error here will kill the thread + // Throwing an error here will kill the thread } } @@ -243,7 +249,6 @@ private void waitForCoreLoading() throws InterruptedException { } } } - private void forceNodeFailures(List replicasToShutDown) throws Exception { for (CloudJettyRunner replicaToShutDown : replicasToShutDown) { @@ -267,8 +272,6 @@ private void forceNodeFailures(List replicasToShutDown) throws nodesDown.addAll(replicasToShutDown); } - - private CloudJettyRunner forceNodeFailureAndDoPeerSync(boolean disableFingerprint) throws Exception { @@ -276,15 +279,14 @@ private CloudJettyRunner forceNodeFailureAndDoPeerSync(boolean disableFingerprin CloudJettyRunner leaderJetty = shardToLeaderJetty.get("shard1"); List nonLeaderJetties = getOtherAvailableJetties(leaderJetty); - CloudJettyRunner replicaToShutDown = nonLeaderJetties.get(random().nextInt(nonLeaderJetties.size())); // random non leader node + CloudJettyRunner replicaToShutDown = + nonLeaderJetties.get(random().nextInt(nonLeaderJetties.size())); // random non leader node forceNodeFailures(Arrays.asList(replicaToShutDown)); // two docs need to be sync'd back when replica restarts - indexDoc(id, docId, i1, 50, tlong, 50, t1, - "document number " + docId++); - indexDoc(id, docId, i1, 50, tlong, 50, t1, - "document number " + docId++); + indexDoc(id, docId, i1, 50, tlong, 50, t1, "document number " + docId++); + indexDoc(id, docId, i1, 50, tlong, 50, t1, "document number " + docId++); commit(); bringUpDeadNodeAndEnsureNoReplication(replicaToShutDown, disableFingerprint); @@ -292,19 +294,18 @@ private CloudJettyRunner forceNodeFailureAndDoPeerSync(boolean disableFingerprin return replicaToShutDown; } - - private void bringUpDeadNodeAndEnsureNoReplication(CloudJettyRunner nodeToBringUp, boolean disableFingerprint) - throws Exception { + private void bringUpDeadNodeAndEnsureNoReplication( + CloudJettyRunner nodeToBringUp, boolean disableFingerprint) throws Exception { // disable fingerprint check if needed System.setProperty("solr.disableFingerprint", String.valueOf(disableFingerprint)); // we wait a little bit, so socket between leader -> replica will be timeout Thread.sleep(3000); IndexInBackGround iib = new IndexInBackGround(50, nodeToBringUp); iib.start(); - + // bring back dead node and ensure it recovers nodeToBringUp.jetty.start(); - + nodesDown.remove(nodeToBringUp); waitTillNodesActive(); @@ -316,23 +317,27 @@ private void bringUpDeadNodeAndEnsureNoReplication(CloudJettyRunner nodeToBringU 
assertEquals(getShardCount() - nodesDown.size(), jetties.size()); waitForThingsToLevelOut(30, TimeUnit.SECONDS); - + iib.join(); - + cloudClient.commit(); - + checkShardConsistency(false, false); - + long cloudClientDocs = cloudClient.query(new SolrQuery("*:*")).getResults().getNumFound(); assertEquals(docId, cloudClientDocs); // if there was no replication, we should not have replication.properties file - String replicationProperties = nodeToBringUp.jetty.getSolrHome() + "/cores/" + DEFAULT_TEST_COLLECTION_NAME + "/data/replication.properties"; - assertTrue("PeerSync failed. Had to fail back to replication", Files.notExists(Paths.get(replicationProperties))); + String replicationProperties = + nodeToBringUp.jetty.getSolrHome() + + "/cores/" + + DEFAULT_TEST_COLLECTION_NAME + + "/data/replication.properties"; + assertTrue( + "PeerSync failed. Had to fail back to replication", + Files.notExists(Paths.get(replicationProperties))); } - - private void waitTillNodesActive() throws Exception { for (int i = 0; i < 60; i++) { Thread.sleep(3000); @@ -344,9 +349,7 @@ private void waitTillNodesActive() throws Exception { boolean allActive = true; Collection nodesDownNames = - nodesDown.stream() - .map(n -> n.coreNodeName) - .collect(Collectors.toList()); + nodesDown.stream().map(n -> n.coreNodeName).collect(Collectors.toList()); Collection replicasToCheck = replicas.stream() @@ -354,7 +357,8 @@ private void waitTillNodesActive() throws Exception { .collect(Collectors.toList()); for (Replica replica : replicasToCheck) { - if (!clusterState.liveNodesContain(replica.getNodeName()) || replica.getState() != Replica.State.ACTIVE) { + if (!clusterState.liveNodesContain(replica.getNodeName()) + || replica.getState() != Replica.State.ACTIVE) { allActive = false; break; } @@ -366,8 +370,6 @@ private void waitTillNodesActive() throws Exception { printLayout(); fail("timeout waiting to see all nodes active"); } - - private List getOtherAvailableJetties(CloudJettyRunner leader) { List candidates = new ArrayList<>(); @@ -382,10 +384,7 @@ private List getOtherAvailableJetties(CloudJettyRunner leader) return candidates; } - - - protected void indexDoc(Object... fields) throws IOException, - SolrServerException { + protected void indexDoc(Object... fields) throws IOException, SolrServerException { SolrInputDocument doc = new SolrInputDocument(); addFields(doc, fields); @@ -406,5 +405,4 @@ protected void indexr(Object... 
fields) throws Exception { addFields(doc, "rnd_b", true); indexDoc(doc); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/RecoveryAfterSoftCommitTest.java b/solr/core/src/test/org/apache/solr/cloud/RecoveryAfterSoftCommitTest.java index 4808a6ebe0d..e305721d713 100644 --- a/solr/core/src/test/org/apache/solr/cloud/RecoveryAfterSoftCommitTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/RecoveryAfterSoftCommitTest.java @@ -18,7 +18,6 @@ import java.io.File; import java.util.List; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.cloud.SocketProxy; import org.apache.solr.client.solrj.embedded.JettySolrRunner; @@ -36,6 +35,7 @@ public class RecoveryAfterSoftCommitTest extends AbstractFullDistribZkTestBase { private static final int MAX_BUFFERED_DOCS = 2, ULOG_NUM_RECORDS_TO_KEEP = 2; private final boolean onlyLeaderIndexes = random().nextBoolean(); + public RecoveryAfterSoftCommitTest() { sliceCount = 1; fixShardCount(2); @@ -43,7 +43,9 @@ public RecoveryAfterSoftCommitTest() { @Override protected boolean useTlogReplicas() { - return false; // TODO: tlog replicas makes commits take way to long due to what is likely a bug and it's TestInjection use + // TODO: tlog replicas make commits take way too long due to what is likely a bug and its + // TestInjection use + return false; } @BeforeClass @@ -55,22 +57,25 @@ public static void beforeTests() { } @AfterClass - public static void afterTest() { + public static void afterTest() { System.clearProperty("solr.tests.maxBufferedDocs"); System.clearProperty("solr.ulog.numRecordsToKeep"); System.clearProperty("useCompoundFile"); TestInjection.reset(); } - /** - * Overrides the parent implementation to install a SocketProxy in-front of the Jetty server. - */ + /** Overrides the parent implementation to install a SocketProxy in front of the Jetty server.
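* Roughly speaking, SocketProxy listens on the port handed to clients and forwards traffic to
* Jetty's real port, so a test can close() and reopen() the proxy to simulate a network
* partition without touching the OS firewall; ReplicationFactorTest below uses the same
* technique (via createProxiedJetty and getProxyForReplica) to avoid fussing with IPTables.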
*/ @Override - public JettySolrRunner createJetty(File solrHome, String dataDir, - String shardList, String solrConfigOverride, String schemaOverride, Replica.Type replicaType) - throws Exception - { - return createProxiedJetty(solrHome, dataDir, shardList, solrConfigOverride, schemaOverride, replicaType); + public JettySolrRunner createJetty( + File solrHome, + String dataDir, + String shardList, + String solrConfigOverride, + String schemaOverride, + Replica.Type replicaType) + throws Exception { + return createProxiedJetty( + solrHome, dataDir, shardList, solrConfigOverride, schemaOverride, replicaType); } @Test @@ -78,7 +83,7 @@ public void test() throws Exception { waitForRecoveriesToFinish(DEFAULT_COLLECTION, true); // flush twice int i = 0; - for (; i notLeaders = - ensureAllReplicasAreActive(DEFAULT_COLLECTION, "shard1", 1, 2, 30); + List notLeaders = ensureAllReplicasAreActive(DEFAULT_COLLECTION, "shard1", 1, 2, 30); } } - diff --git a/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java b/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java index 3671cddc9c0..92305724305 100644 --- a/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java @@ -27,9 +27,7 @@ public class RecoveryZkTest extends AbstractRecoveryZkTestBase { @Test @Override - //commented 2-Aug-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 28-June-2018 public void test() throws Exception { super.test(); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/ReindexCollectionTest.java b/solr/core/src/test/org/apache/solr/cloud/ReindexCollectionTest.java index 555e040d18a..23446a61d21 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ReindexCollectionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ReindexCollectionTest.java @@ -25,7 +25,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.function.Function; - import org.apache.solr.client.solrj.cloud.DistribStateManager; import org.apache.solr.client.solrj.cloud.SolrCloudManager; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -48,9 +47,7 @@ import org.junit.BeforeClass; import org.junit.Test; -/** - * - */ +/** */ @LogLevel("org.apache.solr.cloud.api.collections.ReindexCollectionCmd=DEBUG") public class ReindexCollectionTest extends SolrCloudTestCase { @@ -75,22 +72,25 @@ public void doBefore() throws Exception { ZkController zkController = cluster.getJettySolrRunner(0).getCoreContainer().getZkController(); cloudManager = zkController.getSolrCloudManager(); stateManager = cloudManager.getDistribStateManager(); - solrClient = new CloudSolrClientBuilder(Collections.singletonList(zkController.getZkServerAddress()), - Optional.empty()).build(); + solrClient = + new CloudSolrClientBuilder( + Collections.singletonList(zkController.getZkServerAddress()), Optional.empty()) + .build(); } private ReindexCollectionCmd.State getState(String collection) { try { - return ReindexCollectionCmd.State.get(ReindexCollectionCmd - .getReindexingState(stateManager, collection) - .get(ReindexCollectionCmd.STATE)); + return ReindexCollectionCmd.State.get( + ReindexCollectionCmd.getReindexingState(stateManager, collection) + .get(ReindexCollectionCmd.STATE)); } catch (Exception e) { fail("Unexpected exception checking state of " + collection + ": " + e); return null; } } - private void waitForState(String collection, ReindexCollectionCmd.State expected) throws Exception { + private void 
waitForState(String collection, ReindexCollectionCmd.State expected) + throws Exception { TimeOut timeOut = new TimeOut(30, TimeUnit.SECONDS, cloudManager.getTimeSource()); ReindexCollectionCmd.State current = null; while (!timeOut.hasTimedOut()) { @@ -123,24 +123,34 @@ public void testBasicReindexing() throws Exception { createCollection(sourceCollection, "conf1", 2, 2); - indexDocs(sourceCollection, NUM_DOCS, + indexDocs( + sourceCollection, + NUM_DOCS, i -> new SolrInputDocument("id", String.valueOf(i), "string_s", String.valueOf(i))); final String targetCollection = "basicReindexingTarget"; - CollectionAdminRequest.ReindexCollection req = CollectionAdminRequest.reindexCollection(sourceCollection) - .setTarget(targetCollection); + CollectionAdminRequest.ReindexCollection req = + CollectionAdminRequest.reindexCollection(sourceCollection).setTarget(targetCollection); CollectionAdminResponse rsp = req.process(solrClient); assertNotNull(rsp.toString(), rsp.getResponse().get(ReindexCollectionCmd.REINDEX_STATUS)); @SuppressWarnings({"unchecked"}) - Map status = (Map)rsp.getResponse().get(ReindexCollectionCmd.REINDEX_STATUS); - assertEquals(status.toString(), (long)NUM_DOCS, ((Number)status.get("inputDocs")).longValue()); - assertEquals(status.toString(), (long)NUM_DOCS, ((Number)status.get("processedDocs")).longValue()); - - CloudUtil.waitForState(cloudManager, "did not finish copying in time", targetCollection, (liveNodes, coll) -> { - ReindexCollectionCmd.State state = ReindexCollectionCmd.State.get(coll.getStr(ReindexCollectionCmd.REINDEXING_STATE)); - return ReindexCollectionCmd.State.FINISHED == state; - }); + Map status = + (Map) rsp.getResponse().get(ReindexCollectionCmd.REINDEX_STATUS); + assertEquals( + status.toString(), (long) NUM_DOCS, ((Number) status.get("inputDocs")).longValue()); + assertEquals( + status.toString(), (long) NUM_DOCS, ((Number) status.get("processedDocs")).longValue()); + + CloudUtil.waitForState( + cloudManager, + "did not finish copying in time", + targetCollection, + (liveNodes, coll) -> { + ReindexCollectionCmd.State state = + ReindexCollectionCmd.State.get(coll.getStr(ReindexCollectionCmd.REINDEXING_STATE)); + return ReindexCollectionCmd.State.FINISHED == state; + }); // verify the target docs exist QueryResponse queryResponse = solrClient.query(targetCollection, params(CommonParams.Q, "*:*")); @@ -155,16 +165,19 @@ public void testSameTargetReindexing() throws Exception { doTestSameTargetReindexing(true, true); } - private void doTestSameTargetReindexing(boolean sourceRemove, boolean followAliases) throws Exception { + private void doTestSameTargetReindexing(boolean sourceRemove, boolean followAliases) + throws Exception { final String sourceCollection = "sameTargetReindexing_" + sourceRemove + "_" + followAliases; final String targetCollection = sourceCollection; createCollection(sourceCollection, "conf1", 2, 2); - indexDocs(sourceCollection, NUM_DOCS, + indexDocs( + sourceCollection, + NUM_DOCS, i -> new SolrInputDocument("id", String.valueOf(i), "string_s", String.valueOf(i))); - CollectionAdminRequest.ReindexCollection req = CollectionAdminRequest.reindexCollection(sourceCollection) - .setTarget(targetCollection); + CollectionAdminRequest.ReindexCollection req = + CollectionAdminRequest.reindexCollection(sourceCollection).setTarget(targetCollection); req.setRemoveSource(sourceRemove); req.setFollowAliases(followAliases); req.process(solrClient); @@ -174,7 +187,8 @@ private void doTestSameTargetReindexing(boolean sourceRemove, boolean followAlia String 
prefix = ReindexCollectionCmd.TARGET_COL_PREFIX + targetCollection; while (!timeOut.hasTimedOut()) { timeOut.sleep(500); - for (String name : cloudManager.getClusterStateProvider().getClusterState().getCollectionsMap().keySet()) { + for (String name : + cloudManager.getClusterStateProvider().getClusterState().getCollectionsMap().keySet()) { if (name.startsWith(prefix)) { realTargetCollection = name; break; @@ -186,10 +200,15 @@ private void doTestSameTargetReindexing(boolean sourceRemove, boolean followAlia } assertNotNull("target collection not present after 30s", realTargetCollection); - CloudUtil.waitForState(cloudManager, "did not finish copying in time", realTargetCollection, (liveNodes, coll) -> { - ReindexCollectionCmd.State state = ReindexCollectionCmd.State.get(coll.getStr(ReindexCollectionCmd.REINDEXING_STATE)); - return ReindexCollectionCmd.State.FINISHED == state; - }); + CloudUtil.waitForState( + cloudManager, + "did not finish copying in time", + realTargetCollection, + (liveNodes, coll) -> { + ReindexCollectionCmd.State state = + ReindexCollectionCmd.State.get(coll.getStr(ReindexCollectionCmd.REINDEXING_STATE)); + return ReindexCollectionCmd.State.FINISHED == state; + }); solrClient.getZkStateReader().aliasesManager.update(); // verify the target docs exist QueryResponse rsp = solrClient.query(targetCollection, params(CommonParams.Q, "*:*")); @@ -205,29 +224,38 @@ public void testLossySchema() throws Exception { final String sourceCollection = "sourceLossyReindexing"; final String targetCollection = "targetLossyReindexing"; - createCollection(sourceCollection, "conf2", 2, 2); - indexDocs(sourceCollection, NUM_DOCS, i -> - new SolrInputDocument( - "id", String.valueOf(i), - "string_s", String.valueOf(i), - "sind", "this is a test " + i)); // "sind": indexed=true, stored=false, will be lost... - - CollectionAdminRequest.ReindexCollection req = CollectionAdminRequest.reindexCollection(sourceCollection) - .setTarget(targetCollection) - .setConfigName("conf3"); + indexDocs( + sourceCollection, + NUM_DOCS, + i -> + new SolrInputDocument( + "id", String.valueOf(i), + "string_s", String.valueOf(i), + "sind", + "this is a test " + i)); // "sind": indexed=true, stored=false, will be lost... + + CollectionAdminRequest.ReindexCollection req = + CollectionAdminRequest.reindexCollection(sourceCollection) + .setTarget(targetCollection) + .setConfigName("conf3"); req.process(solrClient); - CloudUtil.waitForState(cloudManager, "did not finish copying in time", targetCollection, (liveNodes, coll) -> { - ReindexCollectionCmd.State state = ReindexCollectionCmd.State.get(coll.getStr(ReindexCollectionCmd.REINDEXING_STATE)); - return ReindexCollectionCmd.State.FINISHED == state; - }); + CloudUtil.waitForState( + cloudManager, + "did not finish copying in time", + targetCollection, + (liveNodes, coll) -> { + ReindexCollectionCmd.State state = + ReindexCollectionCmd.State.get(coll.getStr(ReindexCollectionCmd.REINDEXING_STATE)); + return ReindexCollectionCmd.State.FINISHED == state; + }); // verify the target docs exist QueryResponse rsp = solrClient.query(targetCollection, params(CommonParams.Q, "*:*")); assertEquals("copied num docs", NUM_DOCS, rsp.getResults().getNumFound()); for (SolrDocument doc : rsp.getResults()) { - String id = (String)doc.getFieldValue("id"); + String id = (String) doc.getFieldValue("id"); assertEquals(id, doc.getFieldValue("string_s")); assertFalse(doc.containsKey("sind")); // lost in translation ... 
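// Why this holds, roughly: the reindex command re-reads the source documents through ordinary
// query/streaming responses, and only stored or docValues field values are retrievable that
// way, so an indexed=true/stored=false field such as "sind" has no value left to copy into
// the target collection.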
} @@ -238,26 +266,35 @@ public void testReshapeReindexing() throws Exception { final String sourceCollection = "reshapeReindexing"; final String targetCollection = "reshapeReindexingTarget"; createCollection(sourceCollection, "conf1", 2, 2); - indexDocs(sourceCollection, NUM_DOCS, - i -> new SolrInputDocument( - "id", String.valueOf(i), - "string_s", String.valueOf(i), - "remove_s", String.valueOf(i))); - - CollectionAdminRequest.ReindexCollection req = CollectionAdminRequest.reindexCollection(sourceCollection) - .setTarget(targetCollection) - .setCollectionParam(ZkStateReader.NUM_SHARDS_PROP, 3) - .setCollectionParam(ZkStateReader.REPLICATION_FACTOR, 1) - .setCollectionParam("router.name", ImplicitDocRouter.NAME) - .setCollectionParam("shards", "foo,bar,baz") - .setCollectionParam("fl", "id,string_s") - .setCollectionParam("q", "id:10*"); + indexDocs( + sourceCollection, + NUM_DOCS, + i -> + new SolrInputDocument( + "id", String.valueOf(i), + "string_s", String.valueOf(i), + "remove_s", String.valueOf(i))); + + CollectionAdminRequest.ReindexCollection req = + CollectionAdminRequest.reindexCollection(sourceCollection) + .setTarget(targetCollection) + .setCollectionParam(ZkStateReader.NUM_SHARDS_PROP, 3) + .setCollectionParam(ZkStateReader.REPLICATION_FACTOR, 1) + .setCollectionParam("router.name", ImplicitDocRouter.NAME) + .setCollectionParam("shards", "foo,bar,baz") + .setCollectionParam("fl", "id,string_s") + .setCollectionParam("q", "id:10*"); req.process(solrClient); - CloudUtil.waitForState(cloudManager, "did not finish copying in time", targetCollection, (liveNodes, coll) -> { - ReindexCollectionCmd.State state = ReindexCollectionCmd.State.get(coll.getStr(ReindexCollectionCmd.REINDEXING_STATE)); - return ReindexCollectionCmd.State.FINISHED == state; - }); + CloudUtil.waitForState( + cloudManager, + "did not finish copying in time", + targetCollection, + (liveNodes, coll) -> { + ReindexCollectionCmd.State state = + ReindexCollectionCmd.State.get(coll.getStr(ReindexCollectionCmd.REINDEXING_STATE)); + return ReindexCollectionCmd.State.FINISHED == state; + }); // verify the target docs exist QueryResponse rsp = solrClient.query(targetCollection, params(CommonParams.Q, "*:*")); @@ -272,7 +309,8 @@ public void testReshapeReindexing() throws Exception { // check the shape of the new collection ClusterState clusterState = solrClient.getClusterStateProvider().getClusterState(); - List aliases = solrClient.getZkStateReader().getAliases().resolveAliases(targetCollection); + List aliases = + solrClient.getZkStateReader().getAliases().resolveAliases(targetCollection); assertFalse(aliases.isEmpty()); String realTargetCollection = aliases.get(0); DocCollection coll = clusterState.getCollection(realTargetCollection); @@ -293,45 +331,60 @@ public void testFailure() throws Exception { createCollection(sourceCollection, "conf1", 2, 2); createCollection(targetCollection, "conf1", 1, 1); CollectionAdminRequest.createAlias(aliasTarget, targetCollection).process(solrClient); - indexDocs(sourceCollection, NUM_DOCS, - i -> new SolrInputDocument( - "id", String.valueOf(i), - "string_s", String.valueOf(i))); - - CollectionAdminRequest.ReindexCollection req = CollectionAdminRequest.reindexCollection(sourceCollection) - .setTarget(targetCollection); + indexDocs( + sourceCollection, + NUM_DOCS, + i -> + new SolrInputDocument( + "id", String.valueOf(i), + "string_s", String.valueOf(i))); + + CollectionAdminRequest.ReindexCollection req = + 
CollectionAdminRequest.reindexCollection(sourceCollection).setTarget(targetCollection); CollectionAdminResponse rsp = req.process(solrClient); assertNotNull(rsp.getResponse().get("error")); - assertTrue(rsp.toString(), rsp.getResponse().get("error").toString().contains("already exists")); + assertTrue( + rsp.toString(), rsp.getResponse().get("error").toString().contains("already exists")); - req = CollectionAdminRequest.reindexCollection(sourceCollection) - .setTarget(aliasTarget); + req = CollectionAdminRequest.reindexCollection(sourceCollection).setTarget(aliasTarget); rsp = req.process(solrClient); assertNotNull(rsp.getResponse().get("error")); - assertTrue(rsp.toString(), rsp.getResponse().get("error").toString().contains("already exists")); + assertTrue( + rsp.toString(), rsp.getResponse().get("error").toString().contains("already exists")); CollectionAdminRequest.deleteAlias(aliasTarget).process(solrClient); CollectionAdminRequest.deleteCollection(targetCollection).process(solrClient); - req = CollectionAdminRequest.reindexCollection(sourceCollection) - .setTarget(targetCollection); + req = CollectionAdminRequest.reindexCollection(sourceCollection).setTarget(targetCollection); TestInjection.reindexFailure = "true:100"; rsp = req.process(solrClient); assertNotNull(rsp.getResponse().get("error")); - assertTrue(rsp.toString(), rsp.getResponse().get("error").toString().contains("waiting for daemon")); + assertTrue( + rsp.toString(), rsp.getResponse().get("error").toString().contains("waiting for daemon")); // verify that the target and checkpoint collections don't exist - cloudManager.getClusterStateProvider().getClusterState().forEachCollection(coll -> { - assertFalse(coll.getName() + " still exists", coll.getName().startsWith(ReindexCollectionCmd.TARGET_COL_PREFIX)); - assertFalse(coll.getName() + " still exists", coll.getName().startsWith(ReindexCollectionCmd.CHK_COL_PREFIX)); - }); + cloudManager + .getClusterStateProvider() + .getClusterState() + .forEachCollection( + coll -> { + assertFalse( + coll.getName() + " still exists", + coll.getName().startsWith(ReindexCollectionCmd.TARGET_COL_PREFIX)); + assertFalse( + coll.getName() + " still exists", + coll.getName().startsWith(ReindexCollectionCmd.CHK_COL_PREFIX)); + }); // verify that the source collection is read-write and has no reindexing flags - CloudUtil.waitForState(cloudManager, "collection state is incorrect", sourceCollection, + CloudUtil.waitForState( + cloudManager, + "collection state is incorrect", + sourceCollection, ((liveNodes, collectionState) -> - !collectionState.isReadOnly() && - collectionState.getStr(ReindexCollectionCmd.REINDEXING_STATE) == null && - getState(sourceCollection) == null)); + !collectionState.isReadOnly() + && collectionState.getStr(ReindexCollectionCmd.REINDEXING_STATE) == null + && getState(sourceCollection) == null)); } @Test @@ -342,50 +395,64 @@ public void testAbort() throws Exception { createCollection(sourceCollection, "conf1", 2, 1); TestInjection.reindexLatch = new CountDownLatch(1); - CollectionAdminRequest.ReindexCollection req = CollectionAdminRequest.reindexCollection(sourceCollection) - .setTarget(targetCollection); + CollectionAdminRequest.ReindexCollection req = + CollectionAdminRequest.reindexCollection(sourceCollection).setTarget(targetCollection); String asyncId = req.processAsync(solrClient); // wait for the source collection to be put in readOnly mode - CloudUtil.waitForState(cloudManager, "source collection didn't become readOnly", - sourceCollection, (liveNodes, coll) -> 
coll.isReadOnly()); + CloudUtil.waitForState( + cloudManager, + "source collection didn't become readOnly", + sourceCollection, + (liveNodes, coll) -> coll.isReadOnly()); req = CollectionAdminRequest.reindexCollection(sourceCollection); req.setCommand("abort"); CollectionAdminResponse rsp = req.process(solrClient); - Map status = (Map)rsp.getResponse().get(ReindexCollectionCmd.REINDEX_STATUS); + Map status = + (Map) rsp.getResponse().get(ReindexCollectionCmd.REINDEX_STATUS); assertNotNull(rsp.toString(), status); assertEquals(status.toString(), "aborting", status.get("state")); - CloudUtil.waitForState(cloudManager, "incorrect collection state", sourceCollection, + CloudUtil.waitForState( + cloudManager, + "incorrect collection state", + sourceCollection, ((liveNodes, collectionState) -> - collectionState.isReadOnly() && - getState(sourceCollection) == ReindexCollectionCmd.State.ABORTED)); + collectionState.isReadOnly() + && getState(sourceCollection) == ReindexCollectionCmd.State.ABORTED)); // verify status req.setCommand("status"); rsp = req.process(solrClient); - status = (Map)rsp.getResponse().get(ReindexCollectionCmd.REINDEX_STATUS); + status = (Map) rsp.getResponse().get(ReindexCollectionCmd.REINDEX_STATUS); assertNotNull(rsp.toString(), status); assertEquals(status.toString(), "aborted", status.get("state")); // let the process continue TestInjection.reindexLatch.countDown(); - CloudUtil.waitForState(cloudManager, "source collection is in wrong state", - sourceCollection, (liveNodes, docCollection) -> !docCollection.isReadOnly() && getState(sourceCollection) == null); + CloudUtil.waitForState( + cloudManager, + "source collection is in wrong state", + sourceCollection, + (liveNodes, docCollection) -> + !docCollection.isReadOnly() && getState(sourceCollection) == null); // verify the response rsp = CollectionAdminRequest.requestStatus(asyncId).process(solrClient); - status = (Map)rsp.getResponse().get(ReindexCollectionCmd.REINDEX_STATUS); + status = (Map) rsp.getResponse().get(ReindexCollectionCmd.REINDEX_STATUS); assertNotNull(rsp.toString(), status); assertEquals(status.toString(), "aborted", status.get("state")); } - private void createCollection(String name, String config, int numShards, int numReplicas) throws Exception { + private void createCollection(String name, String config, int numShards, int numReplicas) + throws Exception { CollectionAdminRequest.createCollection(name, config, numShards, numReplicas) .process(solrClient); cluster.waitForActiveCollection(name, numShards, numShards * numReplicas); } - private void indexDocs(String collection, int numDocs, Function generator) throws Exception { + private void indexDocs( + String collection, int numDocs, Function generator) + throws Exception { List docs = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { docs.add(generator.apply(i)); @@ -396,6 +463,5 @@ private void indexDocs(String collection, int numDocs, Function { - client.add("collection", new SolrInputDocument()); - }); - assertThat(e.getMessage(), containsString("Document is missing mandatory uniqueKey field: id")); + SolrException e = + expectThrows( + SolrException.class, + () -> { + client.add("collection", new SolrInputDocument()); + }); + assertThat( + e.getMessage(), containsString("Document is missing mandatory uniqueKey field: id")); } } - } } diff --git a/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeTest.java b/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeTest.java index 9202c0840d2..46272bbeb05 100644 --- 
a/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ReplaceNodeTest.java @@ -17,7 +17,7 @@ package org.apache.solr.cloud; - +import com.codahale.metrics.Metric; import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Collections; @@ -26,8 +26,6 @@ import java.util.Map; import java.util.Set; import java.util.stream.Collectors; - -import com.codahale.metrics.Metric; import org.apache.commons.lang3.StringUtils; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -53,6 +51,7 @@ public class ReplaceNodeTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + @BeforeClass public static void setupCluster() throws Exception { System.setProperty("metricsEnabled", "true"); @@ -71,7 +70,8 @@ protected String getSolrXml() { @Test public void test() throws Exception { configureCluster(6) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-dynamic").resolve("conf")) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-dynamic").resolve("conf")) .configure(); String coll = "replacenodetest_coll"; if (log.isInfoEnabled()) { @@ -85,30 +85,42 @@ public void test() throws Exception { String emptyNode = l.remove(0); String node2bdecommissioned = l.get(0); CollectionAdminRequest.Create create; - // NOTE: always using the createCollection that takes in 'int' for all types of replicas, so we never - // have to worry about null checking when comparing the Create command with the final Slices + // NOTE: always using the createCollection that takes in 'int' for all types of replicas, so we + // never have to worry about null checking when comparing the Create command with the final + // Slices - // TODO: tlog replicas do not work correctly in tests due to fault TestInjection#waitForInSyncWithLeader - create = pickRandom( - CollectionAdminRequest.createCollection(coll, "conf1", 5, 2,0,0), - //CollectionAdminRequest.createCollection(coll, "conf1", 5, 1,1,0), - //CollectionAdminRequest.createCollection(coll, "conf1", 5, 0,1,1), - //CollectionAdminRequest.createCollection(coll, "conf1", 5, 1,0,1), - //CollectionAdminRequest.createCollection(coll, "conf1", 5, 0,2,0), - // check also replicationFactor 1 - CollectionAdminRequest.createCollection(coll, "conf1", 5, 1,0,0) - //CollectionAdminRequest.createCollection(coll, "conf1", 5, 0,1,0) - ); + // TODO: tlog replicas do not work correctly in tests due to fault + // TestInjection#waitForInSyncWithLeader + create = + pickRandom( + CollectionAdminRequest.createCollection(coll, "conf1", 5, 2, 0, 0), + // CollectionAdminRequest.createCollection(coll, "conf1", 5, 1,1,0), + // CollectionAdminRequest.createCollection(coll, "conf1", 5, 0,1,1), + // CollectionAdminRequest.createCollection(coll, "conf1", 5, 1,0,1), + // CollectionAdminRequest.createCollection(coll, "conf1", 5, 0,2,0), + // check also replicationFactor 1 + CollectionAdminRequest.createCollection(coll, "conf1", 5, 1, 0, 0) + // CollectionAdminRequest.createCollection(coll, "conf1", 5, 0,1,0) + ); create.setCreateNodeSet(StrUtils.join(l, ',')); cloudClient.request(create); - cluster.waitForActiveCollection(coll, 5, 5 * (create.getNumNrtReplicas() + create.getNumPullReplicas() + create.getNumTlogReplicas())); + cluster.waitForActiveCollection( + coll, + 5, + 5 + * (create.getNumNrtReplicas() + + create.getNumPullReplicas() + + 
create.getNumTlogReplicas())); DocCollection collection = cloudClient.getZkStateReader().getClusterState().getCollection(coll); log.debug("### Before decommission: {}", collection); log.info("excluded_node : {} ", emptyNode); - createReplaceNodeRequest(node2bdecommissioned, emptyNode, null).processAndWait("000", cloudClient, 15); - try (HttpSolrClient coreclient = getHttpSolrClient(cloudClient.getZkStateReader().getBaseUrlForNodeName(node2bdecommissioned))) { + createReplaceNodeRequest(node2bdecommissioned, emptyNode, null) + .processAndWait("000", cloudClient, 15); + try (HttpSolrClient coreclient = + getHttpSolrClient( + cloudClient.getZkStateReader().getBaseUrlForNodeName(node2bdecommissioned))) { CoreAdminResponse status = CoreAdminRequest.getStatus(null, coreclient); assertEquals(0, status.getCoreStatus().size()); } @@ -123,22 +135,33 @@ public void test() throws Exception { } log.debug("### Existing replicas on decommissioned node: {}", replicas); - //let's do it back - this time wait for recoveries - CollectionAdminRequest.AsyncCollectionAdminRequest replaceNodeRequest = createReplaceNodeRequest(emptyNode, node2bdecommissioned, Boolean.TRUE); + // let's do it back - this time wait for recoveries + CollectionAdminRequest.AsyncCollectionAdminRequest replaceNodeRequest = + createReplaceNodeRequest(emptyNode, node2bdecommissioned, Boolean.TRUE); replaceNodeRequest.setWaitForFinalState(true); replaceNodeRequest.processAndWait("001", cloudClient, 10); - try (HttpSolrClient coreclient = getHttpSolrClient(cloudClient.getZkStateReader().getBaseUrlForNodeName(emptyNode))) { + try (HttpSolrClient coreclient = + getHttpSolrClient(cloudClient.getZkStateReader().getBaseUrlForNodeName(emptyNode))) { CoreAdminResponse status = CoreAdminRequest.getStatus(null, coreclient); - assertEquals("Expecting no cores but found some: " + status.getCoreStatus(), 0, status.getCoreStatus().size()); + assertEquals( + "Expecting no cores but found some: " + status.getCoreStatus(), + 0, + status.getCoreStatus().size()); } collection = cloudClient.getZkStateReader().getClusterState().getCollection(coll); assertEquals(create.getNumShards().intValue(), collection.getSlices().size()); - for (Slice s:collection.getSlices()) { - assertEquals(create.getNumNrtReplicas().intValue(), s.getReplicas(EnumSet.of(Replica.Type.NRT)).size()); - assertEquals(create.getNumTlogReplicas().intValue(), s.getReplicas(EnumSet.of(Replica.Type.TLOG)).size()); - assertEquals(create.getNumPullReplicas().intValue(), s.getReplicas(EnumSet.of(Replica.Type.PULL)).size()); + for (Slice s : collection.getSlices()) { + assertEquals( + create.getNumNrtReplicas().intValue(), + s.getReplicas(EnumSet.of(Replica.Type.NRT)).size()); + assertEquals( + create.getNumTlogReplicas().intValue(), + s.getReplicas(EnumSet.of(Replica.Type.TLOG)).size()); + assertEquals( + create.getNumPullReplicas().intValue(), + s.getReplicas(EnumSet.of(Replica.Type.PULL)).size()); } // make sure all newly created replicas on node are active List newReplicas = collection.getReplicas(node2bdecommissioned); @@ -171,28 +194,35 @@ public void test() throws Exception { if (!metrics.containsKey("REPLICATION./replication.fetcher")) { continue; } - MetricsMap fetcherGauge = (MetricsMap) ((SolrMetricManager.GaugeWrapper) metrics.get("REPLICATION./replication.fetcher")).getGauge(); + MetricsMap fetcherGauge = + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) metrics.get("REPLICATION./replication.fetcher")) + .getGauge(); assertNotNull("no IndexFetcher gauge in metrics", fetcherGauge); Map 
value = fetcherGauge.getValue(); if (value.isEmpty()) { continue; } assertNotNull("isReplicating missing: " + value, value.get("isReplicating")); - assertTrue("isReplicating should be a boolean: " + value, value.get("isReplicating") instanceof Boolean); + assertTrue( + "isReplicating should be a boolean: " + value, + value.get("isReplicating") instanceof Boolean); if (value.get("indexReplicatedAt") == null) { continue; } assertNotNull("timesIndexReplicated missing: " + value, value.get("timesIndexReplicated")); - assertTrue("timesIndexReplicated should be a number: " + value, value.get("timesIndexReplicated") instanceof Number); + assertTrue( + "timesIndexReplicated should be a number: " + value, + value.get("timesIndexReplicated") instanceof Number); } - } @Test public void testGoodSpreadDuringAssignWithNoTarget() throws Exception { configureCluster(5) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-dynamic").resolve("conf")) - .configure(); + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-dynamic").resolve("conf")) + .configure(); String coll = "replacenodetest_coll"; if (log.isInfoEnabled()) { log.info("total_jettys: {}", cluster.getJettySolrRunners().size()); @@ -206,18 +236,31 @@ public void testGoodSpreadDuringAssignWithNoTarget() throws Exception { l = l.subList(2, l.size()); String node2bdecommissioned = l.get(0); - // TODO: tlog replicas do not work correctly in tests due to fault TestInjection#waitForInSyncWithLeader - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(coll, "conf1", 4, 3,0,0); + // TODO: tlog replicas do not work correctly in tests due to fault + // TestInjection#waitForInSyncWithLeader + CollectionAdminRequest.Create create = + CollectionAdminRequest.createCollection(coll, "conf1", 4, 3, 0, 0); create.setCreateNodeSet(StrUtils.join(l, ',')); cloudClient.request(create); - cluster.waitForActiveCollection(coll, 4, 4 * (create.getNumNrtReplicas() + create.getNumPullReplicas() + create.getNumTlogReplicas())); + cluster.waitForActiveCollection( + coll, + 4, + 4 + * (create.getNumNrtReplicas() + + create.getNumPullReplicas() + + create.getNumTlogReplicas())); - DocCollection initialCollection = cloudClient.getZkStateReader().getClusterState().getCollection(coll); + DocCollection initialCollection = + cloudClient.getZkStateReader().getClusterState().getCollection(coll); log.debug("### Before decommission: {}", initialCollection); log.info("excluded_nodes : {} ", emptyNodes); - List initialReplicaCounts = l.stream().map(node -> initialCollection.getReplicas(node).size()).collect(Collectors.toList()); - createReplaceNodeRequest(node2bdecommissioned, null, true).processAndWait("000", cloudClient, 15); + List initialReplicaCounts = + l.stream() + .map(node -> initialCollection.getReplicas(node).size()) + .collect(Collectors.toList()); + createReplaceNodeRequest(node2bdecommissioned, null, true) + .processAndWait("000", cloudClient, 15); DocCollection collection = cloudClient.getZkStateReader().getClusterState().getCollection(coll); log.debug("### After decommission: {}", collection); @@ -226,20 +269,31 @@ public void testGoodSpreadDuringAssignWithNoTarget() throws Exception { if (replicas == null) { replicas = Collections.emptyList(); } - assertEquals("There should be no more replicas on the sourceNode after a replaceNode request.", Collections.emptyList(), replicas); + assertEquals( + "There should be no more replicas on the sourceNode after a replaceNode request.", + Collections.emptyList(), + 
replicas); int sizeA = collection.getReplicas(emptyNodes.get(0)).size(); int sizeB = collection.getReplicas(emptyNodes.get(1)).size(); - assertEquals("The empty nodes should have a similar number of replicas placed on each", sizeA, sizeB, 1); - assertEquals("The number of replicas on the two empty nodes should equal the number of replicas removed from the source node", initialReplicaCounts.get(0).intValue(), sizeA + sizeB); + assertEquals( + "The empty nodes should have a similar number of replicas placed on each", sizeA, sizeB, 1); + assertEquals( + "The number of replicas on the two empty nodes should equal the number of replicas removed from the source node", + initialReplicaCounts.get(0).intValue(), + sizeA + sizeB); for (int i = 1; i < l.size(); i++) { - assertEquals("The number of replicas on non-empty and non-source nodes should not change", initialReplicaCounts.get(i).intValue(), collection.getReplicas(l.get(i)).size()); + assertEquals( + "The number of replicas on non-empty and non-source nodes should not change", + initialReplicaCounts.get(i).intValue(), + collection.getReplicas(l.get(i)).size()); } } @Test public void testFailOnSingleNode() throws Exception { configureCluster(1) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-dynamic").resolve("conf")) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-dynamic").resolve("conf")) .configure(); String coll = "replacesinglenodetest_coll"; if (log.isInfoEnabled()) { @@ -247,21 +301,26 @@ public void testFailOnSingleNode() throws Exception { } CloudSolrClient cloudClient = cluster.getSolrClient(); - cloudClient.request(CollectionAdminRequest.createCollection(coll, "conf1", 5, 1,0,0)); + cloudClient.request(CollectionAdminRequest.createCollection(coll, "conf1", 5, 1, 0, 0)); cluster.waitForActiveCollection(coll, 5, 5); - String liveNode = cloudClient.getZkStateReader().getClusterState().getLiveNodes().iterator().next(); - expectThrows(SolrException.class, () -> createReplaceNodeRequest(liveNode, null, null).process(cloudClient)); + String liveNode = + cloudClient.getZkStateReader().getClusterState().getLiveNodes().iterator().next(); + expectThrows( + SolrException.class, + () -> createReplaceNodeRequest(liveNode, null, null).process(cloudClient)); } - public static CollectionAdminRequest.AsyncCollectionAdminRequest createReplaceNodeRequest(String sourceNode, String targetNode, Boolean parallel) { + public static CollectionAdminRequest.AsyncCollectionAdminRequest createReplaceNodeRequest( + String sourceNode, String targetNode, Boolean parallel) { if (random().nextBoolean()) { return new CollectionAdminRequest.ReplaceNode(sourceNode, targetNode).setParallel(parallel); - } else { + } else { // test back compat with old param names // todo remove in solr 8.0 - return new CollectionAdminRequest.AsyncCollectionAdminRequest(CollectionParams.CollectionAction.REPLACENODE) { + return new CollectionAdminRequest.AsyncCollectionAdminRequest( + CollectionParams.CollectionAction.REPLACENODE) { @Override public SolrParams getParams() { ModifiableSolrParams params = (ModifiableSolrParams) super.getParams(); diff --git a/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java b/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java index 056a94963c9..c23fb2b7815 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java @@ -26,7 +26,6 @@ import java.util.Set; import 
java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; - import org.apache.commons.lang3.StringUtils; import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.SolrTestCaseJ4.SuppressSSL; @@ -47,14 +46,13 @@ import org.slf4j.LoggerFactory; /** - * Tests a client application's ability to get replication factor - * information back from the cluster after an add or update. + * Tests a client application's ability to get replication factor information back from the cluster + * after an add or update. */ @Slow @SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776") -// 12-Jun-2018 @LuceneTestCase.BadApple(bugUrl = "https://issues.apache.org/jira/browse/SOLR-6944") public class ReplicationFactorTest extends AbstractFullDistribZkTestBase { - + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); public ReplicationFactorTest() { @@ -62,23 +60,27 @@ public ReplicationFactorTest() { sliceCount = 3; fixShardCount(3); } - + /** - * Overrides the parent implementation so that we can configure a socket proxy - * to sit infront of each Jetty server, which gives us the ability to simulate - * network partitions without having to fuss with IPTables (which is not very - * cross platform friendly). + * Overrides the parent implementation so that we can configure a socket proxy to sit in front of + * each Jetty server, which gives us the ability to simulate network partitions without having to + * fuss with IPTables (which is not very cross-platform friendly). */ @Override - public JettySolrRunner createJetty(File solrHome, String dataDir, - String shardList, String solrConfigOverride, String schemaOverride, Replica.Type replicaType) + public JettySolrRunner createJetty( + File solrHome, + String dataDir, + String shardList, + String solrConfigOverride, + String schemaOverride, + Replica.Type replicaType) + throws Exception { - return createProxiedJetty(solrHome, dataDir, shardList, solrConfigOverride, schemaOverride, replicaType); + return createProxiedJetty( + solrHome, dataDir, shardList, solrConfigOverride, schemaOverride, replicaType); } - + @Test - // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018 public void test() throws Exception { log.info("replication factor test running"); waitForThingsToLevelOut(30, TimeUnit.SECONDS); @@ -92,14 +94,15 @@ public void test() throws Exception { // test handling when not using direct updates log.info("Now testing replication factor handling for repfacttest_c8n_2x2"); testRf2NotUsingDirectUpdates(); - + waitForThingsToLevelOut(30, TimeUnit.SECONDS); if (log.isInfoEnabled()) { - log.info("replication factor testing complete! final clusterState is: {}", + log.info( + "replication factor testing complete!
final clusterState is: {}", cloudClient.getZkStateReader().getClusterState()); } } - + protected void testRf2NotUsingDirectUpdates() throws Exception { int numShards = 2; int replicationFactor = 2; @@ -109,20 +112,21 @@ protected void testRf2NotUsingDirectUpdates() throws Exception { createCollectionWithRetry(testCollectionName, "conf1", numShards, replicationFactor); cloudClient.setDefaultCollection(testCollectionName); - - List replicas = + + List replicas = ensureAllReplicasAreActive(testCollectionName, shardId, numShards, replicationFactor, 30); - assertTrue("Expected active 1 replicas for "+testCollectionName, replicas.size() == 1); - + assertTrue("Expected active 1 replicas for " + testCollectionName, replicas.size() == 1); + List batch = new ArrayList(10); - for (int i=0; i < 15; i++) { + for (int i = 0; i < 15; i++) { SolrInputDocument doc = new SolrInputDocument(); doc.addField(id, String.valueOf(i)); doc.addField("a_t", "hello" + i); batch.add(doc); } - // send directly to the leader using HttpSolrServer instead of CloudSolrServer (to test support for non-direct updates) + // send directly to the leader using HttpSolrServer instead of CloudSolrServer (to test support + // for non-direct updates) UpdateRequest up = new UpdateRequest(); up.add(batch); @@ -132,20 +136,20 @@ protected void testRf2NotUsingDirectUpdates() throws Exception { sendNonDirectUpdateRequestReplicaWithRetry(replicas.get(0), up, 2, testCollectionName); // Insure nothing is tricky about a delete where only one shard needs to delete anything. - sendNonDirectDeletesRequestReplicaWithRetry(leader, getSomeIds(1), 2, - getSomeIds(1), 2, testCollectionName); - sendNonDirectDeletesRequestReplicaWithRetry(replicas.get(0), getSomeIds(1), 2, - getSomeIds(1), 2, testCollectionName); + sendNonDirectDeletesRequestReplicaWithRetry( + leader, getSomeIds(1), 2, getSomeIds(1), 2, testCollectionName); + sendNonDirectDeletesRequestReplicaWithRetry( + replicas.get(0), getSomeIds(1), 2, getSomeIds(1), 2, testCollectionName); - sendNonDirectDeletesRequestReplicaWithRetry(leader, getSomeIds(2), 2, - getSomeIds(2), 2, testCollectionName); - sendNonDirectDeletesRequestReplicaWithRetry(replicas.get(0), getSomeIds(2), 2, - getSomeIds(2), 2, testCollectionName); + sendNonDirectDeletesRequestReplicaWithRetry( + leader, getSomeIds(2), 2, getSomeIds(2), 2, testCollectionName); + sendNonDirectDeletesRequestReplicaWithRetry( + replicas.get(0), getSomeIds(2), 2, getSomeIds(2), 2, testCollectionName); // so now kill the replica of shard2 and verify the achieved rf is only 1 List shard2Replicas = ensureAllReplicasAreActive(testCollectionName, "shard2", numShards, replicationFactor, 30); - assertTrue("Expected active 1 replicas for "+testCollectionName, replicas.size() == 1); + assertTrue("Expected active 1 replicas for " + testCollectionName, replicas.size() == 1); getProxyForReplica(shard2Replicas.get(0)).close(); @@ -155,43 +159,52 @@ protected void testRf2NotUsingDirectUpdates() throws Exception { sendNonDirectUpdateRequestReplicaWithRetry(leader, up, 1, testCollectionName); sendNonDirectUpdateRequestReplicaWithRetry(replicas.get(0), up, 1, testCollectionName); - // Whether the replication factor is 1 or 2 in the delete-by-id case depends on whether the doc IDs happen to fall - // on a single shard or not. + // Whether the replication factor is 1 or 2 in the delete-by-id case depends on whether the doc + // IDs happen to fall on a single shard or not. 
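// A worked example under assumed hashing (the concrete IDs here are hypothetical): with the
// default compositeId router, a deleteById("1004") is routed only to the shard owning that
// hash range. If every ID in the batch lands on a fully healthy shard the achieved rf is 2,
// but a single ID owned by "shard2" (whose replica sits behind the closed proxy) pulls the
// reported minimum down to 1. calcByIdRf below computes exactly this:
//
//   Slice target =
//       coll.getRouter().getTargetSlice(Integer.toString(id), null, null, null, coll);
//   if ("shard2".equals(target.getName())) rf = 1;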
Set byIDs; byIDs = getSomeIds(2); - sendNonDirectDeletesRequestReplicaWithRetry(leader, - byIDs, calcByIdRf(byIDs, testCollectionName, "shard2"), - getSomeIds(2), 1, testCollectionName); + sendNonDirectDeletesRequestReplicaWithRetry( + leader, + byIDs, + calcByIdRf(byIDs, testCollectionName, "shard2"), + getSomeIds(2), + 1, + testCollectionName); byIDs = getSomeIds(2); - sendNonDirectDeletesRequestReplicaWithRetry(replicas.get(0), byIDs, + sendNonDirectDeletesRequestReplicaWithRetry( + replicas.get(0), + byIDs, calcByIdRf(byIDs, testCollectionName, "shard2"), - getSomeIds(2), 1, testCollectionName); + getSomeIds(2), + 1, + testCollectionName); // heal the partition getProxyForReplica(shard2Replicas.get(0)).reopen(); Thread.sleep(2000); } - // When doing a delete by id, it's tricky, very tricky. If any document we're deleting by ID goes to shardWithOne, - // then the replication factor we return will be 1. - // + // When doing a delete by id, it's tricky, very tricky. If any document we're deleting by ID goes + // to shardWithOne, then the replication factor we return will be 1. private int calcByIdRf(Set byIDs, String testCollectionName, String shardWithOne) { ZkController zkController = jettys.get(0).getCoreContainer().getZkController(); DocCollection coll = zkController.getClusterState().getCollection(testCollectionName); int retval = 2; for (int id : byIDs) { DocRouter router = coll.getRouter(); - if (shardWithOne.equals(router.getTargetSlice(Integer.toString(id), null, null, null, coll).getName())) { + if (shardWithOne.equals( + router.getTargetSlice(Integer.toString(id), null, null, null, coll).getName())) { retval = 1; } } return retval; } - int idFloor = random().nextInt(100) + 1000; // Get the delete tests to use disjoint documents although + // Get the delete tests to use disjoint documents although + int idFloor = random().nextInt(100) + 1000; - // Randomize documents so we exercise requests landing on replicas that have (or don't) particular documents - // Yeah, this will go on forever if you ask for more than 100, but it suffices. + // Randomize documents so we exercise requests landing on replicas that have (or don't) particular + // documents. Yeah, this will go on forever if you ask for more than 100, but it suffices. private Set getSomeIds(int count) { Set ids = new HashSet<>(); while (ids.size() < count) { @@ -201,11 +214,14 @@ private Set getSomeIds(int count) { return ids; } - - protected void sendNonDirectDeletesRequestReplicaWithRetry(Replica rep, - Set byIdsSet, int expectedRfByIds, - Set byQueriesSet, int expectedRfDBQ, - String coll) throws Exception { + protected void sendNonDirectDeletesRequestReplicaWithRetry( + Replica rep, + Set byIdsSet, + int expectedRfByIds, + Set byQueriesSet, + int expectedRfDBQ, + String coll) + throws Exception { // First add the docs indicated List byIdsList = new ArrayList<>(); List byQueryList = new ArrayList<>(); @@ -233,14 +249,14 @@ protected void sendNonDirectDeletesRequestReplicaWithRetry(Replica rep, req.deleteById(byIdsList); sendNonDirectUpdateRequestReplicaWithRetry(rep, req, expectedRfByIds, coll); - //Delete the docs by query indicated. + // Delete the docs by query indicated. 
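// (Unlike the by-id deletes above, a delete-by-query fans out to every shard, so its achieved
// rf is the minimum across all shards; that is why the DBQ expectations later in this class
// drop to 1 as soon as any replica anywhere is cut off.)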
req = new UpdateRequest(); req.deleteByQuery("id:(" + StringUtils.join(byQueriesSet, " OR ") + ")"); sendNonDirectUpdateRequestReplicaWithRetry(rep, req, expectedRfDBQ, coll); - } - protected void sendNonDirectUpdateRequestReplicaWithRetry(Replica replica, UpdateRequest up, int expectedRf, String collection) throws Exception { + protected void sendNonDirectUpdateRequestReplicaWithRetry( + Replica replica, UpdateRequest up, int expectedRf, String collection) throws Exception { try { sendNonDirectUpdateRequestReplica(replica, up, expectedRf, collection); Thread.sleep(100); // Let the system settle down before retrying @@ -248,17 +264,25 @@ protected void sendNonDirectUpdateRequestReplicaWithRetry(Replica replica, Updat sendNonDirectUpdateRequestReplica(replica, up, expectedRf, collection); } } - - protected void sendNonDirectUpdateRequestReplica(Replica replica, UpdateRequest up, int expectedRf, String collection) throws Exception { + + protected void sendNonDirectUpdateRequestReplica( + Replica replica, UpdateRequest up, int expectedRf, String collection) throws Exception { ZkCoreNodeProps zkProps = new ZkCoreNodeProps(replica); String url = zkProps.getBaseUrl() + "/" + collection; try (HttpSolrClient solrServer = getHttpSolrClient(url)) { NamedList resp = solrServer.request(up); NamedList hdr = (NamedList) resp.get("responseHeader"); - Integer batchRf = (Integer)hdr.get(UpdateRequest.REPFACT); - // Note that this also tests if we're wonky and return an achieved rf greater than the number of live replicas. - assertTrue("Expected rf="+expectedRf+" for batch but got "+ - batchRf + "; clusterState: " + printClusterStateInfo(), batchRf == expectedRf); + Integer batchRf = (Integer) hdr.get(UpdateRequest.REPFACT); + // Note that this also tests if we're wonky and return an achieved rf greater than the number + // of live replicas. 
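// (Here the rf is read straight off the response header of the replica we targeted; the
// runAndGetAchievedRf helper later in this class gets the same number through CloudSolrClient,
// which folds the per-shard rf values in the response down to their minimum:
//
//   NamedList<Object> resp = cloudClient.request(up);
//   return cloudClient.getMinAchievedReplicationFactor(cloudClient.getDefaultCollection(), resp);
// )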
+ assertTrue( + "Expected rf=" + + expectedRf + + " for batch but got " + + batchRf + + "; clusterState: " + + printClusterStateInfo(), + batchRf == expectedRf); } } @@ -271,11 +295,11 @@ protected void testRf3() throws Exception { createCollectionWithRetry(testCollectionName, "conf1", numShards, replicationFactor); cloudClient.setDefaultCollection(testCollectionName); - - List replicas = + + List replicas = ensureAllReplicasAreActive(testCollectionName, shardId, numShards, replicationFactor, 30); - assertTrue("Expected 2 active replicas for "+testCollectionName, replicas.size() == 2); - + assertTrue("Expected 2 active replicas for " + testCollectionName, replicas.size() == 2); + log.info("Indexing docId=1"); int rf = sendDoc(1); assertRf(3, "all replicas should be active", rf); @@ -286,7 +310,7 @@ protected void testRf3() throws Exception { log.info("Closing one proxy port"); getProxyForReplica(replicas.get(0)).close(); - + log.info("Indexing docId=2"); rf = sendDoc(2); assertRf(2, "one replica should be down", rf); @@ -295,10 +319,9 @@ protected void testRf3() throws Exception { doDBQWithRetry(2, 5, "deletes should have propagated to 2 replicas", 1); doDBIdWithRetry(2, 5, "deletes should have propagated to 2 replicas", 1); - // SOLR-13599 sanity check if problem is related to sending a batch List batch = new ArrayList(10); - for (int i=30; i < 45; i++) { + for (int i = 30; i < 45; i++) { SolrInputDocument doc = new SolrInputDocument(); doc.addField(id, String.valueOf(i)); doc.addField("a_t", "hello" + i); @@ -307,9 +330,9 @@ protected void testRf3() throws Exception { log.info("Indexing batch of documents (30-45)"); int batchRf = sendDocsWithRetry(batch, minRf, 5, 1); assertRf(2, "batch should have succeded, only one replica should be down", batchRf); - + log.info("Closing second proxy port"); - getProxyForReplica(replicas.get(1)).close(); + getProxyForReplica(replicas.get(1)).close(); log.info("Indexing docId=3"); rf = sendDoc(3); @@ -320,13 +343,13 @@ protected void testRf3() throws Exception { // heal the partitions log.info("Re-opening closed proxy ports"); - getProxyForReplica(replicas.get(0)).reopen(); + getProxyForReplica(replicas.get(0)).reopen(); getProxyForReplica(replicas.get(1)).reopen(); - + Thread.sleep(2000); // give time for the healed partition to get propagated - + ensureAllReplicasAreActive(testCollectionName, shardId, numShards, replicationFactor, 30); - + log.info("Indexing docId=4"); rf = sendDoc(4); assertRf(3, "all replicas have been healed", rf); @@ -336,13 +359,13 @@ protected void testRf3() throws Exception { // now send a batch batch = new ArrayList(10); - for (int i=5; i < 15; i++) { + for (int i = 5; i < 15; i++) { SolrInputDocument doc = new SolrInputDocument(); doc.addField(id, String.valueOf(i)); doc.addField("a_t", "hello" + i); batch.add(doc); } - + log.info("Indexing batch of documents (5-14)"); batchRf = sendDocsWithRetry(batch, minRf, 5, 1); assertRf(3, "batch add should have succeeded on all replicas", batchRf); @@ -359,10 +382,10 @@ protected void testRf3() throws Exception { log.info("Indexing docId=5"); rf = sendDoc(5); assertRf(2, "doc should have succeded, only one replica should be down", rf); - + // now send a batch (again) batch = new ArrayList(10); - for (int i=15; i < 30; i++) { + for (int i = 15; i < 30; i++) { SolrInputDocument doc = new SolrInputDocument(); doc.addField(id, String.valueOf(i)); doc.addField("a_t", "hello" + i); @@ -378,9 +401,9 @@ protected void testRf3() throws Exception { // close the 2nd replica, and send a 3rd batch 
with expected achieved rf=1 log.info("Closing second proxy port (again)"); getProxyForReplica(replicas.get(1)).close(); - + batch = new ArrayList(10); - for (int i=30; i < 45; i++) { + for (int i = 30; i < 45; i++) { SolrInputDocument doc = new SolrInputDocument(); doc.addField(id, String.valueOf(i)); doc.addField("a_t", "hello" + i); @@ -388,15 +411,18 @@ protected void testRf3() throws Exception { } batchRf = sendDocsWithRetry(batch, minRf, 5, 1); - assertRf(1, "batch should have succeeded on the leader only (both replicas should be down)", batchRf); + assertRf( + 1, + "batch should have succeeded on the leader only (both replicas should be down)", + batchRf); doDBQWithRetry(1, 5, "deletes should have propagated to only 1 replica", 15); doDBIdWithRetry(1, 5, "deletes should have propagated to only 1 replica", 15); - getProxyForReplica(replicas.get(0)).reopen(); + getProxyForReplica(replicas.get(0)).reopen(); getProxyForReplica(replicas.get(1)).reopen(); - Thread.sleep(2000); + Thread.sleep(2000); ensureAllReplicasAreActive(testCollectionName, shardId, numShards, replicationFactor, 30); } @@ -417,7 +443,8 @@ protected void addDocs(Set docIds, int expectedRf, int retries) throws sendDocsWithRetry(batch, expectedRf, retries, 1); } - protected void doDBQWithRetry(int expectedRf, int retries, String msg, int docsToAdd) throws Exception { + protected void doDBQWithRetry(int expectedRf, int retries, String msg, int docsToAdd) + throws Exception { Set docIds = getSomeIds(docsToAdd); addDocs(docIds, expectedRf, retries); UpdateRequest req = new UpdateRequest(); @@ -425,7 +452,8 @@ protected void doDBQWithRetry(int expectedRf, int retries, String msg, int docsT doDelete(req, msg, expectedRf, retries); } - protected void doDBIdWithRetry(int expectedRf, int retries, String msg, int docsToAdd) throws Exception { + protected void doDBIdWithRetry(int expectedRf, int retries, String msg, int docsToAdd) + throws Exception { Set docIds = getSomeIds(docsToAdd); addDocs(docIds, expectedRf, retries); UpdateRequest req = new UpdateRequest(); @@ -433,11 +461,13 @@ protected void doDBIdWithRetry(int expectedRf, int retries, String msg, int docs doDelete(req, msg, expectedRf, retries); } - protected void doDelete(UpdateRequest req, String msg, int expectedRf, int retries) throws IOException, SolrServerException, InterruptedException { + protected void doDelete(UpdateRequest req, String msg, int expectedRf, int retries) + throws IOException, SolrServerException, InterruptedException { int achievedRf = -1; for (int idx = 0; idx < retries; ++idx) { NamedList response = cloudClient.request(req); - achievedRf = cloudClient.getMinAchievedReplicationFactor(cloudClient.getDefaultCollection(), response); + achievedRf = + cloudClient.getMinAchievedReplicationFactor(cloudClient.getDefaultCollection(), response); if (achievedRf == expectedRf) return; Thread.sleep(1000); } @@ -452,22 +482,27 @@ protected int sendDoc(int docId) throws Exception { up.add(doc); return runAndGetAchievedRf(up); } - + private int runAndGetAchievedRf(UpdateRequest up) throws SolrServerException, IOException { NamedList response = cloudClient.request(up); - return cloudClient.getMinAchievedReplicationFactor(cloudClient.getDefaultCollection(), response); + return cloudClient.getMinAchievedReplicationFactor( + cloudClient.getDefaultCollection(), response); } protected void assertRf(int expected, String explain, int actual) throws Exception { if (actual != expected) { - String assertionFailedMessage = - String.format(Locale.ENGLISH, "Expected rf=%d 
because %s but got %d", expected, explain, actual); - fail(assertionFailedMessage+"; clusterState: "+printClusterStateInfo()); + String assertionFailedMessage = + String.format( + Locale.ENGLISH, "Expected rf=%d because %s but got %d", expected, explain, actual); + fail(assertionFailedMessage + "; clusterState: " + printClusterStateInfo()); } } - void createCollectionWithRetry(String testCollectionName, String config, int numShards, int replicationFactor) throws IOException, SolrServerException, InterruptedException, TimeoutException { - CollectionAdminResponse resp = createCollection(testCollectionName, "conf1", numShards, replicationFactor); + void createCollectionWithRetry( + String testCollectionName, String config, int numShards, int replicationFactor) + throws IOException, SolrServerException, InterruptedException, TimeoutException { + CollectionAdminResponse resp = + createCollection(testCollectionName, "conf1", numShards, replicationFactor); if (resp.getResponse().get("failure") != null) { Thread.sleep(5000); // let system settle down. This should be very rare. @@ -481,5 +516,4 @@ void createCollectionWithRetry(String testCollectionName, String config, int num } } } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/RestartWhileUpdatingTest.java b/solr/core/src/test/org/apache/solr/cloud/RestartWhileUpdatingTest.java index 7079ccea1a5..aceb8a2f678 100644 --- a/solr/core/src/test/org/apache/solr/cloud/RestartWhileUpdatingTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/RestartWhileUpdatingTest.java @@ -23,7 +23,8 @@ @Slow @Nightly /** - * Implementation moved to AbstractRestartWhileUpdatingTestBase because it is used by HDFS contrib module tests + * Implementation moved to AbstractRestartWhileUpdatingTestBase because it is used by HDFS contrib + * module tests */ public class RestartWhileUpdatingTest extends AbstractRestartWhileUpdatingTestBase { diff --git a/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java b/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java index de939a7ed83..1b71333d6bf 100644 --- a/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java @@ -20,7 +20,6 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; - import org.apache.commons.collections4.CollectionUtils; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.common.cloud.SolrZkClient; @@ -46,9 +45,10 @@ public void distribSetUp() throws Exception { } @Test - //commented 2-Aug-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2018-06-18 public void test() throws Exception { - if (new CollectionAdminRequest.RequestApiDistributedProcessing().process(cloudClient).getIsCollectionApiDistributed()) { + if (new CollectionAdminRequest.RequestApiDistributedProcessing() + .process(cloudClient) + .getIsCollectionApiDistributed()) { log.info("Skipping test because Collection API is distributed"); return; } @@ -60,9 +60,10 @@ public void test() throws Exception { waitForRecoveriesToFinish(false); } - public void restartWithRolesTest() throws Exception { - String leader = OverseerCollectionConfigSetProcessor.getLeaderNode(cloudClient.getZkStateReader().getZkClient()); + String leader = + OverseerCollectionConfigSetProcessor.getLeaderNode( + cloudClient.getZkStateReader().getZkClient()); assertNotNull(leader); log.info("Current overseer leader = {}", leader); @@ -76,12 +77,13 @@ 
public void restartWithRolesTest() throws Exception { int n = random().nextInt(getShardCount()); String nodeName = cloudJettys.get(n).nodeName; log.info("Chose {} as overseer designate", nodeName); - CollectionAdminRequest.addRole(nodeName,"overseer").process(cloudClient); + CollectionAdminRequest.addRole(nodeName, "overseer").process(cloudClient); designates.add(nodeName); designateJettys.add(cloudJettys.get(n)); } - waitUntilOverseerDesignateIsLeader(cloudClient.getZkStateReader().getZkClient(), designates, MAX_WAIT_TIME); + waitUntilOverseerDesignateIsLeader( + cloudClient.getZkStateReader().getZkClient(), designates, MAX_WAIT_TIME); cloudClient.getZkStateReader().getZkClient().printLayoutToStream(System.out); @@ -93,48 +95,70 @@ public void restartWithRolesTest() throws Exception { log.info("Restarting {}", cloudJetty); chaosMonkey.stopJetty(cloudJetty); cloudClient.getZkStateReader().updateLiveNodes(); - boolean liveDesignates = CollectionUtils.intersection(cloudClient.getZkStateReader().getClusterState().getLiveNodes(), designates).size() > 0; + boolean liveDesignates = + CollectionUtils.intersection( + cloudClient.getZkStateReader().getClusterState().getLiveNodes(), designates) + .size() + > 0; if (liveDesignates) { sawLiveDesignate = true; - boolean success = waitUntilOverseerDesignateIsLeader(cloudClient.getZkStateReader().getZkClient(), designates, MAX_WAIT_TIME); + boolean success = + waitUntilOverseerDesignateIsLeader( + cloudClient.getZkStateReader().getZkClient(), designates, MAX_WAIT_TIME); if (!success) { - leader = OverseerCollectionConfigSetProcessor.getLeaderNode(cloudClient.getZkStateReader().getZkClient()); + leader = + OverseerCollectionConfigSetProcessor.getLeaderNode( + cloudClient.getZkStateReader().getZkClient()); if (leader == null) - log.error("NOOVERSEER election queue is : {}" - , OverseerCollectionConfigSetProcessor.getSortedElectionNodes(cloudClient.getZkStateReader().getZkClient(), - "/overseer_elect/election")); + log.error( + "NOOVERSEER election queue is : {}", + OverseerCollectionConfigSetProcessor.getSortedElectionNodes( + cloudClient.getZkStateReader().getZkClient(), "/overseer_elect/election")); fail("No overseer designate as leader found after restart #" + (i + 1) + ": " + leader); } } cloudJetty.jetty.start(); - boolean success = waitUntilOverseerDesignateIsLeader(cloudClient.getZkStateReader().getZkClient(), designates, MAX_WAIT_TIME); + boolean success = + waitUntilOverseerDesignateIsLeader( + cloudClient.getZkStateReader().getZkClient(), designates, MAX_WAIT_TIME); if (!success) { - leader = OverseerCollectionConfigSetProcessor.getLeaderNode(cloudClient.getZkStateReader().getZkClient()); + leader = + OverseerCollectionConfigSetProcessor.getLeaderNode( + cloudClient.getZkStateReader().getZkClient()); if (leader == null) - log.error("NOOVERSEER election queue is :{}" - , OverseerCollectionConfigSetProcessor.getSortedElectionNodes(cloudClient.getZkStateReader().getZkClient(), - "/overseer_elect/election")); + log.error( + "NOOVERSEER election queue is :{}", + OverseerCollectionConfigSetProcessor.getSortedElectionNodes( + cloudClient.getZkStateReader().getZkClient(), "/overseer_elect/election")); fail("No overseer leader found after restart #" + (i + 1) + ": " + leader); } cloudClient.getZkStateReader().updateLiveNodes(); - sawLiveDesignate = CollectionUtils.intersection(cloudClient.getZkStateReader().getClusterState().getLiveNodes(), designates).size() > 0; - + sawLiveDesignate = + CollectionUtils.intersection( + 
cloudClient.getZkStateReader().getClusterState().getLiveNodes(), designates) + .size() + > 0; } } assertTrue("Test may not be working if we never saw a live designate", sawLiveDesignate); - leader = OverseerCollectionConfigSetProcessor.getLeaderNode(cloudClient.getZkStateReader().getZkClient()); + leader = + OverseerCollectionConfigSetProcessor.getLeaderNode( + cloudClient.getZkStateReader().getZkClient()); assertNotNull(leader); log.info("Current overseer leader (after restart) = {}", leader); cloudClient.getZkStateReader().getZkClient().printLayoutToStream(System.out); } - static boolean waitUntilOverseerDesignateIsLeader(SolrZkClient testZkClient, List overseerDesignates, long timeoutInNanos) throws KeeperException, InterruptedException { + static boolean waitUntilOverseerDesignateIsLeader( + SolrZkClient testZkClient, List overseerDesignates, long timeoutInNanos) + throws KeeperException, InterruptedException { long now = System.nanoTime(); - long maxTimeout = now + timeoutInNanos; // the maximum amount of time we're willing to wait to see the designate as leader + // the maximum amount of time we're willing to wait to see the designate as leader + long maxTimeout = now + timeoutInNanos; long timeout = now + TimeUnit.NANOSECONDS.convert(60, TimeUnit.SECONDS); boolean firstTime = true; int stableCheckTimeout = 2000; @@ -144,7 +168,8 @@ static boolean waitUntilOverseerDesignateIsLeader(SolrZkClient testZkClient, Lis if (newLeader != null && !newLeader.equals(oldleader)) { // the leaders have changed, let's move the timeout further timeout = System.nanoTime() + TimeUnit.NANOSECONDS.convert(60, TimeUnit.SECONDS); - log.info("oldLeader={} newLeader={} - Advancing timeout to: {}", oldleader, newLeader, timeout); + log.info( + "oldLeader={} newLeader={} - Advancing timeout to: {}", oldleader, newLeader, timeout); oldleader = newLeader; } if (!overseerDesignates.contains(newLeader)) { diff --git a/solr/core/src/test/org/apache/solr/cloud/RouteFieldTest.java b/solr/core/src/test/org/apache/solr/cloud/RouteFieldTest.java index a3c18560638..5049c9bd1d1 100644 --- a/solr/core/src/test/org/apache/solr/cloud/RouteFieldTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/RouteFieldTest.java @@ -17,6 +17,13 @@ package org.apache.solr.cloud; +import static org.apache.solr.client.solrj.response.schema.SchemaResponse.*; + +import java.io.IOException; +import java.lang.invoke.MethodHandles; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.impl.HttpSolrClient; @@ -34,14 +41,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.lang.invoke.MethodHandles; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import static org.apache.solr.client.solrj.response.schema.SchemaResponse.*; - public class RouteFieldTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -52,9 +51,7 @@ public class RouteFieldTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { System.setProperty("managed.schema.mutable", "true"); - configureCluster(1) - .addConfig("conf", configset("cloud-managed")) - .configure(); + configureCluster(1).addConfig("conf", configset("cloud-managed")).configure(); } // Test for seeing if we actually respect the route field @@ 
-65,23 +62,32 @@ public static void setupCluster() throws Exception { public void routeFieldTest() throws Exception { log.info("Starting routeFieldTest"); - assertEquals("Failed to create collection routeFieldTest", + assertEquals( + "Failed to create collection routeFieldTest", 0, CollectionAdminRequest.createCollection(COLL_ROUTE, "conf", 2, 1) .setRouterField(ROUTE_FIELD) - .process(cluster.getSolrClient()).getStatus()); + .process(cluster.getSolrClient()) + .getStatus()); List updateList = new ArrayList<>(); - updateList.add(new SchemaRequest.AddField(Map.of("name", ROUTE_FIELD, "type", "string", "indexed", "true", "stored", "true"))); - updateList.add(new SchemaRequest.AddField(Map.of("name", "sorter", "type", "int", "indexed", "true", "stored", "true"))); + updateList.add( + new SchemaRequest.AddField( + Map.of("name", ROUTE_FIELD, "type", "string", "indexed", "true", "stored", "true"))); + updateList.add( + new SchemaRequest.AddField( + Map.of("name", "sorter", "type", "int", "indexed", "true", "stored", "true"))); SchemaRequest.MultiUpdate multiUpdateRequest = new SchemaRequest.MultiUpdate(updateList); - UpdateResponse multipleUpdatesResponse = multiUpdateRequest.process(cluster.getSolrClient(), COLL_ROUTE); + UpdateResponse multipleUpdatesResponse = + multiUpdateRequest.process(cluster.getSolrClient(), COLL_ROUTE); assertNull("Error adding fields", multipleUpdatesResponse.getResponse().get("errors")); - assertEquals("Failed to create collection routeIdTest" - , 0 - , CollectionAdminRequest.createCollection(COLL_ID, "conf", 2, 1) - .process(cluster.getSolrClient()).getStatus()); + assertEquals( + "Failed to create collection routeIdTest", + 0, + CollectionAdminRequest.createCollection(COLL_ID, "conf", 2, 1) + .process(cluster.getSolrClient()) + .getStatus()); // We now have two collections, add the same docs to each with the proper // fields so the id field is used in one collection and ROUTE_FIELD in the other.. 
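(For readers unfamiliar with route fields, here is a minimal SolrJ sketch of the concept RouteFieldTest exercises. It is illustrative only, not part of the patch: the ZooKeeper address "localhost:9983", collection name "routed", config name "conf", and field name "myRoute" are assumed placeholders; the SolrJ calls themselves — createCollection(...).setRouterField(...), add, commit — are the same APIs used in the test above.)

import java.util.List;
import java.util.Optional;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.common.SolrInputDocument;

public class RouteFieldSketch {
  public static void main(String[] args) throws Exception {
    // "localhost:9983" is a placeholder ZooKeeper address.
    try (CloudSolrClient client =
        new CloudSolrClient.Builder(List.of("localhost:9983"), Optional.empty()).build()) {
      // setRouterField makes SolrCloud hash "myRoute" instead of "id"
      // when deciding which shard owns a document.
      CollectionAdminRequest.createCollection("routed", "conf", 2, 1)
          .setRouterField("myRoute")
          .process(client);

      SolrInputDocument doc = new SolrInputDocument();
      doc.addField("id", "1");
      doc.addField("myRoute", "tenantA"); // all "tenantA" docs land on one shard
      client.add("routed", doc);
      client.commit("routed");
    }
  }
}

RouteFieldTest then verifies that a collection routed this way places documents on the same shards, and in the same order, as an equivalent id-routed collection.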
@@ -136,7 +142,8 @@ private void checkShardsHaveSameDocs() throws IOException, SolrServerException { compareShardDocs(urlIdShard2, urlRouteShard2); } - private void compareShardDocs(String urlId, String urlRoute) throws IOException, SolrServerException { + private void compareShardDocs(String urlId, String urlRoute) + throws IOException, SolrServerException { ModifiableSolrParams params = new ModifiableSolrParams(); QueryRequest request = new QueryRequest(params); params.add("distrib", "false"); @@ -152,12 +159,17 @@ private void compareShardDocs(String urlId, String urlRoute) throws IOException, SolrDocumentList docsRoute = (SolrDocumentList) httpSC.request(request).get("response"); httpSC.close(); - assertEquals("We should have the exact same number of docs on each shard", docsId.getNumFound(), docsRoute.getNumFound()); + assertEquals( + "We should have the exact same number of docs on each shard", + docsId.getNumFound(), + docsRoute.getNumFound()); for (int idx = 0; idx < docsId.getNumFound(); ++idx) { int idId = Integer.parseInt((String) docsId.get(idx).getFieldValue("id")); int idRoute = Integer.parseInt((String) docsRoute.get(idx).getFieldValue("id")); - assertEquals("Docs with Ids 1.5M different should be on exactly the same shard and in the same order when sorted", - idId, idRoute - 1_500_000); + assertEquals( + "Docs with Ids 1.5M different should be on exactly the same shard and in the same order when sorted", + idId, + idRoute - 1_500_000); } } } diff --git a/solr/core/src/test/org/apache/solr/cloud/SSLMigrationTest.java b/solr/core/src/test/org/apache/solr/cloud/SSLMigrationTest.java index 2d09ac69fea..fff489d5217 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SSLMigrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SSLMigrationTest.java @@ -16,10 +16,14 @@ */ package org.apache.solr.cloud; - +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Properties; import org.apache.commons.lang3.StringUtils; -import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.lucene.util.LuceneTestCase.AwaitsFix; +import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.SolrTestCaseJ4.SuppressSSL; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.embedded.JettyConfig; @@ -36,99 +40,99 @@ import org.apache.solr.util.SSLTestConfig; import org.junit.Test; -import java.util.ArrayList; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Properties; - /** - * We want to make sure that when migrating between http and https modes the - * replicas will not be rejoined as new nodes, but rather take off where it left - * off in the cluster. + * We want to make sure that when migrating between http and https modes the replicas will not be + * rejoined as new nodes, but rather pick up where they left off in the cluster. 
*/ @Slow @SuppressSSL -@AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 17-Mar-2018 +@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-12028") // 17-Mar-2018 public class SSLMigrationTest extends AbstractFullDistribZkTestBase { @Test public void test() throws Exception { - //Migrate from HTTP -> HTTPS -> HTTP + // Migrate from HTTP -> HTTPS -> HTTP assertReplicaInformation("http"); testMigrateSSL(new SSLTestConfig(true, false)); testMigrateSSL(new SSLTestConfig(false, false)); } - + public void testMigrateSSL(SSLTestConfig sslConfig) throws Exception { String urlScheme = sslConfig.isSSLMode() ? "https" : "http"; setUrlScheme(urlScheme); - - for(JettySolrRunner runner : jettys) { + + for (JettySolrRunner runner : jettys) { runner.stop(); } - - HttpClientUtil.setSocketFactoryRegistryProvider(sslConfig.buildClientSocketFactoryRegistryProvider()); - for(int i = 0; i < this.jettys.size(); i++) { + + HttpClientUtil.setSocketFactoryRegistryProvider( + sslConfig.buildClientSocketFactoryRegistryProvider()); + for (int i = 0; i < this.jettys.size(); i++) { JettySolrRunner runner = jettys.get(i); - JettyConfig config = JettyConfig.builder() - .setContext(context) - .setPort(runner.getLocalPort()) - .stopAtShutdown(false) - .withServlets(getExtraServlets()) - .withFilters(getExtraRequestFilters()) - .withSSLConfig(sslConfig.buildServerSSLConfig()) - .build(); + JettyConfig config = + JettyConfig.builder() + .setContext(context) + .setPort(runner.getLocalPort()) + .stopAtShutdown(false) + .withServlets(getExtraServlets()) + .withFilters(getExtraRequestFilters()) + .withSSLConfig(sslConfig.buildServerSSLConfig()) + .build(); Properties props = new Properties(); - if (getSolrConfigFile() != null) - props.setProperty("solrconfig", getSolrConfigFile()); - if (getSchemaFile() != null) - props.setProperty("schema", getSchemaFile()); + if (getSolrConfigFile() != null) props.setProperty("solrconfig", getSolrConfigFile()); + if (getSchemaFile() != null) props.setProperty("schema", getSchemaFile()); props.setProperty("solr.data.dir", getDataDir(testDir + "/shard" + i + "/data")); JettySolrRunner newRunner = new JettySolrRunner(runner.getSolrHome(), props, config); newRunner.start(); jettys.set(i, newRunner); } - + assertReplicaInformation(urlScheme); } - + private void assertReplicaInformation(String urlScheme) throws Exception { List replicas = getReplicas(); assertEquals("Wrong number of replicas found", 4, replicas.size()); - for(Replica replica : replicas) { - assertTrue("Replica didn't have the proper urlScheme in the ClusterState", + for (Replica replica : replicas) { + assertTrue( + "Replica didn't have the proper urlScheme in the ClusterState", StringUtils.startsWith(replica.getStr(ZkStateReader.BASE_URL_PROP), urlScheme)); } } - + private List getReplicas() { List replicas = new ArrayList(); - - DocCollection collection = this.cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION); - for(Slice slice : collection.getSlices()) { + + DocCollection collection = + this.cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION); + for (Slice slice : collection.getSlices()) { replicas.addAll(slice.getReplicas()); } return replicas; } - + private void setUrlScheme(String value) throws Exception { - Map m = Map.of("action", CollectionAction.CLUSTERPROP.toString() - .toLowerCase(Locale.ROOT), "name", "urlScheme", "val", value); + Map m = + Map.of( + "action", + 
CollectionAction.CLUSTERPROP.toString().toLowerCase(Locale.ROOT), + "name", + "urlScheme", + "val", + value); SolrParams params = new MapSolrParams(m); QueryRequest request = new QueryRequest(params); request.setPath("/admin/collections"); - + List urls = new ArrayList<>(); - for(Replica replica : getReplicas()) { + for (Replica replica : getReplicas()) { urls.add(replica.getStr(ZkStateReader.BASE_URL_PROP)); } - //Create new SolrServer to configure new HttpClient w/ SSL config - try (SolrClient client = getLBHttpSolrClient(urls.toArray(new String[]{}))) { + // Create new SolrServer to configure new HttpClient w/ SSL config + try (SolrClient client = getLBHttpSolrClient(urls.toArray(new String[] {}))) { client.request(request); } } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/SegmentTerminateEarlyTestState.java b/solr/core/src/test/org/apache/solr/cloud/SegmentTerminateEarlyTestState.java index 3074106d5e1..04fa5069edd 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SegmentTerminateEarlyTestState.java +++ b/solr/core/src/test/org/apache/solr/cloud/SegmentTerminateEarlyTestState.java @@ -19,9 +19,8 @@ import java.util.HashSet; import java.util.Map; -import java.util.Set; import java.util.Random; - +import java.util.Set; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.response.QueryResponse; @@ -36,12 +35,14 @@ class SegmentTerminateEarlyTestState { static final String KEY_FIELD = "id"; - // for historic reasons, this is refered to as a "timestamp" field, but in actuallity is just an int - // value representing a number of "minutes" between 0-60. + // for historic reasons, this is referred to as a "timestamp" field, but in actuality is just an + // int value representing a number of "minutes" between 0-60. 
// aka: I decided not to rename a million things while refactoring this test public static final String TIMESTAMP_FIELD = "timestamp_i_dvo"; - public static final String ODD_FIELD = "odd_l1"; // - public static final String QUAD_FIELD = "quad_l1"; // + // + public static final String ODD_FIELD = "odd_l1"; + // + public static final String QUAD_FIELD = "quad_l1"; final Set minTimestampDocKeys = new HashSet<>(); final Set maxTimestampDocKeys = new HashSet<>(); @@ -55,15 +56,16 @@ class SegmentTerminateEarlyTestState { public SegmentTerminateEarlyTestState(Random rand) { this.rand = rand; } - - void addDocuments(CloudSolrClient cloudSolrClient, - int numCommits, int numDocsPerCommit, boolean optimize) throws Exception { + + void addDocuments( + CloudSolrClient cloudSolrClient, int numCommits, int numDocsPerCommit, boolean optimize) + throws Exception { for (int cc = 1; cc <= numCommits; ++cc) { for (int nn = 1; nn <= numDocsPerCommit; ++nn) { ++numDocs; final Integer docKey = numDocs; SolrInputDocument doc = new SolrInputDocument(); - doc.setField(KEY_FIELD, ""+docKey); + doc.setField(KEY_FIELD, "" + docKey); final int MM = rand.nextInt(60); // minutes if (minTimestampMM == null || MM <= minTimestampMM.intValue()) { if (minTimestampMM != null && MM < minTimestampMM.intValue()) { @@ -80,8 +82,8 @@ void addDocuments(CloudSolrClient cloudSolrClient, maxTimestampDocKeys.add(docKey); } doc.setField(TIMESTAMP_FIELD, MM); - doc.setField(ODD_FIELD, ""+(numDocs % 2)); - doc.setField(QUAD_FIELD, ""+(numDocs % 4)+1); + doc.setField(ODD_FIELD, "" + (numDocs % 2)); + doc.setField(QUAD_FIELD, "" + (numDocs % 4) + 1); cloudSolrClient.add(doc); } cloudSolrClient.commit(); @@ -93,35 +95,46 @@ void addDocuments(CloudSolrClient cloudSolrClient, void queryTimestampDescending(CloudSolrClient cloudSolrClient) throws Exception { TestSegmentSorting.assertFalse(maxTimestampDocKeys.isEmpty()); - TestSegmentSorting.assertTrue("numDocs="+numDocs+" is not even", (numDocs%2)==0); + TestSegmentSorting.assertTrue("numDocs=" + numDocs + " is not even", (numDocs % 2) == 0); final Long oddFieldValue = (long) (maxTimestampDocKeys.iterator().next().intValue() % 2); - final SolrQuery query = new SolrQuery(ODD_FIELD +":"+oddFieldValue); + final SolrQuery query = new SolrQuery(ODD_FIELD + ":" + oddFieldValue); query.setSort(TIMESTAMP_FIELD, SolrQuery.ORDER.desc); query.setFields(KEY_FIELD, ODD_FIELD, TIMESTAMP_FIELD); query.setRows(1); // CommonParams.SEGMENT_TERMINATE_EARLY parameter intentionally absent final QueryResponse rsp = cloudSolrClient.query(query); // check correctness of the results count - TestSegmentSorting.assertEquals("numFound", numDocs/2, rsp.getResults().getNumFound()); + TestSegmentSorting.assertEquals("numFound", numDocs / 2, rsp.getResults().getNumFound()); // check correctness of the first result if (rsp.getResults().getNumFound() > 0) { final SolrDocument solrDocument0 = rsp.getResults().get(0); final Integer idAsInt = Integer.parseInt(solrDocument0.getFieldValue(KEY_FIELD).toString()); - TestSegmentSorting.assertTrue - (KEY_FIELD +"="+idAsInt+" of ("+solrDocument0+") is not in maxTimestampDocKeys("+maxTimestampDocKeys+")", - maxTimestampDocKeys.contains(idAsInt)); - TestSegmentSorting.assertEquals(ODD_FIELD, oddFieldValue, solrDocument0.getFieldValue(ODD_FIELD)); + TestSegmentSorting.assertTrue( + KEY_FIELD + + "=" + + idAsInt + + " of (" + + solrDocument0 + + ") is not in maxTimestampDocKeys(" + + maxTimestampDocKeys + + ")", + maxTimestampDocKeys.contains(idAsInt)); + TestSegmentSorting.assertEquals( + 
ODD_FIELD, oddFieldValue, solrDocument0.getFieldValue(ODD_FIELD)); } // check segmentTerminatedEarly flag - TestSegmentSorting.assertNull("responseHeader.segmentTerminatedEarly present in "+rsp.getResponseHeader(), - rsp.getResponseHeader().get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY)); + TestSegmentSorting.assertNull( + "responseHeader.segmentTerminatedEarly present in " + rsp.getResponseHeader(), + rsp.getResponseHeader() + .get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY)); } - void queryTimestampDescendingSegmentTerminateEarlyYes(CloudSolrClient cloudSolrClient) throws Exception { + void queryTimestampDescendingSegmentTerminateEarlyYes(CloudSolrClient cloudSolrClient) + throws Exception { TestSegmentSorting.assertFalse(maxTimestampDocKeys.isEmpty()); - TestSegmentSorting.assertTrue("numDocs="+numDocs+" is not even", (numDocs%2)==0); + TestSegmentSorting.assertTrue("numDocs=" + numDocs + " is not even", (numDocs % 2) == 0); final Long oddFieldValue = (long) (maxTimestampDocKeys.iterator().next().intValue() % 2); - final SolrQuery query = new SolrQuery(ODD_FIELD +":"+oddFieldValue); + final SolrQuery query = new SolrQuery(ODD_FIELD + ":" + oddFieldValue); query.setSort(TIMESTAMP_FIELD, SolrQuery.ORDER.desc); query.setFields(KEY_FIELD, ODD_FIELD, TIMESTAMP_FIELD); final int rowsWanted = 1; @@ -134,21 +147,34 @@ void queryTimestampDescendingSegmentTerminateEarlyYes(CloudSolrClient cloudSolrC final QueryResponse rsp = cloudSolrClient.query(query); // check correctness of the results count TestSegmentSorting.assertTrue("numFound", rowsWanted <= rsp.getResults().getNumFound()); - TestSegmentSorting.assertTrue("numFound", rsp.getResults().getNumFound() <= numDocs/2); + TestSegmentSorting.assertTrue("numFound", rsp.getResults().getNumFound() <= numDocs / 2); // check correctness of the first result if (rsp.getResults().getNumFound() > 0) { final SolrDocument solrDocument0 = rsp.getResults().get(0); final Integer idAsInt = Integer.parseInt(solrDocument0.getFieldValue(KEY_FIELD).toString()); - TestSegmentSorting.assertTrue - (KEY_FIELD +"="+idAsInt+" of ("+solrDocument0+") is not in maxTimestampDocKeys("+maxTimestampDocKeys+")", - maxTimestampDocKeys.contains(idAsInt)); - TestSegmentSorting.assertEquals(ODD_FIELD, oddFieldValue, rsp.getResults().get(0).getFieldValue(ODD_FIELD)); + TestSegmentSorting.assertTrue( + KEY_FIELD + + "=" + + idAsInt + + " of (" + + solrDocument0 + + ") is not in maxTimestampDocKeys(" + + maxTimestampDocKeys + + ")", + maxTimestampDocKeys.contains(idAsInt)); + TestSegmentSorting.assertEquals( + ODD_FIELD, oddFieldValue, rsp.getResults().get(0).getFieldValue(ODD_FIELD)); } // check segmentTerminatedEarly flag - TestSegmentSorting.assertNotNull("responseHeader.segmentTerminatedEarly missing in "+rsp.getResponseHeader(), - rsp.getResponseHeader().get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY)); - TestSegmentSorting.assertTrue("responseHeader.segmentTerminatedEarly missing/false in "+rsp.getResponseHeader(), - Boolean.TRUE.equals(rsp.getResponseHeader().get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY))); + TestSegmentSorting.assertNotNull( + "responseHeader.segmentTerminatedEarly missing in " + rsp.getResponseHeader(), + rsp.getResponseHeader() + .get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY)); + TestSegmentSorting.assertTrue( + "responseHeader.segmentTerminatedEarly missing/false in " + rsp.getResponseHeader(), + Boolean.TRUE.equals( + rsp.getResponseHeader() + 
.get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY))); // check shards info final Object shardsInfo = rsp.getResponse().get(ShardParams.SHARDS_INFO); if (!Boolean.TRUE.equals(shardsInfoWanted)) { @@ -156,22 +182,28 @@ void queryTimestampDescendingSegmentTerminateEarlyYes(CloudSolrClient cloudSolrC } else { TestSegmentSorting.assertNotNull(ShardParams.SHARDS_INFO, shardsInfo); int segmentTerminatedEarlyShardsCount = 0; - for (Map.Entry si : (SimpleOrderedMap)shardsInfo) { - if (Boolean.TRUE.equals(((SimpleOrderedMap)si.getValue()).get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY))) { + for (Map.Entry si : (SimpleOrderedMap) shardsInfo) { + if (Boolean.TRUE.equals( + ((SimpleOrderedMap) si.getValue()) + .get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY))) { segmentTerminatedEarlyShardsCount += 1; } } // check segmentTerminatedEarly flag within shards info - TestSegmentSorting.assertTrue(segmentTerminatedEarlyShardsCount+" shards reported "+SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY, - (0 0) { final SolrDocument solrDocument0 = rsp.getResults().get(0); final Integer idAsInt = Integer.parseInt(solrDocument0.getFieldValue(KEY_FIELD).toString()); - TestSegmentSorting.assertTrue - (KEY_FIELD +"="+idAsInt+" of ("+solrDocument0+") is not in maxTimestampDocKeys("+maxTimestampDocKeys+")", - maxTimestampDocKeys.contains(idAsInt)); - TestSegmentSorting.assertEquals(ODD_FIELD, oddFieldValue, rsp.getResults().get(0).getFieldValue(ODD_FIELD)); + TestSegmentSorting.assertTrue( + KEY_FIELD + + "=" + + idAsInt + + " of (" + + solrDocument0 + + ") is not in maxTimestampDocKeys(" + + maxTimestampDocKeys + + ")", + maxTimestampDocKeys.contains(idAsInt)); + TestSegmentSorting.assertEquals( + ODD_FIELD, oddFieldValue, rsp.getResults().get(0).getFieldValue(ODD_FIELD)); } // check segmentTerminatedEarly flag - TestSegmentSorting.assertNull("responseHeader.segmentTerminatedEarly present in "+rsp.getResponseHeader(), - rsp.getResponseHeader().get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY)); - TestSegmentSorting.assertFalse("responseHeader.segmentTerminatedEarly present/true in "+rsp.getResponseHeader(), - Boolean.TRUE.equals(rsp.getResponseHeader().get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY))); + TestSegmentSorting.assertNull( + "responseHeader.segmentTerminatedEarly present in " + rsp.getResponseHeader(), + rsp.getResponseHeader() + .get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY)); + TestSegmentSorting.assertFalse( + "responseHeader.segmentTerminatedEarly present/true in " + rsp.getResponseHeader(), + Boolean.TRUE.equals( + rsp.getResponseHeader() + .get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY))); // check shards info final Object shardsInfo = rsp.getResponse().get(ShardParams.SHARDS_INFO); if (!Boolean.TRUE.equals(shardsInfoWanted)) { @@ -204,72 +249,105 @@ void queryTimestampDescendingSegmentTerminateEarlyNo(CloudSolrClient cloudSolrCl } else { TestSegmentSorting.assertNotNull(ShardParams.SHARDS_INFO, shardsInfo); int segmentTerminatedEarlyShardsCount = 0; - for (Map.Entry si : (SimpleOrderedMap)shardsInfo) { - if (Boolean.TRUE.equals(((SimpleOrderedMap)si.getValue()).get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY))) { + for (Map.Entry si : (SimpleOrderedMap) shardsInfo) { + if (Boolean.TRUE.equals( + ((SimpleOrderedMap) si.getValue()) + .get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY))) { 
segmentTerminatedEarlyShardsCount += 1; } } - TestSegmentSorting.assertEquals("shards reporting "+SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY, - 0, segmentTerminatedEarlyShardsCount); + TestSegmentSorting.assertEquals( + "shards reporting " + SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY, + 0, + segmentTerminatedEarlyShardsCount); } } - void queryTimestampDescendingSegmentTerminateEarlyYesGrouped(CloudSolrClient cloudSolrClient) throws Exception { + void queryTimestampDescendingSegmentTerminateEarlyYesGrouped(CloudSolrClient cloudSolrClient) + throws Exception { TestSegmentSorting.assertFalse(maxTimestampDocKeys.isEmpty()); - TestSegmentSorting.assertTrue("numDocs="+numDocs+" is not even", (numDocs%2)==0); + TestSegmentSorting.assertTrue("numDocs=" + numDocs + " is not even", (numDocs % 2) == 0); final Long oddFieldValue = (long) (maxTimestampDocKeys.iterator().next().intValue() % 2); - final SolrQuery query = new SolrQuery(ODD_FIELD +":"+oddFieldValue); + final SolrQuery query = new SolrQuery(ODD_FIELD + ":" + oddFieldValue); query.setSort(TIMESTAMP_FIELD, SolrQuery.ORDER.desc); query.setFields(KEY_FIELD, ODD_FIELD, TIMESTAMP_FIELD); query.setRows(1); query.set(CommonParams.SEGMENT_TERMINATE_EARLY, true); - TestSegmentSorting.assertTrue("numDocs="+numDocs+" is not quad-able", (numDocs%4)==0); + TestSegmentSorting.assertTrue("numDocs=" + numDocs + " is not quad-able", (numDocs % 4) == 0); query.add("group.field", QUAD_FIELD); query.set("group", true); final QueryResponse rsp = cloudSolrClient.query(query); // check correctness of the results count - TestSegmentSorting.assertEquals("matches", numDocs/2, rsp.getGroupResponse().getValues().get(0).getMatches()); + TestSegmentSorting.assertEquals( + "matches", numDocs / 2, rsp.getGroupResponse().getValues().get(0).getMatches()); // check correctness of the first result if (rsp.getGroupResponse().getValues().get(0).getMatches() > 0) { - final SolrDocument solrDocument = rsp.getGroupResponse().getValues().get(0).getValues().get(0).getResult().get(0); + final SolrDocument solrDocument = + rsp.getGroupResponse().getValues().get(0).getValues().get(0).getResult().get(0); final Integer idAsInt = Integer.parseInt(solrDocument.getFieldValue(KEY_FIELD).toString()); - TestSegmentSorting.assertTrue - (KEY_FIELD +"="+idAsInt+" of ("+solrDocument+") is not in maxTimestampDocKeys("+maxTimestampDocKeys+")", - maxTimestampDocKeys.contains(idAsInt)); - TestSegmentSorting.assertEquals(ODD_FIELD, oddFieldValue, solrDocument.getFieldValue(ODD_FIELD)); + TestSegmentSorting.assertTrue( + KEY_FIELD + + "=" + + idAsInt + + " of (" + + solrDocument + + ") is not in maxTimestampDocKeys(" + + maxTimestampDocKeys + + ")", + maxTimestampDocKeys.contains(idAsInt)); + TestSegmentSorting.assertEquals( + ODD_FIELD, oddFieldValue, solrDocument.getFieldValue(ODD_FIELD)); } // check segmentTerminatedEarly flag // at present segmentTerminateEarly cannot be used with grouped queries - TestSegmentSorting.assertFalse("responseHeader.segmentTerminatedEarly present/true in "+rsp.getResponseHeader(), - Boolean.TRUE.equals(rsp.getResponseHeader().get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY))); + TestSegmentSorting.assertFalse( + "responseHeader.segmentTerminatedEarly present/true in " + rsp.getResponseHeader(), + Boolean.TRUE.equals( + rsp.getResponseHeader() + .get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY))); } - void queryTimestampAscendingSegmentTerminateEarlyYes(CloudSolrClient cloudSolrClient) throws 
Exception { + void queryTimestampAscendingSegmentTerminateEarlyYes(CloudSolrClient cloudSolrClient) + throws Exception { TestSegmentSorting.assertFalse(minTimestampDocKeys.isEmpty()); - TestSegmentSorting.assertTrue("numDocs="+numDocs+" is not even", (numDocs%2)==0); + TestSegmentSorting.assertTrue("numDocs=" + numDocs + " is not even", (numDocs % 2) == 0); final Long oddFieldValue = (long) (minTimestampDocKeys.iterator().next().intValue() % 2); - final SolrQuery query = new SolrQuery(ODD_FIELD +":"+oddFieldValue); - query.setSort(TIMESTAMP_FIELD, SolrQuery.ORDER.asc); // a sort order that is _not_ compatible with the merge sort order + final SolrQuery query = new SolrQuery(ODD_FIELD + ":" + oddFieldValue); + // a sort order that is _not_ compatible with the merge sort order + query.setSort(TIMESTAMP_FIELD, SolrQuery.ORDER.asc); query.setFields(KEY_FIELD, ODD_FIELD, TIMESTAMP_FIELD); query.setRows(1); query.set(CommonParams.SEGMENT_TERMINATE_EARLY, true); final QueryResponse rsp = cloudSolrClient.query(query); // check correctness of the results count - TestSegmentSorting.assertEquals("numFound", numDocs/2, rsp.getResults().getNumFound()); + TestSegmentSorting.assertEquals("numFound", numDocs / 2, rsp.getResults().getNumFound()); // check correctness of the first result if (rsp.getResults().getNumFound() > 0) { final SolrDocument solrDocument0 = rsp.getResults().get(0); final Integer idAsInt = Integer.parseInt(solrDocument0.getFieldValue(KEY_FIELD).toString()); - TestSegmentSorting.assertTrue - (KEY_FIELD +"="+idAsInt+" of ("+solrDocument0+") is not in minTimestampDocKeys("+minTimestampDocKeys+")", - minTimestampDocKeys.contains(idAsInt)); - TestSegmentSorting.assertEquals(ODD_FIELD, oddFieldValue, solrDocument0.getFieldValue(ODD_FIELD)); + TestSegmentSorting.assertTrue( + KEY_FIELD + + "=" + + idAsInt + + " of (" + + solrDocument0 + + ") is not in minTimestampDocKeys(" + + minTimestampDocKeys + + ")", + minTimestampDocKeys.contains(idAsInt)); + TestSegmentSorting.assertEquals( + ODD_FIELD, oddFieldValue, solrDocument0.getFieldValue(ODD_FIELD)); } // check segmentTerminatedEarly flag - TestSegmentSorting.assertNotNull("responseHeader.segmentTerminatedEarly missing in "+rsp.getResponseHeader(), - rsp.getResponseHeader().get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY)); + TestSegmentSorting.assertNotNull( + "responseHeader.segmentTerminatedEarly missing in " + rsp.getResponseHeader(), + rsp.getResponseHeader() + .get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY)); // segmentTerminateEarly cannot be used with incompatible sort orders - TestSegmentSorting.assertTrue("responseHeader.segmentTerminatedEarly missing/true in "+rsp.getResponseHeader(), - Boolean.FALSE.equals(rsp.getResponseHeader().get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY))); + TestSegmentSorting.assertTrue( + "responseHeader.segmentTerminatedEarly missing/true in " + rsp.getResponseHeader(), + Boolean.FALSE.equals( + rsp.getResponseHeader() + .get(SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY))); } } diff --git a/solr/core/src/test/org/apache/solr/cloud/ShardRoutingCustomTest.java b/solr/core/src/test/org/apache/solr/cloud/ShardRoutingCustomTest.java index 2c239a12b2b..e1e9c94cb64 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ShardRoutingCustomTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ShardRoutingCustomTest.java @@ -17,7 +17,6 @@ package org.apache.solr.cloud; import java.io.File; - import org.apache.solr.client.solrj.SolrClient; 
import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -27,7 +26,8 @@ public class ShardRoutingCustomTest extends AbstractFullDistribZkTestBase { - String collection = DEFAULT_COLLECTION; // enable this to be configurable (more work needs to be done) + // enable this to be configurable (more work needs to be done) + String collection = DEFAULT_COLLECTION; @BeforeClass public static void beforeShardHashingTest() throws Exception { @@ -35,7 +35,7 @@ public static void beforeShardHashingTest() throws Exception { } public ShardRoutingCustomTest() { - schemaString = "schema15.xml"; // we need a string id + schemaString = "schema15.xml"; // we need a string id sliceCount = 0; } @@ -59,17 +59,22 @@ private void doCustomSharding() throws Exception { File jettyDir = createTempDir("jetty").toFile(); jettyDir.mkdirs(); setupJettySolrHome(jettyDir); - JettySolrRunner j = createJetty(jettyDir, createTempDir().toFile().getAbsolutePath(), "shardA", "solrconfig.xml", null); + JettySolrRunner j = + createJetty( + jettyDir, createTempDir().toFile().getAbsolutePath(), "shardA", "solrconfig.xml", null); j.start(); - assertEquals(0, CollectionAdminRequest - .createCollection(DEFAULT_COLLECTION, "conf1", 1, 1) - .setCreateNodeSet("") - .process(cloudClient).getStatus()); - assertTrue(CollectionAdminRequest - .addReplicaToShard(collection,"shard1") - .setNode(j.getNodeName()) - .setType(useTlogReplicas()? Replica.Type.TLOG: Replica.Type.NRT) - .process(cloudClient).isSuccess()); + assertEquals( + 0, + CollectionAdminRequest.createCollection(DEFAULT_COLLECTION, "conf1", 1, 1) + .setCreateNodeSet("") + .process(cloudClient) + .getStatus()); + assertTrue( + CollectionAdminRequest.addReplicaToShard(collection, "shard1") + .setNode(j.getNodeName()) + .setType(useTlogReplicas() ? 
Replica.Type.TLOG : Replica.Type.NRT) + .process(cloudClient) + .isSuccess()); jettys.add(j); SolrClient client = createNewSolrClient(j.getLocalPort()); clients.add(client); @@ -80,6 +85,4 @@ private void doCustomSharding() throws Exception { printLayout(); } - - } diff --git a/solr/core/src/test/org/apache/solr/cloud/ShardRoutingTest.java b/solr/core/src/test/org/apache/solr/cloud/ShardRoutingTest.java index 5045ca88dab..88f4e923eae 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ShardRoutingTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ShardRoutingTest.java @@ -16,6 +16,9 @@ */ package org.apache.solr.cloud; +import java.lang.invoke.MethodHandles; +import java.util.List; +import java.util.Map; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.request.UpdateRequest; @@ -26,20 +29,14 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.lang.invoke.MethodHandles; -import java.util.List; -import java.util.Map; - - public class ShardRoutingTest extends AbstractFullDistribZkTestBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - String bucket1 = "shard1"; // shard1: top bits:10 80000000:bfffffff - String bucket2 = "shard2"; // shard2: top bits:11 c0000000:ffffffff - String bucket3 = "shard3"; // shard3: top bits:00 00000000:3fffffff - String bucket4 = "shard4"; // shard4: top bits:01 40000000:7fffffff - + String bucket1 = "shard1"; // shard1: top bits:10 80000000:bfffffff + String bucket2 = "shard2"; // shard2: top bits:11 c0000000:ffffffff + String bucket3 = "shard3"; // shard3: top bits:00 00000000:3fffffff + String bucket4 = "shard4"; // shard4: top bits:01 40000000:7fffffff @BeforeClass public static void beforeShardHashingTest() throws Exception { @@ -47,12 +44,12 @@ public static void beforeShardHashingTest() throws Exception { // like a ram dir will not recover correctly right now // because tran log will still exist on restart and ram // dir will not persist - perhaps translog can empty on - // start if using an EphemeralDirectoryFactory + // start if using an EphemeralDirectoryFactory useFactory(null); } public ShardRoutingTest() { - schemaString = "schema15.xml"; // we need a string id + schemaString = "schema15.xml"; // we need a string id super.sliceCount = 4; // from negative to positive, the upper bits of the hash ranges should be @@ -62,36 +59,36 @@ public ShardRoutingTest() { // shard4: top bits:01 40000000:7fffffff /*** - hash of a is 3c2569b2 high bits=0 shard=shard3 - hash of b is 95de7e03 high bits=2 shard=shard1 - hash of c is e132d65f high bits=3 shard=shard2 - hash of d is 27191473 high bits=0 shard=shard3 - hash of e is 656c4367 high bits=1 shard=shard4 - hash of f is 2b64883b high bits=0 shard=shard3 - hash of g is f18ae416 high bits=3 shard=shard2 - hash of h is d482b2d3 high bits=3 shard=shard2 - hash of i is 811a702b high bits=2 shard=shard1 - hash of j is ca745a39 high bits=3 shard=shard2 - hash of k is cfbda5d1 high bits=3 shard=shard2 - hash of l is 1d5d6a2c high bits=0 shard=shard3 - hash of m is 5ae4385c high bits=1 shard=shard4 - hash of n is c651d8ac high bits=3 shard=shard2 - hash of o is 68348473 high bits=1 shard=shard4 - hash of p is 986fdf9a high bits=2 shard=shard1 - hash of q is ff8209e8 high bits=3 shard=shard2 - hash of r is 5c9373f1 high bits=1 shard=shard4 - hash of s is ff4acaf1 high bits=3 shard=shard2 - hash of t is ca87df4d high bits=3 shard=shard2 - hash of u 
is 62203ae0 high bits=1 shard=shard4 - hash of v is bdafcc55 high bits=2 shard=shard1 - hash of w is ff439d1f high bits=3 shard=shard2 - hash of x is 3e9a9b1b high bits=0 shard=shard3 - hash of y is 477d9216 high bits=1 shard=shard4 - hash of z is c1f69a17 high bits=3 shard=shard2 - - hash of f1 is 313bf6b1 - hash of f2 is ff143f8 - + * hash of a is 3c2569b2 high bits=0 shard=shard3 + * hash of b is 95de7e03 high bits=2 shard=shard1 + * hash of c is e132d65f high bits=3 shard=shard2 + * hash of d is 27191473 high bits=0 shard=shard3 + * hash of e is 656c4367 high bits=1 shard=shard4 + * hash of f is 2b64883b high bits=0 shard=shard3 + * hash of g is f18ae416 high bits=3 shard=shard2 + * hash of h is d482b2d3 high bits=3 shard=shard2 + * hash of i is 811a702b high bits=2 shard=shard1 + * hash of j is ca745a39 high bits=3 shard=shard2 + * hash of k is cfbda5d1 high bits=3 shard=shard2 + * hash of l is 1d5d6a2c high bits=0 shard=shard3 + * hash of m is 5ae4385c high bits=1 shard=shard4 + * hash of n is c651d8ac high bits=3 shard=shard2 + * hash of o is 68348473 high bits=1 shard=shard4 + * hash of p is 986fdf9a high bits=2 shard=shard1 + * hash of q is ff8209e8 high bits=3 shard=shard2 + * hash of r is 5c9373f1 high bits=1 shard=shard4 + * hash of s is ff4acaf1 high bits=3 shard=shard2 + * hash of t is ca87df4d high bits=3 shard=shard2 + * hash of u is 62203ae0 high bits=1 shard=shard4 + * hash of v is bdafcc55 high bits=2 shard=shard1 + * hash of w is ff439d1f high bits=3 shard=shard2 + * hash of x is 3e9a9b1b high bits=0 shard=shard3 + * hash of y is 477d9216 high bits=1 shard=shard4 + * hash of z is c1f69a17 high bits=3 shard=shard2 + * + * hash of f1 is 313bf6b1 + * hash of f2 is ff143f8 + * ***/ } @@ -118,17 +115,20 @@ public void test() throws Exception { } } - - - private void doHashingTest() throws Exception { log.info("### STARTING doHashingTest"); - assertEquals(4, cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION).getSlices().size()); + assertEquals( + 4, + cloudClient + .getZkStateReader() + .getClusterState() + .getCollection(DEFAULT_COLLECTION) + .getSlices() + .size()); String shardKeys = ShardParams._ROUTE_; // for now, we know how ranges will be distributed to shards. // may have to look it up in clusterstate if that assumption changes. 
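// Aside, as context for the "b!doc1"-style ids used below (general compositeId
// routing behavior, not something this patch changes): an id like "b!doc1" is
// split at "!" and the prefix is hashed, so every "b!..." id maps to the same
// shard; a two-level id like "f1!f2!doc5" draws hash bits from both prefixes;
// and a "/n" suffix such as "b/1!" or "f1/8!" caps how many high bits the
// prefix contributes to the routing hash.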
- doAddDoc("b!doc1"); doAddDoc("c!doc2"); doAddDoc("d!doc3"); @@ -148,54 +148,79 @@ private void doHashingTest() throws Exception { commit(); - doQuery("b!doc1,c!doc2,d!doc3,e!doc4,f1!f2!doc5,f1!f2!doc5/5", "q","*:*"); - doQuery("b!doc1,c!doc2,d!doc3,e!doc4,f1!f2!doc5,f1!f2!doc5/5", "q","*:*", "shards","shard1,shard2,shard3,shard4"); - doQuery("b!doc1,c!doc2,d!doc3,e!doc4,f1!f2!doc5,f1!f2!doc5/5", "q","*:*", shardKeys,"b!,c!,d!,e!,f1!f2!"); - doQuery("b!doc1", "q","*:*", shardKeys,"b!"); - doQuery("c!doc2", "q","*:*", shardKeys,"c!"); - doQuery("d!doc3,f1!f2!doc5,f1!f2!doc5/5", "q","*:*", shardKeys,"d!"); - doQuery("e!doc4", "q","*:*", shardKeys,"e!"); - doQuery("f1!f2!doc5,d!doc3,f1!f2!doc5/5", "q","*:*", shardKeys,"f1/8!"); + doQuery("b!doc1,c!doc2,d!doc3,e!doc4,f1!f2!doc5,f1!f2!doc5/5", "q", "*:*"); + doQuery( + "b!doc1,c!doc2,d!doc3,e!doc4,f1!f2!doc5,f1!f2!doc5/5", + "q", + "*:*", + "shards", + "shard1,shard2,shard3,shard4"); + doQuery( + "b!doc1,c!doc2,d!doc3,e!doc4,f1!f2!doc5,f1!f2!doc5/5", + "q", + "*:*", + shardKeys, + "b!,c!,d!,e!,f1!f2!"); + doQuery("b!doc1", "q", "*:*", shardKeys, "b!"); + doQuery("c!doc2", "q", "*:*", shardKeys, "c!"); + doQuery("d!doc3,f1!f2!doc5,f1!f2!doc5/5", "q", "*:*", shardKeys, "d!"); + doQuery("e!doc4", "q", "*:*", shardKeys, "e!"); + doQuery("f1!f2!doc5,d!doc3,f1!f2!doc5/5", "q", "*:*", shardKeys, "f1/8!"); // try using shards parameter - doQuery("b!doc1", "q","*:*", "shards",bucket1); - doQuery("c!doc2", "q","*:*", "shards",bucket2); - doQuery("d!doc3,f1!f2!doc5,f1!f2!doc5/5", "q","*:*", "shards",bucket3); - doQuery("e!doc4", "q","*:*", "shards",bucket4); - - - doQuery("b!doc1,c!doc2", "q","*:*", shardKeys,"b!,c!"); - doQuery("b!doc1,e!doc4", "q","*:*", shardKeys,"b!,e!"); - - doQuery("b!doc1,c!doc2", "q","*:*", shardKeys,"b,c"); // query shards that would contain *documents* "b" and "c" (i.e. not prefixes). The upper bits are the same, so the shards should be the same. - - doQuery("b!doc1,c!doc2", "q","*:*", shardKeys,"b/1!"); // top bit of hash(b)==1, so shard1 and shard2 - doQuery("d!doc3,e!doc4,f1!f2!doc5,f1!f2!doc5/5", "q","*:*", shardKeys,"d/1!"); // top bit of hash(b)==0, so shard3 and shard4 - - doQuery("b!doc1,c!doc2", "q","*:*", shardKeys,"b!,c!"); - - doQuery("b!doc1,f1!f2!doc5,c!doc2,d!doc3,e!doc4,f1!f2!doc5/5", "q","*:*", shardKeys,"foo/0!"); + doQuery("b!doc1", "q", "*:*", "shards", bucket1); + doQuery("c!doc2", "q", "*:*", "shards", bucket2); + doQuery("d!doc3,f1!f2!doc5,f1!f2!doc5/5", "q", "*:*", "shards", bucket3); + doQuery("e!doc4", "q", "*:*", "shards", bucket4); + + doQuery("b!doc1,c!doc2", "q", "*:*", shardKeys, "b!,c!"); + doQuery("b!doc1,e!doc4", "q", "*:*", shardKeys, "b!,e!"); + + doQuery( + "b!doc1,c!doc2", + "q", + "*:*", + shardKeys, + "b,c"); // query shards that would contain *documents* "b" and "c" (i.e. not prefixes). The + // upper bits are the same, so the shards should be the same. 
+ + doQuery( + "b!doc1,c!doc2", + "q", + "*:*", + shardKeys, + "b/1!"); // top bit of hash(b)==1, so shard1 and shard2 + doQuery( + "d!doc3,e!doc4,f1!f2!doc5,f1!f2!doc5/5", + "q", + "*:*", + shardKeys, + "d/1!"); // top bit of hash(b)==0, so shard3 and shard4 + + doQuery("b!doc1,c!doc2", "q", "*:*", shardKeys, "b!,c!"); + + doQuery("b!doc1,f1!f2!doc5,c!doc2,d!doc3,e!doc4,f1!f2!doc5/5", "q", "*:*", shardKeys, "foo/0!"); // test targeting deleteByQuery at only certain shards - doDBQ("*:*", shardKeys,"b!"); + doDBQ("*:*", shardKeys, "b!"); commit(); - doQuery("c!doc2,d!doc3,e!doc4,f1!f2!doc5,f1!f2!doc5/5", "q","*:*"); + doQuery("c!doc2,d!doc3,e!doc4,f1!f2!doc5,f1!f2!doc5/5", "q", "*:*"); doAddDoc("b!doc1"); - doDBQ("*:*", shardKeys,"f1!"); + doDBQ("*:*", shardKeys, "f1!"); commit(); - doQuery("b!doc1,c!doc2,e!doc4", "q","*:*"); + doQuery("b!doc1,c!doc2,e!doc4", "q", "*:*"); doAddDoc("f1!f2!doc5"); doAddDoc("d!doc3"); - doDBQ("*:*", shardKeys,"c!"); + doDBQ("*:*", shardKeys, "c!"); commit(); - doQuery("b!doc1,f1!f2!doc5,d!doc3,e!doc4", "q","*:*"); + doQuery("b!doc1,f1!f2!doc5,d!doc3,e!doc4", "q", "*:*"); doAddDoc("c!doc2"); - doDBQ("*:*", shardKeys,"d!,e!"); + doDBQ("*:*", shardKeys, "d!,e!"); commit(); - doQuery("b!doc1,c!doc2", "q","*:*"); + doQuery("b!doc1,c!doc2", "q", "*:*"); doAddDoc("d!doc3"); doAddDoc("e!doc4"); doAddDoc("f1!f2!doc5"); @@ -208,16 +233,16 @@ private void doHashingTest() throws Exception { doAddDoc("b!"); doAddDoc("c!doc1"); commit(); - doQuery("b!,c!doc1", "q","*:*"); + doQuery("b!,c!doc1", "q", "*:*"); UpdateRequest req = new UpdateRequest(); req.deleteById("b!"); req.process(cloudClient); commit(); - doQuery("c!doc1", "q","*:*"); + doQuery("c!doc1", "q", "*:*"); doDBQ("id:b!"); commit(); - doQuery("c!doc1", "q","*:*"); + doQuery("c!doc1", "q", "*:*"); doDBQ("*:*"); commit(); @@ -230,66 +255,65 @@ private void doHashingTest() throws Exception { doAddDoc("f1!f2!doc5"); doAddDoc("f1!f2!doc5/5"); commit(); - doQuery("a!b!,b!doc1,c!doc2,d!doc3,e!doc4,f1!f2!doc5,f1!f2!doc5/5", "q","*:*"); + doQuery("a!b!,b!doc1,c!doc2,d!doc3,e!doc4,f1!f2!doc5,f1!f2!doc5/5", "q", "*:*"); } - public void doTestNumRequests() throws Exception { log.info("### STARTING doTestNumRequests"); List runners = shardToJetty.get(bucket1); CloudJettyRunner leader = shardToLeaderJetty.get(bucket1); - CloudJettyRunner replica = null; + CloudJettyRunner replica = null; for (CloudJettyRunner r : runners) { if (r != leader) replica = r; } long nStart = getNumRequests(); - leader.client.solrClient.add( sdoc("id","b!doc1") ); + leader.client.solrClient.add(sdoc("id", "b!doc1")); long nEnd = getNumRequests(); - assertEquals(2, nEnd - nStart); // one request to leader, which makes another to a replica - + assertEquals(2, nEnd - nStart); // one request to leader, which makes another to a replica nStart = getNumRequests(); - replica.client.solrClient.add( sdoc("id","b!doc1") ); + replica.client.solrClient.add(sdoc("id", "b!doc1")); nEnd = getNumRequests(); - assertEquals(3, nEnd - nStart); // orig request + replica forwards to leader, which forward back to replica. + // orig request + replica forwards to leader, which forward back to replica. + assertEquals(3, nEnd - nStart); nStart = getNumRequests(); - replica.client.solrClient.add( sdoc("id","b!doc1") ); + replica.client.solrClient.add(sdoc("id", "b!doc1")); nEnd = getNumRequests(); - assertEquals(3, nEnd - nStart); // orig request + replica forwards to leader, which forward back to replica. 
+ // orig request + replica forwards to leader, which forward back to replica. + assertEquals(3, nEnd - nStart); CloudJettyRunner leader2 = shardToLeaderJetty.get(bucket2); - nStart = getNumRequests(); - replica.client.solrClient.query( params("q","*:*", "shards",bucket1) ); + replica.client.solrClient.query(params("q", "*:*", "shards", bucket1)); nEnd = getNumRequests(); - assertEquals(1, nEnd - nStart); // short circuit should prevent distrib search + assertEquals(1, nEnd - nStart); // short circuit should prevent distrib search nStart = getNumRequests(); - replica.client.solrClient.query( params("q","*:*", ShardParams._ROUTE_, "b!") ); + replica.client.solrClient.query(params("q", "*:*", ShardParams._ROUTE_, "b!")); nEnd = getNumRequests(); - assertEquals(1, nEnd - nStart); // short circuit should prevent distrib search + assertEquals(1, nEnd - nStart); // short circuit should prevent distrib search nStart = getNumRequests(); - leader2.client.solrClient.query( params("q","*:*", ShardParams._ROUTE_, "b!") ); + leader2.client.solrClient.query(params("q", "*:*", ShardParams._ROUTE_, "b!")); nEnd = getNumRequests(); - assertEquals(3, nEnd - nStart); // original + 2 phase distrib search. we could improve this! + assertEquals(3, nEnd - nStart); // original + 2 phase distrib search. we could improve this! nStart = getNumRequests(); - leader2.client.solrClient.query( params("q","*:*") ); + leader2.client.solrClient.query(params("q", "*:*")); nEnd = getNumRequests(); - assertEquals(9, nEnd - nStart); // original + 2 phase distrib search * 4 shards. + assertEquals(9, nEnd - nStart); // original + 2 phase distrib search * 4 shards. nStart = getNumRequests(); - leader2.client.solrClient.query( params("q","*:*", ShardParams._ROUTE_, "b!,d!") ); + leader2.client.solrClient.query(params("q", "*:*", ShardParams._ROUTE_, "b!,d!")); nEnd = getNumRequests(); - assertEquals(5, nEnd - nStart); // original + 2 phase distrib search * 2 shards. + assertEquals(5, nEnd - nStart); // original + 2 phase distrib search * 2 shards. nStart = getNumRequests(); - leader2.client.solrClient.query( params("q","*:*", ShardParams._ROUTE_, "b!,f1!f2!") ); + leader2.client.solrClient.query(params("q", "*:*", ShardParams._ROUTE_, "b!,f1!f2!")); nEnd = getNumRequests(); assertEquals(5, nEnd - nStart); } @@ -301,14 +325,13 @@ public void doAtomicUpdate() throws Exception { int expectedVal = 0; for (SolrClient client : clients) { - client.add(sdoc("id", "b!doc", "foo_i", map("inc",1))); + client.add(sdoc("id", "b!doc", "foo_i", map("inc", 1))); expectedVal++; - QueryResponse rsp = client.query(params("qt","/get", "id","b!doc")); - Object val = ((Map)rsp.getResponse().get("doc")).get("foo_i"); - assertEquals((Integer)expectedVal, val); + QueryResponse rsp = client.query(params("qt", "/get", "id", "b!doc")); + Object val = ((Map) rsp.getResponse().get("doc")).get("foo_i"); + assertEquals((Integer) expectedVal, val); } - } long getNumRequests() { @@ -319,9 +342,8 @@ long getNumRequests() { return n; } - void doAddDoc(String id) throws Exception { - index("id",id); + index("id", id); // todo - target diff servers and use cloud clients as well as non-cloud clients } @@ -341,5 +363,4 @@ void doDBQ(String q, String... 
reqParams) throws Exception { public void tearDown() throws Exception { super.tearDown(); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/ShardTermsTest.java b/solr/core/src/test/org/apache/solr/cloud/ShardTermsTest.java index ec20cec79e1..a7a4685b380 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ShardTermsTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ShardTermsTest.java @@ -17,13 +17,12 @@ package org.apache.solr.cloud; -import org.apache.solr.SolrTestCase; -import org.apache.solr.client.solrj.cloud.ShardTerms; -import org.junit.Test; - import java.util.Collections; import java.util.HashMap; import java.util.Map; +import org.apache.solr.SolrTestCase; +import org.apache.solr.client.solrj.cloud.ShardTerms; +import org.junit.Test; public class ShardTermsTest extends SolrTestCase { @Test diff --git a/solr/core/src/test/org/apache/solr/cloud/SliceStateTest.java b/solr/core/src/test/org/apache/solr/cloud/SliceStateTest.java index 42b7710318a..f12b9597ccb 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SliceStateTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SliceStateTest.java @@ -20,7 +20,6 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; @@ -34,7 +33,7 @@ @SolrTestCaseJ4.SuppressSSL public class SliceStateTest extends SolrTestCaseJ4 { - + @Test public void testDefaultSliceState() { Map collectionStates = new HashMap<>(); @@ -55,12 +54,16 @@ public void testDefaultSliceState() { Slice slice = new Slice("shard1", sliceToProps, null, "collection1"); assertSame("Default state not set to active", Slice.State.ACTIVE, slice.getState()); slices.put("shard1", slice); - collectionStates.put("collection1", new DocCollection("collection1", slices, props, DocRouter.DEFAULT)); + collectionStates.put( + "collection1", new DocCollection("collection1", slices, props, DocRouter.DEFAULT)); ClusterState clusterState = new ClusterState(liveNodes, collectionStates); byte[] bytes = Utils.toJSON(clusterState); ClusterState loadedClusterState = ClusterState.createFromJson(-1, bytes, liveNodes); - assertSame("Default state not set to active", Slice.State.ACTIVE, loadedClusterState.getCollection("collection1").getSlice("shard1").getState()); + assertSame( + "Default state not set to active", + Slice.State.ACTIVE, + loadedClusterState.getCollection("collection1").getSlice("shard1").getState()); } } diff --git a/solr/core/src/test/org/apache/solr/cloud/SolrCLIZkUtilsTest.java b/solr/core/src/test/org/apache/solr/cloud/SolrCLIZkUtilsTest.java index f0ce5d7a961..2c308694650 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SolrCLIZkUtilsTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SolrCLIZkUtilsTest.java @@ -31,7 +31,6 @@ import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; import java.util.List; - import org.apache.solr.common.cloud.SolrZkClient; import org.apache.solr.common.cloud.ZkMaintenanceUtils; import org.apache.solr.util.SolrCLI; @@ -46,11 +45,11 @@ public class SolrCLIZkUtilsTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { configureCluster(1) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .configure(); zkAddr = cluster.getZkServer().getZkAddress(); zkClient = new 
SolrZkClient(zkAddr, 30000); - } @AfterClass @@ -78,58 +77,86 @@ public void testUpconfig() throws Exception { // Now just use a name in the configsets directory, do we find it? configSet = TEST_PATH().resolve("configsets"); - String[] args = new String[]{ - "-confname", "upconfig2", - "-confdir", "cloud-subdirs", - "-zkHost", zkAddr, - "-configsetsDir", configSet.toAbsolutePath().toString(), - }; + String[] args = + new String[] { + "-confname", + "upconfig2", + "-confdir", + "cloud-subdirs", + "-zkHost", + zkAddr, + "-configsetsDir", + configSet.toAbsolutePath().toString(), + }; SolrCLI.ConfigSetUploadTool tool = new SolrCLI.ConfigSetUploadTool(); - int res = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); + int res = + tool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); assertEquals("tool should have returned 0 for success ", 0, res); // Now do we have that config up on ZK? verifyZkLocalPathsMatch(srcPathCheck, "/configs/upconfig2"); // do we barf on a bogus path? - args = new String[]{ - "-confname", "upconfig3", - "-confdir", "nothinghere", - "-zkHost", zkAddr, - "-configsetsDir", configSet.toAbsolutePath().toString(), - }; - - res = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); + args = + new String[] { + "-confname", + "upconfig3", + "-confdir", + "nothinghere", + "-zkHost", + zkAddr, + "-configsetsDir", + configSet.toAbsolutePath().toString(), + }; + + res = + tool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); assertTrue("tool should have returned non-zero for failure ", 0 != res); - String content = new String(zkClient.getData("/configs/upconfig2/schema.xml", null, null, true), StandardCharsets.UTF_8); - assertTrue("There should be content in the node! ", content.contains("Apache Software Foundation")); - + String content = + new String( + zkClient.getData("/configs/upconfig2/schema.xml", null, null, true), + StandardCharsets.UTF_8); + assertTrue( + "There should be content in the node! ", content.contains("Apache Software Foundation")); } @Test public void testDownconfig() throws Exception { - Path tmp = Paths.get(createTempDir("downConfigNewPlace").toAbsolutePath().toString(), "myconfset"); + Path tmp = + Paths.get(createTempDir("downConfigNewPlace").toAbsolutePath().toString(), "myconfset"); + + // First we need a configset on ZK to bring down. - // First we need a configset on ZK to bring down. - Path configSet = TEST_PATH().resolve("configsets"); Path srcPathCheck = configSet.resolve("cloud-subdirs").resolve("conf"); AbstractDistribZkTestBase.copyConfigUp(configSet, "cloud-subdirs", "downconfig1", zkAddr); // Now do we have that config up on ZK? 
verifyZkLocalPathsMatch(srcPathCheck, "/configs/downconfig1"); - String[] args = new String[]{ - "-confname", "downconfig1", - "-confdir", tmp.toAbsolutePath().toString(), - "-zkHost", zkAddr, - }; + String[] args = + new String[] { + "-confname", + "downconfig1", + "-confdir", + tmp.toAbsolutePath().toString(), + "-zkHost", + zkAddr, + }; SolrCLI.ConfigSetDownloadTool downTool = new SolrCLI.ConfigSetDownloadTool(); - int res = downTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(downTool.getOptions()), args)); + int res = + downTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(downTool.getOptions()), args)); assertEquals("Download should have succeeded.", 0, res); - verifyZkLocalPathsMatch(Paths.get(tmp.toAbsolutePath().toString(), "conf"), "/configs/downconfig1"); + verifyZkLocalPathsMatch( + Paths.get(tmp.toAbsolutePath().toString(), "conf"), "/configs/downconfig1"); // Insure that empty files don't become directories (SOLR-11198) @@ -140,19 +167,26 @@ public void testDownconfig() throws Exception { AbstractDistribZkTestBase.copyConfigUp(tmp.getParent(), "myconfset", "downconfig2", zkAddr); Path tmp2 = createTempDir("downConfigNewPlace2"); downTool = new SolrCLI.ConfigSetDownloadTool(); - args = new String[]{ - "-confname", "downconfig2", - "-confdir", tmp2.toAbsolutePath().toString(), - "-zkHost", zkAddr, - }; - - res = downTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(downTool.getOptions()), args)); + args = + new String[] { + "-confname", + "downconfig2", + "-confdir", + tmp2.toAbsolutePath().toString(), + "-zkHost", + zkAddr, + }; + + res = + downTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(downTool.getOptions()), args)); assertEquals("Download should have succeeded.", 0, res); - verifyZkLocalPathsMatch(Paths.get(tmp.toAbsolutePath().toString(), "conf"), "/configs/downconfig2"); + verifyZkLocalPathsMatch( + Paths.get(tmp.toAbsolutePath().toString(), "conf"), "/configs/downconfig2"); // And insure the empty file is a text file Path destEmpty = Paths.get(tmp2.toAbsolutePath().toString(), "conf", "stopwords", "emptyfile"); assertTrue("Empty files should NOT be copied down as directories", destEmpty.toFile().isFile()); - } @Test @@ -165,149 +199,234 @@ public void testCp() throws Exception { AbstractDistribZkTestBase.copyConfigUp(configSet, "cloud-subdirs", "cp1", zkAddr); // Now copy it somewhere else on ZK. 
- String[] args = new String[]{ - "-src", "zk:/configs/cp1", - "-dst", "zk:/cp2", - "-recurse", "true", - "-zkHost", zkAddr, - }; + String[] args = + new String[] { + "-src", "zk:/configs/cp1", + "-dst", "zk:/cp2", + "-recurse", "true", + "-zkHost", zkAddr, + }; SolrCLI.ZkCpTool cpTool = new SolrCLI.ZkCpTool(); - int res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + int res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertEquals("Copy from zk -> zk should have succeeded.", 0, res); verifyZnodesMatch("/configs/cp1", "/cp2"); - // try with zk->local Path tmp = createTempDir("tmpNewPlace2"); - args = new String[]{ - "-src", "zk:/configs/cp1", - "-dst", "file:" + tmp.toAbsolutePath().toString(), - "-recurse", "true", - "-zkHost", zkAddr, - }; - - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + args = + new String[] { + "-src", + "zk:/configs/cp1", + "-dst", + "file:" + tmp.toAbsolutePath().toString(), + "-recurse", + "true", + "-zkHost", + zkAddr, + }; + + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertEquals("Copy should have succeeded.", 0, res); verifyZkLocalPathsMatch(tmp, "/configs/cp1"); - // try with zk->local no file: prefix tmp = createTempDir("tmpNewPlace3"); - args = new String[]{ - "-src", "zk:/configs/cp1", - "-dst", tmp.toAbsolutePath().toString(), - "-recurse", "true", - "-zkHost", zkAddr, - }; - - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + args = + new String[] { + "-src", + "zk:/configs/cp1", + "-dst", + tmp.toAbsolutePath().toString(), + "-recurse", + "true", + "-zkHost", + zkAddr, + }; + + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertEquals("Copy should have succeeded.", 0, res); verifyZkLocalPathsMatch(tmp, "/configs/cp1"); - // try with local->zk - args = new String[]{ - "-src", srcPathCheck.toAbsolutePath().toString(), - "-dst", "zk:/cp3", - "-recurse", "true", - "-zkHost", zkAddr, - }; - - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + args = + new String[] { + "-src", + srcPathCheck.toAbsolutePath().toString(), + "-dst", + "zk:/cp3", + "-recurse", + "true", + "-zkHost", + zkAddr, + }; + + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertEquals("Copy should have succeeded.", 0, res); verifyZkLocalPathsMatch(srcPathCheck, "/cp3"); // try with local->zk, file: specified - args = new String[]{ - "-src", "file:" + srcPathCheck.toAbsolutePath().toString(), - "-dst", "zk:/cp4", - "-recurse", "true", - "-zkHost", zkAddr, - }; - - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + args = + new String[] { + "-src", + "file:" + srcPathCheck.toAbsolutePath().toString(), + "-dst", + "zk:/cp4", + "-recurse", + "true", + "-zkHost", + zkAddr, + }; + + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertEquals("Copy should have succeeded.", 0, res); verifyZkLocalPathsMatch(srcPathCheck, "/cp4"); // try with recurse not specified - args = new String[]{ - 
"-src", "file:" + srcPathCheck.toAbsolutePath().toString(), - "-dst", "zk:/cp5Fail", - "-zkHost", zkAddr, - }; - - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + args = + new String[] { + "-src", + "file:" + srcPathCheck.toAbsolutePath().toString(), + "-dst", + "zk:/cp5Fail", + "-zkHost", + zkAddr, + }; + + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertTrue("Copy should NOT have succeeded, recurse not specified.", 0 != res); // try with recurse = false - args = new String[]{ - "-src", "file:" + srcPathCheck.toAbsolutePath().toString(), - "-dst", "zk:/cp6Fail", - "-recurse", "false", - "-zkHost", zkAddr, - }; - - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + args = + new String[] { + "-src", + "file:" + srcPathCheck.toAbsolutePath().toString(), + "-dst", + "zk:/cp6Fail", + "-recurse", + "false", + "-zkHost", + zkAddr, + }; + + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertTrue("Copy should NOT have succeeded, recurse set to false.", 0 != res); - - // NOTE: really can't test copying to '.' because the test framework doesn't allow altering the source tree - // and at least IntelliJ's CWD is in the source tree. + // NOTE: really can't test copying to '.' because the test framework doesn't allow altering the + // source tree and at least IntelliJ's CWD is in the source tree. // copy to local ending in separator - //src and cp3 and cp4 are valid - String localSlash = tmp.normalize() + File.separator +"cpToLocal" + File.separator; - args = new String[]{ - "-src", "zk:/cp3/schema.xml", - "-dst", localSlash, - "-recurse", "false", - "-zkHost", zkAddr, - }; - - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + // src and cp3 and cp4 are valid + String localSlash = tmp.normalize() + File.separator + "cpToLocal" + File.separator; + args = + new String[] { + "-src", "zk:/cp3/schema.xml", "-dst", localSlash, "-recurse", "false", "-zkHost", zkAddr, + }; + + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertEquals("Copy should nave created intermediate directory locally.", 0, res); - assertTrue("File should have been copied to a directory successfully", Files.exists(Paths.get(localSlash, "schema.xml"))); + assertTrue( + "File should have been copied to a directory successfully", + Files.exists(Paths.get(localSlash, "schema.xml"))); // copy to ZK ending in '/'. 
- //src and cp3 are valid - args = new String[]{ - "-src", "file:" + srcPathCheck.normalize().toAbsolutePath().toString() + File.separator + "solrconfig.xml", - "-dst", "zk:/powerup/", - "-recurse", "false", - "-zkHost", zkAddr, - }; - - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + // src and cp3 are valid + args = + new String[] { + "-src", + "file:" + + srcPathCheck.normalize().toAbsolutePath().toString() + + File.separator + + "solrconfig.xml", + "-dst", + "zk:/powerup/", + "-recurse", + "false", + "-zkHost", + zkAddr, + }; + + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertEquals("Copy up to intermediate file should have succeeded.", 0, res); - assertTrue("Should have created an intermediate node on ZK", zkClient.exists("/powerup/solrconfig.xml", true)); + assertTrue( + "Should have created an intermediate node on ZK", + zkClient.exists("/powerup/solrconfig.xml", true)); // copy individual file up - //src and cp3 are valid - args = new String[]{ - "-src", "file:" + srcPathCheck.normalize().toAbsolutePath().toString() + File.separator + "solrconfig.xml", - "-dst", "zk:/copyUpFile.xml", - "-recurse", "false", - "-zkHost", zkAddr, - }; - - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + // src and cp3 are valid + args = + new String[] { + "-src", + "file:" + + srcPathCheck.normalize().toAbsolutePath().toString() + + File.separator + + "solrconfig.xml", + "-dst", + "zk:/copyUpFile.xml", + "-recurse", + "false", + "-zkHost", + zkAddr, + }; + + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertEquals("Copy up to named file should have succeeded.", 0, res); - assertTrue("Should NOT have created an intermediate node on ZK", zkClient.exists("/copyUpFile.xml", true)); + assertTrue( + "Should NOT have created an intermediate node on ZK", + zkClient.exists("/copyUpFile.xml", true)); // copy individual file down - //src and cp3 are valid - - String localNamed = tmp.normalize().toString() + File.separator + "localnamed" + File.separator + "renamed.txt"; - args = new String[]{ - "-src", "zk:/cp4/solrconfig.xml", - "-dst", "file:" + localNamed, - "-recurse", "false", - "-zkHost", zkAddr, - }; - - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + // src and cp3 are valid + + String localNamed = + tmp.normalize().toString() + File.separator + "localnamed" + File.separator + "renamed.txt"; + args = + new String[] { + "-src", + "zk:/cp4/solrconfig.xml", + "-dst", + "file:" + localNamed, + "-recurse", + "false", + "-zkHost", + zkAddr, + }; + + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertEquals("Copy to local named file should have succeeded.", 0, res); Path locPath = Paths.get(localNamed); assertTrue("Should have found file: " + localNamed, Files.exists(locPath)); @@ -322,28 +441,41 @@ public void testCp() throws Exception { } assertTrue("Should have found Apache Software Foundation in the file! ", foundApache); - // Test copy from somewhere in ZK to the root of ZK. 
- args = new String[]{ - "-src", "zk:/cp4/solrconfig.xml", - "-dst", "zk:/", - "-recurse", "false", - "-zkHost", zkAddr, - }; - - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + args = + new String[] { + "-src", "zk:/cp4/solrconfig.xml", + "-dst", "zk:/", + "-recurse", "false", + "-zkHost", zkAddr, + }; + + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertEquals("Copy from somewhere in ZK to ZK root should have succeeded.", 0, res); - assertTrue("Should have found znode /solrconfig.xml: ", zkClient.exists("/solrconfig.xml", true)); - - // Check that the form path/ works for copying files up. Should append the last bit of the source path to the dst - args = new String[]{ - "-src", "file:" + srcPathCheck.toAbsolutePath().toString(), - "-dst", "zk:/cp7/", - "-recurse", "true", - "-zkHost", zkAddr, - }; - - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + assertTrue( + "Should have found znode /solrconfig.xml: ", zkClient.exists("/solrconfig.xml", true)); + + // Check that the form path/ works for copying files up. Should append the last bit of the + // source path to the dst + args = + new String[] { + "-src", + "file:" + srcPathCheck.toAbsolutePath().toString(), + "-dst", + "zk:/cp7/", + "-recurse", + "true", + "-zkHost", + zkAddr, + }; + + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertEquals("Copy should have succeeded.", 0, res); verifyZkLocalPathsMatch(srcPathCheck, "/cp7/" + srcPathCheck.getFileName().toString()); @@ -354,31 +486,51 @@ public void testCp() throws Exception { lines.add("{Some Arbitrary Data}"); Files.write(file, lines, Charset.forName("UTF-8")); // First, just copy the data up the cp7 since it's a directory. - args = new String[]{ - "-src", "file:" + file.toAbsolutePath().toString(), - "-dst", "zk:/cp7/conf/stopwords/", - "-recurse", "false", - "-zkHost", zkAddr, - }; - - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + args = + new String[] { + "-src", + "file:" + file.toAbsolutePath().toString(), + "-dst", + "zk:/cp7/conf/stopwords/", + "-recurse", + "false", + "-zkHost", + zkAddr, + }; + + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertEquals("Copy should have succeeded.", 0, res); - String content = new String(zkClient.getData("/cp7/conf/stopwords", null, null, true), StandardCharsets.UTF_8); + String content = + new String( + zkClient.getData("/cp7/conf/stopwords", null, null, true), StandardCharsets.UTF_8); assertTrue("There should be content in the node! 
", content.contains("{Some Arbitrary Data}")); - - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertEquals("Copy should have succeeded.", 0, res); tmp = createTempDir("cp8"); - args = new String[]{ - "-src", "zk:/cp7", - "-dst", "file:" + tmp.toAbsolutePath().toString(), - "-recurse", "true", - "-zkHost", zkAddr, - }; - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + args = + new String[] { + "-src", + "zk:/cp7", + "-dst", + "file:" + tmp.toAbsolutePath().toString(), + "-recurse", + "true", + "-zkHost", + zkAddr, + }; + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertEquals("Copy should have succeeded.", 0, res); // Next, copy cp7 down and verify that zknode.data exists for cp7 @@ -386,67 +538,109 @@ public void testCp() throws Exception { assertTrue("znode.data should have been copied down", zData.toFile().exists()); // Finally, copy up to cp8 and verify that the data is up there. - args = new String[]{ - "-src", "file:" + tmp.toAbsolutePath().toString(), - "-dst", "zk:/cp9", - "-recurse", "true", - "-zkHost", zkAddr, - }; - - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + args = + new String[] { + "-src", + "file:" + tmp.toAbsolutePath().toString(), + "-dst", + "zk:/cp9", + "-recurse", + "true", + "-zkHost", + zkAddr, + }; + + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertEquals("Copy should have succeeded.", 0, res); - content = new String(zkClient.getData("/cp9/conf/stopwords", null, null, true), StandardCharsets.UTF_8); + content = + new String( + zkClient.getData("/cp9/conf/stopwords", null, null, true), StandardCharsets.UTF_8); assertTrue("There should be content in the node! 
", content.contains("{Some Arbitrary Data}")); // Copy an individual empty file up and back down and insure it's still a file Path emptyFile = Paths.get(tmp.toAbsolutePath().toString(), "conf", "stopwords", "emptyfile"); Files.createFile(emptyFile); - args = new String[]{ - "-src", "file:" + emptyFile.toAbsolutePath().toString(), - "-dst", "zk:/cp7/conf/stopwords/emptyfile", - "-recurse", "false", - "-zkHost", zkAddr, - }; - - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + args = + new String[] { + "-src", + "file:" + emptyFile.toAbsolutePath().toString(), + "-dst", + "zk:/cp7/conf/stopwords/emptyfile", + "-recurse", + "false", + "-zkHost", + zkAddr, + }; + + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertEquals("Copy should have succeeded.", 0, res); Path tmp2 = createTempDir("cp9"); Path emptyDest = Paths.get(tmp2.toAbsolutePath().toString(), "emptyfile"); - args = new String[]{ - "-src", "zk:/cp7/conf/stopwords/emptyfile", - "-dst", "file:" + emptyDest.toAbsolutePath().toString(), - "-recurse", "false", - "-zkHost", zkAddr, - }; - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + args = + new String[] { + "-src", + "zk:/cp7/conf/stopwords/emptyfile", + "-dst", + "file:" + emptyDest.toAbsolutePath().toString(), + "-recurse", + "false", + "-zkHost", + zkAddr, + }; + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertEquals("Copy should have succeeded.", 0, res); assertTrue("Empty files should NOT be copied down as directories", emptyDest.toFile().isFile()); // Now with recursive copy - args = new String[]{ - "-src", "file:" + emptyFile.getParent().getParent().toString(), - "-dst", "zk:/cp10", - "-recurse", "true", - "-zkHost", zkAddr, - }; - - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + args = + new String[] { + "-src", + "file:" + emptyFile.getParent().getParent().toString(), + "-dst", + "zk:/cp10", + "-recurse", + "true", + "-zkHost", + zkAddr, + }; + + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertEquals("Copy should have succeeded.", 0, res); // Now copy it all back and make sure empty file is still a file when recursively copying. tmp2 = createTempDir("cp10"); - args = new String[]{ - "-src", "zk:/cp10", - "-dst", "file:" + tmp2.toAbsolutePath().toString(), - "-recurse", "true", - "-zkHost", zkAddr, - }; - res = cpTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); + args = + new String[] { + "-src", + "zk:/cp10", + "-dst", + "file:" + tmp2.toAbsolutePath().toString(), + "-recurse", + "true", + "-zkHost", + zkAddr, + }; + res = + cpTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(cpTool.getOptions()), args)); assertEquals("Copy should have succeeded.", 0, res); Path locEmpty = Paths.get(tmp2.toAbsolutePath().toString(), "stopwords", "emptyfile"); @@ -464,15 +658,19 @@ public void testMv() throws Exception { AbstractDistribZkTestBase.copyConfigUp(configSet, "cloud-subdirs", "mv1", zkAddr); // Now move it somewhere else. 
- String[] args = new String[]{ - "-src", "zk:/configs/mv1", - "-dst", "zk:/mv2", - "-zkHost", zkAddr, - }; + String[] args = + new String[] { + "-src", "zk:/configs/mv1", + "-dst", "zk:/mv2", + "-zkHost", zkAddr, + }; SolrCLI.ZkMvTool mvTool = new SolrCLI.ZkMvTool(); - int res = mvTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(mvTool.getOptions()), args)); + int res = + mvTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(mvTool.getOptions()), args)); assertEquals("Move should have succeeded.", 0, res); // Now does the moved directory match the original on disk? @@ -482,24 +680,30 @@ public void testMv() throws Exception { // Files are in mv2 // Now fail if we specify "file:". Everything should still be in /mv2 - args = new String[]{ - "-src", "file:" + File.separator + "mv2", - "-dst", "/mv3", - "-zkHost", zkAddr, - }; + args = + new String[] { + "-src", "file:" + File.separator + "mv2", "-dst", "/mv3", "-zkHost", zkAddr, + }; // Still in mv2 - res = mvTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(mvTool.getOptions()), args)); + res = + mvTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(mvTool.getOptions()), args)); assertTrue("Move should NOT have succeeded with file: specified.", 0 != res); // Let's move it to yet another place with no zk: prefix. - args = new String[]{ - "-src", "/mv2", - "-dst", "/mv4", - "-zkHost", zkAddr, - }; - - res = mvTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(mvTool.getOptions()), args)); + args = + new String[] { + "-src", "/mv2", + "-dst", "/mv4", + "-zkHost", zkAddr, + }; + + res = + mvTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(mvTool.getOptions()), args)); assertEquals("Move should have succeeded.", 0, res); assertFalse("Znode /mv3 really should be gone", zkClient.exists("/mv3", true)); @@ -507,30 +711,45 @@ public void testMv() throws Exception { // Now does the moved directory match the original on disk? verifyZkLocalPathsMatch(srcPathCheck, "/mv4"); - args = new String[]{ - "-src", "/mv4/solrconfig.xml", - "-dst", "/testmvsingle/solrconfig.xml", - "-zkHost", zkAddr, - }; - - res = mvTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(mvTool.getOptions()), args)); + args = + new String[] { + "-src", "/mv4/solrconfig.xml", + "-dst", "/testmvsingle/solrconfig.xml", + "-zkHost", zkAddr, + }; + + res = + mvTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(mvTool.getOptions()), args)); assertEquals("Move should have succeeded.", 0, res); - assertTrue("Should be able to move a single file", zkClient.exists("/testmvsingle/solrconfig.xml", true)); + assertTrue( + "Should be able to move a single file", + zkClient.exists("/testmvsingle/solrconfig.xml", true)); zkClient.makePath("/parentNode", true); // what happens if the destination ends with a slash? 
- args = new String[]{ - "-src", "/mv4/schema.xml", - "-dst", "/parentnode/", - "-zkHost", zkAddr, - }; - - res = mvTool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(mvTool.getOptions()), args)); + args = + new String[] { + "-src", "/mv4/schema.xml", + "-dst", "/parentnode/", + "-zkHost", zkAddr, + }; + + res = + mvTool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(mvTool.getOptions()), args)); assertEquals("Move should have succeeded.", 0, res); - assertTrue("Should be able to move a single file to a parent znode", zkClient.exists("/parentnode/schema.xml", true)); - String content = new String(zkClient.getData("/parentnode/schema.xml", null, null, true), StandardCharsets.UTF_8); - assertTrue("There should be content in the node! ", content.contains("Apache Software Foundation")); + assertTrue( + "Should be able to move a single file to a parent znode", + zkClient.exists("/parentnode/schema.xml", true)); + String content = + new String( + zkClient.getData("/parentnode/schema.xml", null, null, true), StandardCharsets.UTF_8); + assertTrue( + "There should be content in the node! ", content.contains("Apache Software Foundation")); } @Test @@ -541,34 +760,37 @@ public void testLs() throws Exception { AbstractDistribZkTestBase.copyConfigUp(configSet, "cloud-subdirs", "lister", zkAddr); // Should only find a single level. - String[] args = new String[]{ - "-path", "/configs", - "-zkHost", zkAddr, - }; - + String[] args = + new String[] { + "-path", "/configs", "-zkHost", zkAddr, + }; ByteArrayOutputStream baos = new ByteArrayOutputStream(); PrintStream ps = new PrintStream(baos, false, StandardCharsets.UTF_8.name()); SolrCLI.ZkLsTool tool = new SolrCLI.ZkLsTool(ps); - - int res = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); + int res = + tool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); String content = new String(baos.toByteArray(), StandardCharsets.UTF_8); assertEquals("List should have succeeded", res, 0); assertTrue("Return should contain the conf directory", content.contains("lister")); assertFalse("Return should NOT contain a child node", content.contains("solrconfig.xml")); - // simple ls recurse=false - args = new String[]{ - "-path", "/configs", - "-recurse", "false", - "-zkHost", zkAddr, - }; - - - res = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); + args = + new String[] { + "-path", "/configs", + "-recurse", "false", + "-zkHost", zkAddr, + }; + + res = + tool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); content = new String(baos.toByteArray(), StandardCharsets.UTF_8); assertEquals("List should have succeeded", res, 0); @@ -576,14 +798,17 @@ public void testLs() throws Exception { assertFalse("Return should NOT contain a child node", content.contains("solrconfig.xml")); // recurse=true - args = new String[]{ - "-path", "/configs", - "-recurse", "true", - "-zkHost", zkAddr, - }; - - - res = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); + args = + new String[] { + "-path", "/configs", + "-recurse", "true", + "-zkHost", zkAddr, + }; + + res = + tool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); content = new String(baos.toByteArray(), StandardCharsets.UTF_8); 
assertEquals("List should have succeeded", res, 0); @@ -591,49 +816,58 @@ public void testLs() throws Exception { assertTrue("Return should contain a child node", content.contains("solrconfig.xml")); // Saw a case where going from root foo'd, so test it. - args = new String[]{ - "-path", "/", - "-recurse", "true", - "-zkHost", zkAddr, - }; - - - res = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); + args = + new String[] { + "-path", "/", + "-recurse", "true", + "-zkHost", zkAddr, + }; + + res = + tool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); content = new String(baos.toByteArray(), StandardCharsets.UTF_8); assertEquals("List should have succeeded", res, 0); assertTrue("Return should contain the conf directory", content.contains("lister")); assertTrue("Return should contain a child node", content.contains("solrconfig.xml")); - args = new String[]{ - "-path", "/", - "-zkHost", zkAddr, - }; - - res = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); + args = + new String[] { + "-path", "/", "-zkHost", zkAddr, + }; + + res = + tool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); content = new String(baos.toByteArray(), StandardCharsets.UTF_8); assertEquals("List should have succeeded", res, 0); assertFalse("Return should not contain /zookeeper", content.contains("/zookeeper")); // Saw a case where ending in slash foo'd, so test it. - args = new String[]{ - "-path", "/configs/", - "-recurse", "true", - "-zkHost", zkAddr, - }; - - res = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); + args = + new String[] { + "-path", "/configs/", + "-recurse", "true", + "-zkHost", zkAddr, + }; + + res = + tool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); content = new String(baos.toByteArray(), StandardCharsets.UTF_8); assertEquals("List should have succeeded", res, 0); assertTrue("Return should contain the conf directory", content.contains("lister")); assertTrue("Return should contain a child node", content.contains("solrconfig.xml")); - } @Test public void testRm() throws Exception { - + Path configSet = TEST_PATH().resolve("configsets"); Path srcPathCheck = configSet.resolve("cloud-subdirs").resolve("conf"); @@ -641,66 +875,89 @@ public void testRm() throws Exception { AbstractDistribZkTestBase.copyConfigUp(configSet, "cloud-subdirs", "rm2", zkAddr); // Should fail if recurse not set. - String[] args = new String[]{ - "-path", "/configs/rm1", - "-zkHost", zkAddr, - }; + String[] args = + new String[] { + "-path", "/configs/rm1", "-zkHost", zkAddr, + }; SolrCLI.ZkRmTool tool = new SolrCLI.ZkRmTool(); - int res = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); + int res = + tool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); - assertTrue("Should have failed to remove node with children unless -recurse is set to true", res != 0); + assertTrue( + "Should have failed to remove node with children unless -recurse is set to true", res != 0); // Are we sure all the znodes are still there? 
verifyZkLocalPathsMatch(srcPathCheck, "/configs/rm1"); - args = new String[]{ - "-path", "zk:/configs/rm1", - "-recurse", "false", - "-zkHost", zkAddr, - }; - - res = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); - - assertTrue("Should have failed to remove node with children if -recurse is set to false", res != 0); - - args = new String[]{ - "-path", "/configs/rm1", - "-recurse", "true", - "-zkHost", zkAddr, - }; - - res = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); + args = + new String[] { + "-path", "zk:/configs/rm1", + "-recurse", "false", + "-zkHost", zkAddr, + }; + + res = + tool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); + + assertTrue( + "Should have failed to remove node with children if -recurse is set to false", res != 0); + + args = + new String[] { + "-path", "/configs/rm1", + "-recurse", "true", + "-zkHost", zkAddr, + }; + + res = + tool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); assertEquals("Should have removed node /configs/rm1", res, 0); - assertFalse("Znode /configs/toremove really should be gone", zkClient.exists("/configs/rm1", true)); + assertFalse( + "Znode /configs/toremove really should be gone", zkClient.exists("/configs/rm1", true)); // Check that zk prefix also works. - args = new String[]{ - "-path", "zk:/configs/rm2", - "-recurse", "true", - "-zkHost", zkAddr, - }; - - - res = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); + args = + new String[] { + "-path", "zk:/configs/rm2", + "-recurse", "true", + "-zkHost", zkAddr, + }; + + res = + tool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); assertEquals("Should have removed node /configs/rm2", res, 0); - assertFalse("Znode /configs/toremove2 really should be gone", zkClient.exists("/configs/rm2", true)); - + assertFalse( + "Znode /configs/toremove2 really should be gone", zkClient.exists("/configs/rm2", true)); + // This should silently just refuse to do anything to the / or /zookeeper - args = new String[]{ - "-path", "zk:/", - "-recurse", "true", - "-zkHost", zkAddr, - }; + args = + new String[] { + "-path", "zk:/", + "-recurse", "true", + "-zkHost", zkAddr, + }; AbstractDistribZkTestBase.copyConfigUp(configSet, "cloud-subdirs", "rm3", zkAddr); - res = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); + res = + tool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args)); assertFalse("Should fail when trying to remove /.", res == 0); } // Check that all children of fileRoot are children of zkRoot and vice-versa - private void verifyZkLocalPathsMatch(Path fileRoot, String zkRoot) throws IOException, KeeperException, InterruptedException { + private void verifyZkLocalPathsMatch(Path fileRoot, String zkRoot) + throws IOException, KeeperException, InterruptedException { verifyAllFilesAreZNodes(fileRoot, zkRoot); verifyAllZNodesAreFiles(fileRoot, zkRoot); } @@ -710,55 +967,71 @@ private static boolean isEphemeral(String zkPath) throws KeeperException, Interr return znodeStat.getEphemeralOwner() != 0; } - void verifyAllZNodesAreFiles(Path fileRoot, String zkRoot) throws KeeperException, InterruptedException { + void verifyAllZNodesAreFiles(Path fileRoot, 
String zkRoot) + throws KeeperException, InterruptedException { for (String child : zkClient.getChildren(zkRoot, null, true)) { // Skip ephemeral nodes if (zkRoot.endsWith("/") == false) zkRoot += "/"; if (isEphemeral(zkRoot + child)) continue; - + Path thisPath = Paths.get(fileRoot.toAbsolutePath().toString(), child); - assertTrue("Znode " + child + " should have been found on disk at " + fileRoot.toAbsolutePath().toString(), + assertTrue( + "Znode " + + child + + " should have been found on disk at " + + fileRoot.toAbsolutePath().toString(), Files.exists(thisPath)); verifyAllZNodesAreFiles(thisPath, zkRoot + child); } } void verifyAllFilesAreZNodes(Path fileRoot, String zkRoot) throws IOException { - Files.walkFileTree(fileRoot, new SimpleFileVisitor<Path>() { - void checkPathOnZk(Path path) { - String znode = ZkMaintenanceUtils.createZkNodeName(zkRoot, fileRoot, path); - try { // It's easier to catch this exception and fail than catch it everywher eles. - assertTrue("Should have found " + znode + " on Zookeeper", zkClient.exists(znode, true)); - } catch (Exception e) { - fail("Caught unexpected exception " + e.getMessage() + " Znode we were checking " + znode); - } - } - - @Override - public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { - assertTrue("Path should start at proper place!", file.startsWith(fileRoot)); - checkPathOnZk(file); - return FileVisitResult.CONTINUE; - } - - @Override - public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { - - checkPathOnZk(dir); - return FileVisitResult.CONTINUE; - } - }); + Files.walkFileTree( + fileRoot, + new SimpleFileVisitor<Path>() { + void checkPathOnZk(Path path) { + String znode = ZkMaintenanceUtils.createZkNodeName(zkRoot, fileRoot, path); + try { // It's easier to catch this exception and fail than catch it everywhere else. + assertTrue( + "Should have found " + znode + " on Zookeeper", zkClient.exists(znode, true)); + } catch (Exception e) { + fail( + "Caught unexpected exception " + + e.getMessage() + + " Znode we were checking " + + znode); + } + } + + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) + throws IOException { + assertTrue("Path should start at proper place!", file.startsWith(fileRoot)); + checkPathOnZk(file); + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) + throws IOException { + + checkPathOnZk(dir); + return FileVisitResult.CONTINUE; + } + }); } // Insure that all znodes in first are in second and vice-versa - private void verifyZnodesMatch(String first, String second) throws KeeperException, InterruptedException { + private void verifyZnodesMatch(String first, String second) + throws KeeperException, InterruptedException { verifyFirstZNodesInSecond(first, second); verifyFirstZNodesInSecond(second, first); } - // Note, no folderol here with Windows path names. - private void verifyFirstZNodesInSecond(String first, String second) throws KeeperException, InterruptedException { + // Note, no folderol here with Windows path names. 
+ private void verifyFirstZNodesInSecond(String first, String second) + throws KeeperException, InterruptedException { for (String node : zkClient.getChildren(first, null, true)) { String fNode = first + "/" + node; String sNode = second + "/" + node; diff --git a/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java b/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java index 09d0eac4f79..f954c61dcc9 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java @@ -16,6 +16,10 @@ */ package org.apache.solr.cloud; +import static java.util.Arrays.asList; +import static org.apache.solr.common.util.Utils.fromJSONString; +import static org.apache.solr.common.util.Utils.getObjectByPath; + import java.io.File; import java.lang.invoke.MethodHandles; import java.nio.charset.StandardCharsets; @@ -32,7 +36,6 @@ import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; - import org.apache.commons.cli.CommandLine; import org.apache.http.HttpEntity; import org.apache.http.client.methods.HttpGet; @@ -51,15 +54,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static java.util.Arrays.asList; -import static org.apache.solr.common.util.Utils.fromJSONString; -import static org.apache.solr.common.util.Utils.getObjectByPath; - /** - * Emulates bin/solr -e cloud -noprompt; bin/post -c gettingstarted example/exampledocs/*.xml; - * this test is useful for catching regressions in indexing the example docs in collections that - * use data driven functionality and managed schema features of the default configset - * (configsets/_default). + * Emulates bin/solr -e cloud -noprompt; bin/post -c gettingstarted example/exampledocs/*.xml; this + * test is useful for catching regressions in indexing the example docs in collections that use data + * driven functionality and managed schema features of the default configset (configsets/_default). */ public class SolrCloudExampleTest extends AbstractFullDistribZkTestBase { @@ -79,32 +77,44 @@ public void testLoadDocsIntoGettingStartedCollection() throws Exception { String testCollectionName = "gettingstarted"; File defaultConfigs = new File(ExternalPaths.DEFAULT_CONFIGSET); - assertTrue(defaultConfigs.getAbsolutePath()+" not found!", defaultConfigs.isDirectory()); + assertTrue(defaultConfigs.getAbsolutePath() + " not found!", defaultConfigs.isDirectory()); Set liveNodes = cloudClient.getZkStateReader().getClusterState().getLiveNodes(); if (liveNodes.isEmpty()) - fail("No live nodes found! Cannot create a collection until there is at least 1 live node in the cluster."); + fail( + "No live nodes found! 
Cannot create a collection until there is at least 1 live node in the cluster."); String firstLiveNode = liveNodes.iterator().next(); String solrUrl = cloudClient.getZkStateReader().getBaseUrlForNodeName(firstLiveNode); // create the gettingstarted collection just like the bin/solr script would do - String[] args = new String[] { - "-name", testCollectionName, - "-shards", "2", - "-replicationFactor", "2", - "-confname", testCollectionName, - "-confdir", "_default", - "-configsetsDir", defaultConfigs.getParentFile().getParentFile().getAbsolutePath(), - "-solrUrl", solrUrl - }; - - // NOTE: not calling SolrCLI.main as the script does because it calls System.exit which is a no-no in a JUnit test + String[] args = + new String[] { + "-name", + testCollectionName, + "-shards", + "2", + "-replicationFactor", + "2", + "-confname", + testCollectionName, + "-confdir", + "_default", + "-configsetsDir", + defaultConfigs.getParentFile().getParentFile().getAbsolutePath(), + "-solrUrl", + solrUrl + }; + + // NOTE: not calling SolrCLI.main as the script does because it calls System.exit which is a + // no-no in a JUnit test SolrCLI.CreateCollectionTool tool = new SolrCLI.CreateCollectionTool(); - CommandLine cli = SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args); + CommandLine cli = + SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args); log.info("Creating the '{}' collection using SolrCLI with: {}", testCollectionName, solrUrl); tool.runTool(cli); - assertTrue("Collection '" + testCollectionName + "' doesn't exist after trying to create it!", + assertTrue( + "Collection '" + testCollectionName + "' doesn't exist after trying to create it!", cloudClient.getZkStateReader().getClusterState().hasCollection(testCollectionName)); // verify the collection is usable ... @@ -113,10 +123,13 @@ public void testLoadDocsIntoGettingStartedCollection() throws Exception { cloudClient.setDefaultCollection(testCollectionName); int invalidToolExitStatus = 1; - assertEquals("Collection '" + testCollectionName + "' created even though it already existed", - invalidToolExitStatus, tool.runTool(cli)); + assertEquals( + "Collection '" + testCollectionName + "' created even though it already existed", + invalidToolExitStatus, + tool.runTool(cli)); - // now index docs like bin/post would do but we can't use SimplePostTool because it uses System.exit when + // now index docs like bin/post would do but we can't use SimplePostTool because it uses + // System.exit when // it encounters an error, which JUnit doesn't like ... 
log.info("Created collection, now posting example docs!"); Path exampleDocsDir = Path.of(ExternalPaths.SOURCE_HOME, "example", "exampledocs"); @@ -124,23 +137,28 @@ public void testLoadDocsIntoGettingStartedCollection() throws Exception { List xmlFiles; try (Stream stream = Files.walk(exampleDocsDir, 1)) { - xmlFiles = stream.filter(path -> path.getFileName().toString().endsWith(".xml")) - // don't rely on File.compareTo, it's behavior varies by OS - .sorted(Comparator.comparing(path -> path.getFileName().toString())) - // be explicit about the collection type because we will shuffle it later - .collect(Collectors.toCollection(ArrayList::new)); + xmlFiles = + stream + .filter(path -> path.getFileName().toString().endsWith(".xml")) + // don't rely on File.compareTo, it's behavior varies by OS + .sorted(Comparator.comparing(path -> path.getFileName().toString())) + // be explicit about the collection type because we will shuffle it later + .collect(Collectors.toCollection(ArrayList::new)); } - // force a deterministic random ordering of the files so seeds reproduce regardless of platform/filesystem + // force a deterministic random ordering of the files so seeds reproduce regardless of + // platform/filesystem Collections.shuffle(xmlFiles, new Random(random().nextLong())); // if you add/remove example XML docs, you'll have to fix these expected values int expectedXmlFileCount = 14; int expectedXmlDocCount = 32; - assertEquals("Unexpected # of example XML files in " + exampleDocsDir.toAbsolutePath(), - expectedXmlFileCount, xmlFiles.size()); - + assertEquals( + "Unexpected # of example XML files in " + exampleDocsDir.toAbsolutePath(), + expectedXmlFileCount, + xmlFiles.size()); + for (Path xml : xmlFiles) { if (log.isInfoEnabled()) { log.info("POSTing {}", xml.toAbsolutePath()); @@ -174,10 +192,11 @@ public void testLoadDocsIntoGettingStartedCollection() throws Exception { } protected void doTestHealthcheck(String testCollectionName, String zkHost) throws Exception { - String[] args = new String[]{ - "-collection", testCollectionName, - "-zkHost", zkHost - }; + String[] args = + new String[] { + "-collection", testCollectionName, + "-zkHost", zkHost + }; SolrCLI.HealthcheckTool tool = new SolrCLI.HealthcheckTool(); CommandLine cli = SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args); @@ -185,43 +204,49 @@ protected void doTestHealthcheck(String testCollectionName, String zkHost) throw } protected void doTestDeleteAction(String testCollectionName, String solrUrl) throws Exception { - String[] args = new String[] { - "-name", testCollectionName, - "-solrUrl", solrUrl - }; + String[] args = + new String[] { + "-name", testCollectionName, + "-solrUrl", solrUrl + }; SolrCLI.DeleteTool tool = new SolrCLI.DeleteTool(); CommandLine cli = SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args); assertTrue("Delete action failed!", tool.runTool(cli) == 0); - assertTrue(!SolrCLI.safeCheckCollectionExists(solrUrl, testCollectionName)); // it should not exist anymore + assertTrue( + !SolrCLI.safeCheckCollectionExists( + solrUrl, testCollectionName)); // it should not exist anymore } /** - * Uses the SolrCLI config action to activate soft auto-commits for the getting started collection. + * Uses the SolrCLI config action to activate soft auto-commits for the getting started + * collection. 
*/ protected void doTestConfigUpdate(String testCollectionName, String solrUrl) throws Exception { - if (!solrUrl.endsWith("/")) - solrUrl += "/"; + if (!solrUrl.endsWith("/")) solrUrl += "/"; String configUrl = solrUrl + testCollectionName + "/config"; Map configJson = SolrCLI.getJson(configUrl); - Object maxTimeFromConfig = SolrCLI.atPath("/config/updateHandler/autoSoftCommit/maxTime", configJson); + Object maxTimeFromConfig = + SolrCLI.atPath("/config/updateHandler/autoSoftCommit/maxTime", configJson); assertNotNull(maxTimeFromConfig); assertEquals(-1L, maxTimeFromConfig); String prop = "updateHandler.autoSoftCommit.maxTime"; Long maxTime = 3000L; - String[] args = new String[]{ - "-collection", testCollectionName, - "-property", prop, - "-value", maxTime.toString(), - "-solrUrl", solrUrl - }; + String[] args = + new String[] { + "-collection", testCollectionName, + "-property", prop, + "-value", maxTime.toString(), + "-solrUrl", solrUrl + }; Map startTimes = getSoftAutocommitInterval(testCollectionName); SolrCLI.ConfigTool tool = new SolrCLI.ConfigTool(); - CommandLine cli = SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args); + CommandLine cli = + SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args); log.info("Sending set-property '{}'={} to SolrCLI.ConfigTool.", prop, maxTime); assertTrue("Set config property failed!", tool.runTool(cli) == 0); @@ -230,25 +255,32 @@ protected void doTestConfigUpdate(String testCollectionName, String solrUrl) thr assertNotNull(maxTimeFromConfig); assertEquals(maxTime, maxTimeFromConfig); - // Just check that we can access paths with slashes in them both through an intermediate method and explicitly - // using atPath. - assertEquals("Should have been able to get a value from the /query request handler", - "explicit", SolrCLI.asString("/config/requestHandler/\\/query/defaults/echoParams", configJson)); + // Just check that we can access paths with slashes in them both through an intermediate method + // and explicitly using atPath. + assertEquals( + "Should have been able to get a value from the /query request handler", + "explicit", + SolrCLI.asString("/config/requestHandler/\\/query/defaults/echoParams", configJson)); - assertEquals("Should have been able to get a value from the /query request handler", - "explicit", SolrCLI.atPath("/config/requestHandler/\\/query/defaults/echoParams", configJson)); + assertEquals( + "Should have been able to get a value from the /query request handler", + "explicit", + SolrCLI.atPath("/config/requestHandler/\\/query/defaults/echoParams", configJson)); if (log.isInfoEnabled()) { - log.info("live_nodes_count : {}", cloudClient.getZkStateReader().getClusterState().getLiveNodes()); + log.info( + "live_nodes_count : {}", + cloudClient.getZkStateReader().getClusterState().getLiveNodes()); } - // Since it takes some time for this command to complete we need to make sure all the reloads for - // all the cores have been done. + // Since it takes some time for this command to complete we need to make sure all the reloads + // for all the cores have been done. boolean allGood = false; Map curSoftCommitInterval = null; for (int idx = 0; idx < 600 && allGood == false; ++idx) { curSoftCommitInterval = getSoftAutocommitInterval(testCollectionName); - if (curSoftCommitInterval.size() > 0 && curSoftCommitInterval.size() == startTimes.size()) { // no point in even trying if they're not the same size! + // no point in even trying if they're not the same size! 
+ if (curSoftCommitInterval.size() > 0 && curSoftCommitInterval.size() == startTimes.size()) { allGood = true; for (Map.Entry currEntry : curSoftCommitInterval.entrySet()) { if (currEntry.getValue().equals(maxTime) == false) { @@ -269,9 +301,17 @@ private Map getSoftAutocommitInterval(String collection) throws Ex DocCollection coll = cloudClient.getZkStateReader().getClusterState().getCollection(collection); for (Slice slice : coll.getActiveSlices()) { for (Replica replica : slice.getReplicas()) { - String uri = "" + replica.get(ZkStateReader.BASE_URL_PROP) + "/" + replica.get(ZkStateReader.CORE_NAME_PROP) + "/config"; + String uri = + "" + + replica.get(ZkStateReader.BASE_URL_PROP) + + "/" + + replica.get(ZkStateReader.CORE_NAME_PROP) + + "/config"; Map respMap = getAsMap(cloudClient, uri); - Long maxTime = (Long) (getObjectByPath(respMap, true, asList("config", "updateHandler", "autoSoftCommit", "maxTime"))); + Long maxTime = + (Long) + (getObjectByPath( + respMap, true, asList("config", "updateHandler", "autoSoftCommit", "maxTime"))); ret.put(replica.getCoreName(), maxTime); } } @@ -289,5 +329,4 @@ private Map getSoftAutocommitInterval(String collection) throws Ex EntityUtils.consumeQuietly(entity); } } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/SolrXmlInZkTest.java b/solr/core/src/test/org/apache/solr/cloud/SolrXmlInZkTest.java index 8507ea2597e..126e82cb604 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SolrXmlInZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SolrXmlInZkTest.java @@ -16,13 +16,12 @@ */ package org.apache.solr.cloud; +import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import java.io.File; import java.lang.invoke.MethodHandles; import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.util.Properties; - -import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import org.apache.commons.io.FileUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; @@ -40,8 +39,7 @@ public class SolrXmlInZkTest extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - @Rule - public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); + @Rule public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); protected ZkTestServer zkServer; @@ -58,7 +56,9 @@ private void setUpZkAndDiskXml(boolean toZk, boolean leaveOnLocal) throws Except Path solrHome = tmpDir.resolve("home"); copyMinConf(new File(solrHome.toFile(), "myCollect")); if (leaveOnLocal) { - FileUtils.copyFile(new File(SolrTestCaseJ4.TEST_HOME(), "solr-stress-new.xml"), new File(solrHome.toFile(), "solr.xml")); + FileUtils.copyFile( + new File(SolrTestCaseJ4.TEST_HOME(), "solr-stress-new.xml"), + new File(solrHome.toFile(), "solr.xml")); } ignoreException("No UpdateLog found - cannot sync"); @@ -110,8 +110,10 @@ private void closeZK() throws Exception { public void testXmlOnBoth() throws Exception { try { setUpZkAndDiskXml(true, true); - assertEquals("Should have gotten a new port the xml file sent to ZK, overrides the copy on disk", - cfg.getCloudConfig().getSolrHostPort(), 9045); + assertEquals( + "Should have gotten a new port the xml file sent to ZK, overrides the copy on disk", + cfg.getCloudConfig().getSolrHostPort(), + 9045); } finally { closeZK(); } @@ -121,8 +123,10 @@ public void testXmlOnBoth() throws Exception { public void testXmlInZkOnly() throws Exception { try { 
setUpZkAndDiskXml(true, false); - assertEquals("Should have gotten a new port the xml file sent to ZK", - cfg.getCloudConfig().getSolrHostPort(), 9045); + assertEquals( + "Should have gotten a new port the xml file sent to ZK", + cfg.getCloudConfig().getSolrHostPort(), + 9045); } finally { closeZK(); } @@ -132,8 +136,8 @@ public void testXmlInZkOnly() throws Exception { public void testNotInZkFallbackLocal() throws Exception { try { setUpZkAndDiskXml(false, true); - assertEquals("Should have gotten the default port", - cfg.getCloudConfig().getSolrHostPort(), 8983); + assertEquals( + "Should have gotten the default port", cfg.getCloudConfig().getSolrHostPort(), 8983); } finally { closeZK(); } @@ -143,8 +147,8 @@ public void testNotInZkFallbackLocal() throws Exception { public void testNotInZkOrOnDiskFallbackDefault() throws Exception { try { setUpZkAndDiskXml(false, false); - assertEquals("Should have gotten the default port", - cfg.getCloudConfig().getSolrHostPort(), 8983); + assertEquals( + "Should have gotten the default port", cfg.getCloudConfig().getSolrHostPort(), 8983); } finally { closeZK(); } @@ -154,11 +158,15 @@ public void testNotInZkOrOnDiskFallbackDefault() throws Exception { public void testNotInZkOrOnDiskWhenRequired() throws Exception { try { System.setProperty("solr.solrxml.required", "true"); - SolrException e = expectThrows(SolrException.class, () -> { - System.setProperty("hostPort", "8787"); - setUpZkAndDiskXml(false, false); // solr.xml not on disk either - }); - assertTrue("Should be failing to create default solr.xml in code", + SolrException e = + expectThrows( + SolrException.class, + () -> { + System.setProperty("hostPort", "8787"); + setUpZkAndDiskXml(false, false); // solr.xml not on disk either + }); + assertTrue( + "Should be failing to create default solr.xml in code", e.getMessage().contains("solr.xml does not exist")); } finally { closeZK(); @@ -170,25 +178,25 @@ public void testNotInZkOrOnDiskWhenRequired() throws Exception { public void testOnDiskOnly() throws Exception { try { setUpZkAndDiskXml(false, true); - assertEquals("Should have gotten the default port", cfg.getCloudConfig().getSolrHostPort(), 8983); + assertEquals( + "Should have gotten the default port", cfg.getCloudConfig().getSolrHostPort(), 8983); } finally { closeZK(); } } - // Just a random port, I'm not going to use it but just check that the Solr instance constructed from the XML - // file in ZK overrides the default port. + // Just a random port, I'm not going to use it but just check that the Solr instance constructed + // from the XML file in ZK overrides the default port. 
private static final String XML_FOR_ZK = - "<solr>" + - "  <solrcloud>" + - "    <str name=\"host\">127.0.0.1</str>" + - "    <int name=\"hostPort\">9045</int>" + - "    <str name=\"hostContext\">${hostContext:solr}</str>" + - "  </solrcloud>" + - "  <shardHandlerFactory name=\"shardHandlerFactory\" class=\"HttpShardHandlerFactory\">" + - "    <int name=\"socketTimeout\">${socketTimeout:120000}</int>" + - "    <int name=\"connTimeout\">${connTimeout:15000}</int>" + - "  </shardHandlerFactory>" + - "</solr>"; - + "<solr>" + + "  <solrcloud>" + + "    <str name=\"host\">127.0.0.1</str>" + + "    <int name=\"hostPort\">9045</int>" + + "    <str name=\"hostContext\">${hostContext:solr}</str>" + + "  </solrcloud>" + + "  <shardHandlerFactory name=\"shardHandlerFactory\" class=\"HttpShardHandlerFactory\">" + + "    <int name=\"socketTimeout\">${socketTimeout:120000}</int>" + + "    <int name=\"connTimeout\">${connTimeout:15000}</int>" + + "  </shardHandlerFactory>" + + "</solr>"; } diff --git a/solr/core/src/test/org/apache/solr/cloud/SplitShardTest.java b/solr/core/src/test/org/apache/solr/cloud/SplitShardTest.java index 576eec8c79c..7a74e7fedd3 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SplitShardTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SplitShardTest.java @@ -25,7 +25,6 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; - import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; @@ -55,9 +54,7 @@ public class SplitShardTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { System.setProperty("metricsEnabled", "true"); - configureCluster(1) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(1).addConfig("conf", configset("cloud-minimal")).configure(); } @Before @@ -75,43 +72,63 @@ public void tearDown() throws Exception { @Test public void doTest() throws IOException, SolrServerException { - CollectionAdminRequest - .createCollection(COLLECTION_NAME, "conf", 2, 1) + CollectionAdminRequest.createCollection(COLLECTION_NAME, "conf", 2, 1) .process(cluster.getSolrClient()); - + cluster.waitForActiveCollection(COLLECTION_NAME, 2, 2); - - CollectionAdminRequest.SplitShard splitShard = CollectionAdminRequest.splitShard(COLLECTION_NAME) - .setNumSubShards(5) - .setShardName("shard1"); + + CollectionAdminRequest.SplitShard splitShard = + CollectionAdminRequest.splitShard(COLLECTION_NAME) + .setNumSubShards(5) + .setShardName("shard1"); splitShard.process(cluster.getSolrClient()); - waitForState("Timed out waiting for sub shards to be active. 
Number of active shards=" + + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollection(COLLECTION_NAME) + .getActiveSlices() + .size(), + COLLECTION_NAME, + activeClusterShape(6, 7)); try { - splitShard = CollectionAdminRequest.splitShard(COLLECTION_NAME).setShardName("shard2").setNumSubShards(10); + splitShard = + CollectionAdminRequest.splitShard(COLLECTION_NAME) + .setShardName("shard2") + .setNumSubShards(10); splitShard.process(cluster.getSolrClient()); fail("SplitShard should throw an exception when numSubShards > 8"); } catch (BaseHttpSolrClient.RemoteSolrException ex) { - assertTrue(ex.getMessage().contains("A shard can only be split into 2 to 8 subshards in one split request.")); + assertTrue( + ex.getMessage() + .contains("A shard can only be split into 2 to 8 subshards in one split request.")); } try { - splitShard = CollectionAdminRequest.splitShard(COLLECTION_NAME).setShardName("shard2").setNumSubShards(1); + splitShard = + CollectionAdminRequest.splitShard(COLLECTION_NAME) + .setShardName("shard2") + .setNumSubShards(1); splitShard.process(cluster.getSolrClient()); fail("SplitShard should throw an exception when numSubShards < 2"); } catch (BaseHttpSolrClient.RemoteSolrException ex) { - assertTrue(ex.getMessage().contains("A shard can only be split into 2 to 8 subshards in one split request. Provided numSubShards=1")); + assertTrue( + ex.getMessage() + .contains( + "A shard can only be split into 2 to 8 subshards in one split request. Provided numSubShards=1")); } } @Test public void multipleOptionsSplitTest() throws IOException, SolrServerException { - CollectionAdminRequest.SplitShard splitShard = CollectionAdminRequest.splitShard(COLLECTION_NAME) - .setNumSubShards(5) - .setRanges("0-c,d-7fffffff") - .setShardName("shard1"); + CollectionAdminRequest.SplitShard splitShard = + CollectionAdminRequest.splitShard(COLLECTION_NAME) + .setNumSubShards(5) + .setRanges("0-c,d-7fffffff") + .setShardName("shard1"); boolean expectedException = false; try { splitShard.process(cluster.getSolrClient()); @@ -125,23 +142,30 @@ public void multipleOptionsSplitTest() throws IOException, SolrServerException { @Test public void testSplitFuzz() throws Exception { String collectionName = "splitFuzzCollection"; - CollectionAdminRequest - .createCollection(collectionName, "conf", 2, 1) + CollectionAdminRequest.createCollection(collectionName, "conf", 2, 1) .process(cluster.getSolrClient()); cluster.waitForActiveCollection(collectionName, 2, 2); - CollectionAdminRequest.SplitShard splitShard = CollectionAdminRequest.splitShard(collectionName) - .setSplitFuzz(0.5f) - .setShardName("shard1"); + CollectionAdminRequest.SplitShard splitShard = + CollectionAdminRequest.splitShard(collectionName).setSplitFuzz(0.5f).setShardName("shard1"); splitShard.process(cluster.getSolrClient()); - waitForState("Timed out waiting for sub shards to be active. Number of active shards=" + - cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(collectionName).getActiveSlices().size(), - collectionName, activeClusterShape(3, 4)); - DocCollection coll = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(collectionName); + waitForState( + "Timed out waiting for sub shards to be active. 
Number of active shards=" + + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollection(collectionName) + .getActiveSlices() + .size(), + collectionName, + activeClusterShape(3, 4)); + DocCollection coll = + cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(collectionName); Slice s1_0 = coll.getSlice("shard1_0"); Slice s1_1 = coll.getSlice("shard1_1"); - long fuzz = ((long)Integer.MAX_VALUE >> 3) + 1L; + long fuzz = ((long) Integer.MAX_VALUE >> 3) + 1L; long delta0 = s1_0.getRange().max - s1_0.getRange().min; long delta1 = s1_1.getRange().max - s1_1.getRange().min; long expected0 = (Integer.MAX_VALUE >> 1) + fuzz; @@ -150,12 +174,10 @@ public void testSplitFuzz() throws Exception { assertEquals("wrong range in s1_1", expected1, delta1); } - CloudSolrClient createCollection(String collectionName, int repFactor) throws Exception { - CollectionAdminRequest - .createCollection(collectionName, "conf", 1, repFactor) - .process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection(collectionName, "conf", 1, repFactor) + .process(cluster.getSolrClient()); cluster.waitForActiveCollection(collectionName, 1, repFactor); @@ -164,10 +186,10 @@ CloudSolrClient createCollection(String collectionName, int repFactor) throws Ex return client; } - long getNumDocs(CloudSolrClient client) throws Exception { String collectionName = client.getDefaultCollection(); - DocCollection collection = client.getZkStateReader().getClusterState().getCollection(collectionName); + DocCollection collection = + client.getZkStateReader().getClusterState().getCollection(collectionName); Collection slices = collection.getSlices(); long totCount = 0; @@ -175,10 +197,15 @@ long getNumDocs(CloudSolrClient client) throws Exception { if (!slice.getState().equals(Slice.State.ACTIVE)) continue; long lastReplicaCount = -1; for (Replica replica : slice.getReplicas()) { - SolrClient replicaClient = getHttpSolrClient(replica.getBaseUrl() + "/" + replica.getCoreName()); + SolrClient replicaClient = + getHttpSolrClient(replica.getBaseUrl() + "/" + replica.getCoreName()); long numFound = 0; try { - numFound = replicaClient.query(params("q", "*:*", "distrib", "false")).getResults().getNumFound(); + numFound = + replicaClient + .query(params("q", "*:*", "distrib", "false")) + .getResults() + .getNumFound(); log.info("Replica count={} for {}", numFound, replica); } finally { replicaClient.close(); @@ -191,63 +218,74 @@ long getNumDocs(CloudSolrClient client) throws Exception { totCount += lastReplicaCount; } - long cloudClientDocs = client.query(new SolrQuery("*:*")).getResults().getNumFound(); - assertEquals("Sum of shard count should equal distrib query doc count", totCount, cloudClientDocs); + assertEquals( + "Sum of shard count should equal distrib query doc count", totCount, cloudClientDocs); return totCount; } void doLiveSplitShard(String collectionName, int repFactor, int nThreads) throws Exception { final CloudSolrClient client = createCollection(collectionName, repFactor); - final ConcurrentHashMap model = new ConcurrentHashMap<>(); // what the index should contain + final ConcurrentHashMap model = + new ConcurrentHashMap<>(); // what the index should contain final AtomicBoolean doIndex = new AtomicBoolean(true); final AtomicInteger docsIndexed = new AtomicInteger(); Thread[] indexThreads = new Thread[nThreads]; try { - for (int i=0; i { - while (doIndex.get()) { - try { - // Thread.sleep(10); // cap indexing rate at 100 docs per second per thread - int currDoc = 
docsIndexed.incrementAndGet(); - String docId = "doc_" + currDoc; - - // Try all docs in the same update request - UpdateRequest updateReq = new UpdateRequest(); - updateReq.add(sdoc("id", docId)); - // UpdateResponse ursp = updateReq.commit(client, collectionName); // uncomment this if you want a commit each time - UpdateResponse ursp = updateReq.process(client, collectionName); - assertEquals(0, ursp.getStatus()); // for now, don't accept any failures - if (ursp.getStatus() == 0) { - model.put(docId, 1L); // in the future, keep track of a version per document and reuse ids to keep index from growing too large - } - } catch (Exception e) { - fail(e.getMessage()); - break; - } - } - }); + for (int i = 0; i < nThreads; i++) { + indexThreads[i] = + new Thread( + () -> { + while (doIndex.get()) { + try { + // Thread.sleep(10); // cap indexing rate at 100 docs per second per thread + int currDoc = docsIndexed.incrementAndGet(); + String docId = "doc_" + currDoc; + + // Try all docs in the same update request + UpdateRequest updateReq = new UpdateRequest(); + updateReq.add(sdoc("id", docId)); + // UpdateResponse ursp = updateReq.commit(client, collectionName); // + // uncomment this if you want a commit each time + UpdateResponse ursp = updateReq.process(client, collectionName); + assertEquals(0, ursp.getStatus()); // for now, don't accept any failures + if (ursp.getStatus() == 0) { + model.put( + docId, + 1L); // in the future, keep track of a version per document and reuse + // ids to keep index from growing too large + } + } catch (Exception e) { + fail(e.getMessage()); + break; + } + } + }); } for (Thread thread : indexThreads) { thread.start(); } - Thread.sleep(100); // wait for a few docs to be indexed before invoking split + Thread.sleep(100); // wait for a few docs to be indexed before invoking split int docCount = model.size(); - CollectionAdminRequest.SplitShard splitShard = CollectionAdminRequest.splitShard(collectionName) - .setShardName("shard1"); + CollectionAdminRequest.SplitShard splitShard = + CollectionAdminRequest.splitShard(collectionName).setShardName("shard1"); splitShard.process(client); - waitForState("Timed out waiting for sub shards to be active.", - collectionName, activeClusterShape(2, 3*repFactor)); // 2 repFactor for the new split shards, 1 repFactor for old replicas + waitForState( + "Timed out waiting for sub shards to be active.", + collectionName, + activeClusterShape( + 2, + 3 * repFactor)); // 2 repFactor for the new split shards, 1 repFactor for old replicas // make sure that docs were able to be indexed during the split assertTrue(model.size() > docCount); - Thread.sleep(100); // wait for a few more docs to be indexed after split + Thread.sleep(100); // wait for a few more docs to be indexed after split } finally { // shut down the indexers @@ -257,12 +295,15 @@ void doLiveSplitShard(String collectionName, int repFactor, int nThreads) throws } } - client.commit(); // final commit is needed for visibility + client.commit(); // final commit is needed for visibility long numDocs = getNumDocs(client); if (numDocs != model.size()) { - SolrDocumentList results = client.query(new SolrQuery("q","*:*", "fl","id", "rows", Integer.toString(model.size()) )).getResults(); - Map leftover = new HashMap<>(model); + SolrDocumentList results = + client + .query(new SolrQuery("q", "*:*", "fl", "id", "rows", Integer.toString(model.size()))) + .getResults(); + Map leftover = new HashMap<>(model); for (SolrDocument doc : results) { String id = (String) doc.get("id"); 
 leftover.remove(id);
@@ -274,18 +315,15 @@ void doLiveSplitShard(String collectionName, int repFactor, int nThreads) throws
     log.info("Number of documents indexed and queried : {}", numDocs);
   }
 
-
-
   @Test
   public void testLiveSplit() throws Exception {
-    // Debugging tips: if this fails, it may be easier to debug by lowering the number fo threads to 1 and looping the test
-    // until you get another failure.
-    // You may need to further instrument things like DistributedZkUpdateProcessor to display the cluster state for the collection, etc.
-    // Using more threads increases the chance to hit a concurrency bug, but too many threads can overwhelm single-threaded buffering
-    // replay after the low level index split and result in subShard leaders that can't catch up and
-    // become active (a known issue that still needs to be resolved.)
+    // Debugging tips: if this fails, it may be easier to debug by lowering the number of threads to
+    // 1 and looping the test until you get another failure. You may need to further instrument
+    // things like DistributedZkUpdateProcessor to display the cluster state for the collection,
+    // etc. Using more threads increases the chance to hit a concurrency bug, but too many threads
+    // can overwhelm single-threaded buffering replay after the low level index split and result in
+    // subShard leaders that can't catch up and become active (a known issue that still needs to be
+    // resolved.)
     doLiveSplitShard("livesplit1", 1, 4);
   }
-
-
 }
diff --git a/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java b/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java
index 0998e371522..cf217eb6b6e 100644
--- a/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java
@@ -19,12 +19,9 @@
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.junit.Test;
 
-
 /**
- * Test sync phase that occurs when Leader goes down and a new Leader is
- * elected.
- * Implementation moved to AbstractSyncSliceTestBase.java as it is also
- * used by the HDFS contrib tests.
+ * Test sync phase that occurs when Leader goes down and a new Leader is elected. Implementation
+ * moved to AbstractSyncSliceTestBase.java as it is also used by the HDFS contrib tests.
*/ @Slow public class SyncSliceTest extends AbstractSyncSliceTestBase { @@ -34,5 +31,4 @@ public class SyncSliceTest extends AbstractSyncSliceTestBase { public void test() throws Exception { super.test(); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/SystemCollectionCompatTest.java b/solr/core/src/test/org/apache/solr/cloud/SystemCollectionCompatTest.java index ec941384c0d..a546cb28f52 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SystemCollectionCompatTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SystemCollectionCompatTest.java @@ -27,7 +27,6 @@ import java.util.Optional; import java.util.Set; import java.util.concurrent.TimeUnit; - import org.apache.solr.client.solrj.cloud.SolrCloudManager; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -56,19 +55,15 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * - */ +/** */ public class SystemCollectionCompatTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @BeforeClass public static void setupCluster() throws Exception { System.setProperty("managed.schema.mutable", "true"); - configureCluster(2) - .addConfig("conf1", configset("cloud-managed")) - .configure(); - if (! log.isWarnEnabled()) { + configureCluster(2).addConfig("conf1", configset("cloud-managed")).configure(); + if (!log.isWarnEnabled()) { fail("Test requires that log-level is at-least WARN, but WARN is disabled"); } } @@ -80,8 +75,10 @@ public static void setupCluster() throws Exception { public void setupSystemCollection() throws Exception { ZkController zkController = cluster.getJettySolrRunner(0).getCoreContainer().getZkController(); cloudManager = zkController.getSolrCloudManager(); - solrClient = new CloudSolrClientBuilder(Collections.singletonList(zkController.getZkServerAddress()), - Optional.empty()).build(); + solrClient = + new CloudSolrClientBuilder( + Collections.singletonList(zkController.getZkServerAddress()), Optional.empty()) + .build(); CollectionAdminRequest.OverseerStatus status = new CollectionAdminRequest.OverseerStatus(); CollectionAdminResponse adminResponse = status.process(solrClient); String overseerLeader = (String) adminResponse.getResponse().get("leader"); @@ -91,18 +88,18 @@ public void setupSystemCollection() throws Exception { CollectionAdminRequest.createCollection(CollectionAdminParams.SYSTEM_COLL, null, 1, 2) .setCreateNodeSet(String.join(",", nodes)) .process(cluster.getSolrClient()); - cluster.waitForActiveCollection(CollectionAdminParams.SYSTEM_COLL, 1, 2); + cluster.waitForActiveCollection(CollectionAdminParams.SYSTEM_COLL, 1, 2); // send a dummy doc to the .system collection - SolrInputDocument doc = new SolrInputDocument( - "id", IdUtils.timeRandomId(), - CommonParams.TYPE, "dummy"); + SolrInputDocument doc = + new SolrInputDocument("id", IdUtils.timeRandomId(), CommonParams.TYPE, "dummy"); doc.addField("time_l", cloudManager.getTimeSource().getEpochTimeNs()); doc.addField("timestamp", new Date()); solrClient.add(CollectionAdminParams.SYSTEM_COLL, doc); solrClient.commit(CollectionAdminParams.SYSTEM_COLL); Map coreStartTimes = new HashMap<>(); - DocCollection coll = cloudManager.getClusterStateProvider().getCollection(CollectionAdminParams.SYSTEM_COLL); + DocCollection coll = + cloudManager.getClusterStateProvider().getCollection(CollectionAdminParams.SYSTEM_COLL); for (Replica r : coll.getReplicas()) { coreStartTimes.put(r.getName(), 
getCoreStatus(r).getCoreStartTime().getTime()); } @@ -114,29 +111,35 @@ public void setupSystemCollection() throws Exception { field.put("type", "string"); field.put("docValues", false); SchemaRequest.ReplaceField replaceFieldRequest = new SchemaRequest.ReplaceField(field); - SchemaResponse.UpdateResponse replaceFieldResponse = replaceFieldRequest.process(solrClient, CollectionAdminParams.SYSTEM_COLL); + SchemaResponse.UpdateResponse replaceFieldResponse = + replaceFieldRequest.process(solrClient, CollectionAdminParams.SYSTEM_COLL); assertEquals(replaceFieldResponse.toString(), 0, replaceFieldResponse.getStatus()); - CollectionAdminRequest.Reload reloadRequest = CollectionAdminRequest.reloadCollection(CollectionAdminParams.SYSTEM_COLL); + CollectionAdminRequest.Reload reloadRequest = + CollectionAdminRequest.reloadCollection(CollectionAdminParams.SYSTEM_COLL); CollectionAdminResponse response = reloadRequest.process(solrClient); assertEquals(0, response.getStatus()); assertTrue(response.isSuccess()); // wait for the reload of all replicas to complete - RetryUtil.retryUntil("Timed out waiting for core to reload", 30, 1000, TimeUnit.MILLISECONDS, () -> { - boolean allReloaded = true; - for (Replica r : coll.getReplicas()) { - long previousTime = coreStartTimes.get(r.getName()); - try { - long currentTime = getCoreStatus(r).getCoreStartTime().getTime(); - allReloaded = allReloaded && (previousTime < currentTime); - } catch (Exception e) { - log.warn("Error retrieving replica status of {}", Utils.toJSONString(r), e); - allReloaded = false; - } - } - return allReloaded; - }); - cluster.waitForActiveCollection(CollectionAdminParams.SYSTEM_COLL, 1, 2); - + RetryUtil.retryUntil( + "Timed out waiting for core to reload", + 30, + 1000, + TimeUnit.MILLISECONDS, + () -> { + boolean allReloaded = true; + for (Replica r : coll.getReplicas()) { + long previousTime = coreStartTimes.get(r.getName()); + try { + long currentTime = getCoreStatus(r).getCoreStartTime().getTime(); + allReloaded = allReloaded && (previousTime < currentTime); + } catch (Exception e) { + log.warn("Error retrieving replica status of {}", Utils.toJSONString(r), e); + allReloaded = false; + } + } + return allReloaded; + }); + cluster.waitForActiveCollection(CollectionAdminParams.SYSTEM_COLL, 1, 2); } @After @@ -162,9 +165,12 @@ private Map getSchemaField(String name, SchemaResponse schemaRes @Test public void testBackCompat() throws Exception { - if (new CollectionAdminRequest.RequestApiDistributedProcessing().process(cluster.getSolrClient()).getIsCollectionApiDistributed()) { + if (new CollectionAdminRequest.RequestApiDistributedProcessing() + .process(cluster.getSolrClient()) + .getIsCollectionApiDistributed()) { log.info("Skipping test because Collection API is distributed"); - // TODO once we completely remove Overseer, do we need to move the back compat check to some other place, for example + // TODO once we completely remove Overseer, do we need to move the back compat check to some + // other place, for example // to when the .system collection is opened? 
return; } @@ -192,7 +198,10 @@ public void testBackCompat() throws Exception { // restart Overseer to trigger the back-compat check if (log.isInfoEnabled()) { - log.info("Stopping Overseer Node: {} ({})", overseerNode.getNodeName(), overseerNode.getLocalPort()); + log.info( + "Stopping Overseer Node: {} ({})", + overseerNode.getNodeName(), + overseerNode.getLocalPort()); } cluster.stopJettySolrRunner(overseerNode); log.info("Waiting for new overseer election..."); @@ -241,9 +250,11 @@ public void testBackCompat() throws Exception { break; } } - log.info("Done polling log watcher: foundWarning={} foundSchemaWarning={}", foundWarning, foundSchemaWarning); + log.info( + "Done polling log watcher: foundWarning={} foundSchemaWarning={}", + foundWarning, + foundSchemaWarning); assertTrue("re-indexing warning not found", foundWarning); assertTrue("timestamp field incompatibility warning not found", foundSchemaWarning); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestAuthenticationFramework.java b/solr/core/src/test/org/apache/solr/cloud/TestAuthenticationFramework.java index 3f26b7effe5..2ee24eba86f 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestAuthenticationFramework.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestAuthenticationFramework.java @@ -16,12 +16,11 @@ */ package org.apache.solr.cloud; +import java.lang.invoke.MethodHandles; +import java.util.Map; import javax.servlet.FilterChain; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import java.lang.invoke.MethodHandles; -import java.util.Map; - import org.apache.http.HttpRequestInterceptor; import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.client.solrj.SolrQuery; @@ -38,16 +37,14 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * Test of the MiniSolrCloudCluster functionality with authentication enabled. - */ +/** Test of the MiniSolrCloudCluster functionality with authentication enabled. 
*/ @LuceneTestCase.Slow public class TestAuthenticationFramework extends SolrCloudTestCase { - + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final int numShards = 2; private static final int numReplicas = 2; - private static final int nodeCount = numShards*numReplicas; + private static final int nodeCount = numShards * numReplicas; private static final String configName = "solrCloudCollectionConfig"; private static final String collectionName = "testcollection"; @@ -60,13 +57,15 @@ public void setUp() throws Exception { configureCluster(nodeCount).addConfig(configName, configset("cloud-minimal")).configure(); super.setUp(); } - + private void setupAuthenticationPlugin() throws Exception { - System.setProperty("authenticationPlugin", "org.apache.solr.cloud.TestAuthenticationFramework$MockAuthenticationPlugin"); + System.setProperty( + "authenticationPlugin", + "org.apache.solr.cloud.TestAuthenticationFramework$MockAuthenticationPlugin"); MockAuthenticationPlugin.expectedUsername = null; MockAuthenticationPlugin.expectedPassword = null; } - + @Test public void testBasics() throws Exception { collectionCreateSearchDeleteTwice(); @@ -76,8 +75,10 @@ public void testBasics() throws Exception { // Should fail with 401 try { - BaseHttpSolrClient.RemoteSolrException e = expectThrows(BaseHttpSolrClient.RemoteSolrException.class, - this::collectionCreateSearchDeleteTwice); + BaseHttpSolrClient.RemoteSolrException e = + expectThrows( + BaseHttpSolrClient.RemoteSolrException.class, + this::collectionCreateSearchDeleteTwice); assertTrue("Should've returned a 401 error", e.getMessage().contains("Error 401")); } finally { MockAuthenticationPlugin.expectedUsername = null; @@ -92,51 +93,53 @@ public void tearDown() throws Exception { super.tearDown(); } - private void createCollection(String collectionName) - throws Exception { - if (random().nextBoolean()) { // process asynchronously + private void createCollection(String collectionName) throws Exception { + if (random().nextBoolean()) { // process asynchronously CollectionAdminRequest.createCollection(collectionName, configName, numShards, numReplicas) .processAndWait(cluster.getSolrClient(), 90); cluster.waitForActiveCollection(collectionName, numShards, numShards * numReplicas); - } - else { + } else { CollectionAdminRequest.createCollection(collectionName, configName, numShards, numReplicas) .process(cluster.getSolrClient()); cluster.waitForActiveCollection(collectionName, numShards, numShards * numReplicas); } - } public void collectionCreateSearchDeleteTwice() throws Exception { final CloudSolrClient client = cluster.getSolrClient(); - for (int i = 0 ; i < 2 ; ++i) { + for (int i = 0; i < 2; ++i) { // create collection createCollection(collectionName); // check that there's no left-over state - assertEquals(0, client.query(collectionName, new SolrQuery("*:*")).getResults().getNumFound()); + assertEquals( + 0, client.query(collectionName, new SolrQuery("*:*")).getResults().getNumFound()); // modify/query collection - Thread.sleep(100); // not everyone is up to date just because we waited to make sure one was - pause a moment + // not everyone is up to date just because we waited to make sure one was - pause a moment + Thread.sleep(100); new UpdateRequest().add("id", "1").commit(client, collectionName); QueryResponse rsp = client.query(collectionName, new SolrQuery("*:*")); assertEquals(1, rsp.getResults().getNumFound()); // delete the collection - cluster.deleteAllCollections(); + 
cluster.deleteAllCollections(); } } - public static class MockAuthenticationPlugin extends AuthenticationPlugin implements HttpClientBuilderPlugin { + public static class MockAuthenticationPlugin extends AuthenticationPlugin + implements HttpClientBuilderPlugin { public static String expectedUsername; public static String expectedPassword; private HttpRequestInterceptor interceptor; + @Override - public void init(Map pluginConfig) {} + public void init(Map pluginConfig) {} @Override - public boolean doAuthenticate(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) + public boolean doAuthenticate( + HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) throws Exception { if (expectedUsername == null) { filterChain.doFilter(request, response); @@ -145,9 +148,10 @@ public boolean doAuthenticate(HttpServletRequest request, HttpServletResponse re HttpServletRequest httpRequest = request; String username = httpRequest.getHeader("username"); String password = httpRequest.getHeader("password"); - + log.info("Username: {}, password: {}", username, password); - if(MockAuthenticationPlugin.expectedUsername.equals(username) && MockAuthenticationPlugin.expectedPassword.equals(password)) { + if (MockAuthenticationPlugin.expectedUsername.equals(username) + && MockAuthenticationPlugin.expectedPassword.equals(password)) { filterChain.doFilter(request, response); return true; } else { @@ -158,10 +162,11 @@ public boolean doAuthenticate(HttpServletRequest request, HttpServletResponse re @Override public SolrHttpClientBuilder getHttpClientBuilder(SolrHttpClientBuilder httpClientBuilder) { - interceptor = (req, rsp) -> { - req.addHeader("username", requestUsername); - req.addHeader("password", requestPassword); - }; + interceptor = + (req, rsp) -> { + req.addHeader("username", requestUsername); + req.addHeader("password", requestPassword); + }; HttpClientUtil.addRequestInterceptor(interceptor); return httpClientBuilder; @@ -171,6 +176,5 @@ public SolrHttpClientBuilder getHttpClientBuilder(SolrHttpClientBuilder httpClie public void close() { HttpClientUtil.removeRequestInterceptor(interceptor); } - } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestBaseStatsCacheCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestBaseStatsCacheCloud.java index 11ec68ea0e9..31ca4e11921 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestBaseStatsCacheCloud.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestBaseStatsCacheCloud.java @@ -20,7 +20,6 @@ import java.util.HashMap; import java.util.Map; import java.util.function.Function; - import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer; @@ -42,9 +41,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * - */ +/** */ @Ignore("Abstract classes should not be executed as tests") public abstract class TestBaseStatsCacheCloud extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -54,17 +51,18 @@ public abstract class TestBaseStatsCacheCloud extends SolrCloudTestCase { protected String collectionName = "collection_" + getClass().getSimpleName(); - protected Function generator = i -> { - SolrInputDocument doc = new SolrInputDocument("id", "id-" + i); - if (i % 3 == 0) { - doc.addField("foo_t", "bar baz"); - } else if (i % 3 == 1) { - doc.addField("foo_t", "bar"); - } else { - // skip the field - } - return 
doc; - }; + protected Function generator = + i -> { + SolrInputDocument doc = new SolrInputDocument("id", "id-" + i); + if (i % 3 == 0) { + doc.addField("foo_t", "bar baz"); + } else if (i % 3 == 1) { + doc.addField("foo_t", "bar"); + } else { + // skip the field + } + return doc; + }; protected CloudSolrClient solrClient; @@ -110,10 +108,16 @@ public void tearDownCluster() { @Test @SuppressWarnings({"unchecked"}) public void testBasicStats() throws Exception { - QueryResponse cloudRsp = solrClient.query(collectionName, - params("q", "foo_t:\"bar baz\"", "fl", "*,score", "rows", "" + NUM_DOCS, "debug", "true")); - QueryResponse controlRsp = control.query("collection1", - params("q", "foo_t:\"bar baz\"", "fl", "*,score", "rows", "" + NUM_DOCS, "debug", "true")); + QueryResponse cloudRsp = + solrClient.query( + collectionName, + params( + "q", "foo_t:\"bar baz\"", "fl", "*,score", "rows", "" + NUM_DOCS, "debug", "true")); + QueryResponse controlRsp = + control.query( + "collection1", + params( + "q", "foo_t:\"bar baz\"", "fl", "*,score", "rows", "" + NUM_DOCS, "debug", "true")); assertResponses(controlRsp, cloudRsp, assertSameScores()); @@ -121,75 +125,94 @@ public void testBasicStats() throws Exception { indexDocs(solrClient, collectionName, NUM_DOCS, NUM_DOCS, generator); indexDocs(control, "collection1", NUM_DOCS, NUM_DOCS, generator); - cloudRsp = solrClient.query(collectionName, - params("q", "foo_t:\"bar baz\"", "fl", "*,score", "rows", "" + (NUM_DOCS * 2))); - controlRsp = control.query("collection1", - params("q", "foo_t:\"bar baz\"", "fl", "*,score", "rows", "" + (NUM_DOCS * 2))); + cloudRsp = + solrClient.query( + collectionName, + params("q", "foo_t:\"bar baz\"", "fl", "*,score", "rows", "" + (NUM_DOCS * 2))); + controlRsp = + control.query( + "collection1", + params("q", "foo_t:\"bar baz\"", "fl", "*,score", "rows", "" + (NUM_DOCS * 2))); assertResponses(controlRsp, cloudRsp, assertSameScores()); // check cache metrics StatsCache.StatsCacheMetrics statsCacheMetrics = new StatsCache.StatsCacheMetrics(); for (JettySolrRunner jettySolrRunner : cluster.getJettySolrRunners()) { try (SolrClient client = getHttpSolrClient(jettySolrRunner.getBaseUrl().toString())) { - NamedList metricsRsp = client.request( - new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/metrics", params("group", "solr.core", "prefix", "CACHE.searcher.statsCache"))); + NamedList metricsRsp = + client.request( + new GenericSolrRequest( + SolrRequest.METHOD.GET, + "/admin/metrics", + params("group", "solr.core", "prefix", "CACHE.searcher.statsCache"))); assertNotNull(metricsRsp); - NamedList metricsPerReplica = (NamedList)metricsRsp.get("metrics"); + NamedList metricsPerReplica = (NamedList) metricsRsp.get("metrics"); assertNotNull("no metrics perReplica", metricsPerReplica); - //log.info("======= Node: " + jettySolrRunner.getBaseUrl()); - //log.info("======= Metrics:\n" + Utils.toJSONString(metricsPerReplica)); - metricsPerReplica.forEach((replica, metrics) -> { - Map values = (Map)((NamedList)metrics).get("CACHE.searcher.statsCache"); - values.forEach((name, value) -> { - long val = value instanceof Number ? 
((Number) value).longValue() : 0; - switch (name) { - case "lookups" : - statsCacheMetrics.lookups.add(val); - break; - case "returnLocalStats" : - statsCacheMetrics.returnLocalStats.add(val); - break; - case "mergeToGlobalStats" : - statsCacheMetrics.mergeToGlobalStats.add(val); - break; - case "missingGlobalFieldStats" : - statsCacheMetrics.missingGlobalFieldStats.add(val); - break; - case "missingGlobalTermStats" : - statsCacheMetrics.missingGlobalTermStats.add(val); - break; - case "receiveGlobalStats" : - statsCacheMetrics.receiveGlobalStats.add(val); - break; - case "retrieveStats" : - statsCacheMetrics.retrieveStats.add(val); - break; - case "sendGlobalStats" : - statsCacheMetrics.sendGlobalStats.add(val); - break; - case "useCachedGlobalStats" : - statsCacheMetrics.useCachedGlobalStats.add(val); - break; - case "statsCacheImpl" : - assertTrue("incorreect cache impl, expected" + getImplementationName() + " but was " + value, - getImplementationName().endsWith((String)value)); - break; - default: - fail("Unexpected cache metrics: key=" + name + ", value=" + value); - } - }); - }); + // log.info("======= Node: " + jettySolrRunner.getBaseUrl()); + // log.info("======= Metrics:\n" + Utils.toJSONString(metricsPerReplica)); + metricsPerReplica.forEach( + (replica, metrics) -> { + Map values = + (Map) + ((NamedList) metrics).get("CACHE.searcher.statsCache"); + values.forEach( + (name, value) -> { + long val = value instanceof Number ? ((Number) value).longValue() : 0; + switch (name) { + case "lookups": + statsCacheMetrics.lookups.add(val); + break; + case "returnLocalStats": + statsCacheMetrics.returnLocalStats.add(val); + break; + case "mergeToGlobalStats": + statsCacheMetrics.mergeToGlobalStats.add(val); + break; + case "missingGlobalFieldStats": + statsCacheMetrics.missingGlobalFieldStats.add(val); + break; + case "missingGlobalTermStats": + statsCacheMetrics.missingGlobalTermStats.add(val); + break; + case "receiveGlobalStats": + statsCacheMetrics.receiveGlobalStats.add(val); + break; + case "retrieveStats": + statsCacheMetrics.retrieveStats.add(val); + break; + case "sendGlobalStats": + statsCacheMetrics.sendGlobalStats.add(val); + break; + case "useCachedGlobalStats": + statsCacheMetrics.useCachedGlobalStats.add(val); + break; + case "statsCacheImpl": + assertTrue( + "incorreect cache impl, expected" + + getImplementationName() + + " but was " + + value, + getImplementationName().endsWith((String) value)); + break; + default: + fail("Unexpected cache metrics: key=" + name + ", value=" + value); + } + }); + }); } } checkStatsCacheMetrics(statsCacheMetrics); } protected void checkStatsCacheMetrics(StatsCache.StatsCacheMetrics statsCacheMetrics) { - assertEquals(statsCacheMetrics.toString(), 0, statsCacheMetrics.missingGlobalFieldStats.intValue()); - assertEquals(statsCacheMetrics.toString(), 0, statsCacheMetrics.missingGlobalTermStats.intValue()); + assertEquals( + statsCacheMetrics.toString(), 0, statsCacheMetrics.missingGlobalFieldStats.intValue()); + assertEquals( + statsCacheMetrics.toString(), 0, statsCacheMetrics.missingGlobalTermStats.intValue()); } - protected void assertResponses(QueryResponse controlRsp, QueryResponse cloudRsp, boolean sameScores) throws Exception { + protected void assertResponses( + QueryResponse controlRsp, QueryResponse cloudRsp, boolean sameScores) throws Exception { Map cloudDocs = new HashMap<>(); Map controlDocs = new HashMap<>(); cloudRsp.getResults().forEach(doc -> cloudDocs.put((String) doc.getFieldValue("id"), doc)); @@ -202,14 +225,22 @@ 
protected void assertResponses(QueryResponse controlRsp, QueryResponse cloudRsp, Float controlScore = (Float) controlDoc.getFieldValue("score"); Float cloudScore = (Float) cloudDoc.getFieldValue("score"); if (sameScores) { - assertEquals("cloud score differs from control", controlScore, cloudScore, controlScore * 0.001f); + assertEquals( + "cloud score differs from control", controlScore, cloudScore, controlScore * 0.001f); } else { - assertNotEquals("cloud score is the same as control", controlScore, cloudScore, controlScore * 0.001f); + assertNotEquals( + "cloud score is the same as control", controlScore, cloudScore, controlScore * 0.001f); } } } - protected void indexDocs(SolrClient client, String collectionName, int num, int start, Function generator) throws Exception { + protected void indexDocs( + SolrClient client, + String collectionName, + int num, + int start, + Function generator) + throws Exception { UpdateRequest ureq = new UpdateRequest(); for (int i = 0; i < num; i++) { diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java index 48297731ab1..cd3db586370 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java @@ -27,7 +27,6 @@ import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; - import org.apache.solr.JSONTestUtil; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.cloud.SocketProxy; @@ -59,16 +58,14 @@ public void setupCluster() throws Exception { System.setProperty("solr.ulog.numRecordsToKeep", "1000"); System.setProperty("leaderVoteWait", "60000"); - configureCluster(4) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(4).addConfig("conf", configset("cloud-minimal")).configure(); // Add proxies proxies = new HashMap<>(cluster.getJettySolrRunners().size()); jettys = new HashMap<>(); - for (JettySolrRunner jetty:cluster.getJettySolrRunners()) { + for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { SocketProxy proxy = new SocketProxy(); jetty.setProxyPort(proxy.getListenPort()); - cluster.stopJettySolrRunner(jetty);//TODO: Can we avoid this restart + cluster.stopJettySolrRunner(jetty); // TODO: Can we avoid this restart cluster.startJettySolrRunner(jetty); proxy.open(jetty.getBaseUrl().toURI()); if (log.isInfoEnabled()) { @@ -91,7 +88,7 @@ public void tearDownCluster() throws Exception { System.clearProperty("solr.directoryFactory"); System.clearProperty("solr.ulog.numRecordsToKeep"); System.clearProperty("leaderVoteWait"); - + shutdownCluster(); } @@ -106,7 +103,7 @@ public void testOutOfSyncReplicasCannotBecomeLeaderAfterRestart() throws Excepti } public void testOutOfSyncReplicasCannotBecomeLeader(boolean onRestart) throws Exception { - final String collectionName = "outOfSyncReplicasCannotBecomeLeader-"+onRestart; + final String collectionName = "outOfSyncReplicasCannotBecomeLeader-" + onRestart; CollectionAdminRequest.createCollection(collectionName, 1, 3) .setCreateNodeSet("") .process(cluster.getSolrClient()); @@ -121,9 +118,9 @@ public void testOutOfSyncReplicasCannotBecomeLeader(boolean onRestart) throws Ex CollectionAdminRequest.addReplicaToShard(collectionName, "shard1") .setNode(cluster.getJettySolrRunner(2).getNodeName()) .process(cluster.getSolrClient()); - + cluster.waitForActiveCollection(collectionName, 1, 3); - + 
waitForState("Timeout waiting for 1x3 collection", collectionName, clusterShape(1, 3)); addDocs(collectionName, 3, 1); @@ -137,78 +134,99 @@ public void testOutOfSyncReplicasCannotBecomeLeader(boolean onRestart) throws Ex addDocWhenOtherReplicasAreNetworkPartitioned(collectionName, oldLeader, 4); } - assertDocsExistInAllReplicas(getCollectionState(collectionName).getReplicas(), collectionName, 1, 4); + assertDocsExistInAllReplicas( + getCollectionState(collectionName).getReplicas(), collectionName, 1, 4); CollectionAdminRequest.deleteCollection(collectionName).process(cluster.getSolrClient()); } - /** - * Adding doc when replicas (not leader) are down, - * These replicas are out-of-sync hence they should not become leader even when current leader is DOWN. - * Leader should be on node - 0 + * Adding doc when replicas (not leader) are down, These replicas are out-of-sync hence they + * should not become leader even when current leader is DOWN. Leader should be on node - 0 */ - private void addDocToWhenOtherReplicasAreDown(String collection, Replica leader, int docId) throws Exception { + private void addDocToWhenOtherReplicasAreDown(String collection, Replica leader, int docId) + throws Exception { JettySolrRunner j1 = cluster.getJettySolrRunner(1); JettySolrRunner j2 = cluster.getJettySolrRunner(2); j1.stop(); j2.stop(); cluster.waitForJettyToStop(j1); cluster.waitForJettyToStop(j2); - - waitForState("", collection, (liveNodes, collectionState) -> - collectionState.getSlice("shard1").getReplicas().stream() - .filter(replica -> replica.getState() == Replica.State.DOWN).count() == 2); + + waitForState( + "", + collection, + (liveNodes, collectionState) -> + collectionState.getSlice("shard1").getReplicas().stream() + .filter(replica -> replica.getState() == Replica.State.DOWN) + .count() + == 2); addDocs(collection, 1, docId); JettySolrRunner j3 = cluster.getJettySolrRunner(0); j3.stop(); cluster.waitForJettyToStop(j3); - waitForState("", collection, (liveNodes, collectionState) -> collectionState.getReplica(leader.getName()).getState() == Replica.State.DOWN); + waitForState( + "", + collection, + (liveNodes, collectionState) -> + collectionState.getReplica(leader.getName()).getState() == Replica.State.DOWN); cluster.getJettySolrRunner(1).start(); cluster.getJettySolrRunner(2).start(); - + cluster.waitForNode(j1, 30); cluster.waitForNode(j2, 30); // the meat of the test -- wait to see if a different replica become a leader // the correct behavior is that this should time out, if it succeeds we have a problem... 
- expectThrows(TimeoutException.class, - "Did not time out waiting for new leader, out of sync replica became leader", - () -> { - cluster.getSolrClient().waitForState(collection, 10, TimeUnit.SECONDS, (state) -> { - Replica newLeader = state.getSlice("shard1").getLeader(); - if (newLeader != null && !newLeader.getName().equals(leader.getName()) && newLeader.getState() == Replica.State.ACTIVE) { - // this is is the bad case, our "bad" state was found before timeout - log.error("WTF: New Leader={}", newLeader); - return true; - } - return false; // still no bad state, wait for timeout - }); - }); + expectThrows( + TimeoutException.class, + "Did not time out waiting for new leader, out of sync replica became leader", + () -> { + cluster + .getSolrClient() + .waitForState( + collection, + 10, + TimeUnit.SECONDS, + (state) -> { + Replica newLeader = state.getSlice("shard1").getLeader(); + if (newLeader != null + && !newLeader.getName().equals(leader.getName()) + && newLeader.getState() == Replica.State.ACTIVE) { + // this is is the bad case, our "bad" state was found before timeout + log.error("WTF: New Leader={}", newLeader); + return true; + } + return false; // still no bad state, wait for timeout + }); + }); JettySolrRunner j0 = cluster.getJettySolrRunner(0); j0.start(); cluster.waitForNode(j0, 30); - + // waitForNode not solid yet? cluster.waitForAllNodes(30); - - waitForState("Timeout waiting for leader", collection, (liveNodes, collectionState) -> { - Replica newLeader = collectionState.getLeader("shard1"); - return newLeader != null && newLeader.getName().equals(leader.getName()); - }); + + waitForState( + "Timeout waiting for leader", + collection, + (liveNodes, collectionState) -> { + Replica newLeader = collectionState.getLeader("shard1"); + return newLeader != null && newLeader.getName().equals(leader.getName()); + }); waitForState("Timeout waiting for active collection", collection, clusterShape(1, 3)); } - /** - * Adding doc when replicas (not leader) are network partitioned with leader, - * These replicas are out-of-sync hence they should not become leader even when current leader is DOWN. - * Leader should be on node - 0 + * Adding doc when replicas (not leader) are network partitioned with leader, These replicas are + * out-of-sync hence they should not become leader even when current leader is DOWN. Leader should + * be on node - 0 */ - private void addDocWhenOtherReplicasAreNetworkPartitioned(String collection, Replica leader, int docId) throws Exception { + private void addDocWhenOtherReplicasAreNetworkPartitioned( + String collection, Replica leader, int docId) throws Exception { for (int i = 0; i < 3; i++) { proxies.get(cluster.getJettySolrRunner(i)).close(); } @@ -219,38 +237,55 @@ private void addDocWhenOtherReplicasAreNetworkPartitioned(String collection, Rep for (int i = 1; i < 3; i++) { proxies.get(cluster.getJettySolrRunner(i)).reopen(); } - waitForState("Timeout waiting for leader goes DOWN", collection, (liveNodes, collectionState) - -> collectionState.getReplica(leader.getName()).getState() == Replica.State.DOWN); + waitForState( + "Timeout waiting for leader goes DOWN", + collection, + (liveNodes, collectionState) -> + collectionState.getReplica(leader.getName()).getState() == Replica.State.DOWN); // the meat of the test -- wait to see if a different replica become a leader // the correct behavior is that this should time out, if it succeeds we have a problem... 
-    expectThrows(TimeoutException.class,
-        "Did not time out waiting for new leader, out of sync replica became leader",
-        () -> {
-      cluster.getSolrClient().waitForState(collection, 10, TimeUnit.SECONDS, (state) -> {
-        Replica newLeader = state.getSlice("shard1").getLeader();
-        if (newLeader != null && !newLeader.getName().equals(leader.getName()) && newLeader.getState() == Replica.State.ACTIVE) {
-          // this is is the bad case, our "bad" state was found before timeout
-          log.error("WTF: New Leader={}", newLeader);
-          return true;
-        }
-        return false; // still no bad state, wait for timeout
-      });
-    });
+    expectThrows(
+        TimeoutException.class,
+        "Did not time out waiting for new leader, out of sync replica became leader",
+        () -> {
+          cluster
+              .getSolrClient()
+              .waitForState(
+                  collection,
+                  10,
+                  TimeUnit.SECONDS,
+                  (state) -> {
+                    Replica newLeader = state.getSlice("shard1").getLeader();
+                    if (newLeader != null
+                        && !newLeader.getName().equals(leader.getName())
+                        && newLeader.getState() == Replica.State.ACTIVE) {
+                      // this is the bad case, our "bad" state was found before timeout
+                      log.error("WTF: New Leader={}", newLeader);
+                      return true;
+                    }
+                    return false; // still no bad state, wait for timeout
+                  });
+        });
 
     proxies.get(cluster.getJettySolrRunner(0)).reopen();
     cluster.getJettySolrRunner(0).start();
-    cluster.waitForAllNodes(30);;
-    waitForState("Timeout waiting for leader", collection, (liveNodes, collectionState) -> {
-      Replica newLeader = collectionState.getLeader("shard1");
-      return newLeader != null && newLeader.getName().equals(leader.getName());
-    });
+    cluster.waitForAllNodes(30);
+    waitForState(
+        "Timeout waiting for leader",
+        collection,
+        (liveNodes, collectionState) -> {
+          Replica newLeader = collectionState.getLeader("shard1");
+          return newLeader != null && newLeader.getName().equals(leader.getName());
+        });
     waitForState("Timeout waiting for active collection", collection, clusterShape(1, 3));
-
+
     cluster.waitForActiveCollection(collection, 1, 3);
   }
 
-  private void addDocs(String collection, int numDocs, int startId) throws SolrServerException, IOException {
+  private void addDocs(String collection, int numDocs, int startId)
+      throws SolrServerException, IOException {
     List<SolrInputDocument> docs = new ArrayList<>(numDocs);
     for (int i = 0; i < numDocs; i++) {
       int id = startId + i;
@@ -260,20 +295,27 @@ private void addDocs(String collection, int numDocs, int startId) throws SolrSer
     cluster.getSolrClient().commit(collection);
   }
 
-  private void addDoc(String collection, int docId, JettySolrRunner solrRunner) throws IOException, SolrServerException {
-    try (HttpSolrClient solrClient = new HttpSolrClient.Builder(solrRunner.getBaseUrl().toString()).build()) {
-      solrClient.add(collection, new SolrInputDocument("id", String.valueOf(docId), "fieldName_s", String.valueOf(docId)));
+  private void addDoc(String collection, int docId, JettySolrRunner solrRunner)
+      throws IOException, SolrServerException {
+    try (HttpSolrClient solrClient =
+        new HttpSolrClient.Builder(solrRunner.getBaseUrl().toString()).build()) {
+      solrClient.add(
+          collection,
+          new SolrInputDocument("id", String.valueOf(docId), "fieldName_s", String.valueOf(docId)));
       solrClient.commit(collection);
     }
   }
 
-  private void assertDocsExistInAllReplicas(List<Replica> notLeaders,
-      String testCollectionName, int firstDocId, int lastDocId) throws Exception {
+  private void assertDocsExistInAllReplicas(
+      List<Replica> notLeaders, String testCollectionName, int firstDocId, int lastDocId)
+      throws Exception {
     Replica leader =
-
cluster.getSolrClient().getZkStateReader().getLeaderRetry(testCollectionName, "shard1", 10000); + cluster + .getSolrClient() + .getZkStateReader() + .getLeaderRetry(testCollectionName, "shard1", 10000); HttpSolrClient leaderSolr = getHttpSolrClient(leader, testCollectionName); - List replicas = - new ArrayList(notLeaders.size()); + List replicas = new ArrayList(notLeaders.size()); for (Replica r : notLeaders) { replicas.add(getHttpSolrClient(r, testCollectionName)); @@ -299,11 +341,20 @@ private void assertDocsExistInAllReplicas(List notLeaders, private void assertDocExists(HttpSolrClient solr, String coll, String docId) throws Exception { NamedList rsp = realTimeGetDocId(solr, docId); String match = JSONTestUtil.matchObj("/id", rsp.get("doc"), docId); - assertTrue("Doc with id=" + docId + " not found in " + solr.getBaseURL() - + " due to: " + match + "; rsp="+rsp, match == null); + assertTrue( + "Doc with id=" + + docId + + " not found in " + + solr.getBaseURL() + + " due to: " + + match + + "; rsp=" + + rsp, + match == null); } - private NamedList realTimeGetDocId(HttpSolrClient solr, String docId) throws SolrServerException, IOException { + private NamedList realTimeGetDocId(HttpSolrClient solr, String docId) + throws SolrServerException, IOException { QueryRequest qr = new QueryRequest(params("qt", "/get", "id", docId, "distrib", "false")); return solr.request(qr); } @@ -313,5 +364,4 @@ protected HttpSolrClient getHttpSolrClient(Replica replica, String coll) throws String url = zkProps.getBaseUrl() + "/" + coll; return getHttpSolrClient(url); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudDeleteByQuery.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudDeleteByQuery.java index 7db38db4737..3ad3c022d72 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCloudDeleteByQuery.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudDeleteByQuery.java @@ -22,7 +22,6 @@ import java.util.Arrays; import java.util.HashMap; import java.util.Map; - import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -50,36 +49,36 @@ public class TestCloudDeleteByQuery extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private static final int NUM_SHARDS = 2; - private static final int REPLICATION_FACTOR = 2; - private static final int NUM_SERVERS = 5; - + private static final int NUM_SHARDS = 2; + private static final int REPLICATION_FACTOR = 2; + private static final int NUM_SERVERS = 5; + private static final String COLLECTION_NAME = "test_col"; - + /** A basic client for operations at the cloud level, default collection will be set */ private static CloudSolrClient CLOUD_CLIENT; /** A client for talking directly to the leader of shard1 */ private static HttpSolrClient S_ONE_LEADER_CLIENT; - + /** A client for talking directly to the leader of shard2 */ private static HttpSolrClient S_TWO_LEADER_CLIENT; /** A client for talking directly to a passive replica of shard1 */ private static HttpSolrClient S_ONE_NON_LEADER_CLIENT; - + /** A client for talking directly to a passive replica of shard2 */ private static HttpSolrClient S_TWO_NON_LEADER_CLIENT; /** A client for talking directly to a node that has no piece of the collection */ private static HttpSolrClient NO_COLLECTION_CLIENT; - + /** id field doc routing prefix for shard1 */ private static final String S_ONE_PRE = "abc!"; - + 
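These prefixes lean on Solr's compositeId router: the part of the id before '!' is hashed to choose the shard, so every id sharing a prefix lands on the same shard. A small illustration of the prefix extraction (the class and the printed mapping are illustrative; Solr's actual hash is not reproduced here):

final class CompositeIdRouting {
  // Everything before '!' is the route key; ids without '!' hash as a whole.
  static String routeKey(String docId) {
    int bang = docId.indexOf('!');
    return bang >= 0 ? docId.substring(0, bang) : docId;
  }

  public static void main(String[] args) {
    // "abc!1" and "abc!42" share the route key "abc" and therefore a shard,
    // which is what lets the test target shard1 and shard2 deterministically.
    System.out.println(routeKey("abc!1"));  // abc
    System.out.println(routeKey("XYZ!99")); // XYZ
  }
}

The matching routing prefix for shard2 follows.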
/** id field doc routing prefix for shard2 */ private static final String S_TWO_PRE = "XYZ!"; - + @AfterClass private static void afterClass() throws Exception { if (null != CLOUD_CLIENT) { @@ -107,29 +106,28 @@ private static void afterClass() throws Exception { NO_COLLECTION_CLIENT = null; } } - + @BeforeClass private static void createMiniSolrCloudCluster() throws Exception { - + final String configName = "solrCloudCollectionConfig"; final Path configDir = TEST_COLL1_CONF(); - - configureCluster(NUM_SERVERS) - .addConfig(configName, configDir) - .configure(); - + + configureCluster(NUM_SERVERS).addConfig(configName, configDir).configure(); + Map collectionProperties = new HashMap<>(); collectionProperties.put("config", "solrconfig-tlog.xml"); collectionProperties.put("schema", "schema15.xml"); // string id for doc routing prefix - CollectionAdminRequest.createCollection(COLLECTION_NAME, configName, NUM_SHARDS, REPLICATION_FACTOR) + CollectionAdminRequest.createCollection( + COLLECTION_NAME, configName, NUM_SHARDS, REPLICATION_FACTOR) .setProperties(collectionProperties) .process(cluster.getSolrClient()); cluster.waitForActiveCollection(COLLECTION_NAME, NUM_SHARDS, REPLICATION_FACTOR * NUM_SHARDS); CLOUD_CLIENT = cluster.getSolrClient(); CLOUD_CLIENT.setDefaultCollection(COLLECTION_NAME); - + ZkStateReader zkStateReader = CLOUD_CLIENT.getZkStateReader(); // really hackish way to get a URL for specific nodes based on shard/replica hosting @@ -137,7 +135,8 @@ private static void createMiniSolrCloudCluster() throws Exception { HashMap urlMap = new HashMap<>(); for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { URL jettyURL = jetty.getBaseUrl(); - String nodeKey = jettyURL.getHost() + ":" + jettyURL.getPort() + jettyURL.getPath().replace("/","_"); + String nodeKey = + jettyURL.getHost() + ":" + jettyURL.getPort() + jettyURL.getPath().replace("/", "_"); urlMap.put(nodeKey, jettyURL.toString()); } ClusterState clusterState = zkStateReader.getClusterState(); @@ -147,18 +146,19 @@ private static void createMiniSolrCloudCluster() throws Exception { assertNotNull("slice has null leader: " + slice.toString(), leader); assertNotNull("slice leader has null node name: " + slice.toString(), leader.getNodeName()); String leaderUrl = urlMap.remove(leader.getNodeName()); - assertNotNull("could not find URL for " + shardName + " leader: " + leader.getNodeName(), - leaderUrl); - assertEquals("expected two total replicas for: " + slice.getName(), - 2, slice.getReplicas().size()); - + assertNotNull( + "could not find URL for " + shardName + " leader: " + leader.getNodeName(), leaderUrl); + assertEquals( + "expected two total replicas for: " + slice.getName(), 2, slice.getReplicas().size()); + String passiveUrl = null; - + for (Replica replica : slice.getReplicas()) { - if ( ! 
replica.equals(leader)) { + if (!replica.equals(leader)) { passiveUrl = urlMap.remove(replica.getNodeName()); - assertNotNull("could not find URL for " + shardName + " replica: " + replica.getNodeName(), - passiveUrl); + assertNotNull( + "could not find URL for " + shardName + " replica: " + replica.getNodeName(), + passiveUrl); } } assertNotNull("could not find URL for " + shardName + " replica", passiveUrl); @@ -174,38 +174,55 @@ private static void createMiniSolrCloudCluster() throws Exception { } } assertEquals("Should be exactly one server left (nost hosting either shard)", 1, urlMap.size()); - NO_COLLECTION_CLIENT = getHttpSolrClient(urlMap.values().iterator().next() + - "/" + COLLECTION_NAME + "/"); - + NO_COLLECTION_CLIENT = + getHttpSolrClient(urlMap.values().iterator().next() + "/" + COLLECTION_NAME + "/"); + assertNotNull(S_ONE_LEADER_CLIENT); assertNotNull(S_TWO_LEADER_CLIENT); assertNotNull(S_ONE_NON_LEADER_CLIENT); assertNotNull(S_TWO_NON_LEADER_CLIENT); assertNotNull(NO_COLLECTION_CLIENT); - // sanity check that our S_ONE_PRE & S_TWO_PRE really do map to shard1 & shard2 with default routing - assertEquals(0, CLOUD_CLIENT.add(doc(f("id", S_ONE_PRE + random().nextInt()), - f("expected_shard_s", "shard1"))).getStatus()); - assertEquals(0, CLOUD_CLIENT.add(doc(f("id", S_TWO_PRE + random().nextInt()), - f("expected_shard_s", "shard2"))).getStatus()); + // sanity check that our S_ONE_PRE & S_TWO_PRE really do map to shard1 & shard2 with default + // routing + assertEquals( + 0, + CLOUD_CLIENT + .add(doc(f("id", S_ONE_PRE + random().nextInt()), f("expected_shard_s", "shard1"))) + .getStatus()); + assertEquals( + 0, + CLOUD_CLIENT + .add(doc(f("id", S_TWO_PRE + random().nextInt()), f("expected_shard_s", "shard2"))) + .getStatus()); assertEquals(0, CLOUD_CLIENT.commit().getStatus()); - SolrDocumentList docs = CLOUD_CLIENT.query(params("q", "*:*", - "fl","id,expected_shard_s,[shard]")).getResults(); + SolrDocumentList docs = + CLOUD_CLIENT + .query( + params( + "q", "*:*", + "fl", "id,expected_shard_s,[shard]")) + .getResults(); assertEquals(2, docs.getNumFound()); assertEquals(2, docs.size()); for (SolrDocument doc : docs) { String expected = COLLECTION_NAME + "_" + doc.getFirstValue("expected_shard_s") + "_replica"; String docShard = doc.getFirstValue("[shard]").toString(); - assertTrue("shard routing prefixes don't seem to be aligned anymore, " + - "did someone change the default routing rules? " + - "and/or the the default core name rules? " + - "and/or the numShards used by this test? ... " + - "couldn't find " + expected + " as substring of [shard] == '" + docShard + - "' ... for docId == " + doc.getFirstValue("id"), - docShard.contains(expected)); + assertTrue( + "shard routing prefixes don't seem to be aligned anymore, " + + "did someone change the default routing rules? " + + "and/or the the default core name rules? " + + "and/or the numShards used by this test? ... " + + "couldn't find " + + expected + + " as substring of [shard] == '" + + docShard + + "' ... 
for docId == " + + doc.getFirstValue("id"), + docShard.contains(expected)); } } - + @Before private void clearCloudCollection() throws Exception { assertEquals(0, CLOUD_CLIENT.deleteByQuery("*:*").getStatus()); @@ -214,9 +231,11 @@ private void clearCloudCollection() throws Exception { public void testMalformedDBQ(SolrClient client) throws Exception { assertNotNull("client not initialized", client); - SolrException e = expectThrows(SolrException.class, - "Expected DBQ failure", - () -> update(params()).deleteByQuery("foo_i:not_a_num").process(client)); + SolrException e = + expectThrows( + SolrException.class, + "Expected DBQ failure", + () -> update(params()).deleteByQuery("foo_i:not_a_num").process(client)); assertEquals("not the expected DBQ failure: " + e.getMessage(), 400, e.code()); } @@ -224,18 +243,23 @@ public void testMalformedDBQ(SolrClient client) throws Exception { public void testMalformedDBQViaCloudClient() throws Exception { testMalformedDBQ(CLOUD_CLIENT); } + public void testMalformedDBQViaShard1LeaderClient() throws Exception { testMalformedDBQ(S_ONE_LEADER_CLIENT); } + public void testMalformedDBQViaShard2LeaderClient() throws Exception { testMalformedDBQ(S_TWO_LEADER_CLIENT); } + public void testMalformedDBQViaShard1NonLeaderClient() throws Exception { testMalformedDBQ(S_ONE_NON_LEADER_CLIENT); } + public void testMalformedDBQViaShard2NonLeaderClient() throws Exception { testMalformedDBQ(S_TWO_NON_LEADER_CLIENT); } + public void testMalformedDBQViaNoCollectionClient() throws Exception { testMalformedDBQ(NO_COLLECTION_CLIENT); } @@ -246,7 +270,7 @@ public static UpdateRequest update(SolrParams params, SolrInputDocument... docs) r.add(Arrays.asList(docs)); return r; } - + public static SolrInputDocument doc(SolrInputField... fields) { SolrInputDocument doc = new SolrInputDocument(); for (SolrInputField f : fields) { @@ -254,7 +278,7 @@ public static SolrInputDocument doc(SolrInputField... fields) { } return doc; } - + public static SolrInputField f(String fieldName, Object... 
values) { SolrInputField f = new SolrInputField(fieldName); f.setValue(values); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudInspectUtil.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudInspectUtil.java index 6031e03937a..125a10f3c27 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCloudInspectUtil.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudInspectUtil.java @@ -18,7 +18,6 @@ import java.util.HashSet; import java.util.Set; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; @@ -27,91 +26,89 @@ import org.junit.Test; public class TestCloudInspectUtil extends SolrTestCaseJ4 { - + @Override @Before public void setUp() throws Exception { super.setUp(); - - } - + @Override @After public void tearDown() throws Exception { - + super.tearDown(); } - + @Test public void testCheckIfDiffIsLegal() throws Exception { Set addFails = null; Set deleteFails = null; SolrDocumentList a = getDocList("2", "3"); SolrDocumentList b = getDocList("1"); - boolean legal = CloudInspectUtil.checkIfDiffIsLegal(a, b, "control", "cloud", addFails, - deleteFails); - + boolean legal = + CloudInspectUtil.checkIfDiffIsLegal(a, b, "control", "cloud", addFails, deleteFails); + assertFalse(legal); - + // ################################ - + addFails = new HashSet(); deleteFails = new HashSet(); - + a = getDocList("2", "3", "4"); b = getDocList("2", "3"); addFails.add("4"); - - legal = CloudInspectUtil.checkIfDiffIsLegal(a, b, "control", "cloud", addFails, - deleteFails); - + + legal = CloudInspectUtil.checkIfDiffIsLegal(a, b, "control", "cloud", addFails, deleteFails); + assertTrue(legal); - + // ################################ - + addFails = new HashSet(); deleteFails = new HashSet(); - + a = getDocList("2", "3", "4"); b = getDocList("2", "3", "5"); addFails.add("4"); deleteFails.add("5"); - - legal = CloudInspectUtil.checkIfDiffIsLegal(a, b, "control", "cloud", addFails, - deleteFails); - + + legal = CloudInspectUtil.checkIfDiffIsLegal(a, b, "control", "cloud", addFails, deleteFails); + assertTrue(legal); - + // ################################ - + addFails = new HashSet(); deleteFails = new HashSet(); - + a = getDocList("2", "3", "4"); b = getDocList("2", "3", "5"); addFails.add("4"); deleteFails.add("6"); - - legal = CloudInspectUtil.checkIfDiffIsLegal(a, b, "control", "cloud", addFails, - deleteFails); - + + legal = CloudInspectUtil.checkIfDiffIsLegal(a, b, "control", "cloud", addFails, deleteFails); + assertFalse(legal); - + // ################################ - + final HashSet addFailsExpectEx = new HashSet(); final HashSet deleteFailsExpectEx = new HashSet(); - + final SolrDocumentList aExpectEx = getDocList("2", "3", "4"); final SolrDocumentList bExpectEx = getDocList("2", "3", "4"); - expectThrows(IllegalArgumentException.class, "Expected exception because lists have no diff", - () -> CloudInspectUtil.checkIfDiffIsLegal(aExpectEx, bExpectEx, - "control", "cloud", addFailsExpectEx, deleteFailsExpectEx)); + expectThrows( + IllegalArgumentException.class, + "Expected exception because lists have no diff", + () -> + CloudInspectUtil.checkIfDiffIsLegal( + aExpectEx, bExpectEx, "control", "cloud", addFailsExpectEx, deleteFailsExpectEx)); } - private SolrDocumentList getDocList(String ... ids) { + private SolrDocumentList getDocList(String... 
ids) { SolrDocumentList list = new SolrDocumentList(); for (String id : ids) { SolrDocument doc = new SolrDocument(); @@ -120,5 +117,4 @@ private SolrDocumentList getDocList(String ... ids) { } return list; } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudPhrasesIdentificationComponent.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudPhrasesIdentificationComponent.java index 7170da849fe..bef999bd980 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCloudPhrasesIdentificationComponent.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudPhrasesIdentificationComponent.java @@ -18,13 +18,12 @@ import java.lang.invoke.MethodHandles; import java.nio.file.Path; -import java.util.Arrays; import java.util.ArrayList; +import java.util.Arrays; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Random; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.lucene.util.TestUtil; import org.apache.solr.client.solrj.SolrClient; @@ -36,13 +35,12 @@ import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; - import org.junit.AfterClass; import org.junit.BeforeClass; -/** - * A very simple sanity check that Phrase Identification works across a cloud cluster - * using distributed term stat collection. +/** + * A very simple sanity check that Phrase Identification works across a cloud cluster using + * distributed term stat collection. * * @see org.apache.solr.handler.component.PhrasesIdentificationComponentTest */ @@ -59,18 +57,18 @@ public class TestCloudPhrasesIdentificationComponent extends SolrCloudTestCase { @BeforeClass private static void createMiniSolrCloudCluster() throws Exception { - + // multi replicas should not matter... final int repFactor = usually() ? 1 : 2; // ... but we definitely want to test multiple shards - final int numShards = TestUtil.nextInt(random(), 1, (usually() ? 2 :3)); + final int numShards = TestUtil.nextInt(random(), 1, (usually() ? 2 : 3)); final int numNodes = (numShards * repFactor); - + final String configName = DEBUG_LABEL + "_config-set"; final Path configDir = TEST_COLL1_CONF(); configureCluster(numNodes).addConfig(configName, configDir).configure(); - + Map collectionProperties = new LinkedHashMap<>(); collectionProperties.put("config", "solrconfig-phrases-identification.xml"); collectionProperties.put("schema", "schema-phrases-identification.xml"); @@ -88,21 +86,26 @@ private static void createMiniSolrCloudCluster() throws Exception { } // index some docs... - CLOUD_CLIENT.add - (sdoc("id", "42", - "title","Tale of the Brown Fox: was he lazy?", + CLOUD_CLIENT.add( + sdoc( + "id", "42", + "title", "Tale of the Brown Fox: was he lazy?", "body", "No. The quick brown fox was a very brown fox who liked to get into trouble.")); - CLOUD_CLIENT.add - (sdoc("id", "43", - "title","A fable in two acts", + CLOUD_CLIENT.add( + sdoc( + "id", "43", + "title", "A fable in two acts", "body", "The brOwn fOx jumped. 
The lazy dog did not")); - CLOUD_CLIENT.add - (sdoc("id", "44", - "title","Why the LazY dog was lazy", - "body", "News flash: Lazy Dog was not actually lazy, it just seemd so compared to Fox")); - CLOUD_CLIENT.add - (sdoc("id", "45", - "title","Why Are We Lazy?", + CLOUD_CLIENT.add( + sdoc( + "id", "44", + "title", "Why the LazY dog was lazy", + "body", + "News flash: Lazy Dog was not actually lazy, it just seemd so compared to Fox")); + CLOUD_CLIENT.add( + sdoc( + "id", "45", + "title", "Why Are We Lazy?", "body", "Because we are. that's why")); CLOUD_CLIENT.commit(); } @@ -122,14 +125,16 @@ private static void afterClass() throws Exception { public void testBasicPhrases() throws Exception { final String input = " did a Quick brown FOX perniciously jump over the lazy dog"; final String expected = " did a Quick {brown FOX} perniciously jump over {the lazy dog}"; - + // based on the documents indexed, these assertions should all pass regardless of // how many shards we have, or wether the request is done via /phrases or /select... for (String path : Arrays.asList("/select", "/phrases")) { // ... or if we muck with "q" and use the alternative phrases.q for the bits we care about... - for (SolrParams p : Arrays.asList(params("q", input, "phrases", "true"), - params("q", "*:*", "phrases.q", input, "phrases", "true"), - params("q", "-*:*", "phrases.q", input, "phrases", "true"))) { + for (SolrParams p : + Arrays.asList( + params("q", input, "phrases", "true"), + params("q", "*:*", "phrases.q", input, "phrases", "true"), + params("q", "-*:*", "phrases.q", input, "phrases", "true"))) { final QueryRequest req = new QueryRequest(p); req.setPath(path); final QueryResponse rsp = req.process(getRandClient(random())); @@ -138,20 +143,22 @@ public void testBasicPhrases() throws Exception { NamedList phrases = (NamedList) rsp.getResponse().get("phrases"); assertEquals("input", input, phrases.get("input")); assertEquals("summary", expected, phrases.get("summary")); - + @SuppressWarnings({"unchecked"}) final List> details = (List>) phrases.get("details"); assertNotNull("null details", details); assertEquals("num phrases found", 2, details.size()); - + final NamedList lazy_dog = details.get(0); assertEquals("dog text", "the lazy dog", lazy_dog.get("text")); - assertEquals("dog score", 0.166666D, ((Double)lazy_dog.get("score")).doubleValue(), 0.000001D); - + assertEquals( + "dog score", 0.166666D, ((Double) lazy_dog.get("score")).doubleValue(), 0.000001D); + final NamedList brown_fox = details.get(1); assertEquals("fox text", "brown FOX", brown_fox.get("text")); - assertEquals("fox score", 0.083333D, ((Double)brown_fox.get("score")).doubleValue(), 0.000001D); - + assertEquals( + "fox score", 0.083333D, ((Double) brown_fox.get("score")).doubleValue(), 0.000001D); + } catch (AssertionError e) { throw new AssertionError(e.getMessage() + " ::: " + path + " ==> " + rsp, e); } @@ -162,8 +169,10 @@ public void testBasicPhrases() throws Exception { public void testEmptyInput() throws Exception { // empty input shouldn't error, just produce empty results... 
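[editor's note -- illustrative, not part of the patch] The assertions reformatted above all navigate the same /phrases response shape, which the diff only shows in fragments. A minimal sketch of that navigation, assuming a QueryResponse rsp obtained from the /phrases handler as in this file (variable names here are illustrative):

    // the component's output lives under the top-level "phrases" key of the response
    @SuppressWarnings("unchecked")
    NamedList<Object> phrases = (NamedList<Object>) rsp.getResponse().get("phrases");
    String input = (String) phrases.get("input");     // echoes phrases.q (or q) verbatim
    String summary = (String) phrases.get("summary"); // input with {...} around each detected phrase
    @SuppressWarnings("unchecked")
    List<NamedList<Object>> details = (List<NamedList<Object>>) phrases.get("details");
    // for empty input: no error, summary equals input, and details is an empty list
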
for (String input : Arrays.asList("", " ")) { - for (SolrParams p : Arrays.asList(params("q", "*:*", "phrases.q", input, "phrases", "true"), - params("q", "-*:*", "phrases.q", input, "phrases", "true"))) { + for (SolrParams p : + Arrays.asList( + params("q", "*:*", "phrases.q", input, "phrases", "true"), + params("q", "-*:*", "phrases.q", input, "phrases", "true"))) { final QueryRequest req = new QueryRequest(p); req.setPath("/phrases"); final QueryResponse rsp = req.process(getRandClient(random())); @@ -172,12 +181,12 @@ public void testEmptyInput() throws Exception { NamedList phrases = (NamedList) rsp.getResponse().get("phrases"); assertEquals("input", input, phrases.get("input")); assertEquals("summary", input, phrases.get("summary")); - + @SuppressWarnings({"unchecked"}) final List> details = (List>) phrases.get("details"); assertNotNull("null details", details); assertEquals("num phrases found", 0, details.size()); - + } catch (AssertionError e) { throw new AssertionError(e.getMessage() + " ==> " + rsp, e); } @@ -185,9 +194,9 @@ public void testEmptyInput() throws Exception { } } - /** - * returns a random SolrClient -- either a CloudSolrClient, or an HttpSolrClient pointed - * at a node in our cluster + /** + * returns a random SolrClient -- either a CloudSolrClient, or an HttpSolrClient pointed at a node + * in our cluster */ public static SolrClient getRandClient(Random rand) { int numClients = CLIENTS.size(); @@ -198,9 +207,7 @@ public static SolrClient getRandClient(Random rand) { public static void waitForRecoveriesToFinish(CloudSolrClient client) throws Exception { assert null != client.getDefaultCollection(); - AbstractDistribZkTestBase.waitForRecoveriesToFinish(client.getDefaultCollection(), - client.getZkStateReader(), - true, true, 330); + AbstractDistribZkTestBase.waitForRecoveriesToFinish( + client.getDefaultCollection(), client.getZkStateReader(), true, true, 330); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java index 64f3466941b..2437f11ed9c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java @@ -16,6 +16,16 @@ */ package org.apache.solr.cloud; +import static org.apache.solr.common.params.FacetParams.FACET; +import static org.apache.solr.common.params.FacetParams.FACET_LIMIT; +import static org.apache.solr.common.params.FacetParams.FACET_MISSING; +import static org.apache.solr.common.params.FacetParams.FACET_OFFSET; +import static org.apache.solr.common.params.FacetParams.FACET_OVERREQUEST_COUNT; +import static org.apache.solr.common.params.FacetParams.FACET_OVERREQUEST_RATIO; +import static org.apache.solr.common.params.FacetParams.FACET_PIVOT; +import static org.apache.solr.common.params.FacetParams.FACET_PIVOT_MINCOUNT; +import static org.apache.solr.common.params.FacetParams.FACET_SORT; + import java.io.IOException; import java.lang.invoke.MethodHandles; import java.util.Arrays; @@ -26,7 +36,6 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; - import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4.SuppressSSL; import org.apache.solr.client.solrj.SolrServerException; @@ -44,37 +53,18 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.common.params.FacetParams.FACET; -import static org.apache.solr.common.params.FacetParams.FACET_LIMIT; -import static 
org.apache.solr.common.params.FacetParams.FACET_MISSING;
-import static org.apache.solr.common.params.FacetParams.FACET_OFFSET;
-import static org.apache.solr.common.params.FacetParams.FACET_OVERREQUEST_COUNT;
-import static org.apache.solr.common.params.FacetParams.FACET_OVERREQUEST_RATIO;
-import static org.apache.solr.common.params.FacetParams.FACET_PIVOT;
-import static org.apache.solr.common.params.FacetParams.FACET_PIVOT_MINCOUNT;
-import static org.apache.solr.common.params.FacetParams.FACET_SORT;
-
 /**
- * <p>
  * Randomized testing of Pivot Faceting using SolrCloud.
- * </p>
- * <p>
- * After indexing a bunch of random docs, picks some random fields to pivot facet on,
- * and then confirms that the resulting counts match the results of filtering on those
- * values. This gives us strong assertions on the correctness of the total counts for
- * each pivot value, but no assertions that the correct "top" counts were chosen.
- * </p>
- * <p>
- * NOTE: this test ignores the control collection and only deals with the
- * CloudSolrServer - this is because the randomized field values make it very easy for
- * the term stats to miss values even with the overrequest.
- * (because so many values will tie for "1"). What we care about here is
- * that the counts we get back are correct and match what we get when filtering on those
- * constraints.
- * </p>
- *
+ *
+ * <p>After indexing a bunch of random docs, picks some random fields to pivot facet on, and then
+ * confirms that the resulting counts match the results of filtering on those values. This gives us
+ * strong assertions on the correctness of the total counts for each pivot value, but no assertions
+ * that the correct "top" counts were chosen.
+ *
+ * <p>
NOTE: this test ignores the control collection and only deals with the CloudSolrServer - this + * is because the randomized field values make it very easy for the term stats to miss values even + * with the overrequest. (because so many values will tie for "1"). What we care about here is that + * the counts we get back are correct and match what we get when filtering on those constraints. */ @SuppressSSL // Too Slow public class TestCloudPivotFacet extends AbstractFullDistribZkTestBase { @@ -85,7 +75,7 @@ public class TestCloudPivotFacet extends AbstractFullDistribZkTestBase { // any stats that can be lossy -- the purpose of testing stats here is just to sanity check // that the basic hooks between pivot faceting and stats.field work, and these let us do that private static final String USE_STATS = "count=true missing=true min=true max=true"; - + // param used by test purely for tracing & validation private static String TRACE_MIN = "_test_min"; // param used by test purely for tracing & validation @@ -95,12 +85,14 @@ public class TestCloudPivotFacet extends AbstractFullDistribZkTestBase { public TestCloudPivotFacet() { // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); } - - /** - * Controls the odds of any given doc having a value in any given field -- as this gets lower, - * the counts for "facet.missing" pivots should increase. + + /** + * Controls the odds of any given doc having a value in any given field -- as this gets lower, the + * counts for "facet.missing" pivots should increase. + * * @see #useField() */ private static int useFieldRandomizedFactor = -1; @@ -112,17 +104,16 @@ public static void initUseFieldRandomizedFactor() { } @Test - //commented 2-Aug-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 28-June-2018 public void test() throws Exception { waitForThingsToLevelOut(30, TimeUnit.SECONDS); // TODO: why would we have to wait? - // + // handle.clear(); handle.put("QTime", SKIPVAL); handle.put("timestamp", SKIPVAL); - + final Set fieldNameSet = new HashSet<>(); - + // build up a randomized index final int numDocs = atLeast(500); log.info("numDocs: {}", numDocs); @@ -148,59 +139,62 @@ public void test() throws Exception { String q = "*:*"; if (random().nextBoolean()) { - q = "id:[* TO " + TestUtil.nextInt(random(),300,numDocs) + "]"; + q = "id:[* TO " + TestUtil.nextInt(random(), 300, numDocs) + "]"; } ModifiableSolrParams baseP = params("rows", "0", "q", q); - + if (random().nextBoolean()) { - baseP.add("fq", "id:[* TO " + TestUtil.nextInt(random(),200,numDocs) + "]"); + baseP.add("fq", "id:[* TO " + TestUtil.nextInt(random(), 200, numDocs) + "]"); } final boolean stats = random().nextBoolean(); if (stats) { baseP.add(StatsParams.STATS, "true"); - + // if we are doing stats, then always generated the same # of STATS_FIELD // params, using multiple tags from a fixed set, but with diff fieldName values. // later, each pivot will randomly pick a tag. 
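[editor's note -- illustrative, not part of the patch] The tag scheme in the comment above is the crux of the stats/pivot coupling this test exercises: each stats.field gets a key (sk1..sk3) plus two tags from a fixed set (st1..st4), and each pivot later opts into at most one tag via the {!stats=...} localparam (one tag only -- see the SOLR-6663 note further down in this file). A minimal standalone sketch of that wiring, with field names that are illustrative rather than taken from the patch:

    // three tagged stats fields; adjacent keys share a tag, so picking tag st2 selects sk1 and sk2
    ModifiableSolrParams p = new ModifiableSolrParams();
    p.add(StatsParams.STATS, "true");
    p.add(StatsParams.STATS_FIELD, "{!key=sk1 tag=st1,st2 count=true min=true max=true}pivot_i1");
    p.add(StatsParams.STATS_FIELD, "{!key=sk2 tag=st2,st3 count=true min=true max=true}pivot_l1");
    p.add(FacetParams.FACET, "true");
    p.add(FacetParams.FACET_PIVOT, "{!stats=st2}pivot_x_s1,pivot_y_s1");
    // each pivot bucket in the response then carries FieldStatsInfo entries keyed sk1 and sk2
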
- baseP.add(StatsParams.STATS_FIELD, "{!key=sk1 tag=st1,st2 "+USE_STATS+"}" + - pickRandomStatsFields(fieldNames)); - baseP.add(StatsParams.STATS_FIELD, "{!key=sk2 tag=st2,st3 "+USE_STATS+"}" + - pickRandomStatsFields(fieldNames)); - baseP.add(StatsParams.STATS_FIELD, "{!key=sk3 tag=st3,st4 "+USE_STATS+"}" + - pickRandomStatsFields(fieldNames)); + baseP.add( + StatsParams.STATS_FIELD, + "{!key=sk1 tag=st1,st2 " + USE_STATS + "}" + pickRandomStatsFields(fieldNames)); + baseP.add( + StatsParams.STATS_FIELD, + "{!key=sk2 tag=st2,st3 " + USE_STATS + "}" + pickRandomStatsFields(fieldNames)); + baseP.add( + StatsParams.STATS_FIELD, + "{!key=sk3 tag=st3,st4 " + USE_STATS + "}" + pickRandomStatsFields(fieldNames)); // NOTE: there's a chance that some of those stats field names // will be the same, but if so, all the better to test that edge case } - - ModifiableSolrParams pivotP = params(FACET,"true"); + + ModifiableSolrParams pivotP = params(FACET, "true"); // put our FACET_PIVOT params in a set in case we just happen to pick the same one twice LinkedHashSet pivotParamValues = new LinkedHashSet(); pivotParamValues.add(buildPivotParamValue(buildRandomPivot(fieldNames))); - + if (random().nextBoolean()) { pivotParamValues.add(buildPivotParamValue(buildRandomPivot(fieldNames))); } pivotP.set(FACET_PIVOT, pivotParamValues.toArray(new String[pivotParamValues.size()])); // keep limit low - lots of unique values, and lots of depth in pivots - pivotP.add(FACET_LIMIT, ""+TestUtil.nextInt(random(),1,17)); + pivotP.add(FACET_LIMIT, "" + TestUtil.nextInt(random(), 1, 17)); // sometimes use an offset if (random().nextBoolean()) { - pivotP.add(FACET_OFFSET, ""+TestUtil.nextInt(random(),0,7)); + pivotP.add(FACET_OFFSET, "" + TestUtil.nextInt(random(), 0, 7)); } if (random().nextBoolean()) { - String min = ""+TestUtil.nextInt(random(),0,numDocs+10); + String min = "" + TestUtil.nextInt(random(), 0, numDocs + 10); pivotP.add(FACET_PIVOT_MINCOUNT, min); // trace param for validation baseP.add(TRACE_MIN, min); } - + if (random().nextBoolean()) { - String missing = ""+random().nextBoolean(); + String missing = "" + random().nextBoolean(); pivotP.add(FACET_MISSING, missing); // trace param for validation baseP.add(TRACE_MISS, missing); @@ -215,17 +209,17 @@ public void test() throws Exception { // overrequest // - // NOTE: since this test focuses on accuracy of refinement, and doesn't do + // NOTE: since this test focuses on accuracy of refinement, and doesn't do // control collection comparisons, there isn't a lot of need for excessive // overrequesting -- we focus here on trying to exercise the various edge cases // involved as different values are used with overrequest - if (0 == TestUtil.nextInt(random(),0,4)) { + if (0 == TestUtil.nextInt(random(), 0, 4)) { // we want a decent chance of no overrequest at all pivotP.add(FACET_OVERREQUEST_COUNT, "0"); pivotP.add(FACET_OVERREQUEST_RATIO, "0"); } else { if (random().nextBoolean()) { - pivotP.add(FACET_OVERREQUEST_COUNT, ""+TestUtil.nextInt(random(),0,5)); + pivotP.add(FACET_OVERREQUEST_COUNT, "" + TestUtil.nextInt(random(), 0, 5)); } if (random().nextBoolean()) { // sometimes give a ratio less then 1, code should be smart enough to deal @@ -234,23 +228,22 @@ public void test() throws Exception { if (random().nextBoolean()) { ratio *= -1; } - pivotP.add(FACET_OVERREQUEST_RATIO, ""+ratio); + pivotP.add(FACET_OVERREQUEST_RATIO, "" + ratio); } } - + assertPivotCountsAreCorrect(baseP, pivotP); } } /** - * Given some query params, executes the request against the 
cloudClient and - * then walks the pivot facet values in the response, treating each one as a - * filter query to assert the pivot counts are correct. + * Given some query params, executes the request against the cloudClient and then walks the pivot + * facet values in the response, treating each one as a filter query to assert the pivot counts + * are correct. */ - private void assertPivotCountsAreCorrect(SolrParams baseParams, - SolrParams pivotParams) - throws SolrServerException { - + private void assertPivotCountsAreCorrect(SolrParams baseParams, SolrParams pivotParams) + throws SolrServerException { + SolrParams initParams = SolrParams.wrapAppended(pivotParams, baseParams); log.info("Doing full run: {}", initParams); @@ -261,17 +254,16 @@ private void assertPivotCountsAreCorrect(SolrParams baseParams, QueryResponse initResponse = cloudClient.query(initParams); pivots = initResponse.getFacetPivot(); assertNotNull(initParams + " has null pivots?", pivots); - assertEquals(initParams + " num pivots", - initParams.getParams("facet.pivot").length, pivots.size()); + assertEquals( + initParams + " num pivots", initParams.getParams("facet.pivot").length, pivots.size()); } catch (Exception e) { - throw new RuntimeException("init query failed: " + initParams + ": " + - e.getMessage(), e); + throw new RuntimeException("init query failed: " + initParams + ": " + e.getMessage(), e); } try { - for (Map.Entry> pivot : pivots) { + for (Map.Entry> pivot : pivots) { final String pivotKey = pivot.getKey(); // :HACK: for counting the max possible pivot depth - final int maxDepth = 1 + pivotKey.length() - pivotKey.replace(",","").length(); + final int maxDepth = 1 + pivotKey.length() - pivotKey.replace(",", "").length(); assertTraceOk(pivotKey, baseParams, pivot.getValue()); @@ -282,41 +274,35 @@ private void assertPivotCountsAreCorrect(SolrParams baseParams, // will catch it. for (PivotField constraint : pivot.getValue()) { int depth = assertPivotCountsAreCorrect(pivotKey, baseParams, constraint); - + // we can't assert that the depth reached is the same as the depth requested // because the fq and/or mincount may have pruned the tree too much - assertTrue("went too deep: "+depth+": " + pivotKey + " ==> " + pivot, - depth <= maxDepth); - + assertTrue( + "went too deep: " + depth + ": " + pivotKey + " ==> " + pivot, depth <= maxDepth); } } } catch (AssertionError e) { throw new AssertionError(initParams + " ==> " + e.getMessage(), e); } finally { - log.info("Ending full run (countNumFoundChecks={}): {}", - countNumFoundChecks, initParams); + log.info("Ending full run (countNumFoundChecks={}): {}", countNumFoundChecks, initParams); } } - + /** - * Recursive Helper method for asserting that pivot constraint counts match - * results when filtering on those constraints. Returns the recursive depth reached - * (for sanity checking) + * Recursive Helper method for asserting that pivot constraint counts match results when filtering + * on those constraints. 
Returns the recursive depth reached (for sanity checking) */ - private int assertPivotCountsAreCorrect(String pivotName, - SolrParams baseParams, - PivotField constraint) - throws SolrServerException { + private int assertPivotCountsAreCorrect( + String pivotName, SolrParams baseParams, PivotField constraint) throws SolrServerException { - SolrParams p = SolrParams.wrapAppended(baseParams, - params("fq", buildFilter(constraint))); + SolrParams p = SolrParams.wrapAppended(baseParams, params("fq", buildFilter(constraint))); List subPivots = null; try { - assertPivotData(pivotName, constraint, p); + assertPivotData(pivotName, constraint, p); subPivots = constraint.getPivot(); } catch (Exception e) { - throw new RuntimeException(pivotName + ": count query failed: " + p + ": " + - e.getMessage(), e); + throw new RuntimeException( + pivotName + ": count query failed: " + p + ": " + e.getMessage(), e); } int depth = 0; if (null != subPivots) { @@ -330,9 +316,9 @@ private int assertPivotCountsAreCorrect(String pivotName, } /** - * Executes a query and compares the results with the data available in the - * {@link PivotField} constraint -- this method is not recursive, and doesn't - * check anything about the sub-pivots (if any). + * Executes a query and compares the results with the data available in the {@link PivotField} + * constraint -- this method is not recursive, and doesn't check anything about the sub-pivots (if + * any). * * @param pivotName pivot name * @param constraint filters on pivot @@ -340,22 +326,20 @@ private int assertPivotCountsAreCorrect(String pivotName, */ private void assertPivotData(String pivotName, PivotField constraint, SolrParams params) throws SolrServerException, IOException { - - SolrParams p = SolrParams.wrapDefaults(params("rows","0"), params); + + SolrParams p = SolrParams.wrapDefaults(params("rows", "0"), params); QueryResponse res = cloudClient.query(p); String msg = pivotName + ": " + p; assertNumFound(msg, constraint.getCount(), res); - if ( p.getBool(StatsParams.STATS, false) ) { + if (p.getBool(StatsParams.STATS, false)) { // only check stats if stats expected assertPivotStats(msg, constraint, res); } } - /** - * Compare top level stats in response with stats from pivot constraint - */ + /** Compare top level stats in response with stats from pivot constraint */ private void assertPivotStats(String message, PivotField constraint, QueryResponse response) { if (null == constraint.getFieldStatsInfo()) { @@ -365,7 +349,7 @@ private void assertPivotStats(String message, PivotField constraint, QueryRespon log.info("No stats to check for => {}", message); return; } - + Map actualFieldStatsInfoMap = response.getFieldStatsInfo(); for (FieldStatsInfo pivotStats : constraint.getFieldStatsInfo().values()) { @@ -376,14 +360,13 @@ private void assertPivotStats(String message, PivotField constraint, QueryRespon if (actualStats == null) { // handle case for not found stats (using stats query) // - // these has to be a special case check due to the legacy behavior of "top level" - // StatsComponent results being "null" (and not even included in the - // getFieldStatsInfo() Map due to specila SolrJ logic) + // these has to be a special case check due to the legacy behavior of "top level" + // StatsComponent results being "null" (and not even included in the + // getFieldStatsInfo() Map due to specila SolrJ logic) log.info("Requested stats missing in verification query, pivot stats: {}", pivotStats); assertEquals("Special Count", 0L, pivotStats.getCount().longValue()); - 
assertEquals("Special Missing", - constraint.getCount(), pivotStats.getMissing().longValue()); + assertEquals("Special Missing", constraint.getCount(), pivotStats.getMissing().longValue()); } else { // regular stats, compare everything... @@ -391,7 +374,7 @@ private void assertPivotStats(String message, PivotField constraint, QueryRespon assert actualStats != null; try { String msg = " of " + statsKey; - + // no wiggle room, these should always be exactly equals, regardless of field type assertEquals("Count" + msg, pivotStats.getCount(), actualStats.getCount()); assertEquals("Missing" + msg, pivotStats.getMissing(), actualStats.getMissing()); @@ -399,7 +382,9 @@ private void assertPivotStats(String message, PivotField constraint, QueryRespon assertEquals("Max" + msg, pivotStats.getMax(), actualStats.getMax()); } catch (AssertionError e) { - throw new AssertionError("Stats: Pivot[" + pivotStats + "] <==> Actual[" + actualStats + "] => " + message, e); + throw new AssertionError( + "Stats: Pivot[" + pivotStats + "] <==> Actual[" + actualStats + "] => " + message, + e); } } } @@ -407,29 +392,29 @@ private void assertPivotStats(String message, PivotField constraint, QueryRespon if (constraint.getFieldStatsInfo().containsKey("sk2")) { // cheeseball hack // if "sk2" was one of hte stats we computed, then we must have also seen // sk1 or sk3 because of the way the tags are fixed - assertEquals("had stats sk2, but not another stat?", - 2, constraint.getFieldStatsInfo().size()); + assertEquals( + "had stats sk2, but not another stat?", 2, constraint.getFieldStatsInfo().size()); } else { // if we did not see "sk2", then 1 of the others must be alone - assertEquals("only expected 1 stat", - 1, constraint.getFieldStatsInfo().size()); - assertTrue("not sk1 or sk3", - constraint.getFieldStatsInfo().containsKey("sk1") || - constraint.getFieldStatsInfo().containsKey("sk3")); + assertEquals("only expected 1 stat", 1, constraint.getFieldStatsInfo().size()); + assertTrue( + "not sk1 or sk3", + constraint.getFieldStatsInfo().containsKey("sk1") + || constraint.getFieldStatsInfo().containsKey("sk3")); } - } /** - * Verify that the PivotFields we're lookin at doesn't violate any of the expected - * behaviors based on the TRACE_* params found in the base params + * Verify that the PivotFields we're lookin at doesn't violate any of the expected behaviors based + * on the TRACE_* params found in the base params */ - private void assertTraceOk(String pivotName, SolrParams baseParams, List constraints) { + private void assertTraceOk( + String pivotName, SolrParams baseParams, List constraints) { if (null == constraints || 0 == constraints.size()) { return; } final int maxIdx = constraints.size() - 1; - + final int min = baseParams.getInt(TRACE_MIN, -1); final boolean expectMissing = baseParams.getBool(TRACE_MISS, false); final boolean checkCount = "count".equals(baseParams.get(TRACE_SORT, "count")); @@ -441,33 +426,53 @@ private void assertTraceOk(String pivotName, SolrParams baseParams, List prevCount("+prevCount+"): " + constraint, - ((count <= prevCount) - || (expectMissing && i == maxIdx && null == constraint.getValue()))); + assertTrue( + pivotName + + ": val #" + + i + + " of" + + maxIdx + + ": count(" + + count + + ") > prevCount(" + + prevCount + + "): " + + constraint, + ((count <= prevCount) + || (expectMissing && i == maxIdx && null == constraint.getValue()))); prevCount = count; } } } /** - * Given a PivotField constraint, generate a query for the field+value - * for use in an fq to verify the constraint 
count + * Given a PivotField constraint, generate a query for the field+value for use in an fq + * to verify the constraint count */ private static String buildFilter(PivotField constraint) { Object value = constraint.getValue(); @@ -484,38 +489,32 @@ private static String buildFilter(PivotField constraint) { } } - - /** - * Creates a random facet.pivot param string using some of the specified fieldNames - */ + /** Creates a random facet.pivot param string using some of the specified fieldNames */ private static String buildRandomPivot(String[] fieldNames) { final int depth = TestUtil.nextInt(random(), 1, 3); - String [] fields = new String[depth]; + String[] fields = new String[depth]; for (int i = 0; i < depth; i++) { // yes this means we might use the same field twice // makes it a robust test (especially for multi-valued fields) - fields[i] = fieldNames[TestUtil.nextInt(random(),0,fieldNames.length-1)]; + fields[i] = fieldNames[TestUtil.nextInt(random(), 0, fieldNames.length - 1)]; } return String.join(",", fields); } - /** - * Picks a random field to use for Stats - */ + /** Picks a random field to use for Stats */ private static String pickRandomStatsFields(String[] fieldNames) { // we need to skip boolean fields when computing stats String fieldName; do { - fieldName = fieldNames[TestUtil.nextInt(random(),0,fieldNames.length-1)]; - } - while(fieldName.endsWith("_b") || fieldName.endsWith("_b1")) ; - + fieldName = fieldNames[TestUtil.nextInt(random(), 0, fieldNames.length - 1)]; + } while (fieldName.endsWith("_b") || fieldName.endsWith("_b1")); + return fieldName; } /** - * Generates a random {@link FacetParams#FACET_PIVOT} value w/ local params - * using the specified pivotValue. + * Generates a random {@link FacetParams#FACET_PIVOT} value w/ local params using the specified + * pivotValue. 
*/ private static String buildPivotParamValue(String pivotValue) { // randomly decide which stat tag to use @@ -523,10 +522,10 @@ private static String buildPivotParamValue(String pivotValue) { // if this is 0, or stats aren't enabled, we'll be asking for a tag that doesn't exist // ...which should be fine (just like excluding a tagged fq that doesn't exist) final int statTag = TestUtil.nextInt(random(), -1, 4); - + if (0 <= statTag) { // only use 1 tag name in the 'stats' localparam - see SOLR-6663 - return "{!stats=st"+statTag+"}" + pivotValue; + return "{!stats=st" + statTag + "}" + pivotValue; } else { // statTag < 0 == sanity check the case of a pivot w/o any stats return pivotValue; @@ -534,11 +533,10 @@ private static String buildPivotParamValue(String pivotValue) { } /** - * Creates a document with randomized field values, some of which be missing values, - * some of which will be multi-valued (per the schema) and some of which will be - * skewed so that small subsets of the ranges will be more common (resulting in an - * increased likelihood of duplicate values) - * + * Creates a document with randomized field values, some of which be missing values, some of which + * will be multi-valued (per the schema) and some of which will be skewed so that small subsets of + * the ranges will be more common (resulting in an increased likelihood of duplicate values) + * * @see #buildRandomPivot */ private static SolrInputDocument buildRandomDocument(int id) { @@ -546,76 +544,74 @@ private static SolrInputDocument buildRandomDocument(int id) { // most fields are in most docs // if field is in a doc, then "skewed" chance val is from a dense range // (hopefully with lots of duplication) - for (String prefix : new String[] { "pivot_i", "pivot_ti" }) { + for (String prefix : new String[] {"pivot_i", "pivot_ti"}) { if (useField()) { - doc.addField(prefix+"1", skewed(TestUtil.nextInt(random(), 20, 50), - random().nextInt())); - + doc.addField(prefix + "1", skewed(TestUtil.nextInt(random(), 20, 50), random().nextInt())); } if (useField()) { int numMulti = atLeast(1); while (0 < numMulti--) { - doc.addField(prefix, skewed(TestUtil.nextInt(random(), 20, 50), - random().nextInt())); + doc.addField(prefix, skewed(TestUtil.nextInt(random(), 20, 50), random().nextInt())); } } } - for (String prefix : new String[] { "pivot_l", "pivot_tl" }) { + for (String prefix : new String[] {"pivot_l", "pivot_tl"}) { if (useField()) { - doc.addField(prefix+"1", skewed(TestUtil.nextInt(random(), 5000, 5100), - random().nextLong())); + doc.addField( + prefix + "1", skewed(TestUtil.nextInt(random(), 5000, 5100), random().nextLong())); } if (useField()) { int numMulti = atLeast(1); while (0 < numMulti--) { - doc.addField(prefix, skewed(TestUtil.nextInt(random(), 5000, 5100), - random().nextLong())); + doc.addField(prefix, skewed(TestUtil.nextInt(random(), 5000, 5100), random().nextLong())); } } } - for (String prefix : new String[] { "pivot_f", "pivot_tf" }) { + for (String prefix : new String[] {"pivot_f", "pivot_tf"}) { if (useField()) { - doc.addField(prefix+"1", skewed(1.0F / random().nextInt(13), - random().nextFloat() * random().nextInt())); + doc.addField( + prefix + "1", + skewed(1.0F / random().nextInt(13), random().nextFloat() * random().nextInt())); } if (useField()) { int numMulti = atLeast(1); while (0 < numMulti--) { - doc.addField(prefix, skewed(1.0F / random().nextInt(13), - random().nextFloat() * random().nextInt())); + doc.addField( + prefix, + skewed(1.0F / random().nextInt(13), random().nextFloat() * 
random().nextInt())); } } } - for (String prefix : new String[] { "pivot_d", "pivot_td" }) { + for (String prefix : new String[] {"pivot_d", "pivot_td"}) { if (useField()) { - doc.addField(prefix+"1", skewed(1.0D / random().nextInt(19), - random().nextDouble() * random().nextInt())); + doc.addField( + prefix + "1", + skewed(1.0D / random().nextInt(19), random().nextDouble() * random().nextInt())); } if (useField()) { int numMulti = atLeast(1); while (0 < numMulti--) { - doc.addField(prefix, skewed(1.0D / random().nextInt(19), - random().nextDouble() * random().nextInt())); + doc.addField( + prefix, + skewed(1.0D / random().nextInt(19), random().nextDouble() * random().nextInt())); } } } - for (String prefix : new String[] { "pivot_dt", "pivot_tdt" }) { + for (String prefix : new String[] {"pivot_dt", "pivot_tdt"}) { if (useField()) { - doc.addField(prefix+"1", skewed(randomSkewedDate(), randomDate())); - + doc.addField(prefix + "1", skewed(randomSkewedDate(), randomDate())); } if (useField()) { int numMulti = atLeast(1); while (0 < numMulti--) { doc.addField(prefix, skewed(randomSkewedDate(), randomDate())); - } } } { String prefix = "pivot_b"; if (useField()) { - doc.addField(prefix+"1", random().nextBoolean() ? "t" : "f"); + doc.addField(prefix + "1", random().nextBoolean() ? "t" : "f"); } if (useField()) { int numMulti = atLeast(1); @@ -624,16 +620,18 @@ private static SolrInputDocument buildRandomDocument(int id) { } } } - for (String prefix : new String[] { "pivot_x_s", "pivot_y_s", "pivot_z_s"}) { + for (String prefix : new String[] {"pivot_x_s", "pivot_y_s", "pivot_z_s"}) { if (useField()) { - doc.addField(prefix+"1", skewed(TestUtil.randomSimpleString(random(), 1, 1), - randomXmlUsableUnicodeString())); + doc.addField( + prefix + "1", + skewed(TestUtil.randomSimpleString(random(), 1, 1), randomXmlUsableUnicodeString())); } if (useField()) { int numMulti = atLeast(1); while (0 < numMulti--) { - doc.addField(prefix, skewed(TestUtil.randomSimpleString(random(), 1, 1), - randomXmlUsableUnicodeString())); + doc.addField( + prefix, + skewed(TestUtil.randomSimpleString(random(), 1, 1), randomXmlUsableUnicodeString())); } } } @@ -642,9 +640,9 @@ private static SolrInputDocument buildRandomDocument(int id) { // for the remaining fields, make every doc have a value in a dense range // - for (String prefix : new String[] { "dense_pivot_x_s", "dense_pivot_y_s" }) { + for (String prefix : new String[] {"dense_pivot_x_s", "dense_pivot_y_s"}) { if (useField()) { - doc.addField(prefix+"1", TestUtil.randomSimpleString(random(), 1, 1)); + doc.addField(prefix + "1", TestUtil.randomSimpleString(random(), 1, 1)); } if (useField()) { int numMulti = atLeast(1); @@ -653,9 +651,9 @@ private static SolrInputDocument buildRandomDocument(int id) { } } } - for (String prefix : new String[] { "dense_pivot_i", "dense_pivot_ti" }) { + for (String prefix : new String[] {"dense_pivot_i", "dense_pivot_ti"}) { if (useField()) { - doc.addField(prefix+"1", TestUtil.nextInt(random(), 20, 50)); + doc.addField(prefix + "1", TestUtil.nextInt(random(), 20, 50)); } if (useField()) { int numMulti = atLeast(1); @@ -668,9 +666,9 @@ private static SolrInputDocument buildRandomDocument(int id) { return doc; } - /** - * Similar to usually() but we want it to happen just as often regardless - * of test multiplier and nightly status + /** + * Similar to usually() but we want it to happen just as often regardless of test multiplier and + * nightly status * * @see #useFieldRandomizedFactor */ @@ -678,10 +676,8 @@ private static 
boolean useField() { assert 0 < useFieldRandomizedFactor; return 0 != TestUtil.nextInt(random(), 0, useFieldRandomizedFactor); } - - /** - * Asserts the number of docs found in the response - */ + + /** Asserts the number of docs found in the response */ private void assertNumFound(String msg, int expected, QueryResponse response) { countNumFoundChecks++; @@ -694,5 +690,4 @@ private void assertNumFound(String msg, int expected, QueryResponse response) { * @see #assertPivotCountsAreCorrect(SolrParams,SolrParams) */ private int countNumFoundChecks = 0; - } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudPseudoReturnFields.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudPseudoReturnFields.java index 23d2f93cdc8..ae2d28127c9 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCloudPseudoReturnFields.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudPseudoReturnFields.java @@ -25,7 +25,6 @@ import java.util.List; import java.util.Map; import java.util.Random; - import org.apache.lucene.util.TestUtil; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; @@ -45,15 +44,15 @@ import org.junit.Before; import org.junit.BeforeClass; -/** - * @see TestPseudoReturnFields +/** + * @see TestPseudoReturnFields * @see TestRandomFlRTGCloud */ public class TestCloudPseudoReturnFields extends SolrCloudTestCase { - + private static final String DEBUG_LABEL = MethodHandles.lookup().lookupClass().getName(); private static final String COLLECTION_NAME = DEBUG_LABEL + "_collection"; - + /** A basic client for operations at the cloud level, default collection will be set */ private static CloudSolrClient CLOUD_CLIENT; /** One client per node */ @@ -62,17 +61,17 @@ public class TestCloudPseudoReturnFields extends SolrCloudTestCase { @BeforeClass private static void createMiniSolrCloudCluster() throws Exception { // multi replicas should matter... - final int repFactor = usually() ? 1 : 2;; + final int repFactor = usually() ? 1 : 2; // ... but we definitely want to ensure forwarded requests to other shards work ... final int numShards = 2; // ... 
including some forwarded requests from nodes not hosting a shard final int numNodes = 1 + (numShards * repFactor); - + final String configName = DEBUG_LABEL + "_config-set"; final Path configDir = TEST_COLL1_CONF(); configureCluster(numNodes).addConfig(configName, configDir).configure(); - + Map collectionProperties = new HashMap<>(); collectionProperties.put("config", "solrconfig-tlog.xml"); collectionProperties.put("schema", "schema-pseudo-fields.xml"); @@ -89,24 +88,47 @@ private static void createMiniSolrCloudCluster() throws Exception { CLIENTS.add(getHttpSolrClient(jetty.getBaseUrl() + "/" + COLLECTION_NAME + "/")); } - assertEquals(0, CLOUD_CLIENT.add(sdoc("id", "42", "val_i", "1", "ssto", "X", "subject", "aaa")).getStatus()); - assertEquals(0, CLOUD_CLIENT.add(sdoc("id", "43", "val_i", "9", "ssto", "X", "subject", "bbb")).getStatus()); - assertEquals(0, CLOUD_CLIENT.add(sdoc("id", "44", "val_i", "4", "ssto", "X", "subject", "aaa")).getStatus()); - assertEquals(0, CLOUD_CLIENT.add(sdoc("id", "45", "val_i", "6", "ssto", "X", "subject", "aaa")).getStatus()); - assertEquals(0, CLOUD_CLIENT.add(sdoc("id", "46", "val_i", "3", "ssto", "X", "subject", "ggg")).getStatus()); - assertEquals(0, CLOUD_CLIENT.commit().getStatus());; - + assertEquals( + 0, + CLOUD_CLIENT + .add(sdoc("id", "42", "val_i", "1", "ssto", "X", "subject", "aaa")) + .getStatus()); + assertEquals( + 0, + CLOUD_CLIENT + .add(sdoc("id", "43", "val_i", "9", "ssto", "X", "subject", "bbb")) + .getStatus()); + assertEquals( + 0, + CLOUD_CLIENT + .add(sdoc("id", "44", "val_i", "4", "ssto", "X", "subject", "aaa")) + .getStatus()); + assertEquals( + 0, + CLOUD_CLIENT + .add(sdoc("id", "45", "val_i", "6", "ssto", "X", "subject", "aaa")) + .getStatus()); + assertEquals( + 0, + CLOUD_CLIENT + .add(sdoc("id", "46", "val_i", "3", "ssto", "X", "subject", "ggg")) + .getStatus()); + assertEquals(0, CLOUD_CLIENT.commit().getStatus()); + ; } - + @Before private void addUncommittedDoc99() throws Exception { // uncommitted doc in transaction log at start of every test // Even if an RTG causes ulog to re-open realtime searcher, next test method // will get another copy of doc 99 in the ulog - assertEquals(0, CLOUD_CLIENT.add(sdoc("id", "99", "val_i", "1", "ssto", "X", - "subject", "uncommitted")).getStatus()); + assertEquals( + 0, + CLOUD_CLIENT + .add(sdoc("id", "99", "val_i", "1", "ssto", "X", "subject", "uncommitted")) + .getStatus()); } - + @AfterClass private static void afterClass() throws Exception { if (null != CLOUD_CLIENT) { @@ -128,106 +150,134 @@ public void testMultiValued() throws Exception { // score as pseudo field - precondition checks for (String name : new String[] {"score", "val_ss"}) { try { - FieldResponse frsp = new Field(name, params("includeDynamic","true", - "showDefaults","true")).process(CLOUD_CLIENT); - assertNotNull("Test depends on a (dynamic) field matching '"+name+"', Null response", frsp); - assertEquals("Test depends on a (dynamic) field matching '"+name+"', bad status: " + frsp.toString(), - 0, frsp.getStatus()); - assertNotNull("Test depends on a (dynamic) field matching '"+name+ - "', schema was changed out from under us? ... " + frsp.toString(), frsp.getField()); - assertEquals("Test depends on a multivalued dynamic field matching '"+name+ - "', schema was changed out from under us? ... 
" + frsp.toString(), - Boolean.TRUE, frsp.getField().get("multiValued")); + FieldResponse frsp = + new Field( + name, + params( + "includeDynamic", "true", + "showDefaults", "true")) + .process(CLOUD_CLIENT); + assertNotNull( + "Test depends on a (dynamic) field matching '" + name + "', Null response", frsp); + assertEquals( + "Test depends on a (dynamic) field matching '" + + name + + "', bad status: " + + frsp.toString(), + 0, + frsp.getStatus()); + assertNotNull( + "Test depends on a (dynamic) field matching '" + + name + + "', schema was changed out from under us? ... " + + frsp.toString(), + frsp.getField()); + assertEquals( + "Test depends on a multivalued dynamic field matching '" + + name + + "', schema was changed out from under us? ... " + + frsp.toString(), + Boolean.TRUE, + frsp.getField().get("multiValued")); } catch (SolrServerException e) { - assertEquals("Couldn't fetch field for '"+name+"' ... schema changed out from under us?", - null, e); + assertEquals( + "Couldn't fetch field for '" + name + "' ... schema changed out from under us?", + null, + e); } } SolrDocument doc = null; - + // score as pseudo field - doc = assertSearchOneDoc(params("q","*:*", "fq", "id:42", "fl","id,score,val_ss,val2_ss")); + doc = assertSearchOneDoc(params("q", "*:*", "fq", "id:42", "fl", "id,score,val_ss,val2_ss")); assertEquals("42", doc.getFieldValue("id")); assertEquals(1.0F, doc.getFieldValue("score")); - assertEquals(""+doc, 2, doc.size()); // no value for val2_ss or val_ss ... yet... - - // TODO: update this test & TestPseudoReturnFields to index docs using a (multivalued) "val_ss" instead of "ssto" + assertEquals("" + doc, 2, doc.size()); // no value for val2_ss or val_ss ... yet... + + // TODO: update this test & TestPseudoReturnFields to index docs using a (multivalued) "val_ss" + // instead of "ssto" // - // that way we can first sanity check a single value in a multivalued field is returned correctly - // as a "List" of one element, *AND* then we could be testing that a (single valued) pseudo-field correctly - // overrides that actual (real) value in a multivalued field (ie: not returning a an List) + // that way we can first sanity check a single value in a multivalued field is returned + // correctly as a "List" of one element, *AND* then we could be testing that a (single valued) + // pseudo-field correctly overrides that actual (real) value in a multivalued field (ie: not + // returning a an List) // - // (NOTE: not doing this yet due to how it will impact most other tests, many of which are currently - // @AwaitsFix'ed) + // (NOTE: not doing this yet due to how it will impact most other tests, many of which are + // currently @AwaitsFix'ed) // - //assertTrue(doc.getFieldValue("val_ss").getClass().toString(), + // assertTrue(doc.getFieldValue("val_ss").getClass().toString(), // doc.getFieldValue("val_ss") instanceof List); - + // single value int using alias that matches multivalued dynamic field - doc = assertSearchOneDoc(params("q","id:42", "fl","val_ss:val_i, val2_ss:10")); - assertEquals(""+doc, 2, doc.size()); - assertEquals(""+doc, 1, doc.getFieldValue("val_ss")); - assertEquals(""+doc, 10L, doc.getFieldValue("val2_ss")); + doc = assertSearchOneDoc(params("q", "id:42", "fl", "val_ss:val_i, val2_ss:10")); + assertEquals("" + doc, 2, doc.size()); + assertEquals("" + doc, 1, doc.getFieldValue("val_ss")); + assertEquals("" + doc, 10L, doc.getFieldValue("val2_ss")); } - + public void testMultiValuedRTG() throws Exception { SolrDocument doc = null; // check same results as 
testMultiValued via RTG (committed doc) - doc = getRandClient(random()).getById("42", params("fl","val_ss:val_i, val2_ss:10, subject")); - assertEquals(""+doc, 3, doc.size()); - assertEquals(""+doc, 1, doc.getFieldValue("val_ss")); - assertEquals(""+doc, 10L, doc.getFieldValue("val2_ss")); - assertEquals(""+doc, "aaa", doc.getFieldValue("subject")); + doc = getRandClient(random()).getById("42", params("fl", "val_ss:val_i, val2_ss:10, subject")); + assertEquals("" + doc, 3, doc.size()); + assertEquals("" + doc, 1, doc.getFieldValue("val_ss")); + assertEquals("" + doc, 10L, doc.getFieldValue("val2_ss")); + assertEquals("" + doc, "aaa", doc.getFieldValue("subject")); // also check real-time-get from transaction log (uncommitted doc) - doc = getRandClient(random()).getById("99", params("fl","val_ss:val_i, val2_ss:10, subject")); - assertEquals(""+doc, 3, doc.size()); - assertEquals(""+doc, 1, doc.getFieldValue("val_ss")); - assertEquals(""+doc, 10L, doc.getFieldValue("val2_ss")); - assertEquals(""+doc, "uncommitted", doc.getFieldValue("subject")); + doc = getRandClient(random()).getById("99", params("fl", "val_ss:val_i, val2_ss:10, subject")); + assertEquals("" + doc, 3, doc.size()); + assertEquals("" + doc, 1, doc.getFieldValue("val_ss")); + assertEquals("" + doc, 10L, doc.getFieldValue("val2_ss")); + assertEquals("" + doc, "uncommitted", doc.getFieldValue("subject")); } - + public void testAllRealFields() throws Exception { for (String fl : TestPseudoReturnFields.ALL_REAL_FIELDS) { - SolrDocumentList docs = assertSearch(params("q", "*:*", "rows", "10", "fl",fl)); + SolrDocumentList docs = assertSearch(params("q", "*:*", "rows", "10", "fl", fl)); // shouldn't matter what doc we pick... for (SolrDocument doc : docs) { assertEquals(fl + " => " + doc, 5, doc.size()); assertTrue(fl + " => " + doc, doc.getFieldValue("id") instanceof String); assertTrue(fl + " => " + doc, doc.getFieldValue("val_i") instanceof Integer); assertTrue(fl + " => " + doc, doc.getFieldValue("subject") instanceof String); - assertTrue(fl + " => " + doc, doc.getFieldValue("ssto") instanceof String); // TODO: val_ss: List + assertTrue( + fl + " => " + doc, + doc.getFieldValue("ssto") instanceof String); // TODO: val_ss: List } } } - + public void testAllRealFieldsRTG() throws Exception { // shouldn't matter if we use RTG (committed or otherwise) for (String fl : TestPseudoReturnFields.ALL_REAL_FIELDS) { for (int i : Arrays.asList(42, 43, 44, 45, 46, 99)) { - SolrDocument doc = getRandClient(random()).getById(""+i, params("fl",fl)); + SolrDocument doc = getRandClient(random()).getById("" + i, params("fl", fl)); assertEquals(fl + " => " + doc, 5, doc.size()); assertTrue(fl + " => " + doc, doc.getFieldValue("id") instanceof String); assertTrue(fl + " => " + doc, doc.getFieldValue("val_i") instanceof Integer); assertTrue(fl + " => " + doc, doc.getFieldValue("subject") instanceof String); - assertTrue(fl + " => " + doc, doc.getFieldValue("ssto") instanceof String); // TODO: val_ss: List - + assertTrue( + fl + " => " + doc, + doc.getFieldValue("ssto") instanceof String); // TODO: val_ss: List } } } - + public void testFilterAndOneRealFieldRTG() throws Exception { - SolrParams params = params("fl","id,val_i", - "fq","{!field f='subject' v=$my_var}", - "my_var","uncommitted"); - SolrDocumentList docs = getRandClient(random()).getById(Arrays.asList("42","99"), params); + SolrParams params = + params( + "fl", "id,val_i", + "fq", "{!field f='subject' v=$my_var}", + "my_var", "uncommitted"); + SolrDocumentList docs = 
getRandClient(random()).getById(Arrays.asList("42", "99"), params); final String msg = params + " => " + docs; assertEquals(msg, 1, docs.size()); assertEquals(msg, 1, docs.getNumFound()); - + SolrDocument doc = docs.get(0); assertEquals(msg, 2, doc.size()); assertEquals(msg, "99", doc.getFieldValue("id")); @@ -236,7 +286,7 @@ public void testFilterAndOneRealFieldRTG() throws Exception { public void testScoreAndAllRealFields() throws Exception { for (String fl : TestPseudoReturnFields.SCORE_AND_REAL_FIELDS) { - SolrDocumentList docs = assertSearch(params("q", "*:*", "rows", "10", "fl",fl)); + SolrDocumentList docs = assertSearch(params("q", "*:*", "rows", "10", "fl", fl)); // shouldn't matter what doc we pick... for (SolrDocument doc : docs) { assertEquals(fl + " => " + doc, 6, doc.size()); @@ -244,32 +294,38 @@ public void testScoreAndAllRealFields() throws Exception { assertTrue(fl + " => " + doc, doc.getFieldValue("score") instanceof Float); assertTrue(fl + " => " + doc, doc.getFieldValue("val_i") instanceof Integer); assertTrue(fl + " => " + doc, doc.getFieldValue("subject") instanceof String); - assertTrue(fl + " => " + doc, doc.getFieldValue("ssto") instanceof String); // TODO: val_ss: List + assertTrue( + fl + " => " + doc, + doc.getFieldValue("ssto") instanceof String); // TODO: val_ss: List } } } - + public void testScoreAndAllRealFieldsRTG() throws Exception { // also shouldn't matter if we use RTG (committed or otherwise) .. score should be ignored for (String fl : TestPseudoReturnFields.SCORE_AND_REAL_FIELDS) { for (int i : Arrays.asList(42, 43, 44, 45, 46, 99)) { - SolrDocument doc = getRandClient(random()).getById(""+i, params("fl",fl)); + SolrDocument doc = getRandClient(random()).getById("" + i, params("fl", fl)); assertEquals(fl + " => " + doc, 5, doc.size()); assertTrue(fl + " => " + doc, doc.getFieldValue("id") instanceof String); assertTrue(fl + " => " + doc, doc.getFieldValue("val_i") instanceof Integer); assertTrue(fl + " => " + doc, doc.getFieldValue("subject") instanceof String); - assertTrue(fl + " => " + doc, doc.getFieldValue("ssto") instanceof String); // TODO: val_ss: List + assertTrue( + fl + " => " + doc, + doc.getFieldValue("ssto") instanceof String); // TODO: val_ss: List } } } public void testScoreAndExplicitRealFields() throws Exception { - + SolrDocumentList docs = null; SolrDocument doc = null; - for (SolrParams p : Arrays.asList(params("q","*:*", "rows", "1", "fl","score,val_i"), - params("q","*:*", "rows", "1", "fl","score", "fl","val_i"))) { + for (SolrParams p : + Arrays.asList( + params("q", "*:*", "rows", "1", "fl", "score,val_i"), + params("q", "*:*", "rows", "1", "fl", "score", "fl", "val_i"))) { docs = assertSearch(p); assertEquals(p + " => " + docs, 5, docs.getNumFound()); doc = docs.get(0); // doesn't really matter which one @@ -277,23 +333,23 @@ public void testScoreAndExplicitRealFields() throws Exception { assertTrue(p + " => " + doc, doc.getFieldValue("val_i") instanceof Integer); assertTrue(p + " => " + doc, doc.getFieldValue("score") instanceof Float); } - - docs = assertSearch(params("q","*:*", "rows", "1", "fl","val_i")); + + docs = assertSearch(params("q", "*:*", "rows", "1", "fl", "val_i")); assertEquals("" + docs, 5, docs.getNumFound()); doc = docs.get(0); // doesn't really matter which one assertEquals("" + doc, 1, doc.size()); assertTrue("" + doc, doc.getFieldValue("val_i") instanceof Integer); } - + public void testScoreAndExplicitRealFieldsRTG() throws Exception { SolrDocumentList docs = null; SolrDocument doc = null; - + // 
@@ -236,7 +286,7 @@ public void testFilterAndOneRealFieldRTG() throws Exception {

   public void testScoreAndAllRealFields() throws Exception {
     for (String fl : TestPseudoReturnFields.SCORE_AND_REAL_FIELDS) {
-      SolrDocumentList docs = assertSearch(params("q", "*:*", "rows", "10", "fl",fl));
+      SolrDocumentList docs = assertSearch(params("q", "*:*", "rows", "10", "fl", fl));
       // shouldn't matter what doc we pick...
       for (SolrDocument doc : docs) {
         assertEquals(fl + " => " + doc, 6, doc.size());
@@ -244,32 +294,38 @@ public void testScoreAndAllRealFields() throws Exception {
         assertTrue(fl + " => " + doc, doc.getFieldValue("score") instanceof Float);
         assertTrue(fl + " => " + doc, doc.getFieldValue("val_i") instanceof Integer);
         assertTrue(fl + " => " + doc, doc.getFieldValue("subject") instanceof String);
-        assertTrue(fl + " => " + doc, doc.getFieldValue("ssto") instanceof String); // TODO: val_ss: List<String>
+        assertTrue(
+            fl + " => " + doc,
+            doc.getFieldValue("ssto") instanceof String); // TODO: val_ss: List<String>
       }
     }
   }
-
+
   public void testScoreAndAllRealFieldsRTG() throws Exception {
     // also shouldn't matter if we use RTG (committed or otherwise) .. score should be ignored
     for (String fl : TestPseudoReturnFields.SCORE_AND_REAL_FIELDS) {
       for (int i : Arrays.asList(42, 43, 44, 45, 46, 99)) {
-        SolrDocument doc = getRandClient(random()).getById(""+i, params("fl",fl));
+        SolrDocument doc = getRandClient(random()).getById("" + i, params("fl", fl));
         assertEquals(fl + " => " + doc, 5, doc.size());
         assertTrue(fl + " => " + doc, doc.getFieldValue("id") instanceof String);
         assertTrue(fl + " => " + doc, doc.getFieldValue("val_i") instanceof Integer);
         assertTrue(fl + " => " + doc, doc.getFieldValue("subject") instanceof String);
-        assertTrue(fl + " => " + doc, doc.getFieldValue("ssto") instanceof String); // TODO: val_ss: List<String>
+        assertTrue(
+            fl + " => " + doc,
+            doc.getFieldValue("ssto") instanceof String); // TODO: val_ss: List<String>
       }
     }
   }

   public void testScoreAndExplicitRealFields() throws Exception {
-
+
     SolrDocumentList docs = null;
     SolrDocument doc = null;
-    for (SolrParams p : Arrays.asList(params("q","*:*", "rows", "1", "fl","score,val_i"),
-                                      params("q","*:*", "rows", "1", "fl","score", "fl","val_i"))) {
+    for (SolrParams p :
+        Arrays.asList(
+            params("q", "*:*", "rows", "1", "fl", "score,val_i"),
+            params("q", "*:*", "rows", "1", "fl", "score", "fl", "val_i"))) {
       docs = assertSearch(p);
       assertEquals(p + " => " + docs, 5, docs.getNumFound());
       doc = docs.get(0); // doesn't really matter which one
@@ -277,23 +333,23 @@ public void testScoreAndExplicitRealFields() throws Exception {
       assertTrue(p + " => " + doc, doc.getFieldValue("val_i") instanceof Integer);
       assertTrue(p + " => " + doc, doc.getFieldValue("score") instanceof Float);
     }
-
-    docs = assertSearch(params("q","*:*", "rows", "1", "fl","val_i"));
+
+    docs = assertSearch(params("q", "*:*", "rows", "1", "fl", "val_i"));
     assertEquals("" + docs, 5, docs.getNumFound());
     doc = docs.get(0); // doesn't really matter which one
     assertEquals("" + doc, 1, doc.size());
     assertTrue("" + doc, doc.getFieldValue("val_i") instanceof Integer);
   }
-
+
   public void testScoreAndExplicitRealFieldsRTG() throws Exception {
     SolrDocumentList docs = null;
     SolrDocument doc = null;
-
+
     // shouldn't matter if we use RTG (committed or otherwise) .. score should be ignored
     for (int i : Arrays.asList(42, 43, 44, 45, 46, 99)) {
-      for (SolrParams p : Arrays.asList(params("fl","score,val_i"),
-                                        params("fl","score", "fl","val_i"))) {
-        doc = getRandClient(random()).getById(""+i, p);
+      for (SolrParams p :
+          Arrays.asList(params("fl", "score,val_i"), params("fl", "score", "fl", "val_i"))) {
+        doc = getRandClient(random()).getById("" + i, p);
         assertEquals(p + " => " + doc, 1, doc.size());
         assertTrue(p + " => " + doc, doc.getFieldValue("val_i") instanceof Integer);
       }
@@ -302,14 +358,16 @@ public void testScoreAndExplicitRealFieldsRTG() throws Exception {

   public void testFunctions() throws Exception {
-    SolrDocumentList docs = assertSearch(params("q","*:*","rows","1","fl","log(val_i)"));
-    assertEquals(""+docs, 5, docs.getNumFound());
+    SolrDocumentList docs = assertSearch(params("q", "*:*", "rows", "1", "fl", "log(val_i)"));
+    assertEquals("" + docs, 5, docs.getNumFound());
     SolrDocument doc = docs.get(0); // doesn't really matter which one
-    assertEquals(""+doc, 1, doc.size());
-    assertTrue(""+doc, doc.getFieldValue("log(val_i)") instanceof Double);
-
-    for (SolrParams p : Arrays.asList(params("q","*:*", "rows", "1", "fl","log(val_i),abs(val_i)"),
-                                      params("q","*:*", "rows", "1", "fl","log(val_i)", "fl","abs(val_i)"))) {
+    assertEquals("" + doc, 1, doc.size());
+    assertTrue("" + doc, doc.getFieldValue("log(val_i)") instanceof Double);
+
+    for (SolrParams p :
+        Arrays.asList(
+            params("q", "*:*", "rows", "1", "fl", "log(val_i),abs(val_i)"),
+            params("q", "*:*", "rows", "1", "fl", "log(val_i)", "fl", "abs(val_i)"))) {
       docs = assertSearch(p);
       assertEquals(p + " => " + docs, 5, docs.getNumFound());
       doc = docs.get(0); // doesn't really matter which one
@@ -321,9 +379,11 @@ public void testFunctions() throws Exception {

   public void testFunctionsRTG() throws Exception {
     // if we use RTG (committed or otherwise) functions should behave the same
-    for (String id : Arrays.asList("42","99")) {
-      for (SolrParams p : Arrays.asList(params("fl","log(val_i),abs(val_i)"),
-                                        params("fl","log(val_i)","fl", "abs(val_i)"))) {
+    for (String id : Arrays.asList("42", "99")) {
+      for (SolrParams p :
+          Arrays.asList(
+              params("fl", "log(val_i),abs(val_i)"),
+              params("fl", "log(val_i)", "fl", "abs(val_i)"))) {
         SolrDocument doc = getRandClient(random()).getById(id, p);
         String msg = id + "," + p + " => " + doc;
         assertEquals(msg, 2, doc.size());
@@ -337,8 +397,10 @@ public void testFunctionsRTG() throws Exception {
   }

   public void testFunctionsAndExplicit() throws Exception {
-    for (SolrParams p : Arrays.asList(params("q","*:*", "rows", "1", "fl","log(val_i),val_i"),
-                                      params("q","*:*", "rows", "1", "fl","log(val_i)", "fl","val_i"))) {
+    for (SolrParams p :
+        Arrays.asList(
+            params("q", "*:*", "rows", "1", "fl", "log(val_i),val_i"),
+            params("q", "*:*", "rows", "1", "fl", "log(val_i)", "fl", "val_i"))) {
       SolrDocumentList docs = assertSearch(p);
       assertEquals(p + " => " + docs, 5, docs.getNumFound());
       SolrDocument doc = docs.get(0); // doesn't really matter which one
@@ -347,12 +409,13 @@ public void testFunctionsAndExplicit() throws Exception {
       assertTrue(p + " => " + doc, doc.getFieldValue("val_i") instanceof Integer);
     }
   }
-
+
   public void testFunctionsAndExplicitRTG() throws Exception {
     // shouldn't matter if we use RTG (committed or otherwise)
-    for (String id : Arrays.asList("42","99")) {
-      for (SolrParams p : Arrays.asList(params("fl","log(val_i),val_i"),
-                                        params("fl","log(val_i)","fl","val_i"))) {
+    for (String id : Arrays.asList("42", "99")) {
+      for (SolrParams p :
+          Arrays.asList(
+              params("fl", "log(val_i),val_i"), params("fl", "log(val_i)", "fl", "val_i"))) {
         SolrDocument doc = getRandClient(random()).getById(id, p);
         String msg = id + "," + p + " => " + doc;
         assertEquals(msg, 2, doc.size());
@@ -365,12 +428,13 @@ public void testFunctionsAndExplicitRTG() throws Exception {
     }
   }
-
   public void testFunctionsAndScore() throws Exception {
-    for (SolrParams p : Arrays.asList(params("fl","log(val_i),score"),
-                                      params("fl","log(val_i)","fl","score"))) {
-      SolrDocumentList docs = assertSearch(SolrParams.wrapDefaults(p, params("q", "*:*", "rows", "10")));
+    for (SolrParams p :
+        Arrays.asList(
+            params("fl", "log(val_i),score"), params("fl", "log(val_i)", "fl", "score"))) {
+      SolrDocumentList docs =
+          assertSearch(SolrParams.wrapDefaults(p, params("q", "*:*", "rows", "10")));
       assertEquals(p + " => " + docs, 5, docs.getNumFound());
       // shouldn't matter what doc we pick...
       for (SolrDocument doc : docs) {
@@ -379,11 +443,14 @@ public void testFunctionsAndScore() throws Exception {
         assertTrue(p + " => " + doc, doc.getFieldValue("log(val_i)") instanceof Double);
       }
     }
-    for (SolrParams p : Arrays.asList(params("fl","log(val_i),abs(val_i),score"),
-                                      params("fl","log(val_i),abs(val_i)","fl","score"),
-                                      params("fl","log(val_i)","fl","abs(val_i),score"),
-                                      params("fl","log(val_i)","fl","abs(val_i)","fl","score"))) {
-      SolrDocumentList docs = assertSearch(SolrParams.wrapDefaults(p, params("q", "*:*", "rows", "10")));
+    for (SolrParams p :
+        Arrays.asList(
+            params("fl", "log(val_i),abs(val_i),score"),
+            params("fl", "log(val_i),abs(val_i)", "fl", "score"),
+            params("fl", "log(val_i)", "fl", "abs(val_i),score"),
+            params("fl", "log(val_i)", "fl", "abs(val_i)", "fl", "score"))) {
+      SolrDocumentList docs =
+          assertSearch(SolrParams.wrapDefaults(p, params("q", "*:*", "rows", "10")));
       assertEquals(p + " => " + docs, 5, docs.getNumFound());
       // shouldn't matter what doc we pick...
       for (SolrDocument doc : docs) {
@@ -398,11 +465,13 @@ public void testFunctionsAndScore() throws Exception {

   public void testFunctionsAndScoreRTG() throws Exception {
     // if we use RTG (committed or otherwise) score should be ignored
-    for (String id : Arrays.asList("42","99")) {
-      for (SolrParams p : Arrays.asList(params("fl","score","fl","log(val_i)","fl","abs(val_i)"),
-                                        params("fl","score","fl","log(val_i),abs(val_i)"),
-                                        params("fl","score,log(val_i)","fl","abs(val_i)"),
-                                        params("fl","score,log(val_i),abs(val_i)"))) {
+    for (String id : Arrays.asList("42", "99")) {
+      for (SolrParams p :
+          Arrays.asList(
+              params("fl", "score", "fl", "log(val_i)", "fl", "abs(val_i)"),
+              params("fl", "score", "fl", "log(val_i),abs(val_i)"),
+              params("fl", "score,log(val_i)", "fl", "abs(val_i)"),
+              params("fl", "score,log(val_i),abs(val_i)"))) {
         SolrDocument doc = getRandClient(random()).getById(id, p);
         String msg = id + "," + p + " => " + doc;
         assertEquals(msg, 2, doc.size());
@@ -416,23 +485,25 @@ public void testFunctionsAndScoreRTG() throws Exception {
   }

   public void testGlobs() throws Exception {
-    SolrDocumentList docs = assertSearch(params("q", "*:*", "rows", "10", "fl","val_*"));
+    SolrDocumentList docs = assertSearch(params("q", "*:*", "rows", "10", "fl", "val_*"));
     assertEquals(5, docs.getNumFound());
     // shouldn't matter what doc we pick...
     for (SolrDocument doc : docs) {
       assertEquals(doc.toString(), 1, doc.size());
       assertTrue(doc.toString(), doc.getFieldValue("val_i") instanceof Integer);
     }

-    for (SolrParams p : Arrays.asList(params("q", "*:*", "rows", "10", "fl","val_*,subj*,ss*"),
-                                      params("q", "*:*", "rows", "10", "fl","val_*","fl","subj*,ss*"),
-                                      params("q", "*:*", "rows", "10", "fl","val_*","fl","subj*","fl","ss*"))) {
+    for (SolrParams p :
+        Arrays.asList(
+            params("q", "*:*", "rows", "10", "fl", "val_*,subj*,ss*"),
+            params("q", "*:*", "rows", "10", "fl", "val_*", "fl", "subj*,ss*"),
+            params("q", "*:*", "rows", "10", "fl", "val_*", "fl", "subj*", "fl", "ss*"))) {
       docs = assertSearch(p);
       // shouldn't matter what doc we pick...
       for (SolrDocument doc : docs) {
         String msg = p + " => " + doc;
         assertEquals(msg, 3, doc.size());
         assertTrue(msg, doc.getFieldValue("val_i") instanceof Integer);
-        assertTrue(msg, doc.getFieldValue("subject") instanceof String); 
+        assertTrue(msg, doc.getFieldValue("subject") instanceof String);
         assertTrue(msg, doc.getFieldValue("ssto") instanceof String); // TODO: val_ss: List<String>
         assertEquals(msg, "X", doc.getFieldValue("ssto"));
       }
@@ -441,23 +512,24 @@ public void testGlobsRTG() throws Exception {
     // behavior shouldn't matter if we are committed or uncommitted
-    for (String id : Arrays.asList("42","99")) {
-
-      SolrDocument doc = getRandClient(random()).getById(id, params("fl","val_*"));
+    for (String id : Arrays.asList("42", "99")) {
+
+      SolrDocument doc = getRandClient(random()).getById(id, params("fl", "val_*"));
       String msg = id + ": fl=val_* => " + doc;
       assertEquals(msg, 1, doc.size());
       assertTrue(msg, doc.getFieldValue("val_i") instanceof Integer);
       assertEquals(msg, 1, doc.getFieldValue("val_i"));
-
-      for (SolrParams p : Arrays.asList(params("fl","val_*,subj*,ss*"),
-                                        params("fl","val_*","fl","subj*,ss*"))) {
+
+      for (SolrParams p :
+          Arrays.asList(
+              params("fl", "val_*,subj*,ss*"), params("fl", "val_*", "fl", "subj*,ss*"))) {
         doc = getRandClient(random()).getById(id, p);
         msg = id + ": " + p + " => " + doc;
-
+
         assertEquals(msg, 3, doc.size());
         assertTrue(msg, doc.getFieldValue("val_i") instanceof Integer);
         assertEquals(msg, 1, doc.getFieldValue("val_i"));
-        assertTrue(msg, doc.getFieldValue("subject") instanceof String); 
+        assertTrue(msg, doc.getFieldValue("subject") instanceof String);
         // NOTE: 'subject' is diff between two docs
         assertTrue(msg, doc.getFieldValue("ssto") instanceof String); // TODO: val_ss: List<String>
         assertEquals(msg, "X", doc.getFieldValue("ssto"));
@@ -466,7 +538,7 @@ public void testGlobsRTG() throws Exception {
   }

   public void testGlobsAndExplicit() throws Exception {
-    SolrDocumentList docs = assertSearch(params("q", "*:*", "rows", "10", "fl","val_*,id"));
+    SolrDocumentList docs = assertSearch(params("q", "*:*", "rows", "10", "fl", "val_*,id"));
     assertEquals(5, docs.getNumFound());
     // shouldn't matter what doc we pick...
     for (SolrDocument doc : docs) {
@@ -475,9 +547,11 @@ public void testGlobsAndExplicit() throws Exception {
       assertTrue(doc.toString(), doc.getFieldValue("id") instanceof String);
     }

-    for (SolrParams p : Arrays.asList(params("q", "*:*", "rows", "10", "fl","val_*,subj*,id"),
-                                      params("q", "*:*", "rows", "10", "fl","val_*","fl","subj*","fl","id"),
-                                      params("q", "*:*", "rows", "10", "fl","val_*","fl","subj*,id"))) {
+    for (SolrParams p :
+        Arrays.asList(
+            params("q", "*:*", "rows", "10", "fl", "val_*,subj*,id"),
+            params("q", "*:*", "rows", "10", "fl", "val_*", "fl", "subj*", "fl", "id"),
+            params("q", "*:*", "rows", "10", "fl", "val_*", "fl", "subj*,id"))) {
       docs = assertSearch(p);
       assertEquals(p + " => " + docs, 5, docs.getNumFound());
       // shouldn't matter what doc we pick...
@@ -485,7 +559,7 @@
         String msg = p + " => " + doc;
         assertEquals(msg, 3, doc.size());
         assertTrue(msg, doc.getFieldValue("val_i") instanceof Integer);
-        assertTrue(msg, doc.getFieldValue("subject") instanceof String); 
+        assertTrue(msg, doc.getFieldValue("subject") instanceof String);
         assertTrue(msg, doc.getFieldValue("id") instanceof String);
       }
     }
@@ -493,30 +567,32 @@ public void testGlobsAndExplicitRTG() throws Exception {
     // behavior shouldn't matter if we are committed or uncommitted
-    for (String id : Arrays.asList("42","99")) {
-      SolrDocument doc = getRandClient(random()).getById(id, params("fl","val_*,id"));
+    for (String id : Arrays.asList("42", "99")) {
+      SolrDocument doc = getRandClient(random()).getById(id, params("fl", "val_*,id"));
       String msg = id + ": fl=val_*,id => " + doc;
       assertEquals(msg, 2, doc.size());
       assertTrue(msg, doc.getFieldValue("id") instanceof String);
       assertTrue(msg, doc.getFieldValue("val_i") instanceof Integer);
       assertEquals(msg, 1, doc.getFieldValue("val_i"));

-      for (SolrParams p : Arrays.asList(params("fl","val_*,subj*,id"),
-                                        params("fl","val_*","fl","subj*","fl","id"),
-                                        params("fl","val_*","fl","subj*,id"))) {
+      for (SolrParams p :
+          Arrays.asList(
+              params("fl", "val_*,subj*,id"),
+              params("fl", "val_*", "fl", "subj*", "fl", "id"),
+              params("fl", "val_*", "fl", "subj*,id"))) {
         doc = getRandClient(random()).getById(id, p);
         msg = id + ": " + p + " => " + doc;
         assertEquals(msg, 3, doc.size());
         assertTrue(msg, doc.getFieldValue("val_i") instanceof Integer);
         assertEquals(msg, 1, doc.getFieldValue("val_i"));
-        assertTrue(msg, doc.getFieldValue("subject") instanceof String); 
+        assertTrue(msg, doc.getFieldValue("subject") instanceof String);
         assertTrue(msg, doc.getFieldValue("id") instanceof String);
       }
     }
   }

   public void testGlobsAndScore() throws Exception {
-    SolrDocumentList docs = assertSearch(params("q", "*:*", "rows", "10", "fl","val_*,score"));
+    SolrDocumentList docs = assertSearch(params("q", "*:*", "rows", "10", "fl", "val_*,score"));
     assertEquals(5, docs.getNumFound());
     // shouldn't matter what doc we pick...
     for (SolrDocument doc : docs) {
@@ -525,17 +601,19 @@ public void testGlobsAndScore() throws Exception {
       assertTrue(doc.toString(), doc.getFieldValue("score") instanceof Float);
     }

-    for (SolrParams p : Arrays.asList(params("q", "*:*", "rows", "10", "fl","val_*,subj*,score"),
-                                      params("q", "*:*", "rows", "10", "fl","val_*","fl","subj*","fl","score"),
-                                      params("q", "*:*", "rows", "10", "fl","val_*","fl","subj*,score"))) {
+    for (SolrParams p :
+        Arrays.asList(
+            params("q", "*:*", "rows", "10", "fl", "val_*,subj*,score"),
+            params("q", "*:*", "rows", "10", "fl", "val_*", "fl", "subj*", "fl", "score"),
+            params("q", "*:*", "rows", "10", "fl", "val_*", "fl", "subj*,score"))) {
       docs = assertSearch(p);
       assertEquals(p + " => " + docs, 5, docs.getNumFound());
-      // shouldn't matter what doc we pick... 
+      // shouldn't matter what doc we pick...
       for (SolrDocument doc : docs) {
         String msg = p + " => " + doc;
         assertEquals(msg, 3, doc.size());
         assertTrue(msg, doc.getFieldValue("val_i") instanceof Integer);
-        assertTrue(msg, doc.getFieldValue("subject") instanceof String); 
+        assertTrue(msg, doc.getFieldValue("subject") instanceof String);
         assertTrue(msg, doc.getFieldValue("score") instanceof Float);
       }
     }
@@ -543,38 +621,53 @@ public void testGlobsAndScoreRTG() throws Exception {
     // behavior shouldn't matter if we are committed or uncommitted, score should be ignored
-    for (String id : Arrays.asList("42","99")) {
-      SolrDocument doc = getRandClient(random()).getById(id, params("fl","val_*,score"));
+    for (String id : Arrays.asList("42", "99")) {
+      SolrDocument doc = getRandClient(random()).getById(id, params("fl", "val_*,score"));
       String msg = id + ": fl=val_*,score => " + doc;
       assertEquals(msg, 1, doc.size());
       assertTrue(msg, doc.getFieldValue("val_i") instanceof Integer);
       assertEquals(msg, 1, doc.getFieldValue("val_i"));

-      for (SolrParams p : Arrays.asList(params("fl","val_*,subj*,score"),
-                                        params("fl","val_*","fl","subj*","fl","score"),
-                                        params("fl","val_*","fl","subj*,score"))) {
+      for (SolrParams p :
+          Arrays.asList(
+              params("fl", "val_*,subj*,score"),
+              params("fl", "val_*", "fl", "subj*", "fl", "score"),
+              params("fl", "val_*", "fl", "subj*,score"))) {
         doc = getRandClient(random()).getById(id, p);
         msg = id + ": " + p + " => " + doc;
         assertEquals(msg, 2, doc.size());
         assertTrue(msg, doc.getFieldValue("val_i") instanceof Integer);
         assertEquals(msg, 1, doc.getFieldValue("val_i"));
-        assertTrue(msg, doc.getFieldValue("subject") instanceof String); 
+        assertTrue(msg, doc.getFieldValue("subject") instanceof String);
       }
     }
   }

   public void testAugmenters() throws Exception {
-    SolrDocumentList docs = assertSearch(params("q", "*:*", "rows", "10", "fl","[docid]"));
+    SolrDocumentList docs = assertSearch(params("q", "*:*", "rows", "10", "fl", "[docid]"));
     assertEquals(5, docs.getNumFound());
     // shouldn't matter what doc we pick...
     for (SolrDocument doc : docs) {
       assertEquals(doc.toString(), 1, doc.size());
       assertTrue(doc.toString(), doc.getFieldValue("[docid]") instanceof Integer);
     }
-
-    for (SolrParams p : Arrays.asList(params("q","*:*", "fl","[docid],[shard],[explain],x_alias:[value v=10 t=int]"),
-                                      params("q","*:*", "fl","[docid],[shard]","fl","[explain],x_alias:[value v=10 t=int]"),
-                                      params("q","*:*", "fl","[docid]","fl","[shard]","fl","[explain]","fl","x_alias:[value v=10 t=int]"))) {
+
+    for (SolrParams p :
+        Arrays.asList(
+            params("q", "*:*", "fl", "[docid],[shard],[explain],x_alias:[value v=10 t=int]"),
+            params(
+                "q", "*:*", "fl", "[docid],[shard]", "fl", "[explain],x_alias:[value v=10 t=int]"),
+            params(
+                "q",
+                "*:*",
+                "fl",
+                "[docid]",
+                "fl",
+                "[shard]",
+                "fl",
+                "[explain]",
+                "fl",
+                "x_alias:[value v=10 t=int]"))) {
       docs = assertSearch(p);
       assertEquals(p + " => " + docs, 5, docs.getNumFound());
       // shouldn't matter what doc we pick...
@@ -582,7 +675,7 @@
         String msg = p + " => " + doc;
         assertEquals(msg, 4, doc.size());
         assertTrue(msg, doc.getFieldValue("[docid]") instanceof Integer);
-        assertTrue(msg, doc.getFieldValue("[shard]") instanceof String); 
+        assertTrue(msg, doc.getFieldValue("[shard]") instanceof String);
         assertTrue(msg, doc.getFieldValue("[explain]") instanceof String);
         assertTrue(msg, doc.getFieldValue("x_alias") instanceof Integer);
         assertEquals(msg, 10, doc.getFieldValue("x_alias"));
@@ -592,41 +685,62 @@ public void testDocIdAugmenterRTG() throws Exception {
     // for an uncommitted doc, we should get -1
-    for (String id : Arrays.asList("42","99")) {
-      SolrDocument doc = getRandClient(random()).getById(id, params("fl","[docid]"));
+    for (String id : Arrays.asList("42", "99")) {
+      SolrDocument doc = getRandClient(random()).getById(id, params("fl", "[docid]"));
       String msg = id + ": fl=[docid] => " + doc;
       assertEquals(msg, 1, doc.size());
       assertTrue(msg, doc.getFieldValue("[docid]") instanceof Integer);
-      assertTrue(msg, -1 <= ((Integer)doc.getFieldValue("[docid]")).intValue());
+      assertTrue(msg, -1 <= ((Integer) doc.getFieldValue("[docid]")).intValue());
     }
   }
-
+
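For context on testDocIdAugmenterRTG above: the [docid] transformer reports a document's internal Lucene id, and a doc served straight from the transaction log has no index segment yet, so -1 is the expected sentinel, while committed docs return values >= 0. A hedged sketch of that contract, assuming a SolrClient named client (illustrative only, not part of the patch):

    // Hedged sketch: -1 means "only in the tlog, not yet in the index".
    private void checkDocIdAugmenter(SolrClient client) throws Exception {
      SolrDocument fromTlog = client.getById("99", params("fl", "[docid]"));
      int internalId = ((Integer) fromTlog.getFieldValue("[docid]")).intValue();
      assertTrue(-1 <= internalId); // committed documents yield >= 0
    }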
doc.getFieldValue("[docid]")).intValue()); } } } - + public void testAugmentersAndExplicit() throws Exception { - for (SolrParams p : Arrays.asList(params("q", "*:*", "fl","id,[docid],[explain],x_alias:[value v=10 t=int]"), - params("q", "*:*", "fl","id","fl","[docid],[explain],x_alias:[value v=10 t=int]"), - params("q", "*:*", "fl","id","fl","[docid]","fl","[explain]","fl","x_alias:[value v=10 t=int]"))) { + for (SolrParams p : + Arrays.asList( + params("q", "*:*", "fl", "id,[docid],[explain],x_alias:[value v=10 t=int]"), + params("q", "*:*", "fl", "id", "fl", "[docid],[explain],x_alias:[value v=10 t=int]"), + params( + "q", + "*:*", + "fl", + "id", + "fl", + "[docid]", + "fl", + "[explain]", + "fl", + "x_alias:[value v=10 t=int]"))) { SolrDocumentList docs = assertSearch(p); assertEquals(p + " => " + docs, 5, docs.getNumFound()); // shouldn't matter what doc we pick... @@ -641,29 +755,39 @@ public void testAugmentersAndExplicit() throws Exception { } } } - + public void testAugmentersAndExplicitRTG() throws Exception { // behavior shouldn't matter if we are committed or uncommitted - for (String id : Arrays.asList("42","99")) { - for (SolrParams p : Arrays.asList(params("fl","id,[docid],[explain],x_alias:[value v=10 t=int]"), - params("fl","id,[docid]","fl","[explain],x_alias:[value v=10 t=int]"), - params("fl","id","fl","[docid]","fl","[explain]","fl","x_alias:[value v=10 t=int]"))) { + for (String id : Arrays.asList("42", "99")) { + for (SolrParams p : + Arrays.asList( + params("fl", "id,[docid],[explain],x_alias:[value v=10 t=int]"), + params("fl", "id,[docid]", "fl", "[explain],x_alias:[value v=10 t=int]"), + params( + "fl", + "id", + "fl", + "[docid]", + "fl", + "[explain]", + "fl", + "x_alias:[value v=10 t=int]"))) { SolrDocument doc = getRandClient(random()).getById(id, p); String msg = id + ": " + p + " => " + doc; - + assertEquals(msg, 3, doc.size()); assertTrue(msg, doc.getFieldValue("id") instanceof String); // RTG: [explain] should be missing (ignored) assertTrue(msg, doc.getFieldValue("x_alias") instanceof Integer); assertEquals(msg, 10, doc.getFieldValue("x_alias")); assertTrue(msg, doc.getFieldValue("[docid]") instanceof Integer); - assertTrue(msg, -1 <= ((Integer)doc.getFieldValue("[docid]")).intValue()); + assertTrue(msg, -1 <= ((Integer) doc.getFieldValue("[docid]")).intValue()); } } } public void testAugmentersAndScore() throws Exception { - SolrParams params = params("q","*:*", "fl","[docid],x_alias:[value v=10 t=int],score"); + SolrParams params = params("q", "*:*", "fl", "[docid],x_alias:[value v=10 t=int],score"); SolrDocumentList docs = assertSearch(params); assertEquals(params + " => " + docs, 5, docs.getNumFound()); // shouldn't matter what doc we pick... 
@@ -672,12 +796,32 @@ public void testAugmentersAndScore() throws Exception {
       assertEquals(msg, 3, doc.size());
       assertTrue(msg, doc.getFieldValue("[docid]") instanceof Integer);
       assertTrue(msg, doc.getFieldValue("x_alias") instanceof Integer);
-      assertEquals(msg, 10, doc.getFieldValue("x_alias")); 
+      assertEquals(msg, 10, doc.getFieldValue("x_alias"));
       assertTrue(msg, doc.getFieldValue("score") instanceof Float);
     }
-    for (SolrParams p : Arrays.asList(params("q","*:*","fl","[docid],x_alias:[value v=10 t=int],[explain],score"),
-                                      params("q","*:*","fl","[docid]","fl","x_alias:[value v=10 t=int],[explain]","fl","score"),
-                                      params("q","*:*","fl","[docid]","fl","x_alias:[value v=10 t=int]","fl","[explain]","fl","score"))) {
+    for (SolrParams p :
+        Arrays.asList(
+            params("q", "*:*", "fl", "[docid],x_alias:[value v=10 t=int],[explain],score"),
+            params(
+                "q",
+                "*:*",
+                "fl",
+                "[docid]",
+                "fl",
+                "x_alias:[value v=10 t=int],[explain]",
+                "fl",
+                "score"),
+            params(
+                "q",
+                "*:*",
+                "fl",
+                "[docid]",
+                "fl",
+                "x_alias:[value v=10 t=int]",
+                "fl",
+                "[explain]",
+                "fl",
+                "score"))) {
       docs = assertSearch(p);
       assertEquals(p + " => " + docs, 5, docs.getNumFound());
@@ -693,30 +837,41 @@ public void testAugmentersAndScore() throws Exception {
     }
   }
-
+
   public void testAugmentersAndScoreRTG() throws Exception {
     // if we use RTG (committed or otherwise) score should be ignored
-    for (String id : Arrays.asList("42","99")) {
-      SolrDocument doc = getRandClient(random()).getById(id, params("fl","x_alias:[value v=10 t=int],score"));
+    for (String id : Arrays.asList("42", "99")) {
+      SolrDocument doc =
+          getRandClient(random()).getById(id, params("fl", "x_alias:[value v=10 t=int],score"));
       String msg = id + " => " + doc;
-
+
       assertEquals(msg, 1, doc.size());
       assertTrue(msg, doc.getFieldValue("x_alias") instanceof Integer);
       assertEquals(msg, 10, doc.getFieldValue("x_alias"));

-      for (SolrParams p : Arrays.asList(params("fl","d_alias:[docid],x_alias:[value v=10 t=int],[explain],score"),
-                                        params("fl","d_alias:[docid],x_alias:[value v=10 t=int],[explain]","fl","score"),
-                                        params("fl","d_alias:[docid]","fl","x_alias:[value v=10 t=int]","fl","[explain]","fl","score"))) {
-
+      for (SolrParams p :
+          Arrays.asList(
+              params("fl", "d_alias:[docid],x_alias:[value v=10 t=int],[explain],score"),
+              params("fl", "d_alias:[docid],x_alias:[value v=10 t=int],[explain]", "fl", "score"),
+              params(
+                  "fl",
+                  "d_alias:[docid]",
+                  "fl",
+                  "x_alias:[value v=10 t=int]",
+                  "fl",
+                  "[explain]",
+                  "fl",
+                  "score"))) {
+
         doc = getRandClient(random()).getById(id, p);
         msg = id + ": " + p + " => " + doc;
-
+
         assertEquals(msg, 2, doc.size());
         assertTrue(msg, doc.getFieldValue("x_alias") instanceof Integer);
         assertEquals(msg, 10, doc.getFieldValue("x_alias"));
         // RTG: [explain] and score should be missing (ignored)
         assertTrue(msg, doc.getFieldValue("d_alias") instanceof Integer);
-        assertTrue(msg, -1 <= ((Integer)doc.getFieldValue("d_alias")).intValue());
+        assertTrue(msg, -1 <= ((Integer) doc.getFieldValue("d_alias")).intValue());
       }
     }
   }
@@ -725,18 +880,17 @@ public void testAugmentersGlobsExplicitAndScoreOhMy() throws Exception {
     Random random = random();

     // NOTE: 'ssto' is the missing one
-    final List<String> fl = Arrays.asList
-      ("id","[docid]","[explain]","score","val_*","subj*");
-
+    final List<String> fl = Arrays.asList("id", "[docid]", "[explain]", "score", "val_*", "subj*");
+
     final int iters = atLeast(random, 10);
-    for (int i = 0; i< iters; i++) {
-
+    for (int i = 0; i < iters; i++) {
+
       Collections.shuffle(fl, random);
-      final SolrParams singleFl = params("q","*:*", "rows", "1","fl",String.join(",", fl));
-      final ModifiableSolrParams multiFl = params("q","*:*", "rows", "1");
+      final SolrParams singleFl = params("q", "*:*", "rows", "1", "fl", String.join(",", fl));
+      final ModifiableSolrParams multiFl = params("q", "*:*", "rows", "1");
       for (String item : fl) {
-        multiFl.add("fl",item);
+        multiFl.add("fl", item);
       }
       for (SolrParams params : Arrays.asList(singleFl, multiFl)) {
         SolrDocumentList docs = assertSearch(params);
@@ -760,71 +914,74 @@ public void testAugmentersGlobsExplicitAndScoreOhMyRTG() throws Exception {
     Random random = random();

     // NOTE: 'ssto' is the missing one
-    final List<String> fl = Arrays.asList
-      ("id","[docid]","[explain]","score","val_*","subj*");
-
+    final List<String> fl = Arrays.asList("id", "[docid]", "[explain]", "score", "val_*", "subj*");
+
     final int iters = atLeast(random, 10);
-    for (int i = 0; i< iters; i++) {
-
+    for (int i = 0; i < iters; i++) {
+
       Collections.shuffle(fl, random);

-      final SolrParams singleFl = params("fl",String.join(",", fl));
+      final SolrParams singleFl = params("fl", String.join(",", fl));
       final ModifiableSolrParams multiFl = params();
       for (String item : fl) {
-        multiFl.add("fl",item);
+        multiFl.add("fl", item);
       }

-      // RTG behavior should be consistent, (committed or otherwise) 
-      for (String id : Arrays.asList("42","99")) { 
+      // RTG behavior should be consistent, (committed or otherwise)
+      for (String id : Arrays.asList("42", "99")) {
         for (SolrParams params : Arrays.asList(singleFl, multiFl)) {
           SolrDocument doc = getRandClient(random()).getById(id, params);
           String msg = id + ": " + params + " => " + doc;
-
+
           assertEquals(msg, 4, doc.size());
           assertTrue(msg, doc.getFieldValue("id") instanceof String);
           assertTrue(msg, doc.getFieldValue("val_i") instanceof Integer);
           assertEquals(msg, 1, doc.getFieldValue("val_i"));
           assertTrue(msg, doc.getFieldValue("subject") instanceof String);
           assertTrue(msg, doc.getFieldValue("[docid]") instanceof Integer);
-          assertTrue(msg, -1 <= ((Integer)doc.getFieldValue("[docid]")).intValue());
+          assertTrue(msg, -1 <= ((Integer) doc.getFieldValue("[docid]")).intValue());
           // RTG: [explain] and score should be missing (ignored)
         }
       }
     }
   }
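The two shuffled-fl tests above rest on one equivalence that the reformatting makes easy to miss: a single comma-separated fl value and repeated fl parameters must select exactly the same fields. A hedged sketch of how both forms are built (the field list here is illustrative):

    // Hedged sketch: both param sets should behave identically.
    final List<String> fields = Arrays.asList("id", "[docid]", "score", "val_*");
    final SolrParams singleFl = params("q", "*:*", "fl", String.join(",", fields));
    final ModifiableSolrParams multiFl = params("q", "*:*");
    for (String item : fields) {
      multiFl.add("fl", item); // one fl parameter per entry
    }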
-
-
-  /**
-   * Given a set of query params, executes as a Query against a random SolrClient and
-   * asserts that exactly one document is returned
+  /**
+   * Given a set of query params, executes as a Query against a random SolrClient and asserts that
+   * exactly one document is returned
    */
   public static SolrDocument assertSearchOneDoc(SolrParams p) throws Exception {
     SolrDocumentList docs = assertSearch(p);
-    assertEquals("does not match exactly one doc: " + p.toString() + " => " + docs.toString(),
-                 1, docs.getNumFound());
-    assertEquals("does not contain exactly one doc: " + p.toString() + " => " + docs.toString(),
-                 1, docs.size());
+    assertEquals(
+        "does not match exactly one doc: " + p.toString() + " => " + docs.toString(),
+        1,
+        docs.getNumFound());
+    assertEquals(
+        "does not contain exactly one doc: " + p.toString() + " => " + docs.toString(),
+        1,
+        docs.size());
     return docs.get(0);
   }
-
-  /**
-   * Given a set of query params, executes as a Query against a random SolrClient and
-   * asserts that at least 1 doc is matched and at least 1 doc is returned
+
+  /**
+   * Given a set of query params, executes as a Query against a random SolrClient and asserts that
+   * at least 1 doc is matched and at least 1 doc is returned
    */
   public static SolrDocumentList assertSearch(SolrParams p) throws Exception {
     QueryResponse rsp = getRandClient(random()).query(p);
     assertEquals("failed request: " + p.toString() + " => " + rsp.toString(), 0, rsp.getStatus());
-    assertTrue("does not match at least one doc: " + p.toString() + " => " + rsp.toString(),
-               1 <= rsp.getResults().getNumFound());
-    assertTrue("rsp does not contain at least one doc: " + p.toString() + " => " + rsp.toString(),
-               1 <= rsp.getResults().size());
+    assertTrue(
+        "does not match at least one doc: " + p.toString() + " => " + rsp.toString(),
+        1 <= rsp.getResults().getNumFound());
+    assertTrue(
+        "rsp does not contain at least one doc: " + p.toString() + " => " + rsp.toString(),
+        1 <= rsp.getResults().size());
     return rsp.getResults();
   }

-  /**
-   * returns a random SolrClient -- either a CloudSolrClient, or an HttpSolrClient pointed
-   * at a node in our cluster
+  /**
+   * returns a random SolrClient -- either a CloudSolrClient, or an HttpSolrClient pointed at a node
+   * in our cluster
    */
   public static SolrClient getRandClient(Random rand) {
     int numClients = CLIENTS.size();
@@ -834,9 +991,7 @@ public static SolrClient getRandClient(Random rand) {

   public static void waitForRecoveriesToFinish(CloudSolrClient client) throws Exception {
     assert null != client.getDefaultCollection();
-    AbstractDistribZkTestBase.waitForRecoveriesToFinish(client.getDefaultCollection(),
-                                                        client.getZkStateReader(),
-                                                        true, true, 330);
+    AbstractDistribZkTestBase.waitForRecoveriesToFinish(
+        client.getDefaultCollection(), client.getZkStateReader(), true, true, 330);
   }
-
 }
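Taken together, the helpers above give this file a uniform pattern: block until every replica has recovered, then assert through a randomly chosen client. A hedged usage sketch; CLOUD_CLIENT stands in for the suite's cloud client field, which is an assumption and not shown in these hunks:

    // Hedged sketch: typical call sequence for the helpers above.
    waitForRecoveriesToFinish(CLOUD_CLIENT); // hypothetical CLOUD_CLIENT field
    SolrDocument doc = assertSearchOneDoc(params("q", "id:42", "fl", "id,val_i"));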
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery.java
index 2bc23c8f2ad..a83450244ab 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery.java
@@ -17,6 +17,9 @@
 package org.apache.solr.cloud;

+import com.codahale.metrics.Counter;
+import com.codahale.metrics.Metric;
+import com.codahale.metrics.Timer;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
@@ -26,7 +29,6 @@
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.stream.Collectors;
-
 import org.apache.commons.io.IOUtils;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
@@ -44,15 +46,11 @@
 import org.junit.BeforeClass;
 import org.junit.Test;

-import com.codahale.metrics.Counter;
-import com.codahale.metrics.Metric;
-import com.codahale.metrics.Timer;
-
 public class TestCloudRecovery extends SolrCloudTestCase {

   private static final String COLLECTION = "collection1";
   private static boolean onlyLeaderIndexes;
-
+
   private int nrtReplicas;
   private int tlogReplicas;

@@ -66,19 +64,20 @@ public static void setupCluster() throws Exception {
   @Before
   public void beforeTest() throws Exception {
     configureCluster(2)
-        .addConfig("config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf"))
+        .addConfig(
+            "config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf"))
         .configure();

     onlyLeaderIndexes = random().nextBoolean();
     nrtReplicas = 2; // onlyLeaderIndexes?0:2;
     tlogReplicas = 0; // onlyLeaderIndexes?2:0; TODO: SOLR-12313 tlog replicas break tests because
-    // TestInjection#waitForInSyncWithLeader is broken
-    CollectionAdminRequest
-        .createCollection(COLLECTION, "config", 2, nrtReplicas, tlogReplicas, 0)
+    // TestInjection#waitForInSyncWithLeader is broken
+    CollectionAdminRequest.createCollection(COLLECTION, "config", 2, nrtReplicas, tlogReplicas, 0)
         .process(cluster.getSolrClient());
     cluster.waitForActiveCollection(COLLECTION, 2, 2 * (nrtReplicas + tlogReplicas));

-    // SOLR-12314 : assert that these values are from the solr.xml file and not UpdateShardHandlerConfig#DEFAULT
+    // SOLR-12314 : assert that these values are from the solr.xml file and not
+    // UpdateShardHandlerConfig#DEFAULT
     for (JettySolrRunner jettySolrRunner : cluster.getJettySolrRunners()) {
       UpdateShardHandler shardHandler = jettySolrRunner.getCoreContainer().getUpdateShardHandler();
       int socketTimeout = shardHandler.getSocketTimeout();
@@ -87,7 +86,7 @@ public void beforeTest() throws Exception {
       assertEquals(45000, connectionTimeout);
     }
   }
-
+
   @After
   public void afterTest() throws Exception {
     TestInjection.reset(); // do after every test, don't wait for AfterClass
@@ -95,7 +94,6 @@ public void afterTest() throws Exception {
   }

   @Test
-  // commented 4-Sep-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018
   public void leaderRecoverFromLogOnStartupTest() throws Exception {
     AtomicInteger countReplayLog = new AtomicInteger(0);
     TestInjection.skipIndexWriterCommitOnClose = true;
@@ -114,23 +112,28 @@ public void leaderRecoverFromLogOnStartupTest() throws Exception {

     ChaosMonkey.stop(cluster.getJettySolrRunners());
-
     for (JettySolrRunner jettySolrRunner : cluster.getJettySolrRunners()) {
       cluster.waitForJettyToStop(jettySolrRunner);
     }
-    assertTrue("Timeout waiting for all not live", ClusterStateUtil.waitForAllReplicasNotLive(cloudClient.getZkStateReader(), 45000));
+    assertTrue(
+        "Timeout waiting for all not live",
+        ClusterStateUtil.waitForAllReplicasNotLive(cloudClient.getZkStateReader(), 45000));
     ChaosMonkey.start(cluster.getJettySolrRunners());
-
+
     cluster.waitForAllNodes(30);
-
-    assertTrue("Timeout waiting for all live and active", ClusterStateUtil.waitForAllActiveAndLiveReplicas(cloudClient.getZkStateReader(), COLLECTION, 120000));
+
+    assertTrue(
+        "Timeout waiting for all live and active",
+        ClusterStateUtil.waitForAllActiveAndLiveReplicas(
+            cloudClient.getZkStateReader(), COLLECTION, 120000));

     resp = cloudClient.query(COLLECTION, params);
     assertEquals(4, resp.getResults().getNumFound());
     // Make sure all nodes recover from the tlog
     if (onlyLeaderIndexes) {
-      // Leader election can be kicked off, so 2 tlog replicas will replay its tlog before becoming new leader
-      assertTrue( countReplayLog.get() >=2);
+      // Leader election can be kicked off, so 2 tlog replicas will replay their tlogs before
+      // becoming the new leader
+      assertTrue(countReplayLog.get() >= 2);
     } else {
       assertEquals(4, countReplayLog.get());
     }
@@ -141,13 +144,15 @@ public void leaderRecoverFromLogOnStartupTest() throws Exception {
     int skippedCount = 0;
     for (JettySolrRunner jetty : cluster.getJettySolrRunners()) {
       SolrMetricManager manager = jetty.getCoreContainer().getMetricManager();
-      List<String> registryNames = manager.registryNames().stream()
-          .filter(s -> s.startsWith("solr.core.")).collect(Collectors.toList());
+      List<String> registryNames =
+          manager.registryNames().stream()
+              .filter(s -> s.startsWith("solr.core."))
+              .collect(Collectors.toList());
       for (String registry : registryNames) {
         Map<String, Metric> metrics = manager.registry(registry).getMetrics();
metrics.get("REPLICATION.peerSync.time"); + Counter counter = (Counter) metrics.get("REPLICATION.peerSync.errors"); + Counter skipped = (Counter) metrics.get("REPLICATION.peerSync.skipped"); replicationCount += timer.getCount(); errorsCount += counter.getCount(); skippedCount += skipped.getCount(); @@ -161,7 +166,6 @@ public void leaderRecoverFromLogOnStartupTest() throws Exception { } @Test - // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 14-Oct-2018 public void corruptedLogTest() throws Exception { AtomicInteger countReplayLog = new AtomicInteger(0); TestInjection.skipIndexWriterCommitOnClose = true; @@ -187,7 +191,7 @@ public void corruptedLogTest() throws Exception { String[] tLogFiles = tlogFolder.list(); Arrays.sort(tLogFiles); String lastTLogFile = tlogFolder.getAbsolutePath() + "/" + tLogFiles[tLogFiles.length - 1]; - try (FileInputStream inputStream = new FileInputStream(lastTLogFile)){ + try (FileInputStream inputStream = new FileInputStream(lastTLogFile)) { byte[] tlogBytes = IOUtils.toByteArray(inputStream); contentFiles.put(lastTLogFile, tlogBytes); logHeaderSize = Math.min(tlogBytes.length, logHeaderSize); @@ -196,19 +200,21 @@ public void corruptedLogTest() throws Exception { } ChaosMonkey.stop(cluster.getJettySolrRunners()); - + for (JettySolrRunner j : cluster.getJettySolrRunners()) { cluster.waitForJettyToStop(j); } - - assertTrue("Timeout waiting for all not live", ClusterStateUtil.waitForAllReplicasNotLive(cloudClient.getZkStateReader(), 45000)); + + assertTrue( + "Timeout waiting for all not live", + ClusterStateUtil.waitForAllReplicasNotLive(cloudClient.getZkStateReader(), 45000)); for (Map.Entry entry : contentFiles.entrySet()) { byte[] tlogBytes = entry.getValue(); if (tlogBytes.length <= logHeaderSize) continue; try (FileOutputStream stream = new FileOutputStream(entry.getKey())) { - int skipLastBytes = Math.max(random().nextInt(tlogBytes.length - logHeaderSize)-2, 2); + int skipLastBytes = Math.max(random().nextInt(tlogBytes.length - logHeaderSize) - 2, 2); for (int i = 0; i < entry.getValue().length - skipLastBytes; i++) { stream.write(tlogBytes[i]); } @@ -217,19 +223,21 @@ public void corruptedLogTest() throws Exception { ChaosMonkey.start(cluster.getJettySolrRunners()); cluster.waitForAllNodes(30); - + Thread.sleep(1000); - - assertTrue("Timeout waiting for all live and active", ClusterStateUtil.waitForAllActiveAndLiveReplicas(cloudClient.getZkStateReader(), COLLECTION, 120000)); - + + assertTrue( + "Timeout waiting for all live and active", + ClusterStateUtil.waitForAllActiveAndLiveReplicas( + cloudClient.getZkStateReader(), COLLECTION, 120000)); + cluster.waitForActiveCollection(COLLECTION, 2, 2 * (nrtReplicas + tlogReplicas)); - + cloudClient.getZkStateReader().forceUpdateCollection(COLLECTION); - + resp = cloudClient.query(COLLECTION, params); // Make sure cluster still healthy // TODO: AwaitsFix - this will fail under test beasting // assertTrue(resp.toString(), resp.getResults().getNumFound() >= 2); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery2.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery2.java index 8167f2cc059..e9a28acf858 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery2.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery2.java @@ -18,7 +18,6 @@ package org.apache.solr.cloud; import java.lang.invoke.MethodHandles; - import org.apache.solr.client.solrj.SolrQuery; import 
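The assertions in leaderRecoverFromLogOnStartupTest above read Dropwizard metrics out of each core's registry to tell whether recovery went through PeerSync or full replication. A minimal hedged sketch of that lookup, assuming a JettySolrRunner named jetty as in the loop above:

    // Hedged sketch: peerSync metrics live in the per-core "solr.core.*" registries.
    SolrMetricManager manager = jetty.getCoreContainer().getMetricManager();
    for (String registry : manager.registryNames()) {
      if (!registry.startsWith("solr.core.")) continue;
      Map<String, Metric> metrics = manager.registry(registry).getMetrics();
      Timer time = (Timer) metrics.get("REPLICATION.peerSync.time");
      long syncs = (time == null) ? 0 : time.getCount(); // 0 if the metric is absent
    }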
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery2.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery2.java
index 8167f2cc059..e9a28acf858 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery2.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery2.java
@@ -18,7 +18,6 @@
 package org.apache.solr.cloud;

 import java.lang.invoke.MethodHandles;
-
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
@@ -40,14 +39,14 @@ public static void setupCluster() throws Exception {
     System.setProperty("solr.ulog.numRecordsToKeep", "1000");

     configureCluster(2)
-        .addConfig("config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf"))
+        .addConfig(
+            "config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf"))
         .configure();
-    CollectionAdminRequest
-        .createCollection(COLLECTION, "config", 1,2)
+    CollectionAdminRequest.createCollection(COLLECTION, "config", 1, 2)
         .process(cluster.getSolrClient());
-    AbstractDistribZkTestBase.waitForRecoveriesToFinish(COLLECTION, cluster.getSolrClient().getZkStateReader(),
-        false, true, 30);
+    AbstractDistribZkTestBase.waitForRecoveriesToFinish(
+        COLLECTION, cluster.getSolrClient().getZkStateReader(), false, true, 30);
   }

   @Test
@@ -61,7 +60,7 @@ public void test() throws Exception {

     UpdateRequest req = new UpdateRequest();
     for (int i = 0; i < 100; i++) {
-      req = req.add("id", i+"", "num", i+"");
+      req = req.add("id", i + "", "num", i + "");
     }
     req.commit(client1, COLLECTION);
@@ -69,74 +68,125 @@ public void test() throws Exception {
     waitForState("", COLLECTION, clusterShape(1, 2));

     try (HttpSolrClient client = getHttpSolrClient(node2.getBaseUrl().toString())) {
-      long numFound = client.query(COLLECTION, new SolrQuery("q","*:*", "distrib", "false")).getResults().getNumFound();
+      long numFound =
+          client
+              .query(COLLECTION, new SolrQuery("q", "*:*", "distrib", "false"))
+              .getResults()
+              .getNumFound();
       assertEquals(100, numFound);
     }
-    long numFound = client1.query(COLLECTION, new SolrQuery("q","*:*", "distrib", "false")).getResults().getNumFound();
+    long numFound =
+        client1
+            .query(COLLECTION, new SolrQuery("q", "*:*", "distrib", "false"))
+            .getResults()
+            .getNumFound();
     assertEquals(100, numFound);

-    new UpdateRequest().add("id", "1", "num", "10")
-        .commit(client1, COLLECTION);
+    new UpdateRequest().add("id", "1", "num", "10").commit(client1, COLLECTION);
     try (HttpSolrClient client = getHttpSolrClient(node2.getBaseUrl().toString())) {
-      Object v = client.query(COLLECTION, new SolrQuery("q","id:1", "distrib", "false")).getResults().get(0).get("num");
+      Object v =
+          client
+              .query(COLLECTION, new SolrQuery("q", "id:1", "distrib", "false"))
+              .getResults()
+              .get(0)
+              .get("num");
       assertEquals("10", v.toString());
     }

-    Object v = client1.query(COLLECTION, new SolrQuery("q","id:1", "distrib", "false")).getResults().get(0).get("num");
+    Object v =
+        client1
+            .query(COLLECTION, new SolrQuery("q", "id:1", "distrib", "false"))
+            .getResults()
+            .get(0)
+            .get("num");
     assertEquals("10", v.toString());

     //
     node2.stop();
     waitForState("", COLLECTION, (liveNodes, collectionState) -> liveNodes.size() == 1);

-    new UpdateRequest().add("id", "1", "num", "20")
-        .commit(client1, COLLECTION);
-    v = client1.query(COLLECTION, new SolrQuery("q","id:1", "distrib", "false")).getResults().get(0).get("num");
+    new UpdateRequest().add("id", "1", "num", "20").commit(client1, COLLECTION);
+    v =
+        client1
+            .query(COLLECTION, new SolrQuery("q", "id:1", "distrib", "false"))
+            .getResults()
+            .get(0)
+            .get("num");
     assertEquals("20", v.toString());

     node2.start();
     waitForState("", COLLECTION, clusterShape(1, 2));
     try (HttpSolrClient client = getHttpSolrClient(node2.getBaseUrl().toString())) {
-      v = client.query(COLLECTION, new SolrQuery("q","id:1", "distrib", "false")).getResults().get(0).get("num");
+      v =
+          client
+              .query(COLLECTION, new SolrQuery("q", "id:1", "distrib", "false"))
+              .getResults()
+              .get(0)
+              .get("num");
       assertEquals("20", v.toString());
     }

     node2.stop();
     waitForState("", COLLECTION, (liveNodes, collectionState) -> liveNodes.size() == 1);

-    new UpdateRequest().add("id", "1", "num", "30")
-        .commit(client1, COLLECTION);
-    v = client1.query(COLLECTION, new SolrQuery("q","id:1", "distrib", "false")).getResults().get(0).get("num");
+    new UpdateRequest().add("id", "1", "num", "30").commit(client1, COLLECTION);
+    v =
+        client1
+            .query(COLLECTION, new SolrQuery("q", "id:1", "distrib", "false"))
+            .getResults()
+            .get(0)
+            .get("num");
     assertEquals("30", v.toString());

     node2.start();
     waitForState("", COLLECTION, clusterShape(1, 2));

     try (HttpSolrClient client = getHttpSolrClient(node2.getBaseUrl().toString())) {
-      v = client.query(COLLECTION, new SolrQuery("q","id:1", "distrib", "false")).getResults().get(0).get("num");
+      v =
+          client
+              .query(COLLECTION, new SolrQuery("q", "id:1", "distrib", "false"))
+              .getResults()
+              .get(0)
+              .get("num");
       assertEquals("30", v.toString());
     }

-    v = client1.query(COLLECTION, new SolrQuery("q","id:1", "distrib", "false")).getResults().get(0).get("num");
+    v =
+        client1
+            .query(COLLECTION, new SolrQuery("q", "id:1", "distrib", "false"))
+            .getResults()
+            .get(0)
+            .get("num");
     assertEquals("30", v.toString());
     }

     node1.stop();
-    waitForState("", COLLECTION, (liveNodes, collectionState) -> {
-      Replica leader = collectionState.getLeader("shard1");
-      return leader != null && leader.getNodeName().equals(node2.getNodeName());
-    });
+    waitForState(
+        "",
+        COLLECTION,
+        (liveNodes, collectionState) -> {
+          Replica leader = collectionState.getLeader("shard1");
+          return leader != null && leader.getNodeName().equals(node2.getNodeName());
+        });

     node1.start();
     waitForState("", COLLECTION, clusterShape(1, 2));
     try (HttpSolrClient client = getHttpSolrClient(node1.getBaseUrl().toString())) {
-      Object v = client.query(COLLECTION, new SolrQuery("q","id:1", "distrib", "false")).getResults().get(0).get("num");
+      Object v =
+          client
+              .query(COLLECTION, new SolrQuery("q", "id:1", "distrib", "false"))
+              .getResults()
+              .get(0)
+              .get("num");
       assertEquals("30", v.toString());
     }
     try (HttpSolrClient client = getHttpSolrClient(node2.getBaseUrl().toString())) {
-      Object v = client.query(COLLECTION, new SolrQuery("q","id:1", "distrib", "false")).getResults().get(0).get("num");
+      Object v =
+          client
+              .query(COLLECTION, new SolrQuery("q", "id:1", "distrib", "false"))
+              .getResults()
+              .get(0)
+              .get("num");
       assertEquals("30", v.toString());
     }
-
   }
-
 }
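All of TestCloudRecovery2's per-replica checks above use the same idiom: open an HttpSolrClient against one node and query with distrib=false so only that node's local index answers, which is what makes the recovery assertions meaningful. A condensed sketch of the idiom, reusing node2 and COLLECTION from the test above; the expected count is illustrative:

    // Hedged sketch: bypass distributed search to inspect a single replica.
    try (HttpSolrClient client = getHttpSolrClient(node2.getBaseUrl().toString())) {
      long n =
          client
              .query(COLLECTION, new SolrQuery("q", "*:*", "distrib", "false"))
              .getResults()
              .getNumFound();
      assertEquals(100, n); // expected count depends on the step being verified
    }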
+ "org.apache.solr.cloud.overseer.*=DEBUG,org.apache.solr.cloud.Overseer=DEBUG,org.apache.solr.cloud.ZkController=DEBUG") public class TestCloudSearcherWarming extends SolrCloudTestCase { public static final AtomicReference coreNodeNameRef = new AtomicReference<>(null), coreNameRef = new AtomicReference<>(null); @@ -58,7 +56,8 @@ public class TestCloudSearcherWarming extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { - useFactory("solr.StandardDirectoryFactory"); // necessary to find the index+tlog intact after restart + // necessary to find the index+tlog intact after restart + useFactory("solr.StandardDirectoryFactory"); } @Before @@ -66,14 +65,14 @@ public void setUp() throws Exception { super.setUp(); configureCluster(1).addConfig("conf", configset("cloud-minimal")).configure(); } - + @After @Override public void tearDown() throws Exception { coreNameRef.set(null); coreNodeNameRef.set(null); sleepTime.set(-1); - + if (null != cluster) { cluster.deleteAllCollections(); cluster.deleteAllConfigSets(); @@ -81,7 +80,7 @@ public void tearDown() throws Exception { cluster = null; } TestInjection.wrongIndexFingerprint = null; - + super.tearDown(); } @@ -91,18 +90,24 @@ public void testRepFactor1LeaderStartup() throws Exception { CloudSolrClient solrClient = cluster.getSolrClient(); String collectionName = "testRepFactor1LeaderStartup"; - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, 1, 1) - .setCreateNodeSet(cluster.getJettySolrRunner(0).getNodeName()); + CollectionAdminRequest.Create create = + CollectionAdminRequest.createCollection(collectionName, 1, 1) + .setCreateNodeSet(cluster.getJettySolrRunner(0).getNodeName()); create.process(solrClient); - cluster.waitForActiveCollection(collectionName, 1, 1); + cluster.waitForActiveCollection(collectionName, 1, 1); solrClient.setDefaultCollection(collectionName); - String addListenerCommand = "{" + - "'add-listener' : {'name':'newSearcherListener','event':'newSearcher', 'class':'" + SleepingSolrEventListener.class.getName() + "'}" + - "'add-listener' : {'name':'firstSearcherListener','event':'firstSearcher', 'class':'" + SleepingSolrEventListener.class.getName() + "'}" + - "}"; + String addListenerCommand = + "{" + + "'add-listener' : {'name':'newSearcherListener','event':'newSearcher', 'class':'" + + SleepingSolrEventListener.class.getName() + + "'}" + + "'add-listener' : {'name':'firstSearcherListener','event':'firstSearcher', 'class':'" + + SleepingSolrEventListener.class.getName() + + "'}" + + "}"; ConfigRequest request = new ConfigRequest(addListenerCommand); solrClient.request(request); @@ -112,25 +117,36 @@ public void testRepFactor1LeaderStartup() throws Exception { AtomicInteger expectedDocs = new AtomicInteger(1); AtomicReference failingCoreNodeName = new AtomicReference<>(); - CollectionStateWatcher stateWatcher = createActiveReplicaSearcherWatcher(expectedDocs, failingCoreNodeName); + CollectionStateWatcher stateWatcher = + createActiveReplicaSearcherWatcher(expectedDocs, failingCoreNodeName); JettySolrRunner runner = cluster.getJettySolrRunner(0); runner.stop(); - + cluster.waitForJettyToStop(runner); - // check waitForState only after we are sure the node has shutdown and have forced an update to liveNodes - // ie: workaround SOLR-13490 + // check waitForState only after we are sure the node has shutdown and have forced an update to + // liveNodes ie: workaround SOLR-13490 cluster.getSolrClient().getZkStateReader().updateLiveNodes(); - 
waitForState("jetty count:" + cluster.getJettySolrRunners().size(), collectionName, clusterShape(1, 0)); - + waitForState( + "jetty count:" + cluster.getJettySolrRunners().size(), collectionName, clusterShape(1, 0)); + // restart sleepTime.set(1000); runner.start(); cluster.waitForAllNodes(30); - cluster.getSolrClient().getZkStateReader().registerCollectionStateWatcher(collectionName, stateWatcher); + cluster + .getSolrClient() + .getZkStateReader() + .registerCollectionStateWatcher(collectionName, stateWatcher); cluster.waitForActiveCollection(collectionName, 1, 1); - assertNull("No replica should have been active without registering a searcher, found: " + failingCoreNodeName.get(), failingCoreNodeName.get()); - cluster.getSolrClient().getZkStateReader().removeCollectionStateWatcher(collectionName, stateWatcher); + assertNull( + "No replica should have been active without registering a searcher, found: " + + failingCoreNodeName.get(), + failingCoreNodeName.get()); + cluster + .getSolrClient() + .getZkStateReader() + .removeCollectionStateWatcher(collectionName, stateWatcher); } @Test @@ -139,18 +155,25 @@ public void testPeersyncFailureReplicationSuccess() throws Exception { CloudSolrClient solrClient = cluster.getSolrClient(); String collectionName = "testPeersyncFailureReplicationSuccess"; - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, 1, 1) - .setCreateNodeSet(cluster.getJettySolrRunner(0).getNodeName()); + CollectionAdminRequest.Create create = + CollectionAdminRequest.createCollection(collectionName, 1, 1) + .setCreateNodeSet(cluster.getJettySolrRunner(0).getNodeName()); create.process(solrClient); - waitForState("The collection should have 1 shard and 1 replica", collectionName, clusterShape(1, 1)); + waitForState( + "The collection should have 1 shard and 1 replica", collectionName, clusterShape(1, 1)); solrClient.setDefaultCollection(collectionName); - String addListenerCommand = "{" + - "'add-listener' : {'name':'newSearcherListener','event':'newSearcher', 'class':'" + SleepingSolrEventListener.class.getName() + "'}" + - "'add-listener' : {'name':'firstSearcherListener','event':'firstSearcher', 'class':'" + SleepingSolrEventListener.class.getName() + "'}" + - "}"; + String addListenerCommand = + "{" + + "'add-listener' : {'name':'newSearcherListener','event':'newSearcher', 'class':'" + + SleepingSolrEventListener.class.getName() + + "'}" + + "'add-listener' : {'name':'firstSearcherListener','event':'firstSearcher', 'class':'" + + SleepingSolrEventListener.class.getName() + + "'}" + + "}"; ConfigRequest request = new ConfigRequest(addListenerCommand); solrClient.request(request); @@ -170,8 +193,12 @@ public void testPeersyncFailureReplicationSuccess() throws Exception { failingCoreNodeName.set(null); sleepTime.set(5000); - CollectionStateWatcher stateWatcher = createActiveReplicaSearcherWatcher(expectedDocs, failingCoreNodeName); - cluster.getSolrClient().getZkStateReader().registerCollectionStateWatcher(collectionName, stateWatcher); + CollectionStateWatcher stateWatcher = + createActiveReplicaSearcherWatcher(expectedDocs, failingCoreNodeName); + cluster + .getSolrClient() + .getZkStateReader() + .registerCollectionStateWatcher(collectionName, stateWatcher); JettySolrRunner newNode = cluster.startJettySolrRunner(); cluster.waitForAllNodes(30); @@ -179,27 +206,34 @@ public void testPeersyncFailureReplicationSuccess() throws Exception { .setNode(newNode.getNodeName()) .process(solrClient); - waitForState("The collection should have 
1 shard and 2 replica", collectionName, clusterShape(1, 2)); - assertNull("No replica should have been active without registering a searcher, found: " + failingCoreNodeName.get(), failingCoreNodeName.get()); + waitForState( + "The collection should have 1 shard and 2 replica", collectionName, clusterShape(1, 2)); + assertNull( + "No replica should have been active without registering a searcher, found: " + + failingCoreNodeName.get(), + failingCoreNodeName.get()); // stop the old node log.info("Stopping old node 1"); - AtomicReference oldNodeName = new AtomicReference<>(cluster.getJettySolrRunner(0).getNodeName()); + AtomicReference oldNodeName = + new AtomicReference<>(cluster.getJettySolrRunner(0).getNodeName()); JettySolrRunner oldNode = cluster.stopJettySolrRunner(0); - + cluster.waitForJettyToStop(oldNode); // the newly created replica should become leader - waitForState("The collection should have 1 shard and 1 replica", collectionName, clusterShape(1, 1)); + waitForState( + "The collection should have 1 shard and 1 replica", collectionName, clusterShape(1, 1)); // the above call is not enough because we want to assert that the down'ed replica is not active // but clusterShape will also return true if replica is not live -- which we don't want - CollectionStatePredicate collectionStatePredicate = (liveNodes, collectionState) -> { - for (Replica r : collectionState.getReplicas()) { - if (r.getNodeName().equals(oldNodeName.get())) { - return r.getState() == Replica.State.DOWN; - } - } - return false; - }; + CollectionStatePredicate collectionStatePredicate = + (liveNodes, collectionState) -> { + for (Replica r : collectionState.getReplicas()) { + if (r.getNodeName().equals(oldNodeName.get())) { + return r.getState() == Replica.State.DOWN; + } + } + return false; + }; waitForState("", collectionName, collectionStatePredicate); assertNotNull(solrClient.getZkStateReader().getLeaderRetry(collectionName, "shard1")); @@ -215,9 +249,16 @@ public void testPeersyncFailureReplicationSuccess() throws Exception { log.info("Starting old node 1"); cluster.startJettySolrRunner(oldNode); waitForState("", collectionName, clusterShape(1, 2)); - // invoke statewatcher explicitly to avoid race condition where the assert happens before the state watcher is invoked by ZkStateReader - cluster.getSolrClient().getZkStateReader().registerCollectionStateWatcher(collectionName, stateWatcher); - assertNull("No replica should have been active without registering a searcher, found: " + failingCoreNodeName.get(), failingCoreNodeName.get()); + // invoke statewatcher explicitly to avoid race condition where the assert happens before the + // state watcher is invoked by ZkStateReader + cluster + .getSolrClient() + .getZkStateReader() + .registerCollectionStateWatcher(collectionName, stateWatcher); + assertNull( + "No replica should have been active without registering a searcher, found: " + + failingCoreNodeName.get(), + failingCoreNodeName.get()); oldNodeName.set(cluster.getJettySolrRunner(1).getNodeName()); assertSame(oldNode, cluster.stopJettySolrRunner(1)); // old node is now at 1 @@ -229,7 +270,8 @@ public void testPeersyncFailureReplicationSuccess() throws Exception { coreNameRef.set(null); coreNodeNameRef.set(null); failingCoreNodeName.set(null); - sleepTime.set(14000); // has to be higher than the twice the recovery wait pause between attempts plus some margin + // has to be higher than the twice the recovery wait pause between attempts plus some margin + sleepTime.set(14000); // inject failure 
TestInjection.failIndexFingerprintRequests = "true:100"; @@ -237,13 +279,24 @@ public void testPeersyncFailureReplicationSuccess() throws Exception { log.info("Starting old node 2"); cluster.startJettySolrRunner(oldNode); waitForState("", collectionName, clusterShape(1, 2)); - // invoke statewatcher explicitly to avoid race condition where the assert happens before the state watcher is invoked by ZkStateReader - cluster.getSolrClient().getZkStateReader().registerCollectionStateWatcher(collectionName, stateWatcher); - assertNull("No replica should have been active without registering a searcher, found: " + failingCoreNodeName.get(), failingCoreNodeName.get()); - cluster.getSolrClient().getZkStateReader().removeCollectionStateWatcher(collectionName, stateWatcher); + // invoke statewatcher explicitly to avoid race condition where the assert happens before the + // state watcher is invoked by ZkStateReader + cluster + .getSolrClient() + .getZkStateReader() + .registerCollectionStateWatcher(collectionName, stateWatcher); + assertNull( + "No replica should have been active without registering a searcher, found: " + + failingCoreNodeName.get(), + failingCoreNodeName.get()); + cluster + .getSolrClient() + .getZkStateReader() + .removeCollectionStateWatcher(collectionName, stateWatcher); } - private CollectionStateWatcher createActiveReplicaSearcherWatcher(AtomicInteger expectedDocs, AtomicReference failingCoreNodeName) { + private CollectionStateWatcher createActiveReplicaSearcherWatcher( + AtomicInteger expectedDocs, AtomicReference failingCoreNodeName) { return new CollectionStateWatcher() { @Override public boolean onStateChanged(Set liveNodes, DocCollection collectionState) { @@ -265,7 +318,8 @@ public boolean onStateChanged(Set liveNodes, DocCollection collectionSta SolrDispatchFilter solrDispatchFilter = jettySolrRunner.getSolrDispatchFilter(); try (SolrCore core = solrDispatchFilter.getCores().getCore(coreName)) { if (core.getSolrConfig().useColdSearcher) { - log.error("useColdSearcher is enabled! It should not be enabled for this test!"); + log.error( + "useColdSearcher is enabled! 
It should not be enabled for this test!"); assert false; return false; } @@ -274,7 +328,9 @@ public boolean onStateChanged(Set<String> liveNodes, DocCollection collectionSta } RefCounted<SolrIndexSearcher> registeredSearcher = core.getRegisteredSearcher(); if (registeredSearcher != null) { - log.error("registered searcher not null, maxdocs = {}", registeredSearcher.get().maxDoc()); + log.error( + "registered searcher not null, maxdocs = {}", + registeredSearcher.get().maxDoc()); if (registeredSearcher.get().maxDoc() != expectedDocs.get()) { failingCoreNodeName.set(coreNodeName); registeredSearcher.decref(); @@ -311,23 +367,25 @@ public boolean onStateChanged(Set<String> liveNodes, DocCollection collectionSta public static class SleepingSolrEventListener implements SolrEventListener { @Override - public void postCommit() { - - } + public void postCommit() {} @Override - public void postSoftCommit() { - - } + public void postSoftCommit() {} @Override public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher currentSearcher) { if (sleepTime.get() > 0) { - TestCloudSearcherWarming.coreNodeNameRef.set(newSearcher.getCore().getCoreDescriptor().getCloudDescriptor().getCoreNodeName()); + TestCloudSearcherWarming.coreNodeNameRef.set( + newSearcher.getCore().getCoreDescriptor().getCloudDescriptor().getCoreNodeName()); TestCloudSearcherWarming.coreNameRef.set(newSearcher.getCore().getName()); if (log.isInfoEnabled()) { - log.info("Sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}" - , sleepTime.get(), newSearcher, currentSearcher, newSearcher.getCore().getName(), newSearcher.getCore()); + log.info( + "Sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}", + sleepTime.get(), + newSearcher, + currentSearcher, + newSearcher.getCore().getName(), + newSearcher.getCore()); } try { Thread.sleep(sleepTime.get()); @@ -335,8 +393,13 @@ public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher current log.warn("newSearcher was interrupted", e); } if (log.isInfoEnabled()) { - log.info("Finished sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}" - , sleepTime.get(), newSearcher, currentSearcher, newSearcher.getCore().getName(), newSearcher.getCore()); + log.info( + "Finished sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}", + sleepTime.get(), + newSearcher, + currentSearcher, + newSearcher.getCore().getName(), + newSearcher.getCore()); } } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java b/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java index 03f6afd2eeb..35cb08fd507 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java @@ -26,7 +26,7 @@ public class TestClusterProperties extends SolrCloudTestCase { private ClusterProperties props; - + @BeforeClass public static void setupCluster() throws Exception { configureCluster(1).configure(); } @@ -37,7 +37,7 @@ public void setUp() throws Exception { super.setUp(); props = new ClusterProperties(zkClient()); } - + @Test public void testSetPluginClusterProperty() throws Exception { String propertyName = ClusterProperties.EXT_PROPRTTY_PREFIX + "pluginA.propertyA"; CollectionAdminRequest.setClusterProperty(propertyName, "valueA") .process(cluster.getSolrClient()); assertEquals("valueA", props.getClusterProperty(propertyName, null)); } - + 
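(For context on the pair of tests around this point: cluster properties outside Solr's fixed whitelist are only accepted when they carry the plugin namespace prefix. A minimal sketch of the round trip, assuming a running SolrCloud cluster and that `ClusterProperties.EXT_PROPRTTY_PREFIX` — the misspelling is the real constant name — expands to the `"ext."` namespace reserved for plugin properties:

```java
// Minimal sketch of setting and reading back a plugin-scoped cluster property.
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.common.cloud.ClusterProperties;

class PluginClusterPropertySketch {
  static String roundTrip(SolrClient client, ClusterProperties props) throws Exception {
    String name = ClusterProperties.EXT_PROPRTTY_PREFIX + "pluginA.propertyA";
    // Accepted because the name is namespaced for plugins...
    CollectionAdminRequest.setClusterProperty(name, "valueA").process(client);
    // ...whereas a bare "pluginA.propertyA" is rejected with a SolrException,
    // which is what the next test method asserts.
    return props.getClusterProperty(name, null); // expected: "valueA"
  }
}
```
)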
@Test(expected = SolrException.class) public void testSetInvalidPluginClusterProperty() throws Exception { String propertyName = "pluginA.propertyA"; CollectionAdminRequest.setClusterProperty(propertyName, "valueA") .process(cluster.getSolrClient()); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java index 8d375b6feb2..3333d8e2da9 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java @@ -16,11 +16,12 @@ */ package org.apache.solr.cloud; -import javax.script.ScriptEngineManager; -import javax.servlet.FilterChain; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletRequestWrapper; -import javax.servlet.http.HttpServletResponse; +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.apache.solr.common.params.CommonParams.NAME; +import static org.apache.solr.core.ConfigSetProperties.DEFAULT_FILENAME; +import static org.junit.matchers.JUnitMatchers.containsString; + +import com.google.common.collect.ImmutableMap; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; @@ -50,8 +51,11 @@ import java.util.Set; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; - -import com.google.common.collect.ImmutableMap; +import javax.script.ScriptEngineManager; +import javax.servlet.FilterChain; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletRequestWrapper; +import javax.servlet.http.HttpServletResponse; import org.apache.commons.io.FileUtils; import org.apache.http.HttpEntity; import org.apache.http.auth.BasicUserPrincipal; @@ -87,17 +91,17 @@ import org.apache.solr.core.ConfigSetProperties; import org.apache.solr.core.ConfigSetService; import org.apache.solr.core.TestSolrConfigHandler; -import org.apache.solr.security.BasicAuthPlugin; import org.apache.solr.security.AuthorizationContext; import org.apache.solr.security.AuthorizationPlugin; import org.apache.solr.security.AuthorizationResponse; +import org.apache.solr.security.BasicAuthPlugin; import org.apache.solr.servlet.SolrDispatchFilter; import org.apache.solr.util.ExternalPaths; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; +import org.apache.zookeeper.data.Stat; import org.junit.After; import org.junit.AfterClass; -import org.apache.zookeeper.data.Stat; import org.junit.Assume; import org.junit.BeforeClass; import org.junit.Test; @@ -105,28 +109,18 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static java.nio.charset.StandardCharsets.UTF_8; -import static org.apache.solr.common.params.CommonParams.NAME; -import static org.apache.solr.core.ConfigSetProperties.DEFAULT_FILENAME; -import static org.junit.matchers.JUnitMatchers.containsString; - -/** - * Simple ConfigSets API tests on user errors and simple success cases. - */ +/** Simple ConfigSets API tests on user errors and simple success cases. 
*/ public class TestConfigSetsAPI extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @BeforeClass public static void setUpClass() throws Exception { - configureCluster(1) - .withSecurityJson(getSecurityJson()) - .configure(); + configureCluster(1).withSecurityJson(getSecurityJson()).configure(); } @AfterClass - public static void tearDownClass() throws Exception { - } + public static void tearDownClass() throws Exception {} private static ConfigSetService getConfigSetService() { return cluster.getOpenOverseer().getCoreContainer().getConfigSetService(); @@ -176,17 +170,28 @@ public void testCreate() throws Exception { verifyCreate(null, "configSet1", null, null, "solr"); // no old, new - verifyCreate("baseConfigSet2", "configSet2", - null, ImmutableMap.of("immutable", "true", "key1", "value1"), "solr"); + verifyCreate( + "baseConfigSet2", + "configSet2", + null, + ImmutableMap.of("immutable", "true", "key1", "value1"), + "solr"); // old, no new - verifyCreate("baseConfigSet3", "configSet3", - ImmutableMap.of("immutable", "false", "key2", "value2"), null, "solr"); + verifyCreate( + "baseConfigSet3", + "configSet3", + ImmutableMap.of("immutable", "false", "key2", "value2"), + null, + "solr"); // old, new - verifyCreate("baseConfigSet4", "configSet4", + verifyCreate( + "baseConfigSet4", + "configSet4", ImmutableMap.of("immutable", "true", "onlyOld", "onlyOldValue"), - ImmutableMap.of("immutable", "false", "onlyNew", "onlyNewValue"), "solr"); + ImmutableMap.of("immutable", "false", "onlyNew", "onlyNewValue"), + "solr"); } @Test @@ -196,57 +201,91 @@ public void testCreateWithTrust() throws Exception { String configsetSuffix2 = "testCreateWithTrust2"; uploadConfigSetWithAssertions(configsetName, configsetSuffix, "solr"); uploadConfigSetWithAssertions(configsetName, configsetSuffix2, null); - try (SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(), - AbstractZkTestCase.TIMEOUT, 45000, null)) { + try (SolrZkClient zkClient = + new SolrZkClient( + cluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, 45000, null)) { assertTrue(isTrusted(zkClient, configsetName, configsetSuffix)); assertFalse(isTrusted(zkClient, configsetName, configsetSuffix2)); try { ignoreException("unauthenticated request"); // trusted -> unstrusted - createConfigSet(configsetName + configsetSuffix, "foo", Collections.emptyMap(), cluster.getSolrClient(), null); + createConfigSet( + configsetName + configsetSuffix, + "foo", + Collections.emptyMap(), + cluster.getSolrClient(), + null); fail("Expecting exception"); } catch (SolrException e) { assertEquals(SolrException.ErrorCode.UNAUTHORIZED.code, e.code()); unIgnoreException("unauthenticated request"); } // trusted -> trusted - verifyCreate(configsetName + configsetSuffix, "foo2", Collections.emptyMap(), Collections.emptyMap(), "solr"); + verifyCreate( + configsetName + configsetSuffix, + "foo2", + Collections.emptyMap(), + Collections.emptyMap(), + "solr"); assertTrue(isTrusted(zkClient, "foo2", "")); // unstrusted -> unstrusted - verifyCreate(configsetName + configsetSuffix2, "bar", Collections.emptyMap(), Collections.emptyMap(), null); + verifyCreate( + configsetName + configsetSuffix2, + "bar", + Collections.emptyMap(), + Collections.emptyMap(), + null); assertFalse(isTrusted(zkClient, "bar", "")); // unstrusted -> trusted - verifyCreate(configsetName + configsetSuffix2, "bar2", Collections.emptyMap(), Collections.emptyMap(), "solr"); + verifyCreate( + configsetName 
+ configsetSuffix2, + "bar2", + Collections.emptyMap(), + Collections.emptyMap(), + "solr"); assertFalse(isTrusted(zkClient, "bar2", "")); } } - private void setupBaseConfigSet(String baseConfigSetName, Map oldProps) throws Exception { + private void setupBaseConfigSet(String baseConfigSetName, Map oldProps) + throws Exception { final File configDir = getFile("solr").toPath().resolve("configsets/configset-2/conf").toFile(); final File tmpConfigDir = createTempDir().toFile(); tmpConfigDir.deleteOnExit(); FileUtils.copyDirectory(configDir, tmpConfigDir); if (oldProps != null) { - FileUtils.write(new File(tmpConfigDir, ConfigSetProperties.DEFAULT_FILENAME), - getConfigSetProps(oldProps), UTF_8); + FileUtils.write( + new File(tmpConfigDir, ConfigSetProperties.DEFAULT_FILENAME), + getConfigSetProps(oldProps), + UTF_8); } getConfigSetService().uploadConfig(baseConfigSetName, tmpConfigDir.toPath()); } - private void verifyCreate(String baseConfigSetName, String configSetName, - Map oldProps, Map newProps, String username) throws Exception { + private void verifyCreate( + String baseConfigSetName, + String configSetName, + Map oldProps, + Map newProps, + String username) + throws Exception { final String baseUrl = cluster.getJettySolrRunners().get(0).getBaseUrl().toString(); try (final SolrClient solrClient = getHttpSolrClient(baseUrl)) { setupBaseConfigSet(baseConfigSetName, oldProps); - SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(), - AbstractZkTestCase.TIMEOUT, AbstractZkTestCase.TIMEOUT, null); + SolrZkClient zkClient = + new SolrZkClient( + cluster.getZkServer().getZkAddress(), + AbstractZkTestCase.TIMEOUT, + AbstractZkTestCase.TIMEOUT, + null); try { assertFalse(getConfigSetService().checkConfigExists(configSetName)); - ConfigSetAdminResponse response = createConfigSet(baseConfigSetName, configSetName, newProps, solrClient, username); + ConfigSetAdminResponse response = + createConfigSet(baseConfigSetName, configSetName, newProps, solrClient, username); assertNotNull(response.getResponse()); assertTrue(getConfigSetService().checkConfigExists(configSetName)); @@ -257,7 +296,13 @@ private void verifyCreate(String baseConfigSetName, String configSetName, } } - private ConfigSetAdminResponse createConfigSet(String baseConfigSetName, String configSetName, Map newProps, SolrClient solrClient, String username) throws SolrServerException, IOException { + private ConfigSetAdminResponse createConfigSet( + String baseConfigSetName, + String configSetName, + Map newProps, + SolrClient solrClient, + String username) + throws SolrServerException, IOException { Create create = new Create(); create.setBaseConfigSetName(baseConfigSetName).setConfigSetName(configSetName); if (newProps != null) { @@ -271,9 +316,9 @@ private ConfigSetAdminResponse createConfigSet(String baseConfigSetName, String return create.process(solrClient); } - private NamedList getConfigSetPropertiesFromZk( - SolrZkClient zkClient, String path) throws Exception { - byte [] oldPropsData = null; + private NamedList getConfigSetPropertiesFromZk(SolrZkClient zkClient, String path) + throws Exception { + byte[] oldPropsData = null; try { oldPropsData = zkClient.getData(path, null, null, true); } catch (KeeperException.NoNodeException e) { @@ -281,7 +326,8 @@ private NamedList getConfigSetPropertiesFromZk( } if (oldPropsData != null) { - InputStreamReader reader = new InputStreamReader(new ByteArrayInputStream(oldPropsData), UTF_8); + InputStreamReader reader = + new InputStreamReader(new 
ByteArrayInputStream(oldPropsData), UTF_8); try { return ConfigSetProperties.readFromInputStream(reader); } finally { @@ -291,9 +337,16 @@ private NamedList getConfigSetPropertiesFromZk( return null; } - private void verifyProperties(String configSetName, Map oldProps, - Map newProps, SolrZkClient zkClient) throws Exception { - NamedList properties = getConfigSetPropertiesFromZk(zkClient,ZkConfigSetService.CONFIGS_ZKNODE + "/" + configSetName + "/" + DEFAULT_FILENAME); + private void verifyProperties( + String configSetName, + Map oldProps, + Map newProps, + SolrZkClient zkClient) + throws Exception { + NamedList properties = + getConfigSetPropertiesFromZk( + zkClient, + ZkConfigSetService.CONFIGS_ZKNODE + "/" + configSetName + "/" + DEFAULT_FILENAME); // let's check without merging the maps, since that's what the MessageHandler does // (since we'd probably repeat any bug in the MessageHandler here) if (oldProps == null && newProps == null) { @@ -327,51 +380,69 @@ private void verifyProperties(String configSetName, Map oldProps assertTrue(oldValue.equals(entry.getValue())); } else { // not in either - assert(false); + assert (false); } } } @Test public void testUploadErrors() throws Exception { - final SolrClient solrClient = getHttpSolrClient(cluster.getJettySolrRunners().get(0).getBaseUrl().toString()); + final SolrClient solrClient = + getHttpSolrClient(cluster.getJettySolrRunners().get(0).getBaseUrl().toString()); ByteBuffer emptyData = ByteBuffer.allocate(0); ignoreException("The configuration name should be provided"); // Checking error when no configuration name is specified in request - Map map = postDataAndGetResponse(cluster.getSolrClient(), - cluster.getJettySolrRunners().get(0).getBaseUrl().toString() - + "/admin/configs?action=UPLOAD", emptyData, null, false); + Map map = + postDataAndGetResponse( + cluster.getSolrClient(), + cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + + "/admin/configs?action=UPLOAD", + emptyData, + null, + false); assertNotNull(map); unIgnoreException("The configuration name should be provided"); - long statusCode = (long) getObjectByPath(map, false, - Arrays.asList("responseHeader", "status")); + long statusCode = (long) getObjectByPath(map, false, Arrays.asList("responseHeader", "status")); assertEquals(400l, statusCode); - SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(), - AbstractZkTestCase.TIMEOUT, 45000, null); + SolrZkClient zkClient = + new SolrZkClient( + cluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, 45000, null); // Create dummy config files in zookeeper zkClient.makePath("/configs/myconf", true); - zkClient.create("/configs/myconf/firstDummyFile", - "first dummy content".getBytes(UTF_8), CreateMode.PERSISTENT, true); - zkClient.create("/configs/myconf/anotherDummyFile", - "second dummy content".getBytes(UTF_8), CreateMode.PERSISTENT, true); + zkClient.create( + "/configs/myconf/firstDummyFile", + "first dummy content".getBytes(UTF_8), + CreateMode.PERSISTENT, + true); + zkClient.create( + "/configs/myconf/anotherDummyFile", + "second dummy content".getBytes(UTF_8), + CreateMode.PERSISTENT, + true); // Checking error when configuration name specified already exists ignoreException("already exists"); - map = postDataAndGetResponse(cluster.getSolrClient(), - cluster.getJettySolrRunners().get(0).getBaseUrl().toString() - + "/admin/configs?action=UPLOAD&name=myconf", emptyData, null, false); + map = + postDataAndGetResponse( + cluster.getSolrClient(), + 
cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + + "/admin/configs?action=UPLOAD&name=myconf", + emptyData, + null, + false); assertNotNull(map); unIgnoreException("already exists`"); - statusCode = (long) getObjectByPath(map, false, - Arrays.asList("responseHeader", "status")); + statusCode = (long) getObjectByPath(map, false, Arrays.asList("responseHeader", "status")); assertEquals(400l, statusCode); - assertTrue("Expected file doesnt exist in zk. It's possibly overwritten", + assertTrue( + "Expected file doesnt exist in zk. It's possibly overwritten", zkClient.exists("/configs/myconf/firstDummyFile", true)); - assertTrue("Expected file doesnt exist in zk. It's possibly overwritten", + assertTrue( + "Expected file doesnt exist in zk. It's possibly overwritten", zkClient.exists("/configs/myconf/anotherDummyFile", true)); zkClient.close(); @@ -389,16 +460,22 @@ public void testUploadDisabledV2() throws Exception { } public void testUploadDisabled(boolean v2) throws Exception { - try (SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(), - AbstractZkTestCase.TIMEOUT, 45000, null)) { + try (SolrZkClient zkClient = + new SolrZkClient( + cluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, 45000, null)) { ignoreException("Configset upload feature is disabled"); - for (boolean enabled: new boolean[] {true, false}) { + for (boolean enabled : new boolean[] {true, false}) { System.setProperty("configset.upload.enabled", String.valueOf(enabled)); try { - long statusCode = uploadConfigSet("regular", "test-enabled-is-" + enabled, null, zkClient, v2); - assertEquals("ConfigSet upload enabling/disabling not working as expected for enabled=" + enabled + ".", - enabled? 0l: 400l, statusCode); + long statusCode = + uploadConfigSet("regular", "test-enabled-is-" + enabled, null, zkClient, v2); + assertEquals( + "ConfigSet upload enabling/disabling not working as expected for enabled=" + + enabled + + ".", + enabled ? 
0l : 400l, + statusCode); } finally { System.clearProperty("configset.upload.enabled"); } @@ -409,14 +486,16 @@ public void testUploadDisabled(boolean v2) throws Exception { public void testUploadLegacyManagedSchemaFile() throws Exception { String configSetName = "legacy-managed-schema"; - SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(), - AbstractZkTestCase.TIMEOUT, 45000, null); + SolrZkClient zkClient = + new SolrZkClient( + cluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, 45000, null); try { long statusCode = uploadConfigSet(configSetName, "", null, zkClient, true); assertEquals(0l, statusCode); - assertTrue("managed-schema file should have been uploaded", - zkClient.exists("/configs/"+configSetName+"/managed-schema", true)); + assertTrue( + "managed-schema file should have been uploaded", + zkClient.exists("/configs/" + configSetName + "/managed-schema", true)); } finally { zkClient.close(); } @@ -424,23 +503,29 @@ public void testUploadLegacyManagedSchemaFile() throws Exception { // try to create a collection with the uploaded configset createCollection("newcollection", configSetName, 1, 1, cluster.getSolrClient()); - String payload = "{\n" + - " 'add-field' : {\n" + - " 'name':'a1',\n" + - " 'type': 'string',\n" + - " 'stored':true,\n" + - " 'indexed':false\n" + - " },\n" + - " }"; + String payload = + "{\n" + + " 'add-field' : {\n" + + " 'name':'a1',\n" + + " 'type': 'string',\n" + + " 'stored':true,\n" + + " 'indexed':false\n" + + " },\n" + + " }"; ByteBuffer buff = Charset.forName("UTF-8").encode(payload); - Map map = postDataAndGetResponse(cluster.getSolrClient(), + Map map = + postDataAndGetResponse( + cluster.getSolrClient(), cluster.getJettySolrRunners().get(0).getBaseUrl().toString() - + "/newcollection/schema?wt=js" + - "on", buff, null, false); - Map responseHeader = (Map)map.get("responseHeader"); - Long status = (Long)responseHeader.get("status"); - assertEquals((long)status, 0L); + + "/newcollection/schema?wt=js" + + "on", + buff, + null, + false); + Map responseHeader = (Map) map.get("responseHeader"); + Long status = (Long) responseHeader.get("status"); + assertEquals((long) status, 0L); } @Test @@ -457,20 +542,27 @@ public void testOverwrite(boolean v2) throws Exception { String configsetName = "regular"; String configsetSuffix = "testOverwrite-1-" + v2; uploadConfigSetWithAssertions(configsetName, configsetSuffix, null); - try (SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(), - AbstractZkTestCase.TIMEOUT, 45000, null)) { - int solrconfigZkVersion = getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml"); + try (SolrZkClient zkClient = + new SolrZkClient( + cluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, 45000, null)) { + int solrconfigZkVersion = + getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml"); ignoreException("The configuration regulartestOverwrite-1 already exists in zookeeper"); - assertEquals("Can't overwrite an existing configset unless the overwrite parameter is set", - 400, uploadConfigSet(configsetName, configsetSuffix, null, false, false, v2)); + assertEquals( + "Can't overwrite an existing configset unless the overwrite parameter is set", + 400, + uploadConfigSet(configsetName, configsetSuffix, null, false, false, v2)); unIgnoreException("The configuration regulartestOverwrite-1 already exists in zookeeper"); - assertEquals("Expecting version to remain equal", - solrconfigZkVersion, 
getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); + assertEquals( + "Expecting version to remain equal", + solrconfigZkVersion, + getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); assertEquals(0, uploadConfigSet(configsetName, configsetSuffix, null, true, false, v2)); - assertTrue("Expecting version bump", - solrconfigZkVersion < getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); + assertTrue( + "Expecting version bump", + solrconfigZkVersion + < getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); } - } @Test @@ -487,10 +579,12 @@ public void testOverwriteWithCleanup(boolean v2) throws Exception { String configsetName = "regular"; String configsetSuffix = "testOverwriteWithCleanup-1-" + v2; uploadConfigSetWithAssertions(configsetName, configsetSuffix, null); - try (SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(), - AbstractZkTestCase.TIMEOUT, 45000, null)) { + try (SolrZkClient zkClient = + new SolrZkClient( + cluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, 45000, null)) { String configPath = "/configs/" + configsetName + configsetSuffix; - List extraFiles = Arrays.asList( + List extraFiles = + Arrays.asList( configPath + "/foo1", configPath + "/foo2", configPath + "/foo2/1", @@ -500,11 +594,15 @@ public void testOverwriteWithCleanup(boolean v2) throws Exception { } assertEquals(0, uploadConfigSet(configsetName, configsetSuffix, null, true, false, v2)); for (String f : extraFiles) { - assertTrue("Expecting file " + f + " to exist in ConfigSet but it's gone", zkClient.exists(f, true)); + assertTrue( + "Expecting file " + f + " to exist in ConfigSet but it's gone", + zkClient.exists(f, true)); } assertEquals(0, uploadConfigSet(configsetName, configsetSuffix, null, true, true, v2)); for (String f : extraFiles) { - assertFalse("Expecting file " + f + " to be deleted from ConfigSet but it wasn't", zkClient.exists(f, true)); + assertFalse( + "Expecting file " + f + " to be deleted from ConfigSet but it wasn't", + zkClient.exists(f, true)); } assertConfigsetFiles(configsetName, configsetSuffix, zkClient); } @@ -524,72 +622,100 @@ public void testOverwriteWithTrust(boolean v2) throws Exception { String configsetName = "regular"; String configsetSuffix = "testOverwriteWithTrust-1-" + v2; uploadConfigSetWithAssertions(configsetName, configsetSuffix, null); - try (SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(), - AbstractZkTestCase.TIMEOUT, 45000, null)) { + try (SolrZkClient zkClient = + new SolrZkClient( + cluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, 45000, null)) { assertFalse(isTrusted(zkClient, configsetName, configsetSuffix)); - int solrconfigZkVersion = getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml"); + int solrconfigZkVersion = + getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml"); // Was untrusted, overwrite with untrusted assertEquals(0, uploadConfigSet(configsetName, configsetSuffix, null, true, false, v2)); - assertTrue("Expecting version bump", - solrconfigZkVersion < getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); + assertTrue( + "Expecting version bump", + solrconfigZkVersion + < getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); assertFalse(isTrusted(zkClient, configsetName, configsetSuffix)); - solrconfigZkVersion = 
getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml"); + solrconfigZkVersion = + getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml"); // Was untrusted, overwrite with trusted but no cleanup assertEquals(0, uploadConfigSet(configsetName, configsetSuffix, "solr", true, false, v2)); - assertTrue("Expecting version bump", - solrconfigZkVersion < getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); + assertTrue( + "Expecting version bump", + solrconfigZkVersion + < getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); assertFalse(isTrusted(zkClient, configsetName, configsetSuffix)); - solrconfigZkVersion = getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml"); + solrconfigZkVersion = + getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml"); // Was untrusted, overwrite with trusted with cleanup but fail on unzipping. // Should not set trusted=true in configSet - ignoreException("Either empty zipped data, or non-zipped data was passed. In order to upload a configSet, you must zip a non-empty directory to upload."); + ignoreException( + "Either empty zipped data, or non-zipped data was passed. In order to upload a configSet, you must zip a non-empty directory to upload."); assertEquals(400, uploadBadConfigSet(configsetName, configsetSuffix, "solr", true, true, v2)); - assertEquals("Expecting version bump", - solrconfigZkVersion, getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); + assertEquals( + "Expecting version bump", + solrconfigZkVersion, + getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); assertFalse(isTrusted(zkClient, configsetName, configsetSuffix)); - solrconfigZkVersion = getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml"); - ignoreException("Either empty zipped data, or non-zipped data was passed. In order to upload a configSet, you must zip a non-empty directory to upload."); + solrconfigZkVersion = + getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml"); + ignoreException( + "Either empty zipped data, or non-zipped data was passed. 
In order to upload a configSet, you must zip a non-empty directory to upload."); // Was untrusted, overwrite with trusted with cleanup assertEquals(0, uploadConfigSet(configsetName, configsetSuffix, "solr", true, true, v2)); - assertTrue("Expecting version bump", - solrconfigZkVersion < getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); + assertTrue( + "Expecting version bump", + solrconfigZkVersion + < getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); assertTrue(isTrusted(zkClient, configsetName, configsetSuffix)); - solrconfigZkVersion = getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml"); + solrconfigZkVersion = + getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml"); // Was trusted, try to overwrite with untrusted with no cleanup ignoreException("Trying to make an unstrusted ConfigSet update on a trusted configSet"); - assertEquals("Can't upload a trusted configset with an untrusted request", - 400, uploadConfigSet(configsetName, configsetSuffix, null, true, false, v2)); - assertEquals("Expecting version to remain equal", - solrconfigZkVersion, getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); + assertEquals( + "Can't upload a trusted configset with an untrusted request", + 400, + uploadConfigSet(configsetName, configsetSuffix, null, true, false, v2)); + assertEquals( + "Expecting version to remain equal", + solrconfigZkVersion, + getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); assertTrue(isTrusted(zkClient, configsetName, configsetSuffix)); // Was trusted, try to overwrite with untrusted with cleanup ignoreException("Trying to make an unstrusted ConfigSet update on a trusted configSet"); - assertEquals("Can't upload a trusted configset with an untrusted request", - 400, uploadConfigSet(configsetName, configsetSuffix, null, true, true, v2)); - assertEquals("Expecting version to remain equal", - solrconfigZkVersion, getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); + assertEquals( + "Can't upload a trusted configset with an untrusted request", + 400, + uploadConfigSet(configsetName, configsetSuffix, null, true, true, v2)); + assertEquals( + "Expecting version to remain equal", + solrconfigZkVersion, + getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); assertTrue(isTrusted(zkClient, configsetName, configsetSuffix)); unIgnoreException("Trying to make an unstrusted ConfigSet update on a trusted configSet"); // Was trusted, overwrite with trusted no cleanup assertEquals(0, uploadConfigSet(configsetName, configsetSuffix, "solr", true, false, v2)); - assertTrue("Expecting version bump", - solrconfigZkVersion < getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); + assertTrue( + "Expecting version bump", + solrconfigZkVersion + < getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); assertTrue(isTrusted(zkClient, configsetName, configsetSuffix)); - solrconfigZkVersion = getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml"); + solrconfigZkVersion = + getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml"); // Was trusted, overwrite with trusted with cleanup assertEquals(0, uploadConfigSet(configsetName, configsetSuffix, "solr", true, true, v2)); - assertTrue("Expecting version bump", - solrconfigZkVersion < 
getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); + assertTrue( + "Expecting version bump", + solrconfigZkVersion + < getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); assertTrue(isTrusted(zkClient, configsetName, configsetSuffix)); } - } @Test @@ -606,18 +732,44 @@ public void testSingleFileOverwrite(boolean v2) throws Exception { String configsetName = "regular"; String configsetSuffix = "testSinglePathOverwrite-1-" + v2; uploadConfigSetWithAssertions(configsetName, configsetSuffix, null); - try (SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(), - AbstractZkTestCase.TIMEOUT, 45000, null)) { - int solrconfigZkVersion = getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml"); + try (SolrZkClient zkClient = + new SolrZkClient( + cluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, 45000, null)) { + int solrconfigZkVersion = + getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml"); ignoreException("The configuration regulartestOverwrite-1 already exists in zookeeper"); - assertEquals("Can't overwrite an existing configset unless the overwrite parameter is set", - 400, uploadSingleConfigSetFile(configsetName, configsetSuffix, null, "solr/configsets/upload/regular/solrconfig.xml", "solrconfig.xml", false, false, v2)); + assertEquals( + "Can't overwrite an existing configset unless the overwrite parameter is set", + 400, + uploadSingleConfigSetFile( + configsetName, + configsetSuffix, + null, + "solr/configsets/upload/regular/solrconfig.xml", + "solrconfig.xml", + false, + false, + v2)); unIgnoreException("The configuration regulartestOverwrite-1 already exists in zookeeper"); - assertEquals("Expecting version to remain equal", - solrconfigZkVersion, getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); - assertEquals(0, uploadSingleConfigSetFile(configsetName, configsetSuffix, null, "solr/configsets/upload/regular/solrconfig.xml", "solrconfig.xml", true, false, v2)); - assertTrue("Expecting version bump", - solrconfigZkVersion < getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); + assertEquals( + "Expecting version to remain equal", + solrconfigZkVersion, + getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); + assertEquals( + 0, + uploadSingleConfigSetFile( + configsetName, + configsetSuffix, + null, + "solr/configsets/upload/regular/solrconfig.xml", + "solrconfig.xml", + true, + false, + v2)); + assertTrue( + "Expecting version bump", + solrconfigZkVersion + < getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "solrconfig.xml")); } } @@ -635,10 +787,25 @@ public void testNewSingleFile(boolean v2) throws Exception { String configsetName = "regular"; String configsetSuffix = "testSinglePathNew-1-" + v2; uploadConfigSetWithAssertions(configsetName, configsetSuffix, null); - try (SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(), - AbstractZkTestCase.TIMEOUT, 45000, null)) { - assertEquals(0, uploadSingleConfigSetFile(configsetName, configsetSuffix, null, "solr/configsets/upload/regular/solrconfig.xml", "/test/upload/path/solrconfig.xml", false, false, v2)); - assertEquals("Expecting first version of new file", 0, getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "test/upload/path/solrconfig.xml")); + try (SolrZkClient zkClient = + new SolrZkClient( + cluster.getZkServer().getZkAddress(), 
AbstractZkTestCase.TIMEOUT, 45000, null)) { + assertEquals( + 0, + uploadSingleConfigSetFile( + configsetName, + configsetSuffix, + null, + "solr/configsets/upload/regular/solrconfig.xml", + "/test/upload/path/solrconfig.xml", + false, + false, + v2)); + assertEquals( + "Expecting first version of new file", + 0, + getConfigZNodeVersion( + zkClient, configsetName, configsetSuffix, "test/upload/path/solrconfig.xml")); assertConfigsetFiles(configsetName, configsetSuffix, zkClient); } } @@ -657,11 +824,26 @@ public void testSingleWithCleanup(boolean v2) throws Exception { String configsetName = "regular"; String configsetSuffix = "testSinglePathCleanup-1-" + v2; uploadConfigSetWithAssertions(configsetName, configsetSuffix, null); - try (SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(), - AbstractZkTestCase.TIMEOUT, 45000, null)) { + try (SolrZkClient zkClient = + new SolrZkClient( + cluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, 45000, null)) { ignoreException("ConfigSet uploads do not allow cleanup=true when filePath is used."); - assertEquals(400, uploadSingleConfigSetFile(configsetName, configsetSuffix, null, "solr/configsets/upload/regular/solrconfig.xml", "/test/upload/path/solrconfig.xml", true, true, v2)); - assertFalse("New file should not exist, since the trust check did not succeed.", zkClient.exists("/configs/"+configsetName+configsetSuffix+"/test/upload/path/solrconfig.xml", true)); + assertEquals( + 400, + uploadSingleConfigSetFile( + configsetName, + configsetSuffix, + null, + "solr/configsets/upload/regular/solrconfig.xml", + "/test/upload/path/solrconfig.xml", + true, + true, + v2)); + assertFalse( + "New file should not exist, since the trust check did not succeed.", + zkClient.exists( + "/configs/" + configsetName + configsetSuffix + "/test/upload/path/solrconfig.xml", + true)); assertConfigsetFiles(configsetName, configsetSuffix, zkClient); unIgnoreException("ConfigSet uploads do not allow cleanup=true when filePath is used."); } @@ -681,27 +863,71 @@ public void testSingleFileTrusted(boolean v2) throws Exception { String configsetName = "regular"; String configsetSuffix = "testSinglePathTrusted-1-" + v2; uploadConfigSetWithAssertions(configsetName, configsetSuffix, "solr"); - try (SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(), - AbstractZkTestCase.TIMEOUT, 45000, null)) { - assertEquals(0, uploadSingleConfigSetFile(configsetName, configsetSuffix, "solr", "solr/configsets/upload/regular/solrconfig.xml", "/test/upload/path/solrconfig.xml", true, false, v2)); - assertEquals("Expecting first version of new file", 0, getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "test/upload/path/solrconfig.xml")); + try (SolrZkClient zkClient = + new SolrZkClient( + cluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, 45000, null)) { + assertEquals( + 0, + uploadSingleConfigSetFile( + configsetName, + configsetSuffix, + "solr", + "solr/configsets/upload/regular/solrconfig.xml", + "/test/upload/path/solrconfig.xml", + true, + false, + v2)); + assertEquals( + "Expecting first version of new file", + 0, + getConfigZNodeVersion( + zkClient, configsetName, configsetSuffix, "test/upload/path/solrconfig.xml")); assertTrue(isTrusted(zkClient, configsetName, configsetSuffix)); assertConfigsetFiles(configsetName, configsetSuffix, zkClient); ignoreException("Trying to make an unstrusted ConfigSet update on a trusted configSet"); - assertEquals("Can't upload a trusted configset with an untrusted 
request", - 400, uploadSingleConfigSetFile(configsetName, configsetSuffix, null, "solr/configsets/upload/regular/solrconfig.xml", "/test/different/path/solrconfig.xml", true, false, v2)); - assertFalse("New file should not exist, since the trust check did not succeed.", zkClient.exists("/configs/"+configsetName+configsetSuffix+"/test/different/path/solrconfig.xml", true)); + assertEquals( + "Can't upload a trusted configset with an untrusted request", + 400, + uploadSingleConfigSetFile( + configsetName, + configsetSuffix, + null, + "solr/configsets/upload/regular/solrconfig.xml", + "/test/different/path/solrconfig.xml", + true, + false, + v2)); + assertFalse( + "New file should not exist, since the trust check did not succeed.", + zkClient.exists( + "/configs/" + configsetName + configsetSuffix + "/test/different/path/solrconfig.xml", + true)); assertTrue(isTrusted(zkClient, configsetName, configsetSuffix)); assertConfigsetFiles(configsetName, configsetSuffix, zkClient); unIgnoreException("Trying to make an unstrusted ConfigSet update on a trusted configSet"); ignoreException("Trying to make an unstrusted ConfigSet update on a trusted configSet"); - int extraFileZkVersion = getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "test/upload/path/solrconfig.xml"); - assertEquals("Can't upload a trusted configset with an untrusted request", - 400, uploadSingleConfigSetFile(configsetName, configsetSuffix, null, "solr/configsets/upload/regular/solrconfig.xml", "/test/upload/path/solrconfig.xml", true, false, v2)); - assertEquals("Expecting version to remain equal", - extraFileZkVersion, getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "test/upload/path/solrconfig.xml")); + int extraFileZkVersion = + getConfigZNodeVersion( + zkClient, configsetName, configsetSuffix, "test/upload/path/solrconfig.xml"); + assertEquals( + "Can't upload a trusted configset with an untrusted request", + 400, + uploadSingleConfigSetFile( + configsetName, + configsetSuffix, + null, + "solr/configsets/upload/regular/solrconfig.xml", + "/test/upload/path/solrconfig.xml", + true, + false, + v2)); + assertEquals( + "Expecting version to remain equal", + extraFileZkVersion, + getConfigZNodeVersion( + zkClient, configsetName, configsetSuffix, "test/upload/path/solrconfig.xml")); assertTrue(isTrusted(zkClient, configsetName, configsetSuffix)); assertConfigsetFiles(configsetName, configsetSuffix, zkClient); unIgnoreException("Trying to make an unstrusted ConfigSet update on a trusted configSet"); @@ -722,43 +948,119 @@ public void testSingleFileUntrusted(boolean v2) throws Exception { String configsetName = "regular"; String configsetSuffix = "testSinglePathUntrusted-1-" + v2; uploadConfigSetWithAssertions(configsetName, configsetSuffix, null); - try (SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(), - AbstractZkTestCase.TIMEOUT, 45000, null)) { + try (SolrZkClient zkClient = + new SolrZkClient( + cluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, 45000, null)) { // New file with trusted request - assertEquals(0, uploadSingleConfigSetFile(configsetName, configsetSuffix, "solr", "solr/configsets/upload/regular/solrconfig.xml", "/test/upload/path/solrconfig.xml", false, false, v2)); - assertEquals("Expecting first version of new file", 0, getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "test/upload/path/solrconfig.xml")); + assertEquals( + 0, + uploadSingleConfigSetFile( + configsetName, + configsetSuffix, + "solr", + 
"solr/configsets/upload/regular/solrconfig.xml", + "/test/upload/path/solrconfig.xml", + false, + false, + v2)); + assertEquals( + "Expecting first version of new file", + 0, + getConfigZNodeVersion( + zkClient, configsetName, configsetSuffix, "test/upload/path/solrconfig.xml")); assertFalse(isTrusted(zkClient, configsetName, configsetSuffix)); assertConfigsetFiles(configsetName, configsetSuffix, zkClient); // New file with untrusted request - assertEquals(0, uploadSingleConfigSetFile(configsetName, configsetSuffix, null, "solr/configsets/upload/regular/solrconfig.xml", "/test/different/path/solrconfig.xml", false, false, v2)); - assertEquals("Expecting first version of new file", 0, getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "test/different/path/solrconfig.xml")); + assertEquals( + 0, + uploadSingleConfigSetFile( + configsetName, + configsetSuffix, + null, + "solr/configsets/upload/regular/solrconfig.xml", + "/test/different/path/solrconfig.xml", + false, + false, + v2)); + assertEquals( + "Expecting first version of new file", + 0, + getConfigZNodeVersion( + zkClient, configsetName, configsetSuffix, "test/different/path/solrconfig.xml")); assertFalse(isTrusted(zkClient, configsetName, configsetSuffix)); assertConfigsetFiles(configsetName, configsetSuffix, zkClient); // Overwrite with trusted request - int extraFileZkVersion = getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "test/different/path/solrconfig.xml"); - assertEquals(0, uploadSingleConfigSetFile(configsetName, configsetSuffix, "solr", "solr/configsets/upload/regular/solrconfig.xml", "/test/different/path/solrconfig.xml", true, false, v2)); - assertTrue("Expecting version bump", - extraFileZkVersion < getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "test/different/path/solrconfig.xml")); + int extraFileZkVersion = + getConfigZNodeVersion( + zkClient, configsetName, configsetSuffix, "test/different/path/solrconfig.xml"); + assertEquals( + 0, + uploadSingleConfigSetFile( + configsetName, + configsetSuffix, + "solr", + "solr/configsets/upload/regular/solrconfig.xml", + "/test/different/path/solrconfig.xml", + true, + false, + v2)); + assertTrue( + "Expecting version bump", + extraFileZkVersion + < getConfigZNodeVersion( + zkClient, configsetName, configsetSuffix, "test/different/path/solrconfig.xml")); assertFalse(isTrusted(zkClient, configsetName, configsetSuffix)); assertConfigsetFiles(configsetName, configsetSuffix, zkClient); // Overwrite with untrusted request - extraFileZkVersion = getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "test/upload/path/solrconfig.xml"); - assertEquals(0, uploadSingleConfigSetFile(configsetName, configsetSuffix, null, "solr/configsets/upload/regular/solrconfig.xml", "/test/upload/path/solrconfig.xml", true, false, v2)); - assertTrue("Expecting version bump", - extraFileZkVersion < getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "test/upload/path/solrconfig.xml")); + extraFileZkVersion = + getConfigZNodeVersion( + zkClient, configsetName, configsetSuffix, "test/upload/path/solrconfig.xml"); + assertEquals( + 0, + uploadSingleConfigSetFile( + configsetName, + configsetSuffix, + null, + "solr/configsets/upload/regular/solrconfig.xml", + "/test/upload/path/solrconfig.xml", + true, + false, + v2)); + assertTrue( + "Expecting version bump", + extraFileZkVersion + < getConfigZNodeVersion( + zkClient, configsetName, configsetSuffix, "test/upload/path/solrconfig.xml")); assertFalse(isTrusted(zkClient, configsetName, 
configsetSuffix)); assertConfigsetFiles(configsetName, configsetSuffix, zkClient); // Make sure that cleanup flag does not result in configSet being trusted. ignoreException("ConfigSet uploads do not allow cleanup=true when filePath is used."); - extraFileZkVersion = getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "test/different/path/solrconfig.xml"); - assertEquals(400, uploadSingleConfigSetFile(configsetName, configsetSuffix, "solr", "solr/configsets/upload/regular/solrconfig.xml", "/test/different/path/solrconfig.xml", true, true, v2)); - assertEquals("Expecting version to stay the same", - extraFileZkVersion, getConfigZNodeVersion(zkClient, configsetName, configsetSuffix, "test/different/path/solrconfig.xml")); - assertFalse("The cleanup=true flag allowed for trust overwriting in a filePath upload.", isTrusted(zkClient, configsetName, configsetSuffix)); + extraFileZkVersion = + getConfigZNodeVersion( + zkClient, configsetName, configsetSuffix, "test/different/path/solrconfig.xml"); + assertEquals( + 400, + uploadSingleConfigSetFile( + configsetName, + configsetSuffix, + "solr", + "solr/configsets/upload/regular/solrconfig.xml", + "/test/different/path/solrconfig.xml", + true, + true, + v2)); + assertEquals( + "Expecting version to stay the same", + extraFileZkVersion, + getConfigZNodeVersion( + zkClient, configsetName, configsetSuffix, "test/different/path/solrconfig.xml")); + assertFalse( + "The cleanup=true flag allowed for trust overwriting in a filePath upload.", + isTrusted(zkClient, configsetName, configsetSuffix)); assertConfigsetFiles(configsetName, configsetSuffix, zkClient); unIgnoreException("ConfigSet uploads do not allow cleanup=true when filePath is used."); } @@ -778,38 +1080,83 @@ public void testSingleFileNewConfig(boolean v2) throws Exception { String configsetName = "regular"; String configsetSuffixTrusted = "testSinglePathNewConfig-1-" + v2; String configsetSuffixUntrusted = "testSinglePathNewConfig-2-" + v2; - try (SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(), - AbstractZkTestCase.TIMEOUT, 45000, null)) { + try (SolrZkClient zkClient = + new SolrZkClient( + cluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, 45000, null)) { // New file with trusted request - assertEquals(0, uploadSingleConfigSetFile(configsetName, configsetSuffixTrusted, "solr", "solr/configsets/upload/regular/solrconfig.xml", "solrconfig.xml", false, false, v2)); - assertEquals("Expecting first version of new file", 0, getConfigZNodeVersion(zkClient, configsetName, configsetSuffixTrusted, "solrconfig.xml")); + assertEquals( + 0, + uploadSingleConfigSetFile( + configsetName, + configsetSuffixTrusted, + "solr", + "solr/configsets/upload/regular/solrconfig.xml", + "solrconfig.xml", + false, + false, + v2)); + assertEquals( + "Expecting first version of new file", + 0, + getConfigZNodeVersion(zkClient, configsetName, configsetSuffixTrusted, "solrconfig.xml")); assertTrue(isTrusted(zkClient, configsetName, configsetSuffixTrusted)); - List children = zkClient.getChildren(String.format(Locale.ROOT,"/configs/%s%s", configsetName, configsetSuffixTrusted), null, true); + List children = + zkClient.getChildren( + String.format(Locale.ROOT, "/configs/%s%s", configsetName, configsetSuffixTrusted), + null, + true); assertEquals("The configSet should only have one file uploaded.", 1, children.size()); assertEquals("Incorrect file uploaded.", "solrconfig.xml", children.get(0)); // New file with trusted request - assertEquals(0, 
uploadSingleConfigSetFile(configsetName, configsetSuffixUntrusted, null, "solr/configsets/upload/regular/solrconfig.xml", "solrconfig.xml", false, false, v2)); - assertEquals("Expecting first version of new file", 0, getConfigZNodeVersion(zkClient, configsetName, configsetSuffixUntrusted, "solrconfig.xml")); + assertEquals( + 0, + uploadSingleConfigSetFile( + configsetName, + configsetSuffixUntrusted, + null, + "solr/configsets/upload/regular/solrconfig.xml", + "solrconfig.xml", + false, + false, + v2)); + assertEquals( + "Expecting first version of new file", + 0, + getConfigZNodeVersion( + zkClient, configsetName, configsetSuffixUntrusted, "solrconfig.xml")); assertFalse(isTrusted(zkClient, configsetName, configsetSuffixUntrusted)); - children = zkClient.getChildren(String.format(Locale.ROOT,"/configs/%s%s", configsetName, configsetSuffixUntrusted), null, true); + children = + zkClient.getChildren( + String.format(Locale.ROOT, "/configs/%s%s", configsetName, configsetSuffixUntrusted), + null, + true); assertEquals("The configSet should only have one file uploaded.", 1, children.size()); assertEquals("Incorrect file uploaded.", "solrconfig.xml", children.get(0)); } } - private boolean isTrusted(SolrZkClient zkClient, String configsetName, String configsetSuffix) throws KeeperException, InterruptedException { - String configSetZkPath = String.format(Locale.ROOT,"/configs/%s%s", configsetName, configsetSuffix); - byte[] configSetNodeContent = zkClient.getData(configSetZkPath, null, null, true);; + private boolean isTrusted(SolrZkClient zkClient, String configsetName, String configsetSuffix) + throws KeeperException, InterruptedException { + String configSetZkPath = + String.format(Locale.ROOT, "/configs/%s%s", configsetName, configsetSuffix); + byte[] configSetNodeContent = zkClient.getData(configSetZkPath, null, null, true); + ; @SuppressWarnings("unchecked") Map contentMap = (Map) Utils.fromJSON(configSetNodeContent); return (boolean) contentMap.getOrDefault("trusted", true); } - private int getConfigZNodeVersion(SolrZkClient zkClient, String configsetName, String configsetSuffix, String configFile) throws KeeperException, InterruptedException { + private int getConfigZNodeVersion( + SolrZkClient zkClient, String configsetName, String configsetSuffix, String configFile) + throws KeeperException, InterruptedException { Stat stat = new Stat(); - zkClient.getData(String.format(Locale.ROOT, "/configs/%s%s/%s", configsetName, configsetSuffix, configFile), null, stat, true); + zkClient.getData( + String.format(Locale.ROOT, "/configs/%s%s/%s", configsetName, configsetSuffix, configFile), + null, + stat, + true); return stat.getVersion(); } @@ -831,10 +1178,17 @@ public void testUploadWithScriptUpdateProcessor() throws Exception { uploadConfigSetWithAssertions("with-script-processor", untrustedSuffix, null); // try to create a collection with the uploaded configset ignoreException("uploaded without any authentication in place"); - Throwable thrown = expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> { - createCollection("newcollection2", "with-script-processor" + untrustedSuffix, - 1, 1, cluster.getSolrClient()); - }); + Throwable thrown = + expectThrows( + BaseHttpSolrClient.RemoteSolrException.class, + () -> { + createCollection( + "newcollection2", + "with-script-processor" + untrustedSuffix, + 1, + 1, + cluster.getSolrClient()); + }); unIgnoreException("uploaded without any authentication in place"); assertThat(thrown.getMessage(), containsString("Underlying core creation 
failed")); @@ -843,10 +1197,14 @@ public void testUploadWithScriptUpdateProcessor() throws Exception { final String trustedSuffix = "-trusted"; uploadConfigSetWithAssertions("with-script-processor", trustedSuffix, "solr"); // try to create a collection with the uploaded configset - CollectionAdminResponse resp = createCollection("newcollection2", "with-script-processor" + trustedSuffix, - 1, 1, cluster.getSolrClient()); + CollectionAdminResponse resp = + createCollection( + "newcollection2", + "with-script-processor" + trustedSuffix, + 1, + 1, + cluster.getSolrClient()); scriptRequest("newcollection2"); - } @Test @@ -855,10 +1213,17 @@ public void testUploadWithLibDirective() throws Exception { uploadConfigSetWithAssertions("with-lib-directive", untrustedSuffix, null); // try to create a collection with the uploaded configset ignoreException("without any authentication in place"); - Throwable thrown = expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> { - createCollection("newcollection3", "with-lib-directive" + untrustedSuffix, - 1, 1, cluster.getSolrClient()); - }); + Throwable thrown = + expectThrows( + BaseHttpSolrClient.RemoteSolrException.class, + () -> { + createCollection( + "newcollection3", + "with-lib-directive" + untrustedSuffix, + 1, + 1, + cluster.getSolrClient()); + }); unIgnoreException("without any authentication in place"); assertThat(thrown.getMessage(), containsString("Underlying core creation failed")); @@ -867,29 +1232,43 @@ public void testUploadWithLibDirective() throws Exception { final String trustedSuffix = "-trusted"; uploadConfigSetWithAssertions("with-lib-directive", trustedSuffix, "solr"); // try to create a collection with the uploaded configset - CollectionAdminResponse resp = createCollection("newcollection3", "with-lib-directive" + trustedSuffix, - 1, 1, cluster.getSolrClient()); + CollectionAdminResponse resp = + createCollection( + "newcollection3", "with-lib-directive" + trustedSuffix, 1, 1, cluster.getSolrClient()); SolrInputDocument doc = sdoc("id", "4055", "subject", "Solr"); cluster.getSolrClient().add("newcollection3", doc); cluster.getSolrClient().commit("newcollection3"); - assertEquals("4055", cluster.getSolrClient().query("newcollection3", - params("q", "*:*")).getResults().get(0).get("id")); + assertEquals( + "4055", + cluster + .getSolrClient() + .query("newcollection3", params("q", "*:*")) + .getResults() + .get(0) + .get("id")); } private static String getSecurityJson() throws KeeperException, InterruptedException { - String securityJson = "{\n" + - " 'authentication':{\n" + - " 'blockUnknown': false,\n" + - " 'class':'" + MockAuthenticationPlugin.class.getName() + "'},\n" + - " 'authorization':{\n" + - " 'class':'" + MockAuthorizationPlugin.class.getName() + "'}}"; + String securityJson = + "{\n" + + " 'authentication':{\n" + + " 'blockUnknown': false,\n" + + " 'class':'" + + MockAuthenticationPlugin.class.getName() + + "'},\n" + + " 'authorization':{\n" + + " 'class':'" + + MockAuthorizationPlugin.class.getName() + + "'}}"; return securityJson; } - private void uploadConfigSetWithAssertions(String configSetName, String suffix, String username) throws Exception { - SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(), - AbstractZkTestCase.TIMEOUT, 45000, null); + private void uploadConfigSetWithAssertions(String configSetName, String suffix, String username) + throws Exception { + SolrZkClient zkClient = + new SolrZkClient( + cluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, 45000, 
null); try { long statusCode = uploadConfigSet(configSetName, suffix, username, zkClient, true); assertEquals(0l, statusCode); @@ -898,71 +1277,124 @@ private void uploadConfigSetWithAssertions(String configSetName, String suffix, zkClient.close(); } } - private void assertConfigsetFiles(String configSetName, String suffix, SolrZkClient zkClient) throws KeeperException, InterruptedException, IOException { - assertTrue("managed-schema.xml file should have been uploaded", - zkClient.exists("/configs/"+configSetName+suffix+"/managed-schema.xml", true)); - assertTrue("managed-schema.xml file contents on zookeeper are not exactly same as that of the file uploaded in config", - Arrays.equals(zkClient.getData("/configs/"+configSetName+suffix+"/managed-schema.xml", null, null, true), - readFile("solr/configsets/upload/"+configSetName+"/managed-schema.xml"))); - - assertTrue("solrconfig.xml file should have been uploaded", - zkClient.exists("/configs/"+configSetName+suffix+"/solrconfig.xml", true)); - byte data[] = zkClient.getData("/configs/"+configSetName+suffix, null, null, true); - //assertEquals("{\"trusted\": false}", new String(data, StandardCharsets.UTF_8)); - assertTrue("solrconfig.xml file contents on zookeeper are not exactly same as that of the file uploaded in config", - Arrays.equals(zkClient.getData("/configs/"+configSetName+suffix+"/solrconfig.xml", null, null, true), - readFile("solr/configsets/upload/"+configSetName+"/solrconfig.xml"))); - } - private long uploadConfigSet(String configSetName, String suffix, String username, - SolrZkClient zkClient, boolean v2) throws IOException { + private void assertConfigsetFiles(String configSetName, String suffix, SolrZkClient zkClient) + throws KeeperException, InterruptedException, IOException { + assertTrue( + "managed-schema.xml file should have been uploaded", + zkClient.exists("/configs/" + configSetName + suffix + "/managed-schema.xml", true)); + assertTrue( + "managed-schema.xml file contents on zookeeper are not exactly same as that of the file uploaded in config", + Arrays.equals( + zkClient.getData( + "/configs/" + configSetName + suffix + "/managed-schema.xml", null, null, true), + readFile("solr/configsets/upload/" + configSetName + "/managed-schema.xml"))); + + assertTrue( + "solrconfig.xml file should have been uploaded", + zkClient.exists("/configs/" + configSetName + suffix + "/solrconfig.xml", true)); + byte data[] = zkClient.getData("/configs/" + configSetName + suffix, null, null, true); + // assertEquals("{\"trusted\": false}", new String(data, StandardCharsets.UTF_8)); + assertTrue( + "solrconfig.xml file contents on zookeeper are not exactly same as that of the file uploaded in config", + Arrays.equals( + zkClient.getData( + "/configs/" + configSetName + suffix + "/solrconfig.xml", null, null, true), + readFile("solr/configsets/upload/" + configSetName + "/solrconfig.xml"))); + } + + private long uploadConfigSet( + String configSetName, String suffix, String username, SolrZkClient zkClient, boolean v2) + throws IOException { assertFalse(getConfigSetService().checkConfigExists(configSetName + suffix)); return uploadConfigSet(configSetName, suffix, username, false, false, v2); } - private long uploadConfigSet(String configSetName, String suffix, String username, - boolean overwrite, boolean cleanup, boolean v2) throws IOException { - + private long uploadConfigSet( + String configSetName, + String suffix, + String username, + boolean overwrite, + boolean cleanup, + boolean v2) + throws IOException { + // Read zipped 
sample config - return uploadGivenConfigSet(createTempZipFile("solr/configsets/upload/"+configSetName), - configSetName, suffix, username, overwrite, cleanup, v2); - } + return uploadGivenConfigSet( + createTempZipFile("solr/configsets/upload/" + configSetName), + configSetName, + suffix, + username, + overwrite, + cleanup, + v2); + } + + private long uploadBadConfigSet( + String configSetName, + String suffix, + String username, + boolean overwrite, + boolean cleanup, + boolean v2) + throws IOException { - private long uploadBadConfigSet(String configSetName, String suffix, String username, - boolean overwrite, boolean cleanup, boolean v2) throws IOException { - // Read single file from sample configs. This should fail the unzipping - return uploadGivenConfigSet(SolrTestCaseJ4.getFile("solr/configsets/upload/regular/solrconfig.xml"), - configSetName, suffix, username, overwrite, cleanup, v2); - } + return uploadGivenConfigSet( + SolrTestCaseJ4.getFile("solr/configsets/upload/regular/solrconfig.xml"), + configSetName, + suffix, + username, + overwrite, + cleanup, + v2); + } + + private long uploadGivenConfigSet( + File file, + String configSetName, + String suffix, + String username, + boolean overwrite, + boolean cleanup, + boolean v2) + throws IOException { - private long uploadGivenConfigSet(File file, String configSetName, String suffix, String username, - boolean overwrite, boolean cleanup, boolean v2) throws IOException { - if (v2) { // TODO: switch to using V2Request - - final ByteBuffer fileBytes = TestSolrConfigHandler.getFileContent(file.getAbsolutePath(), false); - final String uriEnding = "/api/cluster/configs/" + configSetName+suffix + (!overwrite? "?overwrite=false" : "") + (cleanup? "?cleanup=true" : ""); + + final ByteBuffer fileBytes = + TestSolrConfigHandler.getFileContent(file.getAbsolutePath(), false); + final String uriEnding = + "/api/cluster/configs/" + + configSetName + + suffix + + (!overwrite ? "?overwrite=false" : "") + + (cleanup ? "?cleanup=true" : ""); final boolean usePut = true; - Map map = postDataAndGetResponse(cluster.getSolrClient(), - cluster.getJettySolrRunners().get(0).getBaseUrl().toString().replace("/solr", "") + uriEnding, - fileBytes, username, usePut); + Map map = + postDataAndGetResponse( + cluster.getSolrClient(), + cluster.getJettySolrRunners().get(0).getBaseUrl().toString().replace("/solr", "") + + uriEnding, + fileBytes, + username, + usePut); assertNotNull(map); - long statusCode = (long) getObjectByPath(map, false, Arrays.asList("responseHeader", "status")); + long statusCode = + (long) getObjectByPath(map, false, Arrays.asList("responseHeader", "status")); return statusCode; - } // else "not" a V2 request... - + try { return (new Upload()) - .setConfigSetName(configSetName + suffix) - .setUploadFile(file, "application/zip") - .setOverwrite(overwrite ? true : null) // expect server default to be 'false' - .setCleanup(cleanup ? true : null) // expect server default to be 'false' - .setBasicAuthCredentials(username, username) // for our MockAuthenticationPlugin - .process(cluster.getSolrClient()) - .getStatus(); + .setConfigSetName(configSetName + suffix) + .setUploadFile(file, "application/zip") + .setOverwrite(overwrite ? true : null) // expect server default to be 'false' + .setCleanup(cleanup ? 
true : null) // expect server default to be 'false' + .setBasicAuthCredentials(username, username) // for our MockAuthenticationPlugin + .process(cluster.getSolrClient()) + .getStatus(); } catch (SolrServerException e1) { throw new AssertionError("Server error uploading configset: " + e1.toString(), e1); } catch (SolrException e2) { @@ -970,37 +1402,59 @@ private long uploadGivenConfigSet(File file, String configSetName, String suffix } } - private long uploadSingleConfigSetFile(String configSetName, String suffix, String username, - String localFilePath, String uploadPath, boolean overwrite, boolean cleanup, boolean v2) throws IOException { + private long uploadSingleConfigSetFile( + String configSetName, + String suffix, + String username, + String localFilePath, + String uploadPath, + boolean overwrite, + boolean cleanup, + boolean v2) + throws IOException { // Read single file from sample configs final File file = SolrTestCaseJ4.getFile(localFilePath); if (v2) { // TODO: switch to use V2Request - - final ByteBuffer sampleConfigFile = TestSolrConfigHandler.getFileContent(file.getAbsolutePath(), false); - final String uriEnding = "/api/cluster/configs/" + configSetName+suffix + "/" + uploadPath + (!overwrite? "?overwrite=false" : "") + (cleanup? "?cleanup=true" : ""); + + final ByteBuffer sampleConfigFile = + TestSolrConfigHandler.getFileContent(file.getAbsolutePath(), false); + final String uriEnding = + "/api/cluster/configs/" + + configSetName + + suffix + + "/" + + uploadPath + + (!overwrite ? "?overwrite=false" : "") + + (cleanup ? "?cleanup=true" : ""); final boolean usePut = true; - Map map = postDataAndGetResponse(cluster.getSolrClient(), - cluster.getJettySolrRunners().get(0).getBaseUrl().toString().replace("/solr", "") + uriEnding, - sampleConfigFile, username, usePut); + Map map = + postDataAndGetResponse( + cluster.getSolrClient(), + cluster.getJettySolrRunners().get(0).getBaseUrl().toString().replace("/solr", "") + + uriEnding, + sampleConfigFile, + username, + usePut); assertNotNull(map); - long statusCode = (long) getObjectByPath(map, false, Arrays.asList("responseHeader", "status")); + long statusCode = + (long) getObjectByPath(map, false, Arrays.asList("responseHeader", "status")); return statusCode; - } // else "not" a V2 request... - + try { return (new Upload()) - .setConfigSetName(configSetName + suffix) - .setFilePath(uploadPath) - .setUploadFile(file, "application/octet-stream") // NOTE: server doesn't actually care, and test plumbing doesn't tell us - .setOverwrite(overwrite ? true : null) // expect server default to be 'false' - .setCleanup(cleanup ? true : null) // expect server default to be 'false' - .setBasicAuthCredentials(username, username) // for our MockAuthenticationPlugin - .process(cluster.getSolrClient()) - .getStatus(); + .setConfigSetName(configSetName + suffix) + .setFilePath(uploadPath) + // NOTE: server doesn't actually care, and test plumbing doesn't tell us + .setUploadFile(file, "application/octet-stream") + .setOverwrite(overwrite ? true : null) // expect server default to be 'false' + .setCleanup(cleanup ? 
true : null) // expect server default to be 'false' + .setBasicAuthCredentials(username, username) // for our MockAuthenticationPlugin + .process(cluster.getSolrClient()) + .getStatus(); } catch (SolrServerException e1) { throw new AssertionError("Server error uploading file to configset: " + e1.toString(), e1); } catch (SolrException e2) { @@ -1008,19 +1462,18 @@ private long uploadSingleConfigSetFile(String configSetName, String suffix, Stri } } - /** - * Create a zip file (in the temp directory) containing all the files within the specified directory - * and return the zip file. + * Create a zip file (in the temp directory) containing all the files within the specified + * directory and return the zip file. */ private File createTempZipFile(String directoryPath) { try { - final File zipFile = createTempFile("configset","zip").toFile(); + final File zipFile = createTempFile("configset", "zip").toFile(); final File directory = SolrTestCaseJ4.getFile(directoryPath); if (log.isInfoEnabled()) { log.info("Directory: {}", directory.getAbsolutePath()); } - zip (directory, zipFile); + zip(directory, zipFile); if (log.isInfoEnabled()) { log.info("Zipfile: {}", zipFile.getAbsolutePath()); } @@ -1077,11 +1530,18 @@ public void scriptRequest(String collection) throws SolrServerException, IOExcep client.add(collection, doc); client.commit(collection); - assertEquals("42", client.query(collection, params("q", "*:*")).getResults().get(0).get("script_added_i")); + assertEquals( + "42", + client.query(collection, params("q", "*:*")).getResults().get(0).get("script_added_i")); } - protected CollectionAdminResponse createCollection(String collectionName, String confSetName, int numShards, - int replicationFactor, SolrClient client) throws SolrServerException, IOException { + protected CollectionAdminResponse createCollection( + String collectionName, + String confSetName, + int numShards, + int replicationFactor, + SolrClient client) + throws SolrServerException, IOException { ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionAction.CREATE.toString()); params.set("collection.configName", confSetName); @@ -1096,8 +1556,9 @@ protected CollectionAdminResponse createCollection(String collectionName, String return res; } - public static Map postDataAndGetResponse(CloudSolrClient cloudClient, - String uri, ByteBuffer bytarr, String username, boolean usePut) throws IOException { + public static Map postDataAndGetResponse( + CloudSolrClient cloudClient, String uri, ByteBuffer bytarr, String username, boolean usePut) + throws IOException { HttpEntityEnclosingRequestBase httpRequest = null; HttpEntity entity; String response = null; @@ -1115,11 +1576,10 @@ protected CollectionAdminResponse createCollection(String collectionName, String } httpRequest.setHeader("Content-Type", "application/octet-stream"); - httpRequest.setEntity(new ByteArrayEntity(bytarr.array(), bytarr - .arrayOffset(), bytarr.limit())); + httpRequest.setEntity( + new ByteArrayEntity(bytarr.array(), bytarr.arrayOffset(), bytarr.limit())); log.info("Uploading configset with user {}", username); - entity = cloudClient.getLbClient().getHttpClient().execute(httpRequest) - .getEntity(); + entity = cloudClient.getLbClient().getHttpClient().execute(httpRequest).getEntity(); try { response = EntityUtils.toString(entity, UTF_8); m = (Map) Utils.fromJSONString(response); @@ -1133,8 +1593,8 @@ protected CollectionAdminResponse createCollection(String collectionName, String return m; } - private static Object 
getObjectByPath(Map root, - boolean onlyPrimitive, java.util.List hierarchy) { + private static Object getObjectByPath( + Map root, boolean onlyPrimitive, java.util.List hierarchy) { Map obj = root; for (int i = 0; i < hierarchy.size(); i++) { String s = hierarchy.get(i); @@ -1172,8 +1632,10 @@ public void testDeleteErrors() throws Exception { tmpConfigDir.deleteOnExit(); // Ensure ConfigSet is immutable FileUtils.copyDirectory(configDir, tmpConfigDir); - FileUtils.write(new File(tmpConfigDir, "configsetprops.json"), - getConfigSetProps(ImmutableMap.of("immutable", "true")), UTF_8); + FileUtils.write( + new File(tmpConfigDir, "configsetprops.json"), + getConfigSetProps(ImmutableMap.of("immutable", "true")), + UTF_8); getConfigSetService().uploadConfig("configSet", tmpConfigDir.toPath()); // no ConfigSet name @@ -1187,13 +1649,14 @@ public void testDeleteErrors() throws Exception { solrClient.close(); } - private void verifyException(SolrClient solrClient, - ConfigSetAdminRequest request, - String errorContains) throws Exception { + private void verifyException( + SolrClient solrClient, ConfigSetAdminRequest request, String errorContains) + throws Exception { ignoreException(errorContains); Exception e = expectThrows(Exception.class, () -> solrClient.request(request)); - assertTrue("Expected exception message to contain: " + errorContains - + " got: " + e.getMessage(), e.getMessage().contains(errorContains)); + assertTrue( + "Expected exception message to contain: " + errorContains + " got: " + e.getMessage(), + e.getMessage().contains(errorContains)); unIgnoreException(errorContains); } @@ -1208,9 +1671,14 @@ public void testDelete() throws Exception { solrClient.close(); } - private void assertDelete(SolrClient solrClient, String configSet, boolean assertExists) throws IOException, SolrServerException { - SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(), - AbstractZkTestCase.TIMEOUT, AbstractZkTestCase.TIMEOUT, null); + private void assertDelete(SolrClient solrClient, String configSet, boolean assertExists) + throws IOException, SolrServerException { + SolrZkClient zkClient = + new SolrZkClient( + cluster.getZkServer().getZkAddress(), + AbstractZkTestCase.TIMEOUT, + AbstractZkTestCase.TIMEOUT, + null); try { assertEquals(assertExists, getConfigSetService().checkConfigExists(configSet)); @@ -1229,8 +1697,12 @@ public void testList() throws Exception { final String baseUrl = cluster.getJettySolrRunners().get(0).getBaseUrl().toString(); final SolrClient solrClient = getHttpSolrClient(baseUrl); - SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(), - AbstractZkTestCase.TIMEOUT, AbstractZkTestCase.TIMEOUT, null); + SolrZkClient zkClient = + new SolrZkClient( + cluster.getZkServer().getZkAddress(), + AbstractZkTestCase.TIMEOUT, + AbstractZkTestCase.TIMEOUT, + null); try { // test empty ConfigSetAdminRequest.List list = new ConfigSetAdminRequest.List(); @@ -1257,16 +1729,15 @@ public void testList() throws Exception { } /** - * A simple sanity check that the test-framework hueristic logic for setting - * {@link ExternalPaths#DEFAULT_CONFIGSET} is working as it should - * in the current test env, and finding the real directory which matches what {@link ZkController} - * finds and uses to bootstrap ZK in cloud based tests. 
+ * A simple sanity check that the test-framework heuristic logic for setting {@link + * ExternalPaths#DEFAULT_CONFIGSET} is working as it should in the current test env, and finding + * the real directory which matches what {@link ZkController} finds and uses to bootstrap ZK in + * cloud based tests. * - * <p>
- * This assumes the {@link SolrDispatchFilter#SOLR_DEFAULT_CONFDIR_ATTRIBUTE} system property + * <p>
This assumes the {@link SolrDispatchFilter#SOLR_DEFAULT_CONFDIR_ATTRIBUTE} system property * has not been externally set in the environment where this test is being run -- which should - * never be the case, since it would prevent the test-framework from using - * {@link ExternalPaths#DEFAULT_CONFIGSET} + * never be the case, since it would prevent the test-framework from using {@link + * ExternalPaths#DEFAULT_CONFIGSET} * * @see SolrDispatchFilter#SOLR_DEFAULT_CONFDIR_ATTRIBUTE * @see #beforeSolrTestCase @@ -1275,12 +1746,13 @@ public void testList() throws Exception { @Test public void testUserAndTestDefaultConfigsetsAreSame() throws IOException { final Path extPath = Path.of(ExternalPaths.DEFAULT_CONFIGSET); - assertTrue("_default dir doesn't exist: " + ExternalPaths.DEFAULT_CONFIGSET, Files.exists(extPath)); - assertTrue("_default dir isn't a dir: " + ExternalPaths.DEFAULT_CONFIGSET, Files.isDirectory(extPath)); + assertTrue( + "_default dir doesn't exist: " + ExternalPaths.DEFAULT_CONFIGSET, Files.exists(extPath)); + assertTrue( + "_default dir isn't a dir: " + ExternalPaths.DEFAULT_CONFIGSET, Files.isDirectory(extPath)); final Path zkBootStrap = ConfigSetService.getDefaultConfigDirPath(); - assertEquals("extPath _default configset dir vs zk bootstrap path", - extPath, zkBootStrap); + assertEquals("extPath _default configset dir vs zk bootstrap path", extPath, zkBootStrap); } private StringBuilder getConfigSetProps(Map map) { @@ -1289,7 +1761,7 @@ private StringBuilder getConfigSetProps(Map map) { public static class CreateNoErrorChecking extends ConfigSetAdminRequest.Create { public ConfigSetAdminRequest setAction(ConfigSetAction action) { - return super.setAction(action); + return super.setAction(action); } @Override @@ -1304,7 +1776,7 @@ public SolrParams getParams() { public static class DeleteNoErrorChecking extends ConfigSetAdminRequest.Delete { public ConfigSetAdminRequest setAction(ConfigSetAction action) { - return super.setAction(action); + return super.setAction(action); } @Override @@ -1321,19 +1793,30 @@ public static class MockAuthenticationPlugin extends BasicAuthPlugin { @Override public AuthenticationProvider getAuthenticationProvider(Map pluginConfig) { return new AuthenticationProvider() { - @Override public void init(Map ignored) { } - @Override public ValidatingJsonMap getSpec() { return Utils.getSpec("cluster.security.BasicAuth.Commands").getSpec(); } - @Override public boolean authenticate(String user, String pwd) { + @Override + public void init(Map ignored) {} + + @Override + public ValidatingJsonMap getSpec() { + return Utils.getSpec("cluster.security.BasicAuth.Commands").getSpec(); + } + + @Override + public boolean authenticate(String user, String pwd) { return user.equals(pwd); } - @Override public Map getPromptHeaders() { + + @Override + public Map getPromptHeaders() { return Collections.emptyMap(); } }; } @Override - public boolean doAuthenticate(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) throws Exception { + public boolean doAuthenticate( + HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) + throws Exception { if (request.getHeader("user") != null) { final Principal p = new BasicUserPrincipal("solr"); filterChain.doFilter(wrap(request, p, "solr"), response); @@ -1365,13 +1848,9 @@ public AuthorizationResponse authorize(AuthorizationContext context) { } @Override - public void init(Map initInfo) { - - } + public void init(Map initInfo) {} @Override - public void close() throws 
IOException { - - } + public void close() throws IOException {} } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIExclusivity.java b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIExclusivity.java index 9887a83ab9b..26cf55b47a4 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIExclusivity.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIExclusivity.java @@ -21,7 +21,6 @@ import java.util.Arrays; import java.util.LinkedList; import java.util.List; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.request.ConfigSetAdminRequest; @@ -34,10 +33,9 @@ import org.slf4j.LoggerFactory; /** - * Tests the exclusivity of the ConfigSets API. - * Submits a number of API requests concurrently and checks that - * the responses indicate the requests are handled sequentially for - * the same ConfigSet and base ConfigSet. + * Tests the exclusivity of the ConfigSets API. Submits a number of API requests concurrently and + * checks that the responses indicate the requests are handled sequentially for the same ConfigSet + * and base ConfigSet. */ public class TestConfigSetsAPIExclusivity extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -74,8 +72,8 @@ public void testAPIExclusivity() throws Exception { new CreateThread(solrCluster, CONFIGSET_NAME, BASE_CONFIGSET_NAME, trials); DeleteThread deleteBaseThread = new DeleteThread(solrCluster, BASE_CONFIGSET_NAME, trials); DeleteThread deleteThread = new DeleteThread(solrCluster, CONFIGSET_NAME, trials); - List threads = Arrays.asList( - createBaseThread, createThread, deleteBaseThread, deleteThread); + List threads = + Arrays.asList(createBaseThread, createThread, deleteBaseThread, deleteThread); for (ConfigSetsAPIThread thread : threads) { thread.start(); @@ -87,28 +85,36 @@ public void testAPIExclusivity() throws Exception { for (ConfigSetsAPIThread thread : threads) { exceptions.addAll(thread.getUnexpectedExceptions()); } - assertEquals("Unexpected exception: " + getFirstExceptionOrNull(exceptions), - 0, exceptions.size()); + assertEquals( + "Unexpected exception: " + getFirstExceptionOrNull(exceptions), 0, exceptions.size()); } private void setupBaseConfigSet(String baseConfigSetName) throws Exception { solrCluster.uploadConfigSet(configset("configset-2"), baseConfigSetName); - //Make configset untrusted - solrCluster.getZkClient().setData("/configs/" + baseConfigSetName, "{\"trusted\": false}".getBytes(StandardCharsets.UTF_8), true); + // Make configset untrusted + solrCluster + .getZkClient() + .setData( + "/configs/" + baseConfigSetName, + "{\"trusted\": false}".getBytes(StandardCharsets.UTF_8), + true); } private Exception getFirstExceptionOrNull(List list) { return list.size() == 0 ? 
null : list.get(0); } - private static abstract class ConfigSetsAPIThread extends Thread { + private abstract static class ConfigSetsAPIThread extends Thread { private MiniSolrCloudCluster solrCluster; private int trials; private List unexpectedExceptions = new LinkedList(); - private List allowedExceptions = Arrays.asList(new String[] { - "ConfigSet already exists", - "ConfigSet does not exist to delete", - "Base ConfigSet does not exist"}); + private List allowedExceptions = + Arrays.asList( + new String[] { + "ConfigSet already exists", + "ConfigSet does not exist to delete", + "Base ConfigSet does not exist" + }); public ConfigSetsAPIThread(MiniSolrCloudCluster solrCluster, int trials) { this.solrCluster = solrCluster; @@ -154,8 +160,8 @@ private static class CreateThread extends ConfigSetsAPIThread { private String configSet; private String baseConfigSet; - public CreateThread(MiniSolrCloudCluster solrCluster, String configSet, - String baseConfigSet, int trials) { + public CreateThread( + MiniSolrCloudCluster solrCluster, String configSet, String baseConfigSet, int trials) { super(solrCluster, trials); this.configSet = configSet; this.baseConfigSet = baseConfigSet; diff --git a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIShareSchema.java b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIShareSchema.java index 81284b767c5..cc78567dd28 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIShareSchema.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIShareSchema.java @@ -30,13 +30,16 @@ public class TestConfigSetsAPIShareSchema extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { - System.setProperty("shareSchema", "true"); // see testSharedSchema + System.setProperty("shareSchema", "true"); // see testSharedSchema configureCluster(1) // some tests here assume 1 node - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) - .addConfig("cShare", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "cShare", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .configure(); } + @After public void doAfter() throws Exception { cluster.deleteAllCollections(); @@ -55,9 +58,11 @@ public void testConfigSetDeleteWhenInUse() throws Exception { // TODO - check exception response! 
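+    // Hypothetical sketch for the TODO above (not part of the original change; the asserted
+    // message text "conf1" is an assumption): capture the exception thrown below and check
+    // its response, e.g.
+    //   SolrException e =
+    //       expectThrows(SolrException.class, () -> deleteConfigRequest.process(cluster.getSolrClient()));
+    //   assertTrue(e.getMessage(), e.getMessage().contains("conf1"));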
ConfigSetAdminRequest.Delete deleteConfigRequest = new ConfigSetAdminRequest.Delete(); deleteConfigRequest.setConfigSetName("conf1"); - expectThrows(SolrException.class, () -> { - deleteConfigRequest.process(cluster.getSolrClient()); - }); + expectThrows( + SolrException.class, + () -> { + deleteConfigRequest.process(cluster.getSolrClient()); + }); } @Test @@ -72,22 +77,21 @@ public void testSharedSchema() throws Exception { CoreContainer coreContainer = cluster.getJettySolrRunner(0).getCoreContainer(); try (SolrCore coreCol1 = coreContainer.getCore("col1_shard1_replica_n1"); - SolrCore coreCol2 = coreContainer.getCore("col2_shard1_replica_n1"); - SolrCore coreCol3 = coreContainer.getCore("col3_shard1_replica_n1")) { + SolrCore coreCol2 = coreContainer.getCore("col2_shard1_replica_n1"); + SolrCore coreCol3 = coreContainer.getCore("col3_shard1_replica_n1")) { assertSame(coreCol1.getLatestSchema(), coreCol2.getLatestSchema()); assertNotSame(coreCol1.getLatestSchema(), coreCol3.getLatestSchema()); } // change col1's configSet - CollectionAdminRequest.modifyCollection("col1", - map("collection.configName", (Object) "conf1") // from cShare - ).process(cluster.getSolrClient(), "col1"); + CollectionAdminRequest.modifyCollection( + "col1", map("collection.configName", (Object) "conf1") // from cShare + ) + .process(cluster.getSolrClient(), "col1"); try (SolrCore coreCol1 = coreContainer.getCore("col1_shard1_replica_n1"); - SolrCore coreCol2 = coreContainer.getCore("col2_shard1_replica_n1")) { + SolrCore coreCol2 = coreContainer.getCore("col2_shard1_replica_n1")) { assertNotSame(coreCol1.getLatestSchema(), coreCol2.getLatestSchema()); } - } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIZkFailure.java b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIZkFailure.java index 562798af59d..ad6c3b991fc 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIZkFailure.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIZkFailure.java @@ -16,6 +16,9 @@ */ package org.apache.solr.cloud; +import static org.apache.solr.cloud.ZkConfigSetService.CONFIGS_ZKNODE; + +import com.google.common.collect.ImmutableMap; import java.io.File; import java.io.IOException; import java.io.PrintWriter; @@ -27,8 +30,6 @@ import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.locks.ReentrantReadWriteLock; - -import com.google.common.collect.ImmutableMap; import org.apache.commons.io.FileUtils; import org.apache.jute.InputArchive; import org.apache.jute.OutputArchive; @@ -60,12 +61,9 @@ import org.junit.Before; import org.junit.Test; -import static org.apache.solr.cloud.ZkConfigSetService.CONFIGS_ZKNODE; - /** - * Test the ConfigSets API under ZK failure. In particular, - * if create fails, ensure proper cleanup occurs so we aren't - * left with a partially created ConfigSet. + * Test the ConfigSets API under ZK failure. In particular, if create fails, ensure proper cleanup + * occurs so we aren't left with a partially created ConfigSet. 
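+ * (The failure is injected by wrapping the test ZK server's ZKDatabase so that a read of the
+ * base config data throws once the new ConfigSet node partially exists; see
+ * FailureDuringCopyZKDatabase below.)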
*/ public class TestConfigSetsAPIZkFailure extends SolrTestCaseJ4 { private MiniSolrCloudCluster solrCluster; @@ -81,9 +79,16 @@ public void setUp() throws Exception { final Path zkDir = testDir.resolve("zookeeper/server1/data"); zkTestServer = new ZkTestServer(zkDir); zkTestServer.run(); - zkTestServer.setZKDatabase(new FailureDuringCopyZKDatabase(zkTestServer.getZKDatabase(), zkTestServer)); - solrCluster = new MiniSolrCloudCluster(1, testDir, - MiniSolrCloudCluster.DEFAULT_CLOUD_SOLR_XML, buildJettyConfig("/solr"), zkTestServer, true); + zkTestServer.setZKDatabase( + new FailureDuringCopyZKDatabase(zkTestServer.getZKDatabase(), zkTestServer)); + solrCluster = + new MiniSolrCloudCluster( + 1, + testDir, + MiniSolrCloudCluster.DEFAULT_CLOUD_SOLR_XML, + buildJettyConfig("/solr"), + zkTestServer, + true); } @Override @@ -104,20 +109,26 @@ public void tearDown() throws Exception { public void testCreateZkFailure() throws Exception { final String baseUrl = solrCluster.getJettySolrRunners().get(0).getBaseUrl().toString(); final SolrClient solrClient = getHttpSolrClient(baseUrl); - final ConfigSetService configSetService = solrCluster.getOpenOverseer().getCoreContainer().getConfigSetService(); + final ConfigSetService configSetService = + solrCluster.getOpenOverseer().getCoreContainer().getConfigSetService(); final Map oldProps = ImmutableMap.of("immutable", "true"); setupBaseConfigSet(BASE_CONFIGSET_NAME, oldProps); - SolrZkClient zkClient = new SolrZkClient(solrCluster.getZkServer().getZkAddress(), - AbstractZkTestCase.TIMEOUT, AbstractZkTestCase.TIMEOUT, null); + SolrZkClient zkClient = + new SolrZkClient( + solrCluster.getZkServer().getZkAddress(), + AbstractZkTestCase.TIMEOUT, + AbstractZkTestCase.TIMEOUT, + null); try { assertFalse(configSetService.checkConfigExists(CONFIGSET_NAME)); Create create = new Create(); create.setBaseConfigSetName(BASE_CONFIGSET_NAME).setConfigSetName(CONFIGSET_NAME); - RemoteSolrException se = expectThrows(RemoteSolrException.class, () -> create.process(solrClient)); + RemoteSolrException se = + expectThrows(RemoteSolrException.class, () -> create.process(solrClient)); // partial creation should have been cleaned up assertFalse(configSetService.checkConfigExists(CONFIGSET_NAME)); assertEquals(SolrException.ErrorCode.SERVER_ERROR.code, se.code()); @@ -128,17 +139,25 @@ public void testCreateZkFailure() throws Exception { solrClient.close(); } - private void setupBaseConfigSet(String baseConfigSetName, Map oldProps) throws Exception { + private void setupBaseConfigSet(String baseConfigSetName, Map oldProps) + throws Exception { final File configDir = getFile("solr").toPath().resolve("configsets/configset-2/conf").toFile(); final File tmpConfigDir = createTempDir().toFile(); tmpConfigDir.deleteOnExit(); FileUtils.copyDirectory(configDir, tmpConfigDir); if (oldProps != null) { - FileUtils.write(new File(tmpConfigDir, ConfigSetProperties.DEFAULT_FILENAME), - getConfigSetProps(oldProps), StandardCharsets.UTF_8); + FileUtils.write( + new File(tmpConfigDir, ConfigSetProperties.DEFAULT_FILENAME), + getConfigSetProps(oldProps), + StandardCharsets.UTF_8); } solrCluster.uploadConfigSet(tmpConfigDir.toPath(), baseConfigSetName); - solrCluster.getZkClient().setData("/configs/" + baseConfigSetName, "{\"trusted\": false}".getBytes(StandardCharsets.UTF_8), true); + solrCluster + .getZkClient() + .setData( + "/configs/" + baseConfigSetName, + "{\"trusted\": false}".getBytes(StandardCharsets.UTF_8), + true); } private StringBuilder getConfigSetProps(Map map) { @@ -154,7 +173,8 @@ 
public FailureDuringCopyZKDatabase(ZKDatabase zkdb, ZkTestServer zkTestServer) { } @Override - public byte[] getData(String path, Stat stat, Watcher watcher) throws KeeperException.NoNodeException { + public byte[] getData(String path, Stat stat, Watcher watcher) + throws KeeperException.NoNodeException { // we know we are doing a copy when we are getting data from the base config set and // the new config set (partially) exists String zkAddress = zkTestServer.getZkAddress(); @@ -164,7 +184,8 @@ public byte[] getData(String path, Stat stat, Watcher watcher) throws KeeperExce List children = null; try { children = getChildren(chroot + CONFIGS_ZKNODE + "/" + CONFIGSET_NAME, null, null); - } catch (KeeperException.NoNodeException e) {} + } catch (KeeperException.NoNodeException e) { + } if (children != null && children.size() > 0) { throw new RuntimeException("sample zookeeper error"); } @@ -277,7 +298,8 @@ public ProcessTxnResult processTxn(TxnHeader hdr, Record txn, TxnDigest digest) } @Override - public Stat statNode(String path, ServerCnxn serverCnxn) throws KeeperException.NoNodeException { + public Stat statNode(String path, ServerCnxn serverCnxn) + throws KeeperException.NoNodeException { return zkdb.statNode(path, serverCnxn); } @@ -293,20 +315,28 @@ public List aclForNode(DataNode n) { @Override public byte[] getData(String path, Stat stat, Watcher watcher) - throws KeeperException.NoNodeException { + throws KeeperException.NoNodeException { return zkdb.getData(path, stat, watcher); } @Override - public void setWatches(long relativeZxid, List dataWatches, - List existWatches, List childWatches, - List persistentWatches, - List persistentRecursiveWatches, - Watcher watcher) { - zkdb.setWatches(relativeZxid, dataWatches, existWatches, childWatches, - persistentWatches, persistentRecursiveWatches, watcher); - - } + public void setWatches( + long relativeZxid, + List dataWatches, + List existWatches, + List childWatches, + List persistentWatches, + List persistentRecursiveWatches, + Watcher watcher) { + zkdb.setWatches( + relativeZxid, + dataWatches, + existWatches, + childWatches, + persistentWatches, + persistentRecursiveWatches, + watcher); + } @Override public List getACL(String path, Stat stat) throws NoNodeException { @@ -315,7 +345,7 @@ public List getACL(String path, Stat stat) throws NoNodeException { @Override public List getChildren(String path, Stat stat, Watcher watcher) - throws KeeperException.NoNodeException { + throws KeeperException.NoNodeException { return zkdb.getChildren(path, stat, watcher); } @@ -340,8 +370,7 @@ public void deserializeSnapshot(InputArchive ia) throws IOException { } @Override - public void serializeSnapshot(OutputArchive oa) throws IOException, - InterruptedException { + public void serializeSnapshot(OutputArchive oa) throws IOException, InterruptedException { zkdb.serializeSnapshot(oa); } @@ -364,6 +393,7 @@ public void commit() throws IOException { public void close() throws IOException { zkdb.close(); } + @Override public int getTxnCount() { return zkdb.getTxnCount(); @@ -373,6 +403,5 @@ public int getTxnCount() { public long getTxnSize() { return zkdb.getTxnSize(); } - } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestDeleteCollectionOnDownNodes.java b/solr/core/src/test/org/apache/solr/cloud/TestDeleteCollectionOnDownNodes.java index f2146be7f19..578cf469902 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestDeleteCollectionOnDownNodes.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestDeleteCollectionOnDownNodes.java 
@@ -18,7 +18,6 @@ package org.apache.solr.cloud; import java.util.concurrent.TimeUnit; - import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.junit.After; @@ -34,7 +33,7 @@ public void setupCluster() throws Exception { .addConfig("conf2", configset("cloud-minimal")) .configure(); } - + @After public void teardownCluster() throws Exception { shutdownCluster(); @@ -47,7 +46,7 @@ public void deleteCollectionWithDownNodes() throws Exception { .process(cluster.getSolrClient()); cluster.waitForActiveCollection("halfdeletedcollection2", 60, TimeUnit.SECONDS, 4, 12); - + // stop a couple nodes JettySolrRunner j1 = cluster.stopJettySolrRunner(cluster.getRandomJetty(random())); JettySolrRunner j2 = cluster.stopJettySolrRunner(cluster.getRandomJetty(random())); @@ -56,11 +55,19 @@ public void deleteCollectionWithDownNodes() throws Exception { cluster.waitForJettyToStop(j2); // delete the collection - CollectionAdminRequest.deleteCollection("halfdeletedcollection2").process(cluster.getSolrClient()); - waitForState("Timed out waiting for collection to be deleted", "halfdeletedcollection2", (n, c) -> c == null); - - assertFalse("Still found collection that should be gone", - cluster.getSolrClient().getZkStateReader().getClusterState().hasCollection("halfdeletedcollection2")); + CollectionAdminRequest.deleteCollection("halfdeletedcollection2") + .process(cluster.getSolrClient()); + waitForState( + "Timed out waiting for collection to be deleted", + "halfdeletedcollection2", + (n, c) -> c == null); + assertFalse( + "Still found collection that should be gone", + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .hasCollection("halfdeletedcollection2")); } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestDistribDocBasedVersion.java b/solr/core/src/test/org/apache/solr/cloud/TestDistribDocBasedVersion.java index 946b3941909..48dd1245673 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestDistribDocBasedVersion.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestDistribDocBasedVersion.java @@ -16,6 +16,12 @@ */ package org.apache.solr.cloud; +import java.lang.invoke.MethodHandles; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.client.solrj.response.QueryResponse; @@ -27,24 +33,15 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.lang.invoke.MethodHandles; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - - public class TestDistribDocBasedVersion extends AbstractFullDistribZkTestBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - String bucket1 = "shard1"; // shard1: top bits:10 80000000:ffffffff - String bucket2 = "shard2"; // shard2: top bits:00 00000000:7fffffff + String bucket1 = "shard1"; // shard1: top bits:10 80000000:ffffffff + String bucket2 = "shard2"; // shard2: top bits:00 00000000:7fffffff private static String vfield = "my_version_l"; - @BeforeClass public static void beforeShardHashingTest() throws Exception { useFactory(null); @@ -56,37 +53,36 @@ protected String getCloudSolrConfig() { } public TestDistribDocBasedVersion() { - schemaString = "schema15.xml"; // we need a string id + schemaString = "schema15.xml"; 
// we need a string id super.sliceCount = 2; /*** - hash of a is 3c2569b2 high bits=0 shard=shard3 - hash of b is 95de7e03 high bits=2 shard=shard1 - hash of c is e132d65f high bits=3 shard=shard2 - hash of d is 27191473 high bits=0 shard=shard3 - hash of e is 656c4367 high bits=1 shard=shard4 - hash of f is 2b64883b high bits=0 shard=shard3 - hash of g is f18ae416 high bits=3 shard=shard2 - hash of h is d482b2d3 high bits=3 shard=shard2 - hash of i is 811a702b high bits=2 shard=shard1 - hash of j is ca745a39 high bits=3 shard=shard2 - hash of k is cfbda5d1 high bits=3 shard=shard2 - hash of l is 1d5d6a2c high bits=0 shard=shard3 - hash of m is 5ae4385c high bits=1 shard=shard4 - hash of n is c651d8ac high bits=3 shard=shard2 - hash of o is 68348473 high bits=1 shard=shard4 - hash of p is 986fdf9a high bits=2 shard=shard1 - hash of q is ff8209e8 high bits=3 shard=shard2 - hash of r is 5c9373f1 high bits=1 shard=shard4 - hash of s is ff4acaf1 high bits=3 shard=shard2 - hash of t is ca87df4d high bits=3 shard=shard2 - hash of u is 62203ae0 high bits=1 shard=shard4 - hash of v is bdafcc55 high bits=2 shard=shard1 - hash of w is ff439d1f high bits=3 shard=shard2 - hash of x is 3e9a9b1b high bits=0 shard=shard3 - hash of y is 477d9216 high bits=1 shard=shard4 - hash of z is c1f69a17 high bits=3 shard=shard2 + * hash of a is 3c2569b2 high bits=0 shard=shard3 + * hash of b is 95de7e03 high bits=2 shard=shard1 + * hash of c is e132d65f high bits=3 shard=shard2 + * hash of d is 27191473 high bits=0 shard=shard3 + * hash of e is 656c4367 high bits=1 shard=shard4 + * hash of f is 2b64883b high bits=0 shard=shard3 + * hash of g is f18ae416 high bits=3 shard=shard2 + * hash of h is d482b2d3 high bits=3 shard=shard2 + * hash of i is 811a702b high bits=2 shard=shard1 + * hash of j is ca745a39 high bits=3 shard=shard2 + * hash of k is cfbda5d1 high bits=3 shard=shard2 + * hash of l is 1d5d6a2c high bits=0 shard=shard3 + * hash of m is 5ae4385c high bits=1 shard=shard4 + * hash of n is c651d8ac high bits=3 shard=shard2 + * hash of o is 68348473 high bits=1 shard=shard4 + * hash of p is 986fdf9a high bits=2 shard=shard1 + * hash of q is ff8209e8 high bits=3 shard=shard2 + * hash of r is 5c9373f1 high bits=1 shard=shard4 + * hash of s is ff4acaf1 high bits=3 shard=shard2 + * hash of t is ca87df4d high bits=3 shard=shard2 + * hash of u is 62203ae0 high bits=1 shard=shard4 + * hash of v is bdafcc55 high bits=2 shard=shard1 + * hash of w is ff439d1f high bits=3 shard=shard2 + * hash of x is 3e9a9b1b high bits=0 shard=shard3 + * hash of y is 477d9216 high bits=1 shard=shard4 + * hash of z is c1f69a17 high bits=3 shard=shard2 ***/ } @@ -129,17 +125,24 @@ private void doTestHardFail() throws Exception { } private void doTestHardFail(String id) throws Exception { - vdelete(id, 5, "update.chain","external-version-failhard"); - vadd(id, 10, "update.chain","external-version-failhard"); - vadd(id ,15, "update.chain","external-version-failhard"); - vaddFail(id ,11, 409, "update.chain","external-version-failhard"); - vdeleteFail(id ,11, 409, "update.chain","external-version-failhard"); - vdelete(id, 20, "update.chain","external-version-failhard"); + vdelete(id, 5, "update.chain", "external-version-failhard"); + vadd(id, 10, "update.chain", "external-version-failhard"); + vadd(id, 15, "update.chain", "external-version-failhard"); + vaddFail(id, 11, 409, "update.chain", "external-version-failhard"); + vdeleteFail(id, 11, 409, "update.chain", "external-version-failhard"); + vdelete(id, 20, "update.chain", 
"external-version-failhard"); } private void doTestDocVersions() throws Exception { log.info("### STARTING doTestDocVersions"); - assertEquals(2, cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION).getSlices().size()); + assertEquals( + 2, + cloudClient + .getZkStateReader() + .getClusterState() + .getCollection(DEFAULT_COLLECTION) + .getSlices() + .size()); solrClient = cloudClient; @@ -180,7 +183,6 @@ private void doTestDocVersions() throws Exception { vadd("b!doc123", 101); doRTG("b!doc123", "101"); - // // now test with a non-smart client // @@ -227,16 +229,19 @@ private void doTestDocVersions() throws Exception { commit(); // check liveness for all docs - doQuery("b!doc123,101,c!doc2,23,d!doc3,22,e!doc4,21,b!doc1234,101,c!doc6,23,d!doc7,22,e!doc8,21", "q","live_b:true"); - doQuery("b!doc1,30,b!doc5,30", "q","live_b:false"); + doQuery( + "b!doc123,101,c!doc2,23,d!doc3,22,e!doc4,21,b!doc1234,101,c!doc6,23,d!doc7,22,e!doc8,21", + "q", + "live_b:true"); + doQuery("b!doc1,30,b!doc5,30", "q", "live_b:false"); // delete by query should just work like normal doDBQ("id:b!doc1 OR id:e*"); commit(); - doQuery("b!doc123,101,c!doc2,23,d!doc3,22,b!doc1234,101,c!doc6,23,d!doc7,22", "q","live_b:true"); - doQuery("b!doc5,30", "q","live_b:false"); - + doQuery( + "b!doc123,101,c!doc2,23,d!doc3,22,b!doc1234,101,c!doc6,23,d!doc7,22", "q", "live_b:true"); + doQuery("b!doc5,30", "q", "live_b:false"); } SolrClient solrClient; @@ -245,8 +250,8 @@ void vdelete(String id, long version, String... params) throws Exception { UpdateRequest req = new UpdateRequest(); req.deleteById(id); req.setParam("del_version", Long.toString(version)); - for (int i=0; i strs = StrUtils.splitSmart(expectedDocs, ",", true); Map expectedIds = new HashMap<>(); - for (int i=0; i expectedIds = new HashMap<>(); List strs = StrUtils.splitSmart(ids, ",", true); List verS = StrUtils.splitSmart(versions, ",", true); - for (int i=0; i obtainedIds = new HashMap<>(); for (SolrDocument doc : rsp.getResults()) { obtainedIds.put((String) doc.get("id"), doc.get(vfield)); @@ -331,9 +334,9 @@ void doRTG(String ids, String versions) throws Exception { void doRTG(String ids) throws Exception { solrClient.query(params("qt", "/get", "ids", ids)); - Set expectedIds = new HashSet<>( StrUtils.splitSmart(ids, ",", true) ); + Set expectedIds = new HashSet<>(StrUtils.splitSmart(ids, ",", true)); - QueryResponse rsp = cloudClient.query(params("qt","/get", "ids",ids)); + QueryResponse rsp = cloudClient.query(params("qt", "/get", "ids", ids)); Set obtainedIds = new HashSet<>(); for (SolrDocument doc : rsp.getResults()) { obtainedIds.add((String) doc.get("id")); @@ -342,7 +345,6 @@ void doRTG(String ids) throws Exception { assertEquals(expectedIds, obtainedIds); } - // TODO: refactor some of this stuff into the SolrJ client... it should be easier to use void doDBQ(String q, String... 
reqParams) throws Exception { UpdateRequest req = new UpdateRequest(); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestDistributedMap.java b/solr/core/src/test/org/apache/solr/cloud/TestDistributedMap.java index 8a89df472b6..f1e3669e8f1 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestDistributedMap.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestDistributedMap.java @@ -29,21 +29,21 @@ import org.junit.BeforeClass; public class TestDistributedMap extends SolrTestCaseJ4 { - + private static Path zkDir; - + protected static ZkTestServer zkServer; - + @BeforeClass public static void setUpClass() throws Exception { zkDir = createTempDir("TestDistributedMap"); zkServer = new ZkTestServer(zkDir); zkServer.run(); } - + @AfterClass public static void tearDownClass() throws IOException, InterruptedException { - + if (zkServer != null) { zkServer.shutdown(); zkServer = null; @@ -53,7 +53,7 @@ public static void tearDownClass() throws IOException, InterruptedException { zkDir = null; } } - + public void testPut() throws KeeperException, InterruptedException { try (SolrZkClient zkClient = new SolrZkClient(zkServer.getZkHost(), 10000)) { String path = getAndMakeInitialPath(zkClient); @@ -63,39 +63,57 @@ public void testPut() throws KeeperException, InterruptedException { assertTrue(zkClient.exists(path + "/" + DistributedMap.PREFIX + "foo", true)); } } - + public void testGet() throws KeeperException, InterruptedException { try (SolrZkClient zkClient = new SolrZkClient(zkServer.getZkHost(), 10000)) { String path = getAndMakeInitialPath(zkClient); byte[] data = "data".getBytes(Charset.defaultCharset()); - zkClient.makePath(path + "/" + DistributedMap.PREFIX + "foo", data, CreateMode.PERSISTENT, null, false, true); + zkClient.makePath( + path + "/" + DistributedMap.PREFIX + "foo", + data, + CreateMode.PERSISTENT, + null, + false, + true); DistributedMap map = createMap(zkClient, path); - assertArrayEquals(data, map.get("foo")); + assertArrayEquals(data, map.get("foo")); } } - + public void testContains() throws KeeperException, InterruptedException { try (SolrZkClient zkClient = new SolrZkClient(zkServer.getZkHost(), 10000)) { String path = getAndMakeInitialPath(zkClient); DistributedMap map = createMap(zkClient, path); assertFalse(map.contains("foo")); - zkClient.makePath(path + "/" + DistributedMap.PREFIX + "foo", new byte[0], CreateMode.PERSISTENT, null, false, true); + zkClient.makePath( + path + "/" + DistributedMap.PREFIX + "foo", + new byte[0], + CreateMode.PERSISTENT, + null, + false, + true); assertTrue(map.contains("foo")); } } - + public void testRemove() throws KeeperException, InterruptedException { try (SolrZkClient zkClient = new SolrZkClient(zkServer.getZkHost(), 10000)) { String path = getAndMakeInitialPath(zkClient); DistributedMap map = createMap(zkClient, path); assertFalse(map.remove("foo")); - zkClient.makePath(path + "/" + DistributedMap.PREFIX + "foo", new byte[0], CreateMode.PERSISTENT, null, false, true); + zkClient.makePath( + path + "/" + DistributedMap.PREFIX + "foo", + new byte[0], + CreateMode.PERSISTENT, + null, + false, + true); assertTrue(map.remove("foo")); assertFalse(map.contains("foo")); assertFalse(zkClient.exists(path + "/" + DistributedMap.PREFIX + "foo", true)); } } - + public void testSize() throws KeeperException, InterruptedException { try (SolrZkClient zkClient = new SolrZkClient(zkServer.getZkHost(), 10000)) { String path = getAndMakeInitialPath(zkClient); @@ -111,7 +129,7 @@ public void testSize() throws KeeperException, 
InterruptedException { assertEquals(1, map.size()); } } - + public void testPutIfAbsent() throws KeeperException, InterruptedException { try (SolrZkClient zkClient = new SolrZkClient(zkServer.getZkHost(), 10000)) { String path = getAndMakeInitialPath(zkClient); @@ -131,9 +149,8 @@ public void testPutIfAbsent() throws KeeperException, InterruptedException { assertEquals(1, map.size()); assertTrue(map.contains("foo")); } - } - + public void testKeys() throws KeeperException, InterruptedException { try (SolrZkClient zkClient = new SolrZkClient(zkServer.getZkHost(), 10000)) { String path = getAndMakeInitialPath(zkClient); @@ -142,18 +159,18 @@ public void testKeys() throws KeeperException, InterruptedException { map.put("foo", new byte[0]); assertTrue(map.keys().contains("foo")); assertEquals(1, map.keys().size()); - + map.put("bar", new byte[0]); assertTrue(map.keys().contains("bar")); assertTrue(map.keys().contains("foo")); assertEquals(2, map.keys().size()); - + map.remove("foo"); assertTrue(map.keys().contains("bar")); assertEquals(1, map.keys().size()); } } - + public void testClear() throws KeeperException, InterruptedException { try (SolrZkClient zkClient = new SolrZkClient(zkServer.getZkHost(), 10000)) { String path = getAndMakeInitialPath(zkClient); @@ -167,16 +184,15 @@ public void testClear() throws KeeperException, InterruptedException { assertEquals(0, map.size()); } } - + protected DistributedMap createMap(SolrZkClient zkClient, String path) { return new DistributedMap(zkClient, path); } - - protected String getAndMakeInitialPath(SolrZkClient zkClient) throws KeeperException, InterruptedException { + + protected String getAndMakeInitialPath(SolrZkClient zkClient) + throws KeeperException, InterruptedException { String path = String.format(Locale.ROOT, "/%s/%s", getClass().getName(), getSaferTestName()); zkClient.makePath(path, false, true); return path; } - - } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestDownShardTolerantSearch.java b/solr/core/src/test/org/apache/solr/cloud/TestDownShardTolerantSearch.java index 351e35621c2..95a73a1a493 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestDownShardTolerantSearch.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestDownShardTolerantSearch.java @@ -16,8 +16,9 @@ */ package org.apache.solr.cloud; -import java.lang.invoke.MethodHandles; +import static org.hamcrest.CoreMatchers.is; +import java.lang.invoke.MethodHandles; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.embedded.JettySolrRunner; @@ -30,12 +31,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.hamcrest.CoreMatchers.is; - /** - * Test which asserts that shards.tolerant=true works even if one shard is down - * and also asserts that a meaningful exception is thrown when shards.tolerant=false - * See SOLR-7566 + * Test which asserts that shards.tolerant=true works even if one shard is down and also asserts + * that a meaningful exception is thrown when shards.tolerant=false See SOLR-7566 */ public class TestDownShardTolerantSearch extends SolrCloudTestCase { @@ -43,9 +41,7 @@ public class TestDownShardTolerantSearch extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { - configureCluster(2) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(2).addConfig("conf", configset("cloud-minimal")).configure(); } @Test @@ -60,25 +56,39 @@ public void 
searchingShouldFailWithoutTolerantSearchSetToTrue() throws Exception } update.commit(cluster.getSolrClient(), "tolerant"); - QueryResponse response = cluster.getSolrClient().query("tolerant", new SolrQuery("*:*").setRows(1)); + QueryResponse response = + cluster.getSolrClient().query("tolerant", new SolrQuery("*:*").setRows(1)); assertThat(response.getStatus(), is(0)); assertThat(response.getResults().getNumFound(), is(100L)); JettySolrRunner stoppedServer = cluster.stopJettySolrRunner(0); - + cluster.waitForJettyToStop(stoppedServer); - response = cluster.getSolrClient().query("tolerant", new SolrQuery("*:*").setRows(1).setParam(ShardParams.SHARDS_TOLERANT, true)); + response = + cluster + .getSolrClient() + .query( + "tolerant", + new SolrQuery("*:*").setRows(1).setParam(ShardParams.SHARDS_TOLERANT, true)); assertThat(response.getStatus(), is(0)); assertTrue(response.getResults().getNumFound() > 0); - SolrServerException e = expectThrows(SolrServerException.class, - "Request should have failed because we killed shard1 jetty", - () -> cluster.getSolrClient().query("tolerant", new SolrQuery("*:*").setRows(1) - .setParam(ShardParams.SHARDS_TOLERANT, false)) - ); + SolrServerException e = + expectThrows( + SolrServerException.class, + "Request should have failed because we killed shard1 jetty", + () -> + cluster + .getSolrClient() + .query( + "tolerant", + new SolrQuery("*:*") + .setRows(1) + .setParam(ShardParams.SHARDS_TOLERANT, false))); assertNotNull(e.getCause()); - assertTrue("Error message from server should have the name of the down shard", + assertTrue( + "Error message from server should have the name of the down shard", e.getCause().getMessage().contains("shard")); } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestDynamicFieldNamesIndexCorrectly.java b/solr/core/src/test/org/apache/solr/cloud/TestDynamicFieldNamesIndexCorrectly.java index f568d21d440..ac914e1852c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestDynamicFieldNamesIndexCorrectly.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestDynamicFieldNamesIndexCorrectly.java @@ -27,7 +27,6 @@ import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; - import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrQuery; @@ -82,11 +81,11 @@ void populateIndex(int numRuns) throws IOException, SolrServerException { } } - private void assertThatDocsHaveCorrectFields(final Collection solrDocs, - final SolrDocumentList resultDocs) { + private void assertThatDocsHaveCorrectFields( + final Collection solrDocs, final SolrDocumentList resultDocs) { assertEquals("Wrong number of docs found", resultDocs.getNumFound(), solrDocs.size()); - final Map resultMap = resultDocs.stream() - .collect(Collectors.toMap(doc -> doc.getFieldValue("id"), doc -> doc)); + final Map resultMap = + resultDocs.stream().collect(Collectors.toMap(doc -> doc.getFieldValue("id"), doc -> doc)); Iterator it = solrDocs.iterator(); while (it.hasNext()) { final SolrInputDocument inDoc = it.next(); @@ -98,9 +97,14 @@ private void assertThatDocsHaveCorrectFields(final Collection .forEach( fieldName -> { assertThat( - String.format(Locale.ROOT, "Doc %s does not have field %s, it has %s", id, fieldName, + String.format( + Locale.ROOT, + "Doc %s does not have field %s, it has %s", + id, + fieldName, resultFieldNames), - resultFieldNames, new IsCollectionContaining<>(new IsEqual<>(fieldName))); + resultFieldNames, + new 
IsCollectionContaining<>(new IsEqual<>(fieldName))); }); } } @@ -141,679 +145,680 @@ public static SolrInputDocument nextDoc(int id, Iterator iterator) { return solrDoc; } - private static final List FIELD_NAMES = Arrays.asList( - new String[] { - "name_DfsqCIYgwMpJnc_prop_s", - "name_VHzHTZWnqGALJJ_prop_s", - "name_OyKmIqynBbK_prop_s", - "name_JofvOXUMYQs_prop_s", - "name_SaAfmgHXbCIUethh_prop_s", - "name_CMajAPNHivraqKBmYxH_prop_s", - "name_OpJFcSZHuOFVKs_prop_s", - "name_fTaolBrXTGpJ_prop_s", - "name_hlgpuaRTRmYjBNmzHBI_prop_s", - "name_DGSzgfeiMouuTgbaklJ_prop_s", - "name_hTAZuAysueB_prop_s", - "name_VqztpEqzBCXEhVM_prop_s", - "name_CaJSsxLqxhq_prop_s", - "name_JjEYNobdJiyAJ_prop_s", - "name_GGpLbFvxdFyBH_prop_s", - "name_NIfhcAmufHRwaGNuO_prop_s", - "name_wRzKYNtwiUapyzjQh_prop_s", - "name_UonaDljKBYUMgMV_prop_s", - "name_sByosZWJLlrrFYVXaT_prop_s", - "name_HKHToAtQQkPMwNyGr_prop_s", - "name_HJBQHPKbxHvPGp_prop_s", - "name_UtERukPiRHzqv_prop_s", - "name_WIevbvmoKJkcr_prop_s", - "name_YjoCtbikMRaY_prop_s", - "name_OwuVrwcxslmiWMylkuH_prop_s", - "name_eEoZobamQfJLad_prop_s", - "name_IWkfNtxsTRbuPIT_prop_s", - "name_rZphZcqVQN_prop_s", - "name_QbePjDfrPkiUySUfSS_prop_s", - "name_ABCPaNPQXBwVJh_prop_s", - "name_OitLZpkeOXrOAeITlAc_prop_s", - "name_GlGQselWNwuHUSPy_prop_s", - "name_XDNBBpHaxD_prop_s", - "name_NkSQtvNhCwgPxnuRGGK_prop_s", - "name_mkYuyjFfWjEb_prop_s", - "name_JUOzeuNelNHbQVlj_prop_s", - "name_CuzbqxBlEJEnBdeJo_prop_s", - "name_GbpIJAqoVP_prop_s", - "name_oPozbuiwFXFoCQ_prop_s", - "name_QPcamTHGpEgYGW_prop_s", - "name_QfgfGrTZZkqIbLq_prop_s", - "name_UtkepJfqAPQQZvDnB_prop_s", - "name_ShipLvibadhd_prop_s", - "name_wAdEXOEAydT_prop_s", - "name_YiquTYZxxNsxanQ_prop_s", - "name_hJfuWEBCYIdtcixldUy_prop_s", - "name_PzYofpLhvtw_prop_s", - "name_rhkJFHishBuS_prop_s", - "name_GNUoUCaqqfGErM_prop_s", - "name_hSrbCrBUEs_prop_s", - "name_xJANZEGtTrIXMDLBgL_prop_s", - "name_pOhSitCAKl_prop_s", - "name_PkBHXUceEgVP_prop_s", - "name_fvDrPKkegWr_prop_s", - "name_HVzmAutUrUoicr_prop_s", - "name_ouFhihsihDk_prop_s", - "name_eeFcnImKkXiKXDTIPC_prop_s", - "name_NMEsrYgSBoIEwp_prop_s", - "name_yqCQGPzCamFqBwLZiiC_prop_s", - "name_JlHlxPykBl_prop_s", - "name_lYGskGWJfNhnd_prop_s", - "name_ifXTlDnYqUmjFNhKOxq_prop_s", - "name_uaCtJcjZWu_prop_s", - "name_LzSXDKQdhQ_prop_s", - "name_TpvZetClsYcJenPCdW_prop_s", - "name_NPsQNyfkDCgNus_prop_s", - "name_zMZnwFtVnbdlGncBEf_prop_s", - "name_dGDCXTxABxh_prop_s", - "name_JIOxBoRhiZLD_prop_s", - "name_smVTZaCZZMiSmYq_prop_s", - "name_VgCZTMfOHpfAlGUjDxT_prop_s", - "name_HhtLeCOGJMNLMXFBgI_prop_s", - "name_QpzFZXNIpk_prop_s", - "name_obTfzXxBoCXpiGFGWuz_prop_s", - "name_VrBTsQmfJoqNI_prop_s", - "name_QeXnmsrvSYZBtkWwDxs_prop_s", - "name_vtvvKPfpTBBBMuMTZZ_prop_s", - "name_VvPvDbWJXsXIAUSNWgW_prop_s", - "name_BYCAfIaRKVUvHHBIut_prop_s", - "name_srwPMPauluyfyM_prop_s", - "name_YlrFboTEUfq_prop_s", - "name_vIPAkvspnnT_prop_s", - "name_XWVkDyVpkZvo_prop_s", - "name_tJDzyfWZtOrzwvuw_prop_s", - "name_mvfaMcKLduLXcvol_prop_s", - "name_OKvQYLTaCWwGTXDboK_prop_s", - "name_VkMXjFZGUQgNWDbKbgp_prop_s", - "name_IixctxAiJdqQQlPwV_prop_s", - "name_LbOxzyxGVrsKyZgCHKi_prop_s", - "name_YtJheZqyzPhpuAAitN_prop_s", - "name_IsctRhBopyx_prop_s", - "name_xrfxhlkidKabA_prop_s", - "name_MFqGPFbIOrneplmaOK_prop_s", - "name_fXOsAXXtMnLy_prop_s", - "name_ATQmfQzgdOlFPuDp_prop_s", - "name_rFrgtZZDVFGuHjteUX_prop_s", - "name_qcPrtNSRKfBPvtdXWJJ_prop_s", - "name_UpInzgFgMlfOuMuffOa_prop_s", - "name_cmwSPLLLuiv_prop_s", - "name_WDQjhkEHQabWvK_prop_s", - "name_BqSJaaLDBTTVy_prop_s", - 
"name_nqXaRkhFXV_prop_s", - "name_GJBYZZXOOlyJ_prop_s", - "name_khgXzOmSxxrerikblPC_prop_s", - "name_uFNMtGvQQJljSgk_prop_s", - "name_yoZRduwiqx_prop_s", - "name_GqqWeEyYXEwT_prop_s", - "name_tzhVSqoPKt_prop_s", - "name_ensyGAXGQSuW_prop_s", - "name_LQJmrvSWKQHc_prop_s", - "name_KPpikIjkpciF_prop_s", - "name_mplQAMNcigYEwNEBT_prop_s", - "name_idmsrYlJGoizvsllQsW_prop_s", - "name_rMPMEsrySqUVwcDaUE_prop_s", - "name_febnQEKdThaqhnghZ_prop_s", - "name_XxtOzKGvvSguMNS_prop_s", - "name_VtFlQvelTPyz_prop_s", - "name_PQYUOnhHJsSaqVDH_prop_s", - "name_qQEIWMsRNQAV_prop_s", - "name_rPPHpYLbLoUiLYQ_prop_s", - "name_wZaRlynJFNvWJKjyyuA_prop_s", - "name_sOwZhIRXUlCvaqRn_prop_s", - "name_omkQRxJuYPLTeB_prop_s", - "name_fVJbGrSpMpO_prop_s", - "name_wLYQtojRTtWeQfz_prop_s", - "name_dlQxbbzWoAEDbRPFy_prop_s", - "name_SkYKoVihqWDXnsH_prop_s", - "name_whlpGhuMeZA_prop_s", - "name_iOsqSwnNKSNrjLmkpvo_prop_s", - "name_dWYzrxvJttwv_prop_s", - "name_stOcVzqQedeqagmynaG_prop_s", - "name_NENunrnlQI_prop_s", - "name_HqeTpJDHOsfpawjehIq_prop_s", - "name_RPwyjltiltvDOqpsYi_prop_s", - "name_znVAkUDVYWMIoLr_prop_s", - "name_jTzTSvTRyguN_prop_s", - "name_ySeOANIBnMabQvaru_prop_s", - "name_SadaPaYJaxkwHkRMuE_prop_s", - "name_JQVolDkiGeuvA_prop_s", - "name_NtxjSaBccGJWoK_prop_s", - "name_WvwitdcFXPUQny_prop_s", - "name_JQGUUVnzyMCJs_prop_s", - "name_GqDdyBcHznboeW_prop_s", - "name_RlRSvAFEykA_prop_s", - "name_TvNERqviFBnOCtemES_prop_s", - "name_DUlAWwbaslagWbIImdd_prop_s", - "name_gWILZCZRlbjBoQdrP_prop_s", - "name_ftNrYHWFvhuGYHuJt_prop_s", - "name_QYBKgeSLCQeRUX_prop_s", - "name_PYUIqToJNDgWASGFr_prop_s", - "name_zBZIhwwifmRTOXe_prop_s", - "name_hnPUucMPfhUuJoO_prop_s", - "name_agZLOYIoOWl_prop_s", - "name_SEgmWAjhjJ_prop_s", - "name_pUclNPUSiDZtMg_prop_s", - "name_LjIrSIDJqoqL_prop_s", - "name_vjHbgxEULpsQiZlUaM_prop_s", - "name_eymEZtHNKYjWFEUlbR_prop_s", - "name_tQmOnPEwkIMJlzPRG_prop_s", - "name_ogsTpGUlFLOvLzl_prop_s", - "name_jJfXDLSaOuHI_prop_s", - "name_tBfQFKUYmaAeR_prop_s", - "name_rzFgVahQrXezOIMy_prop_s", - "name_qdjFkPulsPpMLXVPp_prop_s", - "name_rWetgUNXaoxXIfbPDz_prop_s", - "name_OrSAGeTkkrRUygOLG_prop_s", - "name_LoeOnHUogQnvFHbvXCQ_prop_s", - "name_wCbfoExoqlldz_prop_s", - "name_mAyvGeccKbSpO_prop_s", - "name_LAlRNXNqtwdF_prop_s", - "name_CzQuGtKdZviLIh_prop_s", - "name_pkfyyloJeQLCiclF_prop_s", - "name_BBabvpGlueqCqEAJq_prop_s", - "name_yMyCCNWJarW_prop_s", - "name_rXyBgzPnWqnU_prop_s", - "name_yjTcYotQfUVXVp_prop_s", - "name_iQulShIGGjlJuGtkOk_prop_s", - "name_EAMjjKBtOri_prop_s", - "name_cKKMdfEVvOY_prop_s", - "name_HCgMMUWJhAPUcSYEw_prop_s", - "name_QxAiEPSPFcGdpbsAN_prop_s", - "name_uRFDixdPAlsNiZ_prop_s", - "name_ctffdxcrVBN_prop_s", - "name_mdXIbwncmwHgDmfsiAM_prop_s", - "name_gKlSaxAfDdYgt_prop_s", - "name_juaOrDYjSfvcmkd_prop_s", - "name_YadqjaxLPXUpJCIMdNm_prop_s", - "name_jlNcOgAYUBoj_prop_s", - "name_AKNbQWFRzzYbAhOlqAI_prop_s", - "name_JAzAPnrljRhqbNfdoh_prop_s", - "name_kXYgLRfqrYiQxRo_prop_s", - "name_AfZylgVaZgvaIQgR_prop_s", - "name_XaOBvJEVEw_prop_s", - "name_hDwJONxcscyJuzYRH_prop_s", - "name_SvogOicRPq_prop_s", - "name_RIsXETbdCtBuL_prop_s", - "name_jOxeorqpGcdkp_prop_s", - "name_IBzKXorZDdowJujJkC_prop_s", - "name_kWfsavjmSEIyGxeoz_prop_s", - "name_DhaoVQSvJZfy_prop_s", - "name_dWSNRommSreSW_prop_s", - "name_LWqiEKFOMPVklmFwyoX_prop_s", - "name_GVazWdylOnyamFiz_prop_s", - "name_CcgGFeiwORNbAw_prop_s", - "name_mVgxCpHfjhofqaOA_prop_s", - "name_nzuAietKmfmjnXalz_prop_s", - "name_YzYAGdOoaxwgSh_prop_s", - "name_jLMshjzscpgU_prop_s", - "name_JaLKPfULNIeWysimJf_prop_s", - 
"name_KehwwGmAULqXtNCrwhX_prop_s", - "name_mxpwZrDktTLXUzkdKa_prop_s", - "name_bPmedbyCSSjDC_prop_s", - "name_LYbCFtmQiC_prop_s", - "name_cLrVLzwMcMnAT_prop_s", - "name_HeOUpecBxVvHEERlPUk_prop_s", - "name_jCVSgiNewmDB_prop_s", - "name_jOLtAVRUFrs_prop_s", - "name_gfWTsWEVeVSXwGMgUT_prop_s", - "name_BPbiEWmyizADxNIV_prop_s", - "name_VYSwGIOIarPmGWVKenS_prop_s", - "name_QpAHpTcxrzVYYfWYT_prop_s", - "name_tOBVdVTRBMmCXfnxrNa_prop_s", - "name_iCaKxgfTXuvgCT_prop_s", - "name_kdPWzVZHslaijNrKbKU_prop_s", - "name_wmJmiiWghggUHmNiQAg_prop_s", - "name_ZpzaQMGuMfOjw_prop_s", - "name_cgOqMOeYMHJ_prop_s", - "name_EnguvcJhre_prop_s", - "name_edeevGMabTDek_prop_s", - "name_vmJEHidWgTTUvioGhi_prop_s", - "name_CHYfwnIHxQzPwEFJ_prop_s", - "name_KXpUaenwfjlj_prop_s", - "name_eVGHumUijQhFvzGjaV_prop_s", - "name_XorPzBArSbSTCHpz_prop_s", - "name_RRLESavujqcxblljkn_prop_s", - "name_YftgbzYxUNUCMXt_prop_s", - "name_IqEDQHVFGIyQSS_prop_s", - "name_XbStVPkHwGYmQB_prop_s", - "name_JyWCZhERjLOtqw_prop_s", - "name_dDiuFMzjhrJGyqqud_prop_s", - "name_uCCDpPtxkdQNDq_prop_s", - "name_ohZQKMOVeb_prop_s", - "name_gBTzxrPwsX_prop_s", - "name_RLkUwPFSVjqB_prop_s", - "name_CXlPWeBunQDGtBXqo_prop_s", - "name_kGvCPheDzjir_prop_s", - "name_cvcOAZkaTZsTyrrWxvQ_prop_s", - "name_sftNHXiElgbUQxtYDI_prop_s", - "name_aqGEmvTBCqdFKyfa_prop_s", - "name_myCtzywMPzQyJhHwsEy_prop_s", - "name_TmvkTzpWLtPEDUfmg_prop_s", - "name_XnYvWLQJdcjdOBmfJ_prop_s", - "name_toYeyORNQWA_prop_s", - "name_hcWpqATuIiUbyfiHPaJ_prop_s", - "name_EAelPZjFpiThB_prop_s", - "name_aEfokIQMbKI_prop_s", - "name_YMbTCeRRipELjF_prop_s", - "name_yIbPmIvnUNFsKaEk_prop_s", - "name_PVsusfJldMrTq_prop_s", - "name_BhGnYInbCoBcRxbkh_prop_s", - "name_LvywGWGeDmCnwYM_prop_s", - "name_bJwdGFMfTyRhI_prop_s", - "name_durkyUrNKHx_prop_s", - "name_RdeZaAlmttQzNDZCb_prop_s", - "name_VdzHkraZKezBjY_prop_s", - "name_rAhOeyHbDuW_prop_s", - "name_SNzylGssYOA_prop_s", - "name_vHqZyqgwfD_prop_s", - "name_DPnKKQlfkn_prop_s", - "name_PQFtvTrPezVRLL_prop_s", - "name_YkOCraZfkuCyx_prop_s", - "name_glGgplQXQzqaHbT_prop_s", - "name_OqpvyNHqeQUANE_prop_s", - "name_EYRKsQekVHcYlWf_prop_s", - "name_RFuZbCWIOu_prop_s", - "name_ekHWLiTVyNjYdl_prop_s", - "name_vezpACcbFw_prop_s", - "name_oQQXcPzeODviDC_prop_s", - "name_wZkyzXscqPGWiEzwR_prop_s", - "name_eYywOQdxMbAwHNC_prop_s", - "name_gvEXKFXEAQMaYm_prop_s", - "name_vofoikKFpZsOZfY_prop_s", - "name_aXNocadbQQO_prop_s", - "name_pzzPuuliByDjLm_prop_s", - "name_dIOSQFOVldP_prop_s", - "name_sbplpizxCQWndsBpoU_prop_s", - "name_uogQaerZVBnV_prop_s", - "name_WsDhwfdJivmMKO_prop_s", - "name_RjJjIrPGWGFgCbT_prop_s", - "name_sKymsAbmFqwyzKRSH_prop_s", - "name_wIHDafXfvOunVi_prop_s", - "name_pWEWMRdqgvuGdqwztct_prop_s", - "name_aFDHZXHKgnVo_prop_s", - "name_dAdcQYTvmRZ_prop_s", - "name_zQsaOcogPYNqypDPYjS_prop_s", - "name_KOtJNECCHjLxKZqHZ_prop_s", - "name_wfdxykXSBRcrfUv_prop_s", - "name_kGJgFephxkeH_prop_s", - "name_peispafiMLgmE_prop_s", - "name_CJTnCuCsOSCvj_prop_s", - "name_xpOyokirtcJoFPKyH_prop_s", - "name_nhmhQePxBvNT_prop_s", - "name_vPxdJTwHkzDdvaK_prop_s", - "name_dAGyfZWSkTaCCt_prop_s", - "name_CYaZJGFolJqNhmKgsV_prop_s", - "name_vboqCHtthOPMRHU_prop_s", - "name_fqrgYweKbBNzlYJk_prop_s", - "name_SwOSQemwasu_prop_s", - "name_dRDJlPUxSgvIS_prop_s", - "name_DYjfbnkMhnMyL_prop_s", - "name_REAirSXdUlsq_prop_s", - "name_aPLpQwhWGCcjk_prop_s", - "name_LWlbDafEriuRGmJYW_prop_s", - "name_bTFLYGqAHYvnpFvzd_prop_s", - "name_emIonaQRdfsjmVCjUn_prop_s", - "name_RdMOfMWlqKmKuxYawG_prop_s", - "name_NmvxkGBDyJ_prop_s", - "name_veeKFlgaBqTXINdlbi_prop_s", - 
"name_JEMSCgBWKwpd_prop_s", - "name_RKsEwiClkYAENVkO_prop_s", - "name_QSfmaqphip_prop_s", - "name_DhcOPbKnWrv_prop_s", - "name_AhEfQCMTWtrdjBV_prop_s", - "name_EtAMDtJVTd_prop_s", - "name_qVxNUttsduupj_prop_s", - "name_BeFWYHBfnSNqVEPz_prop_s", - "name_wPevXszAQZWZwe_prop_s", - "name_oJYcnxrshAkjJYnXyn_prop_s", - "name_nffSJxMrhrIlQw_prop_s", - "name_ZrHpSfuzHHIin_prop_s", - "name_rdMnHMmgEQaGLmXRPiD_prop_s", - "name_huldbnqnXwop_prop_s", - "name_jduhQpDoYv_prop_s", - "name_NBOKEducirzNsSSy_prop_s", - "name_xJzWfJrMIY_prop_s", - "name_VMZbxqOHwfQDaGT_prop_s", - "name_syUXJprVoLTZYebB_prop_s", - "name_prZEUbNoTysB_prop_s", - "name_RfvJRIoQeGSu_prop_s", - "name_BBshBWkaLopZ_prop_s", - "name_YOAVKRdkRspIVaLva_prop_s", - "name_RrTctdPJnMoMw_prop_s", - "name_TEoYvqSeBmaUHflB_prop_s", - "name_IwsxROIVgJ_prop_s", - "name_ktQwKjuCLYAmOnyj_prop_s", - "name_MZrmJkYFkHsU_prop_s", - "name_bdagQHBFmoIo_prop_s", - "name_zmoxFeHMBwkyEO_prop_s", - "name_wenNdlQvlHItqflx_prop_s", - "name_XprqFpXiYoHzEfd_prop_s", - "name_ogZQmtfQOfvP_prop_s", - "name_QOsBJGNDUzbHWHrQ_prop_s", - "name_jfrfWIuCWSFXQumtm_prop_s", - "name_VFWIKhommZaTVuzphSb_prop_s", - "name_RVhGwEvGjdOnzR_prop_s", - "name_FQlxoQLZIkZCyfiVx_prop_s", - "name_MPbQJjgBGMUR_prop_s", - "name_SbbTGVASSkYHiNwV_prop_s", - "name_MntYiMNrHQ_prop_s", - "name_yjcZRVwITRLXb_prop_s", - "name_aSKYqqhexuo_prop_s", - "name_TfzoLKDlIhDun_prop_s", - "name_KeKTrXfMFglbN_prop_s", - "name_iIdfUsKoIlf_prop_s", - "name_FPQqtNlVCLSgwgNhf_prop_s", - "name_PkYUzUADmq_prop_s", - "name_nXAJwIhWfESKdZ_prop_s", - "name_faXLvuLCiq_prop_s", - "name_zarHYCyYIr_prop_s", - "name_sowzONSDytjGEZuv_prop_s", - "name_zyWCVstnSnLz_prop_s", - "name_anncXfqvveOWy_prop_s", - "name_TbvIhvzhkLAXm_prop_s", - "name_tBWzDGmZocLjPRFMIF_prop_s", - "name_JgCrqPcPNiVdrRRbf_prop_s", - "name_FBtKmopbwHOPPoMjDRA_prop_s", - "name_BOEyOhYKOUSQFQPxwDL_prop_s", - "name_uVosPVYbIF_prop_s", - "name_eQOiKlnUNZ_prop_s", - "name_lYYQBjpaIjMXYRH_prop_s", - "name_FyFvEcZfRrnx_prop_s", - "name_rNiSOAGXkMPBY_prop_s", - "name_tylcSBADvLvAKkzv_prop_s", - "name_KvoxbuKdiqLGUo_prop_s", - "name_FDZfmbIjXBiKoeWImxj_prop_s", - "name_NULbsIjjyysWdXGAxy_prop_s", - "name_RVtYeHUXaxVSBJUCX_prop_s", - "name_jlNFgVZgDAFKqHxR_prop_s", - "name_uIhSJwItLLKHa_prop_s", - "name_lEMFtKhGZjrjnLlW_prop_s", - "name_avEoREwfXmm_prop_s", - "name_IiXRqkZmvNAqf_prop_s", - "name_dKzqqsjZzTgxHTpZiA_prop_s", - "name_jilMmwVsaTkUgJ_prop_s", - "name_xYNTFgaEEluQ_prop_s", - "name_WFkNIiGzzfHous_prop_s", - "name_ztXfmQXTTNuXjPSCYC_prop_s", - "name_jyGvFWOSfs_prop_s", - "name_jRpEJIPQzYKLR_prop_s", - "name_FIUqxuPiWpMMTuZ_prop_s", - "name_ttkqBQpFtwHL_prop_s", - "name_bqYmgceeoJZSZbW_prop_s", - "name_ctRkHATHrFlnEKmSRLd_prop_s", - "name_wZorXwBeanELgv_prop_s", - "name_jXiyBDjpCKe_prop_s", - "name_sRvLkwUSBIsrt_prop_s", - "name_yEHNabvaqyAGa_prop_s", - "name_cwmgaKpzluwJOBvphxY_prop_s", - "name_cOXSTpgjzFEjfbJPVM_prop_s", - "name_ikkFRyBgGfWbg_prop_s", - "name_dEKLFEgvjHFo_prop_s", - "name_HJZRtrGjmPlc_prop_s", - "name_hMpazPhQVkTUE_prop_s", - "name_VKnOJLBqMVzkxD_prop_s", - "name_zKPBHVcuULlMTRy_prop_s", - "name_LzbMOhdcPnvcF_prop_s", - "name_euHYSgnsustyR_prop_s", - "name_IvuYSeiYicgpmboJW_prop_s", - "name_yGrlGoiNHNIt_prop_s", - "name_tpDceZWQvat_prop_s", - "name_iaDXoHUSwG_prop_s", - "name_fJXmNNxUHggajGl_prop_s", - "name_qdzxqokVXHjNBORhW_prop_s", - "name_DxoLvhVEbDcXb_prop_s", - "name_bFHhHakPJd_prop_s", - "name_hVrFxShinIeN_prop_s", - "name_XKPhskHDDg_prop_s", - "name_JjbLlVDrWA_prop_s", - "name_xOJcUebWcopYLGKGYhH_prop_s", - 
"name_VJTvLToaSyFUm_prop_s", - "name_civISGYkrfwD_prop_s", - "name_kSPizRJqJZ_prop_s", - "name_gmmUBdiHNFVBzpqukdi_prop_s", - "name_jSGXVJsJPmESYy_prop_s", - "name_AbyytYHuJyn_prop_s", - "name_YGNtCMfmLqE_prop_s", - "name_siCxrMEiFjwoEqfcc_prop_s", - "name_yWlyMAenZiTylpYzW_prop_s", - "name_XWOZYkmhzHmOF_prop_s", - "name_FlCjaUETSllVHEwmoR_prop_s", - "name_ZaXOAZXrKGXs_prop_s", - "name_wveujGHeUQ_prop_s", - "name_KhSPQFkCmHuScj_prop_s", - "name_cBXYezKthhDfoVOnIo_prop_s", - "name_rOVAKNTsPprlUDDlCa_prop_s", - "name_fgWaLCfjuDnbH_prop_s", - "name_ekxAMazIGJgLCCMox_prop_s", - "name_iCbNCfPSYKZ_prop_s", - "name_rULXErnmZoIMARdsEL_prop_s", - "name_MjtGLUmEVFFRKydbJ_prop_s", - "name_DzLQfXBPWppyPjj_prop_s", - "name_xxNOkzscmZ_prop_s", - "name_VAiCBAZUeEnA_prop_s", - "name_ftdPuTtNtpLoRmtqQB_prop_s", - "name_ebNmBmAGnjhDwEMkWN_prop_s", - "name_eZVGYMBDaN_prop_s", - "name_hxykcxgsIAfxfupix_prop_s", - "name_XEDImtbSKXAeLyEop_prop_s", - "name_yOxGFWeePpUIc_prop_s", - "name_RzqLTLciLlaundr_prop_s", - "name_UtCQadSTlNF_prop_s", - "name_ORSaWMOVQhZZWxkv_prop_s", - "name_qCgQYTeGGSJf_prop_s", - "name_AlIZOvRFcZPbZwU_prop_s", - "name_vdqdlYetlciyb_prop_s", - "name_dmJIAXeXYjJhwacpkLZ_prop_s", - "name_mCOjAATZrgxJ_prop_s", - "name_RJsQfzfqbZGXp_prop_s", - "name_XMiImCbTVJAoKSfEo_prop_s", - "name_kDCCVcALrCx_prop_s", - "name_VmkGYGugHqaA_prop_s", - "name_jvZilzavGvyq_prop_s", - "name_CCDtRrXmOTmc_prop_s", - "name_UGbllGSvifotji_prop_s", - "name_JOfVgyuwzbIriJg_prop_s", - "name_cJCGLUbaZcrJXGCcZyE_prop_s", - "name_yKXkqdoNhbkSPSBUv_prop_s", - "name_QSrzBIUBQVUrdzM_prop_s", - "name_ulgjGcvaqh_prop_s", - "name_JaQtXbimGQW_prop_s", - "name_xYrQHDXvVbzq_prop_s", - "name_wSxZHthLVwKjuBWR_prop_s", - "name_mEefJyzMBqdSbQ_prop_s", - "name_GGJivsaoxiirx_prop_s", - "name_CACALOHPQCrf_prop_s", - "name_GBrQDvusDOWuvClhYa_prop_s", - "name_vqZfUUBIkd_prop_s", - "name_mXGYvfrccKBFymNB_prop_s", - "name_wZhiLMSbHcweTy_prop_s", - "name_fFPlXgVZKVHosY_prop_s", - "name_wAFjlOGjIQJBOBgsg_prop_s", - "name_diTnXDoUYaBiVnc_prop_s", - "name_DyufnSBeLVPwDSPBi_prop_s", - "name_TlGahXVDZeZdT_prop_s", - "name_jDnUlzuoxtWKe_prop_s", - "name_MCnYrKrvAa_prop_s", - "name_HZtNEzgsgQpgPULw_prop_s", - "name_sZZJIdHfiEnPvbgdoK_prop_s", - "name_aehzQLgzPf_prop_s", - "name_uOhtALYSlV_prop_s", - "name_tLmeLHpBwzP_prop_s", - "name_tMDTGUzelwUQrqPaN_prop_s", - "name_wMUhdfLFRaXOOLeKL_prop_s", - "name_XqpDZerjeDqrzzhsw_prop_s", - "name_zrmxpkEbOGPIEzqM_prop_s", - "name_dEOcHvQShe_prop_s", - "name_QbZyQMReoJJG_prop_s", - "name_gsYPhgkPfTOiPDEAVD_prop_s", - "name_rbNswYkvBvqmA_prop_s", - "name_LNmSBXRpRnvKFpqbT_prop_s", - "name_rHuaoaqVkaAz_prop_s", - "name_zjeWWuFoacJuuxETiD_prop_s", - "name_BhAKoPqFSVlA_prop_s", - "name_JjjcumppysyXsTldO_prop_s", - "name_bMjIQaLeLZ_prop_s", - "name_ujEVEGDoYpXmg_prop_s", - "name_xeFvZrHvmONeM_prop_s", - "name_vgNWlNzSOGo_prop_s", - "name_AvdOuoFmghMsCklVua_prop_s", - "name_KWDQpWtFvwxJWNNj_prop_s", - "name_llVSmiZlLiNdippBgzm_prop_s", - "name_BLmEHGblGXQHbywh_prop_s", - "name_rfoqACJQVUHBuJZDjdx_prop_s", - "name_kDQGbSJbyD_prop_s", - "name_PLSTfOtIQx_prop_s", - "name_NiIIwJLdfGlAcwzfT_prop_s", - "name_kPGazwKspZmewuiaZVB_prop_s", - "name_cuhoZBMdAMi_prop_s", - "name_XbZqGnehJGT_prop_s", - "name_yfCAGBvZufEM_prop_s", - "name_GHuXtHZtwTY_prop_s", - "name_jFtdTtkJbvHrsgGQ_prop_s", - "name_iBcSefLjrrEyHOfqpx_prop_s", - "name_GBJRIYHEbuwJmzLdxtm_prop_s", - "name_VPiXQrwycQPT_prop_s", - "name_XJstMKshDibmiHoZMd_prop_s", - "name_wiGBycapxeIXtTrvW_prop_s", - "name_rJPHaUEbgraQ_prop_s", - "name_rGxylqGVHinLjO_prop_s", - 
"name_GXeVgdEWBmv_prop_s", - "name_HnYKYhHxZlpGIwdIVQ_prop_s", - "name_FIOSdBvncmSeMiH_prop_s", - "name_FCOLTVOghkVRBXhh_prop_s", - "name_iZknWYaTKn_prop_s", - "name_bQhwLkthwP_prop_s", - "name_GJKLUOxgFtxMdbpeN_prop_s", - "name_uCUdhLIXQKheDpQMB_prop_s", - "name_knArOLgcybDsJsor_prop_s", - "name_vgoNwqvzshUKeOPUSYk_prop_s", - "name_YzIaNlWjqBqwoJcA_prop_s", - "name_hDYFmiHwhPCL_prop_s", - "name_fEAcVIqAfAIXehyOoGU_prop_s", - "name_KwUSxCHFWiXOTqk_prop_s", - "name_KRUSuEYGaQgWJmnGm_prop_s", - "name_PpWwLjvaGoR_prop_s", - "name_skVILQlxWYQowRGw_prop_s", - "name_bcbBLimvTIGQp_prop_s", - "name_vYQrLudbiua_prop_s", - "name_nuDloTTlKFpeoV_prop_s", - "name_RhbixfcpVSMOPfK_prop_s", - "name_fRRDlXHyOAGhwJ_prop_s", - "name_PGTPucoCVbz_prop_s", - "name_TTOIQLLAUIMUqE_prop_s", - "name_kXJQwDYAdc_prop_s", - "name_VlYMFsIAfv_prop_s", - "name_OThsmraSBTydoPfu_prop_s", - "name_WhEccUbWgvObJoS_prop_s", - "name_bxJtNPHBleHNhfat_prop_s", - "name_aLJcfxHporPCXBiF_prop_s", - "name_BbBwSzFKovNubMsv_prop_s", - "name_ZoaCLmepYLkTCLddGPn_prop_s", - "name_jYflHPNvrnzB_prop_s", - "name_SGqftBmurcbCEMn_prop_s", - "name_PqiMioFAtKOjkan_prop_s", - "name_ZeazKbMtVMB_prop_s", - "name_sgQyAUHsEg_prop_s", - "name_EAIUmQCWbiQbZI_prop_s", - "name_FNcVUavfHz_prop_s", - "name_ViUmtAvjlwKCeFb_prop_s", - "name_FYjubApKwXxQnNUIxB_prop_s", - "name_WLPEmGTQAisfXsq_prop_s", - "name_CyrnsHyuyFBx_prop_s", - "name_zMGfDpWzqfZMAF_prop_s", - "name_NILxzDPIbmoxOwQtuQP_prop_s", - "name_JJCEpGqGVjJa_prop_s", - "name_CtTFvRpyzKguMdZ_prop_s", - "name_qiGhKGSMzMMp_prop_s", - "name_QLUJBWXryHb_prop_s", - "name_sMJePABydcVoQk_prop_s", - "name_tfpbMNRLaXuyLuexLGy_prop_s", - "name_rYoMoMLacxWlS_prop_s", - "name_vWDCkyzmEi_prop_s", - "name_RkKjeQtYycWC_prop_s", - "name_xfDfirUchdkxKIDJOt_prop_s", - "name_mEWCBmdvyhON_prop_s", - "name_uLtsxsjXOGQZkCChL_prop_s", - "name_UYjWVNCvGE_prop_s", - "name_JJxhmSNcmsN_prop_s", - "name_fYqlzMmhQdoecsvx_prop_s", - "name_MxXoomSYegfmoEy_prop_s", - "name_hKITNVMXrrjaeFpwfh_prop_s", - "name_bhTKjWsdWDdonwi_prop_s", - "name_XWjLvIfzoorQRqBmo_prop_s", - "name_UqLAinOoswSeBVh_prop_s", - "name_mQzjXAidhWpqqG_prop_s", - "name_ytxaqwLBrvJYolqi_prop_s", - "name_daTgAYVVJQsmO_prop_s", - "name_xCmENbUDoiZ_prop_s", - "name_eZTpxzkHHLjKUGuV_prop_s", - "name_XdJsjHWRNMnQeC_prop_s", - "name_tTSOfpdJTOsZkcTH_prop_s", - "name_ridXaoCaPNoyFx_prop_s", - "name_HVIFmePdpnAcvjba_prop_s", - "name_osQVkiJtkHiBVP_prop_s", - "name_ikTrXQFmMpAw_prop_s", - "name_CtPYdlsrBtsuRkU_prop_s", - "name_BbnERLXULZsX_prop_s", - "name_FUGsEWgJtiLxWUEadSE_prop_s", - "name_babUPIRWxOJTyQqt_prop_s", - "name_zqaORMkAJlhSf_prop_s", - "name_CeRKgIekQl_prop_s", - "name_sHuCaTJIqfPYqpDILZe_prop_s", - "name_wMsJtSzGDCJ_prop_s", - "name_NprXcFInRsRGK_prop_s", - "name_kruVqZBPAizaB_prop_s", - "name_OJaYkRoxWwARAGa_prop_s", - "name_fQeYEMbbBnnmbwS_prop_s", - "name_jHwrTEPSNe_prop_s", - "name_tGtgZLRbdYQHqFyI_prop_s", - "name_bYUODaraQABQMuiVwa_prop_s", - "name_LsdkDDyTgtLnQv_prop_s", - "name_WmBvbHCQqNznHXDM_prop_s", - "name_yCpJZfnNvJt_prop_s", - "name_nxEaZdhiNOaCgHXu_prop_s", - "name_YlsRbOaHrwrjw_prop_s", - "name_wEzIAxJlGY_prop_s", - "name_wgtEQdJDFUZMRCtKuvN_prop_s", - "name_NlrMACPMAY_prop_s", - "name_lyJIPhQYMXgUIOe_prop_s", - "name_XDMUiHILIfVcRVS_prop_s", - "name_CReyJWfRLOR_prop_s", - "name_AySGHgndHRfNrHYs_prop_s", - "name_vMKLAoTfxxBNIVC_prop_s", - "name_UiEpdEsyrJWBVZN_prop_s", - "name_ZDESHNBkigMNhIdqjqB_prop_s", - "name_MeDLRbvcZrLgrXD_prop_s", - "name_wtkpdHkreDpFK_prop_s", - "name_fdKDEadJGWkIhpT_prop_s", - "name_ozeAMJPgTwwzrTmu_prop_s", - 
"name_CNivtYVLtjVlr_prop_s", - "name_yglTIePAOb_prop_s", - "name_UTRKTVkvhpJKEE_prop_s", - "name_OmHylNTQXDRUKEC_prop_s", - "name_JiZnnChtUFMUrGi_prop_s", - "name_WoCxWZkHoaQu_prop_s", - "name_AnNVbPPNuzjqFnL_prop_s", - "name_kLXLsnBnOoySgS_prop_s", - "name_UhbzdIMuOFGDaNiXEv_prop_s", - "name_eWOWltJaJILIzCH_prop_s", - "name_AIMKZYfLAHIs_prop_s", - "name_pDzYoeEDPjsvqJ_prop_s", - "name_eOACrLtTfxoyRlU_prop_s", - "name_WauBOgBeapqDugJyyp_prop_s", - "name_uwzXxeCxlcsKrNwpPkm_prop_s", - "name_zZYjhAOxmRWjICXyd_prop_s", - "name_jyeCWKaQnlrYHkzwSH_prop_s", - "name_SesSMUttyVjUJaGKX_prop_s", - "name_HBOChmtthCl_prop_s", - "name_CxlLbdpOOfXwL_prop_s", - "name_MiFBPgcnSSYFJdyju_prop_s", - "name_rKEAVEpJXKWbRYM_prop_s", - "name_xLQKEwIRCsGTqWzRf_prop_s" - }); + private static final List FIELD_NAMES = + Arrays.asList( + new String[] { + "name_DfsqCIYgwMpJnc_prop_s", + "name_VHzHTZWnqGALJJ_prop_s", + "name_OyKmIqynBbK_prop_s", + "name_JofvOXUMYQs_prop_s", + "name_SaAfmgHXbCIUethh_prop_s", + "name_CMajAPNHivraqKBmYxH_prop_s", + "name_OpJFcSZHuOFVKs_prop_s", + "name_fTaolBrXTGpJ_prop_s", + "name_hlgpuaRTRmYjBNmzHBI_prop_s", + "name_DGSzgfeiMouuTgbaklJ_prop_s", + "name_hTAZuAysueB_prop_s", + "name_VqztpEqzBCXEhVM_prop_s", + "name_CaJSsxLqxhq_prop_s", + "name_JjEYNobdJiyAJ_prop_s", + "name_GGpLbFvxdFyBH_prop_s", + "name_NIfhcAmufHRwaGNuO_prop_s", + "name_wRzKYNtwiUapyzjQh_prop_s", + "name_UonaDljKBYUMgMV_prop_s", + "name_sByosZWJLlrrFYVXaT_prop_s", + "name_HKHToAtQQkPMwNyGr_prop_s", + "name_HJBQHPKbxHvPGp_prop_s", + "name_UtERukPiRHzqv_prop_s", + "name_WIevbvmoKJkcr_prop_s", + "name_YjoCtbikMRaY_prop_s", + "name_OwuVrwcxslmiWMylkuH_prop_s", + "name_eEoZobamQfJLad_prop_s", + "name_IWkfNtxsTRbuPIT_prop_s", + "name_rZphZcqVQN_prop_s", + "name_QbePjDfrPkiUySUfSS_prop_s", + "name_ABCPaNPQXBwVJh_prop_s", + "name_OitLZpkeOXrOAeITlAc_prop_s", + "name_GlGQselWNwuHUSPy_prop_s", + "name_XDNBBpHaxD_prop_s", + "name_NkSQtvNhCwgPxnuRGGK_prop_s", + "name_mkYuyjFfWjEb_prop_s", + "name_JUOzeuNelNHbQVlj_prop_s", + "name_CuzbqxBlEJEnBdeJo_prop_s", + "name_GbpIJAqoVP_prop_s", + "name_oPozbuiwFXFoCQ_prop_s", + "name_QPcamTHGpEgYGW_prop_s", + "name_QfgfGrTZZkqIbLq_prop_s", + "name_UtkepJfqAPQQZvDnB_prop_s", + "name_ShipLvibadhd_prop_s", + "name_wAdEXOEAydT_prop_s", + "name_YiquTYZxxNsxanQ_prop_s", + "name_hJfuWEBCYIdtcixldUy_prop_s", + "name_PzYofpLhvtw_prop_s", + "name_rhkJFHishBuS_prop_s", + "name_GNUoUCaqqfGErM_prop_s", + "name_hSrbCrBUEs_prop_s", + "name_xJANZEGtTrIXMDLBgL_prop_s", + "name_pOhSitCAKl_prop_s", + "name_PkBHXUceEgVP_prop_s", + "name_fvDrPKkegWr_prop_s", + "name_HVzmAutUrUoicr_prop_s", + "name_ouFhihsihDk_prop_s", + "name_eeFcnImKkXiKXDTIPC_prop_s", + "name_NMEsrYgSBoIEwp_prop_s", + "name_yqCQGPzCamFqBwLZiiC_prop_s", + "name_JlHlxPykBl_prop_s", + "name_lYGskGWJfNhnd_prop_s", + "name_ifXTlDnYqUmjFNhKOxq_prop_s", + "name_uaCtJcjZWu_prop_s", + "name_LzSXDKQdhQ_prop_s", + "name_TpvZetClsYcJenPCdW_prop_s", + "name_NPsQNyfkDCgNus_prop_s", + "name_zMZnwFtVnbdlGncBEf_prop_s", + "name_dGDCXTxABxh_prop_s", + "name_JIOxBoRhiZLD_prop_s", + "name_smVTZaCZZMiSmYq_prop_s", + "name_VgCZTMfOHpfAlGUjDxT_prop_s", + "name_HhtLeCOGJMNLMXFBgI_prop_s", + "name_QpzFZXNIpk_prop_s", + "name_obTfzXxBoCXpiGFGWuz_prop_s", + "name_VrBTsQmfJoqNI_prop_s", + "name_QeXnmsrvSYZBtkWwDxs_prop_s", + "name_vtvvKPfpTBBBMuMTZZ_prop_s", + "name_VvPvDbWJXsXIAUSNWgW_prop_s", + "name_BYCAfIaRKVUvHHBIut_prop_s", + "name_srwPMPauluyfyM_prop_s", + "name_YlrFboTEUfq_prop_s", + "name_vIPAkvspnnT_prop_s", + "name_XWVkDyVpkZvo_prop_s", + 
"name_tJDzyfWZtOrzwvuw_prop_s", + "name_mvfaMcKLduLXcvol_prop_s", + "name_OKvQYLTaCWwGTXDboK_prop_s", + "name_VkMXjFZGUQgNWDbKbgp_prop_s", + "name_IixctxAiJdqQQlPwV_prop_s", + "name_LbOxzyxGVrsKyZgCHKi_prop_s", + "name_YtJheZqyzPhpuAAitN_prop_s", + "name_IsctRhBopyx_prop_s", + "name_xrfxhlkidKabA_prop_s", + "name_MFqGPFbIOrneplmaOK_prop_s", + "name_fXOsAXXtMnLy_prop_s", + "name_ATQmfQzgdOlFPuDp_prop_s", + "name_rFrgtZZDVFGuHjteUX_prop_s", + "name_qcPrtNSRKfBPvtdXWJJ_prop_s", + "name_UpInzgFgMlfOuMuffOa_prop_s", + "name_cmwSPLLLuiv_prop_s", + "name_WDQjhkEHQabWvK_prop_s", + "name_BqSJaaLDBTTVy_prop_s", + "name_nqXaRkhFXV_prop_s", + "name_GJBYZZXOOlyJ_prop_s", + "name_khgXzOmSxxrerikblPC_prop_s", + "name_uFNMtGvQQJljSgk_prop_s", + "name_yoZRduwiqx_prop_s", + "name_GqqWeEyYXEwT_prop_s", + "name_tzhVSqoPKt_prop_s", + "name_ensyGAXGQSuW_prop_s", + "name_LQJmrvSWKQHc_prop_s", + "name_KPpikIjkpciF_prop_s", + "name_mplQAMNcigYEwNEBT_prop_s", + "name_idmsrYlJGoizvsllQsW_prop_s", + "name_rMPMEsrySqUVwcDaUE_prop_s", + "name_febnQEKdThaqhnghZ_prop_s", + "name_XxtOzKGvvSguMNS_prop_s", + "name_VtFlQvelTPyz_prop_s", + "name_PQYUOnhHJsSaqVDH_prop_s", + "name_qQEIWMsRNQAV_prop_s", + "name_rPPHpYLbLoUiLYQ_prop_s", + "name_wZaRlynJFNvWJKjyyuA_prop_s", + "name_sOwZhIRXUlCvaqRn_prop_s", + "name_omkQRxJuYPLTeB_prop_s", + "name_fVJbGrSpMpO_prop_s", + "name_wLYQtojRTtWeQfz_prop_s", + "name_dlQxbbzWoAEDbRPFy_prop_s", + "name_SkYKoVihqWDXnsH_prop_s", + "name_whlpGhuMeZA_prop_s", + "name_iOsqSwnNKSNrjLmkpvo_prop_s", + "name_dWYzrxvJttwv_prop_s", + "name_stOcVzqQedeqagmynaG_prop_s", + "name_NENunrnlQI_prop_s", + "name_HqeTpJDHOsfpawjehIq_prop_s", + "name_RPwyjltiltvDOqpsYi_prop_s", + "name_znVAkUDVYWMIoLr_prop_s", + "name_jTzTSvTRyguN_prop_s", + "name_ySeOANIBnMabQvaru_prop_s", + "name_SadaPaYJaxkwHkRMuE_prop_s", + "name_JQVolDkiGeuvA_prop_s", + "name_NtxjSaBccGJWoK_prop_s", + "name_WvwitdcFXPUQny_prop_s", + "name_JQGUUVnzyMCJs_prop_s", + "name_GqDdyBcHznboeW_prop_s", + "name_RlRSvAFEykA_prop_s", + "name_TvNERqviFBnOCtemES_prop_s", + "name_DUlAWwbaslagWbIImdd_prop_s", + "name_gWILZCZRlbjBoQdrP_prop_s", + "name_ftNrYHWFvhuGYHuJt_prop_s", + "name_QYBKgeSLCQeRUX_prop_s", + "name_PYUIqToJNDgWASGFr_prop_s", + "name_zBZIhwwifmRTOXe_prop_s", + "name_hnPUucMPfhUuJoO_prop_s", + "name_agZLOYIoOWl_prop_s", + "name_SEgmWAjhjJ_prop_s", + "name_pUclNPUSiDZtMg_prop_s", + "name_LjIrSIDJqoqL_prop_s", + "name_vjHbgxEULpsQiZlUaM_prop_s", + "name_eymEZtHNKYjWFEUlbR_prop_s", + "name_tQmOnPEwkIMJlzPRG_prop_s", + "name_ogsTpGUlFLOvLzl_prop_s", + "name_jJfXDLSaOuHI_prop_s", + "name_tBfQFKUYmaAeR_prop_s", + "name_rzFgVahQrXezOIMy_prop_s", + "name_qdjFkPulsPpMLXVPp_prop_s", + "name_rWetgUNXaoxXIfbPDz_prop_s", + "name_OrSAGeTkkrRUygOLG_prop_s", + "name_LoeOnHUogQnvFHbvXCQ_prop_s", + "name_wCbfoExoqlldz_prop_s", + "name_mAyvGeccKbSpO_prop_s", + "name_LAlRNXNqtwdF_prop_s", + "name_CzQuGtKdZviLIh_prop_s", + "name_pkfyyloJeQLCiclF_prop_s", + "name_BBabvpGlueqCqEAJq_prop_s", + "name_yMyCCNWJarW_prop_s", + "name_rXyBgzPnWqnU_prop_s", + "name_yjTcYotQfUVXVp_prop_s", + "name_iQulShIGGjlJuGtkOk_prop_s", + "name_EAMjjKBtOri_prop_s", + "name_cKKMdfEVvOY_prop_s", + "name_HCgMMUWJhAPUcSYEw_prop_s", + "name_QxAiEPSPFcGdpbsAN_prop_s", + "name_uRFDixdPAlsNiZ_prop_s", + "name_ctffdxcrVBN_prop_s", + "name_mdXIbwncmwHgDmfsiAM_prop_s", + "name_gKlSaxAfDdYgt_prop_s", + "name_juaOrDYjSfvcmkd_prop_s", + "name_YadqjaxLPXUpJCIMdNm_prop_s", + "name_jlNcOgAYUBoj_prop_s", + "name_AKNbQWFRzzYbAhOlqAI_prop_s", + "name_JAzAPnrljRhqbNfdoh_prop_s", + 
"name_kXYgLRfqrYiQxRo_prop_s", + "name_AfZylgVaZgvaIQgR_prop_s", + "name_XaOBvJEVEw_prop_s", + "name_hDwJONxcscyJuzYRH_prop_s", + "name_SvogOicRPq_prop_s", + "name_RIsXETbdCtBuL_prop_s", + "name_jOxeorqpGcdkp_prop_s", + "name_IBzKXorZDdowJujJkC_prop_s", + "name_kWfsavjmSEIyGxeoz_prop_s", + "name_DhaoVQSvJZfy_prop_s", + "name_dWSNRommSreSW_prop_s", + "name_LWqiEKFOMPVklmFwyoX_prop_s", + "name_GVazWdylOnyamFiz_prop_s", + "name_CcgGFeiwORNbAw_prop_s", + "name_mVgxCpHfjhofqaOA_prop_s", + "name_nzuAietKmfmjnXalz_prop_s", + "name_YzYAGdOoaxwgSh_prop_s", + "name_jLMshjzscpgU_prop_s", + "name_JaLKPfULNIeWysimJf_prop_s", + "name_KehwwGmAULqXtNCrwhX_prop_s", + "name_mxpwZrDktTLXUzkdKa_prop_s", + "name_bPmedbyCSSjDC_prop_s", + "name_LYbCFtmQiC_prop_s", + "name_cLrVLzwMcMnAT_prop_s", + "name_HeOUpecBxVvHEERlPUk_prop_s", + "name_jCVSgiNewmDB_prop_s", + "name_jOLtAVRUFrs_prop_s", + "name_gfWTsWEVeVSXwGMgUT_prop_s", + "name_BPbiEWmyizADxNIV_prop_s", + "name_VYSwGIOIarPmGWVKenS_prop_s", + "name_QpAHpTcxrzVYYfWYT_prop_s", + "name_tOBVdVTRBMmCXfnxrNa_prop_s", + "name_iCaKxgfTXuvgCT_prop_s", + "name_kdPWzVZHslaijNrKbKU_prop_s", + "name_wmJmiiWghggUHmNiQAg_prop_s", + "name_ZpzaQMGuMfOjw_prop_s", + "name_cgOqMOeYMHJ_prop_s", + "name_EnguvcJhre_prop_s", + "name_edeevGMabTDek_prop_s", + "name_vmJEHidWgTTUvioGhi_prop_s", + "name_CHYfwnIHxQzPwEFJ_prop_s", + "name_KXpUaenwfjlj_prop_s", + "name_eVGHumUijQhFvzGjaV_prop_s", + "name_XorPzBArSbSTCHpz_prop_s", + "name_RRLESavujqcxblljkn_prop_s", + "name_YftgbzYxUNUCMXt_prop_s", + "name_IqEDQHVFGIyQSS_prop_s", + "name_XbStVPkHwGYmQB_prop_s", + "name_JyWCZhERjLOtqw_prop_s", + "name_dDiuFMzjhrJGyqqud_prop_s", + "name_uCCDpPtxkdQNDq_prop_s", + "name_ohZQKMOVeb_prop_s", + "name_gBTzxrPwsX_prop_s", + "name_RLkUwPFSVjqB_prop_s", + "name_CXlPWeBunQDGtBXqo_prop_s", + "name_kGvCPheDzjir_prop_s", + "name_cvcOAZkaTZsTyrrWxvQ_prop_s", + "name_sftNHXiElgbUQxtYDI_prop_s", + "name_aqGEmvTBCqdFKyfa_prop_s", + "name_myCtzywMPzQyJhHwsEy_prop_s", + "name_TmvkTzpWLtPEDUfmg_prop_s", + "name_XnYvWLQJdcjdOBmfJ_prop_s", + "name_toYeyORNQWA_prop_s", + "name_hcWpqATuIiUbyfiHPaJ_prop_s", + "name_EAelPZjFpiThB_prop_s", + "name_aEfokIQMbKI_prop_s", + "name_YMbTCeRRipELjF_prop_s", + "name_yIbPmIvnUNFsKaEk_prop_s", + "name_PVsusfJldMrTq_prop_s", + "name_BhGnYInbCoBcRxbkh_prop_s", + "name_LvywGWGeDmCnwYM_prop_s", + "name_bJwdGFMfTyRhI_prop_s", + "name_durkyUrNKHx_prop_s", + "name_RdeZaAlmttQzNDZCb_prop_s", + "name_VdzHkraZKezBjY_prop_s", + "name_rAhOeyHbDuW_prop_s", + "name_SNzylGssYOA_prop_s", + "name_vHqZyqgwfD_prop_s", + "name_DPnKKQlfkn_prop_s", + "name_PQFtvTrPezVRLL_prop_s", + "name_YkOCraZfkuCyx_prop_s", + "name_glGgplQXQzqaHbT_prop_s", + "name_OqpvyNHqeQUANE_prop_s", + "name_EYRKsQekVHcYlWf_prop_s", + "name_RFuZbCWIOu_prop_s", + "name_ekHWLiTVyNjYdl_prop_s", + "name_vezpACcbFw_prop_s", + "name_oQQXcPzeODviDC_prop_s", + "name_wZkyzXscqPGWiEzwR_prop_s", + "name_eYywOQdxMbAwHNC_prop_s", + "name_gvEXKFXEAQMaYm_prop_s", + "name_vofoikKFpZsOZfY_prop_s", + "name_aXNocadbQQO_prop_s", + "name_pzzPuuliByDjLm_prop_s", + "name_dIOSQFOVldP_prop_s", + "name_sbplpizxCQWndsBpoU_prop_s", + "name_uogQaerZVBnV_prop_s", + "name_WsDhwfdJivmMKO_prop_s", + "name_RjJjIrPGWGFgCbT_prop_s", + "name_sKymsAbmFqwyzKRSH_prop_s", + "name_wIHDafXfvOunVi_prop_s", + "name_pWEWMRdqgvuGdqwztct_prop_s", + "name_aFDHZXHKgnVo_prop_s", + "name_dAdcQYTvmRZ_prop_s", + "name_zQsaOcogPYNqypDPYjS_prop_s", + "name_KOtJNECCHjLxKZqHZ_prop_s", + "name_wfdxykXSBRcrfUv_prop_s", + "name_kGJgFephxkeH_prop_s", + "name_peispafiMLgmE_prop_s", + 
"name_CJTnCuCsOSCvj_prop_s", + "name_xpOyokirtcJoFPKyH_prop_s", + "name_nhmhQePxBvNT_prop_s", + "name_vPxdJTwHkzDdvaK_prop_s", + "name_dAGyfZWSkTaCCt_prop_s", + "name_CYaZJGFolJqNhmKgsV_prop_s", + "name_vboqCHtthOPMRHU_prop_s", + "name_fqrgYweKbBNzlYJk_prop_s", + "name_SwOSQemwasu_prop_s", + "name_dRDJlPUxSgvIS_prop_s", + "name_DYjfbnkMhnMyL_prop_s", + "name_REAirSXdUlsq_prop_s", + "name_aPLpQwhWGCcjk_prop_s", + "name_LWlbDafEriuRGmJYW_prop_s", + "name_bTFLYGqAHYvnpFvzd_prop_s", + "name_emIonaQRdfsjmVCjUn_prop_s", + "name_RdMOfMWlqKmKuxYawG_prop_s", + "name_NmvxkGBDyJ_prop_s", + "name_veeKFlgaBqTXINdlbi_prop_s", + "name_JEMSCgBWKwpd_prop_s", + "name_RKsEwiClkYAENVkO_prop_s", + "name_QSfmaqphip_prop_s", + "name_DhcOPbKnWrv_prop_s", + "name_AhEfQCMTWtrdjBV_prop_s", + "name_EtAMDtJVTd_prop_s", + "name_qVxNUttsduupj_prop_s", + "name_BeFWYHBfnSNqVEPz_prop_s", + "name_wPevXszAQZWZwe_prop_s", + "name_oJYcnxrshAkjJYnXyn_prop_s", + "name_nffSJxMrhrIlQw_prop_s", + "name_ZrHpSfuzHHIin_prop_s", + "name_rdMnHMmgEQaGLmXRPiD_prop_s", + "name_huldbnqnXwop_prop_s", + "name_jduhQpDoYv_prop_s", + "name_NBOKEducirzNsSSy_prop_s", + "name_xJzWfJrMIY_prop_s", + "name_VMZbxqOHwfQDaGT_prop_s", + "name_syUXJprVoLTZYebB_prop_s", + "name_prZEUbNoTysB_prop_s", + "name_RfvJRIoQeGSu_prop_s", + "name_BBshBWkaLopZ_prop_s", + "name_YOAVKRdkRspIVaLva_prop_s", + "name_RrTctdPJnMoMw_prop_s", + "name_TEoYvqSeBmaUHflB_prop_s", + "name_IwsxROIVgJ_prop_s", + "name_ktQwKjuCLYAmOnyj_prop_s", + "name_MZrmJkYFkHsU_prop_s", + "name_bdagQHBFmoIo_prop_s", + "name_zmoxFeHMBwkyEO_prop_s", + "name_wenNdlQvlHItqflx_prop_s", + "name_XprqFpXiYoHzEfd_prop_s", + "name_ogZQmtfQOfvP_prop_s", + "name_QOsBJGNDUzbHWHrQ_prop_s", + "name_jfrfWIuCWSFXQumtm_prop_s", + "name_VFWIKhommZaTVuzphSb_prop_s", + "name_RVhGwEvGjdOnzR_prop_s", + "name_FQlxoQLZIkZCyfiVx_prop_s", + "name_MPbQJjgBGMUR_prop_s", + "name_SbbTGVASSkYHiNwV_prop_s", + "name_MntYiMNrHQ_prop_s", + "name_yjcZRVwITRLXb_prop_s", + "name_aSKYqqhexuo_prop_s", + "name_TfzoLKDlIhDun_prop_s", + "name_KeKTrXfMFglbN_prop_s", + "name_iIdfUsKoIlf_prop_s", + "name_FPQqtNlVCLSgwgNhf_prop_s", + "name_PkYUzUADmq_prop_s", + "name_nXAJwIhWfESKdZ_prop_s", + "name_faXLvuLCiq_prop_s", + "name_zarHYCyYIr_prop_s", + "name_sowzONSDytjGEZuv_prop_s", + "name_zyWCVstnSnLz_prop_s", + "name_anncXfqvveOWy_prop_s", + "name_TbvIhvzhkLAXm_prop_s", + "name_tBWzDGmZocLjPRFMIF_prop_s", + "name_JgCrqPcPNiVdrRRbf_prop_s", + "name_FBtKmopbwHOPPoMjDRA_prop_s", + "name_BOEyOhYKOUSQFQPxwDL_prop_s", + "name_uVosPVYbIF_prop_s", + "name_eQOiKlnUNZ_prop_s", + "name_lYYQBjpaIjMXYRH_prop_s", + "name_FyFvEcZfRrnx_prop_s", + "name_rNiSOAGXkMPBY_prop_s", + "name_tylcSBADvLvAKkzv_prop_s", + "name_KvoxbuKdiqLGUo_prop_s", + "name_FDZfmbIjXBiKoeWImxj_prop_s", + "name_NULbsIjjyysWdXGAxy_prop_s", + "name_RVtYeHUXaxVSBJUCX_prop_s", + "name_jlNFgVZgDAFKqHxR_prop_s", + "name_uIhSJwItLLKHa_prop_s", + "name_lEMFtKhGZjrjnLlW_prop_s", + "name_avEoREwfXmm_prop_s", + "name_IiXRqkZmvNAqf_prop_s", + "name_dKzqqsjZzTgxHTpZiA_prop_s", + "name_jilMmwVsaTkUgJ_prop_s", + "name_xYNTFgaEEluQ_prop_s", + "name_WFkNIiGzzfHous_prop_s", + "name_ztXfmQXTTNuXjPSCYC_prop_s", + "name_jyGvFWOSfs_prop_s", + "name_jRpEJIPQzYKLR_prop_s", + "name_FIUqxuPiWpMMTuZ_prop_s", + "name_ttkqBQpFtwHL_prop_s", + "name_bqYmgceeoJZSZbW_prop_s", + "name_ctRkHATHrFlnEKmSRLd_prop_s", + "name_wZorXwBeanELgv_prop_s", + "name_jXiyBDjpCKe_prop_s", + "name_sRvLkwUSBIsrt_prop_s", + "name_yEHNabvaqyAGa_prop_s", + "name_cwmgaKpzluwJOBvphxY_prop_s", + "name_cOXSTpgjzFEjfbJPVM_prop_s", + 
"name_ikkFRyBgGfWbg_prop_s", + "name_dEKLFEgvjHFo_prop_s", + "name_HJZRtrGjmPlc_prop_s", + "name_hMpazPhQVkTUE_prop_s", + "name_VKnOJLBqMVzkxD_prop_s", + "name_zKPBHVcuULlMTRy_prop_s", + "name_LzbMOhdcPnvcF_prop_s", + "name_euHYSgnsustyR_prop_s", + "name_IvuYSeiYicgpmboJW_prop_s", + "name_yGrlGoiNHNIt_prop_s", + "name_tpDceZWQvat_prop_s", + "name_iaDXoHUSwG_prop_s", + "name_fJXmNNxUHggajGl_prop_s", + "name_qdzxqokVXHjNBORhW_prop_s", + "name_DxoLvhVEbDcXb_prop_s", + "name_bFHhHakPJd_prop_s", + "name_hVrFxShinIeN_prop_s", + "name_XKPhskHDDg_prop_s", + "name_JjbLlVDrWA_prop_s", + "name_xOJcUebWcopYLGKGYhH_prop_s", + "name_VJTvLToaSyFUm_prop_s", + "name_civISGYkrfwD_prop_s", + "name_kSPizRJqJZ_prop_s", + "name_gmmUBdiHNFVBzpqukdi_prop_s", + "name_jSGXVJsJPmESYy_prop_s", + "name_AbyytYHuJyn_prop_s", + "name_YGNtCMfmLqE_prop_s", + "name_siCxrMEiFjwoEqfcc_prop_s", + "name_yWlyMAenZiTylpYzW_prop_s", + "name_XWOZYkmhzHmOF_prop_s", + "name_FlCjaUETSllVHEwmoR_prop_s", + "name_ZaXOAZXrKGXs_prop_s", + "name_wveujGHeUQ_prop_s", + "name_KhSPQFkCmHuScj_prop_s", + "name_cBXYezKthhDfoVOnIo_prop_s", + "name_rOVAKNTsPprlUDDlCa_prop_s", + "name_fgWaLCfjuDnbH_prop_s", + "name_ekxAMazIGJgLCCMox_prop_s", + "name_iCbNCfPSYKZ_prop_s", + "name_rULXErnmZoIMARdsEL_prop_s", + "name_MjtGLUmEVFFRKydbJ_prop_s", + "name_DzLQfXBPWppyPjj_prop_s", + "name_xxNOkzscmZ_prop_s", + "name_VAiCBAZUeEnA_prop_s", + "name_ftdPuTtNtpLoRmtqQB_prop_s", + "name_ebNmBmAGnjhDwEMkWN_prop_s", + "name_eZVGYMBDaN_prop_s", + "name_hxykcxgsIAfxfupix_prop_s", + "name_XEDImtbSKXAeLyEop_prop_s", + "name_yOxGFWeePpUIc_prop_s", + "name_RzqLTLciLlaundr_prop_s", + "name_UtCQadSTlNF_prop_s", + "name_ORSaWMOVQhZZWxkv_prop_s", + "name_qCgQYTeGGSJf_prop_s", + "name_AlIZOvRFcZPbZwU_prop_s", + "name_vdqdlYetlciyb_prop_s", + "name_dmJIAXeXYjJhwacpkLZ_prop_s", + "name_mCOjAATZrgxJ_prop_s", + "name_RJsQfzfqbZGXp_prop_s", + "name_XMiImCbTVJAoKSfEo_prop_s", + "name_kDCCVcALrCx_prop_s", + "name_VmkGYGugHqaA_prop_s", + "name_jvZilzavGvyq_prop_s", + "name_CCDtRrXmOTmc_prop_s", + "name_UGbllGSvifotji_prop_s", + "name_JOfVgyuwzbIriJg_prop_s", + "name_cJCGLUbaZcrJXGCcZyE_prop_s", + "name_yKXkqdoNhbkSPSBUv_prop_s", + "name_QSrzBIUBQVUrdzM_prop_s", + "name_ulgjGcvaqh_prop_s", + "name_JaQtXbimGQW_prop_s", + "name_xYrQHDXvVbzq_prop_s", + "name_wSxZHthLVwKjuBWR_prop_s", + "name_mEefJyzMBqdSbQ_prop_s", + "name_GGJivsaoxiirx_prop_s", + "name_CACALOHPQCrf_prop_s", + "name_GBrQDvusDOWuvClhYa_prop_s", + "name_vqZfUUBIkd_prop_s", + "name_mXGYvfrccKBFymNB_prop_s", + "name_wZhiLMSbHcweTy_prop_s", + "name_fFPlXgVZKVHosY_prop_s", + "name_wAFjlOGjIQJBOBgsg_prop_s", + "name_diTnXDoUYaBiVnc_prop_s", + "name_DyufnSBeLVPwDSPBi_prop_s", + "name_TlGahXVDZeZdT_prop_s", + "name_jDnUlzuoxtWKe_prop_s", + "name_MCnYrKrvAa_prop_s", + "name_HZtNEzgsgQpgPULw_prop_s", + "name_sZZJIdHfiEnPvbgdoK_prop_s", + "name_aehzQLgzPf_prop_s", + "name_uOhtALYSlV_prop_s", + "name_tLmeLHpBwzP_prop_s", + "name_tMDTGUzelwUQrqPaN_prop_s", + "name_wMUhdfLFRaXOOLeKL_prop_s", + "name_XqpDZerjeDqrzzhsw_prop_s", + "name_zrmxpkEbOGPIEzqM_prop_s", + "name_dEOcHvQShe_prop_s", + "name_QbZyQMReoJJG_prop_s", + "name_gsYPhgkPfTOiPDEAVD_prop_s", + "name_rbNswYkvBvqmA_prop_s", + "name_LNmSBXRpRnvKFpqbT_prop_s", + "name_rHuaoaqVkaAz_prop_s", + "name_zjeWWuFoacJuuxETiD_prop_s", + "name_BhAKoPqFSVlA_prop_s", + "name_JjjcumppysyXsTldO_prop_s", + "name_bMjIQaLeLZ_prop_s", + "name_ujEVEGDoYpXmg_prop_s", + "name_xeFvZrHvmONeM_prop_s", + "name_vgNWlNzSOGo_prop_s", + "name_AvdOuoFmghMsCklVua_prop_s", + "name_KWDQpWtFvwxJWNNj_prop_s", + 
"name_llVSmiZlLiNdippBgzm_prop_s", + "name_BLmEHGblGXQHbywh_prop_s", + "name_rfoqACJQVUHBuJZDjdx_prop_s", + "name_kDQGbSJbyD_prop_s", + "name_PLSTfOtIQx_prop_s", + "name_NiIIwJLdfGlAcwzfT_prop_s", + "name_kPGazwKspZmewuiaZVB_prop_s", + "name_cuhoZBMdAMi_prop_s", + "name_XbZqGnehJGT_prop_s", + "name_yfCAGBvZufEM_prop_s", + "name_GHuXtHZtwTY_prop_s", + "name_jFtdTtkJbvHrsgGQ_prop_s", + "name_iBcSefLjrrEyHOfqpx_prop_s", + "name_GBJRIYHEbuwJmzLdxtm_prop_s", + "name_VPiXQrwycQPT_prop_s", + "name_XJstMKshDibmiHoZMd_prop_s", + "name_wiGBycapxeIXtTrvW_prop_s", + "name_rJPHaUEbgraQ_prop_s", + "name_rGxylqGVHinLjO_prop_s", + "name_GXeVgdEWBmv_prop_s", + "name_HnYKYhHxZlpGIwdIVQ_prop_s", + "name_FIOSdBvncmSeMiH_prop_s", + "name_FCOLTVOghkVRBXhh_prop_s", + "name_iZknWYaTKn_prop_s", + "name_bQhwLkthwP_prop_s", + "name_GJKLUOxgFtxMdbpeN_prop_s", + "name_uCUdhLIXQKheDpQMB_prop_s", + "name_knArOLgcybDsJsor_prop_s", + "name_vgoNwqvzshUKeOPUSYk_prop_s", + "name_YzIaNlWjqBqwoJcA_prop_s", + "name_hDYFmiHwhPCL_prop_s", + "name_fEAcVIqAfAIXehyOoGU_prop_s", + "name_KwUSxCHFWiXOTqk_prop_s", + "name_KRUSuEYGaQgWJmnGm_prop_s", + "name_PpWwLjvaGoR_prop_s", + "name_skVILQlxWYQowRGw_prop_s", + "name_bcbBLimvTIGQp_prop_s", + "name_vYQrLudbiua_prop_s", + "name_nuDloTTlKFpeoV_prop_s", + "name_RhbixfcpVSMOPfK_prop_s", + "name_fRRDlXHyOAGhwJ_prop_s", + "name_PGTPucoCVbz_prop_s", + "name_TTOIQLLAUIMUqE_prop_s", + "name_kXJQwDYAdc_prop_s", + "name_VlYMFsIAfv_prop_s", + "name_OThsmraSBTydoPfu_prop_s", + "name_WhEccUbWgvObJoS_prop_s", + "name_bxJtNPHBleHNhfat_prop_s", + "name_aLJcfxHporPCXBiF_prop_s", + "name_BbBwSzFKovNubMsv_prop_s", + "name_ZoaCLmepYLkTCLddGPn_prop_s", + "name_jYflHPNvrnzB_prop_s", + "name_SGqftBmurcbCEMn_prop_s", + "name_PqiMioFAtKOjkan_prop_s", + "name_ZeazKbMtVMB_prop_s", + "name_sgQyAUHsEg_prop_s", + "name_EAIUmQCWbiQbZI_prop_s", + "name_FNcVUavfHz_prop_s", + "name_ViUmtAvjlwKCeFb_prop_s", + "name_FYjubApKwXxQnNUIxB_prop_s", + "name_WLPEmGTQAisfXsq_prop_s", + "name_CyrnsHyuyFBx_prop_s", + "name_zMGfDpWzqfZMAF_prop_s", + "name_NILxzDPIbmoxOwQtuQP_prop_s", + "name_JJCEpGqGVjJa_prop_s", + "name_CtTFvRpyzKguMdZ_prop_s", + "name_qiGhKGSMzMMp_prop_s", + "name_QLUJBWXryHb_prop_s", + "name_sMJePABydcVoQk_prop_s", + "name_tfpbMNRLaXuyLuexLGy_prop_s", + "name_rYoMoMLacxWlS_prop_s", + "name_vWDCkyzmEi_prop_s", + "name_RkKjeQtYycWC_prop_s", + "name_xfDfirUchdkxKIDJOt_prop_s", + "name_mEWCBmdvyhON_prop_s", + "name_uLtsxsjXOGQZkCChL_prop_s", + "name_UYjWVNCvGE_prop_s", + "name_JJxhmSNcmsN_prop_s", + "name_fYqlzMmhQdoecsvx_prop_s", + "name_MxXoomSYegfmoEy_prop_s", + "name_hKITNVMXrrjaeFpwfh_prop_s", + "name_bhTKjWsdWDdonwi_prop_s", + "name_XWjLvIfzoorQRqBmo_prop_s", + "name_UqLAinOoswSeBVh_prop_s", + "name_mQzjXAidhWpqqG_prop_s", + "name_ytxaqwLBrvJYolqi_prop_s", + "name_daTgAYVVJQsmO_prop_s", + "name_xCmENbUDoiZ_prop_s", + "name_eZTpxzkHHLjKUGuV_prop_s", + "name_XdJsjHWRNMnQeC_prop_s", + "name_tTSOfpdJTOsZkcTH_prop_s", + "name_ridXaoCaPNoyFx_prop_s", + "name_HVIFmePdpnAcvjba_prop_s", + "name_osQVkiJtkHiBVP_prop_s", + "name_ikTrXQFmMpAw_prop_s", + "name_CtPYdlsrBtsuRkU_prop_s", + "name_BbnERLXULZsX_prop_s", + "name_FUGsEWgJtiLxWUEadSE_prop_s", + "name_babUPIRWxOJTyQqt_prop_s", + "name_zqaORMkAJlhSf_prop_s", + "name_CeRKgIekQl_prop_s", + "name_sHuCaTJIqfPYqpDILZe_prop_s", + "name_wMsJtSzGDCJ_prop_s", + "name_NprXcFInRsRGK_prop_s", + "name_kruVqZBPAizaB_prop_s", + "name_OJaYkRoxWwARAGa_prop_s", + "name_fQeYEMbbBnnmbwS_prop_s", + "name_jHwrTEPSNe_prop_s", + "name_tGtgZLRbdYQHqFyI_prop_s", + 
"name_bYUODaraQABQMuiVwa_prop_s", + "name_LsdkDDyTgtLnQv_prop_s", + "name_WmBvbHCQqNznHXDM_prop_s", + "name_yCpJZfnNvJt_prop_s", + "name_nxEaZdhiNOaCgHXu_prop_s", + "name_YlsRbOaHrwrjw_prop_s", + "name_wEzIAxJlGY_prop_s", + "name_wgtEQdJDFUZMRCtKuvN_prop_s", + "name_NlrMACPMAY_prop_s", + "name_lyJIPhQYMXgUIOe_prop_s", + "name_XDMUiHILIfVcRVS_prop_s", + "name_CReyJWfRLOR_prop_s", + "name_AySGHgndHRfNrHYs_prop_s", + "name_vMKLAoTfxxBNIVC_prop_s", + "name_UiEpdEsyrJWBVZN_prop_s", + "name_ZDESHNBkigMNhIdqjqB_prop_s", + "name_MeDLRbvcZrLgrXD_prop_s", + "name_wtkpdHkreDpFK_prop_s", + "name_fdKDEadJGWkIhpT_prop_s", + "name_ozeAMJPgTwwzrTmu_prop_s", + "name_CNivtYVLtjVlr_prop_s", + "name_yglTIePAOb_prop_s", + "name_UTRKTVkvhpJKEE_prop_s", + "name_OmHylNTQXDRUKEC_prop_s", + "name_JiZnnChtUFMUrGi_prop_s", + "name_WoCxWZkHoaQu_prop_s", + "name_AnNVbPPNuzjqFnL_prop_s", + "name_kLXLsnBnOoySgS_prop_s", + "name_UhbzdIMuOFGDaNiXEv_prop_s", + "name_eWOWltJaJILIzCH_prop_s", + "name_AIMKZYfLAHIs_prop_s", + "name_pDzYoeEDPjsvqJ_prop_s", + "name_eOACrLtTfxoyRlU_prop_s", + "name_WauBOgBeapqDugJyyp_prop_s", + "name_uwzXxeCxlcsKrNwpPkm_prop_s", + "name_zZYjhAOxmRWjICXyd_prop_s", + "name_jyeCWKaQnlrYHkzwSH_prop_s", + "name_SesSMUttyVjUJaGKX_prop_s", + "name_HBOChmtthCl_prop_s", + "name_CxlLbdpOOfXwL_prop_s", + "name_MiFBPgcnSSYFJdyju_prop_s", + "name_rKEAVEpJXKWbRYM_prop_s", + "name_xLQKEwIRCsGTqWzRf_prop_s" + }); } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestExactSharedStatsCacheCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestExactSharedStatsCacheCloud.java index cca209b1bb6..3e2ad6956f5 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestExactSharedStatsCacheCloud.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestExactSharedStatsCacheCloud.java @@ -18,9 +18,7 @@ import org.apache.solr.search.stats.ExactSharedStatsCache; -/** - * - */ +/** */ public class TestExactSharedStatsCacheCloud extends TestBaseStatsCacheCloud { @Override protected boolean assertSameScores() { diff --git a/solr/core/src/test/org/apache/solr/cloud/TestExactStatsCacheCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestExactStatsCacheCloud.java index ba7e0d48bc1..52ae696b264 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestExactStatsCacheCloud.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestExactStatsCacheCloud.java @@ -19,9 +19,7 @@ import org.apache.solr.search.stats.ExactStatsCache; import org.apache.solr.util.LogLevel; -/** - * - */ +/** */ @LogLevel("org.apache.solr.search=DEBUG") public class TestExactStatsCacheCloud extends TestBaseStatsCacheCloud { @Override diff --git a/solr/core/src/test/org/apache/solr/cloud/TestExclusionRuleCollectionAccess.java b/solr/core/src/test/org/apache/solr/cloud/TestExclusionRuleCollectionAccess.java index 5bf77c1a71d..1d805c575a1 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestExclusionRuleCollectionAccess.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestExclusionRuleCollectionAccess.java @@ -25,9 +25,7 @@ public class TestExclusionRuleCollectionAccess extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { - configureCluster(1) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(1).addConfig("conf", configset("cloud-minimal")).configure(); } @Test @@ -35,13 +33,15 @@ public void doTest() throws Exception { CollectionAdminRequest.createCollection("css33", "conf", 1, 1).process(cluster.getSolrClient()); - new UpdateRequest() - .add("id", "1") - 
.commit(cluster.getSolrClient(), "css33"); - - assertEquals("Should have returned 1 result", 1, - cluster.getSolrClient().query("css33", params("q", "*:*", "collection", "css33")).getResults().getNumFound()); + new UpdateRequest().add("id", "1").commit(cluster.getSolrClient(), "css33"); + assertEquals( + "Should have returned 1 result", + 1, + cluster + .getSolrClient() + .query("css33", params("q", "*:*", "collection", "css33")) + .getResults() + .getNumFound()); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestHashPartitioner.java b/solr/core/src/test/org/apache/solr/cloud/TestHashPartitioner.java index fa7c094f768..b8856e45f9a 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestHashPartitioner.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestHashPartitioner.java @@ -22,7 +22,6 @@ import java.util.HashSet; import java.util.List; import java.util.Map; - import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.cloud.CompositeIdRouter; @@ -37,7 +36,7 @@ import org.apache.solr.handler.admin.ConfigSetsHandler; public class TestHashPartitioner extends SolrTestCaseJ4 { - + public void testMapHashes() throws Exception { DocRouter hp = DocRouter.DEFAULT; List ranges; @@ -60,19 +59,22 @@ public void testMapHashes() throws Exception { for (int i = 1; i <= 30000; i++) { // start skipping at higher numbers - if (i > 100) i+=13; - else if (i > 1000) i+=31; - else if (i > 5000) i+=101; + if (i > 100) i += 13; + else if (i > 1000) i += 31; + else if (i > 5000) i += 101; long rangeSize = 0x0000000100000000L / i; ranges = hp.partitionRange(i, hp.fullRange()); assertEquals(i, ranges.size()); - assertTrue("First range does not start before " + Integer.MIN_VALUE - + " it is:" + ranges.get(0).min, + assertTrue( + "First range does not start before " + Integer.MIN_VALUE + " it is:" + ranges.get(0).min, ranges.get(0).min <= Integer.MIN_VALUE); - assertTrue("Last range does not end after " + Integer.MAX_VALUE - + " it is:" + ranges.get(ranges.size() - 1).max, + assertTrue( + "Last range does not end after " + + Integer.MAX_VALUE + + " it is:" + + ranges.get(ranges.size() - 1).max, ranges.get(ranges.size() - 1).max >= Integer.MAX_VALUE); for (Range range : ranges) { @@ -86,28 +88,27 @@ public void testMapHashes() throws Exception { for (Range range : ranges) { int currStart = range.min; int currEnd = range.max; - assertEquals(lastEnd+1, currStart); + assertEquals(lastEnd + 1, currStart); if (ranges.size() < 4000) { // ranges should be rounded to avoid crossing hash domains assertEquals(defaultLowerBits, currEnd & defaultLowerBits); // given our rounding condition that domains should be less than 1/16 of the step size, - // this means that any sizing deviations should also be less than 1/16th of the idealized range size. + // this means that any sizing deviations should also be less than 1/16th of the idealized + // range size. 
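The rounding invariant that this hunk's assertions check — every intermediate range boundary keeps its low bits saturated, so a range never splits a composite-id hash domain — can be illustrated outside the test. Below is a minimal standalone sketch, not CompositeIdRouter's actual algorithm: HashRangeSketch, partition, and lowerBitsMask are illustrative names. It splits the signed 32-bit hash space into n contiguous ranges and ORs each non-final upper bound with a mask, which yields exactly the (currEnd & defaultLowerBits) == defaultLowerBits property asserted above.

// A minimal sketch (assumed names, simplified logic -- not Solr's CompositeIdRouter):
// split the signed 32-bit hash space into n contiguous ranges, rounding each
// non-final upper bound by OR-ing its low bits so it ends on a mask boundary.
class HashRangeSketch {

  /** Returns n {min, max} pairs covering [Integer.MIN_VALUE, Integer.MAX_VALUE]. */
  static int[][] partition(int n, int lowerBitsMask) {
    long fullSize = 0x0000000100000000L; // 2^32, the number of distinct int hash values
    long step = fullSize / n;            // idealized range size, as in the test above
    int[][] ranges = new int[n][2];
    long start = Integer.MIN_VALUE;
    for (int i = 0; i < n; i++) {
      // Saturate the low bits of every boundary except the last, so
      // (max & lowerBitsMask) == lowerBitsMask holds for intermediate ranges.
      long max = (i == n - 1) ? Integer.MAX_VALUE : ((start + step - 1) | lowerBitsMask);
      ranges[i][0] = (int) start;
      ranges[i][1] = (int) max;
      start = max + 1; // contiguous: the next range starts right after this one ends
    }
    return ranges;
  }

  public static void main(String[] args) {
    int mask = 0xFFFF; // e.g. round every boundary up to a 16-bit-aligned end
    for (int[] r : partition(4, mask)) {
      System.out.printf("[%d, %d] endsOnMask=%b%n", r[0], r[1], (r[1] & mask) == mask);
    }
  }
}

With n = 4 and a 16-bit mask, the sketch prints four contiguous ranges whose intermediate maxima all end in 0xFFFF, mirroring the rounding condition the surrounding comments describe: boundary deviations stay below the mask size, hence below 1/16th of the idealized range size when the mask is chosen that way.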
// boolean round = rangeStep >= (1< colls = solrCluster.collections(); + LazySolrCluster solrCluster = new LazySolrCluster(cluster.getSolrClient().getZkStateReader()); + SimpleMap colls = solrCluster.collections(); - SolrCollection c = colls.get("testLazyCluster1"); - assertNotNull(c); - c = colls.get("testLazyCluster2"); - assertNotNull(c); - int[] count = new int[1]; - solrCluster.collections().forEachEntry((s, solrCollection) -> count[0]++); - assertEquals(2, count[0]); + SolrCollection c = colls.get("testLazyCluster1"); + assertNotNull(c); + c = colls.get("testLazyCluster2"); + assertNotNull(c); + int[] count = new int[1]; + solrCluster.collections().forEachEntry((s, solrCollection) -> count[0]++); + assertEquals(2, count[0]); - count[0] = 0; + count[0] = 0; - assertEquals(2, solrCluster.collections().get("testLazyCluster1").shards().size()); - solrCluster.collections().get("testLazyCluster1").shards() - .forEachEntry((s, shard) -> shard.replicas().forEachEntry((s1, replica) -> count[0]++)); - assertEquals(4, count[0]); + assertEquals(2, solrCluster.collections().get("testLazyCluster1").shards().size()); + solrCluster + .collections() + .get("testLazyCluster1") + .shards() + .forEachEntry((s, shard) -> shard.replicas().forEachEntry((s1, replica) -> count[0]++)); + assertEquals(4, count[0]); - assertEquals(5, solrCluster.nodes().size()); - SolrZkClient zkClient = cloudClient.getZkStateReader().getZkClient(); - zkClient.create(ZkStateReader.CONFIGS_ZKNODE + "/conf1/a", null, CreateMode.PERSISTENT, true); - zkClient.create(ZkStateReader.CONFIGS_ZKNODE + "/conf1/a/aa1", new byte[1024], CreateMode.PERSISTENT, true); - zkClient.create(ZkStateReader.CONFIGS_ZKNODE + "/conf1/a/aa2", new byte[1024 * 2], CreateMode.PERSISTENT, true); + assertEquals(5, solrCluster.nodes().size()); + SolrZkClient zkClient = cloudClient.getZkStateReader().getZkClient(); + zkClient.create(ZkStateReader.CONFIGS_ZKNODE + "/conf1/a", null, CreateMode.PERSISTENT, true); + zkClient.create( + ZkStateReader.CONFIGS_ZKNODE + "/conf1/a/aa1", new byte[1024], CreateMode.PERSISTENT, true); + zkClient.create( + ZkStateReader.CONFIGS_ZKNODE + "/conf1/a/aa2", + new byte[1024 * 2], + CreateMode.PERSISTENT, + true); - List allFiles = new ArrayList<>(); - byte[] buf = new byte[3*1024]; - CollectionConfig conf1 = solrCluster.configs().get("conf1"); - conf1.resources().abortableForEach((s, resource) -> { - allFiles.add(s); - if("a/aa1".equals(s)) { - resource.get(is -> assertEquals(1024, is.read(buf))); - } - if("a/aa2".equals(s)) { - resource.get(is -> assertEquals(2*1024, is.read(buf))); - } - if("a".equals(s)) { + List allFiles = new ArrayList<>(); + byte[] buf = new byte[3 * 1024]; + CollectionConfig conf1 = solrCluster.configs().get("conf1"); + conf1 + .resources() + .abortableForEach( + (s, resource) -> { + allFiles.add(s); + if ("a/aa1".equals(s)) { + resource.get(is -> assertEquals(1024, is.read(buf))); + } + if ("a/aa2".equals(s)) { + resource.get(is -> assertEquals(2 * 1024, is.read(buf))); + } + if ("a".equals(s)) { resource.get(is -> assertEquals(-1, is.read())); - } - return Boolean.TRUE; - }); - assertEquals(5, allFiles.size()); - - } - - + } + return Boolean.TRUE; + }); + assertEquals(5, allFiles.size()); + } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionWithEmptyReplica.java b/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionWithEmptyReplica.java index f8d7b5a6bd8..b5c0344b087 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionWithEmptyReplica.java +++ 
b/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionWithEmptyReplica.java @@ -20,7 +20,6 @@ import java.io.IOException; import java.util.List; import java.util.concurrent.TimeUnit; - import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.embedded.JettySolrRunner; @@ -35,9 +34,7 @@ import org.junit.BeforeClass; import org.junit.Test; -/** - * See SOLR-9504 - */ +/** See SOLR-9504 */ public class TestLeaderElectionWithEmptyReplica extends SolrCloudTestCase { private static final String COLLECTION_NAME = "solr_9504"; @@ -45,7 +42,8 @@ public class TestLeaderElectionWithEmptyReplica extends SolrCloudTestCase { public static void beforeClass() throws Exception { useFactory(null); configureCluster(2) - .addConfig("config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .configure(); CollectionAdminRequest.createCollection(COLLECTION_NAME, "config", 1, 1) @@ -58,7 +56,7 @@ public static void beforeClass() throws Exception { public void test() throws Exception { CloudSolrClient solrClient = cluster.getSolrClient(); solrClient.setDefaultCollection(COLLECTION_NAME); - for (int i=0; i<10; i++) { + for (int i = 0; i < 10; i++) { SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", String.valueOf(i)); solrClient.add(doc); @@ -71,7 +69,7 @@ public void test() throws Exception { List jettySolrRunners = cluster.getJettySolrRunners(); for (JettySolrRunner jettySolrRunner : jettySolrRunners) { int port = jettySolrRunner.getBaseUrl().getPort(); - if (replica.getBaseUrl().contains(":" + port)) { + if (replica.getBaseUrl().contains(":" + port)) { replicaJetty = jettySolrRunner; break; } @@ -81,7 +79,8 @@ public void test() throws Exception { replicaJetty.stop(); // add a replica (asynchronously) - CollectionAdminRequest.AddReplica addReplica = CollectionAdminRequest.addReplicaToShard(COLLECTION_NAME, "shard1"); + CollectionAdminRequest.AddReplica addReplica = + CollectionAdminRequest.addReplicaToShard(COLLECTION_NAME, "shard1"); String asyncId = addReplica.processAsync(solrClient); // wait a bit @@ -91,29 +90,45 @@ public void test() throws Exception { replicaJetty.start(); // wait until everyone is active - solrClient.waitForState(COLLECTION_NAME, DEFAULT_TIMEOUT, TimeUnit.SECONDS, + solrClient.waitForState( + COLLECTION_NAME, + DEFAULT_TIMEOUT, + TimeUnit.SECONDS, (n, c) -> DocCollection.isFullyActive(n, c, 1, 2)); // now query each replica and check for consistency - assertConsistentReplicas(solrClient, solrClient.getZkStateReader().getClusterState().getCollection(COLLECTION_NAME).getSlice("shard1")); + assertConsistentReplicas( + solrClient, + solrClient + .getZkStateReader() + .getClusterState() + .getCollection(COLLECTION_NAME) + .getSlice("shard1")); // sanity check that documents still exist QueryResponse response = solrClient.query(new SolrQuery("*:*")); assertEquals("Indexed documents not found", 10, response.getResults().getNumFound()); } - private static int assertConsistentReplicas(CloudSolrClient cloudClient, Slice shard) throws SolrServerException, IOException { + private static int assertConsistentReplicas(CloudSolrClient cloudClient, Slice shard) + throws SolrServerException, IOException { long numFound = Long.MIN_VALUE; int count = 0; for (Replica replica : shard.getReplicas()) { - HttpSolrClient client = new HttpSolrClient.Builder(replica.getCoreUrl()) - 
.withHttpClient(cloudClient.getLbClient().getHttpClient()).build(); + HttpSolrClient client = + new HttpSolrClient.Builder(replica.getCoreUrl()) + .withHttpClient(cloudClient.getLbClient().getHttpClient()) + .build(); QueryResponse response = client.query(new SolrQuery("q", "*:*", "distrib", "false")); -// log.info("Found numFound={} on replica: {}", response.getResults().getNumFound(), replica.getCoreUrl()); - if (numFound == Long.MIN_VALUE) { + // log.info("Found numFound={} on replica: {}", response.getResults().getNumFound(), + // replica.getCoreUrl()); + if (numFound == Long.MIN_VALUE) { numFound = response.getResults().getNumFound(); - } else { - assertEquals("Shard " + shard.getName() + " replicas do not have same number of documents", numFound, response.getResults().getNumFound()); + } else { + assertEquals( + "Shard " + shard.getName() + " replicas do not have same number of documents", + numFound, + response.getResults().getNumFound()); } count++; } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java b/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java index 83fba71d869..3328da1b3c1 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java @@ -21,7 +21,6 @@ import java.nio.file.Path; import java.util.Collections; import java.util.concurrent.TimeUnit; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.cloud.SolrZkClient; import org.apache.solr.core.CloudConfig; @@ -51,27 +50,32 @@ public void testLeaderElectionWithZkExpiry() throws Exception { try { server.run(); - CloudConfig cloudConfig = new CloudConfig.CloudConfigBuilder("dummy.host.com", 8984, "solr") - .setLeaderConflictResolveWait(180000) - .setLeaderVoteWait(180000) - .build(); - final ZkController zkController = new ZkController(cc, server.getZkAddress(), 15000, cloudConfig, () -> Collections.emptyList()); + CloudConfig cloudConfig = + new CloudConfig.CloudConfigBuilder("dummy.host.com", 8984, "solr") + .setLeaderConflictResolveWait(180000) + .setLeaderVoteWait(180000) + .build(); + final ZkController zkController = + new ZkController( + cc, server.getZkAddress(), 15000, cloudConfig, () -> Collections.emptyList()); try { - Thread killer = new Thread() { - @Override - public void run() { - long timeout = System.nanoTime() + TimeUnit.NANOSECONDS.convert(10, TimeUnit.SECONDS); - while (System.nanoTime() < timeout) { - long sessionId = zkController.getZkClient().getSolrZooKeeper().getSessionId(); - server.expire(sessionId); - try { - Thread.sleep(10); - } catch (InterruptedException e) { - return; + Thread killer = + new Thread() { + @Override + public void run() { + long timeout = + System.nanoTime() + TimeUnit.NANOSECONDS.convert(10, TimeUnit.SECONDS); + while (System.nanoTime() < timeout) { + long sessionId = zkController.getZkClient().getSolrZooKeeper().getSessionId(); + server.expire(sessionId); + try { + Thread.sleep(10); + } catch (InterruptedException e) { + return; + } + } } - } - } - }; + }; killer.start(); killer.join(); long timeout = System.nanoTime() + TimeUnit.NANOSECONDS.convert(60, TimeUnit.SECONDS); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestLocalStatsCacheCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestLocalStatsCacheCloud.java index fd44232f8dd..3be7561d06c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestLocalStatsCacheCloud.java +++ 
b/solr/core/src/test/org/apache/solr/cloud/TestLocalStatsCacheCloud.java @@ -20,9 +20,7 @@ import org.apache.solr.search.stats.StatsCache; import org.apache.solr.util.LogLevel; -/** - * - */ +/** */ @LogLevel("org.apache.solr.search=DEBUG") public class TestLocalStatsCacheCloud extends TestBaseStatsCacheCloud { @@ -38,9 +36,11 @@ protected String getImplementationName() { @Override protected void checkStatsCacheMetrics(StatsCache.StatsCacheMetrics statsCacheMetrics) { - assertTrue("LocalStatsCache should produce missing stats: " + statsCacheMetrics, + assertTrue( + "LocalStatsCache should produce missing stats: " + statsCacheMetrics, statsCacheMetrics.missingGlobalFieldStats.intValue() > 0); - assertTrue("LocalStatsCache should produce missing stats: " + statsCacheMetrics, + assertTrue( + "LocalStatsCache should produce missing stats: " + statsCacheMetrics, statsCacheMetrics.missingGlobalTermStats.intValue() > 0); } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestLockTree.java b/solr/core/src/test/org/apache/solr/cloud/TestLockTree.java index ec503275b11..e855011ca63 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestLockTree.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestLockTree.java @@ -17,14 +17,18 @@ package org.apache.solr.cloud; +import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICAPROP; +import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEREPLICA; +import static org.apache.solr.common.params.CollectionParams.CollectionAction.MODIFYCOLLECTION; +import static org.apache.solr.common.params.CollectionParams.CollectionAction.SPLITSHARD; + +import com.google.common.collect.ImmutableSet; import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; - -import com.google.common.collect.ImmutableSet; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.cloud.OverseerMessageHandler.Lock; import org.apache.solr.common.params.CollectionParams.CollectionAction; @@ -32,35 +36,32 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICAPROP; -import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEREPLICA; -import static org.apache.solr.common.params.CollectionParams.CollectionAction.MODIFYCOLLECTION; -import static org.apache.solr.common.params.CollectionParams.CollectionAction.SPLITSHARD; - public class TestLockTree extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - public void testLocks() throws Exception { LockTree lockTree = new LockTree(); - Lock coll1Lock = lockTree.getSession().lock(CollectionAction.CREATE, - Arrays.asList("coll1")); + Lock coll1Lock = lockTree.getSession().lock(CollectionAction.CREATE, Arrays.asList("coll1")); assertNotNull(coll1Lock); - assertNull("Should not be able to lock coll1/shard1", lockTree.getSession().lock(CollectionAction.BALANCESHARDUNIQUE, - Arrays.asList("coll1", "shard1"))); + assertNull( + "Should not be able to lock coll1/shard1", + lockTree + .getSession() + .lock(CollectionAction.BALANCESHARDUNIQUE, Arrays.asList("coll1", "shard1"))); - assertNull(lockTree.getSession().lock(ADDREPLICAPROP, - Arrays.asList("coll1", "shard1", "core_node2"))); + assertNull( + lockTree.getSession().lock(ADDREPLICAPROP, Arrays.asList("coll1", "shard1", 
"core_node2"))); coll1Lock.unlock(); - Lock shard1Lock = lockTree.getSession().lock(CollectionAction.BALANCESHARDUNIQUE, - Arrays.asList("coll1", "shard1")); + Lock shard1Lock = + lockTree + .getSession() + .lock(CollectionAction.BALANCESHARDUNIQUE, Arrays.asList("coll1", "shard1")); assertNotNull(shard1Lock); shard1Lock.unlock(); - Lock replica1Lock = lockTree.getSession().lock(ADDREPLICAPROP, - Arrays.asList("coll1", "shard1", "core_node2")); + Lock replica1Lock = + lockTree.getSession().lock(ADDREPLICAPROP, Arrays.asList("coll1", "shard1", "core_node2")); assertNotNull(replica1Lock); - List>> operations = new ArrayList<>(); operations.add(new Pair<>(ADDREPLICAPROP, Arrays.asList("coll1", "shard1", "core_node2"))); operations.add(new Pair<>(MODIFYCOLLECTION, Arrays.asList("coll1"))); @@ -69,10 +70,11 @@ public void testLocks() throws Exception { operations.add(new Pair<>(MODIFYCOLLECTION, Arrays.asList("coll2"))); operations.add(new Pair<>(DELETEREPLICA, Arrays.asList("coll2", "shard1"))); - List> orderOfExecution = Arrays.asList( - ImmutableSet.of("coll1/shard1/core_node2", "coll2/shard2"), - ImmutableSet.of("coll1", "coll2"), - ImmutableSet.of("coll1/shard1", "coll2/shard1")); + List> orderOfExecution = + Arrays.asList( + ImmutableSet.of("coll1/shard1/core_node2", "coll2/shard2"), + ImmutableSet.of("coll1", "coll2"), + ImmutableSet.of("coll1/shard1", "coll2/shard1")); lockTree = new LockTree(); for (int counter = 0; counter < orderOfExecution.size(); counter++) { LockTree.Session session = lockTree.getSession(); @@ -89,7 +91,6 @@ public void testLocks() throws Exception { } } - for (Thread thread : threads) thread.join(); if (locks.isEmpty()) throw new RuntimeException("Could not attain lock for anything " + operations); @@ -98,7 +99,9 @@ public void testLocks() throws Exception { log.info("counter : {} , expected : {}, actual : {}", counter, expectedOps, locks); assertEquals(expectedOps.size(), locks.size()); for (Lock lock : locks) - assertTrue("locks : " + locks + " expectedOps : " + expectedOps, expectedOps.contains(lock.toString())); + assertTrue( + "locks : " + locks + " expectedOps : " + expectedOps, + expectedOps.contains(lock.toString())); locks.clear(); for (Pair> completedOp : completedOps) { operations.remove(completedOp); @@ -106,8 +109,11 @@ public void testLocks() throws Exception { } } - private Runnable getRunnable(List>> completedOps, Pair> operation, List locks, Lock lock) { + private Runnable getRunnable( + List>> completedOps, + Pair> operation, + List locks, + Lock lock) { return () -> { try { Thread.sleep(1); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudClusterSSL.java b/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudClusterSSL.java index 4c455372983..d17b57d46dc 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudClusterSSL.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudClusterSSL.java @@ -16,12 +16,11 @@ */ package org.apache.solr.cloud; -import javax.net.ssl.SSLContext; -import javax.net.ssl.SSLException; import java.io.IOException; import java.lang.invoke.MethodHandles; import java.util.List; - +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLException; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpHead; import org.apache.http.config.Registry; @@ -34,7 +33,6 @@ import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; import org.apache.lucene.util.Constants; import org.apache.lucene.util.TestRuleRestoreSystemProperties; - 
import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.embedded.JettyConfig; @@ -52,45 +50,49 @@ import org.junit.Before; import org.junit.Rule; import org.junit.rules.TestRule; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * Tests various permutations of SSL options with {@link MiniSolrCloudCluster}. - * NOTE: This Test ignores the randomized SSL & clientAuth settings selected by base class, - * instead each method initializes a {@link SSLTestConfig} will specific combinations of settings to test. + * Tests various permutations of SSL options with {@link MiniSolrCloudCluster}. NOTE: This Test + * ignores the randomized SSL & clientAuth settings selected by base class, instead each + * method initializes a {@link SSLTestConfig} will specific combinations of settings to test. * * @see TestSSLRandomization */ public class TestMiniSolrCloudClusterSSL extends SolrTestCaseJ4 { private static final SSLContext DEFAULT_SSL_CONTEXT; + static { try { DEFAULT_SSL_CONTEXT = SSLContext.getDefault(); assert null != DEFAULT_SSL_CONTEXT; } catch (Exception e) { - throw new RuntimeException("Unable to initialize 'Default' SSLContext Algorithm, JVM is borked", e); + throw new RuntimeException( + "Unable to initialize 'Default' SSLContext Algorithm, JVM is borked", e); } } + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); public static final int NUM_SERVERS = 3; public static final String CONF_NAME = MethodHandles.lookup().lookupClass().getName(); - + @Rule - public TestRule syspropRestore = new TestRuleRestoreSystemProperties - (HttpClientUtil.SYS_PROP_CHECK_PEER_NAME); - + public TestRule syspropRestore = + new TestRuleRestoreSystemProperties(HttpClientUtil.SYS_PROP_CHECK_PEER_NAME); + @Before public void before() { // undo the randomization of our super class - log.info("NOTE: This Test ignores the randomized SSL & clientAuth settings selected by base class"); + log.info( + "NOTE: This Test ignores the randomized SSL & clientAuth settings selected by base class"); HttpClientUtil.resetHttpClientBuilder(); // also resets SocketFactoryRegistryProvider Http2SolrClient.resetSslContextFactory(); System.clearProperty(ZkStateReader.URL_SCHEME); } + @After public void after() { HttpClientUtil.resetHttpClientBuilder(); // also resets SocketFactoryRegistryProvider @@ -98,29 +100,32 @@ public void after() { System.clearProperty(ZkStateReader.URL_SCHEME); SSLContext.setDefault(DEFAULT_SSL_CONTEXT); } - + public void testNoSsl() throws Exception { final SSLTestConfig sslConfig = new SSLTestConfig(false, false); - HttpClientUtil.setSocketFactoryRegistryProvider(sslConfig.buildClientSocketFactoryRegistryProvider()); + HttpClientUtil.setSocketFactoryRegistryProvider( + sslConfig.buildClientSocketFactoryRegistryProvider()); Http2SolrClient.setDefaultSSLConfig(sslConfig.buildClientSSLConfig()); System.setProperty(ZkStateReader.URL_SCHEME, "http"); checkClusterWithNodeReplacement(sslConfig); } - + public void testNoSslButSillyClientAuth() throws Exception { - // this combination doesn't really make sense, since ssl==false the clientauth option will be ignored - // but we test it anyway for completeness of sanity checking the behavior of code that looks at those - // options. 
+ // this combination doesn't really make sense, since ssl==false the clientauth option will be + // ignored but we test it anyway for completeness of sanity checking the behavior of code that + // looks at those options. final SSLTestConfig sslConfig = new SSLTestConfig(false, true); - HttpClientUtil.setSocketFactoryRegistryProvider(sslConfig.buildClientSocketFactoryRegistryProvider()); + HttpClientUtil.setSocketFactoryRegistryProvider( + sslConfig.buildClientSocketFactoryRegistryProvider()); Http2SolrClient.setDefaultSSLConfig(sslConfig.buildClientSSLConfig()); System.setProperty(ZkStateReader.URL_SCHEME, "http"); checkClusterWithNodeReplacement(sslConfig); } - + public void testSslAndNoClientAuth() throws Exception { final SSLTestConfig sslConfig = new SSLTestConfig(true, false); - HttpClientUtil.setSocketFactoryRegistryProvider(sslConfig.buildClientSocketFactoryRegistryProvider()); + HttpClientUtil.setSocketFactoryRegistryProvider( + sslConfig.buildClientSocketFactoryRegistryProvider()); Http2SolrClient.setDefaultSSLConfig(sslConfig.buildClientSSLConfig()); System.setProperty(ZkStateReader.URL_SCHEME, "https"); checkClusterWithNodeReplacement(sslConfig); @@ -128,51 +133,54 @@ public void testSslAndNoClientAuth() throws Exception { public void testSslAndClientAuth() throws Exception { assumeFalse("SOLR-9039: SSL w/clientAuth does not work on MAC_OS_X", Constants.MAC_OS_X); - + final SSLTestConfig sslConfig = new SSLTestConfig(true, true); - HttpClientUtil.setSocketFactoryRegistryProvider(sslConfig.buildClientSocketFactoryRegistryProvider()); + HttpClientUtil.setSocketFactoryRegistryProvider( + sslConfig.buildClientSocketFactoryRegistryProvider()); Http2SolrClient.setDefaultSSLConfig(sslConfig.buildClientSSLConfig()); System.setProperty(ZkStateReader.URL_SCHEME, "https"); checkClusterWithNodeReplacement(sslConfig); } - // commented out on: 17-Feb-2019 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018 public void testSslWithCheckPeerName() throws Exception { final SSLTestConfig sslConfig = new SSLTestConfig(true, false, true); - HttpClientUtil.setSocketFactoryRegistryProvider(sslConfig.buildClientSocketFactoryRegistryProvider()); + HttpClientUtil.setSocketFactoryRegistryProvider( + sslConfig.buildClientSocketFactoryRegistryProvider()); Http2SolrClient.setDefaultSSLConfig(sslConfig.buildClientSSLConfig()); System.setProperty(ZkStateReader.URL_SCHEME, "https"); checkClusterWithNodeReplacement(sslConfig); } - + /** - * Constructs a cluster with the specified sslConfigs, runs {@link #checkClusterWithCollectionCreations}, - * then verifies that if we modify the default SSLContext (mimicing javax.net.ssl.* - * sysprops set on JVM startup) and reset to the default HttpClientBuilder, new HttpSolrClient instances - * will still be able to talk to our servers. + * Constructs a cluster with the specified sslConfigs, runs {@link + * #checkClusterWithCollectionCreations}, then verifies that if we modify the default SSLContext + * (mimicing javax.net.ssl.* sysprops set on JVM startup) and reset to the default + * HttpClientBuilder, new HttpSolrClient instances will still be able to talk to our servers. 
* * @see SSLContext#setDefault * @see HttpClientUtil#resetHttpClientBuilder * @see #checkClusterWithCollectionCreations */ private void checkClusterWithNodeReplacement(SSLTestConfig sslConfig) throws Exception { - - final JettyConfig config = JettyConfig.builder().withSSLConfig(sslConfig.buildServerSSLConfig()).build(); - final MiniSolrCloudCluster cluster = new MiniSolrCloudCluster(NUM_SERVERS, createTempDir(), config); + + final JettyConfig config = + JettyConfig.builder().withSSLConfig(sslConfig.buildServerSSLConfig()).build(); + final MiniSolrCloudCluster cluster = + new MiniSolrCloudCluster(NUM_SERVERS, createTempDir(), config); try { checkClusterWithCollectionCreations(cluster, sslConfig); - - // Change the defaul SSLContext to match our test config, or to match our original system default if - // our test config doesn't use SSL, and reset HttpClientUtil to it's defaults so it picks up our - // SSLContext that way. - SSLContext.setDefault( sslConfig.isSSLMode() ? sslConfig.buildClientSSLContext() : DEFAULT_SSL_CONTEXT); - System.setProperty(HttpClientUtil.SYS_PROP_CHECK_PEER_NAME, - Boolean.toString(sslConfig.getCheckPeerName())); + // Change the defaul SSLContext to match our test config, or to match our original system + // default if our test config doesn't use SSL, and reset HttpClientUtil to it's defaults so it + // picks up our SSLContext that way. + SSLContext.setDefault( + sslConfig.isSSLMode() ? sslConfig.buildClientSSLContext() : DEFAULT_SSL_CONTEXT); + System.setProperty( + HttpClientUtil.SYS_PROP_CHECK_PEER_NAME, Boolean.toString(sslConfig.getCheckPeerName())); HttpClientUtil.resetHttpClientBuilder(); Http2SolrClient.resetSslContextFactory(); - + // recheck that we can communicate with all the jetty instances in our cluster checkClusterJettys(cluster, sslConfig); } finally { @@ -185,14 +193,17 @@ public void testSslWithInvalidPeerName() throws Exception { // NOTE: first initialize the cluster w/o peer name checks, which means our server will use // certs with a bogus hostname/ip and clients shouldn't care... 
final SSLTestConfig sslConfig = new SSLTestConfig(true, false, false); - HttpClientUtil.setSocketFactoryRegistryProvider(sslConfig.buildClientSocketFactoryRegistryProvider()); + HttpClientUtil.setSocketFactoryRegistryProvider( + sslConfig.buildClientSocketFactoryRegistryProvider()); Http2SolrClient.setDefaultSSLConfig(sslConfig.buildClientSSLConfig()); System.setProperty(ZkStateReader.URL_SCHEME, "https"); - final JettyConfig config = JettyConfig.builder().withSSLConfig(sslConfig.buildServerSSLConfig()).build(); - final MiniSolrCloudCluster cluster = new MiniSolrCloudCluster(NUM_SERVERS, createTempDir(), config); + final JettyConfig config = + JettyConfig.builder().withSSLConfig(sslConfig.buildServerSSLConfig()).build(); + final MiniSolrCloudCluster cluster = + new MiniSolrCloudCluster(NUM_SERVERS, createTempDir(), config); try { checkClusterWithCollectionCreations(cluster, sslConfig); - + // now initialize a client that still uses the existing SSLContext/Provider, so it will accept // our existing certificate, but *does* care about validating the peer name System.setProperty(HttpClientUtil.SYS_PROP_CHECK_PEER_NAME, "true"); @@ -204,86 +215,91 @@ public void testSslWithInvalidPeerName() throws Exception { for (JettySolrRunner jetty : jettys) { final String baseURL = jetty.getBaseUrl().toString(); // verify new solr clients validate peer name and can't talk to this server - Exception ex = expectThrows(SolrServerException.class, () -> { - try (HttpSolrClient client = getRandomizedHttpSolrClient(baseURL)) { - CoreAdminRequest req = new CoreAdminRequest(); - req.setAction( CoreAdminAction.STATUS ); - client.request(req); - } - }); - assertTrue("Expected an root cause SSL Exception, got: " + ex.toString(), - ex.getCause() instanceof SSLException); + Exception ex = + expectThrows( + SolrServerException.class, + () -> { + try (HttpSolrClient client = getRandomizedHttpSolrClient(baseURL)) { + CoreAdminRequest req = new CoreAdminRequest(); + req.setAction(CoreAdminAction.STATUS); + client.request(req); + } + }); + assertTrue( + "Expected an root cause SSL Exception, got: " + ex.toString(), + ex.getCause() instanceof SSLException); } } finally { cluster.shutdown(); } - - - } /** * General purpose cluster sanity check... + * *

 * <ol>
- * <li>Upload a config set</li>
- * <li>verifies a collection can be created</li>
- * <li>verifies many things that should succeed/fail when communicating with the cluster according to the specified sslConfig</li>
- * <li>shutdown a server &amp; startup a new one in it's place</li>
- * <li>repeat the verifications of ssl / no-ssl communication</li>
- * <li>create a second collection</li>
+ *   <li>Upload a config set
+ *   <li>verifies a collection can be created
+ *   <li>verifies many things that should succeed/fail when communicating with the cluster
+ *       according to the specified sslConfig
+ *   <li>shutdown a server &amp; startup a new one in it's place
+ *   <li>repeat the verifications of ssl / no-ssl communication
+ *   <li>create a second collection
 * </ol>
+ * * @see #CONF_NAME * @see #NUM_SERVERS */ - public static void checkClusterWithCollectionCreations(final MiniSolrCloudCluster cluster, - final SSLTestConfig sslConfig) throws Exception { + public static void checkClusterWithCollectionCreations( + final MiniSolrCloudCluster cluster, final SSLTestConfig sslConfig) throws Exception { + + cluster.uploadConfigSet( + SolrTestCaseJ4.TEST_PATH().resolve("collection1").resolve("conf"), CONF_NAME); - cluster.uploadConfigSet(SolrTestCaseJ4.TEST_PATH().resolve("collection1").resolve("conf"), CONF_NAME); - checkCreateCollection(cluster, "first_collection"); - + checkClusterJettys(cluster, sslConfig); - + // shut down a server JettySolrRunner stoppedServer = cluster.stopJettySolrRunner(0); cluster.waitForJettyToStop(stoppedServer); assertTrue(stoppedServer.isStopped()); assertEquals(NUM_SERVERS - 1, cluster.getJettySolrRunners().size()); - + // create a new server JettySolrRunner startedServer = cluster.startJettySolrRunner(); cluster.waitForAllNodes(30); assertTrue(startedServer.isRunning()); assertEquals(NUM_SERVERS, cluster.getJettySolrRunners().size()); - + checkClusterJettys(cluster, sslConfig); - + checkCreateCollection(cluster, "second_collection"); } - + /** * Verify that we can create a collection that involves one replica per node using the * CloudSolrClient available for the cluster */ - private static void checkCreateCollection(final MiniSolrCloudCluster cluster, - final String collection) throws Exception { + private static void checkCreateCollection( + final MiniSolrCloudCluster cluster, final String collection) throws Exception { final CloudSolrClient cloudClient = cluster.getSolrClient(); CollectionAdminRequest.createCollection(collection, CONF_NAME, NUM_SERVERS, 1) .withProperty("config", "solrconfig-tlog.xml") .process(cloudClient); cluster.waitForActiveCollection(collection, NUM_SERVERS, NUM_SERVERS); - assertEquals("sanity query", 0, cloudClient.query(collection, params("q","*:*")).getStatus()); + assertEquals("sanity query", 0, cloudClient.query(collection, params("q", "*:*")).getStatus()); } - - /** + + /** * verify that we can query all of the Jetty instances the specified cluster using the expected - * options (based on the sslConfig), and that we can NOT query the Jetty instances in + * options (based on the sslConfig), and that we can NOT query the Jetty instances in * specified cluster in the ways that should fail (based on the sslConfig) * * @see #getRandomizedHttpSolrClient */ - private static void checkClusterJettys(final MiniSolrCloudCluster cluster, - final SSLTestConfig sslConfig) throws Exception { + private static void checkClusterJettys( + final MiniSolrCloudCluster cluster, final SSLTestConfig sslConfig) throws Exception { final boolean ssl = sslConfig.isSSLMode(); List jettys = cluster.getJettySolrRunners(); @@ -293,14 +309,13 @@ private static void checkClusterJettys(final MiniSolrCloudCluster cluster, // basic base URL sanity checks assertTrue("WTF baseURL: " + baseURL, null != baseURL && 10 < baseURL.length()); - assertEquals("http vs https: " + baseURL, - ssl ? "https" : "http:", baseURL.substring(0,5)); - + assertEquals("http vs https: " + baseURL, ssl ? 
"https" : "http:", baseURL.substring(0, 5)); + // verify solr client success with expected protocol try (HttpSolrClient client = getRandomizedHttpSolrClient(baseURL)) { assertEquals(0, CoreAdminRequest.getStatus(/* all */ null, client).getStatus()); } - + // sanity check the HttpClient used under the hood by our the cluster's CloudSolrClient // ensure it has the necessary protocols/credentials for each jetty server // @@ -310,88 +325,102 @@ private static void checkClusterJettys(final MiniSolrCloudCluster cluster, assertEquals(0, CoreAdminRequest.getStatus(/* all */ null, client).getStatus()); } - final String wrongBaseURL = baseURL.replaceFirst((ssl ? "https://" : "http://"), - (ssl ? "http://" : "https://")); - + final String wrongBaseURL = + baseURL.replaceFirst((ssl ? "https://" : "http://"), (ssl ? "http://" : "https://")); + // verify solr client using wrong protocol can't talk to server - expectThrows(SolrServerException.class, () -> { - try (HttpSolrClient client = getRandomizedHttpSolrClient(wrongBaseURL)) { - CoreAdminRequest req = new CoreAdminRequest(); - req.setAction( CoreAdminAction.STATUS ); - client.request(req); - } - }); - - if (! sslConfig.isClientAuthMode()) { + expectThrows( + SolrServerException.class, + () -> { + try (HttpSolrClient client = getRandomizedHttpSolrClient(wrongBaseURL)) { + CoreAdminRequest req = new CoreAdminRequest(); + req.setAction(CoreAdminAction.STATUS); + client.request(req); + } + }); + + if (!sslConfig.isClientAuthMode()) { // verify simple HTTP(S) client can't do HEAD request for URL with wrong protocol try (CloseableHttpClient client = getSslAwareClientWithNoClientCerts()) { final String wrongUrl = wrongBaseURL + "/admin/cores"; - // vastly diff exception details between plain http vs https, not worried about details here - expectThrows(IOException.class, () -> { - doHeadRequest(client, wrongUrl); - }); + // vastly diff exception details between plain http vs https, not worried about details + // here + expectThrows( + IOException.class, + () -> { + doHeadRequest(client, wrongUrl); + }); } } - + if (ssl) { - // verify expected results for a HEAD request to valid URL from HTTP(S) client w/o client certs + // verify expected results for a HEAD request to valid URL from HTTP(S) client w/o client + // certs try (CloseableHttpClient client = getSslAwareClientWithNoClientCerts()) { final String url = baseURL + "/admin/cores"; if (sslConfig.isClientAuthMode()) { // w/o a valid client cert, SSL connection should fail - expectThrows(IOException.class, () -> { - doHeadRequest(client, url); - }); + expectThrows( + IOException.class, + () -> { + doHeadRequest(client, url); + }); } else { - assertEquals("Wrong status for head request ("+url+") when clientAuth=" - + sslConfig.isClientAuthMode(), - 200, doHeadRequest(client, url)); + assertEquals( + "Wrong status for head request (" + + url + + ") when clientAuth=" + + sslConfig.isClientAuthMode(), + 200, + doHeadRequest(client, url)); } } } - } } - /** - * Trivial helper method for doing a HEAD request of the specified URL using the specified client + /** + * Trivial helper method for doing a HEAD request of the specified URL using the specified client * and getting the HTTP statusCode from the response */ - private static int doHeadRequest(final CloseableHttpClient client, final String url) throws Exception { + private static int doHeadRequest(final CloseableHttpClient client, final String url) + throws Exception { return client.execute(new HttpHead(url)).getStatusLine().getStatusCode(); } - + /** - * 
Returns a new HttpClient that supports both HTTP and HTTPS (with the default test truststore), but - * has no keystore -- so servers requiring client authentication should fail. + * Returns a new HttpClient that supports both HTTP and HTTPS (with the default test truststore), + * but has no keystore -- so servers requiring client authentication should fail. */ private static CloseableHttpClient getSslAwareClientWithNoClientCerts() throws Exception { - + // NOTE: This method explicitly does *NOT* use HttpClientUtil code because that // will muck with the global static HttpClientBuilder / SchemeRegistryProvider // and we can't do that and still test the entire purpose of what we are trying to test here. final SSLTestConfig clientConfig = new SSLTestConfig(true, false); - - final SSLConnectionSocketFactory sslFactory = clientConfig.buildClientSSLConnectionSocketFactory(); + + final SSLConnectionSocketFactory sslFactory = + clientConfig.buildClientSSLConnectionSocketFactory(); assert null != sslFactory; - final Registry socketFactoryReg = - RegistryBuilder. create() - .register("https", sslFactory) - .register("http", PlainConnectionSocketFactory.INSTANCE ) - .build(); - + final Registry socketFactoryReg = + RegistryBuilder.create() + .register("https", sslFactory) + .register("http", PlainConnectionSocketFactory.INSTANCE) + .build(); + final HttpClientBuilder builder = HttpClientBuilder.create(); builder.setConnectionManager(new PoolingHttpClientConnectionManager(socketFactoryReg)); return builder.build(); } - /** - * Generates an HttpSolrClient, either by using the test framework helper method or by direct + /** + * Generates an HttpSolrClient, either by using the test framework helper method or by direct * instantiation (determined randomly) + * * @see #getHttpSolrClient */ public static HttpSolrClient getRandomizedHttpSolrClient(String url) { @@ -400,9 +429,9 @@ public static HttpSolrClient getRandomizedHttpSolrClient(String url) { // 1) a direct test that "new HttpSolrClient" works given the current JVM/sysprop defaults // 2) a sanity check that whatever getHttpSolrClient(String) returns will work regardless of // current test configuration. - // ... so we are hopefully future proofing against possible changes to SolrTestCaseJ4.getHttpSolrClient - // that "optimize" the test client construction in a way that would prevent us from finding bugs with - // regular HttpSolrClient instantiation. + // ... so we are hopefully future proofing against possible changes to + // SolrTestCaseJ4.getHttpSolrClient that "optimize" the test client construction in a way that + // would prevent us from finding bugs with regular HttpSolrClient instantiation. if (random().nextBoolean()) { return (new HttpSolrClient.Builder(url)).build(); } // else... 
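Aside (editor's illustration, not part of the patch): the wrong-protocol check in checkClusterJettys above boils down to flipping the scheme of each Jetty base URL and asserting that a client built against the flipped URL fails. A minimal, self-contained sketch of that URL manipulation follows; the class name, helper name, and sample endpoint are hypothetical placeholders, not taken from the test.

// Sketch only: mirrors the wrongBaseURL computation (replaceFirst on the
// scheme) that checkClusterJettys performs. Pure JDK, no Solr dependencies.
public class WrongSchemeSketch {

  // Swap http:// and https:// on a base URL.
  static String flipScheme(String baseUrl) {
    return baseUrl.startsWith("https://")
        ? baseUrl.replaceFirst("https://", "http://")
        : baseUrl.replaceFirst("http://", "https://");
  }

  public static void main(String[] args) {
    String baseUrl = "https://127.0.0.1:8983/solr"; // hypothetical endpoint
    // In the test, a client pointed at the flipped URL is expected to fail,
    // asserted via expectThrows(SolrServerException.class, ...).
    System.out.println(flipScheme(baseUrl)); // prints http://127.0.0.1:8983/solr
  }
}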
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestOnReconnectListenerSupport.java b/solr/core/src/test/org/apache/solr/cloud/TestOnReconnectListenerSupport.java index a2039cdb331..0567e0ae71f 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestOnReconnectListenerSupport.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestOnReconnectListenerSupport.java @@ -17,10 +17,11 @@ package org.apache.solr.cloud; +import static org.apache.solr.common.cloud.ZkStateReader.CORE_NAME_PROP; + import java.lang.invoke.MethodHandles; import java.util.Set; import java.util.concurrent.TimeUnit; - import org.apache.solr.SolrTestCaseJ4.SuppressSSL; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -34,8 +35,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.common.cloud.ZkStateReader.CORE_NAME_PROP; - @SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776") public class TestOnReconnectListenerSupport extends AbstractFullDistribZkTestBase { @@ -78,8 +77,8 @@ public void test() throws Exception { String leaderCoreName = leader.getStr(CORE_NAME_PROP); String leaderCoreId; try (SolrCore leaderCore = cores.getCore(leaderCoreName)) { - assertNotNull("SolrCore for "+leaderCoreName+" not found!", leaderCore); - leaderCoreId = leaderCore.getName()+":"+leaderCore.getStartNanoTime(); + assertNotNull("SolrCore for " + leaderCoreName + " not found!", leaderCore); + leaderCoreId = leaderCore.getName() + ":" + leaderCore.getStartNanoTime(); } // verify the ZkIndexSchemaReader is a registered OnReconnect listener @@ -88,25 +87,32 @@ public void test() throws Exception { ZkIndexSchemaReader expectedListener = null; for (OnReconnect listener : listeners) { if (listener instanceof ZkIndexSchemaReader) { - ZkIndexSchemaReader reader = (ZkIndexSchemaReader)listener; + ZkIndexSchemaReader reader = (ZkIndexSchemaReader) listener; if (leaderCoreId.equals(reader.getUniqueCoreId())) { expectedListener = reader; break; } } } - assertNotNull("ZkIndexSchemaReader for core " + leaderCoreName + - " not registered as an OnReconnect listener and should be", expectedListener); + assertNotNull( + "ZkIndexSchemaReader for core " + + leaderCoreName + + " not registered as an OnReconnect listener and should be", + expectedListener); // reload the collection boolean wasReloaded = reloadCollection(leader, testCollectionName); - assertTrue("Collection '" + testCollectionName + "' failed to reload within a reasonable amount of time!", + assertTrue( + "Collection '" + + testCollectionName + + "' failed to reload within a reasonable amount of time!", wasReloaded); - // after reload, the new core should be registered as an OnReconnect listener and the old should not be + // after reload, the new core should be registered as an OnReconnect listener and the old should + // not be String reloadedLeaderCoreId; try (SolrCore leaderCore = cores.getCore(leaderCoreName)) { - reloadedLeaderCoreId = leaderCore.getName()+":"+leaderCore.getStartNanoTime(); + reloadedLeaderCoreId = leaderCore.getName() + ":" + leaderCore.getStartNanoTime(); } // they shouldn't be equal after reload @@ -118,10 +124,13 @@ public void test() throws Exception { expectedListener = null; // reset for (OnReconnect listener : listeners) { if (listener instanceof ZkIndexSchemaReader) { - ZkIndexSchemaReader reader = (ZkIndexSchemaReader)listener; + ZkIndexSchemaReader reader = (ZkIndexSchemaReader) listener; if 
(leaderCoreId.equals(reader.getUniqueCoreId())) { - fail("Previous core "+leaderCoreId+ - " should no longer be a registered OnReconnect listener! Current listeners: "+listeners); + fail( + "Previous core " + + leaderCoreId + + " should no longer be a registered OnReconnect listener! Current listeners: " + + listeners); } else if (reloadedLeaderCoreId.equals(reader.getUniqueCoreId())) { expectedListener = reader; break; @@ -129,8 +138,11 @@ public void test() throws Exception { } } - assertNotNull("ZkIndexSchemaReader for core "+reloadedLeaderCoreId+ - " not registered as an OnReconnect listener and should be", expectedListener); + assertNotNull( + "ZkIndexSchemaReader for core " + + reloadedLeaderCoreId + + " not registered as an OnReconnect listener and should be", + expectedListener); // try to clean up try { @@ -143,10 +155,12 @@ public void test() throws Exception { listeners = zkController.getCurrentOnReconnectListeners(); for (OnReconnect listener : listeners) { if (listener instanceof ZkIndexSchemaReader) { - ZkIndexSchemaReader reader = (ZkIndexSchemaReader)listener; + ZkIndexSchemaReader reader = (ZkIndexSchemaReader) listener; if (reloadedLeaderCoreId.equals(reader.getUniqueCoreId())) { - fail("Previous core "+reloadedLeaderCoreId+ - " should no longer be a registered OnReconnect listener after collection delete!"); + fail( + "Previous core " + + reloadedLeaderCoreId + + " should no longer be a registered OnReconnect listener after collection delete!"); } } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestPrepRecovery.java b/solr/core/src/test/org/apache/solr/cloud/TestPrepRecovery.java index e593c63df67..2f8488822b7 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestPrepRecovery.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestPrepRecovery.java @@ -18,7 +18,6 @@ package org.apache.solr.cloud; import java.util.concurrent.TimeUnit; - import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -28,9 +27,7 @@ import org.junit.BeforeClass; import org.junit.Test; -/** - * Tests for PREPRECOVERY CoreAdmin API - */ +/** Tests for PREPRECOVERY CoreAdmin API */ public class TestPrepRecovery extends SolrCloudTestCase { @BeforeClass @@ -41,9 +38,10 @@ public static void setupCluster() throws Exception { // so we lower this so that we can still test timeouts System.setProperty("leaderConflictResolveWait", "5000"); System.setProperty("prepRecoveryReadTimeoutExtraWait", "1000"); - + configureCluster(2) - .addConfig("config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .withSolrXml(TEST_PATH().resolve("solr.xml")) .configure(); } @@ -58,11 +56,12 @@ public void testLeaderUnloaded() throws Exception { CloudSolrClient solrClient = cluster.getSolrClient(); String collectionName = "testLeaderUnloaded"; - CollectionAdminRequest.createCollection(collectionName, 1, 2) - .process(solrClient); + CollectionAdminRequest.createCollection(collectionName, 1, 2).process(solrClient); - waitForState("Expected collection: testLeaderUnloaded to be live with 1 shard and 2 replicas", - collectionName, clusterShape(1, 2)); + waitForState( + "Expected collection: testLeaderUnloaded to be live with 1 shard and 2 replicas", + collectionName, + clusterShape(1, 2)); JettySolrRunner newNode = cluster.startJettySolrRunner(); String 
newNodeName = newNode.getNodeName(); @@ -77,15 +76,17 @@ public void testLeaderUnloaded() throws Exception { CollectionAdminRequest.deleteReplica(collectionName, "shard1", leader.getName()) .process(solrClient); - // add another replica to the new node. When it starts recovering, it will likely have stale state - // and ask the erstwhile leader to PREPRECOVERY which will hang for about 30 seconds + // add another replica to the new node. When it starts recovering, it will likely have stale + // state and ask the erstwhile leader to PREPRECOVERY which will hang for about 30 seconds CollectionAdminRequest.addReplicaToShard(collectionName, "shard1") .setNode(newNodeName) .process(solrClient); // in the absence of the fixes made in SOLR-10914, this statement will timeout after 90s - waitForState("Expected collection: testLeaderUnloaded to be live with 1 shard and 3 replicas", - collectionName, clusterShape(1, 3)); + waitForState( + "Expected collection: testLeaderUnloaded to be live with 1 shard and 3 replicas", + collectionName, + clusterShape(1, 3)); } @Test @@ -93,21 +94,25 @@ public void testLeaderNotResponding() throws Exception { CloudSolrClient solrClient = cluster.getSolrClient(); String collectionName = "testLeaderNotResponding"; - CollectionAdminRequest.createCollection(collectionName, 1, 1) - .process(solrClient); + CollectionAdminRequest.createCollection(collectionName, 1, 1).process(solrClient); - waitForState("Expected collection: testLeaderNotResponding to be live with 1 shard and 1 replicas", - collectionName, clusterShape(1, 1)); + waitForState( + "Expected collection: testLeaderNotResponding to be live with 1 shard and 1 replicas", + collectionName, + clusterShape(1, 1)); TestInjection.prepRecoveryOpPauseForever = "true:100"; try { - CollectionAdminRequest.addReplicaToShard(collectionName, "shard1") - .process(solrClient); - - // in the absence of fixes made in SOLR-9716, prep recovery waits forever and the following statement - // times out - waitForState("Expected collection: testLeaderNotResponding to be live with 1 shard and 2 replicas", - collectionName, clusterShape(1, 2), 30, TimeUnit.SECONDS); + CollectionAdminRequest.addReplicaToShard(collectionName, "shard1").process(solrClient); + + // in the absence of fixes made in SOLR-9716, prep recovery waits forever and the following + // statement times out + waitForState( + "Expected collection: testLeaderNotResponding to be live with 1 shard and 2 replicas", + collectionName, + clusterShape(1, 2), + 30, + TimeUnit.SECONDS); } finally { TestInjection.prepRecoveryOpPauseForever = null; TestInjection.notifyPauseForeverDone(); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java b/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java index caf6aa9f875..9563c657e2b 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java @@ -16,6 +16,7 @@ */ package org.apache.solr.cloud; +import com.carrotsearch.randomizedtesting.annotations.Repeat; import java.io.IOException; import java.lang.invoke.MethodHandles; import java.util.ArrayList; @@ -27,8 +28,6 @@ import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; - -import com.carrotsearch.randomizedtesting.annotations.Repeat; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpGet; @@ -66,24 +65,29 @@ import org.slf4j.LoggerFactory; @Slow 
-@LogLevel("org.apache.solr.handler.ReplicationHandler=DEBUG;org.apache.solr.handler.IndexFetcher=DEBUG") +@LogLevel( + "org.apache.solr.handler.ReplicationHandler=DEBUG;org.apache.solr.handler.IndexFetcher=DEBUG") public class TestPullReplica extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private String collectionName = null; - private final static int REPLICATION_TIMEOUT_SECS = 30; + private static final int REPLICATION_TIMEOUT_SECS = 30; private String suggestedCollectionName() { - return (getTestClass().getSimpleName().replace("Test", "") + "_" + getSaferTestName().split(" ")[0]).replaceAll("(.)(\\p{Upper})", "$1_$2").toLowerCase(Locale.ROOT); + return (getTestClass().getSimpleName().replace("Test", "") + + "_" + + getSaferTestName().split(" ")[0]) + .replaceAll("(.)(\\p{Upper})", "$1_$2") + .toLowerCase(Locale.ROOT); } @BeforeClass public static void createTestCluster() throws Exception { - System.setProperty("cloudSolrClientMaxStaleRetries", "1"); - System.setProperty("zkReaderGetLeaderRetryTimeoutMs", "1000"); + System.setProperty("cloudSolrClientMaxStaleRetries", "1"); + System.setProperty("zkReaderGetLeaderRetryTimeoutMs", "1000"); - configureCluster(2) // 2 + random().nextInt(3) + configureCluster(2) // 2 + random().nextInt(3) .addConfig("conf", configset("cloud-minimal")) .configure(); } @@ -105,13 +109,18 @@ public void setUp() throws Exception { @Override public void tearDown() throws Exception { - for (JettySolrRunner jetty:cluster.getJettySolrRunners()) { + for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { if (!jetty.isRunning()) { log.warn("Jetty {} not running, probably some bad test. Starting it", jetty.getLocalPort()); jetty.start(); } } - if (cluster.getSolrClient().getZkStateReader().getClusterState().getCollectionOrNull(collectionName) != null) { + if (cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollectionOrNull(collectionName) + != null) { log.info("tearDown deleting collection"); CollectionAdminRequest.deleteCollection(collectionName).process(cluster.getSolrClient()); log.info("Collection deleted"); @@ -120,38 +129,52 @@ public void tearDown() throws Exception { super.tearDown(); } - @Repeat(iterations=2) // 2 times to make sure cleanup is complete and we can create the same collection + // 2 times to make sure cleanup is complete and we can create the same collection + @Repeat(iterations = 2) public void testCreateDelete() throws Exception { try { switch (random().nextInt(3)) { case 0: // Sometimes use SolrJ CollectionAdminRequest.createCollection(collectionName, "conf", 2, 1, 0, 3) - .process(cluster.getSolrClient()); + .process(cluster.getSolrClient()); break; case 1: // Sometimes use v1 API - String url = String.format(Locale.ROOT, "%s/admin/collections?action=CREATE&name=%s&collection.configName=%s&numShards=%s&pullReplicas=%s", - cluster.getRandomJetty(random()).getBaseUrl(), - collectionName, "conf", - 2, // numShards - 3); // pullReplicas - url = url + pickRandom("", "&nrtReplicas=1", "&replicationFactor=1"); // These options should all mean the same + String url = + String.format( + Locale.ROOT, + "%s/admin/collections?action=CREATE&name=%s&collection.configName=%s&numShards=%s&pullReplicas=%s", + cluster.getRandomJetty(random()).getBaseUrl(), + collectionName, + "conf", + 2, // numShards + 3); // pullReplicas + // These options should all mean the same + url = url + pickRandom("", "&nrtReplicas=1", "&replicationFactor=1"); HttpGet 
createCollectionGet = new HttpGet(url); cluster.getSolrClient().getHttpClient().execute(createCollectionGet); break; case 2: // Sometimes use V2 API url = cluster.getRandomJetty(random()).getBaseUrl().toString() + "/____v2/c"; - String requestBody = String.format(Locale.ROOT, "{create:{name:%s, config:%s, numShards:%s, pullReplicas:%s, %s}}", - collectionName, "conf", - 2, // numShards - 3, // pullReplicas - pickRandom("", ", nrtReplicas:1", ", replicationFactor:1")); // These options should all mean the same + String requestBody = + String.format( + Locale.ROOT, + "{create:{name:%s, config:%s, numShards:%s, pullReplicas:%s, %s}}", + collectionName, + "conf", + 2, // numShards + 3, // pullReplicas + pickRandom( + "", + ", nrtReplicas:1", + ", replicationFactor:1")); // These options should all mean the same HttpPost createCollectionPost = new HttpPost(url); createCollectionPost.setHeader("Content-type", "application/json"); createCollectionPost.setEntity(new StringEntity(requestBody)); - HttpResponse httpResponse = cluster.getSolrClient().getHttpClient().execute(createCollectionPost); + HttpResponse httpResponse = + cluster.getSolrClient().getHttpClient().execute(createCollectionPost); assertEquals(200, httpResponse.getStatusLine().getStatusCode()); break; } @@ -159,26 +182,41 @@ public void testCreateDelete() throws Exception { while (true) { DocCollection docCollection = getCollectionState(collectionName); assertNotNull(docCollection); - assertEquals("Expecting 4 relpicas per shard", - 8, docCollection.getReplicas().size()); - assertEquals("Expecting 6 pull replicas, 3 per shard", - 6, docCollection.getReplicas(EnumSet.of(Replica.Type.PULL)).size()); - assertEquals("Expecting 2 writer replicas, one per shard", - 2, docCollection.getReplicas(EnumSet.of(Replica.Type.NRT)).size()); - for (Slice s:docCollection.getSlices()) { + assertEquals("Expecting 4 relpicas per shard", 8, docCollection.getReplicas().size()); + assertEquals( + "Expecting 6 pull replicas, 3 per shard", + 6, + docCollection.getReplicas(EnumSet.of(Replica.Type.PULL)).size()); + assertEquals( + "Expecting 2 writer replicas, one per shard", + 2, + docCollection.getReplicas(EnumSet.of(Replica.Type.NRT)).size()); + for (Slice s : docCollection.getSlices()) { // read-only replicas can never become leaders assertNotSame(s.getLeader().getType(), Replica.Type.PULL); - List shardElectionNodes = cluster.getZkClient().getChildren(ZkStateReader.getShardLeadersElectPath(collectionName, s.getName()), null, true); - assertEquals("Unexpected election nodes for Shard: " + s.getName() + ": " + Arrays.toString(shardElectionNodes.toArray()), - 1, shardElectionNodes.size()); + List shardElectionNodes = + cluster + .getZkClient() + .getChildren( + ZkStateReader.getShardLeadersElectPath(collectionName, s.getName()), + null, + true); + assertEquals( + "Unexpected election nodes for Shard: " + + s.getName() + + ": " + + Arrays.toString(shardElectionNodes.toArray()), + 1, + shardElectionNodes.size()); } assertUlogPresence(docCollection); if (reloaded) { break; } else { // reload - CollectionAdminResponse response = CollectionAdminRequest.reloadCollection(collectionName) - .process(cluster.getSolrClient()); + CollectionAdminResponse response = + CollectionAdminRequest.reloadCollection(collectionName) + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); reloaded = true; } @@ -189,17 +227,21 @@ public void testCreateDelete() throws Exception { } /** - * Asserts that Update logs don't exist for replicas of type {@link 
org.apache.solr.common.cloud.Replica.Type#PULL} + * Asserts that Update logs don't exist for replicas of type {@link + * org.apache.solr.common.cloud.Replica.Type#PULL} */ static void assertUlogPresence(DocCollection collection) { - for (Slice s:collection.getSlices()) { - for (Replica r:s.getReplicas()) { + for (Slice s : collection.getSlices()) { + for (Replica r : s.getReplicas()) { if (r.getType() == Replica.Type.NRT) { continue; } - try (SolrCore core = cluster.getReplicaJetty(r).getCoreContainer().getCore(r.getCoreName())) { + try (SolrCore core = + cluster.getReplicaJetty(r).getCoreContainer().getCore(r.getCoreName())) { assertNotNull(core); - assertFalse("Update log should not exist for replicas of type Passive but file is present: " + core.getUlogDir(), + assertFalse( + "Update log should not exist for replicas of type Passive but file is present: " + + core.getUlogDir(), new java.io.File(core.getUlogDir()).exists()); } } @@ -210,20 +252,25 @@ static void assertUlogPresence(DocCollection collection) { public void testAddDocs() throws Exception { int numPullReplicas = 1 + random().nextInt(3); CollectionAdminRequest.createCollection(collectionName, "conf", 1, 1, 0, numPullReplicas) - .process(cluster.getSolrClient()); - waitForState("Expected collection to be created with 1 shard and " + (numPullReplicas + 1) + " replicas", - collectionName, clusterShape(1, numPullReplicas + 1)); + .process(cluster.getSolrClient()); + waitForState( + "Expected collection to be created with 1 shard and " + (numPullReplicas + 1) + " replicas", + collectionName, + clusterShape(1, numPullReplicas + 1)); DocCollection docCollection = assertNumberOfReplicas(1, 0, numPullReplicas, false, true); assertEquals(1, docCollection.getSlices().size()); - // ugly but needed to ensure a full PULL replication cycle (every sec) has occurred on the replicas before adding docs + // ugly but needed to ensure a full PULL replication cycle (every sec) has occurred on the + // replicas before adding docs Thread.sleep(1500); boolean reloaded = false; int numDocs = 0; while (true) { numDocs++; - cluster.getSolrClient().add(collectionName, new SolrInputDocument("id", String.valueOf(numDocs), "foo", "bar")); + cluster + .getSolrClient() + .add(collectionName, new SolrInputDocument("id", String.valueOf(numDocs), "foo", "bar")); cluster.getSolrClient().commit(collectionName); log.info("Committed doc {} to leader", numDocs); @@ -231,10 +278,15 @@ public void testAddDocs() throws Exception { try (HttpSolrClient leaderClient = getHttpSolrClient(s.getLeader().getCoreUrl())) { assertEquals(numDocs, leaderClient.query(new SolrQuery("*:*")).getResults().getNumFound()); } - log.info("Found {} docs in leader, verifying updates make it to {} pull replicas", numDocs, numPullReplicas); + log.info( + "Found {} docs in leader, verifying updates make it to {} pull replicas", + numDocs, + numPullReplicas); List pullReplicas = - (numDocs == 1) ? restartPullReplica(docCollection, numPullReplicas) : s.getReplicas(EnumSet.of(Replica.Type.PULL)); + (numDocs == 1) + ? 
restartPullReplica(docCollection, numPullReplicas) + : s.getReplicas(EnumSet.of(Replica.Type.PULL)); waitForNumDocsInAllReplicas(numDocs, pullReplicas); for (Replica r : pullReplicas) { @@ -242,8 +294,12 @@ public void testAddDocs() throws Exception { SolrQuery req = new SolrQuery("qt", "/admin/plugins", "stats", "true"); QueryResponse statsResponse = pullReplicaClient.query(req); // The adds gauge metric should be null for pull replicas since they don't process adds - assertNull("Replicas shouldn't process the add document request: " + statsResponse, - ((Map)(statsResponse.getResponse()).findRecursive("plugins", "UPDATE", "updateHandler", "stats")).get("UPDATE.updateHandler.adds")); + assertNull( + "Replicas shouldn't process the add document request: " + statsResponse, + ((Map) + (statsResponse.getResponse()) + .findRecursive("plugins", "UPDATE", "updateHandler", "stats")) + .get("UPDATE.updateHandler.adds")); } } @@ -251,8 +307,9 @@ public void testAddDocs() throws Exception { break; } else { // reload - CollectionAdminResponse response = CollectionAdminRequest.reloadCollection(collectionName) - .process(cluster.getSolrClient()); + CollectionAdminResponse response = + CollectionAdminRequest.reloadCollection(collectionName) + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); reloaded = true; } @@ -260,7 +317,8 @@ public void testAddDocs() throws Exception { assertUlogPresence(docCollection); } - private List restartPullReplica(DocCollection docCollection, int numPullReplicas) throws Exception { + private List restartPullReplica(DocCollection docCollection, int numPullReplicas) + throws Exception { Slice s = docCollection.getSlices().iterator().next(); List pullReplicas = s.getReplicas(EnumSet.of(Replica.Type.PULL)); @@ -279,22 +337,34 @@ private List restartPullReplica(DocCollection docCollection, int numPul if (replicaJetty != null) { replicaJetty.stop(); cluster.waitForJettyToStop(replicaJetty); - waitForState("Expected to see a downed PULL replica", collectionName, clusterStateReflectsActiveAndDownReplicas()); + waitForState( + "Expected to see a downed PULL replica", + collectionName, + clusterStateReflectsActiveAndDownReplicas()); replicaJetty.start(); - waitForState("Expected collection to have recovered with 1 shard and " + (numPullReplicas + 1) + " replicas after restarting " + replicaJetty.getNodeName(), - collectionName, clusterShape(1, numPullReplicas + 1)); + waitForState( + "Expected collection to have recovered with 1 shard and " + + (numPullReplicas + 1) + + " replicas after restarting " + + replicaJetty.getNodeName(), + collectionName, + clusterShape(1, numPullReplicas + 1)); docCollection = assertNumberOfReplicas(1, 0, numPullReplicas, false, true); s = docCollection.getSlices().iterator().next(); pullReplicas = s.getReplicas(EnumSet.of(Replica.Type.PULL)); - } // else it's ok if all replicas ended up on the same node, we're not testing replica placement here, but skip this part of the test + } // else it's ok if all replicas ended up on the same node, we're not testing replica placement + // here, but skip this part of the test return pullReplicas; } public void testAddRemovePullReplica() throws Exception { CollectionAdminRequest.createCollection(collectionName, "conf", 2, 1, 0, 0) - .process(cluster.getSolrClient()); - waitForState("Expected collection to be created with 2 shards and 1 replica each", collectionName, clusterShape(2, 2)); + .process(cluster.getSolrClient()); + waitForState( + "Expected collection to be created with 2 shards and 1 
replica each", + collectionName, + clusterShape(2, 2)); DocCollection docCollection = assertNumberOfReplicas(2, 0, 0, false, true); assertEquals(2, docCollection.getSlices().size()); @@ -303,14 +373,21 @@ public void testAddRemovePullReplica() throws Exception { addReplicaToShard("shard2", Replica.Type.PULL); docCollection = assertNumberOfReplicas(2, 0, 2, true, false); - waitForState("Expecting collection to have 2 shards and 2 replica each", collectionName, clusterShape(2, 4)); + waitForState( + "Expecting collection to have 2 shards and 2 replica each", + collectionName, + clusterShape(2, 4)); - //Delete pull replica from shard1 + // Delete pull replica from shard1 CollectionAdminRequest.deleteReplica( - collectionName, - "shard1", - docCollection.getSlice("shard1").getReplicas(EnumSet.of(Replica.Type.PULL)).get(0).getName()) - .process(cluster.getSolrClient()); + collectionName, + "shard1", + docCollection + .getSlice("shard1") + .getReplicas(EnumSet.of(Replica.Type.PULL)) + .get(0) + .getName()) + .process(cluster.getSolrClient()); assertNumberOfReplicas(2, 0, 1, true, true); } @@ -326,55 +403,79 @@ public void testKillLeader() throws Exception { @Ignore("Ignore until I figure out a way to reliably record state transitions") public void testPullReplicaStates() throws Exception { - // Validate that pull replicas go through the correct states when starting, stopping, reconnecting + // Validate that pull replicas go through the correct states when starting, stopping, + // reconnecting CollectionAdminRequest.createCollection(collectionName, "conf", 1, 1, 0, 0) - .process(cluster.getSolrClient()); -// cluster.getSolrClient().getZkStateReader().registerCore(collectionName); //TODO: Is this needed? + .process(cluster.getSolrClient()); + // cluster.getSolrClient().getZkStateReader().registerCore(collectionName); //TODO: Is this + // needed? 
waitForState("Replica not added", collectionName, activeReplicaCount(1, 0, 0)); addDocs(500); List statesSeen = new ArrayList<>(3); - cluster.getSolrClient().registerCollectionStateWatcher(collectionName, (liveNodes, collectionState) -> { - Replica r = collectionState.getSlice("shard1").getReplica("core_node2"); - log.info("CollectionStateWatcher state change: {}", r); - if (r == null) { - return false; - } - statesSeen.add(r.getState()); - if (log.isInfoEnabled()) { - log.info("CollectionStateWatcher saw state: {}", r.getState()); - } - return r.getState() == Replica.State.ACTIVE; - }); - CollectionAdminRequest.addReplicaToShard(collectionName, "shard1", Replica.Type.PULL).process(cluster.getSolrClient()); + cluster + .getSolrClient() + .registerCollectionStateWatcher( + collectionName, + (liveNodes, collectionState) -> { + Replica r = collectionState.getSlice("shard1").getReplica("core_node2"); + log.info("CollectionStateWatcher state change: {}", r); + if (r == null) { + return false; + } + statesSeen.add(r.getState()); + if (log.isInfoEnabled()) { + log.info("CollectionStateWatcher saw state: {}", r.getState()); + } + return r.getState() == Replica.State.ACTIVE; + }); + CollectionAdminRequest.addReplicaToShard(collectionName, "shard1", Replica.Type.PULL) + .process(cluster.getSolrClient()); waitForState("Replica not added", collectionName, activeReplicaCount(1, 0, 1)); zkClient().printLayoutToStream(System.out); if (log.isInfoEnabled()) { log.info("Saw states: {}", Arrays.toString(statesSeen.toArray())); } - assertEquals("Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), 3, statesSeen.size()); - assertEquals("Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), Replica.State.DOWN, statesSeen.get(0)); - assertEquals("Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), Replica.State.RECOVERING, statesSeen.get(0)); - assertEquals("Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), Replica.State.ACTIVE, statesSeen.get(0)); + assertEquals( + "Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), + 3, + statesSeen.size()); + assertEquals( + "Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), + Replica.State.DOWN, + statesSeen.get(0)); + assertEquals( + "Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), + Replica.State.RECOVERING, + statesSeen.get(0)); + assertEquals( + "Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), + Replica.State.ACTIVE, + statesSeen.get(0)); } - public void testRealTimeGet() throws SolrServerException, IOException, KeeperException, InterruptedException { + public void testRealTimeGet() + throws SolrServerException, IOException, KeeperException, InterruptedException { // should be redirected to Replica.Type.NRT - int numReplicas = random().nextBoolean()?1:2; + int numReplicas = random().nextBoolean() ? 
1 : 2; CollectionAdminRequest.createCollection(collectionName, "conf", 1, numReplicas, 0, numReplicas) - .process(cluster.getSolrClient()); - waitForState("Unexpected replica count", collectionName, activeReplicaCount(numReplicas, 0, numReplicas)); + .process(cluster.getSolrClient()); + waitForState( + "Unexpected replica count", + collectionName, + activeReplicaCount(numReplicas, 0, numReplicas)); DocCollection docCollection = assertNumberOfReplicas(numReplicas, 0, numReplicas, false, true); HttpClient httpClient = cluster.getSolrClient().getHttpClient(); int id = 0; Slice slice = docCollection.getSlice("shard1"); List ids = new ArrayList<>(slice.getReplicas().size()); - for (Replica rAdd:slice.getReplicas()) { + for (Replica rAdd : slice.getReplicas()) { try (HttpSolrClient client = getHttpSolrClient(rAdd.getCoreUrl(), httpClient)) { client.add(new SolrInputDocument("id", String.valueOf(id), "foo_s", "bar")); } - SolrDocument docCloudClient = cluster.getSolrClient().getById(collectionName, String.valueOf(id)); + SolrDocument docCloudClient = + cluster.getSolrClient().getById(collectionName, String.valueOf(id)); assertEquals("bar", docCloudClient.getFieldValue("foo_s")); - for (Replica rGet:slice.getReplicas()) { + for (Replica rGet : slice.getReplicas()) { try (HttpSolrClient client = getHttpSolrClient(rGet.getCoreUrl(), httpClient)) { SolrDocument doc = client.getById(String.valueOf(id)); assertEquals("bar", doc.getFieldValue("foo_s")); @@ -384,7 +485,7 @@ public void testRealTimeGet() throws SolrServerException, IOException, KeeperExc id++; } SolrDocumentList previousAllIdsResult = null; - for (Replica rAdd:slice.getReplicas()) { + for (Replica rAdd : slice.getReplicas()) { try (HttpSolrClient client = getHttpSolrClient(rAdd.getCoreUrl(), httpClient)) { SolrDocumentList allIdsResult = client.getById(ids); if (previousAllIdsResult != null) { @@ -405,8 +506,11 @@ public void testRealTimeGet() throws SolrServerException, IOException, KeeperExc @SuppressWarnings({"try"}) private void doTestNoLeader(boolean removeReplica) throws Exception { CollectionAdminRequest.createCollection(collectionName, "conf", 1, 1, 0, 1) - .process(cluster.getSolrClient()); - waitForState("Expected collection to be created with 1 shard and 2 replicas", collectionName, clusterShape(1, 2)); + .process(cluster.getSolrClient()); + waitForState( + "Expected collection to be created with 1 shard and 2 replicas", + collectionName, + clusterShape(1, 2)); DocCollection docCollection = assertNumberOfReplicas(1, 0, 1, false, true); // Add a document and commit @@ -420,65 +524,80 @@ private void doTestNoLeader(boolean removeReplica) throws Exception { waitForNumDocsInAllReplicas(1, docCollection.getReplicas(EnumSet.of(Replica.Type.PULL))); // Delete leader replica from shard1 - ignoreException("No registered leader was found"); //These are expected + ignoreException("No registered leader was found"); // These are expected JettySolrRunner leaderJetty = null; if (removeReplica) { - CollectionAdminRequest.deleteReplica( - collectionName, - "shard1", - s.getLeader().getName()) - .process(cluster.getSolrClient()); + CollectionAdminRequest.deleteReplica(collectionName, "shard1", s.getLeader().getName()) + .process(cluster.getSolrClient()); } else { leaderJetty = cluster.getReplicaJetty(s.getLeader()); leaderJetty.stop(); waitForState("Leader replica not removed", collectionName, clusterShape(1, 1)); // Wait for cluster state to be updated - waitForState("Replica state not updated in cluster state", - collectionName, 
clusterStateReflectsActiveAndDownReplicas()); + waitForState( + "Replica state not updated in cluster state", + collectionName, + clusterStateReflectsActiveAndDownReplicas()); } docCollection = assertNumberOfReplicas(0, 0, 1, true, true); // Check that there is no leader for the shard Replica leader = docCollection.getSlice("shard1").getLeader(); - assertTrue(leader == null || !leader.isActive(cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes())); + assertTrue( + leader == null + || !leader.isActive( + cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes())); // Pull replica on the other hand should be active - Replica pullReplica = docCollection.getSlice("shard1").getReplicas(EnumSet.of(Replica.Type.PULL)).get(0); - assertTrue(pullReplica.isActive(cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes())); + Replica pullReplica = + docCollection.getSlice("shard1").getReplicas(EnumSet.of(Replica.Type.PULL)).get(0); + assertTrue( + pullReplica.isActive( + cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes())); long highestTerm = 0L; try (ZkShardTerms zkShardTerms = new ZkShardTerms(collectionName, "shard1", zkClient())) { highestTerm = zkShardTerms.getHighestTerm(); } - // add document, this should fail since there is no leader. Pull replica should not accept the update - expectThrows(SolrException.class, () -> - cluster.getSolrClient().add(collectionName, new SolrInputDocument("id", "2", "foo", "zoo")) - ); + // add document, this should fail since there is no leader. Pull replica should not accept the + // update + expectThrows( + SolrException.class, + () -> + cluster + .getSolrClient() + .add(collectionName, new SolrInputDocument("id", "2", "foo", "zoo"))); if (removeReplica) { - try(ZkShardTerms zkShardTerms = new ZkShardTerms(collectionName, "shard1", zkClient())) { + try (ZkShardTerms zkShardTerms = new ZkShardTerms(collectionName, "shard1", zkClient())) { assertEquals(highestTerm, zkShardTerms.getHighestTerm()); } } // Also fails if I send the update to the pull replica explicitly - try (HttpSolrClient pullReplicaClient = getHttpSolrClient(docCollection.getReplicas(EnumSet.of(Replica.Type.PULL)).get(0).getCoreUrl())) { - expectThrows(SolrException.class, () -> - pullReplicaClient.add(collectionName, new SolrInputDocument("id", "2", "foo", "zoo")) - ); + try (HttpSolrClient pullReplicaClient = + getHttpSolrClient( + docCollection.getReplicas(EnumSet.of(Replica.Type.PULL)).get(0).getCoreUrl())) { + expectThrows( + SolrException.class, + () -> + pullReplicaClient.add( + collectionName, new SolrInputDocument("id", "2", "foo", "zoo"))); } if (removeReplica) { - try(ZkShardTerms zkShardTerms = new ZkShardTerms(collectionName, "shard1", zkClient())) { + try (ZkShardTerms zkShardTerms = new ZkShardTerms(collectionName, "shard1", zkClient())) { assertEquals(highestTerm, zkShardTerms.getHighestTerm()); } } // Queries should still work waitForNumDocsInAllReplicas(1, docCollection.getReplicas(EnumSet.of(Replica.Type.PULL))); - // Add nrt replica back. Since there is no nrt now, new nrt will have no docs. There will be data loss, since the it will become the leader - // and pull replicas will replicate from it. Maybe we want to change this. Replicate from pull replicas is not a good idea, since they - // are by definition out of date. + // Add nrt replica back. Since there is no nrt now, new nrt will have no docs. 
There will be + data loss, since it will become the leader and pull replicas will replicate from it. + // Maybe we want to change this. Replicating from pull replicas is not a good idea, since they are + // by definition out of date. if (removeReplica) { - CollectionAdminRequest.addReplicaToShard(collectionName, "shard1", Replica.Type.NRT).process(cluster.getSolrClient()); + CollectionAdminRequest.addReplicaToShard(collectionName, "shard1", Replica.Type.NRT) + .process(cluster.getSolrClient()); } else { leaderJetty.start(); } @@ -489,9 +608,14 @@ private void doTestNoLeader(boolean removeReplica) throws Exception { cluster.getSolrClient().getZkStateReader().forceUpdateCollection(collectionName); docCollection = getCollectionState(collectionName); leader = docCollection.getSlice("shard1").getLeader(); - assertTrue(leader != null && leader.isActive(cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes())); - - // If jetty is restarted, the replication is not forced, and replica doesn't replicate from leader until new docs are added. Is this the correct behavior? Why should these two cases be different? + assertTrue( + leader != null + && leader.isActive( + cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes())); + + // If jetty is restarted, replication is not forced, and the replica doesn't replicate from the + // leader until new docs are added. Is this the correct behavior? Why should these two cases be + // different? if (removeReplica) { // Pull replicas will replicate the empty index if a new replica was added and becomes leader waitForNumDocsInAllReplicas(0, docCollection.getReplicas(EnumSet.of(Replica.Type.PULL))); @@ -504,15 +628,20 @@ private void doTestNoLeader(boolean removeReplica) throws Exception { leaderClient.commit(); assertEquals(1, leaderClient.query(new SolrQuery("*:*")).getResults().getNumFound()); } - waitForNumDocsInAllReplicas(1, docCollection.getReplicas(EnumSet.of(Replica.Type.PULL)), "id:2", null, null); + waitForNumDocsInAllReplicas( + 1, docCollection.getReplicas(EnumSet.of(Replica.Type.PULL)), "id:2", null, null); waitForNumDocsInAllReplicas(1, docCollection.getReplicas(EnumSet.of(Replica.Type.PULL))); } public void testKillPullReplica() throws Exception { CollectionAdminRequest.createCollection(collectionName, "conf", 1, 1, 0, 1) - .process(cluster.getSolrClient()); -// cluster.getSolrClient().getZkStateReader().registerCore(collectionName); //TODO: Is this needed? - waitForState("Expected collection to be created with 1 shard and 2 replicas", collectionName, clusterShape(1, 2)); + .process(cluster.getSolrClient()); + // cluster.getSolrClient().getZkStateReader().registerCore(collectionName); //TODO: Is this + // needed? 
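// Illustrative sketch (not part of this patch): the createCollection overload used throughout
// these tests. The three trailing counts are per-shard replica counts by type, which is what
// argument lists like (collectionName, "conf", 1, 1, 0, 1) above encode: 1 shard with 1 NRT,
// 0 TLOG and 1 PULL replica. "someCollection" is a placeholder name.
CollectionAdminRequest.createCollection(
        "someCollection", "conf", /* numShards */ 1,
        /* nrtReplicas */ 1, /* tlogReplicas */ 0, /* pullReplicas */ 1)
    .process(cluster.getSolrClient());
// NRT replicas index documents locally and are leader-eligible; TLOG replicas maintain only a
// transaction log; PULL replicas copy the leader's index segments and can never become leader,
// which is why doTestNoLeader above expects updates to fail once the lone NRT replica is gone.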
+ waitForState( + "Expected collection to be created with 1 shard and 2 replicas", + collectionName, + clusterShape(1, 2)); DocCollection docCollection = assertNumberOfReplicas(1, 0, 1, false, true); assertEquals(1, docCollection.getSlices().size()); @@ -521,11 +650,14 @@ public void testKillPullReplica() throws Exception { cluster.getSolrClient().commit(collectionName); waitForNumDocsInAllActiveReplicas(1); - JettySolrRunner pullReplicaJetty = cluster.getReplicaJetty(docCollection.getSlice("shard1").getReplicas(EnumSet.of(Replica.Type.PULL)).get(0)); + JettySolrRunner pullReplicaJetty = + cluster.getReplicaJetty( + docCollection.getSlice("shard1").getReplicas(EnumSet.of(Replica.Type.PULL)).get(0)); pullReplicaJetty.stop(); waitForState("Replica not removed", collectionName, activeReplicaCount(1, 0, 0)); // Also wait for the replica to be placed in state="down" - waitForState("Didn't update state", collectionName, clusterStateReflectsActiveAndDownReplicas()); + waitForState( + "Didn't update state", collectionName, clusterStateReflectsActiveAndDownReplicas()); cluster.getSolrClient().add(collectionName, new SolrInputDocument("id", "2", "foo", "bar")); cluster.getSolrClient().commit(collectionName); @@ -536,22 +668,28 @@ public void testKillPullReplica() throws Exception { waitForNumDocsInAllActiveReplicas(2); } - public void testSearchWhileReplicationHappens() { - - } + public void testSearchWhileReplicationHappens() {} - private void waitForNumDocsInAllActiveReplicas(int numDocs) throws IOException, SolrServerException, InterruptedException { + private void waitForNumDocsInAllActiveReplicas(int numDocs) + throws IOException, SolrServerException, InterruptedException { DocCollection docCollection = getCollectionState(collectionName); - waitForNumDocsInAllReplicas(numDocs, docCollection.getReplicas().stream().filter(r -> r.getState() == Replica.State.ACTIVE).collect(Collectors.toList())); + waitForNumDocsInAllReplicas( + numDocs, + docCollection.getReplicas().stream() + .filter(r -> r.getState() == Replica.State.ACTIVE) + .collect(Collectors.toList())); } - private void waitForNumDocsInAllReplicas(int numDocs, Collection replicas) throws IOException, SolrServerException, InterruptedException { + private void waitForNumDocsInAllReplicas(int numDocs, Collection replicas) + throws IOException, SolrServerException, InterruptedException { waitForNumDocsInAllReplicas(numDocs, replicas, "*:*", null, null); } - static void waitForNumDocsInAllReplicas(int numDocs, Collection replicas, String query, String user, String pass) throws IOException, SolrServerException, InterruptedException { + static void waitForNumDocsInAllReplicas( + int numDocs, Collection replicas, String query, String user, String pass) + throws IOException, SolrServerException, InterruptedException { TimeOut t = new TimeOut(REPLICATION_TIMEOUT_SECS, TimeUnit.SECONDS, TimeSource.NANO_TIME); - for (Replica r:replicas) { + for (Replica r : replicas) { String replicaUrl = r.getCoreUrl(); try (HttpSolrClient replicaClient = getHttpSolrClient(replicaUrl)) { while (true) { @@ -561,8 +699,16 @@ static void waitForNumDocsInAllReplicas(int numDocs, Collection replica } try { long numFound = req.process(replicaClient).getResults().getNumFound(); - assertEquals("Replica " + r.getName() + " (" + replicaUrl + ") not up to date after " + REPLICATION_TIMEOUT_SECS + " seconds", - numDocs, numFound); + assertEquals( + "Replica " + + r.getName() + + " (" + + replicaUrl + + ") not up to date after " + + REPLICATION_TIMEOUT_SECS + + " seconds", + 
numDocs, + numFound); log.info("Replica {} ({}) has all {} docs", r.name, replicaUrl, numDocs); break; } catch (AssertionError e) { @@ -587,29 +733,59 @@ static void waitForDeletion(String collection) throws InterruptedException, Keep fail("Timed out waiting for collection " + collection + " to be deleted."); } cluster.getSolrClient().getZkStateReader().forceUpdateCollection(collection); - } catch(SolrException e) { + } catch (SolrException e) { return; } - } } - private DocCollection assertNumberOfReplicas(int numNrtReplicas, int numTlogReplicas, int numPullReplicas, boolean updateCollection, boolean activeOnly) throws KeeperException, InterruptedException { - return assertNumberOfReplicas(collectionName, numNrtReplicas, numTlogReplicas, numPullReplicas, updateCollection, activeOnly); + private DocCollection assertNumberOfReplicas( + int numNrtReplicas, + int numTlogReplicas, + int numPullReplicas, + boolean updateCollection, + boolean activeOnly) + throws KeeperException, InterruptedException { + return assertNumberOfReplicas( + collectionName, + numNrtReplicas, + numTlogReplicas, + numPullReplicas, + updateCollection, + activeOnly); } - static DocCollection assertNumberOfReplicas(String coll, int numNrtReplicas, int numTlogReplicas, int numPullReplicas, boolean updateCollection, boolean activeOnly) throws KeeperException, InterruptedException { + static DocCollection assertNumberOfReplicas( + String coll, + int numNrtReplicas, + int numTlogReplicas, + int numPullReplicas, + boolean updateCollection, + boolean activeOnly) + throws KeeperException, InterruptedException { if (updateCollection) { cluster.getSolrClient().getZkStateReader().forceUpdateCollection(coll); } DocCollection docCollection = getCollectionState(coll); assertNotNull(docCollection); - assertEquals("Unexpected number of writer replicas: " + docCollection, numNrtReplicas, - docCollection.getReplicas(EnumSet.of(Replica.Type.NRT)).stream().filter(r->!activeOnly || r.getState() == Replica.State.ACTIVE).count()); - assertEquals("Unexpected number of pull replicas: " + docCollection, numPullReplicas, - docCollection.getReplicas(EnumSet.of(Replica.Type.PULL)).stream().filter(r->!activeOnly || r.getState() == Replica.State.ACTIVE).count()); - assertEquals("Unexpected number of active replicas: " + docCollection, numTlogReplicas, - docCollection.getReplicas(EnumSet.of(Replica.Type.TLOG)).stream().filter(r->!activeOnly || r.getState() == Replica.State.ACTIVE).count()); + assertEquals( + "Unexpected number of writer replicas: " + docCollection, + numNrtReplicas, + docCollection.getReplicas(EnumSet.of(Replica.Type.NRT)).stream() + .filter(r -> !activeOnly || r.getState() == Replica.State.ACTIVE) + .count()); + assertEquals( + "Unexpected number of pull replicas: " + docCollection, + numPullReplicas, + docCollection.getReplicas(EnumSet.of(Replica.Type.PULL)).stream() + .filter(r -> !activeOnly || r.getState() == Replica.State.ACTIVE) + .count()); + assertEquals( + "Unexpected number of active replicas: " + docCollection, + numTlogReplicas, + docCollection.getReplicas(EnumSet.of(Replica.Type.TLOG)).stream() + .filter(r -> !activeOnly || r.getState() == Replica.State.ACTIVE) + .count()); return docCollection; } @@ -618,7 +794,7 @@ static DocCollection assertNumberOfReplicas(String coll, int numNrtReplicas, int */ private CollectionStatePredicate clusterStateReflectsActiveAndDownReplicas() { return (liveNodes, collectionState) -> { - for (Replica r:collectionState.getReplicas()) { + for (Replica r : collectionState.getReplicas()) { if 
(r.getState() != Replica.State.DOWN && r.getState() != Replica.State.ACTIVE) { return false; } @@ -633,12 +809,11 @@ private CollectionStatePredicate clusterStateReflectsActiveAndDownReplicas() { }; } - - private CollectionStatePredicate activeReplicaCount(int numNrtReplicas, int numTlogReplicas, int numPullReplicas) { + private CollectionStatePredicate activeReplicaCount( + int numNrtReplicas, int numTlogReplicas, int numPullReplicas) { return (liveNodes, collectionState) -> { int nrtFound = 0, tlogFound = 0, pullFound = 0; - if (collectionState == null) - return false; + if (collectionState == null) return false; for (Slice slice : collectionState) { for (Replica replica : slice) { if (replica.isActive(liveNodes)) @@ -657,7 +832,9 @@ private CollectionStatePredicate activeReplicaCount(int numNrtReplicas, int numT } } } - return numNrtReplicas == nrtFound && numTlogReplicas == tlogFound && numPullReplicas == pullFound; + return numNrtReplicas == nrtFound + && numTlogReplicas == tlogFound + && numPullReplicas == pullFound; }; } @@ -670,29 +847,38 @@ private void addDocs(int numDocs) throws SolrServerException, IOException { cluster.getSolrClient().commit(collectionName); } - private void addReplicaToShard(String shardName, Replica.Type type) throws IOException, SolrServerException { + private void addReplicaToShard(String shardName, Replica.Type type) + throws IOException, SolrServerException { switch (random().nextInt(3)) { case 0: // Add replica with SolrJ - CollectionAdminResponse response = CollectionAdminRequest.addReplicaToShard(collectionName, shardName, type).process(cluster.getSolrClient()); - assertEquals("Unexpected response status: " + response.getStatus(), 0, response.getStatus()); + CollectionAdminResponse response = + CollectionAdminRequest.addReplicaToShard(collectionName, shardName, type) + .process(cluster.getSolrClient()); + assertEquals( + "Unexpected response status: " + response.getStatus(), 0, response.getStatus()); break; case 1: // Add replica with V1 API - String url = String.format(Locale.ROOT, "%s/admin/collections?action=ADDREPLICA&collection=%s&shard=%s&type=%s", - cluster.getRandomJetty(random()).getBaseUrl(), - collectionName, - shardName, - type); + String url = + String.format( + Locale.ROOT, + "%s/admin/collections?action=ADDREPLICA&collection=%s&shard=%s&type=%s", + cluster.getRandomJetty(random()).getBaseUrl(), + collectionName, + shardName, + type); HttpGet addReplicaGet = new HttpGet(url); HttpResponse httpResponse = cluster.getSolrClient().getHttpClient().execute(addReplicaGet); assertEquals(200, httpResponse.getStatusLine().getStatusCode()); break; - case 2:// Add replica with V2 API - url = String.format(Locale.ROOT, "%s/____v2/c/%s/shards", - cluster.getRandomJetty(random()).getBaseUrl(), - collectionName); - String requestBody = String.format(Locale.ROOT, "{add-replica:{shard:%s, type:%s}}", - shardName, - type); + case 2: // Add replica with V2 API + url = + String.format( + Locale.ROOT, + "%s/____v2/c/%s/shards", + cluster.getRandomJetty(random()).getBaseUrl(), + collectionName); + String requestBody = + String.format(Locale.ROOT, "{add-replica:{shard:%s, type:%s}}", shardName, type); HttpPost addReplicaPost = new HttpPost(url); addReplicaPost.setHeader("Content-type", "application/json"); addReplicaPost.setEntity(new StringEntity(requestBody)); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java b/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java index 2ff0976c72b..6341e1c4e8a 100644 
--- a/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java @@ -27,7 +27,6 @@ import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; - import org.apache.solr.SolrTestCaseJ4.SuppressSSL; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; @@ -55,33 +54,35 @@ @SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776") public class TestPullReplicaErrorHandling extends SolrCloudTestCase { - - private final static int REPLICATION_TIMEOUT_SECS = 10; - + + private static final int REPLICATION_TIMEOUT_SECS = 10; + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static Map proxies; private static Map jettys; private String collectionName = null; - + private String suggestedCollectionName() { - return (getTestClass().getSimpleName().replace("Test", "") + "_" + getSaferTestName().split(" ")[0]).replaceAll("(.)(\\p{Upper})", "$1_$2").toLowerCase(Locale.ROOT); + return (getTestClass().getSimpleName().replace("Test", "") + + "_" + + getSaferTestName().split(" ")[0]) + .replaceAll("(.)(\\p{Upper})", "$1_$2") + .toLowerCase(Locale.ROOT); } @BeforeClass public static void setupCluster() throws Exception { System.setProperty("solr.zkclienttimeout", "20000"); - configureCluster(4) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(4).addConfig("conf", configset("cloud-minimal")).configure(); // Add proxies proxies = new HashMap<>(cluster.getJettySolrRunners().size()); jettys = new HashMap<>(cluster.getJettySolrRunners().size()); - for (JettySolrRunner jetty:cluster.getJettySolrRunners()) { + for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { SocketProxy proxy = new SocketProxy(); jetty.setProxyPort(proxy.getListenPort()); - cluster.stopJettySolrRunner(jetty);//TODO: Can we avoid this restart + cluster.stopJettySolrRunner(jetty); // TODO: Can we avoid this restart cluster.startJettySolrRunner(jetty); cluster.waitForAllNodes(30); proxy.open(jetty.getBaseUrl().toURI()); @@ -92,7 +93,7 @@ public static void setupCluster() throws Exception { jettys.put(proxy.getUrl(), jetty); } } - + @AfterClass public static void tearDownCluster() throws Exception { if (null != proxies) { @@ -104,7 +105,7 @@ public static void tearDownCluster() throws Exception { jettys = null; TestInjection.reset(); } - + @Override public void setUp() throws Exception { super.setUp(); @@ -116,7 +117,12 @@ public void setUp() throws Exception { @Override public void tearDown() throws Exception { - if (cluster.getSolrClient().getZkStateReader().getClusterState().getCollectionOrNull(collectionName) != null) { + if (cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollectionOrNull(collectionName) + != null) { log.info("tearDown deleting collection"); CollectionAdminRequest.deleteCollection(collectionName).process(cluster.getSolrClient()); log.info("Collection deleted"); @@ -125,13 +131,11 @@ public void tearDown() throws Exception { collectionName = null; super.tearDown(); } - -// @Repeat(iterations=10) -//commented 9-Aug-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018 -public void testCantConnectToPullReplica() throws Exception { + + public void testCantConnectToPullReplica() throws Exception { int numShards = 2; CollectionAdminRequest.createCollection(collectionName, "conf", numShards, 1, 
0, 1) - .process(cluster.getSolrClient()); + .process(cluster.getSolrClient()); cluster.waitForActiveCollection(collectionName, numShards, numShards * 2); addDocs(10); DocCollection docCollection = assertNumberOfReplicas(numShards, 0, numShards, false, true); @@ -139,41 +143,53 @@ public void testCantConnectToPullReplica() throws Exception { SocketProxy proxy = getProxyForReplica(s.getReplicas(EnumSet.of(Replica.Type.PULL)).get(0)); try { proxy.close(); - for (int i = 1; i <= 10; i ++) { + for (int i = 1; i <= 10; i++) { addDocs(10 + i); try (HttpSolrClient leaderClient = getHttpSolrClient(s.getLeader().getCoreUrl())) { assertNumDocs(10 + i, leaderClient); } } - SolrServerException e = expectThrows(SolrServerException.class, () -> { - try(HttpSolrClient pullReplicaClient = getHttpSolrClient(s.getReplicas(EnumSet.of(Replica.Type.PULL)).get(0).getCoreUrl())) { - pullReplicaClient.query(new SolrQuery("*:*")).getResults().getNumFound(); - } - }); - - assertNumberOfReplicas(numShards, 0, numShards, true, true);// Replica should still be active, since it doesn't disconnect from ZooKeeper + SolrServerException e = + expectThrows( + SolrServerException.class, + () -> { + try (HttpSolrClient pullReplicaClient = + getHttpSolrClient( + s.getReplicas(EnumSet.of(Replica.Type.PULL)).get(0).getCoreUrl())) { + pullReplicaClient.query(new SolrQuery("*:*")).getResults().getNumFound(); + } + }); + + // Replica should still be active, since it doesn't disconnect from ZooKeeper + assertNumberOfReplicas(numShards, 0, numShards, true, true); { long numFound = 0; TimeOut t = new TimeOut(REPLICATION_TIMEOUT_SECS, TimeUnit.SECONDS, TimeSource.NANO_TIME); while (numFound < 20 && !t.hasTimedOut()) { Thread.sleep(200); - numFound = cluster.getSolrClient().query(collectionName, new SolrQuery("*:*")).getResults().getNumFound(); + numFound = + cluster + .getSolrClient() + .query(collectionName, new SolrQuery("*:*")) + .getResults() + .getNumFound(); } } } finally { proxy.reopen(); } - - try (HttpSolrClient pullReplicaClient = getHttpSolrClient(s.getReplicas(EnumSet.of(Replica.Type.PULL)).get(0).getCoreUrl())) { + + try (HttpSolrClient pullReplicaClient = + getHttpSolrClient(s.getReplicas(EnumSet.of(Replica.Type.PULL)).get(0).getCoreUrl())) { assertNumDocs(20, pullReplicaClient); } } - + public void testCantConnectToLeader() throws Exception { int numShards = 1; CollectionAdminRequest.createCollection(collectionName, "conf", numShards, 1, 0, 1) - .process(cluster.getSolrClient()); + .process(cluster.getSolrClient()); cluster.waitForActiveCollection(collectionName, numShards, numShards * 2); addDocs(10); DocCollection docCollection = assertNumberOfReplicas(numShards, 0, numShards, false, true); @@ -181,12 +197,14 @@ public void testCantConnectToLeader() throws Exception { SocketProxy proxy = getProxyForReplica(s.getLeader()); try { // wait for replication - try (HttpSolrClient pullReplicaClient = getHttpSolrClient(s.getReplicas(EnumSet.of(Replica.Type.PULL)).get(0).getCoreUrl())) { + try (HttpSolrClient pullReplicaClient = + getHttpSolrClient(s.getReplicas(EnumSet.of(Replica.Type.PULL)).get(0).getCoreUrl())) { assertNumDocs(10, pullReplicaClient); } proxy.close(); - expectThrows(SolrException.class, ()->addDocs(1)); - try (HttpSolrClient pullReplicaClient = getHttpSolrClient(s.getReplicas(EnumSet.of(Replica.Type.PULL)).get(0).getCoreUrl())) { + expectThrows(SolrException.class, () -> addDocs(1)); + try (HttpSolrClient pullReplicaClient = + getHttpSolrClient(s.getReplicas(EnumSet.of(Replica.Type.PULL)).get(0).getCoreUrl())) 
{ assertNumDocs(10, pullReplicaClient); } assertNumDocs(10, cluster.getSolrClient()); @@ -194,25 +212,29 @@ public void testCantConnectToLeader() throws Exception { log.info("Opening leader node"); proxy.reopen(); } -// Back to normal -// Even if the leader is back to normal, the replica can get broken pipe for some time when trying to connect to it. The commit -// can fail if it's sent to the replica and it forwards it to the leader, and since it uses CUSC the error is hidden! That breaks -// the last part of this test. -// addDocs(20); -// assertNumDocs(20, cluster.getSolrClient(), 300); -// try (HttpSolrClient pullReplicaClient = getHttpSolrClient(s.getReplicas(EnumSet.of(Replica.Type.PULL)).get(0).getCoreUrl())) { -// assertNumDocs(20, pullReplicaClient); -// } + // Back to normal + // Even if the leader is back to normal, the replica can get broken pipe for some time when + // trying to connect to it. The commit + // can fail if it's sent to the replica and it forwards it to the leader, and since it uses + // CUSC the error is hidden! That breaks + // the last part of this test. + // addDocs(20); + // assertNumDocs(20, cluster.getSolrClient(), 300); + // try (HttpSolrClient pullReplicaClient = + // getHttpSolrClient(s.getReplicas(EnumSet.of(Replica.Type.PULL)).get(0).getCoreUrl())) { + // assertNumDocs(20, pullReplicaClient); + // } } - + public void testPullReplicaDisconnectsFromZooKeeper() throws Exception { int numShards = 1; CollectionAdminRequest.createCollection(collectionName, "conf", numShards, 1, 0, 1) - .process(cluster.getSolrClient()); + .process(cluster.getSolrClient()); addDocs(10); DocCollection docCollection = assertNumberOfReplicas(numShards, 0, numShards, false, true); Slice s = docCollection.getSlices().iterator().next(); - try (HttpSolrClient pullReplicaClient = getHttpSolrClient(s.getReplicas(EnumSet.of(Replica.Type.PULL)).get(0).getCoreUrl())) { + try (HttpSolrClient pullReplicaClient = + getHttpSolrClient(s.getReplicas(EnumSet.of(Replica.Type.PULL)).get(0).getCoreUrl())) { assertNumDocs(10, pullReplicaClient); } addDocs(20); @@ -222,14 +244,15 @@ public void testPullReplicaDisconnectsFromZooKeeper() throws Exception { waitForState("Expecting node to be disconnected", collectionName, activeReplicaCount(1, 0, 0)); addDocs(40); waitForState("Expecting node to be reconnected", collectionName, activeReplicaCount(1, 0, 1)); - try (HttpSolrClient pullReplicaClient = getHttpSolrClient(s.getReplicas(EnumSet.of(Replica.Type.PULL)).get(0).getCoreUrl())) { + try (HttpSolrClient pullReplicaClient = + getHttpSolrClient(s.getReplicas(EnumSet.of(Replica.Type.PULL)).get(0).getCoreUrl())) { assertNumDocs(40, pullReplicaClient); } } public void testCloseHooksDeletedOnReconnect() throws Exception { CollectionAdminRequest.createCollection(collectionName, "conf", 1, 1, 0, 1) - .process(cluster.getSolrClient()); + .process(cluster.getSolrClient()); addDocs(10); DocCollection docCollection = assertNumberOfReplicas(1, 0, 1, false, true); @@ -239,14 +262,17 @@ public void testCloseHooksDeletedOnReconnect() throws Exception { for (int i = 0; i < (TEST_NIGHTLY ? 
5 : 2); i++) { cluster.expireZkSession(jetty); - waitForState("Expecting node to be disconnected", collectionName, activeReplicaCount(1, 0, 0)); + waitForState( + "Expecting node to be disconnected", collectionName, activeReplicaCount(1, 0, 0)); waitForState("Expecting node to reconnect", collectionName, activeReplicaCount(1, 0, 1)); - // We have two active ReplicationHandler with two close hooks each, one for triggering recovery and one for doing interval polling + // We have two active ReplicationHandler with two close hooks each, one for triggering + // recovery and one for doing interval polling assertEquals(5, core.getCloseHooks().size()); } } - - private void assertNumDocs(int numDocs, SolrClient client, int timeoutSecs) throws InterruptedException, SolrServerException, IOException { + + private void assertNumDocs(int numDocs, SolrClient client, int timeoutSecs) + throws InterruptedException, SolrServerException, IOException { TimeOut t = new TimeOut(timeoutSecs, TimeUnit.SECONDS, TimeSource.NANO_TIME); long numFound = -1; while (!t.hasTimedOut()) { @@ -258,9 +284,9 @@ private void assertNumDocs(int numDocs, SolrClient client, int timeoutSecs) thro } fail("Didn't get expected doc count. Expected: " + numDocs + ", Found: " + numFound); } - - - private void assertNumDocs(int numDocs, SolrClient client) throws InterruptedException, SolrServerException, IOException { + + private void assertNumDocs(int numDocs, SolrClient client) + throws InterruptedException, SolrServerException, IOException { assertNumDocs(numDocs, client, REPLICATION_TIMEOUT_SECS); } @@ -272,22 +298,36 @@ private void addDocs(int numDocs) throws SolrServerException, IOException { cluster.getSolrClient().add(collectionName, docs); cluster.getSolrClient().commit(collectionName); } - - private DocCollection assertNumberOfReplicas(int numWriter, int numActive, int numPassive, boolean updateCollection, boolean activeOnly) throws KeeperException, InterruptedException { + + private DocCollection assertNumberOfReplicas( + int numWriter, int numActive, int numPassive, boolean updateCollection, boolean activeOnly) + throws KeeperException, InterruptedException { if (updateCollection) { cluster.getSolrClient().getZkStateReader().forceUpdateCollection(collectionName); } DocCollection docCollection = getCollectionState(collectionName); assertNotNull(docCollection); - assertEquals("Unexpected number of writer replicas: " + docCollection, numWriter, - docCollection.getReplicas(EnumSet.of(Replica.Type.NRT)).stream().filter(r->!activeOnly || r.getState() == Replica.State.ACTIVE).count()); - assertEquals("Unexpected number of pull replicas: " + docCollection, numPassive, - docCollection.getReplicas(EnumSet.of(Replica.Type.PULL)).stream().filter(r->!activeOnly || r.getState() == Replica.State.ACTIVE).count()); - assertEquals("Unexpected number of active replicas: " + docCollection, numActive, - docCollection.getReplicas(EnumSet.of(Replica.Type.TLOG)).stream().filter(r->!activeOnly || r.getState() == Replica.State.ACTIVE).count()); + assertEquals( + "Unexpected number of writer replicas: " + docCollection, + numWriter, + docCollection.getReplicas(EnumSet.of(Replica.Type.NRT)).stream() + .filter(r -> !activeOnly || r.getState() == Replica.State.ACTIVE) + .count()); + assertEquals( + "Unexpected number of pull replicas: " + docCollection, + numPassive, + docCollection.getReplicas(EnumSet.of(Replica.Type.PULL)).stream() + .filter(r -> !activeOnly || r.getState() == Replica.State.ACTIVE) + .count()); + assertEquals( + "Unexpected number of 
active replicas: " + docCollection, + numActive, + docCollection.getReplicas(EnumSet.of(Replica.Type.TLOG)).stream() + .filter(r -> !activeOnly || r.getState() == Replica.State.ACTIVE) + .count()); return docCollection; } - + protected JettySolrRunner getJettyForReplica(Replica replica) throws Exception { String replicaBaseUrl = replica.getStr(ZkStateReader.BASE_URL_PROP); assertNotNull(replicaBaseUrl); @@ -296,8 +336,8 @@ protected JettySolrRunner getJettyForReplica(Replica replica) throws Exception { JettySolrRunner proxy = jettys.get(baseUrl.toURI()); assertNotNull("No proxy found for " + baseUrl + "!", proxy); return proxy; - } - + } + protected SocketProxy getProxyForReplica(Replica replica) throws Exception { String replicaBaseUrl = replica.getStr(ZkStateReader.BASE_URL_PROP); assertNotNull(replicaBaseUrl); @@ -311,7 +351,7 @@ protected SocketProxy getProxyForReplica(Replica replica) throws Exception { assertNotNull("No proxy found for " + baseUrl + "!", proxy); return proxy; } - + private void waitForDeletion(String collection) throws InterruptedException, KeeperException { TimeOut t = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); while (cluster.getSolrClient().getZkStateReader().getClusterState().hasCollection(collection)) { @@ -322,18 +362,17 @@ private void waitForDeletion(String collection) throws InterruptedException, Kee fail("Timed out waiting for collection " + collection + " to be deleted."); } cluster.getSolrClient().getZkStateReader().forceUpdateCollection(collection); - } catch(SolrException e) { + } catch (SolrException e) { return; } - } } - - private CollectionStatePredicate activeReplicaCount(int numWriter, int numActive, int numPassive) { + + private CollectionStatePredicate activeReplicaCount( + int numWriter, int numActive, int numPassive) { return (liveNodes, collectionState) -> { int writersFound = 0, activesFound = 0, passivesFound = 0; - if (collectionState == null) - return false; + if (collectionState == null) return false; for (Slice slice : collectionState) { for (Replica replica : slice) { if (replica.isActive(liveNodes)) @@ -355,5 +394,4 @@ private CollectionStatePredicate activeReplicaCount(int numWriter, int numActive return numWriter == writersFound && numActive == activesFound && numPassive == passivesFound; }; } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaWithAuth.java b/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaWithAuth.java index d460669835f..d44b13e8ca1 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaWithAuth.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaWithAuth.java @@ -16,11 +16,18 @@ */ package org.apache.solr.cloud; +import static java.util.Collections.singletonList; +import static java.util.Collections.singletonMap; +import static org.apache.solr.cloud.TestPullReplica.assertNumberOfReplicas; +import static org.apache.solr.cloud.TestPullReplica.assertUlogPresence; +import static org.apache.solr.cloud.TestPullReplica.waitForDeletion; +import static org.apache.solr.cloud.TestPullReplica.waitForNumDocsInAllReplicas; +import static org.apache.solr.security.Sha256AuthenticationProvider.getSaltedHashedValue; + import java.io.IOException; import java.util.EnumSet; import java.util.List; import java.util.Map; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrRequest; @@ -43,14 +50,6 @@ import org.junit.BeforeClass; import org.junit.Test; -import static 
java.util.Collections.singletonList; -import static java.util.Collections.singletonMap; -import static org.apache.solr.cloud.TestPullReplica.assertNumberOfReplicas; -import static org.apache.solr.cloud.TestPullReplica.assertUlogPresence; -import static org.apache.solr.cloud.TestPullReplica.waitForDeletion; -import static org.apache.solr.cloud.TestPullReplica.waitForNumDocsInAllReplicas; -import static org.apache.solr.security.Sha256AuthenticationProvider.getSaltedHashedValue; - @Slow public class TestPullReplicaWithAuth extends SolrCloudTestCase { @@ -60,15 +59,25 @@ public class TestPullReplicaWithAuth extends SolrCloudTestCase { @BeforeClass public static void setupClusterWithSecurityEnabled() throws Exception { - final String SECURITY_JSON = Utils.toJSONString - (Map.of("authorization", - Map.of("class", RuleBasedAuthorizationPlugin.class.getName(), - "user-role", singletonMap(USER, "admin"), - "permissions", singletonList(Map.of("name", "all", "role", "admin"))), - "authentication", - Map.of("class", BasicAuthPlugin.class.getName(), - "blockUnknown", true, - "credentials", singletonMap(USER, getSaltedHashedValue(PASS))))); + final String SECURITY_JSON = + Utils.toJSONString( + Map.of( + "authorization", + Map.of( + "class", + RuleBasedAuthorizationPlugin.class.getName(), + "user-role", + singletonMap(USER, "admin"), + "permissions", + singletonList(Map.of("name", "all", "role", "admin"))), + "authentication", + Map.of( + "class", + BasicAuthPlugin.class.getName(), + "blockUnknown", + true, + "credentials", + singletonMap(USER, getSaltedHashedValue(PASS))))); configureCluster(2) .addConfig("conf", configset("cloud-minimal")) @@ -81,17 +90,22 @@ private > T withBasicAuth(T req) { return req; } - private QueryResponse queryWithBasicAuth(HttpSolrClient client, SolrQuery q) throws IOException, SolrServerException { + private QueryResponse queryWithBasicAuth(HttpSolrClient client, SolrQuery q) + throws IOException, SolrServerException { return withBasicAuth(new QueryRequest(q)).process(client); } @Test public void testPKIAuthWorksForPullReplication() throws Exception { int numPullReplicas = 2; - withBasicAuth(CollectionAdminRequest.createCollection(collectionName, "conf", 1, 1, 0, numPullReplicas)) + withBasicAuth( + CollectionAdminRequest.createCollection( + collectionName, "conf", 1, 1, 0, numPullReplicas)) .processAndWait(cluster.getSolrClient(), 10); - waitForState("Expected collection to be created with 1 shard and " + (numPullReplicas + 1) + " replicas", - collectionName, clusterShape(1, numPullReplicas + 1)); + waitForState( + "Expected collection to be created with 1 shard and " + (numPullReplicas + 1) + " replicas", + collectionName, + clusterShape(1, numPullReplicas + 1)); DocCollection docCollection = assertNumberOfReplicas(collectionName, 1, 0, numPullReplicas, false, true); @@ -106,7 +120,9 @@ public void testPKIAuthWorksForPullReplication() throws Exception { Slice s = docCollection.getSlices().iterator().next(); try (HttpSolrClient leaderClient = getHttpSolrClient(s.getLeader().getCoreUrl())) { - assertEquals(numDocs, queryWithBasicAuth(leaderClient, new SolrQuery("*:*")).getResults().getNumFound()); + assertEquals( + numDocs, + queryWithBasicAuth(leaderClient, new SolrQuery("*:*")).getResults().getNumFound()); } List pullReplicas = s.getReplicas(EnumSet.of(Replica.Type.PULL)); @@ -114,18 +130,24 @@ public void testPKIAuthWorksForPullReplication() throws Exception { for (Replica r : pullReplicas) { try (HttpSolrClient pullReplicaClient = getHttpSolrClient(r.getCoreUrl())) { - 
QueryResponse statsResponse = queryWithBasicAuth(pullReplicaClient, new SolrQuery("qt", "/admin/plugins", "stats", "true")); + QueryResponse statsResponse = + queryWithBasicAuth( + pullReplicaClient, new SolrQuery("qt", "/admin/plugins", "stats", "true")); // adds is a gauge, which is null for PULL replicas - assertNull("Replicas shouldn't process the add document request: " + statsResponse, + assertNull( + "Replicas shouldn't process the add document request: " + statsResponse, getUpdateHandlerMetric(statsResponse, "UPDATE.updateHandler.adds")); - assertEquals("Replicas shouldn't process the add document request: " + statsResponse, - 0L, getUpdateHandlerMetric(statsResponse, "UPDATE.updateHandler.cumulativeAdds.count")); + assertEquals( + "Replicas shouldn't process the add document request: " + statsResponse, + 0L, + getUpdateHandlerMetric(statsResponse, "UPDATE.updateHandler.cumulativeAdds.count")); } } } CollectionAdminResponse response = - withBasicAuth(CollectionAdminRequest.reloadCollection(collectionName)).process(cluster.getSolrClient()); + withBasicAuth(CollectionAdminRequest.reloadCollection(collectionName)) + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); assertUlogPresence(docCollection); @@ -133,26 +155,36 @@ public void testPKIAuthWorksForPullReplication() throws Exception { Slice s = docCollection.getSlices().iterator().next(); List pullReplicas = s.getReplicas(EnumSet.of(Replica.Type.PULL)); assertEquals(numPullReplicas, pullReplicas.size()); - response = withBasicAuth(CollectionAdminRequest.addReplicaToShard(collectionName, s.getName(), Replica.Type.PULL)).process(cluster.getSolrClient()); + response = + withBasicAuth( + CollectionAdminRequest.addReplicaToShard( + collectionName, s.getName(), Replica.Type.PULL)) + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); numPullReplicas = numPullReplicas + 1; // added a PULL - waitForState("Expected collection to be created with 1 shard and " + (numPullReplicas + 1) + " replicas", - collectionName, clusterShape(1, numPullReplicas + 1)); + waitForState( + "Expected collection to be created with 1 shard and " + (numPullReplicas + 1) + " replicas", + collectionName, + clusterShape(1, numPullReplicas + 1)); - docCollection = - assertNumberOfReplicas(collectionName, 1, 0, numPullReplicas, false, true); + docCollection = assertNumberOfReplicas(collectionName, 1, 0, numPullReplicas, false, true); s = docCollection.getSlices().iterator().next(); pullReplicas = s.getReplicas(EnumSet.of(Replica.Type.PULL)); assertEquals(numPullReplicas, pullReplicas.size()); waitForNumDocsInAllReplicas(numDocs, pullReplicas, "*:*", USER, PASS); - withBasicAuth(CollectionAdminRequest.deleteCollection(collectionName)).process(cluster.getSolrClient()); + withBasicAuth(CollectionAdminRequest.deleteCollection(collectionName)) + .process(cluster.getSolrClient()); waitForDeletion(collectionName); } @SuppressWarnings("unchecked") private Object getUpdateHandlerMetric(QueryResponse statsResponse, String metric) { - return ((Map) statsResponse.getResponse().findRecursive("plugins", "UPDATE", "updateHandler", "stats")).get(metric); + return ((Map) + statsResponse + .getResponse() + .findRecursive("plugins", "UPDATE", "updateHandler", "stats")) + .get(metric); } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestQueryingOnDownCollection.java b/solr/core/src/test/org/apache/solr/cloud/TestQueryingOnDownCollection.java index df1d2dd6d27..2f291ec4d5e 100644 --- 
a/solr/core/src/test/org/apache/solr/cloud/TestQueryingOnDownCollection.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestQueryingOnDownCollection.java @@ -19,7 +19,6 @@ import java.lang.invoke.MethodHandles; import java.util.List; import java.util.Map; - import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrRequest; @@ -54,8 +53,8 @@ public static void setupCluster() throws Exception { @Test /** - * Assert that requests to "down collection", i.e. a collection which has all replicas in down state - * (but are hosted on nodes that are live), fail fast and throw meaningful exceptions + * Assert that requests to "down collection", i.e. a collection which has all replicas in down + * state (but are hosted on nodes that are live), fail fast and throw meaningful exceptions */ public void testQueryToDownCollectionShouldFailFast() throws Exception { @@ -64,19 +63,20 @@ public void testQueryToDownCollectionShouldFailFast() throws Exception { .process(cluster.getSolrClient()); // Add some dummy documents - UpdateRequest update = (UpdateRequest) new UpdateRequest().setBasicAuthCredentials(USERNAME, PASSWORD); + UpdateRequest update = + (UpdateRequest) new UpdateRequest().setBasicAuthCredentials(USERNAME, PASSWORD); for (int i = 0; i < 100; i++) { update.add("id", Integer.toString(i)); } update.commit(cluster.getSolrClient(), COLLECTION_NAME); - // Bring down replicas but keep nodes up. This could've been done by some combinations of collections API operations; - // however, to make it faster, altering cluster state directly! ;-) + // Bring down replicas but keep nodes up. This could've been done by some combinations of + // collections API operations; however to make it faster, altering cluster state directly! downAllReplicas(); // assert all replicas are in down state List replicas = getCollectionState(COLLECTION_NAME).getReplicas(); - for (Replica replica: replicas){ + for (Replica replica : replicas) { assertEquals(replica.getState(), Replica.State.DOWN); } @@ -85,69 +85,82 @@ public void testQueryToDownCollectionShouldFailFast() throws Exception { SolrClient client = cluster.getJettySolrRunner(0).newClient(); - SolrRequest req = new QueryRequest(new SolrQuery("*:*").setRows(0)).setBasicAuthCredentials(USERNAME, PASSWORD); + SolrRequest req = + new QueryRequest(new SolrQuery("*:*").setRows(0)) + .setBasicAuthCredentials(USERNAME, PASSWORD); - // Without the SOLR-13793 fix, this causes requests to "down collection" to pile up (until the nodes run out - // of serviceable threads and they crash, even for other collections hosted on the nodes). - SolrException error = expectThrows(SolrException.class, - "Request should fail after trying all replica nodes once", - () -> client.request(req, COLLECTION_NAME) - ); + // Without the SOLR-13793 fix, this causes requests to "down collection" to pile up (until the + // nodes run out of serviceable threads and they crash, even for other collections hosted on the + // nodes). 
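// Illustrative sketch (not part of this patch): the "all replicas down, nodes still live"
// precondition asserted with the loop above, expressed as a reusable CollectionStatePredicate
// in the style of the predicates elsewhere in these tests. The helper name allReplicasDown is
// hypothetical; waitForState(...) could use it in place of the manual loop.
private static CollectionStatePredicate allReplicasDown() {
  return (liveNodes, collectionState) -> {
    if (collectionState == null) {
      return false;
    }
    for (Replica r : collectionState.getReplicas()) {
      if (r.getState() != Replica.State.DOWN) {
        return false; // at least one replica is not (yet) reported DOWN
      }
    }
    return true;
  };
}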
+ SolrException error = + expectThrows( + SolrException.class, + "Request should fail after trying all replica nodes once", + () -> client.request(req, COLLECTION_NAME)); client.close(); assertEquals(error.code(), SolrException.ErrorCode.INVALID_STATE.code); - assertTrue(error.getMessage().contains("No active replicas found for collection: " + COLLECTION_NAME)); + assertTrue( + error.getMessage().contains("No active replicas found for collection: " + COLLECTION_NAME)); // run same set of tests on v2 client which uses V2HttpCall - Http2SolrClient v2Client = new Http2SolrClient.Builder(cluster.getJettySolrRunner(0).getBaseUrl().toString()) - .build(); + Http2SolrClient v2Client = + new Http2SolrClient.Builder(cluster.getJettySolrRunner(0).getBaseUrl().toString()).build(); - error = expectThrows(SolrException.class, - "Request should fail after trying all replica nodes once", - () -> v2Client.request(req, COLLECTION_NAME) - ); + error = + expectThrows( + SolrException.class, + "Request should fail after trying all replica nodes once", + () -> v2Client.request(req, COLLECTION_NAME)); v2Client.close(); assertEquals(error.code(), SolrException.ErrorCode.INVALID_STATE.code); - assertTrue(error.getMessage().contains("No active replicas found for collection: " + COLLECTION_NAME)); + assertTrue( + error.getMessage().contains("No active replicas found for collection: " + COLLECTION_NAME)); } @SuppressWarnings({"unchecked"}) private void downAllReplicas() throws Exception { - byte[] collectionState = cluster.getZkClient().getData("/collections/" + COLLECTION_NAME + "/state.json", - null, null, true); - - Map> infectedState = (Map>) Utils.fromJSON(collectionState); - Map shards = (Map) infectedState.get(COLLECTION_NAME).get("shards"); - for(Map.Entry shard: shards.entrySet()) { - Map replicas = (Map) ((Map) shard.getValue() ).get("replicas"); + byte[] collectionState = + cluster + .getZkClient() + .getData("/collections/" + COLLECTION_NAME + "/state.json", null, null, true); + + Map> infectedState = + (Map>) Utils.fromJSON(collectionState); + Map shards = + (Map) infectedState.get(COLLECTION_NAME).get("shards"); + for (Map.Entry shard : shards.entrySet()) { + Map replicas = + (Map) ((Map) shard.getValue()).get("replicas"); for (Map.Entry replica : replicas.entrySet()) { ((Map) replica.getValue()).put("state", Replica.State.DOWN.toString()); } } - cluster.getZkClient().setData("/collections/" + COLLECTION_NAME + "/state.json", Utils.toJSON(infectedState) - , true); + cluster + .getZkClient() + .setData( + "/collections/" + COLLECTION_NAME + "/state.json", Utils.toJSON(infectedState), true); } - protected static final String STD_CONF = "{\n" + - " \"authentication\":{\n" + - " \"blockUnknown\": true,\n" + - " \"class\":\"solr.BasicAuthPlugin\",\n" + - " \"credentials\":{\"solr\":\"EEKn7ywYk5jY8vG9TyqlG2jvYuvh1Q7kCCor6Hqm320= 6zkmjMjkMKyJX6/f0VarEWQujju5BzxZXub6WOrEKCw=\"}\n" + - " },\n" + - " \"authorization\":{\n" + - " \"class\":\"solr.RuleBasedAuthorizationPlugin\",\n" + - " \"permissions\":[\n" + - " {\"name\":\"security-edit\", \"role\":\"admin\"},\n" + - " {\"name\":\"collection-admin-edit\", \"role\":\"admin\"},\n" + - " {\"name\":\"core-admin-edit\", \"role\":\"admin\"}\n" + - " ],\n" + - " \"user-role\":{\"solr\":\"admin\"}\n" + - " }\n" + - "}"; - - + protected static final String STD_CONF = + "{\n" + + " \"authentication\":{\n" + + " \"blockUnknown\": true,\n" + + " \"class\":\"solr.BasicAuthPlugin\",\n" + + " \"credentials\":{\"solr\":\"EEKn7ywYk5jY8vG9TyqlG2jvYuvh1Q7kCCor6Hqm320= 
6zkmjMjkMKyJX6/f0VarEWQujju5BzxZXub6WOrEKCw=\"}\n" + + " },\n" + + " \"authorization\":{\n" + + " \"class\":\"solr.RuleBasedAuthorizationPlugin\",\n" + + " \"permissions\":[\n" + + " {\"name\":\"security-edit\", \"role\":\"admin\"},\n" + + " {\"name\":\"collection-admin-edit\", \"role\":\"admin\"},\n" + + " {\"name\":\"core-admin-edit\", \"role\":\"admin\"}\n" + + " ],\n" + + " \"user-role\":{\"solr\":\"admin\"}\n" + + " }\n" + + "}"; } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRSAKeyPair.java b/solr/core/src/test/org/apache/solr/cloud/TestRSAKeyPair.java index 3c5f7934eee..5b41cbac1d0 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestRSAKeyPair.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestRSAKeyPair.java @@ -16,45 +16,45 @@ */ package org.apache.solr.cloud; -import org.apache.solr.SolrTestCase; -import org.apache.solr.util.CryptoKeys; -import org.junit.Test; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.not; import java.net.URL; import java.nio.ByteBuffer; - -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.not; +import org.apache.solr.SolrTestCase; +import org.apache.solr.util.CryptoKeys; +import org.junit.Test; public class TestRSAKeyPair extends SolrTestCase { - @Test - public void testGenKeyPair() throws Exception { - testRoundTrip(new CryptoKeys.RSAKeyPair()); - } + @Test + public void testGenKeyPair() throws Exception { + testRoundTrip(new CryptoKeys.RSAKeyPair()); + } - @Test - public void testReadKeysFromDisk() throws Exception { - URL privateKey = getClass().getClassLoader().getResource("cryptokeys/priv_key512_pkcs8.pem"); - URL publicKey = getClass().getClassLoader().getResource("cryptokeys/pub_key512.der"); + @Test + public void testReadKeysFromDisk() throws Exception { + URL privateKey = getClass().getClassLoader().getResource("cryptokeys/priv_key512_pkcs8.pem"); + URL publicKey = getClass().getClassLoader().getResource("cryptokeys/pub_key512.der"); - testRoundTrip(new CryptoKeys.RSAKeyPair(privateKey, publicKey)); - } + testRoundTrip(new CryptoKeys.RSAKeyPair(privateKey, publicKey)); + } - private void testRoundTrip(CryptoKeys.RSAKeyPair kp) throws Exception { - final byte[] plaintext = new byte[random().nextInt(64)]; - random().nextBytes(plaintext); + private void testRoundTrip(CryptoKeys.RSAKeyPair kp) throws Exception { + final byte[] plaintext = new byte[random().nextInt(64)]; + random().nextBytes(plaintext); - byte[] encrypted = kp.encrypt(ByteBuffer.wrap(plaintext)); - assertThat(plaintext, not(equalTo(encrypted))); + byte[] encrypted = kp.encrypt(ByteBuffer.wrap(plaintext)); + assertThat(plaintext, not(equalTo(encrypted))); - byte[] decrypted = CryptoKeys.decryptRSA(encrypted, kp.getPublicKey()); + byte[] decrypted = CryptoKeys.decryptRSA(encrypted, kp.getPublicKey()); - assertTrue("Decrypted text is shorter than original text.", decrypted.length >= plaintext.length); + assertTrue( + "Decrypted text is shorter than original text.", decrypted.length >= plaintext.length); - // Pad with null bytes because RSAKeyPair uses RSA/ECB/NoPadding - int pad = decrypted.length - plaintext.length; - final byte[] padded = new byte[decrypted.length]; - System.arraycopy(plaintext, 0, padded, pad, plaintext.length); - assertArrayEquals(padded, decrypted); - } + // Pad with null bytes because RSAKeyPair uses RSA/ECB/NoPadding + int pad = decrypted.length - plaintext.length; + final byte[] padded = new byte[decrypted.length]; + System.arraycopy(plaintext, 0, padded, 
pad, plaintext.length); + assertArrayEquals(padded, decrypted); + } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java index 26b2fdf94c3..5ae7ac85e7e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestRandomFlRTGCloud.java @@ -31,7 +31,9 @@ import java.util.Random; import java.util.Set; import java.util.TreeSet; - +import javax.xml.stream.XMLStreamConstants; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.XMLStreamReader; import org.apache.commons.io.FilenameUtils; import org.apache.lucene.util.TestUtil; import org.apache.solr.client.solrj.ResponseParser; @@ -62,88 +64,91 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.xml.stream.XMLStreamConstants; -import javax.xml.stream.XMLStreamException; -import javax.xml.stream.XMLStreamReader; - -/** @see TestCloudPseudoReturnFields */ -@RandomizeSSL(clientAuth=0.0,reason="client auth uses too much RAM") +/** + * @see TestCloudPseudoReturnFields + */ +@RandomizeSSL(clientAuth = 0.0, reason = "client auth uses too much RAM") public class TestRandomFlRTGCloud extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final String DEBUG_LABEL = MethodHandles.lookup().lookupClass().getName(); private static final String COLLECTION_NAME = DEBUG_LABEL + "_collection"; - + /** A basic client for operations at the cloud level, default collection will be set */ private static CloudSolrClient CLOUD_CLIENT; /** One client per node */ - private static final List CLIENTS = Collections.synchronizedList(new ArrayList<>(5)); + private static final List CLIENTS = + Collections.synchronizedList(new ArrayList<>(5)); /** Always included in fl so we can vet what doc we're looking at */ private static final FlValidator ID_VALIDATOR = new SimpleFieldValueValidator("id"); - /** Since nested documents are not tested, when _root_ is declared in schema, it is always the same as id */ - private static final FlValidator ROOT_VALIDATOR = new RenameFieldValueValidator("id" , "_root_"); - - /** + /** + * Since nested documents are not tested, when _root_ is declared in schema, it is always the same + * as id + */ + private static final FlValidator ROOT_VALIDATOR = new RenameFieldValueValidator("id", "_root_"); + + /** * Types of things we will randomly ask for in fl param, and validate in response docs. * * @see #addRandomFlValidators */ - private static final List FL_VALIDATORS = List.of( - new GlobValidator("*"), - new GlobValidator("*_i"), - new GlobValidator("*_s"), - new GlobValidator("a*"), - new DocIdValidator(), - new DocIdValidator("my_docid_alias"), - new ShardValidator(), - new ShardValidator("my_shard_alias"), - new ValueAugmenterValidator(42), - new ValueAugmenterValidator(1976, "val_alias"), - // - new RenameFieldValueValidator("id", "my_id_alias"), - // NOTE: we add a SimpleFieldValueValidator below to check that we can enforce the presence of this field, - // even when it may have been "renamed" by the transformer above? (this and other such instances are - // marked with `//REQ`); also add a RenameFieldValueValidator to "fork" values, marked with `//FORK`. 
- new SimpleFieldValueValidator("id"), //REQ - new SimpleFieldValueValidator("aaa_i"), - new RenameFieldValueValidator("bbb_i", "my_int_field_alias"), - new RenameFieldValueValidator("bbb_i", "my_int_field_alias2"), //FORK - new SimpleFieldValueValidator("bbb_i"), //REQ - new SimpleFieldValueValidator("ccc_s"), - new RenameFieldValueValidator("ddd_s", "my_str_field_alias"), - new RenameFieldValueValidator("ddd_s", "my_str_field_alias2"), // FORK - new SimpleFieldValueValidator("ddd_s"), //REQ - - new RawFieldValueValidator("json", "eee_s", "my_json_field_alias"), - new RenameFieldValueValidator("eee_s", "my_escaped_json_field_alias"), // FORK - new SimpleFieldValueValidator("eee_s"), //REQ - new RawFieldValueValidator("json", "fff_s"), - new RawFieldValueValidator("xml", "ggg_s", "my_xml_field_alias"), - new RenameFieldValueValidator("ggg_s", "my_escaped_xml_field_alias"), // FORK - new SimpleFieldValueValidator("ggg_s"), //REQ - new RawFieldValueValidator("xml", "hhh_s"), - - new NotIncludedValidator("bogus_unused_field_ss"), - new NotIncludedValidator("bogus_alias","bogus_alias:other_bogus_field_i"), - new NotIncludedValidator("bogus_raw_alias","bogus_raw_alias:[xml f=bogus_raw_field_ss]"), - // - new FunctionValidator("aaa_i"), // fq field - new FunctionValidator("aaa_i", "func_aaa_alias"), - new GeoTransformerValidator("geo_1_srpt"), - new GeoTransformerValidator("geo_2_srpt","my_geo_alias"), - new RenameFieldValueValidator("geo_2_srpt", "my_geo_alias2"), // FORK - new SimpleFieldValueValidator("geo_2_srpt"), //REQ - new ExplainValidator(), - new ExplainValidator("explain_alias"), - new SubQueryValidator(), - new NotIncludedValidator("score"), - new NotIncludedValidator("score","score_alias:score")); - + private static final List FL_VALIDATORS = + List.of( + new GlobValidator("*"), + new GlobValidator("*_i"), + new GlobValidator("*_s"), + new GlobValidator("a*"), + new DocIdValidator(), + new DocIdValidator("my_docid_alias"), + new ShardValidator(), + new ShardValidator("my_shard_alias"), + new ValueAugmenterValidator(42), + new ValueAugmenterValidator(1976, "val_alias"), + // + new RenameFieldValueValidator("id", "my_id_alias"), + // NOTE: we add a SimpleFieldValueValidator below to check that we can enforce the + // presence of this field, even when it may have been "renamed" by the transformer above? + // (this and other such instances are marked with `//REQ`); also add a + // RenameFieldValueValidator to "fork" values, marked with `//FORK`. 
+ new SimpleFieldValueValidator("id"), // REQ + new SimpleFieldValueValidator("aaa_i"), + new RenameFieldValueValidator("bbb_i", "my_int_field_alias"), + new RenameFieldValueValidator("bbb_i", "my_int_field_alias2"), // FORK + new SimpleFieldValueValidator("bbb_i"), // REQ + new SimpleFieldValueValidator("ccc_s"), + new RenameFieldValueValidator("ddd_s", "my_str_field_alias"), + new RenameFieldValueValidator("ddd_s", "my_str_field_alias2"), // FORK + new SimpleFieldValueValidator("ddd_s"), // REQ + new RawFieldValueValidator("json", "eee_s", "my_json_field_alias"), + new RenameFieldValueValidator("eee_s", "my_escaped_json_field_alias"), // FORK + new SimpleFieldValueValidator("eee_s"), // REQ + new RawFieldValueValidator("json", "fff_s"), + new RawFieldValueValidator("xml", "ggg_s", "my_xml_field_alias"), + new RenameFieldValueValidator("ggg_s", "my_escaped_xml_field_alias"), // FORK + new SimpleFieldValueValidator("ggg_s"), // REQ + new RawFieldValueValidator("xml", "hhh_s"), + new NotIncludedValidator("bogus_unused_field_ss"), + new NotIncludedValidator("bogus_alias", "bogus_alias:other_bogus_field_i"), + new NotIncludedValidator("bogus_raw_alias", "bogus_raw_alias:[xml f=bogus_raw_field_ss]"), + // + new FunctionValidator("aaa_i"), // fq field + new FunctionValidator("aaa_i", "func_aaa_alias"), + new GeoTransformerValidator("geo_1_srpt"), + new GeoTransformerValidator("geo_2_srpt", "my_geo_alias"), + new RenameFieldValueValidator("geo_2_srpt", "my_geo_alias2"), // FORK + new SimpleFieldValueValidator("geo_2_srpt"), // REQ + new ExplainValidator(), + new ExplainValidator("explain_alias"), + new SubQueryValidator(), + new NotIncludedValidator("score"), + new NotIncludedValidator("score", "score_alias:score")); + @BeforeClass public static void createMiniSolrCloudCluster() throws Exception { - // 50% runs use single node/shard a FL_VALIDATORS with all validators known to work on single node + // 50% runs use single node/shard and FL_VALIDATORS with all validators known to work on single + // node // 50% runs use multi node/shard with FL_VALIDATORS only containing stuff that works in cloud final boolean singleCoreMode = random().nextBoolean(); @@ -153,10 +158,10 @@ public static void createMiniSolrCloudCluster() throws Exception { final int numShards = singleCoreMode ? 1 : 2; // ... including some forwarded requests from nodes not hosting a shard final int numNodes = 1 + (singleCoreMode ? 
0 : (numShards * repFactor)); - + final String configName = DEBUG_LABEL + "_config-set"; + final Path configDir = TEST_COLL1_CONF(); - + configureCluster(numNodes).addConfig(configName, configDir).configure(); CLOUD_CLIENT = cluster.getSolrClient(); @@ -186,8 +191,9 @@ private static void afterClass() throws Exception { CLIENTS.clear(); } - /** - * Tests that all TransformerFactories that are implicitly provided by Solr are tested in this class + /** + * Tests that all TransformerFactories that are implicitly provided by Solr are tested in this + * class * * @see FlValidator#getDefaultTransformerFactoryName * @see #FL_VALIDATORS @@ -195,10 +201,10 @@ private static void afterClass() throws Exception { */ public void testCoverage() throws Exception { final Set implicit = new LinkedHashSet<>(); - for (String t : TransformerFactory.defaultFactories.keySet()) { + for (String t : TransformerFactory.defaultFactories.keySet()) { implicit.add(t); } - + final Set covered = new LinkedHashSet<>(); for (FlValidator v : FL_VALIDATORS) { String t = v.getDefaultTransformerFactoryName(); @@ -209,35 +215,40 @@ public void testCoverage() throws Exception { // items should only be added to this list if it's known that they do not work with RTG // and a specific Jira for fixing this is listed as a comment - final List knownBugs = Arrays.asList - ( - "child" // way to complicatd to vet with this test, see SOLR-9379 instead - ); + final List knownBugs = + Arrays.asList( + "child" // way too complicated to vet with this test, see SOLR-9379 instead + ); for (String buggy : knownBugs) { - assertFalse(buggy + " is listed as a being a known bug, " + - "but it exists in the set of 'covered' TransformerFactories", - covered.contains(buggy)); - assertTrue(buggy + " is listed as a known bug, " + - "but it does not even exist in the set of 'implicit' TransformerFactories", - implicit.remove(buggy)); - } - + assertFalse( + buggy + + " is listed as being a known bug, " + + "but it exists in the set of 'covered' TransformerFactories", + covered.contains(buggy)); + assertTrue( + buggy + + " is listed as a known bug, " + + "but it does not even exist in the set of 'implicit' TransformerFactories", + implicit.remove(buggy)); + } + + implicit.removeAll(covered); - assertEquals("Some implicit TransformerFactories are not yet tested by this class: " + implicit, - 0, implicit.size()); + assertEquals( + "Some implicit TransformerFactories are not yet tested by this class: " + implicit, + 0, + implicit.size()); } - public void testRandomizedUpdatesAndRTGs() throws Exception { final int maxNumDocs = atLeast(100); - final int numSeedDocs = random().nextInt(maxNumDocs / 10); // at most ~10% of the max possible docs + final int numSeedDocs = + random().nextInt(maxNumDocs / 10); // at most ~10% of the max possible docs final int numIters = atLeast(maxNumDocs * 10); final SolrInputDocument[] knownDocs = new SolrInputDocument[maxNumDocs]; - log.info("Starting {} iters by seeding {} of {} max docs", - numIters, numSeedDocs, maxNumDocs); + log.info("Starting {} iters by seeding {} of {} max docs", numIters, numSeedDocs, maxNumDocs); int itersSinceLastCommit = 0; for (int i = 0; i < numIters; i++) { @@ -252,13 +263,15 @@ public void testRandomizedUpdatesAndRTGs() throws Exception { } } - /** - * Randomly chooses to do a commit, where the probability of doing so increases the longer it's been since - * a commit was done. 
+ /** + * Randomly chooses to do a commit, where the probability of doing so increases the longer it's + * been since a commit was done. * * @returns 0 if a commit was done, else itersSinceLastCommit + 1 */ - private static int maybeCommit(final Random rand, final int itersSinceLastCommit, final int numIters) throws IOException, SolrServerException { + private static int maybeCommit( + final Random rand, final int itersSinceLastCommit, final int numIters) + throws IOException, SolrServerException { final float threshold = itersSinceLastCommit / numIters; if (rand.nextFloat() < threshold) { log.info("COMMIT"); @@ -267,12 +280,13 @@ private static int maybeCommit(final Random rand, final int itersSinceLastCommit } return itersSinceLastCommit + 1; } - - private void assertOneIter(final SolrInputDocument[] knownDocs) throws IOException, SolrServerException { + + private void assertOneIter(final SolrInputDocument[] knownDocs) + throws IOException, SolrServerException { // we want to occasionally test more then one doc per RTG final int numDocsThisIter = TestUtil.nextInt(random(), 1, atLeast(2)); int numDocsThisIterThatExist = 0; - + // pick some random docIds for this iteration and ... final int[] docIds = new int[numDocsThisIter]; for (int i = 0; i < numDocsThisIter; i++) { @@ -288,74 +302,87 @@ private void assertOneIter(final SolrInputDocument[] knownDocs) throws IOExcepti if (random().nextInt(numDocsThisIter + 2) <= numDocsThisIterThatExist) { if (0 < TestUtil.nextInt(random(), 0, 13)) { - log.info("RTG: numDocsThisIter={} numDocsThisIterThatExist={}, docIds={}", - numDocsThisIter, numDocsThisIterThatExist, docIds); + log.info( + "RTG: numDocsThisIter={} numDocsThisIterThatExist={}, docIds={}", + numDocsThisIter, + numDocsThisIterThatExist, + docIds); assertRTG(knownDocs, docIds); } else { // sporadically delete some docs instead of doing an RTG - log.info("DEL: numDocsThisIter={} numDocsThisIterThatExist={}, docIds={}", - numDocsThisIter, numDocsThisIterThatExist, docIds); + log.info( + "DEL: numDocsThisIter={} numDocsThisIterThatExist={}, docIds={}", + numDocsThisIter, + numDocsThisIterThatExist, + docIds); assertDelete(knownDocs, docIds); } } else { - log.info("UPD: numDocsThisIter={} numDocsThisIterThatExist={}, docIds={}", - numDocsThisIter, numDocsThisIterThatExist, docIds); + log.info( + "UPD: numDocsThisIter={} numDocsThisIterThatExist={}, docIds={}", + numDocsThisIter, + numDocsThisIterThatExist, + docIds); assertUpdate(knownDocs, docIds); } } - /** - * Does some random indexing of the specified docIds and adds them to knownDocs - */ - private void assertUpdate(final SolrInputDocument[] knownDocs, final int[] docIds) throws IOException, SolrServerException { - + /** Does some random indexing of the specified docIds and adds them to knownDocs */ + private void assertUpdate(final SolrInputDocument[] knownDocs, final int[] docIds) + throws IOException, SolrServerException { + for (final int docId : docIds) { // TODO: this method should also do some atomic update operations (ie: "inc" and "set") // (but make sure to eval the updates locally as well before modifying knownDocs) knownDocs[docId] = addRandomDocument(docId); } } - + /** * Deletes the docIds specified and asserts the results are valid, updateing knownDocs accordingly */ - private void assertDelete(final SolrInputDocument[] knownDocs, final int[] docIds) throws IOException, SolrServerException { + private void assertDelete(final SolrInputDocument[] knownDocs, final int[] docIds) + throws IOException, SolrServerException { 
List ids = new ArrayList<>(docIds.length); for (final int docId : docIds) { ids.add("" + docId); knownDocs[docId] = null; } - assertEquals("Failed delete: " + docIds, 0, getRandClient(random()).deleteById(ids).getStatus()); + assertEquals( + "Failed delete: " + docIds, 0, getRandClient(random()).deleteById(ids).getStatus()); } - + /** - * Adds one randomly generated document with the specified docId, asserting success, and returns + * Adds one randomly generated document with the specified docId, asserting success, and returns * the document added */ - private SolrInputDocument addRandomDocument(final int docId) throws IOException, SolrServerException { + private SolrInputDocument addRandomDocument(final int docId) + throws IOException, SolrServerException { final SolrClient client = getRandClient(random()); - final SolrInputDocument doc = sdoc("id", "" + docId, - "aaa_i", random().nextInt(), - "bbb_i", random().nextInt(), - // - "ccc_s", TestUtil.randomSimpleString(random()), - "ddd_s", TestUtil.randomSimpleString(random()), - "eee_s", makeJson(TestUtil.randomSimpleString(random())), - "fff_s", makeJson(TestUtil.randomSimpleString(random())), - "ggg_s", makeXml(TestUtil.randomSimpleString(random())), - "hhh_s", makeXml(TestUtil.randomSimpleString(random())), - // - "geo_1_srpt", GeoTransformerValidator.getValueForIndexing(random()), - "geo_2_srpt", GeoTransformerValidator.getValueForIndexing(random()), - // for testing subqueries - "next_2_ids_ss", String.valueOf(docId + 1), - "next_2_ids_ss", String.valueOf(docId + 2), - // for testing prefix globbing - "axx_i", random().nextInt(), - "ayy_i", random().nextInt(), - "azz_s", TestUtil.randomSimpleString(random())); - + final SolrInputDocument doc = + sdoc( + "id", "" + docId, + "aaa_i", random().nextInt(), + "bbb_i", random().nextInt(), + // + "ccc_s", TestUtil.randomSimpleString(random()), + "ddd_s", TestUtil.randomSimpleString(random()), + "eee_s", makeJson(TestUtil.randomSimpleString(random())), + "fff_s", makeJson(TestUtil.randomSimpleString(random())), + "ggg_s", makeXml(TestUtil.randomSimpleString(random())), + "hhh_s", makeXml(TestUtil.randomSimpleString(random())), + // + "geo_1_srpt", GeoTransformerValidator.getValueForIndexing(random()), + "geo_2_srpt", GeoTransformerValidator.getValueForIndexing(random()), + // for testing subqueries + "next_2_ids_ss", String.valueOf(docId + 1), + "next_2_ids_ss", String.valueOf(docId + 2), + // for testing prefix globbing + "axx_i", random().nextInt(), + "ayy_i", random().nextInt(), + "azz_s", TestUtil.randomSimpleString(random())); + log.info("ADD: {} = {}", docId, doc); assertEquals(0, client.add(doc).getStatus()); return doc; @@ -394,12 +421,13 @@ private String makeXml(String s) { } private static final ResponseParser RAW_XML_RESPONSE_PARSER = new NoOpResponseParser(); - private static final ResponseParser RAW_JSON_RESPONSE_PARSER = new NoOpResponseParser() { - @Override - public String getWriterType() { - return "json"; - } - }; + private static final ResponseParser RAW_JSON_RESPONSE_PARSER = + new NoOpResponseParser() { + @Override + public String getWriterType() { + return "json"; + } + }; private static ResponseParser modifyParser(HttpSolrClient client, final String wt) { final ResponseParser ret = client.getParser(); @@ -416,22 +444,25 @@ private static ResponseParser modifyParser(HttpSolrClient client, final String w } /** - * Does one or more RTG request for the specified docIds with a randomized fl & fq params, asserting - * that the returned document (if any) makes sense given the 
expected SolrInputDocuments + * Does one or more RTG request for the specified docIds with a randomized fl & fq params, + * asserting that the returned document (if any) makes sense given the expected SolrInputDocuments */ - private void assertRTG(final SolrInputDocument[] knownDocs, final int[] docIds) throws IOException, SolrServerException { + private void assertRTG(final SolrInputDocument[] knownDocs, final int[] docIds) + throws IOException, SolrServerException { final SolrClient client = getRandClient(random()); - // NOTE: not using SolrClient.getById or getByIds because we want to force choice of "id" vs "ids" params - final ModifiableSolrParams params = params("qt","/get"); - + // NOTE: not using SolrClient.getById or getByIds because we want to force choice of "id" vs + // "ids" params + final ModifiableSolrParams params = params("qt", "/get"); + // random fq -- nothing fancy, secondary concern for our test final Integer FQ_MAX = usually() ? null : random().nextInt(); if (null != FQ_MAX) { params.add("fq", "aaa_i:[* TO " + FQ_MAX + "]"); } - + final Set validators = new LinkedHashSet<>(); - validators.add(ID_VALIDATOR); // always include id so we can be confident which doc we're looking at + // always include id so we can be confident which doc we're looking at + validators.add(ID_VALIDATOR); validators.add(ROOT_VALIDATOR); // always added in a nested schema, with the same value as id addRandomFlValidators(random(), validators); FlValidator.addParams(validators, params); @@ -441,8 +472,9 @@ private void assertRTG(final SolrInputDocument[] knownDocs, final int[] docIds) for (int docId : docIds) { // every docId will be included in the request idsToRequest.add("" + docId); - - // only docs that should actually exist and match our (optional) filter will be expected in response + + // only docs that should actually exist and match our (optional) filter will be expected in + // response if (null != knownDocs[docId]) { Integer filterVal = (Integer) knownDocs[docId].getFieldValue("aaa_i"); if (null == FQ_MAX || ((null != filterVal) && filterVal.intValue() <= FQ_MAX.intValue())) { @@ -462,7 +494,7 @@ private void assertRTG(final SolrInputDocument[] knownDocs, final int[] docIds) if (random().nextBoolean()) { // each id in its own param for (String id : idsToRequest) { - params.add("id",id); + params.add("id", id); } } else { // add one or more comma separated ids params @@ -471,7 +503,7 @@ private void assertRTG(final SolrInputDocument[] knownDocs, final int[] docIds) } } else { assert 1 == idsToRequest.size(); - params.add("id",idsToRequest.get(0)); + params.add("id", idsToRequest.get(0)); } String wt = params.get(CommonParams.WT, "javabin"); @@ -512,44 +544,50 @@ private void assertRTG(final SolrInputDocument[] knownDocs, final int[] docIds) } assertNotNull(params + " => " + rsp, docs); - - assertEquals("num docs mismatch: " + params + " => " + docsToExpect + " vs " + docs, - docsToExpect.size(), docs.size()); - + + assertEquals( + "num docs mismatch: " + params + " => " + docsToExpect + " vs " + docs, + docsToExpect.size(), + docs.size()); + // NOTE: RTG makes no garuntees about the order docs will be returned in when multi requested for (SolrDocument actual : docs) { try { int actualId = assertParseInt("id", actual.getFirstValue("id")); final SolrInputDocument expected = knownDocs[actualId]; assertNotNull("expected null doc but RTG returned: " + actual, expected); - + Set expectedFieldNames = new TreeSet<>(); for (FlValidator v : validators) { 
expectedFieldNames.addAll(v.assertRTGResults(validators, expected, actual, wt)); } // ensure only expected field names are in the actual document Set actualFieldNames = new TreeSet<>(actual.getFieldNames()); - assertEquals("Actual field names returned differs from expected", expectedFieldNames, actualFieldNames); + assertEquals( + "Actual field names returned differs from expected", + expectedFieldNames, + actualFieldNames); } catch (AssertionError ae) { throw new AssertionError(params + " => " + actual + ": " + ae.getMessage(), ae); } } } - /** + /** * trivial helper method to deal with diff response structure between using a single 'id' param vs * 2 or more 'id' params (or 1 or more 'ids' params). * - * @return List from response, or a synthetic one created from single response doc if - * expectList was false; May be empty; May be null if response included null list. + * @return List from response, or a synthetic one created from single response doc if + * expectList was false; May be empty; May be null if response included null list. */ - private static SolrDocumentList getDocsFromRTGResponse(final boolean expectList, final QueryResponse rsp) { + private static SolrDocumentList getDocsFromRTGResponse( + final boolean expectList, final QueryResponse rsp) { if (expectList) { return rsp.getResults(); } - + // else: expect single doc, make our own list... - + final SolrDocumentList result = new SolrDocumentList(); NamedList raw = rsp.getResponse(); Object doc = raw.get("doc"); @@ -570,7 +608,8 @@ private static SolrDocumentList getSolrDocumentList(Map response } @SuppressWarnings("unchecked") - private static SolrDocumentList getDocsFromJsonResponse(final boolean expectList, final String rsp) throws IOException { + private static SolrDocumentList getDocsFromJsonResponse( + final boolean expectList, final String rsp) throws IOException { Map nl = (Map) ObjectBuilder.fromJSON(rsp); if (expectList) { return getSolrDocumentList((Map) nl.get("response")); @@ -584,13 +623,17 @@ private static SolrDocumentList getDocsFromJsonResponse(final boolean expectList } } - private static SolrDocumentList getDocsFromXmlResponse(final boolean expectList, final String rsp) { - return getDocsFromRTGResponse(expectList, new QueryResponse(new RawCapableXMLResponseParser().processResponse(new StringReader(rsp)), null)); + private static SolrDocumentList getDocsFromXmlResponse( + final boolean expectList, final String rsp) { + return getDocsFromRTGResponse( + expectList, + new QueryResponse( + new RawCapableXMLResponseParser().processResponse(new StringReader(rsp)), null)); } /** - * returns a random SolrClient -- either a CloudSolrClient, or an HttpSolrClient pointed - * at a node in our cluster + * returns a random SolrClient -- either a CloudSolrClient, or an HttpSolrClient pointed at a node + * in our cluster */ public static SolrClient getRandClient(Random rand) { int numClients = CLIENTS.size(); @@ -600,22 +643,22 @@ public static SolrClient getRandClient(Random rand) { public static void waitForRecoveriesToFinish(CloudSolrClient client) throws Exception { assert null != client.getDefaultCollection(); - AbstractDistribZkTestBase.waitForRecoveriesToFinish(client.getDefaultCollection(), - client.getZkStateReader(), - true, true, 330); + AbstractDistribZkTestBase.waitForRecoveriesToFinish( + client.getDefaultCollection(), client.getZkStateReader(), true, true, 330); } - /** - * Abstraction for diff types of things that can be added to an 'fl' param that can validate - * the results are correct compared to an 
expected SolrInputDocument + /** + * Abstraction for diff types of things that can be added to an 'fl' param that can validate the + * results are correct compared to an expected SolrInputDocument */ private interface FlValidator { - - /** - * Given a list of FlValidators, adds one or more fl params that corrispond to the entire set, + + /** + * Given a list of FlValidators, adds one or more fl params that correspond to the entire set, * as well as any other special case top level params required by the validators. */ - public static void addParams(final Collection validators, final ModifiableSolrParams params) { + public static void addParams( + final Collection validators, final ModifiableSolrParams params) { final List fls = new ArrayList<>(validators.size()); for (FlValidator v : validators) { params.add(v.getExtraRequestParams()); @@ -625,13 +668,13 @@ public static void addParams(final Collection validators, final Mod } /** - * Indicates if this validator is for a transformer that returns true from - * {@link DocTransformer#needsSolrIndexSearcher}. Other validators for transformers that - * do not require a re-opened searcher (but may have slightly diff behavior depending - * on wether a doc comesfrom the index or from the update log) may use this information to - * decide wether they wish to enforce stricter assertions on the resulting document. + * Indicates if this validator is for a transformer that returns true from {@link + * DocTransformer#needsSolrIndexSearcher}. Other validators for transformers that do + * not require a re-opened searcher (but may have slightly diff behavior depending on + * whether a doc comes from the index or from the update log) may use this information to decide + * whether they wish to enforce stricter assertions on the resulting document. * - * The default implementation always returns false + * + * <p>The default implementation always returns false * * @see DocIdValidator */ @@ -640,45 +683,52 @@ public default boolean requiresRealtimeSearcherReOpen() { } /** - * the name of a transformer listed in {@link TransformerFactory#defaultFactories} that this validator - * corrisponds to, or null if not applicable. Used for testing coverage of - * Solr's implicitly supported transformers. + * the name of a transformer listed in {@link TransformerFactory#defaultFactories} that this + * validator corresponds to, or null if not applicable. Used for testing coverage of Solr's + * implicitly supported transformers. + * + *

<p>Default behavior is to return null * - * Default behavior is to return null * @see #testCoverage */ - public default String getDefaultTransformerFactoryName() { return null; } - + public default String getDefaultTransformerFactoryName() { + return null; + } + + /** Any special case params that must be added to the request for this validator */ + public default SolrParams getExtraRequestParams() { + return params(); + } + /** - * Any special case params that must be added to the request for this validator - */ - public default SolrParams getExtraRequestParams() { return params(); } - - /** - * Must return a non null String that can be used in an fl param -- either by itself, - * or with other items separated by commas + * Must return a non-null String that can be used in an fl param -- either by itself, or with + * other items separated by commas */ public String getFlParam(); - /** - * Given the expected document and the actual document returned from an RTG, this method - * should assert that relative to what {@link #getFlParam} returns, the actual document contained - * what it should relative to the expected document. + /** + * Given the expected document and the actual document returned from an RTG, this method should + * assert that relative to what {@link #getFlParam} returns, the actual document contained what + * it should relative to the expected document. * * @param validators all validators in use for this request, including the current one - * @param expected a document containing the expected fields & values that should be in the index + * @param expected a document containing the expected fields & values that should be in the + * index * @param actual A document that was returned by an RTG request * @param wt the `wt` serialization of the response * @return A set of "field names" in the actual document that this validator expected. 
*/ - public Collection assertRTGResults(final Collection validators, - final SolrInputDocument expected, - final SolrDocument actual, - final String wt); + public Collection assertRTGResults( + final Collection validators, + final SolrInputDocument expected, + final SolrDocument actual, + final String wt); } - - /** - * Some validators behave in a way that "suppresses" real fields even when they would otherwise match a glob + + /** + * Some validators behave in a way that "suppresses" real fields even when they would otherwise + * match a glob + * * @see GlobValidator */ private interface SuppressRealFields { @@ -688,26 +738,32 @@ private interface SuppressRealFields { private abstract static class FieldValueValidator implements FlValidator { protected final String expectedFieldName; protected final String actualFieldName; + public FieldValueValidator(final String expectedFieldName, final String actualFieldName) { this.expectedFieldName = expectedFieldName; this.actualFieldName = actualFieldName; } + public abstract String getFlParam(); @Override - public Collection assertRTGResults(final Collection validators, - final SolrInputDocument expected, - final SolrDocument actual, - final String wt) { - assertEquals(expectedFieldName + " vs " + actualFieldName, - expected.getFieldValue(expectedFieldName), normalize(wt, actual.getFirstValue(actualFieldName))); + public Collection assertRTGResults( + final Collection validators, + final SolrInputDocument expected, + final SolrDocument actual, + final String wt) { + assertEquals( + expectedFieldName + " vs " + actualFieldName, + expected.getFieldValue(expectedFieldName), + normalize(wt, actual.getFirstValue(actualFieldName))); return Collections.singleton(actualFieldName); } } /** - * Json parsing results in all Long and Double number values; `expected` values are all (conveniently!) - * expressed as Integer and Float, so we do a little normalization here so that the values are compatible + * Json parsing results in all Long and Double number values; `expected` values are all + * (conveniently!) 
expressed as Integer and Float, so we do a little normalization here so that + * the values are compatible */ private static Object normalize(String wt, Object val) { if ("json".equals(wt) && val instanceof Number) { @@ -726,60 +782,76 @@ private static class SimpleFieldValueValidator extends FieldValueValidator { public SimpleFieldValueValidator(final String fieldName) { super(fieldName, fieldName); } - public String getFlParam() { return expectedFieldName; } + + public String getFlParam() { + return expectedFieldName; + } } - - private static class RenameFieldValueValidator extends FieldValueValidator implements SuppressRealFields { + + private static class RenameFieldValueValidator extends FieldValueValidator + implements SuppressRealFields { public RenameFieldValueValidator(final String origFieldName, final String alias) { super(origFieldName, alias); } - public String getFlParam() { return actualFieldName + ":" + expectedFieldName; } - public Set getSuppressedFields() { return Collections.singleton(expectedFieldName); } + + public String getFlParam() { + return actualFieldName + ":" + expectedFieldName; + } + + public Set getSuppressedFields() { + return Collections.singleton(expectedFieldName); + } } /** * Validator for {@link RawValueTransformerFactory} * - * This validator is fairly weak, because it doesn't do anything to verify the conditional logic - * in RawValueTransformerFactory realted to the output format -- but that's out of the scope of - * this randomized testing. - * - * What we're primarily concerned with is that the transformer does it's job and puts the string - * in the response, regardless of cloud/RTG/uncommited state of the document. + *

<p>This validator is fairly weak, because it doesn't do anything to verify the conditional + * logic in RawValueTransformerFactory related to the output format -- but that's out of the scope + * of this randomized testing. + * + *

<p>What we're primarily concerned with is that the transformer does its job and puts the + * string in the response, regardless of cloud/RTG/uncommitted state of the document. */ private static class RawFieldValueValidator extends RenameFieldValueValidator { final String type; final String alias; final SolrParams extraParams; + public RawFieldValueValidator(final String type, final String fieldName, final String alias) { // transformer is weird, default result key doesn't care what params are used... - super(fieldName, null == alias ? "["+type+"]" : alias); + super(fieldName, null == alias ? "[" + type + "]" : alias); this.type = type; this.alias = alias; this.extraParams = new ModifiableSolrParams().set(CommonParams.WT, type); } + public RawFieldValueValidator(final String type, final String fieldName) { this(type, fieldName, null); } + public String getFlParam() { return (null == alias ? "" : (alias + ":")) + "[" + type + " f=" + expectedFieldName + "]"; } + @Override - public Collection assertRTGResults(final Collection validators, - SolrInputDocument expected, - final SolrDocument actual, - final String wt) { + public Collection assertRTGResults( + final Collection validators, + SolrInputDocument expected, + final SolrDocument actual, + final String wt) { if ("json".equals(wt) && "json".equals(type)) { Object v = actual.get(actualFieldName); if (v instanceof Collection) { - // the json "array" type is indistinguishable from a multivalued field, so when `super` validates - // based on `actual.getFirstValue(...)`, it causes issues. Here we know that our raw values are only - // on single-valued fields, so we wrap it to work around `getFirstValue` in parent class. - // The same logic applies to `expected` (below) + // the json "array" type is indistinguishable from a multivalued field, so when `super` + // validates based on `actual.getFirstValue(...)`, it causes issues. Here we know that our + // raw values are only on single-valued fields, so we wrap it to work around + // `getFirstValue` in parent class. The same logic applies to `expected` (below) actual.setField(actualFieldName, Collections.singleton(v)); } try { - Object parsedExpected = ObjectBuilder.fromJSON((String) expected.getFieldValue(expectedFieldName)); + Object parsedExpected = + ObjectBuilder.fromJSON((String) expected.getFieldValue(expectedFieldName)); if (parsedExpected instanceof Collection) { // see note above parsedExpected = Collections.singleton(parsedExpected); @@ -791,7 +863,9 @@ public Collection assertRTGResults(final Collection validat } } else if ("xml".equals(wt) && "xml".equals(type)) { try { - Object parsedExpected = RawCapableXMLResponseParser.convertRawContent((String) expected.getFieldValue(expectedFieldName)); + Object parsedExpected = + RawCapableXMLResponseParser.convertRawContent( + (String) expected.getFieldValue(expectedFieldName)); expected = expected.deepCopy(); // need to copy before modifying expected! expected.setField(expectedFieldName, parsedExpected); } catch (XMLStreamException ex) { @@ -800,30 +874,35 @@ public Collection assertRTGResults(final Collection validat } return super.assertRTGResults(validators, expected, actual, wt); } + @Override public SolrParams getExtraRequestParams() { return extraParams; } + public String getDefaultTransformerFactoryName() { return type; } } /** - * Local extension of XMLResponseParser that is capable of handling "raw" xml field values, for the - * purpose of validation and consistency between expected vs. actual. 
+ * Local extension of XMLResponseParser that is capable of handling "raw" xml field values, for + * the purpose of validation and consistency between expected vs. actual. */ private static class RawCapableXMLResponseParser extends XMLResponseParser { private static String convertRawContent(String raw) throws XMLStreamException { - return XMLResponseParser.convertRawContent(raw, (parser) -> { - try { - return consumeRawContent0(parser); - } catch (XMLStreamException ex) { - // only called in the context of this test, so the extra exception wrapping is totally fine - throw new RuntimeException(ex); - } - }); + return XMLResponseParser.convertRawContent( + raw, + (parser) -> { + try { + return consumeRawContent0(parser); + } catch (XMLStreamException ex) { + // only called in the context of this test, so the extra exception wrapping is totally + // fine + throw new RuntimeException(ex); + } + }); } protected String consumeRawContent(XMLStreamReader parser) throws XMLStreamException { @@ -833,7 +912,7 @@ protected String consumeRawContent(XMLStreamReader parser) throws XMLStreamExcep private static String consumeRawContent0(XMLStreamReader parser) throws XMLStreamException { int depth = 0; StringBuilder sb = new StringBuilder(); - for (;;) { + for (; ; ) { int elementType = parser.next(); switch (elementType) { case XMLStreamConstants.START_ELEMENT: @@ -855,30 +934,40 @@ private static String consumeRawContent0(XMLStreamReader parser) throws XMLStrea } } - /** - * enforces that a valid [docid] is present in the response, possibly using a - * resultKey alias. By default the only validation of docId values is that they are an integer - * greater than or equal to -1 -- but if any other validator in use returns true - * from {@link #requiresRealtimeSearcherReOpen} then the constraint is tightened and values must - * be greater than or equal to 0 + /** + * enforces that a valid [docid] is present in the response, possibly using a + * resultKey alias. By default the only validation of docId values is that they are an integer + * greater than or equal to -1 -- but if any other validator in use returns true from + * {@link #requiresRealtimeSearcherReOpen} then the constraint is tightened and values must be + * greater than or equal to 0 */ private static class DocIdValidator implements FlValidator { private static final String NAME = "docid"; - private static final String USAGE = "["+NAME+"]"; + private static final String USAGE = "[" + NAME + "]"; private final String resultKey; + public DocIdValidator(final String resultKey) { this.resultKey = resultKey; } + public DocIdValidator() { this(USAGE); } - public String getDefaultTransformerFactoryName() { return NAME; } - public String getFlParam() { return USAGE.equals(resultKey) ? resultKey : resultKey+":"+USAGE; } - public Collection assertRTGResults(final Collection validators, - final SolrInputDocument expected, - final SolrDocument actual, - final String wt) { - Object value = normalize(wt, actual.getFirstValue(resultKey)); + + public String getDefaultTransformerFactoryName() { + return NAME; + } + + public String getFlParam() { + return USAGE.equals(resultKey) ? 
resultKey : resultKey + ":" + USAGE; + } + + public Collection assertRTGResults( + final Collection validators, + final SolrInputDocument expected, + final SolrDocument actual, + final String wt) { + Object value = normalize(wt, actual.getFirstValue(resultKey)); assertNotNull(getFlParam() + " => no value in actual doc", value); assertTrue(USAGE + " must be an Integer: " + value, value instanceof Integer); @@ -889,8 +978,9 @@ public Collection assertRTGResults(final Collection validat break; } } - assertTrue(USAGE + " must be >= " + minValidDocId + ": " + value, - minValidDocId <= ((Integer)value).intValue()); + assertTrue( + USAGE + " must be >= " + minValidDocId + ": " + value, + minValidDocId <= ((Integer) value).intValue()); return Collections.singleton(resultKey); } } @@ -898,21 +988,31 @@ public Collection assertRTGResults(final Collection validat /** Trivial validator of ShardAugmenterFactory */ private static class ShardValidator implements FlValidator { private static final String NAME = "shard"; - private static final String USAGE = "["+NAME+"]"; + private static final String USAGE = "[" + NAME + "]"; private final String resultKey; + public ShardValidator(final String resultKey) { this.resultKey = resultKey; } + public ShardValidator() { this(USAGE); } - public String getDefaultTransformerFactoryName() { return NAME; } - public String getFlParam() { return USAGE.equals(resultKey) ? resultKey : resultKey+":"+USAGE; } - public Collection assertRTGResults(final Collection validators, - final SolrInputDocument expected, - final SolrDocument actual, - final String wt) { - final Object value = actual.getFirstValue(resultKey); + + public String getDefaultTransformerFactoryName() { + return NAME; + } + + public String getFlParam() { + return USAGE.equals(resultKey) ? resultKey : resultKey + ":" + USAGE; + } + + public Collection assertRTGResults( + final Collection validators, + final SolrInputDocument expected, + final SolrDocument actual, + final String wt) { + final Object value = actual.getFirstValue(resultKey); assertNotNull(getFlParam() + " => no value in actual doc", value); assertTrue(USAGE + " must be an String: " + value, value instanceof String); @@ -925,181 +1025,239 @@ public Collection assertRTGResults(final Collection validat /** Trivial validator of ValueAugmenter */ private static class ValueAugmenterValidator implements FlValidator { private static final String NAME = "value"; - private static String trans(final int value) { return "[" + NAME + " v=" + value + " t=int]"; } - + + private static String trans(final int value) { + return "[" + NAME + " v=" + value + " t=int]"; + } + private final String resultKey; private final String fl; private final Integer expectedVal; - private ValueAugmenterValidator(final String fl, final int expectedVal, final String resultKey) { + + private ValueAugmenterValidator( + final String fl, final int expectedVal, final String resultKey) { this.resultKey = resultKey; this.expectedVal = expectedVal; this.fl = fl; } + public ValueAugmenterValidator(final int expectedVal, final String resultKey) { - this(resultKey + ":" +trans(expectedVal), expectedVal, resultKey); + this(resultKey + ":" + trans(expectedVal), expectedVal, resultKey); } + public ValueAugmenterValidator(final int expectedVal) { // value transformer is weird, default result key doesn't care what params are used... 
- this(trans(expectedVal), expectedVal, "["+NAME+"]"); - } - public String getDefaultTransformerFactoryName() { return NAME; } - public String getFlParam() { return fl; } - public Collection assertRTGResults(final Collection validators, - final SolrInputDocument expected, - final SolrDocument actual, - final String wt) { - Object actualVal = normalize(wt, actual.getFirstValue(resultKey)); + this(trans(expectedVal), expectedVal, "[" + NAME + "]"); + } + + public String getDefaultTransformerFactoryName() { + return NAME; + } + + public String getFlParam() { + return fl; + } + + public Collection assertRTGResults( + final Collection validators, + final SolrInputDocument expected, + final SolrDocument actual, + final String wt) { + Object actualVal = normalize(wt, actual.getFirstValue(resultKey)); assertNotNull(getFlParam() + " => no value in actual doc", actualVal); assertEquals(getFlParam(), expectedVal, actualVal); return Collections.singleton(resultKey); } } - /** Trivial validator of a ValueSourceAugmenter */ private static class FunctionValidator implements FlValidator { private static String func(String fieldName) { - return "add(1.3,sub("+fieldName+","+fieldName+"))"; + return "add(1.3,sub(" + fieldName + "," + fieldName + "))"; } + protected final String fl; protected final String resultKey; protected final String fieldName; + public FunctionValidator(final String fieldName) { this(func(fieldName), fieldName, func(fieldName)); } + public FunctionValidator(final String fieldName, final String resultKey) { this(resultKey + ":" + func(fieldName), fieldName, resultKey); } + private FunctionValidator(final String fl, final String fieldName, final String resultKey) { this.fl = fl; this.resultKey = resultKey; this.fieldName = fieldName; } /** always returns true */ - public boolean requiresRealtimeSearcherReOpen() { return true; } - public String getFlParam() { return fl; } - public Collection assertRTGResults(final Collection validators, - final SolrInputDocument expected, - final SolrDocument actual, - final String wt) { + public boolean requiresRealtimeSearcherReOpen() { + return true; + } + + public String getFlParam() { + return fl; + } + + public Collection assertRTGResults( + final Collection validators, + final SolrInputDocument expected, + final SolrDocument actual, + final String wt) { final Object origVal = expected.getFieldValue(fieldName); - assertTrue("this validator only works on numeric fields: " + origVal, origVal instanceof Number); - + assertTrue( + "this validator only works on numeric fields: " + origVal, origVal instanceof Number); + assertEquals(fl, 1.3F, normalize(wt, actual.getFirstValue(resultKey))); return Collections.singleton(resultKey); } } - - /** - * Trivial validator of a SubQueryAugmenter. + /** + * Trivial validator of a SubQueryAugmenter. + * + *

<p>This validator ignores 90% of the features/complexity of SubQueryAugmenter, and instead just + * focuses on the basics of: * - * This validator ignores 90% of the features/complexity - * of SubQueryAugmenter, and instead just focuses on the basics of: * <ul> - * <li>do a subquery for docs where SUBQ_FIELD contains the id of the top level doc</li> - * <li>verify that any subquery match is expected based on indexing pattern</li> + * <li>do a subquery for docs where SUBQ_FIELD contains the id of the top level doc + * <li>verify that any subquery match is expected based on indexing pattern * </ul>
*/ private static class SubQueryValidator implements FlValidator { // HACK to work around SOLR-9396... - // + // // we're using "id" (and only "id") in the subquery.q as a workarround limitation in // "$rows.foo" parsing -- it only works reliably if "foo" is in fl, so we only use "$rows.id", // which we know is in every request (and is a valid integer) - - public final static String NAME = "subquery"; - public final static String SUBQ_KEY = "subq"; - public final static String SUBQ_FIELD = "next_2_ids_i"; - public String getFlParam() { return SUBQ_KEY+":["+NAME+"]"; } + + public static final String NAME = "subquery"; + public static final String SUBQ_KEY = "subq"; + public static final String SUBQ_FIELD = "next_2_ids_i"; + + public String getFlParam() { + return SUBQ_KEY + ":[" + NAME + "]"; + } + @SuppressWarnings("unchecked") - public Collection assertRTGResults(final Collection validators, - final SolrInputDocument expected, - final SolrDocument actual, - final String wt) { + public Collection assertRTGResults( + final Collection validators, + final SolrInputDocument expected, + final SolrDocument actual, + final String wt) { final int compVal = assertParseInt("expected id", expected.getFieldValue("id")); - + Object actualVal = actual.getFieldValue(SUBQ_KEY); if ("json".equals(wt)) { actualVal = getSolrDocumentList((Map) actualVal); } - assertTrue("Expected a doclist: " + actualVal, - actualVal instanceof SolrDocumentList); - assertTrue("should be at most 2 docs in doc list: " + actualVal, - ((SolrDocumentList) actualVal).getNumFound() <= 2); - + assertTrue("Expected a doclist: " + actualVal, actualVal instanceof SolrDocumentList); + assertTrue( + "should be at most 2 docs in doc list: " + actualVal, + ((SolrDocumentList) actualVal).getNumFound() <= 2); + for (SolrDocument subDoc : (SolrDocumentList) actualVal) { final int subDocIdVal = assertParseInt("subquery id", subDoc.getFirstValue("id")); - assertTrue("subDocId="+subDocIdVal+" not in valid range for id="+compVal+" (expected " - + (compVal-1) + " or " + (compVal-2) + ")", - ((subDocIdVal < compVal) && ((compVal-2) <= subDocIdVal))); - + assertTrue( + "subDocId=" + + subDocIdVal + + " not in valid range for id=" + + compVal + + " (expected " + + (compVal - 1) + + " or " + + (compVal - 2) + + ")", + ((subDocIdVal < compVal) && ((compVal - 2) <= subDocIdVal))); } - + return Collections.singleton(SUBQ_KEY); } - public String getDefaultTransformerFactoryName() { return NAME; } + + public String getDefaultTransformerFactoryName() { + return NAME; + } + public SolrParams getExtraRequestParams() { - return params(SubQueryValidator.SUBQ_KEY + ".q", - "{!field f=" + SubQueryValidator.SUBQ_FIELD + " v=$row.id}"); + return params( + SubQueryValidator.SUBQ_KEY + ".q", + "{!field f=" + SubQueryValidator.SUBQ_FIELD + " v=$row.id}"); } } - + /** Trivial validator of a GeoTransformer */ - private static class GeoTransformerValidator implements FlValidator, SuppressRealFields{ + private static class GeoTransformerValidator implements FlValidator, SuppressRealFields { private static final String NAME = "geo"; - /** - * we're not worried about testing the actual geo parsing/formatting of values, - * just that the transformer gets called with the expected field value. - * so have a small set of fixed input values we use when indexing docs, - * and the expected output for each + /** + * we're not worried about testing the actual geo parsing/formatting of values, just that the + * transformer gets called with the expected field value. 
so have a small set of fixed input + * values we use when indexing docs, and the expected output for each */ - private static final Map VALUES = new HashMap<>(); - /** - * The set of legal field values this validator is willing to test as a list so we can - * reliably index into it with random ints + private static final Map VALUES = new HashMap<>(); + /** + * The set of legal field values this validator is willing to test as a list so we can reliably + * index into it with random ints */ private static final List ALLOWED_FIELD_VALUES; + static { - for (int i = -42; i < 66; i+=13) { - VALUES.put("POINT( 42 "+i+" )", "{\"type\":\"Point\",\"coordinates\":[42,"+i+"]}"); + for (int i = -42; i < 66; i += 13) { + VALUES.put("POINT( 42 " + i + " )", "{\"type\":\"Point\",\"coordinates\":[42," + i + "]}"); } ALLOWED_FIELD_VALUES = List.copyOf(VALUES.keySet()); } - /** - * returns a random field value usable when indexing a document that this validator will - * be able to handle. + /** + * returns a random field value usable when indexing a document that this validator will be able + * to handle. */ public static String getValueForIndexing(final Random rand) { return ALLOWED_FIELD_VALUES.get(rand.nextInt(ALLOWED_FIELD_VALUES.size())); } + private static String trans(String fieldName) { - return "["+NAME+" f="+fieldName+"]"; + return "[" + NAME + " f=" + fieldName + "]"; } + protected final String fl; protected final String resultKey; protected final String fieldName; + public GeoTransformerValidator(final String fieldName) { // geo transformer is weird, default result key doesn't care what params are used... - this(trans(fieldName), fieldName, "["+NAME+"]"); + this(trans(fieldName), fieldName, "[" + NAME + "]"); } + public GeoTransformerValidator(final String fieldName, final String resultKey) { this(resultKey + ":" + trans(fieldName), fieldName, resultKey); } - private GeoTransformerValidator(final String fl, final String fieldName, final String resultKey) { + + private GeoTransformerValidator( + final String fl, final String fieldName, final String resultKey) { this.fl = fl; this.resultKey = resultKey; this.fieldName = fieldName; } - public String getDefaultTransformerFactoryName() { return NAME; } - public String getFlParam() { return fl; } - public Collection assertRTGResults(final Collection validators, - final SolrInputDocument expected, - final SolrDocument actual, - final String wt) { + + public String getDefaultTransformerFactoryName() { + return NAME; + } + + public String getFlParam() { + return fl; + } + + public Collection assertRTGResults( + final Collection validators, + final SolrInputDocument expected, + final SolrDocument actual, + final String wt) { final Object origVal = expected.getFieldValue(fieldName); - assertTrue(fl + ": orig field value is not supported: " + origVal, VALUES.containsKey(origVal)); + assertTrue( + fl + ": orig field value is not supported: " + origVal, VALUES.containsKey(origVal)); Object orig = VALUES.get(origVal); if ("json".equals(wt)) { @@ -1112,79 +1270,95 @@ public Collection assertRTGResults(final Collection validat assertEquals(fl, orig, actual.getFirstValue(resultKey)); return Collections.singleton(resultKey); } - public Set getSuppressedFields() { return Collections.singleton(fieldName); } + + public Set getSuppressedFields() { + return Collections.singleton(fieldName); + } } - /** - * Glob based validator. 
- * This class checks that every field in the expected doc exists in the actual doc with the expected - * value -- with special exceptions for fields that are "suppressed" (usually via an alias) + /** + * Glob based validator. This class checks that every field in the expected doc exists in the + * actual doc with the expected value -- with special exceptions for fields that are "suppressed" + * (usually via an alias) * - * By design, fields that are aliased are "moved" unless the original field name was explicitly included - * in the fl, globs don't count. + *

<p>By design, fields that are aliased are "moved" unless the original field name was explicitly + * included in the fl, globs don't count. * * @see RenameFieldValueValidator */ private static class GlobValidator implements FlValidator { private final String glob; + public GlobValidator(final String glob) { this.glob = glob; } + private final Set matchingFieldsCache = new LinkedHashSet<>(); - - public String getFlParam() { return glob; } - + public String getFlParam() { + return glob; + } + private boolean matchesGlob(final String fieldName) { - if ( FilenameUtils.wildcardMatch(fieldName, glob) ) { + if (FilenameUtils.wildcardMatch(fieldName, glob)) { matchingFieldsCache.add(fieldName); // Don't calculate it again return true; } return false; } - - public Collection assertRTGResults(final Collection validators, - final SolrInputDocument expected, - final SolrDocument actual, - final String wt) { + + public Collection assertRTGResults( + final Collection validators, + final SolrInputDocument expected, + final SolrDocument actual, + final String wt) { final Set renamed = new LinkedHashSet<>(validators.size()); for (FlValidator v : validators) { if (v instanceof SuppressRealFields) { - renamed.addAll(((SuppressRealFields)v).getSuppressedFields()); + renamed.addAll(((SuppressRealFields) v).getSuppressedFields()); } } - + // every real field name matching the glob that is not renamed should be in the results Set result = new LinkedHashSet<>(expected.getFieldNames().size()); for (String f : expected.getFieldNames()) { - if ( matchesGlob(f) && (! renamed.contains(f) ) ) { + if (matchesGlob(f) && (!renamed.contains(f))) { result.add(f); - assertEquals(glob + " => " + f, expected.getFieldValue(f), normalize(wt, actual.getFirstValue(f))); + assertEquals( + glob + " => " + f, expected.getFieldValue(f), normalize(wt, actual.getFirstValue(f))); } } return result; } } - - /** - * for things like "score" and "[explain]" where we explicitly expect what we ask for in the fl - * to not be returned when using RTG. + + /** + * for things like "score" and "[explain]" where we explicitly expect what we ask for in the fl to + * not be returned when using RTG. 
*/ private static class NotIncludedValidator implements FlValidator { private final String fieldName; private final String fl; + public NotIncludedValidator(final String fl) { this(fl, fl); } + public NotIncludedValidator(final String fieldName, final String fl) { this.fieldName = fieldName; this.fl = fl; } - public String getFlParam() { return fl; } - public Collection assertRTGResults(final Collection validators, - final SolrInputDocument expected, - final SolrDocument actual, - final String wt) { + + public String getFlParam() { + return fl; + } + + public Collection assertRTGResults( + final Collection validators, + final SolrInputDocument expected, + final SolrDocument actual, + final String wt) { assertEquals(fl, null, actual.getFirstValue(fieldName)); return Collections.emptySet(); } @@ -1192,15 +1366,20 @@ public Collection assertRTGResults(final Collection validat /** explain should always be ignored when using RTG */ private static class ExplainValidator extends NotIncludedValidator { - private final static String NAME = "explain"; - private final static String USAGE = "[" + NAME + "]"; + private static final String NAME = "explain"; + private static final String USAGE = "[" + NAME + "]"; + public ExplainValidator() { super(USAGE); } + public ExplainValidator(final String resultKey) { super(USAGE, resultKey + ":" + USAGE); } - public String getDefaultTransformerFactoryName() { return NAME; } + + public String getDefaultTransformerFactoryName() { + return NAME; + } } /** helper method for adding a random number (may be 0) of items from {@link #FL_VALIDATORS} */ @@ -1212,15 +1391,16 @@ private static void addRandomFlValidators(final Random r, final Set } /** - * Given an ordered list of values to include in a (key) param, randomly groups them (ie: comma separated) - * into actual param key=values which are returned as a new SolrParams instance + * Given an ordered list of values to include in a (key) param, randomly groups them (ie: comma + * separated) into actual param key=values which are returned as a new SolrParams instance */ - private static SolrParams buildCommaSepParams(final Random rand, final String key, Collection values) { + private static SolrParams buildCommaSepParams( + final Random rand, final String key, Collection values) { ModifiableSolrParams result = new ModifiableSolrParams(); List copy = new ArrayList<>(values); - while (! 
copy.isEmpty()) { + while (!copy.isEmpty()) { List slice = copy.subList(0, random().nextInt(1 + copy.size())); - result.add(key,String.join(",",slice)); + result.add(key, String.join(",", slice)); slice.clear(); } return result; diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java b/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java index c4e07743b76..86be73eb7d2 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java @@ -16,6 +16,7 @@ */ package org.apache.solr.cloud; +import com.codahale.metrics.Counter; import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Collection; @@ -25,9 +26,6 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; - - -import com.codahale.metrics.Counter; import org.apache.lucene.util.TestUtil; import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.SolrTestCaseJ4; @@ -49,7 +47,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; - @SolrTestCaseJ4.SuppressSSL public class TestRandomRequestDistribution extends AbstractFullDistribZkTestBase { @@ -71,9 +68,7 @@ public void test() throws Exception { testQueryAgainstDownReplica(); } - /** - * Asserts that requests aren't always sent to the same poor node. See SOLR-7493 - */ + /** Asserts that requests aren't always sent to the same poor node. See SOLR-7493 */ private void testRequestTracking() throws Exception { CollectionAdminRequest.createCollection("a1x2", "conf1", 1, 2) @@ -89,8 +84,9 @@ private void testRequestTracking() throws Exception { cloudClient.getZkStateReader().forceUpdateCollection("b1x1"); - // get direct access to the metrics counters for each core/replica we're interested to monitor them - final Map counters = new LinkedHashMap<>(); + // get direct access to the metrics counters for each core/replica we're interested to monitor + // them + final Map counters = new LinkedHashMap<>(); for (JettySolrRunner runner : jettys) { CoreContainer container = runner.getCoreContainer(); SolrMetricManager metricManager = container.getMetricManager(); @@ -99,8 +95,7 @@ private void testRequestTracking() throws Exception { String registry = core.getCoreMetricManager().getRegistryName(); Counter cnt = metricManager.counter(null, registry, "requests", "QUERY./select"); // sanity check - assertEquals(core.getName() + " has already received some requests?", - 0, cnt.getCount()); + assertEquals(core.getName() + " has already received some requests?", 0, cnt.getCount()); counters.put(core.getName(), cnt); } } @@ -118,34 +113,37 @@ private void testRequestTracking() throws Exception { long expectedTotalRequests = 0; Set uniqueCoreNames = new LinkedHashSet<>(); - + log.info("Making requests to {} a1x2", baseUrl); while (uniqueCoreNames.size() < counters.keySet().size() && expectedTotalRequests < 1000L) { expectedTotalRequests++; client.query(new SolrQuery("*:*")); long actualTotalRequests = 0; - for (Map.Entry e : counters.entrySet()) { + for (Map.Entry e : counters.entrySet()) { final long coreCount = e.getValue().getCount(); actualTotalRequests += coreCount; if (0 < coreCount) { uniqueCoreNames.add(e.getKey()); } } - assertEquals("Sanity Check: Num Queries So Far Doesn't Match Total????", - expectedTotalRequests, actualTotalRequests); + assertEquals( + "Sanity Check: Num Queries So Far Doesn't Match Total????", + expectedTotalRequests, + actualTotalRequests); } 
log.info("Total requests: {}", expectedTotalRequests); - assertEquals("either request randomization code is broken of this test seed is really unlucky, " + - "Gave up waiting for requests to hit every core at least once after " + - expectedTotalRequests + " requests", - uniqueCoreNames.size(), counters.size()); + assertEquals( + "either request randomization code is broken of this test seed is really unlucky, " + + "Gave up waiting for requests to hit every core at least once after " + + expectedTotalRequests + + " requests", + uniqueCoreNames.size(), + counters.size()); } } - /** - * Asserts that requests against a collection are only served by a 'active' local replica - */ + /** Asserts that requests against a collection are only served by a 'active' local replica */ private void testQueryAgainstDownReplica() throws Exception { log.info("Creating collection 'football' with 1 shard and 2 replicas"); @@ -160,7 +158,13 @@ private void testQueryAgainstDownReplica() throws Exception { Replica leader = null; Replica notLeader = null; - Collection replicas = cloudClient.getZkStateReader().getClusterState().getCollection("football").getSlice("shard1").getReplicas(); + Collection replicas = + cloudClient + .getZkStateReader() + .getClusterState() + .getCollection("football") + .getSlice("shard1") + .getReplicas(); for (Replica replica : replicas) { if (replica.getStr(ZkStateReader.LEADER_PROP) != null) { leader = replica; @@ -169,32 +173,53 @@ private void testQueryAgainstDownReplica() throws Exception { } } - //Simulate a replica being in down state. - ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.STATE.toLower(), - ZkStateReader.NODE_NAME_PROP, notLeader.getStr(ZkStateReader.NODE_NAME_PROP), - ZkStateReader.BASE_URL_PROP, notLeader.getStr(ZkStateReader.BASE_URL_PROP), - ZkStateReader.COLLECTION_PROP, "football", - ZkStateReader.SHARD_ID_PROP, "shard1", - ZkStateReader.CORE_NAME_PROP, notLeader.getStr(ZkStateReader.CORE_NAME_PROP), - ZkStateReader.ROLES_PROP, "", - ZkStateReader.STATE_PROP, Replica.State.DOWN.toString()); + // Simulate a replica being in down state. 
+ ZkNodeProps m = + new ZkNodeProps( + Overseer.QUEUE_OPERATION, + OverseerAction.STATE.toLower(), + ZkStateReader.NODE_NAME_PROP, + notLeader.getStr(ZkStateReader.NODE_NAME_PROP), + ZkStateReader.BASE_URL_PROP, + notLeader.getStr(ZkStateReader.BASE_URL_PROP), + ZkStateReader.COLLECTION_PROP, + "football", + ZkStateReader.SHARD_ID_PROP, + "shard1", + ZkStateReader.CORE_NAME_PROP, + notLeader.getStr(ZkStateReader.CORE_NAME_PROP), + ZkStateReader.ROLES_PROP, + "", + ZkStateReader.STATE_PROP, + Replica.State.DOWN.toString()); if (log.isInfoEnabled()) { - log.info("Forcing {} to go into 'down' state", notLeader.getStr(ZkStateReader.CORE_NAME_PROP)); + log.info( + "Forcing {} to go into 'down' state", notLeader.getStr(ZkStateReader.CORE_NAME_PROP)); } final Overseer overseer = jettys.get(0).getCoreContainer().getZkController().getOverseer(); if (overseer.getDistributedClusterStateUpdater().isDistributedStateUpdate()) { - overseer.getDistributedClusterStateUpdater().doSingleStateUpdate( - DistributedClusterStateUpdater.MutatingCommand.ReplicaSetState, m, overseer.getSolrCloudManager(), overseer.getZkStateReader()); + overseer + .getDistributedClusterStateUpdater() + .doSingleStateUpdate( + DistributedClusterStateUpdater.MutatingCommand.ReplicaSetState, + m, + overseer.getSolrCloudManager(), + overseer.getZkStateReader()); } else { ZkDistributedQueue q = overseer.getStateUpdateQueue(); q.offer(Utils.toJSON(m)); } - verifyReplicaStatus(cloudClient.getZkStateReader(), "football", "shard1", notLeader.getName(), Replica.State.DOWN); + verifyReplicaStatus( + cloudClient.getZkStateReader(), + "football", + "shard1", + notLeader.getName(), + Replica.State.DOWN); - //Query against the node which hosts the down replica + // Query against the node which hosts the down replica String baseUrl = notLeader.getBaseUrl(); if (!baseUrl.endsWith("/")) baseUrl += "/"; @@ -218,11 +243,10 @@ private void testQueryAgainstDownReplica() throws Exception { String leaderRegistry = leaderCore.getCoreMetricManager().getRegistryName(); Counter cnt = leaderMetricManager.counter(null, leaderRegistry, "requests", "QUERY./select"); - // All queries should be served by the active replica - // To make sure that's true we keep querying the down replica - // If queries are getting processed by the down replica then the cluster state hasn't updated for that replica - // locally - // So we keep trying till it has updated and then verify if ALL queries go to the active replica + // All queries should be served by the active replica. To make sure that's true we keep + // querying the down replica. If queries are getting processed by the down replica then the + // cluster state hasn't updated for that replica locally.
So we keep trying till it has + // updated and then verify if ALL queries go to the active replica long count = 0; while (true) { count++; diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java b/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java index 0ce5b709a28..0238f0059ca 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java @@ -21,7 +21,6 @@ import java.util.*; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; - import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.embedded.JettySolrRunner; @@ -63,74 +62,77 @@ public static void setupCluster() throws Exception { numReplicas = random().nextInt(2) + 2; useAdminToSetProps = random().nextBoolean(); - configureCluster(numNodes) - .addConfig(COLLECTION_NAME, configset("cloud-minimal")) - .configure(); + configureCluster(numNodes).addConfig(COLLECTION_NAME, configset("cloud-minimal")).configure(); - CollectionAdminResponse resp = CollectionAdminRequest.createCollection(COLLECTION_NAME, COLLECTION_NAME, - numShards, numReplicas, 0, 0) - .process(cluster.getSolrClient()); + CollectionAdminResponse resp = + CollectionAdminRequest.createCollection( + COLLECTION_NAME, COLLECTION_NAME, numShards, numReplicas, 0, 0) + .process(cluster.getSolrClient()); assertEquals("Admin request failed; ", 0, resp.getStatus()); cluster.waitForActiveCollection(COLLECTION_NAME, numShards, numShards * numReplicas); - } @Before public void removeAllProperties() throws KeeperException, InterruptedException { forceUpdateCollectionStatus(); - DocCollection docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + DocCollection docCollection = + cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); for (Slice slice : docCollection.getSlices()) { for (Replica rep : slice.getReplicas()) { - rep.getProperties().forEach((key, value) -> { - if (key.startsWith("property.")) { - try { - delProp(slice, rep, key); - } catch (IOException | SolrServerException e) { - fail("Caught unexpected exception in @Before " + e.getMessage()); - } - } - }); + rep.getProperties() + .forEach( + (key, value) -> { + if (key.startsWith("property.")) { + try { + delProp(slice, rep, key); + } catch (IOException | SolrServerException e) { + fail("Caught unexpected exception in @Before " + e.getMessage()); + } + } + }); } } } int timeoutMs = 60000; - - // test that setting an arbitrary "slice unique" property un-sets the property if it's on another replica in the - // slice. This is testing when the property is set on an _individual_ replica whereas testBalancePropertySliceUnique - // tests whether changing an individual _replica_ un-sets the property on other replicas _in that slice_. + // test that setting an arbitrary "slice unique" property un-sets the property if it's on another + // replica in the slice. This is testing when the property is set on an _individual_ replica + // whereas testBalancePropertySliceUnique tests whether changing an individual _replica_ un-sets + // the property on other replicas _in that slice_. // // NOTE: There were significant problems because at one point the code implicitly defined - // shardUnique=true for the special property preferredLeader. That was removed at one point so we're explicitly - // testing that as well. 
+ // shardUnique=true for the special property preferredLeader. That was removed at one point so + // we're explicitly testing that as well. @Test - public void testSetArbitraryPropertySliceUnique() throws IOException, SolrServerException, InterruptedException, KeeperException { + public void testSetArbitraryPropertySliceUnique() + throws IOException, SolrServerException, InterruptedException, KeeperException { // Check both special (preferredLeader) and something arbitrary. doTestSetArbitraryPropertySliceUnique("foo" + random().nextInt(1_000_000)); removeAllProperties(); doTestSetArbitraryPropertySliceUnique("preferredleader"); } - - // Test that automatically distributing a slice unique property un-sets that property if it's in any other replica - // on that slice. - // This is different than the test above. The test above sets individual properties on individual nodes. This one - // relies on Solr to pick which replicas to set the property on + // Test that automatically distributing a slice unique property un-sets that property if it's in + // any other replica on that slice. This is different than the test above. The test above sets + // individual properties on individual nodes. This one relies on Solr to pick which replicas to + // set the property on @Test - public void testBalancePropertySliceUnique() throws KeeperException, InterruptedException, IOException, SolrServerException { + public void testBalancePropertySliceUnique() + throws KeeperException, InterruptedException, IOException, SolrServerException { // Check both cases of "special" property preferred(Ll)eader doTestBalancePropertySliceUnique("foo" + random().nextInt(1_000_000)); removeAllProperties(); doTestBalancePropertySliceUnique("preferredleader"); } - // We've moved on from a property being tested, we need to check if rebalancing the leaders actually chantges the - // leader appropriately. + // We've moved on from a property being tested, we need to check if rebalancing the leaders + // actually changes the leader appropriately. @Test public void testRebalanceLeaders() throws Exception { - // First let's unbalance the preferredLeader property, do all the leaders get reassigned properly? + // First let's unbalance the preferredLeader property, do all the leaders get reassigned + // properly? concentrateProp("preferredLeader"); sendRebalanceCommand(); checkPreferredsAreLeaders(); @@ -140,19 +142,23 @@ public void testRebalanceLeaders() throws Exception { sendRebalanceCommand(); checkPreferredsAreLeaders(); - // Now check the condition we saw "in the wild" where you could not rebalance properly when Jetty was restarted. + // Now check the condition we saw "in the wild" where you could not rebalance properly when + // Jetty was restarted. concentratePropByRestartingJettys(); sendRebalanceCommand(); checkPreferredsAreLeaders(); } - // Insure that the property is set on only one replica per slice when changing a unique property on an individual + // Insure that the property is set on only one replica per slice when changing a unique property + // on an individual // replica. - private void doTestSetArbitraryPropertySliceUnique(String propIn) throws InterruptedException, KeeperException, IOException, SolrServerException { + private void doTestSetArbitraryPropertySliceUnique(String propIn) + throws InterruptedException, KeeperException, IOException, SolrServerException { final String prop = (random().nextBoolean()) ?
propIn : propIn.toUpperCase(Locale.ROOT); // First set the property in some replica in some slice forceUpdateCollectionStatus(); - DocCollection docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + DocCollection docCollection = + cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); Slice[] slices = docCollection.getSlices().toArray(new Slice[0]); Slice slice = slices[random().nextInt(slices.length)]; @@ -173,10 +179,22 @@ private void doTestSetArbitraryPropertySliceUnique(String propIn) throws Interru // ensure that no other replica in that slice has the property when we return. while (timeout.hasTimedOut() == false) { forceUpdateCollectionStatus(); - modColl = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + modColl = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollection(COLLECTION_NAME); modSlice = modColl.getSlice(slice.getName()); - rightRep = modSlice.getReplica(rep.getName()).getBool("property." + prop.toLowerCase(Locale.ROOT), false); - count = modSlice.getReplicas().stream().filter(thisRep -> thisRep.getBool("property." + prop.toLowerCase(Locale.ROOT), false)).count(); + rightRep = + modSlice + .getReplica(rep.getName()) + .getBool("property." + prop.toLowerCase(Locale.ROOT), false); + count = + modSlice.getReplicas().stream() + .filter( + thisRep -> thisRep.getBool("property." + prop.toLowerCase(Locale.ROOT), false)) + .count(); if (count == 1 && rightRep) { break; @@ -185,13 +203,17 @@ private void doTestSetArbitraryPropertySliceUnique(String propIn) throws Interru TimeUnit.MILLISECONDS.sleep(50); } if (count != 1 || rightRep == false) { - fail("The property " + prop + " was not uniquely distributed in slice " + slice.getName() - + " " + modColl.toString()); + fail( + "The property " + + prop + + " was not uniquely distributed in slice " + + slice.getName() + + " " + + modColl.toString()); } } } - // Fail if the replicas with the preferredLeader property are _not_ also the leaders. private void checkPreferredsAreLeaders() throws InterruptedException, KeeperException { // Make sure that the shard unique are where you expect. @@ -199,48 +221,74 @@ private void checkPreferredsAreLeaders() throws InterruptedExce while (timeout.hasTimedOut() == false) { if (checkPreferredsAreLeaders(false)) { - // Ok, all preferreds are leaders. Just for Let's also get the election queue and guarantee that every - // live replica is in the queue and none are repeated. + // Ok, all preferreds are leaders. Let's also get the election queue and guarantee + // that every live replica is in the queue and none are repeated. checkElectionQueues(); return; } TimeUnit.MILLISECONDS.sleep(50); } - log.error("Leaders are not all preferres {}", cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME)); + log.error( + "Leaders are not all preferreds {}", + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollection(COLLECTION_NAME)); // Show the errors checkPreferredsAreLeaders(true); } // Do all active nodes in each slice appear exactly once in the slice's leader election queue? - // Since we assert that the number of live replicas is the same size as the leader election queue, we only - // have to compare one way.
+ // Since we assert that the number of live replicas is the same size as the leader election queue, + // we only have to compare one way. private void checkElectionQueues() throws KeeperException, InterruptedException { - DocCollection docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); - Set liveNodes = cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes(); + DocCollection docCollection = + cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + Set liveNodes = + cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes(); for (Slice slice : docCollection.getSlices()) { Set liveReplicas = new HashSet<>(); - slice.getReplicas().forEach(replica -> { - if (replica.isActive(liveNodes)) { - liveReplicas.add(replica); - } - }); + slice + .getReplicas() + .forEach( + replica -> { + if (replica.isActive(liveNodes)) { + liveReplicas.add(replica); + } + }); checkOneQueue(docCollection, slice, liveReplicas); } } // Helper method to check one leader election queue's consistency. - private void checkOneQueue(DocCollection coll, Slice slice, Set liveReplicas) throws KeeperException, InterruptedException { - - List leaderQueue = cluster.getSolrClient().getZkStateReader().getZkClient().getChildren("/collections/" + COLLECTION_NAME + - "/leader_elect/" + slice.getName() + "/election", null, true); + private void checkOneQueue(DocCollection coll, Slice slice, Set liveReplicas) + throws KeeperException, InterruptedException { + + List leaderQueue = + cluster + .getSolrClient() + .getZkStateReader() + .getZkClient() + .getChildren( + "/collections/" + + COLLECTION_NAME + + "/leader_elect/" + + slice.getName() + + "/election", + null, + true); if (leaderQueue.size() != liveReplicas.size()) { - log.error("One or more replicas is missing from the leader election queue! Slice {}, election queue: {}, collection: {}" - , slice.getName(), leaderQueue, coll); + log.error( + "One or more replicas is missing from the leader election queue! Slice {}, election queue: {}, collection: {}", + slice.getName(), + leaderQueue, + coll); fail("One or more replicas is missing from the leader election queue"); } // Check that each election node has a corresponding live replica. @@ -254,10 +302,13 @@ private void checkOneQueue(DocCollection coll, Slice slice, Set liveRep } // Just an encapsulation for checkPreferredsAreLeaders to make returning easier. - // the doAsserts var is to actually print the problem and fail the test if the condition is not met. - private boolean checkPreferredsAreLeaders(boolean doAsserts) throws KeeperException, InterruptedException { + // the doAsserts var is to actually print the problem and fail the test if the condition is not + // met. + private boolean checkPreferredsAreLeaders(boolean doAsserts) + throws KeeperException, InterruptedException { forceUpdateCollectionStatus(); - DocCollection docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + DocCollection docCollection = + cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); for (Slice slice : docCollection.getSlices()) { for (Replica rep : slice.getReplicas()) { if (rep.getBool("property.preferredleader", false)) { @@ -274,7 +325,8 @@ private boolean checkPreferredsAreLeaders(boolean doAsserts) throws KeeperExcept } // Arbitrarily send the rebalance command either with the SolrJ interface or with an HTTP request. 
- private void sendRebalanceCommand() throws SolrServerException, InterruptedException, IOException { + private void sendRebalanceCommand() + throws SolrServerException, InterruptedException, IOException { if (random().nextBoolean()) { rebalanceLeaderUsingSolrJAPI(); } else { @@ -282,9 +334,11 @@ public void sendRebalanceCommand() throws SolrServerException, InterruptedExcep } } - // Helper method to make sure the property is _unbalanced_ first, then it gets properly re-assigned with the + // Helper method to make sure the property is _unbalanced_ first, then it gets properly + // re-assigned with the // BALANCESHARDUNIQUE command. - private void doTestBalancePropertySliceUnique(String propIn) throws InterruptedException, IOException, KeeperException, SolrServerException { + private void doTestBalancePropertySliceUnique(String propIn) + throws InterruptedException, IOException, KeeperException, SolrServerException { final String prop = (random().nextBoolean()) ? propIn : propIn.toUpperCase(Locale.ROOT); // Concentrate the properties on as few replicas as possible @@ -298,10 +352,10 @@ private void doTestBalancePropertySliceUnique(String propIn) throws InterruptedE // Verify that the property is reasonably evenly distributed verifyPropCorrectlyDistributed(prop); - } - private void verifyPropCorrectlyDistributed(String prop) throws KeeperException, InterruptedException { + private void verifyPropCorrectlyDistributed(String prop) + throws KeeperException, InterruptedException { TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); @@ -309,7 +363,12 @@ private void verifyPropCorrectlyDistributed(String prop) throws KeeperException, DocCollection docCollection = null; while (timeout.hasTimedOut() == false) { forceUpdateCollectionStatus(); - docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + docCollection = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollection(COLLECTION_NAME); int maxPropCount = Integer.MAX_VALUE; int minPropCount = Integer.MIN_VALUE; for (Slice slice : docCollection.getSlices()) { @@ -329,7 +388,9 @@ private void verifyPropCorrectlyDistributed(String prop) throws KeeperException, } // Used when we concentrate the leader on a few nodes. - private void verifyPropDistributedAsExpected(Map expectedShardReplicaMap, String prop) throws InterruptedException, KeeperException { + private void verifyPropDistributedAsExpected( + Map expectedShardReplicaMap, String prop) + throws InterruptedException, KeeperException { // Make sure that the shard unique are where you expect.
TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); @@ -338,7 +399,12 @@ private void verifyPropDistributedAsExpected(Map expectedShardRe DocCollection docCollection = null; while (timeout.hasTimedOut() == false) { forceUpdateCollectionStatus(); - docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + docCollection = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollection(COLLECTION_NAME); failure = false; for (Map.Entry ent : expectedShardReplicaMap.entrySet()) { Replica rep = docCollection.getSlice(ent.getKey()).getReplica(ent.getValue()); @@ -352,12 +418,18 @@ private void verifyPropDistributedAsExpected(Map expectedShardRe TimeUnit.MILLISECONDS.sleep(100); } - fail(prop + " properties are not on the expected replicas: " + docCollection.toString() - + System.lineSeparator() + "Expected " + expectedShardReplicaMap.toString()); + fail( + prop + + " properties are not on the expected replicas: " + + docCollection.toString() + + System.lineSeparator() + + "Expected " + + expectedShardReplicaMap.toString()); } // Just check that the property is distributed as expected. This does _not_ rebalance the leaders - private void rebalancePropAndCheck(String prop) throws IOException, SolrServerException, InterruptedException, KeeperException { + private void rebalancePropAndCheck(String prop) + throws IOException, SolrServerException, InterruptedException, KeeperException { if (random().nextBoolean()) { rebalancePropUsingSolrJAPI(prop); @@ -366,12 +438,13 @@ private void rebalancePropAndCheck(String prop) throws IOException, SolrServerEx } } - - private void rebalanceLeaderUsingSolrJAPI() throws IOException, SolrServerException, InterruptedException { - CollectionAdminResponse resp = CollectionAdminRequest - .rebalanceLeaders(COLLECTION_NAME) - .process(cluster.getSolrClient()); - assertTrue("All leaders should have been verified", resp.getResponse().get("Summary").toString().contains("Success")); + private void rebalanceLeaderUsingSolrJAPI() + throws IOException, SolrServerException, InterruptedException { + CollectionAdminResponse resp = + CollectionAdminRequest.rebalanceLeaders(COLLECTION_NAME).process(cluster.getSolrClient()); + assertTrue( + "All leaders should have been verified", + resp.getResponse().get("Summary").toString().contains("Success")); assertEquals("Admin request failed; ", 0, resp.getStatus()); } @@ -382,31 +455,33 @@ private void rebalanceLeaderUsingStandardRequest() throws IOException, SolrServe QueryRequest request = new QueryRequest(params); request.setPath("/admin/collections"); QueryResponse resp = request.process(cluster.getSolrClient()); - assertTrue("All leaders should have been verified", resp.getResponse().get("Summary").toString().contains("Success")); + assertTrue( + "All leaders should have been verified", + resp.getResponse().get("Summary").toString().contains("Success")); assertEquals("Call to rebalanceLeaders failed ", 0, resp.getStatus()); } - - private void rebalancePropUsingSolrJAPI(String prop) throws IOException, SolrServerException, InterruptedException { + private void rebalancePropUsingSolrJAPI(String prop) + throws IOException, SolrServerException, InterruptedException { // Don't set the value, that should be done automatically.
CollectionAdminResponse resp; if (prop.toLowerCase(Locale.ROOT).contains("preferredleader")) { - resp = CollectionAdminRequest - .balanceReplicaProperty(COLLECTION_NAME, prop) - .process(cluster.getSolrClient()); + resp = + CollectionAdminRequest.balanceReplicaProperty(COLLECTION_NAME, prop) + .process(cluster.getSolrClient()); } else { - resp = CollectionAdminRequest - .balanceReplicaProperty(COLLECTION_NAME, prop) - .setShardUnique(true) - .process(cluster.getSolrClient()); - + resp = + CollectionAdminRequest.balanceReplicaProperty(COLLECTION_NAME, prop) + .setShardUnique(true) + .process(cluster.getSolrClient()); } assertEquals("Admin request failed; ", 0, resp.getStatus()); } - private void rebalancePropUsingStandardRequest(String prop) throws IOException, SolrServerException { + private void rebalancePropUsingStandardRequest(String prop) + throws IOException, SolrServerException { ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.BALANCESHARDUNIQUE.toString()); params.set("property", prop); @@ -421,11 +496,12 @@ private void rebalancePropUsingStandardRequest(String prop) throws IOException, assertEquals("Call to rebalanceLeaders failed ", 0, resp.getStatus()); } - // This important. I've (Erick Erickson) run across a situation where the "standard request" causes failures, but - // never the Admin request. So let's test both all the time for a given test. + // This is important. I've (Erick Erickson) run across a situation where the "standard request" + // causes failures, but never the Admin request. So let's test both all the time for a given test. // // This sets an _individual_ replica to have the property, not collection-wide - private void setProp(Slice slice, Replica rep, String prop) throws IOException, SolrServerException { + private void setProp(Slice slice, Replica rep, String prop) + throws IOException, SolrServerException { if (useAdminToSetProps) { setPropWithAdminRequest(slice, rep, prop); } else { @@ -433,7 +509,8 @@ private void setProp(Slice slice, Replica rep, String prop) throws IOException, } } - void setPropWithStandardRequest(Slice slice, Replica rep, String prop) throws IOException, SolrServerException { + void setPropWithStandardRequest(Slice slice, Replica rep, String prop) + throws IOException, SolrServerException { ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString()); @@ -451,45 +528,56 @@ void setPropWithStandardRequest(Slice slice, Replica rep, String prop) throws IO request.setPath("/admin/collections"); cluster.getSolrClient().request(request); String propLC = prop.toLowerCase(Locale.ROOT); - waitForState("Expecting property '" + prop + "'to appear on replica " + rep.getName(), COLLECTION_NAME, + waitForState( + "Expecting property '" + prop + "' to appear on replica " + rep.getName(), + COLLECTION_NAME, (n, c) -> "true".equals(c.getReplica(rep.getName()).getProperty(propLC))); - } - void setPropWithAdminRequest(Slice slice, Replica rep, String prop) throws IOException, SolrServerException { + void setPropWithAdminRequest(Slice slice, Replica rep, String prop) + throws IOException, SolrServerException { boolean setUnique = (prop.toLowerCase(Locale.ROOT).equals("preferredleader") == false); CollectionAdminRequest.AddReplicaProp addProp = - CollectionAdminRequest.addReplicaProperty(COLLECTION_NAME, slice.getName(), rep.getName(), prop, "true"); + CollectionAdminRequest.addReplicaProperty( + COLLECTION_NAME,
slice.getName(), rep.getName(), prop, "true"); if (setUnique) { addProp.setShardUnique(true); } CollectionAdminResponse resp = addProp.process(cluster.getSolrClient()); assertEquals(0, resp.getStatus()); String propLC = prop.toLowerCase(Locale.ROOT); - waitForState("Expecting property '" + prop + "'to appear on replica " + rep.getName(), COLLECTION_NAME, + waitForState( + "Expecting property '" + prop + "' to appear on replica " + rep.getName(), + COLLECTION_NAME, (n, c) -> "true".equals(c.getReplica(rep.getName()).getProperty(propLC))); - } - private void delProp(Slice slice, Replica rep, String prop) throws IOException, SolrServerException { + private void delProp(Slice slice, Replica rep, String prop) + throws IOException, SolrServerException { String propLC = prop.toLowerCase(Locale.ROOT); - CollectionAdminResponse resp = CollectionAdminRequest.deleteReplicaProperty(COLLECTION_NAME, slice.getName(), rep.getName(), propLC) - .process(cluster.getSolrClient()); + CollectionAdminResponse resp = + CollectionAdminRequest.deleteReplicaProperty( + COLLECTION_NAME, slice.getName(), rep.getName(), propLC) + .process(cluster.getSolrClient()); assertEquals("Admin request failed; ", 0, resp.getStatus()); - waitForState("Expecting property '" + prop + "' to be removed from replica " + rep.getName(), COLLECTION_NAME, + waitForState( + "Expecting property '" + prop + "' to be removed from replica " + rep.getName(), + COLLECTION_NAME, (n, c) -> c.getReplica(rep.getName()).getProperty(prop) == null); } - // Intentionally un-balance the property to insure that BALANCESHARDUNIQUE does its job. There was an odd case - // where rebalancing didn't work very well if the Solr nodes were stopped and restarted that worked perfectly - // when if the nodes were _not_ restarted in the test. So we have to test that too. + // Intentionally un-balance the property to insure that BALANCESHARDUNIQUE does its job. There was + // an odd case where rebalancing didn't work very well if the Solr nodes were stopped and + // restarted that worked perfectly when the nodes were _not_ restarted in the test. So we have + // to test that too. private void concentratePropByRestartingJettys() throws Exception { List jettys = new ArrayList<>(cluster.getJettySolrRunners()); Collections.shuffle(jettys, random()); jettys.remove(random().nextInt(jettys.size())); - // Now we have a list of jettys, and there is one missing. Stop all of the remaining jettys, then start them again - // to concentrate the leaders. It's not necessary that all shards have a leader. + // Now we have a list of jettys, and there is one missing. Stop all of the remaining jettys, + // then start them again to concentrate the leaders. It's not necessary that all shards have a + // leader.
ExecutorService executorService = ExecutorUtil.newMDCAwareCachedThreadPool("Start Jetty"); @@ -503,7 +591,6 @@ private void concentratePropByRestartingJettys() throws Exception { cluster.stopJettySolrRunner(jetty); } - for (JettySolrRunner jetty : jettys) { cluster.waitForJettyToStop(jetty); } @@ -513,13 +600,14 @@ private void concentratePropByRestartingJettys() throws Exception { for (int idx = 0; idx < jettys.size(); ++idx) { int finalIdx = idx; - executorService.submit(()->{ - try { - cluster.startJettySolrRunner(jettys.get(finalIdx)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); + executorService.submit( + () -> { + try { + cluster.startJettySolrRunner(jettys.get(finalIdx)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); } cluster.waitForAllNodes(60); // the nodes are present, but are all replicas active? @@ -528,26 +616,34 @@ private void concentratePropByRestartingJettys() throws Exception { ExecutorUtil.shutdownAndAwaitTermination(executorService); } - // while banging my nead against a wall, I put a lot of force refresh statements in. Want to leave them in - // but have this be a no-op so if we start to get failures, we can re-enable with minimal effort. + // while banging my head against a wall, I put a lot of force refresh statements in. Want to leave + // them in but have this be a no-op so if we start to get failures, we can re-enable with minimal + // effort. private void forceUpdateCollectionStatus() throws KeeperException, InterruptedException { // cluster.getSolrClient().getZkStateReader().forceUpdateCollection(COLLECTION_NAME); } - // Since we have to restart jettys, we don't want to try rebalancing etc. until we're sure all jettys that should - // be up are up and all replicas are active. - private void checkReplicasInactive(List downJettys) throws KeeperException, InterruptedException { + // Since we have to restart jettys, we don't want to try rebalancing etc. until we're sure all + // jettys that should be up are up and all replicas are active.
+ private void checkReplicasInactive(List downJettys) + throws KeeperException, InterruptedException { TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); DocCollection docCollection = null; Set liveNodes = null; Set downJettyNodes = new TreeSet<>(); for (JettySolrRunner jetty : downJettys) { - downJettyNodes.add(jetty.getBaseUrl().getHost() + ":" + jetty.getBaseUrl().getPort() + "_solr"); + downJettyNodes.add( + jetty.getBaseUrl().getHost() + ":" + jetty.getBaseUrl().getPort() + "_solr"); } while (timeout.hasTimedOut() == false) { forceUpdateCollectionStatus(); - docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + docCollection = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollection(COLLECTION_NAME); liveNodes = cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes(); boolean expectedInactive = true; @@ -567,18 +663,27 @@ private void checkReplicasInactive(List downJettys) throws Keep } TimeUnit.MILLISECONDS.sleep(100); } - fail("timed out waiting for all replicas to become inactive: livenodes: " + liveNodes + - " Collection state: " + docCollection.toString()); + fail( + "timed out waiting for all replicas to become inactive: livenodes: " + + liveNodes + + " Collection state: " + + docCollection.toString()); } - // We need to wait around until all replicas are active before expecting rebalancing or distributing shard-unique - // properties to work. + // We need to wait around until all replicas are active before expecting rebalancing or + // distributing shard-unique properties to work. private void checkAllReplicasActive() throws KeeperException, InterruptedException { TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); while (timeout.hasTimedOut() == false) { forceUpdateCollectionStatus(); - DocCollection docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); - Set liveNodes = cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes(); + DocCollection docCollection = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollection(COLLECTION_NAME); + Set liveNodes = + cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes(); boolean allActive = true; for (Slice slice : docCollection.getSlices()) { for (Replica rep : slice.getReplicas()) { @@ -595,17 +700,21 @@ private void checkAllReplicasActive() throws KeeperException, InterruptedExcepti fail("timed out waiting for all replicas to become active"); } - // use a simple heuristic to put as many replicas with the property on as few nodes as possible. The point is that - // then we can execute BALANCESHARDUNIQUE and be sure it worked correctly - private void concentrateProp(String prop) throws KeeperException, InterruptedException, IOException, SolrServerException { - // find all the live nodes - // for each slice, assign the leader to the first replica that is in the lowest position on live_nodes - List liveNodes = new ArrayList<>(cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes()); + // use a simple heuristic to put as many replicas with the property on as few nodes as possible. 
+ // The point is that then we can execute BALANCESHARDUNIQUE and be sure it worked correctly + private void concentrateProp(String prop) + throws KeeperException, InterruptedException, IOException, SolrServerException { + // find all the live nodes; for each slice, assign the leader to the first replica that is in the + // lowest position on live_nodes + List liveNodes = + new ArrayList<>( + cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes()); Collections.shuffle(liveNodes, random()); Map uniquePropMap = new TreeMap<>(); forceUpdateCollectionStatus(); - DocCollection docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + DocCollection docCollection = + cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); for (Slice slice : docCollection.getSlices()) { Replica changedRep = null; int livePos = Integer.MAX_VALUE; @@ -617,7 +726,9 @@ private void concentrateProp(String prop) throws KeeperException, InterruptedExc } } if (livePos == Integer.MAX_VALUE) { - fail("Invalid state! We should have a replica to add the property to! " + docCollection.toString()); + fail( + "Invalid state! We should have a replica to add the property to! " + + docCollection.toString()); } uniquePropMap.put(slice.getName(), changedRep.getName()); @@ -628,7 +739,8 @@ private void concentrateProp(String prop) throws KeeperException, InterruptedExc } // make sure that the property in question is unique per shard. - private Map verifyPropUniquePerShard(String prop) throws InterruptedException, KeeperException { + private Map verifyPropUniquePerShard(String prop) + throws InterruptedException, KeeperException { Map uniquePropMaps = new TreeMap<>(); TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); @@ -639,15 +751,25 @@ private Map verifyPropUniquePerShard(String prop) throws Interru } TimeUnit.MILLISECONDS.sleep(10); } - fail("There should be exactly one replica with value " + prop + " set to true per shard: " - + cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME).toString()); + fail( + "There should be exactly one replica with value " + + prop + + " set to true per shard: " + + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollection(COLLECTION_NAME) + .toString()); return null; // keeps IDE happy.
} // return true if every shard has exactly one replica with the unique property set to "true" - private boolean checkdUniquePropPerShard(Map uniques, String prop) throws KeeperException, InterruptedException { + private boolean checkdUniquePropPerShard(Map uniques, String prop) + throws KeeperException, InterruptedException { forceUpdateCollectionStatus(); - DocCollection docCollection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + DocCollection docCollection = + cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); for (Slice slice : docCollection.getSlices()) { int propfCount = 0; @@ -663,4 +785,4 @@ private boolean checkdUniquePropPerShard(Map uniques, String pro } return true; } -} \ No newline at end of file +} diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRequestForwarding.java b/solr/core/src/test/org/apache/solr/cloud/TestRequestForwarding.java index 45ac57777ec..85db70c443f 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestRequestForwarding.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestRequestForwarding.java @@ -17,7 +17,6 @@ package org.apache.solr.cloud; import java.net.URL; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.SolrTestCaseJ4.SuppressSSL; import org.apache.solr.client.solrj.embedded.JettySolrRunner; @@ -52,19 +51,19 @@ public void tearDown() throws Exception { @Test public void testMultiCollectionQuery() throws Exception { createCollection("collection1", "conf1"); - // Test against all nodes (two of them host the collection, one of them will + // Test against all nodes (two of them host the collection, one of them will // forward the query) for (JettySolrRunner jettySolrRunner : solrCluster.getJettySolrRunners()) { String queryStrings[] = { - "q=cat%3Afootball%5E2", // URL encoded - "q=cat:football^2" // No URL encoding, contains disallowed character ^ + "q=cat%3Afootball%5E2", // URL encoded + "q=cat:football^2" // No URL encoding, contains disallowed character ^ }; - for (String q: queryStrings) { + for (String q : queryStrings) { try { - URL url = new URL(jettySolrRunner.getBaseUrl().toString()+"/collection1/select?"+q); + URL url = new URL(jettySolrRunner.getBaseUrl().toString() + "/collection1/select?" + q); url.openStream(); // Shouldn't throw any errors } catch (Exception ex) { - throw new RuntimeException("Query '" + q + "' failed, ",ex); + throw new RuntimeException("Query '" + q + "' failed, ", ex); } } } @@ -72,9 +71,10 @@ public void testMultiCollectionQuery() throws Exception { private void createCollection(String name, String config) throws Exception { CollectionAdminResponse response; - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(name,config,2,1); + CollectionAdminRequest.Create create = + CollectionAdminRequest.createCollection(name, config, 2, 1); response = create.process(solrCluster.getSolrClient()); - + if (response.getStatus() != 0 || response.getErrorMessages() != null) { fail("Could not create collection. 
Response" + response.toString()); } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestSSLRandomization.java b/solr/core/src/test/org/apache/solr/cloud/TestSSLRandomization.java index 42cbb730bc1..13ee50d213e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestSSLRandomization.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestSSLRandomization.java @@ -18,25 +18,22 @@ import java.lang.invoke.MethodHandles; import java.util.Arrays; - import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.util.SSLTestConfig; import org.apache.solr.util.RandomizeSSL; import org.apache.solr.util.RandomizeSSL.SSLRandomizer; - +import org.apache.solr.util.SSLTestConfig; import org.junit.BeforeClass; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * A "test the test" method that verifies the SSL options randomized by {@link SolrTestCaseJ4} are - * correctly used in the various helper methods available from the test framework and - * {@link MiniSolrCloudCluster}. + * A "test the test" method that verifies the SSL options randomized by {@link SolrTestCaseJ4} are + * correctly used in the various helper methods available from the test framework and {@link + * MiniSolrCloudCluster}. * * @see TestMiniSolrCloudClusterSSL */ -@RandomizeSSL(ssl=0.5,reason="frequent SSL usage to make test worth while") +@RandomizeSSL(ssl = 0.5, reason = "frequent SSL usage to make test worth while") public class TestSSLRandomization extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -45,102 +42,125 @@ public class TestSSLRandomization extends SolrCloudTestCase { public static void createMiniSolrCloudCluster() throws Exception { configureCluster(TestMiniSolrCloudClusterSSL.NUM_SERVERS).configure(); } - + public void testRandomizedSslAndClientAuth() throws Exception { - TestMiniSolrCloudClusterSSL.checkClusterWithCollectionCreations(cluster,sslConfig); + TestMiniSolrCloudClusterSSL.checkClusterWithCollectionCreations(cluster, sslConfig); } - + public void testBaseUrl() throws Exception { String url = buildUrl(6666, "/foo"); - assertEquals(sslConfig.isSSLMode() ? "https://127.0.0.1:6666/foo" : "http://127.0.0.1:6666/foo", url); + assertEquals( + sslConfig.isSSLMode() ? 
"https://127.0.0.1:6666/foo" : "http://127.0.0.1:6666/foo", url); } - + /** Used by {@link #testSSLRandomizer} */ - @RandomizeSSL(ssl=0.42,clientAuth=0.33,reason="foo") - public class FullyAnnotated { }; - + @RandomizeSSL(ssl = 0.42, clientAuth = 0.33, reason = "foo") + public class FullyAnnotated {} + ; + /** Used by {@link #testSSLRandomizer} */ - public class InheritedFullyAnnotated extends FullyAnnotated { }; - + public class InheritedFullyAnnotated extends FullyAnnotated {} + ; + /** Used by {@link #testSSLRandomizer} */ - public class NotAnnotated { }; - + public class NotAnnotated {} + ; + /** Used by {@link #testSSLRandomizer} */ - public class InheritedNotAnnotated extends NotAnnotated { }; - + public class InheritedNotAnnotated extends NotAnnotated {} + ; + /** Used by {@link #testSSLRandomizer} */ - @SuppressSSL(bugUrl="fakeBugUrl") - public class Suppressed { }; - + @SuppressSSL(bugUrl = "fakeBugUrl") + public class Suppressed {} + ; + /** Used by {@link #testSSLRandomizer} */ - public class InheritedSuppressed extends Suppressed { }; - + public class InheritedSuppressed extends Suppressed {} + ; + /** Used by {@link #testSSLRandomizer} */ - @SuppressSSL(bugUrl="fakeBugUrl") - public class InheritedAnnotationButSuppressed extends FullyAnnotated { }; - + @SuppressSSL(bugUrl = "fakeBugUrl") + public class InheritedAnnotationButSuppressed extends FullyAnnotated {} + ; + /** Used by {@link #testSSLRandomizer} */ - @RandomizeSSL(ssl=0.42,clientAuth=0.33,reason="foo") + @RandomizeSSL(ssl = 0.42, clientAuth = 0.33, reason = "foo") public class InheritedSuppressedWithIgnoredAnnotation extends Suppressed { // Even with direct annotation, supression at superclass overrules us. // // (If it didn't work this way, it would be a pain in the ass to quickly disable SSL for a // broad hierarchy of tests) - }; - + } + /** Used by {@link #testSSLRandomizer} */ @RandomizeSSL() - public class EmptyAnnotated { }; - + public class EmptyAnnotated {} + ; + /** Used by {@link #testSSLRandomizer} */ - public class InheritedEmptyAnnotated extends EmptyAnnotated { }; - + public class InheritedEmptyAnnotated extends EmptyAnnotated {} + ; + /** Used by {@link #testSSLRandomizer} */ @RandomizeSSL(0.5) - public class InheritedEmptyAnnotatationWithOverride extends EmptyAnnotated { }; - + public class InheritedEmptyAnnotatationWithOverride extends EmptyAnnotated {} + ; + /** Used by {@link #testSSLRandomizer} */ - @RandomizeSSL(ssl=0.42,clientAuth=0.33,reason="foo") - public class GrandchildInheritedEmptyAnnotatationWithOverride extends InheritedEmptyAnnotated { }; - + @RandomizeSSL(ssl = 0.42, clientAuth = 0.33, reason = "foo") + public class GrandchildInheritedEmptyAnnotatationWithOverride extends InheritedEmptyAnnotated {} + ; + /** Used by {@link #testSSLRandomizer} */ @RandomizeSSL(0.5) - public class SimplyAnnotated { }; - + public class SimplyAnnotated {} + ; + /** Used by {@link #testSSLRandomizer} */ @RandomizeSSL(0.0) - public class MinAnnotated { }; - + public class MinAnnotated {} + ; + /** Used by {@link #testSSLRandomizer} */ @RandomizeSSL(1) - public class MaxAnnotated { }; - + public class MaxAnnotated {} + ; + /** Used by {@link #testSSLRandomizer} */ - @RandomizeSSL(ssl=0.42) - public class SSlButNoClientAuthAnnotated { }; - + @RandomizeSSL(ssl = 0.42) + public class SSlButNoClientAuthAnnotated {} + ; + /** Used by {@link #testSSLRandomizer} */ - @RandomizeSSL(clientAuth=0.42) - public class ClientAuthButNoSSLAnnotated { }; - + @RandomizeSSL(clientAuth = 0.42) + public class 
ClientAuthButNoSSLAnnotated {} + ; + /** Used by {@link #testSSLRandomizer} */ - @RandomizeSSL(ssl=42.0) - public class SSLOutOfRangeAnnotated { }; - + @RandomizeSSL(ssl = 42.0) + public class SSLOutOfRangeAnnotated {} + ; + /** Used by {@link #testSSLRandomizer} */ - @RandomizeSSL(clientAuth=42.0) - public class ClientAuthOutOfRangeAnnotated { }; - + @RandomizeSSL(clientAuth = 42.0) + public class ClientAuthOutOfRangeAnnotated {} + ; + /** Used by {@link #testSSLRandomizer} */ - public class InheritedOutOfRangeAnnotated extends ClientAuthOutOfRangeAnnotated { }; - + public class InheritedOutOfRangeAnnotated extends ClientAuthOutOfRangeAnnotated {} + ; + public void testSSLRandomizer() { SSLRandomizer r; // for some cases, we know exactly what the config should be regardless of randomization factors SSLTestConfig conf; - for (Class c : Arrays.asList(FullyAnnotated.class, InheritedFullyAnnotated.class, - GrandchildInheritedEmptyAnnotatationWithOverride.class )) { + for (Class c : + Arrays.asList( + FullyAnnotated.class, + InheritedFullyAnnotated.class, + GrandchildInheritedEmptyAnnotatationWithOverride.class)) { r = SSLRandomizer.getSSLRandomizerForClass(c); assertEquals(c.toString(), 0.42D, r.ssl, 0.0D); assertEquals(c.toString(), 0.33D, r.clientAuth, 0.0D); @@ -157,10 +177,12 @@ public void testSSLRandomizer() { assertFalse(c.toString(), conf.isClientAuthMode()); } - for (Class c : Arrays.asList(Suppressed.class, - InheritedSuppressed.class, - InheritedAnnotationButSuppressed.class, - InheritedSuppressedWithIgnoredAnnotation.class)) { + for (Class c : + Arrays.asList( + Suppressed.class, + InheritedSuppressed.class, + InheritedAnnotationButSuppressed.class, + InheritedSuppressedWithIgnoredAnnotation.class)) { r = SSLRandomizer.getSSLRandomizerForClass(Suppressed.class); assertEquals(c.toString(), 0.0D, r.ssl, 0.0D); assertEquals(c.toString(), 0.0D, r.clientAuth, 0.0D); @@ -177,19 +199,20 @@ public void testSSLRandomizer() { assertEquals(c.toString(), RandomizeSSL.DEFAULT_ODDS, r.clientAuth, 0.0D); } - for (Class c : Arrays.asList(SimplyAnnotated.class, InheritedEmptyAnnotatationWithOverride.class)) { + for (Class c : + Arrays.asList(SimplyAnnotated.class, InheritedEmptyAnnotatationWithOverride.class)) { r = SSLRandomizer.getSSLRandomizerForClass(c); assertEquals(c.toString(), 0.5D, r.ssl, 0.0D); assertEquals(c.toString(), 0.5D, r.clientAuth, 0.0D); } - + r = SSLRandomizer.getSSLRandomizerForClass(MinAnnotated.class); assertEquals(0.0D, r.ssl, 0.0D); assertEquals(0.0D, r.clientAuth, 0.0D); conf = r.createSSLTestConfig(); assertFalse(conf.isSSLMode()); assertFalse(conf.isClientAuthMode()); - + r = SSLRandomizer.getSSLRandomizerForClass(MaxAnnotated.class); assertEquals(1.0D, r.ssl, 0.0D); assertEquals(1.0D, r.clientAuth, 0.0D); @@ -205,29 +228,35 @@ public void testSSLRandomizer() { assertEquals(RandomizeSSL.DEFAULT_ODDS, r.ssl, 0.0D); assertEquals(0.42D, r.clientAuth, 0.0D); - for (Class c : Arrays.asList(SSLOutOfRangeAnnotated.class, - ClientAuthOutOfRangeAnnotated.class, - InheritedOutOfRangeAnnotated.class)) { - expectThrows(IllegalArgumentException.class, () -> { - Object trash = SSLRandomizer.getSSLRandomizerForClass(c); - }); + for (Class c : + Arrays.asList( + SSLOutOfRangeAnnotated.class, + ClientAuthOutOfRangeAnnotated.class, + InheritedOutOfRangeAnnotated.class)) { + expectThrows( + IllegalArgumentException.class, + () -> { + Object trash = SSLRandomizer.getSSLRandomizerForClass(c); + }); } - } + public void testSSLRandomizerEffectiveOdds() { - 
assertEquals(RandomizeSSL.DEFAULT_ODDS, - SSLRandomizer.getEffectiveOdds(RandomizeSSL.DEFAULT_ODDS, false, 1), 0.0005D); - assertEquals(0.2727D, - SSLRandomizer.getEffectiveOdds(RandomizeSSL.DEFAULT_ODDS, true, 1), 0.0005D); - + assertEquals( + RandomizeSSL.DEFAULT_ODDS, + SSLRandomizer.getEffectiveOdds(RandomizeSSL.DEFAULT_ODDS, false, 1), + 0.0005D); + assertEquals( + 0.2727D, SSLRandomizer.getEffectiveOdds(RandomizeSSL.DEFAULT_ODDS, true, 1), 0.0005D); + assertEquals(0.0100D, SSLRandomizer.getEffectiveOdds(0.01D, false, 1), 0.0005D); assertEquals(0.1000D, SSLRandomizer.getEffectiveOdds(0.01D, true, 1), 0.0005D); assertEquals(0.6206D, SSLRandomizer.getEffectiveOdds(0.01D, false, 5), 0.0005D); - + assertEquals(0.5000D, SSLRandomizer.getEffectiveOdds(0.5D, false, 1), 0.0005D); assertEquals(0.5454D, SSLRandomizer.getEffectiveOdds(0.5D, true, 1), 0.0005D); assertEquals(0.8083D, SSLRandomizer.getEffectiveOdds(0.5D, false, 5), 0.0005D); - + assertEquals(0.8000D, SSLRandomizer.getEffectiveOdds(0.8D, false, 1), 0.0005D); assertEquals(0.8181D, SSLRandomizer.getEffectiveOdds(0.8D, true, 1), 0.0005D); assertEquals(0.9233D, SSLRandomizer.getEffectiveOdds(0.8D, false, 5), 0.0005D); @@ -238,17 +267,20 @@ public void testSSLRandomizerEffectiveOdds() { assertEquals(0.0D, SSLRandomizer.getEffectiveOdds(0.0D, false, 100), 0.0D); assertEquals(0.0D, SSLRandomizer.getEffectiveOdds(0.0D, true, 10000), 0.0D); assertEquals(0.0D, SSLRandomizer.getEffectiveOdds(0.0D, false, 10000), 0.0D); - assertEquals(0.0D, SSLRandomizer.getEffectiveOdds(0.0D, random().nextBoolean(), random().nextInt()), 0.0D); - + assertEquals( + 0.0D, + SSLRandomizer.getEffectiveOdds(0.0D, random().nextBoolean(), random().nextInt()), + 0.0D); + // always assertEquals(1.0D, SSLRandomizer.getEffectiveOdds(1.0D, false, 1), 0.0D); assertEquals(1.0D, SSLRandomizer.getEffectiveOdds(1.0D, true, 100), 0.0D); assertEquals(1.0D, SSLRandomizer.getEffectiveOdds(1.0D, false, 100), 0.0D); assertEquals(1.0D, SSLRandomizer.getEffectiveOdds(1.0D, true, 10000), 0.0D); assertEquals(1.0D, SSLRandomizer.getEffectiveOdds(1.0D, false, 10000), 0.0D); - assertEquals(1.0D, SSLRandomizer.getEffectiveOdds(1.0D, random().nextBoolean(), random().nextInt()), 0.0D); - + assertEquals( + 1.0D, + SSLRandomizer.getEffectiveOdds(1.0D, random().nextBoolean(), random().nextInt()), + 0.0D); } - - } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestSegmentSorting.java b/solr/core/src/test/org/apache/solr/cloud/TestSegmentSorting.java index 9e83b55288a..b7275d23801 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestSegmentSorting.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestSegmentSorting.java @@ -20,25 +20,20 @@ import java.nio.file.Paths; import java.util.HashMap; import java.util.Map; - import org.apache.lucene.util.TestUtil; import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.schema.SchemaRequest.Field; import org.apache.solr.client.solrj.response.RequestStatusState; - -import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.SolrDocumentList; +import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.core.CoreDescriptor; - import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; - - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -49,16 +44,16 @@ public class TestSegmentSorting extends 
SolrCloudTestCase { private static final int NUM_SHARDS = 2; private static final int REPLICATION_FACTOR = 2; private static final String configName = MethodHandles.lookup().lookupClass() + "_configSet"; - + @BeforeClass public static void setupCluster() throws Exception { configureCluster(NUM_SERVERS) - .addConfig(configName, Paths.get(TEST_HOME(), "collection1", "conf")) - .configure(); + .addConfig(configName, Paths.get(TEST_HOME(), "collection1", "conf")) + .configure(); } - + @Rule public TestName testName = new TestName(); - + @After public void ensureClusterEmpty() throws Exception { cluster.deleteAllCollections(); @@ -70,29 +65,29 @@ public void createCollection() throws Exception { final String collectionName = testName.getMethodName(); final CloudSolrClient cloudSolrClient = cluster.getSolrClient(); - + final Map collectionProperties = new HashMap<>(); - collectionProperties.put(CoreDescriptor.CORE_CONFIG, "solrconfig-sortingmergepolicyfactory.xml"); - - CollectionAdminRequest.Create cmd = - CollectionAdminRequest.createCollection(collectionName, configName, - NUM_SHARDS, REPLICATION_FACTOR) - .setProperties(collectionProperties); + collectionProperties.put( + CoreDescriptor.CORE_CONFIG, "solrconfig-sortingmergepolicyfactory.xml"); + + CollectionAdminRequest.Create cmd = + CollectionAdminRequest.createCollection( + collectionName, configName, + NUM_SHARDS, REPLICATION_FACTOR) + .setProperties(collectionProperties); if (random().nextBoolean()) { - assertTrue( cmd.process(cloudSolrClient).isSuccess() ); + assertTrue(cmd.process(cloudSolrClient).isSuccess()); } else { // async assertEquals(RequestStatusState.COMPLETED, cmd.processAndWait(cloudSolrClient, 30)); } - + ZkStateReader zkStateReader = cloudSolrClient.getZkStateReader(); cluster.waitForActiveCollection(collectionName, NUM_SHARDS, NUM_SHARDS * REPLICATION_FACTOR); - + cloudSolrClient.setDefaultCollection(collectionName); } - - // 12-Jun-2018 @Test@BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") public void testSegmentTerminateEarly() throws Exception { final SegmentTerminateEarlyTestState tstes = new SegmentTerminateEarlyTestState(random()); @@ -100,27 +95,26 @@ public void testSegmentTerminateEarly() throws Exception { // add some documents, then optimize to get merged-sorted segments tstes.addDocuments(cloudSolrClient, 10, 10, true); - + // CommonParams.SEGMENT_TERMINATE_EARLY parameter intentionally absent tstes.queryTimestampDescending(cloudSolrClient); - + // add a few more documents, but don't optimize to have some not-merge-sorted segments tstes.addDocuments(cloudSolrClient, 2, 10, false); - + // CommonParams.SEGMENT_TERMINATE_EARLY parameter now present tstes.queryTimestampDescendingSegmentTerminateEarlyYes(cloudSolrClient); tstes.queryTimestampDescendingSegmentTerminateEarlyNo(cloudSolrClient); - + // CommonParams.SEGMENT_TERMINATE_EARLY parameter present but it won't be used tstes.queryTimestampDescendingSegmentTerminateEarlyYesGrouped(cloudSolrClient); - tstes.queryTimestampAscendingSegmentTerminateEarlyYes(cloudSolrClient); // uses a sort order that is _not_ compatible with the merge sort order - + // uses a sort order that is _not_ compatible with the merge sort order + tstes.queryTimestampAscendingSegmentTerminateEarlyYes(cloudSolrClient); } - /** - * Verify that atomic updates against our (DVO) segment sort field doesn't cause errors. 
- * In this situation, the updates should *NOT* be done inplace, because that would - * break the index sorting + /** + * Verify that atomic updates against our (DVO) segment sort field doesn't cause errors. In this + * situation, the updates should *NOT* be done inplace, because that would break the index sorting */ @Test // 12-Jun-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 26-Mar-2018 @@ -131,54 +125,68 @@ public void testAtomicUpdateOfSegmentSortField() throws Exception { // sanity check that updateField is in fact a DocValues only field, meaning it // would normally be eligable for inplace updates -- if it weren't also used for merge sorting - final Map schemaOpts - = new Field(updateField, params("includeDynamic", "true", - "showDefaults","true")).process(cloudSolrClient).getField(); + final Map schemaOpts = + new Field( + updateField, + params( + "includeDynamic", "true", + "showDefaults", "true")) + .process(cloudSolrClient) + .getField(); assertEquals(true, schemaOpts.get("docValues")); assertEquals(false, schemaOpts.get("indexed")); assertEquals(false, schemaOpts.get("stored")); - + // add some documents final int numDocs = atLeast(1000); for (int id = 1; id <= numDocs; id++) { cloudSolrClient.add(sdoc("id", id, updateField, random().nextInt(60))); - } cloudSolrClient.commit(); - // do some random iterations of replacing docs, atomic updates against segment sort field, and commits + // do some random iterations of replacing docs, atomic updates against segment sort field, and + // commits // (at this point we're just sanity checking no serious failures) for (int iter = 0; iter < 20; iter++) { final int iterSize = atLeast(20); for (int i = 0; i < iterSize; i++) { // replace - cloudSolrClient.add(sdoc("id", TestUtil.nextInt(random(), 1, numDocs), - updateField, random().nextInt(60))); + cloudSolrClient.add( + sdoc("id", TestUtil.nextInt(random(), 1, numDocs), updateField, random().nextInt(60))); // atomic update - cloudSolrClient.add(sdoc("id", TestUtil.nextInt(random(), 1, numDocs), - updateField, map("set", random().nextInt(60)))); + cloudSolrClient.add( + sdoc( + "id", + TestUtil.nextInt(random(), 1, numDocs), + updateField, + map("set", random().nextInt(60)))); } cloudSolrClient.commit(); } - // pick a random doc, and verify that doing an atomic update causes the docid to change // ie: not an inplace update final int id = TestUtil.nextInt(random(), 1, numDocs); - final int oldDocId = (Integer) cloudSolrClient.getById(""+id, params("fl","[docid]")).get("[docid]"); - - cloudSolrClient.add(sdoc("id", id, updateField, map("inc","666"))); + final int oldDocId = + (Integer) cloudSolrClient.getById("" + id, params("fl", "[docid]")).get("[docid]"); + + cloudSolrClient.add(sdoc("id", id, updateField, map("inc", "666"))); cloudSolrClient.commit(); - + // loop incase we're waiting for a newSearcher to be opened int newDocId = -1; int attempts = 10; while ((newDocId < 0) && (0 < attempts--)) { - SolrDocumentList docs = cloudSolrClient.query(params("q", "id:"+id, - "fl","[docid]", - "fq", updateField + "[666 TO *]")).getResults(); + SolrDocumentList docs = + cloudSolrClient + .query( + params( + "q", "id:" + id, + "fl", "[docid]", + "fq", updateField + "[666 TO *]")) + .getResults(); if (0 < docs.size()) { - newDocId = (Integer)docs.get(0).get("[docid]"); + newDocId = (Integer) docs.get(0).get("[docid]"); } else { Thread.sleep(50); } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestShortCircuitedRequests.java 
b/solr/core/src/test/org/apache/solr/cloud/TestShortCircuitedRequests.java index 08e0eb58f2c..73b5351226a 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestShortCircuitedRequests.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestShortCircuitedRequests.java @@ -16,8 +16,8 @@ */ package org.apache.solr.cloud; -import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrClient; +import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.params.ShardParams; @@ -27,7 +27,7 @@ public class TestShortCircuitedRequests extends AbstractFullDistribZkTestBase { public TestShortCircuitedRequests() { - schemaString = "schema15.xml"; // we need a string id + schemaString = "schema15.xml"; // we need a string id super.sliceCount = 4; } @@ -35,20 +35,36 @@ public TestShortCircuitedRequests() { @ShardsFixed(num = 4) public void test() throws Exception { waitForRecoveriesToFinish(false); - assertEquals(4, cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION).getSlices().size()); - index("id", "a!doc1"); // shard3 - index("id", "b!doc1"); // shard1 - index("id", "c!doc1"); // shard2 - index("id", "e!doc1"); // shard4 + assertEquals( + 4, + cloudClient + .getZkStateReader() + .getClusterState() + .getCollection(DEFAULT_COLLECTION) + .getSlices() + .size()); + index("id", "a!doc1"); // shard3 + index("id", "b!doc1"); // shard1 + index("id", "c!doc1"); // shard2 + index("id", "e!doc1"); // shard4 commit(); doQuery("a!doc1", "q", "*:*", ShardParams._ROUTE_, "a!"); // can go to any random node // query shard3 directly with _route_=a! so that we trigger the short circuited request path - Replica shard3 = cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION).getLeader("shard3"); + Replica shard3 = + cloudClient + .getZkStateReader() + .getClusterState() + .getCollection(DEFAULT_COLLECTION) + .getLeader("shard3"); String nodeName = shard3.getNodeName(); SolrClient shard3Client = getClient(nodeName); - QueryResponse response = shard3Client.query(new SolrQuery("*:*").add(ShardParams._ROUTE_, "a!").add(ShardParams.SHARDS_INFO, "true")); + QueryResponse response = + shard3Client.query( + new SolrQuery("*:*") + .add(ShardParams._ROUTE_, "a!") + .add(ShardParams.SHARDS_INFO, "true")); assertEquals("Could not find doc", 1, response.getResults().getNumFound()); NamedList sinfo = (NamedList) response.getResponse().get(ShardParams.SHARDS_INFO); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestSizeLimitedDistributedMap.java b/solr/core/src/test/org/apache/solr/cloud/TestSizeLimitedDistributedMap.java index 879a4e6fed9..a086656d13c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestSizeLimitedDistributedMap.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestSizeLimitedDistributedMap.java @@ -28,11 +28,13 @@ public class TestSizeLimitedDistributedMap extends TestDistributedMap { public void testCleanup() throws Exception { final List deletedItems = new LinkedList<>(); final Set expectedKeys = new HashSet<>(); - int numResponsesToStore=TEST_NIGHTLY?Overseer.NUM_RESPONSES_TO_STORE:100; - + int numResponsesToStore = TEST_NIGHTLY ? 
Overseer.NUM_RESPONSES_TO_STORE : 100; + try (SolrZkClient zkClient = new SolrZkClient(zkServer.getZkHost(), 10000)) { String path = getAndMakeInitialPath(zkClient); - DistributedMap map = new SizeLimitedDistributedMap(zkClient, path, numResponsesToStore, (element)->deletedItems.add(element)); + DistributedMap map = + new SizeLimitedDistributedMap( + zkClient, path, numResponsesToStore, (element) -> deletedItems.add(element)); for (int i = 0; i < numResponsesToStore; i++) { map.put("xyz_" + i, new byte[0]); expectedKeys.add("xyz_" + i); @@ -44,8 +46,10 @@ public void testCleanup() throws Exception { // add another to trigger cleanup map.put("xyz_" + numResponsesToStore, new byte[0]); expectedKeys.add("xyz_" + numResponsesToStore); - assertEquals("Distributed queue was not cleaned up", - numResponsesToStore - (numResponsesToStore / 10) + 1, map.size()); + assertEquals( + "Distributed queue was not cleaned up", + numResponsesToStore - (numResponsesToStore / 10) + 1, + map.size()); for (int i = numResponsesToStore; i >= numResponsesToStore / 10; i--) { assertTrue(map.contains("xyz_" + i)); } @@ -57,13 +61,13 @@ public void testCleanup() throws Exception { assertTrue("Expected keys do not match", expectedKeys.containsAll(map.keys())); assertTrue("Expected keys do not match", map.keys().containsAll(expectedKeys)); map.remove("xyz_" + numResponsesToStore); - assertFalse("map.remove shouldn't trigger the observer", + assertFalse( + "map.remove shouldn't trigger the observer", deletedItems.contains("xyz_" + numResponsesToStore)); } } - + protected DistributedMap createMap(SolrZkClient zkClient, String path) { return new SizeLimitedDistributedMap(zkClient, path, Overseer.NUM_RESPONSES_TO_STORE, null); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestSkipOverseerOperations.java b/solr/core/src/test/org/apache/solr/cloud/TestSkipOverseerOperations.java index 3d893b4cc2f..5b0f1a9e47b 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestSkipOverseerOperations.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestSkipOverseerOperations.java @@ -24,7 +24,6 @@ import java.util.SortedSet; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; - import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -47,94 +46,102 @@ public void setupCluster() throws Exception { System.setProperty("solr.ulog.numRecordsToKeep", "1000"); configureCluster(3) - .addConfig("config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .configure(); } - + @After public void tearDown() throws Exception { shutdownCluster(); super.tearDown(); } - + public void testSkipLeaderOperations() throws Exception { - if (new CollectionAdminRequest.RequestApiDistributedProcessing().process(cluster.getSolrClient()).getIsCollectionApiDistributed()) { + if (new CollectionAdminRequest.RequestApiDistributedProcessing() + .process(cluster.getSolrClient()) + .getIsCollectionApiDistributed()) { log.info("Skipping test because Collection API is distributed"); return; } String overseerLeader = getOverseerLeader(); - + assertNotNull(overseerLeader); assertTrue(overseerLeader.length() > 0); - - List notOverseerNodes = cluster.getJettySolrRunners() - .stream() - .filter(solrRunner -> !solrRunner.getNodeName().equals(overseerLeader)) - 
.collect(Collectors.toList()); - + + List notOverseerNodes = + cluster.getJettySolrRunners().stream() + .filter(solrRunner -> !solrRunner.getNodeName().equals(overseerLeader)) + .collect(Collectors.toList()); + assertEquals(2, notOverseerNodes.size()); - + String collection = "collection1"; - CollectionAdminRequest - .createCollection(collection, 2, 1) - .setCreateNodeSet(notOverseerNodes - .stream() - .map(JettySolrRunner::getNodeName) - .collect(Collectors.joining(",")) - ) + CollectionAdminRequest.createCollection(collection, 2, 1) + .setCreateNodeSet( + notOverseerNodes.stream() + .map(JettySolrRunner::getNodeName) + .collect(Collectors.joining(","))) .process(cluster.getSolrClient()); cluster.waitForActiveCollection("collection1", 2, 2); ZkStateReader reader = cluster.getSolrClient().getZkStateReader(); - + List nodes = new ArrayList<>(); for (JettySolrRunner solrRunner : notOverseerNodes) { nodes.add(solrRunner.getNodeName()); } - + for (JettySolrRunner solrRunner : notOverseerNodes) { solrRunner.stop(); } - + for (JettySolrRunner solrRunner : notOverseerNodes) { cluster.waitForJettyToStop(solrRunner); } - - reader.waitForLiveNodes(30, TimeUnit.SECONDS, new LiveNodesPredicate() { - - @Override - public boolean matches(SortedSet oldLiveNodes, SortedSet newLiveNodes) { - boolean success = true; - for (String lostNodeName : nodes) { - if (newLiveNodes.contains(lostNodeName)) { - success = false; - break; + + reader.waitForLiveNodes( + 30, + TimeUnit.SECONDS, + new LiveNodesPredicate() { + + @Override + public boolean matches(SortedSet oldLiveNodes, SortedSet newLiveNodes) { + boolean success = true; + for (String lostNodeName : nodes) { + if (newLiveNodes.contains(lostNodeName)) { + success = false; + break; + } + } + + return success; } - } - - return success; - } - }); - - waitForState("Expected single liveNode", collection, + }); + + waitForState( + "Expected single liveNode", + collection, (liveNodes, collectionState) -> liveNodes.size() == 1); - CollectionAdminResponse resp = CollectionAdminRequest.getOverseerStatus().process(cluster.getSolrClient()); + CollectionAdminResponse resp = + CollectionAdminRequest.getOverseerStatus().process(cluster.getSolrClient()); for (JettySolrRunner solrRunner : notOverseerNodes) { solrRunner.start(); } - + cluster.waitForAllNodes(30); - waitForState("Expected 2x1 for collection: " + collection, collection, - clusterShape(2, 2)); - CollectionAdminResponse resp2 = CollectionAdminRequest.getOverseerStatus().process(cluster.getSolrClient()); + waitForState("Expected 2x1 for collection: " + collection, collection, clusterShape(2, 2)); + CollectionAdminResponse resp2 = + CollectionAdminRequest.getOverseerStatus().process(cluster.getSolrClient()); - // When cluster state updates are done in a distributed way, the stats that this test is verifying are not available. - // See comment in OverseerStatusCmd.call(). + // When cluster state updates are done in a distributed way, the stats that this test is + // verifying are not available. See comment in OverseerStatusCmd.call(). // Keeping the rest of the test running in case other errors can happen and can be caught... - // Eventually maintain per node cluster state updates stats and be able to check them here? Longer term question... + // Eventually maintain per node cluster state updates stats and be able to check them here? + // Longer term question... 
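The anonymous LiveNodesPredicate used above simply waits until every stopped node has dropped out of live_nodes. Since the predicate is a single-method interface, the same wait can be sketched as a lambda (assuming the test's reader and nodes variables):

reader.waitForLiveNodes(
    30,
    TimeUnit.SECONDS,
    // succeed once none of the stopped nodes is still listed as live
    (oldLiveNodes, newLiveNodes) -> nodes.stream().noneMatch(newLiveNodes::contains));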
if (!cluster.getOpenOverseer().getDistributedClusterStateUpdater().isDistributedStateUpdate()) { assertEquals(getNumLeaderOpeations(resp), getNumLeaderOpeations(resp2)); @@ -143,71 +150,76 @@ public boolean matches(SortedSet oldLiveNodes, SortedSet newLive } @Test - // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Sep-2018 public void testSkipDownOperations() throws Exception { - if (new CollectionAdminRequest.RequestApiDistributedProcessing().process(cluster.getSolrClient()).getIsCollectionApiDistributed()) { + if (new CollectionAdminRequest.RequestApiDistributedProcessing() + .process(cluster.getSolrClient()) + .getIsCollectionApiDistributed()) { log.info("Skipping test because Collection API is distributed"); return; } String overseerLeader = getOverseerLeader(); - List notOverseerNodes = cluster.getJettySolrRunners() - .stream() - .filter(solrRunner -> !solrRunner.getNodeName().equals(overseerLeader)) - .collect(Collectors.toList()); + List notOverseerNodes = + cluster.getJettySolrRunners().stream() + .filter(solrRunner -> !solrRunner.getNodeName().equals(overseerLeader)) + .collect(Collectors.toList()); String collection = "collection2"; - CollectionAdminRequest - .createCollection(collection, 2, 2) - .setCreateNodeSet(notOverseerNodes - .stream() - .map(JettySolrRunner::getNodeName) - .collect(Collectors.joining(",")) - ) + CollectionAdminRequest.createCollection(collection, 2, 2) + .setCreateNodeSet( + notOverseerNodes.stream() + .map(JettySolrRunner::getNodeName) + .collect(Collectors.joining(","))) .process(cluster.getSolrClient()); - + cluster.waitForActiveCollection(collection, 2, 4); - + ZkStateReader reader = cluster.getSolrClient().getZkStateReader(); - + List nodes = new ArrayList<>(); for (JettySolrRunner solrRunner : notOverseerNodes) { nodes.add(solrRunner.getNodeName()); } - + for (JettySolrRunner solrRunner : notOverseerNodes) { solrRunner.stop(); } for (JettySolrRunner solrRunner : notOverseerNodes) { cluster.waitForJettyToStop(solrRunner); } - - reader.waitForLiveNodes(30, TimeUnit.SECONDS, new LiveNodesPredicate() { - - @Override - public boolean matches(SortedSet oldLiveNodes, SortedSet newLiveNodes) { - boolean success = true; - for (String lostNodeName : nodes) { - if (newLiveNodes.contains(lostNodeName)) { - success = false; - break; + + reader.waitForLiveNodes( + 30, + TimeUnit.SECONDS, + new LiveNodesPredicate() { + + @Override + public boolean matches(SortedSet oldLiveNodes, SortedSet newLiveNodes) { + boolean success = true; + for (String lostNodeName : nodes) { + if (newLiveNodes.contains(lostNodeName)) { + success = false; + break; + } + } + + return success; } - } - - return success; - } - }); - - waitForState("Expected single liveNode", collection, + }); + + waitForState( + "Expected single liveNode", + collection, (liveNodes, collectionState) -> liveNodes.size() == 1); - CollectionAdminResponse resp = CollectionAdminRequest.getOverseerStatus().process(cluster.getSolrClient()); + CollectionAdminResponse resp = + CollectionAdminRequest.getOverseerStatus().process(cluster.getSolrClient()); for (JettySolrRunner solrRunner : notOverseerNodes) { solrRunner.start(); } cluster.waitForAllNodes(30); - waitForState("Expected 2x2 for collection: " + collection, collection, - clusterShape(2, 4)); - CollectionAdminResponse resp2 = CollectionAdminRequest.getOverseerStatus().process(cluster.getSolrClient()); + waitForState("Expected 2x2 for collection: " + collection, collection, clusterShape(2, 4)); + 
CollectionAdminResponse resp2 = + CollectionAdminRequest.getOverseerStatus().process(cluster.getSolrClient()); // See comment in testSkipLeaderOperations() above why this assert is skipped if (!cluster.getOpenOverseer().getDistributedClusterStateUpdater().isDistributedStateUpdate()) { @@ -218,27 +230,28 @@ public boolean matches(SortedSet oldLiveNodes, SortedSet newLive } /** - * Returns the value corresponding to stat: "overseer_operations", "leader", "requests" - * This stat (see {@link org.apache.solr.cloud.api.collections.OverseerStatusCmd} is updated when the cluster state - * updater processes a message of type {@link org.apache.solr.cloud.overseer.OverseerAction#LEADER} to set a shard leader
+ * Returns the value corresponding to stat: "overseer_operations", "leader", "requests" This stat + * (see {@link org.apache.solr.cloud.api.collections.OverseerStatusCmd} is updated when the + * cluster state updater processes a message of type {@link + * org.apache.solr.cloud.overseer.OverseerAction#LEADER} to set a shard leader * - * The update happens in org.apache.solr.cloud.Overseer.ClusterStateUpdater.processQueueItem() + *
<p>
The update happens in org.apache.solr.cloud.Overseer.ClusterStateUpdater.processQueueItem() */ private int getNumLeaderOpeations(CollectionAdminResponse resp) { return (int) resp.getResponse().findRecursive("overseer_operations", "leader", "requests"); } /** - * "state" stats are when Overseer processes a {@link org.apache.solr.cloud.overseer.OverseerAction#STATE} message - * that sets replica properties + * "state" stats are when Overseer processes a {@link + * org.apache.solr.cloud.overseer.OverseerAction#STATE} message that sets replica properties */ private int getNumStateOpeations(CollectionAdminResponse resp) { return (int) resp.getResponse().findRecursive("overseer_operations", "state", "requests"); } private String getOverseerLeader() throws IOException, SolrServerException { - CollectionAdminResponse resp = CollectionAdminRequest.getOverseerStatus().process(cluster.getSolrClient()); - return (String) resp.getResponse().get("leader"); + CollectionAdminResponse resp = + CollectionAdminRequest.getOverseerStatus().process(cluster.getSolrClient()); + return (String) resp.getResponse().get("leader"); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestStressCloudBlindAtomicUpdates.java b/solr/core/src/test/org/apache/solr/cloud/TestStressCloudBlindAtomicUpdates.java index 4e2a0274c2b..b5e1dc95464 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestStressCloudBlindAtomicUpdates.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestStressCloudBlindAtomicUpdates.java @@ -30,7 +30,6 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicLong; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4.SuppressSSL; @@ -63,26 +62,28 @@ /** * Stress test of Atomic Updates in a MinCloud Cluster. - * - * Focus of test is parallel threads hammering updates on diff docs using random clients/nodes, - * Optimistic Concurrency is not used here because of SOLR-8733, instead we just throw lots of - * "inc" operations at a numeric field and check that the math works out at the end. + * + *
<p>
Focus of test is parallel threads hammering updates on diff docs using random clients/nodes, + * Optimistic Concurrency is not used here because of SOLR-8733, instead we just throw lots of "inc" + * operations at a numeric field and check that the math works out at the end. */ @Slow -@SuppressSSL(bugUrl="SSL overhead seems to cause OutOfMemory when stress testing") +@SuppressSSL(bugUrl = "SSL overhead seems to cause OutOfMemory when stress testing") public class TestStressCloudBlindAtomicUpdates extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final String DEBUG_LABEL = MethodHandles.lookup().lookupClass().getName(); private static final String COLLECTION_NAME = "test_col"; - + /** A basic client for operations at the cloud level, default collection will be set */ private static CloudSolrClient CLOUD_CLIENT; /** One client per node */ private static final ArrayList CLIENTS = new ArrayList<>(5); - /** Service to execute all parallel work + /** + * Service to execute all parallel work + * * @see #NUM_THREADS */ private static ExecutorService EXEC_SERVICE; @@ -90,18 +91,19 @@ public class TestStressCloudBlindAtomicUpdates extends SolrCloudTestCase { /** num parallel threads in use by {@link #EXEC_SERVICE} */ private static int NUM_THREADS; - /** - * Used as an increment and multiplier when deciding how many docs should be in - * the test index. 1 means every doc in the index is a candidate for updates, bigger numbers mean a - * larger index is used (so tested docs are more likeely to be spread out in multiple segments) + /** + * Used as an increment and multiplier when deciding how many docs should be in the test index. 1 + * means every doc in the index is a candidate for updates, bigger numbers mean a larger index is + * used (so tested docs are more likeely to be spread out in multiple segments) */ private static int DOC_ID_INCR; /** * The TestInjection configuration to be used for the current test method. * - * Value is set by {@link #clearCloudCollection}, and used by {@link #startTestInjection} -- but only once - * initial index seeding has finished (we're focusing on testing atomic updates, not basic indexing). + *
<p>
Value is set by {@link #clearCloudCollection}, and used by {@link #startTestInjection} -- + * but only once initial index seeding has finished (we're focusing on testing atomic updates, not + * basic indexing). */ private String testInjection = null; @@ -111,16 +113,17 @@ private static void createMiniSolrCloudCluster() throws Exception { // NOTE: numDocsToCheck uses atLeast, so nightly & multiplier are alreayd a factor in index size // no need to redundently factor them in here as well DOC_ID_INCR = TestUtil.nextInt(random(), 1, 7); - + NUM_THREADS = atLeast(3); - EXEC_SERVICE = ExecutorUtil.newMDCAwareFixedThreadPool - (NUM_THREADS, new SolrNamedThreadFactory(DEBUG_LABEL)); - + EXEC_SERVICE = + ExecutorUtil.newMDCAwareFixedThreadPool( + NUM_THREADS, new SolrNamedThreadFactory(DEBUG_LABEL)); + // at least 2, but don't go crazy on nightly/test.multiplier with "atLeast()" - final int numShards = TEST_NIGHTLY ? 5 : 2; - final int repFactor = 2; + final int numShards = TEST_NIGHTLY ? 5 : 2; + final int repFactor = 2; final int numNodes = numShards * repFactor; - + final String configName = DEBUG_LABEL + "_config-set"; final Path configDir = TEST_COLL1_CONF(); @@ -145,14 +148,16 @@ private static void createMiniSolrCloudCluster() throws Exception { } // sanity check no one broke the assumptions we make about our schema - checkExpectedSchemaType( map("name","long", - "class", RANDOMIZED_NUMERIC_FIELDTYPES.get(Long.class), - "multiValued",Boolean.FALSE, - "indexed",Boolean.FALSE, - "stored",Boolean.FALSE, - "docValues",Boolean.FALSE) ); + checkExpectedSchemaType( + map( + "name", "long", + "class", RANDOMIZED_NUMERIC_FIELDTYPES.get(Long.class), + "multiValued", Boolean.FALSE, + "indexed", Boolean.FALSE, + "stored", Boolean.FALSE, + "docValues", Boolean.FALSE)); } - + @AfterClass private static void afterClass() throws Exception { TestInjection.reset(); @@ -172,99 +177,111 @@ private static void afterClass() throws Exception { } CLIENTS.clear(); } - + @Before private void clearCloudCollection() throws Exception { TestInjection.reset(); waitForRecoveriesToFinish(CLOUD_CLIENT); - + assertEquals(0, CLOUD_CLIENT.deleteByQuery("*:*").getStatus()); assertEquals(0, CLOUD_CLIENT.optimize().getStatus()); - - assertEquals("Collection should be empty!", - 0, CLOUD_CLIENT.query(params("q", "*:*")).getResults().getNumFound()); - final int injectionPercentage = (int)Math.ceil(atLeast(1) / 2); + assertEquals( + "Collection should be empty!", + 0, + CLOUD_CLIENT.query(params("q", "*:*")).getResults().getNumFound()); + + final int injectionPercentage = (int) Math.ceil(atLeast(1) / 2); testInjection = usually() ? "false:0" : ("true:" + injectionPercentage); } /** - * Assigns {@link #testInjection} to various TestInjection variables. Calling this - * method multiple times in the same method should always result in the same setting being applied - * (even if {@link TestInjection#reset} was called in between. + * Assigns {@link #testInjection} to various TestInjection variables. Calling this method multiple + * times in the same method should always result in the same setting being applied (even if {@link + * TestInjection#reset} was called in between. * - * NOTE: method is currently a No-Op pending SOLR-13189 + *
<p>
NOTE: method is currently a No-Op pending SOLR-13189 */ private void startTestInjection() { log.info("TODO: TestInjection disabled pending solution to SOLR-13189"); - //log.info("TestInjection: fail replica, update pause, tlog pauses: " + testInjection); - //TestInjection.failReplicaRequests = testInjection; - //TestInjection.updateLogReplayRandomPause = testInjection; - //TestInjection.updateRandomPause = testInjection; + // log.info("TestInjection: fail replica, update pause, tlog pauses: " + testInjection); + // TestInjection.failReplicaRequests = testInjection; + // TestInjection.updateLogReplayRandomPause = testInjection; + // TestInjection.updateRandomPause = testInjection; } - @Test @SuppressWarnings({"unchecked"}) public void test_dv() throws Exception { String field = "long_dv"; - checkExpectedSchemaField(map("name", field, - "type","long", - "stored",Boolean.FALSE, - "indexed",Boolean.FALSE, - "docValues",Boolean.TRUE)); - + checkExpectedSchemaField( + map( + "name", field, + "type", "long", + "stored", Boolean.FALSE, + "indexed", Boolean.FALSE, + "docValues", Boolean.TRUE)); + checkField(field); } - + @Test @SuppressWarnings({"unchecked"}) public void test_dv_stored() throws Exception { String field = "long_dv_stored"; - checkExpectedSchemaField(map("name", field, - "type","long", - "stored",Boolean.TRUE, - "indexed",Boolean.FALSE, - "docValues",Boolean.TRUE)); - - checkField(field); + checkExpectedSchemaField( + map( + "name", field, + "type", "long", + "stored", Boolean.TRUE, + "indexed", Boolean.FALSE, + "docValues", Boolean.TRUE)); + checkField(field); } + @SuppressWarnings({"unchecked"}) public void test_dv_stored_idx() throws Exception { String field = "long_dv_stored_idx"; - checkExpectedSchemaField(map("name", field, - "type","long", - "stored",Boolean.TRUE, - "indexed",Boolean.TRUE, - "docValues",Boolean.TRUE)); - + checkExpectedSchemaField( + map( + "name", field, + "type", "long", + "stored", Boolean.TRUE, + "indexed", Boolean.TRUE, + "docValues", Boolean.TRUE)); + checkField(field); } @SuppressWarnings({"unchecked"}) public void test_dv_idx() throws Exception { String field = "long_dv_idx"; - checkExpectedSchemaField(map("name", field, - "type","long", - "stored",Boolean.FALSE, - "indexed",Boolean.TRUE, - "docValues",Boolean.TRUE)); - + checkExpectedSchemaField( + map( + "name", field, + "type", "long", + "stored", Boolean.FALSE, + "indexed", Boolean.TRUE, + "docValues", Boolean.TRUE)); + checkField(field); } + @SuppressWarnings({"unchecked"}) public void test_stored_idx() throws Exception { String field = "long_stored_idx"; - checkExpectedSchemaField(map("name", field, - "type","long", - "stored",Boolean.TRUE, - "indexed",Boolean.TRUE, - "docValues",Boolean.FALSE)); - + checkExpectedSchemaField( + map( + "name", field, + "type", "long", + "stored", Boolean.TRUE, + "indexed", Boolean.TRUE, + "docValues", Boolean.FALSE)); + checkField(field); } - + public void checkField(final String numericFieldName) throws Exception { final CountDownLatch abortLatch = new CountDownLatch(1); @@ -273,37 +290,40 @@ public void checkField(final String numericFieldName) throws Exception { final int numDocsInIndex = (numDocsToCheck * DOC_ID_INCR); final AtomicLong[] expected = new AtomicLong[numDocsToCheck]; - log.info("Testing {}: numDocsToCheck={}, numDocsInIndex={}, incr={}" - , numericFieldName, numDocsToCheck, numDocsInIndex, DOC_ID_INCR); - + log.info( + "Testing {}: numDocsToCheck={}, numDocsInIndex={}, incr={}", + numericFieldName, + numDocsToCheck, + numDocsInIndex, + 
DOC_ID_INCR); + // seed the index & keep track of what docs exist and with what values for (int id = 0; id < numDocsInIndex; id++) { // NOTE: the field we're mutating is a long, but we seed with a random int, // and we will inc/dec by random smaller ints, to ensure we never over/under flow final int initValue = random().nextInt(); - SolrInputDocument doc = doc(f("id",""+id), f(numericFieldName, initValue)); + SolrInputDocument doc = doc(f("id", "" + id), f(numericFieldName, initValue)); UpdateResponse rsp = update(doc).process(CLOUD_CLIENT); assertEquals(doc.toString() + " => " + rsp.toString(), 0, rsp.getStatus()); if (0 == id % DOC_ID_INCR) { expected[id / DOC_ID_INCR] = new AtomicLong(initValue); } } - assertNotNull("Sanity Check no off-by-one in expected init: ", expected[expected.length-1]); - - + assertNotNull("Sanity Check no off-by-one in expected init: ", expected[expected.length - 1]); + // sanity check index contents waitForRecoveriesToFinish(CLOUD_CLIENT); assertEquals(0, CLOUD_CLIENT.commit().getStatus()); - assertEquals(numDocsInIndex, - CLOUD_CLIENT.query(params("q", "*:*")).getResults().getNumFound()); + assertEquals(numDocsInIndex, CLOUD_CLIENT.query(params("q", "*:*")).getResults().getNumFound()); startTestInjection(); - + // spin up parallel workers to hammer updates List> results = new ArrayList>(NUM_THREADS); for (int workerId = 0; workerId < NUM_THREADS; workerId++) { - Worker worker = new Worker(workerId, expected, abortLatch, new Random(random().nextLong()), - numericFieldName); + Worker worker = + new Worker( + workerId, expected, abortLatch, new Random(random().nextLong()), numericFieldName); // ask for the Worker to be returned in the Future so we can inspect it results.add(EXEC_SERVICE.submit(worker, worker)); } @@ -311,8 +331,9 @@ public void checkField(final String numericFieldName) throws Exception { for (Future r : results) { try { Worker w = r.get(); - if (! w.getFinishedOk() ) { - // quick and dirty sanity check if any workers didn't succeed, but didn't throw an exception either + if (!w.getFinishedOk()) { + // quick and dirty sanity check if any workers didn't succeed, but didn't throw an + // exception either abortLatch.countDown(); log.error("worker={} didn't finish ok, but didn't throw exception?", w.workerId); } @@ -322,16 +343,18 @@ public void checkField(final String numericFieldName) throws Exception { // low level error, or test assertion failure - either way don't leave it wrapped log.error("Worker exec Error, throwing root cause", ee); throw (Error) rootCause; - } else { + } else { log.error("Worker ExecutionException, re-throwing", ee); throw ee; } } } - assertEquals("Abort latch has changed, why didn't we get an exception from a worker?", - 1L, abortLatch.getCount()); - + assertEquals( + "Abort latch has changed, why didn't we get an exception from a worker?", + 1L, + abortLatch.getCount()); + TestInjection.reset(); waitForRecoveriesToFinish(CLOUD_CLIENT); @@ -339,45 +362,54 @@ public void checkField(final String numericFieldName) throws Exception { int incorrectDocs = 0; for (int id = 0; id < numDocsInIndex; id += DOC_ID_INCR) { assert 0 == id % DOC_ID_INCR : "WTF? " + id; - + final long expect = expected[id / DOC_ID_INCR].longValue(); - + final String docId = "" + id; - + // sometimes include an fq on the expected value to ensure the updated values // are "visible" for searching - final SolrParams p = (0 != TestUtil.nextInt(random(), 0,15)) - ? 
params() : params("fq",numericFieldName + ":\"" + expect + "\""); + final SolrParams p = + (0 != TestUtil.nextInt(random(), 0, 15)) + ? params() + : params("fq", numericFieldName + ":\"" + expect + "\""); SolrDocument doc = getRandClient(random()).getById(docId, p); - + final boolean foundWithFilter = (null != doc); - if (! foundWithFilter) { + if (!foundWithFilter) { // try again w/o fq to see what it does have doc = getRandClient(random()).getById(docId); } - + Long actual = (null == doc) ? null : (Long) doc.getFirstValue(numericFieldName); - if (actual == null || expect != actual.longValue() || ! foundWithFilter) { - log.error("docId={}, foundWithFilter={}, expected={}, actual={}", - docId, foundWithFilter, expect, actual); + if (actual == null || expect != actual.longValue() || !foundWithFilter) { + log.error( + "docId={}, foundWithFilter={}, expected={}, actual={}", + docId, + foundWithFilter, + expect, + actual); incorrectDocs++; } - } assertEquals("Some docs had errors -- check logs", 0, incorrectDocs); } - public static final class Worker implements Runnable { public final int workerId; final AtomicLong[] expected; final CountDownLatch abortLatch; final Random rand; - final String updateField; + final String updateField; final int numDocsToUpdate; boolean ok = false; // set to true only on successful completion - public Worker(int workerId, AtomicLong[] expected, CountDownLatch abortLatch, Random rand, - String updateField) { + + public Worker( + int workerId, + AtomicLong[] expected, + CountDownLatch abortLatch, + Random rand, + String updateField) { this.workerId = workerId; this.expected = expected; this.abortLatch = abortLatch; @@ -385,32 +417,33 @@ public Worker(int workerId, AtomicLong[] expected, CountDownLatch abortLatch, Ra this.updateField = updateField; this.numDocsToUpdate = atLeast(rand, 25); } + public boolean getFinishedOk() { return ok; } + private void doRandomAtomicUpdate(int docId) throws Exception { assert 0 == docId % DOC_ID_INCR : "WTF? 
" + docId; - + final int delta = TestUtil.nextInt(rand, -1000, 1000); log.info("worker={}, docId={}, delta={}", workerId, docId, delta); SolrClient client = getRandClient(rand); - SolrInputDocument doc = doc(f("id",""+docId), - f(updateField,Collections.singletonMap("inc",delta))); + SolrInputDocument doc = + doc(f("id", "" + docId), f(updateField, Collections.singletonMap("inc", delta))); UpdateResponse rsp = update(doc).process(client); assertEquals(doc + " => " + rsp, 0, rsp.getStatus()); - + AtomicLong counter = expected[docId / DOC_ID_INCR]; assertNotNull("null counter for " + docId + "/" + DOC_ID_INCR, counter); counter.getAndAdd(delta); - } - + public void run() { final String origThreadName = Thread.currentThread().getName(); try { Thread.currentThread().setName(origThreadName + "-w" + workerId); - final int maxDocMultiplier = expected.length-1; + final int maxDocMultiplier = expected.length - 1; for (int docIter = 0; docIter < numDocsToUpdate; docIter++) { final int docId = DOC_ID_INCR * TestUtil.nextInt(rand, 0, maxDocMultiplier); @@ -421,16 +454,18 @@ public void run() { // no matter how random the doc selection may be per thread, ensure // every doc that is selected by *a* thread gets at least a couple rapid fire updates final int itersPerDoc = atLeast(rand, 2); - + for (int updateIter = 0; updateIter < itersPerDoc; updateIter++) { if (0 == abortLatch.getCount()) { return; } doRandomAtomicUpdate(docId); } - if (rand.nextBoolean()) { Thread.yield(); } + if (rand.nextBoolean()) { + Thread.yield(); + } } - + } catch (Error err) { log.error(Thread.currentThread().getName(), err); abortLatch.countDown(); @@ -445,11 +480,11 @@ public void run() { ok = true; } } - - + public static UpdateRequest update(SolrInputDocument... docs) { return update(null, docs); } + public static UpdateRequest update(SolrParams params, SolrInputDocument... docs) { UpdateRequest r = new UpdateRequest(); if (null != params) { @@ -458,7 +493,7 @@ public static UpdateRequest update(SolrParams params, SolrInputDocument... docs) r.add(Arrays.asList(docs)); return r; } - + public static SolrInputDocument doc(SolrInputField... fields) { SolrInputDocument doc = new SolrInputDocument(); for (SolrInputField f : fields) { @@ -466,12 +501,13 @@ public static SolrInputDocument doc(SolrInputField... fields) { } return doc; } - + public static SolrInputField f(String fieldName, Object... values) { SolrInputField f = new SolrInputField(fieldName); f.setValue(values); // TODO: soooooooooo stupid (but currently neccessary because atomic updates freak out - // if the Map with the "inc" operation is inside of a collection - even if it's the only "value") ... + // if the Map with the "inc" operation is inside of a collection - even if it's the only + // "value") ... if (1 == values.length) { f.setValue(values[0]); } else { @@ -479,7 +515,7 @@ public static SolrInputField f(String fieldName, Object... 
values) { } return f; } - + public static SolrClient getRandClient(Random rand) { int numClients = CLIENTS.size(); int idx = TestUtil.nextInt(rand, 0, numClients); @@ -489,16 +525,17 @@ public static SolrClient getRandClient(Random rand) { public static void waitForRecoveriesToFinish(CloudSolrClient client) throws Exception { assert null != client.getDefaultCollection(); client.getZkStateReader().forceUpdateCollection(client.getDefaultCollection()); - AbstractDistribZkTestBase.waitForRecoveriesToFinish(client.getDefaultCollection(), - client.getZkStateReader(), - true, true, 330); + AbstractDistribZkTestBase.waitForRecoveriesToFinish( + client.getDefaultCollection(), client.getZkStateReader(), true, true, 330); } /** - * Use the schema API to verify that the specified expected Field exists with those exact attributes. + * Use the schema API to verify that the specified expected Field exists with those exact + * attributes. + * * @see #CLOUD_CLIENT */ - public static void checkExpectedSchemaField(Map expected) throws Exception { + public static void checkExpectedSchemaField(Map expected) throws Exception { String fieldName = (String) expected.get("name"); assertNotNull("expected contains no name: " + expected, fieldName); FieldResponse rsp = new Field(fieldName).process(CLOUD_CLIENT); @@ -506,19 +543,20 @@ public static void checkExpectedSchemaField(Map expected) throws assertEquals("Field Status: " + fieldName + " => " + rsp.toString(), 0, rsp.getStatus()); assertEquals("Field: " + fieldName, expected, rsp.getField()); } - + /** - * Use the schema API to verify that the specified expected FieldType exists with those exact attributes. + * Use the schema API to verify that the specified expected FieldType exists with those exact + * attributes. + * * @see #CLOUD_CLIENT */ - public static void checkExpectedSchemaType(Map expected) throws Exception { - + public static void checkExpectedSchemaType(Map expected) throws Exception { + String typeName = (String) expected.get("name"); assertNotNull("expected contains no type: " + expected, typeName); FieldTypeResponse rsp = new FieldType(typeName).process(CLOUD_CLIENT); assertNotNull("FieldType Null Response: " + typeName, rsp); assertEquals("FieldType Status: " + typeName + " => " + rsp.toString(), 0, rsp.getStatus()); assertEquals("FieldType: " + typeName, expected, rsp.getFieldType().getAttributes()); - } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestStressInPlaceUpdates.java b/solr/core/src/test/org/apache/solr/cloud/TestStressInPlaceUpdates.java index 06f88420366..c1950e57edb 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestStressInPlaceUpdates.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestStressInPlaceUpdates.java @@ -18,8 +18,8 @@ package org.apache.solr.cloud; import java.lang.invoke.MethodHandles; -import java.util.Arrays; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Locale; @@ -28,7 +28,6 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; - import org.apache.commons.math3.primes.Primes; import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.client.solrj.SolrClient; @@ -94,7 +93,6 @@ private void initModel(int ndocs) { @Test @ShardsFixed(num = 3) - // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 09-Apr-2018 public void stressTest() throws Exception { 
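The writer threads below lean on Solr's optimistic concurrency: an update that carries a positive _version_ is applied only if the stored version still matches, otherwise the leader answers with a version-conflict error that the threads catch and treat as a benign rejection. A sketch of that idiom (client, the collection name, and knownVersion are placeholders, not part of this test):

import java.util.Collections;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.common.SolrInputDocument;

class OptimisticUpdateSketch {
  // Applies an atomic "inc" guarded by _version_; Solr rejects it with a
  // version conflict if the document changed since knownVersion was read.
  static void guardedInc(SolrClient client, String id, long knownVersion) throws Exception {
    SolrInputDocument doc = new SolrInputDocument();
    doc.setField("id", id);
    doc.setField("val2_l_dvo", Collections.singletonMap("inc", 1L));
    doc.setField("_version_", knownVersion);
    client.add("collection1", doc);
  }
}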
waitForRecoveriesToFinish(true); @@ -112,34 +110,50 @@ public void stressTest() throws Exception { // query variables final int percentRealtimeQuery = 75; // number of cumulative read/write operations by all threads - final AtomicLong operations = new AtomicLong(5000); + final AtomicLong operations = new AtomicLong(5000); int nReadThreads = 5 + random().nextInt(12); - - /** // testing - final int commitPercent = 5; - final int softCommitPercent = 100; // what percent of the commits are soft - final int deletePercent = 0; - final int deleteByQueryPercent = 50; - final int ndocs = 10; - int nWriteThreads = 10; - - final int maxConcurrentCommits = nWriteThreads; // number of committers at a time... it should be <= maxWarmingSearchers - - // query variables - final int percentRealtimeQuery = 101; - final AtomicLong operations = new AtomicLong(50000); // number of query operations to perform in total - int nReadThreads = 10; - - int fullUpdatePercent = 20; - **/ + // testing + // final int commitPercent = 5; + // final int softCommitPercent = 100; // what percent of the commits are soft + // final int deletePercent = 0; + // final int deleteByQueryPercent = 50; + // final int ndocs = 10; + // int nWriteThreads = 10; + // + // final int maxConcurrentCommits = nWriteThreads; // number of committers at a time... it + // should be <= maxWarmingSearchers + // + // query variables + // final int percentRealtimeQuery = 101; + // final AtomicLong operations = new AtomicLong(50000); // number of query operations to + // perform in total + // int nReadThreads = 10; + // + // int fullUpdatePercent = 20; if (log.isInfoEnabled()) { - log.info("{}", Arrays.asList - ("commitPercent", commitPercent, "softCommitPercent", softCommitPercent, - "deletePercent", deletePercent, "deleteByQueryPercent", deleteByQueryPercent, - "ndocs", ndocs, "nWriteThreads", nWriteThreads, "percentRealtimeQuery", percentRealtimeQuery, - "operations", operations, "nReadThreads", nReadThreads)); + log.info( + "{}", + Arrays.asList( + "commitPercent", + commitPercent, + "softCommitPercent", + softCommitPercent, + "deletePercent", + deletePercent, + "deleteByQueryPercent", + deleteByQueryPercent, + "ndocs", + ndocs, + "nWriteThreads", + nWriteThreads, + "percentRealtimeQuery", + percentRealtimeQuery, + "operations", + operations, + "nReadThreads", + nReadThreads)); } initModel(ndocs); @@ -147,283 +161,342 @@ public void stressTest() throws Exception { List threads = new ArrayList<>(); for (int i = 0; i < nWriteThreads; i++) { - Thread thread = new Thread("WRITER" + i) { - Random rand = new Random(random().nextInt()); - - @Override - public void run() { - try { - while (operations.decrementAndGet() > 0) { - int oper = rand.nextInt(50); - - if (oper < commitPercent) { - Map newCommittedModel; - long version; - - synchronized (TestStressInPlaceUpdates.this) { - // take a snapshot of the model - // this is safe to do w/o synchronizing on the model because it's a ConcurrentHashMap - newCommittedModel = new HashMap<>(model); - version = snapshotCount++; - - int chosenClientIndex = rand.nextInt(clients.size()); - - if (rand.nextInt(100) < softCommitPercent) { - log.info("softCommit start"); - clients.get(chosenClientIndex).commit(true, true, true); - log.info("softCommit end"); - } else { - log.info("hardCommit start"); - clients.get(chosenClientIndex).commit(); - log.info("hardCommit end"); - } - - // install this model snapshot only if it's newer than the current one - if (version >= committedModelClock) { - if (VERBOSE) { - 
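A note on the two commit flavors the writer threads exercise: SolrClient exposes commit(boolean waitFlush, boolean waitSearcher, boolean softCommit), so the soft and hard commits in the commit branch come down to the following (client stands in for one of the test's SolrClient instances):

// soft commit: makes updates searchable without fsync'ing segments to disk
client.commit(true, true, true);
// hard commit: durable; the no-arg form is equivalent to commit(true, true, false)
client.commit();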
log.info("installing new committedModel version={}", committedModelClock); + Thread thread = + new Thread("WRITER" + i) { + Random rand = new Random(random().nextInt()); + + @Override + public void run() { + try { + while (operations.decrementAndGet() > 0) { + int oper = rand.nextInt(50); + + if (oper < commitPercent) { + Map newCommittedModel; + long version; + + synchronized (TestStressInPlaceUpdates.this) { + // take a snapshot of the model + // this is safe to do w/o synchronizing on the model because it's a + // ConcurrentHashMap + newCommittedModel = new HashMap<>(model); + version = snapshotCount++; + + int chosenClientIndex = rand.nextInt(clients.size()); + + if (rand.nextInt(100) < softCommitPercent) { + log.info("softCommit start"); + clients.get(chosenClientIndex).commit(true, true, true); + log.info("softCommit end"); + } else { + log.info("hardCommit start"); + clients.get(chosenClientIndex).commit(); + log.info("hardCommit end"); + } + + // install this model snapshot only if it's newer than the current one + if (version >= committedModelClock) { + if (VERBOSE) { + log.info("installing new committedModel version={}", committedModelClock); + } + clientIndexUsedForCommit = chosenClientIndex; + committedModel = newCommittedModel; + committedModelClock = version; + } } - clientIndexUsedForCommit = chosenClientIndex; - committedModel = newCommittedModel; - committedModelClock = version; + continue; } - } - continue; - } - int id; - - if (rand.nextBoolean()) { - id = rand.nextInt(ndocs); - } else { - id = lastId; // reuse the last ID half of the time to force more race conditions - } - - // set the lastId before we actually change it sometimes to try and - // uncover more race conditions between writing and reading - boolean before = rand.nextBoolean(); - if (before) { - lastId = id; - } + int id; - DocInfo info = model.get(id); - - // yield after getting the next version to increase the odds of updates happening out of order - if (rand.nextBoolean()) Thread.yield(); - - if (oper < commitPercent + deletePercent + deleteByQueryPercent) { - final boolean dbq = (oper >= commitPercent + deletePercent); - final String delType = dbq ? "DBI": "DBQ"; - log.info("{} id {}: {}", delType, id, info); - - Long returnedVersion = null; - - try { - returnedVersion = deleteDocAndGetVersion(Integer.toString(id), params("_version_", Long.toString(info.version)), dbq); - log.info("{}: Deleting id={}, version={}. 
Returned version={}" - , delType, id, info.version, returnedVersion); - } catch (RuntimeException e) { - if (e.getMessage() != null && e.getMessage().contains("version conflict") - || e.getMessage() != null && e.getMessage().contains("Conflict")) { - // Its okay for a leader to reject a concurrent request - log.warn("Conflict during {}, rejected id={}, {}", delType, id, e); - returnedVersion = null; + if (rand.nextBoolean()) { + id = rand.nextInt(ndocs); } else { - throw e; + id = lastId; // reuse the last ID half of the time to force more race conditions } - } - // only update model if update had no conflict & the version is newer - synchronized (model) { - DocInfo currInfo = model.get(id); - if (null != returnedVersion && - (Math.abs(returnedVersion.longValue()) > Math.abs(currInfo.version))) { - model.put(id, new DocInfo(returnedVersion.longValue(), 0, 0)); + // set the lastId before we actually change it sometimes to try and + // uncover more race conditions between writing and reading + boolean before = rand.nextBoolean(); + if (before) { + lastId = id; } - } - - } else { - int val1 = info.intFieldValue; - long val2 = info.longFieldValue; - int nextVal1 = val1; - long nextVal2 = val2; - - int addOper = rand.nextInt(30); - Long returnedVersion; - if (addOper < fullUpdatePercent || info.version <= 0) { // if document was never indexed or was deleted - // FULL UPDATE - nextVal1 = Primes.nextPrime(val1 + 1); - nextVal2 = nextVal1 * 1000000000l; - try { - returnedVersion = addDocAndGetVersion("id", id, "title_s", "title" + id, "val1_i_dvo", nextVal1, "val2_l_dvo", nextVal2, "_version_", info.version); - log.info("FULL: Writing id={}, val=[{},{}], version={}, Prev was=[{},{}]. Returned version={}" - ,id, nextVal1, nextVal2, info.version, val1, val2, returnedVersion); - - } catch (RuntimeException e) { - if (e.getMessage() != null && e.getMessage().contains("version conflict") - || e.getMessage() != null && e.getMessage().contains("Conflict")) { - // Its okay for a leader to reject a concurrent request - log.warn("Conflict during full update, rejected id={}, {}", id, e); - returnedVersion = null; - } else { - throw e; + + DocInfo info = model.get(id); + + // yield after getting the next version to increase the odds of updates happening + // out of order + if (rand.nextBoolean()) Thread.yield(); + + if (oper < commitPercent + deletePercent + deleteByQueryPercent) { + final boolean dbq = (oper >= commitPercent + deletePercent); + final String delType = dbq ? "DBI" : "DBQ"; + log.info("{} id {}: {}", delType, id, info); + + Long returnedVersion = null; + + try { + returnedVersion = + deleteDocAndGetVersion( + Integer.toString(id), + params("_version_", Long.toString(info.version)), + dbq); + log.info( + "{}: Deleting id={}, version={}. Returned version={}", + delType, + id, + info.version, + returnedVersion); + } catch (RuntimeException e) { + if (e.getMessage() != null && e.getMessage().contains("version conflict") + || e.getMessage() != null && e.getMessage().contains("Conflict")) { + // Its okay for a leader to reject a concurrent request + log.warn("Conflict during {}, rejected id={}, {}", delType, id, e); + returnedVersion = null; + } else { + throw e; + } } - } - } else { - // PARTIAL - nextVal2 = val2 + val1; - try { - returnedVersion = addDocAndGetVersion("id", id, "val2_l_dvo", map("inc", String.valueOf(val1)), "_version_", info.version); - log.info("PARTIAL: Writing id={}, val=[{},{}], version={}, Prev was=[{},{}]. 
Returned version={}" - ,id, nextVal1, nextVal2, info.version, val1, val2, returnedVersion); - } catch (RuntimeException e) { - if (e.getMessage() != null && e.getMessage().contains("version conflict") - || e.getMessage() != null && e.getMessage().contains("Conflict")) { - // Its okay for a leader to reject a concurrent request - log.warn("Conflict during partial update, rejected id={}, {}", id, e); - } else if (e.getMessage() != null && e.getMessage().contains("Document not found for update.") - && e.getMessage().contains("id="+id)) { - log.warn("Attempted a partial update for a recently deleted document, rejected id={}, {}", id, e); + + // only update model if update had no conflict & the version is newer + synchronized (model) { + DocInfo currInfo = model.get(id); + if (null != returnedVersion + && (Math.abs(returnedVersion.longValue()) > Math.abs(currInfo.version))) { + model.put(id, new DocInfo(returnedVersion.longValue(), 0, 0)); + } + } + + } else { + int val1 = info.intFieldValue; + long val2 = info.longFieldValue; + int nextVal1 = val1; + long nextVal2 = val2; + + int addOper = rand.nextInt(30); + Long returnedVersion; + // if document was never indexed or was deleted FULL UPDATE + if (addOper < fullUpdatePercent || info.version <= 0) { + nextVal1 = Primes.nextPrime(val1 + 1); + nextVal2 = nextVal1 * 1000000000l; + try { + returnedVersion = + addDocAndGetVersion( + "id", + id, + "title_s", + "title" + id, + "val1_i_dvo", + nextVal1, + "val2_l_dvo", + nextVal2, + "_version_", + info.version); + log.info( + "FULL: Writing id={}, val=[{},{}], version={}, Prev was=[{},{}]. Returned version={}", + id, + nextVal1, + nextVal2, + info.version, + val1, + val2, + returnedVersion); + + } catch (RuntimeException e) { + if (e.getMessage() != null && e.getMessage().contains("version conflict") + || e.getMessage() != null && e.getMessage().contains("Conflict")) { + // Its okay for a leader to reject a concurrent request + log.warn("Conflict during full update, rejected id={}, {}", id, e); + returnedVersion = null; + } else { + throw e; + } + } } else { - throw e; + // PARTIAL + nextVal2 = val2 + val1; + try { + returnedVersion = + addDocAndGetVersion( + "id", + id, + "val2_l_dvo", + map("inc", String.valueOf(val1)), + "_version_", + info.version); + log.info( + "PARTIAL: Writing id={}, val=[{},{}], version={}, Prev was=[{},{}]. 
Returned version={}", + id, + nextVal1, + nextVal2, + info.version, + val1, + val2, + returnedVersion); + } catch (RuntimeException e) { + if (e.getMessage() != null && e.getMessage().contains("version conflict") + || e.getMessage() != null && e.getMessage().contains("Conflict")) { + // Its okay for a leader to reject a concurrent request + log.warn("Conflict during partial update, rejected id={}, {}", id, e); + } else if (e.getMessage() != null + && e.getMessage().contains("Document not found for update.") + && e.getMessage().contains("id=" + id)) { + log.warn( + "Attempted a partial update for a recently deleted document, rejected id={}, {}", + id, + e); + } else { + throw e; + } + returnedVersion = null; + } } - returnedVersion = null; - } - } - // only update model if update had no conflict & the version is newer - synchronized (model) { - DocInfo currInfo = model.get(id); - if (null != returnedVersion && - (Math.abs(returnedVersion.longValue()) > Math.abs(currInfo.version))) { - model.put(id, new DocInfo(returnedVersion.longValue(), nextVal1, nextVal2)); + // only update model if update had no conflict & the version is newer + synchronized (model) { + DocInfo currInfo = model.get(id); + if (null != returnedVersion + && (Math.abs(returnedVersion.longValue()) > Math.abs(currInfo.version))) { + model.put(id, new DocInfo(returnedVersion.longValue(), nextVal1, nextVal2)); + } + } } + if (!before) { + lastId = id; + } } - } - - if (!before) { - lastId = id; + } catch (Throwable e) { + operations.set(-1L); + log.error("", e); + throw new RuntimeException(e); } } - } catch (Throwable e) { - operations.set(-1L); - log.error("", e); - throw new RuntimeException(e); - } - } - }; + }; threads.add(thread); - } // Read threads for (int i = 0; i < nReadThreads; i++) { - Thread thread = new Thread("READER" + i) { - Random rand = new Random(random().nextInt()); - - @Override - public void run() { - try { - while (operations.decrementAndGet() >= 0) { - // bias toward a recently changed doc - int id = rand.nextInt(100) < 25 ? lastId : rand.nextInt(ndocs); - - // when indexing, we update the index, then the model - // so when querying, we should first check the model, and then the index - - boolean realTime = rand.nextInt(100) < percentRealtimeQuery; - DocInfo expected; - - if (realTime) { - expected = model.get(id); - } else { - synchronized (TestStressInPlaceUpdates.this) { - expected = committedModel.get(id); - } - } + Thread thread = + new Thread("READER" + i) { + Random rand = new Random(random().nextInt()); - if (VERBOSE) { - log.info("querying id {}", id); - } - ModifiableSolrParams params = new ModifiableSolrParams(); - if (realTime) { - params.set("wt", "json"); - params.set("qt", "/get"); - params.set("ids", Integer.toString(id)); - } else { - params.set("wt", "json"); - params.set("q", "id:" + Integer.toString(id)); - params.set("omitHeader", "true"); - } + @Override + public void run() { + try { + while (operations.decrementAndGet() >= 0) { + // bias toward a recently changed doc + int id = rand.nextInt(100) < 25 ? 
lastId : rand.nextInt(ndocs); - int clientId = rand.nextInt(clients.size()); - if (!realTime) clientId = clientIndexUsedForCommit; - - QueryResponse response = clients.get(clientId).query(params); - if (response.getResults().size() == 0) { - // there's no info we can get back with a delete, so not much we can check without further synchronization - } else if (response.getResults().size() == 1) { - final SolrDocument actual = response.getResults().get(0); - final String msg = "Realtime=" + realTime + ", expected=" + expected + ", actual=" + actual; - assertNotNull(msg, actual); - - final Long foundVersion = (Long) actual.getFieldValue("_version_"); - assertNotNull(msg, foundVersion); - assertTrue(msg + "... solr doc has non-positive version???", - 0 < foundVersion.longValue()); - final Integer intVal = (Integer) actual.getFieldValue("val1_i_dvo"); - assertNotNull(msg, intVal); - - final Long longVal = (Long) actual.getFieldValue("val2_l_dvo"); - assertNotNull(msg, longVal); - - assertTrue(msg + " ...solr returned older version then model. " + - "should not be possible given the order of operations in writer threads", - Math.abs(expected.version) <= foundVersion.longValue()); - - if (foundVersion.longValue() == expected.version) { - assertEquals(msg, expected.intFieldValue, intVal.intValue()); - assertEquals(msg, expected.longFieldValue, longVal.longValue()); - } + // when indexing, we update the index, then the model + // so when querying, we should first check the model, and then the index + + boolean realTime = rand.nextInt(100) < percentRealtimeQuery; + DocInfo expected; + + if (realTime) { + expected = model.get(id); + } else { + synchronized (TestStressInPlaceUpdates.this) { + expected = committedModel.get(id); + } + } + + if (VERBOSE) { + log.info("querying id {}", id); + } + ModifiableSolrParams params = new ModifiableSolrParams(); + if (realTime) { + params.set("wt", "json"); + params.set("qt", "/get"); + params.set("ids", Integer.toString(id)); + } else { + params.set("wt", "json"); + params.set("q", "id:" + Integer.toString(id)); + params.set("omitHeader", "true"); + } - // Some things we can assert about any Doc returned from solr, - // even if it's newer then our (expected) model information... - - assertTrue(msg + " ...how did a doc in solr get a non positive intVal?", - 0 < intVal); - assertTrue(msg + " ...how did a doc in solr get a non positive longVal?", - 0 < longVal); - assertEquals(msg + " ...intVal and longVal in solr doc are internally (modulo) inconsistent w/eachother", - 0, (longVal % intVal)); - - // NOTE: when foundVersion is greater then the version read from the model, - // it's not possible to make any assertions about the field values in solr relative to the - // field values in the model -- ie: we can *NOT* assert expected.longFieldVal <= doc.longVal - // - // it's tempting to think that this would be possible if we changed our model to preserve the - // "old" valuess when doing a delete, but that's still no garuntee because of how oportunistic - // concurrency works with negative versions: When adding a doc, we can assert that it must not - // exist with version<0, but we can't assert that the *reason* it doesn't exist was because of - // a delete with the specific version of "-42". - // So a wrtier thread might (1) prep to add a doc for the first time with "intValue=1,_version_=-1", - // and that add may succeed and (2) return some version X which is put in the model. 
but - // inbetween #1 and #2 other threads may have added & deleted the doc repeatedly, updating - // the model with intValue=7,_version_=-42, and a reader thread might meanwhile read from the - // model before #2 and expect intValue=5, but get intValue=1 from solr (with a greater version) - - } else { - fail(String.format(Locale.ENGLISH, "There were more than one result: {}", response)); + int clientId = rand.nextInt(clients.size()); + if (!realTime) clientId = clientIndexUsedForCommit; + + QueryResponse response = clients.get(clientId).query(params); + if (response.getResults().size() == 0) { + // there's no info we can get back with a delete, so not much we can check + // without further synchronization + } else if (response.getResults().size() == 1) { + final SolrDocument actual = response.getResults().get(0); + final String msg = + "Realtime=" + realTime + ", expected=" + expected + ", actual=" + actual; + assertNotNull(msg, actual); + + final Long foundVersion = (Long) actual.getFieldValue("_version_"); + assertNotNull(msg, foundVersion); + assertTrue( + msg + "... solr doc has non-positive version???", + 0 < foundVersion.longValue()); + final Integer intVal = (Integer) actual.getFieldValue("val1_i_dvo"); + assertNotNull(msg, intVal); + + final Long longVal = (Long) actual.getFieldValue("val2_l_dvo"); + assertNotNull(msg, longVal); + + assertTrue( + msg + + " ...solr returned older version then model. " + + "should not be possible given the order of operations in writer threads", + Math.abs(expected.version) <= foundVersion.longValue()); + + if (foundVersion.longValue() == expected.version) { + assertEquals(msg, expected.intFieldValue, intVal.intValue()); + assertEquals(msg, expected.longFieldValue, longVal.longValue()); + } + + // Some things we can assert about any Doc returned from solr, + // even if it's newer then our (expected) model information... + + assertTrue( + msg + " ...how did a doc in solr get a non positive intVal?", 0 < intVal); + assertTrue( + msg + " ...how did a doc in solr get a non positive longVal?", 0 < longVal); + assertEquals( + msg + + " ...intVal and longVal in solr doc are internally (modulo) inconsistent w/eachother", + 0, + (longVal % intVal)); + + // NOTE: when foundVersion is greater then the version read from the model, it's + // not possible to make any assertions about the field values in solr relative + // to the field values in the model -- ie: we can *NOT* assert + // expected.longFieldVal <= doc.longVal + // + // it's tempting to think that this would be possible if we changed our model to + // preserve the "old" valuess when doing a delete, but that's still no garuntee + // because of how oportunistic concurrency works with negative versions: When + // adding a doc, we can assert that it must not exist with version<0, but we + // can't assert that the *reason* it doesn't exist was because of a delete with + // the specific version of "-42". So a wrtier thread might (1) prep to add a doc + // for the first time with "intValue=1,_version_=-1", and that add may succeed + // and (2) return some version X which is put in the model. 
but in between #1 + and #2 other threads may have added & deleted the doc repeatedly, updating + the model with intValue=7,_version_=-42, and a reader thread might meanwhile + read from the model before #2 and expect intValue=5, but get intValue=1 from + solr (with a greater version) + + } else { + fail( + String.format( + Locale.ENGLISH, "There was more than one result: %s", response)); + } + } + } catch (Throwable e) { + operations.set(-1L); + log.error("", e); + throw new RuntimeException(e); } } - } catch (Throwable e) { - operations.set(-1L); - log.error("", e); - throw new RuntimeException(e); - } - } - }; + }; threads.add(thread); } @@ -437,9 +510,8 @@ public void run() { } { // final pass over uncommitted model with RTG - for (SolrClient client : clients) { - for (Map.Entry entry : model.entrySet()) { + for (Map.Entry entry : model.entrySet()) { final Integer id = entry.getKey(); final DocInfo expected = entry.getValue(); final SolrDocument actual = client.getById(id.toString()); @@ -448,8 +520,9 @@ public void run() { if (null == actual) { // a deleted or non-existent document // sanity check of the model agrees... - assertTrue(msg + " is deleted/non-existent in Solr, but model has non-neg version", - expected.version < 0); + assertTrue( + msg + " is deleted/non-existent in Solr, but model has non-neg version", + expected.version < 0); assertEquals(msg + " is deleted/non-existent in Solr", expected.intFieldValue, 0); assertEquals(msg + " is deleted/non-existent in Solr", expected.longFieldValue, 0); } else { @@ -457,24 +530,24 @@ public void run() { assertEquals(msg, expected.intFieldValue, actual.getFieldValue("val1_i_dvo")); assertEquals(msg, expected.longFieldValue, actual.getFieldValue("val2_l_dvo")); assertEquals(msg, expected.version, actual.getFieldValue("_version_")); - assertTrue(msg + " doc exists in solr, but version is negative???", - 0 < expected.version); + assertTrue( + msg + " doc exists in solr, but version is negative???", 0 < expected.version); } } } } - - { // do a final search and compare every result with the model - - // because commits don't provide any sort of concrete versioning (or optimistic concurrency constraints) - // there's no way to garuntee that our committedModel matches what was in Solr at the time of the last commit. - // It's possible other threads made additional writes to solr before the commit was processed, but after - // the committedModel variable was assigned it's new value. - // - // what we can do however, is commit all completed updates, and *then* compare solr search results - // against the (new) committed model.... - - waitForThingsToLevelOut(30, TimeUnit.SECONDS); // NOTE: this does an automatic commit for us & ensures replicas are up to date + + { + // do a final search and compare every result with the model because commits don't provide any + // sort of concrete versioning (or optimistic concurrency constraints) there's no way to + // guarantee that our committedModel matches what was in Solr at the time of the last commit. + // It's possible other threads made additional writes to solr before the commit was processed, + // but after the committedModel variable was assigned its new value. What we can do however, + // is commit all completed updates, and *then* compare solr search results against the (new) + // committed model.... 
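The writer/reader assertions above lean on Solr's optimistic concurrency contract: an update carrying a positive _version_ is rejected with HTTP 409 unless the supplied value still matches the document's current version, so a conflict here just means another writer won the race. A minimal sketch of that contract outside the stress harness, assuming a SolrJ SolrClient and reusing the test's val1_i_dvo field; the class and method names are illustrative and not part of this patch:

import java.util.Collections;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;

final class OptimisticUpdateSketch {
  /**
   * One version-guarded atomic increment of val1_i_dvo. Returns false when a
   * concurrent writer won the race (HTTP 409) so the caller can re-read and retry.
   */
  static boolean tryVersionedIncrement(SolrClient client, String collection, String id)
      throws Exception {
    SolrDocument current = client.getById(collection, id); // RTG: sees uncommitted docs too
    if (current == null) {
      return false; // deleted in the meantime; nothing to update
    }
    long version = (Long) current.getFieldValue("_version_");

    SolrInputDocument doc = new SolrInputDocument();
    doc.setField("id", id);
    doc.setField("_version_", version); // positive version => must match exactly
    doc.setField("val1_i_dvo", Collections.singletonMap("inc", 1)); // atomic update syntax
    try {
      client.add(collection, doc);
      return true;
    } catch (SolrException e) {
      if (e.code() == SolrException.ErrorCode.CONFLICT.code) { // 409: version conflict
        return false;
      }
      throw e;
    }
  }
}

Whether such an update is applied in-place, as this test requires, additionally depends on the field being docValues-only (non-indexed, non-stored); the version guard itself works either way.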
+ + // NOTE: this does an automatic commit for us & ensures replicas are up to date + waitForThingsToLevelOut(30, TimeUnit.SECONDS); committedModel = new HashMap<>(model); // first, prune the model of any docs that have negative versions @@ -483,49 +556,53 @@ public void run() { DocInfo info = committedModel.get(i); if (info.version < 0) { // first, a quick sanity check of the model itself... - assertEquals("Inconsistent int value in model for deleted doc" + i + "=" + info, - 0, info.intFieldValue); - assertEquals("Inconsistent long value in model for deleted doc" + i + "=" + info, - 0L, info.longFieldValue); + assertEquals( + "Inconsistent int value in model for deleted doc" + i + "=" + info, + 0, + info.intFieldValue); + assertEquals( + "Inconsistent long value in model for deleted doc" + i + "=" + info, + 0L, + info.longFieldValue); committedModel.remove(i); } } for (SolrClient client : clients) { - QueryResponse rsp = client.query(params("q","*:*", "sort", "id asc", "rows", ndocs+"")); + QueryResponse rsp = client.query(params("q", "*:*", "sort", "id asc", "rows", ndocs + "")); for (SolrDocument actual : rsp.getResults()) { final Integer id = Integer.parseInt(actual.getFieldValue("id").toString()); - final DocInfo expected = committedModel.get(id); - + final DocInfo expected = committedModel.get(id); + assertNotNull("Doc found but missing/deleted from model: " + actual, expected); - + final String msg = "Search: " + id + "=" + expected + " <==VS==> " + actual; assertEquals(msg, expected.intFieldValue, actual.getFieldValue("val1_i_dvo")); assertEquals(msg, expected.longFieldValue, actual.getFieldValue("val2_l_dvo")); assertEquals(msg, expected.version, actual.getFieldValue("_version_")); - assertTrue(msg + " doc exists in solr, but version is negative???", - 0 < expected.version); + assertTrue(msg + " doc exists in solr, but version is negative???", 0 < expected.version); // also sanity check the model (which we already know matches the doc) - assertEquals("Inconsistent (modulo) values in model for id " + id + "=" + expected, - 0, (expected.longFieldValue % expected.intFieldValue)); + assertEquals( + "Inconsistent (modulo) values in model for id " + id + "=" + expected, + 0, + (expected.longFieldValue % expected.intFieldValue)); } assertEquals(committedModel.size(), rsp.getResults().getNumFound()); } } } - /** - * Used for storing the info for a document in an in-memory model. - */ + /** Used for storing the info for a document in an in-memory model. */ private static class DocInfo { long version; int intFieldValue; long longFieldValue; public DocInfo(long version, int val1, long val2) { - assert version != 0; // must either be real positive version, or negative deleted version/indicator + // must either be real positive version, or negative deleted version/indicator + assert version != 0; this.version = version; this.intFieldValue = val1; this.longFieldValue = val2; @@ -533,7 +610,13 @@ public DocInfo(long version, int val1, long val2) { @Override public String toString() { - return "[version=" + version + ", intValue=" + intFieldValue + ",longValue=" + longFieldValue + "]"; + return "[version=" + + version + + ", intValue=" + + intFieldValue + + ",longValue=" + + longFieldValue + + "]"; } } @@ -552,35 +635,43 @@ protected long addDocAndGetVersion(Object... 
fields) throws Exception { // send updates to leader, to avoid SOLR-8733 resp = ureq.process(leaderClient); - long returnedVersion = Long.parseLong(((NamedList) resp.getResponse().get("adds")).getVal(0).toString()); - assertTrue("Due to SOLR-8733, sometimes returned version is 0. Let us assert that we have successfully" - + " worked around that problem here.", returnedVersion > 0); + long returnedVersion = + Long.parseLong(((NamedList) resp.getResponse().get("adds")).getVal(0).toString()); + assertTrue( + "Due to SOLR-8733, sometimes returned version is 0. Let us assert that we have successfully" + + " worked around that problem here.", + returnedVersion > 0); return returnedVersion; } - protected long deleteDocAndGetVersion(String id, ModifiableSolrParams params, boolean deleteByQuery) throws Exception { + protected long deleteDocAndGetVersion( + String id, ModifiableSolrParams params, boolean deleteByQuery) throws Exception { params.add("versions", "true"); - + UpdateRequest ureq = new UpdateRequest(); ureq.setParams(params); if (deleteByQuery) { - ureq.deleteByQuery("id:"+id); + ureq.deleteByQuery("id:" + id); } else { ureq.deleteById(id); } UpdateResponse resp; // send updates to leader, to avoid SOLR-8733 resp = ureq.process(leaderClient); - - String key = deleteByQuery? "deleteByQuery": "deletes"; - long returnedVersion = Long.parseLong(((NamedList) resp.getResponse().get(key)).getVal(0).toString()); - assertTrue("Due to SOLR-8733, sometimes returned version is 0. Let us assert that we have successfully" - + " worked around that problem here.", returnedVersion < 0); + + String key = deleteByQuery ? "deleteByQuery" : "deletes"; + long returnedVersion = + Long.parseLong(((NamedList) resp.getResponse().get(key)).getVal(0).toString()); + assertTrue( + "Due to SOLR-8733, sometimes returned version is 0. Let us assert that we have successfully" + + " worked around that problem here.", + returnedVersion < 0); return returnedVersion; } /** - * Method gets the SolrClient for the leader replica. This is needed for a workaround for SOLR-8733. + * Method gets the SolrClient for the leader replica. This is needed for a workaround for + * SOLR-8733. */ public SolrClient getClientForLeader() throws KeeperException, InterruptedException { ZkStateReader zkStateReader = cloudClient.getZkStateReader(); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestStressLiveNodes.java b/solr/core/src/test/org/apache/solr/cloud/TestStressLiveNodes.java index 0a425e22c4b..8feb6cf8339 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestStressLiveNodes.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestStressLiveNodes.java @@ -24,7 +24,6 @@ import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.lucene.util.TestUtil; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -32,20 +31,17 @@ import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.SolrNamedThreadFactory; - import org.apache.zookeeper.CreateMode; - import org.junit.AfterClass; import org.junit.BeforeClass; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Stress test LiveNodes watching. * - * Does bursts of adds to live_nodes using parallel threads to and verifies that after each - * burst a ZkStateReader detects the correct set. + *

Does bursts of adds to live_nodes using parallel threads, and verifies that after each burst + a ZkStateReader detects the correct set. */ @ThreadLeakLingering(linger = 10) @Slow @@ -55,12 +51,12 @@ public class TestStressLiveNodes extends SolrCloudTestCase { /** A basic cloud client, we'll be testing the behavior of its ZkStateReader */ private static CloudSolrClient CLOUD_CLIENT; - + /** The addr of the zk server used in this test */ private static String ZK_SERVER_ADDR; /* how many seconds we're willing to wait for our executor tasks to finish before failing the test */ - private final static int WAIT_TIME = TEST_NIGHTLY ? 60 : 30; + private static final int WAIT_TIME = TEST_NIGHTLY ? 60 : 30; @BeforeClass private static void createMiniSolrCloudCluster() throws Exception { @@ -68,14 +64,13 @@ private static void createMiniSolrCloudCluster() throws Exception { // we only need 1 node, and we don't care about any configs or collections // we're going to fake all the live_nodes changes we want to fake. configureCluster(1).configure(); - + CLOUD_CLIENT = cluster.getSolrClient(); CLOUD_CLIENT.connect(); // force connection even though we aren't sending any requests - + ZK_SERVER_ADDR = cluster.getZkServer().getZkAddress(); - } - + @AfterClass private static void afterClass() throws Exception { if (null != CLOUD_CLIENT) { @@ -94,7 +89,8 @@ private static SolrZkClient newSolrZkClient() { private static List getTrueLiveNodesFromZk() throws Exception { SolrZkClient client = newSolrZkClient(); try { - ArrayList result = new ArrayList<>(client.getChildren(ZkStateReader.LIVE_NODES_ZKNODE, null, true)); + ArrayList result = + new ArrayList<>(client.getChildren(ZkStateReader.LIVE_NODES_ZKNODE, null, true)); Collections.sort(result); return result; } finally { @@ -102,21 +98,25 @@ private static List getTrueLiveNodesFromZk() throws Exception { } } - /** - * returns the cached set of live nodes (according to the ZkStateReader in our CloudSolrClient) - * as a sorted list. - * This is done in a sleep+retry loop until the result matches the expectedCount, or a few iters have passed - * (this way we aren't testing how fast the watchers complete, just that they got the correct result) + /** + * returns the cached set of live nodes (according to the ZkStateReader in our CloudSolrClient) as + * a sorted list. 
This is done in a sleep+retry loop until the result matches the expectedCount, + * or a few iters have passed (this way we aren't testing how fast the watchers complete, just + * that they got the correct result) */ - private static List getCachedLiveNodesFromLocalState(final int expectedCount) throws Exception { + private static List getCachedLiveNodesFromLocalState(final int expectedCount) + throws Exception { ArrayList result = null; for (int i = 0; i < 10; i++) { result = new ArrayList<>(CLOUD_CLIENT.getZkStateReader().getClusterState().getLiveNodes()); if (expectedCount != result.size()) { if (log.isInfoEnabled()) { - log.info("sleeping #{} to give watchers a chance to finish: {} != {}", - i, expectedCount, result.size()); + log.info( + "sleeping #{} to give watchers a chance to finish: {} != {}", + i, + expectedCount, + result.size()); } Thread.sleep(200); } else { @@ -124,13 +124,15 @@ private static List getCachedLiveNodesFromLocalState(final int expectedC } } if (expectedCount != result.size()) { - log.error("gave up waiting for live nodes to match expected size: {} != {}", - expectedCount, result.size()); + log.error( + "gave up waiting for live nodes to match expected size: {} != {}", + expectedCount, + result.size()); } Collections.sort(result); return result; } - + public void testStress() throws Exception { // do many iters, so we have "bursts" of adding nodes that we then check @@ -139,58 +141,66 @@ public void testStress() throws Exception { // sanity check that ZK says there is in fact 1 live node List actualLiveNodes = getTrueLiveNodesFromZk(); - assertEquals("iter"+iter+": " + actualLiveNodes.toString(), - 1, actualLiveNodes.size()); + assertEquals("iter" + iter + ": " + actualLiveNodes.toString(), 1, actualLiveNodes.size()); - // only here do we forcibly update the cached live nodes so we don't have to wait for it to catch up - // with all the ephemeral nodes that vanished after the last iteration + // only here do we forcibly update the cached live nodes so we don't have to wait for it to + // catch up with all the ephemeral nodes that vanished after the last iteration CLOUD_CLIENT.getZkStateReader().updateLiveNodes(); - - // sanity check that our Cloud Client's local state knows about the 1 (real) live node in our cluster + + // sanity check that our Cloud Client's local state knows about the 1 (real) live node in our + // cluster List cachedLiveNodes = getCachedLiveNodesFromLocalState(actualLiveNodes.size()); - assertEquals("iter"+iter+" " + actualLiveNodes.size() + " != " + cachedLiveNodes.size(), - actualLiveNodes, cachedLiveNodes); - - + assertEquals( + "iter" + iter + " " + actualLiveNodes.size() + " != " + cachedLiveNodes.size(), + actualLiveNodes, + cachedLiveNodes); + // start spinning up some threads to add some live_node children in parallel - // we don't need a lot of threads or nodes (we don't want to swamp the CPUs - // just bursts of concurrent adds) but we do want to randomize it a bit so we increase the - // odds of concurrent watchers firing regardless of the num CPUs or load on the machine running - // the test (but we deliberately don't look at availableProcessors() since we want randomization - // consistency across all machines for a given seed) + // we don't need a lot of threads or nodes (we don't want to swamp the CPUs just bursts of + // concurrent adds) but we do want to randomize it a bit so we increase the odds of concurrent + // watchers firing regardless of the num CPUs or load on the machine running the test (but we + // deliberately 
don't look at availableProcessors() since we want randomization consistency + // across all machines for a given seed) final int numThreads = TestUtil.nextInt(random(), 2, 5); - + // use same num for all thrashers, to increase likely hood of them all competing - // (diff random number would mean heavy concurrency only for ~ the first N=lowest num requests) + // (diff random number would mean heavy concurrency only for ~ the first N=lowest num + // requests) // // this does not need to be a large number -- in fact, the higher it is, the more // likely we are to see a mistake in early watcher triggers get "corrected" by a later one // and overlook a possible bug final int numNodesPerThrasher = TestUtil.nextInt(random(), 1, 5); - - log.info("preparing parallel adds to live nodes: iter={}, numThreads={} numNodesPerThread={}", - iter, numThreads, numNodesPerThrasher); - + + log.info( + "preparing parallel adds to live nodes: iter={}, numThreads={} numNodesPerThread={}", + iter, + numThreads, + numNodesPerThrasher); + // NOTE: using ephemeral nodes // so we can't close any of these thrashers until we are done with our assertions final List thrashers = new ArrayList<>(numThreads); for (int i = 0; i < numThreads; i++) { - thrashers.add(new LiveNodeTrasher("T"+iter+"_"+i, numNodesPerThrasher)); + thrashers.add(new LiveNodeTrasher("T" + iter + "_" + i, numNodesPerThrasher)); } try { - final ExecutorService executorService = ExecutorUtil.newMDCAwareFixedThreadPool - (thrashers.size()+1, new SolrNamedThreadFactory("test_live_nodes_thrasher_iter"+iter)); - + final ExecutorService executorService = + ExecutorUtil.newMDCAwareFixedThreadPool( + thrashers.size() + 1, + new SolrNamedThreadFactory("test_live_nodes_thrasher_iter" + iter)); + executorService.invokeAll(thrashers); executorService.shutdown(); - if (! 
executorService.awaitTermination(WAIT_TIME, TimeUnit.SECONDS)) { + if (!executorService.awaitTermination(WAIT_TIME, TimeUnit.SECONDS)) { for (LiveNodeTrasher thrasher : thrashers) { thrasher.stop(); } } - assertTrue("iter"+iter+": thrashers didn't finish even after explicitly stopping", - executorService.awaitTermination(WAIT_TIME, TimeUnit.SECONDS)); + assertTrue( + "iter" + iter + ": thrashers didn't finish even after explicitly stopping", + executorService.awaitTermination(WAIT_TIME, TimeUnit.SECONDS)); // sanity check the *real* live_nodes entries from ZK match what the thrashers added int totalAdded = 1; // 1 real live node when we started @@ -198,13 +208,15 @@ public void testStress() throws Exception { totalAdded += thrasher.getNumAdded(); } actualLiveNodes = getTrueLiveNodesFromZk(); - assertEquals("iter"+iter, totalAdded, actualLiveNodes.size()); - + assertEquals("iter" + iter, totalAdded, actualLiveNodes.size()); + // verify our local client knows the correct set of live nodes cachedLiveNodes = getCachedLiveNodesFromLocalState(actualLiveNodes.size()); - assertEquals("iter"+iter+" " + actualLiveNodes.size() + " != " + cachedLiveNodes.size(), - actualLiveNodes, cachedLiveNodes); - + assertEquals( + "iter" + iter + " " + actualLiveNodes.size() + " != " + cachedLiveNodes.size(), + actualLiveNodes, + cachedLiveNodes); + } finally { for (LiveNodeTrasher thrasher : thrashers) { // shutdown our zk connection, freeing our ephemeral nodes @@ -220,9 +232,10 @@ public static final class LiveNodeTrasher implements Callable { private final int numNodesToAdd; private final SolrZkClient client; - private boolean running = false;; + private boolean running = false; + ; private int numAdded = 0; - + /** ID should ideally be unique amongst any other instances */ public LiveNodeTrasher(String id, int numNodesToAdd) { this.id = id; @@ -244,12 +257,15 @@ public Integer call() { } return numAdded; } + public int getNumAdded() { return numAdded; } + public void close() { client.close(); } + public void stop() { running = false; } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplayVsRecovery.java b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplayVsRecovery.java index 8b9f1cfd52b..6a79f64605e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplayVsRecovery.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplayVsRecovery.java @@ -26,9 +26,7 @@ import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; - import org.apache.lucene.util.LuceneTestCase.AwaitsFix; - import org.apache.solr.JSONTestUtil; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.cloud.SocketProxy; @@ -46,12 +44,14 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-13486;https://issues.apache.org/jira/browse/SOLR-14183") +@AwaitsFix( + bugUrl = + "https://issues.apache.org/jira/browse/SOLR-13486;https://issues.apache.org/jira/browse/SOLR-14183") public class TestTlogReplayVsRecovery extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final String COLLECTION = "collecion_with_slow_tlog_recovery"; - + private JettySolrRunner NODE0; private JettySolrRunner NODE1; private Map proxies; @@ -63,29 +63,27 @@ public class TestTlogReplayVsRecovery extends SolrCloudTestCase { // // TODO: once SOLR-13486 is fixed, we should randomize this... 
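Context for the flag declared just below: on a clean shutdown Solr commits the IndexWriter, leaving nothing in the transaction log to replay, so tests that want to exercise tlog replay suppress that commit and make a stop/start look like a hard crash. A sketch of the idiom, assuming a JettySolrRunner from the test cluster (its import path has moved across Solr versions); illustrative, not part of this patch:

import org.apache.solr.client.solrj.embedded.JettySolrRunner; // package varies by Solr version
import org.apache.solr.util.TestInjection;

final class CrashRestartSketch {
  /** Stops a node as if it crashed (no final IndexWriter commit), then restarts it. */
  static void crashAndRestart(JettySolrRunner jetty) throws Exception {
    try {
      TestInjection.skipIndexWriterCommitOnClose = true; // uncommitted docs survive only in the tlog
      jetty.stop(); // to the core this is indistinguishable from a hard crash
    } finally {
      TestInjection.reset(); // never leak injection points into other tests
    }
    jetty.start(); // core load must now replay the buffered tlog entries
  }
}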
private static final boolean TEST_VALUE_FOR_SKIP_COMMIT_ON_CLOSE = true; - + @Before public void setupCluster() throws Exception { TestInjection.skipIndexWriterCommitOnClose = TEST_VALUE_FOR_SKIP_COMMIT_ON_CLOSE; - + System.setProperty("solr.directoryFactory", "solr.StandardDirectoryFactory"); System.setProperty("solr.ulog.numRecordsToKeep", "1000"); System.setProperty("leaderVoteWait", "60000"); - configureCluster(2) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(2).addConfig("conf", configset("cloud-minimal")).configure(); NODE0 = cluster.getJettySolrRunner(0); NODE1 = cluster.getJettySolrRunner(1); - + // Add proxies proxies = new HashMap<>(cluster.getJettySolrRunners().size()); jettys = new HashMap<>(); - for (JettySolrRunner jetty:cluster.getJettySolrRunners()) { + for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { SocketProxy proxy = new SocketProxy(); jetty.setProxyPort(proxy.getListenPort()); - cluster.stopJettySolrRunner(jetty);//TODO: Can we avoid this restart + cluster.stopJettySolrRunner(jetty); // TODO: Can we avoid this restart cluster.startJettySolrRunner(jetty); proxy.open(jetty.getBaseUrl().toURI()); if (log.isInfoEnabled()) { @@ -99,7 +97,7 @@ public void setupCluster() throws Exception { @After public void tearDownCluster() throws Exception { TestInjection.reset(); - + if (null != proxies) { for (SocketProxy proxy : proxies.values()) { proxy.close(); @@ -110,7 +108,7 @@ public void tearDownCluster() throws Exception { System.clearProperty("solr.directoryFactory"); System.clearProperty("solr.ulog.numRecordsToKeep"); System.clearProperty("leaderVoteWait"); - + shutdownCluster(); } @@ -125,28 +123,31 @@ public void testManyDocsInTlogReplayWhileReplicaIsTryingToRecover() throws Excep // d: docs not committed after network split (add w/o commit) final int committedDocs = 3; final int uncommittedDocs = 50; - + log.info("Create Collection..."); - assertEquals(RequestStatusState.COMPLETED, - CollectionAdminRequest.createCollection(COLLECTION, 1, 2) - .setCreateNodeSet("") - .processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT)); - assertEquals(RequestStatusState.COMPLETED, - CollectionAdminRequest.addReplicaToShard(COLLECTION, "shard1") - .setNode(NODE0.getNodeName()) - .processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT)); - + assertEquals( + RequestStatusState.COMPLETED, + CollectionAdminRequest.createCollection(COLLECTION, 1, 2) + .setCreateNodeSet("") + .processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT)); + assertEquals( + RequestStatusState.COMPLETED, + CollectionAdminRequest.addReplicaToShard(COLLECTION, "shard1") + .setNode(NODE0.getNodeName()) + .processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT)); + waitForState("Timeout waiting for shard leader", COLLECTION, clusterShape(1, 1)); - assertEquals(RequestStatusState.COMPLETED, - CollectionAdminRequest.addReplicaToShard(COLLECTION, "shard1") - .setNode(NODE1.getNodeName()) - .processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT)); - + assertEquals( + RequestStatusState.COMPLETED, + CollectionAdminRequest.addReplicaToShard(COLLECTION, "shard1") + .setNode(NODE1.getNodeName()) + .processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT)); + cluster.waitForActiveCollection(COLLECTION, 1, 2); - + waitForState("Timeout waiting for 1x2 collection", COLLECTION, clusterShape(1, 2)); - + final Replica leader = getCollectionState(COLLECTION).getSlice("shard1").getLeader(); assertEquals("Sanity check failed", NODE0.getNodeName(), leader.getNodeName()); @@ -162,62 
+163,81 @@ public void testManyDocsInTlogReplayWhileReplicaIsTryingToRecover() throws Excep addDocs(false, uncommittedDocs, committedDocs + 1); log.info("Stopping leader node..."); - assertEquals("Something broke our expected skipIndexWriterCommitOnClose", - TEST_VALUE_FOR_SKIP_COMMIT_ON_CLOSE, TestInjection.skipIndexWriterCommitOnClose); + assertEquals( + "Something broke our expected skipIndexWriterCommitOnClose", + TEST_VALUE_FOR_SKIP_COMMIT_ON_CLOSE, + TestInjection.skipIndexWriterCommitOnClose); NODE0.stop(); cluster.waitForJettyToStop(NODE0); log.info("Un-Partition replica (NODE1)..."); proxies.get(NODE1).reopen(); - - waitForState("Timeout waiting for leader goes DOWN", COLLECTION, (liveNodes, collectionState) - -> collectionState.getReplica(leader.getName()).getState() == Replica.State.DOWN); + + waitForState( + "Timeout waiting for leader goes DOWN", + COLLECTION, + (liveNodes, collectionState) -> + collectionState.getReplica(leader.getName()).getState() == Replica.State.DOWN); // Sanity check that a new (out of sync) replica doesn't come up in our place... - expectThrows(TimeoutException.class, - "Did not time out waiting for new leader, out of sync replica became leader", - () -> { - cluster.getSolrClient().waitForState(COLLECTION, 10, TimeUnit.SECONDS, (state) -> { - Replica newLeader = state.getSlice("shard1").getLeader(); - if (newLeader != null && !newLeader.getName().equals(leader.getName()) && newLeader.getState() == Replica.State.ACTIVE) { - // this is is the bad case, our "bad" state was found before timeout - log.error("WTF: New Leader={}", newLeader); - return true; - } - return false; // still no bad state, wait for timeout - }); - }); + expectThrows( + TimeoutException.class, + "Did not time out waiting for new leader, out of sync replica became leader", + () -> { + cluster + .getSolrClient() + .waitForState( + COLLECTION, + 10, + TimeUnit.SECONDS, + (state) -> { + Replica newLeader = state.getSlice("shard1").getLeader(); + if (newLeader != null + && !newLeader.getName().equals(leader.getName()) + && newLeader.getState() == Replica.State.ACTIVE) { + // this is is the bad case, our "bad" state was found before timeout + log.error("WTF: New Leader={}", newLeader); + return true; + } + return false; // still no bad state, wait for timeout + }); + }); log.info("Enabling TestInjection.updateLogReplayRandomPause"); TestInjection.updateLogReplayRandomPause = "true:100"; - + log.info("Un-Partition & restart leader (NODE0)..."); proxies.get(NODE0).reopen(); NODE0.start(); log.info("Waiting for all nodes and active collection..."); - - cluster.waitForAllNodes(30);; - waitForState("Timeout waiting for leader", COLLECTION, (liveNodes, collectionState) -> { - Replica newLeader = collectionState.getLeader("shard1"); - return newLeader != null && newLeader.getName().equals(leader.getName()); - }); + + cluster.waitForAllNodes(30); + ; + waitForState( + "Timeout waiting for leader", + COLLECTION, + (liveNodes, collectionState) -> { + Replica newLeader = collectionState.getLeader("shard1"); + return newLeader != null && newLeader.getName().equals(leader.getName()); + }); waitForState("Timeout waiting for active collection", COLLECTION, clusterShape(1, 2)); - + cluster.waitForActiveCollection(COLLECTION, 1, 2); log.info("Check docs on both replicas..."); assertDocsExistInBothReplicas(1, committedDocs + uncommittedDocs); - + log.info("Test ok, delete collection..."); CollectionAdminRequest.deleteCollection(COLLECTION).process(cluster.getSolrClient()); } - /** - * Adds the specified 
number of docs directly to the leader, - * using increasing docIds begining with startId. Commits if and only if the boolean is true. + /** + * Adds the specified number of docs directly to the leader, using increasing docIds begining with + * startId. Commits if and only if the boolean is true. */ - private void addDocs(final boolean commit, final int numDocs, final int startId) throws SolrServerException, IOException { + private void addDocs(final boolean commit, final int numDocs, final int startId) + throws SolrServerException, IOException { List docs = new ArrayList<>(numDocs); for (int i = 0; i < numDocs; i++) { @@ -235,13 +255,12 @@ private void addDocs(final boolean commit, final int numDocs, final int startId) } /** - * uses distrib=false RTG requests to verify that every doc between firstDocId and lastDocId + * uses distrib=false RTG requests to verify that every doc between firstDocId and lastDocId * (inclusive) can be found on both the leader and the replica */ - private void assertDocsExistInBothReplicas(int firstDocId, - int lastDocId) throws Exception { + private void assertDocsExistInBothReplicas(int firstDocId, int lastDocId) throws Exception { try (HttpSolrClient leaderSolr = getHttpSolrClient(NODE0.getBaseUrl().toString()); - HttpSolrClient replicaSolr = getHttpSolrClient(NODE1.getBaseUrl().toString())) { + HttpSolrClient replicaSolr = getHttpSolrClient(NODE1.getBaseUrl().toString())) { for (int d = firstDocId; d <= lastDocId; d++) { String docId = String.valueOf(d); assertDocExists("leader", leaderSolr, docId); @@ -251,20 +270,27 @@ private void assertDocsExistInBothReplicas(int firstDocId, } /** - * uses distrib=false RTG requests to verify that the specified docId can be found using the + * uses distrib=false RTG requests to verify that the specified docId can be found using the * specified solr client */ - private void assertDocExists(final String clientName, final HttpSolrClient client, final String docId) throws Exception { - final QueryResponse rsp = (new QueryRequest(params("qt", "/get", - "id", docId, - "_trace", clientName, - "distrib", "false"))) - .process(client, COLLECTION); + private void assertDocExists( + final String clientName, final HttpSolrClient client, final String docId) throws Exception { + final QueryResponse rsp = + (new QueryRequest( + params("qt", "/get", "id", docId, "_trace", clientName, "distrib", "false"))) + .process(client, COLLECTION); assertEquals(0, rsp.getStatus()); - + String match = JSONTestUtil.matchObj("/id", rsp.getResponse().get("doc"), docId); - assertTrue("Doc with id=" + docId + " not found in " + clientName - + " due to: " + match + "; rsp="+rsp, match == null); + assertTrue( + "Doc with id=" + + docId + + " not found in " + + clientName + + " due to: " + + match + + "; rsp=" + + rsp, + match == null); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java index 8ffff31cc14..6319e8a671a 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java @@ -16,6 +16,8 @@ */ package org.apache.solr.cloud; +import com.carrotsearch.randomizedtesting.annotations.Repeat; +import com.codahale.metrics.Meter; import java.io.IOException; import java.lang.invoke.MethodHandles; import java.util.ArrayList; @@ -29,9 +31,6 @@ import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; - -import 
com.carrotsearch.randomizedtesting.annotations.Repeat; -import com.codahale.metrics.Meter; import org.apache.http.HttpResponse; import org.apache.http.client.ClientProtocolException; import org.apache.http.client.HttpClient; @@ -83,10 +82,14 @@ public class TestTlogReplica extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private String collectionName = null; - private final static int REPLICATION_TIMEOUT_SECS = 10; + private static final int REPLICATION_TIMEOUT_SECS = 10; private String suggestedCollectionName() { - return (getTestClass().getSimpleName().replace("Test", "") + "_" + getSaferTestName().split(" ")[0]).replaceAll("(.)(\\p{Upper})", "$1_$2").toLowerCase(Locale.ROOT); + return (getTestClass().getSimpleName().replace("Test", "") + + "_" + + getSaferTestName().split(" ")[0]) + .replaceAll("(.)(\\p{Upper})", "$1_$2") + .toLowerCase(Locale.ROOT); } @BeforeClass @@ -112,13 +115,18 @@ public void setUp() throws Exception { @Override public void tearDown() throws Exception { - for (JettySolrRunner jetty:cluster.getJettySolrRunners()) { + for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { if (!jetty.isRunning()) { log.warn("Jetty {} not running, probably some bad test. Starting it", jetty.getLocalPort()); jetty.start(); } } - if (cluster.getSolrClient().getZkStateReader().getClusterState().getCollectionOrNull(collectionName) != null) { + if (cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollectionOrNull(collectionName) + != null) { log.info("tearDown deleting collection"); CollectionAdminRequest.deleteCollection(collectionName).process(cluster.getSolrClient()); waitForDeletion(collectionName); @@ -127,17 +135,19 @@ public void tearDown() throws Exception { } /** - * Asserts that Update logs exist for replicas of type {@link org.apache.solr.common.cloud.Replica.Type#NRT}, but not - * for replicas of type {@link org.apache.solr.common.cloud.Replica.Type#PULL} + * Asserts that Update logs exist for replicas of type {@link + * org.apache.solr.common.cloud.Replica.Type#NRT}, but not for replicas of type {@link + * org.apache.solr.common.cloud.Replica.Type#PULL} */ private void assertUlogPresence(DocCollection collection) { - for (Slice s:collection.getSlices()) { - for (Replica r:s.getReplicas()) { + for (Slice s : collection.getSlices()) { + for (Replica r : s.getReplicas()) { SolrCore core = null; try { core = cluster.getReplicaJetty(r).getCoreContainer().getCore(r.getCoreName()); assertNotNull(core); - assertTrue("Update log should exist for replicas of type Append", + assertTrue( + "Update log should exist for replicas of type Append", new java.io.File(core.getUlogDir()).exists()); } finally { core.close(); @@ -146,33 +156,43 @@ private void assertUlogPresence(DocCollection collection) { } } - @Repeat(iterations=2) // 2 times to make sure cleanup is complete and we can create the same collection + // 2 times to make sure cleanup is complete and we can create the same collection + @Repeat(iterations = 2) public void testCreateDelete() throws Exception { switch (random().nextInt(3)) { case 0: CollectionAdminRequest.createCollection(collectionName, "conf", 2, 0, 4, 0) - .process(cluster.getSolrClient()); + .process(cluster.getSolrClient()); cluster.waitForActiveCollection(collectionName, 2, 8); break; case 1: // Sometimes don't use SolrJ - String url = String.format(Locale.ROOT, 
"%s/admin/collections?action=CREATE&name=%s&collection.configName=%s&numShards=%s&tlogReplicas=%s", - cluster.getRandomJetty(random()).getBaseUrl(), - collectionName, "conf", - 2, // numShards - 4); // tlogReplicas + String url = + String.format( + Locale.ROOT, + "%s/admin/collections?action=CREATE&name=%s&collection.configName=%s&numShards=%s&tlogReplicas=%s", + cluster.getRandomJetty(random()).getBaseUrl(), + collectionName, + "conf", + 2, // numShards + 4); // tlogReplicas HttpGet createCollectionGet = new HttpGet(url); - HttpResponse httpResponse = cluster.getSolrClient().getHttpClient().execute(createCollectionGet); + HttpResponse httpResponse = + cluster.getSolrClient().getHttpClient().execute(createCollectionGet); assertEquals(200, httpResponse.getStatusLine().getStatusCode()); cluster.waitForActiveCollection(collectionName, 2, 8); break; case 2: // Sometimes use V2 API url = cluster.getRandomJetty(random()).getBaseUrl().toString() + "/____v2/c"; - String requestBody = String.format(Locale.ROOT, "{create:{name:%s, config:%s, numShards:%s, tlogReplicas:%s}}", - collectionName, "conf", - 2, // numShards - 4); // tlogReplicas + String requestBody = + String.format( + Locale.ROOT, + "{create:{name:%s, config:%s, numShards:%s, tlogReplicas:%s}}", + collectionName, + "conf", + 2, // numShards + 4); // tlogReplicas HttpPost createCollectionPost = new HttpPost(url); createCollectionPost.setHeader("Content-type", "application/json"); @@ -187,29 +207,45 @@ public void testCreateDelete() throws Exception { while (true) { DocCollection docCollection = getCollectionState(collectionName); assertNotNull(docCollection); - assertEquals("Expecting 2 shards", - 2, docCollection.getSlices().size()); - assertEquals("Expecting 4 relpicas per shard", - 8, docCollection.getReplicas().size()); - assertEquals("Expecting 8 tlog replicas, 4 per shard", - 8, docCollection.getReplicas(EnumSet.of(Replica.Type.TLOG)).size()); - assertEquals("Expecting no nrt replicas", - 0, docCollection.getReplicas(EnumSet.of(Replica.Type.NRT)).size()); - assertEquals("Expecting no pull replicas", - 0, docCollection.getReplicas(EnumSet.of(Replica.Type.PULL)).size()); - for (Slice s:docCollection.getSlices()) { + assertEquals("Expecting 2 shards", 2, docCollection.getSlices().size()); + assertEquals("Expecting 4 relpicas per shard", 8, docCollection.getReplicas().size()); + assertEquals( + "Expecting 8 tlog replicas, 4 per shard", + 8, + docCollection.getReplicas(EnumSet.of(Replica.Type.TLOG)).size()); + assertEquals( + "Expecting no nrt replicas", + 0, + docCollection.getReplicas(EnumSet.of(Replica.Type.NRT)).size()); + assertEquals( + "Expecting no pull replicas", + 0, + docCollection.getReplicas(EnumSet.of(Replica.Type.PULL)).size()); + for (Slice s : docCollection.getSlices()) { assertTrue(s.getLeader().getType() == Replica.Type.TLOG); - List shardElectionNodes = cluster.getZkClient().getChildren(ZkStateReader.getShardLeadersElectPath(collectionName, s.getName()), null, true); - assertEquals("Unexpected election nodes for Shard: " + s.getName() + ": " + Arrays.toString(shardElectionNodes.toArray()), - 4, shardElectionNodes.size()); + List shardElectionNodes = + cluster + .getZkClient() + .getChildren( + ZkStateReader.getShardLeadersElectPath(collectionName, s.getName()), + null, + true); + assertEquals( + "Unexpected election nodes for Shard: " + + s.getName() + + ": " + + Arrays.toString(shardElectionNodes.toArray()), + 4, + shardElectionNodes.size()); } assertUlogPresence(docCollection); if (reloaded) { break; } else { // 
reload - CollectionAdminResponse response = CollectionAdminRequest.reloadCollection(collectionName) - .process(cluster.getSolrClient()); + CollectionAdminResponse response = + CollectionAdminRequest.reloadCollection(collectionName) + .process(cluster.getSolrClient()); assertEquals(0, response.getStatus()); waitForState("failed waiting for active colletion", collectionName, clusterShape(2, 8)); reloaded = true; @@ -232,20 +268,31 @@ public void testAddDocs() throws Exception { } TimeOut t = new TimeOut(REPLICATION_TIMEOUT_SECS, TimeUnit.SECONDS, TimeSource.NANO_TIME); - for (Replica r:s.getReplicas(EnumSet.of(Replica.Type.TLOG))) { - //TODO: assert replication < REPLICATION_TIMEOUT_SECS + for (Replica r : s.getReplicas(EnumSet.of(Replica.Type.TLOG))) { + // TODO: assert replication < REPLICATION_TIMEOUT_SECS try (HttpSolrClient tlogReplicaClient = getHttpSolrClient(r.getCoreUrl())) { while (true) { try { - assertEquals("Replica " + r.getName() + " not up to date after 10 seconds", - 1, tlogReplicaClient.query(new SolrQuery("*:*")).getResults().getNumFound()); + assertEquals( + "Replica " + r.getName() + " not up to date after 10 seconds", + 1, + tlogReplicaClient.query(new SolrQuery("*:*")).getResults().getNumFound()); // Append replicas process all updates - SolrQuery req = new SolrQuery( - "qt", "/admin/plugins", - "stats", "true"); + SolrQuery req = + new SolrQuery( + "qt", "/admin/plugins", + "stats", "true"); QueryResponse statsResponse = tlogReplicaClient.query(req); - assertEquals("Append replicas should recive all updates. Replica: " + r + ", response: " + statsResponse, - 1L, ((Map)(statsResponse.getResponse()).findRecursive("plugins", "UPDATE", "updateHandler", "stats")).get("UPDATE.updateHandler.cumulativeAdds.count")); + assertEquals( + "Append replicas should recive all updates. 
Replica: " + + r + + ", response: " + + statsResponse, + 1L, + ((Map) + (statsResponse.getResponse()) + .findRecursive("plugins", "UPDATE", "updateHandler", "stats")) + .get("UPDATE.updateHandler.cumulativeAdds.count")); break; } catch (AssertionError e) { if (t.hasTimedOut()) { @@ -269,40 +316,56 @@ public void testAddRemoveTlogReplica() throws Exception { addReplicaToShard("shard2", Replica.Type.TLOG); docCollection = assertNumberOfReplicas(0, 4, 0, true, false); - waitForState("Expecting collection to have 2 shards and 2 replica each", collectionName, clusterShape(2, 4)); + waitForState( + "Expecting collection to have 2 shards and 2 replica each", + collectionName, + clusterShape(2, 4)); - //Delete tlog replica from shard1 + // Delete tlog replica from shard1 CollectionAdminRequest.deleteReplica( - collectionName, - "shard1", - docCollection.getSlice("shard1").getReplicas(EnumSet.of(Replica.Type.TLOG)).get(0).getName()) - .process(cluster.getSolrClient()); + collectionName, + "shard1", + docCollection + .getSlice("shard1") + .getReplicas(EnumSet.of(Replica.Type.TLOG)) + .get(0) + .getName()) + .process(cluster.getSolrClient()); assertNumberOfReplicas(0, 3, 0, true, true); } - private void addReplicaToShard(String shardName, Replica.Type type) throws ClientProtocolException, IOException, SolrServerException { + private void addReplicaToShard(String shardName, Replica.Type type) + throws ClientProtocolException, IOException, SolrServerException { switch (random().nextInt(3)) { case 0: // Add replica with SolrJ - CollectionAdminResponse response = CollectionAdminRequest.addReplicaToShard(collectionName, shardName, type).process(cluster.getSolrClient()); - assertEquals("Unexpected response status: " + response.getStatus(), 0, response.getStatus()); + CollectionAdminResponse response = + CollectionAdminRequest.addReplicaToShard(collectionName, shardName, type) + .process(cluster.getSolrClient()); + assertEquals( + "Unexpected response status: " + response.getStatus(), 0, response.getStatus()); break; case 1: // Add replica with V1 API - String url = String.format(Locale.ROOT, "%s/admin/collections?action=ADDREPLICA&collection=%s&shard=%s&type=%s", - cluster.getRandomJetty(random()).getBaseUrl(), - collectionName, - shardName, - type); + String url = + String.format( + Locale.ROOT, + "%s/admin/collections?action=ADDREPLICA&collection=%s&shard=%s&type=%s", + cluster.getRandomJetty(random()).getBaseUrl(), + collectionName, + shardName, + type); HttpGet addReplicaGet = new HttpGet(url); HttpResponse httpResponse = cluster.getSolrClient().getHttpClient().execute(addReplicaGet); assertEquals(200, httpResponse.getStatusLine().getStatusCode()); break; - case 2:// Add replica with V2 API - url = String.format(Locale.ROOT, "%s/____v2/c/%s/shards", - cluster.getRandomJetty(random()).getBaseUrl(), - collectionName); - String requestBody = String.format(Locale.ROOT, "{add-replica:{shard:%s, type:%s}}", - shardName, - type); + case 2: // Add replica with V2 API + url = + String.format( + Locale.ROOT, + "%s/____v2/c/%s/shards", + cluster.getRandomJetty(random()).getBaseUrl(), + collectionName); + String requestBody = + String.format(Locale.ROOT, "{add-replica:{shard:%s, type:%s}}", shardName, type); HttpPost addReplicaPost = new HttpPost(url); addReplicaPost.setHeader("Content-type", "application/json"); addReplicaPost.setEntity(new StringEntity(requestBody)); @@ -320,26 +383,33 @@ public void testKillLeader() throws Exception { doReplaceLeader(false); } - public void testRealTimeGet() throws 
SolrServerException, IOException, KeeperException, InterruptedException { + public void testRealTimeGet() + throws SolrServerException, IOException, KeeperException, InterruptedException { // should be redirected to Replica.Type.REALTIME - int numReplicas = random().nextBoolean()?1:2; - int numNrtReplicas = random().nextBoolean()?0:2; - CollectionAdminRequest.createCollection(collectionName, "conf", 1, numNrtReplicas, numReplicas, 0) - .process(cluster.getSolrClient()); - waitForState("Unexpected replica count", collectionName, activeReplicaCount(numNrtReplicas, numReplicas, 0)); - DocCollection docCollection = assertNumberOfReplicas(numNrtReplicas, numReplicas, 0, false, true); + int numReplicas = random().nextBoolean() ? 1 : 2; + int numNrtReplicas = random().nextBoolean() ? 0 : 2; + CollectionAdminRequest.createCollection( + collectionName, "conf", 1, numNrtReplicas, numReplicas, 0) + .process(cluster.getSolrClient()); + waitForState( + "Unexpected replica count", + collectionName, + activeReplicaCount(numNrtReplicas, numReplicas, 0)); + DocCollection docCollection = + assertNumberOfReplicas(numNrtReplicas, numReplicas, 0, false, true); HttpClient httpClient = cluster.getSolrClient().getHttpClient(); int id = 0; Slice slice = docCollection.getSlice("shard1"); List ids = new ArrayList<>(slice.getReplicas().size()); - for (Replica rAdd:slice.getReplicas()) { + for (Replica rAdd : slice.getReplicas()) { try (HttpSolrClient client = getHttpSolrClient(rAdd.getCoreUrl(), httpClient)) { client.add(new SolrInputDocument("id", String.valueOf(id), "foo_s", "bar")); } - SolrDocument docCloudClient = cluster.getSolrClient().getById(collectionName, String.valueOf(id)); + SolrDocument docCloudClient = + cluster.getSolrClient().getById(collectionName, String.valueOf(id)); assertNotNull(docCloudClient); assertEquals("bar", docCloudClient.getFieldValue("foo_s")); - for (Replica rGet:slice.getReplicas()) { + for (Replica rGet : slice.getReplicas()) { try (HttpSolrClient client = getHttpSolrClient(rGet.getCoreUrl(), httpClient)) { SolrDocument doc = client.getById(String.valueOf(id)); assertEquals("bar", doc.getFieldValue("foo_s")); @@ -349,7 +419,7 @@ public void testRealTimeGet() throws SolrServerException, IOException, KeeperExc id++; } SolrDocumentList previousAllIdsResult = null; - for (Replica rAdd:slice.getReplicas()) { + for (Replica rAdd : slice.getReplicas()) { try (HttpSolrClient client = getHttpSolrClient(rAdd.getCoreUrl(), httpClient)) { SolrDocumentList allIdsResult = client.getById(ids); if (previousAllIdsResult != null) { @@ -378,53 +448,58 @@ private void doReplaceLeader(boolean removeReplica) throws Exception { assertEquals(1, leaderClient.query(new SolrQuery("*:*")).getResults().getNumFound()); } - waitForNumDocsInAllReplicas(1, docCollection.getReplicas(EnumSet.of(Replica.Type.TLOG)), REPLICATION_TIMEOUT_SECS); + waitForNumDocsInAllReplicas( + 1, docCollection.getReplicas(EnumSet.of(Replica.Type.TLOG)), REPLICATION_TIMEOUT_SECS); // Delete leader replica from shard1 JettySolrRunner leaderJetty = null; if (removeReplica) { - CollectionAdminRequest.deleteReplica( - collectionName, - "shard1", - s.getLeader().getName()) - .process(cluster.getSolrClient()); + CollectionAdminRequest.deleteReplica(collectionName, "shard1", s.getLeader().getName()) + .process(cluster.getSolrClient()); } else { leaderJetty = cluster.getReplicaJetty(s.getLeader()); leaderJetty.stop(); waitForState("Leader replica not removed", collectionName, clusterShape(1, 1)); // Wait for cluster state to be updated - 
waitForState("Replica state not updated in cluster state", - collectionName, clusterStateReflectsActiveAndDownReplicas()); + waitForState( + "Replica state not updated in cluster state", + collectionName, + clusterStateReflectsActiveAndDownReplicas()); } docCollection = assertNumberOfReplicas(0, 1, 0, true, true); // Wait until a new leader is elected waitForLeaderChange(leaderJetty, "shard1"); - + // There is a new leader, I should be able to add and commit cluster.getSolrClient().add(collectionName, new SolrInputDocument("id", "2", "foo", "zoo")); cluster.getSolrClient().commit(collectionName); // Queries should still work - waitForNumDocsInAllReplicas(2, docCollection.getReplicas(EnumSet.of(Replica.Type.TLOG)), REPLICATION_TIMEOUT_SECS); + waitForNumDocsInAllReplicas( + 2, docCollection.getReplicas(EnumSet.of(Replica.Type.TLOG)), REPLICATION_TIMEOUT_SECS); // Start back the node if (removeReplica) { addReplicaWithRetries(); - + } else { leaderJetty.start(); } waitForState("Expected collection to be 1x2", collectionName, clusterShape(1, 2)); // added replica should replicate from the leader - waitForNumDocsInAllReplicas(2, docCollection.getReplicas(EnumSet.of(Replica.Type.TLOG)), REPLICATION_TIMEOUT_SECS); + waitForNumDocsInAllReplicas( + 2, docCollection.getReplicas(EnumSet.of(Replica.Type.TLOG)), REPLICATION_TIMEOUT_SECS); } private void addReplicaWithRetries() throws SolrServerException, IOException { int maxAttempts = 3; - for (int i = 0; i < maxAttempts ; i++) { + for (int i = 0; i < maxAttempts; i++) { try { - CollectionAdminResponse respone = CollectionAdminRequest.addReplicaToShard(collectionName, "shard1", Replica.Type.TLOG).process(cluster.getSolrClient()); - // This is an unfortunate hack. There are cases where the ADDREPLICA fails, will create a Jira to address that separately. for now, we'll retry + CollectionAdminResponse respone = + CollectionAdminRequest.addReplicaToShard(collectionName, "shard1", Replica.Type.TLOG) + .process(cluster.getSolrClient()); + // This is an unfortunate hack. There are cases where the ADDREPLICA fails, will create a + // Jira to address that separately. 
for now, we'll retry if (respone.isSuccess()) { break; } @@ -444,12 +519,15 @@ public void testKillTlogReplica() throws Exception { cluster.getSolrClient().commit(collectionName); waitForNumDocsInAllActiveReplicas(1); - JettySolrRunner pullReplicaJetty = cluster.getReplicaJetty(docCollection.getSlice("shard1").getReplicas(EnumSet.of(Replica.Type.TLOG)).get(0)); + JettySolrRunner pullReplicaJetty = + cluster.getReplicaJetty( + docCollection.getSlice("shard1").getReplicas(EnumSet.of(Replica.Type.TLOG)).get(0)); pullReplicaJetty.stop(); waitForState("Replica not removed", collectionName, activeReplicaCount(0, 1, 0)); waitForLeaderChange(pullReplicaJetty, "shard1"); -// // Also wait for the replica to be placed in state="down" -// waitForState("Didn't update state", collectionName, clusterStateReflectsActiveAndDownReplicas()); + // // Also wait for the replica to be placed in state="down" + // waitForState("Didn't update state", collectionName, + // clusterStateReflectsActiveAndDownReplicas()); cluster.getSolrClient().add(collectionName, new SolrInputDocument("id", "2", "foo", "bar")); cluster.getSolrClient().commit(collectionName); @@ -473,28 +551,43 @@ public void testOnlyLeaderIndexes() throws Exception { .process(cloudClient, collectionName); { - long docsPending = (long) getSolrCore(true).get(0).getSolrMetricsContext().getMetricRegistry().getGauges().get("UPDATE.updateHandler.docsPending").getValue(); - assertEquals("Expected 4 docs are pending in core " + getSolrCore(true).get(0).getCoreDescriptor(),4, docsPending); + long docsPending = + (long) + getSolrCore(true) + .get(0) + .getSolrMetricsContext() + .getMetricRegistry() + .getGauges() + .get("UPDATE.updateHandler.docsPending") + .getValue(); + assertEquals( + "Expected 4 docs are pending in core " + getSolrCore(true).get(0).getCoreDescriptor(), + 4, + docsPending); } for (SolrCore solrCore : getSolrCore(false)) { - long docsPending = (long) solrCore.getSolrMetricsContext().getMetricRegistry().getGauges().get("UPDATE.updateHandler.docsPending").getValue(); - assertEquals("Expected non docs are pending in core " + solrCore.getCoreDescriptor(),0, docsPending); + long docsPending = + (long) + solrCore + .getSolrMetricsContext() + .getMetricRegistry() + .getGauges() + .get("UPDATE.updateHandler.docsPending") + .getValue(); + assertEquals( + "Expected non docs are pending in core " + solrCore.getCoreDescriptor(), 0, docsPending); } checkRTG(1, 4, cluster.getJettySolrRunners()); - new UpdateRequest() - .deleteById("1") - .deleteByQuery("id:2") - .process(cloudClient, collectionName); + new UpdateRequest().deleteById("1").deleteByQuery("id:2").process(cloudClient, collectionName); // The DBQ is not processed at replicas, so we still can get doc2 and other docs by RTG - checkRTG(2,4, getSolrRunner(false)); + checkRTG(2, 4, getSolrRunner(false)); Map timeCopyOverPerCores = getTimesCopyOverOldUpdates(getSolrCore(false)); - new UpdateRequest() - .commit(cloudClient, collectionName); + new UpdateRequest().commit(cloudClient, collectionName); waitForNumDocsInAllActiveReplicas(2); // There are a small delay between new searcher and copy over old updates operation @@ -506,24 +599,28 @@ public void testOnlyLeaderIndexes() throws Exception { Thread.sleep(500); } } - assertTrue("Expect only one copy over updates per cores", assertCopyOverOldUpdates(1, timeCopyOverPerCores)); + assertTrue( + "Expect only one copy over updates per cores", + assertCopyOverOldUpdates(1, timeCopyOverPerCores)); boolean firstCommit = true; // UpdateLog copy over old updates 
for (int i = 15; i <= 150; i++) { - cloudClient.add(collectionName, sdoc("id",String.valueOf(i))); + cloudClient.add(collectionName, sdoc("id", String.valueOf(i))); if (random().nextInt(100) < 15 & i != 150) { if (firstCommit) { // because tlog replicas periodically ask leader for new segments, // therefore the copy over old updates action must not be triggered until // tlog replicas actually get new segments - assertTrue("Expect only one copy over updates per cores", assertCopyOverOldUpdates(1, timeCopyOverPerCores)); + assertTrue( + "Expect only one copy over updates per cores", + assertCopyOverOldUpdates(1, timeCopyOverPerCores)); firstCommit = false; } cloudClient.commit(collectionName); } } - checkRTG(120,150, cluster.getJettySolrRunners()); + checkRTG(120, 150, cluster.getJettySolrRunners()); waitForReplicasCatchUp(4 * REPLICATION_TIMEOUT_SECS); } @@ -536,68 +633,70 @@ public void testRecovery() throws Exception { .add(sdoc("id", "3")) .add(sdoc("id", "4")) .commit(cloudClient, collectionName); - new UpdateRequest() - .add(sdoc("id", "5")) - .process(cloudClient, collectionName); + new UpdateRequest().add(sdoc("id", "5")).process(cloudClient, collectionName); JettySolrRunner solrRunner = getSolrRunner(false).get(0); solrRunner.stop(); - waitForState("Replica still up", collectionName, activeReplicaCount(0,1,0)); - new UpdateRequest() - .add(sdoc("id", "6")) - .process(cloudClient, collectionName); + waitForState("Replica still up", collectionName, activeReplicaCount(0, 1, 0)); + new UpdateRequest().add(sdoc("id", "6")).process(cloudClient, collectionName); solrRunner.start(); - waitForState("Replica didn't recover", collectionName, activeReplicaCount(0,2,0)); - // We skip peerSync, so replica will always trigger commit on leader - // We query only the non-leader replicas, since we haven't opened a new searcher on the leader yet - waitForNumDocsInAllReplicas(4, getNonLeaderReplias(collectionName), 10); //timeout for stale collection state + waitForState("Replica didn't recover", collectionName, activeReplicaCount(0, 2, 0)); + // We skip peerSync, so replica will always trigger commit on leader. We query only the + // non-leader replicas, since we haven't opened a new searcher on the leader yet - // If I add the doc immediately, the leader fails to communicate with the follower with broken pipe. - // Options are, wait or retry... + // timeout for stale collection state + waitForNumDocsInAllReplicas(4, getNonLeaderReplias(collectionName), 10); + + // If I add the doc immediately, the leader fails to communicate with the follower with broken + // pipe. Options are, wait or retry... for (int i = 0; i < 3; i++) { UpdateRequest ureq = new UpdateRequest().add(sdoc("id", "7")); ureq.setParam("collection", collectionName); NamedList response = cloudClient.request(ureq); - if ((Integer)((NamedList)response.get("responseHeader")).get(UpdateRequest.REPFACT) >= 2) { + if ((Integer) ((NamedList) response.get("responseHeader")).get(UpdateRequest.REPFACT) + >= 2) { break; } log.info("Min RF not achieved yet. 
retrying"); } - checkRTG(3,7, cluster.getJettySolrRunners()); + checkRTG(3, 7, cluster.getJettySolrRunners()); try { TestInjection.skipIndexWriterCommitOnClose = true; solrRunner.stop(); - waitForState("Replica still up", collectionName, activeReplicaCount(0,1,0)); + waitForState("Replica still up", collectionName, activeReplicaCount(0, 1, 0)); } finally { TestInjection.skipIndexWriterCommitOnClose = false; } solrRunner.start(); - waitForState("Replica didn't recover", collectionName, activeReplicaCount(0,2,0)); - waitForNumDocsInAllReplicas(5, getNonLeaderReplias(collectionName), 10); //timeout for stale collection state - checkRTG(3,7, cluster.getJettySolrRunners()); + waitForState("Replica didn't recover", collectionName, activeReplicaCount(0, 2, 0)); + // timeout for stale collection state + waitForNumDocsInAllReplicas(5, getNonLeaderReplias(collectionName), 10); + checkRTG(3, 7, cluster.getJettySolrRunners()); cluster.getSolrClient().commit(collectionName); // Test replica recovery apply buffer updates Semaphore waitingForBufferUpdates = new Semaphore(0); Semaphore waitingForReplay = new Semaphore(0); - RecoveryStrategy.testing_beforeReplayBufferingUpdates = () -> { - try { - waitingForReplay.release(); - waitingForBufferUpdates.acquire(); - } catch (InterruptedException e) { - e.printStackTrace(); - fail("Test interrupted: " + e.getMessage()); - } - }; + RecoveryStrategy.testing_beforeReplayBufferingUpdates = + () -> { + try { + waitingForReplay.release(); + waitingForBufferUpdates.acquire(); + } catch (InterruptedException e) { + e.printStackTrace(); + fail("Test interrupted: " + e.getMessage()); + } + }; solrRunner.stop(); solrRunner.start(); waitingForReplay.acquire(); - // If I add the doc immediately, the leader fails to communicate with the follower with broken pipe. - // Options are, wait or retry... + // If I add the doc immediately, the leader fails to communicate with the follower with broken + // pipe. Options are, wait or retry... for (int i = 0; i < 3; i++) { UpdateRequest ureq = new UpdateRequest().add(sdoc("id", "8")); ureq.setParam("collection", collectionName); NamedList response = cloudClient.request(ureq); - if ((Integer)((NamedList)response.get("responseHeader")).get(UpdateRequest.REPFACT) >= 2) { + if ((Integer) ((NamedList) response.get("responseHeader")).get(UpdateRequest.REPFACT) + >= 2) { break; } log.info("Min RF not achieved yet. 
retrying"); @@ -608,49 +707,44 @@ public void testRecovery() throws Exception { .process(cloudClient, collectionName); waitingForBufferUpdates.release(); RecoveryStrategy.testing_beforeReplayBufferingUpdates = null; - waitForState("Replica didn't recover", collectionName, activeReplicaCount(0,2,0)); - checkRTG(3,10, cluster.getJettySolrRunners()); + waitForState("Replica didn't recover", collectionName, activeReplicaCount(0, 2, 0)); + checkRTG(3, 10, cluster.getJettySolrRunners()); for (SolrCore solrCore : getSolrCore(false)) { - RefCounted iwRef = solrCore.getUpdateHandler().getSolrCoreState().getIndexWriter(null); - assertFalse("IndexWriter at replicas must not see updates ", iwRef.get().hasUncommittedChanges()); + RefCounted iwRef = + solrCore.getUpdateHandler().getSolrCoreState().getIndexWriter(null); + assertFalse( + "IndexWriter at replicas must not see updates ", iwRef.get().hasUncommittedChanges()); iwRef.decref(); } } private List getNonLeaderReplias(String collectionName) { - return getCollectionState(collectionName).getReplicas().stream().filter( - (r)-> !r.getBool("leader", false)).collect(Collectors.toList()); + return getCollectionState(collectionName).getReplicas().stream() + .filter((r) -> !r.getBool("leader", false)) + .collect(Collectors.toList()); } - public void testDeleteById() throws Exception{ - createAndWaitForCollection(1,0,2,0); + public void testDeleteById() throws Exception { + createAndWaitForCollection(1, 0, 2, 0); CloudSolrClient cloudClient = cluster.getSolrClient(); - new UpdateRequest() - .deleteByQuery("*:*") - .commit(cluster.getSolrClient(), collectionName); - new UpdateRequest() - .add(sdoc("id", "1")) - .commit(cloudClient, collectionName); + new UpdateRequest().deleteByQuery("*:*").commit(cluster.getSolrClient(), collectionName); + new UpdateRequest().add(sdoc("id", "1")).commit(cloudClient, collectionName); waitForNumDocsInAllActiveReplicas(1); - new UpdateRequest() - .deleteById("1") - .process(cloudClient, collectionName); + new UpdateRequest().deleteById("1").process(cloudClient, collectionName); boolean successs = false; try { checkRTG(1, 1, cluster.getJettySolrRunners()); successs = true; } catch (AssertionError e) { - //expected + // expected } assertFalse("Doc1 is deleted but it's still exist", successs); } public void testBasicLeaderElection() throws Exception { - createAndWaitForCollection(1,0,2,0); + createAndWaitForCollection(1, 0, 2, 0); CloudSolrClient cloudClient = cluster.getSolrClient(); - new UpdateRequest() - .deleteByQuery("*:*") - .commit(cluster.getSolrClient(), collectionName); + new UpdateRequest().deleteByQuery("*:*").commit(cluster.getSolrClient(), collectionName); new UpdateRequest() .add(sdoc("id", "1")) .add(sdoc("id", "2")) @@ -659,29 +753,28 @@ public void testBasicLeaderElection() throws Exception { oldLeaderJetty.stop(); waitForState("Replica not removed", collectionName, activeReplicaCount(0, 1, 0)); - // Even after the replica is gone, a leader may not be elected yet. Wait for it. + // Even after the replica is gone, a leader may not be elected yet. Wait for it. 
waitForLeaderChange(oldLeaderJetty, "shard1"); - - new UpdateRequest() + + new UpdateRequest() .add(sdoc("id", "3")) .add(sdoc("id", "4")) .process(cloudClient, collectionName); oldLeaderJetty.start(); waitForState("Replica not added", collectionName, activeReplicaCount(0, 2, 0)); - checkRTG(1,4, cluster.getJettySolrRunners()); - new UpdateRequest() - .commit(cloudClient, collectionName); + checkRTG(1, 4, cluster.getJettySolrRunners()); + new UpdateRequest().commit(cloudClient, collectionName); waitForNumDocsInAllActiveReplicas(4, 0); } + public void testRebalanceLeaders() throws Exception { - createAndWaitForCollection(1,0,2,0); + createAndWaitForCollection(1, 0, 2, 0); CloudSolrClient cloudClient = cluster.getSolrClient(); - new UpdateRequest() - .deleteByQuery("*:*") - .commit(cluster.getSolrClient(), collectionName); + new UpdateRequest().deleteByQuery("*:*").commit(cluster.getSolrClient(), collectionName); // Find a replica which isn't leader - DocCollection docCollection = cloudClient.getZkStateReader().getClusterState().getCollection(collectionName); + DocCollection docCollection = + cloudClient.getZkStateReader().getClusterState().getCollection(collectionName); Slice slice = docCollection.getSlices().iterator().next(); Replica newLeader = null; for (Replica replica : slice.getReplicas()) { @@ -706,7 +799,12 @@ public void testRebalanceLeaders() throws Exception { // Wait until a preferredleader flag is set to the new leader candidate TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); while (!timeout.hasTimedOut()) { - Map slices = cloudClient.getZkStateReader().getClusterState().getCollection(collectionName).getSlicesMap(); + Map slices = + cloudClient + .getZkStateReader() + .getClusterState() + .getCollection(collectionName) + .getSlicesMap(); Replica me = slices.get(slice.getName()).getReplica(newLeader.getName()); if (me.getBool("property.preferredleader", false)) { break; @@ -729,8 +827,10 @@ public void testRebalanceLeaders() throws Exception { while (!timeout.hasTimedOut()) { docCollection = getCollectionState(collectionName); Replica leader = docCollection.getSlice(slice.getName()).getLeader(); - if (leader != null && leader.getName().equals(newLeader.getName()) && - leader.isActive(cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes())) { + if (leader != null + && leader.getName().equals(newLeader.getName()) + && leader.isActive( + cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes())) { break; } Thread.sleep(100); @@ -743,33 +843,54 @@ public void testRebalanceLeaders() throws Exception { .add(sdoc("id", "3")) .add(sdoc("id", "4")) .process(cloudClient, collectionName); - checkRTG(1,4, cluster.getJettySolrRunners()); - new UpdateRequest() - .commit(cloudClient, collectionName); + checkRTG(1, 4, cluster.getJettySolrRunners()); + new UpdateRequest().commit(cloudClient, collectionName); waitForNumDocsInAllActiveReplicas(4); } + private void waitForLeaderChange(JettySolrRunner oldLeaderJetty, String shardName) { - waitForState("Expect new leader", collectionName, + waitForState( + "Expect new leader", + collectionName, (liveNodes, collectionState) -> { Replica leader = collectionState.getLeader(shardName); - if (leader == null || !leader.isActive(cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes())) { + if (leader == null + || !leader.isActive( + cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes())) { return false; } - return oldLeaderJetty == null || 
!leader.getNodeName().equals(oldLeaderJetty.getNodeName()); - } - ); + return oldLeaderJetty == null + || !leader.getNodeName().equals(oldLeaderJetty.getNodeName()); + }); } public void testOutOfOrderDBQWithInPlaceUpdates() throws Exception { - createAndWaitForCollection(1,0,2,0); - assertFalse(getSolrCore(true).get(0).getLatestSchema().getField("inplace_updatable_int").indexed()); - assertFalse(getSolrCore(true).get(0).getLatestSchema().getField("inplace_updatable_int").stored()); - assertTrue(getSolrCore(true).get(0).getLatestSchema().getField("inplace_updatable_int").hasDocValues()); + createAndWaitForCollection(1, 0, 2, 0); + assertFalse( + getSolrCore(true).get(0).getLatestSchema().getField("inplace_updatable_int").indexed()); + assertFalse( + getSolrCore(true).get(0).getLatestSchema().getField("inplace_updatable_int").stored()); + assertTrue( + getSolrCore(true) + .get(0) + .getLatestSchema() + .getField("inplace_updatable_int") + .hasDocValues()); List updates = new ArrayList<>(); - updates.add(simulatedUpdateRequest(null, "id", 1, "title_s", "title0_new", "inplace_updatable_int", 5, "_version_", 1L)); // full update + updates.add( + simulatedUpdateRequest( + null, + "id", + 1, + "title_s", + "title0_new", + "inplace_updatable_int", + 5, + "_version_", + 1L)); // full update updates.add(simulatedDBQ("inplace_updatable_int:5", 3L)); updates.add(simulatedUpdateRequest(1L, "id", 1, "inplace_updatable_int", 6, "_version_", 2L)); - for (JettySolrRunner solrRunner: getSolrRunner(false)) { + for (JettySolrRunner solrRunner : getSolrRunner(false)) { try (SolrClient client = solrRunner.newClient()) { for (UpdateRequest up : updates) { up.process(client, collectionName); @@ -782,12 +903,13 @@ public void testOutOfOrderDBQWithInPlaceUpdates() throws Exception { waitForLeaderChange(oldLeaderJetty, "shard1"); oldLeaderJetty.start(); waitForState("Replica not added", collectionName, activeReplicaCount(0, 2, 0)); - checkRTG(1,1, cluster.getJettySolrRunners()); - SolrDocument doc = cluster.getSolrClient().getById(collectionName,"1"); + checkRTG(1, 1, cluster.getJettySolrRunners()); + SolrDocument doc = cluster.getSolrClient().getById(collectionName, "1"); assertNotNull(doc.get("title_s")); } - private UpdateRequest simulatedUpdateRequest(Long prevVersion, Object... fields) throws SolrServerException, IOException { + private UpdateRequest simulatedUpdateRequest(Long prevVersion, Object... fields) + throws SolrServerException, IOException { SolrInputDocument doc = sdoc(fields); // get baseUrl of the leader @@ -804,56 +926,85 @@ private UpdateRequest simulatedUpdateRequest(Long prevVersion, Object... 
fields) return ur; } - private UpdateRequest simulatedDBQ(String query, long version) throws SolrServerException, IOException { + private UpdateRequest simulatedDBQ(String query, long version) + throws SolrServerException, IOException { String baseUrl = getBaseUrl(); UpdateRequest ur = new UpdateRequest(); ur.deleteByQuery(query); - ur.setParam("_version_", ""+version); + ur.setParam("_version_", "" + version); ur.setParam("update.distrib", "FROMLEADER"); ur.setParam("distrib.from", baseUrl); return ur; } private String getBaseUrl() { - DocCollection collection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(collectionName); + DocCollection collection = + cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(collectionName); Slice slice = collection.getSlice("shard1"); return slice.getLeader().getCoreUrl(); } - private DocCollection createAndWaitForCollection(int numShards, int numNrtReplicas, int numTlogReplicas, int numPullReplicas) throws SolrServerException, IOException, KeeperException, InterruptedException { - CollectionAdminRequest.createCollection(collectionName, "conf", numShards, numNrtReplicas, numTlogReplicas, numPullReplicas) - .process(cluster.getSolrClient()); + private DocCollection createAndWaitForCollection( + int numShards, int numNrtReplicas, int numTlogReplicas, int numPullReplicas) + throws SolrServerException, IOException, KeeperException, InterruptedException { + CollectionAdminRequest.createCollection( + collectionName, "conf", numShards, numNrtReplicas, numTlogReplicas, numPullReplicas) + .process(cluster.getSolrClient()); int numReplicasPerShard = numNrtReplicas + numTlogReplicas + numPullReplicas; - waitForState("Expected collection to be created with " + numShards + " shards and " + numReplicasPerShard + " replicas", - collectionName, clusterShape(numShards, numShards * numReplicasPerShard)); - return assertNumberOfReplicas(numNrtReplicas*numShards, numTlogReplicas*numShards, numPullReplicas*numShards, false, true); + waitForState( + "Expected collection to be created with " + + numShards + + " shards and " + + numReplicasPerShard + + " replicas", + collectionName, + clusterShape(numShards, numShards * numReplicasPerShard)); + return assertNumberOfReplicas( + numNrtReplicas * numShards, + numTlogReplicas * numShards, + numPullReplicas * numShards, + false, + true); } - private void waitForNumDocsInAllActiveReplicas(int numDocs) throws IOException, SolrServerException, InterruptedException { + private void waitForNumDocsInAllActiveReplicas(int numDocs) + throws IOException, SolrServerException, InterruptedException { waitForNumDocsInAllActiveReplicas(numDocs, REPLICATION_TIMEOUT_SECS); } - private void waitForNumDocsInAllActiveReplicas(int numDocs, int timeout) throws IOException, SolrServerException, InterruptedException { + private void waitForNumDocsInAllActiveReplicas(int numDocs, int timeout) + throws IOException, SolrServerException, InterruptedException { DocCollection docCollection = getCollectionState(collectionName); - waitForNumDocsInAllReplicas(numDocs, docCollection.getReplicas().stream().filter(r -> r.getState() == Replica.State.ACTIVE).collect(Collectors.toList()), timeout); + waitForNumDocsInAllReplicas( + numDocs, + docCollection.getReplicas().stream() + .filter(r -> r.getState() == Replica.State.ACTIVE) + .collect(Collectors.toList()), + timeout); } - private void waitForNumDocsInAllReplicas(int numDocs, Collection replicas, int timeout) throws IOException, SolrServerException, 
InterruptedException { + private void waitForNumDocsInAllReplicas(int numDocs, Collection replicas, int timeout) + throws IOException, SolrServerException, InterruptedException { waitForNumDocsInAllReplicas(numDocs, replicas, "*:*", timeout); } - private void waitForNumDocsInAllReplicas(int numDocs, Collection replicas, String query, int timeout) throws IOException, SolrServerException, InterruptedException { + private void waitForNumDocsInAllReplicas( + int numDocs, Collection replicas, String query, int timeout) + throws IOException, SolrServerException, InterruptedException { TimeOut t = new TimeOut(timeout, TimeUnit.SECONDS, TimeSource.NANO_TIME); - for (Replica r:replicas) { - if (!r.isActive(cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes())) { + for (Replica r : replicas) { + if (!r.isActive( + cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes())) { continue; } try (HttpSolrClient replicaClient = getHttpSolrClient(r.getCoreUrl())) { while (true) { try { - assertEquals("Replica " + r.getName() + " not up to date after " + timeout + " seconds", - numDocs, replicaClient.query(new SolrQuery(query)).getResults().getNumFound()); + assertEquals( + "Replica " + r.getName() + " not up to date after " + timeout + " seconds", + numDocs, + replicaClient.query(new SolrQuery(query)).getResults().getNumFound()); break; } catch (AssertionError e) { if (t.hasTimedOut()) { @@ -876,25 +1027,42 @@ private void waitForDeletion(String collection) throws InterruptedException, Kee fail("Timed out waiting for collection " + collection + " to be deleted."); } cluster.getSolrClient().getZkStateReader().forceUpdateCollection(collection); - } catch(SolrException e) { + } catch (SolrException e) { return; } - } } - private DocCollection assertNumberOfReplicas(int numNrtReplicas, int numTlogReplicas, int numPullReplicas, boolean updateCollection, boolean activeOnly) throws KeeperException, InterruptedException { + private DocCollection assertNumberOfReplicas( + int numNrtReplicas, + int numTlogReplicas, + int numPullReplicas, + boolean updateCollection, + boolean activeOnly) + throws KeeperException, InterruptedException { if (updateCollection) { cluster.getSolrClient().getZkStateReader().forceUpdateCollection(collectionName); } DocCollection docCollection = getCollectionState(collectionName); assertNotNull(docCollection); - assertEquals("Unexpected number of nrt replicas: " + docCollection, numNrtReplicas, - docCollection.getReplicas(EnumSet.of(Replica.Type.NRT)).stream().filter(r->!activeOnly || r.getState() == Replica.State.ACTIVE).count()); - assertEquals("Unexpected number of pull replicas: " + docCollection, numPullReplicas, - docCollection.getReplicas(EnumSet.of(Replica.Type.PULL)).stream().filter(r->!activeOnly || r.getState() == Replica.State.ACTIVE).count()); - assertEquals("Unexpected number of tlog replicas: " + docCollection, numTlogReplicas, - docCollection.getReplicas(EnumSet.of(Replica.Type.TLOG)).stream().filter(r->!activeOnly || r.getState() == Replica.State.ACTIVE).count()); + assertEquals( + "Unexpected number of nrt replicas: " + docCollection, + numNrtReplicas, + docCollection.getReplicas(EnumSet.of(Replica.Type.NRT)).stream() + .filter(r -> !activeOnly || r.getState() == Replica.State.ACTIVE) + .count()); + assertEquals( + "Unexpected number of pull replicas: " + docCollection, + numPullReplicas, + docCollection.getReplicas(EnumSet.of(Replica.Type.PULL)).stream() + .filter(r -> !activeOnly || r.getState() == Replica.State.ACTIVE) + 
.count()); + assertEquals( + "Unexpected number of tlog replicas: " + docCollection, + numTlogReplicas, + docCollection.getReplicas(EnumSet.of(Replica.Type.TLOG)).stream() + .filter(r -> !activeOnly || r.getState() == Replica.State.ACTIVE) + .count()); return docCollection; } @@ -903,7 +1071,7 @@ private DocCollection assertNumberOfReplicas(int numNrtReplicas, int numTlogRepl */ private CollectionStatePredicate clusterStateReflectsActiveAndDownReplicas() { return (liveNodes, collectionState) -> { - for (Replica r:collectionState.getReplicas()) { + for (Replica r : collectionState.getReplicas()) { if (r.getState() != Replica.State.DOWN && r.getState() != Replica.State.ACTIVE) { return false; } @@ -918,12 +1086,11 @@ private CollectionStatePredicate clusterStateReflectsActiveAndDownReplicas() { }; } - - private CollectionStatePredicate activeReplicaCount(int numNrtReplicas, int numTlogReplicas, int numPullReplicas) { + private CollectionStatePredicate activeReplicaCount( + int numNrtReplicas, int numTlogReplicas, int numPullReplicas) { return (liveNodes, collectionState) -> { int nrtFound = 0, tlogFound = 0, pullFound = 0; - if (collectionState == null) - return false; + if (collectionState == null) return false; for (Slice slice : collectionState) { for (Replica replica : slice) { if (replica.isActive(liveNodes)) @@ -942,7 +1109,9 @@ private CollectionStatePredicate activeReplicaCount(int numNrtReplicas, int numT } } } - return numNrtReplicas == nrtFound && numTlogReplicas == tlogFound && numPullReplicas == pullFound; + return numNrtReplicas == nrtFound + && numTlogReplicas == tlogFound + && numPullReplicas == pullFound; }; } @@ -950,7 +1119,8 @@ private List getSolrCore(boolean isLeader) { List rs = new ArrayList<>(); CloudSolrClient cloudClient = cluster.getSolrClient(); - DocCollection docCollection = cloudClient.getZkStateReader().getClusterState().getCollection(collectionName); + DocCollection docCollection = + cloudClient.getZkStateReader().getClusterState().getCollection(collectionName); for (JettySolrRunner solrRunner : cluster.getJettySolrRunners()) { if (solrRunner.getCoreContainer() == null) continue; @@ -968,16 +1138,18 @@ private List getSolrCore(boolean isLeader) { return rs; } - private void checkRTG(int from, int to, List solrRunners) throws Exception{ - for (JettySolrRunner solrRunner: solrRunners) { + private void checkRTG(int from, int to, List solrRunners) throws Exception { + for (JettySolrRunner solrRunner : solrRunners) { try (SolrClient client = solrRunner.newClient()) { for (int i = from; i <= to; i++) { SolrQuery query = new SolrQuery(); query.set("distrib", false); query.setRequestHandler("/get"); - query.set("id",i); + query.set("id", i); QueryResponse res = client.query(collectionName, query); - assertNotNull("Can not find doc "+ i + " in " + solrRunner.getBaseUrl(),res.getResponse().get("doc")); + assertNotNull( + "Can not find doc " + i + " in " + solrRunner.getBaseUrl(), + res.getResponse().get("doc")); } } } @@ -986,7 +1158,8 @@ private void checkRTG(int from, int to, List solrRunners) throw private List getSolrRunner(boolean isLeader) { List rs = new ArrayList<>(); CloudSolrClient cloudClient = cluster.getSolrClient(); - DocCollection docCollection = cloudClient.getZkStateReader().getClusterState().getCollection(collectionName); + DocCollection docCollection = + cloudClient.getZkStateReader().getClusterState().getCollection(collectionName); for (JettySolrRunner solrRunner : cluster.getJettySolrRunners()) { if (solrRunner.getCoreContainer() == null) 
continue; for (SolrCore solrCore : solrRunner.getCoreContainer().getCores()) { @@ -1004,12 +1177,23 @@ private List getSolrRunner(boolean isLeader) { } private void waitForReplicasCatchUp(int numTry) throws IOException, InterruptedException { - String leaderTimeCommit = getSolrCore(true).get(0).getDeletionPolicy().getLatestCommit().getUserData().get(SolrIndexWriter.COMMIT_TIME_MSEC_KEY); + String leaderTimeCommit = + getSolrCore(true) + .get(0) + .getDeletionPolicy() + .getLatestCommit() + .getUserData() + .get(SolrIndexWriter.COMMIT_TIME_MSEC_KEY); if (leaderTimeCommit == null) return; for (int i = 0; i < numTry; i++) { boolean inSync = true; for (SolrCore solrCore : getSolrCore(false)) { - String replicateTimeCommit = solrCore.getDeletionPolicy().getLatestCommit().getUserData().get(SolrIndexWriter.COMMIT_TIME_MSEC_KEY); + String replicateTimeCommit = + solrCore + .getDeletionPolicy() + .getLatestCommit() + .getUserData() + .get(SolrIndexWriter.COMMIT_TIME_MSEC_KEY); if (!leaderTimeCommit.equals(replicateTimeCommit)) { inSync = false; Thread.sleep(500); @@ -1020,7 +1204,6 @@ private void waitForReplicasCatchUp(int numTry) throws IOException, InterruptedE } fail("Some replicas are not in sync with leader"); - } private boolean assertCopyOverOldUpdates(long delta, Map timesPerCore) { @@ -1040,6 +1223,11 @@ private Map getTimesCopyOverOldUpdates(List cores) { } private long getTimesCopyOverOldUpdates(SolrCore core) { - return ((Meter)core.getSolrMetricsContext().getMetricRegistry().getMetrics().get("TLOG.copyOverOldUpdates.ops")).getCount(); + return ((Meter) + core.getSolrMetricsContext() + .getMetricRegistry() + .getMetrics() + .get("TLOG.copyOverOldUpdates.ops")) + .getCount(); } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java index 0fe45c966df..aa50ef63580 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java @@ -26,7 +26,6 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Set; - import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -56,69 +55,66 @@ import org.slf4j.LoggerFactory; /** - * Test of TolerantUpdateProcessor using a MiniSolrCloud. Updates (that include failures which - * should be tolerated) are explicitly tested against various initial nodes to confirm correct + * Test of TolerantUpdateProcessor using a MiniSolrCloud. Updates (that include failures which + * should be tolerated) are explicitly tested against various initial nodes to confirm correct * behavior regardless of routing. * - *
- * NOTE: This test sets up a static instance of MiniSolrCloud with a single collection - * and several clients pointed at specific nodes. These are all re-used across multiple test methods, + * <p>NOTE: This test sets up a static instance of MiniSolrCloud with a single collection and
+ * several clients pointed at specific nodes. These are all re-used across multiple test methods, + * and assumes that the state of the cluster is healthy. - * <p>
- * */ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private static final int NUM_SHARDS = 2; - private static final int REPLICATION_FACTOR = 2; - private static final int NUM_SERVERS = 5; - + private static final int NUM_SHARDS = 2; + private static final int REPLICATION_FACTOR = 2; + private static final int NUM_SERVERS = 5; + private static final String COLLECTION_NAME = "test_col"; - + /** A basic client for operations at the cloud level, default collection will be set */ private static CloudSolrClient CLOUD_CLIENT; /** A client for talking directly to the leader of shard1 */ private static HttpSolrClient S_ONE_LEADER_CLIENT; - + /** A client for talking directly to the leader of shard2 */ private static HttpSolrClient S_TWO_LEADER_CLIENT; /** A client for talking directly to a passive replica of shard1 */ private static HttpSolrClient S_ONE_NON_LEADER_CLIENT; - + /** A client for talking directly to a passive replica of shard2 */ private static HttpSolrClient S_TWO_NON_LEADER_CLIENT; /** A client for talking directly to a node that has no piece of the collection */ private static HttpSolrClient NO_COLLECTION_CLIENT; - + /** id field doc routing prefix for shard1 */ private static final String S_ONE_PRE = "abc!"; - + /** id field doc routing prefix for shard2 */ private static final String S_TWO_PRE = "XYZ!"; - + @BeforeClass public static void createMiniSolrCloudCluster() throws Exception { - + final String configName = "solrCloudCollectionConfig"; - final File configDir = new File(TEST_HOME() + File.separator + "collection1" + File.separator + "conf"); + final File configDir = + new File(TEST_HOME() + File.separator + "collection1" + File.separator + "conf"); - configureCluster(NUM_SERVERS) - .addConfig(configName, configDir.toPath()) - .configure(); + configureCluster(NUM_SERVERS).addConfig(configName, configDir.toPath()).configure(); CLOUD_CLIENT = cluster.getSolrClient(); CLOUD_CLIENT.setDefaultCollection(COLLECTION_NAME); - CollectionAdminRequest.createCollection(COLLECTION_NAME, configName, NUM_SHARDS, REPLICATION_FACTOR) + CollectionAdminRequest.createCollection( + COLLECTION_NAME, configName, NUM_SHARDS, REPLICATION_FACTOR) .withProperty("config", "solrconfig-distrib-update-processor-chains.xml") .withProperty("schema", "schema15.xml") // string id for doc routing prefix .process(CLOUD_CLIENT); - + cluster.waitForActiveCollection(COLLECTION_NAME, NUM_SHARDS, REPLICATION_FACTOR * NUM_SHARDS); ZkStateReader zkStateReader = CLOUD_CLIENT.getZkStateReader(); @@ -127,7 +123,8 @@ public static void createMiniSolrCloudCluster() throws Exception { HashMap urlMap = new HashMap<>(); for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { URL jettyURL = jetty.getBaseUrl(); - String nodeKey = jettyURL.getHost() + ":" + jettyURL.getPort() + jettyURL.getPath().replace("/","_"); + String nodeKey = + jettyURL.getHost() + ":" + jettyURL.getPort() + jettyURL.getPath().replace("/", "_"); urlMap.put(nodeKey, jettyURL.toString()); } zkStateReader.forceUpdateCollection(COLLECTION_NAME); @@ -138,18 +135,19 @@ public static void createMiniSolrCloudCluster() throws Exception { assertNotNull("slice has null leader: " + slice.toString(), leader); assertNotNull("slice leader has null node name: " + slice.toString(), leader.getNodeName()); String leaderUrl = urlMap.remove(leader.getNodeName()); - assertNotNull("could not find URL for " + shardName + " leader: " + 
leader.getNodeName(), - leaderUrl); - assertEquals("expected two total replicas for: " + slice.getName(), - 2, slice.getReplicas().size()); - + assertNotNull( + "could not find URL for " + shardName + " leader: " + leader.getNodeName(), leaderUrl); + assertEquals( + "expected two total replicas for: " + slice.getName(), 2, slice.getReplicas().size()); + String passiveUrl = null; - + for (Replica replica : slice.getReplicas()) { - if ( ! replica.equals(leader)) { + if (!replica.equals(leader)) { passiveUrl = urlMap.remove(replica.getNodeName()); - assertNotNull("could not find URL for " + shardName + " replica: " + replica.getNodeName(), - passiveUrl); + assertNotNull( + "could not find URL for " + shardName + " replica: " + replica.getNodeName(), + passiveUrl); } } assertNotNull("could not find URL for " + shardName + " replica", passiveUrl); @@ -165,54 +163,77 @@ public static void createMiniSolrCloudCluster() throws Exception { } } assertEquals("Should be exactly one server left (nost hosting either shard)", 1, urlMap.size()); - NO_COLLECTION_CLIENT = getHttpSolrClient(urlMap.values().iterator().next() + - "/" + COLLECTION_NAME + "/"); - + NO_COLLECTION_CLIENT = + getHttpSolrClient(urlMap.values().iterator().next() + "/" + COLLECTION_NAME + "/"); + assertNotNull(S_ONE_LEADER_CLIENT); assertNotNull(S_TWO_LEADER_CLIENT); assertNotNull(S_ONE_NON_LEADER_CLIENT); assertNotNull(S_TWO_NON_LEADER_CLIENT); assertNotNull(NO_COLLECTION_CLIENT); - // sanity check that our S_ONE_PRE & S_TWO_PRE really do map to shard1 & shard2 with default routing - assertEquals(0, CLOUD_CLIENT.add(doc(f("id", S_ONE_PRE + random().nextInt()), - f("expected_shard_s", "shard1"))).getStatus()); - assertEquals(0, CLOUD_CLIENT.add(doc(f("id", S_TWO_PRE + random().nextInt()), - f("expected_shard_s", "shard2"))).getStatus()); + // sanity check that our S_ONE_PRE & S_TWO_PRE really do map to shard1 & shard2 with default + // routing + assertEquals( + 0, + CLOUD_CLIENT + .add(doc(f("id", S_ONE_PRE + random().nextInt()), f("expected_shard_s", "shard1"))) + .getStatus()); + assertEquals( + 0, + CLOUD_CLIENT + .add(doc(f("id", S_TWO_PRE + random().nextInt()), f("expected_shard_s", "shard2"))) + .getStatus()); assertEquals(0, CLOUD_CLIENT.commit().getStatus()); - SolrDocumentList docs = CLOUD_CLIENT.query(params("q", "*:*", - "fl","id,expected_shard_s,[shard]")).getResults(); + SolrDocumentList docs = + CLOUD_CLIENT + .query( + params( + "q", "*:*", + "fl", "id,expected_shard_s,[shard]")) + .getResults(); assertEquals(2, docs.getNumFound()); assertEquals(2, docs.size()); for (SolrDocument doc : docs) { String expected = COLLECTION_NAME + "_" + doc.getFirstValue("expected_shard_s") + "_replica"; String docShard = doc.getFirstValue("[shard]").toString(); - assertTrue("shard routing prefixes don't seem to be aligned anymore, " + - "did someone change the default routing rules? " + - "and/or the the default core name rules? " + - "and/or the numShards used by this test? ... " + - "couldn't find " + expected + " as substring of [shard] == '" + docShard + - "' ... for docId == " + doc.getFirstValue("id"), - docShard.contains(expected)); + assertTrue( + "shard routing prefixes don't seem to be aligned anymore, " + + "did someone change the default routing rules? " + + "and/or the the default core name rules? " + + "and/or the numShards used by this test? ... " + + "couldn't find " + + expected + + " as substring of [shard] == '" + + docShard + + "' ... 
for docId == " + + doc.getFirstValue("id"), + docShard.contains(expected)); } } - + @AfterClass public static void afterClass() throws IOException { - close(S_ONE_LEADER_CLIENT); S_ONE_LEADER_CLIENT = null; - close(S_TWO_LEADER_CLIENT); S_TWO_LEADER_CLIENT = null; - close(S_ONE_NON_LEADER_CLIENT); S_ONE_NON_LEADER_CLIENT = null; - close(S_TWO_NON_LEADER_CLIENT); S_TWO_NON_LEADER_CLIENT = null; - close(NO_COLLECTION_CLIENT); NO_COLLECTION_CLIENT = null; - close(CLOUD_CLIENT); CLOUD_CLIENT = null; + close(S_ONE_LEADER_CLIENT); + S_ONE_LEADER_CLIENT = null; + close(S_TWO_LEADER_CLIENT); + S_TWO_LEADER_CLIENT = null; + close(S_ONE_NON_LEADER_CLIENT); + S_ONE_NON_LEADER_CLIENT = null; + close(S_TWO_NON_LEADER_CLIENT); + S_TWO_NON_LEADER_CLIENT = null; + close(NO_COLLECTION_CLIENT); + NO_COLLECTION_CLIENT = null; + close(CLOUD_CLIENT); + CLOUD_CLIENT = null; } - + private static void close(SolrClient client) throws IOException { if (client != null) { client.close(); } } - + @Before private void clearCollection() throws Exception { assertEquals(0, CLOUD_CLIENT.deleteByQuery("*:*").getStatus()); @@ -220,49 +241,65 @@ private void clearCollection() throws Exception { } public void testSanity() throws Exception { - + // verify some basic sanity checking of indexing & querying across the collection // w/o using our custom update processor chain - - assertEquals(0, CLOUD_CLIENT.add(doc(f("id", S_ONE_PRE + "1"), - f("foo_i", 42))).getStatus()); - assertEquals(0, CLOUD_CLIENT.add(doc(f("id", S_TWO_PRE + "2"), - f("foo_i", 66))).getStatus()); + + assertEquals(0, CLOUD_CLIENT.add(doc(f("id", S_ONE_PRE + "1"), f("foo_i", 42))).getStatus()); + assertEquals(0, CLOUD_CLIENT.add(doc(f("id", S_TWO_PRE + "2"), f("foo_i", 66))).getStatus()); assertEquals(0, CLOUD_CLIENT.commit().getStatus()); - for (SolrClient c : Arrays.asList(S_ONE_LEADER_CLIENT, S_TWO_LEADER_CLIENT, - S_ONE_NON_LEADER_CLIENT, S_TWO_NON_LEADER_CLIENT, - NO_COLLECTION_CLIENT, CLOUD_CLIENT)) { - assertQueryDocIds(c, true, S_ONE_PRE + "1", S_TWO_PRE + "2"); + for (SolrClient c : + Arrays.asList( + S_ONE_LEADER_CLIENT, S_TWO_LEADER_CLIENT, + S_ONE_NON_LEADER_CLIENT, S_TWO_NON_LEADER_CLIENT, + NO_COLLECTION_CLIENT, CLOUD_CLIENT)) { + assertQueryDocIds(c, true, S_ONE_PRE + "1", S_TWO_PRE + "2"); assertQueryDocIds(c, false, "id_not_exists"); // verify adding 2 broken docs causes a clint exception - SolrException e = expectThrows(SolrException.class, - "did not get a top level exception when more then 10 docs failed", () -> - update(params(), - doc(f("id", S_ONE_PRE + "X"), f("foo_i", "bogus_val_X")), - doc(f("id", S_TWO_PRE + "Y"), f("foo_i", "bogus_val_Y")) - ).process(c) - ); - assertEquals("not the type of error we were expecting ("+e.code()+"): " + e.toString(), - 400, e.code()); - + SolrException e = + expectThrows( + SolrException.class, + "did not get a top level exception when more then 10 docs failed", + () -> + update( + params(), + doc(f("id", S_ONE_PRE + "X"), f("foo_i", "bogus_val_X")), + doc(f("id", S_TWO_PRE + "Y"), f("foo_i", "bogus_val_Y"))) + .process(c)); + assertEquals( + "not the type of error we were expecting (" + e.code() + "): " + e.toString(), + 400, + e.code()); + // verify malformed deleteByQuerys fail - e = expectThrows(SolrException.class, - "sanity check for malformed DBQ didn't fail", - () -> update(params()).deleteByQuery("foo_i:not_a_num").process(c)); + e = + expectThrows( + SolrException.class, + "sanity check for malformed DBQ didn't fail", + () -> update(params()).deleteByQuery("foo_i:not_a_num").process(c)); 
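The opportunistic-concurrency deletes exercised next rely on Solr's optimistic locking via the _version_ value carried by each delete: a negative version asserts the document must not exist, a positive one that it must. A minimal client-side sketch of the two failure modes, assuming a SolrJ SolrClient and a placeholder collection name "test_col" (this sketch is not part of the patch):

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.request.UpdateRequest;

    class VersionedDeleteSketch {
      static void versionedDeletes(SolrClient client) throws Exception {
        // _version_ = -1 asserts the doc must NOT exist, so deleting an
        // existing id is rejected with a 409 conflict instead of removing it.
        new UpdateRequest().deleteById("abc!1", -1L).process(client, "test_col");

        // _version_ = 1 asserts the doc MUST exist, so deleting a missing
        // id is rejected with a 409 conflict as well.
        new UpdateRequest().deleteById("id_not_exists", 1L).process(client, "test_col");
      }
    }

Outside a tolerant update chain these conflicts surface as thrown SolrExceptions, which is exactly what the 409 assertions below check for.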
assertEquals("not the expected DBQ failure: " + e.getMessage(), 400, e.code()); - + // verify opportunistic concurrency deletions fail as we expect when docs are / aren't present - for (UpdateRequest r : new UpdateRequest[] { - update(params("commit", "true")).deleteById(S_ONE_PRE + "1", -1L), - update(params("commit", "true")).deleteById(S_TWO_PRE + "2", -1L), - update(params("commit", "true")).deleteById("id_not_exists", 1L) }) { - e = expectThrows(SolrException.class, "sanity check for opportunistic concurrency delete didn't fail", - () -> r.process(c) - ); - assertEquals("not the expected opportunistic concurrency failure code: " - + r.toString() + " => " + e.getMessage(), 409, e.code()); + for (UpdateRequest r : + new UpdateRequest[] { + update(params("commit", "true")).deleteById(S_ONE_PRE + "1", -1L), + update(params("commit", "true")).deleteById(S_TWO_PRE + "2", -1L), + update(params("commit", "true")).deleteById("id_not_exists", 1L) + }) { + e = + expectThrows( + SolrException.class, + "sanity check for opportunistic concurrency delete didn't fail", + () -> r.process(c)); + assertEquals( + "not the expected opportunistic concurrency failure code: " + + r.toString() + + " => " + + e.getMessage(), + 409, + e.code()); } } } @@ -271,147 +308,210 @@ public void testSanity() throws Exception { public void testVariousDeletesViaCloudClient() throws Exception { testVariousDeletes(CLOUD_CLIENT); } + public void testVariousDeletesViaShard1LeaderClient() throws Exception { testVariousDeletes(S_ONE_LEADER_CLIENT); } + public void testVariousDeletesViaShard2LeaderClient() throws Exception { testVariousDeletes(S_TWO_LEADER_CLIENT); } + public void testVariousDeletesViaShard1NonLeaderClient() throws Exception { testVariousDeletes(S_ONE_NON_LEADER_CLIENT); } + public void testVariousDeletesViaShard2NonLeaderClient() throws Exception { testVariousDeletes(S_TWO_NON_LEADER_CLIENT); } + public void testVariousDeletesViaNoCollectionClient() throws Exception { testVariousDeletes(NO_COLLECTION_CLIENT); } - + protected static void testVariousDeletes(SolrClient client) throws Exception { assertNotNull("client not initialized", client); // 2 docs, one on each shard final String docId1 = S_ONE_PRE + "42"; final String docId2 = S_TWO_PRE + "666"; - + UpdateResponse rsp = null; - + // add 1 doc to each shard - rsp = update(params("update.chain", "tolerant-chain-max-errors-10", - "commit", "true"), - doc(f("id", docId1), f("foo_i", "2001")), - doc(f("id", docId2), f("foo_i", "1976"))).process(client); + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-10", + "commit", "true"), + doc(f("id", docId1), f("foo_i", "2001")), + doc(f("id", docId2), f("foo_i", "1976"))) + .process(client); assertEquals(0, rsp.getStatus()); - // attempt to delete individual doc id(s) that should fail because of opportunistic concurrency constraints - for (String id : new String[] { docId1, docId2 }) { - rsp = update(params("update.chain", "tolerant-chain-max-errors-10", - "commit", "true")).deleteById(id, -1L).process(client); + // attempt to delete individual doc id(s) that should fail because of opportunistic concurrency + // constraints + for (String id : new String[] {docId1, docId2}) { + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-10", + "commit", "true")) + .deleteById(id, -1L) + .process(client); assertEquals(0, rsp.getStatus()); - assertUpdateTolerantErrors("failed opportunistic concurrent delId="+id, rsp, - delIErr(id)); + assertUpdateTolerantErrors("failed opportunistic 
concurrent delId=" + id, rsp, delIErr(id)); } - + // multiple failed deletes from the same shard (via opportunistic concurrent w/ bogus ids) - rsp = update(params("update.chain", "tolerant-chain-max-errors-10", - "commit", "true") - ).deleteById(S_ONE_PRE + "X", +1L).deleteById(S_ONE_PRE + "Y", +1L).process(client); + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-10", + "commit", "true")) + .deleteById(S_ONE_PRE + "X", +1L) + .deleteById(S_ONE_PRE + "Y", +1L) + .process(client); assertEquals(0, rsp.getStatus()); - assertUpdateTolerantErrors("failed opportunistic concurrent delete by id for 2 bogus docs", rsp, - delIErr(S_ONE_PRE + "X"), delIErr(S_ONE_PRE + "Y")); + assertUpdateTolerantErrors( + "failed opportunistic concurrent delete by id for 2 bogus docs", + rsp, + delIErr(S_ONE_PRE + "X"), + delIErr(S_ONE_PRE + "Y")); assertQueryDocIds(client, true, docId1, docId2); - + // multiple failed deletes from the diff shards due to opportunistic concurrency constraints - rsp = update(params("update.chain", "tolerant-chain-max-errors-10", - "commit", "true")).deleteById(docId2, -1L).deleteById(docId1, -1L).process(client); + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-10", + "commit", "true")) + .deleteById(docId2, -1L) + .deleteById(docId1, -1L) + .process(client); assertEquals(0, rsp.getStatus()); - assertUpdateTolerantErrors("failed opportunistic concurrent delete by id for 2 docs", rsp, - delIErr(docId1), delIErr(docId2)); + assertUpdateTolerantErrors( + "failed opportunistic concurrent delete by id for 2 docs", + rsp, + delIErr(docId1), + delIErr(docId2)); assertQueryDocIds(client, true, docId1, docId2); // deleteByQuery using malformed query (fail) - rsp = update(params("update.chain", "tolerant-chain-max-errors-10", - "commit", "true")).deleteByQuery("bogus_field:foo").process(client); + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-10", + "commit", "true")) + .deleteByQuery("bogus_field:foo") + .process(client); assertEquals(0, rsp.getStatus()); - assertUpdateTolerantErrors("failed opportunistic concurrent delete by query", rsp, - delQErr("bogus_field:foo")); + assertUpdateTolerantErrors( + "failed opportunistic concurrent delete by query", rsp, delQErr("bogus_field:foo")); assertQueryDocIds(client, true, docId1, docId2); // mix 2 deleteByQuery, one malformed (fail), one that doesn't match anything (ok) - rsp = update(params("update.chain", "tolerant-chain-max-errors-10", - "commit", "true") - ).deleteByQuery("bogus_field:foo").deleteByQuery("foo_i:23").process(client); + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-10", + "commit", "true")) + .deleteByQuery("bogus_field:foo") + .deleteByQuery("foo_i:23") + .process(client); assertEquals(0, rsp.getStatus()); - assertUpdateTolerantErrors("failed opportunistic concurrent delete by query", rsp, - delQErr("bogus_field:foo")); + assertUpdateTolerantErrors( + "failed opportunistic concurrent delete by query", rsp, delQErr("bogus_field:foo")); assertQueryDocIds(client, true, docId1, docId2); - + // mix 2 deleteById using _version_=-1, one for real doc1 (fail), one for bogus id (ok) - rsp = update(params("update.chain", "tolerant-chain-max-errors-10", - "commit", "true") - ).deleteById(docId1, -1L).deleteById("bogus", -1L).process(client); + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-10", + "commit", "true")) + .deleteById(docId1, -1L) + .deleteById("bogus", -1L) + .process(client); assertEquals(0, rsp.getStatus()); 
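When the same kind of conflicting delete goes through the tolerant update chain, the request as a whole still succeeds and the failure is only reported back in the response header, which is what assertUpdateTolerantErrors inspects. A rough sketch of the client-side view, again with the placeholder collection "test_col" and assuming the chain reports tolerated failures under the "errors" header key as this test class does:

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.request.UpdateRequest;
    import org.apache.solr.client.solrj.response.UpdateResponse;

    class TolerantDeleteSketch {
      static void tolerantDelete(SolrClient client) throws Exception {
        UpdateRequest req = new UpdateRequest();
        req.setParam("update.chain", "tolerant-chain-max-errors-10");
        req.setParam("commit", "true");
        req.deleteById("abc!42", -1L); // conflicts if the doc exists

        UpdateResponse rsp = req.process(client, "test_col");
        // the request itself reports success ...
        System.out.println("status=" + rsp.getStatus()); // 0
        // ... and the tolerated failure is listed in the response header
        System.out.println("errors=" + rsp.getResponseHeader().get("errors"));
      }
    }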
- assertUpdateTolerantErrors("failed opportunistic concurrent delete by id: exists", rsp, - delIErr(docId1)); + assertUpdateTolerantErrors( + "failed opportunistic concurrent delete by id: exists", rsp, delIErr(docId1)); assertQueryDocIds(client, true, docId1, docId2); - + // mix 2 deleteById using _version_=1, one for real doc1 (ok, deleted), one for bogus id (fail) - rsp = update(params("update.chain", "tolerant-chain-max-errors-10", - "commit", "true") - ).deleteById(docId1, +1L).deleteById("bogusId", +1L).process(client); + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-10", + "commit", "true")) + .deleteById(docId1, +1L) + .deleteById("bogusId", +1L) + .process(client); assertEquals(0, rsp.getStatus()); - assertUpdateTolerantErrors("failed opportunistic concurrent delete by id: bogus", rsp, - delIErr("bogusId")); + assertUpdateTolerantErrors( + "failed opportunistic concurrent delete by id: bogus", rsp, delIErr("bogusId")); assertQueryDocIds(client, false, docId1); assertQueryDocIds(client, true, docId2); - + // mix 2 deleteByQuery, one malformed (fail), one that alctaully removes some docs (ok) assertQueryDocIds(client, true, docId2); - rsp = update(params("update.chain", "tolerant-chain-max-errors-10", - "commit", "true") - ).deleteByQuery("bogus_field:foo").deleteByQuery("foo_i:1976").process(client); + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-10", + "commit", "true")) + .deleteByQuery("bogus_field:foo") + .deleteByQuery("foo_i:1976") + .process(client); assertEquals(0, rsp.getStatus()); - assertUpdateTolerantErrors("failed opportunistic concurrent delete by query", rsp, - delQErr("bogus_field:foo")); + assertUpdateTolerantErrors( + "failed opportunistic concurrent delete by query", rsp, delQErr("bogus_field:foo")); assertQueryDocIds(client, false, docId2); - } - // public void testVariousAddsViaCloudClient() throws Exception { testVariousAdds(CLOUD_CLIENT); } + public void testVariousAddsViaShard1LeaderClient() throws Exception { testVariousAdds(S_ONE_LEADER_CLIENT); } + public void testVariousAddsViaShard2LeaderClient() throws Exception { testVariousAdds(S_TWO_LEADER_CLIENT); } + public void testVariousAddsViaShard1NonLeaderClient() throws Exception { testVariousAdds(S_ONE_NON_LEADER_CLIENT); } + public void testVariousAddsViaShard2NonLeaderClient() throws Exception { testVariousAdds(S_TWO_NON_LEADER_CLIENT); } + public void testVariousAddsViaNoCollectionClient() throws Exception { testVariousAdds(NO_COLLECTION_CLIENT); } protected static void testVariousAdds(SolrClient client) throws Exception { assertNotNull("client not initialized", client); - + UpdateResponse rsp = null; // 2 docs that are both on shard1, the first one should fail - for (int maxErrors : new int[] { -1, 2, 47, 10 }) { + for (int maxErrors : new int[] {-1, 2, 47, 10}) { // regardless of which of these maxErrors values we use, behavior should be the same... 
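The maxErrors loop that resumes below re-runs one failing add plus one good add under several settings ({-1, 2, 47, 10}). With a single bad document every setting behaves identically, since -1 is treated as unlimited and any positive limit is above one; the only visible difference is the effective value echoed back under the "maxErrors" response-header key. A condensed sketch of that contract, with placeholder collection and chain names as before:

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.request.UpdateRequest;
    import org.apache.solr.client.solrj.response.UpdateResponse;
    import org.apache.solr.common.SolrInputDocument;

    class MaxErrorsSketch {
      static void addWithLimit(SolrClient client, int maxErrors) throws Exception {
        UpdateRequest req = new UpdateRequest();
        req.setParam("update.chain", "tolerant-chain-max-errors-10");
        req.setParam("maxErrors", String.valueOf(maxErrors));
        req.setParam("commit", "true");

        SolrInputDocument bad = new SolrInputDocument();
        bad.setField("id", "abc!42");
        bad.setField("foo_i", "bogus_value"); // not an int -> tolerated failure
        SolrInputDocument good = new SolrInputDocument();
        good.setField("id", "abc!666");
        good.setField("foo_i", "1976");
        req.add(bad).add(good);

        UpdateResponse rsp = req.process(client, "test_col");
        // status is 0; the effective limit is echoed back for inspection
        System.out.println(rsp.getResponseHeader().get("maxErrors"));
      }
    }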
- rsp = update(params("update.chain", "tolerant-chain-max-errors-10", - "maxErrors", ""+maxErrors, - "commit", "true"), - doc(f("id", S_ONE_PRE + "42"), f("foo_i", "bogus_value")), - doc(f("id", S_ONE_PRE + "666"), f("foo_i", "1976"))).process(client); - + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-10", + "maxErrors", "" + maxErrors, + "commit", "true"), + doc(f("id", S_ONE_PRE + "42"), f("foo_i", "bogus_value")), + doc(f("id", S_ONE_PRE + "666"), f("foo_i", "1976"))) + .process(client); + assertEquals(0, rsp.getStatus()); assertUpdateTolerantAddErrors("single shard, 1st doc should fail", rsp, S_ONE_PRE + "42"); assertEquals(0, client.commit().getStatus()); @@ -421,14 +521,18 @@ protected static void testVariousAdds(SolrClient client) throws Exception { // ...only diff should be that we get an accurate report of the effective maxErrors assertEquals(maxErrors, rsp.getResponseHeader().get("maxErrors")); } - + // 2 docs that are both on shard1, the second one should fail - - rsp = update(params("update.chain", "tolerant-chain-max-errors-not-set", - "commit", "true"), - doc(f("id", S_ONE_PRE + "55"), f("foo_i", "1976")), - doc(f("id", S_ONE_PRE + "77"), f("foo_i", "bogus_val"))).process(client); - + + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-not-set", + "commit", "true"), + doc(f("id", S_ONE_PRE + "55"), f("foo_i", "1976")), + doc(f("id", S_ONE_PRE + "77"), f("foo_i", "bogus_val"))) + .process(client); + assertEquals(0, rsp.getStatus()); assertUpdateTolerantAddErrors("single shard, 2nd doc should fail", rsp, S_ONE_PRE + "77"); assertQueryDocIds(client, false, S_ONE_PRE + "77"); @@ -440,25 +544,33 @@ protected static void testVariousAdds(SolrClient client) throws Exception { assertEquals(0, client.deleteByQuery("*:*").getStatus()); // 2 docs on 2 diff shards, first of which should fail - - rsp = update(params("update.chain", "tolerant-chain-max-errors-10", - "commit", "true"), - doc(f("id", S_ONE_PRE + "42"), f("foo_i", "bogus_value")), - doc(f("id", S_TWO_PRE + "666"), f("foo_i", "1976"))).process(client); - + + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-10", + "commit", "true"), + doc(f("id", S_ONE_PRE + "42"), f("foo_i", "bogus_value")), + doc(f("id", S_TWO_PRE + "666"), f("foo_i", "1976"))) + .process(client); + assertEquals(0, rsp.getStatus()); assertUpdateTolerantAddErrors("two shards, 1st doc should fail", rsp, S_ONE_PRE + "42"); assertEquals(0, client.commit().getStatus()); assertQueryDocIds(client, false, S_ONE_PRE + "42"); assertQueryDocIds(client, true, S_TWO_PRE + "666"); - + // 2 docs on 2 diff shards, second of which should fail - rsp = update(params("update.chain", "tolerant-chain-max-errors-10", - "commit", "true"), - doc(f("id", S_ONE_PRE + "55"), f("foo_i", "1976")), - doc(f("id", S_TWO_PRE + "77"), f("foo_i", "bogus_val"))).process(client); - + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-10", + "commit", "true"), + doc(f("id", S_ONE_PRE + "55"), f("foo_i", "1976")), + doc(f("id", S_TWO_PRE + "77"), f("foo_i", "bogus_val"))) + .process(client); + assertEquals(0, rsp.getStatus()); assertUpdateTolerantAddErrors("two shards, 2nd doc should fail", rsp, S_TWO_PRE + "77"); assertQueryDocIds(client, false, S_TWO_PRE + "77"); @@ -468,111 +580,145 @@ protected static void testVariousAdds(SolrClient client) throws Exception { assertEquals(0, client.deleteByQuery("*:*").getStatus()); // many docs from diff shards, 1 from each shard should fail - - rsp = 
update(params("update.chain", "tolerant-chain-max-errors-10", - "commit", "true"), - doc(f("id", S_ONE_PRE + "11")), - doc(f("id", S_TWO_PRE + "21")), - doc(f("id", S_ONE_PRE + "12")), - doc(f("id", S_TWO_PRE + "22"), f("foo_i", "bogus_val")), - doc(f("id", S_ONE_PRE + "13")), - doc(f("id", S_TWO_PRE + "23")), - doc(f("id", S_ONE_PRE + "14")), - doc(f("id", S_TWO_PRE + "24")), - doc(f("id", S_ONE_PRE + "15"), f("foo_i", "bogus_val")), - doc(f("id", S_TWO_PRE + "25")), - doc(f("id", S_ONE_PRE + "16")), - doc(f("id", S_TWO_PRE + "26"))).process(client); - + + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-10", + "commit", "true"), + doc(f("id", S_ONE_PRE + "11")), + doc(f("id", S_TWO_PRE + "21")), + doc(f("id", S_ONE_PRE + "12")), + doc(f("id", S_TWO_PRE + "22"), f("foo_i", "bogus_val")), + doc(f("id", S_ONE_PRE + "13")), + doc(f("id", S_TWO_PRE + "23")), + doc(f("id", S_ONE_PRE + "14")), + doc(f("id", S_TWO_PRE + "24")), + doc(f("id", S_ONE_PRE + "15"), f("foo_i", "bogus_val")), + doc(f("id", S_TWO_PRE + "25")), + doc(f("id", S_ONE_PRE + "16")), + doc(f("id", S_TWO_PRE + "26"))) + .process(client); + assertEquals(0, rsp.getStatus()); - assertUpdateTolerantAddErrors("many docs, 1 from each shard should fail", rsp, - S_ONE_PRE + "15", - S_TWO_PRE + "22"); + assertUpdateTolerantAddErrors( + "many docs, 1 from each shard should fail", rsp, S_ONE_PRE + "15", S_TWO_PRE + "22"); assertQueryDocIds(client, false, S_TWO_PRE + "22", S_ONE_PRE + "15"); - assertQueryDocIds(client, true, - S_ONE_PRE + "11", S_TWO_PRE + "21", S_ONE_PRE + "12", - S_ONE_PRE + "13", S_TWO_PRE + "23", S_ONE_PRE + "14", S_TWO_PRE + "24", - S_TWO_PRE + "25", S_ONE_PRE + "16", S_TWO_PRE + "26"); + assertQueryDocIds( + client, + true, + S_ONE_PRE + "11", + S_TWO_PRE + "21", + S_ONE_PRE + "12", + S_ONE_PRE + "13", + S_TWO_PRE + "23", + S_ONE_PRE + "14", + S_TWO_PRE + "24", + S_TWO_PRE + "25", + S_ONE_PRE + "16", + S_TWO_PRE + "26"); // clean slate assertEquals(0, client.deleteByQuery("*:*").getStatus()); // many docs from diff shards, 1 from each shard should fail and 1 w/o uniqueKey - - rsp = update(params("update.chain", "tolerant-chain-max-errors-10", - "commit", "true"), - doc(f("id", S_ONE_PRE + "11")), - doc(f("id", S_TWO_PRE + "21")), - doc(f("id", S_ONE_PRE + "12")), - doc(f("id", S_TWO_PRE + "22"), f("foo_i", "bogus_val")), - doc(f("id", S_ONE_PRE + "13")), - doc(f("id", S_TWO_PRE + "23")), - doc(f("foo_i", "42")), // no "id" - doc(f("id", S_ONE_PRE + "14")), - doc(f("id", S_TWO_PRE + "24")), - doc(f("id", S_ONE_PRE + "15"), f("foo_i", "bogus_val")), - doc(f("id", S_TWO_PRE + "25")), - doc(f("id", S_ONE_PRE + "16")), - doc(f("id", S_TWO_PRE + "26"))).process(client); - + + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-10", + "commit", "true"), + doc(f("id", S_ONE_PRE + "11")), + doc(f("id", S_TWO_PRE + "21")), + doc(f("id", S_ONE_PRE + "12")), + doc(f("id", S_TWO_PRE + "22"), f("foo_i", "bogus_val")), + doc(f("id", S_ONE_PRE + "13")), + doc(f("id", S_TWO_PRE + "23")), + doc(f("foo_i", "42")), // no "id" + doc(f("id", S_ONE_PRE + "14")), + doc(f("id", S_TWO_PRE + "24")), + doc(f("id", S_ONE_PRE + "15"), f("foo_i", "bogus_val")), + doc(f("id", S_TWO_PRE + "25")), + doc(f("id", S_ONE_PRE + "16")), + doc(f("id", S_TWO_PRE + "26"))) + .process(client); + assertEquals(0, rsp.getStatus()); - assertUpdateTolerantAddErrors("many docs, 1 from each shard (+ no id) should fail", rsp, - S_ONE_PRE + "15", - "(unknown)", - S_TWO_PRE + "22"); + assertUpdateTolerantAddErrors( + "many docs, 
1 from each shard (+ no id) should fail", + rsp, + S_ONE_PRE + "15", + "(unknown)", + S_TWO_PRE + "22"); assertQueryDocIds(client, false, S_TWO_PRE + "22", S_ONE_PRE + "15"); - assertQueryDocIds(client, true, - S_ONE_PRE + "11", S_TWO_PRE + "21", S_ONE_PRE + "12", - S_ONE_PRE + "13", S_TWO_PRE + "23", S_ONE_PRE + "14", S_TWO_PRE + "24", - S_TWO_PRE + "25", S_ONE_PRE + "16", S_TWO_PRE + "26"); + assertQueryDocIds( + client, + true, + S_ONE_PRE + "11", + S_TWO_PRE + "21", + S_ONE_PRE + "12", + S_ONE_PRE + "13", + S_TWO_PRE + "23", + S_ONE_PRE + "14", + S_TWO_PRE + "24", + S_TWO_PRE + "25", + S_ONE_PRE + "16", + S_TWO_PRE + "26"); // clean slate assertEquals(0, client.deleteByQuery("*:*").getStatus()); - + // many docs from diff shards, more then 10 (total) should fail - SolrException e = expectThrows(SolrException.class, - "did not get a top level exception when more then 10 docs failed", - () -> update(params("update.chain", "tolerant-chain-max-errors-10", "commit", "true"), - doc(f("id", S_ONE_PRE + "11")), - doc(f("id", S_TWO_PRE + "21"), f("foo_i", "bogus_val")), - doc(f("id", S_ONE_PRE + "12")), - doc(f("id", S_TWO_PRE + "22"), f("foo_i", "bogus_val")), - doc(f("id", S_ONE_PRE + "13")), - doc(f("id", S_TWO_PRE + "23"), f("foo_i", "bogus_val")), - doc(f("id", S_ONE_PRE + "14"), f("foo_i", "bogus_val")), - doc(f("id", S_TWO_PRE + "24")), - doc(f("id", S_ONE_PRE + "15"), f("foo_i", "bogus_val")), - doc(f("id", S_TWO_PRE + "25")), - doc(f("id", S_ONE_PRE + "16"), f("foo_i", "bogus_val")), - doc(f("id", S_TWO_PRE + "26"), f("foo_i", "bogus_val")), - doc(f("id", S_ONE_PRE + "17")), - doc(f("id", S_TWO_PRE + "27")), - doc(f("id", S_ONE_PRE + "18"), f("foo_i", "bogus_val")), - doc(f("id", S_TWO_PRE + "28"), f("foo_i", "bogus_val")), - doc(f("id", S_ONE_PRE + "19"), f("foo_i", "bogus_val")), - doc(f("id", S_TWO_PRE + "29"), f("foo_i", "bogus_val")), - doc(f("id", S_ONE_PRE + "10")), // may be skipped, more then 10 fails - doc(f("id", S_TWO_PRE + "20")) // may be skipped, more then 10 fails - ).process(client) - ); + SolrException e = + expectThrows( + SolrException.class, + "did not get a top level exception when more then 10 docs failed", + () -> + update( + params("update.chain", "tolerant-chain-max-errors-10", "commit", "true"), + doc(f("id", S_ONE_PRE + "11")), + doc(f("id", S_TWO_PRE + "21"), f("foo_i", "bogus_val")), + doc(f("id", S_ONE_PRE + "12")), + doc(f("id", S_TWO_PRE + "22"), f("foo_i", "bogus_val")), + doc(f("id", S_ONE_PRE + "13")), + doc(f("id", S_TWO_PRE + "23"), f("foo_i", "bogus_val")), + doc(f("id", S_ONE_PRE + "14"), f("foo_i", "bogus_val")), + doc(f("id", S_TWO_PRE + "24")), + doc(f("id", S_ONE_PRE + "15"), f("foo_i", "bogus_val")), + doc(f("id", S_TWO_PRE + "25")), + doc(f("id", S_ONE_PRE + "16"), f("foo_i", "bogus_val")), + doc(f("id", S_TWO_PRE + "26"), f("foo_i", "bogus_val")), + doc(f("id", S_ONE_PRE + "17")), + doc(f("id", S_TWO_PRE + "27")), + doc(f("id", S_ONE_PRE + "18"), f("foo_i", "bogus_val")), + doc(f("id", S_TWO_PRE + "28"), f("foo_i", "bogus_val")), + doc(f("id", S_ONE_PRE + "19"), f("foo_i", "bogus_val")), + doc(f("id", S_TWO_PRE + "29"), f("foo_i", "bogus_val")), + doc(f("id", S_ONE_PRE + "10")), // may be skipped, more then 10 fails + doc(f("id", S_TWO_PRE + "20")) // may be skipped, more then 10 fails + ) + .process(client)); { // we can't make any reliable assertions about the error message, because // it varies based on how the request was routed -- see SOLR-8830 - assertEquals("not the type of error we were expecting (" + e.code() + "): " + 
e.toString(), - // NOTE: we always expect a 400 because we know that's what we would get from these types of errors - // on a single node setup -- a 5xx type error isn't something we should have triggered - 400, e.code()); + assertEquals( + "not the type of error we were expecting (" + e.code() + "): " + e.toString(), + // NOTE: we always expect a 400 because we know that's what we would get from these types + // of errors on a single node setup -- a 5xx type error isn't something we should have + // triggered + 400, + e.code()); // verify that the Exceptions metadata can tell us what failed. NamedList remoteErrMetadata = e.getMetadata(); assertNotNull("no metadata in: " + e.toString(), remoteErrMetadata); - Set actualKnownErrs - = new LinkedHashSet(remoteErrMetadata.size()); + Set actualKnownErrs = + new LinkedHashSet(remoteErrMetadata.size()); int actualKnownErrsCount = 0; for (int i = 0; i < remoteErrMetadata.size(); i++) { ToleratedUpdateError err = - ToleratedUpdateError.parseMetadataIfToleratedUpdateError(remoteErrMetadata.getName(i), - remoteErrMetadata.getVal(i)); + ToleratedUpdateError.parseMetadataIfToleratedUpdateError( + remoteErrMetadata.getName(i), remoteErrMetadata.getVal(i)); if (null == err) { // some metadata unrelated to this update processor continue; @@ -580,75 +726,104 @@ protected static void testVariousAdds(SolrClient client) throws Exception { actualKnownErrsCount++; actualKnownErrs.add(err); } - assertEquals("wrong number of errors in metadata: " + remoteErrMetadata.toString(), - 11, actualKnownErrsCount); - assertEquals("at least one dup error in metadata: " + remoteErrMetadata.toString(), - actualKnownErrsCount, actualKnownErrs.size()); + assertEquals( + "wrong number of errors in metadata: " + remoteErrMetadata.toString(), + 11, + actualKnownErrsCount); + assertEquals( + "at least one dup error in metadata: " + remoteErrMetadata.toString(), + actualKnownErrsCount, + actualKnownErrs.size()); for (ToleratedUpdateError err : actualKnownErrs) { - assertEquals("only expected type of error is ADD: " + err, - CmdType.ADD, err.getType()); - assertTrue("failed err msg didn't match expected value: " + err, + assertEquals("only expected type of error is ADD: " + err, CmdType.ADD, err.getType()); + assertTrue( + "failed err msg didn't match expected value: " + err, err.getMessage().contains("bogus_val")); } } assertEquals(0, client.commit().getStatus()); // need to force since update didn't finish - assertQueryDocIds(client, false - // explicitly failed - , S_TWO_PRE + "21", S_TWO_PRE + "22", S_TWO_PRE + "23", S_ONE_PRE + "14" - , S_ONE_PRE + "15", S_ONE_PRE + "16", S_TWO_PRE + "26", S_ONE_PRE + "18" - , S_TWO_PRE + "28", S_ONE_PRE + "19", S_TWO_PRE + "29" - // - // // we can't assert for sure these docs were skipped - // // depending on shard we hit, they may have been added async before errors were exceeded - // , S_ONE_PRE + "10", S_TWO_PRE + "20" // skipped - ); - assertQueryDocIds(client, true, - S_ONE_PRE + "11", S_ONE_PRE + "12", S_ONE_PRE + "13", S_TWO_PRE + "24", - S_TWO_PRE + "25", S_ONE_PRE + "17", S_TWO_PRE + "27"); - + assertQueryDocIds( + client, + false + // explicitly failed + , + S_TWO_PRE + "21", + S_TWO_PRE + "22", + S_TWO_PRE + "23", + S_ONE_PRE + "14", + S_ONE_PRE + "15", + S_ONE_PRE + "16", + S_TWO_PRE + "26", + S_ONE_PRE + "18", + S_TWO_PRE + "28", + S_ONE_PRE + "19", + S_TWO_PRE + "29" + // + // // we can't assert for sure these docs were skipped + // // depending on shard we hit, they may have been added async before errors were exceeded + // , 
S_ONE_PRE + "10", S_TWO_PRE + "20" // skipped + ); + assertQueryDocIds( + client, + true, + S_ONE_PRE + "11", + S_ONE_PRE + "12", + S_ONE_PRE + "13", + S_TWO_PRE + "24", + S_TWO_PRE + "25", + S_ONE_PRE + "17", + S_TWO_PRE + "27"); + // clean slate assertEquals(0, client.deleteByQuery("*:*").getStatus()); - - // many docs from diff shards, more then 10 from a single shard (two) should fail - e = expectThrows(SolrException.class, "did not get a top level exception when more then 10 docs failed", - () -> { - ArrayList docs = new ArrayList(30); - docs.add(doc(f("id", S_ONE_PRE + "z"))); - docs.add(doc(f("id", S_TWO_PRE + "z"))); - docs.add(doc(f("id", S_ONE_PRE + "y"))); - docs.add(doc(f("id", S_TWO_PRE + "y"))); - for (int i = 0; i < 11; i++) { - docs.add(doc(f("id", S_ONE_PRE + i))); - docs.add(doc(f("id", S_TWO_PRE + i), f("foo_i", "bogus_val"))); - } - docs.add(doc(f("id", S_ONE_PRE + "x"))); // may be skipped, more then 10 fails - docs.add(doc(f("id", S_TWO_PRE + "x"))); // may be skipped, more then 10 fails + // many docs from diff shards, more then 10 from a single shard (two) should fail - update(params("update.chain", "tolerant-chain-max-errors-10", - "commit", "true"), - docs.toArray(new SolrInputDocument[docs.size()])).process(client); - }); + e = + expectThrows( + SolrException.class, + "did not get a top level exception when more then 10 docs failed", + () -> { + ArrayList docs = new ArrayList(30); + docs.add(doc(f("id", S_ONE_PRE + "z"))); + docs.add(doc(f("id", S_TWO_PRE + "z"))); + docs.add(doc(f("id", S_ONE_PRE + "y"))); + docs.add(doc(f("id", S_TWO_PRE + "y"))); + for (int i = 0; i < 11; i++) { + docs.add(doc(f("id", S_ONE_PRE + i))); + docs.add(doc(f("id", S_TWO_PRE + i), f("foo_i", "bogus_val"))); + } + docs.add(doc(f("id", S_ONE_PRE + "x"))); // may be skipped, more then 10 fails + docs.add(doc(f("id", S_TWO_PRE + "x"))); // may be skipped, more then 10 fails + + update( + params("update.chain", "tolerant-chain-max-errors-10", "commit", "true"), + docs.toArray(new SolrInputDocument[docs.size()])) + .process(client); + }); { // we can't make any reliable assertions about the error message, because // it varies based on how the request was routed -- see SOLR-8830 - assertEquals("not the type of error we were expecting ("+e.code()+"): " + e.toString(), - // NOTE: we always expect a 400 because we know that's what we would get from these types of errors - // on a single node setup -- a 5xx type error isn't something we should have triggered - 400, e.code()); + assertEquals( + "not the type of error we were expecting (" + e.code() + "): " + e.toString(), + // NOTE: we always expect a 400 because we know that's what we would get from these types + // of errors on a single node setup -- a 5xx type error isn't something we should have + // triggered + 400, + e.code()); // verify that the Exceptions metadata can tell us what failed. 
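// Aside: the metadata-scanning loop below recurs several times in this test. A hedged sketch of
// the same idiom extracted into a reusable helper -- the helper name is illustrative, not Solr
// API; the ToleratedUpdateError and NamedList calls are exactly the ones this test already uses,
// and the imports (java.util.*, SolrException, NamedList, ToleratedUpdateError) are those of the
// enclosing test class:
static List<ToleratedUpdateError> toleratedErrors(SolrException e) {
  List<ToleratedUpdateError> errs = new ArrayList<>();
  NamedList<String> md = e.getMetadata();
  for (int i = 0; i < md.size(); i++) {
    ToleratedUpdateError err =
        ToleratedUpdateError.parseMetadataIfToleratedUpdateError(md.getName(i), md.getVal(i));
    if (null != err) { // null means this entry is metadata unrelated to this update processor
      errs.add(err);
    }
  }
  return errs;
}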
NamedList remoteErrMetadata = e.getMetadata(); assertNotNull("no metadata in: " + e.toString(), remoteErrMetadata); - Set actualKnownErrs - = new LinkedHashSet(remoteErrMetadata.size()); + Set actualKnownErrs = + new LinkedHashSet(remoteErrMetadata.size()); int actualKnownErrsCount = 0; for (int i = 0; i < remoteErrMetadata.size(); i++) { ToleratedUpdateError err = - ToleratedUpdateError.parseMetadataIfToleratedUpdateError(remoteErrMetadata.getName(i), - remoteErrMetadata.getVal(i)); + ToleratedUpdateError.parseMetadataIfToleratedUpdateError( + remoteErrMetadata.getName(i), remoteErrMetadata.getVal(i)); if (null == err) { // some metadata unrelated to this update processor continue; @@ -656,69 +831,102 @@ protected static void testVariousAdds(SolrClient client) throws Exception { actualKnownErrsCount++; actualKnownErrs.add(err); } - assertEquals("wrong number of errors in metadata: " + remoteErrMetadata.toString(), - 11, actualKnownErrsCount); - assertEquals("at least one dup error in metadata: " + remoteErrMetadata.toString(), - actualKnownErrsCount, actualKnownErrs.size()); + assertEquals( + "wrong number of errors in metadata: " + remoteErrMetadata.toString(), + 11, + actualKnownErrsCount); + assertEquals( + "at least one dup error in metadata: " + remoteErrMetadata.toString(), + actualKnownErrsCount, + actualKnownErrs.size()); for (ToleratedUpdateError err : actualKnownErrs) { - assertEquals("only expected type of error is ADD: " + err, - CmdType.ADD, err.getType()); - assertTrue("failed id had unexpected prefix: " + err, - err.getId().startsWith(S_TWO_PRE)); - assertTrue("failed err msg didn't match expected value: " + err, + assertEquals("only expected type of error is ADD: " + err, CmdType.ADD, err.getType()); + assertTrue("failed id had unexpected prefix: " + err, err.getId().startsWith(S_TWO_PRE)); + assertTrue( + "failed err msg didn't match expected value: " + err, err.getMessage().contains("bogus_val")); } } assertEquals(0, client.commit().getStatus()); // need to force since update didn't finish - assertQueryDocIds(client, true - , S_ONE_PRE + "z", S_ONE_PRE + "y", S_TWO_PRE + "z", S_TWO_PRE + "y" // first - // - , S_ONE_PRE + "0", S_ONE_PRE + "1", S_ONE_PRE + "2", S_ONE_PRE + "3", S_ONE_PRE + "4" - , S_ONE_PRE + "5", S_ONE_PRE + "6", S_ONE_PRE + "7", S_ONE_PRE + "8", S_ONE_PRE + "9" - ); - assertQueryDocIds(client, false - // explicitly failed - , S_TWO_PRE + "0", S_TWO_PRE + "1", S_TWO_PRE + "2", S_TWO_PRE + "3", S_TWO_PRE + "4" - , S_TWO_PRE + "5", S_TWO_PRE + "6", S_TWO_PRE + "7", S_TWO_PRE + "8", S_TWO_PRE + "9" - // - // // we can't assert for sure these docs were skipped - // // depending on shard we hit, they may have been added async before errors were exceeded - // , S_ONE_PRE + "x", S_TWO_PRE + "x", // skipped - ); + assertQueryDocIds( + client, + true, + S_ONE_PRE + "z", + S_ONE_PRE + "y", + S_TWO_PRE + "z", + S_TWO_PRE + "y" // first + // + , + S_ONE_PRE + "0", + S_ONE_PRE + "1", + S_ONE_PRE + "2", + S_ONE_PRE + "3", + S_ONE_PRE + "4", + S_ONE_PRE + "5", + S_ONE_PRE + "6", + S_ONE_PRE + "7", + S_ONE_PRE + "8", + S_ONE_PRE + "9"); + assertQueryDocIds( + client, + false + // explicitly failed + , + S_TWO_PRE + "0", + S_TWO_PRE + "1", + S_TWO_PRE + "2", + S_TWO_PRE + "3", + S_TWO_PRE + "4", + S_TWO_PRE + "5", + S_TWO_PRE + "6", + S_TWO_PRE + "7", + S_TWO_PRE + "8", + S_TWO_PRE + "9" + // + // // we can't assert for sure these docs were skipped + // // depending on shard we hit, they may have been added async before errors were exceeded + // , S_ONE_PRE + "x", 
S_TWO_PRE + "x", // skipped + ); // clean slate assertEquals(0, client.deleteByQuery("*:*").getStatus()); - - // many docs from diff shards, more then 10 don't have any uniqueKey specified - e = expectThrows(SolrException.class, - "did not get a top level exception when more then 10 docs mising uniqueKey", - () -> { - ArrayList docs = new ArrayList(30); - docs.add(doc(f("id", S_ONE_PRE + "z"))); - docs.add(doc(f("id", S_TWO_PRE + "z"))); - docs.add(doc(f("id", S_ONE_PRE + "y"))); - docs.add(doc(f("id", S_TWO_PRE + "y"))); - for (int i = 0; i < 11; i++) { - // no "id" field - docs.add(doc(f("foo_i", "" + i))); - } - docs.add(doc(f("id", S_ONE_PRE + "x"))); // may be skipped, more then 10 fails - docs.add(doc(f("id", S_TWO_PRE + "x"))); // may be skipped, more then 10 fails + // many docs from diff shards, more then 10 don't have any uniqueKey specified - update(params("update.chain", "tolerant-chain-max-errors-10", - "commit", "true"), - docs.toArray(new SolrInputDocument[docs.size()])).process(client); - }); + e = + expectThrows( + SolrException.class, + "did not get a top level exception when more then 10 docs mising uniqueKey", + () -> { + ArrayList docs = new ArrayList(30); + docs.add(doc(f("id", S_ONE_PRE + "z"))); + docs.add(doc(f("id", S_TWO_PRE + "z"))); + docs.add(doc(f("id", S_ONE_PRE + "y"))); + docs.add(doc(f("id", S_TWO_PRE + "y"))); + for (int i = 0; i < 11; i++) { + // no "id" field + docs.add(doc(f("foo_i", "" + i))); + } + docs.add(doc(f("id", S_ONE_PRE + "x"))); // may be skipped, more then 10 fails + docs.add(doc(f("id", S_TWO_PRE + "x"))); // may be skipped, more then 10 fails + + update( + params("update.chain", "tolerant-chain-max-errors-10", "commit", "true"), + docs.toArray(new SolrInputDocument[docs.size()])) + .process(client); + }); { // we can't make any reliable assertions about the error message, because // it varies based on how the request was routed -- see SOLR-8830 - assertEquals("not the type of error we were expecting ("+e.code()+"): " + e.toString(), - // NOTE: we always expect a 400 because we know that's what we would get from these types of errors - // on a single node setup -- a 5xx type error isn't something we should have triggered - 400, e.code()); + assertEquals( + "not the type of error we were expecting (" + e.code() + "): " + e.toString(), + // NOTE: we always expect a 400 because we know that's what we would get from these types + // of errors on a single node setup -- a 5xx type error isn't something we should have + // triggered + 400, + e.code()); // verify that the Exceptions metadata can tell us what failed. 
NamedList remoteErrMetadata = e.getMetadata(); @@ -726,33 +934,33 @@ protected static void testVariousAdds(SolrClient client) throws Exception { int actualKnownErrsCount = 0; for (int i = 0; i < remoteErrMetadata.size(); i++) { ToleratedUpdateError err = - ToleratedUpdateError.parseMetadataIfToleratedUpdateError(remoteErrMetadata.getName(i), - remoteErrMetadata.getVal(i)); + ToleratedUpdateError.parseMetadataIfToleratedUpdateError( + remoteErrMetadata.getName(i), remoteErrMetadata.getVal(i)); if (null == err) { // some metadata unrelated to this update processor continue; } actualKnownErrsCount++; - assertEquals("only expected type of error is ADD: " + err, - CmdType.ADD, err.getType()); - assertTrue("failed id didn't match 'unknown': " + err, - err.getId().contains("unknown")); + assertEquals("only expected type of error is ADD: " + err, CmdType.ADD, err.getType()); + assertTrue("failed id didn't match 'unknown': " + err, err.getId().contains("unknown")); } - assertEquals("wrong number of errors in metadata: " + remoteErrMetadata.toString(), - 11, actualKnownErrsCount); + assertEquals( + "wrong number of errors in metadata: " + remoteErrMetadata.toString(), + 11, + actualKnownErrsCount); } assertEquals(0, client.commit().getStatus()); // need to force since update didn't finish - assertQueryDocIds(client, true - , S_ONE_PRE + "z", S_ONE_PRE + "y", S_TWO_PRE + "z", S_TWO_PRE + "y" // first - // // we can't assert for sure these docs were skipped or added - // // depending on shard we hit, they may have been added async before errors were exceeded - // , S_ONE_PRE + "x", S_TWO_PRE + "x" // skipped - ); + assertQueryDocIds( + client, true, S_ONE_PRE + "z", S_ONE_PRE + "y", S_TWO_PRE + "z", S_TWO_PRE + "y" // first + // // we can't assert for sure these docs were skipped or added + // // depending on shard we hit, they may have been added async before errors were exceeded + // , S_ONE_PRE + "x", S_TWO_PRE + "x" // skipped + ); // clean slate assertEquals(0, client.deleteByQuery("*:*").getStatus()); - + // many docs from diff shards, more then 10 from a single shard (two) should fail but // request should still succeed because of maxErrors=-1 param @@ -767,110 +975,149 @@ protected static void testVariousAdds(SolrClient client) throws Exception { docs.add(doc(f("id", S_TWO_PRE + i), f("foo_i", "bogus_val"))); expectedErrs.add(addErr(S_TWO_PRE + i)); } - docs.add(doc(f("id", S_ONE_PRE + "x"))); - docs.add(doc(f("id", S_TWO_PRE + "x"))); - - rsp = update(params("update.chain", "tolerant-chain-max-errors-10", - "maxErrors", "-1", - "commit", "true"), - docs.toArray(new SolrInputDocument[docs.size()])).process(client); - assertUpdateTolerantErrors("many docs from shard2 fail, but req should succeed", rsp, - expectedErrs.toArray(new ExpectedErr[expectedErrs.size()])); - assertQueryDocIds(client, true - , S_ONE_PRE + "z", S_ONE_PRE + "y", S_TWO_PRE + "z", S_TWO_PRE + "y" // first - , S_ONE_PRE + "x", S_TWO_PRE + "x" // later - ); - + docs.add(doc(f("id", S_ONE_PRE + "x"))); + docs.add(doc(f("id", S_TWO_PRE + "x"))); + + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-10", + "maxErrors", "-1", + "commit", "true"), + docs.toArray(new SolrInputDocument[docs.size()])) + .process(client); + assertUpdateTolerantErrors( + "many docs from shard2 fail, but req should succeed", + rsp, + expectedErrs.toArray(new ExpectedErr[expectedErrs.size()])); + assertQueryDocIds( + client, + true, + S_ONE_PRE + "z", + S_ONE_PRE + "y", + S_TWO_PRE + "z", + S_TWO_PRE + "y" // first + , + S_ONE_PRE + 
"x", + S_TWO_PRE + "x" // later + ); } // public void testAddsMixedWithDeletesViaCloudClient() throws Exception { testAddsMixedWithDeletes(CLOUD_CLIENT); } + public void testAddsMixedWithDeletesViaShard1LeaderClient() throws Exception { testAddsMixedWithDeletes(S_ONE_LEADER_CLIENT); } + public void testAddsMixedWithDeletesViaShard2LeaderClient() throws Exception { testAddsMixedWithDeletes(S_TWO_LEADER_CLIENT); } + public void testAddsMixedWithDeletesViaShard1NonLeaderClient() throws Exception { testAddsMixedWithDeletes(S_ONE_NON_LEADER_CLIENT); } + public void testAddsMixedWithDeletesViaShard2NonLeaderClient() throws Exception { testAddsMixedWithDeletes(S_TWO_NON_LEADER_CLIENT); } + public void testAddsMixedWithDeletesViaNoCollectionClient() throws Exception { testAddsMixedWithDeletes(NO_COLLECTION_CLIENT); } - + protected static void testAddsMixedWithDeletes(SolrClient client) throws Exception { assertNotNull("client not initialized", client); // 3 doc ids, exactly one on shard1 - final String docId1 = S_ONE_PRE + "42"; + final String docId1 = S_ONE_PRE + "42"; final String docId21 = S_TWO_PRE + "42"; final String docId22 = S_TWO_PRE + "666"; - + UpdateResponse rsp = null; - + // add 2 docs, one to each shard - rsp = update(params("update.chain", "tolerant-chain-max-errors-10", - "commit", "true"), - doc(f("id", docId1), f("foo_i", "2001")), - doc(f("id", docId21), f("foo_i", "1976"))).process(client); + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-10", + "commit", "true"), + doc(f("id", docId1), f("foo_i", "2001")), + doc(f("id", docId21), f("foo_i", "1976"))) + .process(client); assertEquals(0, rsp.getStatus()); // add failure on shard2, delete failure on shard1 - rsp = update(params("update.chain", "tolerant-chain-max-errors-10", - "commit", "true"), - doc(f("id", docId22), f("foo_i", "not_a_num"))) - .deleteById(docId1, -1L) - .process(client); + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-10", + "commit", "true"), + doc(f("id", docId22), f("foo_i", "not_a_num"))) + .deleteById(docId1, -1L) + .process(client); assertEquals(0, rsp.getStatus()); - assertUpdateTolerantErrors("shard2 add fail, shard1 delI fail", rsp, - delIErr(docId1, "version conflict"), - addErr(docId22,"not_a_num")); - + assertUpdateTolerantErrors( + "shard2 add fail, shard1 delI fail", + rsp, + delIErr(docId1, "version conflict"), + addErr(docId22, "not_a_num")); + // attempt a request containing 4 errors of various types (add, delI, delQ) for (String maxErrors : new String[] {"4", "-1", "100"}) { // for all of these maxErrors values, the overall request should still succeed - rsp = update(params("update.chain", "tolerant-chain-max-errors-10", - "maxErrors", maxErrors, - "commit", "true"), - doc(f("id", docId22), f("foo_i", "bogus_val"))) - .deleteById(docId1, -1L) - .deleteByQuery("malformed:[") - .deleteById(docId21, -1L) - .process(client); - + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-10", + "maxErrors", maxErrors, + "commit", "true"), + doc(f("id", docId22), f("foo_i", "bogus_val"))) + .deleteById(docId1, -1L) + .deleteByQuery("malformed:[") + .deleteById(docId21, -1L) + .process(client); + assertEquals(0, rsp.getStatus()); - assertUpdateTolerantErrors("failed variety of updates", rsp, - delIErr(docId1, "version conflict"), - delQErr("malformed:[", "SyntaxError"), - delIErr(docId21,"version conflict"), - addErr(docId22,"bogus_val")); + assertUpdateTolerantErrors( + "failed variety of updates", + rsp, + delIErr(docId1, "version 
conflict"), + delQErr("malformed:[", "SyntaxError"), + delIErr(docId21, "version conflict"), + addErr(docId22, "bogus_val")); } - + // attempt a request containing 4 errors of various types (add, delI, delQ) .. 1 too many - SolrException e = expectThrows(SolrException.class, - "did not get a top level exception when more then 4 updates failed", - () -> update(params("update.chain", "tolerant-chain-max-errors-10", - "maxErrors", "3", - "commit", "true"), - doc(f("id", docId22), f("foo_i", "bogus_val"))) - .deleteById(docId1, -1L) - .deleteByQuery("malformed:[") - .deleteById(docId21, -1L) - .process(client) - ); + SolrException e = + expectThrows( + SolrException.class, + "did not get a top level exception when more then 4 updates failed", + () -> + update( + params( + "update.chain", + "tolerant-chain-max-errors-10", + "maxErrors", + "3", + "commit", + "true"), + doc(f("id", docId22), f("foo_i", "bogus_val"))) + .deleteById(docId1, -1L) + .deleteByQuery("malformed:[") + .deleteById(docId21, -1L) + .process(client)); { // we can't make any reliable assertions about the error message, because // it varies based on how the request was routed -- see SOLR-8830 // likewise, we can't make a firm(er) assertion about the response code... - assertTrue("not the type of error we were expecting ("+e.code()+"): " + e.toString(), + assertTrue( + "not the type of error we were expecting (" + e.code() + "): " + e.toString(), // should be one these 2 depending on order that the async errors were hit... // on a single node setup -- a 5xx type error isn't something we should have triggered 400 == e.code() || 409 == e.code()); @@ -878,13 +1125,13 @@ protected static void testAddsMixedWithDeletes(SolrClient client) throws Excepti // verify that the Exceptions metadata can tell us what failed. 
NamedList remoteErrMetadata = e.getMetadata(); assertNotNull("no metadata in: " + e.toString(), remoteErrMetadata); - Set actualKnownErrs - = new LinkedHashSet(remoteErrMetadata.size()); + Set actualKnownErrs = + new LinkedHashSet(remoteErrMetadata.size()); int actualKnownErrsCount = 0; for (int i = 0; i < remoteErrMetadata.size(); i++) { ToleratedUpdateError err = - ToleratedUpdateError.parseMetadataIfToleratedUpdateError(remoteErrMetadata.getName(i), - remoteErrMetadata.getVal(i)); + ToleratedUpdateError.parseMetadataIfToleratedUpdateError( + remoteErrMetadata.getName(i), remoteErrMetadata.getVal(i)); if (null == err) { // some metadata unrelated to this update processor continue; @@ -892,10 +1139,14 @@ protected static void testAddsMixedWithDeletes(SolrClient client) throws Excepti actualKnownErrsCount++; actualKnownErrs.add(err); } - assertEquals("wrong number of errors in metadata: " + remoteErrMetadata.toString(), - 4, actualKnownErrsCount); - assertEquals("at least one dup error in metadata: " + remoteErrMetadata.toString(), - actualKnownErrsCount, actualKnownErrs.size()); + assertEquals( + "wrong number of errors in metadata: " + remoteErrMetadata.toString(), + 4, + actualKnownErrsCount); + assertEquals( + "at least one dup error in metadata: " + remoteErrMetadata.toString(), + actualKnownErrsCount, + actualKnownErrs.size()); } // sanity check our 2 existing docs are still here @@ -903,35 +1154,40 @@ protected static void testAddsMixedWithDeletes(SolrClient client) throws Excepti assertQueryDocIds(client, false, docId22); // tolerate some failures along with a DELQ that should succeed - rsp = update(params("update.chain", "tolerant-chain-max-errors-10", - "commit", "true"), - doc(f("id", docId22), f("foo_i", "not_a_num"))) - .deleteById(docId1, -1L) - .deleteByQuery("zot_i:[42 to gibberish...") - .deleteByQuery("foo_i:[50 TO 2000}") - .process(client); + rsp = + update( + params( + "update.chain", "tolerant-chain-max-errors-10", + "commit", "true"), + doc(f("id", docId22), f("foo_i", "not_a_num"))) + .deleteById(docId1, -1L) + .deleteByQuery("zot_i:[42 to gibberish...") + .deleteByQuery("foo_i:[50 TO 2000}") + .process(client); assertEquals(0, rsp.getStatus()); - assertUpdateTolerantErrors("mix fails with one valid DELQ", rsp, - delIErr(docId1, "version conflict"), - delQErr("zot_i:[42 to gibberish..."), - addErr(docId22,"not_a_num")); + assertUpdateTolerantErrors( + "mix fails with one valid DELQ", + rsp, + delIErr(docId1, "version conflict"), + delQErr("zot_i:[42 to gibberish..."), + addErr(docId22, "not_a_num")); // one of our previous docs should have been deleted now assertQueryDocIds(client, true, docId1); assertQueryDocIds(client, false, docId21, docId22); - } - + /** Asserts that the UpdateResponse contains the specified expectedErrs and no others */ - public static void assertUpdateTolerantErrors(String assertionMsgPrefix, - UpdateResponse response, - ExpectedErr... expectedErrs) { + public static void assertUpdateTolerantErrors( + String assertionMsgPrefix, UpdateResponse response, ExpectedErr... 
expectedErrs) { @SuppressWarnings("unchecked") - List> errors = (List>) - response.getResponseHeader().get("errors"); - + List> errors = + (List>) response.getResponseHeader().get("errors"); + assertNotNull(assertionMsgPrefix + ": Null errors: " + response.toString(), errors); - assertEquals(assertionMsgPrefix + ": Num error ids: " + errors.toString(), - expectedErrs.length, errors.size()); + assertEquals( + assertionMsgPrefix + ": Num error ids: " + errors.toString(), + expectedErrs.length, + errors.size()); for (SimpleOrderedMap err : errors) { String assertErrPre = assertionMsgPrefix + ": " + err.toString(); @@ -946,21 +1202,20 @@ public static void assertUpdateTolerantErrors(String assertionMsgPrefix, // inefficient scan, but good nough for the size of sets we're dealing with boolean found = false; for (ExpectedErr expected : expectedErrs) { - if (expected.type.equals(type) && expected.id.equals(id) + if (expected.type.equals(type) + && expected.id.equals(id) && (null == expected.msgSubStr || message.contains(expected.msgSubStr))) { found = true; break; } } assertTrue(assertErrPre + " ... unexpected err in: " + response.toString(), found); - } } - + /** convinience method when the only type of errors you expect are 'add' errors */ - public static void assertUpdateTolerantAddErrors(String assertionMsgPrefix, - UpdateResponse response, - String... errorIdsExpected) { + public static void assertUpdateTolerantAddErrors( + String assertionMsgPrefix, UpdateResponse response, String... errorIdsExpected) { ExpectedErr[] expected = new ExpectedErr[errorIdsExpected.length]; for (int i = 0; i < expected.length; i++) { expected[i] = addErr(errorIdsExpected[i]); @@ -968,28 +1223,30 @@ public static void assertUpdateTolerantAddErrors(String assertionMsgPrefix, assertUpdateTolerantErrors(assertionMsgPrefix, response, expected); } - /** - * Asserts that the specified document ids do/do-not exist in the index, using both the specified client, - * and the CLOUD_CLIENT + /** + * Asserts that the specified document ids do/do-not exist in the index, using both the specified + * client, and the CLOUD_CLIENT */ - public static void assertQueryDocIds(SolrClient client, boolean shouldExist, String... ids) throws Exception { + public static void assertQueryDocIds(SolrClient client, boolean shouldExist, String... ids) + throws Exception { for (String id : ids) { - assertEquals(client.toString() + " should " + (shouldExist ? "" : "not ") + "find id: " + id, - (shouldExist ? 1 : 0), - CLOUD_CLIENT.query(params("q", "{!term f=id}" + id)).getResults().getNumFound()); + assertEquals( + client.toString() + " should " + (shouldExist ? "" : "not ") + "find id: " + id, + (shouldExist ? 1 : 0), + CLOUD_CLIENT.query(params("q", "{!term f=id}" + id)).getResults().getNumFound()); } - if (! CLOUD_CLIENT.equals(client) ) { + if (!CLOUD_CLIENT.equals(client)) { assertQueryDocIds(CLOUD_CLIENT, shouldExist, ids); } } - + public static UpdateRequest update(SolrParams params, SolrInputDocument... docs) { UpdateRequest r = new UpdateRequest(); r.setParams(new ModifiableSolrParams(params)); r.add(Arrays.asList(docs)); return r; } - + public static SolrInputDocument doc(SolrInputField... fields) { SolrInputDocument doc = new SolrInputDocument(); for (SolrInputField f : fields) { @@ -997,7 +1254,7 @@ public static SolrInputDocument doc(SolrInputField... fields) { } return doc; } - + public static SolrInputField f(String fieldName, Object... 
values) { SolrInputField f = new SolrInputField(fieldName); f.setValue(values); @@ -1015,26 +1272,33 @@ public ExpectedErr(String type, String id, String msgSubStr) { this.id = id; this.msgSubStr = msgSubStr; } + public String toString() { - return "type=<"+type+">,id=<"+id+">,msgSubStr=<"+msgSubStr+">"; + return "type=<" + type + ">,id=<" + id + ">,msgSubStr=<" + msgSubStr + ">"; } } + public static ExpectedErr addErr(String id, String msgSubStr) { return new ExpectedErr("ADD", id, msgSubStr); } + public static ExpectedErr delIErr(String id, String msgSubStr) { return new ExpectedErr("DELID", id, msgSubStr); } + public static ExpectedErr delQErr(String id, String msgSubStr) { return new ExpectedErr("DELQ", id, msgSubStr); - } + } + public static ExpectedErr addErr(String id) { return addErr(id, null); } + public static ExpectedErr delIErr(String id) { return delIErr(id, null); } + public static ExpectedErr delQErr(String id) { return delQErr(id, null); - } + } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorRandomCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorRandomCloud.java index d91078fac82..0a365d25b10 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorRandomCloud.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorRandomCloud.java @@ -16,6 +16,15 @@ */ package org.apache.solr.cloud; +import static org.apache.solr.cloud.TestTolerantUpdateProcessorCloud.addErr; +import static org.apache.solr.cloud.TestTolerantUpdateProcessorCloud.assertUpdateTolerantErrors; +import static org.apache.solr.cloud.TestTolerantUpdateProcessorCloud.delIErr; +import static org.apache.solr.cloud.TestTolerantUpdateProcessorCloud.delQErr; +import static org.apache.solr.cloud.TestTolerantUpdateProcessorCloud.f; +import static org.apache.solr.cloud.TestTolerantUpdateProcessorCloud.update; +import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_PARAM; +import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START; + import java.io.File; import java.io.IOException; import java.lang.invoke.MethodHandles; @@ -26,7 +35,6 @@ import java.util.List; import java.util.Map; import java.util.Random; - import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4.SuppressSSL; import org.apache.solr.client.solrj.SolrClient; @@ -48,33 +56,21 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.cloud.TestTolerantUpdateProcessorCloud.addErr; -import static org.apache.solr.cloud.TestTolerantUpdateProcessorCloud.assertUpdateTolerantErrors; -import static org.apache.solr.cloud.TestTolerantUpdateProcessorCloud.delIErr; -import static org.apache.solr.cloud.TestTolerantUpdateProcessorCloud.delQErr; -import static org.apache.solr.cloud.TestTolerantUpdateProcessorCloud.f; -import static org.apache.solr.cloud.TestTolerantUpdateProcessorCloud.update; -import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_PARAM; -import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START; - /** - * Test of TolerantUpdateProcessor using a randomized MiniSolrCloud. - * Reuses some utility methods in {@link TestTolerantUpdateProcessorCloud} + * Test of TolerantUpdateProcessor using a randomized MiniSolrCloud. Reuses some utility methods in + * {@link TestTolerantUpdateProcessorCloud} * - *
<p>
- * NOTE: This test sets up a static instance of MiniSolrCloud with a single collection - * and several clients pointed at specific nodes. These are all re-used across multiple test methods, + *
<p>
NOTE: This test sets up a static instance of MiniSolrCloud with a single collection and + * several clients pointed at specific nodes. These are all re-used across multiple test methods, * and assumes that the state of the cluster is healthy between tests. - *
</p>
- * */ -@SuppressSSL(bugUrl="https://issues.apache.org/jira/browse/SOLR-9182 - causes OOM") +@SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-9182 - causes OOM") public class TestTolerantUpdateProcessorRandomCloud extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final String COLLECTION_NAME = "test_col"; - + /** A basic client for operations at the cloud level, default collection will be set */ private static CloudSolrClient CLOUD_CLIENT; /** one HttpSolrClient for each server */ @@ -82,23 +78,26 @@ public class TestTolerantUpdateProcessorRandomCloud extends SolrCloudTestCase { @BeforeClass public static void createMiniSolrCloudCluster() throws Exception { - + final String configName = "solrCloudCollectionConfig"; - final File configDir = new File(TEST_HOME() + File.separator + "collection1" + File.separator + "conf"); + final File configDir = + new File(TEST_HOME() + File.separator + "collection1" + File.separator + "conf"); final int numShards = TestUtil.nextInt(random(), 2, TEST_NIGHTLY ? 5 : 3); final int repFactor = TestUtil.nextInt(random(), 2, TEST_NIGHTLY ? 5 : 3); // at least one server won't have any replicas final int numServers = 1 + (numShards * repFactor); - log.info("Configuring cluster: servers={}, shards={}, repfactor={}", numServers, numShards, repFactor); - configureCluster(numServers) - .addConfig(configName, configDir.toPath()) - .configure(); - + log.info( + "Configuring cluster: servers={}, shards={}, repfactor={}", + numServers, + numShards, + repFactor); + configureCluster(numServers).addConfig(configName, configDir.toPath()).configure(); + Map collectionProperties = new HashMap<>(); collectionProperties.put("config", "solrconfig-distrib-update-processor-chains.xml"); - collectionProperties.put("schema", "schema15.xml"); // string id + collectionProperties.put("schema", "schema15.xml"); // string id CLOUD_CLIENT = cluster.getSolrClient(); CLOUD_CLIENT.setDefaultCollection(COLLECTION_NAME); @@ -108,28 +107,28 @@ public static void createMiniSolrCloudCluster() throws Exception { .process(CLOUD_CLIENT); cluster.waitForActiveCollection(COLLECTION_NAME, numShards, numShards * repFactor); - + if (NODE_CLIENTS != null) { for (HttpSolrClient client : NODE_CLIENTS) { client.close(); } } NODE_CLIENTS = new ArrayList(numServers); - + for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { URL jettyURL = jetty.getBaseUrl(); NODE_CLIENTS.add(getHttpSolrClient(jettyURL.toString() + "/" + COLLECTION_NAME + "/")); } assertEquals(numServers, NODE_CLIENTS.size()); - } - + @Before private void deleteAllDocs() throws Exception { - assertEquals(0, update(params("commit","true")).deleteByQuery("*:*").process(CLOUD_CLIENT).getStatus()); + assertEquals( + 0, update(params("commit", "true")).deleteByQuery("*:*").process(CLOUD_CLIENT).getStatus()); assertEquals("index should be empty", 0L, countDocs(CLOUD_CLIENT)); } - + @AfterClass public static void afterClass() throws IOException { if (NODE_CLIENTS != null) { @@ -143,25 +142,28 @@ public static void afterClass() throws IOException { } CLOUD_CLIENT = null; } - + public void testRandomUpdates() throws Exception { final int maxDocId = atLeast(10000); - final BitSet expectedDocIds = new BitSet(maxDocId+1); - + final BitSet expectedDocIds = new BitSet(maxDocId + 1); + final int numIters = atLeast(50); for (int i = 0; i < numIters; i++) { log.info("BEGIN ITER #{}", i); - - final UpdateRequest req = update(params("maxErrors","-1", - 
"update.chain", "tolerant-chain-max-errors-10")); + + final UpdateRequest req = + update( + params( + "maxErrors", "-1", + "update.chain", "tolerant-chain-max-errors-10")); final int numCmds = TestUtil.nextInt(random(), 1, 20); final List expectedErrors = new ArrayList(numCmds); int expectedErrorsCount = 0; // it's ambigious/confusing which order mixed DELQ + ADD (or ADD and DELI for the same ID) // in the same request wll be processed by various clients, so we keep things simple // and ensure that no single doc Id is affected by more then one command in the same request - final BitSet docsAffectedThisRequest = new BitSet(maxDocId+1); + final BitSet docsAffectedThisRequest = new BitSet(maxDocId + 1); for (int cmdIter = 0; cmdIter < numCmds; cmdIter++) { if ((maxDocId / 2) < docsAffectedThisRequest.cardinality()) { // we're already mucking with more then half the docs in the index @@ -172,27 +174,26 @@ public void testRandomUpdates() throws Exception { if (causeError) { expectedErrorsCount++; } - + if (random().nextBoolean()) { // add a doc String id = null; SolrInputDocument doc = null; if (causeError && (0 == TestUtil.nextInt(random(), 0, 21))) { - doc = doc(f("foo_s","no unique key")); + doc = doc(f("foo_s", "no unique key")); expectedErrors.add(addErr("(unknown)")); } else { final int id_i = randomUnsetBit(random(), docsAffectedThisRequest, maxDocId); docsAffectedThisRequest.set(id_i); - id = "id_"+id_i; + id = "id_" + id_i; if (causeError) { expectedErrors.add(addErr(id)); } else { expectedDocIds.set(id_i); } - final String val = causeError ? "bogus_val" : (""+TestUtil.nextInt(random(), 42, 666)); - doc = doc(f("id",id), - f("id_i", id_i), - f("foo_i", val)); + final String val = + causeError ? "bogus_val" : ("" + TestUtil.nextInt(random(), 42, 666)); + doc = doc(f("id", id), f("id_i", id_i), f("foo_i", val)); } req.add(doc); log.info("ADD: {} = {}", id, doc); @@ -201,7 +202,7 @@ public void testRandomUpdates() throws Exception { if (random().nextBoolean()) { // delete by id final int id_i = randomUnsetBit(random(), docsAffectedThisRequest, maxDocId); - final String id = "id_"+id_i; + final String id = "id_" + id_i; final boolean docExists = expectedDocIds.get(id_i); docsAffectedThisRequest.set(id_i); long versionConstraint = docExists ? 1 : -1; @@ -213,17 +214,18 @@ public void testRandomUpdates() throws Exception { expectedDocIds.clear(id_i); } req.deleteById(id, versionConstraint); - log.info("DEL: {} = {}", id, causeError ? "ERR" : "OK" ); + log.info("DEL: {} = {}", id, causeError ? 
"ERR" : "OK"); } else { // delete by query final String q; if (causeError) { // even though our DBQ is gibberish that's going to fail, record a docId as affected // so that we don't generate the same random DBQ and get redundent errors - // (problematic because of how DUP forwarded DBQs have to have their errors deduped by TUP) + // (problematic because of how DUP forwarded DBQs have to have their errors deduped by + // TUP) final int id_i = randomUnsetBit(random(), docsAffectedThisRequest, maxDocId); docsAffectedThisRequest.set(id_i); - q = "foo_i:["+id_i+" TO ....giberish"; + q = "foo_i:[" + id_i + " TO ....giberish"; expectedErrors.add(delQErr(q)); } else { // ensure our DBQ is only over a range of docs not already affected @@ -231,20 +233,24 @@ public void testRandomUpdates() throws Exception { final int rangeAxis = randomUnsetBit(random(), docsAffectedThisRequest, maxDocId); final int loBound = docsAffectedThisRequest.previousSetBit(rangeAxis); final int hiBound = docsAffectedThisRequest.nextSetBit(rangeAxis); - final int lo = TestUtil.nextInt(random(), loBound+1, rangeAxis); - final int hi = TestUtil.nextInt(random(), rangeAxis, - // bound might be negative if no set bits above axis - (hiBound < 0) ? maxDocId : hiBound-1); + final int lo = TestUtil.nextInt(random(), loBound + 1, rangeAxis); + final int hi = + TestUtil.nextInt( + random(), + rangeAxis, + // bound might be negative if no set bits above axis + (hiBound < 0) ? maxDocId : hiBound - 1); if (lo != hi) { - assert lo < hi : "lo="+lo+" hi="+hi; + assert lo < hi : "lo=" + lo + " hi=" + hi; // NOTE: clear & set are exclusive of hi, so we use "}" in range query accordingly q = "id_i:[" + lo + " TO " + hi + "}"; expectedDocIds.clear(lo, hi); docsAffectedThisRequest.set(lo, hi); } else { // edge case: special case DBQ of one doc - assert (lo == rangeAxis && hi == rangeAxis) : "lo="+lo+" axis="+rangeAxis+" hi="+hi; + assert (lo == rangeAxis && hi == rangeAxis) + : "lo=" + lo + " axis=" + rangeAxis + " hi=" + hi; q = "id_i:[" + lo + " TO " + lo + "]"; // have to be inclusive of both ends expectedDocIds.clear(lo); docsAffectedThisRequest.set(lo); @@ -255,22 +261,28 @@ public void testRandomUpdates() throws Exception { } } } - assertEquals("expected error count sanity check: " + req.toString(), - expectedErrorsCount, expectedErrors.size()); - - final SolrClient client = random().nextBoolean() ? CLOUD_CLIENT - : NODE_CLIENTS.get(TestUtil.nextInt(random(), 0, NODE_CLIENTS.size()-1)); - + assertEquals( + "expected error count sanity check: " + req.toString(), + expectedErrorsCount, + expectedErrors.size()); + + final SolrClient client = + random().nextBoolean() + ? 
CLOUD_CLIENT + : NODE_CLIENTS.get(TestUtil.nextInt(random(), 0, NODE_CLIENTS.size() - 1)); + final UpdateResponse rsp = req.process(client); - assertUpdateTolerantErrors(client.toString() + " => " + expectedErrors.toString(), rsp, - expectedErrors.toArray(new ExpectedErr[expectedErrors.size()])); + assertUpdateTolerantErrors( + client.toString() + " => " + expectedErrors.toString(), + rsp, + expectedErrors.toArray(new ExpectedErr[expectedErrors.size()])); if (log.isInfoEnabled()) { log.info("END ITER #{}, expecting #docs: {}", i, expectedDocIds.cardinality()); } assertEquals("post update commit failed?", 0, CLOUD_CLIENT.commit().getStatus()); - + for (int j = 0; j < 5; j++) { if (expectedDocIds.cardinality() == countDocs(CLOUD_CLIENT)) { break; @@ -281,67 +293,68 @@ public void testRandomUpdates() throws Exception { // check the index contents against our expectations final BitSet actualDocIds = allDocs(CLOUD_CLIENT, maxDocId); - if ( expectedDocIds.cardinality() != actualDocIds.cardinality() ) { - log.error("cardinality mismatch: expected {} BUT actual {}", - expectedDocIds.cardinality(), - actualDocIds.cardinality()); + if (expectedDocIds.cardinality() != actualDocIds.cardinality()) { + log.error( + "cardinality mismatch: expected {} BUT actual {}", + expectedDocIds.cardinality(), + actualDocIds.cardinality()); } final BitSet x = (BitSet) actualDocIds.clone(); x.xor(expectedDocIds); - for (int b = x.nextSetBit(0); 0 <= b; b = x.nextSetBit(b+1)) { + for (int b = x.nextSetBit(0); 0 <= b; b = x.nextSetBit(b + 1)) { final boolean expectedBit = expectedDocIds.get(b); final boolean actualBit = actualDocIds.get(b); log.error("bit #{} mismatch: expected {} BUT actual {}", b, expectedBit, actualBit); } - assertEquals(x.cardinality() + " mismatched bits", - expectedDocIds.cardinality(), actualDocIds.cardinality()); + assertEquals( + x.cardinality() + " mismatched bits", + expectedDocIds.cardinality(), + actualDocIds.cardinality()); } } - /** sanity check that randomUnsetBit works as expected + /** + * sanity check that randomUnsetBit works as expected + * * @see #randomUnsetBit */ public void testSanityRandomUnsetBit() { final int max = atLeast(100); - BitSet bits = new BitSet(max+1); + BitSet bits = new BitSet(max + 1); for (int i = 0; i <= max; i++) { - assertFalse("how is bitset already full? iter="+i+" card="+bits.cardinality()+"/max="+max, - bits.cardinality() == max+1); + assertFalse( + "how is bitset already full? iter=" + i + " card=" + bits.cardinality() + "/max=" + max, + bits.cardinality() == max + 1); final int nextBit = randomUnsetBit(random(), bits, max); - assertTrue("nextBit shouldn't be negative yet: " + nextBit, - 0 <= nextBit); - assertTrue("nextBit can't exceed max: " + nextBit, - nextBit <= max); + assertTrue("nextBit shouldn't be negative yet: " + nextBit, 0 <= nextBit); + assertTrue("nextBit can't exceed max: " + nextBit, nextBit <= max); assertFalse("expect unset: " + nextBit, bits.get(nextBit)); bits.set(nextBit); } - - assertEquals("why isn't bitset full?", max+1, bits.cardinality()); + + assertEquals("why isn't bitset full?", max + 1, bits.cardinality()); final int firstClearBit = bits.nextClearBit(0); - assertTrue("why is there a clear bit? 
= " + firstClearBit, - max < firstClearBit); - assertEquals("why is a bit set above max?", - -1, bits.nextSetBit(max+1)); - - assertEquals("wrong nextBit at end of all iters", -1, - randomUnsetBit(random(), bits, max)); - assertEquals("wrong nextBit at redundant end of all iters", -1, - randomUnsetBit(random(), bits, max)); + assertTrue("why is there a clear bit? = " + firstClearBit, max < firstClearBit); + assertEquals("why is a bit set above max?", -1, bits.nextSetBit(max + 1)); + + assertEquals("wrong nextBit at end of all iters", -1, randomUnsetBit(random(), bits, max)); + assertEquals( + "wrong nextBit at redundant end of all iters", -1, randomUnsetBit(random(), bits, max)); } - + public static SolrInputDocument doc(SolrInputField... fields) { // SolrTestCaseJ4 has same method name, prevents static import from working return TestTolerantUpdateProcessorCloud.doc(fields); } /** - * Given a BitSet, returns a random bit that is currently false, or -1 if all bits are true. - * NOTE: this method is not fair. + * Given a BitSet, returns a random bit that is currently false, or -1 if all bits are true. NOTE: + * this method is not fair. */ public static final int randomUnsetBit(Random r, BitSet bits, final int max) { // NOTE: don't forget, BitSet will grow automatically if not careful - if (bits.cardinality() == max+1) { + if (bits.cardinality() == max + 1) { return -1; } final int candidate = TestUtil.nextInt(r, 0, max); @@ -349,7 +362,11 @@ public static final int randomUnsetBit(Random r, BitSet bits, final int max) { final int lo = bits.previousClearBit(candidate); final int hi = bits.nextClearBit(candidate); if (lo < 0 && max < hi) { - fail("how the hell did we not short circut out? card="+bits.cardinality()+"/size="+bits.size()); + fail( + "how the hell did we not short circut out? 
card=" + + bits.cardinality() + + "/size=" + + bits.size()); } else if (lo < 0) { return hi; } else if (max < hi) { @@ -362,28 +379,38 @@ public static final int randomUnsetBit(Random r, BitSet bits, final int max) { /** returns the numFound from a *:* query */ public static final long countDocs(SolrClient c) throws Exception { - return c.query(params("q","*:*","rows","0")).getResults().getNumFound(); + return c.query(params("q", "*:*", "rows", "0")).getResults().getNumFound(); } - /** uses a Cursor to iterate over every doc in the index, recording the 'id_i' value in a BitSet */ - private static final BitSet allDocs(final SolrClient c, final int maxDocIdExpected) throws Exception { - BitSet docs = new BitSet(maxDocIdExpected+1); + /** + * uses a Cursor to iterate over every doc in the index, recording the 'id_i' value in a BitSet + */ + private static final BitSet allDocs(final SolrClient c, final int maxDocIdExpected) + throws Exception { + BitSet docs = new BitSet(maxDocIdExpected + 1); String cursorMark = CURSOR_MARK_START; int docsOnThisPage = Integer.MAX_VALUE; while (0 < docsOnThisPage) { - final SolrParams p = params("q","*:*", - "rows","100", - // note: not numeric, but we don't actual care about the order - "sort", "id asc", - CURSOR_MARK_PARAM, cursorMark); + final SolrParams p = + params( + "q", + "*:*", + "rows", + "100", + // note: not numeric, but we don't actual care about the order + "sort", + "id asc", + CURSOR_MARK_PARAM, + cursorMark); QueryResponse rsp = c.query(p); cursorMark = rsp.getNextCursorMark(); docsOnThisPage = 0; for (SolrDocument doc : rsp.getResults()) { docsOnThisPage++; - int id_i = ((Integer)doc.get("id_i")).intValue(); - assertTrue("found id_i bigger then expected "+maxDocIdExpected+": " + id_i, - id_i <= maxDocIdExpected); + int id_i = ((Integer) doc.get("id_i")).intValue(); + assertTrue( + "found id_i bigger then expected " + maxDocIdExpected + ": " + id_i, + id_i <= maxDocIdExpected); docs.set(id_i); } cursorMark = rsp.getNextCursorMark(); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestWaitForStateWithJettyShutdowns.java b/solr/core/src/test/org/apache/solr/cloud/TestWaitForStateWithJettyShutdowns.java index 1b820a4388c..091555cae26 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestWaitForStateWithJettyShutdowns.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestWaitForStateWithJettyShutdowns.java @@ -17,16 +17,15 @@ package org.apache.solr.cloud; -import java.lang.invoke.MethodHandles; +import static org.apache.solr.cloud.SolrCloudTestCase.clusterShape; +import java.lang.invoke.MethodHandles; import java.util.Set; - import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -34,9 +33,6 @@ import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.SolrNamedThreadFactory; - -import static org.apache.solr.cloud.SolrCloudTestCase.clusterShape; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -45,15 +41,16 @@ public class TestWaitForStateWithJettyShutdowns extends SolrTestCaseJ4 { public void testWaitForStateAfterShutDown() throws Exception { final String col_name = "test_col"; - final MiniSolrCloudCluster cluster = new 
MiniSolrCloudCluster - (1, createTempDir(), buildJettyConfig("/solr")); + final MiniSolrCloudCluster cluster = + new MiniSolrCloudCluster(1, createTempDir(), buildJettyConfig("/solr")); try { log.info("Create our collection"); - CollectionAdminRequest.createCollection(col_name, "_default", 1, 1).process(cluster.getSolrClient()); - + CollectionAdminRequest.createCollection(col_name, "_default", 1, 1) + .process(cluster.getSolrClient()); + log.info("Sanity check that our collection has come online"); cluster.getSolrClient().waitForState(col_name, 30, TimeUnit.SECONDS, clusterShape(1, 1)); - + log.info("Shutdown 1 node"); final JettySolrRunner nodeToStop = cluster.getJettySolrRunner(0); nodeToStop.stop(); @@ -61,11 +58,13 @@ public void testWaitForStateAfterShutDown() throws Exception { cluster.waitForJettyToStop(nodeToStop); // now that we're confident that node has stoped, check if a waitForState - // call will detect the missing replica -- shouldn't need long wait times (we know it's down)... + // call will detect the missing replica -- shouldn't need long wait times (we know it's + // down)... log.info("Now check if waitForState will recognize we already have the exepcted state"); - cluster.getSolrClient().waitForState(col_name, 500, TimeUnit.MILLISECONDS, clusterShape(1, 0)); - - + cluster + .getSolrClient() + .waitForState(col_name, 500, TimeUnit.MILLISECONDS, clusterShape(1, 0)); + } finally { cluster.shutdown(); } @@ -73,45 +72,54 @@ public void testWaitForStateAfterShutDown() throws Exception { public void testWaitForStateBeforeShutDown() throws Exception { final String col_name = "test_col"; - final ExecutorService executor = ExecutorUtil.newMDCAwareFixedThreadPool - (1, new SolrNamedThreadFactory("background_executor")); - final MiniSolrCloudCluster cluster = new MiniSolrCloudCluster - (1, createTempDir(), buildJettyConfig("/solr")); + final ExecutorService executor = + ExecutorUtil.newMDCAwareFixedThreadPool( + 1, new SolrNamedThreadFactory("background_executor")); + final MiniSolrCloudCluster cluster = + new MiniSolrCloudCluster(1, createTempDir(), buildJettyConfig("/solr")); try { log.info("Create our collection"); - CollectionAdminRequest.createCollection(col_name, "_default", 1, 1).process(cluster.getSolrClient()); - - log.info("Sanity check that our collection has come online"); - cluster.getSolrClient().waitForState(col_name, 30, TimeUnit.SECONDS, - SolrCloudTestCase.clusterShape(1, 1)); + CollectionAdminRequest.createCollection(col_name, "_default", 1, 1) + .process(cluster.getSolrClient()); + log.info("Sanity check that our collection has come online"); + cluster + .getSolrClient() + .waitForState(col_name, 30, TimeUnit.SECONDS, SolrCloudTestCase.clusterShape(1, 1)); // HACK implementation detail... // // we know that in the current implementation, waitForState invokes the predicate twice // independently of the current state of the collection and/or wether the predicate succeeds. 
- // If this implementation detail changes, (ie: so that it's only invoked once) - // then this number needs to change -- but the test fundementally depends on the implementation - // calling the predicate at least once, which should also be neccessary for any future impl - // (to verify that it didn't "miss" the state change when creating the watcher) + // If this implementation detail changes, (ie: so that it's only invoked once) then this + // number needs to change -- but the test fundementally depends on the implementation calling + // the predicate at least once, which should also be neccessary for any future impl (to verify + // that it didn't "miss" the state change when creating the watcher) final CountDownLatch latch = new CountDownLatch(2); - - final Future backgroundWaitForState = executor.submit - (() -> { - try { - cluster.getSolrClient().waitForState(col_name, 180, TimeUnit.SECONDS, - new LatchCountingPredicateWrapper(latch, - clusterShape(1, 0))); - } catch (Exception e) { - log.error("background thread got exception", e); - throw new RuntimeException(e); - } - return; - }, null); - + + final Future backgroundWaitForState = + executor.submit( + () -> { + try { + cluster + .getSolrClient() + .waitForState( + col_name, + 180, + TimeUnit.SECONDS, + new LatchCountingPredicateWrapper(latch, clusterShape(1, 0))); + } catch (Exception e) { + log.error("background thread got exception", e); + throw new RuntimeException(e); + } + return; + }, + null); + log.info("Awaiting latch..."); - if (! latch.await(120, TimeUnit.SECONDS)) { - fail("timed out Waiting a ridiculous amount of time for the waitForState latch -- did impl change?"); + if (!latch.await(120, TimeUnit.SECONDS)) { + fail( + "timed out Waiting a ridiculous amount of time for the waitForState latch -- did impl change?"); } log.info("Shutdown 1 node"); @@ -129,25 +137,32 @@ public void testWaitForStateBeforeShutDown() throws Exception { log.error("background waitForState exception", e); throw e; } - + } finally { ExecutorUtil.shutdownAndAwaitTermination(executor); cluster.shutdown(); } } - + public final class LatchCountingPredicateWrapper implements CollectionStatePredicate { private final CountDownLatch latch; private final CollectionStatePredicate inner; - public LatchCountingPredicateWrapper(final CountDownLatch latch, final CollectionStatePredicate inner) { + + public LatchCountingPredicateWrapper( + final CountDownLatch latch, final CollectionStatePredicate inner) { this.latch = latch; this.inner = inner; } + public boolean matches(Set liveNodes, DocCollection collectionState) { final boolean result = inner.matches(liveNodes, collectionState); if (log.isInfoEnabled()) { - log.info("Predicate called: result={}, (pre)latch={}, liveNodes={}, state={}", - result, latch.getCount(), liveNodes, collectionState); + log.info( + "Predicate called: result={}, (pre)latch={}, liveNodes={}, state={}", + result, + latch.getCount(), + liveNodes, + collectionState); } latch.countDown(); return result; diff --git a/solr/core/src/test/org/apache/solr/cloud/TlogReplayBufferedWhileIndexingTest.java b/solr/core/src/test/org/apache/solr/cloud/TlogReplayBufferedWhileIndexingTest.java index 79ce05c9317..4ff1e64e1bd 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TlogReplayBufferedWhileIndexingTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/TlogReplayBufferedWhileIndexingTest.java @@ -28,7 +28,8 @@ * Implementation moved to AbstractTlogReplayBufferedWhileIndexingTestBase as it is also used by HDFS contrib * module tests */ 
-public class TlogReplayBufferedWhileIndexingTest extends AbstractTlogReplayBufferedWhileIndexingTestBase { +public class TlogReplayBufferedWhileIndexingTest + extends AbstractTlogReplayBufferedWhileIndexingTestBase { public TlogReplayBufferedWhileIndexingTest() throws Exception { super(); @@ -39,5 +40,4 @@ public TlogReplayBufferedWhileIndexingTest() throws Exception { public void test() throws Exception { super.test(); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/TriLevelCompositeIdRoutingTest.java b/solr/core/src/test/org/apache/solr/cloud/TriLevelCompositeIdRoutingTest.java index 3629c6a456d..e2c35b58b6c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TriLevelCompositeIdRoutingTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/TriLevelCompositeIdRoutingTest.java @@ -22,7 +22,6 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; - import org.apache.lucene.util.TestUtil; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrDocument; @@ -31,7 +30,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; - public class TriLevelCompositeIdRoutingTest extends ShardRoutingTest { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -41,21 +39,21 @@ public class TriLevelCompositeIdRoutingTest extends ShardRoutingTest { final int MAX_DOC_ID; final int NUM_ADDS; - @BeforeClass public static void beforeTriLevelCompositeIdRoutingTest() throws Exception { // TODO: we use an fs based dir because something // like a ram dir will not recover correctly right now // because tran log will still exist on restart and ram // dir will not persist - perhaps translog can empty on - // start if using an EphemeralDirectoryFactory + // start if using an EphemeralDirectoryFactory useFactory(null); } public TriLevelCompositeIdRoutingTest() { - schemaString = "schema15.xml"; // we need a string id - - sliceCount = TestUtil.nextInt(random(), 1, (TEST_NIGHTLY ? 5 : 3)); // this is the number of *SHARDS* + schemaString = "schema15.xml"; // we need a string id + + sliceCount = + TestUtil.nextInt(random(), 1, (TEST_NIGHTLY ? 5 : 3)); // this is the number of *SHARDS* int replicationFactor = rarely() ? 2 : 1; // replication is not the focus of this test fixShardCount(replicationFactor * sliceCount); // total num cores, one per node @@ -65,7 +63,7 @@ public TriLevelCompositeIdRoutingTest() { NUM_ADDS = atLeast(200); } - @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-13369") + @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-13369") @Test public void test() throws Exception { boolean testFinished = false; @@ -85,48 +83,62 @@ public void test() throws Exception { final int userId = r.nextInt(MAX_USER_ID) + 1; // skew the odds so half the time we have no mask, and half the time we // have an even distribution of 1-16 bits - final int bitMask = Math.max(0, r.nextInt(32)-15); - - String id = "app" + appId + (bitMask <= 0 ? "" : ("/" + bitMask)) - + "!" + "user" + userId - + "!" + "doc" + r.nextInt(MAX_DOC_ID); - + final int bitMask = Math.max(0, r.nextInt(32) - 15); + + String id = + "app" + + appId + + (bitMask <= 0 ? "" : ("/" + bitMask)) + + "!" + + "user" + + userId + + "!" 
+ + "doc" + + r.nextInt(MAX_DOC_ID); + doAddDoc(id); expectedUniqueKeys.add(id); } - + commit(); - + final Map routePrefixMap = new HashMap<>(); final Set actualUniqueKeys = new HashSet<>(); for (int i = 1; i <= sliceCount; i++) { final String shardId = "shard" + i; final Set uniqueKeysInShard = fetchUniqueKeysFromShard(shardId); - + { // sanity check our uniqueKey values aren't duplicated across shards final Set uniqueKeysOnDuplicateShards = new HashSet<>(uniqueKeysInShard); uniqueKeysOnDuplicateShards.retainAll(actualUniqueKeys); - assertEquals(shardId + " contains some uniqueKeys that were already found on a previous shard", - Collections.emptySet(), uniqueKeysOnDuplicateShards); + assertEquals( + shardId + " contains some uniqueKeys that were already found on a previous shard", + Collections.emptySet(), + uniqueKeysOnDuplicateShards); actualUniqueKeys.addAll(uniqueKeysInShard); } - - // foreach uniqueKey, extract it's route prefix and confirm those aren't spread across multiple shards + + // foreach uniqueKey, extract it's route prefix and confirm those aren't spread across + // multiple shards for (String uniqueKey : uniqueKeysInShard) { final String routePrefix = uniqueKey.substring(0, uniqueKey.lastIndexOf('!')); - log.debug("shard( {} ) : uniqueKey( {} ) -> routePrefix( {} )", shardId, uniqueKey, routePrefix); + log.debug( + "shard( {} ) : uniqueKey( {} ) -> routePrefix( {} )", + shardId, + uniqueKey, + routePrefix); assertNotNull("null prefix WTF? " + uniqueKey, routePrefix); - + final String otherShard = routePrefixMap.put(routePrefix, shardId); if (null != otherShard) // if we already had a mapping, make sure it's an earlier doc from our current shard... - assertEquals("routePrefix " + routePrefix + " found in multiple shards", - shardId, otherShard); + assertEquals( + "routePrefix " + routePrefix + " found in multiple shards", shardId, otherShard); } } assertEquals("Docs missing?", expectedUniqueKeys.size(), actualUniqueKeys.size()); - + testFinished = true; } finally { if (!testFinished) { @@ -134,7 +146,7 @@ public void test() throws Exception { } } } - + void doAddDoc(String id) throws Exception { index("id", id); // todo - target diff servers and use cloud clients as well as non-cloud clients @@ -142,7 +154,8 @@ void doAddDoc(String id) throws Exception { private Set fetchUniqueKeysFromShard(final String shardId) throws Exception { // NUM_ADDS is an absolute upper bound on the num docs in the index - QueryResponse rsp = cloudClient.query(params("q", "*:*", "rows", ""+NUM_ADDS, "shards", shardId)); + QueryResponse rsp = + cloudClient.query(params("q", "*:*", "rows", "" + NUM_ADDS, "shards", shardId)); Set uniqueKeys = new HashSet<>(); for (SolrDocument doc : rsp.getResults()) { final String id = (String) doc.get("id"); diff --git a/solr/core/src/test/org/apache/solr/cloud/TrollingIndexReaderFactory.java b/solr/core/src/test/org/apache/solr/cloud/TrollingIndexReaderFactory.java index 553ed6f2327..78e7bf07a0f 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TrollingIndexReaderFactory.java +++ b/solr/core/src/test/org/apache/solr/cloud/TrollingIndexReaderFactory.java @@ -27,7 +27,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Predicate; - import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.ExitableDirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -40,35 +39,38 @@ public class TrollingIndexReaderFactory extends StandardIndexReaderFactory { private static volatile 
Trap trap; - private final static BlockingQueue> lastStacktraces = new LinkedBlockingQueue>(); - private final static long startTime = ManagementFactory.getRuntimeMXBean().getStartTime(); + private static final BlockingQueue> lastStacktraces = + new LinkedBlockingQueue>(); + private static final long startTime = ManagementFactory.getRuntimeMXBean().getStartTime(); private static final int keepStackTraceLines = 20; protected static final int maxTraces = 4; - private static Trap setTrap(Trap troll) { - trap = troll; + trap = troll; return troll; } - - public static abstract class Trap implements Closeable{ + + public abstract static class Trap implements Closeable { protected abstract boolean shouldExit(); + public abstract boolean hasCaught(); + @Override public final void close() throws IOException { setTrap(null); } + @Override public abstract String toString(); - + public static void dumpLastStackTraces(org.slf4j.Logger log) { ArrayList> stacks = new ArrayList<>(); lastStacktraces.drainTo(stacks); StringBuilder out = new StringBuilder("the last caught stacktraces: \n"); - for(List stack : stacks) { - int l=0; + for (List stack : stacks) { + int l = 0; for (Object line : stack) { - if (l++>0) { + if (l++ > 0) { out.append('\t'); } out.append(line); @@ -82,117 +84,123 @@ public static void dumpLastStackTraces(org.slf4j.Logger log) { static final class CheckMethodName implements Predicate { private final String methodName; - + CheckMethodName(String methodName) { this.methodName = methodName; } - + @Override public boolean test(StackTraceElement trace) { return trace.getMethodName().equals(methodName); } - + @Override public String toString() { - return "hunting for "+methodName+"()"; + return "hunting for " + methodName + "()"; } } public static Trap catchClass(String className) { - return catchClass(className, ()->{}); + return catchClass(className, () -> {}); } - + public static Trap catchClass(String className, Runnable onCaught) { - Predicate judge = new Predicate() { - @Override - public boolean test(StackTraceElement trace) { - return trace.getClassName().indexOf(className)>=0; - } - @Override - public String toString() { - return "className contains "+className; - } - }; - return catchTrace(judge, onCaught) ; + Predicate judge = + new Predicate() { + @Override + public boolean test(StackTraceElement trace) { + return trace.getClassName().indexOf(className) >= 0; + } + + @Override + public String toString() { + return "className contains " + className; + } + }; + return catchTrace(judge, onCaught); } - + public static Trap catchTrace(Predicate judge, Runnable onCaught) { - return setTrap(new Trap() { - - private boolean trigered; - - @Override - protected boolean shouldExit() { - Exception e = new Exception("stack sniffer"); - e.fillInStackTrace(); - StackTraceElement[] stackTrace = e.getStackTrace(); - for(StackTraceElement trace : stackTrace) { - if (judge.test(trace)) { - trigered = true; - recordStackTrace(stackTrace); - onCaught.run(); - return true; + return setTrap( + new Trap() { + + private boolean trigered; + + @Override + protected boolean shouldExit() { + Exception e = new Exception("stack sniffer"); + e.fillInStackTrace(); + StackTraceElement[] stackTrace = e.getStackTrace(); + for (StackTraceElement trace : stackTrace) { + if (judge.test(trace)) { + trigered = true; + recordStackTrace(stackTrace); + onCaught.run(); + return true; + } + } + return false; } - } - return false; - } - @Override - public boolean hasCaught() { - return trigered; - } + @Override + public 
boolean hasCaught() { + return trigered; + } - @Override - public String toString() { - return ""+judge; - } - }); + @Override + public String toString() { + return "" + judge; + } + }); } - + public static Trap catchCount(int boundary) { - return setTrap(new Trap() { - - private AtomicInteger count = new AtomicInteger(); - - @Override - public String toString() { - return ""+count.get()+"th tick of "+boundary+" allowed"; - } - - private boolean trigered; - - @Override - protected boolean shouldExit() { - int now = count.incrementAndGet(); - boolean trigger = now==boundary - || (now>boundary && LuceneTestCase.rarely(LuceneTestCase.random())); - if (trigger) { - Exception e = new Exception("stack sniffer"); - e.fillInStackTrace(); - recordStackTrace(e.getStackTrace()); - trigered = true; - } - return trigger; - } + return setTrap( + new Trap() { - @Override - public boolean hasCaught() { - return trigered; - } - }); + private AtomicInteger count = new AtomicInteger(); + + @Override + public String toString() { + return "" + count.get() + "th tick of " + boundary + " allowed"; + } + + private boolean trigered; + + @Override + protected boolean shouldExit() { + int now = count.incrementAndGet(); + boolean trigger = + now == boundary + || (now > boundary && LuceneTestCase.rarely(LuceneTestCase.random())); + if (trigger) { + Exception e = new Exception("stack sniffer"); + e.fillInStackTrace(); + recordStackTrace(e.getStackTrace()); + trigered = true; + } + return trigger; + } + + @Override + public boolean hasCaught() { + return trigered; + } + }); } - + private static void recordStackTrace(StackTraceElement[] stackTrace) { - //keep the last n limited traces. - //e.printStackTrace(); + // keep the last n limited traces. + // e.printStackTrace(); ArrayList stack = new ArrayList(); - stack.add(""+ (new Date().getTime()-startTime)+" ("+Thread.currentThread().getName()+")"); - for (int l=2; lmaxTraces) { + // triming queue + while (lastStacktraces.size() > maxTraces) { try { lastStacktraces.poll(100, TimeUnit.MILLISECONDS); } catch (InterruptedException e1) { @@ -208,17 +216,19 @@ public DirectoryReader newReader(Directory indexDir, SolrCore core) throws IOExc } private ExitableDirectoryReader wrap(DirectoryReader newReader) throws IOException { - return new ExitableDirectoryReader(newReader, new QueryTimeout() { - @Override - public boolean shouldExit() { - return trap!=null && trap.shouldExit(); - } - - @Override - public String toString() { - return ""+trap; - } - }); + return new ExitableDirectoryReader( + newReader, + new QueryTimeout() { + @Override + public boolean shouldExit() { + return trap != null && trap.shouldExit(); + } + + @Override + public String toString() { + return "" + trap; + } + }); } @Override diff --git a/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java index c6c8680da99..745deb740f7 100644 --- a/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java @@ -18,13 +18,12 @@ import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.SolrTestCaseJ4.SuppressSSL; - import org.junit.Test; /** - * This test simply does a bunch of basic things in solrcloud mode and asserts things - * work as expected. - * Implementation moved to AbstractUnloadDistributedZkTestBase as it is used by HDFS contrib module tests. 
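The TrollingIndexReaderFactory reformatted above is driven through a small try-with-resources API: Trap implements Closeable, and closing it clears the static trap via setTrap(null). A minimal usage sketch, assuming a test core whose solrconfig.xml wires in this factory, a hypothetical "QueryComponent" class name to hunt for, and a test method declared throws Exception ('log' is the test's slf4j logger):

    try (Trap trap = TrollingIndexReaderFactory.catchClass("QueryComponent")) {
      // run a request against the trolled core here; the ExitableDirectoryReader
      // returned by newReader() polls trap.shouldExit() and aborts the first
      // search whose current stack mentions the watched class
      assertTrue("expected the trap to fire", trap.hasCaught());
    } catch (AssertionError e) {
      // the factory retains the last few offending stacks for post-mortem logging
      Trap.dumpLastStackTraces(log);
      throw e;
    }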
+ * This test simply does a bunch of basic things in solrcloud mode and asserts things work as + * expected. Implementation moved to AbstractUnloadDistributedZkTestBase as it is used by HDFS + * contrib module tests. */ @Slow @SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776") @@ -42,5 +41,4 @@ protected String getSolrXml() { public void test() throws Exception { super.test(); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/VMParamsZkACLAndCredentialsProvidersTest.java b/solr/core/src/test/org/apache/solr/cloud/VMParamsZkACLAndCredentialsProvidersTest.java index 32de7438790..addbe737e29 100644 --- a/solr/core/src/test/org/apache/solr/cloud/VMParamsZkACLAndCredentialsProvidersTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/VMParamsZkACLAndCredentialsProvidersTest.java @@ -16,6 +16,5 @@ */ package org.apache.solr.cloud; -public class VMParamsZkACLAndCredentialsProvidersTest extends AbstractVMParamsZkACLAndCredentialsProvidersTestBase { - -} +public class VMParamsZkACLAndCredentialsProvidersTest + extends AbstractVMParamsZkACLAndCredentialsProvidersTestBase {} diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkCLITest.java b/solr/core/src/test/org/apache/solr/cloud/ZkCLITest.java index 547124ec093..44d373495ee 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ZkCLITest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ZkCLITest.java @@ -27,7 +27,6 @@ import java.nio.file.Path; import java.util.Collection; import java.util.List; - import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.io.filefilter.RegexFileFilter; @@ -96,9 +95,7 @@ public void setUp() throws Exception { zkClient.makePath("/solr", false, true); zkClient.close(); - - this.zkClient = new SolrZkClient(zkServer.getZkAddress(), - AbstractZkTestCase.TIMEOUT); + this.zkClient = new SolrZkClient(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT); if (log.isInfoEnabled()) { log.info("####SETUP_END {}", getTestName()); @@ -118,23 +115,28 @@ public void testBootstrapWithChroot() throws Exception { String chroot = "/foo/bar"; assertFalse(zkClient.exists(chroot, true)); - String[] args = new String[] {"-zkhost", zkServer.getZkAddress() + chroot, - "-cmd", "bootstrap", "-solrhome", this.solrHome}; + String[] args = + new String[] { + "-zkhost", + zkServer.getZkAddress() + chroot, + "-cmd", + "bootstrap", + "-solrhome", + this.solrHome + }; ZkCLI.main(args); - assertTrue(zkClient.exists(chroot + ZkConfigSetService.CONFIGS_ZKNODE - + "/collection1", true)); + assertTrue(zkClient.exists(chroot + ZkConfigSetService.CONFIGS_ZKNODE + "/collection1", true)); } @Test public void testMakePath() throws Exception { // test bootstrap_conf - String[] args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", - "makepath", "/path/mynewpath"}; + String[] args = + new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", "makepath", "/path/mynewpath"}; ZkCLI.main(args); - assertTrue(zkClient.exists("/path/mynewpath", true)); } @@ -142,30 +144,39 @@ public void testMakePath() throws Exception { public void testPut() throws Exception { // test put String data = "my data"; - String[] args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", - "put", "/data.txt", data}; + String[] args = + new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", "put", "/data.txt", data}; ZkCLI.main(args); zkClient.getData("/data.txt", null, null, true); - assertArrayEquals(zkClient.getData("/data.txt", null, null, true), data.getBytes(StandardCharsets.UTF_8)); 
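Each of the ZkCLITest cases below drives the tool through its main() with the same flag convention; a minimal sketch with a hypothetical ZooKeeper address, inside a method declared throws Exception:

    // every command follows: -zkhost <address> -cmd <command> [command arguments...]
    String[] args = {"-zkhost", "127.0.0.1:2181", "-cmd", "put", "/data.txt", "my data"};
    ZkCLI.main(args); // stores "my data" in the /data.txt znode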
+ assertArrayEquals( + zkClient.getData("/data.txt", null, null, true), data.getBytes(StandardCharsets.UTF_8)); // test re-put to existing data = "my data deux"; - args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", - "put", "/data.txt", data}; + args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", "put", "/data.txt", data}; ZkCLI.main(args); - assertArrayEquals(zkClient.getData("/data.txt", null, null, true), data.getBytes(StandardCharsets.UTF_8)); + assertArrayEquals( + zkClient.getData("/data.txt", null, null, true), data.getBytes(StandardCharsets.UTF_8)); } @Test public void testPutFile() throws Exception { // test put file - String[] args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", - "putfile", "/solr.xml", SOLR_HOME + File.separator + "solr-stress-new.xml"}; + String[] args = + new String[] { + "-zkhost", + zkServer.getZkAddress(), + "-cmd", + "putfile", + "/solr.xml", + SOLR_HOME + File.separator + "solr-stress-new.xml" + }; ZkCLI.main(args); - String fromZk = new String(zkClient.getData("/solr.xml", null, null, true), StandardCharsets.UTF_8); + String fromZk = + new String(zkClient.getData("/solr.xml", null, null, true), StandardCharsets.UTF_8); File locFile = new File(SOLR_HOME + File.separator + "solr-stress-new.xml"); InputStream is = new FileInputStream(locFile); String fromLoc; @@ -180,18 +191,25 @@ public void testPutFile() throws Exception { @Test public void testPutFileNotExists() throws Exception { // test put file - String[] args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", - "putfile", "/solr.xml", SOLR_HOME + File.separator + "not-there.xml"}; + String[] args = + new String[] { + "-zkhost", + zkServer.getZkAddress(), + "-cmd", + "putfile", + "/solr.xml", + SOLR_HOME + File.separator + "not-there.xml" + }; FileNotFoundException e = expectThrows(FileNotFoundException.class, () -> ZkCLI.main(args)); - assertTrue("Didn't find expected error message containing 'not-there.xml' in " + e.getMessage(), + assertTrue( + "Didn't find expected error message containing 'not-there.xml' in " + e.getMessage(), e.getMessage().indexOf("not-there.xml") != -1); } @Test public void testList() throws Exception { zkClient.makePath("/test", true); - String[] args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", - "list"}; + String[] args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", "list"}; ByteArrayOutputStream byteStream = new ByteArrayOutputStream(); final PrintStream myOut = new PrintStream(byteStream, false, StandardCharsets.UTF_8.name()); @@ -207,8 +225,7 @@ public void testList() throws Exception { @Test public void testLs() throws Exception { zkClient.makePath("/test/path", true); - String[] args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", - "ls", "/test"}; + String[] args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", "ls", "/test"}; ByteArrayOutputStream byteStream = new ByteArrayOutputStream(); final PrintStream myOut = new PrintStream(byteStream, false, StandardCharsets.UTF_8.name()); @@ -218,7 +235,8 @@ public void testLs() throws Exception { final String standardOutput = byteStream.toString(StandardCharsets.UTF_8.name()); String separator = System.lineSeparator(); - assertEquals("/test (1)" + separator + " /test/path (0)" + separator + separator, standardOutput); + assertEquals( + "/test (1)" + separator + " /test/path (0)" + separator + separator, standardOutput); } @Test @@ -229,19 +247,33 @@ public void testUpConfigLinkConfigClearZk() throws Exception { String 
confsetname = "confsetone"; final String[] upconfigArgs; if (random().nextBoolean()) { - upconfigArgs = new String[] { - "-zkhost", zkServer.getZkAddress(), - "-cmd", ZkCLI.UPCONFIG, - "-confdir", ExternalPaths.TECHPRODUCTS_CONFIGSET, - "-confname", confsetname}; + upconfigArgs = + new String[] { + "-zkhost", + zkServer.getZkAddress(), + "-cmd", + ZkCLI.UPCONFIG, + "-confdir", + ExternalPaths.TECHPRODUCTS_CONFIGSET, + "-confname", + confsetname + }; } else { - final String excluderegexOption = (random().nextBoolean() ? "--"+ZkCLI.EXCLUDE_REGEX : "-"+ZkCLI.EXCLUDE_REGEX_SHORT); - upconfigArgs = new String[] { - "-zkhost", zkServer.getZkAddress(), - "-cmd", ZkCLI.UPCONFIG, - excluderegexOption, ZkCLI.EXCLUDE_REGEX_DEFAULT, - "-confdir", ExternalPaths.TECHPRODUCTS_CONFIGSET, - "-confname", confsetname}; + final String excluderegexOption = + (random().nextBoolean() ? "--" + ZkCLI.EXCLUDE_REGEX : "-" + ZkCLI.EXCLUDE_REGEX_SHORT); + upconfigArgs = + new String[] { + "-zkhost", + zkServer.getZkAddress(), + "-cmd", + ZkCLI.UPCONFIG, + excluderegexOption, + ZkCLI.EXCLUDE_REGEX_DEFAULT, + "-confdir", + ExternalPaths.TECHPRODUCTS_CONFIGSET, + "-confname", + confsetname + }; } ZkCLI.main(upconfigArgs); @@ -251,46 +283,84 @@ public void testUpConfigLinkConfigClearZk() throws Exception { // ZkCLI.main(new String[0]); // test linkconfig - String[] args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", - "linkconfig", "-collection", "collection1", "-confname", confsetname}; + String[] args = + new String[] { + "-zkhost", + zkServer.getZkAddress(), + "-cmd", + "linkconfig", + "-collection", + "collection1", + "-confname", + confsetname + }; ZkCLI.main(args); - ZkNodeProps collectionProps = ZkNodeProps.load(zkClient.getData(ZkStateReader.COLLECTIONS_ZKNODE + "/collection1", null, null, true)); + ZkNodeProps collectionProps = + ZkNodeProps.load( + zkClient.getData(ZkStateReader.COLLECTIONS_ZKNODE + "/collection1", null, null, true)); assertTrue(collectionProps.containsKey("configName")); assertEquals(confsetname, collectionProps.getStr("configName")); // test down config - File confDir = new File(tmpDir, - "solrtest-confdropspot-" + this.getClass().getName() + "-" + System.nanoTime()); + File confDir = + new File( + tmpDir, "solrtest-confdropspot-" + this.getClass().getName() + "-" + System.nanoTime()); assertFalse(confDir.exists()); - args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", - "downconfig", "-confdir", confDir.getAbsolutePath(), "-confname", confsetname}; + args = + new String[] { + "-zkhost", + zkServer.getZkAddress(), + "-cmd", + "downconfig", + "-confdir", + confDir.getAbsolutePath(), + "-confname", + confsetname + }; ZkCLI.main(args); File[] files = confDir.listFiles(); - List zkFiles = zkClient.getChildren(ZkConfigSetService.CONFIGS_ZKNODE + "/" + confsetname, null, true); + List zkFiles = + zkClient.getChildren(ZkConfigSetService.CONFIGS_ZKNODE + "/" + confsetname, null, true); assertEquals(files.length, zkFiles.size()); File sourceConfDir = new File(ExternalPaths.TECHPRODUCTS_CONFIGSET); // filter out all directories starting with . (e.g. 
.svn) - Collection sourceFiles = FileUtils.listFiles(sourceConfDir, TrueFileFilter.INSTANCE, new RegexFileFilter("[^\\.].*")); - for (File sourceFile :sourceFiles){ - int indexOfRelativePath = sourceFile.getAbsolutePath().lastIndexOf("sample_techproducts_configs" + File.separator + "conf"); - String relativePathofFile = sourceFile.getAbsolutePath().substring(indexOfRelativePath + 33, sourceFile.getAbsolutePath().length()); - File downloadedFile = new File(confDir,relativePathofFile); - if (ConfigSetService.UPLOAD_FILENAME_EXCLUDE_PATTERN.matcher(relativePathofFile).matches()) { - assertFalse(sourceFile.getAbsolutePath() + " exists in ZK, downloaded:" + downloadedFile.getAbsolutePath(), downloadedFile.exists()); - } else { - assertTrue(downloadedFile.getAbsolutePath() + " does not exist source:" + sourceFile.getAbsolutePath(), downloadedFile.exists()); - assertTrue(relativePathofFile+" content changed",FileUtils.contentEquals(sourceFile,downloadedFile)); - } + Collection sourceFiles = + FileUtils.listFiles( + sourceConfDir, TrueFileFilter.INSTANCE, new RegexFileFilter("[^\\.].*")); + for (File sourceFile : sourceFiles) { + int indexOfRelativePath = + sourceFile + .getAbsolutePath() + .lastIndexOf("sample_techproducts_configs" + File.separator + "conf"); + String relativePathofFile = + sourceFile + .getAbsolutePath() + .substring(indexOfRelativePath + 33, sourceFile.getAbsolutePath().length()); + File downloadedFile = new File(confDir, relativePathofFile); + if (ConfigSetService.UPLOAD_FILENAME_EXCLUDE_PATTERN.matcher(relativePathofFile).matches()) { + assertFalse( + sourceFile.getAbsolutePath() + + " exists in ZK, downloaded:" + + downloadedFile.getAbsolutePath(), + downloadedFile.exists()); + } else { + assertTrue( + downloadedFile.getAbsolutePath() + + " does not exist source:" + + sourceFile.getAbsolutePath(), + downloadedFile.exists()); + assertTrue( + relativePathofFile + " content changed", + FileUtils.contentEquals(sourceFile, downloadedFile)); + } } - // test reset zk - args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", - "clear", "/"}; + args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", "clear", "/"}; ZkCLI.main(args); assertEquals(0, zkClient.getChildren("/", null, true).size()); @@ -299,10 +369,9 @@ public void testUpConfigLinkConfigClearZk() throws Exception { @Test public void testGet() throws Exception { String getNode = "/getNode"; - byte [] data = "getNode-data".getBytes(StandardCharsets.UTF_8); + byte[] data = "getNode-data".getBytes(StandardCharsets.UTF_8); this.zkClient.create(getNode, data, CreateMode.PERSISTENT, true); - String[] args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", - "get", getNode}; + String[] args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", "get", getNode}; ZkCLI.main(args); } @@ -311,16 +380,18 @@ public void testGetFile() throws Exception { File tmpDir = createTempDir().toFile(); String getNode = "/getFileNode"; - byte [] data = "getFileNode-data".getBytes(StandardCharsets.UTF_8); + byte[] data = "getFileNode-data".getBytes(StandardCharsets.UTF_8); this.zkClient.create(getNode, data, CreateMode.PERSISTENT, true); - File file = new File(tmpDir, - "solrtest-getfile-" + this.getClass().getName() + "-" + System.nanoTime()); - String[] args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", - "getfile", getNode, file.getAbsolutePath()}; + File file = + new File(tmpDir, "solrtest-getfile-" + this.getClass().getName() + "-" + System.nanoTime()); + String[] args = + new String[] { + 
"-zkhost", zkServer.getZkAddress(), "-cmd", "getfile", getNode, file.getAbsolutePath() + }; ZkCLI.main(args); - byte [] readData = FileUtils.readFileToByteArray(file); + byte[] readData = FileUtils.readFileToByteArray(file); assertArrayEquals(data, readData); } @@ -329,16 +400,21 @@ public void testGetFileNotExists() throws Exception { String getNode = "/getFileNotExistsNode"; File file = createTempFile("newfile", null).toFile(); - String[] args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", - "getfile", getNode, file.getAbsolutePath()}; + String[] args = + new String[] { + "-zkhost", zkServer.getZkAddress(), "-cmd", "getfile", getNode, file.getAbsolutePath() + }; KeeperException e = expectThrows(KeeperException.class, () -> ZkCLI.main(args)); assertEquals(e.code(), KeeperException.Code.NONODE); } public void testInvalidZKAddress() throws SolrException { - SolrException ex = expectThrows(SolrException.class, () -> { - new SolrZkClient("----------:33332", 100); - }); + SolrException ex = + expectThrows( + SolrException.class, + () -> { + new SolrZkClient("----------:33332", 100); + }); zkClient.close(); } @@ -346,37 +422,57 @@ public void testInvalidZKAddress() throws SolrException { public void testSetClusterProperty() throws Exception { ClusterProperties properties = new ClusterProperties(zkClient); // add property urlScheme=http - String[] args = new String[]{"-zkhost", zkServer.getZkAddress(), - "-cmd", "CLUSTERPROP", "-name", "urlScheme", "-val", "http"}; + String[] args = + new String[] { + "-zkhost", + zkServer.getZkAddress(), + "-cmd", + "CLUSTERPROP", + "-name", + "urlScheme", + "-val", + "http" + }; ZkCLI.main(args); assertEquals("http", properties.getClusterProperty("urlScheme", "none")); // remove it again - args = new String[]{"-zkhost", zkServer.getZkAddress(), - "-cmd", "CLUSTERPROP", "-name", "urlScheme"}; + args = + new String[] { + "-zkhost", zkServer.getZkAddress(), "-cmd", "CLUSTERPROP", "-name", "urlScheme" + }; ZkCLI.main(args); assertNull(properties.getClusterProperty("urlScheme", (String) null)); - } @Test public void testUpdateAcls() throws Exception { try { - System.setProperty(SolrZkClient.ZK_ACL_PROVIDER_CLASS_NAME_VM_PARAM_NAME, VMParamsAllAndReadonlyDigestZkACLProvider.class.getName()); - System.setProperty(VMParamsAllAndReadonlyDigestZkACLProvider.DEFAULT_DIGEST_READONLY_USERNAME_VM_PARAM_NAME, "user"); - System.setProperty(VMParamsAllAndReadonlyDigestZkACLProvider.DEFAULT_DIGEST_READONLY_PASSWORD_VM_PARAM_NAME, "pass"); - - String[] args = new String[]{"-zkhost", zkServer.getZkAddress(), "-cmd", "updateacls", "/"}; + System.setProperty( + SolrZkClient.ZK_ACL_PROVIDER_CLASS_NAME_VM_PARAM_NAME, + VMParamsAllAndReadonlyDigestZkACLProvider.class.getName()); + System.setProperty( + VMParamsAllAndReadonlyDigestZkACLProvider.DEFAULT_DIGEST_READONLY_USERNAME_VM_PARAM_NAME, + "user"); + System.setProperty( + VMParamsAllAndReadonlyDigestZkACLProvider.DEFAULT_DIGEST_READONLY_PASSWORD_VM_PARAM_NAME, + "pass"); + + String[] args = new String[] {"-zkhost", zkServer.getZkAddress(), "-cmd", "updateacls", "/"}; ZkCLI.main(args); } finally { // Need to clear these before we open the next SolrZkClient System.clearProperty(SolrZkClient.ZK_ACL_PROVIDER_CLASS_NAME_VM_PARAM_NAME); - System.clearProperty(VMParamsAllAndReadonlyDigestZkACLProvider.DEFAULT_DIGEST_READONLY_USERNAME_VM_PARAM_NAME); - System.clearProperty(VMParamsAllAndReadonlyDigestZkACLProvider.DEFAULT_DIGEST_READONLY_PASSWORD_VM_PARAM_NAME); + System.clearProperty( + 
VMParamsAllAndReadonlyDigestZkACLProvider.DEFAULT_DIGEST_READONLY_USERNAME_VM_PARAM_NAME); + System.clearProperty( + VMParamsAllAndReadonlyDigestZkACLProvider.DEFAULT_DIGEST_READONLY_PASSWORD_VM_PARAM_NAME); } boolean excepted = false; - try (SolrZkClient zkClient = new SolrZkClient(zkServer.getZkAddress(), AbstractDistribZkTestBase.DEFAULT_CONNECTION_TIMEOUT)) { + try (SolrZkClient zkClient = + new SolrZkClient( + zkServer.getZkAddress(), AbstractDistribZkTestBase.DEFAULT_CONNECTION_TIMEOUT)) { zkClient.getData("/", null, null, true); } catch (KeeperException.NoAuthException e) { excepted = true; diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java b/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java index 90c2019cf80..4b82516b065 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java @@ -16,6 +16,11 @@ */ package org.apache.solr.cloud; +import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP; +import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP; +import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICA; +import static org.mockito.Mockito.mock; + import java.nio.file.Path; import java.util.Collections; import java.util.HashMap; @@ -23,7 +28,6 @@ import java.util.Properties; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.cloud.ClusterProperties; @@ -43,11 +47,6 @@ import org.junit.AfterClass; import org.junit.BeforeClass; -import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP; -import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP; -import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICA; -import static org.mockito.Mockito.mock; - @Slow @SolrTestCaseJ4.SuppressSSL public class ZkControllerTest extends SolrTestCaseJ4 { @@ -57,35 +56,31 @@ public class ZkControllerTest extends SolrTestCaseJ4 { static final int TIMEOUT = 10000; @BeforeClass - public static void beforeClass() throws Exception { - - } + public static void beforeClass() throws Exception {} @AfterClass - public static void afterClass() throws Exception { - - } + public static void afterClass() throws Exception {} public void testNodeNameUrlConversion() throws Exception { // nodeName from parts - assertEquals("localhost:8888_solr", - ZkController.generateNodeName("localhost", "8888", "solr")); - assertEquals("localhost:8888_solr", - ZkController.generateNodeName("localhost", "8888", "/solr")); - assertEquals("localhost:8888_solr", - ZkController.generateNodeName("localhost", "8888", "/solr/")); + assertEquals("localhost:8888_solr", ZkController.generateNodeName("localhost", "8888", "solr")); + assertEquals( + "localhost:8888_solr", ZkController.generateNodeName("localhost", "8888", "/solr")); + assertEquals( + "localhost:8888_solr", ZkController.generateNodeName("localhost", "8888", "/solr/")); // root context - assertEquals("localhost:8888_", - ZkController.generateNodeName("localhost", "8888", "")); - assertEquals("localhost:8888_", - ZkController.generateNodeName("localhost", "8888", "/")); + assertEquals("localhost:8888_", ZkController.generateNodeName("localhost", "8888", "")); + assertEquals("localhost:8888_", ZkController.generateNodeName("localhost", "8888", "/")); // subdir - 
assertEquals("foo-bar:77_solr%2Fsub_dir", + assertEquals( + "foo-bar:77_solr%2Fsub_dir", ZkController.generateNodeName("foo-bar", "77", "solr/sub_dir")); - assertEquals("foo-bar:77_solr%2Fsub_dir", + assertEquals( + "foo-bar:77_solr%2Fsub_dir", ZkController.generateNodeName("foo-bar", "77", "/solr/sub_dir")); - assertEquals("foo-bar:77_solr%2Fsub_dir", + assertEquals( + "foo-bar:77_solr%2Fsub_dir", ZkController.generateNodeName("foo-bar", "77", "/solr/sub_dir/")); // setup a SolrZkClient to do some getBaseUrlForNodeName testing @@ -103,64 +98,74 @@ public void testNodeNameUrlConversion() throws Exception { zkStateReader.createClusterStateWatchersAndUpdate(); // getBaseUrlForNodeName - assertEquals("http://zzz.xxx:1234/solr", - zkStateReader.getBaseUrlForNodeName("zzz.xxx:1234_solr")); - assertEquals("http://zzz_xxx:1234/solr", - zkStateReader.getBaseUrlForNodeName("zzz_xxx:1234_solr")); - assertEquals("http://xxx:99", - zkStateReader.getBaseUrlForNodeName("xxx:99_")); - assertEquals("http://foo-bar.baz.org:9999/some_dir", + assertEquals( + "http://zzz.xxx:1234/solr", zkStateReader.getBaseUrlForNodeName("zzz.xxx:1234_solr")); + assertEquals( + "http://zzz_xxx:1234/solr", zkStateReader.getBaseUrlForNodeName("zzz_xxx:1234_solr")); + assertEquals("http://xxx:99", zkStateReader.getBaseUrlForNodeName("xxx:99_")); + assertEquals( + "http://foo-bar.baz.org:9999/some_dir", zkStateReader.getBaseUrlForNodeName("foo-bar.baz.org:9999_some_dir")); - assertEquals("http://foo-bar.baz.org:9999/solr/sub_dir", + assertEquals( + "http://foo-bar.baz.org:9999/solr/sub_dir", zkStateReader.getBaseUrlForNodeName("foo-bar.baz.org:9999_solr%2Fsub_dir")); // generateNodeName + getBaseUrlForNodeName - assertEquals("http://foo:9876/solr", - zkStateReader.getBaseUrlForNodeName - (ZkController.generateNodeName("foo", "9876", "solr"))); - assertEquals("http://foo:9876/solr", - zkStateReader.getBaseUrlForNodeName - (ZkController.generateNodeName("foo", "9876", "/solr"))); - assertEquals("http://foo:9876/solr", - zkStateReader.getBaseUrlForNodeName - (ZkController.generateNodeName("foo", "9876", "/solr/"))); - assertEquals("http://foo.bar.com:9876/solr/sub_dir", - zkStateReader.getBaseUrlForNodeName - (ZkController.generateNodeName("foo.bar.com", "9876", "solr/sub_dir"))); - assertEquals("http://foo.bar.com:9876/solr/sub_dir", - zkStateReader.getBaseUrlForNodeName - (ZkController.generateNodeName("foo.bar.com", "9876", "/solr/sub_dir/"))); - assertEquals("http://foo-bar:9876", - zkStateReader.getBaseUrlForNodeName - (ZkController.generateNodeName("foo-bar", "9876", ""))); - assertEquals("http://foo-bar:9876", - zkStateReader.getBaseUrlForNodeName - (ZkController.generateNodeName("foo-bar", "9876", "/"))); - assertEquals("http://foo-bar.com:80/some_dir", - zkStateReader.getBaseUrlForNodeName - (ZkController.generateNodeName("foo-bar.com", "80", "some_dir"))); - assertEquals("http://foo-bar.com:80/some_dir", - zkStateReader.getBaseUrlForNodeName - (ZkController.generateNodeName("foo-bar.com", "80", "/some_dir"))); - + assertEquals( + "http://foo:9876/solr", + zkStateReader.getBaseUrlForNodeName( + ZkController.generateNodeName("foo", "9876", "solr"))); + assertEquals( + "http://foo:9876/solr", + zkStateReader.getBaseUrlForNodeName( + ZkController.generateNodeName("foo", "9876", "/solr"))); + assertEquals( + "http://foo:9876/solr", + zkStateReader.getBaseUrlForNodeName( + ZkController.generateNodeName("foo", "9876", "/solr/"))); + assertEquals( + "http://foo.bar.com:9876/solr/sub_dir", + zkStateReader.getBaseUrlForNodeName( + 
ZkController.generateNodeName("foo.bar.com", "9876", "solr/sub_dir"))); + assertEquals( + "http://foo.bar.com:9876/solr/sub_dir", + zkStateReader.getBaseUrlForNodeName( + ZkController.generateNodeName("foo.bar.com", "9876", "/solr/sub_dir/"))); + assertEquals( + "http://foo-bar:9876", + zkStateReader.getBaseUrlForNodeName( + ZkController.generateNodeName("foo-bar", "9876", ""))); + assertEquals( + "http://foo-bar:9876", + zkStateReader.getBaseUrlForNodeName( + ZkController.generateNodeName("foo-bar", "9876", "/"))); + assertEquals( + "http://foo-bar.com:80/some_dir", + zkStateReader.getBaseUrlForNodeName( + ZkController.generateNodeName("foo-bar.com", "80", "some_dir"))); + assertEquals( + "http://foo-bar.com:80/some_dir", + zkStateReader.getBaseUrlForNodeName( + ZkController.generateNodeName("foo-bar.com", "80", "/some_dir"))); } ClusterProperties cp = new ClusterProperties(client); cp.setClusterProperty("urlScheme", "https"); - //Verify the URL Scheme is taken into account + // Verify the URL Scheme is taken into account try (ZkStateReader zkStateReader = new ZkStateReader(client)) { zkStateReader.createClusterStateWatchersAndUpdate(); - assertEquals("https://zzz.xxx:1234/solr", + assertEquals( + "https://zzz.xxx:1234/solr", zkStateReader.getBaseUrlForNodeName("zzz.xxx:1234_solr")); - assertEquals("https://foo-bar.com:80/some_dir", - zkStateReader.getBaseUrlForNodeName - (ZkController.generateNodeName("foo-bar.com", "80", "/some_dir"))); - + assertEquals( + "https://foo-bar.com:80/some_dir", + zkStateReader.getBaseUrlForNodeName( + ZkController.generateNodeName("foo-bar.com", "80", "/some_dir"))); } } } finally { @@ -179,13 +184,14 @@ public void testGetHostName() throws Exception { ZkController zkController = null; try { - CloudConfig cloudConfig = new CloudConfig.CloudConfigBuilder("127.0.0.1", 8983, "solr").build(); - zkController = new ZkController(cc, server.getZkAddress(), TIMEOUT, cloudConfig, () -> null); + CloudConfig cloudConfig = + new CloudConfig.CloudConfigBuilder("127.0.0.1", 8983, "solr").build(); + zkController = + new ZkController(cc, server.getZkAddress(), TIMEOUT, cloudConfig, () -> null); } catch (IllegalArgumentException e) { fail("ZkController did not normalize host name correctly"); } finally { - if (zkController != null) - zkController.close(); + if (zkController != null) zkController.close(); if (cc != null) { cc.shutdown(); } @@ -197,7 +203,7 @@ public void testGetHostName() throws Exception { @Slow @LogLevel(value = "org.apache.solr.cloud=DEBUG;org.apache.solr.cloud.overseer=DEBUG") - public void testPublishAndWaitForDownStates() throws Exception { + public void testPublishAndWaitForDownStates() throws Exception { /* This test asserts that if zkController.publishAndWaitForDownStates uses only core name to check if all local @@ -219,31 +225,59 @@ cores are down then the method will return immediately but if it uses coreNodeNa server.run(); AtomicReference zkControllerRef = new AtomicReference<>(); - CoreContainer cc = new MockCoreContainer() { - @Override - public List getCoreDescriptors() { - CoreDescriptor descriptor = new CoreDescriptor(collectionName, TEST_PATH(), Collections.emptyMap(), new Properties(), zkControllerRef.get()); - // non-existent coreNodeName, this will cause zkController.publishAndWaitForDownStates to wait indefinitely - // when using coreNodeName but usage of core name alone will return immediately - descriptor.getCloudDescriptor().setCoreNodeName("core_node0"); - return Collections.singletonList(descriptor); - } - }; + CoreContainer cc = 
+ new MockCoreContainer() { + @Override + public List getCoreDescriptors() { + CoreDescriptor descriptor = + new CoreDescriptor( + collectionName, + TEST_PATH(), + Collections.emptyMap(), + new Properties(), + zkControllerRef.get()); + // non-existent coreNodeName, this will cause zkController.publishAndWaitForDownStates + // to wait indefinitely when using coreNodeName but usage of core name alone will + // return immediately + descriptor.getCloudDescriptor().setCoreNodeName("core_node0"); + return Collections.singletonList(descriptor); + } + }; ZkController zkController = null; try { - CloudConfig cloudConfig = new CloudConfig.CloudConfigBuilder("127.0.0.1", 8983, "solr").build(); - zkController = new ZkController(cc, server.getZkAddress(), TIMEOUT, cloudConfig, () -> null); + CloudConfig cloudConfig = + new CloudConfig.CloudConfigBuilder("127.0.0.1", 8983, "solr").build(); + zkController = + new ZkController(cc, server.getZkAddress(), TIMEOUT, cloudConfig, () -> null); zkControllerRef.set(zkController); - zkController.getZkClient().makePath(ZkStateReader.getCollectionPathRoot(collectionName), new byte[0], CreateMode.PERSISTENT, true); - - ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, - CollectionParams.CollectionAction.CREATE.toLower(), ZkStateReader.NODE_NAME_PROP, nodeName, ZkStateReader.NUM_SHARDS_PROP, "1", - "name", collectionName); + zkController + .getZkClient() + .makePath( + ZkStateReader.getCollectionPathRoot(collectionName), + new byte[0], + CreateMode.PERSISTENT, + true); + + ZkNodeProps m = + new ZkNodeProps( + Overseer.QUEUE_OPERATION, + CollectionParams.CollectionAction.CREATE.toLower(), + ZkStateReader.NODE_NAME_PROP, + nodeName, + ZkStateReader.NUM_SHARDS_PROP, + "1", + "name", + collectionName); if (zkController.getDistributedClusterStateUpdater().isDistributedStateUpdate()) { - zkController.getDistributedClusterStateUpdater().doSingleStateUpdate(DistributedClusterStateUpdater.MutatingCommand.ClusterCreateCollection, m, - zkController.getSolrCloudManager(), zkController.getZkStateReader()); + zkController + .getDistributedClusterStateUpdater() + .doSingleStateUpdate( + DistributedClusterStateUpdater.MutatingCommand.ClusterCreateCollection, + m, + zkController.getSolrCloudManager(), + zkController.getZkStateReader()); } else { zkController.getOverseerJobQueue().offer(Utils.toJSON(m)); } @@ -256,8 +290,13 @@ public List getCoreDescriptors() { propMap.put(ZkStateReader.CORE_NAME_PROP, collectionName); propMap.put(ZkStateReader.STATE_PROP, "active"); if (zkController.getDistributedClusterStateUpdater().isDistributedStateUpdate()) { - zkController.getDistributedClusterStateUpdater().doSingleStateUpdate(DistributedClusterStateUpdater.MutatingCommand.SliceAddReplica, new ZkNodeProps(propMap), - zkController.getSolrCloudManager(), zkController.getZkStateReader()); + zkController + .getDistributedClusterStateUpdater() + .doSingleStateUpdate( + DistributedClusterStateUpdater.MutatingCommand.SliceAddReplica, + new ZkNodeProps(propMap), + zkController.getSolrCloudManager(), + zkController.getZkStateReader()); } else { zkController.getOverseerJobQueue().offer(Utils.toJSON(propMap)); } @@ -270,8 +309,13 @@ public List getCoreDescriptors() { propMap.put(ZkStateReader.CORE_NAME_PROP, collectionName); propMap.put(ZkStateReader.STATE_PROP, "down"); if (zkController.getDistributedClusterStateUpdater().isDistributedStateUpdate()) { - zkController.getDistributedClusterStateUpdater().doSingleStateUpdate(DistributedClusterStateUpdater.MutatingCommand.SliceAddReplica, new 
ZkNodeProps(propMap), - zkController.getSolrCloudManager(), zkController.getZkStateReader()); + zkController + .getDistributedClusterStateUpdater() + .doSingleStateUpdate( + DistributedClusterStateUpdater.MutatingCommand.SliceAddReplica, + new ZkNodeProps(propMap), + zkController.getSolrCloudManager(), + zkController.getZkStateReader()); } else { zkController.getOverseerJobQueue().offer(Utils.toJSON(propMap)); } @@ -281,10 +325,11 @@ public List getCoreDescriptors() { long now = System.nanoTime(); long timeout = now + TimeUnit.NANOSECONDS.convert(5, TimeUnit.SECONDS); zkController.publishAndWaitForDownStates(5); - assertTrue("The ZkController.publishAndWaitForDownStates should have timed out but it didn't", System.nanoTime() >= timeout); + assertTrue( + "The ZkController.publishAndWaitForDownStates should have timed out but it didn't", + System.nanoTime() >= timeout); } finally { - if (zkController != null) - zkController.close(); + if (zkController != null) zkController.close(); cc.shutdown(); } } finally { @@ -302,7 +347,8 @@ public void tearDown() throws Exception { } private static class MockCoreContainer extends CoreContainer { - UpdateShardHandler updateShardHandler = new UpdateShardHandler(UpdateShardHandlerConfig.DEFAULT); + UpdateShardHandler updateShardHandler = + new UpdateShardHandler(UpdateShardHandlerConfig.DEFAULT); SolrMetricManager metricManager; public MockCoreContainer() { @@ -315,8 +361,7 @@ public MockCoreContainer() { } @Override - public void load() { - } + public void load() {} @Override public UpdateShardHandler getUpdateShardHandler() { diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkDistributedLockTest.java b/solr/core/src/test/org/apache/solr/cloud/ZkDistributedLockTest.java index 1b88c15384e..87dffae8d82 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ZkDistributedLockTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ZkDistributedLockTest.java @@ -18,7 +18,6 @@ package org.apache.solr.cloud; import java.util.concurrent.CountDownLatch; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.cloud.SolrZkClient; import org.apache.solr.common.params.CollectionParams; @@ -35,13 +34,14 @@ public class ZkDistributedLockTest extends SolrTestCaseJ4 { static final int TIMEOUT = 10000; /** - * Tests the obtention of a single read or write lock at a specific hierarchical level, for distributed Collection API - * locking as well as distributed Config Set API locking.
+ * Tests the acquisition of a single read or write lock at a specific hierarchical level, for
+ * distributed Collection API locking as well as distributed Config Set API locking. <br>
  * Tests the logic with a single thread, then tests multithreaded wait for lock acquire works.
- * Tests grouped to pay setup only once.<br>
+ * Tests grouped to pay setup only once.
  *
- * Higher level locking tests can be found at {@link org.apache.solr.cloud.api.collections.CollectionApiLockingTest} and
- * {@link org.apache.solr.cloud.ConfigSetApiLockingTest}
+ * <p>
Higher level locking tests can be found at {@link + * org.apache.solr.cloud.api.collections.CollectionApiLockingTest} and {@link + * org.apache.solr.cloud.ConfigSetApiLockingTest} */ @Test public void testSingleLocks() throws Exception { @@ -49,12 +49,14 @@ public void testSingleLocks() throws Exception { try { server.run(); try (SolrZkClient zkClient = new SolrZkClient(server.getZkAddress(), TIMEOUT)) { - DistributedCollectionLockFactory collLockFactory = new ZkDistributedCollectionLockFactory(zkClient, "/lockTestCollectionRoot"); + DistributedCollectionLockFactory collLockFactory = + new ZkDistributedCollectionLockFactory(zkClient, "/lockTestCollectionRoot"); monothreadedCollectionTests(collLockFactory); multithreadedCollectionTests(collLockFactory); - DistributedConfigSetLockFactory configSetLockFactory = new ZkDistributedConfigSetLockFactory(zkClient, "/lockTestConfigSetRoot"); + DistributedConfigSetLockFactory configSetLockFactory = + new ZkDistributedConfigSetLockFactory(zkClient, "/lockTestConfigSetRoot"); monothreadedConfigSetTests(configSetLockFactory); multithreadedConfigSetTests(configSetLockFactory); @@ -66,47 +68,79 @@ public void testSingleLocks() throws Exception { private void monothreadedCollectionTests(DistributedCollectionLockFactory factory) { // Collection level locks - DistributedLock collRL1 = factory.createLock(false, CollectionParams.LockLevel.COLLECTION, COLLECTION_NAME, null, null); + DistributedLock collRL1 = + factory.createLock( + false, CollectionParams.LockLevel.COLLECTION, COLLECTION_NAME, null, null); assertTrue("collRL1 should have been acquired", collRL1.isAcquired()); - DistributedLock collRL2 = factory.createLock(false, CollectionParams.LockLevel.COLLECTION, COLLECTION_NAME, null, null); + DistributedLock collRL2 = + factory.createLock( + false, CollectionParams.LockLevel.COLLECTION, COLLECTION_NAME, null, null); assertTrue("collRL1 should have been acquired", collRL2.isAcquired()); - DistributedLock collWL3 = factory.createLock(true, CollectionParams.LockLevel.COLLECTION, COLLECTION_NAME, null, null); - assertFalse("collWL3 should not have been acquired, due to collRL1 and collRL2", collWL3.isAcquired()); + DistributedLock collWL3 = + factory.createLock( + true, CollectionParams.LockLevel.COLLECTION, COLLECTION_NAME, null, null); + assertFalse( + "collWL3 should not have been acquired, due to collRL1 and collRL2", collWL3.isAcquired()); - assertTrue("collRL2 should have been acquired, that should not have changed", collRL2.isAcquired()); + assertTrue( + "collRL2 should have been acquired, that should not have changed", collRL2.isAcquired()); collRL1.release(); collRL2.release(); - assertTrue("collWL3 should have been acquired, collRL1 and collRL2 were released", collWL3.isAcquired()); - - DistributedLock collRL4 = factory.createLock(false, CollectionParams.LockLevel.COLLECTION, COLLECTION_NAME, null, null); - assertFalse("collRL4 should not have been acquired, due to collWL3 locking the collection", collRL4.isAcquired()); - - // Collection is write locked by collWL3 and collRL4 read lock waiting behind. Now moving to request shard level locks. - // These are totally independent from the Collection level locks so should see no impact. 
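The factory calls being reformatted here compose into a simple acquire/release discipline; a sketch against the signatures visible in this patch (collection and shard names hypothetical):

    // write lock on one shard; collection/shard/replica level locks are independent
    DistributedLock lock =
        factory.createLock(true, CollectionParams.LockLevel.SHARD, "myColl", "shard1", null);
    try {
      if (!lock.isAcquired()) {
        lock.waitUntilAcquired(); // blocks until the competing locks are released
      }
      // ... perform the shard-level operation ...
    } finally {
      lock.release(); // release exactly once; a second release is expected to throw
    }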
- DistributedLock shardWL5 = factory.createLock(true, CollectionParams.LockLevel.SHARD, COLLECTION_NAME, SHARD_NAME, null); - assertTrue("shardWL5 should have been acquired, there is no lock on that shard", shardWL5.isAcquired()); - - DistributedLock shardWL6 = factory.createLock(true, CollectionParams.LockLevel.SHARD, COLLECTION_NAME, SHARD_NAME, null); - assertFalse("shardWL6 should not have been acquired, shardWL5 is locking that shard", shardWL6.isAcquired()); + assertTrue( + "collWL3 should have been acquired, collRL1 and collRL2 were released", + collWL3.isAcquired()); + + DistributedLock collRL4 = + factory.createLock( + false, CollectionParams.LockLevel.COLLECTION, COLLECTION_NAME, null, null); + assertFalse( + "collRL4 should not have been acquired, due to collWL3 locking the collection", + collRL4.isAcquired()); + + // Collection is write locked by collWL3 and collRL4 read lock waiting behind. Now moving to + // request shard level locks. These are totally independent from the Collection level locks so + // should see no impact. + DistributedLock shardWL5 = + factory.createLock( + true, CollectionParams.LockLevel.SHARD, COLLECTION_NAME, SHARD_NAME, null); + assertTrue( + "shardWL5 should have been acquired, there is no lock on that shard", + shardWL5.isAcquired()); + + DistributedLock shardWL6 = + factory.createLock( + true, CollectionParams.LockLevel.SHARD, COLLECTION_NAME, SHARD_NAME, null); + assertFalse( + "shardWL6 should not have been acquired, shardWL5 is locking that shard", + shardWL6.isAcquired()); // Get a lock on a Replica. Again this is independent of collection or shard level - DistributedLock replicaRL7 = factory.createLock(false, CollectionParams.LockLevel.REPLICA, COLLECTION_NAME, SHARD_NAME, REPLICA_NAME); + DistributedLock replicaRL7 = + factory.createLock( + false, CollectionParams.LockLevel.REPLICA, COLLECTION_NAME, SHARD_NAME, REPLICA_NAME); assertTrue("replicaRL7 should have been acquired", replicaRL7.isAcquired()); - DistributedLock replicaWL8 = factory.createLock(true, CollectionParams.LockLevel.REPLICA, COLLECTION_NAME, SHARD_NAME, REPLICA_NAME); - assertFalse("replicaWL8 should not have been acquired, replicaRL7 is read locking that replica", replicaWL8.isAcquired()); + DistributedLock replicaWL8 = + factory.createLock( + true, CollectionParams.LockLevel.REPLICA, COLLECTION_NAME, SHARD_NAME, REPLICA_NAME); + assertFalse( + "replicaWL8 should not have been acquired, replicaRL7 is read locking that replica", + replicaWL8.isAcquired()); replicaRL7.release(); - assertTrue("replicaWL8 should have been acquired, as replicaRL7 got released", replicaWL8.isAcquired()); - + assertTrue( + "replicaWL8 should have been acquired, as replicaRL7 got released", + replicaWL8.isAcquired()); collWL3.release(); assertTrue("collRL4 should have been acquired given collWL3 released", collRL4.isAcquired()); shardWL5.release(); - assertTrue("shardWL6 should have been acquired, now that shardWL5 was released", shardWL6.isAcquired()); + assertTrue( + "shardWL6 should have been acquired, now that shardWL5 was released", + shardWL6.isAcquired()); replicaWL8.release(); try { @@ -120,37 +154,49 @@ private void monothreadedCollectionTests(DistributedCollectionLockFactory factor collRL4.release(); } - private void multithreadedCollectionTests(DistributedCollectionLockFactory factory) throws Exception { + private void multithreadedCollectionTests(DistributedCollectionLockFactory factory) + throws Exception { // Acquiring right away a read lock - DistributedLock readLock = 
factory.createLock(false, CollectionParams.LockLevel.COLLECTION, COLLECTION_NAME, null, null); + DistributedLock readLock = + factory.createLock( + false, CollectionParams.LockLevel.COLLECTION, COLLECTION_NAME, null, null); assertTrue("readLock should have been acquired", readLock.isAcquired()); // And now creating a write lock, that can't be acquired just yet, because of the read lock - DistributedLock writeLock = factory.createLock(true, CollectionParams.LockLevel.COLLECTION, COLLECTION_NAME, null, null); + DistributedLock writeLock = + factory.createLock( + true, CollectionParams.LockLevel.COLLECTION, COLLECTION_NAME, null, null); assertFalse("writeLock should not have been acquired", writeLock.isAcquired()); // Wait for acquisition of the write lock on another thread (and be notified via a latch) final CountDownLatch latch = new CountDownLatch(1); - new Thread(() -> { - writeLock.waitUntilAcquired(); - // countDown() will not be called if waitUntilAcquired() threw exception of any kind - latch.countDown(); - }).start(); - - // Wait for the thread to start and to get blocked in waitUntilAcquired() - // (thread start could have been checked more reliably using another latch, and verifying the thread is in waitUntilAcquired - // done through that thread stacktrace, but that would be overkill compared to the very slight race condition of waiting 30ms, - // but a race that would not cause the test to fail since we're testing... that nothing happened yet). + new Thread( + () -> { + writeLock.waitUntilAcquired(); + // countDown() will not be called if waitUntilAcquired() threw exception of any kind + latch.countDown(); + }) + .start(); + + // Wait for the thread to start and to get blocked in waitUntilAcquired() (thread start could + // have been checked more reliably using another latch, and verifying the thread is in + // waitUntilAcquired done through that thread stacktrace, but that would be overkill compared to + // the very slight race condition of waiting 30ms, but a race that would not cause the test to + // fail since we're testing... that nothing happened yet). 
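          // (a sketch of the "another latch" variant alluded to above, with a
          // hypothetical 'started' latch:
          //   CountDownLatch started = new CountDownLatch(1);
          //   new Thread(() -> { started.countDown(); writeLock.waitUntilAcquired();
          //                      latch.countDown(); }).start();
          //   started.await();
          // even then, only a stacktrace check would prove the thread is parked inside
          // waitUntilAcquired(), so the test deliberately settles for the sleep below.)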
Thread.sleep(30); - assertEquals("we should not have been notified that writeLock was acquired", 1, latch.getCount()); + assertEquals( + "we should not have been notified that writeLock was acquired", 1, latch.getCount()); assertFalse("writeLock should not have been acquired", writeLock.isAcquired()); readLock.release(); - assertTrue("writeLock should have been acquired now that readlock was released", writeLock.isAcquired()); + assertTrue( + "writeLock should have been acquired now that readlock was released", + writeLock.isAcquired()); // Wait for the Zookeeper watch to fire + the thread to be unblocked and countdown the latch - // We'll wait up to 10 seconds here, so should be safe even if GC is extraordinarily high with a pause + // We'll wait up to 10 seconds here, so should be safe even if GC is extraordinarily high with a + // pause int i = 0; while (i < 1000 && latch.getCount() != 0) { Thread.sleep(10); @@ -167,17 +213,25 @@ private void monothreadedConfigSetTests(DistributedConfigSetLockFactory factory) assertTrue("configSetRL2 should have been acquired", configSetRL2.isAcquired()); DistributedLock configSetWL1 = factory.createLock(true, CONFIG_SET_NAME); - assertFalse("configSetWL1 should not have been acquired due to configSetRL1 and configSetRL2", configSetWL1.isAcquired()); + assertFalse( + "configSetWL1 should not have been acquired due to configSetRL1 and configSetRL2", + configSetWL1.isAcquired()); configSetRL1.release(); configSetRL2.release(); - assertTrue("configSetWL1 should have been acquired, configSetRL1 and configSetRL2 were released", configSetWL1.isAcquired()); + assertTrue( + "configSetWL1 should have been acquired, configSetRL1 and configSetRL2 were released", + configSetWL1.isAcquired()); DistributedLock configSetRL3 = factory.createLock(false, CONFIG_SET_NAME); - assertFalse("configSetRL3 should not have been acquired due to configSetWL1", configSetRL3.isAcquired()); + assertFalse( + "configSetRL3 should not have been acquired due to configSetWL1", + configSetRL3.isAcquired()); configSetWL1.release(); - assertTrue("configSetRL3 should have been acquired, configSetWL1 was released", configSetRL3.isAcquired()); + assertTrue( + "configSetRL3 should have been acquired, configSetWL1 was released", + configSetRL3.isAcquired()); configSetRL3.release(); @@ -189,42 +243,53 @@ private void monothreadedConfigSetTests(DistributedConfigSetLockFactory factory) } } - private void multithreadedConfigSetTests(DistributedConfigSetLockFactory factory) throws Exception { + private void multithreadedConfigSetTests(DistributedConfigSetLockFactory factory) + throws Exception { // Acquiring right away a read lock DistributedLock configSetRL1 = factory.createLock(false, CONFIG_SET_NAME); assertTrue("configSetRL1 should have been acquired", configSetRL1.isAcquired()); // And now creating a write lock, that can't be acquired just yet, because of the read lock DistributedLock configSetWL1 = factory.createLock(true, CONFIG_SET_NAME); - assertFalse("configSetWL1 should not have been acquired due to configSetRL1", configSetWL1.isAcquired()); + assertFalse( + "configSetWL1 should not have been acquired due to configSetRL1", + configSetWL1.isAcquired()); // Wait for acquisition of the write lock on another thread (and be notified via a latch) final CountDownLatch latch = new CountDownLatch(1); - new Thread(() -> { - configSetWL1.waitUntilAcquired(); - // countDown() will not be called if waitUntilAcquired() threw exception of any kind - latch.countDown(); - }).start(); - - // Wait for the 
thread to start and to get blocked in waitUntilAcquired() - // (thread start could have been checked more reliably using another latch, and verifying the thread is in waitUntilAcquired - // done through that thread stacktrace, but that would be overkill compared to the very slight race condition of waiting 30ms, - // but a race that would not cause the test to fail since we're testing... that nothing happened yet). + new Thread( + () -> { + configSetWL1.waitUntilAcquired(); + // countDown() will not be called if waitUntilAcquired() threw exception of any kind + latch.countDown(); + }) + .start(); + + // Wait for the thread to start and to get blocked in waitUntilAcquired() (thread start could + // have been checked more reliably using another latch, and verifying the thread is in + // waitUntilAcquired done through that thread stacktrace, but that would be overkill compared to + // the very slight race condition of waiting 30ms, but a race that would not cause the test to + // fail since we're testing... that nothing happened yet). Thread.sleep(30); - assertEquals("we should not have been notified that configSetWL1 was acquired", 1, latch.getCount()); + assertEquals( + "we should not have been notified that configSetWL1 was acquired", 1, latch.getCount()); assertFalse("configSetWL1 should not have been acquired", configSetWL1.isAcquired()); configSetRL1.release(); - assertTrue("configSetWL1 should have been acquired now that configSetRL1 was released", configSetWL1.isAcquired()); + assertTrue( + "configSetWL1 should have been acquired now that configSetRL1 was released", + configSetWL1.isAcquired()); - // Wait for the Zookeeper watch to fire + the thread to be unblocked and countdown the latch - // We'll wait up to 10 seconds here, so should be safe even if GC is extraordinarily high with a pause + // Wait for the Zookeeper watch to fire + the thread to be unblocked and countdown the latch. 
+ // We'll wait up to 10 seconds here, so should be safe even if GC is extraordinarily high with a + // pause int i = 0; while (i < 1000 && latch.getCount() != 0) { Thread.sleep(10); i++; } - assertEquals("we should have been notified that configSetWL1 was acquired", 0, latch.getCount()); + assertEquals( + "we should have been notified that configSetWL1 was acquired", 0, latch.getCount()); } -} \ No newline at end of file +} diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkFailoverTest.java b/solr/core/src/test/org/apache/solr/cloud/ZkFailoverTest.java index 1f54199972c..b914665bcc2 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ZkFailoverTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ZkFailoverTest.java @@ -35,12 +35,13 @@ public class ZkFailoverTest extends SolrCloudTestCase { public static void setupCluster() throws Exception { useFactory("solr.StandardDirectoryFactory"); configureCluster(2) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-dynamic").resolve("conf")) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-dynamic").resolve("conf")) .configure(); } @AfterClass - public static void resetWaitForZk(){ + public static void resetWaitForZk() { System.setProperty("waitForZk", "30"); } @@ -61,13 +62,13 @@ public void testRestartZkWhenClusterDown() throws Exception { restartSolrAndZk(); waitForLiveNodes(2); waitForState("Timeout waiting for " + coll, coll, clusterShape(2, 2)); - QueryResponse rsp = new QueryRequest(new SolrQuery("*:*")).process(cluster.getSolrClient(), coll); + QueryResponse rsp = + new QueryRequest(new SolrQuery("*:*")).process(cluster.getSolrClient(), coll); assertEquals(1, rsp.getResults().getNumFound()); zkTestServer.shutdown(); } - private void restartSolrAndZk() - throws Exception { + private void restartSolrAndZk() throws Exception { for (JettySolrRunner runner : cluster.getJettySolrRunners()) { runner.stop(); } @@ -75,13 +76,15 @@ private void restartSolrAndZk() Thread[] threads = new Thread[cluster.getJettySolrRunners().size()]; for (int i = 0; i < cluster.getJettySolrRunners().size(); i++) { final JettySolrRunner runner = cluster.getJettySolrRunner(i); - threads[i] = new Thread(() -> { - try { - runner.start(); - } catch (Exception e) { - e.printStackTrace(); - } - }); + threads[i] = + new Thread( + () -> { + try { + runner.start(); + } catch (Exception e) { + e.printStackTrace(); + } + }); threads[i].start(); } Thread.sleep(2500); diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkNodePropsTest.java b/solr/core/src/test/org/apache/solr/cloud/ZkNodePropsTest.java index 604c56b1aaf..b6fcb4bb6b3 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ZkNodePropsTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ZkNodePropsTest.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.util.HashMap; import java.util.Map; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.cloud.ZkNodeProps; import org.apache.solr.common.util.JavaBinCodec; @@ -30,15 +29,15 @@ public class ZkNodePropsTest extends SolrTestCaseJ4 { @Test public void testBasic() throws IOException { - - Map props = new HashMap<>(); + + Map props = new HashMap<>(); props.put("prop1", "value1"); props.put("prop2", "value2"); props.put("prop3", "value3"); props.put("prop4", "value4"); props.put("prop5", "value5"); props.put("prop6", "value6"); - + ZkNodeProps zkProps = new ZkNodeProps(props); byte[] bytes = Utils.toJSON(zkProps); ZkNodeProps props2 = ZkNodeProps.load(bytes); diff --git 
a/solr/core/src/test/org/apache/solr/cloud/ZkShardTermsTest.java b/solr/core/src/test/org/apache/solr/cloud/ZkShardTermsTest.java index d17c82a8f0f..5db31574fd7 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ZkShardTermsTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ZkShardTermsTest.java @@ -30,7 +30,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Supplier; - import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.common.util.TimeSource; @@ -47,15 +46,17 @@ public class ZkShardTermsTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { configureCluster(1) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .configure(); } @Test - // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 15-Sep-2018 - public void testParticipationOfReplicas() throws IOException, SolrServerException, InterruptedException { + public void testParticipationOfReplicas() + throws IOException, SolrServerException, InterruptedException { String collection = "collection1"; - try (ZkShardTerms zkShardTerms = new ZkShardTerms(collection, "shard2", cluster.getZkClient())) { + try (ZkShardTerms zkShardTerms = + new ZkShardTerms(collection, "shard2", cluster.getZkClient())) { zkShardTerms.registerTerm("replica1"); zkShardTerms.registerTerm("replica2"); zkShardTerms.ensureTermsIsHigher("replica1", Collections.singleton("replica2")); @@ -65,20 +66,23 @@ public void testParticipationOfReplicas() throws IOException, SolrServerExceptio CollectionAdminRequest.createCollection(collection, 2, 2) .setCreateNodeSet(cluster.getJettySolrRunner(0).getNodeName()) .process(cluster.getSolrClient()); - try (ZkShardTerms zkShardTerms = new ZkShardTerms(collection, "shard1", cluster.getZkClient())) { + try (ZkShardTerms zkShardTerms = + new ZkShardTerms(collection, "shard1", cluster.getZkClient())) { waitFor(2, () -> zkShardTerms.getTerms().size()); - assertArrayEquals(new Long[]{0L, 0L}, zkShardTerms.getTerms().values().toArray(new Long[2])); + assertArrayEquals(new Long[] {0L, 0L}, zkShardTerms.getTerms().values().toArray(new Long[2])); } - try (ZkShardTerms zkShardTerms = new ZkShardTerms(collection, "shard2", cluster.getZkClient())) { + try (ZkShardTerms zkShardTerms = + new ZkShardTerms(collection, "shard2", cluster.getZkClient())) { waitFor(2, () -> zkShardTerms.getTerms().size()); - assertArrayEquals(new Long[]{0L, 0L}, zkShardTerms.getTerms().values().toArray(new Long[2])); + assertArrayEquals(new Long[] {0L, 0L}, zkShardTerms.getTerms().values().toArray(new Long[2])); } } @Test public void testRecoveringFlag() { String collection = "recoveringFlag"; - try (ZkShardTerms zkShardTerms = new ZkShardTerms(collection, "shard1", cluster.getZkClient())) { + try (ZkShardTerms zkShardTerms = + new ZkShardTerms(collection, "shard1", cluster.getZkClient())) { // List all possible orders of ensureTermIsHigher, startRecovering, doneRecovering zkShardTerms.registerTerm("replica1"); zkShardTerms.registerTerm("replica2"); @@ -124,14 +128,14 @@ public void testRecoveringFlag() { assertEquals(zkShardTerms.getTerm("replica2_recovering"), 3); zkShardTerms.doneRecovering("replica2"); 
assertEquals(zkShardTerms.getTerm("replica2_recovering"), -1); - } } @Test public void testCoreRemovalWhileRecovering() { String collection = "recoveringFlagRemoval"; - try (ZkShardTerms zkShardTerms = new ZkShardTerms(collection, "shard1", cluster.getZkClient())) { + try (ZkShardTerms zkShardTerms = + new ZkShardTerms(collection, "shard1", cluster.getZkClient())) { // List all possible orders of ensureTermIsHigher, startRecovering, doneRecovering zkShardTerms.registerTerm("replica1_rem"); zkShardTerms.registerTerm("replica2_rem"); @@ -142,7 +146,8 @@ public void testCoreRemovalWhileRecovering() { assertEquals(zkShardTerms.getTerm("replica2_rem"), 1); assertEquals(zkShardTerms.getTerm("replica2_rem_recovering"), 0); - // Remove core, and check if the correct core was removed as well as the recovering term for that core + // Remove core, and check if the correct core was removed as well as the recovering term for + // that core zkShardTerms.removeTerm("replica2_rem"); assertEquals(zkShardTerms.getTerm("replica1_rem"), 1); assertEquals(zkShardTerms.getTerm("replica2_rem"), -1); @@ -157,7 +162,8 @@ public void testRegisterTerm() throws InterruptedException { rep1Terms.registerTerm("rep1"); rep2Terms.registerTerm("rep2"); - try (ZkShardTerms zkShardTerms = new ZkShardTerms(collection, "shard1", cluster.getZkClient())) { + try (ZkShardTerms zkShardTerms = + new ZkShardTerms(collection, "shard1", cluster.getZkClient())) { assertEquals(0L, zkShardTerms.getTerm("rep1")); assertEquals(0L, zkShardTerms.getTerm("rep2")); } @@ -183,7 +189,8 @@ public void testRegisterTerm() throws InterruptedException { TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, new TimeSource.CurrentTimeSource()); while (!timeOut.hasTimedOut()) { - if (Objects.equals(expectedTerms, rep1Terms.getTerms()) && Objects.equals(expectedTerms, rep2Terms.getTerms())) break; + if (Objects.equals(expectedTerms, rep1Terms.getTerms()) + && Objects.equals(expectedTerms, rep2Terms.getTerms())) break; } if (timeOut.hasTimedOut()) fail("Expected zkShardTerms must stay updated"); @@ -196,7 +203,8 @@ public void testRaceConditionOnUpdates() throws InterruptedException { String collection = "raceConditionOnUpdates"; List<String> replicas = Arrays.asList("rep1", "rep2", "rep3", "rep4"); for (String replica : replicas) { - try (ZkShardTerms zkShardTerms = new ZkShardTerms(collection, "shard1", cluster.getZkClient())) { + try (ZkShardTerms zkShardTerms = + new ZkShardTerms(collection, "shard1", cluster.getZkClient())) { zkShardTerms.registerTerm(replica); } } @@ -210,18 +218,21 @@ public void testRaceConditionOnUpdates() throws InterruptedException { Thread[] threads = new Thread[failedReplicas.size()]; for (int i = 0; i < failedReplicas.size(); i++) { String replica = failedReplicas.get(i); - threads[i] = new Thread(() -> { - try (ZkShardTerms zkShardTerms = new ZkShardTerms(collection, "shard1", cluster.getZkClient())) { - while (!stop.get()) { - try { - Thread.sleep(random().nextInt(200)); - zkShardTerms.setTermEqualsToLeader(replica); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - } - }); + threads[i] = + new Thread( + () -> { + try (ZkShardTerms zkShardTerms = + new ZkShardTerms(collection, "shard1", cluster.getZkClient())) { + while (!stop.get()) { + try { + Thread.sleep(random().nextInt(200)); + zkShardTerms.setTermEqualsToLeader(replica); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + }); threads[i].start(); } @@ -231,7 +242,8 @@ public void testRaceConditionOnUpdates() throws InterruptedException 
{ TimeOut timeOut = new TimeOut(10, TimeUnit.SECONDS, new TimeSource.CurrentTimeSource()); while (!timeOut.hasTimedOut()) { maxTerm++; - assertEquals(shardTerms.getTerms().get("leader"), Collections.max(shardTerms.getTerms().values())); + assertEquals( + shardTerms.getTerms().get("leader"), Collections.max(shardTerms.getTerms().values())); Thread.sleep(100); } assertTrue(maxTerm >= Collections.max(shardTerms.getTerms().values())); @@ -263,7 +275,6 @@ public void testCoreTermWatcher() throws InterruptedException { replicaTerms.close(); } - public void testSetTermToZero() { String collection = "setTermToZero"; ZkShardTerms terms = new ZkShardTerms(collection, "shard1", cluster.getZkClient()); @@ -326,5 +337,4 @@ private <T> void waitFor(T expected, Supplier<T> supplier) throws InterruptedExc } assertEquals(expected, supplier.get()); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkSolrClientTest.java b/solr/core/src/test/org/apache/solr/cloud/ZkSolrClientTest.java index 3f86835128c..3992c19ca81 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ZkSolrClientTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ZkSolrClientTest.java @@ -16,6 +16,8 @@ */ package org.apache.solr.cloud; +import static org.apache.solr.cloud.SolrCloudTestCase.configureCluster; + import java.io.IOException; import java.nio.file.Path; import java.util.Arrays; @@ -25,7 +27,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.cloud.SolrZkClient; import org.apache.solr.common.cloud.ZkCmdExecutor; @@ -39,8 +40,6 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.apache.solr.cloud.SolrCloudTestCase.configureCluster; - public class ZkSolrClientTest extends SolrTestCaseJ4 { @BeforeClass @@ -55,7 +54,7 @@ static class ZkConnection implements AutoCloseable { private SolrZkClient zkClient = null; ZkConnection() throws Exception { - this (true); + this(true); } ZkConnection(boolean makeRoot) throws Exception { @@ -66,11 +65,11 @@ static class ZkConnection implements AutoCloseable { zkClient = new SolrZkClient(server.getZkAddress(), AbstractZkTestCase.TIMEOUT); } - public ZkTestServer getServer () { + public ZkTestServer getServer() { return server; } - public SolrZkClient getClient () { + public SolrZkClient getClient() { return zkClient; } @@ -83,15 +82,16 @@ public void close() throws IOException, InterruptedException { @SuppressWarnings({"try"}) public void testConnect() throws Exception { - try (ZkConnection conn = new ZkConnection (false)) { + try (ZkConnection conn = new ZkConnection(false)) { // do nothing } } @SuppressWarnings({"try"}) public void testMakeRootNode() throws Exception { - try (ZkConnection conn = new ZkConnection ()) { - final SolrZkClient zkClient = new SolrZkClient(conn.getServer().getZkHost(), AbstractZkTestCase.TIMEOUT); + try (ZkConnection conn = new ZkConnection()) { + final SolrZkClient zkClient = + new SolrZkClient(conn.getServer().getZkHost(), AbstractZkTestCase.TIMEOUT); try { assertTrue(zkClient.exists("/solr", true)); } finally { @@ -102,7 +102,7 @@ public void testMakeRootNode() throws Exception { @SuppressWarnings({"try"}) public void testClean() throws Exception { - try (ZkConnection conn = new ZkConnection ()) { + try (ZkConnection conn = new ZkConnection()) { final SolrZkClient zkClient = conn.getClient(); zkClient.makePath("/test/path/here", true); @@ -121,7 +121,8 @@ public void testReconnect() 
throws Exception { ZkTestServer server = null; server = new ZkTestServer(zkDir); server.run(); - try (SolrZkClient zkClient = new SolrZkClient(server.getZkAddress(), AbstractZkTestCase.TIMEOUT);) { + try (SolrZkClient zkClient = + new SolrZkClient(server.getZkAddress(), AbstractZkTestCase.TIMEOUT); ) { String shardsPath = "/collections/collection1/shards"; zkClient.makePath(shardsPath, false, true); @@ -132,16 +133,17 @@ public void testReconnect() throws Exception { Thread.sleep(80); - Thread thread = new Thread() { - public void run() { - try { - zkClient.makePath("collections/collection2", false); - // Assert.fail("Server should be down here"); - } catch (KeeperException | InterruptedException e) { + Thread thread = + new Thread() { + public void run() { + try { + zkClient.makePath("collections/collection2", false); + // Assert.fail("Server should be down here"); + } catch (KeeperException | InterruptedException e) { - } - } - }; + } + } + }; thread.start(); @@ -153,53 +155,57 @@ public void run() { // wait for reconnect Thread.sleep(600); - Thread thread2 = new Thread() { - public void run() { - try { + Thread thread2 = + new Thread() { + public void run() { + try { - zkClient.makePath("collections/collection3", true); + zkClient.makePath("collections/collection3", true); - } catch (KeeperException e) { - throw new RuntimeException(e); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - } - }; + } catch (KeeperException e) { + throw new RuntimeException(e); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + }; thread2.start(); thread.join(); - + thread2.join(); assertNotNull(zkClient.exists("/collections/collection3", null, true)); assertNotNull(zkClient.exists("/collections/collection1", null, true)); - + // simulate session expiration - + // one option long sessionId = zkClient.getSolrZooKeeper().getSessionId(); server.expire(sessionId); - + // another option - //zkClient.getSolrZooKeeper().getConnection().disconnect(); + // zkClient.getSolrZooKeeper().getConnection().disconnect(); // this tests expired state Thread.sleep(1000); // pause for reconnect - + for (int i = 0; i < 8; i++) { try { zkClient.makePath("collections/collection4", true); break; - } catch (KeeperException.SessionExpiredException | KeeperException.ConnectionLossException e) { + } catch (KeeperException.SessionExpiredException + | KeeperException.ConnectionLossException e) { } Thread.sleep(1000 * i); } - assertNotNull("Node does not exist, but it should", zkClient.exists("/collections/collection4", null, true)); + assertNotNull( + "Node does not exist, but it should", + zkClient.exists("/collections/collection4", null, true)); } finally { @@ -208,7 +214,7 @@ public void run() { } } } - + public void testZkCmdExectutor() throws Exception { Path zkDir = createTempDir("zkData"); ZkTestServer server = null; @@ -218,17 +224,21 @@ public void testZkCmdExectutor() throws Exception { server.run(); final int timeout = random().nextInt(10000) + 5000; - + ZkCmdExecutor zkCmdExecutor = new ZkCmdExecutor(timeout); final long start = System.nanoTime(); - expectThrows(KeeperException.SessionExpiredException.class, () -> { - zkCmdExecutor.retryOperation(() -> { - if (System.nanoTime() - start > TimeUnit.NANOSECONDS.convert(timeout, TimeUnit.MILLISECONDS)) { - throw new KeeperException.SessionExpiredException(); - } - throw new KeeperException.ConnectionLossException(); - }); - }); + expectThrows( + KeeperException.SessionExpiredException.class, + () -> { + 
zkCmdExecutor.retryOperation( + () -> { + if (System.nanoTime() - start + > TimeUnit.NANOSECONDS.convert(timeout, TimeUnit.MILLISECONDS)) { + throw new KeeperException.SessionExpiredException(); + } + throw new KeeperException.ConnectionLossException(); + }); + }); } finally { if (server != null) { server.shutdown(); } } } @@ -249,37 +259,45 @@ public void testMultipleWatchesAsync() throws Exception { final Set<String> collectionsInProgress = new HashSet<>(numColls); AtomicInteger maxCollectionsInProgress = new AtomicInteger(); - for (int i = 1; i <= numColls; i ++) { String collPath = "/collections/collection" + i; zkClient.makePath(collPath, true); - zkClient.getChildren(collPath, new Watcher() { - @Override - public void process(WatchedEvent event) { - synchronized (collectionsInProgress) { - collectionsInProgress.add(event.getPath()); // Will be something like /collections/collection## - maxCollectionsInProgress.set(Math.max(maxCollectionsInProgress.get(), collectionsInProgress.size())); - } - latch.countDown(); - try { - latch.await(10000, TimeUnit.MILLISECONDS); - } - catch (InterruptedException e) {} - synchronized (collectionsInProgress) { - collectionsInProgress.remove(event.getPath()); - } - watchesDone.countDown(); - } - }, true); + zkClient.getChildren( + collPath, + new Watcher() { + @Override + public void process(WatchedEvent event) { + synchronized (collectionsInProgress) { + collectionsInProgress.add( + event.getPath()); // Will be something like /collections/collection## + maxCollectionsInProgress.set( + Math.max(maxCollectionsInProgress.get(), collectionsInProgress.size())); + } + latch.countDown(); + try { + latch.await(10000, TimeUnit.MILLISECONDS); + } catch (InterruptedException e) { + } + synchronized (collectionsInProgress) { + collectionsInProgress.remove(event.getPath()); + } + watchesDone.countDown(); + } + }, + true); } - for (int i = 1; i <= numColls; i ++) { + for (int i = 1; i <= numColls; i++) { String shardsPath = "/collections/collection" + i + "/shards"; zkClient.makePath(shardsPath, true); } assertTrue(latch.await(10000, TimeUnit.MILLISECONDS)); - assertEquals("All collections should have been processed in parallel", numColls, maxCollectionsInProgress.get()); - + assertEquals( + "All collections should have been processed in parallel", + numColls, + maxCollectionsInProgress.get()); + // just as sanity check for the test: assertTrue(watchesDone.await(10000, TimeUnit.MILLISECONDS)); synchronized (collectionsInProgress) { @@ -290,50 +308,52 @@ public void process(WatchedEvent event) { @SuppressWarnings({"try"}) public void testWatchChildren() throws Exception { - try (ZkConnection conn = new ZkConnection ()) { + try (ZkConnection conn = new ZkConnection()) { final SolrZkClient zkClient = conn.getClient(); final AtomicInteger cnt = new AtomicInteger(); final CountDownLatch latch = new CountDownLatch(1); zkClient.makePath("/collections", true); - zkClient.getChildren("/collections", new Watcher() { - - @Override - public void process(WatchedEvent event) { - cnt.incrementAndGet(); - // remake watch - try { - zkClient.getChildren("/collections", this, true); - latch.countDown(); - } catch (KeeperException | InterruptedException e) { - throw new RuntimeException(e); - } - } - }, true); + zkClient.getChildren( + "/collections", + new Watcher() { + + @Override + public void process(WatchedEvent event) { + cnt.incrementAndGet(); + // remake watch + try { + zkClient.getChildren("/collections", this, true); + latch.countDown(); + } catch 
(KeeperException | InterruptedException e) { + throw new RuntimeException(e); + } + } + }, + true); zkClient.makePath("/collections/collection99/shards", true); - latch.await(); //wait until watch has been re-created + latch.await(); // wait until watch has been re-created zkClient.makePath("collections/collection99/config=collection1", true); zkClient.makePath("collections/collection99/config=collection3", true); - + zkClient.makePath("/collections/collection97/shards", true); - + // pause for the watches to fire Thread.sleep(700); - + if (cnt.intValue() < 2) { Thread.sleep(4000); // wait a bit more } - + if (cnt.intValue() < 2) { Thread.sleep(4000); // wait a bit more } - - assertEquals(2, cnt.intValue()); + assertEquals(2, cnt.intValue()); } } @@ -345,73 +365,99 @@ public void testSkipPathPartsOnMakePath() throws Exception { zkClient.makePath("/test", true); // should work - zkClient.makePath("/test/path/here", (byte[]) null, CreateMode.PERSISTENT, (Watcher) null, true, true, 1); + zkClient.makePath( + "/test/path/here", (byte[]) null, CreateMode.PERSISTENT, (Watcher) null, true, true, 1); zkClient.clean("/"); // should not work - KeeperException e =expectThrows(KeeperException.NoNodeException.class, - "We should not be able to create this path", - () -> zkClient.makePath("/test/path/here", (byte[]) null, CreateMode.PERSISTENT, (Watcher) null, true, true, 1)); + KeeperException e = + expectThrows( + KeeperException.NoNodeException.class, + "We should not be able to create this path", + () -> + zkClient.makePath( + "/test/path/here", + (byte[]) null, + CreateMode.PERSISTENT, + (Watcher) null, + true, + true, + 1)); zkClient.clean("/"); ZkCmdExecutor zkCmdExecutor = new ZkCmdExecutor(30000); - expectThrows(KeeperException.NoNodeException.class, + expectThrows( + KeeperException.NoNodeException.class, "We should not be able to create this path", - () -> zkCmdExecutor.ensureExists("/collection/collection/leader", (byte[]) null, CreateMode.PERSISTENT, zkClient, 2)); + () -> + zkCmdExecutor.ensureExists( + "/collection/collection/leader", + (byte[]) null, + CreateMode.PERSISTENT, + zkClient, + 2)); zkClient.makePath("/collection", true); - expectThrows(KeeperException.NoNodeException.class, + expectThrows( + KeeperException.NoNodeException.class, "We should not be able to create this path", - () -> zkCmdExecutor.ensureExists("/collections/collection/leader", (byte[]) null, CreateMode.PERSISTENT, zkClient, 2)); + () -> + zkCmdExecutor.ensureExists( + "/collections/collection/leader", + (byte[]) null, + CreateMode.PERSISTENT, + zkClient, + 2)); zkClient.makePath("/collection/collection", true); - + byte[] bytes = new byte[10]; - zkCmdExecutor.ensureExists("/collection/collection", bytes, CreateMode.PERSISTENT, zkClient, 2); - + zkCmdExecutor.ensureExists( + "/collection/collection", bytes, CreateMode.PERSISTENT, zkClient, 2); + byte[] returnedBytes = zkClient.getData("/collection/collection", null, null, true); - + assertNull("We skipped 2 path parts, so data won't be written", returnedBytes); zkClient.makePath("/collection/collection/leader", true); - zkCmdExecutor.ensureExists("/collection/collection/leader", (byte[]) null, CreateMode.PERSISTENT, zkClient, 2); - + zkCmdExecutor.ensureExists( + "/collection/collection/leader", (byte[]) null, CreateMode.PERSISTENT, zkClient, 2); } } public void testZkBehavior() throws Exception { MiniSolrCloudCluster cluster = - configureCluster(4) - .withJettyConfig(jetty -> jetty.enableV2(true)) - .configure(); + configureCluster(4).withJettyConfig(jetty -> 
jetty.enableV2(true)).configure(); try { SolrZkClient zkClient = cluster.getZkClient(); zkClient.create("/test-node", null, CreateMode.PERSISTENT, true); Stat stat = zkClient.exists("/test-node", null, true); int cversion = stat.getCversion(); - List<Op> ops = Arrays.asList( - Op.create("/test-node/abc", null, zkClient.getZkACLProvider().getACLsToAdd("/test-node/abc"), CreateMode.PERSISTENT), - Op.delete("/test-node/abc", -1)); + List<Op> ops = + Arrays.asList( + Op.create( + "/test-node/abc", + null, + zkClient.getZkACLProvider().getACLsToAdd("/test-node/abc"), + CreateMode.PERSISTENT), + Op.delete("/test-node/abc", -1)); zkClient.multi(ops, true); stat = zkClient.exists("/test-node", null, true); assertTrue(stat.getCversion() >= cversion + 2); } finally { cluster.shutdown(); } - } - - @Override public void tearDown() throws Exception { super.tearDown(); } - + @AfterClass public static void afterClass() throws InterruptedException { // wait just a bit for any zk client threads to outlast timeout diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/AsyncCallRequestStatusResponseTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/AsyncCallRequestStatusResponseTest.java index 9c4bd3b3b37..a2ddd4756b7 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/AsyncCallRequestStatusResponseTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/AsyncCallRequestStatusResponseTest.java @@ -20,9 +20,9 @@ import org.apache.solr.client.solrj.request.CollectionAdminRequest.Create; import org.apache.solr.client.solrj.response.CollectionAdminResponse; import org.apache.solr.client.solrj.response.RequestStatusState; -import org.apache.solr.cloud.api.collections.CollectionHandlingUtils.ShardRequestTracker; import org.apache.solr.cloud.AbstractFullDistribZkTestBase; import org.apache.solr.cloud.SolrCloudTestCase; +import org.apache.solr.cloud.api.collections.CollectionHandlingUtils.ShardRequestTracker; import org.apache.solr.common.util.NamedList; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -37,11 +37,9 @@ public class AsyncCallRequestStatusResponseTest extends SolrCloudTestCase { public static void setupCluster() throws Exception { oldResponseEntries = ShardRequestTracker.INCLUDE_TOP_LEVEL_RESPONSE; ShardRequestTracker.INCLUDE_TOP_LEVEL_RESPONSE = random().nextBoolean(); - configureCluster(2) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(2).addConfig("conf", configset("cloud-minimal")).configure(); } - + @SuppressWarnings("deprecation") @AfterClass public static void restoreFlag() throws Exception { @@ -53,24 +51,35 @@ public static void restoreFlag() throws Exception { public void testAsyncCallStatusResponse() throws Exception { int numShards = 4; int numReplicas = 1; - Create createCollection = CollectionAdminRequest.createCollection("asynccall", "conf", numShards, numReplicas); - String asyncId = - createCollection.processAsync(cluster.getSolrClient()); + Create createCollection = + CollectionAdminRequest.createCollection("asynccall", "conf", numShards, numReplicas); + String asyncId = createCollection.processAsync(cluster.getSolrClient()); - waitForState("Expected collection 'asynccall' to have "+numShards+" shards and "+ - numShards*numReplicas+" replica", "asynccall", clusterShape(numShards, numShards*numReplicas)); + waitForState( + "Expected collection 'asynccall' to have " + + numShards + + " shards and " + + numShards * numReplicas + + " replica", + "asynccall", + 
clusterShape(numShards, numShards * numReplicas)); - RequestStatusState state = AbstractFullDistribZkTestBase.getRequestStateAfterCompletion(asyncId, 30, cluster.getSolrClient()); + RequestStatusState state = + AbstractFullDistribZkTestBase.getRequestStateAfterCompletion( + asyncId, 30, cluster.getSolrClient()); assertEquals("Unexpected request status: " + state, "completed", state.getKey()); - CollectionAdminRequest.RequestStatus requestStatus = CollectionAdminRequest.requestStatus(asyncId); + CollectionAdminRequest.RequestStatus requestStatus = + CollectionAdminRequest.requestStatus(asyncId); CollectionAdminResponse rsp = requestStatus.process(cluster.getSolrClient()); NamedList r = rsp.getResponse(); if (ShardRequestTracker.INCLUDE_TOP_LEVEL_RESPONSE) { - final int actualNumOfElems = 3+(numShards*numReplicas); - // responseHeader, success, status, + old responses per every replica - assertEquals("Expected "+actualNumOfElems+" elements in the response" + r.jsonStr(), - actualNumOfElems, r.size()); + final int actualNumOfElems = 3 + (numShards * numReplicas); + // responseHeader, success, status, + old responses per every replica + assertEquals( + "Expected " + actualNumOfElems + " elements in the response" + r.jsonStr(), + actualNumOfElems, + r.size()); } else { // responseHeader, success, status assertEquals("Expected 3 elements in the response" + r.jsonStr(), 3, r.size()); @@ -78,14 +87,15 @@ public void testAsyncCallStatusResponse() throws Exception { assertNotNull("Expected 'responseHeader' response" + r, r.get("responseHeader")); assertNotNull("Expected 'status' response" + r, r.get("status")); { - final NamedList success = (NamedList)r.get("success"); + final NamedList success = (NamedList) r.get("success"); assertNotNull("Expected 'success' response" + r, success); - - final int actualSuccessElems = 2*(numShards*numReplicas); + + final int actualSuccessElems = 2 * (numShards * numReplicas); // every replica responds once on submit and once on complete - assertEquals("Expected "+actualSuccessElems+ - " elements in the success element" + success.jsonStr(), - actualSuccessElems, success.size()); + assertEquals( + "Expected " + actualSuccessElems + " elements in the success element" + success.jsonStr(), + actualSuccessElems, + success.size()); } } } diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/BackupRestoreApiErrorConditionsTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/BackupRestoreApiErrorConditionsTest.java index fc6ff06e0cc..6ae68141d7e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/BackupRestoreApiErrorConditionsTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/BackupRestoreApiErrorConditionsTest.java @@ -16,6 +16,8 @@ */ package org.apache.solr.cloud.api.collections; +import java.io.File; +import java.lang.invoke.MethodHandles; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.response.RequestStatusState; import org.apache.solr.cloud.MiniSolrCloudCluster; @@ -26,12 +28,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.File; -import java.lang.invoke.MethodHandles; - - /** - * Integration test verifying particular errors are reported correctly by the Collection-level backup/restore APIs + * Integration test verifying particular errors are reported correctly by the Collection-level + * backup/restore APIs */ public class BackupRestoreApiErrorConditionsTest extends SolrCloudTestCase { // TODO could these be 
unit tests somehow and still validate the response users see with certainty @@ -58,22 +57,25 @@ public static void setUpClass() throws Exception { String solrXml = MiniSolrCloudCluster.DEFAULT_CLOUD_SOLR_XML; String local = - "<backup>" + - "<repository name=\"local\" class=\"org.apache.solr.core.backup.repository.LocalFileSystemRepository\">" + - "</repository>" + - "</backup>"; + "<backup>" + + "<repository name=\"local\" class=\"org.apache.solr.core.backup.repository.LocalFileSystemRepository\">" + + "</repository>" + + "</backup>"; solrXml = solrXml.replace("</solr>", local + "</solr>"); - configureCluster(NUM_SHARDS)// nodes - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) - .withSolrXml(solrXml) - .configure(); + configureCluster(NUM_SHARDS) // nodes + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .withSolrXml(solrXml) + .configure(); - final RequestStatusState createState = CollectionAdminRequest.createCollection(COLLECTION_NAME, "conf1", NUM_SHARDS, NUM_REPLICAS) + final RequestStatusState createState = + CollectionAdminRequest.createCollection(COLLECTION_NAME, "conf1", NUM_SHARDS, NUM_REPLICAS) .processAndWait(cluster.getSolrClient(), ASYNC_COMMAND_WAIT_PERIOD_MILLIS); assertEquals(RequestStatusState.COMPLETED, createState); - final RequestStatusState backupState = CollectionAdminRequest.backupCollection(COLLECTION_NAME, BACKUP_NAME) + final RequestStatusState backupState = + CollectionAdminRequest.backupCollection(COLLECTION_NAME, BACKUP_NAME) .setRepositoryName(VALID_REPOSITORY_NAME) .setLocation(validBackupLocation) .processAndWait(cluster.getSolrClient(), ASYNC_COMMAND_WAIT_PERIOD_MILLIS); @@ -86,83 +88,117 @@ public static void tearDownClass() throws Exception { } @Test - public void testBackupOperationsReportErrorWhenUnknownBackupRepositoryRequested() throws Exception { + public void testBackupOperationsReportErrorWhenUnknownBackupRepositoryRequested() + throws Exception { // Check message for create-backup - Exception e = expectThrows(Exception.class, () -> { - CollectionAdminRequest.backupCollection(COLLECTION_NAME, BACKUP_NAME) - .setRepositoryName("some-nonexistent-repo-name") - .setLocation(validBackupLocation) - .process(cluster.getSolrClient()); - }); - assertTrue(e.getMessage().contains("Could not find a backup repository with name some-nonexistent-repo-name")); + Exception e = + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.backupCollection(COLLECTION_NAME, BACKUP_NAME) + .setRepositoryName("some-nonexistent-repo-name") + .setLocation(validBackupLocation) + .process(cluster.getSolrClient()); + }); + assertTrue( + e.getMessage() + .contains("Could not find a backup repository with name some-nonexistent-repo-name")); // Check message for list-backup - e = expectThrows(Exception.class, () -> { - CollectionAdminRequest.listBackup(BACKUP_NAME) - .setBackupLocation(validBackupLocation) - .setBackupRepository("some-nonexistent-repo-name") - .process(cluster.getSolrClient()); - }); - assertTrue(e.getMessage().contains("Could not find a backup repository with name some-nonexistent-repo-name")); + e = + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.listBackup(BACKUP_NAME) + .setBackupLocation(validBackupLocation) + .setBackupRepository("some-nonexistent-repo-name") + .process(cluster.getSolrClient()); + }); + assertTrue( + e.getMessage() + .contains("Could not find a backup repository with name some-nonexistent-repo-name")); // Check message for delete-backup - e = expectThrows(Exception.class, () -> { - CollectionAdminRequest.deleteBackupById(BACKUP_NAME, 1) - .setLocation(validBackupLocation) - .setRepositoryName("some-nonexistent-repo-name") - .process(cluster.getSolrClient()); - }); - 
assertTrue(e.getMessage().contains("Could not find a backup repository with name some-nonexistent-repo-name")); + e = + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.deleteBackupById(BACKUP_NAME, 1) + .setLocation(validBackupLocation) + .setRepositoryName("some-nonexistent-repo-name") + .process(cluster.getSolrClient()); + }); + assertTrue( + e.getMessage() + .contains("Could not find a backup repository with name some-nonexistent-repo-name")); // Check message for restore-backup - e = expectThrows(Exception.class, () -> { - CollectionAdminRequest.restoreCollection(COLLECTION_NAME + "_restored", BACKUP_NAME) - .setLocation(validBackupLocation) - .setRepositoryName("some-nonexistent-repo-name") - .process(cluster.getSolrClient()); - }); - assertTrue(e.getMessage().contains("Could not find a backup repository with name some-nonexistent-repo-name")); + e = + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.restoreCollection(COLLECTION_NAME + "_restored", BACKUP_NAME) + .setLocation(validBackupLocation) + .setRepositoryName("some-nonexistent-repo-name") + .process(cluster.getSolrClient()); + }); + assertTrue( + e.getMessage() + .contains("Could not find a backup repository with name some-nonexistent-repo-name")); } @Test public void testBackupOperationsReportErrorWhenNonexistentLocationProvided() { // Check message for create-backup - Exception e = expectThrows(Exception.class, () -> { - CollectionAdminRequest.backupCollection(COLLECTION_NAME, BACKUP_NAME) - .setRepositoryName(VALID_REPOSITORY_NAME) - .setLocation(validBackupLocation + File.pathSeparator + "someNonexistentLocation") - .process(cluster.getSolrClient()); - }); + Exception e = + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.backupCollection(COLLECTION_NAME, BACKUP_NAME) + .setRepositoryName(VALID_REPOSITORY_NAME) + .setLocation(validBackupLocation + File.pathSeparator + "someNonexistentLocation") + .process(cluster.getSolrClient()); + }); assertTrue(e.getMessage().contains("specified location")); assertTrue(e.getMessage().contains("does not exist")); // Check message for list-backup - e = expectThrows(Exception.class, () -> { - CollectionAdminRequest.listBackup(BACKUP_NAME) - .setBackupLocation(validBackupLocation + File.pathSeparator + "someNonexistentLocation") - .setBackupRepository(VALID_REPOSITORY_NAME) - .process(cluster.getSolrClient()); - }); + e = + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.listBackup(BACKUP_NAME) + .setBackupLocation( + validBackupLocation + File.pathSeparator + "someNonexistentLocation") + .setBackupRepository(VALID_REPOSITORY_NAME) + .process(cluster.getSolrClient()); + }); assertTrue(e.getMessage().contains("specified location")); assertTrue(e.getMessage().contains("does not exist")); // Check message for delete-backup - e = expectThrows(Exception.class, () -> { - CollectionAdminRequest.deleteBackupById(BACKUP_NAME, 1) - .setLocation(validBackupLocation + File.pathSeparator + "someNonexistentLocation") - .setRepositoryName(VALID_REPOSITORY_NAME) - .process(cluster.getSolrClient()); - }); + e = + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.deleteBackupById(BACKUP_NAME, 1) + .setLocation(validBackupLocation + File.pathSeparator + "someNonexistentLocation") + .setRepositoryName(VALID_REPOSITORY_NAME) + .process(cluster.getSolrClient()); + }); assertTrue(e.getMessage().contains("specified location")); assertTrue(e.getMessage().contains("does not exist")); // Check message for restore-backup - 
e = expectThrows(Exception.class, () -> { - CollectionAdminRequest.restoreCollection(COLLECTION_NAME + "_restored", BACKUP_NAME) - .setLocation(validBackupLocation + File.pathSeparator + "someNonexistentLocation") - .setRepositoryName(VALID_REPOSITORY_NAME) - .process(cluster.getSolrClient()); - }); + e = + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.restoreCollection(COLLECTION_NAME + "_restored", BACKUP_NAME) + .setLocation(validBackupLocation + File.pathSeparator + "someNonexistentLocation") + .setRepositoryName(VALID_REPOSITORY_NAME) + .process(cluster.getSolrClient()); + }); assertTrue(e.getMessage().contains("specified location")); assertTrue(e.getMessage().contains("does not exist")); } @@ -170,7 +206,8 @@ public void testBackupOperationsReportErrorWhenNonexistentLocationProvided() { @Test public void testListAndDeleteFailOnOldBackupLocations() throws Exception { final String nonIncrementalBackupLocation = createTempDir().toAbsolutePath().toString(); - final RequestStatusState backupState = CollectionAdminRequest.backupCollection(COLLECTION_NAME, BACKUP_NAME) + final RequestStatusState backupState = + CollectionAdminRequest.backupCollection(COLLECTION_NAME, BACKUP_NAME) .setRepositoryName(VALID_REPOSITORY_NAME) .setLocation(nonIncrementalBackupLocation) .setIncremental(false) @@ -178,34 +215,49 @@ public void testListAndDeleteFailOnOldBackupLocations() throws Exception { assertEquals(RequestStatusState.COMPLETED, backupState); // Check message for list-backup - Exception e = expectThrows(Exception.class, () -> { - CollectionAdminRequest.listBackup(BACKUP_NAME) - .setBackupLocation(nonIncrementalBackupLocation) - .setBackupRepository(VALID_REPOSITORY_NAME) - .process(cluster.getSolrClient()); - }); + Exception e = + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.listBackup(BACKUP_NAME) + .setBackupLocation(nonIncrementalBackupLocation) + .setBackupRepository(VALID_REPOSITORY_NAME) + .process(cluster.getSolrClient()); + }); assertTrue(e.getMessage().contains("The backup name [backup_name] at location")); - assertTrue(e.getMessage().contains("holds a non-incremental (legacy) backup, but backup-listing is only supported on incremental backups")); + assertTrue( + e.getMessage() + .contains( + "holds a non-incremental (legacy) backup, but backup-listing is only supported on incremental backups")); // Check message for delete-backup - e = expectThrows(Exception.class, () -> { - CollectionAdminRequest.deleteBackupById(BACKUP_NAME, 1) - .setLocation(nonIncrementalBackupLocation) - .setRepositoryName(VALID_REPOSITORY_NAME) - .process(cluster.getSolrClient()); - }); + e = + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.deleteBackupById(BACKUP_NAME, 1) + .setLocation(nonIncrementalBackupLocation) + .setRepositoryName(VALID_REPOSITORY_NAME) + .process(cluster.getSolrClient()); + }); assertTrue(e.getMessage().contains("The backup name [backup_name] at location")); - assertTrue(e.getMessage().contains("holds a non-incremental (legacy) backup, but backup-deletion is only supported on incremental backups")); + assertTrue( + e.getMessage() + .contains( + "holds a non-incremental (legacy) backup, but backup-deletion is only supported on incremental backups")); } @Test public void testDeleteFailsOnNonexistentBackupId() { - Exception e = expectThrows(Exception.class, () -> { - CollectionAdminRequest.deleteBackupById(BACKUP_NAME, 123) - .setLocation(validBackupLocation) - .setRepositoryName(VALID_REPOSITORY_NAME) - 
.process(cluster.getSolrClient()); - }); + Exception e = + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.deleteBackupById(BACKUP_NAME, 123) + .setLocation(validBackupLocation) + .setRepositoryName(VALID_REPOSITORY_NAME) + .process(cluster.getSolrClient()); + }); assertTrue(e.getMessage().contains("Backup ID [123] not found; cannot be deleted")); } } diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionApiLockingTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionApiLockingTest.java index e59ec4d0105..9ab75b979c8 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionApiLockingTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionApiLockingTest.java @@ -18,7 +18,6 @@ package org.apache.solr.cloud.api.collections; import java.util.concurrent.CountDownLatch; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.cloud.DistributedMultiLock; import org.apache.solr.cloud.ZkDistributedCollectionLockFactory; @@ -44,7 +43,9 @@ public void monothreadedApiLockTests() throws Exception { try { server.run(); try (SolrZkClient zkClient = new SolrZkClient(server.getZkAddress(), TIMEOUT)) { - CollectionApiLockFactory apiLockFactory = new CollectionApiLockFactory(new ZkDistributedCollectionLockFactory(zkClient, "/apiLockTestRoot")); + CollectionApiLockFactory apiLockFactory = + new CollectionApiLockFactory( + new ZkDistributedCollectionLockFactory(zkClient, "/apiLockTestRoot")); monothreadedTests(apiLockFactory); multithreadedTests(apiLockFactory); @@ -55,53 +56,94 @@ public void monothreadedApiLockTests() throws Exception { } private void monothreadedTests(CollectionApiLockFactory apiLockingHelper) throws Exception { - // Lock at collection level (which prevents locking + acquiring on any other level of the hierarchy) - DistributedMultiLock collLock = apiLockingHelper.createCollectionApiLock(CollectionParams.LockLevel.COLLECTION, COLLECTION_NAME, null, null); + // Lock at collection level (which prevents locking + acquiring on any other level of the + // hierarchy) + DistributedMultiLock collLock = + apiLockingHelper.createCollectionApiLock( + CollectionParams.LockLevel.COLLECTION, COLLECTION_NAME, null, null); assertTrue("Collection should have been acquired", collLock.isAcquired()); - assertEquals("Lock at collection level expected to need one distributed lock", 1, collLock.getCountInternalLocks()); - - // Request a shard lock. Will not be acquired as long as we don't release the collection lock above - DistributedMultiLock shard1Lock = apiLockingHelper.createCollectionApiLock(CollectionParams.LockLevel.SHARD, COLLECTION_NAME, SHARD1_NAME, null); + assertEquals( + "Lock at collection level expected to need one distributed lock", + 1, + collLock.getCountInternalLocks()); + + // Request a shard lock. Will not be acquired as long as we don't release the collection lock + // above + DistributedMultiLock shard1Lock = + apiLockingHelper.createCollectionApiLock( + CollectionParams.LockLevel.SHARD, COLLECTION_NAME, SHARD1_NAME, null); assertFalse("Shard1 should not have been acquired", shard1Lock.isAcquired()); - assertEquals("Lock at shard level expected to need two distributed locks", 2, shard1Lock.getCountInternalLocks()); - - // Request a lock on another shard. 
Will not be acquired as long as we don't release the collection lock above - DistributedMultiLock shard2Lock = apiLockingHelper.createCollectionApiLock(CollectionParams.LockLevel.SHARD, COLLECTION_NAME, SHARD2_NAME, null); + assertEquals( + "Lock at shard level expected to need two distributed locks", + 2, + shard1Lock.getCountInternalLocks()); + + // Request a lock on another shard. Will not be acquired as long as we don't release the + // collection lock above + DistributedMultiLock shard2Lock = + apiLockingHelper.createCollectionApiLock( + CollectionParams.LockLevel.SHARD, COLLECTION_NAME, SHARD2_NAME, null); assertFalse("Shard2 should not have been acquired", shard2Lock.isAcquired()); assertTrue("Collection should still be acquired", collLock.isAcquired()); collLock.release(); - assertTrue("Shard1 should have been acquired now that collection lock released", shard1Lock.isAcquired()); - assertTrue("Shard2 should have been acquired now that collection lock released", shard2Lock.isAcquired()); + assertTrue( + "Shard1 should have been acquired now that collection lock released", + shard1Lock.isAcquired()); + assertTrue( + "Shard2 should have been acquired now that collection lock released", + shard2Lock.isAcquired()); // Request a lock on replica of shard1 - DistributedMultiLock replicaShard1Lock = apiLockingHelper.createCollectionApiLock(CollectionParams.LockLevel.SHARD, COLLECTION_NAME, SHARD1_NAME, REPLICA_NAME); - assertFalse("replicaShard1Lock should not have been acquired, shard1 is locked", replicaShard1Lock.isAcquired()); + DistributedMultiLock replicaShard1Lock = + apiLockingHelper.createCollectionApiLock( + CollectionParams.LockLevel.SHARD, COLLECTION_NAME, SHARD1_NAME, REPLICA_NAME); + assertFalse( + "replicaShard1Lock should not have been acquired, shard1 is locked", + replicaShard1Lock.isAcquired()); // Now ask for a new lock on the collection - collLock = apiLockingHelper.createCollectionApiLock(CollectionParams.LockLevel.COLLECTION, COLLECTION_NAME, null, null); + collLock = + apiLockingHelper.createCollectionApiLock( + CollectionParams.LockLevel.COLLECTION, COLLECTION_NAME, null, null); - assertFalse("Collection should not have been acquired, shard1 and shard2 locks preventing it", collLock.isAcquired()); + assertFalse( + "Collection should not have been acquired, shard1 and shard2 locks preventing it", + collLock.isAcquired()); shard1Lock.release(); - assertTrue("replicaShard1Lock should have been acquired, as shard1 got released", replicaShard1Lock.isAcquired()); - assertFalse("Collection should not have been acquired, shard2 lock is preventing it", collLock.isAcquired()); + assertTrue( + "replicaShard1Lock should have been acquired, as shard1 got released", + replicaShard1Lock.isAcquired()); + assertFalse( + "Collection should not have been acquired, shard2 lock is preventing it", + collLock.isAcquired()); replicaShard1Lock.release(); // Request a lock on replica of shard2 - DistributedMultiLock replicaShard2Lock = apiLockingHelper.createCollectionApiLock(CollectionParams.LockLevel.SHARD, COLLECTION_NAME, SHARD2_NAME, REPLICA_NAME); - assertFalse("replicaShard2Lock should not have been acquired, shard2 is locked", replicaShard2Lock.isAcquired()); + DistributedMultiLock replicaShard2Lock = + apiLockingHelper.createCollectionApiLock( + CollectionParams.LockLevel.SHARD, COLLECTION_NAME, SHARD2_NAME, REPLICA_NAME); + assertFalse( + "replicaShard2Lock should not have been acquired, shard2 is locked", + replicaShard2Lock.isAcquired()); shard2Lock.release(); - 
assertTrue("Collection should have been acquired as shard2 got released and replicaShard2Locks was requested after the collection lock", collLock.isAcquired()); - assertFalse("replicaShard2Lock should not have been acquired, collLock is locked", replicaShard2Lock.isAcquired()); + assertTrue( + "Collection should have been acquired as shard2 got released and replicaShard2Locks was requested after the collection lock", + collLock.isAcquired()); + assertFalse( + "replicaShard2Lock should not have been acquired, collLock is locked", + replicaShard2Lock.isAcquired()); collLock.release(); - assertTrue("replicaShard2Lock should have been acquired, the collection lock got released", replicaShard2Lock.isAcquired()); + assertTrue( + "replicaShard2Lock should have been acquired, the collection lock got released", + replicaShard2Lock.isAcquired()); // Release remaining lock to allow the multithreaded locking to succeed replicaShard2Lock.release(); @@ -109,41 +151,53 @@ private void monothreadedTests(CollectionApiLockFactory apiLockingHelper) throws private void multithreadedTests(CollectionApiLockFactory apiLockingHelper) throws Exception { // Lock on collection... - DistributedMultiLock collLock = apiLockingHelper.createCollectionApiLock(CollectionParams.LockLevel.COLLECTION, COLLECTION_NAME, null, null); + DistributedMultiLock collLock = + apiLockingHelper.createCollectionApiLock( + CollectionParams.LockLevel.COLLECTION, COLLECTION_NAME, null, null); assertTrue("Collection should have been acquired", collLock.isAcquired()); // ...blocks a lock on replica from being acquired - final DistributedMultiLock replicaShard1Lock = apiLockingHelper.createCollectionApiLock(CollectionParams.LockLevel.SHARD, COLLECTION_NAME, SHARD1_NAME, REPLICA_NAME); - assertFalse("replicaShard1Lock should not have been acquired, because collection is locked", replicaShard1Lock.isAcquired()); + final DistributedMultiLock replicaShard1Lock = + apiLockingHelper.createCollectionApiLock( + CollectionParams.LockLevel.SHARD, COLLECTION_NAME, SHARD1_NAME, REPLICA_NAME); + assertFalse( + "replicaShard1Lock should not have been acquired, because collection is locked", + replicaShard1Lock.isAcquired()); // Wait for acquisition of the replica lock on another thread (and be notified via a latch) final CountDownLatch latch = new CountDownLatch(1); - new Thread(() -> { - replicaShard1Lock.waitUntilAcquired(); - // countDown() will not be called if waitUntilAcquired() threw exception of any kind - latch.countDown(); - }).start(); - - // Wait for the thread to start and to get blocked in waitUntilAcquired() - // (thread start could have been checked more reliably using another latch, and verifying the thread is in waitUntilAcquired - // done through that thread stacktrace, but that would be overkill compared to the very slight race condition of waiting 30ms, - // but a race that would not cause the test to fail since we're testing... that nothing happened yet). 
+ new Thread( + () -> { + replicaShard1Lock.waitUntilAcquired(); + // countDown() will not be called if waitUntilAcquired() threw exception of any kind + latch.countDown(); + }) + .start(); + + // Wait for the thread to start and to get blocked in waitUntilAcquired() (thread start could + // have been checked more reliably using another latch, and verifying the thread is in + // waitUntilAcquired done through that thread stacktrace, but that would be overkill compared to + // the very slight race condition of waiting 30ms, but a race that would not cause the test to + // fail since we're testing... that nothing happened yet). Thread.sleep(30); assertEquals("we should not have been notified that replica was acquired", 1, latch.getCount()); assertFalse("replica lock should not have been acquired", replicaShard1Lock.isAcquired()); collLock.release(); - assertTrue("replica lock should have been acquired now that collection lock was released", replicaShard1Lock.isAcquired()); + assertTrue( + "replica lock should have been acquired now that collection lock was released", + replicaShard1Lock.isAcquired()); // Wait for the Zookeeper watch to fire + the thread to be unblocked and countdown the latch - // We'll wait up to 10 seconds here, so should be safe even if GC is extraordinarily high with a pause + // We'll wait up to 10 seconds here, so should be safe even if GC is extraordinarily high with a + // pause int i = 0; while (i < 1000 && latch.getCount() != 0) { Thread.sleep(10); i++; } - assertEquals("we should have been notified that replica lock was acquired", 0, latch.getCount()); + assertEquals( + "we should have been notified that replica lock was acquired", 0, latch.getCount()); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionReloadTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionReloadTest.java index 899efb3dd72..6f70e730be4 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionReloadTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionReloadTest.java @@ -18,7 +18,6 @@ import java.lang.invoke.MethodHandles; import java.util.concurrent.TimeUnit; - import org.apache.solr.SolrTestCaseJ4.SuppressSSL; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.cloud.SolrCloudTestCase; @@ -29,9 +28,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * Verifies cluster state remains consistent after collection reload. - */ +/** Verifies cluster state remains consistent after collection reload. 
*/ @SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776") public class CollectionReloadTest extends SolrCloudTestCase { @@ -39,11 +36,9 @@ public class CollectionReloadTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { - configureCluster(1) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(1).addConfig("conf", configset("cloud-minimal")).configure(); } - + @Test public void testReloadedLeaderStateAfterZkSessionLoss() throws Exception { @@ -53,32 +48,44 @@ public void testReloadedLeaderStateAfterZkSessionLoss() throws Exception { CollectionAdminRequest.createCollection(testCollectionName, "conf", 1, 1) .process(cluster.getSolrClient()); - Replica leader - = cluster.getSolrClient().getZkStateReader().getLeaderRetry(testCollectionName, "shard1", DEFAULT_TIMEOUT); + Replica leader = + cluster + .getSolrClient() + .getZkStateReader() + .getLeaderRetry(testCollectionName, "shard1", DEFAULT_TIMEOUT); long coreStartTime = getCoreStatus(leader).getCoreStartTime().getTime(); CollectionAdminRequest.reloadCollection(testCollectionName).process(cluster.getSolrClient()); - RetryUtil.retryUntil("Timed out waiting for core to reload", 30, 1000, TimeUnit.MILLISECONDS, () -> { - long restartTime = 0; - try { - restartTime = getCoreStatus(leader).getCoreStartTime().getTime(); - } catch (Exception e) { - log.warn("Exception getting core start time: ", e); - return false; - } - return restartTime > coreStartTime; - }); + RetryUtil.retryUntil( + "Timed out waiting for core to reload", + 30, + 1000, + TimeUnit.MILLISECONDS, + () -> { + long restartTime = 0; + try { + restartTime = getCoreStatus(leader).getCoreStartTime().getTime(); + } catch (Exception e) { + log.warn("Exception getting core start time: ", e); + return false; + } + return restartTime > coreStartTime; + }); final int initialStateVersion = getCollectionState(testCollectionName).getZNodeVersion(); cluster.expireZkSession(cluster.getReplicaJetty(leader)); - waitForState("Timed out waiting for core to re-register as ACTIVE after session expiry", testCollectionName, (n, c) -> { - log.info("Collection state: {}", c); - Replica expiredReplica = c.getReplica(leader.getName()); - return expiredReplica.getState() == Replica.State.ACTIVE && c.getZNodeVersion() > initialStateVersion; - }); + waitForState( + "Timed out waiting for core to re-register as ACTIVE after session expiry", + testCollectionName, + (n, c) -> { + log.info("Collection state: {}", c); + Replica expiredReplica = c.getReplica(leader.getName()); + return expiredReplica.getState() == Replica.State.ACTIVE + && c.getZNodeVersion() > initialStateVersion; + }); log.info("testReloadedLeaderStateAfterZkSessionLoss succeeded ... 
shutting down now!"); } diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionTooManyReplicasTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionTooManyReplicasTest.java index 46a36554d73..134834d8e76 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionTooManyReplicasTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionTooManyReplicasTest.java @@ -19,7 +19,6 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -38,9 +37,7 @@ public class CollectionTooManyReplicasTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { - configureCluster(3) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(3).addConfig("conf", configset("cloud-minimal")).configure(); } @Before @@ -49,7 +46,9 @@ public void deleteCollections() throws Exception { } @Test - @Ignore // Since maxShardsPerNode was removed in SOLR-12847 and autoscaling framework was removed in SOLR-14656, this test is broken + @Ignore + // Since maxShardsPerNode was removed in SOLR-12847 and autoscaling framework was removed in + // SOLR-14656, this test is broken public void testAddTooManyReplicas() throws Exception { final String collectionName = "TooManyReplicasInSeveralFlavors"; @@ -70,22 +69,25 @@ public void testAddTooManyReplicas() throws Exception { .process(cluster.getSolrClient()); // equivalent to maxShardsPerNode=1 - // String commands = "{ set-cluster-policy: [ {replica: '<2', shard: '#ANY', node: '#ANY', strict: true} ] }"; + // String commands = "{ set-cluster-policy: [ {replica: '<2', shard: '#ANY', node: '#ANY', + // strict: true} ] }"; // cluster.getSolrClient().request(CloudTestUtils.AutoScalingRequest.create(SolrRequest.METHOD.POST, commands)); // this should fail because the policy prevents it - Exception e = expectThrows(Exception.class, () -> { - CollectionAdminRequest.addReplicaToShard(collectionName, "shard1") - .setNode(nodeName) - .process(cluster.getSolrClient()); - }); + Exception e = + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.addReplicaToShard(collectionName, "shard1") + .setNode(nodeName) + .process(cluster.getSolrClient()); + }); assertTrue(e.toString(), e.toString().contains("No node can satisfy")); // this should succeed because it places the replica on a different node CollectionAdminRequest.addReplicaToShard(collectionName, "shard1") .process(cluster.getSolrClient()); - DocCollection collectionState = getCollectionState(collectionName); Slice slice = collectionState.getSlice("shard1"); Replica replica = getRandomReplica(slice, r -> r.getCoreName().equals("bogus2")); @@ -95,93 +97,121 @@ public void testAddTooManyReplicas() throws Exception { // Shard1 should have 2 replicas assertEquals("There should be 3 replicas for shard 1", 3, slice.getReplicas().size()); - // And let's fail one more time because to ensure that the math doesn't do weird stuff it we have more replicas - // than simple calcs would indicate. 
- Exception e2 = expectThrows(Exception.class, () -> { - CollectionAdminRequest.addReplicaToShard(collectionName, "shard1") - .process(cluster.getSolrClient()); - }); - - assertTrue("Should have gotten the right error message back", + // And let's fail one more time to ensure that the math doesn't do weird stuff if we + // have more replicas than simple calcs would indicate. + Exception e2 = + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.addReplicaToShard(collectionName, "shard1") + .process(cluster.getSolrClient()); + }); + + assertTrue( + "Should have gotten the right error message back", e2.getMessage().contains("No node can satisfy")); // wait for recoveries to finish, for a clean shutdown - see SOLR-9645 - waitForState("Expected to see all replicas active", collectionName, (n, c) -> { - for (Replica r : c.getReplicas()) { - if (r.getState() != Replica.State.ACTIVE) - return false; - } - return true; - }); + waitForState( + "Expected to see all replicas active", + collectionName, + (n, c) -> { + for (Replica r : c.getReplicas()) { + if (r.getState() != Replica.State.ACTIVE) return false; + } + return true; + }); } @Test - @Ignore // Since maxShardsPerNode was removed in SOLR-12847 and autoscaling framework was removed in SOLR-14656, this test is broken + @Ignore + // Since maxShardsPerNode was removed in SOLR-12847 and autoscaling framework was removed in + // SOLR-14656, this test is broken public void testAddShard() throws Exception { // equivalent to maxShardsPerNode=2 - // String commands = "{ set-cluster-policy: [ {replica: '<3', shard: '#ANY', node: '#ANY', strict: true} ] }"; + // String commands = "{ set-cluster-policy: [ {replica: '<3', shard: '#ANY', node: '#ANY', + // strict: true} ] }"; // cluster.getSolrClient().request(CloudTestUtils.AutoScalingRequest.create(SolrRequest.METHOD.POST, commands)); String collectionName = "TooManyReplicasWhenAddingShards"; - CollectionAdminRequest.createCollectionWithImplicitRouter(collectionName, "conf", "shardstart", 2) + CollectionAdminRequest.createCollectionWithImplicitRouter( + collectionName, "conf", "shardstart", 2) .process(cluster.getSolrClient()); - // We have two nodes, maxShardsPerNode is set to 2. Therefore, we should be able to add 2 shards each with - // two replicas, but fail on the third. - CollectionAdminRequest.createShard(collectionName, "shard1") - .process(cluster.getSolrClient()); + // We have two nodes, maxShardsPerNode is set to 2. Therefore, we should be able to add 2 shards + // each with two replicas, but fail on the third. 
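// RetryUtil.retryUntil and waitForState, as used throughout this patch, share one
// poll-until-true shape: re-evaluate a condition, sleep briefly, and fail once a deadline
// passes. Below is a minimal sketch of that pattern assuming only the JDK; the names
// PollUntilSketch/pollUntil are illustrative and are not part of the Solr test framework.
import java.util.function.BooleanSupplier;

final class PollUntilSketch {
  static void pollUntil(
      String failureMessage, long timeoutMs, long intervalMs, BooleanSupplier condition)
      throws InterruptedException {
    long deadlineNanos = System.nanoTime() + timeoutMs * 1_000_000L;
    while (System.nanoTime() < deadlineNanos) {
      if (condition.getAsBoolean()) {
        return; // condition met before the deadline
      }
      Thread.sleep(intervalMs); // back off between checks
    }
    throw new AssertionError(failureMessage);
  }

  public static void main(String[] args) throws InterruptedException {
    long start = System.currentTimeMillis();
    // Analogous to "Timed out waiting for core to reload" above: poll until an observed
    // value (there, the core start time) crosses a threshold.
    pollUntil(
        "Timed out waiting", 30_000, 1_000, () -> System.currentTimeMillis() - start > 2_000);
  }
}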
+ CollectionAdminRequest.createShard(collectionName, "shard1").process(cluster.getSolrClient()); // Now we should have one replica on each Jetty, add another to reach maxShardsPerNode - CollectionAdminRequest.createShard(collectionName, "shard2") - .process(cluster.getSolrClient()); + CollectionAdminRequest.createShard(collectionName, "shard2").process(cluster.getSolrClient()); // Now fail to add the third as it should exceed maxShardsPerNode - Exception e = expectThrows(Exception.class, () -> { - CollectionAdminRequest.createShard(collectionName, "shard3") - .process(cluster.getSolrClient()); - }); - assertTrue("Should have gotten the right error message back", + Exception e = + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.createShard(collectionName, "shard3") + .process(cluster.getSolrClient()); + }); + assertTrue( + "Should have gotten the right error message back", e.getMessage().contains("No node can satisfy the rules")); // Hmmm, providing a nodeset also overrides the checks for max replicas, so prove it. List nodes = getAllNodeNames(collectionName); - Exception e2 = expectThrows(Exception.class, () -> { - CollectionAdminRequest.createShard(collectionName, "shard4") - .setNodeSet(String.join(",", nodes)) - .process(cluster.getSolrClient()); - }); - assertTrue("Should have gotten the right error message back", + Exception e2 = + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.createShard(collectionName, "shard4") + .setNodeSet(String.join(",", nodes)) + .process(cluster.getSolrClient()); + }); + assertTrue( + "Should have gotten the right error message back", e2.getMessage().contains("No node can satisfy the rules")); -// // And just for yucks, insure we fail the "regular" one again. - Exception e3 = expectThrows(Exception.class, () -> { - CollectionAdminRequest.createShard(collectionName, "shard5") - .process(cluster.getSolrClient()); - }); - assertTrue("Should have gotten the right error message back", + // And just for yucks, ensure we fail the "regular" one again. + Exception e3 = + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.createShard(collectionName, "shard5") + .process(cluster.getSolrClient()); + }); + assertTrue( + "Should have gotten the right error message back", e3.getMessage().contains("No node can satisfy the rules")); - // And finally, ensure that there are all the replicas we expect. We should have shards 1, 2 and 4 and each - // should have exactly two replicas - waitForState("Expected shards shardstart, 1, 2, each with two active replicas", collectionName, (n, c) -> { - return DocCollection.isFullyActive(n, c, 3, 2); - }); + // And finally, ensure that there are all the replicas we expect. 
We should have shards 1, 2 and + // 4 and each should have exactly two replicas + waitForState( + "Expected shards shardstart, 1, 2, each with two active replicas", + collectionName, + (n, c) -> { + return DocCollection.isFullyActive(n, c, 3, 2); + }); Map slices = getCollectionState(collectionName).getSlicesMap(); assertEquals("There should be exaclty three slices", slices.size(), 3); assertNotNull("shardstart should exist", slices.get("shardstart")); assertNotNull("shard1 should exist", slices.get("shard1")); assertNotNull("shard2 should exist", slices.get("shard2")); - assertEquals("Shardstart should have exactly 2 replicas", 2, slices.get("shardstart").getReplicas().size()); - assertEquals("Shard1 should have exactly 2 replicas", 2, slices.get("shard1").getReplicas().size()); - assertEquals("Shard2 should have exactly 2 replicas", 2, slices.get("shard2").getReplicas().size()); - + assertEquals( + "Shardstart should have exactly 2 replicas", + 2, + slices.get("shardstart").getReplicas().size()); + assertEquals( + "Shard1 should have exactly 2 replicas", 2, slices.get("shard1").getReplicas().size()); + assertEquals( + "Shard2 should have exactly 2 replicas", 2, slices.get("shard2").getReplicas().size()); } @Test public void testDownedShards() throws Exception { String collectionName = "TooManyReplicasWhenAddingDownedNode"; - CollectionAdminRequest.createCollectionWithImplicitRouter(collectionName, "conf", "shardstart", 1) + CollectionAdminRequest.createCollectionWithImplicitRouter( + collectionName, "conf", "shardstart", 1) .process(cluster.getSolrClient()); // Shut down a Jetty, I really don't care which @@ -192,32 +222,50 @@ public void testDownedShards() throws Exception { try { // Adding a replica on a dead node should fail - Exception e1 = expectThrows(Exception.class, () -> { - CollectionAdminRequest.addReplicaToShard(collectionName, "shardstart") - .setNode(deadNode) - .process(cluster.getSolrClient()); - }); - assertTrue("Should have gotten a message about shard not currently active: " + e1.toString(), - e1.toString().contains("At least one of the node(s) specified [" + deadNode + "] are not currently active in")); + Exception e1 = + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.addReplicaToShard(collectionName, "shardstart") + .setNode(deadNode) + .process(cluster.getSolrClient()); + }); + assertTrue( + "Should have gotten a message about shard not currently active: " + e1.toString(), + e1.toString() + .contains( + "At least one of the node(s) specified [" + + deadNode + + "] are not currently active in")); // Should also die if we just add a shard - Exception e2 = expectThrows(Exception.class, () -> { - CollectionAdminRequest.createShard(collectionName, "shard1") - .setNodeSet(deadNode) - .process(cluster.getSolrClient()); - }); - - assertTrue("Should have gotten a message about shard not currently active: " + e2.toString(), - e2.toString().contains("At least one of the node(s) specified [" + deadNode + "] are not currently active in")); - } - finally { + Exception e2 = + expectThrows( + Exception.class, + () -> { + CollectionAdminRequest.createShard(collectionName, "shard1") + .setNodeSet(deadNode) + .process(cluster.getSolrClient()); + }); + + assertTrue( + "Should have gotten a message about shard not currently active: " + e2.toString(), + e2.toString() + .contains( + "At least one of the node(s) specified [" + + deadNode + + "] are not currently active in")); + } finally { cluster.startJettySolrRunner(jetty); } } - private List getAllNodeNames(String 
collectionName) throws KeeperException, InterruptedException { + private List getAllNodeNames(String collectionName) + throws KeeperException, InterruptedException { DocCollection state = getCollectionState(collectionName); - return state.getReplicas().stream().map(Replica::getNodeName).distinct().collect(Collectors.toList()); + return state.getReplicas().stream() + .map(Replica::getNodeName) + .distinct() + .collect(Collectors.toList()); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIAsyncDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIAsyncDistributedZkTest.java index 21dd9dbea67..79c9d40b195 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIAsyncDistributedZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIAsyncDistributedZkTest.java @@ -24,7 +24,6 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.lucene.util.TestUtil; import org.apache.solr.client.solrj.SolrClient; @@ -48,9 +47,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * Tests the Cloud Collections API. - */ +/** Tests the Cloud Collections API. */ @Slow public class CollectionsAPIAsyncDistributedZkTest extends SolrCloudTestCase { @@ -62,7 +59,8 @@ public class CollectionsAPIAsyncDistributedZkTest extends SolrCloudTestCase { public void setupCluster() throws Exception { // we recreate per test - they need to be isolated to be solid configureCluster(2) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .configure(); } @@ -77,23 +75,32 @@ public void testSolrJAPICalls() throws Exception { final CloudSolrClient client = cluster.getSolrClient(); - RequestStatusState state = CollectionAdminRequest.createCollection("testasynccollectioncreation","conf1",1,1) - .processAndWait(client, MAX_TIMEOUT_SECONDS); + RequestStatusState state = + CollectionAdminRequest.createCollection("testasynccollectioncreation", "conf1", 1, 1) + .processAndWait(client, MAX_TIMEOUT_SECONDS); assertSame("CreateCollection task did not complete!", RequestStatusState.COMPLETED, state); - state = CollectionAdminRequest.createCollection("testasynccollectioncreation","conf1",1,1) - .processAndWait(client, MAX_TIMEOUT_SECONDS); - assertSame("Recreating a collection with the same should have failed.", RequestStatusState.FAILED, state); + state = + CollectionAdminRequest.createCollection("testasynccollectioncreation", "conf1", 1, 1) + .processAndWait(client, MAX_TIMEOUT_SECONDS); + assertSame( + "Recreating a collection with the same name should have failed.", + RequestStatusState.FAILED, + state); - state = CollectionAdminRequest.addReplicaToShard("testasynccollectioncreation", "shard1") - .processAndWait(client, MAX_TIMEOUT_SECONDS); + state = + CollectionAdminRequest.addReplicaToShard("testasynccollectioncreation", "shard1") + .processAndWait(client, MAX_TIMEOUT_SECONDS); assertSame("Add replica did not complete", RequestStatusState.COMPLETED, state); - state = CollectionAdminRequest.splitShard("testasynccollectioncreation") - .setShardName("shard1") - .processAndWait(client, MAX_TIMEOUT_SECONDS * 2); - assertEquals("Shard split did not complete. 
Last recorded state: " + state, RequestStatusState.COMPLETED, state); - + state = + CollectionAdminRequest.splitShard("testasynccollectioncreation") + .setShardName("shard1") + .processAndWait(client, MAX_TIMEOUT_SECONDS * 2); + assertEquals( + "Shard split did not complete. Last recorded state: " + state, + RequestStatusState.COMPLETED, + state); } @Test @@ -101,19 +108,19 @@ public void testAsyncRequests() throws Exception { final String collection = "testAsyncOperations"; final CloudSolrClient client = cluster.getSolrClient(); - RequestStatusState state = CollectionAdminRequest.createCollection(collection,"conf1",1,1) - .setRouterName("implicit") - .setShards("shard1") - .processAndWait(client, MAX_TIMEOUT_SECONDS); + RequestStatusState state = + CollectionAdminRequest.createCollection(collection, "conf1", 1, 1) + .setRouterName("implicit") + .setShards("shard1") + .processAndWait(client, MAX_TIMEOUT_SECONDS); assertSame("CreateCollection task did not complete!", RequestStatusState.COMPLETED, state); - cluster.waitForActiveCollection(collection, 1, 1); - //Add a few documents to shard1 + // Add a few documents to shard1 int numDocs = TestUtil.nextInt(random(), 10, 100); List docs = new ArrayList<>(numDocs); - for (int i=0; i { - if (c == null) - return false; - Slice slice = c.getSlice("shard1"); - if (slice == null) { - return false; - } + // cloudClient watch might take a couple of seconds to reflect it + client + .getZkStateReader() + .waitForState( + collection, + 20, + TimeUnit.SECONDS, + (n, c) -> { + if (c == null) return false; + Slice slice = c.getSlice("shard1"); + if (slice == null) { + return false; + } - if (slice.getReplicas().size() == 2) { - return true; - } + if (slice.getReplicas().size() == 2) { + return true; + } - return false; - }); + return false; + }); - state = CollectionAdminRequest.createAlias("myalias",collection) - .processAndWait(client, MAX_TIMEOUT_SECONDS); + state = + CollectionAdminRequest.createAlias("myalias", collection) + .processAndWait(client, MAX_TIMEOUT_SECONDS); assertSame("CreateAlias did not complete", RequestStatusState.COMPLETED, state); query = new SolrQuery("*:*"); query.set("shards", "shard1"); assertEquals(numDocs, client.query("myalias", query).getResults().getNumFound()); - state = CollectionAdminRequest.deleteAlias("myalias") - .processAndWait(client, MAX_TIMEOUT_SECONDS); + state = + CollectionAdminRequest.deleteAlias("myalias").processAndWait(client, MAX_TIMEOUT_SECONDS); assertSame("DeleteAlias did not complete", RequestStatusState.COMPLETED, state); try { client.query("myalias", query); fail("Alias should not exist"); } catch (SolrException e) { - //expected + // expected } - Slice shard1 = client.getZkStateReader().getClusterState().getCollection(collection).getSlice("shard1"); + Slice shard1 = + client.getZkStateReader().getClusterState().getCollection(collection).getSlice("shard1"); Replica replica = shard1.getReplicas().iterator().next(); for (String liveNode : client.getZkStateReader().getClusterState().getLiveNodes()) { if (!replica.getNodeName().equals(liveNode)) { - state = new CollectionAdminRequest.MoveReplica(collection, replica.getName(), liveNode) - .processAndWait(client, MAX_TIMEOUT_SECONDS); + state = + new CollectionAdminRequest.MoveReplica(collection, replica.getName(), liveNode) + .processAndWait(client, MAX_TIMEOUT_SECONDS); assertSame("MoveReplica did not complete", RequestStatusState.COMPLETED, state); break; } } client.getZkStateReader().forceUpdateCollection(collection); - shard1 = 
client.getZkStateReader().getClusterState().getCollection(collection).getSlice("shard1"); + shard1 = + client.getZkStateReader().getClusterState().getCollection(collection).getSlice("shard1"); String replicaName = shard1.getReplicas().iterator().next().getName(); - state = CollectionAdminRequest.deleteReplica(collection, "shard1", replicaName) - .processAndWait(client, MAX_TIMEOUT_SECONDS); + state = + CollectionAdminRequest.deleteReplica(collection, "shard1", replicaName) + .processAndWait(client, MAX_TIMEOUT_SECONDS); assertSame("DeleteReplica did not complete", RequestStatusState.COMPLETED, state); - state = CollectionAdminRequest.deleteCollection(collection) - .processAndWait(client, MAX_TIMEOUT_SECONDS); + state = + CollectionAdminRequest.deleteCollection(collection) + .processAndWait(client, MAX_TIMEOUT_SECONDS); assertSame("DeleteCollection did not complete", RequestStatusState.COMPLETED, state); } @@ -215,13 +238,14 @@ public void testAsyncIdRaceCondition() throws Exception { SolrClient[] clients = new SolrClient[cluster.getJettySolrRunners().size()]; int j = 0; - for (JettySolrRunner r:cluster.getJettySolrRunners()) { + for (JettySolrRunner r : cluster.getJettySolrRunners()) { clients[j++] = new HttpSolrClient.Builder(r.getBaseUrl().toString()).build(); } - RequestStatusState state = CollectionAdminRequest.createCollection("testAsyncIdRaceCondition","conf1",1,1) - .setRouterName("implicit") - .setShards("shard1") - .processAndWait(cluster.getSolrClient(), MAX_TIMEOUT_SECONDS); + RequestStatusState state = + CollectionAdminRequest.createCollection("testAsyncIdRaceCondition", "conf1", 1, 1) + .setRouterName("implicit") + .setShards("shard1") + .processAndWait(cluster.getSolrClient(), MAX_TIMEOUT_SECONDS); assertSame("CreateCollection task did not complete!", RequestStatusState.COMPLETED, state); int numThreads = 10; @@ -229,38 +253,44 @@ public void testAsyncIdRaceCondition() throws Exception { final AtomicInteger numFailure = new AtomicInteger(0); final CountDownLatch latch = new CountDownLatch(numThreads); - ExecutorService es = ExecutorUtil.newMDCAwareFixedThreadPool(numThreads, new SolrNamedThreadFactory("testAsyncIdRaceCondition")); + ExecutorService es = + ExecutorUtil.newMDCAwareFixedThreadPool( + numThreads, new SolrNamedThreadFactory("testAsyncIdRaceCondition")); try { for (int i = 0; i < numThreads; i++) { - es.submit(new Runnable() { - - @Override - public void run() { - CollectionAdminRequest.Reload reloadCollectionRequest = CollectionAdminRequest.reloadCollection("testAsyncIdRaceCondition"); - latch.countDown(); - try { - latch.await(); - } catch (InterruptedException e) { - throw new RuntimeException(); - } - - try { - if (log.isInfoEnabled()) { - log.info("{} - Reloading Collection.", Thread.currentThread().getName()); - } - reloadCollectionRequest.processAsync("repeatedId", clients[random().nextInt(clients.length)]); - numSuccess.incrementAndGet(); - } catch (SolrServerException e) { - if (log.isInfoEnabled()) { - log.info("Exception during collection reloading, we were waiting for one: ", e); + es.submit( + new Runnable() { + + @Override + public void run() { + CollectionAdminRequest.Reload reloadCollectionRequest = + CollectionAdminRequest.reloadCollection("testAsyncIdRaceCondition"); + latch.countDown(); + try { + latch.await(); + } catch (InterruptedException e) { + throw new RuntimeException(); + } + + try { + if (log.isInfoEnabled()) { + log.info("{} - Reloading Collection.", Thread.currentThread().getName()); + } + reloadCollectionRequest.processAsync( + 
"repeatedId", clients[random().nextInt(clients.length)]); + numSuccess.incrementAndGet(); + } catch (SolrServerException e) { + if (log.isInfoEnabled()) { + log.info("Exception during collection reloading, we were waiting for one: ", e); + } + assertEquals( + "Task with the same requestid already exists. (repeatedId)", e.getMessage()); + numFailure.incrementAndGet(); + } catch (IOException e) { + throw new RuntimeException(); + } } - assertEquals("Task with the same requestid already exists. (repeatedId)", e.getMessage()); - numFailure.incrementAndGet(); - } catch (IOException e) { - throw new RuntimeException(); - } - } - }); + }); } es.shutdown(); assertTrue(es.awaitTermination(10, TimeUnit.SECONDS)); diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIDistributedZkTest.java index 2df701ed6a5..dbd150d1f6b 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIDistributedZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/CollectionsAPIDistributedZkTest.java @@ -18,9 +18,7 @@ import org.apache.lucene.util.LuceneTestCase.Slow; -/** - * Tests the Cloud Collections API. - */ +/** Tests the Cloud Collections API. */ @Slow public class CollectionsAPIDistributedZkTest extends AbstractCollectionsAPIDistributedZkTestBase { diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/ConcurrentDeleteAndCreateCollectionTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/ConcurrentDeleteAndCreateCollectionTest.java index 9585d598128..fd20606e34c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/ConcurrentDeleteAndCreateCollectionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/ConcurrentDeleteAndCreateCollectionTest.java @@ -20,7 +20,6 @@ import java.lang.invoke.MethodHandles; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; - import org.apache.lucene.util.LuceneTestCase.Nightly; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrClient; @@ -38,18 +37,18 @@ @Nightly public class ConcurrentDeleteAndCreateCollectionTest extends SolrTestCaseJ4 { - + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - + private MiniSolrCloudCluster solrCluster; - + @Override @Before public void setUp() throws Exception { super.setUp(); solrCluster = new MiniSolrCloudCluster(1, createTempDir(), buildJettyConfig("/solr")); } - + @Override @After public void tearDown() throws Exception { @@ -59,7 +58,7 @@ public void tearDown() throws Exception { } super.tearDown(); } - + public void testConcurrentCreateAndDeleteDoesNotFail() throws IOException { final AtomicReference failure = new AtomicReference<>(); final int timeToRunSec = 30; @@ -69,16 +68,22 @@ public void testConcurrentCreateAndDeleteDoesNotFail() throws IOException { solrCluster.uploadConfigSet(configset("configset-2"), collectionName); final String baseUrl = solrCluster.getJettySolrRunners().get(0).getBaseUrl().toString(); final SolrClient solrClient = getHttpSolrClient(baseUrl); - threads[i] = new CreateDeleteSearchCollectionThread("create-delete-search-" + i, collectionName, collectionName, - timeToRunSec, solrClient, failure); + threads[i] = + new CreateDeleteSearchCollectionThread( + "create-delete-search-" + i, + collectionName, + collectionName, + timeToRunSec, + solrClient, + failure); } - + 
startAll(threads); joinAll(threads); - + assertNull("concurrent create and delete collection failed: " + failure.get(), failure.get()); } - + public void testConcurrentCreateAndDeleteOverTheSameConfig() throws IOException { final String configName = "testconfig"; // upload config once, to be used by all collections @@ -90,8 +95,9 @@ public void testConcurrentCreateAndDeleteOverTheSameConfig() throws IOException for (int i = 0; i < threads.length; i++) { final String collectionName = "collection" + i; final SolrClient solrClient = getHttpSolrClient(baseUrl); - threads[i] = new CreateDeleteCollectionThread("create-delete-" + i, collectionName, configName, - timeToRunSec, solrClient, failure); + threads[i] = + new CreateDeleteCollectionThread( + "create-delete-" + i, collectionName, configName, timeToRunSec, solrClient, failure); } startAll(threads); @@ -110,22 +116,27 @@ private void joinAll(final CreateDeleteCollectionThread[] threads) { } } } - + private void startAll(final Thread[] threads) { for (Thread t : threads) { t.start(); } } - + private static class CreateDeleteCollectionThread extends Thread { protected final String collectionName; protected final String configName; protected final long timeToRunSec; protected final SolrClient solrClient; protected final AtomicReference failure; - - public CreateDeleteCollectionThread(String name, String collectionName, String configName, long timeToRunSec, - SolrClient solrClient, AtomicReference failure) { + + public CreateDeleteCollectionThread( + String name, + String collectionName, + String configName, + long timeToRunSec, + SolrClient solrClient, + AtomicReference failure) { super(name); this.collectionName = collectionName; this.timeToRunSec = timeToRunSec; @@ -133,20 +144,20 @@ public CreateDeleteCollectionThread(String name, String collectionName, String c this.failure = failure; this.configName = configName; } - + @Override public void run() { final TimeOut timeout = new TimeOut(timeToRunSec, TimeUnit.SECONDS, TimeSource.NANO_TIME); - while (! 
timeout.hasTimedOut() && failure.get() == null) { + while (!timeout.hasTimedOut() && failure.get() == null) { doWork(); } } - + protected void doWork() { createCollection(); deleteCollection(); } - + protected void addFailure(Exception e) { log.error("Add Failure", e); synchronized (failure) { @@ -157,10 +168,11 @@ protected void addFailure(Exception e) { } } } - + private void createCollection() { try { - final CollectionAdminResponse response = CollectionAdminRequest.createCollection(collectionName,configName,1,1) + final CollectionAdminResponse response = + CollectionAdminRequest.createCollection(collectionName, configName, 1, 1) .process(solrClient); if (response.getStatus() != 0) { addFailure(new RuntimeException("failed to create collection " + collectionName)); @@ -168,13 +180,12 @@ private void createCollection() { } catch (Exception e) { addFailure(e); } - } - + private void deleteCollection() { try { - final CollectionAdminRequest.Delete deleteCollectionRequest - = CollectionAdminRequest.deleteCollection(collectionName); + final CollectionAdminRequest.Delete deleteCollectionRequest = + CollectionAdminRequest.deleteCollection(collectionName); final CollectionAdminResponse response = deleteCollectionRequest.process(solrClient); if (response.getStatus() != 0) { addFailure(new RuntimeException("failed to delete collection " + collectionName)); @@ -183,7 +194,7 @@ private void deleteCollection() { addFailure(e); } } - + public void joinAndClose() throws InterruptedException { try { super.join(60000); @@ -192,20 +203,25 @@ public void joinAndClose() throws InterruptedException { } } } - + private static class CreateDeleteSearchCollectionThread extends CreateDeleteCollectionThread { - public CreateDeleteSearchCollectionThread(String name, String collectionName, String configName, long timeToRunSec, - SolrClient solrClient, AtomicReference failure) { + public CreateDeleteSearchCollectionThread( + String name, + String collectionName, + String configName, + long timeToRunSec, + SolrClient solrClient, + AtomicReference failure) { super(name, collectionName, configName, timeToRunSec, solrClient, failure); } - + @Override protected void doWork() { super.doWork(); searchNonExistingCollection(); } - + private void searchNonExistingCollection() { try { solrClient.query(collectionName, new SolrQuery("*")); @@ -215,7 +231,5 @@ private void searchNonExistingCollection() { } } } - } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/CustomCollectionTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/CustomCollectionTest.java index 69187a071ac..f7222773644 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/CustomCollectionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/CustomCollectionTest.java @@ -16,8 +16,11 @@ */ package org.apache.solr.cloud.api.collections; -import java.util.Map; +import static org.apache.solr.common.cloud.DocCollection.DOC_ROUTER; +import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR; +import static org.apache.solr.common.params.ShardParams._ROUTE_; +import java.util.Map; import org.apache.lucene.util.TestUtil; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -30,22 +33,14 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.apache.solr.common.cloud.DocCollection.DOC_ROUTER; -import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR; -import static 
org.apache.solr.common.params.ShardParams._ROUTE_; - -/** - * Tests the Custom Sharding API. - */ +/** Tests the Custom Sharding API. */ public class CustomCollectionTest extends SolrCloudTestCase { private static final int NODE_COUNT = 4; @BeforeClass public static void setupCluster() throws Exception { - configureCluster(NODE_COUNT) - .addConfig("conf", configset("cloud-dynamic")) - .configure(); + configureCluster(NODE_COUNT).addConfig("conf", configset("cloud-dynamic")).configure(); } @Before @@ -61,13 +56,15 @@ public void testCustomCollectionsAPI() throws Exception { int replicationFactor = TestUtil.nextInt(random(), 0, 3) + 2; int numShards = 3; - CollectionAdminRequest.createCollectionWithImplicitRouter(collection, "conf", "a,b,c", replicationFactor) + CollectionAdminRequest.createCollectionWithImplicitRouter( + collection, "conf", "a,b,c", replicationFactor) .process(cluster.getSolrClient()); DocCollection coll = getCollectionState(collection); assertEquals("implicit", ((Map) coll.get(DOC_ROUTER)).get("name")); assertNotNull(coll.getStr(REPLICATION_FACTOR)); - assertNull("A shard of a Collection configured with implicit router must have null range", + assertNull( + "A shard of a Collection configured with implicit router must have null range", coll.getSlice("a").getRange()); new UpdateRequest() @@ -77,13 +74,29 @@ public void testCustomCollectionsAPI() throws Exception { .withRoute("a") .commit(cluster.getSolrClient(), collection); - assertEquals(3, cluster.getSolrClient().query(collection, new SolrQuery("*:*")).getResults().getNumFound()); - assertEquals(0, cluster.getSolrClient().query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "b")).getResults().getNumFound()); - assertEquals(3, cluster.getSolrClient().query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound()); + assertEquals( + 3, + cluster.getSolrClient().query(collection, new SolrQuery("*:*")).getResults().getNumFound()); + assertEquals( + 0, + cluster + .getSolrClient() + .query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "b")) + .getResults() + .getNumFound()); + assertEquals( + 3, + cluster + .getSolrClient() + .query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "a")) + .getResults() + .getNumFound()); cluster.getSolrClient().deleteByQuery(collection, "*:*"); cluster.getSolrClient().commit(collection, true, true); - assertEquals(0, cluster.getSolrClient().query(collection, new SolrQuery("*:*")).getResults().getNumFound()); + assertEquals( + 0, + cluster.getSolrClient().query(collection, new SolrQuery("*:*")).getResults().getNumFound()); new UpdateRequest() .add("id", "9") @@ -92,28 +105,41 @@ public void testCustomCollectionsAPI() throws Exception { .withRoute("c") .commit(cluster.getSolrClient(), collection); - assertEquals(3, cluster.getSolrClient().query(collection, new SolrQuery("*:*")).getResults().getNumFound()); - assertEquals(0, cluster.getSolrClient().query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound()); - assertEquals(3, cluster.getSolrClient().query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "c")).getResults().getNumFound()); - - //Testing CREATESHARD - CollectionAdminRequest.createShard(collection, "x") - .process(cluster.getSolrClient()); - waitForState("Expected shard 'x' to be active", collection, (n, c) -> { - if (c.getSlice("x") == null) - return false; - for (Replica r : c.getSlice("x")) { - if (r.getState() != Replica.State.ACTIVE) - return false; - } - return true; - }); - - new UpdateRequest() - 
.add("id", "66", _ROUTE_, "x") - .commit(cluster.getSolrClient(), collection); + assertEquals( + 3, + cluster.getSolrClient().query(collection, new SolrQuery("*:*")).getResults().getNumFound()); + assertEquals( + 0, + cluster + .getSolrClient() + .query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "a")) + .getResults() + .getNumFound()); + assertEquals( + 3, + cluster + .getSolrClient() + .query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "c")) + .getResults() + .getNumFound()); + + // Testing CREATESHARD + CollectionAdminRequest.createShard(collection, "x").process(cluster.getSolrClient()); + waitForState( + "Expected shard 'x' to be active", + collection, + (n, c) -> { + if (c.getSlice("x") == null) return false; + for (Replica r : c.getSlice("x")) { + if (r.getState() != Replica.State.ACTIVE) return false; + } + return true; + }); + + new UpdateRequest().add("id", "66", _ROUTE_, "x").commit(cluster.getSolrClient(), collection); // TODO - the local state is cached and causes the request to fail with 'unknown shard' - // assertEquals(1, cluster.getSolrClient().query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "x")).getResults().getNumFound()); + // assertEquals(1, cluster.getSolrClient().query(collection, new + // SolrQuery("*:*").setParam(_ROUTE_, "x")).getResults().getNumFound()); } @@ -126,7 +152,8 @@ public void testRouteFieldForImplicitRouter() throws Exception { final String collection = "withShardField"; - CollectionAdminRequest.createCollectionWithImplicitRouter(collection, "conf", "a,b,c,d", replicationFactor) + CollectionAdminRequest.createCollectionWithImplicitRouter( + collection, "conf", "a,b,c,d", replicationFactor) .setRouterField(shard_fld) .process(cluster.getSolrClient()); @@ -136,15 +163,27 @@ public void testRouteFieldForImplicitRouter() throws Exception { .add("id", "8", shard_fld, "b") .commit(cluster.getSolrClient(), collection); - assertEquals(3, cluster.getSolrClient().query(collection, new SolrQuery("*:*")).getResults().getNumFound()); - assertEquals(1, cluster.getSolrClient().query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "b")).getResults().getNumFound()); - assertEquals(2, cluster.getSolrClient().query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound()); - + assertEquals( + 3, + cluster.getSolrClient().query(collection, new SolrQuery("*:*")).getResults().getNumFound()); + assertEquals( + 1, + cluster + .getSolrClient() + .query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "b")) + .getResults() + .getNumFound()); + assertEquals( + 2, + cluster + .getSolrClient() + .query(collection, new SolrQuery("*:*").setParam(_ROUTE_, "a")) + .getResults() + .getNumFound()); } @Test - // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 09-Aug-2018 - public void testRouteFieldForHashRouter()throws Exception{ + public void testRouteFieldForHashRouter() throws Exception { String collectionName = "routeFieldColl"; int numShards = 4; int replicationFactor = 2; @@ -153,7 +192,7 @@ public void testRouteFieldForHashRouter()throws Exception{ CollectionAdminRequest.createCollection(collectionName, "conf", numShards, replicationFactor) .setRouterField(shard_fld) .process(cluster.getSolrClient()); - + cluster.waitForActiveCollection(collectionName, numShards, numShards * replicationFactor); new UpdateRequest() @@ -162,34 +201,64 @@ public void testRouteFieldForHashRouter()throws Exception{ .add("id", "8", shard_fld, "b") .commit(cluster.getSolrClient(), 
collectionName); - assertEquals(3, cluster.getSolrClient().query(collectionName, new SolrQuery("*:*")).getResults().getNumFound()); - assertEquals(2, cluster.getSolrClient().query(collectionName, new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound()); - assertEquals(1, cluster.getSolrClient().query(collectionName, new SolrQuery("*:*").setParam(_ROUTE_, "b")).getResults().getNumFound()); - assertEquals(0, cluster.getSolrClient().query(collectionName, new SolrQuery("*:*").setParam(_ROUTE_, "c")).getResults().getNumFound()); - + assertEquals( + 3, + cluster + .getSolrClient() + .query(collectionName, new SolrQuery("*:*")) + .getResults() + .getNumFound()); + assertEquals( + 2, + cluster + .getSolrClient() + .query(collectionName, new SolrQuery("*:*").setParam(_ROUTE_, "a")) + .getResults() + .getNumFound()); + assertEquals( + 1, + cluster + .getSolrClient() + .query(collectionName, new SolrQuery("*:*").setParam(_ROUTE_, "b")) + .getResults() + .getNumFound()); + assertEquals( + 0, + cluster + .getSolrClient() + .query(collectionName, new SolrQuery("*:*").setParam(_ROUTE_, "c")) + .getResults() + .getNumFound()); cluster.getSolrClient().deleteByQuery(collectionName, "*:*"); cluster.getSolrClient().commit(collectionName); - cluster.getSolrClient().add(collectionName, new SolrInputDocument("id", "100", shard_fld, "c!doc1")); + cluster + .getSolrClient() + .add(collectionName, new SolrInputDocument("id", "100", shard_fld, "c!doc1")); cluster.getSolrClient().commit(collectionName); - assertEquals(1, cluster.getSolrClient().query(collectionName, new SolrQuery("*:*").setParam(_ROUTE_, "c!")).getResults().getNumFound()); - + assertEquals( + 1, + cluster + .getSolrClient() + .query(collectionName, new SolrQuery("*:*").setParam(_ROUTE_, "c!")) + .getResults() + .getNumFound()); } @Test - public void testCreateShardRepFactor() throws Exception { + public void testCreateShardRepFactor() throws Exception { final String collectionName = "testCreateShardRepFactor"; CollectionAdminRequest.createCollectionWithImplicitRouter(collectionName, "conf", "a,b", 1) .process(cluster.getSolrClient()); - CollectionAdminRequest.createShard(collectionName, "x") - .process(cluster.getSolrClient()); - - waitForState("Not enough active replicas in shard 'x'", collectionName, (n, c) -> { - return c.getSlice("x").getReplicas().size() == 1; - }); + CollectionAdminRequest.createShard(collectionName, "x").process(cluster.getSolrClient()); + waitForState( + "Not enough active replicas in shard 'x'", + collectionName, + (n, c) -> { + return c.getSlice("x").getReplicas().size() == 1; + }); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/LocalFSCloudIncrementalBackupTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/LocalFSCloudIncrementalBackupTest.java index 8ef048cf729..a8a2a990cc9 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/LocalFSCloudIncrementalBackupTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/LocalFSCloudIncrementalBackupTest.java @@ -20,67 +20,69 @@ import org.apache.lucene.util.LuceneTestCase; import org.junit.BeforeClass; -@LuceneTestCase.SuppressCodecs({"SimpleText"}) // Backups do checksum validation against a footer value not present in 'SimpleText' +// Backups do checksum validation against a footer value not present in 'SimpleText' +@LuceneTestCase.SuppressCodecs({"SimpleText"}) public class LocalFSCloudIncrementalBackupTest extends AbstractIncrementalBackupTest { - private static final String SOLR_XML 
= "\n" + - "\n" + - " ALLOWPATHS_TEMPLATE_VAL\n" + - " ${shareSchema:false}\n" + - " ${configSetBaseDir:configsets}\n" + - " ${coreRootDirectory:.}\n" + - "\n" + - " \n" + - " ${urlScheme:}\n" + - " ${socketTimeout:90000}\n" + - " ${connTimeout:15000}\n" + - " \n" + - "\n" + - " \n" + - " 127.0.0.1\n" + - " ${hostPort:8983}\n" + - " ${hostContext:solr}\n" + - " ${solr.zkclienttimeout:30000}\n" + - " ${genericCoreNodeNames:true}\n" + - " 10000\n" + - " ${distribUpdateConnTimeout:45000}\n" + - " ${distribUpdateSoTimeout:340000}\n" + - " \n" + - " \n" + - " \n" + - " \n" + - " localfs\n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - "\n"; + private static final String SOLR_XML = + "\n" + + "\n" + + " ALLOWPATHS_TEMPLATE_VAL\n" + + " ${shareSchema:false}\n" + + " ${configSetBaseDir:configsets}\n" + + " ${coreRootDirectory:.}\n" + + "\n" + + " \n" + + " ${urlScheme:}\n" + + " ${socketTimeout:90000}\n" + + " ${connTimeout:15000}\n" + + " \n" + + "\n" + + " \n" + + " 127.0.0.1\n" + + " ${hostPort:8983}\n" + + " ${hostContext:solr}\n" + + " ${solr.zkclienttimeout:30000}\n" + + " ${genericCoreNodeNames:true}\n" + + " 10000\n" + + " ${distribUpdateConnTimeout:45000}\n" + + " ${distribUpdateSoTimeout:340000}\n" + + " \n" + + " \n" + + " \n" + + " \n" + + " localfs\n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + "\n"; - private static String backupLocation; + private static String backupLocation; - @BeforeClass - public static void setupClass() throws Exception { - boolean whitespacesInPath = random().nextBoolean(); - if (whitespacesInPath) { - backupLocation = createTempDir("my backup").toAbsolutePath().toString(); - } else { - backupLocation = createTempDir("mybackup").toAbsolutePath().toString(); - } - - configureCluster(NUM_SHARDS)// nodes - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) - .withSolrXml(SOLR_XML.replace("ALLOWPATHS_TEMPLATE_VAL", backupLocation)) - .configure(); + @BeforeClass + public static void setupClass() throws Exception { + boolean whitespacesInPath = random().nextBoolean(); + if (whitespacesInPath) { + backupLocation = createTempDir("my backup").toAbsolutePath().toString(); + } else { + backupLocation = createTempDir("mybackup").toAbsolutePath().toString(); } - @Override - public String getCollectionNamePrefix() { - return "backuprestore"; - } + configureCluster(NUM_SHARDS) // nodes + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .withSolrXml(SOLR_XML.replace("ALLOWPATHS_TEMPLATE_VAL", backupLocation)) + .configure(); + } - @Override - public String getBackupLocation() { - return backupLocation; - } + @Override + public String getCollectionNamePrefix() { + return "backuprestore"; + } + @Override + public String getBackupLocation() { + return backupLocation; + } } diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/PurgeGraphTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/PurgeGraphTest.java index cb4dcb335eb..4a36952134e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/PurgeGraphTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/PurgeGraphTest.java @@ -17,13 +17,7 @@ package org.apache.solr.cloud.api.collections; -import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.cloud.api.collections.DeleteBackupCmd.PurgeGraph; -import org.apache.solr.core.backup.*; -import org.apache.solr.core.backup.repository.BackupRepository; -import 
org.apache.solr.core.backup.repository.LocalFileSystemRepository; -import org.junit.Before; -import org.junit.Test; +import static org.hamcrest.Matchers.containsInAnyOrder; import java.io.OutputStream; import java.io.OutputStreamWriter; @@ -32,160 +26,205 @@ import java.nio.charset.StandardCharsets; import java.util.Map; import java.util.UUID; +import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.cloud.api.collections.DeleteBackupCmd.PurgeGraph; +import org.apache.solr.core.backup.*; +import org.apache.solr.core.backup.repository.BackupRepository; +import org.apache.solr.core.backup.repository.LocalFileSystemRepository; +import org.junit.Before; +import org.junit.Test; -import static org.hamcrest.Matchers.containsInAnyOrder; - -/** - * Unit tests for {@link PurgeGraph} - */ +/** Unit tests for {@link PurgeGraph} */ public class PurgeGraphTest extends SolrTestCaseJ4 { - private BackupRepository repository; - private URI baseLocationUri; - private BackupFilePaths backupPaths; - - @Before - public void setUpRepo() throws Exception { - repository = new LocalFileSystemRepository(); - baseLocationUri = repository.createDirectoryURI(createTempDir("backup_files_" + UUID.randomUUID().toString()).toAbsolutePath().toString()); - backupPaths = new BackupFilePaths(repository, baseLocationUri); - - backupPaths.createIncrementalBackupFolders(); + private BackupRepository repository; + private URI baseLocationUri; + private BackupFilePaths backupPaths; + + @Before + public void setUpRepo() throws Exception { + repository = new LocalFileSystemRepository(); + baseLocationUri = + repository.createDirectoryURI( + createTempDir("backup_files_" + UUID.randomUUID().toString()) + .toAbsolutePath() + .toString()); + backupPaths = new BackupFilePaths(repository, baseLocationUri); + + backupPaths.createIncrementalBackupFolders(); + } + + @Test + public void testGraphBuildingOnNoBackups() throws Exception { + PurgeGraph purgeGraph = new PurgeGraph(); + purgeGraph.build(repository, backupPaths.getBackupLocation()); + purgeGraph.findDeletableNodes(repository, backupPaths); + + assertEquals(0, purgeGraph.backupIdDeletes.size()); + assertEquals(0, purgeGraph.shardBackupMetadataDeletes.size()); + assertEquals(0, purgeGraph.indexFileDeletes.size()); + } + + @Test + public void testUnreferencedIndexFilesAreDeleted() throws Exception { + // Backup 0 files + createBackupIdFile(0, "shard1", "shard2"); + createShardMetadataFile( + 0, "shard1", Map.of("uniqName1", "localName1", "uniqName2", "localName2")); + createShardMetadataFile( + 0, "shard2", Map.of("uniqName3", "localName3", "uniqName4", "localName4")); + // Backup 1 files + createBackupIdFile(1, "shard1", "shard2"); + createShardMetadataFile( + 1, "shard1", Map.of("uniqName5", "localName1", "uniqName6", "localName2")); + createShardMetadataFile( + 1, "shard2", Map.of("uniqName7", "localName3", "uniqName8", "localName4")); + // Valid, referenced index files + createUniquelyNamedIndexFile( + "uniqName1", + "uniqName2", + "uniqName3", + "uniqName4", + "uniqName5", + "uniqName6", + "uniqName7", + "uniqName8"); + // Single orphaned index file + createUniquelyNamedIndexFile("someUnreferencedName"); + + PurgeGraph purgeGraph = new PurgeGraph(); + purgeGraph.build(repository, backupPaths.getBackupLocation()); + + assertEquals(0, purgeGraph.backupIdDeletes.size()); + assertEquals(0, purgeGraph.shardBackupMetadataDeletes.size()); + assertEquals(1, purgeGraph.indexFileDeletes.size()); + assertEquals("someUnreferencedName", purgeGraph.indexFileDeletes.get(0)); + } + + // 
TODO - this seems a bit extreme - should this really occur by default? + @Test + public void testEntireBackupPointFlaggedForDeletionIfAnyIndexFilesMissing() throws Exception { + // Backup 0 files + createBackupIdFile(0, "shard1", "shard2"); + createShardMetadataFile( + 0, "shard1", Map.of("uniqName1", "localName1", "uniqName2", "localName2")); + createShardMetadataFile( + 0, "shard2", Map.of("uniqName3", "localName3", "uniqName4", "localName4")); + // Valid, referenced index files - 'uniqName3' is missing! + createUniquelyNamedIndexFile("uniqName1", "uniqName2", "uniqName4"); + + PurgeGraph purgeGraph = new PurgeGraph(); + purgeGraph.build(repository, backupPaths.getBackupLocation()); + + // All files associated with backup '0' should be flagged for deletion since the required file + // 'uniqName3' is missing. + assertEquals(1, purgeGraph.backupIdDeletes.size()); + assertThat(purgeGraph.backupIdDeletes, containsInAnyOrder("backup_0.properties")); + assertEquals(2, purgeGraph.shardBackupMetadataDeletes.size()); + assertThat( + purgeGraph.shardBackupMetadataDeletes, + containsInAnyOrder("md_shard1_0.json", "md_shard2_0.json")); + assertEquals(3, purgeGraph.indexFileDeletes.size()); + assertThat( + purgeGraph.indexFileDeletes, containsInAnyOrder("uniqName1", "uniqName2", "uniqName4")); + + // If a subsequent backup relies on an index file (uniqName4) that was previously only used by + // the invalid backup '0', that file will not be flagged for deletion. + // createBackupIdFile(1, "shard1", "shard2"); + // createShardMetadataFile(1, "shard1", Map.of("uniqName5", "localName1", "uniqName6", + // "localName2")); + // createShardMetadataFile(1, "shard2", Map.of("uniqName4", "localName4")); + // createUniquelyNamedIndexFile("uniqName5", "uniqName6"); + // + // assertEquals(1, purgeGraph.backupIdDeletes.size()); + // assertThat(purgeGraph.backupIdDeletes, containsInAnyOrder("backup_0.properties")); + // assertEquals(2, purgeGraph.shardBackupMetadataDeletes.size()); + // assertThat(purgeGraph.shardBackupMetadataDeletes, + // containsInAnyOrder("md_shard1_0.json", "md_shard2_0.json")); + // // NOTE that 'uniqName4' is NOT marked for deletion + // assertEquals(2, purgeGraph.indexFileDeletes.size()); + // assertThat(purgeGraph.indexFileDeletes, containsInAnyOrder("uniqName1", "uniqName2")); + } + + @Test + public void testUnreferencedShardMetadataFilesAreDeleted() throws Exception { + // Backup 0 files + createBackupIdFile(0, "shard1", "shard2"); + createShardMetadataFile( + 0, "shard1", Map.of("uniqName1", "localName1", "uniqName2", "localName2")); + createShardMetadataFile( + 0, "shard2", Map.of("uniqName3", "localName3", "uniqName4", "localName4")); + // Extra shard unreferenced by backup_0.properties + createShardMetadataFile( + 0, "shard3", Map.of("uniqName5", "localName5", "uniqName6", "localName6")); + createUniquelyNamedIndexFile( + "uniqName1", "uniqName2", "uniqName3", "uniqName4", "uniqName5", "uniqName6"); + + PurgeGraph purgeGraph = new PurgeGraph(); + purgeGraph.build(repository, backupPaths.getBackupLocation()); + + assertEquals(0, purgeGraph.backupIdDeletes.size()); + assertEquals(1, purgeGraph.shardBackupMetadataDeletes.size()); + assertThat(purgeGraph.shardBackupMetadataDeletes, containsInAnyOrder("md_shard3_0.json")); + assertEquals(2, purgeGraph.indexFileDeletes.size()); + assertThat(purgeGraph.indexFileDeletes, containsInAnyOrder("uniqName5", "uniqName6")); + + // If a subsequent backup relies on an index file (uniqName5) that was previously only used by + // the orphaned 'shard3' 
metadata file, that file should no longer be flagged for deletion + // createBackupIdFile(1, "shard1", "shard2"); + // createShardMetadataFile(1, "shard1", Map.of("uniqName7", "localName7")); + // createShardMetadataFile(1, "shard2", Map.of("uniqName5", "localName5", "uniqName8", + // "localName8")); + // + // purgeGraph = new PurgeGraph(); + // purgeGraph.build(repository, backupPaths.getBackupLocation()); + // + // assertEquals(0, purgeGraph.backupIdDeletes.size()); + // assertEquals(1, purgeGraph.shardBackupMetadataDeletes.size()); + // assertThat(purgeGraph.shardBackupMetadataDeletes, + // containsInAnyOrder("md_shard3_0.json")); + // assertEquals(1, purgeGraph.indexFileDeletes.size()); + // assertThat(purgeGraph.indexFileDeletes, containsInAnyOrder("uniqName6")); + } + + private void createBackupIdFile(int backupId, String... shardNames) throws Exception { + final BackupProperties createdProps = + BackupProperties.create( + "someBackupName", "someCollectionName", "someExtCollectionName", "someConfigName"); + for (String shardName : shardNames) { + createdProps.putAndGetShardBackupIdFor(shardName, backupId); } - @Test - public void testGraphBuildingOnNoBackups() throws Exception { - PurgeGraph purgeGraph = new PurgeGraph(); - purgeGraph.build(repository, backupPaths.getBackupLocation()); - purgeGraph.findDeletableNodes(repository, backupPaths); - - assertEquals(0, purgeGraph.backupIdDeletes.size()); - assertEquals(0, purgeGraph.shardBackupMetadataDeletes.size()); - assertEquals(0, purgeGraph.indexFileDeletes.size()); - } - - @Test - public void testUnreferencedIndexFilesAreDeleted() throws Exception { - // Backup 0 files - createBackupIdFile(0, "shard1", "shard2"); - createShardMetadataFile(0, "shard1", Map.of("uniqName1", "localName1", "uniqName2", "localName2")); - createShardMetadataFile(0, "shard2", Map.of("uniqName3", "localName3", "uniqName4", "localName4")); - // Backup 1 files - createBackupIdFile(1, "shard1", "shard2"); - createShardMetadataFile(1, "shard1", Map.of("uniqName5", "localName1", "uniqName6", "localName2")); - createShardMetadataFile(1, "shard2", Map.of("uniqName7", "localName3", "uniqName8", "localName4")); - // Valid, referenced index files - createUniquelyNamedIndexFile("uniqName1", "uniqName2", "uniqName3", "uniqName4", "uniqName5", "uniqName6", "uniqName7", "uniqName8"); - // Single orphaned index file - createUniquelyNamedIndexFile("someUnreferencedName"); - - PurgeGraph purgeGraph = new PurgeGraph(); - purgeGraph.build(repository, backupPaths.getBackupLocation()); - - assertEquals(0, purgeGraph.backupIdDeletes.size()); - assertEquals(0, purgeGraph.shardBackupMetadataDeletes.size()); - assertEquals(1, purgeGraph.indexFileDeletes.size()); - assertEquals("someUnreferencedName", purgeGraph.indexFileDeletes.get(0)); + URI dest = + repository.resolve( + backupPaths.getBackupLocation(), + BackupFilePaths.getBackupPropsName(new BackupId(backupId))); + try (Writer propsWriter = + new OutputStreamWriter(repository.createOutput(dest), StandardCharsets.UTF_8)) { + createdProps.store(propsWriter); } - - // TODO - this seems a bit extreme - should this really occur by default? 
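// The purge graph exercised by these tests is essentially a mark-and-sweep over three layers
// of backup files: backup id files reference shard metadata files, which reference uniquely
// named index files, and anything unreferenced becomes deletable. A minimal sketch of the
// final sweep under that assumption -- plain in-memory maps stand in for the repository, and
// this is an illustration of the concept, not DeleteBackupCmd.PurgeGraph itself.
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

final class PurgeSweepSketch {
  static List<String> findUnreferencedIndexFiles(
      Map<String, Set<String>> indexFilesByShardMetadata, Set<String> indexFilesOnDisk) {
    Set<String> referenced = new HashSet<>();
    for (Set<String> files : indexFilesByShardMetadata.values()) {
      referenced.addAll(files); // mark: every file some shard metadata still points at
    }
    List<String> deletable = new ArrayList<>();
    for (String file : indexFilesOnDisk) {
      if (!referenced.contains(file)) {
        deletable.add(file); // sweep: nothing references this file any more
      }
    }
    return deletable;
  }

  public static void main(String[] args) {
    Map<String, Set<String>> metadata =
        Map.of("md_shard1_0.json", Set.of("uniqName1", "uniqName2"));
    Set<String> onDisk = Set.of("uniqName1", "uniqName2", "someUnreferencedName");
    // Prints [someUnreferencedName], mirroring testUnreferencedIndexFilesAreDeleted above.
    System.out.println(findUnreferencedIndexFiles(metadata, onDisk));
  }
}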
- @Test - public void testEntireBackupPointFlaggedForDeletionIfAnyIndexFilesMissing() throws Exception { - // Backup 0 files - createBackupIdFile(0, "shard1", "shard2"); - createShardMetadataFile(0, "shard1", Map.of("uniqName1", "localName1", "uniqName2", "localName2")); - createShardMetadataFile(0, "shard2", Map.of("uniqName3", "localName3", "uniqName4", "localName4")); - // Valid, referenced index files - 'uniqName3' is missing! - createUniquelyNamedIndexFile("uniqName1", "uniqName2", "uniqName4"); - - PurgeGraph purgeGraph = new PurgeGraph(); - purgeGraph.build(repository, backupPaths.getBackupLocation()); - - // All files associated with backup '0' should be flagged for deletion since the required file 'uniqName3' is missing. - assertEquals(1, purgeGraph.backupIdDeletes.size()); - assertThat(purgeGraph.backupIdDeletes, containsInAnyOrder("backup_0.properties")); - assertEquals(2, purgeGraph.shardBackupMetadataDeletes.size()); - assertThat(purgeGraph.shardBackupMetadataDeletes, containsInAnyOrder("md_shard1_0.json", "md_shard2_0.json")); - assertEquals(3, purgeGraph.indexFileDeletes.size()); - assertThat(purgeGraph.indexFileDeletes, containsInAnyOrder("uniqName1", "uniqName2", "uniqName4")); - - // If a subsequent backup relies on an index file (uniqName4) that was previously only used by the invalid backup '0', that file will not be flagged for deletion. -// createBackupIdFile(1, "shard1", "shard2"); -// createShardMetadataFile(1, "shard1", Map.of("uniqName5", "localName1", "uniqName6", "localName2")); -// createShardMetadataFile(1, "shard2", Map.of("uniqName4", "localName4")); -// createUniquelyNamedIndexFile("uniqName5", "uniqName6"); -// -// assertEquals(1, purgeGraph.backupIdDeletes.size()); -// assertThat(purgeGraph.backupIdDeletes, containsInAnyOrder("backup_0.properties")); -// assertEquals(2, purgeGraph.shardBackupMetadataDeletes.size()); -// assertThat(purgeGraph.shardBackupMetadataDeletes, containsInAnyOrder("md_shard1_0.json", "md_shard2_0.json")); -// // NOTE that 'uniqName4' is NOT marked for deletion -// assertEquals(2, purgeGraph.indexFileDeletes.size()); -// assertThat(purgeGraph.indexFileDeletes, containsInAnyOrder("uniqName1", "uniqName2"));
+ } + + private void createShardMetadataFile( + int backupId, String shardName, Map<String, String> localIndexFilenamesByUniqueName) + throws Exception { + final ShardBackupMetadata createdShardMetadata = ShardBackupMetadata.empty(); + for (Map.Entry<String, String> entry : localIndexFilenamesByUniqueName.entrySet()) { + createdShardMetadata.addBackedFile(entry.getKey(), entry.getValue(), new Checksum(1L, 1)); }
- - @Test - public void testUnreferencedShardMetadataFilesAreDeleted() throws Exception { - // Backup 0 files - createBackupIdFile(0, "shard1", "shard2"); - createShardMetadataFile(0, "shard1", Map.of("uniqName1", "localName1", "uniqName2", "localName2")); - createShardMetadataFile(0, "shard2", Map.of("uniqName3", "localName3", "uniqName4", "localName4")); - // Extra shard unreferenced by backup_0.properties - createShardMetadataFile(0, "shard3", Map.of("uniqName5", "localName5", "uniqName6", "localName6")); - createUniquelyNamedIndexFile("uniqName1", "uniqName2", "uniqName3", "uniqName4", "uniqName5", "uniqName6"); - - PurgeGraph purgeGraph = new PurgeGraph(); - purgeGraph.build(repository, backupPaths.getBackupLocation()); - - assertEquals(0, purgeGraph.backupIdDeletes.size()); - assertEquals(1, purgeGraph.shardBackupMetadataDeletes.size()); - assertThat(purgeGraph.shardBackupMetadataDeletes, containsInAnyOrder("md_shard3_0.json")); - assertEquals(2, purgeGraph.indexFileDeletes.size()); - assertThat(purgeGraph.indexFileDeletes, containsInAnyOrder("uniqName5", "uniqName6")); - - // If a subsequent backup relies on an index file (uniqName5) that was previously only used by the orphaned 'shard3' metadata file, that file should no longer be flagged for deletion -// createBackupIdFile(1, "shard1", "shard2"); -// createShardMetadataFile(1, "shard1", Map.of("uniqName7", "localName7")); -// createShardMetadataFile(1, "shard2", Map.of("uniqName5", "localName5", "uniqName8", "localName8")); -// -// purgeGraph = new PurgeGraph(); -// purgeGraph.build(repository, backupPaths.getBackupLocation()); -// -// assertEquals(0, purgeGraph.backupIdDeletes.size()); -// assertEquals(1, purgeGraph.shardBackupMetadataDeletes.size()); -// assertThat(purgeGraph.shardBackupMetadataDeletes, containsInAnyOrder("md_shard3_0.json")); -// assertEquals(1, purgeGraph.indexFileDeletes.size()); -// assertThat(purgeGraph.indexFileDeletes, containsInAnyOrder("uniqName6")); - }
- - private void createBackupIdFile(int backupId, String... shardNames) throws Exception { - final BackupProperties createdProps = BackupProperties.create("someBackupName", "someCollectionName", - "someExtCollectionName", "someConfigName"); - for (String shardName : shardNames) { - createdProps.putAndGetShardBackupIdFor(shardName, backupId); - } - - URI dest = repository.resolve(backupPaths.getBackupLocation(), BackupFilePaths.getBackupPropsName(new BackupId(backupId))); - try (Writer propsWriter = new OutputStreamWriter(repository.createOutput(dest), StandardCharsets.UTF_8)) { - createdProps.store(propsWriter); - } - } - - private void createShardMetadataFile(int backupId, String shardName, Map<String, String> localIndexFilenamesByUniqueName) throws Exception { - final ShardBackupMetadata createdShardMetadata = ShardBackupMetadata.empty(); - for (Map.Entry<String, String> entry : localIndexFilenamesByUniqueName.entrySet()) { - createdShardMetadata.addBackedFile(entry.getKey(), entry.getValue(), new Checksum(1L, 1)); - } - createdShardMetadata.store(repository, backupPaths.getShardBackupMetadataDir(), new ShardBackupId(shardName, new BackupId(backupId))); - } - - private void createUniquelyNamedIndexFile(String... uniqNames) throws Exception { - for (String uniqName : uniqNames) { - final String randomContent = "some value"; - final URI indexFileUri = repository.resolve(backupPaths.getIndexDir(), uniqName); - try (OutputStream os = repository.createOutput(indexFileUri)) { - os.write(randomContent.getBytes(StandardCharsets.UTF_8)); - } - } + createdShardMetadata.store( + repository, + backupPaths.getShardBackupMetadataDir(), + new ShardBackupId(shardName, new BackupId(backupId))); + } + + private void createUniquelyNamedIndexFile(String... uniqNames) throws Exception { + for (String uniqName : uniqNames) { + final String randomContent = "some value"; + final URI indexFileUri = repository.resolve(backupPaths.getIndexDir(), uniqName); + try (OutputStream os = repository.createOutput(indexFileUri)) { + os.write(randomContent.getBytes(StandardCharsets.UTF_8)); + } } + } }
diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/ReplicaPropertiesBase.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/ReplicaPropertiesBase.java index fd4a4097bbb..60a87a07860 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/ReplicaPropertiesBase.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/ReplicaPropertiesBase.java @@ -16,6 +16,11 @@ */ package org.apache.solr.cloud.api.collections; +import java.io.IOException; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; import org.apache.commons.lang3.StringUtils; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -29,19 +34,16 @@ import org.apache.solr.common.util.NamedList; import org.apache.zookeeper.KeeperException; -import java.io.IOException; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - // Collect useful operations for testing assigning properties to individual replicas // Could probably expand this to do something creative with getting random slices // and shards, but for now this will do. public abstract class ReplicaPropertiesBase extends AbstractFullDistribZkTestBase { - public static NamedList<Object> doPropertyAction(CloudSolrClient client, String... paramsIn) throws IOException, SolrServerException { - assertTrue("paramsIn must be an even multiple of 2, it is: " + paramsIn.length, (paramsIn.length % 2) == 0); + public static NamedList<Object> doPropertyAction(CloudSolrClient client, String... paramsIn) + throws IOException, SolrServerException { + assertTrue( + "paramsIn must be an even multiple of 2, it is: " + paramsIn.length, + (paramsIn.length % 2) == 0); ModifiableSolrParams params = new ModifiableSolrParams(); for (int idx = 0; idx < paramsIn.length; idx += 2) { params.set(paramsIn[idx], paramsIn[idx + 1]); @@ -51,8 +53,8 @@ public static NamedList<Object> doPropertyAction(CloudSolrClient client, String. return client.request(request); } - public static void verifyPropertyNotPresent(CloudSolrClient client, String collectionName, String replicaName, - String property) + public static void verifyPropertyNotPresent( + CloudSolrClient client, String collectionName, String replicaName, String property) throws KeeperException, InterruptedException { ClusterState clusterState = null; Replica replica = null; @@ -66,23 +68,35 @@ public static void verifyPropertyNotPresent(CloudSolrClient client, String colle if (StringUtils.isBlank(replica.getProperty(property))) return; Thread.sleep(100); } - fail("Property " + property + " not set correctly for collection/replica pair: " + - collectionName + "/" + replicaName + ". Replica props: " + replica.getProperties().toString() + - ". Cluster state is " + clusterState.toString()); - + fail( + "Property " + + property + + " not set correctly for collection/replica pair: " + + collectionName + + "/" + + replicaName + + ". Replica props: " + + replica.getProperties().toString() + + ". 
Cluster state is " + + clusterState.toString()); } // The params are triplets, // collection // shard // replica - public static void verifyPropertyVal(CloudSolrClient client, String collectionName, - String replicaName, String property, String val) + public static void verifyPropertyVal( + CloudSolrClient client, + String collectionName, + String replicaName, + String property, + String val) throws InterruptedException, KeeperException { Replica replica = null; ClusterState clusterState = null; - for (int idx = 0; idx < 300; ++idx) { // Keep trying while Overseer writes the ZK state for up to 30 seconds. + // Keep trying while Overseer writes the ZK state for up to 30 seconds. + for (int idx = 0; idx < 300; ++idx) { clusterState = client.getZkStateReader().getClusterState(); final DocCollection docCollection = clusterState.getCollectionOrNull(collectionName); replica = (docCollection == null) ? null : docCollection.getReplica(replicaName); @@ -93,26 +107,37 @@ public static void verifyPropertyVal(CloudSolrClient client, String collectionNa Thread.sleep(100); } - fail("Property '" + property + "' with value " + replica.getProperty(property) + - " not set correctly for collection/replica pair: " + collectionName + "/" + replicaName + " property map is " + - replica.getProperties().toString() + "."); - + fail( + "Property '" + + property + + "' with value " + + replica.getProperty(property) + + " not set correctly for collection/replica pair: " + + collectionName + + "/" + + replicaName + + " property map is " + + replica.getProperties().toString() + + "."); } // Verify that // 1> the property is only set once in all the replicas in a slice. // 2> the property is balanced evenly across all the nodes hosting collection - public static void verifyUniqueAcrossCollection(CloudSolrClient client, String collectionName, - String property) throws KeeperException, InterruptedException { + public static void verifyUniqueAcrossCollection( + CloudSolrClient client, String collectionName, String property) + throws KeeperException, InterruptedException { verifyUnique(client, collectionName, property, true); } - public static void verifyUniquePropertyWithinCollection(CloudSolrClient client, String collectionName, - String property) throws KeeperException, InterruptedException { + public static void verifyUniquePropertyWithinCollection( + CloudSolrClient client, String collectionName, String property) + throws KeeperException, InterruptedException { verifyUnique(client, collectionName, property, false); } - public static void verifyUnique(CloudSolrClient client, String collectionName, String property, boolean balanced) + public static void verifyUnique( + CloudSolrClient client, String collectionName, String property, boolean balanced) throws KeeperException, InterruptedException { DocCollection col = null; @@ -165,14 +190,19 @@ public static void verifyUnique(CloudSolrClient client, String collectionName, S } if (doSleep == false) { - assertTrue("We really shouldn't be calling this if there is no node with the property " + property, + assertTrue( + "We really shouldn't be calling this if there is no node with the property " + + property, counts.size() > 0); return; } } Thread.sleep(100); } - fail("Collection " + collectionName + " does not have roles evenly distributed. Collection is: " + col.toString()); + fail( + "Collection " + + collectionName + + " does not have roles evenly distributed. 
Collection is: " + + col.toString()); } - } diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java index 7fbdc97a18c..0068753874e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java @@ -16,6 +16,8 @@ */ package org.apache.solr.cloud.api.collections; +import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR; + import java.io.IOException; import java.lang.invoke.MethodHandles; import java.util.ArrayList; @@ -32,7 +34,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; @@ -73,10 +74,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR; - @Slow -@LogLevel("org.apache.solr.cloud.Overseer=DEBUG;org.apache.solr.cloud.overseer=DEBUG;org.apache.solr.cloud.api.collections=DEBUG;org.apache.solr.cloud.OverseerTaskProcessor=DEBUG;org.apache.solr.util.TestInjection=DEBUG") +@LogLevel( + "org.apache.solr.cloud.Overseer=DEBUG;org.apache.solr.cloud.overseer=DEBUG;org.apache.solr.cloud.api.collections=DEBUG;org.apache.solr.cloud.OverseerTaskProcessor=DEBUG;org.apache.solr.util.TestInjection=DEBUG") public class ShardSplitTest extends BasicDistributedZkTest { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -85,7 +85,7 @@ public class ShardSplitTest extends BasicDistributedZkTest { private static final String SHARD1_1 = SHARD1 + "_1"; public ShardSplitTest() { - schemaString = "schema15.xml"; // we need a string id + schemaString = "schema15.xml"; // we need a string id } @Override @@ -108,7 +108,7 @@ public void test() throws Exception { // todo can't call waitForThingsToLevelOut because it looks for jettys of all shards // and the new sub-shards don't have any. 
waitForRecoveriesToFinish(true); - //waitForThingsToLevelOut(15); + // waitForThingsToLevelOut(15); } /* @@ -125,23 +125,34 @@ public void testSplitStaticIndexReplicationLink() throws Exception { doSplitStaticIndexReplication(SolrIndexSplitter.SplitMethod.LINK); } - private void doSplitStaticIndexReplication(SolrIndexSplitter.SplitMethod splitMethod) throws Exception { + private void doSplitStaticIndexReplication(SolrIndexSplitter.SplitMethod splitMethod) + throws Exception { waitForThingsToLevelOut(15, TimeUnit.SECONDS); - DocCollection defCol = cloudClient.getZkStateReader().getClusterState().getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION); + DocCollection defCol = + cloudClient + .getZkStateReader() + .getClusterState() + .getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION); Replica replica = defCol.getReplicas().get(0); String nodeName = replica.getNodeName(); String collectionName = "testSplitStaticIndexReplication_" + splitMethod.toLower(); - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, "conf1", 1, 1); - create.setCreateNodeSet(nodeName); // we want to create the leader on a fixed node so that we know which one to restart later + CollectionAdminRequest.Create create = + CollectionAdminRequest.createCollection(collectionName, "conf1", 1, 1); + // we want to create the leader on a fixed node so that we know which one to restart later + create.setCreateNodeSet(nodeName); create.process(cloudClient); - - cloudClient.waitForState(collectionName, 30, TimeUnit.SECONDS, SolrCloudTestCase.activeClusterShape(1, 1)); - - try (CloudSolrClient client = getCloudSolrClient(zkServer.getZkAddress(), true, cloudClient.getLbClient().getHttpClient())) { + + cloudClient.waitForState( + collectionName, 30, TimeUnit.SECONDS, SolrCloudTestCase.activeClusterShape(1, 1)); + + try (CloudSolrClient client = + getCloudSolrClient( + zkServer.getZkAddress(), true, cloudClient.getLbClient().getHttpClient())) { client.setDefaultCollection(collectionName); - StoppableIndexingThread thread = new StoppableIndexingThread(controlClient, client, "i1", true); + StoppableIndexingThread thread = + new StoppableIndexingThread(controlClient, client, "i1", true); try { thread.start(); Thread.sleep(1000); // give the indexer sometime to do its work @@ -150,30 +161,37 @@ private void doSplitStaticIndexReplication(SolrIndexSplitter.SplitMethod splitMe client.commit(); controlClient.commit(); - CollectionAdminRequest.SplitShard splitShard = CollectionAdminRequest.splitShard(collectionName); + CollectionAdminRequest.SplitShard splitShard = + CollectionAdminRequest.splitShard(collectionName); splitShard.setShardName(SHARD1); splitShard.setSplitMethod(splitMethod.toLower()); String asyncId = splitShard.processAsync(client); - RequestStatusState state = CollectionAdminRequest.requestStatus(asyncId).waitFor(client, 120); - if (state == RequestStatusState.COMPLETED) { + RequestStatusState state = + CollectionAdminRequest.requestStatus(asyncId).waitFor(client, 120); + if (state == RequestStatusState.COMPLETED) { waitForRecoveriesToFinish(collectionName, true); // let's wait to see parent shard become inactive CountDownLatch latch = new CountDownLatch(1); - client.getZkStateReader().registerCollectionStateWatcher(collectionName, (liveNodes, collectionState) -> { - Slice parent = collectionState.getSlice(SHARD1); - Slice slice10 = collectionState.getSlice(SHARD1_0); - Slice slice11 = collectionState.getSlice(SHARD1_1); - if (slice10 != null && slice11 != null && - 
parent.getState() == Slice.State.INACTIVE && - slice10.getState() == Slice.State.ACTIVE && - slice11.getState() == Slice.State.ACTIVE) { - latch.countDown(); - return true; // removes the watch - } - return false; - }); + client + .getZkStateReader() + .registerCollectionStateWatcher( + collectionName, + (liveNodes, collectionState) -> { + Slice parent = collectionState.getSlice(SHARD1); + Slice slice10 = collectionState.getSlice(SHARD1_0); + Slice slice11 = collectionState.getSlice(SHARD1_1); + if (slice10 != null + && slice11 != null + && parent.getState() == Slice.State.INACTIVE + && slice10.getState() == Slice.State.ACTIVE + && slice11.getState() == Slice.State.ACTIVE) { + latch.countDown(); + return true; // removes the watch + } + return false; + }); latch.await(1, TimeUnit.MINUTES); - if (latch.getCount() != 0) { + if (latch.getCount() != 0) { // sanity check fail("Sub-shards did not become active even after waiting for 1 minute"); } @@ -185,7 +203,7 @@ private void doSplitStaticIndexReplication(SolrIndexSplitter.SplitMethod splitMe boolean restarted = false; for (JettySolrRunner jetty : jettys) { int port = jetty.getBaseUrl().getPort(); - if (replica.getBaseUrl().contains(":" + port)) { + if (replica.getBaseUrl().contains(":" + port)) { stoppedNodeName = jetty.getNodeName(); jetty.stop(); jetty.start(); @@ -197,48 +215,75 @@ private void doSplitStaticIndexReplication(SolrIndexSplitter.SplitMethod splitMe // sanity check fail("We could not find a jetty to kill for replica: " + replica.getCoreUrl()); } - - cloudClient.getZkStateReader().waitForLiveNodes(30, TimeUnit.SECONDS, SolrCloudTestCase.containsLiveNode(stoppedNodeName)); + + cloudClient + .getZkStateReader() + .waitForLiveNodes( + 30, TimeUnit.SECONDS, SolrCloudTestCase.containsLiveNode(stoppedNodeName)); // add a new replica for the sub-shard - CollectionAdminRequest.AddReplica addReplica = CollectionAdminRequest.addReplicaToShard(collectionName, SHARD1_0); + CollectionAdminRequest.AddReplica addReplica = + CollectionAdminRequest.addReplicaToShard(collectionName, SHARD1_0); // use control client because less chances of it being the node being restarted // this is to avoid flakiness of test because of NoHttpResponseExceptions - String control_collection = client.getZkStateReader().getClusterState().getCollection("control_collection").getReplicas().get(0).getBaseUrl(); - try (HttpSolrClient control = new HttpSolrClient.Builder(control_collection).withHttpClient(client.getLbClient().getHttpClient()).build()) { + String control_collection = + client + .getZkStateReader() + .getClusterState() + .getCollection("control_collection") + .getReplicas() + .get(0) + .getBaseUrl(); + try (HttpSolrClient control = + new HttpSolrClient.Builder(control_collection) + .withHttpClient(client.getLbClient().getHttpClient()) + .build()) { state = addReplica.processAndWait(control, 30); } - - cloudClient.waitForState(collectionName, 30, TimeUnit.SECONDS, SolrCloudTestCase.activeClusterShape(2, 4)); - - if (state == RequestStatusState.COMPLETED) { + + cloudClient.waitForState( + collectionName, 30, TimeUnit.SECONDS, SolrCloudTestCase.activeClusterShape(2, 4)); + + if (state == RequestStatusState.COMPLETED) { CountDownLatch newReplicaLatch = new CountDownLatch(1); - client.getZkStateReader().registerCollectionStateWatcher(collectionName, (liveNodes, collectionState) -> { - if (liveNodes.size() != liveNodeCount) { - return false; - } - Slice slice = collectionState.getSlice(SHARD1_0); - if (slice.getReplicas().size() == 2) { - if 
(slice.getReplicas().stream().noneMatch(r -> r.getState() == Replica.State.RECOVERING)) { - // we see replicas and none of them are recovering - newReplicaLatch.countDown(); - return true; - } - } - return false; - }); + client + .getZkStateReader() + .registerCollectionStateWatcher( + collectionName, + (liveNodes, collectionState) -> { + if (liveNodes.size() != liveNodeCount) { + return false; + } + Slice slice = collectionState.getSlice(SHARD1_0); + if (slice.getReplicas().size() == 2) { + if (slice.getReplicas().stream() + .noneMatch(r -> r.getState() == Replica.State.RECOVERING)) { + // we see replicas and none of them are recovering + newReplicaLatch.countDown(); + return true; + } + } + return false; + }); newReplicaLatch.await(30, TimeUnit.SECONDS);
- // check consistency of sub-shard replica explicitly because checkShardConsistency methods doesn't - // handle new shards/replica so well. + // check consistency of sub-shard replica explicitly because the checkShardConsistency + // methods don't handle new shards/replicas so well. ClusterState clusterState = client.getZkStateReader().getClusterState(); DocCollection collection = clusterState.getCollection(collectionName); int numReplicasChecked = assertConsistentReplicas(collection.getSlice(SHARD1_0)); - assertEquals("We should have checked consistency for exactly 2 replicas of shard1_0", 2, numReplicasChecked); - } else { - fail("Adding a replica to sub-shard did not complete even after waiting for 30 seconds!. Saw state = " + state.getKey()); + assertEquals( + "We should have checked consistency for exactly 2 replicas of shard1_0", + 2, + numReplicasChecked); + } else { + fail( + "Adding a replica to sub-shard did not complete even after waiting for 30 seconds! Saw state = " + + state.getKey()); } } else { - fail("We expected shard split to succeed on a static index but it didn't. Found state = " + state.getKey()); + fail( + "We expected shard split to succeed on a static index but it didn't. 
Found state = " + + state.getKey()); } } finally { thread.safeStop(); @@ -251,16 +296,24 @@ private int assertConsistentReplicas(Slice shard) throws SolrServerException, IO long numFound = Long.MIN_VALUE; int count = 0; for (Replica replica : shard.getReplicas()) { - HttpSolrClient client = new HttpSolrClient.Builder(replica.getCoreUrl()) - .withHttpClient(cloudClient.getLbClient().getHttpClient()).build(); + HttpSolrClient client = + new HttpSolrClient.Builder(replica.getCoreUrl()) + .withHttpClient(cloudClient.getLbClient().getHttpClient()) + .build(); QueryResponse response = client.query(new SolrQuery("q", "*:*", "distrib", "false")); if (log.isInfoEnabled()) { - log.info("Found numFound={} on replica: {}", response.getResults().getNumFound(), replica.getCoreUrl()); + log.info( + "Found numFound={} on replica: {}", + response.getResults().getNumFound(), + replica.getCoreUrl()); } - if (numFound == Long.MIN_VALUE) { + if (numFound == Long.MIN_VALUE) { numFound = response.getResults().getNumFound(); - } else { - assertEquals("Shard " + shard.getName() + " replicas do not have same number of documents", numFound, response.getResults().getNumFound()); + } else { + assertEquals( + "Shard " + shard.getName() + " replicas do not have same number of documents", + numFound, + response.getResults().getNumFound()); } count++; } @@ -268,18 +321,17 @@ private int assertConsistentReplicas(Slice shard) throws SolrServerException, IO } /** - * Used to test that we can split a shard when a previous split event - * left sub-shards in construction or recovery state. + * Used to test that we can split a shard when a previous split event left sub-shards in + * construction or recovery state. * - * See SOLR-9439 + *
<p>
See SOLR-9439 */ @Test - //05-Jul-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") - // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 15-Sep-2018 public void testSplitAfterFailedSplit() throws Exception { waitForThingsToLevelOut(15, TimeUnit.SECONDS); - TestInjection.splitFailureBeforeReplicaCreation = "true:100"; // we definitely want split to fail + // we definitely want split to fail + TestInjection.splitFailureBeforeReplicaCreation = "true:100"; try { splitAfterFailedSplit(); } finally { @@ -289,7 +341,8 @@ public void testSplitAfterFailedSplit() throws Exception { private void splitAfterFailedSplit() throws KeeperException, InterruptedException { try { - CollectionAdminRequest.SplitShard splitShard = CollectionAdminRequest.splitShard(AbstractDistribZkTestBase.DEFAULT_COLLECTION); + CollectionAdminRequest.SplitShard splitShard = + CollectionAdminRequest.splitShard(AbstractDistribZkTestBase.DEFAULT_COLLECTION); splitShard.setShardName(SHARD1); splitShard.process(cloudClient); fail("Shard split was not supposed to succeed after failure injection!"); @@ -313,13 +366,15 @@ private void splitAfterFailedSplit() throws KeeperException, InterruptedExceptio // lets retry the split TestInjection.reset(); // let the split succeed try { - CollectionAdminRequest.SplitShard splitShard = CollectionAdminRequest.splitShard(AbstractDistribZkTestBase.DEFAULT_COLLECTION); + CollectionAdminRequest.SplitShard splitShard = + CollectionAdminRequest.splitShard(AbstractDistribZkTestBase.DEFAULT_COLLECTION); splitShard.setShardName(SHARD1); splitShard.process(cloudClient); // Yay! } catch (Exception e) { log.error("Shard split failed", e); - fail("Shard split did not succeed after a previous failed split attempt left sub-shards in construction state"); + fail( + "Shard split did not succeed after a previous failed split attempt left sub-shards in construction state"); } } @@ -337,25 +392,27 @@ public void testSplitAfterFailedSplit2() throws Exception { } @Test - // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 15-Sep-2018 public void testSplitMixedReplicaTypes() throws Exception { doSplitMixedReplicaTypes(SolrIndexSplitter.SplitMethod.REWRITE); } @Test - // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 14-Oct-2018 public void testSplitMixedReplicaTypesLink() throws Exception { doSplitMixedReplicaTypes(SolrIndexSplitter.SplitMethod.LINK); } - private void doSplitMixedReplicaTypes(SolrIndexSplitter.SplitMethod splitMethod) throws Exception { + private void doSplitMixedReplicaTypes(SolrIndexSplitter.SplitMethod splitMethod) + throws Exception { waitForThingsToLevelOut(15, TimeUnit.SECONDS); String collectionName = "testSplitMixedReplicaTypes_" + splitMethod.toLower(); - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, "conf1", 1, 2, 0, 2); // TODO tlog replicas disabled right now. + CollectionAdminRequest.Create create = + CollectionAdminRequest.createCollection( + collectionName, "conf1", 1, 2, 0, 2); // TODO tlog replicas disabled right now. 
create.process(cloudClient); - - cloudClient.waitForState(collectionName, 30, TimeUnit.SECONDS, SolrCloudTestCase.activeClusterShape(1, 4)); - + + cloudClient.waitForState( + collectionName, 30, TimeUnit.SECONDS, SolrCloudTestCase.activeClusterShape(1, 4)); + waitForRecoveriesToFinish(collectionName, false); for (int i = 0; i < 100; i++) { @@ -363,13 +420,15 @@ private void doSplitMixedReplicaTypes(SolrIndexSplitter.SplitMethod splitMethod) } cloudClient.commit(collectionName); - CollectionAdminRequest.SplitShard splitShard = CollectionAdminRequest.splitShard(collectionName); + CollectionAdminRequest.SplitShard splitShard = + CollectionAdminRequest.splitShard(collectionName); splitShard.setShardName(SHARD1); splitShard.setSplitMethod(splitMethod.toLower()); CollectionAdminResponse rsp = splitShard.process(cloudClient); waitForThingsToLevelOut(30, TimeUnit.SECONDS); - - cloudClient.waitForState(collectionName, 30, TimeUnit.SECONDS, SolrCloudTestCase.activeClusterShape(2, 12)); + + cloudClient.waitForState( + collectionName, 30, TimeUnit.SECONDS, SolrCloudTestCase.activeClusterShape(2, 12)); cloudClient.getZkStateReader().forceUpdateCollection(collectionName); ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); @@ -383,19 +442,34 @@ private void doSplitMixedReplicaTypes(SolrIndexSplitter.SplitMethod splitMethod) verifyShard(coll, SHARD1_1, Slice.State.ACTIVE, 2, 0, 2); } - private void verifyShard(DocCollection coll, String shard, Slice.State expectedState, int numNrt, int numTlog, int numPull) throws Exception { + private void verifyShard( + DocCollection coll, + String shard, + Slice.State expectedState, + int numNrt, + int numTlog, + int numPull) + throws Exception { Slice s = coll.getSlice(shard); assertEquals("unexpected shard state", expectedState, s.getState()); AtomicInteger actualNrt = new AtomicInteger(); AtomicInteger actualTlog = new AtomicInteger(); AtomicInteger actualPull = new AtomicInteger(); - s.getReplicas().forEach(r -> { - switch (r.getType()) { - case NRT: actualNrt.incrementAndGet(); break; - case TLOG: actualTlog.incrementAndGet(); break; - case PULL: actualPull.incrementAndGet(); break; - } - }); + s.getReplicas() + .forEach( + r -> { + switch (r.getType()) { + case NRT: + actualNrt.incrementAndGet(); + break; + case TLOG: + actualTlog.incrementAndGet(); + break; + case PULL: + actualPull.incrementAndGet(); + break; + } + }); assertEquals("actual NRT", numNrt, actualNrt.get()); assertEquals("actual TLOG", numTlog, actualTlog.get()); assertEquals("actual PULL", numPull, actualPull.get()); @@ -409,7 +483,8 @@ public void testSplitWithChaosMonkey() throws Exception { List indexers = new ArrayList<>(); try { for (int i = 0; i < 1; i++) { - StoppableIndexingThread thread = new StoppableIndexingThread(controlClient, cloudClient, String.valueOf(i), true); + StoppableIndexingThread thread = + new StoppableIndexingThread(controlClient, cloudClient, String.valueOf(i), true); indexers.add(thread); thread.start(); } @@ -428,53 +503,68 @@ public void testSplitWithChaosMonkey() throws Exception { AtomicBoolean stop = new AtomicBoolean(); AtomicBoolean killed = new AtomicBoolean(false); - Runnable monkey = () -> { - ZkStateReader zkStateReader = cloudClient.getZkStateReader(); - zkStateReader.registerCollectionStateWatcher(AbstractDistribZkTestBase.DEFAULT_COLLECTION, (liveNodes, collectionState) -> { - if (stop.get()) { - return true; // abort and remove the watch - } - Slice slice = collectionState.getSlice(SHARD1_0); - if (slice != null && 
slice.getReplicas().size() > 1) { - // ensure that only one watcher invocation thread can kill! - if (killed.compareAndSet(false, true)) { - log.info("Monkey thread found 2 replicas for {} {}", AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1); - CloudJettyRunner cjetty = shardToLeaderJetty.get(SHARD1); - try { - Thread.sleep(1000 + random().nextInt(500)); - cjetty.jetty.stop(); - stop.set(true); - return true; - } catch (Exception e) { - log.error("Monkey unable to kill jetty at port {}", cjetty.jetty.getLocalPort(), e); - } - } - } - log.info("Monkey thread found only one replica for {} {}", AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1); - return false; - }); - }; + Runnable monkey = + () -> { + ZkStateReader zkStateReader = cloudClient.getZkStateReader(); + zkStateReader.registerCollectionStateWatcher( + AbstractDistribZkTestBase.DEFAULT_COLLECTION, + (liveNodes, collectionState) -> { + if (stop.get()) { + return true; // abort and remove the watch + } + Slice slice = collectionState.getSlice(SHARD1_0); + if (slice != null && slice.getReplicas().size() > 1) { + // ensure that only one watcher invocation thread can kill! + if (killed.compareAndSet(false, true)) { + log.info( + "Monkey thread found 2 replicas for {} {}", + AbstractDistribZkTestBase.DEFAULT_COLLECTION, + SHARD1); + CloudJettyRunner cjetty = shardToLeaderJetty.get(SHARD1); + try { + Thread.sleep(1000 + random().nextInt(500)); + cjetty.jetty.stop(); + stop.set(true); + return true; + } catch (Exception e) { + log.error( + "Monkey unable to kill jetty at port {}", cjetty.jetty.getLocalPort(), e); + } + } + } + log.info( + "Monkey thread found only one replica for {} {}", + AbstractDistribZkTestBase.DEFAULT_COLLECTION, + SHARD1); + return false; + }); + }; Thread monkeyThread = new Thread(monkey); monkeyThread.start(); try { - CollectionAdminRequest.SplitShard splitShard = CollectionAdminRequest.splitShard(AbstractDistribZkTestBase.DEFAULT_COLLECTION); + CollectionAdminRequest.SplitShard splitShard = + CollectionAdminRequest.splitShard(AbstractDistribZkTestBase.DEFAULT_COLLECTION); splitShard.setShardName(SHARD1); String asyncId = splitShard.processAsync(cloudClient); RequestStatusState splitStatus = null; try { splitStatus = CollectionAdminRequest.requestStatus(asyncId).waitFor(cloudClient, 120); } catch (Exception e) { - log.warn("Failed to get request status, maybe because the overseer node was shutdown by monkey", e); + log.warn( + "Failed to get request status, maybe because the overseer node was shutdown by monkey", + e); } // we don't care if the split failed because we are injecting faults and it is likely // that the split has failed but in any case we want to assert that all docs that got - // indexed are available in SolrCloud and if the split succeeded then all replicas of the sub-shard - // must be consistent (i.e. have same numdocs) + // indexed are available in SolrCloud and if the split succeeded then all replicas of the + // sub-shard must be consistent (i.e. have same numdocs) if (log.isInfoEnabled()) { - log.info("Shard split request state is {}", splitStatus == null ? "unknown" : splitStatus.getKey()); + log.info( + "Shard split request state is {}", + splitStatus == null ? 
"unknown" : splitStatus.getKey()); } stop.set(true); monkeyThread.join(); @@ -490,9 +580,13 @@ public void testSplitWithChaosMonkey() throws Exception { log.info("Starting shard1 leader jetty at port {}", cjetty.jetty.getLocalPort()); } cjetty.jetty.start(); - cloudClient.getZkStateReader().forceUpdateCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION); + cloudClient + .getZkStateReader() + .forceUpdateCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION); if (log.isInfoEnabled()) { - log.info("Current collection state: {}", printClusterStateInfo(AbstractDistribZkTestBase.DEFAULT_COLLECTION)); + log.info( + "Current collection state: {}", + printClusterStateInfo(AbstractDistribZkTestBase.DEFAULT_COLLECTION)); } // true if sub-shard states switch to 'active' eventually @@ -502,33 +596,40 @@ public void testSplitWithChaosMonkey() throws Exception { waitForRecoveriesToFinish(AbstractDistribZkTestBase.DEFAULT_COLLECTION, true); // let's wait for the overseer to switch shard states CountDownLatch latch = new CountDownLatch(1); - cloudClient.getZkStateReader().registerCollectionStateWatcher(AbstractDistribZkTestBase.DEFAULT_COLLECTION, (liveNodes, collectionState) -> { - Slice parent = collectionState.getSlice(SHARD1); - Slice slice10 = collectionState.getSlice(SHARD1_0); - Slice slice11 = collectionState.getSlice(SHARD1_1); - if (slice10 != null && slice11 != null && - parent.getState() == Slice.State.INACTIVE && - slice10.getState() == Slice.State.ACTIVE && - slice11.getState() == Slice.State.ACTIVE) { - areSubShardsActive.set(true); - latch.countDown(); - return true; // removes the watch - } else if (slice10 != null && slice11 != null && - parent.getState() == Slice.State.ACTIVE && - slice10.getState() == Slice.State.RECOVERY_FAILED && - slice11.getState() == Slice.State.RECOVERY_FAILED) { - areSubShardsActive.set(false); - latch.countDown(); - return true; - } - return false; - }); + cloudClient + .getZkStateReader() + .registerCollectionStateWatcher( + AbstractDistribZkTestBase.DEFAULT_COLLECTION, + (liveNodes, collectionState) -> { + Slice parent = collectionState.getSlice(SHARD1); + Slice slice10 = collectionState.getSlice(SHARD1_0); + Slice slice11 = collectionState.getSlice(SHARD1_1); + if (slice10 != null + && slice11 != null + && parent.getState() == Slice.State.INACTIVE + && slice10.getState() == Slice.State.ACTIVE + && slice11.getState() == Slice.State.ACTIVE) { + areSubShardsActive.set(true); + latch.countDown(); + return true; // removes the watch + } else if (slice10 != null + && slice11 != null + && parent.getState() == Slice.State.ACTIVE + && slice10.getState() == Slice.State.RECOVERY_FAILED + && slice11.getState() == Slice.State.RECOVERY_FAILED) { + areSubShardsActive.set(false); + latch.countDown(); + return true; + } + return false; + }); latch.await(2, TimeUnit.MINUTES); - if (latch.getCount() != 0) { + if (latch.getCount() != 0) { // sanity check - fail("We think that split was successful but sub-shard states were not updated even after 2 minutes."); + fail( + "We think that split was successful but sub-shard states were not updated even after 2 minutes."); } } @@ -539,17 +640,27 @@ public void testSplitWithChaosMonkey() throws Exception { // ensure we have added more than 0 docs long cloudClientDocs = cloudClient.query(new SolrQuery("*:*")).getResults().getNumFound(); assertTrue("Found " + ctrlDocs + " control docs", cloudClientDocs > 0); - assertEquals("Found " + ctrlDocs + " control docs and " + cloudClientDocs + " cloud docs", ctrlDocs, cloudClientDocs); + 
assertEquals( + "Found " + ctrlDocs + " control docs and " + cloudClientDocs + " cloud docs", + ctrlDocs, + cloudClientDocs);
- // check consistency of sub-shard replica explicitly because checkShardConsistency methods doesn't - // handle new shards/replica so well. + // check consistency of sub-shard replica explicitly because the checkShardConsistency + // methods don't handle new shards/replicas so well. if (areSubShardsActive.get()) { ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); - DocCollection collection = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION); + DocCollection collection = + clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION); int numReplicasChecked = assertConsistentReplicas(collection.getSlice(SHARD1_0)); - assertEquals("We should have checked consistency for exactly 2 replicas of shard1_0", 2, numReplicasChecked); + assertEquals( + "We should have checked consistency for exactly 2 replicas of shard1_0", + 2, + numReplicasChecked); numReplicasChecked = assertConsistentReplicas(collection.getSlice(SHARD1_1)); - assertEquals("We should have checked consistency for exactly 2 replicas of shard1_1", 2, numReplicasChecked); + assertEquals( + "We should have checked consistency for exactly 2 replicas of shard1_1", + 2, + numReplicasChecked); } } finally { stop.set(true);
@@ -561,24 +672,28 @@ public void testSplitWithChaosMonkey() throws Exception { public void testSplitLocking() throws Exception { waitForThingsToLevelOut(15, TimeUnit.SECONDS); String collectionName = "testSplitLocking"; - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, "conf1", 1, 2); + CollectionAdminRequest.Create create = + CollectionAdminRequest.createCollection(collectionName, "conf1", 1, 2); create.process(cloudClient); - - cloudClient.waitForState(collectionName, 30, TimeUnit.SECONDS, SolrCloudTestCase.activeClusterShape(1, 2)); - + + cloudClient.waitForState( + collectionName, 30, TimeUnit.SECONDS, SolrCloudTestCase.activeClusterShape(1, 2)); + waitForRecoveriesToFinish(collectionName, false); TestInjection.splitLatch = new CountDownLatch(1); // simulate a long split operation - String path = ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName + "/" + SHARD1 + "-splitting"; + String path = + ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName + "/" + SHARD1 + "-splitting"; final AtomicReference<Exception> exc = new AtomicReference<>(); try { - Runnable r = () -> { - try { - trySplit(collectionName, null, SHARD1, 1); - } catch (Exception e) { - exc.set(e); - } - }; + Runnable r = + () -> { + try { + trySplit(collectionName, null, SHARD1, 1); + } catch (Exception e) { + exc.set(e); + } + }; Thread t = new Thread(r); t.start(); // wait for the split to start executing
@@ -601,7 +716,9 @@ public void testSplitLocking() throws Exception { } // make sure the lock still exists - assertTrue("lock znode expected but missing", cloudClient.getZkStateReader().getZkClient().exists(path, true)); + assertTrue( + "lock znode expected but missing", + cloudClient.getZkStateReader().getZkClient().exists(path, true)); // let the first split proceed TestInjection.splitLatch.countDown(); timeOut = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); @@ -632,29 +749,36 @@ private void doSplitShardWithRule(SolrIndexSplitter.SplitMethod splitMethod) thr log.info("Starting testSplitShardWithRule"); String collectionName = "shardSplitWithRule_" + splitMethod.toLower(); - CollectionAdminRequest.Create 
createRequest = CollectionAdminRequest.createCollection(collectionName, "conf1", 1, 2) - .setRule("shard:*,replica:<2,node:*"); + CollectionAdminRequest.Create createRequest = + CollectionAdminRequest.createCollection(collectionName, "conf1", 1, 2) + .setRule("shard:*,replica:<2,node:*"); CollectionAdminResponse response = createRequest.process(cloudClient); assertEquals(0, response.getStatus()); - + try { - cloudClient.waitForState(collectionName, 30, TimeUnit.SECONDS, SolrCloudTestCase.activeClusterShape(1, 2)); + cloudClient.waitForState( + collectionName, 30, TimeUnit.SECONDS, SolrCloudTestCase.activeClusterShape(1, 2)); } catch (TimeoutException e) { throw new RuntimeException("Timeout waiting for 1 shard and 2 replicas.", e); }
- CollectionAdminRequest.SplitShard splitShardRequest = CollectionAdminRequest.splitShard(collectionName) - .setShardName("shard1").setSplitMethod(splitMethod.toLower()); + CollectionAdminRequest.SplitShard splitShardRequest = + CollectionAdminRequest.splitShard(collectionName) + .setShardName("shard1") + .setSplitMethod(splitMethod.toLower()); response = splitShardRequest.process(cloudClient); assertEquals(String.valueOf(response.getErrorMessages()), 0, response.getStatus()); }
- private void incompleteOrOverlappingCustomRangeTest() throws Exception { + private void incompleteOrOverlappingCustomRangeTest() throws Exception { ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); - final DocRouter router = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getRouter(); - Slice shard1 = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getSlice(SHARD1); - DocRouter.Range shard1Range = shard1.getRange() != null ? shard1.getRange() : router.fullRange(); + final DocRouter router = + clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getRouter(); + Slice shard1 = + clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getSlice(SHARD1); + DocRouter.Range shard1Range = + shard1.getRange() != null ? shard1.getRange() : router.fullRange(); List<DocRouter.Range> subRanges = new ArrayList<>(); List<DocRouter.Range> ranges = router.partitionRange(4, shard1Range);
@@ -696,16 +820,19 @@ private void incompleteOrOverlappingCustomRangeTest() throws Exception { private void splitByUniqueKeyTest() throws Exception { ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); - final DocRouter router = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getRouter(); - Slice shard1 = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getSlice(SHARD1); - DocRouter.Range shard1Range = shard1.getRange() != null ? shard1.getRange() : router.fullRange(); + final DocRouter router = + clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getRouter(); + Slice shard1 = + clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getSlice(SHARD1); + DocRouter.Range shard1Range = + shard1.getRange() != null ? 
shard1.getRange() : router.fullRange(); List subRanges = new ArrayList<>(); - if (usually()) { + if (usually()) { List ranges = router.partitionRange(4, shard1Range); // 75% of range goes to shard1_0 and the rest to shard1_1 subRanges.add(new DocRouter.Range(ranges.get(0).min, ranges.get(2).max)); subRanges.add(ranges.get(3)); - } else { + } else { subRanges = router.partitionRange(2, shard1Range); } final Set documentIds = ConcurrentHashMap.newKeySet(1024); @@ -713,46 +840,56 @@ private void splitByUniqueKeyTest() throws Exception { final int[] docCounts = new int[ranges.size()]; int numReplicas = shard1.getReplicas().size(); - cloudClient.getZkStateReader().forceUpdateCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION); + cloudClient + .getZkStateReader() + .forceUpdateCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION); clusterState = cloudClient.getZkStateReader().getClusterState(); if (log.isDebugEnabled()) { - log.debug("-- COLLECTION: {}", clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION)); + log.debug( + "-- COLLECTION: {}", + clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION)); } del("*:*"); for (int id = 0; id <= 100; id++) { - String shardKey = "" + (char)('a' + (id % 26)); // See comment in ShardRoutingTest for hash distribution - indexAndUpdateCount(router, ranges, docCounts, shardKey + "!" + String.valueOf(id), id, documentIds); + String shardKey = + "" + (char) ('a' + (id % 26)); // See comment in ShardRoutingTest for hash distribution + indexAndUpdateCount( + router, ranges, docCounts, shardKey + "!" + String.valueOf(id), id, documentIds); } commit(); - Thread indexThread = new Thread(() -> { - Random random = random(); - int max = atLeast(random, 401); - int sleep = atLeast(random, 25); - log.info("SHARDSPLITTEST: Going to add {} number of docs at 1 doc per {} ms", max, sleep); - Set deleted = new HashSet<>(); - for (int id = 101; id < max; id++) { - try { - indexAndUpdateCount(router, ranges, docCounts, String.valueOf(id), id, documentIds); - Thread.sleep(sleep); - if (usually(random)) { - String delId = String.valueOf(random.nextInt(id - 101 + 1) + 101); - if (deleted.contains(delId)) continue; - try { - deleteAndUpdateCount(router, ranges, docCounts, delId); - deleted.add(delId); - documentIds.remove(String.valueOf(delId)); - } catch (Exception e) { - log.error("Exception while deleting docs", e); - } - } - } catch (Exception e) { - log.error("Exception while adding doc id = {}", id, e); - // do not select this id for deletion ever - deleted.add(String.valueOf(id)); - } - } - }); + Thread indexThread = + new Thread( + () -> { + Random random = random(); + int max = atLeast(random, 401); + int sleep = atLeast(random, 25); + log.info( + "SHARDSPLITTEST: Going to add {} number of docs at 1 doc per {} ms", max, sleep); + Set deleted = new HashSet<>(); + for (int id = 101; id < max; id++) { + try { + indexAndUpdateCount( + router, ranges, docCounts, String.valueOf(id), id, documentIds); + Thread.sleep(sleep); + if (usually(random)) { + String delId = String.valueOf(random.nextInt(id - 101 + 1) + 101); + if (deleted.contains(delId)) continue; + try { + deleteAndUpdateCount(router, ranges, docCounts, delId); + deleted.add(delId); + documentIds.remove(String.valueOf(delId)); + } catch (Exception e) { + log.error("Exception while deleting docs", e); + } + } + } catch (Exception e) { + log.error("Exception while adding doc id = {}", id, e); + // do not select this id for deletion ever + deleted.add(String.valueOf(id)); + } + 
} + }); indexThread.start(); try { @@ -763,7 +900,7 @@ private void splitByUniqueKeyTest() throws Exception { printLayout(); break; } catch (BaseHttpSolrClient.RemoteSolrException e) { - if (e.code() != 500) { + if (e.code() != 500) { throw e; } log.error("SPLITSHARD failed. {}", (i < 2 ? " Retring split" : ""), e); @@ -784,8 +921,7 @@ private void splitByUniqueKeyTest() throws Exception { checkDocCountsAndShardStates(docCounts, numReplicas, documentIds); } - - public void splitByRouteFieldTest() throws Exception { + public void splitByRouteFieldTest() throws Exception { log.info("Starting testSplitWithRouteField"); String collectionName = "routeFieldColl"; int numShards = 4; @@ -794,10 +930,14 @@ public void splitByRouteFieldTest() throws Exception { HashMap> collectionInfos = new HashMap<>(); String shard_fld = "shard_s"; try (CloudSolrClient client = createCloudClient(null)) { - Map props = Map.of( - REPLICATION_FACTOR, replicationFactor, - CollectionHandlingUtils.NUM_SLICES, numShards, - "router.field", shard_fld); + Map props = + Map.of( + REPLICATION_FACTOR, + replicationFactor, + CollectionHandlingUtils.NUM_SLICES, + numShards, + "router.field", + shard_fld); createCollection(collectionInfos, collectionName, props, client); } @@ -807,19 +947,23 @@ public void splitByRouteFieldTest() throws Exception { waitForRecoveriesToFinish(false); - String url = getUrlFromZk(getCommonCloudSolrClient().getZkStateReader().getClusterState(), collectionName); + String url = + getUrlFromZk( + getCommonCloudSolrClient().getZkStateReader().getClusterState(), collectionName); try (HttpSolrClient collectionClient = getHttpSolrClient(url)) { ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); final DocRouter router = clusterState.getCollection(collectionName).getRouter(); Slice shard1 = clusterState.getCollection(collectionName).getSlice(SHARD1); - DocRouter.Range shard1Range = shard1.getRange() != null ? shard1.getRange() : router.fullRange(); + DocRouter.Range shard1Range = + shard1.getRange() != null ? 
shard1.getRange() : router.fullRange(); final List ranges = router.partitionRange(2, shard1Range); final int[] docCounts = new int[ranges.size()]; for (int i = 100; i <= 200; i++) { - String shardKey = "" + (char) ('a' + (i % 26)); // See comment in ShardRoutingTest for hash distribution + // See comment in ShardRoutingTest for hash distribution + String shardKey = "" + (char) ('a' + (i % 26)); collectionClient.add(getDoc(id, i, "n_ti", i, shard_fld, shardKey)); int idx = getHashRangeIdx(router, ranges, shardKey); @@ -839,8 +983,18 @@ public void splitByRouteFieldTest() throws Exception { waitForRecoveriesToFinish(collectionName, false); - assertEquals(docCounts[0], collectionClient.query(new SolrQuery("*:*").setParam("shards", "shard1_0")).getResults().getNumFound()); - assertEquals(docCounts[1], collectionClient.query(new SolrQuery("*:*").setParam("shards", "shard1_1")).getResults().getNumFound()); + assertEquals( + docCounts[0], + collectionClient + .query(new SolrQuery("*:*").setParam("shards", "shard1_0")) + .getResults() + .getNumFound()); + assertEquals( + docCounts[1], + collectionClient + .query(new SolrQuery("*:*").setParam("shards", "shard1_1")) + .getResults() + .getNumFound()); } } @@ -853,11 +1007,11 @@ private void splitByRouteKeyTest() throws Exception { HashMap> collectionInfos = new HashMap<>(); try (CloudSolrClient client = createCloudClient(null)) { - Map props = Map.of( - REPLICATION_FACTOR, replicationFactor, - CollectionHandlingUtils.NUM_SLICES, numShards); + Map props = + Map.of( + REPLICATION_FACTOR, replicationFactor, CollectionHandlingUtils.NUM_SLICES, numShards); - createCollection(collectionInfos, collectionName,props,client); + createCollection(collectionInfos, collectionName, props, client); } List list = collectionInfos.get(collectionName); @@ -865,7 +1019,9 @@ private void splitByRouteKeyTest() throws Exception { waitForRecoveriesToFinish(false); - String url = getUrlFromZk(getCommonCloudSolrClient().getZkStateReader().getClusterState(), collectionName); + String url = + getUrlFromZk( + getCommonCloudSolrClient().getZkStateReader().getClusterState(), collectionName); try (HttpSolrClient collectionClient = getHttpSolrClient(url)) { @@ -874,23 +1030,26 @@ private void splitByRouteKeyTest() throws Exception { ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); final DocRouter router = clusterState.getCollection(collectionName).getRouter(); Slice shard1 = clusterState.getCollection(collectionName).getSlice(SHARD1); - DocRouter.Range shard1Range = shard1.getRange() != null ? shard1.getRange() : router.fullRange(); - final List ranges = ((CompositeIdRouter) router).partitionRangeByKey(splitKey, shard1Range); + DocRouter.Range shard1Range = + shard1.getRange() != null ? shard1.getRange() : router.fullRange(); + final List ranges = + ((CompositeIdRouter) router).partitionRangeByKey(splitKey, shard1Range); final int[] docCounts = new int[ranges.size()]; int uniqIdentifier = (1 << 12); int splitKeyDocCount = 0; for (int i = 100; i <= 200; i++) { - String shardKey = "" + (char) ('a' + (i % 26)); // See comment in ShardRoutingTest for hash distribution + // See comment in ShardRoutingTest for hash distribution + String shardKey = "" + (char) ('a' + (i % 26)); String idStr = shardKey + "!" + i; - collectionClient.add(getDoc(id, idStr, "n_ti", (shardKey + "!").equals(splitKey) ? uniqIdentifier : i)); + collectionClient.add( + getDoc(id, idStr, "n_ti", (shardKey + "!").equals(splitKey) ? 
uniqIdentifier : i)); int idx = getHashRangeIdx(router, ranges, idStr); if (idx != -1) { docCounts[idx]++; } - if (splitKey.equals(shardKey + "!")) - splitKeyDocCount++; + if (splitKey.equals(shardKey + "!")) splitKeyDocCount++; } for (int i = 0; i < docCounts.length; i++) { @@ -905,18 +1064,55 @@ private void splitByRouteKeyTest() throws Exception { waitForRecoveriesToFinish(collectionName, false); SolrQuery solrQuery = new SolrQuery("*:*"); - assertEquals("DocCount on shard1_0 does not match", docCounts[0], collectionClient.query(solrQuery.setParam("shards", "shard1_0")).getResults().getNumFound()); - assertEquals("DocCount on shard1_1 does not match", docCounts[1], collectionClient.query(solrQuery.setParam("shards", "shard1_1")).getResults().getNumFound()); - assertEquals("DocCount on shard1_2 does not match", docCounts[2], collectionClient.query(solrQuery.setParam("shards", "shard1_2")).getResults().getNumFound()); + assertEquals( + "DocCount on shard1_0 does not match", + docCounts[0], + collectionClient + .query(solrQuery.setParam("shards", "shard1_0")) + .getResults() + .getNumFound()); + assertEquals( + "DocCount on shard1_1 does not match", + docCounts[1], + collectionClient + .query(solrQuery.setParam("shards", "shard1_1")) + .getResults() + .getNumFound()); + assertEquals( + "DocCount on shard1_2 does not match", + docCounts[2], + collectionClient + .query(solrQuery.setParam("shards", "shard1_2")) + .getResults() + .getNumFound()); solrQuery = new SolrQuery("n_ti:" + uniqIdentifier); - assertEquals("shard1_0 must have 0 docs for route key: " + splitKey, 0, collectionClient.query(solrQuery.setParam("shards", "shard1_0")).getResults().getNumFound()); - assertEquals("Wrong number of docs on shard1_1 for route key: " + splitKey, splitKeyDocCount, collectionClient.query(solrQuery.setParam("shards", "shard1_1")).getResults().getNumFound()); - assertEquals("shard1_2 must have 0 docs for route key: " + splitKey, 0, collectionClient.query(solrQuery.setParam("shards", "shard1_2")).getResults().getNumFound()); + assertEquals( + "shard1_0 must have 0 docs for route key: " + splitKey, + 0, + collectionClient + .query(solrQuery.setParam("shards", "shard1_0")) + .getResults() + .getNumFound()); + assertEquals( + "Wrong number of docs on shard1_1 for route key: " + splitKey, + splitKeyDocCount, + collectionClient + .query(solrQuery.setParam("shards", "shard1_1")) + .getResults() + .getNumFound()); + assertEquals( + "shard1_2 must have 0 docs for route key: " + splitKey, + 0, + collectionClient + .query(solrQuery.setParam("shards", "shard1_2")) + .getResults() + .getNumFound()); } } - private void trySplit(String collectionName, String splitKey, String shardId, int maxTries) throws SolrServerException, IOException { + private void trySplit(String collectionName, String splitKey, String shardId, int maxTries) + throws SolrServerException, IOException { for (int i = 0; i < maxTries; i++) { try { splitShard(collectionName, shardId, null, splitKey, false); @@ -933,15 +1129,22 @@ private void trySplit(String collectionName, String splitKey, String shardId, in } } - protected void checkDocCountsAndShardStates(int[] docCounts, int numReplicas, Set documentIds) throws Exception { + protected void checkDocCountsAndShardStates( + int[] docCounts, int numReplicas, Set documentIds) throws Exception { ClusterState clusterState = null; Slice slice1_0 = null, slice1_1 = null; int i = 0; for (i = 0; i < 10; i++) { ZkStateReader zkStateReader = cloudClient.getZkStateReader(); clusterState = 
zkStateReader.getClusterState(); - slice1_0 = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getSlice("shard1_0"); - slice1_1 = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getSlice("shard1_1"); + slice1_0 = + clusterState + .getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION) + .getSlice("shard1_0"); + slice1_1 = + clusterState + .getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION) + .getSlice("shard1_1"); if (slice1_0.getState() == Slice.State.ACTIVE && slice1_1.getState() == Slice.State.ACTIVE) { break; } @@ -954,8 +1157,14 @@ protected void checkDocCountsAndShardStates(int[] docCounts, int numReplicas, Se assertNotNull("Cluster state does not contain shard1_0", slice1_1); assertSame("shard1_0 is not active", Slice.State.ACTIVE, slice1_0.getState()); assertSame("shard1_1 is not active", Slice.State.ACTIVE, slice1_1.getState()); - assertEquals("Wrong number of replicas created for shard1_0", numReplicas, slice1_0.getReplicas().size()); - assertEquals("Wrong number of replicas created for shard1_1", numReplicas, slice1_1.getReplicas().size()); + assertEquals( + "Wrong number of replicas created for shard1_0", + numReplicas, + slice1_0.getReplicas().size()); + assertEquals( + "Wrong number of replicas created for shard1_1", + numReplicas, + slice1_1.getReplicas().size()); commit(); @@ -966,15 +1175,16 @@ protected void checkDocCountsAndShardStates(int[] docCounts, int numReplicas, Se SolrQuery query = new SolrQuery("*:*").setRows(1000).setFields("id", "_version_"); query.set("distrib", false); - ZkCoreNodeProps shard1_0 = getLeaderUrlFromZk(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1_0); + ZkCoreNodeProps shard1_0 = + getLeaderUrlFromZk(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1_0); QueryResponse response; try (HttpSolrClient shard1_0Client = getHttpSolrClient(shard1_0.getCoreUrl())) { response = shard1_0Client.query(query); } long shard10Count = response.getResults().getNumFound(); - ZkCoreNodeProps shard1_1 = getLeaderUrlFromZk( - AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1_1); + ZkCoreNodeProps shard1_1 = + getLeaderUrlFromZk(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1_1); QueryResponse response2; try (HttpSolrClient shard1_1Client = getHttpSolrClient(shard1_1.getCoreUrl())) { response2 = shard1_1Client.query(query); @@ -992,7 +1202,8 @@ protected void checkSubShardConsistency(String shard) throws SolrServerException query.set("distrib", false); ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); - Slice slice = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getSlice(shard); + Slice slice = + clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getSlice(shard); long[] numFound = new long[slice.getReplicasMap().size()]; int c = 0; for (Replica replica : slice.getReplicas()) { @@ -1003,31 +1214,42 @@ protected void checkSubShardConsistency(String shard) throws SolrServerException } numFound[c++] = response.getResults().getNumFound(); if (log.isInfoEnabled()) { - log.info("Shard: {} Replica: {} has {} docs", shard, coreUrl, String.valueOf(response.getResults().getNumFound())); + log.info( + "Shard: {} Replica: {} has {} docs", + shard, + coreUrl, + String.valueOf(response.getResults().getNumFound())); } - assertTrue("Shard: " + shard + " Replica: " + coreUrl + " has 0 docs", response.getResults().getNumFound() > 0); + assertTrue( + "Shard: " + shard + " Replica: " + coreUrl + " has 0 docs", + 
response.getResults().getNumFound() > 0); } for (int i = 0; i < slice.getReplicasMap().size(); i++) { assertEquals(shard + " is not consistent", numFound[0], numFound[i]); } } - protected void splitShard(String collection, String shardId, List subRanges, String splitKey, boolean offline) throws SolrServerException, IOException { + protected void splitShard( + String collection, + String shardId, + List subRanges, + String splitKey, + boolean offline) + throws SolrServerException, IOException { ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.SPLITSHARD.toString()); params.set("timing", "true"); params.set("offline", String.valueOf(offline)); params.set("collection", collection); - if (shardId != null) { + if (shardId != null) { params.set("shard", shardId); } - if (subRanges != null) { + if (subRanges != null) { StringBuilder ranges = new StringBuilder(); for (int i = 0; i < subRanges.size(); i++) { DocRouter.Range subRange = subRanges.get(i); ranges.append(subRange.toString()); - if (i < subRanges.size() - 1) - ranges.append(","); + if (i < subRanges.size() - 1) ranges.append(","); } params.set("ranges", ranges.toString()); } @@ -1037,7 +1259,8 @@ protected void splitShard(String collection, String shardId, List ranges, int[] docCounts, String id, int n, Set documentIds) throws Exception { + protected void indexAndUpdateCount( + DocRouter router, + List ranges, + int[] docCounts, + String id, + int n, + Set documentIds) + throws Exception { index("id", id, "n_ti", n); int idx = getHashRangeIdx(router, ranges, id); - if (idx != -1) { + if (idx != -1) { docCounts[idx]++; documentIds.add(String.valueOf(id)); } } - protected void deleteAndUpdateCount(DocRouter router, List ranges, int[] docCounts, String id) throws Exception { + protected void deleteAndUpdateCount( + DocRouter router, List ranges, int[] docCounts, String id) throws Exception { controlClient.deleteById(id); cloudClient.deleteById(id); int idx = getHashRangeIdx(router, ranges, id); - if (idx != -1) { + if (idx != -1) { docCounts[idx]--; } } @@ -1072,17 +1303,22 @@ public static int getHashRangeIdx(DocRouter router, List ranges int hash = 0; if (router instanceof HashBasedRouter) { HashBasedRouter hashBasedRouter = (HashBasedRouter) router; - hash = hashBasedRouter.sliceHash(id, null, null,null); + hash = hashBasedRouter.sliceHash(id, null, null, null); } for (int i = 0; i < ranges.size(); i++) { DocRouter.Range range = ranges.get(i); - if (range.includes(hash)) - return i; + if (range.includes(hash)) return i; } return -1; } - protected void logDebugHelp(int[] docCounts, QueryResponse response, long shard10Count, QueryResponse response2, long shard11Count, Set documentIds) { + protected void logDebugHelp( + int[] docCounts, + QueryResponse response, + long shard10Count, + QueryResponse response2, + long shard11Count, + Set documentIds) { for (int i = 0; i < docCounts.length; i++) { int docCount = docCounts[i]; log.info("Expected docCount for shard1_{} = {}", i, docCount); @@ -1097,10 +1333,15 @@ protected void logDebugHelp(int[] docCounts, QueryResponse response, long shard1 Map shard11Docs = new HashMap<>(); for (int i = 0; i < response.getResults().size(); i++) { SolrDocument document = response.getResults().get(i); - idVsVersion.put(document.getFieldValue("id").toString(), document.getFieldValue("_version_").toString()); + idVsVersion.put( + document.getFieldValue("id").toString(), document.getFieldValue("_version_").toString()); SolrDocument old = 
shard10Docs.put(document.getFieldValue("id").toString(), document); if (old != null) { - log.error("EXTRA: ID: {} on shard1_0. Old version: {} new version: {}", document.getFieldValue("id"), old.getFieldValue("_version_"), document.getFieldValue("_version_")); + log.error( + "EXTRA: ID: {} on shard1_0. Old version: {} new version: {}", + document.getFieldValue("id"), + old.getFieldValue("_version_"), + document.getFieldValue("_version_")); } found.add(document.getFieldValue("id").toString()); } @@ -1109,17 +1350,24 @@ protected void logDebugHelp(int[] docCounts, QueryResponse response, long shard1 String value = document.getFieldValue("id").toString(); String version = idVsVersion.get(value); if (version != null) { - log.error("DUPLICATE: ID: {}, shard1_0Version {} shard1_1Version: {}", value, version, document.getFieldValue("_version_")); + log.error( + "DUPLICATE: ID: {}, shard1_0Version {} shard1_1Version: {}", + value, + version, + document.getFieldValue("_version_")); } SolrDocument old = shard11Docs.put(document.getFieldValue("id").toString(), document); if (old != null) { - log.error("EXTRA: ID: {} on shard1_1. Old version: {} new version: {}" - ,document.getFieldValue("id"), old.getFieldValue("_version_"), document.getFieldValue("_version_")); + log.error( + "EXTRA: ID: {} on shard1_1. Old version: {} new version: {}", + document.getFieldValue("id"), + old.getFieldValue("_version_"), + document.getFieldValue("_version_")); } found.add(document.getFieldValue("id").toString()); } - if (found.size() < documentIds.size()) { + if (found.size() < documentIds.size()) { documentIds.removeAll(found); log.error("MISSING: ID: {}", documentIds); } else if (found.size() > documentIds.size()) { diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/SimpleCollectionCreateDeleteTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/SimpleCollectionCreateDeleteTest.java index 77431ff371e..4a1da186a2c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/SimpleCollectionCreateDeleteTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/SimpleCollectionCreateDeleteTest.java @@ -16,6 +16,9 @@ */ package org.apache.solr.cloud.api.collections; +import java.util.Collection; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.cloud.AbstractFullDistribZkTestBase; @@ -29,155 +32,205 @@ import org.apache.solr.util.TimeOut; import org.junit.Test; -import java.util.Collection; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - public class SimpleCollectionCreateDeleteTest extends AbstractFullDistribZkTestBase { - public SimpleCollectionCreateDeleteTest() { - sliceCount = 1; - } - - @Test - @ShardsFixed(num = 1) - public void testCreateAndDeleteThenCreateAgain() throws Exception { - String overseerNode = OverseerCollectionConfigSetProcessor.getLeaderNode(cloudClient.getZkStateReader().getZkClient()); - String notOverseerNode = null; - for (CloudJettyRunner cloudJetty : cloudJettys) { - if (!overseerNode.equals(cloudJetty.nodeName)) { - notOverseerNode = cloudJetty.nodeName; - break; - } - } - String collectionName = "SimpleCollectionCreateDeleteTest"; - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, 1, 1) - .setCreateNodeSet(overseerNode); - - NamedList request = 
create.process(cloudClient).getResponse(); - - if (request.get("success") != null) { - assertTrue(getZkClient().exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName, false)); - - CollectionAdminRequest.Delete delete = CollectionAdminRequest.deleteCollection(collectionName); - cloudClient.request(delete); - - assertFalse(getZkClient().exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName, false)); - - // currently, removing a collection does not wait for cores to be unloaded - TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); - while (true) { - - if (timeout.hasTimedOut()) { - throw new TimeoutException("Timed out waiting for all collections to be fully removed."); - } - - boolean allContainersEmpty = true; - for (JettySolrRunner jetty : jettys) { - - Collection cores = jetty.getCoreContainer().getCores(); - for (SolrCore core : cores) { - CoreDescriptor cd = core.getCoreDescriptor(); - if (cd != null) { - if (cd.getCloudDescriptor().getCollectionName().equals(collectionName)) { - allContainersEmpty = false; - } - } - } - } - if (allContainersEmpty) { - break; - } - } - - // create collection again on a node other than the overseer leader - create = CollectionAdminRequest.createCollection(collectionName, 1, 1) - .setCreateNodeSet(notOverseerNode); - request = create.process(cloudClient).getResponse(); - assertTrue("Collection creation should not have failed", request.get("success") != null); - } + public SimpleCollectionCreateDeleteTest() { + sliceCount = 1; + } + + @Test + @ShardsFixed(num = 1) + public void testCreateAndDeleteThenCreateAgain() throws Exception { + String overseerNode = + OverseerCollectionConfigSetProcessor.getLeaderNode( + cloudClient.getZkStateReader().getZkClient()); + String notOverseerNode = null; + for (CloudJettyRunner cloudJetty : cloudJettys) { + if (!overseerNode.equals(cloudJetty.nodeName)) { + notOverseerNode = cloudJetty.nodeName; + break; + } } + String collectionName = "SimpleCollectionCreateDeleteTest"; + CollectionAdminRequest.Create create = + CollectionAdminRequest.createCollection(collectionName, 1, 1) + .setCreateNodeSet(overseerNode); - @Test - @ShardsFixed(num = 1) - public void testDeleteAlsoDeletesAutocreatedConfigSet() throws Exception { - String collectionName = "SimpleCollectionCreateDeleteTest.testDeleteAlsoDeletesAutocreatedConfigSet"; - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, 1, 1); + NamedList request = create.process(cloudClient).getResponse(); - NamedList request = create.process(cloudClient).getResponse(); + if (request.get("success") != null) { + assertTrue( + getZkClient().exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName, false)); - if (request.get("success") != null) { - // collection exists now - assertTrue(cloudClient.getZkStateReader().getZkClient().exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName, false)); + CollectionAdminRequest.Delete delete = + CollectionAdminRequest.deleteCollection(collectionName); + cloudClient.request(delete); - String configName = cloudClient.getClusterStateProvider().getCollection(collectionName).getConfigName(); + assertFalse( + getZkClient().exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName, false)); - // config for this collection is '.AUTOCREATED', and exists globally - assertTrue(configName.endsWith(".AUTOCREATED")); - assertTrue(getZkClient().exists(ZkStateReader.CONFIGS_ZKNODE + "/" + configName, true)); + // currently, removing a collection does not wait for 
cores to be unloaded + TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + while (true) { - CollectionAdminRequest.Delete delete = CollectionAdminRequest.deleteCollection(collectionName); - cloudClient.request(delete); - - // collection has been deleted - assertFalse(getZkClient().exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName, false)); - // ... and so has its autocreated config set - assertFalse("The auto-created config set should have been deleted with its collection", getZkClient().exists(ZkStateReader.CONFIGS_ZKNODE + "/" + configName, true)); + if (timeout.hasTimedOut()) { + throw new TimeoutException("Timed out waiting for all collections to be fully removed."); } - } - @Test - @ShardsFixed(num = 1) - public void testDeleteDoesNotDeleteSharedAutocreatedConfigSet() throws Exception { - String collectionNameInitial = "SimpleCollectionCreateDeleteTest.initialCollection"; - CollectionAdminRequest.Create createInitial = CollectionAdminRequest.createCollection(collectionNameInitial, 1, 1); + boolean allContainersEmpty = true; + for (JettySolrRunner jetty : jettys) { - NamedList requestInitial = createInitial.process(cloudClient).getResponse(); - - if (requestInitial.get("success") != null) { - // collection exists now - assertTrue(getZkClient().exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionNameInitial, false)); - - String configName = cloudClient.getClusterStateProvider().getCollection(collectionNameInitial).getConfigName(); - - // config for this collection is '.AUTOCREATED', and exists globally - assertTrue(configName.endsWith(".AUTOCREATED")); - assertTrue(getZkClient().exists(ZkStateReader.CONFIGS_ZKNODE + "/" + configName, true)); - - // create a second collection, sharing the same configSet - String collectionNameWithSharedConfig = "SimpleCollectionCreateDeleteTest.collectionSharingAutocreatedConfigSet"; - CollectionAdminRequest.Create createWithSharedConfig = CollectionAdminRequest.createCollection(collectionNameWithSharedConfig, configName, 1, 1); - - NamedList requestWithSharedConfig = createWithSharedConfig.process(cloudClient).getResponse(); - assertTrue("The collection with shared config set should have been created", requestWithSharedConfig.get("success") != null); - assertTrue("The new collection should exist after a successful creation", getZkClient().exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionNameWithSharedConfig, false)); - - String configNameOfSecondCollection = cloudClient.getClusterStateProvider().getCollection(collectionNameWithSharedConfig).getConfigName(); - - assertEquals("Both collections should be using the same config", configName, configNameOfSecondCollection); - - // delete the initial collection - the config set should stay, since it is shared with the other collection - CollectionAdminRequest.Delete deleteInitialCollection = CollectionAdminRequest.deleteCollection(collectionNameInitial); - cloudClient.request(deleteInitialCollection); - - // initial collection has been deleted - assertFalse(getZkClient().exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionNameInitial, false)); - // ... but not its autocreated config set, since it is shared with another collection - assertTrue("The auto-created config set should NOT have been deleted. 
Another collection is using it.", getZkClient().exists(ZkStateReader.CONFIGS_ZKNODE + "/" + configName, true)); - - // delete the second collection - the config set should now be deleted, since it is no longer shared any other collection - CollectionAdminRequest.Delete deleteSecondCollection = CollectionAdminRequest.deleteCollection(collectionNameWithSharedConfig); - cloudClient.request(deleteSecondCollection); - - // the collection has been deleted - assertFalse(getZkClient().exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionNameWithSharedConfig, false)); - // ... and the config set is now also deleted - once it doesn't get referenced by any collection - assertFalse("The auto-created config set should have been deleted now. No collection is referencing it.", getZkClient().exists(ZkStateReader.CONFIGS_ZKNODE + "/" + configName, true)); + Collection cores = jetty.getCoreContainer().getCores(); + for (SolrCore core : cores) { + CoreDescriptor cd = core.getCoreDescriptor(); + if (cd != null) { + if (cd.getCloudDescriptor().getCollectionName().equals(collectionName)) { + allContainersEmpty = false; + } + } + } } + if (allContainersEmpty) { + break; + } + } + + // create collection again on a node other than the overseer leader + create = + CollectionAdminRequest.createCollection(collectionName, 1, 1) + .setCreateNodeSet(notOverseerNode); + request = create.process(cloudClient).getResponse(); + assertTrue("Collection creation should not have failed", request.get("success") != null); } - - public SolrZkClient getZkClient() { - return cloudClient.getZkStateReader().getZkClient(); + } + + @Test + @ShardsFixed(num = 1) + public void testDeleteAlsoDeletesAutocreatedConfigSet() throws Exception { + String collectionName = + "SimpleCollectionCreateDeleteTest.testDeleteAlsoDeletesAutocreatedConfigSet"; + CollectionAdminRequest.Create create = + CollectionAdminRequest.createCollection(collectionName, 1, 1); + + NamedList request = create.process(cloudClient).getResponse(); + + if (request.get("success") != null) { + // collection exists now + assertTrue( + cloudClient + .getZkStateReader() + .getZkClient() + .exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName, false)); + + String configName = + cloudClient.getClusterStateProvider().getCollection(collectionName).getConfigName(); + + // config for this collection is '.AUTOCREATED', and exists globally + assertTrue(configName.endsWith(".AUTOCREATED")); + assertTrue(getZkClient().exists(ZkStateReader.CONFIGS_ZKNODE + "/" + configName, true)); + + CollectionAdminRequest.Delete delete = + CollectionAdminRequest.deleteCollection(collectionName); + cloudClient.request(delete); + + // collection has been deleted + assertFalse( + getZkClient().exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionName, false)); + // ... 
and so has its autocreated config set + assertFalse( + "The auto-created config set should have been deleted with its collection", + getZkClient().exists(ZkStateReader.CONFIGS_ZKNODE + "/" + configName, true)); + } + } + + @Test + @ShardsFixed(num = 1) + public void testDeleteDoesNotDeleteSharedAutocreatedConfigSet() throws Exception { + String collectionNameInitial = "SimpleCollectionCreateDeleteTest.initialCollection"; + CollectionAdminRequest.Create createInitial = + CollectionAdminRequest.createCollection(collectionNameInitial, 1, 1); + + NamedList<Object> requestInitial = createInitial.process(cloudClient).getResponse(); + + if (requestInitial.get("success") != null) { + // collection exists now + assertTrue( + getZkClient() + .exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionNameInitial, false)); + + String configName = + cloudClient + .getClusterStateProvider() + .getCollection(collectionNameInitial) + .getConfigName(); + + // config for this collection is '.AUTOCREATED', and exists globally + assertTrue(configName.endsWith(".AUTOCREATED")); + assertTrue(getZkClient().exists(ZkStateReader.CONFIGS_ZKNODE + "/" + configName, true)); + + // create a second collection, sharing the same configSet + String collectionNameWithSharedConfig = + "SimpleCollectionCreateDeleteTest.collectionSharingAutocreatedConfigSet"; + CollectionAdminRequest.Create createWithSharedConfig = + CollectionAdminRequest.createCollection(collectionNameWithSharedConfig, configName, 1, 1); + + NamedList<Object> requestWithSharedConfig = + createWithSharedConfig.process(cloudClient).getResponse(); + assertTrue( + "The collection with shared config set should have been created", + requestWithSharedConfig.get("success") != null); + assertTrue( + "The new collection should exist after a successful creation", + getZkClient() + .exists( + ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionNameWithSharedConfig, false)); + + String configNameOfSecondCollection = + cloudClient + .getClusterStateProvider() + .getCollection(collectionNameWithSharedConfig) + .getConfigName(); + + assertEquals( + "Both collections should be using the same config", + configName, + configNameOfSecondCollection); + + // delete the initial collection - the config set should stay, since it is shared with the + // other collection + CollectionAdminRequest.Delete deleteInitialCollection = + CollectionAdminRequest.deleteCollection(collectionNameInitial); + cloudClient.request(deleteInitialCollection); + + // initial collection has been deleted + assertFalse( + getZkClient() + .exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionNameInitial, false)); + // ... but not its autocreated config set, since it is shared with another collection + assertTrue( + "The auto-created config set should NOT have been deleted. Another collection is using it.", + getZkClient().exists(ZkStateReader.CONFIGS_ZKNODE + "/" + configName, true)); + + // delete the second collection - the config set should now be deleted, since it is no longer + // shared with any other collection + CollectionAdminRequest.Delete deleteSecondCollection = + CollectionAdminRequest.deleteCollection(collectionNameWithSharedConfig); + cloudClient.request(deleteSecondCollection); + + // the collection has been deleted + assertFalse( + getZkClient() + .exists( + ZkStateReader.COLLECTIONS_ZKNODE + "/" + collectionNameWithSharedConfig, false)); + // ...
and the config set is now also deleted - once it doesn't get referenced by any + // collection + assertFalse( + "The auto-created config set should have been deleted now. No collection is referencing it.", + getZkClient().exists(ZkStateReader.CONFIGS_ZKNODE + "/" + configName, true)); } + } + public SolrZkClient getZkClient() { + return cloudClient.getZkStateReader().getZkClient(); + } } diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/SplitByPrefixTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/SplitByPrefixTest.java index cd548793e97..8a999a8311d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/SplitByPrefixTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/SplitByPrefixTest.java @@ -23,7 +23,6 @@ import java.util.Collection; import java.util.Collections; import java.util.List; - import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -40,9 +39,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * This class tests higher level SPLITSHARD functionality when splitByPrefix is specified. - * See SplitHandlerTest for random tests of lower-level split selection logic. +/** + * This class tests higher level SPLITSHARD functionality when splitByPrefix is specified. See + * SplitHandlerTest for random tests of lower-level split selection logic. */ public class SplitByPrefixTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -51,15 +50,13 @@ public class SplitByPrefixTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { - System.setProperty("managed.schema.mutable", "true"); // needed by cloud-managed config set + System.setProperty("managed.schema.mutable", "true"); // needed by cloud-managed config set // cloud-managed has the copyField from ID to id_prefix // cloud-minimal does not and thus histogram should be driven from the "id" field directly String configSetName = random().nextBoolean() ?
"cloud-minimal" : "cloud-managed"; - configureCluster(1) - .addConfig("conf", configset(configSetName)) // cloud-managed has the id copyfield to id_prefix - .configure(); + configureCluster(1).addConfig("conf", configset(configSetName)).configure(); } @Before @@ -86,21 +83,19 @@ public int compareTo(Prefix o) { @Override public String toString() { - return "prefix=" + key + ",range="+range; + return "prefix=" + key + ",range=" + range; } } - /** - * find prefixes (shard keys) matching certain criteria - */ + /** find prefixes (shard keys) matching certain criteria */ public static List findPrefixes(int numToFind, int lowerBound, int upperBound) { CompositeIdRouter router = new CompositeIdRouter(); ArrayList prefixes = new ArrayList<>(); int maxTries = 1000000; int numFound = 0; - for (int i=0; i= lowerBound && lower <= upperBound) { @@ -117,14 +112,12 @@ public static List findPrefixes(int numToFind, int lowerBound, int upper return prefixes; } - /** - * remove duplicate prefixes from the SORTED prefix list - */ + /** remove duplicate prefixes from the SORTED prefix list */ public static List removeDups(List prefixes) { ArrayList result = new ArrayList<>(); Prefix last = null; for (Prefix prefix : prefixes) { - if (last!=null && prefix.range.equals(last.range)) { + if (last != null && prefix.range.equals(last.range)) { continue; } last = prefix; @@ -134,78 +127,93 @@ public static List removeDups(List prefixes) { } // Randomly add a second level prefix to test that - // they are all collapsed to a single bucket. This behavior should change if/when counting support - // for more levels of compositeId values + // they are all collapsed to a single bucket. This behavior should change if/when counting + // support for more levels of compositeId values SolrInputDocument getDoc(String prefix, String unique) { String secondLevel = ""; if (random().nextBoolean()) { - prefix = prefix.substring(0, prefix.length()-1) + "/16!"; // change "foo!" into "foo/16!" to match 2 level compositeId - secondLevel="" + random().nextInt(2) + "!"; + // change "foo!" into "foo/16!" to match 2 level compositeId + prefix = prefix.substring(0, prefix.length() - 1) + "/16!"; + secondLevel = "" + random().nextInt(2) + "!"; } return sdoc("id", prefix + secondLevel + unique); } - @Test public void doTest() throws IOException, SolrServerException { // SPLITSHARD is recommended to be run in async mode, so we default to that. // Also, autoscale triggers use async with splits as well. boolean doAsync = true; - CollectionAdminRequest - .createCollection(COLLECTION_NAME, "conf", 1, 1) + CollectionAdminRequest.createCollection(COLLECTION_NAME, "conf", 1, 1) .process(cluster.getSolrClient()); cluster.waitForActiveCollection(COLLECTION_NAME, 1, 1); - CloudSolrClient client = cluster.getSolrClient(); client.setDefaultCollection(COLLECTION_NAME); - // splitting an empty collection by prefix should still work (i.e. fall back to old method of just dividing the hash range + // splitting an empty collection by prefix should still work (i.e. 
fall back to old method of + // just dividing the hash range) - CollectionAdminRequest.SplitShard splitShard = CollectionAdminRequest.splitShard(COLLECTION_NAME) - .setNumSubShards(2) - .setSplitByPrefix(true) - .setShardName("shard1"); + CollectionAdminRequest.SplitShard splitShard = + CollectionAdminRequest.splitShard(COLLECTION_NAME) + .setNumSubShards(2) + .setSplitByPrefix(true) + .setShardName("shard1"); if (doAsync) { splitShard.setAsyncId("SPLIT1"); } splitShard.process(client); - waitForState("Timed out waiting for sub shards to be active.", - COLLECTION_NAME, activeClusterShape(2, 3)); // expectedReplicas==3 because original replica still exists (just inactive) - + // expectedReplicas==3 because original replica still exists (just inactive) + waitForState( + "Timed out waiting for sub shards to be active.", + COLLECTION_NAME, + activeClusterShape(2, 3)); List<Prefix> prefixes = findPrefixes(20, 0, 0x00ffffff); List<Prefix> uniquePrefixes = removeDups(prefixes); - if (uniquePrefixes.size() % 2 == 1) { // make it an even sized list so we can split it exactly in two - uniquePrefixes.remove(uniquePrefixes.size()-1); + // make it an even sized list so we can split it exactly in two + if (uniquePrefixes.size() % 2 == 1) { + uniquePrefixes.remove(uniquePrefixes.size() - 1); } log.info("Unique prefixes: {}", uniquePrefixes); for (Prefix prefix : uniquePrefixes) { - client.add( getDoc(prefix.key, "doc1") ); - client.add( getDoc(prefix.key, "doc2") ); + client.add(getDoc(prefix.key, "doc1")); + client.add(getDoc(prefix.key, "doc2")); } client.commit(); - - splitShard = CollectionAdminRequest.splitShard(COLLECTION_NAME) - .setSplitByPrefix(true) - .setShardName("shard1_1"); // should start out with the range of 0-7fffffff + splitShard = + CollectionAdminRequest.splitShard(COLLECTION_NAME) + .setSplitByPrefix(true) + .setShardName("shard1_1"); // should start out with the range of 0-7fffffff if (doAsync) { splitShard.setAsyncId("SPLIT2"); } splitShard.process(client); - waitForState("Timed out waiting for sub shards to be active.", - COLLECTION_NAME, activeClusterShape(3, 5)); + waitForState( + "Timed out waiting for sub shards to be active.", + COLLECTION_NAME, + activeClusterShape(3, 5)); // OK, now let's check that the correct split point was chosen - // We can use the router to find the shards for the middle prefixes and they should be different. - - DocCollection collection = client.getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); - Collection slices1 = collection.getRouter().getSearchSlicesSingle(uniquePrefixes.get(uniquePrefixes.size()/2 - 1).key, null, collection); - Collection slices2 = collection.getRouter().getSearchSlicesSingle(uniquePrefixes.get(uniquePrefixes.size()/2 ).key, null, collection); + // We can use the router to find the shards for the middle prefixes and they should be + // different.
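// Aside for readers new to compositeId routing, sketched with the same router API this test
// already calls just below (no new helpers assumed): an id such as "foo!doc1" is hashed on its
// "foo!" prefix, so all ids sharing a prefix land in one contiguous hash range, and
//   Collection<Slice> hits =
//       collection.getRouter().getSearchSlicesSingle("foo!doc1", null, collection);
// resolves to the single slice owning that range. If SPLITSHARD honored splitByPrefix, the two
// middle prefixes looked up next must therefore land on two different slices.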
+ + DocCollection collection = + client.getZkStateReader().getClusterState().getCollection(COLLECTION_NAME); + Collection slices1 = + collection + .getRouter() + .getSearchSlicesSingle( + uniquePrefixes.get(uniquePrefixes.size() / 2 - 1).key, null, collection); + Collection slices2 = + collection + .getRouter() + .getSearchSlicesSingle( + uniquePrefixes.get(uniquePrefixes.size() / 2).key, null, collection); Slice slice1 = slices1.iterator().next(); Slice slice2 = slices2.iterator().next(); @@ -213,28 +221,32 @@ public void doTest() throws IOException, SolrServerException { assertTrue(slices1.size() == 1 && slices2.size() == 1); assertTrue(slice1 != slice2); - // // now lets add enough documents to the first prefix to get it split out on its own // - for (int i=0; i rsp = CollectionAdminRequest.getClusterStatus().setCollectionName(COLLECTION_NAME) - .process(client).getResponse(); + NamedList rsp = + CollectionAdminRequest.getClusterStatus() + .setCollectionName(COLLECTION_NAME) + .process(client) + .getResponse(); NamedList cluster = (NamedList) rsp.get("cluster"); assertNotNull("Cluster state should not be null", cluster); NamedList collections = (NamedList) cluster.get("collections"); @@ -136,8 +143,11 @@ private void testModifyCollection() throws Exception { client.request(request); - rsp = CollectionAdminRequest.getClusterStatus().setCollectionName(COLLECTION_NAME) - .process(client).getResponse(); + rsp = + CollectionAdminRequest.getClusterStatus() + .setCollectionName(COLLECTION_NAME) + .process(client) + .getResponse(); System.out.println(rsp); cluster = (NamedList) rsp.get("cluster"); assertNotNull("Cluster state should not be null", cluster); @@ -165,7 +175,7 @@ private void testModifyCollection() throws Exception { private void testReplicationFactorValidaton() throws Exception { try (CloudSolrClient client = createCloudClient(null)) { - //Test that you can't specify both replicationFactor and nrtReplicas + // Test that you can't specify both replicationFactor and nrtReplicas ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.CREATE.toString()); params.set("name", "test_repFactorColl"); @@ -180,17 +190,20 @@ private void testReplicationFactorValidaton() throws Exception { fail(); } catch (BaseHttpSolrClient.RemoteSolrException e) { final String errorMessage = e.getMessage(); - assertTrue(errorMessage.contains("Cannot specify both replicationFactor and nrtReplicas as they mean the same thing")); + assertTrue( + errorMessage.contains( + "Cannot specify both replicationFactor and nrtReplicas as they mean the same thing")); } - //Create it again correctly - CollectionAdminRequest.Create req = CollectionAdminRequest.createCollection("test_repFactorColl", "conf1", 1, 3, 0, 0); + // Create it again correctly + CollectionAdminRequest.Create req = + CollectionAdminRequest.createCollection("test_repFactorColl", "conf1", 1, 3, 0, 0); client.request(req); waitForCollection(cloudClient.getZkStateReader(), "test_repFactorColl", 1); waitForRecoveriesToFinish("test_repFactorColl", false); - //Assert that replicationFactor has also been set to 3 + // Assert that replicationFactor has also been set to 3 assertCountsForRepFactorAndNrtReplicas(client, "test_repFactorColl"); params = new ModifiableSolrParams(); @@ -211,7 +224,8 @@ private void testNoConfigset() throws Exception { final String collection = "deleted_collection"; try (CloudSolrClient client = createCloudClient(null)) { - copyConfigUp(TEST_PATH().resolve("configsets"), 
"cloud-minimal", configSet, client.getZkHost()); + copyConfigUp( + TEST_PATH().resolve("configsets"), "cloud-minimal", configSet, client.getZkHost()); ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.CREATE.toString()); @@ -238,11 +252,13 @@ private void testNoConfigset() throws Exception { assertNotNull("Cluster state should not be null", cluster); NamedList collections = (NamedList) cluster.get("collections"); assertNotNull("Collections should not be null in cluster state", collections); - assertNotNull("Testing to insure collections are returned", collections.get(COLLECTION_NAME1)); + assertNotNull( + "Testing to insure collections are returned", collections.get(COLLECTION_NAME1)); } } - private void deleteThemAll(SolrZkClient zkClient, String node) throws KeeperException, InterruptedException { + private void deleteThemAll(SolrZkClient zkClient, String node) + throws KeeperException, InterruptedException { List kids = zkClient.getChildren(node, null, true); for (String kid : kids) { deleteThemAll(zkClient, node + "/" + kid); @@ -250,7 +266,8 @@ private void deleteThemAll(SolrZkClient zkClient, String node) throws KeeperExce zkClient.delete(node, -1, true); } - private void assertCountsForRepFactorAndNrtReplicas(CloudSolrClient client, String collectionName) throws Exception { + private void assertCountsForRepFactorAndNrtReplicas(CloudSolrClient client, String collectionName) + throws Exception { ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString()); params.set("collection", collectionName); @@ -294,13 +311,14 @@ private void clusterStatusWithCollectionAndShard() throws IOException, SolrServe @SuppressWarnings({"unchecked"}) Map selectedShardStatus = (Map) shardStatus.get(SHARD1); assertNotNull(selectedShardStatus); - } } - private void clusterStatusWithCollectionAndMultipleShards() throws IOException, SolrServerException { + private void clusterStatusWithCollectionAndMultipleShards() + throws IOException, SolrServerException { try (CloudSolrClient client = createCloudClient(null)) { - final CollectionAdminRequest.ClusterStatus request = new CollectionAdminRequest.ClusterStatus(); + final CollectionAdminRequest.ClusterStatus request = + new CollectionAdminRequest.ClusterStatus(); request.setCollectionName(COLLECTION_NAME); request.setShardName(SHARD1 + "," + SHARD2); @@ -328,12 +346,15 @@ private void clusterStatusWithCollectionAndMultipleShards() throws IOException, @SuppressWarnings({"unchecked"}) private void clusterStatusWithCollectionHealthState() throws Exception { try (CloudSolrClient client = createCloudClient(null)) { - final CollectionAdminRequest.ClusterStatus request = new CollectionAdminRequest.ClusterStatus(); + final CollectionAdminRequest.ClusterStatus request = + new CollectionAdminRequest.ClusterStatus(); request.setCollectionName(COLLECTION_NAME); NamedList rsp = request.process(client).getResponse(); NamedList cluster = (NamedList) rsp.get("cluster"); assertNotNull("Cluster state should not be null", cluster); - Map collection = (Map) Utils.getObjectByPath(cluster, false, "collections/" + COLLECTION_NAME); + Map collection = + (Map) + Utils.getObjectByPath(cluster, false, "collections/" + COLLECTION_NAME); assertEquals("collection health", "GREEN", collection.get("health")); Map shardStatus = (Map) collection.get("shards"); assertEquals(2, shardStatus.size()); @@ -347,32 +368,51 @@ private void 
clusterStatusWithCollectionHealthState() throws Exception { String nodeName = jetty.getNodeName(); jetty.stop(); ZkStateReader zkStateReader = client.getZkStateReader(); - zkStateReader.waitForState(COLLECTION_NAME, 30, TimeUnit.SECONDS, (liveNodes, docCollection) -> - docCollection != null && - docCollection.getReplicas().stream() - .anyMatch(r -> r.getState().equals(Replica.State.DOWN))); + zkStateReader.waitForState( + COLLECTION_NAME, + 30, + TimeUnit.SECONDS, + (liveNodes, docCollection) -> + docCollection != null + && docCollection.getReplicas().stream() + .anyMatch(r -> r.getState().equals(Replica.State.DOWN))); rsp = request.process(client).getResponse(); - collection = (Map) Utils.getObjectByPath(rsp, false, "cluster/collections/" + COLLECTION_NAME); + collection = + (Map) + Utils.getObjectByPath(rsp, false, "cluster/collections/" + COLLECTION_NAME); assertNotEquals("collection health should not be GREEN", "GREEN", collection.get("health")); shardStatus = (Map) collection.get("shards"); assertEquals(2, shardStatus.size()); String health1 = (String) Utils.getObjectByPath(shardStatus, false, "shard1/health"); String health2 = (String) Utils.getObjectByPath(shardStatus, false, "shard2/health"); - assertTrue("shard1=" + health1 + ", shard2=" + health2, !"GREEN".equals(health1) || !"GREEN".equals(health2)); + assertTrue( + "shard1=" + health1 + ", shard2=" + health2, + !"GREEN".equals(health1) || !"GREEN".equals(health2)); // bring them up again jetty.start(); - // Need to start a new client, in case the http connections in the old client are still cached to the restarted server. + // Need to start a new client, in case the http connections in the old client are still cached + // to the restarted server. // If this is the case, it will throw an HTTP Exception, and we don't retry Admin requests. 
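// Illustrative note on the wait below: waitForLiveNodes takes a predicate over the old and new
// live-node sets and blocks until it matches (or the 30 second timeout trips); the lambda
// ignores the old set and simply waits for the restarted node's name to reappear, i.e.
//   (oldNodes, newNodes) -> newNodes != null && newNodes.contains(nodeName)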
try (CloudSolrClient newClient = createCloudClient(null)) { - newClient.getZkStateReader().waitForLiveNodes(30, TimeUnit.SECONDS, (o, n) -> n != null && n.contains(nodeName)); - newClient.getZkStateReader().waitForState(COLLECTION_NAME, 30, TimeUnit.SECONDS, (liveNodes, coll) -> - coll != null && - coll.getReplicas().stream() - .allMatch(r -> r.getState().equals(Replica.State.ACTIVE))); + newClient + .getZkStateReader() + .waitForLiveNodes(30, TimeUnit.SECONDS, (o, n) -> n != null && n.contains(nodeName)); + newClient + .getZkStateReader() + .waitForState( + COLLECTION_NAME, + 30, + TimeUnit.SECONDS, + (liveNodes, coll) -> + coll != null + && coll.getReplicas().stream() + .allMatch(r -> r.getState().equals(Replica.State.ACTIVE))); rsp = request.process(newClient).getResponse(); - collection = (Map) Utils.getObjectByPath(rsp, false, "cluster/collections/" + COLLECTION_NAME); + collection = + (Map) + Utils.getObjectByPath(rsp, false, "cluster/collections/" + COLLECTION_NAME); assertEquals("collection health", "GREEN", collection.get("health")); shardStatus = (Map) collection.get("shards"); assertEquals(2, shardStatus.size()); @@ -384,7 +424,6 @@ private void clusterStatusWithCollectionHealthState() throws Exception { } } - private void listCollection() throws IOException, SolrServerException { try (CloudSolrClient client = createCloudClient(null)) { ModifiableSolrParams params = new ModifiableSolrParams(); @@ -394,12 +433,14 @@ private void listCollection() throws IOException, SolrServerException { NamedList rsp = client.request(request); List collections = (List) rsp.get("collections"); - assertTrue("control_collection was not found in list", collections.contains("control_collection")); - assertTrue(DEFAULT_COLLECTION + " was not found in list", collections.contains(DEFAULT_COLLECTION)); + assertTrue( + "control_collection was not found in list", collections.contains("control_collection")); + assertTrue( + DEFAULT_COLLECTION + " was not found in list", collections.contains(DEFAULT_COLLECTION)); assertTrue(COLLECTION_NAME + " was not found in list", collections.contains(COLLECTION_NAME)); - assertTrue(COLLECTION_NAME1 + " was not found in list", collections.contains(COLLECTION_NAME1)); + assertTrue( + COLLECTION_NAME1 + " was not found in list", collections.contains(COLLECTION_NAME1)); } - } private void clusterStatusNoCollection() throws Exception { @@ -422,7 +463,6 @@ private void clusterStatusNoCollection() throws Exception { assertNotNull("Live nodes should not be null", liveNodes); assertFalse(liveNodes.isEmpty()); } - } private void clusterStatusWithCollection() throws IOException, SolrServerException { @@ -443,7 +483,7 @@ private void clusterStatusWithCollection() throws IOException, SolrServerExcepti Map collection = (Map) collections.get(COLLECTION_NAME); assertNotNull(collection); assertEquals("conf1", collection.get("configName")); -// assertEquals("1", collection.get("nrtReplicas")); + // assertEquals("1", collection.get("nrtReplicas")); } } @@ -472,7 +512,8 @@ private void clusterStatusZNodeVersion() throws Exception { Integer znodeVersion = (Integer) collection.get("znodeVersion"); assertNotNull(znodeVersion); - CollectionAdminRequest.AddReplica addReplica = CollectionAdminRequest.addReplicaToShard(cname, "shard1"); + CollectionAdminRequest.AddReplica addReplica = + CollectionAdminRequest.addReplicaToShard(cname, "shard1"); addReplica.process(client); waitForRecoveriesToFinish(cname, true); @@ -539,7 +580,7 @@ private void clusterStatusWithRouteKey() throws IOException, 
SolrServerException } @SuppressWarnings({"unchecked"}) - private void clusterStatusAliasTest() throws Exception { + private void clusterStatusAliasTest() throws Exception { try (CloudSolrClient client = createCloudClient(null)) { // create an alias named myalias ModifiableSolrParams params = new ModifiableSolrParams(); @@ -564,8 +605,10 @@ private void clusterStatusAliasTest() throws Exception { assertNotNull("Cluster state should not be null", cluster); Map aliases = (Map) cluster.get("aliases"); assertNotNull("Aliases should not be null", aliases); - assertEquals("Alias: myalias not found in cluster status", - DEFAULT_COLLECTION + "," + COLLECTION_NAME, aliases.get("myalias")); + assertEquals( + "Alias: myalias not found in cluster status", + DEFAULT_COLLECTION + "," + COLLECTION_NAME, + aliases.get("myalias")); NamedList collections = (NamedList) cluster.get("collections"); assertNotNull("Collections should not be null in cluster state", collections); @@ -602,14 +645,15 @@ private void clusterStatusAliasTest() throws Exception { // SOLR-12938 - this should still cause an exception try { client.request(request); - fail("requesting status for 'notAnAliasOrCollection' should cause an exception from CLUSTERSTATUS" ); + fail( + "requesting status for 'notAnAliasOrCollection' should cause an exception from CLUSTERSTATUS"); } catch (RuntimeException e) { // success } } } - private void clusterStatusRolesTest() throws Exception { + private void clusterStatusRolesTest() throws Exception { try (CloudSolrClient client = createCloudClient(null)) { client.connect(); Replica replica = client.getZkStateReader().getLeaderRetry(DEFAULT_COLLECTION, SHARD1); @@ -653,7 +697,7 @@ private void clusterStatusBadCollectionTest() throws Exception { client.request(request); fail("Collection does not exist. 
An exception should be thrown"); } catch (SolrException e) { - //expected + // expected assertTrue(e.getMessage().contains("Collection: bad_collection_name not found")); } } @@ -662,7 +706,8 @@ private void clusterStatusBadCollectionTest() throws Exception { private void replicaPropTest() throws Exception { try (CloudSolrClient client = createCloudClient(null)) { client.connect(); - Map slices = client.getZkStateReader().getClusterState().getCollection(COLLECTION_NAME).getSlicesMap(); + Map slices = + client.getZkStateReader().getClusterState().getCollection(COLLECTION_NAME).getSlicesMap(); List sliceList = new ArrayList<>(slices.keySet()); String c1_s1 = sliceList.get(0); List replicasList = new ArrayList<>(slices.get(c1_s1).getReplicasMap().keySet()); @@ -673,7 +718,12 @@ private void replicaPropTest() throws Exception { replicasList = new ArrayList<>(slices.get(c1_s2).getReplicasMap().keySet()); String c1_s2_r1 = replicasList.get(0); - slices = client.getZkStateReader().getClusterState().getCollection(COLLECTION_NAME1).getSlicesMap(); + slices = + client + .getZkStateReader() + .getClusterState() + .getCollection(COLLECTION_NAME1) + .getSlicesMap(); sliceList = new ArrayList<>(slices.keySet()); String c2_s1 = sliceList.get(0); replicasList = new ArrayList<>(slices.get(c2_s1).getReplicasMap().keySet()); @@ -703,37 +753,58 @@ private void replicaPropTest() throws Exception { verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "preferredleader", "true"); verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader"); - doPropertyAction(client, - "action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString(), - "collection", COLLECTION_NAME, - "shard", c1_s1, - "replica", c1_s1_r2, - "property", "preferredLeader", - "property.value", "true"); + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.ADDREPLICAPROP.toString(), + "collection", + COLLECTION_NAME, + "shard", + c1_s1, + "replica", + c1_s1_r2, + "property", + "preferredLeader", + "property.value", + "true"); // The preferred leader property for shard1 should have switched to the other replica. verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "preferredleader", "true"); verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader"); - doPropertyAction(client, - "action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString(), - "collection", COLLECTION_NAME, - "shard", c1_s2, - "replica", c1_s2_r1, - "property", "preferredLeader", - "property.value", "true"); + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.ADDREPLICAPROP.toString(), + "collection", + COLLECTION_NAME, + "shard", + c1_s2, + "replica", + c1_s2_r1, + "property", + "preferredLeader", + "property.value", + "true"); // Now we should have a preferred leader in both shards... 
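// Conceptually, the uniqueness checks below assert that at most one replica per slice carries
// the shard-unique property - roughly this sketch, not the helper's actual implementation:
//   for (Slice s : clusterState.getCollection(COLLECTION_NAME).getSlices()) {
//     long owners =
//         s.getReplicas().stream()
//             .filter(r -> "true".equals(r.getProperty("preferredleader")))
//             .count();
//     assertTrue(owners <= 1);
//   }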
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "preferredleader", "true"); verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "preferredleader", "true"); verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader"); - doPropertyAction(client, - "action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString(), - "collection", COLLECTION_NAME1, - "shard", c2_s1, - "replica", c2_s1_r1, - "property", "preferredLeader", - "property.value", "true"); + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.ADDREPLICAPROP.toString(), + "collection", + COLLECTION_NAME1, + "shard", + c2_s1, + "replica", + c2_s1_r1, + "property", + "preferredLeader", + "property.value", + "true"); // Now we should have three preferred leaders. verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "preferredleader", "true"); @@ -742,12 +813,18 @@ private void replicaPropTest() throws Exception { verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader"); verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "preferredLeader"); - doPropertyAction(client, - "action", CollectionParams.CollectionAction.DELETEREPLICAPROP.toString(), - "collection", COLLECTION_NAME1, - "shard", c2_s1, - "replica", c2_s1_r1, - "property", "preferredLeader"); + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.DELETEREPLICAPROP.toString(), + "collection", + COLLECTION_NAME1, + "shard", + c2_s1, + "replica", + c2_s1_r1, + "property", + "preferredLeader"); // Now we should have two preferred leaders. // But first we have to wait for the overseer to finish the action @@ -757,13 +834,20 @@ private void replicaPropTest() throws Exception { verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "preferredLeader"); // Try adding an arbitrary property to one that has the leader property - doPropertyAction(client, - "action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString(), - "collection", COLLECTION_NAME, - "shard", c1_s1, - "replica", c1_s1_r1, - "property", "testprop", - "property.value", "true"); + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.ADDREPLICAPROP.toString(), + "collection", + COLLECTION_NAME, + "shard", + c1_s1, + "replica", + c1_s1_r1, + "property", + "testprop", + "property.value", + "true"); verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "preferredleader", "true"); verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "preferredleader", "true"); @@ -771,13 +855,20 @@ private void replicaPropTest() throws Exception { verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader"); verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "preferredLeader"); - doPropertyAction(client, - "action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString(), - "collection", COLLECTION_NAME, - "shard", c1_s1, - "replica", c1_s1_r2, - "property", "prop", - "property.value", "silly"); + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.ADDREPLICAPROP.toString(), + "collection", + COLLECTION_NAME, + "shard", + c1_s1, + "replica", + c1_s1_r2, + "property", + "prop", + "property.value", + "silly"); verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "preferredleader", "true"); verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "preferredleader", "true"); @@ -786,14 +877,22 @@ private void replicaPropTest() throws Exception { verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader"); 
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "preferredLeader"); - doPropertyAction(client, - "action", CollectionParams.CollectionAction.ADDREPLICAPROP.toLower(), - "collection", COLLECTION_NAME, - "shard", c1_s1, - "replica", c1_s1_r1, - "property", "testprop", - "property.value", "nonsense", - CollectionHandlingUtils.SHARD_UNIQUE, "true"); + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.ADDREPLICAPROP.toLower(), + "collection", + COLLECTION_NAME, + "shard", + c1_s1, + "replica", + c1_s1_r1, + "property", + "testprop", + "property.value", + "nonsense", + CollectionHandlingUtils.SHARD_UNIQUE, + "true"); verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "preferredleader", "true"); verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "preferredleader", "true"); @@ -802,15 +901,22 @@ private void replicaPropTest() throws Exception { verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader"); verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "preferredLeader"); - - doPropertyAction(client, - "action", CollectionParams.CollectionAction.ADDREPLICAPROP.toLower(), - "collection", COLLECTION_NAME, - "shard", c1_s1, - "replica", c1_s1_r1, - "property", "property.testprop", - "property.value", "true", - CollectionHandlingUtils.SHARD_UNIQUE, "false"); + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.ADDREPLICAPROP.toLower(), + "collection", + COLLECTION_NAME, + "shard", + c1_s1, + "replica", + c1_s1_r1, + "property", + "property.testprop", + "property.value", + "true", + CollectionHandlingUtils.SHARD_UNIQUE, + "false"); verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "preferredleader", "true"); verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "preferredleader", "true"); @@ -819,12 +925,18 @@ private void replicaPropTest() throws Exception { verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader"); verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "preferredLeader"); - doPropertyAction(client, - "action", CollectionParams.CollectionAction.DELETEREPLICAPROP.toLower(), - "collection", COLLECTION_NAME, - "shard", c1_s1, - "replica", c1_s1_r1, - "property", "property.testprop"); + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.DELETEREPLICAPROP.toLower(), + "collection", + COLLECTION_NAME, + "shard", + c1_s1, + "replica", + c1_s1_r1, + "property", + "property.testprop"); verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "preferredleader", "true"); verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "preferredleader", "true"); @@ -834,18 +946,29 @@ private void replicaPropTest() throws Exception { verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "preferredLeader"); try { - doPropertyAction(client, - "action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString(), - "collection", COLLECTION_NAME, - "shard", c1_s1, - "replica", c1_s1_r1, - "property", "preferredLeader", - "property.value", "true", - CollectionHandlingUtils.SHARD_UNIQUE, "false"); - fail("Should have thrown an exception, setting shardUnique=false is not allowed for 'preferredLeader'."); + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.ADDREPLICAPROP.toString(), + "collection", + COLLECTION_NAME, + "shard", + c1_s1, + "replica", + c1_s1_r1, + "property", + "preferredLeader", + "property.value", + "true", + CollectionHandlingUtils.SHARD_UNIQUE, + "false"); + fail( + "Should have thrown an exception, 
setting shardUnique=false is not allowed for 'preferredLeader'."); } catch (SolrException se) { - assertTrue("Should have received a specific error message", - se.getMessage().contains("with the shardUnique parameter set to something other than 'true'")); + assertTrue( + "Should have received a specific error message", + se.getMessage() + .contains("with the shardUnique parameter set to something other than 'true'")); } verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "preferredleader", "true"); @@ -855,40 +978,68 @@ private void replicaPropTest() throws Exception { verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "preferredLeader"); verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "preferredLeader"); - Map<String, String> origProps = getProps(client, COLLECTION_NAME, c1_s1_r1, - "state", "core", "node_name", "base_url"); - - doPropertyAction(client, - "action", CollectionParams.CollectionAction.ADDREPLICAPROP.toLower(), - "collection", COLLECTION_NAME, - "shard", c1_s1, - "replica", c1_s1_r1, - "property", "state", - "property.value", "state_bad"); - - doPropertyAction(client, - "action", CollectionParams.CollectionAction.ADDREPLICAPROP.toLower(), - "collection", COLLECTION_NAME, - "shard", c1_s1, - "replica", c1_s1_r1, - "property", "core", - "property.value", "core_bad"); - - doPropertyAction(client, - "action", CollectionParams.CollectionAction.ADDREPLICAPROP.toLower(), - "collection", COLLECTION_NAME, - "shard", c1_s1, - "replica", c1_s1_r1, - "property", "node_name", - "property.value", "node_name_bad"); - - doPropertyAction(client, - "action", CollectionParams.CollectionAction.ADDREPLICAPROP.toLower(), - "collection", COLLECTION_NAME, - "shard", c1_s1, - "replica", c1_s1_r1, - "property", "base_url", - "property.value", "base_url_bad"); + Map<String, String> origProps = + getProps(client, COLLECTION_NAME, c1_s1_r1, "state", "core", "node_name", "base_url"); + + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.ADDREPLICAPROP.toLower(), + "collection", + COLLECTION_NAME, + "shard", + c1_s1, + "replica", + c1_s1_r1, + "property", + "state", + "property.value", + "state_bad"); + + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.ADDREPLICAPROP.toLower(), + "collection", + COLLECTION_NAME, + "shard", + c1_s1, + "replica", + c1_s1_r1, + "property", + "core", + "property.value", + "core_bad"); + + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.ADDREPLICAPROP.toLower(), + "collection", + COLLECTION_NAME, + "shard", + c1_s1, + "replica", + c1_s1_r1, + "property", + "node_name", + "property.value", + "node_name_bad"); + + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.ADDREPLICAPROP.toLower(), + "collection", + COLLECTION_NAME, + "shard", + c1_s1, + "replica", + c1_s1_r1, + "property", + "base_url", + "property.value", + "base_url_bad"); // The above should be on new properties.
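// Context for the assertions that follow, inferred from this test's own expectations: since
// "state", "core", "node_name" and "base_url" are reserved replica keys, ADDREPLICAPROP stores
// the values above under a "property." prefix (e.g. "property.state") instead of overwriting
// the real entries, so the originals captured in origProps must survive the adds and deletes.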
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "state", "state_bad"); @@ -896,33 +1047,57 @@ private void replicaPropTest() throws Exception { verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "node_name", "node_name_bad"); verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "base_url", "base_url_bad"); - doPropertyAction(client, - "action", CollectionParams.CollectionAction.DELETEREPLICAPROP.toLower(), - "collection", COLLECTION_NAME, - "shard", c1_s1, - "replica", c1_s1_r1, - "property", "state"); - - doPropertyAction(client, - "action", CollectionParams.CollectionAction.DELETEREPLICAPROP.toLower(), - "collection", COLLECTION_NAME, - "shard", c1_s1, - "replica", c1_s1_r1, - "property", "core"); - - doPropertyAction(client, - "action", CollectionParams.CollectionAction.DELETEREPLICAPROP.toLower(), - "collection", COLLECTION_NAME, - "shard", c1_s1, - "replica", c1_s1_r1, - "property", "node_name"); - - doPropertyAction(client, - "action", CollectionParams.CollectionAction.DELETEREPLICAPROP.toLower(), - "collection", COLLECTION_NAME, - "shard", c1_s1, - "replica", c1_s1_r1, - "property", "base_url"); + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.DELETEREPLICAPROP.toLower(), + "collection", + COLLECTION_NAME, + "shard", + c1_s1, + "replica", + c1_s1_r1, + "property", + "state"); + + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.DELETEREPLICAPROP.toLower(), + "collection", + COLLECTION_NAME, + "shard", + c1_s1, + "replica", + c1_s1_r1, + "property", + "core"); + + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.DELETEREPLICAPROP.toLower(), + "collection", + COLLECTION_NAME, + "shard", + c1_s1, + "replica", + c1_s1_r1, + "property", + "node_name"); + + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.DELETEREPLICAPROP.toLower(), + "collection", + COLLECTION_NAME, + "shard", + c1_s1, + "replica", + c1_s1_r1, + "property", + "base_url"); // They better not have been changed! for (Map.Entry ent : origProps.entrySet()) { @@ -933,7 +1108,6 @@ private void replicaPropTest() throws Exception { verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "core"); verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "node_name"); verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "base_url"); - } } @@ -956,7 +1130,7 @@ private void testCollectionCreationCollectionNameValidation() throws Exception { } } } - + private void testCollectionCreationShardNameValidation() throws Exception { try (CloudSolrClient client = createCloudClient(null)) { ModifiableSolrParams params = new ModifiableSolrParams(); @@ -979,8 +1153,8 @@ private void testCollectionCreationShardNameValidation() throws Exception { } } } - - private void testAliasCreationNameValidation() throws Exception{ + + private void testAliasCreationNameValidation() throws Exception { try (CloudSolrClient client = createCloudClient(null)) { ModifiableSolrParams params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.CREATEALIAS.toString()); @@ -1035,7 +1209,8 @@ private void testShardCreationNameValidation() throws Exception { } // Expects the map will have keys, but blank values. - private Map getProps(CloudSolrClient client, String collectionName, String replicaName, String... props) + private Map getProps( + CloudSolrClient client, String collectionName, String replicaName, String... 
props) throws KeeperException, InterruptedException { client.getZkStateReader().forceUpdateCollection(collectionName); @@ -1051,6 +1226,7 @@ private Map getProps(CloudSolrClient client, String collectionNa } return propMap; } + private void missingParamsError(CloudSolrClient client, ModifiableSolrParams origParams) throws IOException, SolrServerException { @@ -1061,36 +1237,48 @@ private void missingParamsError(CloudSolrClient client, ModifiableSolrParams ori client.request(request); fail("Should have thrown a SolrException due to lack of a required parameter."); } catch (SolrException se) { - assertTrue("Should have gotten a specific message back mentioning 'missing required parameter'. Got: " + se.getMessage(), + assertTrue( + "Should have gotten a specific message back mentioning 'missing required parameter'. Got: " + + se.getMessage(), se.getMessage().toLowerCase(Locale.ROOT).contains("missing required parameter:")); } } /** - * After a failed attempt to create a collection (due to bad configs), assert that - * the collection can be created with a good collection. + * After a failed attempt to create a collection (due to bad configs), assert that the collection + * can be created with a good collection. */ @Test @ShardsFixed(num = 2) public void testRecreateCollectionAfterFailure() throws Exception { // Upload a bad configset - SolrZkClient zkClient = new SolrZkClient(zkServer.getZkHost(), ZkTestServer.TIMEOUT, - ZkTestServer.TIMEOUT, null); - ZkTestServer.putConfig("badconf", zkClient, "/solr", ZkTestServer.SOLRHOME, "bad-error-solrconfig.xml", "solrconfig.xml"); - ZkTestServer.putConfig("badconf", zkClient, "/solr", ZkTestServer.SOLRHOME, "schema-minimal.xml", "schema.xml"); + SolrZkClient zkClient = + new SolrZkClient(zkServer.getZkHost(), ZkTestServer.TIMEOUT, ZkTestServer.TIMEOUT, null); + ZkTestServer.putConfig( + "badconf", + zkClient, + "/solr", + ZkTestServer.SOLRHOME, + "bad-error-solrconfig.xml", + "solrconfig.xml"); + ZkTestServer.putConfig( + "badconf", zkClient, "/solr", ZkTestServer.SOLRHOME, "schema-minimal.xml", "schema.xml"); zkClient.close(); try (CloudSolrClient client = createCloudClient(null)) { // first, try creating a collection with badconf - BaseHttpSolrClient.RemoteSolrException rse = expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> { - CollectionAdminRequest.createCollection - ("testcollection", "badconf", 1, 2).process(client); - }); + BaseHttpSolrClient.RemoteSolrException rse = + expectThrows( + BaseHttpSolrClient.RemoteSolrException.class, + () -> { + CollectionAdminRequest.createCollection("testcollection", "badconf", 1, 2) + .process(client); + }); assertNotNull(rse.getMessage()); assertNotSame(0, rse.code()); - CollectionAdminResponse rsp = CollectionAdminRequest.createCollection - ("testcollection", "conf1", 1, 2).process(client); + CollectionAdminResponse rsp = + CollectionAdminRequest.createCollection("testcollection", "conf1", 1, 2).process(client); assertNull(rsp.getErrorMessages()); assertSame(0, rsp.getStatus()); } diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionsAPIViaSolrCloudCluster.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionsAPIViaSolrCloudCluster.java index 13bbcda005c..f9bae8dfdef 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionsAPIViaSolrCloudCluster.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionsAPIViaSolrCloudCluster.java @@ -26,7 +26,6 @@ import java.util.List; import 
java.util.Map; import java.util.Set; - import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.embedded.JettySolrRunner; @@ -46,9 +45,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * Test of the Collections API with the MiniSolrCloudCluster. - */ +/** Test of the Collections API with the MiniSolrCloudCluster. */ @LuceneTestCase.Slow public class TestCollectionsAPIViaSolrCloudCluster extends SolrCloudTestCase { @@ -58,7 +55,8 @@ public class TestCollectionsAPIViaSolrCloudCluster extends SolrCloudTestCase { private static final int numReplicas = 2; private static final int nodeCount = 5; private static final String configName = "solrCloudCollectionConfig"; - private static final Map collectionProperties // ensure indexes survive core shutdown + private static final Map + collectionProperties // ensure indexes survive core shutdown = Collections.singletonMap("solr.directoryFactory", "solr.StandardDirectoryFactory"); @Override @@ -66,7 +64,7 @@ public void setUp() throws Exception { configureCluster(nodeCount).addConfig(configName, configset("cloud-minimal")).configure(); super.setUp(); } - + @Override public void tearDown() throws Exception { cluster.shutdown(); @@ -79,16 +77,15 @@ private void createCollection(String collectionName, String createNodeSet) throw .setCreateNodeSet(createNodeSet) .setProperties(collectionProperties) .processAndWait(cluster.getSolrClient(), 30); - } - else { + } else { CollectionAdminRequest.createCollection(collectionName, configName, numShards, numReplicas) .setCreateNodeSet(createNodeSet) .setProperties(collectionProperties) .process(cluster.getSolrClient()); - } - - if (createNodeSet != null && createNodeSet.equals(CollectionHandlingUtils.CREATE_NODE_SET_EMPTY)) { + + if (createNodeSet != null + && createNodeSet.equals(CollectionHandlingUtils.CREATE_NODE_SET_EMPTY)) { cluster.waitForActiveCollection(collectionName, numShards, 0); } else { cluster.waitForActiveCollection(collectionName, numShards, numShards * numReplicas); @@ -109,9 +106,9 @@ public void testCollectionCreateSearchDelete() throws Exception { // shut down a server JettySolrRunner stoppedServer = cluster.stopJettySolrRunner(0); - + cluster.waitForJettyToStop(stoppedServer); - + assertTrue(stoppedServer.isStopped()); assertEquals(nodeCount - 1, cluster.getJettySolrRunners().size()); @@ -133,9 +130,13 @@ public void testCollectionCreateSearchDelete() throws Exception { ZkStateReader zkStateReader = client.getZkStateReader(); zkStateReader.forceUpdateCollection(collectionName); ClusterState clusterState = zkStateReader.getClusterState(); - Map jettyMap = new HashMap<>(); + Map jettyMap = new HashMap<>(); for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { - String key = jetty.getBaseUrl().toString().substring((jetty.getBaseUrl().getProtocol() + "://").length()); + String key = + jetty + .getBaseUrl() + .toString() + .substring((jetty.getBaseUrl().getProtocol() + "://").length()); jettyMap.put(key, jetty); } Collection slices = clusterState.getCollection(collectionName).getSlices(); @@ -163,12 +164,12 @@ public void testCollectionCreateSearchDelete() throws Exception { assertEquals(nodeCount, cluster.getJettySolrRunners().size()); CollectionAdminRequest.deleteCollection(collectionName).process(client); - AbstractDistribZkTestBase.waitForCollectionToDisappear - (collectionName, client.getZkStateReader(), true, 330); + AbstractDistribZkTestBase.waitForCollectionToDisappear( + collectionName, 
client.getZkStateReader(), true, 330); // create it again createCollection(collectionName, null); - + cluster.waitForActiveCollection(collectionName, numShards, numShards * numReplicas); // check that there's no left-over state @@ -194,16 +195,17 @@ public void testCollectionCreateWithoutCoresThenDelete() throws Exception { // check the collection's corelessness int coreCount = 0; - DocCollection docCollection = client.getZkStateReader().getClusterState().getCollection(collectionName); - for (Map.Entry entry : docCollection.getSlicesMap().entrySet()) { + DocCollection docCollection = + client.getZkStateReader().getClusterState().getCollection(collectionName); + for (Map.Entry entry : docCollection.getSlicesMap().entrySet()) { coreCount += entry.getValue().getReplicasMap().entrySet().size(); } assertEquals(0, coreCount); // delete the collection CollectionAdminRequest.deleteCollection(collectionName).process(client); - AbstractDistribZkTestBase.waitForCollectionToDisappear - (collectionName, client.getZkStateReader(), true, 330); + AbstractDistribZkTestBase.waitForCollectionToDisappear( + collectionName, client.getZkStateReader(), true, 330); } @Test @@ -230,9 +232,9 @@ public void testStopAllStartAll() throws Exception { // modify collection final int numDocs = 1 + random().nextInt(10); for (int ii = 1; ii <= numDocs; ++ii) { - doc.setField("id", ""+ii); + doc.setField("id", "" + ii); client.add(collectionName, doc); - if (ii*2 == numDocs) client.commit(collectionName); + if (ii * 2 == numDocs) client.commit(collectionName); } client.commit(collectionName); @@ -246,7 +248,7 @@ public void testStopAllStartAll() throws Exception { final Set leaderIndices = new HashSet<>(); final Set followerIndices = new HashSet<>(); { - final Map shardLeaderMap = new HashMap<>(); + final Map shardLeaderMap = new HashMap<>(); for (final Slice slice : clusterState.getCollection(collectionName).getSlices()) { for (final Replica replica : slice.getReplicas()) { shardLeaderMap.put(replica.getNodeName().replace("_solr", "/solr"), Boolean.FALSE); @@ -255,7 +257,8 @@ public void testStopAllStartAll() throws Exception { } for (int ii = 0; ii < jettys.size(); ++ii) { final URL jettyBaseUrl = jettys.get(ii).getBaseUrl(); - final String jettyBaseUrlString = jettyBaseUrl.toString().substring((jettyBaseUrl.getProtocol() + "://").length()); + final String jettyBaseUrlString = + jettyBaseUrl.toString().substring((jettyBaseUrl.getProtocol() + "://").length()); final Boolean isLeader = shardLeaderMap.get(jettyBaseUrlString); if (Boolean.TRUE.equals(isLeader)) { leaderIndices.add(ii); diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestLocalFSCloudBackupRestore.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestLocalFSCloudBackupRestore.java index 3c52362f7ee..66d2c5c0d2e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestLocalFSCloudBackupRestore.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestLocalFSCloudBackupRestore.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.io.OutputStream; import java.net.URI; - import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; @@ -36,35 +35,50 @@ import org.junit.Test; /** - * This class implements the tests for local file-system integration for Solr backup/restore capability. Note that the - * Solr backup/restore still requires a "shared" file-system. 
Its just that in this case such file-system would be - exposed via local file-system API. + * This class implements the tests for local file-system integration for Solr backup/restore + * capability. Note that the Solr backup/restore still requires a "shared" file-system. It's just + * that in this case such file-system would be exposed via local file-system API. */ -@LuceneTestCase.SuppressCodecs({"SimpleText"}) // Backups do checksum validation against a footer value not present in 'SimpleText' +// Backups do checksum validation against a footer value not present in 'SimpleText' +@LuceneTestCase.SuppressCodecs({"SimpleText"}) public class TestLocalFSCloudBackupRestore extends AbstractCloudBackupRestoreTestCase { private static String backupLocation; @BeforeClass public static void setupClass() throws Exception { String solrXml = MiniSolrCloudCluster.DEFAULT_CLOUD_SOLR_XML; - String poisioned = - " \n" + - " \n"; - String local = + String poisioned = + " \n" + + " \n"; + String local = " \n" + - " \n"; - solrXml = solrXml.replace("", - "" + (random().nextBoolean() ? poisioned+local: local+poisioned) - + ""+ ""); - - configureCluster(NUM_SHARDS)// nodes - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) - .addConfig("confFaulty", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + + "class=\"org.apache.solr.core.backup.repository.LocalFileSystemRepository\"> \n" + + " \n"; + solrXml = + solrXml.replace( + "", + "" + + (random().nextBoolean() ? poisioned + local : local + poisioned) + + "" + + ""); + + configureCluster(NUM_SHARDS) // nodes + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "confFaulty", + TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .withSolrXml(solrXml) .configure(); - cluster.getZkClient().delete(ZkConfigSetService.CONFIGS_ZKNODE + "/" + "confFaulty" + "/" + "solrconfig.xml", -1, true); + cluster + .getZkClient() + .delete( + ZkConfigSetService.CONFIGS_ZKNODE + "/" + "confFaulty" + "/" + "solrconfig.xml", + -1, + true); boolean whitespacesInPath = random().nextBoolean(); if (whitespacesInPath) { @@ -90,22 +104,22 @@ public String getBackupLocation() { } @Override - @Test + @Test public void test() throws Exception { super.test(); - + CloudSolrClient solrClient = cluster.getSolrClient(); errorBackup(solrClient); - + erroRestore(solrClient); } private void erroRestore(CloudSolrClient solrClient) throws SolrServerException, IOException { String backupName = BACKUPNAME_PREFIX + testSuffix; - CollectionAdminRequest.Restore restore = CollectionAdminRequest.restoreCollection(getCollectionName()+"boo", backupName) - .setLocation(backupLocation) - ; + CollectionAdminRequest.Restore restore = + CollectionAdminRequest.restoreCollection(getCollectionName() + "boo", backupName) + .setLocation(backupLocation); if (random().nextBoolean()) { restore.setRepositoryName(poisioned); } @@ -118,14 +132,14 @@ private void erroRestore(CloudSolrClient solrClient) throws SolrServerException, } } - private void errorBackup(CloudSolrClient solrClient) - throws SolrServerException, IOException { - CollectionAdminRequest.Backup backup = CollectionAdminRequest.backupCollection(getCollectionName(), "poisionedbackup") - .setLocation(getBackupLocation()); + private void errorBackup(CloudSolrClient solrClient) throws SolrServerException, IOException { + CollectionAdminRequest.Backup backup = + 
CollectionAdminRequest.backupCollection(getCollectionName(), "poisionedbackup") + .setLocation(getBackupLocation()); if (random().nextBoolean()) { backup.setRepositoryName(poisioned); } // otherwise we hit default - + try { backup.process(solrClient); fail("This request should have failed since omitting repo, picks up default poisioned."); @@ -133,26 +147,31 @@ private void errorBackup(CloudSolrClient solrClient) assertEquals(ErrorCode.SERVER_ERROR.code, ex.code()); } } - + private static final String poisioned = "poisioned"; - // let it go through collection handler, and break only when real thing is doing: Restore/BackupCore + // let it go through collection handler, and break only when real thing is doing: + // Restore/BackupCore public static class PoinsionedRepository extends LocalFileSystemRepository { - + public PoinsionedRepository() { super(); } + @Override public void copyFileFrom(Directory sourceDir, String fileName, URI dest) throws IOException { throw new UnsupportedOperationException(poisioned); } + @Override public void copyFileTo(URI sourceDir, String fileName, Directory dest) throws IOException { throw new UnsupportedOperationException(poisioned); } + @Override public IndexInput openInput(URI dirPath, String fileName, IOContext ctx) throws IOException { throw new UnsupportedOperationException(poisioned); } + @Override public OutputStream createOutput(URI path) throws IOException { throw new UnsupportedOperationException(poisioned); diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestReplicaProperties.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestReplicaProperties.java index 4e906b89cd0..936268cc19e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestReplicaProperties.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestReplicaProperties.java @@ -20,7 +20,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -41,7 +40,7 @@ public class TestReplicaProperties extends ReplicaPropertiesBase { public static final String COLLECTION_NAME = "testcollection"; public TestReplicaProperties() { - schemaString = "schema15.xml"; // we need a string id + schemaString = "schema15.xml"; // we need a string id sliceCount = 2; } @@ -50,8 +49,8 @@ public TestReplicaProperties() { public void test() throws Exception { try (CloudSolrClient client = createCloudClient(null)) { - // Mix up a bunch of different combinations of shards and replicas in order to exercise boundary cases. - // shards, replicationfactor + // Mix up a bunch of different combinations of shards and replicas in order to exercise + // boundary cases. 
shards, replicationfactor int shards = random().nextInt(7); if (shards < 2) shards = 2; int rFactor = random().nextInt(4); @@ -78,112 +77,172 @@ private void listCollection() throws IOException, SolrServerException { NamedList rsp = client.request(request); @SuppressWarnings({"unchecked"}) List collections = (List) rsp.get("collections"); - assertTrue("control_collection was not found in list", collections.contains("control_collection")); - assertTrue(DEFAULT_COLLECTION + " was not found in list", collections.contains(DEFAULT_COLLECTION)); + assertTrue( + "control_collection was not found in list", collections.contains("control_collection")); + assertTrue( + DEFAULT_COLLECTION + " was not found in list", collections.contains(DEFAULT_COLLECTION)); assertTrue(COLLECTION_NAME + " was not found in list", collections.contains(COLLECTION_NAME)); } } - private void clusterAssignPropertyTest() throws Exception { try (CloudSolrClient client = createCloudClient(null)) { client.connect(); - SolrException se = assertThrows(SolrException.class, () -> - doPropertyAction(client, - "action", CollectionParams.CollectionAction.BALANCESHARDUNIQUE.toString(), - "property", "preferredLeader")); - assertTrue("Should have seen missing required parameter 'collection' error", + SolrException se = + assertThrows( + SolrException.class, + () -> + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.BALANCESHARDUNIQUE.toString(), + "property", + "preferredLeader")); + assertTrue( + "Should have seen missing required parameter 'collection' error", se.getMessage().contains("Missing required parameter: collection")); - doPropertyAction(client, - "action", CollectionParams.CollectionAction.BALANCESHARDUNIQUE.toString(), - "collection", COLLECTION_NAME, - "property", "preferredLeader"); + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.BALANCESHARDUNIQUE.toString(), + "collection", + COLLECTION_NAME, + "property", + "preferredLeader"); verifyUniqueAcrossCollection(client, COLLECTION_NAME, "preferredleader"); - doPropertyAction(client, - "action", CollectionParams.CollectionAction.BALANCESHARDUNIQUE.toString(), - "collection", COLLECTION_NAME, - "property", "property.newunique", - "shardUnique", "true"); + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.BALANCESHARDUNIQUE.toString(), + "collection", + COLLECTION_NAME, + "property", + "property.newunique", + "shardUnique", + "true"); verifyUniqueAcrossCollection(client, COLLECTION_NAME, "property.newunique"); - se = assertThrows(SolrException.class, () -> - doPropertyAction(client, - "action", CollectionParams.CollectionAction.BALANCESHARDUNIQUE.toString(), - "collection", COLLECTION_NAME, - "property", "whatever", - "shardUnique", "false")); - assertTrue("Should have gotten a specific error message here", - se.getMessage().contains("Balancing properties amongst replicas in a slice requires that the " + - "property be pre-defined as a unique property (e.g. 'preferredLeader') or that 'shardUnique' be set to 'true'")); + se = + assertThrows( + SolrException.class, + () -> + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.BALANCESHARDUNIQUE.toString(), + "collection", + COLLECTION_NAME, + "property", + "whatever", + "shardUnique", + "false")); + assertTrue( + "Should have gotten a specific error message here", + se.getMessage() + .contains( + "Balancing properties amongst replicas in a slice requires that the " + + "property be pre-defined as a unique property (e.g. 
'preferredLeader') or that 'shardUnique' be set to 'true'")); // Should be able to set non-unique-per-slice values in several places. - Map slices = client.getZkStateReader().getClusterState().getCollection(COLLECTION_NAME).getSlicesMap(); + Map slices = + client.getZkStateReader().getClusterState().getCollection(COLLECTION_NAME).getSlicesMap(); List sliceList = new ArrayList<>(slices.keySet()); String c1_s1 = sliceList.get(0); List replicasList = new ArrayList<>(slices.get(c1_s1).getReplicasMap().keySet()); String c1_s1_r1 = replicasList.get(0); String c1_s1_r2 = replicasList.get(1); - addProperty(client, - "action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString(), - "collection", COLLECTION_NAME, - "shard", c1_s1, - "replica", c1_s1_r1, - "property", "bogus1", - "property.value", "true"); - - addProperty(client, - "action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString(), - "collection", COLLECTION_NAME, - "shard", c1_s1, - "replica", c1_s1_r2, - "property", "property.bogus1", - "property.value", "whatever"); - - se = assertThrows(SolrException.class, () -> - doPropertyAction(client, - "action", CollectionParams.CollectionAction.BALANCESHARDUNIQUE.toString(), - "collection", COLLECTION_NAME, - "property", "bogus1", - "shardUnique", "false")); - assertTrue("Should have caught specific exception ", - se.getMessage().contains("Balancing properties amongst replicas in a slice requires that the property be " + - "pre-defined as a unique property (e.g. 'preferredLeader') or that 'shardUnique' be set to 'true'")); + addProperty( + client, + "action", + CollectionParams.CollectionAction.ADDREPLICAPROP.toString(), + "collection", + COLLECTION_NAME, + "shard", + c1_s1, + "replica", + c1_s1_r1, + "property", + "bogus1", + "property.value", + "true"); + + addProperty( + client, + "action", + CollectionParams.CollectionAction.ADDREPLICAPROP.toString(), + "collection", + COLLECTION_NAME, + "shard", + c1_s1, + "replica", + c1_s1_r2, + "property", + "property.bogus1", + "property.value", + "whatever"); + + se = + assertThrows( + SolrException.class, + () -> + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.BALANCESHARDUNIQUE.toString(), + "collection", + COLLECTION_NAME, + "property", + "bogus1", + "shardUnique", + "false")); + assertTrue( + "Should have caught specific exception ", + se.getMessage() + .contains( + "Balancing properties amongst replicas in a slice requires that the property be " + + "pre-defined as a unique property (e.g. 'preferredLeader') or that 'shardUnique' be set to 'true'")); // Should have no effect despite the "shardUnique" param being set. - doPropertyAction(client, - "action", CollectionParams.CollectionAction.BALANCESHARDUNIQUE.toString(), - "collection", COLLECTION_NAME, - "property", "property.bogus1", - "shardUnique", "true"); - - verifyPropertyVal(client, COLLECTION_NAME, - c1_s1_r1, "bogus1", "true"); - verifyPropertyVal(client, COLLECTION_NAME, - c1_s1_r2, "property.bogus1", "whatever"); - - // At this point we've assigned a preferred leader. Make it happen and check that all the nodes that are - // leaders _also_ have the preferredLeader property set. 
- - - NamedList res = doPropertyAction(client, - "action", CollectionParams.CollectionAction.REBALANCELEADERS.toString(), - "collection", COLLECTION_NAME); + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.BALANCESHARDUNIQUE.toString(), + "collection", + COLLECTION_NAME, + "property", + "property.bogus1", + "shardUnique", + "true"); + + verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "bogus1", "true"); + verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.bogus1", "whatever"); + + // At this point we've assigned a preferred leader. Make it happen and check that all the + // nodes that are leaders _also_ have the preferredLeader property set. + + NamedList res = + doPropertyAction( + client, + "action", + CollectionParams.CollectionAction.REBALANCELEADERS.toString(), + "collection", + COLLECTION_NAME); verifyLeaderAssignment(client, COLLECTION_NAME); - } } private void verifyLeaderAssignment(CloudSolrClient client, String collectionName) throws InterruptedException, KeeperException { String lastFailMsg = ""; - for (int idx = 0; idx < 300; ++idx) { // Keep trying while Overseer writes the ZK state for up to 30 seconds. + // Keep trying while Overseer writes the ZK state for up to 30 seconds. + for (int idx = 0; idx < 300; ++idx) { lastFailMsg = ""; ClusterState clusterState = client.getZkStateReader().getClusterState(); for (Slice slice : clusterState.getCollection(collectionName).getSlices()) { @@ -193,16 +252,25 @@ private void verifyLeaderAssignment(CloudSolrClient client, String collectionNam boolean isLeader = replica.getBool("leader", false); boolean isPreferred = replica.getBool("property.preferredleader", false); if (isLeader != isPreferred) { - lastFailMsg = "Replica should NOT have preferredLeader != leader. Preferred: " + isPreferred + - " leader is " + isLeader; + lastFailMsg = + "Replica should NOT have preferredLeader != leader. Preferred: " + + isPreferred + + " leader is " + + isLeader; } if (foundLeader && isLeader) { - lastFailMsg = "There should only be a single leader in _any_ shard! Replica " + replica.getName() + - " is the second leader in slice " + slice.getName(); + lastFailMsg = + "There should only be a single leader in _any_ shard! Replica " + + replica.getName() + + " is the second leader in slice " + + slice.getName(); } if (foundPreferred && isPreferred) { - lastFailMsg = "There should only be a single preferredLeader in _any_ shard! Replica " + replica.getName() + - " is the second preferredLeader in slice " + slice.getName(); + lastFailMsg = + "There should only be a single preferredLeader in _any_ shard! Replica " + + replica.getName() + + " is the second preferredLeader in slice " + + slice.getName(); } foundLeader = foundLeader ? foundLeader : isLeader; foundPreferred = foundPreferred ? foundPreferred : isPreferred; @@ -214,8 +282,11 @@ private void verifyLeaderAssignment(CloudSolrClient client, String collectionNam fail(lastFailMsg); } - private void addProperty(CloudSolrClient client, String... paramsIn) throws IOException, SolrServerException { - assertTrue("paramsIn must be an even multiple of 2, it is: " + paramsIn.length, (paramsIn.length % 2) == 0); + private void addProperty(CloudSolrClient client, String... 
paramsIn) + throws IOException, SolrServerException { + assertTrue( + "paramsIn must be an even multiple of 2, it is: " + paramsIn.length, + (paramsIn.length % 2) == 0); ModifiableSolrParams params = new ModifiableSolrParams(); for (int idx = 0; idx < paramsIn.length; idx += 2) { params.set(paramsIn[idx], paramsIn[idx + 1]); @@ -223,7 +294,5 @@ private void addProperty(CloudSolrClient client, String... paramsIn) throws IOEx QueryRequest request = new QueryRequest(params); request.setPath("/admin/collections"); client.request(request); - } } - diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestRequestStatusCollectionAPI.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestRequestStatusCollectionAPI.java index 03de7b95a94..8c0ab26d078 100644 --- a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestRequestStatusCollectionAPI.java +++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestRequestStatusCollectionAPI.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.Map; - import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.request.QueryRequest; @@ -36,7 +35,7 @@ public class TestRequestStatusCollectionAPI extends BasicDistributedZkTest { public static final int MAX_WAIT_TIMEOUT_SECONDS = 90; public TestRequestStatusCollectionAPI() { - schemaString = "schema15.xml"; // we need a string id + schemaString = "schema15.xml"; // we need a string id } @Test @@ -68,18 +67,20 @@ public void test() throws Exception { params.set("action", CollectionParams.CollectionAction.REQUESTSTATUS.toString()); params.set(CollectionHandlingUtils.REQUESTID, "1000"); - NamedList createResponse =null; + NamedList createResponse = null; try { createResponse = sendStatusRequestWithRetry(params, MAX_WAIT_TIMEOUT_SECONDS); - message = (String) createResponse.findRecursive("status","msg"); + message = (String) createResponse.findRecursive("status", "msg"); } catch (SolrServerException | IOException e) { e.printStackTrace(); } - assertEquals("found [1000] in completed tasks", message); - assertEquals("expecting "+numShards+" shard responses at "+createResponse, - numShards, numResponsesCompleted(createResponse)); - + assertEquals("found [1000] in completed tasks", message); + assertEquals( + "expecting " + numShards + " shard responses at " + createResponse, + numShards, + numResponsesCompleted(createResponse)); + // Check for a random (hopefully non-existent request id params = new ModifiableSolrParams(); params.set(CollectionParams.ACTION, CollectionParams.CollectionAction.REQUESTSTATUS.toString()); @@ -109,18 +110,20 @@ public void test() throws Exception { params = new ModifiableSolrParams(); params.set("action", CollectionParams.CollectionAction.REQUESTSTATUS.toString()); params.set(CollectionHandlingUtils.REQUESTID, "1001"); - NamedList splitResponse=null; + NamedList splitResponse = null; try { splitResponse = sendStatusRequestWithRetry(params, MAX_WAIT_TIMEOUT_SECONDS); - message = (String) splitResponse.findRecursive("status","msg"); + message = (String) splitResponse.findRecursive("status", "msg"); } catch (SolrServerException | IOException e) { e.printStackTrace(); } assertEquals("found [1001] in completed tasks", message); - // create * 2 + preprecovery *2 + split + req_apply_upd * 2 =7 - assertEquals("expecting "+(2+2+1+2)+" shard responses at "+splitResponse, - (2+2+1+2), numResponsesCompleted(splitResponse)); + // 
create * 2 + preprecovery *2 + split + req_apply_upd * 2 =7 + assertEquals( + "expecting " + (2 + 2 + 1 + 2) + " shard responses at " + splitResponse, + (2 + 2 + 1 + 2), + numResponsesCompleted(splitResponse)); params = new ModifiableSolrParams(); params.set(CollectionParams.ACTION, CollectionParams.CollectionAction.CREATE.toString()); @@ -142,7 +145,7 @@ public void test() throws Exception { try { NamedList response = sendStatusRequestWithRetry(params, MAX_WAIT_TIMEOUT_SECONDS); - message = (String) response.findRecursive("status","msg"); + message = (String) response.findRecursive("status", "msg"); } catch (SolrServerException | IOException e) { e.printStackTrace(); } @@ -167,14 +170,14 @@ public void test() throws Exception { @SuppressWarnings("unchecked") private int numResponsesCompleted(NamedList response) { - int sum=0; - for (String key: Arrays.asList("success","failure")) { - NamedList allStatuses = (NamedList)response.get(key); - if (allStatuses!=null) { - for (Map.Entry tuple: allStatuses) { + int sum = 0; + for (String key : Arrays.asList("success", "failure")) { + NamedList allStatuses = (NamedList) response.get(key); + if (allStatuses != null) { + for (Map.Entry tuple : allStatuses) { NamedList statusResponse = (NamedList) tuple.getValue(); - if (statusResponse.indexOf("STATUS",0)>=0) { - sum+=1; + if (statusResponse.indexOf("STATUS", 0) >= 0) { + sum += 1; } } } @@ -183,11 +186,11 @@ private int numResponsesCompleted(NamedList response) { } /** - * Helper method to send a status request with specific retry limit and return - * the message/null from the success response. + * Helper method to send a status request with specific retry limit and return the message/null + * from the success response. */ private NamedList sendStatusRequestWithRetry(ModifiableSolrParams params, int maxCounter) - throws SolrServerException, IOException{ + throws SolrServerException, IOException { NamedList r = null; while (maxCounter-- > 0) { r = sendRequest(params); @@ -203,22 +206,22 @@ private NamedList sendStatusRequestWithRetry(ModifiableSolrParams params Thread.sleep(1000); } catch (InterruptedException e) { } - } // Return last state? 
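// If the retry budget above runs out while the task still reports "running" or "submitted", the loop falls through and the last response seen is returned as-is (hence the question in the trailing comment), so callers are expected to check its status themselves.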
return r; } - protected NamedList sendRequest(ModifiableSolrParams params) throws SolrServerException, IOException { + protected NamedList sendRequest(ModifiableSolrParams params) + throws SolrServerException, IOException { QueryRequest request = new QueryRequest(params); request.setPath("/admin/collections"); - String baseUrl = ((HttpSolrClient) shardToJetty.get(SHARD1).get(0).client.getSolrClient()).getBaseURL(); + String baseUrl = + ((HttpSolrClient) shardToJetty.get(SHARD1).get(0).client.getSolrClient()).getBaseURL(); baseUrl = baseUrl.substring(0, baseUrl.length() - "collection1".length()); try (HttpSolrClient baseServer = getHttpSolrClient(baseUrl, 15000)) { return baseServer.request(request); } - } } diff --git a/solr/core/src/test/org/apache/solr/cloud/overseer/TestClusterStateMutator.java b/solr/core/src/test/org/apache/solr/cloud/overseer/TestClusterStateMutator.java index 7933d5b8be6..f160b813dab 100644 --- a/solr/core/src/test/org/apache/solr/cloud/overseer/TestClusterStateMutator.java +++ b/solr/core/src/test/org/apache/solr/cloud/overseer/TestClusterStateMutator.java @@ -16,9 +16,10 @@ */ package org.apache.solr.cloud.overseer; +import static org.mockito.Mockito.*; + import java.util.Collections; import java.util.Map; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.cloud.DistribStateManager; import org.apache.solr.client.solrj.cloud.SolrCloudManager; @@ -29,39 +30,43 @@ import org.apache.solr.common.cloud.ZkNodeProps; import org.junit.BeforeClass; -import static org.mockito.Mockito.*; - public class TestClusterStateMutator extends SolrTestCaseJ4 { - + @BeforeClass public static void beforeClass() { assumeWorkingMockito(); } - + public void testCreateCollection() throws Exception { - ClusterState clusterState = new ClusterState(Collections.emptySet(), Collections.emptyMap()); + ClusterState clusterState = + new ClusterState( + Collections.emptySet(), Collections.emptyMap()); DistribStateManager mockStateManager = mock(DistribStateManager.class); SolrCloudManager dataProvider = mock(SolrCloudManager.class); when(dataProvider.getDistribStateManager()).thenReturn(mockStateManager); ClusterStateMutator mutator = new ClusterStateMutator(dataProvider); - ZkNodeProps message = new ZkNodeProps(Map.of( - "name", "xyz", - "numShards", "1" - )); + ZkNodeProps message = + new ZkNodeProps( + Map.of( + "name", "xyz", + "numShards", "1")); ZkWriteCommand cmd = mutator.createCollection(clusterState, message); DocCollection collection = cmd.collection; assertEquals("xyz", collection.getName()); assertEquals(1, collection.getSlicesMap().size()); - ClusterState state = new ClusterState(Collections.emptySet(), Collections.singletonMap("xyz", collection)); - message = new ZkNodeProps(Map.of( - "name", "abc", - "numShards", "2", - "router.name", "implicit", - "shards", "x,y", - "replicationFactor", "3" - )); + ClusterState state = + new ClusterState( + Collections.emptySet(), Collections.singletonMap("xyz", collection)); + message = + new ZkNodeProps( + Map.of( + "name", "abc", + "numShards", "2", + "router.name", "implicit", + "shards", "x,y", + "replicationFactor", "3")); cmd = mutator.createCollection(state, message); collection = cmd.collection; assertEquals("abc", collection.getName()); @@ -76,4 +81,3 @@ public void testCreateCollection() throws Exception { assertNotNull(state.getCollectionOrNull("xyz")); // we still have the old collection } } - diff --git a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkCollectionPropsCachingTest.java 
b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkCollectionPropsCachingTest.java index 604aec578fb..b8fffd07464 100644 --- a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkCollectionPropsCachingTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkCollectionPropsCachingTest.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.lang.invoke.MethodHandles; - import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -37,18 +36,17 @@ @SolrTestCaseJ4.SuppressSSL public class ZkCollectionPropsCachingTest extends SolrCloudTestCase { // - // NOTE: This class can only have one test because our test for caching is to nuke the SolrZkClient to - // verify that a cached load is going to hit the cache, not try to talk to zk. Any other ZK related test - // method in this class will fail if it runs after testReadWriteCached, so don't add one! :) + // NOTE: This class can only have one test because our test for caching is to nuke the + // SolrZkClient to verify that a cached load is going to hit the cache, not try to talk to zk. Any + // other ZK related test method in this class will fail if it runs after testReadWriteCached, so + // don't add one! :) // private String collectionName; private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @BeforeClass public static void setupClass() throws Exception { - configureCluster(4) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(4).addConfig("conf", configset("cloud-minimal")).configure(); } @Before @@ -58,7 +56,8 @@ public void setUp() throws Exception { collectionName = "CollectionPropsTest" + System.nanoTime(); - CollectionAdminRequest.Create request = CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2); + CollectionAdminRequest.Create request = + CollectionAdminRequest.createCollection(collectionName, "conf", 2, 2); CollectionAdminResponse response = request.process(cluster.getSolrClient()); assertTrue("Unable to create collection: " + response.toString(), response.isSuccess()); } @@ -70,27 +69,31 @@ public void testReadWriteCached() throws InterruptedException, IOException { CollectionProperties collectionProps = new CollectionProperties(zkClient()); collectionProps.setCollectionProperty(collectionName, "property1", "value1"); - checkValue("property1", "value1"); //Should be no cache, so the change should take effect immediately + // Should be no cache, so the change should take effect immediately + checkValue("property1", "value1"); - zkStateReader.getCollectionProperties(collectionName,9000); + zkStateReader.getCollectionProperties(collectionName, 9000); zkStateReader.getZkClient().close(); assertFalse(zkStateReader.isClosed()); - checkValue("property1", "value1"); //Should be cached, so the change should not try to hit zk + checkValue("property1", "value1"); // Should be cached, so the change should not try to hit zk Thread.sleep(10000); // test the timeout feature try { - checkValue("property1", "value1"); //Should not be cached anymore - fail("cache should have expired, prev line should throw an exception trying to access zookeeper after closed"); + checkValue("property1", "value1"); // Should not be cached anymore + fail( + "cache should have expired, prev line should throw an exception trying to access zookeeper after closed"); } catch (Exception e) { // expected, because we killed the client in zkStateReader. 
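// Reaching this catch is the success path for the test: the 10 second sleep outlives the 9 second cache window requested via getCollectionProperties(collectionName, 9000), so the re-read has to go back to ZooKeeper and fails against the deliberately closed client.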
} } private void checkValue(String propertyName, String expectedValue) throws InterruptedException { - final Object value = cluster.getSolrClient().getZkStateReader().getCollectionProperties(collectionName).get(propertyName); + final Object value = + cluster + .getSolrClient() + .getZkStateReader() + .getCollectionProperties(collectionName) + .get(propertyName); assertEquals("Unexpected value for collection property: " + propertyName, expectedValue, value); } - - - } diff --git a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java index c73f1528bd9..639c7538a46 100644 --- a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateReaderTest.java @@ -21,7 +21,6 @@ import java.util.HashMap; import java.util.Map; import java.util.concurrent.TimeUnit; - import org.apache.lucene.util.IOUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.cloud.OverseerTest; @@ -41,7 +40,7 @@ public class ZkStateReaderTest extends SolrTestCaseJ4 { private static final long TIMEOUT = 30; - public void testExternalCollectionWatchedNotWatched() throws Exception{ + public void testExternalCollectionWatchedNotWatched() throws Exception { Path zkDir = createTempDir("testExternalCollectionWatchedNotWatched"); ZkTestServer server = new ZkTestServer(zkDir); SolrZkClient zkClient = null; @@ -61,8 +60,15 @@ public void testExternalCollectionWatchedNotWatched() throws Exception{ zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/c1", true); // create new collection - ZkWriteCommand c1 = new ZkWriteCommand("c1", - new DocCollection("c1", new HashMap<>(), Map.of(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), DocRouter.DEFAULT, 0)); + ZkWriteCommand c1 = + new ZkWriteCommand( + "c1", + new DocCollection( + "c1", + new HashMap<>(), + Map.of(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), + DocRouter.DEFAULT, + 0)); writer.enqueueUpdate(reader.getClusterState(), Collections.singletonList(c1), null); writer.writePendingUpdates(); reader.forceUpdateCollection("c1"); @@ -79,7 +85,7 @@ public void testExternalCollectionWatchedNotWatched() throws Exception{ } } - public void testCollectionStateWatcherCaching() throws Exception { + public void testCollectionStateWatcherCaching() throws Exception { Path zkDir = createTempDir("testCollectionStateWatcherCaching"); ZkTestServer server = new ZkTestServer(zkDir); @@ -99,12 +105,19 @@ public void testCollectionStateWatcherCaching() throws Exception { zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/c1", true); ZkStateWriter writer = new ZkStateWriter(reader, new Stats()); - DocCollection state = new DocCollection("c1", new HashMap<>(), Map.of(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), DocRouter.DEFAULT, 0); + DocCollection state = + new DocCollection( + "c1", + new HashMap<>(), + Map.of(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), + DocRouter.DEFAULT, + 0); ZkWriteCommand wc = new ZkWriteCommand("c1", state); writer.enqueueUpdate(reader.getClusterState(), Collections.singletonList(wc), null); writer.writePendingUpdates(); assertTrue(zkClient.exists(ZkStateReader.COLLECTIONS_ZKNODE + "/c1/state.json", true)); - reader.waitForState("c1", 1, TimeUnit.SECONDS, (liveNodes, collectionState) -> collectionState != null); + reader.waitForState( + "c1", 1, TimeUnit.SECONDS, (liveNodes, 
collectionState) -> collectionState != null); Map props = new HashMap<>(); props.put("x", "y"); @@ -116,7 +129,7 @@ public void testCollectionStateWatcherCaching() throws Exception { boolean found = false; TimeOut timeOut = new TimeOut(5, TimeUnit.SECONDS, TimeSource.NANO_TIME); - while (!timeOut.hasTimedOut()) { + while (!timeOut.hasTimedOut()) { DocCollection c1 = reader.getClusterState().getCollection("c1"); if ("y".equals(c1.getStr("x"))) { found = true; @@ -159,16 +172,21 @@ public void testWatchedCollectionCreation() throws Exception { ZkStateWriter writer = new ZkStateWriter(reader, new Stats()); - // create new collection - DocCollection state = new DocCollection("c1", new HashMap<>(), Map.of(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), DocRouter.DEFAULT, 0); + DocCollection state = + new DocCollection( + "c1", + new HashMap<>(), + Map.of(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), + DocRouter.DEFAULT, + 0); ZkWriteCommand wc = new ZkWriteCommand("c1", state); writer.enqueueUpdate(reader.getClusterState(), Collections.singletonList(wc), null); writer.writePendingUpdates(); assertTrue(zkClient.exists(ZkStateReader.COLLECTIONS_ZKNODE + "/c1/state.json", true)); - //reader.forceUpdateCollection("c1"); + // reader.forceUpdateCollection("c1"); reader.waitForState("c1", TIMEOUT, TimeUnit.SECONDS, (n, c) -> c != null); ClusterState.CollectionRef ref = reader.getClusterState().getCollectionRef("c1"); assertNotNull(ref); @@ -176,7 +194,6 @@ public void testWatchedCollectionCreation() throws Exception { } finally { IOUtils.close(reader, zkClient); server.shutdown(); - } } } diff --git a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java index 6773c6b9c0b..2d4b39f7d02 100644 --- a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java @@ -22,7 +22,6 @@ import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; - import org.apache.lucene.util.IOUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.cloud.Overseer; @@ -47,7 +46,8 @@ public class ZkStateWriterTest extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private static final ZkStateWriter.ZkWriteCallback FAIL_ON_WRITE = () -> fail("Got unexpected flush"); + private static final ZkStateWriter.ZkWriteCallback FAIL_ON_WRITE = + () -> fail("Got unexpected flush"); @BeforeClass public static void setup() { @@ -81,24 +81,39 @@ public void testZkStateWriterBatching() throws Exception { zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/c2", true); zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/c3", true); - Map props = Collections.singletonMap(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME); - ZkWriteCommand c1 = new ZkWriteCommand("c1", new DocCollection("c1", new HashMap<>(), props, DocRouter.DEFAULT, 0)); - ZkWriteCommand c2 = new ZkWriteCommand("c2", new DocCollection("c2", new HashMap<>(), props, DocRouter.DEFAULT, 0)); - ZkWriteCommand c3 = new ZkWriteCommand("c3", new DocCollection("c3", new HashMap<>(), props, DocRouter.DEFAULT, 0)); + Map props = + Collections.singletonMap( + ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME); + ZkWriteCommand c1 = + new ZkWriteCommand( + "c1", new DocCollection("c1", 
new HashMap<>(), props, DocRouter.DEFAULT, 0)); + ZkWriteCommand c2 = + new ZkWriteCommand( + "c2", new DocCollection("c2", new HashMap<>(), props, DocRouter.DEFAULT, 0)); + ZkWriteCommand c3 = + new ZkWriteCommand( + "c3", new DocCollection("c3", new HashMap<>(), props, DocRouter.DEFAULT, 0)); ZkStateWriter writer = new ZkStateWriter(reader, new Stats()); // First write is flushed immediately - ClusterState clusterState = writer.enqueueUpdate(reader.getClusterState(), Collections.singletonList(c1), null); - clusterState = writer.enqueueUpdate(clusterState, Collections.singletonList(c1), FAIL_ON_WRITE); - clusterState = writer.enqueueUpdate(clusterState, Collections.singletonList(c2), FAIL_ON_WRITE); + ClusterState clusterState = + writer.enqueueUpdate(reader.getClusterState(), Collections.singletonList(c1), null); + clusterState = + writer.enqueueUpdate(clusterState, Collections.singletonList(c1), FAIL_ON_WRITE); + clusterState = + writer.enqueueUpdate(clusterState, Collections.singletonList(c2), FAIL_ON_WRITE); Thread.sleep(Overseer.STATE_UPDATE_DELAY + 100); AtomicBoolean didWrite = new AtomicBoolean(false); - clusterState = writer.enqueueUpdate(clusterState, Collections.singletonList(c3), () -> didWrite.set(true)); + clusterState = + writer.enqueueUpdate( + clusterState, Collections.singletonList(c3), () -> didWrite.set(true)); assertTrue("Exceed the update delay, should be flushed", didWrite.get()); for (int i = 0; i <= Overseer.STATE_UPDATE_BATCH_SIZE; i++) { - clusterState = writer.enqueueUpdate(clusterState, Collections.singletonList(c3), () -> didWrite.set(true)); + clusterState = + writer.enqueueUpdate( + clusterState, Collections.singletonList(c3), () -> didWrite.set(true)); } assertTrue("Exceed the update batch size, should be flushed", didWrite.get()); } @@ -130,13 +145,25 @@ public void testSingleExternalCollection() throws Exception { zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/c1", true); // create new collection - ZkWriteCommand c1 = new ZkWriteCommand("c1", - new DocCollection("c1", new HashMap(), Collections.singletonMap(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), DocRouter.DEFAULT, 0)); + ZkWriteCommand c1 = + new ZkWriteCommand( + "c1", + new DocCollection( + "c1", + new HashMap(), + Collections.singletonMap( + ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), + DocRouter.DEFAULT, + 0)); writer.enqueueUpdate(reader.getClusterState(), Collections.singletonList(c1), null); writer.writePendingUpdates(); - Map map = (Map) Utils.fromJSON(zkClient.getData(ZkStateReader.COLLECTIONS_ZKNODE + "/c1/state.json", null, null, true)); + Map map = + (Map) + Utils.fromJSON( + zkClient.getData( + ZkStateReader.COLLECTIONS_ZKNODE + "/c1/state.json", null, null, true)); assertNotNull(map.get("c1")); } } finally { @@ -169,8 +196,16 @@ public void testExternalModification() throws Exception { ClusterState state = reader.getClusterState(); // create collection 2 - ZkWriteCommand c2 = new ZkWriteCommand("c2", - new DocCollection("c2", new HashMap(), Collections.singletonMap(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), DocRouter.DEFAULT, 0)); + ZkWriteCommand c2 = + new ZkWriteCommand( + "c2", + new DocCollection( + "c2", + new HashMap(), + Collections.singletonMap( + ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), + DocRouter.DEFAULT, + 0)); state = writer.enqueueUpdate(state, Collections.singletonList(c2), null); assertFalse(writer.hasPendingUpdates()); // first write is 
flushed immediately @@ -195,9 +230,17 @@ public void testExternalModification() throws Exception { state = reader.getClusterState(); // Will trigger flush - Thread.sleep(Overseer.STATE_UPDATE_DELAY+100); - ZkWriteCommand c1 = new ZkWriteCommand("c1", - new DocCollection("c1", new HashMap(), Collections.singletonMap(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), DocRouter.DEFAULT, 0)); + Thread.sleep(Overseer.STATE_UPDATE_DELAY + 100); + ZkWriteCommand c1 = + new ZkWriteCommand( + "c1", + new DocCollection( + "c1", + new HashMap(), + Collections.singletonMap( + ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), + DocRouter.DEFAULT, + 0)); try { writer.enqueueUpdate(state, Collections.singletonList(c1), null); diff --git a/solr/core/src/test/org/apache/solr/cluster/events/AllEventsListener.java b/solr/core/src/test/org/apache/solr/cluster/events/AllEventsListener.java index 873a0d56e0f..bac220400a5 100644 --- a/solr/core/src/test/org/apache/solr/cluster/events/AllEventsListener.java +++ b/solr/core/src/test/org/apache/solr/cluster/events/AllEventsListener.java @@ -17,8 +17,6 @@ package org.apache.solr.cluster.events; -import org.junit.Assert; - import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; @@ -26,10 +24,9 @@ import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import org.junit.Assert; -/** - * - */ +/** */ public class AllEventsListener implements ClusterEventListener { CountDownLatch eventLatch = new CountDownLatch(1); ClusterEvent.EventType expectedType; @@ -55,7 +52,5 @@ public void waitForExpectedEvent(int timeoutSeconds) throws InterruptedException } } - public void close() throws IOException { - - } + public void close() throws IOException {} } diff --git a/solr/core/src/test/org/apache/solr/cluster/events/ClusterEventProducerTest.java b/solr/core/src/test/org/apache/solr/cluster/events/ClusterEventProducerTest.java index a96e2093bd2..7a1dbc07fe2 100644 --- a/solr/core/src/test/org/apache/solr/cluster/events/ClusterEventProducerTest.java +++ b/solr/core/src/test/org/apache/solr/cluster/events/ClusterEventProducerTest.java @@ -17,7 +17,21 @@ package org.apache.solr.cluster.events; +import static java.util.Collections.singletonMap; +import static org.apache.solr.client.solrj.SolrRequest.METHOD.GET; +import static org.apache.solr.client.solrj.SolrRequest.METHOD.POST; + import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering; +import java.io.IOException; +import java.lang.invoke.MethodHandles; +import java.time.Instant; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Phaser; +import java.util.concurrent.TimeUnit; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.V2Request; @@ -37,25 +51,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.lang.invoke.MethodHandles; -import java.time.Instant; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Phaser; -import java.util.concurrent.TimeUnit; - -import static java.util.Collections.singletonMap; -import static org.apache.solr.client.solrj.SolrRequest.METHOD.GET; 
-import static org.apache.solr.client.solrj.SolrRequest.METHOD.POST; - - -@ThreadLeakLingering(linger = 0)/** - * - */ +@ThreadLeakLingering(linger = 0) +/** */ @LogLevel("org.apache.solr.cluster.events=DEBUG") public class ClusterEventProducerTest extends SolrCloudTestCase { private AllEventsListener eventsListener; @@ -64,20 +61,27 @@ public class ClusterEventProducerTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { configureCluster(3) - .addConfig("conf", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "conf", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .configure(); } @Before - public void setUp() throws Exception { + public void setUp() throws Exception { System.setProperty("enable.packages", "true"); super.setUp(); cluster.deleteAllCollections(); eventsListener = new AllEventsListener(); - cluster.getOpenOverseer().getCoreContainer().getClusterEventProducer().registerListener(eventsListener); - ClusterEventProducer clusterEventProducer = cluster.getOpenOverseer().getCoreContainer().getClusterEventProducer(); - assertTrue("not a delegating producer? " + clusterEventProducer.getClass(), - clusterEventProducer instanceof DelegatingClusterEventProducer); + cluster + .getOpenOverseer() + .getCoreContainer() + .getClusterEventProducer() + .registerListener(eventsListener); + ClusterEventProducer clusterEventProducer = + cluster.getOpenOverseer().getCoreContainer().getClusterEventProducer(); + assertTrue( + "not a delegating producer? " + clusterEventProducer.getClass(), + clusterEventProducer instanceof DelegatingClusterEventProducer); DelegatingClusterEventProducer wrapper = (DelegatingClusterEventProducer) clusterEventProducer; phaser = new Phaser(); wrapper.setDelegationPhaser(phaser); @@ -87,19 +91,22 @@ public void setUp() throws Exception { public void teardown() throws Exception { System.clearProperty("enable.packages"); if (eventsListener != null) { - cluster.getOpenOverseer().getCoreContainer().getClusterEventProducer().unregisterListener(eventsListener); + cluster + .getOpenOverseer() + .getCoreContainer() + .getClusterEventProducer() + .unregisterListener(eventsListener); eventsListener.events.clear(); } - V2Request readPluginState = new V2Request.Builder("/cluster/plugin") - .forceV2(true) - .withMethod(GET) - .build(); + V2Request readPluginState = + new V2Request.Builder("/cluster/plugin").forceV2(true).withMethod(GET).build(); V2Response rsp = readPluginState.process(cluster.getSolrClient()); if (rsp._getStr("/plugin/" + ClusterEventProducer.PLUGIN_NAME + "/class", null) != null) { - V2Request req = new V2Request.Builder("/cluster/plugin") - .withMethod(POST) - .withPayload(Collections.singletonMap("remove", ClusterEventProducer.PLUGIN_NAME)) - .build(); + V2Request req = + new V2Request.Builder("/cluster/plugin") + .withMethod(POST) + .withPayload(Collections.singletonMap("remove", ClusterEventProducer.PLUGIN_NAME)) + .build(); req.process(cluster.getSolrClient()); } } @@ -111,10 +118,11 @@ public void testEvents() throws Exception { PluginMeta plugin = new PluginMeta(); plugin.klass = DefaultClusterEventProducer.class.getName(); plugin.name = ClusterEventProducer.PLUGIN_NAME; - V2Request req = new V2Request.Builder("/cluster/plugin") - .withMethod(POST) - .withPayload(Collections.singletonMap("add", plugin)) - .build(); + V2Request req = + new V2Request.Builder("/cluster/plugin") + .withMethod(POST) + .withPayload(Collections.singletonMap("add", 
plugin)) + .build(); V2Response rsp = req.process(cluster.getSolrClient()); assertEquals(0, rsp.getStatus()); @@ -127,7 +135,12 @@ public void testEvents() throws Exception { // don't kill Overseer JettySolrRunner nonOverseerJetty = null; for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { - if (cluster.getOpenOverseer().getCoreContainer().getZkController().getNodeName().equals(jetty.getNodeName())) { + if (cluster + .getOpenOverseer() + .getCoreContainer() + .getZkController() + .getNodeName() + .equals(jetty.getNodeName())) { continue; } else { nonOverseerJetty = jetty; @@ -139,11 +152,14 @@ public void testEvents() throws Exception { cluster.stopJettySolrRunner(nonOverseerJetty); cluster.waitForJettyToStop(nonOverseerJetty); eventsListener.waitForExpectedEvent(30); - assertNotNull("should be NODES_DOWN events", eventsListener.events.get(ClusterEvent.EventType.NODES_DOWN)); + assertNotNull( + "should be NODES_DOWN events", + eventsListener.events.get(ClusterEvent.EventType.NODES_DOWN)); List events = eventsListener.events.get(ClusterEvent.EventType.NODES_DOWN); assertEquals("should be one NODES_DOWN event", 1, events.size()); ClusterEvent event = events.get(0); - assertEquals("should be NODES_DOWN event type", ClusterEvent.EventType.NODES_DOWN, event.getType()); + assertEquals( + "should be NODES_DOWN event type", ClusterEvent.EventType.NODES_DOWN, event.getType()); NodesDownEvent nodesDown = (NodesDownEvent) event; assertEquals("should be node " + nodeName, nodeName, nodesDown.getNodeNames().next()); @@ -152,41 +168,62 @@ public void testEvents() throws Exception { JettySolrRunner newNode = cluster.startJettySolrRunner(); cluster.waitForNode(newNode, 60); eventsListener.waitForExpectedEvent(30); - assertNotNull("should be NODES_UP events", eventsListener.events.get(ClusterEvent.EventType.NODES_UP)); + assertNotNull( + "should be NODES_UP events", eventsListener.events.get(ClusterEvent.EventType.NODES_UP)); events = eventsListener.events.get(ClusterEvent.EventType.NODES_UP); assertEquals("should be one NODES_UP event", 1, events.size()); event = events.get(0); assertEquals("should be NODES_UP event type", ClusterEvent.EventType.NODES_UP, event.getType()); NodesUpEvent nodesUp = (NodesUpEvent) event; - assertEquals("should be node " + newNode.getNodeName(), newNode.getNodeName(), nodesUp.getNodeNames().next()); + assertEquals( + "should be node " + newNode.getNodeName(), + newNode.getNodeName(), + nodesUp.getNodeNames().next()); // COLLECTIONS_ADDED eventsListener.setExpectedType(ClusterEvent.EventType.COLLECTIONS_ADDED); String collection = "testNodesEvent_collection"; - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collection, "conf", 1, 1); + CollectionAdminRequest.Create create = + CollectionAdminRequest.createCollection(collection, "conf", 1, 1); cluster.getSolrClient().request(create); cluster.waitForActiveCollection(collection, 1, 1); eventsListener.waitForExpectedEvent(30); - assertNotNull("should be COLLECTIONS_ADDED events", eventsListener.events.get(ClusterEvent.EventType.COLLECTIONS_ADDED)); + assertNotNull( + "should be COLLECTIONS_ADDED events", + eventsListener.events.get(ClusterEvent.EventType.COLLECTIONS_ADDED)); events = eventsListener.events.get(ClusterEvent.EventType.COLLECTIONS_ADDED); assertEquals("should be one COLLECTIONS_ADDED event", 1, events.size()); event = events.get(0); - assertEquals("should be COLLECTIONS_ADDED event type", ClusterEvent.EventType.COLLECTIONS_ADDED, event.getType()); + assertEquals( + "should be 
COLLECTIONS_ADDED event type", + ClusterEvent.EventType.COLLECTIONS_ADDED, + event.getType()); CollectionsAddedEvent collectionsAdded = (CollectionsAddedEvent) event; - assertEquals("should be collection " + collection, collection, collectionsAdded.getCollectionNames().next()); + assertEquals( + "should be collection " + collection, + collection, + collectionsAdded.getCollectionNames().next()); // COLLECTIONS_REMOVED eventsListener.setExpectedType(ClusterEvent.EventType.COLLECTIONS_REMOVED); CollectionAdminRequest.Delete delete = CollectionAdminRequest.deleteCollection(collection); cluster.getSolrClient().request(delete); eventsListener.waitForExpectedEvent(30); - assertNotNull("should be COLLECTIONS_REMOVED events", eventsListener.events.get(ClusterEvent.EventType.COLLECTIONS_REMOVED)); + assertNotNull( + "should be COLLECTIONS_REMOVED events", + eventsListener.events.get(ClusterEvent.EventType.COLLECTIONS_REMOVED)); events = eventsListener.events.get(ClusterEvent.EventType.COLLECTIONS_REMOVED); assertEquals("should be one COLLECTIONS_REMOVED event", 1, events.size()); event = events.get(0); - assertEquals("should be COLLECTIONS_REMOVED event type", ClusterEvent.EventType.COLLECTIONS_REMOVED, event.getType()); + assertEquals( + "should be COLLECTIONS_REMOVED event type", + ClusterEvent.EventType.COLLECTIONS_REMOVED, + event.getType()); CollectionsRemovedEvent collectionsRemoved = (CollectionsRemovedEvent) event; - assertEquals("should be collection " + collection, collection, collectionsRemoved.getCollectionNames().next()); + assertEquals( + "should be collection " + collection, + collection, + collectionsRemoved.getCollectionNames().next()); // CLUSTER_CONFIG_CHANGED eventsListener.events.clear(); @@ -195,30 +232,44 @@ public void testEvents() throws Exception { Map oldProps = new HashMap<>(clusterProperties.getClusterProperties()); clusterProperties.setClusterProperty("ext.foo", "bar"); eventsListener.waitForExpectedEvent(30); - assertNotNull("should be CLUSTER_CONFIG_CHANGED events", eventsListener.events.get(ClusterEvent.EventType.CLUSTER_PROPERTIES_CHANGED)); + assertNotNull( + "should be CLUSTER_CONFIG_CHANGED events", + eventsListener.events.get(ClusterEvent.EventType.CLUSTER_PROPERTIES_CHANGED)); events = eventsListener.events.get(ClusterEvent.EventType.CLUSTER_PROPERTIES_CHANGED); assertEquals("should be one CLUSTER_CONFIG_CHANGED event", 1, events.size()); event = events.get(0); - assertEquals("should be CLUSTER_CONFIG_CHANGED event type", ClusterEvent.EventType.CLUSTER_PROPERTIES_CHANGED, event.getType()); + assertEquals( + "should be CLUSTER_CONFIG_CHANGED event type", + ClusterEvent.EventType.CLUSTER_PROPERTIES_CHANGED, + event.getType()); ClusterPropertiesChangedEvent propertiesChanged = (ClusterPropertiesChangedEvent) event; Map newProps = propertiesChanged.getNewClusterProperties(); - assertEquals("new properties wrong value of the 'ext.foo' property: " + newProps, - "bar", newProps.get("ext.foo")); + assertEquals( + "new properties wrong value of the 'ext.foo' property: " + newProps, + "bar", + newProps.get("ext.foo")); // unset the property eventsListener.events.clear(); eventsListener.setExpectedType(ClusterEvent.EventType.CLUSTER_PROPERTIES_CHANGED); clusterProperties.setClusterProperty("ext.foo", null); eventsListener.waitForExpectedEvent(30); - assertNotNull("should be CLUSTER_CONFIG_CHANGED events", eventsListener.events.get(ClusterEvent.EventType.CLUSTER_PROPERTIES_CHANGED)); + assertNotNull( + "should be CLUSTER_CONFIG_CHANGED events", + 
eventsListener.events.get(ClusterEvent.EventType.CLUSTER_PROPERTIES_CHANGED)); events = eventsListener.events.get(ClusterEvent.EventType.CLUSTER_PROPERTIES_CHANGED); assertEquals("should be one CLUSTER_CONFIG_CHANGED event", 1, events.size()); event = events.get(0); - assertEquals("should be CLUSTER_CONFIG_CHANGED event type", ClusterEvent.EventType.CLUSTER_PROPERTIES_CHANGED, event.getType()); + assertEquals( + "should be CLUSTER_CONFIG_CHANGED event type", + ClusterEvent.EventType.CLUSTER_PROPERTIES_CHANGED, + event.getType()); propertiesChanged = (ClusterPropertiesChangedEvent) event; - assertEquals("new properties should not have 'ext.foo' property: " + propertiesChanged.getNewClusterProperties(), - null, propertiesChanged.getNewClusterProperties().get("ext.foo")); - + assertEquals( + "new properties should not have 'ext.foo' property: " + + propertiesChanged.getNewClusterProperties(), + null, + propertiesChanged.getNewClusterProperties().get("ext.foo")); } private static CountDownLatch dummyEventLatch = new CountDownLatch(1); @@ -227,6 +278,7 @@ public void testEvents() throws Exception { public static class DummyEventListener implements ClusterEventListener, ClusterSingleton { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); State state = State.STOPPED; + @Override public void onEvent(ClusterEvent event) { if (state != State.RUNNING) { @@ -235,8 +287,8 @@ public void onEvent(ClusterEvent event) { } return; } - if (event.getType() == ClusterEvent.EventType.COLLECTIONS_ADDED || - event.getType() == ClusterEvent.EventType.COLLECTIONS_REMOVED) { + if (event.getType() == ClusterEvent.EventType.COLLECTIONS_ADDED + || event.getType() == ClusterEvent.EventType.COLLECTIONS_REMOVED) { if (log.isDebugEnabled()) { log.debug("recorded event {}", Utils.toJSONString(event)); } @@ -290,10 +342,11 @@ public void testListenerPlugins() throws Exception { PluginMeta plugin = new PluginMeta(); plugin.klass = DefaultClusterEventProducer.class.getName(); plugin.name = ClusterEventProducer.PLUGIN_NAME; - V2Request req = new V2Request.Builder("/cluster/plugin") - .withMethod(POST) - .withPayload(Collections.singletonMap("add", plugin)) - .build(); + V2Request req = + new V2Request.Builder("/cluster/plugin") + .withMethod(POST) + .withPayload(Collections.singletonMap("add", plugin)) + .build(); V2Response rsp = req.process(cluster.getSolrClient()); assertEquals(0, rsp.getStatus()); version = phaser.awaitAdvanceInterruptibly(version, 10, TimeUnit.SECONDS); @@ -301,22 +354,22 @@ public void testListenerPlugins() throws Exception { plugin = new PluginMeta(); plugin.name = "testplugin"; plugin.klass = DummyEventListener.class.getName(); - req = new V2Request.Builder("/cluster/plugin") - .forceV2(true) - .withMethod(POST) - .withPayload(singletonMap("add", plugin)) - .build(); + req = + new V2Request.Builder("/cluster/plugin") + .forceV2(true) + .withMethod(POST) + .withPayload(singletonMap("add", plugin)) + .build(); rsp = req.process(cluster.getSolrClient()); - //just check if the plugin is indeed registered - V2Request readPluginState = new V2Request.Builder("/cluster/plugin") - .forceV2(true) - .withMethod(GET) - .build(); + // just check if the plugin is indeed registered + V2Request readPluginState = + new V2Request.Builder("/cluster/plugin").forceV2(true).withMethod(GET).build(); rsp = readPluginState.process(cluster.getSolrClient()); assertEquals(DummyEventListener.class.getName(), rsp._getStr("/plugin/testplugin/class", null)); String collection = 
"testListenerPlugins_collection"; - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collection, "conf", 1, 1); + CollectionAdminRequest.Create create = + CollectionAdminRequest.createCollection(collection, "conf", 1, 1); cluster.getSolrClient().request(create); cluster.waitForActiveCollection(collection, 1, 1); boolean await = dummyEventLatch.await(30, TimeUnit.SECONDS); @@ -324,11 +377,14 @@ public void testListenerPlugins() throws Exception { fail("Timed out waiting for COLLECTIONS_ADDED event, " + collection); } assertNotNull("lastEvent should be COLLECTIONS_ADDED", lastEvent); - assertEquals("lastEvent should be COLLECTIONS_ADDED", ClusterEvent.EventType.COLLECTIONS_ADDED, lastEvent.getType()); + assertEquals( + "lastEvent should be COLLECTIONS_ADDED", + ClusterEvent.EventType.COLLECTIONS_ADDED, + lastEvent.getType()); // verify timestamp Instant now = Instant.now(); assertTrue("timestamp of the event is in the future", now.isAfter(lastEvent.getTimestamp())); - assertEquals(collection, ((CollectionsAddedEvent)lastEvent).getCollectionNames().next()); + assertEquals(collection, ((CollectionsAddedEvent) lastEvent).getCollectionNames().next()); dummyEventLatch = new CountDownLatch(1); lastEvent = null; @@ -340,19 +396,23 @@ public void testListenerPlugins() throws Exception { fail("Timed out waiting for COLLECTIONS_REMOVED event, " + collection); } assertNotNull("lastEvent should be COLLECTIONS_REMOVED", lastEvent); - assertEquals("lastEvent should be COLLECTIONS_REMOVED", ClusterEvent.EventType.COLLECTIONS_REMOVED, lastEvent.getType()); + assertEquals( + "lastEvent should be COLLECTIONS_REMOVED", + ClusterEvent.EventType.COLLECTIONS_REMOVED, + lastEvent.getType()); // verify timestamp now = Instant.now(); assertTrue("timestamp of the event is in the future", now.isAfter(lastEvent.getTimestamp())); - assertEquals(collection, ((CollectionsRemovedEvent)lastEvent).getCollectionNames().next()); + assertEquals(collection, ((CollectionsRemovedEvent) lastEvent).getCollectionNames().next()); // test changing the ClusterEventProducer plugin dynamically // remove the plugin (a NoOpProducer will be used instead) - req = new V2Request.Builder("/cluster/plugin") - .withMethod(POST) - .withPayload(Collections.singletonMap("remove", ClusterEventProducer.PLUGIN_NAME)) - .build(); + req = + new V2Request.Builder("/cluster/plugin") + .withMethod(POST) + .withPayload(Collections.singletonMap("remove", ClusterEventProducer.PLUGIN_NAME)) + .build(); req.process(cluster.getSolrClient()); version = phaser.awaitAdvanceInterruptibly(version, 10, TimeUnit.SECONDS); @@ -369,10 +429,11 @@ public void testListenerPlugins() throws Exception { plugin = new PluginMeta(); plugin.klass = DefaultClusterEventProducer.class.getName(); plugin.name = ClusterEventProducer.PLUGIN_NAME; - req = new V2Request.Builder("/cluster/plugin") - .withMethod(POST) - .withPayload(Collections.singletonMap("add", plugin)) - .build(); + req = + new V2Request.Builder("/cluster/plugin") + .withMethod(POST) + .withPayload(Collections.singletonMap("add", plugin)) + .build(); rsp = req.process(cluster.getSolrClient()); assertEquals(0, rsp.getStatus()); phaser.awaitAdvanceInterruptibly(version, 10, TimeUnit.SECONDS); @@ -386,6 +447,9 @@ public void testListenerPlugins() throws Exception { fail("Timed out waiting for COLLECTIONS_REMOVED event, " + collection); } assertNotNull("lastEvent should be COLLECTIONS_REMOVED", lastEvent); - assertEquals("lastEvent should be COLLECTIONS_REMOVED", 
ClusterEvent.EventType.COLLECTIONS_REMOVED, lastEvent.getType()); + assertEquals( + "lastEvent should be COLLECTIONS_REMOVED", + ClusterEvent.EventType.COLLECTIONS_REMOVED, + lastEvent.getType()); } } diff --git a/solr/core/src/test/org/apache/solr/cluster/events/impl/CollectionsRepairEventListenerTest.java b/solr/core/src/test/org/apache/solr/cluster/events/impl/CollectionsRepairEventListenerTest.java index b34067e657e..9015286f99c 100644 --- a/solr/core/src/test/org/apache/solr/cluster/events/impl/CollectionsRepairEventListenerTest.java +++ b/solr/core/src/test/org/apache/solr/cluster/events/impl/CollectionsRepairEventListenerTest.java @@ -17,6 +17,12 @@ package org.apache.solr.cluster.events.impl; +import static org.apache.solr.client.solrj.SolrRequest.METHOD.POST; + +import java.io.IOException; +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.V2Request; @@ -34,20 +40,12 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.io.IOException; -import java.util.Collections; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; - -import static org.apache.solr.client.solrj.SolrRequest.METHOD.POST; - -/** - * - */ +/** */ @LogLevel("org.apache.solr.cluster.events=DEBUG") public class CollectionsRepairEventListenerTest extends SolrCloudTestCase { - public static class CollectionsRepairWrapperListener implements ClusterEventListener, ClusterSingleton { + public static class CollectionsRepairWrapperListener + implements ClusterEventListener, ClusterSingleton { final CollectionsRepairEventListener delegate; CountDownLatch completed = new CountDownLatch(1); @@ -98,23 +96,24 @@ public void close() throws IOException { @BeforeClass public static void setupCluster() throws Exception { configureCluster(NUM_NODES) - .addConfig("conf", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "conf", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .configure(); PluginMeta plugin = new PluginMeta(); plugin.klass = DefaultClusterEventProducer.class.getName(); plugin.name = ClusterEventProducer.PLUGIN_NAME; - V2Request req = new V2Request.Builder("/cluster/plugin") - .withMethod(POST) - .withPayload(Collections.singletonMap("add", plugin)) - .build(); + V2Request req = + new V2Request.Builder("/cluster/plugin") + .withMethod(POST) + .withPayload(Collections.singletonMap("add", plugin)) + .build(); V2Response rsp = req.process(cluster.getSolrClient()); assertNotNull(rsp); waitFor = 1 + random().nextInt(9); CoreContainer cc = cluster.getOpenOverseer().getCoreContainer(); - cc.getClusterEventProducer() - .registerListener(eventsListener, ClusterEvent.EventType.values()); + cc.getClusterEventProducer().registerListener(eventsListener, ClusterEvent.EventType.values()); repairListener = new CollectionsRepairWrapperListener(cc, waitFor); cc.getClusterEventProducer() .registerListener(repairListener, ClusterEvent.EventType.NODES_DOWN); @@ -122,7 +121,7 @@ public static void setupCluster() throws Exception { } @Before - public void setUp() throws Exception { + public void setUp() throws Exception { super.setUp(); cluster.deleteAllCollections(); } @@ -131,7 +130,8 @@ public void setUp() throws Exception { public void testCollectionRepair() throws Exception { 
eventsListener.setExpectedType(ClusterEvent.EventType.COLLECTIONS_ADDED); String collection = "testCollectionRepair_collection"; - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collection, "conf", 1, 3); + CollectionAdminRequest.Create create = + CollectionAdminRequest.createCollection(collection, "conf", 1, 3); cluster.getSolrClient().request(create); cluster.waitForActiveCollection(collection, 1, 3); eventsListener.waitForExpectedEvent(10); @@ -140,7 +140,12 @@ public void testCollectionRepair() throws Exception { // don't kill Overseer JettySolrRunner nonOverseerJetty = null; for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { - if (cluster.getOpenOverseer().getCoreContainer().getZkController().getNodeName().equals(jetty.getNodeName())) { + if (cluster + .getOpenOverseer() + .getCoreContainer() + .getZkController() + .getNodeName() + .equals(jetty.getNodeName())) { continue; } nonOverseerJetty = jetty; diff --git a/solr/core/src/test/org/apache/solr/cluster/placement/AttributeFetcherForTest.java b/solr/core/src/test/org/apache/solr/cluster/placement/AttributeFetcherForTest.java index 8553ee0c967..9b4551850f7 100644 --- a/solr/core/src/test/org/apache/solr/cluster/placement/AttributeFetcherForTest.java +++ b/solr/core/src/test/org/apache/solr/cluster/placement/AttributeFetcherForTest.java @@ -17,11 +17,10 @@ package org.apache.solr.cluster.placement; +import java.util.Set; import org.apache.solr.cluster.Node; import org.apache.solr.cluster.SolrCollection; -import java.util.Set; - public class AttributeFetcherForTest implements AttributeFetcher { private final AttributeValues attributeValues; @@ -41,7 +40,8 @@ public AttributeFetcher requestNodeMetric(NodeMetric metric) { } @Override - public AttributeFetcher requestCollectionMetrics(SolrCollection solrCollection, Set> metricNames) { + public AttributeFetcher requestCollectionMetrics( + SolrCollection solrCollection, Set> metricNames) { return this; } diff --git a/solr/core/src/test/org/apache/solr/cluster/placement/Builders.java b/solr/core/src/test/org/apache/solr/cluster/placement/Builders.java index d31ba45afe0..4ef38be3e43 100644 --- a/solr/core/src/test/org/apache/solr/cluster/placement/Builders.java +++ b/solr/core/src/test/org/apache/solr/cluster/placement/Builders.java @@ -17,6 +17,7 @@ package org.apache.solr.cluster.placement; +import java.util.*; import org.apache.solr.cluster.*; import org.apache.solr.cluster.placement.impl.AttributeFetcherImpl; import org.apache.solr.cluster.placement.impl.AttributeValuesImpl; @@ -27,10 +28,9 @@ import org.apache.solr.common.util.Pair; import org.junit.Assert; -import java.util.*; - /** - * Builder classes to make tests using different cluster and node configurations easier to write and to read. + * Builder classes to make tests using different cluster and node configurations easier to write and + * to read. */ public class Builders { @@ -43,16 +43,16 @@ public static CollectionBuilder newCollectionBuilder(String collectionName) { } public static class ClusterBuilder { - /** - * {@link NodeBuilder} for the live nodes of the cluster. - */ + /** {@link NodeBuilder} for the live nodes of the cluster. 
*/ private LinkedList nodeBuilders = new LinkedList<>(); + private LinkedList collectionBuilders = new LinkedList<>(); public ClusterBuilder initializeLiveNodes(int countNodes) { nodeBuilders = new LinkedList<>(); for (int n = 0; n < countNodes; n++) { - NodeBuilder nodeBuilder = new NodeBuilder().setNodeName("node_" + n); // Default name, can be changed + // Default name, can be changed + NodeBuilder nodeBuilder = new NodeBuilder().setNodeName("node_" + n); nodeBuilder.setTotalDiskGB(10000.0); nodeBuilder.setFreeDiskGB(5000.0); nodeBuilder.setCoreCount(0); @@ -72,7 +72,8 @@ public ClusterBuilder addCollection(CollectionBuilder collectionBuilder) { public Cluster build() { // TODO if converting all tests to use builders change ClusterImpl ctor to use list of nodes - return new ClusterAbstractionsForTest.ClusterImpl(new HashSet<>(buildLiveNodes()), buildClusterCollections()); + return new ClusterAbstractionsForTest.ClusterImpl( + new HashSet<>(buildLiveNodes()), buildClusterCollections()); } public List buildLiveNodes() { @@ -94,7 +95,8 @@ Map buildClusterCollections() { return clusterCollections; } - private static final PlacementPlanFactory PLACEMENT_PLAN_FACTORY = new PlacementPlanFactoryImpl(); + private static final PlacementPlanFactory PLACEMENT_PLAN_FACTORY = + new PlacementPlanFactoryImpl(); public PlacementContext buildPlacementContext() { Cluster cluster = build(); @@ -124,51 +126,71 @@ public AttributeFetcher buildAttributeFetcher() { // TODO And a few more missing and will be added... - // Slight redoing of work twice (building Node instances) but let's favor readability over tricks (I could think - // of many) to reuse the nodes computed in build() or build the AttributeFetcher at the same time. + // Slight redoing of work twice (building Node instances) but let's favor readability over + // tricks (I could think of many) to reuse the nodes computed in build() or build the + // AttributeFetcher at the same time. 
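(As an aside before the loop below: end-to-end use of these builders, mirroring the calls BuildersTest makes further down in this patch, looks roughly like the following sketch. Generic types elided by this diff, such as List<Node> from buildLiveNodes(), are assumed.)

  Builders.ClusterBuilder clusterBuilder = Builders.newClusterBuilder().initializeLiveNodes(3);
  Builders.CollectionBuilder collectionBuilder =
      Builders.newCollectionBuilder("test")
          .initializeShardsReplicas(1, 1, 0, 0, clusterBuilder.getLiveNodeBuilders());
  clusterBuilder.addCollection(collectionBuilder);
  AttributeValues attributeValues = clusterBuilder.buildAttributeFetcher().fetchAttributes();
  for (Node node : clusterBuilder.buildLiveNodes()) {
    // initializeLiveNodes(...) seeds a core count on every node, so this metric is present
    assert attributeValues.getNodeMetric(node, NodeMetricImpl.NUM_CORES).isPresent();
  }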
for (NodeBuilder nodeBuilder : nodeBuilders) { Node node = nodeBuilder.build(); if (nodeBuilder.getCoreCount() != null) { - metrics.computeIfAbsent(NodeMetricImpl.NUM_CORES, n -> new HashMap<>()) + metrics + .computeIfAbsent(NodeMetricImpl.NUM_CORES, n -> new HashMap<>()) .put(node, nodeBuilder.getCoreCount()); } if (nodeBuilder.getFreeDiskGB() != null) { - metrics.computeIfAbsent(NodeMetricImpl.FREE_DISK_GB, n -> new HashMap<>()) + metrics + .computeIfAbsent(NodeMetricImpl.FREE_DISK_GB, n -> new HashMap<>()) .put(node, nodeBuilder.getFreeDiskGB()); } if (nodeBuilder.getTotalDiskGB() != null) { - metrics.computeIfAbsent(NodeMetricImpl.TOTAL_DISK_GB, n -> new HashMap<>()) + metrics + .computeIfAbsent(NodeMetricImpl.TOTAL_DISK_GB, n -> new HashMap<>()) .put(node, nodeBuilder.getTotalDiskGB()); } if (nodeBuilder.getSysprops() != null) { - nodeBuilder.getSysprops().forEach((name, value) -> { - sysprops.computeIfAbsent(name, n -> new HashMap<>()) - .put(node, value); - }); + nodeBuilder + .getSysprops() + .forEach( + (name, value) -> { + sysprops.computeIfAbsent(name, n -> new HashMap<>()).put(node, value); + }); } if (nodeBuilder.getMetrics() != null) { - nodeBuilder.getMetrics().forEach((name, value) -> { - metrics.computeIfAbsent(name, n -> new HashMap<>()) - .put(node, value); - }); + nodeBuilder + .getMetrics() + .forEach( + (name, value) -> { + metrics.computeIfAbsent(name, n -> new HashMap<>()).put(node, value); + }); } } if (!collectionBuilders.isEmpty()) { - Map nodeToCoreCount = metrics.computeIfAbsent(NodeMetricImpl.NUM_CORES, n -> new HashMap<>()); - collectionBuilders.forEach(builder -> { - collectionMetrics.put(builder.collectionName, builder.collectionMetricsBuilder.build()); - SolrCollection collection = builder.build(); - collection.iterator().forEachRemaining(shard -> - shard.iterator().forEachRemaining(replica -> { - nodeToCoreCount.compute(replica.getNode(), (node, count) -> - (count == null) ? 1 : ((Number) count).intValue() + 1); - })); - }); + Map nodeToCoreCount = + metrics.computeIfAbsent(NodeMetricImpl.NUM_CORES, n -> new HashMap<>()); + collectionBuilders.forEach( + builder -> { + collectionMetrics.put( + builder.collectionName, builder.collectionMetricsBuilder.build()); + SolrCollection collection = builder.build(); + collection + .iterator() + .forEachRemaining( + shard -> + shard + .iterator() + .forEachRemaining( + replica -> { + nodeToCoreCount.compute( + replica.getNode(), + (node, count) -> + (count == null) ? 1 : ((Number) count).intValue() + 1); + })); + }); } - AttributeValues attributeValues = new AttributeValuesImpl(sysprops, metrics, collectionMetrics); + AttributeValues attributeValues = + new AttributeValuesImpl(sysprops, metrics, collectionMetrics); return new AttributeFetcherForTest(attributeValues); } } @@ -194,39 +216,46 @@ public CollectionMetricsBuilder getCollectionMetricsBuilder() { } /** - * @return The internal shards data structure to allow test code to modify the replica distribution to nodes. + * @return The internal shards data structure to allow test code to modify the replica + * distribution to nodes. */ public LinkedList getShardBuilders() { return shardBuilders; } /** - * Initializes the collection to a specific shard and replica distribution passed in {@code shardsReplicas}. + * Initializes the collection to a specific shard and replica distribution passed in {@code + * shardsReplicas}. + * * @param shardsReplicas A list of shard descriptions, describing the replicas of that shard. 
- * Replica description include the replica type and the node on which the replica should be placed. - * Everything is text to make it easy to design specific collections. For example the following value: - * <pre>{@code
-     *  List.of(
-     *    List.of("NRT 0", "TLOG 0", "NRT 3"), // shard 1
-     *    List.of("NRT 1", "NRT 3", "TLOG 2")); // shard 2
-     *  }</pre>
- * Creates a placement that would distribute replicas to nodes (there must be at least 4 nodes) - * in the following way: - * <pre>{@code
-     *  +--------------+----+----+----+----+
-     *  |         Node |  0 |  1 |  2 |  3 |
-     *  +----------------------------------+
-     *  |   Shard 1:   |    |    |    |    |
-     *  |         NRT  |  X |    |    |  X |
-     *  |         TLOG |  X |    |    |    |
-     *  +----------------------------------+
-     *  |   Shard 2:   |    |    |    |    |
-     *  |         NRT  |    |  X |    |  X |
-     *  |         TLOG |    |    |  X |    |
-     *  +--------------+----+----+----+----+
-     *  }</pre>
+ * Replica descriptions include the replica type and the node on which the replica should be + * placed. Everything is text to make it easy to design specific collections. For example + * the following value: + * <pre>{@code
+     * List.of(
+     *   List.of("NRT 0", "TLOG 0", "NRT 3"), // shard 1
+     *   List.of("NRT 1", "NRT 3", "TLOG 2")); // shard 2
+     *
+     * }</pre>
+ * Creates a placement that would distribute replicas to nodes (there must be at least 4 + * nodes) in the following way: + * <pre>{@code
+     * +--------------+----+----+----+----+
+     * |         Node |  0 |  1 |  2 |  3 |
+     * +----------------------------------+
+     * |   Shard 1:   |    |    |    |    |
+     * |         NRT  |  X |    |    |  X |
+     * |         TLOG |  X |    |    |    |
+     * +----------------------------------+
+     * |   Shard 2:   |    |    |    |    |
+     * |         NRT  |    |  X |    |  X |
+     * |         TLOG |    |    |  X |    |
+     * +--------------+----+----+----+----+
+     *
+     * }</pre>
*/ - public CollectionBuilder customCollectionSetup(List> shardsReplicas, List liveNodes) { + public CollectionBuilder customCollectionSetup( + List> shardsReplicas, List liveNodes) { shardBuilders = new LinkedList<>(); int shardNumber = 1; // Shard numbering starts at 1 for (List replicasOnNodes : shardsReplicas) { @@ -244,21 +273,27 @@ public CollectionBuilder customCollectionSetup(List> shardsReplicas if (nodeIndex < liveNodes.size()) { node = liveNodes.get(nodeIndex); } else { - // The collection can have replicas on non live nodes. Let's create such a node here (that is not known to the - // cluster). There could be many non live nodes in the collection configuration, they will all reference new - // instances such as below of a node unknown to cluster, but all will have the same name (so will be equal if + // The collection can have replicas on non live nodes. Let's create such a node here + // (that is not known to the cluster). There could be many non live nodes in the + // collection configuration, they will all reference new instances such as below of a + // node unknown to cluster, but all will have the same name (so will be equal if // tested). node = new NodeBuilder().setNodeName("NonLiveNode"); } String replicaName = buildReplicaName(shardName, type); ReplicaBuilder replicaBuilder = new ReplicaBuilder(); - replicaBuilder.setReplicaName(replicaName).setCoreName(buildCoreName(replicaName)).setReplicaType(type) - .setReplicaState(Replica.ReplicaState.ACTIVE).setReplicaNode(node); + replicaBuilder + .setReplicaName(replicaName) + .setCoreName(buildCoreName(replicaName)) + .setReplicaType(type) + .setReplicaState(Replica.ReplicaState.ACTIVE) + .setReplicaNode(node); replicas.add(replicaBuilder); - // No way to specify which replica is the leader. Could be done by adding a "*" to the replica definition for example - // in the passed shardsReplicas but not implementing this until it is needed :) + // No way to specify which replica is the leader. Could be done by adding a "*" to the + // replica definition for example in the passed shardsReplicas but not implementing this + // until it is needed :) if (leader == null && type != Replica.ReplicaType.PULL) { leader = replicaBuilder; } @@ -273,24 +308,33 @@ public CollectionBuilder customCollectionSetup(List> shardsReplicas } /** - * Initializes shard and replica builders for the collection based on passed parameters. Replicas are assigned round - * robin to the nodes. The shard leader is the first NRT replica of each shard (or first TLOG is no NRT). - * Shard and replica configuration can be modified afterwards, the returned builder hierarchy is a convenient starting point. + * Initializes shard and replica builders for the collection based on passed parameters. + * Replicas are assigned round robin to the nodes. The shard leader is the first NRT replica of + * each shard (or first TLOG is no NRT). Shard and replica configuration can be modified + * afterwards, the returned builder hierarchy is a convenient starting point. + * * @param countShards number of shards to create * @param countNrtReplicas number of NRT replicas per shard * @param countTlogReplicas number of TLOG replicas per shard * @param countPullReplicas number of PULL replicas per shard * @param nodes list of nodes to place replicas on. 
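Put concretely, the javadoc's worked example above translates into a call like the following sketch; the generic parameter types, garbled in this diff, are assumed to be List<List<String>> and List<NodeBuilder>:

  Builders.ClusterBuilder clusterBuilder = Builders.newClusterBuilder().initializeLiveNodes(4);
  SolrCollection collection =
      Builders.newCollectionBuilder("test")
          .customCollectionSetup(
              List.of(
                  List.of("NRT 0", "TLOG 0", "NRT 3"), // shard 1
                  List.of("NRT 1", "NRT 3", "TLOG 2")), // shard 2
              clusterBuilder.getLiveNodeBuilders())
          .build();
  // per the table above: node_0 carries shard 1's NRT leader and its TLOG replica,
  // while node_3 carries an NRT replica of both shards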
*/ - public CollectionBuilder initializeShardsReplicas(int countShards, int countNrtReplicas, int countTlogReplicas, - int countPullReplicas, List nodes) { - return initializeShardsReplicas(countShards, countNrtReplicas, countTlogReplicas, countPullReplicas, nodes, null); + public CollectionBuilder initializeShardsReplicas( + int countShards, + int countNrtReplicas, + int countTlogReplicas, + int countPullReplicas, + List nodes) { + return initializeShardsReplicas( + countShards, countNrtReplicas, countTlogReplicas, countPullReplicas, nodes, null); } /** - * Initializes shard and replica builders for the collection based on passed parameters. Replicas are assigned round - * robin to the nodes. The shard leader is the first NRT replica of each shard (or first TLOG is no NRT). - * Shard and replica configuration can be modified afterwards, the returned builder hierarchy is a convenient starting point. + * Initializes shard and replica builders for the collection based on passed parameters. + * Replicas are assigned round robin to the nodes. The shard leader is the first NRT replica of + * each shard (or first TLOG is no NRT). Shard and replica configuration can be modified + * afterwards, the returned builder hierarchy is a convenient starting point. + * * @param countShards number of shards to create * @param countNrtReplicas number of NRT replicas per shard * @param countTlogReplicas number of TLOG replicas per shard @@ -298,30 +342,38 @@ public CollectionBuilder initializeShardsReplicas(int countShards, int countNrtR * @param nodes list of nodes to place replicas on. * @param initialSizeGBPerShard initial replica size (in GB) per shard */ - public CollectionBuilder initializeShardsReplicas(int countShards, int countNrtReplicas, int countTlogReplicas, - int countPullReplicas, List nodes, - List initialSizeGBPerShard) { + public CollectionBuilder initializeShardsReplicas( + int countShards, + int countNrtReplicas, + int countTlogReplicas, + int countPullReplicas, + List nodes, + List initialSizeGBPerShard) { Iterator nodeIterator = nodes.iterator(); shardBuilders = new LinkedList<>(); if (initialSizeGBPerShard != null && initialSizeGBPerShard.size() != countShards) { - throw new RuntimeException("list of shard sizes must be the same length as the countShards!"); + throw new RuntimeException( + "list of shard sizes must be the same length as the countShards!"); } for (int shardNumber = 1; shardNumber <= countShards; shardNumber++) { String shardName = buildShardName(shardNumber); - CollectionMetricsBuilder.ShardMetricsBuilder shardMetricsBuilder = new CollectionMetricsBuilder.ShardMetricsBuilder(shardName); + CollectionMetricsBuilder.ShardMetricsBuilder shardMetricsBuilder = + new CollectionMetricsBuilder.ShardMetricsBuilder(shardName); LinkedList replicas = new LinkedList<>(); ReplicaBuilder leader = null; CollectionMetricsBuilder.ReplicaMetricsBuilder leaderMetrics = null; - // Iterate on requested counts, NRT then TLOG then PULL. Leader chosen as first NRT (or first TLOG if no NRT) - List> replicaTypes = List.of( - new Pair<>(Replica.ReplicaType.NRT, countNrtReplicas), - new Pair<>(Replica.ReplicaType.TLOG, countTlogReplicas), - new Pair<>(Replica.ReplicaType.PULL, countPullReplicas)); + // Iterate on requested counts, NRT then TLOG then PULL. 
Leader chosen as first NRT (or + // first TLOG if no NRT) + List> replicaTypes = + List.of( + new Pair<>(Replica.ReplicaType.NRT, countNrtReplicas), + new Pair<>(Replica.ReplicaType.TLOG, countTlogReplicas), + new Pair<>(Replica.ReplicaType.PULL, countPullReplicas)); for (Pair tc : replicaTypes) { Replica.ReplicaType type = tc.first(); @@ -336,14 +388,21 @@ public CollectionBuilder initializeShardsReplicas(int countShards, int countNrtR String replicaName = buildReplicaName(shardName, type); ReplicaBuilder replicaBuilder = new ReplicaBuilder(); - replicaBuilder.setReplicaName(replicaName).setCoreName(buildCoreName(replicaName)).setReplicaType(type) - .setReplicaState(Replica.ReplicaState.ACTIVE).setReplicaNode(node); + replicaBuilder + .setReplicaName(replicaName) + .setCoreName(buildCoreName(replicaName)) + .setReplicaType(type) + .setReplicaState(Replica.ReplicaState.ACTIVE) + .setReplicaNode(node); replicas.add(replicaBuilder); - CollectionMetricsBuilder.ReplicaMetricsBuilder replicaMetricsBuilder = new CollectionMetricsBuilder.ReplicaMetricsBuilder(replicaName); + CollectionMetricsBuilder.ReplicaMetricsBuilder replicaMetricsBuilder = + new CollectionMetricsBuilder.ReplicaMetricsBuilder(replicaName); shardMetricsBuilder.getReplicaMetricsBuilders().put(replicaName, replicaMetricsBuilder); if (initialSizeGBPerShard != null) { - replicaMetricsBuilder.addMetric(ReplicaMetricImpl.INDEX_SIZE_GB, initialSizeGBPerShard.get(shardNumber - 1) * ReplicaMetricImpl.GB); + replicaMetricsBuilder.addMetric( + ReplicaMetricImpl.INDEX_SIZE_GB, + initialSizeGBPerShard.get(shardNumber - 1) * ReplicaMetricImpl.GB); } if (leader == null && type != Replica.ReplicaType.PULL) { leader = replicaBuilder; @@ -367,7 +426,12 @@ private String buildShardName(int shardIndex) { } private String buildReplicaName(String shardName, Replica.ReplicaType replicaType) { - return collectionName + "_" + shardName + "_replica_" + replicaType.getSuffixChar() + replicaNumber++; + return collectionName + + "_" + + shardName + + "_replica_" + + replicaType.getSuffixChar() + + replicaNumber++; } private String buildCoreName(String replicaName) { @@ -375,7 +439,8 @@ private String buildCoreName(String replicaName) { } public SolrCollection build() { - ClusterAbstractionsForTest.SolrCollectionImpl solrCollection = new ClusterAbstractionsForTest.SolrCollectionImpl(collectionName, customProperties); + ClusterAbstractionsForTest.SolrCollectionImpl solrCollection = + new ClusterAbstractionsForTest.SolrCollectionImpl(collectionName, customProperties); final LinkedHashMap shards = new LinkedHashMap<>(); @@ -418,7 +483,8 @@ public ShardBuilder setLeader(ReplicaBuilder leaderReplicaBuilder) { } public Shard build(SolrCollection collection) { - ClusterAbstractionsForTest.ShardImpl shard = new ClusterAbstractionsForTest.ShardImpl(shardName, collection, Shard.ShardState.ACTIVE); + ClusterAbstractionsForTest.ShardImpl shard = + new ClusterAbstractionsForTest.ShardImpl(shardName, collection, Shard.ShardState.ACTIVE); final LinkedHashMap replicas = new LinkedHashMap<>(); Replica leader = null; @@ -483,7 +549,8 @@ public ReplicaBuilder setReplicaMetric(ReplicaMetric metric, Object value) { } public Replica build(Shard shard) { - return new ClusterAbstractionsForTest.ReplicaImpl(replicaName, coreName, shard, replicaType, replicaState, replicaNode.build()); + return new ClusterAbstractionsForTest.ReplicaImpl( + replicaName, coreName, shard, replicaType, replicaState, replicaNode.build()); } } @@ -553,7 +620,8 @@ public Map, Object> getMetrics() { } 
public Node build() { - // It is ok to build a new instance each time, that instance does the right thing with equals() and hashCode() + // It is ok to build a new instance each time, that instance does the right thing with + // equals() and hashCode() return new ClusterAbstractionsForTest.NodeImpl(nodeName); } } diff --git a/solr/core/src/test/org/apache/solr/cluster/placement/BuildersTest.java b/solr/core/src/test/org/apache/solr/cluster/placement/BuildersTest.java index 5b12b43f0f1..bad348e9622 100644 --- a/solr/core/src/test/org/apache/solr/cluster/placement/BuildersTest.java +++ b/solr/core/src/test/org/apache/solr/cluster/placement/BuildersTest.java @@ -16,6 +16,11 @@ */ package org.apache.solr.cluster.placement; +import static org.apache.solr.cluster.placement.Builders.*; + +import java.util.List; +import java.util.Optional; +import java.util.Set; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.cluster.Cluster; import org.apache.solr.cluster.Node; @@ -25,15 +30,7 @@ import org.apache.solr.cluster.placement.impl.ReplicaMetricImpl; import org.junit.Test; -import java.util.List; -import java.util.Optional; -import java.util.Set; - -import static org.apache.solr.cluster.placement.Builders.*; - -/** - * - */ +/** */ public class BuildersTest extends SolrTestCaseJ4 { @Test @@ -42,15 +39,16 @@ public void testClusterBuilder() throws Exception { int NUM_SHARDS = 2; int NUM_NRT_REPLICAS = 2; String collectionName = "test"; - ClusterBuilder clusterBuilder = newClusterBuilder() - .initializeLiveNodes(NUM_NODES); - CollectionBuilder collectionBuilder = newCollectionBuilder(collectionName) - .initializeShardsReplicas(NUM_SHARDS, - NUM_NRT_REPLICAS, - NUM_NRT_REPLICAS + 1, - NUM_NRT_REPLICAS + 2, - clusterBuilder.getLiveNodeBuilders(), - List.of(10, 20)); + ClusterBuilder clusterBuilder = newClusterBuilder().initializeLiveNodes(NUM_NODES); + CollectionBuilder collectionBuilder = + newCollectionBuilder(collectionName) + .initializeShardsReplicas( + NUM_SHARDS, + NUM_NRT_REPLICAS, + NUM_NRT_REPLICAS + 1, + NUM_NRT_REPLICAS + 2, + clusterBuilder.getLiveNodeBuilders(), + List.of(10, 20)); clusterBuilder.addCollection(collectionBuilder); Cluster cluster = clusterBuilder.build(); assertEquals("number of nodes", NUM_NODES, cluster.getLiveNodes().size()); @@ -61,18 +59,21 @@ public void testClusterBuilder() throws Exception { Shard shard = collection.getShard(shardName); assertNotNull("shard leader", shard.getLeader()); int[] counts = new int[3]; - shard.iterator().forEachRemaining(r -> { - switch (r.getType()) { - case NRT: - counts[0]++; - break; - case TLOG: - counts[1]++; - break; - case PULL: - counts[2]++; - } - }); + shard + .iterator() + .forEachRemaining( + r -> { + switch (r.getType()) { + case NRT: + counts[0]++; + break; + case TLOG: + counts[1]++; + break; + case PULL: + counts[2]++; + } + }); assertEquals("numNrt", NUM_NRT_REPLICAS, counts[0]); assertEquals("numTlog", NUM_NRT_REPLICAS + 1, counts[1]); assertEquals("numPull", NUM_NRT_REPLICAS + 2, counts[2]); @@ -94,7 +95,8 @@ public void testClusterBuilder() throws Exception { diskOpt = attributeValues.getNodeMetric(node, NodeMetricImpl.TOTAL_DISK_GB); assertTrue("totalDisk", diskOpt.isPresent()); } - Optional collectionMetricsOpt = attributeValues.getCollectionMetrics(collectionName); + Optional collectionMetricsOpt = + attributeValues.getCollectionMetrics(collectionName); assertTrue("collectionMetrics present", collectionMetricsOpt.isPresent()); CollectionMetrics collectionMetrics = collectionMetricsOpt.get(); for (String 
shardName : collection.getShardNames()) { @@ -112,18 +114,23 @@ public void testClusterBuilder() throws Exception { assertEquals("size", 20, ((Number) sizeOpt.get()).intValue()); } Shard shard = collection.getShard(shardName); - shard.iterator().forEachRemaining(r -> { - Optional metricsOpt = shardMetrics.getReplicaMetrics(r.getReplicaName()); - assertTrue("replica metrics", metricsOpt.isPresent()); - ReplicaMetrics metrics = metricsOpt.get(); - Optional replicaSizeOpt = metrics.getReplicaMetric(ReplicaMetricImpl.INDEX_SIZE_GB); - assertTrue("missing size", replicaSizeOpt.isPresent()); - if (shardName.endsWith("1")) { - assertEquals("size", 10, ((Number) replicaSizeOpt.get()).intValue()); - } else { - assertEquals("size", 20, ((Number) replicaSizeOpt.get()).intValue()); - } - }); + shard + .iterator() + .forEachRemaining( + r -> { + Optional metricsOpt = + shardMetrics.getReplicaMetrics(r.getReplicaName()); + assertTrue("replica metrics", metricsOpt.isPresent()); + ReplicaMetrics metrics = metricsOpt.get(); + Optional replicaSizeOpt = + metrics.getReplicaMetric(ReplicaMetricImpl.INDEX_SIZE_GB); + assertTrue("missing size", replicaSizeOpt.isPresent()); + if (shardName.endsWith("1")) { + assertEquals("size", 10, ((Number) replicaSizeOpt.get()).intValue()); + } else { + assertEquals("size", 20, ((Number) replicaSizeOpt.get()).intValue()); + } + }); } } } diff --git a/solr/core/src/test/org/apache/solr/cluster/placement/ClusterAbstractionsForTest.java b/solr/core/src/test/org/apache/solr/cluster/placement/ClusterAbstractionsForTest.java index f39d1a26281..5ddd0b2efda 100644 --- a/solr/core/src/test/org/apache/solr/cluster/placement/ClusterAbstractionsForTest.java +++ b/solr/core/src/test/org/apache/solr/cluster/placement/ClusterAbstractionsForTest.java @@ -17,13 +17,13 @@ package org.apache.solr.cluster.placement; -import org.apache.solr.cluster.*; - import java.util.*; import java.util.stream.Collectors; +import org.apache.solr.cluster.*; /** - * Cluster abstractions independent of any internal SolrCloud abstractions to use in tests (of plugin code). + * Cluster abstractions independent of any internal SolrCloud abstractions to use in tests (of + * plugin code). */ class ClusterAbstractionsForTest { @@ -62,13 +62,10 @@ public Iterable collections() { } } - static class NodeImpl implements Node { public final String nodeName; - /** - * Transforms a collection of node names into a set of {@link Node} instances. - */ + /** Transforms a collection of node names into a set of {@link Node} instances. */ static Set getNodes(Collection nodeNames) { return nodeNames.stream().map(NodeImpl::new).collect(Collectors.toSet()); } @@ -88,9 +85,10 @@ public String toString() { } /** - * This class ends up as a key in Maps in {@link org.apache.solr.cluster.placement.AttributeValues}. - * It is important to implement this method comparing node names given that new instances of {@link Node} are created - * with names equal to existing instances (See {@link Builders.NodeBuilder#build()}). + * This class ends up as a key in Maps in {@link + * org.apache.solr.cluster.placement.AttributeValues}. It is important to implement this method + * comparing node names given that new instances of {@link Node} are created with names equal to + * existing instances (See {@link Builders.NodeBuilder#build()}). 
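   * For example (an illustration of the contract only, not code from this patch):
   * <pre>{@code
   * Node a = new Builders.NodeBuilder().setNodeName("node_0").build();
   * Node b = new Builders.NodeBuilder().setNodeName("node_0").build();
   * // distinct instances, same name: they must be equal and hash alike, or map
   * // lookups in AttributeValues keyed on freshly built nodes would miss
   * assert a.equals(b) && a.hashCode() == b.hashCode();
   * }</pre>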
*/ public boolean equals(Object obj) { if (obj == null) { @@ -111,13 +109,11 @@ public int hashCode() { } } - static class SolrCollectionImpl implements SolrCollection { private final String collectionName; - /** - * Map from {@link Shard#getShardName()} to {@link Shard} - */ + /** Map from {@link Shard#getShardName()} to {@link Shard} */ private Map shards; + private final Map customProperties; SolrCollectionImpl(String collectionName, Map customProperties) { @@ -126,7 +122,8 @@ static class SolrCollectionImpl implements SolrCollection { } /** - * Setting the shards has to happen (in tests) after creating the collection because shards reference the collection + * Setting the shards has to happen (in tests) after creating the collection because shards + * reference the collection */ void setShards(Map shards) { this.shards = shards; @@ -163,7 +160,6 @@ public String getCustomProperty(String customPropertyName) { } } - static class ShardImpl implements Shard { private final String shardName; private final SolrCollection collection; @@ -178,7 +174,8 @@ static class ShardImpl implements Shard { } /** - * Setting the replicas has to happen (in tests) after creating the shard because replicas reference the shard + * Setting the replicas has to happen (in tests) after creating the shard because replicas + * reference the shard */ void setReplicas(Map replicas, Replica leader) { this.replicas = replicas; @@ -243,7 +240,6 @@ public int hashCode() { } } - static class ReplicaImpl implements Replica { private final String replicaName; private final String coreName; @@ -252,7 +248,13 @@ static class ReplicaImpl implements Replica { private final ReplicaState replicaState; private final Node node; - ReplicaImpl(String replicaName, String coreName, Shard shard, ReplicaType replicaType, ReplicaState replicaState, Node node) { + ReplicaImpl( + String replicaName, + String coreName, + Shard shard, + ReplicaType replicaType, + ReplicaState replicaState, + Node node) { this.replicaName = replicaName; this.coreName = coreName; this.shard = shard; @@ -316,14 +318,23 @@ public int hashCode() { @Override public String toString() { - return "ReplicaImpl{" + - "replicaName='" + replicaName + '\'' + - ", coreName='" + coreName + '\'' + - ", shard='" + shard + '\'' + - ", replicaType=" + replicaType + - ", replicaState=" + replicaState + - ", node=" + node + - '}'; + return "ReplicaImpl{" + + "replicaName='" + + replicaName + + '\'' + + ", coreName='" + + coreName + + '\'' + + ", shard='" + + shard + + '\'' + + ", replicaType=" + + replicaType + + ", replicaState=" + + replicaState + + ", node=" + + node + + '}'; } } } diff --git a/solr/core/src/test/org/apache/solr/cluster/placement/impl/PlacementPluginIntegrationTest.java b/solr/core/src/test/org/apache/solr/cluster/placement/impl/PlacementPluginIntegrationTest.java index 93003a1316d..00299a69b3c 100644 --- a/solr/core/src/test/org/apache/solr/cluster/placement/impl/PlacementPluginIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/cluster/placement/impl/PlacementPluginIntegrationTest.java @@ -17,12 +17,25 @@ package org.apache.solr.cluster.placement.impl; +import static java.util.Collections.singletonMap; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.Phaser; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicInteger; import 
org.apache.solr.client.solrj.cloud.SolrCloudManager; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.V2Request; import org.apache.solr.client.solrj.request.beans.PluginMeta; import org.apache.solr.client.solrj.response.CollectionAdminResponse; import org.apache.solr.client.solrj.response.V2Response; +import org.apache.solr.cloud.MiniSolrCloudCluster; import org.apache.solr.cloud.SolrCloudTestCase; import org.apache.solr.cluster.Cluster; import org.apache.solr.cluster.Node; @@ -37,36 +50,20 @@ import org.apache.solr.cluster.placement.ShardMetrics; import org.apache.solr.cluster.placement.plugins.AffinityPlacementConfig; import org.apache.solr.cluster.placement.plugins.AffinityPlacementFactory; -import org.apache.solr.cloud.MiniSolrCloudCluster; import org.apache.solr.cluster.placement.plugins.MinimizeCoresPlacementFactory; import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.core.CoreContainer; import org.apache.solr.util.LogLevel; - import org.junit.After; import org.junit.BeforeClass; import org.junit.Test; -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.Phaser; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; -import java.util.concurrent.atomic.AtomicInteger; - -import static java.util.Collections.singletonMap; - -/** - * Test for {@link MinimizeCoresPlacementFactory} using a {@link MiniSolrCloudCluster}. - */ +/** Test for {@link MinimizeCoresPlacementFactory} using a {@link MiniSolrCloudCluster}. */ @LogLevel("org.apache.solr.cluster.placement.impl=DEBUG") public class PlacementPluginIntegrationTest extends SolrCloudTestCase { - private static final String COLLECTION = PlacementPluginIntegrationTest.class.getSimpleName() + "_collection"; + private static final String COLLECTION = + PlacementPluginIntegrationTest.class.getSimpleName() + "_collection"; private static SolrCloudManager cloudManager; private static CoreContainer cc; @@ -75,9 +72,7 @@ public class PlacementPluginIntegrationTest extends SolrCloudTestCase { public static void setupCluster() throws Exception { // placement plugins need metrics System.setProperty("metricsEnabled", "true"); - configureCluster(3) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(3).addConfig("conf", configset("cloud-minimal")).configure(); cc = cluster.getJettySolrRunner(0).getCoreContainer(); cloudManager = cc.getZkController().getSolrCloudManager(); } @@ -85,17 +80,15 @@ public static void setupCluster() throws Exception { @After public void cleanup() throws Exception { cluster.deleteAllCollections(); - V2Request req = new V2Request.Builder("/cluster/plugin") - .forceV2(true) - .GET() - .build(); + V2Request req = new V2Request.Builder("/cluster/plugin").forceV2(true).GET().build(); V2Response rsp = req.process(cluster.getSolrClient()); if (rsp._get(Arrays.asList("plugin", PlacementPluginFactory.PLUGIN_NAME), null) != null) { - req = new V2Request.Builder("/cluster/plugin") - .forceV2(true) - .POST() - .withPayload("{remove: '" + PlacementPluginFactory.PLUGIN_NAME + "'}") - .build(); + req = + new V2Request.Builder("/cluster/plugin") + .forceV2(true) + .POST() + .withPayload("{remove: '" + PlacementPluginFactory.PLUGIN_NAME + "'}") + .build(); req.process(cluster.getSolrClient()); } } @@ -105,15 +98,17 @@ 
public void testMinimizeCores() throws Exception { PluginMeta plugin = new PluginMeta(); plugin.name = PlacementPluginFactory.PLUGIN_NAME; plugin.klass = MinimizeCoresPlacementFactory.class.getName(); - V2Request req = new V2Request.Builder("/cluster/plugin") - .forceV2(true) - .POST() - .withPayload(singletonMap("add", plugin)) - .build(); + V2Request req = + new V2Request.Builder("/cluster/plugin") + .forceV2(true) + .POST() + .withPayload(singletonMap("add", plugin)) + .build(); req.process(cluster.getSolrClient()); - CollectionAdminResponse rsp = CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 2) - .process(cluster.getSolrClient()); + CollectionAdminResponse rsp = + CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 2) + .process(cluster.getSolrClient()); assertTrue(rsp.isSuccess()); cluster.waitForActiveCollection(COLLECTION, 2, 4); // use Solr-specific API to verify the expected placements @@ -121,11 +116,16 @@ public void testMinimizeCores() throws Exception { DocCollection collection = clusterState.getCollectionOrNull(COLLECTION); assertNotNull(collection); Map coresByNode = new HashMap<>(); - collection.forEachReplica((shard, replica) -> coresByNode.computeIfAbsent(replica.getNodeName(), n -> new AtomicInteger()).incrementAndGet()); + collection.forEachReplica( + (shard, replica) -> + coresByNode + .computeIfAbsent(replica.getNodeName(), n -> new AtomicInteger()) + .incrementAndGet()); int maxCores = 0; int minCores = Integer.MAX_VALUE; for (Map.Entry entry : coresByNode.entrySet()) { - assertTrue("too few cores on node " + entry.getKey() + ": " + entry.getValue(), + assertTrue( + "too few cores on node " + entry.getKey() + ": " + entry.getValue(), entry.getValue().get() > 0); if (entry.getValue().get() > maxCores) { maxCores = entry.getValue().get(); @@ -140,8 +140,11 @@ public void testMinimizeCores() throws Exception { @Test public void testDynamicReconfiguration() throws Exception { - PlacementPluginFactory pluginFactory = cc.getPlacementPluginFactory(); - assertTrue("wrong type " + pluginFactory.getClass().getName(), pluginFactory instanceof DelegatingPlacementPluginFactory); + PlacementPluginFactory pluginFactory = + cc.getPlacementPluginFactory(); + assertTrue( + "wrong type " + pluginFactory.getClass().getName(), + pluginFactory instanceof DelegatingPlacementPluginFactory); DelegatingPlacementPluginFactory wrapper = (DelegatingPlacementPluginFactory) pluginFactory; Phaser phaser = new Phaser(); wrapper.setDelegationPhaser(phaser); @@ -152,67 +155,78 @@ public void testDynamicReconfiguration() throws Exception { PluginMeta plugin = new PluginMeta(); plugin.name = PlacementPluginFactory.PLUGIN_NAME; plugin.klass = MinimizeCoresPlacementFactory.class.getName(); - V2Request req = new V2Request.Builder("/cluster/plugin") - .forceV2(true) - .POST() - .withPayload(singletonMap("add", plugin)) - .build(); + V2Request req = + new V2Request.Builder("/cluster/plugin") + .forceV2(true) + .POST() + .withPayload(singletonMap("add", plugin)) + .build(); req.process(cluster.getSolrClient()); version = phaser.awaitAdvanceInterruptibly(version, 10, TimeUnit.SECONDS); PlacementPluginFactory factory = wrapper.getDelegate(); - assertTrue("wrong type " + factory.getClass().getName(), factory instanceof MinimizeCoresPlacementFactory); + assertTrue( + "wrong type " + factory.getClass().getName(), + factory instanceof MinimizeCoresPlacementFactory); // reconfigure plugin.klass = AffinityPlacementFactory.class.getName(); plugin.config = new AffinityPlacementConfig(1, 2); 
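The assertions that follow pin down how the two-argument constructor maps onto the config fields; stated as a sketch, inferred from this test rather than from the AffinityPlacementConfig source:

  AffinityPlacementConfig config = new AffinityPlacementConfig(1, 2);
  assert config.minimalFreeDiskGB == 1;     // first argument
  assert config.prioritizedFreeDiskGB == 2; // second argument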
- req = new V2Request.Builder("/cluster/plugin") - .forceV2(true) - .POST() - .withPayload(singletonMap("update", plugin)) - .build(); + req = + new V2Request.Builder("/cluster/plugin") + .forceV2(true) + .POST() + .withPayload(singletonMap("update", plugin)) + .build(); req.process(cluster.getSolrClient()); version = phaser.awaitAdvanceInterruptibly(version, 10, TimeUnit.SECONDS); factory = wrapper.getDelegate(); - assertTrue("wrong type " + factory.getClass().getName(), factory instanceof AffinityPlacementFactory); + assertTrue( + "wrong type " + factory.getClass().getName(), factory instanceof AffinityPlacementFactory); AffinityPlacementConfig config = ((AffinityPlacementFactory) factory).getConfig(); assertEquals("minimalFreeDiskGB", 1, config.minimalFreeDiskGB); assertEquals("prioritizedFreeDiskGB", 2, config.prioritizedFreeDiskGB); // change plugin config plugin.config = new AffinityPlacementConfig(3, 4); - req = new V2Request.Builder("/cluster/plugin") - .forceV2(true) - .POST() - .withPayload(singletonMap("update", plugin)) - .build(); + req = + new V2Request.Builder("/cluster/plugin") + .forceV2(true) + .POST() + .withPayload(singletonMap("update", plugin)) + .build(); req.process(cluster.getSolrClient()); version = phaser.awaitAdvanceInterruptibly(version, 10, TimeUnit.SECONDS); factory = wrapper.getDelegate(); - assertTrue("wrong type " + factory.getClass().getName(), factory instanceof AffinityPlacementFactory); + assertTrue( + "wrong type " + factory.getClass().getName(), factory instanceof AffinityPlacementFactory); config = ((AffinityPlacementFactory) factory).getConfig(); assertEquals("minimalFreeDiskGB", 3, config.minimalFreeDiskGB); assertEquals("prioritizedFreeDiskGB", 4, config.prioritizedFreeDiskGB); // add plugin of the right type but with the wrong name plugin.name = "myPlugin"; - req = new V2Request.Builder("/cluster/plugin") - .forceV2(true) - .POST() - .withPayload(singletonMap("add", plugin)) - .build(); + req = + new V2Request.Builder("/cluster/plugin") + .forceV2(true) + .POST() + .withPayload(singletonMap("add", plugin)) + .build(); req.process(cluster.getSolrClient()); final int oldVersion = version; - expectThrows(TimeoutException.class, () -> phaser.awaitAdvanceInterruptibly(oldVersion, 5, TimeUnit.SECONDS)); + expectThrows( + TimeoutException.class, + () -> phaser.awaitAdvanceInterruptibly(oldVersion, 5, TimeUnit.SECONDS)); // remove plugin - req = new V2Request.Builder("/cluster/plugin") - .forceV2(true) - .POST() - .withPayload("{remove: '" + PlacementPluginFactory.PLUGIN_NAME + "'}") - .build(); + req = + new V2Request.Builder("/cluster/plugin") + .forceV2(true) + .POST() + .withPayload("{remove: '" + PlacementPluginFactory.PLUGIN_NAME + "'}") + .build(); req.process(cluster.getSolrClient()); phaser.awaitAdvanceInterruptibly(version, 10, TimeUnit.SECONDS); factory = wrapper.getDelegate(); @@ -221,8 +235,11 @@ public void testDynamicReconfiguration() throws Exception { @Test public void testWithCollectionIntegration() throws Exception { - PlacementPluginFactory pluginFactory = cc.getPlacementPluginFactory(); - assertTrue("wrong type " + pluginFactory.getClass().getName(), pluginFactory instanceof DelegatingPlacementPluginFactory); + PlacementPluginFactory pluginFactory = + cc.getPlacementPluginFactory(); + assertTrue( + "wrong type " + pluginFactory.getClass().getName(), + pluginFactory instanceof DelegatingPlacementPluginFactory); DelegatingPlacementPluginFactory wrapper = (DelegatingPlacementPluginFactory) pluginFactory; Phaser phaser = new Phaser(); 
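A gloss on the Phaser idiom recurring in these tests: the test hands a phaser to the delegating factory, remembers the current phase, fires a V2 request, and blocks until the wrapper advances the phase, i.e. until the new delegate is actually installed. A minimal sketch of the waiting side (the wrapper's internal arrival logic is not shown in this diff and is assumed):

  Phaser phaser = new Phaser();
  wrapper.setDelegationPhaser(phaser);  // hook shown in this diff
  int version = phaser.getPhase();      // phase before the change
  // ... send the V2 add/update/remove request ...
  version = phaser.awaitAdvanceInterruptibly(version, 10, TimeUnit.SECONDS);
  // returns with the next phase once the delegate swap has happened,
  // or throws TimeoutException if nothing advanced within 10 seconds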
wrapper.setDelegationPhaser(phaser); @@ -241,52 +258,71 @@ public void testWithCollectionIntegration() throws Exception { PluginMeta plugin = new PluginMeta(); plugin.name = PlacementPluginFactory.PLUGIN_NAME; plugin.klass = AffinityPlacementFactory.class.getName(); - plugin.config = new AffinityPlacementConfig(1, 2, Map.of(COLLECTION, SECONDARY_COLLECTION), Map.of()); - V2Request req = new V2Request.Builder("/cluster/plugin") - .forceV2(true) - .POST() - .withPayload(singletonMap("add", plugin)) - .build(); + plugin.config = + new AffinityPlacementConfig(1, 2, Map.of(COLLECTION, SECONDARY_COLLECTION), Map.of()); + V2Request req = + new V2Request.Builder("/cluster/plugin") + .forceV2(true) + .POST() + .withPayload(singletonMap("add", plugin)) + .build(); req.process(cluster.getSolrClient()); phaser.awaitAdvanceInterruptibly(version, 10, TimeUnit.SECONDS); - CollectionAdminResponse rsp = CollectionAdminRequest.createCollection(SECONDARY_COLLECTION, "conf", 1, 3) - .process(cluster.getSolrClient()); + CollectionAdminResponse rsp = + CollectionAdminRequest.createCollection(SECONDARY_COLLECTION, "conf", 1, 3) + .process(cluster.getSolrClient()); assertTrue(rsp.isSuccess()); cluster.waitForActiveCollection(SECONDARY_COLLECTION, 1, 3); - DocCollection secondary = cloudManager.getClusterStateProvider().getClusterState().getCollection(SECONDARY_COLLECTION); + DocCollection secondary = + cloudManager + .getClusterStateProvider() + .getClusterState() + .getCollection(SECONDARY_COLLECTION); Set secondaryNodes = new HashSet<>(); secondary.forEachReplica((shard, replica) -> secondaryNodes.add(replica.getNodeName())); - rsp = CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 2) - .setCreateNodeSet(String.join(",", nodeSet)) - .process(cluster.getSolrClient()); + rsp = + CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 2) + .setCreateNodeSet(String.join(",", nodeSet)) + .process(cluster.getSolrClient()); assertTrue(rsp.isSuccess()); cluster.waitForActiveCollection(COLLECTION, 2, 4); // make sure the primary replicas were placed on the nodeset - DocCollection primary = cloudManager.getClusterStateProvider().getClusterState().getCollection(COLLECTION); - primary.forEachReplica((shard, replica) -> - assertTrue("primary replica not on secondary node!", nodeSet.contains(replica.getNodeName()))); + DocCollection primary = + cloudManager.getClusterStateProvider().getClusterState().getCollection(COLLECTION); + primary.forEachReplica( + (shard, replica) -> + assertTrue( + "primary replica not on secondary node!", nodeSet.contains(replica.getNodeName()))); // try deleting secondary replica from node without the primary replica - Optional onlySecondaryReplica = secondary.getReplicas().stream() - .filter(replica -> !nodeSet.contains(replica.getNodeName())) - .map(replica -> replica.getName()).findFirst(); + Optional onlySecondaryReplica = + secondary.getReplicas().stream() + .filter(replica -> !nodeSet.contains(replica.getNodeName())) + .map(replica -> replica.getName()) + .findFirst(); assertTrue("no secondary node without primary replica", onlySecondaryReplica.isPresent()); - rsp = CollectionAdminRequest.deleteReplica(SECONDARY_COLLECTION, "shard1", onlySecondaryReplica.get()) - .process(cluster.getSolrClient()); + rsp = + CollectionAdminRequest.deleteReplica( + SECONDARY_COLLECTION, "shard1", onlySecondaryReplica.get()) + .process(cluster.getSolrClient()); assertTrue("delete of a lone secondary replica should succeed", rsp.isSuccess()); // try deleting secondary replica from node WITH 
the primary replica - should fail - Optional secondaryWithPrimaryReplica = secondary.getReplicas().stream() - .filter(replica -> nodeSet.contains(replica.getNodeName())) - .map(replica -> replica.getName()).findFirst(); + Optional secondaryWithPrimaryReplica = + secondary.getReplicas().stream() + .filter(replica -> nodeSet.contains(replica.getNodeName())) + .map(replica -> replica.getName()) + .findFirst(); assertTrue("no secondary node with primary replica", secondaryWithPrimaryReplica.isPresent()); try { - rsp = CollectionAdminRequest.deleteReplica(SECONDARY_COLLECTION, "shard1", secondaryWithPrimaryReplica.get()) - .process(cluster.getSolrClient()); + rsp = + CollectionAdminRequest.deleteReplica( + SECONDARY_COLLECTION, "shard1", secondaryWithPrimaryReplica.get()) + .process(cluster.getSolrClient()); fail("should have failed: " + rsp); } catch (Exception e) { assertTrue(e.toString(), e.toString().contains("co-located with replicas")); @@ -294,8 +330,9 @@ public void testWithCollectionIntegration() throws Exception { // try deleting secondary collection try { - rsp = CollectionAdminRequest.deleteCollection(SECONDARY_COLLECTION) - .process(cluster.getSolrClient()); + rsp = + CollectionAdminRequest.deleteCollection(SECONDARY_COLLECTION) + .process(cluster.getSolrClient()); fail("should have failed: " + rsp); } catch (Exception e) { assertTrue(e.toString(), e.toString().contains("colocated collection")); @@ -313,8 +350,11 @@ public void testNodeTypeIntegration() throws Exception { String collectionName = "nodeTypeCollection"; - PlacementPluginFactory pluginFactory = cc.getPlacementPluginFactory(); - assertTrue("wrong type " + pluginFactory.getClass().getName(), pluginFactory instanceof DelegatingPlacementPluginFactory); + PlacementPluginFactory pluginFactory = + cc.getPlacementPluginFactory(); + assertTrue( + "wrong type " + pluginFactory.getClass().getName(), + pluginFactory instanceof DelegatingPlacementPluginFactory); DelegatingPlacementPluginFactory wrapper = (DelegatingPlacementPluginFactory) pluginFactory; Phaser phaser = new Phaser(); wrapper.setDelegationPhaser(phaser); @@ -325,7 +365,8 @@ public void testNodeTypeIntegration() throws Exception { plugin.name = PlacementPluginFactory.PLUGIN_NAME; plugin.klass = AffinityPlacementFactory.class.getName(); plugin.config = new AffinityPlacementConfig(1, 2, Map.of(), Map.of(collectionName, "type_0")); - V2Request req = new V2Request.Builder("/cluster/plugin") + V2Request req = + new V2Request.Builder("/cluster/plugin") .forceV2(true) .POST() .withPayload(singletonMap("add", plugin)) @@ -335,15 +376,18 @@ public void testNodeTypeIntegration() throws Exception { phaser.awaitAdvanceInterruptibly(version, 10, TimeUnit.SECONDS); try { - CollectionAdminResponse rsp = CollectionAdminRequest.createCollection(collectionName, "conf", 1, 3) + CollectionAdminResponse rsp = + CollectionAdminRequest.createCollection(collectionName, "conf", 1, 3) .process(cluster.getSolrClient()); fail("should have failed due to no nodes with the types: " + rsp); } catch (Exception e) { - assertTrue("should contain 'no nodes with types':" + e.toString(), - e.toString().contains("no nodes with types")); + assertTrue( + "should contain 'no nodes with types':" + e.toString(), + e.toString().contains("no nodes with types")); } System.setProperty(AffinityPlacementConfig.NODE_TYPE_SYSPROP, "type_0"); - CollectionAdminResponse rsp = CollectionAdminRequest.createCollection(collectionName, "conf", 1, 3) + CollectionAdminResponse rsp = + 
CollectionAdminRequest.createCollection(collectionName, "conf", 1, 3) .process(cluster.getSolrClient()); System.clearProperty(AffinityPlacementConfig.NODE_TYPE_SYSPROP); @@ -351,8 +395,9 @@ public void testNodeTypeIntegration() throws Exception { @Test public void testAttributeFetcherImpl() throws Exception { - CollectionAdminResponse rsp = CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 2) - .process(cluster.getSolrClient()); + CollectionAdminResponse rsp = + CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 2) + .process(cluster.getSolrClient()); assertTrue(rsp.isSuccess()); cluster.waitForActiveCollection(COLLECTION, 2, 4); Cluster cluster = new SimpleClusterAbstractionsImpl.ClusterImpl(cloudManager); @@ -370,14 +415,21 @@ public void testAttributeFetcherImpl() throws Exception { .requestNodeMetric(NodeMetricImpl.AVAILABLE_PROCESSORS) .requestNodeMetric(someMetricKey) .requestNodeSystemProperty(sysprop) - .requestCollectionMetrics(collection, Set.of(ReplicaMetricImpl.INDEX_SIZE_GB, ReplicaMetricImpl.QUERY_RATE_1MIN, ReplicaMetricImpl.UPDATE_RATE_1MIN)); + .requestCollectionMetrics( + collection, + Set.of( + ReplicaMetricImpl.INDEX_SIZE_GB, + ReplicaMetricImpl.QUERY_RATE_1MIN, + ReplicaMetricImpl.UPDATE_RATE_1MIN)); AttributeValues attributeValues = attributeFetcher.fetchAttributes(); String userName = System.getProperty("user.name"); // node metrics for (Node node : cluster.getLiveNodes()) { Optional doubleOpt = attributeValues.getNodeMetric(node, NodeMetricImpl.HEAP_USAGE); assertTrue("heap usage", doubleOpt.isPresent()); - assertTrue("heap usage should be 0 < heapUsage < 100 but was " + doubleOpt, doubleOpt.get() > 0 && doubleOpt.get() < 100); + assertTrue( + "heap usage should be 0 < heapUsage < 100 but was " + doubleOpt, + doubleOpt.get() > 0 && doubleOpt.get() < 100); doubleOpt = attributeValues.getNodeMetric(node, NodeMetricImpl.TOTAL_DISK_GB); assertTrue("total disk", doubleOpt.isPresent()); assertTrue("total disk should be > 0 but was " + doubleOpt, doubleOpt.get() > 0); @@ -387,8 +439,12 @@ public void testAttributeFetcherImpl() throws Exception { Optional intOpt = attributeValues.getNodeMetric(node, NodeMetricImpl.NUM_CORES); assertTrue("cores", intOpt.isPresent()); assertTrue("cores should be > 0", intOpt.get() > 0); - assertTrue("systemLoadAverage 2", attributeValues.getNodeMetric(node, NodeMetricImpl.SYSLOAD_AVG).isPresent()); - assertTrue("availableProcessors", attributeValues.getNodeMetric(node, NodeMetricImpl.AVAILABLE_PROCESSORS).isPresent()); + assertTrue( + "systemLoadAverage 2", + attributeValues.getNodeMetric(node, NodeMetricImpl.SYSLOAD_AVG).isPresent()); + assertTrue( + "availableProcessors", + attributeValues.getNodeMetric(node, NodeMetricImpl.AVAILABLE_PROCESSORS).isPresent()); Optional userNameOpt = attributeValues.getNodeMetric(node, someMetricKey); assertTrue("user.name", userNameOpt.isPresent()); assertEquals("userName", userName, userNameOpt.get()); @@ -398,22 +454,41 @@ public void testAttributeFetcherImpl() throws Exception { } assertTrue(attributeValues.getCollectionMetrics(COLLECTION).isPresent()); CollectionMetrics collectionMetrics = attributeValues.getCollectionMetrics(COLLECTION).get(); - collection.shards().forEach(shard -> { - Optional shardMetricsOpt = collectionMetrics.getShardMetrics(shard.getShardName()); - assertTrue("shard metrics", shardMetricsOpt.isPresent()); - shard.replicas().forEach(replica -> { - Optional replicaMetricsOpt = shardMetricsOpt.get().getReplicaMetrics(replica.getReplicaName()); - 
assertTrue("replica metrics", replicaMetricsOpt.isPresent()); - ReplicaMetrics replicaMetrics = replicaMetricsOpt.get(); - Optional indexSizeOpt = replicaMetrics.getReplicaMetric(ReplicaMetricImpl.INDEX_SIZE_GB); - assertTrue("indexSize", indexSizeOpt.isPresent()); - assertTrue("wrong type, expected Double but was " + indexSizeOpt.get().getClass(), indexSizeOpt.get() instanceof Double); - assertTrue("indexSize should be > 0 but was " + indexSizeOpt.get(), indexSizeOpt.get() > 0); - assertTrue("indexSize should be < 0.01 but was " + indexSizeOpt.get(), indexSizeOpt.get() < 0.01); - - assertNotNull("queryRate", replicaMetrics.getReplicaMetric(ReplicaMetricImpl.QUERY_RATE_1MIN)); - assertNotNull("updateRate", replicaMetrics.getReplicaMetric(ReplicaMetricImpl.UPDATE_RATE_1MIN)); - }); - }); + collection + .shards() + .forEach( + shard -> { + Optional shardMetricsOpt = + collectionMetrics.getShardMetrics(shard.getShardName()); + assertTrue("shard metrics", shardMetricsOpt.isPresent()); + shard + .replicas() + .forEach( + replica -> { + Optional replicaMetricsOpt = + shardMetricsOpt.get().getReplicaMetrics(replica.getReplicaName()); + assertTrue("replica metrics", replicaMetricsOpt.isPresent()); + ReplicaMetrics replicaMetrics = replicaMetricsOpt.get(); + Optional indexSizeOpt = + replicaMetrics.getReplicaMetric(ReplicaMetricImpl.INDEX_SIZE_GB); + assertTrue("indexSize", indexSizeOpt.isPresent()); + assertTrue( + "wrong type, expected Double but was " + indexSizeOpt.get().getClass(), + indexSizeOpt.get() instanceof Double); + assertTrue( + "indexSize should be > 0 but was " + indexSizeOpt.get(), + indexSizeOpt.get() > 0); + assertTrue( + "indexSize should be < 0.01 but was " + indexSizeOpt.get(), + indexSizeOpt.get() < 0.01); + + assertNotNull( + "queryRate", + replicaMetrics.getReplicaMetric(ReplicaMetricImpl.QUERY_RATE_1MIN)); + assertNotNull( + "updateRate", + replicaMetrics.getReplicaMetric(ReplicaMetricImpl.UPDATE_RATE_1MIN)); + }); + }); } } diff --git a/solr/core/src/test/org/apache/solr/cluster/placement/impl/SimpleClusterAbstractionsTest.java b/solr/core/src/test/org/apache/solr/cluster/placement/impl/SimpleClusterAbstractionsTest.java index 5e5877922dc..5d2e5228d60 100644 --- a/solr/core/src/test/org/apache/solr/cluster/placement/impl/SimpleClusterAbstractionsTest.java +++ b/solr/core/src/test/org/apache/solr/cluster/placement/impl/SimpleClusterAbstractionsTest.java @@ -17,6 +17,8 @@ package org.apache.solr.cluster.placement.impl; +import java.util.Locale; +import java.util.Set; import org.apache.solr.client.solrj.cloud.SolrCloudManager; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.cloud.SolrCloudTestCase; @@ -31,24 +33,19 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.util.Locale; -import java.util.Set; - -/** - * - */ +/** */ public class SimpleClusterAbstractionsTest extends SolrCloudTestCase { - private static final String COLLECTION = SimpleClusterAbstractionsTest.class.getName() + "_collection"; + private static final String COLLECTION = + SimpleClusterAbstractionsTest.class.getName() + "_collection"; private static SolrCloudManager cloudManager; @BeforeClass public static void setupCluster() throws Exception { - configureCluster(3) - .addConfig("conf", configset("cloud-minimal")) - .configure(); - cloudManager = cluster.getJettySolrRunner(0).getCoreContainer().getZkController().getSolrCloudManager(); + configureCluster(3).addConfig("conf", configset("cloud-minimal")).configure(); + cloudManager = + 
cluster.getJettySolrRunner(0).getCoreContainer().getZkController().getSolrCloudManager(); CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 2) .process(cluster.getSolrClient()); } @@ -76,14 +73,19 @@ public void testBasic() throws Exception { assertNotNull("no leader in shard " + shard, shard.getLeader()); Replica replica = shard.getLeader(); assertEquals(slice.getLeader().getName(), replica.getReplicaName()); - slice.getReplicas().forEach(sreplica -> { - Replica r = shard.getReplica(sreplica.getName()); - assertNotNull("missing replica " + sreplica.getName(), r); - assertEquals(r.getCoreName(), sreplica.getCoreName()); - assertEquals(r.getNode().getName(), sreplica.getNodeName()); - assertEquals(r.getState().toString().toLowerCase(Locale.ROOT), sreplica.getState().toString()); - assertEquals(r.getType().toString(), sreplica.getType().toString()); - }); + slice + .getReplicas() + .forEach( + sreplica -> { + Replica r = shard.getReplica(sreplica.getName()); + assertNotNull("missing replica " + sreplica.getName(), r); + assertEquals(r.getCoreName(), sreplica.getCoreName()); + assertEquals(r.getNode().getName(), sreplica.getNodeName()); + assertEquals( + r.getState().toString().toLowerCase(Locale.ROOT), + sreplica.getState().toString()); + assertEquals(r.getType().toString(), sreplica.getType().toString()); + }); } } } diff --git a/solr/core/src/test/org/apache/solr/cluster/placement/plugins/AffinityPlacementFactoryTest.java b/solr/core/src/test/org/apache/solr/cluster/placement/plugins/AffinityPlacementFactoryTest.java index 42e60130e08..37e4cf10365 100644 --- a/solr/core/src/test/org/apache/solr/cluster/placement/plugins/AffinityPlacementFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/cluster/placement/plugins/AffinityPlacementFactoryTest.java @@ -17,6 +17,12 @@ package org.apache.solr.cluster.placement.plugins; +import java.lang.invoke.MethodHandles; +import java.util.*; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.cluster.Cluster; import org.apache.solr.cluster.Node; @@ -34,29 +40,19 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.lang.invoke.MethodHandles; -import java.util.*; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.stream.Collectors; -import java.util.stream.StreamSupport; - -/** - * Unit test for {@link AffinityPlacementFactory} - */ +/** Unit test for {@link AffinityPlacementFactory} */ public class AffinityPlacementFactoryTest extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private PlacementPlugin plugin; - private final static long MINIMAL_FREE_DISK_GB = 10L; - private final static long PRIORITIZED_FREE_DISK_GB = 50L; - private final static String secondaryCollectionName = "withCollection_secondary"; - private final static String primaryCollectionName = "withCollection_primary"; + private static final long MINIMAL_FREE_DISK_GB = 10L; + private static final long PRIORITIZED_FREE_DISK_GB = 50L; + private static final String secondaryCollectionName = "withCollection_secondary"; + private static final String primaryCollectionName = "withCollection_primary"; - static AffinityPlacementConfig defaultConfig = new AffinityPlacementConfig( - MINIMAL_FREE_DISK_GB, - PRIORITIZED_FREE_DISK_GB); + static AffinityPlacementConfig 
defaultConfig = + new AffinityPlacementConfig(MINIMAL_FREE_DISK_GB, PRIORITIZED_FREE_DISK_GB); @Before public void setupPlugin() { @@ -80,17 +76,20 @@ public void testBasicPlacementExistingCollection() throws Exception { } /** - * When this test places a replica for a new collection, it should pick the node with less cores.<p> + * When this test places a replica for a new collection, it should pick the node with less cores. + * - * When it places a replica for an existing collection, it should pick the node with less cores that doesn't already have a replica for the shard. + * + * <p>When it places a replica for an existing collection, it should pick the node with less cores + * that doesn't already have a replica for the shard. */ private void testBasicPlacementInternal(boolean hasExistingCollection) throws Exception { String collectionName = "basicCollection"; Builders.ClusterBuilder clusterBuilder = Builders.newClusterBuilder().initializeLiveNodes(2); LinkedList<Builders.NodeBuilder> nodeBuilders = clusterBuilder.getLiveNodeBuilders(); - nodeBuilders.get(0).setCoreCount(1).setFreeDiskGB((double)(PRIORITIZED_FREE_DISK_GB + 1)); - nodeBuilders.get(1).setCoreCount(10).setFreeDiskGB((double)(PRIORITIZED_FREE_DISK_GB + 1)); + nodeBuilders.get(0).setCoreCount(1).setFreeDiskGB((double) (PRIORITIZED_FREE_DISK_GB + 1)); + nodeBuilders.get(1).setCoreCount(10).setFreeDiskGB((double) (PRIORITIZED_FREE_DISK_GB + 1)); Builders.CollectionBuilder collectionBuilder = Builders.newCollectionBuilder(collectionName); @@ -99,7 +98,8 @@ private void testBasicPlacementInternal(boolean hasExistingCollection) throws Ex collectionBuilder.initializeShardsReplicas(1, 1, 0, 0, nodeBuilders); clusterBuilder.addCollection(collectionBuilder); } else { - // New collection to create has the shards defined but no replicas and is not present in cluster state + // New collection to create has the shards defined but no replicas and is not present in + // cluster state collectionBuilder.initializeShardsReplicas(1, 0, 0, 0, List.of()); } @@ -109,9 +109,14 @@ private void testBasicPlacementInternal(boolean hasExistingCollection) throws Ex List<Node> liveNodes = clusterBuilder.buildLiveNodes(); // Place a new replica for the (only) existing shard of the collection - PlacementRequestImpl placementRequest = new PlacementRequestImpl(solrCollection, - Set.of(solrCollection.shards().iterator().next().getShardName()), new HashSet<>(liveNodes), - 1, 0, 0); + PlacementRequestImpl placementRequest = + new PlacementRequestImpl( + solrCollection, + Set.of(solrCollection.shards().iterator().next().getShardName()), + new HashSet<>(liveNodes), + 1, + 0, + 0); PlacementPlan pp = plugin.computePlacement(placementRequest, placementContext); @@ -120,9 +125,7 @@ private void testBasicPlacementInternal(boolean hasExistingCollection) throws Ex assertEquals(hasExistingCollection ?
liveNodes.get(1) : liveNodes.get(0), rp.getNode()); } - /** - * Test not placing replicas on nodes low free disk unless no other option - */ + /** Test not placing replicas on nodes low free disk unless no other option */ @Test public void testLowSpaceNode() throws Exception { String collectionName = "lowSpaceCollection"; @@ -135,11 +138,14 @@ public void testLowSpaceNode() throws Exception { LinkedList nodeBuilders = clusterBuilder.getLiveNodeBuilders(); for (int i = 0; i < nodeBuilders.size(); i++) { if (i == LOW_SPACE_NODE_INDEX) { - nodeBuilders.get(i).setCoreCount(1).setFreeDiskGB((double)(MINIMAL_FREE_DISK_GB + 1)); // Low space + nodeBuilders + .get(i) + .setCoreCount(1) + .setFreeDiskGB((double) (MINIMAL_FREE_DISK_GB + 1)); // Low space } else if (i == NO_SPACE_NODE_INDEX) { nodeBuilders.get(i).setCoreCount(10).setFreeDiskGB(1.0); // Really not enough space } else { - nodeBuilders.get(i).setCoreCount(10).setFreeDiskGB((double)(PRIORITIZED_FREE_DISK_GB + 1)); + nodeBuilders.get(i).setCoreCount(10).setFreeDiskGB((double) (PRIORITIZED_FREE_DISK_GB + 1)); } } List liveNodes = clusterBuilder.buildLiveNodes(); @@ -150,35 +156,53 @@ public void testLowSpaceNode() throws Exception { SolrCollection solrCollection = collectionBuilder.build(); // Place two replicas of each type for each shard - PlacementRequestImpl placementRequest = new PlacementRequestImpl(solrCollection, solrCollection.getShardNames(), new HashSet<>(liveNodes), - 2, 2, 2); + PlacementRequestImpl placementRequest = + new PlacementRequestImpl( + solrCollection, solrCollection.getShardNames(), new HashSet<>(liveNodes), 2, 2, 2); - PlacementPlan pp = plugin.computePlacement(placementRequest, clusterBuilder.buildPlacementContext()); + PlacementPlan pp = + plugin.computePlacement(placementRequest, clusterBuilder.buildPlacementContext()); assertEquals(18, pp.getReplicaPlacements().size()); // 3 shards, 6 replicas total each Set> placements = new HashSet<>(); for (ReplicaPlacement rp : pp.getReplicaPlacements()) { - assertTrue("two replicas for same shard placed on same node", placements.add(new Pair<>(rp.getShardName(), rp.getNode()))); - assertNotEquals("Replica unnecessarily placed on node with low free space", rp.getNode(), liveNodes.get(LOW_SPACE_NODE_INDEX)); - assertNotEquals("Replica placed on node with not enough free space", rp.getNode(), liveNodes.get(NO_SPACE_NODE_INDEX)); + assertTrue( + "two replicas for same shard placed on same node", + placements.add(new Pair<>(rp.getShardName(), rp.getNode()))); + assertNotEquals( + "Replica unnecessarily placed on node with low free space", + rp.getNode(), + liveNodes.get(LOW_SPACE_NODE_INDEX)); + assertNotEquals( + "Replica placed on node with not enough free space", + rp.getNode(), + liveNodes.get(NO_SPACE_NODE_INDEX)); } // Verify that if we ask for 7 replicas, the placement will use the low free space node - placementRequest = new PlacementRequestImpl(solrCollection, solrCollection.getShardNames(), new HashSet<>(liveNodes), - 7, 0, 0); + placementRequest = + new PlacementRequestImpl( + solrCollection, solrCollection.getShardNames(), new HashSet<>(liveNodes), 7, 0, 0); pp = plugin.computePlacement(placementRequest, clusterBuilder.buildPlacementContext()); assertEquals(21, pp.getReplicaPlacements().size()); // 3 shards, 7 replicas each placements = new HashSet<>(); for (ReplicaPlacement rp : pp.getReplicaPlacements()) { - assertEquals("Only NRT replicas should be created", Replica.ReplicaType.NRT, rp.getReplicaType()); - assertTrue("two replicas for same shard placed on same 
node", placements.add(new Pair<>(rp.getShardName(), rp.getNode()))); - assertNotEquals("Replica placed on node with not enough free space", rp.getNode(), liveNodes.get(NO_SPACE_NODE_INDEX)); + assertEquals( + "Only NRT replicas should be created", Replica.ReplicaType.NRT, rp.getReplicaType()); + assertTrue( + "two replicas for same shard placed on same node", + placements.add(new Pair<>(rp.getShardName(), rp.getNode()))); + assertNotEquals( + "Replica placed on node with not enough free space", + rp.getNode(), + liveNodes.get(NO_SPACE_NODE_INDEX)); } // Verify that if we ask for 8 replicas, the placement fails try { - placementRequest = new PlacementRequestImpl(solrCollection, solrCollection.getShardNames(), new HashSet<>(liveNodes), - 8, 0, 0); + placementRequest = + new PlacementRequestImpl( + solrCollection, solrCollection.getShardNames(), new HashSet<>(liveNodes), 8, 0, 0); plugin.computePlacement(placementRequest, clusterBuilder.buildPlacementContext()); fail("Placing 8 replicas should not be possible given only 7 nodes have enough space"); } catch (PlacementException e) { @@ -187,8 +211,8 @@ public void testLowSpaceNode() throws Exception { } /** - * Tests that existing collection replicas are taken into account when preventing more than one replica per shard to be - * placed on any node. + * Tests that existing collection replicas are taken into account when preventing more than one + * replica per shard to be placed on any node. */ @Test public void testPlacementWithExistingReplicas() throws Exception { @@ -199,40 +223,44 @@ public void testPlacementWithExistingReplicas() throws Exception { LinkedList nodeBuilders = clusterBuilder.getLiveNodeBuilders(); int coresOnNode = 10; for (Builders.NodeBuilder nodeBuilder : nodeBuilders) { - nodeBuilder.setCoreCount(coresOnNode).setFreeDiskGB((double)(PRIORITIZED_FREE_DISK_GB + 1)); + nodeBuilder.setCoreCount(coresOnNode).setFreeDiskGB((double) (PRIORITIZED_FREE_DISK_GB + 1)); coresOnNode += 10; } // The collection already exists with shards and replicas Builders.CollectionBuilder collectionBuilder = Builders.newCollectionBuilder(collectionName); - // Note that the collection as defined below is in a state that would NOT be returned by the placement plugin: - // shard 1 has two replicas on node 0. - // The plugin should still be able to place additional replicas as long as they don't break the rules. - List> shardsReplicas = List.of( - List.of("NRT 0", "TLOG 0", "NRT 3"), // shard 1 - List.of("NRT 1", "NRT 3", "TLOG 2")); // shard 2 + // Note that the collection as defined below is in a state that would NOT be returned by the + // placement plugin: shard 1 has two replicas on node 0. The plugin should still be able to + // place additional replicas as long as they don't break the rules. + List> shardsReplicas = + List.of( + List.of("NRT 0", "TLOG 0", "NRT 3"), // shard 1 + List.of("NRT 1", "NRT 3", "TLOG 2")); // shard 2 collectionBuilder.customCollectionSetup(shardsReplicas, nodeBuilders); SolrCollection solrCollection = collectionBuilder.build(); List liveNodes = clusterBuilder.buildLiveNodes(); // Place an additional NRT and an additional TLOG replica for each shard - PlacementRequestImpl placementRequest = new PlacementRequestImpl(solrCollection, solrCollection.getShardNames(), new HashSet<>(liveNodes), - 1, 1, 0); + PlacementRequestImpl placementRequest = + new PlacementRequestImpl( + solrCollection, solrCollection.getShardNames(), new HashSet<>(liveNodes), 1, 1, 0); - // The replicas must be placed on the most appropriate nodes, i.e. 
those that do not already have a replica for the - // shard and then on the node with the lowest number of cores. - // NRT are placed first and given the cluster state here the placement is deterministic (easier to test, only one good placement). - PlacementPlan pp = plugin.computePlacement(placementRequest, clusterBuilder.buildPlacementContext()); + // The replicas must be placed on the most appropriate nodes, i.e. those that do not already + // have a replica for the shard and then on the node with the lowest number of cores. NRT are + // placed first and given the cluster state here the placement is deterministic (easier to test, + // only one good placement). + PlacementPlan pp = + plugin.computePlacement(placementRequest, clusterBuilder.buildPlacementContext()); // Each expected placement is represented as a string "shard replica-type node" Set expectedPlacements = Set.of("1 NRT 1", "1 TLOG 2", "2 NRT 0", "2 TLOG 4"); verifyPlacements(expectedPlacements, pp, collectionBuilder.getShardBuilders(), liveNodes); } - /** - * Tests placement with multiple criteria: Replica type restricted nodes, Availability zones + existing collection + * Tests placement with multiple criteria: Replica type restricted nodes, Availability zones + + * existing collection */ @Test public void testPlacementMultiCriteria() throws Exception { @@ -302,7 +330,9 @@ public void testPlacementMultiCriteria() throws Exception { acceptedReplicaType = NRT_REPLICA_TYPE; } - nodeBuilders.get(i).setSysprop(AffinityPlacementConfig.AVAILABILITY_ZONE_SYSPROP, az) + nodeBuilders + .get(i) + .setSysprop(AffinityPlacementConfig.AVAILABILITY_ZONE_SYSPROP, az) .setSysprop(AffinityPlacementConfig.REPLICA_TYPE_SYSPROP, acceptedReplicaType) .setCoreCount(numcores) .setFreeDiskGB(freedisk); @@ -310,36 +340,53 @@ public void testPlacementMultiCriteria() throws Exception { // The collection already exists with shards and replicas. Builders.CollectionBuilder collectionBuilder = Builders.newCollectionBuilder(collectionName); - List> shardsReplicas = List.of( - List.of("NRT " + AZ1_NRT_HIGHCORES, "TLOG " + AZ3_TLOGPULL), // shard 1 - List.of("TLOG " + AZ2_TLOGPULL)); // shard 2 + List> shardsReplicas = + List.of( + List.of("NRT " + AZ1_NRT_HIGHCORES, "TLOG " + AZ3_TLOGPULL), // shard 1 + List.of("TLOG " + AZ2_TLOGPULL)); // shard 2 collectionBuilder.customCollectionSetup(shardsReplicas, nodeBuilders); SolrCollection solrCollection = collectionBuilder.build(); List liveNodes = clusterBuilder.buildLiveNodes(); // Add 2 NRT and one TLOG to each shard. - PlacementRequestImpl placementRequest = new PlacementRequestImpl(solrCollection, solrCollection.getShardNames(), new HashSet<>(liveNodes), - 2, 1, 0); - PlacementPlan pp = plugin.computePlacement(placementRequest, clusterBuilder.buildPlacementContext()); - // Shard 1: The NRT's should go to the med cores node on AZ2 and low core on az3 (even though - // a low core node can take the replica in az1, there's already an NRT replica there and we want spreading across AZ's), - // the TLOG to the TLOG node on AZ2 (because the tlog node on AZ1 has low free disk) - // Shard 2: The NRT's should go to AZ1 and AZ3 lowcores because AZ2 has more cores (and there's not NRT in any AZ for - // this shard). The TLOG should go to AZ3 because AZ1 TLOG node has low free disk. 
- // Each expected placement is represented as a string "shard replica-type node" - Set expectedPlacements = Set.of("1 NRT " + AZ2_NRT_MEDCORES, "1 NRT " + AZ3_NRT_LOWCORES, "1 TLOG " + AZ2_TLOGPULL, - "2 NRT " + AZ1_NRT_LOWCORES, "2 NRT " + AZ3_NRT_LOWCORES, "2 TLOG " + AZ3_TLOGPULL); + PlacementRequestImpl placementRequest = + new PlacementRequestImpl( + solrCollection, solrCollection.getShardNames(), new HashSet<>(liveNodes), 2, 1, 0); + PlacementPlan pp = + plugin.computePlacement(placementRequest, clusterBuilder.buildPlacementContext()); + // Shard 1: The NRT's should go to the med cores node on AZ2 and low core on az3 (even though a + // low core node can take the replica in az1, there's already an NRT replica there and we want + // spreading across AZ's), the TLOG to the TLOG node on AZ2 (because the tlog node on AZ1 has + // low free disk) + // Shard 2: The NRT's should go to AZ1 and AZ3 lowcores because AZ2 has more cores (and there's + // not NRT in any AZ for this shard). The TLOG should go to AZ3 because AZ1 TLOG node has low + // free disk. Each expected placement is represented as a string "shard replica-type node" + Set expectedPlacements = + Set.of( + "1 NRT " + AZ2_NRT_MEDCORES, + "1 NRT " + AZ3_NRT_LOWCORES, + "1 TLOG " + AZ2_TLOGPULL, + "2 NRT " + AZ1_NRT_LOWCORES, + "2 NRT " + AZ3_NRT_LOWCORES, + "2 TLOG " + AZ3_TLOGPULL); verifyPlacements(expectedPlacements, pp, collectionBuilder.getShardBuilders(), liveNodes); // If we add instead 2 PULL replicas to each shard - placementRequest = new PlacementRequestImpl(solrCollection, solrCollection.getShardNames(), new HashSet<>(liveNodes), - 0, 0, 2); + placementRequest = + new PlacementRequestImpl( + solrCollection, solrCollection.getShardNames(), new HashSet<>(liveNodes), 0, 0, 2); pp = plugin.computePlacement(placementRequest, clusterBuilder.buildPlacementContext()); - // Shard 1: Given node AZ3_TLOGPULL is taken by the TLOG replica, the PULL should go to AZ1_TLOGPULL_LOWFREEDISK and AZ2_TLOGPULL - // Shard 2: Similarly AZ2_TLOGPULL is taken. Replicas should go to AZ1_TLOGPULL_LOWFREEDISK and AZ3_TLOGPULL - expectedPlacements = Set.of("1 PULL " + AZ1_TLOGPULL_LOWFREEDISK, "1 PULL " + AZ2_TLOGPULL, - "2 PULL " + AZ1_TLOGPULL_LOWFREEDISK, "2 PULL " + AZ3_TLOGPULL); + // Shard 1: Given node AZ3_TLOGPULL is taken by the TLOG replica, the PULL should go to + // AZ1_TLOGPULL_LOWFREEDISK and AZ2_TLOGPULL + // Shard 2: Similarly AZ2_TLOGPULL is taken. Replicas should go to AZ1_TLOGPULL_LOWFREEDISK and + // AZ3_TLOGPULL + expectedPlacements = + Set.of( + "1 PULL " + AZ1_TLOGPULL_LOWFREEDISK, + "1 PULL " + AZ2_TLOGPULL, + "2 PULL " + AZ1_TLOGPULL_LOWFREEDISK, + "2 PULL " + AZ3_TLOGPULL); verifyPlacements(expectedPlacements, pp, collectionBuilder.getShardBuilders(), liveNodes); } @@ -354,9 +401,11 @@ public void testPlacementAzsCores() throws Exception { Builders.ClusterBuilder clusterBuilder = Builders.newClusterBuilder().initializeLiveNodes(9); LinkedList nodeBuilders = clusterBuilder.getLiveNodeBuilders(); for (int i = 0; i < 9; i++) { - nodeBuilders.get(i).setSysprop(AffinityPlacementConfig.AVAILABILITY_ZONE_SYSPROP, "AZ" + (i / 3)) + nodeBuilders + .get(i) + .setSysprop(AffinityPlacementConfig.AVAILABILITY_ZONE_SYSPROP, "AZ" + (i / 3)) .setCoreCount(i) - .setFreeDiskGB((double)(PRIORITIZED_FREE_DISK_GB + 10)); + .setFreeDiskGB((double) (PRIORITIZED_FREE_DISK_GB + 10)); } // The collection does not exist, has 1 shard. 
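(Why the expected order in the placements list below is node 0, 3, 6, 1, 4, 7, 2, 5, 8: with node i carrying i cores and living in AZ i/3, replicas are spread across availability zones first and, within an AZ, the node with the fewest cores wins. A small self-contained cross-check of that invariant, assuming exactly those two rules; this is a re-derivation for illustration, not Solr's placement code.)

import java.util.*;

public class AzOrderDemo {
  public static void main(String[] args) {
    // Node i has i cores and lives in AZ "AZ" + (i / 3), as in testPlacementAzsCores.
    Map<String, Deque<Integer>> byAz = new TreeMap<>();
    for (int i = 0; i < 9; i++) {
      // Nodes are added in increasing core count, so each deque is sorted already.
      byAz.computeIfAbsent("AZ" + (i / 3), az -> new ArrayDeque<>()).add(i);
    }
    List<Integer> order = new ArrayList<>();
    while (order.size() < 9) {
      for (Deque<Integer> nodes : byAz.values()) { // round-robin over AZs
        if (!nodes.isEmpty()) order.add(nodes.poll()); // fewest-cores node within the AZ
      }
    }
    System.out.println(order); // [0, 3, 6, 1, 4, 7, 2, 5, 8] — matches the test's expectations
  }
}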
@@ -368,27 +417,41 @@ public void testPlacementAzsCores() throws Exception { List liveNodes = clusterBuilder.buildLiveNodes(); // Test placing between 1 and 9 NRT replicas. check that it's done in order - List> placements = List.of( - Set.of("1 NRT 0"), - Set.of("1 NRT 0", "1 NRT 3"), - Set.of("1 NRT 0", "1 NRT 3", "1 NRT 6"), - Set.of("1 NRT 0", "1 NRT 3", "1 NRT 6", "1 NRT 1"), - Set.of("1 NRT 0", "1 NRT 3", "1 NRT 6", "1 NRT 1", "1 NRT 4"), - Set.of("1 NRT 0", "1 NRT 3", "1 NRT 6", "1 NRT 1", "1 NRT 4", "1 NRT 7"), - Set.of("1 NRT 0", "1 NRT 3", "1 NRT 6", "1 NRT 1", "1 NRT 4", "1 NRT 7", "1 NRT 2"), - Set.of("1 NRT 0", "1 NRT 3", "1 NRT 6", "1 NRT 1", "1 NRT 4", "1 NRT 7", "1 NRT 2", "1 NRT 5"), - Set.of("1 NRT 0", "1 NRT 3", "1 NRT 6", "1 NRT 1", "1 NRT 4", "1 NRT 7", "1 NRT 2", "1 NRT 5", "1 NRT 8")); + List> placements = + List.of( + Set.of("1 NRT 0"), + Set.of("1 NRT 0", "1 NRT 3"), + Set.of("1 NRT 0", "1 NRT 3", "1 NRT 6"), + Set.of("1 NRT 0", "1 NRT 3", "1 NRT 6", "1 NRT 1"), + Set.of("1 NRT 0", "1 NRT 3", "1 NRT 6", "1 NRT 1", "1 NRT 4"), + Set.of("1 NRT 0", "1 NRT 3", "1 NRT 6", "1 NRT 1", "1 NRT 4", "1 NRT 7"), + Set.of("1 NRT 0", "1 NRT 3", "1 NRT 6", "1 NRT 1", "1 NRT 4", "1 NRT 7", "1 NRT 2"), + Set.of( + "1 NRT 0", "1 NRT 3", "1 NRT 6", "1 NRT 1", "1 NRT 4", "1 NRT 7", "1 NRT 2", + "1 NRT 5"), + Set.of( + "1 NRT 0", "1 NRT 3", "1 NRT 6", "1 NRT 1", "1 NRT 4", "1 NRT 7", "1 NRT 2", + "1 NRT 5", "1 NRT 8")); for (int countNrtToPlace = 1; countNrtToPlace <= 9; countNrtToPlace++) { - PlacementRequestImpl placementRequest = new PlacementRequestImpl(solrCollection, solrCollection.getShardNames(), new HashSet<>(liveNodes), - countNrtToPlace, 0, 0); - PlacementPlan pp = plugin.computePlacement(placementRequest, clusterBuilder.buildPlacementContext()); - verifyPlacements(placements.get(countNrtToPlace - 1), pp, collectionBuilder.getShardBuilders(), liveNodes); + PlacementRequestImpl placementRequest = + new PlacementRequestImpl( + solrCollection, + solrCollection.getShardNames(), + new HashSet<>(liveNodes), + countNrtToPlace, + 0, + 0); + PlacementPlan pp = + plugin.computePlacement(placementRequest, clusterBuilder.buildPlacementContext()); + verifyPlacements( + placements.get(countNrtToPlace - 1), pp, collectionBuilder.getShardBuilders(), liveNodes); } } /** - * Tests that if a collection has replicas on nodes not currently live, placement for new replicas works ok. + * Tests that if a collection has replicas on nodes not currently live, placement for new replicas + * works ok. */ @Test public void testCollectionOnDeadNodes() throws Exception { @@ -399,38 +462,48 @@ public void testCollectionOnDeadNodes() throws Exception { LinkedList nodeBuilders = clusterBuilder.getLiveNodeBuilders(); int coreCount = 0; for (Builders.NodeBuilder nodeBuilder : nodeBuilders) { - nodeBuilder.setCoreCount(coreCount++).setFreeDiskGB((double)(PRIORITIZED_FREE_DISK_GB + 1)); + nodeBuilder.setCoreCount(coreCount++).setFreeDiskGB((double) (PRIORITIZED_FREE_DISK_GB + 1)); } // The collection already exists with shards and replicas Builders.CollectionBuilder collectionBuilder = Builders.newCollectionBuilder(collectionName); - // The collection below has shard 1 having replicas only on dead nodes and shard 2 no replicas at all... (which is - // likely a challenging condition to recover from, but the placement computations should still execute happily). 
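(The shardsReplicas strings in the hunk below follow the builders' "TYPE nodeIndex" convention, with 0-based node indexes; "NRT 10" and "TLOG 11" deliberately name indexes outside the live-node list, which is how the test fabricates replicas on dead nodes. A toy illustration of the convention; ReplicaSpec is a hypothetical record for this sketch, not part of the Solr test framework.)

// Illustrative only: parsing the "TYPE nodeIndex" strings accepted by
// Builders.CollectionBuilder#customCollectionSetup into a toy record.
public class ReplicaSpecDemo {
  record ReplicaSpec(String type, int nodeIndex) {
    static ReplicaSpec parse(String spec) {
      String[] parts = spec.split(" ");
      return new ReplicaSpec(parts[0], Integer.parseInt(parts[1]));
    }

    // With N live nodes (indexes 0..N-1), an index >= N can only be a dead node.
    boolean onLiveNode(int liveNodeCount) {
      return nodeIndex < liveNodeCount;
    }
  }

  public static void main(String[] args) {
    ReplicaSpec nrt = ReplicaSpec.parse("NRT 10");
    ReplicaSpec tlog = ReplicaSpec.parse("TLOG 11");
    // Both land on dead nodes for any cluster with at most 10 live nodes:
    System.out.println(nrt + " live=" + nrt.onLiveNode(10)); // false
    System.out.println(tlog + " live=" + tlog.onLiveNode(10)); // false
  }
}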
- List> shardsReplicas = List.of( - List.of("NRT 10", "TLOG 11"), // shard 1 - List.of()); // shard 2 + // The collection below has shard 1 having replicas only on dead nodes and shard 2 no replicas + // at all... (which is likely a challenging condition to recover from, but the placement + // computations should still execute happily). + List> shardsReplicas = + List.of( + List.of("NRT 10", "TLOG 11"), // shard 1 + List.of()); // shard 2 collectionBuilder.customCollectionSetup(shardsReplicas, nodeBuilders); SolrCollection solrCollection = collectionBuilder.build(); List liveNodes = clusterBuilder.buildLiveNodes(); // Place an additional PULL replica for shard 1 - PlacementRequestImpl placementRequest = new PlacementRequestImpl(solrCollection, Set.of(solrCollection.iterator().next().getShardName()), new HashSet<>(liveNodes), - 0, 0, 1); - - PlacementPlan pp = plugin.computePlacement(placementRequest, clusterBuilder.buildPlacementContext()); + PlacementRequestImpl placementRequest = + new PlacementRequestImpl( + solrCollection, + Set.of(solrCollection.iterator().next().getShardName()), + new HashSet<>(liveNodes), + 0, + 0, + 1); + + PlacementPlan pp = + plugin.computePlacement(placementRequest, clusterBuilder.buildPlacementContext()); // Each expected placement is represented as a string "shard replica-type node" // Node 0 has less cores than node 1 (0 vs 1) so the placement should go there. Set expectedPlacements = Set.of("1 PULL 0"); verifyPlacements(expectedPlacements, pp, collectionBuilder.getShardBuilders(), liveNodes); - // If we placed instead a replica for shard 2 (starting with the same initial cluster state, not including the first - // placement above), it should go too to node 0 since it has less cores... + // If we placed instead a replica for shard 2 (starting with the same initial cluster state, not + // including the first placement above), it should go too to node 0 since it has less cores... Iterator it = solrCollection.iterator(); it.next(); // skip first shard to do placement for the second one... - placementRequest = new PlacementRequestImpl(solrCollection, Set.of(it.next().getShardName()), new HashSet<>(liveNodes), - 0, 0, 1); + placementRequest = + new PlacementRequestImpl( + solrCollection, Set.of(it.next().getShardName()), new HashSet<>(liveNodes), 0, 0, 1); pp = plugin.computePlacement(placementRequest, clusterBuilder.buildPlacementContext()); expectedPlacements = Set.of("2 PULL 0"); verifyPlacements(expectedPlacements, pp, collectionBuilder.getShardBuilders(), liveNodes); @@ -438,15 +511,23 @@ public void testCollectionOnDeadNodes() throws Exception { /** * Verifies that a computed set of placements does match the expected placement on nodes. - * @param expectedPlacements a set of strings of the form {@code "1 NRT 3"} where 1 would be the shard index, NRT the - * replica type and 3 the node on which the replica is placed. Shards are 1-based. Nodes 0-based.
<p>
- * Read carefully: shard index and not shard name. Index in the order of shards as defined - * for the collection in the call to {@link org.apache.solr.cluster.placement.Builders.CollectionBuilder#customCollectionSetup(List, List)} - * @param shardBuilders the shard builders are passed here to get the shard names by index (1-based) rather than by - * parsing the shard names (which would break if we change the shard naming scheme). + * + * @param expectedPlacements a set of strings of the form {@code "1 NRT 3"} where 1 would be the + * shard index, NRT the replica type and 3 the node on which the replica is placed. Shards are + * 1-based. Nodes 0-based. + * <p>Read carefully: shard index and not shard name. Index in the order of + * shards as defined for the collection in the call to {@link + * org.apache.solr.cluster.placement.Builders.CollectionBuilder#customCollectionSetup(List, + * List)} + * @param shardBuilders the shard builders are passed here to get the shard names by index + * (1-based) rather than by parsing the shard names (which would break if we change the shard + * naming scheme). */ - private static void verifyPlacements(Set<String> expectedPlacements, PlacementPlan placementPlan, - List<Builders.ShardBuilder> shardBuilders, List<Node> liveNodes) { + private static void verifyPlacements( + Set<String> expectedPlacements, + PlacementPlan placementPlan, + List<Builders.ShardBuilder> shardBuilders, + List<Node> liveNodes) { Set<ReplicaPlacement> computedPlacements = placementPlan.getReplicaPlacements(); // Prepare structures for looking up shard name index and node index @@ -462,22 +543,40 @@ private static void verifyPlacements(Set<String> expectedPlacements, PlacementPl } if (expectedPlacements.size() != computedPlacements.size()) { - fail("Wrong number of placements, expected " + expectedPlacements.size() + " computed " + computedPlacements.size() + ". " + - getExpectedVsComputedPlacement(expectedPlacements, computedPlacements, shardNumbering, nodeNumbering)); + fail( + "Wrong number of placements, expected " + + expectedPlacements.size() + + " computed " + + computedPlacements.size() + + ". " + + getExpectedVsComputedPlacement( + expectedPlacements, computedPlacements, shardNumbering, nodeNumbering)); } Set<String> expected = new HashSet<>(expectedPlacements); for (ReplicaPlacement p : computedPlacements) { - String lookUpPlacementResult = shardNumbering.get(p.getShardName()) + " " + p.getReplicaType().name() + " " + nodeNumbering.get(p.getNode()); + String lookUpPlacementResult = + shardNumbering.get(p.getShardName()) + + " " + + p.getReplicaType().name() + + " " + + nodeNumbering.get(p.getNode()); if (!expected.remove(lookUpPlacementResult)) { - fail("Computed placement [" + lookUpPlacementResult + "] not expected. " + - getExpectedVsComputedPlacement(expectedPlacements, computedPlacements, shardNumbering, nodeNumbering)); + fail( + "Computed placement [" + + lookUpPlacementResult + + "] not expected.
" + + getExpectedVsComputedPlacement( + expectedPlacements, computedPlacements, shardNumbering, nodeNumbering)); } } } - private static String getExpectedVsComputedPlacement(Set expectedPlacements, Set computedPlacements, - Map shardNumbering, Map nodeNumbering) { + private static String getExpectedVsComputedPlacement( + Set expectedPlacements, + Set computedPlacements, + Map shardNumbering, + Map nodeNumbering) { StringBuilder sb = new StringBuilder("Expected placement: "); for (String placement : expectedPlacements) { @@ -486,7 +585,12 @@ private static String getExpectedVsComputedPlacement(Set expectedPlaceme sb.append("Computed placement: "); for (ReplicaPlacement placement : computedPlacements) { - String lookUpPlacementResult = shardNumbering.get(placement.getShardName()) + " " + placement.getReplicaType().name() + " " + nodeNumbering.get(placement.getNode()); + String lookUpPlacementResult = + shardNumbering.get(placement.getShardName()) + + " " + + placement.getReplicaType().name() + + " " + + nodeNumbering.get(placement.getNode()); sb.append("[").append(lookUpPlacementResult).append("] "); } @@ -498,7 +602,8 @@ private static String getExpectedVsComputedPlacement(Set expectedPlaceme public void testAvailabilityZones() throws Exception { String collectionName = "azCollection"; int NUM_NODES = 6; - Builders.ClusterBuilder clusterBuilder = Builders.newClusterBuilder().initializeLiveNodes(NUM_NODES); + Builders.ClusterBuilder clusterBuilder = + Builders.newClusterBuilder().initializeLiveNodes(NUM_NODES); for (int i = 0; i < NUM_NODES; i++) { Builders.NodeBuilder nodeBuilder = clusterBuilder.getLiveNodeBuilders().get(i); nodeBuilder.setCoreCount(0); @@ -519,10 +624,16 @@ public void testAvailabilityZones() throws Exception { SolrCollection solrCollection = cluster.getCollection(collectionName); - PlacementRequestImpl placementRequest = new PlacementRequestImpl(solrCollection, - StreamSupport.stream(solrCollection.shards().spliterator(), false) - .map(Shard::getShardName).collect(Collectors.toSet()), - cluster.getLiveNodes(), 2, 2, 2); + PlacementRequestImpl placementRequest = + new PlacementRequestImpl( + solrCollection, + StreamSupport.stream(solrCollection.shards().spliterator(), false) + .map(Shard::getShardName) + .collect(Collectors.toSet()), + cluster.getLiveNodes(), + 2, + 2, + 2); PlacementPlan pp = plugin.computePlacement(placementRequest, placementContext); // 2 shards, 6 replicas @@ -531,31 +642,40 @@ public void testAvailabilityZones() throws Exception { Map>> replicas = new HashMap<>(); AttributeValues attributeValues = placementContext.getAttributeFetcher().fetchAttributes(); for (ReplicaPlacement rp : pp.getReplicaPlacements()) { - Optional azOptional = attributeValues.getSystemProperty(rp.getNode(), AffinityPlacementConfig.AVAILABILITY_ZONE_SYSPROP); + Optional azOptional = + attributeValues.getSystemProperty( + rp.getNode(), AffinityPlacementConfig.AVAILABILITY_ZONE_SYSPROP); if (!azOptional.isPresent()) { fail("missing AZ sysprop for node " + rp.getNode()); } String az = azOptional.get(); - replicas.computeIfAbsent(rp.getReplicaType(), type -> new HashMap<>()) + replicas + .computeIfAbsent(rp.getReplicaType(), type -> new HashMap<>()) .computeIfAbsent(rp.getShardName(), shard -> new HashMap<>()) - .computeIfAbsent(az, zone -> new AtomicInteger()).incrementAndGet(); - } - replicas.forEach((type, perTypeReplicas) -> { - perTypeReplicas.forEach((shard, azCounts) -> { - assertEquals("number of AZs", 2, azCounts.size()); - azCounts.forEach((az, count) -> { - 
assertTrue("too few replicas shard=" + shard + ", type=" + type + ", az=" + az, - count.get() >= 1); + .computeIfAbsent(az, zone -> new AtomicInteger()) + .incrementAndGet(); + } + replicas.forEach( + (type, perTypeReplicas) -> { + perTypeReplicas.forEach( + (shard, azCounts) -> { + assertEquals("number of AZs", 2, azCounts.size()); + azCounts.forEach( + (az, count) -> { + assertTrue( + "too few replicas shard=" + shard + ", type=" + type + ", az=" + az, + count.get() >= 1); + }); + }); }); - }); - }); } @Test public void testReplicaType() throws Exception { String collectionName = "replicaTypeCollection"; int NUM_NODES = 6; - Builders.ClusterBuilder clusterBuilder = Builders.newClusterBuilder().initializeLiveNodes(NUM_NODES); + Builders.ClusterBuilder clusterBuilder = + Builders.newClusterBuilder().initializeLiveNodes(NUM_NODES); for (int i = 0; i < NUM_NODES; i++) { Builders.NodeBuilder nodeBuilder = clusterBuilder.getLiveNodeBuilders().get(i); nodeBuilder.setCoreCount(0); @@ -578,10 +698,16 @@ public void testReplicaType() throws Exception { SolrCollection solrCollection = cluster.getCollection(collectionName); - PlacementRequestImpl placementRequest = new PlacementRequestImpl(solrCollection, - StreamSupport.stream(solrCollection.shards().spliterator(), false) - .map(Shard::getShardName).collect(Collectors.toSet()), - cluster.getLiveNodes(), 2, 2, 2); + PlacementRequestImpl placementRequest = + new PlacementRequestImpl( + solrCollection, + StreamSupport.stream(solrCollection.shards().spliterator(), false) + .map(Shard::getShardName) + .collect(Collectors.toSet()), + cluster.getLiveNodes(), + 2, + 2, + 2); PlacementPlan pp = plugin.computePlacement(placementRequest, placementContext); // 2 shards, 6 replicas @@ -596,32 +722,41 @@ public void testReplicaType() throws Exception { } String group = groupOptional.get(); if (group.equals("one")) { - assertTrue("wrong replica type in group one", - (rp.getReplicaType() == Replica.ReplicaType.NRT) || rp.getReplicaType() == Replica.ReplicaType.TLOG); + assertTrue( + "wrong replica type in group one", + (rp.getReplicaType() == Replica.ReplicaType.NRT) + || rp.getReplicaType() == Replica.ReplicaType.TLOG); } else { - assertEquals("wrong replica type in group two", Replica.ReplicaType.PULL, rp.getReplicaType()); + assertEquals( + "wrong replica type in group two", Replica.ReplicaType.PULL, rp.getReplicaType()); } - replicas.computeIfAbsent(rp.getReplicaType(), type -> new HashMap<>()) + replicas + .computeIfAbsent(rp.getReplicaType(), type -> new HashMap<>()) .computeIfAbsent(rp.getShardName(), shard -> new HashMap<>()) - .computeIfAbsent(group, g -> new AtomicInteger()).incrementAndGet(); - } - replicas.forEach((type, perTypeReplicas) -> { - perTypeReplicas.forEach((shard, groupCounts) -> { - assertEquals("number of groups", 1, groupCounts.size()); - groupCounts.forEach((group, count) -> { - assertTrue("too few replicas shard=" + shard + ", type=" + type + ", group=" + group, - count.get() >= 1); + .computeIfAbsent(group, g -> new AtomicInteger()) + .incrementAndGet(); + } + replicas.forEach( + (type, perTypeReplicas) -> { + perTypeReplicas.forEach( + (shard, groupCounts) -> { + assertEquals("number of groups", 1, groupCounts.size()); + groupCounts.forEach( + (group, count) -> { + assertTrue( + "too few replicas shard=" + shard + ", type=" + type + ", group=" + group, + count.get() >= 1); + }); + }); }); - }); - }); - } @Test public void testFreeDiskConstraints() throws Exception { String collectionName = "freeDiskCollection"; int NUM_NODES = 3; - 
Builders.ClusterBuilder clusterBuilder = Builders.newClusterBuilder().initializeLiveNodes(NUM_NODES); + Builders.ClusterBuilder clusterBuilder = + Builders.newClusterBuilder().initializeLiveNodes(NUM_NODES); Node smallNode = null; for (int i = 0; i < NUM_NODES; i++) { Builders.NodeBuilder nodeBuilder = clusterBuilder.getLiveNodeBuilders().get(i); @@ -644,10 +779,16 @@ public void testFreeDiskConstraints() throws Exception { SolrCollection solrCollection = cluster.getCollection(collectionName); - PlacementRequestImpl placementRequest = new PlacementRequestImpl(solrCollection, - StreamSupport.stream(solrCollection.shards().spliterator(), false) - .map(Shard::getShardName).collect(Collectors.toSet()), - cluster.getLiveNodes(), 1, 0, 1); + PlacementRequestImpl placementRequest = + new PlacementRequestImpl( + solrCollection, + StreamSupport.stream(solrCollection.shards().spliterator(), false) + .map(Shard::getShardName) + .collect(Collectors.toSet()), + cluster.getLiveNodes(), + 1, + 0, + 1); PlacementPlan pp = plugin.computePlacement(placementRequest, placementContext); assertEquals(4, pp.getReplicaPlacements().size()); @@ -658,15 +799,19 @@ public void testFreeDiskConstraints() throws Exception { @Test public void testWithCollectionPlacement() throws Exception { - AffinityPlacementConfig config = new AffinityPlacementConfig( + AffinityPlacementConfig config = + new AffinityPlacementConfig( MINIMAL_FREE_DISK_GB, PRIORITIZED_FREE_DISK_GB, - Map.of(primaryCollectionName, secondaryCollectionName), Map.of()); + Map.of(primaryCollectionName, secondaryCollectionName), + Map.of()); configurePlugin(config); int NUM_NODES = 3; - Builders.ClusterBuilder clusterBuilder = Builders.newClusterBuilder().initializeLiveNodes(NUM_NODES); - Builders.CollectionBuilder collectionBuilder = Builders.newCollectionBuilder(secondaryCollectionName); + Builders.ClusterBuilder clusterBuilder = + Builders.newClusterBuilder().initializeLiveNodes(NUM_NODES); + Builders.CollectionBuilder collectionBuilder = + Builders.newCollectionBuilder(secondaryCollectionName); collectionBuilder.initializeShardsReplicas(1, 2, 0, 0, clusterBuilder.getLiveNodeBuilders()); clusterBuilder.addCollection(collectionBuilder); @@ -681,21 +826,27 @@ public void testWithCollectionPlacement() throws Exception { SolrCollection primaryCollection = cluster.getCollection(primaryCollectionName); Set secondaryNodes = new HashSet<>(); - secondaryCollection.shards().forEach(s -> s.replicas().forEach(r -> secondaryNodes.add(r.getNode()))); - - PlacementRequestImpl placementRequest = new PlacementRequestImpl(primaryCollection, - Set.of("shard1", "shard2"), cluster.getLiveNodes(), 1, 0, 0); + secondaryCollection + .shards() + .forEach(s -> s.replicas().forEach(r -> secondaryNodes.add(r.getNode()))); + PlacementRequestImpl placementRequest = + new PlacementRequestImpl( + primaryCollection, Set.of("shard1", "shard2"), cluster.getLiveNodes(), 1, 0, 0); PlacementPlan pp = plugin.computePlacement(placementRequest, placementContext); assertEquals(2, pp.getReplicaPlacements().size()); // verify that all placements are on nodes with the secondary replica - pp.getReplicaPlacements().forEach(placement -> - assertTrue("placement node " + placement.getNode() + " not in secondary=" + secondaryNodes, - secondaryNodes.contains(placement.getNode()))); - - placementRequest = new PlacementRequestImpl(primaryCollection, - Set.of("shard1"), cluster.getLiveNodes(), 3, 0, 0); + pp.getReplicaPlacements() + .forEach( + placement -> + assertTrue( + "placement node " + 
placement.getNode() + " not in secondary=" + secondaryNodes, + secondaryNodes.contains(placement.getNode()))); + + placementRequest = + new PlacementRequestImpl( + primaryCollection, Set.of("shard1"), cluster.getLiveNodes(), 3, 0, 0); try { pp = plugin.computePlacement(placementRequest, placementContext); fail("should generate 'Not enough eligible nodes' failure here"); @@ -706,15 +857,19 @@ public void testWithCollectionPlacement() throws Exception { @Test public void testWithCollectionModificationRejected() throws Exception { - AffinityPlacementConfig config = new AffinityPlacementConfig( + AffinityPlacementConfig config = + new AffinityPlacementConfig( MINIMAL_FREE_DISK_GB, PRIORITIZED_FREE_DISK_GB, - Map.of(primaryCollectionName, secondaryCollectionName), Map.of()); + Map.of(primaryCollectionName, secondaryCollectionName), + Map.of()); configurePlugin(config); int NUM_NODES = 2; - Builders.ClusterBuilder clusterBuilder = Builders.newClusterBuilder().initializeLiveNodes(NUM_NODES); - Builders.CollectionBuilder collectionBuilder = Builders.newCollectionBuilder(secondaryCollectionName); + Builders.ClusterBuilder clusterBuilder = + Builders.newClusterBuilder().initializeLiveNodes(NUM_NODES); + Builders.CollectionBuilder collectionBuilder = + Builders.newCollectionBuilder(secondaryCollectionName); collectionBuilder.initializeShardsReplicas(1, 4, 0, 0, clusterBuilder.getLiveNodeBuilders()); clusterBuilder.addCollection(collectionBuilder); @@ -730,28 +885,42 @@ public void testWithCollectionModificationRejected() throws Exception { Node node = cluster.getLiveNodes().iterator().next(); Set secondaryReplicas = new HashSet<>(); - secondaryCollection.shards().forEach(shard -> - shard.replicas().forEach(replica -> { - if (secondaryReplicas.size() < 1 && replica.getNode().equals(node)) { - secondaryReplicas.add(replica); - } - })); - - DeleteReplicasRequest deleteReplicasRequest = ModificationRequestImpl.createDeleteReplicasRequest(secondaryCollection, secondaryReplicas); + secondaryCollection + .shards() + .forEach( + shard -> + shard + .replicas() + .forEach( + replica -> { + if (secondaryReplicas.size() < 1 && replica.getNode().equals(node)) { + secondaryReplicas.add(replica); + } + })); + + DeleteReplicasRequest deleteReplicasRequest = + ModificationRequestImpl.createDeleteReplicasRequest(secondaryCollection, secondaryReplicas); try { plugin.verifyAllowedModification(deleteReplicasRequest, placementContext); } catch (PlacementException pe) { fail("should have succeeded: " + pe.toString()); } - secondaryCollection.shards().forEach(shard -> - shard.replicas().forEach(replica -> { - if (secondaryReplicas.size() < 2 && replica.getNode().equals(node)) { - secondaryReplicas.add(replica); - } - })); - - deleteReplicasRequest = ModificationRequestImpl.createDeleteReplicasRequest(secondaryCollection, secondaryReplicas); + secondaryCollection + .shards() + .forEach( + shard -> + shard + .replicas() + .forEach( + replica -> { + if (secondaryReplicas.size() < 2 && replica.getNode().equals(node)) { + secondaryReplicas.add(replica); + } + })); + + deleteReplicasRequest = + ModificationRequestImpl.createDeleteReplicasRequest(secondaryCollection, secondaryReplicas); try { plugin.verifyAllowedModification(deleteReplicasRequest, placementContext); fail("should have failed: " + deleteReplicasRequest); @@ -772,10 +941,12 @@ public void testNodeType() throws Exception { collectionBuilder.initializeShardsReplicas(1, 0, 0, 0, clusterBuilder.getLiveNodeBuilders()); // test single node type in collection - 
AffinityPlacementConfig config = new AffinityPlacementConfig( + AffinityPlacementConfig config = + new AffinityPlacementConfig( MINIMAL_FREE_DISK_GB, PRIORITIZED_FREE_DISK_GB, - Map.of(), Map.of(collectionName, "type_0")); + Map.of(), + Map.of(collectionName, "type_0")); configurePlugin(config); clusterBuilder.addCollection(collectionBuilder); @@ -783,17 +954,27 @@ public void testNodeType() throws Exception { PlacementContext placementContext = clusterBuilder.buildPlacementContext(); Map> nodeNamesByType = new HashMap<>(); Cluster cluster = placementContext.getCluster(); - AttributeValues attributeValues = placementContext.getAttributeFetcher() + AttributeValues attributeValues = + placementContext + .getAttributeFetcher() .requestNodeSystemProperty(AffinityPlacementConfig.NODE_TYPE_SYSPROP) .fetchAttributes(); - placementContext.getCluster().getLiveNodes().forEach(n -> - nodeNamesByType - .computeIfAbsent(attributeValues.getSystemProperty(n, AffinityPlacementConfig.NODE_TYPE_SYSPROP).get(), type -> new HashSet<>()) - .add(n.getName()) - ); + placementContext + .getCluster() + .getLiveNodes() + .forEach( + n -> + nodeNamesByType + .computeIfAbsent( + attributeValues + .getSystemProperty(n, AffinityPlacementConfig.NODE_TYPE_SYSPROP) + .get(), + type -> new HashSet<>()) + .add(n.getName())); SolrCollection collection = placementContext.getCluster().getCollection(collectionName); - PlacementRequestImpl placementRequest = new PlacementRequestImpl(collection, - Set.of("shard1"), placementContext.getCluster().getLiveNodes(), 3, 0, 0); + PlacementRequestImpl placementRequest = + new PlacementRequestImpl( + collection, Set.of("shard1"), placementContext.getCluster().getLiveNodes(), 3, 0, 0); PlacementPlan pp = plugin.computePlacement(placementRequest, placementContext); assertEquals("expected 3 placements: " + pp, 3, pp.getReplicaPlacements().size()); @@ -806,22 +987,25 @@ public void testNodeType() throws Exception { } // test 2 node types in collection - config = new AffinityPlacementConfig( + config = + new AffinityPlacementConfig( MINIMAL_FREE_DISK_GB, PRIORITIZED_FREE_DISK_GB, - Map.of(), Map.of(collectionName, "type_0,type_1")); + Map.of(), + Map.of(collectionName, "type_0,type_1")); configurePlugin(config); placementContext = clusterBuilder.buildPlacementContext(); collection = placementContext.getCluster().getCollection(collectionName); - placementRequest = new PlacementRequestImpl(collection, - Set.of("shard1"), placementContext.getCluster().getLiveNodes(), 6, 0, 0); + placementRequest = + new PlacementRequestImpl( + collection, Set.of("shard1"), placementContext.getCluster().getLiveNodes(), 6, 0, 0); pp = plugin.computePlacement(placementRequest, placementContext); assertEquals("expected 6 placements: " + pp, 6, pp.getReplicaPlacements().size()); for (ReplicaPlacement p : pp.getReplicaPlacements()) { - assertTrue(type0nodes.contains(p.getNode().getName()) || - type1nodes.contains(p.getNode().getName())); + assertTrue( + type0nodes.contains(p.getNode().getName()) || type1nodes.contains(p.getNode().getName())); } // test 2 node types in nodes @@ -837,36 +1021,44 @@ public void testNodeType() throws Exception { placementContext = clusterBuilder.buildPlacementContext(); collection = placementContext.getCluster().getCollection(collectionName); - placementRequest = new PlacementRequestImpl(collection, - Set.of("shard1"), placementContext.getCluster().getLiveNodes(), 6, 0, 0); + placementRequest = + new PlacementRequestImpl( + collection, Set.of("shard1"), 
placementContext.getCluster().getLiveNodes(), 6, 0, 0); pp = plugin.computePlacement(placementRequest, placementContext); assertEquals("expected 6 placements: " + pp, 6, pp.getReplicaPlacements().size()); nodeNamesByType.clear(); - AttributeValues attributeValues2 = placementContext.getAttributeFetcher() + AttributeValues attributeValues2 = + placementContext + .getAttributeFetcher() .requestNodeSystemProperty(AffinityPlacementConfig.NODE_TYPE_SYSPROP) .fetchAttributes(); - placementContext.getCluster().getLiveNodes().forEach(n -> { - String nodeTypesStr = attributeValues2.getSystemProperty(n, AffinityPlacementConfig.NODE_TYPE_SYSPROP).get(); - for (String nodeType : StrUtils.splitSmart(nodeTypesStr, ',')) { - nodeNamesByType - .computeIfAbsent(nodeType, type -> new HashSet<>()) - .add(n.getName()); - } - }); + placementContext + .getCluster() + .getLiveNodes() + .forEach( + n -> { + String nodeTypesStr = + attributeValues2 + .getSystemProperty(n, AffinityPlacementConfig.NODE_TYPE_SYSPROP) + .get(); + for (String nodeType : StrUtils.splitSmart(nodeTypesStr, ',')) { + nodeNamesByType.computeIfAbsent(nodeType, type -> new HashSet<>()).add(n.getName()); + } + }); type0nodes = nodeNamesByType.get("type_0"); type1nodes = nodeNamesByType.get("type_1"); for (ReplicaPlacement p : pp.getReplicaPlacements()) { - assertTrue(type0nodes.contains(p.getNode().getName()) || - type1nodes.contains(p.getNode().getName())); + assertTrue( + type0nodes.contains(p.getNode().getName()) || type1nodes.contains(p.getNode().getName())); } - } - - @Test @Slow + @Test + @Slow public void testScalability() throws Exception { - // for non-nightly we scale a bit, but retain test speed - for nightly test speed can be 2+ minutes + // for non-nightly we scale a bit, but retain test speed - for nightly test speed can be 2+ + // minutes int numShards = TEST_NIGHTLY ? 100 : 10; int nrtReplicas = TEST_NIGHTLY ? 40 : 4; @@ -892,14 +1084,17 @@ public void testScalability() throws Exception { runTestScalability(numNodes, numShards, TEST_NIGHTLY ? 100 : 10, 0, 0); runTestScalability(numNodes, numShards, TEST_NIGHTLY ? 200 : 20, 0, 0); runTestScalability(numNodes, numShards, TEST_NIGHTLY ? 500 : 50, 0, 0); - runTestScalability(numNodes, numShards, TEST_NIGHTLY ? 1000 : 30, 0, 0); + runTestScalability(numNodes, numShards, TEST_NIGHTLY ? 1000 : 30, 0, 0); runTestScalability(numNodes, numShards, TEST_NIGHTLY ? 
2000 : 50, 0, 0); } - private void runTestScalability(int numNodes, int numShards, int nrtReplicas, int tlogReplicas, int pullReplicas) throws Exception { + private void runTestScalability( + int numNodes, int numShards, int nrtReplicas, int tlogReplicas, int pullReplicas) + throws Exception { String collectionName = "scaleCollection"; - Builders.ClusterBuilder clusterBuilder = Builders.newClusterBuilder().initializeLiveNodes(numNodes); + Builders.ClusterBuilder clusterBuilder = + Builders.newClusterBuilder().initializeLiveNodes(numNodes); LinkedList nodeBuilders = clusterBuilder.getLiveNodeBuilders(); for (int i = 0; i < numNodes; i++) { nodeBuilders.get(i).setCoreCount(0).setFreeDiskGB((double) numNodes); @@ -912,9 +1107,16 @@ private void runTestScalability(int numNodes, int numShards, int nrtReplicas, in SolrCollection solrCollection = collectionBuilder.build(); List liveNodes = clusterBuilder.buildLiveNodes(); - // Place replicas for all the shards of the (newly created since it has no replicas yet) collection - PlacementRequestImpl placementRequest = new PlacementRequestImpl(solrCollection, solrCollection.getShardNames(), - new HashSet<>(liveNodes), nrtReplicas, tlogReplicas, pullReplicas); + // Place replicas for all the shards of the (newly created since it has no replicas yet) + // collection + PlacementRequestImpl placementRequest = + new PlacementRequestImpl( + solrCollection, + solrCollection.getShardNames(), + new HashSet<>(liveNodes), + nrtReplicas, + tlogReplicas, + pullReplicas); long start = System.nanoTime(); PlacementPlan pp = plugin.computePlacement(placementRequest, placementContext); @@ -923,8 +1125,15 @@ private void runTestScalability(int numNodes, int numShards, int nrtReplicas, in final int REPLICAS_PER_SHARD = nrtReplicas + tlogReplicas + pullReplicas; final int TOTAL_REPLICAS = numShards * REPLICAS_PER_SHARD; - log.info("ComputePlacement: {} nodes, {} shards, {} total replicas, elapsed time {} ms.", numNodes, numShards, TOTAL_REPLICAS, TimeUnit.NANOSECONDS.toMillis(end - start)); //nowarn - assertEquals("incorrect number of calculated placements", TOTAL_REPLICAS, + log.info( + "ComputePlacement: {} nodes, {} shards, {} total replicas, elapsed time {} ms.", + numNodes, + numShards, + TOTAL_REPLICAS, + TimeUnit.NANOSECONDS.toMillis(end - start)); // nowarn + assertEquals( + "incorrect number of calculated placements", + TOTAL_REPLICAS, pp.getReplicaPlacements().size()); // check that replicas are correctly placed Map replicasPerNode = new HashMap<>(); @@ -932,21 +1141,32 @@ private void runTestScalability(int numNodes, int numShards, int nrtReplicas, in Map replicasPerShard = new HashMap<>(); Map replicasByType = new HashMap<>(); for (ReplicaPlacement placement : pp.getReplicaPlacements()) { - replicasPerNode.computeIfAbsent(placement.getNode(), n -> new AtomicInteger()).incrementAndGet(); - shardsPerNode.computeIfAbsent(placement.getNode(), n -> new HashSet<>()).add(placement.getShardName()); - replicasByType.computeIfAbsent(placement.getReplicaType(), t -> new AtomicInteger()).incrementAndGet(); - replicasPerShard.computeIfAbsent(placement.getShardName(), s -> new AtomicInteger()).incrementAndGet(); + replicasPerNode + .computeIfAbsent(placement.getNode(), n -> new AtomicInteger()) + .incrementAndGet(); + shardsPerNode + .computeIfAbsent(placement.getNode(), n -> new HashSet<>()) + .add(placement.getShardName()); + replicasByType + .computeIfAbsent(placement.getReplicaType(), t -> new AtomicInteger()) + .incrementAndGet(); + replicasPerShard + 
.computeIfAbsent(placement.getShardName(), s -> new AtomicInteger()) + .incrementAndGet(); } int perNode = TOTAL_REPLICAS > numNodes ? TOTAL_REPLICAS / numNodes : 1; - replicasPerNode.forEach((node, count) -> { - assertEquals(count.get(), perNode); - }); - shardsPerNode.forEach((node, names) -> { - assertEquals(names.size(), perNode); - }); - - replicasPerShard.forEach((shard, count) -> { - assertEquals(count.get(), REPLICAS_PER_SHARD); - }); + replicasPerNode.forEach( + (node, count) -> { + assertEquals(count.get(), perNode); + }); + shardsPerNode.forEach( + (node, names) -> { + assertEquals(names.size(), perNode); + }); + + replicasPerShard.forEach( + (shard, count) -> { + assertEquals(count.get(), REPLICAS_PER_SHARD); + }); } } diff --git a/solr/core/src/test/org/apache/solr/common/cloud/ZkDynamicConfigTest.java b/solr/core/src/test/org/apache/solr/common/cloud/ZkDynamicConfigTest.java index 55e9354b99c..cf331ee2da7 100644 --- a/solr/core/src/test/org/apache/solr/common/cloud/ZkDynamicConfigTest.java +++ b/solr/core/src/test/org/apache/solr/common/cloud/ZkDynamicConfigTest.java @@ -24,13 +24,15 @@ public class ZkDynamicConfigTest extends SolrTestCaseJ4 { @Test public void parseLines() { - ZkDynamicConfig parsed = ZkDynamicConfig.parseLines( - "ignored-line\n" + - "server.1=zoo1:2780:2783:participant;0.0.0.0:2181\n" + - "server.2=zoo2:2781:2784:participant|zoo3:2783;2181\n" + - "server.3=zoo3:2782:2785;zoo3-client:2181\n" + - "server.4=zoo4:2783:2786:participant\n" + // this assumes clientPort specified in static config - "version=400000003"); + ZkDynamicConfig parsed = + ZkDynamicConfig.parseLines( + "ignored-line\n" + + "server.1=zoo1:2780:2783:participant;0.0.0.0:2181\n" + + "server.2=zoo2:2781:2784:participant|zoo3:2783;2181\n" + + "server.3=zoo3:2782:2785;zoo3-client:2181\n" + + "server.4=zoo4:2783:2786:participant\n" + + // this assumes clientPort specified in static config + "version=400000003"); assertEquals(4, parsed.size()); assertEquals("zoo1", parsed.getServers().get(0).address); @@ -62,8 +64,9 @@ public void parseLines() { @Test(expected = SolrException.class) public void parseLinesInvalid() { ZkDynamicConfig.parseLines( - "server.1=zoo2:2781:2784:participant|zoo3:2783;0.0.0.0:2181\n" + - "server.2=zoo3:2782\n" + // This line fails as it lacks mandatory parts + "server.1=zoo2:2781:2784:participant|zoo3:2783;0.0.0.0:2181\n" + + "server.2=zoo3:2782\n" + + // This line fails as it lacks mandatory parts "version=400000003"); } -} \ No newline at end of file +} diff --git a/solr/core/src/test/org/apache/solr/common/cloud/ZkStateReaderAccessor.java b/solr/core/src/test/org/apache/solr/common/cloud/ZkStateReaderAccessor.java index b40a7a25c25..0b51b1d9c2d 100644 --- a/solr/core/src/test/org/apache/solr/common/cloud/ZkStateReaderAccessor.java +++ b/solr/core/src/test/org/apache/solr/common/cloud/ZkStateReaderAccessor.java @@ -18,9 +18,7 @@ import java.util.Set; -/** - * Helper class to access package-private methods from ZkStateReader. - */ +/** Helper class to access package-private methods from ZkStateReader. 
*/ public class ZkStateReaderAccessor { ZkStateReader zkStateReader; @@ -31,6 +29,4 @@ public ZkStateReaderAccessor(ZkStateReader zkStateReader) { public Set getStateWatchers(String collection) { return zkStateReader.getStateWatchers(collection); } - - } diff --git a/solr/core/src/test/org/apache/solr/common/util/TestObjectReleaseTracker.java b/solr/core/src/test/org/apache/solr/common/util/TestObjectReleaseTracker.java index ddb33613631..e5b3ce53dbd 100644 --- a/solr/core/src/test/org/apache/solr/common/util/TestObjectReleaseTracker.java +++ b/solr/core/src/test/org/apache/solr/common/util/TestObjectReleaseTracker.java @@ -16,18 +16,17 @@ */ package org.apache.solr.common.util; -import org.apache.solr.SolrTestCaseJ4; -import org.hamcrest.MatcherAssert; -import org.junit.Test; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.stringContainsInOrder; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; - -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.stringContainsInOrder; +import org.apache.solr.SolrTestCaseJ4; +import org.hamcrest.MatcherAssert; +import org.junit.Test; public class TestObjectReleaseTracker extends SolrTestCaseJ4 { @@ -60,7 +59,7 @@ public void testUnreleased() { ObjectReleaseTracker.track(obj1); ObjectReleaseTracker.track(obj2); ObjectReleaseTracker.track(obj3); - + ObjectReleaseTracker.release(obj1); ObjectReleaseTracker.release(obj2); // ObjectReleaseTracker.release(obj3); @@ -86,57 +85,74 @@ public void testAnonymousClasses() { @Test public void testAsyncTracking() throws InterruptedException, ExecutionException { - ExecutorService es = ExecutorUtil.newMDCAwareSingleThreadExecutor(new SolrNamedThreadFactory("TestExec")); + ExecutorService es = + ExecutorUtil.newMDCAwareSingleThreadExecutor(new SolrNamedThreadFactory("TestExec")); Object trackable = new Object(); - Future result = es.submit(() -> { - ObjectReleaseTracker.track(trackable); - }); + Future result = + es.submit( + () -> { + ObjectReleaseTracker.track(trackable); + }); result.get(); // make sure that track has been called String message = ObjectReleaseTracker.clearObjectTrackerAndCheckEmpty(); - MatcherAssert.assertThat(message, stringContainsInOrder( - ObjectReleaseTracker.ObjectTrackerException.class.getName(), - "Exception: Submitter stack trace", - getClassName() + "." + getTestName())); + MatcherAssert.assertThat( + message, + stringContainsInOrder( + ObjectReleaseTracker.ObjectTrackerException.class.getName(), + "Exception: Submitter stack trace", + getClassName() + "." + getTestName())); // Test the grandparent submitter case AtomicReference> indirectResult = new AtomicReference<>(); - result = es.submit(() -> - indirectResult.set(es.submit(() -> { - ObjectReleaseTracker.track(trackable); - })) - ); + result = + es.submit( + () -> + indirectResult.set( + es.submit( + () -> { + ObjectReleaseTracker.track(trackable); + }))); result.get(); indirectResult.get().get(); message = ObjectReleaseTracker.clearObjectTrackerAndCheckEmpty(); - MatcherAssert.assertThat(message, stringContainsInOrder( - ObjectReleaseTracker.ObjectTrackerException.class.getName(), - "Exception: Submitter stack trace", - "Exception: Submitter stack trace", - getClassName() + "." 
+ getTestName())); - - // Now test great-grandparent, which we don't explicitly account for, but should have been recursively set + MatcherAssert.assertThat( + message, + stringContainsInOrder( + ObjectReleaseTracker.ObjectTrackerException.class.getName(), + "Exception: Submitter stack trace", + "Exception: Submitter stack trace", + getClassName() + "." + getTestName())); + + // Now test great-grandparent, which we don't explicitly account for, but should have been + // recursively set AtomicReference> indirectIndirect = new AtomicReference<>(); - result = es.submit(() -> - indirectResult.set(es.submit(() -> - indirectIndirect.set(es.submit(() -> { - ObjectReleaseTracker.track(trackable); - })) - )) - ); + result = + es.submit( + () -> + indirectResult.set( + es.submit( + () -> + indirectIndirect.set( + es.submit( + () -> { + ObjectReleaseTracker.track(trackable); + }))))); result.get(); indirectResult.get().get(); indirectIndirect.get().get(); message = ObjectReleaseTracker.clearObjectTrackerAndCheckEmpty(); - MatcherAssert.assertThat(message, stringContainsInOrder( - ObjectReleaseTracker.ObjectTrackerException.class.getName(), - "Exception: Submitter stack trace", - "Exception: Submitter stack trace", - "Exception: Submitter stack trace", - getClassName() + "." + getTestName())); + MatcherAssert.assertThat( + message, + stringContainsInOrder( + ObjectReleaseTracker.ObjectTrackerException.class.getName(), + "Exception: Submitter stack trace", + "Exception: Submitter stack trace", + "Exception: Submitter stack trace", + getClassName() + "." + getTestName())); es.shutdown(); assertTrue(es.awaitTermination(1, TimeUnit.SECONDS)); diff --git a/solr/core/src/test/org/apache/solr/core/AlternateDirectoryTest.java b/solr/core/src/test/org/apache/solr/core/AlternateDirectoryTest.java index a7323dc5fd0..b8c2da14acc 100644 --- a/solr/core/src/test/org/apache/solr/core/AlternateDirectoryTest.java +++ b/solr/core/src/test/org/apache/solr/core/AlternateDirectoryTest.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.nio.file.Path; - import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.store.Directory; @@ -26,10 +25,7 @@ import org.apache.solr.SolrTestCaseJ4; import org.junit.BeforeClass; -/** - * test that configs can override the DirectoryFactory and - * IndexReaderFactory used in solr. - */ +/** test that configs can override the DirectoryFactory and IndexReaderFactory used in solr. 
*/ public class AlternateDirectoryTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { @@ -37,34 +33,34 @@ public static void beforeClass() throws Exception { } public void testAltDirectoryUsed() throws Exception { - assertQ(req("q","*:*","qt","/select")); + assertQ(req("q", "*:*", "qt", "/select")); assertTrue(TestFSDirectoryFactory.openCalled); assertTrue(TestIndexReaderFactory.newReaderCalled); } - + public void testAltReaderUsed() throws Exception { IndexReaderFactory readerFactory = h.getCore().getIndexReaderFactory(); assertNotNull("Factory is null", readerFactory); - assertEquals("readerFactory is wrong class", - AlternateDirectoryTest.TestIndexReaderFactory.class.getName(), - readerFactory.getClass().getName()); + assertEquals( + "readerFactory is wrong class", + AlternateDirectoryTest.TestIndexReaderFactory.class.getName(), + readerFactory.getClass().getName()); } - static public class TestFSDirectoryFactory extends StandardDirectoryFactory { + public static class TestFSDirectoryFactory extends StandardDirectoryFactory { public static volatile boolean openCalled = false; public static volatile Directory dir; - + @Override - public Directory create(String path, LockFactory lockFactory, DirContext dirContext) throws IOException { + public Directory create(String path, LockFactory lockFactory, DirContext dirContext) + throws IOException { openCalled = true; return dir = newFSDirectory(Path.of(path), lockFactory); } - } - - static public class TestIndexReaderFactory extends IndexReaderFactory { + public static class TestIndexReaderFactory extends IndexReaderFactory { static volatile boolean newReaderCalled = false; @Override @@ -79,5 +75,4 @@ public DirectoryReader newReader(IndexWriter writer, SolrCore core) throws IOExc return DirectoryReader.open(writer); } } - } diff --git a/solr/core/src/test/org/apache/solr/core/BlobRepositoryCloudTest.java b/solr/core/src/test/org/apache/solr/core/BlobRepositoryCloudTest.java index efa8e11f050..0746fc39b34 100644 --- a/solr/core/src/test/org/apache/solr/core/BlobRepositoryCloudTest.java +++ b/solr/core/src/test/org/apache/solr/core/BlobRepositoryCloudTest.java @@ -42,15 +42,19 @@ public class BlobRepositoryCloudTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { - configureCluster(1) // only sharing *within* a node + configureCluster(1) // only sharing *within* a node .addConfig("configname", TEST_PATH.resolve("resource-sharing")) .configure(); -// Thread.sleep(2000); + // Thread.sleep(2000); HashMap params = new HashMap<>(); CollectionAdminRequest.createCollection(CollectionAdminParams.SYSTEM_COLL, null, 1, 1) .process(cluster.getSolrClient()); // test component will fail if it cant' find a blob with this data by this name - TestBlobHandler.postData(cluster.getSolrClient(), findLiveNodeURI(), "testResource", ByteBuffer.wrap("foo,bar\nbaz,bam".getBytes(StandardCharsets.UTF_8))); + TestBlobHandler.postData( + cluster.getSolrClient(), + findLiveNodeURI(), + "testResource", + ByteBuffer.wrap("foo,bar\nbaz,bam".getBytes(StandardCharsets.UTF_8))); // Thread.sleep(2000); // if these don't load we probably failed to post the blob above CollectionAdminRequest.createCollection("col1", "configname", 1, 1) @@ -70,19 +74,19 @@ public static void setupCluster() throws Exception { solrClient.add("col2", document); solrClient.commit("col2"); Thread.sleep(2000); - } @Test public void test() throws Exception { - // This test relies on the installation of 
ResourceSharingTestComponent which has 2 useful properties: - // 1. it will fail to initialize if it doesn't find a 2 line CSV like foo,bar\nbaz,bam thus validating - // that we are properly pulling data from the blob store - // 2. It replaces any q for a query request to /select with "text:" where is the name - // of the last collection to run a query. It does this by caching a shared resource of type - // ResourceSharingTestComponent.TestObject, and the following sequence is proof that either - // collection can tell if it was (or was not) the last collection to issue a query by - // consulting the shared object + // This test relies on the installation of ResourceSharingTestComponent which has 2 useful + // properties: + // 1. it will fail to initialize if it doesn't find a 2 line CSV like foo,bar\nbaz,bam thus + // validating that we are properly pulling data from the blob store + // 2. It replaces any q for a query request to /select with "text:" where is + // the name of the last collection to run a query. It does this by caching a shared resource of + // type ResourceSharingTestComponent.TestObject, and the following sequence is proof that either + // collection can tell if it was (or was not) the last collection to issue a query by consulting + // the shared object assertLastQueryNotToCollection("col1"); assertLastQueryNotToCollection("col2"); assertLastQueryNotToCollection("col1"); @@ -94,23 +98,32 @@ public void test() throws Exception { // TODO: move this up to parent class? private static String findLiveNodeURI() { ZkStateReader zkStateReader = cluster.getSolrClient().getZkStateReader(); - return zkStateReader.getBaseUrlForNodeName(zkStateReader.getClusterState().getCollection(".system").getSlices().iterator().next().getLeader().getNodeName()); + return zkStateReader.getBaseUrlForNodeName( + zkStateReader + .getClusterState() + .getCollection(".system") + .getSlices() + .iterator() + .next() + .getLeader() + .getNodeName()); } - private void assertLastQueryToCollection(String collection) throws SolrServerException, IOException { + private void assertLastQueryToCollection(String collection) + throws SolrServerException, IOException { assertEquals(1, getSolrDocuments(collection).size()); } - private void assertLastQueryNotToCollection(String collection) throws SolrServerException, IOException { + private void assertLastQueryNotToCollection(String collection) + throws SolrServerException, IOException { assertEquals(0, getSolrDocuments(collection).size()); } - private SolrDocumentList getSolrDocuments(String collection) throws SolrServerException, IOException { + private SolrDocumentList getSolrDocuments(String collection) + throws SolrServerException, IOException { SolrQuery query = new SolrQuery("*:*"); CloudSolrClient client = cluster.getSolrClient(); QueryResponse resp1 = client.query(collection, query); return resp1.getResults(); } - - } diff --git a/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java b/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java index 26233d3b6a4..240ac874336 100644 --- a/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java +++ b/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java @@ -17,6 +17,18 @@ package org.apache.solr.core; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import static 
org.mockito.Mockito.any; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; @@ -25,46 +37,33 @@ import java.nio.charset.Charset; import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - public class BlobRepositoryMockingTest { private static final Charset UTF8 = Charset.forName("UTF-8"); - private static final String[][] PARSED = new String[][]{{"foo", "bar", "baz"}, {"bang", "boom", "bash"}}; + private static final String[][] PARSED = + new String[][] {{"foo", "bar", "baz"}, {"bang", "boom", "bash"}}; private static final String BLOBSTR = "foo,bar,baz\nbang,boom,bash"; private CoreContainer mockContainer = mock(CoreContainer.class); + @SuppressWarnings({"unchecked", "rawtypes"}) - private ConcurrentHashMap mapMock = mock(ConcurrentHashMap.class); - - private Object[] mocks = new Object[] { - mockContainer, - mapMock - }; - + private ConcurrentHashMap mapMock = + mock(ConcurrentHashMap.class); + + private Object[] mocks = new Object[] {mockContainer, mapMock}; + BlobRepository repository; ByteBuffer blobData = ByteBuffer.wrap(BLOBSTR.getBytes(UTF8)); boolean blobFetched = false; String blobKey = ""; String url = null; ByteBuffer filecontent = null; - + @BeforeClass public static void beforeClass() { SolrTestCaseJ4.assumeWorkingMockito(); @@ -75,32 +74,32 @@ public void setUp() throws IllegalAccessException, NoSuchFieldException { blobFetched = false; blobKey = ""; reset(mocks); - repository = new BlobRepository(mockContainer) { - @Override - ByteBuffer fetchBlob(String key) { - blobKey = key; - blobFetched = true; - return blobData; - } - - @Override - ByteBuffer fetchFromUrl(String key, String url) { - if(!Objects.equals(url, BlobRepositoryMockingTest.this.url)) return null; - blobKey = key; - blobFetched = true; - return filecontent; - } - - @Override - @SuppressWarnings({"rawtypes"}) - ConcurrentHashMap createMap() { - return mapMock; - } - - }; + repository = + new BlobRepository(mockContainer) { + @Override + ByteBuffer fetchBlob(String key) { + blobKey = key; + blobFetched = true; + return blobData; + } + + @Override + ByteBuffer fetchFromUrl(String key, String url) { + if (!Objects.equals(url, BlobRepositoryMockingTest.this.url)) return null; + blobKey = key; + blobFetched = true; + return filecontent; + } + + @Override + @SuppressWarnings({"rawtypes"}) + ConcurrentHashMap createMap() { + return mapMock; + } + }; } - @Test (expected = SolrException.class) + @Test(expected = SolrException.class) public void testCloudOnly() { when(mockContainer.isZooKeeperAware()).thenReturn(false); try { @@ -125,20 +124,23 @@ public void testGetBlobIncrRefString() { } @Test - public void testGetBlobIncrRefByUrl() throws 
Exception{ + public void testGetBlobIncrRefByUrl() throws Exception { when(mockContainer.isZooKeeperAware()).thenReturn(true); filecontent = TestSolrConfigHandler.getFileContent("runtimecode/runtimelibs_v2.jar.bin"); url = "http://localhost:8080/myjar/location.jar"; - BlobRepository.BlobContentRef ref = repository.getBlobIncRef( "filefoo",null,url, - "bc5ce45ad281b6a08fb7e529b1eb475040076834816570902acb6ebdd809410e31006efdeaa7f78a6c35574f3504963f5f7e4d92247d0eb4db3fc9abdda5d417"); + BlobRepository.BlobContentRef ref = + repository.getBlobIncRef( + "filefoo", + null, + url, + "bc5ce45ad281b6a08fb7e529b1eb475040076834816570902acb6ebdd809410e31006efdeaa7f78a6c35574f3504963f5f7e4d92247d0eb4db3fc9abdda5d417"); assertEquals("filefoo", blobKey); assertTrue(blobFetched); assertNotNull(ref.blob); assertEquals(filecontent, ref.blob.get()); verify(mockContainer).isZooKeeperAware(); try { - repository.getBlobIncRef( "filefoo",null,url, - "WRONG-SHA512-KEY"); + repository.getBlobIncRef("filefoo", null, url, "WRONG-SHA512-KEY"); fail("expected exception"); } catch (Exception e) { assertTrue(e.getMessage().contains(" expected sha512 hash : WRONG-SHA512-KEY , actual :")); @@ -151,9 +153,10 @@ public void testGetBlobIncrRefByUrl() throws Exception{ @Test public void testCachedAlready() { when(mockContainer.isZooKeeperAware()).thenReturn(true); - when(mapMock.get("foo!")).thenReturn(new BlobRepository.BlobContent("foo!", blobData)); + when(mapMock.get("foo!")) + .thenReturn(new BlobRepository.BlobContent("foo!", blobData)); BlobRepository.BlobContentRef ref = repository.getBlobIncRef("foo!"); - assertEquals("",blobKey); + assertEquals("", blobKey); assertFalse(blobFetched); assertNotNull(ref.blob); assertEquals(blobData, ref.blob.get()); @@ -164,26 +167,29 @@ public void testCachedAlready() { @Test public void testGetBlobIncrRefStringDecoder() { when(mockContainer.isZooKeeperAware()).thenReturn(true); - BlobRepository.BlobContentRef ref = repository.getBlobIncRef("foo!", new BlobRepository.Decoder<>() { - @Override - public String[][] decode(InputStream inputStream) { - StringWriter writer = new StringWriter(); - try { - new InputStreamReader(inputStream, UTF8).transferTo(writer); - } catch (IOException e) { - throw new RuntimeException(e); - } - - assertEquals(BLOBSTR, writer.toString()); - return PARSED; - } - - @Override - public String getName() { - return "mocked"; - } - }); - assertEquals("foo!",blobKey); + BlobRepository.BlobContentRef ref = + repository.getBlobIncRef( + "foo!", + new BlobRepository.Decoder<>() { + @Override + public String[][] decode(InputStream inputStream) { + StringWriter writer = new StringWriter(); + try { + new InputStreamReader(inputStream, UTF8).transferTo(writer); + } catch (IOException e) { + throw new RuntimeException(e); + } + + assertEquals(BLOBSTR, writer.toString()); + return PARSED; + } + + @Override + public String getName() { + return "mocked"; + } + }); + assertEquals("foo!", blobKey); assertTrue(blobFetched); assertNotNull(ref.blob); assertEquals(PARSED, ref.blob.get()); @@ -191,6 +197,4 @@ public String getName() { verify(mapMock).get("foo!mocked"); verify(mapMock).put(eq("foo!mocked"), any(BlobRepository.BlobContent.class)); } - - } diff --git a/solr/core/src/test/org/apache/solr/core/ByteBuffersDirectoryFactoryTest.java b/solr/core/src/test/org/apache/solr/core/ByteBuffersDirectoryFactoryTest.java index 45c7a152c00..2858acf723a 100644 --- a/solr/core/src/test/org/apache/solr/core/ByteBuffersDirectoryFactoryTest.java +++ 
b/solr/core/src/test/org/apache/solr/core/ByteBuffersDirectoryFactoryTest.java @@ -17,31 +17,32 @@ package org.apache.solr.core; import java.io.IOException; - +import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; import org.apache.lucene.store.LockFactory; -import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.core.DirectoryFactory.DirContext; -/** - * Test-case for ByteBuffersDirectoryFactory - */ +/** Test-case for ByteBuffersDirectoryFactory */ public class ByteBuffersDirectoryFactoryTest extends SolrTestCaseJ4 { public void testOpenReturnsTheSameForSamePath() throws IOException { final Directory directory = new ByteBuffersDirectory(); - ByteBuffersDirectoryFactory factory = new ByteBuffersDirectoryFactory() { - @Override - protected Directory create(String path, LockFactory lockFactory, DirContext dirContext) { - return directory; - } - }; + ByteBuffersDirectoryFactory factory = + new ByteBuffersDirectoryFactory() { + @Override + protected Directory create(String path, LockFactory lockFactory, DirContext dirContext) { + return directory; + } + }; String path = "/fake/path"; Directory dir1 = factory.get(path, DirContext.DEFAULT, DirectoryFactory.LOCK_TYPE_SINGLE); Directory dir2 = factory.get(path, DirContext.DEFAULT, DirectoryFactory.LOCK_TYPE_SINGLE); - assertEquals("ByteBuffersDirectoryFactory should not create new instance of ByteBuffersDirectory " + - "every time open() is called for the same path", dir1, dir2); + assertEquals( + "ByteBuffersDirectoryFactory should not create new instance of ByteBuffersDirectory " + + "every time open() is called for the same path", + dir1, + dir2); factory.release(dir1); factory.release(dir2); @@ -50,19 +51,23 @@ protected Directory create(String path, LockFactory lockFactory, DirContext dirC public void testOpenSucceedForEmptyDir() throws IOException { ByteBuffersDirectoryFactory factory = new ByteBuffersDirectoryFactory(); - Directory dir = factory.get("/fake/path", DirContext.DEFAULT, DirectoryFactory.LOCK_TYPE_SINGLE); - assertNotNull("ByteBuffersDirectoryFactory should create ByteBuffersDirectory even if the path doesn't lead " + - "to index directory on the file system", dir); + Directory dir = + factory.get("/fake/path", DirContext.DEFAULT, DirectoryFactory.LOCK_TYPE_SINGLE); + assertNotNull( + "ByteBuffersDirectoryFactory should create ByteBuffersDirectory even if the path doesn't lead " + + "to index directory on the file system", + dir); factory.release(dir); factory.close(); } public void testIndexRetrieve() throws Exception { System.setProperty("solr.directoryFactory", "solr.ByteBuffersDirectoryFactory"); - initCore("solrconfig-minimal.xml","schema-minimal.xml"); + initCore("solrconfig-minimal.xml", "schema-minimal.xml"); DirectoryFactory factory = h.getCore().getDirectoryFactory(); - assertTrue("Found: " + factory.getClass().getName(), factory instanceof ByteBuffersDirectoryFactory); - for (int i = 0 ; i < 5 ; ++i) { + assertTrue( + "Found: " + factory.getClass().getName(), factory instanceof ByteBuffersDirectoryFactory); + for (int i = 0; i < 5; ++i) { assertU(adoc("id", "" + i, "a_s", "_" + i + "_")); } assertU(commit()); diff --git a/solr/core/src/test/org/apache/solr/core/ConfigureRecoveryStrategyTest.java b/solr/core/src/test/org/apache/solr/core/ConfigureRecoveryStrategyTest.java index 246ed981137..7134dad3e85 100644 --- a/solr/core/src/test/org/apache/solr/core/ConfigureRecoveryStrategyTest.java +++ 
b/solr/core/src/test/org/apache/solr/core/ConfigureRecoveryStrategyTest.java @@ -18,7 +18,6 @@ import java.lang.reflect.Method; import java.lang.reflect.Modifier; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.cloud.RecoveryStrategy; import org.apache.solr.common.cloud.ZkCoreNodeProps; @@ -26,20 +25,19 @@ import org.apache.solr.common.cloud.ZkStateReader; import org.junit.BeforeClass; -/** - * test that configs can override the RecoveryStrategy - */ +/** test that configs can override the RecoveryStrategy */ public class ConfigureRecoveryStrategyTest extends SolrTestCaseJ4 { - private static final String solrConfigFileNameConfigure = "solrconfig-configurerecoverystrategy.xml"; + private static final String solrConfigFileNameConfigure = + "solrconfig-configurerecoverystrategy.xml"; private static final String solrConfigFileNameCustom = "solrconfig-customrecoverystrategy.xml"; private static String solrConfigFileName; @BeforeClass public static void beforeClass() throws Exception { - solrConfigFileName = (random().nextBoolean() - ? solrConfigFileNameConfigure : solrConfigFileNameCustom); + solrConfigFileName = + (random().nextBoolean() ? solrConfigFileNameConfigure : solrConfigFileNameCustom); initCore(solrConfigFileName, "schema.xml"); } @@ -53,15 +51,19 @@ public void testBuilder() throws Exception { if (solrConfigFileName.equals(solrConfigFileNameConfigure)) { expectedClassName = RecoveryStrategy.Builder.class.getName(); } else if (solrConfigFileName.equals(solrConfigFileNameCustom)) { - assertTrue("recoveryStrategyBuilder is wrong class (instanceof)", + assertTrue( + "recoveryStrategyBuilder is wrong class (instanceof)", recoveryStrategyBuilder instanceof CustomRecoveryStrategyBuilder); - expectedClassName = ConfigureRecoveryStrategyTest.CustomRecoveryStrategyBuilder.class.getName(); + expectedClassName = + ConfigureRecoveryStrategyTest.CustomRecoveryStrategyBuilder.class.getName(); } else { expectedClassName = null; } - assertEquals("recoveryStrategyBuilder is wrong class (name)", - expectedClassName, recoveryStrategyBuilder.getClass().getName()); + assertEquals( + "recoveryStrategyBuilder is wrong class (name)", + expectedClassName, + recoveryStrategyBuilder.getClass().getName()); } public void testAlmostAllMethodsAreFinal() throws Exception { @@ -76,7 +78,7 @@ public void testAlmostAllMethodsAreFinal() throws Exception { } } - static public class CustomRecoveryStrategy extends RecoveryStrategy { + public static class CustomRecoveryStrategy extends RecoveryStrategy { private String alternativeBaseUrlProp; @@ -88,8 +90,8 @@ public void setAlternativeBaseUrlProp(String alternativeBaseUrlProp) { this.alternativeBaseUrlProp = alternativeBaseUrlProp; } - public CustomRecoveryStrategy(CoreContainer cc, CoreDescriptor cd, - RecoveryStrategy.RecoveryListener recoveryListener) { + public CustomRecoveryStrategy( + CoreContainer cc, CoreDescriptor cd, RecoveryStrategy.RecoveryListener recoveryListener) { super(cc, cd, recoveryListener); } @@ -101,12 +103,11 @@ protected String getReplicateLeaderUrl(ZkNodeProps leaderprops) { } } - static public class CustomRecoveryStrategyBuilder extends RecoveryStrategy.Builder { + public static class CustomRecoveryStrategyBuilder extends RecoveryStrategy.Builder { @Override - protected RecoveryStrategy newRecoveryStrategy(CoreContainer cc, CoreDescriptor cd, - RecoveryStrategy.RecoveryListener recoveryListener) { + protected RecoveryStrategy newRecoveryStrategy( + CoreContainer cc, CoreDescriptor cd, RecoveryStrategy.RecoveryListener 
recoveryListener) { return new CustomRecoveryStrategy(cc, cd, recoveryListener); } } - } diff --git a/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java b/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java index 536443587b8..af00381528d 100644 --- a/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java +++ b/solr/core/src/test/org/apache/solr/core/CoreSorterTest.java @@ -16,6 +16,9 @@ */ package org.apache.solr.core; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -26,7 +29,6 @@ import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.IntStream; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.cloud.ZkController; import org.apache.solr.common.cloud.ClusterState; @@ -34,42 +36,39 @@ import org.apache.solr.common.cloud.DocRouter; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; -import org.apache.solr.common.util.Utils; import org.apache.solr.common.cloud.ZkStateReader; +import org.apache.solr.common.util.Utils; import org.apache.solr.core.CoreSorter.CountsForEachShard; import org.apache.solr.handler.admin.ConfigSetsHandler; import org.junit.Test; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - @SolrTestCaseJ4.SuppressSSL public class CoreSorterTest extends SolrTestCaseJ4 { - private static final List inputCounts = Arrays.asList( - // DOWN LIVE MY - new CountsForEachShard(1, 3, 1), - new CountsForEachShard(0, 3, 2), - new CountsForEachShard(0, 3, 3), - new CountsForEachShard(0, 3, 4), - new CountsForEachShard(1, 0, 2), - new CountsForEachShard(1, 0, 1), - new CountsForEachShard(2, 5, 1), - new CountsForEachShard(2, 4, 2), - new CountsForEachShard(2, 3, 3) - ); - - private static final List expectedCounts = Arrays.asList( - new CountsForEachShard(0, 3, 2), - new CountsForEachShard(0, 3, 3), - new CountsForEachShard(0, 3, 4), - new CountsForEachShard(1, 3, 1), - new CountsForEachShard(2, 5, 1), - new CountsForEachShard(2, 4, 2), - new CountsForEachShard(2, 3, 3), - new CountsForEachShard(1, 0, 1), - new CountsForEachShard(1, 0, 2) - ); + private static final List inputCounts = + Arrays.asList( + // DOWN LIVE MY + new CountsForEachShard(1, 3, 1), + new CountsForEachShard(0, 3, 2), + new CountsForEachShard(0, 3, 3), + new CountsForEachShard(0, 3, 4), + new CountsForEachShard(1, 0, 2), + new CountsForEachShard(1, 0, 1), + new CountsForEachShard(2, 5, 1), + new CountsForEachShard(2, 4, 2), + new CountsForEachShard(2, 3, 3)); + + private static final List expectedCounts = + Arrays.asList( + new CountsForEachShard(0, 3, 2), + new CountsForEachShard(0, 3, 3), + new CountsForEachShard(0, 3, 4), + new CountsForEachShard(1, 3, 1), + new CountsForEachShard(2, 5, 1), + new CountsForEachShard(2, 4, 2), + new CountsForEachShard(2, 3, 3), + new CountsForEachShard(1, 0, 1), + new CountsForEachShard(1, 0, 2)); @Test public void testComparator() { @@ -91,21 +90,30 @@ public void integrationTest() { Collections.shuffle(perShardCounts, random()); // compute nodes, some live, some down - final int maxNodesOfAType = perShardCounts.stream() // not too important how many we have, but lets have plenty - .mapToInt(c -> c.totalReplicasInLiveNodes + c.totalReplicasInDownNodes + c.myReplicas).max().getAsInt(); - List liveNodes = IntStream.range(0, maxNodesOfAType).mapToObj(i -> "192.168.0." 
+ i + ":8983_").collect(Collectors.toList()); + final int maxNodesOfAType = + perShardCounts.stream() // not too important how many we have, but lets have plenty + .mapToInt(c -> c.totalReplicasInLiveNodes + c.totalReplicasInDownNodes + c.myReplicas) + .max() + .getAsInt(); + List liveNodes = + IntStream.range(0, maxNodesOfAType) + .mapToObj(i -> "192.168.0." + i + ":8983_") + .collect(Collectors.toList()); Collections.shuffle(liveNodes, random()); String thisNode = liveNodes.get(0); List otherLiveNodes = liveNodes.subList(1, liveNodes.size()); - List downNodes = IntStream.range(0, maxNodesOfAType).mapToObj(i -> "192.168.1." + i + ":8983_").collect(Collectors.toList()); + List downNodes = + IntStream.range(0, maxNodesOfAType) + .mapToObj(i -> "192.168.1." + i + ":8983_") + .collect(Collectors.toList()); // divide into two collections int numCol1 = random().nextInt(perShardCounts.size()); - Map> collToCounts = new HashMap<>(); + Map> collToCounts = new HashMap<>(); collToCounts.put("col1", perShardCounts.subList(0, numCol1)); collToCounts.put("col2", perShardCounts.subList(numCol1, perShardCounts.size())); - Map collToState = new HashMap<>(); + Map collToState = new HashMap<>(); Map> myCountsToDescs = new HashMap<>(); for (Map.Entry> entry : collToCounts.entrySet()) { String collection = entry.getKey(); @@ -117,7 +125,9 @@ public void integrationTest() { for (int myRepNum = 0; myRepNum < shardCounts.myReplicas; myRepNum++) { addNewReplica(replicas, collection, slice, Collections.singletonList(thisNode)); // save this mapping for later - myCountsToDescs.put(shardCounts, replicas.stream().map(this::newCoreDescriptor).collect(Collectors.toList())); + myCountsToDescs.put( + shardCounts, + replicas.stream().map(this::newCoreDescriptor).collect(Collectors.toList())); } for (int myRepNum = 0; myRepNum < shardCounts.totalReplicasInLiveNodes; myRepNum++) { addNewReplica(replicas, collection, slice, otherLiveNodes); @@ -125,11 +135,18 @@ public void integrationTest() { for (int myRepNum = 0; myRepNum < shardCounts.totalReplicasInDownNodes; myRepNum++) { addNewReplica(replicas, collection, slice, downNodes); } - Map replicaMap = replicas.stream().collect(Collectors.toMap(Replica::getName, Function.identity())); + Map replicaMap = + replicas.stream().collect(Collectors.toMap(Replica::getName, Function.identity())); sliceMap.put(slice, new Slice(slice, replicaMap, Collections.emptyMap(), collection)); } @SuppressWarnings({"unchecked"}) - DocCollection col = new DocCollection(collection, sliceMap, Collections.singletonMap(ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), DocRouter.DEFAULT); + DocCollection col = + new DocCollection( + collection, + sliceMap, + Collections.singletonMap( + ZkStateReader.CONFIGNAME_PROP, ConfigSetsHandler.DEFAULT_CONFIGSET_NAME), + DocRouter.DEFAULT); collToState.put(collection, col); } // reverse map @@ -163,7 +180,6 @@ public void integrationTest() { NodeConfig mockNodeConfig = mock(NodeConfig.class); when(mockNodeConfig.getNodeName()).thenReturn(thisNode); when(mockCC.getNodeConfig()).thenReturn(mockNodeConfig); - } List myDescs = new ArrayList<>(myDescsToCounts.keySet()); @@ -186,20 +202,32 @@ public void integrationTest() { } private CoreDescriptor newCoreDescriptor(Replica r) { - Map props = Map.of( - CoreDescriptor.CORE_SHARD, r.getShard(), - CoreDescriptor.CORE_COLLECTION, r.getCollection(), - CoreDescriptor.CORE_NODE_NAME, r.getNodeName() - ); - return new CoreDescriptor(r.getCoreName(), TEST_PATH(), props , null, mock(ZkController.class)); 
+ Map props = + Map.of( + CoreDescriptor.CORE_SHARD, r.getShard(), + CoreDescriptor.CORE_COLLECTION, r.getCollection(), + CoreDescriptor.CORE_NODE_NAME, r.getNodeName()); + return new CoreDescriptor(r.getCoreName(), TEST_PATH(), props, null, mock(ZkController.class)); } - protected Replica addNewReplica(List replicaList, String collection, String slice, List possibleNodes) { + protected Replica addNewReplica( + List replicaList, String collection, String slice, List possibleNodes) { String replica = "r" + replicaList.size(); - String node = possibleNodes.get(random().nextInt(possibleNodes.size())); // place on a random node - Replica r = new Replica(replica, Map.of(ZkStateReader.CORE_NAME_PROP, replica, ZkStateReader.NODE_NAME_PROP, node, ZkStateReader.BASE_URL_PROP, Utils.getBaseUrlForNodeName(node, "http")), collection, slice); + String node = + possibleNodes.get(random().nextInt(possibleNodes.size())); // place on a random node + Replica r = + new Replica( + replica, + Map.of( + ZkStateReader.CORE_NAME_PROP, + replica, + ZkStateReader.NODE_NAME_PROP, + node, + ZkStateReader.BASE_URL_PROP, + Utils.getBaseUrlForNodeName(node, "http")), + collection, + slice); replicaList.add(r); return r; } - } diff --git a/solr/core/src/test/org/apache/solr/core/CountUsageValueSourceParser.java b/solr/core/src/test/org/apache/solr/core/CountUsageValueSourceParser.java index 8e107110da6..8a82ccb98a3 100644 --- a/solr/core/src/test/org/apache/solr/core/CountUsageValueSourceParser.java +++ b/solr/core/src/test/org/apache/solr/core/CountUsageValueSourceParser.java @@ -15,6 +15,12 @@ * limitations under the License. */ package org.apache.solr.core; + +import java.io.IOException; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicInteger; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.queries.function.FunctionValues; import org.apache.lucene.queries.function.ValueSource; @@ -24,25 +30,19 @@ import org.apache.solr.search.SyntaxError; import org.apache.solr.search.ValueSourceParser; -import java.io.IOException; -import java.util.Map; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicInteger; - /** - * A Mock ValueSource parser that produces ValueSources that returns a constant - * value but also keeps track of how many times it was asked for a value for any - * document via a static map and a user defined key. - **/ + * A Mock ValueSource parser that produces ValueSources that returns a constant value but also keeps + * track of how many times it was asked for a value for any document via a static map and a user + * defined key. 
+ */ public class CountUsageValueSourceParser extends ValueSourceParser { - private static final ConcurrentMap counters - = new ConcurrentHashMap<>(); + private static final ConcurrentMap counters = new ConcurrentHashMap<>(); public static void clearCounters() { counters.clear(); } + public static int getAndClearCount(String key) { AtomicInteger counter = counters.remove(key); if (null == counter) { @@ -55,25 +55,27 @@ public static int getAndClearCount(String key) { public ValueSource parse(FunctionQParser fp) throws SyntaxError { String key = fp.parseArg(); double val = fp.parseDouble(); - + AtomicInteger counter = new AtomicInteger(); if (null != counters.putIfAbsent(key, counter)) { throw new IllegalArgumentException("Key has already been used: " + key); - } + } return new CountDocsValueSource(counter, val); } - static final private class CountDocsValueSource extends DoubleConstValueSource { + private static final class CountDocsValueSource extends DoubleConstValueSource { private final AtomicInteger counter; private final double value; + public CountDocsValueSource(AtomicInteger counter, double value) { super(value); this.value = value; this.counter = counter; } + @Override - public FunctionValues getValues(Map context, - LeafReaderContext readerContext) throws IOException { + public FunctionValues getValues(Map context, LeafReaderContext readerContext) + throws IOException { return new DoubleDocValues(this) { @Override public double doubleVal(int doc) { @@ -83,5 +85,4 @@ public double doubleVal(int doc) { }; } } - } diff --git a/solr/core/src/test/org/apache/solr/core/DirectoryFactoriesTest.java b/solr/core/src/test/org/apache/solr/core/DirectoryFactoriesTest.java index 2cc1f188543..d13bf40cbc4 100644 --- a/solr/core/src/test/org/apache/solr/core/DirectoryFactoriesTest.java +++ b/solr/core/src/test/org/apache/solr/core/DirectoryFactoriesTest.java @@ -17,34 +17,31 @@ package org.apache.solr.core; -import org.apache.lucene.store.Directory; -import org.apache.lucene.store.IndexOutput; -import org.apache.lucene.store.IOContext; - -import org.apache.solr.common.util.NamedList; - -import org.apache.solr.SolrTestCaseJ4; - import java.util.Arrays; import java.util.Collections; import java.util.List; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexOutput; +import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.common.util.NamedList; /** * Test some expected equivilencies of all DirectoryFactory implementations. - *
<p>
- * TODO: test more methods besides exists(String)
- * </p>
+ *
+ * <p>
TODO: test more methods besides exists(String) */ public class DirectoryFactoriesTest extends SolrTestCaseJ4 { - public static final List> ALL_CLASSES - = Arrays.asList(MMapDirectoryFactory.class, - MockDirectoryFactory.class, - MockFSDirectoryFactory.class, - NRTCachingDirectoryFactory.class, - NIOFSDirectoryFactory.class, - RAMDirectoryFactory.class, - StandardDirectoryFactory.class); - + public static final List> ALL_CLASSES = + Arrays.asList( + MMapDirectoryFactory.class, + MockDirectoryFactory.class, + MockFSDirectoryFactory.class, + NRTCachingDirectoryFactory.class, + NIOFSDirectoryFactory.class, + RAMDirectoryFactory.class, + StandardDirectoryFactory.class); + /* Test that MockDirectoryFactory's exist method behaves consistent w/other impls */ public void testExistsEquivilence() throws Exception { // TODO: ideally we'd init all of these using DirectoryFactory.loadDirectoryFactory() ... @@ -64,8 +61,8 @@ private void testExistsBehavior(Class clazz) throws dirFac.init(new NamedList<>()); assertFalse(path + " should not exist yet", dirFac.exists(path)); - Directory dir = dirFac.get(path, DirectoryFactory.DirContext.DEFAULT, - DirectoryFactory.LOCK_TYPE_SINGLE); + Directory dir = + dirFac.get(path, DirectoryFactory.DirContext.DEFAULT, DirectoryFactory.LOCK_TYPE_SINGLE); try { assertFalse(path + " should still not exist", dirFac.exists(path)); try (IndexOutput file = dir.createOutput("test_file", IOContext.DEFAULT)) { @@ -74,24 +71,24 @@ private void testExistsBehavior(Class clazz) throws // TODO: even StandardDirectoryFactory & NRTCachingDirectoryFactory can't agree on this... // ... should we consider this explicitly undefined? // ... or should *all* Caching DirFactories consult the cache as well as the disk itself? - // assertFalse(path + " should still not exist until file is closed", dirFac.exists(path)); - + // assertFalse(path + " should still not exist until file is closed", + // dirFac.exists(path)); + } // implicitly close file... - + // TODO: even StandardDirectoryFactory & NRTCachingDirectoryFactory can't agree on this... // ... should we consider this explicitly undefined? // ... or should *all* Caching DirFactories consult the cache as well as the disk itself? 
// assertTrue(path + " should exist once file is closed", dirFac.exists(path)); - + dir.sync(Collections.singleton("test_file")); assertTrue(path + " should exist once file is synced", dirFac.exists(path)); - } finally { dirFac.release(dir); } assertTrue(path + " should still exist even after being released", dirFac.exists(path)); - + } catch (AssertionError ae) { throw new AssertionError("Failed with " + clazz.getName(), ae); } finally { diff --git a/solr/core/src/test/org/apache/solr/core/DirectoryFactoryTest.java b/solr/core/src/test/org/apache/solr/core/DirectoryFactoryTest.java index 57d51c8996a..e2a7eb46480 100755 --- a/solr/core/src/test/org/apache/solr/core/DirectoryFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/core/DirectoryFactoryTest.java @@ -21,7 +21,6 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.Properties; - import org.apache.lucene.mockfile.FilterPath; import org.apache.solr.SolrTestCase; import org.apache.solr.common.util.NamedList; @@ -70,13 +69,14 @@ public void testLockTypesUnchanged() throws Exception { public void testGetDataHomeRAMDirectory() throws Exception { doTestGetDataHome(RAMDirectoryFactory.class); } - + @Test public void testGetDataHomeByteBuffersDirectory() throws Exception { doTestGetDataHome(ByteBuffersDirectoryFactory.class); } - - private void doTestGetDataHome(Class directoryFactoryClass) throws Exception { + + private void doTestGetDataHome(Class directoryFactoryClass) + throws Exception { NodeConfig config = loadNodeConfig("/solr/solr-solrDataHome.xml"); CoreContainer cc = new CoreContainer(config); Properties cp = cc.getContainerProperties(); @@ -92,7 +92,7 @@ private void doTestGetDataHome(Class directoryFactor args.add("solr.data.home", "/solrdata/"); df.init(args); assertDataHome("/solrdata/inst_dir/data", "inst_dir", df, cc); - + // solr.data.home set with System property, and relative path System.setProperty("solr.data.home", "solrdata"); config = loadNodeConfig("/solr/solr-solrDataHome.xml"); @@ -100,9 +100,16 @@ private void doTestGetDataHome(Class directoryFactor df = directoryFactoryClass.getConstructor().newInstance(); df.initCoreContainer(cc); df.init(new NamedList<>()); - assertDataHome(solrHome.resolve("solrdata/inst_dir/data").toAbsolutePath().toString(), "inst_dir", df, cc); + assertDataHome( + solrHome.resolve("solrdata/inst_dir/data").toAbsolutePath().toString(), "inst_dir", df, cc); // Test parsing last component of instanceDir, and using custom dataDir - assertDataHome(solrHome.resolve("solrdata/myinst/mydata").toAbsolutePath().toString(), "/path/to/myinst", df, cc, "dataDir", "mydata"); + assertDataHome( + solrHome.resolve("solrdata/myinst/mydata").toAbsolutePath().toString(), + "/path/to/myinst", + df, + cc, + "dataDir", + "mydata"); // solr.data.home set but also solrDataHome set in solr.xml, which should override the former System.setProperty("test.solr.data.home", "/foo"); config = loadNodeConfig("/solr/solr-solrDataHome.xml"); @@ -113,13 +120,20 @@ private void doTestGetDataHome(Class directoryFactor assertDataHome("/foo/inst_dir/data", "inst_dir", df, cc); } - private void assertDataHome(String expected, String instanceDir, DirectoryFactory df, CoreContainer cc, String... properties) throws IOException { - String dataHome = df.getDataHome( - new CoreDescriptor("core_name", Paths.get(instanceDir).toAbsolutePath(), cc, properties)); + private void assertDataHome( + String expected, + String instanceDir, + DirectoryFactory df, + CoreContainer cc, + String... 
properties) + throws IOException { + String dataHome = + df.getDataHome( + new CoreDescriptor( + "core_name", Paths.get(instanceDir).toAbsolutePath(), cc, properties)); assertEquals(Paths.get(expected).toAbsolutePath(), Paths.get(dataHome).toAbsolutePath()); } - private NodeConfig loadNodeConfig(String config) throws Exception { InputStream is = DirectoryFactoryTest.class.getResourceAsStream(config); return SolrXmlConfig.fromInputStream(solrHome, is, new Properties()); diff --git a/solr/core/src/test/org/apache/solr/core/DummyValueSourceParser.java b/solr/core/src/test/org/apache/solr/core/DummyValueSourceParser.java index 3e4fb304c08..94955aaa12a 100644 --- a/solr/core/src/test/org/apache/solr/core/DummyValueSourceParser.java +++ b/solr/core/src/test/org/apache/solr/core/DummyValueSourceParser.java @@ -15,6 +15,7 @@ * limitations under the License. */ package org.apache.solr.core; + import org.apache.lucene.queries.function.FunctionValues; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.queries.function.valuesource.SimpleFloatFunction; @@ -23,11 +24,7 @@ import org.apache.solr.search.SyntaxError; import org.apache.solr.search.ValueSourceParser; - -/** - * Mock ValueSource parser that doesn't do much of anything - * - **/ +/** Mock ValueSource parser that doesn't do much of anything */ public class DummyValueSourceParser extends ValueSourceParser { private NamedList args; @@ -39,20 +36,19 @@ public void init(NamedList args) { @Override public ValueSource parse(FunctionQParser fp) throws SyntaxError { ValueSource source = fp.parseValueSource(); - ValueSource result = new SimpleFloatFunction(source) { - @Override - protected String name() { - return "foo"; - } + ValueSource result = + new SimpleFloatFunction(source) { + @Override + protected String name() { + return "foo"; + } - @Override - protected float func(int doc, FunctionValues vals) { - float result = 0; - return result; - } - }; + @Override + protected float func(int doc, FunctionValues vals) { + float result = 0; + return result; + } + }; return result; } - - } diff --git a/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java b/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java index 988b67b9142..210f3ff9e48 100644 --- a/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java +++ b/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java @@ -16,8 +16,9 @@ */ package org.apache.solr.core; -import java.util.Map; +import static org.apache.solr.common.util.Utils.fromJSONString; +import java.util.Map; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.metrics.MetricsMap; import org.apache.solr.metrics.SolrMetricManager; @@ -25,18 +26,16 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.apache.solr.common.util.Utils.fromJSONString; - /** - * Test that checks that long running queries are exited by Solr using the - * SolrQueryTimeoutImpl implementation. + * Test that checks that long running queries are exited by Solr using the SolrQueryTimeoutImpl + * implementation. 
*/ public class ExitableDirectoryReaderTest extends SolrTestCaseJ4 { - + static int NUM_DOCS = 100; - static final String assertionString = "/response/numFound=="+ NUM_DOCS; + static final String assertionString = "/response/numFound==" + NUM_DOCS; static final String failureAssertionString = "/responseHeader/partialResults==true]"; - static final String longTimeout="10000"; + static final String longTimeout = "10000"; static final String sleep = "2"; @BeforeClass @@ -47,9 +46,14 @@ public static void beforeClass() throws Exception { public static void createIndex() { for (int i = 0; i < NUM_DOCS; i++) { - assertU(adoc("id", Integer.toString(i), "name", "a" + i + " b" + i + " c" + i + " d"+i + " e" + i)); + assertU( + adoc( + "id", + Integer.toString(i), + "name", + "a" + i + " b" + i + " c" + i + " d" + i + " e" + i)); if (random().nextInt(NUM_DOCS) == 0) { - assertU(commit()); // sometimes make multiple segments + assertU(commit()); // sometimes make multiple segments } } assertU(commit()); @@ -58,72 +62,98 @@ public static void createIndex() { @Test public void testPrefixQuery() throws Exception { String q = "name:a*"; - assertJQ(req("q", q, "timeAllowed", "1", "sleep", sleep), failureAssertionString); + assertJQ(req("q", q, "timeAllowed", "1", "sleep", sleep), failureAssertionString); - // do the same query and test for both success, and that the number of documents matched (i.e. make sure no partial results were cached) + // do the same query and test for both success, and that the number of documents matched (i.e. + // make sure no partial results were cached) assertJQ(req("q", q, "timeAllowed", longTimeout), assertionString); - // this time we should get a query cache hit and hopefully no exception? this may change in the future if time checks are put into other places. - assertJQ(req("q", q, "timeAllowed", "1", "sleep", sleep), assertionString); + // this time we should get a query cache hit and hopefully no exception? this may change in the + // future if time checks are put into other places. + assertJQ(req("q", q, "timeAllowed", "1", "sleep", sleep), assertionString); // now do the same for the filter cache - assertJQ(req("q","*:*", "fq",q, "timeAllowed", "1", "sleep", sleep), failureAssertionString); + assertJQ(req("q", "*:*", "fq", q, "timeAllowed", "1", "sleep", sleep), failureAssertionString); // make sure that the result succeeds this time, and that a bad filter wasn't cached - assertJQ(req("q","*:*", "fq",q, "timeAllowed", longTimeout), assertionString); + assertJQ(req("q", "*:*", "fq", q, "timeAllowed", longTimeout), assertionString); // test that Long.MAX_VALUE works - assertJQ(req("q","name:b*", "timeAllowed",Long.toString(Long.MAX_VALUE)), assertionString); + assertJQ(req("q", "name:b*", "timeAllowed", Long.toString(Long.MAX_VALUE)), assertionString); // negative timeAllowed should disable timeouts. assertJQ(req("q", "name:c*", "timeAllowed", "-7"), assertionString); } - - - - // There are lots of assumptions about how/when cache entries should be changed in this method. The - // simple case above shows the root problem without the confusion. testFilterSimpleCase should be - // removed once it is running and this test should be un-ignored and the assumptions verified. + // There are lots of assumptions about how/when cache entries should be changed in this method. + // The simple case above shows the root problem without the confusion. testFilterSimpleCase should + // be removed once it is running and this test should be un-ignored and the assumptions verified. 
// With all the weirdness, I'm not going to vouch for this test. Feel free to change it. @Test public void testCacheAssumptions() throws Exception { - String fq= "name:d*"; + String fq = "name:d*"; SolrCore core = h.getCore(); - MetricsMap filterCacheStats = (MetricsMap)((SolrMetricManager.GaugeWrapper)core.getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.filterCache")).getGauge(); + MetricsMap filterCacheStats = + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) + core.getCoreMetricManager() + .getRegistry() + .getMetrics() + .get("CACHE.searcher.filterCache")) + .getGauge(); long fqInserts = (long) filterCacheStats.getValue().get("inserts"); - MetricsMap queryCacheStats = (MetricsMap)((SolrMetricManager.GaugeWrapper)core.getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.queryResultCache")).getGauge(); + MetricsMap queryCacheStats = + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) + core.getCoreMetricManager() + .getRegistry() + .getMetrics() + .get("CACHE.searcher.queryResultCache")) + .getGauge(); long qrInserts = (long) queryCacheStats.getValue().get("inserts"); - // This gets 0 docs back. Use 10000 instead of 1 for timeAllowed and it gets 100 back and the for loop below - // succeeds. - String response = JQ(req("q", "*:*", "fq", fq, "indent", "true", "timeAllowed", "1", "sleep", sleep)); + // This gets 0 docs back. Use 10000 instead of 1 for timeAllowed and it gets 100 back and the + // for loop below succeeds. + String response = + JQ(req("q", "*:*", "fq", fq, "indent", "true", "timeAllowed", "1", "sleep", sleep)); Map res = (Map) fromJSONString(response); Map body = (Map) (res.get("response")); assertTrue("Should have fewer docs than " + NUM_DOCS, (long) (body.get("numFound")) < NUM_DOCS); Map header = (Map) (res.get("responseHeader")); - assertTrue("Should have partial results", (Boolean) (header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))); + assertTrue( + "Should have partial results", + (Boolean) (header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))); - assertEquals("Should NOT have inserted partial results in the cache!", - (long) queryCacheStats.getValue().get("inserts"), qrInserts); + assertEquals( + "Should NOT have inserted partial results in the cache!", + (long) queryCacheStats.getValue().get("inserts"), + qrInserts); - assertEquals("Should NOT have another insert", fqInserts, (long) filterCacheStats.getValue().get("inserts")); + assertEquals( + "Should NOT have another insert", + fqInserts, + (long) filterCacheStats.getValue().get("inserts")); // At the end of all this, we should have no hits in the queryResultCache. response = JQ(req("q", "*:*", "fq", fq, "indent", "true", "timeAllowed", longTimeout)); // Check that we did insert this one. 
assertEquals("Hits should still be 0", (long) filterCacheStats.getValue().get("hits"), 0L); - assertEquals("Inserts should be bumped", (long) filterCacheStats.getValue().get("inserts"), fqInserts + 1); + assertEquals( + "Inserts should be bumped", + (long) filterCacheStats.getValue().get("inserts"), + fqInserts + 1); res = (Map) fromJSONString(response); body = (Map) (res.get("response")); assertEquals("Should have exactly " + NUM_DOCS, (long) (body.get("numFound")), NUM_DOCS); header = (Map) (res.get("responseHeader")); - assertTrue("Should NOT have partial results", header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY) == null); + assertTrue( + "Should NOT have partial results", + header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY) == null); } // When looking at a problem raised on the user's list I ran across this anomaly with timeAllowed @@ -132,8 +162,15 @@ public void testCacheAssumptions() throws Exception { public void testQueryResults() throws Exception { String q = "name:e*"; SolrCore core = h.getCore(); - MetricsMap queryCacheStats = (MetricsMap)((SolrMetricManager.GaugeWrapper)core.getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.queryResultCache")).getGauge(); - Map nl = queryCacheStats.getValue(); + MetricsMap queryCacheStats = + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) + core.getCoreMetricManager() + .getRegistry() + .getMetrics() + .get("CACHE.searcher.queryResultCache")) + .getGauge(); + Map nl = queryCacheStats.getValue(); long inserts = (long) nl.get("inserts"); String response = JQ(req("q", q, "indent", "true", "timeAllowed", "1", "sleep", sleep)); @@ -147,12 +184,14 @@ public void testQueryResults() throws Exception { Map header = (Map) (res.get("responseHeader")); assertTrue("Should have fewer docs than " + NUM_DOCS, (long) (body.get("numFound")) < NUM_DOCS); - assertTrue("Should have partial results", (Boolean) (header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))); + assertTrue( + "Should have partial results", + (Boolean) (header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))); response = JQ(req("q", q, "indent", "true", "timeAllowed", longTimeout)); // Check that we did insert this one. 
- Map nl2 = queryCacheStats.getValue(); + Map nl2 = queryCacheStats.getValue(); assertEquals("Hits should still be 0", (long) nl.get("hits"), (long) nl2.get("hits")); assertTrue("Inserts should be bumped", inserts < (long) nl2.get("inserts")); @@ -167,5 +206,3 @@ public void testQueryResults() throws Exception { } } } - - diff --git a/solr/core/src/test/org/apache/solr/core/FakeDeletionPolicy.java b/solr/core/src/test/org/apache/solr/core/FakeDeletionPolicy.java index 577beeff7fb..54a5dbb2ebe 100644 --- a/solr/core/src/test/org/apache/solr/core/FakeDeletionPolicy.java +++ b/solr/core/src/test/org/apache/solr/core/FakeDeletionPolicy.java @@ -16,17 +16,14 @@ */ package org.apache.solr.core; +import java.io.IOException; +import java.util.List; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexDeletionPolicy; import org.apache.solr.common.util.NamedList; import org.apache.solr.util.plugin.NamedListInitializedPlugin; -import java.io.IOException; -import java.util.List; - -/** - * - */ +/** */ public class FakeDeletionPolicy extends IndexDeletionPolicy implements NamedListInitializedPlugin { private String var1; diff --git a/solr/core/src/test/org/apache/solr/core/HelloStream.java b/solr/core/src/test/org/apache/solr/core/HelloStream.java index 370200504dc..ba0135e6b3f 100644 --- a/solr/core/src/test/org/apache/solr/core/HelloStream.java +++ b/solr/core/src/test/org/apache/solr/core/HelloStream.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.util.List; - import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.comp.StreamComparator; import org.apache.solr.client.solrj.io.stream.StreamContext; @@ -31,21 +30,16 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter; import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; -public class HelloStream extends TupleStream implements Expressible{ +public class HelloStream extends TupleStream implements Expressible { boolean isSentHelloWorld = false; - public HelloStream() { - } + public HelloStream() {} - public HelloStream(StreamExpression expression, StreamFactory factory) throws IOException{ - - } + public HelloStream(StreamExpression expression, StreamFactory factory) throws IOException {} @Override - public void setStreamContext(StreamContext context) { - - } + public void setStreamContext(StreamContext context) {} @Override public List children() { @@ -53,14 +47,10 @@ public List children() { } @Override - public void open() throws IOException { - - } + public void open() throws IOException {} @Override - public void close() throws IOException { - - } + public void close() throws IOException {} @Override public Tuple read() throws IOException { diff --git a/solr/core/src/test/org/apache/solr/core/MockEventListener.java b/solr/core/src/test/org/apache/solr/core/MockEventListener.java index 62b849487f1..cfc1eb42839 100644 --- a/solr/core/src/test/org/apache/solr/core/MockEventListener.java +++ b/solr/core/src/test/org/apache/solr/core/MockEventListener.java @@ -16,13 +16,12 @@ */ package org.apache.solr.core; -import org.apache.solr.search.SolrIndexSearcher; - import java.util.concurrent.atomic.AtomicInteger; +import org.apache.solr.search.SolrIndexSearcher; public class MockEventListener implements SolrEventListener { - final static AtomicInteger createCounter = new AtomicInteger(0); + static final AtomicInteger createCounter = new AtomicInteger(0); public static final int getCreateCount() { return createCounter.intValue(); @@ -36,16 +35,14 @@ 
public MockEventListener() { public void postCommit() { /* NOOP */ } - + @Override public void postSoftCommit() { /* NOOP */ } @Override - public void newSearcher(SolrIndexSearcher newSearcher, - SolrIndexSearcher currentSearcher) { + public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher currentSearcher) { /* NOOP */ } - } diff --git a/solr/core/src/test/org/apache/solr/core/MockShardHandlerFactory.java b/solr/core/src/test/org/apache/solr/core/MockShardHandlerFactory.java index 272638a9029..299f751393e 100644 --- a/solr/core/src/test/org/apache/solr/core/MockShardHandlerFactory.java +++ b/solr/core/src/test/org/apache/solr/core/MockShardHandlerFactory.java @@ -28,12 +28,12 @@ /** a fake shardhandler factory that does nothing. */ public class MockShardHandlerFactory extends ShardHandlerFactory implements PluginInfoInitialized { NamedList args; - + @Override public void init(PluginInfo info) { args = info.initArgs; } - + @Override public ShardHandler getShardHandler() { return new ShardHandler() { @@ -41,8 +41,7 @@ public ShardHandler getShardHandler() { public void prepDistributed(ResponseBuilder rb) {} @Override - public void submit(ShardRequest sreq, String shard, - ModifiableSolrParams params) {} + public void submit(ShardRequest sreq, String shard, ModifiableSolrParams params) {} @Override public ShardResponse takeCompletedIncludingErrors() { diff --git a/solr/core/src/test/org/apache/solr/core/PluginInfoTest.java b/solr/core/src/test/org/apache/solr/core/PluginInfoTest.java index 77ffc99be9d..df2555af218 100644 --- a/solr/core/src/test/org/apache/solr/core/PluginInfoTest.java +++ b/solr/core/src/test/org/apache/solr/core/PluginInfoTest.java @@ -17,57 +17,60 @@ package org.apache.solr.core; import java.util.List; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.util.DOMUtilTestBase; import org.junit.Test; import org.w3c.dom.Node; /** - * TestCase for PluginInfo. - * Extends DOMUtilTestBase because PluginInfo heavily depends on DOMUtil + * TestCase for PluginInfo. Extends DOMUtilTestBase because PluginInfo heavily depends on DOMUtil * and the convinient {@link #getNode(String, String)} method. 
*/ public class PluginInfoTest extends DOMUtilTestBase { - private final static String configWithNoChildren = "" + - "" + - "aString" + - "1" + - "true" + - "1.1f" + - "2.2d" + - "2" + - "" + - "" + - "" + - "aString" + - "" + - ""; - private final static String configWith2Children = "" + - "" + - "1" + - "0" + - "1" + - "1.1f" + - "2.2d" + - "2" + - "" + - "" + - "" + - "aString" + - "" + - ""; + private static final String configWithNoChildren = + "" + + "" + + "aString" + + "1" + + "true" + + "1.1f" + + "2.2d" + + "2" + + "" + + "" + + "" + + "aString" + + "" + + ""; + private static final String configWith2Children = + "" + + "" + + "1" + + "0" + + "1" + + "1.1f" + + "2.2d" + + "2" + + "" + + "" + + "" + + "aString" + + "" + + ""; - // This is in fact a DOMUtil test, but it is here for completeness + // This is in fact a DOMUtil test, but it is here for completeness @Test public void testNameRequired() throws Exception { Node nodeWithNoName = getNode("", "plugin"); try { SolrTestCaseJ4.ignoreException("missing mandatory attribute"); - RuntimeException thrown = expectThrows(RuntimeException.class, () -> { - PluginInfo pi = new PluginInfo(nodeWithNoName, "Node with No name", true, false); - }); + RuntimeException thrown = + expectThrows( + RuntimeException.class, + () -> { + PluginInfo pi = new PluginInfo(nodeWithNoName, "Node with No name", true, false); + }); assertTrue(thrown.getMessage().contains("missing mandatory attribute")); } finally { SolrTestCaseJ4.resetExceptionIgnores(); @@ -77,15 +80,18 @@ public void testNameRequired() throws Exception { PluginInfo pi2 = new PluginInfo(nodeWithAName, "Node with a Name", true, false); assertTrue(pi2.name.equals("myName")); } - + @Test public void testClassRequired() throws Exception { Node nodeWithNoClass = getNode("", "plugin"); try { SolrTestCaseJ4.ignoreException("missing mandatory attribute"); - RuntimeException thrown = expectThrows(RuntimeException.class, () -> { - PluginInfo pi = new PluginInfo(nodeWithNoClass, "Node with No Class", false, true); - }); + RuntimeException thrown = + expectThrows( + RuntimeException.class, + () -> { + PluginInfo pi = new PluginInfo(nodeWithNoClass, "Node with No Class", false, true); + }); assertTrue(thrown.getMessage().contains("missing mandatory attribute")); } finally { SolrTestCaseJ4.resetExceptionIgnores(); @@ -104,7 +110,6 @@ public void testIsEnabled() throws Exception { node = getNode("", "plugin"); pi = new PluginInfo(node, "not enabled", false, false); assertFalse(pi.isEnabled()); - } @Test @@ -115,11 +120,10 @@ public void testIsDefault() throws Exception { node = getNode("", "plugin"); pi = new PluginInfo(node, "not default", false, false); assertFalse(pi.isDefault()); - } @Test - public void testNoChildren() throws Exception{ + public void testNoChildren() throws Exception { Node node = getNode(configWithNoChildren, "/plugin"); PluginInfo pi = new PluginInfo(node, "from static", false, false); assertTrue(pi.children.isEmpty()); @@ -129,7 +133,7 @@ public void testNoChildren() throws Exception{ public void testHasChildren() throws Exception { Node node = getNode(configWith2Children, "plugin"); PluginInfo pi = new PluginInfo(node, "node with 2 Children", false, false); - assertTrue( pi.children.size() == 2 ); + assertTrue(pi.children.size() == 2); } @Test @@ -140,7 +144,7 @@ public void testChild() throws Exception { assertNotNull(childInfo); PluginInfo notExistent = pi.getChild("doesnotExist"); assertNull(notExistent); - assertTrue( childInfo instanceof PluginInfo ); + assertTrue(childInfo 
instanceof PluginInfo); assertTrue((Integer) childInfo.initArgs.get("index") == 0); Node node2 = getNode(configWithNoChildren, "plugin"); PluginInfo pi2 = new PluginInfo(node2, "with No Children", false, false); @@ -154,9 +158,9 @@ public void testChildren() throws Exception { PluginInfo pi = new PluginInfo(node, "with children", false, false); List children = pi.getChildren("child"); assertTrue(children.size() == 2); - for ( PluginInfo childInfo : children ) { + for (PluginInfo childInfo : children) { assertNotNull(childInfo); - assertTrue( childInfo instanceof PluginInfo ); + assertTrue(childInfo instanceof PluginInfo); } } @@ -164,6 +168,6 @@ public void testChildren() throws Exception { public void testInitArgsCount() throws Exception { Node node = getNode(configWithNoChildren, "plugin"); PluginInfo pi = new PluginInfo(node, "from static", true, false); - assertTrue( pi.initArgs.size() == node.getChildNodes().getLength() ); + assertTrue(pi.initArgs.size() == node.getChildNodes().getLength()); } } diff --git a/solr/core/src/test/org/apache/solr/core/QueryResultKeyTest.java b/solr/core/src/test/org/apache/solr/core/QueryResultKeyTest.java index b7147ee3251..9db7ee9d5be 100644 --- a/solr/core/src/test/org/apache/solr/core/QueryResultKeyTest.java +++ b/solr/core/src/test/org/apache/solr/core/QueryResultKeyTest.java @@ -16,10 +16,9 @@ */ package org.apache.solr.core; -import java.util.Arrays; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; - import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; @@ -37,18 +36,20 @@ public class QueryResultKeyTest extends SolrTestCaseJ4 { public void testFiltersOutOfOrder1() { // the hashcode should be the same even when the list // of filters is in a different order - + Sort sort = new Sort(new SortField("test", SortField.Type.INT)); BooleanQuery.Builder query = new BooleanQuery.Builder(); query.add(new TermQuery(new Term("test", "field")), Occur.MUST); - - List filters = Arrays.asList(new TermQuery(new Term("test", "field")), - new TermQuery(new Term("test2", "field2"))); - QueryResultKey qrk1 = new QueryResultKey(query.build() , filters, sort, 1); - - List filters2 = Arrays.asList(new TermQuery(new Term("test2", "field2")), - new TermQuery(new Term("test", "field"))); - QueryResultKey qrk2 = new QueryResultKey(query.build() , filters2, sort, 1); + + List filters = + Arrays.asList( + new TermQuery(new Term("test", "field")), new TermQuery(new Term("test2", "field2"))); + QueryResultKey qrk1 = new QueryResultKey(query.build(), filters, sort, 1); + + List filters2 = + Arrays.asList( + new TermQuery(new Term("test2", "field2")), new TermQuery(new Term("test", "field"))); + QueryResultKey qrk2 = new QueryResultKey(query.build(), filters2, sort, 1); assertKeyEquals(qrk1, qrk2); } @@ -71,7 +72,7 @@ public void testFiltersOutOfOrder2() { public void testQueryResultKeyUnSortedFiltersWithDups() { Query query = new TermQuery(new Term("main", "val")); - // we need Query clauses that have identical hashCodes + // we need Query clauses that have identical hashCodes // but are not equal unless the term is equals Query fq_aa = new FlatHashTermQuery("fq_a"); Query fq_ab = new FlatHashTermQuery("fq_a"); @@ -89,19 +90,19 @@ public void testQueryResultKeyUnSortedFiltersWithDups() { assertEquals(fq_ac, fq_aa); assertEquals(fq_ac, fq_ab); - assertTrue( ! fq_aa.equals(fq_zz) ); - assertTrue( ! fq_ab.equals(fq_zz) ); - assertTrue( ! fq_ac.equals(fq_zz) ); - assertTrue( ! 
fq_zz.equals(fq_aa) ); - assertTrue( ! fq_zz.equals(fq_ab) ); - assertTrue( ! fq_zz.equals(fq_ac) ); + assertTrue(!fq_aa.equals(fq_zz)); + assertTrue(!fq_ab.equals(fq_zz)); + assertTrue(!fq_ac.equals(fq_zz)); + assertTrue(!fq_zz.equals(fq_aa)); + assertTrue(!fq_zz.equals(fq_ab)); + assertTrue(!fq_zz.equals(fq_ac)); List filters1 = Arrays.asList(fq_aa, fq_ab); List filters2 = Arrays.asList(fq_zz, fq_ac); QueryResultKey key1 = new QueryResultKey(query, filters1, null, 0); QueryResultKey key2 = new QueryResultKey(query, filters2, null, 0); - + assertEquals(key1.hashCode(), key2.hashCode()); assertKeyNotEquals(key1, key2); @@ -109,15 +110,14 @@ public void testQueryResultKeyUnSortedFiltersWithDups() { public void testRandomQueryKeyEquality() { - final int minIters = atLeast(100 * 1000); final Query base = new FlatHashTermQuery("base"); - + // ensure we cover both code paths at least once boolean didEquals = false; boolean didNotEquals = false; int iter = 1; - while (iter <= minIters || (! didEquals ) || (! didNotEquals ) ) { + while (iter <= minIters || (!didEquals) || (!didNotEquals)) { iter++; int[] numsA = smallArrayOfRandomNumbers(); int[] numsB = smallArrayOfRandomNumbers(); @@ -136,24 +136,25 @@ public void testRandomQueryKeyEquality() { } assert minIters <= iter; } - + public void testMinExactCount() { int[] nums = smallArrayOfRandomNumbers(); final Query base = new FlatHashTermQuery("base"); - assertKeyEquals(new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0, 10), + assertKeyEquals( + new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0, 10), new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0, 10)); - assertKeyNotEquals(new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0, 10), + assertKeyNotEquals( + new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0, 10), new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0, 20)); - assertKeyNotEquals(new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0, 10), - new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0));//Integer.MAX_VALUE - assertKeyEquals(new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0, Integer.MAX_VALUE), + assertKeyNotEquals( + new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0, 10), + new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0)); // Integer.MAX_VALUE + assertKeyEquals( + new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0, Integer.MAX_VALUE), new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0)); - } - - /** - * does bi-directional equality check as well as verifying hashCode - */ + + /** does bi-directional equality check as well as verifying hashCode */ public void assertKeyEquals(QueryResultKey key1, QueryResultKey key2) { assertNotNull(key1); assertNotNull(key2); @@ -163,33 +164,30 @@ public void assertKeyEquals(QueryResultKey key1, QueryResultKey key2) { assertEquals(key2, key1); } - /** - * does bi-directional check that the keys are not equals - */ + /** does bi-directional check that the keys are not equals */ public void assertKeyNotEquals(QueryResultKey key1, QueryResultKey key2) { - assertTrue( ! key1.equals(key2) ); - assertTrue( ! key2.equals(key1) ); + assertTrue(!key1.equals(key2)); + assertTrue(!key2.equals(key1)); } /** - * returns a "small" list of "small" random numbers. The idea behind this method is - * that multiple calls have a decent change of returning two arrays which are the - * same size and contain the same numbers but in a differed order. 
+ * returns a "small" list of "small" random numbers. The idea behind this method is that multiple + * calls have a decent change of returning two arrays which are the same size and contain the same + * numbers but in a differed order. * - * the array is guaranteed to always have at least 1 element + *

the array is guaranteed to always have at least 1 element */ private int[] smallArrayOfRandomNumbers() { int size = TestUtil.nextInt(random(), 1, 5); int[] result = new int[size]; - for (int i=0; i < size; i++) { + for (int i = 0; i < size; i++) { result[i] = TestUtil.nextInt(random(), 1, 5); } return result; } /** - * Creates an array of Filter queries using {@link FlatHashTermQuery} based on the - * specified ints + * Creates an array of Filter queries using {@link FlatHashTermQuery} based on the specified ints */ private List buildFiltersFromNumbers(int[] values) { ArrayList filters = new ArrayList<>(values.length); @@ -200,8 +198,8 @@ private List buildFiltersFromNumbers(int[] values) { } /** - * Quick and dirty subclass of TermQuery that uses fixed field name and a constant - * value hashCode, regardless of the Term value. + * Quick and dirty subclass of TermQuery that uses fixed field name and a constant value hashCode, + * regardless of the Term value. */ private static class FlatHashTermQuery extends TermQuery { public FlatHashTermQuery(String val) { diff --git a/solr/core/src/test/org/apache/solr/core/RAMDirectoryFactoryTest.java b/solr/core/src/test/org/apache/solr/core/RAMDirectoryFactoryTest.java index 3ee673272f9..862cfd2ac95 100644 --- a/solr/core/src/test/org/apache/solr/core/RAMDirectoryFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/core/RAMDirectoryFactoryTest.java @@ -17,16 +17,13 @@ package org.apache.solr.core; import java.io.IOException; - import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; import org.apache.lucene.store.LockFactory; import org.apache.solr.SolrTestCase; import org.apache.solr.core.DirectoryFactory.DirContext; -/** - * Test-case for RAMDirectoryFactory - */ +/** Test-case for RAMDirectoryFactory */ public class RAMDirectoryFactoryTest extends SolrTestCase { public void test() throws Exception { @@ -36,17 +33,21 @@ public void test() throws Exception { private void dotestOpenReturnsTheSameForSamePath() throws IOException { final Directory directory = new ByteBuffersDirectory(); - RAMDirectoryFactory factory = new RAMDirectoryFactory() { - @Override - protected Directory create(String path, LockFactory lockFactory, DirContext dirContext) { - return directory; - } - }; + RAMDirectoryFactory factory = + new RAMDirectoryFactory() { + @Override + protected Directory create(String path, LockFactory lockFactory, DirContext dirContext) { + return directory; + } + }; String path = "/fake/path"; Directory dir1 = factory.get(path, DirContext.DEFAULT, DirectoryFactory.LOCK_TYPE_SINGLE); Directory dir2 = factory.get(path, DirContext.DEFAULT, DirectoryFactory.LOCK_TYPE_SINGLE); - assertEquals("RAMDirectoryFactory should not create new instance of RefCntRamDirectory " + - "every time open() is called for the same path", dir1, dir2); + assertEquals( + "RAMDirectoryFactory should not create new instance of RefCntRamDirectory " + + "every time open() is called for the same path", + dir1, + dir2); factory.release(dir1); factory.release(dir2); @@ -55,9 +56,12 @@ protected Directory create(String path, LockFactory lockFactory, DirContext dirC private void dotestOpenSucceedForEmptyDir() throws IOException { RAMDirectoryFactory factory = new RAMDirectoryFactory(); - Directory dir = factory.get("/fake/path", DirContext.DEFAULT, DirectoryFactory.LOCK_TYPE_SINGLE); - assertNotNull("RAMDirectoryFactory should create RefCntRamDirectory even if the path doen't lead " + - "to index directory on the file system", dir); + Directory 
dir = + factory.get("/fake/path", DirContext.DEFAULT, DirectoryFactory.LOCK_TYPE_SINGLE); + assertNotNull( + "RAMDirectoryFactory should create RefCntRamDirectory even if the path doen't lead " + + "to index directory on the file system", + dir); factory.release(dir); factory.close(); } diff --git a/solr/core/src/test/org/apache/solr/core/RequestHandlersTest.java b/solr/core/src/test/org/apache/solr/core/RequestHandlersTest.java index c12957b6347..80d8c1f9c62 100644 --- a/solr/core/src/test/org/apache/solr/core/RequestHandlersTest.java +++ b/solr/core/src/test/org/apache/solr/core/RequestHandlersTest.java @@ -16,9 +16,8 @@ */ package org.apache.solr.core; -import java.util.Map; - import com.codahale.metrics.Gauge; +import java.util.Map; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.metrics.SolrMetricManager; import org.apache.solr.request.SolrRequestHandler; @@ -36,68 +35,80 @@ public void testInitCount() { String registry = h.getCore().getCoreMetricManager().getRegistryName(); SolrMetricManager manager = h.getCoreContainer().getMetricManager(); @SuppressWarnings({"unchecked"}) - Gauge g = (Gauge)manager.registry(registry).getMetrics().get("QUERY./mock.initCount"); - assertEquals("Incorrect init count", - 1, g.getValue().intValue()); + Gauge g = + (Gauge) manager.registry(registry).getMetrics().get("QUERY./mock.initCount"); + assertEquals("Incorrect init count", 1, g.getValue().intValue()); } @Test - public void testImplicitRequestHandlers(){ + public void testImplicitRequestHandlers() { SolrCore core = h.getCore(); - assertNotNull(core.getRequestHandler( "/update/json")); - assertNotNull(core.getRequestHandler( "/update/json/docs")); - assertNotNull(core.getRequestHandler( "/update/csv")); + assertNotNull(core.getRequestHandler("/update/json")); + assertNotNull(core.getRequestHandler("/update/json/docs")); + assertNotNull(core.getRequestHandler("/update/csv")); } @Test public void testLazyLoading() { SolrCore core = h.getCore(); - PluginBag.PluginHolder handler = core.getRequestHandlers().getRegistry().get("/lazy"); + PluginBag.PluginHolder handler = + core.getRequestHandlers().getRegistry().get("/lazy"); assertFalse(handler.isLoaded()); - - assertU(adoc("id", "42", - "name", "Zapp Brannigan")); - assertU(adoc("id", "43", - "title", "Democratic Order of Planets")); - assertU(adoc("id", "44", - "name", "The Zapper")); - assertU(adoc("id", "45", - "title", "25 star General")); - assertU(adoc("id", "46", - "subject", "Defeated the pacifists of the Gandhi nebula")); - assertU(adoc("id", "47", - "text", "line up and fly directly at the enemy death cannons, clogging them with wreckage!")); + + assertU( + adoc( + "id", "42", + "name", "Zapp Brannigan")); + assertU( + adoc( + "id", "43", + "title", "Democratic Order of Planets")); + assertU( + adoc( + "id", "44", + "name", "The Zapper")); + assertU( + adoc( + "id", "45", + "title", "25 star General")); + assertU( + adoc( + "id", "46", + "subject", "Defeated the pacifists of the Gandhi nebula")); + assertU( + adoc( + "id", "47", + "text", + "line up and fly directly at the enemy death cannons, clogging them with wreckage!")); assertU(commit()); - assertQ("lazy request handler returns all matches", - req("q","id:[42 TO 47]"), - "*[count(//doc)=6]"); + assertQ( + "lazy request handler returns all matches", req("q", "id:[42 TO 47]"), "*[count(//doc)=6]"); - // But it should behave just like the 'defaults' request handler above - assertQ("lazy handler returns fewer matches", - req("q", "id:[42 TO 47]", "qt","/lazy"), - 
"*[count(//doc)=4]" - ); + // But it should behave just like the 'defaults' request handler above + assertQ( + "lazy handler returns fewer matches", + req("q", "id:[42 TO 47]", "qt", "/lazy"), + "*[count(//doc)=4]"); - assertQ("lazy handler includes highlighting", - req("q", "name:Zapp OR title:General", "qt","/lazy"), - "//lst[@name='highlighting']" - ); + assertQ( + "lazy handler includes highlighting", + req("q", "name:Zapp OR title:General", "qt", "/lazy"), + "//lst[@name='highlighting']"); } @Test - public void testPathNormalization() - { + public void testPathNormalization() { SolrCore core = h.getCore(); - SolrRequestHandler h1 = core.getRequestHandler("/update" ); - assertNotNull( h1 ); - - SolrRequestHandler h2 = core.getRequestHandler("/update/" ); - assertNotNull( h2 ); - - assertEquals( h1, h2 ); // the same object - - assertNull( core.getRequestHandler("/update/asdgadsgas" ) ); // prefix + SolrRequestHandler h1 = core.getRequestHandler("/update"); + assertNotNull(h1); + + SolrRequestHandler h2 = core.getRequestHandler("/update/"); + assertNotNull(h2); + + assertEquals(h1, h2); // the same object + + assertNull(core.getRequestHandler("/update/asdgadsgas")); // prefix } @Test @@ -106,12 +117,16 @@ public void testStatistics() { SolrRequestHandler updateHandler = core.getRequestHandler("/update"); SolrRequestHandler termHandler = core.getRequestHandler("/terms"); - assertU(adoc("id", "47", - "text", "line up and fly directly at the enemy death cannons, clogging them with wreckage!")); + assertU( + adoc( + "id", + "47", + "text", + "line up and fly directly at the enemy death cannons, clogging them with wreckage!")); assertU(commit()); - Map updateStats = updateHandler.getSolrMetricsContext().getMetricsSnapshot(); - Map termStats = termHandler.getSolrMetricsContext().getMetricsSnapshot(); + Map updateStats = updateHandler.getSolrMetricsContext().getMetricsSnapshot(); + Map termStats = termHandler.getSolrMetricsContext().getMetricsSnapshot(); Long updateTime = (Long) updateStats.get("UPDATE./update.totalTime"); Long termTime = (Long) termStats.get("QUERY./terms.totalTime"); diff --git a/solr/core/src/test/org/apache/solr/core/ResourceLoaderTest.java b/solr/core/src/test/org/apache/solr/core/ResourceLoaderTest.java index d37f7497fd0..a22b3bfdbb8 100644 --- a/solr/core/src/test/org/apache/solr/core/ResourceLoaderTest.java +++ b/solr/core/src/test/org/apache/solr/core/ResourceLoaderTest.java @@ -16,6 +16,9 @@ */ package org.apache.solr.core; +import static org.apache.solr.core.SolrResourceLoader.*; +import static org.hamcrest.core.Is.is; + import java.io.IOException; import java.io.InputStream; import java.nio.charset.CharacterCodingException; @@ -29,13 +32,12 @@ import java.util.Map; import java.util.jar.JarEntry; import java.util.jar.JarOutputStream; - +import org.apache.lucene.analysis.TokenFilterFactory; import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.TokenizerFactory; import org.apache.lucene.analysis.core.KeywordTokenizerFactory; import org.apache.lucene.analysis.ngram.NGramFilterFactory; import org.apache.lucene.util.ResourceLoaderAware; -import org.apache.lucene.analysis.TokenFilterFactory; -import org.apache.lucene.analysis.TokenizerFactory; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.handler.admin.LukeRequestHandler; @@ -44,9 +46,6 @@ import org.apache.solr.util.plugin.SolrCoreAware; import org.junit.After; -import static org.apache.solr.core.SolrResourceLoader.*; -import static 
org.hamcrest.core.Is.is; - public class ResourceLoaderTest extends SolrTestCaseJ4 { @Override @After @@ -65,20 +64,26 @@ public void testInstanceDir() throws Exception { public void testEscapeInstanceDir() throws Exception { Path temp = createTempDir("testEscapeInstanceDir"); - Files.write(temp.resolve("dummy.txt"), new byte[]{}); + Files.write(temp.resolve("dummy.txt"), new byte[] {}); Path instanceDir = temp.resolve("instance"); Files.createDirectories(instanceDir.resolve("conf")); setUnsafeResourceLoading(false); try (SolrResourceLoader loader = new SolrResourceLoader(instanceDir)) { // Path traversal - assertTrue(assertThrows(IOException.class, () -> - loader.openResource("../../dummy.txt").close()).getMessage().contains("Can't find resource")); + assertTrue( + assertThrows(IOException.class, () -> loader.openResource("../../dummy.txt").close()) + .getMessage() + .contains("Can't find resource")); assertNull(loader.resourceLocation("../../dummy.txt")); // UNC paths - assertTrue(assertThrows(SolrResourceNotFoundException.class, () -> - loader.openResource("\\\\192.168.10.10\\foo").close()).getMessage().contains("Resource '\\\\192.168.10.10\\foo' could not be loaded.")); + assertTrue( + assertThrows( + SolrResourceNotFoundException.class, + () -> loader.openResource("\\\\192.168.10.10\\foo").close()) + .getMessage() + .contains("Resource '\\\\192.168.10.10\\foo' could not be loaded.")); assertNull(loader.resourceLocation("\\\\192.168.10.10\\foo")); } @@ -89,8 +94,12 @@ public void testEscapeInstanceDir() throws Exception { assertNotNull(loader.resourceLocation("../../dummy.txt")); // UNC paths never allowed - assertTrue(assertThrows(SolrResourceNotFoundException.class, () -> - loader.openResource("\\\\192.168.10.10\\foo").close()).getMessage().contains("Resource '\\\\192.168.10.10\\foo' could not be loaded.")); + assertTrue( + assertThrows( + SolrResourceNotFoundException.class, + () -> loader.openResource("\\\\192.168.10.10\\foo").close()) + .getMessage() + .contains("Resource '\\\\192.168.10.10\\foo' could not be loaded.")); assertNull(loader.resourceLocation("\\\\192.168.10.10\\foo")); } } @@ -105,41 +114,42 @@ private void setUnsafeResourceLoading(boolean unsafe) { @SuppressWarnings({"unchecked"}) public void testAwareCompatibility() throws Exception { - + final Class clazz1 = ResourceLoaderAware.class; // Check ResourceLoaderAware valid objects - assertAwareCompatibility(clazz1, new NGramFilterFactory(map("minGramSize", "1", "maxGramSize", "2"))); + assertAwareCompatibility( + clazz1, new NGramFilterFactory(map("minGramSize", "1", "maxGramSize", "2"))); assertAwareCompatibility(clazz1, new KeywordTokenizerFactory(new HashMap<>())); - + // Make sure it throws an error for invalid objects - Object[] invalid = new Object[] { - // new NGramTokenFilter( null ), - "hello", 12.3f, - new LukeRequestHandler(), - new JSONResponseWriter() - }; - for( Object obj : invalid ) { + Object[] invalid = + new Object[] { + // new NGramTokenFilter( null ), + "hello", 12.3f, new LukeRequestHandler(), new JSONResponseWriter() + }; + for (Object obj : invalid) { expectThrows(SolrException.class, () -> assertAwareCompatibility(clazz1, obj)); } - final Class clazz2 = SolrCoreAware.class; // Check ResourceLoaderAware valid objects assertAwareCompatibility(clazz2, new LukeRequestHandler()); assertAwareCompatibility(clazz2, new FacetComponent()); assertAwareCompatibility(clazz2, new JSONResponseWriter()); - + // Make sure it throws an error for invalid objects - invalid = new Object[] { - new 
NGramFilterFactory(map("minGramSize", "1", "maxGramSize", "2")), - "hello", 12.3f , - new KeywordTokenizerFactory(new HashMap<>()) - }; - for( Object obj : invalid ) { + invalid = + new Object[] { + new NGramFilterFactory(map("minGramSize", "1", "maxGramSize", "2")), + "hello", + 12.3f, + new KeywordTokenizerFactory(new HashMap<>()) + }; + for (Object obj : invalid) { expectThrows(SolrException.class, () -> assertAwareCompatibility(clazz2, obj)); } } - + public void testBOMMarkers() throws Exception { final String fileWithBom = "stopwithbom.txt"; SolrResourceLoader loader = new SolrResourceLoader(TEST_PATH().resolve("collection1")); @@ -147,32 +157,39 @@ public void testBOMMarkers() throws Exception { // preliminary sanity check InputStream bomStream = loader.openResource(fileWithBom); try { - final byte[] bomExpected = new byte[] { -17, -69, -65 }; + final byte[] bomExpected = new byte[] {-17, -69, -65}; final byte[] firstBytes = new byte[3]; - - assertEquals("Should have been able to read 3 bytes from bomStream", - 3, bomStream.read(firstBytes)); - assertTrue("This test only works if " + fileWithBom + - " contains a BOM -- it appears someone removed it.", - Arrays.equals(bomExpected, firstBytes)); + assertEquals( + "Should have been able to read 3 bytes from bomStream", 3, bomStream.read(firstBytes)); + + assertTrue( + "This test only works if " + + fileWithBom + + " contains a BOM -- it appears someone removed it.", + Arrays.equals(bomExpected, firstBytes)); } finally { - try { bomStream.close(); } catch (Exception e) { /* IGNORE */ } + try { + bomStream.close(); + } catch (Exception e) { + /* IGNORE */ + } } // now make sure getLines skips the BOM... List lines = loader.getLines(fileWithBom); assertEquals(1, lines.size()); assertEquals("BOMsAreEvil", lines.get(0)); - + loader.close(); } - + public void testWrongEncoding() throws Exception { String wrongEncoding = "stopwordsWrongEncoding.txt"; - try(SolrResourceLoader loader = new SolrResourceLoader(TEST_PATH().resolve("collection1"))) { + try (SolrResourceLoader loader = new SolrResourceLoader(TEST_PATH().resolve("collection1"))) { // ensure we get our exception - SolrException thrown = expectThrows(SolrException.class, () -> loader.getLines(wrongEncoding)); + SolrException thrown = + expectThrows(SolrException.class, () -> loader.getLines(wrongEncoding)); assertTrue(thrown.getCause() instanceof CharacterCodingException); } } @@ -191,11 +208,13 @@ public void testClassLoaderLibs() throws Exception { Path otherLib = tmpRoot.resolve("otherLib"); Files.createDirectories(otherLib); - try (JarOutputStream os = new JarOutputStream(Files.newOutputStream(otherLib.resolve("jar2.jar")))) { + try (JarOutputStream os = + new JarOutputStream(Files.newOutputStream(otherLib.resolve("jar2.jar")))) { os.putNextEntry(new JarEntry("explicitFile")); os.closeEntry(); } - try (JarOutputStream os = new JarOutputStream(Files.newOutputStream(otherLib.resolve("jar3.jar")))) { + try (JarOutputStream os = + new JarOutputStream(Files.newOutputStream(otherLib.resolve("jar3.jar")))) { os.putNextEntry(new JarEntry("otherFile")); os.closeEntry(); } @@ -207,7 +226,8 @@ public void testClassLoaderLibs() throws Exception { assertNotNull(loader.getClassLoader().getResource("aLibFile")); // add inidividual jars from other paths - loader.addToClassLoader(Collections.singletonList(otherLib.resolve("jar2.jar").toUri().toURL())); + loader.addToClassLoader( + Collections.singletonList(otherLib.resolve("jar2.jar").toUri().toURL())); 
assertNotNull(loader.getClassLoader().getResource("explicitFile")); assertNull(loader.getClassLoader().getResource("otherFile")); @@ -217,11 +237,11 @@ public void testClassLoaderLibs() throws Exception { assertNotNull(loader.getClassLoader().getResource("otherFile")); loader.close(); } - + @Deprecated public static final class DeprecatedTokenFilterFactory extends TokenFilterFactory { - public DeprecatedTokenFilterFactory(Map args) { + public DeprecatedTokenFilterFactory(Map args) { super(args); } @@ -229,33 +249,50 @@ public DeprecatedTokenFilterFactory(Map args) { public TokenStream create(TokenStream input) { return null; } - } @SuppressWarnings("deprecation") public void testLoadDeprecatedFactory() throws Exception { - SolrResourceLoader loader = new SolrResourceLoader(Paths.get("solr/collection1").toAbsolutePath()); + SolrResourceLoader loader = + new SolrResourceLoader(Paths.get("solr/collection1").toAbsolutePath()); // ensure we get our exception - loader.newInstance(DeprecatedTokenFilterFactory.class.getName(), TokenFilterFactory.class, null, - new Class[] { Map.class }, new Object[] { new HashMap() }); + loader.newInstance( + DeprecatedTokenFilterFactory.class.getName(), + TokenFilterFactory.class, + null, + new Class[] {Map.class}, + new Object[] {new HashMap()}); // TODO: How to check that a warning was printed to log file? - loader.close(); + loader.close(); } public void testCacheWrongType() throws Exception { clearCache(); SolrResourceLoader loader = new SolrResourceLoader(TEST_PATH().resolve("collection1")); - Class[] params = { Map.class }; - Map args = Map.of("minGramSize", "1", "maxGramSize", "2"); + Class[] params = {Map.class}; + Map args = Map.of("minGramSize", "1", "maxGramSize", "2"); final String className = "solr.NGramTokenizerFactory"; - // We could fail here since the class name and expected type don't match, but instead we try to infer what the user actually meant - TokenFilterFactory tff = loader.newInstance(className, TokenFilterFactory.class, new String[0], params, new Object[]{new HashMap<>(args)}); + // We could fail here since the class name and expected type don't match, but instead we try to + // infer what the user actually meant + TokenFilterFactory tff = + loader.newInstance( + className, + TokenFilterFactory.class, + new String[0], + params, + new Object[] {new HashMap<>(args)}); assertNotNull("Did not load TokenFilter when asking for corresponding Tokenizer", tff); // This should work, but won't if earlier call succeeding corrupting the cache - TokenizerFactory tf = loader.newInstance(className, TokenizerFactory.class, new String[0], params, new Object[]{new HashMap<>(args)}); + TokenizerFactory tf = + loader.newInstance( + className, + TokenizerFactory.class, + new String[0], + params, + new Object[] {new HashMap<>(args)}); assertNotNull("Did not load Tokenizer after bad call earlier", tf); loader.close(); } diff --git a/solr/core/src/test/org/apache/solr/core/SOLR749Test.java b/solr/core/src/test/org/apache/solr/core/SOLR749Test.java index 9a8218f3af8..534e419e4c9 100644 --- a/solr/core/src/test/org/apache/solr/core/SOLR749Test.java +++ b/solr/core/src/test/org/apache/solr/core/SOLR749Test.java @@ -15,17 +15,17 @@ * limitations under the License. 
*/ package org.apache.solr.core; + import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.search.QParserPlugin; import org.apache.solr.search.FooQParserPlugin; +import org.apache.solr.search.QParserPlugin; import org.apache.solr.search.ValueSourceParser; import org.junit.BeforeClass; - /** * This class started life as a test for SOLR-749 to prove that value source plugins were properly - * intialized, but it has since evolved to also help prove that ValueSource's are not asked to compute - * values for documents unnecessarily. + * intialized, but it has since evolved to also help prove that ValueSource's are not asked to + * compute values for documents unnecessarily. * * @see CountUsageValueSourceParser * @see SOLR-749 @@ -33,7 +33,7 @@ public class SOLR749Test extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-SOLR-749.xml","schema.xml"); + initCore("solrconfig-SOLR-749.xml", "schema.xml"); } public void testConstruction() throws Exception { @@ -41,55 +41,72 @@ public void testConstruction() throws Exception { assertTrue("core is null and it shouldn't be", core != null); QParserPlugin parserPlugin = core.getQueryPlugin(QParserPlugin.DEFAULT_QTYPE); assertTrue("parserPlugin is null and it shouldn't be", parserPlugin != null); - assertTrue("parserPlugin is not an instanceof " + FooQParserPlugin.class, parserPlugin instanceof FooQParserPlugin); + assertTrue( + "parserPlugin is not an instanceof " + FooQParserPlugin.class, + parserPlugin instanceof FooQParserPlugin); ValueSourceParser vsp = core.getValueSourceParser("boost"); assertTrue("vsp is null and it shouldn't be", vsp != null); - assertTrue("vsp is not an instanceof " + DummyValueSourceParser.class, vsp instanceof DummyValueSourceParser); + assertTrue( + "vsp is not an instanceof " + DummyValueSourceParser.class, + vsp instanceof DummyValueSourceParser); } public void testHowManyDocsHaveBoostFunctionComputed() throws Exception { for (int i = 0; i < 100; i++) { - assertU(adoc("id",""+i)); + assertU(adoc("id", "" + i)); } assertU(commit()); - // NOTE: we can't rely on the default lucene syntax because "FooQParser" is registered as "lucene" - assertQ(req("q","{!notfoo}*:*"), "//result[@numFound=100]"); - assertQ(req("q","{!notfoo}id_i1:[* TO 49]"), "//result[@numFound=50]"); + // NOTE: we can't rely on the default lucene syntax because "FooQParser" is registered as + // "lucene" + assertQ(req("q", "{!notfoo}*:*"), "//result[@numFound=100]"); + assertQ(req("q", "{!notfoo}id_i1:[* TO 49]"), "//result[@numFound=50]"); try { - assertQ("query wrapped in boost func should only eval func for query matches", - req("q","{!boost b=$boostFunc defType=notfoo}id_i1:[* TO 49]", - "boostFunc", "countUsage('boost_func',3.4)"), - "//result[@numFound=50]"); + assertQ( + "query wrapped in boost func should only eval func for query matches", + req( + "q", "{!boost b=$boostFunc defType=notfoo}id_i1:[* TO 49]", + "boostFunc", "countUsage('boost_func',3.4)"), + "//result[@numFound=50]"); assertEquals(50, CountUsageValueSourceParser.getAndClearCount("boost_func")); - assertQ("func query that is filtered should be evaled only for filtered docs", - req("q","{!func}product(id_i1,countUsage('func_q',4.5))", - "fq", "{!notfoo}id_i1:[30 TO 59]"), - "//result[@numFound=30]"); + assertQ( + "func query that is filtered should be evaled only for filtered docs", + req( + "q", "{!func}product(id_i1,countUsage('func_q',4.5))", + "fq", "{!notfoo}id_i1:[30 TO 59]"), + "//result[@numFound=30]"); 
assertEquals(30, CountUsageValueSourceParser.getAndClearCount("func_q")); - assertQ("func query that wraps a query which is also used as a should be evaled only for filtered docs", - req("q","{!func}product(query($qq),countUsage('func_q_wrapping_fq',4.5))", - "qq", "{!notfoo}id_i1:[20 TO 39]", - "fq", "{!query v=$qq}"), - "//result[@numFound=20]"); + assertQ( + "func query that wraps a query which is also used as a should be evaled only for filtered docs", + req( + "q", "{!func}product(query($qq),countUsage('func_q_wrapping_fq',4.5))", + "qq", "{!notfoo}id_i1:[20 TO 39]", + "fq", "{!query v=$qq}"), + "//result[@numFound=20]"); assertEquals(20, CountUsageValueSourceParser.getAndClearCount("func_q_wrapping_fq")); - assertQ("frange in complex boolean query w/ other mandatory clauses to check skipping", - req("q","{!notfoo}(+id_i1:[20 TO 39] -id:25 +{!frange l=4.5 u=4.5 v='countUsage(frange_in_bq,4.5)'})"), - "//result[@numFound=19]"); - + assertQ( + "frange in complex boolean query w/ other mandatory clauses to check skipping", + req( + "q", + "{!notfoo}(+id_i1:[20 TO 39] -id:25 +{!frange l=4.5 u=4.5 v='countUsage(frange_in_bq,4.5)'})"), + "//result[@numFound=19]"); + // don't assume specific clause evaluation ordering. // ideally this is 19, but could be as high as 20 depending on whether frange's // scorer has next() called on it before other clauses skipTo int count = CountUsageValueSourceParser.getAndClearCount("frange_in_bq"); assertTrue("frange_in_bq: " + count, (19 <= count && count <= 20)); - - assertQ("func in complex boolean query w/ constant scoring mandatory clauses", - req("q","{!notfoo}(+id_i1:[20 TO 29]^0 +{!frange l=4.5 u=4.5 v='countUsage(func_in_bq,4.5)'})"), - "//result[@numFound=10]"); + + assertQ( + "func in complex boolean query w/ constant scoring mandatory clauses", + req( + "q", + "{!notfoo}(+id_i1:[20 TO 29]^0 +{!frange l=4.5 u=4.5 v='countUsage(func_in_bq,4.5)'})"), + "//result[@numFound=10]"); // don't assume specific clause evaluation ordering. // ideally this is 10, but could be as high as 11 depending on whether func's @@ -100,30 +117,42 @@ public void testHowManyDocsHaveBoostFunctionComputed() throws Exception { // non-cached frange queries should default to post-filtering // (ie: only be computed on candidates of other q/fq restrictions) // regardless of how few/many docs match the frange - assertQ("query matching 1 doc w/ implicitly post-filtered frange matching all docs", - req("q","{!notfoo cache=false}*:*", // match all... - "fq","{!frange cache=false l=30 u=30}abs(id_i1)", // ...restrict to 1 match - // post filter will happily match all docs, but should only be asked about 1... - "fq","{!frange cache=false l=4.5 u=4.5 v='countUsage(postfilt_match_all,4.5)'})"), - "//result[@numFound=1]"); + assertQ( + "query matching 1 doc w/ implicitly post-filtered frange matching all docs", + req( + "q", "{!notfoo cache=false}*:*", // match all... + "fq", "{!frange cache=false l=30 u=30}abs(id_i1)", // ...restrict to 1 match + // post filter will happily match all docs, but should only be asked about 1... + "fq", "{!frange cache=false l=4.5 u=4.5 v='countUsage(postfilt_match_all,4.5)'})"), + "//result[@numFound=1]"); assertEquals(1, CountUsageValueSourceParser.getAndClearCount("postfilt_match_all")); // - assertQ("query matching all docs w/ implicitly post-filtered frange matching no docs", - req("q","{!notfoo cache=false}id_i1:[20 TO 39]", // match some... 
- "fq","{!frange cache=false cost=0 l=50}abs(id_i1)", // ...regular conjunction filter rules out all - // post filter will happily match all docs, but should never be asked... - "fq","{!frange cache=false l=4.5 u=4.5 v='countUsage(postfilt_match_all,4.5)'})"), - "//result[@numFound=0]"); + assertQ( + "query matching all docs w/ implicitly post-filtered frange matching no docs", + req( + "q", "{!notfoo cache=false}id_i1:[20 TO 39]", // match some... + "fq", "{!frange cache=false cost=0 l=50}abs(id_i1)", // ...regular conjunction filter + // rules out all + // post filter will happily match all docs, but should never be asked... + "fq", "{!frange cache=false l=4.5 u=4.5 v='countUsage(postfilt_match_all,4.5)'})"), + "//result[@numFound=0]"); assertEquals(0, CountUsageValueSourceParser.getAndClearCount("postfilt_match_all")); // Tests that TwoPhaseIterator is employed optimally // note: map(countUsage(lowCost,0),0,0,id_i1) == return "id_i1" value & keep track of access - assertQ("query matching 20 -> 10 -> 5 docs; two non-cached queries", - req("q","{!notfoo cache=false}id_i1:[20 TO 39]", // match 20 - // the below IDs have alternating even/odd pairings so as to test possible sequencing of evaluation - "fq","{!notfoo cache=false}id_i1:(20 21 25 26 28 29 31 32 36 37)", // match 10 (subset of above) - "fq","{!frange cache=false cost=5 l=21 u=99 v='map(countUsage(lowCost,0),0,0,id_i1)'})", // eliminate #20 - "fq","{!frange cache=false cost=10 l=1 v='mod(map(countUsage(lastFilter,0),0,0,id_i1),2)'}"), // match 5 -- (the odd ones since l=1 thus don't match 0) + assertQ( + "query matching 20 -> 10 -> 5 docs; two non-cached queries", + req( + "q", "{!notfoo cache=false}id_i1:[20 TO 39]", // match 20 + // the below IDs have alternating even/odd pairings so as to test possible sequencing + // of evaluation + "fq", + "{!notfoo cache=false}id_i1:(20 21 25 26 28 29 31 32 36 37)", // match 10 (subset + // of above) + "fq", + "{!frange cache=false cost=5 l=21 u=99 v='map(countUsage(lowCost,0),0,0,id_i1)'})", // eliminate #20 + "fq", + "{!frange cache=false cost=10 l=1 v='mod(map(countUsage(lastFilter,0),0,0,id_i1),2)'}"), // match 5 -- (the odd ones since l=1 thus don't match 0) "//result[@numFound=5]"); assertEquals(10, CountUsageValueSourceParser.getAndClearCount("lowCost")); assertEquals(9, CountUsageValueSourceParser.getAndClearCount("lastFilter")); @@ -131,5 +160,4 @@ public void testHowManyDocsHaveBoostFunctionComputed() throws Exception { CountUsageValueSourceParser.clearCounters(); } } - } diff --git a/solr/core/src/test/org/apache/solr/core/SolrCoreCheckLockOnStartupTest.java b/solr/core/src/test/org/apache/solr/core/SolrCoreCheckLockOnStartupTest.java index 314af7c59e2..a7df47640f0 100644 --- a/solr/core/src/test/org/apache/solr/core/SolrCoreCheckLockOnStartupTest.java +++ b/solr/core/src/test/org/apache/solr/core/SolrCoreCheckLockOnStartupTest.java @@ -16,6 +16,10 @@ */ package org.apache.solr.core; +import java.io.File; +import java.lang.invoke.MethodHandles; +import java.nio.file.Files; +import java.util.Map; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.store.Directory; @@ -28,11 +32,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.File; -import java.lang.invoke.MethodHandles; -import java.nio.file.Files; -import java.util.Map; - public class SolrCoreCheckLockOnStartupTest extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); 
@@ -44,25 +43,28 @@ public void setUp() throws Exception { System.setProperty("solr.directoryFactory", "org.apache.solr.core.NIOFSDirectoryFactory"); // test tests native and simple in the same jvm in the same exact directory: - // the file will remain after the native test (it cannot safely be deleted without the risk of deleting another guys lock) - // it's ok, these aren't "compatible" anyway: really this test should not re-use the same directory at all. - Files.deleteIfExists(new File(new File(initAndGetDataDir(), "index"), IndexWriter.WRITE_LOCK_NAME).toPath()); + // the file will remain after the native test (it cannot safely be deleted without the risk of + // deleting another guys lock) it's ok, these aren't "compatible" anyway: really this test + // should not re-use the same directory at all. + Files.deleteIfExists( + new File(new File(initAndGetDataDir(), "index"), IndexWriter.WRITE_LOCK_NAME).toPath()); } @Test public void testSimpleLockErrorOnStartup() throws Exception { - Directory directory = newFSDirectory(new File(initAndGetDataDir(), "index").toPath(), SimpleFSLockFactory.INSTANCE); - //creates a new IndexWriter without releasing the lock yet + Directory directory = + newFSDirectory( + new File(initAndGetDataDir(), "index").toPath(), SimpleFSLockFactory.INSTANCE); + // creates a new IndexWriter without releasing the lock yet IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(null)); ignoreException("locked"); try { - System.setProperty("solr.tests.lockType",DirectoryFactory.LOCK_TYPE_SIMPLE); - //opening a new core on the same index + System.setProperty("solr.tests.lockType", DirectoryFactory.LOCK_TYPE_SIMPLE); + // opening a new core on the same index initCore("solrconfig-basic.xml", "schema.xml"); - if (checkForCoreInitException(LockObtainFailedException.class)) - return; + if (checkForCoreInitException(LockObtainFailedException.class)) return; fail("Expected " + LockObtainFailedException.class.getSimpleName()); } finally { System.clearProperty("solr.tests.lockType"); @@ -81,17 +83,16 @@ public void testNativeLockErrorOnStartup() throws Exception { log.info("Acquiring lock on {}", indexDir.getAbsolutePath()); } Directory directory = newFSDirectory(indexDir.toPath(), NativeFSLockFactory.INSTANCE); - //creates a new IndexWriter without releasing the lock yet + // creates a new IndexWriter without releasing the lock yet IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(null)); ignoreException("locked"); try { - System.setProperty("solr.tests.lockType",DirectoryFactory.LOCK_TYPE_NATIVE); - //opening a new core on the same index + System.setProperty("solr.tests.lockType", DirectoryFactory.LOCK_TYPE_NATIVE); + // opening a new core on the same index initCore("solrconfig-basic.xml", "schema.xml"); CoreContainer cc = h.getCoreContainer(); - if (checkForCoreInitException(LockObtainFailedException.class)) - return; + if (checkForCoreInitException(LockObtainFailedException.class)) return; fail("Expected " + LockObtainFailedException.class.getSimpleName()); } finally { System.clearProperty("solr.tests.lockType"); @@ -103,10 +104,10 @@ public void testNativeLockErrorOnStartup() throws Exception { } private boolean checkForCoreInitException(Class clazz) { - for (Map.Entry entry : h.getCoreContainer().getCoreInitFailures().entrySet()) { + for (Map.Entry entry : + h.getCoreContainer().getCoreInitFailures().entrySet()) { for (Throwable t = entry.getValue().exception; t != null; t = t.getCause()) { - if (clazz.isInstance(t)) - return true; 
+ if (clazz.isInstance(t)) return true; } } return false; diff --git a/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java b/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java index 2b1db3d2985..aa07f0e91fc 100644 --- a/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java +++ b/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java @@ -16,9 +16,18 @@ */ package org.apache.solr.core; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.util.ExecutorUtil; +import org.apache.solr.common.util.SolrNamedThreadFactory; import org.apache.solr.handler.ReplicationHandler; import org.apache.solr.handler.RequestHandlerBase; import org.apache.solr.handler.component.QueryComponent; @@ -29,23 +38,13 @@ import org.apache.solr.search.SolrIndexSearcher; import org.apache.solr.security.AuthorizationContext; import org.apache.solr.update.SolrCoreState; -import org.apache.solr.common.util.SolrNamedThreadFactory; import org.apache.solr.util.RefCounted; import org.apache.solr.util.plugin.SolrCoreAware; import org.junit.Test; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; - public class SolrCoreTest extends SolrTestCaseJ4 { private static final String COLLECTION1 = "collection1"; - + @Override public void setUp() throws Exception { super.setUp(); @@ -66,12 +65,12 @@ public void testRequestHandlerRegistry() { EmptyRequestHandler handler2 = new EmptyRequestHandler(); String path = "/this/is A path /that won't be registered!"; - SolrRequestHandler old = core.registerRequestHandler( path, handler1 ); - assertNull( old ); // should not be anything... - assertEquals( core.getRequestHandlers().get( path ), handler1 ); - old = core.registerRequestHandler( path, handler2 ); - assertEquals( old, handler1 ); // should pop out the old one - assertEquals( core.getRequestHandlers().get( path ), handler2 ); + SolrRequestHandler old = core.registerRequestHandler(path, handler1); + assertNull(old); // should not be anything... 
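
The registry assertions in this hunk depend on registerRequestHandler returning whatever handler was previously mounted at the path (null on first registration), so callers can detect replacement. A map-backed sketch of that contract; the class and method names are illustrative, not Solr's implementation:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class HandlerRegistrySketch<H> {
  private final Map<String, H> handlers = new ConcurrentHashMap<>();

  /** Mounts a handler, returning the one it displaced, or null. */
  H register(String path, H handler) {
    return handlers.put(path, handler);
  }

  H get(String path) {
    return handlers.get(path);
  }
}
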
+ assertEquals(core.getRequestHandlers().get(path), handler1); + old = core.registerRequestHandler(path, handler2); + assertEquals(old, handler1); // should pop out the old one + assertEquals(core.getRequestHandlers().get(path), handler2); } @Test @@ -79,41 +78,69 @@ public void testImplicitPlugins() { final SolrCore core = h.getCore(); final List implicitHandlers = core.getImplicitHandlers(); - final Map pathToClassMap = new HashMap<>(implicitHandlers.size()); + final Map pathToClassMap = new HashMap<>(implicitHandlers.size()); for (PluginInfo implicitHandler : implicitHandlers) { - assertEquals("wrong type for "+implicitHandler.toString(), - SolrRequestHandler.TYPE, implicitHandler.type); + assertEquals( + "wrong type for " + implicitHandler.toString(), + SolrRequestHandler.TYPE, + implicitHandler.type); pathToClassMap.put(implicitHandler.name, implicitHandler.className); } int ihCount = 0; { - ++ihCount; assertEquals(pathToClassMap.get("/admin/file"), "solr.ShowFileRequestHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/admin/luke"), "solr.LukeRequestHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/admin/mbeans"), "solr.SolrInfoMBeanHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/admin/ping"), "solr.PingRequestHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/admin/plugins"), "solr.PluginInfoHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/admin/segments"), "solr.SegmentsInfoRequestHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/admin/system"), "solr.SystemInfoHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/config"), "solr.SolrConfigHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/export"), "solr.ExportHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/terms"), "solr.SearchHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/get"), "solr.RealTimeGetHandler"); - ++ihCount; assertEquals(pathToClassMap.get(ReplicationHandler.PATH), "solr.ReplicationHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/schema"), "solr.SchemaHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/sql"), "solr.SQLHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/stream"), "solr.StreamHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/graph"), "solr.GraphHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/update"), "solr.UpdateRequestHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/update/csv"), "solr.UpdateRequestHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/update/json"), "solr.UpdateRequestHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/update/json/docs"), "solr.UpdateRequestHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/analysis/document"), "solr.DocumentAnalysisRequestHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/analysis/field"), "solr.FieldAnalysisRequestHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/debug/dump"), "solr.DumpRequestHandler"); - ++ihCount; assertEquals(pathToClassMap.get("update"), "solr.UpdateRequestHandlerApi"); - ++ihCount; assertEquals(pathToClassMap.get("/tasks/cancel"), "solr.QueryCancellationHandler"); - ++ihCount; assertEquals(pathToClassMap.get("/tasks/list"), "solr.ActiveTasksListHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/admin/file"), "solr.ShowFileRequestHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/admin/luke"), "solr.LukeRequestHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/admin/mbeans"), "solr.SolrInfoMBeanHandler"); + ++ihCount; + 
assertEquals(pathToClassMap.get("/admin/ping"), "solr.PingRequestHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/admin/plugins"), "solr.PluginInfoHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/admin/segments"), "solr.SegmentsInfoRequestHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/admin/system"), "solr.SystemInfoHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/config"), "solr.SolrConfigHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/export"), "solr.ExportHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/terms"), "solr.SearchHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/get"), "solr.RealTimeGetHandler"); + ++ihCount; + assertEquals(pathToClassMap.get(ReplicationHandler.PATH), "solr.ReplicationHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/schema"), "solr.SchemaHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/sql"), "solr.SQLHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/stream"), "solr.StreamHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/graph"), "solr.GraphHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/update"), "solr.UpdateRequestHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/update/csv"), "solr.UpdateRequestHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/update/json"), "solr.UpdateRequestHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/update/json/docs"), "solr.UpdateRequestHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/analysis/document"), "solr.DocumentAnalysisRequestHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/analysis/field"), "solr.FieldAnalysisRequestHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/debug/dump"), "solr.DumpRequestHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("update"), "solr.UpdateRequestHandlerApi"); + ++ihCount; + assertEquals(pathToClassMap.get("/tasks/cancel"), "solr.QueryCancellationHandler"); + ++ihCount; + assertEquals(pathToClassMap.get("/tasks/list"), "solr.ActiveTasksListHandler"); } assertEquals("wrong number of implicit handlers", ihCount, implicitHandlers.size()); } @@ -124,43 +151,43 @@ public void testClose() throws Exception { SolrCore core = cores.getCore(SolrTestCaseJ4.DEFAULT_TEST_CORENAME); ClosingRequestHandler handler1 = new ClosingRequestHandler(); - handler1.inform( core ); + handler1.inform(core); String path = "/this/is A path /that won't be registered 2!!!!!!!!!!!"; - SolrRequestHandler old = core.registerRequestHandler( path, handler1 ); - assertNull( old ); // should not be anything... - assertEquals( core.getRequestHandlers().get( path ), handler1 ); + SolrRequestHandler old = core.registerRequestHandler(path, handler1); + assertNull(old); // should not be anything... 
+ assertEquals(core.getRequestHandlers().get(path), handler1); core.close(); cores.shutdown(); assertTrue("Handler not closed", handler1.closed == true); } - + @Test public void testRefCount() throws Exception { SolrCore core = h.getCore(); assertTrue("Refcount != 1", core.getOpenCount() == 1); - + final CoreContainer cores = h.getCoreContainer(); SolrCore c1 = cores.getCore(SolrTestCaseJ4.DEFAULT_TEST_CORENAME); assertTrue("Refcount != 2", core.getOpenCount() == 2); ClosingRequestHandler handler1 = new ClosingRequestHandler(); - handler1.inform( core ); + handler1.inform(core); String path = "/this/is A path /that won't be registered!"; - SolrRequestHandler old = core.registerRequestHandler( path, handler1 ); - assertNull( old ); // should not be anything... - assertEquals( core.getRequestHandlers().get( path ), handler1 ); - + SolrRequestHandler old = core.registerRequestHandler(path, handler1); + assertNull(old); // should not be anything... + assertEquals(core.getRequestHandlers().get(path), handler1); + SolrCore c2 = cores.getCore(SolrTestCaseJ4.DEFAULT_TEST_CORENAME); c1.close(); assertTrue("Refcount < 1", core.getOpenCount() >= 1); assertTrue("Handler is closed", handler1.closed == false); - + c1 = cores.getCore(SolrTestCaseJ4.DEFAULT_TEST_CORENAME); assertTrue("Refcount < 2", core.getOpenCount() >= 2); assertTrue("Handler is closed", handler1.closed == false); - + c2.close(); assertTrue("Refcount < 1", core.getOpenCount() >= 1); assertTrue("Handler is closed", handler1.closed == false); @@ -170,7 +197,6 @@ public void testRefCount() throws Exception { assertTrue("Refcount != 0", core.getOpenCount() == 0); assertTrue("Handler not closed", core.isClosed() && handler1.closed == true); } - @Test public void testRefCountMT() throws Exception { @@ -186,106 +212,111 @@ public void testRefCountMT() throws Exception { final int LOOP = 100; final int MT = 16; - ExecutorService service = ExecutorUtil.newMDCAwareFixedThreadPool(MT, new SolrNamedThreadFactory("refCountMT")); + ExecutorService service = + ExecutorUtil.newMDCAwareFixedThreadPool(MT, new SolrNamedThreadFactory("refCountMT")); List> callees = new ArrayList<>(MT); final CoreContainer cores = h.getCoreContainer(); for (int i = 0; i < MT; ++i) { - Callable call = new Callable() { - void yieldInt(int n) { - try { - Thread.sleep(0, (n % 13 + 1) * 10); - } catch (InterruptedException xint) { - } - } - - @Override - public Integer call() { - SolrCore core = null; - int r = 0; - try { - for (int l = 0; l < LOOP; ++l) { - r += 1; - core = cores.getCore(SolrTestCaseJ4.DEFAULT_TEST_CORENAME); - // sprinkle concurrency hinting... - yieldInt(l); - assertTrue("Refcount < 1", core.getOpenCount() >= 1); - yieldInt(l); - assertTrue("Refcount > 17", core.getOpenCount() <= 17); - yieldInt(l); - assertTrue("Handler is closed", handler1.closed == false); - yieldInt(l); - core.close(); - core = null; - yieldInt(l); + Callable call = + new Callable() { + void yieldInt(int n) { + try { + Thread.sleep(0, (n % 13 + 1) * 10); + } catch (InterruptedException xint) { + } } - return r; - } finally { - if (core != null) - core.close(); - } - } - }; + + @Override + public Integer call() { + SolrCore core = null; + int r = 0; + try { + for (int l = 0; l < LOOP; ++l) { + r += 1; + core = cores.getCore(SolrTestCaseJ4.DEFAULT_TEST_CORENAME); + // sprinkle concurrency hinting... 
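
testRefCount and testRefCountMT both pin the same invariant: each CoreContainer.getCore(...) call adds a reference, each SolrCore.close() drops one, and close hooks such as ClosingRequestHandler's preClose only fire once the count reaches zero. A toy model of that contract, assuming nothing about SolrCore's internals:

import java.util.concurrent.atomic.AtomicInteger;

class RefCountedCoreSketch implements AutoCloseable {
  private final AtomicInteger openCount = new AtomicInteger(1); // the container holds the first reference
  private volatile boolean closed = false;

  RefCountedCoreSketch acquire() { // plays the role of CoreContainer.getCore(name)
    openCount.incrementAndGet();
    return this;
  }

  int getOpenCount() {
    return openCount.get();
  }

  boolean isClosed() {
    return closed;
  }

  @Override
  public void close() { // plays the role of SolrCore.close()
    if (openCount.decrementAndGet() == 0) {
      closed = true; // only now would hooks like preClose(...) run
    }
  }
}
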
+ yieldInt(l); + assertTrue("Refcount < 1", core.getOpenCount() >= 1); + yieldInt(l); + assertTrue("Refcount > 17", core.getOpenCount() <= 17); + yieldInt(l); + assertTrue("Handler is closed", handler1.closed == false); + yieldInt(l); + core.close(); + core = null; + yieldInt(l); + } + return r; + } finally { + if (core != null) core.close(); + } + } + }; callees.add(call); } List> results = service.invokeAll(callees); for (Future result : results) { - assertTrue("loop=" + result.get() +" < " + LOOP, result.get() >= LOOP); + assertTrue("loop=" + result.get() + " < " + LOOP, result.get() >= LOOP); } - + cores.shutdown(); assertTrue("Refcount != 0", core.getOpenCount() == 0); assertTrue("Handler not closed", core.isClosed() && handler1.closed == true); - + service.shutdown(); assertTrue("Running for too long...", service.awaitTermination(60, TimeUnit.SECONDS)); } @Test public void testInfoRegistry() throws Exception { - //TEst that SolrInfoMBeans are registered, including SearchComponents + // TEst that SolrInfoMBeans are registered, including SearchComponents SolrCore core = h.getCore(); Map infoRegistry = core.getInfoRegistry(); - assertTrue("infoRegistry Size: " + infoRegistry.size() + " is not greater than: " + 0, infoRegistry.size() > 0); - //try out some that we know are in the config + assertTrue( + "infoRegistry Size: " + infoRegistry.size() + " is not greater than: " + 0, + infoRegistry.size() > 0); + // try out some that we know are in the config SolrInfoBean bean = infoRegistry.get(SpellCheckComponent.COMPONENT_NAME); assertNotNull("bean not registered", bean); - //try a default one + // try a default one bean = infoRegistry.get(QueryComponent.COMPONENT_NAME); assertNotNull("bean not registered", bean); - //try a Req Handler, which are stored by name, not clas + // try a Req Handler, which are stored by name, not clas bean = infoRegistry.get("/select"); assertNotNull("bean not registered", bean); } @Test public void testConfiguration() throws Exception { - assertEquals("wrong config for slowQueryThresholdMillis", 2000, solrConfig.slowQueryThresholdMillis); + assertEquals( + "wrong config for slowQueryThresholdMillis", 2000, solrConfig.slowQueryThresholdMillis); assertEquals("wrong config for maxBooleanClauses", 1024, solrConfig.booleanQueryMaxClauseCount); - assertEquals("wrong config for enableLazyFieldLoading", true, solrConfig.enableLazyFieldLoading); + assertEquals( + "wrong config for enableLazyFieldLoading", true, solrConfig.enableLazyFieldLoading); assertEquals("wrong config for queryResultWindowSize", 10, solrConfig.queryResultWindowSize); } /** - * Test that's meant to be run with many iterations to expose a leak of SolrIndexSearcher when a core is closed - * due to a reload. Without the fix, this test fails with most iters=1000 runs. + * Test that's meant to be run with many iterations to expose a leak of SolrIndexSearcher when a + * core is closed due to a reload. Without the fix, this test fails with most iters=1000 runs. */ @Test public void testReloadLeak() throws Exception { final ExecutorService executor = ExecutorUtil.newMDCAwareFixedThreadPool(1, new SolrNamedThreadFactory("testReloadLeak")); - // Continuously open new searcher while core is not closed, and reload core to try to reproduce searcher leak. - // While in practice we never continuously open new searchers, this is trying to make up for the fact that opening - // a searcher in this empty core is very fast by opening new searchers continuously to increase the likelihood - // for race. 
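
The fan-out/join shape of testRefCountMT, reduced to plain JDK types so the concurrency skeleton is visible on its own; the pool size and task body are illustrative:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

class FanOutSketch {
  public static void main(String[] args) throws Exception {
    final int workers = 16;
    ExecutorService service = Executors.newFixedThreadPool(workers);
    List<Callable<Integer>> callees = new ArrayList<>(workers);
    for (int i = 0; i < workers; i++) {
      callees.add(() -> 100); // each worker reports its completed loop count
    }
    // invokeAll blocks until every task finishes, then the Futures are inspected
    for (Future<Integer> result : service.invokeAll(callees)) {
      if (result.get() < 100) {
        throw new AssertionError("worker under-looped: " + result.get());
      }
    }
    service.shutdown();
    service.awaitTermination(60, TimeUnit.SECONDS);
  }
}
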
+ // Continuously open new searcher while core is not closed, and reload core to try to reproduce + // searcher leak. While in practice we never continuously open new searchers, this is trying to + // make up for the fact that opening a searcher in this empty core is very fast by opening new + // searchers continuously to increase the likelihood for race. SolrCore core = h.getCore(); assertTrue("Refcount != 1", core.getOpenCount() == 1); executor.execute(new NewSearcherRunnable(core)); - // Since we called getCore() vs getCoreInc() and don't own a refCount, the container should decRef the core - // and close it when we call reload. + // Since we called getCore() vs getCoreInc() and don't own a refCount, the container should + // decRef the core and close it when we call reload. h.reload(); executor.shutdown(); @@ -325,36 +356,34 @@ public void run() { } } } - } - - class ClosingRequestHandler extends EmptyRequestHandler implements SolrCoreAware { boolean closed = false; @Override public void inform(SolrCore core) { - core.addCloseHook( new CloseHook() { - @Override - public void preClose(SolrCore core) { - closed = true; - } - }); + core.addCloseHook( + new CloseHook() { + @Override + public void preClose(SolrCore core) { + closed = true; + } + }); } } -/** - * An empty handler for testing - */ -class EmptyRequestHandler extends RequestHandlerBase -{ +/** An empty handler for testing */ +class EmptyRequestHandler extends RequestHandlerBase { @Override public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception { // nothing! } - @Override public String getDescription() { return null; } + @Override + public String getDescription() { + return null; + } @Override public Name getPermissionName(AuthorizationContext request) { diff --git a/solr/core/src/test/org/apache/solr/core/TestBackupRepositoryFactory.java b/solr/core/src/test/org/apache/solr/core/TestBackupRepositoryFactory.java index fde8709d386..60cb11694cd 100644 --- a/solr/core/src/test/org/apache/solr/core/TestBackupRepositoryFactory.java +++ b/solr/core/src/test/org/apache/solr/core/TestBackupRepositoryFactory.java @@ -16,13 +16,12 @@ */ package org.apache.solr.core; +import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import java.io.File; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; - -import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.CoreAdminParams; @@ -39,10 +38,8 @@ import org.junit.rules.TestRule; public class TestBackupRepositoryFactory extends SolrTestCaseJ4 { - @Rule - public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); - @Rule - public ExpectedException expectedException = ExpectedException.none(); + @Rule public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); + @Rule public ExpectedException expectedException = ExpectedException.none(); // tmp dir, cleaned up automatically. 
private static File solrHome = null; @@ -68,7 +65,7 @@ public void testMultipleDefaultRepositories() { Map attrs = new HashMap<>(); attrs.put(CoreAdminParams.NAME, "repo1"); attrs.put(FieldType.CLASS_NAME, "a.b.C"); - attrs.put("default" , "true"); + attrs.put("default", "true"); plugins[0] = new PluginInfo("repository", attrs); } @@ -76,7 +73,7 @@ public void testMultipleDefaultRepositories() { Map attrs = new HashMap<>(); attrs.put(CoreAdminParams.NAME, "repo2"); attrs.put(FieldType.CLASS_NAME, "p.q.R"); - attrs.put("default" , "true"); + attrs.put("default", "true"); plugins[1] = new PluginInfo("repository", attrs); } @@ -93,7 +90,7 @@ public void testMultipleRepositoriesWithSameName() { Map attrs = new HashMap<>(); attrs.put(CoreAdminParams.NAME, "repo1"); attrs.put(FieldType.CLASS_NAME, "a.b.C"); - attrs.put("default" , "true"); + attrs.put("default", "true"); plugins[0] = new PluginInfo("repository", attrs); } @@ -127,7 +124,7 @@ public void testRepositoryConfig() { Map attrs = new HashMap<>(); attrs.put(CoreAdminParams.NAME, "repo1"); attrs.put(FieldType.CLASS_NAME, LocalFileSystemRepository.class.getName()); - attrs.put("default" , "true"); + attrs.put("default", "true"); attrs.put("location", "/tmp"); plugins[0] = new PluginInfo("repository", attrs); } @@ -149,7 +146,7 @@ public void testRepositoryConfig() { assertTrue(repo instanceof LocalFileSystemRepository); assertEquals("/tmp", repo.getConfigProperty("location")); } - + { try { BackupRepository repo = f.newInstance(loader, "boom"); diff --git a/solr/core/src/test/org/apache/solr/core/TestBadConfig.java b/solr/core/src/test/org/apache/solr/core/TestBadConfig.java index db444299227..1040aaf7645 100644 --- a/solr/core/src/test/org/apache/solr/core/TestBadConfig.java +++ b/solr/core/src/test/org/apache/solr/core/TestBadConfig.java @@ -19,14 +19,13 @@ public class TestBadConfig extends AbstractBadConfigTestBase { public void testUnsetSysProperty() throws Exception { - assertConfigs("bad_solrconfig.xml","schema.xml","unset.sys.property"); + assertConfigs("bad_solrconfig.xml", "schema.xml", "unset.sys.property"); } public void testNRTModeProperty() throws Exception { - assertConfigs("bad-solrconfig-nrtmode.xml","schema.xml", "nrtMode"); + assertConfigs("bad-solrconfig-nrtmode.xml", "schema.xml", "nrtMode"); } - public void testUpdateLogButNoVersionField() throws Exception { System.setProperty("enable.update.log", "true"); @@ -38,27 +37,34 @@ public void testUpdateLogButNoVersionField() throws Exception { } public void testBogusMergePolicy() throws Exception { - assertConfigs("bad-mpf-solrconfig.xml", "schema-minimal.xml", - "DummyMergePolicyFactory"); + assertConfigs("bad-mpf-solrconfig.xml", "schema-minimal.xml", "DummyMergePolicyFactory"); } public void testSchemaMutableButNotManaged() throws Exception { - assertConfigs("bad-solrconfig-schema-mutable-but-not-managed.xml", - "schema-minimal.xml", "Unexpected arg(s)"); // SOLR-15939 + assertConfigs( + "bad-solrconfig-schema-mutable-but-not-managed.xml", + "schema-minimal.xml", + "Unexpected arg(s)"); // SOLR-15939 } public void testManagedSchemaCannotBeNamedSchemaDotXml() throws Exception { - assertConfigs("bad-solrconfig-managed-schema-named-schema.xml.xml", - "schema-minimal.xml", "managedSchemaResourceName can't be 'schema.xml'"); + assertConfigs( + "bad-solrconfig-managed-schema-named-schema.xml.xml", + "schema-minimal.xml", + "managedSchemaResourceName can't be 'schema.xml'"); } public void testUnknownSchemaAttribute() throws Exception { - 
assertConfigs("bad-solrconfig-unexpected-schema-attribute.xml", "schema-minimal.xml", - "Unexpected arg(s): {bogusParam=bogusValue}"); + assertConfigs( + "bad-solrconfig-unexpected-schema-attribute.xml", + "schema-minimal.xml", + "Unexpected arg(s): {bogusParam=bogusValue}"); } public void testTolerantUpdateProcessorNoUniqueKey() throws Exception { - assertConfigs("solrconfig-tolerant-update-minimal.xml", "schema-minimal.xml", - "requires a schema that includes a uniqueKey field"); + assertConfigs( + "solrconfig-tolerant-update-minimal.xml", + "schema-minimal.xml", + "requires a schema that includes a uniqueKey field"); } } diff --git a/solr/core/src/test/org/apache/solr/core/TestCodecSupport.java b/solr/core/src/test/org/apache/solr/core/TestCodecSupport.java index 6bfe0eedaea..2d05f11e00f 100644 --- a/solr/core/src/test/org/apache/solr/core/TestCodecSupport.java +++ b/solr/core/src/test/org/apache/solr/core/TestCodecSupport.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.util.Map; - import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.lucene90.Lucene90Codec.Mode; import org.apache.lucene.codecs.lucene90.Lucene90StoredFieldsFormat; @@ -50,23 +49,30 @@ public void testPostingsFormats() { PerFieldPostingsFormat format = (PerFieldPostingsFormat) codec.postingsFormat(); assertEquals("Direct", format.getPostingsFormatForField(schemaField.getName()).getName()); schemaField = fields.get("string_standard_f"); - assertEquals(TestUtil.getDefaultPostingsFormat().getName(), format.getPostingsFormatForField(schemaField.getName()).getName()); + assertEquals( + TestUtil.getDefaultPostingsFormat().getName(), + format.getPostingsFormatForField(schemaField.getName()).getName()); schemaField = fields.get("string_f"); - assertEquals(TestUtil.getDefaultPostingsFormat().getName(), format.getPostingsFormatForField(schemaField.getName()).getName()); + assertEquals( + TestUtil.getDefaultPostingsFormat().getName(), + format.getPostingsFormatForField(schemaField.getName()).getName()); } public void testDocValuesFormats() { - // NOTE: Direct (and Disk) DocValues formats were removed, so we use "Asserting" + // NOTE: Direct (and Disk) DocValues formats were removed, so we use "Asserting" // as a way to vet that the configuration actually matters. 
Codec codec = h.getCore().getCodec(); Map fields = h.getCore().getLatestSchema().getFields(); SchemaField schemaField = fields.get("string_disk_f"); PerFieldDocValuesFormat format = (PerFieldDocValuesFormat) codec.docValuesFormat(); - assertEquals(TestUtil.getDefaultDocValuesFormat().getName(), format.getDocValuesFormatForField(schemaField.getName()).getName()); + assertEquals( + TestUtil.getDefaultDocValuesFormat().getName(), + format.getDocValuesFormatForField(schemaField.getName()).getName()); schemaField = fields.get("string_direct_f"); assertEquals("Asserting", format.getDocValuesFormatForField(schemaField.getName()).getName()); schemaField = fields.get("string_f"); - assertEquals(TestUtil.getDefaultDocValuesFormat().getName(), + assertEquals( + TestUtil.getDefaultDocValuesFormat().getName(), format.getDocValuesFormatForField(schemaField.getName()).getName()); } @@ -76,22 +82,30 @@ public void testDynamicFieldsPostingsFormats() { assertEquals("Direct", format.getPostingsFormatForField("foo_direct").getName()); assertEquals("Direct", format.getPostingsFormatForField("bar_direct").getName()); - assertEquals(TestUtil.getDefaultPostingsFormat().getName(), format.getPostingsFormatForField("foo_standard").getName()); - assertEquals(TestUtil.getDefaultPostingsFormat().getName(), format.getPostingsFormatForField("bar_standard").getName()); + assertEquals( + TestUtil.getDefaultPostingsFormat().getName(), + format.getPostingsFormatForField("foo_standard").getName()); + assertEquals( + TestUtil.getDefaultPostingsFormat().getName(), + format.getPostingsFormatForField("bar_standard").getName()); } public void testDynamicFieldsDocValuesFormats() { - // NOTE: Direct (and Disk) DocValues formats were removed, so we use "Asserting" + // NOTE: Direct (and Disk) DocValues formats were removed, so we use "Asserting" // as a way to vet that the configuration actually matters. 
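
The postings- and docvalues-format tests above exercise Lucene's per-field codec dispatch: the codec exposes one format lookup per field name, and Solr's SchemaCodecFactory routes that lookup through the field's schema definition. A stripped-down sketch of the dispatch, with a suffix convention standing in for the schema lookup and an assumed Lucene 9.0 default format name:

import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat;

class FieldRoutedPostingsFormat extends PerFieldPostingsFormat {
  @Override
  public PostingsFormat getPostingsFormatForField(String field) {
    if (field.endsWith("_direct")) {
      // "Direct" is resolved via SPI and lives in the lucene-codecs module
      return PostingsFormat.forName("Direct");
    }
    return PostingsFormat.forName("Lucene90"); // assumed default name for Lucene 9.0
  }
}
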
Codec codec = h.getCore().getCodec(); PerFieldDocValuesFormat format = (PerFieldDocValuesFormat) codec.docValuesFormat(); - assertEquals(TestUtil.getDefaultDocValuesFormat().getName(), format.getDocValuesFormatForField("foo_disk").getName()); - assertEquals(TestUtil.getDefaultDocValuesFormat().getName(), format.getDocValuesFormatForField("bar_disk").getName()); + assertEquals( + TestUtil.getDefaultDocValuesFormat().getName(), + format.getDocValuesFormatForField("foo_disk").getName()); + assertEquals( + TestUtil.getDefaultDocValuesFormat().getName(), + format.getDocValuesFormatForField("bar_disk").getName()); assertEquals("Asserting", format.getDocValuesFormatForField("foo_direct").getName()); assertEquals("Asserting", format.getDocValuesFormatForField("bar_direct").getName()); } - + private void reloadCoreAndRecreateIndex() { h.getCoreContainer().reload(h.coreName); assertU(delQ("*:*")); @@ -99,42 +113,56 @@ private void reloadCoreAndRecreateIndex() { assertU(add(doc("string_f", "foo"))); assertU(commit()); } - - private void doTestCompressionMode(String propertyValue, String expectedModeString) throws IOException { + + private void doTestCompressionMode(String propertyValue, String expectedModeString) + throws IOException { if (propertyValue != null) { System.setProperty("tests.COMPRESSION_MODE", propertyValue); } try { reloadCoreAndRecreateIndex(); - assertCompressionMode(expectedModeString, h.getCore()); + assertCompressionMode(expectedModeString, h.getCore()); } finally { System.clearProperty("tests.COMPRESSION_MODE"); } } - protected void assertCompressionMode(String expectedModeString, SolrCore core) throws IOException { - h.getCore().withSearcher(searcher -> { - SegmentInfos infos = SegmentInfos.readLatestCommit(searcher.getIndexReader().directory()); - SegmentInfo info = infos.info(infos.size() - 1).info; - assertEquals("Expecting compression mode string to be " + expectedModeString + - " but got: " + info.getAttribute(Lucene90StoredFieldsFormat.MODE_KEY) + - "\n SegmentInfo: " + info + - "\n SegmentInfos: " + infos + - "\n Codec: " + core.getCodec(), - expectedModeString, info.getAttribute(Lucene90StoredFieldsFormat.MODE_KEY)); - return null; - }); + protected void assertCompressionMode(String expectedModeString, SolrCore core) + throws IOException { + h.getCore() + .withSearcher( + searcher -> { + SegmentInfos infos = + SegmentInfos.readLatestCommit(searcher.getIndexReader().directory()); + SegmentInfo info = infos.info(infos.size() - 1).info; + assertEquals( + "Expecting compression mode string to be " + + expectedModeString + + " but got: " + + info.getAttribute(Lucene90StoredFieldsFormat.MODE_KEY) + + "\n SegmentInfo: " + + info + + "\n SegmentInfos: " + + infos + + "\n Codec: " + + core.getCodec(), + expectedModeString, + info.getAttribute(Lucene90StoredFieldsFormat.MODE_KEY)); + return null; + }); } - + public void testCompressionMode() throws Exception { - assertEquals("incompatible change in compressionMode property", - "compressionMode", SchemaCodecFactory.COMPRESSION_MODE); + assertEquals( + "incompatible change in compressionMode property", + "compressionMode", + SchemaCodecFactory.COMPRESSION_MODE); doTestCompressionMode("BEST_SPEED", "BEST_SPEED"); doTestCompressionMode("BEST_COMPRESSION", "BEST_COMPRESSION"); doTestCompressionMode("best_speed", "BEST_SPEED"); doTestCompressionMode("best_compression", "BEST_COMPRESSION"); } - + public void testMixedCompressionMode() throws Exception { assertU(delQ("*:*")); assertU(commit()); @@ -153,63 +181,81 @@ public void 
testMixedCompressionMode() throws Exception { assertU(add(doc("string_f", "3", "text", "foo zoo"))); assertU(commit()); assertCompressionMode("BEST_SPEED", h.getCore()); - assertQ(req("q", "*:*"), - "//*[@numFound='3']"); - assertQ(req("q", "text:foo"), - "//*[@numFound='3']"); + assertQ(req("q", "*:*"), "//*[@numFound='3']"); + assertQ(req("q", "text:foo"), "//*[@numFound='3']"); assertU(optimize("maxSegments", "1")); assertCompressionMode("BEST_SPEED", h.getCore()); System.clearProperty("tests.COMPRESSION_MODE"); } - + public void testBadCompressionMode() throws Exception { - SolrException thrown = expectThrows(SolrException.class, () -> { - doTestCompressionMode("something_that_doesnt_exist", "something_that_doesnt_exist"); - }); + SolrException thrown = + expectThrows( + SolrException.class, + () -> { + doTestCompressionMode("something_that_doesnt_exist", "something_that_doesnt_exist"); + }); assertEquals(SolrException.ErrorCode.SERVER_ERROR.code, thrown.code()); - assertTrue("Unexpected Exception message: " + thrown.getMessage(), + assertTrue( + "Unexpected Exception message: " + thrown.getMessage(), thrown.getMessage().contains("Unable to reload core")); - + final SchemaCodecFactory factory1 = new SchemaCodecFactory(); final NamedList nl = new NamedList<>(); nl.add(SchemaCodecFactory.COMPRESSION_MODE, "something_that_doesnt_exist"); thrown = expectThrows(SolrException.class, () -> factory1.init(nl)); assertEquals(SolrException.ErrorCode.SERVER_ERROR.code, thrown.code()); - assertTrue("Unexpected Exception message: " + thrown.getMessage(), + assertTrue( + "Unexpected Exception message: " + thrown.getMessage(), thrown.getMessage().contains("Invalid compressionMode: 'something_that_doesnt_exist'")); - + final SchemaCodecFactory factory2 = new SchemaCodecFactory(); final NamedList nl2 = new NamedList<>(); nl2.add(SchemaCodecFactory.COMPRESSION_MODE, ""); thrown = expectThrows(SolrException.class, () -> factory2.init(nl2)); assertEquals(SolrException.ErrorCode.SERVER_ERROR.code, thrown.code()); - assertTrue("Unexpected Exception message: " + thrown.getMessage(), + assertTrue( + "Unexpected Exception message: " + thrown.getMessage(), thrown.getMessage().contains("Invalid compressionMode: ''")); } - + public void testCompressionModeDefault() throws IOException { - assertEquals("Default Solr compression mode changed. Is this expected?", - SchemaCodecFactory.SOLR_DEFAULT_COMPRESSION_MODE, Mode.valueOf("BEST_SPEED")); + assertEquals( + "Default Solr compression mode changed. Is this expected?", + SchemaCodecFactory.SOLR_DEFAULT_COMPRESSION_MODE, + Mode.valueOf("BEST_SPEED")); String previousCoreName = h.coreName; String newCoreName = "core_with_default_compression"; SolrCore c = null; - - SolrConfig config = TestHarness.createConfig(testSolrHome, previousCoreName, "solrconfig_codec2.xml"); - assertEquals("Unexpected codec factory for this test.", "solr.SchemaCodecFactory", config.get("codecFactory").attr("class")); - assertTrue("Unexpected configuration of codec factory for this test. Expecting empty element", - config.get("codecFactory").getAll(null, (String)null).isEmpty()); + + SolrConfig config = + TestHarness.createConfig(testSolrHome, previousCoreName, "solrconfig_codec2.xml"); + assertEquals( + "Unexpected codec factory for this test.", + "solr.SchemaCodecFactory", + config.get("codecFactory").attr("class")); + assertTrue( + "Unexpected configuration of codec factory for this test. 
Expecting empty element", + config.get("codecFactory").getAll(null, (String) null).isEmpty()); IndexSchema schema = IndexSchemaFactory.buildIndexSchema("schema_codec.xml", config); CoreContainer coreContainer = h.getCoreContainer(); - + try { - CoreDescriptor cd = new CoreDescriptor(newCoreName, testSolrHome.resolve(newCoreName), coreContainer); - c = new SolrCore(coreContainer, cd, - new ConfigSet("fakeConfigset", config, forceFetch -> schema, null, true)); + CoreDescriptor cd = + new CoreDescriptor(newCoreName, testSolrHome.resolve(newCoreName), coreContainer); + c = + new SolrCore( + coreContainer, + cd, + new ConfigSet("fakeConfigset", config, forceFetch -> schema, null, true)); assertNull(coreContainer.registerCore(cd, c, false, false)); h.coreName = newCoreName; - assertEquals("We are not using the correct core", "solrconfig_codec2.xml", h.getCore().getConfigResource()); + assertEquals( + "We are not using the correct core", + "solrconfig_codec2.xml", + h.getCore().getConfigResource()); assertU(add(doc("string_f", "foo"))); assertU(commit()); assertCompressionMode(SchemaCodecFactory.SOLR_DEFAULT_COMPRESSION_MODE.name(), h.getCore()); @@ -217,6 +263,5 @@ public void testCompressionModeDefault() throws IOException { h.coreName = previousCoreName; coreContainer.unload(newCoreName); } - } } diff --git a/solr/core/src/test/org/apache/solr/core/TestConfLoadPerf.java b/solr/core/src/test/org/apache/solr/core/TestConfLoadPerf.java index 4f98afac596..af67ea58b9b 100644 --- a/solr/core/src/test/org/apache/solr/core/TestConfLoadPerf.java +++ b/solr/core/src/test/org/apache/solr/core/TestConfLoadPerf.java @@ -17,6 +17,8 @@ package org.apache.solr.core; +import static org.apache.solr.core.TestConfigSets.solrxml; + import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; @@ -26,7 +28,6 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.cloud.ZkSolrResourceLoader; import org.apache.solr.common.util.SuppressForbidden; @@ -34,18 +35,17 @@ import org.apache.zookeeper.data.Stat; import org.junit.Ignore; -import static org.apache.solr.core.TestConfigSets.solrxml; - public class TestConfLoadPerf extends SolrTestCaseJ4 { @Ignore @SuppressForbidden(reason = "Needed to provide time for tests.") - public void testPerf() throws Exception{ + public void testPerf() throws Exception { String sourceHome = ExternalPaths.SOURCE_HOME; - File configSetDir = new File(sourceHome, "server/solr/configsets/sample_techproducts_configs/conf"); + File configSetDir = + new File(sourceHome, "server/solr/configsets/sample_techproducts_configs/conf"); String configSetsBaseDir = TEST_PATH().resolve("configsets").toString(); - byte[] b = Files.readAllBytes(new File(configSetDir, "solrconfig.xml").toPath()); + byte[] b = Files.readAllBytes(new File(configSetDir, "solrconfig.xml").toPath()); Path testDirectory = createTempDir(); System.setProperty("configsets", configSetsBaseDir); @@ -54,35 +54,39 @@ public void testPerf() throws Exception{ container.load(); container.shutdown(); - SolrResourceLoader srl = new SolrResourceLoader("temp", Collections.emptyList(), container.solrHome, container.getResourceLoader().classLoader){ - - @Override - public CoreContainer getCoreContainer() { - return container; - } + SolrResourceLoader srl = + new SolrResourceLoader( + "temp", + Collections.emptyList(), + container.solrHome, + container.getResourceLoader().classLoader) { - @Override - public InputStream 
openResource(String resource) throws IOException { - if(resource.equals("solrconfig.xml")) { - Stat stat = new Stat(); - stat.setVersion(1); - return new ZkSolrResourceLoader.ZkByteArrayInputStream(b, new File(configSetDir, "solrconfig.xml").getAbsolutePath(), stat); - } else { - throw new FileNotFoundException(resource); - } + @Override + public CoreContainer getCoreContainer() { + return container; + } - } - }; + @Override + public InputStream openResource(String resource) throws IOException { + if (resource.equals("solrconfig.xml")) { + Stat stat = new Stat(); + stat.setVersion(1); + return new ZkSolrResourceLoader.ZkByteArrayInputStream( + b, new File(configSetDir, "solrconfig.xml").getAbsolutePath(), stat); + } else { + throw new FileNotFoundException(resource); + } + } + }; System.gc(); long heapSize = Runtime.getRuntime().totalMemory(); List allConfigs = new ArrayList<>(); - long startTime = System.currentTimeMillis(); - for(int i=0;i<100;i++) { + long startTime = System.currentTimeMillis(); + for (int i = 0; i < 100; i++) { allConfigs.add(SolrConfig.readFromResourceLoader(srl, "solrconfig.xml", true, null)); - } System.gc(); - System.out.println("TIME_TAKEN : "+(System.currentTimeMillis()-startTime)); - System.out.println("HEAP_SIZE : "+((Runtime.getRuntime().totalMemory()-heapSize)/(1024))); + System.out.println("TIME_TAKEN : " + (System.currentTimeMillis() - startTime)); + System.out.println("HEAP_SIZE : " + ((Runtime.getRuntime().totalMemory() - heapSize) / (1024))); } } diff --git a/solr/core/src/test/org/apache/solr/core/TestConfig.java b/solr/core/src/test/org/apache/solr/core/TestConfig.java index 71b163ba4e6..75b944836ed 100644 --- a/solr/core/src/test/org/apache/solr/core/TestConfig.java +++ b/solr/core/src/test/org/apache/solr/core/TestConfig.java @@ -18,10 +18,9 @@ import java.io.IOException; import java.io.InputStream; -import java.util.LinkedHashMap; import java.util.Collections; +import java.util.LinkedHashMap; import java.util.List; - import org.apache.lucene.index.ConcurrentMergeScheduler; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.TieredMergePolicy; @@ -40,34 +39,39 @@ public class TestConfig extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-test-misc.xml","schema-reversed.xml"); + initCore("solrconfig-test-misc.xml", "schema-reversed.xml"); } @Test public void testLib() throws IOException { SolrResourceLoader loader = h.getCore().getResourceLoader(); InputStream data = null; - String[] expectedFiles = new String[] { "empty-file-main-lib.txt", - "empty-file-a1.txt", - "empty-file-a2.txt", - "empty-file-b1.txt", - "empty-file-b2.txt", - "empty-file-c1.txt" }; + String[] expectedFiles = + new String[] { + "empty-file-main-lib.txt", + "empty-file-a1.txt", + "empty-file-a2.txt", + "empty-file-b1.txt", + "empty-file-b2.txt", + "empty-file-c1.txt" + }; for (String f : expectedFiles) { data = loader.openResource(f); assertNotNull("Should have found file " + f, data); data.close(); } - String[] unexpectedFiles = new String[] { "empty-file-c2.txt", - "empty-file-d2.txt" }; + String[] unexpectedFiles = new String[] {"empty-file-c2.txt", "empty-file-d2.txt"}; for (String f : unexpectedFiles) { data = null; try { data = loader.openResource(f); - } catch (Exception e) { /* :NOOP: (un)expected */ } + } catch (Exception e) { + /* :NOOP: (un)expected */ + } assertNull("should not have been able to find " + f, data); } } + @Test public void testDisableRequetsHandler() throws 
Exception { assertNull(h.getCore().getRequestHandler("/disabled")); @@ -87,9 +91,9 @@ public void testJavaProperty() { s = solrConfig.get("propTest").attr("attr2", "default"); assertEquals("default-from-config", s); - - assertEquals("prefix-proptwo-suffix", solrConfig.get("propTest", - it -> "default-from-config".equals(it.attr("attr2"))).txt()); + assertEquals( + "prefix-proptwo-suffix", + solrConfig.get("propTest", it -> "default-from-config".equals(it.attr("attr2"))).txt()); List nl = solrConfig.root.getAll("propTest"); assertEquals(1, nl.size()); @@ -98,57 +102,56 @@ public void testJavaProperty() { assertEquals("prefix-proptwo-suffix", solrConfig.get("propTest").txt()); } - @Test - public void testCacheEnablingDisabling() throws Exception { - // ensure if cache is not defined in the config then cache is disabled - SolrConfig sc = new SolrConfig(TEST_PATH().resolve("collection1"), "solrconfig-defaults.xml"); - assertNull(sc.filterCacheConfig); - assertNull(sc.queryResultCacheConfig); - assertNull(sc.documentCacheConfig); - // - assertNotNull(sc.userCacheConfigs); - assertEquals(Collections.emptyMap(), sc.userCacheConfigs); - - // enable all the core caches (and one user cache) via system properties and verify - System.setProperty("filterCache.enabled", "true"); - System.setProperty("queryResultCache.enabled", "true"); - System.setProperty("documentCache.enabled", "true"); - System.setProperty("user_defined_cache_XXX.enabled","true"); - // user_defined_cache_ZZZ.enabled defaults to false in config - - sc = new SolrConfig(TEST_PATH().resolve("collection1"), "solrconfig-cache-enable-disable.xml"); - assertNotNull(sc.filterCacheConfig); - assertNotNull(sc.queryResultCacheConfig); - assertNotNull(sc.documentCacheConfig); - // - assertNotNull(sc.userCacheConfigs); - assertEquals(1, sc.userCacheConfigs.size()); - assertNotNull(sc.userCacheConfigs.get("user_defined_cache_XXX")); - - // disable all the core caches (and enable both user caches) via system properties and verify - System.setProperty("filterCache.enabled", "false"); - System.setProperty("queryResultCache.enabled", "false"); - System.setProperty("documentCache.enabled", "false"); - System.setProperty("user_defined_cache_XXX.enabled","true"); - System.setProperty("user_defined_cache_ZZZ.enabled","true"); - - sc = new SolrConfig(TEST_PATH().resolve("collection1"), "solrconfig-cache-enable-disable.xml"); - assertNull(sc.filterCacheConfig); - assertNull(sc.queryResultCacheConfig); - assertNull(sc.documentCacheConfig); - // - assertNotNull(sc.userCacheConfigs); - assertEquals(2, sc.userCacheConfigs.size()); - assertNotNull(sc.userCacheConfigs.get("user_defined_cache_XXX")); - assertNotNull(sc.userCacheConfigs.get("user_defined_cache_ZZZ")); - - System.clearProperty("user_defined_cache_XXX.enabled"); - System.clearProperty("user_defined_cache_ZZZ.enabled"); - System.clearProperty("filterCache.enabled"); - System.clearProperty("queryResultCache.enabled"); - System.clearProperty("documentCache.enabled"); - } - + @Test + public void testCacheEnablingDisabling() throws Exception { + // ensure if cache is not defined in the config then cache is disabled + SolrConfig sc = new SolrConfig(TEST_PATH().resolve("collection1"), "solrconfig-defaults.xml"); + assertNull(sc.filterCacheConfig); + assertNull(sc.queryResultCacheConfig); + assertNull(sc.documentCacheConfig); + // + assertNotNull(sc.userCacheConfigs); + assertEquals(Collections.emptyMap(), sc.userCacheConfigs); + + // enable all the core caches (and one user cache) via system properties and 
verify + System.setProperty("filterCache.enabled", "true"); + System.setProperty("queryResultCache.enabled", "true"); + System.setProperty("documentCache.enabled", "true"); + System.setProperty("user_defined_cache_XXX.enabled", "true"); + // user_defined_cache_ZZZ.enabled defaults to false in config + + sc = new SolrConfig(TEST_PATH().resolve("collection1"), "solrconfig-cache-enable-disable.xml"); + assertNotNull(sc.filterCacheConfig); + assertNotNull(sc.queryResultCacheConfig); + assertNotNull(sc.documentCacheConfig); + // + assertNotNull(sc.userCacheConfigs); + assertEquals(1, sc.userCacheConfigs.size()); + assertNotNull(sc.userCacheConfigs.get("user_defined_cache_XXX")); + + // disable all the core caches (and enable both user caches) via system properties and verify + System.setProperty("filterCache.enabled", "false"); + System.setProperty("queryResultCache.enabled", "false"); + System.setProperty("documentCache.enabled", "false"); + System.setProperty("user_defined_cache_XXX.enabled", "true"); + System.setProperty("user_defined_cache_ZZZ.enabled", "true"); + + sc = new SolrConfig(TEST_PATH().resolve("collection1"), "solrconfig-cache-enable-disable.xml"); + assertNull(sc.filterCacheConfig); + assertNull(sc.queryResultCacheConfig); + assertNull(sc.documentCacheConfig); + // + assertNotNull(sc.userCacheConfigs); + assertEquals(2, sc.userCacheConfigs.size()); + assertNotNull(sc.userCacheConfigs.get("user_defined_cache_XXX")); + assertNotNull(sc.userCacheConfigs.get("user_defined_cache_ZZZ")); + + System.clearProperty("user_defined_cache_XXX.enabled"); + System.clearProperty("user_defined_cache_ZZZ.enabled"); + System.clearProperty("filterCache.enabled"); + System.clearProperty("queryResultCache.enabled"); + System.clearProperty("documentCache.enabled"); + } // If defaults change, add test methods to cover each version @Test @@ -160,26 +163,40 @@ public void testDefaults() throws Exception { SolrConfig sc = new SolrConfig(TEST_PATH().resolve("collection1"), "solrconfig-defaults.xml"); SolrIndexConfig sic = sc.indexConfig; - ++numDefaultsTested; assertEquals("default useCompoundFile", false, sic.useCompoundFile); + ++numDefaultsTested; + assertEquals("default useCompoundFile", false, sic.useCompoundFile); - ++numDefaultsTested; assertEquals("default maxBufferedDocs", -1, sic.maxBufferedDocs); + ++numDefaultsTested; + assertEquals("default maxBufferedDocs", -1, sic.maxBufferedDocs); - ++numDefaultsTested; assertEquals("default ramBufferSizeMB", 100.0D, sic.ramBufferSizeMB, 0.0D); - ++numDefaultsTested; assertEquals("default ramPerThreadHardLimitMB", -1, sic.ramPerThreadHardLimitMB); - ++numDefaultsTested; assertEquals("default writeLockTimeout", -1, sic.writeLockTimeout); - ++numDefaultsTested; assertEquals("default LockType", DirectoryFactory.LOCK_TYPE_NATIVE, sic.lockType); + ++numDefaultsTested; + assertEquals("default ramBufferSizeMB", 100.0D, sic.ramBufferSizeMB, 0.0D); + ++numDefaultsTested; + assertEquals("default ramPerThreadHardLimitMB", -1, sic.ramPerThreadHardLimitMB); + ++numDefaultsTested; + assertEquals("default writeLockTimeout", -1, sic.writeLockTimeout); + ++numDefaultsTested; + assertEquals("default LockType", DirectoryFactory.LOCK_TYPE_NATIVE, sic.lockType); - ++numDefaultsTested; assertEquals("default infoStream", InfoStream.NO_OUTPUT, sic.infoStream); + ++numDefaultsTested; + assertEquals("default infoStream", InfoStream.NO_OUTPUT, sic.infoStream); - ++numDefaultsTested; assertNotNull("default metrics", sic.metricsInfo); + ++numDefaultsTested; + assertNotNull("default 
metrics", sic.metricsInfo); - ++numDefaultsTested; assertEquals("default maxCommitMergeWaitTime", -1, sic.maxCommitMergeWaitMillis); + ++numDefaultsTested; + assertEquals("default maxCommitMergeWaitTime", -1, sic.maxCommitMergeWaitMillis); - ++numDefaultsTested; ++numNullDefaults; + ++numDefaultsTested; + ++numNullDefaults; assertNull("default mergePolicyFactoryInfo", sic.mergePolicyFactoryInfo); - ++numDefaultsTested; ++numNullDefaults; assertNull("default mergeSchedulerInfo", sic.mergeSchedulerInfo); - ++numDefaultsTested; ++numNullDefaults; assertNull("default mergedSegmentWarmerInfo", sic.mergedSegmentWarmerInfo); + ++numDefaultsTested; + ++numNullDefaults; + assertNull("default mergeSchedulerInfo", sic.mergeSchedulerInfo); + ++numDefaultsTested; + ++numNullDefaults; + assertNull("default mergedSegmentWarmerInfo", sic.mergedSegmentWarmerInfo); IndexSchema indexSchema = IndexSchemaFactory.buildIndexSchema("schema.xml", solrConfig); IndexWriterConfig iwc = sic.toIndexWriterConfig(h.getCore()); @@ -193,7 +210,11 @@ public void testDefaults() throws Exception { assertNull("non-null mergedSegmentWarmer", iwc.getMergedSegmentWarmer()); final int numDefaultsMapped = sic.toMap(new LinkedHashMap<>()).size(); - assertEquals("numDefaultsTested vs. numDefaultsMapped+numNullDefaults ="+sic.toMap(new LinkedHashMap<>()).keySet(), numDefaultsTested, numDefaultsMapped+numNullDefaults); + assertEquals( + "numDefaultsTested vs. numDefaultsMapped+numNullDefaults =" + + sic.toMap(new LinkedHashMap<>()).keySet(), + numDefaultsTested, + numDefaultsMapped + numNullDefaults); } @Test @@ -205,25 +226,36 @@ public void testConvertAutoCommitMaxSizeStringToBytes() { Assert.assertEquals(307200, SolrConfig.convertHeapOptionStyleConfigStringToBytes("300K")); Assert.assertEquals(314572800, SolrConfig.convertHeapOptionStyleConfigStringToBytes("300m")); Assert.assertEquals(314572800, SolrConfig.convertHeapOptionStyleConfigStringToBytes("300M")); - Assert.assertEquals(322122547200L, SolrConfig.convertHeapOptionStyleConfigStringToBytes("300g")); - Assert.assertEquals(322122547200L, SolrConfig.convertHeapOptionStyleConfigStringToBytes("300G")); + Assert.assertEquals( + 322122547200L, SolrConfig.convertHeapOptionStyleConfigStringToBytes("300g")); + Assert.assertEquals( + 322122547200L, SolrConfig.convertHeapOptionStyleConfigStringToBytes("300G")); Assert.assertEquals(-1, SolrConfig.convertHeapOptionStyleConfigStringToBytes("")); // Invalid values - RuntimeException thrown = expectThrows(RuntimeException.class, () -> { - SolrConfig.convertHeapOptionStyleConfigStringToBytes("3jbk32k"); // valid suffix but non-numeric prefix - }); + RuntimeException thrown = + expectThrows( + RuntimeException.class, + () -> { + SolrConfig.convertHeapOptionStyleConfigStringToBytes( + "3jbk32k"); // valid suffix but non-numeric prefix + }); assertTrue(thrown.getMessage().contains("Invalid")); - thrown = expectThrows(RuntimeException.class, () -> { - SolrConfig.convertHeapOptionStyleConfigStringToBytes("300x"); // valid prefix but invalid suffix - }); + thrown = + expectThrows( + RuntimeException.class, + () -> { + SolrConfig.convertHeapOptionStyleConfigStringToBytes( + "300x"); // valid prefix but invalid suffix + }); assertTrue(thrown.getMessage().contains("Invalid")); } @Test public void testMaxSizeSettingWithoutAutoCommit() throws Exception { - SolrConfig solrConfig = new SolrConfig(TEST_PATH().resolve("collection1"), "bad-solrconfig-no-autocommit-tag.xml"); + SolrConfig solrConfig = + new SolrConfig(TEST_PATH().resolve("collection1"), 
"bad-solrconfig-no-autocommit-tag.xml"); Assert.assertEquals(-1, solrConfig.getUpdateHandlerInfo().autoCommitMaxSizeBytes); Assert.assertEquals(-1, solrConfig.getUpdateHandlerInfo().autoCommmitMaxDocs); Assert.assertEquals(-1, solrConfig.getUpdateHandlerInfo().autoCommmitMaxTime); @@ -235,13 +267,18 @@ public void testSanityCheckTestSysPropsAreUsed() throws Exception { SolrConfig sc = new SolrConfig(TEST_PATH().resolve("collection1"), "solrconfig-basic.xml"); SolrIndexConfig sic = sc.indexConfig; - assertEquals("ramBufferSizeMB sysprop", - Double.parseDouble(System.getProperty("solr.tests.ramBufferSizeMB")), - sic.ramBufferSizeMB, 0.0D); - assertEquals("ramPerThreadHardLimitMB sysprop", - Integer.parseInt(System.getProperty("solr.tests.ramPerThreadHardLimitMB")), sic.ramPerThreadHardLimitMB); - assertEquals("useCompoundFile sysprop", - Boolean.parseBoolean(System.getProperty("useCompoundFile")), sic.useCompoundFile); + assertEquals( + "ramBufferSizeMB sysprop", + Double.parseDouble(System.getProperty("solr.tests.ramBufferSizeMB")), + sic.ramBufferSizeMB, + 0.0D); + assertEquals( + "ramPerThreadHardLimitMB sysprop", + Integer.parseInt(System.getProperty("solr.tests.ramPerThreadHardLimitMB")), + sic.ramPerThreadHardLimitMB); + assertEquals( + "useCompoundFile sysprop", + Boolean.parseBoolean(System.getProperty("useCompoundFile")), + sic.useCompoundFile); } - } diff --git a/solr/core/src/test/org/apache/solr/core/TestConfigOverlay.java b/solr/core/src/test/org/apache/solr/core/TestConfigOverlay.java index 857082cce17..e0d0d62b81e 100644 --- a/solr/core/src/test/org/apache/solr/core/TestConfigOverlay.java +++ b/solr/core/src/test/org/apache/solr/core/TestConfigOverlay.java @@ -16,13 +16,12 @@ */ package org.apache.solr.core; +import static org.apache.solr.core.ConfigOverlay.isEditableProp; + import java.util.Collections; import java.util.Map; - import org.apache.solr.SolrTestCase; -import static org.apache.solr.core.ConfigOverlay.isEditableProp; - public class TestConfigOverlay extends SolrTestCase { public void testPaths() { @@ -45,11 +44,15 @@ public void testPaths() { assertTrue(isEditableProp("query.enableLazyFieldLoading", false, null)); assertTrue(isEditableProp("query.boolTofilterOptimizer", false, null)); - assertTrue(isEditableProp("requestDispatcher.requestParsers.multipartUploadLimitInKB", false, null)); - assertTrue(isEditableProp("requestDispatcher.requestParsers.formdataUploadLimitInKB", false, null)); - assertTrue(isEditableProp("requestDispatcher.requestParsers.enableRemoteStreaming", false, null)); + assertTrue( + isEditableProp("requestDispatcher.requestParsers.multipartUploadLimitInKB", false, null)); + assertTrue( + isEditableProp("requestDispatcher.requestParsers.formdataUploadLimitInKB", false, null)); + assertTrue( + isEditableProp("requestDispatcher.requestParsers.enableRemoteStreaming", false, null)); assertTrue(isEditableProp("requestDispatcher.requestParsers.enableStreamBody", false, null)); - assertTrue(isEditableProp("requestDispatcher.requestParsers.addHttpRequestToContext", false, null)); + assertTrue( + isEditableProp("requestDispatcher.requestParsers.addHttpRequestToContext", false, null)); assertTrue(isEditableProp("requestDispatcher.handleSelect", false, null)); @@ -59,15 +62,13 @@ public void testPaths() { assertFalse(isEditableProp("query/filterCache/@initialSize1", true, null)); } - public void testSetProperty(){ - ConfigOverlay overlay = new ConfigOverlay(Collections.emptyMap(),0); - overlay = overlay.setProperty("query.filterCache.initialSize",100); + 
public void testSetProperty() { + ConfigOverlay overlay = new ConfigOverlay(Collections.emptyMap(), 0); + overlay = overlay.setProperty("query.filterCache.initialSize", 100); assertEquals(100, overlay.getXPathProperty("query/filterCache/@initialSize")); Map map = overlay.getEditableSubProperties("query/filterCache"); assertNotNull(map); - assertEquals(1,map.size()); - assertEquals(100,map.get("initialSize")); + assertEquals(1, map.size()); + assertEquals(100, map.get("initialSize")); } - - } diff --git a/solr/core/src/test/org/apache/solr/core/TestConfigSetImmutable.java b/solr/core/src/test/org/apache/solr/core/TestConfigSetImmutable.java index c517edd6b66..ec5f29229b3 100644 --- a/solr/core/src/test/org/apache/solr/core/TestConfigSetImmutable.java +++ b/solr/core/src/test/org/apache/solr/core/TestConfigSetImmutable.java @@ -20,7 +20,6 @@ import java.io.StringReader; import java.nio.charset.StandardCharsets; import java.util.Map; - import org.apache.commons.io.FileUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.impl.XMLResponseParser; @@ -32,10 +31,11 @@ import org.junit.Test; /** - * Test that a ConfigSet marked as immutable cannot be modified via - * the known APIs, i.e. SolrConfigHandler and SchemaHandler. + * Test that a ConfigSet marked as immutable cannot be modified via the known APIs, i.e. + * SolrConfigHandler and SchemaHandler. */ -// See: https://issues.apache.org/jira/browse/SOLR-12028 Tests cannot remove files on Windows machines occasionally +// See: https://issues.apache.org/jira/browse/SOLR-12028 Tests cannot remove files on Windows +// machines occasionally public class TestConfigSetImmutable extends RestTestBase { private static final String collection = "collection1"; @@ -47,12 +47,20 @@ public void before() throws Exception { File tmpConfDir = new File(tmpSolrHome, confDir); FileUtils.copyDirectory(new File(TEST_HOME()), tmpSolrHome.getAbsoluteFile()); // make the ConfigSet immutable - FileUtils.write(new File(tmpConfDir, "configsetprops.json"), new StringBuilder("{\"immutable\":\"true\"}"), StandardCharsets.UTF_8); + FileUtils.write( + new File(tmpConfDir, "configsetprops.json"), + new StringBuilder("{\"immutable\":\"true\"}"), + StandardCharsets.UTF_8); System.setProperty("managed.schema.mutable", "true"); - createJettyAndHarness(tmpSolrHome.getAbsolutePath(), "solrconfig-schemaless.xml", "schema-rest.xml", - "/solr", true, null); + createJettyAndHarness( + tmpSolrHome.getAbsolutePath(), + "solrconfig-schemaless.xml", + "schema-rest.xml", + "/solr", + true, + null); } @After @@ -70,9 +78,10 @@ public void after() throws Exception { @Test public void testSolrConfigHandlerImmutable() throws Exception { - String payload = "{\n" + - "'create-requesthandler' : { 'name' : '/x', 'class': 'org.apache.solr.handler.DumpRequestHandler' , 'startup' : 'lazy'}\n" + - "}"; + String payload = + "{\n" + + "'create-requesthandler' : { 'name' : '/x', 'class': 'org.apache.solr.handler.DumpRequestHandler' , 'startup' : 'lazy'}\n" + + "}"; String uri = "/config"; String response = restTestHarness.post(uri, SolrTestCaseJ4.json(payload)); Map map = (Map) Utils.fromJSONString(response); @@ -82,20 +91,21 @@ public void testSolrConfigHandlerImmutable() throws Exception { @Test public void testSchemaHandlerImmutable() throws Exception { - String payload = "{\n" + - " 'add-field' : {\n" + - " 'name':'a1',\n" + - " 'type': 'string',\n" + - " 'stored':true,\n" + - " 'indexed':false\n" + - " },\n" + - " }"; + String payload = + "{\n" + + " 'add-field' : {\n" + + " 
'name':'a1',\n" + + " 'type': 'string',\n" + + " 'stored':true,\n" + + " 'indexed':false\n" + + " },\n" + + " }"; String response = restTestHarness.post("/schema", json(payload)); Map map = (Map) Utils.fromJSONString(response); - Map error = (Map)map.get("error"); + Map error = (Map) map.get("error"); assertNotNull("No errors", error); - String msg = (String)error.get("msg"); + String msg = (String) error.get("msg"); assertTrue(msg.contains("immutable")); } @@ -111,8 +121,9 @@ public void testAddSchemaFieldsImmutable() throws Exception { assertNull(listResponse.get(error)); // check writing a new field is not okay - String updateXMLNotSafe = "\"testdoc\"" + - "\"foobar\""; + String updateXMLNotSafe = + "\"testdoc\"" + + "\"foobar\""; response = restTestHarness.update(updateXMLNotSafe); listResponse = parser.processResponse(new StringReader(response)); assertNotNull(listResponse.get(error)); diff --git a/solr/core/src/test/org/apache/solr/core/TestConfigSetProperties.java b/solr/core/src/test/org/apache/solr/core/TestConfigSetProperties.java index 767bdff4f00..3610e549743 100644 --- a/solr/core/src/test/org/apache/solr/core/TestConfigSetProperties.java +++ b/solr/core/src/test/org/apache/solr/core/TestConfigSetProperties.java @@ -16,13 +16,12 @@ */ package org.apache.solr.core; +import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.Collections; import java.util.Map; - -import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; @@ -35,9 +34,7 @@ public class TestConfigSetProperties extends SolrTestCaseJ4 { - @Rule - public TestRule testRule = RuleChain.outerRule(new SystemPropertiesRestoreRule()); - + @Rule public TestRule testRule = RuleChain.outerRule(new SystemPropertiesRestoreRule()); @Test public void testNoConfigSetPropertiesFile() throws Exception { @@ -46,17 +43,23 @@ public void testNoConfigSetPropertiesFile() throws Exception { @Test public void testEmptyConfigSetProperties() throws Exception { - SolrException thrown = expectThrows(SolrException.class, () -> { - createConfigSetProps(""); - }); + SolrException thrown = + expectThrows( + SolrException.class, + () -> { + createConfigSetProps(""); + }); assertEquals(ErrorCode.SERVER_ERROR.code, thrown.code()); } @Test public void testConfigSetPropertiesNotMap() throws Exception { - SolrException thrown = expectThrows(SolrException.class, () -> { - createConfigSetProps(Utils.toJSONString(new String[] {"test"})); - }); + SolrException thrown = + expectThrows( + SolrException.class, + () -> { + createConfigSetProps(Utils.toJSONString(new String[] {"test"})); + }); assertEquals(ErrorCode.SERVER_ERROR.code, thrown.code()); } diff --git a/solr/core/src/test/org/apache/solr/core/TestConfigSets.java b/solr/core/src/test/org/apache/solr/core/TestConfigSets.java index e044cfb6b74..58ee28f3475 100644 --- a/solr/core/src/test/org/apache/solr/core/TestConfigSets.java +++ b/solr/core/src/test/org/apache/solr/core/TestConfigSets.java @@ -16,13 +16,17 @@ */ package org.apache.solr.core; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.StringContains.containsString; + +import 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; +import com.google.common.collect.ImmutableMap; import java.io.File; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; - -import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; -import com.google.common.collect.ImmutableMap; import org.apache.commons.io.FileUtils; import org.apache.solr.SolrTestCaseJ4; import org.junit.Rule; @@ -30,17 +34,12 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.CoreMatchers.nullValue; -import static org.hamcrest.core.Is.is; -import static org.hamcrest.core.StringContains.containsString; - public class TestConfigSets extends SolrTestCaseJ4 { - @Rule - public TestRule testRule = RuleChain.outerRule(new SystemPropertiesRestoreRule()); + @Rule public TestRule testRule = RuleChain.outerRule(new SystemPropertiesRestoreRule()); - public static String solrxml = "${configsets:configsets}"; + public static String solrxml = + "${configsets:configsets}"; public CoreContainer setupContainer(String configSetsBaseDir) { Path testDirectory = createTempDir(); @@ -57,15 +56,19 @@ public CoreContainer setupContainer(String configSetsBaseDir) { public void testDefaultConfigSetBasePathResolution() throws IOException { Path solrHome = Paths.get("/path/to/solr/home"); - NodeConfig config - = SolrXmlConfig.fromString(solrHome, "configsets"); - assertThat(config.getConfigSetBaseDirectory().toAbsolutePath(), - is(Paths.get("/path/to/solr/home/configsets").toAbsolutePath())); - - NodeConfig absConfig - = SolrXmlConfig.fromString(solrHome, "/path/to/configsets"); - assertThat(absConfig.getConfigSetBaseDirectory().toAbsolutePath(), is(Paths.get("/path/to/configsets").toAbsolutePath())); - + NodeConfig config = + SolrXmlConfig.fromString( + solrHome, "configsets"); + assertThat( + config.getConfigSetBaseDirectory().toAbsolutePath(), + is(Paths.get("/path/to/solr/home/configsets").toAbsolutePath())); + + NodeConfig absConfig = + SolrXmlConfig.fromString( + solrHome, "/path/to/configsets"); + assertThat( + absConfig.getConfigSetBaseDirectory().toAbsolutePath(), + is(Paths.get("/path/to/configsets").toAbsolutePath())); } @Test @@ -77,11 +80,11 @@ public void testConfigSetServiceFindsConfigSets() { SolrCore core1 = container.create("core1", ImmutableMap.of("configSet", "configset-2")); assertThat(core1.getCoreDescriptor().getName(), is("core1")); - assertThat(Paths.get(core1.getDataDir()).toString(), is(solrHome.resolve("core1").resolve("data").toString())); - } - finally { - if (container != null) - container.shutdown(); + assertThat( + Paths.get(core1.getDataDir()).toString(), + is(solrHome.resolve("core1").resolve("data").toString())); + } finally { + if (container != null) container.shutdown(); } } @@ -89,14 +92,17 @@ public void testConfigSetServiceFindsConfigSets() { public void testNonExistentConfigSetThrowsException() { final CoreContainer container = setupContainer(getFile("solr/configsets").getAbsolutePath()); try { - Exception thrown = expectThrows(Exception.class, "Expected core creation to fail", () -> { - container.create("core1", ImmutableMap.of("configSet", "nonexistent")); - }); + Exception thrown = + expectThrows( + Exception.class, + "Expected core creation to fail", + () -> { + container.create("core1", ImmutableMap.of("configSet", "nonexistent")); + }); Throwable wrappedException = getWrappedException(thrown); 
assertThat(wrappedException.getMessage(), containsString("nonexistent")); } finally { - if (container != null) - container.shutdown(); + if (container != null) container.shutdown(); } } @@ -115,20 +121,24 @@ public void testConfigSetOnCoreReload() throws IOException { // We initially don't have a /dump handler defined SolrCore core = container.create("core1", ImmutableMap.of("configSet", "configset-2")); - assertThat("No /dump handler should be defined in the initial configuration", - core.getRequestHandler("/dump"), is(nullValue())); + assertThat( + "No /dump handler should be defined in the initial configuration", + core.getRequestHandler("/dump"), + is(nullValue())); // Now copy in a config with a /dump handler and reload - FileUtils.copyFile(getFile("solr/collection1/conf/solrconfig-withgethandler.xml"), + FileUtils.copyFile( + getFile("solr/collection1/conf/solrconfig-withgethandler.xml"), new File(new File(configSetsDir, "configset-2/conf"), "solrconfig.xml")); container.reload("core1"); core = container.getCore("core1"); - assertThat("A /dump handler should be defined in the reloaded configuration", - core.getRequestHandler("/dump"), is(notNullValue())); + assertThat( + "A /dump handler should be defined in the reloaded configuration", + core.getRequestHandler("/dump"), + is(notNullValue())); core.close(); container.shutdown(); } - } diff --git a/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java b/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java index 6a8c0f01647..bbe302fb4fa 100644 --- a/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java +++ b/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java @@ -16,13 +16,20 @@ */ package org.apache.solr.core; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsInstanceOf.instanceOf; +import static org.junit.matchers.JUnitMatchers.containsString; + +import com.google.common.base.Throwables; +import com.google.common.collect.ImmutableMap; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; - import java.util.ArrayList; import java.util.Collection; import java.util.List; @@ -31,9 +38,6 @@ import java.util.jar.JarEntry; import java.util.jar.JarOutputStream; import java.util.regex.Pattern; - -import com.google.common.base.Throwables; -import com.google.common.collect.ImmutableMap; import org.apache.commons.exec.OS; import org.apache.commons.io.FileUtils; import org.apache.lucene.util.IOUtils; @@ -52,13 +56,6 @@ import org.junit.Test; import org.xml.sax.SAXParseException; -import static org.hamcrest.CoreMatchers.not; -import static org.hamcrest.CoreMatchers.nullValue; -import static org.hamcrest.core.Is.is; -import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.junit.matchers.JUnitMatchers.containsString; - - public class TestCoreContainer extends SolrTestCaseJ4 { private static String oldSolrHome; @@ -92,7 +89,8 @@ private CoreContainer init(Path homeDirectory, String xml) throws Exception { public void testSolrHomeAndResourceLoader() throws Exception { // regardless of what sys prop may be set, the CoreContainer's init arg should be the definitive - // solr home -- and nothing in the call stack should be "setting" the sys prop to make that work... 
+ // solr home -- and nothing in the call stack should be "setting" the sys prop to make that + // work... final Path fakeSolrHome = createTempDir().toAbsolutePath(); System.setProperty(SOLR_HOME_PROP, fakeSolrHome.toString()); final Path realSolrHome = createTempDir().toAbsolutePath(); @@ -106,12 +104,12 @@ public void testSolrHomeAndResourceLoader() throws Exception { } finally { cc.shutdown(); } - assertEquals("Nothing in solr should be overriding the solr home sys prop in order to work!", - fakeSolrHome.toString(), - System.getProperty(SOLR_HOME_PROP)); + assertEquals( + "Nothing in solr should be overriding the solr home sys prop in order to work!", + fakeSolrHome.toString(), + System.getProperty(SOLR_HOME_PROP)); } - @Test public void testShareSchema() throws Exception { System.setProperty("shareSchema", "true"); @@ -121,7 +119,7 @@ public void testShareSchema() throws Exception { try { SolrCore core1 = cores.create("core1", ImmutableMap.of("configSet", "minimal")); SolrCore core2 = cores.create("core2", ImmutableMap.of("configSet", "minimal")); - + assertSame(core1.getLatestSchema(), core2.getLatestSchema()); } finally { @@ -147,9 +145,11 @@ public void testReloadSequential() throws Exception { private static class TestReloadThread extends Thread { private final CoreContainer cc; + TestReloadThread(CoreContainer cc) { this.cc = cc; } + @Override public void run() { cc.reload("core1"); @@ -161,7 +161,6 @@ public void testReloadThreaded() throws Exception { final CoreContainer cc = init(CONFIGSETS_SOLR_XML); cc.create("core1", ImmutableMap.of("configSet", "minimal")); - List threads = new ArrayList<>(); int numThreads = 4; for (int i = 0; i < numThreads; i++) { @@ -177,7 +176,6 @@ public void testReloadThreaded() throws Exception { } cc.shutdown(); - } private static class TestCreateThread extends Thread { @@ -190,13 +188,15 @@ private static class TestCreateThread extends Thread { this.cc = cc; this.coreName = coreName; } + @Override public void run() { try { core = cc.create(coreName, ImmutableMap.of("configSet", "minimal")); } catch (SolrException e) { String msg = e.getMessage(); - foundExpectedError = msg.contains("Already creating a core with name") || msg.contains("already exists"); + foundExpectedError = + msg.contains("Already creating a core with name") || msg.contains("already exists"); } } @@ -216,7 +216,6 @@ public void testCreateThreaded() throws Exception { final CoreContainer cc = init(CONFIGSETS_SOLR_XML); final int NUM_THREADS = 3; - // Try this a few times to increase the chances of failure. for (int idx = 0; idx < 3; ++idx) { TestCreateThread[] threads = new TestCreateThread[NUM_THREADS]; @@ -246,7 +245,6 @@ public void testCreateThreaded() throws Exception { } assertEquals("Only one create should have succeeded", 1, goodCount); - // Check bookkeeping by removing and creating the core again, making sure // we didn't leave the record of trying to create this core around. // NOTE: unloading the core closes it too. @@ -254,9 +252,12 @@ public void testCreateThreaded() throws Exception { cc.create(testName, ImmutableMap.of("configSet", "minimal")); // This call should fail with a different error because the core was // created successfully. 
- SolrException thrown = expectThrows(SolrException.class, () -> - cc.create(testName, ImmutableMap.of("configSet", "minimal"))); - assertTrue("Should have 'already exists' error", thrown.getMessage().contains("already exists")); + SolrException thrown = + expectThrows( + SolrException.class, + () -> cc.create(testName, ImmutableMap.of("configSet", "minimal"))); + assertTrue( + "Should have 'already exists' error", thrown.getMessage().contains("already exists")); cc.unload(testName, true, true, true); } @@ -269,36 +270,42 @@ public void testNoCores() throws Exception { CoreContainer cores = init(CONFIGSETS_SOLR_XML); try { - //assert zero cores + // assert zero cores assertEquals("There should not be cores", 0, cores.getCores().size()); - - //add a new core + + // add a new core cores.create("core1", ImmutableMap.of("configSet", "minimal")); - //assert one registered core + // assert one registered core assertEquals("There core registered", 1, cores.getCores().size()); cores.unload("core1"); - //assert cero cores + // assert zero cores assertEquals("There should not be cores", 0, cores.getCores().size()); - - // try and remove a core that does not exist - SolrException thrown = expectThrows(SolrException.class, () -> { - cores.unload("non_existent_core"); - }); - assertThat(thrown.getMessage(), containsString("Cannot unload non-existent core [non_existent_core]")); + // try and remove a core that does not exist + SolrException thrown = + expectThrows( + SolrException.class, + () -> { + cores.unload("non_existent_core"); + }); + assertThat( + thrown.getMessage(), + containsString("Cannot unload non-existent core [non_existent_core]")); // try and remove a null core - thrown = expectThrows(SolrException.class, () -> { - cores.unload(null); - }); + thrown = + expectThrows( + SolrException.class, + () -> { + cores.unload(null); + }); assertThat(thrown.getMessage(), containsString("Cannot unload non-existent core [null]")); } finally { cores.shutdown(); } - } @Test @@ -306,8 +313,7 @@ public void testLogWatcherEnabledByDefault() throws Exception { CoreContainer cc = init(""); try { assertNotNull(cc.getLogging()); - } - finally { + } finally { cc.shutdown(); } } @@ -320,17 +326,20 @@ public void testDeleteBadCores() throws Exception { Path solrHome = createTempDir(); System.setProperty("configsets", getFile("solr/configsets").getAbsolutePath()); - final CoreContainer cc = new CoreContainer(SolrXmlConfig.fromString(solrHome, CONFIGSETS_SOLR_XML), cl); + final CoreContainer cc = + new CoreContainer(SolrXmlConfig.fromString(solrHome, CONFIGSETS_SOLR_XML), cl); Path corePath = solrHome.resolve("badcore"); - CoreDescriptor badcore = new CoreDescriptor("badcore", corePath, cc, - "configSet", "nosuchconfigset"); + CoreDescriptor badcore = + new CoreDescriptor("badcore", corePath, cc, "configSet", "nosuchconfigset"); cl.add(badcore); try { cc.load(); assertThat(cc.getCoreInitFailures().size(), is(1)); - assertThat(cc.getCoreInitFailures().get("badcore").exception.getMessage(), containsString("nosuchconfigset")); + assertThat( + cc.getCoreInitFailures().get("badcore").exception.getMessage(), + containsString("nosuchconfigset")); cc.unload("badcore", true, true, true); assertThat(cc.getCoreInitFailures().size(), is(0)); @@ -338,8 +347,7 @@ public void testDeleteBadCores() throws Exception { SolrCore core = cc.create("badcore", ImmutableMap.of("configSet", "minimal")); assertThat(core, not(nullValue())); - } - finally { + } finally { cc.shutdown(); } } @@ -368,7 +376,8 @@ public void testSharedLib() throws 
Exception { File lib = new File(tmpRoot.toFile(), "lib"); lib.mkdirs(); - try (JarOutputStream jar1 = new JarOutputStream(new FileOutputStream(new File(lib, "jar1.jar")))) { + try (JarOutputStream jar1 = + new JarOutputStream(new FileOutputStream(new File(lib, "jar1.jar")))) { jar1.putNextEntry(new JarEntry("defaultSharedLibFile")); jar1.closeEntry(); } @@ -376,7 +385,8 @@ public void testSharedLib() throws Exception { File customLib = new File(tmpRoot.toFile(), "customLib"); customLib.mkdirs(); - try (JarOutputStream jar2 = new JarOutputStream(new FileOutputStream(new File(customLib, "jar2.jar")))) { + try (JarOutputStream jar2 = + new JarOutputStream(new FileOutputStream(new File(customLib, "jar2.jar")))) { jar2.putNextEntry(new JarEntry("customSharedLibFile")); jar2.closeEntry(); } @@ -384,7 +394,8 @@ public void testSharedLib() throws Exception { File customLib2 = new File(tmpRoot.toFile(), "customLib2"); customLib2.mkdirs(); - try (JarOutputStream jar3 = new JarOutputStream(new FileOutputStream(new File(customLib2, "jar3.jar")))) { + try (JarOutputStream jar3 = + new JarOutputStream(new FileOutputStream(new File(customLib2, "jar3.jar")))) { jar3.putNextEntry(new JarEntry("jar3File")); jar3.closeEntry(); } @@ -414,7 +425,8 @@ public void testSharedLib() throws Exception { } // Comma separated list of lib folders - final CoreContainer cc4 = init(tmpRoot, "customLib, customLib2"); + final CoreContainer cc4 = + init(tmpRoot, "customLib, customLib2"); try { cc4.loader.openResource("defaultSharedLibFile").close(); cc4.loader.openResource("customSharedLibFile").close(); @@ -428,17 +440,22 @@ public void testSharedLib() throws Exception { public void testModuleLibs() throws Exception { Path tmpRoot = createTempDir("testModLib"); - File lib = Files.createDirectories(ModuleUtils.getModuleLibPath(tmpRoot, "mod1")).toFile();; + File lib = Files.createDirectories(ModuleUtils.getModuleLibPath(tmpRoot, "mod1")).toFile(); + ; - try (JarOutputStream jar1 = new JarOutputStream(new FileOutputStream(new File(lib, "jar1.jar")))) { + try (JarOutputStream jar1 = + new JarOutputStream(new FileOutputStream(new File(lib, "jar1.jar")))) { jar1.putNextEntry(new JarEntry("moduleLibFile")); jar1.closeEntry(); } - System.setProperty(SolrDispatchFilter.SOLR_INSTALL_DIR_ATTRIBUTE, tmpRoot.toAbsolutePath().toString()); + System.setProperty( + SolrDispatchFilter.SOLR_INSTALL_DIR_ATTRIBUTE, tmpRoot.toAbsolutePath().toString()); final CoreContainer cc1 = init(tmpRoot, ""); try { - Assert.assertThrows(SolrResourceNotFoundException.class, () -> cc1.loader.openResource("moduleLibFile").close()); + Assert.assertThrows( + SolrResourceNotFoundException.class, + () -> cc1.loader.openResource("moduleLibFile").close()); } finally { cc1.shutdown(); } @@ -451,36 +468,52 @@ public void testModuleLibs() throws Exception { cc2.shutdown(); } - SolrException ex = Assert.assertThrows(SolrException.class, () -> - init(tmpRoot, "nope")); + SolrException ex = + Assert.assertThrows( + SolrException.class, + () -> init(tmpRoot, "nope")); assertEquals("No module with name nope", ex.getMessage()); System.clearProperty(SolrDispatchFilter.SOLR_INSTALL_DIR_ATTRIBUTE); } - private static final String CONFIGSETS_SOLR_XML ="\n" + - "\n" + - "${configsets:configsets}\n" + - "${shareSchema:false}\n" + - ""; - - private static final String ALLOW_PATHS_SOLR_XML ="\n" + - "\n" + - "${solr.allowPaths:}\n" + - ""; - - private static final String CUSTOM_HANDLERS_SOLR_XML = "\n" + - "" + - " " + CustomCollectionsHandler.class.getName() + "" + - " " + 
CustomInfoHandler.class.getName() + "" + - " " + CustomCoreAdminHandler.class.getName() + "" + - " " + CustomConfigSetsHandler.class.getName() + "" + - ""; - - private static final String CUSTOM_CONFIG_SET_SERVICE_SOLR_XML = "\n" + - "" + - " " + CustomConfigSetService.class.getName() + "" + - ""; + private static final String CONFIGSETS_SOLR_XML = + "\n" + + "\n" + + "${configsets:configsets}\n" + + "${shareSchema:false}\n" + + ""; + + private static final String ALLOW_PATHS_SOLR_XML = + "\n" + + "\n" + + "${solr.allowPaths:}\n" + + ""; + + private static final String CUSTOM_HANDLERS_SOLR_XML = + "\n" + + "" + + " " + + CustomCollectionsHandler.class.getName() + + "" + + " " + + CustomInfoHandler.class.getName() + + "" + + " " + + CustomCoreAdminHandler.class.getName() + + "" + + " " + + CustomConfigSetsHandler.class.getName() + + "" + + ""; + + private static final String CUSTOM_CONFIG_SET_SERVICE_SOLR_XML = + "\n" + + "" + + " " + + CustomConfigSetService.class.getName() + + "" + + ""; public static class CustomCollectionsHandler extends CollectionsHandler { public CustomCollectionsHandler(CoreContainer cc) { @@ -494,7 +527,7 @@ public CustomConfigSetService(CoreContainer coreContainer) { } @Override - public String configSetName(CoreDescriptor cd){ + public String configSetName(CoreDescriptor cd) { return null; } @@ -504,33 +537,24 @@ public boolean checkConfigExists(String configName) throws IOException { } @Override - public void deleteConfig(String configName) throws IOException { - - } + public void deleteConfig(String configName) throws IOException {} @Override - public void deleteFilesFromConfig(String configName, List filesToDelete) throws IOException { + public void deleteFilesFromConfig(String configName, List filesToDelete) + throws IOException {} - } - - public void copyConfig(String fromConfig, String toConfig) throws IOException { - - } + public void copyConfig(String fromConfig, String toConfig) throws IOException {} @Override - public void uploadConfig(String configName, Path dir) throws IOException { - - } + public void uploadConfig(String configName, Path dir) throws IOException {} @Override - public void uploadFileToConfig(String configName, String fileName, byte[] data, boolean overwriteOnExists) throws IOException { - - } + public void uploadFileToConfig( + String configName, String fileName, byte[] data, boolean overwriteOnExists) + throws IOException {} @Override - public void setConfigMetadata(String configName, Map data) throws IOException { - - } + public void setConfigMetadata(String configName, Map data) throws IOException {} @Override public Map getConfigMetadata(String configName) throws IOException { @@ -538,9 +562,7 @@ public Map getConfigMetadata(String configName) throws IOExcepti } @Override - public void downloadConfig(String configName, Path dir) throws IOException { - - } + public void downloadConfig(String configName, Path dir) throws IOException {} @Override public List listConfigs() throws IOException { @@ -558,12 +580,13 @@ public List getAllConfigFiles(String configName) throws IOException { } @Override - protected SolrResourceLoader createCoreResourceLoader(CoreDescriptor cd){ + protected SolrResourceLoader createCoreResourceLoader(CoreDescriptor cd) { return null; } @Override - protected Long getCurrentSchemaModificationVersion(String configSet, SolrConfig solrConfig, String schemaFileName) throws IOException { + protected Long getCurrentSchemaModificationVersion( + String configSet, SolrConfig solrConfig, String schemaFileName) throws 
IOException { return null; } } @@ -595,7 +618,7 @@ public void assertAllowPathFromSolrXml() throws Exception { try { cc.assertPathAllowed(Paths.get("/tmp")); fail("Path /tmp should not be allowed"); - } catch(SolrException e) { + } catch (SolrException e) { /* Ignore */ } finally { cc.shutdown(); @@ -612,7 +635,7 @@ public void assertAllowPathFromSolrXmlWin() throws Exception { try { cc.assertPathAllowed(Paths.get("C:\\tmp")); fail("Path C:\\tmp should not be allowed"); - } catch(SolrException e) { + } catch (SolrException e) { /* Ignore */ } finally { cc.shutdown(); @@ -653,7 +676,12 @@ public void assertAllowPathNormalization() throws Exception { System.setProperty("solr.allowPaths", "/var/solr/../solr"); CoreContainer cc = init(ALLOW_PATHS_SOLR_XML); cc.assertPathAllowed(Paths.get("/var/solr/foo")); - assertThrows("Path /tmp should not be allowed", SolrException.class, () -> { cc.assertPathAllowed(Paths.get("/tmp")); }); + assertThrows( + "Path /tmp should not be allowed", + SolrException.class, + () -> { + cc.assertPathAllowed(Paths.get("/tmp")); + }); cc.shutdown(); System.clearProperty("solr.allowPaths"); } @@ -664,7 +692,12 @@ public void assertAllowPathNormalizationWin() throws Exception { System.setProperty("solr.allowPaths", "C:\\solr\\..\\solr"); CoreContainer cc = init(ALLOW_PATHS_SOLR_XML); cc.assertPathAllowed(Paths.get("C:\\solr\\foo")); - assertThrows("Path C:\\tmp should not be allowed", SolrException.class, () -> { cc.assertPathAllowed(Paths.get("C:\\tmp")); }); + assertThrows( + "Path C:\\tmp should not be allowed", + SolrException.class, + () -> { + cc.assertPathAllowed(Paths.get("C:\\tmp")); + }); cc.shutdown(); System.clearProperty("solr.allowPaths"); } @@ -675,13 +708,17 @@ public void assertAllowPathNormalizationWin() throws Exception { private void assertPathBlocked(String path) { try { - SolrPaths.assertPathAllowed(Path.of(path), OS.isFamilyWindows() ? ALLOWED_PATHS_WIN : ALLOWED_PATHS); + SolrPaths.assertPathAllowed( + Path.of(path), OS.isFamilyWindows() ? ALLOWED_PATHS_WIN : ALLOWED_PATHS); fail("Path " + path + " sould have been blocked."); - } catch (SolrException e) { /* Expected */ } + } catch (SolrException e) { + /* Expected */ + } } private void assertPathAllowed(String path) { - SolrPaths.assertPathAllowed(Path.of(path), OS.isFamilyWindows() ? ALLOWED_PATHS_WIN : ALLOWED_PATHS); + SolrPaths.assertPathAllowed( + Path.of(path), OS.isFamilyWindows() ? ALLOWED_PATHS_WIN : ALLOWED_PATHS); } @Test @@ -692,11 +729,9 @@ public void testCustomHandlers() throws Exception { assertThat(cc.getCollectionsHandler(), is(instanceOf(CustomCollectionsHandler.class))); assertThat(cc.getInfoHandler(), is(instanceOf(CustomInfoHandler.class))); assertThat(cc.getMultiCoreHandler(), is(instanceOf(CustomCoreAdminHandler.class))); - } - finally { + } finally { cc.shutdown(); } - } @Test @@ -704,8 +739,7 @@ public void testCustomConfigSetService() throws Exception { CoreContainer cc = init(CUSTOM_CONFIG_SET_SERVICE_SOLR_XML); try { assertThat(cc.getConfigSetService(), is(instanceOf(CustomConfigSetService.class))); - } - finally { + } finally { cc.shutdown(); } } @@ -724,36 +758,27 @@ public void create(CoreContainer cc, CoreDescriptor... coreDescriptors) { } @Override - public void persist(CoreContainer cc, CoreDescriptor... coreDescriptors) { - - } + public void persist(CoreContainer cc, CoreDescriptor... coreDescriptors) {} @Override - public void delete(CoreContainer cc, CoreDescriptor... coreDescriptors) { - - } + public void delete(CoreContainer cc, CoreDescriptor... 
coreDescriptors) {} @Override - public void rename(CoreContainer cc, CoreDescriptor oldCD, CoreDescriptor newCD) { - - } + public void rename(CoreContainer cc, CoreDescriptor oldCD, CoreDescriptor newCD) {} @Override - public void swap(CoreContainer cc, CoreDescriptor cd1, CoreDescriptor cd2) { - - } + public void swap(CoreContainer cc, CoreDescriptor cd1, CoreDescriptor cd2) {} @Override public List discover(CoreContainer cc) { return cores; } - } @Test public void testCoreInitFailuresFromEmptyContainer() throws Exception { // reused state - Map failures = null; + Map failures = null; Collection cores = null; Exception fail = null; @@ -774,11 +799,15 @@ public void testCoreInitFailuresFromEmptyContainer() throws Exception { // ----- // try to add a collection with a configset that doesn't exist ignoreException(Pattern.quote("bogus_path")); - SolrException thrown = expectThrows(SolrException.class, () -> { - cc.create("bogus", ImmutableMap.of("configSet", "bogus_path")); - }); + SolrException thrown = + expectThrows( + SolrException.class, + () -> { + cc.create("bogus", ImmutableMap.of("configSet", "bogus_path")); + }); Throwable rootCause = Throwables.getRootCause(thrown); - assertTrue("init exception doesn't mention bogus dir: " + rootCause.getMessage(), + assertTrue( + "init exception doesn't mention bogus dir: " + rootCause.getMessage(), 0 < rootCause.getMessage().indexOf("bogus_path")); // check that we have the cores we expect @@ -792,19 +821,23 @@ public void testCoreInitFailuresFromEmptyContainer() throws Exception { assertEquals("wrong number of core failures", 1, failures.size()); fail = failures.get("bogus").exception; assertNotNull("null failure for test core", fail); - assertTrue("init failure doesn't mention problem: " + fail.getMessage(), + assertTrue( + "init failure doesn't mention problem: " + fail.getMessage(), 0 < fail.getMessage().indexOf("bogus_path")); // check that we get null accessing a non-existent core assertNull(cc.getCore("does_not_exist")); // check that we get a 500 accessing the core with an init failure - thrown = expectThrows(SolrException.class, () -> { - SolrCore c = cc.getCore("bogus"); - }); + thrown = + expectThrows( + SolrException.class, + () -> { + SolrCore c = cc.getCore("bogus"); + }); assertEquals(500, thrown.code()); String cause = Throwables.getRootCause(thrown).getMessage(); - assertTrue("getCore() ex cause doesn't mention init fail: " + cause, - 0 < cause.indexOf("bogus_path")); + assertTrue( + "getCore() ex cause doesn't mention init fail: " + cause, 0 < cause.indexOf("bogus_path")); cc.shutdown(); } @@ -813,7 +846,7 @@ public void testCoreInitFailuresFromEmptyContainer() throws Exception { public void testCoreInitFailuresOnReload() throws Exception { // reused state - Map failures = null; + Map failures = null; Collection cores = null; Exception fail = null; @@ -825,11 +858,12 @@ public void testCoreInitFailuresOnReload() throws Exception { System.setProperty("configsets", getFile("solr/configsets").getAbsolutePath()); - final CoreContainer cc = new CoreContainer(SolrXmlConfig.fromString(solrHome, CONFIGSETS_SOLR_XML), cl); - cl.add(new CoreDescriptor("col_ok", solrHome.resolve("col_ok"), cc, - "configSet", "minimal")); - cl.add(new CoreDescriptor("col_bad", solrHome.resolve("col_bad"), cc, - "configSet", "bad-mergepolicy")); + final CoreContainer cc = + new CoreContainer(SolrXmlConfig.fromString(solrHome, CONFIGSETS_SOLR_XML), cl); + cl.add(new CoreDescriptor("col_ok", solrHome.resolve("col_ok"), cc, "configSet", "minimal")); + cl.add( + 
new CoreDescriptor( + "col_bad", solrHome.resolve("col_bad"), cc, "configSet", "bad-mergepolicy")); cc.load(); // check that we have the cores we expect @@ -844,25 +878,33 @@ public void testCoreInitFailuresOnReload() throws Exception { assertEquals("wrong number of core failures", 1, failures.size()); fail = failures.get("col_bad").exception; assertNotNull("null failure for test core", fail); - assertTrue("init failure doesn't mention problem: " + fail.getMessage(), + assertTrue( + "init failure doesn't mention problem: " + fail.getMessage(), 0 < fail.getMessage().indexOf("DummyMergePolicy")); // check that we get null accessing a non-existent core assertNull(cc.getCore("does_not_exist")); assertFalse(cc.isLoaded("does_not_exist")); // check that we get a 500 accessing the core with an init failure - SolrException thrown = expectThrows(SolrException.class, () -> { - SolrCore c = cc.getCore("col_bad"); - }); + SolrException thrown = + expectThrows( + SolrException.class, + () -> { + SolrCore c = cc.getCore("col_bad"); + }); assertEquals(500, thrown.code()); String cause = thrown.getCause().getCause().getMessage(); - assertTrue("getCore() ex cause doesn't mention init fail: " + cause, 0 < cause.indexOf("DummyMergePolicy")); + assertTrue( + "getCore() ex cause doesn't mention init fail: " + cause, + 0 < cause.indexOf("DummyMergePolicy")); // ----- // "fix" the bad collection - FileUtils.copyFile(getFile("solr/collection1/conf/solrconfig-defaults.xml"), + FileUtils.copyFile( + getFile("solr/collection1/conf/solrconfig-defaults.xml"), FileUtils.getFile(cc.getSolrHome(), "col_bad", "conf", "solrconfig.xml")); - FileUtils.copyFile(getFile("solr/collection1/conf/schema-minimal.xml"), + FileUtils.copyFile( + getFile("solr/collection1/conf/schema-minimal.xml"), FileUtils.getFile(cc.getSolrHome(), "col_bad", "conf", "schema.xml")); cc.create("col_bad", ImmutableMap.of()); @@ -880,14 +922,17 @@ public void testCoreInitFailuresOnReload() throws Exception { assertNotNull("core failures is a null map", failures); assertEquals("wrong number of core failures", 0, failures.size()); - // ----- // try to add a collection with a path that doesn't exist ignoreException(Pattern.quote("bogus_path")); - thrown = expectThrows(SolrException.class, () -> { - cc.create("bogus", ImmutableMap.of("configSet", "bogus_path")); - }); - assertTrue("init exception doesn't mention bogus dir: " + thrown.getCause().getCause().getMessage(), + thrown = + expectThrows( + SolrException.class, + () -> { + cc.create("bogus", ImmutableMap.of("configSet", "bogus_path")); + }); + assertTrue( + "init exception doesn't mention bogus dir: " + thrown.getCause().getCause().getMessage(), 0 < thrown.getCause().getCause().getMessage().indexOf("bogus_path")); // check that we have the cores we expect @@ -903,43 +948,55 @@ public void testCoreInitFailuresOnReload() throws Exception { assertEquals("wrong number of core failures", 1, failures.size()); fail = failures.get("bogus").exception; assertNotNull("null failure for test core", fail); - assertTrue("init failure doesn't mention problem: " + fail.getMessage(), + assertTrue( + "init failure doesn't mention problem: " + fail.getMessage(), 0 < fail.getMessage().indexOf("bogus_path")); // check that we get null accessing a non-existent core assertNull(cc.getCore("does_not_exist")); // check that we get a 500 accessing the core with an init failure - thrown = expectThrows(SolrException.class, () -> { - SolrCore c = cc.getCore("bogus"); - }); + thrown = + expectThrows( + SolrException.class, + () -> 
{ + SolrCore c = cc.getCore("bogus"); + }); assertEquals(500, thrown.code()); cause = thrown.getCause().getMessage(); - assertTrue("getCore() ex cause doesn't mention init fail: " + cause, - 0 < cause.indexOf("bogus_path")); + assertTrue( + "getCore() ex cause doesn't mention init fail: " + cause, 0 < cause.indexOf("bogus_path")); // ----- // break col_bad's config and try to RELOAD to add failure final long col_bad_old_start = getCoreStartTime(cc, "col_bad"); - FileUtils.write - (FileUtils.getFile(cc.getSolrHome(), "col_bad", "conf", "solrconfig.xml"), - "This is giberish, not valid XML <", - IOUtils.UTF_8); + FileUtils.write( + FileUtils.getFile(cc.getSolrHome(), "col_bad", "conf", "solrconfig.xml"), + "This is gibberish, not valid XML <", + IOUtils.UTF_8); ignoreException(Pattern.quote("SAX")); - thrown = expectThrows(SolrException.class, - "corrupt solrconfig.xml failed to trigger exception from reload", - () -> { cc.reload("col_bad"); }); + thrown = + expectThrows( + SolrException.class, + "corrupt solrconfig.xml failed to trigger exception from reload", + () -> { + cc.reload("col_bad"); + }); Throwable rootException = getWrappedException(thrown); - assertTrue("We're supposed to have a wrapped SAXParserException here, but we don't", + assertTrue( + "We're supposed to have a wrapped SAXParseException here, but we don't", rootException instanceof SAXParseException); SAXParseException se = (SAXParseException) rootException; - assertTrue("reload exception doesn't refer to slrconfig.xml " + se.getSystemId(), + assertTrue( + "reload exception doesn't refer to solrconfig.xml " + se.getSystemId(), 0 < se.getSystemId().indexOf("solrconfig.xml")); - assertEquals("Failed core reload should not have changed start time", - col_bad_old_start, getCoreStartTime(cc, "col_bad")); + assertEquals( + "Failed core reload should not have changed start time", + col_bad_old_start, + getCoreStartTime(cc, "col_bad")); // check that we have the cores we expect cores = cc.getLoadedCoreNames(); @@ -954,21 +1011,23 @@ public void testCoreInitFailuresOnReload() throws Exception { assertEquals("wrong number of core failures", 2, failures.size()); Throwable ex = getWrappedException(failures.get("col_bad").exception); assertNotNull("null failure for test core", ex); - assertTrue("init failure isn't SAXParseException", - ex instanceof SAXParseException); + assertTrue("init failure isn't SAXParseException", ex instanceof SAXParseException); SAXParseException saxEx = (SAXParseException) ex; - assertTrue("init failure doesn't mention problem: " + saxEx.toString(), saxEx.getSystemId().contains("solrconfig.xml")); + assertTrue( + "init failure doesn't mention problem: " + saxEx.toString(), + saxEx.getSystemId().contains("solrconfig.xml")); // ---- // fix col_bad's config (again) and RELOAD to fix failure - FileUtils.copyFile(getFile("solr/collection1/conf/solrconfig-defaults.xml"), + FileUtils.copyFile( + getFile("solr/collection1/conf/solrconfig-defaults.xml"), FileUtils.getFile(cc.getSolrHome(), "col_bad", "conf", "solrconfig.xml")); cc.reload("col_bad"); - assertTrue("Core reload should have changed start time", + assertTrue( + "Core reload should have changed start time", col_bad_old_start < getCoreStartTime(cc, "col_bad")); - // check that we have the cores we expect cores = cc.getLoadedCoreNames(); assertNotNull("core names is null", cores); @@ -982,7 +1041,6 @@ public void testCoreInitFailuresOnReload() throws Exception { assertEquals("wrong number of core failures", 1, failures.size()); cc.shutdown(); - } private long 
getCoreStartTime(final CoreContainer cc, final String name) { diff --git a/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java b/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java index 133927fb2f1..9138c769840 100644 --- a/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java +++ b/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java @@ -16,6 +16,13 @@ */ package org.apache.solr.core; +import static org.apache.solr.core.CoreContainer.CORE_DISCOVERY_COMPLETE; +import static org.apache.solr.core.CoreContainer.INITIAL_CORE_LOAD_COMPLETE; +import static org.apache.solr.core.CoreContainer.LOAD_COMPLETE; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.core.StringContains.containsString; + +import com.google.common.collect.ImmutableMap; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; @@ -28,8 +35,6 @@ import java.nio.file.Paths; import java.util.List; import java.util.Properties; - -import com.google.common.collect.ImmutableMap; import org.apache.commons.io.FileUtils; import org.apache.lucene.util.IOUtils; import org.apache.solr.SolrTestCaseJ4; @@ -38,12 +43,6 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.apache.solr.core.CoreContainer.CORE_DISCOVERY_COMPLETE; -import static org.apache.solr.core.CoreContainer.INITIAL_CORE_LOAD_COMPLETE; -import static org.apache.solr.core.CoreContainer.LOAD_COMPLETE; -import static org.hamcrest.CoreMatchers.not; -import static org.hamcrest.core.StringContains.containsString; - public class TestCoreDiscovery extends SolrTestCaseJ4 { @BeforeClass @@ -56,18 +55,20 @@ public static void beforeClass() throws Exception { private void setMeUp(String alternateCoreDir) throws Exception { String xmlStr = SOLR_XML; if (alternateCoreDir != null) { - xmlStr = xmlStr.replace("", " " + alternateCoreDir + " "); + xmlStr = + xmlStr.replace( + "", " " + alternateCoreDir + " "); } File tmpFile = new File(solrHomeDirectory.toFile(), SolrXmlConfig.SOLR_XML_FILE); FileUtils.write(tmpFile, xmlStr, IOUtils.UTF_8); - } private void setMeUp() throws Exception { setMeUp(null); } - private Properties makeCoreProperties(String name, boolean isTransient, boolean loadOnStartup, String... extraProps) { + private Properties makeCoreProperties( + String name, boolean isTransient, boolean loadOnStartup, String... 
extraProps) { Properties props = new Properties(); props.put(CoreDescriptor.CORE_NAME, name); props.put(CoreDescriptor.CORE_SCHEMA, "schema-tiny.xml"); @@ -97,7 +98,9 @@ private void addCoreWithProps(Properties stockProps, File propFile) throws Excep private void addCoreWithProps(String name, Properties stockProps) throws Exception { - File propFile = new File(new File(solrHomeDirectory.toFile(), name), CorePropertiesLocator.PROPERTIES_FILENAME); + File propFile = + new File( + new File(solrHomeDirectory.toFile(), name), CorePropertiesLocator.PROPERTIES_FILENAME); File parent = propFile.getParentFile(); assertTrue("Failed to mkdirs for " + parent.getAbsolutePath(), parent.mkdirs()); addCoreWithProps(stockProps, propFile); @@ -107,8 +110,11 @@ private void addConfFiles(File confDir) throws Exception { String top = SolrTestCaseJ4.TEST_HOME() + "/collection1/conf"; assertTrue("Failed to mkdirs for " + confDir.getAbsolutePath(), confDir.mkdirs()); FileUtils.copyFile(new File(top, "schema-tiny.xml"), new File(confDir, "schema-tiny.xml")); - FileUtils.copyFile(new File(top, "solrconfig-minimal.xml"), new File(confDir, "solrconfig-minimal.xml")); - FileUtils.copyFile(new File(top, "solrconfig.snippet.randomindexconfig.xml"), new File(confDir, "solrconfig.snippet.randomindexconfig.xml")); + FileUtils.copyFile( + new File(top, "solrconfig-minimal.xml"), new File(confDir, "solrconfig-minimal.xml")); + FileUtils.copyFile( + new File(top, "solrconfig.snippet.randomindexconfig.xml"), + new File(confDir, "solrconfig.snippet.randomindexconfig.xml")); } private CoreContainer init() { @@ -122,22 +128,21 @@ private CoreContainer init() { long status = container.getStatus(); - assertTrue("Load complete flag should be set", - (status & LOAD_COMPLETE) == LOAD_COMPLETE); - assertTrue("Core discovery should be complete", + assertTrue("Load complete flag should be set", (status & LOAD_COMPLETE) == LOAD_COMPLETE); + assertTrue( + "Core discovery should be complete", (status & CORE_DISCOVERY_COMPLETE) == CORE_DISCOVERY_COMPLETE); - assertTrue("Initial core loading should be complete", + assertTrue( + "Initial core loading should be complete", (status & INITIAL_CORE_LOAD_COMPLETE) == INITIAL_CORE_LOAD_COMPLETE); return container; } @After - public void after() throws Exception { + public void after() throws Exception {} - } - - // Test the basic setup, create some dirs with core.properties files in them, but solr.xml has discoverCores - // set and insure that we find all the cores and can load them. + // Test the basic setup, create some dirs with core.properties files in them, but solr.xml has + // discoverCores set and insure that we find all the cores and can load them. @Test @SuppressWarnings({"try"}) public void testDiscovery() throws Exception { @@ -159,8 +164,8 @@ public void testDiscovery() throws Exception { // force loading of core2 and lazy1 by getting them from the CoreContainer try (SolrCore core1 = cc.getCore("core1"); - SolrCore core2 = cc.getCore("core2"); - SolrCore lazy1 = cc.getCore("lazy1")) { + SolrCore core2 = cc.getCore("core2"); + SolrCore lazy1 = cc.getCore("lazy1")) { // Let's assert we did the right thing for implicit properties too. 
CoreDescriptor desc = core1.getCoreDescriptor(); @@ -181,13 +186,20 @@ public void testDiscovery() throws Exception { Properties persistable = cd1.getPersistableUserProperties(); persistable.setProperty("bogusprop", "bogusval"); cc.getCoresLocator().persist(cc, cd1); - File propFile = new File(new File(solrHomeDirectory.toFile(), "core1"), CorePropertiesLocator.PROPERTIES_FILENAME); + File propFile = + new File( + new File(solrHomeDirectory.toFile(), "core1"), + CorePropertiesLocator.PROPERTIES_FILENAME); Properties newProps = new Properties(); - try (InputStreamReader is = new InputStreamReader(new FileInputStream(propFile), StandardCharsets.UTF_8)) { + try (InputStreamReader is = + new InputStreamReader(new FileInputStream(propFile), StandardCharsets.UTF_8)) { newProps.load(is); } // is it there? - assertEquals("Should have persisted bogusprop to disk", "bogusval", newProps.getProperty("bogusprop")); + assertEquals( + "Should have persisted bogusprop to disk", + "bogusval", + newProps.getProperty("bogusprop")); // is it in the user properties? CorePropertiesLocator cpl = new CorePropertiesLocator(solrHomeDirectory); List cores = cpl.discover(cc); @@ -195,7 +207,8 @@ public void testDiscovery() throws Exception { for (CoreDescriptor cd : cores) { if (cd.getName().equals("core1")) { found = true; - assertEquals("Should have persisted bogusprop to disk in user properties", + assertEquals( + "Should have persisted bogusprop to disk in user properties", "bogusval", cd.getPersistableUserProperties().getProperty("bogusprop")); break; @@ -222,17 +235,20 @@ public void testPropFilePersistence() throws Exception { addCoreWithProps("corep2", makeCoreProperties("corep2", false, true)); Path coreP1PropFile = Paths.get(solrHomeDirectory.toString(), "corep1", "core.properties"); - assertTrue("Core.properties file should exist for before core load failure core corep1", + assertTrue( + "Core.properties file should exist for before core load failure core corep1", Files.exists(coreP1PropFile)); CoreContainer cc = init(); try { - Exception thrown = expectThrows(SolrCoreInitializationException.class, () -> cc.getCore("corep1")); + Exception thrown = + expectThrows(SolrCoreInitializationException.class, () -> cc.getCore("corep1")); assertTrue(thrown.getMessage().contains("init failure")); try (SolrCore sc = cc.getCore("corep2")) { assertNotNull("Core corep2 should be loaded", sc); } - assertTrue("Core.properties file should still exist for core corep1", Files.exists(coreP1PropFile)); + assertTrue( + "Core.properties file should still exist for core corep1", Files.exists(coreP1PropFile)); // Creating a core successfully should create a core.properties file Path corePropFile = Paths.get(solrHomeDirectory.toString(), "corep3", "core.properties"); @@ -244,16 +260,23 @@ public void testPropFilePersistence() throws Exception { corePropFile = Paths.get(solrHomeDirectory.toString(), "corep4", "core.properties"); assertFalse("Should not be a properties file yet for corep4", Files.exists(corePropFile)); - thrown = expectThrows(SolrException.class, () -> { - cc.create("corep4", ImmutableMap.of( - CoreDescriptor.CORE_NAME, "corep4", - CoreDescriptor.CORE_SCHEMA, "not-there.xml", - CoreDescriptor.CORE_CONFIG, "solrconfig-minimal.xml", - CoreDescriptor.CORE_TRANSIENT, "false", - CoreDescriptor.CORE_LOADONSTARTUP, "true")); - }); + thrown = + expectThrows( + SolrException.class, + () -> { + cc.create( + "corep4", + ImmutableMap.of( + CoreDescriptor.CORE_NAME, "corep4", + CoreDescriptor.CORE_SCHEMA, "not-there.xml", + 
CoreDescriptor.CORE_CONFIG, "solrconfig-minimal.xml", + CoreDescriptor.CORE_TRANSIENT, "false", + CoreDescriptor.CORE_LOADONSTARTUP, "true")); + }); assertTrue(thrown.getMessage().contains("Can't find resource")); - assertFalse("Failed corep4 should not have left a core.properties file around", Files.exists(corePropFile)); + assertFalse( + "Failed corep4 should not have left a core.properties file around", + Files.exists(corePropFile)); // Finally, just for yucks, let's determine that a this create path also leaves a prop file. @@ -261,26 +284,28 @@ public void testPropFilePersistence() throws Exception { assertFalse("Should not be a properties file yet for corep5", Files.exists(corePropFile)); cc.create("corep5", ImmutableMap.of("configSet", "minimal")); - - assertTrue("corep5 should have left a core.properties file on disk", Files.exists(corePropFile)); + + assertTrue( + "corep5 should have left a core.properties file on disk", Files.exists(corePropFile)); } finally { cc.shutdown(); } } - - // Insure that if the number of transient cores that are loaded on startup is greater than the cache size that Solr - // "does the right thing". Which means - // 1> stop loading cores after transient cache size is reached, in this case that magic number is 3 - // one non-transient and two transient. + // Insure that if the number of transient cores that are loaded on startup is greater than the + // cache size that Solr "does the right thing". Which means + // 1> stop loading cores after transient cache size is reached, in this case that magic number is + // 3 one non-transient and two transient. // 2> still loads cores as time passes. // - // This seems like a silly test, but it hangs forever on 4.10 so let's guard against it in future. The behavior - // has gone away with the removal of the complexity around the old-style solr.xml files. + // This seems like a silly test, but it hangs forever on 4.10 so let's guard against it in future. + // The behavior has gone away with the removal of the complexity around the old-style solr.xml + // files. // - // NOTE: The order that cores are loaded depends upon how the core discovery is traversed. I don't think we can - // make the test depend on that order, so after load just insure that the cores counts are correct. + // NOTE: The order that cores are loaded depends upon how the core discovery is traversed. I don't + // think we can make the test depend on that order, so after load just insure that the cores + // counts are correct. @Test public void testTooManyTransientCores() throws Exception { @@ -300,9 +325,12 @@ public void testTooManyTransientCores() throws Exception { final CoreContainer cc = new CoreContainer(solrHomeDirectory, new Properties()); try { cc.load(); - // Just check that the proper number of cores are loaded since making the test depend on order would be fragile - assertEquals("There should only be 3 cores loaded, coreLOS and two coreT? cores", - 3, cc.getLoadedCoreNames().size()); + // Just check that the proper number of cores are loaded since making the test depend on order + // would be fragile + assertEquals( + "There should only be 3 cores loaded, coreLOS and two coreT? 
cores", + 3, + cc.getLoadedCoreNames().size()); SolrCore c1 = cc.getCore("coreT1"); assertNotNull("Core T1 should NOT BE NULL", c1); @@ -335,34 +363,44 @@ public void testDuplicateNames() throws Exception { // name, isLazy, loadOnStartup addCoreWithProps("core1", makeCoreProperties("core1", false, true)); addCoreWithProps("core2", makeCoreProperties("core2", false, false, "name=core1")); - SolrException thrown = expectThrows(SolrException.class, () -> { - CoreContainer cc = null; - try { cc = init(); } - finally { if (cc != null) cc.shutdown(); } - }); + SolrException thrown = + expectThrows( + SolrException.class, + () -> { + CoreContainer cc = null; + try { + cc = init(); + } finally { + if (cc != null) cc.shutdown(); + } + }); final String message = thrown.getMessage(); - assertTrue("Wrong exception thrown on duplicate core names", + assertTrue( + "Wrong exception thrown on duplicate core names", message.indexOf("Found multiple cores with the name [core1]") != -1); - assertTrue(File.separator + "core1 should have been mentioned in the message: " + message, + assertTrue( + File.separator + "core1 should have been mentioned in the message: " + message, message.indexOf(File.separator + "core1") != -1); - assertTrue(File.separator + "core2 should have been mentioned in the message:" + message, + assertTrue( + File.separator + "core2 should have been mentioned in the message:" + message, message.indexOf(File.separator + "core2") != -1); } - @Test public void testAlternateCoreDir() throws Exception { File alt = createTempDir().toFile(); setMeUp(alt.getAbsolutePath()); - addCoreWithProps(makeCoreProperties("core1", false, true, "dataDir=core1"), + addCoreWithProps( + makeCoreProperties("core1", false, true, "dataDir=core1"), new File(alt, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); - addCoreWithProps(makeCoreProperties("core2", false, false, "dataDir=core2"), + addCoreWithProps( + makeCoreProperties("core2", false, false, "dataDir=core2"), new File(alt, "core2" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); CoreContainer cc = init(); try (SolrCore core1 = cc.getCore("core1"); - SolrCore core2 = cc.getCore("core2")) { + SolrCore core2 = cc.getCore("core2")) { assertNotNull(core1); assertNotNull(core2); } finally { @@ -377,17 +415,31 @@ public void testAlternateRelativeCoreDir() throws Exception { setMeUp(relative); // two cores under the relative directory - addCoreWithProps(makeCoreProperties("core1", false, true, "dataDir=core1"), - solrHomeDirectory.resolve(relative).resolve("core1").resolve(CorePropertiesLocator.PROPERTIES_FILENAME).toFile()); - addCoreWithProps(makeCoreProperties("core2", false, false, "dataDir=core2"), - solrHomeDirectory.resolve(relative).resolve("core2").resolve(CorePropertiesLocator.PROPERTIES_FILENAME).toFile()); + addCoreWithProps( + makeCoreProperties("core1", false, true, "dataDir=core1"), + solrHomeDirectory + .resolve(relative) + .resolve("core1") + .resolve(CorePropertiesLocator.PROPERTIES_FILENAME) + .toFile()); + addCoreWithProps( + makeCoreProperties("core2", false, false, "dataDir=core2"), + solrHomeDirectory + .resolve(relative) + .resolve("core2") + .resolve(CorePropertiesLocator.PROPERTIES_FILENAME) + .toFile()); // one core *not* under the relative directory - addCoreWithProps(makeCoreProperties("core0", false, true, "datadir=core0"), - solrHomeDirectory.resolve("core0").resolve(CorePropertiesLocator.PROPERTIES_FILENAME).toFile()); + addCoreWithProps( + makeCoreProperties("core0", false, true, 
"datadir=core0"), + solrHomeDirectory + .resolve("core0") + .resolve(CorePropertiesLocator.PROPERTIES_FILENAME) + .toFile()); CoreContainer cc = init(); try (SolrCore core1 = cc.getCore("core1"); - SolrCore core2 = cc.getCore("core2")) { + SolrCore core2 = cc.getCore("core2")) { assertNotNull(core1); assertNotNull(core2); @@ -405,13 +457,15 @@ public void testAlternateRelativeCoreDir() throws Exception { public void testNoCoreDir() throws Exception { File noCoreDir = createTempDir().toFile(); setMeUp(noCoreDir.getAbsolutePath()); - addCoreWithProps(makeCoreProperties("core1", false, true), + addCoreWithProps( + makeCoreProperties("core1", false, true), new File(noCoreDir, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); - addCoreWithProps(makeCoreProperties("core2", false, false), + addCoreWithProps( + makeCoreProperties("core2", false, false), new File(noCoreDir, "core2" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); CoreContainer cc = init(); try (SolrCore core1 = cc.getCore("core1"); - SolrCore core2 = cc.getCore("core2")) { + SolrCore core2 = cc.getCore("core2")) { assertNotNull(core1); assertNotNull(core2); } finally { @@ -423,19 +477,22 @@ public void testNoCoreDir() throws Exception { public void testCoreDirCantRead() throws Exception { File coreDir = solrHomeDirectory.toFile(); setMeUp(coreDir.getAbsolutePath()); - addCoreWithProps(makeCoreProperties("core1", false, true), + addCoreWithProps( + makeCoreProperties("core1", false, true), new File(coreDir, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); // Insure that another core is opened successfully - addCoreWithProps(makeCoreProperties("core2", false, false, "dataDir=core2"), + addCoreWithProps( + makeCoreProperties("core2", false, false, "dataDir=core2"), new File(coreDir, "core2" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); File toSet = new File(coreDir, "core1"); - assumeTrue("Cannot make " + toSet + " non-readable. Test aborted.", toSet.setReadable(false, false)); + assumeTrue( + "Cannot make " + toSet + " non-readable. Test aborted.", toSet.setReadable(false, false)); assumeFalse("Appears we are a super user, skip test", toSet.canRead()); CoreContainer cc = init(); try (SolrCore core1 = cc.getCore("core1"); - SolrCore core2 = cc.getCore("core2")) { + SolrCore core2 = cc.getCore("core2")) { assertNull(core1); assertNotNull(core2); } finally { @@ -449,39 +506,48 @@ public void testCoreDirCantRead() throws Exception { public void testNonCoreDirCantRead() throws Exception { File coreDir = solrHomeDirectory.toFile(); setMeUp(coreDir.getAbsolutePath()); - addCoreWithProps(makeCoreProperties("core1", false, true), + addCoreWithProps( + makeCoreProperties("core1", false, true), new File(coreDir, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); - addCoreWithProps(makeCoreProperties("core2", false, false, "dataDir=core2"), + addCoreWithProps( + makeCoreProperties("core2", false, false, "dataDir=core2"), new File(coreDir, "core2" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); File toSet = new File(solrHomeDirectory.toFile(), "cantReadDir"); - assertTrue("Should have been able to make directory '" + toSet.getAbsolutePath() + "' ", toSet.mkdirs()); - assumeTrue("Cannot make " + toSet + " non-readable. Test aborted.", toSet.setReadable(false, false)); + assertTrue( + "Should have been able to make directory '" + toSet.getAbsolutePath() + "' ", + toSet.mkdirs()); + assumeTrue( + "Cannot make " + toSet + " non-readable. 
Test aborted.", toSet.setReadable(false, false)); assumeFalse("Appears we are a super user, skip test", toSet.canRead()); CoreContainer cc = init(); try (SolrCore core1 = cc.getCore("core1"); - SolrCore core2 = cc.getCore("core2")) { - assertNotNull(core1); // Should be able to open the perfectly valid core1 despite a non-readable directory + SolrCore core2 = cc.getCore("core2")) { + // Should be able to open the perfectly valid core1 despite a non-readable directory + assertNotNull(core1); assertNotNull(core2); } finally { cc.shutdown(); } // So things can be cleaned up by the framework! toSet.setReadable(true, false); - } @Test public void testFileCantRead() throws Exception { File coreDir = solrHomeDirectory.toFile(); setMeUp(coreDir.getAbsolutePath()); - addCoreWithProps(makeCoreProperties("core1", false, true), + addCoreWithProps( + makeCoreProperties("core1", false, true), new File(coreDir, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); File toSet = new File(solrHomeDirectory.toFile(), "cantReadFile"); - assertTrue("Should have been able to make file '" + toSet.getAbsolutePath() + "' ", toSet.createNewFile()); - assumeTrue("Cannot make " + toSet + " non-readable. Test aborted.", toSet.setReadable(false, false)); + assertTrue( + "Should have been able to make file '" + toSet.getAbsolutePath() + "' ", + toSet.createNewFile()); + assumeTrue( + "Cannot make " + toSet + " non-readable. Test aborted.", toSet.setReadable(false, false)); CoreContainer cc = init(); try (SolrCore core1 = cc.getCore("core1")) { assertNotNull(core1); // Should still be able to create core despite r/o file. @@ -500,7 +566,8 @@ public void testSolrHomeDoesntExist() throws Exception { try { cc = init(); } catch (SolrException ex) { - assertTrue("Core init doesn't report if solr home directory doesn't exist " + ex.getMessage(), + assertTrue( + "Core init doesn't report if solr home directory doesn't exist " + ex.getMessage(), ex.getMessage().contains("Error reading core root directory")); } finally { if (cc != null) { @@ -509,46 +576,63 @@ public void testSolrHomeDoesntExist() throws Exception { } } - @Test public void testSolrHomeNotReadable() throws Exception { File homeDir = solrHomeDirectory.toFile(); setMeUp(homeDir.getAbsolutePath()); - addCoreWithProps(makeCoreProperties("core1", false, true), + addCoreWithProps( + makeCoreProperties("core1", false, true), new File(homeDir, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME)); - assumeTrue("Cannot make " + homeDir + " non-readable. Test aborted.", homeDir.setReadable(false, false)); + assumeTrue( + "Cannot make " + homeDir + " non-readable. Test aborted.", + homeDir.setReadable(false, false)); assumeFalse("Appears we are a super user, skip test", homeDir.canRead()); - Exception thrown = expectThrows(Exception.class, () -> { - CoreContainer cc = null; - try { cc = init(); } - finally { if (cc != null) cc.shutdown(); } - }); + Exception thrown = + expectThrows( + Exception.class, + () -> { + CoreContainer cc = null; + try { + cc = init(); + } finally { + if (cc != null) cc.shutdown(); + } + }); assertThat(thrown.getMessage(), containsString("Error reading core root directory")); // So things can be cleaned up by the framework! 
homeDir.setReadable(true, false); - } // For testing whether finding a solr.xml overrides looking at solr.properties - private final static String SOLR_XML = "<solr> " + - "<int name=\"transientCacheSize\">2</int> " + - "<str name=\"configSetBaseDir\">" + Paths.get(TEST_HOME()).resolve("configsets").toString() + "</str>" + - "<solrcloud> " + - "<str name=\"hostContext\">solrprop</str> " + - "<int name=\"zkClientTimeout\">20</int> " + - "<str name=\"host\">222.333.444.555</str> " + - "<int name=\"hostPort\">6000</int> " + - " </solrcloud> " + - "</solr>"; + private static final String SOLR_XML = + "<solr> " + + "<int name=\"transientCacheSize\">2</int> " + + "<str name=\"configSetBaseDir\">" + + Paths.get(TEST_HOME()).resolve("configsets").toString() + + "</str>" + + "<solrcloud> " + + "<str name=\"hostContext\">solrprop</str> " + + "<int name=\"zkClientTimeout\">20</int> " + + "<str name=\"host\">222.333.444.555</str> " + + "<int name=\"hostPort\">6000</int> " + + " </solrcloud> " + + "</solr>"; @Test public void testRootDirectoryResolution() { - NodeConfig config = SolrXmlConfig.fromString(solrHomeDirectory, "<solr><str name=\"coreRootDirectory\">relative</str></solr>"); - assertThat(config.getCoreRootDirectory().toString(), containsString(solrHomeDirectory.toAbsolutePath().toString())); - - NodeConfig absConfig = SolrXmlConfig.fromString(solrHomeDirectory, "<solr><str name=\"coreRootDirectory\">/absolute</str></solr>"); - assertThat(absConfig.getCoreRootDirectory().toString(), not(containsString(solrHomeDirectory.toAbsolutePath().toString()))); + NodeConfig config = + SolrXmlConfig.fromString( + solrHomeDirectory, "<solr><str name=\"coreRootDirectory\">relative</str></solr>"); + assertThat( + config.getCoreRootDirectory().toString(), + containsString(solrHomeDirectory.toAbsolutePath().toString())); + + NodeConfig absConfig = + SolrXmlConfig.fromString( + solrHomeDirectory, "<solr><str name=\"coreRootDirectory\">/absolute</str></solr>"); + assertThat( + absConfig.getCoreRootDirectory().toString(), + not(containsString(solrHomeDirectory.toAbsolutePath().toString()))); } - } diff --git a/solr/core/src/test/org/apache/solr/core/TestCorePropertiesReload.java b/solr/core/src/test/org/apache/solr/core/TestCorePropertiesReload.java index 56f6201e477..af838144fb7 100644 --- a/solr/core/src/test/org/apache/solr/core/TestCorePropertiesReload.java +++ b/solr/core/src/test/org/apache/solr/core/TestCorePropertiesReload.java @@ -16,14 +16,13 @@ */ package org.apache.solr.core; +import java.io.BufferedWriter; import java.io.File; import java.io.FileOutputStream; import java.io.OutputStreamWriter; -import java.io.BufferedWriter; import java.io.Writer; import java.nio.charset.StandardCharsets; import java.util.Properties; - import org.apache.commons.io.FileUtils; import org.apache.solr.SolrTestCaseJ4; import org.junit.Test; @@ -48,7 +47,7 @@ public void testPropertiesReload() throws Exception { String testProp = coreDescriptor.getCoreProperty("test", null); assertTrue(testProp.equals("Before reload")); - //Re-write the properties file + // Re-write the properties file Properties props = new Properties(); props.setProperty("test", "After reload"); writeProperties(props); @@ -65,8 +64,11 @@ private void writeProperties(Properties props) throws Exception { Writer out = null; try { File confDir = new File(new File(solrHomeDirectory, "collection1"), "conf"); - out = new BufferedWriter(new OutputStreamWriter( - new FileOutputStream(new File(confDir, "solrcore.properties")), StandardCharsets.UTF_8)); + out = + new BufferedWriter( + new OutputStreamWriter( + new FileOutputStream(new File(confDir, "solrcore.properties")), + StandardCharsets.UTF_8)); props.store(out, "Reload Test"); } finally { diff --git a/solr/core/src/test/org/apache/solr/core/TestCustomStream.java b/solr/core/src/test/org/apache/solr/core/TestCustomStream.java index 5fc4242ec26..e4570801f2f 100644 --- a/solr/core/src/test/org/apache/solr/core/TestCustomStream.java +++ b/solr/core/src/test/org/apache/solr/core/TestCustomStream.java @@ -18,39 +18,39 @@ package org.apache.solr.core; import java.util.Arrays; - import org.apache.solr.cloud.AbstractFullDistribZkTestBase; import 
org.apache.solr.util.RestTestHarness; import org.junit.Test; -/** - * Created by caomanhdat on 6/3/16. - */ +/** Created by caomanhdat on 6/3/16. */ public class TestCustomStream extends AbstractFullDistribZkTestBase { @Test public void testDynamicLoadingCustomStream() throws Exception { setupRestTestHarnesses(); - String payload = "{\n" + - "'create-expressible' : { 'name' : 'hello', 'class': 'org.apache.solr.core.HelloStream' }\n" + - "}"; + String payload = + "{\n" + + "'create-expressible' : { 'name' : 'hello', 'class': 'org.apache.solr.core.HelloStream' }\n" + + "}"; RestTestHarness client = randomRestTestHarness(); - TestSolrConfigHandler.runConfigCommand(client,"/config",payload); - TestSolrConfigHandler.testForResponseElement(client, + TestSolrConfigHandler.runConfigCommand(client, "/config", payload); + TestSolrConfigHandler.testForResponseElement( + client, null, "/config/overlay", null, Arrays.asList("overlay", "expressible", "hello", "class"), - "org.apache.solr.core.HelloStream",10); + "org.apache.solr.core.HelloStream", + 10); - TestSolrConfigHandler.testForResponseElement(client, + TestSolrConfigHandler.testForResponseElement( + client, null, "/stream?expr=hello()", null, Arrays.asList("result-set", "docs[0]", "msg"), - "Hello World!",10); + "Hello World!", + 10); } - - } diff --git a/solr/core/src/test/org/apache/solr/core/TestImplicitCoreProperties.java b/solr/core/src/test/org/apache/solr/core/TestImplicitCoreProperties.java index 885ad71a715..e7fb137cf79 100644 --- a/solr/core/src/test/org/apache/solr/core/TestImplicitCoreProperties.java +++ b/solr/core/src/test/org/apache/solr/core/TestImplicitCoreProperties.java @@ -17,7 +17,6 @@ package org.apache.solr.core; import java.util.Properties; - import org.apache.solr.SolrTestCaseJ4; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -29,7 +28,9 @@ public class TestImplicitCoreProperties extends SolrTestCaseJ4 { @BeforeClass public static void setupContainer() { - cc = createCoreContainer("collection1", "data", "solrconfig-implicitproperties.xml", "schema.xml"); + cc = + createCoreContainer( + "collection1", "data", "solrconfig-implicitproperties.xml", "schema.xml"); } @AfterClass @@ -42,26 +43,26 @@ public static void teardownContainer() { @Test public void testImplicitPropertiesAreSubstitutedInSolrConfig() { - assertQ(req("q", "*:*") - , "//str[@name='dummy1'][.='collection1']" - , "//str[@name='dummy2'][.='data']" - , "//str[@name='dummy3'][.='solrconfig-implicitproperties.xml']" - , "//str[@name='dummy4'][.='schema.xml']" - , "//str[@name='dummy5'][.='false']" - ); + assertQ( + req("q", "*:*"), + "//str[@name='dummy1'][.='collection1']", + "//str[@name='dummy2'][.='data']", + "//str[@name='dummy3'][.='solrconfig-implicitproperties.xml']", + "//str[@name='dummy4'][.='schema.xml']", + "//str[@name='dummy5'][.='false']"); } // SOLR-5279 @Test public void testPropertiesArePersistedAcrossReload() { cc.reload("collection1"); - assertQ(req("q", "*:*") - , "//str[@name='dummy1'][.='collection1']" - , "//str[@name='dummy2'][.='data']" - , "//str[@name='dummy3'][.='solrconfig-implicitproperties.xml']" - , "//str[@name='dummy4'][.='schema.xml']" - , "//str[@name='dummy5'][.='false']" - ); + assertQ( + req("q", "*:*"), + "//str[@name='dummy1'][.='collection1']", + "//str[@name='dummy2'][.='data']", + "//str[@name='dummy3'][.='solrconfig-implicitproperties.xml']", + "//str[@name='dummy4'][.='schema.xml']", + "//str[@name='dummy5'][.='false']"); } // SOLR-8712 @@ -69,8 +70,8 @@ public void 
testPropertiesArePersistedAcrossReload() { public void testDefaultProperties() { Properties props = cc.getCoreDescriptor("collection1").getSubstitutableProperties(); assertEquals("collection1", props.getProperty("solr.core.name")); - assertTrue("solr.core.instanceDir not set correctly", + assertTrue( + "solr.core.instanceDir not set correctly", props.getProperty("solr.core.instanceDir").contains("collection1")); } - } diff --git a/solr/core/src/test/org/apache/solr/core/TestInfoStreamLogging.java b/solr/core/src/test/org/apache/solr/core/TestInfoStreamLogging.java index c448583c4a5..3e031e1847f 100644 --- a/solr/core/src/test/org/apache/solr/core/TestInfoStreamLogging.java +++ b/solr/core/src/test/org/apache/solr/core/TestInfoStreamLogging.java @@ -25,9 +25,9 @@ public class TestInfoStreamLogging extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-infostream-logging.xml","schema.xml"); + initCore("solrconfig-infostream-logging.xml", "schema.xml"); } - + public void testIndexConfig() throws Exception { IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore()); diff --git a/solr/core/src/test/org/apache/solr/core/TestInitParams.java b/solr/core/src/test/org/apache/solr/core/TestInitParams.java index 44aa8078093..c328055d126 100644 --- a/solr/core/src/test/org/apache/solr/core/TestInitParams.java +++ b/solr/core/src/test/org/apache/solr/core/TestInitParams.java @@ -16,6 +16,10 @@ */ package org.apache.solr.core; +import static java.util.Collections.singletonMap; + +import java.util.Arrays; +import java.util.HashMap; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.util.NamedList; import org.apache.solr.request.SolrRequestHandler; @@ -23,20 +27,16 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.util.Arrays; -import java.util.HashMap; - -import static java.util.Collections.singletonMap; - public class TestInitParams extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-paramset.xml","schema.xml"); + initCore("solrconfig-paramset.xml", "schema.xml"); } + @Test - public void testComponentWithInitParams(){ + public void testComponentWithInitParams() { - for (String s : Arrays.asList("/dump1", "/dump3","/root/dump5" , "/root1/anotherlevel/dump6")) { + for (String s : Arrays.asList("/dump1", "/dump3", "/root/dump5", "/root1/anotherlevel/dump6")) { SolrRequestHandler handler = h.getCore().getRequestHandler(s); SolrQueryResponse rsp = new SolrQueryResponse(); handler.handleRequest(req("initArgs", "true"), rsp); @@ -51,15 +51,18 @@ public void testComponentWithInitParams(){ InitParams initParams = h.getCore().getSolrConfig().getInitParams().get("a"); - PluginInfo pluginInfo = new PluginInfo("requestHandler", - new HashMap<>(), - new NamedList<>(singletonMap("defaults", new NamedList<>(singletonMap("a", "A1")))), null); + PluginInfo pluginInfo = + new PluginInfo( + "requestHandler", + new HashMap<>(), + new NamedList<>(singletonMap("defaults", new NamedList<>(singletonMap("a", "A1")))), + null); initParams.apply(pluginInfo); - assertEquals( "A",initParams.defaults.get("a")); + assertEquals("A", initParams.defaults.get("a")); } @Test - public void testMultiInitParams(){ + public void testMultiInitParams() { SolrRequestHandler handler = h.getCore().getRequestHandler("/dump6"); SolrQueryResponse rsp = new SolrQueryResponse(); handler.handleRequest(req("initArgs", "true"), rsp); @@ -71,39 +74,36 @@ public void 
testMultiInitParams(){ assertEquals("B", def.get("b")); def = (NamedList) nl.get(PluginInfo.APPENDS); assertEquals("C", def.get("c")); - } - @Test - public void testComponentWithConflictingInitParams(){ + public void testComponentWithConflictingInitParams() { SolrRequestHandler handler = h.getCore().getRequestHandler("/dump2"); SolrQueryResponse rsp = new SolrQueryResponse(); handler.handleRequest(req("initArgs", "true"), rsp); NamedList nl = (NamedList) rsp.getValues().get("initArgs"); NamedList def = (NamedList) nl.get(PluginInfo.DEFAULTS); - assertEquals("A1" ,def.get("a")); + assertEquals("A1", def.get("a")); def = (NamedList) nl.get(PluginInfo.INVARIANTS); - assertEquals("B1" ,def.get("b")); + assertEquals("B1", def.get("b")); def = (NamedList) nl.get(PluginInfo.APPENDS); - assertEquals(Arrays.asList("C1","C") ,def.getAll("c")); + assertEquals(Arrays.asList("C1", "C"), def.getAll("c")); } public void testNestedRequestHandler() { assertNotNull(h.getCore().getRequestHandler("/greedypath")); assertNotNull(h.getCore().getRequestHandler("/greedypath/some/path")); - assertNotNull( h.getCore().getRequestHandler("/greedypath/some/other/path")); + assertNotNull(h.getCore().getRequestHandler("/greedypath/some/other/path")); assertNull(h.getCore().getRequestHandler("/greedypath/unknownpath")); } - public void testElevateExample(){ + public void testElevateExample() { SolrRequestHandler handler = h.getCore().getRequestHandler("/elevate"); SolrQueryResponse rsp = new SolrQueryResponse(); handler.handleRequest(req("initArgs", "true"), rsp); NamedList nl = (NamedList) rsp.getValues().get("initArgs"); NamedList def = (NamedList) nl.get(PluginInfo.DEFAULTS); - assertEquals("text" ,def.get("df")); - + assertEquals("text", def.get("df")); } public void testArbitraryAttributes() { @@ -114,11 +114,13 @@ public void testArbitraryAttributes() { assertEquals("server-enabled.txt", nl.get("healthcheckFile")); } - public void testMatchPath(){ - InitParams initParams = new InitParams(new PluginInfo(InitParams.TYPE, singletonMap("path", "/update/json/docs"))); + public void testMatchPath() { + InitParams initParams = + new InitParams(new PluginInfo(InitParams.TYPE, singletonMap("path", "/update/json/docs"))); assertFalse(initParams.matchPath("/update")); assertTrue(initParams.matchPath("/update/json/docs")); - initParams = new InitParams(new PluginInfo(InitParams.TYPE, singletonMap("path", "/update/**"))); + initParams = + new InitParams(new PluginInfo(InitParams.TYPE, singletonMap("path", "/update/**"))); assertTrue(initParams.matchPath("/update/json/docs")); assertTrue(initParams.matchPath("/update/json")); assertTrue(initParams.matchPath("/update")); @@ -127,5 +129,4 @@ public void testMatchPath(){ assertTrue(initParams.matchPath("/update/json")); assertTrue(initParams.matchPath("/update")); } - } diff --git a/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java b/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java index bc592419223..51584066525 100644 --- a/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java +++ b/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java @@ -17,12 +17,23 @@ package org.apache.solr.core; import com.codahale.metrics.MetricRegistry; +import java.lang.invoke.MethodHandles; +import java.util.Map; +import java.util.Set; +import java.util.TreeSet; +import javax.management.AttributeNotFoundException; +import javax.management.MBeanAttributeInfo; +import javax.management.MBeanInfo; +import javax.management.MBeanServer; +import 
javax.management.MBeanServerFactory; +import javax.management.ObjectInstance; +import javax.management.ObjectName; +import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.metrics.SolrMetricManager; import org.apache.solr.metrics.SolrMetricReporter; +import org.apache.solr.metrics.reporters.SolrJmxReporter; import org.apache.solr.metrics.reporters.jmx.JmxMetricsReporter; import org.apache.solr.metrics.reporters.jmx.JmxObjectNameFactory; -import org.apache.solr.metrics.reporters.SolrJmxReporter; -import org.apache.solr.SolrTestCaseJ4; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -30,22 +41,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.management.AttributeNotFoundException; -import javax.management.MBeanAttributeInfo; -import javax.management.MBeanInfo; -import javax.management.MBeanServer; -import javax.management.MBeanServerFactory; -import javax.management.ObjectInstance; -import javax.management.ObjectName; -import java.lang.invoke.MethodHandles; -import java.util.Map; -import java.util.Set; -import java.util.TreeSet; - /** * Test for JMX Integration * - * * @since solr 1.3 */ public class TestJmxIntegration extends SolrTestCaseJ4 { @@ -62,7 +60,7 @@ public static void beforeClass() throws Exception { // Make sure that at least one MBeanServer is available // prior to initializing the core // - // (test configs are setup to use existing server if any, + // (test configs are setup to use existing server if any, // otherwise skip JMX) newMbeanServer = MBeanServerFactory.createMBeanServer(); @@ -71,19 +69,18 @@ public static void beforeClass() throws Exception { // we should be able to see that the core has JmxIntegration enabled registryName = h.getCore().getCoreMetricManager().getRegistryName(); SolrMetricManager manager = h.getCoreContainer().getMetricManager(); - Map reporters = manager.getReporters(registryName); + Map reporters = manager.getReporters(registryName); assertEquals(1, reporters.size()); SolrMetricReporter reporter = reporters.values().iterator().next(); assertTrue(reporter instanceof SolrJmxReporter); - SolrJmxReporter jmx = (SolrJmxReporter)reporter; + SolrJmxReporter jmx = (SolrJmxReporter) reporter; assertTrue("JMX not enabled", jmx.isActive()); // and we should be able to see that the reporter // refers to the JMX server we started mbeanServer = jmx.getMBeanServer(); - assertNotNull("No JMX server found in the reporter", - mbeanServer); + assertNotNull("No JMX server found in the reporter", mbeanServer); // NOTE: we can't guarantee that "mbeanServer == platformServer" // the JVM may have multiple MBean servers running when the test started @@ -114,8 +111,7 @@ public void testJmxRegistration() throws Exception { assertTrue("No MBeans found in server", mbeanServer.getMBeanCount() > 0); Set objects = mbeanServer.queryMBeans(null, null); - assertFalse("No objects found in mbean server", objects - .isEmpty()); + assertFalse("No objects found in mbean server", objects.isEmpty()); int numDynamicMbeans = 0; for (ObjectInstance o : objects) { ObjectName name = o.getObjectName(); @@ -124,18 +120,20 @@ public void testJmxRegistration() throws Exception { if (name.getDomain().equals("solr")) { numDynamicMbeans++; MBeanAttributeInfo[] attrs = mbeanInfo.getAttributes(); - if (name.getKeyProperty("name").equals("fetcher")) { // no attributes without active replication + if (name.getKeyProperty("name") + .equals("fetcher")) { // no attributes without active replication continue; } - assertTrue("No 
Attributes found for mbean: " + o.getObjectName() + ", " + mbeanInfo, + assertTrue( + "No Attributes found for mbean: " + o.getObjectName() + ", " + mbeanInfo, 0 < attrs.length); for (MBeanAttributeInfo attr : attrs) { // ensure every advertised attribute is gettable try { Object trash = mbeanServer.getAttribute(o.getObjectName(), attr.getName()); } catch (javax.management.AttributeNotFoundException e) { - throw new RuntimeException("Unable to fetch attribute for " + o.getObjectName() - + ": " + attr.getName(), e); + throw new RuntimeException( + "Unable to fetch attribute for " + o.getObjectName() + ": " + attr.getName(), e); } } } @@ -148,12 +146,12 @@ public void testJmxUpdate() throws Exception { SolrInfoBean bean = null; // wait until searcher is registered - for (int i=0; i<100; i++) { + for (int i = 0; i < 100; i++) { bean = h.getCore().getInfoRegistry().get("searcher"); if (bean != null) break; Thread.sleep(250); } - if (bean==null) throw new RuntimeException("searcher was never registered"); + if (bean == null) throw new RuntimeException("searcher was never registered"); ObjectName searcher = nameFactory.createName("gauge", registryName, "SEARCHER.searcher.*"); if (log.isInfoEnabled()) { @@ -161,15 +159,15 @@ } Set objects = mbeanServer.queryMBeans(searcher, null); - assertFalse("No mbean found for SolrIndexSearcher", mbeanServer.queryMBeans(searcher, null).isEmpty()); + assertFalse( + "No mbean found for SolrIndexSearcher", mbeanServer.queryMBeans(searcher, null).isEmpty()); ObjectName name = nameFactory.createName("gauge", registryName, "SEARCHER.searcher.numDocs"); - int oldNumDocs = (Integer)mbeanServer.getAttribute(name, "Value"); + int oldNumDocs = (Integer) mbeanServer.getAttribute(name, "Value"); assertU(adoc("id", "1")); assertU("commit", commit()); - int numDocs = (Integer)mbeanServer.getAttribute(name, "Value"); - assertTrue("New numDocs is same as old numDocs as reported by JMX", - numDocs > oldNumDocs); + int numDocs = (Integer) mbeanServer.getAttribute(name, "Value"); + assertTrue("New numDocs is same as old numDocs as reported by JMX", numDocs > oldNumDocs); } @Test @@ -187,7 +185,7 @@ public void testJmxOnCoreReload() throws Exception { SolrJmxReporter reporter = null; for (Map.Entry e : reporters.entrySet()) { if (e.getKey().endsWith(coreHashCode) && e.getValue() instanceof SolrJmxReporter) { - reporter = (SolrJmxReporter)e.getValue(); + reporter = (SolrJmxReporter) e.getValue(); break; } } @@ -198,7 +196,8 @@ public void testJmxOnCoreReload() throws Exception { int oldNumberOfObjects = 0; for (ObjectInstance bean : oldBeans) { try { - if (tag.equals(mbeanServer.getAttribute(bean.getObjectName(), JmxMetricsReporter.INSTANCE_TAG))) { + if (tag.equals( + mbeanServer.getAttribute(bean.getObjectName(), JmxMetricsReporter.INSTANCE_TAG))) { oldNumberOfObjects++; } } catch (AttributeNotFoundException e) { @@ -207,8 +206,14 @@ public void testJmxOnCoreReload() throws Exception { } int totalCoreMetrics = mgr.registry(registryName).getMetrics().size(); - log.info("Before Reload: size of all core metrics: {} MBeans: {}", totalCoreMetrics, oldNumberOfObjects); - assertEquals("Number of registered MBeans is not the same as the number of core metrics", totalCoreMetrics, oldNumberOfObjects); + log.info( + "Before Reload: size of all core metrics: {} MBeans: {}", + totalCoreMetrics, + oldNumberOfObjects); + assertEquals( + "Number of registered MBeans is not the same as the number of core metrics", + totalCoreMetrics, + 
oldNumberOfObjects); h.getCoreContainer().reload(coreName); assertQ(req("q", "*:*"), "//result[@numFound='0']"); @@ -218,7 +223,7 @@ public void testJmxOnCoreReload() throws Exception { reporter = null; for (Map.Entry e : reporters.entrySet()) { if (e.getKey().endsWith(coreHashCode) && e.getValue() instanceof SolrJmxReporter) { - reporter = (SolrJmxReporter)e.getValue(); + reporter = (SolrJmxReporter) e.getValue(); break; } } @@ -235,7 +240,8 @@ public void testJmxOnCoreReload() throws Exception { totalCoreMetrics = registry.getMetrics().size(); for (ObjectInstance bean : newBeans) { try { - if (tag.equals(mbeanServer.getAttribute(bean.getObjectName(), JmxMetricsReporter.INSTANCE_TAG))) { + if (tag.equals( + mbeanServer.getAttribute(bean.getObjectName(), JmxMetricsReporter.INSTANCE_TAG))) { String[] name = bean.getObjectName().toString().substring(32).split(","); StringBuilder sb = new StringBuilder(); for (String n : name) { @@ -253,7 +259,10 @@ public void testJmxOnCoreReload() throws Exception { } } - log.info("After Reload: size of all core metrics: {} MBeans: {}", totalCoreMetrics, newNumberOfObjects); + log.info( + "After Reload: size of all core metrics: {} MBeans: {}", + totalCoreMetrics, + newNumberOfObjects); if (totalCoreMetrics != newNumberOfObjects) { Set errors = new TreeSet<>(beanNames); errors.removeAll(metricNames); @@ -261,7 +270,11 @@ public void testJmxOnCoreReload() throws Exception { errors = new TreeSet<>(metricNames); errors.removeAll(beanNames); log.error("Unexpected metric names: {}", errors); - fail("Number of registered MBeans is not the same as the number of core metrics: " + totalCoreMetrics + " != " + newNumberOfObjects); + fail( + "Number of registered MBeans is not the same as the number of core metrics: " + + totalCoreMetrics + + " != " + + newNumberOfObjects); } } } diff --git a/solr/core/src/test/org/apache/solr/core/TestLazyCores.java b/solr/core/src/test/org/apache/solr/core/TestLazyCores.java index 40435988044..3fc69232cd8 100644 --- a/solr/core/src/test/org/apache/solr/core/TestLazyCores.java +++ b/solr/core/src/test/org/apache/solr/core/TestLazyCores.java @@ -16,6 +16,7 @@ */ package org.apache.solr.core; +import com.google.common.collect.ImmutableList; import java.io.File; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -27,8 +28,6 @@ import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.regex.Pattern; - -import com.google.common.collect.ImmutableList; import org.apache.commons.io.FileUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrServerException; @@ -61,38 +60,43 @@ public class TestLazyCores extends SolrTestCaseJ4 { @BeforeClass public static void setupClass() throws Exception { - // Need to use a disk-based directory because there are tests that close a core after adding documents - // then expect to be able to re-open that core and execute a search + // Need to use a disk-based directory because there are tests that close a core after adding + // documents then expect to be able to re-open that core and execute a search useFactory("solr.StandardDirectoryFactory"); } - private static CoreDescriptor makeCoreDescriptor(CoreContainer cc, String coreName, String isTransient, String loadOnStartup) { - return new CoreDescriptor(coreName, cc.getCoreRootDirectory().resolve(coreName), cc, - CoreDescriptor.CORE_TRANSIENT, isTransient, - CoreDescriptor.CORE_LOADONSTARTUP, loadOnStartup); - } - - private static final CoresLocator testCores = new 
ReadOnlyCoresLocator() { - @Override - public List discover(CoreContainer cc) { - return ImmutableList.of( - makeCoreDescriptor(cc, "collection1", "false", "true"), - makeCoreDescriptor(cc, "collection2", "true", "true"), - makeCoreDescriptor(cc, "collection3", "on", "false"), - makeCoreDescriptor(cc, "collection4", "false", "false"), - makeCoreDescriptor(cc, "collection5", "false", "true"), - makeCoreDescriptor(cc, "collection6", "true", "false"), - makeCoreDescriptor(cc, "collection7", "true", "false"), - makeCoreDescriptor(cc, "collection8", "true", "false"), - makeCoreDescriptor(cc, "collection9", "true", "false") - ); - } - }; + private static CoreDescriptor makeCoreDescriptor( + CoreContainer cc, String coreName, String isTransient, String loadOnStartup) { + return new CoreDescriptor( + coreName, + cc.getCoreRootDirectory().resolve(coreName), + cc, + CoreDescriptor.CORE_TRANSIENT, + isTransient, + CoreDescriptor.CORE_LOADONSTARTUP, + loadOnStartup); + } + private static final CoresLocator testCores = + new ReadOnlyCoresLocator() { + @Override + public List discover(CoreContainer cc) { + return ImmutableList.of( + makeCoreDescriptor(cc, "collection1", "false", "true"), + makeCoreDescriptor(cc, "collection2", "true", "true"), + makeCoreDescriptor(cc, "collection3", "on", "false"), + makeCoreDescriptor(cc, "collection4", "false", "false"), + makeCoreDescriptor(cc, "collection5", "false", "true"), + makeCoreDescriptor(cc, "collection6", "true", "false"), + makeCoreDescriptor(cc, "collection7", "true", "false"), + makeCoreDescriptor(cc, "collection8", "true", "false"), + makeCoreDescriptor(cc, "collection9", "true", "false")); + } + }; private CoreContainer init() throws Exception { solrHomeDirectory = createTempDir().toFile(); - + copyXmlToHome(solrHomeDirectory.getAbsoluteFile(), "solr.xml"); for (int idx = 1; idx < 10; ++idx) { copyMinConf(new File(solrHomeDirectory, "collection" + idx)); @@ -106,12 +110,15 @@ private CoreContainer initEmpty() throws IOException { solrHomeDirectory = createTempDir().toFile(); copyXmlToHome(solrHomeDirectory.getAbsoluteFile(), "solr.xml"); NodeConfig cfg = NodeConfig.loadNodeConfig(solrHomeDirectory.toPath(), null); - return createCoreContainer(cfg, new ReadOnlyCoresLocator() { - @Override - public List discover(CoreContainer cc) { - return Collections.emptyList(); - } - }); } + return createCoreContainer( + cfg, + new ReadOnlyCoresLocator() { + @Override + public List discover(CoreContainer cc) { + return Collections.emptyList(); + } + }); + } @Test public void testLazyLoad() throws Exception { @@ -120,8 +127,14 @@ public void testLazyLoad() throws Exception { // NOTE: This checks the initial state for loading, no need to do this elsewhere. checkLoadedCores(cc, "collection1", "collection2", "collection5"); - checkCoresNotLoaded(cc, "collection3", "collection4", "collection6", "collection7", - "collection8", "collection9"); + checkCoresNotLoaded( + cc, + "collection3", + "collection4", + "collection6", + "collection7", + "collection8", + "collection9"); SolrCore core1 = cc.getCore("collection1"); assertFalse("core1 should not be transient", core1.getCoreDescriptor().isTransient()); @@ -154,8 +167,8 @@ public void testLazyLoad() throws Exception { } } - // This is a little weak. I'm not sure how to test that lazy core2 is loaded automagically. The getCore - // will, of course, load it. + // This is a little weak. I'm not sure how to test that lazy core2 is loaded automagically. The + // getCore will, of course, load it. 
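A note on the pattern these tests lean on: cc.getCore(name) lazily loads a transient core on first use and bumps its reference count, and SolrCore.close() gives that reference back. A minimal sketch of the contract (illustrative only, not part of this patch; cc is any initialized CoreContainer):

    try (SolrCore core = cc.getCore("collection2")) {
      // loaded on demand, refcount incremented: while this reference is held,
      // the container will not close the core underneath us
    }
    // try-with-resources called close(), dropping the refcount, so the
    // transient cache is free to evict and close the core later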
private void checkSearch(SolrCore core) throws IOException { addLazy(core, "id", "0"); @@ -171,26 +184,27 @@ private void checkSearch(SolrCore core) throws IOException { core.getUpdateHandler().commit(cmtCmd); // Just get a couple of searches to work! - assertQ("test prefix query", - makeReq(core, "q", "{!prefix f=v_t}hel", "wt", "xml") - , "//result[@numFound='2']" - ); + assertQ( + "test prefix query", + makeReq(core, "q", "{!prefix f=v_t}hel", "wt", "xml"), + "//result[@numFound='2']"); - assertQ("test raw query", - makeReq(core, "q", "{!raw f=v_t}hello", "wt", "xml") - , "//result[@numFound='2']" - ); + assertQ( + "test raw query", + makeReq(core, "q", "{!raw f=v_t}hello", "wt", "xml"), + "//result[@numFound='2']"); // no analysis is done, so these should match nothing - assertQ("test raw query", - makeReq(core, "q", "{!raw f=v_t}Hello", "wt", "xml") - , "//result[@numFound='0']" - ); - assertQ("test raw query", - makeReq(core, "q", "{!raw f=v_f}1.5", "wt", "xml") - , "//result[@numFound='0']" - ); + assertQ( + "test raw query", + makeReq(core, "q", "{!raw f=v_t}Hello", "wt", "xml"), + "//result[@numFound='0']"); + assertQ( + "test raw query", + makeReq(core, "q", "{!raw f=v_f}1.5", "wt", "xml"), + "//result[@numFound='0']"); } + @Test public void testLazySearch() throws Exception { CoreContainer cc = init(); @@ -201,13 +215,13 @@ public void testLazySearch() throws Exception { checkSearch(core4); - // Now just insure that the normal searching on "collection1" finds _0_ on the same query that found _2_ above. - // Use of makeReq above and req below is tricky, very tricky. + // Now just insure that the normal searching on "collection1" finds _0_ on the same query that + // found _2_ above. Use of makeReq above and req below is tricky, very tricky. SolrCore collection1 = cc.getCore("collection1"); - assertQ("test raw query", - makeReq(collection1, "q", "{!raw f=v_t}hello", "wt", "xml") - , "//result[@numFound='0']" - ); + assertQ( + "test raw query", + makeReq(collection1, "q", "{!raw f=v_t}hello", "wt", "xml"), + "//result[@numFound='0']"); checkLoadedCores(cc, "collection1", "collection2", "collection4", "collection5"); @@ -225,43 +239,89 @@ public void testCachingLimit() throws Exception { // First check that all the cores that should be loaded at startup actually are. checkLoadedCores(cc, "collection1", "collection2", "collection5"); - checkCoresNotLoaded(cc, "collection3", "collection4", "collection6", "collection7", "collection8", "collection9"); - - // By putting these in non-alpha order, we're also checking that we're not just seeing an artifact. + checkCoresNotLoaded( + cc, + "collection3", + "collection4", + "collection6", + "collection7", + "collection8", + "collection9"); + + // By putting these in non-alpha order, we're also checking that we're not just seeing an + // artifact. 
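The behavior asserted below is essentially bounded least-recently-used eviction, capped at TRANSIENT_CORE_CACHE_MAX_SIZE. As a rough model only (the real TransientSolrCoreCacheDefault additionally refuses to evict cores that are still referenced), the bookkeeping resembles:

    Map<String, SolrCore> cache =
        new LinkedHashMap<String, SolrCore>(16, 0.75f, true) { // true = access order
          @Override
          protected boolean removeEldestEntry(Map.Entry<String, SolrCore> eldest) {
            return size() > TRANSIENT_CORE_CACHE_MAX_SIZE; // drop the least recently used core
          }
        };

Since a lookup refreshes an entry's position, the non-alpha access order below changes which cores survive.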
getCoreAndPutBack(cc, "collection1"); getCoreAndPutBack(cc, "collection3"); getCoreAndPutBack(cc, "collection4"); getCoreAndPutBack(cc, "collection2"); getCoreAndPutBack(cc, "collection5"); - checkLoadedCores(cc, "collection1", "collection2", "collection3", "collection4", "collection5"); + checkLoadedCores( + cc, "collection1", "collection2", "collection3", "collection4", "collection5"); checkCoresNotLoaded(cc, "collection6", "collection7", "collection8", "collection9"); // map should be full up, add one more and verify getCoreAndPutBack(cc, "collection6"); - checkLoadedCores(cc, "collection1", "collection2", "collection3", "collection4", "collection5", + checkLoadedCores( + cc, + "collection1", + "collection2", + "collection3", + "collection4", + "collection5", "collection6"); checkCoresNotLoaded(cc, "collection7", "collection8", "collection9"); getCoreAndPutBack(cc, "collection7"); - checkLoadedCores(cc, "collection1", "collection2", "collection3", "collection4", "collection5", - "collection6", "collection7"); + checkLoadedCores( + cc, + "collection1", + "collection2", + "collection3", + "collection4", + "collection5", + "collection6", + "collection7"); checkCoresNotLoaded(cc, "collection8", "collection9"); getCoreAndPutBack(cc, "collection8"); checkLoadedCores(cc, "collection1", "collection4", "collection5", "collection8"); - checkSomeLoadedCores(cc, TRANSIENT_CORE_CACHE_MAX_SIZE, "collection2", "collection3", "collection6", - "collection7", "collection8"); + checkSomeLoadedCores( + cc, + TRANSIENT_CORE_CACHE_MAX_SIZE, + "collection2", + "collection3", + "collection6", + "collection7", + "collection8"); checkCoresNotLoaded(cc, "collection9"); - checkSomeCoresNotLoaded(cc, 5 - TRANSIENT_CORE_CACHE_MAX_SIZE, "collection2", "collection3", - "collection6", "collection7"); + checkSomeCoresNotLoaded( + cc, + 5 - TRANSIENT_CORE_CACHE_MAX_SIZE, + "collection2", + "collection3", + "collection6", + "collection7"); getCoreAndPutBack(cc, "collection9"); checkLoadedCores(cc, "collection1", "collection4", "collection5", "collection9"); - checkSomeLoadedCores(cc, TRANSIENT_CORE_CACHE_MAX_SIZE, "collection2", "collection3", "collection6", - "collection7", "collection8", "collection9"); - checkSomeCoresNotLoaded(cc, 6 - TRANSIENT_CORE_CACHE_MAX_SIZE, "collection2", "collection3", - "collection6", "collection7", "collection8"); + checkSomeLoadedCores( + cc, + TRANSIENT_CORE_CACHE_MAX_SIZE, + "collection2", + "collection3", + "collection6", + "collection7", + "collection8", + "collection9"); + checkSomeCoresNotLoaded( + cc, + 6 - TRANSIENT_CORE_CACHE_MAX_SIZE, + "collection2", + "collection3", + "collection6", + "collection7", + "collection8"); // verify that getting metrics from an unloaded core doesn't cause exceptions (SOLR-12541) try (SolrCore core1 = cc.getCore("collection1"); @@ -298,15 +358,16 @@ public void testRace() throws Exception { Thread[] threads = new Thread[15]; for (int idx = 0; idx < threads.length; idx++) { - threads[idx] = new Thread() { - @Override - public void run() { - SolrCore core = cc.getCore("collection3"); - synchronized (theCores) { - theCores.add(core); - } - } - }; + threads[idx] = + new Thread() { + @Override + public void run() { + SolrCore core = cc.getCore("collection3"); + synchronized (theCores) { + theCores.add(core); + } + } + }; threads[idx].start(); } for (Thread thread : threads) { @@ -324,25 +385,37 @@ public void run() { } } - private void tryCreateFail(CoreAdminHandler admin, String name, String dataDir, String... 
errs) throws Exception { - SolrException thrown = expectThrows(SolrException.class, () -> { - SolrQueryResponse resp = new SolrQueryResponse(); - - SolrQueryRequest request = req(CoreAdminParams.ACTION, - CoreAdminParams.CoreAdminAction.CREATE.toString(), - CoreAdminParams.DATA_DIR, dataDir, - CoreAdminParams.NAME, name, - "schema", "schema.xml", - "config", "solrconfig.xml"); - - admin.handleRequestBody(request, resp); - }); + private void tryCreateFail(CoreAdminHandler admin, String name, String dataDir, String... errs) + throws Exception { + SolrException thrown = + expectThrows( + SolrException.class, + () -> { + SolrQueryResponse resp = new SolrQueryResponse(); + + SolrQueryRequest request = + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.CREATE.toString(), + CoreAdminParams.DATA_DIR, + dataDir, + CoreAdminParams.NAME, + name, + "schema", + "schema.xml", + "config", + "solrconfig.xml"); + + admin.handleRequestBody(request, resp); + }); assertEquals("Exception code should be 500", 500, thrown.code()); for (String err : errs) { - assertTrue("Should have seen an exception containing the error", + assertTrue( + "Should have seen an exception containing the error", thrown.getMessage().contains(err)); } } + @Test public void testCreateSame() throws Exception { final CoreContainer cc = init(); @@ -359,7 +432,6 @@ public void testCreateSame() throws Exception { copyMinConf(new File(solrHomeDirectory, "t5")); copyMinConf(new File(solrHomeDirectory, "t6")); - // Should also fail with the same name tryCreateFail(admin, "collection2", "t12", "Core with name", "collection2", "already exists"); tryCreateFail(admin, "collection4", "t14", "Core with name", "collection4", "already exists"); @@ -376,35 +448,41 @@ } } - private void createViaAdmin(CoreContainer cc, String name, boolean isTransient, - boolean loadOnStartup) throws Exception { + private void createViaAdmin( + CoreContainer cc, String name, boolean isTransient, boolean loadOnStartup) throws Exception { try (final CoreAdminHandler admin = new CoreAdminHandler(cc)) { SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATE.toString(), - CoreAdminParams.NAME, name, - CoreAdminParams.TRANSIENT, Boolean.toString(isTransient), - CoreAdminParams.LOAD_ON_STARTUP, Boolean.toString(loadOnStartup)), - resp); + CoreAdminParams.NAME, + name, + CoreAdminParams.TRANSIENT, + Boolean.toString(isTransient), + CoreAdminParams.LOAD_ON_STARTUP, + Boolean.toString(loadOnStartup)), + resp); } - } private void unloadViaAdmin(CoreContainer cc, String name) throws Exception { try (final CoreAdminHandler admin = new CoreAdminHandler(cc)) { SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.UNLOAD.toString(), - CoreAdminParams.CORE, name), - resp); + CoreAdminParams.CORE, + name), + resp); } } - - // Make sure that creating a transient core from the admin handler correctly respects the transient limits etc. + + // Make sure that creating a transient core from the admin handler correctly respects the + // transient limits etc. 
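The createViaAdmin/unloadViaAdmin helpers above drive the handler in-process; over the wire the same operations would look roughly like this SolrJ sketch (illustrative only; solrClient stands for any SolrClient pointed at the node):

    CoreAdminRequest.Create create = new CoreAdminRequest.Create();
    create.setCoreName("core1");
    create.setInstanceDir("core1");
    create.setIsTransient(true);      // eligible for transient-cache eviction
    create.setIsLoadOnStartup(false); // defer loading until first use
    create.process(solrClient);       // CoreAdmin CREATE
    CoreAdminRequest.unloadCore("core1", solrClient); // CoreAdmin UNLOAD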
@Test public void testCreateTransientFromAdmin() throws Exception { final CoreContainer cc = initEmpty(); @@ -421,7 +499,7 @@ public void testCreateTransientFromAdmin() throws Exception { createViaAdmin(cc, "core4", true, false); createViaAdmin(cc, "core5", true, false); - final var coreNames = new String[]{"core1", "core2", "core3", "core4", "core5"}; + final var coreNames = new String[] {"core1", "core2", "core3", "core4", "core5"}; checkSomeCoresNotLoaded(cc, coreNames.length - TRANSIENT_CORE_CACHE_MAX_SIZE, coreNames); final SolrCore c1 = cc.getCore("core1"); @@ -435,26 +513,33 @@ public void testCreateTransientFromAdmin() throws Exception { checkSomeLoadedCores(cc, 5, coreNames); - // While we're at it, a test for SOLR-5366, unloading transient core that's been unloaded b/c it's - // transient generates a "too many closes" error + // While we're at it, a test for SOLR-5366, unloading transient core that's been unloaded b/c + // it's transient generates a "too many closes" error class TestThread extends Thread { - + @Override public void run() { - + final int sleep_millis = random().nextInt(1000); try { if (sleep_millis > 0) { if (VERBOSE) { - System.out.println("TestLazyCores.testCreateTransientFromAdmin Thread.run sleeping for "+sleep_millis+" ms"); + System.out.println( + "TestLazyCores.testCreateTransientFromAdmin Thread.run sleeping for " + + sleep_millis + + " ms"); } Thread.sleep(sleep_millis); } - } - catch (InterruptedException ie) { + } catch (InterruptedException ie) { if (VERBOSE) { - System.out.println("TestLazyCores.testCreateTransientFromAdmin Thread.run caught "+ie+" whilst sleeping for "+sleep_millis+" ms"); + System.out.println( + "TestLazyCores.testCreateTransientFromAdmin Thread.run caught " + + ie + + " whilst sleeping for " + + sleep_millis + + " ms"); } } @@ -464,13 +549,14 @@ public void run() { c4.close(); c5.close(); } - }; - + } + ; + // with SOLR-6279 UNLOAD will wait for the core's reference count to have reached zero // hence cN.close() need to proceed or run in parallel with unloadViaAdmin(...) final TestThread cThread = new TestThread(); cThread.start(); - + unloadViaAdmin(cc, "core1"); unloadViaAdmin(cc, "core2"); unloadViaAdmin(cc, "core3"); @@ -491,11 +577,12 @@ public void run() { // 4> that having no solr.xml entry for transient chache handler correctly uses the default. @Test public void testBadConfigsGenerateErrors() throws Exception { - final CoreContainer cc = initGoodAndBad(Arrays.asList("core1", "core2"), - Arrays.asList("badSchema1", "badSchema2"), - Arrays.asList("badConfig1", "badConfig2")); - - + final CoreContainer cc = + initGoodAndBad( + Arrays.asList("core1", "core2"), + Arrays.asList("badSchema1", "badSchema2"), + Arrays.asList("badConfig1", "badConfig2")); + try { // first, did the two good cores load successfully? checkLoadedCores(cc, "core1", "core2"); @@ -507,14 +594,15 @@ public void testBadConfigsGenerateErrors() throws Exception { SolrCore core1 = cc.getCore("core1"); checkSearch(core1); - // Did we get the expected message for each of the cores that failed to load? Make sure we don't run afoul of - // the dreaded slash/backslash difference on Windows and *nix machines. + // Did we get the expected message for each of the cores that failed to load? Make sure we + // don't run afoul of the dreaded slash/backslash difference on Windows and *nix machines. 
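The makePath helper used by the testMessage calls below (its body falls outside this hunk) presumably reduces to joining segments with the platform separator, which is what keeps the assertions portable:

    // hypothetical equivalent of makePath("badConfig1", "conf", "solrconfig.xml")
    String expected = String.join(File.separator, "badConfig1", "conf", "solrconfig.xml");
    // -> badConfig1/conf/solrconfig.xml on *nix, badConfig1\conf\solrconfig.xml on Windows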
testMessage(cc.getCoreInitFailures(), makePath("badConfig1", "conf", "solrconfig.xml")); testMessage(cc.getCoreInitFailures(), makePath("badConfig2", "conf", "solrconfig.xml")); testMessage(cc.getCoreInitFailures(), makePath("badSchema1", "conf", "schema.xml")); testMessage(cc.getCoreInitFailures(), makePath("badSchema2", "conf", "schema.xml")); - // Status should report that there are failure messages for the bad cores and none for the good cores. + // Status should report that there are failure messages for the bad cores and none for the + // good cores. checkStatus(cc, true, "core1"); checkStatus(cc, true, "core2"); checkStatus(cc, false, "badSchema1"); @@ -522,13 +610,13 @@ public void testBadConfigsGenerateErrors() throws Exception { checkStatus(cc, false, "badConfig1"); checkStatus(cc, false, "badConfig2"); - // Copy good config and schema files in and see if you can then load them (they are transient after all) + // Copy good config and schema files in and see if you can then load them (they are transient + // after all) copyGoodConf("badConfig1", "solrconfig-minimal.xml", "solrconfig.xml"); copyGoodConf("badConfig2", "solrconfig-minimal.xml", "solrconfig.xml"); copyGoodConf("badSchema1", "schema-tiny.xml", "schema.xml"); copyGoodConf("badSchema2", "schema-tiny.xml", "schema.xml"); - // Reload the cores and insure that // 1> they pick up the new configs // 2> they don't fail again b/c they still have entries in loadFailure in core container. @@ -536,15 +624,18 @@ public void testBadConfigsGenerateErrors() throws Exception { cc.reload("badConfig2"); cc.reload("badSchema1"); cc.reload("badSchema2"); - SolrCore bc1 = cc.getCore("badConfig1");; + SolrCore bc1 = cc.getCore("badConfig1"); + ; SolrCore bc2 = cc.getCore("badConfig2"); SolrCore bs1 = cc.getCore("badSchema1"); SolrCore bs2 = cc.getCore("badSchema2"); // all the cores should be found in the list now. - checkLoadedCores(cc, "core1", "core2", "badSchema1", "badSchema2", "badConfig1", "badConfig2"); + checkLoadedCores( + cc, "core1", "core2", "badSchema1", "badSchema2", "badConfig1", "badConfig2"); - // Did we clear out the errors by putting good files in place? And the cores that never were bad should be OK too. + // Did we clear out the errors by putting good files in place? And the cores that never were + // bad should be OK too. checkStatus(cc, true, "core1"); checkStatus(cc, true, "core2"); checkStatus(cc, true, "badSchema1"); @@ -575,11 +666,16 @@ private void testMessage(Map failures, St messages.add(message); if (message.contains(lookFor)) return; } - fail("Should have found message containing these tokens " + lookFor + " in the failure messages: " + messages); + fail( + "Should have found message containing these tokens " + + lookFor + + " in the failure messages: " + + messages); } // Just localizes writing a configuration rather than repeating it for good and bad files. 
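writeCustomConfig below assembles the core.properties discovery file by hand with FileUtils; an equivalent sketch using java.util.Properties (same key/value content, modulo the timestamp comment Properties.store emits; coreRoot and coreName as in the surrounding code):

    Properties p = new Properties();
    p.setProperty("name", coreName);        // core name picked up by core discovery
    p.setProperty("transient", "true");     // eligible for transient-cache eviction
    p.setProperty("loadOnStartup", "true"); // load eagerly at container startup
    try (Writer w =
        Files.newBufferedWriter(
            coreRoot.toPath().resolve("core.properties"), StandardCharsets.UTF_8)) {
      p.store(w, null); // writes the key=value pairs CorePropertiesLocator reads
    }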
- private void writeCustomConfig(String coreName, String config, String schema, String rand_snip) throws IOException { + private void writeCustomConfig(String coreName, String config, String schema, String rand_snip) + throws IOException { File coreRoot = new File(solrHomeDirectory, coreName); File subHome = new File(coreRoot, "conf"); @@ -587,41 +683,52 @@ private void writeCustomConfig(String coreName, String config, String schema, St assertTrue("Failed to make subdirectory ", coreRoot.mkdirs()); } // Write the file for core discovery - FileUtils.writeStringToFile(new File(coreRoot, "core.properties"), "name=" + coreName + - System.getProperty("line.separator") + "transient=true" + - System.getProperty("line.separator") + "loadOnStartup=true", StandardCharsets.UTF_8); + FileUtils.writeStringToFile( + new File(coreRoot, "core.properties"), + "name=" + + coreName + + System.getProperty("line.separator") + + "transient=true" + + System.getProperty("line.separator") + + "loadOnStartup=true", + StandardCharsets.UTF_8); - FileUtils.writeStringToFile(new File(subHome, "solrconfig.snippet.randomindexconfig.xml"), rand_snip, StandardCharsets.UTF_8); + FileUtils.writeStringToFile( + new File(subHome, "solrconfig.snippet.randomindexconfig.xml"), + rand_snip, + StandardCharsets.UTF_8); - FileUtils.writeStringToFile(new File(subHome, "solrconfig.xml"), config, StandardCharsets.UTF_8); + FileUtils.writeStringToFile( + new File(subHome, "solrconfig.xml"), config, StandardCharsets.UTF_8); FileUtils.writeStringToFile(new File(subHome, "schema.xml"), schema, StandardCharsets.UTF_8); } - // Write out the cores' config files, both bad schema files, bad config files as well as some good cores. - private CoreContainer initGoodAndBad(List goodCores, - List badSchemaCores, - List badConfigCores) throws Exception { + // Write out the cores' config files, both bad schema files, bad config files as well as some good + // cores. + private CoreContainer initGoodAndBad( + List goodCores, List badSchemaCores, List badConfigCores) + throws Exception { solrHomeDirectory = createTempDir().toFile(); - + // Don't pollute the log with exception traces when they're expected. ignoreException(Pattern.quote("SAXParseException")); - + // Create the cores that should be fine. for (String coreName : goodCores) { File coreRoot = new File(solrHomeDirectory, coreName); copyMinConf(coreRoot, "name=" + coreName); - } // Collect the files that we'll write to the config directories. String top = SolrTestCaseJ4.TEST_HOME() + "/collection1/conf"; - String min_schema = FileUtils.readFileToString(new File(top, "schema-tiny.xml"), - StandardCharsets.UTF_8); - String min_config = FileUtils.readFileToString(new File(top, "solrconfig-minimal.xml"), - StandardCharsets.UTF_8); - String rand_snip = FileUtils.readFileToString(new File(top, "solrconfig.snippet.randomindexconfig.xml"), - StandardCharsets.UTF_8); + String min_schema = + FileUtils.readFileToString(new File(top, "schema-tiny.xml"), StandardCharsets.UTF_8); + String min_config = + FileUtils.readFileToString(new File(top, "solrconfig-minimal.xml"), StandardCharsets.UTF_8); + String rand_snip = + FileUtils.readFileToString( + new File(top, "solrconfig.snippet.randomindexconfig.xml"), StandardCharsets.UTF_8); // Now purposely mess up the config files, introducing stupid syntax errors. 
String bad_config = min_config.replace(" goodCores, return createCoreContainer(config, new CorePropertiesLocator(config.getCoreRootDirectory())); } - // We want to see that the core "heals itself" if an un-corrupted file is written to the directory. + // We want to see that the core "heals itself" if an un-corrupted file is written to the + // directory. private void copyGoodConf(String coreName, String srcName, String dstName) throws IOException { File coreRoot = new File(solrHomeDirectory, coreName); File subHome = new File(coreRoot, "conf"); String top = SolrTestCaseJ4.TEST_HOME() + "/collection1/conf"; FileUtils.copyFile(new File(top, srcName), new File(subHome, dstName)); - } // If ok==true, we shouldn't be seeing any failure cases. @@ -657,16 +764,17 @@ private void copyGoodConf(String coreName, String srcName, String dstName) throw private void checkStatus(CoreContainer cc, Boolean ok, String core) throws Exception { SolrQueryResponse resp = new SolrQueryResponse(); try (final CoreAdminHandler admin = new CoreAdminHandler(cc)) { - admin.handleRequestBody - (req(CoreAdminParams.ACTION, + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.STATUS.toString(), - CoreAdminParams.CORE, core), - resp); + CoreAdminParams.CORE, + core), + resp); } @SuppressWarnings({"unchecked"}) - Map failures = - (Map) resp.getValues().get("initFailures"); + Map failures = (Map) resp.getValues().get("initFailures"); if (ok) { if (failures.size() != 0) { @@ -683,7 +791,8 @@ public static void checkCoresNotLoaded(CoreContainer cc, String... coreNames) { checkSomeCoresNotLoaded(cc, coreNames.length, coreNames); } - public static void checkSomeCoresNotLoaded(CoreContainer cc, int numNotLoaded, String... coreNames) { + public static void checkSomeCoresNotLoaded( + CoreContainer cc, int numNotLoaded, String... coreNames) { Collection loadedCoreNames = cc.getLoadedCoreNames(); List notLoadedCoreNames = new ArrayList<>(); for (String coreName : coreNames) { @@ -691,16 +800,25 @@ public static void checkSomeCoresNotLoaded(CoreContainer cc, int numNotLoaded, S notLoadedCoreNames.add(coreName); } } - assertEquals("Expected " + numNotLoaded + " not loaded cores but found " + notLoadedCoreNames.size() - + ", coreNames=" + Arrays.asList(coreNames) - + ", notLoadedCoreNames=" + notLoadedCoreNames - + ", loadedCoreNames=" + loadedCoreNames, - numNotLoaded, notLoadedCoreNames.size()); + assertEquals( + "Expected " + + numNotLoaded + + " not loaded cores but found " + + notLoadedCoreNames.size() + + ", coreNames=" + + Arrays.asList(coreNames) + + ", notLoadedCoreNames=" + + notLoadedCoreNames + + ", loadedCoreNames=" + + loadedCoreNames, + numNotLoaded, + notLoadedCoreNames.size()); // All transient cores are listed in allCoreNames. Collection allCoreNames = cc.getAllCoreNames(); for (String coreName : coreNames) { - assertTrue("Core " + coreName + " should have been found in the list of all known core names", + assertTrue( + "Core " + coreName + " should have been found in the list of all known core names", allCoreNames.contains(coreName)); } @@ -712,14 +830,20 @@ private static void checkCoreNamesAndDescriptors(CoreContainer cc) { List descriptors = cc.getCoreDescriptors(); // Every core that has not failed to load should be in coreDescriptors. 
- assertEquals("There should be as many coreDescriptors as coreNames", allNames.size(), descriptors.size()); + assertEquals( + "There should be as many coreDescriptors as coreNames", + allNames.size(), + descriptors.size()); for (CoreDescriptor desc : descriptors) { - assertTrue("Each coreName should have a corresponding coreDescriptor", allNames.contains(desc.getName())); + assertTrue( + "Each coreName should have a corresponding coreDescriptor", + allNames.contains(desc.getName())); } // All loaded cores are in allNames. for (String name : cc.getLoadedCoreNames()) { - assertTrue("Loaded core " + name + " should have been found in the list of all possible core names", + assertTrue( + "Loaded core " + name + " should have been found in the list of all possible core names", allNames.contains(name)); } } @@ -728,7 +852,10 @@ private static void checkFailedCores(CoreContainer cc, String... failedCoreNames // Failed cores should not be in allCoreNames. Collection allNames = cc.getAllCoreNames(); for (String name : failedCoreNames) { - assertFalse("Failed core " + name + " should not have been found in the list of all possible core names", + assertFalse( + "Failed core " + + name + + " should not have been found in the list of all possible core names", allNames.contains(name)); } } @@ -745,17 +872,25 @@ public static void checkSomeLoadedCores(CoreContainer cc, int numLoaded, String. loadedListedCoreNames.add(coreName); } } - assertEquals("Expected " + numLoaded + " loaded cores but found " + loadedListedCoreNames.size() - + ", coreNames=" + Arrays.asList(coreNames) - + ", loadedListedCoreNames=" + loadedListedCoreNames - + ", loadedCoreNames=" + loadedCoreNames, - numLoaded, loadedListedCoreNames.size()); + assertEquals( + "Expected " + + numLoaded + + " loaded cores but found " + + loadedListedCoreNames.size() + + ", coreNames=" + + Arrays.asList(coreNames) + + ", loadedListedCoreNames=" + + loadedListedCoreNames + + ", loadedCoreNames=" + + loadedCoreNames, + numLoaded, + loadedListedCoreNames.size()); } private void addLazy(SolrCore core, String... fieldValues) throws IOException { UpdateHandler updater = core.getUpdateHandler(); AddUpdateCommand cmd = new AddUpdateCommand(makeReq(core)); - cmd.solrDoc = sdoc((Object[])fieldValues); + cmd.solrDoc = sdoc((Object[]) fieldValues); updater.addDoc(cmd); } @@ -769,50 +904,58 @@ private static final String makePath(String... 
args) { @Test public void testMidUseUnload() throws Exception { - final int maximumSleepMillis = random().nextInt(9999) + 1; // sleep for up to 10 s. Must add 1 because using - // this as a seed a few lines down will - // throw an exception if this is zero + // sleep for up to 10 s. Must add 1 because using + // this as a seed a few lines down will + // throw an exception if this is zero + final int maximumSleepMillis = random().nextInt(9999) + 1; if (VERBOSE) { - System.out.println("TestLazyCores.testMidUseUnload maximumSleepMillis="+maximumSleepMillis); + System.out.println("TestLazyCores.testMidUseUnload maximumSleepMillis=" + maximumSleepMillis); } - + class TestThread extends Thread { - + SolrCore core_to_use = null; - + @Override public void run() { - + final int sleep_millis = random().nextInt(maximumSleepMillis); try { if (sleep_millis > 0) { if (VERBOSE) { - System.out.println("TestLazyCores.testMidUseUnload Thread.run sleeping for "+sleep_millis+" ms"); + System.out.println( + "TestLazyCores.testMidUseUnload Thread.run sleeping for " + sleep_millis + " ms"); } Thread.sleep(sleep_millis); } - } - catch (InterruptedException ie) { + } catch (InterruptedException ie) { if (VERBOSE) { - System.out.println("TestLazyCores.testMidUseUnload Thread.run caught "+ie+" whilst sleeping for "+sleep_millis+" ms"); + System.out.println( + "TestLazyCores.testMidUseUnload Thread.run caught " + + ie + + " whilst sleeping for " + + sleep_millis + + " ms"); } } - - assertFalse(core_to_use.isClosed()); // not closed since we are still using it and hold a reference - core_to_use.close(); // now give up our reference to the core + + // not closed since we are still using it and hold a reference + assertFalse(core_to_use.isClosed()); + // now give up our reference to the core + core_to_use.close(); } - }; + } CoreContainer cc = init(); - + try { TestThread thread = new TestThread(); - + thread.core_to_use = cc.getCore("collection1"); assertNotNull(thread.core_to_use); assertFalse(thread.core_to_use.isClosed()); // freshly-in-use core is not closed thread.start(); - + unloadViaAdmin(cc, "collection1"); assertTrue(thread.core_to_use.isClosed()); // after unloading the core is closed @@ -822,20 +965,15 @@ public void run() { } } - // Ensure that when a core is evicted from the transient cache, any uncommitted docs are preserved. - // Note, this needs FS-based indexes to persist! - // Cores 2, 3, 6, 7, 8, 9 are transient + // Ensure that when a core is evicted from the transient cache, any uncommitted docs are + // preserved. Note, this needs FS-based indexes to persist! Cores 2, 3, 6, 7, 8, 9 are transient @Test public void testNoCommit() throws Exception { CoreContainer cc = init(); - String[] transientCoreNames = new String[]{ - "collection2", - "collection3", - "collection6", - "collection7", - "collection8", - "collection9" - }; + String[] transientCoreNames = + new String[] { + "collection2", "collection3", "collection6", "collection7", "collection8", "collection9" + }; try { // First, go through all the transient cores and add some docs. DO NOT COMMIT! // The evicted core should commit the docs when it gets closed. @@ -846,9 +984,11 @@ public void testNoCommit() throws Exception { } // Just proving that some cores have been evicted to respect transient core cache max size. 
- checkSomeCoresNotLoaded(cc, transientCoreNames.length - TRANSIENT_CORE_CACHE_MAX_SIZE, transientCoreNames); + checkSomeCoresNotLoaded( + cc, transientCoreNames.length - TRANSIENT_CORE_CACHE_MAX_SIZE, transientCoreNames); - // We still should have 4 transient cores loaded, their reference counts have NOT dropped to zero + // We still should have 4 transient cores loaded, their reference counts have NOT dropped to + // zero checkLoadedCores(cc, "collection1", "collection5"); checkSomeLoadedCores(cc, TRANSIENT_CORE_CACHE_MAX_SIZE, transientCoreNames); @@ -883,15 +1023,14 @@ private void add10(SolrCore core) throws IOException { addLazy(core, "id", "0" + idx); } SolrQueryRequest req = makeReq(core); - } private void check10(SolrCore core) { // Just get a couple of searches to work! - assertQ("test closing core without committing", - makeReq(core, "q", "*:*") - , "//result[@numFound='10']" - ); + assertQ( + "test closing core without committing", + makeReq(core, "q", "*:*"), + "//result[@numFound='10']"); } public void testDontEvictUsedCore() throws Exception { @@ -900,17 +1039,14 @@ public void testDontEvictUsedCore() throws Exception { // unload it (yet). CoreContainer cc = init(); - String[] transientCoreNames = new String[]{ - "collection2", - "collection3", - "collection6", - "collection7", - "collection8", - "collection9" - }; - - try (LogListener logs = LogListener.info(TransientSolrCoreCacheDefault.class.getName()) - .substring("NOT evicting transient core [" + transientCoreNames[0] + "]")) { + String[] transientCoreNames = + new String[] { + "collection2", "collection3", "collection6", "collection7", "collection8", "collection9" + }; + + try (LogListener logs = + LogListener.info(TransientSolrCoreCacheDefault.class.getName()) + .substring("NOT evicting transient core [" + transientCoreNames[0] + "]")) { cc.waitForLoadingCoresToFinish(1000); var solr = new EmbeddedSolrServer(cc, null); final var longReqTimeMs = 5000; // plenty of time for a slow/busy CI diff --git a/solr/core/src/test/org/apache/solr/core/TestMergePolicyConfig.java b/solr/core/src/test/org/apache/solr/core/TestMergePolicyConfig.java index bf004af7c74..9c38f56fbc4 100644 --- a/solr/core/src/test/org/apache/solr/core/TestMergePolicyConfig.java +++ b/solr/core/src/test/org/apache/solr/core/TestMergePolicyConfig.java @@ -17,7 +17,6 @@ package org.apache.solr.core; import java.util.concurrent.atomic.AtomicInteger; - import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReaderContext; @@ -39,26 +38,28 @@ import org.apache.solr.util.RefCounted; import org.junit.After; -/** @see SolrIndexConfigTest */ +/** + * @see SolrIndexConfigTest + */ public class TestMergePolicyConfig extends SolrTestCaseJ4 { - + private static AtomicInteger docIdCounter = new AtomicInteger(42); @After public void after() throws Exception { deleteCore(); } - + public void testSetNoCFSMergePolicyConfig() throws Exception { final boolean useCompoundFile = random().nextBoolean(); - System.setProperty("testSetNoCFSMergePolicyConfig.useCompoundFile", String.valueOf(useCompoundFile)); + System.setProperty( + "testSetNoCFSMergePolicyConfig.useCompoundFile", String.valueOf(useCompoundFile)); try { - initCore("solrconfig-mergepolicyfactory-nocfs.xml","schema-minimal.xml"); + initCore("solrconfig-mergepolicyfactory-nocfs.xml", "schema-minimal.xml"); IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore()); assertEquals(useCompoundFile, iwc.getUseCompoundFile()); - 
TieredMergePolicy tieredMP = assertAndCast(TieredMergePolicy.class, - iwc.getMergePolicy()); + TieredMergePolicy tieredMP = assertAndCast(TieredMergePolicy.class, iwc.getMergePolicy()); assertEquals(0.5D, tieredMP.getNoCFSRatio(), 0.0D); } finally { System.getProperties().remove("testSetNoCFSMergePolicyConfig.useCompoundFile"); @@ -66,12 +67,11 @@ public void testSetNoCFSMergePolicyConfig() throws Exception { } public void testDefaultMergePolicyConfig() throws Exception { - initCore("solrconfig-mergepolicy-defaults.xml","schema-minimal.xml"); + initCore("solrconfig-mergepolicy-defaults.xml", "schema-minimal.xml"); IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore()); assertEquals(false, iwc.getUseCompoundFile()); - TieredMergePolicy tieredMP = assertAndCast(TieredMergePolicy.class, - iwc.getMergePolicy()); + TieredMergePolicy tieredMP = assertAndCast(TieredMergePolicy.class, iwc.getMergePolicy()); assertEquals(TieredMergePolicy.DEFAULT_NO_CFS_RATIO, tieredMP.getNoCFSRatio(), 0.0D); assertCommitSomeNewDocs(); @@ -81,7 +81,7 @@ public void testDefaultMergePolicyConfig() throws Exception { public void testLegacyMergePolicyConfig() throws Exception { final boolean expectCFS = Boolean.parseBoolean(System.getProperty("useCompoundFile")); - initCore("solrconfig-mergepolicy-legacy.xml","schema-minimal.xml"); + initCore("solrconfig-mergepolicy-legacy.xml", "schema-minimal.xml"); IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore()); assertEquals(expectCFS, iwc.getUseCompoundFile()); @@ -93,30 +93,27 @@ public void testLegacyMergePolicyConfig() throws Exception { assertCommitSomeNewDocs(); assertCompoundSegments(h.getCore(), expectCFS); } - + public void testTieredMergePolicyConfig() throws Exception { - final boolean expectCFS - = Boolean.parseBoolean(System.getProperty("useCompoundFile")); + final boolean expectCFS = Boolean.parseBoolean(System.getProperty("useCompoundFile")); - initCore("solrconfig-tieredmergepolicyfactory.xml","schema-minimal.xml"); + initCore("solrconfig-tieredmergepolicyfactory.xml", "schema-minimal.xml"); IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore()); assertEquals(expectCFS, iwc.getUseCompoundFile()); - - TieredMergePolicy tieredMP = assertAndCast(TieredMergePolicy.class, - iwc.getMergePolicy()); + TieredMergePolicy tieredMP = assertAndCast(TieredMergePolicy.class, iwc.getMergePolicy()); // set by legacy setting assertEquals(7, tieredMP.getMaxMergeAtOnce()); - + // mp-specific setters assertEquals(0.1D, tieredMP.getNoCFSRatio(), 0.0D); - // make sure we overrode segmentsPerTier + // make sure we overrode segmentsPerTier // (split from maxMergeAtOnce out of mergeFactor) assertEquals(9D, tieredMP.getSegmentsPerTier(), 0.001); - + assertCommitSomeNewDocs(); - // even though we have a single segment (which is 100% of the size of + // even though we have a single segment (which is 100% of the size of // the index which is higher than our 0.6D threshold) the // compound ratio doesn't matter because the segment was never merged assertCompoundSegments(h.getCore(), expectCFS); @@ -132,10 +129,9 @@ public void testTieredMergePolicyConfig() throws Exception { } public void testNoMergePolicyFactoryConfig() throws Exception { - initCore("solrconfig-nomergepolicyfactory.xml","schema-minimal.xml"); + initCore("solrconfig-nomergepolicyfactory.xml", "schema-minimal.xml"); IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore()); - NoMergePolicy mergePolicy = 
assertAndCast(NoMergePolicy.class, - iwc.getMergePolicy()); + NoMergePolicy mergePolicy = assertAndCast(NoMergePolicy.class, iwc.getMergePolicy()); assertCommitSomeNewDocs(); @@ -145,7 +141,7 @@ public void testNoMergePolicyFactoryConfig() throws Exception { assertU(optimize()); assertNumSegments(h.getCore(), 2); deleteCore(); - initCore("solrconfig-nomergepolicyfactory.xml","schema-minimal.xml"); + initCore("solrconfig-nomergepolicyfactory.xml", "schema-minimal.xml"); iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore()); assertEquals(mergePolicy, iwc.getMergePolicy()); @@ -153,39 +149,37 @@ SolrQueryRequest req = req(); CommitUpdateCommand cmtCmd = new CommitUpdateCommand(req, true); cmtCmd.maxOptimizeSegments = -1; - expectThrows(IllegalArgumentException.class, () -> { - updater.commit(cmtCmd); - }); - + expectThrows( + IllegalArgumentException.class, + () -> { + updater.commit(cmtCmd); + }); } public void testLogMergePolicyFactoryConfig() throws Exception { final boolean byteSizeMP = random().nextBoolean(); - final Class<? extends LogMergePolicy> mpClass = byteSizeMP - ? LogByteSizeMergePolicy.class : LogDocMergePolicy.class; - final Class<? extends MergePolicyFactory> mpfClass = byteSizeMP - ? LogByteSizeMergePolicyFactory.class : LogDocMergePolicyFactory.class; + final Class<? extends LogMergePolicy> mpClass = + byteSizeMP ? LogByteSizeMergePolicy.class : LogDocMergePolicy.class; + final Class<? extends MergePolicyFactory> mpfClass = + byteSizeMP ? LogByteSizeMergePolicyFactory.class : LogDocMergePolicyFactory.class; System.setProperty("solr.test.log.merge.policy.factory", mpfClass.getName()); implTestLogMergePolicyConfig("solrconfig-logmergepolicyfactory.xml", mpClass); } - private void implTestLogMergePolicyConfig(String solrConfigFileName, - Class<? extends LogMergePolicy> mpClass) throws Exception { + private void implTestLogMergePolicyConfig( + String solrConfigFileName, Class<? extends LogMergePolicy> mpClass) throws Exception { initCore(solrConfigFileName, "schema-minimal.xml"); IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore()); // verify some props set to -1 get lucene internal defaults assertEquals(-1, solrConfig.indexConfig.maxBufferedDocs); - assertEquals(IndexWriterConfig.DISABLE_AUTO_FLUSH, - iwc.getMaxBufferedDocs()); + assertEquals(IndexWriterConfig.DISABLE_AUTO_FLUSH, iwc.getMaxBufferedDocs()); assertEquals(-1, solrConfig.indexConfig.ramBufferSizeMB, 0.0D); - assertEquals(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB, - iwc.getRAMBufferSizeMB(), 0.0D); - + assertEquals(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB, iwc.getRAMBufferSizeMB(), 0.0D); LogMergePolicy logMP = assertAndCast(mpClass, iwc.getMergePolicy()); @@ -194,9 +188,8 @@ private void implTestLogMergePolicyConfig(String solrConfigFileName, } /** - * Given a Type and an object asserts that the object is non-null and an - * instance of the specified Type. The object is then cast to that type and - * returned. + * Given a Type and an object, asserts that the object is non-null and an instance of the specified + * Type. The object is then cast to that type and returned. 
*/ public static <T> T assertAndCast(Class<T> clazz, Object o) { assertNotNull(clazz); @@ -208,20 +201,22 @@ public static <T> T assertAndCast(Class<T> clazz, Object o) { public static void assertCommitSomeNewDocs() { for (int i = 0; i < 5; i++) { int val = docIdCounter.getAndIncrement(); - assertU(adoc("id", "" + val, - "a_s", val + "_" + val + "_" + val + "_" + val, - "b_s", val + "_" + val + "_" + val + "_" + val, - "c_s", val + "_" + val + "_" + val + "_" + val, - "d_s", val + "_" + val + "_" + val + "_" + val, - "e_s", val + "_" + val + "_" + val + "_" + val, - "f_s", val + "_" + val + "_" + val + "_" + val)); + assertU( + adoc( + "id", "" + val, + "a_s", val + "_" + val + "_" + val + "_" + val, + "b_s", val + "_" + val + "_" + val + "_" + val, + "c_s", val + "_" + val + "_" + val + "_" + val, + "d_s", val + "_" + val + "_" + val + "_" + val, + "e_s", val + "_" + val + "_" + val + "_" + val, + "f_s", val + "_" + val + "_" + val + "_" + val)); } assertU(commit()); } /** - * Given an SolrCore, asserts that the number of leave segments in - * the index reader matches the expected value. + * Given a SolrCore, asserts that the number of leaf segments in the index reader matches the + * expected value. */ public static void assertNumSegments(SolrCore core, int expected) { RefCounted<SolrIndexSearcher> searcherRef = core.getRegisteredSearcher(); @@ -233,8 +228,8 @@ public static void assertNumSegments(SolrCore core, int expected) { } /** - * Given an SolrCore, asserts that each segment in the (searchable) index - * has a compound file status that matches the expected input. + * Given a SolrCore, asserts that each segment in the (searchable) index has a compound file + * status that matches the expected input. */ public static void assertCompoundSegments(SolrCore core, boolean compound) { RefCounted<SolrIndexSearcher> searcherRef = core.getRegisteredSearcher(); @@ -246,24 +241,24 @@ public static void assertCompoundSegments(SolrCore core, boolean compound) { } /** - * Given an IndexReader, asserts that there is at least one AtomcReader leaf, - * and that all LeafReader leaves are SegmentReader's that have a compound - * file status that matches the expected input. + * Given an IndexReader, asserts that there is at least one AtomicReader leaf, and that all + * LeafReader leaves are SegmentReaders that have a compound file status that matches the + * expected input. 
*/ - private static void assertCompoundSegments(IndexReader reader, - boolean compound) { + private static void assertCompoundSegments(IndexReader reader, boolean compound) { assertNotNull("Null leaves", reader.leaves()); assertTrue("no leaves", 0 < reader.leaves().size()); for (LeafReaderContext atomic : reader.leaves()) { - assertTrue("not a segment reader: " + atomic.reader().toString(), - atomic.reader() instanceof SegmentReader); - - assertEquals("Compound status incorrect for: " + - atomic.reader().toString(), - compound, - ((SegmentReader)atomic.reader()).getSegmentInfo().info.getUseCompoundFile()); + assertTrue( + "not a segment reader: " + atomic.reader().toString(), + atomic.reader() instanceof SegmentReader); + + assertEquals( + "Compound status incorrect for: " + atomic.reader().toString(), + compound, + ((SegmentReader) atomic.reader()).getSegmentInfo().info.getUseCompoundFile()); } } } diff --git a/solr/core/src/test/org/apache/solr/core/TestMinimalConfig.java b/solr/core/src/test/org/apache/solr/core/TestMinimalConfig.java index 4f169fa8b20..1efa74d2d03 100644 --- a/solr/core/src/test/org/apache/solr/core/TestMinimalConfig.java +++ b/solr/core/src/test/org/apache/solr/core/TestMinimalConfig.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.io.InputStream; - import org.apache.solr.SolrTestCaseJ4; import org.junit.BeforeClass; import org.junit.Test; @@ -27,15 +26,16 @@ public class TestMinimalConfig extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-minimal.xml","schema-minimal.xml"); + initCore("solrconfig-minimal.xml", "schema-minimal.xml"); } - // Make sure the content of the lib/ core subfolder is loaded even if there is no <lib> node in the solrconfig + // Make sure the content of the lib/ core subfolder is loaded even if there is no <lib> node in + // the solrconfig @Test public void testLib() throws IOException { SolrResourceLoader loader = h.getCore().getResourceLoader(); InputStream data = null; - String[] expectedFiles = new String[] { "empty-file-main-lib.txt"}; + String[] expectedFiles = new String[] {"empty-file-main-lib.txt"}; for (String f : expectedFiles) { data = loader.openResource(f); assertNotNull("Should have found file " + f, data); diff --git a/solr/core/src/test/org/apache/solr/core/TestNRTOpen.java b/solr/core/src/test/org/apache/solr/core/TestNRTOpen.java index 2b28110418d..9967b4ee84f 100644 --- a/solr/core/src/test/org/apache/solr/core/TestNRTOpen.java +++ b/solr/core/src/test/org/apache/solr/core/TestNRTOpen.java @@ -20,7 +20,6 @@ import java.util.Collections; import java.util.IdentityHashMap; import java.util.Set; - import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.solr.SolrTestCaseJ4; @@ -115,24 +114,28 @@ public void testSharedCores() { } static void assertNRT(int maxDoc) throws IOException { - h.getCore().withSearcher(searcher -> { - DirectoryReader ir = searcher.getRawReader(); - assertEquals(maxDoc, ir.maxDoc()); - assertTrue("expected NRT reader, got: " + ir, ir.toString().contains(":nrt")); - return null; - }); + h.getCore() + .withSearcher( + searcher -> { + DirectoryReader ir = searcher.getRawReader(); + assertEquals(maxDoc, ir.maxDoc()); + assertTrue("expected NRT reader, got: " + ir, ir.toString().contains(":nrt")); + return null; + }); } private Set getCoreCacheKeys() { try { - return h.getCore().withSearcher(searcher -> { - Set set = Collections.newSetFromMap(new IdentityHashMap<>()); - DirectoryReader ir = 
searcher.getRawReader(); - for (LeafReaderContext context : ir.leaves()) { - set.add(context.reader().getCoreCacheHelper().getKey()); - } - return set; - }); + return h.getCore() + .withSearcher( + searcher -> { + Set set = Collections.newSetFromMap(new IdentityHashMap<>()); + DirectoryReader ir = searcher.getRawReader(); + for (LeafReaderContext context : ir.leaves()) { + set.add(context.reader().getCoreCacheHelper().getKey()); + } + return set; + }); } catch (IOException e) { throw new RuntimeException(e); } diff --git a/solr/core/src/test/org/apache/solr/core/TestQuerySenderListener.java b/solr/core/src/test/org/apache/solr/core/TestQuerySenderListener.java index ad564c20f97..cade626afaa 100644 --- a/solr/core/src/test/org/apache/solr/core/TestQuerySenderListener.java +++ b/solr/core/src/test/org/apache/solr/core/TestQuerySenderListener.java @@ -36,16 +36,16 @@ public static void beforeClass() throws Exception { // in the same VM preInitMockListenerCount = MockEventListener.getCreateCount(); - initCore("solrconfig-querysender.xml","schema.xml"); - + initCore("solrconfig-querysender.xml", "schema.xml"); } public void testListenerCreationCounts() { h.getCore(); - assertEquals("Unexpected number of listeners created", - EXPECTED_MOCK_LISTENER_INSTANCES, - MockEventListener.getCreateCount() - preInitMockListenerCount); + assertEquals( + "Unexpected number of listeners created", + EXPECTED_MOCK_LISTENER_INSTANCES, + MockEventListener.getCreateCount() - preInitMockListenerCount); } @Test @@ -53,43 +53,51 @@ public void testRequestHandlerRegistry() { // property values defined in build.xml SolrCore core = h.getCore(); - assertEquals( 2, core.firstSearcherListeners.size() ); - assertEquals( 2, core.newSearcherListeners.size() ); + assertEquals(2, core.firstSearcherListeners.size()); + assertEquals(2, core.newSearcherListeners.size()); } @Test public void testSearcherEvents() throws Exception { SolrCore core = h.getCore(); SolrEventListener newSearcherListener = core.newSearcherListeners.get(0); - assertTrue("Not an instance of QuerySenderListener", newSearcherListener instanceof QuerySenderListener); + assertTrue( + "Not an instance of QuerySenderListener", + newSearcherListener instanceof QuerySenderListener); QuerySenderListener qsl = (QuerySenderListener) newSearcherListener; - h.getCore().withSearcher(currentSearcher -> { - qsl.newSearcher(currentSearcher, null);//test new Searcher - - MockQuerySenderListenerReqHandler mock = (MockQuerySenderListenerReqHandler) core.getRequestHandler("/mock"); - assertNotNull("Mock is null", mock); - - { - String evt = mock.req.getParams().get(EventParams.EVENT); - assertNotNull("Event is null", evt); - assertTrue(evt + " is not equal to " + EventParams.FIRST_SEARCHER, evt.equals(EventParams.FIRST_SEARCHER) == true); - - assertU(adoc("id", "1")); - assertU(commit()); - } - - h.getCore().withSearcher(newSearcher -> { - String evt = mock.req.getParams().get(EventParams.EVENT); - assertNotNull("Event is null", evt); - assertTrue(evt + " is not equal to " + EventParams.NEW_SEARCHER, evt.equals(EventParams.NEW_SEARCHER) == true); - return null; - }); - - return null; - }); - + h.getCore() + .withSearcher( + currentSearcher -> { + qsl.newSearcher(currentSearcher, null); // test new Searcher + + MockQuerySenderListenerReqHandler mock = + (MockQuerySenderListenerReqHandler) core.getRequestHandler("/mock"); + assertNotNull("Mock is null", mock); + + { + String evt = mock.req.getParams().get(EventParams.EVENT); + assertNotNull("Event is null", evt); + assertTrue( + 
evt + " is not equal to " + EventParams.FIRST_SEARCHER, + evt.equals(EventParams.FIRST_SEARCHER) == true); + + assertU(adoc("id", "1")); + assertU(commit()); + } + + h.getCore() + .withSearcher( + newSearcher -> { + String evt = mock.req.getParams().get(EventParams.EVENT); + assertNotNull("Event is null", evt); + assertTrue( + evt + " is not equal to " + EventParams.NEW_SEARCHER, + evt.equals(EventParams.NEW_SEARCHER) == true); + return null; + }); + + return null; + }); } - } - diff --git a/solr/core/src/test/org/apache/solr/core/TestQuerySenderNoQuery.java b/solr/core/src/test/org/apache/solr/core/TestQuerySenderNoQuery.java index a44fe3ce300..9609fb315af 100644 --- a/solr/core/src/test/org/apache/solr/core/TestQuerySenderNoQuery.java +++ b/solr/core/src/test/org/apache/solr/core/TestQuerySenderNoQuery.java @@ -36,15 +36,16 @@ public static void beforeClass() throws Exception { // in the same VM preInitMockListenerCount = MockEventListener.getCreateCount(); - initCore("solrconfig-querysender-noquery.xml","schema.xml"); + initCore("solrconfig-querysender-noquery.xml", "schema.xml"); } public void testListenerCreationCounts() { SolrCore core = h.getCore(); - assertEquals("Unexpected number of listeners created", - EXPECTED_MOCK_LISTENER_INSTANCES, - MockEventListener.getCreateCount() - preInitMockListenerCount); + assertEquals( + "Unexpected number of listeners created", + EXPECTED_MOCK_LISTENER_INSTANCES, + MockEventListener.getCreateCount() - preInitMockListenerCount); } @Test @@ -52,8 +53,8 @@ public void testRequestHandlerRegistry() { // property values defined in build.xml SolrCore core = h.getCore(); - assertEquals( 2, core.firstSearcherListeners.size() ); - assertEquals( 2, core.newSearcherListeners.size() ); + assertEquals(2, core.firstSearcherListeners.size()); + assertEquals(2, core.newSearcherListeners.size()); } // Determine that when the query lists are commented out of both new and @@ -62,23 +63,35 @@ public void testRequestHandlerRegistry() { public void testSearcherEvents() throws Exception { SolrCore core = h.getCore(); SolrEventListener newSearcherListener = core.newSearcherListeners.get(0); - assertTrue("Not an instance of QuerySenderListener", newSearcherListener instanceof QuerySenderListener); + assertTrue( + "Not an instance of QuerySenderListener", + newSearcherListener instanceof QuerySenderListener); QuerySenderListener qsl = (QuerySenderListener) newSearcherListener; - h.getCore().withSearcher(currentSearcher -> { - SolrIndexSearcher dummy = null; - qsl.newSearcher(currentSearcher, dummy);//test first Searcher (since param is null) - MockQuerySenderListenerReqHandler mock = (MockQuerySenderListenerReqHandler) core.getRequestHandler("/mock"); - assertNotNull("Mock is null", mock); - assertNull("Req (firstsearcher) is not null", mock.req); + h.getCore() + .withSearcher( + currentSearcher -> { + SolrIndexSearcher dummy = null; + qsl.newSearcher(currentSearcher, dummy); // test first Searcher (since param is null) + MockQuerySenderListenerReqHandler mock = + (MockQuerySenderListenerReqHandler) core.getRequestHandler("/mock"); + assertNotNull("Mock is null", mock); + assertNull("Req (firstsearcher) is not null", mock.req); - SolrIndexSearcher newSearcher = new SolrIndexSearcher(core, core.getNewIndexDir(), core.getLatestSchema(), core.getSolrConfig().indexConfig, "testQuerySenderNoQuery", false, core.getDirectoryFactory()); + SolrIndexSearcher newSearcher = + new SolrIndexSearcher( + core, + core.getNewIndexDir(), + core.getLatestSchema(), + 
core.getSolrConfig().indexConfig, + "testQuerySenderNoQuery", + false, + core.getDirectoryFactory()); - qsl.newSearcher(newSearcher, currentSearcher); // get newSearcher. - assertNull("Req (newsearcher) is not null", mock.req); - newSearcher.close(); - return null; - }); + qsl.newSearcher(newSearcher, currentSearcher); // get newSearcher. + assertNull("Req (newsearcher) is not null", mock.req); + newSearcher.close(); + return null; + }); } - } diff --git a/solr/core/src/test/org/apache/solr/core/TestReloadAndDeleteDocs.java b/solr/core/src/test/org/apache/solr/core/TestReloadAndDeleteDocs.java index 5d74c9e4e7c..eea1f9b63f9 100644 --- a/solr/core/src/test/org/apache/solr/core/TestReloadAndDeleteDocs.java +++ b/solr/core/src/test/org/apache/solr/core/TestReloadAndDeleteDocs.java @@ -21,7 +21,7 @@ /** Inspired by SOLR-4858 */ public class TestReloadAndDeleteDocs extends SolrTestCaseJ4 { - + @After public void after() throws Exception { System.clearProperty("enable.update.log"); @@ -39,9 +39,10 @@ public void testReloadAndDeleteDocsWithUpdateLog() throws Exception { private void doTest(final boolean useUpdateLog) throws Exception { System.setProperty("enable.update.log", useUpdateLog ? "true" : "false"); initCore("solrconfig.xml", "schema.xml", TEST_HOME()); - assertEquals("UpdateLog existence doesn't match sys prop (test config changed?)", - useUpdateLog, - null != h.getCore().getUpdateHandler().getUpdateLog()); + assertEquals( + "UpdateLog existence doesn't match sys prop (test config changed?)", + useUpdateLog, + null != h.getCore().getUpdateHandler().getUpdateLog()); h.reload(); assertU("*:*"); } diff --git a/solr/core/src/test/org/apache/solr/core/TestShardHandlerFactory.java b/solr/core/src/test/org/apache/solr/core/TestShardHandlerFactory.java index cb18e6a6219..b920c6a0bfb 100644 --- a/solr/core/src/test/org/apache/solr/core/TestShardHandlerFactory.java +++ b/solr/core/src/test/org/apache/solr/core/TestShardHandlerFactory.java @@ -17,14 +17,11 @@ package org.apache.solr.core; import java.nio.file.Path; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.util.NamedList; import org.apache.solr.handler.component.ShardHandlerFactory; -/** - * Tests specifying a custom ShardHandlerFactory - */ +/** Tests specifying a custom ShardHandlerFactory */ public class TestShardHandlerFactory extends SolrTestCaseJ4 { public void testXML() throws Exception { @@ -32,10 +29,9 @@ public void testXML() throws Exception { CoreContainer cc = CoreContainer.createAndLoad(home, home.resolve("solr-shardhandler.xml")); ShardHandlerFactory factory = cc.getShardHandlerFactory(); assertTrue(factory instanceof MockShardHandlerFactory); - NamedList args = ((MockShardHandlerFactory)factory).args; + NamedList args = ((MockShardHandlerFactory) factory).args; assertEquals("myMagicRequiredValue", args.get("myMagicRequiredParameter")); factory.close(); cc.shutdown(); } - } diff --git a/solr/core/src/test/org/apache/solr/core/TestSimpleTextCodec.java b/solr/core/src/test/org/apache/solr/core/TestSimpleTextCodec.java index abc7723e382..ba8fec62196 100644 --- a/solr/core/src/test/org/apache/solr/core/TestSimpleTextCodec.java +++ b/solr/core/src/test/org/apache/solr/core/TestSimpleTextCodec.java @@ -33,31 +33,36 @@ public static void beforeClass() throws Exception { public void test() throws Exception { SolrConfig config = h.getCore().getSolrConfig(); - String codecFactory = config.get("codecFactory").attr("class"); - assertEquals("Unexpected solrconfig codec factory", "solr.SimpleTextCodecFactory", 
codecFactory); + String codecFactory = config.get("codecFactory").attr("class"); + assertEquals( + "Unexpected solrconfig codec factory", "solr.SimpleTextCodecFactory", codecFactory); assertEquals("Unexpected core codec", "SimpleText", h.getCore().getCodec().getName()); RefCounted writerRef = h.getCore().getSolrCoreState().getIndexWriter(h.getCore()); try { IndexWriter writer = writerRef.get(); - assertEquals("Unexpected codec in IndexWriter config", - "SimpleText", writer.getConfig().getCodec().getName()); + assertEquals( + "Unexpected codec in IndexWriter config", + "SimpleText", + writer.getConfig().getCodec().getName()); } finally { writerRef.decref(); } - assertU(add(doc("id","1", "text","textual content goes here"))); + assertU(add(doc("id", "1", "text", "textual content goes here"))); assertU(commit()); - h.getCore().withSearcher(searcher -> { - SegmentInfos infos = SegmentInfos.readLatestCommit(searcher.getIndexReader().directory()); - SegmentInfo info = infos.info(infos.size() - 1).info; - assertEquals("Unexpected segment codec", "SimpleText", info.getCodec().getName()); - return null; - }); + h.getCore() + .withSearcher( + searcher -> { + SegmentInfos infos = + SegmentInfos.readLatestCommit(searcher.getIndexReader().directory()); + SegmentInfo info = infos.info(infos.size() - 1).info; + assertEquals("Unexpected segment codec", "SimpleText", info.getCodec().getName()); + return null; + }); - assertQ(req("q", "id:1"), - "*[count(//doc)=1]"); + assertQ(req("q", "id:1"), "*[count(//doc)=1]"); } } diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java index b7e833e7894..65e5f33b3e0 100644 --- a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java +++ b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java @@ -16,6 +16,10 @@ */ package org.apache.solr.core; +import static java.util.Arrays.asList; +import static org.apache.solr.common.util.Utils.getObjectByPath; + +import com.google.common.collect.ImmutableList; import java.io.*; import java.lang.invoke.MethodHandles; import java.nio.ByteBuffer; @@ -28,8 +32,6 @@ import java.util.concurrent.TimeUnit; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; - -import com.google.common.collect.ImmutableList; import org.apache.commons.io.FileUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -56,9 +58,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static java.util.Arrays.asList; -import static org.apache.solr.common.util.Utils.getObjectByPath; - public class TestSolrConfigHandler extends RestTestBase { private static final int TIMEOUT_S = 10; @@ -70,53 +69,53 @@ public class TestSolrConfigHandler extends RestTestBase { private static final String collection = "collection1"; private static final String confDir = collection + "/conf"; - public static ByteBuffer getFileContent(String f) throws IOException { - return getFileContent(f, true); - } + public static ByteBuffer getFileContent(String f) throws IOException { + return getFileContent(f, true); + } - /** - * @param loadFromClassPath if true, it will look in the classpath to find the file, - * otherwise load from absolute filesystem path. - */ - public static ByteBuffer getFileContent(String f, boolean loadFromClassPath) throws IOException { - ByteBuffer jar; - File file = loadFromClassPath ? 
getFile(f): new File(f); - try (FileInputStream fis = new FileInputStream(file)) { - byte[] buf = new byte[fis.available()]; - // TODO: This should check that we read the entire stream - fis.read(buf); - jar = ByteBuffer.wrap(buf); - } - return jar; + /** + * @param loadFromClassPath if true, it will look in the classpath to find the file, otherwise + * load from absolute filesystem path. + */ + public static ByteBuffer getFileContent(String f, boolean loadFromClassPath) throws IOException { + ByteBuffer jar; + File file = loadFromClassPath ? getFile(f) : new File(f); + try (FileInputStream fis = new FileInputStream(file)) { + byte[] buf = new byte[fis.available()]; + // TODO: This should check that we read the entire stream + fis.read(buf); + jar = ByteBuffer.wrap(buf); } + return jar; + } - public static ByteBuffer persistZip(String loc, Class... classes) throws IOException { - ByteBuffer jar = generateZip(classes); - try (FileOutputStream fos = new FileOutputStream(loc)){ - fos.write(jar.array(), jar.arrayOffset(), jar.limit()); - fos.flush(); - } - return jar; + public static ByteBuffer persistZip(String loc, Class... classes) throws IOException { + ByteBuffer jar = generateZip(classes); + try (FileOutputStream fos = new FileOutputStream(loc)) { + fos.write(jar.array(), jar.arrayOffset(), jar.limit()); + fos.flush(); } + return jar; + } - public static ByteBuffer generateZip(Class... classes) throws IOException { - SimplePostTool.BAOS bos = new SimplePostTool.BAOS(); - try (ZipOutputStream zipOut = new ZipOutputStream(bos)) { - zipOut.setLevel(ZipOutputStream.DEFLATED); - for (Class c : classes) { - String path = c.getName().replace('.', '/').concat(".class"); - ZipEntry entry = new ZipEntry(path); - ByteBuffer b = SimplePostTool.inputStreamToByteArray(c.getClassLoader().getResourceAsStream(path)); - zipOut.putNextEntry(entry); - zipOut.write(b.array(), b.arrayOffset(), b.limit()); - zipOut.closeEntry(); - } + public static ByteBuffer generateZip(Class... 
classes) throws IOException { + SimplePostTool.BAOS bos = new SimplePostTool.BAOS(); + try (ZipOutputStream zipOut = new ZipOutputStream(bos)) { + zipOut.setLevel(ZipOutputStream.DEFLATED); + for (Class c : classes) { + String path = c.getName().replace('.', '/').concat(".class"); + ZipEntry entry = new ZipEntry(path); + ByteBuffer b = + SimplePostTool.inputStreamToByteArray(c.getClassLoader().getResourceAsStream(path)); + zipOut.putNextEntry(entry); + zipOut.write(b.array(), b.arrayOffset(), b.limit()); + zipOut.closeEntry(); } - return bos.getByteBuffer(); } + return bos.getByteBuffer(); + } - - @Before + @Before public void before() throws Exception { tmpSolrHome = createTempDir().toFile(); tmpConfDir = new File(tmpSolrHome, confDir); @@ -127,11 +126,17 @@ public void before() throws Exception { System.setProperty("managed.schema.mutable", "true"); System.setProperty("enable.update.log", "false"); - createJettyAndHarness(tmpSolrHome.getAbsolutePath(), "solrconfig-managed-schema.xml", "schema-rest.xml", - "/solr", true, extraServlets); + createJettyAndHarness( + tmpSolrHome.getAbsolutePath(), + "solrconfig-managed-schema.xml", + "schema-rest.xml", + "/solr", + true, + extraServlets); if (random().nextBoolean()) { log.info("These tests are run with V2 API"); - restTestHarness.setServerProvider(() -> jetty.getBaseUrl().toString() + "/____v2/cores/" + DEFAULT_TEST_CORENAME); + restTestHarness.setServerProvider( + () -> jetty.getBaseUrl().toString() + "/____v2/cores/" + DEFAULT_TEST_CORENAME); } } @@ -148,7 +153,6 @@ public void after() throws Exception { restTestHarness = null; } - public void testProperty() throws Exception { RestTestHarness harness = restTestHarness; MapWriter confMap = getRespMap("/config", harness); @@ -159,61 +163,62 @@ public void testProperty() throws Exception { assertNotNull(confMap._get(asList("config", "requestHandler", "/admin/file"), null)); assertNotNull(confMap._get(asList("config", "requestHandler", "/admin/ping"), null)); - String payload = "{\n" + - " 'set-property' : { 'updateHandler.autoCommit.maxDocs':100, 'updateHandler.autoCommit.maxTime':10 , 'requestDispatcher.requestParsers.addHttpRequestToContext':true} \n" + - " }"; + String payload = + "{\n" + + " 'set-property' : { 'updateHandler.autoCommit.maxDocs':100, 'updateHandler.autoCommit.maxTime':10 , 'requestDispatcher.requestParsers.addHttpRequestToContext':true} \n" + + " }"; runConfigCommand(harness, "/config", payload); MapWriter m = getRespMap("/config/overlay", harness); - MapWriter props =null; + MapWriter props = null; assertEquals("100", m._getStr("overlay/props/updateHandler/autoCommit/maxDocs", null)); - assertEquals("10", m._getStr("overlay/props/updateHandler/autoCommit/maxTime",null)); + assertEquals("10", m._getStr("overlay/props/updateHandler/autoCommit/maxTime", null)); - m = getRespMap("/config/updateHandler", harness); - assertNotNull(m._get("config/updateHandler/commitWithin/softCommit",null)); - assertNotNull(m._get("config/updateHandler/autoCommit/maxDocs",null)); - assertNotNull(m._get("config/updateHandler/autoCommit/maxTime",null)); + m = getRespMap("/config/updateHandler", harness); + assertNotNull(m._get("config/updateHandler/commitWithin/softCommit", null)); + assertNotNull(m._get("config/updateHandler/autoCommit/maxDocs", null)); + assertNotNull(m._get("config/updateHandler/autoCommit/maxTime", null)); - m = getRespMap("/config", harness); + m = getRespMap("/config", harness); assertNotNull(m); - assertEquals("100", 
m._getStr("config/updateHandler/autoCommit/maxDocs",null)); - assertEquals("10", m._getStr("config/updateHandler/autoCommit/maxTime",null)); - assertEquals("true", m._getStr("config/requestDispatcher/requestParsers/addHttpRequestToContext",null)); - payload = "{\n" + - " 'unset-property' : 'updateHandler.autoCommit.maxDocs' \n" + - " }"; + assertEquals("100", m._getStr("config/updateHandler/autoCommit/maxDocs", null)); + assertEquals("10", m._getStr("config/updateHandler/autoCommit/maxTime", null)); + assertEquals( + "true", m._getStr("config/requestDispatcher/requestParsers/addHttpRequestToContext", null)); + payload = "{\n" + " 'unset-property' : 'updateHandler.autoCommit.maxDocs' \n" + " }"; runConfigCommand(harness, "/config", payload); - m = getRespMap("/config/overlay", harness); - assertNull(m._get("overlay/props/updateHandler/autoCommit/maxDocs",null)); - assertEquals("10", m._getStr("overlay/props/updateHandler/autoCommit/maxTime",null)); + m = getRespMap("/config/overlay", harness); + assertNull(m._get("overlay/props/updateHandler/autoCommit/maxDocs", null)); + assertEquals("10", m._getStr("overlay/props/updateHandler/autoCommit/maxTime", null)); } public void testUserProp() throws Exception { RestTestHarness harness = restTestHarness; - String payload = "{\n" + - " 'set-user-property' : { 'my.custom.variable.a':'MODIFIEDA'," + - " 'my.custom.variable.b':'MODIFIEDB' } \n" + - " }"; + String payload = + "{\n" + + " 'set-user-property' : { 'my.custom.variable.a':'MODIFIEDA'," + + " 'my.custom.variable.b':'MODIFIEDB' } \n" + + " }"; runConfigCommand(harness, "/config", payload); - MapWriter m = getRespMap("/config/overlay", harness);//.get("overlay"); - assertEquals(m._get("overlay/userProps/my.custom.variable.a",null), "MODIFIEDA"); - assertEquals(m._get("overlay/userProps/my.custom.variable.b",null), "MODIFIEDB"); - - m = getRespMap("/dump?json.nl=map&initArgs=true", harness);//.get("initArgs"); + MapWriter m = getRespMap("/config/overlay", harness); // .get("overlay"); + assertEquals(m._get("overlay/userProps/my.custom.variable.a", null), "MODIFIEDA"); + assertEquals(m._get("overlay/userProps/my.custom.variable.b", null), "MODIFIEDB"); - assertEquals("MODIFIEDA", m._get("initArgs/defaults/a",null)); - assertEquals("MODIFIEDB", m._get("initArgs/defaults/b",null)); + m = getRespMap("/dump?json.nl=map&initArgs=true", harness); // .get("initArgs"); + assertEquals("MODIFIEDA", m._get("initArgs/defaults/a", null)); + assertEquals("MODIFIEDB", m._get("initArgs/defaults/b", null)); } public void testReqHandlerAPIs() throws Exception { reqhandlertests(restTestHarness, null, null); } - public static void runConfigCommand(RestTestHarness harness, String uri, String payload) throws IOException { + public static void runConfigCommand(RestTestHarness harness, String uri, String payload) + throws IOException { String json = SolrTestCaseJ4.json(payload); log.info("going to send config command. path {} , payload: {}", uri, payload); String response = harness.post(uri, json); @@ -222,27 +227,36 @@ public static void runConfigCommand(RestTestHarness harness, String uri, String assertNull(response, map.get("errors")); // Will this ever be returned? 
} - public static void runConfigCommandExpectFailure(RestTestHarness harness, String uri, String payload, String expectedErrorMessage) throws Exception { + public static void runConfigCommandExpectFailure( + RestTestHarness harness, String uri, String payload, String expectedErrorMessage) + throws Exception { String json = SolrTestCaseJ4.json(payload); log.info("going to send config command. path {} , payload: {}", uri, payload); String response = harness.post(uri, json); - Map map = (Map)Utils.fromJSONString(response); + Map map = (Map) Utils.fromJSONString(response); assertNotNull(response, map.get("errorMessages")); assertNotNull(response, map.get("error")); - assertTrue("Expected status != 0: " + response, 0L != (Long)((Map)map.get("responseHeader")).get("status")); - List errorDetails = (List)((Map)map.get("error")).get("details"); - List errorMessages = (List)((Map)errorDetails.get(0)).get("errorMessages"); - assertTrue("Expected '" + expectedErrorMessage + "': " + response, + assertTrue( + "Expected status != 0: " + response, + 0L != (Long) ((Map) map.get("responseHeader")).get("status")); + List errorDetails = (List) ((Map) map.get("error")).get("details"); + List errorMessages = (List) ((Map) errorDetails.get(0)).get("errorMessages"); + assertTrue( + "Expected '" + expectedErrorMessage + "': " + response, errorMessages.get(0).toString().contains(expectedErrorMessage)); } - public static void reqhandlertests(RestTestHarness writeHarness, String testServerBaseUrl, CloudSolrClient cloudSolrClient) throws Exception { - String payload = "{\n" + - "'create-requesthandler' : { 'name' : '/x', 'class': 'org.apache.solr.handler.DumpRequestHandler' , 'startup' : 'lazy'}\n" + - "}"; + public static void reqhandlertests( + RestTestHarness writeHarness, String testServerBaseUrl, CloudSolrClient cloudSolrClient) + throws Exception { + String payload = + "{\n" + + "'create-requesthandler' : { 'name' : '/x', 'class': 'org.apache.solr.handler.DumpRequestHandler' , 'startup' : 'lazy'}\n" + + "}"; runConfigCommand(writeHarness, "/config", payload); - testForResponseElement(writeHarness, + testForResponseElement( + writeHarness, testServerBaseUrl, "/config/overlay", cloudSolrClient, @@ -250,13 +264,15 @@ public static void reqhandlertests(RestTestHarness writeHarness, String testServ "lazy", TIMEOUT_S); - payload = "{\n" + - "'update-requesthandler' : { 'name' : '/x', 'class': 'org.apache.solr.handler.DumpRequestHandler' ,registerPath :'/solr,/v2', " + - " 'startup' : 'lazy' , 'a':'b' , 'defaults': {'def_a':'def A val', 'multival':['a','b','c']}}\n" + - "}"; + payload = + "{\n" + + "'update-requesthandler' : { 'name' : '/x', 'class': 'org.apache.solr.handler.DumpRequestHandler' ,registerPath :'/solr,/v2', " + + " 'startup' : 'lazy' , 'a':'b' , 'defaults': {'def_a':'def A val', 'multival':['a','b','c']}}\n" + + "}"; runConfigCommand(writeHarness, "/config", payload); - testForResponseElement(writeHarness, + testForResponseElement( + writeHarness, testServerBaseUrl, "/config/overlay", cloudSolrClient, @@ -264,15 +280,17 @@ public static void reqhandlertests(RestTestHarness writeHarness, String testServ "b", TIMEOUT_S); - payload = "{\n" + - "'update-requesthandler' : { 'name' : '/dump', " + - "'initParams': 'a'," + - "'class': 'org.apache.solr.handler.DumpRequestHandler' ," + - " 'defaults': {'a':'A','b':'B','c':'C'}}\n" + - "}"; + payload = + "{\n" + + "'update-requesthandler' : { 'name' : '/dump', " + + "'initParams': 'a'," + + "'class': 'org.apache.solr.handler.DumpRequestHandler' ," + + " 
'defaults': {'a':'A','b':'B','c':'C'}}\n" + + "}"; runConfigCommand(writeHarness, "/config", payload); - testForResponseElement(writeHarness, + testForResponseElement( + writeHarness, testServerBaseUrl, "/config/overlay", cloudSolrClient, @@ -280,7 +298,8 @@ public static void reqhandlertests(RestTestHarness writeHarness, String testServ "C", TIMEOUT_S); - testForResponseElement(writeHarness, + testForResponseElement( + writeHarness, testServerBaseUrl, "/x?getdefaults=true&json.nl=map", cloudSolrClient, @@ -288,7 +307,8 @@ public static void reqhandlertests(RestTestHarness writeHarness, String testServ "def A val", TIMEOUT_S); - testForResponseElement(writeHarness, + testForResponseElement( + writeHarness, testServerBaseUrl, "/x?param=multival&json.nl=map", cloudSolrClient, @@ -296,41 +316,46 @@ public static void reqhandlertests(RestTestHarness writeHarness, String testServ asList("a", "b", "c"), TIMEOUT_S); - payload = "{\n" + - "'delete-requesthandler' : '/x'" + - "}"; + payload = "{\n" + "'delete-requesthandler' : '/x'" + "}"; runConfigCommand(writeHarness, "/config", payload); boolean success = false; long startTime = System.nanoTime(); int maxTimeoutSeconds = 10; - while (TimeUnit.SECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutSeconds) { + while (TimeUnit.SECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) + < maxTimeoutSeconds) { String uri = "/config/overlay"; - Map m = testServerBaseUrl == null ? getRespMap(uri, writeHarness) : TestSolrConfigHandlerConcurrent.getAsMap(testServerBaseUrl + uri, cloudSolrClient); + Map m = + testServerBaseUrl == null + ? getRespMap(uri, writeHarness) + : TestSolrConfigHandlerConcurrent.getAsMap(testServerBaseUrl + uri, cloudSolrClient); if (null == Utils.getObjectByPath(m, true, asList("overlay", "requestHandler", "/x", "a"))) { success = true; break; } Thread.sleep(100); - } assertTrue("Could not delete requestHandler ", success); - payload = "{\n" + - "'create-queryconverter' : { 'name' : 'qc', 'class': 'org.apache.solr.spelling.SpellingQueryConverter'}\n" + - "}"; + payload = + "{\n" + + "'create-queryconverter' : { 'name' : 'qc', 'class': 'org.apache.solr.spelling.SpellingQueryConverter'}\n" + + "}"; runConfigCommand(writeHarness, "/config", payload); - testForResponseElement(writeHarness, + testForResponseElement( + writeHarness, testServerBaseUrl, "/config", cloudSolrClient, asList("config", "queryConverter", "qc", "class"), "org.apache.solr.spelling.SpellingQueryConverter", TIMEOUT_S); - payload = "{\n" + - "'update-queryconverter' : { 'name' : 'qc', 'class': 'org.apache.solr.spelling.SuggestQueryConverter'}\n" + - "}"; + payload = + "{\n" + + "'update-queryconverter' : { 'name' : 'qc', 'class': 'org.apache.solr.spelling.SuggestQueryConverter'}\n" + + "}"; runConfigCommand(writeHarness, "/config", payload); - testForResponseElement(writeHarness, + testForResponseElement( + writeHarness, testServerBaseUrl, "/config", cloudSolrClient, @@ -338,11 +363,10 @@ public static void reqhandlertests(RestTestHarness writeHarness, String testServ "org.apache.solr.spelling.SuggestQueryConverter", TIMEOUT_S); - payload = "{\n" + - "'delete-queryconverter' : 'qc'" + - "}"; + payload = "{\n" + "'delete-queryconverter' : 'qc'" + "}"; runConfigCommand(writeHarness, "/config", payload); - testForResponseElement(writeHarness, + testForResponseElement( + writeHarness, testServerBaseUrl, "/config", cloudSolrClient, @@ -350,22 +374,26 @@ public static void reqhandlertests(RestTestHarness writeHarness, String testServ 
null, TIMEOUT_S); - payload = "{\n" + - "'create-searchcomponent' : { 'name' : 'tc', 'class': 'org.apache.solr.handler.component.TermsComponent'}\n" + - "}"; + payload = + "{\n" + + "'create-searchcomponent' : { 'name' : 'tc', 'class': 'org.apache.solr.handler.component.TermsComponent'}\n" + + "}"; runConfigCommand(writeHarness, "/config", payload); - testForResponseElement(writeHarness, + testForResponseElement( + writeHarness, testServerBaseUrl, "/config", cloudSolrClient, asList("config", "searchComponent", "tc", "class"), "org.apache.solr.handler.component.TermsComponent", TIMEOUT_S); - payload = "{\n" + - "'update-searchcomponent' : { 'name' : 'tc', 'class': 'org.apache.solr.handler.component.TermVectorComponent' }\n" + - "}"; + payload = + "{\n" + + "'update-searchcomponent' : { 'name' : 'tc', 'class': 'org.apache.solr.handler.component.TermVectorComponent' }\n" + + "}"; runConfigCommand(writeHarness, "/config", payload); - testForResponseElement(writeHarness, + testForResponseElement( + writeHarness, testServerBaseUrl, "/config", cloudSolrClient, @@ -373,23 +401,25 @@ public static void reqhandlertests(RestTestHarness writeHarness, String testServ "org.apache.solr.handler.component.TermVectorComponent", TIMEOUT_S); - payload = "{\n" + - "'delete-searchcomponent' : 'tc'" + - "}"; + payload = "{\n" + "'delete-searchcomponent' : 'tc'" + "}"; runConfigCommand(writeHarness, "/config", payload); - testForResponseElement(writeHarness, + testForResponseElement( + writeHarness, testServerBaseUrl, "/config", cloudSolrClient, asList("config", "searchComponent", "tc"), null, TIMEOUT_S); - // - payload = "{\n" + - "'create-valuesourceparser' : { 'name' : 'cu', 'class': 'org.apache.solr.core.CountUsageValueSourceParser'}\n" + - "}"; + // + payload = + "{\n" + + "'create-valuesourceparser' : { 'name' : 'cu', 'class': 'org.apache.solr.core.CountUsageValueSourceParser'}\n" + + "}"; runConfigCommand(writeHarness, "/config", payload); - testForResponseElement(writeHarness, + testForResponseElement( + writeHarness, testServerBaseUrl, "/config", cloudSolrClient, @@ -397,13 +427,15 @@ public static void reqhandlertests(RestTestHarness writeHarness, String testServ "org.apache.solr.core.CountUsageValueSourceParser", TIMEOUT_S); // -// 0.0 -// - payload = "{\n" + - "'update-valuesourceparser' : { 'name' : 'cu', 'class': 'org.apache.solr.search.function.NvlValueSourceParser'}\n" + - "}"; + // 0.0 + // + payload = + "{\n" + + "'update-valuesourceparser' : { 'name' : 'cu', 'class': 'org.apache.solr.search.function.NvlValueSourceParser'}\n" + + "}"; runConfigCommand(writeHarness, "/config", payload); - testForResponseElement(writeHarness, + testForResponseElement( + writeHarness, testServerBaseUrl, "/config", cloudSolrClient, @@ -411,25 +443,27 @@ public static void reqhandlertests(RestTestHarness writeHarness, String testServ "org.apache.solr.search.function.NvlValueSourceParser", TIMEOUT_S); - payload = "{\n" + - "'delete-valuesourceparser' : 'cu'" + - "}"; + payload = "{\n" + "'delete-valuesourceparser' : 'cu'" + "}"; runConfigCommand(writeHarness, "/config", payload); - testForResponseElement(writeHarness, + testForResponseElement( + writeHarness, testServerBaseUrl, "/config", cloudSolrClient, asList("config", "valueSourceParser", "cu"), null, TIMEOUT_S); -// -// 5 -// - payload = "{\n" + - "'create-transformer' : { 'name' : 'mytrans', 'class': 'org.apache.solr.response.transform.ValueAugmenterFactory', 'value':'5'}\n" + - "}"; + // + // 5 + // + payload = + "{\n" + + "'create-transformer' : { 'name' : 
'mytrans', 'class': 'org.apache.solr.response.transform.ValueAugmenterFactory', 'value':'5'}\n" + + "}"; runConfigCommand(writeHarness, "/config", payload); - testForResponseElement(writeHarness, + testForResponseElement( + writeHarness, testServerBaseUrl, "/config", cloudSolrClient, @@ -437,11 +471,13 @@ public static void reqhandlertests(RestTestHarness writeHarness, String testServ "org.apache.solr.response.transform.ValueAugmenterFactory", TIMEOUT_S); - payload = "{\n" + - "'update-transformer' : { 'name' : 'mytrans', 'class': 'org.apache.solr.response.transform.ValueAugmenterFactory', 'value':'6'}\n" + - "}"; + payload = + "{\n" + + "'update-transformer' : { 'name' : 'mytrans', 'class': 'org.apache.solr.response.transform.ValueAugmenterFactory', 'value':'6'}\n" + + "}"; runConfigCommand(writeHarness, "/config", payload); - testForResponseElement(writeHarness, + testForResponseElement( + writeHarness, testServerBaseUrl, "/config", cloudSolrClient, @@ -449,80 +485,92 @@ public static void reqhandlertests(RestTestHarness writeHarness, String testServ "6", TIMEOUT_S); - payload = "{\n" + - "'delete-transformer' : 'mytrans'," + - "'create-initparams' : { 'name' : 'hello', 'key':'val'}\n" + - "}"; + payload = + "{\n" + + "'delete-transformer' : 'mytrans'," + + "'create-initparams' : { 'name' : 'hello', 'key':'val'}\n" + + "}"; runConfigCommand(writeHarness, "/config", payload); - Map map = testForResponseElement(writeHarness, - testServerBaseUrl, - "/config", - cloudSolrClient, - asList("config", "transformer", "mytrans"), - null, - TIMEOUT_S); + Map map = + testForResponseElement( + writeHarness, + testServerBaseUrl, + "/config", + cloudSolrClient, + asList("config", "transformer", "mytrans"), + null, + TIMEOUT_S); List l = (List) Utils.getObjectByPath(map, false, asList("config", "initParams")); - assertNotNull("no object /config/initParams : "+ map , l); - assertEquals( 2, l.size()); - assertEquals( "val", ((Map)l.get(1)).get("key") ); - - - payload = "{\n" + - " 'add-searchcomponent': {\n" + - " 'name': 'myspellcheck',\n" + - " 'class': 'solr.SpellCheckComponent',\n" + - " 'queryAnalyzerFieldType': 'text_general',\n" + - " 'spellchecker': {\n" + - " 'name': 'default',\n" + - " 'field': '_text_',\n" + - " 'class': 'solr.DirectSolrSpellChecker'\n" + - " }\n" + - " }\n" + - "}"; + assertNotNull("no object /config/initParams : " + map, l); + assertEquals(2, l.size()); + assertEquals("val", ((Map) l.get(1)).get("key")); + + payload = + "{\n" + + " 'add-searchcomponent': {\n" + + " 'name': 'myspellcheck',\n" + + " 'class': 'solr.SpellCheckComponent',\n" + + " 'queryAnalyzerFieldType': 'text_general',\n" + + " 'spellchecker': {\n" + + " 'name': 'default',\n" + + " 'field': '_text_',\n" + + " 'class': 'solr.DirectSolrSpellChecker'\n" + + " }\n" + + " }\n" + + "}"; runConfigCommand(writeHarness, "/config", payload); - map = testForResponseElement(writeHarness, - testServerBaseUrl, - "/config", - cloudSolrClient, - asList("config", "searchComponent", "myspellcheck", "spellchecker", "class"), - "solr.DirectSolrSpellChecker", - TIMEOUT_S); - - payload = "{\n" + - " 'add-requesthandler': {\n" + - " name : '/dump100',\n" + - " registerPath :'/solr,/v2',"+ - " class : 'org.apache.solr.handler.DumpRequestHandler'," + - " suggester: [{name: s1,lookupImpl: FuzzyLookupFactory, dictionaryImpl : DocumentDictionaryFactory}," + - " {name: s2,lookupImpl: FuzzyLookupFactory , dictionaryImpl : DocumentExpressionDictionaryFactory}]" + - " }\n" + - "}"; + map = + testForResponseElement( + writeHarness, + 
testServerBaseUrl, + "/config", + cloudSolrClient, + asList("config", "searchComponent", "myspellcheck", "spellchecker", "class"), + "solr.DirectSolrSpellChecker", + TIMEOUT_S); + + payload = + "{\n" + + " 'add-requesthandler': {\n" + + " name : '/dump100',\n" + + " registerPath :'/solr,/v2'," + + " class : 'org.apache.solr.handler.DumpRequestHandler'," + + " suggester: [{name: s1,lookupImpl: FuzzyLookupFactory, dictionaryImpl : DocumentDictionaryFactory}," + + " {name: s2,lookupImpl: FuzzyLookupFactory , dictionaryImpl : DocumentExpressionDictionaryFactory}]" + + " }\n" + + "}"; runConfigCommand(writeHarness, "/config", payload); - map = testForResponseElement(writeHarness, - testServerBaseUrl, - "/config", - cloudSolrClient, - asList("config", "requestHandler", "/dump100", "class"), - "org.apache.solr.handler.DumpRequestHandler", - TIMEOUT_S); + map = + testForResponseElement( + writeHarness, + testServerBaseUrl, + "/config", + cloudSolrClient, + asList("config", "requestHandler", "/dump100", "class"), + "org.apache.solr.handler.DumpRequestHandler", + TIMEOUT_S); map = getRespMap("/dump100?json.nl=arrmap&initArgs=true", writeHarness); List initArgs = (List) map.get("initArgs"); assertNotNull(initArgs); assertTrue(initArgs.size() >= 2); - assertTrue(((Map)initArgs.get(2)).containsKey("suggester")); - assertTrue(((Map)initArgs.get(1)).containsKey("suggester")); - - payload = "{\n" + - "'add-requesthandler' : { 'name' : '/dump101', 'class': " + - "'" + CacheTest.class.getName() + "', " + - " registerPath :'/solr,/v2'"+ - ", 'startup' : 'lazy'}\n" + - "}"; + assertTrue(((Map) initArgs.get(2)).containsKey("suggester")); + assertTrue(((Map) initArgs.get(1)).containsKey("suggester")); + + payload = + "{\n" + + "'add-requesthandler' : { 'name' : '/dump101', 'class': " + + "'" + + CacheTest.class.getName() + + "', " + + " registerPath :'/solr,/v2'" + + ", 'startup' : 'lazy'}\n" + + "}"; runConfigCommand(writeHarness, "/config", payload); - testForResponseElement(writeHarness, + testForResponseElement( + writeHarness, testServerBaseUrl, "/config/overlay", cloudSolrClient, @@ -530,45 +578,72 @@ public static void reqhandlertests(RestTestHarness writeHarness, String testServ "lazy", TIMEOUT_S); - payload = "{\n" + - "'add-cache' : {name:'lfuCacheDecayFalse', class:'solr.search.CaffeineCache', size:10 ,initialSize:9 , timeDecay:false }," + - "'add-cache' : {name: 'perSegFilter', class: 'solr.search.CaffeineCache', size:10, initialSize:0 , autowarmCount:10}}"; + payload = + "{\n" + + "'add-cache' : {name:'lfuCacheDecayFalse', class:'solr.search.CaffeineCache', size:10 ,initialSize:9 , timeDecay:false }," + + "'add-cache' : {name: 'perSegFilter', class: 'solr.search.CaffeineCache', size:10, initialSize:0 , autowarmCount:10}}"; runConfigCommand(writeHarness, "/config", payload); - map = testForResponseElement(writeHarness, - testServerBaseUrl, - "/config/overlay", - cloudSolrClient, - asList("overlay", "cache", "lfuCacheDecayFalse", "class"), + map = + testForResponseElement( + writeHarness, + testServerBaseUrl, + "/config/overlay", + cloudSolrClient, + asList("overlay", "cache", "lfuCacheDecayFalse", "class"), + "solr.search.CaffeineCache", + TIMEOUT_S); + assertEquals( "solr.search.CaffeineCache", - TIMEOUT_S); - assertEquals("solr.search.CaffeineCache",getObjectByPath(map, true, ImmutableList.of("overlay", "cache", "perSegFilter", "class"))); - - map = getRespMap("/dump101?cacheNames=lfuCacheDecayFalse&cacheNames=perSegFilter", writeHarness); - assertEquals("Actual output "+ Utils.toJSONString(map), 
"org.apache.solr.search.CaffeineCache",getObjectByPath(map, true, ImmutableList.of( "caches", "perSegFilter"))); - assertEquals("Actual output "+ Utils.toJSONString(map), "org.apache.solr.search.CaffeineCache",getObjectByPath(map, true, ImmutableList.of( "caches", "lfuCacheDecayFalse"))); - + getObjectByPath(map, true, ImmutableList.of("overlay", "cache", "perSegFilter", "class"))); + + map = + getRespMap("/dump101?cacheNames=lfuCacheDecayFalse&cacheNames=perSegFilter", writeHarness); + assertEquals( + "Actual output " + Utils.toJSONString(map), + "org.apache.solr.search.CaffeineCache", + getObjectByPath(map, true, ImmutableList.of("caches", "perSegFilter"))); + assertEquals( + "Actual output " + Utils.toJSONString(map), + "org.apache.solr.search.CaffeineCache", + getObjectByPath(map, true, ImmutableList.of("caches", "lfuCacheDecayFalse"))); } - + public void testFailures() throws Exception { String payload = "{ not-a-real-command: { param1: value1, param2: value2 } }"; - runConfigCommandExpectFailure(restTestHarness, "/config", payload, "Unknown operation 'not-a-real-command'"); + runConfigCommandExpectFailure( + restTestHarness, "/config", payload, "Unknown operation 'not-a-real-command'"); payload = "{ set-property: { update.autoCreateFields: false } }"; - runConfigCommandExpectFailure(restTestHarness, "/config", payload, "'update.autoCreateFields' is not an editable property"); - + runConfigCommandExpectFailure( + restTestHarness, + "/config", + payload, + "'update.autoCreateFields' is not an editable property"); + payload = "{ set-property: { updateHandler.autoCommit.maxDocs: false } }"; - runConfigCommandExpectFailure(restTestHarness, "/config", payload, "Property updateHandler.autoCommit.maxDocs must be of Integer type"); + runConfigCommandExpectFailure( + restTestHarness, + "/config", + payload, + "Property updateHandler.autoCommit.maxDocs must be of Integer type"); payload = "{ unset-property: not-an-editable-property }"; - runConfigCommandExpectFailure(restTestHarness, "/config", payload, "'[not-an-editable-property]' is not an editable property"); - - for (String component : new String[] { - "requesthandler", "searchcomponent", "initparams", "queryresponsewriter", "queryparser", - "valuesourceparser", "transformer", "updateprocessor", "queryconverter", "listener"}) { - for (String operation : new String[] { "add", "update" }) { + runConfigCommandExpectFailure( + restTestHarness, + "/config", + payload, + "'[not-an-editable-property]' is not an editable property"); + + for (String component : + new String[] { + "requesthandler", "searchcomponent", "initparams", "queryresponsewriter", "queryparser", + "valuesourceparser", "transformer", "updateprocessor", "queryconverter", "listener" + }) { + for (String operation : new String[] {"add", "update"}) { payload = "{ " + operation + "-" + component + ": { param1: value1 } }"; - runConfigCommandExpectFailure(restTestHarness, "/config", payload, "'name' is a required field"); + runConfigCommandExpectFailure( + restTestHarness, "/config", payload, "'name' is a required field"); } payload = "{ delete-" + component + ": not-a-real-component-name }"; runConfigCommandExpectFailure(restTestHarness, "/config", payload, "NO such "); @@ -580,41 +655,48 @@ public static class CacheTest extends DumpRequestHandler { public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws IOException { super.handleRequestBody(req, rsp); String[] caches = req.getParams().getParams("cacheNames"); - if(caches != null && caches.length>0){ + if 
(caches != null && caches.length > 0) { HashMap m = new HashMap<>(); rsp.add("caches", m); for (String c : caches) { SolrCache cache = req.getSearcher().getCache(c); - if(cache != null) m.put(c, cache.getClass().getName()); + if (cache != null) m.put(c, cache.getClass().getName()); } } } } @SuppressWarnings({"unchecked", "rawtypes"}) - public static LinkedHashMapWriter testForResponseElement(RestTestHarness harness, - String testServerBaseUrl, - String uri, - CloudSolrClient cloudSolrClient, List jsonPath, - Object expected, - long maxTimeoutSeconds) throws Exception { + public static LinkedHashMapWriter testForResponseElement( + RestTestHarness harness, + String testServerBaseUrl, + String uri, + CloudSolrClient cloudSolrClient, + List jsonPath, + Object expected, + long maxTimeoutSeconds) + throws Exception { boolean success = false; LinkedHashMapWriter m = null; TimeOut timeOut = new TimeOut(maxTimeoutSeconds, TimeUnit.SECONDS, TimeSource.NANO_TIME); - while (! timeOut.hasTimedOut()) { + while (!timeOut.hasTimedOut()) { try { - m = testServerBaseUrl == null ? getRespMap(uri, harness) : TestSolrConfigHandlerConcurrent.getAsMap(testServerBaseUrl + uri, cloudSolrClient); + m = + testServerBaseUrl == null + ? getRespMap(uri, harness) + : TestSolrConfigHandlerConcurrent.getAsMap( + testServerBaseUrl + uri, cloudSolrClient); } catch (Exception e) { Thread.sleep(100); continue; - } Object actual = Utils.getObjectByPath(m, false, jsonPath); if (expected instanceof ValidatingJsonMap.PredicateWithErrMsg) { - ValidatingJsonMap.PredicateWithErrMsg predicate = (ValidatingJsonMap.PredicateWithErrMsg) expected; + ValidatingJsonMap.PredicateWithErrMsg predicate = + (ValidatingJsonMap.PredicateWithErrMsg) expected; if (predicate.test(actual) == null) { success = true; break; @@ -627,22 +709,25 @@ public static LinkedHashMapWriter testForResponseElement(RestTestHarness harness } } Thread.sleep(100); - } - assertTrue(StrUtils.formatString("Could not get expected value ''{0}'' for path ''{1}'' full output: {2}, from server: {3}", expected, StrUtils.join(jsonPath, '/'), m.toString(), testServerBaseUrl), success); + assertTrue( + StrUtils.formatString( + "Could not get expected value ''{0}'' for path ''{1}'' full output: {2}, from server: {3}", + expected, StrUtils.join(jsonPath, '/'), m.toString(), testServerBaseUrl), + success); return m; } public void testReqParams() throws Exception { RestTestHarness harness = restTestHarness; - String payload = " {\n" + - " 'set' : {'x': {" + - " 'a':'A val',\n" + - " 'b': 'B val'}\n" + - " }\n" + - " }"; - + String payload = + " {\n" + + " 'set' : {'x': {" + + " 'a':'A val',\n" + + " 'b': 'B val'}\n" + + " }\n" + + " }"; TestSolrConfigHandler.runConfigCommand(harness, "/config/params", payload); @@ -664,9 +749,10 @@ public void testReqParams() throws Exception { "B val", TIMEOUT_S); - payload = "{\n" + - "'create-requesthandler' : { 'name' : '/d', registerPath :'/solr,/v2' , 'class': 'org.apache.solr.handler.DumpRequestHandler' }\n" + - "}"; + payload = + "{\n" + + "'create-requesthandler' : { 'name' : '/d', registerPath :'/solr,/v2' , 'class': 'org.apache.solr.handler.DumpRequestHandler' }\n" + + "}"; TestSolrConfigHandler.runConfigCommand(harness, "/config", payload); @@ -679,14 +765,10 @@ public void testReqParams() throws Exception { "/d", TIMEOUT_S); - TestSolrConfigHandler.testForResponseElement(harness, - null, - "/d?useParams=x", - null, - asList("params", "a"), - "A val", - TIMEOUT_S); - TestSolrConfigHandler.testForResponseElement(harness, + 
TestSolrConfigHandler.testForResponseElement( + harness, null, "/d?useParams=x", null, asList("params", "a"), "A val", TIMEOUT_S); + TestSolrConfigHandler.testForResponseElement( + harness, null, "/d?useParams=x&a=fomrequest", null, @@ -694,13 +776,15 @@ public void testReqParams() throws Exception { "fomrequest", TIMEOUT_S); - payload = "{\n" + - "'create-requesthandler' : { 'name' : '/dump1', registerPath :'/solr,/v2' , 'class': 'org.apache.solr.handler.DumpRequestHandler', 'useParams':'x' }\n" + - "}"; + payload = + "{\n" + + "'create-requesthandler' : { 'name' : '/dump1', registerPath :'/solr,/v2' , 'class': 'org.apache.solr.handler.DumpRequestHandler', 'useParams':'x' }\n" + + "}"; TestSolrConfigHandler.runConfigCommand(harness, "/config", payload); - TestSolrConfigHandler.testForResponseElement(harness, + TestSolrConfigHandler.testForResponseElement( + harness, null, "/config/overlay", null, @@ -709,23 +793,16 @@ public void testReqParams() throws Exception { TIMEOUT_S); TestSolrConfigHandler.testForResponseElement( - harness, - null, - "/dump1", - null, - asList("params", "a"), - "A val", - TIMEOUT_S); - - - payload = " {\n" + - " 'set' : {'y':{\n" + - " 'c':'CY val',\n" + - " 'b': 'BY val', " + - " 'd': ['val 1', 'val 2']}\n" + - " }\n" + - " }"; + harness, null, "/dump1", null, asList("params", "a"), "A val", TIMEOUT_S); + payload = + " {\n" + + " 'set' : {'y':{\n" + + " 'c':'CY val',\n" + + " 'b': 'BY val', " + + " 'd': ['val 1', 'val 2']}\n" + + " }\n" + + " }"; TestSolrConfigHandler.runConfigCommand(harness, "/config/params", payload); @@ -738,32 +815,14 @@ public void testReqParams() throws Exception { "CY val", TIMEOUT_S); - TestSolrConfigHandler.testForResponseElement(harness, - null, - "/dump1?useParams=y", - null, - asList("params", "c"), - "CY val", - TIMEOUT_S); - + TestSolrConfigHandler.testForResponseElement( + harness, null, "/dump1?useParams=y", null, asList("params", "c"), "CY val", TIMEOUT_S); TestSolrConfigHandler.testForResponseElement( - harness, - null, - "/dump1?useParams=y", - null, - asList("params", "b"), - "BY val", - TIMEOUT_S); + harness, null, "/dump1?useParams=y", null, asList("params", "b"), "BY val", TIMEOUT_S); TestSolrConfigHandler.testForResponseElement( - harness, - null, - "/dump1?useParams=y", - null, - asList("params", "a"), - "A val", - TIMEOUT_S); + harness, null, "/dump1?useParams=y", null, asList("params", "a"), "A val", TIMEOUT_S); TestSolrConfigHandler.testForResponseElement( harness, @@ -774,15 +833,15 @@ public void testReqParams() throws Exception { asList("val 1", "val 2"), TIMEOUT_S); - payload = " {\n" + - " 'update' : {'y': {\n" + - " 'c':'CY val modified',\n" + - " 'e':'EY val',\n" + - " 'b': 'BY val'" + - "}\n" + - " }\n" + - " }"; - + payload = + " {\n" + + " 'update' : {'y': {\n" + + " 'c':'CY val modified',\n" + + " 'e':'EY val',\n" + + " 'b': 'BY val'" + + "}\n" + + " }\n" + + " }"; TestSolrConfigHandler.runConfigCommand(harness, "/config/params", payload); @@ -804,14 +863,14 @@ public void testReqParams() throws Exception { "EY val", TIMEOUT_S); - payload = " {\n" + - " 'set' : {'y': {\n" + - " 'p':'P val',\n" + - " 'q': 'Q val'" + - "}\n" + - " }\n" + - " }"; - + payload = + " {\n" + + " 'set' : {'y': {\n" + + " 'p':'P val',\n" + + " 'q': 'Q val'" + + "}\n" + + " }\n" + + " }"; TestSolrConfigHandler.runConfigCommand(harness, "/config/params", payload); TestSolrConfigHandler.testForResponseElement( @@ -842,27 +901,29 @@ public void testReqParams() throws Exception { null, TIMEOUT_S); - payload = "{\n" + - " 
'create-requesthandler': {\n" + - " 'name': 'aRequestHandler',\n" + - " 'registerPath': '/v2',\n" + - " 'class': 'org.apache.solr.handler.DumpRequestHandler',\n" + - " 'spec': {\n" + - " 'methods': [\n" + - " 'GET',\n" + - " 'POST'\n" + - " ],\n" + - " 'url': {\n" + - " 'paths': [\n" + - " '/something/{part1}/fixed/{part2}'\n" + - " ]\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; + payload = + "{\n" + + " 'create-requesthandler': {\n" + + " 'name': 'aRequestHandler',\n" + + " 'registerPath': '/v2',\n" + + " 'class': 'org.apache.solr.handler.DumpRequestHandler',\n" + + " 'spec': {\n" + + " 'methods': [\n" + + " 'GET',\n" + + " 'POST'\n" + + " ],\n" + + " 'url': {\n" + + " 'paths': [\n" + + " '/something/{part1}/fixed/{part2}'\n" + + " ]\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; TestSolrConfigHandler.runConfigCommand(harness, "/config", payload); - TestSolrConfigHandler.testForResponseElement(harness, + TestSolrConfigHandler.testForResponseElement( + harness, null, "/config/overlay", null, @@ -870,41 +931,43 @@ public void testReqParams() throws Exception { "org.apache.solr.handler.DumpRequestHandler", TIMEOUT_S); RESTfulServerProvider oldProvider = restTestHarness.getServerProvider(); - restTestHarness.setServerProvider(() -> jetty.getBaseUrl().toString() + "/____v2/cores/" + DEFAULT_TEST_CORENAME); - - Map rsp = TestSolrConfigHandler.testForResponseElement( - harness, - null, - "/something/part1_Value/fixed/part2_Value?urlTemplateValues=part1&urlTemplateValues=part2", - null, - asList("urlTemplateValues"), - new ValidatingJsonMap.PredicateWithErrMsg<>() { - @Override - public String test(Object o) { - if (o instanceof Map) { - Map m = (Map) o; - if ("part1_Value".equals(m.get("part1")) && "part2_Value".equals(m.get("part2"))) return null; - - } - return "no match"; - } - - @Override - public String toString() { - return "{part1:part1_Value, part2 : part2_Value]"; - } - }, - TIMEOUT_S); + restTestHarness.setServerProvider( + () -> jetty.getBaseUrl().toString() + "/____v2/cores/" + DEFAULT_TEST_CORENAME); + + Map rsp = + TestSolrConfigHandler.testForResponseElement( + harness, + null, + "/something/part1_Value/fixed/part2_Value?urlTemplateValues=part1&urlTemplateValues=part2", + null, + asList("urlTemplateValues"), + new ValidatingJsonMap.PredicateWithErrMsg<>() { + @Override + public String test(Object o) { + if (o instanceof Map) { + Map m = (Map) o; + if ("part1_Value".equals(m.get("part1")) && "part2_Value".equals(m.get("part2"))) + return null; + } + return "no match"; + } + + @Override + public String toString() { + return "{part1:part1_Value, part2 : part2_Value]"; + } + }, + TIMEOUT_S); restTestHarness.setServerProvider(oldProvider); - } - @SuppressWarnings({"rawtypes"}) - public static LinkedHashMapWriter getRespMap(String path, RestTestHarness restHarness) throws Exception { + public static LinkedHashMapWriter getRespMap(String path, RestTestHarness restHarness) + throws Exception { String response = restHarness.query(path); try { - return (LinkedHashMapWriter) Utils.MAPWRITEROBJBUILDER.apply(Utils.getJSONParser(new StringReader(response))).getVal(); + return (LinkedHashMapWriter) + Utils.MAPWRITEROBJBUILDER.apply(Utils.getJSONParser(new StringReader(response))).getVal(); } catch (JSONParser.ParseException e) { log.error(response); return new LinkedHashMapWriter<>(); diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrDeletionPolicy1.java b/solr/core/src/test/org/apache/solr/core/TestSolrDeletionPolicy1.java index 375a58aa01e..47a0ad3a6ba 100644 --- 
a/solr/core/src/test/org/apache/solr/core/TestSolrDeletionPolicy1.java +++ b/solr/core/src/test/org/apache/solr/core/TestSolrDeletionPolicy1.java @@ -16,6 +16,7 @@ */ package org.apache.solr.core; +import java.util.Map; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.util.Constants; import org.apache.solr.SolrTestCaseJ4; @@ -23,16 +24,12 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.util.Map; - -/** - * - */ +/** */ public class TestSolrDeletionPolicy1 extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-delpolicy1.xml","schema.xml"); + initCore("solrconfig-delpolicy1.xml", "schema.xml"); } @Override @@ -41,49 +38,28 @@ public void setUp() throws Exception { super.setUp(); clearIndex(); } - + private void addDocs() { - assertU(adoc("id", String.valueOf(1), - "name", "name" + String.valueOf(1))); + assertU(adoc("id", String.valueOf(1), "name", "name" + String.valueOf(1))); assertU(commit()); - assertQ("return all docs", - req("id:[0 TO 1]"), - "*[count(//doc)=1]" - ); + assertQ("return all docs", req("id:[0 TO 1]"), "*[count(//doc)=1]"); - assertU(adoc("id", String.valueOf(2), - "name", "name" + String.valueOf(2))); + assertU(adoc("id", String.valueOf(2), "name", "name" + String.valueOf(2))); assertU(commit()); - assertQ("return all docs", - req("id:[0 TO 2]"), - "*[count(//doc)=2]" - ); + assertQ("return all docs", req("id:[0 TO 2]"), "*[count(//doc)=2]"); - assertU(adoc("id", String.valueOf(3), - "name", "name" + String.valueOf(3))); + assertU(adoc("id", String.valueOf(3), "name", "name" + String.valueOf(3))); assertU(optimize()); - assertQ("return all docs", - req("id:[0 TO 3]"), - "*[count(//doc)=3]" - ); + assertQ("return all docs", req("id:[0 TO 3]"), "*[count(//doc)=3]"); - assertU(adoc("id", String.valueOf(4), - "name", "name" + String.valueOf(4))); + assertU(adoc("id", String.valueOf(4), "name", "name" + String.valueOf(4))); assertU(optimize()); - assertQ("return all docs", - req("id:[0 TO 4]"), - "*[count(//doc)=4]" - ); + assertQ("return all docs", req("id:[0 TO 4]"), "*[count(//doc)=4]"); - assertU(adoc("id", String.valueOf(5), - "name", "name" + String.valueOf(5))); + assertU(adoc("id", String.valueOf(5), "name", "name" + String.valueOf(5))); assertU(optimize()); - assertQ("return all docs", - req("id:[0 TO 5]"), - "*[count(//doc)=5]" - ); - + assertQ("return all docs", req("id:[0 TO 5]"), "*[count(//doc)=5]"); } @Test @@ -94,8 +70,7 @@ public void testKeepOptimizedOnlyCommits() { Map commits = delPolicy.getCommits(); IndexCommit latest = delPolicy.getLatestCommit(); for (Long gen : commits.keySet()) { - if (commits.get(gen) == latest) - continue; + if (commits.get(gen) == latest) continue; assertEquals(1, commits.get(gen).getSegmentCount()); } } @@ -105,32 +80,35 @@ public void testNumCommitsConfigured() { IndexDeletionPolicyWrapper delPolicy = h.getCore().getDeletionPolicy(); addDocs(); Map commits = delPolicy.getCommits(); - assertTrue(commits.size() <= ((SolrDeletionPolicy) (delPolicy.getWrappedDeletionPolicy())).getMaxOptimizedCommitsToKeep()); + assertTrue( + commits.size() + <= ((SolrDeletionPolicy) (delPolicy.getWrappedDeletionPolicy())) + .getMaxOptimizedCommitsToKeep()); } @Test public void testCommitAge() throws InterruptedException { - assumeFalse("This test is not working on Windows (or maybe machines with only 2 CPUs)", - Constants.WINDOWS); - + assumeFalse( + "This test is not working on Windows (or maybe machines with only 2 CPUs)", + Constants.WINDOWS); + 
IndexDeletionPolicyWrapper delPolicy = h.getCore().getDeletionPolicy(); addDocs(); Map commits = delPolicy.getCommits(); IndexCommit ic = delPolicy.getLatestCommit(); - String agestr = ((SolrDeletionPolicy) (delPolicy.getWrappedDeletionPolicy())).getMaxCommitAge().replaceAll("[a-zA-Z]", "").replaceAll("-", ""); + String agestr = + ((SolrDeletionPolicy) (delPolicy.getWrappedDeletionPolicy())) + .getMaxCommitAge() + .replaceAll("[a-zA-Z]", "") + .replaceAll("-", ""); long age = Long.parseLong(agestr); Thread.sleep(age); - assertU(adoc("id", String.valueOf(6), - "name", "name" + String.valueOf(6))); + assertU(adoc("id", String.valueOf(6), "name", "name" + String.valueOf(6))); assertU(optimize()); - assertQ("return all docs", - req("id:[0 TO 6]"), - "*[count(//doc)=6]" - ); + assertQ("return all docs", req("id:[0 TO 6]"), "*[count(//doc)=6]"); commits = delPolicy.getCommits(); assertTrue(!commits.containsKey(ic.getGeneration())); } - } diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrDeletionPolicy2.java b/solr/core/src/test/org/apache/solr/core/TestSolrDeletionPolicy2.java index 99f3a5bb34f..b072721c8e4 100644 --- a/solr/core/src/test/org/apache/solr/core/TestSolrDeletionPolicy2.java +++ b/solr/core/src/test/org/apache/solr/core/TestSolrDeletionPolicy2.java @@ -20,13 +20,11 @@ import org.junit.BeforeClass; import org.junit.Test; -/** - * - */ +/** */ public class TestSolrDeletionPolicy2 extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-delpolicy2.xml","schema.xml"); + initCore("solrconfig-delpolicy2.xml", "schema.xml"); } @Test @@ -40,22 +38,18 @@ public void testFakeDeletionPolicyClass() { assertTrue("value1".equals(f.getVar1())); assertTrue("value2".equals(f.getVar2())); - assertU(adoc("id", String.valueOf(1), - "name", "name" + String.valueOf(1))); - + assertU(adoc("id", String.valueOf(1), "name", "name" + String.valueOf(1))); - assertTrue(System.getProperty("onInit").equals("test.org.apache.solr.core.FakeDeletionPolicy.onInit")); + assertTrue( + System.getProperty("onInit").equals("test.org.apache.solr.core.FakeDeletionPolicy.onInit")); assertU(commit()); - assertQ("return all docs", - req("id:[0 TO 1]"), - "*[count(//doc)=1]" - ); + assertQ("return all docs", req("id:[0 TO 1]"), "*[count(//doc)=1]"); - - assertTrue(System.getProperty("onCommit").equals("test.org.apache.solr.core.FakeDeletionPolicy.onCommit")); + assertTrue( + System.getProperty("onCommit") + .equals("test.org.apache.solr.core.FakeDeletionPolicy.onCommit")); System.clearProperty("onInit"); System.clearProperty("onCommit"); } - } diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrIndexConfig.java b/solr/core/src/test/org/apache/solr/core/TestSolrIndexConfig.java index 6cad20e9579..1de3540f93d 100644 --- a/solr/core/src/test/org/apache/solr/core/TestSolrIndexConfig.java +++ b/solr/core/src/test/org/apache/solr/core/TestSolrIndexConfig.java @@ -19,19 +19,19 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LiveIndexWriterConfig; - import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.util.RefCounted; -import org.apache.solr.util.RandomMergePolicy; import org.apache.solr.update.LoggingInfoStream; +import org.apache.solr.util.RandomMergePolicy; +import org.apache.solr.util.RefCounted; import org.junit.BeforeClass; -// See: https://issues.apache.org/jira/browse/SOLR-12028 Tests cannot remove files on Windows machines occasionally +// See: 
https://issues.apache.org/jira/browse/SOLR-12028 Tests cannot remove files on Windows +// machines occasionally public class TestSolrIndexConfig extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-indexconfig-mergepolicyfactory.xml","schema.xml"); + initCore("solrconfig-indexconfig-mergepolicyfactory.xml", "schema.xml"); } public void testLiveWriter() throws Exception { @@ -44,7 +44,6 @@ public void testLiveWriter() throws Exception { } } - public void testIndexConfigParsing() throws Exception { IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore()); try { @@ -57,9 +56,8 @@ public void testIndexConfigParsing() throws Exception { private void checkIndexWriterConfig(LiveIndexWriterConfig iwc) { assertTrue(iwc.getInfoStream() instanceof LoggingInfoStream); - assertTrue(iwc.getMergePolicy().getClass().toString(), - iwc.getMergePolicy() instanceof RandomMergePolicy); - + assertTrue( + iwc.getMergePolicy().getClass().toString(), + iwc.getMergePolicy() instanceof RandomMergePolicy); } - } diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrXml.java b/solr/core/src/test/org/apache/solr/core/TestSolrXml.java index 4405ddbfe37..17803fbbb4c 100644 --- a/solr/core/src/test/org/apache/solr/core/TestSolrXml.java +++ b/solr/core/src/test/org/apache/solr/core/TestSolrXml.java @@ -16,6 +16,9 @@ */ package org.apache.solr.core; +import static org.hamcrest.core.StringContains.containsString; + +import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import java.io.File; import java.io.IOException; import java.nio.file.Files; @@ -24,8 +27,6 @@ import java.util.Properties; import java.util.Set; import java.util.stream.Collectors; - -import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import org.apache.commons.exec.OS; import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; @@ -38,14 +39,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -import static org.hamcrest.core.StringContains.containsString; - public class TestSolrXml extends SolrTestCaseJ4 { - @Rule - public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); - @Rule - public ExpectedException expectedException = ExpectedException.none(); + @Rule public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); + @Rule public ExpectedException expectedException = ExpectedException.none(); // tmp dir, cleaned up automatically. private Path solrHome; @@ -59,7 +56,8 @@ public void testAllInfoPresent() throws IOException { Path testSrcRoot = TEST_PATH(); Files.copy(testSrcRoot.resolve("solr-50-all.xml"), solrHome.resolve("solr.xml")); - System.setProperty("solr.allowPaths", OS.isFamilyWindows() ? "C:\\tmp,C:\\home\\john" : "/tmp,/home/john"); + System.setProperty( + "solr.allowPaths", OS.isFamilyWindows() ? 
"C:\\tmp,C:\\home\\john" : "/tmp,/home/john"); NodeConfig cfg = SolrXmlConfig.fromSolrHome(solrHome, new Properties()); CloudConfig ccfg = cfg.getCloudConfig(); UpdateShardHandlerConfig ucfg = cfg.getUpdateShardHandlerConfig(); @@ -67,16 +65,30 @@ public void testAllInfoPresent() throws IOException { assertEquals("maxBooleanClauses", (Integer) 42, cfg.getBooleanQueryMaxClauseCount()); assertEquals("core admin handler class", "testAdminHandler", cfg.getCoreAdminHandlerClass()); - assertEquals("collection handler class", "testCollectionsHandler", cfg.getCollectionsHandlerClass()); + assertEquals( + "collection handler class", "testCollectionsHandler", cfg.getCollectionsHandlerClass()); assertEquals("info handler class", "testInfoHandler", cfg.getInfoHandlerClass()); - assertEquals("config set handler class", "testConfigSetsHandler", cfg.getConfigSetsHandlerClass()); + assertEquals( + "config set handler class", "testConfigSetsHandler", cfg.getConfigSetsHandlerClass()); assertEquals("core load threads", 11, cfg.getCoreLoadThreadCount(false)); assertEquals("replay update threads", 100, cfg.getReplayUpdatesThreads()); - assertThat("core root dir", cfg.getCoreRootDirectory().toString(), containsString("testCoreRootDirectory")); - assertEquals("distrib conn timeout", 22, cfg.getUpdateShardHandlerConfig().getDistributedConnectionTimeout()); - assertEquals("distrib socket timeout", 33, cfg.getUpdateShardHandlerConfig().getDistributedSocketTimeout()); + assertThat( + "core root dir", + cfg.getCoreRootDirectory().toString(), + containsString("testCoreRootDirectory")); + assertEquals( + "distrib conn timeout", + 22, + cfg.getUpdateShardHandlerConfig().getDistributedConnectionTimeout()); + assertEquals( + "distrib socket timeout", + 33, + cfg.getUpdateShardHandlerConfig().getDistributedSocketTimeout()); assertEquals("max update conn", 3, cfg.getUpdateShardHandlerConfig().getMaxUpdateConnections()); - assertEquals("max update conn/host", 37, cfg.getUpdateShardHandlerConfig().getMaxUpdateConnectionsPerHost()); + assertEquals( + "max update conn/host", + 37, + cfg.getUpdateShardHandlerConfig().getMaxUpdateConnectionsPerHost()); assertEquals("distrib conn timeout", 22, ucfg.getDistributedConnectionTimeout()); assertEquals("distrib socket timeout", 33, ucfg.getDistributedSocketTimeout()); assertEquals("max update conn", 3, ucfg.getMaxUpdateConnections()); @@ -96,17 +108,26 @@ public void testAllInfoPresent() throws IOException { assertEquals("zk client timeout", 77, ccfg.getZkClientTimeout()); assertEquals("zk host", "testZkHost", ccfg.getZkHost()); assertEquals("zk ACL provider", "DefaultZkACLProvider", ccfg.getZkACLProviderClass()); - assertEquals("zk credentials provider", "DefaultZkCredentialsProvider", ccfg.getZkCredentialsProviderClass()); + assertEquals( + "zk credentials provider", + "DefaultZkCredentialsProvider", + ccfg.getZkCredentialsProviderClass()); assertEquals(1, backupRepoConfigs.length); assertEquals("local", backupRepoConfigs[0].name); assertEquals("a.b.C", backupRepoConfigs[0].className); assertEquals("true", backupRepoConfigs[0].attributes.get("default")); assertEquals(0, backupRepoConfigs[0].initArgs.size()); - assertTrue("allowPaths", cfg.getAllowPaths().containsAll(OS.isFamilyWindows() ? - Set.of("C:\\tmp", "C:\\home\\john").stream().map(s -> Path.of(s)).collect(Collectors.toSet()) : - Set.of("/tmp", "/home/john").stream().map(s -> Path.of(s)).collect(Collectors.toSet()) - ) - ); + assertTrue( + "allowPaths", + cfg.getAllowPaths() + .containsAll( + OS.isFamilyWindows() + ? 
Set.of("C:\\tmp", "C:\\home\\john").stream() + .map(s -> Path.of(s)) + .collect(Collectors.toSet()) + : Set.of("/tmp", "/home/john").stream() + .map(s -> Path.of(s)) + .collect(Collectors.toSet()))); System.clearProperty("solr.allowPaths"); } @@ -130,14 +151,15 @@ public void testPropertySub() throws IOException { public void testExplicitNullGivesDefaults() { System.setProperty("jetty.port", "8000"); - String solrXml = "" + - "" + - "" + - "host" + - "0" + - "solr" + - "" + - ""; + String solrXml = + "" + + "" + + "" + + "host" + + "0" + + "solr" + + "" + + ""; NodeConfig cfg = SolrXmlConfig.fromString(solrHome, solrXml); assertNull("maxBooleanClauses", cfg.getBooleanQueryMaxClauseCount()); // default is null @@ -146,8 +168,8 @@ public void testExplicitNullGivesDefaults() { } public void testIntAsLongBad() { - String bad = ""+TestUtil.nextLong(random(), Integer.MAX_VALUE, Long.MAX_VALUE); - String solrXml = ""+bad+""; + String bad = "" + TestUtil.nextLong(random(), Integer.MAX_VALUE, Long.MAX_VALUE); + String solrXml = "" + bad + ""; expectedException.expect(SolrException.class); expectedException.expectMessage("transientCacheSize"); @@ -156,46 +178,51 @@ public void testIntAsLongBad() { public void testIntAsLongOk() { int ok = random().nextInt(); - String solrXml = ""+ok+""; + String solrXml = "" + ok + ""; NodeConfig cfg = SolrXmlConfig.fromString(solrHome, solrXml); assertEquals(ok, cfg.getTransientCacheSize()); } public void testMultiCloudSectionError() { - String solrXml = "" - + "true" - + "false" - + ""; + String solrXml = + "" + + "true" + + "false" + + ""; expectedException.expect(SolrException.class); expectedException.expectMessage("Multiple instances of solrcloud section found in solr.xml"); SolrXmlConfig.fromString(solrHome, solrXml); // return not used, only for validation } public void testMultiLoggingSectionError() { - String solrXml = "" - + "foo" - + "foo" - + ""; + String solrXml = + "" + + "foo" + + "foo" + + ""; expectedException.expect(SolrException.class); expectedException.expectMessage("Multiple instances of logging section found in solr.xml"); SolrXmlConfig.fromString(solrHome, solrXml); // return not used, only for validation } public void testMultiLoggingWatcherSectionError() { - String solrXml = "" - + "42" - + "42" - + "42" - + ""; + String solrXml = + "" + + "42" + + "42" + + "42" + + ""; expectedException.expect(SolrException.class); - expectedException.expectMessage("Multiple instances of logging/watcher section found in solr.xml"); + expectedException.expectMessage( + "Multiple instances of logging/watcher section found in solr.xml"); SolrXmlConfig.fromString(solrHome, solrXml); // return not used, only for validation } - + public void testValidStringValueWhenBoolTypeIsExpected() { boolean schemaCache = random().nextBoolean(); - String solrXml = String.format(Locale.ROOT, "%s", schemaCache); + String solrXml = + String.format(Locale.ROOT, "%s", schemaCache); NodeConfig nodeConfig = SolrXmlConfig.fromString(solrHome, solrXml); assertEquals("gen core node names", schemaCache, nodeConfig.hasSchemaCache()); @@ -203,14 +230,25 @@ public void testValidStringValueWhenBoolTypeIsExpected() { public void testValidStringValueWhenIntTypeIsExpected() { int maxUpdateConnections = random().nextInt(); - String solrXml = String.format(Locale.ROOT, "%d", maxUpdateConnections); + String solrXml = + String.format( + Locale.ROOT, + "%d", + maxUpdateConnections); NodeConfig nodeConfig = SolrXmlConfig.fromString(solrHome, solrXml); - assertEquals("max update conn", 
maxUpdateConnections, nodeConfig.getUpdateShardHandlerConfig().getMaxUpdateConnections()); + assertEquals( + "max update conn", + maxUpdateConnections, + nodeConfig.getUpdateShardHandlerConfig().getMaxUpdateConnections()); } public void testFailAtConfigParseTimeWhenIntTypeIsExpectedAndLongTypeIsGiven() { long val = TestUtil.nextLong(random(), Integer.MAX_VALUE, Long.MAX_VALUE); - String solrXml = String.format(Locale.ROOT, "%d", val); + String solrXml = + String.format( + Locale.ROOT, + "%d", + val); expectedException.expect(SolrException.class); expectedException.expectMessage("Error parsing 'maxUpdateConnections'"); @@ -218,7 +256,8 @@ public void testFailAtConfigParseTimeWhenIntTypeIsExpectedAndLongTypeIsGiven() { } public void testFailAtConfigParseTimeWhenBoolTypeIsExpectedAndValueIsInvalidString() { - String solrXml = "NOT_A_BOOLEAN"; + String solrXml = + "NOT_A_BOOLEAN"; expectedException.expect(SolrException.class); expectedException.expectMessage("invalid boolean value: NOT_A_BOOLEAN"); @@ -228,25 +267,36 @@ public void testFailAtConfigParseTimeWhenBoolTypeIsExpectedAndValueIsInvalidStri public void testFailAtConfigParseTimeWhenIntTypeIsExpectedAndBoolTypeIsGiven() { // given: boolean randomBoolean = random().nextBoolean(); - String solrXml = String.format(Locale.ROOT, "%s", randomBoolean); + String solrXml = + String.format( + Locale.ROOT, + "%s", + randomBoolean); expectedException.expect(SolrException.class); - expectedException.expectMessage(String.format(Locale.ROOT, "Value of 'unknown-option' can not be parsed as 'int': \"%s\"", randomBoolean)); + expectedException.expectMessage( + String.format( + Locale.ROOT, + "Value of 'unknown-option' can not be parsed as 'int': \"%s\"", + randomBoolean)); SolrXmlConfig.fromString(solrHome, solrXml); // return not used, only for validation } public void testFailAtConfigParseTimeWhenUnrecognizedSolrCloudOptionWasFound() { - String solrXml = "host8983true"; + String solrXml = + "host8983true"; expectedException.expect(SolrException.class); - expectedException.expectMessage("Unknown configuration parameter in section of solr.xml: unknown-option"); + expectedException.expectMessage( + "Unknown configuration parameter in section of solr.xml: unknown-option"); SolrXmlConfig.fromString(solrHome, solrXml); // return not used, only for validation } public void testFailAtConfigParseTimeWhenUnrecognizedSolrOptionWasFound() { - String solrXml = "truetrue"; + String solrXml = + "truetrue"; expectedException.expect(SolrException.class); expectedException.expectMessage("Unknown configuration value in solr.xml: unknown-bool-option"); @@ -255,7 +305,11 @@ public void testFailAtConfigParseTimeWhenUnrecognizedSolrOptionWasFound() { } public void testFailAtConfigParseTimeWhenUnrecognizedLoggingOptionWasFound() { - String solrXml = String.format(Locale.ROOT, "%s", random().nextBoolean()); + String solrXml = + String.format( + Locale.ROOT, + "%s", + random().nextBoolean()); expectedException.expect(SolrException.class); expectedException.expectMessage("Unknown value in logwatcher config: unknown-option"); @@ -264,14 +318,17 @@ public void testFailAtConfigParseTimeWhenUnrecognizedLoggingOptionWasFound() { } public void testFailAtConfigParseTimeWhenLoggingConfigParamsAreDuplicated() { - String v1 = ""+random().nextInt(); - String v2 = ""+random().nextInt(); - String solrXml = String.format(Locale.ROOT, - "" + - "%s" + - "%s" + - "", - v1, v2); + String v1 = "" + random().nextInt(); + String v2 = "" + random().nextInt(); + String solrXml = + String.format( + 
Locale.ROOT, + "" + + "%s" + + "%s" + + "", + v1, + v2); expectedException.expect(SolrException.class); expectedException.expectMessage(" section of solr.xml contains duplicated 'class'"); @@ -280,37 +337,47 @@ public void testFailAtConfigParseTimeWhenLoggingConfigParamsAreDuplicated() { } public void testFailAtConfigParseTimeWhenSolrCloudConfigParamsAreDuplicated() { - String v1 = ""+random().nextInt(); - String v2 = ""+random().nextInt(); - String v3 = ""+random().nextInt(); - String solrXml = String.format(Locale.ROOT, - "" + - "%s" + - "%s" + - "foo" + // other ok val in middle - "%s" + - "", - v1, v2, v3); - + String v1 = "" + random().nextInt(); + String v2 = "" + random().nextInt(); + String v3 = "" + random().nextInt(); + String solrXml = + String.format( + Locale.ROOT, + "" + + "%s" + + "%s" + + "foo" + + // other ok val in middle + "%s" + + "", + v1, + v2, + v3); + expectedException.expect(SolrException.class); - expectedException.expectMessage(" section of solr.xml contains duplicated 'zkClientTimeout'"); + expectedException.expectMessage( + " section of solr.xml contains duplicated 'zkClientTimeout'"); SolrXmlConfig.fromString(solrHome, solrXml); // return not used, only for validation } @Ignore public void testFailAtConfigParseTimeWhenSolrConfigParamsAreDuplicated() { - String v1 = ""+random().nextInt(); - String v2 = ""+random().nextInt(); - String solrXml = String.format(Locale.ROOT, - "" + - "%s" + - "%s" + - "", - v1, v2); + String v1 = "" + random().nextInt(); + String v2 = "" + random().nextInt(); + String solrXml = + String.format( + Locale.ROOT, + "" + + "%s" + + "%s" + + "", + v1, + v2); expectedException.expect(SolrException.class); - expectedException.expectMessage("Main section of solr.xml contains duplicated 'coreLoadThreads'"); + expectedException.expectMessage( + "Main section of solr.xml contains duplicated 'coreLoadThreads'"); SolrXmlConfig.fromString(solrHome, solrXml); // return not used, only for validation } @@ -333,14 +400,17 @@ public void testCloudConfigRequiresHost() { expectedException.expect(SolrException.class); expectedException.expectMessage("solrcloud section missing required entry 'host'"); - SolrXmlConfig.fromString(solrHome, "8983"); + SolrXmlConfig.fromString( + solrHome, "8983"); } public void testCloudConfigRequiresHostContext() { expectedException.expect(SolrException.class); expectedException.expectMessage("solrcloud section missing required entry 'hostContext'"); - SolrXmlConfig.fromString(solrHome, "host8983"); + SolrXmlConfig.fromString( + solrHome, + "host8983"); } public void testMultiBackupSectionError() { diff --git a/solr/core/src/test/org/apache/solr/core/TestXIncludeConfig.java b/solr/core/src/test/org/apache/solr/core/TestXIncludeConfig.java index 0a807953df7..62dd2356d50 100644 --- a/solr/core/src/test/org/apache/solr/core/TestXIncludeConfig.java +++ b/solr/core/src/test/org/apache/solr/core/TestXIncludeConfig.java @@ -17,17 +17,14 @@ package org.apache.solr.core; import javax.xml.parsers.DocumentBuilderFactory; - +import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.schema.IndexSchema; import org.apache.solr.update.processor.RegexReplaceProcessorFactory; import org.apache.solr.update.processor.UpdateRequestProcessorChain; -import org.apache.solr.SolrTestCaseJ4; import org.junit.Assume; import org.junit.BeforeClass; -/** - * Test both XInclude as well as more old school "entity includes" - */ +/** Test both XInclude as well as more old school "entity includes" */ public class TestXIncludeConfig extends SolrTestCaseJ4 
{ @BeforeClass @@ -39,7 +36,7 @@ public static void beforeClass() throws Exception { public void setUp() throws Exception { javax.xml.parsers.DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); try { - //see whether it even makes sense to run this test + // see whether it even makes sense to run this test dbf.setXIncludeAware(true); dbf.setNamespaceAware(true); } catch (UnsupportedOperationException e) { @@ -51,33 +48,29 @@ public void setUp() throws Exception { public void testXInclude() throws Exception { SolrCore core = h.getCore(); - assertNotNull("includedHandler is null", - core.getRequestHandler("/includedHandler")); + assertNotNull("includedHandler is null", core.getRequestHandler("/includedHandler")); - UpdateRequestProcessorChain chain - = core.getUpdateProcessingChain("special-include"); + UpdateRequestProcessorChain chain = core.getUpdateProcessingChain("special-include"); assertNotNull("chain is missing included processor", chain); - assertEquals("chain with inclued processor is wrong size", - 1, chain.getProcessors().size()); - assertEquals("chain has wrong included processor", - RegexReplaceProcessorFactory.class, - chain.getProcessors().get(0).getClass()); + assertEquals("chain with inclued processor is wrong size", 1, chain.getProcessors().size()); + assertEquals( + "chain has wrong included processor", + RegexReplaceProcessorFactory.class, + chain.getProcessors().get(0).getClass()); IndexSchema schema = core.getLatestSchema(); - + // xinclude assertNotNull("ft-included is null", schema.getFieldTypeByName("ft-included")); assertNotNull("field-included is null", schema.getFieldOrNull("field-included")); // entity include - assertNotNull("ft-entity-include1 is null", - schema.getFieldTypeByName("ft-entity-include1")); - assertNotNull("ft-entity-include2 is null", - schema.getFieldTypeByName("ft-entity-include2")); + assertNotNull("ft-entity-include1 is null", schema.getFieldTypeByName("ft-entity-include1")); + assertNotNull("ft-entity-include2 is null", schema.getFieldTypeByName("ft-entity-include2")); // sanity check - assertNull("ft-entity-include3 is not null", // Does Not Exist Anywhere - schema.getFieldTypeByName("ft-entity-include3")); - + assertNull( + "ft-entity-include3 is not null", // Does Not Exist Anywhere + schema.getFieldTypeByName("ft-entity-include3")); } } diff --git a/solr/core/src/test/org/apache/solr/core/backup/BackupFilePathsTest.java b/solr/core/src/test/org/apache/solr/core/backup/BackupFilePathsTest.java index bd48789195c..0b342e3ce76 100644 --- a/solr/core/src/test/org/apache/solr/core/backup/BackupFilePathsTest.java +++ b/solr/core/src/test/org/apache/solr/core/backup/BackupFilePathsTest.java @@ -16,56 +16,72 @@ */ package org.apache.solr.core.backup; -import org.junit.Test; +import static org.junit.Assert.*; import java.util.List; import java.util.Optional; +import org.junit.Test; -import static org.junit.Assert.*; - -/** - * Unit tests for {@link BackupFilePaths} - */ +/** Unit tests for {@link BackupFilePaths} */ public class BackupFilePathsTest { - @Test - public void testGetBackupPropsName() { - final BackupId initialId = BackupId.zero(); - final BackupId subsequentId = initialId.nextBackupId(); + @Test + public void testGetBackupPropsName() { + final BackupId initialId = BackupId.zero(); + final BackupId subsequentId = initialId.nextBackupId(); - assertEquals("backup_0.properties", BackupFilePaths.getBackupPropsName(initialId)); - assertEquals("backup_1.properties", BackupFilePaths.getBackupPropsName(subsequentId)); - } + 
assertEquals("backup_0.properties", BackupFilePaths.getBackupPropsName(initialId)); + assertEquals("backup_1.properties", BackupFilePaths.getBackupPropsName(subsequentId)); + } - @Test - public void testFindAllBackupIdCanReturnEmpty() { - final List foundBackupIds = BackupFilePaths.findAllBackupIdsFromFileListing(new String[0]); - assertTrue(foundBackupIds.isEmpty()); - } + @Test + public void testFindAllBackupIdCanReturnEmpty() { + final List foundBackupIds = + BackupFilePaths.findAllBackupIdsFromFileListing(new String[0]); + assertTrue(foundBackupIds.isEmpty()); + } - @Test - public void testFindAllBackupPropertiesFiles() { - final String[] backupFiles = new String[] {"aaa", "baa.properties", "backup.properties", "backup_1.properties", - "backup_2.properties", "backup_neqewq.properties", "backup999.properties"}; - final List foundBackupIds = BackupFilePaths.findAllBackupIdsFromFileListing(backupFiles); + @Test + public void testFindAllBackupPropertiesFiles() { + final String[] backupFiles = + new String[] { + "aaa", + "baa.properties", + "backup.properties", + "backup_1.properties", + "backup_2.properties", + "backup_neqewq.properties", + "backup999.properties" + }; + final List foundBackupIds = + BackupFilePaths.findAllBackupIdsFromFileListing(backupFiles); - assertEquals(2, foundBackupIds.size()); - assertEquals(new BackupId(1), foundBackupIds.get(0)); - assertEquals(new BackupId(2), foundBackupIds.get(1)); - } + assertEquals(2, foundBackupIds.size()); + assertEquals(new BackupId(1), foundBackupIds.get(0)); + assertEquals(new BackupId(2), foundBackupIds.get(1)); + } - @Test - public void testFindMostRecentBackupIdCanReturnEmpty() { - Optional op = BackupFilePaths.findMostRecentBackupIdFromFileListing(new String[0]); - assertFalse(op.isPresent()); - } + @Test + public void testFindMostRecentBackupIdCanReturnEmpty() { + Optional op = BackupFilePaths.findMostRecentBackupIdFromFileListing(new String[0]); + assertFalse(op.isPresent()); + } - @Test - public void testFindMostRecentBackupPropertiesFile() { - final String[] backupFiles = new String[] {"aaa", "baa.properties", "backup.properties", "backup_1.properties", - "backup_2.properties", "backup_neqewq.properties", "backup999.properties"}; - final Optional filenameOption = BackupFilePaths.findMostRecentBackupIdFromFileListing(backupFiles); - assertTrue(filenameOption.isPresent()); - assertEquals(new BackupId(2), filenameOption.get()); - } + @Test + public void testFindMostRecentBackupPropertiesFile() { + final String[] backupFiles = + new String[] { + "aaa", + "baa.properties", + "backup.properties", + "backup_1.properties", + "backup_2.properties", + "backup_neqewq.properties", + "backup999.properties" + }; + final Optional filenameOption = + BackupFilePaths.findMostRecentBackupIdFromFileListing(backupFiles); + assertTrue(filenameOption.isPresent()); + assertEquals(new BackupId(2), filenameOption.get()); + } } diff --git a/solr/core/src/test/org/apache/solr/core/backup/BackupIdTest.java b/solr/core/src/test/org/apache/solr/core/backup/BackupIdTest.java index d4d76fa1e15..3ddde07d079 100644 --- a/solr/core/src/test/org/apache/solr/core/backup/BackupIdTest.java +++ b/solr/core/src/test/org/apache/solr/core/backup/BackupIdTest.java @@ -17,35 +17,33 @@ package org.apache.solr.core.backup; +import static org.apache.solr.core.backup.BackupId.TRADITIONAL_BACKUP; + import org.apache.solr.SolrTestCase; import org.junit.Test; -import static org.apache.solr.core.backup.BackupId.TRADITIONAL_BACKUP; - -/** - * Unit tests for {@link BackupId} - */ 
+/** Unit tests for {@link BackupId} */ public class BackupIdTest extends SolrTestCase { - @Test - public void testZero() { - final BackupId id = BackupId.zero(); - assertEquals(0, id.getId()); - } - - @Test - public void testTraditionalBackupId() { - final BackupId id = BackupId.traditionalBackup(); - assertEquals(TRADITIONAL_BACKUP, id.getId()); - } - - @Test - public void testBackupIdIncrementing() { - final BackupId initialId = new BackupId(3); - final BackupId nextId = initialId.nextBackupId(); - - assertEquals(3, initialId.getId()); - assertEquals(4, nextId.getId()); - assertTrue(initialId.compareTo(nextId) < 0); - } + @Test + public void testZero() { + final BackupId id = BackupId.zero(); + assertEquals(0, id.getId()); + } + + @Test + public void testTraditionalBackupId() { + final BackupId id = BackupId.traditionalBackup(); + assertEquals(TRADITIONAL_BACKUP, id.getId()); + } + + @Test + public void testBackupIdIncrementing() { + final BackupId initialId = new BackupId(3); + final BackupId nextId = initialId.nextBackupId(); + + assertEquals(3, initialId.getId()); + assertEquals(4, nextId.getId()); + assertTrue(initialId.compareTo(nextId) < 0); + } } diff --git a/solr/core/src/test/org/apache/solr/core/backup/ShardBackupIdTest.java b/solr/core/src/test/org/apache/solr/core/backup/ShardBackupIdTest.java index 5a8558b7cb2..19415214fef 100644 --- a/solr/core/src/test/org/apache/solr/core/backup/ShardBackupIdTest.java +++ b/solr/core/src/test/org/apache/solr/core/backup/ShardBackupIdTest.java @@ -21,32 +21,32 @@ public class ShardBackupIdTest extends SolrTestCaseJ4 { - @Test - public void testCanParseIDFromStringWithUnsplitShardName() { - final String idString = "md_shard1_0"; + @Test + public void testCanParseIDFromStringWithUnsplitShardName() { + final String idString = "md_shard1_0"; - final ShardBackupId parsedId = ShardBackupId.from(idString); + final ShardBackupId parsedId = ShardBackupId.from(idString); - assertEquals("shard1", parsedId.getShardName()); - assertEquals(new BackupId(0), parsedId.getContainingBackupId()); - } + assertEquals("shard1", parsedId.getShardName()); + assertEquals(new BackupId(0), parsedId.getContainingBackupId()); + } - @Test - public void testCanParseIdFromStringWithSplitShardName() { - final String idString = "md_shard2_0_5"; + @Test + public void testCanParseIdFromStringWithSplitShardName() { + final String idString = "md_shard2_0_5"; - final ShardBackupId parsedId = ShardBackupId.from(idString); + final ShardBackupId parsedId = ShardBackupId.from(idString); - assertEquals("shard2_0", parsedId.getShardName()); - assertEquals(new BackupId(5), parsedId.getContainingBackupId()); - } + assertEquals("shard2_0", parsedId.getShardName()); + assertEquals(new BackupId(5), parsedId.getContainingBackupId()); + } - @Test - public void testCanParseIdFromStringWithManySplitShardName() { - final String idString = "md_shard2_0_1_3"; - final ShardBackupId parsedId = ShardBackupId.from(idString); + @Test + public void testCanParseIdFromStringWithManySplitShardName() { + final String idString = "md_shard2_0_1_3"; + final ShardBackupId parsedId = ShardBackupId.from(idString); - assertEquals("shard2_0_1", parsedId.getShardName()); - assertEquals(new BackupId(3), parsedId.getContainingBackupId()); - } + assertEquals("shard2_0_1", parsedId.getShardName()); + assertEquals(new BackupId(3), parsedId.getContainingBackupId()); + } } diff --git a/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCloudSnapshots.java 
b/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCloudSnapshots.java index 9419228a9f8..e60a4647854 100644 --- a/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCloudSnapshots.java +++ b/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCloudSnapshots.java @@ -24,7 +24,6 @@ import java.util.Optional; import java.util.function.Function; import java.util.stream.Collectors; - import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.lucene.util.TestUtil; @@ -54,10 +53,11 @@ /** * Tests snapshot functionality in a SolrCloud cluster. * - * This test uses the (now deprecated) traditional backup method/format. For more thorough tests using the new format, - * see {@link org.apache.solr.handler.TestIncrementalCoreBackup} + *
<p>
This test uses the (now deprecated) traditional backup method/format. For more thorough tests + * using the new format, see {@link org.apache.solr.handler.TestIncrementalCoreBackup} */ -@LuceneTestCase.SuppressCodecs({"SimpleText"}) // Backups do checksum validation against a footer value not present in 'SimpleText' +// Backups do checksum validation against a footer value not present in 'SimpleText' +@LuceneTestCase.SuppressCodecs({"SimpleText"}) @SolrTestCaseJ4.SuppressSSL // Currently unknown why SSL does not work with this test @Slow public class TestSolrCloudSnapshots extends SolrCloudTestCase { @@ -71,8 +71,9 @@ public class TestSolrCloudSnapshots extends SolrCloudTestCase { public static void setupClass() throws Exception { useFactory("solr.StandardDirectoryFactory"); System.setProperty("solr.allowPaths", "*"); - configureCluster(NUM_NODES)// nodes - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + configureCluster(NUM_NODES) // nodes + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .configure(); docsSeed = random().nextLong(); @@ -89,7 +90,8 @@ public static void teardownClass() throws Exception { public void testSnapshots() throws Exception { CloudSolrClient solrClient = cluster.getSolrClient(); String collectionName = "SolrCloudSnapshots"; - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, "conf1", NUM_SHARDS, NUM_REPLICAS); + CollectionAdminRequest.Create create = + CollectionAdminRequest.createCollection(collectionName, "conf1", NUM_SHARDS, NUM_REPLICAS); create.process(solrClient); cluster.waitForActiveCollection(collectionName, NUM_SHARDS, NUM_SHARDS * NUM_REPLICAS); @@ -99,7 +101,9 @@ public void testSnapshots() throws Exception { // Set a collection property final boolean collectionPropertySet = usually(); if (collectionPropertySet) { - CollectionAdminRequest.CollectionProp setProperty = CollectionAdminRequest.setCollectionProperty(collectionName, "test.property", "test.value"); + CollectionAdminRequest.CollectionProp setProperty = + CollectionAdminRequest.setCollectionProperty( + collectionName, "test.property", "test.value"); setProperty.process(solrClient); } @@ -118,7 +122,8 @@ public void testSnapshots() throws Exception { Thread.sleep(5000); // Figure out if at-least one replica is "down". - DocCollection collState = solrClient.getZkStateReader().getClusterState().getCollection(collectionName); + DocCollection collState = + solrClient.getZkStateReader().getClusterState().getCollection(collectionName); for (Slice s : collState.getSlices()) { for (Replica replica : s.getReplicas()) { if (replica.getState() == State.DOWN) { @@ -128,23 +133,28 @@ public void testSnapshots() throws Exception { } } - int expectedCoresWithSnapshot = stoppedCoreName.isPresent() ? (NUM_SHARDS * NUM_REPLICAS) - 1 : (NUM_SHARDS * NUM_REPLICAS); + int expectedCoresWithSnapshot = + stoppedCoreName.isPresent() ? 
(NUM_SHARDS * NUM_REPLICAS) - 1 : (NUM_SHARDS * NUM_REPLICAS); - CollectionAdminRequest.CreateSnapshot createSnap = new CollectionAdminRequest.CreateSnapshot(collectionName, commitName); + CollectionAdminRequest.CreateSnapshot createSnap = + new CollectionAdminRequest.CreateSnapshot(collectionName, commitName); createSnap.process(solrClient); - Collection collectionSnaps = listCollectionSnapshots(solrClient, collectionName); + Collection collectionSnaps = + listCollectionSnapshots(solrClient, collectionName); assertEquals(1, collectionSnaps.size()); CollectionSnapshotMetaData meta = collectionSnaps.iterator().next(); assertEquals(commitName, meta.getName()); assertEquals(CollectionSnapshotMetaData.SnapshotStatus.Successful, meta.getStatus()); assertEquals(expectedCoresWithSnapshot, meta.getReplicaSnapshots().size()); - Map snapshotByCoreName = meta.getReplicaSnapshots().stream() - .collect(Collectors.toMap(CoreSnapshotMetaData::getCoreName, Function.identity())); + Map snapshotByCoreName = + meta.getReplicaSnapshots().stream() + .collect(Collectors.toMap(CoreSnapshotMetaData::getCoreName, Function.identity())); - DocCollection collectionState = solrClient.getZkStateReader().getClusterState().getCollection(collectionName); + DocCollection collectionState = + solrClient.getZkStateReader().getClusterState().getCollection(collectionName); assertEquals(2, collectionState.getActiveSlices().size()); - for ( Slice shard : collectionState.getActiveSlices() ) { + for (Slice shard : collectionState.getActiveSlices()) { assertEquals(2, shard.getReplicas().size()); for (Replica replica : shard.getReplicas()) { if (stoppedCoreName.isPresent() && stoppedCoreName.get().equals(replica.getCoreName())) { @@ -159,7 +169,8 @@ public void testSnapshots() throws Exception { try (SolrClient adminClient = getHttpSolrClient(replicaBaseUrl)) { Collection snapshots = listCoreSnapshots(adminClient, coreName); - Optional metaData = snapshots.stream().filter(x -> commitName.equals(x.getName())).findFirst(); + Optional metaData = + snapshots.stream().filter(x -> commitName.equals(x.getName())).findFirst(); assertTrue("Snapshot not created for core " + coreName, metaData.isPresent()); assertEquals(coreSnapshot.getIndexDirPath(), metaData.get().getIndexDirPath()); assertEquals(coreSnapshot.getGenerationNumber(), metaData.get().getGenerationNumber()); @@ -178,46 +189,59 @@ public void testSnapshots() throws Exception { String backupName = "mytestbackup"; String restoreCollectionName = collectionName + "_restored"; - //Create a backup using the earlier created snapshot. + // Create a backup using the earlier created snapshot. { - CollectionAdminRequest.Backup backup = CollectionAdminRequest.backupCollection(collectionName, backupName) - .setLocation(backupLocation).setCommitName(commitName).setIncremental(false); + CollectionAdminRequest.Backup backup = + CollectionAdminRequest.backupCollection(collectionName, backupName) + .setLocation(backupLocation) + .setCommitName(commitName) + .setIncremental(false); if (random().nextBoolean()) { assertEquals(0, backup.process(solrClient).getStatus()); } else { - assertEquals(RequestStatusState.COMPLETED, backup.processAndWait(solrClient, 30));//async + assertEquals(RequestStatusState.COMPLETED, backup.processAndWait(solrClient, 30)); // async } } // Restore backup. 
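The restore block below exercises two SolrJ paths: a synchronous process(...) call and an asynchronous processAndWait(...). As a hedged sketch of what the async branch bundles into a single call (submit with an async id, then poll the REQUESTSTATUS API until a terminal state); the helper name and the async id here are illustrative, not part of the test:

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.request.CollectionAdminRequest;
    import org.apache.solr.client.solrj.response.RequestStatusState;

    // Sketch only: the explicit flow that processAndWait(client, 30) wraps.
    static RequestStatusState restoreAsync(
        SolrClient client, String restoreCollection, String backupName, String location)
        throws Exception {
      CollectionAdminRequest.Restore restore =
          CollectionAdminRequest.restoreCollection(restoreCollection, backupName)
              .setLocation(location);
      String asyncId = restore.processAsync("example-restore-id", client); // arbitrary id
      // Polls REQUESTSTATUS until COMPLETED, FAILED, or the timeout elapses.
      return CollectionAdminRequest.requestStatus(asyncId).waitFor(client, 30);
    }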
{ - CollectionAdminRequest.Restore restore = CollectionAdminRequest.restoreCollection(restoreCollectionName, backupName) - .setLocation(backupLocation); -// if (replicaFailures) { -// // In this case one of the Solr servers would be down. Hence we need to increase -// // max_shards_per_node property for restore command to succeed. -// restore.setMaxShardsPerNode(2); -// } + CollectionAdminRequest.Restore restore = + CollectionAdminRequest.restoreCollection(restoreCollectionName, backupName) + .setLocation(backupLocation); + // if (replicaFailures) { + // // In this case one of the Solr servers would be down. Hence we need to increase + // // max_shards_per_node property for restore command to succeed. + // restore.setMaxShardsPerNode(2); + // } if (random().nextBoolean()) { assertEquals(0, restore.process(solrClient).getStatus()); } else { - assertEquals(RequestStatusState.COMPLETED, restore.processAndWait(solrClient, 30));//async + assertEquals(RequestStatusState.COMPLETED, restore.processAndWait(solrClient, 30)); // async } AbstractDistribZkTestBase.waitForRecoveriesToFinish( - restoreCollectionName, cluster.getSolrClient().getZkStateReader(), log.isDebugEnabled(), true, 30); + restoreCollectionName, + cluster.getSolrClient().getZkStateReader(), + log.isDebugEnabled(), + true, + 30); BackupRestoreUtils.verifyDocs(nDocs, solrClient, restoreCollectionName); } // Check collection property - Map collectionProperties = solrClient.getZkStateReader().getCollectionProperties(restoreCollectionName); + Map collectionProperties = + solrClient.getZkStateReader().getCollectionProperties(restoreCollectionName); if (collectionPropertySet) { - assertEquals("Snapshot restore hasn't restored collection properties", "test.value", collectionProperties.get("test.property")); + assertEquals( + "Snapshot restore hasn't restored collection properties", + "test.value", + collectionProperties.get("test.property")); } else { - assertNull("Collection property shouldn't be present", collectionProperties.get("test.property")); + assertNull( + "Collection property shouldn't be present", collectionProperties.get("test.property")); } - // Verify if the snapshot deletion works correctly when one or more replicas containing the snapshot are - // deleted + // Verify if the snapshot deletion works correctly when one or more replicas containing the + // snapshot are deleted boolean replicaDeletion = rarely(); if (replicaDeletion) { CoreSnapshotMetaData replicaToDelete = null; @@ -230,13 +254,15 @@ public void testSnapshots() throws Exception { } if (replicaToDelete != null) { - collectionState = solrClient.getZkStateReader().getClusterState().getCollection(collectionName); + collectionState = + solrClient.getZkStateReader().getClusterState().getCollection(collectionName); for (Slice s : collectionState.getSlices()) { for (Replica r : s.getReplicas()) { if (r.getCoreName().equals(replicaToDelete.getCoreName())) { log.info("Deleting replica {}", r); - CollectionAdminRequest.DeleteReplica delReplica = CollectionAdminRequest.deleteReplica(collectionName, - replicaToDelete.getShardId(), r.getName()); + CollectionAdminRequest.DeleteReplica delReplica = + CollectionAdminRequest.deleteReplica( + collectionName, replicaToDelete.getShardId(), r.getName()); delReplica.process(solrClient); // The replica deletion will cleanup the snapshot meta-data. 
snapshotByCoreName.remove(r.getCoreName()); @@ -248,14 +274,15 @@ public void testSnapshots() throws Exception { } // Delete snapshot - CollectionAdminRequest.DeleteSnapshot deleteSnap = new CollectionAdminRequest.DeleteSnapshot(collectionName, commitName); + CollectionAdminRequest.DeleteSnapshot deleteSnap = + new CollectionAdminRequest.DeleteSnapshot(collectionName, commitName); deleteSnap.process(solrClient); // Wait for a while so that the cluster state updates are propagated to the client side. Thread.sleep(2000); collectionState = solrClient.getZkStateReader().getClusterState().getCollection(collectionName); - for ( Slice shard : collectionState.getActiveSlices() ) { + for (Slice shard : collectionState.getActiveSlices()) { for (Replica replica : shard.getReplicas()) { if (stoppedCoreName.isPresent() && stoppedCoreName.get().equals(replica.getCoreName())) { continue; // We know that the snapshot was not created for this replica. @@ -266,7 +293,8 @@ public void testSnapshots() throws Exception { try (SolrClient adminClient = getHttpSolrClient(replicaBaseUrl)) { Collection snapshots = listCoreSnapshots(adminClient, coreName); - Optional metaData = snapshots.stream().filter(x -> commitName.equals(x.getName())).findFirst(); + Optional metaData = + snapshots.stream().filter(x -> commitName.equals(x.getName())).findFirst(); assertFalse("Snapshot not deleted for core " + coreName, metaData.isPresent()); // Remove the entry for core if the snapshot is deleted successfully. snapshotByCoreName.remove(coreName); @@ -282,49 +310,64 @@ public void testSnapshots() throws Exception { { String commitName_2 = commitName + "_2"; - CollectionAdminRequest.CreateSnapshot createSnap_2 = new CollectionAdminRequest.CreateSnapshot(collectionName, commitName_2); + CollectionAdminRequest.CreateSnapshot createSnap_2 = + new CollectionAdminRequest.CreateSnapshot(collectionName, commitName_2); assertEquals(0, createSnap_2.process(solrClient).getStatus()); - Collection collectionSnaps_2 = listCollectionSnapshots(solrClient, collectionName); + Collection collectionSnaps_2 = + listCollectionSnapshots(solrClient, collectionName); assertEquals(1, collectionSnaps.size()); assertEquals(commitName_2, collectionSnaps_2.iterator().next().getName()); // Delete collection - CollectionAdminRequest.Delete deleteCol = CollectionAdminRequest.deleteCollection(collectionName); + CollectionAdminRequest.Delete deleteCol = + CollectionAdminRequest.deleteCollection(collectionName); assertEquals(0, deleteCol.process(solrClient).getStatus()); - assertTrue(SolrSnapshotManager.listSnapshots(solrClient.getZkStateReader().getZkClient(), collectionName).isEmpty()); + assertTrue( + SolrSnapshotManager.listSnapshots( + solrClient.getZkStateReader().getZkClient(), collectionName) + .isEmpty()); } - } @SuppressWarnings({"unchecked"}) - private Collection listCollectionSnapshots(SolrClient adminClient, String collectionName) throws Exception { - CollectionAdminRequest.ListSnapshots listSnapshots = new CollectionAdminRequest.ListSnapshots(collectionName); + private Collection listCollectionSnapshots( + SolrClient adminClient, String collectionName) throws Exception { + CollectionAdminRequest.ListSnapshots listSnapshots = + new CollectionAdminRequest.ListSnapshots(collectionName); CollectionAdminResponse resp = listSnapshots.process(adminClient); - assertTrue( resp.getResponse().get(SolrSnapshotManager.SNAPSHOTS_INFO) instanceof NamedList ); - NamedList apiResult = (NamedList) resp.getResponse().get(SolrSnapshotManager.SNAPSHOTS_INFO); + 
assertTrue(resp.getResponse().get(SolrSnapshotManager.SNAPSHOTS_INFO) instanceof NamedList); + NamedList apiResult = + (NamedList) resp.getResponse().get(SolrSnapshotManager.SNAPSHOTS_INFO); Collection result = new ArrayList<>(); for (int i = 0; i < apiResult.size(); i++) { - result.add(new CollectionSnapshotMetaData((NamedList)apiResult.getVal(i))); + result.add(new CollectionSnapshotMetaData((NamedList) apiResult.getVal(i))); } return result; } - private Collection listCoreSnapshots(SolrClient adminClient, String coreName) throws Exception { + private Collection listCoreSnapshots(SolrClient adminClient, String coreName) + throws Exception { ListSnapshots req = new ListSnapshots(); req.setCoreName(coreName); NamedList resp = adminClient.request(req); - assertTrue( resp.get(SolrSnapshotManager.SNAPSHOTS_INFO) instanceof NamedList ); + assertTrue(resp.get(SolrSnapshotManager.SNAPSHOTS_INFO) instanceof NamedList); NamedList apiResult = (NamedList) resp.get(SolrSnapshotManager.SNAPSHOTS_INFO); List result = new ArrayList<>(apiResult.size()); - for(int i = 0 ; i < apiResult.size(); i++) { + for (int i = 0; i < apiResult.size(); i++) { String commitName = apiResult.getName(i); - String indexDirPath = (String)((NamedList)apiResult.get(commitName)).get(SolrSnapshotManager.INDEX_DIR_PATH); - long genNumber = Long.parseLong((String)((NamedList)apiResult.get(commitName)).get(SolrSnapshotManager.GENERATION_NUM)); + String indexDirPath = + (String) + ((NamedList) apiResult.get(commitName)).get(SolrSnapshotManager.INDEX_DIR_PATH); + long genNumber = + Long.parseLong( + (String) + ((NamedList) apiResult.get(commitName)) + .get(SolrSnapshotManager.GENERATION_NUM)); result.add(new SnapshotMetaData(commitName, indexDirPath, genNumber)); } return result; diff --git a/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCoreSnapshots.java b/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCoreSnapshots.java index 9f332951ac5..78f4abe19e0 100644 --- a/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCoreSnapshots.java +++ b/solr/core/src/test/org/apache/solr/core/snapshots/TestSolrCoreSnapshots.java @@ -25,7 +25,6 @@ import java.util.List; import java.util.Map; import java.util.Optional; - import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexNotFoundException; @@ -60,10 +59,12 @@ /** * Tests for index backing up and restoring index snapshots. * - * These tests use the deprecated "full-snapshot" based backup method. For tests that cover similar snapshot - * functionality incrementally, see {@link org.apache.solr.handler.TestIncrementalCoreBackup} + *

These tests use the deprecated "full-snapshot" based backup method. For tests that cover + * similar snapshot functionality incrementally, see {@link + * org.apache.solr.handler.TestIncrementalCoreBackup} */ -@LuceneTestCase.SuppressCodecs({"SimpleText"}) // Backups do checksum validation against a footer value not present in 'SimpleText' +// Backups do checksum validation against a footer value not present in 'SimpleText' +@LuceneTestCase.SuppressCodecs({"SimpleText"}) @SolrTestCaseJ4.SuppressSSL // Currently unknown why SSL does not work with this test @Slow public class TestSolrCoreSnapshots extends SolrCloudTestCase { @@ -74,8 +75,9 @@ public class TestSolrCoreSnapshots extends SolrCloudTestCase { public static void setupClass() throws Exception { System.setProperty("solr.allowPaths", "*"); useFactory("solr.StandardDirectoryFactory"); - configureCluster(1)// nodes - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + configureCluster(1) // nodes + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .configure(); docsSeed = random().nextLong(); } @@ -91,13 +93,15 @@ public static void teardownClass() throws Exception { public void testBackupRestore() throws Exception { CloudSolrClient solrClient = cluster.getSolrClient(); String collectionName = "SolrCoreSnapshots"; - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, "conf1", 1, 1); + CollectionAdminRequest.Create create = + CollectionAdminRequest.createCollection(collectionName, "conf1", 1, 1); create.process(solrClient); String location = createTempDir().toFile().getAbsolutePath(); int nDocs = BackupRestoreUtils.indexDocs(cluster.getSolrClient(), collectionName, docsSeed); - DocCollection collectionState = solrClient.getZkStateReader().getClusterState().getCollection(collectionName); + DocCollection collectionState = + solrClient.getZkStateReader().getClusterState().getCollection(collectionName); assertEquals(1, collectionState.getActiveSlices().size()); Slice shard = collectionState.getActiveSlices().iterator().next(); assertEquals(1, shard.getReplicas().size()); @@ -109,8 +113,8 @@ public void testBackupRestore() throws Exception { String commitName = TestUtil.randomSimpleString(random(), 1, 5); String duplicateName = commitName.concat("_duplicate"); - try ( - SolrClient adminClient = getHttpSolrClient(cluster.getJettySolrRunners().get(0).getBaseUrl().toString()); + try (SolrClient adminClient = + getHttpSolrClient(cluster.getJettySolrRunners().get(0).getBaseUrl().toString()); SolrClient leaderClient = getHttpSolrClient(replica.getCoreUrl())) { SnapshotMetaData metaData = createSnapshot(adminClient, coreName, commitName); @@ -118,16 +122,16 @@ public void testBackupRestore() throws Exception { // reference counting implementation during snapshot deletion. 
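The duplicate snapshot created next pins the same physical index commit, so deleting either snapshot must leave the commit's files on disk until no snapshot references them. A toy sketch of that bookkeeping idea, keyed by commit generation; this is an illustration, not Solr's actual implementation:

import java.util.HashMap;
import java.util.Map;

class CommitRefCount {
  private final Map<Long, Integer> refsByGeneration = new HashMap<>();

  void snapshotCreated(long generation) {
    refsByGeneration.merge(generation, 1, Integer::sum);
  }

  /** Returns true once the commit's files are no longer pinned by any snapshot. */
  boolean snapshotDeleted(long generation) {
    Integer remaining = refsByGeneration.computeIfPresent(generation, (g, n) -> n - 1);
    if (remaining != null && remaining == 0) {
      refsByGeneration.remove(generation);
      return true; // safe to purge the commit's index files
    }
    return false;
  }
}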
      SnapshotMetaData duplicateCommit = createSnapshot(adminClient, coreName, duplicateName);

-      assertEquals (metaData.getIndexDirPath(), duplicateCommit.getIndexDirPath());
-      assertEquals (metaData.getGenerationNumber(), duplicateCommit.getGenerationNumber());
+      assertEquals(metaData.getIndexDirPath(), duplicateCommit.getIndexDirPath());
+      assertEquals(metaData.getGenerationNumber(), duplicateCommit.getGenerationNumber());

       // Delete all documents
       leaderClient.deleteByQuery("*:*");
       leaderClient.commit();
       BackupRestoreUtils.verifyDocs(0, cluster.getSolrClient(), collectionName);

-      // Verify that the index directory contains at least 2 index commits - one referred by the snapshots
-      // and the other containing document deletions.
+      // Verify that the index directory contains at least 2 index commits - one referred by the
+      // snapshots and the other containing document deletions.
       {
         List<IndexCommit> commits = listCommits(metaData.getIndexDirPath());
         assertTrue(commits.size() >= 2);
@@ -135,26 +139,28 @@ public void testBackupRestore() throws Exception {

       // Backup the earlier created snapshot.
       {
-        Map<String,String> params = new HashMap<>();
+        Map<String, String> params = new HashMap<>();
         params.put("name", backupName);
         params.put("commitName", commitName);
         params.put("location", location);
         params.put("incremental", "false");
-        BackupRestoreUtils.runCoreAdminCommand(replicaBaseUrl, coreName, CoreAdminAction.BACKUPCORE.toString(), params);
+        BackupRestoreUtils.runCoreAdminCommand(
+            replicaBaseUrl, coreName, CoreAdminAction.BACKUPCORE.toString(), params);
       }

       // Restore the backup
       {
-        Map<String,String> params = new HashMap<>();
+        Map<String, String> params = new HashMap<>();
         params.put("name", "snapshot." + backupName);
         params.put("location", location);
-        BackupRestoreUtils.runCoreAdminCommand(replicaBaseUrl, coreName, CoreAdminAction.RESTORECORE.toString(), params);
+        BackupRestoreUtils.runCoreAdminCommand(
+            replicaBaseUrl, coreName, CoreAdminAction.RESTORECORE.toString(), params);
         BackupRestoreUtils.verifyDocs(nDocs, cluster.getSolrClient(), collectionName);
       }

-      // Verify that the old index directory (before restore) contains only those index commits referred by snapshots.
-      // The IndexWriter (used to cleanup index files) creates an additional commit during closing. Hence we expect 2 commits (instead
-      // of 1).
+      // Verify that the old index directory (before restore) contains only those index commits
+      // referred by snapshots. The IndexWriter (used to cleanup index files) creates an additional
+      // commit during closing. Hence we expect 2 commits (instead of 1).
       {
         List<IndexCommit> commits = listCommits(metaData.getIndexDirPath());
         assertEquals(2, commits.size());
@@ -173,8 +179,8 @@ public void testBackupRestore() throws Exception {
         // Verify that corresponding index files have been deleted. Ideally this directory should
         // be removed immediately. But the current DirectoryFactory impl waits until the
         // closing the core (or the directoryFactory) for actual removal. Since the IndexWriter
-        // (used to cleanup index files) creates an additional commit during closing, we expect a single
-        // commit (instead of 0).
+        // (used to cleanup index files) creates an additional commit during closing, we expect a
+        // single commit (instead of 0).
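BackupRestoreUtils.runCoreAdminCommand is a test utility; underneath it is a plain CoreAdmin HTTP call. A rough sketch of the URL it would issue for the BACKUPCORE action above (RESTORECORE is analogous, with name set to "snapshot." plus the backup name); the host, core, and parameter values are placeholders:

import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.LinkedHashMap;
import java.util.Map;

class CoreAdminUrlSketch {
  static String backupCoreUrl(String baseUrl, String coreName) {
    Map<String, String> params = new LinkedHashMap<>();
    params.put("action", "BACKUPCORE");
    params.put("core", coreName);
    params.put("name", "mybackup");          // hypothetical backup name
    params.put("commitName", "mysnapshot");  // snapshot to back up
    params.put("location", "/tmp/backups");  // hypothetical location
    params.put("incremental", "false");      // the deprecated full-snapshot method
    StringBuilder url = new StringBuilder(baseUrl).append("/admin/cores?");
    params.forEach(
        (k, v) ->
            url.append(k)
                .append('=')
                .append(URLEncoder.encode(v, StandardCharsets.UTF_8))
                .append('&'));
    return url.substring(0, url.length() - 1); // drop the trailing '&'
  }
}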
        assertEquals(1, listCommits(duplicateCommit.getIndexDirPath()).size());
      }
    }
  }

@@ -183,12 +189,14 @@ public void testBackupRestore() throws Exception {
   public void testIndexOptimization() throws Exception {
     CloudSolrClient solrClient = cluster.getSolrClient();
     String collectionName = "SolrCoreSnapshots_IndexOptimization";
-    CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, "conf1", 1, 1);
+    CollectionAdminRequest.Create create =
+        CollectionAdminRequest.createCollection(collectionName, "conf1", 1, 1);
     create.process(solrClient);

     int nDocs = BackupRestoreUtils.indexDocs(cluster.getSolrClient(), collectionName, docsSeed);

-    DocCollection collectionState = solrClient.getZkStateReader().getClusterState().getCollection(collectionName);
+    DocCollection collectionState =
+        solrClient.getZkStateReader().getClusterState().getCollection(collectionName);
     assertEquals(1, collectionState.getActiveSlices().size());
     Slice shard = collectionState.getActiveSlices().iterator().next();
     assertEquals(1, shard.getReplicas().size());
@@ -197,24 +205,24 @@ public void testIndexOptimization() throws Exception {
     String coreName = replica.getStr(ZkStateReader.CORE_NAME_PROP);
     String commitName = TestUtil.randomSimpleString(random(), 1, 5);

-    try (
-        SolrClient adminClient = getHttpSolrClient(cluster.getJettySolrRunners().get(0).getBaseUrl().toString());
+    try (SolrClient adminClient =
+            getHttpSolrClient(cluster.getJettySolrRunners().get(0).getBaseUrl().toString());
         SolrClient leaderClient = getHttpSolrClient(replica.getCoreUrl())) {

       SnapshotMetaData metaData = createSnapshot(adminClient, coreName, commitName);

       int numTests = nDocs > 0 ? TestUtil.nextInt(random(), 1, 5) : 1;
-      for (int attempt=0; attempt<numTests; attempt++) {
+      for (int attempt = 0; attempt < numTests; attempt++) {
         if (nDocs > 0) {
-          //Delete a few docs
+          // Delete a few docs
           int numDeletes = TestUtil.nextInt(random(), 1, nDocs);
-          for(int i=0; i<numDeletes; i++) {

     NamedList apiResult = (NamedList) resp.get("snapshots");
     List<SnapshotMetaData> result = new ArrayList<>(apiResult.size());
-    for(int i = 0 ; i < apiResult.size(); i++) {
+    for (int i = 0; i < apiResult.size(); i++) {
       String commitName = apiResult.getName(i);
-      String indexDirPath = (String)((NamedList)apiResult.get(commitName)).get("indexDirPath");
-      long genNumber = Long.parseLong((String)((NamedList)apiResult.get(commitName)).get("generation"));
+      String indexDirPath = (String) ((NamedList) apiResult.get(commitName)).get("indexDirPath");
+      long genNumber =
+          Long.parseLong((String) ((NamedList) apiResult.get(commitName)).get("generation"));
       result.add(new SnapshotMetaData(commitName, indexDirPath, genNumber));
     }
     return result;
@@ -313,8 +342,8 @@ private List<IndexCommit> listCommits(String directory) throws Exception {
     try {
       return DirectoryReader.listCommits(dir);
     } catch (IndexNotFoundException ex) {
-      // This can happen when the delete snapshot functionality cleans up the index files (when the directory
-      // storing these files is not the *current* index directory).
+      // This can happen when the delete snapshot functionality cleans up the index files (when the
+      // directory storing these files is not the *current* index directory).
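For reference, a self-contained version of the listCommits helper that the hunk above touches, assuming a plain filesystem index path (the test resolves the directory itself; only the DirectoryReader.listCommits call and the IndexNotFoundException fallback are taken from it):

import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexNotFoundException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

class CommitListerSketch {
  static List<IndexCommit> listCommits(String directory) throws Exception {
    try (Directory dir = FSDirectory.open(Paths.get(directory))) {
      // Every commit point still present in the directory, oldest first.
      return DirectoryReader.listCommits(dir);
    } catch (IndexNotFoundException ex) {
      // Snapshot cleanup may already have emptied this directory.
      return Collections.emptyList();
    }
  }
}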
return Collections.emptyList(); } } diff --git a/solr/core/src/test/org/apache/solr/filestore/TestDistribPackageStore.java b/solr/core/src/test/org/apache/solr/filestore/TestDistribPackageStore.java index e25be647f98..ea9926402eb 100644 --- a/solr/core/src/test/org/apache/solr/filestore/TestDistribPackageStore.java +++ b/solr/core/src/test/org/apache/solr/filestore/TestDistribPackageStore.java @@ -17,6 +17,21 @@ package org.apache.solr.filestore; +import static org.apache.solr.common.util.Utils.JAVABINCONSUMER; +import static org.apache.solr.core.TestSolrConfigHandler.getFileContent; +import static org.hamcrest.CoreMatchers.containsString; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; +import java.nio.file.Paths; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.Callable; +import java.util.function.Predicate; import org.apache.commons.codec.digest.DigestUtils; import org.apache.http.client.methods.HttpDelete; import org.apache.solr.client.solrj.SolrClient; @@ -42,23 +57,8 @@ import org.junit.Before; import org.junit.Test; -import java.io.IOException; -import java.io.InputStream; -import java.nio.ByteBuffer; -import java.nio.file.Paths; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.Collections; -import java.util.List; -import java.util.concurrent.Callable; -import java.util.function.Predicate; - -import static org.apache.solr.common.util.Utils.JAVABINCONSUMER; -import static org.apache.solr.core.TestSolrConfigHandler.getFileContent; -import static org.hamcrest.CoreMatchers.containsString; - -@LogLevel("org.apache.solr.filestore.PackageStoreAPI=DEBUG;org.apache.solr.filestore.DistribPackageStore=DEBUG") +@LogLevel( + "org.apache.solr.filestore.PackageStoreAPI=DEBUG;org.apache.solr.filestore.DistribPackageStore=DEBUG") public class TestDistribPackageStore extends SolrCloudTestCase { @Before @@ -70,136 +70,152 @@ public void setup() { public void teardown() { System.clearProperty("enable.packages"); } - + @Test public void testPackageStoreManagement() throws Exception { MiniSolrCloudCluster cluster = configureCluster(4) - .withJettyConfig(jetty -> jetty.enableV2(true)) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + .withJettyConfig(jetty -> jetty.enableV2(true)) + .addConfig("conf", configset("cloud-minimal")) + .configure(); try { byte[] derFile = readFile("cryptokeys/pub_key512.der"); - uploadKey(derFile, PackageStoreAPI.KEYS_DIR+"/pub_key512.der", cluster); + uploadKey(derFile, PackageStoreAPI.KEYS_DIR + "/pub_key512.der", cluster); try { - postFile(cluster.getSolrClient(), getFileContent("runtimecode/runtimelibs.jar.bin"), + postFile( + cluster.getSolrClient(), + getFileContent("runtimecode/runtimelibs.jar.bin"), "/package/mypkg/v1.0/runtimelibs.jar", - "j+Rflxi64tXdqosIhbusqi6GTwZq8znunC/dzwcWW0/dHlFGKDurOaE1Nz9FSPJuXbHkVLj638yZ0Lp1ssnoYA==" - ); + "j+Rflxi64tXdqosIhbusqi6GTwZq8znunC/dzwcWW0/dHlFGKDurOaE1Nz9FSPJuXbHkVLj638yZ0Lp1ssnoYA=="); fail("should have failed because of wrong signature "); } catch (RemoteExecutionException e) { assertThat(e.getMessage(), containsString("Signature does not match")); } - postFile(cluster.getSolrClient(), getFileContent("runtimecode/runtimelibs.jar.bin"), + postFile( + cluster.getSolrClient(), + getFileContent("runtimecode/runtimelibs.jar.bin"), "/package/mypkg/v1.0/runtimelibs.jar", - 
"L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ==" - ); + "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ=="); - assertResponseValues(10, + assertResponseValues( + 10, cluster.getSolrClient(), new V2Request.Builder("/node/files/package/mypkg/v1.0") .withMethod(SolrRequest.METHOD.GET) .build(), Map.of( ":files:/package/mypkg/v1.0[0]:name", "runtimelibs.jar", - ":files:/package/mypkg/v1.0[0]:sha512", "d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420", - ":files:/package/mypkg/v1.0[0]:sig[0]", "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ==" - ) - ); + ":files:/package/mypkg/v1.0[0]:sha512", + "d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420", + ":files:/package/mypkg/v1.0[0]:sig[0]", + "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ==")); - assertResponseValues(10, + assertResponseValues( + 10, cluster.getSolrClient(), new V2Request.Builder("/node/files/package/mypkg") .withMethod(SolrRequest.METHOD.GET) .build(), Map.of( ":files:/package/mypkg[0]:name", "v1.0", - ":files:/package/mypkg[0]:dir", "true" - ) - ); + ":files:/package/mypkg[0]:dir", "true")); - Map expected = Map.of( - ":files:/package/mypkg/v1.0/runtimelibs.jar:name", "runtimelibs.jar", - ":files:/package/mypkg/v1.0/runtimelibs.jar:sha512", "d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420" - ); - checkAllNodesForFile(cluster,"/package/mypkg/v1.0/runtimelibs.jar", expected, true); - postFile(cluster.getSolrClient(), getFileContent("runtimecode/runtimelibs_v2.jar.bin"), + Map expected = + Map.of( + ":files:/package/mypkg/v1.0/runtimelibs.jar:name", "runtimelibs.jar", + ":files:/package/mypkg/v1.0/runtimelibs.jar:sha512", + "d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420"); + checkAllNodesForFile(cluster, "/package/mypkg/v1.0/runtimelibs.jar", expected, true); + postFile( + cluster.getSolrClient(), + getFileContent("runtimecode/runtimelibs_v2.jar.bin"), "/package/mypkg/v1.0/runtimelibs_v2.jar", - null - ); - expected = Map.of( - ":files:/package/mypkg/v1.0/runtimelibs_v2.jar:name", "runtimelibs_v2.jar", - ":files:/package/mypkg/v1.0/runtimelibs_v2.jar:sha512", - "bc5ce45ad281b6a08fb7e529b1eb475040076834816570902acb6ebdd809410e31006efdeaa7f78a6c35574f3504963f5f7e4d92247d0eb4db3fc9abdda5d417" - ); - checkAllNodesForFile(cluster,"/package/mypkg/v1.0/runtimelibs_v2.jar", expected, false); - expected = Map.of( - ":files:/package/mypkg/v1.0", (Predicate) o -> { - List l = (List) o; - assertEquals(2, l.size()); - Set expectedKeys = Set.of("runtimelibs_v2.jar", "runtimelibs.jar"); - for (Object file : l) { - if(! 
expectedKeys.contains(Utils.getObjectByPath(file, true, "name"))) return false; - } - return true; - } - ); + null); + expected = + Map.of( + ":files:/package/mypkg/v1.0/runtimelibs_v2.jar:name", + "runtimelibs_v2.jar", + ":files:/package/mypkg/v1.0/runtimelibs_v2.jar:sha512", + "bc5ce45ad281b6a08fb7e529b1eb475040076834816570902acb6ebdd809410e31006efdeaa7f78a6c35574f3504963f5f7e4d92247d0eb4db3fc9abdda5d417"); + checkAllNodesForFile(cluster, "/package/mypkg/v1.0/runtimelibs_v2.jar", expected, false); + expected = + Map.of( + ":files:/package/mypkg/v1.0", + (Predicate) + o -> { + List l = (List) o; + assertEquals(2, l.size()); + Set expectedKeys = Set.of("runtimelibs_v2.jar", "runtimelibs.jar"); + for (Object file : l) { + if (!expectedKeys.contains(Utils.getObjectByPath(file, true, "name"))) + return false; + } + return true; + }); for (JettySolrRunner jettySolrRunner : cluster.getJettySolrRunners()) { String baseUrl = jettySolrRunner.getBaseUrl().toString().replace("/solr", "/api"); String url = baseUrl + "/node/files/package/mypkg/v1.0?wt=javabin"; assertResponseValues(10, new Fetcher(url, jettySolrRunner), expected); } // Delete Jars - DistribPackageStore.deleteZKFileEntry(cluster.getZkClient(), "/package/mypkg/v1.0/runtimelibs.jar"); + DistribPackageStore.deleteZKFileEntry( + cluster.getZkClient(), "/package/mypkg/v1.0/runtimelibs.jar"); JettySolrRunner j = cluster.getRandomJetty(random()); String path = j.getBaseURLV2() + "/cluster/files" + "/package/mypkg/v1.0/runtimelibs.jar"; HttpDelete del = new HttpDelete(path); - try(HttpSolrClient cl = (HttpSolrClient) j.newClient()) { + try (HttpSolrClient cl = (HttpSolrClient) j.newClient()) { Utils.executeHttpMethod(cl.getHttpClient(), path, Utils.JSONCONSUMER, del); } expected = Collections.singletonMap(":files:/package/mypkg/v1.0/runtimelibs.jar", null); - checkAllNodesForFile(cluster,"/package/mypkg/v1.0/runtimelibs.jar", expected, false); + checkAllNodesForFile(cluster, "/package/mypkg/v1.0/runtimelibs.jar", expected, false); } finally { cluster.shutdown(); } } - public static void checkAllNodesForFile(MiniSolrCloudCluster cluster, String path, Map expected , boolean verifyContent) throws Exception { + public static void checkAllNodesForFile( + MiniSolrCloudCluster cluster, + String path, + Map expected, + boolean verifyContent) + throws Exception { for (JettySolrRunner jettySolrRunner : cluster.getJettySolrRunners()) { String baseUrl = jettySolrRunner.getBaseUrl().toString().replace("/solr", "/api"); String url = baseUrl + "/node/files" + path + "?wt=javabin&meta=true"; assertResponseValues(10, new Fetcher(url, jettySolrRunner), expected); - if(verifyContent) { + if (verifyContent) { try (HttpSolrClient solrClient = (HttpSolrClient) jettySolrRunner.newClient()) { - ByteBuffer buf = Utils.executeGET(solrClient.getHttpClient(), baseUrl + "/node/files" + path, - Utils.newBytesConsumer(Integer.MAX_VALUE)); + ByteBuffer buf = + Utils.executeGET( + solrClient.getHttpClient(), + baseUrl + "/node/files" + path, + Utils.newBytesConsumer(Integer.MAX_VALUE)); assertEquals( "d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420", - DigestUtils.sha512Hex(new ByteBufferInputStream(buf)) - ); - + DigestUtils.sha512Hex(new ByteBufferInputStream(buf))); } } - } } - public static class Fetcher implements Callable { String url; JettySolrRunner jetty; - public Fetcher(String s, JettySolrRunner jettySolrRunner){ + + public Fetcher(String s, JettySolrRunner jettySolrRunner) { this.url 
= s; this.jetty = jettySolrRunner; } + @Override public NavigableObject call() throws Exception { try (HttpSolrClient solrClient = (HttpSolrClient) jetty.newClient()) { - return (NavigableObject) Utils.executeGET(solrClient.getHttpClient(), this.url, JAVABINCONSUMER); + return (NavigableObject) + Utils.executeGET(solrClient.getHttpClient(), this.url, JAVABINCONSUMER); } } @@ -207,20 +223,21 @@ public NavigableObject call() throws Exception { public String toString() { return url; } - } - public static NavigableObject assertResponseValues(int repeats, SolrClient client, - SolrRequest req, - Map vals) throws Exception { + public static NavigableObject assertResponseValues( + int repeats, SolrClient client, SolrRequest req, Map vals) + throws Exception { Callable callable = () -> req.process(client); - return assertResponseValues(repeats, callable,vals); + return assertResponseValues(repeats, callable, vals); } /** - * Evaluate the given predicates or objects against the given values, obtained by running a given callable. - * The values to verify are either predicates to evaluate directly, or strings to compare for equality. + * Evaluate the given predicates or objects against the given values, obtained by running a given + * callable. The values to verify are either predicates to evaluate directly, or strings to + * compare for equality. + * * @param repeats how many attempts to make with the Callable * @param callable the code to execute getting a result * @param vals the values to check in the result, this is a map of paths to predicates or values @@ -228,8 +245,8 @@ public static NavigableObject assertResponseValues(int repeats, SolrClient clien * @throws Exception if the callable throws an Exception, or on interrupt between retries */ @SuppressWarnings({"unchecked"}) - public static NavigableObject assertResponseValues(int repeats, Callable callable, - Map vals) throws Exception { + public static NavigableObject assertResponseValues( + int repeats, Callable callable, Map vals) throws Exception { NavigableObject rsp = null; for (int i = 0; i < repeats; i++) { @@ -248,35 +265,60 @@ public static NavigableObject assertResponseValues(int repeats, Callable key = StrUtils.split(k, '/'); Object val = entry.getValue(); - // TODO: This map should just be and we should instead provide a static eq() method for callers - Predicate p = val instanceof Predicate ? (Predicate) val : o -> { - String v = o == null ? null : o.toString(); - return Objects.equals(val, v); - }; + // TODO: This map should just be and we should instead provide a static + // eq() method for callers + Predicate p = + val instanceof Predicate + ? (Predicate) val + : o -> { + String v = o == null ? 
null : o.toString(); + return Objects.equals(val, v); + }; Object actual = rsp._get(key, null); - passed = passed && p.test(actual); // Important: check all of the values, not just the first one + // Important: check all of the values, not just the first one + passed = passed && p.test(actual); if (!passed && i >= repeats - 1) { String description = Utils.toJSONString(rsp); if (rsp instanceof SimpleSolrResponse) { description = ((SimpleSolrResponse) rsp).getResponse().jsonStr(); } - // we know these are unequal but call assert instead of fail() because it gives a better error message - assertEquals("Failed on path " + key + " of " + description + "after attempt #" + (i+1), - val, Utils.toJSONString(actual)); + // we know these are unequal but call assert instead of fail() because it gives a better + // error message + assertEquals( + "Failed on path " + key + " of " + description + "after attempt #" + (i + 1), + val, + Utils.toJSONString(actual)); } } } return rsp; } - public static void uploadKey(byte[] bytes, String path, MiniSolrCloudCluster cluster) throws Exception { + public static void uploadKey(byte[] bytes, String path, MiniSolrCloudCluster cluster) + throws Exception { JettySolrRunner jetty = cluster.getRandomJetty(random()); - try(HttpSolrClient client = (HttpSolrClient) jetty.newClient()) { - PackageUtils.uploadKey(bytes, path, Paths.get(jetty.getCoreContainer().getSolrHome()), client); - Object resp = Utils.executeGET(client.getHttpClient(), jetty.getBaseURLV2().toString() + "/node/files" + path + "?sync=true", null); - System.out.println("sync resp: "+jetty.getBaseURLV2().toString() + "/node/files" + path + "?sync=true" + " ,is: " + resp); + try (HttpSolrClient client = (HttpSolrClient) jetty.newClient()) { + PackageUtils.uploadKey( + bytes, path, Paths.get(jetty.getCoreContainer().getSolrHome()), client); + Object resp = + Utils.executeGET( + client.getHttpClient(), + jetty.getBaseURLV2().toString() + "/node/files" + path + "?sync=true", + null); + System.out.println( + "sync resp: " + + jetty.getBaseURLV2().toString() + + "/node/files" + + path + + "?sync=true" + + " ,is: " + + resp); } - checkAllNodesForFile(cluster,path, Map.of(":files:" + path + ":name", (Predicate) Objects::nonNull), false); + checkAllNodesForFile( + cluster, + path, + Map.of(":files:" + path + ":name", (Predicate) Objects::nonNull), + false); } public static void postFile(SolrClient client, ByteBuffer buffer, String name, String sig) @@ -284,25 +326,28 @@ public static void postFile(SolrClient client, ByteBuffer buffer, String name, S String resource = "/cluster/files" + name; ModifiableSolrParams params = new ModifiableSolrParams(); params.add("sig", sig); - V2Response rsp = new V2Request.Builder(resource) - .withMethod(SolrRequest.METHOD.PUT) - .withPayload(buffer) - .forceV2(true) - .withMimeType("application/octet-stream") - .withParams(params) - .build() - .process(client); + V2Response rsp = + new V2Request.Builder(resource) + .withMethod(SolrRequest.METHOD.PUT) + .withPayload(buffer) + .forceV2(true) + .withMimeType("application/octet-stream") + .withParams(params) + .build() + .process(client); assertEquals(name, rsp.getResponse().get(CommonParams.FILE)); } /** * Read and return the contents of the file-like resource + * * @param fname the name of the resource to read * @return the bytes of the resource * @throws IOException if there is an I/O error reading the contents */ public static byte[] readFile(String fname) throws IOException { - try (InputStream is = 
TestDistribPackageStore.class.getClassLoader().getResourceAsStream(fname)) { + try (InputStream is = + TestDistribPackageStore.class.getClassLoader().getResourceAsStream(fname)) { return is.readAllBytes(); } } diff --git a/solr/core/src/test/org/apache/solr/handler/AnalysisRequestHandlerTestBase.java b/solr/core/src/test/org/apache/solr/handler/AnalysisRequestHandlerTestBase.java index 57d4252975a..729eab2f505 100644 --- a/solr/core/src/test/org/apache/solr/handler/AnalysisRequestHandlerTestBase.java +++ b/solr/core/src/test/org/apache/solr/handler/AnalysisRequestHandlerTestBase.java @@ -16,14 +16,13 @@ */ package org.apache.solr.handler; +import org.apache.commons.lang3.ArrayUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.util.NamedList; -import org.apache.commons.lang3.ArrayUtils; /** * A base class for all analysis request handler tests. * - * * @since solr 1.4 */ public abstract class AnalysisRequestHandlerTestBase extends SolrTestCaseJ4 { @@ -37,7 +36,9 @@ protected void assertToken(NamedList token, TokenInfo info) { assertEquals(info.getStart(), token.get("start")); assertEquals(info.getEnd(), token.get("end")); assertEquals(info.getPosition(), token.get("position")); - assertArrayEquals(info.getPositionHistory(), ArrayUtils.toPrimitive((Integer[]) token.get("positionHistory"))); + assertArrayEquals( + info.getPositionHistory(), + ArrayUtils.toPrimitive((Integer[]) token.get("positionHistory"))); if (info.isMatch()) { assertEquals(Boolean.TRUE, token.get("match")); } @@ -46,8 +47,7 @@ protected void assertToken(NamedList token, TokenInfo info) { } } - - //================================================= Inner Classes ================================================== + // ===== Inner Classes ===== protected static class TokenInfo { @@ -62,15 +62,15 @@ protected static class TokenInfo { private boolean match; public TokenInfo( - String text, - String rawText, - String type, - int start, - int end, - int position, - int[] positionHistory, - String payload, - boolean match) { + String text, + String rawText, + String type, + int start, + int end, + int position, + int[] positionHistory, + String payload, + boolean match) { this.text = text; this.rawText = rawText; @@ -119,5 +119,4 @@ public boolean isMatch() { return match; } } - } diff --git a/solr/core/src/test/org/apache/solr/handler/BinaryUpdateRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/BinaryUpdateRequestHandlerTest.java index 95c8d417d67..1e74ce58842 100644 --- a/solr/core/src/test/org/apache/solr/handler/BinaryUpdateRequestHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/BinaryUpdateRequestHandlerTest.java @@ -17,7 +17,6 @@ package org.apache.solr.handler; import java.io.ByteArrayOutputStream; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.impl.BinaryRequestWriter; import org.apache.solr.client.solrj.request.RequestWriter; @@ -53,13 +52,15 @@ public void testRequestParams() throws Exception { BinaryRequestWriter brw = new BinaryRequestWriter(); BufferingRequestProcessor p = new BufferingRequestProcessor(null); SolrQueryResponse rsp = new SolrQueryResponse(); - try (SolrQueryRequest req = req(); UpdateRequestHandler handler = new UpdateRequestHandler()) { + try (SolrQueryRequest req = req(); + UpdateRequestHandler handler = new UpdateRequestHandler()) { handler.init(new NamedList<>()); ContentStreamLoader csl = handler.newLoader(req, p); RequestWriter.ContentWriter cw = brw.getContentWriter(ureq); ByteArrayOutputStream baos = new 
ByteArrayOutputStream(); cw.write(baos); - ContentStreamBase.ByteArrayStream cs = new ContentStreamBase.ByteArrayStream(baos.toByteArray(), null, "application/javabin"); + ContentStreamBase.ByteArrayStream cs = + new ContentStreamBase.ByteArrayStream(baos.toByteArray(), null, "application/javabin"); csl.load(req, rsp, cs, p); AddUpdateCommand add = p.addCommands.get(0); System.out.println(add.solrDoc); diff --git a/solr/core/src/test/org/apache/solr/handler/CSVRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/CSVRequestHandlerTest.java index 4bf8bb61862..a2cfa8131a0 100644 --- a/solr/core/src/test/org/apache/solr/handler/CSVRequestHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/CSVRequestHandlerTest.java @@ -26,7 +26,6 @@ import org.junit.BeforeClass; import org.junit.Test; - public class CSVRequestHandlerTest extends SolrTestCaseJ4 { @BeforeClass @@ -37,8 +36,10 @@ public static void beforeClass() throws Exception { @Test public void testCommitWithin() throws Exception { String csvString = "id;name\n123;hello"; - SolrQueryRequest req = req("separator", ";", - "commitWithin", "200"); + SolrQueryRequest req = + req( + "separator", ";", + "commitWithin", "200"); SolrQueryResponse rsp = new SolrQueryResponse(); BufferingRequestProcessor p = new BufferingRequestProcessor(null); diff --git a/solr/core/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java index 036f277ad27..012daa047ca 100644 --- a/solr/core/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java @@ -16,6 +16,13 @@ */ package org.apache.solr.handler; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.Reader; +import java.nio.charset.StandardCharsets; +import java.util.Collections; +import java.util.List; import org.apache.lucene.analysis.MockTokenizer; import org.apache.solr.client.solrj.request.DocumentAnalysisRequest; import org.apache.solr.common.SolrInputDocument; @@ -30,18 +37,9 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.util.Collections; -import java.util.List; -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.io.IOException; -import java.io.Reader; -import java.nio.charset.StandardCharsets; - /** * A test for {@link DocumentAnalysisRequestHandler}. 
 *
- *
 * @since solr 1.4
 */
public class DocumentAnalysisRequestHandlerTest extends AnalysisRequestHandlerTestBase {
@@ -62,30 +60,32 @@ public void setUp() throws Exception {
   }

   /**
-   * Tests the {@link DocumentAnalysisRequestHandler#resolveAnalysisRequest(org.apache.solr.request.SolrQueryRequest)}
+   * Tests the {@link
+   * DocumentAnalysisRequestHandler#resolveAnalysisRequest(org.apache.solr.request.SolrQueryRequest)}
    */
   @Test
   public void testResolveAnalysisRequest() throws Exception {
     String docsInput =
-        "<docs>" +
-            "<doc>" +
-            "<field name=\"id\">1</field>" +
-            "<field name=\"whitetok\">The Whitetok</field>" +
-            "<field name=\"text\">The Text</field>" +
-            "</doc>" +
-            "</docs>";
+        "<docs>"
+            + "<doc>"
+            + "<field name=\"id\">1</field>"
+            + "<field name=\"whitetok\">The Whitetok</field>"
+            + "<field name=\"text\">The Text</field>"
+            + "</doc>"
+            + "</docs>";

     final ContentStream cs = new ContentStreamBase.StringStream(docsInput);

     ModifiableSolrParams params = new ModifiableSolrParams();
     params.add("analysis.query", "The Query String");
     params.add("analysis.showmatch", "true");

-    SolrQueryRequest req = new SolrQueryRequestBase(h.getCore(), params) {
-      @Override
-      public Iterable<ContentStream> getContentStreams() {
-        return Collections.singleton(cs);
-      }
-    };
+    SolrQueryRequest req =
+        new SolrQueryRequestBase(h.getCore(), params) {
+          @Override
+          public Iterable<ContentStream> getContentStreams() {
+            return Collections.singleton(cs);
+          }
+        };

     DocumentAnalysisRequest request = handler.resolveAnalysisRequest(req);
@@ -113,9 +113,9 @@ public Iterable<ContentStream> getContentStreams() {

   /** A binary-only ContentStream */
   static class ByteStream extends ContentStreamBase {
     private final byte[] bytes;
-    
+
     public ByteStream(byte[] bytes, String contentType) {
-      this.bytes = bytes; 
+      this.bytes = bytes;
       this.contentType = contentType;
       name = null;
       size = Long.valueOf(bytes.length);
@@ -133,30 +133,30 @@ public Reader getReader() throws IOException {
     }
   }

-
   // This test should also test charset detection in UpdateRequestHandler,
   // but the DocumentAnalysisRequestHandler is simpler to use/check.
   @Test
   public void testCharsetInDocument() throws Exception {
-    final byte[] xmlBytes = (
-        "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>\r\n" +
-        "<docs>\r\n" +
-        " <doc>\r\n" +
-        "  <field name=\"id\">Müller</field>\r\n" +
-        " </doc>" +
-        "</docs>"
-    ).getBytes(StandardCharsets.ISO_8859_1);
-    
+    final byte[] xmlBytes =
+        ("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>\r\n"
+                + "<docs>\r\n"
+                + " <doc>\r\n"
+                + "  <field name=\"id\">Müller</field>\r\n"
+                + " </doc>"
+                + "</docs>")
+            .getBytes(StandardCharsets.ISO_8859_1);
+
     // we declare a content stream without charset:
     final ContentStream cs = new ByteStream(xmlBytes, "application/xml");
-    
+
     ModifiableSolrParams params = new ModifiableSolrParams();
-    SolrQueryRequest req = new SolrQueryRequestBase(h.getCore(), params) {
-      @Override
-      public Iterable<ContentStream> getContentStreams() {
-        return Collections.singleton(cs);
-      }
-    };
+    SolrQueryRequest req =
+        new SolrQueryRequestBase(h.getCore(), params) {
+          @Override
+          public Iterable<ContentStream> getContentStreams() {
+            return Collections.singleton(cs);
+          }
+        };

     DocumentAnalysisRequest request = handler.resolveAnalysisRequest(req);
     assertNotNull(request);
@@ -171,24 +171,25 @@ public Iterable<ContentStream> getContentStreams() {
   // but the DocumentAnalysisRequestHandler is simpler to use/check.
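Taken together, the two charset tests pin down the resolution order: an explicit charset on the stream's content type wins; otherwise the encoding declared in the XML prolog applies, with UTF-8 as the XML default. A rough standalone sketch of that precedence; the regexes and class name are illustrative, not Solr's parser:

import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

class CharsetResolutionSketch {
  private static final Pattern CONTENT_TYPE_CHARSET =
      Pattern.compile("charset\\s*=\\s*([^;\\s]+)", Pattern.CASE_INSENSITIVE);
  private static final Pattern XML_ENCODING =
      Pattern.compile("encoding\\s*=\\s*[\"']([^\"']+)[\"']");

  static Charset resolve(String contentType, String xmlProlog) {
    Matcher m = CONTENT_TYPE_CHARSET.matcher(contentType == null ? "" : contentType);
    if (m.find()) {
      return Charset.forName(m.group(1)); // header charset takes precedence
    }
    m = XML_ENCODING.matcher(xmlProlog == null ? "" : xmlProlog);
    if (m.find()) {
      return Charset.forName(m.group(1)); // fall back to the XML declaration
    }
    return StandardCharsets.UTF_8; // XML default when nothing is declared
  }
}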
  @Test
   public void testCharsetOutsideDocument() throws Exception {
-    final byte[] xmlBytes = (
-        "<docs>\r\n" +
-        " <doc>\r\n" +
-        "  <field name=\"id\">Müller</field>\r\n" +
-        " </doc>" +
-        "</docs>"
-    ).getBytes(StandardCharsets.ISO_8859_1);
-    
+    final byte[] xmlBytes =
+        ("<docs>\r\n"
+                + " <doc>\r\n"
+                + "  <field name=\"id\">Müller</field>\r\n"
+                + " </doc>"
+                + "</docs>")
+            .getBytes(StandardCharsets.ISO_8859_1);
+
     // we declare a content stream with charset:
     final ContentStream cs = new ByteStream(xmlBytes, "application/xml; charset=ISO-8859-1");
-    
+
     ModifiableSolrParams params = new ModifiableSolrParams();
-    SolrQueryRequest req = new SolrQueryRequestBase(h.getCore(), params) {
-      @Override
-      public Iterable<ContentStream> getContentStreams() {
-        return Collections.singleton(cs);
-      }
-    };
+    SolrQueryRequest req =
+        new SolrQueryRequestBase(h.getCore(), params) {
+          @Override
+          public Iterable<ContentStream> getContentStreams() {
+            return Collections.singleton(cs);
+          }
+        };

     DocumentAnalysisRequest request = handler.resolveAnalysisRequest(req);
     assertNotNull(request);
@@ -200,7 +201,8 @@ public Iterable<ContentStream> getContentStreams() {
   }

   /**
-   * Tests the {@link DocumentAnalysisRequestHandler#handleAnalysisRequest(org.apache.solr.client.solrj.request.DocumentAnalysisRequest,
+   * Tests the {@link
+   * DocumentAnalysisRequestHandler#handleAnalysisRequest(org.apache.solr.client.solrj.request.DocumentAnalysisRequest,
    * org.apache.solr.schema.IndexSchema)}
    */
   @Test
@@ -213,14 +215,14 @@ public void testHandleAnalysisRequest() throws Exception {
     document.addField("text", "The Fox Jumped Over The Dogs");
     document.addField("number_l_p", 88L);

-    DocumentAnalysisRequest request = new DocumentAnalysisRequest()
-        .setQuery("JUMPING")
-        .setShowMatch(true)
-        .addDocument(document);
+    DocumentAnalysisRequest request =
+        new DocumentAnalysisRequest().setQuery("JUMPING").setShowMatch(true).addDocument(document);

-    NamedList<Object> result = handler.handleAnalysisRequest(request, h.getCore().getLatestSchema());
+    NamedList<Object> result =
+        handler.handleAnalysisRequest(request, h.getCore().getLatestSchema());
     assertNotNull("result is null and it shouldn't be", result);

-    NamedList<NamedList<NamedList<Object>>> documentResult = (NamedList<NamedList<NamedList<Object>>>) result.get("1");
+    NamedList<NamedList<NamedList<Object>>> documentResult =
+        (NamedList<NamedList<NamedList<Object>>>) result.get("1");
     assertNotNull("An analysis for document with key '1' should be returned", documentResult);

     NamedList<Object> queryResult;
@@ -235,19 +237,26 @@ public void testHandleAnalysisRequest() throws Exception {
     queryResult = idResult.get("query");
     assertEquals("Only the default analyzer should be applied", 1, queryResult.size());
     name = queryResult.getName(0);
-    assertTrue("Only the default analyzer should be applied", name.matches("org.apache.solr.schema.FieldType\\$DefaultAnalyzer.*"));
+    assertTrue(
+        "Only the default analyzer should be applied",
+        name.matches("org.apache.solr.schema.FieldType\\$DefaultAnalyzer.*"));
     tokenList = (List) queryResult.getVal(0);
     assertEquals("Query has only one token", 1, tokenList.size());
-    assertToken(tokenList.get(0), new TokenInfo("JUMPING", null, "word", 0, 7, 1, new int[]{1}, null, false));
+    assertToken(
+        tokenList.get(0),
+        new TokenInfo("JUMPING", null, "word", 0, 7, 1, new int[] {1}, null, false));
     indexResult = idResult.get("index");
     assertEquals("The id field has only a single value", 1, indexResult.size());
     valueResult = (NamedList<List<NamedList>>) indexResult.get("1");
     assertEquals("Only the default analyzer should be applied", 1, valueResult.size());
     name = queryResult.getName(0);
-    assertTrue("Only the default analyzer should be applied", name.matches("org.apache.solr.schema.FieldType\\$DefaultAnalyzer.*"));
+    assertTrue(
+        "Only the default analyzer 
should be applied", + name.matches("org.apache.solr.schema.FieldType\\$DefaultAnalyzer.*")); tokenList = valueResult.getVal(0); assertEquals("The 'id' field value has only one token", 1, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("1", null, "word", 0, 1, 1, new int[]{1}, null, false)); + assertToken( + tokenList.get(0), new TokenInfo("1", null, "word", 0, 1, 1, new int[] {1}, null, false)); // the number_l_p field NamedList> number_l_p_Result = documentResult.get("number_l_p"); @@ -257,81 +266,156 @@ public void testHandleAnalysisRequest() throws Exception { valueResult = (NamedList>) indexResult.get("88"); assertEquals("Only the default analyzer should be applied", 1, valueResult.size()); name = queryResult.getName(0); - assertTrue("Only the default analyzer should be applied", name.matches("org.apache.solr.schema.FieldType\\$DefaultAnalyzer.*")); + assertTrue( + "Only the default analyzer should be applied", + name.matches("org.apache.solr.schema.FieldType\\$DefaultAnalyzer.*")); tokenList = valueResult.getVal(0); assertEquals("The 'number_l_p' field value has only one token", 1, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("88", null, "word", 0, 2, 1, new int[]{1}, null, false)); + assertToken( + tokenList.get(0), new TokenInfo("88", null, "word", 0, 2, 1, new int[] {1}, null, false)); // the name field NamedList> whitetokResult = documentResult.get("whitetok"); assertNotNull("an analysis for the 'whitetok' field should be returned", whitetokResult); queryResult = whitetokResult.get("query"); tokenList = (List) queryResult.get(MockTokenizer.class.getName()); - assertNotNull("Expecting the 'MockTokenizer' to be applied on the query for the 'whitetok' field", tokenList); + assertNotNull( + "Expecting the 'MockTokenizer' to be applied on the query for the 'whitetok' field", + tokenList); assertEquals("Query has only one token", 1, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("JUMPING", null, "word", 0, 7, 1, new int[]{1}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo("JUMPING", null, "word", 0, 7, 1, new int[] {1}, null, false)); indexResult = whitetokResult.get("index"); assertEquals("The 'whitetok' field has only a single value", 1, indexResult.size()); valueResult = (NamedList>) indexResult.get("Jumping Jack"); tokenList = valueResult.getVal(0); assertEquals("Expecting 2 tokens to be present", 2, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("Jumping", null, "word", 0, 7, 1, new int[]{1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("Jack", null, "word", 8, 12, 2, new int[]{2}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo("Jumping", null, "word", 0, 7, 1, new int[] {1}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("Jack", null, "word", 8, 12, 2, new int[] {2}, null, false)); // the text field NamedList> textResult = documentResult.get("text"); assertNotNull("an analysis for the 'text' field should be returned", textResult); queryResult = textResult.get("query"); - tokenList = (List) queryResult.get("org.apache.lucene.analysis.standard.StandardTokenizer"); - assertNotNull("Expecting the 'StandardTokenizer' to be applied on the query for the 'text' field", tokenList); + tokenList = + (List) queryResult.get("org.apache.lucene.analysis.standard.StandardTokenizer"); + assertNotNull( + "Expecting the 'StandardTokenizer' to be applied on the query for the 'text' field", + tokenList); assertEquals("Query has only one token", 1, 
tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("JUMPING", null, "", 0, 7, 1, new int[]{1}, null, false)); - tokenList = (List) queryResult.get("org.apache.lucene.analysis.core.LowerCaseFilter"); - assertNotNull("Expecting the 'LowerCaseFilter' to be applied on the query for the 'text' field", tokenList); + assertToken( + tokenList.get(0), + new TokenInfo("JUMPING", null, "", 0, 7, 1, new int[] {1}, null, false)); + tokenList = + (List) queryResult.get("org.apache.lucene.analysis.core.LowerCaseFilter"); + assertNotNull( + "Expecting the 'LowerCaseFilter' to be applied on the query for the 'text' field", + tokenList); assertEquals("Query has only one token", 1, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("jumping", null, "", 0, 7, 1, new int[]{1,1}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo("jumping", null, "", 0, 7, 1, new int[] {1, 1}, null, false)); tokenList = (List) queryResult.get("org.apache.lucene.analysis.core.StopFilter"); - assertNotNull("Expecting the 'StopFilter' to be applied on the query for the 'text' field", tokenList); + assertNotNull( + "Expecting the 'StopFilter' to be applied on the query for the 'text' field", tokenList); assertEquals("Query has only one token", 1, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("jumping", null, "", 0, 7, 1, new int[]{1,1,1}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo("jumping", null, "", 0, 7, 1, new int[] {1, 1, 1}, null, false)); tokenList = (List) queryResult.get("org.apache.lucene.analysis.en.PorterStemFilter"); - assertNotNull("Expecting the 'PorterStemFilter' to be applied on the query for the 'text' field", tokenList); + assertNotNull( + "Expecting the 'PorterStemFilter' to be applied on the query for the 'text' field", + tokenList); assertEquals("Query has only one token", 1, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("jump", null, "", 0, 7, 1, new int[]{1,1,1,1}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo("jump", null, "", 0, 7, 1, new int[] {1, 1, 1, 1}, null, false)); indexResult = textResult.get("index"); assertEquals("The 'text' field has only a single value", 1, indexResult.size()); valueResult = (NamedList>) indexResult.get("The Fox Jumped Over The Dogs"); tokenList = valueResult.get("org.apache.lucene.analysis.standard.StandardTokenizer"); - assertNotNull("Expecting the 'StandardTokenizer' to be applied on the index for the 'text' field", tokenList); + assertNotNull( + "Expecting the 'StandardTokenizer' to be applied on the index for the 'text' field", + tokenList); assertEquals("Expecting 6 tokens", 6, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("The", null, "", 0, 3, 1, new int[]{1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("Fox", null, "", 4, 7, 2, new int[]{2}, null, false)); - assertToken(tokenList.get(2), new TokenInfo("Jumped", null, "", 8, 14, 3, new int[]{3}, null, false)); - assertToken(tokenList.get(3), new TokenInfo("Over", null, "", 15, 19, 4, new int[]{4}, null, false)); - assertToken(tokenList.get(4), new TokenInfo("The", null, "", 20, 23, 5, new int[]{5}, null, false)); - assertToken(tokenList.get(5), new TokenInfo("Dogs", null, "", 24, 28, 6, new int[]{6}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo("The", null, "", 0, 3, 1, new int[] {1}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("Fox", null, "", 4, 7, 2, new int[] {2}, null, false)); + assertToken( + tokenList.get(2), + 
new TokenInfo("Jumped", null, "", 8, 14, 3, new int[] {3}, null, false)); + assertToken( + tokenList.get(3), + new TokenInfo("Over", null, "", 15, 19, 4, new int[] {4}, null, false)); + assertToken( + tokenList.get(4), + new TokenInfo("The", null, "", 20, 23, 5, new int[] {5}, null, false)); + assertToken( + tokenList.get(5), + new TokenInfo("Dogs", null, "", 24, 28, 6, new int[] {6}, null, false)); tokenList = valueResult.get("org.apache.lucene.analysis.core.LowerCaseFilter"); - assertNotNull("Expecting the 'LowerCaseFilter' to be applied on the index for the 'text' field", tokenList); + assertNotNull( + "Expecting the 'LowerCaseFilter' to be applied on the index for the 'text' field", + tokenList); assertEquals("Expecting 6 tokens", 6, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("the", null, "", 0, 3, 1, new int[]{1,1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("fox", null, "", 4, 7, 2, new int[]{2,2}, null, false)); - assertToken(tokenList.get(2), new TokenInfo("jumped", null, "", 8, 14, 3, new int[]{3,3}, null, false)); - assertToken(tokenList.get(3), new TokenInfo("over", null, "", 15, 19, 4, new int[]{4,4}, null, false)); - assertToken(tokenList.get(4), new TokenInfo("the", null, "", 20, 23, 5, new int[]{5,5}, null, false)); - assertToken(tokenList.get(5), new TokenInfo("dogs", null, "", 24, 28, 6, new int[]{6,6}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo("the", null, "", 0, 3, 1, new int[] {1, 1}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("fox", null, "", 4, 7, 2, new int[] {2, 2}, null, false)); + assertToken( + tokenList.get(2), + new TokenInfo("jumped", null, "", 8, 14, 3, new int[] {3, 3}, null, false)); + assertToken( + tokenList.get(3), + new TokenInfo("over", null, "", 15, 19, 4, new int[] {4, 4}, null, false)); + assertToken( + tokenList.get(4), + new TokenInfo("the", null, "", 20, 23, 5, new int[] {5, 5}, null, false)); + assertToken( + tokenList.get(5), + new TokenInfo("dogs", null, "", 24, 28, 6, new int[] {6, 6}, null, false)); tokenList = valueResult.get("org.apache.lucene.analysis.core.StopFilter"); - assertNotNull("Expecting the 'StopFilter' to be applied on the index for the 'text' field", tokenList); + assertNotNull( + "Expecting the 'StopFilter' to be applied on the index for the 'text' field", tokenList); assertEquals("Expecting 4 tokens after stop word removal", 4, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 4, 7, 2, new int[]{2,2,2}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("jumped", null, "", 8, 14, 3, new int[]{3,3,3}, null, false)); - assertToken(tokenList.get(2), new TokenInfo("over", null, "", 15, 19, 4, new int[]{4,4,4}, null, false)); - assertToken(tokenList.get(3), new TokenInfo("dogs", null, "", 24, 28, 6, new int[]{6,6,6}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo("fox", null, "", 4, 7, 2, new int[] {2, 2, 2}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("jumped", null, "", 8, 14, 3, new int[] {3, 3, 3}, null, false)); + assertToken( + tokenList.get(2), + new TokenInfo("over", null, "", 15, 19, 4, new int[] {4, 4, 4}, null, false)); + assertToken( + tokenList.get(3), + new TokenInfo("dogs", null, "", 24, 28, 6, new int[] {6, 6, 6}, null, false)); tokenList = valueResult.get("org.apache.lucene.analysis.en.PorterStemFilter"); - assertNotNull("Expecting the 'PorterStemFilter' to be applied on the index for the 'text' field", tokenList); + assertNotNull( + "Expecting 
the 'PorterStemFilter' to be applied on the index for the 'text' field", + tokenList); assertEquals("Expecting 4 tokens", 4, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 4, 7, 2, new int[]{2,2,2,2}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("jump", null, "", 8, 14, 3, new int[]{3,3,3,3}, null, true)); - assertToken(tokenList.get(2), new TokenInfo("over", null, "", 15, 19, 4, new int[]{4,4,4,4}, null, false)); - assertToken(tokenList.get(3), new TokenInfo("dog", null, "", 24, 28, 6, new int[]{6,6,6,6}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo("fox", null, "", 4, 7, 2, new int[] {2, 2, 2, 2}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("jump", null, "", 8, 14, 3, new int[] {3, 3, 3, 3}, null, true)); + assertToken( + tokenList.get(2), + new TokenInfo("over", null, "", 15, 19, 4, new int[] {4, 4, 4, 4}, null, false)); + assertToken( + tokenList.get(3), + new TokenInfo("dog", null, "", 24, 28, 6, new int[] {6, 6, 6, 6}, null, false)); } } diff --git a/solr/core/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java index 8f5f449a2b2..5543e9e2bbf 100644 --- a/solr/core/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java @@ -20,20 +20,19 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenFilter; +import org.apache.lucene.analysis.TokenFilterFactory; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; -import org.apache.lucene.analysis.core.WhitespaceTokenizer; +import org.apache.lucene.analysis.TokenizerFactory; import org.apache.lucene.analysis.commongrams.CommonGramsFilter; import org.apache.lucene.analysis.commongrams.CommonGramsQueryFilter; +import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.FlagsAttribute; import org.apache.lucene.analysis.tokenattributes.FlagsAttributeImpl; -import org.apache.lucene.analysis.TokenFilterFactory; -import org.apache.lucene.analysis.TokenizerFactory; import org.apache.lucene.util.AttributeFactory; import org.apache.solr.analysis.TokenizerChain; import org.apache.solr.client.solrj.request.FieldAnalysisRequest; @@ -53,11 +52,10 @@ /** * A test for {@link FieldAnalysisRequestHandler}. 
* - * * @since solr 1.4 */ public class FieldAnalysisRequestHandlerTest extends AnalysisRequestHandlerTestBase { - + private FieldAnalysisRequestHandler handler; @Override @@ -71,27 +69,29 @@ public void setUp() throws Exception { public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema.xml"); } - + @Test public void testPointField() throws Exception { FieldAnalysisRequest request = new FieldAnalysisRequest(); request.addFieldType("pint"); request.setFieldValue("5"); - + @SuppressWarnings({"rawtypes"}) NamedList nl = handler.handleAnalysisRequest(request, h.getCore().getLatestSchema()); @SuppressWarnings({"rawtypes"}) - NamedList pintNL = (NamedList)nl.get("field_types").get("pint"); + NamedList pintNL = (NamedList) nl.get("field_types").get("pint"); @SuppressWarnings({"rawtypes"}) - NamedList indexNL = (NamedList)pintNL.get("index"); + NamedList indexNL = (NamedList) pintNL.get("index"); @SuppressWarnings({"rawtypes"}) - ArrayList analyzerNL = (ArrayList)indexNL.get("org.apache.solr.schema.FieldType$DefaultAnalyzer$1"); - String text = (String)((NamedList)analyzerNL.get(0)).get("text"); + ArrayList analyzerNL = + (ArrayList) indexNL.get("org.apache.solr.schema.FieldType$DefaultAnalyzer$1"); + String text = (String) ((NamedList) analyzerNL.get(0)).get("text"); assertEquals("5", text); } /** - * Tests the {@link FieldAnalysisRequestHandler#resolveAnalysisRequest(org.apache.solr.request.SolrQueryRequest)} + * Tests the {@link + * FieldAnalysisRequestHandler#resolveAnalysisRequest(org.apache.solr.request.SolrQueryRequest)} */ @Test public void testResolveAnalysisRequest() throws Exception { @@ -118,20 +118,20 @@ public void testResolveAnalysisRequest() throws Exception { // testing overide of query value using analysis.query param params.add(AnalysisParams.QUERY, "quick lazy"); - req=new LocalSolrQueryRequest(h.getCore(), params); + req = new LocalSolrQueryRequest(h.getCore(), params); request = handler.resolveAnalysisRequest(req); assertEquals("quick lazy", request.getQuery()); req.close(); // testing analysis.showmatch param params.add(AnalysisParams.SHOW_MATCH, "false"); - req=new LocalSolrQueryRequest(h.getCore(), params); + req = new LocalSolrQueryRequest(h.getCore(), params); request = handler.resolveAnalysisRequest(req); assertFalse(request.isShowMatch()); req.close(); params.set(AnalysisParams.SHOW_MATCH, "true"); - req=new LocalSolrQueryRequest(h.getCore(), params); + req = new LocalSolrQueryRequest(h.getCore(), params); request = handler.resolveAnalysisRequest(req); assertTrue(request.isShowMatch()); req.close(); @@ -139,7 +139,7 @@ public void testResolveAnalysisRequest() throws Exception { // testing absence of query value params.remove(CommonParams.Q); params.remove(AnalysisParams.QUERY); - req=new LocalSolrQueryRequest(h.getCore(), params); + req = new LocalSolrQueryRequest(h.getCore(), params); request = handler.resolveAnalysisRequest(req); assertNull(request.getQuery()); req.close(); @@ -163,7 +163,8 @@ public void testResolveAnalysisRequest() throws Exception { params.remove(AnalysisParams.QUERY); params.remove(AnalysisParams.FIELD_VALUE); try (SolrQueryRequest solrQueryRequest = new LocalSolrQueryRequest(h.getCore(), params)) { - SolrException ex = expectThrows(SolrException.class, () -> handler.resolveAnalysisRequest(solrQueryRequest)); + SolrException ex = + expectThrows(SolrException.class, () -> handler.resolveAnalysisRequest(solrQueryRequest)); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); } @@ -171,7 +172,8 @@ public 
void testResolveAnalysisRequest() throws Exception { } /** - * Tests the {@link FieldAnalysisRequestHandler#handleAnalysisRequest(org.apache.solr.client.solrj.request.FieldAnalysisRequest, + * Tests the {@link + * FieldAnalysisRequestHandler#handleAnalysisRequest(org.apache.solr.client.solrj.request.FieldAnalysisRequest, * org.apache.solr.schema.IndexSchema)} */ @Test @@ -188,7 +190,8 @@ public void testHandleAnalysisRequest() throws Exception { request.setShowMatch(true); @SuppressWarnings({"rawtypes"}) - NamedList result = handler.handleAnalysisRequest(request, h.getCore().getLatestSchema()); + NamedList result = + handler.handleAnalysisRequest(request, h.getCore().getLatestSchema()); assertTrue("result is null and it shouldn't be", result != null); @SuppressWarnings({"rawtypes"}) @@ -203,54 +206,128 @@ public void testHandleAnalysisRequest() throws Exception { assertNotNull("expecting an index token analysis for field type 'text'", indexPart); @SuppressWarnings({"rawtypes"}) - List tokenList = indexPart.get("org.apache.lucene.analysis.standard.StandardTokenizer"); + List tokenList = + indexPart.get("org.apache.lucene.analysis.standard.StandardTokenizer"); assertNotNull("Expcting StandardTokenizer analysis breakdown", tokenList); assertEquals(tokenList.size(), 10); - assertToken(tokenList.get(0), new TokenInfo("the", null, "", 0, 3, 1, new int[]{1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("quick", null, "", 4, 9, 2, new int[]{2}, null, false)); - assertToken(tokenList.get(2), new TokenInfo("red", null, "", 10, 13, 3, new int[]{3}, null, false)); - assertToken(tokenList.get(3), new TokenInfo("fox", null, "", 14, 17, 4, new int[]{4}, null, true)); - assertToken(tokenList.get(4), new TokenInfo("jumped", null, "", 18, 24, 5, new int[]{5}, null, false)); - assertToken(tokenList.get(5), new TokenInfo("over", null, "", 25, 29, 6, new int[]{6}, null, false)); - assertToken(tokenList.get(6), new TokenInfo("the", null, "", 30, 33, 7, new int[]{7}, null, false)); - assertToken(tokenList.get(7), new TokenInfo("lazy", null, "", 34, 38, 8, new int[]{8}, null, false)); - assertToken(tokenList.get(8), new TokenInfo("brown", null, "", 39, 44, 9, new int[]{9}, null, true)); - assertToken(tokenList.get(9), new TokenInfo("dogs", null, "", 45, 49, 10, new int[]{10}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo("the", null, "", 0, 3, 1, new int[] {1}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("quick", null, "", 4, 9, 2, new int[] {2}, null, false)); + assertToken( + tokenList.get(2), + new TokenInfo("red", null, "", 10, 13, 3, new int[] {3}, null, false)); + assertToken( + tokenList.get(3), + new TokenInfo("fox", null, "", 14, 17, 4, new int[] {4}, null, true)); + assertToken( + tokenList.get(4), + new TokenInfo("jumped", null, "", 18, 24, 5, new int[] {5}, null, false)); + assertToken( + tokenList.get(5), + new TokenInfo("over", null, "", 25, 29, 6, new int[] {6}, null, false)); + assertToken( + tokenList.get(6), + new TokenInfo("the", null, "", 30, 33, 7, new int[] {7}, null, false)); + assertToken( + tokenList.get(7), + new TokenInfo("lazy", null, "", 34, 38, 8, new int[] {8}, null, false)); + assertToken( + tokenList.get(8), + new TokenInfo("brown", null, "", 39, 44, 9, new int[] {9}, null, true)); + assertToken( + tokenList.get(9), + new TokenInfo("dogs", null, "", 45, 49, 10, new int[] {10}, null, false)); tokenList = indexPart.get("org.apache.lucene.analysis.core.LowerCaseFilter"); assertNotNull("Expcting LowerCaseFilter analysis 
breakdown", tokenList); assertEquals(tokenList.size(), 10); - assertToken(tokenList.get(0), new TokenInfo("the", null, "", 0, 3, 1, new int[]{1,1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("quick", null, "", 4, 9, 2, new int[]{2,2}, null, false)); - assertToken(tokenList.get(2), new TokenInfo("red", null, "", 10, 13, 3, new int[]{3,3}, null, false)); - assertToken(tokenList.get(3), new TokenInfo("fox", null, "", 14, 17, 4, new int[]{4,4}, null, true)); - assertToken(tokenList.get(4), new TokenInfo("jumped", null, "", 18, 24, 5, new int[]{5,5}, null, false)); - assertToken(tokenList.get(5), new TokenInfo("over", null, "", 25, 29, 6, new int[]{6,6}, null, false)); - assertToken(tokenList.get(6), new TokenInfo("the", null, "", 30, 33, 7, new int[]{7,7}, null, false)); - assertToken(tokenList.get(7), new TokenInfo("lazy", null, "", 34, 38, 8, new int[]{8,8}, null, false)); - assertToken(tokenList.get(8), new TokenInfo("brown", null, "", 39, 44, 9, new int[]{9,9}, null, true)); - assertToken(tokenList.get(9), new TokenInfo("dogs", null, "", 45, 49, 10, new int[]{10,10}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo("the", null, "", 0, 3, 1, new int[] {1, 1}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("quick", null, "", 4, 9, 2, new int[] {2, 2}, null, false)); + assertToken( + tokenList.get(2), + new TokenInfo("red", null, "", 10, 13, 3, new int[] {3, 3}, null, false)); + assertToken( + tokenList.get(3), + new TokenInfo("fox", null, "", 14, 17, 4, new int[] {4, 4}, null, true)); + assertToken( + tokenList.get(4), + new TokenInfo("jumped", null, "", 18, 24, 5, new int[] {5, 5}, null, false)); + assertToken( + tokenList.get(5), + new TokenInfo("over", null, "", 25, 29, 6, new int[] {6, 6}, null, false)); + assertToken( + tokenList.get(6), + new TokenInfo("the", null, "", 30, 33, 7, new int[] {7, 7}, null, false)); + assertToken( + tokenList.get(7), + new TokenInfo("lazy", null, "", 34, 38, 8, new int[] {8, 8}, null, false)); + assertToken( + tokenList.get(8), + new TokenInfo("brown", null, "", 39, 44, 9, new int[] {9, 9}, null, true)); + assertToken( + tokenList.get(9), + new TokenInfo("dogs", null, "", 45, 49, 10, new int[] {10, 10}, null, false)); tokenList = indexPart.get("org.apache.lucene.analysis.core.StopFilter"); assertNotNull("Expcting StopFilter analysis breakdown", tokenList); assertEquals(tokenList.size(), 8); - assertToken(tokenList.get(0), new TokenInfo("quick", null, "", 4, 9, 2, new int[]{2,2,2}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("red", null, "", 10, 13, 3, new int[]{3,3,3}, null, false)); - assertToken(tokenList.get(2), new TokenInfo("fox", null, "", 14, 17, 4, new int[]{4,4,4}, null, true)); - assertToken(tokenList.get(3), new TokenInfo("jumped", null, "", 18, 24, 5, new int[]{5,5,5}, null, false)); - assertToken(tokenList.get(4), new TokenInfo("over", null, "", 25, 29, 6, new int[]{6,6,6}, null, false)); - assertToken(tokenList.get(5), new TokenInfo("lazy", null, "", 34, 38, 8, new int[]{8,8,8}, null, false)); - assertToken(tokenList.get(6), new TokenInfo("brown", null, "", 39, 44, 9, new int[]{9,9,9}, null, true)); - assertToken(tokenList.get(7), new TokenInfo("dogs", null, "", 45, 49, 10, new int[]{10,10,10}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo("quick", null, "", 4, 9, 2, new int[] {2, 2, 2}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("red", null, "", 10, 13, 3, new int[] {3, 3, 3}, null, false)); + assertToken( + tokenList.get(2), + 
new TokenInfo("fox", null, "", 14, 17, 4, new int[] {4, 4, 4}, null, true)); + assertToken( + tokenList.get(3), + new TokenInfo("jumped", null, "", 18, 24, 5, new int[] {5, 5, 5}, null, false)); + assertToken( + tokenList.get(4), + new TokenInfo("over", null, "", 25, 29, 6, new int[] {6, 6, 6}, null, false)); + assertToken( + tokenList.get(5), + new TokenInfo("lazy", null, "", 34, 38, 8, new int[] {8, 8, 8}, null, false)); + assertToken( + tokenList.get(6), + new TokenInfo("brown", null, "", 39, 44, 9, new int[] {9, 9, 9}, null, true)); + assertToken( + tokenList.get(7), + new TokenInfo("dogs", null, "", 45, 49, 10, new int[] {10, 10, 10}, null, false)); tokenList = indexPart.get("org.apache.lucene.analysis.en.PorterStemFilter"); assertNotNull("Expcting PorterStemFilter analysis breakdown", tokenList); assertEquals(tokenList.size(), 8); - assertToken(tokenList.get(0), new TokenInfo("quick", null, "", 4, 9, 2, new int[]{2,2,2,2}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("red", null, "", 10, 13, 3, new int[]{3,3,3,3}, null, false)); - assertToken(tokenList.get(2), new TokenInfo("fox", null, "", 14, 17, 4, new int[]{4,4,4,4}, null, true)); - assertToken(tokenList.get(3), new TokenInfo("jump", null, "", 18, 24, 5, new int[]{5,5,5,5}, null, false)); - assertToken(tokenList.get(4), new TokenInfo("over", null, "", 25, 29, 6, new int[]{6,6,6,6}, null, false)); - assertToken(tokenList.get(5), new TokenInfo("lazi", null, "", 34, 38, 8, new int[]{8,8,8,8}, null, false)); - assertToken(tokenList.get(6), new TokenInfo("brown", null, "", 39, 44, 9, new int[]{9,9,9,9}, null, true)); - assertToken(tokenList.get(7), new TokenInfo("dog", null, "", 45, 49, 10, new int[]{10,10,10,10}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo("quick", null, "", 4, 9, 2, new int[] {2, 2, 2, 2}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("red", null, "", 10, 13, 3, new int[] {3, 3, 3, 3}, null, false)); + assertToken( + tokenList.get(2), + new TokenInfo("fox", null, "", 14, 17, 4, new int[] {4, 4, 4, 4}, null, true)); + assertToken( + tokenList.get(3), + new TokenInfo("jump", null, "", 18, 24, 5, new int[] {5, 5, 5, 5}, null, false)); + assertToken( + tokenList.get(4), + new TokenInfo("over", null, "", 25, 29, 6, new int[] {6, 6, 6, 6}, null, false)); + assertToken( + tokenList.get(5), + new TokenInfo("lazi", null, "", 34, 38, 8, new int[] {8, 8, 8, 8}, null, false)); + assertToken( + tokenList.get(6), + new TokenInfo("brown", null, "", 39, 44, 9, new int[] {9, 9, 9, 9}, null, true)); + assertToken( + tokenList.get(7), + new TokenInfo( + "dog", null, "", 45, 49, 10, new int[] {10, 10, 10, 10}, null, false)); @SuppressWarnings({"rawtypes"}) NamedList> queryPart = textType.get("query"); @@ -258,24 +335,43 @@ public void testHandleAnalysisRequest() throws Exception { tokenList = queryPart.get("org.apache.lucene.analysis.standard.StandardTokenizer"); assertNotNull("Expecting StandardTokenizer analysis breakdown", tokenList); - assertEquals("Expecting StandardTokenizer to produce 2 tokens from '" + request.getQuery() + "'", 2, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 0, 3, 1, new int[]{1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("brown", null, "", 4, 9, 2, new int[]{2}, null, false)); + assertEquals( + "Expecting StandardTokenizer to produce 2 tokens from '" + request.getQuery() + "'", + 2, + tokenList.size()); + assertToken( + tokenList.get(0), + new TokenInfo("fox", null, "", 0, 3, 1, new int[] {1}, 
null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("brown", null, "", 4, 9, 2, new int[] {2}, null, false)); tokenList = queryPart.get("org.apache.lucene.analysis.core.LowerCaseFilter"); assertNotNull("Expcting LowerCaseFilter analysis breakdown", tokenList); assertEquals(2, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 0, 3, 1, new int[]{1,1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("brown", null, "", 4, 9, 2, new int[]{2,2}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo("fox", null, "", 0, 3, 1, new int[] {1, 1}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("brown", null, "", 4, 9, 2, new int[] {2, 2}, null, false)); tokenList = queryPart.get("org.apache.lucene.analysis.core.StopFilter"); assertNotNull("Expcting StopFilter analysis breakdown", tokenList); assertEquals(2, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 0, 3, 1, new int[]{1,1,1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("brown", null, "", 4, 9, 2, new int[]{2,2,2}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo("fox", null, "", 0, 3, 1, new int[] {1, 1, 1}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("brown", null, "", 4, 9, 2, new int[] {2, 2, 2}, null, false)); tokenList = queryPart.get("org.apache.lucene.analysis.en.PorterStemFilter"); assertNotNull("Expcting PorterStemFilter analysis breakdown", tokenList); assertEquals(2, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("fox", null, "", 0, 3, 1, new int[]{1,1,1,1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("brown", null, "", 4, 9, 2, new int[]{2,2,2,2}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo("fox", null, "", 0, 3, 1, new int[] {1, 1, 1, 1}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("brown", null, "", 4, 9, 2, new int[] {2, 2, 2, 2}, null, false)); @SuppressWarnings({"rawtypes"}) NamedList nameTextType = fieldTypes.get("nametext"); @@ -287,22 +383,43 @@ public void testHandleAnalysisRequest() throws Exception { tokenList = indexPart.get("org.apache.lucene.analysis.core.WhitespaceTokenizer"); assertNotNull("Expcting WhitespaceTokenizer analysis breakdown", tokenList); assertEquals(10, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("the", null, "word", 0, 3, 1, new int[]{1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("quick", null, "word", 4, 9, 2, new int[]{2}, null, false)); - assertToken(tokenList.get(2), new TokenInfo("red", null, "word", 10, 13, 3, new int[]{3}, null, false)); - assertToken(tokenList.get(3), new TokenInfo("fox", null, "word", 14, 17, 4, new int[]{4}, null, true)); - assertToken(tokenList.get(4), new TokenInfo("jumped", null, "word", 18, 24, 5, new int[]{5}, null, false)); - assertToken(tokenList.get(5), new TokenInfo("over", null, "word", 25, 29, 6, new int[]{6}, null, false)); - assertToken(tokenList.get(6), new TokenInfo("the", null, "word", 30, 33, 7, new int[]{7}, null, false)); - assertToken(tokenList.get(7), new TokenInfo("lazy", null, "word", 34, 38, 8, new int[]{8}, null, false)); - assertToken(tokenList.get(8), new TokenInfo("brown", null, "word", 39, 44, 9, new int[]{9}, null, true)); - assertToken(tokenList.get(9), new TokenInfo("dogs", null, "word", 45, 49, 10, new int[]{10}, null, false)); + assertToken( + tokenList.get(0), new TokenInfo("the", null, "word", 0, 3, 1, new int[] {1}, null, false)); + assertToken( + 
tokenList.get(1), + new TokenInfo("quick", null, "word", 4, 9, 2, new int[] {2}, null, false)); + assertToken( + tokenList.get(2), + new TokenInfo("red", null, "word", 10, 13, 3, new int[] {3}, null, false)); + assertToken( + tokenList.get(3), new TokenInfo("fox", null, "word", 14, 17, 4, new int[] {4}, null, true)); + assertToken( + tokenList.get(4), + new TokenInfo("jumped", null, "word", 18, 24, 5, new int[] {5}, null, false)); + assertToken( + tokenList.get(5), + new TokenInfo("over", null, "word", 25, 29, 6, new int[] {6}, null, false)); + assertToken( + tokenList.get(6), + new TokenInfo("the", null, "word", 30, 33, 7, new int[] {7}, null, false)); + assertToken( + tokenList.get(7), + new TokenInfo("lazy", null, "word", 34, 38, 8, new int[] {8}, null, false)); + assertToken( + tokenList.get(8), + new TokenInfo("brown", null, "word", 39, 44, 9, new int[] {9}, null, true)); + assertToken( + tokenList.get(9), + new TokenInfo("dogs", null, "word", 45, 49, 10, new int[] {10}, null, false)); queryPart = nameTextType.get("query"); assertNotNull("expecting a query token analysis for field type 'nametext'", queryPart); tokenList = queryPart.get(WhitespaceTokenizer.class.getName()); - assertToken(tokenList.get(0), new TokenInfo("fox", null, "word", 0, 3, 1, new int[]{1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("brown", null, "word", 4, 9, 2, new int[]{2}, null, false)); + assertToken( + tokenList.get(0), new TokenInfo("fox", null, "word", 0, 3, 1, new int[] {1}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("brown", null, "word", 4, 9, 2, new int[] {2}, null, false)); @SuppressWarnings({"rawtypes"}) NamedList fieldNames = result.get("field_names"); @@ -318,16 +435,34 @@ public void testHandleAnalysisRequest() throws Exception { tokenList = indexPart.get(MockTokenizer.class.getName()); assertNotNull("expecting only MockTokenizer to be applied", tokenList); assertEquals("expecting MockTokenizer to produce 10 tokens", 10, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("the", null, "word", 0, 3, 1, new int[]{1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("quick", null, "word", 4, 9, 2, new int[]{2}, null, false)); - assertToken(tokenList.get(2), new TokenInfo("red", null, "word", 10, 13, 3, new int[]{3}, null, false)); - assertToken(tokenList.get(3), new TokenInfo("fox", null, "word", 14, 17, 4, new int[]{4}, null, true)); - assertToken(tokenList.get(4), new TokenInfo("jumped", null, "word", 18, 24, 5, new int[]{5}, null, false)); - assertToken(tokenList.get(5), new TokenInfo("over", null, "word", 25, 29, 6, new int[]{6}, null, false)); - assertToken(tokenList.get(6), new TokenInfo("the", null, "word", 30, 33, 7, new int[]{7}, null, false)); - assertToken(tokenList.get(7), new TokenInfo("lazy", null, "word", 34, 38, 8, new int[]{8}, null, false)); - assertToken(tokenList.get(8), new TokenInfo("brown", null, "word", 39, 44, 9, new int[]{9}, null, true)); - assertToken(tokenList.get(9), new TokenInfo("dogs", null, "word", 45, 49, 10, new int[]{10}, null, false)); + assertToken( + tokenList.get(0), new TokenInfo("the", null, "word", 0, 3, 1, new int[] {1}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("quick", null, "word", 4, 9, 2, new int[] {2}, null, false)); + assertToken( + tokenList.get(2), + new TokenInfo("red", null, "word", 10, 13, 3, new int[] {3}, null, false)); + assertToken( + tokenList.get(3), new TokenInfo("fox", null, "word", 14, 17, 4, new int[] {4}, null, true)); + assertToken( + 
tokenList.get(4), + new TokenInfo("jumped", null, "word", 18, 24, 5, new int[] {5}, null, false)); + assertToken( + tokenList.get(5), + new TokenInfo("over", null, "word", 25, 29, 6, new int[] {6}, null, false)); + assertToken( + tokenList.get(6), + new TokenInfo("the", null, "word", 30, 33, 7, new int[] {7}, null, false)); + assertToken( + tokenList.get(7), + new TokenInfo("lazy", null, "word", 34, 38, 8, new int[] {8}, null, false)); + assertToken( + tokenList.get(8), + new TokenInfo("brown", null, "word", 39, 44, 9, new int[] {9}, null, true)); + assertToken( + tokenList.get(9), + new TokenInfo("dogs", null, "word", 45, 49, 10, new int[] {10}, null, false)); queryPart = whitetok.get("query"); assertNotNull("expecting a query token analysis for field 'whitetok'", queryPart); @@ -335,8 +470,11 @@ public void testHandleAnalysisRequest() throws Exception { tokenList = queryPart.get(MockTokenizer.class.getName()); assertNotNull("expecting only MockTokenizer to be applied", tokenList); assertEquals("expecting MockTokenizer to produce 2 tokens", 2, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("fox", null, "word", 0, 3, 1, new int[]{1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("brown", null, "word", 4, 9, 2, new int[]{2}, null, false)); + assertToken( + tokenList.get(0), new TokenInfo("fox", null, "word", 0, 3, 1, new int[] {1}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("brown", null, "word", 4, 9, 2, new int[] {2}, null, false)); @SuppressWarnings({"rawtypes"}) NamedList keywordtok = fieldNames.get("keywordtok"); @@ -348,7 +486,18 @@ public void testHandleAnalysisRequest() throws Exception { tokenList = indexPart.get(MockTokenizer.class.getName()); assertNotNull("expecting only MockTokenizer to be applied", tokenList); assertEquals("expecting MockTokenizer to produce 1 token", 1, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("the quick red fox jumped over the lazy brown dogs", null, "word", 0, 49, 1, new int[]{1}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo( + "the quick red fox jumped over the lazy brown dogs", + null, + "word", + 0, + 49, + 1, + new int[] {1}, + null, + false)); queryPart = keywordtok.get("query"); assertNotNull("expecting a query token analysis for field 'keywordtok'", queryPart); @@ -356,8 +505,9 @@ public void testHandleAnalysisRequest() throws Exception { tokenList = queryPart.get(MockTokenizer.class.getName()); assertNotNull("expecting only MockTokenizer to be applied", tokenList); assertEquals("expecting MockTokenizer to produce 1 token", 1, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("fox brown", null, "word", 0, 9, 1, new int[]{1}, null, false)); - + assertToken( + tokenList.get(0), + new TokenInfo("fox brown", null, "word", 0, 9, 1, new int[] {1}, null, false)); } @Test @@ -369,7 +519,8 @@ public void testCharFilterAnalysis() throws Exception { request.setShowMatch(false); @SuppressWarnings({"rawtypes"}) - NamedList result = handler.handleAnalysisRequest(request, h.getCore().getLatestSchema()); + NamedList result = + handler.handleAnalysisRequest(request, h.getCore().getLatestSchema()); assertTrue("result is null and it shouldn't be", result != null); @SuppressWarnings({"unchecked", "rawtypes"}) @@ -383,14 +534,20 @@ public void testCharFilterAnalysis() throws Exception { NamedList indexPart = textType.get("index"); assertNotNull("expecting an index token analysis for field type 'charfilthtmlmap'", indexPart); - 
assertEquals("\n\nwhátëvêr\n\n", indexPart.get("org.apache.lucene.analysis.charfilter.HTMLStripCharFilter")); - assertEquals("\n\nwhatever\n\n", indexPart.get("org.apache.lucene.analysis.charfilter.MappingCharFilter")); + assertEquals( + "\n\nwhátëvêr\n\n", + indexPart.get("org.apache.lucene.analysis.charfilter.HTMLStripCharFilter")); + assertEquals( + "\n\nwhatever\n\n", + indexPart.get("org.apache.lucene.analysis.charfilter.MappingCharFilter")); @SuppressWarnings({"unchecked", "rawtypes"}) - List tokenList = (List)indexPart.get(MockTokenizer.class.getName()); + List tokenList = (List) indexPart.get(MockTokenizer.class.getName()); assertNotNull("Expecting MockTokenizer analysis breakdown", tokenList); assertEquals(tokenList.size(), 1); - assertToken(tokenList.get(0), new TokenInfo("whatever", null, "word", 12, 20, 1, new int[]{1}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo("whatever", null, "word", 12, 20, 1, new int[] {1}, null, false)); } @Test @@ -402,7 +559,8 @@ public void testPositionHistoryWithWDGF() throws Exception { request.setShowMatch(false); @SuppressWarnings({"rawtypes"}) - NamedList result = handler.handleAnalysisRequest(request, h.getCore().getLatestSchema()); + NamedList result = + handler.handleAnalysisRequest(request, h.getCore().getLatestSchema()); assertTrue("result is null and it shouldn't be", result != null); @SuppressWarnings({"unchecked", "rawtypes"}) @@ -420,28 +578,58 @@ public void testPositionHistoryWithWDGF() throws Exception { List tokenList = indexPart.get(MockTokenizer.class.getName()); assertNotNull("Expcting MockTokenizer analysis breakdown", tokenList); assertEquals(4, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("hi,", null, "word", 0, 3, 1, new int[]{1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("3456-12", null, "word", 4, 11, 2, new int[]{2}, null, false)); - assertToken(tokenList.get(2), new TokenInfo("a", null, "word", 12, 13, 3, new int[]{3}, null, false)); - assertToken(tokenList.get(3), new TokenInfo("Test", null, "word", 14, 18, 4, new int[]{4}, null, false)); + assertToken( + tokenList.get(0), new TokenInfo("hi,", null, "word", 0, 3, 1, new int[] {1}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("3456-12", null, "word", 4, 11, 2, new int[] {2}, null, false)); + assertToken( + tokenList.get(2), new TokenInfo("a", null, "word", 12, 13, 3, new int[] {3}, null, false)); + assertToken( + tokenList.get(3), + new TokenInfo("Test", null, "word", 14, 18, 4, new int[] {4}, null, false)); tokenList = indexPart.get("org.apache.lucene.analysis.miscellaneous.WordDelimiterGraphFilter"); assertNotNull("Expcting WordDelimiterGraphFilter analysis breakdown", tokenList); assertEquals(6, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("hi", null, "word", 0, 2, 1, new int[]{1,1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("345612", null, "word", 4, 11, 2, new int[]{2,2}, null, false)); - assertToken(tokenList.get(2), new TokenInfo("3456", null, "word", 4, 8, 2, new int[]{2,2}, null, false)); - assertToken(tokenList.get(3), new TokenInfo("12", null, "word", 9, 11, 3, new int[]{2,3}, null, false)); - assertToken(tokenList.get(4), new TokenInfo("a", null, "word", 12, 13, 4, new int[]{3,4}, null, false)); - assertToken(tokenList.get(5), new TokenInfo("Test", null, "word", 14, 18, 5, new int[]{4,5}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo("hi", null, "word", 0, 2, 1, new int[] {1, 1}, null, false)); + assertToken( + 
tokenList.get(1), + new TokenInfo("345612", null, "word", 4, 11, 2, new int[] {2, 2}, null, false)); + assertToken( + tokenList.get(2), + new TokenInfo("3456", null, "word", 4, 8, 2, new int[] {2, 2}, null, false)); + assertToken( + tokenList.get(3), + new TokenInfo("12", null, "word", 9, 11, 3, new int[] {2, 3}, null, false)); + assertToken( + tokenList.get(4), + new TokenInfo("a", null, "word", 12, 13, 4, new int[] {3, 4}, null, false)); + assertToken( + tokenList.get(5), + new TokenInfo("Test", null, "word", 14, 18, 5, new int[] {4, 5}, null, false)); tokenList = indexPart.get("org.apache.lucene.analysis.core.LowerCaseFilter"); assertNotNull("Expcting LowerCaseFilter analysis breakdown", tokenList); assertEquals(6, tokenList.size()); - assertToken(tokenList.get(0), new TokenInfo("hi", null, "word", 0, 2, 1, new int[]{1,1,1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("345612", null, "word", 4, 11, 2, new int[]{2,2,2}, null, false)); - assertToken(tokenList.get(2), new TokenInfo("3456", null, "word", 4, 8, 2, new int[]{2,2,2}, null, false)); - assertToken(tokenList.get(3), new TokenInfo("12", null, "word", 9, 11, 3, new int[]{2,3,3}, null, false)); - assertToken(tokenList.get(4), new TokenInfo("a", null, "word", 12, 13, 4, new int[]{3,4,4}, null, false)); - assertToken(tokenList.get(5), new TokenInfo("test", null, "word", 14, 18, 5, new int[]{4,5,5}, null, false)); + assertToken( + tokenList.get(0), + new TokenInfo("hi", null, "word", 0, 2, 1, new int[] {1, 1, 1}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("345612", null, "word", 4, 11, 2, new int[] {2, 2, 2}, null, false)); + assertToken( + tokenList.get(2), + new TokenInfo("3456", null, "word", 4, 8, 2, new int[] {2, 2, 2}, null, false)); + assertToken( + tokenList.get(3), + new TokenInfo("12", null, "word", 9, 11, 3, new int[] {2, 3, 3}, null, false)); + assertToken( + tokenList.get(4), + new TokenInfo("a", null, "word", 12, 13, 4, new int[] {3, 4, 4}, null, false)); + assertToken( + tokenList.get(5), + new TokenInfo("test", null, "word", 14, 18, 5, new int[] {4, 5, 5}, null, false)); } @SuppressWarnings({"unchecked"}) @@ -455,7 +643,8 @@ public void testCommonGrams() throws Exception { request.setShowMatch(true); @SuppressWarnings({"rawtypes"}) - NamedList result = handler.handleAnalysisRequest(request, h.getCore().getLatestSchema()); + NamedList result = + handler.handleAnalysisRequest(request, h.getCore().getLatestSchema()); assertTrue("result is null and it shouldn't be", result != null); @SuppressWarnings({"rawtypes"}) @@ -473,24 +662,51 @@ public void testCommonGrams() throws Exception { List tokenList = indexPart.get(WhitespaceTokenizer.class.getName()); assertNotNull("Expcting WhitespaceTokenizer analysis breakdown", tokenList); assertEquals(tokenList.size(), 5); - assertToken(tokenList.get(0), new TokenInfo("the", null, "word", 0, 3, 1, new int[]{1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("quick", null, "word", 4, 9, 2, new int[]{2}, null, false)); - assertToken(tokenList.get(2), new TokenInfo("and", null, "word", 10, 13, 3, new int[]{3}, null, false)); - assertToken(tokenList.get(3), new TokenInfo("the", null, "word", 14, 17, 4, new int[]{4}, null, false)); - assertToken(tokenList.get(4), new TokenInfo("dead", null, "word", 18, 22, 5, new int[]{5}, null, false)); + assertToken( + tokenList.get(0), new TokenInfo("the", null, "word", 0, 3, 1, new int[] {1}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("quick", null, "word", 4, 9, 2, new int[] 
{2}, null, false)); + assertToken( + tokenList.get(2), + new TokenInfo("and", null, "word", 10, 13, 3, new int[] {3}, null, false)); + assertToken( + tokenList.get(3), + new TokenInfo("the", null, "word", 14, 17, 4, new int[] {4}, null, false)); + assertToken( + tokenList.get(4), + new TokenInfo("dead", null, "word", 18, 22, 5, new int[] {5}, null, false)); tokenList = indexPart.get(CommonGramsFilter.class.getName()); assertNotNull("Expcting CommonGramsFilter analysis breakdown", tokenList); assertEquals(tokenList.size(), 9); - assertToken(tokenList.get(0), new TokenInfo("the", null, "word", 0, 3, 1, new int[]{1,1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("the_quick", null, "gram", 0, 9, 1, new int[]{2,1}, null, false)); - assertToken(tokenList.get(2), new TokenInfo("quick", null, "word", 4, 9, 2, new int[]{2,2}, null, false)); - assertToken(tokenList.get(3), new TokenInfo("quick_and", null, "gram", 4, 13, 2, new int[]{3,2}, null, false)); - assertToken(tokenList.get(4), new TokenInfo("and", null, "word", 10, 13, 3, new int[]{3,3}, null, false)); - assertToken(tokenList.get(5), new TokenInfo("and_the", null, "gram", 10, 17, 3, new int[]{4,3}, null, true)); - assertToken(tokenList.get(6), new TokenInfo("the", null, "word", 14, 17, 4, new int[]{4,4}, null, false)); - assertToken(tokenList.get(7), new TokenInfo("the_dead", null, "gram", 14, 22, 4, new int[]{5,4}, null, true)); - assertToken(tokenList.get(8), new TokenInfo("dead", null, "word", 18, 22, 5, new int[]{5,5}, null, true)); - + assertToken( + tokenList.get(0), + new TokenInfo("the", null, "word", 0, 3, 1, new int[] {1, 1}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("the_quick", null, "gram", 0, 9, 1, new int[] {2, 1}, null, false)); + assertToken( + tokenList.get(2), + new TokenInfo("quick", null, "word", 4, 9, 2, new int[] {2, 2}, null, false)); + assertToken( + tokenList.get(3), + new TokenInfo("quick_and", null, "gram", 4, 13, 2, new int[] {3, 2}, null, false)); + assertToken( + tokenList.get(4), + new TokenInfo("and", null, "word", 10, 13, 3, new int[] {3, 3}, null, false)); + assertToken( + tokenList.get(5), + new TokenInfo("and_the", null, "gram", 10, 17, 3, new int[] {4, 3}, null, true)); + assertToken( + tokenList.get(6), + new TokenInfo("the", null, "word", 14, 17, 4, new int[] {4, 4}, null, false)); + assertToken( + tokenList.get(7), + new TokenInfo("the_dead", null, "gram", 14, 22, 4, new int[] {5, 4}, null, true)); + assertToken( + tokenList.get(8), + new TokenInfo("dead", null, "word", 18, 22, 5, new int[] {5, 5}, null, true)); + @SuppressWarnings({"rawtypes"}) NamedList<List<NamedList>> queryPart = type.get("query"); assertNotNull("expecting a query token analysis for field type: " + fieldType, queryPart); @@ -498,20 +714,36 @@ public void testCommonGrams() throws Exception { tokenList = queryPart.get(WhitespaceTokenizer.class.getName()); assertNotNull("Expecting WhitespaceTokenizer analysis breakdown", tokenList); assertEquals(tokenList.size(), 4); - assertToken(tokenList.get(0), new TokenInfo("and", null, "word", 0, 3, 1, new int[]{1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("the", null, "word", 4, 7, 2, new int[]{2}, null, false)); - assertToken(tokenList.get(2), new TokenInfo("dead", null, "word", 8, 12, 3, new int[]{3}, null, false)); - assertToken(tokenList.get(3), new TokenInfo("man", null, "word", 13, 16, 4, new int[]{4}, null, false)); + assertToken( + tokenList.get(0), new TokenInfo("and", null, "word", 0, 3, 1, new int[] {1}, null, false)); + assertToken( +
tokenList.get(1), new TokenInfo("the", null, "word", 4, 7, 2, new int[] {2}, null, false)); + assertToken( + tokenList.get(2), + new TokenInfo("dead", null, "word", 8, 12, 3, new int[] {3}, null, false)); + assertToken( + tokenList.get(3), + new TokenInfo("man", null, "word", 13, 16, 4, new int[] {4}, null, false)); tokenList = queryPart.get(CommonGramsQueryFilter.class.getName()); assertNotNull("Expcting CommonGramsQueryFilter analysis breakdown", tokenList); - // Hmmm... Not clear if "dead" should really be here, but it's what the filter currently produces, see: LUCENE-10007 - assertEquals(4, tokenList.size()); // LUCENE-10007 - assertToken(tokenList.get(0), new TokenInfo("and_the", null, "gram", 0, 7, 1, new int[]{2,1}, null, false)); - assertToken(tokenList.get(1), new TokenInfo("the_dead", null, "gram", 4, 12, 2, new int[]{3,2}, null, false)); - assertToken(tokenList.get(2), new TokenInfo("dead", null, "word", 8, 12, 3, new int[]{3,3}, null, false)); // LUCENE-10007 - assertToken(tokenList.get(3), new TokenInfo("man", null, "word", 13, 16, 4, new int[]{4,4}, null, false)); + // Hmmm... Not clear if "dead" should really be here, but it's what the filter currently + // produces, see: LUCENE-10007 + assertEquals(4, tokenList.size()); // LUCENE-10007 + assertToken( + tokenList.get(0), + new TokenInfo("and_the", null, "gram", 0, 7, 1, new int[] {2, 1}, null, false)); + assertToken( + tokenList.get(1), + new TokenInfo("the_dead", null, "gram", 4, 12, 2, new int[] {3, 2}, null, false)); + assertToken( + tokenList.get(2), + new TokenInfo( + "dead", null, "word", 8, 12, 3, new int[] {3, 3}, null, false)); // LUCENE-10007 + assertToken( + tokenList.get(3), + new TokenInfo("man", null, "word", 13, 16, 4, new int[] {4, 4}, null, false)); } - + @Test public void testSpatial() throws Exception { FieldAnalysisRequest request = new FieldAnalysisRequest(); @@ -519,54 +751,62 @@ public void testSpatial() throws Exception { request.setFieldValue("MULTIPOINT ((10 40), (40 30), (20 20), (30 10))"); @SuppressWarnings({"rawtypes"}) - NamedList result = handler.handleAnalysisRequest(request, h.getCore().getLatestSchema()); + NamedList result = + handler.handleAnalysisRequest(request, h.getCore().getLatestSchema()); @SuppressWarnings({"unchecked", "rawtypes"}) - NamedList> tokens = (NamedList>) - ((NamedList)result.get("field_types").get("location_rpt")).get("index"); + NamedList> tokens = + (NamedList>) + ((NamedList) result.get("field_types").get("location_rpt")).get("index"); @SuppressWarnings({"rawtypes"}) - List tokenList = tokens.get("org.apache.lucene.spatial.prefix.PrefixTreeStrategy$ShapeTokenStream"); - + List tokenList = + tokens.get("org.apache.lucene.spatial.prefix.PrefixTreeStrategy$ShapeTokenStream"); List vals = new ArrayList<>(tokenList.size()); - for(@SuppressWarnings({"rawtypes"})NamedList v : tokenList) { - vals.add( (String)v.get("text") ); + for (@SuppressWarnings({"rawtypes"}) NamedList v : tokenList) { + vals.add((String) v.get("text")); } Collections.sort(vals); - assertEquals( "[s, s7, s7w, s7w1+, s9, s9v, s9v2+, sp, spp, spp5+, sv, svk, svk6+]", vals.toString() ); + assertEquals( + "[s, s7, s7w, s7w1+, s9, s9v, s9v2+, sp, spp, spp5+, sv, svk, svk6+]", vals.toString()); } - @Test //See SOLR-8460 + @Test // See SOLR-8460 public void testCustomAttribute() throws Exception { FieldAnalysisRequest request = new FieldAnalysisRequest(); request.addFieldType("skutype1"); request.setFieldValue("hi, 3456-12 a Test"); request.setShowMatch(false); FieldType fieldType = new TextField(); - 
Analyzer analyzer = new TokenizerChain( - new TokenizerFactory(Collections.emptyMap()) { - @Override - public Tokenizer create(AttributeFactory factory) { - return new CustomTokenizer(factory); - } - }, - new TokenFilterFactory[] { - new TokenFilterFactory(Collections.emptyMap()) { + Analyzer analyzer = + new TokenizerChain( + new TokenizerFactory(Collections.emptyMap()) { @Override - public TokenStream create(TokenStream input) { - return new CustomTokenFilter(input); + public Tokenizer create(AttributeFactory factory) { + return new CustomTokenizer(factory); + } + }, + new TokenFilterFactory[] { + new TokenFilterFactory(Collections.emptyMap()) { + @Override + public TokenStream create(TokenStream input) { + return new CustomTokenFilter(input); + } } - } - } - ); + }); fieldType.setIndexAnalyzer(analyzer); @SuppressWarnings({"rawtypes"}) NamedList result = handler.analyzeValues(request, fieldType, "fieldNameUnused"); // just test that we see "900" in the flags attribute here @SuppressWarnings({"unchecked", "rawtypes"}) - List tokenInfoList = (List) result.findRecursive("index", CustomTokenFilter.class.getName()); + List tokenInfoList = + (List) result.findRecursive("index", CustomTokenFilter.class.getName()); // '1' from CustomTokenFilter plus 900 from CustomFlagsAttributeImpl. - assertEquals(901, tokenInfoList.get(0).get("org.apache.lucene.analysis.tokenattributes.FlagsAttribute#flags")); + assertEquals( + 901, + tokenInfoList + .get(0) + .get("org.apache.lucene.analysis.tokenattributes.FlagsAttribute#flags")); } @Test(expected = Exception.class) @@ -581,7 +821,7 @@ public void testNoDefaultField() throws Exception { public class CustomFlagsAttributeImpl extends FlagsAttributeImpl { @Override public void setFlags(int flags) { - super.setFlags(900 + flags);//silly modification + super.setFlags(900 + flags); // silly modification } } @@ -624,7 +864,8 @@ public CustomTokenFilter(TokenStream input) { throw new IllegalStateException("FlagsAttribute should have been added already"); } if (!(flagAtt instanceof CustomFlagsAttributeImpl)) { - throw new IllegalStateException("FlagsAttribute should be our custom " + CustomFlagsAttributeImpl.class); + throw new IllegalStateException( + "FlagsAttribute should be our custom " + CustomFlagsAttributeImpl.class); } } diff --git a/solr/core/src/test/org/apache/solr/handler/JsonLoaderTest.java b/solr/core/src/test/org/apache/solr/handler/JsonLoaderTest.java index a2bfe1cfbb8..2b420fff7b6 100644 --- a/solr/core/src/test/org/apache/solr/handler/JsonLoaderTest.java +++ b/solr/core/src/test/org/apache/solr/handler/JsonLoaderTest.java @@ -20,7 +20,6 @@ import java.util.List; import java.util.Map; import java.util.function.UnaryOperator; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; @@ -41,123 +40,121 @@ public class JsonLoaderTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeTests() throws Exception { - initCore("solrconfig.xml","schema.xml"); - } - - static String input = json("{\n" + - "\n" + - "'add': {\n" + - " 'doc': {\n" + - " 'bool': true,\n" + - " 'f0': 'v0',\n" + - " 'array': [ 'aaa', 'bbb' ]\n" + - " }\n" + - "},\n" + - "'add': {\n" + - " 'commitWithin': 1234,\n" + - " 'overwrite': false,\n" + - " 'boost': 3.45,\n" + - " 'doc': {\n" + - " 'f1': 'v1',\n" + - " 'f1': 'v2',\n" + - " 'f2': null\n" + - " }\n" + - "},\n" + - "\n" + - "'commit': {},\n" + - "'optimize': { 'waitSearcher':false, 'openSearcher':false },\n" + - "\n" + - "'delete': { 
'id':'ID' },\n" + - "'delete': { 'id':'ID', 'commitWithin':500 },\n" + - "'delete': { 'query':'QUERY' },\n" + - "'delete': { 'query':'QUERY', 'commitWithin':500 },\n" + - "'rollback': {}\n" + - "\n" + - "}\n" + - ""); - - - public void testParsing() throws Exception - { + initCore("solrconfig.xml", "schema.xml"); + } + + static String input = + json( + "{\n" + + "\n" + + "'add': {\n" + + " 'doc': {\n" + + " 'bool': true,\n" + + " 'f0': 'v0',\n" + + " 'array': [ 'aaa', 'bbb' ]\n" + + " }\n" + + "},\n" + + "'add': {\n" + + " 'commitWithin': 1234,\n" + + " 'overwrite': false,\n" + + " 'boost': 3.45,\n" + + " 'doc': {\n" + + " 'f1': 'v1',\n" + + " 'f1': 'v2',\n" + + " 'f2': null\n" + + " }\n" + + "},\n" + + "\n" + + "'commit': {},\n" + + "'optimize': { 'waitSearcher':false, 'openSearcher':false },\n" + + "\n" + + "'delete': { 'id':'ID' },\n" + + "'delete': { 'id':'ID', 'commitWithin':500 },\n" + + "'delete': { 'query':'QUERY' },\n" + + "'delete': { 'query':'QUERY', 'commitWithin':500 },\n" + + "'rollback': {}\n" + + "\n" + + "}\n" + + ""); + + public void testParsing() throws Exception { SolrQueryRequest req = req(); SolrQueryResponse rsp = new SolrQueryResponse(); BufferingRequestProcessor p = new BufferingRequestProcessor(null); JsonLoader loader = new JsonLoader(); loader.load(req, rsp, new ContentStreamBase.StringStream(input), p); - assertEquals( 2, p.addCommands.size() ); - + assertEquals(2, p.addCommands.size()); + AddUpdateCommand add = p.addCommands.get(0); - assertEquals("SolrInputDocument(fields: [bool=true, f0=v0, array=[aaa, bbb]])", add.solrDoc.toString()); + assertEquals( + "SolrInputDocument(fields: [bool=true, f0=v0, array=[aaa, bbb]])", add.solrDoc.toString()); - // + // add = p.addCommands.get(1); assertEquals("SolrInputDocument(fields: [f1=[v1, v2], f2=null])", add.solrDoc.toString()); assertFalse(add.overwrite); // parse the commit commands - assertEquals( 2, p.commitCommands.size() ); - CommitUpdateCommand commit = p.commitCommands.get( 0 ); - assertFalse( commit.optimize ); - assertTrue( commit.waitSearcher ); - assertTrue( commit.openSearcher ); - - commit = p.commitCommands.get( 1 ); - assertTrue( commit.optimize ); - assertFalse( commit.waitSearcher ); - assertFalse( commit.openSearcher ); + assertEquals(2, p.commitCommands.size()); + CommitUpdateCommand commit = p.commitCommands.get(0); + assertFalse(commit.optimize); + assertTrue(commit.waitSearcher); + assertTrue(commit.openSearcher); + commit = p.commitCommands.get(1); + assertTrue(commit.optimize); + assertFalse(commit.waitSearcher); + assertFalse(commit.openSearcher); // DELETE COMMANDS - assertEquals( 4, p.deleteCommands.size() ); - DeleteUpdateCommand delete = p.deleteCommands.get( 0 ); - assertEquals( delete.id, "ID" ); - assertNull( delete.query ); - assertEquals( delete.commitWithin, -1); - - delete = p.deleteCommands.get( 1 ); - assertEquals( delete.id, "ID" ); - assertNull( delete.query ); - assertEquals( delete.commitWithin, 500); - - delete = p.deleteCommands.get( 2 ); - assertNull( delete.id ); - assertEquals( delete.query, "QUERY" ); - assertEquals( delete.commitWithin, -1); - - delete = p.deleteCommands.get( 3 ); - assertNull( delete.id ); - assertEquals( delete.query, "QUERY" ); - assertEquals( delete.commitWithin, 500); + assertEquals(4, p.deleteCommands.size()); + DeleteUpdateCommand delete = p.deleteCommands.get(0); + assertEquals(delete.id, "ID"); + assertNull(delete.query); + assertEquals(delete.commitWithin, -1); + + delete = p.deleteCommands.get(1); + assertEquals(delete.id, "ID"); + 
assertNull(delete.query); + assertEquals(delete.commitWithin, 500); + + delete = p.deleteCommands.get(2); + assertNull(delete.id); + assertEquals(delete.query, "QUERY"); + assertEquals(delete.commitWithin, -1); + + delete = p.deleteCommands.get(3); + assertNull(delete.id); + assertEquals(delete.query, "QUERY"); + assertEquals(delete.commitWithin, 500); // ROLLBACK COMMANDS - assertEquals( 1, p.rollbackCommands.size() ); + assertEquals(1, p.rollbackCommands.size()); req.close(); } - - public void testSimpleFormat() throws Exception - { + public void testSimpleFormat() throws Exception { String str = "[{'id':'1'},{'id':'2'}]".replace('\'', '"'); - SolrQueryRequest req = req("commitWithin","100", "overwrite","false"); + SolrQueryRequest req = req("commitWithin", "100", "overwrite", "false"); SolrQueryResponse rsp = new SolrQueryResponse(); BufferingRequestProcessor p = new BufferingRequestProcessor(null); JsonLoader loader = new JsonLoader(); loader.load(req, rsp, new ContentStreamBase.StringStream(str), p); - assertEquals( 2, p.addCommands.size() ); + assertEquals(2, p.addCommands.size()); AddUpdateCommand add = p.addCommands.get(0); SolrInputDocument d = add.solrDoc; - SolrInputField f = d.getField( "id" ); + SolrInputField f = d.getField("id"); assertEquals("1", f.getValue()); assertEquals(add.commitWithin, 100); assertFalse(add.overwrite); add = p.addCommands.get(1); d = add.solrDoc; - f = d.getField( "id" ); + f = d.getField("id"); assertEquals("2", f.getValue()); assertEquals(add.commitWithin, 100); assertFalse(add.overwrite); @@ -172,16 +169,18 @@ public void testInvalidJsonProducesBadRequestSolrException() throws Exception { JsonLoader loader = new JsonLoader(); String invalidJsonString = "}{"; - SolrException ex = expectThrows(SolrException.class, () -> { - loader.load(req(), rsp, new ContentStreamBase.StringStream(invalidJsonString), p); - }); + SolrException ex = + expectThrows( + SolrException.class, + () -> { + loader.load(req(), rsp, new ContentStreamBase.StringStream(invalidJsonString), p); + }); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); assertTrue(ex.getMessage().contains("Cannot parse")); assertTrue(ex.getMessage().contains("JSON")); } - public void testSimpleFormatInAdd() throws Exception - { + public void testSimpleFormatInAdd() throws Exception { String str = "{'add':[{'id':'1'},{'id':'2'}]}".replace('\'', '"'); SolrQueryRequest req = req(); SolrQueryResponse rsp = new SolrQueryResponse(); @@ -189,18 +188,18 @@ public void testSimpleFormatInAdd() throws Exception JsonLoader loader = new JsonLoader(); loader.load(req, rsp, new ContentStreamBase.StringStream(str), p); - assertEquals( 2, p.addCommands.size() ); + assertEquals(2, p.addCommands.size()); AddUpdateCommand add = p.addCommands.get(0); SolrInputDocument d = add.solrDoc; - SolrInputField f = d.getField( "id" ); + SolrInputField f = d.getField("id"); assertEquals("1", f.getValue()); assertEquals(add.commitWithin, -1); assertTrue(add.overwrite); add = p.addCommands.get(1); d = add.solrDoc; - f = d.getField( "id" ); + f = d.getField("id"); assertEquals("2", f.getValue()); assertEquals(add.commitWithin, -1); assertTrue(add.overwrite); @@ -213,26 +212,23 @@ public void testFieldValueOrdering() throws Exception { final String post = "},{'id':'2'}]}"; // list - checkFieldValueOrdering((pre+ "'f':[45,67,89]" +post) - .replace('\'', '"') - ); + checkFieldValueOrdering((pre + "'f':[45,67,89]" + post).replace('\'', '"')); // dup fieldname keys - checkFieldValueOrdering((pre+ "'f':45,'f':67,'f':89" 
+post) - .replace('\'', '"') - ); + checkFieldValueOrdering((pre + "'f':45,'f':67,'f':89" + post).replace('\'', '"')); } + private void checkFieldValueOrdering(String rawJson) throws Exception { SolrQueryRequest req = req(); SolrQueryResponse rsp = new SolrQueryResponse(); BufferingRequestProcessor p = new BufferingRequestProcessor(null); JsonLoader loader = new JsonLoader(); loader.load(req, rsp, new ContentStreamBase.StringStream(rawJson), p); - assertEquals( 2, p.addCommands.size() ); + assertEquals(2, p.addCommands.size()); SolrInputDocument d = p.addCommands.get(0).solrDoc; assertEquals(2, d.getFieldNames().size()); assertEquals("1", d.getFieldValue("id")); - assertArrayEquals(new Object[] {45L, 67L, 89L} , d.getFieldValues("f").toArray()); + assertArrayEquals(new Object[] {45L, 67L, 89L}, d.getFieldValues("f").toArray()); d = p.addCommands.get(1).solrDoc; assertEquals(1, d.getFieldNames().size()); @@ -242,55 +238,52 @@ private void checkFieldValueOrdering(String rawJson) throws Exception { } public void testMultipleDocsWithoutArray() throws Exception { - String doc = "\n" + - "\n" + - "{\"f1\": 1111 }\n" + - "\n" + - "{\"f1\": 2222 }\n"; - SolrQueryRequest req = req("srcField","_src_"); - req.getContext().put("path","/update/json/docs"); + String doc = "\n" + "\n" + "{\"f1\": 1111 }\n" + "\n" + "{\"f1\": 2222 }\n"; + SolrQueryRequest req = req("srcField", "_src_"); + req.getContext().put("path", "/update/json/docs"); SolrQueryResponse rsp = new SolrQueryResponse(); BufferingRequestProcessor p = new BufferingRequestProcessor(null); JsonLoader loader = new JsonLoader(); loader.load(req, rsp, new ContentStreamBase.StringStream(doc), p); - assertEquals( 2, p.addCommands.size() ); + assertEquals(2, p.addCommands.size()); } - public void testJsonDocFormat() throws Exception{ + public void testJsonDocFormat() throws Exception { String doc; SolrQueryRequest req; SolrQueryResponse rsp; BufferingRequestProcessor p; JsonLoader loader; - doc = "\n" + - "\n" + - "{\"bool\": true,\n" + - " \"f0\": \"v0\",\n" + - " \"f2\": {\n" + - " \t \"boost\": 2.3,\n" + - " \t \"value\": \"test\"\n" + - " \t },\n" + - "\"array\": [ \"aaa\", \"bbb\" ],\n" + - "\"boosted\": {\n" + - " \t \"boost\": 6.7,\n" + - " \t \"value\": [ \"aaa\", \"bbb\" ]\n" + - " \t }\n" + - " }\n" + - "\n" + - "\n" + - " {\"f1\": \"v1\",\n" + - " \"f2\": \"v2\",\n" + - " \"f3\": null\n" + - " }\n"; - req = req("srcField","_src_"); - req.getContext().put("path","/update/json/docs"); + doc = + "\n" + + "\n" + + "{\"bool\": true,\n" + + " \"f0\": \"v0\",\n" + + " \"f2\": {\n" + + " \t \"boost\": 2.3,\n" + + " \t \"value\": \"test\"\n" + + " \t },\n" + + "\"array\": [ \"aaa\", \"bbb\" ],\n" + + "\"boosted\": {\n" + + " \t \"boost\": 6.7,\n" + + " \t \"value\": [ \"aaa\", \"bbb\" ]\n" + + " \t }\n" + + " }\n" + + "\n" + + "\n" + + " {\"f1\": \"v1\",\n" + + " \"f2\": \"v2\",\n" + + " \"f3\": null\n" + + " }\n"; + req = req("srcField", "_src_"); + req.getContext().put("path", "/update/json/docs"); rsp = new SolrQueryResponse(); p = new BufferingRequestProcessor(null); loader = new JsonLoader(); loader.load(req, rsp, new ContentStreamBase.StringStream(doc), p); - assertEquals( 2, p.addCommands.size() ); + assertEquals(2, p.addCommands.size()); String content = (String) p.addCommands.get(0).solrDoc.getFieldValue("_src_"); assertNotNull(content); @@ -308,15 +301,15 @@ public void testJsonDocFormat() throws Exception{ assertEquals("v2", obj.get("f2")); assertTrue(obj.containsKey("f3")); - //TODO new test method + // TODO new test method doc = 
"[{'id':'1'},{'id':'2'}]".replace('\'', '"'); - req = req("srcField","_src_"); - req.getContext().put("path","/update/json/docs"); + req = req("srcField", "_src_"); + req.getContext().put("path", "/update/json/docs"); rsp = new SolrQueryResponse(); p = new BufferingRequestProcessor(null); loader = new JsonLoader(); loader.load(req, rsp, new ContentStreamBase.StringStream(doc), p); - assertEquals( 2, p.addCommands.size() ); + assertEquals(2, p.addCommands.size()); content = (String) p.addCommands.get(0).solrDoc.getFieldValue("_src_"); assertNotNull(content); @@ -327,61 +320,73 @@ public void testJsonDocFormat() throws Exception{ obj = (Map) Utils.fromJSONString(content); assertEquals("2", obj.get("id")); - //TODO new test method - String json = "{a:{" + - "b:[{c:c1, e:e1},{c:c2, e :e2, d:{p:q}}]," + - "x:y" + - "}}"; - req = req("split", "/|/a/b" ); - req.getContext().put("path","/update/json/docs"); + // TODO new test method + String json = "{a:{" + "b:[{c:c1, e:e1},{c:c2, e :e2, d:{p:q}}]," + "x:y" + "}}"; + req = req("split", "/|/a/b"); + req.getContext().put("path", "/update/json/docs"); rsp = new SolrQueryResponse(); p = new BufferingRequestProcessor(null); loader = new JsonLoader(); loader.load(req, rsp, new ContentStreamBase.StringStream(json), p); - assertEquals( 1, p.addCommands.size() ); - assertEquals("SolrInputDocument(fields: [" + - "b=[" + - "SolrInputDocument(fields: [c=c1, e=e1]), " + - "SolrInputDocument(fields: [c=c2, e=e2, d.p=q])], " + - "a.x=y" + - "])", p.addCommands.get(0).solrDoc.toString()); - } - - private static final String PARENT_TWO_CHILDREN_JSON = "{\n" + - " \"id\": \"1\",\n" + - " \"name\": \"i am the parent\",\n" + - " \"cat\": \"parent\",\n" + - " \"children\": [\n" + - " {\n" + - " \"id\": \"1.1\",\n" + - " \"name\": \"i am the 1st child\",\n" + - " \"cat\": \"child\"\n" + - " },\n" + - " {\n" + - " \"id\": \"1.2\",\n" + - " \"name\": \"i am the 2nd child\",\n" + - " \"cat\": \"child\",\n" + - " \"test_s\": \"test-new-label\",\n" + - " \"grandchildren\": [\n" + - " {\n" + - " \"id\": \"1.2.1\",\n" + - " \"name\": \"i am the grandchild\",\n" + - " \"cat\": \"grandchild\"\n" + - " }\n" + - " ]\n" + - " }\n" + - " ]\n" + - "}"; - - private static final String[] PARENT_TWO_CHILDREN_PARAMS = new String[] { "split", "/|/children|/children/grandchildren", - "f","$FQN:/**", - "f", "id:/children/id", - "f", "/name", - "f", "/children/name", - "f", "cat:/children/cat", - "f", "id:/children/grandchildren/id", - "f", "name:/children/grandchildren/name", - "f", "cat:/children/grandchildren/cat"}; + assertEquals(1, p.addCommands.size()); + assertEquals( + "SolrInputDocument(fields: [" + + "b=[" + + "SolrInputDocument(fields: [c=c1, e=e1]), " + + "SolrInputDocument(fields: [c=c2, e=e2, d.p=q])], " + + "a.x=y" + + "])", + p.addCommands.get(0).solrDoc.toString()); + } + + private static final String PARENT_TWO_CHILDREN_JSON = + "{\n" + + " \"id\": \"1\",\n" + + " \"name\": \"i am the parent\",\n" + + " \"cat\": \"parent\",\n" + + " \"children\": [\n" + + " {\n" + + " \"id\": \"1.1\",\n" + + " \"name\": \"i am the 1st child\",\n" + + " \"cat\": \"child\"\n" + + " },\n" + + " {\n" + + " \"id\": \"1.2\",\n" + + " \"name\": \"i am the 2nd child\",\n" + + " \"cat\": \"child\",\n" + + " \"test_s\": \"test-new-label\",\n" + + " \"grandchildren\": [\n" + + " {\n" + + " \"id\": \"1.2.1\",\n" + + " \"name\": \"i am the grandchild\",\n" + + " \"cat\": \"grandchild\"\n" + + " }\n" + + " ]\n" + + " }\n" + + " ]\n" + + "}"; + + private static final String[] PARENT_TWO_CHILDREN_PARAMS = 
+ new String[] { + "split", + "/|/children|/children/grandchildren", + "f", + "$FQN:/**", + "f", + "id:/children/id", + "f", + "/name", + "f", + "/children/name", + "f", + "cat:/children/cat", + "f", + "id:/children/grandchildren/id", + "f", + "name:/children/grandchildren/name", + "f", + "cat:/children/grandchildren/cat" + }; @Test public void testFewParentsJsonDoc() throws Exception { @@ -390,17 +395,17 @@ public void testFewParentsJsonDoc() throws Exception { SolrQueryRequest req; SolrQueryResponse rsp; BufferingRequestProcessor p; - JsonLoader loader;//multichild test case + JsonLoader loader; // multichild test case final boolean array = random().nextBoolean(); StringBuilder b = new StringBuilder(); if (array) { b.append("["); } final int passes = atLeast(2); - for (int i=1;i<=passes;i++){ - b.append(json.replace("1",""+i)); + for (int i = 1; i <= passes; i++) { + b.append(json.replace("1", "" + i)); if (array) { - b.append(i s = (v)-> v.replace("1",""+ii); - final SolrInputDocument parent = p.addCommands.get(i-1).solrDoc; - assertOnlyValue(s.apply("1"), parent,"id"); + UnaryOperator s = (v) -> v.replace("1", "" + ii); + final SolrInputDocument parent = p.addCommands.get(i - 1).solrDoc; + assertOnlyValue(s.apply("1"), parent, "id"); assertOnlyValue("i am the parent", parent, "name"); assertOnlyValue("parent", parent, "cat"); @@ -426,7 +431,7 @@ public void testFewParentsJsonDoc() throws Exception { final SolrInputDocument child1 = childDocs1.get(0); assertOnlyValue(s.apply("1.1"), child1, "id"); assertOnlyValue(s.apply("i am the 1st child"), child1, "name"); - assertOnlyValue("child", child1,"cat"); + assertOnlyValue("child", child1, "cat"); } { final SolrInputDocument child2 = childDocs1.get(1); @@ -440,7 +445,7 @@ public void testFewParentsJsonDoc() throws Exception { assertEquals(1, childDocs2.size()); final SolrInputDocument grandChild = childDocs2.get(0); - assertOnlyValue(s.apply("1.2.1"), grandChild,"id"); + assertOnlyValue(s.apply("1.2.1"), grandChild, "id"); assertOnlyValue("i am the grandchild", grandChild, "name"); assertOnlyValue("grandchild", grandChild, "cat"); } @@ -459,7 +464,7 @@ public void testAtomicUpdateFieldValue() throws Exception { JsonLoader loader = new JsonLoader(); loader.load(req, rsp, new ContentStreamBase.StringStream(str), p); - assertEquals( 1, p.addCommands.size() ); + assertEquals(1, p.addCommands.size()); AddUpdateCommand add = p.addCommands.get(0); assertEquals(add.commitWithin, -1); @@ -471,12 +476,12 @@ public void testAtomicUpdateFieldValue() throws Exception { @Test public void testNullValues() throws Exception { - updateJ( json( "[{'id':'10','foo_s':null,'foo2_s':['hi',null,'there']}]" ), params("commit","true")); - assertJQ(req("q","id:10", "fl","foo_s,foo2_s") - ,"/response/docs/[0]=={'foo2_s':['hi','there']}" - ); + updateJ( + json("[{'id':'10','foo_s':null,'foo2_s':['hi',null,'there']}]"), params("commit", "true")); + assertJQ( + req("q", "id:10", "fl", "foo_s,foo2_s"), "/response/docs/[0]=={'foo2_s':['hi','there']}"); } - + @Test public void testBooleanValuesInAdd() throws Exception { String str = "{'add':[{'id':'1','b1':true,'b2':false,'b3':[false,true]}]}".replace('\'', '"'); @@ -495,9 +500,9 @@ public void testBooleanValuesInAdd() throws Exception { f = d.getField("b2"); assertEquals(Boolean.FALSE, f.getValue()); f = d.getField("b3"); - assertEquals(2, ((List)f.getValue()).size()); - assertEquals(Boolean.FALSE, ((List)f.getValue()).get(0)); - assertEquals(Boolean.TRUE, ((List)f.getValue()).get(1)); + assertEquals(2, ((List) 
f.getValue()).size()); + assertEquals(Boolean.FALSE, ((List) f.getValue()).get(0)); + assertEquals(Boolean.TRUE, ((List) f.getValue()).get(1)); req.close(); } @@ -520,17 +525,18 @@ public void testIntegerValuesInAdd() throws Exception { f = d.getField("i2"); assertEquals(-5123456789L, f.getValue()); f = d.getField("i3"); - assertEquals(2, ((List)f.getValue()).size()); - assertEquals(0L, ((List)f.getValue()).get(0)); - assertEquals(1L, ((List)f.getValue()).get(1)); + assertEquals(2, ((List) f.getValue()).size()); + assertEquals(0L, ((List) f.getValue()).get(0)); + assertEquals(1L, ((List) f.getValue()).get(1)); req.close(); } - @Test public void testDecimalValuesInAdd() throws Exception { - String str = "{'add':[{'id':'1','d1':256.78,'d2':-5123456789.0,'d3':0.0,'d3':1.0,'d4':1.7E-10}]}".replace('\'', '"'); + String str = + "{'add':[{'id':'1','d1':256.78,'d2':-5123456789.0,'d3':0.0,'d3':1.0,'d4':1.7E-10}]}" + .replace('\'', '"'); SolrQueryRequest req = req(); SolrQueryResponse rsp = new SolrQueryResponse(); BufferingRequestProcessor p = new BufferingRequestProcessor(null); @@ -546,8 +552,8 @@ public void testDecimalValuesInAdd() throws Exception { f = d.getField("d2"); assertEquals(-5123456789.0, f.getValue()); f = d.getField("d3"); - assertEquals(2, ((List)f.getValue()).size()); - assertTrue(((List)f.getValue()).contains(0.0)); + assertEquals(2, ((List) f.getValue()).size()); + assertTrue(((List) f.getValue()).contains(0.0)); assertTrue(((List) f.getValue()).contains(1.0)); f = d.getField("d4"); assertEquals(1.7E-10, f.getValue()); @@ -557,9 +563,11 @@ public void testDecimalValuesInAdd() throws Exception { @Test public void testBigDecimalValuesInAdd() throws Exception { - String str = ("{'add':[{'id':'1','bd1':0.12345678901234567890123456789012345," - + "'bd2':12345678901234567890.12345678901234567890,'bd3':0.012345678901234567890123456789012345," - + "'bd3':123456789012345678900.012345678901234567890}]}").replace('\'', '"'); + String str = + ("{'add':[{'id':'1','bd1':0.12345678901234567890123456789012345," + + "'bd2':12345678901234567890.12345678901234567890,'bd3':0.012345678901234567890123456789012345," + + "'bd3':123456789012345678900.012345678901234567890}]}") + .replace('\'', '"'); SolrQueryRequest req = req(); SolrQueryResponse rsp = new SolrQueryResponse(); BufferingRequestProcessor p = new BufferingRequestProcessor(null); @@ -570,24 +578,26 @@ public void testBigDecimalValuesInAdd() throws Exception { AddUpdateCommand add = p.addCommands.get(0); SolrInputDocument d = add.solrDoc; - SolrInputField f = d.getField("bd1"); + SolrInputField f = d.getField("bd1"); assertTrue(f.getValue() instanceof String); assertEquals("0.12345678901234567890123456789012345", f.getValue()); f = d.getField("bd2"); assertTrue(f.getValue() instanceof String); assertEquals("12345678901234567890.12345678901234567890", f.getValue()); f = d.getField("bd3"); - assertEquals(2, ((List)f.getValue()).size()); - assertTrue(((List)f.getValue()).contains("0.012345678901234567890123456789012345")); - assertTrue(((List)f.getValue()).contains("123456789012345678900.012345678901234567890")); + assertEquals(2, ((List) f.getValue()).size()); + assertTrue(((List) f.getValue()).contains("0.012345678901234567890123456789012345")); + assertTrue(((List) f.getValue()).contains("123456789012345678900.012345678901234567890")); req.close(); } @Test public void testBigIntegerValuesInAdd() throws Exception { - String str = ("{'add':[{'id':'1','bi1':123456789012345678901,'bi2':1098765432109876543210," - + 
"'bi3':[1234567890123456789012,10987654321098765432109]}]}").replace('\'', '"'); + String str = + ("{'add':[{'id':'1','bi1':123456789012345678901,'bi2':1098765432109876543210," + + "'bi3':[1234567890123456789012,10987654321098765432109]}]}") + .replace('\'', '"'); SolrQueryRequest req = req(); SolrQueryResponse rsp = new SolrQueryResponse(); BufferingRequestProcessor p = new BufferingRequestProcessor(null); @@ -605,27 +615,29 @@ public void testBigIntegerValuesInAdd() throws Exception { assertTrue(f.getValue() instanceof String); assertEquals("1098765432109876543210", f.getValue()); f = d.getField("bi3"); - assertEquals(2, ((List)f.getValue()).size()); - assertTrue(((List)f.getValue()).contains("1234567890123456789012")); - assertTrue(((List)f.getValue()).contains("10987654321098765432109")); + assertEquals(2, ((List) f.getValue()).size()); + assertTrue(((List) f.getValue()).contains("1234567890123456789012")); + assertTrue(((List) f.getValue()).contains("10987654321098765432109")); req.close(); } - @Test public void testAddNonStringValues() throws Exception { - // BigInteger and BigDecimal should be typed as strings, since there is no direct support for them - updateJ(json( "[{'id':'1','boolean_b':false,'long_l':19,'double_d':18.6,'big_integer_s':12345678901234567890," - +" 'big_decimal_s':0.1234567890123456789012345}]" ), params("commit","true")); - assertJQ(req("q","id:1", "fl","boolean_b,long_l,double_d,big_integer_s,big_decimal_s") - ,"/response/docs/[0]=={'boolean_b':[false],'long_l':[19],'double_d':[18.6]," - +"'big_integer_s':['12345678901234567890']," - +"'big_decimal_s':['0.1234567890123456789012345']}]}" - ); + // BigInteger and BigDecimal should be typed as strings, since there is no direct support for + // them + updateJ( + json( + "[{'id':'1','boolean_b':false,'long_l':19,'double_d':18.6,'big_integer_s':12345678901234567890," + + " 'big_decimal_s':0.1234567890123456789012345}]"), + params("commit", "true")); + assertJQ( + req("q", "id:1", "fl", "boolean_b,long_l,double_d,big_integer_s,big_decimal_s"), + "/response/docs/[0]=={'boolean_b':[false],'long_l':[19],'double_d':[18.6]," + + "'big_integer_s':['12345678901234567890']," + + "'big_decimal_s':['0.1234567890123456789012345']}]}"); } - @Test public void testAddBigIntegerValueToTrieField() throws Exception { // Adding a BigInteger to a long field should fail @@ -633,16 +645,22 @@ public void testAddBigIntegerValueToTrieField() throws Exception { ignoreException("big_integer_t"); - SolrException ex = expectThrows(SolrException.class, () -> { - updateJ(json( "[{'id':'1','big_integer_tl':12345678901234567890}]" ), null); - }); + SolrException ex = + expectThrows( + SolrException.class, + () -> { + updateJ(json("[{'id':'1','big_integer_tl':12345678901234567890}]"), null); + }); assertTrue(ex.getCause() instanceof NumberFormatException); // Adding a BigInteger to an integer field should fail // BigInteger.intValue() returns only the low-order 32 bits. 
- ex = expectThrows(SolrException.class, () -> { - updateJ(json( "[{'id':'1','big_integer_ti':12345678901234567890}]" ), null); - }); + ex = + expectThrows( + SolrException.class, + () -> { + updateJ(json("[{'id':'1','big_integer_ti':12345678901234567890}]"), null); + }); assertTrue(ex.getCause() instanceof NumberFormatException); unIgnoreException("big_integer_t"); @@ -651,30 +669,33 @@ public void testAddBigIntegerValueToTrieField() throws Exception { @Test public void testAddBigDecimalValueToTrieField() throws Exception { // Adding a BigDecimal to a double field should succeed by reducing precision - updateJ(json( "[{'id':'1','big_decimal_td':100000000000000000000000000001234567890.0987654321}]" ), - params("commit", "true")); - assertJQ(req("q","id:1", "fl","big_decimal_td"), - "/response/docs/[0]=={'big_decimal_td':[1.0E38]}" - ); + updateJ( + json("[{'id':'1','big_decimal_td':100000000000000000000000000001234567890.0987654321}]"), + params("commit", "true")); + assertJQ( + req("q", "id:1", "fl", "big_decimal_td"), + "/response/docs/[0]=={'big_decimal_td':[1.0E38]}"); // Adding a BigDecimal to a float field should succeed by reducing precision - updateJ(json( "[{'id':'2','big_decimal_tf':100000000000000000000000000001234567890.0987654321}]" ), - params("commit", "true")); - assertJQ(req("q","id:2", "fl","big_decimal_tf"), - "/response/docs/[0]=={'big_decimal_tf':[1.0E38]}" - ); + updateJ( + json("[{'id':'2','big_decimal_tf':100000000000000000000000000001234567890.0987654321}]"), + params("commit", "true")); + assertJQ( + req("q", "id:2", "fl", "big_decimal_tf"), + "/response/docs/[0]=={'big_decimal_tf':[1.0E38]}"); } // The delete syntax was both extended for simplification in 4.0 @Test public void testDeleteSyntax() throws Exception { - String str = "{'delete':10" - +"\n ,'delete':'20'" - +"\n ,'delete':['30','40']" - +"\n ,'delete':{'id':50, '_version_':12345}" - +"\n ,'delete':[{'id':60, '_version_':67890}, {'id':70, '_version_':77777}, {'query':'id:80', '_version_':88888}]" - +"\n ,'delete':{'id':90, '_route_':'shard1', '_version_':88888}" - + "\n}\n"; + String str = + "{'delete':10" + + "\n ,'delete':'20'" + + "\n ,'delete':['30','40']" + + "\n ,'delete':{'id':50, '_version_':12345}" + + "\n ,'delete':[{'id':60, '_version_':67890}, {'id':70, '_version_':77777}, {'query':'id:80', '_version_':88888}]" + + "\n ,'delete':{'id':90, '_route_':'shard1', '_version_':88888}" + + "\n}\n"; str = str.replace('\'', '"'); SolrQueryRequest req = req(); SolrQueryResponse rsp = new SolrQueryResponse(); @@ -683,45 +704,45 @@ public void testDeleteSyntax() throws Exception { loader.load(req, rsp, new ContentStreamBase.StringStream(str), p); // DELETE COMMANDS - assertEquals( 9, p.deleteCommands.size() ); - DeleteUpdateCommand delete = p.deleteCommands.get( 0 ); - assertEquals( delete.id, "10" ); - assertEquals( delete.query, null ); - assertEquals( delete.commitWithin, -1); - - delete = p.deleteCommands.get( 1 ); - assertEquals( delete.id, "20" ); - assertEquals( delete.query, null ); - assertEquals( delete.commitWithin, -1); - - delete = p.deleteCommands.get( 2 ); - assertEquals( delete.id, "30" ); - assertEquals( delete.query, null ); - assertEquals( delete.commitWithin, -1); - - delete = p.deleteCommands.get( 3 ); - assertEquals( delete.id, "40" ); - assertEquals( delete.query, null ); - assertEquals( delete.commitWithin, -1); - - delete = p.deleteCommands.get( 4 ); - assertEquals( delete.id, "50" ); - assertEquals( delete.query, null ); - assertEquals( delete.getVersion(), 12345L); - - delete = 
p.deleteCommands.get( 5 ); - assertEquals( delete.id, "60" ); - assertEquals( delete.query, null ); - assertEquals( delete.getVersion(), 67890L); - - delete = p.deleteCommands.get( 6 ); - assertEquals( delete.id, "70" ); - assertEquals( delete.query, null ); - assertEquals( delete.getVersion(), 77777L); - - delete = p.deleteCommands.get( 7 ); - assertEquals( delete.id, null ); - assertEquals( delete.query, "id:80" ); + assertEquals(9, p.deleteCommands.size()); + DeleteUpdateCommand delete = p.deleteCommands.get(0); + assertEquals(delete.id, "10"); + assertEquals(delete.query, null); + assertEquals(delete.commitWithin, -1); + + delete = p.deleteCommands.get(1); + assertEquals(delete.id, "20"); + assertEquals(delete.query, null); + assertEquals(delete.commitWithin, -1); + + delete = p.deleteCommands.get(2); + assertEquals(delete.id, "30"); + assertEquals(delete.query, null); + assertEquals(delete.commitWithin, -1); + + delete = p.deleteCommands.get(3); + assertEquals(delete.id, "40"); + assertEquals(delete.query, null); + assertEquals(delete.commitWithin, -1); + + delete = p.deleteCommands.get(4); + assertEquals(delete.id, "50"); + assertEquals(delete.query, null); + assertEquals(delete.getVersion(), 12345L); + + delete = p.deleteCommands.get(5); + assertEquals(delete.id, "60"); + assertEquals(delete.query, null); + assertEquals(delete.getVersion(), 67890L); + + delete = p.deleteCommands.get(6); + assertEquals(delete.id, "70"); + assertEquals(delete.query, null); + assertEquals(delete.getVersion(), 77777L); + + delete = p.deleteCommands.get(7); + assertEquals(delete.id, null); + assertEquals(delete.query, "id:80"); assertEquals(delete.getVersion(), 88888L); delete = p.deleteCommands.get(8); @@ -733,22 +754,23 @@ public void testDeleteSyntax() throws Exception { req.close(); } - private static final String SIMPLE_ANON_CHILD_DOCS_JSON = "{\n" + - " \"add\": {\n" + - " \"doc\": {\n" + - " \"id\": \"1\",\n" + - " \"_childDocuments_\": [\n" + - " {\n" + - " \"id\": \"2\"\n" + - " },\n" + - " {\n" + - " \"id\": \"3\",\n" + - " \"foo_i\": [666,777]\n" + - " }\n" + - " ]\n" + - " }\n" + - " }\n" + - "}"; + private static final String SIMPLE_ANON_CHILD_DOCS_JSON = + "{\n" + + " \"add\": {\n" + + " \"doc\": {\n" + + " \"id\": \"1\",\n" + + " \"_childDocuments_\": [\n" + + " {\n" + + " \"id\": \"2\"\n" + + " },\n" + + " {\n" + + " \"id\": \"3\",\n" + + " \"foo_i\": [666,777]\n" + + " }\n" + + " ]\n" + + " }\n" + + " }\n" + + "}"; @Test public void testSimpleAnonymousChildDocs() throws Exception { @@ -760,25 +782,26 @@ public void testSimpleChildDocs() throws Exception { checkTwoAnonymousChildDocs(SIMPLE_ANON_CHILD_DOCS_JSON, false); } - private static final String DUP_KEYS_ANON_CHILD_DOCS_JSON = "{\n" + - " \"add\": {\n" + - " \"doc\": {\n" + - " \"_childDocuments_\": [\n" + - " {\n" + - " \"id\": \"2\"\n" + - " }\n" + - " ],\n" + - " \"id\": \"1\",\n" + - " \"_childDocuments_\": [\n" + - " {\n" + - " \"id\": \"3\",\n" + - " \"foo_i\": 666,\n" + - " \"foo_i\": 777\n" + - " }\n" + - " ]\n" + - " }\n" + - " }\n" + - "}"; + private static final String DUP_KEYS_ANON_CHILD_DOCS_JSON = + "{\n" + + " \"add\": {\n" + + " \"doc\": {\n" + + " \"_childDocuments_\": [\n" + + " {\n" + + " \"id\": \"2\"\n" + + " }\n" + + " ],\n" + + " \"id\": \"1\",\n" + + " \"_childDocuments_\": [\n" + + " {\n" + + " \"id\": \"3\",\n" + + " \"foo_i\": 666,\n" + + " \"foo_i\": 777\n" + + " }\n" + + " ]\n" + + " }\n" + + " }\n" + + "}"; @Test public void testDupKeysAnonymousChildDocs() throws Exception { @@ -794,26 +817,27 @@ public 
void testDupKeysChildDocs() throws Exception { public void testChildDocWithoutId() throws Exception { final String json = DUP_KEYS_ANON_CHILD_DOCS_JSON.replace("\"id\": \"3\",\n", ""); assert !json.equals(DUP_KEYS_ANON_CHILD_DOCS_JSON); - checkTwoAnonymousChildDocs( - json, false); + checkTwoAnonymousChildDocs(json, false); } - // rawJsonStr has "_childDocuments_" key. if anonChildDocs then we want to test with something else. - private void checkTwoAnonymousChildDocs(String rawJsonStr, boolean anonChildDocs) throws Exception { + // rawJsonStr has "_childDocuments_" key. if anonChildDocs then we want to test with something + // else. + private void checkTwoAnonymousChildDocs(String rawJsonStr, boolean anonChildDocs) + throws Exception { if (!anonChildDocs) { rawJsonStr = rawJsonStr.replaceAll("_childDocuments_", "childLabel"); } - SolrQueryRequest req = req("commit","true"); + SolrQueryRequest req = req("commit", "true"); SolrQueryResponse rsp = new SolrQueryResponse(); BufferingRequestProcessor p = new BufferingRequestProcessor(null); JsonLoader loader = new JsonLoader(); loader.load(req, rsp, new ContentStreamBase.StringStream(rawJsonStr), p); - assertEquals( 1, p.addCommands.size() ); + assertEquals(1, p.addCommands.size()); AddUpdateCommand add = p.addCommands.get(0); SolrInputDocument d = add.solrDoc; - SolrInputField f = d.getField( "id" ); + SolrInputField f = d.getField("id"); assertEquals("1", f.getValue()); SolrInputDocument cd; @@ -822,49 +846,50 @@ private void checkTwoAnonymousChildDocs(String rawJsonStr, boolean anonChildDocs } else { cd = (SolrInputDocument) (d.getField("childLabel")).getFirstValue(); } - SolrInputField cf = cd.getField( "id" ); + SolrInputField cf = cd.getField("id"); assertEquals("2", cf.getValue()); if (anonChildDocs) { cd = d.getChildDocuments().get(1); } else { - cd = (SolrInputDocument)((List)(d.getField("childLabel")).getValue()).get(1); + cd = (SolrInputDocument) ((List) (d.getField("childLabel")).getValue()).get(1); } - cf = cd.getField( "id" ); + cf = cd.getField("id"); if (rawJsonStr.contains("\"3\"")) { assertEquals("3", cf.getValue()); } else { // ID 3 was removed previously to test we don't need an ID to have a child doc assertNull("child doc should have no ID", cf); } - cf = cd.getField( "foo_i" ); + cf = cd.getField("foo_i"); assertEquals(2, cf.getValueCount()); - assertEquals(new Object[] {666L,777L}, cf.getValues().toArray()); + assertEquals(new Object[] {666L, 777L}, cf.getValues().toArray()); req.close(); } @Test public void testEmptyAnonymousChildDocs() throws Exception { - String str = "{\n" + - " \"add\": {\n" + - " \"doc\": {\n" + - " \"id\": \"1\",\n" + - " \"_childDocuments_\": []\n" + - " }\n" + - " }\n" + - "}"; - SolrQueryRequest req = req("commit","true"); + String str = + "{\n" + + " \"add\": {\n" + + " \"doc\": {\n" + + " \"id\": \"1\",\n" + + " \"_childDocuments_\": []\n" + + " }\n" + + " }\n" + + "}"; + SolrQueryRequest req = req("commit", "true"); SolrQueryResponse rsp = new SolrQueryResponse(); BufferingRequestProcessor p = new BufferingRequestProcessor(null); JsonLoader loader = new JsonLoader(); loader.load(req, rsp, new ContentStreamBase.StringStream(str), p); - assertEquals( 1, p.addCommands.size() ); + assertEquals(1, p.addCommands.size()); AddUpdateCommand add = p.addCommands.get(0); SolrInputDocument d = add.solrDoc; - SolrInputField f = d.getField( "id" ); + SolrInputField f = d.getField("id"); assertEquals("1", f.getValue()); List cd = d.getChildDocuments(); assertNull(cd); @@ -874,37 +899,38 @@ public void 
testEmptyAnonymousChildDocs() throws Exception { @Test public void testAnonymousGrandChildDocs() throws Exception { - String str = "{\n" + - " \"add\": {\n" + - " \"doc\": {\n" + - " \"id\": \"1\",\n" + - " \"_childDocuments_\": [\n" + - " {\n" + - " \"id\": \"2\",\n" + - " \"_childDocuments_\": [\n" + - " {\n" + - " \"id\": \"4\",\n" + - " \"foo_s\": \"Baz\"\n" + - " }\n" + - " ],\n" + - " \"foo_s\": \"Yaz\"\n" + - " },\n" + - " {\n" + - " \"id\": \"3\",\n" + - " \"foo_s\": \"Bar\"\n" + - " }\n" + - " ]\n" + - " }\n" + - " }\n" + - "}"; - - SolrQueryRequest req = req("commit","true"); + String str = + "{\n" + + " \"add\": {\n" + + " \"doc\": {\n" + + " \"id\": \"1\",\n" + + " \"_childDocuments_\": [\n" + + " {\n" + + " \"id\": \"2\",\n" + + " \"_childDocuments_\": [\n" + + " {\n" + + " \"id\": \"4\",\n" + + " \"foo_s\": \"Baz\"\n" + + " }\n" + + " ],\n" + + " \"foo_s\": \"Yaz\"\n" + + " },\n" + + " {\n" + + " \"id\": \"3\",\n" + + " \"foo_s\": \"Bar\"\n" + + " }\n" + + " ]\n" + + " }\n" + + " }\n" + + "}"; + + SolrQueryRequest req = req("commit", "true"); SolrQueryResponse rsp = new SolrQueryResponse(); BufferingRequestProcessor p = new BufferingRequestProcessor(null); JsonLoader loader = new JsonLoader(); loader.load(req, rsp, new ContentStreamBase.StringStream(str), p); - assertEquals( 1, p.addCommands.size() ); + assertEquals(1, p.addCommands.size()); AddUpdateCommand add = p.addCommands.get(0); SolrInputDocument one = add.solrDoc; @@ -923,36 +949,36 @@ public void testAnonymousGrandChildDocs() throws Exception { assertEquals("Bar", three.getFieldValue("foo_s")); req.close(); - } @Test public void testChildDocs() throws Exception { - String str = "{\n" + - " \"add\": {\n" + - " \"doc\": {\n" + - " \"id\": \"1\",\n" + - " \"children\": [\n" + - " {\n" + - " \"id\": \"2\",\n" + - " \"foo_s\": \"Yaz\"\n" + - " },\n" + - " {\n" + - " \"id\": \"3\",\n" + - " \"foo_s\": \"Bar\"\n" + - " }\n" + - " ]\n" + - " }\n" + - " }\n" + - "}"; - - SolrQueryRequest req = req("commit","true"); + String str = + "{\n" + + " \"add\": {\n" + + " \"doc\": {\n" + + " \"id\": \"1\",\n" + + " \"children\": [\n" + + " {\n" + + " \"id\": \"2\",\n" + + " \"foo_s\": \"Yaz\"\n" + + " },\n" + + " {\n" + + " \"id\": \"3\",\n" + + " \"foo_s\": \"Bar\"\n" + + " }\n" + + " ]\n" + + " }\n" + + " }\n" + + "}"; + + SolrQueryRequest req = req("commit", "true"); SolrQueryResponse rsp = new SolrQueryResponse(); BufferingRequestProcessor p = new BufferingRequestProcessor(null); JsonLoader loader = new JsonLoader(); loader.load(req, rsp, new ContentStreamBase.StringStream(str), p); - assertEquals( 1, p.addCommands.size() ); + assertEquals(1, p.addCommands.size()); AddUpdateCommand add = p.addCommands.get(0); SolrInputDocument one = add.solrDoc; @@ -969,31 +995,31 @@ public void testChildDocs() throws Exception { assertEquals("Bar", three.getFieldValue("foo_s")); req.close(); - } @Test public void testSingleRelationalChildDoc() throws Exception { - String str = "{\n" + - " \"add\": {\n" + - " \"doc\": {\n" + - " \"id\": \"1\",\n" + - " \"child1\": \n" + - " {\n" + - " \"id\": \"2\",\n" + - " \"foo_s\": \"Yaz\"\n" + - " },\n" + - " }\n" + - " }\n" + - "}"; - - SolrQueryRequest req = req("commit","true"); + String str = + "{\n" + + " \"add\": {\n" + + " \"doc\": {\n" + + " \"id\": \"1\",\n" + + " \"child1\": \n" + + " {\n" + + " \"id\": \"2\",\n" + + " \"foo_s\": \"Yaz\"\n" + + " },\n" + + " }\n" + + " }\n" + + "}"; + + SolrQueryRequest req = req("commit", "true"); SolrQueryResponse rsp = new SolrQueryResponse(); 
BufferingRequestProcessor p = new BufferingRequestProcessor(null); JsonLoader loader = new JsonLoader(); loader.load(req, rsp, new ContentStreamBase.StringStream(str), p); - assertEquals( 1, p.addCommands.size() ); + assertEquals(1, p.addCommands.size()); AddUpdateCommand add = p.addCommands.get(0); SolrInputDocument one = add.solrDoc; @@ -1006,16 +1032,13 @@ public void testSingleRelationalChildDoc() throws Exception { assertEquals("Yaz", two.getFieldValue("foo_s")); req.close(); - } @Test public void JSONLoader_denseVector_shouldIndexCorrectly() throws Exception { - updateJ(json( "[{'id':'888','vector':[1.8,2.8,3.8,4.8]}]" ), - params("commit", "true")); + updateJ(json("[{'id':'888','vector':[1.8,2.8,3.8,4.8]}]"), params("commit", "true")); - assertJQ(req("q","id:888", "fl","vector"), - "/response/docs/[0]=={'vector':[1.8,2.8,3.8,4.8]}"); + assertJQ( + req("q", "id:888", "fl", "vector"), "/response/docs/[0]=={'vector':[1.8,2.8,3.8,4.8]}"); } - } diff --git a/solr/core/src/test/org/apache/solr/handler/MoreLikeThisHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/MoreLikeThisHandlerTest.java index 05f83d81989..afa0bb44f6f 100644 --- a/solr/core/src/test/org/apache/solr/handler/MoreLikeThisHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/MoreLikeThisHandlerTest.java @@ -30,9 +30,7 @@ import org.junit.BeforeClass; import org.junit.Test; -/** - * TODO -- this needs to actually test the results/query etc - */ +/** TODO -- this needs to actually test the results/query etc */ public class MoreLikeThisHandlerTest extends SolrTestCaseJ4 { @BeforeClass public static void moreLikeThisBeforeClass() throws Exception { @@ -40,94 +38,182 @@ public static void moreLikeThisBeforeClass() throws Exception { } @Test - public void testInterface() throws Exception - { + public void testInterface() throws Exception { SolrCore core = h.getCore(); ModifiableSolrParams params = new ModifiableSolrParams(); - assertU(adoc("id","42","name","Tom Cruise","subword","Top Gun","subword","Risky Business","subword","The Color of Money","subword","Minority Report","subword", "Days of Thunder","subword", "Eyes Wide Shut","subword", "Far and Away", "foo_ti","10")); - assertU(adoc("id","43","name","Tom Hanks","subword","The Green Mile","subword","Forest Gump","subword","Philadelphia Story","subword","Big","subword","Cast Away", "foo_ti","10")); - assertU(adoc("id","44","name","Harrison Ford","subword","Star Wars","subword","Indiana Jones","subword","Patriot Games","subword","Regarding Henry")); - assertU(adoc("id","45","name","George Harrison","subword","Yellow Submarine","subword","Help","subword","Magical Mystery Tour","subword","Sgt. 
Peppers Lonley Hearts Club Band")); - assertU(adoc("id","46","name","Nicole Kidman","subword","Batman","subword","Days of Thunder","subword","Eyes Wide Shut","subword","Far and Away")); + assertU( + adoc( + "id", + "42", + "name", + "Tom Cruise", + "subword", + "Top Gun", + "subword", + "Risky Business", + "subword", + "The Color of Money", + "subword", + "Minority Report", + "subword", + "Days of Thunder", + "subword", + "Eyes Wide Shut", + "subword", + "Far and Away", + "foo_ti", + "10")); + assertU( + adoc( + "id", + "43", + "name", + "Tom Hanks", + "subword", + "The Green Mile", + "subword", + "Forest Gump", + "subword", + "Philadelphia Story", + "subword", + "Big", + "subword", + "Cast Away", + "foo_ti", + "10")); + assertU( + adoc( + "id", + "44", + "name", + "Harrison Ford", + "subword", + "Star Wars", + "subword", + "Indiana Jones", + "subword", + "Patriot Games", + "subword", + "Regarding Henry")); + assertU( + adoc( + "id", + "45", + "name", + "George Harrison", + "subword", + "Yellow Submarine", + "subword", + "Help", + "subword", + "Magical Mystery Tour", + "subword", + "Sgt. Peppers Lonley Hearts Club Band")); + assertU( + adoc( + "id", + "46", + "name", + "Nicole Kidman", + "subword", + "Batman", + "subword", + "Days of Thunder", + "subword", + "Eyes Wide Shut", + "subword", + "Far and Away")); assertU(commit()); params.set(MoreLikeThisParams.MLT, "true"); params.set(MoreLikeThisParams.SIMILARITY_FIELDS, "name,subword"); params.set(MoreLikeThisParams.INTERESTING_TERMS, "details"); - params.set(MoreLikeThisParams.MIN_TERM_FREQ,"1"); - params.set(MoreLikeThisParams.MIN_DOC_FREQ,"1"); - params.set("indent","true"); + params.set(MoreLikeThisParams.MIN_TERM_FREQ, "1"); + params.set(MoreLikeThisParams.MIN_DOC_FREQ, "1"); + params.set("indent", "true"); // requires 'q' or a single content stream - SolrException ex = expectThrows(SolrException.class, () -> { - try (MoreLikeThisHandler mlt = new MoreLikeThisHandler(); - SolrQueryRequestBase req = new SolrQueryRequestBase(core, params) {}) { - mlt.handleRequestBody(req, new SolrQueryResponse()); - } - }); + SolrException ex = + expectThrows( + SolrException.class, + () -> { + try (MoreLikeThisHandler mlt = new MoreLikeThisHandler(); + SolrQueryRequestBase req = new SolrQueryRequestBase(core, params) {}) { + mlt.handleRequestBody(req, new SolrQueryResponse()); + } + }); assertEquals(ex.getMessage(), MoreLikeThisHandler.ERR_MSG_QUERY_OR_TEXT_REQUIRED); assertEquals(ex.code(), SolrException.ErrorCode.BAD_REQUEST.code); // requires a single content stream (more than one is not supported). 
-    ex = expectThrows(SolrException.class, () -> {
-      try (MoreLikeThisHandler mlt = new MoreLikeThisHandler();
-           SolrQueryRequestBase req = new SolrQueryRequestBase(core, params) {}) {
-        ArrayList<ContentStream> streams = new ArrayList<>(2);
-        streams.add(new ContentStreamBase.StringStream("hello"));
-        streams.add(new ContentStreamBase.StringStream("there"));
-        req.setContentStreams(streams);
-        mlt.handleRequestBody(req, new SolrQueryResponse());
-      }
-    });
+    ex =
+        expectThrows(
+            SolrException.class,
+            () -> {
+              try (MoreLikeThisHandler mlt = new MoreLikeThisHandler();
+                  SolrQueryRequestBase req = new SolrQueryRequestBase(core, params) {}) {
+                ArrayList<ContentStream> streams = new ArrayList<>(2);
+                streams.add(new ContentStreamBase.StringStream("hello"));
+                streams.add(new ContentStreamBase.StringStream("there"));
+                req.setContentStreams(streams);
+                mlt.handleRequestBody(req, new SolrQueryResponse());
+              }
+            });
     assertEquals(ex.getMessage(), MoreLikeThisHandler.ERR_MSG_SINGLE_STREAM_ONLY);
     assertEquals(ex.code(), SolrException.ErrorCode.BAD_REQUEST.code);
 
     params.set(CommonParams.Q, "id:42");
-    try (SolrQueryRequest mltreq = new LocalSolrQueryRequest( core, params)) {
-      assertQ("morelikethis - tom cruise", mltreq,
+    try (SolrQueryRequest mltreq = new LocalSolrQueryRequest(core, params)) {
+      assertQ(
+          "morelikethis - tom cruise",
+          mltreq,
           "//result/doc[1]/str[@name='id'][.='46']",
           "//result/doc[2]/str[@name='id'][.='43']");
     }
 
     params.set(MoreLikeThisParams.BOOST, "true");
-    try (SolrQueryRequest mltreq = new LocalSolrQueryRequest( core, params)) {
-      assertQ("morelikethis - tom cruise", mltreq,
+    try (SolrQueryRequest mltreq = new LocalSolrQueryRequest(core, params)) {
+      assertQ(
+          "morelikethis - tom cruise",
+          mltreq,
           "//result/doc[1]/str[@name='id'][.='46']",
           "//result/doc[2]/str[@name='id'][.='43']");
     }
-    
+
     params.set(CommonParams.Q, "id:44");
-    try (SolrQueryRequest mltreq = new LocalSolrQueryRequest( core, params)) {
-      assertQ("morelike this - harrison ford", mltreq,
-          "//result/doc[1]/str[@name='id'][.='45']");
+    try (SolrQueryRequest mltreq = new LocalSolrQueryRequest(core, params)) {
+      assertQ("morelike this - harrison ford", mltreq, "//result/doc[1]/str[@name='id'][.='45']");
     }
 
     // test MoreLikeThis debug
     params.set(CommonParams.DEBUG_QUERY, "true");
-    try (SolrQueryRequest mltreq = new LocalSolrQueryRequest( core, params)) {
-      assertQ("morelike this - harrison ford", mltreq,
+    try (SolrQueryRequest mltreq = new LocalSolrQueryRequest(core, params)) {
+      assertQ(
+          "morelike this - harrison ford",
+          mltreq,
           "//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='rawMLTQuery']",
           "//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='boostedMLTQuery']",
           "//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='realMLTQuery']",
-          "//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/lst[@name='explain']/str[@name='45']"
-      );
+          "//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/lst[@name='explain']/str[@name='45']");
     }
 
     // test that qparser plugins work
     params.remove(CommonParams.DEBUG_QUERY);
     params.set(CommonParams.Q, "{!field f=id}44");
-    try (SolrQueryRequest mltreq = new LocalSolrQueryRequest( core, params)) {
+    try (SolrQueryRequest mltreq = new LocalSolrQueryRequest(core, params)) {
       assertQ(mltreq, "//result/doc[1]/str[@name='id'][.='45']");
     }
 
     params.set(CommonParams.Q, "id:42");
-    params.set(MoreLikeThisParams.QF,"name^5.0 subword^0.1");
-    try (SolrQueryRequest mltreq = new LocalSolrQueryRequest( core, params)) {
-
assertQ("morelikethis with weights", mltreq, + params.set(MoreLikeThisParams.QF, "name^5.0 subword^0.1"); + try (SolrQueryRequest mltreq = new LocalSolrQueryRequest(core, params)) { + assertQ( + "morelikethis with weights", + mltreq, "//result/doc[1]/str[@name='id'][.='43']", "//result/doc[2]/str[@name='id'][.='46']"); } @@ -135,24 +221,23 @@ public void testInterface() throws Exception // test that qparser plugins work w/ the MoreLikeThisHandler params.set(CommonParams.QT, "/mlt"); params.set(CommonParams.Q, "{!field f=id}44"); - try (SolrQueryRequest mltreq = new LocalSolrQueryRequest( core, params)) { + try (SolrQueryRequest mltreq = new LocalSolrQueryRequest(core, params)) { assertQ(mltreq, "//result/doc[1]/str[@name='id'][.='45']"); } // test that debugging works (test for MoreLikeThis*Handler*) params.set(CommonParams.QT, "/mlt"); params.set(CommonParams.DEBUG_QUERY, "true"); - try (SolrQueryRequest mltreq = new LocalSolrQueryRequest( core, params)) { - assertQ(mltreq, + try (SolrQueryRequest mltreq = new LocalSolrQueryRequest(core, params)) { + assertQ( + mltreq, "//result/doc[1]/str[@name='id'][.='45']", - "//lst[@name='debug']/lst[@name='explain']" - ); + "//lst[@name='debug']/lst[@name='explain']"); } } @Test - public void testMultifieldSimilarity() throws Exception - { + public void testMultifieldSimilarity() throws Exception { SolrCore core = h.getCore(); ModifiableSolrParams params = new ModifiableSolrParams(); @@ -176,9 +261,10 @@ public void testMultifieldSimilarity() throws Exception streams.add(new ContentStreamBase.StringStream("bbb", "zzz")); req.setContentStreams(streams); - // Make sure we have terms from both fields in the interestingTerms array and all documents have been - // retrieved as matching. - assertQ(req, + // Make sure we have terms from both fields in the interestingTerms array and all documents + // have been retrieved as matching. 
+      assertQ(
+          req,
           "//lst[@name = 'interestingTerms']/float[@name = 'subword:bbb']",
           "//lst[@name = 'interestingTerms']/float[@name = 'name:bbb']",
           "//result[@name = 'response' and @numFound = '4']");
diff --git a/solr/core/src/test/org/apache/solr/handler/PingRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/PingRequestHandlerTest.java
index eb9db262ce8..14ebd940519 100644
--- a/solr/core/src/test/org/apache/solr/handler/PingRequestHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/PingRequestHandlerTest.java
@@ -19,7 +19,6 @@
 import java.io.File;
 import java.io.IOException;
 import java.util.List;
-
 import org.apache.commons.io.FileUtils;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
@@ -44,7 +43,7 @@ public class PingRequestHandlerTest extends SolrTestCaseJ4 {
   private final String fileName = this.getClass().getName() + ".server-enabled";
   private File healthcheckFile = null;
   private PingRequestHandler handler = null;
-  
+
   @BeforeClass
   public static void beforeClass() throws Exception {
     initCore("solrconfig.xml", "schema.xml");
@@ -56,38 +55,36 @@ public void before() throws IOException {
     healthcheckFile = new File(initAndGetDataDir(), fileName);
     String fileNameParam = fileName;
-    // sometimes randomly use an absolute File path instead 
+    // sometimes randomly use an absolute File path instead
     if (random().nextBoolean()) {
       fileNameParam = healthcheckFile.getAbsolutePath();
-    } 
-      
+    }
+
     if (healthcheckFile.exists()) FileUtils.forceDelete(healthcheckFile);
     handler = new PingRequestHandler();
     NamedList<String> initParams = new NamedList<>();
-    initParams.add(PingRequestHandler.HEALTHCHECK_FILE_PARAM,
-                   fileNameParam);
+    initParams.add(PingRequestHandler.HEALTHCHECK_FILE_PARAM, fileNameParam);
     handler.init(initParams);
     handler.inform(h.getCore());
   }
-  
+
   public void testPingWithNoHealthCheck() throws Exception {
-    
+
     // for this test, we don't want any healthcheck file configured at all
     handler = new PingRequestHandler();
     handler.init(new NamedList<>());
     handler.inform(h.getCore());
     SolrQueryResponse rsp = null;
-    
+
     rsp = makeRequest(handler, req());
     assertEquals("OK", rsp.getValues().get("status"));
-    
-    rsp = makeRequest(handler, req("action","ping"));
-    assertEquals("OK", rsp.getValues().get("status"));
+    rsp = makeRequest(handler, req("action", "ping"));
+    assertEquals("OK", rsp.getValues().get("status"));
   }
-  
+
   public void testEnablingServer() throws Exception {
     assertTrue(!healthcheckFile.exists());
@@ -96,8 +93,9 @@ public void testEnablingServer() throws Exception {
     SolrQueryResponse sqr = makeRequest(handler, req());
     SolrException se = (SolrException) sqr.getException();
     assertEquals(
-        "Response should have been replaced with a 503 SolrException.",
-        se.code(), SolrException.ErrorCode.SERVICE_UNAVAILABLE.code);
+        "Response should have been replaced with a 503 SolrException.",
+        se.code(),
+        SolrException.ErrorCode.SERVICE_UNAVAILABLE.code);
 
     // now enable
 
@@ -114,12 +112,12 @@ public void testEnablingServer() throws Exception {
     // enable when already enabled shouldn't cause any problems
     makeRequest(handler, req("action", "enable"));
     assertTrue(healthcheckFile.exists());
-  }
+
   public void testDisablingServer() throws Exception {
-    assertTrue(! healthcheckFile.exists());
-    
+    assertTrue(!healthcheckFile.exists());
+
     healthcheckFile.createNewFile();
 
     // first make sure that ping responds back that the service is enabled
@@ -128,48 +126,48 @@ public void testDisablingServer() throws Exception {
     assertEquals("OK", rsp.getValues().get("status"));
 
     // now disable
-    
+
     makeRequest(handler, req("action", "disable"));
-    
+
     assertFalse(healthcheckFile.exists());
 
-    // now make sure that ping responds back that the service is disabled 
+    // now make sure that ping responds back that the service is disabled
     SolrQueryResponse sqr = makeRequest(handler, req());
     SolrException se = (SolrException) sqr.getException();
     assertEquals(
-        "Response should have been replaced with a 503 SolrException.",
-        se.code(), SolrException.ErrorCode.SERVICE_UNAVAILABLE.code);
-    
+        "Response should have been replaced with a 503 SolrException.",
+        se.code(),
+        SolrException.ErrorCode.SERVICE_UNAVAILABLE.code);
+
     // disable when already disabled shouldn't cause any problems
     makeRequest(handler, req("action", "disable"));
     assertFalse(healthcheckFile.exists());
-    
   }
 
-  
   public void testGettingStatus() throws Exception {
     SolrQueryResponse rsp = null;
     handler.handleEnable(true);
-    
+
     rsp = makeRequest(handler, req("action", "status"));
     assertEquals("enabled", rsp.getValues().get("status"));
-     
-    handler.handleEnable(false);
-    
+
+    handler.handleEnable(false);
+
     rsp = makeRequest(handler, req("action", "status"));
     assertEquals("disabled", rsp.getValues().get("status"));
-    
   }
-  
+
   public void testBadActionRaisesException() throws Exception {
-    SolrException se = expectThrows(SolrException.class, () -> makeRequest(handler, req("action", "badaction")));
-    assertEquals(SolrException.ErrorCode.BAD_REQUEST.code,se.code());
+    SolrException se =
+        expectThrows(SolrException.class, () -> makeRequest(handler, req("action", "badaction")));
+    assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, se.code());
   }
 
-  public void testPingInClusterWithNoHealthCheck() throws Exception {
+  public void testPingInClusterWithNoHealthCheck() throws Exception {
 
-    MiniSolrCloudCluster miniCluster = new MiniSolrCloudCluster(NUM_SERVERS, createTempDir(), buildJettyConfig("/solr"));
+    MiniSolrCloudCluster miniCluster =
+        new MiniSolrCloudCluster(NUM_SERVERS, createTempDir(), buildJettyConfig("/solr"));
 
     final CloudSolrClient cloudSolrClient = miniCluster.getSolrClient();
 
@@ -184,8 +182,10 @@ public void testPingInClusterWithNoHealthCheck() throws Exception {
       // create collection
       String collectionName = "testSolrCloudCollection";
       String configName = "solrCloudCollectionConfig";
-      miniCluster.uploadConfigSet(SolrTestCaseJ4.TEST_PATH().resolve("collection1").resolve("conf"), configName);
-      CollectionAdminRequest.createCollection(collectionName, configName, NUM_SHARDS, REPLICATION_FACTOR)
+      miniCluster.uploadConfigSet(
+          SolrTestCaseJ4.TEST_PATH().resolve("collection1").resolve("conf"), configName);
+      CollectionAdminRequest.createCollection(
+          collectionName, configName, NUM_SHARDS, REPLICATION_FACTOR)
           .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE)
           .process(miniCluster.getSolrClient());
 
@@ -193,29 +193,25 @@ public void testPingInClusterWithNoHealthCheck() throws Exception {
       SolrPingWithDistrib reqDistrib = new SolrPingWithDistrib();
       reqDistrib.setDistrib(true);
       SolrPingResponse rsp = reqDistrib.process(cloudSolrClient, collectionName);
-      assertEquals(0, rsp.getStatus()); 
+      assertEquals(0, rsp.getStatus());
       assertTrue(rsp.getResponseHeader().getBooleanArg(("zkConnected")));
 
-      SolrPing reqNonDistrib = new 
SolrPing(); rsp = reqNonDistrib.process(cloudSolrClient, collectionName); - assertEquals(0, rsp.getStatus()); + assertEquals(0, rsp.getStatus()); assertTrue(rsp.getResponseHeader().getBooleanArg(("zkConnected"))); - } - finally { + } finally { miniCluster.shutdown(); - } + } } - /** - * Helper Method: Executes the request against the handler, returns - * the response, and closes the request. + * Helper Method: Executes the request against the handler, returns the response, and closes the + * request. */ - private SolrQueryResponse makeRequest(PingRequestHandler handler, - SolrQueryRequest req) - throws Exception { + private SolrQueryResponse makeRequest(PingRequestHandler handler, SolrQueryRequest req) + throws Exception { SolrQueryResponse rsp = new SolrQueryResponse(); try { @@ -227,11 +223,9 @@ private SolrQueryResponse makeRequest(PingRequestHandler handler, } static class SolrPingWithDistrib extends SolrPing { - public SolrPing setDistrib(boolean distrib) { + public SolrPing setDistrib(boolean distrib) { getParams().add("distrib", distrib ? "true" : "false"); - return this; - } + return this; + } } - - } diff --git a/solr/core/src/test/org/apache/solr/handler/ReplicationTestHelper.java b/solr/core/src/test/org/apache/solr/handler/ReplicationTestHelper.java index 2ba611bbdc0..6ac93f84833 100644 --- a/solr/core/src/test/org/apache/solr/handler/ReplicationTestHelper.java +++ b/solr/core/src/test/org/apache/solr/handler/ReplicationTestHelper.java @@ -16,6 +16,15 @@ */ package org.apache.solr.handler; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.io.*; +import java.lang.invoke.MethodHandles; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Properties; import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrClient; @@ -33,301 +42,297 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.*; -import java.lang.invoke.MethodHandles; -import java.net.URL; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Properties; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - public final class ReplicationTestHelper { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - public static final String CONF_DIR = "solr" - + File.separator + "collection1" + File.separator + "conf" - + File.separator; - - - public static JettySolrRunner createAndStartJetty(SolrInstance instance) throws Exception { - FileUtils.copyFile(new File(SolrTestCaseJ4.TEST_HOME(), "solr.xml"), new File(instance.getHomeDir(), "solr.xml")); - Properties nodeProperties = new Properties(); - nodeProperties.setProperty("solr.data.dir", instance.getDataDir()); - JettyConfig jettyConfig = JettyConfig.builder().setContext("/solr").setPort(0).build(); - JettySolrRunner jetty = new JettySolrRunner(instance.getHomeDir(), nodeProperties, jettyConfig); - jetty.start(); - return jetty; - } - - public static HttpSolrClient createNewSolrClient(String baseUrl) { - try { - // setup the client... 
- HttpSolrClient client = SolrTestCaseJ4.getHttpSolrClient(baseUrl, 15000, 90000); - return client; - } - catch (Exception ex) { - throw new RuntimeException(ex); - } + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + public static final String CONF_DIR = + "solr" + File.separator + "collection1" + File.separator + "conf" + File.separator; + + public static JettySolrRunner createAndStartJetty(SolrInstance instance) throws Exception { + FileUtils.copyFile( + new File(SolrTestCaseJ4.TEST_HOME(), "solr.xml"), + new File(instance.getHomeDir(), "solr.xml")); + Properties nodeProperties = new Properties(); + nodeProperties.setProperty("solr.data.dir", instance.getDataDir()); + JettyConfig jettyConfig = JettyConfig.builder().setContext("/solr").setPort(0).build(); + JettySolrRunner jetty = new JettySolrRunner(instance.getHomeDir(), nodeProperties, jettyConfig); + jetty.start(); + return jetty; + } + + public static HttpSolrClient createNewSolrClient(String baseUrl) { + try { + // setup the client... + HttpSolrClient client = SolrTestCaseJ4.getHttpSolrClient(baseUrl, 15000, 90000); + return client; + } catch (Exception ex) { + throw new RuntimeException(ex); } + } - public static int index(SolrClient s, Object... fields) throws Exception { - SolrInputDocument doc = new SolrInputDocument(); - for (int i = 0; i < fields.length; i += 2) { - doc.addField((String) (fields[i]), fields[i + 1]); - } - return s.add(doc).getStatus(); + public static int index(SolrClient s, Object... fields) throws Exception { + SolrInputDocument doc = new SolrInputDocument(); + for (int i = 0; i < fields.length; i += 2) { + doc.addField((String) (fields[i]), fields[i + 1]); } - - /** - * character copy of file using UTF-8. If port is non-null, will be substituted any time "TEST_PORT" is found. - */ - private static void copyFile(File src, File dst, Integer port, boolean internalCompression) throws IOException { - try (BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(src), StandardCharsets.UTF_8)); - Writer out = new OutputStreamWriter(new FileOutputStream(dst), StandardCharsets.UTF_8)) { - - for (String line = in.readLine(); null != line; line = in.readLine()) { - if (null != port) { - line = line.replace("TEST_PORT", port.toString()); - } - line = line.replace("COMPRESSION", internalCompression ? "internal" : "false"); - out.write(line); - } + return s.add(doc).getStatus(); + } + + /** + * character copy of file using UTF-8. If port is non-null, will be substituted any time + * "TEST_PORT" is found. + */ + private static void copyFile(File src, File dst, Integer port, boolean internalCompression) + throws IOException { + try (BufferedReader in = + new BufferedReader( + new InputStreamReader(new FileInputStream(src), StandardCharsets.UTF_8)); + Writer out = new OutputStreamWriter(new FileOutputStream(dst), StandardCharsets.UTF_8)) { + + for (String line = in.readLine(); null != line; line = in.readLine()) { + if (null != port) { + line = line.replace("TEST_PORT", port.toString()); } + line = line.replace("COMPRESSION", internalCompression ? 
"internal" : "false"); + out.write(line); + } } + } - public static void assertVersions(SolrClient client1, SolrClient client2) throws Exception { - NamedList details = getDetails(client1); - @SuppressWarnings({"unchecked"}) - ArrayList> commits = (ArrayList>) details.get("commits"); - Long maxVersionClient1 = getVersion(client1); - Long maxVersionClient2 = getVersion(client2); - - if (maxVersionClient1 > 0 && maxVersionClient2 > 0) { - assertEquals(maxVersionClient1, maxVersionClient2); - } + public static void assertVersions(SolrClient client1, SolrClient client2) throws Exception { + NamedList details = getDetails(client1); + @SuppressWarnings({"unchecked"}) + ArrayList> commits = (ArrayList>) details.get("commits"); + Long maxVersionClient1 = getVersion(client1); + Long maxVersionClient2 = getVersion(client2); - // check vs /replication?command=indexversion call - ModifiableSolrParams params = new ModifiableSolrParams(); - params.set("qt", ReplicationHandler.PATH); - params.set("_trace", "assertVersions"); - params.set("command", "indexversion"); - QueryRequest req = new QueryRequest(params); - NamedList resp = client1.request(req); - assertReplicationResponseSucceeded(resp); - Long version = (Long) resp.get("indexversion"); - assertEquals(maxVersionClient1, version); - - // check vs /replication?command=indexversion call - resp = client2.request(req); - assertReplicationResponseSucceeded(resp); - version = (Long) resp.get("indexversion"); - assertEquals(maxVersionClient2, version); + if (maxVersionClient1 > 0 && maxVersionClient2 > 0) { + assertEquals(maxVersionClient1, maxVersionClient2); } - @SuppressWarnings({"unchecked"}) - public static Long getVersion(SolrClient client) throws Exception { - NamedList details; - ArrayList> commits; - details = getDetails(client); - commits = (ArrayList>) details.get("commits"); - Long maxVersionFollower= 0L; - for(NamedList commit : commits) { - Long version = (Long) commit.get("indexVersion"); - maxVersionFollower = Math.max(version, maxVersionFollower); - } - return maxVersionFollower; + // check vs /replication?command=indexversion call + ModifiableSolrParams params = new ModifiableSolrParams(); + params.set("qt", ReplicationHandler.PATH); + params.set("_trace", "assertVersions"); + params.set("command", "indexversion"); + QueryRequest req = new QueryRequest(params); + NamedList resp = client1.request(req); + assertReplicationResponseSucceeded(resp); + Long version = (Long) resp.get("indexversion"); + assertEquals(maxVersionClient1, version); + + // check vs /replication?command=indexversion call + resp = client2.request(req); + assertReplicationResponseSucceeded(resp); + version = (Long) resp.get("indexversion"); + assertEquals(maxVersionClient2, version); + } + + @SuppressWarnings({"unchecked"}) + public static Long getVersion(SolrClient client) throws Exception { + NamedList details; + ArrayList> commits; + details = getDetails(client); + commits = (ArrayList>) details.get("commits"); + Long maxVersionFollower = 0L; + for (NamedList commit : commits) { + Long version = (Long) commit.get("indexVersion"); + maxVersionFollower = Math.max(version, maxVersionFollower); } - - //Simple function to wrap the invocation of replication commands on the various - //jetty servers. 
- public static void invokeReplicationCommand(String baseUrl, String pCommand) throws IOException - { - //String leaderUrl = buildUrl(pJettyPort) + "/" + DEFAULT_TEST_CORENAME + ReplicationHandler.PATH+"?command=" + pCommand; - String url = baseUrl + ReplicationHandler.PATH+"?command=" + pCommand; - URL u = new URL(url); - InputStream stream = u.openStream(); - stream.close(); + return maxVersionFollower; + } + + // Simple function to wrap the invocation of replication commands on the various + // jetty servers. + public static void invokeReplicationCommand(String baseUrl, String pCommand) throws IOException { + // String leaderUrl = buildUrl(pJettyPort) + "/" + DEFAULT_TEST_CORENAME + + // ReplicationHandler.PATH+"?command=" + pCommand; + String url = baseUrl + ReplicationHandler.PATH + "?command=" + pCommand; + URL u = new URL(url); + InputStream stream = u.openStream(); + stream.close(); + } + + public static NamedList query(String query, SolrClient s) + throws SolrServerException, IOException { + ModifiableSolrParams params = new ModifiableSolrParams(); + + params.add("q", query); + params.add("sort", "id desc"); + + QueryResponse qres = s.query(params); + return qres.getResponse(); + } + + /** will sleep up to 30 seconds, looking for expectedDocCount */ + public static NamedList rQuery(int expectedDocCount, String query, SolrClient client) + throws Exception { + int timeSlept = 0; + NamedList res = query(query, client); + while (expectedDocCount != numFound(res) && timeSlept < 30000) { + log.info("Waiting for {} docs", expectedDocCount); + timeSlept += 100; + Thread.sleep(100); + res = query(query, client); } + if (log.isInfoEnabled()) { + log.info("Waited for {}ms and found {} docs", timeSlept, numFound(res)); + } + return res; + } - public static NamedList query(String query, SolrClient s) throws SolrServerException, IOException { - ModifiableSolrParams params = new ModifiableSolrParams(); + public static long numFound(NamedList res) { + return ((SolrDocumentList) res.get("response")).getNumFound(); + } - params.add("q", query); - params.add("sort","id desc"); + public static NamedList getDetails(SolrClient s) throws Exception { + ModifiableSolrParams params = new ModifiableSolrParams(); + params.set("command", "details"); + params.set("_trace", "getDetails"); + params.set("qt", ReplicationHandler.PATH); + QueryRequest req = new QueryRequest(params); - QueryResponse qres = s.query(params); - return qres.getResponse(); - } + NamedList res = s.request(req); + assertReplicationResponseSucceeded(res); - /** will sleep up to 30 seconds, looking for expectedDocCount */ - public static NamedList rQuery(int expectedDocCount, String query, SolrClient client) throws Exception { - int timeSlept = 0; - NamedList res = query(query, client); - while (expectedDocCount != numFound(res) - && timeSlept < 30000) { - log.info("Waiting for {} docs", expectedDocCount); - timeSlept += 100; - Thread.sleep(100); - res = query(query, client); - } - if (log.isInfoEnabled()) { - log.info("Waited for {}ms and found {} docs", timeSlept, numFound(res)); - } - return res; - } + @SuppressWarnings("unchecked") + NamedList details = (NamedList) res.get("details"); - public static long numFound(NamedList res) { - return ((SolrDocumentList) res.get("response")).getNumFound(); - } + assertNotNull("null details", details); - public static NamedList getDetails(SolrClient s) throws Exception { - ModifiableSolrParams params = new ModifiableSolrParams(); - params.set("command","details"); - params.set("_trace","getDetails"); 
- params.set("qt",ReplicationHandler.PATH); - QueryRequest req = new QueryRequest(params); + return details; + } - NamedList res = s.request(req); - assertReplicationResponseSucceeded(res); + public static NamedList getIndexVersion(SolrClient s) throws Exception { - @SuppressWarnings("unchecked") NamedList details - = (NamedList) res.get("details"); + ModifiableSolrParams params = new ModifiableSolrParams(); + params.set("command", "indexversion"); + params.set("_trace", "getIndexVersion"); + params.set("qt", ReplicationHandler.PATH); + QueryRequest req = new QueryRequest(params); - assertNotNull("null details", details); + NamedList res = s.request(req); + assertReplicationResponseSucceeded(res); - return details; - } + return res; + } - public static NamedList getIndexVersion(SolrClient s) throws Exception { + public static void assertReplicationResponseSucceeded(NamedList response) { + assertNotNull("null response from server", response); + assertNotNull("Expected replication response to have 'status' field", response.get("status")); + assertEquals("OK", response.get("status")); + } - ModifiableSolrParams params = new ModifiableSolrParams(); - params.set("command","indexversion"); - params.set("_trace","getIndexVersion"); - params.set("qt",ReplicationHandler.PATH); - QueryRequest req = new QueryRequest(params); + public static NamedList reloadCore(SolrClient s, String core) throws Exception { - NamedList res = s.request(req); - assertReplicationResponseSucceeded(res); + ModifiableSolrParams params = new ModifiableSolrParams(); + params.set("action", "reload"); + params.set("core", core); + params.set("qt", "/admin/cores"); + QueryRequest req = new QueryRequest(params); - return res; + try (HttpSolrClient adminClient = adminClient(s)) { + NamedList res = adminClient.request(req); + assertNotNull("null response from server", res); + return res; } + } + + public static HttpSolrClient adminClient(SolrClient client) { + String adminUrl = ((HttpSolrClient) client).getBaseURL().replace("/collection1", ""); + return SolrTestCaseJ4.getHttpSolrClient(adminUrl); + } + + public static void pullFromTo(String srcUrl, String destUrl) throws IOException { + URL url; + InputStream stream; + String leaderUrl = + destUrl + + ReplicationHandler.PATH + + "?wait=true&command=fetchindex&leaderUrl=" + + srcUrl + + ReplicationHandler.PATH; + url = new URL(leaderUrl); + stream = url.openStream(); + stream.close(); + } + + public static class SolrInstance { + + private final String name; + private Integer testPort; + private final File homeDir; + private File confDir; + private File dataDir; - public static void assertReplicationResponseSucceeded(NamedList response) { - assertNotNull("null response from server", response); - assertNotNull("Expected replication response to have 'status' field", response.get("status")); - assertEquals("OK", response.get("status")); + /** + * @param homeDir Base directory to build solr configuration and index in + * @param name used to pick which "solrconfig-${name}.xml" file gets copied to solrconfig.xml in + * new conf dir. + * @param testPort if not null, used as a replacement for TEST_PORT in the cloned config files. 
+ */ + public SolrInstance(File homeDir, String name, Integer testPort) { + this.homeDir = homeDir; + this.name = name; + this.testPort = testPort; } - public static NamedList reloadCore(SolrClient s, String core) throws Exception { - - ModifiableSolrParams params = new ModifiableSolrParams(); - params.set("action","reload"); - params.set("core", core); - params.set("qt","/admin/cores"); - QueryRequest req = new QueryRequest(params); - - try (HttpSolrClient adminClient = adminClient(s)) { - NamedList res = adminClient.request(req); - assertNotNull("null response from server", res); - return res; - } + public String getHomeDir() { + return homeDir.toString(); } - public static HttpSolrClient adminClient(SolrClient client) { - String adminUrl = ((HttpSolrClient)client).getBaseURL().replace("/collection1", ""); - return SolrTestCaseJ4.getHttpSolrClient(adminUrl); + public String getSchemaFile() { + return CONF_DIR + "schema-replication1.xml"; } - - public static void pullFromTo(String srcUrl, String destUrl) throws IOException { - URL url; - InputStream stream; - String leaderUrl = destUrl - + ReplicationHandler.PATH+"?wait=true&command=fetchindex&leaderUrl=" - + srcUrl - + ReplicationHandler.PATH; - url = new URL(leaderUrl); - stream = url.openStream(); - stream.close(); + public String getConfDir() { + return confDir.toString(); } - public static class SolrInstance { - - final private String name; - private Integer testPort; - final private File homeDir; - private File confDir; - private File dataDir; - - /** - * @param homeDir Base directory to build solr configuration and index in - * @param name used to pick which - * "solrconfig-${name}.xml" file gets copied - * to solrconfig.xml in new conf dir. - * @param testPort if not null, used as a replacement for - * TEST_PORT in the cloned config files. 
- */ - public SolrInstance(File homeDir, String name, Integer testPort) { - this.homeDir = homeDir; - this.name = name; - this.testPort = testPort; - } - - public String getHomeDir() { - return homeDir.toString(); - } - - public String getSchemaFile() { - return CONF_DIR + "schema-replication1.xml"; - } - - public String getConfDir() { - return confDir.toString(); - } - - public String getDataDir() { - return dataDir.getAbsolutePath(); - } + public String getDataDir() { + return dataDir.getAbsolutePath(); + } - public String getSolrConfigFile() { - return CONF_DIR + "solrconfig-"+name+".xml"; - } + public String getSolrConfigFile() { + return CONF_DIR + "solrconfig-" + name + ".xml"; + } - /** If it needs to change */ - public void setTestPort(Integer testPort) { - this.testPort = testPort; - } + /** If it needs to change */ + public void setTestPort(Integer testPort) { + this.testPort = testPort; + } - public void setUp() throws Exception { - System.setProperty("solr.test.sys.prop1", "propone"); - System.setProperty("solr.test.sys.prop2", "proptwo"); + public void setUp() throws Exception { + System.setProperty("solr.test.sys.prop1", "propone"); + System.setProperty("solr.test.sys.prop2", "proptwo"); - Properties props = new Properties(); - props.setProperty("name", "collection1"); + Properties props = new Properties(); + props.setProperty("name", "collection1"); - SolrTestCaseJ4.writeCoreProperties(homeDir.toPath().resolve("collection1"), props, "TestReplicationHandler"); + SolrTestCaseJ4.writeCoreProperties( + homeDir.toPath().resolve("collection1"), props, "TestReplicationHandler"); - dataDir = new File(homeDir + "/collection1", "data"); - confDir = new File(homeDir + "/collection1", "conf"); + dataDir = new File(homeDir + "/collection1", "data"); + confDir = new File(homeDir + "/collection1", "conf"); - homeDir.mkdirs(); - dataDir.mkdirs(); - confDir.mkdirs(); + homeDir.mkdirs(); + dataDir.mkdirs(); + confDir.mkdirs(); - copyConfigFile(getSolrConfigFile(), "solrconfig.xml"); - copyConfigFile(getSchemaFile(), "schema.xml"); - copyConfigFile(CONF_DIR + "solrconfig.snippet.randomindexconfig.xml", - "solrconfig.snippet.randomindexconfig.xml"); - } + copyConfigFile(getSolrConfigFile(), "solrconfig.xml"); + copyConfigFile(getSchemaFile(), "schema.xml"); + copyConfigFile( + CONF_DIR + "solrconfig.snippet.randomindexconfig.xml", + "solrconfig.snippet.randomindexconfig.xml"); + } - public void copyConfigFile(String srcFile, String destFile) - throws IOException { - copyFile(SolrTestCaseJ4.getFile(srcFile), - new File(confDir, destFile), - testPort, LuceneTestCase.random().nextBoolean()); - } + public void copyConfigFile(String srcFile, String destFile) throws IOException { + copyFile( + SolrTestCaseJ4.getFile(srcFile), + new File(confDir, destFile), + testPort, + LuceneTestCase.random().nextBoolean()); } + } } diff --git a/solr/core/src/test/org/apache/solr/handler/RequestLoggingTest.java b/solr/core/src/test/org/apache/solr/handler/RequestLoggingTest.java index 040d2d2584a..578145455f3 100644 --- a/solr/core/src/test/org/apache/solr/handler/RequestLoggingTest.java +++ b/solr/core/src/test/org/apache/solr/handler/RequestLoggingTest.java @@ -20,7 +20,6 @@ import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; - import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -53,14 +52,15 @@ public void setupAppender() { LoggerContext ctx = (LoggerContext) LogManager.getContext(false); 
writer = new StringWriter(); - appender = WriterAppender.createAppender( - PatternLayout - .newBuilder() - .withPattern("%-5p [%t]: %m%n") - .build(), - null, writer, "RequestLoggingTest", false, true); + appender = + WriterAppender.createAppender( + PatternLayout.newBuilder().withPattern("%-5p [%t]: %m%n").build(), + null, + writer, + "RequestLoggingTest", + false, + true); appender.start(); - } @Test diff --git a/solr/core/src/test/org/apache/solr/handler/SearchHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/SearchHandlerTest.java index ebf4294a3e7..28d90e77ca7 100644 --- a/solr/core/src/test/org/apache/solr/handler/SearchHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/SearchHandlerTest.java @@ -20,61 +20,56 @@ import org.junit.BeforeClass; /** - * Most of the tests for {@link org.apache.solr.handler.component.SearchHandler} are in {@link org.apache.solr.ConvertedLegacyTest}. + * Most of the tests for {@link org.apache.solr.handler.component.SearchHandler} are in {@link + * org.apache.solr.ConvertedLegacyTest}. */ public class SearchHandlerTest extends SolrTestCaseJ4 { - + @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema.xml"); } - + public void testSorting() throws Exception { assertU(adoc("id", "10", "title", "test", "val_s1", "aaa")); assertU(adoc("id", "11", "title", "test", "val_s1", "bbb")); assertU(adoc("id", "12", "title", "test", "val_s1", "ccc")); assertU(commit()); - assertQ(req("q", "title:test") - ,"//*[@numFound='3']" - ); - - assertQ(req("q", "title:test", "sort","val_s1 asc") - ,"//*[@numFound='3']" - ,"//result/doc[1]/str[@name='id'][.='10']" - ,"//result/doc[2]/str[@name='id'][.='11']" - ,"//result/doc[3]/str[@name='id'][.='12']" - ); + assertQ(req("q", "title:test"), "//*[@numFound='3']"); + + assertQ( + req("q", "title:test", "sort", "val_s1 asc"), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='10']", + "//result/doc[2]/str[@name='id'][.='11']", + "//result/doc[3]/str[@name='id'][.='12']"); + + assertQ( + req("q", "title:test", "sort", "val_s1 desc"), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='12']", + "//result/doc[2]/str[@name='id'][.='11']", + "//result/doc[3]/str[@name='id'][.='10']"); - assertQ(req("q", "title:test", "sort","val_s1 desc") - ,"//*[@numFound='3']" - ,"//result/doc[1]/str[@name='id'][.='12']" - ,"//result/doc[2]/str[@name='id'][.='11']" - ,"//result/doc[3]/str[@name='id'][.='10']" - ); - // Make sure score parsing works - assertQ(req("q", "title:test", "sort","score desc") - ,"//*[@numFound='3']" - ); + assertQ(req("q", "title:test", "sort", "score desc"), "//*[@numFound='3']"); + + assertQ(req("q", "title:test", "sort", "score asc"), "//*[@numFound='3']"); - assertQ(req("q", "title:test", "sort","score asc") - ,"//*[@numFound='3']" - ); - // Using legacy ';' param - assertQ(req("q", "title:test", "sort","val_s1 desc") - ,"//*[@numFound='3']" - ,"//result/doc[1]/str[@name='id'][.='12']" - ,"//result/doc[2]/str[@name='id'][.='11']" - ,"//result/doc[3]/str[@name='id'][.='10']" - ); + assertQ( + req("q", "title:test", "sort", "val_s1 desc"), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='12']", + "//result/doc[2]/str[@name='id'][.='11']", + "//result/doc[3]/str[@name='id'][.='10']"); - assertQ(req("q", "title:test", "sort", "val_s1 asc") - ,"//*[@numFound='3']" - ,"//result/doc[1]/str[@name='id'][.='10']" - ,"//result/doc[2]/str[@name='id'][.='11']" - ,"//result/doc[3]/str[@name='id'][.='12']" - ); + assertQ( + req("q", 
"title:test", "sort", "val_s1 asc"), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='10']", + "//result/doc[2]/str[@name='id'][.='11']", + "//result/doc[3]/str[@name='id'][.='12']"); } } diff --git a/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java b/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java index 8949d6c8f48..524a8095bf7 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java +++ b/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java @@ -16,13 +16,15 @@ */ package org.apache.solr.handler; +import static java.util.Arrays.asList; +import static org.apache.solr.common.util.Utils.fromJSONString; + import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering; import java.io.IOException; import java.lang.invoke.MethodHandles; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.Map; - import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; @@ -48,9 +50,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static java.util.Arrays.asList; -import static org.apache.solr.common.util.Utils.fromJSONString; - @ThreadLeakLingering(linger = 0) public class TestBlobHandler extends AbstractFullDistribZkTestBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -60,30 +59,27 @@ public void doBlobHandlerTest() throws Exception { try (SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) { CollectionAdminResponse response1; - CollectionAdminRequest.Create createCollectionRequest = CollectionAdminRequest.createCollection(".system",1,2); + CollectionAdminRequest.Create createCollectionRequest = + CollectionAdminRequest.createCollection(".system", 1, 2); response1 = createCollectionRequest.process(client); assertEquals(0, response1.getStatus()); assertTrue(response1.isSuccess()); - DocCollection sysColl = cloudClient.getZkStateReader().getClusterState().getCollection(".system"); + DocCollection sysColl = + cloudClient.getZkStateReader().getClusterState().getCollection(".system"); Replica replica = sysColl.getActiveSlicesMap().values().iterator().next().getLeader(); String baseUrl = replica.getBaseUrl(); String url = baseUrl + "/.system/config/requestHandler"; MapWriter map = TestSolrConfigHandlerConcurrent.getAsMap(url, cloudClient); assertNotNull(map); - assertEquals("solr.BlobHandler", map._get(asList( - "config", - "requestHandler", - "/blob", - "class"),null)); - map = TestSolrConfigHandlerConcurrent.getAsMap(baseUrl + "/.system/schema/fields/blob", cloudClient); + assertEquals( + "solr.BlobHandler", map._get(asList("config", "requestHandler", "/blob", "class"), null)); + map = + TestSolrConfigHandlerConcurrent.getAsMap( + baseUrl + "/.system/schema/fields/blob", cloudClient); assertNotNull(map); - assertEquals("blob", map._get(asList( - "field", - "name"),null)); - assertEquals("bytes", map._get( asList( - "field", - "type"),null)); + assertEquals("blob", map._get(asList("field", "name"), null)); + assertEquals("bytes", map._get(asList("field", "type"), null)); checkBlobPost(baseUrl, cloudClient); checkBlobPostMd5(baseUrl, cloudClient); @@ -103,7 +99,7 @@ static void checkBlobPost(String baseUrl, CloudSolrClient cloudClient) throws Ex url = baseUrl + "/.system/blob/test/1"; map = TestSolrConfigHandlerConcurrent.getAsMap(url, cloudClient); - assertEquals("" + bytarr.length, map._getStr("response/docs[0]/size",null)); + 
assertEquals("" + bytarr.length, map._getStr("response/docs[0]/size", null)); compareInputAndOutput(baseUrl + "/.system/blob/test?wt=filestream", bytarr2, cloudClient); compareInputAndOutput(baseUrl + "/.system/blob/test/1?wt=filestream", bytarr, cloudClient); @@ -113,43 +109,57 @@ static void checkBlobPostMd5(String baseUrl, CloudSolrClient cloudClient) throws String blobName = "md5Test"; String stringValue = "MHMyugAGUxFzeqbpxVemACGbQ"; // Random string requires padding in md5 hash String stringValueMd5 = "02d82dd5aabc47fae54ee3dd236ad83d"; - postAndCheck(cloudClient, baseUrl, blobName, ByteBuffer.wrap(stringValue.getBytes(StandardCharsets.UTF_8)), 1); - MapWriter map = TestSolrConfigHandlerConcurrent.getAsMap(baseUrl + "/.system/blob/" + blobName, cloudClient); + postAndCheck( + cloudClient, + baseUrl, + blobName, + ByteBuffer.wrap(stringValue.getBytes(StandardCharsets.UTF_8)), + 1); + MapWriter map = + TestSolrConfigHandlerConcurrent.getAsMap( + baseUrl + "/.system/blob/" + blobName, cloudClient); assertEquals(stringValueMd5, map._getStr("response/docs[0]/md5", null)); } - public static void createSystemCollection(SolrClient client) throws SolrServerException, IOException { + public static void createSystemCollection(SolrClient client) + throws SolrServerException, IOException { CollectionAdminResponse response1; - CollectionAdminRequest.Create createCollectionRequest = CollectionAdminRequest.createCollection(".system",1,2); + CollectionAdminRequest.Create createCollectionRequest = + CollectionAdminRequest.createCollection(".system", 1, 2); response1 = createCollectionRequest.process(client); assertEquals(0, response1.getStatus()); assertTrue(response1.isSuccess()); } - public static void postAndCheck(CloudSolrClient cloudClient, String baseUrl, String blobName, ByteBuffer bytes, int count) throws Exception { + public static void postAndCheck( + CloudSolrClient cloudClient, String baseUrl, String blobName, ByteBuffer bytes, int count) + throws Exception { postData(cloudClient, baseUrl, blobName, bytes); String url; MapWriter map = null; final RTimer timer = new RTimer(); int i = 0; - for (; i < 150; i++) {//15 secs + for (; i < 150; i++) { // 15 secs url = baseUrl + "/.system/blob/" + blobName; map = TestSolrConfigHandlerConcurrent.getAsMap(url, cloudClient); - String numFound = map._getStr(asList("response", "numFound"),null); + String numFound = map._getStr(asList("response", "numFound"), null); if (!("" + count).equals(numFound)) { Thread.sleep(100); continue; } - assertEquals("" + bytes.limit(), map._getStr("response/docs[0]/size",null)); + assertEquals("" + bytes.limit(), map._getStr("response/docs[0]/size", null)); return; } - fail(StrUtils.formatString("Could not successfully add blob after {0} attempts. Expecting {1} items. time elapsed {2} output for url is {3}", - i, count, timer.getTime(), map.toString())); + fail( + StrUtils.formatString( + "Could not successfully add blob after {0} attempts. Expecting {1} items. 
time elapsed {2} output for url is {3}", + i, count, timer.getTime(), map.toString())); } - static void compareInputAndOutput(String url, byte[] bytarr, CloudSolrClient cloudClient) throws IOException { + static void compareInputAndOutput(String url, byte[] bytarr, CloudSolrClient cloudClient) + throws IOException { HttpClient httpClient = cloudClient.getLbClient().getHttpClient(); @@ -164,10 +174,11 @@ static void compareInputAndOutput(String url, byte[] bytarr, CloudSolrClient clo } finally { httpGet.releaseConnection(); } - } - public static void postData(CloudSolrClient cloudClient, String baseUrl, String blobName, ByteBuffer bytarr) throws IOException { + public static void postData( + CloudSolrClient cloudClient, String baseUrl, String blobName, ByteBuffer bytarr) + throws IOException { HttpPost httpPost = null; HttpEntity entity; String response = null; diff --git a/solr/core/src/test/org/apache/solr/handler/TestCSVLoader.java b/solr/core/src/test/org/apache/solr/handler/TestCSVLoader.java index 6e031fb7a71..09281ac545d 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestCSVLoader.java +++ b/solr/core/src/test/org/apache/solr/handler/TestCSVLoader.java @@ -24,7 +24,6 @@ import java.nio.file.Files; import java.util.ArrayList; import java.util.List; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.util.ContentStream; @@ -40,7 +39,7 @@ public class TestCSVLoader extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_ - initCore("solrconfig.xml","schema12.xml"); + initCore("solrconfig.xml", "schema12.xml"); } String filename; @@ -57,7 +56,7 @@ public void setUp() throws Exception { filename = file.getPath(); cleanup(); } - + @Override @After public void tearDown() throws Exception { @@ -71,7 +70,8 @@ public void tearDown() throws Exception { } void makeFile(String contents) { - try (Writer out = new OutputStreamWriter(new FileOutputStream(filename), StandardCharsets.UTF_8)) { + try (Writer out = + new OutputStreamWriter(new FileOutputStream(filename), StandardCharsets.UTF_8)) { out.write(contents); } catch (Exception e) { throw new RuntimeException(e); @@ -84,7 +84,7 @@ void cleanup() { } void loadLocal(String... args) throws Exception { - LocalSolrQueryRequest req = (LocalSolrQueryRequest)req(args); + LocalSolrQueryRequest req = (LocalSolrQueryRequest) req(args); // TODO: stop using locally defined streams once stream.file and // stream.body work everywhere @@ -93,7 +93,7 @@ void loadLocal(String... 
args) throws Exception { f.setContentType("text/csv"); cs.add(f); req.setContentStreams(cs); - h.query("/update",req); + h.query("/update", req); } @Test @@ -101,250 +101,230 @@ public void testCSVLoad() throws Exception { makeFile("id\n100\n101\n102"); loadLocal(); // check default commit of false - assertQ(req("id:[100 TO 110]"),"//*[@numFound='0']"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='0']"); assertU(commit()); - assertQ(req("id:[100 TO 110]"),"//*[@numFound='3']"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='3']"); } @Test public void testCSVRowId() throws Exception { makeFile("id\n100\n101\n102"); - loadLocal("rowid", "rowid_i");//add a special field + loadLocal("rowid", "rowid_i"); // add a special field // check default commit of false assertU(commit()); - assertQ(req("rowid_i:1"),"//*[@numFound='1']"); - assertQ(req("rowid_i:2"),"//*[@numFound='1']"); - assertQ(req("rowid_i:100"),"//*[@numFound='0']"); + assertQ(req("rowid_i:1"), "//*[@numFound='1']"); + assertQ(req("rowid_i:2"), "//*[@numFound='1']"); + assertQ(req("rowid_i:100"), "//*[@numFound='0']"); makeFile("id\n200\n201\n202"); - loadLocal("rowid", "rowid_i", "rowidOffset", "100");//add a special field + loadLocal("rowid", "rowid_i", "rowidOffset", "100"); // add a special field // check default commit of false assertU(commit()); - assertQ(req("rowid_i:101"),"//*[@numFound='1']"); - assertQ(req("rowid_i:102"),"//*[@numFound='1']"); - assertQ(req("rowid_i:10000"),"//*[@numFound='0']"); + assertQ(req("rowid_i:101"), "//*[@numFound='1']"); + assertQ(req("rowid_i:102"), "//*[@numFound='1']"); + assertQ(req("rowid_i:10000"), "//*[@numFound='0']"); } @Test public void testCommitFalse() throws Exception { makeFile("id\n100\n101\n102"); - loadLocal("commit","false"); - assertQ(req("id:[100 TO 110]"),"//*[@numFound='0']"); + loadLocal("commit", "false"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='0']"); assertU(commit()); - assertQ(req("id:[100 TO 110]"),"//*[@numFound='3']"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='3']"); } @Test public void testCommitTrue() throws Exception { makeFile("id\n100\n101\n102"); - loadLocal("commit","true"); - assertQ(req("id:[100 TO 110]"),"//*[@numFound='3']"); + loadLocal("commit", "true"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='3']"); } @Test public void testLiteral() throws Exception { makeFile("id\n100"); - loadLocal("commit","true", "literal.name","LITERAL_VALUE"); - assertQ(req("*:*"),"//doc/str[@name='name'][.='LITERAL_VALUE']"); + loadLocal("commit", "true", "literal.name", "LITERAL_VALUE"); + assertQ(req("*:*"), "//doc/str[@name='name'][.='LITERAL_VALUE']"); } - @Test public void testCSV() throws Exception { - lrf.args.put(CommonParams.VERSION,"2.2"); - + lrf.args.put(CommonParams.VERSION, "2.2"); + makeFile("id,str_s\n100,\"quoted\"\n101,\n102,\"\"\n103,"); - loadLocal("commit","true"); - assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); - assertQ(req("id:100"),"//arr[@name='str_s']/str[.='quoted']"); - assertQ(req("id:101"),"count(//str[@name='str_s'])=0"); + loadLocal("commit", "true"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='4']"); + assertQ(req("id:100"), "//arr[@name='str_s']/str[.='quoted']"); + assertQ(req("id:101"), "count(//str[@name='str_s'])=0"); // 102 is a quoted zero-length field ,"", as opposed to ,, // but we can't distinguish this case (and it's debatable // if we should). Does CSV have a way to specify missing // from zero-length?
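// (In practice the loader treats the two identically: without keepEmpty both values are dropped, and the f.str_s.keepEmpty=true case further down indexes both as the empty string.)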
- assertQ(req("id:102"),"count(//str[@name='str_s'])=0"); - assertQ(req("id:103"),"count(//str[@name='str_s'])=0"); + assertQ(req("id:102"), "count(//str[@name='str_s'])=0"); + assertQ(req("id:103"), "count(//str[@name='str_s'])=0"); // test overwrite by default - loadLocal("commit","true"); - assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); + loadLocal("commit", "true"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='4']"); // test explicitly adding header=true (the default) - loadLocal("commit","true","header","true"); - assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); + loadLocal("commit", "true", "header", "true"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='4']"); // test no overwrites - loadLocal("commit","true", "overwrite","false"); - assertQ(req("id:[100 TO 110]"),"//*[@numFound='8']"); + loadLocal("commit", "true", "overwrite", "false"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='8']"); // test overwrite - loadLocal("commit","true"); - assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); + loadLocal("commit", "true"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='4']"); // test global value mapping - loadLocal("commit","true", "map","quoted:QUOTED"); - assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); - assertQ(req("id:100"),"//arr[@name='str_s']/str[.='QUOTED']"); - assertQ(req("id:101"),"count(//str[@name='str_s'])=0"); - assertQ(req("id:102"),"count(//str[@name='str_s'])=0"); - assertQ(req("id:103"),"count(//str[@name='str_s'])=0"); + loadLocal("commit", "true", "map", "quoted:QUOTED"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='4']"); + assertQ(req("id:100"), "//arr[@name='str_s']/str[.='QUOTED']"); + assertQ(req("id:101"), "count(//str[@name='str_s'])=0"); + assertQ(req("id:102"), "count(//str[@name='str_s'])=0"); + assertQ(req("id:103"), "count(//str[@name='str_s'])=0"); // test value mapping to empty (remove) - loadLocal("commit","true", "map","quoted:"); - assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); - assertQ(req("id:100"),"count(//str[@name='str_s'])=0"); + loadLocal("commit", "true", "map", "quoted:"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='4']"); + assertQ(req("id:100"), "count(//str[@name='str_s'])=0"); // test value mapping from empty - loadLocal("commit","true", "map",":EMPTY"); - assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); - assertQ(req("id:100"),"//arr[@name='str_s']/str[.='quoted']"); - assertQ(req("id:101"),"//arr[@name='str_s']/str[.='EMPTY']"); - assertQ(req("id:102"),"//arr[@name='str_s']/str[.='EMPTY']"); - assertQ(req("id:103"),"//arr[@name='str_s']/str[.='EMPTY']"); + loadLocal("commit", "true", "map", ":EMPTY"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='4']"); + assertQ(req("id:100"), "//arr[@name='str_s']/str[.='quoted']"); + assertQ(req("id:101"), "//arr[@name='str_s']/str[.='EMPTY']"); + assertQ(req("id:102"), "//arr[@name='str_s']/str[.='EMPTY']"); + assertQ(req("id:103"), "//arr[@name='str_s']/str[.='EMPTY']"); // test multiple map rules - loadLocal("commit","true", "map",":EMPTY", "map","quoted:QUOTED"); - assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); - assertQ(req("id:100"),"//arr[@name='str_s']/str[.='QUOTED']"); - assertQ(req("id:101"),"//arr[@name='str_s']/str[.='EMPTY']"); - assertQ(req("id:102"),"//arr[@name='str_s']/str[.='EMPTY']"); - assertQ(req("id:103"),"//arr[@name='str_s']/str[.='EMPTY']"); + loadLocal("commit", "true", "map", ":EMPTY", "map", "quoted:QUOTED"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='4']"); + assertQ(req("id:100"), 
"//arr[@name='str_s']/str[.='QUOTED']"); + assertQ(req("id:101"), "//arr[@name='str_s']/str[.='EMPTY']"); + assertQ(req("id:102"), "//arr[@name='str_s']/str[.='EMPTY']"); + assertQ(req("id:103"), "//arr[@name='str_s']/str[.='EMPTY']"); // test indexing empty fields - loadLocal("commit","true", "f.str_s.keepEmpty","true"); - assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); - assertQ(req("id:100"),"//arr[@name='str_s']/str[.='quoted']"); - assertQ(req("id:101"),"//arr[@name='str_s']/str[.='']"); - assertQ(req("id:102"),"//arr[@name='str_s']/str[.='']"); - assertQ(req("id:103"),"//arr[@name='str_s']/str[.='']"); + loadLocal("commit", "true", "f.str_s.keepEmpty", "true"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='4']"); + assertQ(req("id:100"), "//arr[@name='str_s']/str[.='quoted']"); + assertQ(req("id:101"), "//arr[@name='str_s']/str[.='']"); + assertQ(req("id:102"), "//arr[@name='str_s']/str[.='']"); + assertQ(req("id:103"), "//arr[@name='str_s']/str[.='']"); // test overriding the name of fields - loadLocal("commit","true", - "fieldnames","id,my_s", "header","true", - "f.my_s.map",":EMPTY"); - assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); - assertQ(req("id:100"),"//arr[@name='my_s']/str[.='quoted']"); - assertQ(req("id:101"),"count(//arr[@name='str_s']/str)=0"); - assertQ(req("id:102"),"count(//arr[@name='str_s']/str)=0"); - assertQ(req("id:103"),"count(//arr[@name='str_s']/str)=0"); - assertQ(req("id:101"),"//arr[@name='my_s']/str[.='EMPTY']"); - assertQ(req("id:102"),"//arr[@name='my_s']/str[.='EMPTY']"); - assertQ(req("id:103"),"//arr[@name='my_s']/str[.='EMPTY']"); + loadLocal("commit", "true", "fieldnames", "id,my_s", "header", "true", "f.my_s.map", ":EMPTY"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='4']"); + assertQ(req("id:100"), "//arr[@name='my_s']/str[.='quoted']"); + assertQ(req("id:101"), "count(//arr[@name='str_s']/str)=0"); + assertQ(req("id:102"), "count(//arr[@name='str_s']/str)=0"); + assertQ(req("id:103"), "count(//arr[@name='str_s']/str)=0"); + assertQ(req("id:101"), "//arr[@name='my_s']/str[.='EMPTY']"); + assertQ(req("id:102"), "//arr[@name='my_s']/str[.='EMPTY']"); + assertQ(req("id:103"), "//arr[@name='my_s']/str[.='EMPTY']"); // test that header in file was skipped - assertQ(req("id:id"),"//*[@numFound='0']"); + assertQ(req("id:id"), "//*[@numFound='0']"); // test skipping a field via the "skip" parameter - loadLocal("commit","true","keepEmpty","true","skip","str_s"); - assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); - assertQ(req("id:[100 TO 110]"),"count(//str[@name='str_s']/str)=0"); + loadLocal("commit", "true", "keepEmpty", "true", "skip", "str_s"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='4']"); + assertQ(req("id:[100 TO 110]"), "count(//str[@name='str_s']/str)=0"); // test skipping a field by specifying an empty name - loadLocal("commit","true","keepEmpty","true","fieldnames","id,"); - assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); - assertQ(req("id:[100 TO 110]"),"count(//str[@name='str_s']/str)=0"); + loadLocal("commit", "true", "keepEmpty", "true", "fieldnames", "id,"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='4']"); + assertQ(req("id:[100 TO 110]"), "count(//str[@name='str_s']/str)=0"); // test loading file as if it didn't have a header - loadLocal("commit","true", - "fieldnames","id,my_s", "header","false"); - assertQ(req("id:id"),"//*[@numFound='1']"); - assertQ(req("id:100"),"//arr[@name='my_s']/str[.='quoted']"); + loadLocal("commit", "true", "fieldnames", "id,my_s", "header", "false"); + 
assertQ(req("id:id"), "//*[@numFound='1']"); + assertQ(req("id:100"), "//arr[@name='my_s']/str[.='quoted']"); // test skipLines - loadLocal("commit","true", - "fieldnames","id,my_s", "header","false", "skipLines","1"); - assertQ(req("id:id"),"//*[@numFound='1']"); - assertQ(req("id:100"),"//arr[@name='my_s']/str[.='quoted']"); - + loadLocal("commit", "true", "fieldnames", "id,my_s", "header", "false", "skipLines", "1"); + assertQ(req("id:id"), "//*[@numFound='1']"); + assertQ(req("id:100"), "//arr[@name='my_s']/str[.='quoted']"); // test multi-valued fields via field splitting w/ mapping of subvalues - makeFile("id,str_s\n" - +"100,\"quoted\"\n" - +"101,\"a,b,c\"\n" - +"102,\"a,,b\"\n" - +"103,\n"); - loadLocal("commit","true", - "f.str_s.map",":EMPTY", - "f.str_s.split","true"); - assertQ(req("id:[100 TO 110]"),"//*[@numFound='4']"); - assertQ(req("id:100"),"//arr[@name='str_s']/str[.='quoted']"); - assertQ(req("id:101"),"//arr[@name='str_s']/str[1][.='a']"); - assertQ(req("id:101"),"//arr[@name='str_s']/str[2][.='b']"); - assertQ(req("id:101"),"//arr[@name='str_s']/str[3][.='c']"); - assertQ(req("id:102"),"//arr[@name='str_s']/str[2][.='EMPTY']"); - assertQ(req("id:103"),"//arr[@name='str_s']/str[.='EMPTY']"); - + makeFile("id,str_s\n" + "100,\"quoted\"\n" + "101,\"a,b,c\"\n" + "102,\"a,,b\"\n" + "103,\n"); + loadLocal( + "commit", "true", + "f.str_s.map", ":EMPTY", + "f.str_s.split", "true"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='4']"); + assertQ(req("id:100"), "//arr[@name='str_s']/str[.='quoted']"); + assertQ(req("id:101"), "//arr[@name='str_s']/str[1][.='a']"); + assertQ(req("id:101"), "//arr[@name='str_s']/str[2][.='b']"); + assertQ(req("id:101"), "//arr[@name='str_s']/str[3][.='c']"); + assertQ(req("id:102"), "//arr[@name='str_s']/str[2][.='EMPTY']"); + assertQ(req("id:103"), "//arr[@name='str_s']/str[.='EMPTY']"); // test alternate values for delimiters - makeFile("id|str_s\n" - +"100|^quoted^\n" - +"101|a;'b';c\n" - +"102|a;;b\n" - +"103|\n" - +"104|a\\\\b\n" // no backslash escaping should be done by default - ); - - loadLocal("commit","true", - "separator","|", - "encapsulator","^", - "f.str_s.map",":EMPTY", - "f.str_s.split","true", - "f.str_s.separator",";", - "f.str_s.encapsulator","'" - ); - assertQ(req("id:[100 TO 110]"),"//*[@numFound='5']"); - assertQ(req("id:100"),"//arr[@name='str_s']/str[.='quoted']"); - assertQ(req("id:101"),"//arr[@name='str_s']/str[1][.='a']"); - assertQ(req("id:101"),"//arr[@name='str_s']/str[2][.='b']"); - assertQ(req("id:101"),"//arr[@name='str_s']/str[3][.='c']"); - assertQ(req("id:102"),"//arr[@name='str_s']/str[2][.='EMPTY']"); - assertQ(req("id:103"),"//arr[@name='str_s']/str[.='EMPTY']"); - assertQ(req("id:104"),"//arr[@name='str_s']/str[.='a\\\\b']"); + makeFile( + "id|str_s\n" + + "100|^quoted^\n" + + "101|a;'b';c\n" + + "102|a;;b\n" + + "103|\n" + + "104|a\\\\b\n" // no backslash escaping should be done by default + ); + + loadLocal( + "commit", "true", + "separator", "|", + "encapsulator", "^", + "f.str_s.map", ":EMPTY", + "f.str_s.split", "true", + "f.str_s.separator", ";", + "f.str_s.encapsulator", "'"); + assertQ(req("id:[100 TO 110]"), "//*[@numFound='5']"); + assertQ(req("id:100"), "//arr[@name='str_s']/str[.='quoted']"); + assertQ(req("id:101"), "//arr[@name='str_s']/str[1][.='a']"); + assertQ(req("id:101"), "//arr[@name='str_s']/str[2][.='b']"); + assertQ(req("id:101"), "//arr[@name='str_s']/str[3][.='c']"); + assertQ(req("id:102"), "//arr[@name='str_s']/str[2][.='EMPTY']"); + assertQ(req("id:103"), 
"//arr[@name='str_s']/str[.='EMPTY']"); + assertQ(req("id:104"), "//arr[@name='str_s']/str[.='a\\\\b']"); // test no escaping + double encapsulator escaping by default - makeFile("id,str_s\n" - +"100,\"quoted \"\" \\ string\"\n" - +"101,unquoted \"\" \\ string\n" // double encap shouldn't be an escape outside encap - +"102,end quote \\\n" - ); - loadLocal("commit","true" - ); - assertQ(req("id:100"),"//arr[@name='str_s']/str[.='quoted \" \\ string']"); - assertQ(req("id:101"),"//arr[@name='str_s']/str[.='unquoted \"\" \\ string']"); - assertQ(req("id:102"),"//arr[@name='str_s']/str[.='end quote \\']"); - + makeFile( + "id,str_s\n" + + "100,\"quoted \"\" \\ string\"\n" + + "101,unquoted \"\" \\ string\n" // double encap shouldn't be an escape outside encap + + "102,end quote \\\n"); + loadLocal("commit", "true"); + assertQ(req("id:100"), "//arr[@name='str_s']/str[.='quoted \" \\ string']"); + assertQ(req("id:101"), "//arr[@name='str_s']/str[.='unquoted \"\" \\ string']"); + assertQ(req("id:102"), "//arr[@name='str_s']/str[.='end quote \\']"); // setting an escape should disable encapsulator - makeFile("id,str_s\n" - +"100,\"quoted \"\" \\\" \\\\ string\"\n" // quotes should be part of value - +"101,unquoted \"\" \\\" \\, \\\\ string\n" - ); - loadLocal("commit","true" - ,"escape","\\" - ); - assertQ(req("id:100"),"//arr[@name='str_s']/str[.='\"quoted \"\" \" \\ string\"']"); - assertQ(req("id:101"),"//arr[@name='str_s']/str[.='unquoted \"\" \" , \\ string']"); - + makeFile( + "id,str_s\n" + + "100,\"quoted \"\" \\\" \\\\ string\"\n" // quotes should be part of value + + "101,unquoted \"\" \\\" \\, \\\\ string\n"); + loadLocal("commit", "true", "escape", "\\"); + assertQ(req("id:100"), "//arr[@name='str_s']/str[.='\"quoted \"\" \" \\ string\"']"); + assertQ(req("id:101"), "//arr[@name='str_s']/str[.='unquoted \"\" \" , \\ string']"); } @Test public void CSVLoader_denseVector_shouldIndexCorrectly() throws Exception { - makeFile("id,vector\n" - + "999,\"1.3,2.3,3.3,4.3\"\n"); - - loadLocal("commit", "true", - "f.str_s.map", ":EMPTY", - "f.vector.split", "true"); - - assertQ(req("q", "id:999", "fl", "vector"), "*[count(//doc)=1]", - "//result/doc[1]/arr[@name=\"vector\"]/float[1][.='" + 1.3 + "']", - "//result/doc[1]/arr[@name=\"vector\"]/float[2][.='" + 2.3 + "']", - "//result/doc[1]/arr[@name=\"vector\"]/float[3][.='" + 3.3 + "']", - "//result/doc[1]/arr[@name=\"vector\"]/float[4][.='" + 4.3 + "']" - ); - } - -} + makeFile("id,vector\n" + "999,\"1.3,2.3,3.3,4.3\"\n"); + loadLocal("commit", "true", "f.str_s.map", ":EMPTY", "f.vector.split", "true"); - + assertQ( + req("q", "id:999", "fl", "vector"), + "*[count(//doc)=1]", + "//result/doc[1]/arr[@name=\"vector\"]/float[1][.='" + 1.3 + "']", + "//result/doc[1]/arr[@name=\"vector\"]/float[2][.='" + 2.3 + "']", + "//result/doc[1]/arr[@name=\"vector\"]/float[3][.='" + 3.3 + "']", + "//result/doc[1]/arr[@name=\"vector\"]/float[4][.='" + 4.3 + "']"); + } +} diff --git a/solr/core/src/test/org/apache/solr/handler/TestConfigReload.java b/solr/core/src/test/org/apache/solr/handler/TestConfigReload.java index c9efd4972ee..e3e01d2366d 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestConfigReload.java +++ b/solr/core/src/test/org/apache/solr/handler/TestConfigReload.java @@ -16,6 +16,8 @@ */ package org.apache.solr.handler; +import static java.util.Arrays.asList; + import java.io.StringReader; import java.lang.invoke.MethodHandles; import java.nio.charset.StandardCharsets; @@ -23,7 +25,6 @@ import java.util.HashSet; import java.util.List; import 
java.util.concurrent.TimeUnit; - import org.apache.http.HttpEntity; import org.apache.http.client.methods.HttpGet; import org.apache.http.util.EntityUtils; @@ -46,8 +47,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static java.util.Arrays.asList; - public class TestConfigReload extends AbstractFullDistribZkTestBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -65,69 +64,74 @@ public void test() throws Exception { private void reloadTest() throws Exception { SolrZkClient client = cloudClient.getZkStateReader().getZkClient(); if (log.isInfoEnabled()) { - log.info("live_nodes_count : {}", cloudClient.getZkStateReader().getClusterState().getLiveNodes()); + log.info( + "live_nodes_count : {}", + cloudClient.getZkStateReader().getClusterState().getLiveNodes()); } - String confPath = ZkConfigSetService.CONFIGS_ZKNODE+"/conf1/"; -// checkConfReload(client, confPath + ConfigOverlay.RESOURCE_NAME, "overlay"); - checkConfReload(client, confPath + SolrConfig.DEFAULT_CONF_FILE,"config", "/config"); - + String confPath = ZkConfigSetService.CONFIGS_ZKNODE + "/conf1/"; + // checkConfReload(client, confPath + ConfigOverlay.RESOURCE_NAME, "overlay"); + checkConfReload(client, confPath + SolrConfig.DEFAULT_CONF_FILE, "config", "/config"); } - private void checkConfReload(SolrZkClient client, String resPath, String name, String uri) throws Exception { - Stat stat = new Stat(); + private void checkConfReload(SolrZkClient client, String resPath, String name, String uri) + throws Exception { + Stat stat = new Stat(); byte[] data = null; try { data = client.getData(resPath, null, stat, true); } catch (KeeperException.NoNodeException e) { data = "{}".getBytes(StandardCharsets.UTF_8); - log.info("creating_node {}",resPath); - client.create(resPath,data, CreateMode.PERSISTENT,true); + log.info("creating_node {}", resPath); + client.create(resPath, data, CreateMode.PERSISTENT, true); } long startTime = System.nanoTime(); Stat newStat = client.setData(resPath, data, true); - client.setData("/configs/conf1", new byte[]{1}, true); + client.setData("/configs/conf1", new byte[] {1}, true); assertTrue(newStat.getVersion() > stat.getVersion()); if (log.isInfoEnabled()) { log.info("new_version {}", newStat.getVersion()); } Integer newVersion = newStat.getVersion(); long maxTimeoutSeconds = 60; - DocCollection coll = cloudClient.getZkStateReader().getClusterState().getCollection("collection1"); + DocCollection coll = + cloudClient.getZkStateReader().getClusterState().getCollection("collection1"); List urls = new ArrayList<>(); for (Slice slice : coll.getSlices()) { for (Replica replica : slice.getReplicas()) - urls.add(""+replica.getBaseUrl() + "/" + replica.get(ZkStateReader.CORE_NAME_PROP)); + urls.add("" + replica.getBaseUrl() + "/" + replica.get(ZkStateReader.CORE_NAME_PROP)); } HashSet succeeded = new HashSet<>(); - while ( TimeUnit.SECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutSeconds){ + while (TimeUnit.SECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) + < maxTimeoutSeconds) { Thread.sleep(50); for (String url : urls) { MapWriter respMap = getAsMap(url + uri); - if (String.valueOf(newVersion).equals(respMap._getStr(asList(name, "znodeVersion"), null))) { + if (String.valueOf(newVersion) + .equals(respMap._getStr(asList(name, "znodeVersion"), null))) { succeeded.add(url); } } - if(succeeded.size() == urls.size()) break; + if (succeeded.size() == urls.size()) break; succeeded.clear(); } - 
assertEquals(StrUtils.formatString("tried these servers {0} succeeded only in {1} ", urls, succeeded) , urls.size(), succeeded.size()); + assertEquals( + StrUtils.formatString("tried these servers {0} succeeded only in {1} ", urls, succeeded), + urls.size(), + succeeded.size()); } @SuppressWarnings({"rawtypes"}) private LinkedHashMapWriter getAsMap(String uri) throws Exception { - HttpGet get = new HttpGet(uri) ; + HttpGet get = new HttpGet(uri); HttpEntity entity = null; try { entity = cloudClient.getLbClient().getHttpClient().execute(get).getEntity(); String response = EntityUtils.toString(entity, StandardCharsets.UTF_8); - return (LinkedHashMapWriter) Utils.MAPWRITEROBJBUILDER.apply(Utils.getJSONParser(new StringReader(response))).getVal(); + return (LinkedHashMapWriter) + Utils.MAPWRITEROBJBUILDER.apply(Utils.getJSONParser(new StringReader(response))).getVal(); } finally { EntityUtils.consumeQuietly(entity); } } - - - - } diff --git a/solr/core/src/test/org/apache/solr/handler/TestContainerPlugin.java b/solr/core/src/test/org/apache/solr/handler/TestContainerPlugin.java index 7254b0f66a1..39b939dd80a 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestContainerPlugin.java +++ b/solr/core/src/test/org/apache/solr/handler/TestContainerPlugin.java @@ -17,6 +17,13 @@ package org.apache.solr.handler; +import static java.util.Collections.singletonList; +import static java.util.Collections.singletonMap; +import static org.apache.solr.client.solrj.SolrRequest.METHOD.GET; +import static org.apache.solr.filestore.TestDistribPackageStore.readFile; +import static org.apache.solr.filestore.TestDistribPackageStore.uploadKey; + +import com.google.common.collect.ImmutableMap; import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; @@ -24,8 +31,6 @@ import java.util.concurrent.Callable; import java.util.concurrent.Phaser; import java.util.concurrent.TimeUnit; - -import com.google.common.collect.ImmutableMap; import org.apache.commons.io.IOUtils; import org.apache.lucene.util.ResourceLoader; import org.apache.lucene.util.ResourceLoaderAware; @@ -60,12 +65,6 @@ import org.junit.Before; import org.junit.Test; -import static java.util.Collections.singletonList; -import static java.util.Collections.singletonMap; -import static org.apache.solr.client.solrj.SolrRequest.METHOD.GET; -import static org.apache.solr.filestore.TestDistribPackageStore.readFile; -import static org.apache.solr.filestore.TestDistribPackageStore.uploadKey; - public class TestContainerPlugin extends SolrCloudTestCase { private Phaser phaser; @@ -83,10 +82,9 @@ public void teardown() { @Test public void testApi() throws Exception { MiniSolrCloudCluster cluster = - configureCluster(4) - .withJettyConfig(jetty -> jetty.enableV2(true)) - .configure(); - ContainerPluginsRegistry pluginsRegistry = cluster.getOpenOverseer().getCoreContainer().getContainerPluginsRegistry(); + configureCluster(4).withJettyConfig(jetty -> jetty.enableV2(true)).configure(); + ContainerPluginsRegistry pluginsRegistry = + cluster.getOpenOverseer().getCoreContainer().getContainerPluginsRegistry(); pluginsRegistry.setPhaser(phaser); int version = phaser.getPhase(); @@ -96,55 +94,58 @@ public void testApi() throws Exception { PluginMeta plugin = new PluginMeta(); plugin.name = "testplugin"; plugin.klass = C2.class.getName(); - //test with an invalid class - V2Request req = new V2Request.Builder("/cluster/plugin") + // test with an invalid class + V2Request req = + new V2Request.Builder("/cluster/plugin") .forceV2(true) .POST() 
.withPayload(singletonMap("add", plugin)) .build(); expectError(req, cluster.getSolrClient(), errPath, "No method with @Command in class"); - //test with a valid class. This should succeed now + // test with a valid class. This should succeed now plugin.klass = C3.class.getName(); req.process(cluster.getSolrClient()); version = phaser.awaitAdvanceInterruptibly(version, 10, TimeUnit.SECONDS); - //just check if the plugin is indeed registered - V2Request readPluginState = new V2Request.Builder("/cluster/plugin") - .forceV2(true) - .GET() - .build(); + // just check if the plugin is indeed registered + V2Request readPluginState = + new V2Request.Builder("/cluster/plugin").forceV2(true).GET().build(); V2Response rsp = readPluginState.process(cluster.getSolrClient()); assertEquals(C3.class.getName(), rsp._getStr("/plugin/testplugin/class", null)); - //let's test the plugin - TestDistribPackageStore.assertResponseValues(10, - () -> new V2Request.Builder("/plugin/my/plugin") - .forceV2(true) - .GET() - .build().process(cluster.getSolrClient()), - ImmutableMap.of("/testkey", "testval")); - - //now remove the plugin + // let's test the plugin + TestDistribPackageStore.assertResponseValues( + 10, + () -> + new V2Request.Builder("/plugin/my/plugin") + .forceV2(true) + .GET() + .build() + .process(cluster.getSolrClient()), + ImmutableMap.of("/testkey", "testval")); + + // now remove the plugin new V2Request.Builder("/cluster/plugin") - .POST() - .forceV2(true) - .withPayload("{remove : testplugin}") - .build() - .process(cluster.getSolrClient()); + .POST() + .forceV2(true) + .withPayload("{remove : testplugin}") + .build() + .process(cluster.getSolrClient()); version = phaser.awaitAdvanceInterruptibly(version, 10, TimeUnit.SECONDS); - //verify it is removed + // verify it is removed rsp = readPluginState.process(cluster.getSolrClient()); assertEquals(null, rsp._get("/plugin/testplugin/class", null)); - //test a class with @EndPoint methods. + // test a class with @EndPoint methods. 
This also uses a template in the path name plugin.klass = C4.class.getName(); plugin.name = "collections"; plugin.pathPrefix = "collections"; - expectError(req, cluster.getSolrClient(), errPath, "path must not have a prefix: collections"); + expectError( + req, cluster.getSolrClient(), errPath, "path must not have a prefix: collections"); plugin.name = "my-random-name"; plugin.pathPrefix = "my-random-prefix"; @@ -152,40 +153,50 @@ public void testApi() throws Exception { req.process(cluster.getSolrClient()); version = phaser.awaitAdvanceInterruptibly(version, 10, TimeUnit.SECONDS); - //let's test the plugin - TestDistribPackageStore.assertResponseValues(10, - () -> new V2Request.Builder("/my-random-name/my/plugin") - .forceV2(true) - .GET() - .build().process(cluster.getSolrClient()), - ImmutableMap.of("/method.name", "m1")); - - TestDistribPackageStore.assertResponseValues(10, - () -> new V2Request.Builder("/my-random-prefix/their/plugin") - .forceV2(true) - .GET() - .build().process(cluster.getSolrClient()), - ImmutableMap.of("/method.name", "m2")); - //now remove the plugin + // let's test the plugin + TestDistribPackageStore.assertResponseValues( + 10, + () -> + new V2Request.Builder("/my-random-name/my/plugin") + .forceV2(true) + .GET() + .build() + .process(cluster.getSolrClient()), + ImmutableMap.of("/method.name", "m1")); + + TestDistribPackageStore.assertResponseValues( + 10, + () -> + new V2Request.Builder("/my-random-prefix/their/plugin") + .forceV2(true) + .GET() + .build() + .process(cluster.getSolrClient()), + ImmutableMap.of("/method.name", "m2")); + // now remove the plugin new V2Request.Builder("/cluster/plugin") - .POST() - .forceV2(true) - .withPayload("{remove : my-random-name}") - .build() - .process(cluster.getSolrClient()); + .POST() + .forceV2(true) + .withPayload("{remove : my-random-name}") + .build() + .process(cluster.getSolrClient()); version = phaser.awaitAdvanceInterruptibly(version, 10, TimeUnit.SECONDS); - expectFail(() -> new V2Request.Builder("/my-random-prefix/their/plugin") - .forceV2(true) - .GET() - .build() - .process(cluster.getSolrClient())); - expectFail(() -> new V2Request.Builder("/my-random-prefix/their/plugin") - .forceV2(true) - .GET() - .build() - .process(cluster.getSolrClient())); + expectFail( + () -> + new V2Request.Builder("/my-random-prefix/their/plugin") + .forceV2(true) + .GET() + .build() + .process(cluster.getSolrClient())); + expectFail( + () -> + new V2Request.Builder("/my-random-prefix/their/plugin") + .forceV2(true) + .GET() + .build() + .process(cluster.getSolrClient())); // test ClusterSingleton plugin plugin.name = "clusterSingleton"; @@ -193,12 +204,8 @@ public void testApi() throws Exception { req.process(cluster.getSolrClient()); version = phaser.awaitAdvanceInterruptibly(version, 10, TimeUnit.SECONDS); - - //just check if the plugin is indeed registered - readPluginState = new V2Request.Builder("/cluster/plugin") - .forceV2(true) - .GET() - .build(); + // just check if the plugin is indeed registered + readPluginState = new V2Request.Builder("/cluster/plugin").forceV2(true).GET().build(); rsp = readPluginState.process(cluster.getSolrClient()); assertEquals(C6.class.getName(), rsp._getStr("/plugin/clusterSingleton/class", null)); @@ -220,36 +227,44 @@ public void testApi() throws Exception { p.config = cfg; new V2Request.Builder("/cluster/plugin") - .forceV2(true) - .POST() - .withPayload(singletonMap("add", p)) - .build() - .process(cluster.getSolrClient()); + .forceV2(true) + .POST() + .withPayload(singletonMap("add", 
p)) + .build() + .process(cluster.getSolrClient()); version = phaser.awaitAdvanceInterruptibly(version, 10, TimeUnit.SECONDS); - TestDistribPackageStore.assertResponseValues(10, - () -> new V2Request.Builder("hello/plugin") - .forceV2(true) - .GET() - .build().process(cluster.getSolrClient()), - ImmutableMap.of("/config/boolVal", "true", "/config/strVal", "Something", "/config/longVal", "1234")); + TestDistribPackageStore.assertResponseValues( + 10, + () -> + new V2Request.Builder("hello/plugin") + .forceV2(true) + .GET() + .build() + .process(cluster.getSolrClient()), + ImmutableMap.of( + "/config/boolVal", "true", "/config/strVal", "Something", "/config/longVal", "1234")); cfg.strVal = "Something else"; new V2Request.Builder("/cluster/plugin") - .forceV2(true) - .POST() - .withPayload(singletonMap("update", p)) - .build() - .process(cluster.getSolrClient()); + .forceV2(true) + .POST() + .withPayload(singletonMap("update", p)) + .build() + .process(cluster.getSolrClient()); version = phaser.awaitAdvanceInterruptibly(version, 10, TimeUnit.SECONDS); - TestDistribPackageStore.assertResponseValues(10, - () -> new V2Request.Builder("hello/plugin") - .forceV2(true) - .GET() - .build().process(cluster.getSolrClient()), - ImmutableMap.of("/config/boolVal", "true", "/config/strVal", cfg.strVal, "/config/longVal", "1234")); + TestDistribPackageStore.assertResponseValues( + 10, + () -> + new V2Request.Builder("hello/plugin") + .forceV2(true) + .GET() + .build() + .process(cluster.getSolrClient()), + ImmutableMap.of( + "/config/boolVal", "true", "/config/strVal", cfg.strVal, "/config/longVal", "1234")); // kill the Overseer leader for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { @@ -279,49 +294,60 @@ private void expectFail(ThrowingRunnable runnable) throws Exception { @Test public void testApiFromPackage() throws Exception { MiniSolrCloudCluster cluster = - configureCluster(4) - .withJettyConfig(jetty -> jetty.enableV2(true)) - .configure(); + configureCluster(4).withJettyConfig(jetty -> jetty.enableV2(true)).configure(); String FILE1 = "/myplugin/v1.jar"; String FILE2 = "/myplugin/v2.jar"; - ContainerPluginsRegistry pluginsRegistry = cluster.getOpenOverseer().getCoreContainer().getContainerPluginsRegistry(); + ContainerPluginsRegistry pluginsRegistry = + cluster.getOpenOverseer().getCoreContainer().getContainerPluginsRegistry(); pluginsRegistry.setPhaser(phaser); int version = phaser.getPhase(); - String errPath = "/error/details[0]/errorMessages[0]"; try { byte[] derFile = readFile("cryptokeys/pub_key512.der"); uploadKey(derFile, PackageStoreAPI.KEYS_DIR + "/pub_key512.der", cluster); - TestPackages.postFileAndWait(cluster, "runtimecode/containerplugin.v.1.jar.bin", FILE1, - "pmrmWCDafdNpYle2rueAGnU2J6NYlcAey9mkZYbqh+5RdYo2Ln+llLF9voyRj+DDivK9GV1XdtKvD9rgCxlD7Q=="); - TestPackages.postFileAndWait(cluster, "runtimecode/containerplugin.v.2.jar.bin", FILE2, - "StR3DmqaUSL7qjDOeVEiCqE+ouiZAkW99fsL48F9oWG047o7NGgwwZ36iGgzDC3S2tPaFjRAd9Zg4UK7OZLQzg=="); - - // We have two versions of the plugin in 2 different jar files. 
They are already uploaded to the package store + TestPackages.postFileAndWait( + cluster, + "runtimecode/containerplugin.v.1.jar.bin", + FILE1, + "pmrmWCDafdNpYle2rueAGnU2J6NYlcAey9mkZYbqh+5RdYo2Ln+llLF9voyRj+DDivK9GV1XdtKvD9rgCxlD7Q=="); + TestPackages.postFileAndWait( + cluster, + "runtimecode/containerplugin.v.2.jar.bin", + FILE2, + "StR3DmqaUSL7qjDOeVEiCqE+ouiZAkW99fsL48F9oWG047o7NGgwwZ36iGgzDC3S2tPaFjRAd9Zg4UK7OZLQzg=="); + + // We have two versions of the plugin in 2 different jar files. They are already uploaded to + // the package store. Package.AddVersion add = new Package.AddVersion(); add.version = "1.0"; add.pkg = "mypkg"; add.files = singletonList(FILE1); - V2Request addPkgVersionReq = new V2Request.Builder("/cluster/package") + V2Request addPkgVersionReq = + new V2Request.Builder("/cluster/package") .forceV2(true) .POST() .withPayload(singletonMap("add", add)) .build(); addPkgVersionReq.process(cluster.getSolrClient()); - waitForAllNodesToSync(cluster, "/cluster/package", Map.of( - ":result:packages:mypkg[0]:version", "1.0", - ":result:packages:mypkg[0]:files[0]", FILE1 - )); + waitForAllNodesToSync( + cluster, + "/cluster/package", + Map.of( + ":result:packages:mypkg[0]:version", + "1.0", + ":result:packages:mypkg[0]:files[0]", + FILE1)); // Now let's create a plugin using the v1 jar file PluginMeta plugin = new PluginMeta(); plugin.name = "myplugin"; plugin.klass = "mypkg:org.apache.solr.handler.MyPlugin"; plugin.version = add.version; - final V2Request req1 = new V2Request.Builder("/cluster/plugin") + final V2Request req1 = + new V2Request.Builder("/cluster/plugin") .forceV2(true) .POST() .withPayload(singletonMap("add", plugin)) @@ -329,52 +355,56 @@ public void testApiFromPackage() throws Exception { req1.process(cluster.getSolrClient()); version = phaser.awaitAdvanceInterruptibly(version, 10, TimeUnit.SECONDS); - //verify the plugin creation - TestDistribPackageStore.assertResponseValues(10, - () -> new V2Request.Builder("/cluster/plugin"). 
- GET() - .build().process(cluster.getSolrClient()), - ImmutableMap.of( - "/plugin/myplugin/class", plugin.klass, - "/plugin/myplugin/version", plugin.version - )); - //let's test this now - Callable invokePlugin = () -> new V2Request.Builder("/plugin/my/path") - .forceV2(true) - .GET() - .build().process(cluster.getSolrClient()); - TestDistribPackageStore.assertResponseValues(10, - invokePlugin, - ImmutableMap.of("/myplugin.version", "1.0")); - - //now let's upload the jar file for version 2.0 of the plugin + // verify the plugin creation + TestDistribPackageStore.assertResponseValues( + 10, + () -> + new V2Request.Builder("/cluster/plugin") + .GET() + .build() + .process(cluster.getSolrClient()), + ImmutableMap.of( + "/plugin/myplugin/class", plugin.klass, + "/plugin/myplugin/version", plugin.version)); + // let's test this now + Callable invokePlugin = + () -> + new V2Request.Builder("/plugin/my/path") + .forceV2(true) + .GET() + .build() + .process(cluster.getSolrClient()); + TestDistribPackageStore.assertResponseValues( + 10, invokePlugin, ImmutableMap.of("/myplugin.version", "1.0")); + + // now let's upload the jar file for version 2.0 of the plugin add.version = "2.0"; add.files = singletonList(FILE2); addPkgVersionReq.process(cluster.getSolrClient()); - //here the plugin version is updated + // here the plugin version is updated plugin.version = add.version; new V2Request.Builder("/cluster/plugin") - .forceV2(true) - .POST() - .withPayload(singletonMap("update", plugin)) - .build() - .process(cluster.getSolrClient()); + .forceV2(true) + .POST() + .withPayload(singletonMap("update", plugin)) + .build() + .process(cluster.getSolrClient()); version = phaser.awaitAdvanceInterruptibly(version, 10, TimeUnit.SECONDS); - //now verify if it is indeed updated - TestDistribPackageStore.assertResponseValues(10, - () -> new V2Request.Builder("/cluster/plugin"). 
- GET() - .build().process(cluster.getSolrClient()), - ImmutableMap.of( - "/plugin/myplugin/class", plugin.klass, - "/plugin/myplugin/version", "2.0" - )); + // now verify if it is indeed updated + TestDistribPackageStore.assertResponseValues( + 10, + () -> + new V2Request.Builder("/cluster/plugin") + .GET() + .build() + .process(cluster.getSolrClient()), + ImmutableMap.of( + "/plugin/myplugin/class", plugin.klass, "/plugin/myplugin/version", "2.0")); // invoke the plugin and test the output - TestDistribPackageStore.assertResponseValues(10, - invokePlugin, - ImmutableMap.of("/myplugin.version", "2.0")); + TestDistribPackageStore.assertResponseValues( + 10, invokePlugin, ImmutableMap.of("/myplugin.version", "2.0")); plugin.name = "plugin2"; plugin.klass = "mypkg:" + C5.class.getName(); @@ -388,43 +418,34 @@ public void testApiFromPackage() throws Exception { } } - public static class CC1 extends CC { - - } + public static class CC1 extends CC {} - public static class CC2 extends CC1 { - - } + public static class CC2 extends CC1 {} public static class CC implements ConfigurablePlugin<CConfig> { private CConfig cfg; - @Override public void configure(CConfig cfg) { this.cfg = cfg; - } - @EndPoint(method = GET, - path = "/hello/plugin", - permission = PermissionNameProvider.Name.READ_PERM) + @EndPoint( + method = GET, + path = "/hello/plugin", + permission = PermissionNameProvider.Name.READ_PERM) public void m2(SolrQueryRequest req, SolrQueryResponse rsp) { rsp.add("config", cfg); } - } public static class CConfig implements ReflectMapWriter { - @JsonProperty - public String strVal; + @JsonProperty public String strVal; - @JsonProperty - public Long longVal; + @JsonProperty public Long longVal; - @JsonProperty - public Boolean boolVal; + @JsonProperty public Boolean boolVal; } public static class C6 implements ClusterSingleton { @@ -465,7 +486,6 @@ public void stop() { } } - public static class C5 implements ResourceLoaderAware { static ByteBuffer classData; private SolrResourceLoader resourceLoader; @@ -479,64 +499,57 @@ public void inform(ResourceLoader loader) throws IOException { int sz = IOUtils.read(is, buf); classData = ByteBuffer.wrap(buf, 0, sz); } catch (IOException e) { - //do not do anything + // do not do anything } } - @EndPoint(method = GET, - path = "/$plugin-name/m2", - permission = PermissionNameProvider.Name.COLL_READ_PERM) - public void m2() { - - - } - + @EndPoint( + method = GET, + path = "/$plugin-name/m2", + permission = PermissionNameProvider.Name.COLL_READ_PERM) + public void m2() {} } - public static class C1 { - - } + public static class C1 {} @EndPoint( - method = GET, - path = "/plugin/my/plugin", - permission = PermissionNameProvider.Name.COLL_READ_PERM) - public class C2 { - - - } + method = GET, + path = "/plugin/my/plugin", + permission = PermissionNameProvider.Name.COLL_READ_PERM) + public class C2 {} @EndPoint( - method = GET, - path = "/plugin/my/plugin", - permission = PermissionNameProvider.Name.COLL_READ_PERM) + method = GET, + path = "/plugin/my/plugin", + permission = PermissionNameProvider.Name.COLL_READ_PERM) public static class C3 { @Command public void read(SolrQueryRequest req, SolrQueryResponse rsp) { rsp.add("testkey", "testval"); } - } public static class C4 { - @EndPoint(method = GET, - path = "$plugin-name/my/plugin", - permission = PermissionNameProvider.Name.READ_PERM) + @EndPoint( + method = GET, + path = "$plugin-name/my/plugin", + permission = PermissionNameProvider.Name.READ_PERM) public void m1(SolrQueryRequest req, SolrQueryResponse rsp) { 
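// $plugin-name here (and $path-prefix in m2 below) is a template segment: it is filled in from the PluginMeta name/pathPrefix supplied at registration time, which is why testApi above reaches this method at /my-random-name/my/plugin.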
rsp.add("method.name", "m1"); } - @EndPoint(method = GET, - path = "$path-prefix/their/plugin", - permission = PermissionNameProvider.Name.READ_PERM) + @EndPoint( + method = GET, + path = "$path-prefix/their/plugin", + permission = PermissionNameProvider.Name.READ_PERM) public void m2(SolrQueryRequest req, SolrQueryResponse rsp) { rsp.add("method.name", "m2"); } - } - public static void waitForAllNodesToSync(MiniSolrCloudCluster cluster, String path, Map expected) throws Exception { + public static void waitForAllNodesToSync( + MiniSolrCloudCluster cluster, String path, Map expected) throws Exception { for (JettySolrRunner jettySolrRunner : cluster.getJettySolrRunners()) { String baseUrl = jettySolrRunner.getBaseUrl().toString().replace("/solr", "/api"); String url = baseUrl + path + "?wt=javabin"; @@ -544,8 +557,10 @@ public static void waitForAllNodesToSync(MiniSolrCloudCluster cluster, String pa } } - private void expectError(V2Request req, SolrClient client, String errPath, String expectErrorMsg) throws IOException, SolrServerException { - RemoteExecutionException e = expectThrows(RemoteExecutionException.class, () -> req.process(client)); + private void expectError(V2Request req, SolrClient client, String errPath, String expectErrorMsg) + throws IOException, SolrServerException { + RemoteExecutionException e = + expectThrows(RemoteExecutionException.class, () -> req.process(client)); String msg = e.getMetaData()._getStr(errPath, ""); assertTrue(expectErrorMsg, msg.contains(expectErrorMsg)); } diff --git a/solr/core/src/test/org/apache/solr/handler/TestHealthCheckHandlerLegacyMode.java b/solr/core/src/test/org/apache/solr/handler/TestHealthCheckHandlerLegacyMode.java index b19e5cf336e..13b6e219dd3 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestHealthCheckHandlerLegacyMode.java +++ b/solr/core/src/test/org/apache/solr/handler/TestHealthCheckHandlerLegacyMode.java @@ -16,6 +16,9 @@ */ package org.apache.solr.handler; +import static org.apache.solr.handler.ReplicationTestHelper.*; + +import java.io.IOException; import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.SolrTestCaseJ4.SuppressSSL; @@ -30,174 +33,211 @@ import org.junit.Before; import org.junit.Test; -import java.io.IOException; - -import static org.apache.solr.handler.ReplicationTestHelper.*; - - -/** - * Test for HealthCheckHandler in legacy mode - * - * - */ +/** Test for HealthCheckHandler in legacy mode */ @Slow -@SuppressSSL // Currently unknown why SSL does not work with this test +@SuppressSSL // Currently unknown why SSL does not work with this test public class TestHealthCheckHandlerLegacyMode extends SolrTestCaseJ4 { - HttpSolrClient leaderClientHealthCheck, followerClientHealthCheck; - - JettySolrRunner leaderJetty, followerJetty; - HttpSolrClient leaderClient, followerClient; - ReplicationTestHelper.SolrInstance leader = null, follower = null; - - private static final String context = "/solr"; - - @Before - public void setUp() throws Exception { - super.setUp(); - - systemSetPropertySolrDisableUrlAllowList("true"); - - leader = new ReplicationTestHelper.SolrInstance(createTempDir("solr-instance").toFile(), "leader", null); - leader.setUp(); - leaderJetty = ReplicationTestHelper.createAndStartJetty(leader); - leaderClient = ReplicationTestHelper.createNewSolrClient(buildUrl(leaderJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME); - leaderClientHealthCheck = ReplicationTestHelper.createNewSolrClient(buildUrl(leaderJetty.getLocalPort(), 
context)); - - follower = new SolrInstance(createTempDir("solr-instance").toFile(), "follower", leaderJetty.getLocalPort()); - follower.setUp(); - followerJetty = createAndStartJetty(follower); - followerClient = ReplicationTestHelper.createNewSolrClient(buildUrl(followerJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME); - followerClientHealthCheck = ReplicationTestHelper.createNewSolrClient(buildUrl(followerJetty.getLocalPort(), context)); - - System.setProperty("solr.indexfetcher.sotimeout2", "45000"); + HttpSolrClient leaderClientHealthCheck, followerClientHealthCheck; + JettySolrRunner leaderJetty, followerJetty; + HttpSolrClient leaderClient, followerClient; + ReplicationTestHelper.SolrInstance leader = null, follower = null; + + private static final String context = "/solr"; + + @Before + public void setUp() throws Exception { + super.setUp(); + + systemSetPropertySolrDisableUrlAllowList("true"); + + leader = + new ReplicationTestHelper.SolrInstance( + createTempDir("solr-instance").toFile(), "leader", null); + leader.setUp(); + leaderJetty = ReplicationTestHelper.createAndStartJetty(leader); + leaderClient = + ReplicationTestHelper.createNewSolrClient( + buildUrl(leaderJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME); + leaderClientHealthCheck = + ReplicationTestHelper.createNewSolrClient(buildUrl(leaderJetty.getLocalPort(), context)); + + follower = + new SolrInstance( + createTempDir("solr-instance").toFile(), "follower", leaderJetty.getLocalPort()); + follower.setUp(); + followerJetty = createAndStartJetty(follower); + followerClient = + ReplicationTestHelper.createNewSolrClient( + buildUrl(followerJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME); + followerClientHealthCheck = + ReplicationTestHelper.createNewSolrClient(buildUrl(followerJetty.getLocalPort(), context)); + + System.setProperty("solr.indexfetcher.sotimeout2", "45000"); + } + + public void clearIndexWithReplication() throws Exception { + if (numFound(ReplicationTestHelper.query("*:*", leaderClient)) != 0) { + leaderClient.deleteByQuery("*:*"); + leaderClient.commit(); + // wait for replication to sync & verify + assertEquals(0, numFound(rQuery(0, "*:*", followerClient))); } - - public void clearIndexWithReplication() throws Exception { - if (numFound(ReplicationTestHelper.query("*:*", leaderClient)) != 0) { - leaderClient.deleteByQuery("*:*"); - leaderClient.commit(); - // wait for replication to sync & verify - assertEquals(0, numFound(rQuery(0, "*:*", followerClient))); - } + } + + @Override + @After + public void tearDown() throws Exception { + super.tearDown(); + if (null != leaderJetty) { + leaderJetty.stop(); + leaderJetty = null; } - - @Override - @After - public void tearDown() throws Exception { - super.tearDown(); - if (null != leaderJetty) { - leaderJetty.stop(); - leaderJetty = null; - } - if (null != followerJetty) { - followerJetty.stop(); - followerJetty = null; - } - if (null != leaderClient) { - leaderClient.close(); - leaderClient = null; - } - if (null != followerClient) { - followerClient.close(); - followerClient = null; - } - if (null != leaderClientHealthCheck) { - leaderClientHealthCheck.close(); - leaderClientHealthCheck = null; - } - - if (null != followerClientHealthCheck) { - followerClientHealthCheck.close(); - followerClientHealthCheck = null; - } - System.clearProperty("solr.indexfetcher.sotimeout"); + if (null != followerJetty) { + followerJetty.stop(); + followerJetty = null; } - - - @Test - // keep this - public void 
doTestHealthCheckWithReplication() throws Exception { - int nDocs = 500; - - TestInjection.delayBeforeFollowerCommitRefresh = random().nextInt(10); - - // stop replication so that the follower doesn't pull the index - invokeReplicationCommand(buildUrl(followerJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME, "disablepoll"); - - // create multiple commits - int docsAdded = 0; - for (int i = 0; docsAdded < nDocs / 2; i++, docsAdded++) { - index(leaderClient, "id", i, "name", "name = " + i); - if (i % 10 == 0) { - leaderClient.commit(); - } - } - - leaderClient.commit(); - - assertNumFoundWithQuery(leaderClient, docsAdded); - - // ensure that the leader is always happy - // first try without specifying maxGenerationLag lag - ModifiableSolrParams solrParamsLeaderHealthCheck = new ModifiableSolrParams(); - HealthCheckRequest healthCheckRequestLeader = new HealthCheckRequest(); - assertEquals(CommonParams.OK, healthCheckRequestLeader.process(leaderClientHealthCheck).getResponse().get(CommonParams.STATUS)); - - // now try adding maxGenerationLag request param - solrParamsLeaderHealthCheck.add(HealthCheckRequest.PARAM_MAX_GENERATION_LAG, "2"); - assertEquals(CommonParams.OK, healthCheckRequestLeader.process(leaderClientHealthCheck).getResponse().get(CommonParams.STATUS)); - - // follower should report healthy if maxGenerationLag is not specified - HealthCheckRequest healthCheckRequestFollower = new HealthCheckRequest(); - assertEquals(CommonParams.OK, healthCheckRequestFollower.process(followerClientHealthCheck).getResponse().get(CommonParams.STATUS)); - - // ensure follower is unhealthy when maxGenerationLag is specified - // ModifiableSolrParams params = new ModifiableSolrParams(); - healthCheckRequestFollower = new HealthCheckRequest(); - healthCheckRequestFollower.setMaxGenerationLag(2); - assertEquals(CommonParams.FAILURE, healthCheckRequestFollower.process(followerClientHealthCheck).getResponse().get(CommonParams.STATUS)); - - // enable polling, force replication and ensure that the follower is healthy - //invokeReplicationCommand(buildUrl(followerJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME, "enablepoll"); - pullFromTo(leaderJetty, followerJetty); - - // check replicated and is healthy - assertNumFoundWithQuery(followerClient, docsAdded); - assertEquals(CommonParams.OK, healthCheckRequestFollower.process(followerClientHealthCheck).getResponse().get(CommonParams.STATUS)); - assertVersions(leaderClient, followerClient); - - // index more docs on the leader - for (int i = docsAdded; docsAdded < nDocs; i++, docsAdded++) { - index(leaderClient, "id", i, "name", "name = " + i); - if (i % 10 == 0) { - leaderClient.commit(); - } - } - leaderClient.commit(); - - assertNumFoundWithQuery(leaderClient, docsAdded); - - // we have added docs to the leader and polling is disabled, this should fail - healthCheckRequestFollower = new HealthCheckRequest(); - healthCheckRequestFollower.setMaxGenerationLag(2); - assertEquals(CommonParams.FAILURE, healthCheckRequestFollower.process(followerClientHealthCheck).getResponse().get(CommonParams.STATUS)); - - //force replication and ensure that the follower is healthy - pullFromTo(leaderJetty, followerJetty); - assertEquals(CommonParams.OK, healthCheckRequestFollower.process(followerClientHealthCheck).getResponse().get(CommonParams.STATUS)); - assertNumFoundWithQuery(followerClient, docsAdded); - assertVersions(leaderClient, followerClient); + if (null != leaderClient) { + leaderClient.close(); + leaderClient = null; } - - public static 
void pullFromTo(JettySolrRunner srcSolr, JettySolrRunner destSolr) throws IOException { - String srcUrl = buildUrl(srcSolr.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME; - String destUrl = buildUrl(destSolr.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME; - ReplicationTestHelper.pullFromTo(srcUrl, destUrl); + if (null != followerClient) { + followerClient.close(); + followerClient = null; + } + if (null != leaderClientHealthCheck) { + leaderClientHealthCheck.close(); + leaderClientHealthCheck = null; } - private void assertNumFoundWithQuery(HttpSolrClient client, int nDocs) throws Exception { - NamedList queryRsp = rQuery(nDocs, "*:*", client); - assertEquals(nDocs, numFound(queryRsp)); + if (null != followerClientHealthCheck) { + followerClientHealthCheck.close(); + followerClientHealthCheck = null; + } + System.clearProperty("solr.indexfetcher.sotimeout"); + } + + @Test + // keep this + public void doTestHealthCheckWithReplication() throws Exception { + int nDocs = 500; + + TestInjection.delayBeforeFollowerCommitRefresh = random().nextInt(10); + + // stop replication so that the follower doesn't pull the index + invokeReplicationCommand( + buildUrl(followerJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME, + "disablepoll"); + + // create multiple commits + int docsAdded = 0; + for (int i = 0; docsAdded < nDocs / 2; i++, docsAdded++) { + index(leaderClient, "id", i, "name", "name = " + i); + if (i % 10 == 0) { + leaderClient.commit(); + } } + leaderClient.commit(); + + assertNumFoundWithQuery(leaderClient, docsAdded); + + // ensure that the leader is always happy + // first try without specifying maxGenerationLag lag + ModifiableSolrParams solrParamsLeaderHealthCheck = new ModifiableSolrParams(); + HealthCheckRequest healthCheckRequestLeader = new HealthCheckRequest(); + assertEquals( + CommonParams.OK, + healthCheckRequestLeader + .process(leaderClientHealthCheck) + .getResponse() + .get(CommonParams.STATUS)); + + // now try adding maxGenerationLag request param + solrParamsLeaderHealthCheck.add(HealthCheckRequest.PARAM_MAX_GENERATION_LAG, "2"); + assertEquals( + CommonParams.OK, + healthCheckRequestLeader + .process(leaderClientHealthCheck) + .getResponse() + .get(CommonParams.STATUS)); + + // follower should report healthy if maxGenerationLag is not specified + HealthCheckRequest healthCheckRequestFollower = new HealthCheckRequest(); + assertEquals( + CommonParams.OK, + healthCheckRequestFollower + .process(followerClientHealthCheck) + .getResponse() + .get(CommonParams.STATUS)); + + // ensure follower is unhealthy when maxGenerationLag is specified + // ModifiableSolrParams params = new ModifiableSolrParams(); + healthCheckRequestFollower = new HealthCheckRequest(); + healthCheckRequestFollower.setMaxGenerationLag(2); + assertEquals( + CommonParams.FAILURE, + healthCheckRequestFollower + .process(followerClientHealthCheck) + .getResponse() + .get(CommonParams.STATUS)); + + // enable polling, force replication and ensure that the follower is healthy + // invokeReplicationCommand(buildUrl(followerJetty.getLocalPort(), context) + "/" + + // DEFAULT_TEST_CORENAME, "enablepoll"); + pullFromTo(leaderJetty, followerJetty); + + // check replicated and is healthy + assertNumFoundWithQuery(followerClient, docsAdded); + assertEquals( + CommonParams.OK, + healthCheckRequestFollower + .process(followerClientHealthCheck) + .getResponse() + .get(CommonParams.STATUS)); + assertVersions(leaderClient, followerClient); + + // index more docs on the leader + for (int i = 
docsAdded; docsAdded < nDocs; i++, docsAdded++) { + index(leaderClient, "id", i, "name", "name = " + i); + if (i % 10 == 0) { + leaderClient.commit(); + } + } + leaderClient.commit(); + + assertNumFoundWithQuery(leaderClient, docsAdded); + + // we have added docs to the leader and polling is disabled, this should fail + healthCheckRequestFollower = new HealthCheckRequest(); + healthCheckRequestFollower.setMaxGenerationLag(2); + assertEquals( + CommonParams.FAILURE, + healthCheckRequestFollower + .process(followerClientHealthCheck) + .getResponse() + .get(CommonParams.STATUS)); + + // force replication and ensure that the follower is healthy + pullFromTo(leaderJetty, followerJetty); + assertEquals( + CommonParams.OK, + healthCheckRequestFollower + .process(followerClientHealthCheck) + .getResponse() + .get(CommonParams.STATUS)); + assertNumFoundWithQuery(followerClient, docsAdded); + assertVersions(leaderClient, followerClient); + } + + public static void pullFromTo(JettySolrRunner srcSolr, JettySolrRunner destSolr) + throws IOException { + String srcUrl = buildUrl(srcSolr.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME; + String destUrl = buildUrl(destSolr.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME; + ReplicationTestHelper.pullFromTo(srcUrl, destUrl); + } + + private void assertNumFoundWithQuery(HttpSolrClient client, int nDocs) throws Exception { + NamedList queryRsp = rQuery(nDocs, "*:*", client); + assertEquals(nDocs, numFound(queryRsp)); + } } diff --git a/solr/core/src/test/org/apache/solr/handler/TestIncrementalCoreBackup.java b/solr/core/src/test/org/apache/solr/handler/TestIncrementalCoreBackup.java index 37ace265255..d1b2ab95a67 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestIncrementalCoreBackup.java +++ b/solr/core/src/test/org/apache/solr/handler/TestIncrementalCoreBackup.java @@ -16,6 +16,11 @@ */ package org.apache.solr.handler; +import java.io.IOException; +import java.net.URI; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Arrays; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.SolrTestCaseJ4; @@ -32,345 +37,417 @@ import org.junit.Before; import org.junit.Test; -import java.io.IOException; -import java.net.URI; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Arrays; - -@LuceneTestCase.SuppressCodecs({"SimpleText"}) // Backups do checksum validation against a footer value not present in 'SimpleText' +// Backups do checksum validation against a footer value not present in 'SimpleText' +@LuceneTestCase.SuppressCodecs({"SimpleText"}) public class TestIncrementalCoreBackup extends SolrTestCaseJ4 { - @Before // unique core per test - public void coreInit() throws Exception { - initCore("solrconfig.xml", "schema.xml"); + @Before // unique core per test + public void coreInit() throws Exception { + initCore("solrconfig.xml", "schema.xml"); + } + + @After // unique core per test + public void coreDestroy() throws Exception { + deleteCore(); + } + + @Test + public void testBackupWithDocsNotSearchable() throws Exception { + // See SOLR-11616 to see when this issue can be triggered + + assertU(adoc("id", "1")); + assertU(commit()); + + assertU(adoc("id", "2")); + + assertU(commit("openSearcher", "false")); + assertQ(req("q", "*:*"), "//result[@numFound='1']"); + assertQ(req("q", "id:1"), "//result[@numFound='1']"); + assertQ(req("q", "id:2"), "//result[@numFound='0']"); + + // call backup + final Path locationPath = 
createBackupLocation(); + final URI locationUri = bootstrapBackupLocation(locationPath); + final ShardBackupId shardBackupId = new ShardBackupId("shard1", BackupId.zero()); + + final CoreContainer cores = h.getCoreContainer(); + cores.getAllowPaths().add(Paths.get(locationUri)); + try (final CoreAdminHandler admin = new CoreAdminHandler(cores)) { + SolrQueryResponse resp = new SolrQueryResponse(); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "location", + locationPath.toString(), + CoreAdminParams.SHARD_BACKUP_ID, + shardBackupId.getIdAsString()), + resp); + assertNull("Backup should have succeeded", resp.getException()); + simpleBackupCheck(locationUri, shardBackupId); } - @After // unique core per test - public void coreDestroy() throws Exception { - deleteCore(); + } + + public void testBackupBeforeFirstCommit() throws Exception { + + // even w/o a user sending any data, the SolrCore initialiation logic should have automatically + // created an "empty" commit point that can be backed up... + final IndexCommit empty = h.getCore().getDeletionPolicy().getLatestCommit(); + assertNotNull(empty); + + // white box sanity check that the commit point of the "reader" available from SolrIndexSearcher + // matches the commit point that IDPW claims is the "latest" + // + // this is important to ensure that backup/snapshot behavior is consistent with user expection + // when using typical commit + openSearcher + assertEquals(empty, h.getCore().withSearcher(s -> s.getIndexReader().getIndexCommit())); + + assertEquals(1L, empty.getGeneration()); + assertNotNull(empty.getSegmentsFileName()); + final String initialEmptyIndexSegmentFileName = empty.getSegmentsFileName(); + + final CoreContainer cores = h.getCoreContainer(); + final CoreAdminHandler admin = new CoreAdminHandler(cores); + final Path locationPath = createBackupLocation(); + final URI locationUri = bootstrapBackupLocation(locationPath); + + final ShardBackupId firstShardBackup = new ShardBackupId("shard1", BackupId.zero()); + { // first a backup before we've ever done *anything*... 
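+      // (the simpleBackupCheck below should verify that this backup captured the auto-created
+      // initial commit, i.e. the segments file recorded in initialEmptyIndexSegmentFileName)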
+ SolrQueryResponse resp = new SolrQueryResponse(); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "location", + locationPath.toString(), + CoreAdminParams.SHARD_BACKUP_ID, + firstShardBackup.getIdAsString()), + resp); + assertNull("Backup should have succeeded", resp.getException()); + simpleBackupCheck(locationUri, firstShardBackup, initialEmptyIndexSegmentFileName); } - @Test - public void testBackupWithDocsNotSearchable() throws Exception { - //See SOLR-11616 to see when this issue can be triggered - - assertU(adoc("id", "1")); - assertU(commit()); - - assertU(adoc("id", "2")); - - assertU(commit("openSearcher", "false")); - assertQ(req("q", "*:*"), "//result[@numFound='1']"); - assertQ(req("q", "id:1"), "//result[@numFound='1']"); - assertQ(req("q", "id:2"), "//result[@numFound='0']"); - - //call backup - final Path locationPath = createBackupLocation(); - final URI locationUri = bootstrapBackupLocation(locationPath); - final ShardBackupId shardBackupId = new ShardBackupId("shard1", BackupId.zero()); - - final CoreContainer cores = h.getCoreContainer(); - cores.getAllowPaths().add(Paths.get(locationUri)); - try (final CoreAdminHandler admin = new CoreAdminHandler(cores)) { - SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "location", locationPath.toString(), - CoreAdminParams.SHARD_BACKUP_ID, shardBackupId.getIdAsString()) - , resp); - assertNull("Backup should have succeeded", resp.getException()); - simpleBackupCheck(locationUri, shardBackupId); - } + { // Empty (named) snapshot.. + SolrQueryResponse resp = new SolrQueryResponse(); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.CREATESNAPSHOT.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "commitName", + "empty_snapshotA"), + resp); + assertNull("Snapshot A should have succeeded", resp.getException()); } - public void testBackupBeforeFirstCommit() throws Exception { - - // even w/o a user sending any data, the SolrCore initialiation logic should have automatically created - // an "empty" commit point that can be backed up... - final IndexCommit empty = h.getCore().getDeletionPolicy().getLatestCommit(); - assertNotNull(empty); - - // white box sanity check that the commit point of the "reader" available from SolrIndexSearcher - // matches the commit point that IDPW claims is the "latest" - // - // this is important to ensure that backup/snapshot behavior is consistent with user expection - // when using typical commit + openSearcher - assertEquals(empty, h.getCore().withSearcher(s -> s.getIndexReader().getIndexCommit())); - - assertEquals(1L, empty.getGeneration()); - assertNotNull(empty.getSegmentsFileName()); - final String initialEmptyIndexSegmentFileName = empty.getSegmentsFileName(); - - final CoreContainer cores = h.getCoreContainer(); - final CoreAdminHandler admin = new CoreAdminHandler(cores); - final Path locationPath = createBackupLocation(); - final URI locationUri = bootstrapBackupLocation(locationPath); - - final ShardBackupId firstShardBackup = new ShardBackupId("shard1", BackupId.zero()); - { // first a backup before we've ever done *anything*... 
- SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "location", locationPath.toString(), - CoreAdminParams.SHARD_BACKUP_ID, firstShardBackup.getIdAsString()), - resp); - assertNull("Backup should have succeeded", resp.getException()); - simpleBackupCheck(locationUri, firstShardBackup, initialEmptyIndexSegmentFileName); - } - - { // Empty (named) snapshot.. - SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATESNAPSHOT.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "commitName", "empty_snapshotA"), - resp); - assertNull("Snapshot A should have succeeded", resp.getException()); - } - - assertU(adoc("id", "1")); // uncommitted - - final ShardBackupId secondShardBackupId = new ShardBackupId("shard1", new BackupId(1)); - { // second backup w/uncommited docs - SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "location", locationPath.toString(), - CoreAdminParams.SHARD_BACKUP_ID, secondShardBackupId.getIdAsString()), - resp); - assertNull("Backup should have succeeded", resp.getException()); - simpleBackupCheck(locationUri, secondShardBackupId, initialEmptyIndexSegmentFileName); - } - - { // Second empty (named) snapshot.. - SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATESNAPSHOT.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "commitName", "empty_snapshotB"), - resp); - assertNull("Snapshot A should have succeeded", resp.getException()); - } - - // Committing the doc now should not affect the existing backups or snapshots... - assertU(commit()); - - for (ShardBackupId shardBackupId: Arrays.asList(firstShardBackup, secondShardBackupId)) { - simpleBackupCheck(locationUri, shardBackupId, initialEmptyIndexSegmentFileName); - } - - // Make backups from each of the snapshots and check they are still empty as well... 
- { - final ShardBackupId thirdShardBackup = new ShardBackupId("shard1", new BackupId(2)); - SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "commitName", "empty_snapshotA", - "location", locationPath.toString(), - CoreAdminParams.SHARD_BACKUP_ID, thirdShardBackup.getIdAsString()), - resp); - assertNull("Backup from snapshot empty_snapshotA should have succeeded", resp.getException()); - simpleBackupCheck(locationUri, thirdShardBackup, initialEmptyIndexSegmentFileName); - } - { - final ShardBackupId fourthShardBackup = new ShardBackupId("shard1", new BackupId(3)); - SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "commitName", "empty_snapshotB", - "location", locationPath.toString(), - CoreAdminParams.SHARD_BACKUP_ID, fourthShardBackup.getIdAsString()), - resp); - assertNull("Backup from snapshot empty_snapshotB should have succeeded", resp.getException()); - simpleBackupCheck(locationUri, fourthShardBackup, initialEmptyIndexSegmentFileName); - } - admin.close(); + assertU(adoc("id", "1")); // uncommitted + + final ShardBackupId secondShardBackupId = new ShardBackupId("shard1", new BackupId(1)); + { // second backup w/uncommited docs + SolrQueryResponse resp = new SolrQueryResponse(); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "location", + locationPath.toString(), + CoreAdminParams.SHARD_BACKUP_ID, + secondShardBackupId.getIdAsString()), + resp); + assertNull("Backup should have succeeded", resp.getException()); + simpleBackupCheck(locationUri, secondShardBackupId, initialEmptyIndexSegmentFileName); } - /** - * Tests that a softCommit does not affect what data is in a backup - */ - public void testBackupAfterSoftCommit() throws Exception { - - // sanity check empty index... - assertQ(req("q", "id:42"), "//result[@numFound='0']"); - assertQ(req("q", "id:99"), "//result[@numFound='0']"); - assertQ(req("q", "*:*"), "//result[@numFound='0']"); - - // hard commit one doc... 
- assertU(adoc("id", "99")); - assertU(commit()); - assertQ(req("q", "id:99"), "//result[@numFound='1']"); - assertQ(req("q", "*:*"), "//result[@numFound='1']"); - - final IndexCommit oneDocCommit = h.getCore().getDeletionPolicy().getLatestCommit(); - assertNotNull(oneDocCommit); - final String oneDocSegmentFile = oneDocCommit.getSegmentsFileName(); - - final CoreContainer cores = h.getCoreContainer(); - final CoreAdminHandler admin = new CoreAdminHandler(cores); - final Path locationPath = createBackupLocation(); - final URI locationUri = bootstrapBackupLocation(locationPath); - - final ShardBackupId firstShardBackupId = new ShardBackupId("shard1", BackupId.zero()); - { // take an initial 'backup1a' containing our 1 document - final SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "name", "backup1a", - "location", locationPath.toString(), - CoreAdminParams.SHARD_BACKUP_ID, firstShardBackupId.getIdAsString()), - resp); - assertNull("Backup should have succeeded", resp.getException()); - simpleBackupCheck(locationUri, firstShardBackupId, oneDocSegmentFile); - } - - { // and an initial "snapshot1a' that should eventually match - SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATESNAPSHOT.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "commitName", "snapshot1a"), - resp); - assertNull("Snapshot 1A should have succeeded", resp.getException()); - } - - // now we add our 2nd doc, and make it searchable, but we do *NOT* hard commit it to the index dir... - assertU(adoc("id", "42")); - assertU(commit("softCommit", "true", "openSearcher", "true")); - - assertQ(req("q", "id:99"), "//result[@numFound='1']"); - assertQ(req("q", "id:42"), "//result[@numFound='1']"); - assertQ(req("q", "*:*"), "//result[@numFound='2']"); - - - final ShardBackupId secondShardBackupId = new ShardBackupId("shard1", new BackupId(1)); - { // we now have an index with two searchable docs, but a new 'backup1b' should still - // be identical to the previous backup... - final SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "location", locationPath.toString(), - CoreAdminParams.SHARD_BACKUP_ID, secondShardBackupId.getIdAsString()), - resp); - assertNull("Backup should have succeeded", resp.getException()); - simpleBackupCheck(locationUri, secondShardBackupId, oneDocSegmentFile); - } - - { // and a second "snapshot1b' should also still be identical - SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATESNAPSHOT.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "commitName", "snapshot1b"), - resp); - assertNull("Snapshot 1B should have succeeded", resp.getException()); - } - - // Hard Committing the 2nd doc now should not affect the existing backups or snapshots... 
- assertU(commit()); - simpleBackupCheck(locationUri, firstShardBackupId, oneDocSegmentFile); // backup1a - simpleBackupCheck(locationUri, secondShardBackupId, oneDocSegmentFile); // backup1b - - final ShardBackupId thirdShardBackupId = new ShardBackupId("shard1", new BackupId(2)); - { // But we should be able to confirm both docs appear in a new backup (not based on a previous snapshot) - final SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "location", locationPath.toString(), - CoreAdminParams.SHARD_BACKUP_ID, thirdShardBackupId.getIdAsString()), - resp); - assertNull("Backup should have succeeded", resp.getException()); - // TODO This doesn't actually check that backup has both docs! Can we do better than this without doing a full restore? - // Maybe validate the new segments_X file at least to show that it's picked up the latest commit? - simpleBackupCheck(locationUri, thirdShardBackupId); - } - - // if we go back and create backups from our earlier snapshots they should still only - // have 1 expected doc... - // Make backups from each of the snapshots and check they are still empty as well... - final ShardBackupId fourthShardBackupId = new ShardBackupId("shard1", new BackupId(3)); - { - SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "commitName", "snapshot1a", - "location", locationPath.toString(), - CoreAdminParams.SHARD_BACKUP_ID, fourthShardBackupId.getIdAsString()), - resp); - assertNull("Backup of snapshot1a should have succeeded", resp.getException()); - simpleBackupCheck(locationUri, fourthShardBackupId, oneDocSegmentFile); - } - final ShardBackupId fifthShardBackupId = new ShardBackupId("shard1", new BackupId(4)); - { - SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "commitName", "snapshot1b", - "location", locationPath.toString(), - CoreAdminParams.SHARD_BACKUP_ID, fifthShardBackupId.getIdAsString()), - resp); - assertNull("Backup of snapshot1b should have succeeded", resp.getException()); - simpleBackupCheck(locationUri, fifthShardBackupId, oneDocSegmentFile); - } - - admin.close(); + { // Second empty (named) snapshot.. + SolrQueryResponse resp = new SolrQueryResponse(); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.CREATESNAPSHOT.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "commitName", + "empty_snapshotB"), + resp); + assertNull("Snapshot A should have succeeded", resp.getException()); } - /** - * Check that the backup metadata file exists, and the corresponding index files can be found. - */ - private static void simpleBackupCheck(URI locationURI, ShardBackupId shardBackupId, String... expectedIndexFiles) throws IOException { - try(BackupRepository backupRepository = h.getCoreContainer().newBackupRepository(null)) { - final BackupFilePaths backupFilePaths = new BackupFilePaths(backupRepository, locationURI); - - // Ensure that the overall file structure looks correct. 
- assertTrue(backupRepository.exists(locationURI)); - assertTrue(backupRepository.exists(backupFilePaths.getIndexDir())); - assertTrue(backupRepository.exists(backupFilePaths.getShardBackupMetadataDir())); - final String metadataFilename = shardBackupId.getBackupMetadataFilename(); - final URI shardBackupMetadataURI = backupRepository.resolve(backupFilePaths.getShardBackupMetadataDir(), metadataFilename); - assertTrue(backupRepository.exists(shardBackupMetadataURI)); - - // Ensure that all files listed in the shard-meta file are stored in the index dir - final ShardBackupMetadata backupMetadata = ShardBackupMetadata.from(backupRepository, - backupFilePaths.getShardBackupMetadataDir(), shardBackupId); - for (String indexFileName : backupMetadata.listUniqueFileNames()) { - final URI indexFileURI = backupRepository.resolve(backupFilePaths.getIndexDir(), indexFileName); - assertTrue("Expected " + indexFileName + " to exist in " + backupFilePaths.getIndexDir(), backupRepository.exists(indexFileURI)); - } - - - // Ensure that the expected filenames (if any are provided) exist - for (String expectedIndexFile : expectedIndexFiles) { - assertTrue("Expected backup to hold a renamed copy of " + expectedIndexFile, - backupMetadata.listOriginalFileNames().contains(expectedIndexFile)); - } - } + // Committing the doc now should not affect the existing backups or snapshots... + assertU(commit()); + + for (ShardBackupId shardBackupId : Arrays.asList(firstShardBackup, secondShardBackupId)) { + simpleBackupCheck(locationUri, shardBackupId, initialEmptyIndexSegmentFileName); } - private Path createBackupLocation() { - return createTempDir().toAbsolutePath(); + // Make backups from each of the snapshots and check they are still empty as well... + { + final ShardBackupId thirdShardBackup = new ShardBackupId("shard1", new BackupId(2)); + SolrQueryResponse resp = new SolrQueryResponse(); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "commitName", + "empty_snapshotA", + "location", + locationPath.toString(), + CoreAdminParams.SHARD_BACKUP_ID, + thirdShardBackup.getIdAsString()), + resp); + assertNull("Backup from snapshot empty_snapshotA should have succeeded", resp.getException()); + simpleBackupCheck(locationUri, thirdShardBackup, initialEmptyIndexSegmentFileName); + } + { + final ShardBackupId fourthShardBackup = new ShardBackupId("shard1", new BackupId(3)); + SolrQueryResponse resp = new SolrQueryResponse(); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "commitName", + "empty_snapshotB", + "location", + locationPath.toString(), + CoreAdminParams.SHARD_BACKUP_ID, + fourthShardBackup.getIdAsString()), + resp); + assertNull("Backup from snapshot empty_snapshotB should have succeeded", resp.getException()); + simpleBackupCheck(locationUri, fourthShardBackup, initialEmptyIndexSegmentFileName); + } + admin.close(); + } + + /** Tests that a softCommit does not affect what data is in a backup */ + public void testBackupAfterSoftCommit() throws Exception { + + // sanity check empty index... + assertQ(req("q", "id:42"), "//result[@numFound='0']"); + assertQ(req("q", "id:99"), "//result[@numFound='0']"); + assertQ(req("q", "*:*"), "//result[@numFound='0']"); + + // hard commit one doc... 
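+    // (the segments file written by this hard commit is recorded below as oneDocSegmentFile;
+    // the later backups and snapshots should all keep referring to it)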
+ assertU(adoc("id", "99")); + assertU(commit()); + assertQ(req("q", "id:99"), "//result[@numFound='1']"); + assertQ(req("q", "*:*"), "//result[@numFound='1']"); + + final IndexCommit oneDocCommit = h.getCore().getDeletionPolicy().getLatestCommit(); + assertNotNull(oneDocCommit); + final String oneDocSegmentFile = oneDocCommit.getSegmentsFileName(); + + final CoreContainer cores = h.getCoreContainer(); + final CoreAdminHandler admin = new CoreAdminHandler(cores); + final Path locationPath = createBackupLocation(); + final URI locationUri = bootstrapBackupLocation(locationPath); + + final ShardBackupId firstShardBackupId = new ShardBackupId("shard1", BackupId.zero()); + { // take an initial 'backup1a' containing our 1 document + final SolrQueryResponse resp = new SolrQueryResponse(); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "name", + "backup1a", + "location", + locationPath.toString(), + CoreAdminParams.SHARD_BACKUP_ID, + firstShardBackupId.getIdAsString()), + resp); + assertNull("Backup should have succeeded", resp.getException()); + simpleBackupCheck(locationUri, firstShardBackupId, oneDocSegmentFile); } - private URI bootstrapBackupLocation(Path locationPath) throws IOException { - final String locationPathStr = locationPath.toString(); - h.getCoreContainer().getAllowPaths().add(locationPath); - try (BackupRepository backupRepo = h.getCoreContainer().newBackupRepository(null)) { - final URI locationUri = backupRepo.createDirectoryURI(locationPathStr); - final BackupFilePaths backupFilePaths = new BackupFilePaths(backupRepo, locationUri); - backupFilePaths.createIncrementalBackupFolders(); - return locationUri; - } + { // and an initial "snapshot1a' that should eventually match + SolrQueryResponse resp = new SolrQueryResponse(); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.CREATESNAPSHOT.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "commitName", + "snapshot1a"), + resp); + assertNull("Snapshot 1A should have succeeded", resp.getException()); + } + + // now we add our 2nd doc, and make it searchable, but we do *NOT* hard commit it to the index + // dir... + assertU(adoc("id", "42")); + assertU(commit("softCommit", "true", "openSearcher", "true")); + + assertQ(req("q", "id:99"), "//result[@numFound='1']"); + assertQ(req("q", "id:42"), "//result[@numFound='1']"); + assertQ(req("q", "*:*"), "//result[@numFound='2']"); + + final ShardBackupId secondShardBackupId = new ShardBackupId("shard1", new BackupId(1)); + { // we now have an index with two searchable docs, but a new 'backup1b' should still + // be identical to the previous backup... 
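+      // (a soft commit makes the new doc searchable without writing a new commit point, so the
+      // latest IndexCommit that backups are taken from is unchanged)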
+ final SolrQueryResponse resp = new SolrQueryResponse(); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "location", + locationPath.toString(), + CoreAdminParams.SHARD_BACKUP_ID, + secondShardBackupId.getIdAsString()), + resp); + assertNull("Backup should have succeeded", resp.getException()); + simpleBackupCheck(locationUri, secondShardBackupId, oneDocSegmentFile); + } + + { // and a second "snapshot1b' should also still be identical + SolrQueryResponse resp = new SolrQueryResponse(); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.CREATESNAPSHOT.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "commitName", + "snapshot1b"), + resp); + assertNull("Snapshot 1B should have succeeded", resp.getException()); + } + + // Hard Committing the 2nd doc now should not affect the existing backups or snapshots... + assertU(commit()); + simpleBackupCheck(locationUri, firstShardBackupId, oneDocSegmentFile); // backup1a + simpleBackupCheck(locationUri, secondShardBackupId, oneDocSegmentFile); // backup1b + + final ShardBackupId thirdShardBackupId = new ShardBackupId("shard1", new BackupId(2)); + { // But we should be able to confirm both docs appear in a new backup (not based on a previous + // snapshot) + final SolrQueryResponse resp = new SolrQueryResponse(); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "location", + locationPath.toString(), + CoreAdminParams.SHARD_BACKUP_ID, + thirdShardBackupId.getIdAsString()), + resp); + assertNull("Backup should have succeeded", resp.getException()); + // TODO This doesn't actually check that backup has both docs! Can we do better than this + // without doing a full restore? + // Maybe validate the new segments_X file at least to show that it's picked up the latest + // commit? + simpleBackupCheck(locationUri, thirdShardBackupId); } -} + // if we go back and create backups from our earlier snapshots they should still only + // have 1 expected doc... + // Make backups from each of the snapshots and check they are still empty as well... 
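+    // ("still empty" meaning unchanged since the snapshots were taken: each snapshot-based
+    // backup should still reference oneDocSegmentFile, the single hard-committed doc's commit)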
+ final ShardBackupId fourthShardBackupId = new ShardBackupId("shard1", new BackupId(3)); + { + SolrQueryResponse resp = new SolrQueryResponse(); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "commitName", + "snapshot1a", + "location", + locationPath.toString(), + CoreAdminParams.SHARD_BACKUP_ID, + fourthShardBackupId.getIdAsString()), + resp); + assertNull("Backup of snapshot1a should have succeeded", resp.getException()); + simpleBackupCheck(locationUri, fourthShardBackupId, oneDocSegmentFile); + } + final ShardBackupId fifthShardBackupId = new ShardBackupId("shard1", new BackupId(4)); + { + SolrQueryResponse resp = new SolrQueryResponse(); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "commitName", + "snapshot1b", + "location", + locationPath.toString(), + CoreAdminParams.SHARD_BACKUP_ID, + fifthShardBackupId.getIdAsString()), + resp); + assertNull("Backup of snapshot1b should have succeeded", resp.getException()); + simpleBackupCheck(locationUri, fifthShardBackupId, oneDocSegmentFile); + } + + admin.close(); + } + + /** Check that the backup metadata file exists, and the corresponding index files can be found. */ + private static void simpleBackupCheck( + URI locationURI, ShardBackupId shardBackupId, String... expectedIndexFiles) + throws IOException { + try (BackupRepository backupRepository = h.getCoreContainer().newBackupRepository(null)) { + final BackupFilePaths backupFilePaths = new BackupFilePaths(backupRepository, locationURI); + + // Ensure that the overall file structure looks correct. + assertTrue(backupRepository.exists(locationURI)); + assertTrue(backupRepository.exists(backupFilePaths.getIndexDir())); + assertTrue(backupRepository.exists(backupFilePaths.getShardBackupMetadataDir())); + final String metadataFilename = shardBackupId.getBackupMetadataFilename(); + final URI shardBackupMetadataURI = + backupRepository.resolve(backupFilePaths.getShardBackupMetadataDir(), metadataFilename); + assertTrue(backupRepository.exists(shardBackupMetadataURI)); + + // Ensure that all files listed in the shard-meta file are stored in the index dir + final ShardBackupMetadata backupMetadata = + ShardBackupMetadata.from( + backupRepository, backupFilePaths.getShardBackupMetadataDir(), shardBackupId); + for (String indexFileName : backupMetadata.listUniqueFileNames()) { + final URI indexFileURI = + backupRepository.resolve(backupFilePaths.getIndexDir(), indexFileName); + assertTrue( + "Expected " + indexFileName + " to exist in " + backupFilePaths.getIndexDir(), + backupRepository.exists(indexFileURI)); + } + + // Ensure that the expected filenames (if any are provided) exist + for (String expectedIndexFile : expectedIndexFiles) { + assertTrue( + "Expected backup to hold a renamed copy of " + expectedIndexFile, + backupMetadata.listOriginalFileNames().contains(expectedIndexFile)); + } + } + } + + private Path createBackupLocation() { + return createTempDir().toAbsolutePath(); + } + + private URI bootstrapBackupLocation(Path locationPath) throws IOException { + final String locationPathStr = locationPath.toString(); + h.getCoreContainer().getAllowPaths().add(locationPath); + try (BackupRepository backupRepo = h.getCoreContainer().newBackupRepository(null)) { + final URI locationUri = backupRepo.createDirectoryURI(locationPathStr); + final BackupFilePaths 
backupFilePaths = new BackupFilePaths(backupRepo, locationUri); + backupFilePaths.createIncrementalBackupFolders(); + return locationUri; + } + } +} diff --git a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java index 9c2417c3a3b..67b18f648a8 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java +++ b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java @@ -16,6 +16,18 @@ */ package org.apache.solr.handler; +import static org.apache.solr.handler.ReplicationTestHelper.*; +import static org.junit.matchers.JUnitMatchers.containsString; + +import java.io.File; +import java.io.FilenameFilter; +import java.io.IOException; +import java.io.InputStream; +import java.lang.invoke.MethodHandles; +import java.net.URL; +import java.nio.file.Paths; +import java.util.*; +import java.util.concurrent.TimeUnit; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.store.Directory; @@ -65,29 +77,13 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.File; -import java.io.FilenameFilter; -import java.io.IOException; -import java.io.InputStream; -import java.lang.invoke.MethodHandles; -import java.net.URL; -import java.nio.file.Paths; -import java.util.*; -import java.util.concurrent.TimeUnit; - -import static org.apache.solr.handler.ReplicationTestHelper.*; -import static org.junit.matchers.JUnitMatchers.containsString; - /** * Test for ReplicationHandler * - * * @since 1.4 */ @Slow -@SuppressSSL // Currently unknown why SSL does not work with this test -// commented 20-July-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 12-Jun-2018 -// commented out on: 24-Dec-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 23-Aug-2018 +@SuppressSSL // Currently unknown why SSL does not work with this test public class TestReplicationHandler extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -108,25 +104,30 @@ public class TestReplicationHandler extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() { useLegacyParams = rarely(); - } @Before public void setUp() throws Exception { super.setUp(); systemSetPropertySolrDisableUrlAllowList("true"); -// System.setProperty("solr.directoryFactory", "solr.StandardDirectoryFactory"); + // System.setProperty("solr.directoryFactory", "solr.StandardDirectoryFactory"); // For manual testing only // useFactory(null); // force an FS factory. 
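    // set up a leader/follower pair, each with its own temp home dir, on random ports (port 0)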
leader = new SolrInstance(createTempDir("solr-instance").toFile(), "leader", null); leader.setUp(); leaderJetty = createAndStartJetty(leader); - leaderClient = ReplicationTestHelper.createNewSolrClient(buildUrl(leaderJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME); + leaderClient = + ReplicationTestHelper.createNewSolrClient( + buildUrl(leaderJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME); - follower = new SolrInstance(createTempDir("solr-instance").toFile(), "follower", leaderJetty.getLocalPort()); + follower = + new SolrInstance( + createTempDir("solr-instance").toFile(), "follower", leaderJetty.getLocalPort()); follower.setUp(); followerJetty = createAndStartJetty(follower); - followerClient = ReplicationTestHelper.createNewSolrClient(buildUrl(followerJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME); + followerClient = + ReplicationTestHelper.createNewSolrClient( + buildUrl(followerJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME); System.setProperty("solr.indexfetcher.sotimeout2", "45000"); } @@ -165,7 +166,9 @@ public void tearDown() throws Exception { } static JettySolrRunner createAndStartJetty(SolrInstance instance) throws Exception { - FileUtils.copyFile(new File(SolrTestCaseJ4.TEST_HOME(), "solr.xml"), new File(instance.getHomeDir(), "solr.xml")); + FileUtils.copyFile( + new File(SolrTestCaseJ4.TEST_HOME(), "solr.xml"), + new File(instance.getHomeDir(), "solr.xml")); Properties nodeProperties = new Properties(); nodeProperties.setProperty("solr.data.dir", instance.getDataDir()); JettyConfig jettyConfig = JettyConfig.builder().setContext("/solr").setPort(0).build(); @@ -186,18 +189,18 @@ NamedList query(String query, SolrClient s) throws SolrServerException, ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", query); - params.add("sort","id desc"); + params.add("sort", "id desc"); QueryResponse qres = s.query(params); return qres.getResponse(); } /** will sleep up to 30 seconds, looking for expectedDocCount */ - private NamedList rQuery(int expectedDocCount, String query, SolrClient client) throws Exception { + private NamedList rQuery(int expectedDocCount, String query, SolrClient client) + throws Exception { int timeSlept = 0; NamedList res = query(query, client); - while (expectedDocCount != numFound(res) - && timeSlept < 30000) { + while (expectedDocCount != numFound(res) && timeSlept < 30000) { log.info("Waiting for {} docs", expectedDocCount); timeSlept += 100; Thread.sleep(100); @@ -215,18 +218,17 @@ private long numFound(NamedList res) { private NamedList getDetails(SolrClient s) throws Exception { - ModifiableSolrParams params = new ModifiableSolrParams(); - params.set("command","details"); - params.set("_trace","getDetails"); - params.set("qt",ReplicationHandler.PATH); + params.set("command", "details"); + params.set("_trace", "getDetails"); + params.set("qt", ReplicationHandler.PATH); QueryRequest req = new QueryRequest(params); NamedList res = s.request(req); assertReplicationResponseSucceeded(res); - @SuppressWarnings("unchecked") NamedList details - = (NamedList) res.get("details"); + @SuppressWarnings("unchecked") + NamedList details = (NamedList) res.get("details"); assertNotNull("null details", details); @@ -236,9 +238,9 @@ private NamedList getDetails(SolrClient s) throws Exception { private NamedList getIndexVersion(SolrClient s) throws Exception { ModifiableSolrParams params = new ModifiableSolrParams(); - params.set("command","indexversion"); - params.set("_trace","getIndexVersion"); - 
params.set("qt",ReplicationHandler.PATH); + params.set("command", "indexversion"); + params.set("_trace", "getIndexVersion"); + params.set("qt", ReplicationHandler.PATH); QueryRequest req = new QueryRequest(params); NamedList res = s.request(req); @@ -250,9 +252,9 @@ private NamedList getIndexVersion(SolrClient s) throws Exception { private NamedList reloadCore(SolrClient s, String core) throws Exception { ModifiableSolrParams params = new ModifiableSolrParams(); - params.set("action","reload"); + params.set("action", "reload"); params.set("core", core); - params.set("qt","/admin/cores"); + params.set("qt", "/admin/cores"); QueryRequest req = new QueryRequest(params); try (HttpSolrClient adminClient = adminClient(s)) { @@ -260,11 +262,10 @@ private NamedList reloadCore(SolrClient s, String core) throws Exception assertNotNull("null response from server", res); return res; } - } private HttpSolrClient adminClient(SolrClient client) { - String adminUrl = ((HttpSolrClient)client).getBaseURL().replace("/collection1", ""); + String adminUrl = ((HttpSolrClient) client).getBaseURL().replace("/collection1", ""); return getHttpSolrClient(adminUrl); } @@ -279,11 +280,14 @@ public void testUrlAllowList() throws Exception { // Expect an exception because the leader URL is not allowed. systemClearPropertySolrDisableUrlAllowList(); SolrException e = expectThrows(SolrException.class, this::doTestDetails); - assertTrue(e.getMessage().contains("nor in the configured '" + AllowListUrlChecker.URL_ALLOW_LIST + "'")); + assertTrue( + e.getMessage() + .contains("nor in the configured '" + AllowListUrlChecker.URL_ALLOW_LIST + "'")); // Set the allow-list to allow the leader URL. // Expect the same test to pass now. - System.setProperty(TEST_URL_ALLOW_LIST, leaderJetty.getBaseUrl() + "," + followerJetty.getBaseUrl()); + System.setProperty( + TEST_URL_ALLOW_LIST, leaderJetty.getBaseUrl() + "," + followerJetty.getBaseUrl()); try { doTestDetails(); } finally { @@ -301,19 +305,20 @@ public void doTestDetails() throws Exception { followerClient.close(); leaderClient.close(); - leaderClient = ReplicationTestHelper.createNewSolrClient(buildUrl(leaderJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME); - followerClient = ReplicationTestHelper.createNewSolrClient(buildUrl(followerJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME); + leaderClient = + ReplicationTestHelper.createNewSolrClient( + buildUrl(leaderJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME); + followerClient = + ReplicationTestHelper.createNewSolrClient( + buildUrl(followerJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME); clearIndexWithReplication(); { NamedList details = getDetails(leaderClient); - assertEquals("leader isLeader?", - "true", details.get("isLeader")); - assertEquals("leader isFollower?", - "false", details.get("isFollower")); - assertNotNull("leader has leader section", - details.get("leader")); + assertEquals("leader isLeader?", "true", details.get("isLeader")); + assertEquals("leader isFollower?", "false", details.get("isFollower")); + assertNotNull("leader has leader section", details.get("leader")); } // check details on the follower a couple of times before & after fetching @@ -324,29 +329,37 @@ public void doTestDetails() throws Exception { if (i > 0) { rQuery(i, "*:*", followerClient); - List replicatedAtCount = (List) ((NamedList) details.get("follower")).get("indexReplicatedAtList"); + List replicatedAtCount = + (List) ((NamedList) details.get("follower")).get("indexReplicatedAtList"); 
int tries = 0; while ((replicatedAtCount == null || replicatedAtCount.size() < i) && tries++ < 5) { Thread.sleep(1000); details = getDetails(followerClient); - replicatedAtCount = (List) ((NamedList) details.get("follower")).get("indexReplicatedAtList"); + replicatedAtCount = + (List) ((NamedList) details.get("follower")).get("indexReplicatedAtList"); } - assertNotNull("Expected to see that the follower has replicated" + i + ": " + details.toString(), replicatedAtCount); + assertNotNull( + "Expected to see that the follower has replicated" + i + ": " + details.toString(), + replicatedAtCount); - // we can have more replications than we added docs because a replication can legally fail and try - // again (sometimes we cannot merge into a live index and have to try again) - assertTrue("i:" + i + " replicationCount:" + replicatedAtCount.size(), replicatedAtCount.size() >= i); + // we can have more replications than we added docs because a replication can legally fail + // and try again (sometimes we cannot merge into a live index and have to try again) + assertTrue( + "i:" + i + " replicationCount:" + replicatedAtCount.size(), + replicatedAtCount.size() >= i); } assertEquals(i + ": " + "follower isLeader?", "false", details.get("isLeader")); assertEquals(i + ": " + "follower isFollower?", "true", details.get("isFollower")); assertNotNull(i + ": " + "follower has follower section", details.get("follower")); // SOLR-2677: assert not false negatives - Object timesFailed = ((NamedList)details.get("follower")).get(IndexFetcher.TIMES_FAILED); + Object timesFailed = ((NamedList) details.get("follower")).get(IndexFetcher.TIMES_FAILED); // SOLR-7134: we can have a fail because some mock index files have no checksum, will // always be downloaded, and may not be able to be moved into the existing index - assertTrue(i + ": " + "follower has fetch error count: " + timesFailed, timesFailed == null || ((Number) timesFailed).intValue() == 1); + assertTrue( + i + ": " + "follower has fetch error count: " + timesFailed, + timesFailed == null || ((Number) timesFailed).intValue() == 1); if (3 != i) { // index & fetch @@ -360,27 +373,28 @@ public void doTestDetails() throws Exception { JettySolrRunner repeaterJetty = null; SolrClient repeaterClient = null; try { - repeater = new SolrInstance(createTempDir("solr-instance").toFile(), "repeater", leaderJetty.getLocalPort()); + repeater = + new SolrInstance( + createTempDir("solr-instance").toFile(), "repeater", leaderJetty.getLocalPort()); repeater.setUp(); repeaterJetty = createAndStartJetty(repeater); - repeaterClient = ReplicationTestHelper.createNewSolrClient(buildUrl(repeaterJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME); - + repeaterClient = + ReplicationTestHelper.createNewSolrClient( + buildUrl(repeaterJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME); NamedList details = getDetails(repeaterClient); - assertEquals("repeater isLeader?", - "true", details.get("isLeader")); - assertEquals("repeater isFollower?", - "true", details.get("isFollower")); - assertNotNull("repeater has leader section", - details.get("leader")); - assertNotNull("repeater has follower section", - details.get("follower")); + assertEquals("repeater isLeader?", "true", details.get("isLeader")); + assertEquals("repeater isFollower?", "true", details.get("isFollower")); + assertNotNull("repeater has leader section", details.get("leader")); + assertNotNull("repeater has follower section", details.get("follower")); } finally { try { if (repeaterJetty != null) 
repeaterJetty.stop(); - } catch (Exception e) { /* :NOOP: */ } + } catch (Exception e) { + /* :NOOP: */ + } if (repeaterClient != null) repeaterClient.close(); } } @@ -391,22 +405,23 @@ public void testLegacyConfiguration() throws Exception { JettySolrRunner instanceJetty = null; SolrClient client = null; try { - solrInstance = new SolrInstance(createTempDir("solr-instance").toFile(), "replication-legacy", leaderJetty.getLocalPort()); + solrInstance = + new SolrInstance( + createTempDir("solr-instance").toFile(), + "replication-legacy", + leaderJetty.getLocalPort()); solrInstance.setUp(); instanceJetty = createAndStartJetty(solrInstance); - client = ReplicationTestHelper.createNewSolrClient(buildUrl(instanceJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME); - + client = + ReplicationTestHelper.createNewSolrClient( + buildUrl(instanceJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME); NamedList details = getDetails(client); - assertEquals("repeater isLeader?", - "true", details.get("isLeader")); - assertEquals("repeater isFollower?", - "true", details.get("isFollower")); - assertNotNull("repeater has leader section", - details.get("leader")); - assertNotNull("repeater has follower section", - details.get("follower")); + assertEquals("repeater isLeader?", "true", details.get("isLeader")); + assertEquals("repeater isFollower?", "true", details.get("isFollower")); + assertNotNull("repeater has leader section", details.get("leader")); + assertNotNull("repeater has follower section", details.get("follower")); } finally { if (instanceJetty != null) { @@ -416,10 +431,9 @@ public void testLegacyConfiguration() throws Exception { } } - /** - * Verify that empty commits and/or commits with openSearcher=false - * on the leader do not cause subsequent replication problems on the follower + * Verify that empty commits and/or commits with openSearcher=false on the leader do not cause + * subsequent replication problems on the follower */ public void testEmptyCommits() throws Exception { clearIndexWithReplication(); @@ -464,7 +478,6 @@ public void testEmptyCommits() throws Exception { rQuery(4, q, leaderClient); rQuery(4, q, followerClient); assertVersions(leaderClient, followerClient); - } @Test @@ -475,8 +488,10 @@ public void doTestReplicateAfterWrite2Follower() throws Exception { index(leaderClient, "id", i, "name", "name = " + i); } - invokeReplicationCommand(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME, "disableReplication"); - invokeReplicationCommand(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME, "disablepoll"); + invokeReplicationCommand( + buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME, "disableReplication"); + invokeReplicationCommand( + buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME, "disablepoll"); leaderClient.commit(); @@ -496,12 +511,14 @@ public void doTestReplicateAfterWrite2Follower() throws Exception { index(followerClient, "id", 555, "name", "name = " + 555); followerClient.commit(true, true); - //this doc is added to follower so it should show an item w/ that result + // this doc is added to follower so it should show an item w/ that result assertEquals(1, numFound(rQuery(1, "id:555", followerClient))); - //Let's fetch the index rather than rely on the polling. 
- invokeReplicationCommand(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME, "enablereplication"); - invokeReplicationCommand(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME, "fetchindex"); + // Let's fetch the index rather than rely on the polling. + invokeReplicationCommand( + buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME, "enablereplication"); + invokeReplicationCommand( + buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME, "fetchindex"); /* //the follower should have done a full copy of the index so the doc with id:555 should not be there in the follower now @@ -525,8 +542,7 @@ public void doTestIndexAndConfigReplication() throws Exception { clearIndexWithReplication(); nDocs--; - for (int i = 0; i < nDocs; i++) - index(leaderClient, "id", i, "name", "name = " + i); + for (int i = 0; i < nDocs; i++) index(leaderClient, "id", i, "name", "name = " + i); leaderClient.commit(); @@ -534,29 +550,31 @@ public void doTestIndexAndConfigReplication() throws Exception { SolrDocumentList leaderQueryResult = (SolrDocumentList) leaderQueryRsp.get("response"); assertEquals(nDocs, numFound(leaderQueryRsp)); - //get docs from follower and check if number is equal to leader + // get docs from follower and check if number is equal to leader NamedList followerQueryRsp = rQuery(nDocs, "*:*", followerClient); SolrDocumentList followerQueryResult = (SolrDocumentList) followerQueryRsp.get("response"); assertEquals(nDocs, numFound(followerQueryRsp)); - //compare results - String cmp = BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); + // compare results + String cmp = + BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); assertNull(cmp); assertVersions(leaderClient, followerClient); - //start config files replication test + // start config files replication test leaderClient.deleteByQuery("*:*"); leaderClient.commit(); - //change the schema on leader + // change the schema on leader leader.copyConfigFile(CONF_DIR + "schema-replication2.xml", "schema.xml"); leaderJetty.stop(); leaderJetty = createAndStartJetty(leader); leaderClient.close(); - leaderClient = createNewSolrClient(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + leaderClient = + createNewSolrClient(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); follower.setTestPort(leaderJetty.getLocalPort()); follower.copyConfigFile(follower.getSolrConfigFile(), "solrconfig.xml"); @@ -575,12 +593,13 @@ public void doTestIndexAndConfigReplication() throws Exception { followerJetty = createAndStartJetty(follower); followerClient.close(); - followerClient = createNewSolrClient(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); - //add a doc with new field and commit on leader to trigger index fetch from follower. + followerClient = + createNewSolrClient(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + // add a doc with new field and commit on leader to trigger index fetch from follower. index(leaderClient, "id", "2000", "name", "name = " + 2000, "newname", "newname = " + 2000); leaderClient.commit(); - assertEquals(1, numFound( rQuery(1, "*:*", leaderClient))); + assertEquals(1, numFound(rQuery(1, "*:*", leaderClient))); followerQueryRsp = rQuery(1, "*:*", followerClient); assertVersions(leaderClient, followerClient); @@ -602,8 +621,7 @@ public void doTestStopPoll() throws Exception { // setup leader/follower. 
// stop polling on follower, add a doc to leader and verify follower hasn't picked it. nDocs--; - for (int i = 0; i < nDocs; i++) - index(leaderClient, "id", i, "name", "name = " + i); + for (int i = 0; i < nDocs; i++) index(leaderClient, "id", i, "name", "name = " + i); leaderClient.commit(); @@ -611,61 +629,62 @@ public void doTestStopPoll() throws Exception { SolrDocumentList leaderQueryResult = (SolrDocumentList) leaderQueryRsp.get("response"); assertEquals(nDocs, numFound(leaderQueryRsp)); - //get docs from follower and check if number is equal to leader + // get docs from follower and check if number is equal to leader NamedList followerQueryRsp = rQuery(nDocs, "*:*", followerClient); SolrDocumentList followerQueryResult = (SolrDocumentList) followerQueryRsp.get("response"); assertEquals(nDocs, numFound(followerQueryRsp)); - //compare results - String cmp = BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); + // compare results + String cmp = + BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); assertNull(cmp); // start stop polling test - invokeReplicationCommand(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME, "disablepoll"); + invokeReplicationCommand( + buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME, "disablepoll"); index(leaderClient, "id", 501, "name", "name = " + 501); leaderClient.commit(); - //get docs from leader and check if number is equal to leader - assertEquals(nDocs+1, numFound(rQuery(nDocs+1, "*:*", leaderClient))); + // get docs from leader and check if number is equal to leader + assertEquals(nDocs + 1, numFound(rQuery(nDocs + 1, "*:*", leaderClient))); // NOTE: this test is weird, we want to verify it DOESN'T replicate... // for now, add a sleep for this, but the logic is weird. Thread.sleep(3000); - //get docs from follower and check if number is not equal to leader; polling is disabled + // get docs from follower and check if number is not equal to leader; polling is disabled assertEquals(nDocs, numFound(rQuery(nDocs, "*:*", followerClient))); // re-enable replication - invokeReplicationCommand(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME, "enablepoll"); + invokeReplicationCommand( + buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME, "enablepoll"); - assertEquals(nDocs+1, numFound(rQuery(nDocs+1, "*:*", followerClient))); + assertEquals(nDocs + 1, numFound(rQuery(nDocs + 1, "*:*", followerClient))); } /** - * We assert that if leader is down for more than poll interval, - * the follower doesn't re-fetch the whole index from leader again if - * the index hasn't changed. See SOLR-9036 + * We assert that if leader is down for more than poll interval, the follower doesn't re-fetch the + * whole index from leader again if the index hasn't changed. 
See SOLR-9036 */ @Test - public void doTestIndexFetchOnLeaderRestart() throws Exception { + public void doTestIndexFetchOnLeaderRestart() throws Exception { useFactory(null); try { clearIndexWithReplication(); // change solrconfig having 'replicateAfter startup' option on leader - leader.copyConfigFile(CONF_DIR + "solrconfig-leader2.xml", - "solrconfig.xml"); + leader.copyConfigFile(CONF_DIR + "solrconfig-leader2.xml", "solrconfig.xml"); leaderJetty.stop(); leaderJetty.start(); // close and re-create leader client because its connection pool has stale connections leaderClient.close(); - leaderClient = createNewSolrClient(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + leaderClient = + createNewSolrClient(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); nDocs--; - for (int i = 0; i < nDocs; i++) - index(leaderClient, "id", i, "name", "name = " + i); + for (int i = 0; i < nDocs; i++) index(leaderClient, "id", i, "name", "name = " + i); leaderClient.commit(); @@ -673,13 +692,14 @@ public void doTestIndexFetchOnLeaderRestart() throws Exception { SolrDocumentList leaderQueryResult = (SolrDocumentList) leaderQueryRsp.get("response"); assertEquals(nDocs, numFound(leaderQueryRsp)); - //get docs from follower and check if number is equal to leader + // get docs from follower and check if number is equal to leader NamedList followerQueryRsp = rQuery(nDocs, "*:*", followerClient); SolrDocumentList followerQueryResult = (SolrDocumentList) followerQueryRsp.get("response"); assertEquals(nDocs, numFound(followerQueryRsp)); - //compare results - String cmp = BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); + // compare results + String cmp = + BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); assertNull(cmp); String timesReplicatedString = getFollowerDetails("timesIndexReplicated"); @@ -695,38 +715,45 @@ public void doTestIndexFetchOnLeaderRestart() throws Exception { } previousTimesFailed = Integer.parseInt(timesFailed); - // Sometimes replication will fail because leader's core is still loading; make sure there was one success + // Sometimes replication will fail because leader's core is still loading; make sure there + // was one success assertEquals(1, timesReplicated - previousTimesFailed); - } leaderJetty.stop(); - final TimeOut waitForLeaderToShutdown = new TimeOut(300, TimeUnit.SECONDS, TimeSource.NANO_TIME); - waitForLeaderToShutdown.waitFor - ("Gave up after waiting an obscene amount of time for leader to shut down", - () -> leaderJetty.isStopped() ); + final TimeOut waitForLeaderToShutdown = + new TimeOut(300, TimeUnit.SECONDS, TimeSource.NANO_TIME); + waitForLeaderToShutdown.waitFor( + "Gave up after waiting an obscene amount of time for leader to shut down", + () -> leaderJetty.isStopped()); - for(int retries=0; ;retries++) { + for (int retries = 0; ; retries++) { Thread.yield(); // might not be necessary at all // poll interval on follower is 1 second, so we just sleep for a few seconds Thread.sleep(2000); - NamedList followerDetails=null; + NamedList followerDetails = null; try { followerDetails = getFollowerDetails(); - int failed = Integer.parseInt(getStringOrNull(followerDetails,"timesFailed")); + int failed = Integer.parseInt(getStringOrNull(followerDetails, "timesFailed")); if (previousTimesFailed != null) { assertTrue(failed > previousTimesFailed); } - assertEquals(1, Integer.parseInt(getStringOrNull(followerDetails,"timesIndexReplicated")) - failed); + assertEquals( 
+ 1, + Integer.parseInt(getStringOrNull(followerDetails, "timesIndexReplicated")) - failed); break; } catch (NumberFormatException | AssertionError notYet) { if (log.isInfoEnabled()) { - log.info("{}th attempt failure on {} details are {}", retries + 1, notYet, followerDetails); // nowarn + log.info( + "{}th attempt failure on {} details are {}", + retries + 1, + notYet, + followerDetails); // nowarn } - if (retries>9) { + if (retries > 9) { log.error("giving up: ", notYet); throw notYet; } @@ -737,7 +764,7 @@ public void doTestIndexFetchOnLeaderRestart() throws Exception { // poll interval on follower is 1 second, so we just sleep for a few seconds Thread.sleep(2000); - //get docs from follower and assert that they are still the same as before + // get docs from follower and assert that they are still the same as before followerQueryRsp = rQuery(nDocs, "*:*", followerClient); followerQueryResult = (SolrDocumentList) followerQueryRsp.get("response"); assertEquals(nDocs, numFound(followerQueryRsp)); @@ -778,21 +805,22 @@ private NamedList getFollowerDetails() throws SolrServerException, IOExc @Test public void doTestIndexFetchWithLeaderUrl() throws Exception { - //change solrconfig on follower - //this has no entry for pollinginterval + // change solrconfig on follower + // this has no entry for pollinginterval follower.setTestPort(leaderJetty.getLocalPort()); follower.copyConfigFile(CONF_DIR + "solrconfig-follower1.xml", "solrconfig.xml"); followerJetty.stop(); followerJetty = createAndStartJetty(follower); followerClient.close(); - followerClient = ReplicationTestHelper.createNewSolrClient(buildUrl(followerJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME); + followerClient = + ReplicationTestHelper.createNewSolrClient( + buildUrl(followerJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME); leaderClient.deleteByQuery("*:*"); followerClient.deleteByQuery("*:*"); followerClient.commit(); nDocs--; - for (int i = 0; i < nDocs; i++) - index(leaderClient, "id", i, "name", "name = " + i); + for (int i = 0; i < nDocs; i++) index(leaderClient, "id", i, "name", "name = " + i); // make sure prepareCommit doesn't mess up commit (SOLR-3938) @@ -811,35 +839,46 @@ public void doTestIndexFetchWithLeaderUrl() throws Exception { } // index fetch - String leaderUrl = buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME + ReplicationHandler.PATH+"?command=fetchindex&" + urlKey + "="; - leaderUrl += buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME + ReplicationHandler.PATH; + String leaderUrl = + buildUrl(followerJetty.getLocalPort()) + + "/" + + DEFAULT_TEST_CORENAME + + ReplicationHandler.PATH + + "?command=fetchindex&" + + urlKey + + "="; + leaderUrl += + buildUrl(leaderJetty.getLocalPort()) + + "/" + + DEFAULT_TEST_CORENAME + + ReplicationHandler.PATH; URL url = new URL(leaderUrl); InputStream stream = url.openStream(); stream.close(); - //get docs from follower and check if number is equal to leader + // get docs from follower and check if number is equal to leader NamedList followerQueryRsp = rQuery(nDocs, "*:*", followerClient); SolrDocumentList followerQueryResult = (SolrDocumentList) followerQueryRsp.get("response"); assertEquals(nDocs, followerQueryResult.getNumFound()); - //compare results - String cmp = BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); + // compare results + String cmp = + BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); assertEquals(null, cmp); // 
index fetch from the follower to the leader - for (int i = nDocs; i < nDocs + 3; i++) - index(followerClient, "id", i, "name", "name = " + i); + for (int i = nDocs; i < nDocs + 3; i++) index(followerClient, "id", i, "name", "name = " + i); followerClient.commit(); pullFromTo(followerJetty, leaderJetty); rQuery(nDocs + 3, "*:*", leaderClient); - //get docs from follower and check if number is equal to leader + // get docs from follower and check if number is equal to leader followerQueryRsp = rQuery(nDocs + 3, "*:*", followerClient); followerQueryResult = (SolrDocumentList) followerQueryRsp.get("response"); assertEquals(nDocs + 3, followerQueryResult.getNumFound()); - //compare results + // compare results leaderQueryRsp = rQuery(nDocs + 3, "*:*", leaderClient); leaderQueryResult = (SolrDocumentList) leaderQueryRsp.get("response"); cmp = BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); @@ -849,11 +888,11 @@ public void doTestIndexFetchWithLeaderUrl() throws Exception { pullFromTo(followerJetty, leaderJetty); - //get docs from follower and check if number is equal to leader + // get docs from follower and check if number is equal to leader followerQueryRsp = rQuery(nDocs + 3, "*:*", followerClient); followerQueryResult = (SolrDocumentList) followerQueryRsp.get("response"); assertEquals(nDocs + 3, followerQueryResult.getNumFound()); - //compare results + // compare results leaderQueryRsp = rQuery(nDocs + 3, "*:*", leaderClient); leaderQueryResult = (SolrDocumentList) leaderQueryRsp.get("response"); cmp = BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); @@ -862,19 +901,20 @@ public void doTestIndexFetchWithLeaderUrl() throws Exception { assertVersions(leaderClient, followerClient); // now force a new index directory - for (int i = nDocs + 3; i < nDocs + 7; i++) + for (int i = nDocs + 3; i < nDocs + 7; i++) { index(leaderClient, "id", i, "name", "name = " + i); + } leaderClient.commit(); pullFromTo(followerJetty, leaderJetty); rQuery((int) followerQueryResult.getNumFound(), "*:*", leaderClient); - //get docs from follower and check if number is equal to leader + // get docs from follower and check if number is equal to leader followerQueryRsp = rQuery(nDocs + 3, "*:*", followerClient); followerQueryResult = (SolrDocumentList) followerQueryRsp.get("response"); assertEquals(nDocs + 3, followerQueryResult.getNumFound()); - //compare results + // compare results leaderQueryRsp = rQuery(nDocs + 3, "*:*", leaderClient); leaderQueryResult = (SolrDocumentList) leaderQueryRsp.get("response"); cmp = BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); @@ -883,11 +923,11 @@ public void doTestIndexFetchWithLeaderUrl() throws Exception { assertVersions(leaderClient, followerClient); pullFromTo(followerJetty, leaderJetty); - //get docs from follower and check if number is equal to leader + // get docs from follower and check if number is equal to leader followerQueryRsp = rQuery(nDocs + 3, "*:*", followerClient); followerQueryResult = (SolrDocumentList) followerQueryRsp.get("response"); assertEquals(nDocs + 3, followerQueryResult.getNumFound()); - //compare results + // compare results leaderQueryRsp = rQuery(nDocs + 3, "*:*", leaderClient); leaderQueryResult = (SolrDocumentList) leaderQueryRsp.get("response"); cmp = BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); @@ -903,9 +943,7 @@ public void doTestIndexFetchWithLeaderUrl() throws Exception { 
checkForSingleIndex(followerJetty); } - @Test - //commented 20-Sep-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 17-Aug-2018 public void doTestStressReplication() throws Exception { // change solrconfig on follower // this has no entry for pollinginterval @@ -928,14 +966,15 @@ public void doTestStressReplication() throws Exception { followerJetty.stop(); followerJetty = createAndStartJetty(follower); followerClient.close(); - followerClient = createNewSolrClient(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + followerClient = + createNewSolrClient(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); - leader.copyConfigFile(CONF_DIR + "solrconfig-leader3.xml", - "solrconfig.xml"); + leader.copyConfigFile(CONF_DIR + "solrconfig-leader3.xml", "solrconfig.xml"); leaderJetty.stop(); leaderJetty = createAndStartJetty(leader); leaderClient.close(); - leaderClient = createNewSolrClient(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + leaderClient = + createNewSolrClient(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); leaderClient.deleteByQuery("*:*"); followerClient.deleteByQuery("*:*"); @@ -951,8 +990,8 @@ public void doTestStressReplication() throws Exception { if (confCoreReload) { // toggle the schema file used - followerSchema = followerSchema.equals(FOLLOWER_SCHEMA_1) ? - FOLLOWER_SCHEMA_2 : FOLLOWER_SCHEMA_1; + followerSchema = + followerSchema.equals(FOLLOWER_SCHEMA_1) ? FOLLOWER_SCHEMA_2 : FOLLOWER_SCHEMA_1; leader.copyConfigFile(CONF_DIR + followerSchema, "schema.xml"); } @@ -965,25 +1004,23 @@ public void doTestStressReplication() throws Exception { leaderClient.commit(); NamedList leaderQueryRsp = rQuery(totalDocs, "*:*", leaderClient); - SolrDocumentList leaderQueryResult = (SolrDocumentList) leaderQueryRsp - .get("response"); + SolrDocumentList leaderQueryResult = (SolrDocumentList) leaderQueryRsp.get("response"); assertEquals(totalDocs, leaderQueryResult.getNumFound()); // index fetch Date followerCoreStart = watchCoreStartAt(followerClient, 30 * 1000, null); - pullFromTo(leaderJetty, followerJetty); + pullFromTo(leaderJetty, followerJetty); if (confCoreReload) { watchCoreStartAt(followerClient, 30 * 1000, followerCoreStart); } // get docs from follower and check if number is equal to leader NamedList followerQueryRsp = rQuery(totalDocs, "*:*", followerClient); - SolrDocumentList followerQueryResult = (SolrDocumentList) followerQueryRsp - .get("response"); + SolrDocumentList followerQueryResult = (SolrDocumentList) followerQueryRsp.get("response"); assertEquals(totalDocs, followerQueryResult.getNumFound()); // compare results - String cmp = BaseDistributedSearchTestCase.compare(leaderQueryResult, - followerQueryResult, 0, null); + String cmp = + BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); assertEquals(null, cmp); assertVersions(leaderClient, followerClient); @@ -1001,7 +1038,6 @@ public void doTestStressReplication() throws Exception { } followerClient.commit(); } - } } finally { @@ -1030,7 +1066,9 @@ private void checkForSingleIndex(JettySolrRunner jetty, boolean afterReload) { // one for data, one for the index under data and one for the snapshot metadata. 
// we also allow one extra index dir - it may not be removed until the core is closed if (afterReload) { - assertTrue(livePaths.toString() + ":" + livePaths.size(), 3 == livePaths.size() || 4 == livePaths.size()); + assertTrue( + livePaths.toString() + ":" + livePaths.size(), + 3 == livePaths.size() || 4 == livePaths.size()); } else { assertTrue(livePaths.toString() + ":" + livePaths.size(), 3 == livePaths.size()); } @@ -1043,27 +1081,33 @@ private void checkForSingleIndex(JettySolrRunner jetty, boolean afterReload) { int cnt = indexDirCount(ddir); // if after reload, there may be 2 index dirs while the reloaded SolrCore closes. if (afterReload) { - assertTrue("found:" + cnt + Arrays.asList(new File(ddir).list()).toString(), 1 == cnt || 2 == cnt); + assertTrue( + "found:" + cnt + Arrays.asList(new File(ddir).list()).toString(), + 1 == cnt || 2 == cnt); } else { assertTrue("found:" + cnt + Arrays.asList(new File(ddir).list()).toString(), 1 == cnt); } - } } } private int indexDirCount(String ddir) { - String[] list = new File(ddir).list(new FilenameFilter() { - @Override - public boolean accept(File dir, String name) { - File f = new File(dir, name); - return f.isDirectory() && !SolrSnapshotMetaDataManager.SNAPSHOT_METADATA_DIR.equals(name); - } - }); + String[] list = + new File(ddir) + .list( + new FilenameFilter() { + @Override + public boolean accept(File dir, String name) { + File f = new File(dir, name); + return f.isDirectory() + && !SolrSnapshotMetaDataManager.SNAPSHOT_METADATA_DIR.equals(name); + } + }); return list.length; } - public static void pullFromTo(JettySolrRunner srcSolr, JettySolrRunner destSolr) throws IOException { + public static void pullFromTo(JettySolrRunner srcSolr, JettySolrRunner destSolr) + throws IOException { String srcUrl = buildUrl(srcSolr.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME; String destUrl = buildUrl(destSolr.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME; ReplicationTestHelper.pullFromTo(srcUrl, destUrl); @@ -1077,21 +1121,23 @@ public void doTestRepeater() throws Exception { followerJetty.stop(); followerJetty = createAndStartJetty(follower); followerClient.close(); - followerClient = createNewSolrClient(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + followerClient = + createNewSolrClient(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); try { - repeater = new SolrInstance(createTempDir("solr-instance").toFile(), "repeater", leaderJetty.getLocalPort()); + repeater = + new SolrInstance( + createTempDir("solr-instance").toFile(), "repeater", leaderJetty.getLocalPort()); repeater.setUp(); - repeater.copyConfigFile(CONF_DIR + "solrconfig-repeater.xml", - "solrconfig.xml"); + repeater.copyConfigFile(CONF_DIR + "solrconfig-repeater.xml", "solrconfig.xml"); repeaterJetty = createAndStartJetty(repeater); if (repeaterClient != null) { repeaterClient.close(); } - repeaterClient = createNewSolrClient(buildUrl(repeaterJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + repeaterClient = + createNewSolrClient(buildUrl(repeaterJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); - for (int i = 0; i < 3; i++) - index(leaderClient, "id", i, "name", "name = " + i); + for (int i = 0; i < 3; i++) index(leaderClient, "id", i, "name", "name = " + i); leaderClient.commit(); @@ -1106,8 +1152,7 @@ public void doTestRepeater() throws Exception { assertVersions(leaderClient, repeaterClient); assertVersions(repeaterClient, followerClient); - for (int i = 0; i < 4; i++) - index(repeaterClient, "id", i, "name", "name = " + 
i); + for (int i = 0; i < 4; i++) index(repeaterClient, "id", i, "name", "name = " + i); repeaterClient.commit(); pullFromTo(leaderJetty, repeaterJetty); @@ -1118,8 +1163,7 @@ public void doTestRepeater() throws Exception { rQuery(3, "*:*", followerClient); - for (int i = 3; i < 6; i++) - index(leaderClient, "id", i, "name", "name = " + i); + for (int i = 3; i < 6; i++) index(leaderClient, "id", i, "name", "name = " + i); leaderClient.commit(); @@ -1140,12 +1184,11 @@ public void doTestRepeater() throws Exception { repeaterClient.close(); } } - } @Test public void doTestReplicateAfterStartup() throws Exception { - //stop follower + // stop follower followerJetty.stop(); nDocs--; @@ -1153,19 +1196,17 @@ public void doTestReplicateAfterStartup() throws Exception { leaderClient.commit(); - - //change solrconfig having 'replicateAfter startup' option on leader - leader.copyConfigFile(CONF_DIR + "solrconfig-leader2.xml", - "solrconfig.xml"); + // change solrconfig having 'replicateAfter startup' option on leader + leader.copyConfigFile(CONF_DIR + "solrconfig-leader2.xml", "solrconfig.xml"); leaderJetty.stop(); leaderJetty = createAndStartJetty(leader); leaderClient.close(); - leaderClient = createNewSolrClient(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + leaderClient = + createNewSolrClient(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); - for (int i = 0; i < nDocs; i++) - index(leaderClient, "id", i, "name", "name = " + i); + for (int i = 0; i < nDocs; i++) index(leaderClient, "id", i, "name", "name = " + i); leaderClient.commit(); @@ -1173,24 +1214,24 @@ public void doTestReplicateAfterStartup() throws Exception { SolrDocumentList leaderQueryResult = (SolrDocumentList) leaderQueryRsp.get("response"); assertEquals(nDocs, leaderQueryResult.getNumFound()); - follower.setTestPort(leaderJetty.getLocalPort()); follower.copyConfigFile(follower.getSolrConfigFile(), "solrconfig.xml"); - //start follower + // start follower followerJetty = createAndStartJetty(follower); followerClient.close(); - followerClient = createNewSolrClient(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + followerClient = + createNewSolrClient(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); - //get docs from follower and check if number is equal to leader + // get docs from follower and check if number is equal to leader NamedList followerQueryRsp = rQuery(nDocs, "*:*", followerClient); SolrDocumentList followerQueryResult = (SolrDocumentList) followerQueryRsp.get("response"); assertEquals(nDocs, followerQueryResult.getNumFound()); - //compare results - String cmp = BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); + // compare results + String cmp = + BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); assertEquals(null, cmp); - } @Test @@ -1207,17 +1248,16 @@ public void doTestReplicateAfterStartupWithNoActivity() throws Exception { leaderClient.commit(); // change solrconfig having 'replicateAfter startup' option on leader - leader.copyConfigFile(CONF_DIR + "solrconfig-leader2.xml", - "solrconfig.xml"); + leader.copyConfigFile(CONF_DIR + "solrconfig-leader2.xml", "solrconfig.xml"); leaderJetty.stop(); leaderJetty = createAndStartJetty(leader); leaderClient.close(); - leaderClient = createNewSolrClient(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + leaderClient = + createNewSolrClient(buildUrl(leaderJetty.getLocalPort()) + "/" + 
DEFAULT_TEST_CORENAME); - for (int i = 0; i < nDocs; i++) - index(leaderClient, "id", i, "name", "name = " + i); + for (int i = 0; i < nDocs; i++) index(leaderClient, "id", i, "name", "name = " + i); leaderClient.commit(); @@ -1230,8 +1270,7 @@ public void doTestReplicateAfterStartupWithNoActivity() throws Exception { // leaderClient = createNewSolrClient(leaderJetty.getLocalPort()); NamedList leaderQueryRsp = rQuery(nDocs, "*:*", leaderClient); - SolrDocumentList leaderQueryResult = (SolrDocumentList) leaderQueryRsp - .get("response"); + SolrDocumentList leaderQueryResult = (SolrDocumentList) leaderQueryRsp.get("response"); assertEquals(nDocs, leaderQueryResult.getNumFound()); follower.setTestPort(leaderJetty.getLocalPort()); @@ -1240,17 +1279,17 @@ public void doTestReplicateAfterStartupWithNoActivity() throws Exception { // start follower followerJetty = createAndStartJetty(follower); followerClient.close(); - followerClient = createNewSolrClient(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + followerClient = + createNewSolrClient(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); // get docs from follower and check if number is equal to leader NamedList followerQueryRsp = rQuery(nDocs, "*:*", followerClient); - SolrDocumentList followerQueryResult = (SolrDocumentList) followerQueryRsp - .get("response"); + SolrDocumentList followerQueryResult = (SolrDocumentList) followerQueryRsp.get("response"); assertEquals(nDocs, followerQueryResult.getNumFound()); // compare results - String cmp = BaseDistributedSearchTestCase.compare(leaderQueryResult, - followerQueryResult, 0, null); + String cmp = + BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); assertEquals(null, cmp); } finally { @@ -1262,23 +1301,21 @@ public void doTestReplicateAfterStartupWithNoActivity() throws Exception { public void doTestReplicateAfterCoreReload() throws Exception { int docs = TEST_NIGHTLY ? 
200000 : 10; - //stop follower + // stop follower followerJetty.stop(); - - //change solrconfig having 'replicateAfter startup' option on leader - leader.copyConfigFile(CONF_DIR + "solrconfig-leader3.xml", - "solrconfig.xml"); + // change solrconfig having 'replicateAfter startup' option on leader + leader.copyConfigFile(CONF_DIR + "solrconfig-leader3.xml", "solrconfig.xml"); leaderJetty.stop(); leaderJetty = createAndStartJetty(leader); leaderClient.close(); - leaderClient = createNewSolrClient(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + leaderClient = + createNewSolrClient(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); leaderClient.deleteByQuery("*:*"); - for (int i = 0; i < docs; i++) - index(leaderClient, "id", i, "name", "name = " + i); + for (int i = 0; i < docs; i++) index(leaderClient, "id", i, "name", "name = " + i); leaderClient.commit(); @@ -1289,18 +1326,20 @@ public void doTestReplicateAfterCoreReload() throws Exception { follower.setTestPort(leaderJetty.getLocalPort()); follower.copyConfigFile(follower.getSolrConfigFile(), "solrconfig.xml"); - //start follower + // start follower followerJetty = createAndStartJetty(follower); followerClient.close(); - followerClient = createNewSolrClient(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + followerClient = + createNewSolrClient(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); - //get docs from follower and check if number is equal to leader + // get docs from follower and check if number is equal to leader NamedList followerQueryRsp = rQuery(docs, "*:*", followerClient); SolrDocumentList followerQueryResult = (SolrDocumentList) followerQueryRsp.get("response"); assertEquals(docs, followerQueryResult.getNumFound()); - //compare results - String cmp = BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); + // compare results + String cmp = + BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); assertEquals(null, cmp); Object version = getIndexVersion(leaderClient).get("indexversion"); @@ -1318,11 +1357,10 @@ public void doTestReplicateAfterCoreReload() throws Exception { leaderQueryResult = (SolrDocumentList) resp.get("response"); assertEquals(docs + 2, leaderQueryResult.getNumFound()); - //get docs from follower and check if number is equal to leader + // get docs from follower and check if number is equal to leader followerQueryRsp = rQuery(docs + 2, "*:*", followerClient); followerQueryResult = (SolrDocumentList) followerQueryRsp.get("response"); assertEquals(docs + 2, followerQueryResult.getNumFound()); - } @Test @@ -1331,8 +1369,7 @@ public void doTestIndexAndConfigAliasReplication() throws Exception { clearIndexWithReplication(); nDocs--; - for (int i = 0; i < nDocs; i++) - index(leaderClient, "id", i, "name", "name = " + i); + for (int i = 0; i < nDocs; i++) index(leaderClient, "id", i, "name", "name = " + i); leaderClient.commit(); @@ -1340,39 +1377,38 @@ public void doTestIndexAndConfigAliasReplication() throws Exception { SolrDocumentList leaderQueryResult = (SolrDocumentList) leaderQueryRsp.get("response"); assertEquals(nDocs, leaderQueryResult.getNumFound()); - //get docs from follower and check if number is equal to leader + // get docs from follower and check if number is equal to leader NamedList followerQueryRsp = rQuery(nDocs, "*:*", followerClient); SolrDocumentList followerQueryResult = (SolrDocumentList) followerQueryRsp.get("response"); 
assertEquals(nDocs, followerQueryResult.getNumFound()); - //compare results - String cmp = BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); + // compare results + String cmp = + BaseDistributedSearchTestCase.compare(leaderQueryResult, followerQueryResult, 0, null); assertEquals(null, cmp); - //start config files replication test - //clear leader index + // start config files replication test + // clear leader index leaderClient.deleteByQuery("*:*"); leaderClient.commit(); rQuery(0, "*:*", leaderClient); // sanity check w/retry - //change solrconfig on leader - leader.copyConfigFile(CONF_DIR + "solrconfig-leader1.xml", - "solrconfig.xml"); + // change solrconfig on leader + leader.copyConfigFile(CONF_DIR + "solrconfig-leader1.xml", "solrconfig.xml"); - //change schema on leader - leader.copyConfigFile(CONF_DIR + "schema-replication2.xml", - "schema.xml"); + // change schema on leader + leader.copyConfigFile(CONF_DIR + "schema-replication2.xml", "schema.xml"); - //keep a copy of the new schema - leader.copyConfigFile(CONF_DIR + "schema-replication2.xml", - "schema-replication2.xml"); + // keep a copy of the new schema + leader.copyConfigFile(CONF_DIR + "schema-replication2.xml", "schema-replication2.xml"); leaderJetty.stop(); leaderJetty = createAndStartJetty(leader); leaderClient.close(); - leaderClient = createNewSolrClient(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + leaderClient = + createNewSolrClient(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); follower.setTestPort(leaderJetty.getLocalPort()); follower.copyConfigFile(follower.getSolrConfigFile(), "solrconfig.xml"); @@ -1380,22 +1416,23 @@ public void doTestIndexAndConfigAliasReplication() throws Exception { followerJetty.stop(); followerJetty = createAndStartJetty(follower); followerClient.close(); - followerClient = createNewSolrClient(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + followerClient = + createNewSolrClient(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); followerClient.deleteByQuery("*:*"); followerClient.commit(); rQuery(0, "*:*", followerClient); // sanity check w/retry // record collection1's start time on follower - final Date followerStartTime = watchCoreStartAt(followerClient, 30*1000, null); + final Date followerStartTime = watchCoreStartAt(followerClient, 30 * 1000, null); - //add a doc with new field and commit on leader to trigger index fetch from follower. + // add a doc with new field and commit on leader to trigger index fetch from follower. 
index(leaderClient, "id", "2000", "name", "name = " + 2000, "newname", "n2000"); leaderClient.commit(); - rQuery(1, "newname:n2000", leaderClient); // sanity check + rQuery(1, "newname:n2000", leaderClient); // sanity check // wait for follower to reload core by watching updated startTime - watchCoreStartAt(followerClient, 30*1000, followerStartTime); + watchCoreStartAt(followerClient, 30 * 1000, followerStartTime); NamedList leaderQueryRsp2 = rQuery(1, "id:2000", leaderClient); SolrDocumentList leaderQueryResult2 = (SolrDocumentList) leaderQueryRsp2.get("response"); @@ -1412,7 +1449,7 @@ public void doTestIndexAndConfigAliasReplication() throws Exception { @Test public void testRateLimitedReplication() throws Exception { - //clean index + // clean index leaderClient.deleteByQuery("*:*"); followerClient.deleteByQuery("*:*"); leaderClient.commit(); @@ -1421,21 +1458,22 @@ public void testRateLimitedReplication() throws Exception { leaderJetty.stop(); followerJetty.stop(); - //Start leader with the new solrconfig + // Start leader with the new solrconfig leader.copyConfigFile(CONF_DIR + "solrconfig-leader-throttled.xml", "solrconfig.xml"); useFactory(null); leaderJetty = createAndStartJetty(leader); leaderClient.close(); - leaderClient = createNewSolrClient(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + leaderClient = + createNewSolrClient(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); - //index docs + // index docs final int totalDocs = TestUtil.nextInt(random(), 17, 53); for (int i = 0; i < totalDocs; i++) - index(leaderClient, "id", i, "name", TestUtil.randomSimpleString(random(), 1000 , 5000)); + index(leaderClient, "id", i, "name", TestUtil.randomSimpleString(random(), 1000, 5000)); leaderClient.commit(); - //Check Index Size + // Check Index Size String dataDir = leader.getDataDir(); leaderClient.close(); leaderJetty.stop(); @@ -1443,33 +1481,36 @@ public void testRateLimitedReplication() throws Exception { Directory dir = FSDirectory.open(Paths.get(dataDir).resolve("index")); String[] files = dir.listAll(); long totalBytes = 0; - for(String file : files) { + for (String file : files) { totalBytes += dir.fileLength(file); } - float approximateTimeInSeconds = Math.round( totalBytes/1024/1024/0.1 ); // maxWriteMBPerSec=0.1 in solrconfig + float approximateTimeInSeconds = + Math.round(totalBytes / 1024 / 1024 / 0.1); // maxWriteMBPerSec=0.1 in solrconfig - //Start again and replicate the data + // Start again and replicate the data useFactory(null); leaderJetty = createAndStartJetty(leader); - leaderClient = createNewSolrClient(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + leaderClient = + createNewSolrClient(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); - //start follower + // start follower follower.setTestPort(leaderJetty.getLocalPort()); follower.copyConfigFile(CONF_DIR + "solrconfig-follower1.xml", "solrconfig.xml"); followerJetty = createAndStartJetty(follower); followerClient.close(); - followerClient = createNewSolrClient(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + followerClient = + createNewSolrClient(buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); long startTime = System.nanoTime(); - pullFromTo(leaderJetty, followerJetty); + pullFromTo(leaderJetty, followerJetty); - //Add a few more docs in the leader. 
Just to make sure that we are replicating the correct index point - //These extra docs should not get replicated + // Add a few more docs in the leader. Just to make sure that we are replicating the correct + // index point. These extra docs should not get replicated new Thread(new AddExtraDocs(leaderClient, totalDocs)).start(); - //Wait and make sure that it actually replicated correctly. + // Wait and make sure that it actually replicated correctly. NamedList followerQueryRsp = rQuery(totalDocs, "*:*", followerClient); SolrDocumentList followerQueryResult = (SolrDocumentList) followerQueryRsp.get("response"); assertEquals(totalDocs, followerQueryResult.getNumFound()); @@ -1478,22 +1519,34 @@ public void testRateLimitedReplication() throws Exception { long timeTakenInSeconds = TimeUnit.SECONDS.convert(timeTaken, TimeUnit.NANOSECONDS); - //Let's make sure it took more than approximateTimeInSeconds to make sure that it was throttled - log.info("approximateTimeInSeconds = {} timeTakenInSeconds = {}" - , approximateTimeInSeconds, timeTakenInSeconds); + // Let's make sure it took more than approximateTimeInSeconds to make sure that it was throttled + log.info( + "approximateTimeInSeconds = {} timeTakenInSeconds = {}", + approximateTimeInSeconds, + timeTakenInSeconds); assertTrue(timeTakenInSeconds - approximateTimeInSeconds > 0); } @Test public void doTestIllegalFilePaths() throws Exception { - // Loop through the file=, cf=, tlogFile= params and prove that it throws exception for path traversal attempts + // Loop through the file=, cf=, tlogFile= params and prove that it throws exception for path + // traversal attempts String absFile = Paths.get("foo").toAbsolutePath().toString(); - List illegalFilenames = Arrays.asList(absFile, "../dir/traversal", "illegal\rfile\nname\t"); - List params = Arrays.asList(ReplicationHandler.FILE, ReplicationHandler.CONF_FILE_SHORT, ReplicationHandler.TLOG_FILE); + List illegalFilenames = + Arrays.asList(absFile, "../dir/traversal", "illegal\rfile\nname\t"); + List params = + Arrays.asList( + ReplicationHandler.FILE, + ReplicationHandler.CONF_FILE_SHORT, + ReplicationHandler.TLOG_FILE); for (String param : params) { for (String filename : illegalFilenames) { - expectThrows(Exception.class, () -> - invokeReplicationCommand(buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME, "filecontent&" + param + "=" + filename)); + expectThrows( + Exception.class, + () -> + invokeReplicationCommand( + buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME, + "filecontent&" + param + "=" + filename)); } } } @@ -1504,7 +1557,9 @@ public void testFileListShouldReportErrorsWhenTheyOccur() throws Exception { q.add("qt", "/replication") .add("wt", "json") .add("command", "filelist") - .add("generation", "-2"); // A 'generation' value not matching any commit point should cause error. + .add( + "generation", + "-2"); // A 'generation' value not matching any commit point should cause error. 
QueryResponse response = followerClient.query(q); NamedList resp = response.getResponse(); assertNotNull(resp); @@ -1513,7 +1568,7 @@ public void testFileListShouldReportErrorsWhenTheyOccur() throws Exception { } @Test - public void testFetchIndexShouldReportErrorsWhenTheyOccur() throws Exception { + public void testFetchIndexShouldReportErrorsWhenTheyOccur() throws Exception { int leaderPort = leaderJetty.getLocalPort(); leaderJetty.stop(); SolrQuery q = new SolrQuery(); @@ -1525,17 +1580,22 @@ public void testFetchIndexShouldReportErrorsWhenTheyOccur() throws Exception { QueryResponse response = followerClient.query(q); NamedList resp = response.getResponse(); assertNotNull(resp); - assertEquals("Fetch index with wait=true should have returned an error response", "ERROR", resp.get("status")); + assertEquals( + "Fetch index with wait=true should have returned an error response", + "ERROR", + resp.get("status")); } @Test public void testShouldReportErrorWhenRequiredCommandArgMissing() throws Exception { SolrQuery q = new SolrQuery(); - q.add("qt", "/replication") - .add("wt", "json"); - SolrException thrown = expectThrows(SolrException.class, () -> { - followerClient.query(q); - }); + q.add("qt", "/replication").add("wt", "json"); + SolrException thrown = + expectThrows( + SolrException.class, + () -> { + followerClient.query(q); + }); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, thrown.code()); assertThat(thrown.getMessage(), containsString("Missing required parameter: command")); } @@ -1543,12 +1603,13 @@ public void testShouldReportErrorWhenRequiredCommandArgMissing() throws Exceptio @Test public void testShouldReportErrorWhenDeletingBackupButNameMissing() { SolrQuery q = new SolrQuery(); - q.add("qt", "/replication") - .add("wt", "json") - .add("command", "deletebackup"); - SolrException thrown = expectThrows(SolrException.class, () -> { - followerClient.query(q); - }); + q.add("qt", "/replication").add("wt", "json").add("command", "deletebackup"); + SolrException thrown = + expectThrows( + SolrException.class, + () -> { + followerClient.query(q); + }); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, thrown.code()); assertThat(thrown.getMessage(), containsString("Missing required parameter: name")); } @@ -1562,45 +1623,63 @@ public void testEmptyBackups() throws Exception { { // initial request w/o any committed docs final String backupName = "empty_backup1"; - final GenericSolrRequest req = new GenericSolrRequest - (SolrRequest.METHOD.GET, "/replication", - params("command", "backup", - "location", backupDir.getAbsolutePath(), - "name", backupName)); + final GenericSolrRequest req = + new GenericSolrRequest( + SolrRequest.METHOD.GET, + "/replication", + params( + "command", + "backup", + "location", + backupDir.getAbsolutePath(), + "name", + backupName)); final TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); final SimpleSolrResponse rsp = req.process(leaderClient); final String dirName = backupStatus.waitForBackupSuccess(backupName, timeout); - assertEquals("Did not get expected dir name for backup, did API change?", - "snapshot.empty_backup1", dirName); - assertTrue(dirName + " doesn't exist in expected location for backup " + backupName, - new File(backupDir, dirName).exists()); + assertEquals( + "Did not get expected dir name for backup, did API change?", + "snapshot.empty_backup1", + dirName); + assertTrue( + dirName + " doesn't exist in expected location for backup " + backupName, + new File(backupDir, dirName).exists()); } 
index(leaderClient, "id", "1", "name", "foo"); { // second backup w/uncommitted doc final String backupName = "empty_backup2"; - final GenericSolrRequest req = new GenericSolrRequest - (SolrRequest.METHOD.GET, "/replication", - params("command", "backup", - "location", backupDir.getAbsolutePath(), - "name", backupName)); + final GenericSolrRequest req = + new GenericSolrRequest( + SolrRequest.METHOD.GET, + "/replication", + params( + "command", + "backup", + "location", + backupDir.getAbsolutePath(), + "name", + backupName)); final TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); final SimpleSolrResponse rsp = req.process(leaderClient); final String dirName = backupStatus.waitForBackupSuccess(backupName, timeout); - assertEquals("Did not get expected dir name for backup, did API change?", - "snapshot.empty_backup2", dirName); - assertTrue(dirName + " doesn't exist in expected location for backup " + backupName, - new File(backupDir, dirName).exists()); + assertEquals( + "Did not get expected dir name for backup, did API change?", + "snapshot.empty_backup2", + dirName); + assertTrue( + dirName + " doesn't exist in expected location for backup " + backupName, + new File(backupDir, dirName).exists()); } // confirm backups really are empty - for (int i = 1; i <=2; i++) { - final String name = "snapshot.empty_backup"+i; + for (int i = 1; i <= 2; i++) { + final String name = "snapshot.empty_backup" + i; try (Directory dir = new NIOFSDirectory(new File(backupDir, name).toPath()); - IndexReader reader = DirectoryReader.open(dir)) { + IndexReader reader = DirectoryReader.open(dir)) { assertEquals(name + " is not empty", 0, reader.numDocs()); } } @@ -1609,16 +1688,33 @@ public void testGetBoolWithBackwardCompatibility() { assertTrue(ReplicationHandler.getBoolWithBackwardCompatibility(params(), "foo", "bar", true)); assertFalse(ReplicationHandler.getBoolWithBackwardCompatibility(params(), "foo", "bar", false)); - assertTrue(ReplicationHandler.getBoolWithBackwardCompatibility(params("foo", "true"), "foo", "bar", false)); - assertTrue(ReplicationHandler.getBoolWithBackwardCompatibility(params("bar", "true"), "foo", "bar", false)); - assertTrue(ReplicationHandler.getBoolWithBackwardCompatibility(params("foo", "true", "bar", "false"), "foo", "bar", false)); + assertTrue( + ReplicationHandler.getBoolWithBackwardCompatibility( + params("foo", "true"), "foo", "bar", false)); + assertTrue( + ReplicationHandler.getBoolWithBackwardCompatibility( + params("bar", "true"), "foo", "bar", false)); + assertTrue( + ReplicationHandler.getBoolWithBackwardCompatibility( + params("foo", "true", "bar", "false"), "foo", "bar", false)); } public void testGetObjectWithBackwardCompatibility() { - assertEquals("aaa", ReplicationHandler.getObjectWithBackwardCompatibility(params(), "foo", "bar", "aaa")); - assertEquals("bbb", ReplicationHandler.getObjectWithBackwardCompatibility(params("foo", "bbb"), "foo", "bar", "aaa")); - assertEquals("bbb", ReplicationHandler.getObjectWithBackwardCompatibility(params("bar", "bbb"), "foo", "bar", "aaa")); - assertEquals("bbb", ReplicationHandler.getObjectWithBackwardCompatibility(params("foo", "bbb", "bar", "aaa"), "foo", "bar", "aaa")); + assertEquals( + "aaa", + ReplicationHandler.getObjectWithBackwardCompatibility(params(), "foo", "bar", "aaa")); + assertEquals( + "bbb", + ReplicationHandler.getObjectWithBackwardCompatibility( + params("foo", "bbb"), "foo", "bar", "aaa")); + assertEquals( + "bbb", + 
ReplicationHandler.getObjectWithBackwardCompatibility( + params("bar", "bbb"), "foo", "bar", "aaa")); + assertEquals( + "bbb", + ReplicationHandler.getObjectWithBackwardCompatibility( + params("foo", "bbb", "bar", "aaa"), "foo", "bar", "aaa")); assertNull(ReplicationHandler.getObjectWithBackwardCompatibility(params(), "foo", "bar", null)); } @@ -1631,11 +1727,11 @@ public void testGetObjectWithBackwardCompatibilityFromNL() { assertEquals("aaa", ReplicationHandler.getObjectWithBackwardCompatibility(nl, "foo", "bar")); } - private class AddExtraDocs implements Runnable { SolrClient leaderClient; int startId; + public AddExtraDocs(SolrClient leaderClient, int startId) { this.leaderClient = leaderClient; this.startId = startId; @@ -1646,21 +1742,26 @@ public void run() { final int totalDocs = TestUtil.nextInt(random(), 1, 10); for (int i = 0; i < totalDocs; i++) { try { - index(leaderClient, "id", i + startId, "name", TestUtil.randomSimpleString(random(), 1000, 5000)); + index( + leaderClient, + "id", + i + startId, + "name", + TestUtil.randomSimpleString(random(), 1000, 5000)); } catch (Exception e) { - //Do nothing. Wasn't able to add doc. + // Do nothing. Wasn't able to add doc. } } try { leaderClient.commit(); } catch (Exception e) { - //Do nothing. No extra doc got committed. + // Do nothing. No extra doc got committed. } } } private UpdateResponse emptyUpdate(SolrClient client, String... params) - throws SolrServerException, IOException { + throws SolrServerException, IOException { UpdateRequest req = new UpdateRequest(); req.setParams(params(params)); @@ -1668,18 +1769,19 @@ private UpdateResponse emptyUpdate(SolrClient client, String... params) } /** - * Polls the SolrCore stats using the specified client until the "startTime" - * time for collection is after the specified "min". Will loop for - * at most "timeout" milliseconds before throwing an assertion failure. + * Polls the SolrCore stats using the specified client until the "startTime" time for collection + * is after the specified "min". Will loop for at most "timeout" milliseconds before throwing an + * assertion failure. * * @param client The SolrClient to poll * @param timeout the max milliseconds to continue polling for - * @param min the startTime value must exceed this value before the method will return, if null this method will return the first startTime value encountered. + * @param min the startTime value must exceed this value before the method will return, if null + * this method will return the first startTime value encountered. 
* @return the startTime value of collection */ @SuppressWarnings("unchecked") - private Date watchCoreStartAt(SolrClient client, final long timeout, - final Date min) throws InterruptedException, IOException, SolrServerException { + private Date watchCoreStartAt(SolrClient client, final long timeout, final Date min) + throws InterruptedException, IOException, SolrServerException { final long sleepInterval = 200; long timeSlept = 0; @@ -1690,7 +1792,7 @@ private Date watchCoreStartAt(SolrClient client, final long timeout, req.setPath("/admin/cores"); try { NamedList data = adminClient.request(req); - for (String k : new String[]{"status", "collection1"}) { + for (String k : new String[] {"status", "collection1"}) { Object o = data.get(k); assertNotNull("core status rsp missing key: " + k, o); data = (NamedList) o; @@ -1721,7 +1823,7 @@ private void assertReplicationResponseSucceeded(NamedList response) { assertEquals("OK", response.get("status")); } - public static String buildUrl(int port) { + public static String buildUrl(int port) { return buildUrl(port, context); } -} \ No newline at end of file +} diff --git a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerBackup.java b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerBackup.java index fe641b3e1ad..5099adbb37c 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerBackup.java +++ b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerBackup.java @@ -29,7 +29,6 @@ import java.util.Iterator; import java.util.List; import java.util.Properties; - import org.apache.commons.io.IOUtils; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; @@ -53,16 +52,17 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@LuceneTestCase.SuppressCodecs({"SimpleText"}) // Backups do checksum validation against a footer value not present in 'SimpleText' -@SolrTestCaseJ4.SuppressSSL // Currently unknown why SSL does not work with this test +// Backups do checksum validation against a footer value not present in 'SimpleText' +@LuceneTestCase.SuppressCodecs({"SimpleText"}) +@SolrTestCaseJ4.SuppressSSL // Currently unknown why SSL does not work with this test public class TestReplicationHandlerBackup extends SolrJettyTestBase { JettySolrRunner leaderJetty; ReplicationTestHelper.SolrInstance leader = null; SolrClient leaderClient; - - private static final String CONF_DIR = "solr" + File.separator + "collection1" + File.separator + "conf" - + File.separator; + + private static final String CONF_DIR = + "solr" + File.separator + "collection1" + File.separator + "conf" + File.separator; private static String context = "/solr"; @@ -71,8 +71,11 @@ public class TestReplicationHandlerBackup extends SolrJettyTestBase { private static long docsSeed; // see indexDocs() private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private static JettySolrRunner createAndStartJetty(ReplicationTestHelper.SolrInstance instance) throws Exception { - FileUtils.copyFile(new File(SolrTestCaseJ4.TEST_HOME(), "solr.xml"), new File(instance.getHomeDir(), "solr.xml")); + private static JettySolrRunner createAndStartJetty(ReplicationTestHelper.SolrInstance instance) + throws Exception { + FileUtils.copyFile( + new File(SolrTestCaseJ4.TEST_HOME(), "solr.xml"), + new File(instance.getHomeDir(), "solr.xml")); Properties nodeProperties = new Properties(); nodeProperties.setProperty("solr.data.dir", instance.getDataDir()); JettyConfig 
jettyConfig = JettyConfig.builder().setContext("/solr").setPort(0).build(); @@ -87,24 +90,24 @@ private static SolrClient createNewSolrClient(int port) { final String baseUrl = buildUrl(port, context); HttpSolrClient client = getHttpSolrClient(baseUrl, 15000, 60000); return client; - } - catch (Exception ex) { + } catch (Exception ex) { throw new RuntimeException(ex); } } - @Before public void setUp() throws Exception { super.setUp(); String configFile = "solrconfig-leader1.xml"; - if(random().nextBoolean()) { + if (random().nextBoolean()) { configFile = "solrconfig-leader1-keepOneBackup.xml"; addNumberToKeepInRequest = false; backupKeepParamName = ReplicationHandler.NUMBER_BACKUPS_TO_KEEP_INIT_PARAM; } - leader = new ReplicationTestHelper.SolrInstance(createTempDir("solr-instance").toFile(), "leader", null); + leader = + new ReplicationTestHelper.SolrInstance( + createTempDir("solr-instance").toFile(), "leader", null); leader.setUp(); leader.copyConfigFile(CONF_DIR + configFile, "solrconfig.xml"); @@ -119,7 +122,7 @@ public void tearDown() throws Exception { super.tearDown(); if (null != leaderClient) { leaderClient.close(); - leaderClient = null; + leaderClient = null; } if (null != leaderJetty) { leaderJetty.stop(); @@ -130,45 +133,42 @@ public void tearDown() throws Exception { @Test public void testBackupOnCommit() throws Exception { - final BackupStatusChecker backupStatus - = new BackupStatusChecker(leaderClient, "/" + DEFAULT_TEST_CORENAME + "/replication"); + final BackupStatusChecker backupStatus = + new BackupStatusChecker(leaderClient, "/" + DEFAULT_TEST_CORENAME + "/replication"); final String lastBackupDir = backupStatus.checkBackupSuccess(); // sanity check no backups yet - assertNull("Already have a successful backup", - lastBackupDir); - - //Index + assertNull("Already have a successful backup", lastBackupDir); + + // Index int nDocs = BackupRestoreUtils.indexDocs(leaderClient, DEFAULT_TEST_COLLECTION_NAME, docsSeed); - + final String newBackupDir = backupStatus.waitForDifferentBackupDir(lastBackupDir, 30); - //Validate + // Validate verify(Paths.get(leader.getDataDir(), newBackupDir), nDocs); } private void verify(Path backup, int nDocs) throws IOException { log.info("Verifying ndocs={} in {}", nDocs, backup); try (Directory dir = new NIOFSDirectory(backup); - IndexReader reader = DirectoryReader.open(dir)) { + IndexReader reader = DirectoryReader.open(dir)) { IndexSearcher searcher = new IndexSearcher(reader); TopDocs hits = searcher.search(new MatchAllDocsQuery(), 1); assertEquals(nDocs, hits.totalHits.value); } } - @Test public void doTestBackup() throws Exception { - final BackupStatusChecker backupStatus - = new BackupStatusChecker(leaderClient, "/" + DEFAULT_TEST_CORENAME + "/replication"); + final BackupStatusChecker backupStatus = + new BackupStatusChecker(leaderClient, "/" + DEFAULT_TEST_CORENAME + "/replication"); String lastBackupDir = backupStatus.checkBackupSuccess(); - assertNull("Already have a successful backup", - lastBackupDir); + assertNull("Already have a successful backup", lastBackupDir); + + final Path[] snapDir = new Path[5]; // One extra for the backup on commit + // First snapshot location - final Path[] snapDir = new Path[5]; //One extra for the backup on commit - //First snapshot location - int nDocs = BackupRestoreUtils.indexDocs(leaderClient, DEFAULT_TEST_COLLECTION_NAME, docsSeed); lastBackupDir = backupStatus.waitForDifferentBackupDir(lastBackupDir, 30); @@ -184,30 +184,31 @@ public void doTestBackup() throws Exception { final String 
backupName = TestUtil.randomSimpleString(random(), 1, 20) + "_" + i; if (!namedBackup) { if (addNumberToKeepInRequest) { - runBackupCommand(leaderJetty, ReplicationHandler.CMD_BACKUP, "&" + backupKeepParamName + "=2"); + runBackupCommand( + leaderJetty, ReplicationHandler.CMD_BACKUP, "&" + backupKeepParamName + "=2"); } else { runBackupCommand(leaderJetty, ReplicationHandler.CMD_BACKUP, ""); } lastBackupDir = backupStatus.waitForDifferentBackupDir(lastBackupDir, 30); } else { - runBackupCommand(leaderJetty, ReplicationHandler.CMD_BACKUP, "&name=" + backupName); + runBackupCommand(leaderJetty, ReplicationHandler.CMD_BACKUP, "&name=" + backupName); lastBackupDir = backupStatus.waitForBackupSuccess(backupName, 30); backupNames[i] = backupName; } - snapDir[i+1] = Paths.get(leader.getDataDir(), lastBackupDir); - verify(snapDir[i+1], nDocs); + snapDir[i + 1] = Paths.get(leader.getDataDir(), lastBackupDir); + verify(snapDir[i + 1], nDocs); } - - //Test Deletion of named backup + // Test Deletion of named backup if (namedBackup) { testDeleteNamedBackup(backupNames); } else { - //5 backups got created. 4 explicitly and one because a commit was called. + // 5 backups got created. 4 explicitly and one because a commit was called. // Only the last two should still exist. final List remainingBackups = new ArrayList<>(); - - try (DirectoryStream stream = Files.newDirectoryStream(Paths.get(leader.getDataDir()), "snapshot*")) { + + try (DirectoryStream stream = + Files.newDirectoryStream(Paths.get(leader.getDataDir()), "snapshot*")) { Iterator iter = stream.iterator(); while (iter.hasNext()) { remainingBackups.add(iter.next().getFileName().toString()); @@ -216,43 +217,53 @@ public void doTestBackup() throws Exception { // Depending on the use of backupKeepParamName there should either be 2 or 1 backups remaining if (backupKeepParamName.equals(ReplicationHandler.NUMBER_BACKUPS_TO_KEEP_REQUEST_PARAM)) { - assertEquals(remainingBackups.toString(), - 2, remainingBackups.size()); + assertEquals(remainingBackups.toString(), 2, remainingBackups.size()); if (Files.exists(snapDir[0]) || Files.exists(snapDir[1]) || Files.exists(snapDir[2])) { - fail("Backup should have been cleaned up because " + backupKeepParamName + " was set to 2."); + fail( + "Backup should have been cleaned up because " + + backupKeepParamName + + " was set to 2."); } } else { - assertEquals(remainingBackups.toString(), - 1, remainingBackups.size()); + assertEquals(remainingBackups.toString(), 1, remainingBackups.size()); - if (Files.exists(snapDir[0]) || Files.exists(snapDir[1]) || Files.exists(snapDir[2]) + if (Files.exists(snapDir[0]) + || Files.exists(snapDir[1]) + || Files.exists(snapDir[2]) || Files.exists(snapDir[3])) { - fail("Backup should have been cleaned up because " + backupKeepParamName + " was set to 1."); + fail( + "Backup should have been cleaned up because " + + backupKeepParamName + + " was set to 1."); } } - } } private void testDeleteNamedBackup(String backupNames[]) throws Exception { - final BackupStatusChecker backupStatus - = new BackupStatusChecker(leaderClient, "/" + DEFAULT_TEST_CORENAME + "/replication"); + final BackupStatusChecker backupStatus = + new BackupStatusChecker(leaderClient, "/" + DEFAULT_TEST_CORENAME + "/replication"); for (int i = 0; i < 2; i++) { final Path p = Paths.get(leader.getDataDir(), "snapshot." 
+ backupNames[i]); - assertTrue("WTF: Backup doesn't exist: " + p.toString(), - Files.exists(p)); - runBackupCommand(leaderJetty, ReplicationHandler.CMD_DELETE_BACKUP, "&name=" +backupNames[i]); + assertTrue("WTF: Backup doesn't exist: " + p.toString(), Files.exists(p)); + runBackupCommand( + leaderJetty, ReplicationHandler.CMD_DELETE_BACKUP, "&name=" + backupNames[i]); backupStatus.waitForBackupDeletionSuccess(backupNames[i], 30); - assertFalse("backup still exists after deletion: " + p.toString(), - Files.exists(p)); + assertFalse("backup still exists after deletion: " + p.toString(), Files.exists(p)); } - } - public static void runBackupCommand(JettySolrRunner leaderJetty, String cmd, String params) throws IOException { - String leaderUrl = buildUrl(leaderJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME - + ReplicationHandler.PATH+"?wt=xml&command=" + cmd + params; + public static void runBackupCommand(JettySolrRunner leaderJetty, String cmd, String params) + throws IOException { + String leaderUrl = + buildUrl(leaderJetty.getLocalPort(), context) + + "/" + + DEFAULT_TEST_CORENAME + + ReplicationHandler.PATH + + "?wt=xml&command=" + + cmd + + params; InputStream stream = null; try { URL url = new URL(leaderUrl); @@ -262,5 +273,4 @@ public static void runBackupCommand(JettySolrRunner leaderJetty, String cmd, Str IOUtils.closeQuietly(stream); } } - } diff --git a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerDiskOverFlow.java b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerDiskOverFlow.java index 7ae3c991926..82a562a9f87 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerDiskOverFlow.java +++ b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerDiskOverFlow.java @@ -17,6 +17,11 @@ package org.apache.solr.handler; +import static org.apache.solr.handler.ReplicationHandler.CMD_FETCH_INDEX; +import static org.apache.solr.handler.ReplicationHandler.CMD_GET_FILE_LIST; +import static org.apache.solr.handler.ReplicationTestHelper.invokeReplicationCommand; +import static org.apache.solr.handler.TestReplicationHandler.createAndStartJetty; + import java.io.StringWriter; import java.lang.invoke.MethodHandles; import java.util.ArrayList; @@ -28,15 +33,14 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.BooleanSupplier; import java.util.function.Function; - import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.response.QueryResponse; -import org.apache.solr.common.util.Utils; import org.apache.solr.common.SolrException; +import org.apache.solr.common.util.Utils; import org.apache.solr.util.LogLevel; import org.junit.After; import org.junit.Before; @@ -44,12 +48,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.handler.ReplicationHandler.CMD_FETCH_INDEX; -import static org.apache.solr.handler.ReplicationHandler.CMD_GET_FILE_LIST; -import static org.apache.solr.handler.TestReplicationHandler.createAndStartJetty; -import static org.apache.solr.handler.ReplicationTestHelper.invokeReplicationCommand; - - @LogLevel("org.apache.solr.handler.IndexFetcher=DEBUG") @SolrTestCaseJ4.SuppressSSL public class TestReplicationHandlerDiskOverFlow extends SolrTestCaseJ4 { @@ -58,7 +56,7 @@ public class TestReplicationHandlerDiskOverFlow extends 
SolrTestCaseJ4 { private static final String expectedErr = "Search is temporarily disabled"; Function<String, Long> originalDiskSpaceprovider = null; BooleanSupplier originalTestWait = null; - + JettySolrRunner leaderJetty, followerJetty; SolrClient leaderClient, followerClient; ReplicationTestHelper.SolrInstance leader = null, follower = null; @@ -69,21 +67,36 @@ public class TestReplicationHandlerDiskOverFlow extends SolrTestCaseJ4 { public void setUp() throws Exception { originalDiskSpaceprovider = IndexFetcher.usableDiskSpaceProvider; originalTestWait = IndexFetcher.testWait; - + super.setUp(); System.setProperty("solr.directoryFactory", "solr.StandardDirectoryFactory"); - String factory = random().nextInt(100) < 75 ? "solr.NRTCachingDirectoryFactory" : "solr.StandardDirectoryFactory"; // test the default most of the time + String factory = + random().nextInt(100) < 75 + ? "solr.NRTCachingDirectoryFactory" + : "solr.StandardDirectoryFactory"; // test the default most of the time System.setProperty("solr.directoryFactory", factory); - leader = new ReplicationTestHelper.SolrInstance(createTempDir("solr-instance").toFile(), "leader", null); + leader = + new ReplicationTestHelper.SolrInstance( + createTempDir("solr-instance").toFile(), "leader", null); leader.setUp(); leaderJetty = createAndStartJetty(leader); - leaderClient = ReplicationTestHelper.createNewSolrClient( TestReplicationHandler.buildUrl(leaderJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + leaderClient = + ReplicationTestHelper.createNewSolrClient( + TestReplicationHandler.buildUrl(leaderJetty.getLocalPort()) + + "/" + + DEFAULT_TEST_CORENAME); System.setProperty(TEST_URL_ALLOW_LIST, leaderJetty.getBaseUrl().toString()); - follower = new ReplicationTestHelper.SolrInstance(createTempDir("solr-instance").toFile(), "follower", leaderJetty.getLocalPort()); + follower = + new ReplicationTestHelper.SolrInstance( + createTempDir("solr-instance").toFile(), "follower", leaderJetty.getLocalPort()); follower.setUp(); followerJetty = createAndStartJetty(follower); - followerClient = ReplicationTestHelper.createNewSolrClient( TestReplicationHandler.buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME); + followerClient = + ReplicationTestHelper.createNewSolrClient( + TestReplicationHandler.buildUrl(followerJetty.getLocalPort()) + + "/" + + DEFAULT_TEST_CORENAME); System.setProperty("solr.indexfetcher.sotimeout2", "45000"); } @@ -98,7 +111,7 @@ public void tearDown() throws Exception { } if (null != followerJetty) { followerJetty.stop(); - followerJetty = null; + followerJetty = null; } leader = follower = null; if (null != leaderClient) { @@ -111,118 +124,137 @@ public void tearDown() throws Exception { } System.clearProperty(TEST_URL_ALLOW_LIST); System.clearProperty("solr.indexfetcher.sotimeout"); - + IndexFetcher.usableDiskSpaceProvider = originalDiskSpaceprovider; IndexFetcher.testWait = originalTestWait; } @Test public void testDiskOverFlow() throws Exception { - invokeReplicationCommand(TestReplicationHandler.buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME, "disablepoll"); - //index docs + invokeReplicationCommand( + TestReplicationHandler.buildUrl(followerJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME, + "disablepoll"); + // index docs log.info("Indexing to LEADER"); int docsInLeader = 1000; long szLeader = indexDocs(leaderClient, docsInLeader, 0); log.info("Indexing to FOLLOWER"); long szFollower = indexDocs(followerClient, 1200, 1000); - IndexFetcher.usableDiskSpaceProvider = new Function<String, Long>() { - 
@Override - public Long apply(String s) { - return szLeader; - } - }; + IndexFetcher.usableDiskSpaceProvider = + new Function<String, Long>() { + @Override + public Long apply(String s) { + return szLeader; + } + }; - // we don't need/want the barrier to be cyclic, so we use a ref that our barrier action will null - // out to prevent it from being triggered multiple times (which shouldn't happen anyway) + // we don't need/want the barrier to be cyclic, so we use a ref that our barrier action will + // null out to prevent it from being triggered multiple times (which shouldn't happen anyway) final AtomicReference<CyclicBarrier> commonBarrier = new AtomicReference<>(); - commonBarrier.set(new CyclicBarrier(2, () -> { commonBarrier.set(null); })); + commonBarrier.set( + new CyclicBarrier( + 2, + () -> { + commonBarrier.set(null); + })); + final List<Exception> threadFailures = new ArrayList<>(7); - - IndexFetcher.testWait = new BooleanSupplier() { - @Override - public boolean getAsBoolean() { - try { - final CyclicBarrier barrier = commonBarrier.get(); - if (null != barrier) { - barrier.await(60, TimeUnit.SECONDS); - } - } catch (Exception e) { - log.error("IndexFetcher Thread Failure", e); - threadFailures.add(e); - } - return true; - } - }; - - new Thread(() -> { - try { - for (int i = 0; i < 100; i++) { - final CyclicBarrier barrier = commonBarrier.get(); - assertNotNull("why is query thread still looping if barrier has already been cleared?", - barrier); + + IndexFetcher.testWait = + new BooleanSupplier() { + @Override + public boolean getAsBoolean() { try { - QueryResponse rsp = followerClient.query(new SolrQuery() - .setQuery("*:*") - .setRows(0)); - Thread.sleep(200); - } catch (SolrException e) { - if (e.code() == SolrException.ErrorCode.SERVICE_UNAVAILABLE.code - && e.getMessage().contains(expectedErr) - ) { - log.info("Got expected exception", e); - // now let the barrier complete & clear itself, and we're done + final CyclicBarrier barrier = commonBarrier.get(); + if (null != barrier) { barrier.await(60, TimeUnit.SECONDS); - return; // break out } - // else... - // not our expected exception, re-throw to fail fast... - throw e; + } catch (Exception e) { + log.error("IndexFetcher Thread Failure", e); + threadFailures.add(e); } + return true; } - // if we made it this far, something is wrong... - throw new RuntimeException("Query thread gave up waiting for expected error: " + expectedErr); - } catch (Exception e) { - log.error("Query Thread Failure", e); - threadFailures.add(e); - } - }).start(); - - QueryResponse response = followerClient.query(new SolrQuery() - .add("qt", "/replication") - .add("command", CMD_FETCH_INDEX) - .add("wait", "true") - ); - assertEquals("Replication command status", - "OK", response._getStr("status", null)); - - assertEquals("threads encountered failures (see logs for when)", - Collections.emptyList(), threadFailures); + }; + + new Thread( + () -> { + try { + for (int i = 0; i < 100; i++) { + final CyclicBarrier barrier = commonBarrier.get(); + assertNotNull( + "why is query thread still looping if barrier has already been cleared?", + barrier); + try { + QueryResponse rsp = + followerClient.query(new SolrQuery().setQuery("*:*").setRows(0)); + Thread.sleep(200); + } catch (SolrException e) { + if (e.code() == SolrException.ErrorCode.SERVICE_UNAVAILABLE.code + && e.getMessage().contains(expectedErr)) { + log.info("Got expected exception", e); + // now let the barrier complete & clear itself, and we're done + barrier.await(60, TimeUnit.SECONDS); + return; // break out + } + // else...
+ // not our expected exception, re-throw to fail fast... + throw e; + } + } + // if we made it this far, something is wrong... + throw new RuntimeException( + "Query thread gave up waiting for expected error: " + expectedErr); + } catch (Exception e) { + log.error("Query Thread Failure", e); + threadFailures.add(e); + } + }) + .start(); + + QueryResponse response = + followerClient.query( + new SolrQuery() + .add("qt", "/replication") + .add("command", CMD_FETCH_INDEX) + .add("wait", "true")); + assertEquals("Replication command status", "OK", response._getStr("status", null)); + + assertEquals( + "threads encountered failures (see logs for when)", + Collections.emptyList(), + threadFailures); response = followerClient.query(new SolrQuery().setQuery("*:*").setRows(0)); assertEquals("docs in follower", docsInLeader, response.getResults().getNumFound()); - response = followerClient.query(new SolrQuery() - .add("qt", "/replication") - .add("command", ReplicationHandler.CMD_DETAILS) - ); + response = + followerClient.query( + new SolrQuery() + .add("qt", "/replication") + .add("command", ReplicationHandler.CMD_DETAILS)); if (log.isInfoEnabled()) { log.info("DETAILS {}", Utils.writeJson(response, new StringWriter(), true).toString()); } - assertEquals("follower's clearedLocalIndexFirst (from rep details)", - "true", response._getStr("details/follower/clearedLocalIndexFirst", null)); + assertEquals( + "follower's clearedLocalIndexFirst (from rep details)", + "true", + response._getStr("details/follower/clearedLocalIndexFirst", null)); } @SuppressWarnings("unchecked") private long indexDocs(SolrClient client, int totalDocs, int start) throws Exception { for (int i = 0; i < totalDocs; i++) - ReplicationTestHelper.index(client, "id", i + start, "name", TestUtil.randomSimpleString(random(), 1000, 5000)); + ReplicationTestHelper.index( + client, "id", i + start, "name", TestUtil.randomSimpleString(random(), 1000, 5000)); client.commit(true, true); - QueryResponse response = client.query(new SolrQuery() - .add("qt", "/replication") - .add("command", "filelist") - .add("generation", "-1")); + QueryResponse response = + client.query( + new SolrQuery() + .add("qt", "/replication") + .add("command", "filelist") + .add("generation", "-1")); long totalSize = 0; for (Map<String, Object> map : (List<Map<String, Object>>) response.getResponse().get(CMD_GET_FILE_LIST)) { @@ -231,5 +263,4 @@ private long indexDocs(SolrClient client, int totalDocs, int start) throws Excep } return totalSize; } - } diff --git a/solr/core/src/test/org/apache/solr/handler/TestReqParamsAPI.java b/solr/core/src/test/org/apache/solr/handler/TestReqParamsAPI.java index def36df74e9..48559cb1189 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestReqParamsAPI.java +++ b/solr/core/src/test/org/apache/solr/handler/TestReqParamsAPI.java @@ -16,12 +16,14 @@ */ package org.apache.solr.handler; +import static java.util.Arrays.asList; +import static org.apache.solr.handler.TestSolrConfigHandlerCloud.compareValues; + import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.function.Predicate; - import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -37,9 +39,6 @@ import org.junit.BeforeClass; import org.junit.Test; -import static java.util.Arrays.asList; -import static org.apache.solr.handler.TestSolrConfigHandlerCloud.compareValues; - public class TestReqParamsAPI extends 
SolrCloudTestCase { private List<RestTestHarness> restTestHarnesses = new ArrayList<>(); @@ -47,9 +46,11 @@ public class TestReqParamsAPI extends SolrCloudTestCase { private void setupHarnesses() { for (final JettySolrRunner jettySolrRunner : cluster.getJettySolrRunners()) { - RestTestHarness harness = new RestTestHarness(() -> jettySolrRunner.getBaseUrl().toString() + "/" + COLL_NAME); + RestTestHarness harness = + new RestTestHarness(() -> jettySolrRunner.getBaseUrl().toString() + "/" + COLL_NAME); if (random().nextBoolean()) { - harness.setServerProvider(() -> jettySolrRunner.getBaseUrl().toString() + "/____v2/c/" + COLL_NAME); + harness.setServerProvider( + () -> jettySolrRunner.getBaseUrl().toString() + "/____v2/c/" + COLL_NAME); } restTestHarnesses.add(harness); } @@ -59,7 +60,8 @@ private void setupHarnesses() { public static void createCluster() throws Exception { System.setProperty("managed.schema.mutable", "true"); configureCluster(2) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-managed").resolve("conf")) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-managed").resolve("conf")) .configure(); CollectionAdminRequest.createCollection(COLL_NAME, "conf1", 1, 2) .process(cluster.getSolrClient()); @@ -87,40 +89,48 @@ private void testReqParams() throws Exception { urls.add("" + replica.getBaseUrl() + "/" + replica.get(ZkStateReader.CORE_NAME_PROP)); } - RestTestHarness writeHarness = restTestHarnesses.get(random().nextInt(restTestHarnesses.size())); + RestTestHarness writeHarness = + restTestHarnesses.get(random().nextInt(restTestHarnesses.size())); - String payload = "{\n" + - "'create-requesthandler' : { 'name' : '/dump0', 'class': 'org.apache.solr.handler.DumpRequestHandler' }\n" + - "}"; + String payload = + "{\n" + + "'create-requesthandler' : { 'name' : '/dump0', 'class': 'org.apache.solr.handler.DumpRequestHandler' }\n" + + "}"; TestSolrConfigHandler.runConfigCommand(writeHarness, "/config", payload); - payload = "{\n" + - "'create-requesthandler' : { 'name' : '/dump1', 'class': 'org.apache.solr.handler.DumpRequestHandler', 'useParams':'x' }\n" + - "}"; + payload = + "{\n" + + "'create-requesthandler' : { 'name' : '/dump1', 'class': 'org.apache.solr.handler.DumpRequestHandler', 'useParams':'x' }\n" + + "}"; TestSolrConfigHandler.runConfigCommand(writeHarness, "/config", payload); - AbstractFullDistribZkTestBase.waitForRecoveriesToFinish(COLL_NAME, cloudClient.getZkStateReader(), false, true, 90); + AbstractFullDistribZkTestBase.waitForRecoveriesToFinish( + COLL_NAME, cloudClient.getZkStateReader(), false, true, 90); - payload = " {\n" + - " 'set' : {'x': {" + - " 'a':'A val',\n" + - " 'b': 'B val'}\n" + - " }\n" + - " }"; + payload = + " {\n" + + " 'set' : {'x': {" + + " 'a':'A val',\n" + + " 'b': 'B val'}\n" + + " }\n" + + " }"; TestSolrConfigHandler.runConfigCommand(writeHarness, "/config/params", payload); - Map result = TestSolrConfigHandler.testForResponseElement(null, - urls.get(random().nextInt(urls.size())), - "/config/params", - cloudClient, - asList("response", "params", "x", "a"), - "A val", - 10); + Map result = + TestSolrConfigHandler.testForResponseElement( + null, + urls.get(random().nextInt(urls.size())), + "/config/params", + cloudClient, + asList("response", "params", "x", "a"), + "A val", + 10); compareValues(result, "B val", asList("response", "params", "x", "b")); - TestSolrConfigHandler.testForResponseElement(null, + TestSolrConfigHandler.testForResponseElement( + null, urls.get(random().nextInt(urls.size())), 
"/config/overlay", cloudClient, @@ -128,16 +138,19 @@ private void testReqParams() throws Exception { "/dump0", 10); - result = TestSolrConfigHandler.testForResponseElement(null, - urls.get(random().nextInt(urls.size())), - "/dump0?useParams=x", - cloudClient, - asList("params", "a"), - "A val", - 5); + result = + TestSolrConfigHandler.testForResponseElement( + null, + urls.get(random().nextInt(urls.size())), + "/dump0?useParams=x", + cloudClient, + asList("params", "a"), + "A val", + 5); compareValues(result, "", asList("params", RequestParams.USEPARAM)); - TestSolrConfigHandler.testForResponseElement(null, + TestSolrConfigHandler.testForResponseElement( + null, urls.get(random().nextInt(urls.size())), "/dump0?useParams=x&a=fomrequest", cloudClient, @@ -145,147 +158,172 @@ private void testReqParams() throws Exception { "fomrequest", 5); - result = TestSolrConfigHandler.testForResponseElement(null, - urls.get(random().nextInt(urls.size())), - "/config/overlay", - cloudClient, - asList("overlay", "requestHandler", "/dump1", "name"), - "/dump1", - 10); - - result = TestSolrConfigHandler.testForResponseElement(null, - urls.get(random().nextInt(urls.size())), - "/dump1", - cloudClient, - asList("params", "a"), - "A val", - 5); - + result = + TestSolrConfigHandler.testForResponseElement( + null, + urls.get(random().nextInt(urls.size())), + "/config/overlay", + cloudClient, + asList("overlay", "requestHandler", "/dump1", "name"), + "/dump1", + 10); + + result = + TestSolrConfigHandler.testForResponseElement( + null, + urls.get(random().nextInt(urls.size())), + "/dump1", + cloudClient, + asList("params", "a"), + "A val", + 5); writeHarness = restTestHarnesses.get(random().nextInt(restTestHarnesses.size())); - payload = " {\n" + - " 'set' : {'y':{\n" + - " 'c':'CY val',\n" + - " 'b': 'BY val', " + - " 'i': 20, " + - " 'd': ['val 1', 'val 2']}\n" + - " }\n" + - " }"; - + payload = + " {\n" + + " 'set' : {'y':{\n" + + " 'c':'CY val',\n" + + " 'b': 'BY val', " + + " 'i': 20, " + + " 'd': ['val 1', 'val 2']}\n" + + " }\n" + + " }"; TestSolrConfigHandler.runConfigCommand(writeHarness, "/config/params", payload); - result = TestSolrConfigHandler.testForResponseElement( - null, - urls.get(random().nextInt(urls.size())), - "/config/params", - cloudClient, - asList("response", "params", "y", "c"), - "CY val", - 10); + result = + TestSolrConfigHandler.testForResponseElement( + null, + urls.get(random().nextInt(urls.size())), + "/config/params", + cloudClient, + asList("response", "params", "y", "c"), + "CY val", + 10); compareValues(result, 20l, asList("response", "params", "y", "i")); compareValues(result, null, asList("response", "params", "y", "a")); - - result = TestSolrConfigHandler.testForResponseElement(null, - urls.get(random().nextInt(urls.size())), - "/dump1?useParams=y", - cloudClient, - asList("params", "c"), - "CY val", - 5); + result = + TestSolrConfigHandler.testForResponseElement( + null, + urls.get(random().nextInt(urls.size())), + "/dump1?useParams=y", + cloudClient, + asList("params", "c"), + "CY val", + 5); compareValues(result, "BY val", asList("params", "b")); compareValues(result, "A val", asList("params", "a")); compareValues(result, Arrays.asList("val 1", "val 2"), asList("params", "d")); compareValues(result, "20", asList("params", "i")); - result = TestSolrConfigHandler.testForResponseElement(null, - urls.get(random().nextInt(urls.size())), - "/config/requestHandler?componentName=/dump1&expandParams=true&useParams=y&c=CC", - cloudClient, - asList("config", 
"requestHandler","/dump1","_useParamsExpanded_","x", "a"), - "A val", - 5); - compareValues(result, "B val", asList("config", "requestHandler","/dump1","_useParamsExpanded_","x", "b")); - compareValues(result, "CY val", asList("config", "requestHandler","/dump1","_useParamsExpanded_","y", "c")); - compareValues(result, "BY val", asList("config", "requestHandler","/dump1","_useParamsExpanded_","y", "b")); - compareValues(result, "A val", asList("config", "requestHandler","/dump1","_effectiveParams_", "a")); - compareValues(result, "BY val", asList("config", "requestHandler","/dump1","_effectiveParams_", "b")); - compareValues(result, "CC", asList("config", "requestHandler","/dump1","_effectiveParams_", "c")); - - payload = " {\n" + - " 'update' : {'y': {\n" + - " 'c':'CY val modified',\n" + - " 'e':'EY val',\n" + - " 'b': 'BY val'" + - "}\n" + - " }\n" + - " }"; - + result = + TestSolrConfigHandler.testForResponseElement( + null, + urls.get(random().nextInt(urls.size())), + "/config/requestHandler?componentName=/dump1&expandParams=true&useParams=y&c=CC", + cloudClient, + asList("config", "requestHandler", "/dump1", "_useParamsExpanded_", "x", "a"), + "A val", + 5); + compareValues( + result, + "B val", + asList("config", "requestHandler", "/dump1", "_useParamsExpanded_", "x", "b")); + compareValues( + result, + "CY val", + asList("config", "requestHandler", "/dump1", "_useParamsExpanded_", "y", "c")); + compareValues( + result, + "BY val", + asList("config", "requestHandler", "/dump1", "_useParamsExpanded_", "y", "b")); + compareValues( + result, "A val", asList("config", "requestHandler", "/dump1", "_effectiveParams_", "a")); + compareValues( + result, "BY val", asList("config", "requestHandler", "/dump1", "_effectiveParams_", "b")); + compareValues( + result, "CC", asList("config", "requestHandler", "/dump1", "_effectiveParams_", "c")); + + payload = + " {\n" + + " 'update' : {'y': {\n" + + " 'c':'CY val modified',\n" + + " 'e':'EY val',\n" + + " 'b': 'BY val'" + + "}\n" + + " }\n" + + " }"; TestSolrConfigHandler.runConfigCommand(writeHarness, "/config/params", payload); - result = TestSolrConfigHandler.testForResponseElement( - null, - urls.get(random().nextInt(urls.size())), - "/config/params", - cloudClient, - asList("response", "params", "y", "c"), - "CY val modified", - 10); + result = + TestSolrConfigHandler.testForResponseElement( + null, + urls.get(random().nextInt(urls.size())), + "/config/params", + cloudClient, + asList("response", "params", "y", "c"), + "CY val modified", + 10); compareValues(result, "EY val", asList("response", "params", "y", "e")); - - payload = " {\n" + - " 'set' : {'y': {\n" + - " 'p':'P val',\n" + - " 'q': 'Q val'" + - "}\n" + - " }\n" + - " }"; - + payload = + " {\n" + + " 'set' : {'y': {\n" + + " 'p':'P val',\n" + + " 'q': 'Q val'" + + "}\n" + + " }\n" + + " }"; TestSolrConfigHandler.runConfigCommand(writeHarness, "/config/params", payload); - result = TestSolrConfigHandler.testForResponseElement( - null, - urls.get(random().nextInt(urls.size())), - "/config/params", - cloudClient, - asList("response", "params", "y", "p"), - "P val", - 10); + result = + TestSolrConfigHandler.testForResponseElement( + null, + urls.get(random().nextInt(urls.size())), + "/config/params", + cloudClient, + asList("response", "params", "y", "p"), + "P val", + 10); compareValues(result, null, asList("response", "params", "y", "c")); - compareValues(result, 2l, asList("response", "params", "y", "","v")); - compareValues(result, 0l, asList("response", "params", "x", "","v")); + 
compareValues(result, 2l, asList("response", "params", "y", "", "v")); + compareValues(result, 0l, asList("response", "params", "x", "", "v")); payload = "{update :{x : {_appends_ :{ add : 'first' }, _invariants_ : {fixed: f }}}}"; TestSolrConfigHandler.runConfigCommand(writeHarness, "/config/params", payload); - result = TestSolrConfigHandler.testForResponseElement( - null, - urls.get(random().nextInt(urls.size())), - "/config/params", - cloudClient, - asList("response", "params", "x", "_appends_", "add"), - "first", - 10); + result = + TestSolrConfigHandler.testForResponseElement( + null, + urls.get(random().nextInt(urls.size())), + "/config/params", + cloudClient, + asList("response", "params", "x", "_appends_", "add"), + "first", + 10); compareValues(result, "f", asList("response", "params", "x", "_invariants_", "fixed")); - - result = TestSolrConfigHandler.testForResponseElement(null, - urls.get(random().nextInt(urls.size())), - "/dump1?fixed=changeit&add=second", - cloudClient, - asList("params", "fixed"), - "f", - 5); - compareValues(result, new Predicate<>() { - @Override - public boolean test(Object o) { - List l = (List) o; - return l.contains("first") && l.contains("second"); - } - }, asList("params", "add")); + result = + TestSolrConfigHandler.testForResponseElement( + null, + urls.get(random().nextInt(urls.size())), + "/dump1?fixed=changeit&add=second", + cloudClient, + asList("params", "fixed"), + "f", + 5); + compareValues( + result, + new Predicate<>() { + @Override + public boolean test(Object o) { + List l = (List) o; + return l.contains("first") && l.contains("second"); + } + }, + asList("params", "add")); payload = " {'delete' : 'y'}"; TestSolrConfigHandler.runConfigCommand(writeHarness, "/config/params", payload); @@ -300,12 +338,12 @@ public boolean test(Object o) { payload = " {'unset' : 'y'}"; TestSolrConfigHandler.runConfigCommandExpectFailure( - writeHarness,"/config/params", payload, "Unknown operation 'unset'"); + writeHarness, "/config/params", payload, "Unknown operation 'unset'"); // deleting already deleted one should fail // error message should contain parameter set name payload = " {'delete' : 'y'}"; TestSolrConfigHandler.runConfigCommandExpectFailure( - writeHarness,"/config/params", payload, "Could not delete. No such params 'y' exist"); + writeHarness, "/config/params", payload, "Could not delete. 
No such params 'y' exist"); } } diff --git a/solr/core/src/test/org/apache/solr/handler/TestRequestId.java b/solr/core/src/test/org/apache/solr/handler/TestRequestId.java index 8521e9e0e46..264e65e5e7e 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestRequestId.java +++ b/solr/core/src/test/org/apache/solr/handler/TestRequestId.java @@ -17,47 +17,45 @@ package org.apache.solr.handler; -import java.lang.invoke.MethodHandles; +import static org.hamcrest.core.StringContains.containsString; +import java.lang.invoke.MethodHandles; +import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.util.SuppressForbidden; -import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.core.SolrCore; import org.apache.solr.util.LogListener; -import org.apache.solr.SolrTestCaseJ4; - +import org.junit.BeforeClass; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.MDC; -import org.junit.BeforeClass; - -import static org.hamcrest.core.StringContains.containsString; - -@SuppressForbidden(reason="We need to use log4J2 classes directly to test MDC impacts") +@SuppressForbidden(reason = "We need to use log4J2 classes directly to test MDC impacts") public class TestRequestId extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - + @BeforeClass public static void beforeTests() throws Exception { - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); } public void testRequestId() { - + try (LogListener reqLog = LogListener.info(SolrCore.class.getName() + ".Request")) { - + // Sanity check that the our MDC doesn't already have some sort of rid set in it assertNull(MDC.get(CommonParams.REQUEST_ID)); // simple request that should successfully be logged ... assertQ("xxx", req("q", "*:*", CommonParams.REQUEST_ID, "xxx"), "//*[@numFound='0']"); - // Sanity check that the test framework didn't let our "request" MDC info "leak" out of assertQ.. + // Sanity check that the test framework didn't let our "request" MDC info "leak" out of + // assertQ.. assertNull(MDC.get(CommonParams.REQUEST_ID)); - { + { var reqEvent = reqLog.getQueue().poll(); assertNotNull(reqEvent); assertEquals("xxx", reqEvent.getContextData().getValue("rid")); @@ -65,18 +63,26 @@ public void testRequestId() { } // request that should cause some ERROR logging... - // NOTE: we can't just listen for errors at the 'root' logger because assertQEx will 'mute' them before we can intercept + // NOTE: we can't just listen for errors at the 'root' logger because assertQEx will 'mute' + // them before we can intercept try (LogListener errLog = LogListener.error(RequestHandlerBase.class)) { - assertQEx("yyy", "bogus_yyy", req("q", "*:*", "sort", "bogus_yyy", CommonParams.REQUEST_ID, "yyy"), ErrorCode.BAD_REQUEST); - - // Sanity check that the test framework didn't let our "request" MDC info "leak" out of assertQEx.. + assertQEx( + "yyy", + "bogus_yyy", + req("q", "*:*", "sort", "bogus_yyy", CommonParams.REQUEST_ID, "yyy"), + ErrorCode.BAD_REQUEST); + + // Sanity check that the test framework didn't let our "request" MDC info "leak" out of + // assertQEx.. 
assertNull(MDC.get(CommonParams.REQUEST_ID)); - { + { var reqEvent = reqLog.getQueue().poll(); assertNotNull(reqEvent); assertEquals("yyy", reqEvent.getContextData().getValue("rid")); - assertThat(reqEvent.getMessage().getFormattedMessage(), containsString("status="+ErrorCode.BAD_REQUEST.code)); + assertThat( + reqEvent.getMessage().getFormattedMessage(), + containsString("status=" + ErrorCode.BAD_REQUEST.code)); } { var errEvent = errLog.getQueue().poll(); @@ -87,5 +93,4 @@ public void testRequestId() { } } } - } diff --git a/solr/core/src/test/org/apache/solr/handler/TestRestoreCore.java b/solr/core/src/test/org/apache/solr/handler/TestRestoreCore.java index 1d48de02373..60290504aad 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestRestoreCore.java +++ b/solr/core/src/test/org/apache/solr/handler/TestRestoreCore.java @@ -18,14 +18,12 @@ import java.io.File; import java.io.IOException; - import java.net.URLEncoder; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Properties; - import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; @@ -41,22 +39,26 @@ import org.junit.Before; import org.junit.Test; -@SolrTestCaseJ4.SuppressSSL // Currently unknown why SSL does not work with this test -@LuceneTestCase.SuppressCodecs("SimpleText") // Backups do checksum validation against a footer value not present in 'SimpleText' +@SolrTestCaseJ4.SuppressSSL // Currently unknown why SSL does not work with this test +// Backups do checksum validation against a footer value not present in 'SimpleText' +@LuceneTestCase.SuppressCodecs("SimpleText") public class TestRestoreCore extends SolrJettyTestBase { JettySolrRunner leaderJetty; - ReplicationTestHelper.SolrInstance leader = null; + ReplicationTestHelper.SolrInstance leader = null; SolrClient leaderClient; - private static final String CONF_DIR = "solr" + File.separator + DEFAULT_TEST_CORENAME + File.separator + "conf" - + File.separator; + private static final String CONF_DIR = + "solr" + File.separator + DEFAULT_TEST_CORENAME + File.separator + "conf" + File.separator; private static String context = "/solr"; private static long docsSeed; // see indexDocs() - private static JettySolrRunner createAndStartJetty(ReplicationTestHelper.SolrInstance instance) throws Exception { - FileUtils.copyFile(new File(SolrTestCaseJ4.TEST_HOME(), "solr.xml"), new File(instance.getHomeDir(), "solr.xml")); + private static JettySolrRunner createAndStartJetty(ReplicationTestHelper.SolrInstance instance) + throws Exception { + FileUtils.copyFile( + new File(SolrTestCaseJ4.TEST_HOME(), "solr.xml"), + new File(instance.getHomeDir(), "solr.xml")); Properties nodeProperties = new Properties(); nodeProperties.setProperty("solr.data.dir", instance.getDataDir()); JettyConfig jettyConfig = JettyConfig.builder().setContext("/solr").setPort(0).build(); @@ -71,19 +73,19 @@ private static SolrClient createNewSolrClient(int port) { final String baseUrl = buildUrl(port, context); HttpSolrClient client = getHttpSolrClient(baseUrl, 15000, 60000); return client; - } - catch (Exception ex) { + } catch (Exception ex) { throw new RuntimeException(ex); } } - @Before public void setUp() throws Exception { super.setUp(); String configFile = "solrconfig-leader.xml"; - leader = new ReplicationTestHelper.SolrInstance(createTempDir("solr-instance").toFile(), "leader", null); + leader = + new ReplicationTestHelper.SolrInstance( + 
createTempDir("solr-instance").toFile(), "leader", null); leader.setUp(); leader.copyConfigFile(CONF_DIR + configFile, "solrconfig.xml"); @@ -98,7 +100,7 @@ public void tearDown() throws Exception { super.tearDown(); if (null != leaderClient) { leaderClient.close(); - leaderClient = null; + leaderClient = null; } if (null != leaderJetty) { leaderJetty.stop(); @@ -112,28 +114,32 @@ public void testSimpleRestore() throws Exception { int nDocs = usually() ? BackupRestoreUtils.indexDocs(leaderClient, "collection1", docsSeed) : 0; - final BackupStatusChecker backupStatus - = new BackupStatusChecker(leaderClient, "/" + DEFAULT_TEST_CORENAME + "/replication"); + final BackupStatusChecker backupStatus = + new BackupStatusChecker(leaderClient, "/" + DEFAULT_TEST_CORENAME + "/replication"); final String oldBackupDir = backupStatus.checkBackupSuccess(); String snapshotName = null; String location; String params = ""; String baseUrl = leaderJetty.getBaseUrl().toString(); - //Use the default backup location or an externally provided location. + // Use the default backup location or an externally provided location. if (random().nextBoolean()) { location = createTempDir().toFile().getAbsolutePath(); - leaderJetty.getCoreContainer().getAllowPaths().add(Path.of(location)); // Allow core to be created outside SOLR_HOME + leaderJetty + .getCoreContainer() + .getAllowPaths() + .add(Path.of(location)); // Allow core to be created outside SOLR_HOME params += "&location=" + URLEncoder.encode(location, "UTF-8"); } - //named snapshot vs default snapshot name + // named snapshot vs default snapshot name if (random().nextBoolean()) { snapshotName = TestUtil.randomSimpleString(random(), 1, 5); params += "&name=" + snapshotName; } - TestReplicationHandlerBackup.runBackupCommand(leaderJetty, ReplicationHandler.CMD_BACKUP, params); + TestReplicationHandlerBackup.runBackupCommand( + leaderJetty, ReplicationHandler.CMD_BACKUP, params); if (null == snapshotName) { backupStatus.waitForDifferentBackupDir(oldBackupDir, 30); @@ -143,48 +149,54 @@ public void testSimpleRestore() throws Exception { int numRestoreTests = nDocs > 0 ? 
TestUtil.nextInt(random(), 1, 5) : 1; - for (int attempts=0; attempts 0) { - //Delete a few docs + // Delete a few docs int numDeletes = TestUtil.nextInt(random(), 1, nDocs); - for(int i=0; i { - TestReplicationHandlerBackup.runBackupCommand(leaderJetty, ReplicationHandler.CMD_BACKUP, params); - }); + final String params = + "&location=" + URLEncoder.encode(createTempDir().toFile().getAbsolutePath(), "UTF-8"); + Throwable t = + expectThrows( + IOException.class, + () -> { + TestReplicationHandlerBackup.runBackupCommand( + leaderJetty, ReplicationHandler.CMD_BACKUP, params); + }); // The backup command will fail since the tmp dir is outside allowPaths assertTrue(t.getMessage().contains("Server returned HTTP response code: 400")); } @@ -199,39 +211,41 @@ public void testFailedRestore() throws Exception { String params = "&name=" + snapshotName + "&location=" + URLEncoder.encode(location, "UTF-8"); String baseUrl = leaderJetty.getBaseUrl().toString(); - TestReplicationHandlerBackup.runBackupCommand(leaderJetty, ReplicationHandler.CMD_BACKUP, params); + TestReplicationHandlerBackup.runBackupCommand( + leaderJetty, ReplicationHandler.CMD_BACKUP, params); - final BackupStatusChecker backupStatus - = new BackupStatusChecker(leaderClient, "/" + DEFAULT_TEST_CORENAME + "/replication"); + final BackupStatusChecker backupStatus = + new BackupStatusChecker(leaderClient, "/" + DEFAULT_TEST_CORENAME + "/replication"); final String backupDirName = backupStatus.waitForBackupSuccess(snapshotName, 30); - //Remove the segments_n file so that the backup index is corrupted. - //Restore should fail and it should automatically rollback to the original index. + // Remove the segments_n file so that the backup index is corrupted. + // Restore should fail and it should automatically rollback to the original index. 
final Path restoreIndexPath = Paths.get(location, backupDirName); assertTrue("Does not exist: " + restoreIndexPath, Files.exists(restoreIndexPath)); - try (DirectoryStream<Path> stream = Files.newDirectoryStream(restoreIndexPath, IndexFileNames.SEGMENTS + "*")) { + try (DirectoryStream<Path> stream = + Files.newDirectoryStream(restoreIndexPath, IndexFileNames.SEGMENTS + "*")) { Path segmentFileName = stream.iterator().next(); Files.delete(segmentFileName); } - TestReplicationHandlerBackup.runBackupCommand(leaderJetty, ReplicationHandler.CMD_RESTORE, params); + TestReplicationHandlerBackup.runBackupCommand( + leaderJetty, ReplicationHandler.CMD_RESTORE, params); - expectThrows(AssertionError.class, () -> { - for (int i = 0; i < 10; i++) { - // this will throw an assertion once we get what we expect - TestRestoreCoreUtil.fetchRestoreStatus(baseUrl, DEFAULT_TEST_CORENAME); - Thread.sleep(50); - } - // if we never got an assertion let expectThrows complain - }); + expectThrows( + AssertionError.class, + () -> { + for (int i = 0; i < 10; i++) { + // this will throw an assertion once we get what we expect + TestRestoreCoreUtil.fetchRestoreStatus(baseUrl, DEFAULT_TEST_CORENAME); + Thread.sleep(50); + } + // if we never got an assertion let expectThrows complain + }); BackupRestoreUtils.verifyDocs(nDocs, leaderClient, DEFAULT_TEST_CORENAME); - //make sure we can write to the index again + // make sure we can write to the index again nDocs = BackupRestoreUtils.indexDocs(leaderClient, "collection1", docsSeed); BackupRestoreUtils.verifyDocs(nDocs, leaderClient, DEFAULT_TEST_CORENAME); - } - - } diff --git a/solr/core/src/test/org/apache/solr/handler/TestSampleDocumentsLoader.java b/solr/core/src/test/org/apache/solr/handler/TestSampleDocumentsLoader.java index 057b97f0a7b..bfb3d3960a9 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestSampleDocumentsLoader.java +++ b/solr/core/src/test/org/apache/solr/handler/TestSampleDocumentsLoader.java @@ -17,10 +17,15 @@ package org.apache.solr.handler; +import static org.apache.solr.handler.designer.DefaultSampleDocumentsLoader.CSV_MULTI_VALUE_DELIM_PARAM; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + import java.io.File; import java.io.IOException; import java.util.List; - import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputField; import org.apache.solr.common.params.ModifiableSolrParams; @@ -35,12 +40,6 @@ import org.junit.Before; import org.junit.Test; -import static org.apache.solr.handler.designer.DefaultSampleDocumentsLoader.CSV_MULTI_VALUE_DELIM_PARAM; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - public class TestSampleDocumentsLoader { SampleDocumentsLoader loader; @@ -51,7 +50,9 @@ public void setup() throws IOException { loader = new DefaultSampleDocumentsLoader(); loader.init(new NamedList<>()); exampleDir = new File(ExternalPaths.SOURCE_HOME, "example"); - assertTrue("Required test data directory " + exampleDir.getCanonicalPath() + " not found!", exampleDir.isDirectory()); + assertTrue( + "Required test data directory " + exampleDir.getCanonicalPath() + " not found!", + exampleDir.isDirectory()); } @Test @@ -63,7 +64,8 @@ public void testJson() throws Exception { public void testCsv() throws Exception { ModifiableSolrParams params = new 
ModifiableSolrParams(); params.set(CSV_MULTI_VALUE_DELIM_PARAM, "\\|"); - List<SolrInputDocument> docs = loadTestDocs(params, new File(exampleDir, "films/films.csv"), -1, 1100); + List<SolrInputDocument> docs = + loadTestDocs(params, new File(exampleDir, "films/films.csv"), -1, 1100); boolean foundIt = false; for (SolrInputDocument next : docs) { if (".45".equals(next.getFieldValue("name"))) { @@ -84,7 +86,8 @@ public void testSolrXml() throws Exception { loadTestDocs(null, new File(exampleDir, "films/films.xml"), 1000, 1000); } - protected List<SolrInputDocument> loadTestDocs(SolrParams params, File inputDocs, int maxDocsToLoad, int expectedDocs) throws IOException { + protected List<SolrInputDocument> loadTestDocs( + SolrParams params, File inputDocs, int maxDocsToLoad, int expectedDocs) throws IOException { assertTrue(inputDocs.getCanonicalPath() + " not found", inputDocs.isFile()); ContentStream stream = getContentStream(inputDocs); SampleDocuments sampleDocs = loader.parseDocsFromStream(params, stream, maxDocsToLoad); diff --git a/solr/core/src/test/org/apache/solr/handler/TestSnapshotCoreBackup.java b/solr/core/src/test/org/apache/solr/handler/TestSnapshotCoreBackup.java index 81f5a1b69e5..f86ef0b3a4e 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestSnapshotCoreBackup.java +++ b/solr/core/src/test/org/apache/solr/handler/TestSnapshotCoreBackup.java @@ -16,6 +16,10 @@ */ package org.apache.solr.handler; +import java.io.File; +import java.io.IOException; +import java.nio.file.Paths; +import java.util.Arrays; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.store.Directory; @@ -30,24 +34,21 @@ import org.junit.After; import org.junit.Before; -import java.io.File; -import java.io.IOException; -import java.nio.file.Paths; -import java.util.Arrays; - -@LuceneTestCase.SuppressCodecs({"SimpleText"}) // Backups do checksum validation against a footer value not present in 'SimpleText' +// Backups do checksum validation against a footer value not present in 'SimpleText' +@LuceneTestCase.SuppressCodecs({"SimpleText"}) public class TestSnapshotCoreBackup extends SolrTestCaseJ4 { @Before // unique core per test public void coreInit() throws Exception { initCore("solrconfig.xml", "schema.xml"); } + @After // unique core per test public void coreDestroy() throws Exception { deleteCore(); } public void testBackupWithDocsNotSearchable() throws Exception { - //See SOLR-11616 to see when this issue can be triggered + // See SOLR-11616 to see when this issue can be triggered assertU(adoc("id", "1")); assertU(commit()); @@ -59,7 +60,7 @@ public void testBackupWithDocsNotSearchable() throws Exception { assertQ(req("q", "id:1"), "//result[@numFound='1']"); assertQ(req("q", "id:2"), "//result[@numFound='0']"); - //call backup + // call backup String location = createTempDir().toFile().getAbsolutePath(); String snapshotName = TestUtil.randomSimpleString(random(), 1, 5); @@ -67,10 +68,19 @@ public void testBackupWithDocsNotSearchable() throws Exception { cores.getAllowPaths().add(Paths.get(location)); try (final CoreAdminHandler admin = new CoreAdminHandler(cores)) { SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody(req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, "name", snapshotName, "location", - location, CoreAdminParams.BACKUP_INCREMENTAL, "false"), - resp); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), + "core", 
DEFAULT_TEST_COLLECTION_NAME, + "name", + snapshotName, + "location", + location, + CoreAdminParams.BACKUP_INCREMENTAL, + "false"), + resp); assertNull("Backup should have succeeded", resp.getException()); simpleBackupCheck(new File(location, "snapshot." + snapshotName), 2); } @@ -78,14 +88,14 @@ public void testBackupBeforeFirstCommit() throws Exception { - // even w/o a user sending any data, the SolrCore initialiation logic should have automatically created - // an "empty" commit point that can be backed up... + // even w/o a user sending any data, the SolrCore initialization logic should have automatically + // created an "empty" commit point that can be backed up... final IndexCommit empty = h.getCore().getDeletionPolicy().getLatestCommit(); assertNotNull(empty); - + // white box sanity check that the commit point of the "reader" available from SolrIndexSearcher // matches the commit point that IDPW claims is the "latest" - // + // // this is important to ensure that backup/snapshot behavior is consistent with user expection // when using typical commit + openSearcher assertEquals(empty, h.getCore().withSearcher(s -> s.getIndexReader().getIndexCommit())); @@ -93,7 +103,7 @@ public void testBackupBeforeFirstCommit() throws Exception { assertEquals(1L, empty.getGeneration()); assertNotNull(empty.getSegmentsFileName()); final String initialEmptyIndexSegmentFileName = empty.getSegmentsFileName(); - + final CoreContainer cores = h.getCoreContainer(); final CoreAdminHandler admin = new CoreAdminHandler(cores); @@ -102,86 +112,110 @@ public void testBackupBeforeFirstCommit() throws Exception { { // first a backup before we've ever done *anything*... SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "name", "empty_backup1", - "location", backupDir.getAbsolutePath(), - CoreAdminParams.BACKUP_INCREMENTAL, "false"), - resp); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "name", + "empty_backup1", + "location", + backupDir.getAbsolutePath(), + CoreAdminParams.BACKUP_INCREMENTAL, + "false"), + resp); assertNull("Backup should have succeeded", resp.getException()); - simpleBackupCheck(new File(backupDir, "snapshot.empty_backup1"), - 0, initialEmptyIndexSegmentFileName); + simpleBackupCheck( + new File(backupDir, "snapshot.empty_backup1"), 0, initialEmptyIndexSegmentFileName); } { // Empty (named) snapshot.. 
SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATESNAPSHOT.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "commitName", "empty_snapshotA"), - resp); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.CREATESNAPSHOT.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "commitName", + "empty_snapshotA"), + resp); assertNull("Snapshot A should have succeeded", resp.getException()); } - + assertU(adoc("id", "1")); // uncommitted { // second backup w/uncommited docs SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "name", "empty_backup2", - "location", backupDir.getAbsolutePath(), - CoreAdminParams.BACKUP_INCREMENTAL, "false"), - resp); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "name", + "empty_backup2", + "location", + backupDir.getAbsolutePath(), + CoreAdminParams.BACKUP_INCREMENTAL, + "false"), + resp); assertNull("Backup should have succeeded", resp.getException()); - simpleBackupCheck(new File(backupDir, "snapshot.empty_backup2"), - 0, initialEmptyIndexSegmentFileName); + simpleBackupCheck( + new File(backupDir, "snapshot.empty_backup2"), 0, initialEmptyIndexSegmentFileName); } - + { // Second empty (named) snapshot.. SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATESNAPSHOT.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "commitName", "empty_snapshotB"), - resp); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.CREATESNAPSHOT.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "commitName", + "empty_snapshotB"), + resp); assertNull("Snapshot A should have succeeded", resp.getException()); } // Committing the doc now should not affect the existing backups or snapshots... assertU(commit()); - + for (String name : Arrays.asList("empty_backup1", "empty_backup2")) { - simpleBackupCheck(new File(backupDir, "snapshot." + name ), - 0, initialEmptyIndexSegmentFileName); + simpleBackupCheck( + new File(backupDir, "snapshot." + name), 0, initialEmptyIndexSegmentFileName); } // Make backups from each of the snapshots and check they are still empty as well... for (String snapName : Arrays.asList("empty_snapshotA", "empty_snapshotB")) { String name = "empty_backup_from_" + snapName; SolrQueryResponse resp = new SolrQueryResponse(); - - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "name", name, - "commitName", snapName, - "location", backupDir.getAbsolutePath(), - CoreAdminParams.BACKUP_INCREMENTAL, "false"), - resp); - assertNull("Backup "+name+" should have succeeded", resp.getException()); - simpleBackupCheck(new File(backupDir, "snapshot." 
+ name), - 0, initialEmptyIndexSegmentFileName); - + + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "name", + name, + "commitName", + snapName, + "location", + backupDir.getAbsolutePath(), + CoreAdminParams.BACKUP_INCREMENTAL, + "false"), + resp); + assertNull("Backup " + name + " should have succeeded", resp.getException()); + simpleBackupCheck( + new File(backupDir, "snapshot." + name), 0, initialEmptyIndexSegmentFileName); } admin.close(); } - /** - * Tests that a softCommit does not affect what data is in a backup - */ + /** Tests that a softCommit does not affect what data is in a backup */ public void testBackupAfterSoftCommit() throws Exception { // sanity check empty index... @@ -194,73 +228,90 @@ public void testBackupAfterSoftCommit() throws Exception { assertU(commit()); assertQ(req("q", "id:99"), "//result[@numFound='1']"); assertQ(req("q", "*:*"), "//result[@numFound='1']"); - + final IndexCommit oneDocCommit = h.getCore().getDeletionPolicy().getLatestCommit(); assertNotNull(oneDocCommit); final String oneDocSegmentFile = oneDocCommit.getSegmentsFileName(); - + final CoreContainer cores = h.getCoreContainer(); final CoreAdminHandler admin = new CoreAdminHandler(cores); - + final File backupDir = createTempDir().toFile(); cores.getAllowPaths().add(backupDir.toPath()); - { // take an initial 'backup1a' containing our 1 document final SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "name", "backup1a", - "location", backupDir.getAbsolutePath(), - CoreAdminParams.BACKUP_INCREMENTAL, "false"), - resp); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "name", + "backup1a", + "location", + backupDir.getAbsolutePath(), + CoreAdminParams.BACKUP_INCREMENTAL, + "false"), + resp); assertNull("Backup should have succeeded", resp.getException()); - simpleBackupCheck(new File(backupDir, "snapshot.backup1a"), - 1, oneDocSegmentFile); + simpleBackupCheck(new File(backupDir, "snapshot.backup1a"), 1, oneDocSegmentFile); } - + { // and an initial "snapshot1a' that should eventually match SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATESNAPSHOT.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "commitName", "snapshot1a"), - resp); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.CREATESNAPSHOT.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "commitName", + "snapshot1a"), + resp); assertNull("Snapshot 1A should have succeeded", resp.getException()); } - // now we add our 2nd doc, and make it searchable, but we do *NOT* hard commit it to the index dir... + // now we add our 2nd doc, and make it searchable, but we do *NOT* hard commit it to the index + // dir... assertU(adoc("id", "42")); assertU(commit("softCommit", "true", "openSearcher", "true")); - + assertQ(req("q", "id:99"), "//result[@numFound='1']"); assertQ(req("q", "id:42"), "//result[@numFound='1']"); assertQ(req("q", "*:*"), "//result[@numFound='2']"); - { // we now have an index with two searchable docs, but a new 'backup1b' should still // be identical to the previous backup... 
final SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "name", "backup1b", - "location", backupDir.getAbsolutePath(), - CoreAdminParams.BACKUP_INCREMENTAL, "false"), - resp); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "name", + "backup1b", + "location", + backupDir.getAbsolutePath(), + CoreAdminParams.BACKUP_INCREMENTAL, + "false"), + resp); assertNull("Backup should have succeeded", resp.getException()); - simpleBackupCheck(new File(backupDir, "snapshot.backup1b"), - 1, oneDocSegmentFile); + simpleBackupCheck(new File(backupDir, "snapshot.backup1b"), 1, oneDocSegmentFile); } - + { // and a second "snapshot1b' should also still be identical SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATESNAPSHOT.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "commitName", "snapshot1b"), - resp); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.CREATESNAPSHOT.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "commitName", + "snapshot1b"), + resp); assertNull("Snapshot 1B should have succeeded", resp.getException()); } @@ -268,19 +319,25 @@ public void testBackupAfterSoftCommit() throws Exception { assertU(commit()); for (String name : Arrays.asList("backup1a", "backup1b")) { - simpleBackupCheck(new File(backupDir, "snapshot." + name ), - 1, oneDocSegmentFile); + simpleBackupCheck(new File(backupDir, "snapshot." + name), 1, oneDocSegmentFile); } - - { // But we should be able to confirm both docs appear in a new backup (not based on a previous snapshot) + + { // But we should be able to confirm both docs appear in a new backup (not based on a previous + // snapshot) final SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "name", "backup2", - "location", backupDir.getAbsolutePath(), - CoreAdminParams.BACKUP_INCREMENTAL, "false"), - resp); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), + "core", + DEFAULT_TEST_COLLECTION_NAME, + "name", + "backup2", + "location", + backupDir.getAbsolutePath(), + CoreAdminParams.BACKUP_INCREMENTAL, + "false"), + resp); assertNull("Backup should have succeeded", resp.getException()); simpleBackupCheck(new File(backupDir, "snapshot.backup2"), 2); } @@ -291,48 +348,56 @@ public void testBackupAfterSoftCommit() throws Exception { for (String snapName : Arrays.asList("snapshot1a", "snapshot1b")) { String name = "backup_from_" + snapName; SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(), - "core", DEFAULT_TEST_COLLECTION_NAME, - "name", name, - "commitName", snapName, - "location", backupDir.getAbsolutePath(), - CoreAdminParams.BACKUP_INCREMENTAL, "false"), - resp); - assertNull("Backup "+name+" should have succeeded", resp.getException()); - simpleBackupCheck(new File(backupDir, "snapshot." 
+ name),
-                        1, oneDocSegmentFile);
-
+      admin.handleRequestBody(
+          req(
+              CoreAdminParams.ACTION,
+              CoreAdminParams.CoreAdminAction.BACKUPCORE.toString(),
+              "core",
+              DEFAULT_TEST_COLLECTION_NAME,
+              "name",
+              name,
+              "commitName",
+              snapName,
+              "location",
+              backupDir.getAbsolutePath(),
+              CoreAdminParams.BACKUP_INCREMENTAL,
+              "false"),
+          resp);
+      assertNull("Backup " + name + " should have succeeded", resp.getException());
+      simpleBackupCheck(new File(backupDir, "snapshot." + name), 1, oneDocSegmentFile);
     }
     admin.close();
   }

   /**
-   * A simple sanity check that asserts the current weird behavior of DirectoryReader.openIfChanged()
-   * and demos how 'softCommit' can cause the IndexReader in use by SolrIndexSearcher to missrepresent what
-   * commit is "current". So Backup code should only ever "trust" the IndexCommit info available from the
-   * IndexDeletionPolicyWrapper
-   *
+   * A simple sanity check that asserts the current weird behavior of
+   * DirectoryReader.openIfChanged() and demos how 'softCommit' can cause the IndexReader in use by
+   * SolrIndexSearcher to misrepresent what commit is "current". So Backup code should only ever
+   * "trust" the IndexCommit info available from the IndexDeletionPolicyWrapper
+   *
    * @see LUCENE-9040
    * @see SOLR-13909
    */
   public void testDemoWhyBackupCodeShouldNeverUseIndexCommitFromSearcher() throws Exception {
     final long EXPECTED_GEN_OF_EMPTY_INDEX = 1L;
-
+
     // sanity check this is an empty index...
     assertQ(req("q", "*:*"), "//result[@numFound='0']");
-
+
     // sanity check what the searcher/reader of this empty index report about current commit
-    final IndexCommit empty = h.getCore().withSearcher(s -> {
-      // sanity check we are empty...
-      assertEquals(0L, (long) s.getIndexReader().numDocs());
-
-      // sanity check this is the initial commit..
-      final IndexCommit commit = s.getIndexReader().getIndexCommit();
-      assertEquals(EXPECTED_GEN_OF_EMPTY_INDEX, commit.getGeneration());
-      return commit;
-    });
+    final IndexCommit empty =
+        h.getCore()
+            .withSearcher(
+                s -> {
+                  // sanity check we are empty...
+                  assertEquals(0L, (long) s.getIndexReader().numDocs());
+
+                  // sanity check this is the initial commit..
+                  final IndexCommit commit = s.getIndexReader().getIndexCommit();
+                  assertEquals(EXPECTED_GEN_OF_EMPTY_INDEX, commit.getGeneration());
+                  return commit;
+                });

     // now let's add & soft commit 1 doc...
     assertU(adoc("id", "42"));
@@ -342,20 +407,26 @@ public void testDemoWhyBackupCodeShouldNeverUseIndexCommitFromSearcher() throws
     assertQ(req("q", "id:42"), "//result[@numFound='1']");

     // sanity check what the searcher/reader of this empty index report about current commit
-    IndexCommit oneDoc = h.getCore().withSearcher(s -> {
-      // sanity check this really is the searcher/reader that has the new doc...
-      assertEquals(1L, (long) s.getIndexReader().numDocs());
-
-      final IndexCommit commit = s.getIndexReader().getIndexCommit();
-      // WTF: how/why does this reader still have the same commit generation as before ? ? ? ? ?
-      assertEquals("WTF: This Reader (claims) the same generation as our previous pre-softCommif (empty) reader",
-                   EXPECTED_GEN_OF_EMPTY_INDEX, commit.getGeneration());
-      return commit;
-    });
-
-    assertEquals("WTF: Our two IndexCommits, which we know have different docs, claim to be equals",
-                 empty, oneDoc);
-
+    IndexCommit oneDoc =
+        h.getCore()
+            .withSearcher(
+                s -> {
+                  // sanity check this really is the searcher/reader that has the new doc...
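+                  // (see LUCENE-9040, cited in the javadoc above: a reader re-opened via
+                  // softCommit can still report the IndexCommit of the previous hard commit)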
+                  assertEquals(1L, (long) s.getIndexReader().numDocs());
+
+                  final IndexCommit commit = s.getIndexReader().getIndexCommit();
+                  // WTF: how/why does this reader still have the same commit generation as before?
+                  assertEquals(
+                      "WTF: This Reader (claims) the same generation as our previous pre-softCommit (empty) reader",
+                      EXPECTED_GEN_OF_EMPTY_INDEX,
+                      commit.getGeneration());
+                  return commit;
+                });
+
+    assertEquals(
+        "WTF: Our two IndexCommits, which we know have different docs, claim to be equals",
+        empty,
+        oneDoc);
   }

   /**
@@ -366,30 +437,32 @@ private static void simpleBackupCheck(final File backup, final int numDocs) thro
   }

   /**
-   * Simple check that the backup exists, is a valid index, and contains the expected number of docs.
-   * If expectedSegmentsFileName is non null then confirms that file exists in the bakup dir
-   * and that it is reported as the current segment file when opening a reader on that backup.
+   * Simple check that the backup exists, is a valid index, and contains the expected number of
+   * docs. If expectedSegmentsFileName is non-null then confirms that file exists in the backup dir
+   * and that it is reported as the current segment file when opening a reader on that
+   * backup.
    */
-  private static void simpleBackupCheck(final File backup, final int numDocs,
-                                        final String expectedSegmentsFileName) throws IOException {
+  private static void simpleBackupCheck(
+      final File backup, final int numDocs, final String expectedSegmentsFileName)
+      throws IOException {
     assertNotNull(backup);
     assertTrue("Backup doesn't exist" + backup.toString(), backup.exists());
     if (null != expectedSegmentsFileName) {
-      assertTrue(expectedSegmentsFileName + " doesn't exist in " + backup.toString(),
-                 new File(backup, expectedSegmentsFileName).exists());
+      assertTrue(
+          expectedSegmentsFileName + " doesn't exist in " + backup.toString(),
+          new File(backup, expectedSegmentsFileName).exists());
     }
     try (Directory dir = FSDirectory.open(backup.toPath())) {
       TestUtil.checkIndex(dir, true, true, true, null);
       try (DirectoryReader r = DirectoryReader.open(dir)) {
-        assertEquals("numDocs in " + backup.toString(),
-                     numDocs, r.numDocs());
+        assertEquals("numDocs in " + backup.toString(), numDocs, r.numDocs());
         if (null != expectedSegmentsFileName) {
-          assertEquals("segmentsFile of IndexCommit for: " + backup.toString(),
-                       expectedSegmentsFileName, r.getIndexCommit().getSegmentsFileName());
+          assertEquals(
+              "segmentsFile of IndexCommit for: " + backup.toString(),
+              expectedSegmentsFileName,
+              r.getIndexCommit().getSegmentsFileName());
         }
       }
     }
-  }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java
index 7726376eb2f..69d2ba9f7a1 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java
@@ -16,13 +16,14 @@
  */
 package org.apache.solr.handler;

+import static java.util.Arrays.asList;
+
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.function.Predicate;
-
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.request.LukeRequest;
 import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
@@ -38,8 +39,6 @@
 import org.apache.solr.util.RestTestHarness;
 import org.junit.Test;

-import static java.util.Arrays.asList;
-
 public class TestSolrConfigHandlerCloud extends
AbstractFullDistribZkTestBase { private static final long TIMEOUT_S = 10; @@ -52,17 +51,18 @@ public void test() throws Exception { testAdminPath(); } - private void testAdminPath() throws Exception{ - String testServerBaseUrl = getRandomServer(cloudClient,"collection1"); + private void testAdminPath() throws Exception { + String testServerBaseUrl = getRandomServer(cloudClient, "collection1"); RestTestHarness writeHarness = randomRestTestHarness(); - String payload = "{\n" + - "'create-requesthandler' : { 'name' : '/admin/luke', " + - "'class': 'org.apache.solr.handler.DumpRequestHandler'}}"; + String payload = + "{\n" + + "'create-requesthandler' : { 'name' : '/admin/luke', " + + "'class': 'org.apache.solr.handler.DumpRequestHandler'}}"; TestSolrConfigHandler.runConfigCommand(writeHarness, "/config", payload); - - TestSolrConfigHandler.testForResponseElement(writeHarness, + TestSolrConfigHandler.testForResponseElement( + writeHarness, testServerBaseUrl, "/config/overlay", cloudClient, @@ -70,14 +70,14 @@ private void testAdminPath() throws Exception{ "org.apache.solr.handler.DumpRequestHandler", TIMEOUT_S); - NamedList rsp = cloudClient.request(new LukeRequest()); - System.out.println(rsp); + NamedList rsp = cloudClient.request(new LukeRequest()); + System.out.println(rsp); } private void testReqHandlerAPIs() throws Exception { - String testServerBaseUrl = getRandomServer(cloudClient,"collection1"); + String testServerBaseUrl = getRandomServer(cloudClient, "collection1"); RestTestHarness writeHarness = randomRestTestHarness(); - TestSolrConfigHandler.reqhandlertests(writeHarness, testServerBaseUrl , cloudClient); + TestSolrConfigHandler.reqhandlertests(writeHarness, testServerBaseUrl, cloudClient); } public static String getRandomServer(CloudSolrClient cloudClient, String collName) { @@ -85,46 +85,51 @@ public static String getRandomServer(CloudSolrClient cloudClient, String collNam List urls = new ArrayList<>(); for (Slice slice : coll.getSlices()) { for (Replica replica : slice.getReplicas()) - urls.add(""+replica.getBaseUrl() + "/" + replica.get(ZkStateReader.CORE_NAME_PROP)); + urls.add("" + replica.getBaseUrl() + "/" + replica.get(ZkStateReader.CORE_NAME_PROP)); } return urls.get(random().nextInt(urls.size())); } - private void testReqParams() throws Exception{ - DocCollection coll = cloudClient.getZkStateReader().getClusterState().getCollection("collection1"); + private void testReqParams() throws Exception { + DocCollection coll = + cloudClient.getZkStateReader().getClusterState().getCollection("collection1"); List urls = new ArrayList<>(); for (Slice slice : coll.getSlices()) { for (Replica replica : slice.getReplicas()) - urls.add(""+replica.getBaseUrl() + "/" + replica.get(ZkStateReader.CORE_NAME_PROP)); + urls.add("" + replica.getBaseUrl() + "/" + replica.get(ZkStateReader.CORE_NAME_PROP)); } RestTestHarness writeHarness = randomRestTestHarness(); - String payload = " {\n" + - " 'set' : {'x': {" + - " 'a':'A val',\n" + - " 'b': 'B val'}\n" + - " }\n" + - " }"; - - - TestSolrConfigHandler.runConfigCommand(writeHarness,"/config/params", payload); - - Map result = TestSolrConfigHandler.testForResponseElement(null, - urls.get(random().nextInt(urls.size())), - "/config/params", - cloudClient, - asList("response", "params", "x", "a"), - "A val", - TIMEOUT_S); + String payload = + " {\n" + + " 'set' : {'x': {" + + " 'a':'A val',\n" + + " 'b': 'B val'}\n" + + " }\n" + + " }"; + + TestSolrConfigHandler.runConfigCommand(writeHarness, "/config/params", payload); + + Map result = + 
TestSolrConfigHandler.testForResponseElement( + null, + urls.get(random().nextInt(urls.size())), + "/config/params", + cloudClient, + asList("response", "params", "x", "a"), + "A val", + TIMEOUT_S); compareValues(result, "B val", asList("response", "params", "x", "b")); - payload = "{\n" + - "'update-requesthandler' : { 'name' : '/dump', 'class': 'org.apache.solr.handler.DumpRequestHandler' }\n" + - "}"; + payload = + "{\n" + + "'update-requesthandler' : { 'name' : '/dump', 'class': 'org.apache.solr.handler.DumpRequestHandler' }\n" + + "}"; TestSolrConfigHandler.runConfigCommand(writeHarness, "/config", payload); - TestSolrConfigHandler.testForResponseElement(null, + TestSolrConfigHandler.testForResponseElement( + null, urls.get(random().nextInt(urls.size())), "/config/overlay", cloudClient, @@ -132,16 +137,19 @@ private void testReqParams() throws Exception{ "/dump", TIMEOUT_S); - result = TestSolrConfigHandler.testForResponseElement(null, - urls.get(random().nextInt(urls.size())), - "/dump?useParams=x", - cloudClient, - asList("params", "a"), - "A val", - TIMEOUT_S); - compareValues(result, "", asList( "params", RequestParams.USEPARAM)); + result = + TestSolrConfigHandler.testForResponseElement( + null, + urls.get(random().nextInt(urls.size())), + "/dump?useParams=x", + cloudClient, + asList("params", "a"), + "A val", + TIMEOUT_S); + compareValues(result, "", asList("params", RequestParams.USEPARAM)); - TestSolrConfigHandler.testForResponseElement(null, + TestSolrConfigHandler.testForResponseElement( + null, urls.get(random().nextInt(urls.size())), "/dump?useParams=x&a=fomrequest", cloudClient, @@ -149,110 +157,116 @@ private void testReqParams() throws Exception{ "fomrequest", TIMEOUT_S); - payload = "{\n" + - "'create-requesthandler' : { 'name' : '/dump1', 'class': 'org.apache.solr.handler.DumpRequestHandler', 'useParams':'x' }\n" + - "}"; - - TestSolrConfigHandler.runConfigCommand(writeHarness,"/config", payload); - - result = TestSolrConfigHandler.testForResponseElement(null, - urls.get(random().nextInt(urls.size())), - "/config/overlay", - cloudClient, - asList("overlay", "requestHandler", "/dump1", "name"), - "/dump1", - TIMEOUT_S); - - result = TestSolrConfigHandler.testForResponseElement(null, - urls.get(random().nextInt(urls.size())), - "/dump1", - cloudClient, - asList("params", "a"), - "A val", - TIMEOUT_S); + payload = + "{\n" + + "'create-requesthandler' : { 'name' : '/dump1', 'class': 'org.apache.solr.handler.DumpRequestHandler', 'useParams':'x' }\n" + + "}"; + TestSolrConfigHandler.runConfigCommand(writeHarness, "/config", payload); + result = + TestSolrConfigHandler.testForResponseElement( + null, + urls.get(random().nextInt(urls.size())), + "/config/overlay", + cloudClient, + asList("overlay", "requestHandler", "/dump1", "name"), + "/dump1", + TIMEOUT_S); + + result = + TestSolrConfigHandler.testForResponseElement( + null, + urls.get(random().nextInt(urls.size())), + "/dump1", + cloudClient, + asList("params", "a"), + "A val", + TIMEOUT_S); writeHarness = randomRestTestHarness(); - payload = " {\n" + - " 'set' : {'y':{\n" + - " 'c':'CY val',\n" + - " 'b': 'BY val', " + - " 'i': 20, " + - " 'd': ['val 1', 'val 2']}\n" + - " }\n" + - " }"; - - - TestSolrConfigHandler.runConfigCommand(writeHarness,"/config/params", payload); - - result = TestSolrConfigHandler.testForResponseElement( - null, - urls.get(random().nextInt(urls.size())), - "/config/params", - cloudClient, - asList("response", "params", "y", "c"), - "CY val", - TIMEOUT_S); + payload = + " {\n" + + " 'set' : 
{'y':{\n" + + " 'c':'CY val',\n" + + " 'b': 'BY val', " + + " 'i': 20, " + + " 'd': ['val 1', 'val 2']}\n" + + " }\n" + + " }"; + + TestSolrConfigHandler.runConfigCommand(writeHarness, "/config/params", payload); + + result = + TestSolrConfigHandler.testForResponseElement( + null, + urls.get(random().nextInt(urls.size())), + "/config/params", + cloudClient, + asList("response", "params", "y", "c"), + "CY val", + TIMEOUT_S); compareValues(result, 20l, asList("response", "params", "y", "i")); - - result = TestSolrConfigHandler.testForResponseElement(null, - urls.get(random().nextInt(urls.size())), - "/dump?useParams=y", - cloudClient, - asList("params", "c"), - "CY val", - TIMEOUT_S); + result = + TestSolrConfigHandler.testForResponseElement( + null, + urls.get(random().nextInt(urls.size())), + "/dump?useParams=y", + cloudClient, + asList("params", "c"), + "CY val", + TIMEOUT_S); compareValues(result, "BY val", asList("params", "b")); compareValues(result, null, asList("params", "a")); - compareValues(result, Arrays.asList("val 1", "val 2") , asList("params", "d")); - compareValues(result, "20" , asList("params", "i")); - payload = " {\n" + - " 'update' : {'y': {\n" + - " 'c':'CY val modified',\n" + - " 'e':'EY val',\n" + - " 'b': 'BY val'" + - "}\n" + - " }\n" + - " }"; - - - TestSolrConfigHandler.runConfigCommand(writeHarness,"/config/params", payload); - - result = TestSolrConfigHandler.testForResponseElement( - null, - urls.get(random().nextInt(urls.size())), - "/config/params", - cloudClient, - asList("response", "params", "y", "c"), - "CY val modified", - TIMEOUT_S); + compareValues(result, Arrays.asList("val 1", "val 2"), asList("params", "d")); + compareValues(result, "20", asList("params", "i")); + payload = + " {\n" + + " 'update' : {'y': {\n" + + " 'c':'CY val modified',\n" + + " 'e':'EY val',\n" + + " 'b': 'BY val'" + + "}\n" + + " }\n" + + " }"; + + TestSolrConfigHandler.runConfigCommand(writeHarness, "/config/params", payload); + + result = + TestSolrConfigHandler.testForResponseElement( + null, + urls.get(random().nextInt(urls.size())), + "/config/params", + cloudClient, + asList("response", "params", "y", "c"), + "CY val modified", + TIMEOUT_S); compareValues(result, "EY val", asList("response", "params", "y", "e")); - - payload = " {\n" + - " 'set' : {'y': {\n" + - " 'p':'P val',\n" + - " 'q': 'Q val'" + - "}\n" + - " }\n" + - " }"; - - - TestSolrConfigHandler.runConfigCommand(writeHarness,"/config/params", payload); - result = TestSolrConfigHandler.testForResponseElement( - null, - urls.get(random().nextInt(urls.size())), - "/config/params", - cloudClient, - asList("response", "params", "y", "p"), - "P val", - TIMEOUT_S); + payload = + " {\n" + + " 'set' : {'y': {\n" + + " 'p':'P val',\n" + + " 'q': 'Q val'" + + "}\n" + + " }\n" + + " }"; + + TestSolrConfigHandler.runConfigCommand(writeHarness, "/config/params", payload); + result = + TestSolrConfigHandler.testForResponseElement( + null, + urls.get(random().nextInt(urls.size())), + "/config/params", + cloudClient, + asList("response", "params", "y", "p"), + "P val", + TIMEOUT_S); compareValues(result, null, asList("response", "params", "y", "c")); payload = " {'delete' : 'y'}"; - TestSolrConfigHandler.runConfigCommand(writeHarness,"/config/params", payload); + TestSolrConfigHandler.runConfigCommand(writeHarness, "/config/params", payload); TestSolrConfigHandler.testForResponseElement( null, urls.get(random().nextInt(urls.size())), @@ -264,24 +278,24 @@ private void testReqParams() throws Exception{ payload = " {'unset' : 
'y'}"; TestSolrConfigHandler.runConfigCommandExpectFailure( - writeHarness,"/config/params", payload, "Unknown operation 'unset'"); + writeHarness, "/config/params", payload, "Unknown operation 'unset'"); // deleting already deleted one should fail // error message should contain parameter set name payload = " {'delete' : 'y'}"; TestSolrConfigHandler.runConfigCommandExpectFailure( - writeHarness,"/config/params", payload, "Could not delete. No such params 'y' exist"); - + writeHarness, "/config/params", payload, "Could not delete. No such params 'y' exist"); } @SuppressWarnings({"unchecked"}) - public static void compareValues(Map result, Object expected, List jsonPath) { + public static void compareValues(Map result, Object expected, List jsonPath) { Object val = Utils.getObjectByPath(result, false, jsonPath); - assertTrue(StrUtils.formatString("Could not get expected value {0} for path {1} full output {2}", expected, jsonPath, result.toString()), - expected instanceof Predicate ? - ((Predicate)expected ).test(val) : - Objects.equals(expected, val) - ); + assertTrue( + StrUtils.formatString( + "Could not get expected value {0} for path {1} full output {2}", + expected, jsonPath, result.toString()), + expected instanceof Predicate + ? ((Predicate) expected).test(val) + : Objects.equals(expected, val)); } - } diff --git a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java index 3067511fde6..5f9f3a4e82e 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java +++ b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java @@ -16,6 +16,8 @@ */ package org.apache.solr.handler; +import static java.util.Arrays.asList; + import java.io.StringReader; import java.lang.invoke.MethodHandles; import java.nio.charset.StandardCharsets; @@ -25,7 +27,6 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; - import org.apache.http.HttpEntity; import org.apache.http.client.methods.HttpGet; import org.apache.http.util.EntityUtils; @@ -46,16 +47,16 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static java.util.Arrays.asList; - - public class TestSolrConfigHandlerConcurrent extends AbstractFullDistribZkTestBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @Test public void test() throws Exception { - Map editable_prop_map = (Map) Utils.fromJSONResource(getClass().getClassLoader(), "EditableSolrConfigAttributes.json"); + Map editable_prop_map = + (Map) + Utils.fromJSONResource( + getClass().getClassLoader(), "EditableSolrConfigAttributes.json"); Map caches = (Map) editable_prop_map.get("query"); setupRestTestHarnesses(); @@ -67,128 +68,142 @@ public void test() throws Exception { List errs = new ArrayList<>(); collectErrors.add(errs); Map value = (Map) e.getValue(); - Thread t = new Thread(() -> { - try { - invokeBulkCall((String)e.getKey() , errs, value); - } catch (Exception e1) { - e1.printStackTrace(); - } - }); + Thread t = + new Thread( + () -> { + try { + invokeBulkCall((String) e.getKey(), errs, value); + } catch (Exception e1) { + e1.printStackTrace(); + } + }); threads.add(t); t.start(); } } - for (Thread thread : threads) thread.join(); boolean success = true; for (List e : collectErrors) { - if(!e.isEmpty()){ + if (!e.isEmpty()) { success = false; log.error("{}", e); } - } assertTrue(collectErrors.toString(), success); - - } + 
private void invokeBulkCall(
+      String cacheName,
+      List errs,
+      // TODO this is unused - is that a bug?
+      Map val)
+      throws Exception {

-  private void invokeBulkCall(String cacheName, List errs,
-                              // TODO this is unused - is that a bug?
-                              Map val) throws Exception {
-
-    String payload = "{" +
-        "'set-property' : {'query.CACHENAME.size':'CACHEVAL1'," +
-        " 'query.CACHENAME.initialSize':'CACHEVAL2'}," +
-        "'set-property': {'query.CACHENAME.autowarmCount' : 'CACHEVAL3'}" +
-        "}";
+    String payload =
+        "{"
+            + "'set-property' : {'query.CACHENAME.size':'CACHEVAL1',"
+            + " 'query.CACHENAME.initialSize':'CACHEVAL2'},"
+            + "'set-property': {'query.CACHENAME.autowarmCount' : 'CACHEVAL3'}"
+            + "}";
     Set errmessages = new HashSet<>();
-    for(int i =1;i<2;i++){//make it ahigher number
+    for (int i = 1; i < 2; i++) { // make it a higher number
       RestTestHarness publisher = randomRestTestHarness(r);
       String response;
       String val1;
       String val2;
       String val3;
       try {
-        payload = payload.replaceAll("CACHENAME" , cacheName);
+        payload = payload.replaceAll("CACHENAME", cacheName);
         val1 = String.valueOf(10 * i + 1);
         payload = payload.replace("CACHEVAL1", val1);
         val2 = String.valueOf(10 * i + 2);
         payload = payload.replace("CACHEVAL2", val2);
         val3 = String.valueOf(10 * i + 3);
         payload = payload.replace("CACHEVAL3", val3);
-
+
         response = publisher.post("/config", SolrTestCaseJ4.json(payload));
       } finally {
         publisher.close();
       }
-
+
       Map map = (Map) Utils.fromJSONString(response);
       Object errors = map.get("errors");
-      if(errors!= null){
+      if (errors != null) {
         errs.add(new String(Utils.toJSON(errors), StandardCharsets.UTF_8));
         return;
       }
-      DocCollection coll = cloudClient.getZkStateReader().getClusterState().getCollection("collection1");
+      DocCollection coll =
+          cloudClient.getZkStateReader().getClusterState().getCollection("collection1");
       List urls = new ArrayList<>();
       for (Slice slice : coll.getSlices()) {
         for (Replica replica : slice.getReplicas())
-          urls.add(""+replica.getBaseUrl() + "/" + replica.get(ZkStateReader.CORE_NAME_PROP));
+          urls.add("" + replica.getBaseUrl() + "/" + replica.get(ZkStateReader.CORE_NAME_PROP));
       }
-
-      //get another node
+      // get another node
       String url = urls.get(urls.size() - 1);
       long startTime = System.nanoTime();
       long maxTimeoutSeconds = 20;
-      while ( TimeUnit.SECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutSeconds) {
+      while (TimeUnit.SECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS)
+          < maxTimeoutSeconds) {
         Thread.sleep(100);
         errmessages.clear();
         MapWriter respMap = getAsMap(url + "/config/overlay", cloudClient);
         MapWriter m = (MapWriter) respMap._get("overlay/props", null);
-        if(m == null) {
-          errmessages.add(StrUtils.formatString("overlay does not exist for cache: {0} , iteration: {1} response {2} ", cacheName, i, respMap.toString()));
+        if (m == null) {
+          errmessages.add(
+              StrUtils.formatString(
+                  "overlay does not exist for cache: {0} , iteration: {1} response {2} ",
+                  cacheName, i, respMap.toString()));
           continue;
         }
-
         Object o = m._get(asList("query", cacheName, "size"), null);
-        if(!val1.equals(o.toString())) errmessages.add(StrUtils.formatString("'size' property not set, expected = {0}, actual {1}", val1, o));
+        if (!val1.equals(o.toString()))
+          errmessages.add(
+              StrUtils.formatString(
+                  "'size' property not set, expected = {0}, actual {1}", val1, o));
         o = m._get(asList("query", cacheName, "initialSize"), null);
-        if(!val2.equals(o.toString())) errmessages.add(StrUtils.formatString("'initialSize' property not set, expected = {0}, actual
{1}", val2, o)); + if (!val2.equals(o.toString())) + errmessages.add( + StrUtils.formatString( + "'initialSize' property not set, expected = {0}, actual {1}", val2, o)); o = m._get(asList("query", cacheName, "autowarmCount"), null); - if(!val3.equals(o.toString())) errmessages.add(StrUtils.formatString("'autowarmCount' property not set, expected = {0}, actual {1}", val3, o)); - if(errmessages.isEmpty()) break; + if (!val3.equals(o.toString())) + errmessages.add( + StrUtils.formatString( + "'autowarmCount' property not set, expected = {0}, actual {1}", val3, o)); + if (errmessages.isEmpty()) break; } - if(!errmessages.isEmpty()) { + if (!errmessages.isEmpty()) { errs.addAll(errmessages); return; } } - } @SuppressWarnings({"rawtypes"}) - public static LinkedHashMapWriter getAsMap(String uri, CloudSolrClient cloudClient) throws Exception { - HttpGet get = new HttpGet(uri) ; + public static LinkedHashMapWriter getAsMap(String uri, CloudSolrClient cloudClient) + throws Exception { + HttpGet get = new HttpGet(uri); HttpEntity entity = null; try { entity = cloudClient.getLbClient().getHttpClient().execute(get).getEntity(); String response = EntityUtils.toString(entity, StandardCharsets.UTF_8); try { - return (LinkedHashMapWriter) Utils.MAPWRITEROBJBUILDER.apply(new JSONParser(new StringReader(response))).getVal(); + return (LinkedHashMapWriter) + Utils.MAPWRITEROBJBUILDER.apply(new JSONParser(new StringReader(response))).getVal(); } catch (JSONParser.ParseException e) { - log.error(response,e); + log.error(response, e); throw e; } } finally { diff --git a/solr/core/src/test/org/apache/solr/handler/TestStressIncrementalBackup.java b/solr/core/src/test/org/apache/solr/handler/TestStressIncrementalBackup.java index 4c4bdf417d8..39c2730a054 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestStressIncrementalBackup.java +++ b/solr/core/src/test/org/apache/solr/handler/TestStressIncrementalBackup.java @@ -17,6 +17,12 @@ package org.apache.solr.handler; +import static org.apache.solr.handler.TestStressThreadBackup.makeDoc; + +import java.lang.invoke.MethodHandles; +import java.nio.file.Path; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -32,149 +38,159 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.lang.invoke.MethodHandles; -import java.nio.file.Path; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; - -import static org.apache.solr.handler.TestStressThreadBackup.makeDoc; - -//@LuceneTestCase.Nightly +// @LuceneTestCase.Nightly @LuceneTestCase.SuppressCodecs({"SimpleText"}) public class TestStressIncrementalBackup extends SolrCloudTestCase { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - private Path backupPath; - private SolrClient adminClient; - private SolrClient coreClient; - - @Before - public void beforeTest() throws Exception { - backupPath = createTempDir(getTestClass().getSimpleName() + "_backups"); - System.setProperty("solr.allowPaths", backupPath.toString()); - - // NOTE: we don't actually care about using SolrCloud, but we want to use SolrClient and I can't - // bring myself to deal with the nonsense that is SolrJettyTestBase. 
- - // We do however explicitly want a fresh "cluster" every time a test is run - configureCluster(1) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) - .configure(); - - assertEquals(0, (CollectionAdminRequest.createCollection(DEFAULT_TEST_COLLECTION_NAME, "conf1", 1, 1) - .process(cluster.getSolrClient()).getStatus())); - adminClient = getHttpSolrClient(cluster.getJettySolrRunners().get(0).getBaseUrl().toString()); - initCoreNameAndSolrCoreClient(); + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + private Path backupPath; + private SolrClient adminClient; + private SolrClient coreClient; + + @Before + public void beforeTest() throws Exception { + backupPath = createTempDir(getTestClass().getSimpleName() + "_backups"); + System.setProperty("solr.allowPaths", backupPath.toString()); + + // NOTE: we don't actually care about using SolrCloud, but we want to use SolrClient and I can't + // bring myself to deal with the nonsense that is SolrJettyTestBase. + + // We do however explicitly want a fresh "cluster" every time a test is run + configureCluster(1) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .configure(); + + assertEquals( + 0, + (CollectionAdminRequest.createCollection(DEFAULT_TEST_COLLECTION_NAME, "conf1", 1, 1) + .process(cluster.getSolrClient()) + .getStatus())); + adminClient = getHttpSolrClient(cluster.getJettySolrRunners().get(0).getBaseUrl().toString()); + initCoreNameAndSolrCoreClient(); + } + + private void initCoreNameAndSolrCoreClient() { + // Sigh. + Replica r = + cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollection(DEFAULT_TEST_COLLECTION_NAME) + .getActiveSlices() + .iterator() + .next() + .getReplicas() + .iterator() + .next(); + coreName = r.getCoreName(); + coreClient = getHttpSolrClient(r.getCoreUrl()); + } + + @After + public void afterTest() throws Exception { + // we use a clean cluster instance for every test, so we need to clean it up + shutdownCluster(); + + if (null != adminClient) { + adminClient.close(); } - - private void initCoreNameAndSolrCoreClient() { - // Sigh. 
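+    // (beforeTest() created the collection with 1 shard x 1 replica, so the first replica of
+    // the first active slice is the only core in the cluster)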
- Replica r = cluster.getSolrClient().getZkStateReader().getClusterState() - .getCollection(DEFAULT_TEST_COLLECTION_NAME).getActiveSlices().iterator().next() - .getReplicas().iterator().next(); - coreName = r.getCoreName(); - coreClient = getHttpSolrClient(r.getCoreUrl()); + if (null != coreClient) { + coreClient.close(); } - @After - public void afterTest() throws Exception { - // we use a clean cluster instance for every test, so we need to clean it up - shutdownCluster(); + System.clearProperty("solr.allowPaths"); + } - if (null != adminClient) { - adminClient.close(); - } - if (null != coreClient) { - coreClient.close(); - } + @SuppressWarnings("AssertionFailureIgnored") // failure happens inside of a thread + public void testCoreAdminHandler() throws Exception { + final int numBackupIters = 20; // don't use 'atLeast', we don't want to blow up on nightly - System.clearProperty("solr.allowPaths"); - } + final AtomicReference heavyCommitFailure = new AtomicReference<>(); + final AtomicBoolean keepGoing = new AtomicBoolean(true); - @SuppressWarnings("AssertionFailureIgnored") // failure happens inside of a thread - public void testCoreAdminHandler() throws Exception { - final int numBackupIters = 20; // don't use 'atLeast', we don't want to blow up on nightly - - final AtomicReference heavyCommitFailure = new AtomicReference<>(); - final AtomicBoolean keepGoing = new AtomicBoolean(true); - - // this thread will do nothing but add/commit new 'dummy' docs over and over again as fast as possible - // to create a lot of index churn w/ segment merging - final Thread heavyCommitting = new Thread() { - public void run() { - try { - int docIdCounter = 0; - while (keepGoing.get()) { - docIdCounter++; - - final UpdateRequest req = new UpdateRequest().add(makeDoc("dummy_" + docIdCounter, "dummy")); - // always commit to force lots of new segments - req.setParam(UpdateParams.COMMIT,"true"); - req.setParam(UpdateParams.OPEN_SEARCHER,"false"); // we don't care about searching - - // frequently forceMerge to ensure segments are frequently deleted - if (0 == (docIdCounter % 13)) { // arbitrary - req.setParam(UpdateParams.OPTIMIZE, "true"); - req.setParam(UpdateParams.MAX_OPTIMIZE_SEGMENTS, "5"); // arbitrary - } - - log.info("Heavy Committing #{}: {}", docIdCounter, req); - final UpdateResponse rsp = req.process(coreClient); - assertEquals("Dummy Doc#" + docIdCounter + " add status: " + rsp.toString(), 0, rsp.getStatus()); - - } - } catch (Throwable t) { - heavyCommitFailure.set(t); + // this thread will do nothing but add/commit new 'dummy' docs over and over again as fast as + // possible to create a lot of index churn w/ segment merging + final Thread heavyCommitting = + new Thread() { + public void run() { + try { + int docIdCounter = 0; + while (keepGoing.get()) { + docIdCounter++; + + final UpdateRequest req = + new UpdateRequest().add(makeDoc("dummy_" + docIdCounter, "dummy")); + // always commit to force lots of new segments + req.setParam(UpdateParams.COMMIT, "true"); + req.setParam(UpdateParams.OPEN_SEARCHER, "false"); // we don't care about searching + + // frequently forceMerge to ensure segments are frequently deleted + if (0 == (docIdCounter % 13)) { // arbitrary + req.setParam(UpdateParams.OPTIMIZE, "true"); + req.setParam(UpdateParams.MAX_OPTIMIZE_SEGMENTS, "5"); // arbitrary } + + log.info("Heavy Committing #{}: {}", docIdCounter, req); + final UpdateResponse rsp = req.process(coreClient); + assertEquals( + "Dummy Doc#" + docIdCounter + " add status: " + rsp.toString(), + 0, + 
rsp.getStatus());
+          }
+        } catch (Throwable t) {
+          heavyCommitFailure.set(t);
         }
       }
     };
-
-    heavyCommitting.start();
-    try {
-      // now have the "main" test thread try to take a serious of backups/snapshots
-      // while adding other "real" docs
+    heavyCommitting.start();
+    try {
+      // now have the "main" test thread try to take a series of backups/snapshots
+      // while adding other "real" docs

-      // NOTE #1: start at i=1 for 'id' & doc counting purposes...
-      // NOTE #2: abort quickly if the oher thread reports a heavyCommitFailure...
-      for (int i = 1; (i <= numBackupIters && null == heavyCommitFailure.get()); i++) {
+      // NOTE #1: start at i=1 for 'id' & doc counting purposes...
+      // NOTE #2: abort quickly if the other thread reports a heavyCommitFailure...
+      for (int i = 1; (i <= numBackupIters && null == heavyCommitFailure.get()); i++) {

-        // in each iteration '#i', the commit we create should have exactly 'i' documents in
-        // it with the term 'type_s:real' (regardless of what the other thread does with dummy docs)
+        // in each iteration '#i', the commit we create should have exactly 'i' documents in
+        // it with the term 'type_s:real' (regardless of what the other thread does with dummy docs)

-        // add & commit a doc #i
-        final UpdateRequest req = new UpdateRequest().add(makeDoc("doc_" + i, "real"));
-        req.setParam(UpdateParams.COMMIT,"true"); // make immediately available for backup
-        req.setParam(UpdateParams.OPEN_SEARCHER,"false"); // we don't care about searching
+        // add & commit a doc #i
+        final UpdateRequest req = new UpdateRequest().add(makeDoc("doc_" + i, "real"));
+        req.setParam(UpdateParams.COMMIT, "true"); // make immediately available for backup
+        req.setParam(UpdateParams.OPEN_SEARCHER, "false"); // we don't care about searching

-        final UpdateResponse rsp = req.process(coreClient);
-        assertEquals("Real Doc#" + i + " add status: " + rsp.toString(), 0, rsp.getStatus());
+        final UpdateResponse rsp = req.process(coreClient);
+        assertEquals("Real Doc#" + i + " add status: " + rsp.toString(), 0, rsp.getStatus());

-        makeBackup();
-      }
+        makeBackup();
+      }

-    } finally {
-      keepGoing.set(false);
-      heavyCommitting.join();
-    }
-    assertNull(heavyCommitFailure.get());
+    } finally {
+      keepGoing.set(false);
+      heavyCommitting.join();
     }
rsp.getStatus()); } - + } } diff --git a/solr/core/src/test/org/apache/solr/handler/TestStressThreadBackup.java b/solr/core/src/test/org/apache/solr/handler/TestStressThreadBackup.java index 93eb6cbf430..59090847123 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestStressThreadBackup.java +++ b/solr/core/src/test/org/apache/solr/handler/TestStressThreadBackup.java @@ -16,6 +16,19 @@ */ package org.apache.solr.handler; +import java.io.File; +import java.io.IOException; +import java.lang.invoke.MethodHandles; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; +import java.util.Queue; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.Term; import org.apache.lucene.store.Directory; @@ -46,22 +59,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.File; -import java.io.IOException; -import java.lang.invoke.MethodHandles; -import java.util.Arrays; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.Queue; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - @Nightly -@SuppressCodecs({"SimpleText"}) // Backups do checksum validation against a footer value not present in 'SimpleText' +// Backups do checksum validation against a footer value not present in 'SimpleText' +@SuppressCodecs({"SimpleText"}) public class TestStressThreadBackup extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -87,15 +87,19 @@ public void beforeTest() throws Exception { // NOTE: we don't actually care about using SolrCloud, but we want to use SolrClient and I can't // bring myself to deal with the nonsense that is SolrJettyTestBase. 
- + // We do however explicitly want a fresh "cluster" every time a test is run configureCluster(1) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .configure(); - assertEquals(0, (CollectionAdminRequest.createCollection(DEFAULT_TEST_COLLECTION_NAME, "conf1", 1, 1) - .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) - .process(cluster.getSolrClient()).getStatus())); + assertEquals( + 0, + (CollectionAdminRequest.createCollection(DEFAULT_TEST_COLLECTION_NAME, "conf1", 1, 1) + .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) + .process(cluster.getSolrClient()) + .getStatus())); adminClient = getHttpSolrClient(cluster.getJettySolrRunners().get(0).getBaseUrl().toString()); initCoreNameAndSolrCoreClient(); } @@ -104,7 +108,7 @@ public void beforeTest() throws Exception { public void afterTest() throws Exception { // we use a clean cluster instance for every test, so we need to clean it up shutdownCluster(); - + if (null != adminClient) { adminClient.close(); } @@ -123,95 +127,105 @@ public void testCoreAdminHandler() throws Exception { public void testReplicationHandler() throws Exception { // Create a custom BackupAPIImpl which uses ReplicatoinHandler for the backups // but still defaults to CoreAdmin for making named snapshots (since that's what's documented) - testSnapshotsAndBackupsDuringConcurrentCommitsAndOptimizes(new BackupAPIImpl() { - final BackupStatusChecker backupStatus = new BackupStatusChecker(coreClient); - /** no solrj API for ReplicationHandler */ - private GenericSolrRequest makeReplicationReq(SolrParams p) { - return new GenericSolrRequest(GenericSolrRequest.METHOD.GET, "/replication", p); - } - - /** - * Override default backup impl to hit ReplicationHandler, - * and then poll that same handler until success - */ - public void makeBackup(final String backupName, final String snapName) throws Exception { - final TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); - ModifiableSolrParams p = params("command", "backup", - "name", backupName, - CoreAdminParams.BACKUP_LOCATION, backupDir.getAbsolutePath()); - if (null != snapName) { - p.add(CoreAdminParams.COMMIT_NAME, snapName); - } - makeReplicationReq(p).process(coreClient); - backupStatus.waitForBackupSuccess(backupName, timeout); - } - }); - + testSnapshotsAndBackupsDuringConcurrentCommitsAndOptimizes( + new BackupAPIImpl() { + final BackupStatusChecker backupStatus = new BackupStatusChecker(coreClient); + /** no solrj API for ReplicationHandler */ + private GenericSolrRequest makeReplicationReq(SolrParams p) { + return new GenericSolrRequest(GenericSolrRequest.METHOD.GET, "/replication", p); + } + + /** + * Override default backup impl to hit ReplicationHandler, and then poll that same handler + * until success + */ + public void makeBackup(final String backupName, final String snapName) throws Exception { + final TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); + ModifiableSolrParams p = + params( + "command", + "backup", + "name", + backupName, + CoreAdminParams.BACKUP_LOCATION, + backupDir.getAbsolutePath()); + if (null != snapName) { + p.add(CoreAdminParams.COMMIT_NAME, snapName); + } + makeReplicationReq(p).process(coreClient); + backupStatus.waitForBackupSuccess(backupName, timeout); + } + }); } @SuppressWarnings("AssertionFailureIgnored") // failure happens inside of a thread - public void 
testSnapshotsAndBackupsDuringConcurrentCommitsAndOptimizes(final BackupAPIImpl impl) throws Exception { + public void testSnapshotsAndBackupsDuringConcurrentCommitsAndOptimizes(final BackupAPIImpl impl) + throws Exception { final int numBackupIters = 20; // don't use 'atLeast', we don't want to blow up on nightly - + final AtomicReference heavyCommitFailure = new AtomicReference<>(); final AtomicBoolean keepGoing = new AtomicBoolean(true); - // this thread will do nothing but add/commit new 'dummy' docs over and over again as fast as possible - // to create a lot of index churn w/ segment merging - final Thread heavyCommitting = new Thread() { - public void run() { - try { - int docIdCounter = 0; - while (keepGoing.get()) { - docIdCounter++; - - final UpdateRequest req = new UpdateRequest().add(makeDoc("dummy_" + docIdCounter, "dummy")); - // always commit to force lots of new segments - req.setParam(UpdateParams.COMMIT,"true"); - req.setParam(UpdateParams.OPEN_SEARCHER,"false"); // we don't care about searching - - // frequently forceMerge to ensure segments are frequently deleted - if (0 == (docIdCounter % 13)) { // arbitrary - req.setParam(UpdateParams.OPTIMIZE, "true"); - req.setParam(UpdateParams.MAX_OPTIMIZE_SEGMENTS, "5"); // arbitrary + // this thread will do nothing but add/commit new 'dummy' docs over and over again as fast as + // possible to create a lot of index churn w/ segment merging + final Thread heavyCommitting = + new Thread() { + public void run() { + try { + int docIdCounter = 0; + while (keepGoing.get()) { + docIdCounter++; + + final UpdateRequest req = + new UpdateRequest().add(makeDoc("dummy_" + docIdCounter, "dummy")); + // always commit to force lots of new segments + req.setParam(UpdateParams.COMMIT, "true"); + req.setParam(UpdateParams.OPEN_SEARCHER, "false"); // we don't care about searching + + // frequently forceMerge to ensure segments are frequently deleted + if (0 == (docIdCounter % 13)) { // arbitrary + req.setParam(UpdateParams.OPTIMIZE, "true"); + req.setParam(UpdateParams.MAX_OPTIMIZE_SEGMENTS, "5"); // arbitrary + } + + log.info("Heavy Committing #{}: {}", docIdCounter, req); + final UpdateResponse rsp = req.process(coreClient); + assertEquals( + "Dummy Doc#" + docIdCounter + " add status: " + rsp.toString(), + 0, + rsp.getStatus()); + } + } catch (Throwable t) { + heavyCommitFailure.set(t); } - - log.info("Heavy Committing #{}: {}", docIdCounter, req); - final UpdateResponse rsp = req.process(coreClient); - assertEquals("Dummy Doc#" + docIdCounter + " add status: " + rsp.toString(), 0, rsp.getStatus()); - } - } catch (Throwable t) { - heavyCommitFailure.set(t); - } - } - }; - + }; + heavyCommitting.start(); try { // now have the "main" test thread try to take a serious of backups/snapshots // while adding other "real" docs - + final Queue namedSnapshots = new LinkedList<>(); // NOTE #1: start at i=1 for 'id' & doc counting purposes... // NOTE #2: abort quickly if the oher thread reports a heavyCommitFailure... 
for (int i = 1; (i <= numBackupIters && null == heavyCommitFailure.get()); i++) { - + // in each iteration '#i', the commit we create should have exactly 'i' documents in // it with the term 'type_s:real' (regardless of what the other thread does with dummy docs) - + // add & commit a doc #i final UpdateRequest req = new UpdateRequest().add(makeDoc("doc_" + i, "real")); - req.setParam(UpdateParams.COMMIT,"true"); // make immediately available for backup - req.setParam(UpdateParams.OPEN_SEARCHER,"false"); // we don't care about searching - + req.setParam(UpdateParams.COMMIT, "true"); // make immediately available for backup + req.setParam(UpdateParams.OPEN_SEARCHER, "false"); // we don't care about searching + final UpdateResponse rsp = req.process(coreClient); assertEquals("Real Doc#" + i + " add status: " + rsp.toString(), 0, rsp.getStatus()); // create a backup of the 'current' index impl.makeBackup("backup_currentAt_" + i); - + // verify backup is valid and has the number of 'real' docs we expect... validateBackup("backup_currentAt_" + i); @@ -219,7 +233,7 @@ public void run() { // NOTE: we don't want to do this too often, or the SnapShotMetadataManager will protect // too many segment files "long term". It's more important to stress the thread contention // between backups calling save/release vs the DelPolicy trying to delete segments - if ( 0 == random().nextInt(7 + namedSnapshots.size()) ) { + if (0 == random().nextInt(7 + namedSnapshots.size())) { final String snapshotName = "snapshot_" + i; log.info("Creating snapshot: {}", snapshotName); impl.makeSnapshot(snapshotName); @@ -229,8 +243,8 @@ public void run() { // occasionally make a backup of a snapshot and remove it // the odds of doing this increase based on how many snapshots currently exist, // and how few iterations we have left - if (3 < namedSnapshots.size() && - random().nextInt(3 + numBackupIters - i) < random().nextInt(namedSnapshots.size())) { + if (3 < namedSnapshots.size() + && random().nextInt(3 + numBackupIters - i) < random().nextInt(namedSnapshots.size())) { assert 0 < namedSnapshots.size() : "Someone broke the conditionl"; final String snapshotName = namedSnapshots.poll(); @@ -250,16 +264,17 @@ public void run() { } } - + } finally { keepGoing.set(false); heavyCommitting.join(); } assertNull(heavyCommitFailure.get()); - { log.info("Done with (concurrent) updates, Deleting all docs..."); + { + log.info("Done with (concurrent) updates, Deleting all docs..."); final UpdateRequest delAll = new UpdateRequest().deleteByQuery("*:*"); - delAll.setParam(UpdateParams.COMMIT,"true"); + delAll.setParam(UpdateParams.COMMIT, "true"); delAll.setParam(UpdateParams.OPTIMIZE, "true"); delAll.setParam(UpdateParams.MAX_OPTIMIZE_SEGMENTS, "1"); // purge as many files as possible final UpdateResponse delRsp = delAll.process(coreClient); @@ -268,11 +283,12 @@ public void run() { { // Validate some backups at random... 
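+      // (each backup is an independent point-in-time copy of the index files, so deleting
+      // every live document must not change what any of them contain)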
      final int numBackupsToCheck = atLeast(1);
-      log.info("Validating {} random backups to ensure they are un-affected by deleting all docs...",
-               numBackupsToCheck);
+      log.info(
+          "Validating {} random backups to ensure they are un-affected by deleting all docs...",
+          numBackupsToCheck);
       final List allBackups = Arrays.asList(backupDir.listFiles());
       // insure consistent (arbitrary) ordering before shuffling
-      Collections.sort(allBackups);
+      Collections.sort(allBackups);
       Collections.shuffle(allBackups, random());
       for (int i = 0; i < numBackupsToCheck; i++) {
         final File backup = allBackups.get(i);
@@ -280,9 +296,10 @@ public void run() {
       }
     }
   }
-
+
   /**
-   * Given a backup name, extrats the numberic suffix identifying how many "real" docs should be in it
+   * Given a backup name, extracts the numeric suffix identifying how many "real" docs should be in
+   * it
    *
    * @see #ENDS_WITH_INT_DIGITS
    */
@@ -291,22 +308,22 @@ private static int getNumRealDocsFromBackupName(final String backupName) {
     assertTrue("Backup name does not end with int digits: " + backupName, m.find());
     return Integer.parseInt(m.group());
   }
-
-  /**
-   * Validates a backup exists, passes check index, and contains a number of "real" documents
-   * that match it's name
-   *
+
+  /**
+   * Validates a backup exists, passes check index, and contains a number of "real" documents that
+   * match its name
+   *
+   * @see #validateBackup(File)
    */
   private void validateBackup(final String backupName) throws IOException {
     final File backup = new File(backupDir, "snapshot." + backupName);
     validateBackup(backup);
   }
-
-  /**
+
+  /**
    * Validates a backup dir exists, passes check index, and contains a number of "real" documents
    * that match it's name
-   *
+   *
    * @see #getNumRealDocsFromBackupName
    */
   private void validateBackup(final File backup) throws IOException {
@@ -315,19 +332,21 @@ private void validateBackup(final File backup) throws IOException {
     final Matcher m = ENDS_WITH_INT_DIGITS.matcher(backup.getName());
     assertTrue("Backup dir name does not end with int digits: " + backup.toString(), m.find());
     final int numRealDocsExpected = Integer.parseInt(m.group());
-
+
     try (Directory dir = FSDirectory.open(backup.toPath())) {
       TestUtil.checkIndex(dir, true, true, true, null);
       try (DirectoryReader r = DirectoryReader.open(dir)) {
-        assertEquals("num real docs in " + backup.toString(),
-                     numRealDocsExpected, r.docFreq(new Term("type_s","real")));
+        assertEquals(
+            "num real docs in " + backup.toString(),
+            numRealDocsExpected,
+            r.docFreq(new Term("type_s", "real")));
       }
     }
   }
-
-
-  /**
-   * Creates a "large" document with lots of fields (to stimulate lots of files in each segment)
+
+  /**
+   * Creates a "large" document with lots of fields (to stimulate lots of files in each segment)
+   *
    * @param id the uniqueKey
    * @param type the type of the doc for use in the 'type_s' field (for term counting later)
    */
@@ -341,57 +360,82 @@ static SolrInputDocument makeDoc(String id, String type) {
   private void initCoreNameAndSolrCoreClient() {
     // Sigh.
-    Replica r = cluster.getSolrClient().getZkStateReader().getClusterState()
-      .getCollection(DEFAULT_TEST_COLLECTION_NAME).getActiveSlices().iterator().next()
-      .getReplicas().iterator().next();
+    Replica r =
+        cluster
+            .getSolrClient()
+            .getZkStateReader()
+            .getClusterState()
+            .getCollection(DEFAULT_TEST_COLLECTION_NAME)
+            .getActiveSlices()
+            .iterator()
+            .next()
+            .getReplicas()
+            .iterator()
+            .next();
     coreName = r.getCoreName();
     coreClient = getHttpSolrClient(r.getCoreUrl());
   }

-  /**
-   * API for taking backups and snapshots that can hide the impl quirks of
-   * using ReplicationHandler vs CoreAdminHandler (the default)
+  /**
+   * API for taking backups and snapshots that can hide the impl quirks of using ReplicationHandler
+   * vs CoreAdminHandler (the default)
    */
   private class BackupAPIImpl {
     /** TODO: SOLR-9239, no solrj API for CoreAdmin Backups */
     protected GenericSolrRequest makeCoreAdmin(CoreAdminAction action, SolrParams p) {
-      return new GenericSolrRequest(GenericSolrRequest.METHOD.POST, "/admin/cores",
-          SolrParams.wrapDefaults(params(CoreAdminParams.ACTION, action.toString()), p));
+      return new GenericSolrRequest(
+          GenericSolrRequest.METHOD.POST,
+          "/admin/cores",
+          SolrParams.wrapDefaults(params(CoreAdminParams.ACTION, action.toString()), p));
     }

-    /** Make a backup or the named commit snapshot (or null for latest), and only return if successful */
+    /**
+     * Make a backup of the named commit snapshot (or null for latest), and only return if
+     * successful
+     */
     public void makeBackup(final String backupName) throws Exception {
       makeBackup(backupName, null);
     }

     /** Make a backup or latest commit, and only return if successful */
     public void makeBackup(final String backupName, final String snapName) throws Exception {
-      ModifiableSolrParams p = params(CoreAdminParams.CORE, coreName,
-                                      CoreAdminParams.NAME, backupName,
-                                      CoreAdminParams.BACKUP_LOCATION, backupDir.getAbsolutePath(),
-                                      CoreAdminParams.BACKUP_INCREMENTAL, "false");
+      ModifiableSolrParams p =
+          params(
+              CoreAdminParams.CORE,
+              coreName,
+              CoreAdminParams.NAME,
+              backupName,
+              CoreAdminParams.BACKUP_LOCATION,
+              backupDir.getAbsolutePath(),
+              CoreAdminParams.BACKUP_INCREMENTAL,
+              "false");
       if (null != snapName) {
         p.add(CoreAdminParams.COMMIT_NAME, snapName);
       }
       makeCoreAdmin(CoreAdminAction.BACKUPCORE, p).process(adminClient);
       // CoreAdmin BACKUPCORE is synchronous by default, no need to wait for anything.
     }
-
+
     /** Make a named snapshot, and only return if successful */
     public void makeSnapshot(final String snapName) throws Exception {
-      makeCoreAdmin(CoreAdminAction.CREATESNAPSHOT,
-                    params(CoreAdminParams.CORE, coreName,
-                           CoreAdminParams.COMMIT_NAME, snapName)).process(adminClient);
+      makeCoreAdmin(
+              CoreAdminAction.CREATESNAPSHOT,
+              params(
+                  CoreAdminParams.CORE, coreName,
+                  CoreAdminParams.COMMIT_NAME, snapName))
+          .process(adminClient);
       // CoreAdmin CREATESNAPSHOT is synchronous by default, no need to wait for anything.
     }
-
+
     /** Delete a named snapshot, and only return if successful */
     public void deleteSnapshot(final String snapName) throws Exception {
-      makeCoreAdmin(CoreAdminAction.DELETESNAPSHOT,
-                    params(CoreAdminParams.CORE, coreName,
-                           CoreAdminParams.COMMIT_NAME, snapName)).process(adminClient);
+      makeCoreAdmin(
+              CoreAdminAction.DELETESNAPSHOT,
+              params(
+                  CoreAdminParams.CORE, coreName,
+                  CoreAdminParams.COMMIT_NAME, snapName))
+          .process(adminClient);
       // CoreAdmin DELETESNAPSHOT is synchronous by default, no need to wait for anything.
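+      // (usage, as driven by the test above: makeSnapshot("snapshot_" + i) pins a commit,
+      // makeBackup("backup_from_" + snapName, snapName) copies that pinned commit, and
+      // deleteSnapshot(snapName) releases it so the deletion policy can reclaim its files)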
} } - } diff --git a/solr/core/src/test/org/apache/solr/handler/TestSystemCollAutoCreate.java b/solr/core/src/test/org/apache/solr/handler/TestSystemCollAutoCreate.java index 94846d8668c..44c8c974fcd 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestSystemCollAutoCreate.java +++ b/solr/core/src/test/org/apache/solr/handler/TestSystemCollAutoCreate.java @@ -17,14 +17,14 @@ package org.apache.solr.handler; - import org.apache.solr.cloud.AbstractFullDistribZkTestBase; import org.apache.solr.common.cloud.DocCollection; public class TestSystemCollAutoCreate extends AbstractFullDistribZkTestBase { - // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // annotated on: 24-Dec-2018 public void testAutoCreate() throws Exception { - TestBlobHandler.checkBlobPost(cloudJettys.get(0).jetty.getBaseUrl().toExternalForm(), cloudClient); - DocCollection sysColl = cloudClient.getZkStateReader().getClusterState().getCollection(".system"); + TestBlobHandler.checkBlobPost( + cloudJettys.get(0).jetty.getBaseUrl().toExternalForm(), cloudClient); + DocCollection sysColl = + cloudClient.getZkStateReader().getClusterState().getCollection(".system"); } } diff --git a/solr/core/src/test/org/apache/solr/handler/ThrowErrorOnInitRequestHandler.java b/solr/core/src/test/org/apache/solr/handler/ThrowErrorOnInitRequestHandler.java index 0ac4bf2fa48..6d07e9867a5 100644 --- a/solr/core/src/test/org/apache/solr/handler/ThrowErrorOnInitRequestHandler.java +++ b/solr/core/src/test/org/apache/solr/handler/ThrowErrorOnInitRequestHandler.java @@ -17,20 +17,15 @@ package org.apache.solr.handler; import java.io.IOException; - import org.apache.solr.common.util.NamedList; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.security.AuthorizationContext; -/** - * throws a {@link java.lang.Error} on init for testing purposes - */ -public class ThrowErrorOnInitRequestHandler extends RequestHandlerBase -{ +/** throws a {@link java.lang.Error} on init for testing purposes */ +public class ThrowErrorOnInitRequestHandler extends RequestHandlerBase { @Override - public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws IOException - { + public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws IOException { /* NOOP */ } diff --git a/solr/core/src/test/org/apache/solr/handler/V2ApiIntegrationTest.java b/solr/core/src/test/org/apache/solr/handler/V2ApiIntegrationTest.java index 1be56ee2a3a..5336b2ed92d 100644 --- a/solr/core/src/test/org/apache/solr/handler/V2ApiIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/handler/V2ApiIntegrationTest.java @@ -17,6 +17,11 @@ package org.apache.solr.handler; +import java.io.IOException; +import java.nio.file.Paths; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.apache.solr.client.solrj.ResponseParser; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrServerException; @@ -33,12 +38,6 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.io.IOException; -import java.nio.file.Paths; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - public class V2ApiIntegrationTest extends SolrCloudTestCase { private static String COLL_NAME = "collection1"; @@ -46,7 +45,8 @@ public class V2ApiIntegrationTest extends SolrCloudTestCase { public static void createCluster() throws Exception { 
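+    // ("managed.schema.mutable" must be set before any node starts; the cloud-managed
+    // configset presumably reads this property to decide whether schema edits are allowed)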
System.setProperty("managed.schema.mutable", "true"); configureCluster(2) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-managed").resolve("conf")) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-managed").resolve("conf")) .configure(); CollectionAdminRequest.createCollection(COLL_NAME, "conf1", 1, 2) .process(cluster.getSolrClient()); @@ -62,14 +62,19 @@ public void testWelcomeMessage() throws Exception { assertEquals(0, res.getStatus()); } - private void testException(ResponseParser responseParser, int expectedCode, String path, String payload) throws IOException, SolrServerException { - V2Request v2Request = new V2Request.Builder(path) - .withMethod(SolrRequest.METHOD.POST) - .withPayload(payload) - .build(); + private void testException( + ResponseParser responseParser, int expectedCode, String path, String payload) + throws IOException, SolrServerException { + V2Request v2Request = + new V2Request.Builder(path) + .withMethod(SolrRequest.METHOD.POST) + .withPayload(payload) + .build(); v2Request.setResponseParser(responseParser); - BaseHttpSolrClient.RemoteSolrException ex = expectThrows(BaseHttpSolrClient.RemoteSolrException.class, - () -> v2Request.process(cluster.getSolrClient())); + BaseHttpSolrClient.RemoteSolrException ex = + expectThrows( + BaseHttpSolrClient.RemoteSolrException.class, + () -> v2Request.process(cluster.getSolrClient())); assertEquals(expectedCode, ex.code()); } @@ -77,15 +82,17 @@ private void testException(ResponseParser responseParser, int expectedCode, Stri public void testException() throws Exception { String notFoundPath = "/c/" + COLL_NAME + "/abccdef"; String incorrectPayload = "{rebalance-leaders: {maxAtOnce: abc, maxWaitSeconds: xyz}}"; - testException(new XMLResponseParser(),404, - notFoundPath, incorrectPayload); - testException(new DelegationTokenResponse.JsonMapResponseParser(),404, - notFoundPath, incorrectPayload); - testException(new BinaryResponseParser(),404, - notFoundPath, incorrectPayload); + testException(new XMLResponseParser(), 404, notFoundPath, incorrectPayload); + testException( + new DelegationTokenResponse.JsonMapResponseParser(), 404, notFoundPath, incorrectPayload); + testException(new BinaryResponseParser(), 404, notFoundPath, incorrectPayload); testException(new XMLResponseParser(), 400, "/c/" + COLL_NAME, incorrectPayload); testException(new BinaryResponseParser(), 400, "/c/" + COLL_NAME, incorrectPayload); - testException(new DelegationTokenResponse.JsonMapResponseParser(), 400, "/c/" + COLL_NAME, incorrectPayload); + testException( + new DelegationTokenResponse.JsonMapResponseParser(), + 400, + "/c/" + COLL_NAME, + incorrectPayload); } private long getStatus(V2Response response) { @@ -100,12 +107,14 @@ private long getStatus(V2Response response) { @Test public void testIntrospect() throws Exception { ModifiableSolrParams params = new ModifiableSolrParams(); - params.set("command","XXXX"); + params.set("command", "XXXX"); params.set("method", "POST"); - Map result = resAsMap(cluster.getSolrClient(), - new V2Request.Builder("/c/"+COLL_NAME+"/_introspect") - .withParams(params).build()); - assertEquals("Command not found!", Utils.getObjectByPath(result, false, "/spec[0]/commands/XXXX")); + Map result = + resAsMap( + cluster.getSolrClient(), + new V2Request.Builder("/c/" + COLL_NAME + "/_introspect").withParams(params).build()); + assertEquals( + "Command not found!", Utils.getObjectByPath(result, false, "/spec[0]/commands/XXXX")); } @Test @@ -120,7 +129,9 @@ public void 
testWTParam() throws Exception { assertFalse(respString.contains("<h2>HTTP ERROR 500</h2>")); assertFalse(respString.contains("500")); assertFalse(respString.contains("NullPointerException")); - assertFalse(respString.contains("<p>Problem accessing /solr/____v2/c/collection1/get/_introspect. Reason:")); + assertFalse( + respString.contains( + "<p>Problem accessing /solr/____v2/c/collection1/get/_introspect. Reason:")); // since no-op response writer is used, doing contains match assertTrue(respString.contains("/c/collection1/get"));
@@ -130,26 +141,42 @@ public void testWTParam() throws Exception { respString = resp.toString(); assertFalse(respString.contains("<h2>HTTP ERROR 500</h2>")); - assertFalse(respString.contains("<p>Problem accessing /solr/____v2/c/collection1/get/_introspect. Reason:")); + assertFalse( + respString.contains( + "<p>Problem accessing /solr/____v2/c/collection1/get/_introspect. Reason:")); assertEquals("/c/collection1/get", Utils.getObjectByPath(resp, true, "/spec[0]/url/paths[0]")); assertEquals(respString, 0, Utils.getObjectByPath(resp, true, "/responseHeader/status")); }
@Test public void testSingleWarning() throws Exception { - NamedList resp = cluster.getSolrClient().request( - new V2Request.Builder("/c/"+COLL_NAME+"/_introspect").build()); + NamedList resp = + cluster + .getSolrClient() + .request(new V2Request.Builder("/c/" + COLL_NAME + "/_introspect").build()); List warnings = resp.getAll("WARNING"); assertEquals(1, warnings.size()); }
@Test public void testSetPropertyValidationOfCluster() throws IOException, SolrServerException { - NamedList resp = cluster.getSolrClient().request( - new V2Request.Builder("/cluster").withMethod(SolrRequest.METHOD.POST).withPayload("{set-property: {name: maxCoresPerNode, val:42}}").build()); + NamedList resp = + cluster + .getSolrClient() + .request( + new V2Request.Builder("/cluster") + .withMethod(SolrRequest.METHOD.POST) + .withPayload("{set-property: {name: maxCoresPerNode, val:42}}") + .build()); assertTrue(resp.toString().contains("status=0")); - resp = cluster.getSolrClient().request( - new V2Request.Builder("/cluster").withMethod(SolrRequest.METHOD.POST).withPayload("{set-property: {name: maxCoresPerNode, val:null}}").build()); + resp = + cluster + .getSolrClient() + .request( + new V2Request.Builder("/cluster") + .withMethod(SolrRequest.METHOD.POST) + .withPayload("{set-property: {name: maxCoresPerNode, val:null}}") + .build()); assertTrue(resp.toString().contains("status=0")); }
@@ -159,9 +186,15 @@ public void testCollectionsApi() throws Exception { V2Request req1 = new V2Request.Builder("/c/" + COLL_NAME + "/get/_introspect").build(); assertEquals(COLL_NAME, req1.getCollection()); Map result = resAsMap(client, req1); - assertEquals("/c/collection1/get", Utils.getObjectByPath(result, true, "/spec[0]/url/paths[0]")); - result = resAsMap(client, new V2Request.Builder("/collections/"+COLL_NAME+"/get/_introspect").build()); - assertEquals("/collections/collection1/get", Utils.getObjectByPath(result, true, "/spec[0]/url/paths[0]")); + assertEquals( + "/c/collection1/get", Utils.getObjectByPath(result, true, "/spec[0]/url/paths[0]")); + result = + resAsMap( + client, + new V2Request.Builder("/collections/" + COLL_NAME + "/get/_introspect").build()); + assertEquals( + "/collections/collection1/get", + Utils.getObjectByPath(result, true, "/spec[0]/url/paths[0]")); String tempDir = createTempDir().toFile().getPath(); Map backupPayload = new HashMap<>(); Map backupParams = new HashMap<>();
@@ -169,25 +202,30 @@ public void testCollectionsApi() throws Exception { backupParams.put("name", "backup_test"); backupParams.put("collection", COLL_NAME); backupParams.put("location", tempDir); - cluster.getJettySolrRunners().forEach(j -> j.getCoreContainer().getAllowPaths().add(Paths.get(tempDir))); - client.request(new V2Request.Builder("/c") - .withMethod(SolrRequest.METHOD.POST) - .withPayload(Utils.toJSONString(backupPayload)) - .build()); + cluster + .getJettySolrRunners() + .forEach(j -> j.getCoreContainer().getAllowPaths().add(Paths.get(tempDir))); + client.request( + new V2Request.Builder("/c") + .withMethod(SolrRequest.METHOD.POST) + .withPayload(Utils.toJSONString(backupPayload)) + .build()); }
@Test public void testSelect() throws Exception { CloudSolrClient cloudClient = cluster.getSolrClient(); - final V2Response v2Response = new V2Request.Builder("/c/" + 
COLL_NAME + "/select") - .withMethod(SolrRequest.METHOD.GET) - .withParams(params("q", "-*:*")) - .build() - .process(cloudClient); - assertEquals(0, ((SolrDocumentList)v2Response.getResponse().get("response")).getNumFound()); + final V2Response v2Response = + new V2Request.Builder("/c/" + COLL_NAME + "/select") + .withMethod(SolrRequest.METHOD.GET) + .withParams(params("q", "-*:*")) + .build() + .process(cloudClient); + assertEquals(0, ((SolrDocumentList) v2Response.getResponse().get("response")).getNumFound()); } - - private Map resAsMap(CloudSolrClient client, V2Request request) throws SolrServerException, IOException { + + private Map resAsMap(CloudSolrClient client, V2Request request) + throws SolrServerException, IOException { NamedList rsp = client.request(request); return rsp.asMap(100); } diff --git a/solr/core/src/test/org/apache/solr/handler/V2ClusterAPIMappingTest.java b/solr/core/src/test/org/apache/solr/handler/V2ClusterAPIMappingTest.java index e9a8cd1f6c9..20c4bccdab0 100644 --- a/solr/core/src/test/org/apache/solr/handler/V2ClusterAPIMappingTest.java +++ b/solr/core/src/test/org/apache/solr/handler/V2ClusterAPIMappingTest.java @@ -17,7 +17,19 @@ package org.apache.solr.handler; +import static org.apache.solr.SolrTestCaseJ4.assumeWorkingMockito; +import static org.apache.solr.cloud.api.collections.CollectionHandlingUtils.REQUESTID; +import static org.apache.solr.common.params.CommonParams.ACTION; +import static org.apache.solr.common.params.CommonParams.NAME; +import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.*; + import com.google.common.collect.Maps; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.apache.solr.api.Api; import org.apache.solr.api.ApiBag; import org.apache.solr.cloud.ConfigSetCmds; @@ -36,22 +48,7 @@ import org.junit.Test; import org.mockito.ArgumentCaptor; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.apache.solr.SolrTestCaseJ4.assumeWorkingMockito; -import static org.apache.solr.cloud.api.collections.CollectionHandlingUtils.REQUESTID; -import static org.apache.solr.common.params.CommonParams.ACTION; -import static org.apache.solr.common.params.CommonParams.NAME; -import static org.junit.Assert.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.*; - -/** - * Unit tests for the v2 to v1 API mappings found in {@link ClusterAPI} - */ +/** Unit tests for the v2 to v1 API mappings found in {@link ClusterAPI} */ public class V2ClusterAPIMappingTest { private ApiBag apiBag; private ArgumentCaptor queryRequestCaptor; @@ -78,7 +75,8 @@ public void setupApiBag() throws Exception { @Test public void testAsyncCommandStatusAllParams() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/cluster/command-status/someId", "GET", null); + final SolrParams v1Params = + captureConvertedV1Params("/cluster/command-status/someId", "GET", null); assertEquals(CollectionParams.CollectionAction.REQUESTSTATUS.lowerName, v1Params.get(ACTION)); assertEquals("someId", v1Params.get(REQUESTID)); @@ -107,7 +105,8 @@ public void testListClusterAllParams() throws Exception { @Test public void testDeleteCommandStatusAllParams() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/cluster/command-status/someId", "DELETE", null); + final SolrParams v1Params = + 
captureConvertedV1Params("/cluster/command-status/someId", "DELETE", null); assertEquals(CollectionParams.CollectionAction.DELETESTATUS.lowerName, v1Params.get(ACTION)); assertEquals("someId", v1Params.get(REQUESTID)); @@ -116,7 +115,8 @@ public void testDeleteCommandStatusAllParams() throws Exception { // TODO This should probably really get its own class. @Test public void testDeleteConfigetAllParams() throws Exception { - final SolrParams v1Params = captureConvertedConfigsetV1Params("/cluster/configs/someConfigset", "DELETE", null); + final SolrParams v1Params = + captureConvertedConfigsetV1Params("/cluster/configs/someConfigset", "DELETE", null); assertEquals(ConfigSetParams.ConfigSetAction.DELETE.toString(), v1Params.get(ACTION)); assertEquals("someConfigset", v1Params.get(NAME)); @@ -131,11 +131,14 @@ public void testListConfigsetsAllParams() throws Exception { @Test public void testCreateConfigsetAllParams() throws Exception { - final SolrParams v1Params = captureConvertedConfigsetV1Params("/cluster/configs", "POST", - "{'create': {" + - "'name': 'new_configset_name', " + - "'baseConfigSet':'some_existing_configset', " + - "'properties': {'prop1': 'val1', 'prop2': 'val2'}}}"); + final SolrParams v1Params = + captureConvertedConfigsetV1Params( + "/cluster/configs", + "POST", + "{'create': {" + + "'name': 'new_configset_name', " + + "'baseConfigSet':'some_existing_configset', " + + "'properties': {'prop1': 'val1', 'prop2': 'val2'}}}"); assertEquals(ConfigSetParams.ConfigSetAction.CREATE.toString(), v1Params.get(ACTION)); assertEquals("new_configset_name", v1Params.get(NAME)); @@ -146,7 +149,8 @@ public void testCreateConfigsetAllParams() throws Exception { @Test public void testUploadConfigsetAllParams() throws Exception { - final SolrParams v1Params = captureConvertedConfigsetV1Params("/cluster/configs/someConfigSetName", "PUT", null); + final SolrParams v1Params = + captureConvertedConfigsetV1Params("/cluster/configs/someConfigSetName", "PUT", null); assertEquals(ConfigSetParams.ConfigSetAction.UPLOAD.toString(), v1Params.get(ACTION)); assertEquals("someConfigSetName", v1Params.get(NAME)); @@ -158,21 +162,27 @@ public void testUploadConfigsetAllParams() throws Exception { @Test public void testAddFileToConfigsetAllParams() throws Exception { - final SolrParams v1Params = captureConvertedConfigsetV1Params("/cluster/configs/someConfigSetName/some/file/path", "PUT", null); + final SolrParams v1Params = + captureConvertedConfigsetV1Params( + "/cluster/configs/someConfigSetName/some/file/path", "PUT", null); assertEquals(ConfigSetParams.ConfigSetAction.UPLOAD.toString(), v1Params.get(ACTION)); assertEquals("someConfigSetName", v1Params.get(NAME)); - assertEquals("/some/file/path", v1Params.get(ConfigSetParams.FILE_PATH)); // Note the leading '/' that makes the path appear absolute + assertEquals( + "/some/file/path", + v1Params.get( + ConfigSetParams.FILE_PATH)); // Note the leading '/' that makes the path appear absolute assertEquals(true, v1Params.getPrimitiveBool(ConfigSetParams.OVERWRITE)); assertEquals(false, v1Params.getPrimitiveBool(ConfigSetParams.CLEANUP)); } @Test public void testAddRoleAllParams() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/cluster", "POST", - "{'add-role': {" + - "'node': 'some_node_name', " + - "'role':'some_role'}}"); + final SolrParams v1Params = + captureConvertedV1Params( + "/cluster", + "POST", + "{'add-role': {" + "'node': 'some_node_name', " + "'role':'some_role'}}"); 
assertEquals(CollectionParams.CollectionAction.ADDROLE.toString(), v1Params.get(ACTION)); assertEquals("some_node_name", v1Params.get("node")); @@ -181,10 +191,11 @@ public void testAddRoleAllParams() throws Exception { @Test public void testRemoveRoleAllParams() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/cluster", "POST", - "{'remove-role': {" + - "'node': 'some_node_name', " + - "'role':'some_role'}}"); + final SolrParams v1Params = + captureConvertedV1Params( + "/cluster", + "POST", + "{'remove-role': {" + "'node': 'some_node_name', " + "'role':'some_role'}}"); assertEquals(CollectionParams.CollectionAction.REMOVEROLE.toString(), v1Params.get(ACTION)); assertEquals("some_node_name", v1Params.get("node")); @@ -193,46 +204,52 @@ public void testRemoveRoleAllParams() throws Exception { @Test public void testSetPropertyAllParams() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/cluster", "POST", - "{'set-property': {" + - "'name': 'some_prop_name', " + - "'val':'some_value'}}"); + final SolrParams v1Params = + captureConvertedV1Params( + "/cluster", + "POST", + "{'set-property': {" + "'name': 'some_prop_name', " + "'val':'some_value'}}"); assertEquals(CollectionParams.CollectionAction.CLUSTERPROP.toString(), v1Params.get(ACTION)); assertEquals("some_prop_name", v1Params.get(NAME)); assertEquals("some_value", v1Params.get("val")); } - private SolrParams captureConvertedV1Params(String path, String method, String v2RequestBody) throws Exception { + private SolrParams captureConvertedV1Params(String path, String method, String v2RequestBody) + throws Exception { return doCaptureParams(path, method, v2RequestBody, mockCollectionsHandler); } - private SolrParams captureConvertedConfigsetV1Params(String path, String method, String v2RequestBody) throws Exception { + private SolrParams captureConvertedConfigsetV1Params( + String path, String method, String v2RequestBody) throws Exception { return doCaptureParams(path, method, v2RequestBody, mockConfigSetHandler); } - private SolrParams doCaptureParams(String path, String method, String v2RequestBody, RequestHandlerBase mockHandler) throws Exception { + private SolrParams doCaptureParams( + String path, String method, String v2RequestBody, RequestHandlerBase mockHandler) + throws Exception { final HashMap parts = new HashMap<>(); final Api api = apiBag.lookup(path, method, parts); final SolrQueryResponse rsp = new SolrQueryResponse(); - final LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, Maps.newHashMap()) { - @Override - public List getCommands(boolean validateInput) { - if (v2RequestBody == null) return Collections.emptyList(); - return ApiBag.getCommandOperations(new ContentStreamBase.StringStream(v2RequestBody), api.getCommandSchema(), true); - } - - @Override - public Map getPathTemplateValues() { - return parts; - } - - @Override - public String getHttpMethod() { - return method; - } - }; - + final LocalSolrQueryRequest req = + new LocalSolrQueryRequest(null, Maps.newHashMap()) { + @Override + public List getCommands(boolean validateInput) { + if (v2RequestBody == null) return Collections.emptyList(); + return ApiBag.getCommandOperations( + new ContentStreamBase.StringStream(v2RequestBody), api.getCommandSchema(), true); + } + + @Override + public Map getPathTemplateValues() { + return parts; + } + + @Override + public String getHttpMethod() { + return method; + } + }; api.call(req, rsp); verify(mockHandler).handleRequestBody(queryRequestCaptor.capture(), any()); diff 
--git a/solr/core/src/test/org/apache/solr/handler/V2StandaloneTest.java b/solr/core/src/test/org/apache/solr/handler/V2StandaloneTest.java index 438bd27facc..cf087509401 100644 --- a/solr/core/src/test/org/apache/solr/handler/V2StandaloneTest.java +++ b/solr/core/src/test/org/apache/solr/handler/V2StandaloneTest.java @@ -18,7 +18,6 @@ package org.apache.solr.handler; import java.io.File; - import org.apache.commons.io.FileUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.embedded.JettySolrRunner; @@ -27,18 +26,20 @@ import org.apache.solr.common.util.NamedList; import org.junit.Test; -public class V2StandaloneTest extends SolrTestCaseJ4{ +public class V2StandaloneTest extends SolrTestCaseJ4 { @Test public void testWelcomeMessage() throws Exception { File solrHomeTmp = createTempDir().toFile().getAbsoluteFile(); - FileUtils.copyDirectory(new File(TEST_HOME(), "configsets/minimal/conf"), new File(solrHomeTmp,"/conf")); + FileUtils.copyDirectory( + new File(TEST_HOME(), "configsets/minimal/conf"), new File(solrHomeTmp, "/conf")); FileUtils.copyFile(new File(TEST_HOME(), "solr.xml"), new File(solrHomeTmp, "solr.xml")); - JettySolrRunner jetty = new JettySolrRunner(solrHomeTmp.getAbsolutePath(), buildJettyConfig("/solr")); + JettySolrRunner jetty = + new JettySolrRunner(solrHomeTmp.getAbsolutePath(), buildJettyConfig("/solr")); jetty.start(); - try (HttpSolrClient client = getHttpSolrClient(buildUrl(jetty.getLocalPort(),"/solr/"))) { + try (HttpSolrClient client = getHttpSolrClient(buildUrl(jetty.getLocalPort(), "/solr/"))) { NamedList res = client.request(new V2Request.Builder("/").build()); NamedList header = (NamedList) res.get("responseHeader"); assertEquals(0, header.get("status")); diff --git a/solr/core/src/test/org/apache/solr/handler/XmlUpdateRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/XmlUpdateRequestHandlerTest.java index d8709d0056f..886f1209ed6 100644 --- a/solr/core/src/test/org/apache/solr/handler/XmlUpdateRequestHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/XmlUpdateRequestHandlerTest.java @@ -16,6 +16,14 @@ */ package org.apache.solr.handler; +import java.io.IOException; +import java.io.StringReader; +import java.util.Collection; +import java.util.LinkedList; +import java.util.Objects; +import java.util.Queue; +import javax.xml.stream.XMLInputFactory; +import javax.xml.stream.XMLStreamReader; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.util.ContentStreamBase; @@ -30,25 +38,14 @@ import org.junit.BeforeClass; import org.junit.Test; -import javax.xml.stream.XMLInputFactory; -import javax.xml.stream.XMLStreamReader; -import java.io.IOException; -import java.io.StringReader; -import java.util.Collection; -import java.util.LinkedList; -import java.util.Objects; -import java.util.Queue; - -/** - * Tests the UpdateRequestHandler support for XML updates. - */ +/** Tests the UpdateRequestHandler support for XML updates. 
*/ public class XmlUpdateRequestHandlerTest extends SolrTestCaseJ4 { private static XMLInputFactory inputFactory; protected static UpdateRequestHandler handler; @BeforeClass public static void beforeTests() throws Exception { - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); handler = new UpdateRequestHandler(); inputFactory = XMLInputFactory.newInstance(); } @@ -60,47 +57,44 @@ public static void afterTests() { }
@Test - public void testReadDoc() throws Exception - { - String xml = - "<doc>" + - "  <field name=\"id\">12345</field>" + - "  <field name=\"name\">kitten</field>" + - "  <field name=\"cat\">aaa</field>" + - "  <field name=\"cat\">bbb</field>" + - "  <field name=\"cat\">bbb</field>" + - "  <field name=\"ab\">a&amp;b</field>" + - "</doc>"; - - XMLStreamReader parser = - inputFactory.createXMLStreamReader( new StringReader( xml ) ); + public void testReadDoc() throws Exception { + String xml = + "<doc>" + + "  <field name=\"id\">12345</field>" + + "  <field name=\"name\">kitten</field>" + + "  <field name=\"cat\">aaa</field>" + + "  <field name=\"cat\">bbb</field>" + + "  <field name=\"cat\">bbb</field>" + + "  <field name=\"ab\">a&amp;b</field>" + + "</doc>"; + + XMLStreamReader parser = inputFactory.createXMLStreamReader(new StringReader(xml)); parser.next(); // read the START document... - //null for the processor is all right here + // null for the processor is all right here XMLLoader loader = new XMLLoader(); - SolrInputDocument doc = loader.readDoc( parser ); - + SolrInputDocument doc = loader.readDoc(parser); + // Read values - assertEquals( "12345", doc.getField( "id" ).getValue() ); - assertEquals( "kitten", doc.getField( "name").getValue() ); - assertEquals( "a&b", doc.getField( "ab").getValue() ); // read something with escaped characters - - Collection out = doc.getField( "cat" ).getValues(); - assertEquals( 3, out.size() ); - assertEquals( "[aaa, bbb, bbb]", out.toString() ); + assertEquals("12345", doc.getField("id").getValue()); + assertEquals("kitten", doc.getField("name").getValue()); + assertEquals("a&b", doc.getField("ab").getValue()); // read something with escaped characters + + Collection out = doc.getField("cat").getValues(); + assertEquals(3, out.size()); + assertEquals("[aaa, bbb, bbb]", out.toString()); } - - @Test - public void testRequestParams() throws Exception - { - String xml = - "<add>" + - "  <doc>" + - "    <field name=\"id\">12345</field>" + - "    <field name=\"name\">kitten</field>" + - "  </doc>" + - "</add>"; - SolrQueryRequest req = req("commitWithin","100","overwrite","false"); + @Test + public void testRequestParams() throws Exception { + String xml = + "<add>" + + "  <doc>" + + "    <field name=\"id\">12345</field>" + + "    <field name=\"name\">kitten</field>" + + "  </doc>" + + "</add>"; + + SolrQueryRequest req = req("commitWithin", "100", "overwrite", "false"); SolrQueryResponse rsp = new SolrQueryResponse(); BufferingRequestProcessor p = new BufferingRequestProcessor(null); @@ -112,22 +106,24 @@ public void testRequestParams() throws Exception assertEquals(false, add.overwrite); req.close(); } - + @Test - public void testExternalEntities() throws Exception - { + public void testExternalEntities() throws Exception { String file = getFile("mailing_lists.pdf").toURI().toASCIIString(); - String xml = - "<?xml version=\"1.0\"?>" + - // check that external entities are not resolved! - "<!DOCTYPE foo [<!ENTITY bar SYSTEM \""+file+"\">]>" + - "<add>" + - "  &bar;" + - "  <doc>" + - "    <field name=\"id\">12345</field>" + - "    <field name=\"name\">kitten</field>" + - "  </doc>" + - "</add>"; + String xml = + "<?xml version=\"1.0\"?>" + + + // check that external entities are not resolved!
+ "]>" + + "" + + " &bar;" + + " " + + " 12345" + + " kitten" + + " " + + ""; SolrQueryRequest req = req(); SolrQueryResponse rsp = new SolrQueryResponse(); BufferingRequestProcessor p = new BufferingRequestProcessor(null); @@ -140,118 +136,118 @@ public void testExternalEntities() throws Exception } public void testNamedEntity() throws Exception { - assertU("\n"+ - "\n]>"+ - ""+ - "1"+ - "&wacky;" + - ""); - + assertU( + "\n" + + "\n]>" + + "" + + "1" + + "&wacky;" + + ""); + assertU(""); - assertQ(req("foo_s:zzz"), - "//*[@numFound='1']" - ); + assertQ(req("foo_s:zzz"), "//*[@numFound='1']"); } - + @Test public void testReadDelete() throws Exception { - String xml = - "" + - " " + - " id:150" + - " 150" + - " 200" + - " id:200" + - " " + - " " + - " id:150" + - " " + - " " + - " 150" + - " " + - " " + - " 300" + - " " + - " " + - " 400" + - " " + - " " + - " 500" + - " " + - ""; - - MockUpdateRequestProcessor p = new MockUpdateRequestProcessor(null); - p.expectDelete(null, "id:150", -1, 0, null); - p.expectDelete("150", null, -1, 0, null); - p.expectDelete("200", null, -1, 0, null); - p.expectDelete(null, "id:200", -1, 0, null); - p.expectDelete(null, "id:150", 500, 0, null); - p.expectDelete("150", null, -1, 0, null); - p.expectDelete("300", null, -1, 42, null); - p.expectDelete("400", null, -1, 0, "shard1"); - p.expectDelete("500", null, -1, 42, "shard1"); + String xml = + "" + + " " + + " id:150" + + " 150" + + " 200" + + " id:200" + + " " + + " " + + " id:150" + + " " + + " " + + " 150" + + " " + + " " + + " 300" + + " " + + " " + + " 400" + + " " + + " " + + " 500" + + " " + + ""; + + MockUpdateRequestProcessor p = new MockUpdateRequestProcessor(null); + p.expectDelete(null, "id:150", -1, 0, null); + p.expectDelete("150", null, -1, 0, null); + p.expectDelete("200", null, -1, 0, null); + p.expectDelete(null, "id:200", -1, 0, null); + p.expectDelete(null, "id:150", 500, 0, null); + p.expectDelete("150", null, -1, 0, null); + p.expectDelete("300", null, -1, 42, null); + p.expectDelete("400", null, -1, 0, "shard1"); + p.expectDelete("500", null, -1, 42, "shard1"); - XMLLoader loader = new XMLLoader().init(null); - loader.load(req(), new SolrQueryResponse(), new ContentStreamBase.StringStream(xml), p); + XMLLoader loader = new XMLLoader().init(null); + loader.load(req(), new SolrQueryResponse(), new ContentStreamBase.StringStream(xml), p); - p.assertNoCommandsPending(); - } + p.assertNoCommandsPending(); + } @Test public void XMLLoader_denseVector_shouldIndexCorrectly() throws Exception { - assertU("\n" + - "" + - "777" + - "1.5" + - "2.5" + - "3.5" + - "4.5" + - ""); + assertU( + "\n" + + "" + + "777" + + "1.5" + + "2.5" + + "3.5" + + "4.5" + + ""); assertU(""); - - assertQ(req("q", "id:777", "fl", "vector"), "*[count(//doc)=1]", - "//result/doc[1]/arr[@name=\"vector\"]/float[1][.='" + 1.5 + "']", - "//result/doc[1]/arr[@name=\"vector\"]/float[2][.='" + 2.5 + "']", - "//result/doc[1]/arr[@name=\"vector\"]/float[3][.='" + 3.5 + "']", - "//result/doc[1]/arr[@name=\"vector\"]/float[4][.='" + 4.5 + "']" - ); + + assertQ( + req("q", "id:777", "fl", "vector"), + "*[count(//doc)=1]", + "//result/doc[1]/arr[@name=\"vector\"]/float[1][.='" + 1.5 + "']", + "//result/doc[1]/arr[@name=\"vector\"]/float[2][.='" + 2.5 + "']", + "//result/doc[1]/arr[@name=\"vector\"]/float[3][.='" + 3.5 + "']", + "//result/doc[1]/arr[@name=\"vector\"]/float[4][.='" + 4.5 + "']"); } private static class MockUpdateRequestProcessor extends UpdateRequestProcessor { - private Queue deleteCommands = new LinkedList<>(); - - 
public MockUpdateRequestProcessor(UpdateRequestProcessor next) { - super(next); - } + private Queue deleteCommands = new LinkedList<>(); - public void expectDelete(String id, String query, int commitWithin, long version, String route) { - DeleteUpdateCommand cmd = new DeleteUpdateCommand(null); - cmd.id = id; - cmd.query = query; - cmd.commitWithin = commitWithin; - if (version!=0) - cmd.setVersion(version); - if (route!=null) - cmd.setRoute(route); - deleteCommands.add(cmd); - } + public MockUpdateRequestProcessor(UpdateRequestProcessor next) { + super(next); + } - public void assertNoCommandsPending() { - assertTrue(deleteCommands.isEmpty()); - } + public void expectDelete( + String id, String query, int commitWithin, long version, String route) { + DeleteUpdateCommand cmd = new DeleteUpdateCommand(null); + cmd.id = id; + cmd.query = query; + cmd.commitWithin = commitWithin; + if (version != 0) cmd.setVersion(version); + if (route != null) cmd.setRoute(route); + deleteCommands.add(cmd); + } - @Override - public void processDelete(DeleteUpdateCommand cmd) throws IOException { - DeleteUpdateCommand expected = deleteCommands.poll(); - assertNotNull("Unexpected delete command: [" + cmd + "]", expected); - assertTrue("Expected [" + expected + "] but found [" + cmd + "]", - Objects.equals(expected.id, cmd.id) && - Objects.equals(expected.query, cmd.query) && - expected.commitWithin==cmd.commitWithin && - Objects.equals(expected.getRoute(), cmd.getRoute())); - } + public void assertNoCommandsPending() { + assertTrue(deleteCommands.isEmpty()); } + @Override + public void processDelete(DeleteUpdateCommand cmd) throws IOException { + DeleteUpdateCommand expected = deleteCommands.poll(); + assertNotNull("Unexpected delete command: [" + cmd + "]", expected); + assertTrue( + "Expected [" + expected + "] but found [" + cmd + "]", + Objects.equals(expected.id, cmd.id) + && Objects.equals(expected.query, cmd.query) + && expected.commitWithin == cmd.commitWithin + && Objects.equals(expected.getRoute(), cmd.getRoute())); + } + } } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/AdminHandlersProxyTest.java b/solr/core/src/test/org/apache/solr/handler/admin/AdminHandlersProxyTest.java index ee6183bfd27..4b123707eb1 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/AdminHandlersProxyTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/AdminHandlersProxyTest.java @@ -21,7 +21,6 @@ import java.util.Collections; import java.util.Set; import java.util.concurrent.TimeUnit; - import org.apache.http.impl.client.CloseableHttpClient; import org.apache.lucene.util.IOUtils; import org.apache.solr.client.solrj.SolrRequest; @@ -45,9 +44,7 @@ public class AdminHandlersProxyTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { System.setProperty("metricsEnabled", "true"); - configureCluster(2) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(2).addConfig("conf", configset("cloud-minimal")).configure(); } @Before @@ -69,52 +66,58 @@ public void tearDown() throws Exception { @Test public void proxySystemInfoHandlerAllNodes() throws IOException, SolrServerException { MapSolrParams params = new MapSolrParams(Collections.singletonMap("nodes", "all")); - GenericSolrRequest req = new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/info/system", params); + GenericSolrRequest req = + new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/info/system", params); SimpleSolrResponse rsp = req.process(solrClient, 
null); NamedList nl = rsp.getResponse(); assertEquals(3, nl.size()); assertTrue(nl.getName(1).endsWith("_solr")); assertTrue(nl.getName(2).endsWith("_solr")); - assertEquals("solrcloud", ((NamedList)nl.get(nl.getName(1))).get("mode")); - assertEquals(nl.getName(2), ((NamedList)nl.get(nl.getName(2))).get("node")); + assertEquals("solrcloud", ((NamedList) nl.get(nl.getName(1))).get("mode")); + assertEquals(nl.getName(2), ((NamedList) nl.get(nl.getName(2))).get("node")); } @Test public void proxyMetricsHandlerAllNodes() throws IOException, SolrServerException { MapSolrParams params = new MapSolrParams(Collections.singletonMap("nodes", "all")); - GenericSolrRequest req = new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/metrics", params); + GenericSolrRequest req = + new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/metrics", params); SimpleSolrResponse rsp = req.process(solrClient, null); NamedList nl = rsp.getResponse(); assertEquals(3, nl.size()); assertTrue(nl.getName(1).endsWith("_solr")); assertTrue(nl.getName(2).endsWith("_solr")); - assertNotNull(((NamedList)nl.get(nl.getName(1))).get("metrics")); + assertNotNull(((NamedList) nl.get(nl.getName(1))).get("metrics")); } @Test(expected = SolrException.class) public void proxySystemInfoHandlerNonExistingNode() throws IOException, SolrServerException { - MapSolrParams params = new MapSolrParams(Collections.singletonMap("nodes", "example.com:1234_solr")); - GenericSolrRequest req = new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/info/system", params); + MapSolrParams params = + new MapSolrParams(Collections.singletonMap("nodes", "example.com:1234_solr")); + GenericSolrRequest req = + new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/info/system", params); SimpleSolrResponse rsp = req.process(solrClient, null); } - + @Test public void proxySystemInfoHandlerOneNode() { Set nodes = solrClient.getClusterStateProvider().getLiveNodes(); assertEquals(2, nodes.size()); - nodes.forEach(node -> { - MapSolrParams params = new MapSolrParams(Collections.singletonMap("nodes", node)); - GenericSolrRequest req = new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/info/system", params); - SimpleSolrResponse rsp = null; - try { - rsp = req.process(solrClient, null); - } catch (Exception e) { - fail("Exception while proxying request to node " + node); - } - NamedList nl = rsp.getResponse(); - assertEquals(2, nl.size()); - assertEquals("solrcloud", ((NamedList)nl.get(nl.getName(1))).get("mode")); - assertEquals(nl.getName(1), ((NamedList)nl.get(nl.getName(1))).get("node")); - }); + nodes.forEach( + node -> { + MapSolrParams params = new MapSolrParams(Collections.singletonMap("nodes", node)); + GenericSolrRequest req = + new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/info/system", params); + SimpleSolrResponse rsp = null; + try { + rsp = req.process(solrClient, null); + } catch (Exception e) { + fail("Exception while proxying request to node " + node); + } + NamedList nl = rsp.getResponse(); + assertEquals(2, nl.size()); + assertEquals("solrcloud", ((NamedList) nl.get(nl.getName(1))).get("mode")); + assertEquals(nl.getName(1), ((NamedList) nl.get(nl.getName(1))).get("node")); + }); } -} \ No newline at end of file +} diff --git a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminCreateDiscoverTest.java b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminCreateDiscoverTest.java index 5cb5510a48d..31aa6a2429b 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminCreateDiscoverTest.java +++ 
b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminCreateDiscoverTest.java @@ -22,7 +22,6 @@ import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.Properties; - import org.apache.commons.io.FileUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; @@ -69,9 +68,11 @@ private static void setupCore(String coreName, boolean blivet) throws IOExceptio // Be sure we pick up sysvars when we create this String srcDir = SolrTestCaseJ4.TEST_HOME() + "/collection1/conf"; FileUtils.copyFile(new File(srcDir, "schema-tiny.xml"), new File(subHome, "schema_ren.xml")); - FileUtils.copyFile(new File(srcDir, "solrconfig-minimal.xml"), new File(subHome, "solrconfig_ren.xml")); + FileUtils.copyFile( + new File(srcDir, "solrconfig-minimal.xml"), new File(subHome, "solrconfig_ren.xml")); - FileUtils.copyFile(new File(srcDir, "solrconfig.snippet.randomindexconfig.xml"), + FileUtils.copyFile( + new File(srcDir, "solrconfig.snippet.randomindexconfig.xml"), new File(subHome, "solrconfig.snippet.randomindexconfig.xml")); } @@ -91,21 +92,28 @@ public void testCreateSavesSysProps() throws Exception { System.setProperty("DATA_TEST", "data_diff"); SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATE.toString(), - CoreAdminParams.NAME, coreSysProps, - CoreAdminParams.INSTANCE_DIR, "${INSTDIR_TEST}", - CoreAdminParams.CONFIG, "${CONFIG_TEST}", - CoreAdminParams.SCHEMA, "${SCHEMA_TEST}", - CoreAdminParams.DATA_DIR, "${DATA_TEST}"), - resp); + CoreAdminParams.NAME, + coreSysProps, + CoreAdminParams.INSTANCE_DIR, + "${INSTDIR_TEST}", + CoreAdminParams.CONFIG, + "${CONFIG_TEST}", + CoreAdminParams.SCHEMA, + "${SCHEMA_TEST}", + CoreAdminParams.DATA_DIR, + "${DATA_TEST}"), + resp); assertNull("Exception on create", resp.getException()); // verify props are in persisted file Properties props = new Properties(); - File propFile = new File(solrHomeDirectory, coreSysProps + "/" + CorePropertiesLocator.PROPERTIES_FILENAME); + File propFile = + new File(solrHomeDirectory, coreSysProps + "/" + CorePropertiesLocator.PROPERTIES_FILENAME); FileInputStream is = new FileInputStream(propFile); try { props.load(new InputStreamReader(is, StandardCharsets.UTF_8)); @@ -113,29 +121,40 @@ public void testCreateSavesSysProps() throws Exception { org.apache.commons.io.IOUtils.closeQuietly(is); } - assertEquals("Unexpected value preserved in properties file " + propFile.getAbsolutePath(), - props.getProperty(CoreAdminParams.NAME), coreSysProps); + assertEquals( + "Unexpected value preserved in properties file " + propFile.getAbsolutePath(), + props.getProperty(CoreAdminParams.NAME), + coreSysProps); - assertEquals("Unexpected value preserved in properties file " + propFile.getAbsolutePath(), - props.getProperty(CoreAdminParams.CONFIG), "${CONFIG_TEST}"); + assertEquals( + "Unexpected value preserved in properties file " + propFile.getAbsolutePath(), + props.getProperty(CoreAdminParams.CONFIG), + "${CONFIG_TEST}"); - assertEquals("Unexpected value preserved in properties file " + propFile.getAbsolutePath(), - props.getProperty(CoreAdminParams.SCHEMA), "${SCHEMA_TEST}"); + assertEquals( + "Unexpected value preserved in properties file " + propFile.getAbsolutePath(), + props.getProperty(CoreAdminParams.SCHEMA), + "${SCHEMA_TEST}"); - assertEquals("Unexpected value preserved in properties file " + 
propFile.getAbsolutePath(), - props.getProperty(CoreAdminParams.DATA_DIR), "${DATA_TEST}"); + assertEquals( + "Unexpected value preserved in properties file " + propFile.getAbsolutePath(), + props.getProperty(CoreAdminParams.DATA_DIR), + "${DATA_TEST}"); assertEquals(props.size(), 4); - //checkOnlyKnown(propFile); + // checkOnlyKnown(propFile); - // Now assert that certain values are properly dereferenced in the process of creating the core, see - // SOLR-4982. Really, we should be able to just verify that the index files exist. + // Now assert that certain values are properly dereferenced in the process of creating the core, + // see SOLR-4982. Really, we should be able to just verify that the index files exist. // Should NOT be a datadir named ${DATA_TEST} (literal). File badDir = new File(workDir, "${DATA_TEST}"); - assertFalse("Should have substituted the sys var, found file " + badDir.getAbsolutePath(), badDir.exists()); + assertFalse( + "Should have substituted the sys var, found file " + badDir.getAbsolutePath(), + badDir.exists()); - // For the other 3 vars, we couldn't get past creating the core if dereferencing didn't work correctly. + // For the other 3 vars, we couldn't get past creating the core if dereferencing didn't work + // correctly. // Should have segments in the directory pointed to by the ${DATA_TEST}. File test = new File(dataDir, "index"); @@ -152,29 +171,44 @@ public void testCannotCreateTwoCoresWithSameInstanceDir() throws Exception { // Create one core SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATE.toString(), - CoreAdminParams.NAME, coreDuplicate, - CoreAdminParams.INSTANCE_DIR, workDir.getAbsolutePath(), - CoreAdminParams.CONFIG, "solrconfig_ren.xml", - CoreAdminParams.SCHEMA, "schema_ren.xml", - CoreAdminParams.DATA_DIR, data.getAbsolutePath()), - resp); + CoreAdminParams.NAME, + coreDuplicate, + CoreAdminParams.INSTANCE_DIR, + workDir.getAbsolutePath(), + CoreAdminParams.CONFIG, + "solrconfig_ren.xml", + CoreAdminParams.SCHEMA, + "schema_ren.xml", + CoreAdminParams.DATA_DIR, + data.getAbsolutePath()), + resp); assertNull("Exception on create", resp.getException()); // Try to create another core with a different name, but the same instance dir - SolrException e = expectThrows(SolrException.class, () -> { - admin.handleRequestBody - (req(CoreAdminParams.ACTION, - CoreAdminParams.CoreAdminAction.CREATE.toString(), - CoreAdminParams.NAME, "different_name_core", - CoreAdminParams.INSTANCE_DIR, workDir.getAbsolutePath(), - CoreAdminParams.CONFIG, "solrconfig_ren.xml", - CoreAdminParams.SCHEMA, "schema_ren.xml", - CoreAdminParams.DATA_DIR, data.getAbsolutePath()), - new SolrQueryResponse()); - }); + SolrException e = + expectThrows( + SolrException.class, + () -> { + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.CREATE.toString(), + CoreAdminParams.NAME, + "different_name_core", + CoreAdminParams.INSTANCE_DIR, + workDir.getAbsolutePath(), + CoreAdminParams.CONFIG, + "solrconfig_ren.xml", + CoreAdminParams.SCHEMA, + "schema_ren.xml", + CoreAdminParams.DATA_DIR, + data.getAbsolutePath()), + new SolrQueryResponse()); + }); assertTrue(e.getMessage().contains("already defined there")); } @@ -189,30 +223,41 @@ public void testInstanceDirAsPropertyParam() throws Exception { // Create one core SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - 
(req(CoreAdminParams.ACTION, - CoreAdminParams.CoreAdminAction.CREATE.toString(), - CoreAdminParams.NAME, "testInstanceDirAsPropertyParam", - "property.instanceDir", workDir.getAbsolutePath(), - CoreAdminParams.CONFIG, "solrconfig_ren.xml", - CoreAdminParams.SCHEMA, "schema_ren.xml", - CoreAdminParams.DATA_DIR, data.getAbsolutePath()), - resp); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.CREATE.toString(), + CoreAdminParams.NAME, + "testInstanceDirAsPropertyParam", + "property.instanceDir", + workDir.getAbsolutePath(), + CoreAdminParams.CONFIG, + "solrconfig_ren.xml", + CoreAdminParams.SCHEMA, + "schema_ren.xml", + CoreAdminParams.DATA_DIR, + data.getAbsolutePath()), + resp); assertNull("Exception on create", resp.getException()); resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, - CoreAdminParams.CoreAdminAction.STATUS.toString(), - CoreAdminParams.CORE, "testInstanceDirAsPropertyParam"), - resp); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.STATUS.toString(), + CoreAdminParams.CORE, + "testInstanceDirAsPropertyParam"), + resp); NamedList status = (NamedList) resp.getValues().get("status"); assertNotNull(status); NamedList coreProps = (NamedList) status.get("testInstanceDirAsPropertyParam"); assertNotNull(status); String instanceDir = (String) coreProps.get("instanceDir"); assertNotNull(instanceDir); - assertEquals("Instance dir does not match param given in property.instanceDir syntax", workDir.getAbsolutePath(), new File(instanceDir).getAbsolutePath()); + assertEquals( + "Instance dir does not match param given in property.instanceDir syntax", + workDir.getAbsolutePath(), + new File(instanceDir).getAbsolutePath()); } @Test @@ -226,20 +271,27 @@ public void testCreateSavesRegProps() throws Exception { File data = new File(workDir, "data"); SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATE.toString(), - CoreAdminParams.NAME, coreNormal, - CoreAdminParams.INSTANCE_DIR, workDir.getAbsolutePath(), - CoreAdminParams.CONFIG, "solrconfig_ren.xml", - CoreAdminParams.SCHEMA, "schema_ren.xml", - CoreAdminParams.DATA_DIR, data.getAbsolutePath()), - resp); + CoreAdminParams.NAME, + coreNormal, + CoreAdminParams.INSTANCE_DIR, + workDir.getAbsolutePath(), + CoreAdminParams.CONFIG, + "solrconfig_ren.xml", + CoreAdminParams.SCHEMA, + "schema_ren.xml", + CoreAdminParams.DATA_DIR, + data.getAbsolutePath()), + resp); assertNull("Exception on create", resp.getException()); // verify props are in persisted file Properties props = new Properties(); - File propFile = new File(solrHomeDirectory, coreNormal + "/" + CorePropertiesLocator.PROPERTIES_FILENAME); + File propFile = + new File(solrHomeDirectory, coreNormal + "/" + CorePropertiesLocator.PROPERTIES_FILENAME); FileInputStream is = new FileInputStream(propFile); try { props.load(new InputStreamReader(is, StandardCharsets.UTF_8)); @@ -247,26 +299,34 @@ public void testCreateSavesRegProps() throws Exception { org.apache.commons.io.IOUtils.closeQuietly(is); } - assertEquals("Unexpected value preserved in properties file " + propFile.getAbsolutePath(), - props.getProperty(CoreAdminParams.NAME), coreNormal); + assertEquals( + "Unexpected value preserved in properties file " + propFile.getAbsolutePath(), + props.getProperty(CoreAdminParams.NAME), + coreNormal); - 
assertEquals("Unexpected value preserved in properties file " + propFile.getAbsolutePath(), - props.getProperty(CoreAdminParams.CONFIG), "solrconfig_ren.xml"); + assertEquals( + "Unexpected value preserved in properties file " + propFile.getAbsolutePath(), + props.getProperty(CoreAdminParams.CONFIG), + "solrconfig_ren.xml"); - assertEquals("Unexpected value preserved in properties file " + propFile.getAbsolutePath(), - props.getProperty(CoreAdminParams.SCHEMA), "schema_ren.xml"); + assertEquals( + "Unexpected value preserved in properties file " + propFile.getAbsolutePath(), + props.getProperty(CoreAdminParams.SCHEMA), + "schema_ren.xml"); - assertEquals("Unexpected value preserved in properties file " + propFile.getAbsolutePath(), - props.getProperty(CoreAdminParams.DATA_DIR), data.getAbsolutePath()); + assertEquals( + "Unexpected value preserved in properties file " + propFile.getAbsolutePath(), + props.getProperty(CoreAdminParams.DATA_DIR), + data.getAbsolutePath()); assertEquals(props.size(), 4); - //checkOnlyKnown(propFile); - // For the other 3 vars, we couldn't get past creating the core if dereferencing didn't work correctly. + // checkOnlyKnown(propFile); + // For the other 3 vars, we couldn't get past creating the core if dereferencing didn't work + // correctly. // Should have segments in the directory pointed to by the ${DATA_TEST}. File test = new File(data, "index"); assertTrue("Should have found index dir at " + test.getAbsolutePath(), test.exists()); } - } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java index 5dac62b4f25..d5c81574f3f 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java @@ -16,6 +16,7 @@ */ package org.apache.solr.handler.admin; +import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import java.io.File; import java.io.FileInputStream; import java.io.InputStreamReader; @@ -25,8 +26,6 @@ import java.nio.file.Paths; import java.util.Map; import java.util.Properties; - -import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import org.apache.commons.io.FileUtils; import org.apache.lucene.util.Constants; import org.apache.solr.SolrTestCaseJ4; @@ -52,16 +51,17 @@ import org.junit.rules.TestRule; public class CoreAdminHandlerTest extends SolrTestCaseJ4 { - + @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema.xml"); } - @Rule - public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); + @Rule public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); - public String getCoreName() { return this.getClass().getName() + "_sys_vars"; } + public String getCoreName() { + return this.getClass().getName() + "_sys_vars"; + } @Test public void testCreateWithSysVars() throws Exception { @@ -77,8 +77,10 @@ public void testCreateWithSysVars() throws Exception { // Be sure we pick up sysvars when we create this String srcDir = SolrTestCaseJ4.TEST_HOME() + "/collection1/conf"; FileUtils.copyFile(new File(srcDir, "schema-tiny.xml"), new File(subHome, "schema_ren.xml")); - FileUtils.copyFile(new File(srcDir, "solrconfig-minimal.xml"), new File(subHome, "solrconfig_ren.xml")); - FileUtils.copyFile(new File(srcDir, "solrconfig.snippet.randomindexconfig.xml"), + FileUtils.copyFile( + new File(srcDir, 
"solrconfig-minimal.xml"), new File(subHome, "solrconfig_ren.xml")); + FileUtils.copyFile( + new File(srcDir, "solrconfig.snippet.randomindexconfig.xml"), new File(subHome, "solrconfig.snippet.randomindexconfig.xml")); final CoreContainer cores = h.getCoreContainer(); @@ -95,25 +97,34 @@ public void testCreateWithSysVars() throws Exception { System.setProperty("DATA_TEST", dataDir.getAbsolutePath()); SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATE.toString(), - CoreAdminParams.NAME, getCoreName(), - CoreAdminParams.INSTANCE_DIR, "${INSTDIR_TEST}", - CoreAdminParams.CONFIG, "${CONFIG_TEST}", - CoreAdminParams.SCHEMA, "${SCHEMA_TEST}", - CoreAdminParams.DATA_DIR, "${DATA_TEST}"), - resp); + CoreAdminParams.NAME, + getCoreName(), + CoreAdminParams.INSTANCE_DIR, + "${INSTDIR_TEST}", + CoreAdminParams.CONFIG, + "${CONFIG_TEST}", + CoreAdminParams.SCHEMA, + "${SCHEMA_TEST}", + CoreAdminParams.DATA_DIR, + "${DATA_TEST}"), + resp); assertNull("Exception on create", resp.getException()); - // Now assert that certain values are properly dereferenced in the process of creating the core, see - // SOLR-4982. + // Now assert that certain values are properly dereferenced in the process of creating the core, + // see SOLR-4982. // Should NOT be a datadir named ${DATA_TEST} (literal). This is the bug after all File badDir = new File(instDir, "${DATA_TEST}"); - assertFalse("Should have substituted the sys var, found file " + badDir.getAbsolutePath(), badDir.exists()); + assertFalse( + "Should have substituted the sys var, found file " + badDir.getAbsolutePath(), + badDir.exists()); - // For the other 3 vars, we couldn't get past creating the core fi dereferencing didn't work correctly. + // For the other 3 vars, we couldn't get past creating the core fi dereferencing didn't work + // correctly. // Should have segments in the directory pointed to by the ${DATA_TEST}. 
File test = new File(dataDir, "index"); @@ -124,7 +135,7 @@ CoreAdminParams.NAME, getCoreName(), @Test public void testCoreAdminHandler() throws Exception { final File workDir = createTempDir().toFile(); - + final CoreContainer cores = h.getCoreContainer(); cores.getAllowPaths().add(workDir.toPath()); @@ -142,14 +153,20 @@ public void testCoreAdminHandler() throws Exception { SolrQueryResponse resp = new SolrQueryResponse(); // Sneaking in a test for using a bad core name - SolrException se = expectThrows(SolrException.class, () -> { - admin.handleRequestBody - (req(CoreAdminParams.ACTION, - CoreAdminParams.CoreAdminAction.CREATE.toString(), - CoreAdminParams.INSTANCE_DIR, instPropFile.getAbsolutePath(), - CoreAdminParams.NAME, "ugly$core=name"), - new SolrQueryResponse()); - }); + SolrException se = + expectThrows( + SolrException.class, + () -> { + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.CREATE.toString(), + CoreAdminParams.INSTANCE_DIR, + instPropFile.getAbsolutePath(), + CoreAdminParams.NAME, + "ugly$core=name"), + new SolrQueryResponse()); + }); assertTrue("Expected error message for bad core name.", se.toString().contains("Invalid core")); CoreDescriptor cd = cores.getCoreDescriptor("ugly$core=name"); @@ -157,14 +174,19 @@ public void testCoreAdminHandler() throws Exception { // create a new core (using CoreAdminHandler) w/ properties - admin.handleRequestBody - (req(CoreAdminParams.ACTION, - CoreAdminParams.CoreAdminAction.CREATE.toString(), - CoreAdminParams.INSTANCE_DIR, instPropFile.getAbsolutePath(), - CoreAdminParams.NAME, "props", - CoreAdminParams.PROPERTY_PREFIX + "hoss","man", - CoreAdminParams.PROPERTY_PREFIX + "foo","baz"), - resp); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.CREATE.toString(), + CoreAdminParams.INSTANCE_DIR, + instPropFile.getAbsolutePath(), + CoreAdminParams.NAME, + "props", + CoreAdminParams.PROPERTY_PREFIX + "hoss", + "man", + CoreAdminParams.PROPERTY_PREFIX + "foo", + "baz"), + resp); assertNull("Exception on create", resp.getException()); cd = cores.getCoreDescriptor("props"); @@ -174,70 +196,92 @@ public void testCoreAdminHandler() throws Exception { // attempt to create a bogus core and confirm failure ignoreException("Could not load config"); - se = expectThrows(SolrException.class, () -> { - admin.handleRequestBody - (req(CoreAdminParams.ACTION, - CoreAdminParams.CoreAdminAction.CREATE.toString(), - CoreAdminParams.NAME, "bogus_dir_core", - CoreAdminParams.INSTANCE_DIR, "dir_does_not_exist_127896"), - new SolrQueryResponse()); - }); + se = + expectThrows( + SolrException.class, + () -> { + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.CREATE.toString(), + CoreAdminParams.NAME, + "bogus_dir_core", + CoreAdminParams.INSTANCE_DIR, + "dir_does_not_exist_127896"), + new SolrQueryResponse()); + }); // :NOOP: - // :TODO: CoreAdminHandler's exception messages are terrible, otherwise we could assert something useful here + // :TODO: CoreAdminHandler's exception messages are terrible, otherwise we could assert + // something useful here unIgnoreException("Could not load config"); // check specifically for status of the failed core name resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, - CoreAdminParams.CoreAdminAction.STATUS.toString(), - CoreAdminParams.CORE, "bogus_dir_core"), - resp); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + 
CoreAdminParams.CoreAdminAction.STATUS.toString(), + CoreAdminParams.CORE, + "bogus_dir_core"), + resp); @SuppressWarnings("unchecked") - Map failures = - (Map) resp.getValues().get("initFailures"); + Map failures = (Map) resp.getValues().get("initFailures"); assertNotNull("core failures is null", failures); - NamedList status = (NamedList)resp.getValues().get("status"); + NamedList status = (NamedList) resp.getValues().get("status"); assertNotNull("core status is null", status); assertEquals("wrong number of core failures", 1, failures.size()); Exception fail = failures.get("bogus_dir_core"); assertNotNull("null failure for test core", fail); - assertTrue("init failure doesn't mention problem: " + fail.getCause().getMessage(), - 0 < fail.getCause().getMessage().indexOf("dir_does_not_exist")); - - assertEquals("bogus_dir_core status isn't empty", - 0, ((NamedList)status.get("bogus_dir_core")).size()); + assertTrue( + "init failure doesn't mention problem: " + fail.getCause().getMessage(), + 0 < fail.getCause().getMessage().indexOf("dir_does_not_exist")); + assertEquals( + "bogus_dir_core status isn't empty", + 0, + ((NamedList) status.get("bogus_dir_core")).size()); - //Try renaming the core, we should fail + // Try renaming the core, we should fail // First assert that the props core exists cd = cores.getCoreDescriptor("props"); assertNotNull("Core disappeared!", cd); - // now rename it something else just for kicks since we don't actually test this that I could find. - admin.handleRequestBody - (req(CoreAdminParams.ACTION, + // now rename it something else just for kicks since we don't actually test this that I could + // find. + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.RENAME.toString(), - CoreAdminParams.CORE, "props", - CoreAdminParams.OTHER, "rename_me"), - resp); + CoreAdminParams.CORE, + "props", + CoreAdminParams.OTHER, + "rename_me"), + resp); cd = cores.getCoreDescriptor("rename_me"); assertNotNull("Core should have been renamed!", cd); - // Rename it something bogus and see if you get an exception, the old core is still there and the bogus one isn't - se = expectThrows(SolrException.class, () -> { - admin.handleRequestBody - (req(CoreAdminParams.ACTION, - CoreAdminParams.CoreAdminAction.RENAME.toString(), - CoreAdminParams.CORE, "rename_me", - CoreAdminParams.OTHER, "bad$name"), - new SolrQueryResponse()); - }); - assertTrue("Expected error message for bad core name.", se.getMessage().contains("Invalid core")); + // Rename it something bogus and see if you get an exception, the old core is still there and + // the bogus one isn't + se = + expectThrows( + SolrException.class, + () -> { + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.RENAME.toString(), + CoreAdminParams.CORE, + "rename_me", + CoreAdminParams.OTHER, + "bad$name"), + new SolrQueryResponse()); + }); + assertTrue( + "Expected error message for bad core name.", se.getMessage().contains("Invalid core")); cd = cores.getCoreDescriptor("bad$name"); assertNull("Core should NOT exist!", cd); @@ -250,7 +294,7 @@ public void testCoreAdminHandler() throws Exception { } @Test - public void testDeleteInstanceDir() throws Exception { + public void testDeleteInstanceDir() throws Exception { File solrHomeDirectory = createTempDir("solr-home").toFile(); copySolrHomeToTemp(solrHomeDirectory, "corex"); File corex = new File(solrHomeDirectory, "corex"); @@ -262,17 +306,26 @@ public void testDeleteInstanceDir() throws Exception { File 
renamePropFile = new File(coreRename, "core.properties"); FileUtils.write(renamePropFile, "", StandardCharsets.UTF_8); - JettySolrRunner runner = new JettySolrRunner(solrHomeDirectory.getAbsolutePath(), buildJettyConfig("/solr")); + JettySolrRunner runner = + new JettySolrRunner(solrHomeDirectory.getAbsolutePath(), buildJettyConfig("/solr")); runner.start(); - try (HttpSolrClient client = getHttpSolrClient(runner.getBaseUrl() + "/corex", DEFAULT_CONNECTION_TIMEOUT, DEFAULT_CONNECTION_TIMEOUT)) { + try (HttpSolrClient client = + getHttpSolrClient( + runner.getBaseUrl() + "/corex", + DEFAULT_CONNECTION_TIMEOUT, + DEFAULT_CONNECTION_TIMEOUT)) { SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", "123"); client.add(doc); client.commit(); } - try (HttpSolrClient client = getHttpSolrClient(runner.getBaseUrl().toString(), DEFAULT_CONNECTION_TIMEOUT, DEFAULT_CONNECTION_TIMEOUT)) { + try (HttpSolrClient client = + getHttpSolrClient( + runner.getBaseUrl().toString(), + DEFAULT_CONNECTION_TIMEOUT, + DEFAULT_CONNECTION_TIMEOUT)) { CoreAdminRequest.Unload req = new CoreAdminRequest.Unload(false); req.setDeleteInstanceDir(true); req.setCoreName("corex"); @@ -283,87 +336,119 @@ public void testDeleteInstanceDir() throws Exception { // 1> has the property persisted (SOLR-11783) // 2> is deleted after rename properly. - try (HttpSolrClient client = getHttpSolrClient(runner.getBaseUrl().toString(), DEFAULT_CONNECTION_TIMEOUT, DEFAULT_CONNECTION_TIMEOUT)) { + try (HttpSolrClient client = + getHttpSolrClient( + runner.getBaseUrl().toString(), + DEFAULT_CONNECTION_TIMEOUT, + DEFAULT_CONNECTION_TIMEOUT)) { CoreAdminRequest.renameCore("corerename", "brand_new_core_name", client); Properties props = new Properties(); - try (InputStreamReader is = new InputStreamReader(new FileInputStream(renamePropFile), StandardCharsets.UTF_8)) { + try (InputStreamReader is = + new InputStreamReader(new FileInputStream(renamePropFile), StandardCharsets.UTF_8)) { props.load(is); } - assertEquals("Name should have been persisted!", "brand_new_core_name", props.getProperty("name")); + assertEquals( + "Name should have been persisted!", "brand_new_core_name", props.getProperty("name")); } - - try (HttpSolrClient client = getHttpSolrClient(runner.getBaseUrl().toString(), DEFAULT_CONNECTION_TIMEOUT, DEFAULT_CONNECTION_TIMEOUT)) { + try (HttpSolrClient client = + getHttpSolrClient( + runner.getBaseUrl().toString(), + DEFAULT_CONNECTION_TIMEOUT, + DEFAULT_CONNECTION_TIMEOUT)) { CoreAdminRequest.Unload req = new CoreAdminRequest.Unload(false); req.setDeleteInstanceDir(true); req.setCoreName("brand_new_core_name"); req.process(client); } - runner.stop(); - assertFalse("Instance directory exists after core unload with deleteInstanceDir=true : " + corex, + assertFalse( + "Instance directory exists after core unload with deleteInstanceDir=true : " + corex, corex.exists()); - assertFalse("Instance directory exists after core unload with deleteInstanceDir=true : " + coreRename, + assertFalse( + "Instance directory exists after core unload with deleteInstanceDir=true : " + coreRename, coreRename.exists()); - } @Test - public void testUnloadForever() throws Exception { + public void testUnloadForever() throws Exception { File solrHomeDirectory = createTempDir("solr-home").toFile(); copySolrHomeToTemp(solrHomeDirectory, "corex"); File corex = new File(solrHomeDirectory, "corex"); FileUtils.write(new File(corex, "core.properties"), "", StandardCharsets.UTF_8); - JettySolrRunner runner = new 
JettySolrRunner(solrHomeDirectory.getAbsolutePath(), buildJettyConfig("/solr")); + JettySolrRunner runner = + new JettySolrRunner(solrHomeDirectory.getAbsolutePath(), buildJettyConfig("/solr")); runner.start(); - try (HttpSolrClient client = getHttpSolrClient(runner.getBaseUrl() + "/corex", DEFAULT_CONNECTION_TIMEOUT, - DEFAULT_CONNECTION_TIMEOUT)) { + try (HttpSolrClient client = + getHttpSolrClient( + runner.getBaseUrl() + "/corex", + DEFAULT_CONNECTION_TIMEOUT, + DEFAULT_CONNECTION_TIMEOUT)) { SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", "123"); client.add(doc); client.commit(); } - try (HttpSolrClient client = getHttpSolrClient(runner.getBaseUrl() + "/corex", DEFAULT_CONNECTION_TIMEOUT, - DEFAULT_CONNECTION_TIMEOUT)) { + try (HttpSolrClient client = + getHttpSolrClient( + runner.getBaseUrl() + "/corex", + DEFAULT_CONNECTION_TIMEOUT, + DEFAULT_CONNECTION_TIMEOUT)) { QueryResponse result = client.query(new SolrQuery("id:*")); - assertEquals(1,result.getResults().getNumFound()); + assertEquals(1, result.getResults().getNumFound()); } - - try (HttpSolrClient client = getHttpSolrClient(runner.getBaseUrl().toString(), DEFAULT_CONNECTION_TIMEOUT, - DEFAULT_CONNECTION_TIMEOUT)) { + + try (HttpSolrClient client = + getHttpSolrClient( + runner.getBaseUrl().toString(), + DEFAULT_CONNECTION_TIMEOUT, + DEFAULT_CONNECTION_TIMEOUT)) { CoreAdminRequest.Unload req = new CoreAdminRequest.Unload(false); - req.setDeleteInstanceDir(false);//random().nextBoolean()); + req.setDeleteInstanceDir(false); // random().nextBoolean()); req.setCoreName("corex"); req.process(client); } - BaseHttpSolrClient.RemoteSolrException rse = expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> { - try (HttpSolrClient client = getHttpSolrClient(runner.getBaseUrl() + "/corex", DEFAULT_CONNECTION_TIMEOUT, - DEFAULT_CONNECTION_TIMEOUT * 1000)) { - client.query(new SolrQuery("id:*")); - } finally { - runner.stop(); - } - }); - assertEquals("Should have received a 404 error", 404, rse.code()); + BaseHttpSolrClient.RemoteSolrException rse = + expectThrows( + BaseHttpSolrClient.RemoteSolrException.class, + () -> { + try (HttpSolrClient client = + getHttpSolrClient( + runner.getBaseUrl() + "/corex", + DEFAULT_CONNECTION_TIMEOUT, + DEFAULT_CONNECTION_TIMEOUT * 1000)) { + client.query(new SolrQuery("id:*")); + } finally { + runner.stop(); + } + }); + assertEquals("Should have received a 404 error", 404, rse.code()); } - + @Test - public void testDeleteInstanceDirAfterCreateFailure() throws Exception { - assumeFalse("Ignore test on windows because it does not delete data directory immediately after unload", Constants.WINDOWS); + public void testDeleteInstanceDirAfterCreateFailure() throws Exception { + assumeFalse( + "Ignore test on windows because it does not delete data directory immediately after unload", + Constants.WINDOWS); File solrHomeDirectory = createTempDir("solr-home").toFile(); copySolrHomeToTemp(solrHomeDirectory, "corex"); File corex = new File(solrHomeDirectory, "corex"); FileUtils.write(new File(corex, "core.properties"), "", StandardCharsets.UTF_8); - JettySolrRunner runner = new JettySolrRunner(solrHomeDirectory.getAbsolutePath(), buildJettyConfig("/solr")); + JettySolrRunner runner = + new JettySolrRunner(solrHomeDirectory.getAbsolutePath(), buildJettyConfig("/solr")); runner.start(); - try (HttpSolrClient client = getHttpSolrClient(runner.getBaseUrl() + "/corex", DEFAULT_CONNECTION_TIMEOUT, DEFAULT_CONNECTION_TIMEOUT)) { + try (HttpSolrClient client = + getHttpSolrClient( + 
runner.getBaseUrl() + "/corex", + DEFAULT_CONNECTION_TIMEOUT, + DEFAULT_CONNECTION_TIMEOUT)) { SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", "123"); client.add(doc); @@ -380,22 +465,30 @@ public void testDeleteInstanceDirAfterCreateFailure() throws Exception { File subHome = new File(solrHomeDirectory, "corex" + File.separator + "conf"); String top = SolrTestCaseJ4.TEST_HOME() + "/collection1/conf"; - FileUtils.copyFile(new File(top, "bad-error-solrconfig.xml"), new File(subHome, "solrconfig.xml")); - - try (HttpSolrClient client = getHttpSolrClient(runner.getBaseUrl().toString(), DEFAULT_CONNECTION_TIMEOUT, DEFAULT_CONNECTION_TIMEOUT)) { + FileUtils.copyFile( + new File(top, "bad-error-solrconfig.xml"), new File(subHome, "solrconfig.xml")); + + try (HttpSolrClient client = + getHttpSolrClient( + runner.getBaseUrl().toString(), + DEFAULT_CONNECTION_TIMEOUT, + DEFAULT_CONNECTION_TIMEOUT)) { // this is expected because we put a bad solrconfig -- ignore expectThrows(Exception.class, () -> CoreAdminRequest.reloadCore("corex", client)); CoreAdminRequest.Unload req = new CoreAdminRequest.Unload(false); req.setDeleteDataDir(true); - req.setDeleteInstanceDir(false); // important because the data directory is inside the instance directory + req.setDeleteInstanceDir( + false); // important because the data directory is inside the instance directory req.setCoreName("corex"); req.process(client); } runner.stop(); - assertTrue("The data directory was not cleaned up on unload after a failed core reload", Files.notExists(dataDir)); + assertTrue( + "The data directory was not cleaned up on unload after a failed core reload", + Files.notExists(dataDir)); } @Test @@ -403,23 +496,36 @@ public void testNonexistentCoreReload() throws Exception { final CoreAdminHandler admin = new CoreAdminHandler(h.getCoreContainer()); SolrQueryResponse resp = new SolrQueryResponse(); - SolrException e = expectThrows(SolrException.class, () -> { - admin.handleRequestBody( - req(CoreAdminParams.ACTION, - CoreAdminParams.CoreAdminAction.RELOAD.toString(), - CoreAdminParams.CORE, "non-existent-core") - , resp); - }); - assertEquals("Expected error message for non-existent core.", "No such core: non-existent-core", e.getMessage()); + SolrException e = + expectThrows( + SolrException.class, + () -> { + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.RELOAD.toString(), + CoreAdminParams.CORE, + "non-existent-core"), + resp); + }); + assertEquals( + "Expected error message for non-existent core.", + "No such core: non-existent-core", + e.getMessage()); // test null core - e = expectThrows(SolrException.class, () -> { - admin.handleRequestBody( - req(CoreAdminParams.ACTION, - CoreAdminParams.CoreAdminAction.RELOAD.toString()) - , resp); - }); - assertEquals("Expected error message for non-existent core.", "Missing required parameter: core", e.getMessage()); + e = + expectThrows( + SolrException.class, + () -> { + admin.handleRequestBody( + req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.RELOAD.toString()), + resp); + }); + assertEquals( + "Expected error message for non-existent core.", + "Missing required parameter: core", + e.getMessage()); admin.close(); } } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminOperationTest.java b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminOperationTest.java index cf55cc6d8a6..f286d6aaa3a 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminOperationTest.java +++ 
b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminOperationTest.java @@ -16,9 +16,11 @@ */ package org.apache.solr.handler.admin; -import java.util.Map; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; import com.google.common.collect.Maps; +import java.util.Map; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; @@ -29,11 +31,8 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - public class CoreAdminOperationTest extends SolrTestCaseJ4 { - + private CoreAdminHandler.CallInfo callInfo; private SolrQueryRequest mockRequest; @@ -43,246 +42,280 @@ public static void setUpClass() { } @Before - public void setUp() throws Exception{ + public void setUp() throws Exception { super.setUp(); - + mockRequest = mock(SolrQueryRequest.class); callInfo = new CoreAdminHandler.CallInfo(null, mockRequest, null, null); } - + @After public void tearDown() throws Exception { super.tearDown(); } - + @Test public void testStatusUnexpectedFailuresResultIn500SolrException() throws Exception { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.STATUS_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.STATUS_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, cause); } - + @Test public void testUnloadUnexpectedFailuresResultIn500SolrException() throws Exception { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.UNLOAD_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.UNLOAD_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, cause); } - + @Test public void testUnloadMissingCoreNameResultsIn400SolrException() throws Exception { whenCoreAdminOpHasParams(Maps.newHashMap()); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.UNLOAD_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.UNLOAD_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testReloadUnexpectedFailuresResultIn500SolrException() { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.RELOAD_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.RELOAD_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, cause); } - + @Test public void testReloadMissingCoreNameResultsIn400SolrException() { whenCoreAdminOpHasParams(Maps.newHashMap()); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.RELOAD_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.RELOAD_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testCreateUnexpectedFailuresResultIn500SolrException() { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = 
expectThrows(Exception.class, () -> CoreAdminOperation.CREATE_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.CREATE_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, cause); } - + @Test public void testCreateMissingCoreNameResultsIn400SolrException() { whenCoreAdminOpHasParams(Maps.newHashMap()); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.CREATE_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.CREATE_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testSwapUnexpectedFailuresResultIn500SolrException() { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.SWAP_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.SWAP_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, cause); } - + @Test public void testSwapMissingCoreParamResultsIn400SolrException() { final Map params = Maps.newHashMap(); params.put("other", "some-core-name"); whenCoreAdminOpHasParams(params); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.SWAP_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.SWAP_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testSwapMissingOtherParamResultsIn400SolrException() { final Map params = Maps.newHashMap(); params.put("core", "some-core-name"); whenCoreAdminOpHasParams(params); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.SWAP_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.SWAP_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testRenameUnexpectedFailuresResultIn500SolrException() { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.RENAME_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.RENAME_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, cause); } - + @Test public void testRenameMissingCoreParamResultsIn400SolrException() { final Map params = Maps.newHashMap(); params.put("other", "some-core-name"); whenCoreAdminOpHasParams(params); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.RENAME_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.RENAME_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testRenameMissingOtherParamResultsIn400SolrException() { final Map params = Maps.newHashMap(); params.put("core", "some-core-name"); whenCoreAdminOpHasParams(params); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.RENAME_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.RENAME_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testMergeUnexpectedFailuresResultIn500SolrException() { final Throwable cause = new NullPointerException(); 
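// The helper called below is defined at the bottom of this class; it stubs the mocked
// request so that any parameter access throws the given cause, roughly:
//
//   when(mockRequest.getParams()).thenThrow(cause);
//
// Every *UnexpectedFailures* test in this class uses that stub to force the op down the
// unexpected-error path, then asserts the cause comes back wrapped in a 500 SolrException.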
whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.MERGEINDEXES_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.MERGEINDEXES_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, cause); } - + @Test public void testMergeMissingCoreParamResultsIn400SolrException() { final Map params = Maps.newHashMap(); params.put("indexDir", "some/index/dir"); whenCoreAdminOpHasParams(params); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.MERGEINDEXES_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.MERGEINDEXES_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testSplitUnexpectedFailuresResultIn500SolrException() { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.SPLIT_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.SPLIT_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, cause); } - + @Test public void testSplitMissingCoreParamResultsIn400SolrException() { whenCoreAdminOpHasParams(Maps.newHashMap()); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.SPLIT_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.SPLIT_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testPrepRecoveryUnexpectedFailuresResultIn500SolrException() { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.PREPRECOVERY_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.PREPRECOVERY_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, cause); } - + @Test public void testRequestRecoveryUnexpectedFailuresResultIn500Exception() { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.REQUESTRECOVERY_OP.execute(callInfo)); + Exception ex = + expectThrows( + Exception.class, () -> CoreAdminOperation.REQUESTRECOVERY_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, cause); } - + @Test public void testRequestRecoveryMissingCoreParamResultsIn400SolrException() { whenCoreAdminOpHasParams(Maps.newHashMap()); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.REQUESTRECOVERY_OP.execute(callInfo)); + Exception ex = + expectThrows( + Exception.class, () -> CoreAdminOperation.REQUESTRECOVERY_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testRequestSyncUnexpectedFailuresResultIn500Exception() { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.REQUESTSYNCSHARD_OP.execute(callInfo)); + Exception ex = + expectThrows( + Exception.class, () -> CoreAdminOperation.REQUESTSYNCSHARD_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, 
cause); } - + @Test public void testRequestSyncMissingCoreParamResultsIn400SolrException() { whenCoreAdminOpHasParams(Maps.newHashMap()); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.REQUESTSYNCSHARD_OP.execute(callInfo)); + Exception ex = + expectThrows( + Exception.class, () -> CoreAdminOperation.REQUESTSYNCSHARD_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testRequestBufferUpdatesUnexpectedFailuresResultIn500Exception() { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.REQUESTBUFFERUPDATES_OP.execute(callInfo)); + Exception ex = + expectThrows( + Exception.class, () -> CoreAdminOperation.REQUESTBUFFERUPDATES_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, cause); } - + @Test public void testRequestBufferUpdatesMissingCoreParamResultsIn400SolrException() { whenCoreAdminOpHasParams(Maps.newHashMap()); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.REQUESTBUFFERUPDATES_OP.execute(callInfo)); + Exception ex = + expectThrows( + Exception.class, () -> CoreAdminOperation.REQUESTBUFFERUPDATES_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testRequestApplyUpdatesUnexpectedFailuresResultIn500Exception() { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.REQUESTAPPLYUPDATES_OP.execute(callInfo)); + Exception ex = + expectThrows( + Exception.class, () -> CoreAdminOperation.REQUESTAPPLYUPDATES_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, cause); } - + @Test public void testRequestApplyUpdatesMissingCoreParamResultsIn400SolrException() { whenCoreAdminOpHasParams(Maps.newHashMap()); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.REQUESTAPPLYUPDATES_OP.execute(callInfo)); + Exception ex = + expectThrows( + Exception.class, () -> CoreAdminOperation.REQUESTAPPLYUPDATES_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } @@ -291,24 +324,27 @@ public void testOverseerOpUnexpectedFailuresResultIn500Exception() { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.OVERSEEROP_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.OVERSEEROP_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.SERVER_ERROR.code); } - + @Test public void testRequestStatusUnexpectedFailuresResultIn500Exception() { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.REQUESTSTATUS_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.REQUESTSTATUS_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, cause); } - + @Test public void testRequestStatusMissingRequestIdParamResultsIn400SolrException() { whenCoreAdminOpHasParams(Maps.newHashMap()); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.REQUESTSTATUS_OP.execute(callInfo)); + Exception ex = + 
expectThrows(Exception.class, () -> CoreAdminOperation.REQUESTSTATUS_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } @@ -317,178 +353,196 @@ public void testRejoinLeaderElectionUnexpectedFailuresResultIn500Exception() { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.REJOINLEADERELECTION_OP.execute(callInfo)); + Exception ex = + expectThrows( + Exception.class, () -> CoreAdminOperation.REJOINLEADERELECTION_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.SERVER_ERROR.code); } - @Test public void testInvokeUnexpectedFailuresResultIn500Exception() { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.INVOKE_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.INVOKE_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, cause); } - + @Test public void testInvokeMissingClassParamResultsIn400SolrException() { whenCoreAdminOpHasParams(Maps.newHashMap()); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.INVOKE_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.INVOKE_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testBackupUnexpectedFailuresResultIn500Exception() { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.BACKUPCORE_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.BACKUPCORE_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, cause); } - + @Test public void testBackupMissingCoreParamResultsIn400SolrException() { final Map params = Maps.newHashMap(); params.put("name", "any-name-param"); whenCoreAdminOpHasParams(params); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.BACKUPCORE_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.BACKUPCORE_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testBackupMissingNameParamResultsIn400SolrException() { final Map params = Maps.newHashMap(); params.put("core", "any-core-param"); whenCoreAdminOpHasParams(params); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.BACKUPCORE_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.BACKUPCORE_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testRestoreUnexpectedFailuresResultIn500Exception() { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.RESTORECORE_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.RESTORECORE_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, cause); } - + @Test public void testRestoreMissingCoreParamResultsIn400SolrException() { final Map params = Maps.newHashMap(); params.put("name", "any-name-param"); 
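// whenCoreAdminOpHasParams (defined at the bottom of this class) backs the mock with a real
// MapSolrParams, roughly: when(mockRequest.getParams()).thenReturn(new MapSolrParams(params));
// here the request carries "name" but no "core", so the op's parameter validation should
// trip first and fail the request with a 400.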
whenCoreAdminOpHasParams(params); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.RESTORECORE_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.RESTORECORE_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testRestoreMissingNameParamResultsIn400SolrException() { final Map params = Maps.newHashMap(); params.put("core", "any-core-param"); whenCoreAdminOpHasParams(params); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.RESTORECORE_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.RESTORECORE_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testCreateSnapshotUnexpectedFailuresResultIn500Exception() { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.CREATESNAPSHOT_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.CREATESNAPSHOT_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, cause); } - + @Test public void testCreateSnapshotMissingCoreParamResultsIn400SolrException() { final Map params = Maps.newHashMap(); params.put("commitName", "anyCommitName"); whenCoreAdminOpHasParams(params); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.CREATESNAPSHOT_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.CREATESNAPSHOT_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testCreateSnapshotMissingCommitNameParamResultsIn400SolrException() { final Map params = Maps.newHashMap(); params.put("core", "any-core-param"); whenCoreAdminOpHasParams(params); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.CREATESNAPSHOT_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.CREATESNAPSHOT_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testDeleteSnapshotUnexpectedFailuresResultIn500Exception() { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.DELETESNAPSHOT_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.DELETESNAPSHOT_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, cause); } - + @Test public void testDeleteSnapshotMissingCoreParamResultsIn400SolrException() { final Map params = Maps.newHashMap(); params.put("commitName", "anyCommitName"); whenCoreAdminOpHasParams(params); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.DELETESNAPSHOT_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.DELETESNAPSHOT_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testDeleteSnapshotMissingCommitNameParamResultsIn400SolrException() { final Map params = Maps.newHashMap(); params.put("core", "any-core-param"); whenCoreAdminOpHasParams(params); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.DELETESNAPSHOT_OP.execute(callInfo)); + Exception ex = + 
expectThrows(Exception.class, () -> CoreAdminOperation.DELETESNAPSHOT_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + @Test public void testListSnapshotUnexpectedFailuresResultIn500Exception() { final Throwable cause = new NullPointerException(); whenUnexpectedErrorOccursDuringCoreAdminOp(cause); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.LISTSNAPSHOTS_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.LISTSNAPSHOTS_OP.execute(callInfo)); assertSolrExceptionWithCodeAndCause(ex, ErrorCode.SERVER_ERROR.code, cause); } - + @Test public void testListSnapshotMissingCoreParamResultsIn400SolrException() { whenCoreAdminOpHasParams(Maps.newHashMap()); - Exception ex = expectThrows(Exception.class, () -> CoreAdminOperation.LISTSNAPSHOTS_OP.execute(callInfo)); + Exception ex = + expectThrows(Exception.class, () -> CoreAdminOperation.LISTSNAPSHOTS_OP.execute(callInfo)); assertSolrExceptionWithCode(ex, ErrorCode.BAD_REQUEST.code); } - + private void whenUnexpectedErrorOccursDuringCoreAdminOp(Throwable cause) { when(mockRequest.getParams()).thenThrow(cause); } - + private void whenCoreAdminOpHasParams(Map solrParams) { when(mockRequest.getParams()).thenReturn(new MapSolrParams(solrParams)); } - - private void assertSolrExceptionWithCodeAndCause(Throwable thrownException, int expectedStatus, Throwable expectedCause) { + + private void assertSolrExceptionWithCodeAndCause( + Throwable thrownException, int expectedStatus, Throwable expectedCause) { assertEquals(SolrException.class, thrownException.getClass()); - + final SolrException solrException = (SolrException) thrownException; assertEquals(expectedStatus, solrException.code()); - + if (expectedCause != null) assertEquals(expectedCause, solrException.getCause()); } - + private void assertSolrExceptionWithCode(Throwable thrownException, int expectedStatus) { assertSolrExceptionWithCodeAndCause(thrownException, expectedStatus, null); } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminRequestStatusTest.java b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminRequestStatusTest.java index d9577334303..d210be05bcf 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminRequestStatusTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminRequestStatusTest.java @@ -19,7 +19,6 @@ import java.io.File; import java.nio.file.Files; import java.nio.file.Path; - import org.apache.commons.io.FileUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.CommonAdminParams; @@ -30,8 +29,7 @@ import org.junit.BeforeClass; import org.junit.Test; - -public class CoreAdminRequestStatusTest extends SolrTestCaseJ4{ +public class CoreAdminRequestStatusTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema.xml"); @@ -59,46 +57,55 @@ public void testCoreAdminRequestStatus() throws Exception { // create a new core (using CoreAdminHandler) w/ properties SolrQueryResponse resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATE.toString(), - CoreAdminParams.INSTANCE_DIR, instPropFile.getAbsolutePath(), - CoreAdminParams.NAME, "dummycore", - CommonAdminParams.ASYNC, "42"), - resp); + CoreAdminParams.INSTANCE_DIR, + instPropFile.getAbsolutePath(), + CoreAdminParams.NAME, + 
"dummycore", + CommonAdminParams.ASYNC, + "42"), + resp); assertNull("Exception on create", resp.getException()); int maxRetries = 10; - while(maxRetries-- > 0) { - resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, - CoreAdminParams.CoreAdminAction.REQUESTSTATUS.toString(), - CoreAdminParams.REQUESTID, "42"), - resp - ); - if(resp.getValues().get("STATUS") != null && resp.getValues().get("STATUS").equals("completed")) - break; + while (maxRetries-- > 0) { + resp = new SolrQueryResponse(); + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, + CoreAdminParams.CoreAdminAction.REQUESTSTATUS.toString(), + CoreAdminParams.REQUESTID, + "42"), + resp); + if (resp.getValues().get("STATUS") != null + && resp.getValues().get("STATUS").equals("completed")) break; Thread.sleep(1000); } - assertEquals("The status of request was expected to be completed", - "completed", resp.getValues().get("STATUS")); + assertEquals( + "The status of request was expected to be completed", + "completed", + resp.getValues().get("STATUS")); resp = new SolrQueryResponse(); - admin.handleRequestBody - (req(CoreAdminParams.ACTION, + admin.handleRequestBody( + req( + CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.REQUESTSTATUS.toString(), - CoreAdminParams.REQUESTID, "9999999"), - resp - ); + CoreAdminParams.REQUESTID, + "9999999"), + resp); - assertEquals("Was expecting it to be invalid but found a task with the id.", - "notfound", resp.getValues().get("STATUS")); + assertEquals( + "Was expecting it to be invalid but found a task with the id.", + "notfound", + resp.getValues().get("STATUS")); admin.shutdown(); admin.close(); } - } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/CoreMergeIndexesAdminHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/CoreMergeIndexesAdminHandlerTest.java index 9ceaa8a57d4..eda0d06eba1 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/CoreMergeIndexesAdminHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/CoreMergeIndexesAdminHandlerTest.java @@ -17,6 +17,8 @@ package org.apache.solr.handler.admin; import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; +import java.io.File; +import java.io.IOException; import org.apache.lucene.store.Directory; import org.apache.lucene.store.LockFactory; import org.apache.solr.SolrTestCaseJ4; @@ -33,23 +35,20 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -import java.io.File; -import java.io.IOException; - public class CoreMergeIndexesAdminHandlerTest extends SolrTestCaseJ4 { - + @BeforeClass public static void beforeClass() throws Exception { useFactory(FailingDirectoryFactory.class.getName()); initCore("solrconfig.xml", "schema.xml"); } - @Rule - public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); - + @Rule public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); private static String WRAPPED_FAILING_MSG = "Error handling 'mergeindexes' action"; - private static String FAILING_CAUSE_MSG = "Creating a directory using FailingDirectoryFactoryException always fails"; + private static String FAILING_CAUSE_MSG = + "Creating a directory using FailingDirectoryFactoryException always fails"; + public static class FailingDirectoryFactory extends MockFSDirectoryFactory { public static class FailingDirectoryFactoryException extends RuntimeException { public FailingDirectoryFactoryException() { @@ -58,8 +57,10 @@ public 
FailingDirectoryFactoryException() {
}

public boolean fail = false;
+
@Override
- public Directory create(String path, LockFactory lockFactory, DirContext dirContext) throws IOException {
+ public Directory create(String path, LockFactory lockFactory, DirContext dirContext)
+ throws IOException {
if (fail) {
throw new FailingDirectoryFactoryException();
} else {
@@ -76,22 +77,30 @@ public void testMergeIndexesCoreAdminHandler() throws Exception {
cores.getAllowPaths().add(workDir.toPath());

try (final CoreAdminHandler admin = new CoreAdminHandler(cores);
- SolrCore core = cores.getCore("collection1")) {
+ SolrCore core = cores.getCore("collection1")) {
DirectoryFactory df = core.getDirectoryFactory();
FailingDirectoryFactory dirFactory = (FailingDirectoryFactory) df;
try {
dirFactory.fail = true;
ignoreException(WRAPPED_FAILING_MSG);
- SolrException e = expectThrows(SolrException.class, () -> {
- admin.handleRequestBody
- (req(CoreAdminParams.ACTION,
- CoreAdminParams.CoreAdminAction.MERGEINDEXES.toString(),
- CoreAdminParams.CORE, "collection1",
- CoreAdminParams.INDEX_DIR, workDir.getAbsolutePath()),
- new SolrQueryResponse());
- });
- assertEquals(FailingDirectoryFactory.FailingDirectoryFactoryException.class, e.getCause().getClass());
+ SolrException e =
+ expectThrows(
+ SolrException.class,
+ () -> {
+ admin.handleRequestBody(
+ req(
+ CoreAdminParams.ACTION,
+ CoreAdminParams.CoreAdminAction.MERGEINDEXES.toString(),
+ CoreAdminParams.CORE,
+ "collection1",
+ CoreAdminParams.INDEX_DIR,
+ workDir.getAbsolutePath()),
+ new SolrQueryResponse());
+ });
+ assertEquals(
+ FailingDirectoryFactory.FailingDirectoryFactoryException.class,
+ e.getCause().getClass());
} finally {
unIgnoreException(WRAPPED_FAILING_MSG);
}
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/DaemonStreamApiTest.java b/solr/core/src/test/org/apache/solr/handler/admin/DaemonStreamApiTest.java
index 11011dfd58c..c1fc5b6aa02 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/DaemonStreamApiTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/DaemonStreamApiTest.java
@@ -23,7 +23,6 @@
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
-
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
@@ -54,7 +53,8 @@ public class DaemonStreamApiTest extends SolrTestCaseJ4 {

private static final String DAEMON_OP = "DaemonOp";
- // We want 2-5 daemons. Choose one of them to start/stop/kill to catch any off-by-one or other bookeeping errors.
+ // We want 2-5 daemons. Choose one of them to start/stop/kill to catch any off-by-one or other
+ // bookkeeping errors.
final int numDaemons = random().nextInt(3) + 2;

String daemonOfInterest;
@@ -62,7 +62,6 @@ public class DaemonStreamApiTest extends SolrTestCaseJ4 {

private String url;

-
@Override
@Before
public void setUp() throws Exception {
@@ -72,8 +71,8 @@ public void setUp() throws Exception {
url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + "/" + CHECKPOINT_COLL;

cluster.uploadConfigSet(configset("cloud-minimal"), CONF_NAME);
- // create a single shard, single replica collection. This is necessary until SOLR-13245 since the commands
- // don't look in all replicas.
+ // create a single shard, single replica collection. This is necessary until SOLR-13245 since
+ // the commands don't look in all replicas.
CollectionAdminRequest.createCollection(SOURCE_COLL, CONF_NAME, 1, 1) .process(cluster.getSolrClient()); @@ -114,7 +113,8 @@ public void testAPIs() throws IOException, SolrServerException, InterruptedExcep assertEquals("Should have all daemons listed", numDaemons, tuples.size()); for (int idx = 0; idx < numDaemons; ++idx) { - assertEquals("Daemon should be running ", tuples.get(idx).getString("id"), daemonNames.get(idx)); + assertEquals( + "Daemon should be running ", tuples.get(idx).getString("id"), daemonNames.get(idx)); } // Are all the daemons in a good state? @@ -123,15 +123,19 @@ public void testAPIs() throws IOException, SolrServerException, InterruptedExcep } // We shouldn't be able to open a daemon twice without closing., leads to thread leeks. - Tuple tupleOfInterest = getTupleOfInterest(params("qt", "/stream", "action", "start", "id", daemonOfInterest) - , DAEMON_OP); - assertTrue("Should not open twice without closing", + Tuple tupleOfInterest = + getTupleOfInterest( + params("qt", "/stream", "action", "start", "id", daemonOfInterest), DAEMON_OP); + assertTrue( + "Should not open twice without closing", tupleOfInterest.getString(DAEMON_OP).contains("There is already an open daemon named")); // Try stopping and check return. - tupleOfInterest = getTupleOfInterest(params("qt", "/stream", "action", "stop", "id", daemonOfInterest), - DAEMON_OP); - assertTrue("Should have been able to stop the daemon", + tupleOfInterest = + getTupleOfInterest( + params("qt", "/stream", "action", "stop", "id", daemonOfInterest), DAEMON_OP); + assertTrue( + "Should have been able to stop the daemon", tupleOfInterest.getString(DAEMON_OP).contains(daemonOfInterest + " stopped")); checkStopped(); @@ -143,9 +147,11 @@ public void testAPIs() throws IOException, SolrServerException, InterruptedExcep } // Try starting and check return. - tupleOfInterest = getTupleOfInterest(params("qt", "/stream", "action", "start", "id", daemonOfInterest), - DAEMON_OP); - assertTrue("Should have been able to start the daemon", + tupleOfInterest = + getTupleOfInterest( + params("qt", "/stream", "action", "start", "id", daemonOfInterest), DAEMON_OP); + assertTrue( + "Should have been able to start the daemon", tupleOfInterest.getString(DAEMON_OP).contains(daemonOfInterest + " started")); // Are all the daemons alive? @@ -154,42 +160,49 @@ public void testAPIs() throws IOException, SolrServerException, InterruptedExcep } // Try killing a daemon, it should be removed from lists. - tupleOfInterest = getTupleOfInterest(params("qt", "/stream", "action", "kill", "id", daemonOfInterest), - DAEMON_OP); - assertTrue("Daemon should have been killed", + tupleOfInterest = + getTupleOfInterest( + params("qt", "/stream", "action", "kill", "id", daemonOfInterest), DAEMON_OP); + assertTrue( + "Daemon should have been killed", tupleOfInterest.getString(DAEMON_OP).contains(daemonOfInterest + " killed")); // Loop for a bit, waiting for the daemon to be removed from the list of possible entries. 
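// checkDaemonKilled below polls with the same TimeOut idiom as checkAlive/checkStopped,
// along these lines:
//
//   TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME);
//   while (timeout.hasTimedOut() == false) {
//     // re-list the daemons and return as soon as the killed one is gone
//     TimeUnit.MILLISECONDS.sleep(100);
//   }
//   fail(...); // daemon still listed after 10 seconds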
checkDaemonKilled(daemonOfInterest); // Should not be able to start a killed daemon - tupleOfInterest = getTupleOfInterest(params("qt", "/stream", "action", "start", "id", daemonOfInterest), - DAEMON_OP); - assertTrue("Daemon should not be found", + tupleOfInterest = + getTupleOfInterest( + params("qt", "/stream", "action", "start", "id", daemonOfInterest), DAEMON_OP); + assertTrue( + "Daemon should not be found", tupleOfInterest.getString(DAEMON_OP).contains(daemonOfInterest + " not found")); // Should not be able to sop a killed daemon - tupleOfInterest = getTupleOfInterest(params("qt", "/stream", "action", "stop", "id", daemonOfInterest), - DAEMON_OP); - assertTrue("Daemon should not be found", + tupleOfInterest = + getTupleOfInterest( + params("qt", "/stream", "action", "stop", "id", daemonOfInterest), DAEMON_OP); + assertTrue( + "Daemon should not be found", tupleOfInterest.getString(DAEMON_OP).contains(daemonOfInterest + " not found")); // Should not be able to kill a killed daemon - tupleOfInterest = getTupleOfInterest(params("qt", "/stream", "action", "kill", "id", daemonOfInterest), - DAEMON_OP); - assertTrue("Daemon should not be found", + tupleOfInterest = + getTupleOfInterest( + params("qt", "/stream", "action", "kill", "id", daemonOfInterest), DAEMON_OP); + assertTrue( + "Daemon should not be found", tupleOfInterest.getString(DAEMON_OP).contains(daemonOfInterest + " not found")); - - // Let's bring the killed daemon back and see if it returns in our lists. Use the method that loops a bit to check - // in case there's a delay. + // Let's bring the killed daemon back and see if it returns in our lists. Use the method that + // loops a bit to check in case there's a delay. createDaemon(DAEMON_DEF.replace("DAEMON_NAME", daemonOfInterest), daemonOfInterest); checkAlive(daemonOfInterest); // Now kill them all so the threads disappear. for (String daemon : daemonNames) { - getTuples(params("qt", "/stream", "action", "kill", "id", daemon)); - checkDaemonKilled(daemon); + getTuples(params("qt", "/stream", "action", "kill", "id", daemon)); + checkDaemonKilled(daemon); } } @@ -198,24 +211,25 @@ private void checkAlive(String daemonName) throws InterruptedException, IOExcept TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); while (timeout.hasTimedOut() == false) { - Tuple tuple = getTupleOfInterest(params("qt", "/stream", "action", "list"), - daemonName); + Tuple tuple = getTupleOfInterest(params("qt", "/stream", "action", "list"), daemonName); String state = tuple.getString("state"); if (state.equals("RUNNABLE") || state.equals("WAITING") || state.equals("TIMED_WAITING")) { return; } TimeUnit.MILLISECONDS.sleep(100); } - fail("State for daemon '" + daemonName + "' did not become RUNNABLE, WAITING or TIMED_WAITING in 10 seconds"); + fail( + "State for daemon '" + + daemonName + + "' did not become RUNNABLE, WAITING or TIMED_WAITING in 10 seconds"); } - // There can be some delay while threads stabilize, so we need to loop. Evenutally, the statu of a stopped - // thread should be "TERMINATED" + // There can be some delay while threads stabilize, so we need to loop. 
Eventually, the status of a
+ // stopped thread should be "TERMINATED"
private void checkStopped() throws InterruptedException, IOException {
TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME);
while (timeout.hasTimedOut() == false) {
- Tuple tuple = getTupleOfInterest(params("qt", "/stream", "action", "list"),
- daemonOfInterest);
+ Tuple tuple = getTupleOfInterest(params("qt", "/stream", "action", "list"), daemonOfInterest);
if (tuple.getString("state").equals("TERMINATED")) {
return;
}
@@ -240,7 +254,9 @@ private void checkDaemonKilled(String daemon) throws IOException, InterruptedExc
}
fail("'" + daemonOfInterest + "' did not disappear in 10 seconds");
}
- private void createDaemon(String daemonDef, String errMsg) throws IOException, SolrServerException {
+
+ private void createDaemon(String daemonDef, String errMsg)
+ throws IOException, SolrServerException {
SolrClient client = cluster.getSolrClient();
// create a daemon
QueryResponse resp = client.query(CHECKPOINT_COLL, params("expr", daemonDef, "qt", "/stream"));
@@ -256,18 +272,25 @@ private void checkCmdsNoDaemon(String daemonName) throws IOException {
List tuples = getTuples(params("qt", "/stream", "action", "list"));
assertEquals("List should be empty", 0, tuples.size());
- Tuple tupleOfInterest = getTupleOfInterest(params("qt", "/stream", "action", "start", "id", daemonName),
- "DaemonOp");
- assertTrue("Start for daemon should not be found", tupleOfInterest.getString("DaemonOp").contains("not found on"));
-
- tupleOfInterest = getTupleOfInterest(params("qt", "/stream", "action", "stop", "id", daemonName),
- "DaemonOp");
- assertTrue("Stop for daemon should not be found", tupleOfInterest.getString("DaemonOp").contains("not found on"));
-
- tupleOfInterest = getTupleOfInterest(params("qt", "/stream", "action", "kill", "id", daemonName),
- "DaemonOp");
-
- assertTrue("Kill for daemon should not be found", tupleOfInterest.getString("DaemonOp").contains("not found on"));
+ Tuple tupleOfInterest =
+ getTupleOfInterest(
+ params("qt", "/stream", "action", "start", "id", daemonName), "DaemonOp");
+ assertTrue(
+ "Start for daemon should not be found",
+ tupleOfInterest.getString("DaemonOp").contains("not found on"));
+
+ tupleOfInterest =
+ getTupleOfInterest(params("qt", "/stream", "action", "stop", "id", daemonName), "DaemonOp");
+ assertTrue(
+ "Stop for daemon should not be found",
+ tupleOfInterest.getString("DaemonOp").contains("not found on"));
+
+ tupleOfInterest =
+ getTupleOfInterest(params("qt", "/stream", "action", "kill", "id", daemonName), "DaemonOp");
+
+ assertTrue(
+ "Kill for daemon should not be found",
+ tupleOfInterest.getString("DaemonOp").contains("not found on"));
}

// It's _really_ useful to have the tuples sorted....
@@ -276,17 +299,19 @@ private List getTuples(final SolrParams params) throws IOException {
}

private List getTuples(final SolrParams params, String ofInterest) throws IOException {
- //log.info("Tuples from params: {}", params);
+ // log.info("Tuples from params: {}", params);
TupleStream tupleStream = new SolrStream(url, params);
tupleStream.open();
List tuples = new ArrayList<>();
for (; ; ) {
Tuple t = tupleStream.read();
- //log.info(" ... 
{}", t.fields); if (t.EOF) { break; - } else if (ofInterest == null || t.getString("id").equals(ofInterest) || t.getString(ofInterest).equals("null") == false) { + } else if (ofInterest == null + || t.getString("id").equals(ofInterest) + || t.getString(ofInterest).equals("null") == false) { // a failed return is a bit different, the onlyh key is DaemonOp tuples.add(t); } @@ -305,16 +330,16 @@ private Tuple getTupleOfInterest(final SolrParams params, String ofInterest) thr } private static String DAEMON_DEF = - " daemon(id=\"DAEMON_NAME\"," + - " runInterval=\"1000\"," + - " terminate=\"false\"," + - " update(targetColl," + - " batchSize=100," + - " topic(checkpointColl," + - " sourceColl," + - " q=\"*:*\"," + - " fl=\"id\"," + - " id=\"topic1\"," + - " initialCheckpoint=0)" + - "))"; + " daemon(id=\"DAEMON_NAME\"," + + " runInterval=\"1000\"," + + " terminate=\"false\"," + + " update(targetColl," + + " batchSize=100," + + " topic(checkpointColl," + + " sourceColl," + + " q=\"*:*\"," + + " fl=\"id\"," + + " id=\"topic1\"," + + " initialCheckpoint=0)" + + "))"; } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/HealthCheckHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/HealthCheckHandlerTest.java index 5e8f16635d4..3e7b1d76d8e 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/HealthCheckHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/HealthCheckHandlerTest.java @@ -17,11 +17,12 @@ package org.apache.solr.handler.admin; +import static org.apache.solr.common.params.CommonParams.HEALTH_CHECK_HANDLER_PATH; + import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.Properties; - import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrResponse; import org.apache.solr.client.solrj.SolrServerException; @@ -48,39 +49,41 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.apache.solr.common.params.CommonParams.HEALTH_CHECK_HANDLER_PATH; - public class HealthCheckHandlerTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { - configureCluster(1) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(1).addConfig("conf", configset("cloud-minimal")).configure(); } @Test public void testHealthCheckHandler() throws Exception { - GenericSolrRequest req = new GenericSolrRequest(SolrRequest.METHOD.GET, HEALTH_CHECK_HANDLER_PATH, new ModifiableSolrParams()); + GenericSolrRequest req = + new GenericSolrRequest( + SolrRequest.METHOD.GET, HEALTH_CHECK_HANDLER_PATH, new ModifiableSolrParams()); // positive check that our only existing "healthy" node works with cloud client // NOTE: this is using GenericSolrRequest, not HealthCheckRequest which is why it passes // as compared with testHealthCheckHandlerWithCloudClient // (Not sure if that's actaully a good thing -- but it's how the existing test worked) - assertEquals(CommonParams.OK, + assertEquals( + CommonParams.OK, req.process(cluster.getSolrClient()).getResponse().get(CommonParams.STATUS)); // positive check that our exiting "healthy" node works with direct http client - try (HttpSolrClient httpSolrClient = getHttpSolrClient(cluster.getJettySolrRunner(0).getBaseUrl().toString())) { + try (HttpSolrClient httpSolrClient = + getHttpSolrClient(cluster.getJettySolrRunner(0).getBaseUrl().toString())) { SolrResponse response = req.process(httpSolrClient); assertEquals(CommonParams.OK, 
response.getResponse().get(CommonParams.STATUS)); } // successfully create a dummy collection - try (HttpSolrClient httpSolrClient = getHttpSolrClient(cluster.getJettySolrRunner(0).getBaseUrl().toString())) { - CollectionAdminResponse collectionAdminResponse = CollectionAdminRequest.createCollection("test", "_default", 1, 1) - .withProperty("solr.directoryFactory", "solr.StandardDirectoryFactory") - .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) - .process(httpSolrClient); + try (HttpSolrClient httpSolrClient = + getHttpSolrClient(cluster.getJettySolrRunner(0).getBaseUrl().toString())) { + CollectionAdminResponse collectionAdminResponse = + CollectionAdminRequest.createCollection("test", "_default", 1, 1) + .withProperty("solr.directoryFactory", "solr.StandardDirectoryFactory") + .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) + .process(httpSolrClient); assertEquals(0, collectionAdminResponse.getStatus()); SolrResponse response = req.process(httpSolrClient); assertEquals(CommonParams.OK, response.getResponse().get(CommonParams.STATUS)); @@ -94,16 +97,19 @@ public void testHealthCheckHandler() throws Exception { try (HttpSolrClient httpSolrClient = getHttpSolrClient(newJetty.getBaseUrl().toString())) { // postive check that our (new) "healthy" node works with direct http client - assertEquals(CommonParams.OK, req.process(httpSolrClient).getResponse().get(CommonParams.STATUS)); + assertEquals( + CommonParams.OK, req.process(httpSolrClient).getResponse().get(CommonParams.STATUS)); // now "break" our (new) node newJetty.getCoreContainer().getZkController().getZkClient().close(); // negative check of our (new) "broken" node that we deliberately put into an unhealth state - BaseHttpSolrClient.RemoteSolrException e = expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> - { - req.process(httpSolrClient); - }); + BaseHttpSolrClient.RemoteSolrException e = + expectThrows( + BaseHttpSolrClient.RemoteSolrException.class, + () -> { + req.process(httpSolrClient); + }); assertTrue(e.getMessage(), e.getMessage().contains("Host Unavailable")); assertEquals(SolrException.ErrorCode.SERVICE_UNAVAILABLE.code, e.code()); } finally { @@ -116,37 +122,47 @@ public void testHealthCheckHandler() throws Exception { try (HttpSolrClient httpSolrClient = getHttpSolrClient(newJetty.getBaseUrl().toString())) { // postive check that our (new) "healthy" node works with direct http client - assertEquals(CommonParams.OK, req.process(httpSolrClient).getResponse().get(CommonParams.STATUS)); + assertEquals( + CommonParams.OK, req.process(httpSolrClient).getResponse().get(CommonParams.STATUS)); // shutdown the core container of new node newJetty.getCoreContainer().shutdown(); // api shouldn't unreachable - SolrException thrown = expectThrows(SolrException.class, () -> { - req.process(httpSolrClient).getResponse().get(CommonParams.STATUS); - fail("API shouldn't be available, and fail at above request"); - }); + SolrException thrown = + expectThrows( + SolrException.class, + () -> { + req.process(httpSolrClient).getResponse().get(CommonParams.STATUS); + fail("API shouldn't be available, and fail at above request"); + }); assertEquals("Exception code should be 404", 404, thrown.code()); - assertTrue("Should have seen an exception containing the an error", thrown.getMessage().contains( - "Error processing the request. 
CoreContainer is either not initialized or shutting down.")); + assertTrue( + "Should have seen an exception containing the error", + thrown + .getMessage() + .contains( + "Error processing the request. CoreContainer is either not initialized or shutting down.")); } finally { newJetty.stop(); } // (redundant) positive check that our (previously) existing "healthy" node (still) works // after getting negative results from our broken node and failed core container - try (HttpSolrClient httpSolrClient = getHttpSolrClient(cluster.getJettySolrRunner(0).getBaseUrl().toString())) { + try (HttpSolrClient httpSolrClient = + getHttpSolrClient(cluster.getJettySolrRunner(0).getBaseUrl().toString())) { - assertEquals(CommonParams.OK, req.process(httpSolrClient).getResponse().get(CommonParams.STATUS)); + assertEquals( + CommonParams.OK, req.process(httpSolrClient).getResponse().get(CommonParams.STATUS)); } - } @Test public void testHealthCheckHandlerSolrJ() throws IOException, SolrServerException { // positive check of a HealthCheckRequest using http client HealthCheckRequest req = new HealthCheckRequest(); - try (HttpSolrClient httpSolrClient = getHttpSolrClient(cluster.getJettySolrRunner(0).getBaseUrl().toString())) { + try (HttpSolrClient httpSolrClient = + getHttpSolrClient(cluster.getJettySolrRunner(0).getBaseUrl().toString())) { HealthCheckResponse rsp = req.process(httpSolrClient); assertEquals(CommonParams.OK, rsp.getNodeStatus()); } @@ -170,17 +186,24 @@ public void testHealthCheckV2Api() throws Exception { try (HttpSolrClient httpSolrClient = getHttpSolrClient(newJetty.getBaseUrl().toString())) { // positive check that our (new) "healthy" node works with direct http client - assertEquals(CommonParams.OK, new V2Request.Builder("/node/health").build().process(httpSolrClient).
- getResponse().get(CommonParams.STATUS)); + assertEquals( + CommonParams.OK, + new V2Request.Builder("/node/health") + .build() + .process(httpSolrClient) + .getResponse() + .get(CommonParams.STATUS)); // now "break" our (new) node newJetty.getCoreContainer().getZkController().getZkClient().close(); // negative check of our (new) "broken" node that we deliberately put into an unhealthy state - BaseHttpSolrClient.RemoteSolrException e = expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> - { - new V2Request.Builder("/node/health").build().process(httpSolrClient); - }); + BaseHttpSolrClient.RemoteSolrException e = + expectThrows( + BaseHttpSolrClient.RemoteSolrException.class, + () -> { + new V2Request.Builder("/node/health").build().process(httpSolrClient); + }); assertTrue(e.getMessage(), e.getMessage().contains("Host Unavailable")); assertEquals(SolrException.ErrorCode.SERVICE_UNAVAILABLE.code, e.code()); } finally { @@ -195,36 +218,39 @@ public void testFindUnhealthyCores() throws Exception { // collection2 -> shard1: [ replica2 (recovering) ] // node2: collection1 -> shard1: [ replica2 (active), replica4 (down) ] // collection2 -> shard1: [ replica1 (active) ] - try (ZkStateReader reader = ClusterStateMockUtil.buildClusterState( - "csrr2rDr2Dcsr2FrR", 1, "baseUrl1:8983_", "baseUrl2:8984_")) { + try (ZkStateReader reader = + ClusterStateMockUtil.buildClusterState( + "csrr2rDr2Dcsr2FrR", 1, "baseUrl1:8983_", "baseUrl2:8984_")) { ClusterState clusterState = reader.getClusterState(); // Node 1 - Collection node1Cores = Arrays.asList( - mockCD("collection1", "slice1_replica1", "slice1", true, Replica.State.ACTIVE), - mockCD("collection1", "slice1_replica3", "slice1", true, Replica.State.DOWN), - mockCD("collection2", "slice1_replica5", "slice1", true, Replica.State.RECOVERING), - // A dangling core for a non-existant collection will not fail the check - mockCD("invalid", "invalid", "slice1", false, Replica.State.RECOVERING), - // A core for a slice that is not an active slice will not fail the check - mockCD("collection1", "invalid_replica1", "invalid", true, Replica.State.DOWN) - ); + Collection node1Cores = + Arrays.asList( + mockCD("collection1", "slice1_replica1", "slice1", true, Replica.State.ACTIVE), + mockCD("collection1", "slice1_replica3", "slice1", true, Replica.State.DOWN), + mockCD("collection2", "slice1_replica5", "slice1", true, Replica.State.RECOVERING), + // A dangling core for a non-existent collection will not fail the check + mockCD("invalid", "invalid", "slice1", false, Replica.State.RECOVERING), + // A core for a slice that is not an active slice will not fail the check + mockCD("collection1", "invalid_replica1", "invalid", true, Replica.State.DOWN)); long unhealthy1 = HealthCheckHandler.findUnhealthyCores(node1Cores, clusterState); assertEquals(2, unhealthy1); // Node 2 - Collection node2Cores = Arrays.asList( - mockCD("collection1", "slice1_replica2", "slice1", true, Replica.State.ACTIVE), - mockCD("collection1", "slice1_replica4", "slice1", true, Replica.State.DOWN), - mockCD("collection2", "slice1_replica1", "slice1", true, Replica.State.RECOVERY_FAILED) - ); + Collection node2Cores = + Arrays.asList( + mockCD("collection1", "slice1_replica2", "slice1", true, Replica.State.ACTIVE), + mockCD("collection1", "slice1_replica4", "slice1", true, Replica.State.DOWN), + mockCD( + "collection2", "slice1_replica1", "slice1", true, Replica.State.RECOVERY_FAILED)); long unhealthy2 = HealthCheckHandler.findUnhealthyCores(node2Cores, clusterState); assertEquals(1,
unhealthy2); } } /* Creates a minimal cloud descriptor for a core */ - private CloudDescriptor mockCD(String collection, String name, String shardId, boolean registered, Replica.State state) { + private CloudDescriptor mockCD( + String collection, String name, String shardId, boolean registered, Replica.State state) { Properties props = new Properties(); props.put(CoreDescriptor.CORE_SHARD, shardId); props.put(CoreDescriptor.CORE_COLLECTION, collection); @@ -234,4 +260,4 @@ private CloudDescriptor mockCD(String collection, String name, String shardId, b cd.setLastPublished(state); return cd; } -} \ No newline at end of file +} diff --git a/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java b/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java index ec7ed292e36..0ae0ede103a 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java @@ -22,7 +22,6 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; - import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.document.DocumentStoredFieldVisitor; import org.apache.lucene.index.CodecReader; @@ -53,9 +52,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * - */ +/** */ public class IndexSizeEstimatorTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -70,12 +67,9 @@ public static void setupCluster() throws Exception { System.setProperty("solr.tests.numeric.dv", "true"); System.setProperty("solr.tests.numeric.points", "true"); System.setProperty("solr.tests.numeric.points.dv", "true"); - configureCluster(2) - .addConfig("conf", configset("cloud-dynamic")) - .configure(); + configureCluster(2).addConfig("conf", configset("cloud-dynamic")).configure(); solrClient = cluster.getSolrClient(); - CollectionAdminRequest.createCollection(collection, "conf", 2, 2) - .process(solrClient); + CollectionAdminRequest.createCollection(collection, "conf", 2, 2).process(solrClient); cluster.waitForActiveCollection(collection, 2, 4); SolrInputDocument lastDoc = addDocs(collection, NUM_DOCS); HashSet docFields = new HashSet<>(lastDoc.keySet()); @@ -100,7 +94,8 @@ public void testEstimator() throws Exception { try { SolrIndexSearcher searcher = searcherRef.get(); // limit the max length - IndexSizeEstimator estimator = new IndexSizeEstimator(searcher.getRawReader(), 20, 50, true, true); + IndexSizeEstimator estimator = + new IndexSizeEstimator(searcher.getRawReader(), 20, 50, true, true); IndexSizeEstimator.Estimate estimate = estimator.estimate(); Map fieldsBySize = estimate.getFieldsBySize(); assertFalse("empty fieldsBySize", fieldsBySize.isEmpty()); @@ -126,17 +121,25 @@ public void testEstimator() throws Exception { Map sampledFieldsBySize = sampledEstimate.getFieldsBySize(); assertFalse("empty fieldsBySize", sampledFieldsBySize.isEmpty()); // verify that the sampled values are within 50% of the original values - fieldsBySize.forEach((field, size) -> { - Long sampledSize = sampledFieldsBySize.get(field); - assertNotNull("sampled size for " + field + " is missing in " + sampledFieldsBySize, sampledSize); - double delta = (double) size * 0.5; - assertEquals("sampled size of " + field + " is wildly off", (double)size, (double)sampledSize, delta); - }); + fieldsBySize.forEach( + (field, size) -> { + Long sampledSize = sampledFieldsBySize.get(field); + 
assertNotNull( + "sampled size for " + field + " is missing in " + sampledFieldsBySize, sampledSize); + double delta = (double) size * 0.5; + assertEquals( + "sampled size of " + field + " is wildly off", + (double) size, + (double) sampledSize, + delta); + }); // verify the reader is still usable - SOLR-13694 IndexReader reader = searcher.getRawReader(); for (LeafReaderContext context : reader.leaves()) { LeafReader leafReader = context.reader(); - assertTrue("unexpected LeafReader class: " + leafReader.getClass().getName(), leafReader instanceof CodecReader); + assertTrue( + "unexpected LeafReader class: " + leafReader.getClass().getName(), + leafReader instanceof CodecReader); Bits liveDocs = leafReader.getLiveDocs(); CodecReader codecReader = (CodecReader) leafReader; StoredFieldsReader storedFieldsReader = codecReader.getFieldsReader(); @@ -157,64 +160,79 @@ public void testEstimator() throws Exception { @Test public void testIntegration() throws Exception { - CollectionAdminResponse rsp = CollectionAdminRequest.collectionStatus(collection) - .setWithRawSizeInfo(true) - .setWithRawSizeSummary(true) - .setWithRawSizeDetails(true) - .process(solrClient); - CollectionAdminResponse sampledRsp = CollectionAdminRequest.collectionStatus(collection) - .setWithRawSizeInfo(true) - .setWithRawSizeSummary(true) - .setWithRawSizeDetails(true) - .setRawSizeSamplingPercent(5) - .process(solrClient); + CollectionAdminResponse rsp = + CollectionAdminRequest.collectionStatus(collection) + .setWithRawSizeInfo(true) + .setWithRawSizeSummary(true) + .setWithRawSizeDetails(true) + .process(solrClient); + CollectionAdminResponse sampledRsp = + CollectionAdminRequest.collectionStatus(collection) + .setWithRawSizeInfo(true) + .setWithRawSizeSummary(true) + .setWithRawSizeDetails(true) + .setRawSizeSamplingPercent(5) + .process(solrClient); assertEquals(0, rsp.getStatus()); assertEquals(0, sampledRsp.getStatus()); for (int i : Arrays.asList(1, 2)) { @SuppressWarnings({"unchecked"}) - NamedList segInfos = (NamedList) rsp.getResponse().findRecursive(collection, "shards", "shard" + i, "leader", "segInfos"); + NamedList segInfos = + (NamedList) + rsp.getResponse() + .findRecursive(collection, "shards", "shard" + i, "leader", "segInfos"); @SuppressWarnings({"unchecked"}) - NamedList rawSize = (NamedList)segInfos.get("rawSize"); + NamedList rawSize = (NamedList) segInfos.get("rawSize"); assertNotNull("rawSize missing", rawSize); @SuppressWarnings({"unchecked"}) Map rawSizeMap = rawSize.asMap(10); @SuppressWarnings({"unchecked"}) - Map fieldsBySize = (Map)rawSizeMap.get(IndexSizeEstimator.FIELDS_BY_SIZE); + Map fieldsBySize = + (Map) rawSizeMap.get(IndexSizeEstimator.FIELDS_BY_SIZE); assertNotNull("fieldsBySize missing", fieldsBySize); assertEquals(fieldsBySize.toString(), fields.size(), fieldsBySize.size()); fields.forEach(field -> assertNotNull("missing field " + field, fieldsBySize.get(field))); @SuppressWarnings({"unchecked"}) - Map typesBySize = (Map)rawSizeMap.get(IndexSizeEstimator.TYPES_BY_SIZE); + Map typesBySize = + (Map) rawSizeMap.get(IndexSizeEstimator.TYPES_BY_SIZE); assertNotNull("typesBySize missing", typesBySize); assertTrue("expected at least 8 types: " + typesBySize.toString(), typesBySize.size() >= 8); @SuppressWarnings({"unchecked"}) - Map summary = (Map)rawSizeMap.get(IndexSizeEstimator.SUMMARY); + Map summary = + (Map) rawSizeMap.get(IndexSizeEstimator.SUMMARY); assertNotNull("summary missing", summary); assertEquals(summary.toString(), fields.size(), summary.size()); fields.forEach(field -> 
assertNotNull("missing field " + field, summary.get(field))); @SuppressWarnings({"unchecked"}) - Map details = (Map)rawSizeMap.get(IndexSizeEstimator.DETAILS); + Map details = + (Map) rawSizeMap.get(IndexSizeEstimator.DETAILS); assertNotNull("details missing", summary); assertEquals(details.keySet().toString(), 6, details.size()); // compare with sampled @SuppressWarnings({"unchecked"}) - NamedList sampledRawSize = (NamedList) rsp.getResponse().findRecursive(collection, "shards", "shard" + i, "leader", "segInfos", "rawSize"); + NamedList sampledRawSize = + (NamedList) + rsp.getResponse() + .findRecursive( + collection, "shards", "shard" + i, "leader", "segInfos", "rawSize"); assertNotNull("sampled rawSize missing", sampledRawSize); @SuppressWarnings({"unchecked"}) Map sampledRawSizeMap = rawSize.asMap(10); @SuppressWarnings({"unchecked"}) - Map sampledFieldsBySize = (Map)sampledRawSizeMap.get(IndexSizeEstimator.FIELDS_BY_SIZE); + Map sampledFieldsBySize = + (Map) sampledRawSizeMap.get(IndexSizeEstimator.FIELDS_BY_SIZE); assertNotNull("sampled fieldsBySize missing", sampledFieldsBySize); - fieldsBySize.forEach((k, v) -> { - double size = fromHumanReadableUnits((String)v); - double sampledSize = fromHumanReadableUnits((String)sampledFieldsBySize.get(k)); - assertNotNull("sampled size missing for field " + k + " in " + sampledFieldsBySize, sampledSize); - double delta = size * 0.5; - assertEquals("sampled size of " + k + " is wildly off", size, sampledSize, delta); - }); + fieldsBySize.forEach( + (k, v) -> { + double size = fromHumanReadableUnits((String) v); + double sampledSize = fromHumanReadableUnits((String) sampledFieldsBySize.get(k)); + assertNotNull( + "sampled size missing for field " + k + " in " + sampledFieldsBySize, sampledSize); + double delta = size * 0.5; + assertEquals("sampled size of " + k + " is wildly off", size, sampledSize, delta); + }); } - } private static double fromHumanReadableUnits(String value) { @@ -252,7 +270,7 @@ private static SolrInputDocument addDocs(String collection, int n) throws Except // multival, stored, indexed, tv, pos, offsets doc.addField("tv_mv_string", TestUtil.randomAnalysisString(random(), 100, true)); doc.addField("tv_mv_string", TestUtil.randomAnalysisString(random(), 100, true)); - //binary + // binary doc.addField("payload", TestUtil.randomBinaryTerm(random()).bytes); // points doc.addField("point", random().nextInt(100) + "," + random().nextInt(100)); @@ -272,5 +290,4 @@ private static SolrInputDocument addDocs(String collection, int n) throws Except assertFalse("timed out waiting for documents to be added", timeOut.hasTimedOut()); return doc; } - } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/InfoHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/InfoHandlerTest.java index f8461457acc..73593b5e151 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/InfoHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/InfoHandlerTest.java @@ -26,28 +26,27 @@ import org.junit.Test; public class InfoHandlerTest extends SolrTestCaseJ4 { - + @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema.xml"); } - + @Test public void testCoreAdminHandler() throws Exception { final CoreContainer cores = h.getCoreContainer(); InfoHandler infoHandler = cores.getInfoHandler(); SolrQueryResponse rsp = handleRequest(infoHandler, "properties"); - + assertNotNull(rsp.getValues().get("system.properties")); - rsp = handleRequest(infoHandler, "threads"); - + 
assertNotNull(rsp.getValues().get("system")); - + rsp = handleRequest(infoHandler, "logging"); - + assertNotNull(rsp.getValues().get("watcher")); SolrException e = expectThrows(SolrException.class, () -> handleRequest(infoHandler, "info")); @@ -97,65 +96,71 @@ public static class CountPropertiesRequestHandler extends PropertiesRequestHandl private int requestCount = 0; @Override - public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) - throws IOException { + public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws IOException { ++requestCount; super.handleRequestBody(req, rsp); } - public int getRequestCount() { return requestCount; } + public int getRequestCount() { + return requestCount; + } } public static class CountThreadDumpHandler extends ThreadDumpHandler { private int requestCount = 0; @Override - public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) - throws IOException { + public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws IOException { ++requestCount; super.handleRequestBody(req, rsp); } - public int getRequestCount() { return requestCount; } + public int getRequestCount() { + return requestCount; + } } public static class CountLoggingHandler extends LoggingHandler { private int requestCount = 0; - CountLoggingHandler(CoreContainer cores) { super(cores); } + CountLoggingHandler(CoreContainer cores) { + super(cores); + } @Override - public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) - throws Exception { + public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception { ++requestCount; super.handleRequestBody(req, rsp); } - public int getRequestCount() { return requestCount; } + public int getRequestCount() { + return requestCount; + } } public static class CountSystemInfoHandler extends SystemInfoHandler { private int requestCount = 0; - CountSystemInfoHandler(CoreContainer cores) { super(cores); } + CountSystemInfoHandler(CoreContainer cores) { + super(cores); + } @Override - public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) - throws Exception { + public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception { ++requestCount; super.handleRequestBody(req, rsp); } - public int getRequestCount() { return requestCount; } + public int getRequestCount() { + return requestCount; + } } - private SolrQueryResponse handleRequest(InfoHandler infoHandler, String path) - throws Exception { + private SolrQueryResponse handleRequest(InfoHandler infoHandler, String path) throws Exception { SolrQueryResponse rsp = new SolrQueryResponse(); SolrQueryRequest req = req(); req.getContext().put("path", path); infoHandler.handleRequestBody(req, rsp); return rsp; } - } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/LoggingHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/LoggingHandlerTest.java index 8f7e4b8a54b..a8b471a5e47 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/LoggingHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/LoggingHandlerTest.java @@ -16,9 +16,7 @@ */ package org.apache.solr.handler.admin; - import java.util.ArrayList; - import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.LoggerContext; @@ -35,19 +33,18 @@ import org.junit.BeforeClass; import org.junit.Test; - @SuppressForbidden(reason = "test uses log4j2 because it tests output at a specific 
level") @LogLevel("org.apache.solr.bogus_logger_package.BogusLoggerClass=DEBUG") public class LoggingHandlerTest extends SolrTestCaseJ4 { private final String PARENT_LOGGER_NAME = "org.apache.solr.bogus_logger_package"; private final String CLASS_LOGGER_NAME = PARENT_LOGGER_NAME + ".BogusLoggerClass"; - + // TODO: This only tests Log4j at the moment, as that's what's defined // through the CoreContainer. // TODO: Would be nice to throw an exception on trying to set a // log level that doesn't exist - + @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema.xml"); @@ -55,28 +52,31 @@ public static void beforeClass() throws Exception { @Test public void testLogLevelHandlerOutput() throws Exception { - + // sanity check our setup... assertNotNull(this.getClass().getAnnotation(LogLevel.class)); final String annotationConfig = this.getClass().getAnnotation(LogLevel.class).value(); - assertTrue("WTF: " + annotationConfig, annotationConfig.startsWith( PARENT_LOGGER_NAME )); - assertTrue("WTF: " + annotationConfig, annotationConfig.startsWith( CLASS_LOGGER_NAME )); - assertTrue("WTF: " + annotationConfig, annotationConfig.endsWith( Level.DEBUG.toString() )); - - assertEquals(Level.DEBUG, LogManager.getLogger( CLASS_LOGGER_NAME ).getLevel()); - + assertTrue("WTF: " + annotationConfig, annotationConfig.startsWith(PARENT_LOGGER_NAME)); + assertTrue("WTF: " + annotationConfig, annotationConfig.startsWith(CLASS_LOGGER_NAME)); + assertTrue("WTF: " + annotationConfig, annotationConfig.endsWith(Level.DEBUG.toString())); + + assertEquals(Level.DEBUG, LogManager.getLogger(CLASS_LOGGER_NAME).getLevel()); + final LoggerContext ctx = (LoggerContext) LogManager.getContext(false); final Configuration config = ctx.getConfiguration(); - assertEquals("Unexpected config for " + PARENT_LOGGER_NAME + " ... expected 'root' config", - config.getRootLogger(), - config.getLoggerConfig(PARENT_LOGGER_NAME)); + assertEquals( + "Unexpected config for " + PARENT_LOGGER_NAME + " ... expected 'root' config", + config.getRootLogger(), + config.getLoggerConfig(PARENT_LOGGER_NAME)); assertEquals(Level.DEBUG, config.getLoggerConfig(CLASS_LOGGER_NAME).getLevel()); SolrClient client = new EmbeddedSolrServer(h.getCore()); ModifiableSolrParams mparams = new ModifiableSolrParams(); - NamedList rsp = client.request(new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/info/logging", mparams)); + NamedList rsp = + client.request( + new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/info/logging", mparams)); @SuppressWarnings({"unchecked"}) ArrayList> loggers = (ArrayList>) rsp._get("loggers", null); @@ -87,36 +87,42 @@ public void testLogLevelHandlerOutput() throws Exception { // update parent logger level mparams.set("set", PARENT_LOGGER_NAME + ":TRACE"); - rsp = client.request(new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/info/logging", mparams)); + rsp = + client.request( + new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/info/logging", mparams)); @SuppressWarnings({"unchecked"}) - ArrayList> updatedLoggerLevel = (ArrayList>) rsp._get("loggers", null); + ArrayList> updatedLoggerLevel = + (ArrayList>) rsp._get("loggers", null); // check new parent logger level assertTrue(checkLoggerLevel(updatedLoggerLevel, PARENT_LOGGER_NAME, "TRACE")); assertEquals(Level.TRACE, config.getLoggerConfig(PARENT_LOGGER_NAME).getLevel()); assertEquals(Level.DEBUG, config.getLoggerConfig(CLASS_LOGGER_NAME).getLevel()); - + // NOTE: LoggingHandler doesn't actually "remove" the LoggerConfig, ... 
// evidently so people using the UI can see that it was explicitly turned "OFF" ? mparams.set("set", PARENT_LOGGER_NAME + ":null"); - rsp = client.request(new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/info/logging", mparams)); + rsp = + client.request( + new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/info/logging", mparams)); @SuppressWarnings({"unchecked"}) - ArrayList> removedLoggerLevel = (ArrayList>) rsp._get("loggers", null); + ArrayList> removedLoggerLevel = + (ArrayList>) rsp._get("loggers", null); assertTrue(checkLoggerLevel(removedLoggerLevel, PARENT_LOGGER_NAME, "OFF")); assertEquals(Level.OFF, config.getLoggerConfig(PARENT_LOGGER_NAME).getLevel()); assertEquals(Level.DEBUG, config.getLoggerConfig(CLASS_LOGGER_NAME).getLevel()); - } - private boolean checkLoggerLevel(ArrayList> properties, String logger, String level) { + private boolean checkLoggerLevel( + ArrayList> properties, String logger, String level) { for (NamedList property : properties) { String loggerProperty = property._get("name", "").toString(); - String levelProperty = property._get("level", "").toString(); + String levelProperty = property._get("level", "").toString(); if (loggerProperty.equals(logger) && levelProperty.equals(level)) { return true; } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/LukeRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/LukeRequestHandlerTest.java index 5db9ce4af95..89012c50a9f 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/LukeRequestHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/LukeRequestHandlerTest.java @@ -18,20 +18,17 @@ import java.util.Arrays; import java.util.EnumSet; - +import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.luke.FieldFlag; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.schema.CustomAnalyzerStrField; // jdoc import org.apache.solr.schema.IndexSchema; -import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.util.TestHarness; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; -/** - * :TODO: currently only tests some of the utilities in the LukeRequestHandler - */ +/** :TODO: currently only tests some of the utilities in the LukeRequestHandler */ public class LukeRequestHandlerTest extends SolrTestCaseJ4 { @BeforeClass @@ -42,26 +39,45 @@ public static void beforeClass() throws Exception { } @Before public void before() { - assertU(adoc("id","SOLR1000", "name","Apache Solr", - "solr_s", "10", - "solr_sI", "10", - "solr_sS", "10", - "solr_t", "10", - "solr_tt", "10", - "solr_b", "true", - "solr_i", "10", - "solr_l", "10", - "solr_f", "10", - "solr_d", "10", - "solr_ti", "10", - "solr_tl", "10", - "solr_tf", "10", - "solr_td", "10", - "solr_dt", "2000-01-01T01:01:01Z", - "solr_tdt", "2000-01-01T01:01:01Z" - )); + assertU( + adoc( + "id", + "SOLR1000", + "name", + "Apache Solr", + "solr_s", + "10", + "solr_sI", + "10", + "solr_sS", + "10", + "solr_t", + "10", + "solr_tt", + "10", + "solr_b", + "true", + "solr_i", + "10", + "solr_l", + "10", + "solr_f", + "10", + "solr_d", + "10", + "solr_ti", + "10", + "solr_tl", + "10", + "solr_tf", + "10", + "solr_td", + "10", + "solr_dt", + "2000-01-01T01:01:01Z", + "solr_tdt", + "2000-01-01T01:01:01Z")); assertU(commit()); - } @Test @@ -76,71 +92,75 @@ public void testHistogramBucket() { assertHistoBucket(3, 8); assertHistoBucket(4, 9); - final int MAX_VALID = ((Integer.MAX_VALUE/2)+1)/2; - - assertHistoBucket(29, MAX_VALID-1 ); - assertHistoBucket(29,
MAX_VALID ); - assertHistoBucket(30, MAX_VALID+1 ); + final int MAX_VALID = ((Integer.MAX_VALUE / 2) + 1) / 2; + assertHistoBucket(29, MAX_VALID - 1); + assertHistoBucket(29, MAX_VALID); + assertHistoBucket(30, MAX_VALID + 1); } private void assertHistoBucket(int slot, int in) { - assertEquals("histobucket: " + in, slot, 32 - Integer.numberOfLeadingZeros(Math.max(0, in - 1))); + assertEquals( + "histobucket: " + in, slot, 32 - Integer.numberOfLeadingZeros(Math.max(0, in - 1))); } @Test public void testLuke() { - // test that Luke can handle all of the field types - assertQ(req("qt","/admin/luke", "id","SOLR1000")); + assertQ(req("qt", "/admin/luke", "id", "SOLR1000")); final int numFlags = EnumSet.allOf(FieldFlag.class).size(); - assertQ("Not all flags ("+numFlags+") mentioned in info->key", - req("qt","/admin/luke"), - numFlags+"=count(//lst[@name='info']/lst[@name='key']/str)"); + assertQ( + "Not all flags (" + numFlags + ") mentioned in info->key", + req("qt", "/admin/luke"), + numFlags + "=count(//lst[@name='info']/lst[@name='key']/str)"); // code should be the same for all fields, but just in case do several - for (String f : Arrays.asList("solr_t","solr_s","solr_ti", - "solr_td","solr_dt","solr_b", - "solr_sS","solr_sI")) { + for (String f : + Arrays.asList( + "solr_t", "solr_s", "solr_ti", "solr_td", "solr_dt", "solr_b", "solr_sS", "solr_sI")) { final String xp = getFieldXPathPrefix(f); - assertQ("Not as many schema flags as expected ("+numFlags+") for " + f, - req("qt","/admin/luke", "fl", f), - numFlags+"=string-length("+xp+"[@name='schema'])"); - + assertQ( + "Not as many schema flags as expected (" + numFlags + ") for " + f, + req("qt", "/admin/luke", "fl", f), + numFlags + "=string-length(" + xp + "[@name='schema'])"); } // diff loop for checking 'index' flags, // only valid for fields that are indexed & stored - for (String f : Arrays.asList("solr_t","solr_s","solr_ti", - "solr_td","solr_dt","solr_b")) { + for (String f : Arrays.asList("solr_t", "solr_s", "solr_ti", "solr_td", "solr_dt", "solr_b")) { if (h.getCore().getLatestSchema().getField(f).getType().isPointField()) continue; final String xp = getFieldXPathPrefix(f); - assertQ("Not as many index flags as expected ("+numFlags+") for " + f, - req("qt","/admin/luke", "fl", f), - numFlags+"=string-length("+xp+"[@name='index'])"); + assertQ( + "Not as many index flags as expected (" + numFlags + ") for " + f, + req("qt", "/admin/luke", "fl", f), + numFlags + "=string-length(" + xp + "[@name='index'])"); final String hxp = getFieldXPathHistogram(f); - assertQ("Historgram field should be present for field "+f, + assertQ( + "Histogram field should be present for field " + f, req("qt", "/admin/luke", "fl", f), - hxp+"[@name='histogram']"); + hxp + "[@name='histogram']"); } } private static String getFieldXPathHistogram(String field) { - return "//lst[@name='fields']/lst[@name='"+field+"']/lst"; + return "//lst[@name='fields']/lst[@name='" + field + "']/lst"; } + private static String getFieldXPathPrefix(String field) { - return "//lst[@name='fields']/lst[@name='"+field+"']/str"; + return "//lst[@name='fields']/lst[@name='" + field + "']/str"; } + private static String field(String field) { - return "//lst[@name='fields']/lst[@name='"+field+"']/"; + return "//lst[@name='fields']/lst[@name='" + field + "']/"; } + private static String dynfield(String field) { - return "//lst[@name='dynamicFields']/lst[@name='"+field+"']/"; + return "//lst[@name='dynamicFields']/lst[@name='" + field + "']/"; } @Test @@ -149,27 +169,25 @@ public void
testFlParam() { try { // First, determine that the two fields ARE there String response = h.query(req); - assertNull(TestHarness.validateXPath(response, - getFieldXPathPrefix("solr_t") + "[@name='index']", - getFieldXPathPrefix("solr_s") + "[@name='index']" - )); + assertNull( + TestHarness.validateXPath( + response, + getFieldXPathPrefix("solr_t") + "[@name='index']", + getFieldXPathPrefix("solr_s") + "[@name='index']")); // Now test that the other fields are NOT there - for (String f : Arrays.asList("solr_ti", - "solr_td", "solr_dt", "solr_b")) { - - assertNotNull(TestHarness.validateXPath(response, - getFieldXPathPrefix(f) + "[@name='index']")); + for (String f : Arrays.asList("solr_ti", "solr_td", "solr_dt", "solr_b")) { + assertNotNull( + TestHarness.validateXPath(response, getFieldXPathPrefix(f) + "[@name='index']")); } // Ensure * works req = req("qt", "/admin/luke", "fl", "*"); response = h.query(req); - for (String f : Arrays.asList("solr_t", "solr_s", "solr_ti", - "solr_td", "solr_dt", "solr_b")) { + for (String f : + Arrays.asList("solr_t", "solr_s", "solr_ti", "solr_td", "solr_dt", "solr_b")) { if (h.getCore().getLatestSchema().getField(f).getType().isPointField()) continue; - assertNull(TestHarness.validateXPath(response, - getFieldXPathPrefix(f) + "[@name='index']")); + assertNull(TestHarness.validateXPath(response, getFieldXPathPrefix(f) + "[@name='index']")); } } catch (Exception e) { fail("Caught unexpected exception " + e.getMessage()); @@ -179,40 +197,46 @@ public void testFlParam() { public void testNumTerms() throws Exception { final String f = "name"; for (String n : new String[] {"2", "3", "100", "99999"}) { - assertQ(req("qt", "/admin/luke", "fl", f, "numTerms", n), - field(f) + "lst[@name='topTerms']/int[@name='Apache']", - field(f) + "lst[@name='topTerms']/int[@name='Solr']", - "count("+field(f)+"lst[@name='topTerms']/int)=2"); + assertQ( + req("qt", "/admin/luke", "fl", f, "numTerms", n), + field(f) + "lst[@name='topTerms']/int[@name='Apache']", + field(f) + "lst[@name='topTerms']/int[@name='Solr']", + "count(" + field(f) + "lst[@name='topTerms']/int)=2"); } - - assertQ(req("qt", "/admin/luke", "fl", f, "numTerms", "1"), - // no garuntee which one we find - "count("+field(f)+"lst[@name='topTerms']/int)=1"); - assertQ(req("qt", "/admin/luke", "fl", f, "numTerms", "0"), - "count("+field(f)+"lst[@name='topTerms']/int)=0"); + assertQ( + req("qt", "/admin/luke", "fl", f, "numTerms", "1"), + // no guarantee which one we find + "count(" + field(f) + "lst[@name='topTerms']/int)=1"); + + assertQ( + req("qt", "/admin/luke", "fl", f, "numTerms", "0"), + "count(" + field(f) + "lst[@name='topTerms']/int)=0"); // field with no terms shouldn't error for (String n : new String[] {"0", "1", "2", "100", "99999"}) { - assertQ(req("qt", "/admin/luke", "fl", "bogus_s", "numTerms", n), - "count("+field(f)+"lst[@name='topTerms']/int)=0"); + assertQ( + req("qt", "/admin/luke", "fl", "bogus_s", "numTerms", n), + "count(" + field(f) + "lst[@name='topTerms']/int)=0"); } } - /** @see CustomAnalyzerStrField */ + /** + * @see CustomAnalyzerStrField + */ public void testNullFactories() throws Exception { deleteCore(); initCore("solrconfig.xml", "schema-null-charfilters-analyzer.xml"); try { - assertQ(req("qt", "/admin/luke", "show", "schema") , "//lst[@name='custom_tc_string']/lst[@name='indexAnalyzer']" , "//lst[@name='custom_tc_string']/lst[@name='queryAnalyzer']" , "0=count(//lst[@name='custom_tc_string']/lst[@name='indexAnalyzer']/lst[@name='filters'])" ,
"0=count(//lst[@name='custom_tc_string']/lst[@name='queryAnalyzer']/lst[@name='filters'])" - , "0=count(//lst[@name='custom_tc_string']/lst[@name='indexAnalyzer']/lst[@name='charFilters'])" - , "0=count(//lst[@name='custom_tc_string']/lst[@name='queryAnalyzer']/lst[@name='charFilters'])" - ); + assertQ( + req("qt", "/admin/luke", "show", "schema"), + "//lst[@name='custom_tc_string']/lst[@name='indexAnalyzer']", + "//lst[@name='custom_tc_string']/lst[@name='queryAnalyzer']", + "0=count(//lst[@name='custom_tc_string']/lst[@name='indexAnalyzer']/lst[@name='filters'])", + "0=count(//lst[@name='custom_tc_string']/lst[@name='queryAnalyzer']/lst[@name='filters'])", + "0=count(//lst[@name='custom_tc_string']/lst[@name='indexAnalyzer']/lst[@name='charFilters'])", + "0=count(//lst[@name='custom_tc_string']/lst[@name='queryAnalyzer']/lst[@name='charFilters'])"); } finally { // Put back the configuration expected by the rest of the tests in this suite deleteCore(); @@ -224,37 +248,43 @@ public void testCopyFieldLists() throws Exception { SolrQueryRequest req = req("qt", "/admin/luke", "show", "schema"); String xml = h.query(req); - String r = TestHarness.validateXPath - (xml, - field("text") + "/arr[@name='copySources']/str[.='title']", - field("text") + "/arr[@name='copySources']/str[.='subject']", - field("title") + "/arr[@name='copyDests']/str[.='text']", - field("title") + "/arr[@name='copyDests']/str[.='title_stemmed']", - dynfield("bar_copydest_*") + "/arr[@name='copySources']/str[.='foo_copysource_*']", - dynfield("foo_copysource_*") + "/arr[@name='copyDests']/str[.='bar_copydest_*']"); + String r = + TestHarness.validateXPath( + xml, + field("text") + "/arr[@name='copySources']/str[.='title']", + field("text") + "/arr[@name='copySources']/str[.='subject']", + field("title") + "/arr[@name='copyDests']/str[.='text']", + field("title") + "/arr[@name='copyDests']/str[.='title_stemmed']", + dynfield("bar_copydest_*") + "/arr[@name='copySources']/str[.='foo_copysource_*']", + dynfield("foo_copysource_*") + "/arr[@name='copyDests']/str[.='bar_copydest_*']"); assertEquals(xml, null, r); } public void testCatchAllCopyField() throws Exception { deleteCore(); initCore("solrconfig.xml", "schema-copyfield-test.xml"); - + IndexSchema schema = h.getCore().getLatestSchema(); - + assertNull("'*' should not be (or match) a dynamic field", schema.getDynamicPattern("*")); - + boolean foundCatchAllCopyField = false; for (IndexSchema.DynamicCopy dcf : schema.getDynamicCopyFields()) { - foundCatchAllCopyField = dcf.getRegex().equals("*") && dcf.getDestFieldName().equals("catchall_t"); + foundCatchAllCopyField = + dcf.getRegex().equals("*") && dcf.getDestFieldName().equals("catchall_t"); if (foundCatchAllCopyField) { break; } } - assertTrue(" is missing from the schema", foundCatchAllCopyField); + assertTrue( + " is missing from the schema", + foundCatchAllCopyField); SolrQueryRequest req = req("qt", "/admin/luke", "show", "schema", "indent", "on"); String xml = h.query(req); - String result = TestHarness.validateXPath(xml, field("bday") + "/arr[@name='copyDests']/str[.='catchall_t']"); + String result = + TestHarness.validateXPath( + xml, field("bday") + "/arr[@name='copyDests']/str[.='catchall_t']"); assertNull(xml, result); // Put back the configuration expected by the rest of the tests in this suite diff --git a/solr/core/src/test/org/apache/solr/handler/admin/MBeansHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/MBeansHandlerTest.java index 2a7a2a02f2b..5c67280cc5a 100644 --- 
a/solr/core/src/test/org/apache/solr/handler/admin/MBeansHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/MBeansHandlerTest.java @@ -23,7 +23,6 @@ import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.util.ContentStream; @@ -44,29 +43,32 @@ public static void beforeClass() throws Exception { @Test public void testDiff() throws Exception { - String xml = h.query(req( - CommonParams.QT,"/admin/mbeans", - "stats","true", - CommonParams.WT,"xml" - )); + String xml = + h.query(req(CommonParams.QT, "/admin/mbeans", "stats", "true", CommonParams.WT, "xml")); List streams = new ArrayList<>(); streams.add(new ContentStreamBase.StringStream(xml)); - LocalSolrQueryRequest req = lrf.makeRequest( - CommonParams.QT,"/admin/mbeans", - "stats","true", - CommonParams.WT,"xml", - "diff","true"); + LocalSolrQueryRequest req = + lrf.makeRequest( + CommonParams.QT, + "/admin/mbeans", + "stats", + "true", + CommonParams.WT, + "xml", + "diff", + "true"); req.setContentStreams(streams); xml = h.query(req); NamedList>> diff = SolrInfoMBeanHandler.fromXML(xml); // The stats bean for SolrInfoMBeanHandler - NamedList stats = (NamedList)diff.get("ADMIN").get("/admin/mbeans").get("stats"); + NamedList stats = (NamedList) diff.get("ADMIN").get("/admin/mbeans").get("stats"); - //System.out.println("stats:"+stats); - Pattern p = Pattern.compile("Was: (?[0-9]+), Now: (?[0-9]+), Delta: (?[0-9]+)"); + // System.out.println("stats:"+stats); + Pattern p = + Pattern.compile("Was: (?[0-9]+), Now: (?[0-9]+), Delta: (?[0-9]+)"); String response = stats.get("ADMIN./admin/mbeans.requests").toString(); Matcher m = p.matcher(response); if (!m.matches()) { @@ -78,47 +80,62 @@ public void testDiff() throws Exception { int now = Integer.parseInt(m.group("now")); assertEquals(1, now - was); - xml = h.query(req( - CommonParams.QT,"/admin/mbeans", - "stats","true", - "key","org.apache.solr.handler.admin.CollectionsHandler" - )); + xml = + h.query( + req( + CommonParams.QT, + "/admin/mbeans", + "stats", + "true", + "key", + "org.apache.solr.handler.admin.CollectionsHandler")); NamedList>> nl = SolrInfoMBeanHandler.fromXML(xml); - assertNotNull( nl.get("ADMIN").get("org.apache.solr.handler.admin.CollectionsHandler")); + assertNotNull(nl.get("ADMIN").get("org.apache.solr.handler.admin.CollectionsHandler")); } @Test public void testAddedMBeanDiff() throws Exception { - String xml = h.query(req( - CommonParams.QT,"/admin/mbeans", - "stats","true", - CommonParams.WT,"xml" - )); - - // Artificially convert a long value to a null, to trigger the ADD case in SolrInfoMBeanHandler.diffObject() - xml = xml.replaceFirst("[^<]*", ""); - - LocalSolrQueryRequest req = lrf.makeRequest( - CommonParams.QT,"/admin/mbeans", - "stats","true", - CommonParams.WT,"xml", - "diff","true"); + String xml = + h.query(req(CommonParams.QT, "/admin/mbeans", "stats", "true", CommonParams.WT, "xml")); + + // Artificially convert a long value to a null, to trigger the ADD case in + // SolrInfoMBeanHandler.diffObject() + xml = + xml.replaceFirst( + "[^<]*", + ""); + + LocalSolrQueryRequest req = + lrf.makeRequest( + CommonParams.QT, + "/admin/mbeans", + "stats", + "true", + CommonParams.WT, + "xml", + "diff", + "true"); req.setContentStreams(Collections.singletonList(new ContentStreamBase.StringStream(xml))); xml = h.query(req); NamedList>> nl = SolrInfoMBeanHandler.fromXML(xml); - 
assertNotNull(((NamedList)nl.get("ADMIN").get("/admin/mbeans").get("stats")).get("ADD ADMIN./admin/mbeans.totalTime")); + assertNotNull( + ((NamedList) nl.get("ADMIN").get("/admin/mbeans").get("stats")) + .get("ADD ADMIN./admin/mbeans.totalTime")); } @Test public void testXMLDiffWithExternalEntity() throws Exception { String file = getFile("mailing_lists.pdf").toURI().toASCIIString(); - String xml = "\n" + - "]>\n" + - "\n" + - "&bar;" + - "031\n" + - ""; + String xml = + "\n" + + "]>\n" + + "\n" + + "&bar;" + + "031\n" + + ""; NamedList>> nl = SolrInfoMBeanHandler.fromXML(xml); @@ -130,65 +147,74 @@ public void testXMLDiffWithExternalEntity() throws Exception { @Test public void testMetricsSnapshot() throws Exception { final CountDownLatch counter = new CountDownLatch(500); - SolrInfoBean bean = new SolrInfoBean() { - SolrMetricsContext solrMetricsContext; - @Override - public String getName() { - return "foo"; - } - - @Override - public String getDescription() { - return "foo"; - } - - @Override - public Category getCategory() { - return Category.ADMIN; - } - - @Override - public void initializeMetrics(SolrMetricsContext parentContext, String scope) { - this.solrMetricsContext = parentContext.getChildContext(this); - } - - @Override - public SolrMetricsContext getSolrMetricsContext() { - return solrMetricsContext; - } - }; - bean.initializeMetrics(new SolrMetricsContext(h.getCoreContainer().getMetricManager(), "testMetricsSnapshot", "foobar"), "foo"); + SolrInfoBean bean = + new SolrInfoBean() { + SolrMetricsContext solrMetricsContext; + + @Override + public String getName() { + return "foo"; + } + + @Override + public String getDescription() { + return "foo"; + } + + @Override + public Category getCategory() { + return Category.ADMIN; + } + + @Override + public void initializeMetrics(SolrMetricsContext parentContext, String scope) { + this.solrMetricsContext = parentContext.getChildContext(this); + } + + @Override + public SolrMetricsContext getSolrMetricsContext() { + return solrMetricsContext; + } + }; + bean.initializeMetrics( + new SolrMetricsContext( + h.getCoreContainer().getMetricManager(), "testMetricsSnapshot", "foobar"), + "foo"); runSnapshots = true; - Thread modifier = new Thread(() -> { - int i = 0; - while (runSnapshots) { - bean.getSolrMetricsContext().registerMetricName("name-" + i++); - try { - Thread.sleep(31); - } catch (InterruptedException e) { - runSnapshots = false; - break; - } - } - }); - Thread reader = new Thread(() -> { - while (runSnapshots) { - try { - bean.getSolrMetricsContext().getMetricsSnapshot(); - } catch (Exception e) { - runSnapshots = false; - e.printStackTrace(); - fail("Exception getting metrics snapshot: " + e.toString()); - } - try { - Thread.sleep(53); - } catch (InterruptedException e) { - runSnapshots = false; - break; - } - counter.countDown(); - } - }); + Thread modifier = + new Thread( + () -> { + int i = 0; + while (runSnapshots) { + bean.getSolrMetricsContext().registerMetricName("name-" + i++); + try { + Thread.sleep(31); + } catch (InterruptedException e) { + runSnapshots = false; + break; + } + } + }); + Thread reader = + new Thread( + () -> { + while (runSnapshots) { + try { + bean.getSolrMetricsContext().getMetricsSnapshot(); + } catch (Exception e) { + runSnapshots = false; + e.printStackTrace(); + fail("Exception getting metrics snapshot: " + e.toString()); + } + try { + Thread.sleep(53); + } catch (InterruptedException e) { + runSnapshots = false; + break; + } + counter.countDown(); + } + }); modifier.start(); reader.start(); 
counter.await(30, TimeUnit.SECONDS); diff --git a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java index a2e3a2aa55f..d8d60b60af3 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java @@ -17,12 +17,11 @@ package org.apache.solr.handler.admin; +import com.codahale.metrics.Counter; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; - -import com.codahale.metrics.Counter; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.MapWriter; import org.apache.solr.common.params.CommonParams; @@ -42,9 +41,7 @@ import org.junit.BeforeClass; import org.junit.Test; -/** - * Test for {@link MetricsHandler} - */ +/** Test for {@link MetricsHandler} */ public class MetricsHandlerTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { @@ -77,7 +74,15 @@ public void test() throws Exception { MetricsHandler handler = new MetricsHandler(h.getCoreContainer()); SolrQueryResponse resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json"), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + MetricsHandler.COMPACT_PARAM, + "false", + CommonParams.WT, + "json"), + resp); NamedList values = resp.getValues(); assertNotNull(values.get("metrics")); values = (NamedList) values.get("metrics"); @@ -98,7 +103,17 @@ public void test() throws Exception { assertEquals(5, map.size()); resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "group", "jvm,jetty"), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + MetricsHandler.COMPACT_PARAM, + "false", + CommonParams.WT, + "json", + "group", + "jvm,jetty"), + resp); values = resp.getValues(); assertNotNull(values.get("metrics")); values = (NamedList) values.get("metrics"); @@ -108,7 +123,17 @@ public void test() throws Exception { resp = new SolrQueryResponse(); // "collection" works too, because it's a prefix for "collection1" - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "registry", "solr.core.collection,solr.jvm"), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + MetricsHandler.COMPACT_PARAM, + "false", + CommonParams.WT, + "json", + "registry", + "solr.core.collection,solr.jvm"), + resp); values = resp.getValues(); assertNotNull(values.get("metrics")); values = (NamedList) values.get("metrics"); @@ -118,7 +143,19 @@ public void test() throws Exception { resp = new SolrQueryResponse(); // "collection" works too, because it's a prefix for "collection1" - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "registry", "solr.core.collection", "registry", "solr.jvm"), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + MetricsHandler.COMPACT_PARAM, + "false", + CommonParams.WT, + "json", + "registry", + "solr.core.collection", + "registry", + "solr.jvm"), + resp); values = resp.getValues(); assertNotNull(values.get("metrics")); values = (NamedList) values.get("metrics"); @@ 
-127,7 +164,17 @@ public void test() throws Exception { assertNotNull(values.get("solr.jvm")); resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "group", "jvm,jetty"), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + MetricsHandler.COMPACT_PARAM, + "false", + CommonParams.WT, + "json", + "group", + "jvm,jetty"), + resp); values = resp.getValues(); assertNotNull(values.get("metrics")); values = (NamedList) values.get("metrics"); @@ -136,7 +183,19 @@ public void test() throws Exception { assertNotNull(values.get("solr.jvm")); resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "group", "jvm", "group", "jetty"), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + MetricsHandler.COMPACT_PARAM, + "false", + CommonParams.WT, + "json", + "group", + "jvm", + "group", + "jetty"), + resp); values = resp.getValues(); assertNotNull(values.get("metrics")); values = (NamedList) values.get("metrics"); @@ -145,7 +204,19 @@ public void test() throws Exception { assertNotNull(values.get("solr.jvm")); resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "group", "node", "type", "counter"), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + MetricsHandler.COMPACT_PARAM, + "false", + CommonParams.WT, + "json", + "group", + "node", + "type", + "counter"), + resp); values = resp.getValues(); assertNotNull(values.get("metrics")); values = (NamedList) values.get("metrics"); @@ -155,7 +226,17 @@ public void test() throws Exception { assertNull(values.get("ADMIN./admin/authorization.errors")); // this is a timer node resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "prefix", "CONTAINER.cores,CONTAINER.threadPool"), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + MetricsHandler.COMPACT_PARAM, + "false", + CommonParams.WT, + "json", + "prefix", + "CONTAINER.cores,CONTAINER.threadPool"), + resp); values = resp.getValues(); assertNotNull(values.get("metrics")); values = (NamedList) values.get("metrics"); @@ -168,7 +249,19 @@ public void test() throws Exception { assertNotNull(values.get("CONTAINER.threadPool.coreLoadExecutor.completed")); resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "prefix", "CONTAINER.cores", "regex", "C.*thread.*completed"), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + MetricsHandler.COMPACT_PARAM, + "false", + CommonParams.WT, + "json", + "prefix", + "CONTAINER.cores", + "regex", + "C.*thread.*completed"), + resp); values = resp.getValues(); assertNotNull(values.get("metrics")); values = (NamedList) values.get("metrics"); @@ -179,7 +272,19 @@ public void test() throws Exception { assertNotNull(values.get("CONTAINER.threadPool.coreLoadExecutor.completed")); resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", "prefix", "CACHE.core.fieldCache", "property", "entries_count", MetricsHandler.COMPACT_PARAM, 
"true"), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + CommonParams.WT, + "json", + "prefix", + "CACHE.core.fieldCache", + "property", + "entries_count", + MetricsHandler.COMPACT_PARAM, + "true"), + resp); values = resp.getValues(); assertNotNull(values.get("metrics")); values = (NamedList) values.get("metrics"); @@ -191,14 +296,40 @@ public void test() throws Exception { assertNotNull(writer._get("entries_count", null)); resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "group", "jvm", "prefix", "CONTAINER.cores"), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + MetricsHandler.COMPACT_PARAM, + "false", + CommonParams.WT, + "json", + "group", + "jvm", + "prefix", + "CONTAINER.cores"), + resp); values = resp.getValues(); assertNotNull(values.get("metrics")); values = (NamedList) values.get("metrics"); assertEquals(0, values.size()); resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "group", "node", "type", "timer", "prefix", "CONTAINER.cores"), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + MetricsHandler.COMPACT_PARAM, + "false", + CommonParams.WT, + "json", + "group", + "node", + "type", + "timer", + "prefix", + "CONTAINER.cores"), + resp); values = resp.getValues(); assertNotNull(values.get("metrics")); SimpleOrderedMap map1 = (SimpleOrderedMap) values.get("metrics"); @@ -211,7 +342,15 @@ public void testCompact() throws Exception { MetricsHandler handler = new MetricsHandler(h.getCoreContainer()); SolrQueryResponse resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", MetricsHandler.COMPACT_PARAM, "true"), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + CommonParams.WT, + "json", + MetricsHandler.COMPACT_PARAM, + "true"), + resp); NamedList values = resp.getValues(); assertNotNull(values.get("metrics")); values = (NamedList) values.get("metrics"); @@ -230,38 +369,65 @@ public void testPropertyFilter() throws Exception { MetricsHandler handler = new MetricsHandler(h.getCoreContainer()); SolrQueryResponse resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", - MetricsHandler.COMPACT_PARAM, "true", "group", "core", "prefix", "CACHE.searcher"), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + CommonParams.WT, + "json", + MetricsHandler.COMPACT_PARAM, + "true", + "group", + "core", + "prefix", + "CACHE.searcher"), + resp); NamedList values = resp.getValues(); assertNotNull(values.get("metrics")); values = (NamedList) values.get("metrics"); NamedList nl = (NamedList) values.get("solr.core.collection1"); assertNotNull(nl); assertTrue(nl.size() > 0); - nl.forEach((k, v) -> { - assertTrue(v instanceof MapWriter); - Map map = new HashMap<>(); - ((MapWriter) v).toMap(map); - assertTrue(map.size() > 2); - }); + nl.forEach( + (k, v) -> { + assertTrue(v instanceof MapWriter); + Map map = new HashMap<>(); + ((MapWriter) v).toMap(map); + assertTrue(map.size() > 2); + }); resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", - MetricsHandler.COMPACT_PARAM, "true", "group", "core", "prefix", 
"CACHE.searcher", - "property", "inserts", "property", "size"), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + CommonParams.WT, + "json", + MetricsHandler.COMPACT_PARAM, + "true", + "group", + "core", + "prefix", + "CACHE.searcher", + "property", + "inserts", + "property", + "size"), + resp); values = resp.getValues(); values = (NamedList) values.get("metrics"); nl = (NamedList) values.get("solr.core.collection1"); assertNotNull(nl); assertTrue(nl.size() > 0); - nl.forEach((k, v) -> { - assertTrue(v instanceof MapWriter); - Map map = new HashMap<>(); - ((MapWriter) v).toMap(map); - assertEquals("k=" + k + ", v=" + map, 2, map.size()); - assertNotNull(map.get("inserts")); - assertNotNull(map.get("size")); - }); + nl.forEach( + (k, v) -> { + assertTrue(v instanceof MapWriter); + Map map = new HashMap<>(); + ((MapWriter) v).toMap(map); + assertEquals("k=" + k + ", v=" + map, 2, map.size()); + assertNotNull(map.get("inserts")); + assertNotNull(map.get("size")); + }); handler.close(); } @@ -271,48 +437,87 @@ public void testKeyMetrics() throws Exception { String key1 = "solr.core.collection1:CACHE.core.fieldCache"; SolrQueryResponse resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", - MetricsHandler.KEY_PARAM, key1), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + CommonParams.WT, + "json", + MetricsHandler.KEY_PARAM, + key1), + resp); NamedList values = resp.getValues(); Object val = values.findRecursive("metrics", key1); assertNotNull(val); assertTrue(val instanceof MapWriter); - assertTrue(((MapWriter)val)._size() >= 2); + assertTrue(((MapWriter) val)._size() >= 2); String key2 = "solr.core.collection1:CACHE.core.fieldCache:entries_count"; resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", - MetricsHandler.KEY_PARAM, key2), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + CommonParams.WT, + "json", + MetricsHandler.KEY_PARAM, + key2), + resp); val = resp.getValues()._get("metrics/" + key2, null); assertNotNull(val); assertTrue(val instanceof Number); String key3 = "solr.jetty:solrtest_foo\\:bar"; resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", - MetricsHandler.KEY_PARAM, key3), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + CommonParams.WT, + "json", + MetricsHandler.KEY_PARAM, + key3), + resp); - val = resp.getValues()._get( "metrics/" + key3, null); + val = resp.getValues()._get("metrics/" + key3, null); assertNotNull(val); assertTrue(val instanceof Number); assertEquals(3, ((Number) val).intValue()); // test multiple keys resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", - MetricsHandler.KEY_PARAM, key1, MetricsHandler.KEY_PARAM, key2, MetricsHandler.KEY_PARAM, key3), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + CommonParams.WT, + "json", + MetricsHandler.KEY_PARAM, + key1, + MetricsHandler.KEY_PARAM, + key2, + MetricsHandler.KEY_PARAM, + key3), + resp); - val = resp.getValues()._get( "metrics/" + key1, null); + val = resp.getValues()._get("metrics/" + key1, null); assertNotNull(val); - val = resp.getValues()._get( "metrics/" + key2, null); + val = resp.getValues()._get("metrics/" + key2, null); assertNotNull(val); - 
val = resp.getValues()._get( "metrics/" + key3, null); + val = resp.getValues()._get("metrics/" + key3, null); assertNotNull(val); String key4 = "solr.core.collection1:QUERY./select.requestTimes:1minRate"; resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", - MetricsHandler.KEY_PARAM, key4), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + CommonParams.WT, + "json", + MetricsHandler.KEY_PARAM, + key4), + resp); // the key contains a slash, need explicit list of path elements val = resp.getValues()._get(Arrays.asList("metrics", key4), null); assertNotNull(val); @@ -322,8 +527,17 @@ public void testKeyMetrics() throws Exception { // invalid keys resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", - MetricsHandler.KEY_PARAM, "foo", MetricsHandler.KEY_PARAM, "foo:bar:baz:xyz"), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + CommonParams.WT, + "json", + MetricsHandler.KEY_PARAM, + "foo", + MetricsHandler.KEY_PARAM, + "foo:bar:baz:xyz"), + resp); values = resp.getValues(); NamedList metrics = (NamedList) values.get("metrics"); assertEquals(0, metrics.size()); @@ -332,8 +546,15 @@ public void testKeyMetrics() throws Exception { // unknown registry resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", - MetricsHandler.KEY_PARAM, "foo:bar:baz"), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + CommonParams.WT, + "json", + MetricsHandler.KEY_PARAM, + "foo:bar:baz"), + resp); values = resp.getValues(); metrics = (NamedList) values.get("metrics"); assertEquals(0, metrics.size()); @@ -341,8 +562,15 @@ public void testKeyMetrics() throws Exception { // unknown metric resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", - MetricsHandler.KEY_PARAM, "solr.jetty:unknown:baz"), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + CommonParams.WT, + "json", + MetricsHandler.KEY_PARAM, + "solr.jetty:unknown:baz"), + resp); values = resp.getValues(); metrics = (NamedList) values.get("metrics"); assertEquals(0, metrics.size()); @@ -358,10 +586,19 @@ public void testExprMetrics() throws Exception { String key1 = "solr\\.core\\..*:.*/select\\.request.*:.*Rate"; SolrQueryResponse resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", - MetricsHandler.EXPR_PARAM, key1), resp); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + CommonParams.WT, + "json", + MetricsHandler.EXPR_PARAM, + key1), + resp); // response structure is like in the case of non-key params - Object val = resp.getValues().findRecursive( "metrics", "solr.core.collection1", "QUERY./select.requestTimes"); + Object val = + resp.getValues() + .findRecursive("metrics", "solr.core.collection1", "QUERY./select.requestTimes"); assertNotNull(val); assertTrue(val instanceof MapWriter); Map map = new HashMap<>(); @@ -376,10 +613,17 @@ public void testExprMetrics() throws Exception { String key2 = "solr\\.core\\..*:.*/select\\.request.*"; resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", - MetricsHandler.EXPR_PARAM, key2), resp); + handler.handleRequestBody( + req( + 
CommonParams.QT, + "/admin/metrics", + CommonParams.WT, + "json", + MetricsHandler.EXPR_PARAM, + key2), + resp); // response structure is like in the case of non-key params - val = resp.getValues().findRecursive( "metrics", "solr.core.collection1"); + val = resp.getValues().findRecursive("metrics", "solr.core.collection1"); assertNotNull(val); Object v = ((SimpleOrderedMap) val).get("QUERY./select.requestTimes"); assertNotNull(v); @@ -400,9 +644,20 @@ public void testExprMetrics() throws Exception { String key3 = "solr\\.core\\..*:.*/select.*\\.requestTimes:count"; resp = new SolrQueryResponse(); // ORDER OF PARAMS MATTERS HERE! see the refguide - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", - MetricsHandler.EXPR_PARAM, key2, MetricsHandler.EXPR_PARAM, key1, MetricsHandler.EXPR_PARAM, key3), resp); - val = resp.getValues().findRecursive( "metrics", "solr.core.collection1"); + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + CommonParams.WT, + "json", + MetricsHandler.EXPR_PARAM, + key2, + MetricsHandler.EXPR_PARAM, + key1, + MetricsHandler.EXPR_PARAM, + key3), + resp); + val = resp.getValues().findRecursive("metrics", "solr.core.collection1"); assertNotNull(val); // for requestTimes only the full set of values from the first expr should be present assertNotNull(val); @@ -426,47 +681,111 @@ public void testExprMetrics() throws Exception { @Test public void testMetricsUnload() throws Exception { - SolrCore core = h.getCoreContainer().getCore("collection1");//;.getRequestHandlers().put("/dumphandler", new DumpRequestHandler()); - RefreshablePluginHolder pluginHolder =null; + SolrCore core = h.getCoreContainer().getCore("collection1"); + // .getRequestHandlers().put("/dumphandler", new DumpRequestHandler()); + RefreshablePluginHolder pluginHolder = null; try { - PluginInfo info = new PluginInfo(SolrRequestHandler.TYPE, Map.of("name", "/dumphandler", "class", DumpRequestHandler.class.getName())); + PluginInfo info = + new PluginInfo( + SolrRequestHandler.TYPE, + Map.of("name", "/dumphandler", "class", DumpRequestHandler.class.getName())); DumpRequestHandler requestHandler = new DumpRequestHandler(); - requestHandler.gaugevals = Map.of("d_k1","v1", "d_k2","v2"); + requestHandler.gaugevals = Map.of("d_k1", "v1", "d_k2", "v2"); pluginHolder = new RefreshablePluginHolder(info, requestHandler); - core.getRequestHandlers().put("/dumphandler", + core.getRequestHandlers().put("/dumphandler", pluginHolder); - pluginHolder); } finally { core.close(); } - - MetricsHandler handler = new MetricsHandler(h.getCoreContainer()); SolrQueryResponse resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", MetricsHandler.COMPACT_PARAM, "true", "key", "solr.core.collection1:QUERY./dumphandler.dumphandlergauge"), + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + CommonParams.WT, + "json", + MetricsHandler.COMPACT_PARAM, + "true", + "key", + "solr.core.collection1:QUERY./dumphandler.dumphandlergauge"), resp); - assertEquals("v1", resp.getValues()._getStr(Arrays.asList("metrics", "solr.core.collection1:QUERY./dumphandler.dumphandlergauge","d_k1"), null)); - assertEquals("v2", resp.getValues()._getStr(Arrays.asList("metrics","solr.core.collection1:QUERY./dumphandler.dumphandlergauge","d_k2"), null)); + assertEquals( + "v1", + resp.getValues() + ._getStr( + Arrays.asList( + "metrics", "solr.core.collection1:QUERY./dumphandler.dumphandlergauge", "d_k1"), + 
null)); + assertEquals( + "v2", + resp.getValues() + ._getStr( + Arrays.asList( + "metrics", "solr.core.collection1:QUERY./dumphandler.dumphandlergauge", "d_k2"), + null)); pluginHolder.closeHandler(); resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", MetricsHandler.COMPACT_PARAM, "true", "key", "solr.core.collection1:QUERY./dumphandler.dumphandlergauge"), + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + CommonParams.WT, + "json", + MetricsHandler.COMPACT_PARAM, + "true", + "key", + "solr.core.collection1:QUERY./dumphandler.dumphandlergauge"), resp); - assertEquals(null, resp.getValues()._getStr(Arrays.asList("metrics", "solr.core.collection1:QUERY./dumphandler.dumphandlergauge","d_k1"), null)); - assertEquals(null, resp.getValues()._getStr(Arrays.asList("metrics","solr.core.collection1:QUERY./dumphandler.dumphandlergauge","d_k2"), null)); + assertEquals( + null, + resp.getValues() + ._getStr( + Arrays.asList( + "metrics", "solr.core.collection1:QUERY./dumphandler.dumphandlergauge", "d_k1"), + null)); + assertEquals( + null, + resp.getValues() + ._getStr( + Arrays.asList( + "metrics", "solr.core.collection1:QUERY./dumphandler.dumphandlergauge", "d_k2"), + null)); DumpRequestHandler requestHandler = new DumpRequestHandler(); - requestHandler.gaugevals = Map.of("d_k1","v1.1", "d_k2","v2.1"); + requestHandler.gaugevals = Map.of("d_k1", "v1.1", "d_k2", "v2.1"); pluginHolder.reset(requestHandler); resp = new SolrQueryResponse(); - handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", MetricsHandler.COMPACT_PARAM, "true", "key", "solr.core.collection1:QUERY./dumphandler.dumphandlergauge"), + handler.handleRequestBody( + req( + CommonParams.QT, + "/admin/metrics", + CommonParams.WT, + "json", + MetricsHandler.COMPACT_PARAM, + "true", + "key", + "solr.core.collection1:QUERY./dumphandler.dumphandlergauge"), resp); - assertEquals("v1.1", resp.getValues()._getStr(Arrays.asList("metrics", "solr.core.collection1:QUERY./dumphandler.dumphandlergauge","d_k1"), null)); - assertEquals("v2.1", resp.getValues()._getStr(Arrays.asList("metrics","solr.core.collection1:QUERY./dumphandler.dumphandlergauge","d_k2"), null)); + assertEquals( + "v1.1", + resp.getValues() + ._getStr( + Arrays.asList( + "metrics", "solr.core.collection1:QUERY./dumphandler.dumphandlergauge", "d_k1"), + null)); + assertEquals( + "v2.1", + resp.getValues() + ._getStr( + Arrays.asList( + "metrics", "solr.core.collection1:QUERY./dumphandler.dumphandlergauge", "d_k2"), + null)); handler.close(); } @@ -488,20 +807,18 @@ public boolean isLoaded() { void closeHandler() throws Exception { this.metricsInfo = rh.getSolrMetricsContext(); -// if(metricsInfo.tag.contains(String.valueOf(rh.hashCode()))){ -// //this created a new child metrics -// metricsInfo = metricsInfo.getParent(); -// } + // if(metricsInfo.tag.contains(String.valueOf(rh.hashCode()))){ + // //this created a new child metrics + // metricsInfo = metricsInfo.getParent(); + // } this.rh.close(); } void reset(DumpRequestHandler rh) throws Exception { - this.rh = rh; - if(metricsInfo != null) - this.rh.initializeMetrics(metricsInfo, "/dumphandler"); + this.rh = rh; + if (metricsInfo != null) this.rh.initializeMetrics(metricsInfo, "/dumphandler"); } - @Override public SolrRequestHandler get() { return rh; @@ -511,7 +828,8 @@ public SolrRequestHandler get() { public static class DumpRequestHandler extends RequestHandlerBase { static String key = 
DumpRequestHandler.class.getName(); - Map gaugevals ; + Map gaugevals; + @Override public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception { rsp.add("key", key); @@ -526,9 +844,7 @@ public String getDescription() { public void initializeMetrics(SolrMetricsContext parentContext, String scope) { super.initializeMetrics(parentContext, scope); MetricsMap metrics = new MetricsMap(map -> gaugevals.forEach((k, v) -> map.putNoEx(k, v))); - solrMetricsContext.gauge( - metrics, true, "dumphandlergauge", getCategory().toString(), scope); - + solrMetricsContext.gauge(metrics, true, "dumphandlergauge", getCategory().toString(), scope); } @Override diff --git a/solr/core/src/test/org/apache/solr/handler/admin/PropertiesRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/PropertiesRequestHandlerTest.java index be95fb97b26..20fc0bc6b77 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/PropertiesRequestHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/PropertiesRequestHandlerTest.java @@ -16,7 +16,6 @@ */ package org.apache.solr.handler.admin; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrRequest; @@ -28,13 +27,11 @@ import org.junit.BeforeClass; import org.junit.Test; - public class PropertiesRequestHandlerTest extends SolrTestCaseJ4 { public static final String PASSWORD = "secret123"; public static final String REDACT_STRING = RedactionUtils.getRedactString(); - @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema.xml"); @@ -43,22 +40,22 @@ public static void beforeClass() throws Exception { @Test public void testRedaction() throws Exception { RedactionUtils.setRedactSystemProperty(true); - for(String propName: new String[]{"some.password", "javax.net.ssl.trustStorePassword"}){ + for (String propName : new String[] {"some.password", "javax.net.ssl.trustStorePassword"}) { System.setProperty(propName, PASSWORD); NamedList properties = readProperties(); - assertEquals("Failed to redact "+propName, REDACT_STRING, properties.get(propName)); + assertEquals("Failed to redact " + propName, REDACT_STRING, properties.get(propName)); } } @Test public void testDisabledRedaction() throws Exception { RedactionUtils.setRedactSystemProperty(false); - for(String propName: new String[]{"some.password", "javax.net.ssl.trustStorePassword"}){ + for (String propName : new String[] {"some.password", "javax.net.ssl.trustStorePassword"}) { System.setProperty(propName, PASSWORD); NamedList properties = readProperties(); - assertEquals("Failed to *not* redact "+propName, PASSWORD, properties.get(propName)); + assertEquals("Failed to *not* redact " + propName, PASSWORD, properties.get(propName)); } } @@ -66,8 +63,10 @@ public void testDisabledRedaction() throws Exception { private NamedList readProperties() throws Exception { SolrClient client = new EmbeddedSolrServer(h.getCore()); - NamedList properties = client.request(new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/info/properties", - new ModifiableSolrParams())); + NamedList properties = + client.request( + new GenericSolrRequest( + SolrRequest.METHOD.GET, "/admin/info/properties", new ModifiableSolrParams())); return (NamedList) properties.get("system.properties"); } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerLocalForTesting.java 
index e56dd5bc1cd..f69723870ec 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerLocalForTesting.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerLocalForTesting.java
@@ -18,12 +18,9 @@
 package org.apache.solr.handler.admin;
 
 import java.io.IOException;
-
 import org.apache.solr.core.CoreContainer;
 
-/**
- * Wrapper for use in tests
- */
+/** Wrapper for use in tests */
 public class SecurityConfHandlerLocalForTesting extends SecurityConfHandlerLocal {
   public SecurityConfHandlerLocalForTesting(CoreContainer coreContainer) {
     super(coreContainer);
@@ -32,7 +29,7 @@ public SecurityConfHandlerLocalForTesting(CoreContainer coreContainer) {
   public boolean persistConf(SecurityConfig securityConfig) throws IOException {
     return super.persistConf(securityConfig);
   }
-  
+
   public void securityConfEdited() {
     super.securityConfEdited();
   }
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerTest.java
index d04b6fdb4ac..64bd120e343 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/SecurityConfHandlerTest.java
@@ -16,12 +16,13 @@
  */
 package org.apache.solr.handler.admin;
 
+import static org.apache.solr.handler.admin.SecurityConfHandler.SecurityConfig;
+
 import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.CommandOperation;
@@ -32,33 +33,30 @@
 import org.apache.solr.security.BasicAuthPlugin;
 import org.apache.solr.security.RuleBasedAuthorizationPlugin;
 
-import static org.apache.solr.handler.admin.SecurityConfHandler.SecurityConfig;
-
 public class SecurityConfHandlerTest extends SolrTestCaseJ4 {
 
   @SuppressWarnings({"unchecked", "rawtypes"})
   public void testEdit() throws Exception {
     MockSecurityHandler handler = new MockSecurityHandler();
-    String command = "{\n" +
-        "'set-user': {'tom':'TomIsCool'},\n" +
-        "'set-user':{ 'tom':'TomIsUberCool'}\n" +
-        "}";
-    LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, new ModifiableSolrParams());
-    req.getContext().put("httpMethod","POST");
-    req.getContext().put("path","/admin/authentication");
-    ContentStreamBase.ByteArrayStream o = new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8),"");
+    String command =
+        "{\n"
+            + "'set-user': {'tom':'TomIsCool'},\n"
+            + "'set-user':{ 'tom':'TomIsUberCool'}\n"
+            + "}";
+    LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, new ModifiableSolrParams());
+    req.getContext().put("httpMethod", "POST");
+    req.getContext().put("path", "/admin/authentication");
+    ContentStreamBase.ByteArrayStream o =
+        new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8), "");
     req.setContentStreams(Collections.singletonList(o));
-    handler.handleRequestBody(req,new SolrQueryResponse());
+    handler.handleRequestBody(req, new SolrQueryResponse());
     try (BasicAuthPlugin basicAuth = new BasicAuthPlugin()) {
       SecurityConfig securityCfg = handler.m.get("/security.json");
       basicAuth.init((Map) securityCfg.getData().get("authentication"));
       assertTrue(basicAuth.authenticate("tom", "TomIsUberCool"));
 
-      command = "{\n" +
-          "'set-user': {'harry':'HarryIsCool'},\n" +
-          "'delete-user': ['tom']\n" +
-          "}";
+      command = "{\n" + "'set-user': {'harry':'HarryIsCool'},\n" + "'delete-user': ['tom']\n" + "}";
       o = new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8), "");
       req.setContentStreams(Collections.singletonList(o));
       handler.handleRequestBody(req, new SolrQueryResponse());
@@ -66,23 +64,22 @@ public void testEdit() throws Exception {
       assertEquals(3, securityCfg.getVersion());
       Map result = (Map) securityCfg.getData().get("authentication");
       result = (Map) result.get("credentials");
-      assertEquals(1,result.size());
+      assertEquals(1, result.size());
     }
 
-
-
-    command = "{'set-permission':{ collection : acoll ,\n" +
-        " path : '/nonexistentpath',\n" +
-        " role :guest },\n" +
-        "'set-user-role': { 'tom': ['admin','dev']},"+
-        "'set-permission':{'name': 'security-edit',\n" +
-        " 'role': 'admin'}\n" +
-        "}";
+    command =
+        "{'set-permission':{ collection : acoll ,\n"
+            + " path : '/nonexistentpath',\n"
+            + " role :guest },\n"
+            + "'set-user-role': { 'tom': ['admin','dev']},"
+            + "'set-permission':{'name': 'security-edit',\n"
+            + " 'role': 'admin'}\n"
+            + "}";
     req = new LocalSolrQueryRequest(null, new ModifiableSolrParams());
-    req.getContext().put("httpMethod","POST");
-    req.getContext().put("path","/admin/authorization");
-    o = new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8),"");
+    req.getContext().put("httpMethod", "POST");
+    req.getContext().put("path", "/admin/authorization");
+    o = new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8), "");
     req.setContentStreams(Collections.singletonList(o));
     SolrQueryResponse rsp = new SolrQueryResponse();
     handler.handleRequestBody(req, rsp);
@@ -98,14 +95,15 @@ public void testEdit() throws Exception {
       assertEquals("acoll", p.get("collection"));
       break;
     }
-    command = "{\n" +
-        "'set-permission':{index : 2, name : security-edit,\n" +
-        " 'role': ['admin','dev']\n" +
-        " }}";
+    command =
+        "{\n"
+            + "'set-permission':{index : 2, name : security-edit,\n"
+            + " 'role': ['admin','dev']\n"
+            + " }}";
     req = new LocalSolrQueryRequest(null, new ModifiableSolrParams());
-    req.getContext().put("httpMethod","POST");
-    req.getContext().put("path","/admin/authorization");
-    o = new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8),"");
+    req.getContext().put("httpMethod", "POST");
+    req.getContext().put("path", "/admin/authorization");
+    o = new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8), "");
     req.setContentStreams(Collections.singletonList(o));
     rsp = new SolrQueryResponse();
     handler.handleRequestBody(req, rsp);
@@ -115,17 +113,18 @@ public void testEdit() throws Exception {
     Map p = permissions.get(1);
     assertEquals("security-edit", p.get("name"));
     List rol = (List) p.get("role");
-    assertEquals( "admin", rol.get(0));
-    assertEquals( "dev", rol.get(1));
-
-    command = "{\n" +
-        "'update-permission':{'index': 1,\n" +
-        " 'role': ['guest','admin']\n" +
-        " }}";
+    assertEquals("admin", rol.get(0));
+    assertEquals("dev", rol.get(1));
+
+    command =
+        "{\n"
+            + "'update-permission':{'index': 1,\n"
+            + " 'role': ['guest','admin']\n"
+            + " }}";
     req = new LocalSolrQueryRequest(null, new ModifiableSolrParams());
-    req.getContext().put("httpMethod","POST");
-    req.getContext().put("path","/admin/authorization");
-    o = new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8),"");
+    req.getContext().put("httpMethod", "POST");
+    req.getContext().put("path", "/admin/authorization");
+    o = new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8), "");
     req.setContentStreams(Collections.singletonList(o));
     rsp = new SolrQueryResponse();
     handler.handleRequestBody(req, rsp);
@@ -135,19 +134,14 @@ public void testEdit() throws Exception {
     p = permissions.get(0);
     assertEquals("acoll", p.get("collection"));
     rol = (List) p.get("role");
-    assertEquals( "guest", rol.get(0));
-    assertEquals( "admin", rol.get(1));
-
+    assertEquals("guest", rol.get(0));
+    assertEquals("admin", rol.get(1));
 
-
-    command = "{\n" +
-        "delete-permission: 1,\n" +
-        " set-user-role : { tom :null}\n" +
-        "}";
+    command = "{\n" + "delete-permission: 1,\n" + " set-user-role : { tom :null}\n" + "}";
     req = new LocalSolrQueryRequest(null, new ModifiableSolrParams());
-    req.getContext().put("httpMethod","POST");
-    req.getContext().put("path","/admin/authorization");
-    o = new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8),"");
+    req.getContext().put("httpMethod", "POST");
+    req.getContext().put("path", "/admin/authorization");
+    o = new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8), "");
     req.setContentStreams(Collections.singletonList(o));
     rsp = new SolrQueryResponse();
     handler.handleRequestBody(req, rsp);
@@ -161,38 +155,43 @@ public void testEdit() throws Exception {
     for (Map permission : permissions) {
       assertFalse("some-permission".equals(permission.get("name")));
     }
-    command = "{\n" +
-        "'set-permission':{index : 2, 'name': 'security-edit',\n" +
-        " 'method':'POST',"+ // -ve test security edit is a well-known permission , only role attribute should be provided
-        " 'role': 'admin'\n" +
-        " }}";
+    // -ve test security edit is a well-known permission, only role attribute should be provided
+    command =
+        "{\n"
+            + "'set-permission':{index : 2, 'name': 'security-edit',\n"
+            + " 'method':'POST',"
+            + " 'role': 'admin'\n"
+            + " }}";
     req = new LocalSolrQueryRequest(null, new ModifiableSolrParams());
-    req.getContext().put("httpMethod","POST");
-    req.getContext().put("path","/admin/authorization");
-    o = new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8),"");
+    req.getContext().put("httpMethod", "POST");
+    req.getContext().put("path", "/admin/authorization");
+    o = new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8), "");
     req.setContentStreams(Collections.singletonList(o));
     rsp = new SolrQueryResponse();
     handler.handleRequestBody(req, rsp);
     @SuppressWarnings({"rawtypes"})
-    List l = (List) ((Map) ((List)rsp.getValues().get("errorMessages")).get(0)).get("errorMessages");
+    List l =
+        (List) ((Map) ((List) rsp.getValues().get("errorMessages")).get(0)).get("errorMessages");
     assertEquals(1, l.size());
     handler.close();
   }
 
-
   public static class MockSecurityHandler extends SecurityConfHandler {
     private Map m;
     final BasicAuthPlugin basicAuthPlugin = new BasicAuthPlugin();
-    final RuleBasedAuthorizationPlugin rulesBasedAuthorizationPlugin = new RuleBasedAuthorizationPlugin();
-
+    final RuleBasedAuthorizationPlugin rulesBasedAuthorizationPlugin =
+        new RuleBasedAuthorizationPlugin();
 
     public MockSecurityHandler() {
       super(null);
       m = new HashMap<>();
 
       SecurityConfig sp = new SecurityConfig();
       Map securityData = new HashMap<>();
-      securityData.put("authentication", Map.of("class", "solr."+ BasicAuthPlugin.class.getSimpleName()));
-      securityData.put("authorization", Map.of("class", "solr."+RuleBasedAuthorizationPlugin.class.getSimpleName()));
+      securityData.put(
+          "authentication", Map.of("class", "solr." + BasicAuthPlugin.class.getSimpleName()));
+      securityData.put(
+          "authorization",
+          Map.of("class", "solr." + RuleBasedAuthorizationPlugin.class.getSimpleName()));
       sp.setVersion(1);
       sp.setData(securityData);
       m.put("/security.json", sp);
@@ -231,7 +230,7 @@ public SecurityConfig getSecurityConfig(boolean getFresh) {
   protected boolean persistConf(SecurityConfig props) {
     SecurityConfig fromMap = m.get("/security.json");
     if (fromMap.getVersion() == props.getVersion()) {
-      props.setVersion(props.getVersion()+1);
+      props.setVersion(props.getVersion() + 1);
       m.put("/security.json", props);
       return true;
     } else {
@@ -239,44 +238,37 @@ protected boolean persistConf(SecurityConfig props) {
     }
   }
 
-
   public String getStandardJson() throws Exception {
-    String command = "{\n" +
-        "'set-user': {'solr':'SolrRocks'}\n" +
-        "}";
-    LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, new ModifiableSolrParams());
-    req.getContext().put("httpMethod","POST");
-    req.getContext().put("path","/admin/authentication");
-    ContentStreamBase.ByteArrayStream o = new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8),"");
+    String command = "{\n" + "'set-user': {'solr':'SolrRocks'}\n" + "}";
+    LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, new ModifiableSolrParams());
+    req.getContext().put("httpMethod", "POST");
+    req.getContext().put("path", "/admin/authentication");
+    ContentStreamBase.ByteArrayStream o =
+        new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8), "");
     req.setContentStreams(Collections.singletonList(o));
     handleRequestBody(req, new SolrQueryResponse());
 
-    command = "{'set-user-role': { 'solr': 'admin'},\n" +
-        "'set-permission':{'name': 'security-edit', 'role': 'admin'}" +
-        "}";
+    command =
+        "{'set-user-role': { 'solr': 'admin'},\n"
+            + "'set-permission':{'name': 'security-edit', 'role': 'admin'}"
+            + "}";
     req = new LocalSolrQueryRequest(null, new ModifiableSolrParams());
-    req.getContext().put("httpMethod","POST");
-    req.getContext().put("path","/admin/authorization");
-    o = new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8),"");
+    req.getContext().put("httpMethod", "POST");
+    req.getContext().put("path", "/admin/authorization");
+    o = new ContentStreamBase.ByteArrayStream(command.getBytes(StandardCharsets.UTF_8), "");
     req.setContentStreams(Collections.singletonList(o));
     SolrQueryResponse rsp = new SolrQueryResponse();
     handleRequestBody(req, rsp);
     Map data = m.get("/security.json").getData();
-    ((Map)data.get("authentication")).remove("");
-    ((Map)data.get("authorization")).remove("");
-    return Utils.toJSONString (data);
+    ((Map) data.get("authentication")).remove("");
+    ((Map) data.get("authorization")).remove("");
+    return Utils.toJSONString(data);
   }
 }
 
-
-  public static void main(String[] args) throws Exception{
+  public static void main(String[] args) throws Exception {
    try (MockSecurityHandler msh = new MockSecurityHandler()) {
      System.out.println(msh.getStandardJson());
    }
  }
-
-
-
 }
-
-
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/SegmentsInfoRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/SegmentsInfoRequestHandlerTest.java
index 23c0df52683..95e0a55e900 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/SegmentsInfoRequestHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/SegmentsInfoRequestHandlerTest.java
@@ -17,7 +17,6 @@
 package org.apache.solr.handler.admin;
 
 import java.io.IOException;
-
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.SegmentCommitInfo;
 import org.apache.lucene.index.SegmentInfos;
@@ -29,18 +28,16 @@
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-/**
- * Tests for SegmentsInfoRequestHandler. Plugin entry, returning data of created segment.
- */
+/** Tests for SegmentsInfoRequestHandler. Plugin entry, returning data of created segment. */
 public class SegmentsInfoRequestHandlerTest extends SolrTestCaseJ4 {
   private static final int DOC_COUNT = 5;
-  
+
   private static final int DEL_COUNT = 1;
-  
+
   private static final int NUM_SEGMENTS = 2;
 
   private static int initialRefCount;
-  
+
   @BeforeClass
   public static void beforeClass() throws Exception {
@@ -51,35 +48,41 @@ public static void beforeClass() throws Exception {
     // Also prevent flushes
     System.setProperty("solr.tests.maxBufferedDocs", "1000");
     System.setProperty("solr.tests.ramBufferSizeMB", "5000");
-  
+
     System.setProperty("enable.update.log", "false"); // no _version_ in our schema
-    initCore("solrconfig.xml", "schema12.xml"); // segments API shouldn't depend on _version_ or ulog
-
+    // segments API shouldn't depend on _version_ or ulog
+    initCore("solrconfig.xml", "schema12.xml");
+
     // build up an index with at least 2 segments and some deletes
     for (int i = 0; i < DOC_COUNT; i++) {
-      assertU(adoc("id","SOLR100" + i, "name","Apache Solr:" + i));
+      assertU(adoc("id", "SOLR100" + i, "name", "Apache Solr:" + i));
     }
     for (int i = 0; i < DEL_COUNT; i++) {
       assertU(delI("SOLR100" + i));
     }
     assertU(commit());
     for (int i = 0; i < DOC_COUNT; i++) {
-      assertU(adoc("id","SOLR200" + i, "name","Apache Solr:" + i));
+      assertU(adoc("id", "SOLR200" + i, "name", "Apache Solr:" + i));
     }
     assertU(commit());
-    h.getCore().withSearcher((searcher) -> {
-      int numSegments = SegmentInfos.readLatestCommit(searcher.getIndexReader().directory()).size();
-      // if this is not NUM_SEGMENTS, there was some unexpected flush or merge
-      assertEquals("Unexpected number of segment in the index: " + numSegments,
-          NUM_SEGMENTS, numSegments);
-      return null;
-    });
+    h.getCore()
+        .withSearcher(
+            (searcher) -> {
+              int numSegments =
+                  SegmentInfos.readLatestCommit(searcher.getIndexReader().directory()).size();
+              // if this is not NUM_SEGMENTS, there was some unexpected flush or merge
+              assertEquals(
+                  "Unexpected number of segment in the index: " + numSegments,
+                  NUM_SEGMENTS,
+                  numSegments);
+              return null;
+            });
     // see SOLR-14431
     RefCounted iwRef = h.getCore().getSolrCoreState().getIndexWriter(h.getCore());
     initialRefCount = iwRef.getRefcount();
     iwRef.decref();
   }
-  
+
   @AfterClass
   public static void afterClass() throws Exception {
     RefCounted iwRef = h.getCore().getSolrCoreState().getIndexWriter(h.getCore());
@@ -92,50 +95,59 @@ public static void afterClass() throws Exception {
   }
 
   @Test
-  public void testSegmentInfos() {
-    assertQ("Unexpected number of segments returned",
-        req("qt","/admin/segments"),
+  public void testSegmentInfos() {
+    assertQ(
+        "Unexpected number of segments returned",
+        req("qt", "/admin/segments"),
         NUM_SEGMENTS + "=count(//lst[@name='segments']/lst)");
   }
 
   @Test
   public void testSegmentInfosVersion() {
-    assertQ("Unexpected number of segments returned",
-        req("qt","/admin/segments"),
-        NUM_SEGMENTS + "=count(//lst[@name='segments']/lst/str[@name='version'][.='" + Version.LATEST + "'])");
+    assertQ(
+        "Unexpected number of segments returned",
+        req("qt", "/admin/segments"),
+        NUM_SEGMENTS
+            + "=count(//lst[@name='segments']/lst/str[@name='version'][.='"
+            + Version.LATEST
+            + "'])");
   }
-  
+
   @Test
   public void testSegmentNames() throws IOException {
     String[] segmentNamePatterns = new String[NUM_SEGMENTS];
-    h.getCore().withSearcher((searcher) -> {
-      int i = 0;
-      for (SegmentCommitInfo sInfo : SegmentInfos.readLatestCommit(searcher.getIndexReader().directory())) {
-        assertTrue("Unexpected number of segment in the index: " + i, i < NUM_SEGMENTS);
-        segmentNamePatterns[i] = "//lst[@name='segments']/lst/str[@name='name'][.='" + sInfo.info.name + "']";
-        i++;
-      }
-
-      return null;
-    });
-    assertQ("Unexpected segment names returned",
-        req("qt","/admin/segments"),
-        segmentNamePatterns);
+    h.getCore()
+        .withSearcher(
+            (searcher) -> {
+              int i = 0;
+              for (SegmentCommitInfo sInfo :
+                  SegmentInfos.readLatestCommit(searcher.getIndexReader().directory())) {
+                assertTrue("Unexpected number of segment in the index: " + i, i < NUM_SEGMENTS);
+                segmentNamePatterns[i] =
+                    "//lst[@name='segments']/lst/str[@name='name'][.='" + sInfo.info.name + "']";
+                i++;
+              }
+
+              return null;
+            });
    assertQ("Unexpected segment names returned", req("qt", "/admin/segments"), segmentNamePatterns);
   }
-  
+
   @Test
   public void testSegmentInfosData() {
-    assertQ("Unexpected document counts in result",
-        req("qt","/admin/segments"),
-        //#Document
-        (DOC_COUNT*2)+"=sum(//lst[@name='segments']/lst/int[@name='size'])",
-        //#Deletes
-        DEL_COUNT+"=sum(//lst[@name='segments']/lst/int[@name='delCount'])");
+    assertQ(
+        "Unexpected document counts in result",
+        req("qt", "/admin/segments"),
+        // #Document
+        (DOC_COUNT * 2) + "=sum(//lst[@name='segments']/lst/int[@name='size'])",
+        // #Deletes
+        DEL_COUNT + "=sum(//lst[@name='segments']/lst/int[@name='delCount'])");
   }
 
   @Test
   public void testCoreInfo() {
-    assertQ("Missing core info",
+    assertQ(
+        "Missing core info",
        req("qt", "/admin/segments", "coreInfo", "true"),
        "boolean(//lst[@name='info']/lst[@name='core'])");
   }
@@ -143,20 +155,25 @@ public void testCoreInfo() {
   @Test
   public void testFieldInfo() throws Exception {
     String[] segmentNamePatterns = new String[NUM_SEGMENTS];
-    h.getCore().withSearcher((searcher) -> {
-      int i = 0;
-      for (SegmentCommitInfo sInfo : SegmentInfos.readLatestCommit(searcher.getIndexReader().directory())) {
-        assertTrue("Unexpected number of segment in the index: " + i, i < NUM_SEGMENTS);
-        segmentNamePatterns[i] = "boolean(//lst[@name='segments']/lst[@name='" + sInfo.info.name + "']/lst[@name='fields']/lst[@name='id']/str[@name='flags'])";
-        i++;
-      }
-
-      return null;
-    });
-    assertQ("Unexpected field infos returned",
-        req("qt","/admin/segments", "fieldInfo", "true"),
+    h.getCore()
+        .withSearcher(
+            (searcher) -> {
+              int i = 0;
+              for (SegmentCommitInfo sInfo :
+                  SegmentInfos.readLatestCommit(searcher.getIndexReader().directory())) {
+                assertTrue("Unexpected number of segment in the index: " + i, i < NUM_SEGMENTS);
+                segmentNamePatterns[i] =
+                    "boolean(//lst[@name='segments']/lst[@name='"
+                        + sInfo.info.name
+                        + "']/lst[@name='fields']/lst[@name='id']/str[@name='flags'])";
+                i++;
+              }
+
+              return null;
+            });
+    assertQ(
+        "Unexpected field infos returned",
+        req("qt", "/admin/segments", "fieldInfo", "true"),
        segmentNamePatterns);
   }
-
-
 }
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/ShowFileRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/ShowFileRequestHandlerTest.java
index ccdb465fc1d..2a06b99dcf4 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/ShowFileRequestHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/ShowFileRequestHandlerTest.java
@@ -16,6 +16,10 @@
  */
 package org.apache.solr.handler.admin;
 
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Reader;
+import java.util.concurrent.atomic.AtomicBoolean;
 import org.apache.solr.SolrJettyTestBase;
 import org.apache.solr.client.solrj.ResponseParser;
 import org.apache.solr.client.solrj.SolrClient;
@@ -33,14 +37,9 @@
 import org.apache.solr.response.SolrQueryResponse;
 import org.junit.BeforeClass;
 
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.Reader;
-import java.util.concurrent.atomic.AtomicBoolean;
-
 /**
- * Extend SolrJettyTestBase because the SOLR-2535 bug only manifested itself when
- * the {@link org.apache.solr.servlet.SolrDispatchFilter} is used, which isn't for embedded Solr use.
+ * Extend SolrJettyTestBase because the SOLR-2535 bug only manifested itself when the {@link
+ * org.apache.solr.servlet.SolrDispatchFilter} is used, which isn't for embedded Solr use.
  */
 public class ShowFileRequestHandlerTest extends SolrJettyTestBase {
 
@@ -52,79 +51,84 @@ public static void beforeTest() throws Exception {
 
   public void test404ViaHttp() throws Exception {
     SolrClient client = getSolrClient();
-    QueryRequest request = new QueryRequest(params("file",
-        "does-not-exist-404.txt"));
+    QueryRequest request = new QueryRequest(params("file", "does-not-exist-404.txt"));
     request.setPath("/admin/file");
     SolrException e = expectThrows(SolrException.class, () -> request.process(client));
     assertEquals(404, e.code());
   }
 
   public void test404Locally() throws Exception {
-    // we need to test that executing the handler directly does not 
+    // we need to test that executing the handler directly does not
     // throw an exception, just sets the exception on the response.
     // bypass TestHarness since it will throw any exception found in the
     // response.
     SolrCore core = h.getCore();
     SolrQueryResponse rsp = new SolrQueryResponse();
-    core.execute(core.getRequestHandler("/admin/file"),
-        req("file", "does-not-exist-404.txt"), rsp);
+    core.execute(core.getRequestHandler("/admin/file"), req("file", "does-not-exist-404.txt"), rsp);
     assertNotNull("no exception in response", rsp.getException());
-    assertTrue("wrong type of exception: " + rsp.getException().getClass(),
+    assertTrue(
+        "wrong type of exception: " + rsp.getException().getClass(),
        rsp.getException() instanceof SolrException);
-    assertEquals(404, ((SolrException)rsp.getException()).code());
+    assertEquals(404, ((SolrException) rsp.getException()).code());
   }
 
   public void testDirList() throws SolrServerException, IOException {
     SolrClient client = getSolrClient();
-    //assertQ(req("qt", "/admin/file")); TODO file bug that SolrJettyTestBase extends SolrTestCaseJ4
+    // assertQ(req("qt", "/admin/file")); TODO file bug that SolrJettyTestBase extends
+    // SolrTestCaseJ4
     QueryRequest request = new QueryRequest();
     request.setPath("/admin/file");
     QueryResponse resp = request.process(client);
-    assertEquals(0,resp.getStatus());
-    assertTrue(((NamedList) resp.getResponse().get("files")).size() > 0);//some files
+    assertEquals(0, resp.getStatus());
+    assertTrue(((NamedList) resp.getResponse().get("files")).size() > 0); // some files
   }
 
   public void testGetRawFile() throws SolrServerException, IOException {
     SolrClient client = getSolrClient();
-    //assertQ(req("qt", "/admin/file")); TODO file bug that SolrJettyTestBase extends SolrTestCaseJ4
+    // assertQ(req("qt", "/admin/file"));
+    // TODO file bug that SolrJettyTestBase extends SolrTestCaseJ4
     QueryRequest request = new QueryRequest(params("file", "managed-schema"));
     request.setPath("/admin/file");
     final AtomicBoolean readFile = new AtomicBoolean();
-    request.setResponseParser(new ResponseParser() {
-      @Override
-      public String getWriterType() {
-        return "mock";//unfortunately this gets put onto params wt=mock but it apparently has no effect
-      }
-
-      @Override
-      public NamedList processResponse(InputStream body, String encoding) {
-        try {
-          if (body.read() >= 0)
-            readFile.set(true);
-        } catch (IOException e) {
-          throw new RuntimeException(e);
-        }
-        return null;
-      }
-
-      @Override
-      public NamedList processResponse(Reader reader) {
-        throw new UnsupportedOperationException("TODO unimplemented");//TODO
-      }
-    });
-
-    client.request(request);//runs request
-    //request.process(client); but we don't have a NamedList response
+    request.setResponseParser(
+        new ResponseParser() {
+          @Override
+          public String getWriterType() {
+            // unfortunately this gets put onto params wt=mock but it apparently has no effect
+            return "mock";
+          }
+
+          @Override
+          public NamedList processResponse(InputStream body, String encoding) {
+            try {
+              if (body.read() >= 0) readFile.set(true);
+            } catch (IOException e) {
+              throw new RuntimeException(e);
+            }
+            return null;
+          }
+
+          @Override
+          public NamedList processResponse(Reader reader) {
+            throw new UnsupportedOperationException("TODO unimplemented"); // TODO
+          }
+        });
+
+    client.request(request); // runs request
+    // request.process(client); but we don't have a NamedList response
     assertTrue(readFile.get());
   }
 
   public void testContentTypeHtmlBecomesTextPlain() throws Exception {
     SolrRequestHandler handler = h.getCore().getRequestHandler("/admin/file");
-    SolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), params("file", "schema.xml", "contentType", "text/html"));
+    SolrQueryRequest req =
+        new LocalSolrQueryRequest(
+            h.getCore(), params("file", "schema.xml", "contentType", "text/html"));
     SolrQueryResponse rsp = new SolrQueryResponse();
     handler.handleRequest(req, rsp);
-    ContentStreamBase.FileStream content = (ContentStreamBase.FileStream) rsp.getValues().get("content");
+    ContentStreamBase.FileStream content =
+        (ContentStreamBase.FileStream) rsp.getValues().get("content");
     assertEquals("text/plain", content.getContentType());
   }
 
@@ -133,14 +137,16 @@ public void testContentTypeHtmlDefault() throws Exception {
     SolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), params("file", "example.html"));
     SolrQueryResponse rsp = new SolrQueryResponse();
     handler.handleRequest(req, rsp);
-    ContentStreamBase.FileStream content = (ContentStreamBase.FileStream) rsp.getValues().get("content");
+    ContentStreamBase.FileStream content =
+        (ContentStreamBase.FileStream) rsp.getValues().get("content");
     // System attempts to guess content type, but will only return XML, JSON, CSV, never HTML
     assertEquals("application/xml", content.getContentType());
   }
 
   public void testIllegalContentType() {
     SolrClient client = getSolrClient();
-    QueryRequest request = new QueryRequest(params("file", "managed-schema", "contentType", "not/known"));
+    QueryRequest request =
+        new QueryRequest(params("file", "managed-schema", "contentType", "not/known"));
     request.setPath("/admin/file");
     request.setResponseParser(new NoOpResponseParser());
     expectThrows(SolrException.class, () -> client.request(request));
@@ -172,8 +178,12 @@ public void testGetSafeContentType() {
     assertEquals("text/plain", ShowFileRequestHandler.getSafeContentType("application/xhtml+xml"));
 
     // Content-type with charset
-    assertEquals("text/csv ; charset=utf-8", ShowFileRequestHandler.getSafeContentType("text/csv ; charset=utf-8"));
-    assertEquals("text/xml;charset=utf-8", ShowFileRequestHandler.getSafeContentType("text/xml;charset=utf-8"));
+    assertEquals(
+        "text/csv ; charset=utf-8",
+        ShowFileRequestHandler.getSafeContentType("text/csv ; charset=utf-8"));
+    assertEquals(
+        "text/xml;charset=utf-8",
+        ShowFileRequestHandler.getSafeContentType("text/xml;charset=utf-8"));
 
     // Null
     assertNull(ShowFileRequestHandler.getSafeContentType(null));
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/SolrEnvironmentTest.java b/solr/core/src/test/org/apache/solr/handler/admin/SolrEnvironmentTest.java
index 268c05666c9..042660368d1 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/SolrEnvironmentTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/SolrEnvironmentTest.java
@@ -17,11 +17,11 @@
 
 package org.apache.solr.handler.admin;
 
+import static org.junit.Assert.*;
+
 import org.apache.solr.common.SolrException;
 import org.junit.Test;
 
-import static org.junit.Assert.*;
-
 public class SolrEnvironmentTest {
 
   @Test(expected = SolrException.class)
@@ -70,4 +70,4 @@ public void tryingToHackColor() {
   public void illegalParam() {
     SolrEnvironment.parse("prod,foo=hello");
   }
-}
\ No newline at end of file
+}
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/SplitHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/SplitHandlerTest.java
index dcfc74968f6..ee819753451 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/SplitHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/SplitHandlerTest.java
@@ -21,7 +21,6 @@
 import java.util.Iterator;
 import java.util.List;
 import java.util.Random;
-
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.cloud.api.collections.SplitByPrefixTest;
 import org.apache.solr.cloud.api.collections.SplitByPrefixTest.Prefix;
@@ -33,13 +32,14 @@
 
 // test low level splitByPrefix range recommendations.
 // This is here to access package private methods.
-// See SplitByPrefixTest for cloud level tests of SPLITSHARD that use this by passing getRanges with the SPLIT command
+// See SplitByPrefixTest for cloud level tests of SPLITSHARD that use this by passing getRanges with
+// the SPLIT command
 public class SplitHandlerTest extends SolrTestCaseJ4 {
 
   @BeforeClass
   public static void beforeTests() throws Exception {
-    System.setProperty("managed.schema.mutable", "true");  // needed by cloud-managed config set
-    initCore("solrconfig.xml","schema_latest.xml");
+    System.setProperty("managed.schema.mutable", "true"); // needed by cloud-managed config set
+    initCore("solrconfig.xml", "schema_latest.xml");
   }
 
   void verifyContiguous(Collection results, DocRouter.Range currentRange) {
@@ -61,15 +61,21 @@ void verifyContiguous(Collection results, DocRouter.Range curre
     assertEquals(prev.max, currentRange.max);
   }
 
-
-
   // bias around special numbers
   int randomBound(Random rand) {
     int ret = 0;
-    switch(rand.nextInt(10)) {
-      case 0: ret = Integer.MIN_VALUE; break;
-      case 1: ret = Integer.MAX_VALUE; break;
-      case 2: ret = 0; break;
-      default: ret = rand.nextInt();
+    switch (rand.nextInt(10)) {
+      case 0:
+        ret = Integer.MIN_VALUE;
+        break;
+      case 1:
+        ret = Integer.MAX_VALUE;
+        break;
+      case 2:
+        ret = 0;
+        break;
+      default:
+        ret = rand.nextInt();
     }
     if (rand.nextBoolean()) {
       ret += rand.nextInt(2000) - 1000;
@@ -80,7 +86,7 @@ int randomBound(Random rand) {
   @Test
   public void testRandomSplitRecommendations() throws Exception {
     Random rand = random();
-    for (int i=0; i<10000; i++) {  // 1M takes ~ 1 sec
+    for (int i = 0; i < 10000; i++) { // 1M takes ~ 1 sec
       doRandomSplitRecommendation(rand);
     }
   }
@@ -89,7 +95,7 @@ public void doRandomSplitRecommendation(Random rand) throws Exception {
     int low = 0;
     int high = 0;
 
-    while (high-low < 10) {
+    while (high - low < 10) {
       low = randomBound(rand);
       high = randomBound(rand);
       if (low > high) {
@@ -99,8 +105,7 @@ public void doRandomSplitRecommendation(Random rand) throws Exception {
       }
     }
 
-    DocRouter.Range curr = new DocRouter.Range(low,high);
-
+    DocRouter.Range curr = new DocRouter.Range(low, high);
 
     int maxRanges = rand.nextInt(20);
 
@@ -108,26 +113,26 @@ public void doRandomSplitRecommendation(Random rand) throws Exception {
 
     // bucket can start before or after
     if (rand.nextBoolean()) {
-      start += rand.nextInt(200) - 100;
-      if (start > low) {
-        // underflow
-        start = Integer.MIN_VALUE;
-      }
+      start += rand.nextInt(200) - 100;
+      if (start > low) {
+        // underflow
+        start = Integer.MIN_VALUE;
+      }
     }
 
     List counts = new ArrayList<>(maxRanges);
-    for (;;) {
+    for (; ; ) {
       int end = start + rand.nextInt(100) + 1;
       if (end < start) {
         // overflow
         end = Integer.MAX_VALUE;
       }
-      counts.add( new SplitOp.RangeCount(new DocRouter.Range(start, end), rand.nextInt(1000)+1));
+      counts.add(new SplitOp.RangeCount(new DocRouter.Range(start, end), rand.nextInt(1000) + 1));
       if (counts.size() >= maxRanges) break;
       if (counts.size() == maxRanges / 2 && rand.nextBoolean()) {
         // transition toward the end of the range (more boundary cases for large ranges)
         start = high - rand.nextInt(100);
-        start = Math.max(start, end+1);
+        start = Math.max(start, end + 1);
       } else {
         start = end + 1;
       }
@@ -148,79 +153,78 @@ public void doRandomSplitRecommendation(Random rand) throws Exception {
     }
   }
 
-
   @Test
   public void testSplitRecommendations() throws Exception {
     // split whole range exactly in two
-    DocRouter.Range curr = new DocRouter.Range(10,15);
+    DocRouter.Range curr = new DocRouter.Range(10, 15);
     List counts = new ArrayList<>();
-    counts.add(new SplitOp.RangeCount(new DocRouter.Range(10,15), 100));
+    counts.add(new SplitOp.RangeCount(new DocRouter.Range(10, 15), 100));
     Collection results = SplitOp.getSplits(counts, curr);
     assertEquals(12, results.iterator().next().max);
     verifyContiguous(results, curr);
 
     // make sure range with docs is split in half even if current range of shard is bigger
-    curr = new DocRouter.Range(-100,101);
+    curr = new DocRouter.Range(-100, 101);
     counts = new ArrayList<>();
-    counts.add(new SplitOp.RangeCount(new DocRouter.Range(10,15), 100));
+    counts.add(new SplitOp.RangeCount(new DocRouter.Range(10, 15), 100));
     results = SplitOp.getSplits(counts, curr);
     assertEquals(12, results.iterator().next().max);
     verifyContiguous(results, curr);
 
     // don't freak out if we encounter some ranges outside of the current defined shard range
     // this can happen since document routing can be overridden.
-    curr = new DocRouter.Range(-100,101);
+    curr = new DocRouter.Range(-100, 101);
     counts = new ArrayList<>();
-    counts.add(new SplitOp.RangeCount(new DocRouter.Range(-1000,-990), 100));
-    counts.add(new SplitOp.RangeCount(new DocRouter.Range(-980,-970), 2));
-    counts.add(new SplitOp.RangeCount(new DocRouter.Range(10,15), 100));
-    counts.add(new SplitOp.RangeCount(new DocRouter.Range(1000,1010), 5));
-    counts.add(new SplitOp.RangeCount(new DocRouter.Range(1020,1030), 7));
+    counts.add(new SplitOp.RangeCount(new DocRouter.Range(-1000, -990), 100));
+    counts.add(new SplitOp.RangeCount(new DocRouter.Range(-980, -970), 2));
+    counts.add(new SplitOp.RangeCount(new DocRouter.Range(10, 15), 100));
+    counts.add(new SplitOp.RangeCount(new DocRouter.Range(1000, 1010), 5));
+    counts.add(new SplitOp.RangeCount(new DocRouter.Range(1020, 1030), 7));
     results = SplitOp.getSplits(counts, curr);
     assertEquals(12, results.iterator().next().max);
     verifyContiguous(results, curr);
 
-
     // splitting counts of [1,4,3] should result in [1,4],[3]
     // splitting count sof [3,4,1] should result in [3],[4,1]
-    // The current implementation has specific code for the latter case (hence this is needed for code coverage)
+    // The current implementation has specific code for the latter case (hence this is needed for
+    // code coverage)
     // The random tests *should* catch this as well though.
-    curr = new DocRouter.Range(-100,101);
+    curr = new DocRouter.Range(-100, 101);
     counts = new ArrayList<>();
-    counts.add(new SplitOp.RangeCount(new DocRouter.Range(0,9), 1));
-    counts.add(new SplitOp.RangeCount(new DocRouter.Range(10,19), 4));
-    counts.add(new SplitOp.RangeCount(new DocRouter.Range(20,29), 3));
+    counts.add(new SplitOp.RangeCount(new DocRouter.Range(0, 9), 1));
+    counts.add(new SplitOp.RangeCount(new DocRouter.Range(10, 19), 4));
+    counts.add(new SplitOp.RangeCount(new DocRouter.Range(20, 29), 3));
     results = SplitOp.getSplits(counts, curr);
     assertEquals(19, results.iterator().next().max);
     verifyContiguous(results, curr);
 
-    curr = new DocRouter.Range(-100,101);
+    curr = new DocRouter.Range(-100, 101);
     counts = new ArrayList<>();
-    counts.add(new SplitOp.RangeCount(new DocRouter.Range(0,9), 3));
-    counts.add(new SplitOp.RangeCount(new DocRouter.Range(10,19), 4));
-    counts.add(new SplitOp.RangeCount(new DocRouter.Range(20,29), 1));
+    counts.add(new SplitOp.RangeCount(new DocRouter.Range(0, 9), 3));
+    counts.add(new SplitOp.RangeCount(new DocRouter.Range(10, 19), 4));
+    counts.add(new SplitOp.RangeCount(new DocRouter.Range(20, 29), 1));
     results = SplitOp.getSplits(counts, curr);
     assertEquals(9, results.iterator().next().max);
     verifyContiguous(results, curr);
 
-
     // test that if largest count is first
-    curr = new DocRouter.Range(-100,101);
+    curr = new DocRouter.Range(-100, 101);
     counts = new ArrayList<>();
-    counts.add(new SplitOp.RangeCount(new DocRouter.Range(0,9), 4));
-    counts.add(new SplitOp.RangeCount(new DocRouter.Range(10,19), 1));
-    counts.add(new SplitOp.RangeCount(new DocRouter.Range(20,29), 1));
+    counts.add(new SplitOp.RangeCount(new DocRouter.Range(0, 9), 4));
+    counts.add(new SplitOp.RangeCount(new DocRouter.Range(10, 19), 1));
+    counts.add(new SplitOp.RangeCount(new DocRouter.Range(20, 29), 1));
     results = SplitOp.getSplits(counts, curr);
     assertEquals(9, results.iterator().next().max);
     verifyContiguous(results, curr);
 
-    // test that if largest count is last (this has specific code since we don't get over midpoint until the last range and then need to back up)
-    curr = new DocRouter.Range(-100,101);
+    // test that if largest count is last (this has specific code since we don't get over midpoint
+    // until the last range and then need to back up)
+    curr = new DocRouter.Range(-100, 101);
     counts = new ArrayList<>();
-    counts.add(new SplitOp.RangeCount(new DocRouter.Range(0,9), 1));
-    counts.add(new SplitOp.RangeCount(new DocRouter.Range(10,19), 1));
-    counts.add(new SplitOp.RangeCount(new DocRouter.Range(20,29), 4));
+    counts.add(new SplitOp.RangeCount(new DocRouter.Range(0, 9), 1));
+    counts.add(new SplitOp.RangeCount(new DocRouter.Range(10, 19), 1));
+    counts.add(new SplitOp.RangeCount(new DocRouter.Range(20, 29), 4));
     results = SplitOp.getSplits(counts, curr);
     assertEquals(19, results.iterator().next().max);
     verifyContiguous(results, curr);
@@ -230,30 +234,31 @@ public void testSplitRecommendations() throws Exception {
   public void testHistogramBuilding() throws Exception {
     List prefixes = SplitByPrefixTest.findPrefixes(20, 0, 0x00ffffff);
     List uniquePrefixes = SplitByPrefixTest.removeDups(prefixes);
-    assertTrue(prefixes.size() > uniquePrefixes.size());  // make sure we have some duplicates to test hash collisions
+    // make sure we have some duplicates to test hash collisions
+    assertTrue(prefixes.size() > uniquePrefixes.size());
 
     String prefixField = "id_prefix_s";
     String idField = "id";
     DocRouter router = new CompositeIdRouter();
 
-
-    for (int i=0; i<100; i++) {
for (int i = 0; i < 100; i++) { SolrQueryRequest req = req("myquery"); try { // the first time through the loop we do this before adding docs to test an empty index - Collection counts1 = SplitOp.getHashHistogram(req.getSearcher(), prefixField, router, null); - Collection counts2 = SplitOp.getHashHistogramFromId(req.getSearcher(), idField, router, null); + Collection counts1 = + SplitOp.getHashHistogram(req.getSearcher(), prefixField, router, null); + Collection counts2 = + SplitOp.getHashHistogramFromId(req.getSearcher(), idField, router, null); assertTrue(eqCount(counts1, counts2)); - if (i>0) { - assertTrue(counts1.size() > 0); // make sure we are testing something + if (i > 0) { + assertTrue(counts1.size() > 0); // make sure we are testing something } - // index a few random documents int ndocs = random().nextInt(10) + 1; - for (int j=0; j a, Collection b) { if (a.size() != b.size()) { return false; } - + Iterator it1 = a.iterator(); Iterator it2 = b.iterator(); while (it1.hasNext()) { @@ -288,5 +290,4 @@ private boolean eqCount(Collection a, Collection statusLog = rsp.getValues().getAll(CoreAdminAction.STATUS.name()); - assertFalse("expect status check w/o error, got:" + statusLog, - statusLog.contains(CoreAdminHandler.FAILED)); + assertFalse( + "expect status check w/o error, got:" + statusLog, + statusLog.contains(CoreAdminHandler.FAILED)); isCompleted = statusLog.contains(CoreAdminHandler.COMPLETED); return isCompleted; @@ -112,14 +126,15 @@ private void requestMetrics(boolean softFail) throws Exception { boolean found = false; int count = 10; while (!found && count-- > 0) { - h.getCoreContainer().getRequestHandler("/admin/metrics").handleRequest( - req("prefix", "SEARCHER", "registry", registry, "compact", "true"), rsp); + h.getCoreContainer() + .getRequestHandler("/admin/metrics") + .handleRequest(req("prefix", "SEARCHER", "registry", registry, "compact", "true"), rsp); NamedList values = rsp.getValues(); // this is not guaranteed to exist right away after core reload - there's a // small window between core load and before searcher metrics are registered // so we may have to check a few times, and then fail softly if reload is not complete yet - NamedList metrics = (NamedList)values.get("metrics"); + NamedList metrics = (NamedList) values.get("metrics"); if (metrics == null) { if (softFail) { return; @@ -127,7 +142,7 @@ private void requestMetrics(boolean softFail) throws Exception { fail("missing 'metrics' element in handler's output: " + values.asMap(5).toString()); } } - metrics = (NamedList)metrics.get(registry); + metrics = (NamedList) metrics.get(registry); if (metrics.get(key) != null) { found = true; assertTrue(metrics.get(key) instanceof Long); @@ -141,5 +156,4 @@ private void requestMetrics(boolean softFail) throws Exception { } assertTrue("Key " + key + " not found in registry " + registry, found); } - } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/SystemInfoHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/SystemInfoHandlerTest.java index f0126990757..c61af34c26e 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/SystemInfoHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/SystemInfoHandlerTest.java @@ -16,16 +16,14 @@ */ package org.apache.solr.handler.admin; +import com.codahale.metrics.Gauge; import java.lang.management.ManagementFactory; import java.lang.management.OperatingSystemMXBean; import java.util.Arrays; - -import com.codahale.metrics.Gauge; import org.apache.solr.SolrTestCase; import 
org.apache.solr.common.util.SimpleOrderedMap; import org.apache.solr.util.stats.MetricUtils; - public class SystemInfoHandlerTest extends SolrTestCase { public void testMagickGetter() throws Exception { @@ -34,20 +32,23 @@ public void testMagickGetter() throws Exception { // make one directly SimpleOrderedMap<Object> info = new SimpleOrderedMap<>(); - info.add( "name", os.getName() ); - info.add( "version", os.getVersion() ); - info.add( "arch", os.getArch() ); + info.add("name", os.getName()); + info.add("version", os.getVersion()); + info.add("arch", os.getArch()); // make another using MetricUtils.addMXBeanMetrics() SimpleOrderedMap<Object> info2 = new SimpleOrderedMap<>(); - MetricUtils.addMXBeanMetrics( os, OperatingSystemMXBean.class, null, (k, v) -> { - info2.add(k, ((Gauge<?>)v).getValue()); - } ); + MetricUtils.addMXBeanMetrics( + os, + OperatingSystemMXBean.class, + null, + (k, v) -> { + info2.add(k, ((Gauge<?>) v).getValue()); + }); // make sure they got the same thing for (String p : Arrays.asList("name", "version", "arch")) { assertEquals(info.get(p), info2.get(p)); } } - } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java b/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java index b5f4c7ec6d0..858c92c44ab 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java @@ -17,6 +17,15 @@ package org.apache.solr.handler.admin; +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.apache.solr.api.ApiBag.EMPTY_SPEC; +import static org.apache.solr.client.solrj.SolrRequest.METHOD.GET; +import static org.apache.solr.client.solrj.SolrRequest.METHOD.POST; +import static org.apache.solr.common.params.CommonParams.COLLECTIONS_HANDLER_PATH; +import static org.apache.solr.common.params.CommonParams.CONFIGSETS_HANDLER_PATH; +import static org.apache.solr.common.params.CommonParams.CORES_HANDLER_PATH; +import static org.apache.solr.common.util.ValidatingJsonMap.NOT_NULL; + import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; @@ -26,7 +35,6 @@ import java.util.List; import java.util.Map; import java.util.Set; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.api.Api; import org.apache.solr.api.ApiBag; @@ -63,23 +71,16 @@ import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.security.PermissionNameProvider; -import static java.nio.charset.StandardCharsets.UTF_8; -import static org.apache.solr.api.ApiBag.EMPTY_SPEC; -import static org.apache.solr.client.solrj.SolrRequest.METHOD.GET; -import static org.apache.solr.client.solrj.SolrRequest.METHOD.POST; -import static org.apache.solr.common.params.CommonParams.COLLECTIONS_HANDLER_PATH; -import static org.apache.solr.common.params.CommonParams.CONFIGSETS_HANDLER_PATH; -import static org.apache.solr.common.params.CommonParams.CORES_HANDLER_PATH; -import static org.apache.solr.common.util.ValidatingJsonMap.NOT_NULL; - public class TestApiFramework extends SolrTestCaseJ4 { public void testFramework() { Map<String, Object[]> calls = new HashMap<>(); Map<String, Object> out = new HashMap<>(); CoreContainer mockCC = TestCoreAdminApis.getCoreContainerMock(calls, out); - PluginBag<SolrRequestHandler> containerHandlers = new PluginBag<>(SolrRequestHandler.class, null, false); - TestCollectionAPIs.MockCollectionsHandler collectionsHandler = new TestCollectionAPIs.MockCollectionsHandler(); + PluginBag<SolrRequestHandler> containerHandlers = + new PluginBag<>(SolrRequestHandler.class, null, false); + 
TestCollectionAPIs.MockCollectionsHandler collectionsHandler = + new TestCollectionAPIs.MockCollectionsHandler(); containerHandlers.put(COLLECTIONS_HANDLER_PATH, collectionsHandler); containerHandlers.getApiBag().registerObject(new CollectionsAPI(collectionsHandler)); ApiRegistrar.registerCollectionApis(containerHandlers.getApiBag(), collectionsHandler); @@ -88,60 +89,57 @@ public void testFramework() { containerHandlers.put(CONFIGSETS_HANDLER_PATH, new ConfigSetsHandler(mockCC)); out.put("getRequestHandlers", containerHandlers); - PluginBag coreHandlers = new PluginBag<>(SolrRequestHandler.class, null, false); + PluginBag coreHandlers = + new PluginBag<>(SolrRequestHandler.class, null, false); coreHandlers.put("/schema", new SchemaHandler()); coreHandlers.put("/config", new SolrConfigHandler()); coreHandlers.put("/admin/ping", new PingRequestHandler()); Map parts = new HashMap<>(); String fullPath = "/collections/hello/shards"; - Api api = V2HttpCall.getApiInfo(containerHandlers, fullPath, "POST", - fullPath, parts); + Api api = V2HttpCall.getApiInfo(containerHandlers, fullPath, "POST", fullPath, parts); assertNotNull(api); - assertConditions(api.getSpec(), Map.of( - "/methods[0]", "POST", - "/commands/create", NOT_NULL)); + assertConditions(api.getSpec(), Map.of("/methods[0]", "POST", "/commands/create", NOT_NULL)); assertEquals("hello", parts.get("collection")); - parts = new HashMap<>(); - api = V2HttpCall.getApiInfo(containerHandlers, "/collections/hello/shards", "POST", - null, parts); - assertConditions(api.getSpec(), Map.of( - "/methods[0]", "POST", - "/commands/split", NOT_NULL, - "/commands/add-replica", NOT_NULL - )); - + api = + V2HttpCall.getApiInfo(containerHandlers, "/collections/hello/shards", "POST", null, parts); + assertConditions( + api.getSpec(), + Map.of( + "/methods[0]", "POST", + "/commands/split", NOT_NULL, + "/commands/add-replica", NOT_NULL)); parts = new HashMap<>(); - api = V2HttpCall.getApiInfo(containerHandlers, "/collections/hello/shards/shard1", "POST", - null, parts); - assertConditions(api.getSpec(), Map.of( - "/methods[0]", "POST", - "/commands/force-leader", NOT_NULL - )); + api = + V2HttpCall.getApiInfo( + containerHandlers, "/collections/hello/shards/shard1", "POST", null, parts); + assertConditions( + api.getSpec(), Map.of("/methods[0]", "POST", "/commands/force-leader", NOT_NULL)); assertEquals("hello", parts.get("collection")); assertEquals("shard1", parts.get("shard")); - parts = new HashMap<>(); - api = V2HttpCall.getApiInfo(containerHandlers, "/collections/hello", "POST", - null, parts); - assertConditions(api.getSpec(), Map.of( - "/methods[0]", "POST", - "/commands/add-replica-property", NOT_NULL, - "/commands/delete-replica-property", NOT_NULL - )); + api = V2HttpCall.getApiInfo(containerHandlers, "/collections/hello", "POST", null, parts); + assertConditions( + api.getSpec(), + Map.of( + "/methods[0]", "POST", + "/commands/add-replica-property", NOT_NULL, + "/commands/delete-replica-property", NOT_NULL)); assertEquals("hello", parts.get("collection")); - api = V2HttpCall.getApiInfo(containerHandlers, "/collections/hello/shards/shard1/replica1", "DELETE", - null, parts); + api = + V2HttpCall.getApiInfo( + containerHandlers, "/collections/hello/shards/shard1/replica1", "DELETE", null, parts); assertEquals("hello", parts.get("collection")); assertEquals("shard1", parts.get("shard")); assertEquals("replica1", parts.get("replica")); - SolrQueryResponse rsp = invoke(containerHandlers, null, "/collections/_introspect", GET, mockCC); + 
SolrQueryResponse rsp = + invoke(containerHandlers, null, "/collections/_introspect", GET, mockCC); Set<String> methodNames = new HashSet<>(); methodNames.add(rsp.getValues()._getStr("/spec[0]/methods[0]", null)); @@ -153,22 +151,28 @@ public void testFramework() { methodNames = new HashSet<>(); - rsp = invoke(coreHandlers, "/schema/_introspect", "/collections/hello/schema/_introspect", GET, mockCC); + rsp = + invoke( + coreHandlers, + "/schema/_introspect", + "/collections/hello/schema/_introspect", + GET, + mockCC); methodNames.add(rsp.getValues()._getStr("/spec[0]/methods[0]", null)); methodNames.add(rsp.getValues()._getStr("/spec[1]/methods[0]", null)); assertTrue(methodNames.contains("POST")); assertTrue(methodNames.contains("GET")); rsp = invoke(coreHandlers, "/", "/collections/hello/_introspect", GET, mockCC); - assertConditions(rsp.getValues().asMap(2), Map.of( - "/availableSubPaths", NOT_NULL, - "availableSubPaths /collections/hello/config/jmx", NOT_NULL, - "availableSubPaths /collections/hello/schema", NOT_NULL, - "availableSubPaths /collections/hello/shards", NOT_NULL, - "availableSubPaths /collections/hello/shards/{shard}", NOT_NULL, - "availableSubPaths /collections/hello/shards/{shard}/{replica}", NOT_NULL - )); - + assertConditions( + rsp.getValues().asMap(2), + Map.of( + "/availableSubPaths", NOT_NULL, + "availableSubPaths /collections/hello/config/jmx", NOT_NULL, + "availableSubPaths /collections/hello/schema", NOT_NULL, + "availableSubPaths /collections/hello/shards", NOT_NULL, + "availableSubPaths /collections/hello/shards/{shard}", NOT_NULL, + "availableSubPaths /collections/hello/shards/{shard}/{replica}", NOT_NULL)); } public void testPayload() throws IOException { @@ -176,20 +180,25 @@ public void testPayload() throws IOException { Utils.fromJSONString(json); ApiBag apiBag = new ApiBag(false); - List<Api> apis = apiBag.registerObject(new ApiTest()); + List<Api> apis = apiBag.registerObject(new ApiTest()); ValidatingJsonMap spec = apis.get(0).getSpec(); - assertEquals("POST", spec._getStr("/methods[0]",null) ); - assertEquals("POST", spec._getStr("/methods[0]",null) ); - assertEquals("/cluster/package", spec._getStr("/url/paths[0]",null) ); - assertEquals("string", spec._getStr("/commands/add/properties/package/type",null) ); - assertEquals("array", spec._getStr("/commands/add/properties/files/type",null) ); - assertEquals("string", spec._getStr("/commands/add/properties/files/items/type",null) ); - assertEquals("string", spec._getStr("/commands/delete/items/type",null) ); - SolrQueryResponse rsp = v2ApiInvoke(apiBag, "/cluster/package", "POST", new ModifiableSolrParams(), - new ByteArrayInputStream("{add:{package:mypkg, version: '1.0', files : [a.jar, b.jar]}}".getBytes(UTF_8))); - + assertEquals("POST", spec._getStr("/methods[0]", null)); + assertEquals("POST", spec._getStr("/methods[0]", null)); + assertEquals("/cluster/package", spec._getStr("/url/paths[0]", null)); + assertEquals("string", spec._getStr("/commands/add/properties/package/type", null)); + assertEquals("array", spec._getStr("/commands/add/properties/files/type", null)); + assertEquals("string", spec._getStr("/commands/add/properties/files/items/type", null)); + assertEquals("string", spec._getStr("/commands/delete/items/type", null)); + SolrQueryResponse rsp = + v2ApiInvoke( + apiBag, + "/cluster/package", + "POST", + new ModifiableSolrParams(), + new ByteArrayInputStream( + "{add:{package:mypkg, version: '1.0', files : [a.jar, b.jar]}}".getBytes(UTF_8))); AddVersion addversion = (AddVersion) 
rsp.getValues().get("add"); assertEquals("mypkg", addversion.pkg); @@ -197,10 +206,16 @@ public void testPayload() throws IOException { assertEquals("a.jar", addversion.files.get(0)); assertEquals("b.jar", addversion.files.get(1)); - apiBag.registerObject(new C()); - rsp = v2ApiInvoke(apiBag, "/path1", "POST", new ModifiableSolrParams(), - new ByteArrayInputStream("{\"package\":\"mypkg\", \"version\": \"1.0\", \"files\" : [\"a.jar\", \"b.jar\"]}".getBytes(UTF_8))); + rsp = + v2ApiInvoke( + apiBag, + "/path1", + "POST", + new ModifiableSolrParams(), + new ByteArrayInputStream( + "{\"package\":\"mypkg\", \"version\": \"1.0\", \"files\" : [\"a.jar\", \"b.jar\"]}" + .getBytes(UTF_8))); assertEquals("mypkg", rsp.getValues()._getStr("payload/package", null)); assertEquals("1.0", rsp.getValues()._getStr("payload/version", null)); } @@ -208,7 +223,7 @@ public void testPayload() throws IOException { public static class C { @EndPoint(path = "/path1", method = POST, permission = PermissionNameProvider.Name.ALL) public void m1(PayloadObj add) { - add.getResponse().add("payload",add.get()); + add.getResponse().add("payload", add.get()); } } @@ -217,22 +232,21 @@ public static class ApiTest { @Command(name = "add") public void add(SolrQueryRequest req, SolrQueryResponse rsp, AddVersion addVersion) { rsp.add("add", addVersion); - } @Command(name = "delete") public void del(SolrQueryRequest req, SolrQueryResponse rsp, List names) { - rsp.add("delete",names); - + rsp.add("delete", names); } - } public static class AddVersion implements ReflectMapWriter { @JsonProperty(value = "package", required = true) public String pkg; + @JsonProperty(value = "version", required = true) public String version; + @JsonProperty(value = "files", required = true) public List files; } @@ -240,16 +254,25 @@ public static class AddVersion implements ReflectMapWriter { public void testAnnotatedApi() { ApiBag apiBag = new ApiBag(false); apiBag.registerObject(new DummyTest()); - SolrQueryResponse rsp = v2ApiInvoke(apiBag, "/node/filestore/package/mypkg/jar1.jar", "GET", - new ModifiableSolrParams(), null); + SolrQueryResponse rsp = + v2ApiInvoke( + apiBag, + "/node/filestore/package/mypkg/jar1.jar", + "GET", + new ModifiableSolrParams(), + null); assertEquals("/package/mypkg/jar1.jar", rsp.getValues().get("path")); apiBag = new ApiBag(false); apiBag.registerObject(new DummyTest1()); - rsp = v2ApiInvoke(apiBag, "/node/filestore/package/mypkg/jar1.jar", "GET", - new ModifiableSolrParams(), null); + rsp = + v2ApiInvoke( + apiBag, + "/node/filestore/package/mypkg/jar1.jar", + "GET", + new ModifiableSolrParams(), + null); assertEquals("/package/mypkg/jar1.jar", rsp.getValues().get("path")); - } @EndPoint( @@ -264,7 +287,6 @@ public void read(SolrQueryRequest req, SolrQueryResponse rsp) { } } - public class DummyTest1 { @EndPoint( path = "/node/filestore/*", @@ -276,67 +298,72 @@ public void read(SolrQueryRequest req, SolrQueryResponse rsp) { } } - private static SolrQueryResponse v2ApiInvoke(ApiBag bag, String uri, String method, SolrParams params, InputStream payload) { + private static SolrQueryResponse v2ApiInvoke( + ApiBag bag, String uri, String method, SolrParams params, InputStream payload) { if (params == null) params = new ModifiableSolrParams(); SolrQueryResponse rsp = new SolrQueryResponse(); HashMap templateVals = new HashMap<>(); Api[] currentApi = new Api[1]; - SolrQueryRequestBase req = new SolrQueryRequestBase(null, params) { + SolrQueryRequestBase req = + new SolrQueryRequestBase(null, params) { - @Override - public 
Map getPathTemplateValues() { - return templateVals; - } - - @Override - protected Map getValidators() { - return currentApi[0] == null? - Collections.emptyMap(): - currentApi[0].getCommandSchema(); - } + @Override + public Map getPathTemplateValues() { + return templateVals; + } - @Override - public Iterable getContentStreams() { - return Collections.singletonList(new ContentStreamBase() { @Override - public InputStream getStream() throws IOException { - return payload; + protected Map getValidators() { + return currentApi[0] == null + ? Collections.emptyMap() + : currentApi[0].getCommandSchema(); } - }); - } - }; + @Override + public Iterable getContentStreams() { + return Collections.singletonList( + new ContentStreamBase() { + @Override + public InputStream getStream() throws IOException { + return payload; + } + }); + } + }; Api api = bag.lookup(uri, method, templateVals); currentApi[0] = api; - api.call(req, rsp); return rsp; - } public void testTrailingTemplatePaths() { PathTrie registry = new PathTrie<>(); - Api api = new Api(EMPTY_SPEC) { - @Override - public void call(SolrQueryRequest req, SolrQueryResponse rsp) { - - } - }; + Api api = + new Api(EMPTY_SPEC) { + @Override + public void call(SolrQueryRequest req, SolrQueryResponse rsp) {} + }; Api intropsect = new ApiBag.IntrospectApi(api, false); - ApiBag.registerIntrospect(Collections.emptyMap(), registry, "/c/.system/blob/{name}", intropsect); - ApiBag.registerIntrospect(Collections.emptyMap(), registry, "/c/.system/{x}/{name}", intropsect); - assertEquals(intropsect, registry.lookup("/c/.system/blob/random_string/_introspect", new HashMap<>())); + ApiBag.registerIntrospect( + Collections.emptyMap(), registry, "/c/.system/blob/{name}", intropsect); + ApiBag.registerIntrospect( + Collections.emptyMap(), registry, "/c/.system/{x}/{name}", intropsect); + assertEquals( + intropsect, registry.lookup("/c/.system/blob/random_string/_introspect", new HashMap<>())); assertEquals(intropsect, registry.lookup("/c/.system/blob/_introspect", new HashMap<>())); assertEquals(intropsect, registry.lookup("/c/.system/_introspect", new HashMap<>())); assertEquals(intropsect, registry.lookup("/c/.system/v1/_introspect", new HashMap<>())); assertEquals(intropsect, registry.lookup("/c/.system/v1/v2/_introspect", new HashMap<>())); } - private SolrQueryResponse invoke(PluginBag reqHandlers, String path, - String fullPath, SolrRequest.METHOD method, - CoreContainer mockCC) { + private SolrQueryResponse invoke( + PluginBag reqHandlers, + String path, + String fullPath, + SolrRequest.METHOD method, + CoreContainer mockCC) { HashMap parts = new HashMap<>(); boolean containerHandlerLookup = mockCC.getRequestHandlers() == reqHandlers; path = path == null ? 
fullPath : path; @@ -355,19 +382,18 @@ private SolrQueryResponse invoke(PluginBag reqHandlers, Stri } SolrQueryResponse rsp = new SolrQueryResponse(); - LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, new MapSolrParams(new HashMap<>())) { - @Override - public List getCommands(boolean validateInput) { - return Collections.emptyList(); - } - }; + LocalSolrQueryRequest req = + new LocalSolrQueryRequest(null, new MapSolrParams(new HashMap<>())) { + @Override + public List getCommands(boolean validateInput) { + return Collections.emptyList(); + } + }; api.call(req, rsp); return rsp; - } - public static void assertConditions(Map root, Map conditions) { for (Map.Entry e : conditions.entrySet()) { String path = e.getKey(); @@ -375,13 +401,18 @@ public static void assertConditions(Map root, Map conditio Object val = Utils.getObjectByPath(root, false, parts); if (e.getValue() instanceof ValidatingJsonMap.PredicateWithErrMsg) { @SuppressWarnings("unchecked") - ValidatingJsonMap.PredicateWithErrMsg value = (ValidatingJsonMap.PredicateWithErrMsg) e.getValue(); + ValidatingJsonMap.PredicateWithErrMsg value = + (ValidatingJsonMap.PredicateWithErrMsg) e.getValue(); String err = value.test(val); if (err != null) { - assertEquals(err + " for " + e.getKey() + " in :" + Utils.toJSONString(root), e.getValue(), val); + assertEquals( + err + " for " + e.getKey() + " in :" + Utils.toJSONString(root), e.getValue(), val); } } else { - assertEquals("incorrect value for path " + e.getKey() + " in :" + Utils.toJSONString(root), e.getValue(), val); + assertEquals( + "incorrect value for path " + e.getKey() + " in :" + Utils.toJSONString(root), + e.getValue(), + val); } } } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/TestCollectionAPIs.java b/solr/core/src/test/org/apache/solr/handler/admin/TestCollectionAPIs.java index 4ef36f8a845..09327fd37d4 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/TestCollectionAPIs.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/TestCollectionAPIs.java @@ -17,6 +17,20 @@ package org.apache.solr.handler.admin; +import static org.apache.solr.client.solrj.SolrRequest.METHOD.DELETE; +import static org.apache.solr.client.solrj.SolrRequest.METHOD.POST; +import static org.apache.solr.cloud.Overseer.QUEUE_OPERATION; +import static org.apache.solr.common.params.CollectionAdminParams.PROPERTY_NAME; +import static org.apache.solr.common.params.CollectionAdminParams.PROPERTY_VALUE; +import static org.apache.solr.common.params.CommonParams.NAME; +import static org.apache.solr.common.util.Utils.fromJSONString; + +import java.lang.invoke.MethodHandles; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.api.Api; import org.apache.solr.api.ApiBag; @@ -43,21 +57,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.lang.invoke.MethodHandles; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.apache.solr.client.solrj.SolrRequest.METHOD.DELETE; -import static org.apache.solr.client.solrj.SolrRequest.METHOD.POST; -import static org.apache.solr.cloud.Overseer.QUEUE_OPERATION; -import static org.apache.solr.common.params.CollectionAdminParams.PROPERTY_NAME; -import static org.apache.solr.common.params.CollectionAdminParams.PROPERTY_VALUE; -import static 
org.apache.solr.common.params.CommonParams.NAME; -import static org.apache.solr.common.util.Utils.fromJSONString; - public class TestCollectionAPIs extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -74,9 +73,12 @@ public void testCopyParamsToMap() { assertEquals("X2", x[1]); assertEquals("Y", m.get("y")); - SolrException e = expectThrows(SolrException.class, () -> { - CollectionsHandler.copy(params.required(), null, "z"); - }); + SolrException e = + expectThrows( + SolrException.class, + () -> { + CollectionsHandler.copy(params.required(), null, "z"); + }); assertEquals(e.code(), SolrException.ErrorCode.BAD_REQUEST.code); } @@ -90,122 +92,200 @@ public void testCommands() throws Exception { ApiRegistrar.registerCollectionApis(apiBag, collectionsHandler); ApiRegistrar.registerShardApis(apiBag, collectionsHandler); - ClusterAPI clusterAPI = new ClusterAPI(collectionsHandler,null); + ClusterAPI clusterAPI = new ClusterAPI(collectionsHandler, null); apiBag.registerObject(clusterAPI); apiBag.registerObject(clusterAPI.commands); } - //test a simple create collection call - compareOutput(apiBag, "/collections", POST, - "{create:{name:'newcoll', config:'schemaless', numShards:2, replicationFactor:2 }}", null, + // test a simple create collection call + compareOutput( + apiBag, + "/collections", + POST, + "{create:{name:'newcoll', config:'schemaless', numShards:2, replicationFactor:2 }}", + null, "{name:newcoll, fromApi:'true', replicationFactor:'2', nrtReplicas:'2', collection.configName:schemaless, numShards:'2', operation:create}"); - - compareOutput(apiBag, "/collections", POST, - "{create:{name:'newcoll', config:'schemaless', numShards:2, nrtReplicas:2 }}", null, + + compareOutput( + apiBag, + "/collections", + POST, + "{create:{name:'newcoll', config:'schemaless', numShards:2, nrtReplicas:2 }}", + null, "{name:newcoll, fromApi:'true', nrtReplicas:'2', replicationFactor:'2', collection.configName:schemaless, numShards:'2', operation:create}"); - - compareOutput(apiBag, "/collections", POST, - "{create:{name:'newcoll', config:'schemaless', numShards:2, nrtReplicas:2, tlogReplicas:2, pullReplicas:2 }}", null, + + compareOutput( + apiBag, + "/collections", + POST, + "{create:{name:'newcoll', config:'schemaless', numShards:2, nrtReplicas:2, tlogReplicas:2, pullReplicas:2 }}", + null, "{name:newcoll, fromApi:'true', nrtReplicas:'2', replicationFactor:'2', tlogReplicas:'2', pullReplicas:'2', collection.configName:schemaless, numShards:'2', operation:create}"); - //test a create collection with custom properties - compareOutput(apiBag, "/collections", POST, - "{create:{name:'newcoll', config:'schemaless', numShards:2, replicationFactor:2, properties:{prop1:'prop1val', prop2: prop2val} }}", null, + // test a create collection with custom properties + compareOutput( + apiBag, + "/collections", + POST, + "{create:{name:'newcoll', config:'schemaless', numShards:2, replicationFactor:2, properties:{prop1:'prop1val', prop2: prop2val} }}", + null, "{name:newcoll, fromApi:'true', replicationFactor:'2', nrtReplicas:'2', collection.configName:schemaless, numShards:'2', operation:create, property.prop1:prop1val, property.prop2:prop2val}"); - - compareOutput(apiBag, "/collections", POST, - "{create-alias:{name: aliasName , collections:[c1,c2] }}", null, "{operation : createalias, name: aliasName, collections:\"c1,c2\" }"); - - compareOutput(apiBag, "/collections", POST, - "{delete-alias:{ name: aliasName}}", null, "{operation : 
deletealias, name: aliasName}"); - - compareOutput(apiBag, "/collections/collName", POST, - "{reload:{}}", null, + compareOutput( + apiBag, + "/collections", + POST, + "{create-alias:{name: aliasName , collections:[c1,c2] }}", + null, + "{operation : createalias, name: aliasName, collections:\"c1,c2\" }"); + + compareOutput( + apiBag, + "/collections", + POST, + "{delete-alias:{ name: aliasName}}", + null, + "{operation : deletealias, name: aliasName}"); + + compareOutput( + apiBag, + "/collections/collName", + POST, + "{reload:{}}", + null, "{name:collName, operation :reload}"); - compareOutput(apiBag, "/collections/collName", DELETE, - null, null, - "{name:collName, operation :delete}"); + compareOutput( + apiBag, "/collections/collName", DELETE, null, null, "{name:collName, operation :delete}"); - compareOutput(apiBag, "/collections/collName/shards/shard1", DELETE, - null, null, + compareOutput( + apiBag, + "/collections/collName/shards/shard1", + DELETE, + null, + null, "{collection:collName, shard: shard1 , operation :deleteshard }"); - compareOutput(apiBag, "/collections/collName/shards/shard1/replica1?deleteDataDir=true&onlyIfDown=true", DELETE, - null, null, + compareOutput( + apiBag, + "/collections/collName/shards/shard1/replica1?deleteDataDir=true&onlyIfDown=true", + DELETE, + null, + null, "{collection:collName, shard: shard1, replica :replica1 , deleteDataDir:'true', onlyIfDown: 'true', operation :deletereplica }"); - compareOutput(apiBag, "/collections/collName/shards", POST, - "{split:{shard:shard1, ranges: '0-1f4,1f5-3e8,3e9-5dc', coreProperties : {prop1:prop1Val, prop2:prop2Val} }}", null, - "{collection: collName , shard : shard1, ranges :'0-1f4,1f5-3e8,3e9-5dc', operation : splitshard, property.prop1:prop1Val, property.prop2: prop2Val}" - ); - - compareOutput(apiBag, "/collections/collName/shards", POST, - "{add-replica:{shard: shard1, node: 'localhost_8978' , coreProperties : {prop1:prop1Val, prop2:prop2Val} }}", null, - "{collection: collName , shard : shard1, node :'localhost_8978', operation : addreplica, property.prop1:prop1Val, property.prop2: prop2Val}" - ); - - compareOutput(apiBag, "/collections/collName/shards", POST, - "{split:{ splitKey:id12345, coreProperties : {prop1:prop1Val, prop2:prop2Val} }}", null, - "{collection: collName , split.key : id12345 , operation : splitshard, property.prop1:prop1Val, property.prop2: prop2Val}" - ); - - compareOutput(apiBag, "/collections/collName/shards", POST, - "{add-replica:{shard: shard1, node: 'localhost_8978' , type:'TLOG' }}", null, - "{collection: collName , shard : shard1, node :'localhost_8978', operation : addreplica, type: TLOG}" - ); - - compareOutput(apiBag, "/collections/collName/shards", POST, - "{add-replica:{shard: shard1, node: 'localhost_8978' , type:'PULL' }}", null, - "{collection: collName , shard : shard1, node :'localhost_8978', operation : addreplica, type: PULL}" - ); - - // TODO annotation-based v2 APIs still miss enum support to validate the 'type' parameter as this test requires. 
+ compareOutput( + apiBag, + "/collections/collName/shards", + POST, + "{split:{shard:shard1, ranges: '0-1f4,1f5-3e8,3e9-5dc', coreProperties : {prop1:prop1Val, prop2:prop2Val} }}", + null, + "{collection: collName , shard : shard1, ranges :'0-1f4,1f5-3e8,3e9-5dc', operation : splitshard, property.prop1:prop1Val, property.prop2: prop2Val}"); + + compareOutput( + apiBag, + "/collections/collName/shards", + POST, + "{add-replica:{shard: shard1, node: 'localhost_8978' , coreProperties : {prop1:prop1Val, prop2:prop2Val} }}", + null, + "{collection: collName , shard : shard1, node :'localhost_8978', operation : addreplica, property.prop1:prop1Val, property.prop2: prop2Val}"); + + compareOutput( + apiBag, + "/collections/collName/shards", + POST, + "{split:{ splitKey:id12345, coreProperties : {prop1:prop1Val, prop2:prop2Val} }}", + null, + "{collection: collName , split.key : id12345 , operation : splitshard, property.prop1:prop1Val, property.prop2: prop2Val}"); + + compareOutput( + apiBag, + "/collections/collName/shards", + POST, + "{add-replica:{shard: shard1, node: 'localhost_8978' , type:'TLOG' }}", + null, + "{collection: collName , shard : shard1, node :'localhost_8978', operation : addreplica, type: TLOG}"); + + compareOutput( + apiBag, + "/collections/collName/shards", + POST, + "{add-replica:{shard: shard1, node: 'localhost_8978' , type:'PULL' }}", + null, + "{collection: collName , shard : shard1, node :'localhost_8978', operation : addreplica, type: PULL}"); + + // TODO annotation-based v2 APIs still miss enum support to validate the 'type' parameter as + // this test requires. // Uncomment this test after fixing SOLR-15796 -// assertErrorContains(apiBag, "/collections/collName/shards", POST, -// "{add-replica:{shard: shard1, node: 'localhost_8978' , type:'foo' }}", null, -// "Value of enum must be one of" -// ); - - compareOutput(apiBag, "/collections/collName", POST, - "{add-replica-property : {name:propA , value: VALA, shard: shard1, replica:replica1}}", null, - "{collection: collName, shard: shard1, replica : replica1 , property : propA , operation : addreplicaprop, property.value : 'VALA'}" - ); - - compareOutput(apiBag, "/collections/collName", POST, - "{delete-replica-property : {property: propA , shard: shard1, replica:replica1} }", null, - "{collection: collName, shard: shard1, replica : replica1 , property : propA , operation : deletereplicaprop}" - ); - - compareOutput(apiBag, "/cluster", POST, - "{add-role : {role : overseer, node : 'localhost_8978'} }", null, - "{operation : addrole ,role : overseer, node : 'localhost_8978'}" - ); - - compareOutput(apiBag, "/cluster", POST, - "{remove-role : {role : overseer, node : 'localhost_8978'} }", null, - "{operation : removerole ,role : overseer, node : 'localhost_8978'}" - ); - - compareOutput(apiBag, "/collections/coll1", POST, - "{balance-shard-unique : {property: preferredLeader} }", null, - "{operation : balanceshardunique ,collection : coll1, property : preferredLeader}" - ); - - compareOutput(apiBag, "/collections/coll1", POST, - "{migrate-docs : {forwardTimeout: 1800, target: coll2, splitKey: 'a123!'} }", null, - "{operation : migrate ,collection : coll1, target.collection:coll2, forward.timeout:1800, split.key:'a123!'}" - ); - - compareOutput(apiBag, "/collections/coll1", POST, - "{set-collection-property : {name: 'foo', value:'bar'} }", null, - "{operation : collectionprop, name : coll1, propertyName:'foo', propertyValue:'bar'}" - ); - + // assertErrorContains(apiBag, "/collections/collName/shards", POST, + // 
"{add-replica:{shard: shard1, node: 'localhost_8978' , type:'foo' }}", null, + // "Value of enum must be one of" + // ); + + compareOutput( + apiBag, + "/collections/collName", + POST, + "{add-replica-property : {name:propA , value: VALA, shard: shard1, replica:replica1}}", + null, + "{collection: collName, shard: shard1, replica : replica1 , property : propA , operation : addreplicaprop, property.value : 'VALA'}"); + + compareOutput( + apiBag, + "/collections/collName", + POST, + "{delete-replica-property : {property: propA , shard: shard1, replica:replica1} }", + null, + "{collection: collName, shard: shard1, replica : replica1 , property : propA , operation : deletereplicaprop}"); + + compareOutput( + apiBag, + "/cluster", + POST, + "{add-role : {role : overseer, node : 'localhost_8978'} }", + null, + "{operation : addrole ,role : overseer, node : 'localhost_8978'}"); + + compareOutput( + apiBag, + "/cluster", + POST, + "{remove-role : {role : overseer, node : 'localhost_8978'} }", + null, + "{operation : removerole ,role : overseer, node : 'localhost_8978'}"); + + compareOutput( + apiBag, + "/collections/coll1", + POST, + "{balance-shard-unique : {property: preferredLeader} }", + null, + "{operation : balanceshardunique ,collection : coll1, property : preferredLeader}"); + + compareOutput( + apiBag, + "/collections/coll1", + POST, + "{migrate-docs : {forwardTimeout: 1800, target: coll2, splitKey: 'a123!'} }", + null, + "{operation : migrate ,collection : coll1, target.collection:coll2, forward.timeout:1800, split.key:'a123!'}"); + + compareOutput( + apiBag, + "/collections/coll1", + POST, + "{set-collection-property : {name: 'foo', value:'bar'} }", + null, + "{operation : collectionprop, name : coll1, propertyName:'foo', propertyValue:'bar'}"); } - static ZkNodeProps compareOutput(final ApiBag apiBag, final String path, final SolrRequest.METHOD method, - final String payload, final CoreContainer cc, String expectedOutputMapJson) throws Exception { + static ZkNodeProps compareOutput( + final ApiBag apiBag, + final String path, + final SolrRequest.METHOD method, + final String payload, + final CoreContainer cc, + String expectedOutputMapJson) + throws Exception { Pair ctx = makeCall(apiBag, path, method, payload, cc); ZkNodeProps output = (ZkNodeProps) ctx.second().getValues().get(ZkNodeProps.class.getName()); @SuppressWarnings("unchecked") @@ -213,17 +293,32 @@ static ZkNodeProps compareOutput(final ApiBag apiBag, final String path, final S assertMapEqual(expected, output); return output; } - - static void assertErrorContains(final ApiBag apiBag, final String path, final SolrRequest.METHOD method, - final String payload, final CoreContainer cc, String expectedErrorMsg) throws Exception { - RuntimeException e = expectThrows(RuntimeException.class, () -> makeCall(apiBag, path, method, payload, cc)); - assertTrue("Expected exception with error message '" + expectedErrorMsg + "' but got: " + e.getMessage(), + + static void assertErrorContains( + final ApiBag apiBag, + final String path, + final SolrRequest.METHOD method, + final String payload, + final CoreContainer cc, + String expectedErrorMsg) + throws Exception { + RuntimeException e = + expectThrows(RuntimeException.class, () -> makeCall(apiBag, path, method, payload, cc)); + assertTrue( + "Expected exception with error message '" + + expectedErrorMsg + + "' but got: " + + e.getMessage(), e.getMessage().contains(expectedErrorMsg)); } - public static Pair makeCall(final ApiBag apiBag, String path, - final SolrRequest.METHOD method, - 
final String payload, final CoreContainer cc) throws Exception { + public static Pair makeCall( + final ApiBag apiBag, + String path, + final SolrRequest.METHOD method, + final String payload, + final CoreContainer cc) + throws Exception { SolrParams queryParams = new MultiMapSolrParams(Collections.emptyMap()); if (path.indexOf('?') > 0) { String queryStr = path.substring(path.indexOf('?') + 1); @@ -234,28 +329,29 @@ public static Pair makeCall(final ApiBag ap Api api = apiBag.lookup(path, method.toString(), parts); if (api == null) throw new RuntimeException("No handler at path :" + path); SolrQueryResponse rsp = new SolrQueryResponse(); - LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, queryParams) { - @Override - public List getCommands(boolean validateInput) { - if (payload == null) return Collections.emptyList(); - return ApiBag.getCommandOperations(new ContentStreamBase.StringStream(payload), api.getCommandSchema(), true); - } - - @Override - public Map getPathTemplateValues() { - return parts; - } - - @Override - public String getHttpMethod() { - return method.toString(); - } - }; + LocalSolrQueryRequest req = + new LocalSolrQueryRequest(null, queryParams) { + @Override + public List getCommands(boolean validateInput) { + if (payload == null) return Collections.emptyList(); + return ApiBag.getCommandOperations( + new ContentStreamBase.StringStream(payload), api.getCommandSchema(), true); + } + + @Override + public Map getPathTemplateValues() { + return parts; + } + + @Override + public String getHttpMethod() { + return method.toString(); + } + }; try { api.call(req, rsp); } catch (ApiBag.ExceptionWithErrObject e) { throw new RuntimeException(e.getMessage() + Utils.toJSONString(e.getErrs()), e); - } return new Pair<>(req, rsp); } @@ -267,20 +363,19 @@ private static void assertMapEqual(Map expected, ZkNodeProps actual) if (actualVal instanceof String[]) { actualVal = Arrays.asList((String[]) actualVal); } - assertEquals(errorMessage(expected, actual), String.valueOf(e.getValue()), String.valueOf(actualVal)); + assertEquals( + errorMessage(expected, actual), String.valueOf(e.getValue()), String.valueOf(actualVal)); } } private static String errorMessage(Map expected, ZkNodeProps actual) { return "expected: " + Utils.toJSONString(expected) + "\nactual: " + Utils.toJSONString(actual); - } static class MockCollectionsHandler extends CollectionsHandler { LocalSolrQueryRequest req; - MockCollectionsHandler() { - } + MockCollectionsHandler() {} @Override protected CoreContainer checkErrors() { @@ -288,18 +383,19 @@ protected CoreContainer checkErrors() { } @Override - protected void copyFromClusterProp(Map props, String prop) { - - } + protected void copyFromClusterProp(Map props, String prop) {} @Override - void invokeAction(SolrQueryRequest req, SolrQueryResponse rsp, - CoreContainer cores, - CollectionParams.CollectionAction action, - CollectionOperation operation) throws Exception { + void invokeAction( + SolrQueryRequest req, + SolrQueryResponse rsp, + CoreContainer cores, + CollectionParams.CollectionAction action, + CollectionOperation operation) + throws Exception { Map result = null; if (action == CollectionParams.CollectionAction.COLLECTIONPROP) { - //Fake this action, since we don't want to write to ZooKeeper in this test + // Fake this action, since we don't want to write to ZooKeeper in this test result = new HashMap<>(); result.put(NAME, req.getParams().required().get(NAME)); result.put(PROPERTY_NAME, req.getParams().required().get(PROPERTY_NAME)); @@ -313,5 +409,4 
@@ void invokeAction(SolrQueryRequest req, SolrQueryResponse rsp, } } } - } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/TestConfigsApi.java b/solr/core/src/test/org/apache/solr/handler/admin/TestConfigsApi.java index 2f4070265a0..7e2672dd07b 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/TestConfigsApi.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/TestConfigsApi.java @@ -17,45 +17,46 @@ package org.apache.solr.handler.admin; +import static org.apache.solr.client.solrj.SolrRequest.METHOD.DELETE; +import static org.apache.solr.cloud.Overseer.QUEUE_OPERATION; +import static org.apache.solr.handler.admin.TestCollectionAPIs.compareOutput; import java.util.Map; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.api.ApiBag; import org.apache.solr.common.cloud.ZkNodeProps; import org.apache.solr.handler.ClusterAPI; import org.apache.solr.response.SolrQueryResponse; -import static org.apache.solr.client.solrj.SolrRequest.METHOD.DELETE; -import static org.apache.solr.cloud.Overseer.QUEUE_OPERATION; -import static org.apache.solr.handler.admin.TestCollectionAPIs.compareOutput; - public class TestConfigsApi extends SolrTestCaseJ4 { - public void testCommands() throws Exception { - try (ConfigSetsHandler handler = new ConfigSetsHandler(null) { + try (ConfigSetsHandler handler = + new ConfigSetsHandler(null) { - @Override - protected void checkErrors() { - } + @Override + protected void checkErrors() {} - @Override - protected void sendToOverseer(SolrQueryResponse rsp, - ConfigSetOperation operation, - Map result) { - result.put(QUEUE_OPERATION, operation.action.toLower()); - rsp.add(ZkNodeProps.class.getName(), new ZkNodeProps(result)); - } - }) { + @Override + protected void sendToOverseer( + SolrQueryResponse rsp, ConfigSetOperation operation, Map result) { + result.put(QUEUE_OPERATION, operation.action.toLower()); + rsp.add(ZkNodeProps.class.getName(), new ZkNodeProps(result)); + } + }) { ApiBag apiBag = new ApiBag(false); ClusterAPI o = new ClusterAPI(null, handler); apiBag.registerObject(o); apiBag.registerObject(o.configSetCommands); -// for (Api api : handler.getApis()) apiBag.register(api, emptyMap()); - compareOutput(apiBag, "/cluster/configs/sample", DELETE, null, null, + // for (Api api : handler.getApis()) apiBag.register(api, emptyMap()); + compareOutput( + apiBag, + "/cluster/configs/sample", + DELETE, + null, + null, "{name :sample, operation:delete}"); } } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/TestCoreAdminApis.java b/solr/core/src/test/org/apache/solr/handler/admin/TestCoreAdminApis.java index 3ea289a9370..6624e86a25c 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/TestCoreAdminApis.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/TestCoreAdminApis.java @@ -17,21 +17,20 @@ package org.apache.solr.handler.admin; +import static org.apache.solr.common.util.Utils.fromJSONString; +import static org.mockito.Mockito.*; + import java.nio.file.Path; import java.nio.file.Paths; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Properties; - import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.client.solrj.SolrRequest; -import org.apache.solr.core.CoreContainer; import org.apache.solr.api.Api; import org.apache.solr.api.ApiBag; - -import static org.apache.solr.common.util.Utils.fromJSONString; -import static org.mockito.Mockito.*; +import org.apache.solr.client.solrj.SolrRequest; +import org.apache.solr.core.CoreContainer; public class 
TestCoreAdminApis extends SolrTestCaseJ4 { @@ -40,63 +39,77 @@ public void testCalls() throws Exception { CoreContainer mockCC = getCoreContainerMock(calls, new HashMap<>()); ApiBag apiBag; - try (CoreAdminHandler coreAdminHandler = new CoreAdminHandler(mockCC)) { - apiBag = new ApiBag(false); - for (Api api : coreAdminHandler.getApis()) { - apiBag.register(api, Collections.emptyMap()); - } + try (CoreAdminHandler coreAdminHandler = new CoreAdminHandler(mockCC)) { + apiBag = new ApiBag(false); + for (Api api : coreAdminHandler.getApis()) { + apiBag.register(api, Collections.emptyMap()); + } } - TestCollectionAPIs.makeCall(apiBag, "/cores", SolrRequest.METHOD.POST, - "{create:{name: hello, instanceDir : someDir, schema: 'schema.xml'}}", mockCC); + TestCollectionAPIs.makeCall( + apiBag, + "/cores", + SolrRequest.METHOD.POST, + "{create:{name: hello, instanceDir : someDir, schema: 'schema.xml'}}", + mockCC); Object[] params = calls.get("create"); - assertEquals("hello" ,params[0]); - assertEquals(fromJSONString("{schema : schema.xml}") ,params[2]); + assertEquals("hello", params[0]); + assertEquals(fromJSONString("{schema : schema.xml}"), params[2]); - TestCollectionAPIs.makeCall(apiBag, "/cores/core1", SolrRequest.METHOD.POST, - "{swap:{with: core2}}", mockCC); + TestCollectionAPIs.makeCall( + apiBag, "/cores/core1", SolrRequest.METHOD.POST, "{swap:{with: core2}}", mockCC); params = calls.get("swap"); - assertEquals("core1" ,params[0]); - assertEquals("core2" ,params[1]); + assertEquals("core1", params[0]); + assertEquals("core2", params[1]); - TestCollectionAPIs.makeCall(apiBag, "/cores/core1", SolrRequest.METHOD.POST, - "{rename:{to: core2}}", mockCC); + TestCollectionAPIs.makeCall( + apiBag, "/cores/core1", SolrRequest.METHOD.POST, "{rename:{to: core2}}", mockCC); params = calls.get("rename"); - assertEquals("core1" ,params[0]); - assertEquals("core2" ,params[1]); + assertEquals("core1", params[0]); + assertEquals("core2", params[1]); - TestCollectionAPIs.makeCall(apiBag, "/cores/core1", SolrRequest.METHOD.POST, - "{unload:{deleteIndex : true}}", mockCC); + TestCollectionAPIs.makeCall( + apiBag, "/cores/core1", SolrRequest.METHOD.POST, "{unload:{deleteIndex : true}}", mockCC); params = calls.get("unload"); - assertEquals("core1" ,params[0]); - assertEquals(Boolean.TRUE ,params[1]); + assertEquals("core1", params[0]); + assertEquals(Boolean.TRUE, params[1]); } @SuppressWarnings({"unchecked"}) - public static CoreContainer getCoreContainerMock(final Map<String, Object[]> in,Map<String, Object> out ) { + public static CoreContainer getCoreContainerMock( + final Map<String, Object[]> in, Map<String, Object> out) { assumeWorkingMockito(); - - CoreContainer mockCC = mock(CoreContainer.class); - when(mockCC.create(any(String.class), any(Path.class) , any(Map.class), anyBoolean())).thenAnswer(invocationOnMock -> { - in.put("create", invocationOnMock.getArguments()); - return null; - }); - - doAnswer(invocationOnMock -> { - in.put("swap", invocationOnMock.getArguments()); - return null; - }).when(mockCC).swap(any(String.class), any(String.class)); - - doAnswer(invocationOnMock -> { - in.put("rename", invocationOnMock.getArguments()); - return null; - }).when(mockCC).rename(any(String.class), any(String.class)); - - doAnswer(invocationOnMock -> { - in.put("unload", invocationOnMock.getArguments()); - return null; - }).when(mockCC).unload(any(String.class), anyBoolean(), - anyBoolean(), anyBoolean()); + CoreContainer mockCC = mock(CoreContainer.class); + when(mockCC.create(any(String.class), any(Path.class), any(Map.class), anyBoolean())) + .thenAnswer( + 
invocationOnMock -> { + in.put("create", invocationOnMock.getArguments()); + return null; + }); + + doAnswer( + invocationOnMock -> { + in.put("swap", invocationOnMock.getArguments()); + return null; + }) + .when(mockCC) + .swap(any(String.class), any(String.class)); + + doAnswer( + invocationOnMock -> { + in.put("rename", invocationOnMock.getArguments()); + return null; + }) + .when(mockCC) + .rename(any(String.class), any(String.class)); + + doAnswer( + invocationOnMock -> { + in.put("unload", invocationOnMock.getArguments()); + return null; + }) + .when(mockCC) + .unload(any(String.class), anyBoolean(), anyBoolean(), anyBoolean()); when(mockCC.getCoreRootDirectory()).thenReturn(Paths.get("coreroot")); when(mockCC.getContainerProperties()).thenReturn(new Properties()); @@ -104,6 +117,4 @@ public static CoreContainer getCoreContainerMock(final Map in, when(mockCC.getRequestHandlers()).thenAnswer(invocationOnMock -> out.get("getRequestHandlers")); return mockCC; } - - } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/ThreadDumpHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/ThreadDumpHandlerTest.java index 2591cf53119..1b06e422f01 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/ThreadDumpHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/ThreadDumpHandlerTest.java @@ -25,7 +25,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.ReentrantLock; import java.util.function.Consumer; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrRequest; @@ -38,15 +37,14 @@ import org.slf4j.LoggerFactory; /** - * This test is currently flawed because it only ensures the 'test-*' threads don't exit before the asserts, - * it doesn't adequately ensure they 'start' before the asserts. - * Fixing the ownership should be possible using latches, but fixing the '*-blocked' threads may not be possible - * w/o polling + * This test is currently flawed because it only ensures the 'test-*' threads don't exit before the + * asserts, it doesn't adequately ensure they 'start' before the asserts. 
Fixing the ownership + * should be possible using latches, but fixing the '*-blocked' threads may not be possible w/o + * polling */ public class ThreadDumpHandlerTest extends SolrTestCaseJ4 { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema.xml"); @@ -61,55 +59,71 @@ public void testMonitor() throws Exception { public void testMonitorBlocked() throws Exception { doTestMonitor(true); } - + public void doTestMonitor(final boolean checkBlockedThreadViaPolling) throws Exception { - assumeTrue("monitor checking not supported on this JVM", - ManagementFactory.getThreadMXBean().isObjectMonitorUsageSupported()); - + assumeTrue( + "monitor checking not supported on this JVM", + ManagementFactory.getThreadMXBean().isObjectMonitorUsageSupported()); + /** unique class name to show up as a lock class name in output */ - final class TestMonitorStruct { /* empty */ } - + final class TestMonitorStruct { + /* empty */ + } + final List failures = new ArrayList<>(); final CountDownLatch lockIsHeldLatch = new CountDownLatch(1); final CountDownLatch doneWithTestLatch = new CountDownLatch(1); final Object monitor = new TestMonitorStruct(); - final Thread ownerT = new Thread(() -> { - synchronized (monitor) { - lockIsHeldLatch.countDown(); - log.info("monitor ownerT waiting for doneWithTestLatch to release me..."); - try { - if ( ! doneWithTestLatch.await(30, TimeUnit.SECONDS ) ){ - failures.add("ownerT: never saw doneWithTestLatch released"); - } - } catch (InterruptedException ie) { - failures.add("ownerT: " + ie.toString()); - } - } - }, "test-thread-monitor-owner"); + final Thread ownerT = + new Thread( + () -> { + synchronized (monitor) { + lockIsHeldLatch.countDown(); + log.info("monitor ownerT waiting for doneWithTestLatch to release me..."); + try { + if (!doneWithTestLatch.await(30, TimeUnit.SECONDS)) { + failures.add("ownerT: never saw doneWithTestLatch released"); + } + } catch (InterruptedException ie) { + failures.add("ownerT: " + ie.toString()); + } + } + }, + "test-thread-monitor-owner"); // only used if checkBlockedThreadViaPolling - // don't start until after lockIsHeldLatch fires - final Thread blockedT = new Thread(() -> { - log.info("blockedT waiting for monitor..."); - synchronized (monitor) { - log.info("monitor now unblocked"); - } - }, "test-thread-monitor-blocked"); - + // don't start until after lockIsHeldLatch fires + final Thread blockedT = + new Thread( + () -> { + log.info("blockedT waiting for monitor..."); + synchronized (monitor) { + log.info("monitor now unblocked"); + } + }, + "test-thread-monitor-blocked"); + try { ownerT.start(); - if ( ! 
lockIsHeldLatch.await(30, TimeUnit.SECONDS ) ){ + if (!lockIsHeldLatch.await(30, TimeUnit.SECONDS)) { failures.add("never saw lockIsHeldLatch released"); return; } - request("/admin/info/threads", rsp -> { - // monitor owner 'ownerT' - // (which *MAY* also be waiting on doneWithTestLatch, but may not have reached that line yet) - NamedList monitorOwnerThreadInfo = getThreadInfo(rsp,"test-thread-monitor-owner"); - assert monitorOwnerThreadInfo != null; - assertTrue("Thread monitor ownerT: ", monitorOwnerThreadInfo._getStr("monitors-locked", "").contains("TestMonitorStruct")); - }); + request( + "/admin/info/threads", + rsp -> { + // monitor owner 'ownerT' + // (which *MAY* also be waiting on doneWithTestLatch, but may not have reached that line + // yet) + NamedList monitorOwnerThreadInfo = getThreadInfo(rsp, "test-thread-monitor-owner"); + assert monitorOwnerThreadInfo != null; + assertTrue( + "Thread monitor ownerT: ", + monitorOwnerThreadInfo + ._getStr("monitors-locked", "") + .contains("TestMonitorStruct")); + }); if (checkBlockedThreadViaPolling) { log.info("Also checking with blockedT thread setup via polling..."); @@ -119,25 +133,36 @@ final class TestMonitorStruct { /* empty */ } log.warn("Couldn't set blockedT priority", e); } blockedT.start(); - // there is no way to "await" on the situation of the 'blockedT' thread actually reaching the - // "synchronized" block and becoming BLOCKED ... we just have to Poll for it... - for (int i = 0; i < 500 && (! Thread.State.BLOCKED.equals(blockedT.getState())); i++) { + // there is no way to "await" on the situation of the 'blockedT' thread actually reaching + // the "synchronized" block and becoming BLOCKED ... we just have to Poll for it... + for (int i = 0; i < 500 && (!Thread.State.BLOCKED.equals(blockedT.getState())); i++) { Thread.sleep(10); // 10ms at a time, at most 5 sec total } if (Thread.State.BLOCKED.equals(blockedT.getState())) { - request("/admin/info/threads", rsp -> { - // same monitor owner 'ownerT' - final NamedList monitorOwnerThreadInfo = getThreadInfo(rsp, "test-thread-monitor-owner"); - assert monitorOwnerThreadInfo != null; - assertTrue("Same thread ownerT: ", monitorOwnerThreadInfo._getStr("monitors-locked", "").contains("TestMonitorStruct")); + request( + "/admin/info/threads", + rsp -> { + // same monitor owner 'ownerT' + final NamedList monitorOwnerThreadInfo = + getThreadInfo(rsp, "test-thread-monitor-owner"); + assert monitorOwnerThreadInfo != null; + assertTrue( + "Same thread ownerT: ", + monitorOwnerThreadInfo + ._getStr("monitors-locked", "") + .contains("TestMonitorStruct")); - // blocked thread 'blockedT', waiting on the monitor - final NamedList blockedThreadInfo = getThreadInfo(rsp, "test-thread-monitor-blocked"); - assert blockedThreadInfo != null; - assertTrue("blocked thread blockedT waiting on the monitor: ", + // blocked thread 'blockedT', waiting on the monitor + final NamedList blockedThreadInfo = + getThreadInfo(rsp, "test-thread-monitor-blocked"); + assert blockedThreadInfo != null; + assertTrue( + "blocked thread blockedT waiting on the monitor: ", blockedThreadInfo._getStr("state", "").contains("BLOCKED") - && blockedThreadInfo._getStr("lock-waiting", "").contains("test-thread-monitor-owner")); - }); + && blockedThreadInfo + ._getStr("lock-waiting", "") + .contains("test-thread-monitor-owner")); + }); } } } finally { @@ -150,70 +175,84 @@ final class TestMonitorStruct { /* empty */ } } } - public void testOwnableSync() throws Exception { doTestOwnableSync(false); } - + /* checking for the 
WAITING thread requires some polling, so only do it nightly */
   @Nightly
   public void testOwnableSyncWaiting() throws Exception {
     doTestOwnableSync(true);
   }
-
+
   public void doTestOwnableSync(final boolean checkWaitingThreadViaPolling) throws Exception {
-    assumeTrue("ownable sync checking not supported on this JVM",
-        ManagementFactory.getThreadMXBean().isSynchronizerUsageSupported());
+    assumeTrue(
+        "ownable sync checking not supported on this JVM",
+        ManagementFactory.getThreadMXBean().isSynchronizerUsageSupported());
 
     /** unique class name to show up as a lock class name in output */
-    final class TestReentrantLockStruct extends ReentrantLock { /* empty */ }
-
+    final class TestReentrantLockStruct extends ReentrantLock {
+      /* empty */
+    }
+
     final List<String> failures = new ArrayList<>();
     final CountDownLatch lockIsHeldLatch = new CountDownLatch(1);
     final CountDownLatch doneWithTestLatch = new CountDownLatch(1);
     final ReentrantLock lock = new ReentrantLock();
-    final Thread ownerT = new Thread(() -> {
-      lock.lock();
-      try {
-        lockIsHeldLatch.countDown();
-        log.info("lock ownerT waiting for doneWithTestLatch to release me...");
-        try {
-          if ( ! doneWithTestLatch.await(5, TimeUnit.SECONDS ) ){
-            failures.add("ownerT: never saw doneWithTestLatch release");
-          }
-        } catch (InterruptedException ie) {
-          failures.add("ownerT: " + ie.toString());
-        }
-      } finally {
-        lock.unlock();
-      }
-    }, "test-thread-sync-lock-owner");
+    final Thread ownerT =
+        new Thread(
+            () -> {
+              lock.lock();
+              try {
+                lockIsHeldLatch.countDown();
+                log.info("lock ownerT waiting for doneWithTestLatch to release me...");
+                try {
+                  if (!doneWithTestLatch.await(5, TimeUnit.SECONDS)) {
+                    failures.add("ownerT: never saw doneWithTestLatch release");
+                  }
+                } catch (InterruptedException ie) {
+                  failures.add("ownerT: " + ie.toString());
+                }
+              } finally {
+                lock.unlock();
+              }
+            },
+            "test-thread-sync-lock-owner");
 
     // only used if checkWaitingThreadViaPolling
     // don't start until after lockIsHeldLatch fires
-    final Thread blockedT = new Thread(() -> {
-      log.info("blockedT waiting for lock...");
-      lock.lock();
-      try {
-        log.info("lock now unblocked");
-      } finally {
-        lock.unlock();
-      }
-    }, "test-thread-sync-lock-blocked");
+    final Thread blockedT =
+        new Thread(
+            () -> {
+              log.info("blockedT waiting for lock...");
+              lock.lock();
+              try {
+                log.info("lock now unblocked");
+              } finally {
+                lock.unlock();
+              }
+            },
+            "test-thread-sync-lock-blocked");
 
     try {
       ownerT.start();
-      if ( ! lockIsHeldLatch.await(30, TimeUnit.SECONDS ) ){
+      if (!lockIsHeldLatch.await(30, TimeUnit.SECONDS)) {
         failures.add("never saw lockIsHeldLatch released");
         return;
       }
-      request("/admin/info/threads", rsp -> {
-        // lock owner 'ownerT'
-        // (which *MAY* also be waiting on doneWithTestLatch, but may not have reached that line yet)
-        final NamedList<?> lockOwnerThreadInfo = getThreadInfo(rsp,"test-thread-sync-lock-owner");
-        assert lockOwnerThreadInfo != null;
-        assertTrue("Thread lock:", lockOwnerThreadInfo._getStr("synchronizers-locked", "").contains("ReentrantLock"));
-      });
+      request(
+          "/admin/info/threads",
+          rsp -> {
+            // lock owner 'ownerT'
+            // (which *MAY* also be waiting on doneWithTestLatch, but may not have reached that line
+            // yet)
+            final NamedList<?> lockOwnerThreadInfo =
+                getThreadInfo(rsp, "test-thread-sync-lock-owner");
+            assert lockOwnerThreadInfo != null;
+            assertTrue(
+                "Thread lock:",
+                lockOwnerThreadInfo._getStr("synchronizers-locked", "").contains("ReentrantLock"));
+          });
 
       if (checkWaitingThreadViaPolling) {
         log.info("Also checking with blockedT thread setup via polling...");
@@ -223,24 +262,36 @@ final class TestReentrantLockStruct extends ReentrantLock { /* empty */ }
           log.warn("Couldn't set blockedT priority", e);
         }
         blockedT.start();
-        // there is no way to "await" on the situation of the 'blockedT' thread actually reaches the lock()
-        // call and WAITING in the queue ... we just have to Poll for it...
-        for (int i = 0; i < 500 && (! lock.hasQueuedThread(blockedT)); i++) {
+        // there is no way to "await" on the 'blockedT' thread actually reaching the lock() call
+        // and WAITING in the queue ... we just have to poll for it...
+        for (int i = 0; i < 500 && (!lock.hasQueuedThread(blockedT)); i++) {
           Thread.sleep(10); // 10ms at a time, at most 5 sec total
         }
         if (lock.hasQueuedThread(blockedT)) {
-          request("/admin/info/threads", rsp -> {
-            // lock owner 'ownerT'
-            final NamedList<?> lockOwnerThreadInfo = getThreadInfo(rsp, "test-thread-sync-lock-owner");
-            assert lockOwnerThreadInfo != null;
-            assertTrue("Thread locked: ", lockOwnerThreadInfo._getStr("synchronizers-locked", "").contains("ReentrantLock"));
+          request(
+              "/admin/info/threads",
+              rsp -> {
+                // lock owner 'ownerT'
+                final NamedList<?> lockOwnerThreadInfo =
+                    getThreadInfo(rsp, "test-thread-sync-lock-owner");
+                assert lockOwnerThreadInfo != null;
+                assertTrue(
+                    "Thread locked: ",
+                    lockOwnerThreadInfo
+                        ._getStr("synchronizers-locked", "")
+                        .contains("ReentrantLock"));
 
-            // blocked thread 'blockedT', waiting on the lock
-            final NamedList<?> blockedThreadInfo = getThreadInfo(rsp, "test-thread-sync-lock-blocked");
-            assert blockedThreadInfo != null;
-            assertTrue("Waiting on the lock: ", blockedThreadInfo._getStr("state", "").contains("WAITING")
-                && blockedThreadInfo._getStr("lock-waiting", "").contains("test-thread-sync-lock-owner"));
-          });
+                // blocked thread 'blockedT', waiting on the lock
+                final NamedList<?> blockedThreadInfo =
+                    getThreadInfo(rsp, "test-thread-sync-lock-blocked");
+                assert blockedThreadInfo != null;
+                assertTrue(
+                    "Waiting on the lock: ",
+                    blockedThreadInfo._getStr("state", "").contains("WAITING")
+                        && blockedThreadInfo
+                            ._getStr("lock-waiting", "")
+                            .contains("test-thread-sync-lock-owner"));
+              });
         }
       }
     } finally {
@@ -257,18 +308,19 @@ private void request(String path, Consumer<NamedList<Object>> consumer) throws Except
     SolrClient client = new EmbeddedSolrServer(h.getCore());
     ModifiableSolrParams mparams = new ModifiableSolrParams();
     mparams.set("indent", true);
-    NamedList<Object> rsp = client.request(new GenericSolrRequest(SolrRequest.METHOD.GET, path, mparams));
+    NamedList<Object> rsp =
+        client.request(new GenericSolrRequest(SolrRequest.METHOD.GET, path, mparams));
     consumer.accept(rsp);
   }
 
   private NamedList<?> getThreadInfo(NamedList<Object> rsp, String threadName) {
-    for (Map.Entry<String, Object> threadInfoEntry : (NamedList<Object>) rsp._get("system/threadDump", null)) {
+    for (Map.Entry<String, Object> threadInfoEntry :
+        (NamedList<Object>) rsp._get("system/threadDump", null)) {
       NamedList<?> thread = (NamedList<?>) threadInfoEntry.getValue();
-      if (thread._getStr("name", "").contains(threadName)) {
+      if (thread._getStr("name", "").contains(threadName)) {
         return thread;
       }
     }
     return null;
   }
-
 }
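Aside: the test above is ultimately exercising JMX-level lock reporting. /admin/info/threads builds its "synchronizers-locked" output from java.lang.management thread dumps, in which a held ReentrantLock surfaces as an ownable synchronizer. A minimal JDK-only sketch of that same mechanism, independent of Solr (the class name OwnableSyncDumpDemo is illustrative, not Solr code):

  import java.lang.management.LockInfo;
  import java.lang.management.ManagementFactory;
  import java.lang.management.ThreadInfo;
  import java.lang.management.ThreadMXBean;
  import java.util.concurrent.locks.ReentrantLock;

  public class OwnableSyncDumpDemo {
    public static void main(String[] args) {
      ThreadMXBean bean = ManagementFactory.getThreadMXBean();
      // Same precondition the test guards with assumeTrue(...).
      if (!bean.isSynchronizerUsageSupported()) {
        System.out.println("ownable synchronizer monitoring not supported on this JVM");
        return;
      }
      ReentrantLock lock = new ReentrantLock();
      lock.lock();
      try {
        // lockedSynchronizers=true asks for the AbstractOwnableSynchronizers
        // (e.g. ReentrantLock's internal sync) that each thread currently owns.
        for (ThreadInfo ti : bean.dumpAllThreads(false, true)) {
          for (LockInfo li : ti.getLockedSynchronizers()) {
            System.out.println(ti.getThreadName() + " owns " + li.getClassName());
          }
        }
      } finally {
        lock.unlock();
      }
    }
  }

Run as-is, the main thread reports owning a java.util.concurrent.locks.ReentrantLock$NonfairSync, which is the string the test's contains("ReentrantLock") assertion keys on.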
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/V2CollectionBackupsAPIMappingTest.java b/solr/core/src/test/org/apache/solr/handler/admin/V2CollectionBackupsAPIMappingTest.java
index f05b9bd6e4a..6ad3ccad53f 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/V2CollectionBackupsAPIMappingTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/V2CollectionBackupsAPIMappingTest.java
@@ -16,7 +16,17 @@
  */
 package org.apache.solr.handler.admin;
 
+import static org.apache.solr.common.params.CommonParams.ACTION;
+import static org.apache.solr.common.params.CommonParams.NAME;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+
 import com.google.common.collect.Maps;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.api.Api;
 import org.apache.solr.api.ApiBag;
@@ -35,17 +45,6 @@
 import org.junit.Test;
 import org.mockito.ArgumentCaptor;
 
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.apache.solr.common.params.CommonParams.ACTION;
-import static org.apache.solr.common.params.CommonParams.NAME;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-
 public class V2CollectionBackupsAPIMappingTest extends SolrTestCaseJ4 {
 
   private ApiBag apiBag;
@@ -69,17 +68,20 @@ public void setupApiBag() throws Exception {
 
   @Test
   public void testDeleteBackupsAllParams() throws Exception {
-    final SolrParams v1Params = captureConvertedV1Params("/collections/backups", "POST",
-        "{'delete-backups': {" +
-            "'name': 'backupName', " +
-            "'collection': 'collectionName', " +
-            "'location': '/some/location/uri', " +
-            "'repository': 'someRepository', " +
-            "'backupId': 123, " +
-            "'maxNumBackupPoints': 456, " +
-            "'purgeUnused': true, " +
-            "'async': 'requestTrackingId'" +
-            "}}");
+    final SolrParams v1Params =
+        captureConvertedV1Params(
+            "/collections/backups",
+            "POST",
+            "{'delete-backups': {"
+                + "'name': 'backupName', "
+                + "'collection': 'collectionName', "
+                + "'location': '/some/location/uri', "
+                + "'repository': 'someRepository', "
+                + "'backupId': 123, "
+                + "'maxNumBackupPoints': 456, "
+                + "'purgeUnused': true, "
+                + "'async': 'requestTrackingId'"
+                + "}}");
 
     assertEquals(CollectionParams.CollectionAction.DELETEBACKUP.lowerName, v1Params.get(ACTION));
     assertEquals("backupName", v1Params.get(NAME));
@@ -93,12 +95,15 @@ public void testDeleteBackupsAllParams() throws Exception {
 
   @Test
   public void testListBackupsAllParams() throws Exception {
-    final SolrParams v1Params = captureConvertedV1Params("/collections/backups", "POST",
-        "{'list-backups': {" +
-            "'name': 'backupName', " +
-            "'location': '/some/location/uri', " +
-            "'repository': 'someRepository' " +
-            "}}");
+    final SolrParams v1Params =
+        captureConvertedV1Params(
+            "/collections/backups",
+            "POST",
+            "{'list-backups': {"
+                + "'name': 'backupName', "
+                + "'location': '/some/location/uri', "
+                + "'repository': 'someRepository' "
+                + "}}");
 
     assertEquals(CollectionParams.CollectionAction.LISTBACKUP.lowerName, v1Params.get(ACTION));
     assertEquals("backupName", v1Params.get(NAME));
@@ -106,28 +111,30 @@
     assertEquals("someRepository", v1Params.get(CoreAdminParams.BACKUP_REPOSITORY));
   }
 
-  private SolrParams captureConvertedV1Params(String path, String method, String v2RequestBody) throws Exception {
+  private SolrParams captureConvertedV1Params(String path, String method, String v2RequestBody)
+      throws Exception {
     final HashMap<String, String> parts = new HashMap<>();
     final Api api = apiBag.lookup(path, method, parts);
     final SolrQueryResponse rsp = new SolrQueryResponse();
-    final LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, Maps.newHashMap()) {
-      @Override
-      public List<CommandOperation> getCommands(boolean validateInput) {
-        if (v2RequestBody == null) return Collections.emptyList();
-        return ApiBag.getCommandOperations(new ContentStreamBase.StringStream(v2RequestBody), api.getCommandSchema(), true);
-      }
-
-      @Override
-      public Map<String, String> getPathTemplateValues() {
-        return parts;
-      }
-
-      @Override
-      public String getHttpMethod() {
-        return method;
-      }
-    };
-
+    final LocalSolrQueryRequest req =
+        new LocalSolrQueryRequest(null, Maps.newHashMap()) {
+          @Override
+          public List<CommandOperation> getCommands(boolean validateInput) {
+            if (v2RequestBody == null) return Collections.emptyList();
+            return ApiBag.getCommandOperations(
+                new ContentStreamBase.StringStream(v2RequestBody), api.getCommandSchema(), true);
+          }
+
+          @Override
+          public Map<String, String> getPathTemplateValues() {
+            return parts;
+          }
+
+          @Override
+          public String getHttpMethod() {
+            return method;
+          }
+        };
     api.call(req, rsp);
     verify(mockCollectionsHandler).handleRequestBody(queryRequestCaptor.capture(), any());
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/V2CollectionsAPIMappingTest.java b/solr/core/src/test/org/apache/solr/handler/admin/V2CollectionsAPIMappingTest.java
index a2b0c85530e..f2eff0b8629 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/V2CollectionsAPIMappingTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/V2CollectionsAPIMappingTest.java
@@ -16,7 +16,17 @@
  */
 package org.apache.solr.handler.admin;
 
+import static org.apache.solr.common.params.CommonParams.ACTION;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+
 import com.google.common.collect.Maps;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.api.Api;
 import org.apache.solr.api.ApiBag;
@@ -44,257 +54,283 @@
 import org.junit.Test;
 import org.mockito.ArgumentCaptor;
 
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-
-import static org.apache.solr.common.params.CommonParams.ACTION;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-
 /**
  * Unit tests for the API mappings found in {@link org.apache.solr.handler.CollectionsAPI}.
  *
- * This test bears many similarities to {@link TestCollectionAPIs} which appears to test the mappings indirectly by
- * checking message sent to the ZK overseer (which is similar, but not identical to the v1 param list). If there's no
- * particular benefit to testing the mappings in this way (there very well may be), then we should combine these two
- * test classes at some point in the future using the simpler approach here.
+ * <p>This test bears many similarities to {@link TestCollectionAPIs}, which appears to test the
+ * mappings indirectly by checking messages sent to the ZK overseer (which is similar, but not
+ * identical to the v1 param list). If there's no particular benefit to testing the mappings in this
+ * way (there very well may be), then we should combine these two test classes at some point in the
+ * future using the simpler approach here.
  *
- * Note that the V2 requests made by these tests are not necessarily semantically valid. They shouldn't be taken as
- * examples. In several instances, mutually exclusive JSON parameters are provided. This is done to exercise conversion
- * of all parameters, even if particular combinations are never expected in the same request.
+ * <p>Note that the V2 requests made by these tests are not necessarily semantically valid. They
+ * shouldn't be taken as examples. In several instances, mutually exclusive JSON parameters are
+ * provided. This is done to exercise conversion of all parameters, even if particular combinations
+ * are never expected in the same request.
  */
 public class V2CollectionsAPIMappingTest extends SolrTestCaseJ4 {
-    private ApiBag apiBag;
+  private ApiBag apiBag;
 
-    private ArgumentCaptor<SolrQueryRequest> queryRequestCaptor;
-    private CollectionsHandler mockCollectionsHandler;
+  private ArgumentCaptor<SolrQueryRequest> queryRequestCaptor;
+  private CollectionsHandler mockCollectionsHandler;
 
-    @BeforeClass
-    public static void ensureWorkingMockito() {
-        assumeWorkingMockito();
-    }
+  @BeforeClass
+  public static void ensureWorkingMockito() {
+    assumeWorkingMockito();
+  }
 
-    @Before
-    public void setupApiBag() throws Exception {
-        mockCollectionsHandler = mock(CollectionsHandler.class);
-        queryRequestCaptor = ArgumentCaptor.forClass(SolrQueryRequest.class);
+  @Before
+  public void setupApiBag() throws Exception {
+    mockCollectionsHandler = mock(CollectionsHandler.class);
+    queryRequestCaptor = ArgumentCaptor.forClass(SolrQueryRequest.class);
 
-        apiBag = new ApiBag(false);
-        final CollectionsAPI collectionsAPI = new CollectionsAPI(mockCollectionsHandler);
-        apiBag.registerObject(collectionsAPI);
-        apiBag.registerObject(collectionsAPI.collectionsCommands);
-    }
+    apiBag = new ApiBag(false);
+    final CollectionsAPI collectionsAPI = new CollectionsAPI(mockCollectionsHandler);
+    apiBag.registerObject(collectionsAPI);
+    apiBag.registerObject(collectionsAPI.collectionsCommands);
+  }
 
-    @Test
-    public void testCreateCollectionAllProperties() throws Exception {
-        final SolrParams v1Params = captureConvertedV1Params("/collections", "POST",
-            "{'create': {" +
-                "'name': 'techproducts', " +
-                "'config':'_default', " +
-                "'router': {'name': 'composite', 'field': 'routeField', 'foo': 'bar'}, " +
-                "'shards': 'customShardName,anotherCustomShardName', " +
-                "'replicationFactor': 3," +
-                "'nrtReplicas': 1, " +
-                "'tlogReplicas': 1, " +
-                "'pullReplicas': 1, " +
-                "'nodeSet': ['localhost:8983_solr', 'localhost:7574_solr']," +
-                "'shuffleNodes': true," +
-                "'properties': {'foo': 'bar', 'foo2': 'bar2'}, " +
-                "'async': 'requestTrackingId', " +
-                "'waitForFinalState': false, " +
-                "'perReplicaState': false," +
-                "'numShards': 1}}");
+  @Test
+  public void testCreateCollectionAllProperties() throws Exception {
+    final SolrParams v1Params =
+        captureConvertedV1Params(
+            "/collections",
+            "POST",
+            "{'create': {"
+                + "'name': 'techproducts', "
+                + "'config':'_default', "
+                + "'router': {'name': 'composite', 'field': 'routeField', 'foo': 'bar'}, "
+                + "'shards': 'customShardName,anotherCustomShardName', "
+                + "'replicationFactor': 3,"
+                + "'nrtReplicas': 1, "
+                + "'tlogReplicas': 1, "
+                + "'pullReplicas': 1, "
+                + "'nodeSet': ['localhost:8983_solr', 'localhost:7574_solr'],"
+                + "'shuffleNodes': true,"
+                + "'properties': {'foo': 'bar', 'foo2': 'bar2'}, "
+                + "'async': 'requestTrackingId', "
+                + "'waitForFinalState': false, "
+                + "'perReplicaState': false,"
+                + "'numShards': 1}}");
 
-        assertEquals(CollectionParams.CollectionAction.CREATE.lowerName, v1Params.get(ACTION));
-        assertEquals("techproducts", v1Params.get(CommonParams.NAME));
-        assertEquals("_default", v1Params.get(CollectionAdminParams.COLL_CONF));
-        assertEquals("composite", v1Params.get("router.name"));
-        assertEquals("routeField", v1Params.get("router.field"));
-        assertEquals("bar", v1Params.get("router.foo"));
-        assertEquals("customShardName,anotherCustomShardName", v1Params.get(ShardParams.SHARDS));
-        assertEquals(3, v1Params.getPrimitiveInt(ZkStateReader.REPLICATION_FACTOR));
-        assertEquals(1, v1Params.getPrimitiveInt(ZkStateReader.NRT_REPLICAS));
-        assertEquals(1, v1Params.getPrimitiveInt(ZkStateReader.TLOG_REPLICAS));
-        assertEquals(1, v1Params.getPrimitiveInt(ZkStateReader.PULL_REPLICAS));
-        assertEquals("localhost:8983_solr,localhost:7574_solr", v1Params.get(CollectionAdminParams.CREATE_NODE_SET_PARAM));
-        assertEquals(true, v1Params.getPrimitiveBool(CollectionAdminParams.CREATE_NODE_SET_SHUFFLE_PARAM));
-        assertEquals("bar", v1Params.get("property.foo"));
-        assertEquals("bar2", v1Params.get("property.foo2"));
-        assertEquals("requestTrackingId", v1Params.get(CommonAdminParams.ASYNC));
-        assertEquals(false, v1Params.getPrimitiveBool(CommonAdminParams.WAIT_FOR_FINAL_STATE));
-        assertEquals(false, v1Params.getPrimitiveBool(DocCollection.PER_REPLICA_STATE));
-        assertEquals(1, v1Params.getPrimitiveInt(CollectionAdminParams.NUM_SHARDS));
-    }
+    assertEquals(CollectionParams.CollectionAction.CREATE.lowerName, v1Params.get(ACTION));
+    assertEquals("techproducts", v1Params.get(CommonParams.NAME));
+    assertEquals("_default", v1Params.get(CollectionAdminParams.COLL_CONF));
+    assertEquals("composite", v1Params.get("router.name"));
+    assertEquals("routeField", v1Params.get("router.field"));
+    assertEquals("bar", v1Params.get("router.foo"));
+    assertEquals("customShardName,anotherCustomShardName", v1Params.get(ShardParams.SHARDS));
+    assertEquals(3, v1Params.getPrimitiveInt(ZkStateReader.REPLICATION_FACTOR));
+    assertEquals(1, v1Params.getPrimitiveInt(ZkStateReader.NRT_REPLICAS));
+    assertEquals(1, v1Params.getPrimitiveInt(ZkStateReader.TLOG_REPLICAS));
+    assertEquals(1, v1Params.getPrimitiveInt(ZkStateReader.PULL_REPLICAS));
+    assertEquals(
+        "localhost:8983_solr,localhost:7574_solr",
+        v1Params.get(CollectionAdminParams.CREATE_NODE_SET_PARAM));
+    assertEquals(
+        true, v1Params.getPrimitiveBool(CollectionAdminParams.CREATE_NODE_SET_SHUFFLE_PARAM));
+    assertEquals("bar", v1Params.get("property.foo"));
+    assertEquals("bar2", v1Params.get("property.foo2"));
+    assertEquals("requestTrackingId", v1Params.get(CommonAdminParams.ASYNC));
+    assertEquals(false, v1Params.getPrimitiveBool(CommonAdminParams.WAIT_FOR_FINAL_STATE));
+    assertEquals(false, v1Params.getPrimitiveBool(DocCollection.PER_REPLICA_STATE));
+    assertEquals(1, v1Params.getPrimitiveInt(CollectionAdminParams.NUM_SHARDS));
+  }
 
-    @Test
-    public void testListCollectionsAllProperties() throws Exception {
-        final SolrParams v1Params = captureConvertedV1Params("/collections", "GET", null);
+  @Test
+  public void testListCollectionsAllProperties() throws Exception {
+    final SolrParams v1Params = captureConvertedV1Params("/collections", "GET", null);
 
-        assertEquals(CollectionParams.CollectionAction.LIST.lowerName, v1Params.get(ACTION));
-    }
+    assertEquals(CollectionParams.CollectionAction.LIST.lowerName, v1Params.get(ACTION));
+  }
 
-    @Test
-    public void testCreateAliasAllProperties() throws Exception {
-        final SolrParams v1Params = captureConvertedV1Params("/collections", "POST",
-            "{'create-alias': {" +
-                "'name': 'aliasName', " +
-                "'collections': ['techproducts1', 'techproducts2'], " +
-                "'tz': 'someTimeZone', " +
-                "'async': 'requestTrackingId', " +
-                "'router': {" +
-                " 'name': 'time', " +
-                " 'field': 'date_dt', " +
-                " 'interval': '+1HOUR', " +
-                " 'maxFutureMs': 3600, " +
-                " 'preemptiveCreateMath': 'somePreemptiveCreateMathString', " +
-                " 'autoDeleteAge': 'someAutoDeleteAgeExpression', " +
-                " 'maxCardinality': 36, " +
-                " 'mustMatch': 'someRegex', " +
-                "}, " +
-                "'create-collection': {" +
-                " 'numShards': 1, " +
-                " 'properties': {'foo': 'bar', 'foo2': 'bar2'}, " +
-                " 'replicationFactor': 3 " +
-                "}" +
-                "}}");
+  @Test
+  public void testCreateAliasAllProperties() throws Exception {
+    final SolrParams v1Params =
+        captureConvertedV1Params(
+            "/collections",
+            "POST",
+            "{'create-alias': {"
+                + "'name': 'aliasName', "
+                + "'collections': ['techproducts1', 'techproducts2'], "
+                + "'tz': 'someTimeZone', "
+                + "'async': 'requestTrackingId', "
+                + "'router': {"
+                + " 'name': 'time', "
+                + " 'field': 'date_dt', "
+                + " 'interval': '+1HOUR', "
+                + " 'maxFutureMs': 3600, "
+                + " 'preemptiveCreateMath': 'somePreemptiveCreateMathString', "
+                + " 'autoDeleteAge': 'someAutoDeleteAgeExpression', "
+                + " 'maxCardinality': 36, "
+                + " 'mustMatch': 'someRegex', "
+                + "}, "
+                + "'create-collection': {"
+                + " 'numShards': 1, "
+                + " 'properties': {'foo': 'bar', 'foo2': 'bar2'}, "
+                + " 'replicationFactor': 3 "
+                + "}"
+                + "}}");
 
-        assertEquals(CollectionParams.CollectionAction.CREATEALIAS.lowerName, v1Params.get(ACTION));
-        assertEquals("aliasName", v1Params.get(CommonParams.NAME));
-        assertEquals("techproducts1,techproducts2", v1Params.get("collections"));
-        assertEquals("someTimeZone", v1Params.get(CommonParams.TZ.toLowerCase(Locale.ROOT)));
-        assertEquals("requestTrackingId", v1Params.get(CommonAdminParams.ASYNC));
-        assertEquals("time", v1Params.get(CollectionAdminRequest.CreateTimeRoutedAlias.ROUTER_TYPE_NAME));
-        assertEquals("date_dt", v1Params.get(CollectionAdminRequest.CreateTimeRoutedAlias.ROUTER_FIELD));
-        assertEquals("+1HOUR", v1Params.get(CollectionAdminRequest.CreateTimeRoutedAlias.ROUTER_INTERVAL));
-        assertEquals(3600, v1Params.getPrimitiveInt(CollectionAdminRequest.CreateTimeRoutedAlias.ROUTER_MAX_FUTURE));
-        assertEquals("somePreemptiveCreateMathString", v1Params.get(CollectionAdminRequest.CreateTimeRoutedAlias.ROUTER_PREEMPTIVE_CREATE_WINDOW));
-        assertEquals("someAutoDeleteAgeExpression", v1Params.get(CollectionAdminRequest.CreateTimeRoutedAlias.ROUTER_AUTO_DELETE_AGE));
-        assertEquals(36, v1Params.getPrimitiveInt(CategoryRoutedAlias.ROUTER_MAX_CARDINALITY));
-        assertEquals("someRegex", v1Params.get(CategoryRoutedAlias.ROUTER_MUST_MATCH));
-        assertEquals(1, v1Params.getPrimitiveInt(RoutedAlias.CREATE_COLLECTION_PREFIX + CollectionAdminParams.NUM_SHARDS));
-        assertEquals("bar", v1Params.get(RoutedAlias.CREATE_COLLECTION_PREFIX + "property.foo"));
-        assertEquals("bar2", v1Params.get(RoutedAlias.CREATE_COLLECTION_PREFIX + "property.foo2"));
-        assertEquals(3, v1Params.getPrimitiveInt(RoutedAlias.CREATE_COLLECTION_PREFIX + ZkStateReader.REPLICATION_FACTOR));
-    }
+    assertEquals(CollectionParams.CollectionAction.CREATEALIAS.lowerName, v1Params.get(ACTION));
+    assertEquals("aliasName", v1Params.get(CommonParams.NAME));
+    assertEquals("techproducts1,techproducts2", v1Params.get("collections"));
+    assertEquals("someTimeZone", v1Params.get(CommonParams.TZ.toLowerCase(Locale.ROOT)));
+    assertEquals("requestTrackingId", v1Params.get(CommonAdminParams.ASYNC));
+    assertEquals(
+        "time", v1Params.get(CollectionAdminRequest.CreateTimeRoutedAlias.ROUTER_TYPE_NAME));
+    assertEquals(
+        "date_dt", v1Params.get(CollectionAdminRequest.CreateTimeRoutedAlias.ROUTER_FIELD));
+    assertEquals(
+        "+1HOUR", v1Params.get(CollectionAdminRequest.CreateTimeRoutedAlias.ROUTER_INTERVAL));
+    assertEquals(
+        3600,
+        v1Params.getPrimitiveInt(CollectionAdminRequest.CreateTimeRoutedAlias.ROUTER_MAX_FUTURE));
+    assertEquals(
+        "somePreemptiveCreateMathString",
+        v1Params.get(CollectionAdminRequest.CreateTimeRoutedAlias.ROUTER_PREEMPTIVE_CREATE_WINDOW));
+    assertEquals(
+        "someAutoDeleteAgeExpression",
+        v1Params.get(CollectionAdminRequest.CreateTimeRoutedAlias.ROUTER_AUTO_DELETE_AGE));
+    assertEquals(36, v1Params.getPrimitiveInt(CategoryRoutedAlias.ROUTER_MAX_CARDINALITY));
+    assertEquals("someRegex", v1Params.get(CategoryRoutedAlias.ROUTER_MUST_MATCH));
+    assertEquals(
+        1,
+        v1Params.getPrimitiveInt(
+            RoutedAlias.CREATE_COLLECTION_PREFIX + CollectionAdminParams.NUM_SHARDS));
+    assertEquals("bar", v1Params.get(RoutedAlias.CREATE_COLLECTION_PREFIX + "property.foo"));
+    assertEquals("bar2", v1Params.get(RoutedAlias.CREATE_COLLECTION_PREFIX + "property.foo2"));
+    assertEquals(
+        3,
+        v1Params.getPrimitiveInt(
+            RoutedAlias.CREATE_COLLECTION_PREFIX + ZkStateReader.REPLICATION_FACTOR));
+  }
 
-    @Test
-    public void testDeleteAliasAllProperties() throws Exception {
-        final SolrParams v1Params = captureConvertedV1Params("/collections", "POST",
-            "{'delete-alias': {" +
-                "'name': 'aliasName', " +
-                "'async': 'requestTrackingId'" +
-                "}}");
+  @Test
+  public void testDeleteAliasAllProperties() throws Exception {
+    final SolrParams v1Params =
+        captureConvertedV1Params(
+            "/collections",
+            "POST",
+            "{'delete-alias': {" + "'name': 'aliasName', " + "'async': 'requestTrackingId'" + "}}");
 
-        assertEquals(CollectionParams.CollectionAction.DELETEALIAS.lowerName, v1Params.get(ACTION));
-        assertEquals("aliasName", v1Params.get(CommonParams.NAME));
-        assertEquals("requestTrackingId", v1Params.get(CommonAdminParams.ASYNC));
-    }
+    assertEquals(CollectionParams.CollectionAction.DELETEALIAS.lowerName, v1Params.get(ACTION));
+    assertEquals("aliasName", v1Params.get(CommonParams.NAME));
+    assertEquals("requestTrackingId", v1Params.get(CommonAdminParams.ASYNC));
+  }
 
-    @Test
-    public void testSetAliasAllProperties() throws Exception {
-        final SolrParams v1Params = captureConvertedV1Params("/collections", "POST",
-            "{'set-alias-property': {" +
-                "'name': 'aliasName', " +
-                "'async': 'requestTrackingId', " +
-                "'properties': {'foo':'bar', 'foo2':'bar2'}" +
-                "}}");
+  @Test
+  public void testSetAliasAllProperties() throws Exception {
+    final SolrParams v1Params =
+        captureConvertedV1Params(
+            "/collections",
+            "POST",
+            "{'set-alias-property': {"
+                + "'name': 'aliasName', "
+                + "'async': 'requestTrackingId', "
+                + "'properties': {'foo':'bar', 'foo2':'bar2'}"
+                + "}}");
 
-        assertEquals(CollectionParams.CollectionAction.ALIASPROP.lowerName, v1Params.get(ACTION));
-        assertEquals("aliasName", v1Params.get(CommonParams.NAME));
-        assertEquals("requestTrackingId", v1Params.get(CommonAdminParams.ASYNC));
-        assertEquals("bar", v1Params.get("property.foo"));
-        assertEquals("bar2", v1Params.get("property.foo2"));
-    }
+    assertEquals(CollectionParams.CollectionAction.ALIASPROP.lowerName, v1Params.get(ACTION));
+    assertEquals("aliasName", v1Params.get(CommonParams.NAME));
+    assertEquals("requestTrackingId", v1Params.get(CommonAdminParams.ASYNC));
+    assertEquals("bar", v1Params.get("property.foo"));
+    assertEquals("bar2", v1Params.get("property.foo2"));
+  }
 
-    @Test
-    public void testBackupAllProperties() throws Exception {
-        final SolrParams v1Params = captureConvertedV1Params("/collections", "POST",
-            "{'backup-collection': {" +
-                "'name': 'backupName', " +
-                "'collection': 'collectionName', " +
-                "'location': '/some/location/uri', " +
-                "'repository': 'someRepository', " +
-                "'followAliases': true, " +
-                "'indexBackup': 'copy-files', " +
-                "'commitName': 'someSnapshotName', " +
-                "'incremental': true, " +
-                "'async': 'requestTrackingId' " +
-                "}}");
+  @Test
+  public void testBackupAllProperties() throws Exception {
+    final SolrParams v1Params =
+        captureConvertedV1Params(
+            "/collections",
+            "POST",
+            "{'backup-collection': {"
+                + "'name': 'backupName', "
+                + "'collection': 'collectionName', "
+                + "'location': '/some/location/uri', "
+                + "'repository': 'someRepository', "
+                + "'followAliases': true, "
+                + "'indexBackup': 'copy-files', "
+                + "'commitName': 'someSnapshotName', "
+                + "'incremental': true, "
+                + "'async': 'requestTrackingId' "
+                + "}}");
 
-        assertEquals(CollectionParams.CollectionAction.BACKUP.lowerName, v1Params.get(ACTION));
-        assertEquals("backupName", v1Params.get(CommonParams.NAME));
-        assertEquals("collectionName", v1Params.get(BackupManager.COLLECTION_NAME_PROP));
-        assertEquals("/some/location/uri", v1Params.get(CoreAdminParams.BACKUP_LOCATION));
-        assertEquals("someRepository", v1Params.get(CoreAdminParams.BACKUP_REPOSITORY));
-        assertEquals(true, v1Params.getPrimitiveBool(CollectionAdminParams.FOLLOW_ALIASES));
-        assertEquals("copy-files", v1Params.get(CollectionAdminParams.INDEX_BACKUP_STRATEGY));
-        assertEquals("someSnapshotName", v1Params.get(CoreAdminParams.COMMIT_NAME));
-        assertEquals(true, v1Params.getPrimitiveBool(CoreAdminParams.BACKUP_INCREMENTAL));
-        assertEquals("requestTrackingId", v1Params.get(CommonAdminParams.ASYNC));
-    }
+    assertEquals(CollectionParams.CollectionAction.BACKUP.lowerName, v1Params.get(ACTION));
+    assertEquals("backupName", v1Params.get(CommonParams.NAME));
+    assertEquals("collectionName", v1Params.get(BackupManager.COLLECTION_NAME_PROP));
+    assertEquals("/some/location/uri", v1Params.get(CoreAdminParams.BACKUP_LOCATION));
+    assertEquals("someRepository", v1Params.get(CoreAdminParams.BACKUP_REPOSITORY));
+    assertEquals(true, v1Params.getPrimitiveBool(CollectionAdminParams.FOLLOW_ALIASES));
+    assertEquals("copy-files", v1Params.get(CollectionAdminParams.INDEX_BACKUP_STRATEGY));
+    assertEquals("someSnapshotName", v1Params.get(CoreAdminParams.COMMIT_NAME));
+    assertEquals(true, v1Params.getPrimitiveBool(CoreAdminParams.BACKUP_INCREMENTAL));
+    assertEquals("requestTrackingId", v1Params.get(CommonAdminParams.ASYNC));
+  }
 
-    @Test
-    public void testRestoreAllProperties() throws Exception {
-        final SolrParams v1Params = captureConvertedV1Params("/collections", "POST",
-            "{'restore-collection': {" +
-                "'name': 'backupName', " +
-                "'collection': 'collectionName', " +
-                "'location': '/some/location/uri', " +
-                "'repository': 'someRepository', " +
-                "'backupId': 123, " +
-                "'async': 'requestTrackingId', " +
-                "'create-collection': {" +
-                " 'numShards': 1, " +
-                " 'properties': {'foo': 'bar', 'foo2': 'bar2'}, " +
-                " 'replicationFactor': 3 " +
-                "}" +
-                "}}");
+  @Test
+  public void testRestoreAllProperties() throws Exception {
+    final SolrParams v1Params =
+        captureConvertedV1Params(
+            "/collections",
+            "POST",
+            "{'restore-collection': {"
+                + "'name': 'backupName', "
+                + "'collection': 'collectionName', "
+                + "'location': '/some/location/uri', "
+                + "'repository': 'someRepository', "
+                + "'backupId': 123, "
+                + "'async': 'requestTrackingId', "
+                + "'create-collection': {"
+                + " 'numShards': 1, "
+                + " 'properties': {'foo': 'bar', 'foo2': 'bar2'}, "
+                + " 'replicationFactor': 3 "
+                + "}"
+                + "}}");
 
-        assertEquals(CollectionParams.CollectionAction.RESTORE.lowerName, v1Params.get(ACTION));
-        assertEquals("backupName", v1Params.get(CommonParams.NAME));
-        assertEquals("collectionName", v1Params.get(BackupManager.COLLECTION_NAME_PROP));
-        assertEquals("/some/location/uri", v1Params.get(CoreAdminParams.BACKUP_LOCATION));
-        assertEquals("someRepository", v1Params.get(CoreAdminParams.BACKUP_REPOSITORY));
-        assertEquals(123, v1Params.getPrimitiveInt(CoreAdminParams.BACKUP_ID));
-        assertEquals("requestTrackingId", v1Params.get(CommonAdminParams.ASYNC));
-        // NOTE: Unlike other v2 APIs that have a nested object for collection-creation params, restore's v1 equivalent
-        // for these properties doesn't have a "create-collection." prefix.
-        assertEquals(1, v1Params.getPrimitiveInt(CollectionAdminParams.NUM_SHARDS));
-        assertEquals("bar", v1Params.get("property.foo"));
-        assertEquals("bar2", v1Params.get("property.foo2"));
-        assertEquals(3, v1Params.getPrimitiveInt(ZkStateReader.REPLICATION_FACTOR));
-    }
+    assertEquals(CollectionParams.CollectionAction.RESTORE.lowerName, v1Params.get(ACTION));
+    assertEquals("backupName", v1Params.get(CommonParams.NAME));
+    assertEquals("collectionName", v1Params.get(BackupManager.COLLECTION_NAME_PROP));
+    assertEquals("/some/location/uri", v1Params.get(CoreAdminParams.BACKUP_LOCATION));
+    assertEquals("someRepository", v1Params.get(CoreAdminParams.BACKUP_REPOSITORY));
+    assertEquals(123, v1Params.getPrimitiveInt(CoreAdminParams.BACKUP_ID));
+    assertEquals("requestTrackingId", v1Params.get(CommonAdminParams.ASYNC));
+    // NOTE: Unlike other v2 APIs that have a nested object for collection-creation params,
+    // restore's v1 equivalent for these properties doesn't have a "create-collection." prefix.
+    assertEquals(1, v1Params.getPrimitiveInt(CollectionAdminParams.NUM_SHARDS));
+    assertEquals("bar", v1Params.get("property.foo"));
+    assertEquals("bar2", v1Params.get("property.foo2"));
+    assertEquals(3, v1Params.getPrimitiveInt(ZkStateReader.REPLICATION_FACTOR));
+  }
 
-    private SolrParams captureConvertedV1Params(String path, String method, String v2RequestBody) throws Exception {
-        final HashMap<String, String> parts = new HashMap<>();
-        final Api api = apiBag.lookup(path, method, parts);
-        final SolrQueryResponse rsp = new SolrQueryResponse();
-        final LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, Maps.newHashMap()) {
-            @Override
-            public List<CommandOperation> getCommands(boolean validateInput) {
-                if (v2RequestBody == null) return Collections.emptyList();
-                return ApiBag.getCommandOperations(new ContentStreamBase.StringStream(v2RequestBody), api.getCommandSchema(), true);
-            }
-
-            @Override
-            public Map<String, String> getPathTemplateValues() {
-                return parts;
-            }
-
-            @Override
-            public String getHttpMethod() {
-                return method;
-            }
-        };
-
-        api.call(req, rsp);
-        verify(mockCollectionsHandler).handleRequestBody(queryRequestCaptor.capture(), any());
-        return queryRequestCaptor.getValue().getParams();
-    }
+  private SolrParams captureConvertedV1Params(String path, String method, String v2RequestBody)
+      throws Exception {
+    final HashMap<String, String> parts = new HashMap<>();
+    final Api api = apiBag.lookup(path, method, parts);
+    final SolrQueryResponse rsp = new SolrQueryResponse();
+    final LocalSolrQueryRequest req =
+        new LocalSolrQueryRequest(null, Maps.newHashMap()) {
+          @Override
+          public List<CommandOperation> getCommands(boolean validateInput) {
+            if (v2RequestBody == null) return Collections.emptyList();
+            return ApiBag.getCommandOperations(
+                new ContentStreamBase.StringStream(v2RequestBody), api.getCommandSchema(), true);
+          }
+
+          @Override
+          public Map<String, String> getPathTemplateValues() {
+            return parts;
+          }
+
+          @Override
+          public String getHttpMethod() {
+            return method;
+          }
+        };
+    api.call(req, rsp);
+    verify(mockCollectionsHandler).handleRequestBody(queryRequestCaptor.capture(), any());
+    return queryRequestCaptor.getValue().getParams();
+  }
 }
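For readers skimming the assertions in the mapping tests above: the conversion under test is "nested v2 JSON body in, flat v1 query parameters out", with nesting expressed through dotted or prefixed keys (router.name, property.foo, and the create-collection. prefix for routed aliases). A rough, hedged illustration of that flattening convention only, using a generic Map-based body rather than Solr's actual CollectionsAPI plumbing (V2ToV1FlattenSketch and flatten are invented names):

  import java.util.LinkedHashMap;
  import java.util.Map;

  public class V2ToV1FlattenSketch {
    // Illustrative only: flatten {"router": {"name": "time"}} into "router.name=time",
    // the same key convention the assertions above check via v1Params.get("router.name").
    static Map<String, String> flatten(
        String prefix, Map<String, Object> node, Map<String, String> out) {
      for (Map.Entry<String, Object> e : node.entrySet()) {
        String key = prefix.isEmpty() ? e.getKey() : prefix + "." + e.getKey();
        if (e.getValue() instanceof Map) {
          @SuppressWarnings("unchecked")
          Map<String, Object> child = (Map<String, Object>) e.getValue();
          flatten(key, child, out);
        } else {
          out.put(key, String.valueOf(e.getValue()));
        }
      }
      return out;
    }

    public static void main(String[] args) {
      Map<String, Object> router = new LinkedHashMap<>();
      router.put("name", "time");
      router.put("field", "date_dt");
      Map<String, Object> body = new LinkedHashMap<>();
      body.put("router", router);
      // Prints {router.name=time, router.field=date_dt}
      System.out.println(flatten("", body, new LinkedHashMap<>()));
    }
  }

The real mapping layer also renames some keys outright (e.g. config to collection.configName equivalents) and joins arrays into comma-separated values, as the assertions on 'collections' and 'nodeSet' show; this sketch covers only the prefixing idea.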
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/V2CoresAPIMappingTest.java b/solr/core/src/test/org/apache/solr/handler/admin/V2CoresAPIMappingTest.java
index 6290c1330ab..92a305cb86c 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/V2CoresAPIMappingTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/V2CoresAPIMappingTest.java
@@ -17,7 +17,22 @@
 package org.apache.solr.handler.admin;
 
+import static org.apache.solr.common.params.CollectionAdminParams.NUM_SHARDS;
+import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
+import static org.apache.solr.common.params.CommonParams.ACTION;
+import static org.apache.solr.common.params.CoreAdminParams.*;
+import static org.apache.solr.common.params.CoreAdminParams.CoreAdminAction.CREATE;
+import static org.apache.solr.common.params.CoreAdminParams.CoreAdminAction.STATUS;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+
 import com.google.common.collect.Maps;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.api.Api;
 import org.apache.solr.api.ApiBag;
@@ -35,168 +50,160 @@
 import org.junit.Test;
 import org.mockito.ArgumentCaptor;
 
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-
-import static org.apache.solr.common.params.CollectionAdminParams.NUM_SHARDS;
-import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
-import static org.apache.solr.common.params.CommonParams.ACTION;
-import static org.apache.solr.common.params.CoreAdminParams.*;
-import static org.apache.solr.common.params.CoreAdminParams.CoreAdminAction.CREATE;
-import static org.apache.solr.common.params.CoreAdminParams.CoreAdminAction.STATUS;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-
 /**
  * Unit tests for the /cores APIs.
  *
- * Note that the V2 requests made by these tests are not necessarily semantically valid. They shouldn't be taken as
- * examples. In several instances, mutually exclusive JSON parameters are provided. This is done to exercise conversion
- * of all parameters, even if particular combinations are never expected in the same request.
+ * <p>Note that the V2 requests made by these tests are not necessarily semantically valid. They
+ * shouldn't be taken as examples. In several instances, mutually exclusive JSON parameters are
+ * provided. This is done to exercise conversion of all parameters, even if particular combinations
+ * are never expected in the same request.
  */
 public class V2CoresAPIMappingTest extends SolrTestCaseJ4 {
-    private ApiBag apiBag;
-    private ArgumentCaptor<SolrQueryRequest> queryRequestCaptor;
-    private CoreAdminHandler mockCoreAdminHandler;
-
-    @BeforeClass
-    public static void ensureWorkingMockito() {
-        assumeWorkingMockito();
-    }
-
-    @Before
-    public void setUpMocks() {
-        mockCoreAdminHandler = mock(CoreAdminHandler.class);
-        queryRequestCaptor = ArgumentCaptor.forClass(SolrQueryRequest.class);
-
-        apiBag = new ApiBag(false);
-        apiBag.registerObject(new CreateCoreAPI(mockCoreAdminHandler));
-        apiBag.registerObject(new SingleCoreStatusAPI(mockCoreAdminHandler));
-        apiBag.registerObject(new AllCoresStatusAPI(mockCoreAdminHandler));
-    }
-
-    @Test
-    public void testCreateCoreAllProperties() throws Exception {
-        final SolrParams v1Params = captureConvertedV1Params("/cores", "POST",
-            "{'create': {" +
-                "'name': 'someCoreName', " +
-                "'instanceDir': 'someInstanceDir', " +
-                "'dataDir': 'someDataDir', " +
-                "'ulogDir': 'someUpdateLogDirectory', " +
-                "'schema': 'some-schema-file-name', " +
-                "'config': 'some-config-file-name', " +
-                "'configSet': 'someConfigSetName', " +
-                "'loadOnStartup': true, " +
-                "'isTransient': true, " +
-                "'shard': 'someShardName', " +
-                "'collection': 'someCollectionName', " +
-                "'replicaType': 'TLOG', " +
-                "'coreNodeName': 'someNodeName', " +
-                "'numShards': 123, " +
-                "'roles': ['role1', 'role2'], " +
-                "'properties': {'prop1': 'val1', 'prop2': 'val2'}, " +
-                "'newCollection': true, " +
-                "'async': 'requestTrackingId' " +
-                "}}");
-
-        assertEquals(CREATE.name().toLowerCase(Locale.ROOT), v1Params.get(ACTION));
-        assertEquals("someCoreName", v1Params.get(NAME));
-        assertEquals("someInstanceDir", v1Params.get(INSTANCE_DIR));
-        assertEquals("someDataDir", v1Params.get(DATA_DIR));
-        assertEquals("someUpdateLogDirectory", v1Params.get(ULOG_DIR));
-        assertEquals("some-schema-file-name", v1Params.get(SCHEMA));
-        assertEquals("some-config-file-name", v1Params.get(CONFIG));
-        assertEquals("someConfigSetName", v1Params.get(CONFIGSET));
-        assertEquals(true, v1Params.getPrimitiveBool(LOAD_ON_STARTUP));
-        assertEquals(true, v1Params.getPrimitiveBool(TRANSIENT));
-        assertEquals("someShardName", v1Params.get(SHARD));
-        assertEquals("someCollectionName", v1Params.get(COLLECTION));
-        assertEquals("TLOG", v1Params.get(REPLICA_TYPE));
-        assertEquals("someNodeName", v1Params.get(CORE_NODE_NAME));
-        assertEquals(123, v1Params.getPrimitiveInt(NUM_SHARDS));
-        assertEquals("role1,role2", v1Params.get(ROLES));
-        assertEquals("val1", v1Params.get("property.prop1"));
-        assertEquals("val2", v1Params.get("property.prop2"));
-        assertEquals(true, v1Params.getPrimitiveBool(NEW_COLLECTION));
-        assertEquals("requestTrackingId", v1Params.get(ASYNC));
-    }
-
-    @Test
-    public void testSpecificCoreStatusApiAllParams() throws Exception {
-        final SolrParams v1Params = captureConvertedV1Params("/cores/someCore", "GET",
-            Map.of(INDEX_INFO, new String[] { "true" }));
-
-        assertEquals(STATUS.name().toLowerCase(Locale.ROOT), v1Params.get(ACTION));
-        assertEquals("someCore", v1Params.get(CORE));
-        assertEquals(true, v1Params.getPrimitiveBool(INDEX_INFO));
-    }
-
-    @Test
-    public void testAllCoreStatusApiAllParams() throws Exception {
-        final SolrParams v1Params = captureConvertedV1Params("/cores", "GET",
-            Map.of(INDEX_INFO, new String[] { "true" }));
-
-        assertEquals(STATUS.name().toLowerCase(Locale.ROOT), v1Params.get(ACTION));
-        assertNull("Expected 'core' parameter to be null", v1Params.get(CORE));
-        assertEquals(true, v1Params.getPrimitiveBool(INDEX_INFO));
-    }
-
-    private SolrParams captureConvertedV1Params(String path, String method, String v2RequestBody) throws Exception {
-        final HashMap<String, String> parts = new HashMap<>();
-        final Api api = apiBag.lookup(path, method, parts);
-        final SolrQueryResponse rsp = new SolrQueryResponse();
-        final LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, Maps.newHashMap()) {
-            @Override
-            public List<CommandOperation> getCommands(boolean validateInput) {
-                if (v2RequestBody == null) return Collections.emptyList();
-                return ApiBag.getCommandOperations(new ContentStreamBase.StringStream(v2RequestBody), api.getCommandSchema(), true);
-            }
-
-            @Override
-            public Map<String, String> getPathTemplateValues() {
-                return parts;
-            }
-
-            @Override
-            public String getHttpMethod() {
-                return method;
-            }
-        };
-
-        api.call(req, rsp);
-        verify(mockCoreAdminHandler).handleRequestBody(queryRequestCaptor.capture(), any());
-        return queryRequestCaptor.getValue().getParams();
-    }
-
-    private SolrParams captureConvertedV1Params(String path, String method, Map<String, String[]> queryParams) throws Exception {
-        final HashMap<String, String> parts = new HashMap<>();
-        final Api api = apiBag.lookup(path, method, parts);
-        final SolrQueryResponse rsp = new SolrQueryResponse();
-        final LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, queryParams) {
-            @Override
-            public List<CommandOperation> getCommands(boolean validateInput) {
-                return Collections.emptyList();
-            }
-
-            @Override
-            public Map<String, String> getPathTemplateValues() {
-                return parts;
-            }
-
-            @Override
-            public String getHttpMethod() {
-                return method;
-            }
-        };
-
-        api.call(req, rsp);
-        verify(mockCoreAdminHandler).handleRequestBody(queryRequestCaptor.capture(), any());
-        return queryRequestCaptor.getValue().getParams();
-    }
+  private ApiBag apiBag;
+  private ArgumentCaptor<SolrQueryRequest> queryRequestCaptor;
+  private CoreAdminHandler mockCoreAdminHandler;
+
+  @BeforeClass
+  public static void ensureWorkingMockito() {
+    assumeWorkingMockito();
+  }
+
+  @Before
+  public void setUpMocks() {
+    mockCoreAdminHandler = mock(CoreAdminHandler.class);
+    queryRequestCaptor = ArgumentCaptor.forClass(SolrQueryRequest.class);
+
+    apiBag = new ApiBag(false);
+    apiBag.registerObject(new CreateCoreAPI(mockCoreAdminHandler));
+    apiBag.registerObject(new SingleCoreStatusAPI(mockCoreAdminHandler));
+    apiBag.registerObject(new AllCoresStatusAPI(mockCoreAdminHandler));
+  }
+
+  @Test
+  public void testCreateCoreAllProperties() throws Exception {
+    final SolrParams v1Params =
+        captureConvertedV1Params(
+            "/cores",
+            "POST",
+            "{'create': {"
+                + "'name': 'someCoreName', "
+                + "'instanceDir': 'someInstanceDir', "
+                + "'dataDir': 'someDataDir', "
+                + "'ulogDir': 'someUpdateLogDirectory', "
+                + "'schema': 'some-schema-file-name', "
+                + "'config': 'some-config-file-name', "
+                + "'configSet': 'someConfigSetName', "
+                + "'loadOnStartup': true, "
+                + "'isTransient': true, "
+                + "'shard': 'someShardName', "
+                + "'collection': 'someCollectionName', "
+                + "'replicaType': 'TLOG', "
+                + "'coreNodeName': 'someNodeName', "
+                + "'numShards': 123, "
+                + "'roles': ['role1', 'role2'], "
+                + "'properties': {'prop1': 'val1', 'prop2': 'val2'}, "
+                + "'newCollection': true, "
+                + "'async': 'requestTrackingId' "
+                + "}}");
+
+    assertEquals(CREATE.name().toLowerCase(Locale.ROOT), v1Params.get(ACTION));
+    assertEquals("someCoreName", v1Params.get(NAME));
+    assertEquals("someInstanceDir", v1Params.get(INSTANCE_DIR));
+    assertEquals("someDataDir", v1Params.get(DATA_DIR));
+    assertEquals("someUpdateLogDirectory", v1Params.get(ULOG_DIR));
+    assertEquals("some-schema-file-name", v1Params.get(SCHEMA));
+    assertEquals("some-config-file-name", v1Params.get(CONFIG));
+    assertEquals("someConfigSetName", v1Params.get(CONFIGSET));
+    assertEquals(true, v1Params.getPrimitiveBool(LOAD_ON_STARTUP));
+    assertEquals(true, v1Params.getPrimitiveBool(TRANSIENT));
+    assertEquals("someShardName", v1Params.get(SHARD));
+    assertEquals("someCollectionName", v1Params.get(COLLECTION));
+    assertEquals("TLOG", v1Params.get(REPLICA_TYPE));
+    assertEquals("someNodeName", v1Params.get(CORE_NODE_NAME));
+    assertEquals(123, v1Params.getPrimitiveInt(NUM_SHARDS));
+    assertEquals("role1,role2", v1Params.get(ROLES));
+    assertEquals("val1", v1Params.get("property.prop1"));
+    assertEquals("val2", v1Params.get("property.prop2"));
+    assertEquals(true, v1Params.getPrimitiveBool(NEW_COLLECTION));
+    assertEquals("requestTrackingId", v1Params.get(ASYNC));
+  }
+
+  @Test
+  public void testSpecificCoreStatusApiAllParams() throws Exception {
+    final SolrParams v1Params =
+        captureConvertedV1Params(
+            "/cores/someCore", "GET", Map.of(INDEX_INFO, new String[] {"true"}));
+
+    assertEquals(STATUS.name().toLowerCase(Locale.ROOT), v1Params.get(ACTION));
+    assertEquals("someCore", v1Params.get(CORE));
+    assertEquals(true, v1Params.getPrimitiveBool(INDEX_INFO));
+  }
+
+  @Test
+  public void testAllCoreStatusApiAllParams() throws Exception {
+    final SolrParams v1Params =
+        captureConvertedV1Params("/cores", "GET", Map.of(INDEX_INFO, new String[] {"true"}));
+
+    assertEquals(STATUS.name().toLowerCase(Locale.ROOT), v1Params.get(ACTION));
+    assertNull("Expected 'core' parameter to be null", v1Params.get(CORE));
+    assertEquals(true, v1Params.getPrimitiveBool(INDEX_INFO));
+  }
+
+  private SolrParams captureConvertedV1Params(String path, String method, String v2RequestBody)
+      throws Exception {
+    final HashMap<String, String> parts = new HashMap<>();
+    final Api api = apiBag.lookup(path, method, parts);
+    final SolrQueryResponse rsp = new SolrQueryResponse();
+    final LocalSolrQueryRequest req =
+        new LocalSolrQueryRequest(null, Maps.newHashMap()) {
+          @Override
+          public List<CommandOperation> getCommands(boolean validateInput) {
+            if (v2RequestBody == null) return Collections.emptyList();
+            return ApiBag.getCommandOperations(
+                new ContentStreamBase.StringStream(v2RequestBody), api.getCommandSchema(), true);
+          }
+
+          @Override
+          public Map<String, String> getPathTemplateValues() {
+            return parts;
+          }
+
+          @Override
+          public String getHttpMethod() {
+            return method;
+          }
+        };
+    api.call(req, rsp);
+    verify(mockCoreAdminHandler).handleRequestBody(queryRequestCaptor.capture(), any());
+    return queryRequestCaptor.getValue().getParams();
+  }
+
+  private SolrParams captureConvertedV1Params(
+      String path, String method, Map<String, String[]> queryParams) throws Exception {
+    final HashMap<String, String> parts = new HashMap<>();
+    final Api api = apiBag.lookup(path, method, parts);
+    final SolrQueryResponse rsp = new SolrQueryResponse();
+    final LocalSolrQueryRequest req =
+        new LocalSolrQueryRequest(null, queryParams) {
+          @Override
+          public List<CommandOperation> getCommands(boolean validateInput) {
+            return Collections.emptyList();
+          }
+
+          @Override
+          public Map<String, String> getPathTemplateValues() {
+            return parts;
+          }
+
+          @Override
+          public String getHttpMethod() {
+            return method;
+          }
        };
+    api.call(req, rsp);
+    verify(mockCoreAdminHandler).handleRequestBody(queryRequestCaptor.capture(), any());
+    return queryRequestCaptor.getValue().getParams();
+  }
 }
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/ZookeeperReadAPITest.java b/solr/core/src/test/org/apache/solr/handler/admin/ZookeeperReadAPITest.java
index 3376d34cb2f..bee96cb6d05 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/ZookeeperReadAPITest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/ZookeeperReadAPITest.java
@@ -17,10 +17,12 @@
 package org.apache.solr.handler.admin;
 
+import static org.apache.solr.common.util.StrUtils.split;
+import static org.apache.solr.common.util.Utils.getObjectByPath;
+
 import java.lang.invoke.MethodHandles;
 import java.net.URL;
 import java.util.Map;
-
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.cloud.SolrCloudTestCase;
 import org.apache.solr.common.util.Utils;
@@ -32,17 +34,12 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.apache.solr.common.util.StrUtils.split;
-import static org.apache.solr.common.util.Utils.getObjectByPath;
-
 public class ZookeeperReadAPITest extends SolrCloudTestCase {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @BeforeClass
   public static void setupCluster() throws Exception {
-    configureCluster(1)
-        .addConfig("conf", configset("cloud-minimal"))
-        .configure();
+    configureCluster(1).addConfig("conf", configset("cloud-minimal")).configure();
   }
 
   @Before
@@ -64,29 +61,30 @@ public void testZkread() throws Exception {
     String basezkls = baseUrl.toString().replace("/solr", "/api") + "/cluster/zk/ls";
 
     try (HttpSolrClient client = new HttpSolrClient.Builder(baseUrl.toString()).build()) {
-      Object o = Utils.executeGET(client.getHttpClient(),
-          basezk + "/security.json",
-          Utils.JSONCONSUMER);
+      Object o =
+          Utils.executeGET(client.getHttpClient(), basezk + "/security.json", Utils.JSONCONSUMER);
       assertNotNull(o);
-      o = Utils.executeGET(client.getHttpClient(),
-          basezkls + "/configs",
-          Utils.JSONCONSUMER);
-      assertEquals("0", String.valueOf(getObjectByPath(o, true, split(":/configs:_default:dataLength", ':'))));
-      assertEquals("0", String.valueOf(getObjectByPath(o, true, split(":/configs:conf:dataLength", ':'))));
+      o = Utils.executeGET(client.getHttpClient(), basezkls + "/configs", Utils.JSONCONSUMER);
+      assertEquals(
+          "0",
+          String.valueOf(getObjectByPath(o, true, split(":/configs:_default:dataLength", ':'))));
+      assertEquals(
+          "0", String.valueOf(getObjectByPath(o, true, split(":/configs:conf:dataLength", ':'))));
       assertEquals("0", String.valueOf(getObjectByPath(o, true, split("/stat/version", '/'))));
 
-      o = Utils.executeGET(client.getHttpClient(),
-          basezk + "/configs",
-          Utils.JSONCONSUMER);
-      assertTrue(((Map)o).containsKey("/configs"));
-      assertNull(((Map)o).get("/configs"));
+      o = Utils.executeGET(client.getHttpClient(), basezk + "/configs", Utils.JSONCONSUMER);
+      assertTrue(((Map) o).containsKey("/configs"));
+      assertNull(((Map) o).get("/configs"));
 
       byte[] bytes = new byte[1024 * 5];
       for (int i = 0; i < bytes.length; i++) {
        bytes[i] = (byte) random().nextInt(128);
       }
-      cluster.getZkClient().create("/configs/_default/testdata", bytes, CreateMode.PERSISTENT, true);
-      Utils.executeGET(client.getHttpClient(),
+      cluster
+          .getZkClient()
+          .create("/configs/_default/testdata", bytes, CreateMode.PERSISTENT, true);
+      Utils.executeGET(
+          client.getHttpClient(),
           basezk + "/configs/_default/testdata",
           is -> {
             byte[] newBytes = new byte[bytes.length];
@@ -98,5 +96,4 @@ public void testZkread() throws Exception {
       });
     }
   }
-
-}
\ No newline at end of file
+}
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/ZookeeperStatusHandlerFailureTest.java b/solr/core/src/test/org/apache/solr/handler/admin/ZookeeperStatusHandlerFailureTest.java
index 090117a0b85..2c6b980a57e 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/ZookeeperStatusHandlerFailureTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/ZookeeperStatusHandlerFailureTest.java
@@ -17,6 +17,13 @@
 package org.apache.solr.handler.admin;
 
+import java.io.IOException;
+import java.net.URL;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
@@ -28,20 +35,10 @@
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import java.io.IOException;
-import java.net.URL;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-
 public class ZookeeperStatusHandlerFailureTest extends SolrCloudTestCase {
   @BeforeClass
   public static void setupCluster() throws Exception {
-    configureCluster(1)
-        .addConfig("conf", configset("cloud-minimal"))
-        .configure();
+    configureCluster(1).addConfig("conf", configset("cloud-minimal")).configure();
     // Kill the ZK
     cluster.getZkServer().shutdown();
   }
@@ -50,25 +47,29 @@ public static void setupCluster() throws Exception {
     Test the monitoring endpoint, when no Zookeeper is answering.
     There should still be a response
    */
   @Test
-  public void monitorZookeeperAfterZkShutdown() throws IOException, SolrServerException, InterruptedException, ExecutionException, TimeoutException {
+  public void monitorZookeeperAfterZkShutdown()
+      throws IOException, SolrServerException, InterruptedException, ExecutionException,
+          TimeoutException {
     URL baseUrl = cluster.getJettySolrRunner(0).getBaseUrl();
     HttpSolrClient solr = new HttpSolrClient.Builder(baseUrl.toString()).build();
-    GenericSolrRequest mntrReq = new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/zookeeper/status", new ModifiableSolrParams());
+    GenericSolrRequest mntrReq =
+        new GenericSolrRequest(
+            SolrRequest.METHOD.GET, "/admin/zookeeper/status", new ModifiableSolrParams());
     mntrReq.setResponseParser(new DelegationTokenResponse.JsonMapResponseParser());
     NamedList<Object> nl = solr.httpUriRequest(mntrReq).future.get(10000, TimeUnit.MILLISECONDS);
 
     assertEquals("zkStatus", nl.getName(1));
     @SuppressWarnings({"unchecked"})
-    Map<String,Object> zkStatus = (Map<String,Object>) nl.get("zkStatus");
+    Map<String, Object> zkStatus = (Map<String, Object>) nl.get("zkStatus");
     assertEquals("red", zkStatus.get("status"));
     assertEquals("standalone", zkStatus.get("mode"));
     assertEquals(1L, zkStatus.get("ensembleSize"));
     @SuppressWarnings({"unchecked"})
-    List<Object> detailsList = (List<Object>)zkStatus.get("details");
+    List<Object> detailsList = (List<Object>) zkStatus.get("details");
     assertEquals(1, detailsList.size());
     @SuppressWarnings({"unchecked"})
-    Map<String,Object> details = (Map<String,Object>) detailsList.get(0);
+    Map<String, Object> details = (Map<String, Object>) detailsList.get(0);
     assertEquals(false, details.get("ok"));
     solr.close();
   }
-}
\ No newline at end of file
+}
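The ZookeeperStatusHandlerTest hunks below lean on one Mockito idiom worth calling out: a full mock whose network-facing method (getZkRawResponse) is stubbed with canned ZooKeeper output, while the logic actually under test (getZkStatus, monitorZookeeper, validateZkRawResponse) is routed back to the real implementation via thenCallRealMethod()/CALLS_REAL_METHODS. A minimal sketch of that partial-mock pattern with a made-up class (Fetcher is hypothetical, not Solr code):

  import static org.mockito.ArgumentMatchers.anyString;
  import static org.mockito.Mockito.mock;
  import static org.mockito.Mockito.when;

  public class PartialMockSketch {
    // Hypothetical stand-in for ZookeeperStatusHandler.
    static class Fetcher {
      String fetchRaw(String host) {
        throw new UnsupportedOperationException("would hit the network");
      }

      String status(String host) {
        // The real logic under test: interprets whatever fetchRaw() returns.
        return fetchRaw(host).isEmpty() ? "red" : "green";
      }
    }

    public static void main(String[] args) {
      Fetcher fetcher = mock(Fetcher.class);
      when(fetcher.fetchRaw("zoo1:2181")).thenReturn("imok"); // canned wire response
      when(fetcher.status(anyString())).thenCallRealMethod(); // run the real logic
      System.out.println(fetcher.status("zoo1:2181")); // -> green
    }
  }

This keeps the status-aggregation logic exercised against realistic raw strings (including the "not in the whitelist" and empty responses seen below) without any live ZooKeeper.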
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/ZookeeperStatusHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/ZookeeperStatusHandlerTest.java
index f9efc393929..47f77e2431a 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/ZookeeperStatusHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/ZookeeperStatusHandlerTest.java
@@ -17,6 +17,11 @@
 package org.apache.solr.handler.admin;
 
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
 import java.io.IOException;
 import java.net.URL;
 import java.util.Arrays;
@@ -26,7 +31,6 @@
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
-
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
@@ -45,17 +49,10 @@
 import org.mockito.ArgumentMatchers;
 import org.noggit.JSONUtil;
 
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
 public class ZookeeperStatusHandlerTest extends SolrCloudTestCase {
   @BeforeClass
   public static void setupCluster() throws Exception {
-    configureCluster(1)
-        .addConfig("conf", configset("cloud-minimal"))
-        .configure();
+    configureCluster(1).addConfig("conf", configset("cloud-minimal")).configure();
   }
 
   @Before
@@ -71,28 +68,32 @@ public void tearDown() throws Exception {
   }
 
   /*
-  Test the monitoring endpoint, used in the Cloud => ZkStatus Admin UI screen
-  NOTE: We do not currently test with multiple zookeepers, but the only difference is that there are multiple "details" objects and mode is "ensemble"...
-   */
+  Test the monitoring endpoint, used in the Cloud => ZkStatus Admin UI screen
+  NOTE: We do not currently test with multiple zookeepers, but the only difference is that there are multiple "details" objects and mode is "ensemble"...
+  */
   @Test
-  public void monitorZookeeper() throws IOException, SolrServerException, InterruptedException, ExecutionException, TimeoutException {
+  public void monitorZookeeper()
+      throws IOException, SolrServerException, InterruptedException, ExecutionException,
+          TimeoutException {
     URL baseUrl = cluster.getJettySolrRunner(0).getBaseUrl();
     HttpSolrClient solr = new HttpSolrClient.Builder(baseUrl.toString()).build();
-    GenericSolrRequest mntrReq = new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/zookeeper/status", new ModifiableSolrParams());
+    GenericSolrRequest mntrReq =
+        new GenericSolrRequest(
+            SolrRequest.METHOD.GET, "/admin/zookeeper/status", new ModifiableSolrParams());
     mntrReq.setResponseParser(new DelegationTokenResponse.JsonMapResponseParser());
     NamedList<Object> nl = solr.httpUriRequest(mntrReq).future.get(10000, TimeUnit.MILLISECONDS);
 
     assertEquals("zkStatus", nl.getName(1));
     @SuppressWarnings({"unchecked"})
-    Map<String,Object> zkStatus = (Map<String,Object>) nl.get("zkStatus");
+    Map<String, Object> zkStatus = (Map<String, Object>) nl.get("zkStatus");
     assertEquals("green", zkStatus.get("status"));
     assertEquals("standalone", zkStatus.get("mode"));
     assertEquals(1L, zkStatus.get("ensembleSize"));
     @SuppressWarnings({"unchecked"})
-    List<Object> detailsList = (List<Object>)zkStatus.get("details");
+    List<Object> detailsList = (List<Object>) zkStatus.get("details");
     assertEquals(1, detailsList.size());
     @SuppressWarnings({"unchecked"})
-    Map<String,Object> details = (Map<String,Object>) detailsList.get(0);
+    Map<String, Object> details = (Map<String, Object>) detailsList.get(0);
     assertEquals(true, details.get("ok"));
     assertTrue(Integer.parseInt((String) details.get("zk_znode_count")) > 50);
     solr.close();
@@ -103,67 +104,75 @@
   public void testEnsembleStatusMock() {
     assumeWorkingMockito();
     ZookeeperStatusHandler zkStatusHandler = mock(ZookeeperStatusHandler.class);
     when(zkStatusHandler.getZkRawResponse("zoo1:2181", "ruok")).thenReturn(Arrays.asList("imok"));
-    when(zkStatusHandler.getZkRawResponse("zoo1:2181", "mntr")).thenReturn(
-        Arrays.asList("zk_version\t3.5.5-390fe37ea45dee01bf87dc1c042b5e3dcce88653, built on 05/03/2019 12:07 GMT",
-            "zk_avg_latency\t1"));
-    when(zkStatusHandler.getZkRawResponse("zoo1:2181", "conf")).thenReturn(
-        Arrays.asList("clientPort=2181",
-            "secureClientPort=-1",
-            "thisIsUnexpected",
-            "membership: "));
+    when(zkStatusHandler.getZkRawResponse("zoo1:2181", "mntr"))
+        .thenReturn(
+            Arrays.asList(
+                "zk_version\t3.5.5-390fe37ea45dee01bf87dc1c042b5e3dcce88653, built on 05/03/2019 12:07 GMT",
+                "zk_avg_latency\t1"));
+    when(zkStatusHandler.getZkRawResponse("zoo1:2181", "conf"))
+        .thenReturn(
+            Arrays.asList(
+                "clientPort=2181", "secureClientPort=-1", "thisIsUnexpected", "membership: "));
     when(zkStatusHandler.getZkRawResponse("zoo2:2181", "ruok")).thenReturn(Arrays.asList(""));
     when(zkStatusHandler.getZkRawResponse("zoo3:2181", "ruok")).thenReturn(Arrays.asList("imok"));
-    when(zkStatusHandler.getZkRawResponse("zoo3:2181", "mntr")).thenReturn(
-        Arrays.asList("mntr is not executed because it is not in the whitelist.")); // Actual response from ZK if not whitelisted
-    when(zkStatusHandler.getZkRawResponse("zoo3:2181", "conf")).thenReturn(
-        Arrays.asList("clientPort=2181"));
+    // Actual response from ZK if not whitelisted
+    when(zkStatusHandler.getZkRawResponse("zoo3:2181", "mntr"))
+        .thenReturn(Arrays.asList("mntr is not executed because it is not in the whitelist."));
+    when(zkStatusHandler.getZkRawResponse("zoo3:2181", "conf"))
+        .thenReturn(Arrays.asList("clientPort=2181"));
     when(zkStatusHandler.getZkStatus(anyString(), any())).thenCallRealMethod();
     when(zkStatusHandler.monitorZookeeper(anyString())).thenCallRealMethod();
-    when(zkStatusHandler.validateZkRawResponse(ArgumentMatchers.any(), any(), any())).thenAnswer(Answers.CALLS_REAL_METHODS);
+    when(zkStatusHandler.validateZkRawResponse(ArgumentMatchers.any(), any(), any()))
+        .thenAnswer(Answers.CALLS_REAL_METHODS);
 
-    ZkDynamicConfig zkDynamicConfig = ZkDynamicConfig.parseLines(
-        "server.1=zoo1:2780:2783:participant;0.0.0.0:2181\n" +
-            "server.2=zoo2:2781:2784:participant;0.0.0.0:2181\n" +
-            "server.3=zoo3:2782:2785:participant;0.0.0.0:2181\n" +
-            "version=400000003");
-    Map<String, Object> mockStatus = zkStatusHandler.getZkStatus("zoo4:2181,zoo5:2181,zoo6:2181", zkDynamicConfig);
-    String expected = "{\n" +
-        "  \"dynamicReconfig\":true,\n" +
-        "  \"ensembleSize\":3,\n" +
-        "  \"details\":[\n" +
-        "    {\n" +
-        "      \"role\":\"participant\",\n" +
-        "      \"zk_version\":\"3.5.5-390fe37ea45dee01bf87dc1c042b5e3dcce88653, built on 05/03/2019 12:07 GMT\",\n" +
-        "      \"zk_avg_latency\":\"1\",\n" +
-        "      \"host\":\"zoo1:2181\",\n" +
-        "      \"clientPort\":\"2181\",\n" +
-        "      \"secureClientPort\":\"-1\",\n" +
-        "      \"ok\":true},\n" +
-        "    {\n" +
-        "      \"host\":\"zoo2:2181\",\n" +
-        "      \"ok\":false},\n" +
-        "    {\n" +
-        "      \"host\":\"zoo3:2181\",\n" +
-        "      \"ok\":false}],\n" +
-        "  \"zkHost\":\"zoo4:2181,zoo5:2181,zoo6:2181\",\n" +
-        "  \"errors\":[\n" +
-        "    \"Your ZK connection string (3 hosts) is different from the dynamic ensemble config (3 hosts). Solr does not currently support dynamic reconfiguration and will only be able to connect to the zk hosts in your connection string.\",\n" +
-        "    \"Unexpected line in 'conf' response from Zookeeper zoo1:2181: thisIsUnexpected\",\n" +
-        "    \"Empty response from Zookeeper zoo2:2181\",\n" +
-        "    \"Could not execute mntr towards ZK host zoo3:2181. Add this line to the 'zoo.cfg' configuration file on each zookeeper node: '4lw.commands.whitelist=mntr,conf,ruok'. See also chapter 'Setting Up an External ZooKeeper Ensemble' in the Solr Reference Guide.\"],\n" +
-        "  \"status\":\"yellow\"}";
+    ZkDynamicConfig zkDynamicConfig =
+        ZkDynamicConfig.parseLines(
+            "server.1=zoo1:2780:2783:participant;0.0.0.0:2181\n"
+                + "server.2=zoo2:2781:2784:participant;0.0.0.0:2181\n"
+                + "server.3=zoo3:2782:2785:participant;0.0.0.0:2181\n"
+                + "version=400000003");
+    Map<String, Object> mockStatus =
+        zkStatusHandler.getZkStatus("zoo4:2181,zoo5:2181,zoo6:2181", zkDynamicConfig);
+    String expected =
+        "{\n"
+            + "  \"dynamicReconfig\":true,\n"
+            + "  \"ensembleSize\":3,\n"
+            + "  \"details\":[\n"
+            + "    {\n"
+            + "      \"role\":\"participant\",\n"
+            + "      \"zk_version\":\"3.5.5-390fe37ea45dee01bf87dc1c042b5e3dcce88653, built on 05/03/2019 12:07 GMT\",\n"
+            + "      \"zk_avg_latency\":\"1\",\n"
+            + "      \"host\":\"zoo1:2181\",\n"
+            + "      \"clientPort\":\"2181\",\n"
+            + "      \"secureClientPort\":\"-1\",\n"
+            + "      \"ok\":true},\n"
+            + "    {\n"
+            + "      \"host\":\"zoo2:2181\",\n"
+            + "      \"ok\":false},\n"
+            + "    {\n"
+            + "      \"host\":\"zoo3:2181\",\n"
+            + "      \"ok\":false}],\n"
+            + "  \"zkHost\":\"zoo4:2181,zoo5:2181,zoo6:2181\",\n"
+            + "  \"errors\":[\n"
+            + "    \"Your ZK connection string (3 hosts) is different from the dynamic ensemble config (3 hosts). Solr does not currently support dynamic reconfiguration and will only be able to connect to the zk hosts in your connection string.\",\n"
+            + "    \"Unexpected line in 'conf' response from Zookeeper zoo1:2181: thisIsUnexpected\",\n"
+            + "    \"Empty response from Zookeeper zoo2:2181\",\n"
+            + "    \"Could not execute mntr towards ZK host zoo3:2181. Add this line to the 'zoo.cfg' configuration file on each zookeeper node: '4lw.commands.whitelist=mntr,conf,ruok'. See also chapter 'Setting Up an External ZooKeeper Ensemble' in the Solr Reference Guide.\"],\n"
+            + "  \"status\":\"yellow\"}";
     assertEquals(expected, JSONUtil.toJSON(mockStatus));
   }
 
   @Test(expected = SolrException.class)
   public void validateNotWhitelisted() {
     try (ZookeeperStatusHandler zsh = new ZookeeperStatusHandler(null)) {
-      zsh.validateZkRawResponse(Collections.singletonList("mntr is not executed because it is not in the whitelist."),
-          "zoo1:2181", "mntr");
-    } catch (IOException e) {
+      zsh.validateZkRawResponse(
+          Collections.singletonList("mntr is not executed because it is not in the whitelist."),
+          "zoo1:2181",
+          "mntr");
+    } catch (IOException e) {
       fail("Error closing ZookeeperStatusHandler");
     }
   }
@@ -180,9 +189,11 @@ public void validateEmptyResponse() {
   @Test(expected = SolrException.class)
   public void validateNotServingRequestsResponse() {
     try (ZookeeperStatusHandler zsh = new ZookeeperStatusHandler(null)) {
-      zsh.validateZkRawResponse(Collections.singletonList("This ZooKeeper instance is not currently serving requests"),
-          "zoo1:2181", "mntr");
-    } catch (IOException e) {
+      zsh.validateZkRawResponse(
+          Collections.singletonList("This ZooKeeper instance is not currently serving requests"),
+          "zoo1:2181",
+          "mntr");
+    } catch (IOException e) {
       fail("Error closing ZookeeperStatusHandler");
     }
   }
@@ -192,33 +203,38 @@ public void testMntrBugZk36Solr14463() {
     assumeWorkingMockito();
     ZookeeperStatusHandler zkStatusHandler = mock(ZookeeperStatusHandler.class);
     when(zkStatusHandler.getZkRawResponse("zoo1:2181", "ruok")).thenReturn(Arrays.asList("imok"));
-    when(zkStatusHandler.getZkRawResponse("zoo1:2181", "mntr")).thenReturn(
-        Arrays.asList("zk_version\t3.5.5-390fe37ea45dee01bf87dc1c042b5e3dcce88653, built on 05/03/2019 12:07 GMT",
-            "zk_avg_latency\t1",
-            "zk_server_state\tleader",
-            "zk_synced_followers\t2"));
-    when(zkStatusHandler.getZkRawResponse("zoo1:2181", "conf")).thenReturn(
-        Arrays.asList("clientPort=2181"));
+    when(zkStatusHandler.getZkRawResponse("zoo1:2181", "mntr"))
+        .thenReturn(
+            Arrays.asList(
+                "zk_version\t3.5.5-390fe37ea45dee01bf87dc1c042b5e3dcce88653, built on 05/03/2019 12:07 GMT",
+                "zk_avg_latency\t1",
+                "zk_server_state\tleader",
+                "zk_synced_followers\t2"));
+    when(zkStatusHandler.getZkRawResponse("zoo1:2181", "conf"))
+        .thenReturn(Arrays.asList("clientPort=2181"));
     when(zkStatusHandler.getZkStatus(anyString(), any())).thenCallRealMethod();
     when(zkStatusHandler.monitorZookeeper(anyString())).thenCallRealMethod();
-    when(zkStatusHandler.validateZkRawResponse(ArgumentMatchers.any(), any(), any())).thenAnswer(Answers.CALLS_REAL_METHODS);
+    when(zkStatusHandler.validateZkRawResponse(ArgumentMatchers.any(), any(), any()))
+        .thenAnswer(Answers.CALLS_REAL_METHODS);
 
-    Map<String, Object> mockStatus = zkStatusHandler.getZkStatus("zoo1:2181", ZkDynamicConfig.fromZkConnectString("zoo1:2181"));
-    String expected = "{\n" +
-        "  \"mode\":\"ensemble\",\n" +
-        "  \"dynamicReconfig\":true,\n" +
-        "  \"ensembleSize\":1,\n" +
-        "  \"details\":[{\n" +
-        "      \"zk_synced_followers\":\"2\",\n" +
-        "      \"zk_version\":\"3.5.5-390fe37ea45dee01bf87dc1c042b5e3dcce88653, built on 05/03/2019 12:07 GMT\",\n" +
-        "      \"zk_avg_latency\":\"1\",\n" +
-        "      \"host\":\"zoo1:2181\",\n" +
-        "      \"clientPort\":\"2181\",\n" +
-        "      \"ok\":true,\n" +
-        "      \"zk_server_state\":\"leader\"}],\n" +
-        "  \"zkHost\":\"zoo1:2181\",\n" +
-        "  \"errors\":[\"Leader reports 2 followers, but we only found 0. Please check zkHost configuration\"],\n" +
-        "  \"status\":\"red\"}";
+    Map<String, Object> mockStatus =
+        zkStatusHandler.getZkStatus("zoo1:2181", ZkDynamicConfig.fromZkConnectString("zoo1:2181"));
+    String expected =
+        "{\n"
+            + "  \"mode\":\"ensemble\",\n"
+            + "  \"dynamicReconfig\":true,\n"
+            + "  \"ensembleSize\":1,\n"
+            + "  \"details\":[{\n"
+            + "      \"zk_synced_followers\":\"2\",\n"
+            + "      \"zk_version\":\"3.5.5-390fe37ea45dee01bf87dc1c042b5e3dcce88653, built on 05/03/2019 12:07 GMT\",\n"
+            + "      \"zk_avg_latency\":\"1\",\n"
+            + "      \"host\":\"zoo1:2181\",\n"
+            + "      \"clientPort\":\"2181\",\n"
+            + "      \"ok\":true,\n"
+            + "      \"zk_server_state\":\"leader\"}],\n"
+            + "  \"zkHost\":\"zoo1:2181\",\n"
+            + "  \"errors\":[\"Leader reports 2 followers, but we only found 0. Please check zkHost configuration\"],\n"
+            + "  \"status\":\"red\"}";
     assertEquals(expected, JSONUtil.toJSON(mockStatus));
   }
 
@@ -227,33 +243,39 @@ public void testZkWithPrometheusSolr14752() {
     assumeWorkingMockito();
     ZookeeperStatusHandler zkStatusHandler = mock(ZookeeperStatusHandler.class);
     when(zkStatusHandler.getZkRawResponse("zoo1:2181", "ruok")).thenReturn(Arrays.asList("imok"));
-    when(zkStatusHandler.getZkRawResponse("zoo1:2181", "mntr")).thenReturn(
-        Arrays.asList("zk_version\t3.6.1--104dcb3e3fb464b30c5186d229e00af9f332524b, built on 04/21/2020 15:01 GMT",
-            "zk_avg_latency\t0.24",
-            "zk_server_state\tleader",
-            "zk_synced_followers\t0.0"));
-    when(zkStatusHandler.getZkRawResponse("zoo1:2181", "conf")).thenReturn(
-        Arrays.asList("clientPort=2181"));
+    when(zkStatusHandler.getZkRawResponse("zoo1:2181", "mntr"))
+        .thenReturn(
+            Arrays.asList(
+                "zk_version\t3.6.1--104dcb3e3fb464b30c5186d229e00af9f332524b, built on 04/21/2020 15:01 GMT",
+                "zk_avg_latency\t0.24",
+                "zk_server_state\tleader",
+                "zk_synced_followers\t0.0"));
+    when(zkStatusHandler.getZkRawResponse("zoo1:2181", "conf"))
+        .thenReturn(Arrays.asList("clientPort=2181"));
     when(zkStatusHandler.getZkStatus(anyString(), any())).thenCallRealMethod();
     when(zkStatusHandler.monitorZookeeper(anyString())).thenCallRealMethod();
-    when(zkStatusHandler.validateZkRawResponse(ArgumentMatchers.any(), any(), any())).thenAnswer(Answers.CALLS_REAL_METHODS);
+    when(zkStatusHandler.validateZkRawResponse(ArgumentMatchers.any(), any(), any()))
+        .thenAnswer(Answers.CALLS_REAL_METHODS);
 
-    // Verifying that parsing the status strings with floating point no longer triggers a NumberFormatException, although floats are still displayed in UI
-    Map<String, Object> mockStatus = zkStatusHandler.getZkStatus("Zoo1:2181", ZkDynamicConfig.fromZkConnectString("zoo1:2181"));
-    String expected = "{\n" +
-        "  \"mode\":\"ensemble\",\n" +
-        "  \"dynamicReconfig\":true,\n" +
-        "  \"ensembleSize\":1,\n" +
-        "  \"details\":[{\n" +
-        "      \"zk_synced_followers\":\"0.0\",\n" +
-        "      \"zk_version\":\"3.6.1--104dcb3e3fb464b30c5186d229e00af9f332524b, built on 04/21/2020 15:01 GMT\",\n" +
-        "      \"zk_avg_latency\":\"0.24\",\n" +
-        "      \"host\":\"zoo1:2181\",\n" +
-        "      \"clientPort\":\"2181\",\n" +
-        "      \"ok\":true,\n" +
-        "      \"zk_server_state\":\"leader\"}],\n" +
-        "  \"zkHost\":\"Zoo1:2181\",\n" +
-        "  \"status\":\"green\"}";
+    // Verifying that parsing the status strings with floating point no longer
triggers a + // NumberFormatException, although floats are still displayed in UI + Map mockStatus = + zkStatusHandler.getZkStatus("Zoo1:2181", ZkDynamicConfig.fromZkConnectString("zoo1:2181")); + String expected = + "{\n" + + " \"mode\":\"ensemble\",\n" + + " \"dynamicReconfig\":true,\n" + + " \"ensembleSize\":1,\n" + + " \"details\":[{\n" + + " \"zk_synced_followers\":\"0.0\",\n" + + " \"zk_version\":\"3.6.1--104dcb3e3fb464b30c5186d229e00af9f332524b, built on 04/21/2020 15:01 GMT\",\n" + + " \"zk_avg_latency\":\"0.24\",\n" + + " \"host\":\"zoo1:2181\",\n" + + " \"clientPort\":\"2181\",\n" + + " \"ok\":true,\n" + + " \"zk_server_state\":\"leader\"}],\n" + + " \"zkHost\":\"Zoo1:2181\",\n" + + " \"status\":\"green\"}"; assertEquals(expected, JSONUtil.toJSON(mockStatus)); } -} \ No newline at end of file +} diff --git a/solr/core/src/test/org/apache/solr/handler/admin/api/V2CollectionAPIMappingTest.java b/solr/core/src/test/org/apache/solr/handler/admin/api/V2CollectionAPIMappingTest.java index 0f073e92f45..3deaf524185 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/api/V2CollectionAPIMappingTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/api/V2CollectionAPIMappingTest.java @@ -17,7 +17,21 @@ package org.apache.solr.handler.admin.api; +import static org.apache.solr.common.params.CollectionAdminParams.COLLECTION; +import static org.apache.solr.common.params.CollectionAdminParams.COLL_CONF; +import static org.apache.solr.common.params.CommonAdminParams.ASYNC; +import static org.apache.solr.common.params.CommonParams.ACTION; +import static org.apache.solr.common.params.CommonParams.NAME; +import static org.apache.solr.common.params.CoreAdminParams.SHARD; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + import com.google.common.collect.Maps; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.api.Api; import org.apache.solr.api.ApiBag; @@ -37,32 +51,20 @@ import org.junit.Test; import org.mockito.ArgumentCaptor; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.apache.solr.common.params.CollectionAdminParams.COLLECTION; -import static org.apache.solr.common.params.CollectionAdminParams.COLL_CONF; -import static org.apache.solr.common.params.CommonAdminParams.ASYNC; -import static org.apache.solr.common.params.CommonParams.ACTION; -import static org.apache.solr.common.params.CommonParams.NAME; -import static org.apache.solr.common.params.CoreAdminParams.SHARD; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; - /** - * Unit tests for the V2 APIs found in {@link org.apache.solr.handler.admin.api} that use the /c/{collection} path. + * Unit tests for the V2 APIs found in {@link org.apache.solr.handler.admin.api} that use the + * /c/{collection} path. * - * This test bears many similarities to {@link TestCollectionAPIs} which appears to test the mappings indirectly by - * checking message sent to the ZK overseer (which is similar, but not identical to the v1 param list). If there's no - * particular benefit to testing the mappings in this way (there very well may be), then we should combine these two - * test classes at some point in the future using the simpler approach here. + *

<p>This test bears many similarities to {@link TestCollectionAPIs} which appears to test the
+ * mappings indirectly by checking message sent to the ZK overseer (which is similar, but not
+ * identical to the v1 param list). If there's no particular benefit to testing the mappings in this
+ * way (there very well may be), then we should combine these two test classes at some point in the
+ * future using the simpler approach here.
 *
- * Note that the V2 requests made by these tests are not necessarily semantically valid. They shouldn't be taken as
- * examples. In several instances, mutually exclusive JSON parameters are provided. This is done to exercise conversion
- * of all parameters, even if particular combinations are never expected in the same request.
+ *
+ * <p>
Note that the V2 requests made by these tests are not necessarily semantically valid. They + * shouldn't be taken as examples. In several instances, mutually exclusive JSON parameters are + * provided. This is done to exercise conversion of all parameters, even if particular combinations + * are never expected in the same request. */ public class V2CollectionAPIMappingTest extends SolrTestCaseJ4 { private ApiBag apiBag; @@ -86,7 +88,9 @@ public void setupApiBag() throws Exception { @Test public void testGetCollectionStatus() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/collections/collName", "GET", Map.of(SHARD, new String[]{"shard2"})); + final SolrParams v1Params = + captureConvertedV1Params( + "/collections/collName", "GET", Map.of(SHARD, new String[] {"shard2"})); assertEquals(CollectionParams.CollectionAction.CLUSTERSTATUS.toString(), v1Params.get(ACTION)); assertEquals("collName", v1Params.get(COLLECTION)); @@ -95,19 +99,23 @@ public void testGetCollectionStatus() throws Exception { @Test public void testModifyCollectionAllProperties() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/collections/collName", "POST", - "{ 'modify': {" + - "'replicationFactor': 123, " + - "'readOnly': true, " + - "'config': 'techproducts_config', " + - "'async': 'requestTrackingId', " + - "'properties': {" + - " 'foo': 'bar', " + - " 'baz': 456 " + - "}" + - "}}"); - - assertEquals(CollectionParams.CollectionAction.MODIFYCOLLECTION.lowerName, v1Params.get(ACTION)); + final SolrParams v1Params = + captureConvertedV1Params( + "/collections/collName", + "POST", + "{ 'modify': {" + + "'replicationFactor': 123, " + + "'readOnly': true, " + + "'config': 'techproducts_config', " + + "'async': 'requestTrackingId', " + + "'properties': {" + + " 'foo': 'bar', " + + " 'baz': 456 " + + "}" + + "}}"); + + assertEquals( + CollectionParams.CollectionAction.MODIFYCOLLECTION.lowerName, v1Params.get(ACTION)); assertEquals("collName", v1Params.get(COLLECTION)); assertEquals(123, v1Params.getPrimitiveInt(ZkStateReader.REPLICATION_FACTOR)); assertEquals(true, v1Params.getPrimitiveBool(ZkStateReader.READ_ONLY)); @@ -119,8 +127,9 @@ public void testModifyCollectionAllProperties() throws Exception { @Test public void testReloadCollectionAllProperties() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/collections/collName", "POST", - "{ 'reload': {'async': 'requestTrackingId'}}"); + final SolrParams v1Params = + captureConvertedV1Params( + "/collections/collName", "POST", "{ 'reload': {'async': 'requestTrackingId'}}"); assertEquals(CollectionParams.CollectionAction.RELOAD.lowerName, v1Params.get(ACTION)); assertEquals("collName", v1Params.get(NAME)); @@ -129,17 +138,20 @@ public void testReloadCollectionAllProperties() throws Exception { @Test public void testMoveReplicaAllProperties() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/collections/collName", "POST", - "{ 'move-replica': {" + - "'sourceNode': 'someSourceNode', " + - "'targetNode': 'someTargetNode', " + - "'replica': 'someReplica', " + - "'shard': 'someShard', " + - "'waitForFinalState': true, " + - "'timeout': 123, " + - "'inPlaceMove': true, " + - "'followAliases': true " + - "}}"); + final SolrParams v1Params = + captureConvertedV1Params( + "/collections/collName", + "POST", + "{ 'move-replica': {" + + "'sourceNode': 'someSourceNode', " + + "'targetNode': 'someTargetNode', " + + "'replica': 'someReplica', " + + "'shard': 'someShard', " + + 
"'waitForFinalState': true, " + + "'timeout': 123, " + + "'inPlaceMove': true, " + + "'followAliases': true " + + "}}"); assertEquals(CollectionParams.CollectionAction.MOVEREPLICA.lowerName, v1Params.get(ACTION)); assertEquals("collName", v1Params.get(COLLECTION)); @@ -155,14 +167,17 @@ public void testMoveReplicaAllProperties() throws Exception { @Test public void testMigrateDocsAllProperties() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/collections/collName", "POST", - "{ 'migrate-docs': {" + - "'target': 'someTargetCollection', " + - "'splitKey': 'someSplitKey', " + - "'forwardTimeout': 123, " + - "'followAliases': true, " + - "'async': 'requestTrackingId' " + - "}}"); + final SolrParams v1Params = + captureConvertedV1Params( + "/collections/collName", + "POST", + "{ 'migrate-docs': {" + + "'target': 'someTargetCollection', " + + "'splitKey': 'someSplitKey', " + + "'forwardTimeout': 123, " + + "'followAliases': true, " + + "'async': 'requestTrackingId' " + + "}}"); assertEquals(CollectionParams.CollectionAction.MIGRATE.lowerName, v1Params.get(ACTION)); assertEquals("collName", v1Params.get(COLLECTION)); @@ -175,14 +190,18 @@ public void testMigrateDocsAllProperties() throws Exception { @Test public void testBalanceShardUniqueAllProperties() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/collections/collName", "POST", - "{ 'balance-shard-unique': {" + - "'property': 'somePropertyToBalance', " + - "'onlyactivenodes': false, " + - "'shardUnique': true" + - "}}"); - - assertEquals(CollectionParams.CollectionAction.BALANCESHARDUNIQUE.lowerName, v1Params.get(ACTION)); + final SolrParams v1Params = + captureConvertedV1Params( + "/collections/collName", + "POST", + "{ 'balance-shard-unique': {" + + "'property': 'somePropertyToBalance', " + + "'onlyactivenodes': false, " + + "'shardUnique': true" + + "}}"); + + assertEquals( + CollectionParams.CollectionAction.BALANCESHARDUNIQUE.lowerName, v1Params.get(ACTION)); assertEquals("collName", v1Params.get(COLLECTION)); assertEquals("somePropertyToBalance", v1Params.get("property")); assertEquals(false, v1Params.getPrimitiveBool("onlyactivenodes")); @@ -191,13 +210,14 @@ public void testBalanceShardUniqueAllProperties() throws Exception { @Test public void testRebalanceLeadersAllProperties() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/collections/collName", "POST", - "{ 'rebalance-leaders': {" + - "'maxAtOnce': 123, " + - "'maxWaitSeconds': 456" + - "}}"); - - assertEquals(CollectionParams.CollectionAction.REBALANCELEADERS.lowerName, v1Params.get(ACTION)); + final SolrParams v1Params = + captureConvertedV1Params( + "/collections/collName", + "POST", + "{ 'rebalance-leaders': {" + "'maxAtOnce': 123, " + "'maxWaitSeconds': 456" + "}}"); + + assertEquals( + CollectionParams.CollectionAction.REBALANCELEADERS.lowerName, v1Params.get(ACTION)); assertEquals("collName", v1Params.get(COLLECTION)); assertEquals(123, v1Params.getPrimitiveInt("maxAtOnce")); assertEquals(456, v1Params.getPrimitiveInt("maxWaitSeconds")); @@ -205,13 +225,16 @@ public void testRebalanceLeadersAllProperties() throws Exception { @Test public void testAddReplicaPropertyAllProperties() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/collections/collName", "POST", - "{ 'add-replica-property': {" + - "'shard': 'someShardName', " + - "'replica': 'someReplicaName', " + - "'name': 'somePropertyName', " + - "'value': 'somePropertyValue'" + - "}}"); + final SolrParams v1Params 
= + captureConvertedV1Params( + "/collections/collName", + "POST", + "{ 'add-replica-property': {" + + "'shard': 'someShardName', " + + "'replica': 'someReplicaName', " + + "'name': 'somePropertyName', " + + "'value': 'somePropertyValue'" + + "}}"); assertEquals(CollectionParams.CollectionAction.ADDREPLICAPROP.lowerName, v1Params.get(ACTION)); assertEquals("collName", v1Params.get(COLLECTION)); @@ -223,14 +246,18 @@ public void testAddReplicaPropertyAllProperties() throws Exception { @Test public void testDeleteReplicaPropertyAllProperties() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/collections/collName", "POST", - "{ 'delete-replica-property': {" + - "'shard': 'someShardName', " + - "'replica': 'someReplicaName', " + - "'property': 'somePropertyName' " + - "}}"); - - assertEquals(CollectionParams.CollectionAction.DELETEREPLICAPROP.lowerName, v1Params.get(ACTION)); + final SolrParams v1Params = + captureConvertedV1Params( + "/collections/collName", + "POST", + "{ 'delete-replica-property': {" + + "'shard': 'someShardName', " + + "'replica': 'someReplicaName', " + + "'property': 'somePropertyName' " + + "}}"); + + assertEquals( + CollectionParams.CollectionAction.DELETEREPLICAPROP.lowerName, v1Params.get(ACTION)); assertEquals("collName", v1Params.get(COLLECTION)); assertEquals("someShardName", v1Params.get("shard")); assertEquals("someReplicaName", v1Params.get("replica")); @@ -239,11 +266,14 @@ public void testDeleteReplicaPropertyAllProperties() throws Exception { @Test public void testSetCollectionPropertyAllProperties() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/collections/collName", "POST", - "{ 'set-collection-property': {" + - "'name': 'somePropertyName', " + - "'value': 'somePropertyValue' " + - "}}"); + final SolrParams v1Params = + captureConvertedV1Params( + "/collections/collName", + "POST", + "{ 'set-collection-property': {" + + "'name': 'somePropertyName', " + + "'value': 'somePropertyValue' " + + "}}"); assertEquals(CollectionParams.CollectionAction.COLLECTIONPROP.lowerName, v1Params.get(ACTION)); assertEquals("collName", v1Params.get(NAME)); @@ -251,55 +281,58 @@ public void testSetCollectionPropertyAllProperties() throws Exception { assertEquals("somePropertyValue", v1Params.get("propertyValue")); } - private SolrParams captureConvertedV1Params(String path, String method, String v2RequestBody) throws Exception { + private SolrParams captureConvertedV1Params(String path, String method, String v2RequestBody) + throws Exception { final HashMap parts = new HashMap<>(); final Api api = apiBag.lookup(path, method, parts); final SolrQueryResponse rsp = new SolrQueryResponse(); - final LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, Maps.newHashMap()) { - @Override - public List getCommands(boolean validateInput) { - if (v2RequestBody == null) return Collections.emptyList(); - return ApiBag.getCommandOperations(new ContentStreamBase.StringStream(v2RequestBody), api.getCommandSchema(), true); - } - - @Override - public Map getPathTemplateValues() { - return parts; - } - - @Override - public String getHttpMethod() { - return method; - } - }; - + final LocalSolrQueryRequest req = + new LocalSolrQueryRequest(null, Maps.newHashMap()) { + @Override + public List getCommands(boolean validateInput) { + if (v2RequestBody == null) return Collections.emptyList(); + return ApiBag.getCommandOperations( + new ContentStreamBase.StringStream(v2RequestBody), api.getCommandSchema(), true); + } + + @Override + public Map 
getPathTemplateValues() { + return parts; + } + + @Override + public String getHttpMethod() { + return method; + } + }; api.call(req, rsp); verify(mockCollectionsHandler).handleRequestBody(queryRequestCaptor.capture(), any()); return queryRequestCaptor.getValue().getParams(); } - private SolrParams captureConvertedV1Params(String path, String method, Map queryParams) throws Exception { + private SolrParams captureConvertedV1Params( + String path, String method, Map queryParams) throws Exception { final HashMap parts = new HashMap<>(); final Api api = apiBag.lookup(path, method, parts); final SolrQueryResponse rsp = new SolrQueryResponse(); - final LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, queryParams) { - @Override - public List getCommands(boolean validateInput) { - return Collections.emptyList(); - } - - @Override - public Map getPathTemplateValues() { - return parts; - } - - @Override - public String getHttpMethod() { - return method; - } - }; - + final LocalSolrQueryRequest req = + new LocalSolrQueryRequest(null, queryParams) { + @Override + public List getCommands(boolean validateInput) { + return Collections.emptyList(); + } + + @Override + public Map getPathTemplateValues() { + return parts; + } + + @Override + public String getHttpMethod() { + return method; + } + }; api.call(req, rsp); verify(mockCollectionsHandler).handleRequestBody(queryRequestCaptor.capture(), any()); diff --git a/solr/core/src/test/org/apache/solr/handler/admin/api/V2CoreAPIMappingTest.java b/solr/core/src/test/org/apache/solr/handler/admin/api/V2CoreAPIMappingTest.java index 0f83e1692cc..2ff782f7eb5 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/api/V2CoreAPIMappingTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/api/V2CoreAPIMappingTest.java @@ -17,7 +17,21 @@ package org.apache.solr.handler.admin.api; +import static org.apache.solr.common.params.CommonAdminParams.ASYNC; +import static org.apache.solr.common.params.CommonAdminParams.SPLIT_KEY; +import static org.apache.solr.common.params.CommonParams.ACTION; +import static org.apache.solr.common.params.CommonParams.PATH; +import static org.apache.solr.common.params.CoreAdminParams.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + import com.google.common.collect.Maps; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.api.Api; import org.apache.solr.api.ApiBag; @@ -35,245 +49,251 @@ import org.junit.Test; import org.mockito.ArgumentCaptor; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.apache.solr.common.params.CommonAdminParams.ASYNC; -import static org.apache.solr.common.params.CommonAdminParams.SPLIT_KEY; -import static org.apache.solr.common.params.CommonParams.ACTION; -import static org.apache.solr.common.params.CommonParams.PATH; -import static org.apache.solr.common.params.CoreAdminParams.*; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; - /** - * Unit tests for the V2 APIs found in {@link org.apache.solr.handler.admin.api} that use the /cores/{core} path. + * Unit tests for the V2 APIs found in {@link org.apache.solr.handler.admin.api} that use the + * /cores/{core} path. 
 *
- * Note that the V2 requests made by these tests are not necessarily semantically valid. They shouldn't be taken as
- * examples. In several instances, mutually exclusive JSON parameters are provided. This is done to exercise conversion
- * of all parameters, even if particular combinations are never expected in the same request.
+ *
+ * <p>
Note that the V2 requests made by these tests are not necessarily semantically valid. They + * shouldn't be taken as examples. In several instances, mutually exclusive JSON parameters are + * provided. This is done to exercise conversion of all parameters, even if particular combinations + * are never expected in the same request. */ public class V2CoreAPIMappingTest extends SolrTestCaseJ4 { - private ApiBag apiBag; - - private ArgumentCaptor queryRequestCaptor; - - private CoreAdminHandler mockCoreHandler; - - @BeforeClass - public static void ensureWorkingMockito() { - assumeWorkingMockito(); - } - - @Before - public void setupApiBag() throws Exception { - mockCoreHandler = mock(CoreAdminHandler.class); - queryRequestCaptor = ArgumentCaptor.forClass(SolrQueryRequest.class); - - apiBag = new ApiBag(false); - apiBag.registerObject(new ReloadCoreAPI(mockCoreHandler)); - apiBag.registerObject(new SwapCoresAPI(mockCoreHandler)); - apiBag.registerObject(new RenameCoreAPI(mockCoreHandler)); - apiBag.registerObject(new UnloadCoreAPI(mockCoreHandler)); - apiBag.registerObject(new MergeIndexesAPI(mockCoreHandler)); - apiBag.registerObject(new SplitCoreAPI(mockCoreHandler)); - apiBag.registerObject(new RequestCoreRecoveryAPI(mockCoreHandler)); - apiBag.registerObject(new PrepareCoreRecoveryAPI(mockCoreHandler)); - apiBag.registerObject(new RequestApplyCoreUpdatesAPI(mockCoreHandler)); - apiBag.registerObject(new RequestSyncShardAPI(mockCoreHandler)); - apiBag.registerObject(new RequestBufferUpdatesAPI(mockCoreHandler)); - apiBag.registerObject(new RequestCoreCommandStatusAPI(mockCoreHandler)); - } - - @Test - public void testReloadCoreAllParams() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/cores/coreName", "POST", "{\"reload\": {}}"); - - assertEquals("reload", v1Params.get(ACTION)); - assertEquals("coreName", v1Params.get(CORE)); - } - - @Test - public void testSwapCoresAllParams() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/cores/coreName", "POST", "{\"swap\": {\"with\": \"otherCore\"}}"); - - assertEquals("swap", v1Params.get(ACTION)); - assertEquals("coreName", v1Params.get(CORE)); - assertEquals("otherCore", v1Params.get(OTHER)); - } - - @Test - public void testRenameCoreAllParams() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/cores/coreName", "POST", "{\"rename\": {\"to\": \"otherCore\"}}"); - - assertEquals("rename", v1Params.get(ACTION)); - assertEquals("coreName", v1Params.get(CORE)); - assertEquals("otherCore", v1Params.get(OTHER)); - } - - @Test - public void testUnloadCoreAllParams() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/cores/coreName", "POST", - "{" + - "\"unload\": {" + - "\"deleteIndex\": true, " + - "\"deleteDataDir\": true, " + - "\"deleteInstanceDir\": true, " + - "\"async\": \"someRequestId\"}}"); - - assertEquals("unload", v1Params.get(ACTION)); - assertEquals("coreName", v1Params.get(CORE)); - assertEquals(true, v1Params.getBool(DELETE_INDEX)); - assertEquals(true, v1Params.getBool(DELETE_DATA_DIR)); - assertEquals(true, v1Params.getBool(DELETE_INSTANCE_DIR)); - assertEquals("someRequestId", v1Params.get(ASYNC)); - } - - @Test - public void testMergeIndexesAllParams() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/cores/coreName", "POST", - "{" + - "\"merge-indexes\": {" + - "\"indexDir\": [\"dir1\", \"dir2\"], " + - "\"srcCore\": [\"core1\", \"core2\"], " + - "\"updateChain\": \"someUpdateChain\", " + - "\"async\": 
\"someRequestId\"}}"); - - assertEquals("mergeindexes", v1Params.get(ACTION)); - assertEquals("coreName", v1Params.get(CORE)); - assertEquals("someUpdateChain", v1Params.get(UpdateParams.UPDATE_CHAIN)); - assertEquals("someRequestId", v1Params.get(ASYNC)); - final List indexDirs = Arrays.asList(v1Params.getParams("indexDir")); - assertEquals(2, indexDirs.size()); - assertTrue(indexDirs.containsAll(List.of("dir1", "dir2"))); - final List srcCores = Arrays.asList(v1Params.getParams("srcCore")); - assertEquals(2, srcCores.size()); - assertTrue(srcCores.containsAll(List.of("core1", "core2"))); - } - - @Test - public void testSplitCoreAllParams() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/cores/coreName", "POST", - "{" + - "\"split\": {" + - "\"path\": [\"path1\", \"path2\"], " + - "\"targetCore\": [\"core1\", \"core2\"], " + - "\"splitKey\": \"someSplitKey\", " + - "\"getRanges\": true, " + - "\"ranges\": \"range1,range2\", " + - "\"async\": \"someRequestId\"}}"); - - assertEquals("split", v1Params.get(ACTION)); - assertEquals("coreName", v1Params.get(CORE)); - assertEquals("someSplitKey", v1Params.get(SPLIT_KEY)); - assertEquals("range1,range2", v1Params.get(RANGES)); - assertEquals("someRequestId", v1Params.get(ASYNC)); - final List pathEntries = Arrays.asList(v1Params.getParams(PATH)); - assertEquals(2, pathEntries.size()); - assertTrue(pathEntries.containsAll(List.of("path1", "path2"))); - final List targetCoreEntries = Arrays.asList(v1Params.getParams(TARGET_CORE)); - assertEquals(2, targetCoreEntries.size()); - assertTrue(targetCoreEntries.containsAll(List.of("core1", "core2"))); - } - - @Test - public void testRequestCoreRecoveryAllParams() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/cores/coreName", "POST", - "{\"request-recovery\": {}}"); - - assertEquals("requestrecovery", v1Params.get(ACTION)); - assertEquals("coreName", v1Params.get(CORE)); - } - - @Test - public void testPrepareCoreRecoveryAllParams() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/cores/coreName", "POST", - "{\"prep-recovery\": {" + - "\"nodeName\": \"someNodeName\", " + - "\"coreNodeName\": \"someCoreNodeName\", " + - "\"state\": \"someState\", " + - "\"checkLive\": true, " + - "\"onlyIfLeader\": true" + - "\"onlyIfLeaderActive\": true " + - "}}"); - - assertEquals("preprecovery", v1Params.get(ACTION)); - assertEquals("coreName", v1Params.get(CORE)); - assertEquals("someNodeName", v1Params.get("nodeName")); - assertEquals("someCoreNodeName", v1Params.get(CORE_NODE_NAME)); - assertEquals("someState", v1Params.get(ZkStateReader.STATE_PROP)); - assertEquals(true, v1Params.getPrimitiveBool("checkLive")); - assertEquals(true, v1Params.getPrimitiveBool("onlyIfLeader")); - assertEquals(true, v1Params.getPrimitiveBool("onlyIfLeaderActive")); - } - - @Test - public void testApplyCoreUpdatesAllParams() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/cores/coreName", "POST", - "{\"request-apply-updates\": {}}"); - - assertEquals("requestapplyupdates", v1Params.get(ACTION)); - assertEquals("coreName", v1Params.get(NAME)); - } - - @Test - public void testSyncShardAllParams() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/cores/coreName", "POST", - "{\"request-sync-shard\": {}}"); - - assertEquals("requestsyncshard", v1Params.get(ACTION)); - assertEquals("coreName", v1Params.get(CORE)); - } - - @Test - public void testRequestBufferUpdatesAllParams() throws Exception { - final 
SolrParams v1Params = captureConvertedV1Params("/cores/coreName", "POST", - "{\"request-buffer-updates\": {}}"); - - assertEquals("requestbufferupdates", v1Params.get(ACTION)); - assertEquals("coreName", v1Params.get(NAME)); - } - - // Strictly speaking, this API isn't at the /cores/coreName path, but as the only API at its path - // (/cores/coreName/command-status/requestId) it doesn't merit its own test class. - @Test - public void testRequestCommandStatusAllParams() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/cores/coreName/command-status/someId", "GET", - null); - - assertEquals("requeststatus", v1Params.get(ACTION)); - assertEquals("someId", v1Params.get(REQUESTID)); - } - - private SolrParams captureConvertedV1Params(String path, String method, String v2RequestBody) throws Exception { - final HashMap parts = new HashMap<>(); - final Api api = apiBag.lookup(path, method, parts); - final SolrQueryResponse rsp = new SolrQueryResponse(); - final LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, Maps.newHashMap()) { - @Override - public List getCommands(boolean validateInput) { - if (v2RequestBody == null) return Collections.emptyList(); - return ApiBag.getCommandOperations(new ContentStreamBase.StringStream(v2RequestBody), api.getCommandSchema(), true); - } - - @Override - public Map getPathTemplateValues() { - return parts; - } - - @Override - public String getHttpMethod() { - return method; - } + private ApiBag apiBag; + + private ArgumentCaptor queryRequestCaptor; + + private CoreAdminHandler mockCoreHandler; + + @BeforeClass + public static void ensureWorkingMockito() { + assumeWorkingMockito(); + } + + @Before + public void setupApiBag() throws Exception { + mockCoreHandler = mock(CoreAdminHandler.class); + queryRequestCaptor = ArgumentCaptor.forClass(SolrQueryRequest.class); + + apiBag = new ApiBag(false); + apiBag.registerObject(new ReloadCoreAPI(mockCoreHandler)); + apiBag.registerObject(new SwapCoresAPI(mockCoreHandler)); + apiBag.registerObject(new RenameCoreAPI(mockCoreHandler)); + apiBag.registerObject(new UnloadCoreAPI(mockCoreHandler)); + apiBag.registerObject(new MergeIndexesAPI(mockCoreHandler)); + apiBag.registerObject(new SplitCoreAPI(mockCoreHandler)); + apiBag.registerObject(new RequestCoreRecoveryAPI(mockCoreHandler)); + apiBag.registerObject(new PrepareCoreRecoveryAPI(mockCoreHandler)); + apiBag.registerObject(new RequestApplyCoreUpdatesAPI(mockCoreHandler)); + apiBag.registerObject(new RequestSyncShardAPI(mockCoreHandler)); + apiBag.registerObject(new RequestBufferUpdatesAPI(mockCoreHandler)); + apiBag.registerObject(new RequestCoreCommandStatusAPI(mockCoreHandler)); + } + + @Test + public void testReloadCoreAllParams() throws Exception { + final SolrParams v1Params = + captureConvertedV1Params("/cores/coreName", "POST", "{\"reload\": {}}"); + + assertEquals("reload", v1Params.get(ACTION)); + assertEquals("coreName", v1Params.get(CORE)); + } + + @Test + public void testSwapCoresAllParams() throws Exception { + final SolrParams v1Params = + captureConvertedV1Params( + "/cores/coreName", "POST", "{\"swap\": {\"with\": \"otherCore\"}}"); + + assertEquals("swap", v1Params.get(ACTION)); + assertEquals("coreName", v1Params.get(CORE)); + assertEquals("otherCore", v1Params.get(OTHER)); + } + + @Test + public void testRenameCoreAllParams() throws Exception { + final SolrParams v1Params = + captureConvertedV1Params( + "/cores/coreName", "POST", "{\"rename\": {\"to\": \"otherCore\"}}"); + + assertEquals("rename", 
v1Params.get(ACTION)); + assertEquals("coreName", v1Params.get(CORE)); + assertEquals("otherCore", v1Params.get(OTHER)); + } + + @Test + public void testUnloadCoreAllParams() throws Exception { + final SolrParams v1Params = + captureConvertedV1Params( + "/cores/coreName", + "POST", + "{" + + "\"unload\": {" + + "\"deleteIndex\": true, " + + "\"deleteDataDir\": true, " + + "\"deleteInstanceDir\": true, " + + "\"async\": \"someRequestId\"}}"); + + assertEquals("unload", v1Params.get(ACTION)); + assertEquals("coreName", v1Params.get(CORE)); + assertEquals(true, v1Params.getBool(DELETE_INDEX)); + assertEquals(true, v1Params.getBool(DELETE_DATA_DIR)); + assertEquals(true, v1Params.getBool(DELETE_INSTANCE_DIR)); + assertEquals("someRequestId", v1Params.get(ASYNC)); + } + + @Test + public void testMergeIndexesAllParams() throws Exception { + final SolrParams v1Params = + captureConvertedV1Params( + "/cores/coreName", + "POST", + "{" + + "\"merge-indexes\": {" + + "\"indexDir\": [\"dir1\", \"dir2\"], " + + "\"srcCore\": [\"core1\", \"core2\"], " + + "\"updateChain\": \"someUpdateChain\", " + + "\"async\": \"someRequestId\"}}"); + + assertEquals("mergeindexes", v1Params.get(ACTION)); + assertEquals("coreName", v1Params.get(CORE)); + assertEquals("someUpdateChain", v1Params.get(UpdateParams.UPDATE_CHAIN)); + assertEquals("someRequestId", v1Params.get(ASYNC)); + final List indexDirs = Arrays.asList(v1Params.getParams("indexDir")); + assertEquals(2, indexDirs.size()); + assertTrue(indexDirs.containsAll(List.of("dir1", "dir2"))); + final List srcCores = Arrays.asList(v1Params.getParams("srcCore")); + assertEquals(2, srcCores.size()); + assertTrue(srcCores.containsAll(List.of("core1", "core2"))); + } + + @Test + public void testSplitCoreAllParams() throws Exception { + final SolrParams v1Params = + captureConvertedV1Params( + "/cores/coreName", + "POST", + "{" + + "\"split\": {" + + "\"path\": [\"path1\", \"path2\"], " + + "\"targetCore\": [\"core1\", \"core2\"], " + + "\"splitKey\": \"someSplitKey\", " + + "\"getRanges\": true, " + + "\"ranges\": \"range1,range2\", " + + "\"async\": \"someRequestId\"}}"); + + assertEquals("split", v1Params.get(ACTION)); + assertEquals("coreName", v1Params.get(CORE)); + assertEquals("someSplitKey", v1Params.get(SPLIT_KEY)); + assertEquals("range1,range2", v1Params.get(RANGES)); + assertEquals("someRequestId", v1Params.get(ASYNC)); + final List pathEntries = Arrays.asList(v1Params.getParams(PATH)); + assertEquals(2, pathEntries.size()); + assertTrue(pathEntries.containsAll(List.of("path1", "path2"))); + final List targetCoreEntries = Arrays.asList(v1Params.getParams(TARGET_CORE)); + assertEquals(2, targetCoreEntries.size()); + assertTrue(targetCoreEntries.containsAll(List.of("core1", "core2"))); + } + + @Test + public void testRequestCoreRecoveryAllParams() throws Exception { + final SolrParams v1Params = + captureConvertedV1Params("/cores/coreName", "POST", "{\"request-recovery\": {}}"); + + assertEquals("requestrecovery", v1Params.get(ACTION)); + assertEquals("coreName", v1Params.get(CORE)); + } + + @Test + public void testPrepareCoreRecoveryAllParams() throws Exception { + final SolrParams v1Params = + captureConvertedV1Params( + "/cores/coreName", + "POST", + "{\"prep-recovery\": {" + + "\"nodeName\": \"someNodeName\", " + + "\"coreNodeName\": \"someCoreNodeName\", " + + "\"state\": \"someState\", " + + "\"checkLive\": true, " + + "\"onlyIfLeader\": true" + + "\"onlyIfLeaderActive\": true " + + "}}"); + + assertEquals("preprecovery", v1Params.get(ACTION)); + 
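// Every mapping test in these classes follows the same harness: register the v2 Api with an
// ApiBag, drive it with a synthetic request, and let Mockito's ArgumentCaptor capture the
// translated SolrParams that reach the mocked v1 handler. A self-contained miniature of that
// capture-and-verify shape is sketched below; V1Handler and V2Facade are hypothetical
// stand-ins, not Solr types.

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import org.mockito.ArgumentCaptor;

interface V1Handler {
  void handle(String v1Params); // accepts the already-translated v1 argument
}

class V2Facade {
  private final V1Handler delegate;

  V2Facade(V1Handler delegate) {
    this.delegate = delegate;
  }

  void call(String v2Body) {
    delegate.handle("action=reload&core=" + v2Body); // the v2 -> v1 translation under test
  }
}

class CaptureSketch {
  public static void main(String[] args) {
    V1Handler mockV1 = mock(V1Handler.class);
    ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);

    new V2Facade(mockV1).call("coreName"); // drive the v2 entry point

    verify(mockV1).handle(captor.capture()); // capture what reached the v1 side
    System.out.println(captor.getValue()); // prints "action=reload&core=coreName"
  }
}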
assertEquals("coreName", v1Params.get(CORE)); + assertEquals("someNodeName", v1Params.get("nodeName")); + assertEquals("someCoreNodeName", v1Params.get(CORE_NODE_NAME)); + assertEquals("someState", v1Params.get(ZkStateReader.STATE_PROP)); + assertEquals(true, v1Params.getPrimitiveBool("checkLive")); + assertEquals(true, v1Params.getPrimitiveBool("onlyIfLeader")); + assertEquals(true, v1Params.getPrimitiveBool("onlyIfLeaderActive")); + } + + @Test + public void testApplyCoreUpdatesAllParams() throws Exception { + final SolrParams v1Params = + captureConvertedV1Params("/cores/coreName", "POST", "{\"request-apply-updates\": {}}"); + + assertEquals("requestapplyupdates", v1Params.get(ACTION)); + assertEquals("coreName", v1Params.get(NAME)); + } + + @Test + public void testSyncShardAllParams() throws Exception { + final SolrParams v1Params = + captureConvertedV1Params("/cores/coreName", "POST", "{\"request-sync-shard\": {}}"); + + assertEquals("requestsyncshard", v1Params.get(ACTION)); + assertEquals("coreName", v1Params.get(CORE)); + } + + @Test + public void testRequestBufferUpdatesAllParams() throws Exception { + final SolrParams v1Params = + captureConvertedV1Params("/cores/coreName", "POST", "{\"request-buffer-updates\": {}}"); + + assertEquals("requestbufferupdates", v1Params.get(ACTION)); + assertEquals("coreName", v1Params.get(NAME)); + } + + // Strictly speaking, this API isn't at the /cores/coreName path, but as the only API at its path + // (/cores/coreName/command-status/requestId) it doesn't merit its own test class. + @Test + public void testRequestCommandStatusAllParams() throws Exception { + final SolrParams v1Params = + captureConvertedV1Params("/cores/coreName/command-status/someId", "GET", null); + + assertEquals("requeststatus", v1Params.get(ACTION)); + assertEquals("someId", v1Params.get(REQUESTID)); + } + + private SolrParams captureConvertedV1Params(String path, String method, String v2RequestBody) + throws Exception { + final HashMap parts = new HashMap<>(); + final Api api = apiBag.lookup(path, method, parts); + final SolrQueryResponse rsp = new SolrQueryResponse(); + final LocalSolrQueryRequest req = + new LocalSolrQueryRequest(null, Maps.newHashMap()) { + @Override + public List getCommands(boolean validateInput) { + if (v2RequestBody == null) return Collections.emptyList(); + return ApiBag.getCommandOperations( + new ContentStreamBase.StringStream(v2RequestBody), api.getCommandSchema(), true); + } + + @Override + public Map getPathTemplateValues() { + return parts; + } + + @Override + public String getHttpMethod() { + return method; + } }; - - api.call(req, rsp); - verify(mockCoreHandler).handleRequestBody(queryRequestCaptor.capture(), any()); - return queryRequestCaptor.getValue().getParams(); - } + api.call(req, rsp); + verify(mockCoreHandler).handleRequestBody(queryRequestCaptor.capture(), any()); + return queryRequestCaptor.getValue().getParams(); + } } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/api/V2NodeAPIMappingTest.java b/solr/core/src/test/org/apache/solr/handler/admin/api/V2NodeAPIMappingTest.java index 193a8157155..261ad17fbb3 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/api/V2NodeAPIMappingTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/api/V2NodeAPIMappingTest.java @@ -17,7 +17,19 @@ package org.apache.solr.handler.admin.api; +import static org.apache.solr.SolrTestCaseJ4.assumeWorkingMockito; +import static org.apache.solr.common.params.CommonParams.ACTION; +import static 
org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.*; + import com.google.common.collect.Maps; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.apache.solr.api.Api; import org.apache.solr.api.ApiBag; import org.apache.solr.common.params.ModifiableSolrParams; @@ -40,233 +52,245 @@ import org.junit.Test; import org.mockito.ArgumentCaptor; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.apache.solr.SolrTestCaseJ4.assumeWorkingMockito; -import static org.apache.solr.common.params.CommonParams.ACTION; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.*; - -/** - * Unit tests for the v2 to v1 mapping for Solr's /node/ APIs - */ +/** Unit tests for the v2 to v1 mapping for Solr's /node/ APIs */ public class V2NodeAPIMappingTest { - private ApiBag apiBag; - private ArgumentCaptor queryRequestCaptor; - private CoreAdminHandler mockCoresHandler; - private InfoHandler infoHandler; - private SystemInfoHandler mockSystemInfoHandler; - private LoggingHandler mockLoggingHandler; - private PropertiesRequestHandler mockPropertiesHandler; - private HealthCheckHandler mockHealthCheckHandler; - private ThreadDumpHandler mockThreadDumpHandler; - - @BeforeClass - public static void ensureWorkingMockito() { - assumeWorkingMockito(); - } - - @Before - public void setupApiBag() throws Exception { - mockCoresHandler = mock(CoreAdminHandler.class); - infoHandler = mock(InfoHandler.class); - mockSystemInfoHandler = mock(SystemInfoHandler.class); - mockLoggingHandler = mock(LoggingHandler.class); - mockPropertiesHandler = mock(PropertiesRequestHandler.class); - mockHealthCheckHandler = mock(HealthCheckHandler.class); - mockThreadDumpHandler = mock(ThreadDumpHandler.class); - queryRequestCaptor = ArgumentCaptor.forClass(SolrQueryRequest.class); - - when(infoHandler.getSystemInfoHandler()).thenReturn(mockSystemInfoHandler); - when(infoHandler.getLoggingHandler()).thenReturn(mockLoggingHandler); - when(infoHandler.getPropertiesHandler()).thenReturn(mockPropertiesHandler); - when(infoHandler.getHealthCheckHandler()).thenReturn(mockHealthCheckHandler); - when(infoHandler.getThreadDumpHandler()).thenReturn(mockThreadDumpHandler); - - apiBag = new ApiBag(false); - registerAllNodeApis(apiBag, mockCoresHandler, infoHandler); - } - - @Test - public void testOverseerOpApiAllProperties() throws Exception { - final SolrParams v1Params = captureConvertedCoreV1Params("/node", "POST", - "{" + - "\"overseer-op\": {" + - "\"op\": \"asdf\", " + - "\"electionNode\": \"someNodeName\"" + - "}}"); - - assertEquals("overseerop", v1Params.get(ACTION)); - assertEquals("asdf", v1Params.get("op")); - assertEquals("someNodeName", v1Params.get("electionNode")); - } - - @Test - public void testRejoinLeaderElectionApiAllProperties() throws Exception { - final SolrParams v1Params = captureConvertedCoreV1Params("/node", "POST", - "{" + - "\"rejoin-leader-election\": {" + - "\"collection\": \"someCollection\", " + - "\"shard\": \"someShard\"," + - "\"coreNodeName\": \"someNodeName\"," + - "\"core\": \"someCore\"," + - "\"electionNode\": \"someElectionNode\"," + - "\"rejoinAtHead\": true" + - "}}"); - - 
assertEquals("rejoinleaderelection", v1Params.get(ACTION)); - assertEquals("someCollection", v1Params.get("collection")); - assertEquals("someShard", v1Params.get("shard")); - assertEquals("someNodeName", v1Params.get("core_node_name")); - assertEquals("someCore", v1Params.get("core")); - assertEquals("someElectionNode", v1Params.get("election_node")); - assertEquals("true", v1Params.get("rejoinAtHead")); - } - - @Test - public void testInvokeClassApiAllProperties() throws Exception { - final SolrParams v1Params = captureConvertedCoreV1Params("/node", "POST", - "{" + - "\"invoke\": {" + - "\"classes\": [\"someClassName\", \"someOtherClassName\"]" + - "}}"); - - assertEquals("invoke", v1Params.get(ACTION)); - assertEquals(2, v1Params.getParams("class").length); - final List classes = Arrays.asList(v1Params.getParams("class")); - assertTrue(classes.contains("someClassName")); - assertTrue(classes.contains("someOtherClassName")); - - } - - @Test - public void testSystemPropsApiAllProperties() throws Exception { - final ModifiableSolrParams solrParams = new ModifiableSolrParams(); - solrParams.add("name", "specificPropertyName"); - final SolrParams v1Params = captureConvertedPropertiesV1Params("/node/properties", "GET", solrParams); - - assertEquals("specificPropertyName", v1Params.get("name")); - } - - @Test - public void testThreadDumpApiAllProperties() throws Exception { - final ModifiableSolrParams solrParams = new ModifiableSolrParams(); - solrParams.add("anyParamName", "anyParamValue"); - final SolrParams v1Params = captureConvertedThreadDumpV1Params("/node/threads", "GET", solrParams); - - // All parameters are passed through to v1 API as-is - assertEquals("anyParamValue", v1Params.get("anyParamName")); - } - - @Test - public void testLogLevelsApiAllProperties() throws Exception { - final ModifiableSolrParams solrParams = new ModifiableSolrParams(); - solrParams.add("since", "12345678"); - solrParams.add("threshold", "someThresholdValue"); - solrParams.add("test", "someTestValue"); - solrParams.add("set", "SomeClassName"); - final SolrParams v1Params = captureConvertedLoggingV1Params("/node/logging", "GET", solrParams); - - // All parameters are passed through to v1 API as-is. - assertEquals("12345678", v1Params.get("since")); - assertEquals("someThresholdValue", v1Params.get("threshold")); - assertEquals("someTestValue", v1Params.get("test")); - assertEquals("SomeClassName", v1Params.get("set")); - } - - @Test - public void testSystemInfoApiAllProperties() throws Exception { - final ModifiableSolrParams solrParams = new ModifiableSolrParams(); - solrParams.add("anyParamName", "anyParamValue"); - final SolrParams v1Params = captureConvertedSystemV1Params("/node/system", "GET", solrParams); - - // All parameters are passed through to v1 API as-is. - assertEquals("anyParamValue", v1Params.get("anyParamName")); - } - - @Test - public void testHealthCheckApiAllProperties() throws Exception { - final ModifiableSolrParams solrParams = new ModifiableSolrParams(); - solrParams.add("requireHealthyCores", "true"); - solrParams.add("maxGenerationLag", "123"); - final SolrParams v1Params = captureConvertedHealthCheckV1Params("/node/health", "GET", solrParams); - - // All parameters are passed through to v1 API as-is. 
- assertEquals(true, v1Params.getBool("requireHealthyCores")); - assertEquals(123, v1Params.getPrimitiveInt("maxGenerationLag")); - } - - private SolrParams captureConvertedCoreV1Params(String path, String method, String v2RequestBody) throws Exception { - return doCaptureParams(path, method, new ModifiableSolrParams(), v2RequestBody, mockCoresHandler); - } - - private SolrParams captureConvertedSystemV1Params(String path, String method, SolrParams inputParams) throws Exception { - return doCaptureParams(path, method, inputParams, null, mockSystemInfoHandler); - } - - private SolrParams captureConvertedLoggingV1Params(String path, String method, SolrParams inputParams) throws Exception { - return doCaptureParams(path, method, inputParams, null, mockLoggingHandler); - } - - private SolrParams captureConvertedPropertiesV1Params(String path, String method, SolrParams inputParams) throws Exception { - return doCaptureParams(path, method, inputParams, null, mockPropertiesHandler); - } - - private SolrParams captureConvertedHealthCheckV1Params(String path, String method, SolrParams inputParams) throws Exception { - return doCaptureParams(path, method, inputParams, null, mockHealthCheckHandler); - } - - private SolrParams captureConvertedThreadDumpV1Params(String path, String method, SolrParams inputParams) throws Exception { - return doCaptureParams(path, method, inputParams, null, mockThreadDumpHandler); - } - - private SolrParams doCaptureParams(String path, String method, SolrParams inputParams, String v2RequestBody, RequestHandlerBase mockHandler) throws Exception { - final HashMap parts = new HashMap<>(); - final Map inputParamsMap = Maps.newHashMap(); - inputParams.stream().forEach(e -> { - inputParamsMap.put(e.getKey(), e.getValue()); - }); - final Api api = apiBag.lookup(path, method, parts); - final SolrQueryResponse rsp = new SolrQueryResponse(); - final LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, inputParamsMap) { - @Override - public List getCommands(boolean validateInput) { - if (v2RequestBody == null) return Collections.emptyList(); - return ApiBag.getCommandOperations(new ContentStreamBase.StringStream(v2RequestBody), api.getCommandSchema(), true); - } - - @Override - public Map getPathTemplateValues() { - return parts; - } - - @Override - public String getHttpMethod() { - return method; - } + private ApiBag apiBag; + private ArgumentCaptor queryRequestCaptor; + private CoreAdminHandler mockCoresHandler; + private InfoHandler infoHandler; + private SystemInfoHandler mockSystemInfoHandler; + private LoggingHandler mockLoggingHandler; + private PropertiesRequestHandler mockPropertiesHandler; + private HealthCheckHandler mockHealthCheckHandler; + private ThreadDumpHandler mockThreadDumpHandler; + + @BeforeClass + public static void ensureWorkingMockito() { + assumeWorkingMockito(); + } + + @Before + public void setupApiBag() throws Exception { + mockCoresHandler = mock(CoreAdminHandler.class); + infoHandler = mock(InfoHandler.class); + mockSystemInfoHandler = mock(SystemInfoHandler.class); + mockLoggingHandler = mock(LoggingHandler.class); + mockPropertiesHandler = mock(PropertiesRequestHandler.class); + mockHealthCheckHandler = mock(HealthCheckHandler.class); + mockThreadDumpHandler = mock(ThreadDumpHandler.class); + queryRequestCaptor = ArgumentCaptor.forClass(SolrQueryRequest.class); + + when(infoHandler.getSystemInfoHandler()).thenReturn(mockSystemInfoHandler); + when(infoHandler.getLoggingHandler()).thenReturn(mockLoggingHandler); + 
when(infoHandler.getPropertiesHandler()).thenReturn(mockPropertiesHandler); + when(infoHandler.getHealthCheckHandler()).thenReturn(mockHealthCheckHandler); + when(infoHandler.getThreadDumpHandler()).thenReturn(mockThreadDumpHandler); + + apiBag = new ApiBag(false); + registerAllNodeApis(apiBag, mockCoresHandler, infoHandler); + } + + @Test + public void testOverseerOpApiAllProperties() throws Exception { + final SolrParams v1Params = + captureConvertedCoreV1Params( + "/node", + "POST", + "{" + + "\"overseer-op\": {" + + "\"op\": \"asdf\", " + + "\"electionNode\": \"someNodeName\"" + + "}}"); + + assertEquals("overseerop", v1Params.get(ACTION)); + assertEquals("asdf", v1Params.get("op")); + assertEquals("someNodeName", v1Params.get("electionNode")); + } + + @Test + public void testRejoinLeaderElectionApiAllProperties() throws Exception { + final SolrParams v1Params = + captureConvertedCoreV1Params( + "/node", + "POST", + "{" + + "\"rejoin-leader-election\": {" + + "\"collection\": \"someCollection\", " + + "\"shard\": \"someShard\"," + + "\"coreNodeName\": \"someNodeName\"," + + "\"core\": \"someCore\"," + + "\"electionNode\": \"someElectionNode\"," + + "\"rejoinAtHead\": true" + + "}}"); + + assertEquals("rejoinleaderelection", v1Params.get(ACTION)); + assertEquals("someCollection", v1Params.get("collection")); + assertEquals("someShard", v1Params.get("shard")); + assertEquals("someNodeName", v1Params.get("core_node_name")); + assertEquals("someCore", v1Params.get("core")); + assertEquals("someElectionNode", v1Params.get("election_node")); + assertEquals("true", v1Params.get("rejoinAtHead")); + } + + @Test + public void testInvokeClassApiAllProperties() throws Exception { + final SolrParams v1Params = + captureConvertedCoreV1Params( + "/node", + "POST", + "{" + + "\"invoke\": {" + + "\"classes\": [\"someClassName\", \"someOtherClassName\"]" + + "}}"); + + assertEquals("invoke", v1Params.get(ACTION)); + assertEquals(2, v1Params.getParams("class").length); + final List classes = Arrays.asList(v1Params.getParams("class")); + assertTrue(classes.contains("someClassName")); + assertTrue(classes.contains("someOtherClassName")); + } + + @Test + public void testSystemPropsApiAllProperties() throws Exception { + final ModifiableSolrParams solrParams = new ModifiableSolrParams(); + solrParams.add("name", "specificPropertyName"); + final SolrParams v1Params = + captureConvertedPropertiesV1Params("/node/properties", "GET", solrParams); + + assertEquals("specificPropertyName", v1Params.get("name")); + } + + @Test + public void testThreadDumpApiAllProperties() throws Exception { + final ModifiableSolrParams solrParams = new ModifiableSolrParams(); + solrParams.add("anyParamName", "anyParamValue"); + final SolrParams v1Params = + captureConvertedThreadDumpV1Params("/node/threads", "GET", solrParams); + + // All parameters are passed through to v1 API as-is + assertEquals("anyParamValue", v1Params.get("anyParamName")); + } + + @Test + public void testLogLevelsApiAllProperties() throws Exception { + final ModifiableSolrParams solrParams = new ModifiableSolrParams(); + solrParams.add("since", "12345678"); + solrParams.add("threshold", "someThresholdValue"); + solrParams.add("test", "someTestValue"); + solrParams.add("set", "SomeClassName"); + final SolrParams v1Params = captureConvertedLoggingV1Params("/node/logging", "GET", solrParams); + + // All parameters are passed through to v1 API as-is. 
+    // All parameters are passed through to v1 API as-is.
+    assertEquals("12345678", v1Params.get("since"));
+    assertEquals("someThresholdValue", v1Params.get("threshold"));
+    assertEquals("someTestValue", v1Params.get("test"));
+    assertEquals("SomeClassName", v1Params.get("set"));
+  }
+
+  @Test
+  public void testSystemInfoApiAllProperties() throws Exception {
+    final ModifiableSolrParams solrParams = new ModifiableSolrParams();
+    solrParams.add("anyParamName", "anyParamValue");
+    final SolrParams v1Params = captureConvertedSystemV1Params("/node/system", "GET", solrParams);
+
+    // All parameters are passed through to v1 API as-is.
+    assertEquals("anyParamValue", v1Params.get("anyParamName"));
+  }
+
+  @Test
+  public void testHealthCheckApiAllProperties() throws Exception {
+    final ModifiableSolrParams solrParams = new ModifiableSolrParams();
+    solrParams.add("requireHealthyCores", "true");
+    solrParams.add("maxGenerationLag", "123");
+    final SolrParams v1Params =
+        captureConvertedHealthCheckV1Params("/node/health", "GET", solrParams);
+
+    // All parameters are passed through to v1 API as-is.
+    assertEquals(true, v1Params.getBool("requireHealthyCores"));
+    assertEquals(123, v1Params.getPrimitiveInt("maxGenerationLag"));
+  }
+
+  private SolrParams captureConvertedCoreV1Params(String path, String method, String v2RequestBody)
+      throws Exception {
+    return doCaptureParams(
+        path, method, new ModifiableSolrParams(), v2RequestBody, mockCoresHandler);
+  }
+
+  private SolrParams captureConvertedSystemV1Params(
+      String path, String method, SolrParams inputParams) throws Exception {
+    return doCaptureParams(path, method, inputParams, null, mockSystemInfoHandler);
+  }
+
+  private SolrParams captureConvertedLoggingV1Params(
+      String path, String method, SolrParams inputParams) throws Exception {
+    return doCaptureParams(path, method, inputParams, null, mockLoggingHandler);
+  }
+
+  private SolrParams captureConvertedPropertiesV1Params(
+      String path, String method, SolrParams inputParams) throws Exception {
+    return doCaptureParams(path, method, inputParams, null, mockPropertiesHandler);
+  }
+
+  private SolrParams captureConvertedHealthCheckV1Params(
+      String path, String method, SolrParams inputParams) throws Exception {
+    return doCaptureParams(path, method, inputParams, null, mockHealthCheckHandler);
+  }
+
+  private SolrParams captureConvertedThreadDumpV1Params(
+      String path, String method, SolrParams inputParams) throws Exception {
+    return doCaptureParams(path, method, inputParams, null, mockThreadDumpHandler);
+  }
+
+  private SolrParams doCaptureParams(
+      String path,
+      String method,
+      SolrParams inputParams,
+      String v2RequestBody,
+      RequestHandlerBase mockHandler)
+      throws Exception {
+    final HashMap<String, String> parts = new HashMap<>();
+    final Map<String, String[]> inputParamsMap = Maps.newHashMap();
+    inputParams.stream()
+        .forEach(
+            e -> {
+              inputParamsMap.put(e.getKey(), e.getValue());
+            });
+    final Api api = apiBag.lookup(path, method, parts);
+    final SolrQueryResponse rsp = new SolrQueryResponse();
+    final LocalSolrQueryRequest req =
+        new LocalSolrQueryRequest(null, inputParamsMap) {
+          @Override
+          public List<CommandOperation> getCommands(boolean validateInput) {
+            if (v2RequestBody == null) return Collections.emptyList();
+            return ApiBag.getCommandOperations(
+                new ContentStreamBase.StringStream(v2RequestBody), api.getCommandSchema(), true);
+          }
+
+          @Override
+          public Map<String, String> getPathTemplateValues() {
+            return parts;
+          }
+
+          @Override
+          public String getHttpMethod() {
+            return method;
+          }
        };
-
-    api.call(req, rsp);
-    verify(mockHandler).handleRequestBody(queryRequestCaptor.capture(), 
any()); - return queryRequestCaptor.getValue().getParams(); - } - - private static void registerAllNodeApis(ApiBag apiBag, CoreAdminHandler coreHandler, - InfoHandler infoHandler) { - apiBag.registerObject(new OverseerOperationAPI(coreHandler)); - apiBag.registerObject(new RejoinLeaderElectionAPI(coreHandler)); - apiBag.registerObject(new InvokeClassAPI(coreHandler)); - apiBag.registerObject(new NodePropertiesAPI(infoHandler.getPropertiesHandler())); - apiBag.registerObject(new NodeThreadsAPI(infoHandler.getThreadDumpHandler())); - apiBag.registerObject(new NodeLoggingAPI(infoHandler.getLoggingHandler())); - apiBag.registerObject(new NodeSystemInfoAPI(infoHandler.getSystemInfoHandler())); - apiBag.registerObject(new NodeHealthAPI(infoHandler.getHealthCheckHandler())); - } + api.call(req, rsp); + verify(mockHandler).handleRequestBody(queryRequestCaptor.capture(), any()); + return queryRequestCaptor.getValue().getParams(); + } + + private static void registerAllNodeApis( + ApiBag apiBag, CoreAdminHandler coreHandler, InfoHandler infoHandler) { + apiBag.registerObject(new OverseerOperationAPI(coreHandler)); + apiBag.registerObject(new RejoinLeaderElectionAPI(coreHandler)); + apiBag.registerObject(new InvokeClassAPI(coreHandler)); + apiBag.registerObject(new NodePropertiesAPI(infoHandler.getPropertiesHandler())); + apiBag.registerObject(new NodeThreadsAPI(infoHandler.getThreadDumpHandler())); + apiBag.registerObject(new NodeLoggingAPI(infoHandler.getLoggingHandler())); + apiBag.registerObject(new NodeSystemInfoAPI(infoHandler.getSystemInfoHandler())); + apiBag.registerObject(new NodeHealthAPI(infoHandler.getHealthCheckHandler())); + } } diff --git a/solr/core/src/test/org/apache/solr/handler/admin/api/V2ShardsAPIMappingTest.java b/solr/core/src/test/org/apache/solr/handler/admin/api/V2ShardsAPIMappingTest.java index 07b7e81849a..d59d50cf713 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/api/V2ShardsAPIMappingTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/api/V2ShardsAPIMappingTest.java @@ -21,23 +21,6 @@ package org.apache.solr.handler.admin.api; -import org.apache.solr.api.Api; -import org.apache.solr.api.ApiBag; -import org.apache.solr.common.params.*; -import org.apache.solr.common.util.CommandOperation; -import org.apache.solr.common.util.ContentStreamBase; -import org.apache.solr.handler.admin.CollectionsHandler; -import org.apache.solr.handler.api.ApiRegistrar; -import org.apache.solr.request.LocalSolrQueryRequest; -import org.apache.solr.request.SolrQueryRequest; -import org.apache.solr.response.SolrQueryResponse; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.mockito.ArgumentCaptor; - -import java.util.*; - import static org.apache.solr.SolrTestCaseJ4.assumeWorkingMockito; import static org.apache.solr.cloud.api.collections.CollectionHandlingUtils.ONLY_IF_DOWN; import static org.apache.solr.common.cloud.ZkStateReader.*; @@ -53,239 +36,278 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; +import java.util.*; +import org.apache.solr.api.Api; +import org.apache.solr.api.ApiBag; +import org.apache.solr.common.params.*; +import org.apache.solr.common.util.CommandOperation; +import org.apache.solr.common.util.ContentStreamBase; +import org.apache.solr.handler.admin.CollectionsHandler; +import org.apache.solr.handler.api.ApiRegistrar; +import org.apache.solr.request.LocalSolrQueryRequest; +import org.apache.solr.request.SolrQueryRequest; +import 
org.apache.solr.response.SolrQueryResponse; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.mockito.ArgumentCaptor; + /** - * Unit tests for the V2 APIs that use the /c/{collection}/shards or /c/{collection}/shards/{shard} paths. + * Unit tests for the V2 APIs that use the /c/{collection}/shards or /c/{collection}/shards/{shard} + * paths. * - * Note that the V2 requests made by these tests are not necessarily semantically valid. They shouldn't be taken as - * examples. In several instances, mutually exclusive JSON parameters are provided. This is done to exercise conversion - * of all parameters, even if particular combinations are never expected in the same request. + *
<p>
Note that the V2 requests made by these tests are not necessarily semantically valid. They
+ * shouldn't be taken as examples. In several instances, mutually exclusive JSON parameters are
+ * provided. This is done to exercise conversion of all parameters, even if particular combinations
+ * are never expected in the same request.
  */
 public class V2ShardsAPIMappingTest {
-    private ApiBag apiBag;
+  private ApiBag apiBag;
 
-    private ArgumentCaptor<SolrQueryRequest> queryRequestCaptor;
-    private CollectionsHandler mockCollectionsHandler;
+  private ArgumentCaptor<SolrQueryRequest> queryRequestCaptor;
+  private CollectionsHandler mockCollectionsHandler;
 
-    @BeforeClass
-    public static void ensureWorkingMockito() {
-        assumeWorkingMockito();
-    }
+  @BeforeClass
+  public static void ensureWorkingMockito() {
+    assumeWorkingMockito();
+  }
 
-    @Before
-    public void setupApiBag() throws Exception {
-        mockCollectionsHandler = mock(CollectionsHandler.class);
-        queryRequestCaptor = ArgumentCaptor.forClass(SolrQueryRequest.class);
+  @Before
+  public void setupApiBag() throws Exception {
+    mockCollectionsHandler = mock(CollectionsHandler.class);
+    queryRequestCaptor = ArgumentCaptor.forClass(SolrQueryRequest.class);
 
-        apiBag = new ApiBag(false);
-        ApiRegistrar.registerShardApis(apiBag, mockCollectionsHandler);
-    }
+    apiBag = new ApiBag(false);
+    ApiRegistrar.registerShardApis(apiBag, mockCollectionsHandler);
+  }
 
-    @Test
-    public void testForceLeaderAllProperties() throws Exception {
-        final SolrParams v1Params = captureConvertedV1Params("/collections/collName/shards/shardName", "POST",
-            "{ 'force-leader': {}}");
+  @Test
+  public void testForceLeaderAllProperties() throws Exception {
+    final SolrParams v1Params =
+        captureConvertedV1Params(
+            "/collections/collName/shards/shardName", "POST", "{ 'force-leader': {}}");
 
-        assertEquals(CollectionParams.CollectionAction.FORCELEADER.lowerName, v1Params.get(ACTION));
-        assertEquals("collName", v1Params.get(COLLECTION));
-        assertEquals("shardName", v1Params.get(SHARD));
-    }
+    assertEquals(CollectionParams.CollectionAction.FORCELEADER.lowerName, v1Params.get(ACTION));
+    assertEquals("collName", v1Params.get(COLLECTION));
+    assertEquals("shardName", v1Params.get(SHARD));
+  }
 
-    @Test
-    public void testSyncShardAllProperties() throws Exception {
-        final SolrParams v1Params = captureConvertedV1Params("/collections/collName/shards/shardName", "POST",
-            "{ 'sync-shard': {}}");
+  @Test
+  public void testSyncShardAllProperties() throws Exception {
+    final SolrParams v1Params =
+        captureConvertedV1Params(
+            "/collections/collName/shards/shardName", "POST", "{ 'sync-shard': {}}");
 
-        assertEquals(CollectionParams.CollectionAction.SYNCSHARD.lowerName, v1Params.get(ACTION));
-        assertEquals("collName", v1Params.get(COLLECTION));
-        assertEquals("shardName", v1Params.get(SHARD));
-    }
+    assertEquals(CollectionParams.CollectionAction.SYNCSHARD.lowerName, v1Params.get(ACTION));
+    assertEquals("collName", v1Params.get(COLLECTION));
+    assertEquals("shardName", v1Params.get(SHARD));
+  }
 
-    @Test
-    public void testDeleteShardAllProperties() throws Exception {
-        final 
ModifiableSolrParams v2QueryParams = new ModifiableSolrParams(); + v2QueryParams.add("deleteIndex", "true"); + v2QueryParams.add("deleteDataDir", "true"); + v2QueryParams.add("deleteInstanceDir", "true"); + v2QueryParams.add("followAliases", "true"); + final SolrParams v1Params = + captureConvertedV1Params("/collections/collName/shards/shardName", "DELETE", v2QueryParams); - assertEquals(CollectionParams.CollectionAction.DELETESHARD.lowerName, v1Params.get(ACTION)); - assertEquals("collName", v1Params.get(COLLECTION)); - assertEquals("shardName", v1Params.get(SHARD)); - assertEquals("true", v1Params.get("deleteIndex")); - assertEquals("true", v1Params.get("deleteDataDir")); - assertEquals("true", v1Params.get("deleteInstanceDir")); - assertEquals("true", v1Params.get("followAliases")); - } + assertEquals(CollectionParams.CollectionAction.DELETESHARD.lowerName, v1Params.get(ACTION)); + assertEquals("collName", v1Params.get(COLLECTION)); + assertEquals("shardName", v1Params.get(SHARD)); + assertEquals("true", v1Params.get("deleteIndex")); + assertEquals("true", v1Params.get("deleteDataDir")); + assertEquals("true", v1Params.get("deleteInstanceDir")); + assertEquals("true", v1Params.get("followAliases")); + } - @Test - public void testSplitShardAllProperties() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/collections/collName/shards", "POST", - "{ 'split': {" + - "'shard': 'shard1', " + - "'ranges': 'someRangeValues', " + - "'splitKey': 'someSplitKey', " + - "'numSubShards': 123, " + - "'splitFuzz': 'some_fuzz_value', " + - "'timing': true, " + - "'splitByPrefix': true, " + - "'followAliases': true, " + - "'splitMethod': 'rewrite', " + - "'async': 'some_async_id', " + - "'waitForFinalState': true, " + - "'coreProperties': {" + - " 'foo': 'foo1', " + - " 'bar': 'bar1', " + - "}}}"); + @Test + public void testSplitShardAllProperties() throws Exception { + final SolrParams v1Params = + captureConvertedV1Params( + "/collections/collName/shards", + "POST", + "{ 'split': {" + + "'shard': 'shard1', " + + "'ranges': 'someRangeValues', " + + "'splitKey': 'someSplitKey', " + + "'numSubShards': 123, " + + "'splitFuzz': 'some_fuzz_value', " + + "'timing': true, " + + "'splitByPrefix': true, " + + "'followAliases': true, " + + "'splitMethod': 'rewrite', " + + "'async': 'some_async_id', " + + "'waitForFinalState': true, " + + "'coreProperties': {" + + " 'foo': 'foo1', " + + " 'bar': 'bar1', " + + "}}}"); - assertEquals(CollectionParams.CollectionAction.SPLITSHARD.lowerName, v1Params.get(ACTION)); - assertEquals("collName", v1Params.get(COLLECTION)); - assertEquals("shard1", v1Params.get(SHARD_ID_PROP)); - assertEquals("someRangeValues", v1Params.get(CoreAdminParams.RANGES)); - assertEquals("someSplitKey", v1Params.get(SPLIT_KEY)); - assertEquals(123, v1Params.getPrimitiveInt(NUM_SUB_SHARDS)); - assertEquals("some_fuzz_value", v1Params.get(SPLIT_FUZZ)); - assertEquals(true, v1Params.getPrimitiveBool(TIMING)); - assertEquals(true, v1Params.getPrimitiveBool(SPLIT_BY_PREFIX)); - assertEquals(true, v1Params.getPrimitiveBool(FOLLOW_ALIASES)); - assertEquals("rewrite", v1Params.get(SPLIT_METHOD)); - assertEquals("some_async_id", v1Params.get(ASYNC)); - assertEquals(true, v1Params.getPrimitiveBool(WAIT_FOR_FINAL_STATE)); - assertEquals("foo1", v1Params.get("property.foo")); - assertEquals("bar1", v1Params.get("property.bar")); - } + assertEquals(CollectionParams.CollectionAction.SPLITSHARD.lowerName, v1Params.get(ACTION)); + assertEquals("collName", v1Params.get(COLLECTION)); + 
assertEquals("shard1", v1Params.get(SHARD_ID_PROP)); + assertEquals("someRangeValues", v1Params.get(CoreAdminParams.RANGES)); + assertEquals("someSplitKey", v1Params.get(SPLIT_KEY)); + assertEquals(123, v1Params.getPrimitiveInt(NUM_SUB_SHARDS)); + assertEquals("some_fuzz_value", v1Params.get(SPLIT_FUZZ)); + assertEquals(true, v1Params.getPrimitiveBool(TIMING)); + assertEquals(true, v1Params.getPrimitiveBool(SPLIT_BY_PREFIX)); + assertEquals(true, v1Params.getPrimitiveBool(FOLLOW_ALIASES)); + assertEquals("rewrite", v1Params.get(SPLIT_METHOD)); + assertEquals("some_async_id", v1Params.get(ASYNC)); + assertEquals(true, v1Params.getPrimitiveBool(WAIT_FOR_FINAL_STATE)); + assertEquals("foo1", v1Params.get("property.foo")); + assertEquals("bar1", v1Params.get("property.bar")); + } - @Test - public void testCreateShardAllProperties() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/collections/collName/shards", "POST", - "{ 'create': {" + - "'shard': 'shard1', " + - "'nodeSet': ['foo', 'bar', 'baz'], " + - "'followAliases': true, " + - "'async': 'some_async_id', " + - "'waitForFinalState': true, " + - "'replicationFactor': 123, " + - "'nrtReplicas': 456, " + - "'tlogReplicas': 789, " + - "'pullReplicas': 101, " + - "'coreProperties': {" + - " 'foo': 'foo1', " + - " 'bar': 'bar1', " + - "}}}"); + @Test + public void testCreateShardAllProperties() throws Exception { + final SolrParams v1Params = + captureConvertedV1Params( + "/collections/collName/shards", + "POST", + "{ 'create': {" + + "'shard': 'shard1', " + + "'nodeSet': ['foo', 'bar', 'baz'], " + + "'followAliases': true, " + + "'async': 'some_async_id', " + + "'waitForFinalState': true, " + + "'replicationFactor': 123, " + + "'nrtReplicas': 456, " + + "'tlogReplicas': 789, " + + "'pullReplicas': 101, " + + "'coreProperties': {" + + " 'foo': 'foo1', " + + " 'bar': 'bar1', " + + "}}}"); - assertEquals(CollectionParams.CollectionAction.CREATESHARD.lowerName, v1Params.get(ACTION)); - assertEquals("collName", v1Params.get(COLLECTION)); - assertEquals("shard1", v1Params.get(SHARD_ID_PROP)); - assertEquals("foo,bar,baz", v1Params.get(CREATE_NODE_SET_PARAM)); - assertEquals(true, v1Params.getPrimitiveBool(FOLLOW_ALIASES)); - assertEquals("some_async_id", v1Params.get(ASYNC)); - assertEquals(true, v1Params.getPrimitiveBool(WAIT_FOR_FINAL_STATE)); - assertEquals(123, v1Params.getPrimitiveInt(REPLICATION_FACTOR)); - assertEquals(456, v1Params.getPrimitiveInt(NRT_REPLICAS)); - assertEquals(789, v1Params.getPrimitiveInt(TLOG_REPLICAS)); - assertEquals(101, v1Params.getPrimitiveInt(PULL_REPLICAS)); - assertEquals("foo1", v1Params.get("property.foo")); - assertEquals("bar1", v1Params.get("property.bar")); - } + assertEquals(CollectionParams.CollectionAction.CREATESHARD.lowerName, v1Params.get(ACTION)); + assertEquals("collName", v1Params.get(COLLECTION)); + assertEquals("shard1", v1Params.get(SHARD_ID_PROP)); + assertEquals("foo,bar,baz", v1Params.get(CREATE_NODE_SET_PARAM)); + assertEquals(true, v1Params.getPrimitiveBool(FOLLOW_ALIASES)); + assertEquals("some_async_id", v1Params.get(ASYNC)); + assertEquals(true, v1Params.getPrimitiveBool(WAIT_FOR_FINAL_STATE)); + assertEquals(123, v1Params.getPrimitiveInt(REPLICATION_FACTOR)); + assertEquals(456, v1Params.getPrimitiveInt(NRT_REPLICAS)); + assertEquals(789, v1Params.getPrimitiveInt(TLOG_REPLICAS)); + assertEquals(101, v1Params.getPrimitiveInt(PULL_REPLICAS)); + assertEquals("foo1", v1Params.get("property.foo")); + assertEquals("bar1", v1Params.get("property.bar")); + } - @Test - 
public void testAddReplicaAllProperties() throws Exception { - final SolrParams v1Params = captureConvertedV1Params("/collections/collName/shards", "POST", - "{ 'add-replica': {" + - "'shard': 'shard1', " + - "'_route_': 'someRouteValue', " + - "'node': 'someNodeValue', " + - "'name': 'someName', " + - "'instanceDir': 'dir1', " + - "'dataDir': 'dir2', " + - "'ulogDir': 'dir3', " + - "'createNodeSet': ['foo', 'bar', 'baz'], " + - "'followAliases': true, " + - "'async': 'some_async_id', " + - "'waitForFinalState': true, " + - "'skipNodeAssignment': true, " + - "'type': 'tlog', " + - "'coreProperties': {" + - " 'foo': 'foo1', " + - " 'bar': 'bar1', " + - "}}}"); + @Test + public void testAddReplicaAllProperties() throws Exception { + final SolrParams v1Params = + captureConvertedV1Params( + "/collections/collName/shards", + "POST", + "{ 'add-replica': {" + + "'shard': 'shard1', " + + "'_route_': 'someRouteValue', " + + "'node': 'someNodeValue', " + + "'name': 'someName', " + + "'instanceDir': 'dir1', " + + "'dataDir': 'dir2', " + + "'ulogDir': 'dir3', " + + "'createNodeSet': ['foo', 'bar', 'baz'], " + + "'followAliases': true, " + + "'async': 'some_async_id', " + + "'waitForFinalState': true, " + + "'skipNodeAssignment': true, " + + "'type': 'tlog', " + + "'coreProperties': {" + + " 'foo': 'foo1', " + + " 'bar': 'bar1', " + + "}}}"); - assertEquals(CollectionParams.CollectionAction.ADDREPLICA.lowerName, v1Params.get(ACTION)); - assertEquals("collName", v1Params.get(COLLECTION)); - assertEquals("shard1", v1Params.get(SHARD_ID_PROP)); - assertEquals("someRouteValue", v1Params.get("_route_")); - assertEquals("someNodeValue", v1Params.get("node")); - assertEquals("foo,bar,baz", v1Params.get(CREATE_NODE_SET_PARAM)); - assertEquals("someName", v1Params.get(NAME)); - assertEquals("dir1", v1Params.get("instanceDir")); - assertEquals("dir2", v1Params.get("dataDir")); - assertEquals("dir3", v1Params.get("ulogDir")); - assertEquals(true, v1Params.getPrimitiveBool(FOLLOW_ALIASES)); - assertEquals("some_async_id", v1Params.get(ASYNC)); - assertEquals(true, v1Params.getPrimitiveBool(WAIT_FOR_FINAL_STATE)); - assertEquals(true, v1Params.getPrimitiveBool("skipNodeAssignment")); - assertEquals("tlog", v1Params.get("type")); - assertEquals("foo1", v1Params.get("property.foo")); - assertEquals("bar1", v1Params.get("property.bar")); - } + assertEquals(CollectionParams.CollectionAction.ADDREPLICA.lowerName, v1Params.get(ACTION)); + assertEquals("collName", v1Params.get(COLLECTION)); + assertEquals("shard1", v1Params.get(SHARD_ID_PROP)); + assertEquals("someRouteValue", v1Params.get("_route_")); + assertEquals("someNodeValue", v1Params.get("node")); + assertEquals("foo,bar,baz", v1Params.get(CREATE_NODE_SET_PARAM)); + assertEquals("someName", v1Params.get(NAME)); + assertEquals("dir1", v1Params.get("instanceDir")); + assertEquals("dir2", v1Params.get("dataDir")); + assertEquals("dir3", v1Params.get("ulogDir")); + assertEquals(true, v1Params.getPrimitiveBool(FOLLOW_ALIASES)); + assertEquals("some_async_id", v1Params.get(ASYNC)); + assertEquals(true, v1Params.getPrimitiveBool(WAIT_FOR_FINAL_STATE)); + assertEquals(true, v1Params.getPrimitiveBool("skipNodeAssignment")); + assertEquals("tlog", v1Params.get("type")); + assertEquals("foo1", v1Params.get("property.foo")); + assertEquals("bar1", v1Params.get("property.bar")); + } - // really this is a replica API, but since there's only 1 API on the replica path, it's included here for simplicity. 
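For orientation before the replica test below: captureConvertedV1Params resolves the V2 path against the ApiBag that setupApiBag() populated via ApiRegistrar.registerShardApis, and ApiBag.lookup writes the path-template values into the supplied map, which the anonymous request then exposes through getPathTemplateValues(). A rough sketch of that lookup with illustrative literals; the template key names are assumptions inferred from the assertions in this class, not verified against the registered path specs:

    // Sketch: resolving a templated V2 path and recovering its path variables.
    final HashMap<String, String> parts = new HashMap<>();
    final Api api =
        apiBag.lookup("/collections/collName/shards/shard1/someReplica", "DELETE", parts);
    // parts is now expected to hold {collection=collName, shard=shard1, replica=someReplica},
    // which the API implementation maps onto the V1 collection/shard/replica parameters.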
-    @Test
-    public void testDeleteReplicaAllProperties() throws Exception {
-        final ModifiableSolrParams v2QueryParams = new ModifiableSolrParams();
-        v2QueryParams.add("deleteIndex", "true");
-        v2QueryParams.add("deleteDataDir", "true");
-        v2QueryParams.add("deleteInstanceDir", "true");
-        v2QueryParams.add("followAliases", "true");
-        v2QueryParams.add("count", "4");
-        v2QueryParams.add("onlyIfDown", "true");
-        final SolrParams v1Params = captureConvertedV1Params("/collections/collName/shards/shard1/someReplica",
-            "DELETE", v2QueryParams);
+  // really this is a replica API, but since there's only 1 API on the replica path, it's included
+  // here for simplicity.
+  @Test
+  public void testDeleteReplicaAllProperties() throws Exception {
+    final ModifiableSolrParams v2QueryParams = new ModifiableSolrParams();
+    v2QueryParams.add("deleteIndex", "true");
+    v2QueryParams.add("deleteDataDir", "true");
+    v2QueryParams.add("deleteInstanceDir", "true");
+    v2QueryParams.add("followAliases", "true");
+    v2QueryParams.add("count", "4");
+    v2QueryParams.add("onlyIfDown", "true");
+    final SolrParams v1Params =
+        captureConvertedV1Params(
+            "/collections/collName/shards/shard1/someReplica", "DELETE", v2QueryParams);
 
-        assertEquals(CollectionParams.CollectionAction.DELETEREPLICA.lowerName, v1Params.get(ACTION).toLowerCase(Locale.ROOT));
-        assertEquals("collName", v1Params.get(COLLECTION));
-        assertEquals("shard1", v1Params.get(SHARD_ID_PROP));
-        assertEquals("someReplica", v1Params.get(REPLICA));
-        assertEquals("true", v1Params.get(DELETE_INDEX));
-        assertEquals("true", v1Params.get(DELETE_DATA_DIR));
-        assertEquals("true", v1Params.get(DELETE_INSTANCE_DIR));
-        assertEquals("true", v1Params.get(FOLLOW_ALIASES));
-        assertEquals("4", v1Params.get(COUNT_PROP));
-        assertEquals("true", v1Params.get(ONLY_IF_DOWN));
-    }
+    assertEquals(
+        CollectionParams.CollectionAction.DELETEREPLICA.lowerName,
+        v1Params.get(ACTION).toLowerCase(Locale.ROOT));
+    assertEquals("collName", v1Params.get(COLLECTION));
+    assertEquals("shard1", v1Params.get(SHARD_ID_PROP));
+    assertEquals("someReplica", v1Params.get(REPLICA));
+    assertEquals("true", v1Params.get(DELETE_INDEX));
+    assertEquals("true", v1Params.get(DELETE_DATA_DIR));
+    assertEquals("true", v1Params.get(DELETE_INSTANCE_DIR));
+    assertEquals("true", v1Params.get(FOLLOW_ALIASES));
+    assertEquals("4", v1Params.get(COUNT_PROP));
+    assertEquals("true", v1Params.get(ONLY_IF_DOWN));
+  }
 
-    private SolrParams captureConvertedV1Params(String path, String method, SolrParams queryParams) throws Exception {
-        return captureConvertedV1Params(path, method, queryParams, null);
-    }
+  private SolrParams captureConvertedV1Params(String path, String method, SolrParams queryParams)
+      throws Exception {
+    return captureConvertedV1Params(path, method, queryParams, null);
+  }
 
-    private SolrParams captureConvertedV1Params(String path, String method, String v2RequestBody) throws Exception {
-        return captureConvertedV1Params(path, method, new ModifiableSolrParams(), v2RequestBody);
-    }
+  private SolrParams captureConvertedV1Params(String path, String method, String v2RequestBody)
+      throws Exception {
+    return captureConvertedV1Params(path, method, new ModifiableSolrParams(), v2RequestBody);
+  }
 
-    private SolrParams captureConvertedV1Params(String path, String method, SolrParams queryParams, String v2RequestBody) throws Exception {
-        final HashMap<String, String> parts = new HashMap<>();
-        final Api api = apiBag.lookup(path, method, parts);
-        final SolrQueryResponse rsp = new SolrQueryResponse();
-        final LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, queryParams) {
-            @Override
-            public List<CommandOperation> getCommands(boolean validateInput) {
-                if (v2RequestBody == null) return Collections.emptyList();
-                return ApiBag.getCommandOperations(new ContentStreamBase.StringStream(v2RequestBody), api.getCommandSchema(), true);
-            }
+  private SolrParams captureConvertedV1Params(
+      String path, String method, SolrParams queryParams, String v2RequestBody) throws Exception {
+    final HashMap<String, String> parts = new HashMap<>();
+    final Api api = apiBag.lookup(path, method, parts);
+    final SolrQueryResponse rsp = new SolrQueryResponse();
+    final LocalSolrQueryRequest req =
+        new LocalSolrQueryRequest(null, queryParams) {
+          @Override
+          public List<CommandOperation> getCommands(boolean validateInput) {
+            if (v2RequestBody == null) return Collections.emptyList();
+            return ApiBag.getCommandOperations(
+                new ContentStreamBase.StringStream(v2RequestBody), api.getCommandSchema(), true);
+          }
 
-            @Override
-            public Map<String, String> getPathTemplateValues() {
-                return parts;
-            }
+          @Override
+          public Map<String, String> getPathTemplateValues() {
+            return parts;
+          }
 
-            @Override
-            public String getHttpMethod() {
-                return method;
-            }
+          @Override
+          public String getHttpMethod() {
+            return method;
+          }
        };
-        api.call(req, rsp);
-        verify(mockCollectionsHandler).handleRequestBody(queryRequestCaptor.capture(), any());
-        return queryRequestCaptor.getValue().getParams();
-    }
+    api.call(req, rsp);
+    verify(mockCollectionsHandler).handleRequestBody(queryRequestCaptor.capture(), any());
+    return queryRequestCaptor.getValue().getParams();
+  }
 }
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/api/package-info.java b/solr/core/src/test/org/apache/solr/handler/admin/api/package-info.java
index 4282156900f..eb4e334bff9 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/api/package-info.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/api/package-info.java
@@ -15,7 +15,5 @@
  * limitations under the License.
  */
 
-/**
- * Unit tests for v2 "admin" API implementations.
- */
-package org.apache.solr.handler.admin.api;
\ No newline at end of file
+/** Unit tests for v2 "admin" API implementations. 
*/ +package org.apache.solr.handler.admin.api; diff --git a/solr/core/src/test/org/apache/solr/handler/component/BadComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/BadComponentTest.java index c09b5bbb18e..e6fd6fb3747 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/BadComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/BadComponentTest.java @@ -16,16 +16,12 @@ */ package org.apache.solr.handler.component; - import org.apache.solr.SolrTestCaseJ4; import org.junit.After; import org.junit.Test; -/** - * SOLR-1730, tests what happens when a component fails to initialize properly - * - **/ -public class BadComponentTest extends SolrTestCaseJ4{ +/** SOLR-1730, tests what happens when a component fails to initialize properly */ +public class BadComponentTest extends SolrTestCaseJ4 { @Test public void testBadElevate() throws Exception { try { diff --git a/solr/core/src/test/org/apache/solr/handler/component/CloudReplicaSourceTest.java b/solr/core/src/test/org/apache/solr/handler/component/CloudReplicaSourceTest.java index 88c4ced7888..f52aefbb42c 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/CloudReplicaSourceTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/CloudReplicaSourceTest.java @@ -18,7 +18,6 @@ package org.apache.solr.handler.component; import java.util.List; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.routing.ReplicaListTransformer; import org.apache.solr.cloud.ClusterStateMockUtil; @@ -29,9 +28,7 @@ import org.junit.Test; import org.mockito.Mockito; -/** - * Tests for {@link CloudReplicaSource} - */ +/** Tests for {@link CloudReplicaSource} */ @SolrTestCaseJ4.SuppressSSL // lots of assumptions about http: in this test public class CloudReplicaSourceTest extends SolrTestCaseJ4 { @@ -46,21 +43,25 @@ public void testSimple_ShardsParam() { AllowListUrlChecker checker = Mockito.mock(AllowListUrlChecker.class); ModifiableSolrParams params = new ModifiableSolrParams(); params.set("shards", "slice1,slice2"); - try (ZkStateReader zkStateReader = ClusterStateMockUtil.buildClusterState("csr*sr2", "baseUrl1:8983_", "baseUrl2:8984_")) { - CloudReplicaSource cloudReplicaSource = new CloudReplicaSource.Builder() - .collection("collection1") - .onlyNrt(false) - .zkStateReader(zkStateReader) - .replicaListTransformer(replicaListTransformer) - .allowListUrlChecker(checker) - .params(params) - .build(); + try (ZkStateReader zkStateReader = + ClusterStateMockUtil.buildClusterState("csr*sr2", "baseUrl1:8983_", "baseUrl2:8984_")) { + CloudReplicaSource cloudReplicaSource = + new CloudReplicaSource.Builder() + .collection("collection1") + .onlyNrt(false) + .zkStateReader(zkStateReader) + .replicaListTransformer(replicaListTransformer) + .allowListUrlChecker(checker) + .params(params) + .build(); assertEquals(2, cloudReplicaSource.getSliceCount()); assertEquals(2, cloudReplicaSource.getSliceNames().size()); assertEquals(1, cloudReplicaSource.getReplicasBySlice(0).size()); - assertEquals("http://baseUrl1:8983/slice1_replica1/", cloudReplicaSource.getReplicasBySlice(0).get(0)); + assertEquals( + "http://baseUrl1:8983/slice1_replica1/", cloudReplicaSource.getReplicasBySlice(0).get(0)); assertEquals(1, cloudReplicaSource.getReplicasBySlice(1).size()); - assertEquals("http://baseUrl2:8984/slice2_replica2/", cloudReplicaSource.getReplicasBySlice(1).get(0)); + assertEquals( + "http://baseUrl2:8984/slice2_replica2/", cloudReplicaSource.getReplicasBySlice(1).get(0)); } } @@ -71,19 
+72,22 @@ public void testShardsParam_DeadNode() { ModifiableSolrParams params = new ModifiableSolrParams(); params.set("shards", "slice1,slice2"); // here node2 is not live so there should be no replicas found for slice2 - try (ZkStateReader zkStateReader = ClusterStateMockUtil.buildClusterState("csr*sr2", "baseUrl1:8983_")) { - CloudReplicaSource cloudReplicaSource = new CloudReplicaSource.Builder() - .collection("collection1") - .onlyNrt(false) - .zkStateReader(zkStateReader) - .replicaListTransformer(replicaListTransformer) - .allowListUrlChecker(checker) - .params(params) - .build(); + try (ZkStateReader zkStateReader = + ClusterStateMockUtil.buildClusterState("csr*sr2", "baseUrl1:8983_")) { + CloudReplicaSource cloudReplicaSource = + new CloudReplicaSource.Builder() + .collection("collection1") + .onlyNrt(false) + .zkStateReader(zkStateReader) + .replicaListTransformer(replicaListTransformer) + .allowListUrlChecker(checker) + .params(params) + .build(); assertEquals(2, cloudReplicaSource.getSliceCount()); assertEquals(2, cloudReplicaSource.getSliceNames().size()); assertEquals(1, cloudReplicaSource.getReplicasBySlice(0).size()); - assertEquals("http://baseUrl1:8983/slice1_replica1/", cloudReplicaSource.getReplicasBySlice(0).get(0)); + assertEquals( + "http://baseUrl1:8983/slice1_replica1/", cloudReplicaSource.getReplicasBySlice(0).get(0)); assertEquals(0, cloudReplicaSource.getReplicasBySlice(1).size()); } } @@ -95,22 +99,27 @@ public void testShardsParam_DownReplica() { ModifiableSolrParams params = new ModifiableSolrParams(); params.set("shards", "slice1,slice2"); // here replica3 is in DOWN state so only 1 replica should be returned for slice2 - try (ZkStateReader zkStateReader = ClusterStateMockUtil.buildClusterState("csr*sr2r3D", "baseUrl1:8983_", "baseUrl2:8984_", "baseUrl3:8985_")) { - CloudReplicaSource cloudReplicaSource = new CloudReplicaSource.Builder() - .collection("collection1") - .onlyNrt(false) - .zkStateReader(zkStateReader) - .replicaListTransformer(replicaListTransformer) - .allowListUrlChecker(checker) - .params(params) - .build(); + try (ZkStateReader zkStateReader = + ClusterStateMockUtil.buildClusterState( + "csr*sr2r3D", "baseUrl1:8983_", "baseUrl2:8984_", "baseUrl3:8985_")) { + CloudReplicaSource cloudReplicaSource = + new CloudReplicaSource.Builder() + .collection("collection1") + .onlyNrt(false) + .zkStateReader(zkStateReader) + .replicaListTransformer(replicaListTransformer) + .allowListUrlChecker(checker) + .params(params) + .build(); assertEquals(2, cloudReplicaSource.getSliceCount()); assertEquals(2, cloudReplicaSource.getSliceNames().size()); assertEquals(1, cloudReplicaSource.getReplicasBySlice(0).size()); - assertEquals("http://baseUrl1:8983/slice1_replica1/", cloudReplicaSource.getReplicasBySlice(0).get(0)); + assertEquals( + "http://baseUrl1:8983/slice1_replica1/", cloudReplicaSource.getReplicasBySlice(0).get(0)); assertEquals(1, cloudReplicaSource.getReplicasBySlice(1).size()); assertEquals(1, cloudReplicaSource.getReplicasBySlice(1).size()); - assertEquals("http://baseUrl2:8984/slice2_replica2/", cloudReplicaSource.getReplicasBySlice(1).get(0)); + assertEquals( + "http://baseUrl2:8984/slice2_replica2/", cloudReplicaSource.getReplicasBySlice(1).get(0)); } } @@ -120,15 +129,17 @@ public void testMultipleCollections() { AllowListUrlChecker checker = Mockito.mock(AllowListUrlChecker.class); ModifiableSolrParams params = new ModifiableSolrParams(); params.set("collection", "collection1,collection2"); - try (ZkStateReader zkStateReader = 
ClusterStateMockUtil.buildClusterState("csr*sr2csr*", "baseUrl1:8983_", "baseUrl2:8984_")) { - CloudReplicaSource cloudReplicaSource = new CloudReplicaSource.Builder() - .collection("collection1") - .onlyNrt(false) - .zkStateReader(zkStateReader) - .replicaListTransformer(replicaListTransformer) - .allowListUrlChecker(checker) - .params(params) - .build(); + try (ZkStateReader zkStateReader = + ClusterStateMockUtil.buildClusterState("csr*sr2csr*", "baseUrl1:8983_", "baseUrl2:8984_")) { + CloudReplicaSource cloudReplicaSource = + new CloudReplicaSource.Builder() + .collection("collection1") + .onlyNrt(false) + .zkStateReader(zkStateReader) + .replicaListTransformer(replicaListTransformer) + .allowListUrlChecker(checker) + .params(params) + .build(); assertEquals(3, cloudReplicaSource.getSliceCount()); List sliceNames = cloudReplicaSource.getSliceNames(); assertEquals(3, sliceNames.size()); @@ -136,17 +147,24 @@ public void testMultipleCollections() { String sliceName = sliceNames.get(i); assertEquals(1, cloudReplicaSource.getReplicasBySlice(i).size()); - // need a switch here because unlike the testShards* tests which always returns slices in the order they were specified, - // using the collection param can return slice names in any order + // need a switch here because unlike the testShards* tests which always returns slices in + // the order they were specified, using the collection param can return slice names in any + // order switch (sliceName) { case "collection1_slice1": - assertEquals("http://baseUrl1:8983/slice1_replica1/", cloudReplicaSource.getReplicasBySlice(i).get(0)); + assertEquals( + "http://baseUrl1:8983/slice1_replica1/", + cloudReplicaSource.getReplicasBySlice(i).get(0)); break; case "collection1_slice2": - assertEquals("http://baseUrl2:8984/slice2_replica2/", cloudReplicaSource.getReplicasBySlice(i).get(0)); + assertEquals( + "http://baseUrl2:8984/slice2_replica2/", + cloudReplicaSource.getReplicasBySlice(i).get(0)); break; case "collection2_slice1": - assertEquals("http://baseUrl1:8983/slice1_replica3/", cloudReplicaSource.getReplicasBySlice(i).get(0)); + assertEquals( + "http://baseUrl1:8983/slice1_replica3/", + cloudReplicaSource.getReplicasBySlice(i).get(0)); break; } } @@ -158,15 +176,17 @@ public void testSimple_UsingClusterState() { ReplicaListTransformer replicaListTransformer = Mockito.mock(ReplicaListTransformer.class); AllowListUrlChecker checker = Mockito.mock(AllowListUrlChecker.class); ModifiableSolrParams params = new ModifiableSolrParams(); - try (ZkStateReader zkStateReader = ClusterStateMockUtil.buildClusterState("csr*sr2", "baseUrl1:8983_", "baseUrl2:8984_")) { - CloudReplicaSource cloudReplicaSource = new CloudReplicaSource.Builder() - .collection("collection1") - .onlyNrt(false) - .zkStateReader(zkStateReader) - .replicaListTransformer(replicaListTransformer) - .allowListUrlChecker(checker) - .params(params) - .build(); + try (ZkStateReader zkStateReader = + ClusterStateMockUtil.buildClusterState("csr*sr2", "baseUrl1:8983_", "baseUrl2:8984_")) { + CloudReplicaSource cloudReplicaSource = + new CloudReplicaSource.Builder() + .collection("collection1") + .onlyNrt(false) + .zkStateReader(zkStateReader) + .replicaListTransformer(replicaListTransformer) + .allowListUrlChecker(checker) + .params(params) + .build(); assertEquals(2, cloudReplicaSource.getSliceCount()); List sliceNames = cloudReplicaSource.getSliceNames(); assertEquals(2, sliceNames.size()); @@ -177,10 +197,14 @@ public void testSimple_UsingClusterState() { // need to switch because without a 
shards param, the order of slices is not deterministic switch (sliceName) { case "slice1": - assertEquals("http://baseUrl1:8983/slice1_replica1/", cloudReplicaSource.getReplicasBySlice(i).get(0)); + assertEquals( + "http://baseUrl1:8983/slice1_replica1/", + cloudReplicaSource.getReplicasBySlice(i).get(0)); break; case "slice2": - assertEquals("http://baseUrl2:8984/slice2_replica2/", cloudReplicaSource.getReplicasBySlice(i).get(0)); + assertEquals( + "http://baseUrl2:8984/slice2_replica2/", + cloudReplicaSource.getReplicasBySlice(i).get(0)); break; } } @@ -192,16 +216,19 @@ public void testSimple_OnlyNrt() { ReplicaListTransformer replicaListTransformer = Mockito.mock(ReplicaListTransformer.class); AllowListUrlChecker checker = Mockito.mock(AllowListUrlChecker.class); ModifiableSolrParams params = new ModifiableSolrParams(); - // the cluster state will have slice2 with two tlog replicas out of which the first one will be the leader - try (ZkStateReader zkStateReader = ClusterStateMockUtil.buildClusterState("csrr*st2t2", "baseUrl1:8983_", "baseUrl2:8984_")) { - CloudReplicaSource cloudReplicaSource = new CloudReplicaSource.Builder() - .collection("collection1") - .onlyNrt(true) // enable only nrt mode - .zkStateReader(zkStateReader) - .replicaListTransformer(replicaListTransformer) - .allowListUrlChecker(checker) - .params(params) - .build(); + // the cluster state will have slice2 with two tlog replicas out of which the first one will be + // the leader + try (ZkStateReader zkStateReader = + ClusterStateMockUtil.buildClusterState("csrr*st2t2", "baseUrl1:8983_", "baseUrl2:8984_")) { + CloudReplicaSource cloudReplicaSource = + new CloudReplicaSource.Builder() + .collection("collection1") + .onlyNrt(true) // enable only nrt mode + .zkStateReader(zkStateReader) + .replicaListTransformer(replicaListTransformer) + .allowListUrlChecker(checker) + .params(params) + .build(); assertEquals(2, cloudReplicaSource.getSliceCount()); List sliceNames = cloudReplicaSource.getSliceNames(); assertEquals(2, sliceNames.size()); @@ -211,11 +238,15 @@ public void testSimple_OnlyNrt() { switch (sliceName) { case "slice1": assertEquals(2, cloudReplicaSource.getReplicasBySlice(i).size()); - assertEquals("http://baseUrl1:8983/slice1_replica1/", cloudReplicaSource.getReplicasBySlice(i).get(0)); + assertEquals( + "http://baseUrl1:8983/slice1_replica1/", + cloudReplicaSource.getReplicasBySlice(i).get(0)); break; case "slice2": assertEquals(1, cloudReplicaSource.getReplicasBySlice(i).size()); - assertEquals("http://baseUrl2:8984/slice2_replica3/", cloudReplicaSource.getReplicasBySlice(i).get(0)); + assertEquals( + "http://baseUrl2:8984/slice2_replica3/", + cloudReplicaSource.getReplicasBySlice(i).get(0)); break; } } @@ -228,17 +259,21 @@ public void testMultipleCollections_OnlyNrt() { AllowListUrlChecker checker = Mockito.mock(AllowListUrlChecker.class); ModifiableSolrParams params = new ModifiableSolrParams(); params.set("collection", "collection1,collection2"); - // the cluster state will have collection1 with slice2 with two tlog replicas out of which the first one will be the leader - // and collection2 with just a single slice and a tlog replica that will be leader - try (ZkStateReader zkStateReader = ClusterStateMockUtil.buildClusterState("csrr*st2t2cst", "baseUrl1:8983_", "baseUrl2:8984_")) { - CloudReplicaSource cloudReplicaSource = new CloudReplicaSource.Builder() - .collection("collection1") - .onlyNrt(true) // enable only nrt mode - .zkStateReader(zkStateReader) - 
.replicaListTransformer(replicaListTransformer) - .allowListUrlChecker(checker) - .params(params) - .build(); + // the cluster state will have collection1 with slice2 with two tlog replicas out of which the + // first one will be the leader and collection2 with just a single slice and a tlog replica that + // will be leader + try (ZkStateReader zkStateReader = + ClusterStateMockUtil.buildClusterState( + "csrr*st2t2cst", "baseUrl1:8983_", "baseUrl2:8984_")) { + CloudReplicaSource cloudReplicaSource = + new CloudReplicaSource.Builder() + .collection("collection1") + .onlyNrt(true) // enable only nrt mode + .zkStateReader(zkStateReader) + .replicaListTransformer(replicaListTransformer) + .allowListUrlChecker(checker) + .params(params) + .build(); assertEquals(3, cloudReplicaSource.getSliceCount()); List sliceNames = cloudReplicaSource.getSliceNames(); assertEquals(3, sliceNames.size()); @@ -248,15 +283,21 @@ public void testMultipleCollections_OnlyNrt() { switch (sliceName) { case "collection1_slice1": assertEquals(2, cloudReplicaSource.getReplicasBySlice(i).size()); - assertEquals("http://baseUrl1:8983/slice1_replica1/", cloudReplicaSource.getReplicasBySlice(i).get(0)); + assertEquals( + "http://baseUrl1:8983/slice1_replica1/", + cloudReplicaSource.getReplicasBySlice(i).get(0)); break; case "collection1_slice2": assertEquals(1, cloudReplicaSource.getReplicasBySlice(i).size()); - assertEquals("http://baseUrl2:8984/slice2_replica3/", cloudReplicaSource.getReplicasBySlice(i).get(0)); + assertEquals( + "http://baseUrl2:8984/slice2_replica3/", + cloudReplicaSource.getReplicasBySlice(i).get(0)); break; case "collection2_slice1": assertEquals(1, cloudReplicaSource.getReplicasBySlice(i).size()); - assertEquals("http://baseUrl1:8983/slice1_replica5/", cloudReplicaSource.getReplicasBySlice(i).get(0)); + assertEquals( + "http://baseUrl1:8983/slice1_replica5/", + cloudReplicaSource.getReplicasBySlice(i).get(0)); break; } } diff --git a/solr/core/src/test/org/apache/solr/handler/component/CustomHighlightComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/CustomHighlightComponentTest.java index 016df2fb105..4f489f94431 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/CustomHighlightComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/CustomHighlightComponentTest.java @@ -22,7 +22,6 @@ import java.util.List; import java.util.Map; import java.util.Set; - import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.QueryRequest; @@ -52,11 +51,11 @@ protected String highlightingResponseField() { @Override protected Object convertHighlights(NamedList hl) { final ArrayList> hlMaps = new ArrayList<>(); - for (int i=0; i hlMap = new SimpleOrderedMap<>(); - hlMap.add(id_key, hl.getName(i)); - hlMap.add(snippets_key, hl.getVal(i)); - hlMaps.add(hlMap); + for (int i = 0; i < hl.size(); ++i) { + SimpleOrderedMap hlMap = new SimpleOrderedMap<>(); + hlMap.add(id_key, hl.getName(i)); + hlMap.add(snippets_key, hl.getVal(i)); + hlMaps.add(hlMap); } return hlMaps; } @@ -68,11 +67,11 @@ protected Object[] newHighlightsArray(int size) { @Override protected void addHighlights(Object[] objArr, Object obj, Map resultIds) { - SimpleOrderedMap[] mapArr = (SimpleOrderedMap[])objArr; + SimpleOrderedMap[] mapArr = (SimpleOrderedMap[]) objArr; @SuppressWarnings("unchecked") - final ArrayList> hlMaps = (ArrayList>)obj; + final ArrayList> hlMaps = (ArrayList>) obj; for 
(SimpleOrderedMap hlMap : hlMaps) { - String id = (String)hlMap.get(id_key); + String id = (String) hlMap.get(id_key); ShardDoc sdoc = resultIds.get(id); int idx = sdoc.positionInResponse; mapArr[idx] = hlMap; @@ -81,7 +80,7 @@ protected void addHighlights(Object[] objArr, Object obj, Map @Override protected Object getAllHighlights(Object[] objArr) { - final SimpleOrderedMap[] mapArr = (SimpleOrderedMap[])objArr; + final SimpleOrderedMap[] mapArr = (SimpleOrderedMap[]) objArr; // remove nulls in case not all docs were able to be retrieved ArrayList> mapList = new ArrayList<>(); for (SimpleOrderedMap map : mapArr) { @@ -91,7 +90,6 @@ protected Object getAllHighlights(Object[] objArr) { } return mapList; } - } protected String customHighlightComponentClassName() { @@ -107,30 +105,27 @@ protected String customHighlightComponentClassName() { public static void setupCluster() throws Exception { // decide collection name ... - COLLECTION = "collection"+(1+random().nextInt(100)) ; + COLLECTION = "collection" + (1 + random().nextInt(100)); // ... and shard/replica/node numbers final int numShards = 3; final int numReplicas = 2; - final int nodeCount = numShards*numReplicas; + final int nodeCount = numShards * numReplicas; // create and configure cluster - configureCluster(nodeCount) - .addConfig("conf", configset("cloud-dynamic")) - .configure(); + configureCluster(nodeCount).addConfig("conf", configset("cloud-dynamic")).configure(); // create an empty collection - CollectionAdminRequest - .createCollection(COLLECTION, "conf", numShards, numReplicas) - .processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT); - AbstractDistribZkTestBase.waitForRecoveriesToFinish(COLLECTION, cluster.getSolrClient().getZkStateReader(), false, true, DEFAULT_TIMEOUT); + CollectionAdminRequest.createCollection(COLLECTION, "conf", numShards, numReplicas) + .processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT); + AbstractDistribZkTestBase.waitForRecoveriesToFinish( + COLLECTION, cluster.getSolrClient().getZkStateReader(), false, true, DEFAULT_TIMEOUT); } @Test - // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 14-Oct-2018 public void test() throws Exception { // determine custom search handler name (the exact name should not matter) - final String customSearchHandlerName = "/custom_select"+random().nextInt(); + final String customSearchHandlerName = "/custom_select" + random().nextInt(); final String defaultHighlightComponentName = HighlightComponent.COMPONENT_NAME; final String highlightComponentName; @@ -142,28 +137,42 @@ public void test() throws Exception { highlightComponentName = defaultHighlightComponentName; } else { // custom component - highlightComponentName = "customhighlight"+random().nextInt(); - cluster.getSolrClient().request( - new ConfigRequest( - "{\n" + - " 'add-searchcomponent': {\n" + - " 'name': '"+highlightComponentName+"',\n" + - " 'class': '"+customHighlightComponentClassName()+"'\n" + - " }\n" + - "}"), - COLLECTION); + highlightComponentName = "customhighlight" + random().nextInt(); + cluster + .getSolrClient() + .request( + new ConfigRequest( + "{\n" + + " 'add-searchcomponent': {\n" + + " 'name': '" + + highlightComponentName + + "',\n" + + " 'class': '" + + customHighlightComponentClassName() + + "'\n" + + " }\n" + + "}"), + COLLECTION); } // handler - cluster.getSolrClient().request( - new ConfigRequest( - "{\n" + - " 'add-requesthandler': {\n" + - " 'name' : '"+customSearchHandlerName+"',\n" + - " 'class' : 
'org.apache.solr.handler.component.SearchHandler',\n" + - " 'components' : [ '"+QueryComponent.COMPONENT_NAME+"', '"+highlightComponentName+"' ]\n" + - " }\n" + - "}"), - COLLECTION); + cluster + .getSolrClient() + .request( + new ConfigRequest( + "{\n" + + " 'add-requesthandler': {\n" + + " 'name' : '" + + customSearchHandlerName + + "',\n" + + " 'class' : 'org.apache.solr.handler.component.SearchHandler',\n" + + " 'components' : [ '" + + QueryComponent.COMPONENT_NAME + + "', '" + + highlightComponentName + + "' ]\n" + + " }\n" + + "}"), + COLLECTION); } // add some documents @@ -181,7 +190,7 @@ public void test() throws Exception { // search for the documents { // compose the query - final SolrQuery solrQuery = new SolrQuery(t1+":bee"); + final SolrQuery solrQuery = new SolrQuery(t1 + ":bee"); solrQuery.setRequestHandler(customSearchHandlerName); solrQuery.setHighlight(true); final boolean t1Highlights = random().nextBoolean(); @@ -194,14 +203,15 @@ public void test() throws Exception { } // make the query - final QueryResponse queryResponse = new QueryRequest(solrQuery) - .process(cluster.getSolrClient(), COLLECTION); + final QueryResponse queryResponse = + new QueryRequest(solrQuery).process(cluster.getSolrClient(), COLLECTION); // analyse the response final Map>> highlighting = queryResponse.getHighlighting(); @SuppressWarnings({"unchecked"}) final ArrayList> custom_highlighting = - (ArrayList>)queryResponse.getResponse().get("custom_highlighting"); + (ArrayList>) + queryResponse.getResponse().get("custom_highlighting"); if (defaultHighlightComponentName.equals(highlightComponentName)) { // regular 'highlighting' ... @@ -234,33 +244,44 @@ public void test() throws Exception { } } - protected void checkHighlightingResponseMap(Map>> highlightingMap, - String highlightedField) throws Exception { - assertEquals("too few or too many keys: "+highlightingMap.keySet(), - 3, highlightingMap.size()); - checkHighlightingResponseMapElement(highlightingMap.get("1"), highlightedField, "bumble ", "bee"); - checkHighlightingResponseMapElement(highlightingMap.get("2"), highlightedField, "honey ", "bee"); - checkHighlightingResponseMapElement(highlightingMap.get("3"), highlightedField, "solitary ", "bee"); + protected void checkHighlightingResponseMap( + Map>> highlightingMap, String highlightedField) + throws Exception { + assertEquals( + "too few or too many keys: " + highlightingMap.keySet(), 3, highlightingMap.size()); + checkHighlightingResponseMapElement( + highlightingMap.get("1"), highlightedField, "bumble ", "bee"); + checkHighlightingResponseMapElement( + highlightingMap.get("2"), highlightedField, "honey ", "bee"); + checkHighlightingResponseMapElement( + highlightingMap.get("3"), highlightedField, "solitary ", "bee"); } - protected void checkHighlightingResponseMapElement(Map> docHighlights, - String highlightedField, String preHighlightText, String highlightedText) throws Exception { + protected void checkHighlightingResponseMapElement( + Map> docHighlights, + String highlightedField, + String preHighlightText, + String highlightedText) + throws Exception { if (highlightedField == null) { assertEquals(0, docHighlights.size()); } else { List docHighlightsList = docHighlights.get(highlightedField); assertEquals(1, docHighlightsList.size()); - assertEquals(preHighlightText - + SolrFragmentsBuilder.DEFAULT_PRE_TAGS - + highlightedText - + SolrFragmentsBuilder.DEFAULT_POST_TAGS, docHighlightsList.get(0)); + assertEquals( + preHighlightText + + SolrFragmentsBuilder.DEFAULT_PRE_TAGS + + 
highlightedText + + SolrFragmentsBuilder.DEFAULT_POST_TAGS, + docHighlightsList.get(0)); } } - protected void checkHighlightingResponseList(ArrayList> highlightingList, - String highlightedField) throws Exception { - assertEquals("too few or too many elements: "+highlightingList.size(), - 3, highlightingList.size()); + protected void checkHighlightingResponseList( + ArrayList> highlightingList, String highlightedField) + throws Exception { + assertEquals( + "too few or too many elements: " + highlightingList.size(), 3, highlightingList.size()); final Set seenDocIds = new HashSet<>(); for (SimpleOrderedMap highlightingListElementMap : highlightingList) { final String expectedHighlightText; @@ -269,7 +290,7 @@ protected void checkHighlightingResponseList(ArrayList> assertEquals(highlightingList.toString(), 2, highlightingListElementMap.size()); // id element { - final String docId = (String)highlightingListElementMap.get(id_key); + final String docId = (String) highlightingListElementMap.get(id_key); seenDocIds.add(docId); final String preHighlightText; final String highlightedText = "bee"; @@ -279,24 +300,26 @@ protected void checkHighlightingResponseList(ArrayList> preHighlightText = "honey "; } else if ("3".equals(docId)) { preHighlightText = "solitary "; - } else { + } else { preHighlightText = null; - fail("unknown docId "+docId); + fail("unknown docId " + docId); } - expectedHighlightText = preHighlightText - + SolrFragmentsBuilder.DEFAULT_PRE_TAGS - + highlightedText - + SolrFragmentsBuilder.DEFAULT_POST_TAGS; + expectedHighlightText = + preHighlightText + + SolrFragmentsBuilder.DEFAULT_PRE_TAGS + + highlightedText + + SolrFragmentsBuilder.DEFAULT_POST_TAGS; } // snippets element { @SuppressWarnings({"unchecked"}) - SimpleOrderedMap snippets = (SimpleOrderedMap)highlightingListElementMap.get(snippets_key); + SimpleOrderedMap snippets = + (SimpleOrderedMap) highlightingListElementMap.get(snippets_key); if (highlightedField == null) { assertEquals(0, snippets.size()); } else { @SuppressWarnings({"unchecked"}) - ArrayList docHighlights = (ArrayList)(snippets).get(highlightedField); + ArrayList docHighlights = (ArrayList) (snippets).get(highlightedField); assertEquals(1, docHighlights.size()); actualHighlightText = docHighlights.get(0); assertEquals(expectedHighlightText, actualHighlightText); @@ -305,5 +328,4 @@ protected void checkHighlightingResponseList(ArrayList> } assertEquals(3, seenDocIds.size()); } - } diff --git a/solr/core/src/test/org/apache/solr/handler/component/DebugComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DebugComponentTest.java index ceba23a862c..e46917d9ef2 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DebugComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DebugComponentTest.java @@ -20,7 +20,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.ModifiableSolrParams; @@ -30,11 +29,7 @@ import org.junit.BeforeClass; import org.junit.Test; - -/** - * - * - **/ +/** */ public class DebugComponentTest extends SolrTestCaseJ4 { private static final String ANY_RID = "ANY_RID"; @@ -46,157 +41,164 @@ public static void beforeClass() throws Exception { assertU(adoc("id", "2", "title", "this is another title.", "inStock_b1", "true")); assertU(adoc("id", "3", "title", "Mary had a little lamb.", "inStock_b1", "false")); assertU(commit()); - } @Test 
public void testBasicInterface() throws Exception { - //make sure the basics are in place - assertQ(req("q", "*:*", CommonParams.DEBUG_QUERY, "true"), - "//str[@name='rawquerystring']='*:*'", - "//str[@name='querystring']='*:*'", - "//str[@name='parsedquery']='MatchAllDocsQuery(*:*)'", - "//str[@name='parsedquery_toString']='*:*'", - "count(//lst[@name='explain']/*)=3", - "//lst[@name='explain']/str[@name='1']", - "//lst[@name='explain']/str[@name='2']", - "//lst[@name='explain']/str[@name='3']", - "//str[@name='QParser']",// make sure the QParser is specified - "count(//lst[@name='timing']/*)=3", //should be three pieces to timings - "//lst[@name='timing']/double[@name='time']", //make sure we have a time value, but don't specify its result - "count(//lst[@name='prepare']/*)>0", - "//lst[@name='prepare']/double[@name='time']", - "count(//lst[@name='process']/*)>0", - "//lst[@name='process']/double[@name='time']" - ); + // make sure the basics are in place + assertQ( + req("q", "*:*", CommonParams.DEBUG_QUERY, "true"), + "//str[@name='rawquerystring']='*:*'", + "//str[@name='querystring']='*:*'", + "//str[@name='parsedquery']='MatchAllDocsQuery(*:*)'", + "//str[@name='parsedquery_toString']='*:*'", + "count(//lst[@name='explain']/*)=3", + "//lst[@name='explain']/str[@name='1']", + "//lst[@name='explain']/str[@name='2']", + "//lst[@name='explain']/str[@name='3']", + "//str[@name='QParser']", // make sure the QParser is specified + "count(//lst[@name='timing']/*)=3", // should be three pieces to timings + "//lst[@name='timing']/double[@name='time']", // make sure we have a time value, but don't + // specify its result + "count(//lst[@name='prepare']/*)>0", + "//lst[@name='prepare']/double[@name='time']", + "count(//lst[@name='process']/*)>0", + "//lst[@name='process']/double[@name='time']"); } // Test the ability to specify which pieces to include @Test public void testPerItemInterface() throws Exception { - //Same as debugQuery = true - assertQ(req("q", "*:*", "debug", "true"), - "//str[@name='rawquerystring']='*:*'", - "//str[@name='querystring']='*:*'", - "//str[@name='parsedquery']='MatchAllDocsQuery(*:*)'", - "//str[@name='parsedquery_toString']='*:*'", - "//str[@name='QParser']",// make sure the QParser is specified - "count(//lst[@name='explain']/*)=3", - "//lst[@name='explain']/str[@name='1']", - "//lst[@name='explain']/str[@name='2']", - "//lst[@name='explain']/str[@name='3']", - "count(//lst[@name='timing']/*)=3", //should be three pieces to timings - "//lst[@name='timing']/double[@name='time']", //make sure we have a time value, but don't specify its result - "count(//lst[@name='prepare']/*)>0", - "//lst[@name='prepare']/double[@name='time']", - "count(//lst[@name='process']/*)>0", - "//lst[@name='process']/double[@name='time']" - ); - //timing only - assertQ(req("q", "*:*", "debug", CommonParams.TIMING), - "count(//str[@name='rawquerystring'])=0", - "count(//str[@name='querystring'])=0", - "count(//str[@name='parsedquery'])=0", - "count(//str[@name='parsedquery_toString'])=0", - "count(//lst[@name='explain']/*)=0", - "count(//str[@name='QParser'])=0",// make sure the QParser is specified - "count(//lst[@name='timing']/*)=3", //should be three pieces to timings - "//lst[@name='timing']/double[@name='time']", //make sure we have a time value, but don't specify its result - "count(//lst[@name='prepare']/*)>0", - "//lst[@name='prepare']/double[@name='time']", - "count(//lst[@name='process']/*)>0", - "//lst[@name='process']/double[@name='time']" - ); - //query only - assertQ(req("q", 
"*:*", "debug", CommonParams.QUERY), - "//str[@name='rawquerystring']='*:*'", - "//str[@name='querystring']='*:*'", - "//str[@name='parsedquery']='MatchAllDocsQuery(*:*)'", - "//str[@name='parsedquery_toString']='*:*'", - "count(//lst[@name='explain']/*)=0", - "//str[@name='QParser']",// make sure the QParser is specified - "count(//lst[@name='timing']/*)=0" - - ); - - //explains - assertQ(req("q", "*:*", "debug", CommonParams.RESULTS), - "count(//str[@name='rawquerystring'])=0", - "count(//str[@name='querystring'])=0", - "count(//str[@name='parsedquery'])=0", - "count(//str[@name='parsedquery_toString'])=0", - "count(//lst[@name='explain']/*)=3", - "//lst[@name='explain']/str[@name='1']", - "//lst[@name='explain']/str[@name='2']", - "//lst[@name='explain']/str[@name='3']", - "count(//str[@name='QParser'])=0",// make sure the QParser is specified - "count(//lst[@name='timing']/*)=0" - ); + // Same as debugQuery = true + assertQ( + req("q", "*:*", "debug", "true"), + "//str[@name='rawquerystring']='*:*'", + "//str[@name='querystring']='*:*'", + "//str[@name='parsedquery']='MatchAllDocsQuery(*:*)'", + "//str[@name='parsedquery_toString']='*:*'", + "//str[@name='QParser']", // make sure the QParser is specified + "count(//lst[@name='explain']/*)=3", + "//lst[@name='explain']/str[@name='1']", + "//lst[@name='explain']/str[@name='2']", + "//lst[@name='explain']/str[@name='3']", + "count(//lst[@name='timing']/*)=3", // should be three pieces to timings + "//lst[@name='timing']/double[@name='time']", // make sure we have a time value, but don't + // specify its result + "count(//lst[@name='prepare']/*)>0", + "//lst[@name='prepare']/double[@name='time']", + "count(//lst[@name='process']/*)>0", + "//lst[@name='process']/double[@name='time']"); + // timing only + assertQ( + req("q", "*:*", "debug", CommonParams.TIMING), + "count(//str[@name='rawquerystring'])=0", + "count(//str[@name='querystring'])=0", + "count(//str[@name='parsedquery'])=0", + "count(//str[@name='parsedquery_toString'])=0", + "count(//lst[@name='explain']/*)=0", + "count(//str[@name='QParser'])=0", // make sure the QParser is specified + "count(//lst[@name='timing']/*)=3", // should be three pieces to timings + "//lst[@name='timing']/double[@name='time']", // make sure we have a time value, but don't + // specify its result + "count(//lst[@name='prepare']/*)>0", + "//lst[@name='prepare']/double[@name='time']", + "count(//lst[@name='process']/*)>0", + "//lst[@name='process']/double[@name='time']"); + // query only + assertQ( + req("q", "*:*", "debug", CommonParams.QUERY), + "//str[@name='rawquerystring']='*:*'", + "//str[@name='querystring']='*:*'", + "//str[@name='parsedquery']='MatchAllDocsQuery(*:*)'", + "//str[@name='parsedquery_toString']='*:*'", + "count(//lst[@name='explain']/*)=0", + "//str[@name='QParser']", // make sure the QParser is specified + "count(//lst[@name='timing']/*)=0"); - assertQ(req("q", "*:*", "debug", CommonParams.RESULTS, - "debug", CommonParams.QUERY), - "//str[@name='rawquerystring']='*:*'", - "//str[@name='querystring']='*:*'", - "//str[@name='parsedquery']='MatchAllDocsQuery(*:*)'", - "//str[@name='parsedquery_toString']='*:*'", - "//str[@name='QParser']",// make sure the QParser is specified + // explains + assertQ( + req("q", "*:*", "debug", CommonParams.RESULTS), + "count(//str[@name='rawquerystring'])=0", + "count(//str[@name='querystring'])=0", + "count(//str[@name='parsedquery'])=0", + "count(//str[@name='parsedquery_toString'])=0", + "count(//lst[@name='explain']/*)=3", + 
"//lst[@name='explain']/str[@name='1']", + "//lst[@name='explain']/str[@name='2']", + "//lst[@name='explain']/str[@name='3']", + "count(//str[@name='QParser'])=0", // make sure the QParser is specified + "count(//lst[@name='timing']/*)=0"); - "count(//lst[@name='explain']/*)=3", - "//lst[@name='explain']/str[@name='1']", - "//lst[@name='explain']/str[@name='2']", - "//lst[@name='explain']/str[@name='3']", + assertQ( + req("q", "*:*", "debug", CommonParams.RESULTS, "debug", CommonParams.QUERY), + "//str[@name='rawquerystring']='*:*'", + "//str[@name='querystring']='*:*'", + "//str[@name='parsedquery']='MatchAllDocsQuery(*:*)'", + "//str[@name='parsedquery_toString']='*:*'", + "//str[@name='QParser']", // make sure the QParser is specified + "count(//lst[@name='explain']/*)=3", + "//lst[@name='explain']/str[@name='1']", + "//lst[@name='explain']/str[@name='2']", + "//lst[@name='explain']/str[@name='3']", + "count(//lst[@name='timing']/*)=0"); - "count(//lst[@name='timing']/*)=0" - ); - - //Grouping - assertQ(req("q", "*:*", "debug", CommonParams.RESULTS, - "group", CommonParams.TRUE, - "group.field", "inStock_b1", - "debug", CommonParams.TRUE), + // Grouping + assertQ( + req( + "q", + "*:*", + "debug", + CommonParams.RESULTS, + "group", + CommonParams.TRUE, + "group.field", + "inStock_b1", + "debug", + CommonParams.TRUE), "//str[@name='rawquerystring']='*:*'", - "count(//lst[@name='explain']/*)=2" - ); + "count(//lst[@name='explain']/*)=2"); } - + @Test public void testModifyRequestTrack() { DebugComponent component = new DebugComponent(); List components = new ArrayList<>(1); components.add(component); - for(int i = 0; i < 10; i++) { - SolrQueryRequest req = req("q", "test query", "distrib", "true", CommonParams.REQUEST_ID, "123456-my_rid"); + for (int i = 0; i < 10; i++) { + SolrQueryRequest req = + req("q", "test query", "distrib", "true", CommonParams.REQUEST_ID, "123456-my_rid"); SolrQueryResponse resp = new SolrQueryResponse(); ResponseBuilder rb = new ResponseBuilder(req, resp, components); ShardRequest sreq = new ShardRequest(); sreq.params = new ModifiableSolrParams(); sreq.purpose = ShardRequest.PURPOSE_GET_FIELDS; sreq.purpose |= ShardRequest.PURPOSE_GET_DEBUG; - //expecting the same results with debugQuery=true or debug=track - if(random().nextBoolean()) { + // expecting the same results with debugQuery=true or debug=track + if (random().nextBoolean()) { rb.setDebug(true); } else { rb.setDebug(false); rb.setDebugTrack(true); - //should not depend on other debug options + // should not depend on other debug options rb.setDebugQuery(random().nextBoolean()); rb.setDebugTimings(random().nextBoolean()); rb.setDebugResults(random().nextBoolean()); } component.modifyRequest(rb, null, sreq); - //if the request has debugQuery=true or debug=track, the sreq should get debug=track always - assertTrue(Arrays.asList(sreq.params.getParams(CommonParams.DEBUG)).contains(CommonParams.TRACK)); - //the purpose must be added as readable param to be included in the shard logs - assertEquals("GET_FIELDS,GET_DEBUG,SET_TERM_STATS", sreq.params.get(CommonParams.REQUEST_PURPOSE)); - //the rid must be added to be included in the shard logs + // if the request has debugQuery=true or debug=track, the sreq should get debug=track always + assertTrue( + Arrays.asList(sreq.params.getParams(CommonParams.DEBUG)).contains(CommonParams.TRACK)); + // the purpose must be added as readable param to be included in the shard logs + assertEquals( + "GET_FIELDS,GET_DEBUG,SET_TERM_STATS", 
sreq.params.get(CommonParams.REQUEST_PURPOSE)); + // the rid must be added to be included in the shard logs assertEquals("123456-my_rid", sreq.params.get(CommonParams.REQUEST_ID)); // close requests - this method obtains a searcher in order to access its StatsCache req.close(); } - } - + @Test public void testPrepare() throws IOException { DebugComponent component = new DebugComponent(); @@ -204,19 +206,19 @@ public void testPrepare() throws IOException { components.add(component); SolrQueryRequest req; ResponseBuilder rb; - for(int i = 0; i < 10; i++) { + for (int i = 0; i < 10; i++) { req = req("q", "test query", "distrib", "true"); rb = new ResponseBuilder(req, new SolrQueryResponse(), components); rb.isDistrib = true; addRequestId(rb, ANY_RID); - //expecting the same results with debugQuery=true or debug=track - if(random().nextBoolean()) { + // expecting the same results with debugQuery=true or debug=track + if (random().nextBoolean()) { rb.setDebug(true); } else { rb.setDebug(false); rb.setDebugTrack(true); - //should not depend on other debug options + // should not depend on other debug options rb.setDebugQuery(random().nextBoolean()); rb.setDebugTimings(random().nextBoolean()); rb.setDebugResults(random().nextBoolean()); @@ -224,7 +226,7 @@ public void testPrepare() throws IOException { component.prepare(rb); ensureTrackRecordsRid(rb, ANY_RID); } - + req = req("q", "test query", "distrib", "true", CommonParams.REQUEST_ID, "123"); rb = new ResponseBuilder(req, new SolrQueryResponse(), components); rb.isDistrib = true; @@ -242,35 +244,51 @@ public void testPrepare() throws IOException { public void testQueryToString() throws Exception { // test that both boosts are represented in a double-boost scenario - assertQ(req("debugQuery", "true", "indent","true", "rows","0", "q", "(foo_s:aaa^3)^4"), - "//str[@name='parsedquery'][.='foo_s:aaa^3.0^4.0']" - ); + assertQ( + req("debugQuery", "true", "indent", "true", "rows", "0", "q", "(foo_s:aaa^3)^4"), + "//str[@name='parsedquery'][.='foo_s:aaa^3.0^4.0']"); // test to see that extra parens are avoided - assertQ(req("debugQuery", "true", "indent","true", "rows","0", "q", "+foo_s:aaa^3 -bar_s:bbb^0"), - "//str[@name='parsedquery'][.='+foo_s:aaa^3.0 -bar_s:bbb^0.0']" - ); + assertQ( + req("debugQuery", "true", "indent", "true", "rows", "0", "q", "+foo_s:aaa^3 -bar_s:bbb^0"), + "//str[@name='parsedquery'][.='+foo_s:aaa^3.0 -bar_s:bbb^0.0']"); // test that parens are added when needed - assertQ(req("debugQuery", "true", "indent", "true", "rows", "0", "q", "foo_s:aaa (bar_s:bbb baz_s:ccc)"), - "//str[@name='parsedquery'][.='foo_s:aaa (bar_s:bbb baz_s:ccc)']" - ); + assertQ( + req( + "debugQuery", + "true", + "indent", + "true", + "rows", + "0", + "q", + "foo_s:aaa (bar_s:bbb baz_s:ccc)"), + "//str[@name='parsedquery'][.='foo_s:aaa (bar_s:bbb baz_s:ccc)']"); // test boosts on subqueries - assertQ(req("debugQuery", "true", "indent", "true", "rows", "0", "q", "foo_s:aaa^3 (bar_s:bbb baz_s:ccc)^4"), - "//str[@name='parsedquery'][.='foo_s:aaa^3.0 (bar_s:bbb baz_s:ccc)^4.0']" - ); + assertQ( + req( + "debugQuery", + "true", + "indent", + "true", + "rows", + "0", + "q", + "foo_s:aaa^3 (bar_s:bbb baz_s:ccc)^4"), + "//str[@name='parsedquery'][.='foo_s:aaa^3.0 (bar_s:bbb baz_s:ccc)^4.0']"); // test constant score query boost exists - assertQ(req("debugQuery", "true", "indent", "true", "rows", "0", "q", "foo_s:aaa^=3"), - "//str[@name='parsedquery'][contains(.,'3.0')]" - ); - + assertQ( + req("debugQuery", "true", "indent", "true", "rows", "0", "q", 
"foo_s:aaa^=3"), + "//str[@name='parsedquery'][contains(.,'3.0')]"); } @SuppressWarnings("unchecked") private void ensureTrackRecordsRid(ResponseBuilder rb, String expectedRid) { - final String rid = (String) ((NamedList) rb.getDebugInfo().get("track")).get(CommonParams.REQUEST_ID); + final String rid = + (String) ((NamedList) rb.getDebugInfo().get("track")).get(CommonParams.REQUEST_ID); assertEquals("Expecting " + expectedRid + " but found " + rid, expectedRid, rid); } diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java index 506219a0b49..ce81f308f4a 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java @@ -26,7 +26,6 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Set; - import org.apache.commons.io.FileUtils; import org.apache.solr.SolrJettyTestBase; import org.apache.solr.client.solrj.SolrClient; @@ -45,13 +44,13 @@ import org.junit.Test; public class DistributedDebugComponentTest extends SolrJettyTestBase { - + private static SolrClient collection1; private static SolrClient collection2; private static String shard1; private static String shard2; private static File solrHome; - + private static File createSolrHome() throws Exception { File workDir = createTempDir().toFile(); setupJettyTestHome(workDir, "collection1"); @@ -59,7 +58,6 @@ private static File createSolrHome() throws Exception { return workDir; } - @BeforeClass public static void createThings() throws Exception { systemSetPropertySolrDisableUrlAllowList("true"); @@ -69,13 +67,13 @@ public static void createThings() throws Exception { collection1 = getHttpSolrClient(url + "/collection1"); collection2 = getHttpSolrClient(url + "/collection2"); - + String urlCollection1 = jetty.getBaseUrl().toString() + "/" + "collection1"; String urlCollection2 = jetty.getBaseUrl().toString() + "/" + "collection2"; shard1 = urlCollection1.replaceAll("https?://", ""); shard2 = urlCollection2.replaceAll("https?://", ""); - - //create second core + + // create second core try (HttpSolrClient nodeClient = getHttpSolrClient(url)) { CoreAdminRequest.Create req = new CoreAdminRequest.Create(); req.setCoreName("collection2"); @@ -88,14 +86,13 @@ public static void createThings() throws Exception { doc.setField("text", "batman"); collection1.add(doc); collection1.commit(); - + doc.setField("id", "2"); doc.setField("text", "superman"); collection2.add(doc); collection2.commit(); - } - + @AfterClass public static void destroyThings() throws Exception { if (null != collection1) { @@ -108,22 +105,22 @@ public static void destroyThings() throws Exception { } if (null != jetty) { jetty.stop(); - jetty=null; + jetty = null; } resetExceptionIgnores(); systemClearPropertySolrDisableUrlAllowList(); } - + @Test @SuppressWarnings("unchecked") public void testSimpleSearch() throws Exception { SolrQuery query = new SolrQuery(); query.setQuery("*:*"); - query.set("debug", "track"); + query.set("debug", "track"); query.set("distrib", "true"); query.setFields("id", "text"); query.set("shards", shard1 + "," + shard2); - + if (random().nextBoolean()) { query.add("omitHeader", Boolean.toString(random().nextBoolean())); } @@ -132,33 +129,53 @@ public void testSimpleSearch() throws Exception { assertNotNull(track); assertNotNull(track.get("rid")); 
assertNotNull(track.get("EXECUTE_QUERY")); - assertNotNull(((NamedList)track.get("EXECUTE_QUERY")).get(shard1)); - assertNotNull(((NamedList)track.get("EXECUTE_QUERY")).get(shard2)); - - assertNotNull(((NamedList)track.get("GET_FIELDS")).get(shard1)); - assertNotNull(((NamedList)track.get("GET_FIELDS")).get(shard2)); - - assertElementsPresent((NamedList)((NamedList)track.get("EXECUTE_QUERY")).get(shard1), - "QTime", "ElapsedTime", "RequestPurpose", "NumFound", "Response"); - assertElementsPresent((NamedList)((NamedList)track.get("EXECUTE_QUERY")).get(shard2), - "QTime", "ElapsedTime", "RequestPurpose", "NumFound", "Response"); - - assertElementsPresent((NamedList)((NamedList)track.get("GET_FIELDS")).get(shard1), - "QTime", "ElapsedTime", "RequestPurpose", "NumFound", "Response"); - assertElementsPresent((NamedList)((NamedList)track.get("GET_FIELDS")).get(shard2), - "QTime", "ElapsedTime", "RequestPurpose", "NumFound", "Response"); - + assertNotNull(((NamedList) track.get("EXECUTE_QUERY")).get(shard1)); + assertNotNull(((NamedList) track.get("EXECUTE_QUERY")).get(shard2)); + + assertNotNull(((NamedList) track.get("GET_FIELDS")).get(shard1)); + assertNotNull(((NamedList) track.get("GET_FIELDS")).get(shard2)); + + assertElementsPresent( + (NamedList) ((NamedList) track.get("EXECUTE_QUERY")).get(shard1), + "QTime", + "ElapsedTime", + "RequestPurpose", + "NumFound", + "Response"); + assertElementsPresent( + (NamedList) ((NamedList) track.get("EXECUTE_QUERY")).get(shard2), + "QTime", + "ElapsedTime", + "RequestPurpose", + "NumFound", + "Response"); + + assertElementsPresent( + (NamedList) ((NamedList) track.get("GET_FIELDS")).get(shard1), + "QTime", + "ElapsedTime", + "RequestPurpose", + "NumFound", + "Response"); + assertElementsPresent( + (NamedList) ((NamedList) track.get("GET_FIELDS")).get(shard2), + "QTime", + "ElapsedTime", + "RequestPurpose", + "NumFound", + "Response"); + query.setQuery("id:1"); response = collection1.query(query); track = (NamedList) response.getDebugMap().get("track"); - assertNotNull(((NamedList)track.get("EXECUTE_QUERY")).get(shard1)); - assertNotNull(((NamedList)track.get("EXECUTE_QUERY")).get(shard2)); - - assertNotNull(((NamedList)track.get("GET_FIELDS")).get(shard1)); + assertNotNull(((NamedList) track.get("EXECUTE_QUERY")).get(shard1)); + assertNotNull(((NamedList) track.get("EXECUTE_QUERY")).get(shard2)); + + assertNotNull(((NamedList) track.get("GET_FIELDS")).get(shard1)); // This test is invalid, as GET_FIELDS should not be executed in shard 2 - assertNull(((NamedList)track.get("GET_FIELDS")).get(shard2)); + assertNull(((NamedList) track.get("GET_FIELDS")).get(shard2)); } - + @Test @SuppressWarnings("resource") // Cannot close client in this loop! 
public void testRandom() throws Exception { @@ -191,7 +208,6 @@ public void testRandom() throws Exception { } q.set("shards", String.join(",", shards)); - List debug = new ArrayList(10); boolean all = false; @@ -200,10 +216,18 @@ public void testRandom() throws Exception { final boolean results = random().nextBoolean(); final boolean track = random().nextBoolean(); - if (timing) { debug.add("timing"); } - if (query) { debug.add("query"); } - if (results) { debug.add("results"); } - if (track) { debug.add("track"); } + if (timing) { + debug.add("timing"); + } + if (query) { + debug.add("query"); + } + if (results) { + debug.add("results"); + } + if (track) { + debug.add("track"); + } if (debug.isEmpty()) { debug.add("true"); all = true; @@ -227,8 +251,8 @@ public void testRandom() throws Exception { } /** - * Asserts that the specified debug result key does or does not exist in the - * response based on the expected boolean. + * Asserts that the specified debug result key does or does not exist in the response based on the + * expected boolean. */ private void assertDebug(QueryResponse response, boolean expected, String key) { if (expected) { @@ -237,23 +261,18 @@ private void assertDebug(QueryResponse response, boolean expected, String key) { assertNotInDebug(response, key); } } - /** - * Asserts that the specified debug result key does exist in the response and is non-null - */ + /** Asserts that the specified debug result key does exist in the response and is non-null */ private void assertInDebug(QueryResponse response, String key) { assertNotNull("debug map is null", response.getDebugMap()); assertNotNull("debug map has null for : " + key, response.getDebugMap().get(key)); } - /** - * Asserts that the specified debug result key does NOT exist in the response - */ + /** Asserts that the specified debug result key does NOT exist in the response */ private void assertNotInDebug(QueryResponse response, String key) { assertNotNull("debug map is null", response.getDebugMap()); assertFalse("debug map contains: " + key, response.getDebugMap().containsKey(key)); } - @Test public void testDebugSections() throws Exception { SolrQuery query = new SolrQuery(); @@ -264,10 +283,10 @@ public void testDebugSections() throws Exception { verifyDebugSections(query, collection1); query.setQuery("id:1 OR text:_query_with_no_results_ OR id:[0 TO 300]"); verifyDebugSections(query, collection1); - } - - private void verifyDebugSections(SolrQuery query, SolrClient client) throws SolrServerException, IOException { + + private void verifyDebugSections(SolrQuery query, SolrClient client) + throws SolrServerException, IOException { query.set("debugQuery", "true"); query.remove("debug"); QueryResponse response = client.query(query); @@ -280,7 +299,7 @@ private void verifyDebugSections(SolrQuery query, SolrClient client) throws Solr assertInDebug(response, "QParser"); assertInDebug(response, "explain"); assertInDebug(response, "timing"); - + query.set("debug", "true"); query.remove("debugQuery"); response = client.query(query); @@ -293,7 +312,7 @@ private void verifyDebugSections(SolrQuery query, SolrClient client) throws Solr assertInDebug(response, "QParser"); assertInDebug(response, "explain"); assertInDebug(response, "timing"); - + query.set("debug", "track"); response = client.query(query); assertFalse(response.getDebugMap().isEmpty()); @@ -305,7 +324,7 @@ private void verifyDebugSections(SolrQuery query, SolrClient client) throws Solr assertNotInDebug(response, "QParser"); assertNotInDebug(response, "explain"); 
assertNotInDebug(response, "timing"); - + query.set("debug", "query"); response = client.query(query); assertFalse(response.getDebugMap().isEmpty()); @@ -317,7 +336,7 @@ private void verifyDebugSections(SolrQuery query, SolrClient client) throws Solr assertInDebug(response, "QParser"); assertNotInDebug(response, "explain"); assertNotInDebug(response, "timing"); - + query.set("debug", "results"); response = client.query(query); assertFalse(response.getDebugMap().isEmpty()); @@ -329,7 +348,7 @@ private void verifyDebugSections(SolrQuery query, SolrClient client) throws Solr assertNotInDebug(response, "QParser"); assertInDebug(response, "explain"); assertNotInDebug(response, "timing"); - + query.set("debug", "timing"); response = client.query(query); assertFalse(response.getDebugMap().isEmpty()); @@ -341,21 +360,20 @@ private void verifyDebugSections(SolrQuery query, SolrClient client) throws Solr assertNotInDebug(response, "QParser"); assertNotInDebug(response, "explain"); assertInDebug(response, "timing"); - + query.set("debug", "false"); response = client.query(query); assertNull(response.getDebugMap()); } @Test - // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Sep-2018 public void testCompareWithNonDistributedRequest() throws SolrServerException, IOException { SolrQuery query = new SolrQuery(); query.setQuery("id:1 OR id:2"); query.setFilterQueries("id:[0 TO 10]", "id:[0 TO 5]"); query.setRows(1); query.setSort("id", SolrQuery.ORDER.asc); // thus only return id:1 since rows 1 - query.set("debug", "true"); + query.set("debug", "true"); query.set("distrib", "true"); query.setFields("id"); if (random().nextBoolean()) { // can affect rb.onePassDistributedQuery @@ -364,16 +382,16 @@ public void testCompareWithNonDistributedRequest() throws SolrServerException, I query.set(ShardParams.DISTRIB_SINGLE_PASS, random().nextBoolean()); query.set("shards", shard1 + "," + shard2); QueryResponse distribResponse = collection1.query(query); - + // same query but not distributed query.set("distrib", "false"); query.remove("shards"); QueryResponse nonDistribResponse = collection1.query(query); - + assertNotNull(distribResponse.getDebugMap().get("track")); assertNull(nonDistribResponse.getDebugMap().get("track")); assertEquals(distribResponse.getDebugMap().size() - 1, nonDistribResponse.getDebugMap().size()); - + assertSectionEquals(distribResponse, nonDistribResponse, "explain"); assertSectionEquals(distribResponse, nonDistribResponse, "rawquerystring"); assertSectionEquals(distribResponse, nonDistribResponse, "querystring"); @@ -382,16 +400,18 @@ public void testCompareWithNonDistributedRequest() throws SolrServerException, I assertSectionEquals(distribResponse, nonDistribResponse, "QParser"); assertSectionEquals(distribResponse, nonDistribResponse, "filter_queries"); assertSectionEquals(distribResponse, nonDistribResponse, "parsed_filter_queries"); - + // timing should have the same sections: - assertSameKeys((NamedList)nonDistribResponse.getDebugMap().get("timing"), (NamedList)distribResponse.getDebugMap().get("timing")); + assertSameKeys( + (NamedList) nonDistribResponse.getDebugMap().get("timing"), + (NamedList) distribResponse.getDebugMap().get("timing")); } - + public void testTolerantSearch() throws SolrServerException, IOException { String badShard = DEAD_HOST_1; SolrQuery query = new SolrQuery(); query.setQuery("*:*"); - query.set("debug", "true"); + query.set("debug", "true"); query.set("distrib", "true"); query.setFields("id", 
"text"); query.set("shards", shard1 + "," + shard2 + "," + badShard); @@ -402,44 +422,51 @@ public void testTolerantSearch() throws SolrServerException, IOException { query.set(ShardParams.SHARDS_TOLERANT, "true"); QueryResponse response = collection1.query(query); - assertTrue((Boolean)response.getResponseHeader().get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY)); + assertTrue( + (Boolean) + response + .getResponseHeader() + .get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY)); @SuppressWarnings("unchecked") NamedList badShardTrack = - (((NamedList>>)response.getDebugMap().get("track")).get("EXECUTE_QUERY")).get(badShard); + (((NamedList>>) response.getDebugMap().get("track")) + .get("EXECUTE_QUERY")) + .get(badShard); assertEquals("Unexpected response size for shard", 1, badShardTrack.size()); Entry exception = badShardTrack.iterator().next(); assertEquals("Expected key 'Exception' not found", "Exception", exception.getKey()); assertNotNull("Exception message should not be null", exception.getValue()); unIgnoreException("Server refused connection"); } - - /** - * Compares the same section on the two query responses - */ - private void assertSectionEquals(QueryResponse distrib, QueryResponse nonDistrib, String section) { - assertEquals(section + " debug should be equal", distrib.getDebugMap().get(section), nonDistrib.getDebugMap().get(section)); + + /** Compares the same section on the two query responses */ + private void assertSectionEquals( + QueryResponse distrib, QueryResponse nonDistrib, String section) { + assertEquals( + section + " debug should be equal", + distrib.getDebugMap().get(section), + nonDistrib.getDebugMap().get(section)); } private void assertSameKeys(NamedList object, NamedList object2) { Iterator> iteratorObj2 = object2.iterator(); - for (Map.Entry entry: object) { + for (Map.Entry entry : object) { assertTrue(iteratorObj2.hasNext()); Map.Entry entry2 = iteratorObj2.next(); assertEquals(entry.getKey(), entry2.getKey()); if (entry.getValue() instanceof NamedList) { assertTrue(entry2.getValue() instanceof NamedList); - assertSameKeys((NamedList)entry.getValue(), (NamedList)entry2.getValue()); + assertSameKeys((NamedList) entry.getValue(), (NamedList) entry2.getValue()); } } assertFalse(iteratorObj2.hasNext()); } - private void assertElementsPresent(NamedList namedList, String...elements) { - for(String element:elements) { + private void assertElementsPresent(NamedList namedList, String... 
elements) {
+ for (String element : elements) {
String value = namedList.get(element);
assertNotNull("Expected element '" + element + "' but was not found", value);
assertTrue("Expected element '" + element + "' but was empty", !value.isEmpty());
}
}
-
}
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedExpandComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedExpandComponentTest.java
index 0975bcfce2f..1bacbdffaba 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedExpandComponentTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedExpandComponentTest.java
@@ -19,7 +19,6 @@
import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;
-
import org.apache.solr.BaseDistributedSearchTestCase;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
@@ -48,37 +47,67 @@ public static void setUpBeforeClass() throws Exception {
public void test() throws Exception {
_test("group_s", "g1", "g2", "g3", "g4");
_test("group_s_dv", "g1", "g2", "g3", "g4");
- _test("group_i", "1", "0", "3", "-1"); // NOTE: using 0 to explicitly confim we don't assume null
- _test("group_ti_dv", "1", "-2", "0", "4"); // NOTE: using 0 to explicitly confim we don't assume null
+ _test(
+ "group_i", "1", "0", "3", "-1"); // NOTE: using 0 to explicitly confirm we don't assume null
+ _test(
+ "group_ti_dv",
+ "1",
+ "-2",
+ "0",
+ "4"); // NOTE: using 0 to explicitly confirm we don't assume null
}
-
- private void _test(final String group,
- final String aaa, final String bbb, final String ccc, final String ddd) throws Exception {
-
- del("*:*");
- index_specific(0,"id","1", "term_s", "YYYY", group, aaa, "test_i", "5", "test_l", "10", "test_f", "2000");
- index_specific(0,"id","2", "term_s", "YYYY", group, aaa, "test_i", "50", "test_l", "100", "test_f", "200");
- index_specific(1,"id","5", "term_s", "YYYY", group, bbb, "test_i", "4", "test_l", "10", "test_f", "2000");
- index_specific(1,"id","6", "term_s", "YYYY", group, bbb, "test_i", "10", "test_l", "100", "test_f", "200");
- index_specific(0,"id","7", "term_s", "YYYY", group, aaa, "test_i", "1", "test_l", "100000", "test_f", "2000");
- index_specific(1,"id","8", "term_s", "YYYY", group, bbb, "test_i", "2", "test_l", "100000", "test_f", "200");
- index_specific(2,"id","9", "term_s", "YYYY", group, ccc, "test_i", "1000", "test_l", "1005", "test_f", "3000");
- index_specific(2,"id","10","term_s", "YYYY", group, ccc, "test_i", "1500", "test_l", "1001", "test_f", "3200");
+ private void _test(
+ final String group, final String aaa, final String bbb, final String ccc, final String ddd)
+ throws Exception {
- // NOTE: nullPolicy=collapse will only be viable because all null docs are in collocated in shard #2
- index_specific(2,"id","88", "test_i", "1001", "test_l", "1001", "test_f", "3200");
- index_specific(2,"id","99", "test_i", "11", "test_l", "100", "test_f", "200");
-
- index_specific(2,"id","11","term_s", "YYYY", group, ccc, "test_i", "1300", "test_l", "1002", "test_f", "3300");
- index_specific(1,"id","12","term_s", "YYYY", group, ddd, "test_i", "15", "test_l", "10", "test_f", "2000");
- index_specific(1,"id","13","term_s", "YYYY", group, ddd, "test_i", "16", "test_l", "9", "test_f", "2000");
- index_specific(1,"id","14","term_s", "YYYY", group, ddd, "test_i", "1", "test_l", "20", "test_f", "2000");
+ del("*:*");
+ index_specific(
+ 0, "id", "1", "term_s", "YYYY", group, aaa, "test_i", "5", "test_l", "10", "test_f",
+ "2000");
+ index_specific(
+ 0, "id", "2", "term_s", "YYYY", group, aaa, "test_i", "50", "test_l", "100", "test_f",
+ "200");
+ index_specific(
+ 1, "id", "5", "term_s", "YYYY", group, bbb, "test_i", "4", "test_l", "10", "test_f",
+ "2000");
+ index_specific(
+ 1, "id", "6", "term_s", "YYYY", group, bbb, "test_i", "10", "test_l", "100", "test_f",
+ "200");
+ index_specific(
+ 0, "id", "7", "term_s", "YYYY", group, aaa, "test_i", "1", "test_l", "100000", "test_f",
+ "2000");
+ index_specific(
+ 1, "id", "8", "term_s", "YYYY", group, bbb, "test_i", "2", "test_l", "100000", "test_f",
+ "200");
+ index_specific(
+ 2, "id", "9", "term_s", "YYYY", group, ccc, "test_i", "1000", "test_l", "1005", "test_f",
+ "3000");
+ index_specific(
+ 2, "id", "10", "term_s", "YYYY", group, ccc, "test_i", "1500", "test_l", "1001", "test_f",
+ "3200");
+
+ // NOTE: nullPolicy=collapse will only be viable because all null docs are collocated in
+ // shard #2
+ index_specific(2, "id", "88", "test_i", "1001", "test_l", "1001", "test_f", "3200");
+ index_specific(2, "id", "99", "test_i", "11", "test_l", "100", "test_f", "200");
+
+ index_specific(
+ 2, "id", "11", "term_s", "YYYY", group, ccc, "test_i", "1300", "test_l", "1002", "test_f",
+ "3300");
+ index_specific(
+ 1, "id", "12", "term_s", "YYYY", group, ddd, "test_i", "15", "test_l", "10", "test_f",
+ "2000");
+ index_specific(
+ 1, "id", "13", "term_s", "YYYY", group, ddd, "test_i", "16", "test_l", "9", "test_f",
+ "2000");
+ index_specific(
+ 1, "id", "14", "term_s", "YYYY", group, ddd, "test_i", "1", "test_l", "20", "test_f",
+ "2000");
commit();
-
handle.put("explain", SKIPVAL);
handle.put("timestamp", SKIPVAL);
handle.put("score", SKIPVAL);
@@ -91,28 +120,132 @@ private void _test(final String group,
handle.put("_version_", SKIP);
handle.put("expanded", UNORDERED);
- query("q", "*:*", "fq", "{!collapse field="+group+"}", "defType", "edismax", "bf", "field(test_i)", "expand", "true", "fl","*,score");
- query("q", "*:*", "fq", "{!collapse field="+group+"}", "defType", "edismax", "bf", "field(test_i)", "expand", "true", "expand.sort", "test_l desc", "fl","*,score");
- query("q", "*:*", "fq", "{!collapse field="+group+"}", "defType", "edismax", "bf", "field(test_i)", "expand", "true", "expand.sort", "test_l desc", "expand.rows", "1", "fl","*,score");
- //Test no expand results
- query("q", "test_i:5", "fq", "{!collapse field="+group+"}", "defType", "edismax", "bf", "field(test_i)", "expand", "true", "expand.sort", "test_l desc", "expand.rows", "1", "fl","*,score");
- //Test zero results
- query("q", "test_i:5434343", "fq", "{!collapse field="+group+"}", "defType", "edismax", "bf", "field(test_i)", "expand", "true", "expand.sort", "test_l desc", "expand.rows", "1", "fl","*,score");
- //Test page 2
- query("q", "*:*", "start","1", "rows", "1", "fq", "{!collapse field="+group+"}", "defType", "edismax", "bf", "field(test_i)", "expand", "true", "fl","*,score");
+ query(
+ "q",
+ "*:*",
+ "fq",
+ "{!collapse field=" + group + "}",
+ "defType",
+ "edismax",
+ "bf",
+ "field(test_i)",
+ "expand",
+ "true",
+ "fl",
+ "*,score");
+ query(
+ "q",
+ "*:*",
+ "fq",
+ "{!collapse field=" + group + "}",
+ "defType",
+ "edismax",
+ "bf",
+ "field(test_i)",
+ "expand",
+ "true",
+ "expand.sort",
+ "test_l desc",
+ "fl",
+ "*,score");
+ query(
+ "q",
+ "*:*",
+ "fq",
+ "{!collapse field=" + group + "}",
+ "defType",
+ "edismax",
+ "bf",
+ "field(test_i)",
+ "expand",
+ "true",
+ "expand.sort",
+ "test_l desc",
+ "expand.rows",
+ "1",
+ "fl",
+ "*,score");
+
// Test no expand results + query( + "q", + "test_i:5", + "fq", + "{!collapse field=" + group + "}", + "defType", + "edismax", + "bf", + "field(test_i)", + "expand", + "true", + "expand.sort", + "test_l desc", + "expand.rows", + "1", + "fl", + "*,score"); + // Test zero results + query( + "q", + "test_i:5434343", + "fq", + "{!collapse field=" + group + "}", + "defType", + "edismax", + "bf", + "field(test_i)", + "expand", + "true", + "expand.sort", + "test_l desc", + "expand.rows", + "1", + "fl", + "*,score"); + // Test page 2 + query( + "q", + "*:*", + "start", + "1", + "rows", + "1", + "fq", + "{!collapse field=" + group + "}", + "defType", + "edismax", + "bf", + "field(test_i)", + "expand", + "true", + "fl", + "*,score"); // multiple collapse and equal cost - ModifiableSolrParams baseParams = params("q", "term_s:YYYY", "defType", "edismax", "expand", "true", "fl", "*,score", - "bf", "field(test_i)", "expand.sort", "id asc"); - baseParams.set("fq", "{!collapse field="+group+"}", "{!collapse field=test_i}"); + ModifiableSolrParams baseParams = + params( + "q", + "term_s:YYYY", + "defType", + "edismax", + "expand", + "true", + "fl", + "*,score", + "bf", + "field(test_i)", + "expand.sort", + "id asc"); + baseParams.set("fq", "{!collapse field=" + group + "}", "{!collapse field=test_i}"); query(baseParams); // multiple collapse and unequal cost case1 - baseParams.set("fq", "{!collapse cost=1000 field="+group+"}", "{!collapse cost=2000 field=test_i}"); + baseParams.set( + "fq", "{!collapse cost=1000 field=" + group + "}", "{!collapse cost=2000 field=test_i}"); query(baseParams); // multiple collapse and unequal cost case2 - baseParams.set("fq", "{!collapse cost=1000 field="+group+"}", "{!collapse cost=200 field=test_i}"); + baseParams.set( + "fq", "{!collapse cost=1000 field=" + group + "}", "{!collapse cost=200 field=test_i}"); query(baseParams); ignoreException("missing expand field"); @@ -121,40 +254,43 @@ private void _test(final String group, assertTrue(e.getMessage().contains("missing expand field")); resetExceptionIgnores(); - // Since none of these queries will match any doc w/null in the group field, it shouldn't matter what nullPolicy is used... - for (String np : Arrays.asList("", " nullPolicy=ignore", " nullPolicy=expand", " nullPolicy=collapse")) { - - //First basic test case. + // Since none of these queries will match any doc w/null in the group field, it shouldn't matter + // what nullPolicy is used... + for (String np : + Arrays.asList("", " nullPolicy=ignore", " nullPolicy=expand", " nullPolicy=collapse")) { + + // First basic test case. 
ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "term_s:YYYY"); - params.add("fq", "{!collapse field="+group+np+"}"); + params.add("fq", "{!collapse field=" + group + np + "}"); params.add("defType", "edismax"); params.add("bf", "field(test_i)"); params.add("expand", "true"); - + setDistributedParams(params); QueryResponse rsp = queryServer(params); - assertCountAndOrder(4, rsp.getResults(), "10" /* c */, "2" /* a */, "13" /* d */, "6" /* b */); + assertCountAndOrder( + 4, rsp.getResults(), "10" /* c */, "2" /* a */, "13" /* d */, "6" /* b */); Map results = rsp.getExpandedResults(); assertExpandGroups(results, aaa, bbb, ccc, ddd); assertExpandGroupCountAndOrder(aaa, 2, results, "1", "7"); assertExpandGroupCountAndOrder(bbb, 2, results, "5", "8"); assertExpandGroupCountAndOrder(ccc, 2, results, "11", "9"); assertExpandGroupCountAndOrder(ddd, 2, results, "12", "14"); - - - //Test expand.sort - + + // Test expand.sort + params = new ModifiableSolrParams(); params.add("q", "term_s:YYYY"); - params.add("fq", "{!collapse field="+group+np+"}"); + params.add("fq", "{!collapse field=" + group + np + "}"); params.add("defType", "edismax"); params.add("bf", "field(test_i)"); params.add("expand", "true"); params.add("expand.sort", "test_l desc"); setDistributedParams(params); rsp = queryServer(params); - assertCountAndOrder(4, rsp.getResults(), "10" /* c */, "2" /* a */, "13" /* d */, "6" /* b */); + assertCountAndOrder( + 4, rsp.getResults(), "10" /* c */, "2" /* a */, "13" /* d */, "6" /* b */); results = rsp.getExpandedResults(); assertExpandGroups(results, aaa, bbb, ccc, ddd); assertExpandGroupCountAndOrder(aaa, 2, results, "7", "1"); @@ -162,12 +298,11 @@ private void _test(final String group, assertExpandGroupCountAndOrder(ccc, 2, results, "9", "11"); assertExpandGroupCountAndOrder(ddd, 2, results, "14", "12"); + // Test expand.rows - //Test expand.rows - params = new ModifiableSolrParams(); params.add("q", "term_s:YYYY"); - params.add("fq", "{!collapse field="+group+np+"}"); + params.add("fq", "{!collapse field=" + group + np + "}"); params.add("defType", "edismax"); params.add("bf", "field(test_i)"); params.add("expand", "true"); @@ -175,19 +310,20 @@ private void _test(final String group, params.add("expand.rows", "1"); setDistributedParams(params); rsp = queryServer(params); - assertCountAndOrder(4, rsp.getResults(), "10" /* c */, "2" /* a */, "13" /* d */, "6" /* b */); + assertCountAndOrder( + 4, rsp.getResults(), "10" /* c */, "2" /* a */, "13" /* d */, "6" /* b */); results = rsp.getExpandedResults(); assertExpandGroups(results, aaa, bbb, ccc, ddd); assertExpandGroupCountAndOrder(aaa, 1, results, "7"); assertExpandGroupCountAndOrder(bbb, 1, results, "8"); assertExpandGroupCountAndOrder(ccc, 1, results, "9"); assertExpandGroupCountAndOrder(ddd, 1, results, "14"); - - //Test expand.rows = 0 - no docs only expand count - + + // Test expand.rows = 0 - no docs only expand count + params = new ModifiableSolrParams(); params.add("q", "term_s:YYYY"); - params.add("fq", "{!collapse field="+group+np+"}"); + params.add("fq", "{!collapse field=" + group + np + "}"); params.add("defType", "edismax"); params.add("bf", "field(test_i)"); params.add("expand", "true"); @@ -195,16 +331,17 @@ private void _test(final String group, params.add("fl", "id"); setDistributedParams(params); rsp = queryServer(params); - assertCountAndOrder(4, rsp.getResults(), "10" /* c */, "2" /* a */, "13" /* d */, "6" /* b */); + assertCountAndOrder( + 4, rsp.getResults(), "10" /* c */, "2" /* a */, 
"13" /* d */, "6" /* b */); results = rsp.getExpandedResults(); assertExpandGroups(results, aaa, bbb, ccc, ddd); assertExpandGroupCountAndOrder(aaa, 0, results); assertExpandGroupCountAndOrder(bbb, 0, results); assertExpandGroupCountAndOrder(ccc, 0, results); assertExpandGroupCountAndOrder(ddd, 0, results); - - //Test expand.rows = 0 with expand.field - + + // Test expand.rows = 0 with expand.field + params = new ModifiableSolrParams(); params.add("q", "term_s:YYYY"); params.add("fq", "test_l:10"); @@ -222,40 +359,42 @@ private void _test(final String group, assertExpandGroups(results, aaa, ddd); assertExpandGroupCountAndOrder(aaa, 0, results); assertExpandGroupCountAndOrder(ddd, 0, results); - - //Test key-only fl - + + // Test key-only fl + params = new ModifiableSolrParams(); params.add("q", "term_s:YYYY"); - params.add("fq", "{!collapse field="+group+np+"}"); + params.add("fq", "{!collapse field=" + group + np + "}"); params.add("defType", "edismax"); params.add("bf", "field(test_i)"); params.add("expand", "true"); params.add("fl", "id"); - + setDistributedParams(params); rsp = queryServer(params); - assertCountAndOrder(4, rsp.getResults(), "10" /* c */, "2" /* a */, "13" /* d */, "6" /* b */); + assertCountAndOrder( + 4, rsp.getResults(), "10" /* c */, "2" /* a */, "13" /* d */, "6" /* b */); results = rsp.getExpandedResults(); assertExpandGroups(results, aaa, bbb, ccc, ddd); assertExpandGroupCountAndOrder(aaa, 2, results, "1", "7"); assertExpandGroupCountAndOrder(bbb, 2, results, "5", "8"); assertExpandGroupCountAndOrder(ccc, 2, results, "11", "9"); assertExpandGroupCountAndOrder(ddd, 2, results, "12", "14"); - - //Test distrib.singlePass true - + + // Test distrib.singlePass true + params = new ModifiableSolrParams(); params.add("q", "term_s:YYYY"); - params.add("fq", "{!collapse field="+group+np+"}"); + params.add("fq", "{!collapse field=" + group + np + "}"); params.add("defType", "edismax"); params.add("bf", "field(test_i)"); params.add("expand", "true"); params.add("distrib.singlePass", "true"); - + setDistributedParams(params); rsp = queryServer(params); - assertCountAndOrder(4, rsp.getResults(), "10" /* c */, "2" /* a */, "13" /* d */, "6" /* b */); + assertCountAndOrder( + 4, rsp.getResults(), "10" /* c */, "2" /* a */, "13" /* d */, "6" /* b */); results = rsp.getExpandedResults(); assertExpandGroups(results, aaa, bbb, ccc, ddd); assertExpandGroupCountAndOrder(aaa, 2, results, "1", "7"); @@ -265,7 +404,6 @@ private void _test(final String group, } { // queries matching all docs to test null groups from collapse and how it affects expand - ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("defType", "edismax"); @@ -274,10 +412,18 @@ private void _test(final String group, setDistributedParams(params); // nullPolicy=expand - params.add("fq", "{!collapse field="+group+" nullPolicy=expand}"); - + params.add("fq", "{!collapse field=" + group + " nullPolicy=expand}"); + QueryResponse rsp = queryServer(params); - assertCountAndOrder(6, rsp.getResults(), "10" /* c */, "88" /* null */, "2" /* a */, "13" /* d */, "99" /* null */, "6" /* b */); + assertCountAndOrder( + 6, + rsp.getResults(), + "10" /* c */, + "88" /* null */, + "2" /* a */, + "13" /* d */, + "99" /* null */, + "6" /* b */); Map results = rsp.getExpandedResults(); assertExpandGroups(results, aaa, bbb, ccc, ddd); assertExpandGroupCountAndOrder(aaa, 2, results, "1", "7"); @@ -286,10 +432,17 @@ private void _test(final String group, assertExpandGroupCountAndOrder(ddd, 2, results, 
"12", "14"); // nullPolicy=collapse - params.set("fq", "{!collapse field="+group+" nullPolicy=collapse}"); - + params.set("fq", "{!collapse field=" + group + " nullPolicy=collapse}"); + rsp = queryServer(params); - assertCountAndOrder(5, rsp.getResults(), "10" /* c */, "88" /* null */, "2" /* a */, "13" /* d */, "6" /* b */); + assertCountAndOrder( + 5, + rsp.getResults(), + "10" /* c */, + "88" /* null */, + "2" /* a */, + "13" /* d */, + "6" /* b */); results = rsp.getExpandedResults(); assertExpandGroups(results, aaa, bbb, ccc, ddd); assertExpandGroupCountAndOrder(aaa, 2, results, "1", "7"); @@ -298,11 +451,18 @@ private void _test(final String group, assertExpandGroupCountAndOrder(ddd, 2, results, "12", "14"); // nullPolicy=collapse w/ expand.nullGroup=true... - params.set("fq", "{!collapse field="+group+" nullPolicy=collapse}"); + params.set("fq", "{!collapse field=" + group + " nullPolicy=collapse}"); params.set("expand.nullGroup", "true"); - + rsp = queryServer(params); - assertCountAndOrder(5, rsp.getResults(), "10" /* c */, "88" /* null */, "2" /* a */, "13" /* d */, "6" /* b */); + assertCountAndOrder( + 5, + rsp.getResults(), + "10" /* c */, + "88" /* null */, + "2" /* a */, + "13" /* d */, + "6" /* b */); results = rsp.getExpandedResults(); assertExpandGroups(results, aaa, bbb, ccc, ddd, null); assertExpandGroupCountAndOrder(aaa, 2, results, "1", "7"); @@ -312,9 +472,9 @@ private void _test(final String group, assertExpandGroupCountAndOrder(null, 1, results, "99"); // nullPolicy=expand w/ expand.nullGroup=true (use small rows to ensure null expanded group) - params.set("fq", "{!collapse field="+group+" nullPolicy=expand}"); + params.set("fq", "{!collapse field=" + group + " nullPolicy=expand}"); params.set("rows", "3"); - + rsp = queryServer(params); assertCountAndOrder(3, rsp.getResults(), "10" /* c */, "88" /* null */, "2" /* a */); results = rsp.getExpandedResults(); @@ -323,9 +483,9 @@ private void _test(final String group, assertExpandGroupCountAndOrder(ccc, 2, results, "11", "9"); assertExpandGroupCountAndOrder(null, 1, results, "99"); - // nullPolicy=expand w/ expand.nullGroup=true & expand.rows = 0 + // nullPolicy=expand w/ expand.nullGroup=true & expand.rows = 0 params.set("expand.rows", "0"); - + rsp = queryServer(params); assertCountAndOrder(3, rsp.getResults(), "10" /* c */, "88" /* null */, "2" /* a */); results = rsp.getExpandedResults(); @@ -333,15 +493,15 @@ private void _test(final String group, assertExpandGroupCountAndOrder(aaa, 0, results); assertExpandGroupCountAndOrder(ccc, 0, results); assertExpandGroupCountAndOrder(null, 0, results); - } - } - private void assertExpandGroups(Map expandedResults, String... groups) throws Exception { - for(int i=0; i expandedResults, String... groups) + throws Exception { + for (int i = 0; i < groups.length; i++) { + if (!expandedResults.containsKey(groups[i])) { + throw new Exception( + "Expanded Group Not Found:" + groups[i] + ", Found:" + exportGroups(expandedResults)); } } } @@ -349,30 +509,34 @@ private void assertExpandGroups(Map expandedResults, S private String exportGroups(Map groups) { StringBuilder buf = new StringBuilder(); Iterator it = groups.keySet().iterator(); - while(it.hasNext()) { + while (it.hasNext()) { String group = it.next(); buf.append(group); - if(it.hasNext()) { + if (it.hasNext()) { buf.append(","); } } return buf.toString(); } - private void assertExpandGroupCountAndOrder(final String group, final int count, - final MapexpandedResults, - final String... 
docs) throws Exception { + private void assertExpandGroupCountAndOrder( + final String group, + final int count, + final Map expandedResults, + final String... docs) + throws Exception { SolrDocumentList results = expandedResults.get(group); - if(results == null) { - throw new Exception("Group Not Found:"+group); + if (results == null) { + throw new Exception("Group Not Found:" + group); } assertCountAndOrder(count, results, docs); } - private void assertCountAndOrder(final int count, final SolrDocumentList results, - final String... docs) throws Exception { + + private void assertCountAndOrder( + final int count, final SolrDocumentList results, final String... docs) throws Exception { assertEquals(results.toString(), count, results.size()); - for(int i=0; i { - if (random().nextBoolean()) { - setDistributedParams(params); - queryServer(params); - } else { - params.set("distrib", "false"); - controlClient.query(params); - } - }); + SolrException e = + expectThrows( + SolrException.class, + () -> { + if (random().nextBoolean()) { + setDistributedParams(params); + queryServer(params); + } else { + params.set("distrib", "false"); + controlClient.query(params); + } + }); assertEquals(e.code(), ErrorCode.BAD_REQUEST.code); assertTrue(e.getMessage().contains("facet.exists")); assertTrue(e.getMessage().contains("facet.mincount")); @@ -173,13 +181,13 @@ private void checkBasicRequest() throws Exception { } private void checkWithMinCountEqOne() throws Exception { - final ModifiableSolrParams params = buildParams("facet.mincount","1"); + final ModifiableSolrParams params = buildParams("facet.mincount", "1"); QueryResponse rsp = query(params); assertResponse(rsp); } private void checkWithSortCount() throws Exception { - final ModifiableSolrParams params = buildParams("facet.sort","count"); + final ModifiableSolrParams params = buildParams("facet.sort", "count"); QueryResponse rsp = query(params); assertResponse(rsp); } @@ -196,17 +204,17 @@ private ModifiableSolrParams buildParams(String... 
additionalParams) { params.add("q", "*:*"); params.add("rows", "0"); - //params.add("debugQuery", "true"); + // params.add("debugQuery", "true"); params.add("facet", "true"); params.add("sort", "id asc"); - - if(random().nextBoolean()){ + + if (random().nextBoolean()) { params.add("facet.method", "enum"); } - + params.add("facet.exists", "true"); params.add("facet.field", FLD); - for(int i = 0; i < additionalParams.length;) { + for (int i = 0; i < additionalParams.length; ) { params.add(additionalParams[i++], additionalParams[i++]); } return params; diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java index 7dd4de27c00..99ee8c00ff4 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java @@ -20,7 +20,6 @@ import java.util.Arrays; import java.util.Date; import java.util.List; - import junit.framework.AssertionFailedError; import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.client.solrj.SolrClient; @@ -35,27 +34,28 @@ import org.junit.Test; public class DistributedFacetPivotLargeTest extends BaseDistributedSearchTestCase { - - public static final String SPECIAL = ""; + + public static final String SPECIAL = ""; public DistributedFacetPivotLargeTest() { // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); } - + @Test @ShardsFixed(num = 4) public void test() throws Exception { - this.stress = 0 ; + this.stress = 0; handle.clear(); handle.put("QTime", SKIPVAL); handle.put("timestamp", SKIPVAL); - handle.put("maxScore", SKIPVAL); - + handle.put("maxScore", SKIPVAL); + setupDistributedPivotFacetDocuments(); - + QueryResponse rsp = null; - + List pivots = null; PivotField firstInt = null; PivotField firstBool = null; @@ -64,11 +64,18 @@ public void test() throws Exception { PivotField firstCompany = null; // basic check w/ limit & default sort (count) - rsp = query( "q", "*:*", - "rows", "0", - "facet","true", - "facet.pivot","place_s,company_t", - FacetParams.FACET_LIMIT, "12"); + rsp = + query( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "place_s,company_t", + FacetParams.FACET_LIMIT, + "12"); pivots = rsp.getFacetPivot().get("place_s,company_t"); assertEquals(12, pivots.size()); firstPlace = pivots.get(0); @@ -78,12 +85,20 @@ public void test() throws Exception { assertPivot("company_t", "microsoft", 56, firstPlace.getPivot().get(1)); // trivial mincount=0 check - rsp = query( "q", "does_not_exist_s:foo", - "rows", "0", - "facet","true", - "facet.pivot","company_t", - FacetParams.FACET_LIMIT, "10", - FacetParams.FACET_PIVOT_MINCOUNT,"0"); + rsp = + query( + "q", + "does_not_exist_s:foo", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "company_t", + FacetParams.FACET_LIMIT, + "10", + FacetParams.FACET_PIVOT_MINCOUNT, + "0"); pivots = rsp.getFacetPivot().get("company_t"); assertEquals(10, pivots.size()); for (PivotField p : pivots) { @@ -91,31 +106,50 @@ public void test() throws Exception { } // sanity check limit=0 w/ mincount=0 & missing=true - rsp = query( "q", "*:*", - "rows", "0", - "facet","true", - "f.company_t.facet.limit", "10", - 
"facet.pivot","special_s,bogus_s,company_t", - "facet.missing", "true", - FacetParams.FACET_LIMIT, "0", - FacetParams.FACET_PIVOT_MINCOUNT,"0"); + rsp = + query( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "f.company_t.facet.limit", + "10", + "facet.pivot", + "special_s,bogus_s,company_t", + "facet.missing", + "true", + FacetParams.FACET_LIMIT, + "0", + FacetParams.FACET_PIVOT_MINCOUNT, + "0"); pivots = rsp.getFacetPivot().get("special_s,bogus_s,company_t"); assertEquals(1, pivots.size()); // only the missing assertPivot("special_s", null, docNumber - 5, pivots.get(0)); // 5 docs w/special_s assertEquals(pivots.toString(), 1, pivots.get(0).getPivot().size()); - assertPivot("bogus_s", null, docNumber - 5 , pivots.get(0).getPivot().get(0)); // 5 docs w/special_s + assertPivot( + "bogus_s", null, docNumber - 5, pivots.get(0).getPivot().get(0)); // 5 docs w/special_s PivotField bogus = pivots.get(0).getPivot().get(0); assertEquals(bogus.toString(), 11, bogus.getPivot().size()); // last value would always be missing docs assertPivot("company_t", null, 2, bogus.getPivot().get(10)); // 2 docs w/company_t // basic check w/ default sort, limit, & mincount==0 - rsp = query( "q", "*:*", - "rows", "0", - "facet","true", - "facet.pivot","place_s,company_t", - FacetParams.FACET_LIMIT, "50", - FacetParams.FACET_PIVOT_MINCOUNT,"0"); + rsp = + query( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "place_s,company_t", + FacetParams.FACET_LIMIT, + "50", + FacetParams.FACET_PIVOT_MINCOUNT, + "0"); pivots = rsp.getFacetPivot().get("place_s,company_t"); assertEquals(50, pivots.size()); firstPlace = pivots.get(0); @@ -125,29 +159,34 @@ public void test() throws Exception { assertPivot("company_t", "microsoft", 56, firstPlace.getPivot().get(1)); // sort=index + offset + limit w/ some variables - for (SolrParams variableParams : - new SolrParams[] { // bother variations should kwrk just as well - // defauts - params(), - // force refinement - params(FacetParams.FACET_OVERREQUEST_RATIO, "1", - FacetParams.FACET_OVERREQUEST_COUNT, "0") }) { - - SolrParams p = SolrParams.wrapDefaults( params( "q", "*:*", - "rows", "0", - "facet","true", - "facet.sort","index", - "f.place_s.facet.limit", "20", - "f.place_s.facet.offset", "40", - "facet.pivot", "place_s,company_t"), - variableParams ); + for (SolrParams variableParams : + new SolrParams[] { // bother variations should kwrk just as well + // defauts + params(), + // force refinement + params( + FacetParams.FACET_OVERREQUEST_RATIO, "1", + FacetParams.FACET_OVERREQUEST_COUNT, "0") + }) { + + SolrParams p = + SolrParams.wrapDefaults( + params( + "q", "*:*", + "rows", "0", + "facet", "true", + "facet.sort", "index", + "f.place_s.facet.limit", "20", + "f.place_s.facet.offset", "40", + "facet.pivot", "place_s,company_t"), + variableParams); try { - rsp = query( p ); + rsp = query(p); pivots = rsp.getFacetPivot().get("place_s,company_t"); assertEquals(20, pivots.size()); // limit for (int i = 0; i < 10; i++) { - PivotField place = pivots.get(i); + PivotField place = pivots.get(i); assertTrue(place.toString(), place.getValue().toString().endsWith("placeholder")); assertEquals(3, place.getPivot().size()); assertPivot("company_t", "bbc", 6, place.getPivot().get(0)); @@ -158,7 +197,7 @@ public void test() throws Exception { assertPivot("place_s", "krakaw", 1, pivots.get(11)); assertPivot("place_s", "medical staffing network holdings, inc.", 51, pivots.get(12)); for (int i = 13; i < 20; i++) { - PivotField place = pivots.get(i); + PivotField 
place = pivots.get(i); assertTrue(place.toString(), place.getValue().toString().startsWith("placeholder")); assertEquals(1, place.getPivot().size()); PivotField company = place.getPivot().get(0); @@ -176,7 +215,7 @@ public void test() throws Exception { // // broken honda // - // This is tricky, here's what i think is happening.... + // This is tricky, here's what i think is happening.... // - "company:honda" only exists on twoShard, and only w/ "place:cardiff" // - twoShard has no other places in its docs // - twoShard can't return any other places to w/ honda as a count=0 sub-value @@ -198,7 +237,7 @@ public void test() throws Exception { // // // really trivial demonstration of the above problem - // + // // rsp = query( params( "q", "*:*", // "rows", "0", // "facet","true", @@ -208,50 +247,64 @@ public void test() throws Exception { // facet.missing=true + facet.sort=index + facet.pivot.mincount > 0 (SOLR-7829) final int expectedNumDocsMissingBool = 111; for (String facetSort : new String[] {"count", "index"}) { - for (int mincount : new int[] { 1, 20, - (expectedNumDocsMissingBool / 2) - 1, - (expectedNumDocsMissingBool / 2) + 1, - expectedNumDocsMissingBool }) { - - SolrParams p = params( "q", "*:*", - "fq","-real_b:true", // simplify asserts by ruling out true counts - "rows", "0", - "facet","true", - "facet.pivot", "real_b", - "facet.missing", "true", - "facet.pivot.mincount", ""+mincount, - "facet.sort", facetSort); - + for (int mincount : + new int[] { + 1, + 20, + (expectedNumDocsMissingBool / 2) - 1, + (expectedNumDocsMissingBool / 2) + 1, + expectedNumDocsMissingBool + }) { + + SolrParams p = + params( + "q", "*:*", + "fq", "-real_b:true", // simplify asserts by ruling out true counts + "rows", "0", + "facet", "true", + "facet.pivot", "real_b", + "facet.missing", "true", + "facet.pivot.mincount", "" + mincount, + "facet.sort", facetSort); + try { - rsp = query( p ); + rsp = query(p); pivots = rsp.getFacetPivot().get("real_b"); assertEquals(2, pivots.size()); // false, missing - in that order, regardless of sort - assertPivot("real_b", false, 300, pivots.get(0)); + assertPivot("real_b", false, 300, pivots.get(0)); assertPivot("real_b", null, expectedNumDocsMissingBool, pivots.get(1)); - + } catch (AssertionFailedError ae) { throw new AssertionError(ae.getMessage() + " <== " + p.toString(), ae); } } } - + // basic check w/ limit & index sort - for (SolrParams facetParams : - // results should be the same regardless of whether local params are used - new SolrParams[] { - // Broken: SOLR-6193 - // params("facet.pivot","{!facet.limit=4 facet.sort=index}place_s,company_t"), - // params("facet.pivot","{!facet.sort=index}place_s,company_t", - // FacetParams.FACET_LIMIT, "4"), - params("facet.pivot","place_s,company_t", - FacetParams.FACET_LIMIT, "4", - "facet.sort", "index") }) { - SolrParams p = SolrParams.wrapDefaults( params( "q", "*:*", - "rows", "0", - "facet","true"), - facetParams ); + for (SolrParams facetParams : + // results should be the same regardless of whether local params are used + new SolrParams[] { + // Broken: SOLR-6193 + // params("facet.pivot","{!facet.limit=4 facet.sort=index}place_s,company_t"), + // params("facet.pivot","{!facet.sort=index}place_s,company_t", + // FacetParams.FACET_LIMIT, "4"), + params( + "facet.pivot", + "place_s,company_t", + FacetParams.FACET_LIMIT, + "4", + "facet.sort", + "index") + }) { + SolrParams p = + SolrParams.wrapDefaults( + params( + "q", "*:*", + "rows", "0", + "facet", "true"), + facetParams); try { - rsp = query( p ); + rsp = 
query(p); pivots = rsp.getFacetPivot().get("place_s,company_t"); assertEquals(4, pivots.size()); firstPlace = pivots.get(0); @@ -267,20 +320,31 @@ public void test() throws Exception { // to triggers per-shard mincount > num docs on one shard // (beefed up test of same with nested pivot below) for (int limit : Arrays.asList(4, 444444, -1)) { - SolrParams p = params("q", "*:*", - "rows", "0", - // skip place_s:Nplaceholder buckets - "fq","-hiredate_dt:\"2012-10-01T12:30:00Z\"", - // skip company_t:compHolderN buckets from twoShard - "fq","-(+company_t:compHolder* +real_b:true)", - "facet","true", - "facet.pivot","place_s", - FacetParams.FACET_PIVOT_MINCOUNT, "50", - FacetParams.FACET_LIMIT, ""+limit, - "facet.sort", "index"); + SolrParams p = + params( + "q", + "*:*", + "rows", + "0", + // skip place_s:Nplaceholder buckets + "fq", + "-hiredate_dt:\"2012-10-01T12:30:00Z\"", + // skip company_t:compHolderN buckets from twoShard + "fq", + "-(+company_t:compHolder* +real_b:true)", + "facet", + "true", + "facet.pivot", + "place_s", + FacetParams.FACET_PIVOT_MINCOUNT, + "50", + FacetParams.FACET_LIMIT, + "" + limit, + "facet.sort", + "index"); rsp = null; try { - rsp = query( p ); + rsp = query(p); assertPivot("place_s", "cardiff", 107, rsp.getFacetPivot().get("place_s").get(0)); // - zeroShard = 50 ... above per-shard min of 50/(numShards=4) // - oneShard = 5 ... below per-shard min of 50/(numShards=4) .. should be refined @@ -290,86 +354,112 @@ public void test() throws Exception { throw new AssertionError(ae.getMessage() + ": " + p.toString() + " ==> " + rsp, ae); } } - + // test permutations of mincount & limit with sort=index // (there is a per-shard optimization on mincount when sort=index is used) for (int limit : Arrays.asList(4, 444444, -1)) { - SolrParams p = params("q", "*:*", - "rows", "0", - // skip place_s:Nplaceholder buckets - "fq","-hiredate_dt:\"2012-10-01T12:30:00Z\"", - // skip company_t:compHolderN buckets from twoShard - "fq","-(+company_t:compHolder* +real_b:true)", - "facet","true", - "facet.pivot","place_s,company_t", - FacetParams.FACET_PIVOT_MINCOUNT, "50", - FacetParams.FACET_LIMIT, ""+limit, - "facet.sort", "index"); + SolrParams p = + params( + "q", + "*:*", + "rows", + "0", + // skip place_s:Nplaceholder buckets + "fq", + "-hiredate_dt:\"2012-10-01T12:30:00Z\"", + // skip company_t:compHolderN buckets from twoShard + "fq", + "-(+company_t:compHolder* +real_b:true)", + "facet", + "true", + "facet.pivot", + "place_s,company_t", + FacetParams.FACET_PIVOT_MINCOUNT, + "50", + FacetParams.FACET_LIMIT, + "" + limit, + "facet.sort", + "index"); rsp = null; try { - rsp = query( p ); + rsp = query(p); pivots = rsp.getFacetPivot().get("place_s,company_t"); firstPlace = pivots.get(0); assertPivot("place_s", "cardiff", 107, firstPlace); // - assertPivot("company_t", "bbc", 101, firstPlace.getPivot().get(0)); - assertPivot("company_t", "honda", 50, firstPlace.getPivot().get(1)); - assertPivot("company_t", "microsoft", 56, firstPlace.getPivot().get(2)); - assertPivot("company_t", "polecat", 52, firstPlace.getPivot().get(3)); + assertPivot("company_t", "bbc", 101, firstPlace.getPivot().get(0)); + assertPivot("company_t", "honda", 50, firstPlace.getPivot().get(1)); + assertPivot("company_t", "microsoft", 56, firstPlace.getPivot().get(2)); + assertPivot("company_t", "polecat", 52, firstPlace.getPivot().get(3)); } catch (AssertionError ae) { throw new AssertionError(ae.getMessage() + ": " + p.toString() + " ==> " + rsp, ae); } } { // similar to the test above, but now force a 
restriction on the over request and allow - // terms that are early in index sort -- but don't meet the mincount overall -- to be considered + // terms that are early in index sort -- but don't meet the mincount overall -- to be + // considered // in the first phase. (SOLR-12954) - SolrParams p = params("q", "*:*", - "rows", "0", - // skip company_t:compHolderN buckets from twoShard - "fq","-(+company_t:compHolder* +real_b:true)", - "facet","true", - "facet.pivot","place_s,company_t", - // the (50) Nplaceholder place_s values exist in 6 each on oneShard - FacetParams.FACET_PIVOT_MINCOUNT, ""+(6 * shardsArr.length), - FacetParams.FACET_LIMIT, "4", - "facet.sort", "index"); + SolrParams p = + params( + "q", + "*:*", + "rows", + "0", + // skip company_t:compHolderN buckets from twoShard + "fq", + "-(+company_t:compHolder* +real_b:true)", + "facet", + "true", + "facet.pivot", + "place_s,company_t", + // the (50) Nplaceholder place_s values exist in 6 each on oneShard + FacetParams.FACET_PIVOT_MINCOUNT, + "" + (6 * shardsArr.length), + FacetParams.FACET_LIMIT, + "4", + "facet.sort", + "index"); rsp = null; try { - rsp = query( p ); + rsp = query(p); pivots = rsp.getFacetPivot().get("place_s,company_t"); firstPlace = pivots.get(0); assertPivot("place_s", "cardiff", 107, firstPlace); // - assertPivot("company_t", "bbc", 101, firstPlace.getPivot().get(0)); - assertPivot("company_t", "honda", 50, firstPlace.getPivot().get(1)); - assertPivot("company_t", "microsoft", 56, firstPlace.getPivot().get(2)); - assertPivot("company_t", "polecat", 52, firstPlace.getPivot().get(3)); + assertPivot("company_t", "bbc", 101, firstPlace.getPivot().get(0)); + assertPivot("company_t", "honda", 50, firstPlace.getPivot().get(1)); + assertPivot("company_t", "microsoft", 56, firstPlace.getPivot().get(2)); + assertPivot("company_t", "polecat", 52, firstPlace.getPivot().get(3)); } catch (AssertionError ae) { throw new AssertionError(ae.getMessage() + ": " + p.toString() + " ==> " + rsp, ae); } } - + // Pivot Faceting (combined wtih Field Faceting) - for (SolrParams facetParams : - // with and w/o an excluded fq - // (either way, facet results should be the same) - new SolrParams[] { - params("facet.pivot","place_s,company_t", - "facet.field","place_s"), - params("facet.pivot","{!ex=ok}place_s,company_t", - "facet.field","{!ex=ok}place_s", - "fq","{!tag=ok}place_s:cardiff"), - params("facet.pivot","{!ex=pl,co}place_s,company_t", - "fq","{!tag=pl}place_s:cardiff", - "fq","{!tag=co}company_t:bbc") }) { - + for (SolrParams facetParams : + // with and w/o an excluded fq + // (either way, facet results should be the same) + new SolrParams[] { + params( + "facet.pivot", "place_s,company_t", + "facet.field", "place_s"), + params( + "facet.pivot", "{!ex=ok}place_s,company_t", + "facet.field", "{!ex=ok}place_s", + "fq", "{!tag=ok}place_s:cardiff"), + params( + "facet.pivot", "{!ex=pl,co}place_s,company_t", + "fq", "{!tag=pl}place_s:cardiff", + "fq", "{!tag=co}company_t:bbc") + }) { + // default order (count) - rsp = query( SolrParams.wrapDefaults(params("q", "*:*", - "rows", "0", - "facet","true", - FacetParams.FACET_LIMIT, "4"), - facetParams) ); + rsp = + query( + SolrParams.wrapDefaults( + params("q", "*:*", "rows", "0", "facet", "true", FacetParams.FACET_LIMIT, "4"), + facetParams)); pivots = rsp.getFacetPivot().get("place_s,company_t"); assertEquals(4, pivots.size()); firstPlace = pivots.get(0); @@ -379,12 +469,21 @@ public void test() throws Exception { assertPivot("company_t", "bbc", 101, firstCompany); // Index Order - rsp 
= query( SolrParams.wrapDefaults(params("q", "*:*", - "rows", "0", - "facet","true", - FacetParams.FACET_LIMIT, "4", - "facet.sort", "index"), - facetParams) ); + rsp = + query( + SolrParams.wrapDefaults( + params( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + FacetParams.FACET_LIMIT, + "4", + "facet.sort", + "index"), + facetParams)); pivots = rsp.getFacetPivot().get("place_s,company_t"); assertEquals(4, pivots.size()); firstPlace = pivots.get(0); @@ -392,14 +491,18 @@ public void test() throws Exception { assertEquals(3, firstPlace.getPivot().size()); // num vals in data < limit==3 firstCompany = firstPlace.getPivot().get(0); assertPivot("company_t", "bbc", 6, firstCompany); - - // Field level limits - rsp = query( SolrParams.wrapDefaults(params("q", "*:*", - "rows", "0", - "facet","true", - "f.place_s.facet.limit","2", - "f.company_t.facet.limit","4"), - facetParams) ); + + // Field level limits + rsp = + query( + SolrParams.wrapDefaults( + params( + "q", "*:*", + "rows", "0", + "facet", "true", + "f.place_s.facet.limit", "2", + "f.company_t.facet.limit", "4"), + facetParams)); pivots = rsp.getFacetPivot().get("place_s,company_t"); assertEquals(2, pivots.size()); firstPlace = pivots.get(0); @@ -410,12 +513,20 @@ public void test() throws Exception { } // Pivot Faceting Count w/fq (not excluded) - rsp = query( "q", "*:*", - "rows", "0", - "fq","place_s:cardiff", - "facet","true", - "facet.pivot","place_s,company_t", - FacetParams.FACET_LIMIT, "4"); + rsp = + query( + "q", + "*:*", + "rows", + "0", + "fq", + "place_s:cardiff", + "facet", + "true", + "facet.pivot", + "place_s,company_t", + FacetParams.FACET_LIMIT, + "4"); pivots = rsp.getFacetPivot().get("place_s,company_t"); assertEquals(1, pivots.size()); firstPlace = pivots.get(0); @@ -424,15 +535,23 @@ public void test() throws Exception { firstCompany = firstPlace.getPivot().get(0); assertPivot("company_t", "bbc", 101, firstCompany); - // Same Pivot - one with exclusion and one w/o - rsp = query( "q", "*:*", - "rows", "0", - "fq","{!tag=ff}pay_i:[2000 TO *]", - "facet","true", - "facet.pivot","{!key=filt}place_s,company_t", - "facet.pivot","{!key=nofilt ex=ff}place_s,company_t", - FacetParams.FACET_LIMIT, "4"); + rsp = + query( + "q", + "*:*", + "rows", + "0", + "fq", + "{!tag=ff}pay_i:[2000 TO *]", + "facet", + "true", + "facet.pivot", + "{!key=filt}place_s,company_t", + "facet.pivot", + "{!key=nofilt ex=ff}place_s,company_t", + FacetParams.FACET_LIMIT, + "4"); pivots = rsp.getFacetPivot().get("filt"); assertEquals(4, pivots.size()); firstPlace = pivots.get(0); @@ -463,7 +582,7 @@ public void test() throws Exception { // pivots = rsp.getFacetPivot().get("sc"); // assertEquals(4, pivots.size()); // firstPlace = pivots.get(0); - // assertPivot("place_s", "cardiff", 105, firstPlace); + // assertPivot("place_s", "cardiff", 105, firstPlace); // assertEquals(4, firstPlace.getPivot().size()); // assertPivot("company_t", "bbc", 101, firstPlace.getPivot().get(0)); // assertPivot("company_t", "microsoft", 54, firstPlace.getPivot().get(1)); @@ -471,20 +590,21 @@ public void test() throws Exception { // pivots = rsp.getFacetPivot().get("si"); // assertEquals(4, pivots.size()); // firstPlace = pivots.get(0); - // assertPivot("place_s", "0placeholder", 6, firstPlace); + // assertPivot("place_s", "0placeholder", 6, firstPlace); // assertEquals(3, firstPlace.getPivot().size()); // only 3 in the data < facet.limit // assertPivot("company_t", "bbc", 6, firstPlace.getPivot().get(0)); // assertPivot("company_t", "microsoft", 6, 
firstPlace.getPivot().get(1)); - // Field level limits and small offset - rsp = query( "q", "*:*", - "rows", "0", - "facet","true", - "facet.pivot","place_s,company_t", - "f.place_s.facet.limit","2", - "f.company_t.facet.limit","4", - "facet.offset","1"); + rsp = + query( + "q", "*:*", + "rows", "0", + "facet", "true", + "facet.pivot", "place_s,company_t", + "f.place_s.facet.limit", "2", + "f.company_t.facet.limit", "4", + "facet.offset", "1"); pivots = rsp.getFacetPivot().get("place_s,company_t"); assertEquals(2, pivots.size()); firstPlace = pivots.get(0); @@ -492,17 +612,26 @@ public void test() throws Exception { assertEquals(2, firstPlace.getPivot().size()); // num vals in data < limit==4 firstCompany = firstPlace.getPivot().get(0); assertPivot("company_t", "bbc", 50, firstCompany); - - + // Field level offsets and limit - rsp = query( "q", "*:*", - "rows", "0", - "fq","{!tag=pl}place_s:cardiff", - "facet","true", - "facet.pivot","{!ex=pl}place_s,company_t", - "f.place_s.facet.offset","1", - "f.company_t.facet.offset","2", - FacetParams.FACET_LIMIT, "4"); + rsp = + query( + "q", + "*:*", + "rows", + "0", + "fq", + "{!tag=pl}place_s:cardiff", + "facet", + "true", + "facet.pivot", + "{!ex=pl}place_s,company_t", + "f.place_s.facet.offset", + "1", + "f.company_t.facet.offset", + "2", + FacetParams.FACET_LIMIT, + "4"); pivots = rsp.getFacetPivot().get("place_s,company_t"); assertEquals(4, pivots.size()); firstPlace = pivots.get(0); @@ -510,16 +639,24 @@ public void test() throws Exception { assertEquals(1, firstPlace.getPivot().size()); // num vals in data < limit==4 firstCompany = firstPlace.getPivot().get(0); assertPivot("company_t", "polecat", 50, firstCompany); - // datetime - rsp = query( "q", "*:*", - "rows", "0", - "facet","true", - "facet.pivot","hiredate_dt,place_s,company_t", - "f.hiredate_dt.facet.limit","2", - "f.hiredate_dt.facet.offset","1", - FacetParams.FACET_LIMIT, "4"); + rsp = + query( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "hiredate_dt,place_s,company_t", + "f.hiredate_dt.facet.limit", + "2", + "f.hiredate_dt.facet.offset", + "1", + FacetParams.FACET_LIMIT, + "4"); pivots = rsp.getFacetPivot().get("hiredate_dt,place_s,company_t"); assertEquals(2, pivots.size()); firstDate = pivots.get(0); // 2012-09-01T12:30:00Z @@ -532,13 +669,22 @@ public void test() throws Exception { assertPivot("company_t", "bbc", 50, firstCompany); // int - rsp = query( "q", "*:*", - "rows", "0", - "facet","true", - "facet.pivot","pay_i,place_s,company_t", - "f.pay_i.facet.limit","2", - "f.pay_i.facet.offset","1", - FacetParams.FACET_LIMIT, "4"); + rsp = + query( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "pay_i,place_s,company_t", + "f.pay_i.facet.limit", + "2", + "f.pay_i.facet.offset", + "1", + FacetParams.FACET_LIMIT, + "4"); pivots = rsp.getFacetPivot().get("pay_i,place_s,company_t"); assertEquals(2, pivots.size()); firstInt = pivots.get(0); @@ -549,15 +695,24 @@ public void test() throws Exception { assertEquals(3, firstPlace.getPivot().size()); firstCompany = firstPlace.getPivot().get(0); assertPivot("company_t", "bbc", 1, firstCompany); - + // boolean - rsp = query( "q", "*:*", - "rows", "0", - "facet","true", - "facet.pivot","real_b,place_s,company_t", - "f.real_b.facet.missing","true", - "f.real_b.facet.limit","2", - FacetParams.FACET_LIMIT, "4"); + rsp = + query( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "real_b,place_s,company_t", + "f.real_b.facet.missing", + "true", + "f.real_b.facet.limit", 
+ "2", + FacetParams.FACET_LIMIT, + "4"); pivots = rsp.getFacetPivot().get("real_b,place_s,company_t"); assertEquals(3, pivots.size()); firstBool = pivots.get(0); @@ -568,23 +723,38 @@ public void test() throws Exception { assertEquals(3, firstPlace.getPivot().size()); firstCompany = firstPlace.getPivot().get(0); assertPivot("company_t", "bbc", 6, firstCompany); - + // bogus fields - rsp = query( "q", "*:*", - "rows", "0", - "facet","true", - "facet.pivot","doesntexist_t,neitherdoi_i", - FacetParams.FACET_LIMIT, "4"); + rsp = + query( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "doesntexist_t,neitherdoi_i", + FacetParams.FACET_LIMIT, + "4"); pivots = rsp.getFacetPivot().get("doesntexist_t,neitherdoi_i"); assertEquals(0, pivots.size()); // bogus fields with facet.missing - rsp = query( "q", "*:*", - "rows", "0", - "facet","true", - "facet.pivot","doesntexist_t,neitherdoi_i", - "facet.missing", "true", - FacetParams.FACET_LIMIT, "4"); + rsp = + query( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "doesntexist_t,neitherdoi_i", + "facet.missing", + "true", + FacetParams.FACET_LIMIT, + "4"); pivots = rsp.getFacetPivot().get("doesntexist_t,neitherdoi_i"); assertEquals(1, pivots.size()); assertPivot("doesntexist_t", null, docNumber, pivots.get(0)); @@ -592,54 +762,63 @@ public void test() throws Exception { assertPivot("neitherdoi_i", null, docNumber, pivots.get(0).getPivot().get(0)); // Negative facet limit - for (SolrParams facetParams : - // results should be the same regardless of whether facet.limit is global, - // a local param, or specified as a per-field override for both fields - new SolrParams[] { - params(FacetParams.FACET_LIMIT, "-1", - "facet.pivot","place_s,company_t"), - // Broken: SOLR-6193 - // params("facet.pivot","{!facet.limit=-1}place_s,company_t"), - params("f.place_s.facet.limit", "-1", - "f.company_t.facet.limit", "-1", - "facet.pivot","place_s,company_t") }) { - - SolrParams p = SolrParams.wrapDefaults( params( "q", "*:*", - "rows", "0", - "facet","true", - "facet.sort", "count" ), - facetParams); + for (SolrParams facetParams : + // results should be the same regardless of whether facet.limit is global, + // a local param, or specified as a per-field override for both fields + new SolrParams[] { + params(FacetParams.FACET_LIMIT, "-1", "facet.pivot", "place_s,company_t"), + // Broken: SOLR-6193 + // params("facet.pivot","{!facet.limit=-1}place_s,company_t"), + params( + "f.place_s.facet.limit", "-1", + "f.company_t.facet.limit", "-1", + "facet.pivot", "place_s,company_t") + }) { + + SolrParams p = + SolrParams.wrapDefaults( + params( + "q", "*:*", + "rows", "0", + "facet", "true", + "facet.sort", "count"), + facetParams); try { - rsp = query( p ); + rsp = query(p); pivots = rsp.getFacetPivot().get("place_s,company_t"); assertEquals(103, pivots.size()); firstPlace = pivots.get(0); assertPivot("place_s", "cardiff", 257, firstPlace); assertEquals(54, firstPlace.getPivot().size()); firstCompany = firstPlace.getPivot().get(0); - assertPivot("company_t","bbc", 101, firstCompany); + assertPivot("company_t", "bbc", 101, firstCompany); } catch (AssertionFailedError ae) { throw new AssertionError(ae.getMessage() + " <== " + p.toString(), ae); } } // Negative per-field facet limit (outer) - for (SolrParams facetParams : - // results should be the same regardless of whether per-field facet.limit is - // a global or a local param - new SolrParams[] { - // Broken: SOLR-6193 - // params( 
"facet.pivot","{!f.id.facet.limit=-1}place_s,id" ), - params( "facet.pivot","place_s,id", - "f.id.facet.limit", "-1") }) { - - SolrParams p = SolrParams.wrapDefaults( params( "q", "*:*", - "rows", "0", - "facet","true", - "facet.sort", "count" ), - facetParams); + for (SolrParams facetParams : + // results should be the same regardless of whether per-field facet.limit is + // a global or a local param + new SolrParams[] { + // Broken: SOLR-6193 + // params( "facet.pivot","{!f.id.facet.limit=-1}place_s,id" ), + params( + "facet.pivot", "place_s,id", + "f.id.facet.limit", "-1") + }) { + + SolrParams p = + SolrParams.wrapDefaults( + params( + "q", "*:*", + "rows", "0", + "facet", "true", + "facet.sort", "count"), + facetParams); try { - rsp = query( p ); + rsp = query(p); pivots = rsp.getFacetPivot().get("place_s,id"); assertEquals(100, pivots.size()); // default firstPlace = pivots.get(0); @@ -651,22 +830,27 @@ public void test() throws Exception { } // Negative per-field facet limit (inner) - for (SolrParams facetParams : - // results should be the same regardless of whether per-field facet.limit is - // a global or a local param - new SolrParams[] { - // Broken: SOLR-6193 - // params( "facet.pivot","{!f.place_s.facet.limit=-1}place_s,id" ), - params( "facet.pivot","place_s,id", - "f.place_s.facet.limit", "-1") }) { - - SolrParams p = SolrParams.wrapDefaults( params( "q", "*:*", - "rows", "0", - "facet","true", - "facet.sort", "count" ), - facetParams); + for (SolrParams facetParams : + // results should be the same regardless of whether per-field facet.limit is + // a global or a local param + new SolrParams[] { + // Broken: SOLR-6193 + // params( "facet.pivot","{!f.place_s.facet.limit=-1}place_s,id" ), + params( + "facet.pivot", "place_s,id", + "f.place_s.facet.limit", "-1") + }) { + + SolrParams p = + SolrParams.wrapDefaults( + params( + "q", "*:*", + "rows", "0", + "facet", "true", + "facet.sort", "count"), + facetParams); try { - rsp = query( p ); + rsp = query(p); pivots = rsp.getFacetPivot().get("place_s,id"); assertEquals(103, pivots.size()); firstPlace = pivots.get(0); @@ -676,16 +860,24 @@ public void test() throws Exception { throw new AssertionError(ae.getMessage() + " <== " + p.toString(), ae); } } - + // Mincount + facet.pivot 2 different ways (swap field order) - rsp = query( "q", "*:*", - "rows", "0", - "facet","true", - "facet.pivot","place_s,company_t", - "facet.pivot","company_t,place_s", - FacetParams.FACET_PIVOT_MINCOUNT,"6"); + rsp = + query( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "place_s,company_t", + "facet.pivot", + "company_t,place_s", + FacetParams.FACET_PIVOT_MINCOUNT, + "6"); pivots = rsp.getFacetPivot().get("place_s,company_t"); - assertEquals(52, pivots.size()); + assertEquals(52, pivots.size()); firstPlace = pivots.get(0); assertPivot("place_s", "cardiff", 257, firstPlace); assertEquals(4, firstPlace.getPivot().size()); @@ -701,17 +893,27 @@ public void test() throws Exception { assertPivot("place_s", "cardiff", 101, firstPlace); // refine on SPECIAL empty string - rsp = query( "q", "*:*", - "fq", "-place_s:0placeholder", - "rows", "0", - "facet","true", - "facet.limit","1", - FacetParams.FACET_OVERREQUEST_RATIO, "0", // force refinement - FacetParams.FACET_OVERREQUEST_COUNT, "1", // force refinement - "facet.pivot","special_s,company_t"); + rsp = + query( + "q", + "*:*", + "fq", + "-place_s:0placeholder", + "rows", + "0", + "facet", + "true", + "facet.limit", + "1", + FacetParams.FACET_OVERREQUEST_RATIO, + "0", // force 
refinement + FacetParams.FACET_OVERREQUEST_COUNT, + "1", // force refinement + "facet.pivot", + "special_s,company_t"); assertEquals(docNumber - 6, rsp.getResults().getNumFound()); // all docs but 0place pivots = rsp.getFacetPivot().get("special_s,company_t"); - assertEquals(1, pivots.size()); + assertEquals(1, pivots.size()); firstPlace = pivots.get(0); assertPivot("special_s", SPECIAL, 3, firstPlace); assertEquals(1, firstPlace.getPivot().size()); @@ -720,37 +922,56 @@ public void test() throws Exception { // TODO test "company_t,special_s" as well - // refine on SPECIAL empty string & facet.missing // Also proves refinement on non-top elements occurs and allows them to get into the top - rsp = query( "q", "*:*", - "fq", "-place_s:0placeholder", - "rows", "0", - "facet","true", - "facet.limit","1", - "facet.missing","true", - FacetParams.FACET_OVERREQUEST_RATIO, "0", // force refinement - FacetParams.FACET_OVERREQUEST_COUNT, "2", // force refinement - "facet.pivot","special_s,company_t"); + rsp = + query( + "q", + "*:*", + "fq", + "-place_s:0placeholder", + "rows", + "0", + "facet", + "true", + "facet.limit", + "1", + "facet.missing", + "true", + FacetParams.FACET_OVERREQUEST_RATIO, + "0", // force refinement + FacetParams.FACET_OVERREQUEST_COUNT, + "2", // force refinement + "facet.pivot", + "special_s,company_t"); assertEquals(docNumber - 6, rsp.getResults().getNumFound()); // all docs but 0place pivots = rsp.getFacetPivot().get("special_s,company_t"); - assertEquals(2, pivots.size()); + assertEquals(2, pivots.size()); firstPlace = pivots.get(0); assertPivot("special_s", SPECIAL, 3, firstPlace); assertEquals(1, firstPlace.getPivot().size()); firstCompany = firstPlace.getPivot().get(0); assertPivot("company_t", "microsoft", 2, firstCompany); // last is "missing" val - assertPivot("special_s", null, docNumber -6 -3 -2, pivots.get(1)); // -0place -SPECIAL -xxx + assertPivot("special_s", null, docNumber - 6 - 3 - 2, pivots.get(1)); // -0place -SPECIAL -xxx // forced refinement on facet.missing - rsp = query( "q", "*:*", - "rows", "0", - "facet","true", - "f.bogus_x_s.facet.missing","true", - "f.bogus_y_s.facet.missing","true", - "facet.pivot","bogus_x_s,place_s,bogus_y_s,company_t", - FacetParams.FACET_LIMIT, "12"); + rsp = + query( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "f.bogus_x_s.facet.missing", + "true", + "f.bogus_y_s.facet.missing", + "true", + "facet.pivot", + "bogus_x_s,place_s,bogus_y_s,company_t", + FacetParams.FACET_LIMIT, + "12"); pivots = rsp.getFacetPivot().get("bogus_x_s,place_s,bogus_y_s,company_t"); assertEquals(1, pivots.size()); // just the missing value for bogus_x_s assertPivot("bogus_x_s", null, docNumber, pivots.get(0)); @@ -764,25 +985,35 @@ public void test() throws Exception { // Microsoft will come back wrong if refinement was not done correctly assertPivot("company_t", "microsoft", 56, firstPlace.getPivot().get(0).getPivot().get(1)); - - - - // Overrequesting a lot - this.query( "q", "*:*", - "rows", "0", - "facet", "true", - "facet.pivot","place_s,company_t", - FacetParams.FACET_OVERREQUEST_RATIO, "10", - FacetParams.FACET_OVERREQUEST_COUNT, "100"); - + this.query( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "place_s,company_t", + FacetParams.FACET_OVERREQUEST_RATIO, + "10", + FacetParams.FACET_OVERREQUEST_COUNT, + "100"); + // Overrequesting off - this.query( "q", "*:*", - "rows", "0", - "facet", "true", - "facet.pivot","place_s,company_t", - FacetParams.FACET_OVERREQUEST_RATIO, "0", - 
FacetParams.FACET_OVERREQUEST_COUNT, "0"); + this.query( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "place_s,company_t", + FacetParams.FACET_OVERREQUEST_RATIO, + "0", + FacetParams.FACET_OVERREQUEST_COUNT, + "0"); doTestDeepPivotStats(); doTestPivotRanges(); @@ -790,12 +1021,14 @@ public void test() throws Exception { private void doTestDeepPivotStats() throws Exception { - QueryResponse rsp = query("q", "*:*", - "rows", "0", - "facet", "true", - "facet.pivot","{!stats=s1}place_s,company_t", - "stats", "true", - "stats.field", "{!key=avg_price tag=s1}pay_i"); + QueryResponse rsp = + query( + "q", "*:*", + "rows", "0", + "facet", "true", + "facet.pivot", "{!stats=s1}place_s,company_t", + "stats", "true", + "stats.field", "{!key=avg_price tag=s1}pay_i"); List pivots = rsp.getFacetPivot().get("place_s,company_t"); @@ -818,7 +1051,8 @@ private void doTestDeepPivotStats() throws Exception { assertEquals("bbc", bbcCardifftPivotField.getValue()); assertEquals(101, bbcCardifftPivotField.getCount()); - FieldStatsInfo bbcCardifftPivotFieldStatsInfo = bbcCardifftPivotField.getFieldStatsInfo().get("avg_price"); + FieldStatsInfo bbcCardifftPivotFieldStatsInfo = + bbcCardifftPivotField.getFieldStatsInfo().get("avg_price"); assertEquals(2400.0, bbcCardifftPivotFieldStatsInfo.getMin()); assertEquals(8742.0, bbcCardifftPivotFieldStatsInfo.getMax()); assertEquals(101, (long) bbcCardifftPivotFieldStatsInfo.getCount()); @@ -828,12 +1062,12 @@ private void doTestDeepPivotStats() throws Exception { assertEquals(2462.792079208, (double) bbcCardifftPivotFieldStatsInfo.getMean(), 0.1E-7); assertEquals(631.0525860312, bbcCardifftPivotFieldStatsInfo.getStddev(), 0.1E-7); - PivotField placeholder0PivotField = pivots.get(2); assertEquals("0placeholder", placeholder0PivotField.getValue()); assertEquals(6, placeholder0PivotField.getCount()); - FieldStatsInfo placeholder0PivotFieldStatsInfo = placeholder0PivotField.getFieldStatsInfo().get("avg_price"); + FieldStatsInfo placeholder0PivotFieldStatsInfo = + placeholder0PivotField.getFieldStatsInfo().get("avg_price"); assertEquals("avg_price", placeholder0PivotFieldStatsInfo.getName()); assertEquals(2000.0, placeholder0PivotFieldStatsInfo.getMin()); assertEquals(6400.0, placeholder0PivotFieldStatsInfo.getMax()); @@ -848,7 +1082,8 @@ private void doTestDeepPivotStats() throws Exception { assertEquals("microsoft", microsoftPlaceholder0PivotField.getValue()); assertEquals(6, microsoftPlaceholder0PivotField.getCount()); - FieldStatsInfo microsoftPlaceholder0PivotFieldStatsInfo = microsoftPlaceholder0PivotField.getFieldStatsInfo().get("avg_price"); + FieldStatsInfo microsoftPlaceholder0PivotFieldStatsInfo = + microsoftPlaceholder0PivotField.getFieldStatsInfo().get("avg_price"); assertEquals("avg_price", microsoftPlaceholder0PivotFieldStatsInfo.getName()); assertEquals(2000.0, microsoftPlaceholder0PivotFieldStatsInfo.getMin()); assertEquals(6400.0, microsoftPlaceholder0PivotFieldStatsInfo.getMax()); @@ -856,39 +1091,65 @@ private void doTestDeepPivotStats() throws Exception { assertEquals(0, (long) microsoftPlaceholder0PivotFieldStatsInfo.getMissing()); assertEquals(22700.0, microsoftPlaceholder0PivotFieldStatsInfo.getSum()); assertEquals(1.0105E8, microsoftPlaceholder0PivotFieldStatsInfo.getSumOfSquares(), 0); - assertEquals(3783.333333333, (double) microsoftPlaceholder0PivotFieldStatsInfo.getMean(), 0.1E-7); + assertEquals( + 3783.333333333, (double) microsoftPlaceholder0PivotFieldStatsInfo.getMean(), 0.1E-7); assertEquals(1741.742422595, 
microsoftPlaceholder0PivotFieldStatsInfo.getStddev(), 0.1E-7); } - /** - * spot checks some pivot values and the ranges hanging on them - */ + /** spot checks some pivot values and the ranges hanging on them */ @SuppressWarnings({"unchecked"}) private void doTestPivotRanges() throws Exception { // note: 'p0' is only a top level range, not included in per-pivot ranges - for (SolrParams p : new SolrParams[]{ - // results should be identical for all of these - params("facet.range", "{!key=p0 facet.range.gap=500}pay_i", - "facet.range", "{!key=p1 tag=t1 facet.range.gap=100}pay_i", - "facet.range", "{!key=p2 tag=t1 facet.range.gap=200}pay_i", - "facet.range.start", "0", - "facet.range.end", "1000"), - params("facet.range", "{!key=p0 facet.range.gap=500}pay_i", - "facet.range", "{!key=p1 tag=t1 facet.range.gap=100}pay_i", - "facet.range", "{!key=p2 tag=t1 facet.range.gap=200}pay_i", - "f.pay_i.facet.range.start", "0", - "facet.range.end", "1000"), - params("facet.range", "{!key=p0 facet.range.gap=500 facet.range.start=0}pay_i", - "facet.range", "{!key=p1 tag=t1 facet.range.gap=100 facet.range.start=0}pay_i", - "facet.range", "{!key=p2 tag=t1 facet.range.gap=200 facet.range.start=0}pay_i", - "facet.range.end", "1000")}) { - - QueryResponse rsp - = query(SolrParams.wrapDefaults(p, params("q", "*:*", - "rows", "0", - "facet", "true", - "facet.pivot", "{!range=t1}place_s,company_t"))); + for (SolrParams p : + new SolrParams[] { + // results should be identical for all of these + params( + "facet.range", + "{!key=p0 facet.range.gap=500}pay_i", + "facet.range", + "{!key=p1 tag=t1 facet.range.gap=100}pay_i", + "facet.range", + "{!key=p2 tag=t1 facet.range.gap=200}pay_i", + "facet.range.start", + "0", + "facet.range.end", + "1000"), + params( + "facet.range", + "{!key=p0 facet.range.gap=500}pay_i", + "facet.range", + "{!key=p1 tag=t1 facet.range.gap=100}pay_i", + "facet.range", + "{!key=p2 tag=t1 facet.range.gap=200}pay_i", + "f.pay_i.facet.range.start", + "0", + "facet.range.end", + "1000"), + params( + "facet.range", + "{!key=p0 facet.range.gap=500 facet.range.start=0}pay_i", + "facet.range", + "{!key=p1 tag=t1 facet.range.gap=100 facet.range.start=0}pay_i", + "facet.range", + "{!key=p2 tag=t1 facet.range.gap=200 facet.range.start=0}pay_i", + "facet.range.end", + "1000") + }) { + + QueryResponse rsp = + query( + SolrParams.wrapDefaults( + p, + params( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "{!range=t1}place_s,company_t"))); List pivots = rsp.getFacetPivot().get("place_s,company_t"); PivotField pf = null; // changes as we spot check @@ -953,26 +1214,25 @@ private void doTestPivotRanges() throws Exception { assertEquals(1, rfc.get(0).getCount()); assertEquals("100", rfc.get(1).getValue()); assertEquals(0, rfc.get(1).getCount()); - } } - /** - * asserts that the actual PivotField matches the expected criteria - */ - private void assertPivot(String field, Object value, int count, // int numKids, - PivotField actual) { + /** asserts that the actual PivotField matches the expected criteria */ + private void assertPivot( + String field, + Object value, + int count, // int numKids, + PivotField actual) { assertEquals("FIELD: " + actual.toString(), field, actual.getField()); assertEquals("VALUE: " + actual.toString(), value, actual.getValue()); assertEquals("COUNT: " + actual.toString(), count, actual.getCount()); // TODO: add arg && assert on number of kids - //assertEquals("#KIDS: " + actual.toString(), numKids, actual.getPivot().size()); + // assertEquals("#KIDS: " + 
actual.toString(), numKids, actual.getPivot().size()); } - /** - * asserts that the actual RangeFacet matches the expected criteria - */ - private void assertRange(String name, B start, G gap, B end, int numCount, RangeFacet actual) { + /** asserts that the actual RangeFacet matches the expected criteria */ + private void assertRange( + String name, B start, G gap, B end, int numCount, RangeFacet actual) { assertEquals("NAME: " + actual.toString(), name, actual.getName()); assertEquals("START: " + actual.toString(), start, actual.getStart()); assertEquals("GAP: " + actual.toString(), gap, actual.getGap()); @@ -980,9 +1240,9 @@ private void assertRange(String name, B start, G gap, B end, int numCount assertEquals("#COUNT: " + actual.toString(), numCount, actual.getCounts().size()); } - private void setupDistributedPivotFacetDocuments() throws Exception{ - - //Clear docs + private void setupDistributedPivotFacetDocuments() throws Exception { + + // Clear docs del("*:*"); commit(); @@ -992,69 +1252,342 @@ private void setupDistributedPivotFacetDocuments() throws Exception{ final SolrClient twoShard = clients.get(2); final SolrClient threeShard = clients.get(3); // edge case: never gets any matching docs - for(Integer i=0;i - * test demonstrating how overrequesting helps finds top-terms in the "long tail" - * of shards that don't have even distributions of terms (something that can be common - * in cases of custom sharding -- even if you don't know that there is a corrolation - * between the property you are sharding on and the property you are faceting on). - *
<p>
- * NOTE: This test ignores the control collection (in single node mode, there is no - * need for the overrequesting, all the data is local -- so comparisons with it wouldn't - * be valid in the cases we are testing here) - *
<p>
- *
<p>
- * NOTE: uses the same indexed documents as {@link DistributedFacetSimpleRefinementLongTailTest} -- - * however the behavior of refine:simple is "simpler" then the refinement logic used by - * facet.pivot so the assertions in this test vary from that test. - *
<p>
+ * test demonstrating how overrequesting helps finds top-terms in the "long tail" of shards that + * don't have even distributions of terms (something that can be common in cases of custom sharding + * -- even if you don't know that there is a corrolation between the property you are sharding on + * and the property you are faceting on). + * + *
<p>
NOTE: This test ignores the control collection (in single node mode, there is no need + * for the overrequesting, all the data is local -- so comparisons with it wouldn't be valid in the + * cases we are testing here) + * + *
<p>
NOTE: uses the same indexed documents as {@link + * DistributedFacetSimpleRefinementLongTailTest} -- however the behavior of refine:simple + * is "simpler" then the refinement logic used by facet.pivot so the assertions + * in this test vary from that test. */ public class DistributedFacetPivotLongTailTest extends BaseDistributedSearchTestCase { - + private String STAT_FIELD = null; // will be randomized single value vs multivalued public DistributedFacetPivotLongTailTest() { // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); STAT_FIELD = random().nextBoolean() ? "stat_i1" : "stat_i"; } - + @Test @ShardsFixed(num = 3) public void test() throws Exception { @@ -67,24 +64,26 @@ public void test() throws Exception { checkRefinementAndOverrequesting(); doTestDeepPivotStats(); } - + private void sanityCheckIndividualShards() throws Exception { assertEquals("This test assumes exactly 3 shards/clients", 3, clients.size()); - - SolrParams req = params( "q", "*:*", - "distrib", "false", - "facet", "true", - "facet.limit", "10", - "facet.pivot", "foo_s,bar_s"); + + SolrParams req = + params( + "q", "*:*", + "distrib", "false", + "facet", "true", + "facet.limit", "10", + "facet.pivot", "foo_s,bar_s"); // sanity check that our expectations about each shard (non-distrib) are correct PivotField pivot = null; List pivots = null; - + List> shardPivots = new ArrayList<>(clients.size()); for (int i = 0; i < clients.size(); i++) { - shardPivots.add(clients.get(i).query( req ).getFacetPivot().get("foo_s,bar_s")); + shardPivots.add(clients.get(i).query(req).getFacetPivot().get("foo_s,bar_s")); } // top 5 same on all shards @@ -92,7 +91,7 @@ private void sanityCheckIndividualShards() throws Exception { assertEquals(10, shardPivots.get(i).size()); for (int j = 0; j < 5; j++) { pivot = shardPivots.get(i).get(j); - assertEquals(pivot.toString(), "aaa"+j, pivot.getValue()); + assertEquals(pivot.toString(), "aaa" + j, pivot.getValue()); assertEquals(pivot.toString(), 100, pivot.getCount()); } } @@ -131,16 +130,29 @@ private void checkRefinementAndOverrequesting() throws Exception { // if we disable overrequesting, we don't find the long tail List pivots = null; PivotField pivot = null; - pivots = queryServer( params( "q", "*:*", - "shards", getShardsString(), - FacetParams.FACET_OVERREQUEST_COUNT, "0", - FacetParams.FACET_OVERREQUEST_RATIO, "0", - "facet", "true", - "facet.limit", "6", - "facet.pivot", "{!stats=sxy}foo_s,bar_s", - "stats", "true", - "stats.field", "{!tag=sxy}" + STAT_FIELD) - ).getFacetPivot().get("foo_s,bar_s"); + pivots = + queryServer( + params( + "q", + "*:*", + "shards", + getShardsString(), + FacetParams.FACET_OVERREQUEST_COUNT, + "0", + FacetParams.FACET_OVERREQUEST_RATIO, + "0", + "facet", + "true", + "facet.limit", + "6", + "facet.pivot", + "{!stats=sxy}foo_s,bar_s", + "stats", + "true", + "stats.field", + "{!tag=sxy}" + STAT_FIELD)) + .getFacetPivot() + .get("foo_s,bar_s"); assertEquals(6, pivots.size()); for (int i = 0; i < 5; i++) { pivot = pivots.get(i); @@ -164,22 +176,26 @@ private void checkRefinementAndOverrequesting() throws Exception { assertEquals(0.55846323792, bbb0Stats.getStddev(), 0.1E-7); } - - // with default overrequesting, we should find the correct top 6 including + // with default overrequesting, we should find the correct top 6 including // long tail and 
top sub-pivots // (even if we disable overrequesting on the sub-pivot) - for (ModifiableSolrParams q : new ModifiableSolrParams[] { - params(), - params("f.bar_s.facet.overrequest.ratio","0", - "f.bar_s.facet.overrequest.count","0") }) { - - q.add( params( "q", "*:*", - "shards", getShardsString(), - "facet", "true", - "facet.limit", "6", - "facet.pivot", "foo_s,bar_s" )); - pivots = queryServer( q ).getFacetPivot().get("foo_s,bar_s"); - + for (ModifiableSolrParams q : + new ModifiableSolrParams[] { + params(), + params( + "f.bar_s.facet.overrequest.ratio", "0", + "f.bar_s.facet.overrequest.count", "0") + }) { + + q.add( + params( + "q", "*:*", + "shards", getShardsString(), + "facet", "true", + "facet.limit", "6", + "facet.pivot", "foo_s,bar_s")); + pivots = queryServer(q).getFacetPivot().get("foo_s,bar_s"); + assertEquals(6, pivots.size()); for (int i = 0; i < 5; i++) { pivot = pivots.get(i); @@ -202,16 +218,20 @@ private void checkRefinementAndOverrequesting() throws Exception { } } - // if we lower the facet.limit on the sub-pivot, overrequesting should still ensure + // if we lower the facet.limit on the sub-pivot, overrequesting should still ensure // that we get the correct top5 including "tailB" - pivots = queryServer( params( "q", "*:*", - "shards", getShardsString(), - "facet", "true", - "facet.limit", "6", - "f.bar_s.facet.limit", "5", - "facet.pivot", "foo_s,bar_s" ) - ).getFacetPivot().get("foo_s,bar_s"); + pivots = + queryServer( + params( + "q", "*:*", + "shards", getShardsString(), + "facet", "true", + "facet.limit", "6", + "f.bar_s.facet.limit", "5", + "facet.pivot", "foo_s,bar_s")) + .getFacetPivot() + .get("foo_s,bar_s"); assertEquals(6, pivots.size()); for (int i = 0; i < 5; i++) { pivot = pivots.get(i); @@ -233,18 +253,22 @@ private void checkRefinementAndOverrequesting() throws Exception { assertEquals(pivot.toString(), 14, pivot.getCount()); } - // however with a lower limit and overrequesting disabled, + // however with a lower limit and overrequesting disabled, // we're going to miss out on tailB - pivots = queryServer( params( "q", "*:*", - "shards", getShardsString(), - "facet", "true", - "facet.limit", "6", - "f.bar_s.facet.overrequest.ratio", "0", - "f.bar_s.facet.overrequest.count", "0", - "f.bar_s.facet.limit", "5", - "facet.pivot", "foo_s,bar_s" ) - ).getFacetPivot().get("foo_s,bar_s"); + pivots = + queryServer( + params( + "q", "*:*", + "shards", getShardsString(), + "facet", "true", + "facet.limit", "6", + "f.bar_s.facet.overrequest.ratio", "0", + "f.bar_s.facet.overrequest.count", "0", + "f.bar_s.facet.limit", "5", + "facet.pivot", "foo_s,bar_s")) + .getFacetPivot() + .get("foo_s,bar_s"); assertEquals(6, pivots.size()); for (int i = 0; i < 5; i++) { pivot = pivots.get(i); @@ -262,20 +286,22 @@ private void checkRefinementAndOverrequesting() throws Exception { assertTrue(pivot.toString(), pivot.getValue().toString().startsWith("ccc")); assertEquals(pivot.toString(), 14, pivot.getCount()); } - } private void doTestDeepPivotStats() throws Exception { // Deep checking of some Facet stats - no refinement involved here - List pivots = - query("q", "*:*", - "shards", getShardsString(), - "facet", "true", - "rows" , "0", - "facet.pivot","{!stats=s1}foo_s,bar_s", - "stats", "true", - "stats.field", "{!key=avg_price tag=s1}" + STAT_FIELD).getFacetPivot().get("foo_s,bar_s"); + List pivots = + query( + "q", "*:*", + "shards", getShardsString(), + "facet", "true", + "rows", "0", + "facet.pivot", "{!stats=s1}foo_s,bar_s", + "stats", "true", + "stats.field", 
"{!key=avg_price tag=s1}" + STAT_FIELD) + .getFacetPivot() + .get("foo_s,bar_s"); PivotField aaa0PivotField = pivots.get(0); assertEquals("aaa0", aaa0PivotField.getValue()); assertEquals(300, aaa0PivotField.getCount()); @@ -321,5 +347,4 @@ private void doTestDeepPivotStats() throws Exception { assertEquals(16910.0, tailBPivotFieldStatsInfo.getSumOfSquares(), 0.1E-7); assertEquals(1.78376517, tailBPivotFieldStatsInfo.getStddev(), 0.1E-7); } - } diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallAdvancedTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallAdvancedTest.java index d579b7aadac..8ae9929b557 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallAdvancedTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallAdvancedTest.java @@ -16,6 +16,7 @@ */ package org.apache.solr.handler.component; +import java.util.List; import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.response.FieldStatsInfo; @@ -26,22 +27,21 @@ import org.apache.solr.common.params.SolrParams; import org.junit.Test; -import java.util.List; - /** * tests some edge cases of pivot faceting with stats * - * NOTE: This test ignores the control collection (in single node mode, there is no - * need for the overrequesting, all the data is local -- so comparisons with it wouldn't - * be valid in some cases we are testing here) + *
<p>
NOTE: This test ignores the control collection (in single node mode, there is no need for the + * overrequesting, all the data is local -- so comparisons with it wouldn't be valid in some cases + * we are testing here) */ public class DistributedFacetPivotSmallAdvancedTest extends BaseDistributedSearchTestCase { public DistributedFacetPivotSmallAdvancedTest() { // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); } - + @Test @ShardsFixed(num = 2) public void test() throws Exception { @@ -51,40 +51,109 @@ public void test() throws Exception { final SolrClient shard1 = clients.get(1); // NOTE: we use the literal (4 character) string "null" as a company name - // to help ensure there isn't any bugs where the literal string is treated as if it + // to help ensure there isn't any bugs where the literal string is treated as if it // were a true NULL value. // shard0 - shard0.add(sdoc(id, 19, "place_t", "cardiff dublin", - "company_t", "microsoft polecat", - "price_ti", "15", "foo_s", "aaa", "foo_i", 10)); - shard0.add(sdoc(id, 20, "place_t", "dublin", - "company_t", "polecat microsoft null", - "price_ti", "19", "foo_s", "bbb", "foo_i", 4)); - shard0.add(sdoc(id, 21, "place_t", "london la dublin", - "company_t", "microsoft fujitsu null polecat", - "price_ti", "29", "foo_s", "bbb", "foo_i", 3)); - shard0.add(sdoc(id, 22, "place_t", "krakow london cardiff", - "company_t", "polecat null bbc", - "price_ti", "39", "foo_s", "bbb", "foo_i", 6)); - shard0.add(sdoc(id, 23, "place_t", "london", - "company_t", "", - "price_ti", "29", "foo_s", "bbb", "foo_i", 9)); + shard0.add( + sdoc( + id, + 19, + "place_t", + "cardiff dublin", + "company_t", + "microsoft polecat", + "price_ti", + "15", + "foo_s", + "aaa", + "foo_i", + 10)); + shard0.add( + sdoc( + id, + 20, + "place_t", + "dublin", + "company_t", + "polecat microsoft null", + "price_ti", + "19", + "foo_s", + "bbb", + "foo_i", + 4)); + shard0.add( + sdoc( + id, + 21, + "place_t", + "london la dublin", + "company_t", + "microsoft fujitsu null polecat", + "price_ti", + "29", + "foo_s", + "bbb", + "foo_i", + 3)); + shard0.add( + sdoc( + id, + 22, + "place_t", + "krakow london cardiff", + "company_t", + "polecat null bbc", + "price_ti", + "39", + "foo_s", + "bbb", + "foo_i", + 6)); + shard0.add( + sdoc( + id, + 23, + "place_t", + "london", + "company_t", + "", + "price_ti", + "29", + "foo_s", + "bbb", + "foo_i", + 9)); // shard1 - shard1.add(sdoc(id, 24, "place_t", "la", - "company_t", "", - "foo_s", "aaa", "foo_i", 21)); - shard1.add(sdoc(id, 25, - "company_t", "microsoft polecat null fujitsu null bbc", - "price_ti", "59", "foo_s", "aaa", "foo_i", 5)); - shard1.add(sdoc(id, 26, "place_t", "krakow", - "company_t", "null", - "foo_s", "aaa", "foo_i", 23)); - shard1.add(sdoc(id, 27, "place_t", "krakow cardiff dublin london la", - "company_t", "null microsoft polecat bbc fujitsu", - "foo_s", "aaa", "foo_i", 91)); - shard1.add(sdoc(id, 28, "place_t", "cork", - "company_t", "fujitsu rte", "foo_s", "aaa", "foo_i", 76)); + shard1.add(sdoc(id, 24, "place_t", "la", "company_t", "", "foo_s", "aaa", "foo_i", 21)); + shard1.add( + sdoc( + id, + 25, + "company_t", + "microsoft polecat null fujitsu null bbc", + "price_ti", + "59", + "foo_s", + "aaa", + "foo_i", + 5)); + shard1.add(sdoc(id, 26, "place_t", "krakow", "company_t", "null", "foo_s", "aaa", "foo_i", 
23)); + shard1.add( + sdoc( + id, + 27, + "place_t", + "krakow cardiff dublin london la", + "company_t", + "null microsoft polecat bbc fujitsu", + "foo_s", + "aaa", + "foo_i", + 91)); + shard1.add( + sdoc(id, 28, "place_t", "cork", "company_t", "fujitsu rte", "foo_s", "aaa", "foo_i", 76)); commit(); handle.clear(); @@ -99,30 +168,33 @@ public void test() throws Exception { } /** - * we need to ensure that stats never "overcount" the values from a single shard - * even if we hit that shard with a refinement request + * we need to ensure that stats never "overcount" the values from a single shard even if we hit + * that shard with a refinement request */ private void doTestTopStatsWithRefinement(final boolean allStats) throws Exception { - String stat_param = allStats ? - "{!tag=s1}foo_i" : "{!tag=s1 min=true max=true count=true missing=true}foo_i"; + String stat_param = + allStats ? "{!tag=s1}foo_i" : "{!tag=s1 min=true max=true count=true missing=true}foo_i"; - ModifiableSolrParams coreParams = params("q", "*:*", "rows", "0", - "stats", "true", - "stats.field", stat_param ); + ModifiableSolrParams coreParams = + params("q", "*:*", "rows", "0", "stats", "true", "stats.field", stat_param); ModifiableSolrParams facetParams = new ModifiableSolrParams(coreParams); - facetParams.add(params("facet", "true", - "facet.limit", "1", - "facet.pivot", "{!stats=s1}place_t,company_t")); - + facetParams.add( + params( + "facet", "true", + "facet.limit", "1", + "facet.pivot", "{!stats=s1}place_t,company_t")); + ModifiableSolrParams facetForceRefineParams = new ModifiableSolrParams(facetParams); - facetForceRefineParams.add(params(FacetParams.FACET_OVERREQUEST_COUNT, "0", - FacetParams.FACET_OVERREQUEST_RATIO, "0")); + facetForceRefineParams.add( + params( + FacetParams.FACET_OVERREQUEST_COUNT, "0", + FacetParams.FACET_OVERREQUEST_RATIO, "0")); - for (ModifiableSolrParams params : new ModifiableSolrParams[] { - coreParams, facetParams, facetForceRefineParams }) { + for (ModifiableSolrParams params : + new ModifiableSolrParams[] {coreParams, facetParams, facetForceRefineParams}) { - // for all three sets of these params, the "top level" + // for all three sets of these params, the "top level" // stats in the response of a distributed query should be the same ModifiableSolrParams q = new ModifiableSolrParams(params); q.set("shards", getShardsString()); @@ -150,13 +222,13 @@ private void doTestTopStatsWithRefinement(final boolean allStats) throws Excepti } if (params.getBool("facet", false)) { - // if this was a facet request, then the top pivot constraint and pivot + // if this was a facet request, then the top pivot constraint and pivot // stats should match what we expect - regardless of whether refine // was used, or if the query was initially satisfied by the default overrequest - + List placePivots = rsp.getFacetPivot().get("place_t,company_t"); assertEquals(1, placePivots.size()); - + PivotField dublinPivotField = placePivots.get(0); assertEquals("dublin", dublinPivotField.getValue()); assertEquals(4, dublinPivotField.getCount()); @@ -165,14 +237,15 @@ private void doTestTopStatsWithRefinement(final boolean allStats) throws Excepti PivotField microsoftPivotField = dublinPivotField.getPivot().get(0); assertEquals("microsoft", microsoftPivotField.getValue()); assertEquals(4, microsoftPivotField.getCount()); - - FieldStatsInfo dublinMicrosoftStatsInfo = microsoftPivotField.getFieldStatsInfo().get("foo_i"); + + FieldStatsInfo dublinMicrosoftStatsInfo = + microsoftPivotField.getFieldStatsInfo().get("foo_i"); 
assertEquals(3.0D, dublinMicrosoftStatsInfo.getMin()); assertEquals(91.0D, dublinMicrosoftStatsInfo.getMax()); assertEquals(4, (long) dublinMicrosoftStatsInfo.getCount()); assertEquals(0, (long) dublinMicrosoftStatsInfo.getMissing()); - - if (! allStats) { + + if (!allStats) { assertNull(msg, dublinMicrosoftStatsInfo.getSum()); assertNull(msg, dublinMicrosoftStatsInfo.getSumOfSquares()); assertNull(msg, dublinMicrosoftStatsInfo.getMean()); @@ -181,24 +254,50 @@ private void doTestTopStatsWithRefinement(final boolean allStats) throws Excepti } } - // sanity check that the top pivot from each shard is diff, to prove to + // sanity check that the top pivot from each shard is diff, to prove to // ourselves that the above queries really must have involved refinement. - Object s0pivValue = clients.get(0) - .query(facetParams).getFacetPivot().get("place_t,company_t").get(0).getValue(); - Object s1pivValue = clients.get(1) - .query(facetParams).getFacetPivot().get("place_t,company_t").get(0).getValue(); - assertFalse("both shards have same top constraint, test is invalid" + - "(did someone change the test data?) ==> " + - s0pivValue + "==" + s1pivValue, s0pivValue.equals(s1pivValue)); - + Object s0pivValue = + clients + .get(0) + .query(facetParams) + .getFacetPivot() + .get("place_t,company_t") + .get(0) + .getValue(); + Object s1pivValue = + clients + .get(1) + .query(facetParams) + .getFacetPivot() + .get("place_t,company_t") + .get(0) + .getValue(); + assertFalse( + "both shards have same top constraint, test is invalid" + + "(did someone change the test data?) ==> " + + s0pivValue + + "==" + + s1pivValue, + s0pivValue.equals(s1pivValue)); } private void doTestDeepPivotStatsOnString() throws Exception { - SolrParams params = params("q", "*:*", "rows", "0", - "shards", getShardsString(), - "facet", "true", "stats", "true", - "facet.pivot", "{!stats=s1}place_t,company_t", - "stats.field", "{!key=avg_price tag=s1}foo_s"); + SolrParams params = + params( + "q", + "*:*", + "rows", + "0", + "shards", + getShardsString(), + "facet", + "true", + "stats", + "true", + "facet.pivot", + "{!stats=s1}place_t,company_t", + "stats.field", + "{!key=avg_price tag=s1}foo_s"); QueryResponse rsp = queryServer(new ModifiableSolrParams(params)); List placePivots = rsp.getFacetPivot().get("place_t,company_t"); @@ -211,7 +310,8 @@ private void doTestDeepPivotStatsOnString() throws Exception { assertEquals("microsoft", microsoftPivotField.getValue()); assertEquals(4, microsoftPivotField.getCount()); - FieldStatsInfo dublinMicrosoftStatsInfo = microsoftPivotField.getFieldStatsInfo().get("avg_price"); + FieldStatsInfo dublinMicrosoftStatsInfo = + microsoftPivotField.getFieldStatsInfo().get("avg_price"); assertEquals("aaa", dublinMicrosoftStatsInfo.getMin()); assertEquals("bbb", dublinMicrosoftStatsInfo.getMax()); assertEquals(4, (long) dublinMicrosoftStatsInfo.getCount()); @@ -245,5 +345,4 @@ private void doTestDeepPivotStatsOnString() throws Exception { assertEquals(1, (long) krakowFujitsuStatsInfo.getCount()); assertEquals(0, (long) krakowFujitsuStatsInfo.getMissing()); } - } diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallTest.java index 54609a80739..27ce80df14c 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallTest.java @@ -23,7 +23,6 @@ import 
java.util.LinkedHashMap; import java.util.List; import java.util.Map; - import junit.framework.AssertionFailedError; import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.client.solrj.response.FieldStatsInfo; @@ -39,51 +38,82 @@ public class DistributedFacetPivotSmallTest extends BaseDistributedSearchTestCas public DistributedFacetPivotSmallTest() { // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); } - + @Test @ShardsFixed(num = 4) public void test() throws Exception { - + del("*:*"); // NOTE: we use the literal (4 character) string "null" as a company name - // to help ensure there isn't any bugs where the literal string is treated as if it + // to help ensure there isn't any bugs where the literal string is treated as if it // were a true NULL value. index(id, 19, "place_t", "cardiff dublin", "company_t", "microsoft polecat", "price_ti", "15"); - index(id, 20, "place_t", "dublin", "company_t", "polecat microsoft null", "price_ti", "19", - // this is the only doc to have solo_* fields, therefore only 1 shard has them - // TODO: add enum field - blocked by SOLR-6682 - "solo_i", 42, "solo_s", "lonely", "solo_dt", "1976-03-06T01:23:45Z"); - index(id, 21, "place_t", "london la dublin", "company_t", - "microsoft fujitsu null polecat", "price_ti", "29"); - index(id, 22, "place_t", "krakow london cardiff", "company_t", - "polecat null bbc", "price_ti", "39"); + index( + id, + 20, + "place_t", + "dublin", + "company_t", + "polecat microsoft null", + "price_ti", + "19", + // this is the only doc to have solo_* fields, therefore only 1 shard has them + // TODO: add enum field - blocked by SOLR-6682 + "solo_i", + 42, + "solo_s", + "lonely", + "solo_dt", + "1976-03-06T01:23:45Z"); + index( + id, + 21, + "place_t", + "london la dublin", + "company_t", + "microsoft fujitsu null polecat", + "price_ti", + "29"); + index( + id, + 22, + "place_t", + "krakow london cardiff", + "company_t", + "polecat null bbc", + "price_ti", + "39"); index(id, 23, "place_t", "london", "company_t", "", "price_ti", "29"); index(id, 24, "place_t", "la", "company_t", ""); index(id, 25, "company_t", "microsoft polecat null fujitsu null bbc", "price_ti", "59"); index(id, 26, "place_t", "krakow", "company_t", "null"); - index(id, 27, "place_t", "krakow cardiff dublin london la", - "company_t", "null microsoft polecat bbc fujitsu"); + index( + id, + 27, + "place_t", + "krakow cardiff dublin london la", + "company_t", + "null microsoft polecat bbc fujitsu"); index(id, 28, "place_t", "cork", "company_t", "fujitsu rte"); commit(); - + handle.clear(); handle.put("QTime", SKIPVAL); handle.put("timestamp", SKIPVAL); handle.put("maxScore", SKIPVAL); - - + final ModifiableSolrParams params = new ModifiableSolrParams(); setDistributedParams(params); params.add("q", "*:*"); params.add("facet", "true"); params.add("facet.pivot", "place_t,company_t"); - - + QueryResponse rsp = queryServer(params); - + List expectedPlacePivots = new UnorderedEqualityArrayList(); List expectedCardiffPivots = new UnorderedEqualityArrayList(); expectedCardiffPivots.add(new ComparablePivotField("company_t", "microsoft", 2, null)); @@ -101,169 +131,177 @@ public void test() throws Exception { expectedLondonPivots.add(new ComparablePivotField("company_t", "polecat", 3, null)); expectedLondonPivots.add(new 
ComparablePivotField("company_t", "microsoft", 2, null)); expectedLondonPivots.add(new ComparablePivotField("company_t", "fujitsu", 2, null)); - expectedLondonPivots.add(new ComparablePivotField("company_t", "null", 3,null)); + expectedLondonPivots.add(new ComparablePivotField("company_t", "null", 3, null)); expectedLondonPivots.add(new ComparablePivotField("company_t", "bbc", 2, null)); List expectedLAPivots = new UnorderedEqualityArrayList(); - expectedLAPivots.add(new ComparablePivotField("company_t", "microsoft", 2,null)); - expectedLAPivots.add(new ComparablePivotField("company_t", "fujitsu", 2,null)); + expectedLAPivots.add(new ComparablePivotField("company_t", "microsoft", 2, null)); + expectedLAPivots.add(new ComparablePivotField("company_t", "fujitsu", 2, null)); expectedLAPivots.add(new ComparablePivotField("company_t", "null", 2, null)); expectedLAPivots.add(new ComparablePivotField("company_t", "bbc", 1, null)); - expectedLAPivots.add(new ComparablePivotField("company_t", "polecat", 2,null)); + expectedLAPivots.add(new ComparablePivotField("company_t", "polecat", 2, null)); List expectedKrakowPivots = new UnorderedEqualityArrayList(); - expectedKrakowPivots.add(new ComparablePivotField("company_t", "polecat",2, null)); + expectedKrakowPivots.add(new ComparablePivotField("company_t", "polecat", 2, null)); expectedKrakowPivots.add(new ComparablePivotField("company_t", "bbc", 2, null)); - expectedKrakowPivots.add(new ComparablePivotField("company_t", "null", 3,null)); + expectedKrakowPivots.add(new ComparablePivotField("company_t", "null", 3, null)); expectedKrakowPivots.add(new ComparablePivotField("company_t", "fujitsu", 1, null)); expectedKrakowPivots.add(new ComparablePivotField("company_t", "microsoft", 1, null)); List expectedCorkPivots = new UnorderedEqualityArrayList(); expectedCorkPivots.add(new ComparablePivotField("company_t", "fujitsu", 1, null)); expectedCorkPivots.add(new ComparablePivotField("company_t", "rte", 1, null)); expectedPlacePivots.add(new ComparablePivotField("place_t", "dublin", 4, expectedDublinPivots)); - expectedPlacePivots.add(new ComparablePivotField("place_t", "cardiff", 3, expectedCardiffPivots)); + expectedPlacePivots.add( + new ComparablePivotField("place_t", "cardiff", 3, expectedCardiffPivots)); expectedPlacePivots.add(new ComparablePivotField("place_t", "london", 4, expectedLondonPivots)); expectedPlacePivots.add(new ComparablePivotField("place_t", "la", 3, expectedLAPivots)); expectedPlacePivots.add(new ComparablePivotField("place_t", "krakow", 3, expectedKrakowPivots)); expectedPlacePivots.add(new ComparablePivotField("place_t", "cork", 1, expectedCorkPivots)); - - + List placePivots = rsp.getFacetPivot().get("place_t,company_t"); - + // Useful to check for errors, orders lists and does toString() equality // check testOrderedPivotsStringEquality(expectedPlacePivots, placePivots); - + assertEquals(expectedPlacePivots, placePivots); - + // Test sorting by count - + params.set(FacetParams.FACET_SORT, FacetParams.FACET_SORT_COUNT); - + rsp = queryServer(params); - + placePivots = rsp.getFacetPivot().get("place_t,company_t"); - + testCountSorting(placePivots); - + // Test limit - + params.set(FacetParams.FACET_LIMIT, 2); - + rsp = queryServer(params); - + expectedPlacePivots = new UnorderedEqualityArrayList(); expectedDublinPivots = new UnorderedEqualityArrayList(); - expectedDublinPivots.add(new ComparablePivotField("company_t", "polecat", - 4, null)); - expectedDublinPivots.add(new ComparablePivotField("company_t", "microsoft", - 4, null)); + 
expectedDublinPivots.add(new ComparablePivotField("company_t", "polecat", 4, null)); + expectedDublinPivots.add(new ComparablePivotField("company_t", "microsoft", 4, null)); expectedLondonPivots = new UnorderedEqualityArrayList(); - expectedLondonPivots.add(new ComparablePivotField("company_t", "null", 3, - null)); - expectedLondonPivots.add(new ComparablePivotField("company_t", "polecat", 3, - null)); - expectedPlacePivots.add(new ComparablePivotField("place_t", "dublin", 4, - expectedDublinPivots)); - expectedPlacePivots.add(new ComparablePivotField("place_t", "london", 4, - expectedLondonPivots)); - + expectedLondonPivots.add(new ComparablePivotField("company_t", "null", 3, null)); + expectedLondonPivots.add(new ComparablePivotField("company_t", "polecat", 3, null)); + expectedPlacePivots.add(new ComparablePivotField("place_t", "dublin", 4, expectedDublinPivots)); + expectedPlacePivots.add(new ComparablePivotField("place_t", "london", 4, expectedLondonPivots)); + placePivots = rsp.getFacetPivot().get("place_t,company_t"); - + assertEquals(expectedPlacePivots, placePivots); - + // Test individual facet.limit values params.remove(FacetParams.FACET_LIMIT); - + params.set("f.place_t." + FacetParams.FACET_LIMIT, 1); params.set("f.company_t." + FacetParams.FACET_LIMIT, 4); - + rsp = queryServer(params); - + expectedPlacePivots = new UnorderedEqualityArrayList(); - + expectedDublinPivots = new UnorderedEqualityArrayList(); - expectedDublinPivots.add(new ComparablePivotField("company_t", "microsoft",4, null)); - expectedDublinPivots.add(new ComparablePivotField("company_t", "polecat",4, null)); - expectedDublinPivots.add(new ComparablePivotField("company_t", "null",3, null)); - expectedDublinPivots.add(new ComparablePivotField("company_t", "fujitsu",2, null)); - + expectedDublinPivots.add(new ComparablePivotField("company_t", "microsoft", 4, null)); + expectedDublinPivots.add(new ComparablePivotField("company_t", "polecat", 4, null)); + expectedDublinPivots.add(new ComparablePivotField("company_t", "null", 3, null)); + expectedDublinPivots.add(new ComparablePivotField("company_t", "fujitsu", 2, null)); + expectedLondonPivots = new UnorderedEqualityArrayList(); expectedLondonPivots.add(new ComparablePivotField("company_t", "null", 3, null)); expectedLondonPivots.add(new ComparablePivotField("company_t", "polecat", 3, null)); expectedLondonPivots.add(new ComparablePivotField("company_t", "bbc", 2, null)); expectedLondonPivots.add(new ComparablePivotField("company_t", "fujitsu", 2, null)); - + expectedCardiffPivots = new UnorderedEqualityArrayList(); expectedCardiffPivots.add(new ComparablePivotField("company_t", "polecat", 3, null)); - + expectedKrakowPivots = new UnorderedEqualityArrayList(); expectedKrakowPivots.add(new ComparablePivotField("company_t", "null", 3, null)); - + expectedLAPivots = new UnorderedEqualityArrayList(); expectedLAPivots.add(new ComparablePivotField("company_t", "fujitsu", 2, null)); - + expectedCorkPivots = new UnorderedEqualityArrayList(); expectedCorkPivots.add(new ComparablePivotField("company_t", "fujitsu", 1, null)); - + expectedPlacePivots.add(new ComparablePivotField("place_t", "dublin", 4, expectedDublinPivots)); - - placePivots = rsp.getFacetPivot().get("place_t,company_t"); + + placePivots = rsp.getFacetPivot().get("place_t,company_t"); assertEquals(expectedPlacePivots, placePivots); - + params.remove("f.company_t." + FacetParams.FACET_LIMIT); params.remove("f.place_t." 
+ FacetParams.FACET_LIMIT); params.set(FacetParams.FACET_LIMIT, 2); - + // Test facet.missing=true with diff sorts - index("id",777); // NOTE: id=25 has no place as well + index("id", 777); // NOTE: id=25 has no place as well commit(); - SolrParams missingA = params( "q", "*:*", - "rows", "0", - "facet","true", - "facet.pivot","place_t,company_t", - "f.place_t.facet.mincount", "2", - // default facet.sort - FacetParams.FACET_MISSING, "true" ); - SolrParams missingB = SolrParams.wrapDefaults(missingA, - params(FacetParams.FACET_LIMIT, "4", - "facet.sort", "index")); - for (SolrParams p : new SolrParams[] { missingA, missingB }) { + SolrParams missingA = + params( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "place_t,company_t", + "f.place_t.facet.mincount", + "2", + // default facet.sort + FacetParams.FACET_MISSING, + "true"); + SolrParams missingB = + SolrParams.wrapDefaults( + missingA, params(FacetParams.FACET_LIMIT, "4", "facet.sort", "index")); + for (SolrParams p : new SolrParams[] {missingA, missingB}) { // in either case, the last pivot option should be the same - rsp = query( p ); + rsp = query(p); placePivots = rsp.getFacetPivot().get("place_t,company_t"); - assertTrue("not enough values for pivot: " + p + " => " + placePivots, - 1 < placePivots.size()); - PivotField missing = placePivots.get(placePivots.size()-1); + assertTrue( + "not enough values for pivot: " + p + " => " + placePivots, 1 < placePivots.size()); + PivotField missing = placePivots.get(placePivots.size() - 1); assertNull("not the missing place value: " + p, missing.getValue()); assertEquals("wrong missing place count: " + p, 2, missing.getCount()); - assertTrue("not enough sub-pivots for missing place: "+ p +" => " + missing.getPivot(), - 1 < missing.getPivot().size()); - missing = missing.getPivot().get(missing.getPivot().size()-1); + assertTrue( + "not enough sub-pivots for missing place: " + p + " => " + missing.getPivot(), + 1 < missing.getPivot().size()); + missing = missing.getPivot().get(missing.getPivot().size() - 1); assertNull("not the missing company value: " + p, missing.getValue()); assertEquals("wrong missing company count: " + p, 1, missing.getCount()); assertNull("company shouldn't have sub-pivots: " + p, missing.getPivot()); } // sort=index + mincount + limit - for (SolrParams variableParams : new SolrParams[] { - // we should get the same results regardless of overrequest - params("facet.overrequest.count","0", - "facet.overrequest.ratio","0"), - params() }) { - - - SolrParams p = SolrParams.wrapDefaults( params( "q", "*:*", - "rows", "0", - "facet","true", - "facet.pivot","company_t", - "facet.sort", "index", - "facet.pivot.mincount", "4", - "facet.limit", "4"), - variableParams ); + for (SolrParams variableParams : + new SolrParams[] { + // we should get the same results regardless of overrequest + params( + "facet.overrequest.count", "0", + "facet.overrequest.ratio", "0"), + params() + }) { + + SolrParams p = + SolrParams.wrapDefaults( + params( + "q", "*:*", + "rows", "0", + "facet", "true", + "facet.pivot", "company_t", + "facet.sort", "index", + "facet.pivot.mincount", "4", + "facet.limit", "4"), + variableParams); try { - List pivots = query( p ).getFacetPivot().get("company_t"); + List pivots = query(p).getFacetPivot().get("company_t"); assertEquals(4, pivots.size()); assertEquals("fujitsu", pivots.get(0).getValue()); assertEquals(4, pivots.get(0).getCount()); @@ -273,30 +311,36 @@ public void test() throws Exception { assertEquals(6, pivots.get(2).getCount()); 
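// Aside: SolrParams.wrapDefaults, used throughout these hunks, layers one parameter set
// beneath another so each test permutation only states what differs. A minimal sketch of
// that layering, assuming only the stock org.apache.solr.common.params classes; the class
// name below is invented for illustration:

    import org.apache.solr.common.params.ModifiableSolrParams;
    import org.apache.solr.common.params.SolrParams;

    public class WrapDefaultsSketch {
      public static void main(String[] args) {
        ModifiableSolrParams overrides = new ModifiableSolrParams();
        overrides.set("facet.sort", "index"); // per-permutation override
        ModifiableSolrParams defaults = new ModifiableSolrParams();
        defaults.set("facet.sort", "count"); // shadowed by the override
        defaults.set("facet.limit", "4"); // inherited from the defaults layer
        SolrParams merged = SolrParams.wrapDefaults(overrides, defaults);
        System.out.println(merged.get("facet.sort")); // index
        System.out.println(merged.get("facet.limit")); // 4
      }
    }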
assertEquals("polecat", pivots.get(3).getValue()); assertEquals(6, pivots.get(3).getCount()); - + } catch (AssertionFailedError ae) { throw new AssertionError(ae.getMessage() + " <== " + p.toString(), ae); } } // sort=index + mincount + limit + offset - for (SolrParams variableParams : new SolrParams[] { - // we should get the same results regardless of overrequest - params("facet.overrequest.count","0", - "facet.overrequest.ratio","0"), - params() }) { - - SolrParams p = SolrParams.wrapDefaults( params( "q", "*:*", - "rows", "0", - "facet","true", - "facet.pivot","company_t", - "facet.sort", "index", - "facet.pivot.mincount", "4", - "facet.offset", "1", - "facet.limit", "4"), - variableParams ); + for (SolrParams variableParams : + new SolrParams[] { + // we should get the same results regardless of overrequest + params( + "facet.overrequest.count", "0", + "facet.overrequest.ratio", "0"), + params() + }) { + + SolrParams p = + SolrParams.wrapDefaults( + params( + "q", "*:*", + "rows", "0", + "facet", "true", + "facet.pivot", "company_t", + "facet.sort", "index", + "facet.pivot.mincount", "4", + "facet.offset", "1", + "facet.limit", "4"), + variableParams); try { - List pivots = query( p ).getFacetPivot().get("company_t"); + List pivots = query(p).getFacetPivot().get("company_t"); assertEquals(3, pivots.size()); // asked for 4, but not enough meet the mincount assertEquals("microsoft", pivots.get(0).getValue()); assertEquals(5, pivots.get(0).getCount()); @@ -308,30 +352,37 @@ public void test() throws Exception { } catch (AssertionFailedError ae) { throw new AssertionError(ae.getMessage() + " <== " + p.toString(), ae); } - } - + // sort=index + mincount + limit + offset (more permutations) - for (SolrParams variableParams : new SolrParams[] { - // all of these combinations should result in the same first value - params("facet.pivot.mincount", "4", - "facet.offset", "2"), - params("facet.pivot.mincount", "5", - "facet.offset", "1"), - params("facet.pivot.mincount", "6", - "facet.offset", "0" ) }) { - - SolrParams p = SolrParams.wrapDefaults( params( "q", "*:*", - "rows", "0", - "facet","true", - "facet.limit","1", - "facet.sort","index", - "facet.overrequest.ratio","0", - "facet.pivot", "company_t"), - variableParams ); + for (SolrParams variableParams : + new SolrParams[] { + // all of these combinations should result in the same first value + params( + "facet.pivot.mincount", "4", + "facet.offset", "2"), + params( + "facet.pivot.mincount", "5", + "facet.offset", "1"), + params( + "facet.pivot.mincount", "6", + "facet.offset", "0") + }) { + + SolrParams p = + SolrParams.wrapDefaults( + params( + "q", "*:*", + "rows", "0", + "facet", "true", + "facet.limit", "1", + "facet.sort", "index", + "facet.overrequest.ratio", "0", + "facet.pivot", "company_t"), + variableParams); try { - List pivots = query( p ).getFacetPivot().get("company_t"); + List pivots = query(p).getFacetPivot().get("company_t"); assertEquals(1, pivots.size()); assertEquals(pivots.toString(), "null", pivots.get(0).getValue()); assertEquals(pivots.toString(), 6, pivots.get(0).getCount()); @@ -361,11 +412,20 @@ public void test() throws Exception { * @param justMean - only the mean stat is requested/computed */ private void doTestDeepPivotStats(boolean justMean) throws Exception { - SolrParams params = params("q", "*:*", "rows", "0", - "facet", "true", "stats", "true", - "facet.pivot", "{!stats=s1}place_t,company_t", - "stats.field", ("{!key=avg_price tag=s1 "+ - (justMean ? 
"mean=true" : "") +"}price_ti")); + SolrParams params = + params( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "stats", + "true", + "facet.pivot", + "{!stats=s1}place_t,company_t", + "stats.field", + ("{!key=avg_price tag=s1 " + (justMean ? "mean=true" : "") + "}price_ti")); QueryResponse rsp = query(params); List placePivots = rsp.getFacetPivot().get("place_t,company_t"); @@ -378,7 +438,8 @@ private void doTestDeepPivotStats(boolean justMean) throws Exception { assertEquals("microsoft", microsoftPivotField.getValue()); assertEquals(4, microsoftPivotField.getCount()); - FieldStatsInfo dublinMicrosoftStatsInfo = microsoftPivotField.getFieldStatsInfo().get("avg_price"); + FieldStatsInfo dublinMicrosoftStatsInfo = + microsoftPivotField.getFieldStatsInfo().get("avg_price"); assertEquals(21.0, (double) dublinMicrosoftStatsInfo.getMean(), 0.1E-7); if (justMean) { assertNull(dublinMicrosoftStatsInfo.getMin()); @@ -444,7 +505,7 @@ private void doTestDeepPivotStats(boolean justMean) throws Exception { assertNull(krakowFujitsuStatsInfo.getSum()); assertNull(krakowFujitsuStatsInfo.getSumOfSquares()); assertNull(krakowFujitsuStatsInfo.getStddev()); - } else { + } else { assertEquals(null, krakowFujitsuStatsInfo.getMin()); assertEquals(null, krakowFujitsuStatsInfo.getMax()); assertEquals(0, (long) krakowFujitsuStatsInfo.getCount()); @@ -471,109 +532,276 @@ private void testFacetPivotRange() throws Exception { List expectedPlacePivots = new UnorderedEqualityArrayList(); List expectedCardiffPivots = new UnorderedEqualityArrayList(); - expectedCardiffPivots.add(new ComparablePivotField("company_t", - "microsoft", 2, null, null, createExpectedRange("price", 0, 100, - 20, 1, 0, 0, 0, 0))); - expectedCardiffPivots.add(new ComparablePivotField("company_t", "null", 2, - null, null, createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, - 0))); - expectedCardiffPivots.add(new ComparablePivotField("company_t", "bbc", 2, - null, null, createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, - 0))); - expectedCardiffPivots.add(new ComparablePivotField("company_t", "polecat", - 3, null, null, createExpectedRange("price", 0, 100, 20, 1, 1, 0, - 0, 0))); - expectedCardiffPivots.add(new ComparablePivotField("company_t", "fujitsu", - 1, null, null, createExpectedRange("price", 0, 100, 20, 0, 0, 0, - 0, 0))); + expectedCardiffPivots.add( + new ComparablePivotField( + "company_t", + "microsoft", + 2, + null, + null, + createExpectedRange("price", 0, 100, 20, 1, 0, 0, 0, 0))); + expectedCardiffPivots.add( + new ComparablePivotField( + "company_t", + "null", + 2, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); + expectedCardiffPivots.add( + new ComparablePivotField( + "company_t", + "bbc", + 2, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); + expectedCardiffPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 3, + null, + null, + createExpectedRange("price", 0, 100, 20, 1, 1, 0, 0, 0))); + expectedCardiffPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 1, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 0, 0, 0, 0))); List expectedDublinPivots = new UnorderedEqualityArrayList(); - expectedDublinPivots.add(new ComparablePivotField("company_t", "polecat", - 4, null, null, createExpectedRange("price", 0, 100, 20, 2, 1, 0, - 0, 0))); - expectedDublinPivots.add(new ComparablePivotField("company_t", "microsoft", - 4, null, null, createExpectedRange("price", 0, 100, 20, 2, 1, 0, - 0, 0))); - 
expectedDublinPivots.add(new ComparablePivotField("company_t", "null", 3, - null, null, createExpectedRange("price", 0, 100, 20, 1, 1, 0, 0, - 0))); - expectedDublinPivots.add(new ComparablePivotField("company_t", "fujitsu", - 2, null, null, createExpectedRange("price", 0, 100, 20, 0, 1, 0, - 0, 0))); - expectedDublinPivots.add(new ComparablePivotField("company_t", "bbc", 1, - null, null, createExpectedRange("price", 0, 100, 20, 0, 0, 0, 0, - 0))); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 4, + null, + null, + createExpectedRange("price", 0, 100, 20, 2, 1, 0, 0, 0))); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "microsoft", + 4, + null, + null, + createExpectedRange("price", 0, 100, 20, 2, 1, 0, 0, 0))); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "null", + 3, + null, + null, + createExpectedRange("price", 0, 100, 20, 1, 1, 0, 0, 0))); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 2, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "bbc", + 1, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 0, 0, 0, 0))); List expectedLondonPivots = new UnorderedEqualityArrayList(); - expectedLondonPivots.add(new ComparablePivotField("company_t", "polecat", - 3, null, null, createExpectedRange("price", 0, 100, 20, 0, 2, 0, - 0, 0))); - expectedLondonPivots.add(new ComparablePivotField("company_t", "microsoft", - 2, null, null, createExpectedRange("price", 0, 100, 20, 0, 1, 0, - 0, 0))); - expectedLondonPivots.add(new ComparablePivotField("company_t", "fujitsu", - 2, null, null, createExpectedRange("price", 0, 100, 20, 0, 1, 0, - 0, 0))); - expectedLondonPivots.add(new ComparablePivotField("company_t", "null", 3, - null, null, createExpectedRange("price", 0, 100, 20, 0, 2, 0, 0, - 0))); - expectedLondonPivots.add(new ComparablePivotField("company_t", "bbc", 2, - null, null, createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, - 0))); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 3, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 2, 0, 0, 0))); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "microsoft", + 2, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 2, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "null", + 3, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 2, 0, 0, 0))); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "bbc", + 2, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); List expectedLAPivots = new UnorderedEqualityArrayList(); - expectedLAPivots.add(new ComparablePivotField("company_t", "microsoft", 2, - null, null, createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, - 0))); - expectedLAPivots.add(new ComparablePivotField("company_t", "fujitsu", 2, - null, null, createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, - 0))); - expectedLAPivots.add(new ComparablePivotField("company_t", "null", 2, null, - null, createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); - expectedLAPivots.add(new ComparablePivotField("company_t", "bbc", 1, null, - null, createExpectedRange("price", 0, 
100, 20, 0, 0, 0, 0, 0))); - expectedLAPivots.add(new ComparablePivotField("company_t", "polecat", 2, - null, null, createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, - 0))); + expectedLAPivots.add( + new ComparablePivotField( + "company_t", + "microsoft", + 2, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); + expectedLAPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 2, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); + expectedLAPivots.add( + new ComparablePivotField( + "company_t", + "null", + 2, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); + expectedLAPivots.add( + new ComparablePivotField( + "company_t", + "bbc", + 1, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 0, 0, 0, 0))); + expectedLAPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 2, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); List expectedKrakowPivots = new UnorderedEqualityArrayList(); - expectedKrakowPivots.add(new ComparablePivotField("company_t", "polecat", - 2, null, null, createExpectedRange("price", 0, 100, 20, 0, 1, 0, - 0, 0))); - expectedKrakowPivots.add(new ComparablePivotField("company_t", "bbc", 2, - null, null, createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, - 0))); - expectedKrakowPivots.add(new ComparablePivotField("company_t", "null", 3, - null, null, createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, - 0))); - expectedKrakowPivots.add(new ComparablePivotField("company_t", "fujitsu", - 1, null, null, createExpectedRange("price", 0, 100, 20, 0, 0, 0, - 0, 0))); - expectedKrakowPivots.add(new ComparablePivotField("company_t", "microsoft", - 1, null, null, createExpectedRange("price", 0, 100, 20, 0, 0, 0, - 0, 0))); + expectedKrakowPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 2, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); + expectedKrakowPivots.add( + new ComparablePivotField( + "company_t", + "bbc", + 2, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); + expectedKrakowPivots.add( + new ComparablePivotField( + "company_t", + "null", + 3, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); + expectedKrakowPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 1, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 0, 0, 0, 0))); + expectedKrakowPivots.add( + new ComparablePivotField( + "company_t", + "microsoft", + 1, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 0, 0, 0, 0))); List expectedCorkPivots = new UnorderedEqualityArrayList(); - expectedCorkPivots.add(new ComparablePivotField("company_t", "fujitsu", 1, - null, null, createExpectedRange("price", 0, 100, 20, 0, 0, 0, 0, - 0))); - expectedCorkPivots.add(new ComparablePivotField("company_t", "rte", 1, - null, null, createExpectedRange("price", 0, 100, 20, 0, 0, 0, 0, - 0))); - - expectedPlacePivots.add(new ComparablePivotField("place_t", "dublin", 4, - expectedDublinPivots, null, createExpectedRange("price", 0, 100, - 20, 2, 1, 0, 0, 0))); - expectedPlacePivots.add(new ComparablePivotField("place_t", "cardiff", 3, - expectedCardiffPivots, null, createExpectedRange("price", 0, 100, - 20, 1, 1, 0, 0, 0))); - expectedPlacePivots.add(new ComparablePivotField("place_t", "london", 4, - expectedLondonPivots, null, createExpectedRange("price", 0, 100, - 20, 0, 3, 0, 0, 0))); - expectedPlacePivots.add(new 
ComparablePivotField("place_t", "la", 3, - expectedLAPivots, null, createExpectedRange("price", 0, 100, 20, - 0, 1, 0, 0, 0))); - expectedPlacePivots.add(new ComparablePivotField("place_t", "krakow", 3, - expectedKrakowPivots, null, createExpectedRange("price", 0, 100, - 20, 0, 1, 0, 0, 0))); - expectedPlacePivots.add(new ComparablePivotField("place_t", "cork", 1, - expectedCorkPivots, null, createExpectedRange("price", 0, 100, - 20, 0, 0, 0, 0, 0))); + expectedCorkPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 1, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 0, 0, 0, 0))); + expectedCorkPivots.add( + new ComparablePivotField( + "company_t", + "rte", + 1, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 0, 0, 0, 0))); + + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "dublin", + 4, + expectedDublinPivots, + null, + createExpectedRange("price", 0, 100, 20, 2, 1, 0, 0, 0))); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "cardiff", + 3, + expectedCardiffPivots, + null, + createExpectedRange("price", 0, 100, 20, 1, 1, 0, 0, 0))); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "london", + 4, + expectedLondonPivots, + null, + createExpectedRange("price", 0, 100, 20, 0, 3, 0, 0, 0))); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "la", + 3, + expectedLAPivots, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "krakow", + 3, + expectedKrakowPivots, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "cork", + 1, + expectedCorkPivots, + null, + createExpectedRange("price", 0, 100, 20, 0, 0, 0, 0, 0))); List placePivots = rsp.getFacetPivot().get("place_t,company_t"); @@ -601,25 +829,55 @@ expectedCorkPivots, null, createExpectedRange("price", 0, 100, expectedPlacePivots = new UnorderedEqualityArrayList(); expectedDublinPivots = new UnorderedEqualityArrayList(); - expectedDublinPivots.add(new ComparablePivotField("company_t", "polecat", - 4, null, null, createExpectedRange("price", 0, 100, 20, 2, 1, 0, - 0, 0))); - expectedDublinPivots.add(new ComparablePivotField("company_t", "microsoft", - 4, null, null, createExpectedRange("price", 0, 100, 20, 2, 1, 0, - 0, 0))); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 4, + null, + null, + createExpectedRange("price", 0, 100, 20, 2, 1, 0, 0, 0))); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "microsoft", + 4, + null, + null, + createExpectedRange("price", 0, 100, 20, 2, 1, 0, 0, 0))); expectedLondonPivots = new UnorderedEqualityArrayList(); - expectedLondonPivots.add(new ComparablePivotField("company_t", "null", 3, - null, null, createExpectedRange("price", 0, 100, 20, 0, 2, 0, 0, - 0))); - expectedLondonPivots.add(new ComparablePivotField("company_t", "polecat", - 3, null, null, createExpectedRange("price", 0, 100, 20, 0, 2, 0, - 0, 0))); - expectedPlacePivots.add(new ComparablePivotField("place_t", "dublin", 4, - expectedDublinPivots, null, createExpectedRange("price", 0, 100, - 20, 2, 1, 0, 0, 0))); - expectedPlacePivots.add(new ComparablePivotField("place_t", "london", 4, - expectedLondonPivots, null, createExpectedRange("price", 0, 100, - 20, 0, 3, 0, 0, 0))); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "null", + 3, + null, + null, + 
createExpectedRange("price", 0, 100, 20, 0, 2, 0, 0, 0))); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 3, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 2, 0, 0, 0))); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "dublin", + 4, + expectedDublinPivots, + null, + createExpectedRange("price", 0, 100, 20, 2, 1, 0, 0, 0))); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "london", + 4, + expectedLondonPivots, + null, + createExpectedRange("price", 0, 100, 20, 0, 3, 0, 0, 0))); placePivots = rsp.getFacetPivot().get("place_t,company_t"); @@ -636,56 +894,121 @@ expectedLondonPivots, null, createExpectedRange("price", 0, 100, expectedPlacePivots = new UnorderedEqualityArrayList(); expectedDublinPivots = new UnorderedEqualityArrayList(); - expectedDublinPivots.add(new ComparablePivotField("company_t", "microsoft", - 4, null, null, createExpectedRange("price", 0, 100, 20, 2, 1, 0, - 0, 0))); - expectedDublinPivots.add(new ComparablePivotField("company_t", "polecat", - 4, null, null, createExpectedRange("price", 0, 100, 20, 2, 1, 0, - 0, 0))); - expectedDublinPivots.add(new ComparablePivotField("company_t", "null", 3, - null, null, createExpectedRange("price", 0, 100, 20, 1, 1, 0, 0, - 0))); - expectedDublinPivots.add(new ComparablePivotField("company_t", "fujitsu", - 2, null, null, createExpectedRange("price", 0, 100, 20, 0, 1, 0, - 0, 0))); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "microsoft", + 4, + null, + null, + createExpectedRange("price", 0, 100, 20, 2, 1, 0, 0, 0))); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 4, + null, + null, + createExpectedRange("price", 0, 100, 20, 2, 1, 0, 0, 0))); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "null", + 3, + null, + null, + createExpectedRange("price", 0, 100, 20, 1, 1, 0, 0, 0))); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 2, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); expectedLondonPivots = new UnorderedEqualityArrayList(); - expectedLondonPivots.add(new ComparablePivotField("company_t", "null", 3, - null, null, createExpectedRange("price", 0, 100, 20, 0, 2, 0, 0, - 0))); - expectedLondonPivots.add(new ComparablePivotField("company_t", "polecat", - 3, null, null, createExpectedRange("price", 0, 100, 20, 0, 2, 0, - 0, 0))); - expectedLondonPivots.add(new ComparablePivotField("company_t", "bbc", 2, - null, null, createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, - 0))); - expectedLondonPivots.add(new ComparablePivotField("company_t", "fujitsu", - 2, null, null, createExpectedRange("price", 0, 100, 20, 0, 1, 0, - 0, 0))); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "null", + 3, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 2, 0, 0, 0))); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 3, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 2, 0, 0, 0))); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "bbc", + 2, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 2, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); expectedCardiffPivots = new UnorderedEqualityArrayList(); - expectedCardiffPivots.add(new 
ComparablePivotField("company_t", "polecat", - 3, null, null, createExpectedRange("price", 0, 100, 20, 1, 1, 0, - 0, 0))); + expectedCardiffPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 3, + null, + null, + createExpectedRange("price", 0, 100, 20, 1, 1, 0, 0, 0))); expectedKrakowPivots = new UnorderedEqualityArrayList(); - expectedKrakowPivots.add(new ComparablePivotField("company_t", "null", 3, - null, null, createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, - 0))); + expectedKrakowPivots.add( + new ComparablePivotField( + "company_t", + "null", + 3, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); expectedLAPivots = new UnorderedEqualityArrayList(); - expectedLAPivots.add(new ComparablePivotField("company_t", "fujitsu", 2, - null, null, createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, - 0))); + expectedLAPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 2, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 1, 0, 0, 0))); expectedCorkPivots = new UnorderedEqualityArrayList(); - expectedCorkPivots.add(new ComparablePivotField("company_t", "fujitsu", 1, - null, null, createExpectedRange("price", 0, 100, 20, 0, 0, 0, 0, - 0))); - - expectedPlacePivots.add(new ComparablePivotField("place_t", "dublin", 4, - expectedDublinPivots, null, createExpectedRange("price", 0, 100, - 20, 2, 1, 0, 0, 0))); + expectedCorkPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 1, + null, + null, + createExpectedRange("price", 0, 100, 20, 0, 0, 0, 0, 0))); + + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "dublin", + 4, + expectedDublinPivots, + null, + createExpectedRange("price", 0, 100, 20, 2, 1, 0, 0, 0))); placePivots = rsp.getFacetPivot().get("place_t,company_t"); assertEquals(expectedPlacePivots, placePivots); @@ -699,23 +1022,34 @@ expectedDublinPivots, null, createExpectedRange("price", 0, 100, index("id", 777); // NOTE: id=25 has no place as well commit(); - SolrParams missingA = params("q", "*:*", "rows", "0", "facet", "true", - "facet.pivot", "place_t,company_t", - // default facet.sort - FacetParams.FACET_MISSING, "true"); - SolrParams missingB = SolrParams.wrapDefaults(missingA, - params(FacetParams.FACET_LIMIT, "4", "facet.sort", "index")); - for (SolrParams p : new SolrParams[]{missingA, missingB}) { + SolrParams missingA = + params( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "place_t,company_t", + // default facet.sort + FacetParams.FACET_MISSING, + "true"); + SolrParams missingB = + SolrParams.wrapDefaults( + missingA, params(FacetParams.FACET_LIMIT, "4", "facet.sort", "index")); + for (SolrParams p : new SolrParams[] {missingA, missingB}) { // in either case, the last pivot option should be the same rsp = query(p); placePivots = rsp.getFacetPivot().get("place_t,company_t"); - assertTrue("not enough values for pivot: " + p + " => " + placePivots, - 1 < placePivots.size()); + assertTrue( + "not enough values for pivot: " + p + " => " + placePivots, 1 < placePivots.size()); PivotField missing = placePivots.get(placePivots.size() - 1); assertNull("not the missing place value: " + p, missing.getValue()); assertEquals("wrong missing place count: " + p, 2, missing.getCount()); - assertTrue("not enough sub-pivots for missing place: " + p + " => " - + missing.getPivot(), 1 < missing.getPivot().size()); + assertTrue( + "not enough sub-pivots for missing place: " + p + " => " + missing.getPivot(), + 1 < missing.getPivot().size()); missing = 
missing.getPivot().get(missing.getPivot().size() - 1); assertNull("not the missing company value: " + p, missing.getValue()); assertEquals("wrong missing company count: " + p, 1, missing.getCount()); @@ -723,15 +1057,30 @@ expectedDublinPivots, null, createExpectedRange("price", 0, 100, } // sort=index + mincount + limit - for (SolrParams variableParams : new SolrParams[]{ - // we should get the same results regardless of overrequest - params("facet.overrequest.count", "0", "facet.overrequest.ratio", "0"), - params()}) { - - SolrParams p = SolrParams.wrapDefaults( - params("q", "*:*", "rows", "0", "facet", "true", "facet.pivot", - "company_t", "facet.sort", "index", "facet.pivot.mincount", "4", - "facet.limit", "4"), variableParams); + for (SolrParams variableParams : + new SolrParams[] { + // we should get the same results regardless of overrequest + params("facet.overrequest.count", "0", "facet.overrequest.ratio", "0"), params() + }) { + + SolrParams p = + SolrParams.wrapDefaults( + params( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "company_t", + "facet.sort", + "index", + "facet.pivot.mincount", + "4", + "facet.limit", + "4"), + variableParams); try { List pivots = query(p).getFacetPivot().get("company_t"); @@ -751,15 +1100,32 @@ expectedDublinPivots, null, createExpectedRange("price", 0, 100, } // sort=index + mincount + limit + offset - for (SolrParams variableParams : new SolrParams[]{ - // we should get the same results regardless of overrequest - params("facet.overrequest.count", "0", "facet.overrequest.ratio", "0"), - params()}) { - - SolrParams p = SolrParams.wrapDefaults( - params("q", "*:*", "rows", "0", "facet", "true", "facet.pivot", - "company_t", "facet.sort", "index", "facet.pivot.mincount", "4", - "facet.offset", "1", "facet.limit", "4"), variableParams); + for (SolrParams variableParams : + new SolrParams[] { + // we should get the same results regardless of overrequest + params("facet.overrequest.count", "0", "facet.overrequest.ratio", "0"), params() + }) { + + SolrParams p = + SolrParams.wrapDefaults( + params( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "company_t", + "facet.sort", + "index", + "facet.pivot.mincount", + "4", + "facet.offset", + "1", + "facet.limit", + "4"), + variableParams); try { List pivots = query(p).getFacetPivot().get("company_t"); assertEquals(3, pivots.size()); // asked for 4, but not enough meet the @@ -774,20 +1140,35 @@ expectedDublinPivots, null, createExpectedRange("price", 0, 100, } catch (AssertionFailedError ae) { throw new AssertionError(ae.getMessage() + " <== " + p.toString(), ae); } - } // sort=index + mincount + limit + offset (more permutations) - for (SolrParams variableParams : new SolrParams[]{ - // all of these combinations should result in the same first value - params("facet.pivot.mincount", "4", "facet.offset", "2"), - params("facet.pivot.mincount", "5", "facet.offset", "1"), - params("facet.pivot.mincount", "6", "facet.offset", "0")}) { - - SolrParams p = SolrParams.wrapDefaults( - params("q", "*:*", "rows", "0", "facet", "true", "facet.limit", "1", - "facet.sort", "index", "facet.overrequest.ratio", "0", - "facet.pivot", "company_t"), variableParams); + for (SolrParams variableParams : + new SolrParams[] { + // all of these combinations should result in the same first value + params("facet.pivot.mincount", "4", "facet.offset", "2"), + params("facet.pivot.mincount", "5", "facet.offset", "1"), + params("facet.pivot.mincount", "6", "facet.offset", "0") + }) { + + 
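// Aside: the loop below asserts that (mincount=4, offset=2), (5, 1), and (6, 0) all yield
// the same first value. A worked sketch of why, using the shard-combined company counts
// implied by the surrounding assertions (class and method names invented for illustration):

    import java.util.LinkedHashMap;
    import java.util.Map;

    class MincountOffsetSketch {
      // First value surviving a mincount filter plus an offset, over index-sorted counts.
      static String firstValue(Map<String, Integer> indexSorted, int mincount, int offset) {
        return indexSorted.entrySet().stream()
            .filter(e -> e.getValue() >= mincount)
            .skip(offset)
            .map(Map.Entry::getKey)
            .findFirst()
            .orElse(null);
      }

      public static void main(String[] args) {
        Map<String, Integer> counts = new LinkedHashMap<>(); // lexicographic (index) order
        counts.put("bbc", 3);
        counts.put("fujitsu", 4);
        counts.put("microsoft", 5);
        counts.put("null", 6); // the literal company string "null", not a null reference
        counts.put("polecat", 6);
        counts.put("rte", 1);
        System.out.println(firstValue(counts, 4, 2)); // null
        System.out.println(firstValue(counts, 5, 1)); // null
        System.out.println(firstValue(counts, 6, 0)); // null
      }
    }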
SolrParams p = + SolrParams.wrapDefaults( + params( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.limit", + "1", + "facet.sort", + "index", + "facet.overrequest.ratio", + "0", + "facet.pivot", + "company_t"), + variableParams); try { List pivots = query(p).getFacetPivot().get("company_t"); @@ -813,111 +1194,276 @@ private void testFacetPivotQuery() throws Exception { List expectedPlacePivots = new UnorderedEqualityArrayList(); List expectedCardiffPivots = new UnorderedEqualityArrayList(); - expectedCardiffPivots.add(new ComparablePivotField("company_t", - "microsoft", 2, null, createExpectedQCount( - new String[]{"highPrice"}, new int[]{0}), null)); - expectedCardiffPivots.add(new ComparablePivotField("company_t", "null", 2, - null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{1}), null)); - expectedCardiffPivots.add(new ComparablePivotField("company_t", "bbc", 2, - null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{1}), null)); - expectedCardiffPivots.add(new ComparablePivotField("company_t", "polecat", - 3, null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{1}), null)); - expectedCardiffPivots.add(new ComparablePivotField("company_t", "fujitsu", - 1, null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{0}), null)); + expectedCardiffPivots.add( + new ComparablePivotField( + "company_t", + "microsoft", + 2, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {0}), + null)); + expectedCardiffPivots.add( + new ComparablePivotField( + "company_t", + "null", + 2, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); + expectedCardiffPivots.add( + new ComparablePivotField( + "company_t", + "bbc", + 2, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); + expectedCardiffPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 3, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); + expectedCardiffPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 1, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {0}), + null)); List expectedDublinPivots = new UnorderedEqualityArrayList(); - expectedDublinPivots.add(new ComparablePivotField("company_t", "polecat", - 4, null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{1}), null)); - expectedDublinPivots.add(new ComparablePivotField("company_t", "microsoft", - 4, null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{1}), null)); - expectedDublinPivots.add(new ComparablePivotField("company_t", "null", 3, - null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{1}), null)); - expectedDublinPivots.add(new ComparablePivotField("company_t", "fujitsu", - 2, null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{1}), null)); - expectedDublinPivots.add(new ComparablePivotField("company_t", "bbc", 1, - null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{0}), null)); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 4, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "microsoft", + 4, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "null", + 3, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + 
null)); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 2, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "bbc", + 1, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {0}), + null)); List expectedLondonPivots = new UnorderedEqualityArrayList(); - expectedLondonPivots.add(new ComparablePivotField("company_t", "polecat", - 3, null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{2}), null)); - expectedLondonPivots.add(new ComparablePivotField("company_t", "microsoft", - 2, null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{1}), null)); - expectedLondonPivots.add(new ComparablePivotField("company_t", "fujitsu", - 2, null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{1}), null)); - expectedLondonPivots.add(new ComparablePivotField("company_t", "null", 3, - null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{2}), null)); - expectedLondonPivots.add(new ComparablePivotField("company_t", "bbc", 2, - null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{1}), null)); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 3, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {2}), + null)); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "microsoft", + 2, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 2, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "null", + 3, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {2}), + null)); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "bbc", + 2, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); List expectedLAPivots = new UnorderedEqualityArrayList(); - expectedLAPivots.add(new ComparablePivotField("company_t", "microsoft", 2, - null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{1}), null)); - expectedLAPivots.add(new ComparablePivotField("company_t", "fujitsu", 2, - null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{1}), null)); - expectedLAPivots.add(new ComparablePivotField("company_t", "null", 2, null, - createExpectedQCount(new String[]{"highPrice"}, new int[]{1}), - null)); - expectedLAPivots.add(new ComparablePivotField("company_t", "bbc", 1, null, - createExpectedQCount(new String[]{"highPrice"}, new int[]{0}), - null)); - expectedLAPivots.add(new ComparablePivotField("company_t", "polecat", 2, - null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{1}), null)); + expectedLAPivots.add( + new ComparablePivotField( + "company_t", + "microsoft", + 2, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); + expectedLAPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 2, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); + expectedLAPivots.add( + new ComparablePivotField( + "company_t", + "null", + 2, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); + expectedLAPivots.add( + new ComparablePivotField( + "company_t", + "bbc", + 1, + null, + createExpectedQCount(new String[] 
{"highPrice"}, new int[] {0}), + null)); + expectedLAPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 2, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); List expectedKrakowPivots = new UnorderedEqualityArrayList(); - expectedKrakowPivots.add(new ComparablePivotField("company_t", "polecat", - 2, null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{1}), null)); - expectedKrakowPivots.add(new ComparablePivotField("company_t", "bbc", 2, - null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{1}), null)); - expectedKrakowPivots.add(new ComparablePivotField("company_t", "null", 3, - null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{1}), null)); - expectedKrakowPivots.add(new ComparablePivotField("company_t", "fujitsu", - 1, null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{0}), null)); - expectedKrakowPivots.add(new ComparablePivotField("company_t", "microsoft", - 1, null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{0}), null)); + expectedKrakowPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 2, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); + expectedKrakowPivots.add( + new ComparablePivotField( + "company_t", + "bbc", + 2, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); + expectedKrakowPivots.add( + new ComparablePivotField( + "company_t", + "null", + 3, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); + expectedKrakowPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 1, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {0}), + null)); + expectedKrakowPivots.add( + new ComparablePivotField( + "company_t", + "microsoft", + 1, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {0}), + null)); List expectedCorkPivots = new UnorderedEqualityArrayList(); - expectedCorkPivots.add(new ComparablePivotField("company_t", "fujitsu", 1, - null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{0}), null)); - expectedCorkPivots.add(new ComparablePivotField("company_t", "rte", 1, - null, createExpectedQCount(new String[]{"highPrice"}, - new int[]{0}), null)); - - expectedPlacePivots.add(new ComparablePivotField("place_t", "dublin", 4, - expectedDublinPivots, createExpectedQCount( - new String[]{"highPrice"}, new int[]{1}), null)); - expectedPlacePivots.add(new ComparablePivotField("place_t", "cardiff", 3, - expectedCardiffPivots, createExpectedQCount( - new String[]{"highPrice"}, new int[]{1}), null)); - expectedPlacePivots.add(new ComparablePivotField("place_t", "london", 4, - expectedLondonPivots, createExpectedQCount( - new String[]{"highPrice"}, new int[]{3}), null)); - expectedPlacePivots.add(new ComparablePivotField("place_t", "la", 3, - expectedLAPivots, createExpectedQCount( - new String[]{"highPrice"}, new int[]{1}), null)); - expectedPlacePivots.add(new ComparablePivotField("place_t", "krakow", 3, - expectedKrakowPivots, createExpectedQCount( - new String[]{"highPrice"}, new int[]{1}), null)); - expectedPlacePivots.add(new ComparablePivotField("place_t", "cork", 1, - expectedCorkPivots, createExpectedQCount( - new String[]{"highPrice"}, new int[]{0}), null)); + expectedCorkPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 1, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {0}), + null)); + expectedCorkPivots.add( + new 
ComparablePivotField( + "company_t", + "rte", + 1, + null, + createExpectedQCount(new String[] {"highPrice"}, new int[] {0}), + null)); + + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "dublin", + 4, + expectedDublinPivots, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "cardiff", + 3, + expectedCardiffPivots, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "london", + 4, + expectedLondonPivots, + createExpectedQCount(new String[] {"highPrice"}, new int[] {3}), + null)); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "la", + 3, + expectedLAPivots, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "krakow", + 3, + expectedKrakowPivots, + createExpectedQCount(new String[] {"highPrice"}, new int[] {1}), + null)); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "cork", + 1, + expectedCorkPivots, + createExpectedQCount(new String[] {"highPrice"}, new int[] {0}), + null)); List placePivots = rsp.getFacetPivot().get("place_t,company_t"); @@ -931,111 +1477,276 @@ expectedCorkPivots, createExpectedQCount( params.add("facet.query", "{!tag=s1 key=lowPrice}price_ti:[0 TO 20]"); expectedPlacePivots = new UnorderedEqualityArrayList(); expectedCardiffPivots = new UnorderedEqualityArrayList(); - expectedCardiffPivots.add(new ComparablePivotField("company_t", - "microsoft", 2, null, createExpectedQCount(new String[]{ - "highPrice", "lowPrice"}, new int[]{0, 1}), null)); - expectedCardiffPivots.add(new ComparablePivotField("company_t", "null", 2, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{1, 0}), null)); - expectedCardiffPivots.add(new ComparablePivotField("company_t", "bbc", 2, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{1, 0}), null)); - expectedCardiffPivots.add(new ComparablePivotField("company_t", "polecat", - 3, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{1, 1}), null)); - expectedCardiffPivots.add(new ComparablePivotField("company_t", "fujitsu", - 1, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{0, 0}), null)); + expectedCardiffPivots.add( + new ComparablePivotField( + "company_t", + "microsoft", + 2, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {0, 1}), + null)); + expectedCardiffPivots.add( + new ComparablePivotField( + "company_t", + "null", + 2, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 0}), + null)); + expectedCardiffPivots.add( + new ComparablePivotField( + "company_t", + "bbc", + 2, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 0}), + null)); + expectedCardiffPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 3, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 1}), + null)); + expectedCardiffPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 1, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {0, 0}), + null)); expectedDublinPivots = new UnorderedEqualityArrayList(); - expectedDublinPivots.add(new ComparablePivotField("company_t", "polecat", - 4, null, createExpectedQCount(new 
String[]{"highPrice", - "lowPrice"}, new int[]{1, 2}), null)); - expectedDublinPivots.add(new ComparablePivotField("company_t", "microsoft", - 4, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{1, 2}), null)); - expectedDublinPivots.add(new ComparablePivotField("company_t", "null", 3, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{1, 1}), null)); - expectedDublinPivots.add(new ComparablePivotField("company_t", "fujitsu", - 2, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{1, 0}), null)); - expectedDublinPivots.add(new ComparablePivotField("company_t", "bbc", 1, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{0, 0}), null)); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 4, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 2}), + null)); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "microsoft", + 4, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 2}), + null)); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "null", + 3, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 1}), + null)); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 2, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 0}), + null)); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "bbc", + 1, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {0, 0}), + null)); expectedLondonPivots = new UnorderedEqualityArrayList(); - expectedLondonPivots.add(new ComparablePivotField("company_t", "polecat", - 3, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{2, 0}), null)); - expectedLondonPivots.add(new ComparablePivotField("company_t", "microsoft", - 2, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{1, 0}), null)); - expectedLondonPivots.add(new ComparablePivotField("company_t", "fujitsu", - 2, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{1, 0}), null)); - expectedLondonPivots.add(new ComparablePivotField("company_t", "null", 3, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{2, 0}), null)); - expectedLondonPivots.add(new ComparablePivotField("company_t", "bbc", 2, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{1, 0}), null)); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 3, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {2, 0}), + null)); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "microsoft", + 2, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 0}), + null)); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 2, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 0}), + null)); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "null", + 3, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {2, 0}), + null)); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "bbc", + 2, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 
0}), + null)); expectedLAPivots = new UnorderedEqualityArrayList(); - expectedLAPivots.add(new ComparablePivotField("company_t", "microsoft", 2, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{1, 0}), null)); - expectedLAPivots.add(new ComparablePivotField("company_t", "fujitsu", 2, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{1, 0}), null)); - expectedLAPivots.add(new ComparablePivotField("company_t", "null", 2, null, - createExpectedQCount(new String[]{"highPrice", "lowPrice"}, - new int[]{1, 0}), null)); - expectedLAPivots.add(new ComparablePivotField("company_t", "bbc", 1, null, - createExpectedQCount(new String[]{"highPrice", "lowPrice"}, - new int[]{0, 0}), null)); - expectedLAPivots.add(new ComparablePivotField("company_t", "polecat", 2, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{1, 0}), null)); + expectedLAPivots.add( + new ComparablePivotField( + "company_t", + "microsoft", + 2, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 0}), + null)); + expectedLAPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 2, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 0}), + null)); + expectedLAPivots.add( + new ComparablePivotField( + "company_t", + "null", + 2, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 0}), + null)); + expectedLAPivots.add( + new ComparablePivotField( + "company_t", + "bbc", + 1, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {0, 0}), + null)); + expectedLAPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 2, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 0}), + null)); expectedKrakowPivots = new UnorderedEqualityArrayList(); - expectedKrakowPivots.add(new ComparablePivotField("company_t", "polecat", - 2, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{1, 0}), null)); - expectedKrakowPivots.add(new ComparablePivotField("company_t", "bbc", 2, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{1, 0}), null)); - expectedKrakowPivots.add(new ComparablePivotField("company_t", "null", 3, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{1, 0}), null)); - expectedKrakowPivots.add(new ComparablePivotField("company_t", "fujitsu", - 1, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{0, 0}), null)); - expectedKrakowPivots.add(new ComparablePivotField("company_t", "microsoft", - 1, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{0, 0}), null)); + expectedKrakowPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 2, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 0}), + null)); + expectedKrakowPivots.add( + new ComparablePivotField( + "company_t", + "bbc", + 2, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 0}), + null)); + expectedKrakowPivots.add( + new ComparablePivotField( + "company_t", + "null", + 3, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 0}), + null)); + expectedKrakowPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 1, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {0, 0}), + null)); + expectedKrakowPivots.add( + new 
ComparablePivotField( + "company_t", + "microsoft", + 1, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {0, 0}), + null)); expectedCorkPivots = new UnorderedEqualityArrayList(); - expectedCorkPivots.add(new ComparablePivotField("company_t", "fujitsu", 1, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{0, 0}), null)); - expectedCorkPivots.add(new ComparablePivotField("company_t", "rte", 1, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{0, 0}), null)); - - expectedPlacePivots.add(new ComparablePivotField("place_t", "dublin", 4, - expectedDublinPivots, createExpectedQCount(new String[]{ - "highPrice", "lowPrice"}, new int[]{1, 2}), null)); - expectedPlacePivots.add(new ComparablePivotField("place_t", "cardiff", 3, - expectedCardiffPivots, createExpectedQCount(new String[]{ - "highPrice", "lowPrice"}, new int[]{1, 1}), null)); - expectedPlacePivots.add(new ComparablePivotField("place_t", "london", 4, - expectedLondonPivots, createExpectedQCount(new String[]{ - "highPrice", "lowPrice"}, new int[]{3, 0}), null)); - expectedPlacePivots.add(new ComparablePivotField("place_t", "la", 3, - expectedLAPivots, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{1, 0}), null)); - expectedPlacePivots.add(new ComparablePivotField("place_t", "krakow", 3, - expectedKrakowPivots, createExpectedQCount(new String[]{ - "highPrice", "lowPrice"}, new int[]{1, 0}), null)); - expectedPlacePivots.add(new ComparablePivotField("place_t", "cork", 1, - expectedCorkPivots, createExpectedQCount(new String[]{ - "highPrice", "lowPrice"}, new int[]{0, 0}), null)); + expectedCorkPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 1, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {0, 0}), + null)); + expectedCorkPivots.add( + new ComparablePivotField( + "company_t", + "rte", + 1, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {0, 0}), + null)); + + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "dublin", + 4, + expectedDublinPivots, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 2}), + null)); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "cardiff", + 3, + expectedCardiffPivots, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 1}), + null)); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "london", + 4, + expectedLondonPivots, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {3, 0}), + null)); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "la", + 3, + expectedLAPivots, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 0}), + null)); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "krakow", + 3, + expectedKrakowPivots, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 0}), + null)); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "cork", + 1, + expectedCorkPivots, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {0, 0}), + null)); rsp = queryServer(params); @@ -1064,25 +1775,55 @@ expectedCorkPivots, createExpectedQCount(new String[]{ expectedPlacePivots = new UnorderedEqualityArrayList(); expectedDublinPivots = new UnorderedEqualityArrayList(); - expectedDublinPivots.add(new ComparablePivotField("company_t", "polecat", - 4, null, 
createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{1, 2}), null)); - expectedDublinPivots.add(new ComparablePivotField("company_t", "microsoft", - 4, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{1, 2}), null)); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 4, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 2}), + null)); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "microsoft", + 4, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 2}), + null)); expectedLondonPivots = new UnorderedEqualityArrayList(); - expectedLondonPivots.add(new ComparablePivotField("company_t", "null", 3, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{2, 0}), null)); - expectedLondonPivots.add(new ComparablePivotField("company_t", "polecat", - 3, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{2, 0}), null)); - expectedPlacePivots.add(new ComparablePivotField("place_t", "dublin", 4, - expectedDublinPivots, createExpectedQCount(new String[]{ - "highPrice", "lowPrice"}, new int[]{1, 2}), null)); - expectedPlacePivots.add(new ComparablePivotField("place_t", "london", 4, - expectedLondonPivots, createExpectedQCount(new String[]{ - "highPrice", "lowPrice"}, new int[]{3, 0}), null)); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "null", + 3, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {2, 0}), + null)); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 3, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {2, 0}), + null)); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "dublin", + 4, + expectedDublinPivots, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 2}), + null)); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "london", + 4, + expectedLondonPivots, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {3, 0}), + null)); placePivots = rsp.getFacetPivot().get("place_t,company_t"); @@ -1099,56 +1840,121 @@ expectedLondonPivots, createExpectedQCount(new String[]{ expectedPlacePivots = new UnorderedEqualityArrayList(); expectedDublinPivots = new UnorderedEqualityArrayList(); - expectedDublinPivots.add(new ComparablePivotField("company_t", "microsoft", - 4, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{1, 2}), null)); - expectedDublinPivots.add(new ComparablePivotField("company_t", "polecat", - 4, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{1, 2}), null)); - expectedDublinPivots.add(new ComparablePivotField("company_t", "null", 3, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{1, 1}), null)); - expectedDublinPivots.add(new ComparablePivotField("company_t", "fujitsu", - 2, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{1, 0}), null)); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "microsoft", + 4, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 2}), + null)); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 4, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 2}), + null)); + 
expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "null", + 3, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 1}), + null)); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 2, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 0}), + null)); expectedLondonPivots = new UnorderedEqualityArrayList(); - expectedLondonPivots.add(new ComparablePivotField("company_t", "null", 3, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{2, 0}), null)); - expectedLondonPivots.add(new ComparablePivotField("company_t", "polecat", - 3, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{2, 0}), null)); - expectedLondonPivots.add(new ComparablePivotField("company_t", "bbc", 2, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{1, 0}), null)); - expectedLondonPivots.add(new ComparablePivotField("company_t", "fujitsu", - 2, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{1, 0}), null)); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "null", + 3, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {2, 0}), + null)); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 3, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {2, 0}), + null)); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "bbc", + 2, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 0}), + null)); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 2, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 0}), + null)); expectedCardiffPivots = new UnorderedEqualityArrayList(); - expectedCardiffPivots.add(new ComparablePivotField("company_t", "polecat", - 3, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{1, 1}), null)); + expectedCardiffPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 3, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 1}), + null)); expectedKrakowPivots = new UnorderedEqualityArrayList(); - expectedKrakowPivots.add(new ComparablePivotField("company_t", "null", 3, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{1, 0}), null)); + expectedKrakowPivots.add( + new ComparablePivotField( + "company_t", + "null", + 3, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 0}), + null)); expectedLAPivots = new UnorderedEqualityArrayList(); - expectedLAPivots.add(new ComparablePivotField("company_t", "fujitsu", 2, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{1, 0}), null)); + expectedLAPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 2, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 0}), + null)); expectedCorkPivots = new UnorderedEqualityArrayList(); - expectedCorkPivots.add(new ComparablePivotField("company_t", "fujitsu", 1, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{0, 0}), null)); - - expectedPlacePivots.add(new ComparablePivotField("place_t", "dublin", 4, - expectedDublinPivots, createExpectedQCount(new String[]{ - "highPrice", "lowPrice"}, new int[]{1, 2}), null)); + 
expectedCorkPivots.add( + new ComparablePivotField( + "company_t", + "fujitsu", + 1, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {0, 0}), + null)); + + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "dublin", + 4, + expectedDublinPivots, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 2}), + null)); placePivots = rsp.getFacetPivot().get("place_t,company_t"); assertEquals(expectedPlacePivots, placePivots); @@ -1162,23 +1968,34 @@ expectedDublinPivots, createExpectedQCount(new String[]{ index("id", 777); // NOTE: id=25 has no place as well commit(); - SolrParams missingA = params("q", "*:*", "rows", "0", "facet", "true", - "facet.pivot", "place_t,company_t", - // default facet.sort - FacetParams.FACET_MISSING, "true"); - SolrParams missingB = SolrParams.wrapDefaults(missingA, - params(FacetParams.FACET_LIMIT, "4", "facet.sort", "index")); - for (SolrParams p : new SolrParams[]{missingA, missingB}) { + SolrParams missingA = + params( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "place_t,company_t", + // default facet.sort + FacetParams.FACET_MISSING, + "true"); + SolrParams missingB = + SolrParams.wrapDefaults( + missingA, params(FacetParams.FACET_LIMIT, "4", "facet.sort", "index")); + for (SolrParams p : new SolrParams[] {missingA, missingB}) { // in either case, the last pivot option should be the same rsp = query(p); placePivots = rsp.getFacetPivot().get("place_t,company_t"); - assertTrue("not enough values for pivot: " + p + " => " + placePivots, - 1 < placePivots.size()); + assertTrue( + "not enough values for pivot: " + p + " => " + placePivots, 1 < placePivots.size()); PivotField missing = placePivots.get(placePivots.size() - 1); assertNull("not the missing place value: " + p, missing.getValue()); assertEquals("wrong missing place count: " + p, 2, missing.getCount()); - assertTrue("not enough sub-pivots for missing place: " + p + " => " - + missing.getPivot(), 1 < missing.getPivot().size()); + assertTrue( + "not enough sub-pivots for missing place: " + p + " => " + missing.getPivot(), + 1 < missing.getPivot().size()); missing = missing.getPivot().get(missing.getPivot().size() - 1); assertNull("not the missing company value: " + p, missing.getValue()); assertEquals("wrong missing company count: " + p, 1, missing.getCount()); @@ -1186,15 +2003,30 @@ expectedDublinPivots, createExpectedQCount(new String[]{ } // sort=index + mincount + limit - for (SolrParams variableParams : new SolrParams[]{ - // we should get the same results regardless of overrequest - params("facet.overrequest.count", "0", "facet.overrequest.ratio", "0"), - params()}) { - - SolrParams p = SolrParams.wrapDefaults( - params("q", "*:*", "rows", "0", "facet", "true", "facet.pivot", - "company_t", "facet.sort", "index", "facet.pivot.mincount", "4", - "facet.limit", "4"), variableParams); + for (SolrParams variableParams : + new SolrParams[] { + // we should get the same results regardless of overrequest + params("facet.overrequest.count", "0", "facet.overrequest.ratio", "0"), params() + }) { + + SolrParams p = + SolrParams.wrapDefaults( + params( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "company_t", + "facet.sort", + "index", + "facet.pivot.mincount", + "4", + "facet.limit", + "4"), + variableParams); try { List pivots = query(p).getFacetPivot().get("company_t"); @@ -1214,15 +2046,32 @@ expectedDublinPivots, createExpectedQCount(new String[]{ } // sort=index + mincount + limit + 
offset - for (SolrParams variableParams : new SolrParams[]{ - // we should get the same results regardless of overrequest - params("facet.overrequest.count", "0", "facet.overrequest.ratio", "0"), - params()}) { - - SolrParams p = SolrParams.wrapDefaults( - params("q", "*:*", "rows", "0", "facet", "true", "facet.pivot", - "company_t", "facet.sort", "index", "facet.pivot.mincount", "4", - "facet.offset", "1", "facet.limit", "4"), variableParams); + for (SolrParams variableParams : + new SolrParams[] { + // we should get the same results regardless of overrequest + params("facet.overrequest.count", "0", "facet.overrequest.ratio", "0"), params() + }) { + + SolrParams p = + SolrParams.wrapDefaults( + params( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "company_t", + "facet.sort", + "index", + "facet.pivot.mincount", + "4", + "facet.offset", + "1", + "facet.limit", + "4"), + variableParams); try { List pivots = query(p).getFacetPivot().get("company_t"); assertEquals(3, pivots.size()); // asked for 4, but not enough meet the @@ -1237,20 +2086,35 @@ expectedDublinPivots, createExpectedQCount(new String[]{ } catch (AssertionFailedError ae) { throw new AssertionError(ae.getMessage() + " <== " + p.toString(), ae); } - } // sort=index + mincount + limit + offset (more permutations) - for (SolrParams variableParams : new SolrParams[]{ - // all of these combinations should result in the same first value - params("facet.pivot.mincount", "4", "facet.offset", "2"), - params("facet.pivot.mincount", "5", "facet.offset", "1"), - params("facet.pivot.mincount", "6", "facet.offset", "0")}) { - - SolrParams p = SolrParams.wrapDefaults( - params("q", "*:*", "rows", "0", "facet", "true", "facet.limit", "1", - "facet.sort", "index", "facet.overrequest.ratio", "0", - "facet.pivot", "company_t"), variableParams); + for (SolrParams variableParams : + new SolrParams[] { + // all of these combinations should result in the same first value + params("facet.pivot.mincount", "4", "facet.offset", "2"), + params("facet.pivot.mincount", "5", "facet.offset", "1"), + params("facet.pivot.mincount", "6", "facet.offset", "0") + }) { + + SolrParams p = + SolrParams.wrapDefaults( + params( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.limit", + "1", + "facet.sort", + "index", + "facet.overrequest.ratio", + "0", + "facet.pivot", + "company_t"), + variableParams); try { List pivots = query(p).getFacetPivot().get("company_t"); @@ -1265,48 +2129,90 @@ expectedDublinPivots, createExpectedQCount(new String[]{ } private void testPivotFacetRangeAndQuery() throws Exception { - SolrParams params = params("q", "*:*", - "rows", "0", - "facet", "true", - "stats", "true", - "facet.pivot", "{!range=s1 query=s2 stats=s3}place_t,company_t", - "facet.range", "{!tag=s1 key=price}price_ti", - "facet.query", "{!tag=s2 key=highPrice}price_ti:[25 TO 100]", - "facet.query", "{!tag=s2 key=lowPrice}price_ti:[0 TO 20]", - "stats.field", ("{!tag=s3 key=avg_price}price_ti"), - "facet.range.start", "0", - "facet.range.end", "100", - "facet.range.gap", "20", - FacetParams.FACET_SORT, FacetParams.FACET_SORT_COUNT, - FacetParams.FACET_LIMIT, "2"); + SolrParams params = + params( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "stats", + "true", + "facet.pivot", + "{!range=s1 query=s2 stats=s3}place_t,company_t", + "facet.range", + "{!tag=s1 key=price}price_ti", + "facet.query", + "{!tag=s2 key=highPrice}price_ti:[25 TO 100]", + "facet.query", + "{!tag=s2 key=lowPrice}price_ti:[0 TO 20]", + "stats.field", + 
("{!tag=s3 key=avg_price}price_ti"), + "facet.range.start", + "0", + "facet.range.end", + "100", + "facet.range.gap", + "20", + FacetParams.FACET_SORT, + FacetParams.FACET_SORT_COUNT, + FacetParams.FACET_LIMIT, + "2"); UnorderedEqualityArrayList expectedPlacePivots = new UnorderedEqualityArrayList<>(); - UnorderedEqualityArrayList expectedDublinPivots = new UnorderedEqualityArrayList<>(); - expectedDublinPivots.add(new ComparablePivotField("company_t", "polecat", - 4, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{1, 2}), createExpectedRange("price", 0, 100, 20, 2, 1, 0, - 0, 0))); - expectedDublinPivots.add(new ComparablePivotField("company_t", "microsoft", - 4, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{1, 2}), createExpectedRange("price", 0, 100, 20, 2, 1, 0, - 0, 0))); - UnorderedEqualityArrayList expectedLondonPivots = new UnorderedEqualityArrayList<>(); - expectedLondonPivots.add(new ComparablePivotField("company_t", "null", 3, - null, createExpectedQCount( - new String[]{"highPrice", "lowPrice"}, new int[]{2, 0}), createExpectedRange("price", 0, 100, 20, 0, 2, 0, 0, - 0))); - expectedLondonPivots.add(new ComparablePivotField("company_t", "polecat", - 3, null, createExpectedQCount(new String[]{"highPrice", - "lowPrice"}, new int[]{2, 0}), createExpectedRange("price", 0, 100, 20, 0, 2, 0, - 0, 0))); - expectedPlacePivots.add(new ComparablePivotField("place_t", "dublin", 4, - expectedDublinPivots, createExpectedQCount(new String[]{ - "highPrice", "lowPrice"}, new int[]{1, 2}), createExpectedRange("price", 0, 100, - 20, 2, 1, 0, 0, 0))); - expectedPlacePivots.add(new ComparablePivotField("place_t", "london", 4, - expectedLondonPivots, createExpectedQCount(new String[]{ - "highPrice", "lowPrice"}, new int[]{3, 0}), createExpectedRange("price", 0, 100, - 20, 0, 3, 0, 0, 0))); + UnorderedEqualityArrayList expectedDublinPivots = + new UnorderedEqualityArrayList<>(); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 4, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 2}), + createExpectedRange("price", 0, 100, 20, 2, 1, 0, 0, 0))); + expectedDublinPivots.add( + new ComparablePivotField( + "company_t", + "microsoft", + 4, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 2}), + createExpectedRange("price", 0, 100, 20, 2, 1, 0, 0, 0))); + UnorderedEqualityArrayList expectedLondonPivots = + new UnorderedEqualityArrayList<>(); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "null", + 3, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {2, 0}), + createExpectedRange("price", 0, 100, 20, 0, 2, 0, 0, 0))); + expectedLondonPivots.add( + new ComparablePivotField( + "company_t", + "polecat", + 3, + null, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {2, 0}), + createExpectedRange("price", 0, 100, 20, 0, 2, 0, 0, 0))); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "dublin", + 4, + expectedDublinPivots, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {1, 2}), + createExpectedRange("price", 0, 100, 20, 2, 1, 0, 0, 0))); + expectedPlacePivots.add( + new ComparablePivotField( + "place_t", + "london", + 4, + expectedLondonPivots, + createExpectedQCount(new String[] {"highPrice", "lowPrice"}, new int[] {3, 0}), + createExpectedRange("price", 0, 100, 20, 0, 3, 0, 0, 0))); QueryResponse rsp = query(params); 
List placePivots = rsp.getFacetPivot().get("place_t,company_t"); @@ -1320,7 +2226,8 @@ expectedLondonPivots, createExpectedQCount(new String[]{ assertEquals("microsoft", microsoftPivotField.getValue()); assertEquals(4, microsoftPivotField.getCount()); - FieldStatsInfo dublinMicrosoftStatsInfo = microsoftPivotField.getFieldStatsInfo().get("avg_price"); + FieldStatsInfo dublinMicrosoftStatsInfo = + microsoftPivotField.getFieldStatsInfo().get("avg_price"); assertEquals(21.0, (double) dublinMicrosoftStatsInfo.getMean(), 0.1E-7); assertEquals(15.0, dublinMicrosoftStatsInfo.getMin()); assertEquals(29.0, dublinMicrosoftStatsInfo.getMax()); @@ -1333,83 +2240,157 @@ expectedLondonPivots, createExpectedQCount(new String[]{ private void testNegativeFacetQuery() throws Exception { // this should not hang facet.query under the pivot - SolrParams params = params("q", "*:*", - "rows", "0", - "stats", "true", - "facet.query", "{!tag=ttt}price_ti:[25 TO 100]", - "facet", "true", - "facet.pivot", "{!query=t}place_t,company_t"); + SolrParams params = + params( + "q", + "*:*", + "rows", + "0", + "stats", + "true", + "facet.query", + "{!tag=ttt}price_ti:[25 TO 100]", + "facet", + "true", + "facet.pivot", + "{!query=t}place_t,company_t"); QueryResponse rsp = query(params); - assertNullFacetTypeInsidePivot(FacetParams.FACET_QUERY, rsp.getFacetPivot().get("place_t,company_t")); - - params = params("q", "*:*", - "rows", "0", - "stats", "true", - "facet", "true", - "facet.pivot", "{!query=t}place_t,company_t"); + assertNullFacetTypeInsidePivot( + FacetParams.FACET_QUERY, rsp.getFacetPivot().get("place_t,company_t")); + + params = + params( + "q", + "*:*", + "rows", + "0", + "stats", + "true", + "facet", + "true", + "facet.pivot", + "{!query=t}place_t,company_t"); rsp = query(params); - assertNullFacetTypeInsidePivot(FacetParams.FACET_QUERY, rsp.getFacetPivot().get("place_t,company_t")); - - params = params("q", "*:*", - "rows", "0", - "facet.query", "{!tag=t}price_ti:[25 TO 100]", - "hang", "", // empty - "facet", "true", - "facet.pivot", "{!query=$hang}place_t,company_t"); + assertNullFacetTypeInsidePivot( + FacetParams.FACET_QUERY, rsp.getFacetPivot().get("place_t,company_t")); + + params = + params( + "q", + "*:*", + "rows", + "0", + "facet.query", + "{!tag=t}price_ti:[25 TO 100]", + "hang", + "", // empty + "facet", + "true", + "facet.pivot", + "{!query=$hang}place_t,company_t"); rsp = query(params); - assertNullFacetTypeInsidePivot(FacetParams.FACET_QUERY, rsp.getFacetPivot().get("place_t,company_t")); - - params = params("q", "*:*", - "rows", "0", - "facet.query", "{!tag=t}price_ti:[25 TO 100]", - "hang", "price_ti:[0 TO 20]", // with a query - "facet", "true", - "facet.pivot", "{!query=$hang}place_t,company_t"); + assertNullFacetTypeInsidePivot( + FacetParams.FACET_QUERY, rsp.getFacetPivot().get("place_t,company_t")); + + params = + params( + "q", + "*:*", + "rows", + "0", + "facet.query", + "{!tag=t}price_ti:[25 TO 100]", + "hang", + "price_ti:[0 TO 20]", // with a query + "facet", + "true", + "facet.pivot", + "{!query=$hang}place_t,company_t"); rsp = query(params); - // we aren't going to start calculating facet query unless the query is specified with a 'facet.query' param + // we aren't going to start calculating facet query unless the query is specified with a + // 'facet.query' param // hence hanging an arbitrary query shouldn't work - assertNullFacetTypeInsidePivot(FacetParams.FACET_QUERY, rsp.getFacetPivot().get("place_t,company_t")); + assertNullFacetTypeInsidePivot( + FacetParams.FACET_QUERY, 
rsp.getFacetPivot().get("place_t,company_t")); } private void testNegativeRangeQuery() throws Exception { - SolrParams params = params("q", "*:*", - "rows", "0", - "stats", "true", - "facet.range", "{!tag=s1 key=price}price_ti", - "facet", "true", - "facet.pivot", "{!range=s}place_t,company_t", - "facet.range.start", "0", - "facet.range.end", "100", - "facet.range.gap", "20"); + SolrParams params = + params( + "q", + "*:*", + "rows", + "0", + "stats", + "true", + "facet.range", + "{!tag=s1 key=price}price_ti", + "facet", + "true", + "facet.pivot", + "{!range=s}place_t,company_t", + "facet.range.start", + "0", + "facet.range.end", + "100", + "facet.range.gap", + "20"); QueryResponse rsp = query(params); - assertNullFacetTypeInsidePivot(FacetParams.FACET_RANGE, rsp.getFacetPivot().get("place_t,company_t")); - - params = params("q", "*:*", - "rows", "0", - "stats", "true", - "facet.range", "{!tag=s1 key=price}price_ti", - "facet", "true", - "hang", "", // empty! - "facet.pivot", "{!range=$hang}place_t,company_t", - "facet.range.start", "0", - "facet.range.end", "100", - "facet.range.gap", "20"); + assertNullFacetTypeInsidePivot( + FacetParams.FACET_RANGE, rsp.getFacetPivot().get("place_t,company_t")); + + params = + params( + "q", + "*:*", + "rows", + "0", + "stats", + "true", + "facet.range", + "{!tag=s1 key=price}price_ti", + "facet", + "true", + "hang", + "", // empty! + "facet.pivot", + "{!range=$hang}place_t,company_t", + "facet.range.start", + "0", + "facet.range.end", + "100", + "facet.range.gap", + "20"); rsp = query(params); - assertNullFacetTypeInsidePivot(FacetParams.FACET_RANGE, rsp.getFacetPivot().get("place_t,company_t")); - - params = params("q", "*:*", - "rows", "0", - "stats", "true", - "facet.range", "{!tag=s1 key=price}price_ti", - "facet", "true", - "hang", "price_ti", - "facet.pivot", "{!range=$hang}place_t,company_t", - "facet.range.start", "0", - "facet.range.end", "100", - "facet.range.gap", "20"); + assertNullFacetTypeInsidePivot( + FacetParams.FACET_RANGE, rsp.getFacetPivot().get("place_t,company_t")); + + params = + params( + "q", + "*:*", + "rows", + "0", + "stats", + "true", + "facet.range", + "{!tag=s1 key=price}price_ti", + "facet", + "true", + "hang", + "price_ti", + "facet.pivot", + "{!range=$hang}place_t,company_t", + "facet.range.start", + "0", + "facet.range.end", + "100", + "facet.range.gap", + "20"); rsp = query(params); - assertNullFacetTypeInsidePivot(FacetParams.FACET_RANGE, rsp.getFacetPivot().get("place_t,company_t")); + assertNullFacetTypeInsidePivot( + FacetParams.FACET_RANGE, rsp.getFacetPivot().get("place_t,company_t")); } private Map createExpectedQCount(String[] keys, int[] counts) { @@ -1423,11 +2404,23 @@ private Map createExpectedQCount(String[] keys, int[] counts) { private void assertNullFacetTypeInsidePivot(String facetType, List pivots) { for (PivotField pivot : pivots) { if (facetType == FacetParams.FACET_QUERY) { - assertNull("pivot=" + pivot + " facetType=" + facetType - + " should've been null. Found: " + pivot.getFacetQuery(), pivot.getFacetQuery()); + assertNull( + "pivot=" + + pivot + + " facetType=" + + facetType + + " should've been null. Found: " + + pivot.getFacetQuery(), + pivot.getFacetQuery()); } else if (facetType == FacetParams.FACET_RANGE) { - assertNull("pivot=" + pivot + " facetType=" + facetType - + " should've been null. Found: " + pivot.getFacetRanges(), pivot.getFacetRanges()); + assertNull( + "pivot=" + + pivot + + " facetType=" + + facetType + + " should've been null. 
Found: " + + pivot.getFacetRanges(), + pivot.getFacetRanges()); } if (pivot.getPivot() != null) { @@ -1455,17 +2448,29 @@ private void testOrderedPivotsStringEquality( } /** - * sanity check the stat values nested under a pivot when at least one shard - * has nothing but missing values for the stat + * sanity check the stat values nested under a pivot when at least one shard has nothing but + * missing values for the stat */ private void doTestPivotStatsFromOneShard() throws Exception { - SolrParams params = params("q", "*:*", "rows", "0", - "facet", "true", "stats", "true", - "facet.pivot", "{!stats=s1}place_t,company_t", - "stats.field", "{!tag=s1}solo_i", - "stats.field", "{!tag=s1}solo_s", - "stats.field", "{!tag=s1}solo_dt"); - + SolrParams params = + params( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "stats", + "true", + "facet.pivot", + "{!stats=s1}place_t,company_t", + "stats.field", + "{!tag=s1}solo_i", + "stats.field", + "{!tag=s1}solo_s", + "stats.field", + "{!tag=s1}solo_dt"); + QueryResponse rsp = query(params); List placePivots = rsp.getFacetPivot().get("place_t,company_t"); @@ -1478,22 +2483,26 @@ private void doTestPivotStatsFromOneShard() throws Exception { assertEquals("null", companyPivot.getValue()); assertEquals(3, companyPivot.getCount()); - for (PivotField pf : new PivotField[] { placePivot, companyPivot }) { + for (PivotField pf : new PivotField[] {placePivot, companyPivot}) { assertThereCanBeOnlyOne(pf, pf.getFieldStatsInfo().get("solo_s"), "lonely"); assertThereCanBeOnlyOne(pf, pf.getFieldStatsInfo().get("solo_i"), 42.0D); - assertEquals(pf.getField()+":"+pf.getValue()+": int mean", - 42.0D, pf.getFieldStatsInfo().get("solo_i").getMean()); + assertEquals( + pf.getField() + ":" + pf.getValue() + ": int mean", + 42.0D, + pf.getFieldStatsInfo().get("solo_i").getMean()); Object expected = new Date(194923425000L); // 1976-03-06T01:23:45Z assertThereCanBeOnlyOne(pf, pf.getFieldStatsInfo().get("solo_dt"), expected); - assertEquals(pf.getField()+":"+pf.getValue()+": date mean", - expected, pf.getFieldStatsInfo().get("solo_dt").getMean()); + assertEquals( + pf.getField() + ":" + pf.getValue() + ": date mean", + expected, + pf.getFieldStatsInfo().get("solo_dt").getMean()); // TODO: add enum field asserts - blocked by SOLR-6682 } } - + private void testCountSorting(List pivots) { Integer lastCount = null; for (PivotField pivot : pivots) { @@ -1506,26 +2515,27 @@ private void testCountSorting(List pivots) { } } } - + /** * given a PivotField, a FieldStatsInfo, and a value; asserts that: + * *

   * <ul>
-  *  <li>stat count == 1</li>
-  *  <li>stat missing == pivot count - 1</li>
-  *  <li>stat min == stat max == value</li>
+  *   <li>stat count == 1
+  *   <li>stat missing == pivot count - 1
+  *   <li>stat min == stat max == value
   * </ul>
*/ private void assertThereCanBeOnlyOne(PivotField pf, FieldStatsInfo stats, Object val) { String msg = pf.getField() + ":" + pf.getValue(); assertEquals(msg + " stats count", 1L, (long) stats.getCount()); - assertEquals(msg + " stats missing", pf.getCount()-1L, (long) stats.getMissing()); + assertEquals(msg + " stats missing", pf.getCount() - 1L, (long) stats.getMissing()); assertEquals(msg + " stats min", val, stats.getMin()); assertEquals(msg + " stats max", val, stats.getMax()); } @SuppressWarnings({"rawtypes"}) - private List createExpectedRange(String key, int start, int end, - int gap, int... values) { + private List createExpectedRange( + String key, int start, int end, int gap, int... values) { List expectedRanges = new ArrayList<>(); RangeFacet expectedPrices = new RangeFacet.Numeric(key, start, end, gap, null, null, null); expectedRanges.add(expectedPrices); @@ -1541,14 +2551,17 @@ private List createExpectedRange(String key, int start, int end, public static class ComparablePivotField extends PivotField { - public ComparablePivotField(String f, Object v, int count, - List pivot, Map queryCounts, - @SuppressWarnings({"rawtypes"})List ranges) { + public ComparablePivotField( + String f, + Object v, + int count, + List pivot, + Map queryCounts, + @SuppressWarnings({"rawtypes"}) List ranges) { super(f, v, count, pivot, null, queryCounts, ranges); } - public ComparablePivotField(String f, Object v, int count, - List pivot) { + public ComparablePivotField(String f, Object v, int count, List pivot) { super(f, v, count, pivot, null, null, null); } @@ -1572,27 +2585,28 @@ public boolean equals(Object obj) { if (other.getFacetRanges() != null) return false; } else { if (getFacetRanges().size() != other.getFacetRanges().size()) return false; - for (@SuppressWarnings({"rawtypes"})RangeFacet entry : getFacetRanges()) { + for (@SuppressWarnings({"rawtypes"}) RangeFacet entry : getFacetRanges()) { boolean found = false; - for (@SuppressWarnings({"rawtypes"})RangeFacet otherRange : other.getFacetRanges()) { + for (@SuppressWarnings({"rawtypes"}) RangeFacet otherRange : other.getFacetRanges()) { if (otherRange.getName().equals(entry.getName())) { found = true; - if (!entry.getGap().equals(otherRange.getGap())) return false; - if (!entry.getStart().equals(otherRange.getStart())) return false; - if (!entry.getEnd().equals(otherRange.getEnd())) return false; + if (!entry.getGap().equals(otherRange.getGap())) return false; + if (!entry.getStart().equals(otherRange.getStart())) return false; + if (!entry.getEnd().equals(otherRange.getEnd())) return false; @SuppressWarnings({"unchecked"}) List myCounts = entry.getCounts(); @SuppressWarnings({"unchecked"}) List otherRangeCounts = otherRange.getCounts(); - if ( (myCounts == null && otherRangeCounts != null) + if ((myCounts == null && otherRangeCounts != null) || (myCounts != null && otherRangeCounts == null) - || (myCounts.size() != otherRangeCounts.size())) return false; + || (myCounts.size() != otherRangeCounts.size())) return false; - for (int i=0; i entry : getFacetQuery().entrySet()) { + for (Map.Entry entry : getFacetQuery().entrySet()) { Integer otherQCount = other.getFacetQuery().get(entry.getKey()); if (otherQCount == null || !otherQCount.equals(entry.getValue())) return false; } @@ -1616,7 +2630,7 @@ public int hashCode() { throw new UnsupportedOperationException("Calling hashCode in ComparablePivotField"); } } - + public static class UnorderedEqualityArrayList extends ArrayList { @Override @@ -1640,7 +2654,7 @@ public boolean equals(Object 
o) { } return equal; } - + public int indexOf(Object o) { for (int i = 0; i < size(); i++) { if (get(i).equals(o)) { @@ -1650,9 +2664,9 @@ public int indexOf(Object o) { return -1; } } - + public static class PivotFieldComparator implements Comparator { - + @Override public int compare(PivotField o1, PivotField o2) { int compare = Integer.compare(o2.getCount(), o1.getCount()); @@ -1660,23 +2674,21 @@ public int compare(PivotField o1, PivotField o2) { compare = ((String) o2.getValue()).compareTo((String) o1.getValue()); } if (compare == 0) { - for (Map.Entry entry : o1.getFacetQuery().entrySet()) { - compare = entry.getValue().compareTo( - o2.getFacetQuery().get(entry.getKey())); + for (Map.Entry entry : o1.getFacetQuery().entrySet()) { + compare = entry.getValue().compareTo(o2.getFacetQuery().get(entry.getKey())); if (compare != 0) { break; } } if (compare == 0) { - compare = Integer.compare(o1.getFacetQuery().size(), - o2.getFacetQuery().size()); + compare = Integer.compare(o1.getFacetQuery().size(), o2.getFacetQuery().size()); } } if (compare == 0) { - for (@SuppressWarnings({"rawtypes"})RangeFacet entry : o1.getFacetRanges()) { + for (@SuppressWarnings({"rawtypes"}) RangeFacet entry : o1.getFacetRanges()) { boolean found = false; - for (@SuppressWarnings({"rawtypes"})RangeFacet otherRangeFacet : o2.getFacetRanges()) { - if (otherRangeFacet.getName().equals(entry.getName())) { + for (@SuppressWarnings({"rawtypes"}) RangeFacet otherRangeFacet : o2.getFacetRanges()) { + if (otherRangeFacet.getName().equals(entry.getName())) { found = true; } } @@ -1688,7 +2700,5 @@ public int compare(PivotField o1, PivotField o2) { } return compare; } - } - } diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotWhiteBoxTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotWhiteBoxTest.java index 89cd6875703..1f86e97e477 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotWhiteBoxTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotWhiteBoxTest.java @@ -16,6 +16,7 @@ */ package org.apache.solr.handler.component; +import java.util.List; import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.client.solrj.response.PivotField; import org.apache.solr.client.solrj.response.QueryResponse; @@ -23,15 +24,14 @@ import org.apache.solr.common.params.SolrParams; import org.junit.Test; -import java.util.List; - public class DistributedFacetPivotWhiteBoxTest extends BaseDistributedSearchTestCase { public DistributedFacetPivotWhiteBoxTest() { // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); } - + @Test @ShardsFixed(num = 4) public void test() throws Exception { @@ -39,23 +39,55 @@ public void test() throws Exception { del("*:*"); // NOTE: we use the literal (4 character) string "null" as a company name - // to help ensure there isn't any bugs where the literal string is treated as if it + // to help ensure there isn't any bugs where the literal string is treated as if it // were a true NULL value. 
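// Sketch, not part of the patch: the literal string "null" indexed below exists to
// catch code that conflates that token with a genuinely missing value. In pivot
// responses the two stay distinguishable; a hypothetical check over any company_t
// pivot level ("companyPivots" is an assumed List<PivotField>):
for (PivotField company : companyPivots) {
  if (company.getValue() == null) {
    // the facet.missing bucket: documents with no company_t value at all
  } else if ("null".equals(company.getValue())) {
    // an ordinary term bucket whose indexed token is the four characters n-u-l-l
  }
}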
index(id, 19, "place_t", "cardiff dublin", "company_t", "microsoft polecat", "price_ti", "15"); - index(id, 20, "place_t", "dublin", "company_t", "polecat microsoft null", "price_ti", "19", + index( + id, + 20, + "place_t", + "dublin", + "company_t", + "polecat microsoft null", + "price_ti", + "19", // this is the only doc to have solo_* fields, therefore only 1 shard has them // TODO: add enum field - blocked by SOLR-6682 - "solo_i", 42, "solo_s", "lonely", "solo_dt", "1976-03-06T01:23:45Z"); - index(id, 21, "place_t", "krakow london la dublin", "company_t", - "microsoft fujitsu null polecat", "price_ti", "29"); - index(id, 22, "place_t", "krakow london cardiff", "company_t", - "polecat null bbc", "price_ti", "39"); + "solo_i", + 42, + "solo_s", + "lonely", + "solo_dt", + "1976-03-06T01:23:45Z"); + index( + id, + 21, + "place_t", + "krakow london la dublin", + "company_t", + "microsoft fujitsu null polecat", + "price_ti", + "29"); + index( + id, + 22, + "place_t", + "krakow london cardiff", + "company_t", + "polecat null bbc", + "price_ti", + "39"); index(id, 23, "place_t", "krakow london", "company_t", "", "price_ti", "29"); index(id, 24, "place_t", "krakow la", "company_t", ""); index(id, 25, "company_t", "microsoft polecat null fujitsu null bbc", "price_ti", "59"); index(id, 26, "place_t", "krakow", "company_t", "null"); - index(id, 27, "place_t", "krakow cardiff dublin london la", - "company_t", "null microsoft polecat bbc fujitsu"); + index( + id, + 27, + "place_t", + "krakow cardiff dublin london la", + "company_t", + "null microsoft polecat bbc fujitsu"); index(id, 28, "place_t", "krakow cork", "company_t", "fujitsu rte"); commit(); @@ -68,72 +100,93 @@ public void test() throws Exception { doTestRefinementRequest(); } - /** - * recreates the initial request to a shard in a distributed query - * confirming that both top level stats, and per-pivot stats are returned. + /** + * recreates the initial request to a shard in a distributed query confirming that both top level + * stats, and per-pivot stats are returned. 
*/ private void doShardTestTopStats() throws Exception { - SolrParams params = params("facet", "true", - "q", "*:*", - // "wt", "javabin", - "facet.pivot", "{!stats=s1}place_t,company_t", - // "version", "2", - "start", "0", "rows", "0", - "fsv", "true", - "fl", "id,score", - "stats", "true", - "stats.field", "{!key=avg_price tag=s1}price_ti", - "f.place_t.facet.limit", "160", - "f.place_t.facet.pivot.mincount", "0", - "f.company_t.facet.limit", "160", - "f.company_t.facet.pivot.mincount", "0", - "isShard", "true", "distrib", "false"); + SolrParams params = + params( + "facet", + "true", + "q", + "*:*", + // "wt", "javabin", + "facet.pivot", + "{!stats=s1}place_t,company_t", + // "version", "2", + "start", + "0", + "rows", + "0", + "fsv", + "true", + "fl", + "id,score", + "stats", + "true", + "stats.field", + "{!key=avg_price tag=s1}price_ti", + "f.place_t.facet.limit", + "160", + "f.place_t.facet.pivot.mincount", + "0", + "f.company_t.facet.limit", + "160", + "f.company_t.facet.pivot.mincount", + "0", + "isShard", + "true", + "distrib", + "false"); QueryResponse rsp = queryServer(new ModifiableSolrParams(params)); - assertNotNull("initial shard request should include non-null top level stats", - rsp.getFieldStatsInfo()); - assertFalse("initial shard request should include top level stats", - rsp.getFieldStatsInfo().isEmpty()); + assertNotNull( + "initial shard request should include non-null top level stats", rsp.getFieldStatsInfo()); + assertFalse( + "initial shard request should include top level stats", rsp.getFieldStatsInfo().isEmpty()); List placePivots = rsp.getFacetPivot().get("place_t,company_t"); for (PivotField pivotField : placePivots) { - assertFalse("pivot stats should not be empty in initial request", - pivotField.getFieldStatsInfo().isEmpty()); + assertFalse( + "pivot stats should not be empty in initial request", + pivotField.getFieldStatsInfo().isEmpty()); } } - /** - * recreates a pivot refinement request to a shard in a distributed query - * confirming that the per-pivot stats are returned, but not the top level stats - * because they shouldn't be overcounted. + /** + * recreates a pivot refinement request to a shard in a distributed query confirming that the + * per-pivot stats are returned, but not the top level stats because they shouldn't be + * overcounted. 
*/ private void doTestRefinementRequest() throws Exception { - SolrParams params = params("facet.missing", "true", - "facet", "true", - "facet.limit", "4", - "distrib", "false", - // "wt", "javabin", - // "version", "2", - "rows", "0", - "facet.sort", "index", - "fpt0", "~krakow", - "facet.pivot.mincount", "-1", - "isShard", "true", - "facet.pivot", "{!fpt=0 stats=st1}place_t,company_t", - "stats", "false", - "stats.field", "{!key=sk1 tag=st1,st2}price_ti"); + SolrParams params = + params( + "facet.missing", "true", + "facet", "true", + "facet.limit", "4", + "distrib", "false", + // "wt", "javabin", + // "version", "2", + "rows", "0", + "facet.sort", "index", + "fpt0", "~krakow", + "facet.pivot.mincount", "-1", + "isShard", "true", + "facet.pivot", "{!fpt=0 stats=st1}place_t,company_t", + "stats", "false", + "stats.field", "{!key=sk1 tag=st1,st2}price_ti"); QueryResponse rsp = clients.get(0).query(new ModifiableSolrParams(params)); - assertNull("pivot refine request should *NOT* include top level stats", - rsp.getFieldStatsInfo()); + assertNull( + "pivot refine request should *NOT* include top level stats", rsp.getFieldStatsInfo()); List placePivots = rsp.getFacetPivot().get("place_t,company_t"); - assertEquals("asked to refine exactly one place", - 1, placePivots.size()); - assertFalse("pivot stats should not be empty in refinement request", - placePivots.get(0).getFieldStatsInfo().isEmpty()); - + assertEquals("asked to refine exactly one place", 1, placePivots.size()); + assertFalse( + "pivot stats should not be empty in refinement request", + placePivots.get(0).getFieldStatsInfo().isEmpty()); } } diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedMLTComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedMLTComponentTest.java index 4e91e0d4e89..9ed5c6f31a1 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DistributedMLTComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedMLTComponentTest.java @@ -18,7 +18,6 @@ import java.util.HashMap; import java.util.Map; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.lucene.util.TestUtil; import org.apache.solr.BaseDistributedSearchTestCase; @@ -34,20 +33,18 @@ import org.junit.Test; /** - * Test for distributed MoreLikeThisComponent's + * Test for distributed MoreLikeThisComponent's * * @since solr 4.1 - * * @see org.apache.solr.handler.component.MoreLikeThisComponent */ @Slow public class DistributedMLTComponentTest extends BaseDistributedSearchTestCase { - + private String requestHandlerName; - public DistributedMLTComponentTest() - { - stress=0; + public DistributedMLTComponentTest() { + stress = 0; } @Override @@ -72,7 +69,7 @@ public static void beforeClass() { public static void afterClass() { System.clearProperty("solr.statsCache"); } - + @Test @ShardsFixed(num = 3) @SuppressWarnings({"unchecked"}) @@ -84,25 +81,109 @@ public void test() throws Exception { index(id, "4", "lowerfilt", "ford", "lowerfilt1", "x"); index(id, "5", "lowerfilt", "ferrari", "lowerfilt1", "x"); index(id, "6", "lowerfilt", "jaguar", "lowerfilt1", "x"); - index(id, "7", "lowerfilt", "mclaren moon or the moon and moon moon shine and the moon but moon was good foxes too", "lowerfilt1", "x"); + index( + id, + "7", + "lowerfilt", + "mclaren moon or the moon and moon moon shine and the moon but moon was good foxes too", + "lowerfilt1", + "x"); index(id, "8", "lowerfilt", "sonata", "lowerfilt1", "x"); - index(id, "9", "lowerfilt", "The quick red 
fox jumped over the lazy big and large brown dogs.", "lowerfilt1", "x"); + index( + id, + "9", + "lowerfilt", + "The quick red fox jumped over the lazy big and large brown dogs.", + "lowerfilt1", + "x"); index(id, "10", "lowerfilt", "blue", "lowerfilt1", "x"); index(id, "12", "lowerfilt", "glue", "lowerfilt1", "x"); - index(id, "13", "lowerfilt", "The quote red fox jumped over the lazy brown dogs.", "lowerfilt1", "y"); - index(id, "14", "lowerfilt", "The quote red fox jumped over the lazy brown dogs.", "lowerfilt1", "y"); - index(id, "15", "lowerfilt", "The fat red fox jumped over the lazy brown dogs.", "lowerfilt1", "y"); - index(id, "16", "lowerfilt", "The slim red fox jumped over the lazy brown dogs.", "lowerfilt1", "y"); - index(id, "17", "lowerfilt", "The quote red fox jumped moon over the lazy brown dogs moon. Of course moon. Foxes and moon come back to the foxes and moon", "lowerfilt1", "y"); - index(id, "18", "lowerfilt", "The quote red fox jumped over the lazy brown dogs.", "lowerfilt1", "y"); - index(id, "19", "lowerfilt", "The hose red fox jumped over the lazy brown dogs.", "lowerfilt1", "y"); - index(id, "20", "lowerfilt", "The quote red fox jumped over the lazy brown dogs.", "lowerfilt1", "y"); - index(id, "21", "lowerfilt", "The court red fox jumped over the lazy brown dogs.", "lowerfilt1", "y"); - index(id, "22", "lowerfilt", "The quote red fox jumped over the lazy brown dogs.", "lowerfilt1", "y"); - index(id, "23", "lowerfilt", "The quote red fox jumped over the lazy brown dogs.", "lowerfilt1", "y"); - index(id, "24", "lowerfilt", "The file red fox jumped over the lazy brown dogs.", "lowerfilt1", "y"); + index( + id, + "13", + "lowerfilt", + "The quote red fox jumped over the lazy brown dogs.", + "lowerfilt1", + "y"); + index( + id, + "14", + "lowerfilt", + "The quote red fox jumped over the lazy brown dogs.", + "lowerfilt1", + "y"); + index( + id, + "15", + "lowerfilt", + "The fat red fox jumped over the lazy brown dogs.", + "lowerfilt1", + "y"); + index( + id, + "16", + "lowerfilt", + "The slim red fox jumped over the lazy brown dogs.", + "lowerfilt1", + "y"); + index( + id, + "17", + "lowerfilt", + "The quote red fox jumped moon over the lazy brown dogs moon. Of course moon. 
Foxes and moon come back to the foxes and moon", + "lowerfilt1", + "y"); + index( + id, + "18", + "lowerfilt", + "The quote red fox jumped over the lazy brown dogs.", + "lowerfilt1", + "y"); + index( + id, + "19", + "lowerfilt", + "The hose red fox jumped over the lazy brown dogs.", + "lowerfilt1", + "y"); + index( + id, + "20", + "lowerfilt", + "The quote red fox jumped over the lazy brown dogs.", + "lowerfilt1", + "y"); + index( + id, + "21", + "lowerfilt", + "The court red fox jumped over the lazy brown dogs.", + "lowerfilt1", + "y"); + index( + id, + "22", + "lowerfilt", + "The quote red fox jumped over the lazy brown dogs.", + "lowerfilt1", + "y"); + index( + id, + "23", + "lowerfilt", + "The quote red fox jumped over the lazy brown dogs.", + "lowerfilt1", + "y"); + index( + id, + "24", + "lowerfilt", + "The file red fox jumped over the lazy brown dogs.", + "lowerfilt1", + "y"); index(id, "25", "lowerfilt", "rod fix", "lowerfilt1", "y"); - + commit(); handle.clear(); @@ -110,15 +191,35 @@ public void test() throws Exception { handle.put("maxScore", SKIPVAL); // we care only about the mlt results handle.put("response", SKIP); - - // currently distrib mlt is sorting by score (even though it's not really comparable across shards) - // so it may not match the sort of single shard mlt + + // currently distrib mlt is sorting by score (even though it's not really comparable across + // shards) so it may not match the sort of single shard mlt handle.put("17", UNORDERED); - - query("q", "match_none", "mlt", "true", "mlt.fl", "lowerfilt", "qt", requestHandlerName, "shards.qt", requestHandlerName); - - query("q", "lowerfilt:sonata", "mlt", "true", "mlt.fl", "lowerfilt", "qt", requestHandlerName, "shards.qt", requestHandlerName); - + + query( + "q", + "match_none", + "mlt", + "true", + "mlt.fl", + "lowerfilt", + "qt", + requestHandlerName, + "shards.qt", + requestHandlerName); + + query( + "q", + "lowerfilt:sonata", + "mlt", + "true", + "mlt.fl", + "lowerfilt", + "qt", + requestHandlerName, + "shards.qt", + requestHandlerName); + handle.put("24", UNORDERED); handle.put("23", UNORDERED); handle.put("22", UNORDERED); @@ -132,39 +233,125 @@ public void test() throws Exception { handle.put("14", UNORDERED); handle.put("13", UNORDERED); handle.put("7", UNORDERED); - + // keep in mind that MLT params influence stats that are calulated // per shard - because of this, depending on params, distrib and single // shard queries will not match. 
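// Sketch, not part of the patch: the per-shard sensitivity noted above stems from
// MLT's term-selection thresholds being evaluated against each shard's local term
// statistics. A hypothetical single-request equivalent of the queries below,
// using the MoreLikeThisParams constants the test imports:
SolrQuery mlt = new SolrQuery("lowerfilt:moon");
mlt.set(MoreLikeThisParams.MLT, true);                      // mlt=true
mlt.set(MoreLikeThisParams.SIMILARITY_FIELDS, "lowerfilt"); // mlt.fl
mlt.set(MoreLikeThisParams.MIN_TERM_FREQ, 2);               // mlt.mintf: in-document floor
mlt.set(MoreLikeThisParams.MIN_DOC_FREQ, 1);                // mlt.mindf: per-shard doc floor
mlt.set(MoreLikeThisParams.DOC_COUNT, 20);                  // mlt.count, generous so both orderings include all hits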
- + // because distrib and single node do not currently sort exactly the same, // we ask for an mlt.count of 20 to ensure both include all results - - query("q", "lowerfilt:moon", "fl", id, MoreLikeThisParams.MIN_TERM_FREQ, 2, - MoreLikeThisParams.MIN_DOC_FREQ, 1, "sort", "id_i1 desc", "mlt", "true", - "mlt.fl", "lowerfilt", "qt", requestHandlerName, "shards.qt", - requestHandlerName, "mlt.count", "20"); - - query("q", "lowerfilt:fox", "fl", id, MoreLikeThisParams.MIN_TERM_FREQ, 1, - MoreLikeThisParams.MIN_DOC_FREQ, 1, "sort", "id_i1 desc", "mlt", "true", - "mlt.fl", "lowerfilt", "qt", requestHandlerName, "shards.qt", - requestHandlerName, "mlt.count", "20"); - - query("q", "lowerfilt:the red fox", "fl", id, MoreLikeThisParams.MIN_TERM_FREQ, 1, - MoreLikeThisParams.MIN_DOC_FREQ, 1, "sort", "id_i1 desc", "mlt", "true", - "mlt.fl", "lowerfilt", "qt", requestHandlerName, "shards.qt", - requestHandlerName, "mlt.count", "20"); - - query("q", "lowerfilt:blue moon", "fl", id, MoreLikeThisParams.MIN_TERM_FREQ, 1, - MoreLikeThisParams.MIN_DOC_FREQ, 1, "sort", "id_i1 desc", "mlt", "true", - "mlt.fl", "lowerfilt", "qt", requestHandlerName, "shards.qt", - requestHandlerName, "mlt.count", "20"); + + query( + "q", + "lowerfilt:moon", + "fl", + id, + MoreLikeThisParams.MIN_TERM_FREQ, + 2, + MoreLikeThisParams.MIN_DOC_FREQ, + 1, + "sort", + "id_i1 desc", + "mlt", + "true", + "mlt.fl", + "lowerfilt", + "qt", + requestHandlerName, + "shards.qt", + requestHandlerName, + "mlt.count", + "20"); + + query( + "q", + "lowerfilt:fox", + "fl", + id, + MoreLikeThisParams.MIN_TERM_FREQ, + 1, + MoreLikeThisParams.MIN_DOC_FREQ, + 1, + "sort", + "id_i1 desc", + "mlt", + "true", + "mlt.fl", + "lowerfilt", + "qt", + requestHandlerName, + "shards.qt", + requestHandlerName, + "mlt.count", + "20"); + + query( + "q", + "lowerfilt:the red fox", + "fl", + id, + MoreLikeThisParams.MIN_TERM_FREQ, + 1, + MoreLikeThisParams.MIN_DOC_FREQ, + 1, + "sort", + "id_i1 desc", + "mlt", + "true", + "mlt.fl", + "lowerfilt", + "qt", + requestHandlerName, + "shards.qt", + requestHandlerName, + "mlt.count", + "20"); + + query( + "q", + "lowerfilt:blue moon", + "fl", + id, + MoreLikeThisParams.MIN_TERM_FREQ, + 1, + MoreLikeThisParams.MIN_DOC_FREQ, + 1, + "sort", + "id_i1 desc", + "mlt", + "true", + "mlt.fl", + "lowerfilt", + "qt", + requestHandlerName, + "shards.qt", + requestHandlerName, + "mlt.count", + "20"); // let's query by specifying multiple mlt.fl as comma-separated values - QueryResponse response = query("q", "lowerfilt:moon", "fl", id, MoreLikeThisParams.MIN_TERM_FREQ, 2, - MoreLikeThisParams.MIN_DOC_FREQ, 1, "sort", "id_i1 desc", "mlt", "true", - "mlt.fl", "lowerfilt1,lowerfilt", "qt", requestHandlerName, "shards.qt", - requestHandlerName, "mlt.count", "20"); + QueryResponse response = + query( + "q", + "lowerfilt:moon", + "fl", + id, + MoreLikeThisParams.MIN_TERM_FREQ, + 2, + MoreLikeThisParams.MIN_DOC_FREQ, + 1, + "sort", + "id_i1 desc", + "mlt", + "true", + "mlt.fl", + "lowerfilt1,lowerfilt", + "qt", + requestHandlerName, + "shards.qt", + requestHandlerName, + "mlt.count", + "20"); NamedList moreLikeThis = (NamedList) response.getResponse().get("moreLikeThis"); Map idVsMLTCount = new HashMap<>(); for (Map.Entry entry : moreLikeThis) { @@ -173,10 +360,30 @@ public void test() throws Exception { } // let's query by specifying multiple mlt.fl as multiple request parameters - response = query("q", "lowerfilt:moon", "fl", id, MoreLikeThisParams.MIN_TERM_FREQ, 2, - MoreLikeThisParams.MIN_DOC_FREQ, 1, "sort", "id_i1 desc", "mlt", "true", - 
"mlt.fl", "lowerfilt1", "mlt.fl", "lowerfilt", "qt", requestHandlerName, "shards.qt", - requestHandlerName, "mlt.count", "20"); + response = + query( + "q", + "lowerfilt:moon", + "fl", + id, + MoreLikeThisParams.MIN_TERM_FREQ, + 2, + MoreLikeThisParams.MIN_DOC_FREQ, + 1, + "sort", + "id_i1 desc", + "mlt", + "true", + "mlt.fl", + "lowerfilt1", + "mlt.fl", + "lowerfilt", + "qt", + requestHandlerName, + "shards.qt", + requestHandlerName, + "mlt.count", + "20"); moreLikeThis = (NamedList) response.getResponse().get("moreLikeThis"); for (Map.Entry entry : moreLikeThis) { String key = entry.getKey(); diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentCustomSortTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentCustomSortTest.java index 7e72cc7970f..4b612e242d3 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentCustomSortTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentCustomSortTest.java @@ -17,6 +17,9 @@ package org.apache.solr.handler.component; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Objects; import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.client.solrj.SolrServerException; @@ -26,10 +29,6 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.Objects; - /** * Test for QueryComponent's distributed querying * @@ -53,41 +52,110 @@ public static void setUpBeforeClass() throws Exception { public void test() throws Exception { del("*:*"); - index(id, "1", "text", "a", "payload", ByteBuffer.wrap(new byte[] { 0x12, 0x62, 0x15 }), // 2 - // quick check to prove "*" dynamicField hasn't been broken by somebody mucking with schema - "asdfasdf_field_should_match_catchall_dynamic_field_adsfasdf", "value"); - index(id, "2", "text", "b", "payload", ByteBuffer.wrap(new byte[] { 0x25, 0x21, 0x16 })); // 5 - index(id, "3", "text", "a", "payload", ByteBuffer.wrap(new byte[] { 0x35, 0x32, 0x58 })); // 8 - index(id, "4", "text", "b", "payload", ByteBuffer.wrap(new byte[] { 0x25, 0x21, 0x15 })); // 4 - index(id, "5", "text", "a", "payload", ByteBuffer.wrap(new byte[] { 0x35, 0x35, 0x10, 0x00 })); // 9 - index(id, "6", "text", "c", "payload", ByteBuffer.wrap(new byte[] { 0x1a, 0x2b, 0x3c, 0x00, 0x00, 0x03 })); // 3 - index(id, "7", "text", "c", "payload", ByteBuffer.wrap(new byte[] { 0x00, 0x3c, 0x73 })); // 1 - index(id, "8", "text", "c", "payload", ByteBuffer.wrap(new byte[] { 0x59, 0x2d, 0x4d })); // 11 - index(id, "9", "text", "a", "payload", ByteBuffer.wrap(new byte[] { 0x39, 0x79, 0x7a })); // 10 - index(id, "10", "text", "b", "payload", ByteBuffer.wrap(new byte[] { 0x31, 0x39, 0x7c })); // 6 - index(id, "11", "text", "d", "payload", ByteBuffer.wrap(new byte[] { (byte)0xff, (byte)0xaf, (byte)0x9c })); // 13 - index(id, "12", "text", "d", "payload", ByteBuffer.wrap(new byte[] { 0x34, (byte)0xdd, 0x4d })); // 7 - index(id, "13", "text", "d", "payload", ByteBuffer.wrap(new byte[] { (byte)0x80, 0x11, 0x33 })); // 12 + index( + id, + "1", + "text", + "a", + "payload", + ByteBuffer.wrap(new byte[] {0x12, 0x62, 0x15}), // 2 + // quick check to prove "*" dynamicField hasn't been broken by somebody mucking with schema + "asdfasdf_field_should_match_catchall_dynamic_field_adsfasdf", + "value"); + 
index(id, "2", "text", "b", "payload", ByteBuffer.wrap(new byte[] {0x25, 0x21, 0x16})); // 5 + index(id, "3", "text", "a", "payload", ByteBuffer.wrap(new byte[] {0x35, 0x32, 0x58})); // 8 + index(id, "4", "text", "b", "payload", ByteBuffer.wrap(new byte[] {0x25, 0x21, 0x15})); // 4 + index( + id, + "5", + "text", + "a", + "payload", + ByteBuffer.wrap(new byte[] {0x35, 0x35, 0x10, 0x00})); // 9 + index( + id, + "6", + "text", + "c", + "payload", + ByteBuffer.wrap(new byte[] {0x1a, 0x2b, 0x3c, 0x00, 0x00, 0x03})); // 3 + index(id, "7", "text", "c", "payload", ByteBuffer.wrap(new byte[] {0x00, 0x3c, 0x73})); // 1 + index(id, "8", "text", "c", "payload", ByteBuffer.wrap(new byte[] {0x59, 0x2d, 0x4d})); // 11 + index(id, "9", "text", "a", "payload", ByteBuffer.wrap(new byte[] {0x39, 0x79, 0x7a})); // 10 + index(id, "10", "text", "b", "payload", ByteBuffer.wrap(new byte[] {0x31, 0x39, 0x7c})); // 6 + index( + id, + "11", + "text", + "d", + "payload", + ByteBuffer.wrap(new byte[] {(byte) 0xff, (byte) 0xaf, (byte) 0x9c})); // 13 + index( + id, + "12", + "text", + "d", + "payload", + ByteBuffer.wrap(new byte[] {0x34, (byte) 0xdd, 0x4d})); // 7 + index( + id, + "13", + "text", + "d", + "payload", + ByteBuffer.wrap(new byte[] {(byte) 0x80, 0x11, 0x33})); // 12 commit(); QueryResponse rsp; rsp = query("q", "*:*", "fl", "id", "sort", "payload asc", "rows", "20"); - assertFieldValues(rsp.getResults(), id, "7", "1", "6", "4", "2", "10", "12", "3", "5", "9", "8", "13", "11"); + assertFieldValues( + rsp.getResults(), id, "7", "1", "6", "4", "2", "10", "12", "3", "5", "9", "8", "13", "11"); rsp = query("q", "*:*", "fl", "id", "sort", "payload desc", "rows", "20"); - assertFieldValues(rsp.getResults(), id, "11", "13", "8", "9", "5", "3", "12", "10", "2", "4", "6", "1", "7"); + assertFieldValues( + rsp.getResults(), id, "11", "13", "8", "9", "5", "3", "12", "10", "2", "4", "6", "1", "7"); // SOLR-6744 rsp = query("q", "*:*", "fl", "key:id", "sort", "payload asc", "rows", "20"); - assertFieldValues(rsp.getResults(), "key", "7", "1", "6", "4", "2", "10", "12", "3", "5", "9", "8", "13", "11"); + assertFieldValues( + rsp.getResults(), + "key", + "7", + "1", + "6", + "4", + "2", + "10", + "12", + "3", + "5", + "9", + "8", + "13", + "11"); rsp = query("q", "*:*", "fl", "key:id,id:text", "sort", "payload asc", "rows", "20"); - assertFieldValues(rsp.getResults(), "key", "7", "1", "6", "4", "2", "10", "12", "3", "5", "9", "8", "13", "11"); - + assertFieldValues( + rsp.getResults(), + "key", + "7", + "1", + "6", + "4", + "2", + "10", + "12", + "3", + "5", + "9", + "8", + "13", + "11"); + rsp = query("q", "text:a", "fl", "id", "sort", "payload asc", "rows", "20"); assertFieldValues(rsp.getResults(), id, "1", "3", "5", "9"); rsp = query("q", "text:a", "fl", "id", "sort", "payload desc", "rows", "20"); assertFieldValues(rsp.getResults(), id, "9", "5", "3", "1"); - + rsp = query("q", "text:b", "fl", "id", "sort", "payload asc", "rows", "20"); assertFieldValues(rsp.getResults(), id, "4", "2", "10"); rsp = query("q", "text:b", "fl", "id", "sort", "payload desc", "rows", "20"); @@ -101,43 +169,99 @@ public void test() throws Exception { assertFieldValues(rsp.getResults(), id, "7", "6", "8"); rsp = query("q", "text:c", "fl", "id", "sort", "payload desc", "rows", "20"); assertFieldValues(rsp.getResults(), id, "8", "6", "7"); - + rsp = query("q", "text:d", "fl", "id", "sort", "payload asc", "rows", "20"); assertFieldValues(rsp.getResults(), id, "12", "13", "11"); rsp = query("q", "text:d", "fl", "id", "sort", "payload desc", 
"rows", "20"); assertFieldValues(rsp.getResults(), id, "11", "13", "12"); // sanity check function sorting - rsp = query("q", "id_i:[1 TO 10]", "fl", "id", "rows", "20", - "sort", "abs(sub(5,id_i)) asc, id desc"); - assertFieldValues(rsp.getResults(), id, "5", "6","4", "7","3" , "8","2" , "9","1" , "10" ); + rsp = + query( + "q", + "id_i:[1 TO 10]", + "fl", + "id", + "rows", + "20", + "sort", + "abs(sub(5,id_i)) asc, id desc"); + assertFieldValues(rsp.getResults(), id, "5", "6", "4", "7", "3", "8", "2", "9", "1", "10"); - // Add two more docs with same payload as in doc #4 - index(id, "14", "text", "b", "payload", ByteBuffer.wrap(new byte[] { 0x25, 0x21, 0x15 })); - index(id, "15", "text", "b", "payload", ByteBuffer.wrap(new byte[] { 0x25, 0x21, 0x15 })); + // Add two more docs with same payload as in doc #4 + index(id, "14", "text", "b", "payload", ByteBuffer.wrap(new byte[] {0x25, 0x21, 0x15})); + index(id, "15", "text", "b", "payload", ByteBuffer.wrap(new byte[] {0x25, 0x21, 0x15})); // Add three more docs with same payload as in doc #10 - index(id, "16", "text", "b", "payload", ByteBuffer.wrap(new byte[] { 0x31, 0x39, 0x7c })); - index(id, "17", "text", "b", "payload", ByteBuffer.wrap(new byte[] { 0x31, 0x39, 0x7c })); - index(id, "18", "text", "b", "payload", ByteBuffer.wrap(new byte[] { 0x31, 0x39, 0x7c })); - + index(id, "16", "text", "b", "payload", ByteBuffer.wrap(new byte[] {0x31, 0x39, 0x7c})); + index(id, "17", "text", "b", "payload", ByteBuffer.wrap(new byte[] {0x31, 0x39, 0x7c})); + index(id, "18", "text", "b", "payload", ByteBuffer.wrap(new byte[] {0x31, 0x39, 0x7c})); + commit(); - + rsp = query("q", "*:*", "fl", "id", "sort", "payload asc, id_i desc", "rows", "20"); - assertFieldValues(rsp.getResults(), id, "7", "1", "6", "15","14","4", "2", "18","17","16","10", "12", "3", "5", "9", "8", "13", "11"); + assertFieldValues( + rsp.getResults(), + id, + "7", + "1", + "6", + "15", + "14", + "4", + "2", + "18", + "17", + "16", + "10", + "12", + "3", + "5", + "9", + "8", + "13", + "11"); rsp = query("q", "*:*", "fl", "id", "sort", "payload desc, id_i asc", "rows", "20"); - assertFieldValues(rsp.getResults(), id, "11", "13", "8", "9", "5", "3", "12", "10","16","17","18", "2", "4","14","15", "6", "1", "7"); + assertFieldValues( + rsp.getResults(), + id, + "11", + "13", + "8", + "9", + "5", + "3", + "12", + "10", + "16", + "17", + "18", + "2", + "4", + "14", + "15", + "6", + "1", + "7"); // Regression check on timeAllowed in combination with sorting, SOLR-14758 // Should see either a complete result or a partial result, but never an NPE - rsp = queryAllowPartialResults("q", "text:d", "fl", "id", "sort", "payload desc", "rows", "20", "timeAllowed", "1"); - if (!Objects.equals(Boolean.TRUE, rsp.getHeader().getBooleanArg(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))) { + rsp = + queryAllowPartialResults( + "q", "text:d", "fl", "id", "sort", "payload desc", "rows", "20", "timeAllowed", "1"); + if (!Objects.equals( + Boolean.TRUE, + rsp.getHeader().getBooleanArg(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))) { assertFieldValues(rsp.getResults(), id, "11", "13", "12"); } } - - /** Modified version of {@link BaseDistributedSearchTestCase#query(Object...)} that allows partial results. */ - private QueryResponse queryAllowPartialResults(Object... q) throws SolrServerException, IOException { + + /** + * Modified version of {@link BaseDistributedSearchTestCase#query(Object...)} that allows partial + * results. + */ + private QueryResponse queryAllowPartialResults(Object... 
q) + throws SolrServerException, IOException { ModifiableSolrParams params = createParams(q); setDistributedParams(params); return queryServer(params); diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentOptimizationTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentOptimizationTest.java index 9b583167345..bf1d45a3d8a 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentOptimizationTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentOptimizationTest.java @@ -23,7 +23,6 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; - import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -39,10 +38,10 @@ import org.junit.Test; /** - * Test for QueryComponent's distributed querying optimization. - * If the "fl" param is just "id" or just "id,score", all document data to return is already fetched by STAGE_EXECUTE_QUERY. - * The second STAGE_GET_FIELDS query is completely unnecessary. - * Eliminating that 2nd HTTP request can make a big difference in overall performance. + * Test for QueryComponent's distributed querying optimization. If the "fl" param is just "id" or + * just "id,score", all document data to return is already fetched by STAGE_EXECUTE_QUERY. The + * second STAGE_GET_FIELDS query is completely unnecessary. Eliminating that 2nd HTTP request can + * make a big difference in overall performance. * * @see QueryComponent */ @@ -63,27 +62,159 @@ public static void setupCluster() throws Exception { CollectionAdminRequest.createCollection(COLLECTION, "conf", 3, 1) .processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT); - cluster.getSolrClient().waitForState(COLLECTION, DEFAULT_TIMEOUT, TimeUnit.SECONDS, - (n, c) -> DocCollection.isFullyActive(n, c, sliceCount, 1)); + cluster + .getSolrClient() + .waitForState( + COLLECTION, + DEFAULT_TIMEOUT, + TimeUnit.SECONDS, + (n, c) -> DocCollection.isFullyActive(n, c, sliceCount, 1)); new UpdateRequest() - .add(sdoc(id, "1", "text", "a", "test_sS", "21", "payload", ByteBuffer.wrap(new byte[]{0x12, 0x62, 0x15}))) - .add(sdoc(id, "2", "text", "b", "test_sS", "22", "payload", ByteBuffer.wrap(new byte[]{0x25, 0x21, 0x16}))) // 5 - .add(sdoc(id, "3", "text", "a", "test_sS", "23", "payload", ByteBuffer.wrap(new byte[]{0x35, 0x32, 0x58}))) // 8 - .add(sdoc(id, "4", "text", "b", "test_sS", "24", "payload", ByteBuffer.wrap(new byte[]{0x25, 0x21, 0x15}))) // 4 - .add(sdoc(id, "5", "text", "a", "test_sS", "25", "payload", ByteBuffer.wrap(new byte[]{0x35, 0x35, 0x10, 0x00}))) // 9 - .add(sdoc(id, "6", "text", "c", "test_sS", "26", "payload", ByteBuffer.wrap(new byte[]{0x1a, 0x2b, 0x3c, 0x00, 0x00, 0x03}))) // 3 - .add(sdoc(id, "7", "text", "c", "test_sS", "27", "payload", ByteBuffer.wrap(new byte[]{0x00, 0x3c, 0x73}))) // 1 - .add(sdoc(id, "8", "text", "c", "test_sS", "28", "payload", ByteBuffer.wrap(new byte[]{0x59, 0x2d, 0x4d}))) // 11 - .add(sdoc(id, "9", "text", "a", "test_sS", "29", "payload", ByteBuffer.wrap(new byte[]{0x39, 0x79, 0x7a}))) // 10 - .add(sdoc(id, "10", "text", "b", "test_sS", "30", "payload", ByteBuffer.wrap(new byte[]{0x31, 0x39, 0x7c}))) // 6 - .add(sdoc(id, "11", "text", "d", "test_sS", "31", "payload", ByteBuffer.wrap(new byte[]{(byte) 0xff, (byte) 0xaf, (byte) 0x9c}))) // 13 - .add(sdoc(id, "12", "text", "d", "test_sS", "32", "payload", 
ByteBuffer.wrap(new byte[]{0x34, (byte) 0xdd, 0x4d}))) // 7 - .add(sdoc(id, "13", "text", "d", "test_sS", "33", "payload", ByteBuffer.wrap(new byte[]{(byte) 0x80, 0x11, 0x33}))) // 12 + .add( + sdoc( + id, + "1", + "text", + "a", + "test_sS", + "21", + "payload", + ByteBuffer.wrap(new byte[] {0x12, 0x62, 0x15}))) + .add( + sdoc( + id, + "2", + "text", + "b", + "test_sS", + "22", + "payload", + ByteBuffer.wrap(new byte[] {0x25, 0x21, 0x16}))) // 5 + .add( + sdoc( + id, + "3", + "text", + "a", + "test_sS", + "23", + "payload", + ByteBuffer.wrap(new byte[] {0x35, 0x32, 0x58}))) // 8 + .add( + sdoc( + id, + "4", + "text", + "b", + "test_sS", + "24", + "payload", + ByteBuffer.wrap(new byte[] {0x25, 0x21, 0x15}))) // 4 + .add( + sdoc( + id, + "5", + "text", + "a", + "test_sS", + "25", + "payload", + ByteBuffer.wrap(new byte[] {0x35, 0x35, 0x10, 0x00}))) // 9 + .add( + sdoc( + id, + "6", + "text", + "c", + "test_sS", + "26", + "payload", + ByteBuffer.wrap(new byte[] {0x1a, 0x2b, 0x3c, 0x00, 0x00, 0x03}))) // 3 + .add( + sdoc( + id, + "7", + "text", + "c", + "test_sS", + "27", + "payload", + ByteBuffer.wrap(new byte[] {0x00, 0x3c, 0x73}))) // 1 + .add( + sdoc( + id, + "8", + "text", + "c", + "test_sS", + "28", + "payload", + ByteBuffer.wrap(new byte[] {0x59, 0x2d, 0x4d}))) // 11 + .add( + sdoc( + id, + "9", + "text", + "a", + "test_sS", + "29", + "payload", + ByteBuffer.wrap(new byte[] {0x39, 0x79, 0x7a}))) // 10 + .add( + sdoc( + id, + "10", + "text", + "b", + "test_sS", + "30", + "payload", + ByteBuffer.wrap(new byte[] {0x31, 0x39, 0x7c}))) // 6 + .add( + sdoc( + id, + "11", + "text", + "d", + "test_sS", + "31", + "payload", + ByteBuffer.wrap(new byte[] {(byte) 0xff, (byte) 0xaf, (byte) 0x9c}))) // 13 + .add( + sdoc( + id, + "12", + "text", + "d", + "test_sS", + "32", + "payload", + ByteBuffer.wrap(new byte[] {0x34, (byte) 0xdd, 0x4d}))) // 7 + .add( + sdoc( + id, + "13", + "text", + "d", + "test_sS", + "33", + "payload", + ByteBuffer.wrap(new byte[] {(byte) 0x80, 0x11, 0x33}))) // 12 // SOLR-6545, wild card field list - .add(sdoc(id, "19", "text", "d", "cat_a_sS", "1", "dynamic_s", "2", "payload", ByteBuffer.wrap(new byte[]{(byte) 0x80, 0x11, 0x34}))) + .add( + sdoc( + id, + "19", + "text", + "d", + "cat_a_sS", + "1", + "dynamic_s", + "2", + "payload", + ByteBuffer.wrap(new byte[] {(byte) 0x80, 0x11, 0x34}))) .commit(cluster.getSolrClient(), COLLECTION); - } private static final String id = "id"; @@ -92,82 +223,304 @@ public static void setupCluster() throws Exception { public void testBasics() throws Exception { QueryResponse rsp; - rsp = cluster.getSolrClient().query(COLLECTION, - new SolrQuery("q", "*:*", "fl", "id,test_sS,score", "sort", "payload asc", "rows", "20")); - assertFieldValues(rsp.getResults(), id, "7", "1", "6", "4", "2", "10", "12", "3", "5", "9", "8", "13", "19", "11"); - assertFieldValues(rsp.getResults(), "test_sS", "27", "21", "26", "24", "22", "30", "32", "23", "25", "29", "28", "33", null, "31"); - rsp = cluster.getSolrClient().query(COLLECTION, new SolrQuery("q", "*:*", "fl", "id,score", "sort", "payload desc", "rows", "20")); - assertFieldValues(rsp.getResults(), id, "11", "19", "13", "8", "9", "5", "3", "12", "10", "2", "4", "6", "1", "7"); - + rsp = + cluster + .getSolrClient() + .query( + COLLECTION, + new SolrQuery( + "q", "*:*", "fl", "id,test_sS,score", "sort", "payload asc", "rows", "20")); + assertFieldValues( + rsp.getResults(), + id, + "7", + "1", + "6", + "4", + "2", + "10", + "12", + "3", + "5", + "9", + "8", + "13", + "19", + "11"); + assertFieldValues( + 
rsp.getResults(), + "test_sS", + "27", + "21", + "26", + "24", + "22", + "30", + "32", + "23", + "25", + "29", + "28", + "33", + null, + "31"); + rsp = + cluster + .getSolrClient() + .query( + COLLECTION, + new SolrQuery("q", "*:*", "fl", "id,score", "sort", "payload desc", "rows", "20")); + assertFieldValues( + rsp.getResults(), + id, + "11", + "19", + "13", + "8", + "9", + "5", + "3", + "12", + "10", + "2", + "4", + "6", + "1", + "7"); } @Test public void testFieldList() throws Exception { // works with just fl=id as well - QueryResponse rsp = cluster.getSolrClient().query(COLLECTION, - new SolrQuery("q", "*:*", "fl", "id", "sort", "payload desc", "rows", "20")); - assertFieldValues(rsp.getResults(), id, "11", "19", "13", "8", "9", "5", "3", "12", "10", "2", "4", "6", "1", "7"); - - rsp = cluster.getSolrClient().query(COLLECTION, - new SolrQuery("q", "*:*", "fl", "id,score", "sort", "payload asc", "rows", "20")); - assertFieldValues(rsp.getResults(), id, "7", "1", "6", "4", "2", "10", "12", "3", "5", "9", "8", "13", "19", "11"); + QueryResponse rsp = + cluster + .getSolrClient() + .query( + COLLECTION, + new SolrQuery("q", "*:*", "fl", "id", "sort", "payload desc", "rows", "20")); + assertFieldValues( + rsp.getResults(), + id, + "11", + "19", + "13", + "8", + "9", + "5", + "3", + "12", + "10", + "2", + "4", + "6", + "1", + "7"); + + rsp = + cluster + .getSolrClient() + .query( + COLLECTION, + new SolrQuery("q", "*:*", "fl", "id,score", "sort", "payload asc", "rows", "20")); + assertFieldValues( + rsp.getResults(), + id, + "7", + "1", + "6", + "4", + "2", + "10", + "12", + "3", + "5", + "9", + "8", + "13", + "19", + "11"); } @Test public void testDistribSinglePass() throws Exception { - QueryResponse rsp = cluster.getSolrClient().query(COLLECTION, - new SolrQuery("q", "*:*", "fl", "id,test_sS,score", "sort", "payload asc", "rows", "20", "distrib.singlePass", "true")); - assertFieldValues(rsp.getResults(), id, "7", "1", "6", "4", "2", "10", "12", "3", "5", "9", "8", "13", "19", "11"); - assertFieldValues(rsp.getResults(), "test_sS", "27", "21", "26", "24", "22", "30", "32", "23", "25", "29", "28", "33", null, "31"); - - - QueryResponse nonDistribRsp = cluster.getSolrClient().query(COLLECTION, - new SolrQuery("q", "*:*", "fl", "id,test_sS,score", "sort", "payload asc", "rows", "20")); - compareResponses(rsp, nonDistribRsp); // make sure distrib and distrib.singlePass return the same thing - - nonDistribRsp = cluster.getSolrClient().query(COLLECTION, - new SolrQuery("q", "*:*", "fl", "score", "sort", "payload asc", "rows", "20")); - rsp = cluster.getSolrClient().query(COLLECTION, - new SolrQuery("q", "*:*", "fl", "score", "sort", "payload asc", "rows", "20", "distrib.singlePass", "true")); - compareResponses(rsp, nonDistribRsp); // make sure distrib and distrib.singlePass return the same thing - + QueryResponse rsp = + cluster + .getSolrClient() + .query( + COLLECTION, + new SolrQuery( + "q", + "*:*", + "fl", + "id,test_sS,score", + "sort", + "payload asc", + "rows", + "20", + "distrib.singlePass", + "true")); + assertFieldValues( + rsp.getResults(), + id, + "7", + "1", + "6", + "4", + "2", + "10", + "12", + "3", + "5", + "9", + "8", + "13", + "19", + "11"); + assertFieldValues( + rsp.getResults(), + "test_sS", + "27", + "21", + "26", + "24", + "22", + "30", + "32", + "23", + "25", + "29", + "28", + "33", + null, + "31"); + + QueryResponse nonDistribRsp = + cluster + .getSolrClient() + .query( + COLLECTION, + new SolrQuery( + "q", "*:*", "fl", "id,test_sS,score", "sort", "payload asc", 
"rows", "20")); + compareResponses( + rsp, nonDistribRsp); // make sure distrib and distrib.singlePass return the same thing + + nonDistribRsp = + cluster + .getSolrClient() + .query( + COLLECTION, + new SolrQuery("q", "*:*", "fl", "score", "sort", "payload asc", "rows", "20")); + rsp = + cluster + .getSolrClient() + .query( + COLLECTION, + new SolrQuery( + "q", + "*:*", + "fl", + "score", + "sort", + "payload asc", + "rows", + "20", + "distrib.singlePass", + "true")); + compareResponses( + rsp, nonDistribRsp); // make sure distrib and distrib.singlePass return the same thing } @Test public void testOptimizations() throws Exception { // verify that the optimization actually works - queryWithAsserts("q", "*:*", "fl", "id", "sort", "payload desc", "rows", "20"); // id only is optimized by default - queryWithAsserts("q", "*:*", "fl", "id,score", "sort", "payload desc", "rows", "20"); // id,score only is optimized by default - queryWithAsserts("q", "*:*", "fl", "score", "sort", "payload asc", "rows", "20", "distrib.singlePass", "true"); - + queryWithAsserts( + "q", + "*:*", + "fl", + "id", + "sort", + "payload desc", + "rows", + "20"); // id only is optimized by default + queryWithAsserts( + "q", + "*:*", + "fl", + "id,score", + "sort", + "payload desc", + "rows", + "20"); // id,score only is optimized by default + queryWithAsserts( + "q", + "*:*", + "fl", + "score", + "sort", + "payload asc", + "rows", + "20", + "distrib.singlePass", + "true"); } @Test public void testWildcardFieldList() throws Exception { - QueryResponse nonDistribRsp = queryWithAsserts("q", "id:19", "fl", "id,*a_sS", "sort", "payload asc"); - QueryResponse rsp = queryWithAsserts("q", "id:19", "fl", "id,*a_sS", "sort", "payload asc", "distrib.singlePass", "true"); + QueryResponse nonDistribRsp = + queryWithAsserts("q", "id:19", "fl", "id,*a_sS", "sort", "payload asc"); + QueryResponse rsp = + queryWithAsserts( + "q", "id:19", "fl", "id,*a_sS", "sort", "payload asc", "distrib.singlePass", "true"); assertFieldValues(nonDistribRsp.getResults(), "id", "19"); assertFieldValues(rsp.getResults(), "id", "19"); - nonDistribRsp = queryWithAsserts("q", "id:19", "fl", "id,dynamic_s,cat*", "sort", "payload asc"); - rsp = queryWithAsserts("q", "id:19", "fl", "id,dynamic_s,cat*", "sort", "payload asc", "distrib.singlePass", "true"); + nonDistribRsp = + queryWithAsserts("q", "id:19", "fl", "id,dynamic_s,cat*", "sort", "payload asc"); + rsp = + queryWithAsserts( + "q", + "id:19", + "fl", + "id,dynamic_s,cat*", + "sort", + "payload asc", + "distrib.singlePass", + "true"); assertFieldValues(nonDistribRsp.getResults(), "id", "19"); assertFieldValues(rsp.getResults(), "id", "19"); - queryWithAsserts("q", "id:19", "fl", "id,*a_sS", "sort", "payload asc", "distrib.singlePass", "true"); - queryWithAsserts("q", "id:19", "fl", "id,dynamic_s,cat*", "sort", "payload asc", "distrib.singlePass", "true"); + queryWithAsserts( + "q", "id:19", "fl", "id,*a_sS", "sort", "payload asc", "distrib.singlePass", "true"); + queryWithAsserts( + "q", + "id:19", + "fl", + "id,dynamic_s,cat*", + "sort", + "payload asc", + "distrib.singlePass", + "true"); // fl=* - queryWithAsserts("q", "*:*", "fl", "*", "sort", "payload desc", ShardParams.DISTRIB_SINGLE_PASS, "true"); + queryWithAsserts( + "q", "*:*", "fl", "*", "sort", "payload desc", ShardParams.DISTRIB_SINGLE_PASS, "true"); queryWithAsserts("q", "*:*", "fl", "*", "sort", "payload desc"); // fl=*,score - queryWithAsserts("q", "*:*", "fl", "*,score", "sort", "payload desc", ShardParams.DISTRIB_SINGLE_PASS, "true"); + 
queryWithAsserts( + "q", + "*:*", + "fl", + "*,score", + "sort", + "payload desc", + ShardParams.DISTRIB_SINGLE_PASS, + "true"); queryWithAsserts("q", "*:*", "fl", "*,score", "sort", "payload desc"); } @@ -179,8 +532,19 @@ public void testScoreAlwaysReturned() throws Exception { @Test public void testMultipleFlParams() throws Exception { - // fix for a bug where not all fields are returned if using multiple fl parameters, see SOLR-6796 - queryWithAsserts("q", "*:*", "fl", "id", "fl", "dynamic_s", "sort", "payload desc", ShardParams.DISTRIB_SINGLE_PASS, "true"); + // fix for a bug where not all fields are returned if using multiple fl parameters, see + // SOLR-6796 + queryWithAsserts( + "q", + "*:*", + "fl", + "id", + "fl", + "dynamic_s", + "sort", + "payload desc", + ShardParams.DISTRIB_SINGLE_PASS, + "true"); } @Test @@ -200,34 +564,41 @@ private static void compareResponses(QueryResponse rsp1, QueryResponse rsp2) { /** * This test now asserts that every distrib.singlePass query: + * *
   * <ol>
-   *   <li>Makes exactly 'numSlices' number of shard requests</li>
-   *   <li>Makes no GET_FIELDS requests</li>
-   *   <li>Must request the unique key field from shards</li>
-   *   <li>Must request the score if 'fl' has score or sort by score is requested</li>
-   *   <li>Requests all fields that are present in 'fl' param</li>
+   *   <li>Makes exactly 'numSlices' number of shard requests
+   *   <li>Makes no GET_FIELDS requests
+   *   <li>Must request the unique key field from shards
+   *   <li>Must request the score if 'fl' has score or sort by score is requested
+   *   <li>Requests all fields that are present in 'fl' param
   * </ol>
-   * <p>
-   * It also asserts that every regular two phase distribtued search:
+   *
+   * <p>It also asserts that every regular two phase distribtued search:
+   *
   * <ol>
-   *   <li>Makes at most 2 * 'numSlices' number of shard requests</li>
-   *   <li>Must request the unique key field from shards</li>
-   *   <li>Must request the score if 'fl' has score or sort by score is requested</li>
-   *   <li>Requests no fields other than id and score in GET_TOP_IDS request</li>
-   *   <li>Requests exactly the fields that are present in 'fl' param in GET_FIELDS request and no others</li>
+   *   <li>Makes at most 2 * 'numSlices' number of shard requests
+   *   <li>Must request the unique key field from shards
+   *   <li>Must request the score if 'fl' has score or sort by score is requested
+   *   <li>Requests no fields other than id and score in GET_TOP_IDS request
+   *   <li>Requests exactly the fields that are present in 'fl' param in GET_FIELDS request and no
+   *       others
   * </ol>
-   * <p>
and also asserts that each query which requests id or score or both behaves exactly like a + * single pass query */ private QueryResponse queryWithAsserts(String... q) throws Exception { - TrackingShardHandlerFactory.RequestTrackingQueue trackingQueue = new TrackingShardHandlerFactory.RequestTrackingQueue(); + TrackingShardHandlerFactory.RequestTrackingQueue trackingQueue = + new TrackingShardHandlerFactory.RequestTrackingQueue(); // the jettys doesn't include the control jetty which is exactly what we need here TrackingShardHandlerFactory.setTrackingQueue(cluster, trackingQueue); // let's add debug=track to such requests so we can use DebugComponent responses for assertions - QueryResponse response = cluster.getSolrClient().query(COLLECTION, new SolrQuery("debug", "track", q)); + QueryResponse response = + cluster.getSolrClient().query(COLLECTION, new SolrQuery("debug", "track", q)); - Map> requests = trackingQueue.getAllRequests(); + Map> requests = + trackingQueue.getAllRequests(); int numRequests = getNumRequests(requests); boolean distribSinglePass = false; @@ -235,8 +606,10 @@ private QueryResponse queryWithAsserts(String... q) throws Exception { Set fls = new HashSet<>(); Set sortFields = new HashSet<>(); for (int i = 0; i < q.length; i += 2) { - if (ShardParams.DISTRIB_SINGLE_PASS.equals(q[i].toString()) && Boolean.parseBoolean(q[i + 1].toString())) { - assertTrue("distrib.singlePass=true made more requests than number of shards", + if (ShardParams.DISTRIB_SINGLE_PASS.equals(q[i].toString()) + && Boolean.parseBoolean(q[i + 1].toString())) { + assertTrue( + "distrib.singlePass=true made more requests than number of shards", numRequests == sliceCount); distribSinglePass = true; } @@ -256,7 +629,8 @@ private QueryResponse queryWithAsserts(String... q) throws Exception { if (fls.contains("score") || sortFields.contains("score")) idScoreFields.add("score"); if (idScoreFields.containsAll(fls) && !fls.isEmpty()) { - // if id and/or score are the only fields being requested then we implicitly turn on distribSinglePass=true + // if id and/or score are the only fields being requested then we implicitly turn on + // distribSinglePass=true distribSinglePass = true; } @@ -266,53 +640,100 @@ private QueryResponse queryWithAsserts(String... 
q) throws Exception { SimpleOrderedMap track = (SimpleOrderedMap) debugMap.get("track"); assertNotNull(track); assertNotNull(track.get("EXECUTE_QUERY")); - assertNull("A single pass request should not have a GET_FIELDS phase", track.get("GET_FIELDS")); + assertNull( + "A single pass request should not have a GET_FIELDS phase", track.get("GET_FIELDS")); // all fields should be requested in one go but even if 'id' is not requested by user // it must still be fetched in this phase to merge correctly Set reqAndIdScoreFields = new HashSet<>(fls); reqAndIdScoreFields.addAll(idScoreFields); - assertParamsEquals(trackingQueue, COLLECTION, SHARD1, - CommonParams.FL, ShardRequest.PURPOSE_GET_TOP_IDS, reqAndIdScoreFields.toArray(new String[reqAndIdScoreFields.size()])); - assertParamsEquals(trackingQueue, COLLECTION, SHARD2, - CommonParams.FL, ShardRequest.PURPOSE_GET_TOP_IDS, reqAndIdScoreFields.toArray(new String[reqAndIdScoreFields.size()])); + assertParamsEquals( + trackingQueue, + COLLECTION, + SHARD1, + CommonParams.FL, + ShardRequest.PURPOSE_GET_TOP_IDS, + reqAndIdScoreFields.toArray(new String[reqAndIdScoreFields.size()])); + assertParamsEquals( + trackingQueue, + COLLECTION, + SHARD2, + CommonParams.FL, + ShardRequest.PURPOSE_GET_TOP_IDS, + reqAndIdScoreFields.toArray(new String[reqAndIdScoreFields.size()])); } else { // we are assuming there are facet refinement or distributed idf requests here - assertTrue("distrib.singlePass=false made more requests than 2 * number of shards." + - " Actual: " + numRequests + " but expected <= " + sliceCount * 2, + assertTrue( + "distrib.singlePass=false made more requests than 2 * number of shards." + + " Actual: " + + numRequests + + " but expected <= " + + sliceCount * 2, numRequests <= sliceCount * 2); // only id and/or score should be requested - assertParamsEquals(trackingQueue, COLLECTION, SHARD1, - CommonParams.FL, ShardRequest.PURPOSE_GET_TOP_IDS, idScoreFields.toArray(new String[idScoreFields.size()])); - assertParamsEquals(trackingQueue, COLLECTION, SHARD2, - CommonParams.FL, ShardRequest.PURPOSE_GET_TOP_IDS, idScoreFields.toArray(new String[idScoreFields.size()])); + assertParamsEquals( + trackingQueue, + COLLECTION, + SHARD1, + CommonParams.FL, + ShardRequest.PURPOSE_GET_TOP_IDS, + idScoreFields.toArray(new String[idScoreFields.size()])); + assertParamsEquals( + trackingQueue, + COLLECTION, + SHARD2, + CommonParams.FL, + ShardRequest.PURPOSE_GET_TOP_IDS, + idScoreFields.toArray(new String[idScoreFields.size()])); // only originally requested fields must be requested in GET_FIELDS request - assertParamsEquals(trackingQueue, COLLECTION, SHARD1, - CommonParams.FL, ShardRequest.PURPOSE_GET_FIELDS, fls.toArray(new String[fls.size()])); - assertParamsEquals(trackingQueue, COLLECTION, SHARD2, - CommonParams.FL, ShardRequest.PURPOSE_GET_FIELDS, fls.toArray(new String[fls.size()])); + assertParamsEquals( + trackingQueue, + COLLECTION, + SHARD1, + CommonParams.FL, + ShardRequest.PURPOSE_GET_FIELDS, + fls.toArray(new String[fls.size()])); + assertParamsEquals( + trackingQueue, + COLLECTION, + SHARD2, + CommonParams.FL, + ShardRequest.PURPOSE_GET_FIELDS, + fls.toArray(new String[fls.size()])); } return response; } - private int getNumRequests(Map> requests) { + private int getNumRequests( + Map> requests) { int beforeNumRequests = 0; - for (Map.Entry> entry : requests.entrySet()) { + for (Map.Entry> entry : + requests.entrySet()) { beforeNumRequests += entry.getValue().size(); } return beforeNumRequests; } - private void 
assertParamsEquals(TrackingShardHandlerFactory.RequestTrackingQueue trackingQueue, String collection, String shard, String paramName, int purpose, String... values) { - TrackingShardHandlerFactory.ShardRequestAndParams getByIdRequest - = trackingQueue.getShardRequestByPurpose(cluster.getSolrClient().getZkStateReader(), collection, shard, purpose); + private void assertParamsEquals( + TrackingShardHandlerFactory.RequestTrackingQueue trackingQueue, + String collection, + String shard, + String paramName, + int purpose, + String... values) { + TrackingShardHandlerFactory.ShardRequestAndParams getByIdRequest = + trackingQueue.getShardRequestByPurpose( + cluster.getSolrClient().getZkStateReader(), collection, shard, purpose); assertParamsEquals(getByIdRequest, paramName, values); } - private void assertParamsEquals(TrackingShardHandlerFactory.ShardRequestAndParams requestAndParams, String paramName, String... values) { + private void assertParamsEquals( + TrackingShardHandlerFactory.ShardRequestAndParams requestAndParams, + String paramName, + String... values) { if (requestAndParams == null) return; int expectedCount = values.length; String[] params = requestAndParams.params.getParams(paramName); @@ -325,17 +746,32 @@ private void assertParamsEquals(TrackingShardHandlerFactory.ShardRequestAndParam List list = StrUtils.splitSmart(p, ','); for (String s : list) { // make sure field names aren't duplicated in the parameters - assertTrue("Field name " + s + " was requested multiple times: params = " + requestAndParams.params, + assertTrue( + "Field name " + + s + + " was requested multiple times: params = " + + requestAndParams.params, requestedFields.add(s)); } } } // if a wildcard ALL field is requested then we don't need to match exact number of params - if (!requestedFields.contains("*")) { - assertEquals("Number of requested fields do not match with expectations", expectedCount, requestedFields.size()); + if (!requestedFields.contains("*")) { + assertEquals( + "Number of requested fields do not match with expectations", + expectedCount, + requestedFields.size()); for (String field : values) { if (!requestedFields.contains(field)) { - fail("Field " + field + " not found in param: " + paramName + " request had " + paramName + "=" + requestedFields); + fail( + "Field " + + field + + " not found in param: " + + paramName + + " request had " + + paramName + + "=" + + requestedFields); } } } diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryElevationComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryElevationComponentTest.java index 42fcb958692..83d773a014f 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryElevationComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryElevationComponentTest.java @@ -28,16 +28,15 @@ import org.junit.BeforeClass; import org.junit.Test; -/** - * - */ -// See: https://issues.apache.org/jira/browse/SOLR-12028 Tests cannot remove files on Windows machines occasionally +/** */ +// See: https://issues.apache.org/jira/browse/SOLR-12028 Tests cannot remove files on Windows +// machines occasionally public class DistributedQueryElevationComponentTest extends BaseDistributedSearchTestCase { @BeforeClass public static void betterNotBeJ9() { - assumeFalse("FIXME: SOLR-5791: This test fails under IBM J9", - Constants.JAVA_VENDOR.startsWith("IBM")); + assumeFalse( + "FIXME: SOLR-5791: This test fails under IBM J9", 
Constants.JAVA_VENDOR.startsWith("IBM")); } public DistributedQueryElevationComponentTest() { @@ -52,14 +51,13 @@ public DistributedQueryElevationComponentTest() { @ShardsFixed(num = 3) public void test() throws Exception { - del("*:*"); - indexr(id,"1", "int_i", "1", "text", "XXXX XXXX", "field_t", "anything"); - indexr(id,"2", "int_i", "2", "text", "YYYY YYYY", "plow_t", "rake"); - indexr(id,"3", "int_i", "3", "text", "ZZZZ ZZZZ"); - indexr(id,"4", "int_i", "4", "text", "XXXX XXXX"); - indexr(id,"5", "int_i", "5", "text", "ZZZZ ZZZZ ZZZZ"); - indexr(id,"6", "int_i", "6", "text", "ZZZZ"); + indexr(id, "1", "int_i", "1", "text", "XXXX XXXX", "field_t", "anything"); + indexr(id, "2", "int_i", "2", "text", "YYYY YYYY", "plow_t", "rake"); + indexr(id, "3", "int_i", "3", "text", "ZZZZ ZZZZ"); + indexr(id, "4", "int_i", "4", "text", "XXXX XXXX"); + indexr(id, "5", "int_i", "5", "text", "ZZZZ ZZZZ ZZZZ"); + indexr(id, "6", "int_i", "6", "text", "ZZZZ"); index_specific(2, id, "7", "int_i", "7", "text", "solr"); commit(); @@ -75,19 +73,91 @@ public void test() throws Exception { handle.put("shards", SKIP); handle.put("q", SKIP); handle.put("qt", SKIP); - query("q", "*:*", "qt", "/elevate", "shards.qt", "/elevate", "rows", "500", "sort", "id desc", CommonParams.FL, "id, score, [elevated]"); - - query("q", "ZZZZ", "qt", "/elevate", "shards.qt", "/elevate", "rows", "500", CommonParams.FL, "*, [elevated]", "forceElevation", "true", "sort", "int_i desc"); - - query("q", "solr", "qt", "/elevate", "shards.qt", "/elevate", "rows", "500", CommonParams.FL, "*, [elevated]", "forceElevation", "true", "sort", "int_i asc"); - - query("q", "ZZZZ", "qt", "/elevate", "shards.qt", "/elevate", "rows", "500", CommonParams.FL, "*, [elevated]", "forceElevation", "true", "sort", "id desc"); + query( + "q", + "*:*", + "qt", + "/elevate", + "shards.qt", + "/elevate", + "rows", + "500", + "sort", + "id desc", + CommonParams.FL, + "id, score, [elevated]"); + + query( + "q", + "ZZZZ", + "qt", + "/elevate", + "shards.qt", + "/elevate", + "rows", + "500", + CommonParams.FL, + "*, [elevated]", + "forceElevation", + "true", + "sort", + "int_i desc"); + + query( + "q", + "solr", + "qt", + "/elevate", + "shards.qt", + "/elevate", + "rows", + "500", + CommonParams.FL, + "*, [elevated]", + "forceElevation", + "true", + "sort", + "int_i asc"); + + query( + "q", + "ZZZZ", + "qt", + "/elevate", + "shards.qt", + "/elevate", + "rows", + "500", + CommonParams.FL, + "*, [elevated]", + "forceElevation", + "true", + "sort", + "id desc"); // See SOLR-4854 for background on following test code // Uses XML response format by default - QueryResponse response = query("q", "XXXX", "qt", "/elevate", "shards.qt", "/elevate", "rows", "500", CommonParams.FL, "id, [elevated]", "enableElevation", "true", - "forceElevation", "true", "elevateIds", "6", "sort", "id desc"); + QueryResponse response = + query( + "q", + "XXXX", + "qt", + "/elevate", + "shards.qt", + "/elevate", + "rows", + "500", + CommonParams.FL, + "id, [elevated]", + "enableElevation", + "true", + "forceElevation", + "true", + "elevateIds", + "6", + "sort", + "id desc"); assertTrue(response.getResults().getNumFound() > 0); SolrDocument document = response.getResults().get(0); @@ -95,12 +165,19 @@ public void test() throws Exception { assertEquals(true, document.getFieldValue("[elevated]")); // Force javabin format - final String clientUrl = ((HttpSolrClient)clients.get(0)).getBaseURL(); + final String clientUrl = ((HttpSolrClient) clients.get(0)).getBaseURL(); HttpSolrClient client = 
getHttpSolrClient(clientUrl); client.setParser(new BinaryResponseParser()); - SolrQuery solrQuery = new SolrQuery("XXXX").setParam("qt", "/elevate").setParam("shards.qt", "/elevate").setRows(500).setFields("id,[elevated]") - .setParam("enableElevation", "true").setParam("forceElevation", "true").setParam("elevateIds", "6", "wt", "javabin") - .setSort("id", SolrQuery.ORDER.desc); + SolrQuery solrQuery = + new SolrQuery("XXXX") + .setParam("qt", "/elevate") + .setParam("shards.qt", "/elevate") + .setRows(500) + .setFields("id,[elevated]") + .setParam("enableElevation", "true") + .setParam("forceElevation", "true") + .setParam("elevateIds", "6", "wt", "javabin") + .setSort("id", SolrQuery.ORDER.desc); setDistributedParams(solrQuery); response = client.query(solrQuery); client.close(); @@ -110,12 +187,11 @@ public void test() throws Exception { assertEquals("6", document.getFieldValue("id")); assertEquals(true, document.getFieldValue("[elevated]")); } - + @Override protected void indexr(Object... fields) throws Exception { SolrInputDocument doc = new SolrInputDocument(); addFields(doc, fields); indexDoc(doc); } - } diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedSpellCheckComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedSpellCheckComponentTest.java index 9e84fd2adfe..0b308351e89 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DistributedSpellCheckComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedSpellCheckComponentTest.java @@ -19,9 +19,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; - import junit.framework.Assert; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.lucene.util.LuceneTestCase.SuppressTempFileChecks; import org.apache.solr.BaseDistributedSearchTestCase; @@ -37,19 +35,18 @@ * Test for SpellCheckComponent's distributed querying * * @since solr 1.5 - * * @see org.apache.solr.handler.component.SpellCheckComponent */ @Slow -@SuppressTempFileChecks(bugUrl = "https://issues.apache.org/jira/browse/SOLR-1877 Spellcheck IndexReader leak bug?") +@SuppressTempFileChecks( + bugUrl = "https://issues.apache.org/jira/browse/SOLR-1877 Spellcheck IndexReader leak bug?") public class DistributedSpellCheckComponentTest extends BaseDistributedSearchTestCase { - - public DistributedSpellCheckComponentTest() - { - //Helpful for debugging - //fixShardCount=true; - //shardCount=2; - //stress=0; + + public DistributedSpellCheckComponentTest() { + // Helpful for debugging + // fixShardCount=true; + // shardCount=2; + // stress=0; } @BeforeClass @@ -72,16 +69,15 @@ private void q(Object... 
q) throws Exception { SolrClient client = clients.get(which); client.query(params); } - + @Override - public void validateControlData(QueryResponse control) throws Exception - { + public void validateControlData(QueryResponse control) throws Exception { NamedList nl = control.getResponse(); @SuppressWarnings("unchecked") NamedList sc = (NamedList) nl.get("spellcheck"); @SuppressWarnings("unchecked") NamedList sug = (NamedList) sc.get("suggestions"); - if(sug.size()==0) { + if (sug.size() == 0) { Assert.fail("Control data did not return any suggestions."); } } @@ -122,16 +118,16 @@ public void test() throws Exception { // we care only about the spellcheck results handle.put("response", SKIP); handle.put("grouped", SKIP); - - //Randomly select either IndexBasedSpellChecker or DirectSolrSpellChecker + + // Randomly select either IndexBasedSpellChecker or DirectSolrSpellChecker String requestHandlerName = "/spellCheckCompRH_Direct"; String reqHandlerWithWordbreak = "/spellCheckWithWordbreak_Direct"; - if(random().nextBoolean()) { + if (random().nextBoolean()) { requestHandlerName = "/spellCheckCompRH"; reqHandlerWithWordbreak = "/spellCheckWithWordbreak"; - } - - //Shortcut names + } + + // Shortcut names String build = SpellingParams.SPELLCHECK_BUILD; String extended = SpellingParams.SPELLCHECK_EXTENDED_RESULTS; String count = SpellingParams.SPELLCHECK_COUNT; @@ -141,88 +137,260 @@ public void test() throws Exception { String maxCollations = SpellingParams.SPELLCHECK_MAX_COLLATIONS; String altTermCount = SpellingParams.SPELLCHECK_ALTERNATIVE_TERM_COUNT; String maxResults = SpellingParams.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST; - - //Build the dictionary for IndexBasedSpellChecker + + // Build the dictionary for IndexBasedSpellChecker q(buildRequest("*:*", false, "/spellCheckCompRH", false, build, "true")); - - //Test Basic Functionality - query(buildRequest("toyata", true, requestHandlerName, random().nextBoolean(), (String[]) null)); - query(buildRequest("toyata", true, requestHandlerName, random().nextBoolean(), extended, "true")); - query(buildRequest("bluo", true, requestHandlerName, random().nextBoolean(), extended, "true", count, "4")); - - //Test Collate functionality - query(buildRequest("The quick reb fox jumped over the lazy brown dogs", - false, requestHandlerName, random().nextBoolean(), extended, "true", count, "4", collate, "true")); - query(buildRequest("lowerfilt:(+quock +reb)", - false, requestHandlerName, random().nextBoolean(), extended, "true", count, "10", - collate, "true", maxCollationTries, "10", maxCollations, "10", collateExtended, "true")); - query(buildRequest("lowerfilt:(+quock +reb)", - false, requestHandlerName, random().nextBoolean(), extended, "true", count, "10", - collate, "true", maxCollationTries, "10", maxCollations, "10", collateExtended, "false")); - query(buildRequest("lowerfilt:(+quock +reb)", - false, requestHandlerName, random().nextBoolean(), extended, "true", count, "10", - collate, "true", maxCollationTries, "0", maxCollations, "1", collateExtended, "false")); - - //Test context-sensitive collate - query(buildRequest("lowerfilt:(\"quick red fox\")", - false, requestHandlerName, random().nextBoolean(), extended, "true", count, "10", - collate, "true", maxCollationTries, "10", maxCollations, "1", collateExtended, "false", - altTermCount, "5", maxResults, "10")); - query(buildRequest("lowerfilt:(\"rod fix\")", - false, requestHandlerName, random().nextBoolean(), extended, "true", count, "10", - collate, "true", maxCollationTries, "10", maxCollations, 
"1", collateExtended, "false", - altTermCount, "5", maxResults, "10")); - query(buildRequest("lowerfilt:(\"rod fix\")", - false, requestHandlerName, random().nextBoolean(), extended, "true", count, "10", - collate, "true", maxCollationTries, "10", maxCollations, "1", collateExtended, "false", - altTermCount, "5", maxResults, ".10", "fq", "id:[13 TO 22]")); - - //Test word-break spellchecker - query(buildRequest("lowerfilt:(+quock +redfox +jum +ped)", - false, reqHandlerWithWordbreak, random().nextBoolean(), extended, "true", count, "10", - collate, "true", maxCollationTries, "0", maxCollations, "1", collateExtended, "true")); - query(buildRequest("lowerfilt:(+rodfix)", - false, reqHandlerWithWordbreak, random().nextBoolean(), extended, "true", count, "10", - collate, "true", maxCollationTries, "0", maxCollations, "1", collateExtended, "true")); - query(buildRequest("lowerfilt:(+son +ata)", - false, reqHandlerWithWordbreak, random().nextBoolean(), extended, "true", count, "10", - collate, "true", maxCollationTries, "0", maxCollations, "1", collateExtended, "true")); + + // Test Basic Functionality + query( + buildRequest("toyata", true, requestHandlerName, random().nextBoolean(), (String[]) null)); + query( + buildRequest("toyata", true, requestHandlerName, random().nextBoolean(), extended, "true")); + query( + buildRequest( + "bluo", + true, + requestHandlerName, + random().nextBoolean(), + extended, + "true", + count, + "4")); + + // Test Collate functionality + query( + buildRequest( + "The quick reb fox jumped over the lazy brown dogs", + false, + requestHandlerName, + random().nextBoolean(), + extended, + "true", + count, + "4", + collate, + "true")); + query( + buildRequest( + "lowerfilt:(+quock +reb)", + false, + requestHandlerName, + random().nextBoolean(), + extended, + "true", + count, + "10", + collate, + "true", + maxCollationTries, + "10", + maxCollations, + "10", + collateExtended, + "true")); + query( + buildRequest( + "lowerfilt:(+quock +reb)", + false, + requestHandlerName, + random().nextBoolean(), + extended, + "true", + count, + "10", + collate, + "true", + maxCollationTries, + "10", + maxCollations, + "10", + collateExtended, + "false")); + query( + buildRequest( + "lowerfilt:(+quock +reb)", + false, + requestHandlerName, + random().nextBoolean(), + extended, + "true", + count, + "10", + collate, + "true", + maxCollationTries, + "0", + maxCollations, + "1", + collateExtended, + "false")); + + // Test context-sensitive collate + query( + buildRequest( + "lowerfilt:(\"quick red fox\")", + false, + requestHandlerName, + random().nextBoolean(), + extended, + "true", + count, + "10", + collate, + "true", + maxCollationTries, + "10", + maxCollations, + "1", + collateExtended, + "false", + altTermCount, + "5", + maxResults, + "10")); + query( + buildRequest( + "lowerfilt:(\"rod fix\")", + false, + requestHandlerName, + random().nextBoolean(), + extended, + "true", + count, + "10", + collate, + "true", + maxCollationTries, + "10", + maxCollations, + "1", + collateExtended, + "false", + altTermCount, + "5", + maxResults, + "10")); + query( + buildRequest( + "lowerfilt:(\"rod fix\")", + false, + requestHandlerName, + random().nextBoolean(), + extended, + "true", + count, + "10", + collate, + "true", + maxCollationTries, + "10", + maxCollations, + "1", + collateExtended, + "false", + altTermCount, + "5", + maxResults, + ".10", + "fq", + "id:[13 TO 22]")); + + // Test word-break spellchecker + query( + buildRequest( + "lowerfilt:(+quock +redfox +jum +ped)", + false, + 
reqHandlerWithWordbreak, + random().nextBoolean(), + extended, + "true", + count, + "10", + collate, + "true", + maxCollationTries, + "0", + maxCollations, + "1", + collateExtended, + "true")); + query( + buildRequest( + "lowerfilt:(+rodfix)", + false, + reqHandlerWithWordbreak, + random().nextBoolean(), + extended, + "true", + count, + "10", + collate, + "true", + maxCollationTries, + "0", + maxCollations, + "1", + collateExtended, + "true")); + query( + buildRequest( + "lowerfilt:(+son +ata)", + false, + reqHandlerWithWordbreak, + random().nextBoolean(), + extended, + "true", + count, + "10", + collate, + "true", + maxCollationTries, + "0", + maxCollations, + "1", + collateExtended, + "true")); } - private Object[] buildRequest(String q, boolean useSpellcheckQ, String handlerName, boolean useGrouping, String... addlParams) { + + private Object[] buildRequest( + String q, + boolean useSpellcheckQ, + String handlerName, + boolean useGrouping, + String... addlParams) { List params = new ArrayList<>(); - + params.add("q"); params.add(useSpellcheckQ ? "*:*" : q); - - if(useSpellcheckQ) { + + if (useSpellcheckQ) { params.add("spellcheck.q"); params.add(q); } - + params.add("fl"); params.add("id,lowerfilt"); - + params.add("qt"); params.add(handlerName); - + params.add("shards.qt"); params.add(handlerName); - + params.add("spellcheck"); params.add("true"); - - if(useGrouping) { + + if (useGrouping) { params.add("group"); params.add("true"); - + params.add("group.field"); params.add("id"); } - - if(addlParams!=null) { + + if (addlParams != null) { params.addAll(Arrays.asList(addlParams)); } - return params.toArray(new Object[params.size()]); + return params.toArray(new Object[params.size()]); } - } diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedSuggestComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedSuggestComponentTest.java index 6cae9de684c..6cdd4296e4e 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DistributedSuggestComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedSuggestComponentTest.java @@ -16,6 +16,9 @@ */ package org.apache.solr.handler.component; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; import junit.framework.Assert; import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.BaseDistributedSearchTestCase; @@ -26,10 +29,6 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - /** * Test for SuggestComponent's distributed querying * @@ -37,13 +36,13 @@ */ @Slow public class DistributedSuggestComponentTest extends BaseDistributedSearchTestCase { - + public DistributedSuggestComponentTest() { - //Helpful for debugging - //fixShardCount=true; - //shardCount=2; - //stress=0; - //deadServers=null; + // Helpful for debugging + // fixShardCount=true; + // shardCount=2; + // stress=0; + // deadServers=null; configString = "solrconfig-suggestercomponent.xml"; } @@ -53,13 +52,13 @@ public static void beforeClass() throws Exception { } @Override - public void validateControlData(QueryResponse control) throws Exception - { + public void validateControlData(QueryResponse control) throws Exception { NamedList nl = control.getResponse(); @SuppressWarnings("unchecked") - NamedList>> sc = (NamedList>>) nl.get("suggest"); + NamedList>> sc = + (NamedList>>) nl.get("suggest"); String command = (String) nl.get("command"); - if(sc.size() == 0 && command 
== null) { + if (sc.size() == 0 && command == null) { Assert.fail("Control data did not return any suggestions or execute any command"); } } @@ -83,56 +82,73 @@ public void test() throws Exception { handle.put("timestamp", SKIPVAL); handle.put("maxScore", SKIPVAL); handle.put("response", SKIP); - + String requestHandlerName = "/suggest"; String docDictName = "suggest_fuzzy_doc_dict"; String docExprDictName = "suggest_fuzzy_doc_expr_dict"; - - //Shortcut names + + // Shortcut names String build = SuggesterParams.SUGGEST_BUILD; String buildAll = SuggesterParams.SUGGEST_BUILD_ALL; String count = SuggesterParams.SUGGEST_COUNT; String dictionaryName = SuggesterParams.SUGGEST_DICT; - - //Build the suggest dictionary + + // Build the suggest dictionary if (random().nextBoolean()) { // build all the suggesters in one go query(buildRequest("", true, requestHandlerName, buildAll, "true")); } else { // build suggesters individually query(buildRequest("", true, requestHandlerName, build, "true", dictionaryName, docDictName)); - query(buildRequest("", true, requestHandlerName, build, "true", dictionaryName, docExprDictName)); + query( + buildRequest( + "", true, requestHandlerName, build, "true", dictionaryName, docExprDictName)); } - - //Test Basic Functionality - query(buildRequest("exampel", false, requestHandlerName, dictionaryName, docDictName, count, "2")); - query(buildRequest("Yet", false, requestHandlerName, dictionaryName, docExprDictName, count, "2")); - query(buildRequest("blah", true, requestHandlerName, dictionaryName, docExprDictName, count, "2")); + + // Test Basic Functionality + query( + buildRequest( + "exampel", false, requestHandlerName, dictionaryName, docDictName, count, "2")); + query( + buildRequest( + "Yet", false, requestHandlerName, dictionaryName, docExprDictName, count, "2")); + query( + buildRequest( + "blah", true, requestHandlerName, dictionaryName, docExprDictName, count, "2")); query(buildRequest("blah", true, requestHandlerName, dictionaryName, docDictName, count, "2")); - - //Test multiSuggester - query(buildRequest("exampel", false, requestHandlerName, dictionaryName, docDictName, dictionaryName, docExprDictName, count, "2")); - + + // Test multiSuggester + query( + buildRequest( + "exampel", + false, + requestHandlerName, + dictionaryName, + docDictName, + dictionaryName, + docExprDictName, + count, + "2")); } - private Object[] buildRequest(String q, boolean useSuggestQ, String handlerName, String... addlParams) { + + private Object[] buildRequest( + String q, boolean useSuggestQ, String handlerName, String... 
addlParams) { List params = new ArrayList<>(); - if(useSuggestQ) { + if (useSuggestQ) { params.add("suggest.q"); } else { params.add("q"); } params.add(q); - params.add("qt"); params.add(handlerName); - + params.add("shards.qt"); params.add(handlerName); - - if(addlParams!=null) { + + if (addlParams != null) { params.addAll(Arrays.asList(addlParams)); } - return params.toArray(new Object[params.size()]); + return params.toArray(new Object[params.size()]); } - } diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedTermsComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedTermsComponentTest.java index b83320c08c5..38574cba64f 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DistributedTermsComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedTermsComponentTest.java @@ -22,7 +22,6 @@ import java.util.List; import java.util.Random; import java.util.stream.Stream; - import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.client.solrj.ResponseParser; import org.apache.solr.client.solrj.SolrClient; @@ -40,7 +39,6 @@ /** * Test for TermsComponent distributed querying * - * * @since solr 1.5 */ public class DistributedTermsComponentTest extends BaseDistributedSearchTestCase { @@ -58,52 +56,144 @@ public void test() throws Exception { query("qt", "/terms", "terms.fl", "foo_i_p"); index(id, random.nextInt(), "b_t", "snake a,b spider shark snail slug seal", "foo_i", "1"); - index(id, random.nextInt(), "b_t", "snake spider shark snail slug", "foo_i", "2", "foo_date_p", "2015-01-03T14:30:00Z"); + index( + id, + random.nextInt(), + "b_t", + "snake spider shark snail slug", + "foo_i", + "2", + "foo_date_p", + "2015-01-03T14:30:00Z"); index(id, random.nextInt(), "b_t", "snake spider shark snail", "foo_i", "3"); - index(id, random.nextInt(), "b_t", "snake spider shark", "foo_i", "2", "foo_date_p", "2014-03-15T12:00:00Z"); - index(id, random.nextInt(), "b_t", "snake spider", "c_t", "snake spider", "foo_date_p", "2014-03-15T12:00:00Z"); - index(id, random.nextInt(), "b_t", "snake", "c_t", "snake", "foo_date_p", "2014-03-15T12:00:00Z"); - index(id, random.nextInt(), "b_t", "ant zebra", "c_t", "ant zebra", "foo_date_p", "2015-01-03T14:30:00Z"); - index(id, random.nextInt(), "b_t", "zebra", "c_t", "zebra", "foo_date_p", "2015-01-03T14:30:00Z"); + index( + id, + random.nextInt(), + "b_t", + "snake spider shark", + "foo_i", + "2", + "foo_date_p", + "2014-03-15T12:00:00Z"); + index( + id, + random.nextInt(), + "b_t", + "snake spider", + "c_t", + "snake spider", + "foo_date_p", + "2014-03-15T12:00:00Z"); + index( + id, random.nextInt(), "b_t", "snake", "c_t", "snake", "foo_date_p", "2014-03-15T12:00:00Z"); + index( + id, + random.nextInt(), + "b_t", + "ant zebra", + "c_t", + "ant zebra", + "foo_date_p", + "2015-01-03T14:30:00Z"); + index( + id, random.nextInt(), "b_t", "zebra", "c_t", "zebra", "foo_date_p", "2015-01-03T14:30:00Z"); commit(); handle.clear(); handle.put("terms", UNORDERED); - query("qt", "/terms", "terms.fl", "b_t"); - query("qt", "/terms", "terms.limit", 5, "terms.fl", "b_t", "terms.lower", "s"); - query("qt", "/terms", "terms.limit", 5, "terms.fl", "b_t", "terms.prefix", "sn", "terms.lower", "sn"); - query("qt", "/terms", "terms.limit", 5, "terms.fl", "b_t", "terms.prefix", "s", "terms.lower", "s", "terms.upper", "sn"); + query("qt", "/terms", "terms.fl", "b_t"); + query("qt", "/terms", "terms.limit", 5, "terms.fl", "b_t", "terms.lower", "s"); + query( + "qt", + "/terms", 
+ "terms.limit", + 5, + "terms.fl", + "b_t", + "terms.prefix", + "sn", + "terms.lower", + "sn"); + query( + "qt", + "/terms", + "terms.limit", + 5, + "terms.fl", + "b_t", + "terms.prefix", + "s", + "terms.lower", + "s", + "terms.upper", + "sn"); // terms.sort - query("qt", "/terms", "terms.limit", 5, "terms.fl", "b_t", "terms.prefix", "s", "terms.lower", "s", "terms.sort", "index"); - query("qt", "/terms", "terms.limit", 5, "terms.fl", "b_t", "terms.prefix", "s", "terms.lower", "s", "terms.upper", "sn", "terms.sort", "index"); - query("qt", "/terms", "terms.fl", "b_t", "terms.sort", "index"); + query( + "qt", + "/terms", + "terms.limit", + 5, + "terms.fl", + "b_t", + "terms.prefix", + "s", + "terms.lower", + "s", + "terms.sort", + "index"); + query( + "qt", + "/terms", + "terms.limit", + 5, + "terms.fl", + "b_t", + "terms.prefix", + "s", + "terms.lower", + "s", + "terms.upper", + "sn", + "terms.sort", + "index"); + query("qt", "/terms", "terms.fl", "b_t", "terms.sort", "index"); // terms.list - query("qt", "/terms", "terms.fl", "b_t", "terms.list", "snake,zebra,ant,bad"); - query("qt", "/terms", "terms.fl", "foo_i", "terms.list", "2,3,1"); - query("qt", "/terms", "terms.fl", "foo_i", "terms.stats", "true","terms.list", "2,3,1"); - query("qt", "/terms", "terms.fl", "b_t", "terms.list", "snake,zebra", "terms.ttf", "true"); - query("qt", "/terms", "terms.fl", "b_t", "terms.fl", "c_t", "terms.list", "snake,ant,zebra", "terms.ttf", "true"); + query("qt", "/terms", "terms.fl", "b_t", "terms.list", "snake,zebra,ant,bad"); + query("qt", "/terms", "terms.fl", "foo_i", "terms.list", "2,3,1"); + query("qt", "/terms", "terms.fl", "foo_i", "terms.stats", "true", "terms.list", "2,3,1"); + query("qt", "/terms", "terms.fl", "b_t", "terms.list", "snake,zebra", "terms.ttf", "true"); + query( + "qt", + "/terms", + "terms.fl", + "b_t", + "terms.fl", + "c_t", + "terms.list", + "snake,ant,zebra", + "terms.ttf", + "true"); // for date point field - query("qt", "/terms", "terms.fl", "foo_date_p"); + query("qt", "/terms", "terms.fl", "foo_date_p"); // terms.ttf=true doesn't work for point fields - //query("qt", "/terms", "terms.fl", "foo_date_p", "terms.ttf", "true"); + // query("qt", "/terms", "terms.fl", "foo_date_p", "terms.ttf", "true"); } - + protected QueryResponse query(Object... 
q) throws Exception { - if (Stream.of(q).noneMatch(s->s.equals("terms.list"))) { + if (Stream.of(q).noneMatch(s -> s.equals("terms.list"))) { // SOLR-9243 doesn't support max/min count - for (int i = 0; i < q.length; i+=2) { - if (q[i].equals("terms.sort") && q[i+1].equals("index") || rarely()) { + for (int i = 0; i < q.length; i += 2) { + if (q[i].equals("terms.sort") && q[i + 1].equals("index") || rarely()) { List params = new ArrayList(Arrays.asList(q)); if (usually()) { params.add("terms.mincount"); - params.add(random().nextInt(4)-1); + params.add(random().nextInt(4) - 1); } if (usually()) { params.add("terms.maxcount"); - params.add(random().nextInt(4)-1); + params.add(random().nextInt(4) - 1); } q = params.toArray(new Object[params.size()]); break; @@ -135,18 +225,23 @@ protected QueryResponse query(boolean setDistribParams, SolrParams p) throws Exc // flags needs to be called here since only terms response is passed to compare // other way is to pass whole response to compare - assertNull(compare(rsp.findRecursive("terms"), - controlRsp.findRecursive("terms"), flags(handle, "terms"), handle)); + assertNull( + compare( + rsp.findRecursive("terms"), + controlRsp.findRecursive("terms"), + flags(handle, "terms"), + handle)); } return queryResponse; } /** - * Returns a {@link NamedList} containing server - * response deserialization is based on the {@code responseParser} + * Returns a {@link NamedList} containing the server response; deserialization is based on the + * {@code responseParser} */ - private NamedList queryClient(SolrClient solrClient, final ModifiableSolrParams params, - ResponseParser responseParser) throws SolrServerException, IOException { + private NamedList queryClient( + SolrClient solrClient, final ModifiableSolrParams params, ResponseParser responseParser) + throws SolrServerException, IOException { QueryRequest queryRequest = new QueryRequest(params); queryRequest.setResponseParser(responseParser); return solrClient.request(queryRequest); @@ -154,9 +249,10 @@ private NamedList queryClient(SolrClient solrClient, final ModifiableSol private ResponseParser[] getResponseParsers() { // can't use junit parameters as this would also require RunWith - return new ResponseParser[]{ - new BinaryResponseParser(), new DelegationTokenResponse.JsonMapResponseParser(), - new XMLResponseParser() + return new ResponseParser[] { + new BinaryResponseParser(), + new DelegationTokenResponse.JsonMapResponseParser(), + new XMLResponseParser() }; } } diff --git a/solr/core/src/test/org/apache/solr/handler/component/FacetPivot2CollectionsTest.java b/solr/core/src/test/org/apache/solr/handler/component/FacetPivot2CollectionsTest.java index 69e370bbab1..ab9eeb3889f 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/FacetPivot2CollectionsTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/FacetPivot2CollectionsTest.java @@ -21,7 +21,6 @@ import java.util.HashMap; import java.util.Map; import java.util.Random; - import org.apache.commons.lang3.RandomStringUtils; import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4.SuppressSSL; @@ -39,9 +38,9 @@ import org.junit.BeforeClass; /** - * Testing of pivot facets on multiple collections. - * This class mainly aims to test that there is no issue with stop words on multiple collections. - * Facets pivot counts are validated elsewhere. There's no validation of counts here. + * Testing of pivot facets on multiple collections. 
This class mainly aims to test that there is no + * issue with stop words on multiple collections. Facet pivot counts are validated elsewhere. + * There's no validation of counts here. */ @SuppressSSL public class FacetPivot2CollectionsTest extends SolrCloudTestCase { @@ -62,7 +61,6 @@ public class FacetPivot2CollectionsTest extends SolrCloudTestCase { private static final String DYNAMIC_STR_FIELD_MULTIVALUED = "strFieldMulti_s"; private static final String DYNAMIC_STR_FIELD_NOT_MULTIVALUED = "strFieldSingle_s1"; - @BeforeClass public static void setupCluster() throws Exception { // create and configure cluster @@ -70,11 +68,13 @@ public static void setupCluster() throws Exception { .addConfig(COLL_A, configset("different-stopwords" + File.separator + COLL_A)) .addConfig(COLL_B, configset("different-stopwords" + File.separator + COLL_B)) .configure(); - + try { - CollectionAdminResponse responseA = CollectionAdminRequest.createCollection(COLL_A,COLL_A,1,1).process(cluster.getSolrClient()); + CollectionAdminResponse responseA = + CollectionAdminRequest.createCollection(COLL_A, COLL_A, 1, 1) + .process(cluster.getSolrClient()); NamedList result = responseA.getResponse(); - if(result.get("failure") != null) { + if (result.get("failure") != null) { fail("Collection A creation failed : " + result.get("failure")); } } catch (SolrException e) { @@ -82,21 +82,25 @@ public static void setupCluster() throws Exception { } try { - CollectionAdminResponse responseB = CollectionAdminRequest.createCollection(COLL_B,COLL_B,1,1).process(cluster.getSolrClient()); + CollectionAdminResponse responseB = + CollectionAdminRequest.createCollection(COLL_B, COLL_B, 1, 1) + .process(cluster.getSolrClient()); NamedList result = responseB.getResponse(); - if(result.get("failure") != null) { + if (result.get("failure") != null) { fail("Collection B creation failed : " + result.get("failure")); } - }catch (SolrException e) { - fail("Collection B creation failed : " + e.getMessage()); + } catch (SolrException e) { + fail("Collection B creation failed : " + e.getMessage()); } - - CollectionAdminResponse response = CollectionAdminRequest.createAlias(ALIAS, COLL_A+","+COLL_B).process(cluster.getSolrClient()); + + CollectionAdminResponse response = + CollectionAdminRequest.createAlias(ALIAS, COLL_A + "," + COLL_B) + .process(cluster.getSolrClient()); NamedList result = response.getResponse(); - if(result.get("failure") != null) { + if (result.get("failure") != null) { fail("Alias creation failed : " + result.get("failure")); } - + index(COLL_A, 10); index(COLL_B, 10); } @@ -105,16 +109,25 @@ public static void setupCluster() throws Exception { public static void tearDownCluster() throws Exception { shutdownCluster(); } - + public void testOneCollectionPivotName() throws SolrServerException, IOException { - SolrParams params = params("q", "*:*", "wt", "xml", - "rows", "0", - "facet", "true", - "facet.field", SUBJECT_TXT_FIELD_MULTIVALUED, - "facet.pivot", NAME_TXT_FIELD_NOT_MULTIVALUED); + SolrParams params = + params( + "q", + "*:*", + "wt", + "xml", + "rows", + "0", + "facet", + "true", + "facet.field", + SUBJECT_TXT_FIELD_MULTIVALUED, + "facet.pivot", + NAME_TXT_FIELD_NOT_MULTIVALUED); QueryResponse response = cluster.getSolrClient().query(COLL_A, params); NamedList result = response.getResponse(); - if(result.get("facet_counts") == null) { + if (result.get("facet_counts") == null) { /* * Can happen if the random string that is used as facet pivot value is a stopword, or an empty string. 
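 * In short, both cases reduce to the same call sequence (an illustrative sketch built from the
 * trace below, not an excerpt from PivotFacetProcessor):
 *   Query query = ft.getFieldQuery(null, field, value); // null for a stopword, "name:" for ""
 *   searcher.getDocSet(query, base);                    // -> NPE
 * Either way no "facet_counts" section makes it into the response, hence this check.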
* PivotFacetProcessor.getDocSet @@ -122,7 +135,7 @@ public void testOneCollectionPivotName() throws SolrServerException, IOException * -> returns null * searcher.getDocSet(query, base); * -> throws NPE - * + * * ft.getFieldQuery(null, field, "") // empty str * -> returned query= name: * searcher.getDocSet(query, base); @@ -131,140 +144,226 @@ public void testOneCollectionPivotName() throws SolrServerException, IOException fail("Facet pivot on one collection failed"); } } - + public void testOneCollectionPivotInt() throws SolrServerException, IOException { - SolrParams params = params("q", "*:*", "wt", "xml", - "rows", "0", - "facet", "true", - "facet.field", SUBJECT_TXT_FIELD_MULTIVALUED, - "facet.pivot", DYNAMIC_INT_FIELD_NOT_MULTIVALUED); + SolrParams params = + params( + "q", + "*:*", + "wt", + "xml", + "rows", + "0", + "facet", + "true", + "facet.field", + SUBJECT_TXT_FIELD_MULTIVALUED, + "facet.pivot", + DYNAMIC_INT_FIELD_NOT_MULTIVALUED); QueryResponse response = cluster.getSolrClient().query(COLL_A, params); NamedList result = response.getResponse(); - if(result.get("facet_counts") == null) { + if (result.get("facet_counts") == null) { fail("Facet pivot on one collection failed"); } } - + public void testOneCollectionPivotFloat() throws SolrServerException, IOException { - SolrParams params = params("q", "*:*", "wt", "xml", - "rows", "0", - "facet", "true", - "facet.field", SUBJECT_TXT_FIELD_MULTIVALUED, - "facet.pivot", DYNAMIC_FLOAT_FIELD_NOT_MULTIVALUED); + SolrParams params = + params( + "q", + "*:*", + "wt", + "xml", + "rows", + "0", + "facet", + "true", + "facet.field", + SUBJECT_TXT_FIELD_MULTIVALUED, + "facet.pivot", + DYNAMIC_FLOAT_FIELD_NOT_MULTIVALUED); QueryResponse response = cluster.getSolrClient().query(COLL_A, params); NamedList result = response.getResponse(); - if(result.get("facet_counts") == null) { + if (result.get("facet_counts") == null) { fail("Facet pivot on one collection failed"); } } - + public void testOneCollectionPivotDate() throws SolrServerException, IOException { - SolrParams params = params("q", "*:*", "wt", "xml", - "rows", "0", - "facet", "true", - "facet.field", SUBJECT_TXT_FIELD_MULTIVALUED, - "facet.pivot", DYNAMIC_DATE_FIELD_NOT_MULTIVALUED); + SolrParams params = + params( + "q", + "*:*", + "wt", + "xml", + "rows", + "0", + "facet", + "true", + "facet.field", + SUBJECT_TXT_FIELD_MULTIVALUED, + "facet.pivot", + DYNAMIC_DATE_FIELD_NOT_MULTIVALUED); QueryResponse response = cluster.getSolrClient().query(COLL_A, params); NamedList result = response.getResponse(); - if(result.get("facet_counts") == null) { + if (result.get("facet_counts") == null) { fail("Facet pivot on one collection failed"); } } - + public void testOneCollectionPivotTitleFileType() throws SolrServerException, IOException { - SolrParams params = params("q", "*:*", "wt", "xml", - "rows", "0", - "facet", "true", - "facet.field", TITLE_TXT_FIELD_NOT_MULTIVALUED, - "facet.pivot", String.join(",", FILETYPE_TXT_FIELD_NOT_MULTIVALUED,TITLE_TXT_FIELD_NOT_MULTIVALUED)); + SolrParams params = + params( + "q", + "*:*", + "wt", + "xml", + "rows", + "0", + "facet", + "true", + "facet.field", + TITLE_TXT_FIELD_NOT_MULTIVALUED, + "facet.pivot", + String.join(",", FILETYPE_TXT_FIELD_NOT_MULTIVALUED, TITLE_TXT_FIELD_NOT_MULTIVALUED)); QueryResponse response = cluster.getSolrClient().query(COLL_A, params); NamedList result = response.getResponse(); - if(result.get("facet_counts") == null) { + if (result.get("facet_counts") == null) { fail("Facet pivot on one collection failed"); } } - + public 
void testAliasPivotName() throws SolrServerException, IOException { - SolrParams params = params("q", "*:*", "wt", "xml", - "rows", "0", - "facet", "true", - "facet.field", SUBJECT_TXT_FIELD_MULTIVALUED, - "facet.pivot", NAME_TXT_FIELD_NOT_MULTIVALUED); + SolrParams params = + params( + "q", + "*:*", + "wt", + "xml", + "rows", + "0", + "facet", + "true", + "facet.field", + SUBJECT_TXT_FIELD_MULTIVALUED, + "facet.pivot", + NAME_TXT_FIELD_NOT_MULTIVALUED); final QueryResponse response = cluster.getSolrClient().query(ALIAS, params); NamedList result = response.getResponse(); - if(result.get("facet_counts") == null) { + if (result.get("facet_counts") == null) { fail("Facet pivot on the alias failed"); } } - + public void testAliasPivotType() throws SolrServerException, IOException { - SolrParams params = params("q", "*:*", "wt", "xml", - "rows", "0", - "facet", "true", - "facet.field", SUBJECT_TXT_FIELD_MULTIVALUED, - "facet.pivot", FILETYPE_TXT_FIELD_NOT_MULTIVALUED); + SolrParams params = + params( + "q", + "*:*", + "wt", + "xml", + "rows", + "0", + "facet", + "true", + "facet.field", + SUBJECT_TXT_FIELD_MULTIVALUED, + "facet.pivot", + FILETYPE_TXT_FIELD_NOT_MULTIVALUED); final QueryResponse response = cluster.getSolrClient().query(ALIAS, params); NamedList result = response.getResponse(); - if(result.get("facet_counts") == null) { + if (result.get("facet_counts") == null) { fail("Facet pivot on the alias failed"); } } - + public void testAliasPivotFloat() throws SolrServerException, IOException { - SolrParams params = params("q", "*:*", "wt", "xml", - "rows", "0", - "facet", "true", - "facet.field", SUBJECT_TXT_FIELD_MULTIVALUED, - "facet.pivot", DYNAMIC_FLOAT_FIELD_NOT_MULTIVALUED); + SolrParams params = + params( + "q", + "*:*", + "wt", + "xml", + "rows", + "0", + "facet", + "true", + "facet.field", + SUBJECT_TXT_FIELD_MULTIVALUED, + "facet.pivot", + DYNAMIC_FLOAT_FIELD_NOT_MULTIVALUED); final QueryResponse response = cluster.getSolrClient().query(ALIAS, params); NamedList result = response.getResponse(); - if(result.get("facet_counts") == null) { + if (result.get("facet_counts") == null) { fail("Facet pivot on the alias failed"); } } - + public void testAliasPivotDate() throws SolrServerException, IOException { - SolrParams params = params("q", "*:*", "wt", "xml", - "rows", "0", - "facet", "true", - "facet.field", SUBJECT_TXT_FIELD_MULTIVALUED, - "facet.pivot", DYNAMIC_FLOAT_FIELD_NOT_MULTIVALUED); + SolrParams params = + params( + "q", + "*:*", + "wt", + "xml", + "rows", + "0", + "facet", + "true", + "facet.field", + SUBJECT_TXT_FIELD_MULTIVALUED, + "facet.pivot", + DYNAMIC_DATE_FIELD_NOT_MULTIVALUED); final QueryResponse response = cluster.getSolrClient().query(ALIAS, params); NamedList result = response.getResponse(); - if(result.get("facet_counts") == null) { + if (result.get("facet_counts") == null) { fail("Facet pivot on the alias failed"); } } - + public void testAliasPivotTitleFileType() throws SolrServerException, IOException { - SolrParams params = params("q", "*:*", "wt", "xml", - "rows", "0", - "facet", "true", - "facet.field", TITLE_TXT_FIELD_NOT_MULTIVALUED, - "facet.pivot", String.join(",", FILETYPE_TXT_FIELD_NOT_MULTIVALUED,TITLE_TXT_FIELD_NOT_MULTIVALUED)); + SolrParams params = + params( + "q", + "*:*", + "wt", + "xml", + "rows", + "0", + "facet", + "true", + "facet.field", + TITLE_TXT_FIELD_NOT_MULTIVALUED, + "facet.pivot", + String.join(",", FILETYPE_TXT_FIELD_NOT_MULTIVALUED, TITLE_TXT_FIELD_NOT_MULTIVALUED)); QueryResponse response = 
cluster.getSolrClient().query(ALIAS, params); NamedList result = response.getResponse(); - if(result.get("facet_counts") == null) { + if (result.get("facet_counts") == null) { fail("Facet pivot on the alias failed"); } } - - - private static void index(final String collection, final int numDocs) throws SolrServerException, IOException { - for(int i=0; i < numDocs; i++) { - final Map fieldValues= addDocFields(i); + + private static void index(final String collection, final int numDocs) + throws SolrServerException, IOException { + for (int i = 0; i < numDocs; i++) { + final Map fieldValues = addDocFields(i); final SolrInputDocument solrDoc = new SolrInputDocument(fieldValues); - solrDoc.addField(DYNAMIC_DATE_FIELD_NOT_MULTIVALUED, skewed(randomSkewedDate(), randomDate())); - solrDoc.addField(DYNAMIC_INT_FIELD_NOT_MULTIVALUED, skewed(TestUtil.nextInt(random(), 0, 100), random().nextInt())); - solrDoc.addField(DYNAMIC_FLOAT_FIELD_NOT_MULTIVALUED, skewed(1.0F / random().nextInt(25), random().nextFloat() * random().nextInt())); + solrDoc.addField( + DYNAMIC_DATE_FIELD_NOT_MULTIVALUED, skewed(randomSkewedDate(), randomDate())); + solrDoc.addField( + DYNAMIC_INT_FIELD_NOT_MULTIVALUED, + skewed(TestUtil.nextInt(random(), 0, 100), random().nextInt())); + solrDoc.addField( + DYNAMIC_FLOAT_FIELD_NOT_MULTIVALUED, + skewed(1.0F / random().nextInt(25), random().nextFloat() * random().nextInt())); cluster.getSolrClient().add(collection, solrDoc); } cluster.getSolrClient().commit(COLL_A); cluster.getSolrClient().commit(COLL_B); } - private static Map addDocFields(final int id) { - final Map fieldValues = new HashMap<>(); + private static Map addDocFields(final int id) { + final Map fieldValues = new HashMap<>(); final SolrInputField idField = new SolrInputField(ID_FIELD); idField.setValue(String.valueOf(id)); fieldValues.put(ID_FIELD, idField); @@ -277,7 +376,8 @@ private static Map addDocFields(final int id) { final SolrInputField strField2 = new SolrInputField(DYNAMIC_STR_FIELD_MULTIVALUED); final Random random = random(); textField1.setValue(randomText(random, 10, true) + ".txt"); // make it look like a file name - textField2.setValue(new String[] {randomText(random, 25, false), randomText(random, 25, false)}); + textField2.setValue( + new String[] {randomText(random, 25, false), randomText(random, 25, false)}); textField3.setValue(randomText(random, 10, true)); textField4.setValue(randomText(random, 1, false)); textField5.setValue(new String[] {randomText(random, 2, false), randomText(random, 5, false)}); @@ -285,7 +385,7 @@ private static Map addDocFields(final int id) { strField2.addValue(new String[] {randomText(random, 1, false), randomText(random, 1, false)}); fieldValues.put(NAME_TXT_FIELD_NOT_MULTIVALUED, textField1); fieldValues.put(TXT_FIELD_MULTIVALUED, textField2); - if(random.nextInt(10) % 3 == 0) { // every now and then, a doc without a 'title' field. + if (random.nextInt(10) % 3 == 0) { // every now and then, a doc without a 'title' field. 
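// (nextInt(10) % 3 == 0 holds for draws of 0, 3, 6 and 9, so only about 4 docs in 10 actually
// get a title; the rest carry no value in that field, which the title/file-type pivot tests
// above have to tolerate)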
fieldValues.put(TITLE_TXT_FIELD_NOT_MULTIVALUED, textField3); } fieldValues.put(FILETYPE_TXT_FIELD_NOT_MULTIVALUED, textField4); @@ -295,14 +395,15 @@ private static Map addDocFields(final int id) { return fieldValues; } - private static String randomText(final Random random, final int maxWords, final boolean addNonAlphaChars) { + private static String randomText( + final Random random, final int maxWords, final boolean addNonAlphaChars) { final StringBuilder builder = new StringBuilder(); int words = random.nextInt(maxWords); - while(words-- > 0) { + while (words-- > 0) { String word = ""; - if(addNonAlphaChars && (words % 3 == 0)) { + if (addNonAlphaChars && (words % 3 == 0)) { word = RandomStringUtils.random(random.nextInt(3), "\\p{Digit}\\p{Punct}"); - System.out.println("generated non-alpha string:" + word); + System.out.println("generated non-alpha string:" + word); } else { word = RandomStringUtils.randomAlphabetic(1, 10); } diff --git a/solr/core/src/test/org/apache/solr/handler/component/FacetPivotSmallTest.java b/solr/core/src/test/org/apache/solr/handler/component/FacetPivotSmallTest.java index 18131839e61..f5d84b0b0e2 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/FacetPivotSmallTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/FacetPivotSmallTest.java @@ -23,15 +23,14 @@ import org.apache.solr.request.SolrQueryRequest; import org.junit.BeforeClass; -/** - * Single node testing of pivot facets - */ +/** Single node testing of pivot facets */ public class FacetPivotSmallTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); initCore("solrconfig.xml", "schema11.xml"); } @@ -42,9 +41,7 @@ public void setUp() throws Exception { assertU(commit()); } - /** - * we don't support comma's in the "stats" local param ... yet: SOLR-6663 - */ + /** we don't support commas in the "stats" local param ... yet: SOLR-6663 */ public void testStatsTagHasComma() throws Exception { if (random().nextBoolean()) { @@ -52,27 +49,36 @@ public void testStatsTagHasComma() throws Exception { index(); } - assertQEx("Can't use multiple tags in stats local param until SOLR-6663 is decided", - req("q","*:*", "facet", "true", - "stats", "true", - "stats.field", "{!tag=foo}price_ti", - "stats.field", "{!tag=bar}id", - "facet.pivot", "{!stats=foo,bar}place_t,company_t"), - 400); + assertQEx( + "Can't use multiple tags in stats local param until SOLR-6663 is decided", + req( + "q", + "*:*", + "facet", + "true", + "stats", + "true", + "stats.field", + "{!tag=foo}price_ti", + "stats.field", + "{!tag=bar}id", + "facet.pivot", + "{!stats=foo,bar}place_t,company_t"), + 400); } - /** - * if bogus stats are requested, the pivots should still work - */ + /** if bogus stats are requested, the pivots should still work */ public void testBogusStatsTag() throws Exception { index(); - assertQ(req("q","*:*", "facet", "true", - "facet.pivot", "{!stats=bogus}place_t,company_t") - // check we still get pivots... - , "//arr[@name='place_t,company_t']/lst[str[@name='value'][.='dublin']]" - // .. 
but sanity check we don't have any stats - , "count(//arr[@name='place_t,company_t']/lst[str[@name='value'][.='dublin']]/lst[@name='stats'])=0"); + assertQ( + req("q", "*:*", "facet", "true", "facet.pivot", "{!stats=bogus}place_t,company_t") + // check we still get pivots... + , + "//arr[@name='place_t,company_t']/lst[str[@name='value'][.='dublin']]" + // .. but sanity check we don't have any stats + , + "count(//arr[@name='place_t,company_t']/lst[str[@name='value'][.='dublin']]/lst[@name='stats'])=0"); } public void testPivotFacetUnsorted() throws Exception { @@ -84,70 +90,126 @@ public void testPivotFacetUnsorted() throws Exception { params.add("facet.pivot", "place_t,company_t"); SolrQueryRequest req = req(params); - final String facetPivotPrefix = "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='place_t,company_t']/lst"; - assertQ(req, facetPivotPrefix + "/str[@name='field'][.='place_t']", + final String facetPivotPrefix = + "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='place_t,company_t']/lst"; + assertQ( + req, + facetPivotPrefix + "/str[@name='field'][.='place_t']", // dublin - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='microsoft']", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=4]", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=4]", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='null']", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=3]", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='bbc']", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='microsoft']", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=4]", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=4]", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='null']", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=3]", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='bbc']", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", // london - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='null']", - facetPivotPrefix + 
"[str[@name='value'][.='london']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=3]", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='bbc']", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='microsoft']", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='null']", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=3]", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='bbc']", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='microsoft']", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=2]", // cardiff - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='polecat']", - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='bbc']", - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='microsoft']", - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='null']", - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='fujitsu']", - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", + facetPivotPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='polecat']", + facetPivotPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", + facetPivotPrefix + + 
"[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='bbc']", + facetPivotPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='microsoft']", + facetPivotPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='null']", + facetPivotPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='fujitsu']", + facetPivotPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", // krakow - facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='null']", - facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", - facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='bbc']", - facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='polecat']", - facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", - facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=1]", - facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='microsoft']", - facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='null']", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='bbc']", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='polecat']", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=1]", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='microsoft']", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", // la - facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='fujitsu']", - facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='microsoft']", - facetPivotPrefix + 
"[str[@name='value'][.='la']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='null']", - facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='polecat']", - facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='bbc']", - facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", + facetPivotPrefix + + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='fujitsu']", + facetPivotPrefix + + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='microsoft']", + facetPivotPrefix + + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='null']", + facetPivotPrefix + + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='polecat']", + facetPivotPrefix + + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='bbc']", + facetPivotPrefix + + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", // cork - facetPivotPrefix + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='fujitsu']", - facetPivotPrefix + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=1]", - facetPivotPrefix + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='rte']", - facetPivotPrefix + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=1]" - ); + facetPivotPrefix + + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='fujitsu']", + facetPivotPrefix + + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=1]", + facetPivotPrefix + + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='rte']", + facetPivotPrefix + + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=1]"); } public void testPivotFacetStatsUnsortedTagged() throws Exception { @@ -161,11 +223,19 @@ public void testPivotFacetStatsUnsortedTagged() throws Exception { params.add("stats.field", "{!key=avg_price tag=s1}price_ti"); SolrQueryRequest req = req(params); - final String statsPrefix = "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='place_t,company_t']/lst"; - String dublinMicrosoftStats = statsPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[str[@name='value'][.='microsoft']]/lst[@name='stats']/lst[@name='stats_fields']/lst[@name='avg_price']"; - String cardiffPolecatStats = statsPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[str[@name='value'][.='polecat']]/lst[@name='stats']/lst[@name='stats_fields']/lst[@name='avg_price']"; - String krakowFujitsuStats = statsPrefix + 
"[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[str[@name='value'][.='fujitsu']]/lst[@name='stats']/lst[@name='stats_fields']/lst[@name='avg_price']"; - assertQ(req, + final String statsPrefix = + "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='place_t,company_t']/lst"; + String dublinMicrosoftStats = + statsPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[str[@name='value'][.='microsoft']]/lst[@name='stats']/lst[@name='stats_fields']/lst[@name='avg_price']"; + String cardiffPolecatStats = + statsPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[str[@name='value'][.='polecat']]/lst[@name='stats']/lst[@name='stats_fields']/lst[@name='avg_price']"; + String krakowFujitsuStats = + statsPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[str[@name='value'][.='fujitsu']]/lst[@name='stats']/lst[@name='stats_fields']/lst[@name='avg_price']"; + assertQ( + req, dublinMicrosoftStats + "/double[@name='min'][.=15.0]", dublinMicrosoftStats + "/double[@name='max'][.=29.0]", dublinMicrosoftStats + "/long[@name='count'][.=3]", @@ -176,7 +246,6 @@ public void testPivotFacetStatsUnsortedTagged() throws Exception { dublinMicrosoftStats + "/double[@name='stddev'][.=7.211102550927978]", // if new stats are supported, this will break - update test to assert values for each "count(" + dublinMicrosoftStats + "/*)=8", - cardiffPolecatStats + "/double[@name='min'][.=15.0]", cardiffPolecatStats + "/double[@name='max'][.=39.0]", cardiffPolecatStats + "/long[@name='count'][.=2]", @@ -187,7 +256,6 @@ public void testPivotFacetStatsUnsortedTagged() throws Exception { cardiffPolecatStats + "/double[@name='stddev'][.=16.97056274847714]", // if new stats are supported, this will break - update test to assert values for each "count(" + cardiffPolecatStats + "/*)=8", - krakowFujitsuStats + "/null[@name='min']", krakowFujitsuStats + "/null[@name='max']", krakowFujitsuStats + "/long[@name='count'][.=0]", @@ -197,12 +265,9 @@ public void testPivotFacetStatsUnsortedTagged() throws Exception { krakowFujitsuStats + "/double[@name='mean'][.='NaN']", krakowFujitsuStats + "/double[@name='stddev'][.=0.0]", // if new stats are supported, this will break - update test to assert values for each - "count(" + krakowFujitsuStats + "/*)=8" - - ); + "count(" + krakowFujitsuStats + "/*)=8"); } - public void testPivotFacetSortedCount() throws Exception { index(); @@ -212,79 +277,133 @@ public void testPivotFacetSortedCount() throws Exception { params.add("facet.pivot", "place_t,company_t"); // Test sorting by count - //TODO clarify why facet count active by default - // The default is count if facet.limit is greater than 0, index otherwise, but facet.limit was not defined + // TODO clarify why facet count active by default + // The default is count if facet.limit is greater than 0, index otherwise, but facet.limit was + // not defined params.set(FacetParams.FACET_SORT, FacetParams.FACET_SORT_COUNT); - final String facetPivotPrefix = "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='place_t,company_t']/lst"; + final String facetPivotPrefix = + "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='place_t,company_t']/lst"; SolrQueryRequest req = req(params); - assertQ(req, facetPivotPrefix + "/str[@name='field'][.='place_t']", + assertQ( + req, + facetPivotPrefix + "/str[@name='field'][.='place_t']", // dublin - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='microsoft']", - 
facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=4]", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=4]", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='null']", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=3]", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='bbc']", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='microsoft']", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=4]", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=4]", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='null']", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=3]", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='bbc']", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", // london - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='null']", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=3]", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='bbc']", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='microsoft']", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='null']", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", + facetPivotPrefix + + 
"[str[@name='value'][.='london']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=3]", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='bbc']", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='microsoft']", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=2]", // cardiff - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='polecat']", - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='bbc']", - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='microsoft']", - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='null']", - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='fujitsu']", - facetPivotPrefix + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", + facetPivotPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='polecat']", + facetPivotPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", + facetPivotPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='bbc']", + facetPivotPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='microsoft']", + facetPivotPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='null']", + facetPivotPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='fujitsu']", + facetPivotPrefix + + "[str[@name='value'][.='cardiff']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", // krakow - facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='null']", - facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", - facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='bbc']", - facetPivotPrefix + 
"[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='polecat']", - facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", - facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=1]", - facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='microsoft']", - facetPivotPrefix + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='null']", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='bbc']", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='polecat']", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=1]", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='microsoft']", + facetPivotPrefix + + "[str[@name='value'][.='krakow']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", // la - facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='fujitsu']", - facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='microsoft']", - facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='null']", - facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='polecat']", - facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", - facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='bbc']", - facetPivotPrefix + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", + facetPivotPrefix + + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='fujitsu']", + facetPivotPrefix + + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='microsoft']", + facetPivotPrefix + + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='null']", + facetPivotPrefix + + 
"[str[@name='value'][.='la']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='polecat']", + facetPivotPrefix + + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]", + facetPivotPrefix + + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[5]/str[@name='value'][.='bbc']", + facetPivotPrefix + + "[str[@name='value'][.='la']]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", // cork - facetPivotPrefix + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='fujitsu']", - facetPivotPrefix + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=1]", - facetPivotPrefix + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='rte']", - facetPivotPrefix + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=1]" - ); - - + facetPivotPrefix + + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='fujitsu']", + facetPivotPrefix + + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=1]", + facetPivotPrefix + + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='rte']", + facetPivotPrefix + + "[str[@name='value'][.='cork']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=1]"); } - public void testPivotFacetLimit() throws Exception { index(); @@ -296,20 +415,30 @@ public void testPivotFacetLimit() throws Exception { params.set(FacetParams.FACET_SORT, FacetParams.FACET_SORT_COUNT); params.set(FacetParams.FACET_LIMIT, 2); - final String facetPivotPrefix = "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='place_t,company_t']/lst"; + final String facetPivotPrefix = + "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='place_t,company_t']/lst"; SolrQueryRequest req = req(params); - assertQ(req, facetPivotPrefix + "/str[@name='field'][.='place_t']", + assertQ( + req, + facetPivotPrefix + "/str[@name='field'][.='place_t']", // dublin - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='microsoft']", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=4]", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=4]", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='microsoft']", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=4]", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=4]", // london - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='null']", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", - facetPivotPrefix + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=3]" - ); + facetPivotPrefix + + 
"[str[@name='value'][.='london']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='null']", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=3]", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", + facetPivotPrefix + + "[str[@name='value'][.='london']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=3]"); } public void testPivotIndividualFacetLimit() throws Exception { @@ -324,19 +453,29 @@ public void testPivotIndividualFacetLimit() throws Exception { params.set("f.place_t." + FacetParams.FACET_LIMIT, 1); params.set("f.company_t." + FacetParams.FACET_LIMIT, 4); - final String facetPivotPrefix = "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='place_t,company_t']/lst"; + final String facetPivotPrefix = + "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='place_t,company_t']/lst"; SolrQueryRequest req = req(params); - assertQ(req, facetPivotPrefix + "/str[@name='field'][.='place_t']", + assertQ( + req, + facetPivotPrefix + "/str[@name='field'][.='place_t']", // dublin - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='microsoft']", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=4]", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=4]", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='null']", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=3]", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", - facetPivotPrefix + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]" - ); + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/str[@name='value'][.='microsoft']", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[1]/int[@name='count'][.=4]", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/str[@name='value'][.='polecat']", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[2]/int[@name='count'][.=4]", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[3]/str[@name='value'][.='null']", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[3]/int[@name='count'][.=3]", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[4]/str[@name='value'][.='fujitsu']", + facetPivotPrefix + + "[str[@name='value'][.='dublin']]/arr[@name='pivot']/lst[4]/int[@name='count'][.=2]"); } public void testPivotFacetMissing() throws Exception { @@ -344,50 +483,85 @@ public void testPivotFacetMissing() throws Exception { index(); indexMissing(); - SolrParams missingA = params("q", "*:*", - "rows", "0", - "facet", "true", - "facet.pivot", "place_t,company_t", - // default facet.sort - FacetParams.FACET_MISSING, "true"); - - final String facetPivotPrefix = "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='place_t,company_t']/lst"; + SolrParams missingA = + params( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "place_t,company_t", + // default facet.sort + 
FacetParams.FACET_MISSING, + "true"); + + final String facetPivotPrefix = + "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='place_t,company_t']/lst"; SolrQueryRequest req = req(missingA); - assertQ(req, facetPivotPrefix + "/arr[@name='pivot'][count(.) > 0]", // not enough values for pivot - facetPivotPrefix + "[7]/null[@name='value'][.='']", // not the missing place value - facetPivotPrefix + "[7]/int[@name='count'][.=2]", // wrong missing place count - facetPivotPrefix + "[7]/arr[@name='pivot'][count(.) > 0]", // not enough sub-pivots for missing place - facetPivotPrefix + "[7]/arr[@name='pivot']/lst[6]/null[@name='value'][.='']", // not the missing company value - facetPivotPrefix + "[7]/arr[@name='pivot']/lst[6]/int[@name='count'][.=1]", // wrong missing company count - facetPivotPrefix + "[7]/arr[@name='pivot']/lst[6][not(arr[@name='pivot'])]" // company shouldn't have sub-pivots - ); - - SolrParams missingB = SolrParams.wrapDefaults(missingA, - params(FacetParams.FACET_LIMIT, "4", - "facet.sort", "index")); - + assertQ( + req, + facetPivotPrefix + "/arr[@name='pivot'][count(.) > 0]", // not enough values for pivot + facetPivotPrefix + "[7]/null[@name='value'][.='']", // not the missing place value + facetPivotPrefix + "[7]/int[@name='count'][.=2]", // wrong missing place count + facetPivotPrefix + + "[7]/arr[@name='pivot'][count(.) > 0]", // not enough sub-pivots for missing place + facetPivotPrefix + + "[7]/arr[@name='pivot']/lst[6]/null[@name='value'][.='']", // not the missing company + // value + facetPivotPrefix + + "[7]/arr[@name='pivot']/lst[6]/int[@name='count'][.=1]", // wrong missing company + // count + facetPivotPrefix + + "[7]/arr[@name='pivot']/lst[6][not(arr[@name='pivot'])]" // company shouldn't have + // sub-pivots + ); + + SolrParams missingB = + SolrParams.wrapDefaults( + missingA, params(FacetParams.FACET_LIMIT, "4", "facet.sort", "index")); req = req(missingB); - assertQ(req, facetPivotPrefix + "/arr[@name='pivot'][count(.) > 0]", // not enough values for pivot - facetPivotPrefix + "[5]/null[@name='value'][.='']", // not the missing place value - facetPivotPrefix + "[5]/int[@name='count'][.=2]", // wrong missing place count - facetPivotPrefix + "[5]/arr[@name='pivot'][count(.) > 0]", // not enough sub-pivots for missing place - facetPivotPrefix + "[5]/arr[@name='pivot']/lst[5]/null[@name='value'][.='']", // not the missing company value - facetPivotPrefix + "[5]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", // wrong missing company count - facetPivotPrefix + "[5]/arr[@name='pivot']/lst[5][not(arr[@name='pivot'])]" // company shouldn't have sub-pivots - ); - - SolrParams missingC = SolrParams.wrapDefaults(missingA, - params(FacetParams.FACET_LIMIT, "0", "facet.sort", "index")); - - assertQ(req(missingC), facetPivotPrefix + "/arr[@name='pivot'][count(.) > 0]", // not enough values for pivot - facetPivotPrefix + "[1]/null[@name='value'][.='']", // not the missing place value - facetPivotPrefix + "[1]/int[@name='count'][.=2]", // wrong missing place count - facetPivotPrefix + "[1]/arr[@name='pivot'][count(.) 
> 0]", // not enough sub-pivots for missing place - facetPivotPrefix + "[1]/arr[@name='pivot']/lst[1]/null[@name='value'][.='']", // not the missing company value - facetPivotPrefix + "[1]/arr[@name='pivot']/lst[1]/int[@name='count'][.=1]", // wrong missing company count - facetPivotPrefix + "[1]/arr[@name='pivot']/lst[1][not(arr[@name='pivot'])]" // company shouldn't have sub-pivots - ); + assertQ( + req, + facetPivotPrefix + "/arr[@name='pivot'][count(.) > 0]", // not enough values for pivot + facetPivotPrefix + "[5]/null[@name='value'][.='']", // not the missing place value + facetPivotPrefix + "[5]/int[@name='count'][.=2]", // wrong missing place count + facetPivotPrefix + + "[5]/arr[@name='pivot'][count(.) > 0]", // not enough sub-pivots for missing place + facetPivotPrefix + + "[5]/arr[@name='pivot']/lst[5]/null[@name='value'][.='']", // not the missing company + // value + facetPivotPrefix + + "[5]/arr[@name='pivot']/lst[5]/int[@name='count'][.=1]", // wrong missing company + // count + facetPivotPrefix + + "[5]/arr[@name='pivot']/lst[5][not(arr[@name='pivot'])]" // company shouldn't have + // sub-pivots + ); + + SolrParams missingC = + SolrParams.wrapDefaults( + missingA, params(FacetParams.FACET_LIMIT, "0", "facet.sort", "index")); + + assertQ( + req(missingC), + facetPivotPrefix + "/arr[@name='pivot'][count(.) > 0]", // not enough values for pivot + facetPivotPrefix + "[1]/null[@name='value'][.='']", // not the missing place value + facetPivotPrefix + "[1]/int[@name='count'][.=2]", // wrong missing place count + facetPivotPrefix + + "[1]/arr[@name='pivot'][count(.) > 0]", // not enough sub-pivots for missing place + facetPivotPrefix + + "[1]/arr[@name='pivot']/lst[1]/null[@name='value'][.='']", // not the missing company + // value + facetPivotPrefix + + "[1]/arr[@name='pivot']/lst[1]/int[@name='count'][.=1]", // wrong missing company + // count + facetPivotPrefix + + "[1]/arr[@name='pivot']/lst[1][not(arr[@name='pivot'])]" // company shouldn't have + // sub-pivots + ); } public void testPivotFacetIndexSortMincountAndLimit() throws Exception { @@ -395,21 +569,35 @@ public void testPivotFacetIndexSortMincountAndLimit() throws Exception { index(); indexMissing(); - for (SolrParams variableParams : new SolrParams[]{ - // we should get the same results regardless of overrequest - params(), - params()}) { - SolrParams p = SolrParams.wrapDefaults(params("q", "*:*", - "rows", "0", - "facet", "true", - "facet.pivot", "company_t", - "facet.sort", "index", - "facet.pivot.mincount", "4", - "facet.limit", "4"), - variableParams); - final String facetPivotPrefix = "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='company_t']"; + for (SolrParams variableParams : + new SolrParams[] { + // we should get the same results regardless of overrequest + params(), params() + }) { + SolrParams p = + SolrParams.wrapDefaults( + params( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "company_t", + "facet.sort", + "index", + "facet.pivot.mincount", + "4", + "facet.limit", + "4"), + variableParams); + final String facetPivotPrefix = + "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='company_t']"; SolrQueryRequest req = req(p); - assertQ(req, facetPivotPrefix + "[count(./lst) = 4]", // not enough values for pivot + assertQ( + req, + facetPivotPrefix + "[count(./lst) = 4]", // not enough values for pivot facetPivotPrefix + "/lst[1]/str[@name='value'][.='fujitsu']", facetPivotPrefix + "/lst[1]/int[@name='count'][.=4]", facetPivotPrefix + 
"/lst[2]/str[@name='value'][.='microsoft']", @@ -417,8 +605,7 @@ public void testPivotFacetIndexSortMincountAndLimit() throws Exception { facetPivotPrefix + "/lst[3]/str[@name='value'][.='null']", facetPivotPrefix + "/lst[3]/int[@name='count'][.=6]", facetPivotPrefix + "/lst[4]/str[@name='value'][.='polecat']", - facetPivotPrefix + "/lst[4]/int[@name='count'][.=6]" - ); + facetPivotPrefix + "/lst[4]/int[@name='count'][.=6]"); } } @@ -427,60 +614,84 @@ public void testPivotFacetIndexSortMincountLimitAndOffset() throws Exception { index(); indexMissing(); - for (SolrParams variableParams : new SolrParams[]{ - // we should get the same results regardless of overrequest - params(), - params()}) { - SolrParams p = SolrParams.wrapDefaults(params("q", "*:*", - "rows", "0", - "facet", "true", - "facet.pivot", "company_t", - "facet.sort", "index", - "facet.pivot.mincount", "4", - "facet.offset", "1", - "facet.limit", "4"), - variableParams); - final String facetPivotPrefix = "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='company_t']"; + for (SolrParams variableParams : + new SolrParams[] { + // we should get the same results regardless of overrequest + params(), params() + }) { + SolrParams p = + SolrParams.wrapDefaults( + params( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.pivot", + "company_t", + "facet.sort", + "index", + "facet.pivot.mincount", + "4", + "facet.offset", + "1", + "facet.limit", + "4"), + variableParams); + final String facetPivotPrefix = + "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='company_t']"; SolrQueryRequest req = req(p); - assertQ(req, facetPivotPrefix + "[count(./lst) = 3]", // asked for 4, but not enough meet the mincount + assertQ( + req, + facetPivotPrefix + "[count(./lst) = 3]", // asked for 4, but not enough meet the mincount facetPivotPrefix + "/lst[1]/str[@name='value'][.='microsoft']", facetPivotPrefix + "/lst[1]/int[@name='count'][.=5]", facetPivotPrefix + "/lst[2]/str[@name='value'][.='null']", facetPivotPrefix + "/lst[2]/int[@name='count'][.=6]", facetPivotPrefix + "/lst[3]/str[@name='value'][.='polecat']", - facetPivotPrefix + "/lst[3]/int[@name='count'][.=6]" - ); + facetPivotPrefix + "/lst[3]/int[@name='count'][.=6]"); } } - public void testPivotFacetIndexSortMincountLimitAndOffsetPermutations() throws Exception { // sort=index + mincount + limit + offset (more permutations) index(); indexMissing(); - for (SolrParams variableParams : new SolrParams[]{ - // all of these combinations should result in the same first value - params("facet.pivot.mincount", "4", - "facet.offset", "2"), - params("facet.pivot.mincount", "5", - "facet.offset", "1"), - params("facet.pivot.mincount", "6", - "facet.offset", "0")}) { - SolrParams p = SolrParams.wrapDefaults(params("q", "*:*", - "rows", "0", - "facet", "true", - "facet.limit", "1", - "facet.sort", "index", - "facet.overrequest.ratio", "0", - "facet.pivot", "company_t"), - variableParams); - final String facetPivotPrefix = "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='company_t']"; + for (SolrParams variableParams : + new SolrParams[] { + // all of these combinations should result in the same first value + params("facet.pivot.mincount", "4", "facet.offset", "2"), + params("facet.pivot.mincount", "5", "facet.offset", "1"), + params("facet.pivot.mincount", "6", "facet.offset", "0") + }) { + SolrParams p = + SolrParams.wrapDefaults( + params( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.limit", + "1", + "facet.sort", + "index", + 
"facet.overrequest.ratio", + "0", + "facet.pivot", + "company_t"), + variableParams); + final String facetPivotPrefix = + "//lst[@name='facet_counts']/lst[@name='facet_pivot']/arr[@name='company_t']"; SolrQueryRequest req = req(p); - assertQ(req, facetPivotPrefix + "[count(./lst) = 1]", // asked for 4, but not enough meet the mincount + assertQ( + req, + facetPivotPrefix + "[count(./lst) = 1]", // asked for 4, but not enough meet the mincount facetPivotPrefix + "/lst[1]/str[@name='value'][.='null']", - facetPivotPrefix + "/lst[1]/int[@name='count'][.=6]" - ); + facetPivotPrefix + "/lst[1]/int[@name='count'][.=6]"); } } @@ -494,29 +705,56 @@ private void index() { // NOTE: we use the literal (4 character) string "null" as a company name // to help ensure there isn't any bugs where the literal string is treated as if it // were a true NULL value. - String[] doc = {"id", "19", "place_t", "cardiff dublin", "company_t", "microsoft polecat", "price_ti", "15"}; + String[] doc = { + "id", "19", "place_t", "cardiff dublin", "company_t", "microsoft polecat", "price_ti", "15" + }; assertU(adoc(doc)); - String[] doc1 = {"id", "20", "place_t", "dublin", "company_t", "polecat microsoft null", "price_ti", "19"}; + String[] doc1 = { + "id", "20", "place_t", "dublin", "company_t", "polecat microsoft null", "price_ti", "19" + }; assertU(adoc(doc1)); - String[] doc2 = {"id", "21", "place_t", "london la dublin", "company_t", - "microsoft fujitsu null polecat", "price_ti", "29"}; + String[] doc2 = { + "id", + "21", + "place_t", + "london la dublin", + "company_t", + "microsoft fujitsu null polecat", + "price_ti", + "29" + }; assertU(adoc(doc2)); - String[] doc3 = {"id", "22", "place_t", "krakow london cardiff", "company_t", - "polecat null bbc", "price_ti", "39"}; + String[] doc3 = { + "id", + "22", + "place_t", + "krakow london cardiff", + "company_t", + "polecat null bbc", + "price_ti", + "39" + }; assertU(adoc(doc3)); String[] doc4 = {"id", "23", "place_t", "london", "company_t", "", "price_ti", "29"}; assertU(adoc(doc4)); String[] doc5 = {"id", "24", "place_t", "la", "company_t", ""}; assertU(adoc(doc5)); - String[] doc6 = {"id", "25", "company_t", "microsoft polecat null fujitsu null bbc", "price_ti", "59"}; + String[] doc6 = { + "id", "25", "company_t", "microsoft polecat null fujitsu null bbc", "price_ti", "59" + }; assertU(adoc(doc6)); String[] doc7 = {"id", "26", "place_t", "krakow", "company_t", "null"}; assertU(adoc(doc7)); - String[] doc8 = {"id", "27", "place_t", "krakow cardiff dublin london la", "company_t", - "null microsoft polecat bbc fujitsu"}; + String[] doc8 = { + "id", + "27", + "place_t", + "krakow cardiff dublin london la", + "company_t", + "null microsoft polecat bbc fujitsu" + }; assertU(adoc(doc8)); - String[] doc9 = {"id", "28", "place_t", "cork", "company_t", - "fujitsu rte"}; + String[] doc9 = {"id", "28", "place_t", "cork", "company_t", "fujitsu rte"}; assertU(adoc(doc9)); assertU(commit()); } diff --git a/solr/core/src/test/org/apache/solr/handler/component/InfixSuggestersTest.java b/solr/core/src/test/org/apache/solr/handler/component/InfixSuggestersTest.java index 17d65c0bb20..db606921231 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/InfixSuggestersTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/InfixSuggestersTest.java @@ -22,7 +22,6 @@ import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import 
org.apache.solr.common.util.ExecutorUtil; @@ -39,60 +38,54 @@ public class InfixSuggestersTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-infixsuggesters.xml","schema.xml"); + initCore("solrconfig-infixsuggesters.xml", "schema.xml"); } @Test public void test2xBuildReload() throws Exception { - for (int i = 0 ; i < 2 ; ++i) { - assertQ(req("qt", rh_analyzing_short, - SuggesterParams.SUGGEST_BUILD_ALL, "true"), - "//str[@name='command'][.='buildAll']" - ); + for (int i = 0; i < 2; ++i) { + assertQ( + req("qt", rh_analyzing_short, SuggesterParams.SUGGEST_BUILD_ALL, "true"), + "//str[@name='command'][.='buildAll']"); h.reload(); } } @Test public void testTwoSuggestersBuildThenReload() throws Exception { - assertQ(req("qt", rh_analyzing_short, - SuggesterParams.SUGGEST_BUILD_ALL, "true"), - "//str[@name='command'][.='buildAll']" - ); + assertQ( + req("qt", rh_analyzing_short, SuggesterParams.SUGGEST_BUILD_ALL, "true"), + "//str[@name='command'][.='buildAll']"); h.reload(); - assertQ(req("qt", rh_blended_short, - SuggesterParams.SUGGEST_BUILD_ALL, "true"), - "//str[@name='command'][.='buildAll']" - ); + assertQ( + req("qt", rh_blended_short, SuggesterParams.SUGGEST_BUILD_ALL, "true"), + "//str[@name='command'][.='buildAll']"); h.reload(); } @Test public void testBuildThen2xReload() throws Exception { - assertQ(req("qt", rh_analyzing_short, - SuggesterParams.SUGGEST_BUILD_ALL, "true"), - "//str[@name='command'][.='buildAll']" - ); + assertQ( + req("qt", rh_analyzing_short, SuggesterParams.SUGGEST_BUILD_ALL, "true"), + "//str[@name='command'][.='buildAll']"); h.reload(); h.reload(); } @Test public void testAnalyzingInfixSuggesterBuildThenReload() throws Exception { - assertQ(req("qt", rh_analyzing_short, - SuggesterParams.SUGGEST_BUILD_ALL, "true"), - "//str[@name='command'][.='buildAll']" - ); + assertQ( + req("qt", rh_analyzing_short, SuggesterParams.SUGGEST_BUILD_ALL, "true"), + "//str[@name='command'][.='buildAll']"); h.reload(); } @Test public void testBlendedInfixSuggesterBuildThenReload() throws Exception { - assertQ(req("qt", rh_blended_short, - SuggesterParams.SUGGEST_BUILD_ALL, "true"), - "//str[@name='command'][.='buildAll']" - ); + assertQ( + req("qt", rh_blended_short, SuggesterParams.SUGGEST_BUILD_ALL, "true"), + "//str[@name='command'][.='buildAll']"); h.reload(); } @@ -101,15 +94,25 @@ public void testReloadDuringBuild() throws Exception { ExecutorService executor = ExecutorUtil.newMDCAwareCachedThreadPool("InfixSuggesterTest"); try { // Build the suggester in the background with a long dictionary - Future<?> job = executor.submit(() -> - expectThrows(RuntimeException.class, SolrCoreState.CoreIsClosedException.class, - () -> assertQ(req("qt", rh_analyzing_long, - SuggesterParams.SUGGEST_BUILD_ALL, "true"), - "//str[@name='command'][.='buildAll']"))); + Future<?> job = + executor.submit( + () -> + expectThrows( + RuntimeException.class, + SolrCoreState.CoreIsClosedException.class, + () -> + assertQ( + req( + "qt", + rh_analyzing_long, + SuggesterParams.SUGGEST_BUILD_ALL, + "true"), + "//str[@name='command'][.='buildAll']"))); h.reload(); // Stop the dictionary's input iterator - System.clearProperty(RandomTestDictionaryFactory.RandomTestDictionary - .getEnabledSysProp("longRandomAnalyzingInfixSuggester")); + System.clearProperty( + RandomTestDictionaryFactory.RandomTestDictionary.getEnabledSysProp( + "longRandomAnalyzingInfixSuggester")); assertNotNull("Should have thrown exception", job.get()); } finally { 
ExecutorUtil.shutdownAndAwaitTermination(executor); @@ -120,36 +123,65 @@ public void testShutdownDuringBuild() throws Exception { ExecutorService executor = ExecutorUtil.newMDCAwareCachedThreadPool("InfixSuggesterTest"); try { - LinkedHashMap<Class<? extends Throwable>, List<Class<? extends Throwable>>> expected = new LinkedHashMap<>(); - expected.put(RuntimeException.class, Arrays.asList (SolrCoreState.CoreIsClosedException.class, SolrException.class, IllegalStateException.class, NullPointerException.class)); + LinkedHashMap<Class<? extends Throwable>, List<Class<? extends Throwable>>> expected = + new LinkedHashMap<>(); + expected.put( + RuntimeException.class, + Arrays.asList( + SolrCoreState.CoreIsClosedException.class, + SolrException.class, + IllegalStateException.class, + NullPointerException.class)); final Throwable[] outerException = new Throwable[1]; // Build the suggester in the background with a long dictionary - Future<?> job = executor.submit(() -> outerException[0] = expectThrowsAnyOf(expected, - () -> assertQ(req("qt", rh_analyzing_long, SuggesterParams.SUGGEST_BUILD_ALL, "true"), - "//str[@name='command'][.='buildAll']"))); + Future<?> job = + executor.submit( + () -> + outerException[0] = + expectThrowsAnyOf( + expected, + () -> + assertQ( + req( + "qt", + rh_analyzing_long, + SuggesterParams.SUGGEST_BUILD_ALL, + "true"), + "//str[@name='command'][.='buildAll']"))); Thread.sleep(100); // TODO: is there a better way to ensure that the build has begun? h.close(); // Stop the dictionary's input iterator - System.clearProperty(RandomTestDictionaryFactory.RandomTestDictionary - .getEnabledSysProp("longRandomAnalyzingInfixSuggester")); + System.clearProperty( + RandomTestDictionaryFactory.RandomTestDictionary.getEnabledSysProp( + "longRandomAnalyzingInfixSuggester")); job.get(); Throwable wrappedException = outerException[0].getCause(); if (wrappedException instanceof SolrException) { String expectedMessage = "SolrCoreState already closed."; - assertTrue("Expected wrapped SolrException message to contain '" + expectedMessage - + "' but message is '" + wrappedException.getMessage() + "'", + assertTrue( + "Expected wrapped SolrException message to contain '" + + expectedMessage + + "' but message is '" + + wrappedException.getMessage() + + "'", wrappedException.getMessage().contains(expectedMessage)); } else if (wrappedException instanceof IllegalStateException - && ! (wrappedException instanceof SolrCoreState.CoreIsClosedException)) { // CoreIsClosedException extends IllegalStateException + && !(wrappedException + instanceof + SolrCoreState + .CoreIsClosedException)) { // CoreIsClosedException extends IllegalStateException String expectedMessage = "Cannot commit on an closed writer. 
Add documents first"; - assertTrue("Expected wrapped IllegalStateException message to contain '" + expectedMessage - + "' but message is '" + wrappedException.getMessage() + "'", + assertTrue( + "Expected wrapped IllegalStateException message to contain '" + + expectedMessage + + "' but message is '" + + wrappedException.getMessage() + + "'", wrappedException.getMessage().contains(expectedMessage)); } } finally { ExecutorUtil.shutdownAndAwaitTermination(executor); - initCore("solrconfig-infixsuggesters.xml","schema.xml"); // put the core back for other tests + initCore("solrconfig-infixsuggesters.xml", "schema.xml"); // put the core back for other tests } } } diff --git a/solr/core/src/test/org/apache/solr/handler/component/MockResponseBuilder.java b/solr/core/src/test/org/apache/solr/handler/component/MockResponseBuilder.java index 9ed7d504136..6ef06bcb6d3 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/MockResponseBuilder.java +++ b/solr/core/src/test/org/apache/solr/handler/component/MockResponseBuilder.java @@ -16,6 +16,8 @@ */ package org.apache.solr.handler.component; +import java.util.ArrayList; +import java.util.List; import org.apache.solr.common.params.ShardParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; @@ -27,44 +29,40 @@ import org.apache.solr.search.SortSpec; import org.mockito.Mockito; -import java.util.ArrayList; -import java.util.List; - public class MockResponseBuilder extends ResponseBuilder { - private MockResponseBuilder(SolrQueryRequest request, SolrQueryResponse response, List components) { - super(request, response, components); - } - - public static MockResponseBuilder create() { - - // the mocks - SolrQueryRequest request = Mockito.mock(SolrQueryRequest.class); - SolrQueryResponse response = Mockito.mock(SolrQueryResponse.class); - IndexSchema indexSchema = Mockito.mock(IndexSchema.class); - SolrParams params = Mockito.mock(SolrParams.class); + private MockResponseBuilder( + SolrQueryRequest request, SolrQueryResponse response, List components) { + super(request, response, components); + } - // SchemaField must be concrete due to field access - SchemaField uniqueIdField = new SchemaField("id", new StrField()); + public static MockResponseBuilder create() { - // we need this because QueryComponent adds a property to it. - NamedList responseHeader = new NamedList<>(); + // the mocks + SolrQueryRequest request = Mockito.mock(SolrQueryRequest.class); + SolrQueryResponse response = Mockito.mock(SolrQueryResponse.class); + IndexSchema indexSchema = Mockito.mock(IndexSchema.class); + SolrParams params = Mockito.mock(SolrParams.class); - // the mock implementations - Mockito.when(request.getSchema()).thenReturn(indexSchema); - Mockito.when(indexSchema.getUniqueKeyField()).thenReturn(uniqueIdField); - Mockito.when(params.getBool(ShardParams.SHARDS_INFO)).thenReturn(false); - Mockito.when(request.getParams()).thenReturn(params); - Mockito.when(response.getResponseHeader()).thenReturn(responseHeader); + // SchemaField must be concrete due to field access + SchemaField uniqueIdField = new SchemaField("id", new StrField()); - List components = new ArrayList<>(); - return new MockResponseBuilder(request, response, components); + // we need this because QueryComponent adds a property to it. 
+ NamedList<Object> responseHeader = new NamedList<>(); - } + // the mock implementations + Mockito.when(request.getSchema()).thenReturn(indexSchema); + Mockito.when(indexSchema.getUniqueKeyField()).thenReturn(uniqueIdField); + Mockito.when(params.getBool(ShardParams.SHARDS_INFO)).thenReturn(false); + Mockito.when(request.getParams()).thenReturn(params); + Mockito.when(response.getResponseHeader()).thenReturn(responseHeader); - public MockResponseBuilder withSortSpec(SortSpec sortSpec) { - this.setSortSpec(sortSpec); - return this; - } + List<SearchComponent> components = new ArrayList<>(); + return new MockResponseBuilder(request, response, components); + } + public MockResponseBuilder withSortSpec(SortSpec sortSpec) { + this.setSortSpec(sortSpec); + return this; + } } diff --git a/solr/core/src/test/org/apache/solr/handler/component/MockShardRequest.java b/solr/core/src/test/org/apache/solr/handler/component/MockShardRequest.java index d33c4e27942..3a3b6585714 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/MockShardRequest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/MockShardRequest.java @@ -16,37 +16,37 @@ */ package org.apache.solr.handler.component; +import java.util.ArrayList; import org.apache.solr.client.solrj.SolrResponse; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.util.NamedList; import org.mockito.Mockito; -import java.util.ArrayList; - public class MockShardRequest extends ShardRequest { - public static MockShardRequest create() { - MockShardRequest mockShardRequest = new MockShardRequest(); - mockShardRequest.responses = new ArrayList<>(); - return mockShardRequest; - } - - public MockShardRequest withShardResponse(NamedList<Object> responseHeader, SolrDocumentList solrDocuments) { - ShardResponse shardResponse = buildShardResponse(responseHeader, solrDocuments); - responses.add(shardResponse); - return this; - } - - private ShardResponse buildShardResponse(NamedList<Object> responseHeader, SolrDocumentList solrDocuments) { - SolrResponse solrResponse = Mockito.mock(SolrResponse.class); - ShardResponse shardResponse = new ShardResponse(); - NamedList<Object> response = new NamedList<>(); - response.add("response", solrDocuments); - shardResponse.setSolrResponse(solrResponse); - response.add("responseHeader", responseHeader); - Mockito.when(solrResponse.getResponse()).thenReturn(response); - - return shardResponse; - } - + public static MockShardRequest create() { + MockShardRequest mockShardRequest = new MockShardRequest(); + mockShardRequest.responses = new ArrayList<>(); + return mockShardRequest; + } + + public MockShardRequest withShardResponse( + NamedList<Object> responseHeader, SolrDocumentList solrDocuments) { + ShardResponse shardResponse = buildShardResponse(responseHeader, solrDocuments); + responses.add(shardResponse); + return this; + } + + private ShardResponse buildShardResponse( + NamedList<Object> responseHeader, SolrDocumentList solrDocuments) { + SolrResponse solrResponse = Mockito.mock(SolrResponse.class); + ShardResponse shardResponse = new ShardResponse(); + NamedList<Object> response = new NamedList<>(); + response.add("response", solrDocuments); + shardResponse.setSolrResponse(solrResponse); + response.add("responseHeader", responseHeader); + Mockito.when(solrResponse.getResponse()).thenReturn(response); + + return shardResponse; + } } diff --git a/solr/core/src/test/org/apache/solr/handler/component/MockSortSpecBuilder.java b/solr/core/src/test/org/apache/solr/handler/component/MockSortSpecBuilder.java index 6a8194d788d..5c42abe0af7 100644 --- 
a/solr/core/src/test/org/apache/solr/handler/component/MockSortSpecBuilder.java +++ b/solr/core/src/test/org/apache/solr/handler/component/MockSortSpecBuilder.java @@ -22,30 +22,29 @@ import org.mockito.Mockito; public class MockSortSpecBuilder { - private final SortSpec sortSpec; - - public MockSortSpecBuilder() { - this.sortSpec = Mockito.mock(SortSpec.class); - Mockito.when(sortSpec.getCount()).thenReturn(10); - } - - public static MockSortSpecBuilder create() { - return new MockSortSpecBuilder(); - } - - public MockSortSpecBuilder withSortFields(SortField[] sortFields) { - Sort sort = new Sort(sortFields); - Mockito.when(sortSpec.getSort()).thenReturn(sort); - return this; - } - - public MockSortSpecBuilder withIncludesNonScoreOrDocSortField(boolean include) { - Mockito.when(sortSpec.includesNonScoreOrDocField()).thenReturn(include); - return this; - } - - public SortSpec build() { - return sortSpec; - } - + private final SortSpec sortSpec; + + public MockSortSpecBuilder() { + this.sortSpec = Mockito.mock(SortSpec.class); + Mockito.when(sortSpec.getCount()).thenReturn(10); + } + + public static MockSortSpecBuilder create() { + return new MockSortSpecBuilder(); + } + + public MockSortSpecBuilder withSortFields(SortField[] sortFields) { + Sort sort = new Sort(sortFields); + Mockito.when(sortSpec.getSort()).thenReturn(sort); + return this; + } + + public MockSortSpecBuilder withIncludesNonScoreOrDocSortField(boolean include) { + Mockito.when(sortSpec.includesNonScoreOrDocField()).thenReturn(include); + return this; + } + + public SortSpec build() { + return sortSpec; + } } diff --git a/solr/core/src/test/org/apache/solr/handler/component/MoreLikeThisComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/MoreLikeThisComponentTest.java index 764d2607836..ffc3cbde8de 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/MoreLikeThisComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/MoreLikeThisComponentTest.java @@ -30,7 +30,6 @@ /** * Test for MoreLikeThisComponent * - * * @see MoreLikeThisComponent */ @Slow @@ -39,65 +38,145 @@ public class MoreLikeThisComponentTest extends SolrTestCaseJ4 { @BeforeClass public static void moreLikeThisBeforeClass() throws Exception { initCore("solrconfig.xml", "schema.xml"); - assertU(adoc("id","42","name","Tom Cruise","subword","Top Gun","subword","Risky Business","subword","The Color of Money","subword","Minority Report","subword", "Days of Thunder","subword", "Eyes Wide Shut","subword", "Far and Away", "foo_ti","10")); - assertU(adoc("id","43","name","Tom Hanks","subword","The Green Mile","subword","Forest Gump","subword","Philadelphia Story","subword","Big","subword","Cast Away", "foo_ti","10")); - assertU(adoc("id","44","name","Harrison Ford","subword","Star Wars","subword","Indiana Jones","subword","Patriot Games","subword","Regarding Henry")); - assertU(adoc("id","45","name","George Harrison","subword","Yellow Submarine","subword","Help","subword","Magical Mystery Tour","subword","Sgt. 
Peppers Lonley Hearts Club Band")); - assertU(adoc("id","46","name","Nicole Kidman","subword","Batman","subword","Days of Thunder","subword","Eyes Wide Shut","subword","Far and Away")); + assertU( + adoc( + "id", + "42", + "name", + "Tom Cruise", + "subword", + "Top Gun", + "subword", + "Risky Business", + "subword", + "The Color of Money", + "subword", + "Minority Report", + "subword", + "Days of Thunder", + "subword", + "Eyes Wide Shut", + "subword", + "Far and Away", + "foo_ti", + "10")); + assertU( + adoc( + "id", + "43", + "name", + "Tom Hanks", + "subword", + "The Green Mile", + "subword", + "Forest Gump", + "subword", + "Philadelphia Story", + "subword", + "Big", + "subword", + "Cast Away", + "foo_ti", + "10")); + assertU( + adoc( + "id", + "44", + "name", + "Harrison Ford", + "subword", + "Star Wars", + "subword", + "Indiana Jones", + "subword", + "Patriot Games", + "subword", + "Regarding Henry")); + assertU( + adoc( + "id", + "45", + "name", + "George Harrison", + "subword", + "Yellow Submarine", + "subword", + "Help", + "subword", + "Magical Mystery Tour", + "subword", + "Sgt. Peppers Lonley Hearts Club Band")); + assertU( + adoc( + "id", + "46", + "name", + "Nicole Kidman", + "subword", + "Batman", + "subword", + "Days of Thunder", + "subword", + "Eyes Wide Shut", + "subword", + "Far and Away")); assertU(commit()); } - + private void initCommonMoreLikeThisParams(ModifiableSolrParams params) { params.set(MoreLikeThisParams.MLT, "true"); params.set(MoreLikeThisParams.SIMILARITY_FIELDS, "name,subword"); - params.set(MoreLikeThisParams.MIN_TERM_FREQ,"1"); - params.set(MoreLikeThisParams.MIN_DOC_FREQ,"1"); - params.set("indent","true"); + params.set(MoreLikeThisParams.MIN_TERM_FREQ, "1"); + params.set(MoreLikeThisParams.MIN_DOC_FREQ, "1"); + params.set("indent", "true"); } @Test - public void testMLT_baseParams_shouldReturnSimilarDocuments() - { + public void testMLT_baseParams_shouldReturnSimilarDocuments() { SolrCore core = h.getCore(); ModifiableSolrParams params = new ModifiableSolrParams(); initCommonMoreLikeThisParams(params); - + params.set(CommonParams.Q, "id:42"); - SolrQueryRequest mltreq = new LocalSolrQueryRequest( core, params); - assertQ("morelikethis - tom cruise",mltreq - ,"//result/doc[1]/str[@name='id'][.='46']" - ,"//result/doc[2]/str[@name='id'][.='43']"); + SolrQueryRequest mltreq = new LocalSolrQueryRequest(core, params); + assertQ( + "morelikethis - tom cruise", + mltreq, + "//result/doc[1]/str[@name='id'][.='46']", + "//result/doc[2]/str[@name='id'][.='43']"); params.set(CommonParams.Q, "id:44"); - mltreq.close(); mltreq = new LocalSolrQueryRequest(h.getCore(), params); - assertQ("morelike this - harrison ford",mltreq - ,"//result/doc[1]/str[@name='id'][.='45']"); + mltreq.close(); + mltreq = new LocalSolrQueryRequest(h.getCore(), params); + assertQ("morelike this - harrison ford", mltreq, "//result/doc[1]/str[@name='id'][.='45']"); mltreq.close(); } @Test - public void testMLT_baseParamsInterestingTermsDetails_shouldReturnSimilarDocumentsAndInterestingTermsDetails() - { + public void + testMLT_baseParamsInterestingTermsDetails_shouldReturnSimilarDocumentsAndInterestingTermsDetails() { SolrCore core = h.getCore(); ModifiableSolrParams params = new ModifiableSolrParams(); initCommonMoreLikeThisParams(params); params.set(MoreLikeThisParams.INTERESTING_TERMS, "details"); - + params.set(CommonParams.Q, "id:42"); - SolrQueryRequest mltreq = new LocalSolrQueryRequest( core, params); - assertQ("morelikethis - tom cruise",mltreq - 
,"//result/doc[1]/str[@name='id'][.='46']" - ,"//result/doc[2]/str[@name='id'][.='43']", + SolrQueryRequest mltreq = new LocalSolrQueryRequest(core, params); + assertQ( + "morelikethis - tom cruise", + mltreq, + "//result/doc[1]/str[@name='id'][.='46']", + "//result/doc[2]/str[@name='id'][.='43']", "//lst[@name='interestingTerms']/lst[1][count(*)>0]", "//lst[@name='interestingTerms']/lst[1]/float[.=1.0]"); mltreq.close(); } @Test - public void testMLT_baseParamsInterestingTermsList_shouldReturnSimilarDocumentsAndInterestingTermsList() - { + public void + testMLT_baseParamsInterestingTermsList_shouldReturnSimilarDocumentsAndInterestingTermsList() { SolrCore core = h.getCore(); ModifiableSolrParams params = new ModifiableSolrParams(); @@ -105,18 +184,19 @@ public void testMLT_baseParamsInterestingTermsList_shouldReturnSimilarDocumentsA params.set(MoreLikeThisParams.INTERESTING_TERMS, "list"); params.set(CommonParams.Q, "id:42"); - SolrQueryRequest mltreq = new LocalSolrQueryRequest( core, params); - assertQ("morelikethis - tom cruise",mltreq - ,"//result/doc[1]/str[@name='id'][.='46']" - ,"//result/doc[2]/str[@name='id'][.='43']", + SolrQueryRequest mltreq = new LocalSolrQueryRequest(core, params); + assertQ( + "morelikethis - tom cruise", + mltreq, + "//result/doc[1]/str[@name='id'][.='46']", + "//result/doc[2]/str[@name='id'][.='43']", "//lst[@name='interestingTerms']/arr[@name='42'][count(*)>0]", "//lst[@name='interestingTerms']/arr[@name='42']/str[.='name:Cruise']"); mltreq.close(); } @Test - public void testMLT_boostEnabled_shouldReturnSimilarDocumentsConsideringBoost() - { + public void testMLT_boostEnabled_shouldReturnSimilarDocumentsConsideringBoost() { SolrCore core = h.getCore(); ModifiableSolrParams params = new ModifiableSolrParams(); @@ -124,44 +204,54 @@ public void testMLT_boostEnabled_shouldReturnSimilarDocumentsConsideringBoost() params.set(MoreLikeThisParams.BOOST, "true"); params.set(CommonParams.Q, "id:42"); - SolrQueryRequest mltreq = new LocalSolrQueryRequest( core, params); - assertQ("morelikethis - tom cruise",mltreq - ,"//result/doc[1]/str[@name='id'][.='46']" - ,"//result/doc[2]/str[@name='id'][.='43']"); + SolrQueryRequest mltreq = new LocalSolrQueryRequest(core, params); + assertQ( + "morelikethis - tom cruise", + mltreq, + "//result/doc[1]/str[@name='id'][.='46']", + "//result/doc[2]/str[@name='id'][.='43']"); params.set(CommonParams.Q, "id:42"); - params.set(MoreLikeThisParams.QF,"name^5.0 subword^0.1"); - mltreq.close(); mltreq = new LocalSolrQueryRequest(h.getCore(), params); - assertQ("morelikethis with weights",mltreq - ,"//result/doc[1]/str[@name='id'][.='43']" - ,"//result/doc[2]/str[@name='id'][.='46']"); + params.set(MoreLikeThisParams.QF, "name^5.0 subword^0.1"); + mltreq.close(); + mltreq = new LocalSolrQueryRequest(h.getCore(), params); + assertQ( + "morelikethis with weights", + mltreq, + "//result/doc[1]/str[@name='id'][.='43']", + "//result/doc[2]/str[@name='id'][.='46']"); mltreq.close(); } @Test - public void testMLT_boostEnabledInterestingTermsDetails_shouldReturnSimilarDocumentsConsideringBoostAndInterestingTermsDetails() - { + public void + testMLT_boostEnabledInterestingTermsDetails_shouldReturnSimilarDocumentsConsideringBoostAndInterestingTermsDetails() { SolrCore core = h.getCore(); ModifiableSolrParams params = new ModifiableSolrParams(); initCommonMoreLikeThisParams(params); params.set(MoreLikeThisParams.BOOST, "true"); params.set(MoreLikeThisParams.INTERESTING_TERMS, "details"); - + params.set(CommonParams.Q, "id:42"); - SolrQueryRequest 
mltreq = new LocalSolrQueryRequest( core, params); - assertQ("morelikethis - tom cruise",mltreq - ,"//result/doc[1]/str[@name='id'][.='46']" - ,"//result/doc[2]/str[@name='id'][.='43']", + SolrQueryRequest mltreq = new LocalSolrQueryRequest(core, params); + assertQ( + "morelikethis - tom cruise", + mltreq, + "//result/doc[1]/str[@name='id'][.='46']", + "//result/doc[2]/str[@name='id'][.='43']", "//lst[@name='interestingTerms']/lst[1][count(*)>0]", "//lst[@name='interestingTerms']/lst[1]/float[.>1.0]"); - - params.set(MoreLikeThisParams.QF,"name^5.0 subword^0.1"); - mltreq.close(); mltreq = new LocalSolrQueryRequest(h.getCore(), params); - assertQ("morelikethis with weights",mltreq - ,"//result/doc[1]/str[@name='id'][.='43']" - ,"//result/doc[2]/str[@name='id'][.='46']", + + params.set(MoreLikeThisParams.QF, "name^5.0 subword^0.1"); + mltreq.close(); + mltreq = new LocalSolrQueryRequest(h.getCore(), params); + assertQ( + "morelikethis with weights", + mltreq, + "//result/doc[1]/str[@name='id'][.='43']", + "//result/doc[2]/str[@name='id'][.='46']", "//lst[@name='interestingTerms']/lst[1][count(*)>0]", "//lst[@name='interestingTerms']/lst[1]/float[.>5.0]"); @@ -169,8 +259,8 @@ public void testMLT_boostEnabledInterestingTermsDetails_shouldReturnSimilarDocum } @Test - public void testMLT_boostEnabledInterestingTermsList_shouldReturnSimilarDocumentsConsideringBoostAndInterestingTermsList() - { + public void + testMLT_boostEnabledInterestingTermsList_shouldReturnSimilarDocumentsConsideringBoostAndInterestingTermsList() { SolrCore core = h.getCore(); ModifiableSolrParams params = new ModifiableSolrParams(); @@ -179,18 +269,23 @@ public void testMLT_boostEnabledInterestingTermsList_shouldReturnSimilarDocument params.set(MoreLikeThisParams.INTERESTING_TERMS, "list"); params.set(CommonParams.Q, "id:42"); - SolrQueryRequest mltreq = new LocalSolrQueryRequest( core, params); - assertQ("morelikethis - tom cruise",mltreq - ,"//result/doc[1]/str[@name='id'][.='46']" - ,"//result/doc[2]/str[@name='id'][.='43']", + SolrQueryRequest mltreq = new LocalSolrQueryRequest(core, params); + assertQ( + "morelikethis - tom cruise", + mltreq, + "//result/doc[1]/str[@name='id'][.='46']", + "//result/doc[2]/str[@name='id'][.='43']", "//lst[@name='interestingTerms']/arr[@name='42'][count(*)>0]", "//lst[@name='interestingTerms']/arr[@name='42']/str[.='name:Cruise']"); - params.set(MoreLikeThisParams.QF,"name^5.0 subword^0.1"); - mltreq.close(); mltreq = new LocalSolrQueryRequest(h.getCore(), params); - assertQ("morelikethis with weights",mltreq - ,"//result/doc[1]/str[@name='id'][.='43']" - ,"//result/doc[2]/str[@name='id'][.='46']", + params.set(MoreLikeThisParams.QF, "name^5.0 subword^0.1"); + mltreq.close(); + mltreq = new LocalSolrQueryRequest(h.getCore(), params); + assertQ( + "morelikethis with weights", + mltreq, + "//result/doc[1]/str[@name='id'][.='43']", + "//result/doc[2]/str[@name='id'][.='46']", "//lst[@name='interestingTerms']/arr[@name='42'][count(*)>0]", "//lst[@name='interestingTerms']/arr[@name='42']/str[.='name:Cruise']"); @@ -198,8 +293,7 @@ public void testMLT_boostEnabledInterestingTermsList_shouldReturnSimilarDocument } @Test - public void testMLT_debugEnabled_shouldReturnSimilarDocumentsWithDebug() - { + public void testMLT_debugEnabled_shouldReturnSimilarDocumentsWithDebug() { ModifiableSolrParams params = new ModifiableSolrParams(); initCommonMoreLikeThisParams(params); @@ -208,24 +302,25 @@ public void testMLT_debugEnabled_shouldReturnSimilarDocumentsWithDebug() params.set(CommonParams.Q, 
"id:44"); params.set(CommonParams.DEBUG_QUERY, "true"); SolrQueryRequest mltreq = new LocalSolrQueryRequest(h.getCore(), params); - assertQ("morelike this - harrison ford",mltreq - ,"//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='rawMLTQuery']" - ,"//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='boostedMLTQuery']" - ,"//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='realMLTQuery']" - ,"//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/lst[@name='explain']/str[@name='45']" - ); - + assertQ( + "morelike this - harrison ford", + mltreq, + "//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='rawMLTQuery']", + "//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='boostedMLTQuery']", + "//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='realMLTQuery']", + "//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/lst[@name='explain']/str[@name='45']"); + params.remove(CommonParams.DEBUG_QUERY); params.set(CommonParams.Q, "{!field f=id}44"); - mltreq.close(); mltreq = new LocalSolrQueryRequest(h.getCore(), params); - assertQ(mltreq - ,"//result/doc[1]/str[@name='id'][.='45']"); + mltreq.close(); + mltreq = new LocalSolrQueryRequest(h.getCore(), params); + assertQ(mltreq, "//result/doc[1]/str[@name='id'][.='45']"); mltreq.close(); } @Test - public void testMLT_debugEnabledInterestingTermsDetails_shouldReturnSimilarDocumentsWithDebugAndInterestingTermsDetails() - { + public void + testMLT_debugEnabledInterestingTermsDetails_shouldReturnSimilarDocumentsWithDebugAndInterestingTermsDetails() { ModifiableSolrParams params = new ModifiableSolrParams(); initCommonMoreLikeThisParams(params); @@ -235,27 +330,31 @@ public void testMLT_debugEnabledInterestingTermsDetails_shouldReturnSimilarDocum params.set(CommonParams.Q, "id:44"); params.set(CommonParams.DEBUG_QUERY, "true"); SolrQueryRequest mltreq = new LocalSolrQueryRequest(h.getCore(), params); - assertQ("morelike this - harrison ford",mltreq - ,"//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='rawMLTQuery']" - ,"//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='boostedMLTQuery']" - ,"//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='realMLTQuery']" - ,"//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/lst[@name='explain']/str[@name='45']", + assertQ( + "morelike this - harrison ford", + mltreq, + "//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='rawMLTQuery']", + "//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='boostedMLTQuery']", + "//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='realMLTQuery']", + "//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/lst[@name='explain']/str[@name='45']", "//lst[@name='interestingTerms']/lst[1][count(*)>0]", "//lst[@name='interestingTerms']/lst[1]/float[.>1.0]"); params.remove(CommonParams.DEBUG_QUERY); params.set(CommonParams.Q, "{!field f=id}44"); - mltreq.close(); mltreq = new LocalSolrQueryRequest(h.getCore(), params); - assertQ(mltreq - ,"//result/doc[1]/str[@name='id'][.='45']", + mltreq.close(); + mltreq = new LocalSolrQueryRequest(h.getCore(), params); + assertQ( + mltreq, + "//result/doc[1]/str[@name='id'][.='45']", "//lst[@name='interestingTerms']/lst[1][count(*)>0]", "//lst[@name='interestingTerms']/lst[1]/float[.>1.0]"); mltreq.close(); } @Test - public void 
testMLT_debugEnabledInterestingTermsList_shouldReturnSimilarDocumentsWithDebugAndInterestingTermsList() - { + public void + testMLT_debugEnabledInterestingTermsList_shouldReturnSimilarDocumentsWithDebugAndInterestingTermsList() { ModifiableSolrParams params = new ModifiableSolrParams(); initCommonMoreLikeThisParams(params); @@ -264,21 +363,25 @@ public void testMLT_debugEnabledInterestingTermsList_shouldReturnSimilarDocument params.set(CommonParams.Q, "id:44"); params.set(CommonParams.DEBUG_QUERY, "true"); - + SolrQueryRequest mltreq = new LocalSolrQueryRequest(h.getCore(), params); - assertQ("morelike this - harrison ford",mltreq - ,"//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='rawMLTQuery']" - ,"//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='boostedMLTQuery']" - ,"//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='realMLTQuery']" - ,"//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/lst[@name='explain']/str[@name='45']", + assertQ( + "morelike this - harrison ford", + mltreq, + "//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='rawMLTQuery']", + "//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='boostedMLTQuery']", + "//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/str[@name='realMLTQuery']", + "//lst[@name='debug']/lst[@name='moreLikeThis']/lst[@name='44']/lst[@name='explain']/str[@name='45']", "//lst[@name='interestingTerms']/arr[@name='44'][count(*)>0]", "//lst[@name='interestingTerms']/arr[@name='44']/str[.='name:Harrison']"); params.remove(CommonParams.DEBUG_QUERY); params.set(CommonParams.Q, "{!field f=id}44"); - mltreq.close(); mltreq = new LocalSolrQueryRequest(h.getCore(), params); - assertQ(mltreq - ,"//result/doc[1]/str[@name='id'][.='45']", + mltreq.close(); + mltreq = new LocalSolrQueryRequest(h.getCore(), params); + assertQ( + mltreq, + "//result/doc[1]/str[@name='id'][.='45']", "//lst[@name='interestingTerms']/arr[@name='44'][count(*)>0]", "//lst[@name='interestingTerms']/arr[@name='44']/str[.='name:Harrison']"); mltreq.close(); diff --git a/solr/core/src/test/org/apache/solr/handler/component/PhrasesIdentificationComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/PhrasesIdentificationComponentTest.java index d2d9a47419b..b2c6def2eeb 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/PhrasesIdentificationComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/PhrasesIdentificationComponentTest.java @@ -16,6 +16,9 @@ */ package org.apache.solr.handler.component; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.lessThan; + import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -25,22 +28,17 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; - import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.handler.component.PhrasesIdentificationComponent.Phrase; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.params.ShardParams; +import org.apache.solr.handler.component.PhrasesIdentificationComponent.Phrase; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.schema.IndexSchema; import org.apache.solr.schema.SchemaField; - import org.junit.After; -import org.junit.BeforeClass; import org.junit.Before; - -import static 
org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.lessThan; +import org.junit.BeforeClass; public class PhrasesIdentificationComponentTest extends SolrTestCaseJ4 { @@ -48,26 +46,35 @@ public class PhrasesIdentificationComponentTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-phrases-identification.xml","schema-phrases-identification.xml"); + initCore("solrconfig-phrases-identification.xml", "schema-phrases-identification.xml"); } - + @Before public void addSomeDocs() throws Exception { - assertU(adoc("id", "42", - "title","Tale of the Brown Fox: was he lazy?", - "body", "No. The quick brown fox was a very brown fox who liked to get into trouble.")); - assertU(adoc("id", "43", - "title","A fable in two acts", - "body", "The brOwn fOx jumped. The lazy dog did not")); - assertU(adoc("id", "44", - "title","Why the LazY dog was lazy", - "body", "News flash: Lazy Dog was not actually lazy, it just seemd so compared to Fox")); - assertU(adoc("id", "45", - "title","Why Are We Lazy?", - "body", "Because we are. that's why")); + assertU( + adoc( + "id", "42", + "title", "Tale of the Brown Fox: was he lazy?", + "body", "No. The quick brown fox was a very brown fox who liked to get into trouble.")); + assertU( + adoc( + "id", "43", + "title", "A fable in two acts", + "body", "The brOwn fOx jumped. The lazy dog did not")); + assertU( + adoc( + "id", "44", + "title", "Why the LazY dog was lazy", + "body", + "News flash: Lazy Dog was not actually lazy, it just seemd so compared to Fox")); + assertU( + adoc( + "id", "45", + "title", "Why Are We Lazy?", + "body", "Because we are. that's why")); assertU((commit())); } - + @After public void deleteAllDocs() throws Exception { assertU(delQ("*:*")); @@ -77,14 +84,16 @@ public void deleteAllDocs() throws Exception { public void testWhiteBoxPhraseParsingLongInput() throws Exception { final SchemaField field = h.getCore().getLatestSchema().getField("multigrams_body"); assertNotNull(field); - final List<Phrase> phrases = Phrase.extractPhrases - (" did a Quick brown FOX perniciously jump over the lAZy dog", field, 3, 7); + final List<Phrase> phrases = + Phrase.extractPhrases( + " did a Quick brown FOX perniciously jump over the lAZy dog", field, 3, 7); + + assertEquals( + IntStream.rangeClosed((11 - 7 + 1), 11).sum(), // 11 words, max query phrase size is 7 + phrases.size()); - assertEquals(IntStream.rangeClosed((11-7+1), 11).sum(), // 11 words, max query phrase size is 7 - phrases.size()); - // spot check a few explicitly chosen phrases of various lengths... 
- + { // single term, close to edge so not as many super phrases as other terms might have final Phrase lazy = phrases.get(phrases.size() - 1 - 2); final String debug = lazy.toString(); @@ -93,7 +102,7 @@ public void testWhiteBoxPhraseParsingLongInput() throws Exception { assertEquals(debug, 10, lazy.getPositionStart()); assertEquals(debug, 11, lazy.getPositionEnd()); assertEquals(debug, 1, lazy.getPositionLength()); - + assertEquals(debug, 54, lazy.getOffsetStart()); assertEquals(debug, 58, lazy.getOffsetEnd()); @@ -111,7 +120,7 @@ public void testWhiteBoxPhraseParsingLongInput() throws Exception { assertEquals(debug, 4, brown_fox.getPositionStart()); assertEquals(debug, 6, brown_fox.getPositionEnd()); assertEquals(debug, 2, brown_fox.getPositionLength()); - + assertEquals(debug, 17, brown_fox.getOffsetStart()); assertEquals(debug, 26, brown_fox.getOffsetEnd()); @@ -119,7 +128,6 @@ public void testWhiteBoxPhraseParsingLongInput() throws Exception { assertEquals(debug, 1, brown_fox.getLargestIndexedSubPhrases().size()); assertEquals(debug, brown_fox, brown_fox.getLargestIndexedSubPhrases().get(0)); assertEquals(debug, 2, brown_fox.getIndexedSuperPhrases().size()); // (2 @ len=3) - } { // length 3 (which is the max indexed size) @ start of the string final Phrase daq = phrases.get(2); @@ -129,7 +137,7 @@ public void testWhiteBoxPhraseParsingLongInput() throws Exception { assertEquals(debug, 1, daq.getPositionStart()); assertEquals(debug, 4, daq.getPositionEnd()); assertEquals(debug, 3, daq.getPositionLength()); - + assertEquals(debug, 1, daq.getOffsetStart()); assertEquals(debug, 13, daq.getOffsetEnd()); @@ -146,7 +154,7 @@ public void testWhiteBoxPhraseParsingLongInput() throws Exception { assertEquals(debug, 3, qbfp.getPositionStart()); assertEquals(debug, 7, qbfp.getPositionEnd()); assertEquals(debug, 4, qbfp.getPositionLength()); - + assertEquals(debug, 8, qbfp.getOffsetStart()); assertEquals(debug, 39, qbfp.getOffsetEnd()); @@ -154,42 +162,43 @@ public void testWhiteBoxPhraseParsingLongInput() throws Exception { assertEquals(debug, 2, qbfp.getLargestIndexedSubPhrases().size()); assertEquals(debug, 0, qbfp.getIndexedSuperPhrases().size()); } - + // some blanket assumptions about the results... assertBasicSanityChecks(phrases, 11, 3, 7); } public void testWhiteBoxPhraseParsingShortInput() throws Exception { // for input this short, either of these fields should be (mostly) equivalent - final Map<String,Integer> fields = new TreeMap<>(); - fields.put("multigrams_body", 7); + final Map<String, Integer> fields = new TreeMap<>(); + fields.put("multigrams_body", 7); fields.put("multigrams_body_short", 3); - for (Map.Entry<String,Integer> entry : fields.entrySet()) { + for (Map.Entry<String, Integer> entry : fields.entrySet()) { try { final int maxQ = entry.getValue(); final SchemaField field = h.getCore().getLatestSchema().getField(entry.getKey()); assertNotNull(field); - + // empty input shouldn't break anything - assertEquals(0, Phrase.extractPhrases(random().nextBoolean() ? "" : " ", field, 3, maxQ).size()); - + assertEquals( + 0, Phrase.extractPhrases(random().nextBoolean() ? "" : " ", field, 3, maxQ).size()); + // input shorter than our index/query phrase sizes shouldn't break anything either.... 
final List<Phrase> phrases = Phrase.extractPhrases("brown FOX", field, 3, maxQ); - + assertEquals(3, phrases.size()); - + { // length 2 final Phrase brown_fox = phrases.get(1); final String debug = brown_fox.toString(); - + assertEquals(debug, "brown FOX", brown_fox.getSubSequence()); assertEquals(debug, 1, brown_fox.getPositionStart()); assertEquals(debug, 3, brown_fox.getPositionEnd()); assertEquals(debug, 2, brown_fox.getPositionLength()); - + assertEquals(debug, 0, brown_fox.getOffsetStart()); assertEquals(debug, 9, brown_fox.getOffsetEnd()); - + assertEquals(debug, 2, brown_fox.getIndividualIndexedTerms().size()); assertEquals(debug, 1, brown_fox.getLargestIndexedSubPhrases().size()); assertEquals(debug, brown_fox, brown_fox.getLargestIndexedSubPhrases().get(0)); @@ -198,21 +207,21 @@ public void testWhiteBoxPhraseParsingShortInput() throws Exception { { // length 1 final Phrase fox = phrases.get(2); final String debug = fox.toString(); - + assertEquals(debug, "FOX", fox.getSubSequence()); assertEquals(debug, 2, fox.getPositionStart()); assertEquals(debug, 3, fox.getPositionEnd()); assertEquals(debug, 1, fox.getPositionLength()); - + assertEquals(debug, 6, fox.getOffsetStart()); assertEquals(debug, 9, fox.getOffsetEnd()); - + assertEquals(debug, 1, fox.getIndividualIndexedTerms().size()); assertEquals(debug, 1, fox.getLargestIndexedSubPhrases().size()); assertEquals(debug, fox, fox.getLargestIndexedSubPhrases().get(0)); assertEquals(debug, 1, fox.getIndexedSuperPhrases().size()); } - + assertBasicSanityChecks(phrases, 2, 3, maxQ); } catch (AssertionError e) { throw new AssertionError(entry.getKey() + " => " + e.getMessage(), e); @@ -220,84 +229,110 @@ } } - /** - * Asserts some basic rules that should be enforced about all Phrases - * & their linkages to other phrases + /** + * Asserts some basic rules that should be enforced about all Phrases & their linkages to other + * phrases */ - private void assertBasicSanityChecks(final List<Phrase> phrases, - final int inputPositionLength, - final int maxIndexedPositionLength, - final int maxQueryPositionLength) throws Exception { + private void assertBasicSanityChecks( + final List<Phrase> phrases, + final int inputPositionLength, + final int maxIndexedPositionLength, + final int maxQueryPositionLength) + throws Exception { assert 0 < phrases.size() : "Don't use this method if phrases might be empty"; - - assertEmptyStream("no phrase should be longer than "+maxQueryPositionLength+" positions", - phrases.stream().filter(p -> p.getPositionLength() > maxQueryPositionLength)); - - assertEmptyStream("no phrase should have a start offset < 0", - phrases.stream().filter(p -> p.getOffsetStart() < 0)); - assertEmptyStream("no phrase should have a start position < 1", - phrases.stream().filter(p -> p.getPositionStart() < 1)); - - assertEmptyStream("If a phrase has a start offset of 0, then it must have position 1", - phrases.stream().filter(p -> (p.getOffsetStart() == 0) - && (p.getPositionStart() != 1))); - + + assertEmptyStream( + "no phrase should be longer than " + maxQueryPositionLength + " positions", + phrases.stream().filter(p -> p.getPositionLength() > maxQueryPositionLength)); + + assertEmptyStream( + "no phrase should have a start offset < 0", + phrases.stream().filter(p -> p.getOffsetStart() < 0)); + assertEmptyStream( + "no phrase should have a start position < 1", + phrases.stream().filter(p -> p.getPositionStart() < 1)); + + assertEmptyStream( + "If a phrase has a start offset of 0, then it 
must have position 1", + phrases.stream().filter(p -> (p.getOffsetStart() == 0) && (p.getPositionStart() != 1))); + final Phrase first = phrases.get(0); - final Phrase last = phrases.get(phrases.size()-1); - - assertEmptyStream("no phrase should have a start offset < first phrase", - phrases.stream().filter(p -> p.getOffsetStart() < first.getOffsetStart())); - assertEmptyStream("no phrase should have an end offset > last phrase", - phrases.stream().filter(p -> last.getOffsetEnd() < p.getOffsetEnd())); - - assertEmptyStream("no phrase should have a start position < first phrase", - phrases.stream().filter(p -> p.getPositionStart() < first.getPositionStart())); - assertEmptyStream("no phrase should have an end position > last phrase", - phrases.stream().filter(p -> last.getPositionEnd() < p.getPositionEnd())); - + final Phrase last = phrases.get(phrases.size() - 1); + + assertEmptyStream( + "no phrase should have a start offset < first phrase", + phrases.stream().filter(p -> p.getOffsetStart() < first.getOffsetStart())); + assertEmptyStream( + "no phrase should have an end offset > last phrase", + phrases.stream().filter(p -> last.getOffsetEnd() < p.getOffsetEnd())); + + assertEmptyStream( + "no phrase should have a start position < first phrase", + phrases.stream().filter(p -> p.getPositionStart() < first.getPositionStart())); + assertEmptyStream( + "no phrase should have an end position > last phrase", + phrases.stream().filter(p -> last.getPositionEnd() < p.getPositionEnd())); // NOTE: stuff below this point may not be true for all analyzers (ie: stopwords) // but should be valid for the analyzers used in this test... // (if we expand test to cover analyzers w/stopwords, refactor this into a new method) - + for (int n = 1; n <= maxQueryPositionLength; n++) { final int len = n; final int expected = Math.max(0, 1 + inputPositionLength - n); - final List sizeN = phrases.stream().filter(p -> p.getPositionLength() == len - ).collect(Collectors.toList()); + final List sizeN = + phrases.stream().filter(p -> p.getPositionLength() == len).collect(Collectors.toList()); assertEquals("Expected # phrases of size " + n + ": " + sizeN, expected, sizeN.size()); } // check the quantities of sub-terms/phrases... 
- assertEmptyStream("no phrase should have num indexed terms != pos_len", - phrases.stream().filter - (p -> last.getPositionLength() != last.getIndividualIndexedTerms().size())); - assertEmptyStream("no phrase should have num sub-phrases != max(1, 1 + pos_len - "+maxIndexedPositionLength+")", - phrases.stream().filter - (p -> (Math.max(1, 1 + last.getPositionLength() - maxIndexedPositionLength) - != last.getLargestIndexedSubPhrases().size()))); + assertEmptyStream( + "no phrase should have num indexed terms != pos_len", + phrases.stream() + .filter(p -> last.getPositionLength() != last.getIndividualIndexedTerms().size())); + assertEmptyStream( + "no phrase should have num sub-phrases != max(1, 1 + pos_len - " + + maxIndexedPositionLength + + ")", + phrases.stream() + .filter( + p -> + (Math.max(1, 1 + last.getPositionLength() - maxIndexedPositionLength) + != last.getLargestIndexedSubPhrases().size()))); // NOTE: indexed super phrases can be of various lengths, and differing quantities near - // begining/end of input so don't worry about an exact count, just check their properties (below) + // begining/end of input so don't worry about an exact count, just check their properties + // (below) // check the properties of our sub/super phrases for (Phrase phrase : phrases) { final String debug = phrase.toString(); - - assertEmptyStream(debug + " should not have any indexed terms where pos_len != 1", - phrase.getIndividualIndexedTerms().stream().filter - (term -> 1 != term.getPositionLength())); - - assertEmptyStream(debug + " should not have any sub-phrases where pos_len > min(pos_len, " - + maxIndexedPositionLength+")", - phrase.getLargestIndexedSubPhrases().stream().filter - (inner -> (Math.min(phrase.getPositionLength(), maxIndexedPositionLength) - < inner.getPositionLength()))); - - assertEmptyStream(debug + " should not have any super-phrases where super.len <= phrase.len or " - + maxIndexedPositionLength + " < super.len", - phrase.getIndexedSuperPhrases().stream().filter - (outer -> (outer.getPositionLength() <= phrase.getPositionLength() || - maxIndexedPositionLength < outer.getPositionLength()))); + + assertEmptyStream( + debug + " should not have any indexed terms where pos_len != 1", + phrase.getIndividualIndexedTerms().stream() + .filter(term -> 1 != term.getPositionLength())); + + assertEmptyStream( + debug + + " should not have any sub-phrases where pos_len > min(pos_len, " + + maxIndexedPositionLength + + ")", + phrase.getLargestIndexedSubPhrases().stream() + .filter( + inner -> + (Math.min(phrase.getPositionLength(), maxIndexedPositionLength) + < inner.getPositionLength()))); + + assertEmptyStream( + debug + + " should not have any super-phrases where super.len <= phrase.len or " + + maxIndexedPositionLength + + " < super.len", + phrase.getIndexedSuperPhrases().stream() + .filter( + outer -> + (outer.getPositionLength() <= phrase.getPositionLength() + || maxIndexedPositionLength < outer.getPositionLength()))); } } @@ -308,68 +343,84 @@ public void testWhiteboxStats() throws Exception { // a function we'll re-use on phrases generated from the above input // the multiplier let's us simulate multiple shards returning the same values - BiConsumer> assertions = (mult, phrases) -> { - final Phrase brown_fox = phrases.get(1); - assertEquals("BROWN fox", brown_fox.getSubSequence()); - - assertEquals(mult * 1, brown_fox.getTTF("multigrams_title")); - assertEquals(mult * 1, brown_fox.getDocFreq("multigrams_title")); - assertEquals(mult * 1, 
brown_fox.getConjunctionDocCount("multigrams_title")); - - assertEquals(mult * 3, brown_fox.getTTF("multigrams_body")); - assertEquals(mult * 2, brown_fox.getDocFreq("multigrams_body")); - assertEquals(mult * 2, brown_fox.getConjunctionDocCount("multigrams_body")); - - final Phrase fox_lazy = phrases.get(6); - assertEquals("fox lAzY", fox_lazy.getSubSequence()); - - assertEquals(mult * 0, fox_lazy.getTTF("multigrams_title")); - assertEquals(mult * 0, fox_lazy.getDocFreq("multigrams_title")); - assertEquals(mult * 1, fox_lazy.getConjunctionDocCount("multigrams_title")); - - assertEquals(mult * 0, fox_lazy.getTTF("multigrams_body")); - assertEquals(mult * 0, fox_lazy.getDocFreq("multigrams_body")); - assertEquals(mult * 2, fox_lazy.getConjunctionDocCount("multigrams_body")); - - final Phrase bfld = phrases.get(3); - assertEquals("BROWN fox lAzY dog", bfld.getSubSequence()); - - expectThrows(SolrException.class, () -> { bfld.getTTF("multigrams_title"); }); - expectThrows(SolrException.class, () -> { bfld.getDocFreq("multigrams_title"); }); - assertEquals(mult * 0, bfld.getConjunctionDocCount("multigrams_title")); - - expectThrows(SolrException.class, () -> { bfld.getTTF("multigrams_body"); }); - expectThrows(SolrException.class, () -> { bfld.getDocFreq("multigrams_body"); }); - assertEquals(mult * 1, bfld.getConjunctionDocCount("multigrams_body")); - - final Phrase xyz = phrases.get(phrases.size()-1); - - assertEquals("xxxyyyzzz", xyz.getSubSequence()); - assertEquals(mult * 0, xyz.getTTF("multigrams_title")); - assertEquals(mult * 0, xyz.getDocFreq("multigrams_title")); - assertEquals(mult * 0, xyz.getConjunctionDocCount("multigrams_title")); - - assertEquals(mult * 0, xyz.getTTF("multigrams_body")); - assertEquals(mult * 0, xyz.getDocFreq("multigrams_body")); - assertEquals(mult * 0, xyz.getConjunctionDocCount("multigrams_body")); - return; - }; - + BiConsumer> assertions = + (mult, phrases) -> { + final Phrase brown_fox = phrases.get(1); + assertEquals("BROWN fox", brown_fox.getSubSequence()); + + assertEquals(mult * 1, brown_fox.getTTF("multigrams_title")); + assertEquals(mult * 1, brown_fox.getDocFreq("multigrams_title")); + assertEquals(mult * 1, brown_fox.getConjunctionDocCount("multigrams_title")); + + assertEquals(mult * 3, brown_fox.getTTF("multigrams_body")); + assertEquals(mult * 2, brown_fox.getDocFreq("multigrams_body")); + assertEquals(mult * 2, brown_fox.getConjunctionDocCount("multigrams_body")); + + final Phrase fox_lazy = phrases.get(6); + assertEquals("fox lAzY", fox_lazy.getSubSequence()); + + assertEquals(mult * 0, fox_lazy.getTTF("multigrams_title")); + assertEquals(mult * 0, fox_lazy.getDocFreq("multigrams_title")); + assertEquals(mult * 1, fox_lazy.getConjunctionDocCount("multigrams_title")); + + assertEquals(mult * 0, fox_lazy.getTTF("multigrams_body")); + assertEquals(mult * 0, fox_lazy.getDocFreq("multigrams_body")); + assertEquals(mult * 2, fox_lazy.getConjunctionDocCount("multigrams_body")); + + final Phrase bfld = phrases.get(3); + assertEquals("BROWN fox lAzY dog", bfld.getSubSequence()); + + expectThrows( + SolrException.class, + () -> { + bfld.getTTF("multigrams_title"); + }); + expectThrows( + SolrException.class, + () -> { + bfld.getDocFreq("multigrams_title"); + }); + assertEquals(mult * 0, bfld.getConjunctionDocCount("multigrams_title")); + + expectThrows( + SolrException.class, + () -> { + bfld.getTTF("multigrams_body"); + }); + expectThrows( + SolrException.class, + () -> { + bfld.getDocFreq("multigrams_body"); + }); + assertEquals(mult * 1, 
bfld.getConjunctionDocCount("multigrams_body")); + + final Phrase xyz = phrases.get(phrases.size() - 1); + + assertEquals("xxxyyyzzz", xyz.getSubSequence()); + assertEquals(mult * 0, xyz.getTTF("multigrams_title")); + assertEquals(mult * 0, xyz.getDocFreq("multigrams_title")); + assertEquals(mult * 0, xyz.getConjunctionDocCount("multigrams_title")); + + assertEquals(mult * 0, xyz.getTTF("multigrams_body")); + assertEquals(mult * 0, xyz.getDocFreq("multigrams_body")); + assertEquals(mult * 0, xyz.getConjunctionDocCount("multigrams_body")); + return; + }; final List phrasesLocal = Phrase.extractPhrases(input, analysisField, 3, 7); - + // freshly parsed phrases, w/o any stats populated, all the stats should be 0 assertions.accept(0, phrasesLocal); // If we populate with our index stats, we should get the basic values in our BiConsumer try (SolrQueryRequest req = req()) { - Phrase.populateStats(phrasesLocal, Arrays.asList("multigrams_body","multigrams_title"), - req.getSearcher()); + Phrase.populateStats( + phrasesLocal, Arrays.asList("multigrams_body", "multigrams_title"), req.getSearcher()); } assertions.accept(1, phrasesLocal); - // likewise, if we create a new freshly parsed set of phrases, and "merge" in the previous index stats - // (ie: merge results from one shard) we should get the same results + // likewise, if we create a new freshly parsed set of phrases, and "merge" in the previous index + // stats (ie: merge results from one shard) we should get the same results final List phrasesMerged = Phrase.extractPhrases(input, analysisField, 3, 7); Phrase.populateStats(phrasesMerged, Phrase.formatShardResponse(phrasesLocal)); assertions.accept(1, phrasesMerged); @@ -378,16 +429,15 @@ public void testWhiteboxStats() throws Exception { // our results should be double what we had before Phrase.populateStats(phrasesMerged, Phrase.formatShardResponse(phrasesLocal)); assertions.accept(2, phrasesMerged); - } - + public void testWhiteboxScores() throws Exception { final SchemaField analysisField = h.getCore().getLatestSchema().getField("multigrams_body"); assertNotNull(analysisField); - final Map fieldWeights = new TreeMap<>(); + final Map fieldWeights = new TreeMap<>(); fieldWeights.put("multigrams_title", 1.0D); fieldWeights.put("multigrams_body", 0.0D); // NOTE: 0 weighting should only affect total score - + final String input = "xxxyyyzzz BROWN fox why are we lAzY"; final List phrases = Phrase.extractPhrases(input, analysisField, 3, 7); try (SolrQueryRequest req = req()) { @@ -398,17 +448,20 @@ public void testWhiteboxScores() throws Exception { // do some basic sanity checks of the field & total scores... 
for (Phrase xyz : phrases.subList(0, 7)) { - // first 7 all start with xyz which isn't in index (in either field) so all scores should be -1 + // first 7 all start with xyz which isn't in index (in either field) so all scores should be + // -1 assertEquals(xyz.toString(), -1.0D, xyz.getTotalScore(), 0.0D); assertEquals(xyz.toString(), -1.0D, xyz.getFieldScore("multigrams_title"), 0.0D); assertEquals(xyz.toString(), -1.0D, xyz.getFieldScore("multigrams_body"), 0.0D); } - + // any individual terms (past xyz) should score 0.0 because they are all actually in the index // (in both fields) - for (Phrase term : phrases.subList(7, phrases.size()).stream().filter - ((p -> 1 == p.getPositionLength())).collect(Collectors.toList())) { - + for (Phrase term : + phrases.subList(7, phrases.size()).stream() + .filter((p -> 1 == p.getPositionLength())) + .collect(Collectors.toList())) { + assertEquals(term.toString(), 0.0D, term.getFieldScore("multigrams_title"), 0.0D); assertEquals(term.toString(), 0.0D, term.getFieldScore("multigrams_body"), 0.0D); assertEquals(term.toString(), 0.0D, term.getTotalScore(), 0.0D); @@ -417,31 +470,36 @@ public void testWhiteboxScores() throws Exception { // "brown fox" should score positively in both fields, and overall... final Phrase brown_fox = phrases.get(8); assertEquals("BROWN fox", brown_fox.getSubSequence()); - assertThat(brown_fox.toString(), brown_fox.getFieldScore("multigrams_title"), greaterThan(0.0D)); - assertThat(brown_fox.toString(), brown_fox.getFieldScore("multigrams_body"), greaterThan(0.0D) ); + assertThat( + brown_fox.toString(), brown_fox.getFieldScore("multigrams_title"), greaterThan(0.0D)); + assertThat(brown_fox.toString(), brown_fox.getFieldScore("multigrams_body"), greaterThan(0.0D)); assertThat(brown_fox.toString(), brown_fox.getTotalScore(), greaterThan(0.0D)); - + // "we lazy" does appear in a title value, but should score poorly given how often the terms // are used in other contexts, and should score -1 against body -- but because of our weights, // that shouldn't bring down the total - final Phrase we_lazy = phrases.get(phrases.size()-2); + final Phrase we_lazy = phrases.get(phrases.size() - 2); assertEquals("we lAzY", we_lazy.getSubSequence()); assertEquals(we_lazy.toString(), -1.0D, we_lazy.getFieldScore("multigrams_body"), 0.0D); assertThat(we_lazy.toString(), we_lazy.getFieldScore("multigrams_title"), lessThan(0.0D)); assertThat(we_lazy.toString(), we_lazy.getTotalScore(), lessThan(0.0D)); - assertEquals(we_lazy.toString(), we_lazy.getFieldScore("multigrams_title"), we_lazy.getTotalScore(), - 0.0D); - - // "why are we lazy" is longer than the max indexed phrase size & appears verbatim in a title value - // it should score -1 against body -- but because of our weights, that shouldn't bring down the total - final Phrase wawl = phrases.get(phrases.size()-7); + assertEquals( + we_lazy.toString(), + we_lazy.getFieldScore("multigrams_title"), + we_lazy.getTotalScore(), + 0.0D); + + // "why are we lazy" is longer than the max indexed phrase size & appears verbatim in a title + // value it should score -1 against body -- but because of our weights, that shouldn't bring + // down the total + final Phrase wawl = phrases.get(phrases.size() - 7); assertEquals("why are we lAzY", wawl.getSubSequence()); assertEquals(wawl.toString(), -1.0D, wawl.getFieldScore("multigrams_body"), 0.0D); assertThat(wawl.toString(), wawl.getFieldScore("multigrams_title"), greaterThan(0.0D)); assertThat(wawl.toString(), wawl.getTotalScore(), greaterThan(0.0D)); -
assertEquals(wawl.toString(), wawl.getFieldScore("multigrams_title"), wawl.getTotalScore(), - 0.0D); + assertEquals( + wawl.toString(), wawl.getFieldScore("multigrams_title"), wawl.getTotalScore(), 0.0D); // "brown fox why are we" is longer than the max indexed phrase, and none of its // (longest) sub phrases exists in either field -- so all of its scores should be -1 @@ -450,20 +508,19 @@ public void testWhiteboxScores() throws Exception { assertEquals(bfwaw.toString(), -1.0D, bfwaw.getFieldScore("multigrams_title"), 0.0D); assertEquals(bfwaw.toString(), -1.0D, bfwaw.getFieldScore("multigrams_body"), 0.0D); assertEquals(bfwaw.toString(), -1.0D, bfwaw.getTotalScore(), 0.0D); - } - + public void testWhiteboxScorcesStopwords() throws Exception { final String input = "why the lazy dog brown fox"; - final Map<String, Double> fieldWeights = new TreeMap<>(); - fieldWeights.put("multigrams_title", 1.0D); + final Map<String, Double> fieldWeights = new TreeMap<>(); + fieldWeights.put("multigrams_title", 1.0D); fieldWeights.put("multigrams_title_stop", 1.0D); - + { // If our analysisField uses all terms, // but we also generate scores from a field that filters stopwords... final SchemaField analysisField = h.getCore().getLatestSchema().getField("multigrams_title"); assertNotNull(analysisField); - + final List<Phrase> phrases = Phrase.extractPhrases(input, analysisField, 3, 7); try (SolrQueryRequest req = req()) { Phrase.populateStats(phrases, fieldWeights.keySet(), req.getSearcher()); @@ -474,31 +531,50 @@ public void testWhiteboxScorcesStopwords() throws Exception { // about stop words, but the stopword field should reject them final Phrase why_the_lazy = phrases.get(2); assertEquals("why the lazy", why_the_lazy.getSubSequence()); - assertThat(why_the_lazy.toString(), why_the_lazy.getFieldScore("multigrams_title"), greaterThan(0.0D) ); - assertEquals(why_the_lazy.toString(), -1.0D, why_the_lazy.getFieldScore("multigrams_title_stop"), 0.0D); - + assertThat( + why_the_lazy.toString(), + why_the_lazy.getFieldScore("multigrams_title"), + greaterThan(0.0D)); + assertEquals( + why_the_lazy.toString(), + -1.0D, + why_the_lazy.getFieldScore("multigrams_title_stop"), + 0.0D); + final Phrase the_lazy_dog = phrases.get(8); assertEquals("the lazy dog", the_lazy_dog.getSubSequence()); - assertThat(the_lazy_dog.toString(), the_lazy_dog.getFieldScore("multigrams_title"), greaterThan(0.0D) ); - assertEquals(the_lazy_dog.toString(), -1.0D, the_lazy_dog.getFieldScore("multigrams_title_stop"), 0.0D); - + assertThat( + the_lazy_dog.toString(), + the_lazy_dog.getFieldScore("multigrams_title"), + greaterThan(0.0D)); + assertEquals( + the_lazy_dog.toString(), + -1.0D, + the_lazy_dog.getFieldScore("multigrams_title_stop"), + 0.0D); + // sanity check that good scores are still possible with stopwords // "brown fox" should score positively in both fields, and overall...
- final Phrase brown_fox = phrases.get(phrases.size()-2); + final Phrase brown_fox = phrases.get(phrases.size() - 2); assertEquals("brown fox", brown_fox.getSubSequence()); - assertThat(brown_fox.toString(), brown_fox.getFieldScore("multigrams_title"), greaterThan(0.0D)); - assertThat(brown_fox.toString(), brown_fox.getFieldScore("multigrams_title_stop"), greaterThan(0.0D) ); + assertThat( + brown_fox.toString(), brown_fox.getFieldScore("multigrams_title"), greaterThan(0.0D)); + assertThat( + brown_fox.toString(), + brown_fox.getFieldScore("multigrams_title_stop"), + greaterThan(0.0D)); assertThat(brown_fox.toString(), brown_fox.getTotalScore(), greaterThan(0.0D)); } - - { // now flip things: our analysisField filters stopwords, + + { // now flip things: our analysisField filters stopwords, // but we also generate scores from a field that doesn't know about them... // - // (NOTE: the parser will still generate _some_ candidate phrases spanning the stop word position, - // but not ones that start with the stopword) - final SchemaField analysisField = h.getCore().getLatestSchema().getField("multigrams_title_stop"); + // (NOTE: the parser will still generate _some_ candidate phrases spanning the stop word + // position, but not ones that start with the stopword) + final SchemaField analysisField = + h.getCore().getLatestSchema().getField("multigrams_title_stop"); assertNotNull(analysisField); - + final List<Phrase> phrases = Phrase.extractPhrases(input, analysisField, 3, 7); try (SolrQueryRequest req = req()) { Phrase.populateStats(phrases, fieldWeights.keySet(), req.getSearcher()); @@ -508,83 +584,112 @@ public void testWhiteboxScorcesStopwords() throws Exception { for (Phrase p : phrases) { if (p.getPositionStart() <= 2 && 2 < p.getPositionEnd()) { - // phrases that span the stop word should have valid scores from the field that doesn't care - // about stop words, but the stopword field should reject them + // phrases that span the stop word should have valid scores from the field that doesn't + // care about stop words, but the stopword field should reject them assertEquals(p.toString(), -1.0D, p.getFieldScore("multigrams_title"), 0.0D); assertEquals(p.toString(), -1.0D, p.getFieldScore("multigrams_title_stop"), 0.0D); } } - + // sanity check that good scores are still possible with stopwords // "brown fox" should score positively in both fields, and overall...
- final Phrase brown_fox = phrases.get(phrases.size()-2); + final Phrase brown_fox = phrases.get(phrases.size() - 2); assertEquals("brown fox", brown_fox.getSubSequence()); - assertThat(brown_fox.toString(), brown_fox.getFieldScore("multigrams_title"), greaterThan(0.0D)); - assertThat(brown_fox.toString(), brown_fox.getFieldScore("multigrams_title_stop"), greaterThan(0.0D) ); + assertThat( + brown_fox.toString(), brown_fox.getFieldScore("multigrams_title"), greaterThan(0.0D)); + assertThat( + brown_fox.toString(), + brown_fox.getFieldScore("multigrams_title_stop"), + greaterThan(0.0D)); assertThat(brown_fox.toString(), brown_fox.getTotalScore(), greaterThan(0.0D)); } - } - + public void testExpectedUserErrors() throws Exception { - assertQEx("empty field list should error", - "must specify a (weighted) list of fields", - req("q","foo", "phrases","true", - "phrases.fields", " "), - ErrorCode.BAD_REQUEST); - - assertQEx("bogus field name should error", - "does not exist", - req("q","foo", "phrases","true", - "phrases.fields", "bogus1 bogus2"), - ErrorCode.BAD_REQUEST); - - assertQEx("lack of shingles should cause error", - "Unable to determine max position length", - req("q","foo", "phrases","true", - "phrases.fields", "title"), - ErrorCode.BAD_REQUEST); - - assertQEx("analyzer mismatch should cause error", - "must have the same fieldType", - req("q","foo", "phrases","true", - "phrases.fields", "multigrams_title multigrams_title_short"), - ErrorCode.BAD_REQUEST); - - assertQEx("analysis field must exist", - "does not exist", - req("q","foo", "phrases","true", - "phrases.analysis.field", "bogus", - "phrases.fields", "multigrams_title multigrams_title_short"), - ErrorCode.BAD_REQUEST); - - assertQEx("no query param should error", - "requires a query string", - req("qt", "/phrases", - "phrases.fields", "multigrams_title"), - ErrorCode.BAD_REQUEST); + assertQEx( + "empty field list should error", + "must specify a (weighted) list of fields", + req("q", "foo", "phrases", "true", "phrases.fields", " "), + ErrorCode.BAD_REQUEST); + + assertQEx( + "bogus field name should error", + "does not exist", + req("q", "foo", "phrases", "true", "phrases.fields", "bogus1 bogus2"), + ErrorCode.BAD_REQUEST); + + assertQEx( + "lack of shingles should cause error", + "Unable to determine max position length", + req("q", "foo", "phrases", "true", "phrases.fields", "title"), + ErrorCode.BAD_REQUEST); + + assertQEx( + "analyzer mismatch should cause error", + "must have the same fieldType", + req( + "q", + "foo", + "phrases", + "true", + "phrases.fields", + "multigrams_title multigrams_title_short"), + ErrorCode.BAD_REQUEST); + + assertQEx( + "analysis field must exist", + "does not exist", + req( + "q", + "foo", + "phrases", + "true", + "phrases.analysis.field", + "bogus", + "phrases.fields", + "multigrams_title multigrams_title_short"), + ErrorCode.BAD_REQUEST); + + assertQEx( + "no query param should error", + "requires a query string", + req( + "qt", "/phrases", + "phrases.fields", "multigrams_title"), + ErrorCode.BAD_REQUEST); } - + public void testMaxShingleSizeHelper() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); - - assertEquals(3, PhrasesIdentificationComponent.getMaxShingleSize - (schema.getFieldTypeByName("multigrams_3_7").getIndexAnalyzer())); assertEquals(7, PhrasesIdentificationComponent.getMaxShingleSize - (schema.getFieldTypeByName("multigrams_3_7").getQueryAnalyzer())); - - assertEquals(3, PhrasesIdentificationComponent.getMaxShingleSize - 
(schema.getFieldTypeByName("multigrams_3").getIndexAnalyzer())); - assertEquals(3, PhrasesIdentificationComponent.getMaxShingleSize - (schema.getFieldTypeByName("multigrams_3").getQueryAnalyzer())); - - assertEquals(-1, PhrasesIdentificationComponent.getMaxShingleSize - (schema.getFieldTypeByName("text").getIndexAnalyzer())); - assertEquals(-1, PhrasesIdentificationComponent.getMaxShingleSize - (schema.getFieldTypeByName("text").getQueryAnalyzer())); - + + assertEquals( + 3, + PhrasesIdentificationComponent.getMaxShingleSize( + schema.getFieldTypeByName("multigrams_3_7").getIndexAnalyzer())); + assertEquals( + 7, + PhrasesIdentificationComponent.getMaxShingleSize( + schema.getFieldTypeByName("multigrams_3_7").getQueryAnalyzer())); + + assertEquals( + 3, + PhrasesIdentificationComponent.getMaxShingleSize( + schema.getFieldTypeByName("multigrams_3").getIndexAnalyzer())); + assertEquals( + 3, + PhrasesIdentificationComponent.getMaxShingleSize( + schema.getFieldTypeByName("multigrams_3").getQueryAnalyzer())); + + assertEquals( + -1, + PhrasesIdentificationComponent.getMaxShingleSize( + schema.getFieldTypeByName("text").getIndexAnalyzer())); + assertEquals( + -1, + PhrasesIdentificationComponent.getMaxShingleSize( + schema.getFieldTypeByName("text").getQueryAnalyzer())); } - + public void testSimplePhraseRequest() throws Exception { final String input = " did a Quick brown FOX perniciously jump over the lazy dog"; final String expected = " did a Quick {brown FOX} perniciously jump over {the lazy dog}"; @@ -592,116 +697,144 @@ public void testSimplePhraseRequest() throws Exception { // should get same behavior regardless of wether we use "q" or "phrases.q" for (String p : Arrays.asList("q", "phrases.q")) { // basic request... - assertQ(req("qt", HANDLER, p, input) - // expect no search results... - , "count(//result)=0" - - // just phrase info... - , "//lst[@name='phrases']/str[@name='input'][.='"+input+"']" - , "//lst[@name='phrases']/str[@name='summary'][.='"+expected+"']" - , "count(//lst[@name='phrases']/arr[@name='details']/lst) = 2" - // - , "//lst[@name='phrases']/arr[@name='details']/lst[1]/str[@name='text'][.='the lazy dog']" - , "//lst[@name='phrases']/arr[@name='details']/lst[1]/int[@name='offset_start'][.='50']" - , "//lst[@name='phrases']/arr[@name='details']/lst[1]/int[@name='offset_end'][.='62']" - , "//lst[@name='phrases']/arr[@name='details']/lst[1]/double[@name='score'][number(.) > 0]" - // - , "//lst[@name='phrases']/arr[@name='details']/lst[2]/str[@name='text'][.='brown FOX']" - , "//lst[@name='phrases']/arr[@name='details']/lst[2]/int[@name='offset_start'][.='17']" - , "//lst[@name='phrases']/arr[@name='details']/lst[2]/int[@name='offset_end'][.='26']" - , "//lst[@name='phrases']/arr[@name='details']/lst[2]/double[@name='score'][number(.) > 0]" - ); + assertQ( + req("qt", HANDLER, p, input) + // expect no search results... + , + "count(//result)=0" + + // just phrase info... + , + "//lst[@name='phrases']/str[@name='input'][.='" + input + "']", + "//lst[@name='phrases']/str[@name='summary'][.='" + expected + "']", + "count(//lst[@name='phrases']/arr[@name='details']/lst) = 2" + // + , + "//lst[@name='phrases']/arr[@name='details']/lst[1]/str[@name='text'][.='the lazy dog']", + "//lst[@name='phrases']/arr[@name='details']/lst[1]/int[@name='offset_start'][.='50']", + "//lst[@name='phrases']/arr[@name='details']/lst[1]/int[@name='offset_end'][.='62']", + "//lst[@name='phrases']/arr[@name='details']/lst[1]/double[@name='score'][number(.) 
> 0]" + // + , + "//lst[@name='phrases']/arr[@name='details']/lst[2]/str[@name='text'][.='brown FOX']", + "//lst[@name='phrases']/arr[@name='details']/lst[2]/int[@name='offset_start'][.='17']", + "//lst[@name='phrases']/arr[@name='details']/lst[2]/int[@name='offset_end'][.='26']", + "//lst[@name='phrases']/arr[@name='details']/lst[2]/double[@name='score'][number(.) > 0]"); // empty input, empty phrases (and no error)... - assertQ(req("qt", HANDLER, p, "") - // expect no search results... - , "count(//result)=0" - // just empty phrase info for our empty input... - , "//lst[@name='phrases']/str[@name='input'][.='']" - , "//lst[@name='phrases']/str[@name='summary'][.='']" - , "count(//lst[@name='phrases']/arr[@name='details']) = 1" - , "count(//lst[@name='phrases']/arr[@name='details']/lst) = 0" - ); + assertQ( + req("qt", HANDLER, p, "") + // expect no search results... + , + "count(//result)=0" + // just empty phrase info for our empty input... + , + "//lst[@name='phrases']/str[@name='input'][.='']", + "//lst[@name='phrases']/str[@name='summary'][.='']", + "count(//lst[@name='phrases']/arr[@name='details']) = 1", + "count(//lst[@name='phrases']/arr[@name='details']/lst) = 0"); } } - + public void testSimpleSearchRequests() throws Exception { final String input = "\"brown fox\""; - - assertQ(req("q", input) - // basic search should have worked... - , "//result[@numFound='2']" - , "//result/doc/str[@name='id'][.='42']" - , "//result/doc/str[@name='id'][.='43']" - // and phrases should not be returned since they weren't requested... - , "0=count(//lst[@name='phrases'])" - ); - - assertQ(req("phrases", "false", "q", input) - // basic search should have worked... - , "//result[@numFound='2']" - , "//result/doc/str[@name='id'][.='42']" - , "//result/doc/str[@name='id'][.='43']" - // and phrases should not be returned since they were explicitly disabled... - , "0=count(//lst[@name='phrases'])" - ); - - // with input this short, all of these permutations of requests should produce the same output... - for (SolrQueryRequest req : Arrays.asList - ( // simple, using 3/7 defaults - req("phrases","true", "q", input), - - // simple, using just the 3/3 'short' fields - req("phrases","true", "q", input, - "phrases.fields", "multigrams_body_short multigrams_title_short^2"), - - // diff analysers, but explicit override using 3/3 "short" field... - req("phrases","true", "q", input, - "phrases.fields", "multigrams_body multigrams_title_short^2", - "phrases.analysis.field", "multigrams_title_short"))) { - assertQ(req - // basic search should have worked... - , "//result[@numFound='2']" - , "//result/doc/str[@name='id'][.='42']" - , "//result/doc/str[@name='id'][.='43']" - - // and we should have gotten phrase info... - , "//lst[@name='phrases']/str[@name='input'][.='"+input+"']" - , "//lst[@name='phrases']/str[@name='summary'][.='\"{brown fox}\"']" - , "count(//lst[@name='phrases']/arr[@name='details']/lst)=1" - , "//lst[@name='phrases']/arr[@name='details']/lst/str[@name='text'][.='brown fox']" - , "//lst[@name='phrases']/arr[@name='details']/lst/int[@name='offset_start'][.='1']" - , "//lst[@name='phrases']/arr[@name='details']/lst/int[@name='offset_end'][.='10']" - , "//lst[@name='phrases']/arr[@name='details']/lst/double[@name='score'][number(.) > 0]" - ); + + assertQ( + req("q", input) + // basic search should have worked... 
+ , + "//result[@numFound='2']", + "//result/doc/str[@name='id'][.='42']", + "//result/doc/str[@name='id'][.='43']" + // and phrases should not be returned since they weren't requested... + , + "0=count(//lst[@name='phrases'])"); + + assertQ( + req("phrases", "false", "q", input) + // basic search should have worked... + , + "//result[@numFound='2']", + "//result/doc/str[@name='id'][.='42']", + "//result/doc/str[@name='id'][.='43']" + // and phrases should not be returned since they were explicitly disabled... + , + "0=count(//lst[@name='phrases'])"); + + // with input this short, all of these permutations of requests should produce the same + // output... + for (SolrQueryRequest req : + Arrays.asList( // simple, using 3/7 defaults + req("phrases", "true", "q", input), + + // simple, using just the 3/3 'short' fields + req( + "phrases", + "true", + "q", + input, + "phrases.fields", + "multigrams_body_short multigrams_title_short^2"), + + // diff analysers, but explicit override using 3/3 "short" field... + req( + "phrases", + "true", + "q", + input, + "phrases.fields", + "multigrams_body multigrams_title_short^2", + "phrases.analysis.field", + "multigrams_title_short"))) { + assertQ( + req + // basic search should have worked... + , + "//result[@numFound='2']", + "//result/doc/str[@name='id'][.='42']", + "//result/doc/str[@name='id'][.='43']" + + // and we should have gotten phrase info... + , + "//lst[@name='phrases']/str[@name='input'][.='" + input + "']", + "//lst[@name='phrases']/str[@name='summary'][.='\"{brown fox}\"']", + "count(//lst[@name='phrases']/arr[@name='details']/lst)=1", + "//lst[@name='phrases']/arr[@name='details']/lst/str[@name='text'][.='brown fox']", + "//lst[@name='phrases']/arr[@name='details']/lst/int[@name='offset_start'][.='1']", + "//lst[@name='phrases']/arr[@name='details']/lst/int[@name='offset_end'][.='10']", + "//lst[@name='phrases']/arr[@name='details']/lst/double[@name='score'][number(.) > 0]"); } // override the query string to get different phrases - assertQ(req("phrases","true", "q", "*:*", "phrases.q", input) - // basic search should have found all docs... - , "//result[@numFound='4']" - // and we should have gotten phrase info for our alternative q string... - , "//lst[@name='phrases']/str[@name='input'][.='"+input+"']" - , "//lst[@name='phrases']/str[@name='summary'][.='\"{brown fox}\"']" - , "count(//lst[@name='phrases']/arr[@name='details']/lst)=1" - , "//lst[@name='phrases']/arr[@name='details']/lst/str[@name='text'][.='brown fox']" - , "//lst[@name='phrases']/arr[@name='details']/lst/int[@name='offset_start'][.='1']" - , "//lst[@name='phrases']/arr[@name='details']/lst/int[@name='offset_end'][.='10']" - , "//lst[@name='phrases']/arr[@name='details']/lst/double[@name='score'][number(.) > 0]" - ); - + assertQ( + req("phrases", "true", "q", "*:*", "phrases.q", input) + // basic search should have found all docs... + , + "//result[@numFound='4']" + // and we should have gotten phrase info for our alternative q string... 
+ , + "//lst[@name='phrases']/str[@name='input'][.='" + input + "']", + "//lst[@name='phrases']/str[@name='summary'][.='\"{brown fox}\"']", + "count(//lst[@name='phrases']/arr[@name='details']/lst)=1", + "//lst[@name='phrases']/arr[@name='details']/lst/str[@name='text'][.='brown fox']", + "//lst[@name='phrases']/arr[@name='details']/lst/int[@name='offset_start'][.='1']", + "//lst[@name='phrases']/arr[@name='details']/lst/int[@name='offset_end'][.='10']", + "//lst[@name='phrases']/arr[@name='details']/lst/double[@name='score'][number(.) > 0]"); + // empty input, empty phrases (but no error) - assertQ(req("phrases","true", "q", "*:*", "phrases.q", "") - // basic search should have found all docs... - , "//result[@numFound='4']" - // and we should have gotten (empty) phrase info for our alternative q string... - , "//lst[@name='phrases']/str[@name='input'][.='']" - , "//lst[@name='phrases']/str[@name='summary'][.='']" - , "count(//lst[@name='phrases']/arr[@name='details']) = 1" - , "count(//lst[@name='phrases']/arr[@name='details']/lst) = 0" - ); + assertQ( + req("phrases", "true", "q", "*:*", "phrases.q", "") + // basic search should have found all docs... + , + "//result[@numFound='4']" + // and we should have gotten (empty) phrase info for our alternative q string... + , + "//lst[@name='phrases']/str[@name='input'][.='']", + "//lst[@name='phrases']/str[@name='summary'][.='']", + "count(//lst[@name='phrases']/arr[@name='details']) = 1", + "count(//lst[@name='phrases']/arr[@name='details']/lst) = 0"); } - + public void testGreyboxShardSearchRequests() throws Exception { final String input = "quick brown fox ran"; @@ -709,65 +842,86 @@ public void testGreyboxShardSearchRequests() throws Exception { final String all_phrase_xpath = phrase_xpath + "/arr[@name='_all']"; // phrases requested, and correct request stage / shard purpose ... - assertQ(req("q", input, - "phrases","true", - ShardParams.IS_SHARD, "true", - ShardParams.SHARDS_PURPOSE, ""+PhrasesIdentificationComponent.SHARD_PURPOSE) - - // this shard request should have caused stats to be returned about all phrases... - , "10=count("+ all_phrase_xpath +"/lst)" - // "quick" ... - , all_phrase_xpath + "/lst[1]/lst[@name='ttf']/long[@name='multigrams_body'][.='1']" - , all_phrase_xpath + "/lst[1]/lst[@name='ttf']/long[@name='multigrams_title'][.='0']" - // ... - // "brown fox" - , all_phrase_xpath + "/lst[6]/lst[@name='ttf']/long[@name='multigrams_body'][.='3']" - , all_phrase_xpath + "/lst[6]/lst[@name='ttf']/long[@name='multigrams_title'][.='1']" - , all_phrase_xpath + "/lst[6]/lst[@name='df']/long[@name='multigrams_body'][.='2']" - , all_phrase_xpath + "/lst[6]/lst[@name='df']/long[@name='multigrams_title'][.='1']" - , all_phrase_xpath + "/lst[6]/lst[@name='conj_dc']/long[@name='multigrams_body'][.='2']" - , all_phrase_xpath + "/lst[6]/lst[@name='conj_dc']/long[@name='multigrams_title'][.='1']" - - // but no computed "scores"... - , "0=count("+phrase_xpath+"//*[@name='score'])" - ); + assertQ( + req( + "q", + input, + "phrases", + "true", + ShardParams.IS_SHARD, + "true", + ShardParams.SHARDS_PURPOSE, + "" + PhrasesIdentificationComponent.SHARD_PURPOSE) + + // this shard request should have caused stats to be returned about all phrases... + , + "10=count(" + all_phrase_xpath + "/lst)" + // "quick" ... + , + all_phrase_xpath + "/lst[1]/lst[@name='ttf']/long[@name='multigrams_body'][.='1']", + all_phrase_xpath + "/lst[1]/lst[@name='ttf']/long[@name='multigrams_title'][.='0']" + // ... 
+ // "brown fox" + , + all_phrase_xpath + "/lst[6]/lst[@name='ttf']/long[@name='multigrams_body'][.='3']", + all_phrase_xpath + "/lst[6]/lst[@name='ttf']/long[@name='multigrams_title'][.='1']", + all_phrase_xpath + "/lst[6]/lst[@name='df']/long[@name='multigrams_body'][.='2']", + all_phrase_xpath + "/lst[6]/lst[@name='df']/long[@name='multigrams_title'][.='1']", + all_phrase_xpath + "/lst[6]/lst[@name='conj_dc']/long[@name='multigrams_body'][.='2']", + all_phrase_xpath + "/lst[6]/lst[@name='conj_dc']/long[@name='multigrams_title'][.='1']" + + // but no computed "scores"... + , + "0=count(" + phrase_xpath + "//*[@name='score'])"); // phrases requested, but incorrect request stage / shard purpose ... - assertQ(req("q", input, - "phrases","true", - ShardParams.IS_SHARD, "true", - ShardParams.SHARDS_PURPOSE, ""+ShardRequest.PURPOSE_GET_FIELDS) - , "0=count("+ phrase_xpath +"/lst)"); - + assertQ( + req( + "q", + input, + "phrases", + "true", + ShardParams.IS_SHARD, + "true", + ShardParams.SHARDS_PURPOSE, + "" + ShardRequest.PURPOSE_GET_FIELDS), + "0=count(" + phrase_xpath + "/lst)"); + // phrases disabled, regardless of request stage / shard purpose ... - assertTrue("sanity check failed, stage was modified in code w/o updating test", - PhrasesIdentificationComponent.SHARD_PURPOSE != ShardRequest.PURPOSE_GET_FIELDS); - assertQ(req("q", input, - "phrases","false", - ShardParams.IS_SHARD, "true", - ShardParams.SHARDS_PURPOSE, ""+ShardRequest.PURPOSE_GET_FIELDS) - , "0=count("+ phrase_xpath +"/lst)"); - assertQ(req("q", input, - "phrases","false", - ShardParams.IS_SHARD, "true", - ShardParams.SHARDS_PURPOSE, ""+PhrasesIdentificationComponent.SHARD_PURPOSE) - , "0=count("+ phrase_xpath +"/lst)"); + assertTrue( + "sanity check failed, stage was modified in code w/o updating test", + PhrasesIdentificationComponent.SHARD_PURPOSE != ShardRequest.PURPOSE_GET_FIELDS); + assertQ( + req( + "q", + input, + "phrases", + "false", + ShardParams.IS_SHARD, + "true", + ShardParams.SHARDS_PURPOSE, + "" + ShardRequest.PURPOSE_GET_FIELDS), + "0=count(" + phrase_xpath + "/lst)"); + assertQ( + req( + "q", + input, + "phrases", + "false", + ShardParams.IS_SHARD, + "true", + ShardParams.SHARDS_PURPOSE, + "" + PhrasesIdentificationComponent.SHARD_PURPOSE), + "0=count(" + phrase_xpath + "/lst)"); } - - // //////////////////////////////////////////////////////////////// - - - - /** - * Trivial Helper method that collects & compares to an empty List so - * the assertion shows the unexpected stream elements + /** + * Trivial Helper method that collects & compares to an empty List so the assertion shows the + * unexpected stream elements */ public void assertEmptyStream(final String msg, final Stream stream) { - assertEquals(msg, - Collections.emptyList(), - stream.collect(Collectors.toList())); + assertEquals(msg, Collections.emptyList(), stream.collect(Collectors.toList())); } } diff --git a/solr/core/src/test/org/apache/solr/handler/component/QueryComponentPartialResultsTest.java b/solr/core/src/test/org/apache/solr/handler/component/QueryComponentPartialResultsTest.java index 952e720af30..8063bce1bca 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/QueryComponentPartialResultsTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/QueryComponentPartialResultsTest.java @@ -27,93 +27,96 @@ import org.junit.Test; public class QueryComponentPartialResultsTest extends SolrTestCaseJ4 { - private static final String SORT_FIELD_NAME = "category"; - private static final int shard1Size = 2; - private 
static final int shard2Size = 3; - - private static int id = 0; - private static ShardRequest shardRequestWithPartialResults; - - @BeforeClass - public static void setup() { - assumeWorkingMockito(); - shardRequestWithPartialResults = createShardRequestWithPartialResults(); - } - - @Test - public void includesPartialShardResultWhenUsingImplicitScoreSort() { - SortSpec sortSpec = MockSortSpecBuilder.create() - .withIncludesNonScoreOrDocSortField(false) - .build(); - testPartialResultsForSortSpec(sortSpec, true); + private static final String SORT_FIELD_NAME = "category"; + private static final int shard1Size = 2; + private static final int shard2Size = 3; + + private static int id = 0; + private static ShardRequest shardRequestWithPartialResults; + + @BeforeClass + public static void setup() { + assumeWorkingMockito(); + shardRequestWithPartialResults = createShardRequestWithPartialResults(); + } + + @Test + public void includesPartialShardResultWhenUsingImplicitScoreSort() { + SortSpec sortSpec = + MockSortSpecBuilder.create().withIncludesNonScoreOrDocSortField(false).build(); + testPartialResultsForSortSpec(sortSpec, true); + } + + @Test + public void includesPartialShardResultWhenUsingExplicitScoreSort() { + SortSpec sortSpec = + MockSortSpecBuilder.create() + .withSortFields(new SortField[] {SortField.FIELD_SCORE}) + .withIncludesNonScoreOrDocSortField(false) + .build(); + testPartialResultsForSortSpec(sortSpec, true); + } + + @Test + public void includesPartialShardResultWhenUsingExplicitDocSort() { + SortSpec sortSpec = + MockSortSpecBuilder.create() + .withSortFields(new SortField[] {SortField.FIELD_DOC}) + .withIncludesNonScoreOrDocSortField(false) + .build(); + testPartialResultsForSortSpec(sortSpec, true); + } + + @Test + public void excludesPartialShardResultWhenUsingNonScoreOrDocSortField() { + SortField sortField = new SortField(SORT_FIELD_NAME, SortField.Type.INT); + SortSpec sortSpec = + MockSortSpecBuilder.create() + .withSortFields(new SortField[] {sortField}) + .withIncludesNonScoreOrDocSortField(true) + .build(); + testPartialResultsForSortSpec(sortSpec, false); + } + + private void testPartialResultsForSortSpec( + SortSpec sortSpec, boolean shouldIncludePartialShardResult) { + + MockResponseBuilder responseBuilder = MockResponseBuilder.create().withSortSpec(sortSpec); + + QueryComponent queryComponent = new QueryComponent(); + queryComponent.mergeIds(responseBuilder, shardRequestWithPartialResults); + + // do we have the expected document count? 
+ // if results are not merged for the partial results shard, then the total doc count will + // exclude them + if (shouldIncludePartialShardResult) { + assertEquals(shard1Size + shard2Size, responseBuilder.getResponseDocs().size()); + } else { + assertEquals(shard2Size, responseBuilder.getResponseDocs().size()); } - - @Test - public void includesPartialShardResultWhenUsingExplicitScoreSort() { - SortSpec sortSpec = MockSortSpecBuilder.create() - .withSortFields(new SortField[]{SortField.FIELD_SCORE}) - .withIncludesNonScoreOrDocSortField(false) - .build(); - testPartialResultsForSortSpec(sortSpec, true); + } + + private static ShardRequest createShardRequestWithPartialResults() { + final NamedList<Object> shard1ResponseHeader = new NamedList<>(); + final NamedList<Object> shard2ResponseHeader = new NamedList<>(); + + // the results from shard1 are marked partial + shard1ResponseHeader.add(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY, Boolean.TRUE); + + return MockShardRequest.create() + .withShardResponse(shard1ResponseHeader, createSolrDocumentList(shard1Size)) + .withShardResponse(shard2ResponseHeader, createSolrDocumentList(shard2Size)); + } + + private static SolrDocumentList createSolrDocumentList(int size) { + SolrDocumentList solrDocuments = new SolrDocumentList(); + for (int i = 0; i < size; i++) { + SolrDocument solrDocument = new SolrDocument(); + solrDocument.addField("id", id++); + solrDocument.addField("score", (float) id); + solrDocument.addField(SORT_FIELD_NAME, id); + solrDocuments.add(solrDocument); } - - @Test - public void includesPartialShardResultWhenUsingExplicitDocSort() { - SortSpec sortSpec = MockSortSpecBuilder.create() - .withSortFields(new SortField[]{SortField.FIELD_DOC}) - .withIncludesNonScoreOrDocSortField(false) - .build(); - testPartialResultsForSortSpec(sortSpec, true); - } - - @Test - public void excludesPartialShardResultWhenUsingNonScoreOrDocSortField() { - SortField sortField = new SortField(SORT_FIELD_NAME, SortField.Type.INT); - SortSpec sortSpec = MockSortSpecBuilder.create() - .withSortFields(new SortField[]{sortField}) - .withIncludesNonScoreOrDocSortField(true) - .build(); - testPartialResultsForSortSpec(sortSpec, false); - } - - private void testPartialResultsForSortSpec(SortSpec sortSpec, boolean shouldIncludePartialShardResult) { - - MockResponseBuilder responseBuilder = MockResponseBuilder.create().withSortSpec(sortSpec); - - QueryComponent queryComponent = new QueryComponent(); - queryComponent.mergeIds(responseBuilder, shardRequestWithPartialResults); - - // do we have the expected document count?
- // if results are not merged for the partial results shard, then the total doc count will exclude them - if (shouldIncludePartialShardResult) { - assertEquals(shard1Size + shard2Size, responseBuilder.getResponseDocs().size()); - } else { - assertEquals(shard2Size, responseBuilder.getResponseDocs().size()); - } - } - - private static ShardRequest createShardRequestWithPartialResults() { - final NamedList<Object> shard1ResponseHeader = new NamedList<>(); - final NamedList<Object> shard2ResponseHeader = new NamedList<>(); - - // the results from shard1 are marked partial - shard1ResponseHeader.add(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY, Boolean.TRUE); - - return MockShardRequest.create() - .withShardResponse(shard1ResponseHeader, createSolrDocumentList(shard1Size)) - .withShardResponse(shard2ResponseHeader, createSolrDocumentList(shard2Size)); - } - - private static SolrDocumentList createSolrDocumentList(int size) { - SolrDocumentList solrDocuments = new SolrDocumentList(); - for(int i = 0; i < size; i++) { - SolrDocument solrDocument = new SolrDocument(); - solrDocument.addField("id", id++); - solrDocument.addField("score", (float)id); - solrDocument.addField(SORT_FIELD_NAME, id); - solrDocuments.add(solrDocument); - } - return solrDocuments; - } - + return solrDocuments; + } } diff --git a/solr/core/src/test/org/apache/solr/handler/component/QueryElevationComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/QueryElevationComponentTest.java index ce7576a4453..8d26eae2511 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/QueryElevationComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/QueryElevationComponentTest.java @@ -16,6 +16,12 @@ */ package org.apache.solr.handler.component; +import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_NEXT; +import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_PARAM; +import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START; +import static org.apache.solr.common.util.Utils.fromJSONString; + +import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import java.io.File; import java.io.FileOutputStream; import java.io.OutputStreamWriter; @@ -27,8 +33,6 @@ import java.util.Map; import java.util.Set; import java.util.stream.Collectors; - -import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import org.apache.lucene.index.IndexReader; import org.apache.lucene.util.BytesRef; import org.apache.solr.SolrTestCaseJ4; @@ -51,16 +55,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_NEXT; -import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_PARAM; -import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START; -import static org.apache.solr.common.util.Utils.fromJSONString; - public class QueryElevationComponentTest extends SolrTestCaseJ4 { - @Rule - public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); - + @Rule public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -96,12 +93,12 @@ private void init(String schema) throws Exception { } private void init(String config, String schema) throws Exception { - initCore(config,schema); + initCore(config, schema); clearIndex(); assertU(commit()); } - //TODO should be @After ?
+ // TODO should be @After ? private void delete() { deleteCore(); } @@ -124,16 +121,22 @@ public void testFieldType() throws Exception { assertU(adoc("id", "9", "text", "AAAA AAAA", "str_s", "a")); assertU(commit()); - assertQ("", req(CommonParams.Q, "AAAA", CommonParams.QT, "/elevate", - CommonParams.FL, "id, score, [elevated]") - , "//*[@numFound='3']" - , "//result/doc[1]/str[@name='id'][.='7']" - , "//result/doc[2]/str[@name='id'][.='9']" - , "//result/doc[3]/str[@name='id'][.='8']", + assertQ( + "", + req( + CommonParams.Q, + "AAAA", + CommonParams.QT, + "/elevate", + CommonParams.FL, + "id, score, [elevated]"), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='7']", + "//result/doc[2]/str[@name='id'][.='9']", + "//result/doc[3]/str[@name='id'][.='8']", "//result/doc[1]/bool[@name='[elevated]'][.='true']", "//result/doc[2]/bool[@name='[elevated]'][.='false']", - "//result/doc[3]/bool[@name='[elevated]'][.='false']" - ); + "//result/doc[3]/bool[@name='[elevated]'][.='false']"); } finally { delete(); } @@ -163,178 +166,164 @@ public void testGroupedQuery() throws Exception { final String groups = "//arr[@name='groups']"; - assertQ("non-elevated group query", - req(CommonParams.Q, "AAAA", - CommonParams.QT, "/elevate", - GroupParams.GROUP_FIELD, "str_s", - GroupParams.GROUP, "true", - GroupParams.GROUP_TOTAL_COUNT, "true", - GroupParams.GROUP_LIMIT, "100", - QueryElevationParams.ENABLE, "false", - CommonParams.FL, "id, score, [elevated]") - , "//*[@name='ngroups'][.='3']" - , "//*[@name='matches'][.='6']" - - , groups +"/lst[1]//doc[1]/str[@name='id'][.='6']" - , groups +"/lst[1]//doc[1]/bool[@name='[elevated]'][.='false']" - , groups +"/lst[1]//doc[2]/str[@name='id'][.='66']" - , groups +"/lst[1]//doc[2]/bool[@name='[elevated]'][.='false']" - - , groups +"/lst[2]//doc[1]/str[@name='id'][.='7']" - , groups +"/lst[2]//doc[1]/bool[@name='[elevated]'][.='false']" - , groups +"/lst[2]//doc[2]/str[@name='id'][.='77']" - , groups +"/lst[2]//doc[2]/bool[@name='[elevated]'][.='false']" - - , groups +"/lst[3]//doc[1]/str[@name='id'][.='2']" - , groups +"/lst[3]//doc[1]/bool[@name='[elevated]'][.='false']" - , groups +"/lst[3]//doc[2]/str[@name='id'][.='22']" - , groups +"/lst[3]//doc[2]/bool[@name='[elevated]'][.='false']" - ); - - assertQ("elevated group query", - req(CommonParams.Q, "AAAA", - CommonParams.QT, "/elevate", - GroupParams.GROUP_FIELD, "str_s", - GroupParams.GROUP, "true", - GroupParams.GROUP_TOTAL_COUNT, "true", - GroupParams.GROUP_LIMIT, "100", - CommonParams.FL, "id, score, [elevated]") - , "//*[@name='ngroups'][.='3']" - , "//*[@name='matches'][.='6']" - - , groups +"/lst[1]//doc[1]/str[@name='id'][.='7']" - , groups +"/lst[1]//doc[1]/bool[@name='[elevated]'][.='true']" - , groups +"/lst[1]//doc[2]/str[@name='id'][.='77']" - , groups +"/lst[1]//doc[2]/bool[@name='[elevated]'][.='false']" - - , groups +"/lst[2]//doc[1]/str[@name='id'][.='6']" - , groups +"/lst[2]//doc[1]/bool[@name='[elevated]'][.='false']" - , groups +"/lst[2]//doc[2]/str[@name='id'][.='66']" - , groups +"/lst[2]//doc[2]/bool[@name='[elevated]'][.='false']" - - , groups +"/lst[3]//doc[1]/str[@name='id'][.='2']" - , groups +"/lst[3]//doc[1]/bool[@name='[elevated]'][.='false']" - , groups +"/lst[3]//doc[2]/str[@name='id'][.='22']" - , groups +"/lst[3]//doc[2]/bool[@name='[elevated]'][.='false']" - ); - - assertQ("non-elevated because sorted group query", - req(CommonParams.Q, "AAAA", - CommonParams.QT, "/elevate", - CommonParams.SORT, "id asc", - GroupParams.GROUP_FIELD, "str_s", - GroupParams.GROUP, 
"true", - GroupParams.GROUP_TOTAL_COUNT, "true", - GroupParams.GROUP_LIMIT, "100", - CommonParams.FL, "id, score, [elevated]") - , "//*[@name='ngroups'][.='3']" - , "//*[@name='matches'][.='6']" - - , groups +"/lst[1]//doc[1]/str[@name='id'][.='2']" - , groups +"/lst[1]//doc[1]/bool[@name='[elevated]'][.='false']" - , groups +"/lst[1]//doc[2]/str[@name='id'][.='22']" - , groups +"/lst[1]//doc[2]/bool[@name='[elevated]'][.='false']" - - , groups +"/lst[2]//doc[1]/str[@name='id'][.='6']" - , groups +"/lst[2]//doc[1]/bool[@name='[elevated]'][.='false']" - , groups +"/lst[2]//doc[2]/str[@name='id'][.='66']" - , groups +"/lst[2]//doc[2]/bool[@name='[elevated]'][.='false']" - - , groups +"/lst[3]//doc[1]/str[@name='id'][.='7']" - , groups +"/lst[3]//doc[1]/bool[@name='[elevated]'][.='true']" - , groups +"/lst[3]//doc[2]/str[@name='id'][.='77']" - , groups +"/lst[3]//doc[2]/bool[@name='[elevated]'][.='false']" - ); - - assertQ("force-elevated sorted group query", - req(CommonParams.Q, "AAAA", - CommonParams.QT, "/elevate", - CommonParams.SORT, "id asc", - QueryElevationParams.FORCE_ELEVATION, "true", - GroupParams.GROUP_FIELD, "str_s", - GroupParams.GROUP, "true", - GroupParams.GROUP_TOTAL_COUNT, "true", - GroupParams.GROUP_LIMIT, "100", - CommonParams.FL, "id, score, [elevated]") - , "//*[@name='ngroups'][.='3']" - , "//*[@name='matches'][.='6']" - - , groups +"/lst[1]//doc[1]/str[@name='id'][.='7']" - , groups +"/lst[1]//doc[1]/bool[@name='[elevated]'][.='true']" - , groups +"/lst[1]//doc[2]/str[@name='id'][.='77']" - , groups +"/lst[1]//doc[2]/bool[@name='[elevated]'][.='false']" - - , groups +"/lst[2]//doc[1]/str[@name='id'][.='2']" - , groups +"/lst[2]//doc[1]/bool[@name='[elevated]'][.='false']" - , groups +"/lst[2]//doc[2]/str[@name='id'][.='22']" - , groups +"/lst[2]//doc[2]/bool[@name='[elevated]'][.='false']" - - , groups +"/lst[3]//doc[1]/str[@name='id'][.='6']" - , groups +"/lst[3]//doc[1]/bool[@name='[elevated]'][.='false']" - , groups +"/lst[3]//doc[2]/str[@name='id'][.='66']" - , groups +"/lst[3]//doc[2]/bool[@name='[elevated]'][.='false']" - ); - - - assertQ("non-elevated because of sort within group query", - req(CommonParams.Q, "AAAA", - CommonParams.QT, "/elevate", - CommonParams.SORT, "id asc", - GroupParams.GROUP_SORT, "id desc", - GroupParams.GROUP_FIELD, "str_s", - GroupParams.GROUP, "true", - GroupParams.GROUP_TOTAL_COUNT, "true", - GroupParams.GROUP_LIMIT, "100", - CommonParams.FL, "id, score, [elevated]") - , "//*[@name='ngroups'][.='3']" - , "//*[@name='matches'][.='6']" - - , groups +"/lst[1]//doc[1]/str[@name='id'][.='22']" - , groups +"/lst[1]//doc[1]/bool[@name='[elevated]'][.='false']" - , groups +"/lst[1]//doc[2]/str[@name='id'][.='2']" - , groups +"/lst[1]//doc[2]/bool[@name='[elevated]'][.='false']" - - , groups +"/lst[2]//doc[1]/str[@name='id'][.='66']" - , groups +"/lst[2]//doc[1]/bool[@name='[elevated]'][.='false']" - , groups +"/lst[2]//doc[2]/str[@name='id'][.='6']" - , groups +"/lst[2]//doc[2]/bool[@name='[elevated]'][.='false']" - - , groups +"/lst[3]//doc[1]/str[@name='id'][.='77']" - , groups +"/lst[3]//doc[1]/bool[@name='[elevated]'][.='false']" - , groups +"/lst[3]//doc[2]/str[@name='id'][.='7']" - , groups +"/lst[3]//doc[2]/bool[@name='[elevated]'][.='true']" - ); - - - assertQ("force elevated sort within sorted group query", - req(CommonParams.Q, "AAAA", - CommonParams.QT, "/elevate", - CommonParams.SORT, "id asc", - GroupParams.GROUP_SORT, "id desc", - QueryElevationParams.FORCE_ELEVATION, "true", - GroupParams.GROUP_FIELD, "str_s", - 
GroupParams.GROUP, "true", - GroupParams.GROUP_TOTAL_COUNT, "true", - GroupParams.GROUP_LIMIT, "100", - CommonParams.FL, "id, score, [elevated]") - , "//*[@name='ngroups'][.='3']" - , "//*[@name='matches'][.='6']" - - , groups +"/lst[1]//doc[1]/str[@name='id'][.='7']" - , groups +"/lst[1]//doc[1]/bool[@name='[elevated]'][.='true']" - , groups +"/lst[1]//doc[2]/str[@name='id'][.='77']" - , groups +"/lst[1]//doc[2]/bool[@name='[elevated]'][.='false']" - - , groups +"/lst[2]//doc[1]/str[@name='id'][.='22']" - , groups +"/lst[2]//doc[1]/bool[@name='[elevated]'][.='false']" - , groups +"/lst[2]//doc[2]/str[@name='id'][.='2']" - , groups +"/lst[2]//doc[2]/bool[@name='[elevated]'][.='false']" - - , groups +"/lst[3]//doc[1]/str[@name='id'][.='66']" - , groups +"/lst[3]//doc[1]/bool[@name='[elevated]'][.='false']" - , groups +"/lst[3]//doc[2]/str[@name='id'][.='6']" - , groups +"/lst[3]//doc[2]/bool[@name='[elevated]'][.='false']" - ); + assertQ( + "non-elevated group query", + req( + CommonParams.Q, "AAAA", + CommonParams.QT, "/elevate", + GroupParams.GROUP_FIELD, "str_s", + GroupParams.GROUP, "true", + GroupParams.GROUP_TOTAL_COUNT, "true", + GroupParams.GROUP_LIMIT, "100", + QueryElevationParams.ENABLE, "false", + CommonParams.FL, "id, score, [elevated]"), + "//*[@name='ngroups'][.='3']", + "//*[@name='matches'][.='6']", + groups + "/lst[1]//doc[1]/str[@name='id'][.='6']", + groups + "/lst[1]//doc[1]/bool[@name='[elevated]'][.='false']", + groups + "/lst[1]//doc[2]/str[@name='id'][.='66']", + groups + "/lst[1]//doc[2]/bool[@name='[elevated]'][.='false']", + groups + "/lst[2]//doc[1]/str[@name='id'][.='7']", + groups + "/lst[2]//doc[1]/bool[@name='[elevated]'][.='false']", + groups + "/lst[2]//doc[2]/str[@name='id'][.='77']", + groups + "/lst[2]//doc[2]/bool[@name='[elevated]'][.='false']", + groups + "/lst[3]//doc[1]/str[@name='id'][.='2']", + groups + "/lst[3]//doc[1]/bool[@name='[elevated]'][.='false']", + groups + "/lst[3]//doc[2]/str[@name='id'][.='22']", + groups + "/lst[3]//doc[2]/bool[@name='[elevated]'][.='false']"); + + assertQ( + "elevated group query", + req( + CommonParams.Q, "AAAA", + CommonParams.QT, "/elevate", + GroupParams.GROUP_FIELD, "str_s", + GroupParams.GROUP, "true", + GroupParams.GROUP_TOTAL_COUNT, "true", + GroupParams.GROUP_LIMIT, "100", + CommonParams.FL, "id, score, [elevated]"), + "//*[@name='ngroups'][.='3']", + "//*[@name='matches'][.='6']", + groups + "/lst[1]//doc[1]/str[@name='id'][.='7']", + groups + "/lst[1]//doc[1]/bool[@name='[elevated]'][.='true']", + groups + "/lst[1]//doc[2]/str[@name='id'][.='77']", + groups + "/lst[1]//doc[2]/bool[@name='[elevated]'][.='false']", + groups + "/lst[2]//doc[1]/str[@name='id'][.='6']", + groups + "/lst[2]//doc[1]/bool[@name='[elevated]'][.='false']", + groups + "/lst[2]//doc[2]/str[@name='id'][.='66']", + groups + "/lst[2]//doc[2]/bool[@name='[elevated]'][.='false']", + groups + "/lst[3]//doc[1]/str[@name='id'][.='2']", + groups + "/lst[3]//doc[1]/bool[@name='[elevated]'][.='false']", + groups + "/lst[3]//doc[2]/str[@name='id'][.='22']", + groups + "/lst[3]//doc[2]/bool[@name='[elevated]'][.='false']"); + + assertQ( + "non-elevated because sorted group query", + req( + CommonParams.Q, "AAAA", + CommonParams.QT, "/elevate", + CommonParams.SORT, "id asc", + GroupParams.GROUP_FIELD, "str_s", + GroupParams.GROUP, "true", + GroupParams.GROUP_TOTAL_COUNT, "true", + GroupParams.GROUP_LIMIT, "100", + CommonParams.FL, "id, score, [elevated]"), + "//*[@name='ngroups'][.='3']", + "//*[@name='matches'][.='6']", + groups + 
"/lst[1]//doc[1]/str[@name='id'][.='2']", + groups + "/lst[1]//doc[1]/bool[@name='[elevated]'][.='false']", + groups + "/lst[1]//doc[2]/str[@name='id'][.='22']", + groups + "/lst[1]//doc[2]/bool[@name='[elevated]'][.='false']", + groups + "/lst[2]//doc[1]/str[@name='id'][.='6']", + groups + "/lst[2]//doc[1]/bool[@name='[elevated]'][.='false']", + groups + "/lst[2]//doc[2]/str[@name='id'][.='66']", + groups + "/lst[2]//doc[2]/bool[@name='[elevated]'][.='false']", + groups + "/lst[3]//doc[1]/str[@name='id'][.='7']", + groups + "/lst[3]//doc[1]/bool[@name='[elevated]'][.='true']", + groups + "/lst[3]//doc[2]/str[@name='id'][.='77']", + groups + "/lst[3]//doc[2]/bool[@name='[elevated]'][.='false']"); + + assertQ( + "force-elevated sorted group query", + req( + CommonParams.Q, "AAAA", + CommonParams.QT, "/elevate", + CommonParams.SORT, "id asc", + QueryElevationParams.FORCE_ELEVATION, "true", + GroupParams.GROUP_FIELD, "str_s", + GroupParams.GROUP, "true", + GroupParams.GROUP_TOTAL_COUNT, "true", + GroupParams.GROUP_LIMIT, "100", + CommonParams.FL, "id, score, [elevated]"), + "//*[@name='ngroups'][.='3']", + "//*[@name='matches'][.='6']", + groups + "/lst[1]//doc[1]/str[@name='id'][.='7']", + groups + "/lst[1]//doc[1]/bool[@name='[elevated]'][.='true']", + groups + "/lst[1]//doc[2]/str[@name='id'][.='77']", + groups + "/lst[1]//doc[2]/bool[@name='[elevated]'][.='false']", + groups + "/lst[2]//doc[1]/str[@name='id'][.='2']", + groups + "/lst[2]//doc[1]/bool[@name='[elevated]'][.='false']", + groups + "/lst[2]//doc[2]/str[@name='id'][.='22']", + groups + "/lst[2]//doc[2]/bool[@name='[elevated]'][.='false']", + groups + "/lst[3]//doc[1]/str[@name='id'][.='6']", + groups + "/lst[3]//doc[1]/bool[@name='[elevated]'][.='false']", + groups + "/lst[3]//doc[2]/str[@name='id'][.='66']", + groups + "/lst[3]//doc[2]/bool[@name='[elevated]'][.='false']"); + + assertQ( + "non-elevated because of sort within group query", + req( + CommonParams.Q, "AAAA", + CommonParams.QT, "/elevate", + CommonParams.SORT, "id asc", + GroupParams.GROUP_SORT, "id desc", + GroupParams.GROUP_FIELD, "str_s", + GroupParams.GROUP, "true", + GroupParams.GROUP_TOTAL_COUNT, "true", + GroupParams.GROUP_LIMIT, "100", + CommonParams.FL, "id, score, [elevated]"), + "//*[@name='ngroups'][.='3']", + "//*[@name='matches'][.='6']", + groups + "/lst[1]//doc[1]/str[@name='id'][.='22']", + groups + "/lst[1]//doc[1]/bool[@name='[elevated]'][.='false']", + groups + "/lst[1]//doc[2]/str[@name='id'][.='2']", + groups + "/lst[1]//doc[2]/bool[@name='[elevated]'][.='false']", + groups + "/lst[2]//doc[1]/str[@name='id'][.='66']", + groups + "/lst[2]//doc[1]/bool[@name='[elevated]'][.='false']", + groups + "/lst[2]//doc[2]/str[@name='id'][.='6']", + groups + "/lst[2]//doc[2]/bool[@name='[elevated]'][.='false']", + groups + "/lst[3]//doc[1]/str[@name='id'][.='77']", + groups + "/lst[3]//doc[1]/bool[@name='[elevated]'][.='false']", + groups + "/lst[3]//doc[2]/str[@name='id'][.='7']", + groups + "/lst[3]//doc[2]/bool[@name='[elevated]'][.='true']"); + + assertQ( + "force elevated sort within sorted group query", + req( + CommonParams.Q, "AAAA", + CommonParams.QT, "/elevate", + CommonParams.SORT, "id asc", + GroupParams.GROUP_SORT, "id desc", + QueryElevationParams.FORCE_ELEVATION, "true", + GroupParams.GROUP_FIELD, "str_s", + GroupParams.GROUP, "true", + GroupParams.GROUP_TOTAL_COUNT, "true", + GroupParams.GROUP_LIMIT, "100", + CommonParams.FL, "id, score, [elevated]"), + "//*[@name='ngroups'][.='3']", + "//*[@name='matches'][.='6']", + groups + 
"/lst[1]//doc[1]/str[@name='id'][.='7']", + groups + "/lst[1]//doc[1]/bool[@name='[elevated]'][.='true']", + groups + "/lst[1]//doc[2]/str[@name='id'][.='77']", + groups + "/lst[1]//doc[2]/bool[@name='[elevated]'][.='false']", + groups + "/lst[2]//doc[1]/str[@name='id'][.='22']", + groups + "/lst[2]//doc[1]/bool[@name='[elevated]'][.='false']", + groups + "/lst[2]//doc[2]/str[@name='id'][.='2']", + groups + "/lst[2]//doc[2]/bool[@name='[elevated]'][.='false']", + groups + "/lst[3]//doc[1]/str[@name='id'][.='66']", + groups + "/lst[3]//doc[1]/bool[@name='[elevated]'][.='false']", + groups + "/lst[3]//doc[2]/str[@name='id'][.='6']", + groups + "/lst[3]//doc[2]/bool[@name='[elevated]'][.='false']"); } finally { delete(); @@ -347,34 +336,39 @@ public void testTrieFieldType() throws Exception { init("schema.xml"); clearIndex(); assertU(commit()); - assertU(adoc("id", "1", "text", "XXXX XXXX", "str_s", "a" )); - assertU(adoc("id", "2", "text", "YYYY", "str_s", "b" )); - assertU(adoc("id", "3", "text", "ZZZZ", "str_s", "c" )); - - assertU(adoc("id", "4", "text", "XXXX XXXX", "str_s", "x" )); - assertU(adoc("id", "5", "text", "YYYY YYYY", "str_s", "y" )); - assertU(adoc("id", "6", "text", "XXXX XXXX", "str_s", "z" )); - assertU(adoc("id", "7", "text", "AAAA", "str_s", "a" )); - assertU(adoc("id", "8", "text", "AAAA", "str_s", "a" )); - assertU(adoc("id", "9", "text", "AAAA AAAA", "str_s", "a" )); + assertU(adoc("id", "1", "text", "XXXX XXXX", "str_s", "a")); + assertU(adoc("id", "2", "text", "YYYY", "str_s", "b")); + assertU(adoc("id", "3", "text", "ZZZZ", "str_s", "c")); + + assertU(adoc("id", "4", "text", "XXXX XXXX", "str_s", "x")); + assertU(adoc("id", "5", "text", "YYYY YYYY", "str_s", "y")); + assertU(adoc("id", "6", "text", "XXXX XXXX", "str_s", "z")); + assertU(adoc("id", "7", "text", "AAAA", "str_s", "a")); + assertU(adoc("id", "8", "text", "AAAA", "str_s", "a")); + assertU(adoc("id", "9", "text", "AAAA AAAA", "str_s", "a")); assertU(commit()); - assertQ("", req(CommonParams.Q, "AAAA", CommonParams.QT, "/elevate", - CommonParams.FL, "id, score, [elevated]") - ,"//*[@numFound='3']" - ,"//result/doc[1]/str[@name='id'][.='7']" - ,"//result/doc[2]/str[@name='id'][.='8']" - ,"//result/doc[3]/str[@name='id'][.='9']", - "//result/doc[1]/bool[@name='[elevated]'][.='true']", - "//result/doc[2]/bool[@name='[elevated]'][.='false']", - "//result/doc[3]/bool[@name='[elevated]'][.='false']" - ); - } finally{ + assertQ( + "", + req( + CommonParams.Q, + "AAAA", + CommonParams.QT, + "/elevate", + CommonParams.FL, + "id, score, [elevated]"), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='7']", + "//result/doc[2]/str[@name='id'][.='8']", + "//result/doc[3]/str[@name='id'][.='9']", + "//result/doc[1]/bool[@name='[elevated]'][.='true']", + "//result/doc[2]/bool[@name='[elevated]'][.='false']", + "//result/doc[3]/bool[@name='[elevated]'][.='false']"); + } finally { delete(); } } - @Test public void testInterface() throws Exception { try { @@ -394,7 +388,8 @@ public void testInterface() throws Exception { comp.init(args); comp.inform(core); - QueryElevationComponent.ElevationProvider elevationProvider = comp.getElevationProvider(reader, core); + QueryElevationComponent.ElevationProvider elevationProvider = + comp.getElevationProvider(reader, core); // Make sure the boosts loaded properly assertEquals(11, elevationProvider.size()); @@ -414,7 +409,8 @@ public void testInterface() throws Exception { try (QueryElevationComponent comp = new QueryElevationComponent()) { comp.init(args); 
comp.inform(core); - QueryElevationComponent.ElevationProvider elevationProvider = comp.getElevationProvider(reader, core); + QueryElevationComponent.ElevationProvider elevationProvider = + comp.getElevationProvider(reader, core); assertEquals(11, elevationProvider.size()); assertEquals(1, elevationProvider.getElevationForQuery("XXXX").elevatedIds.size()); assertEquals(2, elevationProvider.getElevationForQuery("YYYY").elevatedIds.size()); @@ -426,13 +422,14 @@ public void testInterface() throws Exception { assertEquals("xxxx", comp.analyzeQuery("XXXX")); assertEquals("xxxxyyyy", comp.analyzeQuery("XXXX YYYY")); - assertQ("Make sure QEC handles null queries", req("qt", "/elevate", "q.alt", "*:*", "defType", "dismax"), + assertQ( + "Make sure QEC handles null queries", + req("qt", "/elevate", "q.alt", "*:*", "defType", "dismax"), "//*[@numFound='0']"); } } finally { delete(); } - } @Test @@ -449,31 +446,52 @@ public void testMarker() throws Exception { assertU(adoc("id", "7", "title", "AAAA", "str_s1", "a")); assertU(commit()); - assertQ("", req(CommonParams.Q, "XXXX", CommonParams.QT, "/elevate", - CommonParams.FL, "id, score, [elevated]") - , "//*[@numFound='3']" - , "//result/doc[1]/str[@name='id'][.='1']" - , "//result/doc[2]/str[@name='id'][.='4']" - , "//result/doc[3]/str[@name='id'][.='6']", + assertQ( + "", + req( + CommonParams.Q, + "XXXX", + CommonParams.QT, + "/elevate", + CommonParams.FL, + "id, score, [elevated]"), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='1']", + "//result/doc[2]/str[@name='id'][.='4']", + "//result/doc[3]/str[@name='id'][.='6']", "//result/doc[1]/bool[@name='[elevated]'][.='true']", "//result/doc[2]/bool[@name='[elevated]'][.='false']", - "//result/doc[3]/bool[@name='[elevated]'][.='false']" - ); - - assertQ("", req(CommonParams.Q, "AAAA", CommonParams.QT, "/elevate", - CommonParams.FL, "id, score, [elevated]") - ,"//*[@numFound='1']" - ,"//result/doc[1]/str[@name='id'][.='7']", - "//result/doc[1]/bool[@name='[elevated]'][.='true']" - ); - - assertQ("", req(CommonParams.Q, "AAAA", CommonParams.QT, "/elevate", - CommonParams.FL, "id, score, [elev]") - , "//*[@numFound='1']" - , "//result/doc[1]/str[@name='id'][.='7']", + "//result/doc[3]/bool[@name='[elevated]'][.='false']"); + + assertQ( + "", + req( + CommonParams.Q, + "AAAA", + CommonParams.QT, + "/elevate", + CommonParams.FL, + "id, score, [elevated]"), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.='7']", + "//result/doc[1]/bool[@name='[elevated]'][.='true']"); + + assertQ( + "", + req( + CommonParams.Q, + "AAAA", + CommonParams.QT, + "/elevate", + CommonParams.FL, + "id, score, [elev]"), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.='7']", "not(//result/doc[1]/bool[@name='[elevated]'][.='false'])", - "not(//result/doc[1]/bool[@name='[elev]'][.='false'])" // even though we asked for elev, there is no Transformer registered w/ that, so we shouldn't get a result - ); + "not(//result/doc[1]/bool[@name='[elev]'][.='false'])" + // even though we asked for elev, there is no Transformer registered w/ that, so we + // shouldn't get a result + ); } finally { delete(); } @@ -498,60 +516,91 @@ public void testMarkExcludes() throws Exception { assertU(commit()); - assertQ("", req(CommonParams.Q, "XXXX XXXX", CommonParams.QT, "/elevate", - QueryElevationParams.MARK_EXCLUDES, "true", - "indent", "true", - CommonParams.FL, "id, score, [excluded]") - , "//*[@numFound='4']" - , "//result/doc[1]/str[@name='id'][.='5']" - , "//result/doc[2]/str[@name='id'][.='1']" - , 
"//result/doc[3]/str[@name='id'][.='4']" - , "//result/doc[4]/str[@name='id'][.='6']", + assertQ( + "", + req( + CommonParams.Q, + "XXXX XXXX", + CommonParams.QT, + "/elevate", + QueryElevationParams.MARK_EXCLUDES, + "true", + "indent", + "true", + CommonParams.FL, + "id, score, [excluded]"), + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.='5']", + "//result/doc[2]/str[@name='id'][.='1']", + "//result/doc[3]/str[@name='id'][.='4']", + "//result/doc[4]/str[@name='id'][.='6']", "//result/doc[1]/bool[@name='[excluded]'][.='false']", "//result/doc[2]/bool[@name='[excluded]'][.='false']", "//result/doc[3]/bool[@name='[excluded]'][.='false']", - "//result/doc[4]/bool[@name='[excluded]'][.='true']" - ); - - //ask for excluded as a field, but don't actually request the MARK_EXCLUDES - //thus, number 6 should not be returned, b/c it is excluded - assertQ("", req(CommonParams.Q, "XXXX XXXX", CommonParams.QT, "/elevate", - QueryElevationParams.MARK_EXCLUDES, "false", - CommonParams.FL, "id, score, [excluded]") - , "//*[@numFound='3']" - , "//result/doc[1]/str[@name='id'][.='5']" - , "//result/doc[2]/str[@name='id'][.='1']" - , "//result/doc[3]/str[@name='id'][.='4']", + "//result/doc[4]/bool[@name='[excluded]'][.='true']"); + + // ask for excluded as a field, but don't actually request the MARK_EXCLUDES + // thus, number 6 should not be returned, b/c it is excluded + assertQ( + "", + req( + CommonParams.Q, + "XXXX XXXX", + CommonParams.QT, + "/elevate", + QueryElevationParams.MARK_EXCLUDES, + "false", + CommonParams.FL, + "id, score, [excluded]"), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='5']", + "//result/doc[2]/str[@name='id'][.='1']", + "//result/doc[3]/str[@name='id'][.='4']", "//result/doc[1]/bool[@name='[excluded]'][.='false']", "//result/doc[2]/bool[@name='[excluded]'][.='false']", - "//result/doc[3]/bool[@name='[excluded]'][.='false']" - ); + "//result/doc[3]/bool[@name='[excluded]'][.='false']"); // test that excluded results are on the same positions in the result list // as when elevation component is disabled // (i.e. 
test that elevation component with MARK_EXCLUDES does not boost // excluded results) - assertQ("", req(CommonParams.Q, "QQQQ", CommonParams.QT, "/elevate", - QueryElevationParams.ENABLE, "false", - "indent", "true", - CommonParams.FL, "id, score") - , "//*[@numFound='3']" - , "//result/doc[1]/str[@name='id'][.='10']" - , "//result/doc[2]/str[@name='id'][.='9']" - , "//result/doc[3]/str[@name='id'][.='8']" - ); - assertQ("", req(CommonParams.Q, "QQQQ", CommonParams.QT, "/elevate", - QueryElevationParams.MARK_EXCLUDES, "true", - "indent", "true", - CommonParams.FL, "id, score, [excluded]") - , "//*[@numFound='3']" - , "//result/doc[1]/str[@name='id'][.='10']" - , "//result/doc[2]/str[@name='id'][.='9']" - , "//result/doc[3]/str[@name='id'][.='8']", + assertQ( + "", + req( + CommonParams.Q, + "QQQQ", + CommonParams.QT, + "/elevate", + QueryElevationParams.ENABLE, + "false", + "indent", + "true", + CommonParams.FL, + "id, score"), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='10']", + "//result/doc[2]/str[@name='id'][.='9']", + "//result/doc[3]/str[@name='id'][.='8']"); + assertQ( + "", + req( + CommonParams.Q, + "QQQQ", + CommonParams.QT, + "/elevate", + QueryElevationParams.MARK_EXCLUDES, + "true", + "indent", + "true", + CommonParams.FL, + "id, score, [excluded]"), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='10']", + "//result/doc[2]/str[@name='id'][.='9']", + "//result/doc[3]/str[@name='id'][.='8']", "//result/doc[1]/bool[@name='[excluded]'][.='true']", "//result/doc[2]/bool[@name='[excluded]'][.='false']", - "//result/doc[3]/bool[@name='[excluded]'][.='false']" - ); + "//result/doc[3]/bool[@name='[excluded]'][.='false']"); } finally { delete(); } @@ -565,128 +614,137 @@ public void testSorting() throws Exception { assertU(adoc("id", "b", "title", "ipod ipod trash", "str_s1", "group2")); assertU(adoc("id", "c", "title", "ipod ipod ipod ", "str_s1", "group2")); - assertU(adoc("id", "x", "title", "boosted", "str_s1", "group1")); - assertU(adoc("id", "y", "title", "boosted boosted", "str_s1", "group2")); + assertU(adoc("id", "x", "title", "boosted", "str_s1", "group1")); + assertU(adoc("id", "y", "title", "boosted boosted", "str_s1", "group2")); assertU(adoc("id", "z", "title", "boosted boosted boosted", "str_s1", "group2")); assertU(commit()); final String query = "title:ipod"; - final SolrParams baseParams = params( - "qt", "/elevate", - "q", query, - "fl", "id,score", - "indent", "true"); + final SolrParams baseParams = + params( + "qt", "/elevate", + "q", query, + "fl", "id,score", + "indent", "true"); - QueryElevationComponent booster = (QueryElevationComponent) h.getCore().getSearchComponent("elevate"); + QueryElevationComponent booster = + (QueryElevationComponent) h.getCore().getSearchComponent("elevate"); IndexReader reader = h.getCore().withSearcher(SolrIndexSearcher::getIndexReader); - assertQ("Make sure standard sort works as expected", req(baseParams) - , "//*[@numFound='3']" - , "//result/doc[1]/str[@name='id'][.='c']" - , "//result/doc[2]/str[@name='id'][.='b']" - , "//result/doc[3]/str[@name='id'][.='a']" - ); + assertQ( + "Make sure standard sort works as expected", + req(baseParams), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='c']", + "//result/doc[2]/str[@name='id'][.='b']", + "//result/doc[3]/str[@name='id'][.='a']"); // Explicitly set what gets boosted - booster.setTopQueryResults(reader, query, false, new String[]{"x", "y", "z"}, null); - - assertQ("All six should make it", req(baseParams) - , "//*[@numFound='6']" - , 
"//result/doc[1]/str[@name='id'][.='x']" - , "//result/doc[2]/str[@name='id'][.='y']" - , "//result/doc[3]/str[@name='id'][.='z']" - , "//result/doc[4]/str[@name='id'][.='c']" - , "//result/doc[5]/str[@name='id'][.='b']" - , "//result/doc[6]/str[@name='id'][.='a']" - ); + booster.setTopQueryResults(reader, query, false, new String[] {"x", "y", "z"}, null); + + assertQ( + "All six should make it", + req(baseParams), + "//*[@numFound='6']", + "//result/doc[1]/str[@name='id'][.='x']", + "//result/doc[2]/str[@name='id'][.='y']", + "//result/doc[3]/str[@name='id'][.='z']", + "//result/doc[4]/str[@name='id'][.='c']", + "//result/doc[5]/str[@name='id'][.='b']", + "//result/doc[6]/str[@name='id'][.='a']"); // now switch the order: - booster.setTopQueryResults(reader, query, false, new String[]{"a", "x"}, null); - assertQ(req(baseParams) - , "//*[@numFound='4']" - , "//result/doc[1]/str[@name='id'][.='a']" - , "//result/doc[2]/str[@name='id'][.='x']" - , "//result/doc[3]/str[@name='id'][.='c']" - , "//result/doc[4]/str[@name='id'][.='b']" - ); + booster.setTopQueryResults(reader, query, false, new String[] {"a", "x"}, null); + assertQ( + req(baseParams), + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.='a']", + "//result/doc[2]/str[@name='id'][.='x']", + "//result/doc[3]/str[@name='id'][.='c']", + "//result/doc[4]/str[@name='id'][.='b']"); // Try normal sort by 'id' // default 'forceBoost' should be false assertFalse(booster.forceElevation); - assertQ(req(baseParams, "sort", "id asc") - , "//*[@numFound='4']" - , "//result/doc[1]/str[@name='id'][.='a']" - , "//result/doc[2]/str[@name='id'][.='b']" - , "//result/doc[3]/str[@name='id'][.='c']" - , "//result/doc[4]/str[@name='id'][.='x']" - ); - - assertQ("useConfiguredElevatedOrder=false", - req(baseParams, "sort", "str_s1 asc,id desc", "useConfiguredElevatedOrder", "false") - , "//*[@numFound='4']" - , "//result/doc[1]/str[@name='id'][.='x']"//group1 - , "//result/doc[2]/str[@name='id'][.='a']"//group1 - , "//result/doc[3]/str[@name='id'][.='c']" - , "//result/doc[4]/str[@name='id'][.='b']" - ); + assertQ( + req(baseParams, "sort", "id asc"), + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.='a']", + "//result/doc[2]/str[@name='id'][.='b']", + "//result/doc[3]/str[@name='id'][.='c']", + "//result/doc[4]/str[@name='id'][.='x']"); + + assertQ( + "useConfiguredElevatedOrder=false", + req(baseParams, "sort", "str_s1 asc,id desc", "useConfiguredElevatedOrder", "false"), + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.='x']" // group1 + , + "//result/doc[2]/str[@name='id'][.='a']" // group1 + , + "//result/doc[3]/str[@name='id'][.='c']", + "//result/doc[4]/str[@name='id'][.='b']"); booster.forceElevation = true; - assertQ(req(baseParams, "sort", "id asc") - , "//*[@numFound='4']" - , "//result/doc[1]/str[@name='id'][.='a']" - , "//result/doc[2]/str[@name='id'][.='x']" - , "//result/doc[3]/str[@name='id'][.='b']" - , "//result/doc[4]/str[@name='id'][.='c']" - ); + assertQ( + req(baseParams, "sort", "id asc"), + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.='a']", + "//result/doc[2]/str[@name='id'][.='x']", + "//result/doc[3]/str[@name='id'][.='b']", + "//result/doc[4]/str[@name='id'][.='c']"); booster.forceElevation = true; - assertQ("useConfiguredElevatedOrder=false and forceElevation", - req(baseParams, "sort", "id desc", "useConfiguredElevatedOrder", "false") - , "//*[@numFound='4']" - , "//result/doc[1]/str[@name='id'][.='x']" // force elevated - , "//result/doc[2]/str[@name='id'][.='a']" // force 
elevated - , "//result/doc[3]/str[@name='id'][.='c']" - , "//result/doc[4]/str[@name='id'][.='b']" - ); - - //Test exclusive (not to be confused with exclusion) - booster.setTopQueryResults(reader, query, false, new String[]{"x", "a"}, new String[]{}); - assertQ(req(baseParams, "exclusive", "true") - , "//*[@numFound='2']" - , "//result/doc[1]/str[@name='id'][.='x']" - , "//result/doc[2]/str[@name='id'][.='a']" - ); + assertQ( + "useConfiguredElevatedOrder=false and forceElevation", + req(baseParams, "sort", "id desc", "useConfiguredElevatedOrder", "false"), + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.='x']" // force elevated + , + "//result/doc[2]/str[@name='id'][.='a']" // force elevated + , + "//result/doc[3]/str[@name='id'][.='c']", + "//result/doc[4]/str[@name='id'][.='b']"); + + // Test exclusive (not to be confused with exclusion) + booster.setTopQueryResults(reader, query, false, new String[] {"x", "a"}, new String[] {}); + assertQ( + req(baseParams, "exclusive", "true"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.='x']", + "//result/doc[2]/str[@name='id'][.='a']"); // Test exclusion - booster.setTopQueryResults(reader, query, false, new String[]{"x"}, new String[]{"a"}); - assertQ(req(baseParams) - , "//*[@numFound='3']" - , "//result/doc[1]/str[@name='id'][.='x']" - , "//result/doc[2]/str[@name='id'][.='c']" - , "//result/doc[3]/str[@name='id'][.='b']" - ); - + booster.setTopQueryResults(reader, query, false, new String[] {"x"}, new String[] {"a"}); + assertQ( + req(baseParams), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='x']", + "//result/doc[2]/str[@name='id'][.='c']", + "//result/doc[3]/str[@name='id'][.='b']"); // Test setting ids and excludes from http parameters booster.clearElevationProviderCache(); - assertQ("All five should make it", req(baseParams, "elevateIds", "x,y,z", "excludeIds", "b") - , "//*[@numFound='5']" - , "//result/doc[1]/str[@name='id'][.='x']" - , "//result/doc[2]/str[@name='id'][.='y']" - , "//result/doc[3]/str[@name='id'][.='z']" - , "//result/doc[4]/str[@name='id'][.='c']" - , "//result/doc[5]/str[@name='id'][.='a']" - ); - - assertQ("All four should make it", req(baseParams, "elevateIds", "x,z,y", "excludeIds", "b,c") - , "//*[@numFound='4']" - , "//result/doc[1]/str[@name='id'][.='x']" - , "//result/doc[2]/str[@name='id'][.='z']" - , "//result/doc[3]/str[@name='id'][.='y']" - , "//result/doc[4]/str[@name='id'][.='a']" - ); + assertQ( + "All five should make it", + req(baseParams, "elevateIds", "x,y,z", "excludeIds", "b"), + "//*[@numFound='5']", + "//result/doc[1]/str[@name='id'][.='x']", + "//result/doc[2]/str[@name='id'][.='y']", + "//result/doc[3]/str[@name='id'][.='z']", + "//result/doc[4]/str[@name='id'][.='c']", + "//result/doc[5]/str[@name='id'][.='a']"); + + assertQ( + "All four should make it", + req(baseParams, "elevateIds", "x,z,y", "excludeIds", "b,c"), + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.='x']", + "//result/doc[2]/str[@name='id'][.='z']", + "//result/doc[3]/str[@name='id'][.='y']", + "//result/doc[4]/str[@name='id'][.='a']"); } finally { delete(); @@ -695,7 +753,8 @@ public void testSorting() throws Exception { // write an elevation config file to boost some docs private void writeElevationConfigFile(File file, String query, String... 
ids) throws Exception { - PrintWriter out = new PrintWriter(new OutputStreamWriter(new FileOutputStream(file), StandardCharsets.UTF_8)); + PrintWriter out = + new PrintWriter(new OutputStreamWriter(new FileOutputStream(file), StandardCharsets.UTF_8)); out.println("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>"); out.println("<elevate>"); out.println("<query text=\"" + query + "\">"); @@ -726,7 +785,8 @@ public void testElevationReloading() throws Exception { try { - QueryElevationComponent comp = (QueryElevationComponent) h.getCore().getSearchComponent("elevate"); + QueryElevationComponent comp = + (QueryElevationComponent) h.getCore().getSearchComponent("elevate"); NamedList<String> args = new NamedList<>(); args.add(QueryElevationComponent.CONFIG_FILE, configFile.getName()); comp.init(args); @@ -735,18 +795,23 @@ public void testElevationReloading() throws Exception { QueryElevationComponent.ElevationProvider elevationProvider; try (SolrQueryRequest req = req()) { - elevationProvider = comp.getElevationProvider(req.getSearcher().getIndexReader(), req.getCore()); - assertTrue(elevationProvider.getElevationForQuery("aaa").elevatedIds.contains(new BytesRef("A"))); + elevationProvider = + comp.getElevationProvider(req.getSearcher().getIndexReader(), req.getCore()); + assertTrue( + elevationProvider.getElevationForQuery("aaa").elevatedIds.contains(new BytesRef("A"))); assertNull(elevationProvider.getElevationForQuery("bbb")); } // now change the file writeElevationConfigFile(configFile, "bbb", "B"); - // With no index change, we get the same index reader, so the elevationProviderCache returns the previous ElevationProvider without the change. + // With no index change, we get the same index reader, so the elevationProviderCache returns + // the previous ElevationProvider without the change. try (SolrQueryRequest req = req()) { - elevationProvider = comp.getElevationProvider(req.getSearcher().getIndexReader(), req.getCore()); - assertTrue(elevationProvider.getElevationForQuery("aaa").elevatedIds.contains(new BytesRef("A"))); + elevationProvider = + comp.getElevationProvider(req.getSearcher().getIndexReader(), req.getCore()); + assertTrue( + elevationProvider.getElevationForQuery("aaa").elevatedIds.contains(new BytesRef("A"))); assertNull(elevationProvider.getElevationForQuery("bbb")); } @@ -754,24 +819,30 @@ public void testElevationReloading() throws Exception { assertU(adoc("id", "10000")); assertU(commit()); - // Check that we effectively reload a new ElevationProvider for a different index reader (so two entries in elevationProviderCache). + // Check that we effectively reload a new ElevationProvider for a different index reader (so + // two entries in elevationProviderCache). try (SolrQueryRequest req = req()) { - elevationProvider = comp.getElevationProvider(req.getSearcher().getIndexReader(), req.getCore()); + elevationProvider = + comp.getElevationProvider(req.getSearcher().getIndexReader(), req.getCore()); assertNull(elevationProvider.getElevationForQuery("aaa")); - assertTrue(elevationProvider.getElevationForQuery("bbb").elevatedIds.contains(new BytesRef("B"))); + assertTrue( + elevationProvider.getElevationForQuery("bbb").elevatedIds.contains(new BytesRef("B"))); } // Now change the config file again. writeElevationConfigFile(configFile, "ccc", "C"); - // Without index change, but calling a different method that clears the elevationProviderCache, so we should load a new ElevationProvider. + // Without index change, but calling a different method that clears the + // elevationProviderCache, so we should load a new ElevationProvider. 
int elevationRuleNumber = comp.loadElevationConfiguration(h.getCore()); assertEquals(1, elevationRuleNumber); try (SolrQueryRequest req = req()) { - elevationProvider = comp.getElevationProvider(req.getSearcher().getIndexReader(), req.getCore()); + elevationProvider = + comp.getElevationProvider(req.getSearcher().getIndexReader(), req.getCore()); assertNull(elevationProvider.getElevationForQuery("aaa")); assertNull(elevationProvider.getElevationForQuery("bbb")); - assertTrue(elevationProvider.getElevationForQuery("ccc").elevatedIds.contains(new BytesRef("C"))); + assertTrue( + elevationProvider.getElevationForQuery("ccc").elevatedIds.contains(new BytesRef("C"))); } } finally { delete(); @@ -787,24 +858,42 @@ public void testWithLocalParam() throws Exception { assertU(adoc("id", "7", "text", "AAAA", "str_s", "a")); assertU(commit()); - assertQ("", req(CommonParams.Q, "AAAA", CommonParams.QT, "/elevate", - CommonParams.FL, "id, score, [elevated]") - , "//*[@numFound='1']" - , "//result/doc[1]/str[@name='id'][.='7']" - , "//result/doc[1]/bool[@name='[elevated]'][.='true']" - ); - assertQ("", req(CommonParams.Q, "{!q.op=AND}AAAA", CommonParams.QT, "/elevate", - CommonParams.FL, "id, score, [elevated]") - , "//*[@numFound='1']" - , "//result/doc[1]/str[@name='id'][.='7']" - , "//result/doc[1]/bool[@name='[elevated]'][.='true']" - ); - assertQ("", req(CommonParams.Q, "{!q.op=AND v='AAAA'}", CommonParams.QT, "/elevate", - CommonParams.FL, "id, score, [elevated]") - , "//*[@numFound='1']" - , "//result/doc[1]/str[@name='id'][.='7']" - , "//result/doc[1]/bool[@name='[elevated]'][.='true']" - ); + assertQ( + "", + req( + CommonParams.Q, + "AAAA", + CommonParams.QT, + "/elevate", + CommonParams.FL, + "id, score, [elevated]"), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.='7']", + "//result/doc[1]/bool[@name='[elevated]'][.='true']"); + assertQ( + "", + req( + CommonParams.Q, + "{!q.op=AND}AAAA", + CommonParams.QT, + "/elevate", + CommonParams.FL, + "id, score, [elevated]"), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.='7']", + "//result/doc[1]/bool[@name='[elevated]'][.='true']"); + assertQ( + "", + req( + CommonParams.Q, + "{!q.op=AND v='AAAA'}", + CommonParams.QT, + "/elevate", + CommonParams.FL, + "id, score, [elevated]"), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.='7']", + "//result/doc[1]/bool[@name='[elevated]'][.='true']"); } finally { delete(); } @@ -831,26 +920,45 @@ public void testQuerySubsetMatching() throws Exception { assertU(commit()); // Exact matching. - assertQ("", req(CommonParams.Q, "XXXX", CommonParams.QT, "/elevate", - CommonParams.FL, "id, score, [elevated]"), + assertQ( + "", + req( + CommonParams.Q, + "XXXX", + CommonParams.QT, + "/elevate", + CommonParams.FL, + "id, score, [elevated]"), "//*[@numFound='3']", "//result/doc[1]/str[@name='id'][.='1']", "//result/doc[2]/str[@name='id'][.='4']", "//result/doc[3]/str[@name='id'][.='6']", "//result/doc[1]/bool[@name='[elevated]'][.='true']", "//result/doc[2]/bool[@name='[elevated]'][.='false']", - "//result/doc[3]/bool[@name='[elevated]'][.='false']" - ); + "//result/doc[3]/bool[@name='[elevated]'][.='false']"); // Exact matching. - assertQ("", req(CommonParams.Q, "QQQQ EE", CommonParams.QT, "/elevate", - CommonParams.FL, "id, score, [elevated]"), - "//*[@numFound='0']" - ); + assertQ( + "", + req( + CommonParams.Q, + "QQQQ EE", + CommonParams.QT, + "/elevate", + CommonParams.FL, + "id, score, [elevated]"), + "//*[@numFound='0']"); // Subset matching. 
- assertQ("", req(CommonParams.Q, "BB DD CC VV", CommonParams.QT, "/elevate", - CommonParams.FL, "id, score, [elevated]"), + assertQ( + "", + req( + CommonParams.Q, + "BB DD CC VV", + CommonParams.QT, + "/elevate", + CommonParams.FL, + "id, score, [elevated]"), "//*[@numFound='4']", "//result/doc[1]/str[@name='id'][.='10']", "//result/doc[2]/str[@name='id'][.='12']", @@ -859,12 +967,18 @@ public void testQuerySubsetMatching() throws Exception { "//result/doc[1]/bool[@name='[elevated]'][.='true']", "//result/doc[2]/bool[@name='[elevated]'][.='true']", "//result/doc[3]/bool[@name='[elevated]'][.='true']", - "//result/doc[4]/bool[@name='[elevated]'][.='false']" - ); + "//result/doc[4]/bool[@name='[elevated]'][.='false']"); // Subset + exact matching. - assertQ("", req(CommonParams.Q, "BB CC", CommonParams.QT, "/elevate", - CommonParams.FL, "id, score, [elevated]"), + assertQ( + "", + req( + CommonParams.Q, + "BB CC", + CommonParams.QT, + "/elevate", + CommonParams.FL, + "id, score, [elevated]"), "//*[@numFound='4']", "//result/doc[1]/str[@name='id'][.='13']", "//result/doc[2]/str[@name='id'][.='10']", @@ -873,12 +987,18 @@ public void testQuerySubsetMatching() throws Exception { "//result/doc[1]/bool[@name='[elevated]'][.='true']", "//result/doc[2]/bool[@name='[elevated]'][.='true']", "//result/doc[3]/bool[@name='[elevated]'][.='true']", - "//result/doc[4]/bool[@name='[elevated]'][.='true']" - ); + "//result/doc[4]/bool[@name='[elevated]'][.='true']"); // Subset matching. - assertQ("", req(CommonParams.Q, "AA BB DD CC AA", CommonParams.QT, "/elevate", - CommonParams.FL, "id, score, [elevated]"), + assertQ( + "", + req( + CommonParams.Q, + "AA BB DD CC AA", + CommonParams.QT, + "/elevate", + CommonParams.FL, + "id, score, [elevated]"), "//*[@numFound='4']", "//result/doc[1]/str[@name='id'][.='10']", "//result/doc[2]/str[@name='id'][.='12']", @@ -887,26 +1007,37 @@ public void testQuerySubsetMatching() throws Exception { "//result/doc[1]/bool[@name='[elevated]'][.='true']", "//result/doc[2]/bool[@name='[elevated]'][.='true']", "//result/doc[3]/bool[@name='[elevated]'][.='true']", - "//result/doc[4]/bool[@name='[elevated]'][.='true']" - ); + "//result/doc[4]/bool[@name='[elevated]'][.='true']"); // Subset matching. - assertQ("", req(CommonParams.Q, "AA RR BB DD AA", CommonParams.QT, "/elevate", - CommonParams.FL, "id, score, [elevated]"), + assertQ( + "", + req( + CommonParams.Q, + "AA RR BB DD AA", + CommonParams.QT, + "/elevate", + CommonParams.FL, + "id, score, [elevated]"), "//*[@numFound='3']", "//result/doc[1]/str[@name='id'][.='12']", "//result/doc[2]/str[@name='id'][.='14']", "//result/doc[3]/str[@name='id'][.='10']", "//result/doc[1]/bool[@name='[elevated]'][.='true']", "//result/doc[2]/bool[@name='[elevated]'][.='true']", - "//result/doc[3]/bool[@name='[elevated]'][.='false']" - ); + "//result/doc[3]/bool[@name='[elevated]'][.='false']"); // Subset matching. 
- assertQ("", req(CommonParams.Q, "AA BB EE", CommonParams.QT, "/elevate", - CommonParams.FL, "id, score, [elevated]") - , "//*[@numFound='0']" - ); + assertQ( + "", + req( + CommonParams.Q, + "AA BB EE", + CommonParams.QT, + "/elevate", + CommonParams.FL, + "id, score, [elevated]"), + "//*[@numFound='0']"); } finally { delete(); } @@ -927,13 +1058,20 @@ public void testElevatedIds() throws Exception { SolrQueryRequest req = req(); IndexReader reader = req.getSearcher().getIndexReader(); - QueryElevationComponent.ElevationProvider elevationProvider = comp.getElevationProvider(reader, core); + QueryElevationComponent.ElevationProvider elevationProvider = + comp.getElevationProvider(reader, core); req.close(); assertEquals(toIdSet("1"), elevationProvider.getElevationForQuery("xxxx").elevatedIds); - assertEquals(toIdSet("10", "11", "12"), elevationProvider.getElevationForQuery("bb DD CC vv").elevatedIds); - assertEquals(toIdSet("10", "11", "12", "13"), elevationProvider.getElevationForQuery("BB Cc").elevatedIds); - assertEquals(toIdSet("10", "11", "12", "14"), elevationProvider.getElevationForQuery("aa bb dd cc aa").elevatedIds); + assertEquals( + toIdSet("10", "11", "12"), + elevationProvider.getElevationForQuery("bb DD CC vv").elevatedIds); + assertEquals( + toIdSet("10", "11", "12", "13"), + elevationProvider.getElevationForQuery("BB Cc").elevatedIds); + assertEquals( + toIdSet("10", "11", "12", "14"), + elevationProvider.getElevationForQuery("aa bb dd cc aa").elevatedIds); } finally { delete(); } @@ -955,34 +1093,34 @@ public void testOnlyDocsInSearchResultsWillBeElevated() throws Exception { assertU(commit()); // default behaviour - assertQ("", req( + assertQ( + "", + req( CommonParams.Q, "YYYY", CommonParams.QT, "/elevate", QueryElevationParams.ELEVATE_ONLY_DOCS_MATCHING_QUERY, "false", CommonParams.FL, "id, score, [elevated]"), - "//*[@numFound='3']", - "//result/doc[1]/str[@name='id'][.='1']", - "//result/doc[2]/str[@name='id'][.='2']", - "//result/doc[3]/str[@name='id'][.='5']", - "//result/doc[1]/bool[@name='[elevated]'][.='true']", - "//result/doc[2]/bool[@name='[elevated]'][.='true']", - "//result/doc[3]/bool[@name='[elevated]'][.='false']" - ); + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='1']", + "//result/doc[2]/str[@name='id'][.='2']", + "//result/doc[3]/str[@name='id'][.='5']", + "//result/doc[1]/bool[@name='[elevated]'][.='true']", + "//result/doc[2]/bool[@name='[elevated]'][.='true']", + "//result/doc[3]/bool[@name='[elevated]'][.='false']"); // only docs that matches q - assertQ("", req( + assertQ( + "", + req( CommonParams.Q, "YYYY", CommonParams.QT, "/elevate", QueryElevationParams.ELEVATE_ONLY_DOCS_MATCHING_QUERY, "true", CommonParams.FL, "id, score, [elevated]"), - "//*[@numFound='2']", - "//result/doc[1]/str[@name='id'][.='2']", - "//result/doc[2]/str[@name='id'][.='5']", - "//result/doc[1]/bool[@name='[elevated]'][.='true']", - "//result/doc[2]/bool[@name='[elevated]'][.='false']" - ); - - + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.='2']", + "//result/doc[2]/str[@name='id'][.='5']", + "//result/doc[1]/bool[@name='[elevated]'][.='true']", + "//result/doc[2]/bool[@name='[elevated]'][.='false']"); } finally { delete(); @@ -1001,138 +1139,134 @@ public void testOnlyRepresentativeIsVisibleWhenCollapsing() throws Exception { assertU(commit()); // default behaviour - all elevated docs are visible - assertQ("", req( + assertQ( + "", + req( CommonParams.Q, "ZZZZ", CommonParams.QT, "/elevate", 
CollapsingQParserPlugin.COLLECT_ELEVATED_DOCS_WHEN_COLLAPSING, "true", CommonParams.FQ, "{!collapse field=str_s1 sort='score desc'}", CommonParams.FL, "id, score, [elevated]"), - "//*[@numFound='4']", - "//result/doc[1]/str[@name='id'][.='1']", - "//result/doc[2]/str[@name='id'][.='2']", - "//result/doc[3]/str[@name='id'][.='3']", - "//result/doc[4]/str[@name='id'][.='4']", - "//result/doc[1]/bool[@name='[elevated]'][.='true']", - "//result/doc[2]/bool[@name='[elevated]'][.='true']", - "//result/doc[3]/bool[@name='[elevated]'][.='true']", - "//result/doc[4]/bool[@name='[elevated]'][.='false']" - ); + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.='1']", + "//result/doc[2]/str[@name='id'][.='2']", + "//result/doc[3]/str[@name='id'][.='3']", + "//result/doc[4]/str[@name='id'][.='4']", + "//result/doc[1]/bool[@name='[elevated]'][.='true']", + "//result/doc[2]/bool[@name='[elevated]'][.='true']", + "//result/doc[3]/bool[@name='[elevated]'][.='true']", + "//result/doc[4]/bool[@name='[elevated]'][.='false']"); // only representative elevated doc visible - assertQ("", req( + assertQ( + "", + req( CommonParams.Q, "ZZZZ", CommonParams.QT, "/elevate", CollapsingQParserPlugin.COLLECT_ELEVATED_DOCS_WHEN_COLLAPSING, "false", CommonParams.FQ, "{!collapse field=str_s1 sort='score desc'}", CommonParams.FL, "id, score, [elevated]"), - "//*[@numFound='3']", - "//result/doc[1]/str[@name='id'][.='2']", - "//result/doc[2]/str[@name='id'][.='3']", - "//result/doc[3]/str[@name='id'][.='4']", - "//result/doc[1]/bool[@name='[elevated]'][.='true']", - "//result/doc[2]/bool[@name='[elevated]'][.='true']", - "//result/doc[3]/bool[@name='[elevated]'][.='false']" - ); + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='2']", + "//result/doc[2]/str[@name='id'][.='3']", + "//result/doc[3]/str[@name='id'][.='4']", + "//result/doc[1]/bool[@name='[elevated]'][.='true']", + "//result/doc[2]/bool[@name='[elevated]'][.='true']", + "//result/doc[3]/bool[@name='[elevated]'][.='false']"); } finally { delete(); } } + @Test public void testCursor() throws Exception { try { init("schema12.xml"); - + assertU(adoc("id", "a", "title", "ipod trash trash", "str_s1", "group1")); assertU(adoc("id", "b", "title", "ipod ipod trash", "str_s1", "group2")); assertU(adoc("id", "c", "title", "ipod ipod ipod ", "str_s1", "group2")); - assertU(adoc("id", "x", "title", "boosted", "str_s1", "group1")); - assertU(adoc("id", "y", "title", "boosted boosted", "str_s1", "group2")); + assertU(adoc("id", "x", "title", "boosted", "str_s1", "group1")); + assertU(adoc("id", "y", "title", "boosted boosted", "str_s1", "group2")); assertU(adoc("id", "z", "title", "boosted boosted boosted", "str_s1", "group2")); assertU(commit()); - final SolrParams baseParams = params("qt", "/elevate", - "q", "title:ipod", - "sort", "score desc, id asc", - "fl", "id", - "elevateIds", "x,y,z", - "excludeIds", "b"); + final SolrParams baseParams = + params( + "qt", "/elevate", + "q", "title:ipod", + "sort", "score desc, id asc", + "fl", "id", + "elevateIds", "x,y,z", + "excludeIds", "b"); // sanity check everything returned w/these elevation options... - assertJQ(req(baseParams) - , "/response/numFound==5" - , "/response/start==0" - , "/response/docs==[{'id':'x'},{'id':'y'},{'id':'z'},{'id':'c'},{'id':'a'}]" - ); + assertJQ( + req(baseParams), + "/response/numFound==5", + "/response/start==0", + "/response/docs==[{'id':'x'},{'id':'y'},{'id':'z'},{'id':'c'},{'id':'a'}]"); // same query using CURSOR_MARK_START should produce a 'next' cursor... 
- assertCursorJQ(req(baseParams, CURSOR_MARK_PARAM, CURSOR_MARK_START) , "/response/numFound==5" , "/response/start==0" , "/response/docs==[{'id':'x'},{'id':'y'},{'id':'z'},{'id':'c'},{'id':'a'}]" ); + assertCursorJQ( + req(baseParams, CURSOR_MARK_PARAM, CURSOR_MARK_START), + "/response/numFound==5", + "/response/start==0", + "/response/docs==[{'id':'x'},{'id':'y'},{'id':'z'},{'id':'c'},{'id':'a'}]"); // use a cursor w/rows < 5, then fetch next cursor... String nextCursor = null; - nextCursor = assertCursorJQ(req(baseParams - , CURSOR_MARK_PARAM, CURSOR_MARK_START - , "rows", "2" - ) - , "/response/numFound==5" - , "/response/start==0" - , "/response/docs==[{'id':'x'},{'id':'y'}]" - ); - nextCursor = assertCursorJQ(req(baseParams - , CURSOR_MARK_PARAM, nextCursor - , "rows", "2" - ) - , "/response/numFound==5" - , "/response/start==0" - , "/response/docs==[{'id':'z'},{'id':'c'}]" - ); - nextCursor = assertCursorJQ(req(baseParams - , CURSOR_MARK_PARAM, nextCursor - , "rows", "2" - ) - , "/response/numFound==5" - , "/response/start==0" - , "/response/docs==[{'id':'a'}]" - ); + nextCursor = + assertCursorJQ( + req(baseParams, CURSOR_MARK_PARAM, CURSOR_MARK_START, "rows", "2"), + "/response/numFound==5", + "/response/start==0", + "/response/docs==[{'id':'x'},{'id':'y'}]"); + nextCursor = + assertCursorJQ( + req(baseParams, CURSOR_MARK_PARAM, nextCursor, "rows", "2"), + "/response/numFound==5", + "/response/start==0", + "/response/docs==[{'id':'z'},{'id':'c'}]"); + nextCursor = + assertCursorJQ( + req(baseParams, CURSOR_MARK_PARAM, nextCursor, "rows", "2"), + "/response/numFound==5", + "/response/start==0", + "/response/docs==[{'id':'a'}]"); final String lastCursor = nextCursor; - nextCursor = assertCursorJQ(req(baseParams - , CURSOR_MARK_PARAM, nextCursor - , "rows", "2" - ) - , "/response/numFound==5" - , "/response/start==0" - , "/response/docs==[]" - ); + nextCursor = + assertCursorJQ( + req(baseParams, CURSOR_MARK_PARAM, nextCursor, "rows", "2"), + "/response/numFound==5", + "/response/start==0", + "/response/docs==[]"); assertEquals(lastCursor, nextCursor); } finally { delete(); } - } + private static Set<BytesRef> toIdSet(String... ids) { return Arrays.stream(ids).map(BytesRef::new).collect(Collectors.toSet()); } - /** - * Asserts that the query matches the specified JSON patterns and then returns the - * {@link CursorMarkParams#CURSOR_MARK_NEXT} value from the response + * Asserts that the query matches the specified JSON patterns and then returns the {@link + * CursorMarkParams#CURSOR_MARK_NEXT} value from the response * * @see #assertJQ */ private static String assertCursorJQ(SolrQueryRequest req, String... 
tests) throws Exception { String json = assertJQ(req, tests); Map<?, ?> rsp = (Map<?, ?>) fromJSONString(json); - assertTrue("response doesn't contain "+CURSOR_MARK_NEXT + ": " + json, - rsp.containsKey(CURSOR_MARK_NEXT)); - String next = (String)rsp.get(CURSOR_MARK_NEXT); + assertTrue( + "response doesn't contain " + CURSOR_MARK_NEXT + ": " + json, + rsp.containsKey(CURSOR_MARK_NEXT)); + String next = (String) rsp.get(CURSOR_MARK_NEXT); assertNotNull(CURSOR_MARK_NEXT + " is null", next); return next; } - } diff --git a/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java b/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java index 8558991083c..47b3555e4c8 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java +++ b/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java @@ -17,13 +17,7 @@ package org.apache.solr.handler.component; -import org.apache.solr.common.params.ModifiableSolrParams; -import org.apache.solr.common.params.SolrParams; -import org.apache.solr.core.BlobRepository; -import org.apache.solr.core.SolrCore; -import org.apache.solr.util.plugin.SolrCoreAware; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import static org.junit.Assert.assertEquals; import java.io.BufferedReader; import java.io.IOException; @@ -34,8 +28,13 @@ import java.util.HashMap; import java.util.Map; import java.util.stream.Stream; - -import static org.junit.Assert.assertEquals; +import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.common.params.SolrParams; +import org.apache.solr.core.BlobRepository; +import org.apache.solr.core.SolrCore; +import org.apache.solr.util.plugin.SolrCoreAware; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class ResourceSharingTestComponent extends SearchComponent implements SolrCoreAware { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -72,7 +71,7 @@ TestObject getTestObj() { public void inform(SolrCore core) { log.info("Informing test component..."); this.core = core; - this.blob = core.loadDecodeAndCacheBlob(getKey(), new DumbCsvDecoder()).blob; + this.blob = core.loadDecodeAndCacheBlob(getKey(), new DumbCsvDecoder()).blob; log.info("Test component informed!"); } @@ -90,9 +89,9 @@ public String getResourceVersion() { class DumbCsvDecoder implements BlobRepository.Decoder<TestObject> { private final Map<String, String> dict = new HashMap<>(); - + public DumbCsvDecoder() {} - + void processSimpleCsvRow(String string) { String[] row = string.split(","); // dumbest csv parser ever... 
:) getDict().put(row[0], row[1]); @@ -105,30 +104,31 @@ public Map<String, String> getDict() { @Override public TestObject decode(InputStream inputStream) { // loading a tiny csv like: - // + // // foo,bar // baz,bam - try (Stream<String> lines = new BufferedReader(new InputStreamReader(inputStream, Charset.forName("UTF-8"))).lines()) { - lines.forEach(this::processSimpleCsvRow); + try (Stream<String> lines = + new BufferedReader(new InputStreamReader(inputStream, Charset.forName("UTF-8"))) .lines()) { + lines.forEach(this::processSimpleCsvRow); } catch (Exception e) { - log.error("failed to read dictionary {}", getResourceName() ); - throw new RuntimeException("Cannot load dictionary " , e); + log.error("failed to read dictionary {}", getResourceName()); + throw new RuntimeException("Cannot load dictionary ", e); } - + assertEquals("bar", dict.get("foo")); assertEquals("bam", dict.get("baz")); if (log.isInfoEnabled()) { log.info("Loaded {} using {}", getDict().size(), this.getClass().getClassLoader()); } - - // if we get here we have seen the data from the blob and all we need is to test that two collections - // are able to see the same object.. + + // if we get here we have seen the data from the blob and all we need is to test that two + // collections are able to see the same object.. return new TestObject(); } } - public static class TestObject { public static final String NEVER_UPDATED = "never updated"; private volatile String lastCollection = NEVER_UPDATED; @@ -141,5 +141,4 @@ public void setLastCollection(String lastCollection) { this.lastCollection = lastCollection; } } - } diff --git a/solr/core/src/test/org/apache/solr/handler/component/ResponseBuilderTest.java b/solr/core/src/test/org/apache/solr/handler/component/ResponseBuilderTest.java index 28bad7a6cb7..7adfed4f68e 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/ResponseBuilderTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/ResponseBuilderTest.java @@ -18,7 +18,6 @@ package org.apache.solr.handler.component; import java.util.ArrayList; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.response.SolrQueryResponse; @@ -34,8 +33,8 @@ public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema.xml"); } - //This test is being added to verify responseBuilder.isDistributed() exists and is visible. - public void testIsDistrib(){ + // This test is being added to verify responseBuilder.isDistributed() exists and is visible. 
+ public void testIsDistrib() { ResponseBuilder responseBuilder = new ResponseBuilder(req, rsp, new ArrayList<>(0)); assertFalse(responseBuilder.isDistributed()); } diff --git a/solr/core/src/test/org/apache/solr/handler/component/ResponseLogComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/ResponseLogComponentTest.java index cf657dae273..569bed2b463 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/ResponseLogComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/ResponseLogComponentTest.java @@ -27,7 +27,7 @@ public class ResponseLogComponentTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeTest() throws Exception { - initCore("solrconfig-response-log-component.xml","schema12.xml"); + initCore("solrconfig-response-log-component.xml", "schema12.xml"); assertNull(h.validateUpdate(adoc("id", "1", "subject", "aa"))); assertNull(h.validateUpdate(adoc("id", "two", "subject", "aa"))); assertNull(h.validateUpdate(adoc("id", "3", "subject", "aa"))); @@ -38,9 +38,21 @@ public static void beforeTest() throws Exception { public void testToLogIds() throws Exception { SolrQueryRequest req = null; try { - String handler="/withlog"; - req = req("indent","true", "qt","/withlog", "q","aa", "rows","2", - "fl","id,subject", "responseLog","true"); + String handler = "/withlog"; + req = + req( + "indent", + "true", + "qt", + "/withlog", + "q", + "aa", + "rows", + "2", + "fl", + "id,subject", + "responseLog", + "true"); SolrQueryResponse qr = h.queryAndResponse(handler, req); NamedList<Object> entries = qr.getToLog(); String responseLog = (String) entries.get("responseLog"); @@ -55,9 +67,21 @@ public void testToLogIds() throws Exception { public void testToLogScores() throws Exception { SolrQueryRequest req = null; try { - String handler="/withlog"; - req = req("indent","true", "qt","/withlog", "q","aa", "rows","2", - "fl","id,subject,score", "responseLog","true"); + String handler = "/withlog"; + req = + req( + "indent", + "true", + "qt", + "/withlog", + "q", + "aa", + "rows", + "2", + "fl", + "id,subject,score", + "responseLog", + "true"); SolrQueryResponse qr = h.queryAndResponse(handler, req); NamedList<Object> entries = qr.getToLog(); String responseLog = (String) entries.get("responseLog"); @@ -67,20 +91,32 @@ public void testToLogScores() throws Exception { req.close(); } } - + @Test public void testDisabling() throws Exception { SolrQueryRequest req = null; try { - String handler="/withlog"; - req = req("indent","true", "qt","/withlog", "q","aa", "rows","2", - "fl","id,subject", "responseLog","false"); + String handler = "/withlog"; + req = + req( + "indent", + "true", + "qt", + "/withlog", + "q", + "aa", + "rows", + "2", + "fl", + "id,subject", + "responseLog", + "false"); SolrQueryResponse qr = h.queryAndResponse(handler, req); NamedList<Object> entries = qr.getToLog(); String responseLog = (String) entries.get("responseLog"); assertNull(responseLog); } finally { req.close(); - } + } } } diff --git a/solr/core/src/test/org/apache/solr/handler/component/SearchHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/component/SearchHandlerTest.java index aa17b551c2f..a3f66eba13f 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/SearchHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/SearchHandlerTest.java @@ -21,7 +21,6 @@ import java.util.Collection; import java.util.Iterator; import java.util.List; - import org.apache.solr.SolrTestCaseJ4; import 
org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -40,72 +39,71 @@ import org.junit.BeforeClass; import org.junit.Test; -public class SearchHandlerTest extends SolrTestCaseJ4 -{ +public class SearchHandlerTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeTests() throws Exception { - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); } @Test - public void testInitialization() - { + public void testInitialization() { SolrCore core = h.getCore(); - + // Build an explicit list - //----------------------------------------------- + // ----------------------------------------------- List<String> names0 = new ArrayList<>(); - names0.add( MoreLikeThisComponent.COMPONENT_NAME ); - + names0.add(MoreLikeThisComponent.COMPONENT_NAME); + NamedList<List<String>> args = new NamedList<>(); - args.add( SearchHandler.INIT_COMPONENTS, names0 ); + args.add(SearchHandler.INIT_COMPONENTS, names0); try (SearchHandler handler = new SearchHandler()) { handler.init(args); handler.inform(core); assertEquals(1, handler.getComponents().size()); - assertEquals(core.getSearchComponent(MoreLikeThisComponent.COMPONENT_NAME), + assertEquals( + core.getSearchComponent(MoreLikeThisComponent.COMPONENT_NAME), handler.getComponents().get(0)); } catch (IOException e) { fail("IOException closing SearchHandler"); } // Build an explicit list that includes the debug comp. - //----------------------------------------------- + // ----------------------------------------------- names0 = new ArrayList<>(); - names0.add( FacetComponent.COMPONENT_NAME ); - names0.add( DebugComponent.COMPONENT_NAME ); - names0.add( MoreLikeThisComponent.COMPONENT_NAME ); + names0.add(FacetComponent.COMPONENT_NAME); + names0.add(DebugComponent.COMPONENT_NAME); + names0.add(MoreLikeThisComponent.COMPONENT_NAME); args = new NamedList<>(); - args.add( SearchHandler.INIT_COMPONENTS, names0 ); + args.add(SearchHandler.INIT_COMPONENTS, names0); try (SearchHandler handler = new SearchHandler()) { handler.init(args); handler.inform(core); assertEquals(3, handler.getComponents().size()); - assertEquals(core.getSearchComponent(FacetComponent.COMPONENT_NAME), - handler.getComponents().get(0)); - assertEquals(core.getSearchComponent(DebugComponent.COMPONENT_NAME), - handler.getComponents().get(1)); - assertEquals(core.getSearchComponent(MoreLikeThisComponent.COMPONENT_NAME), + assertEquals( + core.getSearchComponent(FacetComponent.COMPONENT_NAME), handler.getComponents().get(0)); + assertEquals( + core.getSearchComponent(DebugComponent.COMPONENT_NAME), handler.getComponents().get(1)); + assertEquals( + core.getSearchComponent(MoreLikeThisComponent.COMPONENT_NAME), handler.getComponents().get(2)); } catch (IOException e) { fail("Exception when closing SearchHandler"); } - // First/Last list - //----------------------------------------------- + // ----------------------------------------------- names0 = new ArrayList<>(); - names0.add( MoreLikeThisComponent.COMPONENT_NAME ); - + names0.add(MoreLikeThisComponent.COMPONENT_NAME); + List<String> names1 = new ArrayList<>(); - names1.add( FacetComponent.COMPONENT_NAME ); - + names1.add(FacetComponent.COMPONENT_NAME); + args = new NamedList<>(); - args.add( SearchHandler.INIT_FIRST_COMPONENTS, names0 ); - args.add( SearchHandler.INIT_LAST_COMPONENTS, names1 ); + args.add(SearchHandler.INIT_FIRST_COMPONENTS, names0); + args.add(SearchHandler.INIT_LAST_COMPONENTS, names1); try (SearchHandler handler = new SearchHandler()) { 
handler.init(args); handler.inform(core); @@ -113,17 +111,20 @@ public void testInitialization() List<SearchComponent> comps = handler.getComponents(); assertEquals(2 + handler.getDefaultComponents().size(), comps.size()); assertEquals(core.getSearchComponent(MoreLikeThisComponent.COMPONENT_NAME), comps.get(0)); - assertEquals(core.getSearchComponent(FacetComponent.COMPONENT_NAME), comps.get(comps.size() - 2)); - //Debug component is always last in this case - assertEquals(core.getSearchComponent(DebugComponent.COMPONENT_NAME), comps.get(comps.size() - 1)); + assertEquals( + core.getSearchComponent(FacetComponent.COMPONENT_NAME), comps.get(comps.size() - 2)); + // Debug component is always last in this case + assertEquals( + core.getSearchComponent(DebugComponent.COMPONENT_NAME), comps.get(comps.size() - 1)); } catch (IOException e) { fail("Exception when closing SearchHandler"); } } - + @Test - public void testZkConnected() throws Exception{ - MiniSolrCloudCluster miniCluster = new MiniSolrCloudCluster(5, createTempDir(), buildJettyConfig("/solr")); + public void testZkConnected() throws Exception { + MiniSolrCloudCluster miniCluster = + new MiniSolrCloudCluster(5, createTempDir(), buildJettyConfig("/solr")); final CloudSolrClient cloudSolrClient = miniCluster.getSolrClient(); @@ -138,17 +139,23 @@ public void testZkConnected() throws Exception{ // create collection String collectionName = "testSolrCloudCollection"; String configName = "solrCloudCollectionConfig"; - miniCluster.uploadConfigSet(SolrTestCaseJ4.TEST_PATH().resolve("collection1/conf"), configName); + miniCluster.uploadConfigSet( + SolrTestCaseJ4.TEST_PATH().resolve("collection1/conf"), configName); CollectionAdminRequest.createCollection(collectionName, configName, 2, 2) .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) .process(miniCluster.getSolrClient()); - + QueryRequest req = new QueryRequest(); QueryResponse rsp = req.process(cloudSolrClient, collectionName); assertTrue(rsp.getResponseHeader().getBooleanArg("zkConnected")); - Collection<Slice> slices = cloudSolrClient.getZkStateReader().getClusterState().getCollection(collectionName).getSlices(); + Collection<Slice> slices = + cloudSolrClient + .getZkStateReader() + .getClusterState() + .getCollection(collectionName) + .getSlices(); Slice slice = getRandomEntry(slices); Replica replica = getRandomEntry(slice.getReplicas()); JettySolrRunner jetty = miniCluster.getReplicaJetty(replica); @@ -158,15 +165,15 @@ public void testZkConnected() throws Exception{ rsp = req.process(client); assertFalse(rsp.getResponseHeader().getBooleanArg("zkConnected")); } - } - finally { + } finally { miniCluster.shutdown(); } } @Test - public void testRequireZkConnected() throws Exception{ - MiniSolrCloudCluster miniCluster = new MiniSolrCloudCluster(5, createTempDir(), buildJettyConfig("/solr")); + public void testRequireZkConnected() throws Exception { + MiniSolrCloudCluster miniCluster = + new MiniSolrCloudCluster(5, createTempDir(), buildJettyConfig("/solr")); final CloudSolrClient cloudSolrClient = miniCluster.getSolrClient(); @@ -181,7 +188,8 @@ public void testRequireZkConnected() throws Exception{ // create collection String collectionName = "testRequireZkConnectedCollection"; String configName = collectionName + "Config"; - miniCluster.uploadConfigSet(SolrTestCaseJ4.TEST_PATH().resolve("collection1/conf"), configName); + miniCluster.uploadConfigSet( + SolrTestCaseJ4.TEST_PATH().resolve("collection1/conf"), configName); CollectionAdminRequest.createCollection(collectionName, configName, 2, 2) 
.setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) @@ -193,28 +201,35 @@ public void testRequireZkConnected() throws Exception{ QueryResponse rsp = req.process(cloudSolrClient, collectionName); assertTrue(rsp.getResponseHeader().getBooleanArg("zkConnected")); - Collection<Slice> slices = cloudSolrClient.getZkStateReader().getClusterState().getCollection(collectionName).getSlices(); + Collection<Slice> slices = + cloudSolrClient + .getZkStateReader() + .getClusterState() + .getCollection(collectionName) + .getSlices(); Slice disconnectedSlice = getRandomEntry(slices); Replica disconnectedReplica = getRandomEntry(disconnectedSlice.getReplicas()); JettySolrRunner disconnectedJetty = miniCluster.getReplicaJetty(disconnectedReplica); // Use the replica's core URL to avoid ZK communication - try (HttpSolrClient httpSolrClient = new HttpSolrClient.Builder(disconnectedReplica.getCoreUrl()).build()) { + try (HttpSolrClient httpSolrClient = + new HttpSolrClient.Builder(disconnectedReplica.getCoreUrl()).build()) { ignoreException("ZooKeeper is not connected"); disconnectedJetty.getCoreContainer().getZkController().getZkClient().close(); req.process(httpSolrClient); - fail("An exception should be thrown when ZooKeeper is not connected and shards.tolerant=requireZkConnected"); + fail( + "An exception should be thrown when ZooKeeper is not connected and shards.tolerant=requireZkConnected"); } catch (Exception e) { assertTrue(e.getMessage().contains("ZooKeeper is not connected")); } - } - finally { + } finally { miniCluster.shutdown(); } } @Test - public void testRequireZkConnectedDistrib() throws Exception{ - MiniSolrCloudCluster miniCluster = new MiniSolrCloudCluster(2, createTempDir(), buildJettyConfig("/solr")); + public void testRequireZkConnectedDistrib() throws Exception { + MiniSolrCloudCluster miniCluster = + new MiniSolrCloudCluster(2, createTempDir(), buildJettyConfig("/solr")); final CloudSolrClient cloudSolrClient = miniCluster.getSolrClient(); @@ -229,7 +244,8 @@ public void testRequireZkConnectedDistrib() throws Exception{ // create collection String collectionName = "testRequireZkConnectedDistribCollection"; String configName = collectionName + "Config"; - miniCluster.uploadConfigSet(SolrTestCaseJ4.TEST_PATH().resolve("collection1/conf"), configName); + miniCluster.uploadConfigSet( + SolrTestCaseJ4.TEST_PATH().resolve("collection1/conf"), configName); CollectionAdminRequest.createCollection(collectionName, configName, 2, 1) .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) @@ -241,7 +257,12 @@ public void testRequireZkConnectedDistrib() throws Exception{ QueryResponse rsp = req.process(cloudSolrClient, collectionName); assertTrue(rsp.getResponseHeader().getBooleanArg("zkConnected")); - Collection<Slice> slices = cloudSolrClient.getZkStateReader().getClusterState().getCollection(collectionName).getSlices(); + Collection<Slice> slices = + cloudSolrClient + .getZkStateReader() + .getClusterState() + .getCollection(collectionName) + .getSlices(); Slice disconnectedSlice = getRandomEntry(slices); Replica disconnectedReplica = getRandomEntry(disconnectedSlice.getReplicas()); @@ -251,20 +272,22 @@ public void testRequireZkConnectedDistrib() throws Exception{ connectedSlice = getRandomEntry(slices); } Replica connectedReplica = connectedSlice.getReplicas().iterator().next(); - try (HttpSolrClient httpSolrClient = new HttpSolrClient.Builder(connectedReplica.getCoreUrl()).build()) { + try (HttpSolrClient httpSolrClient = + new HttpSolrClient.Builder(connectedReplica.getCoreUrl()).build()) { 
ignoreException("ZooKeeper is not connected"); ignoreException("no servers hosting shard:"); JettySolrRunner disconnectedJetty = miniCluster.getReplicaJetty(disconnectedReplica); disconnectedJetty.getCoreContainer().getZkController().getZkClient().close(); req.process(httpSolrClient); - fail("An exception should be thrown when ZooKeeper is not connected and shards.tolerant=requireZkConnected"); + fail( + "An exception should be thrown when ZooKeeper is not connected and shards.tolerant=requireZkConnected"); } catch (Exception e) { - assertTrue("Unrecognized exception message: " + e, - e.getMessage().contains("no servers hosting shard:") + assertTrue( + "Unrecognized exception message: " + e, + e.getMessage().contains("no servers hosting shard:") || e.getMessage().contains("ZooKeeper is not connected")); } - } - finally { + } finally { miniCluster.shutdown(); unIgnoreException("no servers hosting shard:"); unIgnoreException("ZooKeeper is not connected"); @@ -272,14 +295,12 @@ public void testRequireZkConnectedDistrib() throws Exception{ } private static <T> T getRandomEntry(Collection<T> collection) { - if (null == collection || collection.isEmpty()) - return null; + if (null == collection || collection.isEmpty()) return null; Iterator<T> iterator = collection.iterator(); T entry = iterator.next(); int index = 0, rand = random().nextInt(collection.size()); - while (index++ < rand) - entry = iterator.next(); + while (index++ < rand) entry = iterator.next(); return entry; } } diff --git a/solr/core/src/test/org/apache/solr/handler/component/ShardRequestTest.java b/solr/core/src/test/org/apache/solr/handler/component/ShardRequestTest.java index 1dbf6e34acc..3cc27ac8c8b 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/ShardRequestTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/ShardRequestTest.java @@ -29,8 +29,9 @@ public void testDefaultPurposeIsZero() throws Exception { /** * Test that the constant stays constant. The constant's value is used in various places, ideally - * directly via ShardRequest.PURPOSE_PRIVATE but possibly also indirectly via magic '1' hard-coding. - * If the constant's value needs to change please carefully account for the code impacted by that. + * directly via ShardRequest.PURPOSE_PRIVATE but possibly also indirectly via magic '1' + * hard-coding. If the constant's value needs to change please carefully account for the code + * impacted by that. 
*/ @Test public void testPurposePrivateIsOne() throws Exception { diff --git a/solr/core/src/test/org/apache/solr/handler/component/ShardsAllowListTest.java b/solr/core/src/test/org/apache/solr/handler/component/ShardsAllowListTest.java index d0a0d70bd13..99ba72fc862 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/ShardsAllowListTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/ShardsAllowListTest.java @@ -42,23 +42,29 @@ public class ShardsAllowListTest extends MultiSolrCloudTestCase { /** - * The cluster with this key will include an explicit list of host allowed (all hosts in both the clusters) + * The cluster with this key will include an explicit list of host allowed (all hosts in both the + * clusters) */ private static final String EXPLICIT_CLUSTER_KEY = "explicitCluster"; /** - * The cluster with this key will not include an explicit list of host allowed, will rely on live_nodes + * The cluster with this key will not include an explicit list of host allowed, will rely on + * live_nodes */ private static final String IMPLICIT_CLUSTER_KEY = "implicitCluster"; - private static final String EXPLICIT_ALLOW_LIST_PROPERTY = "solr.tests.ShardsAllowListTest.explicitAllowList."; + + private static final String EXPLICIT_ALLOW_LIST_PROPERTY = + "solr.tests.ShardsAllowListTest.explicitAllowList."; protected static final String COLLECTION_NAME = "ShardsAllowListTestCollection"; private static int numShards; private static int numReplicas; private static int nodesPerCluster; - private static void appendClusterNodes(final StringBuilder sb, final String delimiter, - final MiniSolrCloudCluster cluster) { - cluster.getJettySolrRunners().forEach((jetty) -> sb.append(jetty.getBaseUrl().toString() + delimiter)); + private static void appendClusterNodes( + final StringBuilder sb, final String delimiter, final MiniSolrCloudCluster cluster) { + cluster + .getJettySolrRunners() + .forEach((jetty) -> sb.append(jetty.getBaseUrl().toString() + delimiter)); } @BeforeClass @@ -72,17 +78,20 @@ public static void setupClusters() throws Exception { final StringBuilder sb = new StringBuilder(); - doSetupClusters(clusterIds, + doSetupClusters( + clusterIds, new DefaultClusterCreateFunction() { @Override public MiniSolrCloudCluster apply(String clusterId) { try { - final MiniSolrCloudCluster cluster = new MiniSolrCloudCluster.Builder(nodesPerCluster(clusterId), - createTempDir()) + final MiniSolrCloudCluster cluster = + new MiniSolrCloudCluster.Builder(nodesPerCluster(clusterId), createTempDir()) .addConfig("conf", configset("cloud-dynamic")) - .withSolrXml(MiniSolrCloudCluster.DEFAULT_CLOUD_SOLR_XML.replace( - MiniSolrCloudCluster.TEST_URL_ALLOW_LIST, EXPLICIT_ALLOW_LIST_PROPERTY + clusterId)) + .withSolrXml( + MiniSolrCloudCluster.DEFAULT_CLOUD_SOLR_XML.replace( + MiniSolrCloudCluster.TEST_URL_ALLOW_LIST, + EXPLICIT_ALLOW_LIST_PROPERTY + clusterId)) .build(); return cluster; } catch (Exception e) { @@ -130,7 +139,8 @@ public void test() throws Exception { for (MiniSolrCloudCluster cluster : clusterId2cluster.values()) { for (JettySolrRunner runner : cluster.getJettySolrRunners()) { URI uri = runner.getBaseUrl().toURI(); - assertThat(getAllowListUrlChecker(EXPLICIT_CLUSTER_KEY).getHostAllowList(), + assertThat( + getAllowListUrlChecker(EXPLICIT_CLUSTER_KEY).getHostAllowList(), hasItem(uri.getHost() + ":" + uri.getPort())); } } @@ -138,7 +148,7 @@ public void test() throws Exception { MiniSolrCloudCluster implicitCluster = clusterId2cluster.get(IMPLICIT_CLUSTER_KEY); 
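The two cluster flavors documented in the javadocs above differ only in how a node vets the shards parameter: every URL in it must appear either in the explicitly configured allow list or among live_nodes. A minimal sketch of the kind of request being vetted, using hypothetical host and collection names rather than values from this test:

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;

class ShardsParamSketch {
  // Illustrative only: a distributed query naming shard replicas by URL.
  // The receiving node checks each URL against its allow list (explicit
  // configuration or live_nodes) before fanning the request out.
  QueryResponse queryExplicitShards(SolrClient client) throws Exception {
    SolrQuery q = new SolrQuery("*:*");
    q.set(
        "shards",
        "http://hostA:8983/solr/col_shard1_replica_n1,"
            + "http://hostB:8983/solr/col_shard2_replica_n2");
    return client.query("col", q);
  }
}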
MiniSolrCloudCluster explicitCluster = clusterId2cluster.get(EXPLICIT_CLUSTER_KEY); - for (Map.Entry entry : clusterId2cluster.entrySet()) { + for (Map.Entry entry : clusterId2cluster.entrySet()) { List docs = new ArrayList<>(10); for (int i = 0; i < 10; i++) { docs.add(new SolrInputDocument("id", entry.getKey() + i)); @@ -148,87 +158,119 @@ public void test() throws Exception { cluster.getSolrClient().commit(COLLECTION_NAME, true, true); // test using ClusterState elements - assertThat("No shards specified, should work in both clusters", - numDocs("*:*", null, cluster), is(10)); - assertThat("Both shards specified, should work in both clusters", - numDocs("*:*", "shard1,shard2", cluster), is(10)); - assertThat("Both shards specified with collection name, should work in both clusters", - numDocs("*:*", COLLECTION_NAME + "_shard1", cluster), is(numDocs("*:*", "shard1", cluster))); + assertThat( + "No shards specified, should work in both clusters", + numDocs("*:*", null, cluster), + is(10)); + assertThat( + "Both shards specified, should work in both clusters", + numDocs("*:*", "shard1,shard2", cluster), + is(10)); + assertThat( + "Both shards specified with collection name, should work in both clusters", + numDocs("*:*", COLLECTION_NAME + "_shard1", cluster), + is(numDocs("*:*", "shard1", cluster))); // test using explicit urls from within the cluster - assertThat("Shards has the full URLs, should be allowed since they are internal. Cluster=" + entry.getKey(), - numDocs("*:*", getShardUrl("shard1", cluster) + "," + getShardUrl("shard2", cluster), cluster), is(10)); - assertThat("Full URL without scheme", - numDocs("*:*", getShardUrl("shard1", cluster).replaceAll("http://", "") + "," - + getShardUrl("shard2", cluster).replaceAll("http://", ""), cluster), + assertThat( + "Shards has the full URLs, should be allowed since they are internal. Cluster=" + + entry.getKey(), + numDocs( + "*:*", + getShardUrl("shard1", cluster) + "," + getShardUrl("shard2", cluster), + cluster), + is(10)); + assertThat( + "Full URL without scheme", + numDocs( + "*:*", + getShardUrl("shard1", cluster).replaceAll("http://", "") + + "," + + getShardUrl("shard2", cluster).replaceAll("http://", ""), + cluster), is(10)); // Mix shards with URLs - assertThat("Mix URL and cluster state object", - numDocs("*:*", "shard1," + getShardUrl("shard2", cluster), cluster), is(10)); - assertThat("Mix URL and cluster state object", - numDocs("*:*", getShardUrl("shard1", cluster) + ",shard2", cluster), is(10)); + assertThat( + "Mix URL and cluster state object", + numDocs("*:*", "shard1," + getShardUrl("shard2", cluster), cluster), + is(10)); + assertThat( + "Mix URL and cluster state object", + numDocs("*:*", getShardUrl("shard1", cluster) + ",shard2", cluster), + is(10)); } - // explicit allow-list includes all the nodes in both clusters. Requests should be allowed to go through - assertThat("A request to the explicit cluster with shards that point to the implicit one", + // explicit allow-list includes all the nodes in both clusters. 
Requests should be allowed to go + // through + assertThat( + "A request to the explicit cluster with shards that point to the implicit one", numDocs( "id:implicitCluster*", getShardUrl("shard1", implicitCluster) + "," + getShardUrl("shard2", implicitCluster), explicitCluster), is(10)); - assertThat("A request to the explicit cluster with shards that point to the both clusters", + assertThat( + "A request to the explicit cluster with shards that point to the both clusters", numDocs( "*:*", getShardUrl("shard1", implicitCluster) - + "," + getShardUrl("shard2", implicitCluster) - + "," + getShardUrl("shard1", explicitCluster) - + "," + getShardUrl("shard2", explicitCluster), + + "," + + getShardUrl("shard2", implicitCluster) + + "," + + getShardUrl("shard1", explicitCluster) + + "," + + getShardUrl("shard2", explicitCluster), explicitCluster), is(20)); // Implicit shouldn't allow requests to the other cluster - assertForbidden("id:explicitCluster*", + assertForbidden( + "id:explicitCluster*", getShardUrl("shard1", explicitCluster) + "," + getShardUrl("shard2", explicitCluster), implicitCluster); - assertForbidden("id:explicitCluster*", - "shard1," + getShardUrl("shard2", explicitCluster), - implicitCluster); + assertForbidden( + "id:explicitCluster*", "shard1," + getShardUrl("shard2", explicitCluster), implicitCluster); - assertForbidden("id:explicitCluster*", - getShardUrl("shard1", explicitCluster) + ",shard2", - implicitCluster); + assertForbidden( + "id:explicitCluster*", getShardUrl("shard1", explicitCluster) + ",shard2", implicitCluster); - assertForbidden("id:explicitCluster*", - getShardUrl("shard1", explicitCluster), - implicitCluster); + assertForbidden("id:explicitCluster*", getShardUrl("shard1", explicitCluster), implicitCluster); - assertThat("A typical internal request, should be handled locally", + assertThat( + "A typical internal request, should be handled locally", numDocs( "id:explicitCluster*", null, implicitCluster, - "distrib", "false", - "shard.url", getShardUrl("shard2", explicitCluster), - "shards.purpose", "64", - "isShard", "true"), + "distrib", + "false", + "shard.url", + getShardUrl("shard2", explicitCluster), + "shards.purpose", + "64", + "isShard", + "true"), is(0)); } private AllowListUrlChecker getAllowListUrlChecker(String clusterId) { - return clusterId2cluster.get(clusterId).getJettySolrRunner(0).getCoreContainer().getAllowListUrlChecker(); + return clusterId2cluster + .get(clusterId) + .getJettySolrRunner(0) + .getCoreContainer() + .getAllowListUrlChecker(); } - private void assertForbidden(String query, String shards, MiniSolrCloudCluster cluster) throws IOException { - String expectedExceptionMessage = "nor in the configured '" + AllowListUrlChecker.URL_ALLOW_LIST + "'"; + private void assertForbidden(String query, String shards, MiniSolrCloudCluster cluster) + throws IOException { + String expectedExceptionMessage = + "nor in the configured '" + AllowListUrlChecker.URL_ALLOW_LIST + "'"; ignoreException(expectedExceptionMessage); try { - numDocs( - query, - shards, - cluster); + numDocs(query, shards, cluster); fail("Expecting failure for shards parameter: '" + shards + "'"); } catch (SolrServerException e) { assertThat(e.getCause(), instanceOf(SolrException.class)); @@ -240,11 +282,23 @@ private void assertForbidden(String query, String shards, MiniSolrCloudCluster c } private String getShardUrl(String shardName, MiniSolrCloudCluster cluster) { - return cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLLECTION_NAME) - 
.getSlice(shardName).getReplicas().iterator().next().getCoreUrl(); + return cluster + .getSolrClient() + .getZkStateReader() + .getClusterState() + .getCollection(COLLECTION_NAME) + .getSlice(shardName) + .getReplicas() + .iterator() + .next() + .getCoreUrl(); } - private int numDocs(String queryString, String shardsParamValue, MiniSolrCloudCluster cluster, String... otherParams) + private int numDocs( + String queryString, + String shardsParamValue, + MiniSolrCloudCluster cluster, + String... otherParams) throws SolrServerException, IOException { SolrQuery q = new SolrQuery(queryString); if (shardsParamValue != null) { @@ -258,5 +312,4 @@ private int numDocs(String queryString, String shardsParamValue, MiniSolrCloudCl } return (int) cluster.getSolrClient().query(COLLECTION_NAME, q).getResults().getNumFound(); } - } diff --git a/solr/core/src/test/org/apache/solr/handler/component/SpellCheckComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/SpellCheckComponentTest.java index 07b14f2ec21..24de8bbb109 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/SpellCheckComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/SpellCheckComponentTest.java @@ -18,7 +18,6 @@ import java.io.File; import java.util.*; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.lucene.util.LuceneTestCase.SuppressTempFileChecks; import org.apache.solr.SolrTestCaseJ4; @@ -41,25 +40,24 @@ * @since solr 1.3 */ @Slow -@SuppressTempFileChecks(bugUrl = "https://issues.apache.org/jira/browse/SOLR-1877 Spellcheck IndexReader leak bug?") +@SuppressTempFileChecks( + bugUrl = "https://issues.apache.org/jira/browse/SOLR-1877 Spellcheck IndexReader leak bug?") public class SpellCheckComponentTest extends SolrTestCaseJ4 { static String rh = "/spellCheckCompRH"; - @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-spellcheckcomponent.xml","schema.xml"); + initCore("solrconfig-spellcheckcomponent.xml", "schema.xml"); } - + @Override public void setUp() throws Exception { super.setUp(); assertU(adoc("id", "0", "lowerfilt", "This is a title")); - assertU((adoc("id", "1", "lowerfilt", - "The quick reb fox jumped over the lazy brown dogs."))); + assertU((adoc("id", "1", "lowerfilt", "The quick reb fox jumped over the lazy brown dogs."))); assertU((adoc("id", "2", "lowerfilt", "This is a document"))); assertU((adoc("id", "3", "lowerfilt", "another document"))); - //bunch of docs that are variants on blue + // bunch of docs that are variants on blue assertU((adoc("id", "4", "lowerfilt", "this blue"))); assertU((adoc("id", "5", "lowerfilt", "this blud"))); assertU((adoc("id", "6", "lowerfilt", "this boue"))); @@ -68,114 +66,286 @@ public void setUp() throws Exception { assertU((adoc("id", "9", "lowerfilt", "pixmaa 12345"))); assertU((commit())); } - + @Override public void tearDown() throws Exception { super.tearDown(); assertU(delQ("*:*")); optimize(); assertU((commit())); - } - + @Test public void testMaximumResultsForSuggest() throws Exception { - assertJQ(req("qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", SpellingParams.SPELLCHECK_BUILD, "true", "q","lowerfilt:(this OR brwn)", - SpellingParams.SPELLCHECK_COUNT,"5", SpellingParams.SPELLCHECK_EXTENDED_RESULTS,"false", SpellingParams.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST, "7") - ,"/spellcheck/suggestions/[0]=='brwn'" - ,"/spellcheck/suggestions/[1]/numFound==1" - ); - - expectThrows(Exception.class, () -> { - assertJQ(req("qt",rh, SpellCheckComponent.COMPONENT_NAME, 
"true", SpellingParams.SPELLCHECK_BUILD, "true", "q","lowerfilt:(this OR brwn)", - SpellingParams.SPELLCHECK_COUNT,"5", SpellingParams.SPELLCHECK_EXTENDED_RESULTS,"false", SpellingParams.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST, "6") - ,"/spellcheck/suggestions/[1]/numFound==1" - ); - }); - - assertJQ(req("qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", SpellingParams.SPELLCHECK_BUILD, "true", "q","lowerfilt:(this OR brwn)", - "fq", "id:[0 TO 9]", /*returns 10, less selective */ "fq", "lowerfilt:th*", /* returns 8, most selective */ - SpellingParams.SPELLCHECK_COUNT,"5", SpellingParams.SPELLCHECK_EXTENDED_RESULTS,"false", SpellingParams.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST, ".90") - ,"/spellcheck/suggestions/[0]=='brwn'" - ,"/spellcheck/suggestions/[1]/numFound==1" - ); - - expectThrows(Exception.class, () -> { - assertJQ(req("qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", SpellingParams.SPELLCHECK_BUILD, "true", "q","lowerfilt:(this OR brwn)", - "fq", "id:[0 TO 9]", /*returns 10, less selective */ "fq", "lowerfilt:th*", /* returns 8, most selective */ - SpellingParams.SPELLCHECK_COUNT,"5", SpellingParams.SPELLCHECK_EXTENDED_RESULTS,"false", SpellingParams.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST, ".80") - ,"/spellcheck/suggestions/[1]/numFound==1" - ); - }); - - assertJQ(req("qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", SpellingParams.SPELLCHECK_BUILD, "true", "q","lowerfilt:(this OR brwn)", - "fq", "id:[0 TO 9]", SpellingParams.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST_FQ, "id:[0 TO 9]", - SpellingParams.SPELLCHECK_COUNT,"5", SpellingParams.SPELLCHECK_EXTENDED_RESULTS,"false", SpellingParams.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST, ".70") - ,"/spellcheck/suggestions/[0]=='brwn'" - ,"/spellcheck/suggestions/[1]/numFound==1" - ); - - expectThrows(Exception.class, () -> { - assertJQ(req("qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", SpellingParams.SPELLCHECK_BUILD, "true", "q","lowerfilt:(this OR brwn)", - "fq", "id:[0 TO 9]", SpellingParams.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST_FQ, "lowerfilt:th*", - SpellingParams.SPELLCHECK_COUNT,"5", SpellingParams.SPELLCHECK_EXTENDED_RESULTS,"false", SpellingParams.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST, ".64") - ,"/spellcheck/suggestions/[1]/numFound==1" - ); - }); - } - + assertJQ( + req( + "qt", + rh, + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellingParams.SPELLCHECK_BUILD, + "true", + "q", + "lowerfilt:(this OR brwn)", + SpellingParams.SPELLCHECK_COUNT, + "5", + SpellingParams.SPELLCHECK_EXTENDED_RESULTS, + "false", + SpellingParams.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST, + "7"), + "/spellcheck/suggestions/[0]=='brwn'", + "/spellcheck/suggestions/[1]/numFound==1"); + + expectThrows( + Exception.class, + () -> { + assertJQ( + req( + "qt", + rh, + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellingParams.SPELLCHECK_BUILD, + "true", + "q", + "lowerfilt:(this OR brwn)", + SpellingParams.SPELLCHECK_COUNT, + "5", + SpellingParams.SPELLCHECK_EXTENDED_RESULTS, + "false", + SpellingParams.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST, + "6"), + "/spellcheck/suggestions/[1]/numFound==1"); + }); + + assertJQ( + req( + "qt", + rh, + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellingParams.SPELLCHECK_BUILD, + "true", + "q", + "lowerfilt:(this OR brwn)", + "fq", + "id:[0 TO 9]", /*returns 10, less selective */ + "fq", + "lowerfilt:th*", /* returns 8, most selective */ + SpellingParams.SPELLCHECK_COUNT, + "5", + SpellingParams.SPELLCHECK_EXTENDED_RESULTS, + "false", + SpellingParams.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST, + ".90"), + "/spellcheck/suggestions/[0]=='brwn'", 
+ "/spellcheck/suggestions/[1]/numFound==1"); + + expectThrows( + Exception.class, + () -> { + assertJQ( + req( + "qt", + rh, + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellingParams.SPELLCHECK_BUILD, + "true", + "q", + "lowerfilt:(this OR brwn)", + "fq", + "id:[0 TO 9]", /*returns 10, less selective */ + "fq", + "lowerfilt:th*", /* returns 8, most selective */ + SpellingParams.SPELLCHECK_COUNT, + "5", + SpellingParams.SPELLCHECK_EXTENDED_RESULTS, + "false", + SpellingParams.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST, + ".80"), + "/spellcheck/suggestions/[1]/numFound==1"); + }); + + assertJQ( + req( + "qt", + rh, + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellingParams.SPELLCHECK_BUILD, + "true", + "q", + "lowerfilt:(this OR brwn)", + "fq", + "id:[0 TO 9]", + SpellingParams.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST_FQ, + "id:[0 TO 9]", + SpellingParams.SPELLCHECK_COUNT, + "5", + SpellingParams.SPELLCHECK_EXTENDED_RESULTS, + "false", + SpellingParams.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST, + ".70"), + "/spellcheck/suggestions/[0]=='brwn'", + "/spellcheck/suggestions/[1]/numFound==1"); + + expectThrows( + Exception.class, + () -> { + assertJQ( + req( + "qt", + rh, + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellingParams.SPELLCHECK_BUILD, + "true", + "q", + "lowerfilt:(this OR brwn)", + "fq", + "id:[0 TO 9]", + SpellingParams.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST_FQ, + "lowerfilt:th*", + SpellingParams.SPELLCHECK_COUNT, + "5", + SpellingParams.SPELLCHECK_EXTENDED_RESULTS, + "false", + SpellingParams.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST, + ".64"), + "/spellcheck/suggestions/[1]/numFound==1"); + }); + } + @Test public void testExtendedResultsCount() throws Exception { - assertJQ(req("qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", SpellingParams.SPELLCHECK_BUILD, "true", "q","bluo", SpellingParams.SPELLCHECK_COUNT,"5", SpellingParams.SPELLCHECK_EXTENDED_RESULTS,"false") - ,"/spellcheck/suggestions/[0]=='bluo'" - ,"/spellcheck/suggestions/[1]/numFound==5" - ); - - assertJQ(req("qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", "q","bluo", SpellingParams.SPELLCHECK_COUNT,"3", SpellingParams.SPELLCHECK_EXTENDED_RESULTS,"true") - ,"/spellcheck/suggestions/[1]/suggestion==[{'word':'blud','freq':1}, {'word':'blue','freq':1}, {'word':'blee','freq':1}]" - ); + assertJQ( + req( + "qt", + rh, + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellingParams.SPELLCHECK_BUILD, + "true", + "q", + "bluo", + SpellingParams.SPELLCHECK_COUNT, + "5", + SpellingParams.SPELLCHECK_EXTENDED_RESULTS, + "false"), + "/spellcheck/suggestions/[0]=='bluo'", + "/spellcheck/suggestions/[1]/numFound==5"); + + assertJQ( + req( + "qt", + rh, + SpellCheckComponent.COMPONENT_NAME, + "true", + "q", + "bluo", + SpellingParams.SPELLCHECK_COUNT, + "3", + SpellingParams.SPELLCHECK_EXTENDED_RESULTS, + "true"), + "/spellcheck/suggestions/[1]/suggestion==[{'word':'blud','freq':1}, {'word':'blue','freq':1}, {'word':'blee','freq':1}]"); } @Test public void test() throws Exception { - assertJQ(req("qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", "q","documemt") - ,"/spellcheck=={'suggestions':['documemt',{'numFound':1,'startOffset':0,'endOffset':8,'suggestion':['document']}]}" - ); + assertJQ( + req("qt", rh, SpellCheckComponent.COMPONENT_NAME, "true", "q", "documemt"), + "/spellcheck=={'suggestions':['documemt',{'numFound':1,'startOffset':0,'endOffset':8,'suggestion':['document']}]}"); } - + @Test public void testNumericQuery() throws Exception { - assertJQ(req("qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", "q","12346") - 
,"/spellcheck=={'suggestions':['12346',{'numFound':1,'startOffset':0,'endOffset':5,'suggestion':['12345']}]}" - ); + assertJQ( + req("qt", rh, SpellCheckComponent.COMPONENT_NAME, "true", "q", "12346"), + "/spellcheck=={'suggestions':['12346',{'numFound':1,'startOffset':0,'endOffset':5,'suggestion':['12345']}]}"); } - @Test public void testPerDictionary() throws Exception { - assertJQ(req("json.nl","map", "qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", SpellingParams.SPELLCHECK_BUILD, "true", "q","documemt" - , SpellingParams.SPELLCHECK_DICT, "perDict", SpellingParams.SPELLCHECK_PREFIX + "perDict.foo", "bar", SpellingParams.SPELLCHECK_PREFIX + "perDict.bar", "foo") - ,"/spellcheck/suggestions/bar=={'numFound':1, 'startOffset':0, 'endOffset':1, 'suggestion':['foo']}" - ,"/spellcheck/suggestions/foo=={'numFound':1, 'startOffset':2, 'endOffset':3, 'suggestion':['bar']}" - ); + assertJQ( + req( + "json.nl", + "map", + "qt", + rh, + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellingParams.SPELLCHECK_BUILD, + "true", + "q", + "documemt", + SpellingParams.SPELLCHECK_DICT, + "perDict", + SpellingParams.SPELLCHECK_PREFIX + "perDict.foo", + "bar", + SpellingParams.SPELLCHECK_PREFIX + "perDict.bar", + "foo"), + "/spellcheck/suggestions/bar=={'numFound':1, 'startOffset':0, 'endOffset':1, 'suggestion':['foo']}", + "/spellcheck/suggestions/foo=={'numFound':1, 'startOffset':2, 'endOffset':3, 'suggestion':['bar']}"); } @Test public void testCollate() throws Exception { - assertJQ(req("json.nl","map", "qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", SpellingParams.SPELLCHECK_BUILD, "true", "q","documemt", SpellingParams.SPELLCHECK_COLLATE, "true") - ,"/spellcheck/collations/collation=='document'" - ); - assertJQ(req("json.nl","map", "qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", "q","documemt lowerfilt:broen^4", SpellingParams.SPELLCHECK_COLLATE, "true") - ,"/spellcheck/collations/collation=='document lowerfilt:brown^4'" - ); - assertJQ(req("json.nl","map", "qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", "q","documemtsss broens", SpellingParams.SPELLCHECK_COLLATE, "true") - ,"/spellcheck/collations/collation=='document brown'" - ); - assertJQ(req("json.nl","map", "qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", "q","pixma", SpellingParams.SPELLCHECK_COLLATE, "true") - ,"/spellcheck/collations/collation=='pixmaa'" - ); + assertJQ( + req( + "json.nl", + "map", + "qt", + rh, + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellingParams.SPELLCHECK_BUILD, + "true", + "q", + "documemt", + SpellingParams.SPELLCHECK_COLLATE, + "true"), + "/spellcheck/collations/collation=='document'"); + assertJQ( + req( + "json.nl", + "map", + "qt", + rh, + SpellCheckComponent.COMPONENT_NAME, + "true", + "q", + "documemt lowerfilt:broen^4", + SpellingParams.SPELLCHECK_COLLATE, + "true"), + "/spellcheck/collations/collation=='document lowerfilt:brown^4'"); + assertJQ( + req( + "json.nl", + "map", + "qt", + rh, + SpellCheckComponent.COMPONENT_NAME, + "true", + "q", + "documemtsss broens", + SpellingParams.SPELLCHECK_COLLATE, + "true"), + "/spellcheck/collations/collation=='document brown'"); + assertJQ( + req( + "json.nl", + "map", + "qt", + rh, + SpellCheckComponent.COMPONENT_NAME, + "true", + "q", + "pixma", + SpellingParams.SPELLCHECK_COLLATE, + "true"), + "/spellcheck/collations/collation=='pixmaa'"); } - @Test public void testCollateExtendedResultsWithJsonNl() throws Exception { @@ -188,57 +358,99 @@ public void testCollateExtendedResultsWithJsonNl() throws Exception { 
testsList.add("/spellcheck/collations/collation/hits==0"); switch (jsonNl) { case "map": - testsList.add("/spellcheck/collations/collation/misspellingsAndCorrections/documemtsss=='document'"); - testsList.add("/spellcheck/collations/collation/misspellingsAndCorrections/broens=='brown'"); + testsList.add( + "/spellcheck/collations/collation/misspellingsAndCorrections/documemtsss=='document'"); + testsList.add( + "/spellcheck/collations/collation/misspellingsAndCorrections/broens=='brown'"); break; default: - fail("unexpected json.nl choice: "+jsonNl); + fail("unexpected json.nl choice: " + jsonNl); break; } } else { testsList.add("/spellcheck/collations/collation=='document brown'"); } final String[] testsArray = new String[testsList.size()]; - implTestCollateExtendedResultsWithJsonNl(q, jsonNl, collateExtendedResults, testsList.toArray(testsArray)); + implTestCollateExtendedResultsWithJsonNl( + q, jsonNl, collateExtendedResults, testsList.toArray(testsArray)); } - private void implTestCollateExtendedResultsWithJsonNl(String q, String jsonNl, boolean collateExtendedResults, String ... tests) throws Exception { - final SolrQueryRequest solrQueryRequest = req( - CommonParams.QT, rh, - CommonParams.Q, q, - "json.nl", jsonNl, - SpellCheckComponent.COMPONENT_NAME, "true", - SpellingParams.SPELLCHECK_COLLATE_EXTENDED_RESULTS, Boolean.toString(collateExtendedResults), - SpellingParams.SPELLCHECK_COLLATE, "true"); + private void implTestCollateExtendedResultsWithJsonNl( + String q, String jsonNl, boolean collateExtendedResults, String... tests) throws Exception { + final SolrQueryRequest solrQueryRequest = + req( + CommonParams.QT, + rh, + CommonParams.Q, + q, + "json.nl", + jsonNl, + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellingParams.SPELLCHECK_COLLATE_EXTENDED_RESULTS, + Boolean.toString(collateExtendedResults), + SpellingParams.SPELLCHECK_COLLATE, + "true"); assertJQ(solrQueryRequest, tests); } @Test public void testCorrectSpelling() throws Exception { // Make sure correct spellings are signaled in the response - assertJQ(req("json.nl","map", "qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", - "q","lowerfilt:lazy lowerfilt:brown", SpellingParams.SPELLCHECK_EXTENDED_RESULTS, "true") - ,"/spellcheck/correctlySpelled==true" - ); - assertJQ(req("json.nl","map", "qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", "spellcheck.dictionary", "direct_lowerfilt", - "q","lowerfilt:lazy lowerfilt:brown", SpellingParams.SPELLCHECK_EXTENDED_RESULTS, "true") - ,"/spellcheck/correctlySpelled==true" - ); - assertJQ(req("json.nl","map", "qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", "spellcheck.dictionary", "direct_lowerfilt", - "q","lakkle", SpellingParams.SPELLCHECK_EXTENDED_RESULTS, "true") - ,"/spellcheck/correctlySpelled==false" - ); + assertJQ( + req( + "json.nl", + "map", + "qt", + rh, + SpellCheckComponent.COMPONENT_NAME, + "true", + "q", + "lowerfilt:lazy lowerfilt:brown", + SpellingParams.SPELLCHECK_EXTENDED_RESULTS, + "true"), + "/spellcheck/correctlySpelled==true"); + assertJQ( + req( + "json.nl", + "map", + "qt", + rh, + SpellCheckComponent.COMPONENT_NAME, + "true", + "spellcheck.dictionary", + "direct_lowerfilt", + "q", + "lowerfilt:lazy lowerfilt:brown", + SpellingParams.SPELLCHECK_EXTENDED_RESULTS, + "true"), + "/spellcheck/correctlySpelled==true"); + assertJQ( + req( + "json.nl", + "map", + "qt", + rh, + SpellCheckComponent.COMPONENT_NAME, + "true", + "spellcheck.dictionary", + "direct_lowerfilt", + "q", + "lakkle", + SpellingParams.SPELLCHECK_EXTENDED_RESULTS, + "true"), + 
"/spellcheck/correctlySpelled==false"); } - + @Test public void testRelativeIndexDirLocation() throws Exception { SolrCore core = h.getCore(); File indexDir = new File(core.getDataDir() + File.separator + "spellchecker1"); assertTrue(indexDir.exists()); - + indexDir = new File(core.getDataDir() + File.separator + "spellchecker2"); assertTrue(indexDir.exists()); - + indexDir = new File(core.getDataDir() + File.separator + "spellchecker3"); assertTrue(indexDir.exists()); } @@ -247,9 +459,20 @@ public void testRelativeIndexDirLocation() throws Exception { public void testReloadOnStart() throws Exception { assertU(adoc("id", "0", "lowerfilt", "This is a title")); assertU(commit()); - SolrQueryRequest request = req("qt", "/spellCheckCompRH", "q", "*:*", - "spellcheck.q", "ttle", "spellcheck", "true", "spellcheck.dictionary", - "default", "spellcheck.build", "true"); + SolrQueryRequest request = + req( + "qt", + "/spellCheckCompRH", + "q", + "*:*", + "spellcheck.q", + "ttle", + "spellcheck", + "true", + "spellcheck.dictionary", + "default", + "spellcheck.build", + "true"); assertQ(request, "//arr[@name='suggestion'][.='title']"); NamedList args = new NamedList<>(); @@ -264,9 +487,20 @@ public void testReloadOnStart() throws Exception { checker.init(args); checker.inform(h.getCore()); - request = req("qt", "/spellCheckCompRH", "q", "*:*", "spellcheck.q", "ttle", - "spellcheck", "true", "spellcheck.dictionary", "default", - "spellcheck.reload", "true"); + request = + req( + "qt", + "/spellCheckCompRH", + "q", + "*:*", + "spellcheck.q", + "ttle", + "spellcheck", + "true", + "spellcheck.dictionary", + "default", + "spellcheck.reload", + "true"); List components = new ArrayList<>(); for (String name : h.getCore().getSearchComponents().keySet()) { components.add(h.getCore().getSearchComponent(name)); @@ -283,69 +517,94 @@ public void testReloadOnStart() throws Exception { rb.req.close(); checker.close(); } - - @Test + + @Test public void testRebuildOnCommit() throws Exception { - SolrQueryRequest req = req("q", "lowerfilt:lucenejavt", "qt", "/spellCheckCompRH", "spellcheck", "true"); + SolrQueryRequest req = + req("q", "lowerfilt:lucenejavt", "qt", "/spellCheckCompRH", "spellcheck", "true"); String response = h.query(req); assertFalse("No suggestions should be returned", response.contains("lucenejava")); - + assertU(adoc("id", "11231", "lowerfilt", "lucenejava")); assertU("commit", commit()); - + assertQ(req, "//arr[@name='suggestion'][.='lucenejava']"); } - - @Test - public void testThresholdTokenFrequency() throws Exception { - - //"document" is in 2 documents but "another" is only in 1. - //So with a threshold of 29%, "another" is absent from the dictionary - //while "document" is present. - - assertJQ(req("qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", "q","documenq", SpellingParams.SPELLCHECK_DICT, "threshold", SpellingParams.SPELLCHECK_COUNT,"5", SpellingParams.SPELLCHECK_EXTENDED_RESULTS,"true") - ,"/spellcheck/suggestions/[1]/suggestion==[{'word':'document','freq':2}]" - ); - - assertJQ(req("qt",rh, SpellCheckComponent.COMPONENT_NAME, "true", "q","documenq", SpellingParams.SPELLCHECK_DICT, "threshold_direct", SpellingParams.SPELLCHECK_COUNT,"5", SpellingParams.SPELLCHECK_EXTENDED_RESULTS,"true") - ,"/spellcheck/suggestions/[1]/suggestion==[{'word':'document','freq':2}]" - ); - - //TODO: how do we make this into a 1-liner using "assertQ()" ??? 
- SolrCore core = h.getCore(); - SearchComponent speller = core.getSearchComponent("spellcheck"); - assertTrue("speller is null and it shouldn't be", speller != null); - - ModifiableSolrParams params = new ModifiableSolrParams(); - params.add(SpellCheckComponent.COMPONENT_NAME, "true"); - params.add(SpellingParams.SPELLCHECK_COUNT, "10"); - params.add(SpellingParams.SPELLCHECK_DICT, "threshold"); - params.add(SpellingParams.SPELLCHECK_EXTENDED_RESULTS,"true"); - params.add(CommonParams.Q, "anotheq"); - - SolrRequestHandler handler = core.getRequestHandler("/spellCheckCompRH"); - SolrQueryResponse rsp = new SolrQueryResponse(); - rsp.addResponseHeader(new SimpleOrderedMap<>()); - SolrQueryRequest req = new LocalSolrQueryRequest(core, params); - handler.handleRequest(req, rsp); - req.close(); - NamedList values = rsp.getValues(); - NamedList spellCheck = (NamedList) values.get("spellcheck"); - NamedList suggestions = (NamedList) spellCheck.get("suggestions"); - assertTrue(suggestions.get("suggestion")==null); - assertTrue((Boolean) spellCheck.get("correctlySpelled")==false); - - params.remove(SpellingParams.SPELLCHECK_DICT); - params.add(SpellingParams.SPELLCHECK_DICT, "threshold_direct"); - rsp = new SolrQueryResponse(); - rsp.addResponseHeader(new SimpleOrderedMap<>()); - req = new LocalSolrQueryRequest(core, params); - handler.handleRequest(req, rsp); - req.close(); - values = rsp.getValues(); - spellCheck = (NamedList) values.get("spellcheck"); - suggestions = (NamedList) spellCheck.get("suggestions"); - assertTrue(suggestions.get("suggestion")==null); - assertTrue((Boolean) spellCheck.get("correctlySpelled")==false); - } + + @Test + public void testThresholdTokenFrequency() throws Exception { + + // "document" is in 2 documents but "another" is only in 1. + // So with a threshold of 29%, "another" is absent from the dictionary + // while "document" is present. + + assertJQ( + req( + "qt", + rh, + SpellCheckComponent.COMPONENT_NAME, + "true", + "q", + "documenq", + SpellingParams.SPELLCHECK_DICT, + "threshold", + SpellingParams.SPELLCHECK_COUNT, + "5", + SpellingParams.SPELLCHECK_EXTENDED_RESULTS, + "true"), + "/spellcheck/suggestions/[1]/suggestion==[{'word':'document','freq':2}]"); + + assertJQ( + req( + "qt", + rh, + SpellCheckComponent.COMPONENT_NAME, + "true", + "q", + "documenq", + SpellingParams.SPELLCHECK_DICT, + "threshold_direct", + SpellingParams.SPELLCHECK_COUNT, + "5", + SpellingParams.SPELLCHECK_EXTENDED_RESULTS, + "true"), + "/spellcheck/suggestions/[1]/suggestion==[{'word':'document','freq':2}]"); + + // TODO: how do we make this into a 1-liner using "assertQ()" ??? 
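One possible answer to that TODO, sketched under the assumption that asserting correctlySpelled==false is acceptable coverage (it drops the explicit check that the suggestion entry is null):

// Hypothetical one-liner replacing the manual handler invocation and
// NamedList inspection below with a single JSON path assertion.
assertJQ(
    req("qt", rh, "spellcheck", "true", "q", "anotheq",
        "spellcheck.dictionary", "threshold",
        "spellcheck.extendedResults", "true"),
    "/spellcheck/correctlySpelled==false");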
+ SolrCore core = h.getCore(); + SearchComponent speller = core.getSearchComponent("spellcheck"); + assertTrue("speller is null and it shouldn't be", speller != null); + + ModifiableSolrParams params = new ModifiableSolrParams(); + params.add(SpellCheckComponent.COMPONENT_NAME, "true"); + params.add(SpellingParams.SPELLCHECK_COUNT, "10"); + params.add(SpellingParams.SPELLCHECK_DICT, "threshold"); + params.add(SpellingParams.SPELLCHECK_EXTENDED_RESULTS, "true"); + params.add(CommonParams.Q, "anotheq"); + + SolrRequestHandler handler = core.getRequestHandler("/spellCheckCompRH"); + SolrQueryResponse rsp = new SolrQueryResponse(); + rsp.addResponseHeader(new SimpleOrderedMap<>()); + SolrQueryRequest req = new LocalSolrQueryRequest(core, params); + handler.handleRequest(req, rsp); + req.close(); + NamedList values = rsp.getValues(); + NamedList spellCheck = (NamedList) values.get("spellcheck"); + NamedList suggestions = (NamedList) spellCheck.get("suggestions"); + assertTrue(suggestions.get("suggestion") == null); + assertTrue((Boolean) spellCheck.get("correctlySpelled") == false); + + params.remove(SpellingParams.SPELLCHECK_DICT); + params.add(SpellingParams.SPELLCHECK_DICT, "threshold_direct"); + rsp = new SolrQueryResponse(); + rsp.addResponseHeader(new SimpleOrderedMap<>()); + req = new LocalSolrQueryRequest(core, params); + handler.handleRequest(req, rsp); + req.close(); + values = rsp.getValues(); + spellCheck = (NamedList) values.get("spellcheck"); + suggestions = (NamedList) spellCheck.get("suggestions"); + assertTrue(suggestions.get("suggestion") == null); + assertTrue((Boolean) spellCheck.get("correctlySpelled") == false); + } } diff --git a/solr/core/src/test/org/apache/solr/handler/component/StatsComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/StatsComponentTest.java index 41eea09ec5e..0cd47fdfe6e 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/StatsComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/StatsComponentTest.java @@ -15,6 +15,9 @@ * limitations under the License. 
*/ package org.apache.solr.handler.component; + +import com.google.common.hash.HashFunction; +import com.tdunning.math.stats.AVLTreeDigest; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.text.DateFormat; @@ -32,9 +35,6 @@ import java.util.Locale; import java.util.Map; import java.util.TimeZone; - -import com.google.common.hash.HashFunction; -import com.tdunning.math.stats.AVLTreeDigest; import org.apache.commons.math3.util.Combinations; import org.apache.lucene.index.Term; import org.apache.lucene.queries.function.valuesource.QueryValueSource; @@ -58,17 +58,16 @@ import org.apache.solr.util.hll.HLL; import org.junit.BeforeClass; -/** - * Statistics Component Test (which also checks some equivalent json.facet functionality) - */ +/** Statistics Component Test (which also checks some equivalent json.facet functionality) */ public class StatsComponentTest extends SolrTestCaseJ4 { - final static String XPRE = "/response/lst[@name='stats']/"; + static final String XPRE = "/response/lst[@name='stats']/"; @BeforeClass public static void beforeClass() throws Exception { // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); initCore("solrconfig.xml", "schema11.xml"); } @@ -80,42 +79,65 @@ public void setUp() throws Exception { } public void testStats() throws Exception { - for (String f : new String[] { - "stats_i","stats_l","stats_f","stats_d", - "stats_ti","stats_tl","stats_tf","stats_td", - "stats_ti_dv","stats_tl_dv","stats_tf_dv","stats_td_dv", - "stats_ti_ni_dv","stats_tl_ni_dv","stats_tf_ni_dv","stats_td_ni_dv", - "stats_i_ni_p","stats_l_ni_p","stats_f_ni_p","stats_d_ni_p", - }) { + for (String f : + new String[] { + "stats_i", "stats_l", "stats_f", "stats_d", + "stats_ti", "stats_tl", "stats_tf", "stats_td", + "stats_ti_dv", "stats_tl_dv", "stats_tf_dv", "stats_td_dv", + "stats_ti_ni_dv", "stats_tl_ni_dv", "stats_tf_ni_dv", "stats_td_ni_dv", + "stats_i_ni_p", "stats_l_ni_p", "stats_f_ni_p", "stats_d_ni_p", + }) { // all of our checks should work with all of these params // ie: with or w/o these excluded filters, results should be the same. 
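The reason those filters can be present without changing the numbers is the tag/exclude pairing of local params: a filter tagged {!tag=fq1} is ignored by any stats.field that names it under ex=. A compact sketch of the mechanism, with illustrative field and tag names:

import org.apache.solr.client.solrj.SolrQuery;

class TaggedFilterExcludeSketch {
  // Sketch: the fq restricts which documents are returned, but the
  // stats.field opts out of it via ex=fq1, so statistics are computed
  // as if the filter were absent.
  SolrQuery build() {
    SolrQuery q = new SolrQuery("*:*");
    q.set("stats", "true");
    q.addFilterQuery("{!tag=fq1}id:1");
    q.set("stats.field", "{!ex=fq1}stats_i");
    return q;
  }
}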
- SolrParams[] baseParamsSet = new SolrParams[] { - // NOTE: doTestFieldStatisticsResult needs the full list of possible tags to exclude - params("stats.field", f, "stats", "true"), - params("stats.field", "{!ex=fq1,fq2}"+f, "stats", "true", - "fq", "{!tag=fq1}-id_i:[0 TO 2]", - "fq", "{!tag=fq2}-id_i:[2 TO 1000]"), - params("stats.field", "{!ex=fq1}"+f, "stats", "true", - "fq", "{!tag=fq1}id:1") - }; + SolrParams[] baseParamsSet = + new SolrParams[] { + // NOTE: doTestFieldStatisticsResult needs the full list of possible tags to exclude + params("stats.field", f, "stats", "true"), + params( + "stats.field", + "{!ex=fq1,fq2}" + f, + "stats", + "true", + "fq", + "{!tag=fq1}-id_i:[0 TO 2]", + "fq", + "{!tag=fq2}-id_i:[2 TO 1000]"), + params("stats.field", "{!ex=fq1}" + f, "stats", "true", "fq", "{!tag=fq1}id:1") + }; doTestFieldStatisticsResult(f, baseParamsSet); doTestFieldStatisticsMissingResult(f, baseParamsSet); doTestFacetStatisticsResult(f, baseParamsSet); doTestFacetStatisticsMissingResult(f, baseParamsSet); - + clearIndex(); assertU(commit()); } - for (String f : new String[] {"stats_ii", - "stats_tis","stats_tfs","stats_tls","stats_tds", // trie fields - "stats_tis_dv","stats_tfs_dv","stats_tls_dv","stats_tds_dv", // Doc Values - "stats_tis_ni_dv","stats_tfs_ni_dv","stats_tls_ni_dv","stats_tds_ni_dv", // Doc Values Not indexed - "stats_is_p", "stats_fs_p", "stats_ls_p", "stats_ds_p", // Point Fields - "stats_is_ni_p","stats_fs_ni_p","stats_ls_ni_p" // Point Doc Values Not indexed - }) { + for (String f : + new String[] { + "stats_ii", + "stats_tis", + "stats_tfs", + "stats_tls", + "stats_tds", // trie fields + "stats_tis_dv", + "stats_tfs_dv", + "stats_tls_dv", + "stats_tds_dv", // Doc Values + "stats_tis_ni_dv", + "stats_tfs_ni_dv", + "stats_tls_ni_dv", + "stats_tds_ni_dv", // Doc Values Not indexed + "stats_is_p", + "stats_fs_p", + "stats_ls_p", + "stats_ds_p", // Point Fields + "stats_is_ni_p", + "stats_fs_ni_p", + "stats_ls_ni_p" // Point Doc Values Not indexed + }) { doTestMVFieldStatisticsResult(f); clearIndex(); @@ -137,145 +159,172 @@ public void doTestFieldStatisticsResult(String f, SolrParams[] baseParamsSet) th assertU(adoc("id", "4", f, "-40")); assertU(commit()); - final String fpre = XPRE + "lst[@name='stats_fields']/lst[@name='"+f+"']/"; + final String fpre = XPRE + "lst[@name='stats_fields']/lst[@name='" + f + "']/"; final String key = "key_key"; - final String kpre = XPRE + "lst[@name='stats_fields']/lst[@name='"+key+"']/"; + final String kpre = XPRE + "lst[@name='stats_fields']/lst[@name='" + key + "']/"; // status should be the same regardless of baseParams for (SolrParams baseParams : baseParamsSet) { - for (String ct : new String[] {"stats.calcdistinct", "f."+f+".stats.calcdistinct"}) { - assertQ("test statistics values using: " + ct, - req(baseParams, "q", "*:*", ct, "true") - , fpre + "double[@name='min'][.='-40.0']" - , fpre + "double[@name='max'][.='-10.0']" - , fpre + "double[@name='sum'][.='-100.0']" - , fpre + "long[@name='count'][.='4']" - , fpre + "long[@name='missing'][.='0']" - , fpre + "long[@name='countDistinct'][.='4']" - , "count(" + fpre + "arr[@name='distinctValues']/*)=4" - , fpre + "double[@name='sumOfSquares'][.='3000.0']" - , fpre + "double[@name='mean'][.='-25.0']" - , fpre + "double[@name='stddev'][.='12.909944487358056']" - ); - - assertQ("test statistics w/fq using: " + ct, - req(baseParams, "q", "*:*", "fq", "-id:4", ct, "true") - , fpre + "double[@name='min'][.='-30.0']" - , fpre + "double[@name='max'][.='-10.0']" - , fpre + 
"double[@name='sum'][.='-60.0']" - , fpre + "long[@name='count'][.='3']" - , fpre + "long[@name='missing'][.='0']" - , fpre + "long[@name='countDistinct'][.='3']" - , "count(" + fpre + "arr[@name='distinctValues']/*)=3" - , fpre + "double[@name='sumOfSquares'][.='1400.0']" - , fpre + "double[@name='mean'][.='-20.0']" - , fpre + "double[@name='stddev'][.='10.0']" - ); - + for (String ct : new String[] {"stats.calcdistinct", "f." + f + ".stats.calcdistinct"}) { + assertQ( + "test statistics values using: " + ct, + req(baseParams, "q", "*:*", ct, "true"), + fpre + "double[@name='min'][.='-40.0']", + fpre + "double[@name='max'][.='-10.0']", + fpre + "double[@name='sum'][.='-100.0']", + fpre + "long[@name='count'][.='4']", + fpre + "long[@name='missing'][.='0']", + fpre + "long[@name='countDistinct'][.='4']", + "count(" + fpre + "arr[@name='distinctValues']/*)=4", + fpre + "double[@name='sumOfSquares'][.='3000.0']", + fpre + "double[@name='mean'][.='-25.0']", + fpre + "double[@name='stddev'][.='12.909944487358056']"); + + assertQ( + "test statistics w/fq using: " + ct, + req(baseParams, "q", "*:*", "fq", "-id:4", ct, "true"), + fpre + "double[@name='min'][.='-30.0']", + fpre + "double[@name='max'][.='-10.0']", + fpre + "double[@name='sum'][.='-60.0']", + fpre + "long[@name='count'][.='3']", + fpre + "long[@name='missing'][.='0']", + fpre + "long[@name='countDistinct'][.='3']", + "count(" + fpre + "arr[@name='distinctValues']/*)=3", + fpre + "double[@name='sumOfSquares'][.='1400.0']", + fpre + "double[@name='mean'][.='-20.0']", + fpre + "double[@name='stddev'][.='10.0']"); + // now do both in a single query - assertQ("test statistics w & w/fq via key override using: " + ct, - req(baseParams, "q", "*:*", ct, "true", - "fq", "{!tag=key_ex_tag}-id:4", - "stats.field", "{!key="+key+" ex=key_ex_tag,"+all_possible_ex+"}"+f) - - // field name key, fq is applied - , fpre + "double[@name='min'][.='-30.0']" - , fpre + "double[@name='max'][.='-10.0']" - , fpre + "double[@name='sum'][.='-60.0']" - , fpre + "long[@name='count'][.='3']" - , fpre + "long[@name='missing'][.='0']" - , fpre + "long[@name='countDistinct'][.='3']" - , "count(" + fpre + "arr[@name='distinctValues']/*)=3" - , fpre + "double[@name='sumOfSquares'][.='1400.0']" - , fpre + "double[@name='mean'][.='-20.0']" - , fpre + "double[@name='stddev'][.='10.0']" - - // overridden key, fq is excluded - , kpre + "double[@name='min'][.='-40.0']" - , kpre + "double[@name='max'][.='-10.0']" - , kpre + "double[@name='sum'][.='-100.0']" - , kpre + "long[@name='count'][.='4']" - , kpre + "long[@name='missing'][.='0']" - , kpre + "long[@name='countDistinct'][.='4']" - , "count(" + kpre + "arr[@name='distinctValues']/*)=4" - , kpre + "double[@name='sumOfSquares'][.='3000.0']" - , kpre + "double[@name='mean'][.='-25.0']" - , kpre + "double[@name='stddev'][.='12.909944487358056']" - - ); + assertQ( + "test statistics w & w/fq via key override using: " + ct, + req( + baseParams, + "q", + "*:*", + ct, + "true", + "fq", + "{!tag=key_ex_tag}-id:4", + "stats.field", + "{!key=" + key + " ex=key_ex_tag," + all_possible_ex + "}" + f) + + // field name key, fq is applied + , + fpre + "double[@name='min'][.='-30.0']", + fpre + "double[@name='max'][.='-10.0']", + fpre + "double[@name='sum'][.='-60.0']", + fpre + "long[@name='count'][.='3']", + fpre + "long[@name='missing'][.='0']", + fpre + "long[@name='countDistinct'][.='3']", + "count(" + fpre + "arr[@name='distinctValues']/*)=3", + fpre + "double[@name='sumOfSquares'][.='1400.0']", + fpre + 
"double[@name='mean'][.='-20.0']", + fpre + "double[@name='stddev'][.='10.0']" + + // overridden key, fq is excluded + , + kpre + "double[@name='min'][.='-40.0']", + kpre + "double[@name='max'][.='-10.0']", + kpre + "double[@name='sum'][.='-100.0']", + kpre + "long[@name='count'][.='4']", + kpre + "long[@name='missing'][.='0']", + kpre + "long[@name='countDistinct'][.='4']", + "count(" + kpre + "arr[@name='distinctValues']/*)=4", + kpre + "double[@name='sumOfSquares'][.='3000.0']", + kpre + "double[@name='mean'][.='-25.0']", + kpre + "double[@name='stddev'][.='12.909944487358056']"); } } // we should be able to compute exact same stats for a field even - // when we specify it using the "field()" function, or use other + // when we specify it using the "field()" function, or use other // identify equivalent functions - for (String param : new String[] { - // bare - "{!key="+key+" ex=key_ex_tag}" + f, - "{!key="+key+" ex=key_ex_tag v="+f+"}", - // field func - "{!lucene key="+key+" ex=key_ex_tag}_val_:\"field("+f+")\"", - "{!func key="+key+" ex=key_ex_tag}field("+f+")", - "{!type=func key="+key+" ex=key_ex_tag}field("+f+")", - "{!type=func key="+key+" ex=key_ex_tag v=field("+f+")}", - "{!type=func key="+key+" ex=key_ex_tag v='field("+f+")'}", - // identity math functions - "{!type=func key="+key+" ex=key_ex_tag v='sum(0,"+f+")'}", - "{!type=func key="+key+" ex=key_ex_tag v='product(1,"+f+")'}", - }) { - - assertQ("test statistics over field specified as a function: " + param, - // NOTE: baseParams aren't used, we're looking at the function - req("q", "*:*", "stats", "true", "stats.calcdistinct", "true", - "fq", "{!tag=key_ex_tag}-id:4", - "stats.field", param) - - , kpre + "double[@name='min'][.='-40.0']" - , kpre + "double[@name='max'][.='-10.0']" - , kpre + "double[@name='sum'][.='-100.0']" - , kpre + "long[@name='count'][.='4']" - , kpre + "long[@name='missing'][.='0']" - , kpre + "long[@name='countDistinct'][.='4']" - , "count(" + kpre + "arr[@name='distinctValues']/*)=4" - , kpre + "double[@name='sumOfSquares'][.='3000.0']" - , kpre + "double[@name='mean'][.='-25.0']" - , kpre + "double[@name='stddev'][.='12.909944487358056']" - - ); + for (String param : + new String[] { + // bare + "{!key=" + key + " ex=key_ex_tag}" + f, + "{!key=" + key + " ex=key_ex_tag v=" + f + "}", + // field func + "{!lucene key=" + key + " ex=key_ex_tag}_val_:\"field(" + f + ")\"", + "{!func key=" + key + " ex=key_ex_tag}field(" + f + ")", + "{!type=func key=" + key + " ex=key_ex_tag}field(" + f + ")", + "{!type=func key=" + key + " ex=key_ex_tag v=field(" + f + ")}", + "{!type=func key=" + key + " ex=key_ex_tag v='field(" + f + ")'}", + // identity math functions + "{!type=func key=" + key + " ex=key_ex_tag v='sum(0," + f + ")'}", + "{!type=func key=" + key + " ex=key_ex_tag v='product(1," + f + ")'}", + }) { + + assertQ( + "test statistics over field specified as a function: " + param, + // NOTE: baseParams aren't used, we're looking at the function + req( + "q", + "*:*", + "stats", + "true", + "stats.calcdistinct", + "true", + "fq", + "{!tag=key_ex_tag}-id:4", + "stats.field", + param), + kpre + "double[@name='min'][.='-40.0']", + kpre + "double[@name='max'][.='-10.0']", + kpre + "double[@name='sum'][.='-100.0']", + kpre + "long[@name='count'][.='4']", + kpre + "long[@name='missing'][.='0']", + kpre + "long[@name='countDistinct'][.='4']", + "count(" + kpre + "arr[@name='distinctValues']/*)=4", + kpre + "double[@name='sumOfSquares'][.='3000.0']", + kpre + "double[@name='mean'][.='-25.0']", + kpre + 
"double[@name='stddev'][.='12.909944487358056']"); } - + // now get stats over a non-trivial function on our (single) field String func = "product(2, " + f + ")"; - assertQ("test function statistics & key override", - // NOTE: baseParams aren't used, we're looking at the function - req("q", "*:*", "stats", "true", "stats.calcdistinct", "true", - "fq", "{!tag=key_ex_tag}-id:4", - "stats.field", "{!func key="+key+" ex=key_ex_tag}"+func) - - , kpre + "double[@name='min'][.='-80.0']" - , kpre + "double[@name='max'][.='-20.0']" - , kpre + "double[@name='sum'][.='-200.0']" - , kpre + "long[@name='count'][.='4']" - , kpre + "long[@name='missing'][.='0']" - , kpre + "long[@name='countDistinct'][.='4']" - , "count(" + kpre + "arr[@name='distinctValues']/*)=4" - , kpre + "double[@name='sumOfSquares'][.='12000.0']" - , kpre + "double[@name='mean'][.='-50.0']" - , kpre + "double[@name='stddev'][.='25.81988897471611']" - ); + assertQ( + "test function statistics & key override", + // NOTE: baseParams aren't used, we're looking at the function + req( + "q", + "*:*", + "stats", + "true", + "stats.calcdistinct", + "true", + "fq", + "{!tag=key_ex_tag}-id:4", + "stats.field", + "{!func key=" + key + " ex=key_ex_tag}" + func), + kpre + "double[@name='min'][.='-80.0']", + kpre + "double[@name='max'][.='-20.0']", + kpre + "double[@name='sum'][.='-200.0']", + kpre + "long[@name='count'][.='4']", + kpre + "long[@name='missing'][.='0']", + kpre + "long[@name='countDistinct'][.='4']", + "count(" + kpre + "arr[@name='distinctValues']/*)=4", + kpre + "double[@name='sumOfSquares'][.='12000.0']", + kpre + "double[@name='mean'][.='-50.0']", + kpre + "double[@name='stddev'][.='25.81988897471611']"); // simple cardinality over a numeric field - assertQ("test function statistics & key override", - // NOTE: baseParams aren't used, we're looking only at the cardinality - req("q", "*:*", "stats", "true", - "fq", "{!tag=key_ex_tag}-id:4", - "stats.field", "{!key="+key+" cardinality=true}"+f) - - , kpre + "long[@name='cardinality'][.='3']" - , "count(" + kpre + "/*)=1" - ); + assertQ( + "test function statistics & key override", + // NOTE: baseParams aren't used, we're looking only at the cardinality + req( + "q", + "*:*", + "stats", + "true", + "fq", + "{!tag=key_ex_tag}-id:4", + "stats.field", + "{!key=" + key + " cardinality=true}" + f), + kpre + "long[@name='cardinality'][.='3']", + "count(" + kpre + "/*)=1"); } public void doTestMVFieldStatisticsResult(String f) throws Exception { @@ -287,106 +336,134 @@ public void doTestMVFieldStatisticsResult(String f) throws Exception { assertU(adoc("id", "5", "active_s", "false")); assertU(adoc("id", "6", "active_s", "false")); assertU(adoc("id", "7", "active_s", "true")); - + assertU(commit()); // with or w/o these excluded filters, results should be the same - for (SolrParams baseParams : new SolrParams[] { - params("stats.field", f, "stats", "true"), - params("stats.field", "{!ex=fq1}"+f, "stats", "true", - "fq", "{!tag=fq1}id:1"), - params("stats.field", "{!ex=fq1,fq2}"+f, "stats", "true", - "fq", "{!tag=fq1}-id_i:[0 TO 2]", - "fq", "{!tag=fq2}-id_i:[2 TO 1000]"), - params("json.facet", // note: no distinctValues support and not comparing min/max values - "{min:'min("+f+")',count:'countvals("+f+")',missing:'missing("+f+")',max:'max("+f+")', sum:'sum("+f+")', " + - " countDistinct:'unique("+f+")', sumOfSquares:'sumsq("+f+")', mean:'avg("+f+")', stddev:'stddev("+f+")' }") - }) { + for (SolrParams baseParams : + new SolrParams[] { + params("stats.field", f, "stats", "true"), + 
params("stats.field", "{!ex=fq1}" + f, "stats", "true", "fq", "{!tag=fq1}id:1"), + params( + "stats.field", + "{!ex=fq1,fq2}" + f, + "stats", + "true", + "fq", + "{!tag=fq1}-id_i:[0 TO 2]", + "fq", + "{!tag=fq2}-id_i:[2 TO 1000]"), + params( + "json.facet", // note: no distinctValues support and not comparing min/max values + "{min:'min(" + + f + + ")',count:'countvals(" + + f + + ")',missing:'missing(" + + f + + ")',max:'max(" + + f + + ")', sum:'sum(" + + f + + ")', " + + " countDistinct:'unique(" + + f + + ")', sumOfSquares:'sumsq(" + + f + + ")', mean:'avg(" + + f + + ")', stddev:'stddev(" + + f + + ")' }") + }) { // easy switch to know if/when we are using json.facet which doesn't support some options final boolean json = (null != baseParams.get("json.facet")); - assertQ("test statistics values", - req(baseParams, "q", "*:*", "stats.calcdistinct", "true") - , json ? "//*" : "//double[@name='min'][.='-100.0']" - , json ? "//*" : "//double[@name='max'][.='200.0']" - , "//double[@name='sum'][.='9.0']" - , "//long[@name='count'][.='8']" - , "//long[@name='missing'][.='3']" - , "//long[@name='countDistinct'][.='8']" - , json ? "//*" : "count(//arr[@name='distinctValues']/*)=8" - , "//double[@name='sumOfSquares'][.='53101.0']" - , "//double[@name='mean'][.='1.125']" - ,"//double[@name='stddev'][.='87.08852228787508']" - ); - - assertQ("test statistics values w/fq", - req(baseParams, "fq", "-id:1", - "q", "*:*", "stats.calcdistinct", "true") - , json ? "//*" : "//double[@name='min'][.='-40.0']" - , json ? "//*" : "//double[@name='max'][.='200.0']" - , "//double[@name='sum'][.='119.0']" - , "//long[@name='count'][.='6']" - , "//long[@name='missing'][.='3']" - , "//long[@name='countDistinct'][.='6']" - , json ? "//*" : "count(//arr[@name='distinctValues']/*)=6" - , "//double[@name='sumOfSquares'][.='43001.0']" - , "//double[@name='mean'][.='19.833333333333332']" - ,"//double[@name='stddev'][.='90.15634568163611']" - ); - - assertQ("test stdDev", - req(baseParams, "q", "id:5", "rows", "0") - ,"//double[@name='stddev'][.='0.0']" - ); - + assertQ( + "test statistics values", + req(baseParams, "q", "*:*", "stats.calcdistinct", "true"), + json ? "//*" : "//double[@name='min'][.='-100.0']", + json ? "//*" : "//double[@name='max'][.='200.0']", + "//double[@name='sum'][.='9.0']", + "//long[@name='count'][.='8']", + "//long[@name='missing'][.='3']", + "//long[@name='countDistinct'][.='8']", + json ? "//*" : "count(//arr[@name='distinctValues']/*)=8", + "//double[@name='sumOfSquares'][.='53101.0']", + "//double[@name='mean'][.='1.125']", + "//double[@name='stddev'][.='87.08852228787508']"); + + assertQ( + "test statistics values w/fq", + req(baseParams, "fq", "-id:1", "q", "*:*", "stats.calcdistinct", "true"), + json ? "//*" : "//double[@name='min'][.='-40.0']", + json ? "//*" : "//double[@name='max'][.='200.0']", + "//double[@name='sum'][.='119.0']", + "//long[@name='count'][.='6']", + "//long[@name='missing'][.='3']", + "//long[@name='countDistinct'][.='6']", + json ? 
"//*" : "count(//arr[@name='distinctValues']/*)=6", + "//double[@name='sumOfSquares'][.='43001.0']", + "//double[@name='mean'][.='19.833333333333332']", + "//double[@name='stddev'][.='90.15634568163611']"); + + assertQ( + "test stdDev", + req(baseParams, "q", "id:5", "rows", "0"), + "//double[@name='stddev'][.='0.0']"); + if (!json) { // checking stats.facet makes no sense for json faceting - assertQ("test stats.facet (using boolean facet field)", + assertQ( + "test stats.facet (using boolean facet field)", req(baseParams, "q", "*:*", "stats.calcdistinct", "true", "stats.facet", "active_s") // baseline - , "//lst[@name='"+f+"']/double[@name='min'][.='-100.0']" - , "//lst[@name='"+f+"']/double[@name='max'][.='200.0']" - , "//lst[@name='"+f+"']/double[@name='sum'][.='9.0']" - , "//lst[@name='"+f+"']/long[@name='count'][.='8']" - , "//lst[@name='"+f+"']/long[@name='missing'][.='3']" - , "//lst[@name='"+f+"']/long[@name='countDistinct'][.='8']" - , "count(//lst[@name='" + f + "']/arr[@name='distinctValues']/*)=8" - , "//lst[@name='"+f+"']/double[@name='sumOfSquares'][.='53101.0']" - , "//lst[@name='"+f+"']/double[@name='mean'][.='1.125']" - , "//lst[@name='"+f+"']/double[@name='stddev'][.='87.08852228787508']" + , + "//lst[@name='" + f + "']/double[@name='min'][.='-100.0']", + "//lst[@name='" + f + "']/double[@name='max'][.='200.0']", + "//lst[@name='" + f + "']/double[@name='sum'][.='9.0']", + "//lst[@name='" + f + "']/long[@name='count'][.='8']", + "//lst[@name='" + f + "']/long[@name='missing'][.='3']", + "//lst[@name='" + f + "']/long[@name='countDistinct'][.='8']", + "count(//lst[@name='" + f + "']/arr[@name='distinctValues']/*)=8", + "//lst[@name='" + f + "']/double[@name='sumOfSquares'][.='53101.0']", + "//lst[@name='" + f + "']/double[@name='mean'][.='1.125']", + "//lst[@name='" + f + "']/double[@name='stddev'][.='87.08852228787508']" // facet 'true' - , "//lst[@name='true']/double[@name='min'][.='-100.0']" - , "//lst[@name='true']/double[@name='max'][.='200.0']" - , "//lst[@name='true']/double[@name='sum'][.='70.0']" - , "//lst[@name='true']/long[@name='count'][.='4']" - , "//lst[@name='true']/long[@name='missing'][.='1']" - , "//lst[@name='true']//long[@name='countDistinct'][.='4']" - , "count(//lst[@name='true']/arr[@name='distinctValues']/*)=4" - , "//lst[@name='true']/double[@name='sumOfSquares'][.='50500.0']" - , "//lst[@name='true']/double[@name='mean'][.='17.5']" - , "//lst[@name='true']/double[@name='stddev'][.='128.16005617976296']" + , + "//lst[@name='true']/double[@name='min'][.='-100.0']", + "//lst[@name='true']/double[@name='max'][.='200.0']", + "//lst[@name='true']/double[@name='sum'][.='70.0']", + "//lst[@name='true']/long[@name='count'][.='4']", + "//lst[@name='true']/long[@name='missing'][.='1']", + "//lst[@name='true']//long[@name='countDistinct'][.='4']", + "count(//lst[@name='true']/arr[@name='distinctValues']/*)=4", + "//lst[@name='true']/double[@name='sumOfSquares'][.='50500.0']", + "//lst[@name='true']/double[@name='mean'][.='17.5']", + "//lst[@name='true']/double[@name='stddev'][.='128.16005617976296']" // facet 'false' - , "//lst[@name='false']/double[@name='min'][.='-40.0']" - , "//lst[@name='false']/double[@name='max'][.='10.0']" - , "//lst[@name='false']/double[@name='sum'][.='-61.0']" - , "//lst[@name='false']/long[@name='count'][.='4']" - , "//lst[@name='false']/long[@name='missing'][.='2']" - , "//lst[@name='true']//long[@name='countDistinct'][.='4']" - , "count(//lst[@name='true']/arr[@name='distinctValues']/*)=4" - , 
"//lst[@name='false']/double[@name='sumOfSquares'][.='2601.0']" - , "//lst[@name='false']/double[@name='mean'][.='-15.25']" - , "//lst[@name='false']/double[@name='stddev'][.='23.59908190304586']" - ); + , + "//lst[@name='false']/double[@name='min'][.='-40.0']", + "//lst[@name='false']/double[@name='max'][.='10.0']", + "//lst[@name='false']/double[@name='sum'][.='-61.0']", + "//lst[@name='false']/long[@name='count'][.='4']", + "//lst[@name='false']/long[@name='missing'][.='2']", + "//lst[@name='true']//long[@name='countDistinct'][.='4']", + "count(//lst[@name='true']/arr[@name='distinctValues']/*)=4", + "//lst[@name='false']/double[@name='sumOfSquares'][.='2601.0']", + "//lst[@name='false']/double[@name='mean'][.='-15.25']", + "//lst[@name='false']/double[@name='stddev'][.='23.59908190304586']"); } } // cardinality - for (SolrParams baseParams : new SolrParams[] { - params("stats.field", "{!cardinality=true}"+f, "stats", "true"), - params("json.facet", "{cardinality:'hll("+f+")'}") - }) { - assertQ("test cardinality", + for (SolrParams baseParams : + new SolrParams[] { + params("stats.field", "{!cardinality=true}" + f, "stats", "true"), + params("json.facet", "{cardinality:'hll(" + f + ")'}") + }) { + assertQ( + "test cardinality", req(baseParams, "q", "*:*", "rows", "0"), - "//long[@name='cardinality'][.='8']" - ); + "//long[@name='cardinality'][.='8']"); } } @@ -404,53 +481,81 @@ public void testFieldStatisticsResultsStringField() throws Exception { args.put(CommonParams.Q, "*:*"); args.put(StatsParams.STATS, "true"); args.put(StatsParams.STATS_FIELD, f); - args.put("f." + f +".stats.calcdistinct","true"); + args.put("f." + f + ".stats.calcdistinct", "true"); args.put("indent", "true"); - for (SolrParams baseParams : new SolrParams[] { - params("stats.field", f, "stats", "true", "f." + f +".stats.calcdistinct","true"), - params("json.facet", // note: no distinctValues support - "{min:'min("+f+")',count:'countvals("+f+")',missing:'missing("+f+")',max:'max("+f+")', " + - " countDistinct:'unique("+f+")'}") - }) { + for (SolrParams baseParams : + new SolrParams[] { + params("stats.field", f, "stats", "true", "f." + f + ".stats.calcdistinct", "true"), + params( + "json.facet", // note: no distinctValues support + "{min:'min(" + + f + + ")',count:'countvals(" + + f + + ")',missing:'missing(" + + f + + ")',max:'max(" + + f + + ")', " + + " countDistinct:'unique(" + + f + + ")'}") + }) { final boolean json = (null != baseParams.get("json.facet")); - assertQ("test string statistics values", req(baseParams, "q", "*:*", "rows", "0"), + assertQ( + "test string statistics values", + req(baseParams, "q", "*:*", "rows", "0"), "//str[@name='min'][.='string1']", "//str[@name='max'][.='string3']", "//long[@name='count'][.='3']", "//long[@name='missing'][.='1']", "//long[@name='countDistinct'][.='3']", - json ? "//*": "count(//arr[@name='distinctValues']/str)=3"); // SOLR-14011 + json ? 
"//*" : "count(//arr[@name='distinctValues']/str)=3"); // SOLR-14011 } // string field cardinality - for (SolrParams baseParams : new SolrParams[] { - params("stats.field", "{!cardinality=true}"+f, "stats", "true"), - params("json.facet", "{cardinality:'hll("+f+")'}") - }) { - assertQ("test string cardinality", + for (SolrParams baseParams : + new SolrParams[] { + params("stats.field", "{!cardinality=true}" + f, "stats", "true"), + params("json.facet", "{cardinality:'hll(" + f + ")'}") + }) { + assertQ( + "test string cardinality", req(baseParams, "q", "*:*", "rows", "0"), - "//long[@name='cardinality'][.='3']" - ); + "//long[@name='cardinality'][.='3']"); } - String strFunc = "strdist(\"string22\","+ f +",edit)"; + String strFunc = "strdist(\"string22\"," + f + ",edit)"; // stats over a string function - for (SolrParams baseParams : new SolrParams[] { - params("stats.field", "{!func}"+strFunc, "stats", "true"), - params("json.facet", // note: no function support for unique - "{min:'min("+strFunc+")',count:'countvals("+strFunc+")',missing:'missing("+strFunc+")'," + - "sum:'sum("+ strFunc +")', max:'max("+strFunc+")'}") - }) { + for (SolrParams baseParams : + new SolrParams[] { + params("stats.field", "{!func}" + strFunc, "stats", "true"), + params( + "json.facet", // note: no function support for unique + "{min:'min(" + + strFunc + + ")',count:'countvals(" + + strFunc + + ")',missing:'missing(" + + strFunc + + ")'," + + "sum:'sum(" + + strFunc + + ")', max:'max(" + + strFunc + + ")'}") + }) { final boolean json = (null != baseParams.get("json.facet")); - assertQ("strdist func stats", - req(baseParams, "q", "*:*", "rows", "0") - , "//double[@name='min'][.='0.75']" - , "//double[@name='max'][.='0.875']" - , "//double[@name='sum'][.='2.375']" - , json? "//*": "//long[@name='count'][.='3']" // SOLR-14010 - ,"//long[@name='missing'][.='1']" - ); + assertQ( + "strdist func stats", + req(baseParams, "q", "*:*", "rows", "0"), + "//double[@name='min'][.='0.75']", + "//double[@name='max'][.='0.875']", + "//double[@name='sum'][.='2.375']", + json ? 
"//*" : "//long[@name='count'][.='3']" // SOLR-14010 + , + "//long[@name='missing'][.='1']"); } } @@ -472,25 +577,27 @@ public void testFieldStatisticsResultsDateField() throws Exception { args.put(CommonParams.Q, "*:*"); args.put(StatsParams.STATS, "true"); args.put(StatsParams.STATS_FIELD, "active_dt"); - args.put("f.active_dt.stats.calcdistinct","true"); + args.put("f.active_dt.stats.calcdistinct", "true"); args.put("indent", "true"); SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); - assertQ("test date statistics values", req, - "//long[@name='count'][.='2']", - "//long[@name='missing'][.='1']", - "//date[@name='min'][.='1970-01-02T10:17:36Z']", - "//date[@name='max'][.='1970-01-12T10:20:54Z']", - "//long[@name='countDistinct'][.='2']", - "count(//arr[@name='distinctValues']/date)=2" + assertQ( + "test date statistics values", + req, + "//long[@name='count'][.='2']", + "//long[@name='missing'][.='1']", + "//date[@name='min'][.='1970-01-02T10:17:36Z']", + "//date[@name='max'][.='1970-01-12T10:20:54Z']", + "//long[@name='countDistinct'][.='2']", + "count(//arr[@name='distinctValues']/date)=2" // "//date[@name='sum'][.='1970-01-13T20:38:30Z']", // sometimes 29.999Z // "//date[@name='mean'][.='1970-01-07T10:19:15Z']" // sometiems 14.999Z - ); - - assertQ("cardinality", - req("q","*:*", "stats", "true", "stats.field", "{!cardinality=true}active_dt") - , "//lst[@name='active_dt']/long[@name='cardinality'][.='2']"); + ); + assertQ( + "cardinality", + req("q", "*:*", "stats", "true", "stats.field", "{!cardinality=true}active_dt"), + "//lst[@name='active_dt']/long[@name='cardinality'][.='2']"); } // Check for overflow of sumOfSquares @@ -507,35 +614,37 @@ public void testFieldStatisticsResultsDateFieldOverflow() throws Exception { args.put("indent", "true"); SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); - assertQ("test date statistics values", req, - "//long[@name='count'][.='1']", - "//date[@name='min'][.='2015-12-14T09:00:00Z']", - "//date[@name='max'][.='2015-12-14T09:00:00Z']", - "//double[@name='sum'][.='1.4500836E12']", - "//date[@name='mean'][.='2015-12-14T09:00:00Z']", - "//double[@name='sumOfSquares'][.='" + Double.toString(2102742446988960000000000.0)+"']" - ); + assertQ( + "test date statistics values", + req, + "//long[@name='count'][.='1']", + "//date[@name='min'][.='2015-12-14T09:00:00Z']", + "//date[@name='max'][.='2015-12-14T09:00:00Z']", + "//double[@name='sum'][.='1.4500836E12']", + "//date[@name='mean'][.='2015-12-14T09:00:00Z']", + "//double[@name='sumOfSquares'][.='" + Double.toString(2102742446988960000000000.0) + "']"); assertU(adoc("id", "2", "active_dt", "2115-12-14T09:00:00Z")); assertU(adoc("id", "3", "active_dt", "2215-12-14T09:00:00Z")); assertU(commit()); - assertQ("test date statistics values", req, + assertQ( + "test date statistics values", + req, "//long[@name='count'][.='3']", "//date[@name='min'][.='2015-12-14T09:00:00Z']", "//date[@name='max'][.='2215-12-14T09:00:00Z']", "//double[@name='sum'][.='1.38172716E13']", "//date[@name='mean'][.='2115-12-14T09:00:00Z']", - "//double[@name='sumOfSquares'][.='" + Double.toString(83555549895529430000000000.0)+"']", + "//double[@name='sumOfSquares'][.='" + Double.toString(83555549895529430000000000.0) + "']", // The following number matches the number returned by the current solr // implementation of standard deviation. Should be 3155673600000. // That number is not precise, and the implementation should be fixed. 
- "//double[@name='stddev'][.='" + Double.toString(3155673599999.999)+"']" - ); + "//double[@name='stddev'][.='" + Double.toString(3155673599999.999) + "']"); } - - public void doTestFieldStatisticsMissingResult(String f, SolrParams[] baseParamsSet) throws Exception { + public void doTestFieldStatisticsMissingResult(String f, SolrParams[] baseParamsSet) + throws Exception { assertU(adoc("id", "1", f, "-10")); assertU(adoc("id", "2", f, "-20")); assertU(commit()); @@ -543,219 +652,262 @@ public void doTestFieldStatisticsMissingResult(String f, SolrParams[] baseParams assertU(adoc("id", "4", f, "-40")); assertU(commit()); - final String fpre = XPRE + "lst[@name='stats_fields']/lst[@name='"+f+"']/"; + final String fpre = XPRE + "lst[@name='stats_fields']/lst[@name='" + f + "']/"; final String key = "key_key"; - final String kpre = XPRE + "lst[@name='stats_fields']/lst[@name='"+key+"']/"; + final String kpre = XPRE + "lst[@name='stats_fields']/lst[@name='" + key + "']/"; // status should be the same regardless of baseParams for (SolrParams baseParams : baseParamsSet) { SolrQueryRequest request = req(baseParams, "q", "*:*", "stats.calcdistinct", "true"); - assertQ("test statistics values", request - , "//double[@name='min'][.='-40.0']" - , "//double[@name='max'][.='-10.0']" - , "//double[@name='sum'][.='-70.0']" - , "//long[@name='count'][.='3']" - , "//long[@name='missing'][.='1']" - , "//long[@name='countDistinct'][.='3']" - , "count(//arr[@name='distinctValues']/*)=3" - , "//double[@name='sumOfSquares'][.='2100.0']" - , "//double[@name='mean'][.='-23.333333333333332']" - , "//double[@name='stddev'][.='15.275252316519467']" - ); + assertQ( + "test statistics values", + request, + "//double[@name='min'][.='-40.0']", + "//double[@name='max'][.='-10.0']", + "//double[@name='sum'][.='-70.0']", + "//long[@name='count'][.='3']", + "//long[@name='missing'][.='1']", + "//long[@name='countDistinct'][.='3']", + "count(//arr[@name='distinctValues']/*)=3", + "//double[@name='sumOfSquares'][.='2100.0']", + "//double[@name='mean'][.='-23.333333333333332']", + "//double[@name='stddev'][.='15.275252316519467']"); } // we should be able to compute exact same stats for a field even - // when we specify it using the "field()" function, or use other + // when we specify it using the "field()" function, or use other // identify equivalent functions - for (String param : new String[] { - // bare - "{!key="+key+" ex=key_ex_tag}" + f, - "{!key="+key+" ex=key_ex_tag v="+f+"}", - // field func - "{!lucene key="+key+" ex=key_ex_tag}_val_:\"field("+f+")\"", - "{!func key="+key+" ex=key_ex_tag}field("+f+")", - "{!type=func key="+key+" ex=key_ex_tag}field("+f+")", - "{!type=func key="+key+" ex=key_ex_tag v=field("+f+")}", - "{!type=func key="+key+" ex=key_ex_tag v='field("+f+")'}", - // identity math functions - "{!type=func key="+key+" ex=key_ex_tag v='sum(0,"+f+")'}", - "{!type=func key="+key+" ex=key_ex_tag v='product(1,"+f+")'}", - }) { - - assertQ("test statistics over field specified as a function: " + param, - // NOTE: baseParams aren't used, we're looking at the function - req("q", "*:*", "stats", "true", "stats.calcdistinct", "true", - "fq", "{!tag=key_ex_tag}-id:4", - "stats.field", param) - - , kpre + "double[@name='min'][.='-40.0']" - , kpre + "double[@name='max'][.='-10.0']" - , kpre + "double[@name='sum'][.='-70.0']" - , kpre + "long[@name='count'][.='3']" - , kpre + "long[@name='missing'][.='1']" - , kpre + "long[@name='countDistinct'][.='3']" - , "count(" + kpre + "arr[@name='distinctValues']/*)=3" - , 
kpre + "double[@name='sumOfSquares'][.='2100.0']" - , kpre + "double[@name='mean'][.='-23.333333333333332']" - , kpre + "double[@name='stddev'][.='15.275252316519467']" - - ); + for (String param : + new String[] { + // bare + "{!key=" + key + " ex=key_ex_tag}" + f, + "{!key=" + key + " ex=key_ex_tag v=" + f + "}", + // field func + "{!lucene key=" + key + " ex=key_ex_tag}_val_:\"field(" + f + ")\"", + "{!func key=" + key + " ex=key_ex_tag}field(" + f + ")", + "{!type=func key=" + key + " ex=key_ex_tag}field(" + f + ")", + "{!type=func key=" + key + " ex=key_ex_tag v=field(" + f + ")}", + "{!type=func key=" + key + " ex=key_ex_tag v='field(" + f + ")'}", + // identity math functions + "{!type=func key=" + key + " ex=key_ex_tag v='sum(0," + f + ")'}", + "{!type=func key=" + key + " ex=key_ex_tag v='product(1," + f + ")'}", + }) { + + assertQ( + "test statistics over field specified as a function: " + param, + // NOTE: baseParams aren't used, we're looking at the function + req( + "q", + "*:*", + "stats", + "true", + "stats.calcdistinct", + "true", + "fq", + "{!tag=key_ex_tag}-id:4", + "stats.field", + param), + kpre + "double[@name='min'][.='-40.0']", + kpre + "double[@name='max'][.='-10.0']", + kpre + "double[@name='sum'][.='-70.0']", + kpre + "long[@name='count'][.='3']", + kpre + "long[@name='missing'][.='1']", + kpre + "long[@name='countDistinct'][.='3']", + "count(" + kpre + "arr[@name='distinctValues']/*)=3", + kpre + "double[@name='sumOfSquares'][.='2100.0']", + kpre + "double[@name='mean'][.='-23.333333333333332']", + kpre + "double[@name='stddev'][.='15.275252316519467']"); } - } public void doTestFacetStatisticsResult(String f, SolrParams[] baseParamsSet) throws Exception { - assertU(adoc("id", "1", f, "10", "active_s", "true", "other_s", "foo")); - assertU(adoc("id", "2", f, "20", "active_s", "true", "other_s", "bar")); + assertU(adoc("id", "1", f, "10", "active_s", "true", "other_s", "foo")); + assertU(adoc("id", "2", f, "20", "active_s", "true", "other_s", "bar")); assertU(commit()); assertU(adoc("id", "3", f, "30", "active_s", "false", "other_s", "foo")); assertU(adoc("id", "4", f, "40", "active_s", "false", "other_s", "foo")); assertU(commit()); - - final String pre = "//lst[@name='stats_fields']/lst[@name='"+f+"']/lst[@name='facets']/lst[@name='active_s']"; + + final String pre = + "//lst[@name='stats_fields']/lst[@name='" + + f + + "']/lst[@name='facets']/lst[@name='active_s']"; // status should be the same regardless of baseParams for (SolrParams baseParams : baseParamsSet) { - assertQ("test value for active_s=true", - req(baseParams, - "q", "*:*", "stats.calcdistinct", "true", - "stats.facet", "active_s", "stats.facet", "other_s") - , "*[count("+pre+")=1]" - , pre+"/lst[@name='true']/double[@name='min'][.='10.0']" - , pre+"/lst[@name='true']/double[@name='max'][.='20.0']" - , pre+"/lst[@name='true']/double[@name='sum'][.='30.0']" - , pre+"/lst[@name='true']/long[@name='count'][.='2']" - , pre+"/lst[@name='true']/long[@name='missing'][.='0']" - , pre + "/lst[@name='true']/long[@name='countDistinct'][.='2']" - , "count(" + pre + "/lst[@name='true']/arr[@name='distinctValues']/*)=2" - , pre+"/lst[@name='true']/double[@name='sumOfSquares'][.='500.0']" - , pre+"/lst[@name='true']/double[@name='mean'][.='15.0']" - , pre+"/lst[@name='true']/double[@name='stddev'][.='7.0710678118654755']" - ); - - assertQ("test value for active_s=false", - req(baseParams, "q", "*:*", "stats.calcdistinct", "true", "stats.facet", "active_s") - , 
pre+"/lst[@name='false']/double[@name='min'][.='30.0']" - , pre+"/lst[@name='false']/double[@name='max'][.='40.0']" - , pre+"/lst[@name='false']/double[@name='sum'][.='70.0']" - , pre+"/lst[@name='false']/long[@name='count'][.='2']" - , pre+"/lst[@name='false']/long[@name='missing'][.='0']" - , pre + "/lst[@name='true']/long[@name='countDistinct'][.='2']" - , "count(" + pre + "/lst[@name='true']/arr[@name='distinctValues']/*)=2" - , pre+"/lst[@name='false']/double[@name='sumOfSquares'][.='2500.0']" - , pre+"/lst[@name='false']/double[@name='mean'][.='35.0']" - , pre+"/lst[@name='false']/double[@name='stddev'][.='7.0710678118654755']" - ); + assertQ( + "test value for active_s=true", + req( + baseParams, + "q", + "*:*", + "stats.calcdistinct", + "true", + "stats.facet", + "active_s", + "stats.facet", + "other_s"), + "*[count(" + pre + ")=1]", + pre + "/lst[@name='true']/double[@name='min'][.='10.0']", + pre + "/lst[@name='true']/double[@name='max'][.='20.0']", + pre + "/lst[@name='true']/double[@name='sum'][.='30.0']", + pre + "/lst[@name='true']/long[@name='count'][.='2']", + pre + "/lst[@name='true']/long[@name='missing'][.='0']", + pre + "/lst[@name='true']/long[@name='countDistinct'][.='2']", + "count(" + pre + "/lst[@name='true']/arr[@name='distinctValues']/*)=2", + pre + "/lst[@name='true']/double[@name='sumOfSquares'][.='500.0']", + pre + "/lst[@name='true']/double[@name='mean'][.='15.0']", + pre + "/lst[@name='true']/double[@name='stddev'][.='7.0710678118654755']"); + + assertQ( + "test value for active_s=false", + req(baseParams, "q", "*:*", "stats.calcdistinct", "true", "stats.facet", "active_s"), + pre + "/lst[@name='false']/double[@name='min'][.='30.0']", + pre + "/lst[@name='false']/double[@name='max'][.='40.0']", + pre + "/lst[@name='false']/double[@name='sum'][.='70.0']", + pre + "/lst[@name='false']/long[@name='count'][.='2']", + pre + "/lst[@name='false']/long[@name='missing'][.='0']", + pre + "/lst[@name='true']/long[@name='countDistinct'][.='2']", + "count(" + pre + "/lst[@name='true']/arr[@name='distinctValues']/*)=2", + pre + "/lst[@name='false']/double[@name='sumOfSquares'][.='2500.0']", + pre + "/lst[@name='false']/double[@name='mean'][.='35.0']", + pre + "/lst[@name='false']/double[@name='stddev'][.='7.0710678118654755']"); } // we should be able to compute exact same stats & stats.facet for a field even - // when we specify it using the "field()" function, or use other + // when we specify it using the "field()" function, or use other // identify equivalent functions - for (String param : new String[] { - // bare - "{!key="+f+" ex=key_ex_tag}" + f, - "{!key="+f+" ex=key_ex_tag v="+f+"}", - // field func - "{!lucene key="+f+" ex=key_ex_tag}_val_:\"field("+f+")\"", - "{!func key="+f+" ex=key_ex_tag}field("+f+")", - "{!type=func key="+f+" ex=key_ex_tag}field("+f+")", - "{!type=func key="+f+" ex=key_ex_tag v=field("+f+")}", - "{!type=func key="+f+" ex=key_ex_tag v='field("+f+")'}", - // identity math functions - "{!type=func key="+f+" ex=key_ex_tag v='sum(0,"+f+")'}", - "{!type=func key="+f+" ex=key_ex_tag v='product(1,"+f+")'}", - }) { - assertQ("test statis & stats.facet over field specified as a function: " + param, - req("q", "*:*", "stats", "true", "stats.calcdistinct", "true", - "fq", "{!tag=key_ex_tag}-id:4", - "stats.field", param, - "stats.facet", "active_s", "stats.facet", "other_s") - , "*[count("+pre+")=1]" - , pre+"/lst[@name='true']/double[@name='min'][.='10.0']" - , pre+"/lst[@name='true']/double[@name='max'][.='20.0']" - , 
pre+"/lst[@name='true']/double[@name='sum'][.='30.0']" - , pre+"/lst[@name='true']/long[@name='count'][.='2']" - , pre+"/lst[@name='true']/long[@name='missing'][.='0']" - , pre + "/lst[@name='true']/long[@name='countDistinct'][.='2']" - , "count(" + pre + "/lst[@name='true']/arr[@name='distinctValues']/*)=2" - , pre+"/lst[@name='true']/double[@name='sumOfSquares'][.='500.0']" - , pre+"/lst[@name='true']/double[@name='mean'][.='15.0']" - , pre+"/lst[@name='true']/double[@name='stddev'][.='7.0710678118654755']" - // - , pre+"/lst[@name='false']/double[@name='min'][.='30.0']" - , pre+"/lst[@name='false']/double[@name='max'][.='40.0']" - , pre+"/lst[@name='false']/double[@name='sum'][.='70.0']" - , pre+"/lst[@name='false']/long[@name='count'][.='2']" - , pre+"/lst[@name='false']/long[@name='missing'][.='0']" - , pre + "/lst[@name='true']/long[@name='countDistinct'][.='2']" - , "count(" + pre + "/lst[@name='true']/arr[@name='distinctValues']/*)=2" - , pre+"/lst[@name='false']/double[@name='sumOfSquares'][.='2500.0']" - , pre+"/lst[@name='false']/double[@name='mean'][.='35.0']" - , pre+"/lst[@name='false']/double[@name='stddev'][.='7.0710678118654755']" - ); + for (String param : + new String[] { + // bare + "{!key=" + f + " ex=key_ex_tag}" + f, + "{!key=" + f + " ex=key_ex_tag v=" + f + "}", + // field func + "{!lucene key=" + f + " ex=key_ex_tag}_val_:\"field(" + f + ")\"", + "{!func key=" + f + " ex=key_ex_tag}field(" + f + ")", + "{!type=func key=" + f + " ex=key_ex_tag}field(" + f + ")", + "{!type=func key=" + f + " ex=key_ex_tag v=field(" + f + ")}", + "{!type=func key=" + f + " ex=key_ex_tag v='field(" + f + ")'}", + // identity math functions + "{!type=func key=" + f + " ex=key_ex_tag v='sum(0," + f + ")'}", + "{!type=func key=" + f + " ex=key_ex_tag v='product(1," + f + ")'}", + }) { + assertQ( + "test statis & stats.facet over field specified as a function: " + param, + req( + "q", + "*:*", + "stats", + "true", + "stats.calcdistinct", + "true", + "fq", + "{!tag=key_ex_tag}-id:4", + "stats.field", + param, + "stats.facet", + "active_s", + "stats.facet", + "other_s"), + "*[count(" + pre + ")=1]", + pre + "/lst[@name='true']/double[@name='min'][.='10.0']", + pre + "/lst[@name='true']/double[@name='max'][.='20.0']", + pre + "/lst[@name='true']/double[@name='sum'][.='30.0']", + pre + "/lst[@name='true']/long[@name='count'][.='2']", + pre + "/lst[@name='true']/long[@name='missing'][.='0']", + pre + "/lst[@name='true']/long[@name='countDistinct'][.='2']", + "count(" + pre + "/lst[@name='true']/arr[@name='distinctValues']/*)=2", + pre + "/lst[@name='true']/double[@name='sumOfSquares'][.='500.0']", + pre + "/lst[@name='true']/double[@name='mean'][.='15.0']", + pre + "/lst[@name='true']/double[@name='stddev'][.='7.0710678118654755']" + // + , + pre + "/lst[@name='false']/double[@name='min'][.='30.0']", + pre + "/lst[@name='false']/double[@name='max'][.='40.0']", + pre + "/lst[@name='false']/double[@name='sum'][.='70.0']", + pre + "/lst[@name='false']/long[@name='count'][.='2']", + pre + "/lst[@name='false']/long[@name='missing'][.='0']", + pre + "/lst[@name='true']/long[@name='countDistinct'][.='2']", + "count(" + pre + "/lst[@name='true']/arr[@name='distinctValues']/*)=2", + pre + "/lst[@name='false']/double[@name='sumOfSquares'][.='2500.0']", + pre + "/lst[@name='false']/double[@name='mean'][.='35.0']", + pre + "/lst[@name='false']/double[@name='stddev'][.='7.0710678118654755']"); } - assertQ("stats.facet w/ cardinality" - , req("q", "*:*", "stats", "true", - "fq", "-other_s:bar", - 
"stats.facet", "active_s", - "stats.field", "{!cardinality=true}"+f) - , pre+"/lst[@name='true' ]/long[@name='cardinality'][.='1']" - , pre+"/lst[@name='false']/long[@name='cardinality'][.='2']" - ); - + assertQ( + "stats.facet w/ cardinality", + req( + "q", + "*:*", + "stats", + "true", + "fq", + "-other_s:bar", + "stats.facet", + "active_s", + "stats.field", + "{!cardinality=true}" + f), + pre + "/lst[@name='true' ]/long[@name='cardinality'][.='1']", + pre + "/lst[@name='false']/long[@name='cardinality'][.='2']"); } - - public void doTestFacetStatisticsMissingResult(String f, SolrParams[] baseParamsSet) throws Exception { + + public void doTestFacetStatisticsMissingResult(String f, SolrParams[] baseParamsSet) + throws Exception { assertU(adoc("id", "1", f, "10", "active_s", "true")); assertU(adoc("id", "2", f, "20", "active_s", "true")); assertU(commit()); assertU(adoc("id", "3", "active_s", "false")); assertU(adoc("id", "4", f, "40", "active_s", "false")); assertU(commit()); - + // status should be the same regardless of baseParams for (SolrParams baseParams : baseParamsSet) { - - assertQ("test value for active_s=true", - req(baseParams, "q", "*:*", "stats.calcdistinct", "true", "stats.facet", "active_s") - , "//lst[@name='true']/double[@name='min'][.='10.0']" - , "//lst[@name='true']/double[@name='max'][.='20.0']" - , "//lst[@name='true']/double[@name='sum'][.='30.0']" - , "//lst[@name='true']/long[@name='count'][.='2']" - , "//lst[@name='true']/long[@name='missing'][.='0']" - , "//lst[@name='true']/long[@name='countDistinct'][.='2']" - , "count(//lst[@name='true']/arr[@name='distinctValues']/*)=2" - , "//lst[@name='true']/double[@name='sumOfSquares'][.='500.0']" - , "//lst[@name='true']/double[@name='mean'][.='15.0']" - , "//lst[@name='true']/double[@name='stddev'][.='7.0710678118654755']" - ); - - assertQ("test value for active_s=false", - req(baseParams, "q", "*:*", "stats.facet", "active_s", "stats.calcdistinct", "true") - , "//lst[@name='false']/double[@name='min'][.='40.0']" - , "//lst[@name='false']/double[@name='max'][.='40.0']" - , "//lst[@name='false']/double[@name='sum'][.='40.0']" - , "//lst[@name='false']/long[@name='count'][.='1']" - , "//lst[@name='false']/long[@name='missing'][.='1']" - , "//lst[@name='false']/long[@name='countDistinct'][.='1']" - , "count(//lst[@name='false']/arr[@name='distinctValues']/*)=1" - , "//lst[@name='false']/double[@name='sumOfSquares'][.='1600.0']" - , "//lst[@name='false']/double[@name='mean'][.='40.0']" - , "//lst[@name='false']/double[@name='stddev'][.='0.0']" - ); + + assertQ( + "test value for active_s=true", + req(baseParams, "q", "*:*", "stats.calcdistinct", "true", "stats.facet", "active_s"), + "//lst[@name='true']/double[@name='min'][.='10.0']", + "//lst[@name='true']/double[@name='max'][.='20.0']", + "//lst[@name='true']/double[@name='sum'][.='30.0']", + "//lst[@name='true']/long[@name='count'][.='2']", + "//lst[@name='true']/long[@name='missing'][.='0']", + "//lst[@name='true']/long[@name='countDistinct'][.='2']", + "count(//lst[@name='true']/arr[@name='distinctValues']/*)=2", + "//lst[@name='true']/double[@name='sumOfSquares'][.='500.0']", + "//lst[@name='true']/double[@name='mean'][.='15.0']", + "//lst[@name='true']/double[@name='stddev'][.='7.0710678118654755']"); + + assertQ( + "test value for active_s=false", + req(baseParams, "q", "*:*", "stats.facet", "active_s", "stats.calcdistinct", "true"), + "//lst[@name='false']/double[@name='min'][.='40.0']", + "//lst[@name='false']/double[@name='max'][.='40.0']", + 
"//lst[@name='false']/double[@name='sum'][.='40.0']", + "//lst[@name='false']/long[@name='count'][.='1']", + "//lst[@name='false']/long[@name='missing'][.='1']", + "//lst[@name='false']/long[@name='countDistinct'][.='1']", + "count(//lst[@name='false']/arr[@name='distinctValues']/*)=1", + "//lst[@name='false']/double[@name='sumOfSquares'][.='1600.0']", + "//lst[@name='false']/double[@name='mean'][.='40.0']", + "//lst[@name='false']/double[@name='stddev'][.='0.0']"); } - assertQ("stats.facet w/ cardinality" - , req("q", "*:*", "stats", "true", - "stats.facet", "active_s", - "stats.field", "{!cardinality=true}"+f) - , "//lst[@name='active_s']/lst[@name='true' ]/long[@name='cardinality'][.='2']" - , "//lst[@name='active_s']/lst[@name='false']/long[@name='cardinality'][.='1']" - ); + assertQ( + "stats.facet w/ cardinality", + req( + "q", + "*:*", + "stats", + "true", + "stats.facet", + "active_s", + "stats.field", + "{!cardinality=true}" + f), + "//lst[@name='active_s']/lst[@name='true' ]/long[@name='cardinality'][.='2']", + "//lst[@name='active_s']/lst[@name='false']/long[@name='cardinality'][.='1']"); } public void testFieldStatisticsResultsNumericFieldAlwaysMissing() throws Exception { @@ -774,28 +926,27 @@ public void testFieldStatisticsResultsNumericFieldAlwaysMissing() throws Excepti args.put("indent", "true"); SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); - assertQ("test string statistics values", req - ,"//lst[@name='active_i']/long[@name='count'][.='0']" - ,"//lst[@name='active_i']/long[@name='missing'][.='4']" - - ,"//lst[@name='active_i']/null[@name='min']" - ,"//lst[@name='active_i']/null[@name='max']" - ,"//lst[@name='active_i']/double[@name='sum'][.='0.0']" - ,"//lst[@name='active_i']/double[@name='sumOfSquares'][.='0.0']" - ,"//lst[@name='active_i']/double[@name='stddev'][.='0.0']" - ,"//lst[@name='active_i']/double[@name='mean'][.='NaN']" - // if new stats are supported, this will break - update test to assert values for each - ,"count(//lst[@name='active_i']/*)=8" - - ); + assertQ( + "test string statistics values", + req, + "//lst[@name='active_i']/long[@name='count'][.='0']", + "//lst[@name='active_i']/long[@name='missing'][.='4']", + "//lst[@name='active_i']/null[@name='min']", + "//lst[@name='active_i']/null[@name='max']", + "//lst[@name='active_i']/double[@name='sum'][.='0.0']", + "//lst[@name='active_i']/double[@name='sumOfSquares'][.='0.0']", + "//lst[@name='active_i']/double[@name='stddev'][.='0.0']", + "//lst[@name='active_i']/double[@name='mean'][.='NaN']" + // if new stats are supported, this will break - update test to assert values for each + , + "count(//lst[@name='active_i']/*)=8"); // NOTE: empty set percentiles covered in testPercentiles() - assertQ("test cardinality of missing" - , req("q", "*:*", "stats", "true", "stats.field", "{!cardinality=true}active_i") - ,"//lst[@name='active_i']/long[@name='cardinality'][.='0']" - ); - + assertQ( + "test cardinality of missing", + req("q", "*:*", "stats", "true", "stats.field", "{!cardinality=true}active_i"), + "//lst[@name='active_i']/long[@name='cardinality'][.='0']"); } public void testFieldStatisticsResultsStringFieldAlwaysMissing() throws Exception { @@ -814,24 +965,24 @@ public void testFieldStatisticsResultsStringFieldAlwaysMissing() throws Exceptio args.put("indent", "true"); SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); - assertQ("test string statistics values", req - ,"//lst[@name='active_s']/long[@name='count'][.='0']" - 
,"//lst[@name='active_s']/long[@name='missing'][.='4']" - - ,"//lst[@name='active_s']/null[@name='min']" - ,"//lst[@name='active_s']/null[@name='max']" - // if new stats are supported, this will break - update test to assert values for each - ,"count(//lst[@name='active_s']/*)=4" - ); - - assertQ("test string statistics values" - , req("q", "*:*", "stats", "true", "stats.field", "{!cardinality=true}active_s") - ,"//lst[@name='active_s']/long[@name='cardinality'][.='0']" - ); - + assertQ( + "test string statistics values", + req, + "//lst[@name='active_s']/long[@name='count'][.='0']", + "//lst[@name='active_s']/long[@name='missing'][.='4']", + "//lst[@name='active_s']/null[@name='min']", + "//lst[@name='active_s']/null[@name='max']" + // if new stats are supported, this will break - update test to assert values for each + , + "count(//lst[@name='active_s']/*)=4"); + + assertQ( + "test string statistics values", + req("q", "*:*", "stats", "true", "stats.field", "{!cardinality=true}active_s"), + "//lst[@name='active_s']/long[@name='cardinality'][.='0']"); } - //SOLR-3160 + // SOLR-3160 public void testFieldStatisticsResultsDateFieldAlwaysMissing() throws Exception { SolrCore core = h.getCore(); @@ -848,51 +999,56 @@ public void testFieldStatisticsResultsDateFieldAlwaysMissing() throws Exception args.put("indent", "true"); SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); - assertQ("test string statistics values", req - ,"//lst[@name='active_dt']/long[@name='count'][.='0']" - ,"//lst[@name='active_dt']/long[@name='missing'][.='3']" - - ,"//lst[@name='active_dt']/null[@name='min']" - ,"//lst[@name='active_dt']/null[@name='max']" - ,"//lst[@name='active_dt']/null[@name='mean']" - ,"//lst[@name='active_dt']/double[@name='sum'][.='0.0']" - ,"//lst[@name='active_dt']/double[@name='sumOfSquares'][.='0.0']" - ,"//lst[@name='active_dt']/double[@name='stddev'][.='0.0']" - - // if new stats are supported, this will break - update test to assert values for each - ,"count(//lst[@name='active_dt']/*)=8" - ); - - assertQ("cardinality" - , req("q","*:*", "stats", "true", "stats.field", "{!cardinality=true}active_dt") - ,"//lst[@name='active_dt']/long[@name='cardinality'][.='0']" - ); - + assertQ( + "test string statistics values", + req, + "//lst[@name='active_dt']/long[@name='count'][.='0']", + "//lst[@name='active_dt']/long[@name='missing'][.='3']", + "//lst[@name='active_dt']/null[@name='min']", + "//lst[@name='active_dt']/null[@name='max']", + "//lst[@name='active_dt']/null[@name='mean']", + "//lst[@name='active_dt']/double[@name='sum'][.='0.0']", + "//lst[@name='active_dt']/double[@name='sumOfSquares'][.='0.0']", + "//lst[@name='active_dt']/double[@name='stddev'][.='0.0']" + + // if new stats are supported, this will break - update test to assert values for each + , + "count(//lst[@name='active_dt']/*)=8"); + + assertQ( + "cardinality", + req("q", "*:*", "stats", "true", "stats.field", "{!cardinality=true}active_dt"), + "//lst[@name='active_dt']/long[@name='cardinality'][.='0']"); } public void testStatsFacetMultivaluedErrorHandling() throws Exception { SolrCore core = h.getCore(); SchemaField foo_ss = core.getLatestSchema().getField("foo_ss"); - assertU(adoc("id", "1", "active_i", "1", "foo_ss", "aa" )); + assertU(adoc("id", "1", "active_i", "1", "foo_ss", "aa")); assertU(commit()); - assertU(adoc("id", "2", "active_i", "1", "foo_ss", "bb" )); - assertU(adoc("id", "3", "active_i", "5", "foo_ss", "aa" )); + assertU(adoc("id", "2", "active_i", "1", "foo_ss", "bb")); + 
assertU(adoc("id", "3", "active_i", "5", "foo_ss", "aa")); assertU(commit()); - assertTrue("schema no longer satisfies test requirements: foo_ss no longer multivalued", foo_ss.multiValued()); - assertTrue("schema no longer satisfies test requirements: foo_ss's fieldtype no longer single valued", ! foo_ss.getType().isMultiValued()); - - assertQEx("no failure trying to get stats facet on foo_ss", - req("q", "*:*", - "stats", "true", - "stats.field", "active_i", - "stats.facet", "foo_ss"), - 400); - + assertTrue( + "schema no longer satisfies test requirements: foo_ss no longer multivalued", + foo_ss.multiValued()); + assertTrue( + "schema no longer satisfies test requirements: foo_ss's fieldtype no longer single valued", + !foo_ss.getType().isMultiValued()); + + assertQEx( + "no failure trying to get stats facet on foo_ss", + req( + "q", "*:*", + "stats", "true", + "stats.field", "active_i", + "stats.facet", "foo_ss"), + 400); } - //SOLR-3177 + // SOLR-3177 public void testStatsExcludeFilterQuery() throws Exception { SolrCore core = h.getCore(); assertU(adoc("id", "1")); @@ -908,9 +1064,11 @@ public void testStatsExcludeFilterQuery() throws Exception { args.put("fq", "{!tag=id}id_i:[2 TO 3]"); SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); - assertQ("test exluding filter query", req - , "//lst[@name='id_i']/double[@name='min'][.='1.0']" - , "//lst[@name='id_i']/double[@name='max'][.='4.0']"); + assertQ( + "test exluding filter query", + req, + "//lst[@name='id_i']/double[@name='min'][.='1.0']", + "//lst[@name='id_i']/double[@name='max'][.='4.0']"); args = new HashMap(); args.put(CommonParams.Q, "*:*"); @@ -919,24 +1077,32 @@ public void testStatsExcludeFilterQuery() throws Exception { args.put("fq", "{!tag=id}id_i:[2 TO 3]"); req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); - assertQ("test rename field", req - , "//lst[@name='id2']/double[@name='min'][.='2.0']" - , "//lst[@name='id2']/double[@name='max'][.='3.0']"); + assertQ( + "test rename field", + req, + "//lst[@name='id2']/double[@name='min'][.='2.0']", + "//lst[@name='id2']/double[@name='max'][.='3.0']"); } - + // SOLR-6024 public void testFieldStatisticsDocValuesAndMultiValued() throws Exception { SolrCore core = h.getCore(); - + // precondition for the test SchemaField catDocValues = core.getLatestSchema().getField("cat_docValues"); - assertTrue("schema no longer satisfies test requirements: cat_docValues no longer multivalued", catDocValues.multiValued()); - assertTrue("schema no longer satisfies test requirements: cat_docValues fieldtype no longer single valued", !catDocValues.getType().isMultiValued()); - assertTrue("schema no longer satisfies test requirements: cat_docValues no longer has docValues", catDocValues.hasDocValues()); - + assertTrue( + "schema no longer satisfies test requirements: cat_docValues no longer multivalued", + catDocValues.multiValued()); + assertTrue( + "schema no longer satisfies test requirements: cat_docValues fieldtype no longer single valued", + !catDocValues.getType().isMultiValued()); + assertTrue( + "schema no longer satisfies test requirements: cat_docValues no longer has docValues", + catDocValues.hasDocValues()); + List types = new ArrayList<>(); types.add(new FldType("id", ONE_ONE, new SVal('A', 'Z', 4, 4))); - types.add(new FldType("cat_docValues",new IRange(2,2), new SVal('a','z',1, 30))); + types.add(new FldType("cat_docValues", new IRange(2, 2), new SVal('a', 'z', 1, 30))); Doc d1 = createDoc(types); d1.getValues("id").set(0, "1"); 
d1.getValues("cat_docValues").set(0, "test"); @@ -947,114 +1113,128 @@ public void testFieldStatisticsDocValuesAndMultiValued() throws Exception { d2.getValues("cat_docValues").set(0, "test"); d2.getValues("cat_docValues").set(1, "testtt"); updateJ(toJSON(d2), null); - + assertU(commit()); - + Map args = new HashMap<>(); args.put(CommonParams.Q, "*:*"); args.put(StatsParams.STATS, "true"); args.put(StatsParams.STATS_FIELD, "cat_docValues"); args.put("indent", "true"); SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); - - assertQ("test min/max on docValues and multiValued", req - , "//lst[@name='cat_docValues']/str[@name='min'][.='test']" - , "//lst[@name='cat_docValues']/str[@name='max'][.='testtw']"); - - assertQ("cardinality", - req("q","*:*", "stats", "true", "stats.field", "{!cardinality=true}cat_docValues") - , "//lst[@name='cat_docValues']/long[@name='cardinality'][.='3']"); - + + assertQ( + "test min/max on docValues and multiValued", + req, + "//lst[@name='cat_docValues']/str[@name='min'][.='test']", + "//lst[@name='cat_docValues']/str[@name='max'][.='testtw']"); + + assertQ( + "cardinality", + req("q", "*:*", "stats", "true", "stats.field", "{!cardinality=true}cat_docValues"), + "//lst[@name='cat_docValues']/long[@name='cardinality'][.='3']"); } public void testFieldStatisticsDocValuesAndMultiValuedInteger() throws Exception { - SolrCore core = h.getCore(); - String fieldName = "cat_intDocValues"; - // precondition for the test - SchemaField catDocValues = core.getLatestSchema().getField(fieldName); - assertTrue("schema no longer satisfies test requirements: cat_docValues no longer multivalued", catDocValues.multiValued()); - assertTrue("schema no longer satisfies test requirements: cat_docValues fieldtype no longer single valued", !catDocValues.getType().isMultiValued()); - assertTrue("schema no longer satisfies test requirements: cat_docValues no longer has docValues", catDocValues.hasDocValues()); + SolrCore core = h.getCore(); + String fieldName = "cat_intDocValues"; + // precondition for the test + SchemaField catDocValues = core.getLatestSchema().getField(fieldName); + assertTrue( + "schema no longer satisfies test requirements: cat_docValues no longer multivalued", + catDocValues.multiValued()); + assertTrue( + "schema no longer satisfies test requirements: cat_docValues fieldtype no longer single valued", + !catDocValues.getType().isMultiValued()); + assertTrue( + "schema no longer satisfies test requirements: cat_docValues no longer has docValues", + catDocValues.hasDocValues()); - List types = new ArrayList<>(); - types.add(new FldType("id", ONE_ONE, new SVal('A', 'Z', 4, 4))); - types.add(new FldType(fieldName, ONE_ONE, new IRange(0, 0))); + List types = new ArrayList<>(); + types.add(new FldType("id", ONE_ONE, new SVal('A', 'Z', 4, 4))); + types.add(new FldType(fieldName, ONE_ONE, new IRange(0, 0))); - Doc d1 = createDocValuesDocument(types, fieldName, "1", -1, 3, 5); - updateJ(toJSON(d1), null); + Doc d1 = createDocValuesDocument(types, fieldName, "1", -1, 3, 5); + updateJ(toJSON(d1), null); - Doc d2 = createDocValuesDocument(types, fieldName, "2", 3, -2, 6); - updateJ(toJSON(d2), null); + Doc d2 = createDocValuesDocument(types, fieldName, "2", 3, -2, 6); + updateJ(toJSON(d2), null); - Doc d3 = createDocValuesDocument(types, fieldName, "3", 16, -3, 11); - updateJ(toJSON(d3), null); + Doc d3 = createDocValuesDocument(types, fieldName, "3", 16, -3, 11); + updateJ(toJSON(d3), null); - assertU(commit()); + assertU(commit()); + + Map args = 
new HashMap<>(); + args.put(CommonParams.Q, "*:*"); + args.put(StatsParams.STATS, "true"); + args.put(StatsParams.STATS_FIELD, fieldName); + args.put(StatsParams.STATS_CALC_DISTINCT, "true"); + args.put("indent", "true"); - Map args = new HashMap<>(); - args.put(CommonParams.Q, "*:*"); - args.put(StatsParams.STATS, "true"); - args.put(StatsParams.STATS_FIELD, fieldName); - args.put(StatsParams.STATS_CALC_DISTINCT, "true"); - args.put("indent", "true"); - - SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); - - assertQ("test min/max on docValues and multiValued", req - , "//lst[@name='" + fieldName + "']/double[@name='min'][.='-3.0']" - , "//lst[@name='" + fieldName + "']/double[@name='max'][.='16.0']" - , "//lst[@name='" + fieldName + "']/long[@name='count'][.='12']" - , "//lst[@name='" + fieldName + "']/long[@name='countDistinct'][.='9']" - , "//lst[@name='" + fieldName + "']/double[@name='sum'][.='38.0']" - , "//lst[@name='" + fieldName + "']/double[@name='mean'][.='3.1666666666666665']" - , "//lst[@name='" + fieldName + "']/double[@name='stddev'][.='5.638074031784151']" - , "//lst[@name='" + fieldName + "']/double[@name='sumOfSquares'][.='470.0']" - , "//lst[@name='" + fieldName + "']/long[@name='missing'][.='0']"); - - assertQ("cardinality", - req("q","*:*", "stats", "true", "stats.field", "{!cardinality=true}" + fieldName) - , "//lst[@name='"+fieldName+"']/long[@name='cardinality'][.='9']"); + SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); + assertQ( + "test min/max on docValues and multiValued", + req, + "//lst[@name='" + fieldName + "']/double[@name='min'][.='-3.0']", + "//lst[@name='" + fieldName + "']/double[@name='max'][.='16.0']", + "//lst[@name='" + fieldName + "']/long[@name='count'][.='12']", + "//lst[@name='" + fieldName + "']/long[@name='countDistinct'][.='9']", + "//lst[@name='" + fieldName + "']/double[@name='sum'][.='38.0']", + "//lst[@name='" + fieldName + "']/double[@name='mean'][.='3.1666666666666665']", + "//lst[@name='" + fieldName + "']/double[@name='stddev'][.='5.638074031784151']", + "//lst[@name='" + fieldName + "']/double[@name='sumOfSquares'][.='470.0']", + "//lst[@name='" + fieldName + "']/long[@name='missing'][.='0']"); + + assertQ( + "cardinality", + req("q", "*:*", "stats", "true", "stats.field", "{!cardinality=true}" + fieldName), + "//lst[@name='" + fieldName + "']/long[@name='cardinality'][.='9']"); } public void testFieldStatisticsDocValuesAndMultiValuedIntegerFacetStats() throws Exception { - SolrCore core = h.getCore(); - String fieldName = "cat_intDocValues"; - // precondition for the test - SchemaField catDocValues = core.getLatestSchema().getField(fieldName); - assertTrue("schema no longer satisfies test requirements: cat_docValues no longer multivalued", catDocValues.multiValued()); - assertTrue("schema no longer satisfies test requirements: cat_docValues fieldtype no longer single valued", !catDocValues.getType().isMultiValued()); - assertTrue("schema no longer satisfies test requirements: cat_docValues no longer has docValues", catDocValues.hasDocValues()); - - List types = new ArrayList<>(); - types.add(new FldType("id", ONE_ONE, new SVal('A', 'Z', 4, 4))); - types.add(new FldType(fieldName, ONE_ONE, new IRange(0, 0))); - - Doc d1 = createDocValuesDocument(types, fieldName, "1", -1, 3, 5); - updateJ(toJSON(d1), null); + SolrCore core = h.getCore(); + String fieldName = "cat_intDocValues"; + // precondition for the test + SchemaField catDocValues = 
core.getLatestSchema().getField(fieldName); + assertTrue( + "schema no longer satisfies test requirements: cat_docValues no longer multivalued", + catDocValues.multiValued()); + assertTrue( + "schema no longer satisfies test requirements: cat_docValues fieldtype no longer single valued", + !catDocValues.getType().isMultiValued()); + assertTrue( + "schema no longer satisfies test requirements: cat_docValues no longer has docValues", + catDocValues.hasDocValues()); - Doc d2 = createDocValuesDocument(types, fieldName, "2", 3, -2, 6); - updateJ(toJSON(d2), null); + List types = new ArrayList<>(); + types.add(new FldType("id", ONE_ONE, new SVal('A', 'Z', 4, 4))); + types.add(new FldType(fieldName, ONE_ONE, new IRange(0, 0))); - Doc d3 = createDocValuesDocument(types, fieldName, "3", 16, -3, 11); - updateJ(toJSON(d3), null); + Doc d1 = createDocValuesDocument(types, fieldName, "1", -1, 3, 5); + updateJ(toJSON(d1), null); - assertU(commit()); + Doc d2 = createDocValuesDocument(types, fieldName, "2", 3, -2, 6); + updateJ(toJSON(d2), null); - Map args = new HashMap<>(); - args.put(CommonParams.Q, "*:*"); - args.put(StatsParams.STATS, "true"); - args.put(StatsParams.STATS_FIELD, fieldName); - args.put(StatsParams.STATS_FACET, fieldName); - args.put(StatsParams.STATS_CALC_DISTINCT, "true"); - args.put("indent", "true"); + Doc d3 = createDocValuesDocument(types, fieldName, "3", 16, -3, 11); + updateJ(toJSON(d3), null); - SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); + assertU(commit()); - assertQEx("can not use FieldCache on multivalued field: cat_intDocValues", req, 400); + Map args = new HashMap<>(); + args.put(CommonParams.Q, "*:*"); + args.put(StatsParams.STATS, "true"); + args.put(StatsParams.STATS_FIELD, fieldName); + args.put(StatsParams.STATS_FACET, fieldName); + args.put(StatsParams.STATS_CALC_DISTINCT, "true"); + args.put("indent", "true"); - } + SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); + assertQEx("can not use FieldCache on multivalued field: cat_intDocValues", req, 400); + } public void testMiscQueryStats() throws Exception { final String kpre = XPRE + "lst[@name='stats_fields']/lst[@name='k']/"; @@ -1065,81 +1245,75 @@ public void testMiscQueryStats() throws Exception { assertU(adoc("id", "4", "a_f", "6.7", "b_f", "6.3", "foo_t", "red cow")); assertU(commit()); - assertQ("functions over multiple fields", - req("q","foo_t:cow", "stats", "true", - "stats.field", "{!func key=k}product(a_f,b_f)") - - , kpre + "double[@name='min'][.='22.309999465942383']" - , kpre + "double[@name='max'][.='42.209999084472656']" - , kpre + "double[@name='sum'][.='103.21999931335449']" - , kpre + "long[@name='count'][.='3']" - , kpre + "long[@name='missing'][.='0']" - , kpre + "double[@name='sumOfSquares'][.='3777.110157933046']" - , kpre + "double[@name='mean'][.='34.40666643778483']" - , kpre + "double[@name='stddev'][.='10.622007151430441']" - ); + assertQ( + "functions over multiple fields", + req("q", "foo_t:cow", "stats", "true", "stats.field", "{!func key=k}product(a_f,b_f)"), + kpre + "double[@name='min'][.='22.309999465942383']", + kpre + "double[@name='max'][.='42.209999084472656']", + kpre + "double[@name='sum'][.='103.21999931335449']", + kpre + "long[@name='count'][.='3']", + kpre + "long[@name='missing'][.='0']", + kpre + "double[@name='sumOfSquares'][.='3777.110157933046']", + kpre + "double[@name='mean'][.='34.40666643778483']", + kpre + "double[@name='stddev'][.='10.622007151430441']"); // force constant score for matches 
so we aren't dependent on similarity
 final float constScore = 4.2F;
 final double expectedScore = (double) constScore;
- assertQ("functions over a query",
- req("q","*:*", "stats", "true",
- "stats.field", "{!lucene key=k}foo_t:cow^=" + constScore)
- , kpre + "double[@name='min'][.='" + expectedScore + "']"
- , kpre + "double[@name='max'][.='" + expectedScore + "']"
- , kpre + "double[@name='sum'][.='" + (3D * expectedScore) + "']"
- , kpre + "long[@name='count'][.='3']"
- , kpre + "long[@name='missing'][.='1']"
- , kpre + "double[@name='sumOfSquares'][.='" + (3D * Math.pow(expectedScore, 2D)) + "']"
- , kpre + "double[@name='mean'][.='" + expectedScore + "']"
- , kpre + "double[@name='stddev'][.='0.0']"
- );
+ assertQ(
+ "functions over a query",
+ req("q", "*:*", "stats", "true", "stats.field", "{!lucene key=k}foo_t:cow^=" + constScore),
+ kpre + "double[@name='min'][.='" + expectedScore + "']",
+ kpre + "double[@name='max'][.='" + expectedScore + "']",
+ kpre + "double[@name='sum'][.='" + (3D * expectedScore) + "']",
+ kpre + "long[@name='count'][.='3']",
+ kpre + "long[@name='missing'][.='1']",
+ kpre + "double[@name='sumOfSquares'][.='" + (3D * Math.pow(expectedScore, 2D)) + "']",
+ kpre + "double[@name='mean'][.='" + expectedScore + "']",
+ kpre + "double[@name='stddev'][.='0.0']");
 }
 /**
- * Whitebox test of {@link StatsField} parsing to ensure expected equivilence
- * operations hold up
+ * Whitebox test of {@link StatsField} parsing to ensure expected equivalence operations hold up
 */
 public void testStatsFieldWhitebox() throws Exception {
 StatsComponent component = new StatsComponent();
 List components = new ArrayList<>(1);
 components.add(component);
- SolrParams common = params("stats", "true", "q", "*:*", "nested","foo_t:cow");
+ SolrParams common = params("stats", "true", "q", "*:*", "nested", "foo_t:cow");
 // all of these should produce the same SchemaField based StatsField
- for (String param : new String[] {
- "foo_i", "{!func}field(\"foo_i\")", "{!lucene}_val_:\"field(foo_i)\""
- }) {
- try (SolrQueryRequest req = req(common)){
+ for (String param :
+ new String[] {"foo_i", "{!func}field(\"foo_i\")", "{!lucene}_val_:\"field(foo_i)\""}) {
+ try (SolrQueryRequest req = req(common)) {
 ResponseBuilder rb = new ResponseBuilder(req, new SolrQueryResponse(), components);
-
+
 StatsField sf = new StatsField(rb, param);
-
+
 assertNull("value source of: " + param, sf.getValueSource());
 assertNotNull("schema field of: " + param, sf.getSchemaField());
- assertEquals("field name of: " + param,
- "foo_i", sf.getSchemaField().getName());
+ assertEquals("field name of: " + param, "foo_i", sf.getSchemaField().getName());
 }
 }
 // all of these should produce the same QueryValueSource based StatsField
- for (String param : new String[] {
- "{!lucene}foo_t:cow", "{!func}query($nested)", "{!field f=foo_t}cow",
- }) {
+ for (String param :
+ new String[] {
+ "{!lucene}foo_t:cow", "{!func}query($nested)", "{!field f=foo_t}cow",
+ }) {
 try (SolrQueryRequest req = req(common)) {
 ResponseBuilder rb = new ResponseBuilder(req, new SolrQueryResponse(), components);
-
+
 StatsField sf = new StatsField(rb, param);
-
+
 assertNull("schema field of: " + param, sf.getSchemaField());
 assertNotNull("value source of: " + param, sf.getValueSource());
- assertTrue(sf.getValueSource().getClass() + " is vs type of: " + param,
- sf.getValueSource() instanceof QueryValueSource);
+ assertTrue(
+ sf.getValueSource().getClass() + " is vs type of: " + param,
+ sf.getValueSource() instanceof QueryValueSource);
QueryValueSource qvs = (QueryValueSource) sf.getValueSource(); - assertEquals("query of :" + param, - new TermQuery(new Term("foo_t","cow")), - qvs.getQuery()); + assertEquals("query of :" + param, new TermQuery(new Term("foo_t", "cow")), qvs.getQuery()); } } } @@ -1149,67 +1323,85 @@ public void testFieldStatisticsDocValuesAndMultiValuedDouble() throws Exception String fieldName = "cat_floatDocValues"; // precondition for the test SchemaField catDocValues = core.getLatestSchema().getField(fieldName); - assertTrue("schema no longer satisfies test requirements: cat_docValues no longer multivalued", catDocValues.multiValued()); - assertTrue("schema no longer satisfies test requirements: cat_docValues fieldtype no longer single valued", !catDocValues.getType().isMultiValued()); - assertTrue("schema no longer satisfies test requirements: cat_docValues no longer has docValues", catDocValues.hasDocValues()); + assertTrue( + "schema no longer satisfies test requirements: cat_docValues no longer multivalued", + catDocValues.multiValued()); + assertTrue( + "schema no longer satisfies test requirements: cat_docValues fieldtype no longer single valued", + !catDocValues.getType().isMultiValued()); + assertTrue( + "schema no longer satisfies test requirements: cat_docValues no longer has docValues", + catDocValues.hasDocValues()); List types = new ArrayList<>(); types.add(new FldType("id", ONE_ONE, new SVal('A', 'Z', 4, 4))); types.add(new FldType(fieldName, ONE_ONE, new FVal(0, 0))); - Doc d1 = createDocValuesDocument(types, fieldName, "1", -1, 3, 5); + Doc d1 = createDocValuesDocument(types, fieldName, "1", -1, 3, 5); updateJ(toJSON(d1), null); - Doc d2 = createDocValuesDocument(types, fieldName, "2", 3, -2, 6); + Doc d2 = createDocValuesDocument(types, fieldName, "2", 3, -2, 6); updateJ(toJSON(d2), null); - Doc d3 = createDocValuesDocument(types, fieldName, "3", 16, -3, 11); + Doc d3 = createDocValuesDocument(types, fieldName, "3", 16, -3, 11); updateJ(toJSON(d3), null); assertU(commit()); - final SolrParams baseParams = params(CommonParams.Q, "*:*", - "indent", "true", - StatsParams.STATS, "true"); - - SolrQueryRequest req1 = req(baseParams, - StatsParams.STATS_CALC_DISTINCT, "true", - StatsParams.STATS_FIELD, fieldName); - SolrQueryRequest req2 = req(baseParams, - StatsParams.STATS_FIELD, - "{!min=true, max=true, count=true, sum=true, mean=true, stddev=true, sumOfSquares=true, missing=true, calcdistinct=true}" + fieldName); - SolrQueryRequest req3 = req(baseParams, - StatsParams.STATS_FIELD, - "{!min=true, max=true, count=true, sum=true, mean=true, stddev=true, sumOfSquares=true, missing=true, countDistinct=true, distinctValues=true}" + fieldName); - - for (SolrQueryRequest req : new SolrQueryRequest[] { req1, req2, req3 }) { - assertQ("test status on docValues and multiValued: " + req.toString(), req - , "//lst[@name='" + fieldName + "']/double[@name='min'][.='-3.0']" - , "//lst[@name='" + fieldName + "']/double[@name='max'][.='16.0']" - , "//lst[@name='" + fieldName + "']/long[@name='count'][.='12']" - , "//lst[@name='" + fieldName + "']/double[@name='sum'][.='38.0']" - , "//lst[@name='" + fieldName + "']/double[@name='mean'][.='3.1666666666666665']" - , "//lst[@name='" + fieldName + "']/double[@name='stddev'][.='5.638074031784151']" - , "//lst[@name='" + fieldName + "']/double[@name='sumOfSquares'][.='470.0']" - , "//lst[@name='" + fieldName + "']/long[@name='missing'][.='0']" - , "//lst[@name='" + fieldName + "']/long[@name='countDistinct'][.='9']" - // always comes along with countDistinct - 
, "count(//lst[@name='" + fieldName + "']/arr[@name='distinctValues']/float)=9" - // if new default stats are added, this will break - update test to assert values for each - ,"count(//lst[@name='" + fieldName + "']/*)=10" - ); + final SolrParams baseParams = + params(CommonParams.Q, "*:*", "indent", "true", StatsParams.STATS, "true"); + + SolrQueryRequest req1 = + req( + baseParams, + StatsParams.STATS_CALC_DISTINCT, + "true", + StatsParams.STATS_FIELD, + fieldName); + SolrQueryRequest req2 = + req( + baseParams, + StatsParams.STATS_FIELD, + "{!min=true, max=true, count=true, sum=true, mean=true, stddev=true, sumOfSquares=true, missing=true, calcdistinct=true}" + + fieldName); + SolrQueryRequest req3 = + req( + baseParams, + StatsParams.STATS_FIELD, + "{!min=true, max=true, count=true, sum=true, mean=true, stddev=true, sumOfSquares=true, missing=true, countDistinct=true, distinctValues=true}" + + fieldName); + + for (SolrQueryRequest req : new SolrQueryRequest[] {req1, req2, req3}) { + assertQ( + "test status on docValues and multiValued: " + req.toString(), + req, + "//lst[@name='" + fieldName + "']/double[@name='min'][.='-3.0']", + "//lst[@name='" + fieldName + "']/double[@name='max'][.='16.0']", + "//lst[@name='" + fieldName + "']/long[@name='count'][.='12']", + "//lst[@name='" + fieldName + "']/double[@name='sum'][.='38.0']", + "//lst[@name='" + fieldName + "']/double[@name='mean'][.='3.1666666666666665']", + "//lst[@name='" + fieldName + "']/double[@name='stddev'][.='5.638074031784151']", + "//lst[@name='" + fieldName + "']/double[@name='sumOfSquares'][.='470.0']", + "//lst[@name='" + fieldName + "']/long[@name='missing'][.='0']", + "//lst[@name='" + fieldName + "']/long[@name='countDistinct'][.='9']" + // always comes along with countDistinct + , + "count(//lst[@name='" + fieldName + "']/arr[@name='distinctValues']/float)=9" + // if new default stats are added, this will break - update test to assert values for each + , + "count(//lst[@name='" + fieldName + "']/*)=10"); } - assertQ("cardinality", - req("q","*:*", "stats", "true", "stats.field", "{!cardinality=true}"+fieldName) - , "//lst[@name='"+fieldName+"']/long[@name='cardinality'][.='9']"); - + assertQ( + "cardinality", + req("q", "*:*", "stats", "true", "stats.field", "{!cardinality=true}" + fieldName), + "//lst[@name='" + fieldName + "']/long[@name='cardinality'][.='9']"); } - + public void testEnumFieldTypeStatus() throws Exception { clearIndex(); - - String fieldName = "severity"; + + String fieldName = "severity"; assertU(adoc("id", "0", fieldName, "Not Available")); assertU(adoc("id", "1", fieldName, "Not Available")); assertU(adoc("id", "2", fieldName, "Not Available")); @@ -1225,63 +1417,80 @@ public void testEnumFieldTypeStatus() throws Exception { assertU(adoc("id", "12", fieldName, "High")); assertU(adoc("id", "13", fieldName, "High")); assertU(adoc("id", "14", fieldName, "Critical")); - - + for (int i = 20; i <= 30; i++) { assertU(adoc("id", "" + i)); } assertU(commit()); - - assertQ("enum", req("q","*:*", "stats", "true", "stats.field", fieldName) - , "//lst[@name='" + fieldName + "']/str[@name='min'][.='Not Available']" - , "//lst[@name='" + fieldName + "']/str[@name='max'][.='Critical']" - , "//lst[@name='" + fieldName + "']/long[@name='count'][.='15']" - , "//lst[@name='" + fieldName + "']/long[@name='missing'][.='11']"); - - assertQ("cardinality", - req("q","*:*", "stats", "true", "stats.field", "{!cardinality=true}"+fieldName) - , "//lst[@name='" + fieldName + "']/long[@name='cardinality'][.='5']"); - - 
assertQ("enum calcdistinct", req("q","*:*", "stats", "true", "stats.field", fieldName, - StatsParams.STATS_CALC_DISTINCT, "true") - , "//lst[@name='" + fieldName + "']/str[@name='min'][.='Not Available']" - , "//lst[@name='" + fieldName + "']/str[@name='max'][.='Critical']" - , "//lst[@name='" + fieldName + "']/long[@name='count'][.='15']" - , "//lst[@name='" + fieldName + "']/long[@name='countDistinct'][.='5']" - , "count(//lst[@name='" + fieldName + "']/arr[@name='distinctValues']/*)=5" - , "//lst[@name='" + fieldName + "']/long[@name='missing'][.='11']"); - - - final String pre = "//lst[@name='stats_fields']/lst[@name='"+fieldName+"']/lst[@name='facets']/lst[@name='severity']"; - - assertQ("enum + stats.facet", req("q","*:*", "stats", "true", "stats.field", fieldName, - "stats.facet", fieldName) - , pre + "/lst[@name='High']/str[@name='min'][.='High']" - , pre + "/lst[@name='High']/str[@name='max'][.='High']" - , pre + "/lst[@name='High']/long[@name='count'][.='2']" - , pre + "/lst[@name='High']/long[@name='missing'][.='0']" - , pre + "/lst[@name='Low']/str[@name='min'][.='Low']" - , pre + "/lst[@name='Low']/str[@name='max'][.='Low']" - , pre + "/lst[@name='Low']/long[@name='count'][.='4']" - , pre + "/lst[@name='Low']/long[@name='missing'][.='0']" - , pre + "/lst[@name='Medium']/str[@name='min'][.='Medium']" - , pre + "/lst[@name='Medium']/str[@name='max'][.='Medium']" - , pre + "/lst[@name='Medium']/long[@name='count'][.='3']" - , pre + "/lst[@name='Medium']/long[@name='missing'][.='0']" - , pre + "/lst[@name='Not Available']/str[@name='min'][.='Not Available']" - , pre + "/lst[@name='Not Available']/str[@name='max'][.='Not Available']" - , pre + "/lst[@name='Not Available']/long[@name='count'][.='5']" - , pre + "/lst[@name='Not Available']/long[@name='missing'][.='0']" - , pre + "/lst[@name='Critical']/str[@name='min'][.='Critical']" - , pre + "/lst[@name='Critical']/str[@name='max'][.='Critical']" - , pre + "/lst[@name='Critical']/long[@name='count'][.='1']" - , pre + "/lst[@name='Critical']/long[@name='missing'][.='0']" - ); + + assertQ( + "enum", + req("q", "*:*", "stats", "true", "stats.field", fieldName), + "//lst[@name='" + fieldName + "']/str[@name='min'][.='Not Available']", + "//lst[@name='" + fieldName + "']/str[@name='max'][.='Critical']", + "//lst[@name='" + fieldName + "']/long[@name='count'][.='15']", + "//lst[@name='" + fieldName + "']/long[@name='missing'][.='11']"); + + assertQ( + "cardinality", + req("q", "*:*", "stats", "true", "stats.field", "{!cardinality=true}" + fieldName), + "//lst[@name='" + fieldName + "']/long[@name='cardinality'][.='5']"); + + assertQ( + "enum calcdistinct", + req( + "q", + "*:*", + "stats", + "true", + "stats.field", + fieldName, + StatsParams.STATS_CALC_DISTINCT, + "true"), + "//lst[@name='" + fieldName + "']/str[@name='min'][.='Not Available']", + "//lst[@name='" + fieldName + "']/str[@name='max'][.='Critical']", + "//lst[@name='" + fieldName + "']/long[@name='count'][.='15']", + "//lst[@name='" + fieldName + "']/long[@name='countDistinct'][.='5']", + "count(//lst[@name='" + fieldName + "']/arr[@name='distinctValues']/*)=5", + "//lst[@name='" + fieldName + "']/long[@name='missing'][.='11']"); + + final String pre = + "//lst[@name='stats_fields']/lst[@name='" + + fieldName + + "']/lst[@name='facets']/lst[@name='severity']"; + + assertQ( + "enum + stats.facet", + req("q", "*:*", "stats", "true", "stats.field", fieldName, "stats.facet", fieldName), + pre + "/lst[@name='High']/str[@name='min'][.='High']", + pre + 
"/lst[@name='High']/str[@name='max'][.='High']", + pre + "/lst[@name='High']/long[@name='count'][.='2']", + pre + "/lst[@name='High']/long[@name='missing'][.='0']", + pre + "/lst[@name='Low']/str[@name='min'][.='Low']", + pre + "/lst[@name='Low']/str[@name='max'][.='Low']", + pre + "/lst[@name='Low']/long[@name='count'][.='4']", + pre + "/lst[@name='Low']/long[@name='missing'][.='0']", + pre + "/lst[@name='Medium']/str[@name='min'][.='Medium']", + pre + "/lst[@name='Medium']/str[@name='max'][.='Medium']", + pre + "/lst[@name='Medium']/long[@name='count'][.='3']", + pre + "/lst[@name='Medium']/long[@name='missing'][.='0']", + pre + "/lst[@name='Not Available']/str[@name='min'][.='Not Available']", + pre + "/lst[@name='Not Available']/str[@name='max'][.='Not Available']", + pre + "/lst[@name='Not Available']/long[@name='count'][.='5']", + pre + "/lst[@name='Not Available']/long[@name='missing'][.='0']", + pre + "/lst[@name='Critical']/str[@name='min'][.='Critical']", + pre + "/lst[@name='Critical']/str[@name='max'][.='Critical']", + pre + "/lst[@name='Critical']/long[@name='count'][.='1']", + pre + "/lst[@name='Critical']/long[@name='missing'][.='0']"); } - - private Doc createDocValuesDocument(List types, String fieldName, String id, - @SuppressWarnings({"rawtypes"})Comparable... values) throws Exception { + + private Doc createDocValuesDocument( + List types, + String fieldName, + String id, + @SuppressWarnings({"rawtypes"}) Comparable... values) + throws Exception { Doc doc = createDoc(types); doc.getValues("id").set(0, id); initMultyValued(doc.getValues(fieldName), values); @@ -1289,113 +1498,127 @@ private Doc createDocValuesDocument(List types, String fieldName, Stri } @SuppressWarnings({"rawtypes"}) - private List initMultyValued(List cat_docValues, Comparable... comparables) { + private List initMultyValued( + List cat_docValues, Comparable... 
comparables) { Collections.addAll(cat_docValues, comparables); return cat_docValues; } - + /** Convinience struct used in {@link #testIndividualStatLocalParams} */ private static final class ExpectedStat { - public final static String KPRE = XPRE + "lst[@name='stats_fields']/lst[@name='k']/"; + public static final String KPRE = XPRE + "lst[@name='stats_fields']/lst[@name='k']/"; public final Stat stat; public final String input; public final List perShardXpaths; public final List finalXpaths; - - public final static Map ALL = new LinkedHashMap(); - private ExpectedStat(Stat stat, String input, - List perShardXpaths, List finalXpaths) { + + public static final Map ALL = new LinkedHashMap(); + + private ExpectedStat( + Stat stat, String input, List perShardXpaths, List finalXpaths) { this.stat = stat; this.input = input; this.perShardXpaths = perShardXpaths; this.finalXpaths = finalXpaths; } - + public static void createSimple(Stat stat, String input, String type, String result) { EnumSet deps = stat.getDistribDeps(); List perShardXpaths = new ArrayList(deps.size()); String xpath = KPRE + type + "[@name='" + stat + "'][.='" + result + "']"; for (Stat dep : deps) { if (dep.equals(stat)) { // self dependency - perShardXpaths.add(xpath);; + perShardXpaths.add(xpath); + ; } else { ExpectedStat expectedDep = ALL.get(dep); assertNotNull("can't find dep in ExpectedStat.ALL", expectedDep); perShardXpaths.addAll(expectedDep.perShardXpaths); } } - ALL.put(stat, new ExpectedStat(stat, input, perShardXpaths, Collections.singletonList(xpath))); + ALL.put( + stat, new ExpectedStat(stat, input, perShardXpaths, Collections.singletonList(xpath))); } - public static void create(Stat stat, String input, - List perShardXpaths, List finalXpaths) { + + public static void create( + Stat stat, String input, List perShardXpaths, List finalXpaths) { ALL.put(stat, new ExpectedStat(stat, input, perShardXpaths, finalXpaths)); } } - + public void testIndividualStatLocalParams() throws Exception { final String kpre = ExpectedStat.KPRE; - + assertU(adoc("id", "1", "a_f", "2.3", "b_f", "9.7", "a_i", "9", "foo_t", "how now brown cow")); assertU(commit()); SolrCore core = h.getCore(); SchemaField field = core.getLatestSchema().getField("a_i"); - HllOptions hllOpts = HllOptions.parseHllOptions(params("cardinality","true"), field); - + HllOptions hllOpts = HllOptions.parseHllOptions(params("cardinality", "true"), field); + HLL hll = hllOpts.newHLL(); HashFunction hasher = hllOpts.getHasher(); AVLTreeDigest tdigest = new AVLTreeDigest(100); - + // some quick sanity check assertions... 
// trivial check that we only get the exact 2 we ask for - assertQ("ask for and get only 2 stats", - req("q","*:*", "stats", "true", - "stats.field", "{!key=k mean=true min=true}a_i") - , kpre + "double[@name='mean'][.='9.0']" - , kpre + "double[@name='min'][.='9.0']" - , "count(" + kpre + "*)=2" - ); - + assertQ( + "ask for and get only 2 stats", + req("q", "*:*", "stats", "true", "stats.field", "{!key=k mean=true min=true}a_i"), + kpre + "double[@name='mean'][.='9.0']", + kpre + "double[@name='min'][.='9.0']", + "count(" + kpre + "*)=2"); + // for stats that are true/false, sanity check false does it's job - assertQ("min=true & max=false: only min should come back", - req("q","*:*", "stats", "true", - "stats.field", "{!key=k max=false min=true}a_i") - , kpre + "double[@name='min'][.='9.0']" - , "count(" + kpre + "*)=1" - ); - assertQ("min=false: localparam stat means ignore default set, "+ - "but since only local param is false no stats should be returned", - req("q","*:*", "stats", "true", - "stats.field", "{!key=k min=false}a_i") - // section of stats for this field should exist ... - , XPRE + "lst[@name='stats_fields']/lst[@name='k']" - // ...but be empty - , "count(" + kpre + "*)=0" - ); - + assertQ( + "min=true & max=false: only min should come back", + req("q", "*:*", "stats", "true", "stats.field", "{!key=k max=false min=true}a_i"), + kpre + "double[@name='min'][.='9.0']", + "count(" + kpre + "*)=1"); + assertQ( + "min=false: localparam stat means ignore default set, " + + "but since only local param is false no stats should be returned", + req("q", "*:*", "stats", "true", "stats.field", "{!key=k min=false}a_i") + // section of stats for this field should exist ... + , + XPRE + "lst[@name='stats_fields']/lst[@name='k']" + // ...but be empty + , + "count(" + kpre + "*)=0"); + double sum = 0; double sumOfSquares = 0; final int count = 20; for (int i = 0; i < count; i++) { int a_i = i % 10; - assertU(adoc("id", String.valueOf(i), "a_f", "2.3", "b_f", "9.7", "a_i", - String.valueOf(a_i), "foo_t", "how now brown cow")); + assertU( + adoc( + "id", + String.valueOf(i), + "a_f", + "2.3", + "b_f", + "9.7", + "a_i", + String.valueOf(a_i), + "foo_t", + "how now brown cow")); tdigest.add(a_i); hll.addRaw(hasher.hashInt(a_i).asLong()); sum += a_i; sumOfSquares += (a_i) * (a_i); } - double stddev = Math.sqrt(((count * sumOfSquares) - (sum * sum))/ (20 * (count - 1.0D))); - + double stddev = Math.sqrt(((count * sumOfSquares) - (sum * sum)) / (20 * (count - 1.0D))); + assertU(commit()); - + ByteBuffer tdigestBuf = ByteBuffer.allocate(tdigest.smallByteSize()); tdigest.asSmallBytes(tdigestBuf); byte[] hllBytes = hll.toBytes(); EnumSet allStats = EnumSet.allOf(Stat.class); - + final List expected = new ArrayList(allStats.size()); ExpectedStat.createSimple(Stat.min, "true", "double", "0.0"); ExpectedStat.createSimple(Stat.max, "true", "double", "9.0"); @@ -1406,31 +1629,47 @@ public void testIndividualStatLocalParams() throws Exception { ExpectedStat.createSimple(Stat.sumOfSquares, "true", "double", String.valueOf(sumOfSquares)); ExpectedStat.createSimple(Stat.stddev, "true", "double", String.valueOf(stddev)); final String distinctValsXpath = "count(" + kpre + "arr[@name='distinctValues']/*)=10"; - ExpectedStat.create(Stat.distinctValues, "true", - Collections.singletonList(distinctValsXpath), - Collections.singletonList(distinctValsXpath)); + ExpectedStat.create( + Stat.distinctValues, + "true", + Collections.singletonList(distinctValsXpath), + Collections.singletonList(distinctValsXpath)); 
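// Editor's aside (illustrative sketch, not part of this reformatting patch):
// the percentile and cardinality expectations registered below compare shard
// responses against the Base64-encoded bytes of the locally maintained
// t-digest and HLL sketches, i.e. the same serialized form a shard returns.
// A minimal sketch of that encoding, using only calls already present in this
// test:
//
//   ByteBuffer buf = ByteBuffer.allocate(tdigest.smallByteSize());
//   tdigest.asSmallBytes(buf);
//   String expectedPercentilesBlob =
//       new String(Base64.getEncoder().encode(buf.array()), StandardCharsets.ISO_8859_1);
//   // hll.toBytes() is wrapped the same way for the per-shard cardinality stat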
ExpectedStat.createSimple(Stat.countDistinct, "true", "long", "10"); - final String percentileShardXpath = kpre + "str[@name='percentiles'][.='" - + new String(Base64.getEncoder().encode(tdigestBuf.array()), StandardCharsets.ISO_8859_1) + "']"; + final String percentileShardXpath = + kpre + + "str[@name='percentiles'][.='" + + new String( + Base64.getEncoder().encode(tdigestBuf.array()), StandardCharsets.ISO_8859_1) + + "']"; final String p90 = "" + tdigest.quantile(0.90D); final String p99 = "" + tdigest.quantile(0.99D); - ExpectedStat.create(Stat.percentiles, "'90, 99'", - Collections.singletonList(percentileShardXpath), - Arrays.asList("count(" + kpre + "lst[@name='percentiles']/*)=2", - kpre + "lst[@name='percentiles']/double[@name='90.0'][.="+p90+"]", - kpre + "lst[@name='percentiles']/double[@name='99.0'][.="+p99+"]")); - final String cardinalityShardXpath = kpre + "str[@name='cardinality'][.='" - + new String(Base64.getEncoder().encode(hllBytes),StandardCharsets.ISO_8859_1) + "']"; - final String cardinalityXpath = kpre + "long[@name='cardinality'][.='10']"; - ExpectedStat.create(Stat.cardinality, "true", - Collections.singletonList(cardinalityShardXpath), - Collections.singletonList(cardinalityXpath)); + ExpectedStat.create( + Stat.percentiles, + "'90, 99'", + Collections.singletonList(percentileShardXpath), + Arrays.asList( + "count(" + kpre + "lst[@name='percentiles']/*)=2", + kpre + "lst[@name='percentiles']/double[@name='90.0'][.=" + p90 + "]", + kpre + "lst[@name='percentiles']/double[@name='99.0'][.=" + p99 + "]")); + final String cardinalityShardXpath = + kpre + + "str[@name='cardinality'][.='" + + new String(Base64.getEncoder().encode(hllBytes), StandardCharsets.ISO_8859_1) + + "']"; + final String cardinalityXpath = kpre + "long[@name='cardinality'][.='10']"; + ExpectedStat.create( + Stat.cardinality, + "true", + Collections.singletonList(cardinalityShardXpath), + Collections.singletonList(cardinalityXpath)); // canary in the coal mine - assertEquals("num of ExpectedStat doesn't match all known stats; " + - "enum was updated w/o updating test?", - ExpectedStat.ALL.size(), allStats.size()); - + assertEquals( + "num of ExpectedStat doesn't match all known stats; " + + "enum was updated w/o updating test?", + ExpectedStat.ALL.size(), + allStats.size()); + // whitebox test: explicitly ask for isShard=true with each individual stat for (ExpectedStat expect : ExpectedStat.ALL.values()) { Stat stat = expect.stat; @@ -1444,9 +1683,9 @@ public void testIndividualStatLocalParams() throws Exception { for (Stat perShardDep : distribDeps) { numKeysExpected++; - // even if we go out of our way to exclude the dependent stats, + // even if we go out of our way to exclude the dependent stats, // the shard should return them since they are a dependency for the requested stat - if (!stat.equals(perShardDep)){ + if (!stat.equals(perShardDep)) { // NOTE: this only works because all the cases where there are distribDeps // beyond a self dependency are simple true/false options exclude.append(perShardDep + "=false "); @@ -1455,20 +1694,27 @@ public void testIndividualStatLocalParams() throws Exception { // we don't want to find anything we aren't expecting testXpaths.add("count(" + kpre + "*)=" + numKeysExpected); - assertQ("ask for only "+stat+", with isShard=true, and expect only deps: " + distribDeps, - req("q", "*:*", "isShard", "true", "stats", "true", - "stats.field", "{!key=k " + exclude + stat +"=" + expect.input + "}a_i") - , testXpaths.toArray(new String[testXpaths.size()]) - ); + 
assertQ( + "ask for only " + stat + ", with isShard=true, and expect only deps: " + distribDeps, + req( + "q", + "*:*", + "isShard", + "true", + "stats", + "true", + "stats.field", + "{!key=k " + exclude + stat + "=" + expect.input + "}a_i"), + testXpaths.toArray(new String[testXpaths.size()])); } - + // test all the possible combinations (of all possible sizes) of stats params for (int numParams = 1; numParams <= allStats.size(); numParams++) { for (EnumSet set : new StatSetCombinations(numParams, allStats)) { // EnumSets use natural ordering, we want to randomize the order of the params List combo = new ArrayList(set); Collections.shuffle(combo, random()); - + StringBuilder paras = new StringBuilder("{!key=k "); List testXpaths = new ArrayList(numParams + 5); @@ -1487,205 +1733,292 @@ public void testIndividualStatLocalParams() throws Exception { // we don't want to find anything we aren't expecting testXpaths.add("count(" + kpre + "*)=" + numKeysExpected); - assertQ("ask for and get only: "+ combo, - req("q","*:*", "stats", "true", - "stats.field", paras.toString()) - , testXpaths.toArray(new String[testXpaths.size()]) - ); + assertQ( + "ask for and get only: " + combo, + req("q", "*:*", "stats", "true", "stats.field", paras.toString()), + testXpaths.toArray(new String[testXpaths.size()])); } } } - + // Test for Solr-6349 public void testCalcDistinctStats() throws Exception { final String kpre = XPRE + "lst[@name='stats_fields']/lst[@name='k']/"; - final String min = "count(" + kpre +"/double[@name='min'])"; - final String countDistinct = "count(" + kpre +"/long[@name='countDistinct'])"; - final String distinctValues = "count(" + kpre +"/arr[@name='distinctValues'])"; + final String min = "count(" + kpre + "/double[@name='min'])"; + final String countDistinct = "count(" + kpre + "/long[@name='countDistinct'])"; + final String distinctValues = "count(" + kpre + "/arr[@name='distinctValues'])"; final int count = 20; for (int i = 0; i < count; i++) { - assertU(adoc("id", String.valueOf(i), "a_f", "2.3", "b_f", "9.7", "a_i", - String.valueOf(i % 10), "foo_t", "how now brown cow")); + assertU( + adoc( + "id", + String.valueOf(i), + "a_f", + "2.3", + "b_f", + "9.7", + "a_i", + String.valueOf(i % 10), + "foo_t", + "how now brown cow")); } - + assertU(commit()); - - String[] baseParams = new String[] { "q", "*:*", "stats", "true","indent", "true" }; - - for (SolrParams p : new SolrParams[] { - params("stats.field", "{!key=k}a_i"), - params(StatsParams.STATS_CALC_DISTINCT, "false", - "stats.field", "{!key=k}a_i"), - params("f.a_i." + StatsParams.STATS_CALC_DISTINCT, "false", - "stats.field", "{!key=k}a_i"), - params(StatsParams.STATS_CALC_DISTINCT, "true", - "f.a_i." + StatsParams.STATS_CALC_DISTINCT, "false", - "stats.field", "{!key=k}a_i"), - params("stats.field", "{!key=k min='true'}a_i"), - params(StatsParams.STATS_CALC_DISTINCT, "true", - "f.a_i." + StatsParams.STATS_CALC_DISTINCT, "true", - "stats.field", "{!key=k min='true' calcdistinct='false'}a_i"), - }) { - - assertQ("min is either default or explicitly requested; "+ - "countDistinct & distinctValues either default or explicitly prevented" - , req(p, baseParams) - , min + "=1" - , countDistinct + "=0" - , distinctValues + "=0"); + + String[] baseParams = new String[] {"q", "*:*", "stats", "true", "indent", "true"}; + + for (SolrParams p : + new SolrParams[] { + params("stats.field", "{!key=k}a_i"), + params(StatsParams.STATS_CALC_DISTINCT, "false", "stats.field", "{!key=k}a_i"), + params("f.a_i." 
+ StatsParams.STATS_CALC_DISTINCT, "false", "stats.field", "{!key=k}a_i"), + params( + StatsParams.STATS_CALC_DISTINCT, + "true", + "f.a_i." + StatsParams.STATS_CALC_DISTINCT, + "false", + "stats.field", + "{!key=k}a_i"), + params("stats.field", "{!key=k min='true'}a_i"), + params( + StatsParams.STATS_CALC_DISTINCT, + "true", + "f.a_i." + StatsParams.STATS_CALC_DISTINCT, + "true", + "stats.field", + "{!key=k min='true' calcdistinct='false'}a_i"), + }) { + + assertQ( + "min is either default or explicitly requested; " + + "countDistinct & distinctValues either default or explicitly prevented", + req(p, baseParams), + min + "=1", + countDistinct + "=0", + distinctValues + "=0"); } - - for (SolrParams p : new SolrParams[] { - params("stats.calcdistinct", "true", - "stats.field", "{!key=k}a_i"), - params("f.a_i." + StatsParams.STATS_CALC_DISTINCT, "true", - "stats.field", "{!key=k}a_i"), - params("stats.calcdistinct", "false", - "f.a_i." + StatsParams.STATS_CALC_DISTINCT, "true", - "stats.field", "{!key=k}a_i"), - params("stats.calcdistinct", "false ", - "stats.field", "{!key=k min=true calcdistinct=true}a_i"), - params("f.a_i." + StatsParams.STATS_CALC_DISTINCT, "false", - "stats.field", "{!key=k min=true calcdistinct=true}a_i"), - params("stats.calcdistinct", "false ", - "f.a_i." + StatsParams.STATS_CALC_DISTINCT, "false", - "stats.field", "{!key=k min=true calcdistinct=true}a_i"), - }) { - - assertQ("min is either default or explicitly requested; " + - "countDistinct & distinctValues explicitly requested" - , req(p, baseParams) - , min + "=1" - , countDistinct + "=1" - , distinctValues + "=1"); + + for (SolrParams p : + new SolrParams[] { + params( + "stats.calcdistinct", "true", + "stats.field", "{!key=k}a_i"), + params("f.a_i." + StatsParams.STATS_CALC_DISTINCT, "true", "stats.field", "{!key=k}a_i"), + params( + "stats.calcdistinct", + "false", + "f.a_i." + StatsParams.STATS_CALC_DISTINCT, + "true", + "stats.field", + "{!key=k}a_i"), + params( + "stats.calcdistinct", "false ", + "stats.field", "{!key=k min=true calcdistinct=true}a_i"), + params( + "f.a_i." + StatsParams.STATS_CALC_DISTINCT, + "false", + "stats.field", + "{!key=k min=true calcdistinct=true}a_i"), + params( + "stats.calcdistinct", + "false ", + "f.a_i." + StatsParams.STATS_CALC_DISTINCT, + "false", + "stats.field", + "{!key=k min=true calcdistinct=true}a_i"), + }) { + + assertQ( + "min is either default or explicitly requested; " + + "countDistinct & distinctValues explicitly requested", + req(p, baseParams), + min + "=1", + countDistinct + "=1", + distinctValues + "=1"); } - - for (SolrParams p : new SolrParams[] { - params("stats.field", "{!key=k calcdistinct=true}a_i"), - - params("stats.calcdistinct", "true", - "stats.field", "{!key=k min='false'}a_i"), - - params("stats.calcdistinct", "true", - "stats.field", "{!key=k max='true' min='false'}a_i"), - - params("stats.calcdistinct", "false", - "stats.field", "{!key=k calcdistinct=true}a_i"), - params("f.a_i." + StatsParams.STATS_CALC_DISTINCT, "false", - "stats.field", "{!key=k calcdistinct=true}a_i"), - params("stats.calcdistinct", "false", - "f.a_i." + StatsParams.STATS_CALC_DISTINCT, "false", - "stats.field", "{!key=k calcdistinct=true}a_i"), - params("stats.calcdistinct", "false", - "f.a_i." 
+ StatsParams.STATS_CALC_DISTINCT, "false", - "stats.field", "{!key=k min='false' calcdistinct=true}a_i"), - }) { - - assertQ("min is explicitly excluded; " + - "countDistinct & distinctValues explicitly requested" - , req(p, baseParams) - , min + "=0" - , countDistinct + "=1" - , distinctValues + "=1"); + + for (SolrParams p : + new SolrParams[] { + params("stats.field", "{!key=k calcdistinct=true}a_i"), + params( + "stats.calcdistinct", "true", + "stats.field", "{!key=k min='false'}a_i"), + params( + "stats.calcdistinct", "true", + "stats.field", "{!key=k max='true' min='false'}a_i"), + params( + "stats.calcdistinct", "false", + "stats.field", "{!key=k calcdistinct=true}a_i"), + params( + "f.a_i." + StatsParams.STATS_CALC_DISTINCT, + "false", + "stats.field", + "{!key=k calcdistinct=true}a_i"), + params( + "stats.calcdistinct", + "false", + "f.a_i." + StatsParams.STATS_CALC_DISTINCT, + "false", + "stats.field", + "{!key=k calcdistinct=true}a_i"), + params( + "stats.calcdistinct", + "false", + "f.a_i." + StatsParams.STATS_CALC_DISTINCT, + "false", + "stats.field", + "{!key=k min='false' calcdistinct=true}a_i"), + }) { + + assertQ( + "min is explicitly excluded; " + "countDistinct & distinctValues explicitly requested", + req(p, baseParams), + min + "=0", + countDistinct + "=1", + distinctValues + "=1"); } - - for (SolrParams p : new SolrParams[] { - params(StatsParams.STATS_CALC_DISTINCT, "true", - "stats.field", "{!key=k min=true}a_i"), - params("f.a_i.stats.calcdistinct", "true", - "stats.field", "{!key=k min=true}a_i"), - params(StatsParams.STATS_CALC_DISTINCT, "false", - "f.a_i.stats.calcdistinct", "true", - "stats.field", "{!key=k min=true}a_i"), - params("f.a_i.stats.calcdistinct", "false", - "stats.field", "{!key=k min=true calcdistinct=true}a_i"), - params(StatsParams.STATS_CALC_DISTINCT, "false", - "stats.field", "{!key=k min=true calcdistinct=true}a_i"), - params(StatsParams.STATS_CALC_DISTINCT, "false", - "f.a_i.stats.calcdistinct", "false", - "stats.field", "{!key=k min=true calcdistinct=true}a_i"), - }) { - - assertQ("min is explicitly requested; " + - "countDistinct & distinctValues explicitly requested" - , req(p, baseParams) - , min + "=1" - , countDistinct + "=1" - , distinctValues + "=1"); + + for (SolrParams p : + new SolrParams[] { + params(StatsParams.STATS_CALC_DISTINCT, "true", "stats.field", "{!key=k min=true}a_i"), + params( + "f.a_i.stats.calcdistinct", "true", + "stats.field", "{!key=k min=true}a_i"), + params( + StatsParams.STATS_CALC_DISTINCT, + "false", + "f.a_i.stats.calcdistinct", + "true", + "stats.field", + "{!key=k min=true}a_i"), + params( + "f.a_i.stats.calcdistinct", "false", + "stats.field", "{!key=k min=true calcdistinct=true}a_i"), + params( + StatsParams.STATS_CALC_DISTINCT, + "false", + "stats.field", + "{!key=k min=true calcdistinct=true}a_i"), + params( + StatsParams.STATS_CALC_DISTINCT, + "false", + "f.a_i.stats.calcdistinct", + "false", + "stats.field", + "{!key=k min=true calcdistinct=true}a_i"), + }) { + + assertQ( + "min is explicitly requested; " + "countDistinct & distinctValues explicitly requested", + req(p, baseParams), + min + "=1", + countDistinct + "=1", + distinctValues + "=1"); } } /** Helper used in {@link #testCardinality} */ public static String cardinalityXpath(String key, int cardinality) { - return XPRE + "lst[@name='stats_fields']/lst[@name='" + key + - "']/long[@name='cardinality'][.='"+cardinality+"']"; + return XPRE + + "lst[@name='stats_fields']/lst[@name='" + + key + + "']/long[@name='cardinality'][.='" + + 
cardinality + + "']"; } - /** @see #testHllOptions */ + /** + * @see #testHllOptions + */ public void testCardinality() throws Exception { SolrCore core = h.getCore(); // insure we have the same hasher a_l would use - HashFunction hasher = HllOptions.parseHllOptions - (params("cardinality","true"), core.getLatestSchema().getField("a_l")).getHasher(); - - String[] baseParams = new String[] { "q","*:*", "stats","true", "indent","true", "rows","0" }; - assertQ("empty cardinalities" - , req(params("stats.field","{!key=a cardinality=true}a_l", - "stats.field","{!key=pa cardinality=true}prehashed_a_l", - "stats.field","{!key=b cardinality=true}b_l", - "stats.field","{!key=c cardinality=true}c_l"), - baseParams) - , cardinalityXpath("a", 0) - , cardinalityXpath("pa", 0) - , cardinalityXpath("b", 0) - , cardinalityXpath("c", 0) - ); + HashFunction hasher = + HllOptions.parseHllOptions( + params("cardinality", "true"), core.getLatestSchema().getField("a_l")) + .getHasher(); + + String[] baseParams = new String[] {"q", "*:*", "stats", "true", "indent", "true", "rows", "0"}; + assertQ( + "empty cardinalities", + req( + params( + "stats.field", "{!key=a cardinality=true}a_l", + "stats.field", "{!key=pa cardinality=true}prehashed_a_l", + "stats.field", "{!key=b cardinality=true}b_l", + "stats.field", "{!key=c cardinality=true}c_l"), + baseParams), + cardinalityXpath("a", 0), + cardinalityXpath("pa", 0), + cardinalityXpath("b", 0), + cardinalityXpath("c", 0)); int id = 0; // add trivial docs to test basic cardinality for (int i = 0; i < 100; i++) { // add the same values multiple times (diff docs) - for (int j =0; j < 5; j++) { + for (int j = 0; j < 5; j++) { ++id; - assertU(adoc("id", ""+id, - "a_l", ""+i, "prehashed_a_l", ""+hasher.hashLong((long)i).asLong(), - "b_l", ""+(i % 7), "c_l", ""+id)); + assertU( + adoc( + "id", + "" + id, + "a_l", + "" + i, + "prehashed_a_l", + "" + hasher.hashLong((long) i).asLong(), + "b_l", + "" + (i % 7), + "c_l", + "" + id)); } } assertU(commit()); - assertQ("various cardinalities" - , req(params("stats.field","{!key=a cardinality=true}a_l", - "stats.field","{!key=pa hllPreHashed=true cardinality=true}prehashed_a_l", - "stats.field","{!key=b cardinality=true}b_l", - "stats.field","{!key=c cardinality=true}c_l"), - baseParams) - , cardinalityXpath("a", 100) - , cardinalityXpath("pa", 100) - , cardinalityXpath("b", 7) - , cardinalityXpath("c", 500) - ); - + assertQ( + "various cardinalities", + req( + params( + "stats.field", "{!key=a cardinality=true}a_l", + "stats.field", "{!key=pa hllPreHashed=true cardinality=true}prehashed_a_l", + "stats.field", "{!key=b cardinality=true}b_l", + "stats.field", "{!key=c cardinality=true}c_l"), + baseParams), + cardinalityXpath("a", 100), + cardinalityXpath("pa", 100), + cardinalityXpath("b", 7), + cardinalityXpath("c", 500)); + // various ways of explicitly saying "don't bother to compute cardinality" - for (SolrParams p : new SolrParams[] { - params("stats.field","{!key=a min=true cardinality=false}a_l"), - params("stats.field","{!key=a min=true cardinality=$doit}a_l", "doit", "false"), - params("stats.field","{!key=a min=true cardinality=$doit}a_l"), // missing doit param - // other tunning options shouldn't change things - params("stats.field","{!key=a min=true hllPreHashed=true cardinality=false}a_l"), - params("stats.field","{!key=a min=true hllRegwidth=4 cardinality=$doit}a_l", "doit", "false"), - params("stats.field","{!key=a min=true hllLog2m=18 cardinality=$doit}a_l"), // missing doit param - }) { - assertQ("min 
w/cardinality explicitly disabled", req(p, baseParams), - "count(//lst[@name='stats_fields']/lst[@name='a']/double[@name='min'])=1", - "count(//lst[@name='stats_fields']/lst[@name='a']/long[@name='cardinality'])=0"); + for (SolrParams p : + new SolrParams[] { + params("stats.field", "{!key=a min=true cardinality=false}a_l"), + params("stats.field", "{!key=a min=true cardinality=$doit}a_l", "doit", "false"), + params("stats.field", "{!key=a min=true cardinality=$doit}a_l"), // missing doit param + // other tunning options shouldn't change things + params("stats.field", "{!key=a min=true hllPreHashed=true cardinality=false}a_l"), + params( + "stats.field", + "{!key=a min=true hllRegwidth=4 cardinality=$doit}a_l", + "doit", + "false"), + params( + "stats.field", + "{!key=a min=true hllLog2m=18 cardinality=$doit}a_l"), // missing doit param + }) { + assertQ( + "min w/cardinality explicitly disabled", + req(p, baseParams), + "count(//lst[@name='stats_fields']/lst[@name='a']/double[@name='min'])=1", + "count(//lst[@name='stats_fields']/lst[@name='a']/long[@name='cardinality'])=0"); } } /** * whitebox test that HLL Option parsing does the right thing - * @see #testCardinality + * + * @see #testCardinality * @see #testHllOptionsErrors */ public void testHllOptions() throws Exception { @@ -1701,40 +2034,50 @@ public void testHllOptions() throws Exception { // simple cases that shouldn't use HLL assertNull(HllOptions.parseHllOptions(params(), field_l)); - assertNull(HllOptions.parseHllOptions(params("cardinality","false"), field_l)); + assertNull(HllOptions.parseHllOptions(params("cardinality", "false"), field_l)); // sanity check, future proof against the HLL library changing stuff on us - assertEquals("HLL Changed definition min for log2m, " + - "need to note in upgrade instructions and maybe adjust accuracy hueristic", - 4, HLL.MINIMUM_LOG2M_PARAM); + assertEquals( + "HLL Changed definition min for log2m, " + + "need to note in upgrade instructions and maybe adjust accuracy hueristic", + 4, + HLL.MINIMUM_LOG2M_PARAM); // NOTE: https://github.com/aggregateknowledge/java-hll/issues/14 - assertEquals("HLL Changed definition max for log2m, " + - "need to note in upgrade instructions and maybe adjust accuracy hueristic", - 30, HLL.MAXIMUM_LOG2M_PARAM); - assertEquals("HLL Changed definition min for regwidth, " + - "need to note in upgrade instructions and probably adjust hueristic", - 1, HLL.MINIMUM_REGWIDTH_PARAM); - assertEquals("HLL Changed definition max for regwidth, " + - "need to note in upgrade instructions and probably adjust hueristic", - 8, HLL.MAXIMUM_REGWIDTH_PARAM); + assertEquals( + "HLL Changed definition max for log2m, " + + "need to note in upgrade instructions and maybe adjust accuracy hueristic", + 30, + HLL.MAXIMUM_LOG2M_PARAM); + assertEquals( + "HLL Changed definition min for regwidth, " + + "need to note in upgrade instructions and probably adjust hueristic", + 1, + HLL.MINIMUM_REGWIDTH_PARAM); + assertEquals( + "HLL Changed definition max for regwidth, " + + "need to note in upgrade instructions and probably adjust hueristic", + 8, + HLL.MAXIMUM_REGWIDTH_PARAM); // all of these should produce equivalent HLLOptions (Long, Double, or String using defaults) - SolrParams[] longDefaultParams = new SolrParams[] { - // basic usage - params("cardinality","true"), - params("cardinality","0.33"), - - // expert level options - params("cardinality","true", "hllLog2m","13"), - params("cardinality","true", "hllRegwidth","6"), - params("cardinality","true", "hllPreHash","false"), - 
params("cardinality","true", "hllLog2m","13", "hllRegwidth","6", "hllPreHash", "false"), - - // explicit hllLog2M should override numeric arg - params("cardinality","1.0", "hllLog2m","13", "hllRegwidth","6"), - params("cardinality","0.0", "hllLog2m","13", "hllRegwidth","6", "hllPreHash","false") - }; - for (SchemaField field : new SchemaField[] { field_l, field_d, field_dt, field_s }) { + SolrParams[] longDefaultParams = + new SolrParams[] { + // basic usage + params("cardinality", "true"), + params("cardinality", "0.33"), + + // expert level options + params("cardinality", "true", "hllLog2m", "13"), + params("cardinality", "true", "hllRegwidth", "6"), + params("cardinality", "true", "hllPreHash", "false"), + params( + "cardinality", "true", "hllLog2m", "13", "hllRegwidth", "6", "hllPreHash", "false"), + + // explicit hllLog2M should override numeric arg + params("cardinality", "1.0", "hllLog2m", "13", "hllRegwidth", "6"), + params("cardinality", "0.0", "hllLog2m", "13", "hllRegwidth", "6", "hllPreHash", "false") + }; + for (SchemaField field : new SchemaField[] {field_l, field_d, field_dt, field_s}) { final String f = field.getName(); for (SolrParams p : longDefaultParams) { HllOptions opts = HllOptions.parseHllOptions(p, field); @@ -1744,33 +2087,34 @@ public void testHllOptions() throws Exception { } // non defaults: lower/upper accuracy bounds should give min/max log2m & adjusted regwidth - HllOptions optsMin = HllOptions.parseHllOptions(params("cardinality","0"), field); + HllOptions optsMin = HllOptions.parseHllOptions(params("cardinality", "0"), field); assertEquals(f + " min log2m", HLL.MINIMUM_LOG2M_PARAM, optsMin.getLog2m()); assertEquals(f + " min regwidth", 5, optsMin.getRegwidth()); // lowest hueristic for 64bit - HllOptions optsMax = HllOptions.parseHllOptions(params("cardinality","1"), field); + HllOptions optsMax = HllOptions.parseHllOptions(params("cardinality", "1"), field); assertEquals(f + " max log2m", HLL.MAXIMUM_LOG2M_PARAM, optsMax.getLog2m()); assertEquals(f + " max regwidth", HLL.MAXIMUM_REGWIDTH_PARAM, optsMax.getRegwidth()); - } // all of these should produce equivalent HLLOptions (Int, Float, or ValueSource using defaults) - SolrParams[] intDefaultParams = new SolrParams[] { - // basic usage - params("cardinality","true"), - params("cardinality","0.33"), - - // expert level options - params("cardinality","true", "hllLog2m","13"), - params("cardinality","true", "hllRegwidth","5"), - params("cardinality","true", "hllPreHash","false"), - params("cardinality","true", "hllLog2m","13", "hllRegwidth","5", "hllPreHash", "false"), - - // explicit hllLog2M & hllRegwidth should override hueristic float arg - params("cardinality","1.0", "hllLog2m","13", "hllRegwidth","5"), - params("cardinality","0.0", "hllLog2m","13", "hllRegwidth","5", "hllPreHash","false") - }; - for (SchemaField field : new SchemaField[] { field_i, field_f, field_severity, null }) { + SolrParams[] intDefaultParams = + new SolrParams[] { + // basic usage + params("cardinality", "true"), + params("cardinality", "0.33"), + + // expert level options + params("cardinality", "true", "hllLog2m", "13"), + params("cardinality", "true", "hllRegwidth", "5"), + params("cardinality", "true", "hllPreHash", "false"), + params( + "cardinality", "true", "hllLog2m", "13", "hllRegwidth", "5", "hllPreHash", "false"), + + // explicit hllLog2M & hllRegwidth should override hueristic float arg + params("cardinality", "1.0", "hllLog2m", "13", "hllRegwidth", "5"), + params("cardinality", "0.0", "hllLog2m", "13", "hllRegwidth", 
"5", "hllPreHash", "false") + }; + for (SchemaField field : new SchemaField[] {field_i, field_f, field_severity, null}) { final String f = null == field ? "(func)" : field.getName(); for (SolrParams p : intDefaultParams) { HllOptions opts = HllOptions.parseHllOptions(p, field); @@ -1780,122 +2124,160 @@ public void testHllOptions() throws Exception { } // non defaults: lower/upper accuracy bounds should give min/max log2m & adjusted regwidth - HllOptions optsMin = HllOptions.parseHllOptions(params("cardinality","0"), field); + HllOptions optsMin = HllOptions.parseHllOptions(params("cardinality", "0"), field); assertEquals(f + " min log2m", HLL.MINIMUM_LOG2M_PARAM, optsMin.getLog2m()); assertEquals(f + " min regwidth", 4, optsMin.getRegwidth()); // lowest hueristic for 32bit - HllOptions optsMax = HllOptions.parseHllOptions(params("cardinality","1"), field); + HllOptions optsMax = HllOptions.parseHllOptions(params("cardinality", "1"), field); assertEquals(f + " max log2m", HLL.MAXIMUM_LOG2M_PARAM, optsMax.getLog2m()); assertEquals(f + " max regwidth", HLL.MAXIMUM_REGWIDTH_PARAM, optsMax.getRegwidth()); - } // basic pre-hashed arg check specifically for long fields - assertNotNull(HllOptions.parseHllOptions(params("cardinality","true"), field_l).getHasher()); - assertNotNull(HllOptions.parseHllOptions(params("cardinality","true", "hllPreHashed", "false"), - field_l).getHasher()); - assertNull(HllOptions.parseHllOptions(params("cardinality","true", "hllPreHashed", "true"), - field_l).getHasher()); - + assertNotNull(HllOptions.parseHllOptions(params("cardinality", "true"), field_l).getHasher()); + assertNotNull( + HllOptions.parseHllOptions(params("cardinality", "true", "hllPreHashed", "false"), field_l) + .getHasher()); + assertNull( + HllOptions.parseHllOptions(params("cardinality", "true", "hllPreHashed", "true"), field_l) + .getHasher()); } /** * Test user input errors (split into its own test to isolate ignored exceptions) - * @see #testCardinality + * + * @see #testCardinality * @see #testHllOptions */ public void testHllOptionsErrors() throws Exception { - String[] baseParams = new String[] { "q","*:*", "stats","true", "indent","true", "rows","0" }; + String[] baseParams = new String[] {"q", "*:*", "stats", "true", "indent", "true", "rows", "0"}; SolrCore core = h.getCore(); SchemaField foo_s = core.getLatestSchema().getField("foo_s"); SchemaField foo_i = core.getLatestSchema().getField("foo_i"); ignoreException("hllPreHashed"); - for (SchemaField field : new SchemaField[] { foo_s, foo_i }) { + for (SchemaField field : new SchemaField[] {foo_s, foo_i}) { // whitebox - field - SolrException ex = expectThrows(SolrException.class, () -> { - HllOptions.parseHllOptions(params("cardinality","true", "hllPreHashed", "true"), field); - }); - assertTrue("MSG: " + ex.getMessage(), + SolrException ex = + expectThrows( + SolrException.class, + () -> { + HllOptions.parseHllOptions( + params("cardinality", "true", "hllPreHashed", "true"), field); + }); + assertTrue( + "MSG: " + ex.getMessage(), ex.getMessage().contains("hllPreHashed is only supported with Long")); // blackbox - field - assertQEx("hllPreHashed " + field.getName(), "hllPreHashed is only supported with Long", - req(params("stats.field","{!cardinality=true hllPreHashed=true}" + field.getName()), - baseParams), - ErrorCode.BAD_REQUEST); + assertQEx( + "hllPreHashed " + field.getName(), + "hllPreHashed is only supported with Long", + req( + params("stats.field", "{!cardinality=true hllPreHashed=true}" + field.getName()), + baseParams), + 
ErrorCode.BAD_REQUEST); } // whitebox - function - SolrException ex = expectThrows(SolrException.class, () -> { - HllOptions.parseHllOptions(params("cardinality","true", "hllPreHashed", "true"), null); - }); - assertTrue("MSG: " + ex.getMessage(), + SolrException ex = + expectThrows( + SolrException.class, + () -> { + HllOptions.parseHllOptions( + params("cardinality", "true", "hllPreHashed", "true"), null); + }); + assertTrue( + "MSG: " + ex.getMessage(), ex.getMessage().contains("hllPreHashed is only supported with Long")); // blackbox - function - assertQEx("hllPreHashed function", "hllPreHashed is only supported with Long", - req(params("stats.field","{!func cardinality=true hllPreHashed=true}sum(foo_i,foo_l)"), - baseParams), - ErrorCode.BAD_REQUEST); - + assertQEx( + "hllPreHashed function", + "hllPreHashed is only supported with Long", + req( + params("stats.field", "{!func cardinality=true hllPreHashed=true}sum(foo_i,foo_l)"), + baseParams), + ErrorCode.BAD_REQUEST); ignoreException("accuracy"); - for (String invalid : new String[] { "-1", "1.1", "100" }) { + for (String invalid : new String[] {"-1", "1.1", "100"}) { // whitebox - ex = expectThrows(SolrException.class, () -> { - HllOptions.parseHllOptions(params("cardinality",invalid), foo_s); - }); + ex = + expectThrows( + SolrException.class, + () -> { + HllOptions.parseHllOptions(params("cardinality", invalid), foo_s); + }); assertTrue("MSG: " + ex.getMessage(), ex.getMessage().contains("number between 0 and 1")); // blackbox - assertQEx("cardinality="+invalid, "number between 0 and 1", - req(params("stats.field","{!cardinality="+invalid+"}foo_s"), - baseParams), - ErrorCode.BAD_REQUEST); + assertQEx( + "cardinality=" + invalid, + "number between 0 and 1", + req(params("stats.field", "{!cardinality=" + invalid + "}foo_s"), baseParams), + ErrorCode.BAD_REQUEST); } - + ignoreException("hllLog2m must be"); - for (int invalid : new int[] { HLL.MINIMUM_LOG2M_PARAM-1, HLL.MAXIMUM_LOG2M_PARAM+11 }) { + for (int invalid : new int[] {HLL.MINIMUM_LOG2M_PARAM - 1, HLL.MAXIMUM_LOG2M_PARAM + 11}) { // whitebox - ex = expectThrows(SolrException.class, () -> { - HllOptions.parseHllOptions(params("cardinality","true", "hllLog2m", ""+invalid), foo_s); - }); + ex = + expectThrows( + SolrException.class, + () -> { + HllOptions.parseHllOptions( + params("cardinality", "true", "hllLog2m", "" + invalid), foo_s); + }); assertTrue("MSG: " + ex.getMessage(), ex.getMessage().contains("hllLog2m must be")); // blackbox - assertQEx("hllLog2m="+invalid, "hllLog2m must be", - req(params("stats.field","{!cardinality=true hllLog2m="+invalid+"}foo_s"), - baseParams), - ErrorCode.BAD_REQUEST); + assertQEx( + "hllLog2m=" + invalid, + "hllLog2m must be", + req( + params("stats.field", "{!cardinality=true hllLog2m=" + invalid + "}foo_s"), + baseParams), + ErrorCode.BAD_REQUEST); } ignoreException("hllRegwidth must be"); - for (int invalid : new int[] { HLL.MINIMUM_REGWIDTH_PARAM-1, HLL.MAXIMUM_REGWIDTH_PARAM+1 }) { + for (int invalid : new int[] {HLL.MINIMUM_REGWIDTH_PARAM - 1, HLL.MAXIMUM_REGWIDTH_PARAM + 1}) { // whitebox - ex = expectThrows(SolrException.class, () -> { - HllOptions.parseHllOptions(params("cardinality","true", - "hllRegwidth", ""+invalid), foo_s); - }); - assertTrue("MSG: " + ex.getMessage(), - ex.getMessage().contains("hllRegwidth must be")); + ex = + expectThrows( + SolrException.class, + () -> { + HllOptions.parseHllOptions( + params("cardinality", "true", "hllRegwidth", "" + invalid), foo_s); + }); + assertTrue("MSG: " + ex.getMessage(), 
ex.getMessage().contains("hllRegwidth must be")); // blackbox - assertQEx("hllRegwidth="+invalid, "hllRegwidth must be", - req(params("stats.field","{!cardinality=true hllRegwidth="+invalid+"}foo_s"), - baseParams), - ErrorCode.BAD_REQUEST); + assertQEx( + "hllRegwidth=" + invalid, + "hllRegwidth must be", + req( + params("stats.field", "{!cardinality=true hllRegwidth=" + invalid + "}foo_s"), + baseParams), + ErrorCode.BAD_REQUEST); } } // simple percentiles test public void testPercentiles() throws Exception { - + // NOTE: deliberately not in numeric order String percentiles = "10.0,99.9,1.0,2.0,20.0,30.0,40.0,50.0,60.0,70.0,80.0,98.0,99.0"; - List percentilesList = StrUtils.splitSmart(percentiles, ','); - + List percentilesList = StrUtils.splitSmart(percentiles, ','); + // test empty case - try (SolrQueryRequest query = req("q", "*:*", "stats", "true", "stats.field", - "{!percentiles='" + percentiles + "'}stat_f")) { + try (SolrQueryRequest query = + req( + "q", + "*:*", + "stats", + "true", + "stats.field", + "{!percentiles='" + percentiles + "'}stat_f")) { SolrQueryResponse rsp = h.queryAndResponse(null, query); NamedList pout = extractPercentils(rsp, "stat_f"); for (int i = 0; i < percentilesList.size(); i++) { @@ -1904,82 +2286,100 @@ public void testPercentiles() throws Exception { assertNull(pout.getVal(i)); } } - + int id = 0; // add trivial docs to test basic percentiles for (int i = 0; i < 100; i++) { // add the same values multiple times (diff docs) - for (int j =0; j < 5; j++) { - assertU(adoc("id", ++id+"", "stat_f", ""+i)); + for (int j = 0; j < 5; j++) { + assertU(adoc("id", ++id + "", "stat_f", "" + i)); } } assertU(commit()); - try (SolrQueryRequest query = req("q", "*:*", "stats", "true", - "stats.field", "{!percentiles='" + percentiles + "'}stat_f")) { + try (SolrQueryRequest query = + req( + "q", + "*:*", + "stats", + "true", + "stats.field", + "{!percentiles='" + percentiles + "'}stat_f")) { SolrQueryResponse rsp = h.queryAndResponse(null, query); NamedList pout = extractPercentils(rsp, "stat_f"); - for (int i = 0; i < percentilesList.size(); i++) { + for (int i = 0; i < percentilesList.size(); i++) { String p = percentilesList.get(i); assertEquals(p, pout.getName(i)); assertEquals(Double.parseDouble(p), pout.getVal(i), 1.0D); - } } - + // test request for no percentiles - try (SolrQueryRequest query = req("q", "*:*", "stats", "true", - "stats.field", "{!percentiles=''}stat_f")) { + try (SolrQueryRequest query = + req("q", "*:*", "stats", "true", "stats.field", "{!percentiles=''}stat_f")) { SolrQueryResponse rsp = h.queryAndResponse(null, query); NamedList pout = extractPercentils(rsp, "stat_f"); assertNull(pout); } // non-numeric types don't support percentiles - assertU(adoc("id", ++id+"", "stat_dt", "1999-05-03T04:55:01Z")); - assertU(adoc("id", ++id+"", "stat_s", "cow")); - + assertU(adoc("id", ++id + "", "stat_dt", "1999-05-03T04:55:01Z")); + assertU(adoc("id", ++id + "", "stat_s", "cow")); + assertU(commit()); - try (SolrQueryRequest query = req("q", "*:*", "stats", "true", - "stats.field", "{!percentiles='" + percentiles + "'}stat_dt", - "stats.field", "{!percentiles='" + percentiles + "'}stat_s")) { + try (SolrQueryRequest query = + req( + "q", + "*:*", + "stats", + "true", + "stats.field", + "{!percentiles='" + percentiles + "'}stat_dt", + "stats.field", + "{!percentiles='" + percentiles + "'}stat_s")) { SolrQueryResponse rsp = h.queryAndResponse(null, query); assertNull(extractPercentils(rsp, "stat_dt")); assertNull(extractPercentils(rsp, "stat_s")); } - } 
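// Editor's aside (hedged sketch, not part of this reformatting patch): the
// extractPercentils() helper below unwraps the nested stats response in a
// single cast; the same walk can be done step by step, assuming the response
// layout this test already asserts on:
//
//   NamedList<?> stats = (NamedList<?>) rsp.getValues().get("stats");
//   NamedList<?> statsFields = (NamedList<?>) stats.get("stats_fields");
//   NamedList<?> fieldStats = (NamedList<?>) statsFields.get("stat_f");
//   NamedList<?> percentiles = (NamedList<?>) fieldStats.get("percentiles");
//   // keys are the requested percentile labels (e.g. "99.9"), values are doubles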
@SuppressWarnings({"unchecked"}) private NamedList extractPercentils(SolrQueryResponse rsp, String key) { - return ((NamedList>>> ) - rsp.getValues().get("stats")).get("stats_fields").get(key).get("percentiles"); + return ((NamedList>>>) rsp.getValues().get("stats")) + .get("stats_fields") + .get(key) + .get("percentiles"); } - /** - * given a comboSize and an EnumSet of Stats, generates iterators that produce every possible - * enum combination of that size + /** + * given a comboSize and an EnumSet of Stats, generates iterators that produce every possible enum + * combination of that size */ public static final class StatSetCombinations implements Iterable> { // we need an array so we can do fixed index offset lookups private final Stat[] all; private final Combinations intCombos; + public StatSetCombinations(int comboSize, EnumSet universe) { // NOTE: should not need to sort, EnumSet uses natural ordering all = universe.toArray(new Stat[universe.size()]); intCombos = new Combinations(all.length, comboSize); } + public Iterator> iterator() { return new Iterator>() { final Iterator wrapped = intCombos.iterator(); + public void remove() { wrapped.remove(); } + public boolean hasNext() { return wrapped.hasNext(); } + public EnumSet next() { EnumSet result = EnumSet.noneOf(Stat.class); int[] indexes = wrapped.next(); @@ -1991,5 +2391,4 @@ public EnumSet next() { }; } } - } diff --git a/solr/core/src/test/org/apache/solr/handler/component/SuggestComponentContextFilterQueryTest.java b/solr/core/src/test/org/apache/solr/handler/component/SuggestComponentContextFilterQueryTest.java index 12893921a28..e12bbae05a1 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/SuggestComponentContextFilterQueryTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/SuggestComponentContextFilterQueryTest.java @@ -16,14 +16,14 @@ */ package org.apache.solr.handler.component; +import static org.hamcrest.core.Is.is; + import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.spelling.suggest.SuggesterParams; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; -import static org.hamcrest.core.Is.is; - public class SuggestComponentContextFilterQueryTest extends SolrTestCaseJ4 { static String rh = "/suggest"; @@ -39,210 +39,379 @@ public void setUp() throws Exception { assertU(delQ("*:*")); // id, cat, price, weight, contexts - assertU(adoc("id", "0", "cat", "This is a title", "price", "5", "weight", "10", "my_contexts_t", "ctx1")); - assertU(adoc("id", "1", "cat", "This is another title", "price", "10", "weight", "10", "my_contexts_t", "ctx1")); - assertU(adoc("id", "7", "cat", "example with ctx1 at 40", "price", "40", "weight", "30", "my_contexts_t", "ctx1")); - assertU(adoc("id", "8", "cat", "example with ctx2 and ctx3 at 45", "price", "45", "weight", "30", "my_contexts_t", "CTX2", "my_contexts_t", "CTX3")); - assertU(adoc("id", "9", "cat", "example with ctx4 at 50 using my_contexts_s", "price", "50", "weight", "40", "my_contexts_s", "ctx4")); + assertU( + adoc( + "id", + "0", + "cat", + "This is a title", + "price", + "5", + "weight", + "10", + "my_contexts_t", + "ctx1")); + assertU( + adoc( + "id", + "1", + "cat", + "This is another title", + "price", + "10", + "weight", + "10", + "my_contexts_t", + "ctx1")); + assertU( + adoc( + "id", + "7", + "cat", + "example with ctx1 at 40", + "price", + "40", + "weight", + "30", + "my_contexts_t", + "ctx1")); + assertU( + adoc( + "id", + "8", + "cat", + "example with ctx2 and ctx3 at 45", + "price", + "45", + 
"weight", + "30", + "my_contexts_t", + "CTX2", + "my_contexts_t", + "CTX3")); + assertU( + adoc( + "id", + "9", + "cat", + "example with ctx4 at 50 using my_contexts_s", + "price", + "50", + "weight", + "40", + "my_contexts_s", + "ctx4")); assertU((commit())); waitForWarming(); } @Test public void testContextFilterParamIsIgnoredWhenContextIsNotImplemented() throws Exception { - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_DICT, "suggest_lookup_has_no_context_implementation", - SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, "ctx1", - SuggesterParams.SUGGEST_Q, "examp"), + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_DICT, + "suggest_lookup_has_no_context_implementation", + SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, + "ctx1", + SuggesterParams.SUGGEST_Q, + "examp"), "//lst[@name='suggest']/lst[@name='suggest_lookup_has_no_context_implementation']/lst[@name='examp']/int[@name='numFound'][.='3']", "//lst[@name='suggest']/lst[@name='suggest_lookup_has_no_context_implementation']/lst[@name='examp']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example with ctx4 at 50 using my_contexts_s']", "//lst[@name='suggest']/lst[@name='suggest_lookup_has_no_context_implementation']/lst[@name='examp']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='example with ctx2 and ctx3 at 45']", - "//lst[@name='suggest']/lst[@name='suggest_lookup_has_no_context_implementation']/lst[@name='examp']/arr[@name='suggestions']/lst[3]/str[@name='term'][.='example with ctx1 at 40']" - ); + "//lst[@name='suggest']/lst[@name='suggest_lookup_has_no_context_implementation']/lst[@name='examp']/arr[@name='suggestions']/lst[3]/str[@name='term'][.='example with ctx1 at 40']"); } - @Test - public void testContextFilteringIsIgnoredWhenContextIsImplementedButNotConfigured() throws Exception { - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_DICT, "suggest_context_implemented_but_not_configured", - SuggesterParams.SUGGEST_Q, "examp"), + public void testContextFilteringIsIgnoredWhenContextIsImplementedButNotConfigured() + throws Exception { + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_DICT, + "suggest_context_implemented_but_not_configured", + SuggesterParams.SUGGEST_Q, + "examp"), "//lst[@name='suggest']/lst[@name='suggest_context_implemented_but_not_configured']/lst[@name='examp']/int[@name='numFound'][.='3']", "//lst[@name='suggest']/lst[@name='suggest_context_implemented_but_not_configured']/lst[@name='examp']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example with ctx4 at 50 using my_contexts_s']", "//lst[@name='suggest']/lst[@name='suggest_context_implemented_but_not_configured']/lst[@name='examp']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='example with ctx2 and ctx3 at 45']", - "//lst[@name='suggest']/lst[@name='suggest_context_implemented_but_not_configured']/lst[@name='examp']/arr[@name='suggestions']/lst[3]/str[@name='term'][.='example with ctx1 at 40']" - ); + "//lst[@name='suggest']/lst[@name='suggest_context_implemented_but_not_configured']/lst[@name='examp']/arr[@name='suggestions']/lst[3]/str[@name='term'][.='example with ctx1 at 40']"); } @Test - public void testBuildThrowsIllegalArgumentExceptionWhenContextIsConfiguredButNotImplemented() throws Exception { - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> { - h.query(req("qt", rh, SuggesterParams.SUGGEST_BUILD, 
"true", - SuggesterParams.SUGGEST_DICT, "suggest_context_filtering_not_implemented", - SuggesterParams.SUGGEST_Q, "examp")); - }); + public void testBuildThrowsIllegalArgumentExceptionWhenContextIsConfiguredButNotImplemented() + throws Exception { + IllegalArgumentException ex = + expectThrows( + IllegalArgumentException.class, + () -> { + h.query( + req( + "qt", + rh, + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_DICT, + "suggest_context_filtering_not_implemented", + SuggesterParams.SUGGEST_Q, + "examp")); + }); assertThat(ex.getMessage(), is("this suggester doesn't support contexts")); // When not building, no exception is thrown - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD, "false", - SuggesterParams.SUGGEST_DICT, "suggest_context_filtering_not_implemented", - SuggesterParams.SUGGEST_Q, "examp"), - "//lst[@name='suggest']/lst[@name='suggest_context_filtering_not_implemented']/lst[@name='examp']/int[@name='numFound'][.='0']" - ); + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_BUILD, + "false", + SuggesterParams.SUGGEST_DICT, + "suggest_context_filtering_not_implemented", + SuggesterParams.SUGGEST_Q, + "examp"), + "//lst[@name='suggest']/lst[@name='suggest_context_filtering_not_implemented']/lst[@name='examp']/int[@name='numFound'][.='0']"); } - @Test public void testContextFilterIsTrimmed() throws Exception { - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_DICT, "suggest_blended_infix_suggester", - SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, " ", //trimmed to null... just as if there was no context filter param - SuggesterParams.SUGGEST_Q, "examp"), - "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/int[@name='numFound'][.='3']" - ); + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_DICT, + "suggest_blended_infix_suggester", + SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, + " ", // trimmed to null... 
just as if there was no context filter param + SuggesterParams.SUGGEST_Q, + "examp"), + "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/int[@name='numFound'][.='3']"); } public void testExplicitFieldedQuery() throws Exception { - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_DICT, "suggest_blended_infix_suggester", - SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, "contexts:ctx1", - SuggesterParams.SUGGEST_Q, "examp"), + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_DICT, + "suggest_blended_infix_suggester", + SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, + "contexts:ctx1", + SuggesterParams.SUGGEST_Q, + "examp"), "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/int[@name='numFound'][.='1']", - "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example with ctx1 at 40']" - ); + "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example with ctx1 at 40']"); } public void testContextFilterOK() throws Exception { - //No filtering - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_DICT, "suggest_blended_infix_suggester", - SuggesterParams.SUGGEST_Q, "examp"), + // No filtering + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_DICT, + "suggest_blended_infix_suggester", + SuggesterParams.SUGGEST_Q, + "examp"), "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/int[@name='numFound'][.='3']", "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example with ctx4 at 50 using my_contexts_s']", "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='example with ctx2 and ctx3 at 45']", - "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/arr[@name='suggestions']/lst[3]/str[@name='term'][.='example with ctx1 at 40']" - ); - - //TermQuery - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_DICT, "suggest_blended_infix_suggester", - SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, "ctx1", - SuggesterParams.SUGGEST_Q, "examp"), + "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/arr[@name='suggestions']/lst[3]/str[@name='term'][.='example with ctx1 at 40']"); + + // TermQuery + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_DICT, + "suggest_blended_infix_suggester", + SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, + "ctx1", + SuggesterParams.SUGGEST_Q, + "examp"), "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/int[@name='numFound'][.='1']", - "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example with ctx1 at 40']" - ); - - //OR BooleanQuery - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_DICT, "suggest_blended_infix_suggester", - SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, "ctx1 OR CTX2", - SuggesterParams.SUGGEST_Q, "examp"), 
+ "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example with ctx1 at 40']"); + + // OR BooleanQuery + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_DICT, + "suggest_blended_infix_suggester", + SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, + "ctx1 OR CTX2", + SuggesterParams.SUGGEST_Q, + "examp"), "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/int[@name='numFound'][.='2']", "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example with ctx2 and ctx3 at 45']", - "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='example with ctx1 at 40']" - ); - - //AND BooleanQuery - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_DICT, "suggest_blended_infix_suggester", - SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, "CTX2 AND CTX3", - SuggesterParams.SUGGEST_Q, "examp"), + "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='example with ctx1 at 40']"); + + // AND BooleanQuery + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_DICT, + "suggest_blended_infix_suggester", + SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, + "CTX2 AND CTX3", + SuggesterParams.SUGGEST_Q, + "examp"), "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/int[@name='numFound'][.='1']", "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example with ctx2 and ctx3 at 45']"); - - //PrefixQuery - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_DICT, "suggest_blended_infix_suggester", - SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, "ctx*", - SuggesterParams.SUGGEST_Q, "examp"), + // PrefixQuery + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_DICT, + "suggest_blended_infix_suggester", + SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, + "ctx*", + SuggesterParams.SUGGEST_Q, + "examp"), "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/int[@name='numFound'][.='1']", - "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example with ctx1 at 40']" - ); - - //RangeQuery - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_DICT, "suggest_blended_infix_suggester", - SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, "[* TO *]", - SuggesterParams.SUGGEST_Q, "examp"), + "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example with ctx1 at 40']"); + + // RangeQuery + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_DICT, + "suggest_blended_infix_suggester", + SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, + "[* TO *]", + SuggesterParams.SUGGEST_Q, + "examp"), "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/int[@name='numFound'][.='2']", 
"//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example with ctx2 and ctx3 at 45']", - "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='example with ctx1 at 40']" - ); - - //WildcardQuery - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_DICT, "suggest_blended_infix_suggester", - SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, "c*1", - SuggesterParams.SUGGEST_Q, "examp"), + "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='example with ctx1 at 40']"); + + // WildcardQuery + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_DICT, + "suggest_blended_infix_suggester", + SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, + "c*1", + SuggesterParams.SUGGEST_Q, + "examp"), "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/int[@name='numFound'][.='1']", "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example with ctx1 at 40']"); } @Test - public void testStringContext(){ - //Here, the context field is a string, so it's case sensitive - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_DICT, "suggest_blended_infix_suggester_string", - SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, "Ctx4", - SuggesterParams.SUGGEST_Q, "examp"), + public void testStringContext() { + // Here, the context field is a string, so it's case sensitive + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_DICT, + "suggest_blended_infix_suggester_string", + SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, + "Ctx4", + SuggesterParams.SUGGEST_Q, + "examp"), "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester_string']/lst[@name='examp']/int[@name='numFound'][.='0']"); - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_DICT, "suggest_blended_infix_suggester_string", - SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, "ctx4", - SuggesterParams.SUGGEST_Q, "examp"), + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_DICT, + "suggest_blended_infix_suggester_string", + SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, + "ctx4", + SuggesterParams.SUGGEST_Q, + "examp"), "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester_string']/lst[@name='examp']/int[@name='numFound'][.='1']"); } @Test public void testContextFilterOnInvalidFieldGivesNoSuggestions() throws Exception { - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_DICT, "suggest_blended_infix_suggester", - SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, "some_invalid_context_field:some_invalid_value", - SuggesterParams.SUGGEST_Q, "examp"), + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_DICT, + "suggest_blended_infix_suggester", + SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, + "some_invalid_context_field:some_invalid_value", + SuggesterParams.SUGGEST_Q, + "examp"), "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/int[@name='numFound'][.='0']"); } - @Test public void 
testContextFilterUsesAnalyzer() throws Exception { - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_DICT, "suggest_blended_infix_suggester", - SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, "CTx1", // Will not match due to case - SuggesterParams.SUGGEST_Q, "examp"), + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_DICT, + "suggest_blended_infix_suggester", + SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, + "CTx1", // Will not match due to case + SuggesterParams.SUGGEST_Q, + "examp"), "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='examp']/int[@name='numFound'][.='0']"); } - @Ignore// TODO: SOLR-7964 + @Ignore // TODO: SOLR-7964 @Test public void testContextFilterWithHighlight() throws Exception { - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_DICT, "suggest_blended_infix_suggester", - SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, "ctx1", - SuggesterParams.SUGGEST_HIGHLIGHT, "true", - SuggesterParams.SUGGEST_Q, "example"), + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_DICT, + "suggest_blended_infix_suggester", + SuggesterParams.SUGGEST_CONTEXT_FILTER_QUERY, + "ctx1", + SuggesterParams.SUGGEST_HIGHLIGHT, + "true", + SuggesterParams.SUGGEST_Q, + "example"), "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='example']/int[@name='numFound'][.='1']", - "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='example']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example data']" - ); + "//lst[@name='suggest']/lst[@name='suggest_blended_infix_suggester']/lst[@name='example']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example data']"); } - } - diff --git a/solr/core/src/test/org/apache/solr/handler/component/SuggestComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/SuggestComponentTest.java index a36319029ab..7acb1274fde 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/SuggestComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/SuggestComponentTest.java @@ -25,7 +25,6 @@ import org.junit.BeforeClass; import org.junit.Test; - public class SuggestComponentTest extends SolrTestCaseJ4 { private static final String rh = "/suggest"; @@ -34,13 +33,13 @@ public class SuggestComponentTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-suggestercomponent.xml","schema.xml"); + initCore("solrconfig-suggestercomponent.xml", "schema.xml"); } - + @Override public void setUp() throws Exception { super.setUp(); - + // id, cat, price, weight assertU(adoc("id", "0", "cat", "This is a title", "price", "5", "weight", "10")); assertU(adoc("id", "1", "cat", "This is another title", "price", "10", "weight", "10")); @@ -56,7 +55,7 @@ public void setUp() throws Exception { assertU((commit())); waitForWarming(); } - + @Override public void tearDown() throws Exception { super.tearDown(); @@ -64,76 +63,108 @@ public void tearDown() throws Exception { assertU((commit())); waitForWarming(); // rebuild suggesters with empty index - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD_ALL, "true"), - "//str[@name='command'][.='buildAll']" - ); + assertQ( + req("qt", rh, SuggesterParams.SUGGEST_BUILD_ALL, "true"), + "//str[@name='command'][.='buildAll']"); } - + @Test public void testDocumentBased() throws 
Exception { - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, "suggest_fuzzy_doc_dict", - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_Q, "exampel", - SuggesterParams.SUGGEST_COUNT, "5"), + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + "suggest_fuzzy_doc_dict", + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_Q, + "exampel", + SuggesterParams.SUGGEST_COUNT, + "5"), "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='exampel']/int[@name='numFound'][.='2']", "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example inputdata']", "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[1]/long[@name='weight'][.='45']", "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='example data']", - "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[2]/long[@name='weight'][.='40']" - ); - - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, "suggest_fuzzy_doc_dict", - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_Q, "Rad", - SuggesterParams.SUGGEST_COUNT, "5"), + "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[2]/long[@name='weight'][.='40']"); + + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + "suggest_fuzzy_doc_dict", + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_Q, + "Rad", + SuggesterParams.SUGGEST_COUNT, + "5"), "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='Rad']/int[@name='numFound'][.='2']", "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='Rad']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='Rad fox']", "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='Rad']/arr[@name='suggestions']/lst[1]/long[@name='weight'][.='35']", "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='Rad']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='Red fox']", - "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='Rad']/arr[@name='suggestions']/lst[2]/long[@name='weight'][.='30']" - ); + "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='Rad']/arr[@name='suggestions']/lst[2]/long[@name='weight'][.='30']"); } - + @Test public void testExpressionBased() throws Exception { - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, "suggest_fuzzy_doc_expr_dict", - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_Q, "exampel", - SuggesterParams.SUGGEST_COUNT, "5"), + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + "suggest_fuzzy_doc_expr_dict", + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_Q, + "exampel", + SuggesterParams.SUGGEST_COUNT, + "5"), "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_expr_dict']/lst[@name='exampel']/int[@name='numFound'][.='2']", "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_expr_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example inputdata']", "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_expr_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[1]/long[@name='weight'][.='120']", 
"//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_expr_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='example data']", - "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_expr_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[2]/long[@name='weight'][.='110']" - ); + "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_expr_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[2]/long[@name='weight'][.='110']"); } - + @Test public void testFileBased() throws Exception { - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, "suggest_fuzzy_file_based", - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_Q, "chn", - SuggesterParams.SUGGEST_COUNT, "2"), + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + "suggest_fuzzy_file_based", + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_Q, + "chn", + SuggesterParams.SUGGEST_COUNT, + "2"), "//lst[@name='suggest']/lst[@name='suggest_fuzzy_file_based']/lst[@name='chn']/int[@name='numFound'][.='2']", "//lst[@name='suggest']/lst[@name='suggest_fuzzy_file_based']/lst[@name='chn']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='chance']", "//lst[@name='suggest']/lst[@name='suggest_fuzzy_file_based']/lst[@name='chn']/arr[@name='suggestions']/lst[1]/long[@name='weight'][.='1']", "//lst[@name='suggest']/lst[@name='suggest_fuzzy_file_based']/lst[@name='chn']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='change']", - "//lst[@name='suggest']/lst[@name='suggest_fuzzy_file_based']/lst[@name='chn']/arr[@name='suggestions']/lst[2]/long[@name='weight'][.='1']" - ); + "//lst[@name='suggest']/lst[@name='suggest_fuzzy_file_based']/lst[@name='chn']/arr[@name='suggestions']/lst[2]/long[@name='weight'][.='1']"); } + @Test public void testMultiSuggester() throws Exception { - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, "suggest_fuzzy_doc_dict", - SuggesterParams.SUGGEST_DICT, "suggest_fuzzy_doc_expr_dict", - SuggesterParams.SUGGEST_BUILD, "true", - SuggesterParams.SUGGEST_Q, "exampel", - SuggesterParams.SUGGEST_COUNT, "5"), + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + "suggest_fuzzy_doc_dict", + SuggesterParams.SUGGEST_DICT, + "suggest_fuzzy_doc_expr_dict", + SuggesterParams.SUGGEST_BUILD, + "true", + SuggesterParams.SUGGEST_Q, + "exampel", + SuggesterParams.SUGGEST_COUNT, + "5"), "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='exampel']/int[@name='numFound'][.='2']", "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example inputdata']", "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[1]/long[@name='weight'][.='45']", @@ -143,414 +174,651 @@ public void testMultiSuggester() throws Exception { "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_expr_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example inputdata']", "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_expr_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[1]/long[@name='weight'][.='120']", "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_expr_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='example data']", - "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_expr_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[2]/long[@name='weight'][.='110']" - ); + 
"//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_expr_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[2]/long[@name='weight'][.='110']"); } - + @Test public void testBuildAllSuggester() throws Exception { - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD_ALL, "true", - SuggesterParams.SUGGEST_Q, "exampel", - SuggesterParams.SUGGEST_COUNT, "5"), - "//str[@name='command'][.='buildAll']" - ); - - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_BUILD_ALL, "true"), - "//str[@name='command'][.='buildAll']" - ); + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_BUILD_ALL, + "true", + SuggesterParams.SUGGEST_Q, + "exampel", + SuggesterParams.SUGGEST_COUNT, + "5"), + "//str[@name='command'][.='buildAll']"); + + assertQ( + req("qt", rh, SuggesterParams.SUGGEST_BUILD_ALL, "true"), + "//str[@name='command'][.='buildAll']"); } - + @Test public void testReloadAllSuggester() throws Exception { - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_RELOAD_ALL, "true", - SuggesterParams.SUGGEST_Q, "exampel", - SuggesterParams.SUGGEST_COUNT, "5"), - "//str[@name='command'][.='reloadAll']" - ); - - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_RELOAD_ALL, "true"), - "//str[@name='command'][.='reloadAll']" - ); + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_RELOAD_ALL, + "true", + SuggesterParams.SUGGEST_Q, + "exampel", + SuggesterParams.SUGGEST_COUNT, + "5"), + "//str[@name='command'][.='reloadAll']"); + + assertQ( + req("qt", rh, SuggesterParams.SUGGEST_RELOAD_ALL, "true"), + "//str[@name='command'][.='reloadAll']"); } - + @Test public void testBadSuggesterName() throws Exception { String fakeSuggesterName = "does-not-exist"; - assertQEx("No suggester named " + fakeSuggesterName +" was configured", - req("qt", rh, - SuggesterParams.SUGGEST_DICT, fakeSuggesterName, - SuggesterParams.SUGGEST_Q, "exampel", - SuggesterParams.SUGGEST_COUNT, "5"), - SolrException.ErrorCode.BAD_REQUEST - ); - - assertQEx("'" + SuggesterParams.SUGGEST_DICT + - "' parameter not specified and no default suggester configured", - req("qt", rh, - SuggesterParams.SUGGEST_Q, "exampel", - SuggesterParams.SUGGEST_COUNT, "5"), - SolrException.ErrorCode.BAD_REQUEST - ); + assertQEx( + "No suggester named " + fakeSuggesterName + " was configured", + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + fakeSuggesterName, + SuggesterParams.SUGGEST_Q, + "exampel", + SuggesterParams.SUGGEST_COUNT, + "5"), + SolrException.ErrorCode.BAD_REQUEST); + + assertQEx( + "'" + + SuggesterParams.SUGGEST_DICT + + "' parameter not specified and no default suggester configured", + req("qt", rh, SuggesterParams.SUGGEST_Q, "exampel", SuggesterParams.SUGGEST_COUNT, "5"), + SolrException.ErrorCode.BAD_REQUEST); } - @Test public void testDefaultBuildOnStartupNotStoredDict() throws Exception { - + final String suggester = "suggest_doc_default_startup_no_store"; - + // validate that this suggester is not storing the lookup - assertEquals(suggester, - h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",7). - get("str", n -> "name".equals(n.attr("name"))).txt()); - - assertNull( h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",7). 
- get("str", n -> "storeDir".equals(n.attr("name"))).txt()); - + assertEquals( + suggester, + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 7) + .get("str", n -> "name".equals(n.attr("name"))) + .txt()); + + assertNull( + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 7) + .get("str", n -> "storeDir".equals(n.attr("name"))) + .txt()); + // validate that this suggester only builds manually and has not buildOnStartup parameter - assertEquals("false", - h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",7). - get("str", n -> "buildOnCommit".equals(n.attr("name"))).txt()); + assertEquals( + "false", + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 7) + .get("str", n -> "buildOnCommit".equals(n.attr("name"))) + .txt()); + + assertNull( + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 7) + .get("str", n -> "buildOnStartup".equals(n.attr("name"))) + .txt()); - assertNull(h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",7). - get("str", n -> "buildOnStartup".equals(n.attr("name"))).txt()); - reloadCore(random().nextBoolean()); - + // Validate that the suggester was built on new/reload core - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggester, - SuggesterParams.SUGGEST_Q, "example", - SuggesterParams.SUGGEST_COUNT, "5"), - "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='example']/int[@name='numFound'][.='2']" - ); - + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggester, + SuggesterParams.SUGGEST_Q, + "example", + SuggesterParams.SUGGEST_COUNT, + "5"), + "//lst[@name='suggest']/lst[@name='" + + suggester + + "']/lst[@name='example']/int[@name='numFound'][.='2']"); + // add one more doc, should be visible after core reload assertU(adoc("id", "10", "cat", "example data extra ", "price", "40", "weight", "35")); assertU((commit())); - + waitForWarming(); - + // buildOnCommit=false, this doc should not be in the suggester yet - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggester, - SuggesterParams.SUGGEST_Q, "example", - SuggesterParams.SUGGEST_COUNT, "5"), - "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='example']/int[@name='numFound'][.='2']" - ); - + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggester, + SuggesterParams.SUGGEST_Q, + "example", + SuggesterParams.SUGGEST_COUNT, + "5"), + "//lst[@name='suggest']/lst[@name='" + + suggester + + "']/lst[@name='example']/int[@name='numFound'][.='2']"); + reloadCore(random().nextBoolean()); - - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggester, - SuggesterParams.SUGGEST_Q, "example", - SuggesterParams.SUGGEST_COUNT, "5"), - "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='example']/int[@name='numFound'][.='3']" - ); - + + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggester, + SuggesterParams.SUGGEST_Q, + "example", + SuggesterParams.SUGGEST_COUNT, + "5"), + "//lst[@name='suggest']/lst[@name='" + + suggester + + "']/lst[@name='example']/int[@name='numFound'][.='3']"); } - + @Test public void testDefaultBuildOnStartupStoredDict() throws Exception { - + final String suggester = "suggest_doc_default_startup"; - + // validate that this suggester is storing 
the lookup - assertEquals(suggester, - h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",6) - .get("str", n -> "name".equals(n.attr("name"))).txt()); + assertEquals( + suggester, + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 6) + .get("str", n -> "name".equals(n.attr("name"))) + .txt()); + + assertEquals( + suggester, + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 6) + .get("str", n -> "storeDir".equals(n.attr("name"))) + .txt()); - assertEquals(suggester, - h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",6) - .get("str", n -> "storeDir".equals(n.attr("name"))).txt()); - // validate that this suggester only builds manually and has not buildOnStartup parameter - assertEquals("false", - h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",6) - .get("str", n -> "buildOnCommit".equals(n.attr("name"))).txt()); - - assertNull( h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",6) - .get("str", n -> "buildOnStartup".equals(n.attr("name"))).txt()); - - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggester, - SuggesterParams.SUGGEST_Q, "example", - SuggesterParams.SUGGEST_COUNT, "5"), - "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='example']/int[@name='numFound'][.='0']" - ); - + assertEquals( + "false", + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 6) + .get("str", n -> "buildOnCommit".equals(n.attr("name"))) + .txt()); + + assertNull( + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 6) + .get("str", n -> "buildOnStartup".equals(n.attr("name"))) + .txt()); + + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggester, + SuggesterParams.SUGGEST_Q, + "example", + SuggesterParams.SUGGEST_COUNT, + "5"), + "//lst[@name='suggest']/lst[@name='" + + suggester + + "']/lst[@name='example']/int[@name='numFound'][.='0']"); + // build the suggester manually - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggester, - SuggesterParams.SUGGEST_BUILD, "true"), - "//str[@name='command'][.='build']" - ); - - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggester, - SuggesterParams.SUGGEST_Q, "example", - SuggesterParams.SUGGEST_COUNT, "5"), - "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='example']/int[@name='numFound'][.='2']" - ); - + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggester, + SuggesterParams.SUGGEST_BUILD, + "true"), + "//str[@name='command'][.='build']"); + + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggester, + SuggesterParams.SUGGEST_Q, + "example", + SuggesterParams.SUGGEST_COUNT, + "5"), + "//lst[@name='suggest']/lst[@name='" + + suggester + + "']/lst[@name='example']/int[@name='numFound'][.='2']"); + reloadCore(random().nextBoolean()); - + // Validate that the suggester was loaded on new/reload core - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggester, - SuggesterParams.SUGGEST_Q, "example", - SuggesterParams.SUGGEST_COUNT, "5"), - "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='example']/int[@name='numFound'][.='2']" - ); - - // add one more doc, this should not be seen after a 
core reload (not until the suggester is manually rebuilt) + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggester, + SuggesterParams.SUGGEST_Q, + "example", + SuggesterParams.SUGGEST_COUNT, + "5"), + "//lst[@name='suggest']/lst[@name='" + + suggester + + "']/lst[@name='example']/int[@name='numFound'][.='2']"); + + // add one more doc, this should not be seen after a core reload (not until the suggester is + // manually rebuilt) assertU(adoc("id", "10", "cat", "example data extra ", "price", "40", "weight", "35")); assertU((commit())); - + waitForWarming(); // buildOnCommit=false, this doc should not be in the suggester yet - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggester, - SuggesterParams.SUGGEST_Q, "example", - SuggesterParams.SUGGEST_COUNT, "5"), - "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='example']/int[@name='numFound'][.='2']" - ); - + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggester, + SuggesterParams.SUGGEST_Q, + "example", + SuggesterParams.SUGGEST_COUNT, + "5"), + "//lst[@name='suggest']/lst[@name='" + + suggester + + "']/lst[@name='example']/int[@name='numFound'][.='2']"); + reloadCore(random().nextBoolean()); - - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggester, - SuggesterParams.SUGGEST_Q, "example", - SuggesterParams.SUGGEST_COUNT, "5"), - "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='example']/int[@name='numFound'][.='2']" - ); - + + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggester, + SuggesterParams.SUGGEST_Q, + "example", + SuggesterParams.SUGGEST_COUNT, + "5"), + "//lst[@name='suggest']/lst[@name='" + + suggester + + "']/lst[@name='example']/int[@name='numFound'][.='2']"); + // build the suggester manually - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggester, - SuggesterParams.SUGGEST_BUILD, "true"), - "//str[@name='command'][.='build']" - ); - - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggester, - SuggesterParams.SUGGEST_Q, "example", - SuggesterParams.SUGGEST_COUNT, "5"), - "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='example']/int[@name='numFound'][.='3']" - ); - + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggester, + SuggesterParams.SUGGEST_BUILD, + "true"), + "//str[@name='command'][.='build']"); + + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggester, + SuggesterParams.SUGGEST_Q, + "example", + SuggesterParams.SUGGEST_COUNT, + "5"), + "//lst[@name='suggest']/lst[@name='" + + suggester + + "']/lst[@name='example']/int[@name='numFound'][.='3']"); } - + @Test public void testLoadOnStartup() throws Exception { - + final String suggester = "suggest_fuzzy_doc_manal_build"; - + // validate that this suggester is storing the lookup - assertEquals(suggester, - h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",5). 
- get("str", n -> "name".equals(n.attr("name"))).txt()); + assertEquals( + suggester, + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 5) + .get("str", n -> "name".equals(n.attr("name"))) + .txt()); + + assertEquals( + suggester, + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 5) + .get("str", n -> "storeDir".equals(n.attr("name"))) + .txt()); - assertEquals(suggester, - h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",5). - get("str", n -> "storeDir".equals(n.attr("name"))).txt()); - // validate that this suggester only builds manually - assertEquals("false", - h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",5). - get("str", n -> "buildOnCommit".equals(n.attr("name"))).txt()); - assertEquals("false", - h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",5). - get("str", n -> "buildOnStartup".equals(n.attr("name"))).txt()); - + assertEquals( + "false", + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 5) + .get("str", n -> "buildOnCommit".equals(n.attr("name"))) + .txt()); + assertEquals( + "false", + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 5) + .get("str", n -> "buildOnStartup".equals(n.attr("name"))) + .txt()); + // build the suggester manually - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggester, - SuggesterParams.SUGGEST_BUILD, "true"), - "//str[@name='command'][.='build']" - ); - - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggester, - SuggesterParams.SUGGEST_Q, "exampel", - SuggesterParams.SUGGEST_COUNT, "5"), - "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='exampel']/int[@name='numFound'][.='2']" - ); - + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggester, + SuggesterParams.SUGGEST_BUILD, + "true"), + "//str[@name='command'][.='build']"); + + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggester, + SuggesterParams.SUGGEST_Q, + "exampel", + SuggesterParams.SUGGEST_COUNT, + "5"), + "//lst[@name='suggest']/lst[@name='" + + suggester + + "']/lst[@name='exampel']/int[@name='numFound'][.='2']"); + reloadCore(false); - + // Validate that the suggester was loaded on core reload - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggester, - SuggesterParams.SUGGEST_Q, "exampel", - SuggesterParams.SUGGEST_COUNT, "5"), - "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='exampel']/int[@name='numFound'][.='2']" - ); - + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggester, + SuggesterParams.SUGGEST_Q, + "exampel", + SuggesterParams.SUGGEST_COUNT, + "5"), + "//lst[@name='suggest']/lst[@name='" + + suggester + + "']/lst[@name='exampel']/int[@name='numFound'][.='2']"); + reloadCore(true); - + // Validate that the suggester was loaded on new core - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggester, - SuggesterParams.SUGGEST_Q, "exampel", - SuggesterParams.SUGGEST_COUNT, "5"), - "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='exampel']/int[@name='numFound'][.='2']" - ); + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggester, + SuggesterParams.SUGGEST_Q, + "exampel", + SuggesterParams.SUGGEST_COUNT, + "5"), + 
"//lst[@name='suggest']/lst[@name='" + + suggester + + "']/lst[@name='exampel']/int[@name='numFound'][.='2']"); } - + public void testBuildOnStartupWithCoreReload() throws Exception { doTestBuildOnStartup(false); } - + public void testBuildOnStartupWithNewCores() throws Exception { doTestBuildOnStartup(true); } - + private void doTestBuildOnStartup(boolean createNewCores) throws Exception { - + final String suggesterFuzzy = "suggest_fuzzy_doc_dict"; - + // the test relies on useColdSearcher=false - assertFalse("Precondition not met for test. useColdSearcher must be false", + assertFalse( + "Precondition not met for test. useColdSearcher must be false", h.getCore().getSolrConfig().useColdSearcher); - + // validate that this suggester is not storing the lookup and buildOnStartup is not set - assertEquals(suggesterFuzzy, - h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",2). - get("str", n -> "name".equals(n.attr("name"))).txt()); + assertEquals( + suggesterFuzzy, + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 2) + .get("str", n -> "name".equals(n.attr("name"))) + .txt()); - assertNull(h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",2) - .get("str", n -> "storeDir".equals(n.attr("name"))).txt()); + assertNull( + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 2) + .get("str", n -> "storeDir".equals(n.attr("name"))) + .txt()); - // assert that buildOnStartup=false - assertEquals("false", - h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",2) - .get("str", n -> "buildOnStartup".equals(n.attr("name"))).txt()); - assertEquals("true", - h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",2). 
- get("str", n -> "buildOnCommit".equals(n.attr("name"))).txt()); - + assertEquals( + "false", + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 2) + .get("str", n -> "buildOnStartup".equals(n.attr("name"))) + .txt()); + assertEquals( + "true", + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 2) + .get("str", n -> "buildOnCommit".equals(n.attr("name"))) + .txt()); + // verify that this suggester is built (there was a commit in setUp) - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggesterFuzzy, - SuggesterParams.SUGGEST_Q, "exampel", - SuggesterParams.SUGGEST_COUNT, "5"), - "//lst[@name='suggest']/lst[@name='" + suggesterFuzzy + "']/lst[@name='exampel']/int[@name='numFound'][.='2']" - ); - + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggesterFuzzy, + SuggesterParams.SUGGEST_Q, + "exampel", + SuggesterParams.SUGGEST_COUNT, + "5"), + "//lst[@name='suggest']/lst[@name='" + + suggesterFuzzy + + "']/lst[@name='exampel']/int[@name='numFound'][.='2']"); + // reload the core and wait for for the listeners to finish reloadCore(createNewCores); if (System.getProperty(SYSPROP_NIGHTLY) != null) { // wait some time here in nightly to make sure there are no race conditions in suggester build Thread.sleep(1000); } - + // The suggester should be empty - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggesterFuzzy, - SuggesterParams.SUGGEST_Q, "exampel", - SuggesterParams.SUGGEST_COUNT, "5"), - "//lst[@name='suggest']/lst[@name='" + suggesterFuzzy + "']/lst[@name='exampel']/int[@name='numFound'][.='0']" - ); - + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggesterFuzzy, + SuggesterParams.SUGGEST_Q, + "exampel", + SuggesterParams.SUGGEST_COUNT, + "5"), + "//lst[@name='suggest']/lst[@name='" + + suggesterFuzzy + + "']/lst[@name='exampel']/int[@name='numFound'][.='0']"); + // build the suggester manually - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggesterFuzzy, - SuggesterParams.SUGGEST_BUILD, "true"), - "//str[@name='command'][.='build']" - ); - + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggesterFuzzy, + SuggesterParams.SUGGEST_BUILD, + "true"), + "//str[@name='command'][.='build']"); + // validate the suggester is built again - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggesterFuzzy, - SuggesterParams.SUGGEST_Q, "exampel", - SuggesterParams.SUGGEST_COUNT, "5"), - "//lst[@name='suggest']/lst[@name='" + suggesterFuzzy + "']/lst[@name='exampel']/int[@name='numFound'][.='2']" - ); - + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggesterFuzzy, + SuggesterParams.SUGGEST_Q, + "exampel", + SuggesterParams.SUGGEST_COUNT, + "5"), + "//lst[@name='suggest']/lst[@name='" + + suggesterFuzzy + + "']/lst[@name='exampel']/int[@name='numFound'][.='2']"); + final String suggestStartup = "suggest_fuzzy_doc_dict_build_startup"; - - // repeat the test with "suggest_fuzzy_doc_dict_build_startup", it is exactly the same but with buildOnStartup=true - assertEquals(suggestStartup, - h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",4) - .get("str", n -> "name".equals(n.attr("name"))).txt()); - assertNull( h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",4) - .get("str", n -> "storeDir".equals(n.attr("name"))).txt()); - assertEquals("true", - 
h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",4) - .get("str", n -> "buildOnStartup".equals(n.attr("name"))).txt()); - assertEquals("false", - h.getCore().getSolrConfig().get("searchComponent", n -> "suggest".equals(n.attr("name"))) - .get("lst",4) - .get("str", n -> "buildOnCommit".equals(n.attr("name"))).txt()); - + + // repeat the test with "suggest_fuzzy_doc_dict_build_startup", it is exactly the same but with + // buildOnStartup=true + assertEquals( + suggestStartup, + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 4) + .get("str", n -> "name".equals(n.attr("name"))) + .txt()); + assertNull( + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 4) + .get("str", n -> "storeDir".equals(n.attr("name"))) + .txt()); + assertEquals( + "true", + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 4) + .get("str", n -> "buildOnStartup".equals(n.attr("name"))) + .txt()); + assertEquals( + "false", + h.getCore() + .getSolrConfig() + .get("searchComponent", n -> "suggest".equals(n.attr("name"))) + .get("lst", 4) + .get("str", n -> "buildOnCommit".equals(n.attr("name"))) + .txt()); + // reload the core reloadCore(createNewCores); // verify that this suggester is built (should build on startup) - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggestStartup, - SuggesterParams.SUGGEST_Q, "exampel", - SuggesterParams.SUGGEST_COUNT, "5"), - "//lst[@name='suggest']/lst[@name='" + suggestStartup + "']/lst[@name='exampel']/int[@name='numFound'][.='2']" - ); - - // add one more doc, this should not be seen without rebuilding manually or reloading the core (buildOnCommit=false) + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggestStartup, + SuggesterParams.SUGGEST_Q, + "exampel", + SuggesterParams.SUGGEST_COUNT, + "5"), + "//lst[@name='suggest']/lst[@name='" + + suggestStartup + + "']/lst[@name='exampel']/int[@name='numFound'][.='2']"); + + // add one more doc, this should not be seen without rebuilding manually or reloading the core + // (buildOnCommit=false) assertU(adoc("id", "10", "cat", "example data extra ", "price", "40", "weight", "35")); assertU((commit())); - + waitForWarming(); - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggestStartup, - SuggesterParams.SUGGEST_Q, "exampel", - SuggesterParams.SUGGEST_COUNT, "5"), - "//lst[@name='suggest']/lst[@name='" + suggestStartup + "']/lst[@name='exampel']/int[@name='numFound'][.='2']" - ); - + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggestStartup, + SuggesterParams.SUGGEST_Q, + "exampel", + SuggesterParams.SUGGEST_COUNT, + "5"), + "//lst[@name='suggest']/lst[@name='" + + suggestStartup + + "']/lst[@name='exampel']/int[@name='numFound'][.='2']"); + // build the suggester manually - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggestStartup, - SuggesterParams.SUGGEST_BUILD, "true"), - "//str[@name='command'][.='build']" - ); - - assertQ(req("qt", rh, - SuggesterParams.SUGGEST_DICT, suggestStartup, - SuggesterParams.SUGGEST_Q, "exampel", - SuggesterParams.SUGGEST_COUNT, "5"), - "//lst[@name='suggest']/lst[@name='" + suggestStartup + "']/lst[@name='exampel']/int[@name='numFound'][.='3']" - ); + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggestStartup, + SuggesterParams.SUGGEST_BUILD, + "true"), + 
"//str[@name='command'][.='build']"); + + assertQ( + req( + "qt", + rh, + SuggesterParams.SUGGEST_DICT, + suggestStartup, + SuggesterParams.SUGGEST_Q, + "exampel", + SuggesterParams.SUGGEST_COUNT, + "5"), + "//lst[@name='suggest']/lst[@name='" + + suggestStartup + + "']/lst[@name='exampel']/int[@name='numFound'][.='3']"); } - + private void reloadCore(boolean createNewCore) throws Exception { if (createNewCore) { CoreContainer cores = h.getCoreContainer(); @@ -567,11 +835,7 @@ private void reloadCore(boolean createNewCore) throws Exception { // On regular reloading, wait until the new searcher is registered waitForWarming(); } - - assertQ(req("qt", "/select", - "q", "*:*"), - "//*[@numFound='11']" - ); - } + assertQ(req("qt", "/select", "q", "*:*"), "//*[@numFound='11']"); + } } diff --git a/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentDistributedTest.java b/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentDistributedTest.java index 0527d9e5074..345d953b9f6 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentDistributedTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentDistributedTest.java @@ -17,7 +17,6 @@ package org.apache.solr.handler.component; import org.apache.lucene.util.Constants; - import org.apache.lucene.util.TestUtil; import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.common.params.ShardParams; @@ -32,8 +31,8 @@ public class TermVectorComponentDistributedTest extends BaseDistributedSearchTestCase { @BeforeClass public static void betterNotBeJ9() { - assumeFalse("FIXME: SOLR-5792: This test fails under IBM J9", - Constants.JAVA_VENDOR.startsWith("IBM")); + assumeFalse( + "FIXME: SOLR-5792: This test fails under IBM J9", Constants.JAVA_VENDOR.startsWith("IBM")); int statsType = TestUtil.nextInt(random(), 1, 3); if (statsType == 1) { System.setProperty("solr.statsCache", ExactStatsCache.class.getName()); @@ -63,175 +62,260 @@ public void test() throws Exception { handle.put("df", SKIPVAL); handle.put("tf-idf", SKIPVAL); - index("id", "0", - "test_posofftv", "This is a title and another title", - "test_basictv", "This is a title and another title", - "test_notv", "This is a title and another title", - "test_postv", "This is a title and another title", - "test_offtv", "This is a title and another title" - ); - index("id", "1", - "test_posofftv", "The quick reb fox jumped over the lazy brown dogs.", - "test_basictv", "The quick reb fox jumped over the lazy brown dogs.", - "test_notv", "The quick reb fox jumped over the lazy brown dogs.", - "test_postv", "The quick reb fox jumped over the lazy brown dogs.", - "test_offtv", "The quick reb fox jumped over the lazy brown dogs." 
- ); - - index("id", "2", - "test_posofftv", "This is a document", - "test_basictv", "This is a document", - "test_notv", "This is a document", - "test_postv", "This is a document", - "test_offtv", "This is a document" - ); - index("id", "3", - "test_posofftv", "another document", - "test_basictv", "another document", - "test_notv", "another document", - "test_postv", "another document", - "test_offtv", "another document" - ); - //bunch of docs that are variants on blue - index("id", "4", - "test_posofftv", "blue", - "test_basictv", "blue", - "test_notv", "blue", - "test_postv", "blue", - "test_offtv", "blue" - ); - index("id", "5", - "test_posofftv", "blud", - "test_basictv", "blud", - "test_notv", "blud", - "test_postv", "blud", - "test_offtv", "blud" - ); - index("id", "6", - "test_posofftv", "boue", - "test_basictv", "boue", - "test_notv", "boue", - "test_postv", "boue", - "test_offtv", "boue" - ); - index("id", "7", - "test_posofftv", "glue", - "test_basictv", "glue", - "test_notv", "glue", - "test_postv", "glue", - "test_offtv", "glue" - ); - index("id", "8", - "test_posofftv", "blee", - "test_basictv", "blee", - "test_notv", "blee", - "test_postv", "blee", - "test_offtv", "blee" - ); - index("id", "9", - "test_posofftv", "blah", - "test_basictv", "blah", - "test_notv", "blah", - "test_postv", "blah", - "test_offtv", "blah" - ); + index( + "id", "0", + "test_posofftv", "This is a title and another title", + "test_basictv", "This is a title and another title", + "test_notv", "This is a title and another title", + "test_postv", "This is a title and another title", + "test_offtv", "This is a title and another title"); + index( + "id", "1", + "test_posofftv", "The quick reb fox jumped over the lazy brown dogs.", + "test_basictv", "The quick reb fox jumped over the lazy brown dogs.", + "test_notv", "The quick reb fox jumped over the lazy brown dogs.", + "test_postv", "The quick reb fox jumped over the lazy brown dogs.", + "test_offtv", "The quick reb fox jumped over the lazy brown dogs."); + + index( + "id", "2", + "test_posofftv", "This is a document", + "test_basictv", "This is a document", + "test_notv", "This is a document", + "test_postv", "This is a document", + "test_offtv", "This is a document"); + index( + "id", "3", + "test_posofftv", "another document", + "test_basictv", "another document", + "test_notv", "another document", + "test_postv", "another document", + "test_offtv", "another document"); + // bunch of docs that are variants on blue + index( + "id", "4", + "test_posofftv", "blue", + "test_basictv", "blue", + "test_notv", "blue", + "test_postv", "blue", + "test_offtv", "blue"); + index( + "id", "5", + "test_posofftv", "blud", + "test_basictv", "blud", + "test_notv", "blud", + "test_postv", "blud", + "test_offtv", "blud"); + index( + "id", "6", + "test_posofftv", "boue", + "test_basictv", "boue", + "test_notv", "boue", + "test_postv", "boue", + "test_offtv", "boue"); + index( + "id", "7", + "test_posofftv", "glue", + "test_basictv", "glue", + "test_notv", "glue", + "test_postv", "glue", + "test_offtv", "glue"); + index( + "id", "8", + "test_posofftv", "blee", + "test_basictv", "blee", + "test_notv", "blee", + "test_postv", "blee", + "test_offtv", "blee"); + index( + "id", "9", + "test_posofftv", "blah", + "test_basictv", "blah", + "test_notv", "blah", + "test_postv", "blah", + "test_offtv", "blah"); commit(); final String tv = "tvrh"; for (String q : new String[] {"id:0", "id:7", "id:[3 TO 6]", "*:*"}) { - query("sort","id desc", - "qt",tv, - "q", q, - 
TermVectorComponent.COMPONENT_NAME, "true", - TermVectorParams.TF, "true"); + query( + "sort", + "id desc", + "qt", + tv, + "q", + q, + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.TF, + "true"); // tv.fl diff from fl - query("sort", "id asc", - "qt",tv, - "q", q, - "fl", "*,score", - "tv.fl", "test_basictv,test_offtv", - TermVectorComponent.COMPONENT_NAME, "true", - TermVectorParams.TF, "true"); - - // multi-valued tv.fl - query("sort", "id asc", - "qt",tv, - "q", q, - "fl", "*,score", - "tv.fl", "test_basictv", - "tv.fl","test_offtv", - TermVectorComponent.COMPONENT_NAME, "true", - TermVectorParams.TF, "true"); + query( + "sort", + "id asc", + "qt", + tv, + "q", + q, + "fl", + "*,score", + "tv.fl", + "test_basictv,test_offtv", + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.TF, + "true"); + + // multi-valued tv.fl + query( + "sort", + "id asc", + "qt", + tv, + "q", + q, + "fl", + "*,score", + "tv.fl", + "test_basictv", + "tv.fl", + "test_offtv", + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.TF, + "true"); // re-use fl glob - query("sort", "id desc", - "qt",tv, - "q", q, - "fl", "*,score", - TermVectorComponent.COMPONENT_NAME, "true", - TermVectorParams.TF, "true"); + query( + "sort", + "id desc", + "qt", + tv, + "q", + q, + "fl", + "*,score", + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.TF, + "true"); // re-use fl, ignore things we can't handle - query("sort", "id desc", - "qt",tv, - "q", q, - "fl", "score,test_basictv,[docid],test_postv,val:sum(3,4)", - TermVectorComponent.COMPONENT_NAME, "true", - TermVectorParams.TF, "true"); + query( + "sort", + "id desc", + "qt", + tv, + "q", + q, + "fl", + "score,test_basictv,[docid],test_postv,val:sum(3,4)", + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.TF, + "true"); // re-use (multi-valued) fl, ignore things we can't handle - query("sort", "id desc", - "qt",tv, - "q", q, - "fl", "score,test_basictv", - "fl", "[docid],test_postv,val:sum(3,4)", - TermVectorComponent.COMPONENT_NAME, "true", - TermVectorParams.TF, "true"); + query( + "sort", + "id desc", + "qt", + tv, + "q", + q, + "fl", + "score,test_basictv", + "fl", + "[docid],test_postv,val:sum(3,4)", + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.TF, + "true"); // test some other options - - query("sort", "id asc", - "qt",tv, - "q", q, - TermVectorComponent.COMPONENT_NAME, "true", - TermVectorParams.TF, "true", - TermVectorParams.DF, "true", - TermVectorParams.OFFSETS, "true", - TermVectorParams.POSITIONS, "true", - TermVectorParams.TF_IDF, "true"); - - query("sort", "id desc", - "qt",tv, - "q", q, - TermVectorComponent.COMPONENT_NAME, "true", - TermVectorParams.ALL, "true"); - - query("sort", "id desc", - "qt",tv, - "q", q, - "rows", 1, - ShardParams.DISTRIB_SINGLE_PASS, "true", - TermVectorComponent.COMPONENT_NAME, "true", - TermVectorParams.ALL, "true"); + + query( + "sort", + "id asc", + "qt", + tv, + "q", + q, + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.TF, + "true", + TermVectorParams.DF, + "true", + TermVectorParams.OFFSETS, + "true", + TermVectorParams.POSITIONS, + "true", + TermVectorParams.TF_IDF, + "true"); + + query( + "sort", + "id desc", + "qt", + tv, + "q", + q, + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.ALL, + "true"); + + query( + "sort", + "id desc", + "qt", + tv, + "q", + q, + "rows", + 1, + ShardParams.DISTRIB_SINGLE_PASS, + "true", + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.ALL, + 
"true"); // per field stuff - query("sort", "id desc", - "qt",tv, - "q", q, - TermVectorComponent.COMPONENT_NAME, "true", - TermVectorParams.TF, "true", - TermVectorParams.DF, "true", - TermVectorParams.OFFSETS, "true", - TermVectorParams.POSITIONS, "true", - TermVectorParams.TF_IDF, "true", - TermVectorParams.FIELDS, "test_basictv,test_notv,test_postv,test_offtv,test_posofftv", - "f.test_posofftv." + TermVectorParams.POSITIONS, "false", - "f.test_offtv." + TermVectorParams.OFFSETS, "false", - "f.test_basictv." + TermVectorParams.DF, "false", - "f.test_basictv." + TermVectorParams.TF, "false", - "f.test_basictv." + TermVectorParams.TF_IDF, "false"); + query( + "sort", + "id desc", + "qt", + tv, + "q", + q, + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.TF, + "true", + TermVectorParams.DF, + "true", + TermVectorParams.OFFSETS, + "true", + TermVectorParams.POSITIONS, + "true", + TermVectorParams.TF_IDF, + "true", + TermVectorParams.FIELDS, + "test_basictv,test_notv,test_postv,test_offtv,test_posofftv", + "f.test_posofftv." + TermVectorParams.POSITIONS, + "false", + "f.test_offtv." + TermVectorParams.OFFSETS, + "false", + "f.test_basictv." + TermVectorParams.DF, + "false", + "f.test_basictv." + TermVectorParams.TF, + "false", + "f.test_basictv." + TermVectorParams.TF_IDF, + "false"); } } } diff --git a/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentTest.java index c083a0a616e..ba710d8788d 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentTest.java @@ -16,11 +16,10 @@ */ package org.apache.solr.handler.component; +import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import java.util.ArrayList; import java.util.Arrays; import java.util.List; - -import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.TermVectorParams; import org.junit.BeforeClass; @@ -29,14 +28,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -/** - * - * - **/ +/** */ public class TermVectorComponentTest extends SolrTestCaseJ4 { - @Rule - public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); + @Rule public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); // ensure that we operate correctly with all valid combinations of the uniqueKey being // stored and/or in docValues. 
@@ -68,87 +63,167 @@ public static void beforeClass() throws Exception { public void testCanned() throws Exception { clearIndex(); - assertU(adoc("id", "0", - "test_posoffpaytv", "This is a title and another title", - "test_posofftv", "This is a title and another title", - "test_basictv", "This is a title and another title", - "test_notv", "This is a title and another title", - "test_postv", "This is a title and another title", - "test_offtv", "This is a title and another title" - )); - assertU(adoc("id", "1", - "test_posoffpaytv", "The quick reb fox jumped over the lazy brown dogs.", - "test_posofftv", "The quick reb fox jumped over the lazy brown dogs.", - "test_basictv", "The quick reb fox jumped over the lazy brown dogs.", - "test_notv", "The quick reb fox jumped over the lazy brown dogs.", - "test_postv", "The quick reb fox jumped over the lazy brown dogs.", - "test_offtv", "The quick reb fox jumped over the lazy brown dogs." - )); - assertU(adoc("id", "2", - "test_posoffpaytv", "This is a document", - "test_posofftv", "This is a document", - "test_basictv", "This is a document", - "test_notv", "This is a document", - "test_postv", "This is a document", - "test_offtv", "This is a document" - )); - assertU(adoc("id", "3", - "test_posoffpaytv", "another document", - "test_posofftv", "another document", - "test_basictv", "another document", - "test_notv", "another document", - "test_postv", "another document", - "test_offtv", "another document" - )); - //bunch of docs that are variants on blue - assertU(adoc("id", "4", - "test_posoffpaytv", "blue", - "test_posofftv", "blue", - "test_basictv", "blue", - "test_notv", "blue", - "test_postv", "blue", - "test_offtv", "blue" - )); - assertU(adoc("id", "5", - "test_posoffpaytv", "blud", - "test_posofftv", "blud", - "test_basictv", "blud", - "test_notv", "blud", - "test_postv", "blud", - "test_offtv", "blud" - )); - assertU(adoc("id", "6", - "test_posoffpaytv", "boue", - "test_posofftv", "boue", - "test_basictv", "boue", - "test_notv", "boue", - "test_postv", "boue", - "test_offtv", "boue" - )); - assertU(adoc("id", "7", - "test_posoffpaytv", "glue", - "test_posofftv", "glue", - "test_basictv", "glue", - "test_notv", "glue", - "test_postv", "glue", - "test_offtv", "glue" - )); - assertU(adoc("id", "8", - "test_posoffpaytv", "blee", - "test_posofftv", "blee", - "test_basictv", "blee", - "test_notv", "blee", - "test_postv", "blee", - "test_offtv", "blee" - )); - assertU(adoc("id", "9", - "test_posoffpaytv", "blah", - "test_posofftv", "blah", - "test_basictv", "blah", - "test_notv", "blah", - "test_postv", "blah", - "test_offtv", "blah" - )); + assertU( + adoc( + "id", + "0", + "test_posoffpaytv", + "This is a title and another title", + "test_posofftv", + "This is a title and another title", + "test_basictv", + "This is a title and another title", + "test_notv", + "This is a title and another title", + "test_postv", + "This is a title and another title", + "test_offtv", + "This is a title and another title")); + assertU( + adoc( + "id", + "1", + "test_posoffpaytv", + "The quick reb fox jumped over the lazy brown dogs.", + "test_posofftv", + "The quick reb fox jumped over the lazy brown dogs.", + "test_basictv", + "The quick reb fox jumped over the lazy brown dogs.", + "test_notv", + "The quick reb fox jumped over the lazy brown dogs.", + "test_postv", + "The quick reb fox jumped over the lazy brown dogs.", + "test_offtv", + "The quick reb fox jumped over the lazy brown dogs.")); + assertU( + adoc( + "id", + "2", + "test_posoffpaytv", + "This is a 
document", + "test_posofftv", + "This is a document", + "test_basictv", + "This is a document", + "test_notv", + "This is a document", + "test_postv", + "This is a document", + "test_offtv", + "This is a document")); + assertU( + adoc( + "id", + "3", + "test_posoffpaytv", + "another document", + "test_posofftv", + "another document", + "test_basictv", + "another document", + "test_notv", + "another document", + "test_postv", + "another document", + "test_offtv", + "another document")); + // bunch of docs that are variants on blue + assertU( + adoc( + "id", + "4", + "test_posoffpaytv", + "blue", + "test_posofftv", + "blue", + "test_basictv", + "blue", + "test_notv", + "blue", + "test_postv", + "blue", + "test_offtv", + "blue")); + assertU( + adoc( + "id", + "5", + "test_posoffpaytv", + "blud", + "test_posofftv", + "blud", + "test_basictv", + "blud", + "test_notv", + "blud", + "test_postv", + "blud", + "test_offtv", + "blud")); + assertU( + adoc( + "id", + "6", + "test_posoffpaytv", + "boue", + "test_posofftv", + "boue", + "test_basictv", + "boue", + "test_notv", + "boue", + "test_postv", + "boue", + "test_offtv", + "boue")); + assertU( + adoc( + "id", + "7", + "test_posoffpaytv", + "glue", + "test_posofftv", + "glue", + "test_basictv", + "glue", + "test_notv", + "glue", + "test_postv", + "glue", + "test_offtv", + "glue")); + assertU( + adoc( + "id", + "8", + "test_posoffpaytv", + "blee", + "test_posofftv", + "blee", + "test_basictv", + "blee", + "test_notv", + "blee", + "test_postv", + "blee", + "test_offtv", + "blee")); + assertU( + adoc( + "id", + "9", + "test_posoffpaytv", + "blah", + "test_posofftv", + "blah", + "test_basictv", + "blah", + "test_notv", + "blah", + "test_postv", + "blah", + "test_offtv", + "blah")); assertNull(h.validateUpdate(commit())); doBasics(); @@ -157,100 +232,178 @@ public void testCanned() throws Exception { doPayloads(); } - private void doBasics() throws Exception { - assertJQ(req("json.nl", "map", "qt", tv, "q", "id:0", TermVectorComponent.COMPONENT_NAME, "true", TermVectorParams.TF, "true") - , "/termVectors=={'0':{'uniqueKey':'0'," + - " 'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," + - " 'test_offtv':{'anoth':{'tf':1},'titl':{'tf':2}}," + - " 'test_posofftv':{'anoth':{'tf':1},'titl':{'tf':2}}," + - " 'test_posoffpaytv':{'anoth':{'tf':1},'titl':{'tf':2}}," + - " 'test_postv':{'anoth':{'tf':1},'titl':{'tf':2}}}}" - ); + assertJQ( + req( + "json.nl", + "map", + "qt", + tv, + "q", + "id:0", + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.TF, + "true"), + "/termVectors=={'0':{'uniqueKey':'0'," + + " 'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," + + " 'test_offtv':{'anoth':{'tf':1},'titl':{'tf':2}}," + + " 'test_posofftv':{'anoth':{'tf':1},'titl':{'tf':2}}," + + " 'test_posoffpaytv':{'anoth':{'tf':1},'titl':{'tf':2}}," + + " 'test_postv':{'anoth':{'tf':1},'titl':{'tf':2}}}}"); // tv.fl diff from fl - assertJQ(req("json.nl", "map", - "qt", tv, - "q", "id:0", - "fl", "*,score", - "tv.fl", "test_basictv,test_offtv", - TermVectorComponent.COMPONENT_NAME, "true", - TermVectorParams.TF, "true") - , "/termVectors=={'0':{'uniqueKey':'0'," + - " 'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," + - " 'test_offtv':{'anoth':{'tf':1},'titl':{'tf':2}}}}" - ); - // multi-valued tv.fl - assertJQ(req("json.nl", "map", - "qt", tv, - "q", "id:0", - "fl", "*,score", - "tv.fl", "test_basictv", - "tv.fl", "test_offtv", - TermVectorComponent.COMPONENT_NAME, "true", - TermVectorParams.TF, "true") - , "/termVectors=={'0':{'uniqueKey':'0'," + - " 
'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," + - " 'test_offtv':{'anoth':{'tf':1},'titl':{'tf':2}}}}" - ); + assertJQ( + req( + "json.nl", + "map", + "qt", + tv, + "q", + "id:0", + "fl", + "*,score", + "tv.fl", + "test_basictv,test_offtv", + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.TF, + "true"), + "/termVectors=={'0':{'uniqueKey':'0'," + + " 'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," + + " 'test_offtv':{'anoth':{'tf':1},'titl':{'tf':2}}}}"); + // multi-valued tv.fl + assertJQ( + req( + "json.nl", + "map", + "qt", + tv, + "q", + "id:0", + "fl", + "*,score", + "tv.fl", + "test_basictv", + "tv.fl", + "test_offtv", + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.TF, + "true"), + "/termVectors=={'0':{'uniqueKey':'0'," + + " 'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," + + " 'test_offtv':{'anoth':{'tf':1},'titl':{'tf':2}}}}"); // re-use fl glob - assertJQ(req("json.nl", "map", - "qt", tv, - "q", "id:0", - "fl", "*,score", - TermVectorComponent.COMPONENT_NAME, "true", - TermVectorParams.TF, "true") - , "/termVectors=={'0':{'uniqueKey':'0'," + - " 'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," + - " 'test_offtv':{'anoth':{'tf':1},'titl':{'tf':2}}," + - " 'test_posofftv':{'anoth':{'tf':1},'titl':{'tf':2}}," + - " 'test_posoffpaytv':{'anoth':{'tf':1},'titl':{'tf':2}}," + - " 'test_postv':{'anoth':{'tf':1},'titl':{'tf':2}}}}" - ); + assertJQ( + req( + "json.nl", + "map", + "qt", + tv, + "q", + "id:0", + "fl", + "*,score", + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.TF, + "true"), + "/termVectors=={'0':{'uniqueKey':'0'," + + " 'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," + + " 'test_offtv':{'anoth':{'tf':1},'titl':{'tf':2}}," + + " 'test_posofftv':{'anoth':{'tf':1},'titl':{'tf':2}}," + + " 'test_posoffpaytv':{'anoth':{'tf':1},'titl':{'tf':2}}," + + " 'test_postv':{'anoth':{'tf':1},'titl':{'tf':2}}}}"); // re-use fl, ignore things we can't handle - assertJQ(req("json.nl", "map", - "qt", tv, - "q", "id:0", - "fl", "score,test_basictv,[docid],test_postv,val:sum(3,4)", - TermVectorComponent.COMPONENT_NAME, "true", - TermVectorParams.TF, "true") - , "/termVectors=={'0':{'uniqueKey':'0'," + - " 'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," + - " 'test_postv':{'anoth':{'tf':1},'titl':{'tf':2}}}}" - ); + assertJQ( + req( + "json.nl", + "map", + "qt", + tv, + "q", + "id:0", + "fl", + "score,test_basictv,[docid],test_postv,val:sum(3,4)", + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.TF, + "true"), + "/termVectors=={'0':{'uniqueKey':'0'," + + " 'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," + + " 'test_postv':{'anoth':{'tf':1},'titl':{'tf':2}}}}"); // re-use (multi-valued) fl, ignore things we can't handle - assertJQ(req("json.nl", "map", - "qt", tv, - "q", "id:0", - "fl", "score,test_basictv", - "fl", "[docid],test_postv,val:sum(3,4)", - TermVectorComponent.COMPONENT_NAME, "true", - TermVectorParams.TF, "true") - , "/termVectors=={'0':{'uniqueKey':'0'," + - " 'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," + - " 'test_postv':{'anoth':{'tf':1},'titl':{'tf':2}}}}" - ); - + assertJQ( + req( + "json.nl", + "map", + "qt", + tv, + "q", + "id:0", + "fl", + "score,test_basictv", + "fl", + "[docid],test_postv,val:sum(3,4)", + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.TF, + "true"), + "/termVectors=={'0':{'uniqueKey':'0'," + + " 'test_basictv':{'anoth':{'tf':1},'titl':{'tf':2}}," + + " 'test_postv':{'anoth':{'tf':1},'titl':{'tf':2}}}}"); } private void 
doOptions() throws Exception { - assertJQ(req("json.nl", "map", "qt", tv, "q", "id:0", TermVectorComponent.COMPONENT_NAME, "true" - , TermVectorParams.TF, "true", TermVectorParams.DF, "true", TermVectorParams.OFFSETS, "true", TermVectorParams.POSITIONS, "true", TermVectorParams.TF_IDF, "true") - , "/termVectors/0/test_posofftv/anoth=={'tf':1, 'offsets':{'start':20, 'end':27}, 'positions':{'position':5}, 'df':2, 'tf-idf':0.5}" - ); + assertJQ( + req( + "json.nl", + "map", + "qt", + tv, + "q", + "id:0", + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.TF, + "true", + TermVectorParams.DF, + "true", + TermVectorParams.OFFSETS, + "true", + TermVectorParams.POSITIONS, + "true", + TermVectorParams.TF_IDF, + "true"), + "/termVectors/0/test_posofftv/anoth=={'tf':1, 'offsets':{'start':20, 'end':27}, 'positions':{'position':5}, 'df':2, 'tf-idf':0.5}"); - assertJQ(req("json.nl", "map", "qt", tv, "q", "id:0", TermVectorComponent.COMPONENT_NAME, "true" - , TermVectorParams.ALL, "true") - , "/termVectors/0/test_posofftv/anoth=={'tf':1, 'offsets':{'start':20, 'end':27}, 'positions':{'position':5}, 'df':2, 'tf-idf':0.5}" - ); + assertJQ( + req( + "json.nl", + "map", + "qt", + tv, + "q", + "id:0", + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.ALL, + "true"), + "/termVectors/0/test_posofftv/anoth=={'tf':1, 'offsets':{'start':20, 'end':27}, 'positions':{'position':5}, 'df':2, 'tf-idf':0.5}"); // test each combination at random final List list = new ArrayList<>(); - list.addAll(Arrays.asList("json.nl", "map", "qt", tv, "q", "id:0", TermVectorComponent.COMPONENT_NAME, "true")); - String[][] options = new String[][]{{TermVectorParams.TF, "'tf':1"}, - {TermVectorParams.OFFSETS, "'offsets':{'start':20, 'end':27}"}, - {TermVectorParams.POSITIONS, "'positions':{'position':5}"}, - {TermVectorParams.DF, "'df':2"}, - {TermVectorParams.TF_IDF, "'tf-idf':0.5"}}; + list.addAll( + Arrays.asList( + "json.nl", "map", "qt", tv, "q", "id:0", TermVectorComponent.COMPONENT_NAME, "true")); + String[][] options = + new String[][] { + {TermVectorParams.TF, "'tf':1"}, + {TermVectorParams.OFFSETS, "'offsets':{'start':20, 'end':27}"}, + {TermVectorParams.POSITIONS, "'positions':{'position':5}"}, + {TermVectorParams.DF, "'df':2"}, + {TermVectorParams.TF_IDF, "'tf-idf':0.5"} + }; StringBuilder expected = new StringBuilder("/termVectors/0/test_posofftv/anoth=={"); boolean first = true; for (int i = 0; i < options.length; i++) { @@ -261,7 +414,6 @@ private void doOptions() throws Exception { } first = false; expected.append(options[i][1]); - } list.add(options[i][0]); list.add(use ? "true" : "false"); @@ -272,32 +424,73 @@ private void doOptions() throws Exception { } private void doPerField() throws Exception { - assertJQ(req("json.nl", "map", "qt", tv, "q", "id:0", TermVectorComponent.COMPONENT_NAME, "true" - , TermVectorParams.TF, "true", TermVectorParams.DF, "true", TermVectorParams.OFFSETS, "true", TermVectorParams.POSITIONS, "true", TermVectorParams.TF_IDF, "true" - , TermVectorParams.FIELDS, "test_basictv,test_notv,test_postv,test_offtv,test_posofftv,test_posoffpaytv" - , "f.test_posoffpaytv." + TermVectorParams.PAYLOADS, "false" - , "f.test_posofftv." + TermVectorParams.POSITIONS, "false" - , "f.test_offtv." + TermVectorParams.OFFSETS, "false" - , "f.test_basictv." + TermVectorParams.DF, "false" - , "f.test_basictv." + TermVectorParams.TF, "false" - , "f.test_basictv." 
+ TermVectorParams.TF_IDF, "false" - ) - , "/termVectors/0/test_basictv=={'anoth':{},'titl':{}}" - , "/termVectors/0/test_postv/anoth=={'tf':1, 'positions':{'position':5}, 'df':2, 'tf-idf':0.5}" - , "/termVectors/0/test_offtv/anoth=={'tf':1, 'df':2, 'tf-idf':0.5}" - , "/termVectors/warnings=={ 'noTermVectors':['test_notv'], 'noPositions':['test_basictv', 'test_offtv'], 'noOffsets':['test_basictv', 'test_postv']}" - ); + assertJQ( + req( + "json.nl", + "map", + "qt", + tv, + "q", + "id:0", + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.TF, + "true", + TermVectorParams.DF, + "true", + TermVectorParams.OFFSETS, + "true", + TermVectorParams.POSITIONS, + "true", + TermVectorParams.TF_IDF, + "true", + TermVectorParams.FIELDS, + "test_basictv,test_notv,test_postv,test_offtv,test_posofftv,test_posoffpaytv", + "f.test_posoffpaytv." + TermVectorParams.PAYLOADS, + "false", + "f.test_posofftv." + TermVectorParams.POSITIONS, + "false", + "f.test_offtv." + TermVectorParams.OFFSETS, + "false", + "f.test_basictv." + TermVectorParams.DF, + "false", + "f.test_basictv." + TermVectorParams.TF, + "false", + "f.test_basictv." + TermVectorParams.TF_IDF, + "false"), + "/termVectors/0/test_basictv=={'anoth':{},'titl':{}}", + "/termVectors/0/test_postv/anoth=={'tf':1, 'positions':{'position':5}, 'df':2, 'tf-idf':0.5}", + "/termVectors/0/test_offtv/anoth=={'tf':1, 'df':2, 'tf-idf':0.5}", + "/termVectors/warnings=={ 'noTermVectors':['test_notv'], 'noPositions':['test_basictv', 'test_offtv'], 'noOffsets':['test_basictv', 'test_postv']}"); } private void doPayloads() throws Exception { // This field uses TokenOffsetPayloadTokenFilter, which // stuffs start (20) and end offset (27) into the // payload: - assertJQ(req("json.nl", "map", "qt", tv, "q", "id:0", TermVectorComponent.COMPONENT_NAME, "true" - , TermVectorParams.TF, "true", TermVectorParams.DF, "true", TermVectorParams.OFFSETS, "true", TermVectorParams.POSITIONS, "true", TermVectorParams.TF_IDF, "true", - TermVectorParams.PAYLOADS, "true") - , "/termVectors/0/test_posoffpaytv/anoth=={'tf':1, 'offsets':{'start':20, 'end':27}, 'positions':{'position':5}, 'payloads':{'payload': 'AAAAFAAAABs='}, 'df':2, 'tf-idf':0.5}" - ); + assertJQ( + req( + "json.nl", + "map", + "qt", + tv, + "q", + "id:0", + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.TF, + "true", + TermVectorParams.DF, + "true", + TermVectorParams.OFFSETS, + "true", + TermVectorParams.POSITIONS, + "true", + TermVectorParams.TF_IDF, + "true", + TermVectorParams.PAYLOADS, + "true"), + "/termVectors/0/test_posoffpaytv/anoth=={'tf':1, 'offsets':{'start':20, 'end':27}, 'positions':{'position':5}, 'payloads':{'payload': 'AAAAFAAAABs='}, 'df':2, 'tf-idf':0.5}"); } @Test @@ -309,17 +502,24 @@ public void testNoVectors() throws Exception { assertU(adoc("id", "3")); assertNull(h.validateUpdate(commit())); - // Kind of an odd test, but we just want to know if we don't generate an NPE when there is nothing to give back in the term vectors. - assertJQ(req("json.nl", "map", "qt", tv, "q", "id:0", TermVectorComponent.COMPONENT_NAME, "true", TermVectorParams.TF, "true") - , "/termVectors=={'0':{'uniqueKey':'0'}}}" - ); - - + // Kind of an odd test, but we just want to know if we don't generate an NPE when there is + // nothing to give back in the term vectors. 
+ assertJQ( + req( + "json.nl", + "map", + "qt", + tv, + "q", + "id:0", + TermVectorComponent.COMPONENT_NAME, + "true", + TermVectorParams.TF, + "true"), + "/termVectors=={'0':{'uniqueKey':'0'}}}"); } - } - /* * diff --git a/solr/core/src/test/org/apache/solr/handler/component/TermsComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/TermsComponentTest.java index e53c6387c69..1d509ac8a7e 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/TermsComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/TermsComponentTest.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.regex.Pattern; - import org.apache.lucene.util.mutable.MutableValueDouble; import org.apache.lucene.util.mutable.MutableValueFloat; import org.apache.lucene.util.mutable.MutableValueInt; @@ -34,7 +33,6 @@ import org.junit.BeforeClass; import org.junit.Test; - public class TermsComponentTest extends SolrTestCaseJ4 { @BeforeClass @@ -50,8 +48,8 @@ public void createIndex() { assertU(commit()); assertU(adoc("id", "0", "lowerfilt", "a", "standardfilt", "a", "foo_i", "1")); - assertU(adoc("id", "1", "lowerfilt", "a", "standardfilt", "aa", "foo_i","1")); - assertU(adoc("id", "2", "lowerfilt", "aa", "standardfilt", "aaa", "foo_i","2")); + assertU(adoc("id", "1", "lowerfilt", "a", "standardfilt", "aa", "foo_i", "1")); + assertU(adoc("id", "2", "lowerfilt", "aa", "standardfilt", "aaa", "foo_i", "2")); assertU(adoc("id", "3", "lowerfilt", "aaa", "standardfilt", "abbb")); assertU(adoc("id", "4", "lowerfilt", "ab", "standardfilt", "b")); assertU(adoc("id", "5", "lowerfilt", "abb", "standardfilt", "bb")); @@ -60,7 +58,6 @@ public void createIndex() { assertU(adoc("id", "8", "lowerfilt", "baa", "standardfilt", "cccc")); assertU(adoc("id", "9", "lowerfilt", "bbb", "standardfilt", "ccccc")); - assertU(adoc("id", "10", "standardfilt", "ddddd")); assertU(commit()); @@ -86,225 +83,357 @@ public void createIndex() { @Test public void testEmptyLower() throws Exception { - assertQ(req("indent","true", "qt","/terms", "terms.fl","lowerfilt", "terms.upper","b") - ,"count(//lst[@name='lowerfilt']/*)=6" - ,"//int[@name='a'] " - ,"//int[@name='aa'] " - ,"//int[@name='aaa'] " - ,"//int[@name='ab'] " - ,"//int[@name='abb'] " - ,"//int[@name='abc'] " - ); + assertQ( + req("indent", "true", "qt", "/terms", "terms.fl", "lowerfilt", "terms.upper", "b"), + "count(//lst[@name='lowerfilt']/*)=6", + "//int[@name='a'] ", + "//int[@name='aa'] ", + "//int[@name='aaa'] ", + "//int[@name='ab'] ", + "//int[@name='abb'] ", + "//int[@name='abc'] "); } - @Test public void testMultipleFields() throws Exception { - assertQ(req("indent","true", "qt","/terms", - "terms.fl","lowerfilt", "terms.upper","b", - "terms.fl","standardfilt") - ,"count(//lst[@name='lowerfilt']/*)=6" - ,"count(//lst[@name='standardfilt']/*)=5" - ); - + assertQ( + req( + "indent", + "true", + "qt", + "/terms", + "terms.fl", + "lowerfilt", + "terms.upper", + "b", + "terms.fl", + "standardfilt"), + "count(//lst[@name='lowerfilt']/*)=6", + "count(//lst[@name='standardfilt']/*)=5"); } @Test public void testUnlimitedRows() throws Exception { - assertQ(req("indent","true", "qt","/terms", - "terms.fl","lowerfilt", - "terms.fl","standardfilt") - ,"count(//lst[@name='lowerfilt']/*)=9" - ,"count(//lst[@name='standardfilt']/*)=10" - ); - assertQ(req("indent","true", "qt","/terms", - "terms.fl","lowerfilt", - "terms.fl","standardfilt", - "terms.limit","-1") - ,"count(//lst[@name='lowerfilt']/*)=9" - 
,"count(//lst[@name='standardfilt']/*)=16" - ); - - + assertQ( + req("indent", "true", "qt", "/terms", "terms.fl", "lowerfilt", "terms.fl", "standardfilt"), + "count(//lst[@name='lowerfilt']/*)=9", + "count(//lst[@name='standardfilt']/*)=10"); + assertQ( + req( + "indent", + "true", + "qt", + "/terms", + "terms.fl", + "lowerfilt", + "terms.fl", + "standardfilt", + "terms.limit", + "-1"), + "count(//lst[@name='lowerfilt']/*)=9", + "count(//lst[@name='standardfilt']/*)=16"); } @Test public void testPrefix() throws Exception { - assertQ(req("indent","true", "qt","/terms", - "terms.fl","lowerfilt", "terms.upper","b", - "terms.fl","standardfilt", - "terms.lower","aa", "terms.lower.incl","false", "terms.prefix","aa", "terms.upper","b", "terms.limit","50") - ,"count(//lst[@name='lowerfilt']/*)=1" - ,"count(//lst[@name='standardfilt']/*)=1" - ); + assertQ( + req( + "indent", + "true", + "qt", + "/terms", + "terms.fl", + "lowerfilt", + "terms.upper", + "b", + "terms.fl", + "standardfilt", + "terms.lower", + "aa", + "terms.lower.incl", + "false", + "terms.prefix", + "aa", + "terms.upper", + "b", + "terms.limit", + "50"), + "count(//lst[@name='lowerfilt']/*)=1", + "count(//lst[@name='standardfilt']/*)=1"); } @Test public void testRegexp() throws Exception { - assertQ(req("indent","true", "qt","/terms", - "terms.fl","standardfilt", - "terms.lower","a", "terms.lower.incl","false", - "terms.upper","c", "terms.upper.incl","true", - "terms.regex","b.*") - ,"count(//lst[@name='standardfilt']/*)=3" - ); + assertQ( + req( + "indent", + "true", + "qt", + "/terms", + "terms.fl", + "standardfilt", + "terms.lower", + "a", + "terms.lower.incl", + "false", + "terms.upper", + "c", + "terms.upper.incl", + "true", + "terms.regex", + "b.*"), + "count(//lst[@name='standardfilt']/*)=3"); } @Test public void testRegexpFlagParsing() { - ModifiableSolrParams params = new ModifiableSolrParams(); - params.add(TermsParams.TERMS_REGEXP_FLAG, "case_insensitive", "literal", "comments", "multiline", "unix_lines", - "unicode_case", "dotall", "canon_eq"); - try (TermsComponent termsComponent = new TermsComponent()) { - int flags = termsComponent.resolveRegexpFlags(params); - int expected = Pattern.CASE_INSENSITIVE | Pattern.LITERAL | Pattern.COMMENTS | Pattern.MULTILINE | Pattern.UNIX_LINES - | Pattern.UNICODE_CASE | Pattern.DOTALL | Pattern.CANON_EQ; - assertEquals(expected, flags); - } catch (IOException e) { - fail("Error closing TermsComponent"); - } + ModifiableSolrParams params = new ModifiableSolrParams(); + params.add( + TermsParams.TERMS_REGEXP_FLAG, + "case_insensitive", + "literal", + "comments", + "multiline", + "unix_lines", + "unicode_case", + "dotall", + "canon_eq"); + try (TermsComponent termsComponent = new TermsComponent()) { + int flags = termsComponent.resolveRegexpFlags(params); + int expected = + Pattern.CASE_INSENSITIVE + | Pattern.LITERAL + | Pattern.COMMENTS + | Pattern.MULTILINE + | Pattern.UNIX_LINES + | Pattern.UNICODE_CASE + | Pattern.DOTALL + | Pattern.CANON_EQ; + assertEquals(expected, flags); + } catch (IOException e) { + fail("Error closing TermsComponent"); + } } @Test public void testRegexpWithFlags() throws Exception { // TODO: there are no uppercase or mixed-case terms in the index! 
- assertQ(req("indent", "true", "qt", "/terms", - "terms.fl", "standardfilt", - "terms.lower", "a", "terms.lower.incl", "false", - "terms.upper", "c", "terms.upper.incl", "true", - "terms.regex", "B.*", - "terms.regex.flag", "case_insensitive") - , "count(//lst[@name='standardfilt']/*)=3" - ); + assertQ( + req( + "indent", + "true", + "qt", + "/terms", + "terms.fl", + "standardfilt", + "terms.lower", + "a", + "terms.lower.incl", + "false", + "terms.upper", + "c", + "terms.upper.incl", + "true", + "terms.regex", + "B.*", + "terms.regex.flag", + "case_insensitive"), + "count(//lst[@name='standardfilt']/*)=3"); } @Test public void testSortCount() throws Exception { - assertQ(req("indent","true", "qt","/terms", - "terms.fl","standardfilt", - "terms.lower","s", "terms.lower.incl","false", - "terms.prefix","s", - "terms.sort","count") - ,"count(//lst[@name='standardfilt']/*)=3" - ,"//lst[@name='standardfilt']/int[1][@name='snake'][.='3']" - ,"//lst[@name='standardfilt']/int[2][@name='shark'][.='2']" - ,"//lst[@name='standardfilt']/int[3][@name='spider'][.='1']" - ); - + assertQ( + req( + "indent", + "true", + "qt", + "/terms", + "terms.fl", + "standardfilt", + "terms.lower", + "s", + "terms.lower.incl", + "false", + "terms.prefix", + "s", + "terms.sort", + "count"), + "count(//lst[@name='standardfilt']/*)=3", + "//lst[@name='standardfilt']/int[1][@name='snake'][.='3']", + "//lst[@name='standardfilt']/int[2][@name='shark'][.='2']", + "//lst[@name='standardfilt']/int[3][@name='spider'][.='1']"); } @Test public void testTermsList() throws Exception { - //Terms list always returns in index order - assertQ(req("indent","true", "qt","/terms", - "terms.fl","standardfilt", - "terms.list","spider,snake,a\\,b,shark,ddddd,bad") - ,"count(//lst[@name='standardfilt']/*)=5" - ,"//lst[@name='standardfilt']/int[1][@name='a,b'][.='1']" - ,"//lst[@name='standardfilt']/int[2][@name='ddddd'][.='4']" - ,"//lst[@name='standardfilt']/int[3][@name='shark'][.='2']" - ,"//lst[@name='standardfilt']/int[4][@name='snake'][.='3']" - ,"//lst[@name='standardfilt']/int[5][@name='spider'][.='1']" - ); - - - //Test with numeric terms - assertQ(req("indent","true", "qt","/terms", - "terms.fl","foo_i", - "terms.list","2,1") - ,"count(//lst[@name='foo_i']/*)=2" - ,"//lst[@name='foo_i']/int[1][@name='1'][.='2']" - ,"//lst[@name='foo_i']/int[2][@name='2'][.='1']" - ); + // Terms list always returns in index order + assertQ( + req( + "indent", + "true", + "qt", + "/terms", + "terms.fl", + "standardfilt", + "terms.list", + "spider,snake,a\\,b,shark,ddddd,bad"), + "count(//lst[@name='standardfilt']/*)=5", + "//lst[@name='standardfilt']/int[1][@name='a,b'][.='1']", + "//lst[@name='standardfilt']/int[2][@name='ddddd'][.='4']", + "//lst[@name='standardfilt']/int[3][@name='shark'][.='2']", + "//lst[@name='standardfilt']/int[4][@name='snake'][.='3']", + "//lst[@name='standardfilt']/int[5][@name='spider'][.='1']"); + + // Test with numeric terms + assertQ( + req("indent", "true", "qt", "/terms", "terms.fl", "foo_i", "terms.list", "2,1"), + "count(//lst[@name='foo_i']/*)=2", + "//lst[@name='foo_i']/int[1][@name='1'][.='2']", + "//lst[@name='foo_i']/int[2][@name='2'][.='1']"); } - @Test public void testStats() throws Exception { - //Terms list always returns in index order - assertQ(req("indent", "true", "qt", "/terms", - "terms.fl", "standardfilt","terms.stats", "true", - "terms.list", "spider,snake,shark,ddddd,bad") - , "//lst[@name='indexstats']/long[1][@name='numDocs'][.='24']" - ); + // Terms list always returns in index order + assertQ( + 
req( + "indent", + "true", + "qt", + "/terms", + "terms.fl", + "standardfilt", + "terms.stats", + "true", + "terms.list", + "spider,snake,shark,ddddd,bad"), + "//lst[@name='indexstats']/long[1][@name='numDocs'][.='24']"); } @Test public void testSortIndex() throws Exception { - assertQ(req("indent","true", "qt","/terms", - "terms.fl","standardfilt", - "terms.lower","s", "terms.lower.incl","false", - "terms.prefix","s", - "terms.sort","index") - ,"count(//lst[@name='standardfilt']/*)=3" - ,"//lst[@name='standardfilt']/int[1][@name='shark'][.='2']" - ,"//lst[@name='standardfilt']/int[2][@name='snake'][.='3']" - ,"//lst[@name='standardfilt']/int[3][@name='spider'][.='1']" - ); + assertQ( + req( + "indent", + "true", + "qt", + "/terms", + "terms.fl", + "standardfilt", + "terms.lower", + "s", + "terms.lower.incl", + "false", + "terms.prefix", + "s", + "terms.sort", + "index"), + "count(//lst[@name='standardfilt']/*)=3", + "//lst[@name='standardfilt']/int[1][@name='shark'][.='2']", + "//lst[@name='standardfilt']/int[2][@name='snake'][.='3']", + "//lst[@name='standardfilt']/int[3][@name='spider'][.='1']"); } - + @Test public void testPastUpper() throws Exception { - assertQ(req("indent","true", "qt","/terms", - "terms.fl","lowerfilt", - //no upper bound, lower bound doesn't exist - "terms.lower","d") - ,"count(//lst[@name='standardfilt']/*)=0" - ); + assertQ( + req( + "indent", + "true", + "qt", + "/terms", + "terms.fl", + "lowerfilt", + // no upper bound, lower bound doesn't exist + "terms.lower", + "d"), + "count(//lst[@name='standardfilt']/*)=0"); } @Test public void testLowerExclusive() throws Exception { - assertQ(req("indent","true", "qt","/terms", - "terms.fl","lowerfilt", - "terms.lower","a", "terms.lower.incl","false", - "terms.upper","b") - ,"count(//lst[@name='lowerfilt']/*)=5" - ,"//int[@name='aa'] " - ,"//int[@name='aaa'] " - ,"//int[@name='ab'] " - ,"//int[@name='abb'] " - ,"//int[@name='abc'] " - ); - - assertQ(req("indent","true", "qt","/terms", - "terms.fl","standardfilt", - "terms.lower","cc", "terms.lower.incl","false", - "terms.upper","d") - ,"count(//lst[@name='standardfilt']/*)=2" - ); + assertQ( + req( + "indent", + "true", + "qt", + "/terms", + "terms.fl", + "lowerfilt", + "terms.lower", + "a", + "terms.lower.incl", + "false", + "terms.upper", + "b"), + "count(//lst[@name='lowerfilt']/*)=5", + "//int[@name='aa'] ", + "//int[@name='aaa'] ", + "//int[@name='ab'] ", + "//int[@name='abb'] ", + "//int[@name='abc'] "); + + assertQ( + req( + "indent", + "true", + "qt", + "/terms", + "terms.fl", + "standardfilt", + "terms.lower", + "cc", + "terms.lower.incl", + "false", + "terms.upper", + "d"), + "count(//lst[@name='standardfilt']/*)=2"); } @Test public void test() throws Exception { - assertQ(req("indent","true", "qt","/terms", - "terms.fl","lowerfilt", - "terms.lower","a", - "terms.upper","b") - ,"count(//lst[@name='lowerfilt']/*)=6" - ,"//int[@name='a'] " - ,"//int[@name='aa'] " - ,"//int[@name='aaa'] " - ,"//int[@name='ab'] " - ,"//int[@name='abb'] " - ,"//int[@name='abc'] " - ); - - assertQ(req("indent","true", "qt","/terms", - "terms.fl","lowerfilt", - "terms.lower","a", - "terms.upper","b", - "terms.raw","true", // this should have no effect on a text field - "terms.limit","2") - ,"count(//lst[@name='lowerfilt']/*)=2" - ,"//int[@name='a']" - ,"//int[@name='aa']" - ); - - assertQ(req("indent","true", "qt","/terms", - "terms.fl","foo_i") - ,"//int[@name='1'][.='2']" - ); + assertQ( + req( + "indent", + "true", + "qt", + "/terms", + "terms.fl", + "lowerfilt", + "terms.lower", 
+ "a", + "terms.upper", + "b"), + "count(//lst[@name='lowerfilt']/*)=6", + "//int[@name='a'] ", + "//int[@name='aa'] ", + "//int[@name='aaa'] ", + "//int[@name='ab'] ", + "//int[@name='abb'] ", + "//int[@name='abc'] "); + + assertQ( + req( + "indent", + "true", + "qt", + "/terms", + "terms.fl", + "lowerfilt", + "terms.lower", + "a", + "terms.upper", + "b", + "terms.raw", + "true", // this should have no effect on a text field + "terms.limit", + "2"), + "count(//lst[@name='lowerfilt']/*)=2", + "//int[@name='a']", + "//int[@name='aa']"); + + assertQ(req("indent", "true", "qt", "/terms", "terms.fl", "foo_i"), "//int[@name='1'][.='2']"); /* terms.raw only applies to indexed fields assertQ(req("indent","true", "qt","/terms", @@ -314,72 +443,112 @@ public void test() throws Exception { */ // check something at the end of the index - assertQ(req("indent","true", "qt","/terms", - "terms.fl","zzz_i") - ,"count(//lst[@name='zzz_i']/*)=0" - ); + assertQ( + req("indent", "true", "qt", "/terms", "terms.fl", "zzz_i"), + "count(//lst[@name='zzz_i']/*)=0"); } @Test public void testMinMaxFreq() throws Exception { - assertQ(req("indent","true", "qt","/terms", - "terms.fl","lowerfilt", - "terms.lower","a", - "terms.mincount","2", - "terms.maxcount","-1", - "terms.limit","50") - ,"count(//lst[@name='lowerfilt']/*)=1" - ); - - assertQ(req("indent","true", "qt","/terms", - "terms.fl","standardfilt", - "terms.lower","d", - "terms.mincount","2", - "terms.maxcount","3", - "terms.limit","50") - ,"count(//lst[@name='standardfilt']/*)=3" - ); + assertQ( + req( + "indent", + "true", + "qt", + "/terms", + "terms.fl", + "lowerfilt", + "terms.lower", + "a", + "terms.mincount", + "2", + "terms.maxcount", + "-1", + "terms.limit", + "50"), + "count(//lst[@name='lowerfilt']/*)=1"); + + assertQ( + req( + "indent", + "true", + "qt", + "/terms", + "terms.fl", + "standardfilt", + "terms.lower", + "d", + "terms.mincount", + "2", + "terms.maxcount", + "3", + "terms.limit", + "50"), + "count(//lst[@name='standardfilt']/*)=3"); } @Test public void testTermsWithJSON() throws Exception { - ModifiableSolrParams params = params( - "qt", "/terms", "terms.fl", "standardfilt", "terms.lower", "a", - "terms.sort", "index", "wt", "json" - ); + ModifiableSolrParams params = + params( + "qt", + "/terms", + "terms.fl", + "standardfilt", + "terms.lower", + "a", + "terms.sort", + "index", + "wt", + "json"); assertJQ(req(params), "/terms/standardfilt/[0]==a", "/terms/standardfilt/[1]==1"); // enable terms.ttf params.set("terms.ttf", "true"); - assertJQ(req(params), "/terms/standardfilt/[0]==a", "/terms/standardfilt/[1]/df==1", + assertJQ( + req(params), + "/terms/standardfilt/[0]==a", + "/terms/standardfilt/[1]/df==1", "/terms/standardfilt/[1]/ttf==1"); // test the response with terms.list and terms.ttf=false params.set("terms.list", "spider,snake,shark"); params.remove("terms.ttf"); - assertJQ(req(params), "/terms/standardfilt/[0]==shark", "/terms/standardfilt/[1]==2", - "/terms/standardfilt/[2]==snake", "/terms/standardfilt/[3]==3", - "/terms/standardfilt/[4]==spider", "/terms/standardfilt/[5]==1" - ); + assertJQ( + req(params), + "/terms/standardfilt/[0]==shark", + "/terms/standardfilt/[1]==2", + "/terms/standardfilt/[2]==snake", + "/terms/standardfilt/[3]==3", + "/terms/standardfilt/[4]==spider", + "/terms/standardfilt/[5]==1"); // with terms.list and terms.ttf=true params.set("terms.ttf", "true"); - assertJQ(req(params), - "/terms/standardfilt/[0]==shark", "/terms/standardfilt/[1]/df==2", "/terms/standardfilt/[1]/ttf==2", - 
"/terms/standardfilt/[2]==snake", "/terms/standardfilt/[3]/df==3", "/terms/standardfilt/[3]/ttf==3", - "/terms/standardfilt/[4]==spider", "/terms/standardfilt/[5]/df==1", "/terms/standardfilt/[5]/ttf==1" - ); + assertJQ( + req(params), + "/terms/standardfilt/[0]==shark", + "/terms/standardfilt/[1]/df==2", + "/terms/standardfilt/[1]/ttf==2", + "/terms/standardfilt/[2]==snake", + "/terms/standardfilt/[3]/df==3", + "/terms/standardfilt/[3]/ttf==3", + "/terms/standardfilt/[4]==spider", + "/terms/standardfilt/[5]/df==1", + "/terms/standardfilt/[5]/ttf==1"); } @Test public void testDocFreqAndTotalTermFreq() throws Exception { - SolrQueryRequest req = req( - "indent","true", - "qt", "/terms", - "terms.fl", "standardfilt", - "terms.ttf", "true", - "terms.list", "snake,spider,shark,ddddd"); - assertQ(req, + SolrQueryRequest req = + req( + "indent", "true", + "qt", "/terms", + "terms.fl", "standardfilt", + "terms.ttf", "true", + "terms.list", "snake,spider,shark,ddddd"); + assertQ( + req, "count(//lst[@name='standardfilt']/*)=4", "//lst[@name='standardfilt']/lst[@name='ddddd']/long[@name='df'][.='4']", "//lst[@name='standardfilt']/lst[@name='ddddd']/long[@name='ttf'][.='4']", @@ -391,15 +560,16 @@ public void testDocFreqAndTotalTermFreq() throws Exception { "//lst[@name='standardfilt']/lst[@name='spider']/long[@name='ttf'][.='1']"); // terms.limit=-1 and terms.sort=count and NO terms.list - req = req( - "indent","true", - "qt", "/terms", - "terms.fl", "standardfilt", - "terms.ttf", "true", - "terms.limit", "-1", - "terms.sort", "count" - ); - assertQ(req, + req = + req( + "indent", "true", + "qt", "/terms", + "terms.fl", "standardfilt", + "terms.ttf", "true", + "terms.limit", "-1", + "terms.sort", "count"); + assertQ( + req, "count(//lst[@name='standardfilt']/*)>=4", // it would be at-least 4 "//lst[@name='standardfilt']/lst[@name='ddddd']/long[@name='df'][.='4']", "//lst[@name='standardfilt']/lst[@name='ddddd']/long[@name='ttf'][.='4']", @@ -413,13 +583,15 @@ public void testDocFreqAndTotalTermFreq() throws Exception { @Test public void testDocFreqAndTotalTermFreqForNonExistingTerm() throws Exception { - SolrQueryRequest req = req( - "indent","true", - "qt", "/terms", - "terms.fl", "standardfilt", - "terms.ttf", "true", - "terms.list", "boo,snake"); - assertQ(req, + SolrQueryRequest req = + req( + "indent", "true", + "qt", "/terms", + "terms.fl", "standardfilt", + "terms.ttf", "true", + "terms.list", "boo,snake"); + assertQ( + req, "count(//lst[@name='standardfilt']/*)=1", "//lst[@name='standardfilt']/lst[@name='snake']/long[@name='df'][.='3']", "//lst[@name='standardfilt']/lst[@name='snake']/long[@name='ttf'][.='3']"); @@ -427,14 +599,16 @@ public void testDocFreqAndTotalTermFreqForNonExistingTerm() throws Exception { @Test public void testDocFreqAndTotalTermFreqForMultipleFields() throws Exception { - SolrQueryRequest req = req( - "indent","true", - "qt", "/terms", - "terms.fl", "lowerfilt", - "terms.fl", "standardfilt", - "terms.ttf", "true", - "terms.list", "a,aa,aaa"); - assertQ(req, + SolrQueryRequest req = + req( + "indent", "true", + "qt", "/terms", + "terms.fl", "lowerfilt", + "terms.fl", "standardfilt", + "terms.ttf", "true", + "terms.list", "a,aa,aaa"); + assertQ( + req, "count(//lst[@name='lowerfilt']/*)=3", "count(//lst[@name='standardfilt']/*)=3", "//lst[@name='lowerfilt']/lst[@name='a']/long[@name='df'][.='2']", @@ -451,16 +625,17 @@ public void testDocFreqAndTotalTermFreqForMultipleFields() throws Exception { "//lst[@name='standardfilt']/lst[@name='aaa']/long[@name='ttf'][.='1']"); 
// terms.ttf=true, terms.sort=index and no terms list - req = req( - "indent","true", - "qt", "/terms", - "terms.fl", "lowerfilt", - "terms.fl", "standardfilt", - "terms.ttf", "true", - "terms.sort", "index", - "terms.limit", "10" - ); - assertQ(req, + req = + req( + "indent", "true", + "qt", "/terms", + "terms.fl", "lowerfilt", + "terms.fl", "standardfilt", + "terms.ttf", "true", + "terms.sort", "index", + "terms.limit", "10"); + assertQ( + req, "count(//lst[@name='lowerfilt']/*)<=10", "count(//lst[@name='standardfilt']/*)<=10", "//lst[@name='lowerfilt']/lst[@name='a']/long[@name='df'][.='2']", @@ -479,14 +654,26 @@ public void testDocFreqAndTotalTermFreqForMultipleFields() throws Exception { @Test public void testPointField() throws Exception { - int nvals = 10000; int maxval = 1000000; + int nvals = 10000; + int maxval = 1000000; // int nvals = 5; int maxval = 2; final int vals[] = new int[nvals]; - for (int i=0; i STAT_FIELDS = - Collections.unmodifiableList(Arrays.asList( "int_i", "long_l", "string_s" )); + static final List STAT_FIELDS = + Collections.unmodifiableList(Arrays.asList("int_i", "long_l", "string_s")); final int NUM_DOCS; final long MAX_LONG; @@ -65,15 +60,16 @@ public class TestDistributedStatsComponentCardinality extends BaseDistributedSea public TestDistributedStatsComponentCardinality() { // we need DVs on point fields to compute stats - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); - + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); + // we want some randomness in the shard number, but we don't want multiple iterations fixShardCount(TEST_NIGHTLY ? 7 : random().nextInt(3) + 1); handle.put("maxScore", SKIPVAL); NUM_DOCS = TestUtil.nextInt(random(), 10000, 15000); MAX_LONG = TestUtil.nextLong(random(), 0, NUM_DOCS * BIG_PRIME); - MIN_LONG = MAX_LONG - (((long)NUM_DOCS-1) * BIG_PRIME); + MIN_LONG = MAX_LONG - (((long) NUM_DOCS - 1) * BIG_PRIME); } /** CAUTION: this builds a very large index */ @@ -88,32 +84,43 @@ public void buildIndex() throws Exception { // fields we will compute cardinality against. 
// which means the number of docs matching a query is the true cardinality for each field - final String strValue = "s"+longValue; - indexDoc(sdoc("id","" + i, - "int_i", ""+i, - "int_i_prehashed_l", ""+HASHER.hashInt(i).asLong(), - "long_l", ""+longValue, - "long_l_prehashed_l", ""+HASHER.hashLong(longValue).asLong(), - "string_s", strValue, - "string_s_prehashed_l", ""+HASHER.hashString(strValue, StandardCharsets.UTF_8).asLong())); + final String strValue = "s" + longValue; + indexDoc( + sdoc( + "id", "" + i, + "int_i", "" + i, + "int_i_prehashed_l", "" + HASHER.hashInt(i).asLong(), + "long_l", "" + longValue, + "long_l_prehashed_l", "" + HASHER.hashLong(longValue).asLong(), + "string_s", strValue, + "string_s_prehashed_l", + "" + HASHER.hashString(strValue, StandardCharsets.UTF_8).asLong())); longValue -= BIG_PRIME; } commit(); - } public void test() throws Exception { buildIndex(); - + { // simple sanity checks - don't leak variables QueryResponse rsp = null; - rsp = query(params("rows", "0", "q", "id:42")); + rsp = query(params("rows", "0", "q", "id:42")); assertEquals(1, rsp.getResults().getNumFound()); - - rsp = query(params("rows", "0", "q", "*:*", - "stats","true", "stats.field", "{!min=true max=true}long_l")); + + rsp = + query( + params( + "rows", + "0", + "q", + "*:*", + "stats", + "true", + "stats.field", + "{!min=true max=true}long_l")); assertEquals(NUM_DOCS, rsp.getResults().getNumFound()); assertEquals(MIN_LONG, Math.round((double) rsp.getFieldStatsInfo().get("long_l").getMin())); assertEquals(MAX_LONG, Math.round((double) rsp.getFieldStatsInfo().get("long_l").getMax())); @@ -124,38 +131,39 @@ public void test() throws Exception { // Some Randomized queries with randomized log2m and max regwidth for (int i = 0; i < NUM_QUERIES; i++) { - // testing shows that on random data, at the size we're dealing with, - // MINIMUM_LOG2M_PARAM is just too absurdly small to give anything remotely close the + // testing shows that on random data, at the size we're dealing with, + // MINIMUM_LOG2M_PARAM is just too absurdly small to give anything remotely close the // the theoretically expected relative error. 
// // So we have to use a slightly higher lower bound on what log2m values we randomly test - final int log2m = TestUtil.nextInt(random(), - 2 + HLL.MINIMUM_LOG2M_PARAM, - HLL.MAXIMUM_LOG2M_PARAM); + final int log2m = + TestUtil.nextInt(random(), 2 + HLL.MINIMUM_LOG2M_PARAM, HLL.MAXIMUM_LOG2M_PARAM); // use max regwidth to try and prevent hash collisions from introducing problems final int regwidth = HLL.MAXIMUM_REGWIDTH_PARAM; - final int lowId = TestUtil.nextInt(random(), 1, NUM_DOCS-2000); - final int highId = TestUtil.nextInt(random(), lowId+1000, NUM_DOCS); - final int numMatches = 1+highId-lowId; + final int lowId = TestUtil.nextInt(random(), 1, NUM_DOCS - 2000); + final int highId = TestUtil.nextInt(random(), lowId + 1000, NUM_DOCS); + final int numMatches = 1 + highId - lowId; SolrParams p = buildCardinalityQ(lowId, highId, log2m, regwidth); QueryResponse rsp = query(p); - assertEquals("sanity check num matches, p="+p, numMatches, rsp.getResults().getNumFound()); + assertEquals("sanity check num matches, p=" + p, numMatches, rsp.getResults().getNumFound()); - Map stats = rsp.getFieldStatsInfo(); + Map stats = rsp.getFieldStatsInfo(); if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) { - log.warn("SOLR-10918: can't relying on exact match with pre-hashed values when using points"); + log.warn( + "SOLR-10918: can't relying on exact match with pre-hashed values when using points"); } else { for (String f : STAT_FIELDS) { - // regardless of log2m and regwidth, the estimated cardinality of the + // regardless of log2m and regwidth, the estimated cardinality of the // hashed vs prehashed values should be exactly the same for each field - - assertEquals(f + ": hashed vs prehashed, real="+ numMatches + ", p=" + p, - stats.get(f).getCardinality().longValue(), - stats.get(f+"_prehashed_l").getCardinality().longValue()); + + assertEquals( + f + ": hashed vs prehashed, real=" + numMatches + ", p=" + p, + stats.get(f).getCardinality().longValue(), + stats.get(f + "_prehashed_l").getCardinality().longValue()); } } @@ -164,74 +172,91 @@ public void test() throws Exception { final double relErr = expectedRelativeError(log2m); final long estimate = stats.get(f).getCardinality().longValue(); - assertTrue(f + ": relativeErr="+relErr+", estimate="+estimate+", real="+numMatches+", p=" + p, - (Math.abs(numMatches - estimate) / numMatches) < relErr); - + assertTrue( + f + + ": relativeErr=" + + relErr + + ", estimate=" + + estimate + + ", real=" + + numMatches + + ", p=" + + p, + (Math.abs(numMatches - estimate) / numMatches) < relErr); } } - + // Some Randomized queries with both low and high accuracy options for (int i = 0; i < NUM_QUERIES; i++) { - final int lowId = TestUtil.nextInt(random(), 1, NUM_DOCS-2000); - final int highId = TestUtil.nextInt(random(), lowId+1000, NUM_DOCS); - final int numMatches = 1+highId-lowId; + final int lowId = TestUtil.nextInt(random(), 1, NUM_DOCS - 2000); + final int highId = TestUtil.nextInt(random(), lowId + 1000, NUM_DOCS); + final int numMatches = 1 + highId - lowId; // WTF? - https://github.com/aggregateknowledge/java-hll/issues/15 - // + // // aparently we can't rely on estimates always being more accurate with higher log2m values? 
// so for now, just try testing accuracy values that differ by at least 0.5 // // (that should give us a significant enough log2m diff that the "highAccuracy" is always // more accurate -- if, not then the entire premise of the float value is fundementally bogus) - // + // final double lowAccuracy = random().nextDouble() / 2; // final double highAccuracy = Math.min(1.0D, lowAccuracy + (random().nextDouble() / 2)); final double highAccuracy = Math.min(1.0D, lowAccuracy + 0.5D); SolrParams p = buildCardinalityQ(lowId, highId, lowAccuracy, highAccuracy); QueryResponse rsp = query(p); - assertEquals("sanity check num matches, p="+p, numMatches, rsp.getResults().getNumFound()); + assertEquals("sanity check num matches, p=" + p, numMatches, rsp.getResults().getNumFound()); - Map stats = rsp.getFieldStatsInfo(); + Map stats = rsp.getFieldStatsInfo(); // can't use STAT_FIELDS here ... // - // hueristic differences for regwidth on 32 bit values mean we get differences + // hueristic differences for regwidth on 32 bit values mean we get differences // between estimates for the normal field vs the prehashed (long) field // - // so we settle for only testing things where the regwidth is consistent + // so we settle for only testing things where the regwidth is consistent // w/the prehashed long... - for (String f : new String[] { "long_l", "string_s" }) { + for (String f : new String[] {"long_l", "string_s"}) { - // regardless of accuracy, the estimated cardinality of the + // regardless of accuracy, the estimated cardinality of the // hashed vs prehashed values should be exactly the same for each field - assertEquals(f + ": hashed vs prehashed (low), real="+ numMatches + ", p=" + p, - stats.get("low_"+f).getCardinality().longValue(), - stats.get("low_"+f+"_prehashed_l").getCardinality().longValue()); - assertEquals(f + ": hashed vs prehashed (high), real="+ numMatches + ", p=" + p, - stats.get("high_"+f).getCardinality().longValue(), - stats.get("high_"+f+"_prehashed_l").getCardinality().longValue()); + assertEquals( + f + ": hashed vs prehashed (low), real=" + numMatches + ", p=" + p, + stats.get("low_" + f).getCardinality().longValue(), + stats.get("low_" + f + "_prehashed_l").getCardinality().longValue()); + assertEquals( + f + ": hashed vs prehashed (high), real=" + numMatches + ", p=" + p, + stats.get("high_" + f).getCardinality().longValue(), + stats.get("high_" + f + "_prehashed_l").getCardinality().longValue()); } - + for (String f : STAT_FIELDS) { - for (String ff : new String[] { f, f+"_prehashed_l"}) { - // for both the prehashed and regular fields, the high accuracy option + for (String ff : new String[] {f, f + "_prehashed_l"}) { + // for both the prehashed and regular fields, the high accuracy option // should always produce an estimate at least as good as the low accuracy option - - long poorEst = stats.get("low_"+ff).getCardinality(); - long goodEst = stats.get("high_"+ff).getCardinality(); - assertTrue(ff + ": goodEst="+goodEst+", poorEst="+poorEst+", real="+numMatches+", p=" + p, - Math.abs(numMatches - goodEst) <= Math.abs(numMatches - poorEst)); + + long poorEst = stats.get("low_" + ff).getCardinality(); + long goodEst = stats.get("high_" + ff).getCardinality(); + assertTrue( + ff + + ": goodEst=" + + goodEst + + ", poorEst=" + + poorEst + + ", real=" + + numMatches + + ", p=" + + p, + Math.abs(numMatches - goodEst) <= Math.abs(numMatches - poorEst)); } } } } - - /** - * Returns the (max) expected relative error according ot the HLL algorithm docs - */ + + /** Returns the (max) 
expected relative error according to the HLL algorithm docs */ private static double expectedRelativeError(final int log2m) { final long m = 1 << log2m; // theoretical error is 1.04D / sqrt(m) @@ -239,53 +264,50 @@ return 1.1D / Math.sqrt(m); } - /** - * Helper utility for building up a set of query params. + /** + * Helper utility for building up a set of query params. + * + * <p>
The main query is a simple range query against the id field (using lowId TO highId). 2 + * stats.field params are generated for every field in {@link #STAT_FIELDS} -- both with and w/o a + * prehashed_l suffix -- using the specified log2m and regwidth. * - * The main query is a simple range query against the id field (using lowId TO highId). - * 2 stats.field params are generated for every field in {@link #STAT_FIELDS} -- - * both with and w/o a prehashed_l suffix -- using the specified log2m and regwidth. - * - * The response keys will be the full field names + *
<p>
The response keys will be the full field names */ - private static SolrParams buildCardinalityQ(final int lowId, - final int highId, - final int log2m, - final int regwidth) { - ModifiableSolrParams p = params("q", "id_i1:["+lowId+" TO "+highId+"]", - "rows", "0", "stats", "true"); - final String prefix = "{!cardinality=true hllLog2m="+log2m+" hllRegwidth="+regwidth; + private static SolrParams buildCardinalityQ( + final int lowId, final int highId, final int log2m, final int regwidth) { + ModifiableSolrParams p = + params("q", "id_i1:[" + lowId + " TO " + highId + "]", "rows", "0", "stats", "true"); + final String prefix = "{!cardinality=true hllLog2m=" + log2m + " hllRegwidth=" + regwidth; for (String f : STAT_FIELDS) { - p.add("stats.field", prefix+"}"+f); - p.add("stats.field", prefix+" hllPreHashed=true}"+f+"_prehashed_l"); + p.add("stats.field", prefix + "}" + f); + p.add("stats.field", prefix + " hllPreHashed=true}" + f + "_prehashed_l"); } return p; } - /** - * Helper utility for building up a set of query params. + /** + * Helper utility for building up a set of query params. * - * The main query is a simple range query against the id field (using lowId TO highId). - * 4 stats.field params are generated for every field in {@link #STAT_FIELDS} -- - * both with and w/o a prehashed_l suffix, and using both the low and high accuracy values + *
<p>
The main query is a simple range query against the id field (using lowId TO highId). 4 + * stats.field params are generated for every field in {@link #STAT_FIELDS} -- both with and w/o a + * prehashed_l suffix, and using both the low and high accuracy values * - * The response keys will be the full field names with either a "low_" or "high_" prefix + *
<p>
The response keys will be the full field names with either a "low_" or "high_" prefix */ - private static SolrParams buildCardinalityQ(final int lowId, - final int highId, - final double lowAccuracy, - final double highAccuracy) { - ModifiableSolrParams p = params("q", "id_i1:["+lowId+" TO "+highId+"]", - "rows", "0", "stats", "true"); - final String[] prefixes = new String[] { - "{!cardinality=" + lowAccuracy + " key=low_", - "{!cardinality=" + highAccuracy + " key=high_" - }; + private static SolrParams buildCardinalityQ( + final int lowId, final int highId, final double lowAccuracy, final double highAccuracy) { + ModifiableSolrParams p = + params("q", "id_i1:[" + lowId + " TO " + highId + "]", "rows", "0", "stats", "true"); + final String[] prefixes = + new String[] { + "{!cardinality=" + lowAccuracy + " key=low_", + "{!cardinality=" + highAccuracy + " key=high_" + }; for (String f : STAT_FIELDS) { for (String prefix : prefixes) { - p.add("stats.field", prefix+f+"}"+f); - p.add("stats.field", prefix+f+"_prehashed_l hllPreHashed=true}"+f+"_prehashed_l"); + p.add("stats.field", prefix + f + "}" + f); + p.add("stats.field", prefix + f + "_prehashed_l hllPreHashed=true}" + f + "_prehashed_l"); } } return p; diff --git a/solr/core/src/test/org/apache/solr/handler/component/TestExpandComponent.java b/solr/core/src/test/org/apache/solr/handler/component/TestExpandComponent.java index 88218bc6fdb..bbd6131de80 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/TestExpandComponent.java +++ b/solr/core/src/test/org/apache/solr/handler/component/TestExpandComponent.java @@ -18,7 +18,6 @@ import java.util.Arrays; import java.util.Collections; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.ModifiableSolrParams; @@ -45,92 +44,216 @@ public void setUp() throws Exception { } private static String maybeTopFc() { - return (random().nextBoolean() ? " hint="+ CollapsingQParserPlugin.HINT_TOP_FC : ""); + return (random().nextBoolean() ? " hint=" + CollapsingQParserPlugin.HINT_TOP_FC : ""); } + private static String floatAppend() { - String floatAppend = "."+random().nextInt(100); //Append the float - floatAppend = Float.toString(Float.parseFloat(floatAppend)); //Create a proper float out of the string. - floatAppend = floatAppend.substring(1); //Drop off the leading 0, leaving just the decimal + String floatAppend = "." + random().nextInt(100); // Append the float + floatAppend = + Float.toString(Float.parseFloat(floatAppend)); // Create a proper float out of the string. 
+ floatAppend = floatAppend.substring(1); // Drop off the leading 0, leaving just the decimal return floatAppend; } - + public void testString() throws Exception { _testExpand("group_s", "", maybeTopFc()); } + public void testStringDv() throws Exception { _testExpand("group_s_dv", "", maybeTopFc()); } - + public void testInt() throws Exception { _testExpand("group_i", "", ""); } + public void testIntDv() throws Exception { _testExpand("group_ti_dv", "", ""); } - + public void testFloat() throws Exception { _testExpand("group_f", floatAppend(), ""); _testExpand("group_f", ".0", ""); // explicit 0 check for 0 vs null group } + public void testFloatDv() throws Exception { _testExpand("group_tf_dv", floatAppend(), ""); _testExpand("group_tf_dv", ".0", ""); // explicit 0 check for 0 vs null group } - + private void _testExpand(String group, String floatAppend, String hint) throws Exception { - // NOTE: one of our groups uses '0' as the group value to explicitly check numeric expand for 0 vs null group behavior + // NOTE: one of our groups uses '0' as the group value to explicitly check numeric expand for 0 + // vs null group behavior String[][] docs = { - {"id","1", "term_s", "YYYY", group, "1"+floatAppend, "test_i", "5", "test_l", "10", "test_f", "2000", "type_s", "parent"}, - {"id","2", "term_s","YYYY", group, "1"+floatAppend, "test_i", "50", "test_l", "100", "test_f", "200", "type_s", "child"}, - {"id","3", "term_s", "YYYY", "test_i", "5000", "test_l", "100", "test_f", "200", "type_s", "other"}, - {"id","4", "term_s", "YYYY", "test_i", "40", "test_l", "1000", "test_f", "2000", "type_s", "other"}, - {"id","5", "term_s", "YYYY", group, "0"+floatAppend, "test_i", "4", "test_l", "10", "test_f", "2000", "type_s", "parent"}, - {"id","6", "term_s","YYYY", group, "0"+floatAppend, "test_i", "10", "test_l", "100", "test_f", "200", "type_s", "child"}, - {"id","7", "term_s", "YYYY", group, "1"+floatAppend, "test_i", "1", "test_l", "100000", "test_f", "2000", "type_s", "child"}, - {"id","8", "term_s","YYYY", group, "0"+floatAppend, "test_i", "2", "test_l", "100000", "test_f", "200", "type_s", "child"} + { + "id", + "1", + "term_s", + "YYYY", + group, + "1" + floatAppend, + "test_i", + "5", + "test_l", + "10", + "test_f", + "2000", + "type_s", + "parent" + }, + { + "id", + "2", + "term_s", + "YYYY", + group, + "1" + floatAppend, + "test_i", + "50", + "test_l", + "100", + "test_f", + "200", + "type_s", + "child" + }, + { + "id", "3", "term_s", "YYYY", "test_i", "5000", "test_l", "100", "test_f", "200", "type_s", + "other" + }, + { + "id", "4", "term_s", "YYYY", "test_i", "40", "test_l", "1000", "test_f", "2000", "type_s", + "other" + }, + { + "id", + "5", + "term_s", + "YYYY", + group, + "0" + floatAppend, + "test_i", + "4", + "test_l", + "10", + "test_f", + "2000", + "type_s", + "parent" + }, + { + "id", + "6", + "term_s", + "YYYY", + group, + "0" + floatAppend, + "test_i", + "10", + "test_l", + "100", + "test_f", + "200", + "type_s", + "child" + }, + { + "id", + "7", + "term_s", + "YYYY", + group, + "1" + floatAppend, + "test_i", + "1", + "test_l", + "100000", + "test_f", + "2000", + "type_s", + "child" + }, + { + "id", + "8", + "term_s", + "YYYY", + group, + "0" + floatAppend, + "test_i", + "2", + "test_l", + "100000", + "test_f", + "200", + "type_s", + "child" + } }; createIndex(docs); ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); - params.add("fq", "{!collapse field="+group+hint+"}"); + params.add("fq", "{!collapse field=" + group + hint + "}"); 
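    // (The {!collapse} filter query keeps one representative doc per value of the collapse field;
    // with expand=true the component returns each group's remaining members under "expanded".)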
params.add("defType", "edismax"); params.add("bf", "field(test_i)"); params.add("expand", "true"); - //First basic test case. - assertQ(req(params), "*[count(/response/result/doc)=2]", + // First basic test case. + assertQ( + req(params), + "*[count(/response/result/doc)=2]", "*[count(/response/lst[@name='expanded']/result)=2]", "/response/result/doc[1]/str[@name='id'][.='2']", "/response/result/doc[2]/str[@name='id'][.='6']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='1']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='7']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='5']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[2]/str[@name='id'][.='8']" - ); - - //Basic test case page 2 - assertQ(req(params, "rows", "1", "start", "1"), "*[count(/response/result/doc)=1]", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='1']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[2]/str[@name='id'][.='7']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[1]/str[@name='id'][.='5']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[2]/str[@name='id'][.='8']"); + + // Basic test case page 2 + assertQ( + req(params, "rows", "1", "start", "1"), + "*[count(/response/result/doc)=1]", "*[count(/response/lst[@name='expanded']/result)=1]", "/response/result/doc[1]/str[@name='id'][.='6']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='5']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[2]/str[@name='id'][.='8']" - ); + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[1]/str[@name='id'][.='5']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[2]/str[@name='id'][.='8']"); - //Test expand.sort - //the "sub()" just testing function queries - assertQ(req(params,"expand.sort", "test_l desc, sub(1,1) asc"), + // Test expand.sort + // the "sub()" just testing function queries + assertQ( + req(params, "expand.sort", "test_l desc, sub(1,1) asc"), "*[count(/response/result/doc)=2]", "*[count(/response/lst[@name='expanded']/result)=2]", "/response/result/doc[1]/str[@name='id'][.='2']", "/response/result/doc[2]/str[@name='id'][.='6']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='7']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='1']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='8']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[2]/str[@name='id'][.='5']" - ); + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='7']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[2]/str[@name='id'][.='1']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[1]/str[@name='id'][.='8']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[2]/str[@name='id'][.='5']"); - //Test with nullPolicy... + // Test with nullPolicy... // Main result set should include the doc(s) with null value in the collapse field. 
// By default ExpandComponent should ignore docs with null values in the collapse fields.... params = new ModifiableSolrParams(); @@ -139,81 +262,123 @@ private void _testExpand(String group, String floatAppend, String hint) throws E params.add("bf", "field(test_i)"); params.add("expand", "true"); params.add("expand.sort", "test_l desc"); - assertQ(req(params, - "fq", "{!collapse field="+group+hint+" nullPolicy=collapse}") - ,"*[count(/response/result/doc)=3]" - ,"*[count(/response/lst[@name='expanded']/result)=2]" - ,"/response/result/doc[1]/str[@name='id'][.='3']" - ,"/response/result/doc[2]/str[@name='id'][.='2']" - ,"/response/result/doc[3]/str[@name='id'][.='6']" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='7']" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='1']" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='8']" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[2]/str[@name='id'][.='5']" - ); - assertQ(req(params, - "fq", "{!collapse field="+group+hint+" nullPolicy=expand}") - ,"*[count(/response/result/doc)=4]" - ,"*[count(/response/lst[@name='expanded']/result)=2]" - ,"/response/result/doc[1]/str[@name='id'][.='3']" - ,"/response/result/doc[2]/str[@name='id'][.='2']" - ,"/response/result/doc[3]/str[@name='id'][.='4']" - ,"/response/result/doc[4]/str[@name='id'][.='6']" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='7']" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='1']" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='8']" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[2]/str[@name='id'][.='5']" - ); + assertQ( + req(params, "fq", "{!collapse field=" + group + hint + " nullPolicy=collapse}"), + "*[count(/response/result/doc)=3]", + "*[count(/response/lst[@name='expanded']/result)=2]", + "/response/result/doc[1]/str[@name='id'][.='3']", + "/response/result/doc[2]/str[@name='id'][.='2']", + "/response/result/doc[3]/str[@name='id'][.='6']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='7']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[2]/str[@name='id'][.='1']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[1]/str[@name='id'][.='8']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[2]/str[@name='id'][.='5']"); + assertQ( + req(params, "fq", "{!collapse field=" + group + hint + " nullPolicy=expand}"), + "*[count(/response/result/doc)=4]", + "*[count(/response/lst[@name='expanded']/result)=2]", + "/response/result/doc[1]/str[@name='id'][.='3']", + "/response/result/doc[2]/str[@name='id'][.='2']", + "/response/result/doc[3]/str[@name='id'][.='4']", + "/response/result/doc[4]/str[@name='id'][.='6']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='7']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[2]/str[@name='id'][.='1']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[1]/str[@name='id'][.='8']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[2]/str[@name='id'][.='5']"); // Adding expand.nullGroup should cause a 'null' group in our expanded 
results... params.add("expand.nullGroup", "true"); - assertQ(req(params, - "fq", "{!collapse field="+group+hint+" nullPolicy=collapse}") - ,"*[count(/response/result/doc)=3]" - ,"*[count(/response/lst[@name='expanded']/result)=3]" - ,"/response/result/doc[1]/str[@name='id'][.='3']" - ,"/response/result/doc[2]/str[@name='id'][.='2']" - ,"/response/result/doc[3]/str[@name='id'][.='6']" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='7']" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='1']" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='8']" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[2]/str[@name='id'][.='5']" - ,"/response/lst[@name='expanded']/result[not(@name)]/doc[1]/str[@name='id'][.='4']" - ); - assertQ(req(params, - // no null group here because all null docs already in current page - "fq", "{!collapse field="+group+hint+" nullPolicy=expand}") - ,"*[count(/response/result/doc)=4]" - ,"*[count(/response/lst[@name='expanded']/result)=2]" - ,"/response/result/doc[1]/str[@name='id'][.='3']" - ,"/response/result/doc[2]/str[@name='id'][.='2']" - ,"/response/result/doc[3]/str[@name='id'][.='4']" - ,"/response/result/doc[4]/str[@name='id'][.='6']" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='7']" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='1']" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='8']" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[2]/str[@name='id'][.='5']" - ); - assertQ(req(params, - // limiting rows should cause null group to pop up since we now have a null doc not on page... - "rows", "2", - "fq", "{!collapse field="+group+hint+" nullPolicy=expand}") - ,"*[count(/response/result/doc)=2]" - ,"*[count(/response/lst[@name='expanded']/result)=2]" - ,"/response/result/doc[1]/str[@name='id'][.='3']" - ,"/response/result/doc[2]/str[@name='id'][.='2']" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='7']" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='1']" - ,"/response/lst[@name='expanded']/result[not(@name)]/doc[1]/str[@name='id'][.='4']" - ); - assertQ(req(params, - // with only 1 rows, the only expanded group we should see is the nullGroup... 
- "rows", "1", - "fq", "{!collapse field="+group+hint+" nullPolicy=expand}") - ,"*[count(/response/result/doc)=1]" - ,"*[count(/response/lst[@name='expanded']/result)=1]" - ,"/response/result/doc[1]/str[@name='id'][.='3']" - ,"/response/lst[@name='expanded']/result[not(@name)]/doc[1]/str[@name='id'][.='4']" - ); + assertQ( + req(params, "fq", "{!collapse field=" + group + hint + " nullPolicy=collapse}"), + "*[count(/response/result/doc)=3]", + "*[count(/response/lst[@name='expanded']/result)=3]", + "/response/result/doc[1]/str[@name='id'][.='3']", + "/response/result/doc[2]/str[@name='id'][.='2']", + "/response/result/doc[3]/str[@name='id'][.='6']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='7']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[2]/str[@name='id'][.='1']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[1]/str[@name='id'][.='8']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[2]/str[@name='id'][.='5']", + "/response/lst[@name='expanded']/result[not(@name)]/doc[1]/str[@name='id'][.='4']"); + assertQ( + req( + params, + // no null group here because all null docs already in current page + "fq", + "{!collapse field=" + group + hint + " nullPolicy=expand}"), + "*[count(/response/result/doc)=4]", + "*[count(/response/lst[@name='expanded']/result)=2]", + "/response/result/doc[1]/str[@name='id'][.='3']", + "/response/result/doc[2]/str[@name='id'][.='2']", + "/response/result/doc[3]/str[@name='id'][.='4']", + "/response/result/doc[4]/str[@name='id'][.='6']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='7']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[2]/str[@name='id'][.='1']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[1]/str[@name='id'][.='8']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[2]/str[@name='id'][.='5']"); + assertQ( + req( + params, + // limiting rows should cause null group to pop up since we now have a null doc not on + // page... + "rows", + "2", + "fq", + "{!collapse field=" + group + hint + " nullPolicy=expand}"), + "*[count(/response/result/doc)=2]", + "*[count(/response/lst[@name='expanded']/result)=2]", + "/response/result/doc[1]/str[@name='id'][.='3']", + "/response/result/doc[2]/str[@name='id'][.='2']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='7']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[2]/str[@name='id'][.='1']", + "/response/lst[@name='expanded']/result[not(@name)]/doc[1]/str[@name='id'][.='4']"); + assertQ( + req( + params, + // with only 1 rows, the only expanded group we should see is the nullGroup... 
+ "rows", + "1", + "fq", + "{!collapse field=" + group + hint + " nullPolicy=expand}"), + "*[count(/response/result/doc)=1]", + "*[count(/response/lst[@name='expanded']/result)=1]", + "/response/result/doc[1]/str[@name='id'][.='3']", + "/response/lst[@name='expanded']/result[not(@name)]/doc[1]/str[@name='id'][.='4']"); // Test override expand.q // the fact that expand.q matches docs in null group shouldn't matter w/o expand.nullGroup=true @@ -225,31 +390,48 @@ private void _testExpand(String group, String floatAppend, String hint) throws E params.add("expand.q", "type_s:(child OR other)"); params.add("expand.field", group); params.add("expand.sort", "test_l desc"); - assertQ(req(params) - ,"*[count(/response/result/doc)=2]" - ,"*[count(/response/lst[@name='expanded']/result)=2]" - ,"/response/result/doc[1]/str[@name='id'][.='1']" - ,"/response/result/doc[2]/str[@name='id'][.='5']" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='7']" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='2']" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='8']" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[2]/str[@name='id'][.='6']" - ); - assertQ(req(params, - // now the 'other' docs should show up in an expanded null group - "expand.nullGroup", "true") - ,"*[count(/response/result/doc)=2]" - ,"*[count(/response/lst[@name='expanded']/result)=3]" - ,"/response/result/doc[1]/str[@name='id'][.='1']" - ,"/response/result/doc[2]/str[@name='id'][.='5']" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='7']" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='2']" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='8']" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[2]/str[@name='id'][.='6']" - ,"/response/lst[@name='expanded']/result[not(@name)]/doc[1]/str[@name='id'][.='4']" - ,"/response/lst[@name='expanded']/result[not(@name)]/doc[2]/str[@name='id'][.='3']" - ); - + assertQ( + req(params), + "*[count(/response/result/doc)=2]", + "*[count(/response/lst[@name='expanded']/result)=2]", + "/response/result/doc[1]/str[@name='id'][.='1']", + "/response/result/doc[2]/str[@name='id'][.='5']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='7']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[2]/str[@name='id'][.='2']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[1]/str[@name='id'][.='8']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[2]/str[@name='id'][.='6']"); + assertQ( + req( + params, + // now the 'other' docs should show up in an expanded null group + "expand.nullGroup", + "true"), + "*[count(/response/result/doc)=2]", + "*[count(/response/lst[@name='expanded']/result)=3]", + "/response/result/doc[1]/str[@name='id'][.='1']", + "/response/result/doc[2]/str[@name='id'][.='5']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='7']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[2]/str[@name='id'][.='2']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[1]/str[@name='id'][.='8']", + "/response/lst[@name='expanded']/result[@name='0" 
+ + floatAppend + + "']/doc[2]/str[@name='id'][.='6']", + "/response/lst[@name='expanded']/result[not(@name)]/doc[1]/str[@name='id'][.='4']", + "/response/lst[@name='expanded']/result[not(@name)]/doc[2]/str[@name='id'][.='3']"); // Test override expand.fq // the fact that expand.fq matches docs in null group shouldn't matter w/o expand.nullGroup=true @@ -262,33 +444,50 @@ private void _testExpand(String group, String floatAppend, String hint) throws E params.add("expand.fq", "type_s:(child OR other)"); params.add("expand.field", group); params.add("expand.sort", "test_l desc"); - assertQ(req(params) - ,"*[count(/response/result/doc)=2]" - ,"*[count(/response/lst[@name='expanded']/result)=2]" - ,"/response/result/doc[1]/str[@name='id'][.='1']" - ,"/response/result/doc[2]/str[@name='id'][.='5']" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='7']" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='2']" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='8']" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[2]/str[@name='id'][.='6']" - ); - assertQ(req(params, - // now the 'other' docs should show up in an expanded null group - "expand.nullGroup", "true") - ,"*[count(/response/result/doc)=2]" - ,"*[count(/response/lst[@name='expanded']/result)=3]" - ,"/response/result/doc[1]/str[@name='id'][.='1']" - ,"/response/result/doc[2]/str[@name='id'][.='5']" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='7']" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='2']" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='8']" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[2]/str[@name='id'][.='6']" - ,"/response/lst[@name='expanded']/result[not(@name)]/doc[1]/str[@name='id'][.='4']" - ,"/response/lst[@name='expanded']/result[not(@name)]/doc[2]/str[@name='id'][.='3']" - ); - - - //Test override expand.fq and expand.q + assertQ( + req(params), + "*[count(/response/result/doc)=2]", + "*[count(/response/lst[@name='expanded']/result)=2]", + "/response/result/doc[1]/str[@name='id'][.='1']", + "/response/result/doc[2]/str[@name='id'][.='5']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='7']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[2]/str[@name='id'][.='2']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[1]/str[@name='id'][.='8']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[2]/str[@name='id'][.='6']"); + assertQ( + req( + params, + // now the 'other' docs should show up in an expanded null group + "expand.nullGroup", + "true"), + "*[count(/response/result/doc)=2]", + "*[count(/response/lst[@name='expanded']/result)=3]", + "/response/result/doc[1]/str[@name='id'][.='1']", + "/response/result/doc[2]/str[@name='id'][.='5']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='7']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[2]/str[@name='id'][.='2']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[1]/str[@name='id'][.='8']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + 
"']/doc[2]/str[@name='id'][.='6']", + "/response/lst[@name='expanded']/result[not(@name)]/doc[1]/str[@name='id'][.='4']", + "/response/lst[@name='expanded']/result[not(@name)]/doc[2]/str[@name='id'][.='3']"); + + // Test override expand.fq and expand.q params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("fq", "type_s:parent"); @@ -299,34 +498,48 @@ private void _testExpand(String group, String floatAppend, String hint) throws E params.add("expand.fq", "*:*"); params.add("expand.field", group); params.add("expand.sort", "test_l desc"); - assertQ(req(params), "*[count(/response/result/doc)=2]", + assertQ( + req(params), + "*[count(/response/result/doc)=2]", "*[count(/response/lst[@name='expanded']/result)=2]", "/response/result/doc[1]/str[@name='id'][.='1']", "/response/result/doc[2]/str[@name='id'][.='5']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='7']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='2']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='8']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[2]/str[@name='id'][.='6']" - ); + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='7']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[2]/str[@name='id'][.='2']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[1]/str[@name='id'][.='8']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[2]/str[@name='id'][.='6']"); - //Test expand.rows + // Test expand.rows params = new ModifiableSolrParams(); params.add("q", "*:*"); - params.add("fq", "{!collapse field="+group+hint+"}"); + params.add("fq", "{!collapse field=" + group + hint + "}"); params.add("defType", "edismax"); params.add("bf", "field(test_i)"); params.add("expand", "true"); params.add("expand.sort", "test_l desc"); params.add("expand.rows", "1"); - assertQ(req(params), "*[count(/response/result/doc)=2]", + assertQ( + req(params), + "*[count(/response/result/doc)=2]", "*[count(/response/lst[@name='expanded']/result)=2]", - "*[count(/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc)=1]", - "*[count(/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc)=1]", + "*[count(/response/lst[@name='expanded']/result[@name='1" + floatAppend + "']/doc)=1]", + "*[count(/response/lst[@name='expanded']/result[@name='0" + floatAppend + "']/doc)=1]", "/response/result/doc[1]/str[@name='id'][.='2']", "/response/result/doc[2]/str[@name='id'][.='6']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='7']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='8']" - ); + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='7']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[1]/str[@name='id'][.='8']"); // Test expand.rows=0 - no docs only expand count params = new ModifiableSolrParams(); @@ -335,33 +548,36 @@ private void _testExpand(String group, String floatAppend, String hint) throws E params.add("bf", "field(test_i)"); params.add("expand", "true"); params.add("expand.rows", "0"); - assertQ(req(params, - "fq", "{!collapse field="+group+hint+"}") - ,"*[count(/response/result/doc)=2]" - 
,"/response/result/doc[1]/str[@name='id'][.='2']" - ,"/response/result/doc[2]/str[@name='id'][.='6']" - ,"*[count(/response/lst[@name='expanded']/result)=2]" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"' and @numFound=2]" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"' and @numFound=2]" - ,"*[count(/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc)=0]" - ,"*[count(/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc)=0]" - ); - assertQ(req(params, - // same, but with collapsed nulls and a nullGroup, we should have our expanded null group count - "fq", "{!collapse field="+group+hint+" nullPolicy=collapse}", - "expand.nullGroup", "true") - ,"*[count(/response/result/doc)=3]" - ,"/response/result/doc[1]/str[@name='id'][.='3']" - ,"/response/result/doc[2]/str[@name='id'][.='2']" - ,"/response/result/doc[3]/str[@name='id'][.='6']" - ,"*[count(/response/lst[@name='expanded']/result)=3]" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"' and @numFound=2]" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"' and @numFound=2]" - ,"/response/lst[@name='expanded']/result[not(@name) and @numFound=1]" - ,"*[count(/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc)=0]" - ,"*[count(/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc)=0]" - ,"*[count(/response/lst[@name='expanded']/result[not(@name)]/doc)=0]" - ); + assertQ( + req(params, "fq", "{!collapse field=" + group + hint + "}"), + "*[count(/response/result/doc)=2]", + "/response/result/doc[1]/str[@name='id'][.='2']", + "/response/result/doc[2]/str[@name='id'][.='6']", + "*[count(/response/lst[@name='expanded']/result)=2]", + "/response/lst[@name='expanded']/result[@name='1" + floatAppend + "' and @numFound=2]", + "/response/lst[@name='expanded']/result[@name='0" + floatAppend + "' and @numFound=2]", + "*[count(/response/lst[@name='expanded']/result[@name='1" + floatAppend + "']/doc)=0]", + "*[count(/response/lst[@name='expanded']/result[@name='0" + floatAppend + "']/doc)=0]"); + assertQ( + req( + params, + // same, but with collapsed nulls and a nullGroup, we should have our expanded null + // group count + "fq", + "{!collapse field=" + group + hint + " nullPolicy=collapse}", + "expand.nullGroup", + "true"), + "*[count(/response/result/doc)=3]", + "/response/result/doc[1]/str[@name='id'][.='3']", + "/response/result/doc[2]/str[@name='id'][.='2']", + "/response/result/doc[3]/str[@name='id'][.='6']", + "*[count(/response/lst[@name='expanded']/result)=3]", + "/response/lst[@name='expanded']/result[@name='1" + floatAppend + "' and @numFound=2]", + "/response/lst[@name='expanded']/result[@name='0" + floatAppend + "' and @numFound=2]", + "/response/lst[@name='expanded']/result[not(@name) and @numFound=1]", + "*[count(/response/lst[@name='expanded']/result[@name='1" + floatAppend + "']/doc)=0]", + "*[count(/response/lst[@name='expanded']/result[@name='0" + floatAppend + "']/doc)=0]", + "*[count(/response/lst[@name='expanded']/result[not(@name)]/doc)=0]"); // Test expand.rows = 0 with expand.field // the fact that expand.q matches docs in null group shouldn't matter w/o expand.nullGroup=true @@ -375,34 +591,34 @@ private void _testExpand(String group, String floatAppend, String hint) throws E params.add("expand.field", group); params.add("expand.rows", "0"); params.add("fl", "id"); - assertQ(req(params) - ,"*[count(/response/result/doc)=2]" - ,"/response/result/doc[1]/str[@name='id'][.='1']" - 
,"/response/result/doc[2]/str[@name='id'][.='5']" - ,"*[count(/response/lst[@name='expanded']/result)=2]" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"' and @numFound=2]" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"' and @numFound=2]" - ,"*[count(/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc)=0]" - ,"*[count(/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc)=0]" - ); - assertQ(req(params, - // now the 'other' docs should show up in an expanded null group - "expand.nullGroup", "true") - ,"*[count(/response/result/doc)=2]" - ,"/response/result/doc[1]/str[@name='id'][.='1']" - ,"/response/result/doc[2]/str[@name='id'][.='5']" - ,"*[count(/response/lst[@name='expanded']/result)=3]" - ,"/response/lst[@name='expanded']/result[@name='1"+floatAppend+"' and @numFound=2]" - ,"/response/lst[@name='expanded']/result[@name='0"+floatAppend+"' and @numFound=2]" - ,"/response/lst[@name='expanded']/result[not(@name) and @numFound=2]" - ,"*[count(/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc)=0]" - ,"*[count(/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc)=0]" - ,"*[count(/response/lst[@name='expanded']/result[not(@name)]/doc)=0]" - - ); - - - //Test score with expand.rows = 0 + assertQ( + req(params), + "*[count(/response/result/doc)=2]", + "/response/result/doc[1]/str[@name='id'][.='1']", + "/response/result/doc[2]/str[@name='id'][.='5']", + "*[count(/response/lst[@name='expanded']/result)=2]", + "/response/lst[@name='expanded']/result[@name='1" + floatAppend + "' and @numFound=2]", + "/response/lst[@name='expanded']/result[@name='0" + floatAppend + "' and @numFound=2]", + "*[count(/response/lst[@name='expanded']/result[@name='1" + floatAppend + "']/doc)=0]", + "*[count(/response/lst[@name='expanded']/result[@name='0" + floatAppend + "']/doc)=0]"); + assertQ( + req( + params, + // now the 'other' docs should show up in an expanded null group + "expand.nullGroup", + "true"), + "*[count(/response/result/doc)=2]", + "/response/result/doc[1]/str[@name='id'][.='1']", + "/response/result/doc[2]/str[@name='id'][.='5']", + "*[count(/response/lst[@name='expanded']/result)=3]", + "/response/lst[@name='expanded']/result[@name='1" + floatAppend + "' and @numFound=2]", + "/response/lst[@name='expanded']/result[@name='0" + floatAppend + "' and @numFound=2]", + "/response/lst[@name='expanded']/result[not(@name) and @numFound=2]", + "*[count(/response/lst[@name='expanded']/result[@name='1" + floatAppend + "']/doc)=0]", + "*[count(/response/lst[@name='expanded']/result[@name='0" + floatAppend + "']/doc)=0]", + "*[count(/response/lst[@name='expanded']/result[not(@name)]/doc)=0]"); + + // Test score with expand.rows = 0 params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("fq", "type_s:parent"); @@ -412,213 +628,309 @@ private void _testExpand(String group, String floatAppend, String hint) throws E params.add("expand.fq", "*:*"); params.add("expand.field", group); params.add("expand.rows", "0"); - assertQ(req(params, "fl", "id,score"), "*[count(/response/result/doc)=2]", - "*[count(/response/lst[@name='expanded']/result)=2]", - "*[count(/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc)=0]", - "*[count(/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc)=0]", - "*[count(/response/lst[@name='expanded']/result[@maxScore])=0]", //maxScore should not be available - "/response/result/doc[1]/str[@name='id'][.='1']", - 
"/response/result/doc[2]/str[@name='id'][.='5']", - "count(//*[@name='score' and .='NaN'])=0" - - ); - - //Test no group results + assertQ( + req(params, "fl", "id,score"), + "*[count(/response/result/doc)=2]", + "*[count(/response/lst[@name='expanded']/result)=2]", + "*[count(/response/lst[@name='expanded']/result[@name='1" + floatAppend + "']/doc)=0]", + "*[count(/response/lst[@name='expanded']/result[@name='0" + floatAppend + "']/doc)=0]", + "*[count(/response/lst[@name='expanded']/result[@maxScore])=0]", // maxScore should not be + // available + "/response/result/doc[1]/str[@name='id'][.='1']", + "/response/result/doc[2]/str[@name='id'][.='5']", + "count(//*[@name='score' and .='NaN'])=0"); + + // Test no group results params = new ModifiableSolrParams(); params.add("q", "test_i:5"); - params.add("fq", "{!collapse field="+group+hint+"}"); + params.add("fq", "{!collapse field=" + group + hint + "}"); params.add("defType", "edismax"); params.add("bf", "field(test_i)"); params.add("expand", "true"); params.add("expand.sort", "test_l desc"); params.add("expand.rows", "1"); - assertQ(req(params), "*[count(/response/result/doc)=1]", - "*[count(/response/lst[@name='expanded']/result)=0]" - ); + assertQ( + req(params), + "*[count(/response/result/doc)=1]", + "*[count(/response/lst[@name='expanded']/result)=0]"); // Test zero results params = new ModifiableSolrParams(); params.add("q", "test_i:5532535"); - params.add("fq", "{!collapse field="+group+hint+" nullPolicy=collapse}"); + params.add("fq", "{!collapse field=" + group + hint + " nullPolicy=collapse}"); params.add("defType", "edismax"); params.add("bf", "field(test_i)"); params.add("expand", "true"); params.add("expand.sort", "test_l desc"); params.add("expand.rows", "1"); - assertQ(req(params) - ,"*[count(/response/result/doc)=0]" - ,"*[count(/response/lst[@name='expanded']/result)=0]" - ); - assertQ(req(params, - "expand.nullGroup", "true") - ,"*[count(/response/result/doc)=0]" - ,"*[count(/response/lst[@name='expanded']/result)=0]" - ); + assertQ( + req(params), + "*[count(/response/result/doc)=0]", + "*[count(/response/lst[@name='expanded']/result)=0]"); + assertQ( + req(params, "expand.nullGroup", "true"), + "*[count(/response/result/doc)=0]", + "*[count(/response/lst[@name='expanded']/result)=0]"); // Query has results, but expand.q has none... 
params = new ModifiableSolrParams(); params.add("q", "*:*"); - params.add("fq", "{!collapse field="+group+hint+" nullPolicy=collapse}"); + params.add("fq", "{!collapse field=" + group + hint + " nullPolicy=collapse}"); params.add("defType", "edismax"); params.add("bf", "field(test_i)"); params.add("expand", "true"); params.add("expand.q", "test_i:5532535"); - assertQ(req(params) - ,"*[count(/response/result/doc)=3]" - ,"*[count(/response/lst[@name='expanded']/result)=0]" - ); - assertQ(req(params, - "expand.nullGroup", "true") - ,"*[count(/response/result/doc)=3]" - ,"*[count(/response/lst[@name='expanded']/result)=0]" - ); - - - //Test key-only fl + assertQ( + req(params), + "*[count(/response/result/doc)=3]", + "*[count(/response/lst[@name='expanded']/result)=0]"); + assertQ( + req(params, "expand.nullGroup", "true"), + "*[count(/response/result/doc)=3]", + "*[count(/response/lst[@name='expanded']/result)=0]"); + + // Test key-only fl params = new ModifiableSolrParams(); params.add("q", "*:*"); - params.add("fq", "{!collapse field="+group+hint+"}"); + params.add("fq", "{!collapse field=" + group + hint + "}"); params.add("defType", "edismax"); params.add("bf", "field(test_i)"); params.add("expand", "true"); - assertQ(req(params, "fl", "id"), + assertQ( + req(params, "fl", "id"), "*[count(/response/result/doc)=2]", "*[count(/response/lst[@name='expanded']/result)=2]", "/response/result/doc[1]/str[@name='id'][.='2']", "/response/result/doc[2]/str[@name='id'][.='6']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='1']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='7']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='5']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[2]/str[@name='id'][.='8']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='1']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[2]/str[@name='id'][.='7']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[1]/str[@name='id'][.='5']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[2]/str[@name='id'][.='8']", "count(//*[@name='score'])=0" // score shouldn't be returned when not requested - ); + ); - //Test key-only fl with score but no sorting - assertQ(req(params, "fl", "id,score"), "*[count(/response/result/doc)=2]", + // Test key-only fl with score but no sorting + assertQ( + req(params, "fl", "id,score"), + "*[count(/response/result/doc)=2]", "*[count(/response/lst[@name='expanded']/result)=2]", "/response/result/doc[1]/str[@name='id'][.='2']", "/response/result/doc[2]/str[@name='id'][.='6']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='1']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='7']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='5']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[2]/str[@name='id'][.='8']", - "count(//*[@name='score' and .='NaN'])=0" - ); - + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='1']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[2]/str[@name='id'][.='7']", + "/response/lst[@name='expanded']/result[@name='0" + + 
floatAppend + + "']/doc[1]/str[@name='id'][.='5']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[2]/str[@name='id'][.='8']", + "count(//*[@name='score' and .='NaN'])=0"); // Test with fl and sort=score desc - assertQ(req(params, "expand.sort", "score desc", "fl", "id,score"), + assertQ( + req(params, "expand.sort", "score desc", "fl", "id,score"), "*[count(/response/result/doc)=2]", "*[count(/response/lst[@name='expanded']/result)=2]", "/response/result/doc[1]/str[@name='id'][.='2']", "/response/result/doc[2]/str[@name='id'][.='6']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='1']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='7']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='5']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[2]/str[@name='id'][.='8']", - "count(//*[@name='score' and .='NaN'])=0" - ); - - //Test fl with score, sort by non-score - assertQ(req(params, "expand.sort", "test_l desc", "fl", "id,test_i,score"), + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='1']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[2]/str[@name='id'][.='7']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[1]/str[@name='id'][.='5']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[2]/str[@name='id'][.='8']", + "count(//*[@name='score' and .='NaN'])=0"); + + // Test fl with score, sort by non-score + assertQ( + req(params, "expand.sort", "test_l desc", "fl", "id,test_i,score"), "*[count(/response/result/doc)=2]", "count(/response/lst[@name='expanded']/result)=2", "/response/result/doc[1]/str[@name='id'][.='2']", "/response/result/doc[2]/str[@name='id'][.='6']", // note that the expanded docs are score descending order (score is 1 test_i) - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='7']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='1']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='8']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[2]/str[@name='id'][.='5']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='7']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[2]/str[@name='id'][.='1']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[1]/str[@name='id'][.='8']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[2]/str[@name='id'][.='5']", "count(//*[@name='score' and .='NaN'])=0", - "count(/response/lst[@name='expanded']/result/doc[number(*/@name='score')!=number(*/@name='test_i')])=0" - ); + "count(/response/lst[@name='expanded']/result/doc[number(*/@name='score')!=number(*/@name='test_i')])=0"); - //Test fl with score with multi-sort - assertQ(req(params, "expand.sort", "test_l desc, score asc", "fl", "id,test_i,score"), + // Test fl with score with multi-sort + assertQ( + req(params, "expand.sort", "test_l desc, score asc", "fl", "id,test_i,score"), "*[count(/response/result/doc)=2]", "count(/response/lst[@name='expanded']/result)=2", "/response/result/doc[1]/str[@name='id'][.='2']", 
"/response/result/doc[2]/str[@name='id'][.='6']", // note that the expanded docs are score descending order (score is 1 test_i) - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='7']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='1']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='8']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[2]/str[@name='id'][.='5']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='7']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[2]/str[@name='id'][.='1']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[1]/str[@name='id'][.='8']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[2]/str[@name='id'][.='5']", "count(//*[@name='score' and .='NaN'])=0", - "count(/response/lst[@name='expanded']/result/doc[number(*/@name='score')!=number(*/@name='test_i')])=0" - ); + "count(/response/lst[@name='expanded']/result/doc[number(*/@name='score')!=number(*/@name='test_i')])=0"); // Test for expand with collapse // when matched docs have fewer unique values - params = params("q", "*:*", "sort", "id asc", "fl", "id", "rows", "6", "expand", "true", "expand.sort", "id asc"); - assertQ(req(params, "expand.field", "term_s"), + params = + params( + "q", + "*:*", + "sort", + "id asc", + "fl", + "id", + "rows", + "6", + "expand", + "true", + "expand.sort", + "id asc"); + assertQ( + req(params, "expand.field", "term_s"), "*[count(/response/result/doc)=6]", "/response/lst[@name='expanded']/result[@name='YYYY']/doc[1]/str[@name='id'][.='7']", "/response/lst[@name='expanded']/result[@name='YYYY']/doc[2]/str[@name='id'][.='8']", - "count(//*[@name='score'])=0" - ); - assertQ(req(params, "expand.field", "test_f"), + "count(//*[@name='score'])=0"); + assertQ( + req(params, "expand.field", "test_f"), "*[count(/response/result/doc)=6]", "/response/lst[@name='expanded']/result[@name='200.0']/doc[1]/str[@name='id'][.='8']", "/response/lst[@name='expanded']/result[@name='2000.0']/doc[1]/str[@name='id'][.='7']", - "count(//*[@name='score'])=0" - ); + "count(//*[@name='score'])=0"); // Support expand enabled without previous collapse - assertQ(req("q", "type_s:child", "sort", group+" desc, test_l desc", "defType", "edismax", - "expand", "true", "expand.q", "type_s:parent", "expand.field", group), + assertQ( + req( + "q", + "type_s:child", + "sort", + group + " desc, test_l desc", + "defType", + "edismax", + "expand", + "true", + "expand.q", + "type_s:parent", + "expand.field", + group), "*[count(/response/result/doc)=4]", "*[count(/response/lst[@name='expanded']/result)=2]", "/response/result/doc[1]/str[@name='id'][.='7']", "/response/result/doc[2]/str[@name='id'][.='2']", "/response/result/doc[3]/str[@name='id'][.='8']", "/response/result/doc[4]/str[@name='id'][.='6']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='1']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='5']" - ); + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='1']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[1]/str[@name='id'][.='5']"); // With multiple collapse // with different cost - params = params("q", "*:*", "defType", "edismax", 
"expand", "true", "bf", "field(test_i)", "expand.sort", "id asc"); - params.set("fq", "{!collapse cost=1000 field="+group+"}", "{!collapse cost=2000 field=test_f}"); - assertQ(req(params), + params = + params( + "q", + "*:*", + "defType", + "edismax", + "expand", + "true", + "bf", + "field(test_i)", + "expand.sort", + "id asc"); + params.set( + "fq", "{!collapse cost=1000 field=" + group + "}", "{!collapse cost=2000 field=test_f}"); + assertQ( + req(params), "*[count(/response/result/doc)=1]", "/response/result/doc[1]/str[@name='id'][.='2']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='1']" - ); + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='1']"); // with same cost (default cost) - params.set("fq", "{!collapse field="+group+"}", "{!collapse field=test_f}"); - assertQ(req(params), + params.set("fq", "{!collapse field=" + group + "}", "{!collapse field=test_f}"); + assertQ( + req(params), "*[count(/response/result/doc)=1]", "/response/result/doc[1]/str[@name='id'][.='2']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='1']" - ); + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='1']"); // with different cost but choose the test_f - params.set("fq", "{!collapse cost=3000 field="+group+"}", "{!collapse cost=2000 field=test_f}"); - assertQ(req(params), + params.set( + "fq", "{!collapse cost=3000 field=" + group + "}", "{!collapse cost=2000 field=test_f}"); + assertQ( + req(params), "*[count(/response/result/doc)=1]", "/response/result/doc[1]/str[@name='id'][.='2']", "/response/lst[@name='expanded']/result[@name='200.0']/doc[1]/str[@name='id'][.='3']", "/response/lst[@name='expanded']/result[@name='200.0']/doc[2]/str[@name='id'][.='6']", - "/response/lst[@name='expanded']/result[@name='200.0']/doc[3]/str[@name='id'][.='8']" - ); + "/response/lst[@name='expanded']/result[@name='200.0']/doc[3]/str[@name='id'][.='8']"); // with different cost and nullPolicy params.set("bf", "ord(id)"); - params.set("fq", "{!collapse cost=1000 field="+group+" nullPolicy=collapse}", "{!collapse cost=2000 field=test_f}"); - assertQ(req(params), + params.set( + "fq", + "{!collapse cost=1000 field=" + group + " nullPolicy=collapse}", + "{!collapse cost=2000 field=test_f}"); + assertQ( + req(params), "*[count(/response/result/doc)=2]", "/response/result/doc[1]/str[@name='id'][.='8']", "/response/result/doc[2]/str[@name='id'][.='7']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[1]/str[@name='id'][.='5']", - "/response/lst[@name='expanded']/result[@name='0"+floatAppend+"']/doc[2]/str[@name='id'][.='6']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/str[@name='id'][.='1']", - "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/str[@name='id'][.='2']" - ); + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[1]/str[@name='id'][.='5']", + "/response/lst[@name='expanded']/result[@name='0" + + floatAppend + + "']/doc[2]/str[@name='id'][.='6']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[1]/str[@name='id'][.='1']", + "/response/lst[@name='expanded']/result[@name='1" + + floatAppend + + "']/doc[2]/str[@name='id'][.='2']"); } @Test @@ -627,7 +939,7 @@ public void testExpandWithEmptyIndexReturnsZeroResults() { ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); 
params.add("fq", "{!collapse field=group_s}"); - params.add("expand" ,"true"); + params.add("expand", "true"); params.add("expand.rows", "10"); assertQ(req(params), "*[count(//doc)=0]"); @@ -635,10 +947,36 @@ public void testExpandWithEmptyIndexReturnsZeroResults() { @Test public void testErrorCases() { - String[] doc = {"id","1", "term_s", "YYYY", "text_t", "bleh bleh", "test_i", "5000", "test_l", "100", "test_f", "200"}; + String[] doc = { + "id", + "1", + "term_s", + "YYYY", + "text_t", + "bleh bleh", + "test_i", + "5000", + "test_l", + "100", + "test_f", + "200" + }; assertU(adoc(doc)); assertU(commit()); - String[] doc1 = {"id","2", "term_s", "YYYY", "text_t", "bleh bleh", "test_i", "500", "test_l", "1000", "test_f", "2000"}; + String[] doc1 = { + "id", + "2", + "term_s", + "YYYY", + "text_t", + "bleh bleh", + "test_i", + "500", + "test_l", + "1000", + "test_f", + "2000" + }; assertU(adoc(doc1)); ignoreException("missing expand field"); @@ -647,40 +985,113 @@ public void testErrorCases() { ignoreException("Expand not supported for fieldType:'text'"); // expand with grouping - SolrException e = expectThrows(SolrException.class, () -> { - h.query(req("q", "*:*", "expand", "true", "expand.field", "id", "group", "true", "group.field", "id")); - }); + SolrException e = + expectThrows( + SolrException.class, + () -> { + h.query( + req( + "q", + "*:*", + "expand", + "true", + "expand.field", + "id", + "group", + "true", + "group.field", + "id")); + }); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); assertEquals("Can not use expand with Grouping enabled", e.getMessage()); // no expand field - e = expectThrows(SolrException.class, () -> h.query(req("q", "*:*", "expand", "true"))); + e = expectThrows(SolrException.class, () -> h.query(req("q", "*:*", "expand", "true"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); assertEquals("missing expand field", e.getMessage()); // query and filter syntax errors - e = expectThrows(SolrException.class, () -> h.query(req("q", "*:*", "expand", "true", - "expand.field", "term_s", "expand.q", "{!"))); + e = + expectThrows( + SolrException.class, + () -> + h.query( + req("q", "*:*", "expand", "true", "expand.field", "term_s", "expand.q", "{!"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); assertTrue(e.getMessage().contains("Expected identifier at pos 2 str='{!'")); - e = expectThrows(SolrException.class, () -> h.query(req("q", "*:*", "expand", "true", - "expand.field", "term_s", "expand.q", "*:*", "expand.fq", "{!"))); + e = + expectThrows( + SolrException.class, + () -> + h.query( + req( + "q", + "*:*", + "expand", + "true", + "expand.field", + "term_s", + "expand.q", + "*:*", + "expand.fq", + "{!"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); assertTrue(e.getMessage().contains("Expected identifier at pos 2 str='{!'")); - e = expectThrows(SolrException.class, () -> h.query(req("q", "*:*", "expand", "true", - "expand.field", "term_s", "expand.q", "*:*", "expand.fq", "{!"))); + e = + expectThrows( + SolrException.class, + () -> + h.query( + req( + "q", + "*:*", + "expand", + "true", + "expand.field", + "term_s", + "expand.q", + "*:*", + "expand.fq", + "{!"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); assertTrue(e.getMessage().contains("Expected identifier at pos 2 str='{!'")); - e = expectThrows(SolrException.class, () -> h.query(req("q", "*:*", "expand", "true", - "expand.field", "term_s", "expand.q", "*:*", "expand.sort", "bleh"))); + e = + 
expectThrows( + SolrException.class, + () -> + h.query( + req( + "q", + "*:*", + "expand", + "true", + "expand.field", + "term_s", + "expand.q", + "*:*", + "expand.sort", + "bleh"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); - assertTrue(e.getMessage().contains("Can't determine a Sort Order (asc or desc) in sort spec 'bleh'")); + assertTrue( + e.getMessage().contains("Can't determine a Sort Order (asc or desc) in sort spec 'bleh'")); - e = expectThrows(SolrException.class, () -> h.query(req("q", "*:*", "expand", "true", - "expand.field", "text_t", "expand.q", "*:*"))); + e = + expectThrows( + SolrException.class, + () -> + h.query( + req( + "q", + "*:*", + "expand", + "true", + "expand.field", + "text_t", + "expand.q", + "*:*"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); assertEquals("Expand not supported for fieldType:'text'", e.getMessage()); @@ -688,8 +1099,8 @@ public void testErrorCases() { } /** - * randomize addition of docs into bunch of segments - * TODO: there ought to be a test utility to do this; even add in batches + * randomize addition of docs into bunch of segments TODO: there ought to be a test utility to do + * this; even add in batches */ private void createIndex(String[][] docs) { Collections.shuffle(Arrays.asList(docs), random()); diff --git a/solr/core/src/test/org/apache/solr/handler/component/TestHttpShardHandlerFactory.java b/solr/core/src/test/org/apache/solr/handler/component/TestHttpShardHandlerFactory.java index 822a09f0a44..cb082e56a00 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/TestHttpShardHandlerFactory.java +++ b/solr/core/src/test/org/apache/solr/handler/component/TestHttpShardHandlerFactory.java @@ -16,6 +16,10 @@ */ package org.apache.solr.handler.component; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.hasItem; +import static org.hamcrest.CoreMatchers.is; + import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; @@ -23,7 +27,6 @@ import java.util.HashSet; import java.util.List; import java.util.Set; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.impl.LBSolrClient; import org.apache.solr.client.solrj.request.QueryRequest; @@ -33,28 +36,27 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.hasItem; -import static org.hamcrest.CoreMatchers.is; - -/** - * Tests specifying a custom ShardHandlerFactory - */ +/** Tests specifying a custom ShardHandlerFactory */ public class TestHttpShardHandlerFactory extends SolrTestCaseJ4 { - private static final String LOAD_BALANCER_REQUESTS_MIN_ABSOLUTE = "solr.tests.loadBalancerRequestsMinimumAbsolute"; - private static final String LOAD_BALANCER_REQUESTS_MAX_FRACTION = "solr.tests.loadBalancerRequestsMaximumFraction"; + private static final String LOAD_BALANCER_REQUESTS_MIN_ABSOLUTE = + "solr.tests.loadBalancerRequestsMinimumAbsolute"; + private static final String LOAD_BALANCER_REQUESTS_MAX_FRACTION = + "solr.tests.loadBalancerRequestsMaximumFraction"; - private static int expectedLoadBalancerRequestsMinimumAbsolute = 0; + private static int expectedLoadBalancerRequestsMinimumAbsolute = 0; private static float expectedLoadBalancerRequestsMaximumFraction = 1.0f; @BeforeClass public static void beforeTests() throws Exception { expectedLoadBalancerRequestsMinimumAbsolute = random().nextInt(3); // 0 .. 
2
-    expectedLoadBalancerRequestsMaximumFraction = (1+random().nextInt(10))/10f; // 0.1 .. 1.0
-    System.setProperty(LOAD_BALANCER_REQUESTS_MIN_ABSOLUTE, Integer.toString(expectedLoadBalancerRequestsMinimumAbsolute));
-    System.setProperty(LOAD_BALANCER_REQUESTS_MAX_FRACTION, Float.toString(expectedLoadBalancerRequestsMaximumFraction));
-
+    expectedLoadBalancerRequestsMaximumFraction = (1 + random().nextInt(10)) / 10f; // 0.1 .. 1.0
+    System.setProperty(
+        LOAD_BALANCER_REQUESTS_MIN_ABSOLUTE,
+        Integer.toString(expectedLoadBalancerRequestsMinimumAbsolute));
+    System.setProperty(
+        LOAD_BALANCER_REQUESTS_MAX_FRACTION,
+        Float.toString(expectedLoadBalancerRequestsMaximumFraction));
   }

   @AfterClass
@@ -68,36 +70,50 @@ public void testLoadBalancerRequestsMinMax() throws Exception {
     CoreContainer cc = null;
     ShardHandlerFactory factory = null;
     try {
-      cc = CoreContainer.createAndLoad(home, home.resolve("solr-shardhandler-loadBalancerRequests.xml"));
+      cc =
+          CoreContainer.createAndLoad(
+              home, home.resolve("solr-shardhandler-loadBalancerRequests.xml"));
       factory = cc.getShardHandlerFactory();

       // test that factory is HttpShardHandlerFactory with expected url reserve fraction
       assertTrue(factory instanceof HttpShardHandlerFactory);
       @SuppressWarnings("resource")
-      final HttpShardHandlerFactory httpShardHandlerFactory = ((HttpShardHandlerFactory)factory);
-      assertEquals(expectedLoadBalancerRequestsMinimumAbsolute, httpShardHandlerFactory.permittedLoadBalancerRequestsMinimumAbsolute, 0.0);
-      assertEquals(expectedLoadBalancerRequestsMaximumFraction, httpShardHandlerFactory.permittedLoadBalancerRequestsMaximumFraction, 0.0);
+      final HttpShardHandlerFactory httpShardHandlerFactory = ((HttpShardHandlerFactory) factory);
+      assertEquals(
+          expectedLoadBalancerRequestsMinimumAbsolute,
+          httpShardHandlerFactory.permittedLoadBalancerRequestsMinimumAbsolute,
+          0.0);
+      assertEquals(
+          expectedLoadBalancerRequestsMaximumFraction,
+          httpShardHandlerFactory.permittedLoadBalancerRequestsMaximumFraction,
+          0.0);

       // create a dummy request and dummy url list
       final QueryRequest queryRequest = null;
       final List<String> urls = new ArrayList<>();
-      for (int ii=0; ii<10; ++ii) {
+      for (int ii = 0; ii < 10; ++ii) {
         urls.add(null);
       }

       // create LBHttpSolrClient request
-      final LBSolrClient.Req req = httpShardHandlerFactory.newLBHttpSolrClientReq(queryRequest, urls);
+      final LBSolrClient.Req req =
+          httpShardHandlerFactory.newLBHttpSolrClientReq(queryRequest, urls);
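// --- illustrative aside ------------------------------------------------------
// A minimal sketch (hypothetical helper, not Solr API) of the reservation rule
// the assertions just below verify: try floor(urls * maxFraction) servers, but
// never fewer than the configured absolute minimum.
final class NumServersToTrySketch {
  static int numServersToTry(int numUrls, float maxFraction, int minAbsolute) {
    int byFraction = (int) Math.floor(numUrls * maxFraction); // scale the url list
    return Math.max(byFraction, minAbsolute); // clamp up to the absolute floor
  }
  // e.g. numServersToTry(10, 0.3f, 5) == 5: floor(3.0) = 3 is below the minimum
}
// -----------------------------------------------------------------------------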
      // actual vs. expected test
      final int actualNumServersToTry = req.getNumServersToTry();
-      int expectedNumServersToTry = (int)Math.floor(urls.size() * expectedLoadBalancerRequestsMaximumFraction);
+      int expectedNumServersToTry =
+          (int) Math.floor(urls.size() * expectedLoadBalancerRequestsMaximumFraction);
       if (expectedNumServersToTry < expectedLoadBalancerRequestsMinimumAbsolute) {
         expectedNumServersToTry = expectedLoadBalancerRequestsMinimumAbsolute;
       }
-      assertEquals("wrong numServersToTry for"
-          + " urls.size="+urls.size()
-          + " expectedLoadBalancerRequestsMinimumAbsolute="+expectedLoadBalancerRequestsMinimumAbsolute
-          + " expectedLoadBalancerRequestsMaximumFraction="+expectedLoadBalancerRequestsMaximumFraction,
+      assertEquals(
+          "wrong numServersToTry for"
+              + " urls.size="
+              + urls.size()
+              + " expectedLoadBalancerRequestsMinimumAbsolute="
+              + expectedLoadBalancerRequestsMinimumAbsolute
+              + " expectedLoadBalancerRequestsMaximumFraction="
+              + expectedLoadBalancerRequestsMaximumFraction,
          expectedNumServersToTry,
          actualNumServersToTry);
@@ -117,21 +133,20 @@ public void getShardsAllowList() throws Exception {
       cc = CoreContainer.createAndLoad(home, home.resolve("solr.xml"));
       factory = cc.getShardHandlerFactory();
       assertTrue(factory instanceof HttpShardHandlerFactory);
-      assertThat(cc.getAllowListUrlChecker().getHostAllowList(),
-          equalTo(new HashSet<>(Arrays.asList("abc:8983", "def:8984"))));
+      assertThat(
+          cc.getAllowListUrlChecker().getHostAllowList(),
+          equalTo(new HashSet<>(Arrays.asList("abc:8983", "def:8984"))));
     } finally {
       if (factory != null) factory.close();
       if (cc != null) cc.shutdown();
       System.clearProperty(TEST_URL_ALLOW_LIST);
     }
   }
-
+
   @Test
   public void testLiveNodesToHostUrl() throws Exception {
-    Set<String> liveNodes = new HashSet<>(Arrays.asList(
-        "1.2.3.4:8983_solr",
-        "1.2.3.4:9000_",
-        "1.2.3.4:9001_solr-2"));
+    Set<String> liveNodes =
+        new HashSet<>(Arrays.asList("1.2.3.4:8983_solr", "1.2.3.4:9000_", "1.2.3.4:9001_solr-2"));
     ClusterState cs = new ClusterState(liveNodes, new HashMap<>());
     Set<String> hostSet = cs.getHostAllowList();
     assertThat(hostSet.size(), is(3));
diff --git a/solr/core/src/test/org/apache/solr/handler/component/TestPivotHelperCode.java b/solr/core/src/test/org/apache/solr/handler/component/TestPivotHelperCode.java
index 89bb8309774..cc7478cdd80 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/TestPivotHelperCode.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/TestPivotHelperCode.java
@@ -16,32 +16,28 @@
  */
 package org.apache.solr.handler.component;

-
-import org.apache.solr.SolrTestCaseJ4;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
 import org.apache.lucene.util.TestUtil;
+import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.util.SuppressForbidden;
-import java.util.List;
-import java.util.Arrays;
-import java.util.ArrayList;
-
-/**
- * A light weight test of various helper methods used in pivot faceting
- *
- **/
-public class TestPivotHelperCode extends SolrTestCaseJ4{
+/** A lightweight test of various helper methods used in pivot faceting */
+public class TestPivotHelperCode extends SolrTestCaseJ4 {

   /**
-   * test refinement encoding/decoding matches specific expected encoded values
+   * test refinement encoding/decoding matches specific expected encoded values
+   *
    * @see PivotFacetHelper#encodeRefinementValuePath
    * @see PivotFacetHelper#decodeRefinementValuePath
    */
   public void testRefinementStringEncodingWhiteBox() {
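// --- illustrative aside ------------------------------------------------------
// A minimal sketch (hypothetical, inferred only from the expected strings
// asserted below) of the encoding PivotFacetHelper.encodeRefinementValuePath
// appears to produce: null becomes "^", every other value is prefixed with "~"
// and has embedded commas escaped, and the pieces are joined with ",".
// (Backslash escaping here is an assumption; the assertions below only
// exercise commas. The real helper may handle additional cases.)
final class RefinementPathEncodingSketch {
  static String encode(java.util.List<String> path) {
    StringBuilder sb = new StringBuilder();
    for (String v : path) {
      if (sb.length() > 0) sb.append(','); // separator between path elements
      if (v == null) {
        sb.append('^'); // null marker
      } else {
        sb.append('~').append(v.replace("\\", "\\\\").replace(",", "\\,"));
      }
    }
    return sb.toString();
  }
  // e.g. encode(Arrays.asList("foo,bar", "yak", "zat")) -> "~foo\,bar,~yak,~zat"
  //      encode(Arrays.asList("", "foo", "", "", null, "bar")) -> "~,~foo,~,~,^,~bar"
}
// -----------------------------------------------------------------------------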
assertBiDirectionalEncoding(strs("foo,bar","yak","zat"), "~foo\\,bar,~yak,~zat"); + assertBiDirectionalEncoding(strs("foo,bar", "yak", "zat"), "~foo\\,bar,~yak,~zat"); // simple single valued case - assertBiDirectionalEncoding( strs("foo"), "~foo"); + assertBiDirectionalEncoding(strs("foo"), "~foo"); // special case: empty list assertBiDirectionalEncoding(strs(), ""); @@ -50,16 +46,15 @@ public void testRefinementStringEncodingWhiteBox() { assertBiDirectionalEncoding(strs(""), "~"); // special case: single element list containing null - assertBiDirectionalEncoding(strs((String)null), "^"); + assertBiDirectionalEncoding(strs((String) null), "^"); // mix of empty strings & null with other values - assertBiDirectionalEncoding(strs("", "foo", "", "", null, "bar"), - "~,~foo,~,~,^,~bar"); + assertBiDirectionalEncoding(strs("", "foo", "", "", null, "bar"), "~,~foo,~,~,^,~bar"); } /** - * test refinement encoding/decoding of random sets of values can be round tripped, - * w/o worrying about what the actual encoding looks like + * test refinement encoding/decoding of random sets of values can be round tripped, w/o worrying + * about what the actual encoding looks like * * @see PivotFacetHelper#encodeRefinementValuePath * @see PivotFacetHelper#decodeRefinementValuePath @@ -78,7 +73,6 @@ public void testRefinementStringEncodingBlockBoxRoundTrip() { List decoded = PivotFacetHelper.decodeRefinementValuePath(encoded); assertEquals(data, decoded); } - } private void assertBiDirectionalEncoding(List data, String encoded) { @@ -86,7 +80,6 @@ private void assertBiDirectionalEncoding(List data, String encoded) { assertEquals(encoded, PivotFacetHelper.encodeRefinementValuePath(data)); } - @SuppressForbidden(reason = "Checking object equality for Long instance") @SuppressWarnings("BoxedPrimitiveConstructor") public void testCompareWithNullLast() throws Exception { @@ -101,17 +94,14 @@ public void testCompareWithNullLast() throws Exception { assertEquals(0, PivotFacetFieldValueCollection.compareWithNullLast(null, null)); - assertTrue( PivotFacetFieldValueCollection.compareWithNullLast(a, null) < 0 ); - assertTrue( PivotFacetFieldValueCollection.compareWithNullLast(b, null) < 0 ); - - assertTrue( 0 < PivotFacetFieldValueCollection.compareWithNullLast(null, a) ); - assertTrue( 0 < PivotFacetFieldValueCollection.compareWithNullLast(null, b) ); + assertTrue(PivotFacetFieldValueCollection.compareWithNullLast(a, null) < 0); + assertTrue(PivotFacetFieldValueCollection.compareWithNullLast(b, null) < 0); + assertTrue(0 < PivotFacetFieldValueCollection.compareWithNullLast(null, a)); + assertTrue(0 < PivotFacetFieldValueCollection.compareWithNullLast(null, b)); } - private List strs(String... 
strs) { return Arrays.asList(strs); } - } diff --git a/solr/core/src/test/org/apache/solr/handler/component/TestTrackingShardHandlerFactory.java b/solr/core/src/test/org/apache/solr/handler/component/TestTrackingShardHandlerFactory.java index e72a62cbeda..f531414fa59 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/TestTrackingShardHandlerFactory.java +++ b/solr/core/src/test/org/apache/solr/handler/component/TestTrackingShardHandlerFactory.java @@ -19,7 +19,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; - import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrQuery; @@ -31,8 +30,8 @@ import org.junit.Test; /** - * Test for {@link org.apache.solr.handler.component.TrackingShardHandlerFactory} - * See SOLR-7147 for more information + * Test for {@link org.apache.solr.handler.component.TrackingShardHandlerFactory} See SOLR-7147 for + * more information */ @SolrTestCaseJ4.SuppressSSL public class TestTrackingShardHandlerFactory extends AbstractFullDistribZkTestBase { @@ -54,7 +53,8 @@ public void testRequestTracking() throws Exception { List runners = new ArrayList<>(jettys); runners.add(controlJetty); - TrackingShardHandlerFactory.RequestTrackingQueue trackingQueue = new TrackingShardHandlerFactory.RequestTrackingQueue(); + TrackingShardHandlerFactory.RequestTrackingQueue trackingQueue = + new TrackingShardHandlerFactory.RequestTrackingQueue(); TrackingShardHandlerFactory.setTrackingQueue(runners, trackingQueue); for (JettySolrRunner runner : runners) { @@ -62,7 +62,8 @@ public void testRequestTracking() throws Exception { ShardHandlerFactory factory = container.getShardHandlerFactory(); assert factory instanceof TrackingShardHandlerFactory; @SuppressWarnings("resource") - TrackingShardHandlerFactory trackingShardHandlerFactory = (TrackingShardHandlerFactory) factory; + TrackingShardHandlerFactory trackingShardHandlerFactory = + (TrackingShardHandlerFactory) factory; assertSame(trackingQueue, trackingShardHandlerFactory.getTrackingQueue()); } @@ -70,17 +71,19 @@ public void testRequestTracking() throws Exception { waitForRecoveriesToFinish(collectionName, true); - List coreAdminRequests = trackingQueue.getCoreAdminRequests(); + List coreAdminRequests = + trackingQueue.getCoreAdminRequests(); assertNotNull(coreAdminRequests); - assertEquals("Unexpected number of core admin requests were found", 2, coreAdminRequests.size()); + assertEquals( + "Unexpected number of core admin requests were found", 2, coreAdminRequests.size()); CloudSolrClient client = cloudClient; client.setDefaultCollection(collectionName); - /* - hash of b is 95de7e03 high bits=2 shard=shard1 - hash of e is 656c4367 high bits=1 shard=shard2 - */ + /* + hash of b is 95de7e03 high bits=2 shard=shard1 + hash of e is 656c4367 high bits=1 shard=shard2 + */ for (int i = 0; i < 10; i++) { SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", (i % 2 == 0 ? "b!" 
: "e!") + i); @@ -92,19 +95,29 @@ public void testRequestTracking() throws Exception { client.query(new SolrQuery("*:*")); - TrackingShardHandlerFactory.ShardRequestAndParams getTopIdsRequest = trackingQueue.getShardRequestByPurpose(client.getZkStateReader(), collectionName, "shard1", ShardRequest.PURPOSE_GET_TOP_IDS); + TrackingShardHandlerFactory.ShardRequestAndParams getTopIdsRequest = + trackingQueue.getShardRequestByPurpose( + client.getZkStateReader(), collectionName, "shard1", ShardRequest.PURPOSE_GET_TOP_IDS); assertNotNull(getTopIdsRequest); - getTopIdsRequest = trackingQueue.getShardRequestByPurpose(client.getZkStateReader(), collectionName, "shard2", ShardRequest.PURPOSE_GET_TOP_IDS); + getTopIdsRequest = + trackingQueue.getShardRequestByPurpose( + client.getZkStateReader(), collectionName, "shard2", ShardRequest.PURPOSE_GET_TOP_IDS); assertNotNull(getTopIdsRequest); - TrackingShardHandlerFactory.ShardRequestAndParams getFieldsRequest = trackingQueue.getShardRequestByPurpose(client.getZkStateReader(), collectionName, "shard1", ShardRequest.PURPOSE_GET_FIELDS); + TrackingShardHandlerFactory.ShardRequestAndParams getFieldsRequest = + trackingQueue.getShardRequestByPurpose( + client.getZkStateReader(), collectionName, "shard1", ShardRequest.PURPOSE_GET_FIELDS); assertNotNull(getFieldsRequest); - getFieldsRequest = trackingQueue.getShardRequestByPurpose(client.getZkStateReader(), collectionName, "shard2", ShardRequest.PURPOSE_GET_FIELDS); + getFieldsRequest = + trackingQueue.getShardRequestByPurpose( + client.getZkStateReader(), collectionName, "shard2", ShardRequest.PURPOSE_GET_FIELDS); assertNotNull(getFieldsRequest); int numRequests = 0; - Map> allRequests = trackingQueue.getAllRequests(); - for (Map.Entry> entry : allRequests.entrySet()) { + Map> allRequests = + trackingQueue.getAllRequests(); + for (Map.Entry> entry : + allRequests.entrySet()) { numRequests += entry.getValue().size(); } // 4 shard requests + 2 core admin requests (invoked by create collection API) @@ -118,7 +131,8 @@ public void testRequestTracking() throws Exception { ShardHandlerFactory factory = container.getShardHandlerFactory(); assert factory instanceof TrackingShardHandlerFactory; @SuppressWarnings("resource") - TrackingShardHandlerFactory trackingShardHandlerFactory = (TrackingShardHandlerFactory) factory; + TrackingShardHandlerFactory trackingShardHandlerFactory = + (TrackingShardHandlerFactory) factory; assertFalse(trackingShardHandlerFactory.isTracking()); } @@ -126,7 +140,8 @@ public void testRequestTracking() throws Exception { client.query(new SolrQuery("*:*")); numRequests = 0; allRequests = trackingQueue.getAllRequests(); - for (Map.Entry> entry : allRequests.entrySet()) { + for (Map.Entry> entry : + allRequests.entrySet()) { numRequests += entry.getValue().size(); } // should still be 6 diff --git a/solr/core/src/test/org/apache/solr/handler/component/UpdateLogCloudTest.java b/solr/core/src/test/org/apache/solr/handler/component/UpdateLogCloudTest.java index d0773f27225..5a8893f5ed5 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/UpdateLogCloudTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/UpdateLogCloudTest.java @@ -20,7 +20,6 @@ import java.util.Collections; import java.util.LinkedList; import java.util.List; - import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -47,35 +46,34 @@ public class UpdateLogCloudTest extends 
SolrCloudTestCase { public static void setupCluster() throws Exception { // choose a directory factory expectVersionsAfterRestart = random().nextBoolean(); - System.setProperty("solr.directoryFactory", + System.setProperty( + "solr.directoryFactory", (expectVersionsAfterRestart ? StandardDirectoryFactory.class.getCanonicalName() - : MockDirectoryFactory.class.getCanonicalName())); + : MockDirectoryFactory.class.getCanonicalName())); // create and configure cluster - configureCluster(NUM_SHARDS*NUM_REPLICAS /* nodeCount */) - .addConfig("conf", configset("cloud-dynamic")) - .configure(); + configureCluster(NUM_SHARDS * NUM_REPLICAS /* nodeCount */) + .addConfig("conf", configset("cloud-dynamic")) + .configure(); } @Before public void beforeTest() throws Exception { // decide collection name ... - COLLECTION = "collection"+(1+random().nextInt(100)) ; + COLLECTION = "collection" + (1 + random().nextInt(100)); // create an empty collection - CollectionAdminRequest - .createCollection(COLLECTION, "conf", NUM_SHARDS, NUM_REPLICAS) - .processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT); - AbstractDistribZkTestBase.waitForRecoveriesToFinish(COLLECTION, cluster.getSolrClient().getZkStateReader(), false, true, DEFAULT_TIMEOUT); + CollectionAdminRequest.createCollection(COLLECTION, "conf", NUM_SHARDS, NUM_REPLICAS) + .processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT); + AbstractDistribZkTestBase.waitForRecoveriesToFinish( + COLLECTION, cluster.getSolrClient().getZkStateReader(), false, true, DEFAULT_TIMEOUT); } @After public void afterTest() throws Exception { - CollectionAdminRequest - .deleteCollection(COLLECTION) - .process(cluster.getSolrClient()); + CollectionAdminRequest.deleteCollection(COLLECTION).process(cluster.getSolrClient()); } @Test @@ -85,51 +83,54 @@ public void test() throws Exception { final List solrClients = new ArrayList<>(); for (JettySolrRunner jettySolrRunner : cluster.getJettySolrRunners()) { - if (!jettySolrRunner.getBaseUrl().toString().equals( - getCollectionState(COLLECTION).getLeader("shard1").getBaseUrl())) { + if (!jettySolrRunner + .getBaseUrl() + .toString() + .equals(getCollectionState(COLLECTION).getLeader("shard1").getBaseUrl())) { specialIdx = solrClients.size(); } solrClients.add(jettySolrRunner.newClient()); } new UpdateRequest() - .add(sdoc("id", "0", "a_t", "zero")) - .commit(cluster.getSolrClient(), COLLECTION); + .add(sdoc("id", "0", "a_t", "zero")) + .commit(cluster.getSolrClient(), COLLECTION); for (SolrClient solrClient : solrClients) { implTest(solrClient, 1); } cluster.getJettySolrRunner(specialIdx).stop(); - AbstractDistribZkTestBase.waitForRecoveriesToFinish(COLLECTION, cluster.getSolrClient().getZkStateReader(), false, true, DEFAULT_TIMEOUT); + AbstractDistribZkTestBase.waitForRecoveriesToFinish( + COLLECTION, cluster.getSolrClient().getZkStateReader(), false, true, DEFAULT_TIMEOUT); new UpdateRequest() - .add(sdoc("id", "1", "a_t", "one")) - .deleteById("2") - .deleteByQuery("a_t:three") - .commit(cluster.getSolrClient(), COLLECTION); + .add(sdoc("id", "1", "a_t", "one")) + .deleteById("2") + .deleteByQuery("a_t:three") + .commit(cluster.getSolrClient(), COLLECTION); cluster.getJettySolrRunner(specialIdx).start(); - AbstractDistribZkTestBase.waitForRecoveriesToFinish(COLLECTION, cluster.getSolrClient().getZkStateReader(), false, true, DEFAULT_TIMEOUT); + AbstractDistribZkTestBase.waitForRecoveriesToFinish( + COLLECTION, cluster.getSolrClient().getZkStateReader(), false, true, DEFAULT_TIMEOUT); int idx = 0; for (SolrClient solrClient : 
solrClients) { - implTest(solrClient, idx==specialIdx ? (expectVersionsAfterRestart ? 4 : 0) : 4); + implTest(solrClient, idx == specialIdx ? (expectVersionsAfterRestart ? 4 : 0) : 4); ++idx; } for (SolrClient solrClient : solrClients) { solrClient.close(); } - } @SuppressWarnings("unchecked") private void implTest(SolrClient solrClient, int numExpected) throws Exception { - final QueryRequest reqV = new QueryRequest(params("qt","/get", "getVersions","12345")); + final QueryRequest reqV = new QueryRequest(params("qt", "/get", "getVersions", "12345")); final NamedList rspV = solrClient.request(reqV, COLLECTION); - final List versions = (List)rspV.get("versions"); + final List versions = (List) rspV.get("versions"); assertEquals(versions.toString(), numExpected, versions.size()); if (numExpected == 0) { return; @@ -143,13 +144,19 @@ private void implTest(SolrClient solrClient, int numExpected) throws Exception { final Long minVersion = absVersions.getFirst(); final Long maxVersion = absVersions.getLast(); - for (boolean skipDbq : new boolean[] { false, true }) { - final QueryRequest reqU = new QueryRequest(params("qt","/get", "getUpdates", minVersion + "..."+maxVersion, "skipDbq", Boolean.toString(skipDbq))); + for (boolean skipDbq : new boolean[] {false, true}) { + final QueryRequest reqU = + new QueryRequest( + params( + "qt", + "/get", + "getUpdates", + minVersion + "..." + maxVersion, + "skipDbq", + Boolean.toString(skipDbq))); final NamedList rspU = solrClient.request(reqU, COLLECTION); - final List updatesList = (List)rspU.get("updates"); + final List updatesList = (List) rspU.get("updates"); assertEquals(updatesList.toString(), numExpected, updatesList.size()); } - } - } diff --git a/solr/core/src/test/org/apache/solr/handler/designer/ManagedSchemaDiffTest.java b/solr/core/src/test/org/apache/solr/handler/designer/ManagedSchemaDiffTest.java index df3fcddeb31..567622cfe20 100644 --- a/solr/core/src/test/org/apache/solr/handler/designer/ManagedSchemaDiffTest.java +++ b/solr/core/src/test/org/apache/solr/handler/designer/ManagedSchemaDiffTest.java @@ -17,13 +17,15 @@ package org.apache.solr.handler.designer; +import static org.apache.solr.handler.admin.ConfigSetsHandler.DEFAULT_CONFIGSET_NAME; +import static org.apache.solr.handler.designer.ManagedSchemaDiff.mapFieldsToPropertyValues; + import java.io.File; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; - import org.apache.solr.cloud.SolrCloudTestCase; import org.apache.solr.common.util.SimpleOrderedMap; import org.apache.solr.schema.BoolField; @@ -34,15 +36,14 @@ import org.junit.Assert; import org.junit.BeforeClass; -import static org.apache.solr.handler.admin.ConfigSetsHandler.DEFAULT_CONFIGSET_NAME; -import static org.apache.solr.handler.designer.ManagedSchemaDiff.mapFieldsToPropertyValues; - public class ManagedSchemaDiffTest extends SolrCloudTestCase { @BeforeClass public static void createCluster() throws Exception { System.setProperty("managed.schema.mutable", "true"); - configureCluster(1).addConfig(DEFAULT_CONFIGSET_NAME, new File(ExternalPaths.DEFAULT_CONFIGSET).toPath()).configure(); + configureCluster(1) + .addConfig(DEFAULT_CONFIGSET_NAME, new File(ExternalPaths.DEFAULT_CONFIGSET).toPath()) + .configure(); } public void testFieldDiff() { @@ -58,7 +59,9 @@ public void testFieldDiff() { schema2FieldMap.put("strfield", schema.newField("strfield", "strings", Collections.emptyMap())); schema2FieldMap.put("intfield", new SchemaField("intfield", new 
IntPointField())); - Map diff = ManagedSchemaDiff.diff(mapFieldsToPropertyValues(schema1FieldMap), mapFieldsToPropertyValues(schema2FieldMap)); + Map diff = + ManagedSchemaDiff.diff( + mapFieldsToPropertyValues(schema1FieldMap), mapFieldsToPropertyValues(schema2FieldMap)); Assert.assertTrue(diff.containsKey("updated")); Assert.assertTrue(diff.containsKey("added")); Assert.assertTrue(diff.containsKey("removed")); @@ -67,19 +70,23 @@ public void testFieldDiff() { Assert.assertEquals(1, changedFields.size()); Assert.assertTrue(changedFields.containsKey("strfield")); Assert.assertEquals( - Arrays.asList(Map.of("type", "string", "multiValued", false), + Arrays.asList( + Map.of("type", "string", "multiValued", false), Map.of("type", "strings", "multiValued", true)), changedFields.get("strfield")); Map addedFields = getInnerMap(diff, "added"); Assert.assertEquals(1, addedFields.size()); Assert.assertTrue(addedFields.containsKey("intfield")); - Assert.assertEquals(schema2FieldMap.get("intfield").getNamedPropertyValues(true), addedFields.get("intfield")); + Assert.assertEquals( + schema2FieldMap.get("intfield").getNamedPropertyValues(true), addedFields.get("intfield")); Map removedFields = getInnerMap(diff, "removed"); Assert.assertEquals(1, removedFields.size()); Assert.assertTrue(removedFields.containsKey("boolfield")); - Assert.assertEquals(schema1FieldMap.get("boolfield").getNamedPropertyValues(true), removedFields.get("boolfield")); + Assert.assertEquals( + schema1FieldMap.get("boolfield").getNamedPropertyValues(true), + removedFields.get("boolfield")); } public void testSimpleOrderedMapListDiff() { diff --git a/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerAPI.java b/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerAPI.java index 552a5dbc038..26258d52ac0 100644 --- a/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerAPI.java +++ b/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerAPI.java @@ -17,6 +17,14 @@ package org.apache.solr.handler.designer; +import static org.apache.solr.common.params.CommonParams.JSON_MIME; +import static org.apache.solr.handler.admin.ConfigSetsHandler.DEFAULT_CONFIGSET_NAME; +import static org.apache.solr.handler.designer.SchemaDesignerAPI.getMutableId; +import static org.apache.solr.response.RawResponseWriter.CONTENT; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.google.common.collect.ImmutableMap; import java.io.File; import java.util.Arrays; import java.util.Collection; @@ -25,8 +33,6 @@ import java.util.List; import java.util.Map; import java.util.Optional; - -import com.google.common.collect.ImmutableMap; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.response.QueryResponse; @@ -54,13 +60,6 @@ import org.junit.Test; import org.noggit.JSONUtil; -import static org.apache.solr.common.params.CommonParams.JSON_MIME; -import static org.apache.solr.handler.admin.ConfigSetsHandler.DEFAULT_CONFIGSET_NAME; -import static org.apache.solr.handler.designer.SchemaDesignerAPI.getMutableId; -import static org.apache.solr.response.RawResponseWriter.CONTENT; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - public class TestSchemaDesignerAPI extends SolrCloudTestCase implements SchemaDesignerConstants { private CoreContainer cc; @@ -69,7 +68,9 @@ public class TestSchemaDesignerAPI extends SolrCloudTestCase 
implements SchemaDe @BeforeClass public static void createCluster() throws Exception { System.setProperty("managed.schema.mutable", "true"); - configureCluster(1).addConfig(DEFAULT_CONFIGSET_NAME, new File(ExternalPaths.DEFAULT_CONFIGSET).toPath()).configure(); + configureCluster(1) + .addConfig(DEFAULT_CONFIGSET_NAME, new File(ExternalPaths.DEFAULT_CONFIGSET).toPath()) + .configure(); // SchemaDesignerAPI depends on the blob store CollectionAdminRequest.createCollection(BLOB_STORE_ID, 1, 1).process(cluster.getSolrClient()); cluster.waitForActiveCollection(BLOB_STORE_ID, 1, 1); @@ -134,7 +135,13 @@ public void testTSV() throws Exception { public void testAddTechproductsProgressively() throws Exception { File docsDir = new File(ExternalPaths.SOURCE_HOME, "example/exampledocs"); assertTrue(docsDir.getAbsolutePath() + " not found!", docsDir.isDirectory()); - File[] toAdd = docsDir.listFiles((dir, name) -> name.endsWith(".xml") || name.endsWith(".json") || name.endsWith(".csv") || name.endsWith(".jsonl")); + File[] toAdd = + docsDir.listFiles( + (dir, name) -> + name.endsWith(".xml") + || name.endsWith(".json") + || name.endsWith(".csv") + || name.endsWith(".jsonl")); assertNotNull("No test data files found in " + docsDir.getAbsolutePath(), toAdd); String configSet = "techproducts"; @@ -148,11 +155,12 @@ public void testAddTechproductsProgressively() throws Exception { when(req.getParams()).thenReturn(reqParams); schemaDesignerAPI.getInfo(req, rsp); // response should just be the default values - Map expSettings = Map.of( - ENABLE_DYNAMIC_FIELDS_PARAM, true, - ENABLE_FIELD_GUESSING_PARAM, true, - ENABLE_NESTED_DOCS_PARAM, false, - LANGUAGES_PARAM, Collections.emptyList()); + Map expSettings = + Map.of( + ENABLE_DYNAMIC_FIELDS_PARAM, true, + ENABLE_FIELD_GUESSING_PARAM, true, + ENABLE_NESTED_DOCS_PARAM, false, + LANGUAGES_PARAM, Collections.emptyList()); assertDesignerSettings(expSettings, rsp.getValues()); SolrParams rspData = rsp.getValues().toSolrParams(); int schemaVersion = rspData.getInt(SCHEMA_VERSION_PARAM); @@ -210,12 +218,13 @@ public void testAddTechproductsProgressively() throws Exception { // GET /schema-designer/info schemaDesignerAPI.getInfo(req, rsp); - expSettings = Map.of( - ENABLE_DYNAMIC_FIELDS_PARAM, false, - ENABLE_FIELD_GUESSING_PARAM, true, - ENABLE_NESTED_DOCS_PARAM, false, - LANGUAGES_PARAM, Collections.singletonList("en"), - COPY_FROM_PARAM, "_default"); + expSettings = + Map.of( + ENABLE_DYNAMIC_FIELDS_PARAM, false, + ENABLE_FIELD_GUESSING_PARAM, true, + ENABLE_NESTED_DOCS_PARAM, false, + LANGUAGES_PARAM, Collections.singletonList("en"), + COPY_FROM_PARAM, "_default"); assertDesignerSettings(expSettings, rsp.getValues()); // query to see how the schema decisions impact retrieval / ranking @@ -258,7 +267,8 @@ public void testAddTechproductsProgressively() throws Exception { assertNotNull(collections); assertTrue(collections.contains(collection)); - // now try to create another temp, which should fail since designer is disabled for this configSet now + // now try to create another temp, which should fail since designer is disabled for this + // configSet now reqParams.clear(); reqParams.set(CONFIG_SET_PARAM, configSet); rsp = new SolrQueryResponse(); @@ -328,12 +338,13 @@ public void testBasicUserWorkflow() throws Exception { assertNotNull(rsp.getValues().get(CONFIG_SET_PARAM)); assertNotNull(rsp.getValues().get(SCHEMA_VERSION_PARAM)); - Map expSettings = Map.of( - ENABLE_DYNAMIC_FIELDS_PARAM, true, - ENABLE_FIELD_GUESSING_PARAM, true, - ENABLE_NESTED_DOCS_PARAM, 
false, - LANGUAGES_PARAM, Collections.emptyList(), - COPY_FROM_PARAM, "_default"); + Map expSettings = + Map.of( + ENABLE_DYNAMIC_FIELDS_PARAM, true, + ENABLE_FIELD_GUESSING_PARAM, true, + ENABLE_NESTED_DOCS_PARAM, false, + LANGUAGES_PARAM, Collections.emptyList(), + COPY_FROM_PARAM, "_default"); assertDesignerSettings(expSettings, rsp.getValues()); // Analyze some sample documents to refine the schema @@ -397,8 +408,10 @@ public void testBasicUserWorkflow() throws Exception { req = mock(SolrQueryRequest.class); when(req.getParams()).thenReturn(reqParams); - when(req.getContentStreams()).thenReturn( - Collections.singletonList(new ContentStreamBase.StringStream(solrconfigXml, "application/xml"))); + when(req.getContentStreams()) + .thenReturn( + Collections.singletonList( + new ContentStreamBase.StringStream(solrconfigXml, "application/xml"))); schemaDesignerAPI.updateFileContents(req, rsp); rspData = rsp.getValues().toSolrParams(); @@ -413,8 +426,10 @@ public void testBasicUserWorkflow() throws Exception { req = mock(SolrQueryRequest.class); when(req.getParams()).thenReturn(reqParams); - when(req.getContentStreams()).thenReturn( - Collections.singletonList(new ContentStreamBase.StringStream("", "application/xml"))); + when(req.getContentStreams()) + .thenReturn( + Collections.singletonList( + new ContentStreamBase.StringStream("", "application/xml"))); // this should fail b/c the updated solrconfig.xml is invalid schemaDesignerAPI.updateFileContents(req, rsp); @@ -434,12 +449,13 @@ public void testBasicUserWorkflow() throws Exception { when(req.getParams()).thenReturn(reqParams); schemaDesignerAPI.analyze(req, rsp); - expSettings = Map.of( - ENABLE_DYNAMIC_FIELDS_PARAM, false, - ENABLE_FIELD_GUESSING_PARAM, false, - ENABLE_NESTED_DOCS_PARAM, false, - LANGUAGES_PARAM, Collections.singletonList("en"), - COPY_FROM_PARAM, "_default"); + expSettings = + Map.of( + ENABLE_DYNAMIC_FIELDS_PARAM, false, + ENABLE_FIELD_GUESSING_PARAM, false, + ENABLE_NESTED_DOCS_PARAM, false, + LANGUAGES_PARAM, Collections.singletonList("en"), + COPY_FROM_PARAM, "_default"); assertDesignerSettings(expSettings, rsp.getValues()); List filesInResp = (List) rsp.getValues().get("files"); @@ -463,12 +479,13 @@ public void testBasicUserWorkflow() throws Exception { when(req.getParams()).thenReturn(reqParams); schemaDesignerAPI.analyze(req, rsp); - expSettings = Map.of( - ENABLE_DYNAMIC_FIELDS_PARAM, true, - ENABLE_FIELD_GUESSING_PARAM, false, - ENABLE_NESTED_DOCS_PARAM, false, - LANGUAGES_PARAM, Arrays.asList("en", "fr"), - COPY_FROM_PARAM, "_default"); + expSettings = + Map.of( + ENABLE_DYNAMIC_FIELDS_PARAM, true, + ENABLE_FIELD_GUESSING_PARAM, false, + ENABLE_NESTED_DOCS_PARAM, false, + LANGUAGES_PARAM, Arrays.asList("en", "fr"), + COPY_FROM_PARAM, "_default"); assertDesignerSettings(expSettings, rsp.getValues()); filesInResp = (List) rsp.getValues().get("files"); @@ -489,12 +506,13 @@ public void testBasicUserWorkflow() throws Exception { when(req.getParams()).thenReturn(reqParams); schemaDesignerAPI.analyze(req, rsp); - expSettings = Map.of( - ENABLE_DYNAMIC_FIELDS_PARAM, false, - ENABLE_FIELD_GUESSING_PARAM, false, - ENABLE_NESTED_DOCS_PARAM, false, - LANGUAGES_PARAM, Collections.emptyList(), - COPY_FROM_PARAM, "_default"); + expSettings = + Map.of( + ENABLE_DYNAMIC_FIELDS_PARAM, false, + ENABLE_FIELD_GUESSING_PARAM, false, + ENABLE_NESTED_DOCS_PARAM, false, + LANGUAGES_PARAM, Collections.emptyList(), + COPY_FROM_PARAM, "_default"); assertDesignerSettings(expSettings, rsp.getValues()); filesInResp = (List) 
rsp.getValues().get("files"); @@ -555,7 +573,8 @@ public void testBasicUserWorkflow() throws Exception { req = mock(SolrQueryRequest.class); when(req.getParams()).thenReturn(reqParams); - // switch a single-valued field to a multi-valued field, which triggers a full rebuild of the "temp" collection + // switch a single-valued field to a multi-valued field, which triggers a full rebuild of the + // "temp" collection stream = new ContentStreamBase.FileStream(getFile("schema-designer/update-author-field.json")); stream.setContentType(JSON_MIME); when(req.getContentStreams()).thenReturn(Collections.singletonList(stream)); @@ -586,9 +605,13 @@ public void testBasicUserWorkflow() throws Exception { rspData = rsp.getValues().toSolrParams(); schemaVersion = rspData.getInt(SCHEMA_VERSION_PARAM); assertNotNull(rsp.getValues().get("fieldTypes")); - List> fieldTypes = (List>) rsp.getValues().get("fieldTypes"); - Optional> expected = fieldTypes.stream().filter(m -> expectedTypeName.equals(m.get("name"))).findFirst(); - assertTrue("New field type '" + expectedTypeName + "' not found in add type response!", expected.isPresent()); + List> fieldTypes = + (List>) rsp.getValues().get("fieldTypes"); + Optional> expected = + fieldTypes.stream().filter(m -> expectedTypeName.equals(m.get("name"))).findFirst(); + assertTrue( + "New field type '" + expectedTypeName + "' not found in add type response!", + expected.isPresent()); reqParams.clear(); reqParams.set(SCHEMA_VERSION_PARAM, String.valueOf(schemaVersion)); @@ -691,7 +714,8 @@ public void testFieldUpdates() throws Exception { reqParams.set(CONFIG_SET_PARAM, configSet); req = mock(SolrQueryRequest.class); when(req.getParams()).thenReturn(reqParams); - ContentStreamBase.FileStream stream = new ContentStreamBase.FileStream(getFile("schema-designer/add-new-field.json")); + ContentStreamBase.FileStream stream = + new ContentStreamBase.FileStream(getFile("schema-designer/add-new-field.json")); stream.setContentType(JSON_MIME); when(req.getContentStreams()).thenReturn(Collections.singletonList(stream)); rsp = new SolrQueryResponse(); @@ -703,7 +727,8 @@ public void testFieldUpdates() throws Exception { final String fieldName = "keywords"; Optional> maybeField = - ((List>) rsp.getValues().get("fields")).stream().filter(m -> fieldName.equals(m.get("name"))).findFirst(); + ((List>) rsp.getValues().get("fields")) + .stream().filter(m -> fieldName.equals(m.get("name"))).findFirst(); assertTrue(maybeField.isPresent()); SimpleOrderedMap field = maybeField.get(); assertEquals(Boolean.FALSE, field.get("indexed")); @@ -720,21 +745,34 @@ public void testFieldUpdates() throws Exception { ManagedIndexSchema schema = schemaDesignerAPI.loadLatestSchema(mutableId); // make it required - Map updateField = Map.of("name", fieldName, "type", field.get("type"), "required", true); + Map updateField = + Map.of("name", fieldName, "type", field.get("type"), "required", true); configSetHelper.updateField(configSet, updateField, schema); schema = schemaDesignerAPI.loadLatestSchema(mutableId); SchemaField schemaField = schema.getField(fieldName); assertTrue(schemaField.isRequired()); - updateField = Map.of("name", fieldName, "type", field.get("type"), "required", false, "stored", false); + updateField = + Map.of("name", fieldName, "type", field.get("type"), "required", false, "stored", false); configSetHelper.updateField(configSet, updateField, schema); schema = schemaDesignerAPI.loadLatestSchema(mutableId); schemaField = schema.getField(fieldName); assertFalse(schemaField.isRequired()); 
assertFalse(schemaField.stored()); - updateField = Map.of("name", fieldName, "type", field.get("type"), "required", false, "stored", false, "multiValued", true); + updateField = + Map.of( + "name", + fieldName, + "type", + field.get("type"), + "required", + false, + "stored", + false, + "multiValued", + true); configSetHelper.updateField(configSet, updateField, schema); schema = schemaDesignerAPI.loadLatestSchema(mutableId); schemaField = schema.getField(fieldName); @@ -794,20 +832,25 @@ public void testSchemaDiffEndpoint() throws Exception { schemaDesignerAPI.analyze(req, rsp); // Update id field to not use docValues - List> fields = (List>) rsp.getValues().get("fields"); - SimpleOrderedMap idFieldMap = fields.stream().filter(field -> field.get("name").equals("id")).findFirst().get(); + List> fields = + (List>) rsp.getValues().get("fields"); + SimpleOrderedMap idFieldMap = + fields.stream().filter(field -> field.get("name").equals("id")).findFirst().get(); idFieldMap.remove("copyDest"); // Don't include copyDest as it is not a property of SchemaField SimpleOrderedMap idFieldMapUpdated = idFieldMap.clone(); idFieldMapUpdated.setVal(idFieldMapUpdated.indexOf("docValues", 0), Boolean.FALSE); idFieldMapUpdated.setVal(idFieldMapUpdated.indexOf("useDocValuesAsStored", 0), Boolean.FALSE); - idFieldMapUpdated.setVal(idFieldMapUpdated.indexOf("omitTermFreqAndPositions", 0), Boolean.FALSE); + idFieldMapUpdated.setVal( + idFieldMapUpdated.indexOf("omitTermFreqAndPositions", 0), Boolean.FALSE); SolrParams solrParams = idFieldMapUpdated.toSolrParams(); Map mapParams = solrParams.toMap(new HashMap<>()); mapParams.put("termVectors", Boolean.FALSE); - reqParams.set(SCHEMA_VERSION_PARAM, rsp.getValues().toSolrParams().getInt(SCHEMA_VERSION_PARAM)); + reqParams.set( + SCHEMA_VERSION_PARAM, rsp.getValues().toSolrParams().getInt(SCHEMA_VERSION_PARAM)); - ContentStreamBase.StringStream stringStream = new ContentStreamBase.StringStream(JSONUtil.toJSON(mapParams), JSON_MIME); + ContentStreamBase.StringStream stringStream = + new ContentStreamBase.StringStream(JSONUtil.toJSON(mapParams), JSON_MIME); when(req.getContentStreams()).thenReturn(Collections.singletonList(stringStream)); rsp = new SolrQueryResponse(); @@ -816,7 +859,8 @@ public void testSchemaDiffEndpoint() throws Exception { // Add a new field Integer schemaVersion = rsp.getValues().toSolrParams().getInt(SCHEMA_VERSION_PARAM); reqParams.set(SCHEMA_VERSION_PARAM, schemaVersion); - ContentStreamBase.FileStream fileStream = new ContentStreamBase.FileStream(getFile("schema-designer/add-new-field.json")); + ContentStreamBase.FileStream fileStream = + new ContentStreamBase.FileStream(getFile("schema-designer/add-new-field.json")); fileStream.setContentType(JSON_MIME); when(req.getContentStreams()).thenReturn(Collections.singletonList(fileStream)); rsp = new SolrQueryResponse(); @@ -848,8 +892,15 @@ public void testSchemaDiffEndpoint() throws Exception { Map mapDiff = (Map) fieldsDiff.get("updated"); assertEquals( Arrays.asList( - ImmutableMap.of("omitTermFreqAndPositions", true, "useDocValuesAsStored", true, "docValues", true), - ImmutableMap.of("omitTermFreqAndPositions", false, "useDocValuesAsStored", false, "docValues", false)), + ImmutableMap.of( + "omitTermFreqAndPositions", true, "useDocValuesAsStored", true, "docValues", true), + ImmutableMap.of( + "omitTermFreqAndPositions", + false, + "useDocValuesAsStored", + false, + "docValues", + false)), mapDiff.get("id")); assertNotNull(fieldsDiff.get("added")); Map fieldsAdded = (Map) 
fieldsDiff.get("added"); @@ -866,7 +917,10 @@ public void testSchemaDiffEndpoint() throws Exception { protected void assertDesignerSettings(Map expected, NamedList actual) { for (String expKey : expected.keySet()) { Object expValue = expected.get(expKey); - assertEquals("Value for designer setting '" + expKey + "' not match expected!", expValue, actual.get(expKey)); + assertEquals( + "Value for designer setting '" + expKey + "' not match expected!", + expValue, + actual.get(expKey)); } } } diff --git a/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerConfigSetHelper.java b/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerConfigSetHelper.java index cb96303a538..979ecd9ef0b 100644 --- a/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerConfigSetHelper.java +++ b/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerConfigSetHelper.java @@ -17,6 +17,12 @@ package org.apache.solr.handler.designer; +import static org.apache.solr.common.util.Utils.toJavabin; +import static org.apache.solr.handler.admin.ConfigSetsHandler.DEFAULT_CONFIGSET_NAME; +import static org.apache.solr.handler.designer.SchemaDesignerAPI.getMutableId; +import static org.apache.solr.schema.IndexSchema.NEST_PATH_FIELD_NAME; +import static org.apache.solr.schema.IndexSchema.ROOT_FIELD_NAME; + import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; @@ -26,7 +32,6 @@ import java.util.Map; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; - import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.cloud.SolrCloudTestCase; import org.apache.solr.common.SolrInputDocument; @@ -42,13 +47,8 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.apache.solr.common.util.Utils.toJavabin; -import static org.apache.solr.handler.admin.ConfigSetsHandler.DEFAULT_CONFIGSET_NAME; -import static org.apache.solr.handler.designer.SchemaDesignerAPI.getMutableId; -import static org.apache.solr.schema.IndexSchema.NEST_PATH_FIELD_NAME; -import static org.apache.solr.schema.IndexSchema.ROOT_FIELD_NAME; - -public class TestSchemaDesignerConfigSetHelper extends SolrCloudTestCase implements SchemaDesignerConstants { +public class TestSchemaDesignerConfigSetHelper extends SolrCloudTestCase + implements SchemaDesignerConstants { private CoreContainer cc; private SchemaDesignerConfigSetHelper helper; @@ -56,7 +56,9 @@ public class TestSchemaDesignerConfigSetHelper extends SolrCloudTestCase impleme @BeforeClass public static void createCluster() throws Exception { System.setProperty("managed.schema.mutable", "true"); - configureCluster(1).addConfig(DEFAULT_CONFIGSET_NAME, new File(ExternalPaths.DEFAULT_CONFIGSET).toPath()).configure(); + configureCluster(1) + .addConfig(DEFAULT_CONFIGSET_NAME, new File(ExternalPaths.DEFAULT_CONFIGSET).toPath()) + .configure(); // SchemaDesignerConfigSetHelper depends on the blob store CollectionAdminRequest.createCollection(BLOB_STORE_ID, 1, 1).process(cluster.getSolrClient()); cluster.waitForActiveCollection(BLOB_STORE_ID, 1, 1); @@ -109,7 +111,9 @@ public void testSetupMutable() throws Exception { helper.checkSchemaVersion(mutableId, version, -1); - schema = helper.syncLanguageSpecificObjectsAndFiles(configSet, schema, Collections.emptyList(), true, DEFAULT_CONFIGSET_NAME); + schema = + helper.syncLanguageSpecificObjectsAndFiles( + configSet, schema, Collections.emptyList(), true, DEFAULT_CONFIGSET_NAME); assertEquals(2, 
schema.getSchemaZkVersion()); byte[] zipped = helper.downloadAndZipConfigSet(mutableId); @@ -147,25 +151,37 @@ public void testEnableDisableOptions() throws Exception { ManagedIndexSchema schema = helper.loadLatestSchema(helper.loadSolrConfig(mutableId)); assertEquals(schema.getSchemaZkVersion(), helper.getCurrentSchemaVersion(mutableId)); - schema = helper.syncLanguageSpecificObjectsAndFiles(configSet, schema, Collections.singletonList("en"), true, DEFAULT_CONFIGSET_NAME); + schema = + helper.syncLanguageSpecificObjectsAndFiles( + configSet, schema, Collections.singletonList("en"), true, DEFAULT_CONFIGSET_NAME); assertNotNull(schema.getFieldTypeByName("text_en")); assertNotNull(schema.getFieldOrNull("*_txt_en")); assertNull(schema.getFieldTypeByName("text_fr")); - schema = helper.syncLanguageSpecificObjectsAndFiles(configSet, schema, Collections.singletonList("en"), false, DEFAULT_CONFIGSET_NAME); + schema = + helper.syncLanguageSpecificObjectsAndFiles( + configSet, schema, Collections.singletonList("en"), false, DEFAULT_CONFIGSET_NAME); assertNotNull(schema.getFieldTypeByName("text_en")); assertNull(schema.getFieldOrNull("*_txt_en")); assertNull(schema.getFieldTypeByName("text_fr")); - schema = helper.syncLanguageSpecificObjectsAndFiles(configSet, schema, Arrays.asList("en", "fr"), false, DEFAULT_CONFIGSET_NAME); + schema = + helper.syncLanguageSpecificObjectsAndFiles( + configSet, schema, Arrays.asList("en", "fr"), false, DEFAULT_CONFIGSET_NAME); assertNotNull(schema.getFieldTypeByName("text_en")); assertNull(schema.getFieldOrNull("*_txt_en")); assertNotNull(schema.getFieldTypeByName("text_fr")); - schema = helper.syncLanguageSpecificObjectsAndFiles(configSet, schema, Arrays.asList("en", "fr"), true, DEFAULT_CONFIGSET_NAME); + schema = + helper.syncLanguageSpecificObjectsAndFiles( + configSet, schema, Arrays.asList("en", "fr"), true, DEFAULT_CONFIGSET_NAME); assertNotNull(schema.getFieldTypeByName("text_en")); assertNotNull(schema.getFieldOrNull("*_txt_en")); - assertTrue(cluster.getZkClient().exists(SchemaDesignerAPI.getConfigSetZkPath(mutableId, "lang/stopwords_en.txt"), true)); + assertTrue( + cluster + .getZkClient() + .exists( + SchemaDesignerAPI.getConfigSetZkPath(mutableId, "lang/stopwords_en.txt"), true)); assertNotNull(schema.getFieldTypeByName("text_fr")); assertNotNull(schema.getFieldOrNull("*_txt_fr")); assertNull(schema.getFieldOrNull("*_txt_ga")); @@ -173,21 +189,30 @@ public void testEnableDisableOptions() throws Exception { // add a field that uses text_en and then try removing "en" from the lang set helper.createCollection(mutableId, mutableId); // need to create field Map addField = Map.of("name", "title", "type", "text_en"); - String addedFieldName = helper.addSchemaObject(configSet, Collections.singletonMap("add-field", addField)); + String addedFieldName = + helper.addSchemaObject(configSet, Collections.singletonMap("add-field", addField)); assertEquals("title", addedFieldName); schema = helper.loadLatestSchema(helper.loadSolrConfig(mutableId)); assertNotNull(schema.getField("title")); - schema = helper.syncLanguageSpecificObjectsAndFiles(configSet, schema, Collections.singletonList("fr"), true, DEFAULT_CONFIGSET_NAME); + schema = + helper.syncLanguageSpecificObjectsAndFiles( + configSet, schema, Collections.singletonList("fr"), true, DEFAULT_CONFIGSET_NAME); assertNotNull(schema.getFieldTypeByName("text_en")); // being used, so not removed assertNotNull(schema.getFieldOrNull("*_txt_en")); - 
assertTrue(cluster.getZkClient().exists(SchemaDesignerAPI.getConfigSetZkPath(mutableId, "lang/stopwords_en.txt"), true)); + assertTrue( + cluster + .getZkClient() + .exists( + SchemaDesignerAPI.getConfigSetZkPath(mutableId, "lang/stopwords_en.txt"), true)); assertNotNull(schema.getFieldTypeByName("text_fr")); assertNotNull(schema.getFieldOrNull("*_txt_fr")); assertNull(schema.getFieldOrNull("*_txt_ga")); - schema = helper.syncLanguageSpecificObjectsAndFiles(configSet, schema, Collections.emptyList(), true, DEFAULT_CONFIGSET_NAME); + schema = + helper.syncLanguageSpecificObjectsAndFiles( + configSet, schema, Collections.emptyList(), true, DEFAULT_CONFIGSET_NAME); assertNotNull(schema.getFieldTypeByName("text_en")); assertNotNull(schema.getFieldOrNull("*_txt_en")); assertNotNull(schema.getFieldTypeByName("text_fr")); @@ -195,7 +220,9 @@ public void testEnableDisableOptions() throws Exception { assertNotNull(schema.getFieldTypeByName("text_ga")); assertNotNull(schema.getFieldOrNull("*_txt_ga")); - schema = helper.syncLanguageSpecificObjectsAndFiles(configSet, schema, Collections.emptyList(), false, DEFAULT_CONFIGSET_NAME); + schema = + helper.syncLanguageSpecificObjectsAndFiles( + configSet, schema, Collections.emptyList(), false, DEFAULT_CONFIGSET_NAME); assertNotNull(schema.getFieldTypeByName("text_en")); assertNull(schema.getFieldOrNull("*_txt_en")); assertNotNull(schema.getFieldTypeByName("text_fr")); @@ -226,7 +253,9 @@ public void testPersistSampleDocs() throws Exception { doc.setField("pages", 809); doc.setField("published_year", 1989); - helper.postDataToBlobStore(cluster.getSolrClient(), configSet + "_sample", + helper.postDataToBlobStore( + cluster.getSolrClient(), + configSet + "_sample", DefaultSampleDocumentsLoader.streamAsBytes(toJavabin(Collections.singletonList(doc)))); List docs = helper.getStoredSampleDocs(configSet); @@ -247,12 +276,15 @@ public void testAnalyzeField() throws Exception { helper.createCollection(mutableId, mutableId); Map addField = Map.of("name", "title", "type", "text_en"); - String addedFieldName = helper.addSchemaObject(configSet, Collections.singletonMap("add-field", addField)); + String addedFieldName = + helper.addSchemaObject(configSet, Collections.singletonMap("add-field", addField)); assertEquals("title", addedFieldName); - Map analysis = helper.analyzeField(configSet, "title", "The Pillars of the Earth"); + Map analysis = + helper.analyzeField(configSet, "title", "The Pillars of the Earth"); - Map title = (Map) ((Map) analysis.get("field_names")).get("title"); + Map title = + (Map) ((Map) analysis.get("field_names")).get("title"); assertNotNull(title); List index = (List) title.get("index"); assertNotNull(index); @@ -271,13 +303,17 @@ public void testCopyFieldUpdates() throws Exception { // add / update field Map addField = Map.of("name", "author", "type", "string"); - String addedFieldName = helper.addSchemaObject(configSet, Collections.singletonMap("add-field", addField)); + String addedFieldName = + helper.addSchemaObject(configSet, Collections.singletonMap("add-field", addField)); assertEquals("author", addedFieldName); - helper.addSchemaObject(configSet, - Collections.singletonMap("add-field", Map.of("name", "_catch_all_", "type", "text_general"))); + helper.addSchemaObject( + configSet, + Collections.singletonMap( + "add-field", Map.of("name", "_catch_all_", "type", "text_general"))); - Map updateField = Map.of("name", "author", "type", "string", "copyDest", "_text_"); + Map updateField = + Map.of("name", "author", "type", "string", 
"copyDest", "_text_"); ManagedIndexSchema latest = helper.loadLatestSchema(helper.loadSolrConfig(mutableId)); latest.getField("_catch_all_"); @@ -320,7 +356,8 @@ public void testAddUpdateObjects() throws Exception { // add / update field Map addField = Map.of("name", "author", "type", "string"); - String addedFieldName = helper.addSchemaObject(configSet, Collections.singletonMap("add-field", addField)); + String addedFieldName = + helper.addSchemaObject(configSet, Collections.singletonMap("add-field", addField)); assertEquals("author", addedFieldName); Map updateField = Map.of("name", "author", "type", "string", "required", true); @@ -335,8 +372,23 @@ public void testAddUpdateObjects() throws Exception { assertTrue(addedField.hasDocValues()); // an update that requires a full-rebuild - updateField = Map.of("name", "author", "type", "string", "required", true, "docValues", true, "multiValued", true, "copyDest", "_text_"); - resp = helper.updateSchemaObject(configSet, updateField, helper.loadLatestSchema(helper.loadSolrConfig(mutableId))); + updateField = + Map.of( + "name", + "author", + "type", + "string", + "required", + true, + "docValues", + true, + "multiValued", + true, + "copyDest", + "_text_"); + resp = + helper.updateSchemaObject( + configSet, updateField, helper.loadLatestSchema(helper.loadSolrConfig(mutableId))); assertNotNull(resp); assertEquals("field", resp.get("updateType")); assertEquals(true, resp.get("rebuild")); @@ -346,15 +398,21 @@ public void testAddUpdateObjects() throws Exception { assertEquals(Collections.singletonList("author"), latest.getCopySources("_text_")); // switch the author field type to strings - updateField = Map.of("name", "author", "type", "strings", "docValues", true, "copyDest", "_text_"); - resp = helper.updateSchemaObject(configSet, updateField, helper.loadLatestSchema(helper.loadSolrConfig(mutableId))); + updateField = + Map.of("name", "author", "type", "strings", "docValues", true, "copyDest", "_text_"); + resp = + helper.updateSchemaObject( + configSet, updateField, helper.loadLatestSchema(helper.loadSolrConfig(mutableId))); assertNotNull(resp); assertEquals("field", resp.get("updateType")); - assertEquals(false, resp.get("rebuild")); // tricky, we didn't actually change the field to multiValue (it already was) + // tricky, we didn't actually change the field to multiValue (it already was) + assertEquals(false, resp.get("rebuild")); // add / update field type - Map addType = Map.of("name", "testType", "class", "solr.StrField", "docValues", true); - String addTypeName = helper.addSchemaObject(configSet, Collections.singletonMap("add-field-type", addType)); + Map addType = + Map.of("name", "testType", "class", "solr.StrField", "docValues", true); + String addTypeName = + helper.addSchemaObject(configSet, Collections.singletonMap("add-field-type", addType)); assertEquals("testType", addTypeName); latest = helper.loadLatestSchema(helper.loadSolrConfig(mutableId)); @@ -364,20 +422,29 @@ public void testAddUpdateObjects() throws Exception { assertTrue(props.getBooleanArg("docValues")); assertFalse(addedType.isMultiValued()); - Map updateType = Map.of("name", "testType", "class", "solr.StrField", "docValues", true, "multiValued", true); - resp = helper.updateSchemaObject(configSet, updateType, helper.loadLatestSchema(helper.loadSolrConfig(mutableId))); + Map updateType = + Map.of( + "name", "testType", "class", "solr.StrField", "docValues", true, "multiValued", true); + resp = + helper.updateSchemaObject( + configSet, updateType, 
helper.loadLatestSchema(helper.loadSolrConfig(mutableId))); assertNotNull(resp); assertEquals("type", resp.get("updateType")); assertEquals(true, resp.get("rebuild")); // add / update dynamic field Map addDynField = Map.of("name", "*_test", "type", "string"); - String addedDynFieldName = helper.addSchemaObject(configSet, Collections.singletonMap("add-dynamic-field", addDynField)); + String addedDynFieldName = + helper.addSchemaObject( + configSet, Collections.singletonMap("add-dynamic-field", addDynField)); assertEquals("*_test", addedDynFieldName); // update the dynamic field - Map updateDynField = Map.of("name", "*_test", "type", "string", "docValues", false); - resp = helper.updateSchemaObject(configSet, updateDynField, helper.loadLatestSchema(helper.loadSolrConfig(mutableId))); + Map updateDynField = + Map.of("name", "*_test", "type", "string", "docValues", false); + resp = + helper.updateSchemaObject( + configSet, updateDynField, helper.loadLatestSchema(helper.loadSolrConfig(mutableId))); assertEquals("*_test", addedDynFieldName); assertNotNull(resp); assertEquals("dynamicField", resp.get("updateType")); diff --git a/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerSettingsDAO.java b/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerSettingsDAO.java index 948036865df..c27f9d49b89 100644 --- a/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerSettingsDAO.java +++ b/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerSettingsDAO.java @@ -17,10 +17,11 @@ package org.apache.solr.handler.designer; +import static org.apache.solr.handler.admin.ConfigSetsHandler.DEFAULT_CONFIGSET_NAME; + import java.io.File; import java.util.Collections; import java.util.Map; - import org.apache.solr.client.solrj.SolrResponse; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.cloud.SolrCloudTestCase; @@ -32,16 +33,17 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.apache.solr.handler.admin.ConfigSetsHandler.DEFAULT_CONFIGSET_NAME; - -public class TestSchemaDesignerSettingsDAO extends SolrCloudTestCase implements SchemaDesignerConstants { +public class TestSchemaDesignerSettingsDAO extends SolrCloudTestCase + implements SchemaDesignerConstants { private CoreContainer cc; @BeforeClass public static void createCluster() throws Exception { System.setProperty("managed.schema.mutable", "true"); - configureCluster(1).addConfig(DEFAULT_CONFIGSET_NAME, new File(ExternalPaths.DEFAULT_CONFIGSET).toPath()).configure(); + configureCluster(1) + .addConfig(DEFAULT_CONFIGSET_NAME, new File(ExternalPaths.DEFAULT_CONFIGSET).toPath()) + .configure(); } @AfterClass @@ -65,35 +67,49 @@ public void testDAO() throws Exception { String configSet = DEFAULT_CONFIGSET_NAME; SolrResponse rsp = - CollectionAdminRequest.createCollection(collection, configSet, 1, 1).process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection(collection, configSet, 1, 1) + .process(cluster.getSolrClient()); CollectionsHandler.waitForActiveCollection(collection, cc, rsp); SchemaDesignerSettingsDAO dao = new SchemaDesignerSettingsDAO(cc); SchemaDesignerSettings settings = dao.getSettings(configSet); assertNotNull(settings); - Map expSettings = Map.of( - DESIGNER_KEY + ENABLE_DYNAMIC_FIELDS_PARAM, true, - AUTO_CREATE_FIELDS, true, - DESIGNER_KEY + ENABLE_NESTED_DOCS_PARAM, false, - DESIGNER_KEY + LANGUAGES_PARAM, Collections.emptyList()); + Map expSettings = + Map.of( + DESIGNER_KEY + 
ENABLE_DYNAMIC_FIELDS_PARAM, + true, + AUTO_CREATE_FIELDS, + true, + DESIGNER_KEY + ENABLE_NESTED_DOCS_PARAM, + false, + DESIGNER_KEY + LANGUAGES_PARAM, + Collections.emptyList()); assertDesignerSettings(expSettings, settings); settings.setDisabled(false); settings.setCopyFrom("foo"); - assertTrue("updated settings should have changed in ZK", dao.persistIfChanged(configSet, settings)); + assertTrue( + "updated settings should have changed in ZK", dao.persistIfChanged(configSet, settings)); settings = dao.getSettings(configSet); assertNotNull(settings); - expSettings = Map.of( - DESIGNER_KEY + DISABLED, false, - DESIGNER_KEY + COPY_FROM_PARAM, "foo", - DESIGNER_KEY + ENABLE_DYNAMIC_FIELDS_PARAM, true, - AUTO_CREATE_FIELDS, true, - DESIGNER_KEY + ENABLE_NESTED_DOCS_PARAM, false, - DESIGNER_KEY + LANGUAGES_PARAM, Collections.emptyList()); + expSettings = + Map.of( + DESIGNER_KEY + DISABLED, + false, + DESIGNER_KEY + COPY_FROM_PARAM, + "foo", + DESIGNER_KEY + ENABLE_DYNAMIC_FIELDS_PARAM, + true, + AUTO_CREATE_FIELDS, + true, + DESIGNER_KEY + ENABLE_NESTED_DOCS_PARAM, + false, + DESIGNER_KEY + LANGUAGES_PARAM, + Collections.emptyList()); assertDesignerSettings(expSettings, settings); assertFalse("should not be disabled", dao.isDesignerDisabled(configSet)); @@ -104,27 +120,36 @@ public void testDAO() throws Exception { settings.setFieldGuessingEnabled(false); settings.setLanguages(Collections.singletonList("en")); - assertTrue("updated settings should have changed in ZK", dao.persistIfChanged(configSet, settings)); + assertTrue( + "updated settings should have changed in ZK", dao.persistIfChanged(configSet, settings)); settings = dao.getSettings(configSet); assertNotNull(settings); - expSettings = Map.of( - DESIGNER_KEY + DISABLED, true, - DESIGNER_KEY + COPY_FROM_PARAM, "bar", - DESIGNER_KEY + ENABLE_DYNAMIC_FIELDS_PARAM, false, - AUTO_CREATE_FIELDS, false, - DESIGNER_KEY + ENABLE_NESTED_DOCS_PARAM, true, - DESIGNER_KEY + LANGUAGES_PARAM, Collections.singletonList("en")); + expSettings = + Map.of( + DESIGNER_KEY + DISABLED, + true, + DESIGNER_KEY + COPY_FROM_PARAM, + "bar", + DESIGNER_KEY + ENABLE_DYNAMIC_FIELDS_PARAM, + false, + AUTO_CREATE_FIELDS, + false, + DESIGNER_KEY + ENABLE_NESTED_DOCS_PARAM, + true, + DESIGNER_KEY + LANGUAGES_PARAM, + Collections.singletonList("en")); assertDesignerSettings(expSettings, settings); assertTrue("should be disabled", dao.isDesignerDisabled(configSet)); // handles booleans stored as strings in the overlay - Map stored = Map.of(AUTO_CREATE_FIELDS, "false"); + Map stored = Map.of(AUTO_CREATE_FIELDS, "false"); settings = new SchemaDesignerSettings(stored); assertFalse(settings.fieldGuessingEnabled()); } - protected void assertDesignerSettings(Map expectedMap, SchemaDesignerSettings actual) { + protected void assertDesignerSettings( + Map expectedMap, SchemaDesignerSettings actual) { assertEquals(new SchemaDesignerSettings(expectedMap), actual); } } diff --git a/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java b/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java index 152c2bb7636..eaa9afd5f18 100644 --- a/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java +++ b/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java @@ -16,6 +16,7 @@ */ package org.apache.solr.handler.export; +import com.fasterxml.jackson.databind.ObjectMapper; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; @@ -27,8 +28,6 @@ import java.util.Locale; import java.util.Map; 
import java.util.Set; - -import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrInputDocument; @@ -45,13 +44,13 @@ public class TestExportWriter extends SolrTestCaseJ4 { private ObjectMapper mapper = new ObjectMapper(); - + @BeforeClass public static void beforeClass() throws Exception { // force LogDocMergePolicy so that we get a predictable doc order // when testing index order results systemSetPropertySolrTestsMergePolicyFactory(LogDocMergePolicyFactory.class.getName()); - initCore("solrconfig-sortingresponse.xml","schema-sortingresponse.xml"); + initCore("solrconfig-sortingresponse.xml", "schema-sortingresponse.xml"); } @Before @@ -60,33 +59,33 @@ public void setUp() throws Exception { super.setUp(); assertU(delQ("*:*")); assertU(commit()); - } @Test public void testEmptyValues() throws Exception { - //Index 2 document with one document that doesn't have field2_i_p - //Sort and return field2_i_p - //Test SOLR-12572 for potential NPEs + // Index 2 document with one document that doesn't have field2_i_p + // Sort and return field2_i_p + // Test SOLR-12572 for potential NPEs assertU(delQ("*:*")); assertU(commit()); - - assertU(adoc("id","1", "field2_i_p","1")); - assertU(adoc("id","2")); + assertU(adoc("id", "1", "field2_i_p", "1")); + assertU(adoc("id", "2")); assertU(commit()); - String resp = h.query(req("q", "*:*", "qt", "/export", "fl", "id,field2_i_p", "sort", "field2_i_p asc")); - assertJsonEquals(resp, "{\n" + - " \"responseHeader\":{\"status\":0},\n" + - " \"response\":{\n" + - " \"numFound\":2,\n" + - " \"docs\":[{\n" + - " \"id\":\"2\"}\n" + - " ,{\n" + - " \"id\":\"1\",\n" + - " \"field2_i_p\":1}]}}"); - + String resp = + h.query(req("q", "*:*", "qt", "/export", "fl", "id,field2_i_p", "sort", "field2_i_p asc")); + assertJsonEquals( + resp, + "{\n" + + " \"responseHeader\":{\"status\":0},\n" + + " \"response\":{\n" + + " \"numFound\":2,\n" + + " \"docs\":[{\n" + + " \"id\":\"2\"}\n" + + " ,{\n" + + " \"id\":\"1\",\n" + + " \"field2_i_p\":1}]}}"); } @Test @@ -94,129 +93,235 @@ public void testSortingOnFieldWithNoValues() throws Exception { assertU(delQ("*:*")); assertU(commit()); - assertU(adoc("id","1")); + assertU(adoc("id", "1")); assertU(commit()); // 10 fields - List fieldNames = new ArrayList<>(Arrays.asList("floatdv", "intdv", "stringdv", "longdv", "doubledv", - "datedv", "booleandv", "field1_s_dv", "field2_i_p", "field3_l_p")); + List fieldNames = + new ArrayList<>( + Arrays.asList( + "floatdv", + "intdv", + "stringdv", + "longdv", + "doubledv", + "datedv", + "booleandv", + "field1_s_dv", + "field2_i_p", + "field3_l_p")); for (String sortField : fieldNames) { - String resp = h.query(req("q", "*:*", "qt", "/export", "fl", "id," + sortField, "sort", sortField + " desc")); - assertJsonEquals(resp, "{\n" + - " \"responseHeader\":{\"status\":0},\n" + - " \"response\":{\n" + - " \"numFound\":1,\n" + - " \"docs\":[{\n" + - " \"id\":\"1\"}]}}"); + String resp = + h.query( + req( + "q", + "*:*", + "qt", + "/export", + "fl", + "id," + sortField, + "sort", + sortField + " desc")); + assertJsonEquals( + resp, + "{\n" + + " \"responseHeader\":{\"status\":0},\n" + + " \"response\":{\n" + + " \"numFound\":1,\n" + + " \"docs\":[{\n" + + " \"id\":\"1\"}]}}"); } - } public static void createIndex() { - assertU(adoc("id","1", - "floatdv","2.1", - "intdv", "1", - "stringdv", "hello world", - "longdv", "323223232323", - "doubledv","2344.345", - "intdv_m","100", - "intdv_m","250", - 
"floatdv_m", "123.321", - "floatdv_m", "345.123", - "doubledv_m", "3444.222", - "doubledv_m", "23232.2", - "longdv_m", "43434343434", - "longdv_m", "343332", - "stringdv_m", "manchester \"city\"", - "stringdv_m", "liverpool", - "stringdv_m", "Everton", - "datedv", "2017-06-16T07:00:00Z", - "datedv_m", "2017-06-16T01:00:00Z", - "datedv_m", "2017-06-16T02:00:00Z", - "datedv_m", "2017-06-16T03:00:00Z", - "datedv_m", "2017-06-16T04:00:00Z", - "sortabledv_m", "this is some text one_1", - "sortabledv_m", "this is some text two_1", - "sortabledv_m", "this is some text three_1", - "sortabledv_m_udvas", "this is some text one_1", - "sortabledv_m_udvas", "this is some text two_1", - "sortabledv_m_udvas", "this is some text three_1")); - - assertU(adoc("id","7", - "floatdv","2.1", - "intdv", "7", - "longdv", "323223232323", - "doubledv","2344.345", - "floatdv_m", "123.321", - "floatdv_m", "345.123", - "doubledv_m", "3444.222", - "doubledv_m", "23232.2", - "longdv_m", "43434343434", - "longdv_m", "343332")); + assertU( + adoc( + "id", "1", + "floatdv", "2.1", + "intdv", "1", + "stringdv", "hello world", + "longdv", "323223232323", + "doubledv", "2344.345", + "intdv_m", "100", + "intdv_m", "250", + "floatdv_m", "123.321", + "floatdv_m", "345.123", + "doubledv_m", "3444.222", + "doubledv_m", "23232.2", + "longdv_m", "43434343434", + "longdv_m", "343332", + "stringdv_m", "manchester \"city\"", + "stringdv_m", "liverpool", + "stringdv_m", "Everton", + "datedv", "2017-06-16T07:00:00Z", + "datedv_m", "2017-06-16T01:00:00Z", + "datedv_m", "2017-06-16T02:00:00Z", + "datedv_m", "2017-06-16T03:00:00Z", + "datedv_m", "2017-06-16T04:00:00Z", + "sortabledv_m", "this is some text one_1", + "sortabledv_m", "this is some text two_1", + "sortabledv_m", "this is some text three_1", + "sortabledv_m_udvas", "this is some text one_1", + "sortabledv_m_udvas", "this is some text two_1", + "sortabledv_m_udvas", "this is some text three_1")); + + assertU( + adoc( + "id", + "7", + "floatdv", + "2.1", + "intdv", + "7", + "longdv", + "323223232323", + "doubledv", + "2344.345", + "floatdv_m", + "123.321", + "floatdv_m", + "345.123", + "doubledv_m", + "3444.222", + "doubledv_m", + "23232.2", + "longdv_m", + "43434343434", + "longdv_m", + "343332")); assertU(commit()); - assertU(adoc("id","2", "floatdv","2.1", "intdv", "2", "stringdv", "hello world", "longdv", "323223232323","doubledv","2344.344")); + assertU( + adoc( + "id", + "2", + "floatdv", + "2.1", + "intdv", + "2", + "stringdv", + "hello world", + "longdv", + "323223232323", + "doubledv", + "2344.344")); assertU(commit()); - assertU(adoc("id","3", - "floatdv","2.1", - "intdv", "3", - "stringdv", "chello world", - "longdv", "323223232323", - "doubledv","2344.346", - "intdv_m","100", - "intdv_m","250", - "floatdv_m", "123.321", - "floatdv_m", "345.123", - "doubledv_m", "3444.222", - "doubledv_m", "23232.2", - "longdv_m", "43434343434", - "longdv_m", "343332", - "stringdv_m", "manchester \"city\"", - "stringdv_m", "liverpool", - "stringdv_m", "everton", - "int_is_t", "1", - "int_is_t", "1", - "int_is_t", "1", - "int_is_t", "1", - "sortabledv", "this is some text_1", - "sortabledv_udvas", "this is some text_1")); + assertU( + adoc( + "id", + "3", + "floatdv", + "2.1", + "intdv", + "3", + "stringdv", + "chello world", + "longdv", + "323223232323", + "doubledv", + "2344.346", + "intdv_m", + "100", + "intdv_m", + "250", + "floatdv_m", + "123.321", + "floatdv_m", + "345.123", + "doubledv_m", + "3444.222", + "doubledv_m", + "23232.2", + "longdv_m", + "43434343434", + "longdv_m", + 
"343332", + "stringdv_m", + "manchester \"city\"", + "stringdv_m", + "liverpool", + "stringdv_m", + "everton", + "int_is_t", + "1", + "int_is_t", + "1", + "int_is_t", + "1", + "int_is_t", + "1", + "sortabledv", + "this is some text_1", + "sortabledv_udvas", + "this is some text_1")); assertU(commit()); - assertU(adoc("id","8", - "floatdv","2.1", - "intdv", "10000000", - "stringdv", "chello \"world\"", - "longdv", "323223232323", - "doubledv","2344.346", - "intdv_m","100", - "intdv_m","250", - "floatdv_m", "123.321", - "floatdv_m", "345.123", - "doubledv_m", "3444.222", - "doubledv_m", "23232.2", - "longdv_m", "43434343434", - "longdv_m", "343332", - "stringdv_m", "manchester \"city\"", - "stringdv_m", "liverpool", - "stringdv_m", "everton", - "datedv", "2017-01-01T00:00:00Z", - "datedv_m", "2017-01-01T01:00:00Z", - "datedv_m", "2017-01-01T02:00:00Z", - "int_is_p", "1", - "int_is_p", "1", - "int_is_p", "1", - "int_is_p", "1", - "sortabledv", "this is some text_2", - "sortabledv_udvas", "this is some text_2", - "sortabledv_m", "this is some text one_2", - "sortabledv_m", "this is some text two_2", - "sortabledv_m", "this is some text three_2", - "sortabledv_m_udvas", "this is some text one_2", - "sortabledv_m_udvas", "this is some text two_2", - "sortabledv_m_udvas", "this is some text three_2" - )); + assertU( + adoc( + "id", + "8", + "floatdv", + "2.1", + "intdv", + "10000000", + "stringdv", + "chello \"world\"", + "longdv", + "323223232323", + "doubledv", + "2344.346", + "intdv_m", + "100", + "intdv_m", + "250", + "floatdv_m", + "123.321", + "floatdv_m", + "345.123", + "doubledv_m", + "3444.222", + "doubledv_m", + "23232.2", + "longdv_m", + "43434343434", + "longdv_m", + "343332", + "stringdv_m", + "manchester \"city\"", + "stringdv_m", + "liverpool", + "stringdv_m", + "everton", + "datedv", + "2017-01-01T00:00:00Z", + "datedv_m", + "2017-01-01T01:00:00Z", + "datedv_m", + "2017-01-01T02:00:00Z", + "int_is_p", + "1", + "int_is_p", + "1", + "int_is_p", + "1", + "int_is_p", + "1", + "sortabledv", + "this is some text_2", + "sortabledv_udvas", + "this is some text_2", + "sortabledv_m", + "this is some text one_2", + "sortabledv_m", + "this is some text two_2", + "sortabledv_m", + "this is some text three_2", + "sortabledv_m_udvas", + "this is some text one_2", + "sortabledv_m_udvas", + "this is some text two_2", + "sortabledv_m_udvas", + "this is some text three_2")); assertU(commit()); - - } @Test @@ -236,321 +341,651 @@ public void test() throws Exception { public void testSmallChains() throws Exception { clearIndex(); - assertU(adoc("id","1", - "field1_i_p",Integer.toString(Integer.MIN_VALUE), - "field2_i_p","1")); + assertU(adoc("id", "1", "field1_i_p", Integer.toString(Integer.MIN_VALUE), "field2_i_p", "1")); assertU(commit()); - assertU(adoc("id","2", - "field1_i_p",Integer.toString(Integer.MIN_VALUE), - "field2_i_p",Integer.toString(Integer.MIN_VALUE + 1))); + assertU( + adoc( + "id", + "2", + "field1_i_p", + Integer.toString(Integer.MIN_VALUE), + "field2_i_p", + Integer.toString(Integer.MIN_VALUE + 1))); assertU(commit()); - assertU(adoc("id","3", - "field1_i_p",Integer.toString(Integer.MIN_VALUE), - "field2_i_p",Integer.toString(Integer.MIN_VALUE))); + assertU( + adoc( + "id", + "3", + "field1_i_p", + Integer.toString(Integer.MIN_VALUE), + "field2_i_p", + Integer.toString(Integer.MIN_VALUE))); assertU(commit()); - //Test single value DocValue output - //Expected for asc sort doc3 -> doc2 -> doc1 - String s = h.query(req("q", "*:*", "qt", "/export", "fl", "id", "sort", "field1_i_p 
asc,field2_i_p asc")); - assertJsonEquals(s, "{\n" + - " \"responseHeader\":{\"status\":0},\n" + - " \"response\":{\n" + - " \"numFound\":3,\n" + - " \"docs\":[{\n" + - " \"id\":\"3\"}\n" + - " ,{\n" + - " \"id\":\"2\"}\n" + - " ,{\n" + - " \"id\":\"1\"}]}}"); + // Test single value DocValue output + // Expected for asc sort doc3 -> doc2 -> doc1 + String s = + h.query( + req("q", "*:*", "qt", "/export", "fl", "id", "sort", "field1_i_p asc,field2_i_p asc")); + assertJsonEquals( + s, + "{\n" + + " \"responseHeader\":{\"status\":0},\n" + + " \"response\":{\n" + + " \"numFound\":3,\n" + + " \"docs\":[{\n" + + " \"id\":\"3\"}\n" + + " ,{\n" + + " \"id\":\"2\"}\n" + + " ,{\n" + + " \"id\":\"1\"}]}}"); clearIndex(); - //Adding 3 docs of integers with the following values + // Adding 3 docs of integers with the following values // doc1: Integer.MIN_VALUE,1,2,Integer.MAX_VALUE,3,4,5,6 // doc2: Integer.MIN_VALUE,Integer.MIN_VALUE,2,Integer.MAX_VALUE,4,4,5,6 // doc3: Integer.MIN_VALUE,Integer.MIN_VALUE,2,Integer.MAX_VALUE,3,4,5,6 - assertU(adoc("id","1", - "field1_i_p",Integer.toString(Integer.MIN_VALUE), - "field2_i_p","1", - "field3_i_p","2", - "field4_i_p",Integer.toString(Integer.MAX_VALUE), - "field5_i_p","3", - "field6_i_p","4", - "field7_i_p","5", - "field8_i_p","6")); + assertU( + adoc( + "id", + "1", + "field1_i_p", + Integer.toString(Integer.MIN_VALUE), + "field2_i_p", + "1", + "field3_i_p", + "2", + "field4_i_p", + Integer.toString(Integer.MAX_VALUE), + "field5_i_p", + "3", + "field6_i_p", + "4", + "field7_i_p", + "5", + "field8_i_p", + "6")); assertU(commit()); - assertU(adoc("id","2", - "field1_i_p",Integer.toString(Integer.MIN_VALUE), - "field2_i_p",Integer.toString(Integer.MIN_VALUE), - "field3_i_p","2", - "field4_i_p",Integer.toString(Integer.MAX_VALUE), - "field5_i_p","4", - "field6_i_p","4", - "field7_i_p","5", - "field8_i_p","6")); + assertU( + adoc( + "id", + "2", + "field1_i_p", + Integer.toString(Integer.MIN_VALUE), + "field2_i_p", + Integer.toString(Integer.MIN_VALUE), + "field3_i_p", + "2", + "field4_i_p", + Integer.toString(Integer.MAX_VALUE), + "field5_i_p", + "4", + "field6_i_p", + "4", + "field7_i_p", + "5", + "field8_i_p", + "6")); assertU(commit()); - assertU(adoc("id","3", - "field1_i_p",Integer.toString(Integer.MIN_VALUE), - "field2_i_p",Integer.toString(Integer.MIN_VALUE), - "field3_i_p","2", - "field4_i_p",Integer.toString(Integer.MAX_VALUE), - "field5_i_p","3", - "field6_i_p","4", - "field7_i_p","5", - "field8_i_p","6")); + assertU( + adoc( + "id", + "3", + "field1_i_p", + Integer.toString(Integer.MIN_VALUE), + "field2_i_p", + Integer.toString(Integer.MIN_VALUE), + "field3_i_p", + "2", + "field4_i_p", + Integer.toString(Integer.MAX_VALUE), + "field5_i_p", + "3", + "field6_i_p", + "4", + "field7_i_p", + "5", + "field8_i_p", + "6")); assertU(commit()); - s = h.query(req("q", "*:*", "qt", "/export", "fl", "id", "sort", "field1_i_p asc,field2_i_p asc,field3_i_p asc,field4_i_p asc,field5_i_p desc,field6_i_p desc,field7_i_p desc,field8_i_p asc")); - assertJsonEquals(s, "{\n" + - " \"responseHeader\":{\"status\":0},\n" + - " \"response\":{\n" + - " \"numFound\":3,\n" + - " \"docs\":[{\n" + - " \"id\":\"2\"}\n" + - " ,{\n" + - " \"id\":\"3\"}\n" + - " ,{\n" + - " \"id\":\"1\"}]}}"); - + s = + h.query( + req( + "q", + "*:*", + "qt", + "/export", + "fl", + "id", + "sort", + "field1_i_p asc,field2_i_p asc,field3_i_p asc,field4_i_p asc,field5_i_p desc,field6_i_p desc,field7_i_p desc,field8_i_p asc")); + assertJsonEquals( + s, + "{\n" + + " \"responseHeader\":{\"status\":0},\n" 
+ + " \"response\":{\n" + + " \"numFound\":3,\n" + + " \"docs\":[{\n" + + " \"id\":\"2\"}\n" + + " ,{\n" + + " \"id\":\"3\"}\n" + + " ,{\n" + + " \"id\":\"1\"}]}}"); } @Test public void testIndexOrder() throws Exception { clearIndex(); - assertU(adoc("id","1", "stringdv","a")); - assertU(adoc("id","2", "stringdv","a")); + assertU(adoc("id", "1", "stringdv", "a")); + assertU(adoc("id", "2", "stringdv", "a")); assertU(commit()); - assertU(adoc("id","3", "stringdv","a")); - assertU(adoc("id","4", "stringdv","a")); + assertU(adoc("id", "3", "stringdv", "a")); + assertU(adoc("id", "4", "stringdv", "a")); assertU(commit()); - String expectedResult = "{\n" + - " \"responseHeader\":{\"status\":0},\n" + - " \"response\":{\n" + - " \"numFound\":4,\n" + - " \"docs\":[{\n" + - " \"id\":\"1\"}\n" + - " ,{\n" + - " \"id\":\"2\"}\n" + - " ,{\n" + - " \"id\":\"3\"}\n" + - " ,{\n" + - " \"id\":\"4\"}]}}"; - - String s = h.query(req("q", "*:*", "qt", "/export", "fl", "id", "sort", "stringdv asc")); + String expectedResult = + "{\n" + + " \"responseHeader\":{\"status\":0},\n" + + " \"response\":{\n" + + " \"numFound\":4,\n" + + " \"docs\":[{\n" + + " \"id\":\"1\"}\n" + + " ,{\n" + + " \"id\":\"2\"}\n" + + " ,{\n" + + " \"id\":\"3\"}\n" + + " ,{\n" + + " \"id\":\"4\"}]}}"; + + String s = h.query(req("q", "*:*", "qt", "/export", "fl", "id", "sort", "stringdv asc")); assertJsonEquals(s, expectedResult); - s = h.query(req("q", "*:*", "qt", "/export", "fl", "id", "sort", "stringdv desc")); + s = h.query(req("q", "*:*", "qt", "/export", "fl", "id", "sort", "stringdv desc")); assertJsonEquals(s, expectedResult); - } @Test public void testStringWithCase() throws Exception { clearIndex(); - assertU(adoc("id","1", "stringdv","a")); - assertU(adoc("id","2", "stringdv","ABC")); + assertU(adoc("id", "1", "stringdv", "a")); + assertU(adoc("id", "2", "stringdv", "ABC")); assertU(commit()); - assertU(adoc("id","3", "stringdv","xyz")); - assertU(adoc("id","4", "stringdv","a")); + assertU(adoc("id", "3", "stringdv", "xyz")); + assertU(adoc("id", "4", "stringdv", "a")); assertU(commit()); - String s = h.query(req("q", "*:*", "qt", "/export", "fl", "id", "sort", "stringdv desc")); - assertJsonEquals(s, "{\n" + - " \"responseHeader\":{\"status\":0},\n" + - " \"response\":{\n" + - " \"numFound\":4,\n" + - " \"docs\":[{\n" + - " \"id\":\"3\"}\n" + - " ,{\n" + - " \"id\":\"1\"}\n" + - " ,{\n" + - " \"id\":\"4\"}\n" + - " ,{\n" + - " \"id\":\"2\"}]}}"); + String s = h.query(req("q", "*:*", "qt", "/export", "fl", "id", "sort", "stringdv desc")); + assertJsonEquals( + s, + "{\n" + + " \"responseHeader\":{\"status\":0},\n" + + " \"response\":{\n" + + " \"numFound\":4,\n" + + " \"docs\":[{\n" + + " \"id\":\"3\"}\n" + + " ,{\n" + + " \"id\":\"1\"}\n" + + " ,{\n" + + " \"id\":\"4\"}\n" + + " ,{\n" + + " \"id\":\"2\"}]}}"); } @Test public void testBooleanField() throws Exception { clearIndex(); - assertU(adoc("id","1", - "booleandv","true")); + assertU(adoc("id", "1", "booleandv", "true")); assertU(commit()); - assertU(adoc("id","2", - "booleandv","false")); + assertU(adoc("id", "2", "booleandv", "false")); assertU(commit()); - String s = h.query(req("q", "*:*", "qt", "/export", "fl", "id", "sort", "booleandv asc")); - assertJsonEquals(s, "{\n" + - " \"responseHeader\":{\"status\":0},\n" + - " \"response\":{\n" + - " \"numFound\":2,\n" + - " \"docs\":[{\n" + - " \"id\":\"2\"}\n" + - " ,{\n" + - " \"id\":\"1\"}]}}"); - - s = h.query(req("q", "*:*", "qt", "/export", "fl", "id", "sort", "booleandv desc")); - assertJsonEquals(s, "{\n" + - " 
\"responseHeader\":{\"status\":0},\n" + - " \"response\":{\n" + - " \"numFound\":2,\n" + - " \"docs\":[{\n" + - " \"id\":\"1\"}\n" + - " ,{\n" + - " \"id\":\"2\"}]}}"); + String s = h.query(req("q", "*:*", "qt", "/export", "fl", "id", "sort", "booleandv asc")); + assertJsonEquals( + s, + "{\n" + + " \"responseHeader\":{\"status\":0},\n" + + " \"response\":{\n" + + " \"numFound\":2,\n" + + " \"docs\":[{\n" + + " \"id\":\"2\"}\n" + + " ,{\n" + + " \"id\":\"1\"}]}}"); + + s = h.query(req("q", "*:*", "qt", "/export", "fl", "id", "sort", "booleandv desc")); + assertJsonEquals( + s, + "{\n" + + " \"responseHeader\":{\"status\":0},\n" + + " \"response\":{\n" + + " \"numFound\":2,\n" + + " \"docs\":[{\n" + + " \"id\":\"1\"}\n" + + " ,{\n" + + " \"id\":\"2\"}]}}"); } private void testSortingOutput() throws Exception { - //Test single value DocValue output - String s = h.query(req("q", "id:1", "qt", "/export", "fl", "floatdv,intdv,stringdv,longdv,doubledv", "sort", "intdv asc")); - - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"floatdv\":2.1,\"intdv\":1,\"stringdv\":\"hello world\",\"longdv\":323223232323,\"doubledv\":2344.345}]}}"); - - //Test null value string: - s = h.query(req("q", "id:7", "qt", "/export", "fl", "floatdv,intdv,stringdv,longdv,doubledv", "sort", "intdv asc")); - - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"floatdv\":2.1,\"intdv\":7,\"longdv\":323223232323,\"doubledv\":2344.345}]}}"); - - //Test multiValue docValues output - s = h.query(req("q", "id:1", "qt", "/export", "fl", "intdv_m,floatdv_m,doubledv_m,longdv_m,stringdv_m", "sort", "intdv asc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"intdv_m\":[100,250],\"floatdv_m\":[123.321,345.123],\"doubledv_m\":[3444.222,23232.2],\"longdv_m\":[343332,43434343434],\"stringdv_m\":[\"Everton\",\"liverpool\",\"manchester \\\"city\\\"\"]}]}}"); - - //Test multiValues docValues output with nulls - s = h.query(req("q", "id:7", "qt", "/export", "fl", "intdv_m,floatdv_m,doubledv_m,longdv_m,stringdv_m", "sort", "intdv asc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"floatdv_m\":[123.321,345.123],\"doubledv_m\":[3444.222,23232.2],\"longdv_m\":[343332,43434343434]}]}}"); - - //Test single sort param is working - s = h.query(req("q", "id:(1 2)", "qt", "/export", "fl", "intdv", "sort", "intdv desc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":2, \"docs\":[{\"intdv\":2},{\"intdv\":1}]}}"); - - s = h.query(req("q", "id:(1 2)", "qt", "/export", "fl", "intdv", "sort", "intdv asc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":2, \"docs\":[{\"intdv\":1},{\"intdv\":2}]}}"); + // Test single value DocValue output + String s = + h.query( + req( + "q", + "id:1", + "qt", + "/export", + "fl", + "floatdv,intdv,stringdv,longdv,doubledv", + "sort", + "intdv asc")); + + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"floatdv\":2.1,\"intdv\":1,\"stringdv\":\"hello world\",\"longdv\":323223232323,\"doubledv\":2344.345}]}}"); + + // Test null value string: + s = + h.query( + req( + "q", + "id:7", + "qt", + "/export", + "fl", + "floatdv,intdv,stringdv,longdv,doubledv", + "sort", + "intdv asc")); + + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, 
\"response\":{\"numFound\":1, \"docs\":[{\"floatdv\":2.1,\"intdv\":7,\"longdv\":323223232323,\"doubledv\":2344.345}]}}"); + + // Test multiValue docValues output + s = + h.query( + req( + "q", + "id:1", + "qt", + "/export", + "fl", + "intdv_m,floatdv_m,doubledv_m,longdv_m,stringdv_m", + "sort", + "intdv asc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"intdv_m\":[100,250],\"floatdv_m\":[123.321,345.123],\"doubledv_m\":[3444.222,23232.2],\"longdv_m\":[343332,43434343434],\"stringdv_m\":[\"Everton\",\"liverpool\",\"manchester \\\"city\\\"\"]}]}}"); + + // Test multiValues docValues output with nulls + s = + h.query( + req( + "q", + "id:7", + "qt", + "/export", + "fl", + "intdv_m,floatdv_m,doubledv_m,longdv_m,stringdv_m", + "sort", + "intdv asc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"floatdv_m\":[123.321,345.123],\"doubledv_m\":[3444.222,23232.2],\"longdv_m\":[343332,43434343434]}]}}"); + + // Test single sort param is working + s = h.query(req("q", "id:(1 2)", "qt", "/export", "fl", "intdv", "sort", "intdv desc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":2, \"docs\":[{\"intdv\":2},{\"intdv\":1}]}}"); + + s = h.query(req("q", "id:(1 2)", "qt", "/export", "fl", "intdv", "sort", "intdv asc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":2, \"docs\":[{\"intdv\":1},{\"intdv\":2}]}}"); // Test sort on String will null value. Null value should sort last on desc and first on asc. - s = h.query(req("q", "id:(1 7)", "qt", "/export", "fl", "intdv", "sort", "stringdv desc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":2, \"docs\":[{\"intdv\":1},{\"intdv\":7}]}}"); - - s = h.query(req("q", "id:(1 7)", "qt", "/export", "fl", "intdv", "sort", "stringdv asc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":2, \"docs\":[{\"intdv\":7},{\"intdv\":1}]}}"); - - - //Test multi-sort params - s = h.query(req("q", "id:(1 2)", "qt", "/export", "fl", "intdv", "sort", "floatdv asc,intdv desc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":2, \"docs\":[{\"intdv\":2},{\"intdv\":1}]}}"); - - s = h.query(req("q", "id:(1 2)", "qt", "/export", "fl", "intdv", "sort", "floatdv desc,intdv asc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":2, \"docs\":[{\"intdv\":1},{\"intdv\":2}]}}"); - - //Test three sort fields - s = h.query(req("q", "id:(1 2 3)", "qt", "/export", "fl", "intdv", "sort", "floatdv asc,stringdv asc,intdv desc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":3},{\"intdv\":2},{\"intdv\":1}]}}"); - - //Test three sort fields - s = h.query(req("q", "id:(1 2 3)", "qt", "/export", "fl", "intdv", "sort", "floatdv asc,stringdv desc,intdv asc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":1},{\"intdv\":2},{\"intdv\":3}]}}"); - - //Test four sort fields - s = h.query(req("q", "id:(1 2 3)", "qt", "/export", "fl", "intdv", "sort", "floatdv asc,floatdv desc,floatdv asc,intdv desc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":3},{\"intdv\":2},{\"intdv\":1}]}}"); - - //Test five sort fields - s = h.query(req("q", 
"id:(1 2 3)", "qt", "/export", "fl", "intdv", "sort", "intdv desc,floatdv asc,floatdv desc,floatdv asc,intdv desc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":3},{\"intdv\":2},{\"intdv\":1}]}}"); - s = h.query(req("q", "id:(1 2 3)", "qt", "/export", "fl", "intdv", "sort", "floatdv desc,intdv asc,floatdv desc,floatdv desc,intdv desc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":1},{\"intdv\":2},{\"intdv\":3}]}}"); - - //Test six sort fields - s = h.query(req("q", "id:(1 2 3)", "qt", "/export", "fl", "intdv", "sort", "floatdv asc,intdv desc,floatdv asc,floatdv desc,floatdv asc,intdv asc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":3},{\"intdv\":2},{\"intdv\":1}]}}"); - - //Test seven sort fields - s = h.query(req("q", "id:(1 2 3)", "qt", "/export", "fl", "intdv", "sort", "floatdv desc,intdv asc,floatdv desc,floatdv asc,floatdv desc,floatdv asc,intdv desc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":1},{\"intdv\":2},{\"intdv\":3}]}}"); - - //Test eight sort fields - s = h.query(req("q", "id:(1 2 3)", "qt", "/export", "fl", "intdv", "sort", "floatdv asc,intdv desc,floatdv asc,floatdv desc,floatdv asc,floatdv desc,floatdv asc,intdv asc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":3},{\"intdv\":2},{\"intdv\":1}]}}"); - s = h.query(req("q", "id:(1 2 3)", "qt", "/export", "fl", "intdv", "sort", "intdv asc,floatdv desc,floatdv asc,floatdv desc,floatdv asc,floatdv desc,floatdv asc,intdv desc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":1},{\"intdv\":2},{\"intdv\":3}]}}"); - - //Test nine sort fields - s = h.query(req("q", "id:(1 2 3)", "qt", "/export", "fl", "intdv", "sort", "intdv asc,floatdv desc,floatdv asc,floatdv desc,floatdv asc,floatdv desc,intdv asc,intdv desc,floatdv asc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":1},{\"intdv\":2},{\"intdv\":3}]}}"); - s = h.query(req("q", "id:(1 2 3)", "qt", "/export", "fl", "intdv", "sort", "floatdv asc,intdv desc,floatdv asc,floatdv desc,floatdv asc,floatdv desc,intdv desc,intdv asc,floatdv asc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":3},{\"intdv\":2},{\"intdv\":1}]}}"); - - //Test ten sort fields - s = h.query(req("q", "id:(1 2 3)", "qt", "/export", "fl", "intdv", "sort", "intdv asc,floatdv desc,floatdv asc,floatdv desc,floatdv asc,floatdv desc,intdv asc,intdv desc,floatdv desc,floatdv asc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":1},{\"intdv\":2},{\"intdv\":3}]}}"); - s = h.query(req("q", "id:(1 2 3)", "qt", "/export", "fl", "intdv", "sort", "floatdv asc,intdv desc,floatdv asc,floatdv desc,floatdv asc,floatdv desc,intdv desc,intdv asc,floatdv desc,floatdv asc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":3},{\"intdv\":2},{\"intdv\":1}]}}"); - - s = h.query(req("q", "id:(1 2 3)", "qt", "/export", "fl", "intdv", "sort", "doubledv desc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, 
\"docs\":[{\"intdv\":3},{\"intdv\":1},{\"intdv\":2}]}}"); - - s = h.query(req("q", "intdv:[2 TO 1000]", "qt", "/export", "fl", "intdv", "sort", "doubledv desc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":3},{\"intdv\":7},{\"intdv\":2}]}}"); - - s = h.query(req("q", "stringdv:blah", "qt", "/export", "fl", "intdv", "sort", "doubledv desc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":0, \"docs\":[]}}"); - - s = h.query(req("q", "id:8", "qt", "/export", "fl", "stringdv", "sort", "intdv asc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"stringdv\":\"chello \\\"world\\\"\"}]}}"); + s = h.query(req("q", "id:(1 7)", "qt", "/export", "fl", "intdv", "sort", "stringdv desc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":2, \"docs\":[{\"intdv\":1},{\"intdv\":7}]}}"); + + s = h.query(req("q", "id:(1 7)", "qt", "/export", "fl", "intdv", "sort", "stringdv asc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":2, \"docs\":[{\"intdv\":7},{\"intdv\":1}]}}"); + + // Test multi-sort params + s = + h.query( + req("q", "id:(1 2)", "qt", "/export", "fl", "intdv", "sort", "floatdv asc,intdv desc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":2, \"docs\":[{\"intdv\":2},{\"intdv\":1}]}}"); + + s = + h.query( + req("q", "id:(1 2)", "qt", "/export", "fl", "intdv", "sort", "floatdv desc,intdv asc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":2, \"docs\":[{\"intdv\":1},{\"intdv\":2}]}}"); + + // Test three sort fields + s = + h.query( + req( + "q", + "id:(1 2 3)", + "qt", + "/export", + "fl", + "intdv", + "sort", + "floatdv asc,stringdv asc,intdv desc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":3},{\"intdv\":2},{\"intdv\":1}]}}"); + + // Test three sort fields + s = + h.query( + req( + "q", + "id:(1 2 3)", + "qt", + "/export", + "fl", + "intdv", + "sort", + "floatdv asc,stringdv desc,intdv asc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":1},{\"intdv\":2},{\"intdv\":3}]}}"); + + // Test four sort fields + s = + h.query( + req( + "q", + "id:(1 2 3)", + "qt", + "/export", + "fl", + "intdv", + "sort", + "floatdv asc,floatdv desc,floatdv asc,intdv desc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":3},{\"intdv\":2},{\"intdv\":1}]}}"); + + // Test five sort fields + s = + h.query( + req( + "q", + "id:(1 2 3)", + "qt", + "/export", + "fl", + "intdv", + "sort", + "intdv desc,floatdv asc,floatdv desc,floatdv asc,intdv desc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":3},{\"intdv\":2},{\"intdv\":1}]}}"); + s = + h.query( + req( + "q", + "id:(1 2 3)", + "qt", + "/export", + "fl", + "intdv", + "sort", + "floatdv desc,intdv asc,floatdv desc,floatdv desc,intdv desc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":1},{\"intdv\":2},{\"intdv\":3}]}}"); + + // Test six sort fields + s = + h.query( + req( + "q", + "id:(1 2 3)", + "qt", + "/export", + "fl", + "intdv", + "sort", + "floatdv asc,intdv 
desc,floatdv asc,floatdv desc,floatdv asc,intdv asc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":3},{\"intdv\":2},{\"intdv\":1}]}}"); + + // Test seven sort fields + s = + h.query( + req( + "q", + "id:(1 2 3)", + "qt", + "/export", + "fl", + "intdv", + "sort", + "floatdv desc,intdv asc,floatdv desc,floatdv asc,floatdv desc,floatdv asc,intdv desc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":1},{\"intdv\":2},{\"intdv\":3}]}}"); + + // Test eight sort fields + s = + h.query( + req( + "q", + "id:(1 2 3)", + "qt", + "/export", + "fl", + "intdv", + "sort", + "floatdv asc,intdv desc,floatdv asc,floatdv desc,floatdv asc,floatdv desc,floatdv asc,intdv asc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":3},{\"intdv\":2},{\"intdv\":1}]}}"); + s = + h.query( + req( + "q", + "id:(1 2 3)", + "qt", + "/export", + "fl", + "intdv", + "sort", + "intdv asc,floatdv desc,floatdv asc,floatdv desc,floatdv asc,floatdv desc,floatdv asc,intdv desc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":1},{\"intdv\":2},{\"intdv\":3}]}}"); + + // Test nine sort fields + s = + h.query( + req( + "q", + "id:(1 2 3)", + "qt", + "/export", + "fl", + "intdv", + "sort", + "intdv asc,floatdv desc,floatdv asc,floatdv desc,floatdv asc,floatdv desc,intdv asc,intdv desc,floatdv asc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":1},{\"intdv\":2},{\"intdv\":3}]}}"); + s = + h.query( + req( + "q", + "id:(1 2 3)", + "qt", + "/export", + "fl", + "intdv", + "sort", + "floatdv asc,intdv desc,floatdv asc,floatdv desc,floatdv asc,floatdv desc,intdv desc,intdv asc,floatdv asc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":3},{\"intdv\":2},{\"intdv\":1}]}}"); + + // Test ten sort fields + s = + h.query( + req( + "q", + "id:(1 2 3)", + "qt", + "/export", + "fl", + "intdv", + "sort", + "intdv asc,floatdv desc,floatdv asc,floatdv desc,floatdv asc,floatdv desc,intdv asc,intdv desc,floatdv desc,floatdv asc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":1},{\"intdv\":2},{\"intdv\":3}]}}"); + s = + h.query( + req( + "q", + "id:(1 2 3)", + "qt", + "/export", + "fl", + "intdv", + "sort", + "floatdv asc,intdv desc,floatdv asc,floatdv desc,floatdv asc,floatdv desc,intdv desc,intdv asc,floatdv desc,floatdv asc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":3},{\"intdv\":2},{\"intdv\":1}]}}"); + + s = h.query(req("q", "id:(1 2 3)", "qt", "/export", "fl", "intdv", "sort", "doubledv desc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":3},{\"intdv\":1},{\"intdv\":2}]}}"); + + s = + h.query( + req("q", "intdv:[2 TO 1000]", "qt", "/export", "fl", "intdv", "sort", "doubledv desc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":3, \"docs\":[{\"intdv\":3},{\"intdv\":7},{\"intdv\":2}]}}"); + + s = h.query(req("q", "stringdv:blah", "qt", "/export", "fl", "intdv", "sort", "doubledv desc")); + assertJsonEquals( + s, "{\"responseHeader\": {\"status\": 0}, 
\"response\":{\"numFound\":0, \"docs\":[]}}"); + + s = h.query(req("q", "id:8", "qt", "/export", "fl", "stringdv", "sort", "intdv asc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"stringdv\":\"chello \\\"world\\\"\"}]}}"); // Test sortable text fields: - s = h.query(req("q", "id:(1 OR 3 OR 8)", "qt", "/export", "fl", "sortabledv_m_udvas,sortabledv_udvas", "sort", "sortabledv_udvas asc")); - assertJsonEquals(s, "{\n" + - " \"responseHeader\":{\"status\":0},\n" + - " \"response\":{\n" + - " \"numFound\":3,\n" + - " \"docs\":[{\n" + - " \"sortabledv_m_udvas\":[\"this is some text one_1\"\n" + - " ,\"this is some text three_1\"\n" + - " ,\"this is some text two_1\"]}\n" + - " ,{\n" + - " \"sortabledv_udvas\":\"this is some text_1\"}\n" + - " ,{\n" + - " \"sortabledv_m_udvas\":[\"this is some text one_2\"\n" + - " ,\"this is some text three_2\"\n" + - " ,\"this is some text two_2\"],\n" + - " \"sortabledv_udvas\":\"this is some text_2\"}]}}"); - - s = h.query(req("q", "id:(1 OR 3 OR 8)", "qt", "/export", "fl", "sortabledv_m", "sort", "sortabledv_udvas asc")); - assertTrue("Should have 400 status when exporting sortabledv_m, it does not have useDocValuesAsStored='true'", s.contains("\"status\":400}")); - assertTrue("Should have a cause when exporting sortabledv_m, it does not have useDocValuesAsStored='true'", s.contains("Must have useDocValuesAsStored='true' to be used with export writer")); - - s = h.query(req("q", "id:(1 OR 3 OR 8)", "qt", "/export", "fl", "sortabledv", "sort", "sortabledv_udvas asc")); - assertTrue("Should have 400 status when exporting sortabledv, it does not have useDocValuesAsStored='true'", s.contains("\"status\":400}")); - assertTrue("Should have a cause when exporting sortabledv, it does not have useDocValuesAsStored='true'", s.contains("Must have useDocValuesAsStored='true' to be used with export writer")); - + s = + h.query( + req( + "q", + "id:(1 OR 3 OR 8)", + "qt", + "/export", + "fl", + "sortabledv_m_udvas,sortabledv_udvas", + "sort", + "sortabledv_udvas asc")); + assertJsonEquals( + s, + "{\n" + + " \"responseHeader\":{\"status\":0},\n" + + " \"response\":{\n" + + " \"numFound\":3,\n" + + " \"docs\":[{\n" + + " \"sortabledv_m_udvas\":[\"this is some text one_1\"\n" + + " ,\"this is some text three_1\"\n" + + " ,\"this is some text two_1\"]}\n" + + " ,{\n" + + " \"sortabledv_udvas\":\"this is some text_1\"}\n" + + " ,{\n" + + " \"sortabledv_m_udvas\":[\"this is some text one_2\"\n" + + " ,\"this is some text three_2\"\n" + + " ,\"this is some text two_2\"],\n" + + " \"sortabledv_udvas\":\"this is some text_2\"}]}}"); + + s = + h.query( + req( + "q", + "id:(1 OR 3 OR 8)", + "qt", + "/export", + "fl", + "sortabledv_m", + "sort", + "sortabledv_udvas asc")); + assertTrue( + "Should have 400 status when exporting sortabledv_m, it does not have useDocValuesAsStored='true'", + s.contains("\"status\":400}")); + assertTrue( + "Should have a cause when exporting sortabledv_m, it does not have useDocValuesAsStored='true'", + s.contains("Must have useDocValuesAsStored='true' to be used with export writer")); + + s = + h.query( + req( + "q", + "id:(1 OR 3 OR 8)", + "qt", + "/export", + "fl", + "sortabledv", + "sort", + "sortabledv_udvas asc")); + assertTrue( + "Should have 400 status when exporting sortabledv, it does not have useDocValuesAsStored='true'", + s.contains("\"status\":400}")); + assertTrue( + "Should have a cause when exporting sortabledv, it does not have useDocValuesAsStored='true'", + 
s.contains("Must have useDocValuesAsStored='true' to be used with export writer")); } private void assertJsonEquals(String actual, String expected) { - assertEquals(Utils.toJSONString(Utils.fromJSONString(expected)), Utils.toJSONString(Utils.fromJSONString(actual))); + assertEquals( + Utils.toJSONString(Utils.fromJSONString(expected)), + Utils.toJSONString(Utils.fromJSONString(actual))); } private void testExportRequiredParams() throws Exception { - //Test whether missing required parameters returns expected errors. + // Test whether missing required parameters returns expected errors. - //String s = h.query(req("q", "id:1", "qt", "/export", "fl", "floatdv,intdv,stringdv,longdv,doubledv", "sort", "intdv asc")); + // String s = h.query(req("q", "id:1", "qt", "/export", "fl", + // "floatdv,intdv,stringdv,longdv,doubledv", "sort", "intdv asc")); String s; s = h.query(req("qt", "/export")); assertTrue("Should have had a sort error", s.contains("No sort criteria")); @@ -558,53 +993,68 @@ private void testExportRequiredParams() throws Exception { assertTrue("Should have had fl error", s.contains("export field list (fl) must be specified")); s = h.query(req("sort", "intdv asc", "qt", "/export", "fl", "stringdv")); // Interesting you don't even need to specify a "q" parameter. - + } private void testDates() throws Exception { - String s = h.query(req("q", "id:1", "qt", "/export", "fl", "datedv", "sort", "datedv asc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"datedv\":\"2017-06-16T07:00:00Z\"}]}}"); - s = h.query(req("q", "id:1", "qt", "/export", "fl", "datedv_m", "sort", "datedv asc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"datedv_m\":[\"2017-06-16T01:00:00Z\",\"2017-06-16T02:00:00Z\",\"2017-06-16T03:00:00Z\",\"2017-06-16T04:00:00Z\"]}]}}"); + String s = h.query(req("q", "id:1", "qt", "/export", "fl", "datedv", "sort", "datedv asc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"datedv\":\"2017-06-16T07:00:00Z\"}]}}"); + s = h.query(req("q", "id:1", "qt", "/export", "fl", "datedv_m", "sort", "datedv asc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"datedv_m\":[\"2017-06-16T01:00:00Z\",\"2017-06-16T02:00:00Z\",\"2017-06-16T03:00:00Z\",\"2017-06-16T04:00:00Z\"]}]}}"); } private void testDuplicates() throws Exception { // see SOLR-10924 - String expected = h.getCore().getLatestSchema().getField("int_is_t").getType().isPointField() - ? "1,1,1,1" : "1"; - String s = h.query(req("q", "id:3", "qt", "/export", "fl", "int_is_t", "sort", "intdv asc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"int_is_t\":["+expected+"]}]}}"); - expected = h.getCore().getLatestSchema().getField("int_is_p").getType().isPointField() - ? "1,1,1,1" : "1"; - s = h.query(req("q", "id:8", "qt", "/export", "fl", "int_is_p", "sort", "intdv asc")); - assertJsonEquals(s, "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"int_is_p\":[1,1,1,1]}]}}"); + String expected = + h.getCore().getLatestSchema().getField("int_is_t").getType().isPointField() + ? 
"1,1,1,1" + : "1"; + String s = h.query(req("q", "id:3", "qt", "/export", "fl", "int_is_t", "sort", "intdv asc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"int_is_t\":[" + + expected + + "]}]}}"); + expected = + h.getCore().getLatestSchema().getField("int_is_p").getType().isPointField() + ? "1,1,1,1" + : "1"; + s = h.query(req("q", "id:8", "qt", "/export", "fl", "int_is_p", "sort", "intdv asc")); + assertJsonEquals( + s, + "{\"responseHeader\": {\"status\": 0}, \"response\":{\"numFound\":1, \"docs\":[{\"int_is_p\":[1,1,1,1]}]}}"); } - + /** - * This test doesn't validate the correctness of results, it just compares the response of the same request - * when asking for Trie fields vs Point fields. Can be removed once Trie fields are no longer supported + * This test doesn't validate the correctness of results, it just compares the response of the + * same request when asking for Trie fields vs Point fields. Can be removed once Trie fields are + * no longer supported */ @Test - @SuppressForbidden(reason="using new Date(time) to create random dates") + @SuppressForbidden(reason = "using new Date(time) to create random dates") public void testRandomNumerics() throws Exception { SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.ROOT); assertU(delQ("*:*")); assertU(commit()); List trieFields = new ArrayList(); List pointFields = new ArrayList(); - for (String mv:new String[]{"s", ""}) { - for (String indexed:new String[]{"_ni", ""}) { - for (String type:new String[]{"i", "l", "f", "d", "dt"}) { + for (String mv : new String[] {"s", ""}) { + for (String indexed : new String[] {"_ni", ""}) { + for (String type : new String[] {"i", "l", "f", "d", "dt"}) { String field = "number_" + type + mv + indexed; SchemaField sf = h.getCore().getLatestSchema().getField(field + "_t"); assertTrue(sf.hasDocValues()); assertTrue(sf.getType().getNumberType() != null); - + sf = h.getCore().getLatestSchema().getField(field + "_p"); assertTrue(sf.hasDocValues()); assertTrue(sf.getType().getNumberType() != null); assertTrue(sf.getType().isPointField()); - + trieFields.add(field + "_t"); pointFields.add(field + "_p"); } @@ -612,22 +1062,22 @@ public void testRandomNumerics() throws Exception { } for (int i = 0; i < atLeast(100); i++) { if (random().nextInt(20) == 0) { - //have some empty docs + // have some empty docs assertU(adoc("id", String.valueOf(i))); continue; } if (random().nextInt(20) == 0 && i > 0) { - //delete some docs + // delete some docs assertU(delI(String.valueOf(i - 1))); } - + SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", String.valueOf(i)); addInt(doc, random().nextInt(), false); addLong(doc, random().nextLong(), false); - addFloat(doc, random().nextFloat() * 3000 * (random().nextBoolean()?1:-1), false); - addDouble(doc, random().nextDouble() * 3000 * (random().nextBoolean()?1:-1), false); + addFloat(doc, random().nextFloat() * 3000 * (random().nextBoolean() ? 1 : -1), false); + addDouble(doc, random().nextDouble() * 3000 * (random().nextBoolean() ? 
1 : -1), false); addDate(doc, dateFormat.format(new Date()), false); // MV need to be unique in order to be the same in Trie vs Points @@ -636,19 +1086,19 @@ public void testRandomNumerics() throws Exception { Set floats = new HashSet<>(); Set doubles = new HashSet<>(); Set dates = new HashSet<>(); - for (int j=0; j < random().nextInt(20); j++) { + for (int j = 0; j < random().nextInt(20); j++) { ints.add(random().nextInt()); longs.add(random().nextLong()); - floats.add(random().nextFloat() * 3000 * (random().nextBoolean()?1:-1)); - doubles.add(random().nextDouble() * 3000 * (random().nextBoolean()?1:-1)); + floats.add(random().nextFloat() * 3000 * (random().nextBoolean() ? 1 : -1)); + doubles.add(random().nextDouble() * 3000 * (random().nextBoolean() ? 1 : -1)); dates.add(dateFormat.format(new Date(System.currentTimeMillis() + random().nextInt()))); } - ints.stream().forEach((val)->addInt(doc, val, true)); - longs.stream().forEach((val)->addLong(doc, val, true)); - floats.stream().forEach((val)->addFloat(doc, val, true)); - doubles.stream().forEach((val)->addDouble(doc, val, true)); - dates.stream().forEach((val)->addDate(doc, val, true)); - + ints.stream().forEach((val) -> addInt(doc, val, true)); + longs.stream().forEach((val) -> addLong(doc, val, true)); + floats.stream().forEach((val) -> addFloat(doc, val, true)); + doubles.stream().forEach((val) -> addDouble(doc, val, true)); + dates.stream().forEach((val) -> addDate(doc, val, true)); + assertU(adoc(doc)); if (random().nextInt(20) == 0) { assertU(commit()); @@ -657,7 +1107,8 @@ public void testRandomNumerics() throws Exception { assertU(commit()); doTestQuery("id:1", trieFields, pointFields); doTestQuery("*:*", trieFields, pointFields); - doTestQuery("id:[0 TO 2]", trieFields, pointFields);// "id" field is really a string, this is not a numeric range query + // "id" field is really a string, this is not a numeric range query + doTestQuery("id:[0 TO 2]", trieFields, pointFields); doTestQuery("id:[0 TO 9]", trieFields, pointFields); doTestQuery("id:DOES_NOT_EXIST", trieFields, pointFields); } @@ -669,9 +1120,9 @@ public void testMultipleSorts() throws Exception { int numDocs = 1000; - //10 unique values + // 10 unique values String[] str_vals = new String[10]; - for (int i=0; i rspMap = mapper.readValue(rsp, HashMap.class); - List> docs = (List>) Utils.getObjectByPath(rspMap, false, "/response/docs"); + List> docs = + (List>) Utils.getObjectByPath(rspMap, false, "/response/docs"); assertNotNull("missing document results: " + rspMap, docs); assertEquals("wrong number of unique docs", 101, docs.size()); for (int i = 0; i < 100; i++) { @@ -756,7 +1243,18 @@ public void testExpr() throws Exception { } assertTrue("missing value " + i + " in results", found); } - req = req("q", "*:*", "qt", "/export", "fl", "id,sortabledv_udvas,small_i_p", "sort", "sortabledv_udvas asc", "expr", "rollup(input(),over=\"sortabledv_udvas\", sum(small_i_p),avg(small_i_p),min(small_i_p),count(*))"); + req = + req( + "q", + "*:*", + "qt", + "/export", + "fl", + "id,sortabledv_udvas,small_i_p", + "sort", + "sortabledv_udvas asc", + "expr", + "rollup(input(),over=\"sortabledv_udvas\", sum(small_i_p),avg(small_i_p),min(small_i_p),count(*))"); rsp = h.query(req); rspMap = mapper.readValue(rsp, HashMap.class); docs = (List>) Utils.getObjectByPath(rspMap, false, "/response/docs"); @@ -764,33 +1262,63 @@ public void testExpr() throws Exception { assertEquals("wrong number of unique docs", 101, docs.size()); for (Map doc : docs) { assertNotNull("missing sum: " + doc, 
           doc.get("sum(small_i_p)"));
-      assertEquals(18000.0, ((Number)doc.get("sum(small_i_p)")).doubleValue(), 1000.0);
+      assertEquals(18000.0, ((Number) doc.get("sum(small_i_p)")).doubleValue(), 1000.0);
       assertNotNull("missing avg: " + doc, doc.get("avg(small_i_p)"));
-      assertEquals(18.0, ((Number)doc.get("avg(small_i_p)")).doubleValue(), 1.0);
+      assertEquals(18.0, ((Number) doc.get("avg(small_i_p)")).doubleValue(), 1.0);
       assertNotNull("missing count: " + doc, doc.get("count(*)"));
-      assertEquals(1000.0, ((Number)doc.get("count(*)")).doubleValue(), 100.0);
+      assertEquals(1000.0, ((Number) doc.get("count(*)")).doubleValue(), 100.0);
     }
 
     // try invalid field types
-    req = req("q", "*:*", "qt", "/export", "fl", "id,sortabledv,small_i_p", "sort", "sortabledv asc", "expr", "unique(input(),over=\"sortabledv\")");
+    req =
+        req(
+            "q",
+            "*:*",
+            "qt",
+            "/export",
+            "fl",
+            "id,sortabledv,small_i_p",
+            "sort",
+            "sortabledv asc",
+            "expr",
+            "unique(input(),over=\"sortabledv\")");
     rsp = h.query(req);
     rspMap = mapper.readValue(rsp, HashMap.class);
-    assertEquals("wrong response status", 400, ((Number)Utils.getObjectByPath(rspMap, false, "/responseHeader/status")).intValue());
+    assertEquals(
+        "wrong response status",
+        400,
+        ((Number) Utils.getObjectByPath(rspMap, false, "/responseHeader/status")).intValue());
     docs = (List>) Utils.getObjectByPath(rspMap, false, "/response/docs");
     assertEquals("wrong number of docs", 1, docs.size());
     Map doc = docs.get(0);
     assertTrue("doc doesn't have exception", doc.containsKey(StreamParams.EXCEPTION));
-    assertTrue("wrong exception message", doc.get(StreamParams.EXCEPTION).toString().contains("Must have useDocValuesAsStored='true'"));
+    assertTrue(
+        "wrong exception message",
+        doc.get(StreamParams.EXCEPTION)
+            .toString()
+            .contains("Must have useDocValuesAsStored='true'"));
   }
 
   @SuppressWarnings("rawtypes")
   private void validateSort(int numDocs) throws Exception {
     // 10 fields
-    List fieldNames = new ArrayList<>(Arrays.asList("floatdv", "intdv", "stringdv", "longdv", "doubledv",
-        "datedv", "booleandv", "field1_s_dv", "field2_i_p", "field3_l_p"));
+    List fieldNames =
+        new ArrayList<>(
+            Arrays.asList(
+                "floatdv",
+                "intdv",
+                "stringdv",
+                "longdv",
+                "doubledv",
+                "datedv",
+                "booleandv",
+                "field1_s_dv",
+                "field2_i_p",
+                "field3_l_p"));
     SortFields[] fieldSorts = new SortFields[TestUtil.nextInt(random(), 1, fieldNames.size())];
     for (int i = 0; i < fieldSorts.length; i++) {
-      fieldSorts[i] = new SortFields(fieldNames.get(TestUtil.nextInt(random(), 0, fieldNames.size() - 1)));
+      fieldSorts[i] =
+          new SortFields(fieldNames.get(TestUtil.nextInt(random(), 0, fieldNames.size() - 1)));
       fieldNames.remove(fieldSorts[i].getField());
     }
     String[] fieldWithOrderStrs = new String[fieldSorts.length];
@@ -803,19 +1331,36 @@ private void validateSort(int numDocs) throws Exception {
     String sortStr = String.join(",", fieldWithOrderStrs); // sort : field1 asc, field2 desc
     String fieldsStr = String.join(",", fieldStrs); // fl : field1, field2
-    String resp = h.query(req("q", "*:*", "qt", "/export", "fl", "id," + fieldsStr, "sort", sortStr));
+    String resp =
+        h.query(req("q", "*:*", "qt", "/export", "fl", "id," + fieldsStr, "sort", sortStr));
     HashMap respMap = mapper.readValue(resp, HashMap.class);
     List docs = (ArrayList) ((HashMap) respMap.get("response")).get("docs");
-    SolrQueryRequest selectReq = req("q", "*:*", "qt", "/select", "fl", "id," + fieldsStr, "sort", sortStr, "rows", Integer.toString(numDocs), "wt", "json");
+    SolrQueryRequest selectReq =
+        req(
+            "q",
+            "*:*",
+            "qt",
+            "/select",
+            "fl",
+            "id," + fieldsStr,
+            "sort",
+            sortStr,
+            "rows",
+            Integer.toString(numDocs),
+            "wt",
+            "json");
     String response = h.query(selectReq);
-    Map rsp = (Map)Utils.fromJSONString(response);
-    List doclist = (List)(((Map)rsp.get("response")).get("docs"));
+    Map rsp = (Map) Utils.fromJSONString(response);
+    List doclist = (List) (((Map) rsp.get("response")).get("docs"));
 
     assert docs.size() == numDocs;
 
     for (int i = 0; i < docs.size() - 1; i++) { // docs..
-      assertEquals("Position:" + i + " has different id value" , ((LinkedHashMap)doclist.get(i)).get("id"), String.valueOf(((HashMap) docs.get(i)).get("id")));
+      assertEquals(
+          "Position:" + i + " has different id value",
+          ((LinkedHashMap) doclist.get(i)).get("id"),
+          String.valueOf(((HashMap) docs.get(i)).get("id")));
 
       for (int j = 0; j < fieldSorts.length; j++) { // fields ..
         String field = fieldSorts[j].getField();
@@ -826,27 +1371,35 @@ private void validateSort(int numDocs) throws Exception {
           continue;
         } else {
           if (sort.equals("asc")) {
-            if (field.equals("stringdv") || field.equals("field1_s_dv")|| field.equals("datedv") || field.equals("booleandv")) { // use string comparator
+            if (field.equals("stringdv")
+                || field.equals("field1_s_dv")
+                || field.equals("datedv")
+                || field.equals("booleandv")) { // use string comparator
               assertTrue(fieldVal1.compareTo(fieldVal2) < 0);
-            } else if (field.equals("doubledv")){
+            } else if (field.equals("doubledv")) {
               assertTrue(Double.compare(Double.valueOf(fieldVal1), Double.valueOf(fieldVal2)) <= 0);
-            } else if(field.equals("floatdv")) {
+            } else if (field.equals("floatdv")) {
               assertTrue(Float.compare(Float.valueOf(fieldVal1), Float.valueOf(fieldVal2)) <= 0);
-            } else if(field.equals("intdv") || "field2_i_p".equals(field)) {
-              assertTrue(Integer.compare(Integer.valueOf(fieldVal1), Integer.valueOf(fieldVal2)) <= 0);
-            } else if(field.equals("longdv") || field.equals("field3_l_p")) {
+            } else if (field.equals("intdv") || "field2_i_p".equals(field)) {
+              assertTrue(
+                  Integer.compare(Integer.valueOf(fieldVal1), Integer.valueOf(fieldVal2)) <= 0);
+            } else if (field.equals("longdv") || field.equals("field3_l_p")) {
               assertTrue(Long.compare(Integer.valueOf(fieldVal1), Long.valueOf(fieldVal2)) <= 0);
             }
           } else {
-            if (field.equals("stringdv") || field.equals("field1_s_dv")|| field.equals("datedv") || field.equals("booleandv")) { // use string comparator
+            if (field.equals("stringdv")
+                || field.equals("field1_s_dv")
+                || field.equals("datedv")
+                || field.equals("booleandv")) { // use string comparator
               assertTrue(fieldVal1.compareTo(fieldVal2) > 0);
-            } else if (field.equals("doubledv")){
+            } else if (field.equals("doubledv")) {
               assertTrue(Double.compare(Double.valueOf(fieldVal1), Double.valueOf(fieldVal2)) >= 0);
-            } else if(field.equals("floatdv")) {
+            } else if (field.equals("floatdv")) {
               assertTrue(Float.compare(Float.valueOf(fieldVal1), Float.valueOf(fieldVal2)) >= 0);
-            } else if(field.equals("intdv") || "field2_i_p".equals(field)) {
-              assertTrue(Integer.compare(Integer.valueOf(fieldVal1), Integer.valueOf(fieldVal2)) >= 0);
+            } else if (field.equals("intdv") || "field2_i_p".equals(field)) {
+              assertTrue(
+                  Integer.compare(Integer.valueOf(fieldVal1), Integer.valueOf(fieldVal2)) >= 0);
+            } else if (field.equals("longdv") || field.equals("field3_l_p")) {
               assertTrue(Long.compare(Integer.valueOf(fieldVal1), Long.valueOf(fieldVal2)) >= 0);
             }
           }
@@ -879,12 +1432,20 @@ public String getSort() {
     }
   }
 
-  private void doTestQuery(String query, List trieFields, List pointFields) throws Exception {
+  private void doTestQuery(String query, List trieFields, List pointFields)
+      throws Exception {
     String trieFieldsFl = String.join(",", trieFields);
     String pointFieldsFl = String.join(",", pointFields);
-    String sort = pickRandom((String)pickRandom(trieFields.toArray()), (String)pickRandom(pointFields.toArray())).replace("s_", "_") + pickRandom(" asc", " desc");
-    String resultPoints = h.query(req("q", query, "qt", "/export", "fl", pointFieldsFl, "sort", sort));
-    String resultTries = h.query(req("q", query, "qt", "/export", "fl", trieFieldsFl, "sort", sort));
+    String sort =
+        pickRandom(
+                (String) pickRandom(trieFields.toArray()),
+                (String) pickRandom(pointFields.toArray()))
+            .replace("s_", "_")
+            + pickRandom(" asc", " desc");
+    String resultPoints =
+        h.query(req("q", query, "qt", "/export", "fl", pointFieldsFl, "sort", sort));
+    String resultTries =
+        h.query(req("q", query, "qt", "/export", "fl", trieFieldsFl, "sort", sort));
     assertJsonEquals(resultPoints.replaceAll("_p", ""), resultTries.replaceAll("_t", ""));
   }
 
@@ -903,16 +1464,15 @@ private void addLong(SolrInputDocument doc, long value, boolean mv) {
   private void addInt(SolrInputDocument doc, int value, boolean mv) {
     addField(doc, "i", String.valueOf(value), mv);
   }
-  
+
   private void addDate(SolrInputDocument doc, String value, boolean mv) {
     addField(doc, "dt", value, mv);
   }
-  
+
   private void addField(SolrInputDocument doc, String type, String value, boolean mv) {
-    doc.addField("number_" + type + (mv?"s":"") + "_t", value);
-    doc.addField("number_" + type + (mv?"s":"") + "_p", value);
-    doc.addField("number_" + type + (mv?"s":"") + "_ni_t", value);
-    doc.addField("number_" + type + (mv?"s":"") + "_ni_p", value);
+    doc.addField("number_" + type + (mv ? "s" : "") + "_t", value);
+    doc.addField("number_" + type + (mv ? "s" : "") + "_p", value);
+    doc.addField("number_" + type + (mv ? "s" : "") + "_ni_t", value);
+    doc.addField("number_" + type + (mv ? "s" : "") + "_ni_p", value);
   }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/handler/loader/JavabinLoaderTest.java b/solr/core/src/test/org/apache/solr/handler/loader/JavabinLoaderTest.java
index 1f3e26f66fd..40bf640da08 100644
--- a/solr/core/src/test/org/apache/solr/handler/loader/JavabinLoaderTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/loader/JavabinLoaderTest.java
@@ -21,7 +21,6 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
 import org.apache.solr.client.solrj.request.JavaBinUpdateRequestCodec;
@@ -38,11 +37,12 @@ public class JavabinLoaderTest extends SolrTestCaseJ4 {
 
   @BeforeClass
   public static void beforeTests() throws Exception {
-    initCore("solrconfig.xml","schema.xml");
+    initCore("solrconfig.xml", "schema.xml");
   }
 
   /**
-   * Verifies the isLastDocInBatch flag gets set correctly for a batch of docs and for a request with a single doc.
+   * Verifies the isLastDocInBatch flag gets set correctly for a batch of docs and for a request
+   * with a single doc.
    */
   public void testLastDocInBatchFlag() throws Exception {
     doTestLastDocInBatchFlag(1); // single doc
@@ -51,7 +51,7 @@ public void testLastDocInBatchFlag() throws Exception {
 
   protected void doTestLastDocInBatchFlag(int numDocsInBatch) throws Exception {
     List batch = new ArrayList<>(numDocsInBatch);
-    for (int d=0; d < numDocsInBatch; d++) {
+    for (int d = 0; d < numDocsInBatch; d++) {
       SolrInputDocument doc = new SolrInputDocument();
       doc.setField("id", String.valueOf(d));
       batch.add(doc);
@@ -69,27 +69,31 @@ protected void doTestLastDocInBatchFlag(int numDocsInBatch) throws Exception {
     (new JavaBinUpdateRequestCodec()).marshal(updateRequest, os);
 
     // need to override the processAdd method b/c JavabinLoader calls
-    // clear on the addCmd after it is passed on to the handler ... a simple clone will suffice for this test
-    BufferingRequestProcessor mockUpdateProcessor = new BufferingRequestProcessor(null) {
-      @Override
-      public void processAdd(AddUpdateCommand cmd) throws IOException {
-        addCommands.add((AddUpdateCommand)cmd.clone());
-      }
-    };
+    // clear on the addCmd after it is passed on to the handler ... a simple clone will suffice for
+    // this test
+    BufferingRequestProcessor mockUpdateProcessor =
+        new BufferingRequestProcessor(null) {
+          @Override
+          public void processAdd(AddUpdateCommand cmd) throws IOException {
+            addCommands.add((AddUpdateCommand) cmd.clone());
+          }
+        };
 
     SolrQueryRequest req = req();
-    (new JavabinLoader()).load(req,
-        new SolrQueryResponse(),
-        new ContentStreamBase.ByteArrayStream(os.toByteArray(), "test"),
-        mockUpdateProcessor);
+    (new JavabinLoader())
+        .load(
+            req,
+            new SolrQueryResponse(),
+            new ContentStreamBase.ByteArrayStream(os.toByteArray(), "test"),
+            mockUpdateProcessor);
     req.close();
 
     assertTrue(mockUpdateProcessor.addCommands.size() == numDocsInBatch);
-    for (int i=0; i < numDocsInBatch-1; i++)
+    for (int i = 0; i < numDocsInBatch - 1; i++)
       assertFalse(mockUpdateProcessor.addCommands.get(i).isLastDocInBatch); // not last doc in batch
     // last doc should have the flag set
-    assertTrue(mockUpdateProcessor.addCommands.get(batch.size()-1).isLastDocInBatch);
+    assertTrue(mockUpdateProcessor.addCommands.get(batch.size() - 1).isLastDocInBatch);
   }
 
   @Test
@@ -98,15 +102,18 @@ public void javabinLoader_denseVector_shouldIndexCorrectly() throws Exception {
     doc1.addField("id", "555");
     doc1.addField("vector", Arrays.asList(1.4f, 2.4f, 3.4f, 4.4f));
 
-    EmbeddedSolrServer solrJClient = new EmbeddedSolrServer(h.getCoreContainer(), "collection1", EmbeddedSolrServer.RequestWriterSupplier.JavaBin);
+    EmbeddedSolrServer solrJClient =
+        new EmbeddedSolrServer(
+            h.getCoreContainer(), "collection1", EmbeddedSolrServer.RequestWriterSupplier.JavaBin);
     solrJClient.add(doc1);
     solrJClient.commit();
 
-    assertQ(req("q", "id:555", "fl", "vector"), "*[count(//doc)=1]",
-        "//result/doc[1]/arr[@name=\"vector\"]/float[1][.='" + 1.4 + "']",
-        "//result/doc[1]/arr[@name=\"vector\"]/float[2][.='" + 2.4 + "']",
-        "//result/doc[1]/arr[@name=\"vector\"]/float[3][.='" + 3.4 + "']",
-        "//result/doc[1]/arr[@name=\"vector\"]/float[4][.='" + 4.4 + "']"
-    );
+    assertQ(
+        req("q", "id:555", "fl", "vector"),
+        "*[count(//doc)=1]",
+        "//result/doc[1]/arr[@name=\"vector\"]/float[1][.='" + 1.4 + "']",
+        "//result/doc[1]/arr[@name=\"vector\"]/float[2][.='" + 2.4 + "']",
+        "//result/doc[1]/arr[@name=\"vector\"]/float[3][.='" + 3.4 + "']",
+        "//result/doc[1]/arr[@name=\"vector\"]/float[4][.='" + 4.4 + "']");
  }
 }
diff --git a/solr/core/src/test/org/apache/solr/handler/tagger/EmbeddedSolrNoSerializeTest.java b/solr/core/src/test/org/apache/solr/handler/tagger/EmbeddedSolrNoSerializeTest.java
index f82af9a9dca..21a68296f29 100644
--- a/solr/core/src/test/org/apache/solr/handler/tagger/EmbeddedSolrNoSerializeTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/tagger/EmbeddedSolrNoSerializeTest.java
@@ -27,7 +27,6 @@
 import java.util.Collections;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.BiFunction;
-
 import org.apache.lucene.document.Field;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -48,10 +47,7 @@
 import org.junit.Ignore;
 import org.junit.Test;
 
-/**
- * Tests that we can skip serialization of the documents when embedding
- * Solr.
- */
+/** Tests that we can skip serialization of the documents when embedding Solr. */
 public class EmbeddedSolrNoSerializeTest extends SolrTestCaseJ4 {
 
   static EmbeddedSolrServer solrServer;
@@ -60,7 +56,7 @@ public class EmbeddedSolrNoSerializeTest extends SolrTestCaseJ4 {
   public static void init() throws Exception {
     initCore("solrconfig-tagger.xml", "schema-tagger.xml");
     solrServer = new EmbeddedSolrServer(h.getCoreContainer(), "collection1");
-    //we don't need to close the EmbeddedSolrServer because SolrTestCaseJ4 closes the core
+    // we don't need to close the EmbeddedSolrServer because SolrTestCaseJ4 closes the core
   }
 
   @AfterClass
@@ -79,12 +75,12 @@ public void setUp() throws Exception {
   @Test
   public void testTag() throws SolrServerException, IOException {
     ModifiableSolrParams params = params();
-    String input = "foo boston bar";//just one tag;
+    String input = "foo boston bar"; // just one tag;
     QueryRequest req = new SolrTaggerRequest(params, input);
     req.setPath("/tag");
 
     QueryResponse rsp = req.process(solrServer);
-    SolrDocumentList results= (SolrDocumentList) rsp.getResponse().get("response");
+    SolrDocumentList results = (SolrDocumentList) rsp.getResponse().get("response");
     assertNotNull(rsp.getResponse().get("tags"));
     assertNotNull(results.get(0));
   }
@@ -128,33 +124,35 @@ public void testAssertTagStreamingWithSolrTaggerRequest() throws Exception {
   @Ignore("As of Solr 7, stream.body is disabled by default for security ") // DWS: dubious, IMO
   // and it can't be enabled with EmbeddedSolrServer until SOLR-12126
   public void testAssertTagStreamingWithStreamBodyParam() throws Exception {
-    doTestAssertTagStreaming((params, input) -> {
-      params.set("stream.body", input);
-      return new QueryRequest(params);
-    });
+    doTestAssertTagStreaming(
+        (params, input) -> {
+          params.set("stream.body", input);
+          return new QueryRequest(params);
+        });
   }
 
-  public void doTestAssertTagStreaming(BiFunction newQueryRequest) throws IOException, SolrServerException {
+  public void doTestAssertTagStreaming(
+      BiFunction newQueryRequest)
+      throws IOException, SolrServerException {
     ModifiableSolrParams params = params();
-    String input = "foo boston bar";//just one tag;
+    String input = "foo boston bar"; // just one tag;
     QueryRequest req = newQueryRequest.apply(params, input);
     req.setPath("/tag");
 
     final AtomicReference refDoc = new AtomicReference<>();
-    req.setStreamingResponseCallback(new StreamingResponseCallback() {
-      @Override
-      public void streamSolrDocument(SolrDocument doc) {
-        refDoc.set(doc);
-      }
-
-      @Override
-      public void streamDocListInfo(long numFound, long start, Float maxScore) {
-
-      }
-    });
+    req.setStreamingResponseCallback(
+        new StreamingResponseCallback() {
+          @Override
+          public void streamSolrDocument(SolrDocument doc) {
+            refDoc.set(doc);
+          }
+
+          @Override
+          public void streamDocListInfo(long numFound, long start, Float maxScore) {}
+        });
     QueryResponse rsp = req.process(solrServer);
     assertNotNull(rsp.getResponse().get("tags"));
     assertNotNull(refDoc.get());
-    assertEquals("Boston", ((Field)refDoc.get().getFieldValue("name")).stringValue());
+    assertEquals("Boston", ((Field) refDoc.get().getFieldValue("name")).stringValue());
   }
 }
diff --git a/solr/core/src/test/org/apache/solr/handler/tagger/RandomizedTaggerTest.java b/solr/core/src/test/org/apache/solr/handler/tagger/RandomizedTaggerTest.java
index cb742a87a8c..f4b8368dfb5 100644
--- a/solr/core/src/test/org/apache/solr/handler/tagger/RandomizedTaggerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/tagger/RandomizedTaggerTest.java
@@ -22,23 +22,20 @@
 
 package org.apache.solr.handler.tagger;
 
+import com.carrotsearch.randomizedtesting.annotations.Repeat;
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
+import com.carrotsearch.randomizedtesting.generators.RandomPicks;
+import com.carrotsearch.randomizedtesting.generators.RandomStrings;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Locale;
 import java.util.Random;
 import java.util.Set;
-
-import com.carrotsearch.randomizedtesting.annotations.Repeat;
-import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
-import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-import com.carrotsearch.randomizedtesting.generators.RandomStrings;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-/**
- * Randomly generate taggable text and verify via simple tag algorithm.
- */
+/** Randomly generate taggable text and verify via simple tag algorithm. */
 @Repeat(iterations = 10)
 public class RandomizedTaggerTest extends TaggerTestCase {
 
@@ -52,26 +49,24 @@ public void test() throws Exception {
     final Random R = random();
 
     Set names = new HashSet<>();
-    //random list of single-word names
-    final int NUM_SINGLES = 4;//RandomInts.randomIntBetween(R, 1, 5);
+    // random list of single-word names
+    final int NUM_SINGLES = 4; // RandomInts.randomIntBetween(R, 1, 5);
     for (int i = 0; i < NUM_SINGLES; i++) {
-      if (i == 0)//first is a big string (perhaps triggers bugs related to growing buffers)
-        names.add(randomStringOfLength(16, 32));
-      else
-        names.add(randomString());
+      if (i == 0) // first is a big string (perhaps triggers bugs related to growing buffers)
+        names.add(randomStringOfLength(16, 32));
+      else names.add(randomString());
     }
 
-    //add random list of multi-word names, partially including existing names
+    // add random list of multi-word names, partially including existing names
     final int NUM_MULTI = 10;
     for (int i = 0; i < NUM_MULTI; i++) {
       final int numWords = RandomNumbers.randomIntBetween(R, 2, 4);
       StringBuilder buf = new StringBuilder();
       for (int j = 0; j < numWords; j++) {
-        if (j != 0)
-          buf.append(' ');
-        if (R.nextBoolean()) {//new likely non-existent word
+        if (j != 0) buf.append(' ');
+        if (R.nextBoolean()) { // new likely non-existent word
           buf.append(randomString());
-        } else {//existing word (possible multi-word from prev iteration)
+        } else { // existing word (possible multi-word from prev iteration)
           buf.append(RandomPicks.randomFrom(R, names));
         }
       }
@@ -86,14 +81,14 @@ public void test() throws Exception {
     // Build up random input, similar to multi-word random names above
     StringBuilder input = new StringBuilder();
     final int INPUT_WORD_LEN = 20;
-    input.append(' ');//must start with space based on assertBruteForce logic
+    input.append(' '); // must start with space based on assertBruteForce logic
     for (int i = 0; i < INPUT_WORD_LEN; i++) {
-      if (R.nextBoolean()) {//new likely non-existent word
+      if (R.nextBoolean()) { // new likely non-existent word
        input.append(randomString());
-      } else {//existing word (possible multi-word from prev iteration)
+      } else { // existing word (possible multi-word from prev iteration)
        input.append(RandomPicks.randomFrom(R, NAMES));
      }
-      input.append(' ');//must end with a space
+      input.append(' '); // must end with a space
     }
 
     boolean madeIt = false;
@@ -105,46 +100,45 @@ public void test() throws Exception {
           System.out.println("Reproduce with:");
           System.out.print(" buildNames(");
           for (int i = 0; i < NAMES.size(); i++) {
-            if (i != 0)
-              System.out.print(',');
+            if (i != 0) System.out.print(',');
             System.out.print('"');
             System.out.print(NAMES.get(i));
             System.out.print('"');
           }
           System.out.println(");");
-          System.out.println(" assertBruteForce(\"" + input+"\");");
+          System.out.println(" assertBruteForce(\"" + input + "\");");
         }
       }
     }
-
   }
 
   private void assertBruteForce(String input) throws Exception {
     assert input.matches(" .* ");
     baseParams.set("overlaps", "ALL");
 
-    //loop through NAMES and find all tag offsets
+    // loop through NAMES and find all tag offsets
     List testTags = new ArrayList<>();
     for (String name : NAMES) {
-      String spaceName = " "+name+" ";
+      String spaceName = " " + name + " ";
       int off = 0;
       while (true) {
        int idx = input.indexOf(spaceName, off);
-        if (idx < 0)
-          break;
+        if (idx < 0) break;
        testTags.add(new TestTag(idx + 1, idx + 1 + name.length(), name, name));
        off = idx + 1;
      }
    }
 
-    //assert
+    // assert
     assertTags(reqDoc(input), testTags.toArray(new TestTag[testTags.size()]));
   }
 
-  private String randomString() { return randomStringOfLength(1, 1); }
+  private String randomString() {
+    return randomStringOfLength(1, 1);
+  }
 
   private String randomStringOfLength(int min, int max) {
-    return RandomStrings.randomAsciiLettersOfLengthBetween(random(), min, max).toLowerCase(Locale.ROOT);
+    return RandomStrings.randomAsciiLettersOfLengthBetween(random(), min, max)
+        .toLowerCase(Locale.ROOT);
   }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/handler/tagger/Tagger2Test.java b/solr/core/src/test/org/apache/solr/handler/tagger/Tagger2Test.java
index cafda46ade4..b9576f66f35 100644
--- a/solr/core/src/test/org/apache/solr/handler/tagger/Tagger2Test.java
+++ b/solr/core/src/test/org/apache/solr/handler/tagger/Tagger2Test.java
@@ -23,14 +23,11 @@
 package org.apache.solr.handler.tagger;
 
 import java.nio.charset.StandardCharsets;
-
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 
-/**
- * Test the {@link TaggerRequestHandler}.
- */
+/** Test the {@link TaggerRequestHandler}. */
 public class Tagger2Test extends TaggerTestCase {
 
   @BeforeClass
@@ -47,25 +44,32 @@ public void setUp() throws Exception {
   /** whole matching, no sub-tags */
   @Test
   public void testLongestDominantRight() throws Exception {
-    buildNames("in", "San", "in San", "Francisco", "San Francisco",
-        "San Francisco State College", "College of California",
-        "Clayton", "Clayton North", "North Carolina");
-
-    assertTags("He lived in San Francisco.",
-        "in", "San Francisco");
-
-    assertTags("He enrolled in San Francisco State College of California",
-        "in", "San Francisco State College");
-
-    assertTags("He lived in Clayton North Carolina",
-        "in", "Clayton", "North Carolina");
-
+    buildNames(
+        "in",
+        "San",
+        "in San",
+        "Francisco",
+        "San Francisco",
+        "San Francisco State College",
+        "College of California",
+        "Clayton",
+        "Clayton North",
+        "North Carolina");
+
+    assertTags("He lived in San Francisco.", "in", "San Francisco");
+
+    assertTags(
+        "He enrolled in San Francisco State College of California",
+        "in",
+        "San Francisco State College");
+
+    assertTags("He lived in Clayton North Carolina", "in", "Clayton", "North Carolina");
   }
 
   // As of Lucene/Solr 4.9, StandardTokenizer never does this anymore (reported to Lucene dev-list,
   // Jan 26th 2015. Honestly it's not particularly important to us but it renders this test
   // pointless.
-  /** Orig issue https://github.com/OpenSextant/SolrTextTagger/issues/2  related: #13 */
+  /** Orig issue https://github.com/OpenSextant/SolrTextTagger/issues/2 related: #13 */
   @Test
   @Ignore
   public void testVeryLongWord() throws Exception {
     buildNames(SANFRAN);
 
     // exceeds default 255 max token length which means it in-effect becomes a stop-word
-    StringBuilder STOP = new StringBuilder(260);//>255
+    StringBuilder STOP = new StringBuilder(260); // >255
     for (int i = 0; i < STOP.capacity(); i++) {
       STOP.append((char) ('0' + (i % 10)));
     }
 
     String doc = "San " + STOP + " Francisco";
-    assertTags(doc);//no match due to default stop word handling
-    //and we find it when we ignore stop words
-    assertTags(reqDoc(doc, "ignoreStopwords", "true"), new TestTag(0, doc.length(), doc, lookupByName(SANFRAN)));
+    assertTags(doc); // no match due to default stop word handling
+    // and we find it when we ignore stop words
+    assertTags(
+        reqDoc(doc, "ignoreStopwords", "true"),
+        new TestTag(0, doc.length(), doc, lookupByName(SANFRAN)));
   }
 
-  /** Support for stopwords (posInc > 1);
-   * discussion: https://github.com/OpenSextant/SolrTextTagger/issues/13 */
+  /**
+   * Support for stopwords (posInc > 1); discussion:
+   * https://github.com/OpenSextant/SolrTextTagger/issues/13
+   */
   @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-8344")
   @Test
   public void testStopWords() throws Exception {
-    baseParams.set("field", "name_tagStop");//stop filter (pos inc enabled) index & query
+    baseParams.set("field", "name_tagStop"); // stop filter (pos inc enabled) index & query
 
-    String SOUTHOFWALES = "South of Wales";//'of' is stop word index time & query
+    String SOUTHOFWALES = "South of Wales"; // 'of' is stop word index time & query
     String ACITYA = "A City A";
 
     buildNames(SOUTHOFWALES, ACITYA);
 
-    //round-trip works
-    assertTags(reqDoc(SOUTHOFWALES), new TestTag(0, SOUTHOFWALES.length(), SOUTHOFWALES,
-        lookupByName(SOUTHOFWALES)));
+    // round-trip works
+    assertTags(
+        reqDoc(SOUTHOFWALES),
+        new TestTag(0, SOUTHOFWALES.length(), SOUTHOFWALES, lookupByName(SOUTHOFWALES)));
 
     // but offsets doesn't include stopword when leading or trailing...
-    assertTags(reqDoc(ACITYA), new TestTag(2, 6, "City",
-        lookupByName(ACITYA)));
-    //break on stop words
-    assertTags(reqDoc(SOUTHOFWALES, "ignoreStopwords", "false"));//match nothing
+    assertTags(reqDoc(ACITYA), new TestTag(2, 6, "City", lookupByName(ACITYA)));
+    // break on stop words
+    assertTags(reqDoc(SOUTHOFWALES, "ignoreStopwords", "false")); // match nothing
   }
 
   /** Tests WordDelimiterGraphFilter, stacked/synonymous tokens at index time (catenate options) */
@@ -111,36 +119,37 @@ public void testStopWords() throws Exception {
   public void testWDF() throws Exception {
     baseParams.set("field", "name_tagWDF");
 
-    final String WINSTONSALEM = "City of Winston-Salem";//hyphen
-    final String BOSTONHARBOR = "Boston Harbor";//space
+    final String WINSTONSALEM = "City of Winston-Salem"; // hyphen
+    final String BOSTONHARBOR = "Boston Harbor"; // space
     buildNames(WINSTONSALEM, BOSTONHARBOR);
 
-    //round-trip works
-    assertTags(reqDoc(WINSTONSALEM), new TestTag(0, WINSTONSALEM.length(), WINSTONSALEM,
-        lookupByName(WINSTONSALEM)));
+    // round-trip works
+    assertTags(
+        reqDoc(WINSTONSALEM),
+        new TestTag(0, WINSTONSALEM.length(), WINSTONSALEM, lookupByName(WINSTONSALEM)));
 
     // space separated works
     final String WS_SPACE = WINSTONSALEM.replace('-', ' ');
-    assertTags(reqDoc(WS_SPACE),
-        new TestTag(0, WS_SPACE.length(), WS_SPACE,
-            lookupByName(WINSTONSALEM)));
+    assertTags(
+        reqDoc(WS_SPACE), new TestTag(0, WS_SPACE.length(), WS_SPACE, lookupByName(WINSTONSALEM)));
 
-    //must be full match
-    assertTags(reqDoc("Winston"));//match nothing
-    assertTags(reqDoc("Salem"));//match nothing
+    // must be full match
+    assertTags(reqDoc("Winston")); // match nothing
+    assertTags(reqDoc("Salem")); // match nothing
 
     // round-trip works
-    assertTags(reqDoc(BOSTONHARBOR), new TestTag(0, BOSTONHARBOR.length(), BOSTONHARBOR,
-        lookupByName(BOSTONHARBOR)));
+    assertTags(
+        reqDoc(BOSTONHARBOR),
+        new TestTag(0, BOSTONHARBOR.length(), BOSTONHARBOR, lookupByName(BOSTONHARBOR)));
 
     // hyphen separated works
     final String BH_HYPHEN = BOSTONHARBOR.replace(' ', '-');
-    assertTags(reqDoc(BH_HYPHEN),
-        new TestTag(0, BH_HYPHEN.length(), BH_HYPHEN,
-            lookupByName(BOSTONHARBOR)));
-    //must be full match
-    assertTags(reqDoc("Boston"));//match nothing
-    assertTags(reqDoc("Harbor"));//match nothing
+    assertTags(
+        reqDoc(BH_HYPHEN),
+        new TestTag(0, BH_HYPHEN.length(), BH_HYPHEN, lookupByName(BOSTONHARBOR)));
+    // must be full match
+    assertTags(reqDoc("Boston")); // match nothing
+    assertTags(reqDoc("Harbor")); // match nothing
   }
 
   /** Ensure character offsets work for multi-byte characters */
@@ -155,22 +164,20 @@ public void testMultibyteChar() throws Exception {
     String QUOTE = TEXT.substring(14, 15);
     assertEquals(8217, QUOTE.codePointAt(0));
 
-    //UTF8
+    // UTF8
     assertEquals(3, QUOTE.getBytes(StandardCharsets.UTF_8).length);
     assertEquals(1, "a".getBytes(StandardCharsets.UTF_8).length);
-    assertEquals(40 + 2*2, TEXT.getBytes(StandardCharsets.UTF_8).length);
+    assertEquals(40 + 2 * 2, TEXT.getBytes(StandardCharsets.UTF_8).length);
 
-    //UTF16 big endian  (by specifying big/little endian, there is no "byte order mark")
+    // UTF16 big endian (by specifying big/little endian, there is no "byte order mark")
     assertEquals(2, QUOTE.getBytes(StandardCharsets.UTF_16BE).length);
     assertEquals(2, "a".getBytes(StandardCharsets.UTF_16BE).length);
     assertEquals(40 * 2, TEXT.getBytes(StandardCharsets.UTF_16BE).length);
 
-
     buildNames("Obama");
 
     assertTags(TEXT, "Obama");
 
     // TODO test surrogate pairs (i.e. code points not in the BMP)
   }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTest.java b/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTest.java
index 71d5af5dffa..ca67d4e22ed 100644
--- a/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTest.java
@@ -24,16 +24,13 @@
 
 import java.util.Arrays;
 import java.util.stream.Collectors;
-
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.request.SolrQueryRequest;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 
-/**
- * The original test for {@link TaggerRequestHandler}.
- */
+/** The original test for {@link TaggerRequestHandler}. */
 public class TaggerTest extends TaggerTestCase {
 
   @BeforeClass
@@ -52,14 +49,26 @@ private void indexAndBuild() throws Exception {
   /** Name corpus */
   enum N {
-    //keep order to retain ord()
-    London, London_Business_School, Boston, City_of_London,
-    of, the//filtered out of the corpus by a custom query
-    ;
-
-    String getName() { return name().replace('_',' '); }
-    static N lookupByName(String name) { return N.valueOf(name.replace(' ', '_')); }
-    int getId() { return ordinal(); }
+    // keep order to retain ord()
+    London,
+    London_Business_School,
+    Boston,
+    City_of_London,
+    of,
+    the // filtered out of the corpus by a custom query
+    ;
+
+    String getName() {
+      return name().replace('_', ' ');
+    }
+
+    static N lookupByName(String name) {
+      return N.valueOf(name.replace(' ', '_'));
+    }
+
+    int getId() {
+      return ordinal();
+    }
   }
 
   public void testFormat() throws Exception {
@@ -67,26 +76,27 @@ public void testFormat() throws Exception {
     indexAndBuild();
 
     String rspStr = _testFormatRequest(false);
-    String expected = "\n" +
-        "\n" +
-        "\n" +
-        "1\n" +
-        "\n" +
-        " \n" +
-        " 0\n" +
-        " 22\n" +
-        " \n" +
-        " 1\n" +
-        " \n" +
-        " \n" +
-        "\n" +
-        "\n" +
-        " \n" +
-        " 1\n" +
-        " London Business School\n" +
-        " 1\n" +
-        "\n" +
-        "\n";
+    String expected =
+        "\n"
+            + "\n"
+            + "\n"
+            + "1\n"
+            + "\n"
+            + " \n"
+            + " 0\n"
+            + " 22\n"
+            + " \n"
+            + " 1\n"
+            + " \n"
+            + " \n"
+            + "\n"
+            + "\n"
+            + " \n"
+            + " 1\n"
+            + " London Business School\n"
+            + " 1\n"
+            + "\n"
+            + "\n";
     assertEquals(expected, rspStr);
   }
 
@@ -95,72 +105,77 @@ public void testFormatMatchText() throws Exception {
     indexAndBuild();
 
     String rspStr = _testFormatRequest(true);
-    String expected = "\n" +
-        "\n" +
-        "\n" +
-        "1\n" +
-        "\n" +
-        " \n" +
-        " 0\n" +
-        " 22\n" +
-        " london business school\n" +
-        " \n" +
-        " 1\n" +
-        " \n" +
-        " \n" +
-        "\n" +
-        "\n" +
-        " \n" +
-        " 1\n" +
-        " London Business School\n" +
-        " 1\n" +
-        "\n" +
-        "\n";
+    String expected =
+        "\n"
+            + "\n"
+            + "\n"
+            + "1\n"
+            + "\n"
+            + " \n"
+            + " 0\n"
+            + " 22\n"
+            + " london business school\n"
+            + " \n"
+            + " 1\n"
+            + " \n"
+            + " \n"
+            + "\n"
+            + "\n"
+            + " \n"
+            + " 1\n"
+            + " London Business School\n"
+            + " 1\n"
+            + "\n"
+            + "\n";
     assertEquals(expected, rspStr);
   }
 
   private String _testFormatRequest(boolean matchText) throws Exception {
-    String doc = "london business school";//just one tag
-    SolrQueryRequest req = reqDoc(doc, "indent", "on", "omitHeader", "on", "matchText", ""+matchText);
+    String doc = "london business school"; // just one tag
+    SolrQueryRequest req =
+        reqDoc(doc, "indent", "on", "omitHeader", "on", "matchText", "" + matchText);
     String rspStr = h.query(req);
     req.close();
     return rspStr;
   }
 
   /** Partial matching, no sub-tags */
-  @Ignore //TODO ConcatenateGraphFilter uses a special separator char that we can't put into XML (invalid char)
+  @Ignore // TODO ConcatenateGraphFilter uses a special separator char that we can't put into XML
+  // (invalid char)
   public void testPartialMatching() throws Exception {
     baseParams.set("field", "name_tagPartial");
     baseParams.set("overlaps", "NO_SUB");
-    baseParams.set("fq", "NOT name:(of the)");//test filtering
+    baseParams.set("fq", "NOT name:(of the)"); // test filtering
     indexAndBuild();
 
-    //these match nothing
-    assertTags(reqDoc("") );
-    assertTags(reqDoc(" ") );
-    assertTags(reqDoc("the") );
+    // these match nothing
+    assertTags(reqDoc(""));
+    assertTags(reqDoc(" "));
+    assertTags(reqDoc("the"));
 
     String doc;
 
-    //just London Business School via "school" substring
+    // just London Business School via "school" substring
     doc = "school";
-    assertTags(reqDoc(doc), tt(doc,"school", 0, N.London_Business_School));
+    assertTags(reqDoc(doc), tt(doc, "school", 0, N.London_Business_School));
     doc = "a school";
-    assertTags(reqDoc(doc), tt(doc,"school", 0, N.London_Business_School));
+    assertTags(reqDoc(doc), tt(doc, "school", 0, N.London_Business_School));
     doc = "school a";
-    assertTags(reqDoc(doc), tt(doc,"school", 0, N.London_Business_School));
+    assertTags(reqDoc(doc), tt(doc, "school", 0, N.London_Business_School));
 
-    //More interesting
+    // More interesting
 
     doc = "school City";
-    assertTags(reqDoc(doc),
+    assertTags(
+        reqDoc(doc),
         tt(doc, "school", 0, N.London_Business_School),
-        tt(doc, "City", 0, N.City_of_London) );
+        tt(doc, "City", 0, N.City_of_London));
 
     doc = "City of London Business School";
-    assertTags(reqDoc(doc), //no plain London (sub-tag)
+    assertTags(
+        reqDoc(doc), // no plain London (sub-tag)
         tt(doc, "City of London", 0, N.City_of_London),
         tt(doc, "London Business School", 0, N.London_Business_School));
   }
 
@@ -168,75 +183,81 @@ public void testPartialMatching() throws Exception {
   /** whole matching, no sub-tags */
   public void testWholeMatching() throws Exception {
     baseParams.set("overlaps", "NO_SUB");
-    baseParams.set("fq", "NOT name:(of the)");//test filtering
+    baseParams.set("fq", "NOT name:(of the)"); // test filtering
     indexAndBuild();
 
-    //these match nothing
+    // these match nothing
     assertTags(reqDoc(""));
-    assertTags(reqDoc(" ") );
-    assertTags(reqDoc("the") );
+    assertTags(reqDoc(" "));
+    assertTags(reqDoc("the"));
 
-    //partial on N.London_Business_School matches nothing
-    assertTags(reqDoc("school") );
-    assertTags(reqDoc("a school") );
-    assertTags(reqDoc("school a") );
-    assertTags(reqDoc("school City") );
+    // partial on N.London_Business_School matches nothing
+    assertTags(reqDoc("school"));
+    assertTags(reqDoc("a school"));
+    assertTags(reqDoc("school a"));
+    assertTags(reqDoc("school City"));
 
     String doc;
 
-    doc = "school business london";//backwards
-    assertTags(reqDoc(doc), tt(doc,"london", 0, N.London));
+    doc = "school business london"; // backwards
+    assertTags(reqDoc(doc), tt(doc, "london", 0, N.London));
 
     doc = "of London Business School";
-    assertTags(reqDoc(doc), //no plain London (sub-tag)
+    assertTags(
+        reqDoc(doc), // no plain London (sub-tag)
         tt(doc, "London Business School", 0, N.London_Business_School));
 
-    //More interesting
+    // More interesting
     doc = "City of London Business School";
-    assertTags(reqDoc(doc), //no plain London (sub-tag)
+    assertTags(
+        reqDoc(doc), // no plain London (sub-tag)
         tt(doc, "City of London", 0, N.City_of_London),
         tt(doc, "London Business School", 0, N.London_Business_School));
 
     doc = "City of London Business";
-    assertTags(reqDoc(doc), //no plain London (sub-tag) no Business (partial-match)
+    assertTags(
+        reqDoc(doc), // no plain London (sub-tag) no Business (partial-match)
         tt(doc, "City of London", 0, N.City_of_London));
 
     doc = "London Business magazine";
-    assertTags(reqDoc(doc), //Just London; L.B.S. fails
+    assertTags(
+        reqDoc(doc), // Just London; L.B.S. fails
         tt(doc, "London", 0, N.London));
   }
 
   /** whole matching, with sub-tags */
   public void testSubTags() throws Exception {
     baseParams.set("overlaps", "ALL");
-    baseParams.set("fq", "NOT name:(of the)");//test filtering
+    baseParams.set("fq", "NOT name:(of the)"); // test filtering
     indexAndBuild();
 
-    //these match nothing
+    // these match nothing
     assertTags(reqDoc(""));
-    assertTags(reqDoc(" ") );
-    assertTags(reqDoc("the") );
+    assertTags(reqDoc(" "));
+    assertTags(reqDoc("the"));
 
-    //partial on N.London_Business_School matches nothing
-    assertTags(reqDoc("school") );
-    assertTags(reqDoc("a school") );
-    assertTags(reqDoc("school a") );
-    assertTags(reqDoc("school City") );
+    // partial on N.London_Business_School matches nothing
+    assertTags(reqDoc("school"));
+    assertTags(reqDoc("a school"));
+    assertTags(reqDoc("school a"));
+    assertTags(reqDoc("school City"));
 
     String doc;
 
-    doc = "school business london";//backwards
-    assertTags(reqDoc(doc), tt(doc,"london", 0, N.London));
+    doc = "school business london"; // backwards
+    assertTags(reqDoc(doc), tt(doc, "london", 0, N.London));
 
-    //More interesting
+    // More interesting
     doc = "City of London Business School";
-    assertTags(reqDoc(doc),
+    assertTags(
+        reqDoc(doc),
         tt(doc, "City of London", 0, N.City_of_London),
         tt(doc, "London", 0, N.London),
         tt(doc, "London Business School", 0, N.London_Business_School));
 
     doc = "City of London Business";
-    assertTags(reqDoc(doc),
+    assertTags(
+        reqDoc(doc),
         tt(doc, "City of London", 0, N.City_of_London),
         tt(doc, "London", 0, N.London));
   }
 
@@ -248,10 +269,20 @@ public void testMultipleFilterQueries() throws Exception {
     deleteByQueryAndGetVersion("*:*", null);
 
     int i = 0;
-    assertU(adoc("id", ""+i++, "name", N.London.getName(), "type", "city", "country", "UK"));
-    assertU(adoc("id", ""+i++, "name", N.London_Business_School.getName(), "type", "school", "country", "UK"));
-    assertU(adoc("id", ""+i++, "name", N.Boston.getName(), "type", "city", "country", "US"));
-    assertU(adoc("id", ""+i++, "name", N.City_of_London.getName(), "type", "org", "country", "UK"));
+    assertU(adoc("id", "" + i++, "name", N.London.getName(), "type", "city", "country", "UK"));
+    assertU(
+        adoc(
+            "id",
+            "" + i++,
+            "name",
+            N.London_Business_School.getName(),
+            "type",
+            "school",
+            "country",
+            "UK"));
+    assertU(adoc("id", "" + i++, "name", N.Boston.getName(), "type", "city", "country", "US"));
+    assertU(
+        adoc("id", "" + i++, "name", N.City_of_London.getName(), "type", "org", "country", "UK"));
     assertU(commit());
 
     // not calling buildNames so that we can bring along extra attributes for filtering
@@ -263,7 +294,8 @@
     // first do no filtering
     ModifiableSolrParams p = new ModifiableSolrParams();
     p.add(CommonParams.Q, "*:*");
-    assertTags(reqDoc(doc, p),
+    assertTags(
+        reqDoc(doc, p),
         tt(doc, "City of London", 0, N.City_of_London),
         tt(doc, "London", 0, N.London),
         tt(doc, "London Business School", 0, N.London_Business_School),
@@ -271,46 +303,43 @@
 
     // add a single fq
     p.add(CommonParams.FQ, "type:city");
-    assertTags(reqDoc(doc, p),
-        tt(doc, "London", 0, N.London),
-        tt(doc, "Boston", 0, N.Boston));
+    assertTags(reqDoc(doc, p), tt(doc, "London", 0, N.London), tt(doc, "Boston", 0, N.Boston));
 
     // add another fq
     p.add(CommonParams.FQ, "country:US");
-    assertTags(reqDoc(doc, p),
-        tt(doc, "Boston", 0, N.Boston));
+    assertTags(reqDoc(doc, p), tt(doc, "Boston", 0, N.Boston));
   }
 
   private TestTag tt(String doc, String substring, int substringIndex, N name) {
     assert substringIndex == 0;
 
-    //little bit of copy-paste code from super.tt()
+    // little bit of copy-paste code from super.tt()
     int startOffset = -1, endOffset;
     int substringIndex1 = 0;
-    for(int i = 0; i <= substringIndex1; i++) {
+    for (int i = 0; i <= substringIndex1; i++) {
       startOffset = doc.indexOf(substring, ++startOffset);
       assert startOffset >= 0 : "The test itself is broken";
     }
-    endOffset = startOffset+ substring.length();//1 greater (exclusive)
+    endOffset = startOffset + substring.length(); // 1 greater (exclusive)
     return new TestTag(startOffset, endOffset, substring, lookupByName(name.getName()));
   }
-
   public void testEmptyCollection() throws Exception {
-    //SOLR-14396: Ensure tagger handler doesn't fail on empty collections
-    SolrQueryRequest req = reqDoc("anything", "indent", "on", "omitHeader", "on", "matchText", "false");
+    // SOLR-14396: Ensure tagger handler doesn't fail on empty collections
+    SolrQueryRequest req =
+        reqDoc("anything", "indent", "on", "omitHeader", "on", "matchText", "false");
     String rspStr = h.query(req);
     req.close();
 
-    String expected = "\n" +
-        "\n" +
-        "\n" +
-        "0\n" +
-        "\n" +
-        "\n" +
-        "\n" +
-        "\n";
+    String expected =
+        "\n"
+            + "\n"
+            + "\n"
+            + "0\n"
+            + "\n"
+            + "\n"
+            + "\n"
+            + "\n";
     assertEquals(expected, rspStr);
   }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTestCase.java b/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTestCase.java
index c0d030a898e..f48e3f5a1ab 100644
--- a/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTestCase.java
+++ b/solr/core/src/test/org/apache/solr/handler/tagger/TaggerTestCase.java
@@ -31,7 +31,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.TreeSet;
-
 import org.apache.commons.lang3.builder.CompareToBuilder;
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.lucene.document.Document;
@@ -59,18 +58,19 @@ public abstract class TaggerTestCase extends SolrTestCaseJ4 {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @Rule
-  public TestWatcher watchman = new TestWatcher() {
-    @Override
-    protected void starting(Description description) {
-      if (log.isInfoEnabled()) {
-        log.info("{} being run...", description.getDisplayName());
-      }
-    }
-  };
+  public TestWatcher watchman =
+      new TestWatcher() {
+        @Override
+        protected void starting(Description description) {
+          if (log.isInfoEnabled()) {
+            log.info("{} being run...", description.getDisplayName());
+          }
+        }
+      };
 
   protected final ModifiableSolrParams baseParams = new ModifiableSolrParams();
 
-  //populated in buildNames; tested in assertTags
+  // populated in buildNames; tested in assertTags
   protected static List NAMES;
 
   @Override
@@ -92,18 +92,17 @@ protected void assertTags(String doc, String... tags) throws Exception {
   protected static void buildNames(String... names) throws Exception {
     deleteByQueryAndGetVersion("*:*", null);
     NAMES = Arrays.asList(names);
-    //Collections.sort(NAMES);
+    // Collections.sort(NAMES);
     int i = 0;
     for (String n : NAMES) {
-      assertU(adoc("id", ""+(i++), "name", n));
+      assertU(adoc("id", "" + (i++), "name", n));
     }
     assertU(commit());
   }
 
   protected String lookupByName(String name) {
     for (String n : NAMES) {
-      if (n.equalsIgnoreCase(name))
-        return n;
+      if (n.equalsIgnoreCase(name)) return n;
     }
     return null;
   }
@@ -111,25 +110,23 @@ protected String lookupByName(String name) {
   protected TestTag tt(String doc, String substring) {
     int startOffset = -1, endOffset;
     int substringIndex = 0;
-    for(int i = 0; i <= substringIndex; i++) {
-      startOffset = doc.indexOf(substring,++startOffset);
+    for (int i = 0; i <= substringIndex; i++) {
+      startOffset = doc.indexOf(substring, ++startOffset);
       assert startOffset >= 0 : "The test itself is broken";
     }
-    endOffset = startOffset+substring.length();//1 greater (exclusive)
+    endOffset = startOffset + substring.length(); // 1 greater (exclusive)
     return new TestTag(startOffset, endOffset, substring, lookupByName(substring));
   }
 
-  /** Asserts the tags.  Will call req.close(). */
+  /** Asserts the tags. Will call req.close(). */
   protected void assertTags(SolrQueryRequest req, TestTag... eTags) throws Exception {
     try {
       SolrQueryResponse rsp = h.queryAndResponse(req.getParams().get(CommonParams.QT), req);
       TestTag[] aTags = pullTagsFromResponse(req, rsp);
 
       String message;
-      if (aTags.length > 10)
-        message = null;
-      else
-        message = Arrays.asList(aTags).toString();
+      if (aTags.length > 10) message = null;
+      else message = Arrays.asList(aTags).toString();
       Arrays.sort(eTags);
       assertSortedArrayEquals(message, eTags, aTags);
 
@@ -138,7 +135,8 @@ protected void assertTags(SolrQueryRequest req, TestTag... eTags) throws Excepti
     }
   }
 
-  protected TestTag[] pullTagsFromResponse(SolrQueryRequest req, SolrQueryResponse rsp ) throws IOException {
+  protected TestTag[] pullTagsFromResponse(SolrQueryRequest req, SolrQueryResponse rsp)
+      throws IOException {
     NamedList rspValues = rsp.getValues();
     Map matchingNames = new HashMap<>();
     SolrIndexSearcher searcher = req.getSearcher();
@@ -149,23 +147,24 @@ protected TestTag[] pullTagsFromResponse(SolrQueryRequest req, SolrQueryResponse
       Document doc = searcher.doc(docId);
       String id = doc.getField("id").stringValue();
       String name = lookupByName(doc.get("name"));
-      assertEquals("looking for "+name, NAMES.indexOf(name)+"", id);
+      assertEquals("looking for " + name, NAMES.indexOf(name) + "", id);
       matchingNames.put(id, name);
     }
 
-    //build TestTag[] aTags from response ('a' is actual)
+    // build TestTag[] aTags from response ('a' is actual)
     @SuppressWarnings("unchecked")
     List> mTagsList = (List>) rspValues.get("tags");
     List aTags = new ArrayList<>();
     for (NamedList map : mTagsList) {
       @SuppressWarnings("unchecked")
       List foundIds = (List) map.get("ids");
-      for (String id : foundIds) {
-        aTags.add(new TestTag(
-            ((Number)map.get("startOffset")).intValue(),
-            ((Number)map.get("endOffset")).intValue(),
-            null,
-            matchingNames.get(id)));
+      for (String id : foundIds) {
+        aTags.add(
+            new TestTag(
+                ((Number) map.get("startOffset")).intValue(),
+                ((Number) map.get("endOffset")).intValue(),
+                null,
+                matchingNames.get(id)));
       }
     }
     return aTags.toArray(new TestTag[0]);
@@ -181,12 +180,13 @@ protected SolrQueryRequest reqDoc(String doc, SolrParams moreParams) {
     log.debug("Test doc: {}", doc);
     SolrParams params = SolrParams.wrapDefaults(moreParams, baseParams);
     SolrQueryRequestBase req = new SolrQueryRequestBase(h.getCore(), params) {};
-    Iterable stream = Collections.singleton((ContentStream)new ContentStreamBase.StringStream(doc));
+    Iterable stream =
+        Collections.singleton((ContentStream) new ContentStreamBase.StringStream(doc));
     req.setContentStreams(stream);
     return req;
   }
 
-  /** Asserts the sorted arrays are equals, with a helpful error message when not.*/
+  /** Asserts the sorted arrays are equals, with a helpful error message when not. */
   public void assertSortedArrayEquals(String message, Object[] expecteds, Object[] actuals) {
     try {
       assertArrayEquals(null, expecteds, actuals);
@@ -196,10 +196,16 @@ public void assertSortedArrayEquals(String message, Object[] expecteds, Object[]
       expectedRemaining.removeAll(actualsRemaining); // expected - actual
       if (!expectedRemaining.isEmpty()) {
-        message += ": didn't find expected " + expectedRemaining.first() + " (of " + expectedRemaining.size() + ")";
+        message +=
+            ": didn't find expected "
+                + expectedRemaining.first()
+                + " (of "
+                + expectedRemaining.size()
+                + ")";
       } else {
         Arrays.asList(expecteds).forEach(actualsRemaining::remove); // actual - expected
-        message += ": didn't expect " + actualsRemaining.first() + " (of " + actualsRemaining.size() + ")";
+        message +=
+            ": didn't expect " + actualsRemaining.first() + " (of " + actualsRemaining.size() + ")";
       }
       throw new AssertionError(message, error);
     }
@@ -219,11 +225,19 @@ class TestTag implements Comparable {
 
   @Override
   public String toString() {
-    return "TestTag{" +
-        "[" + startOffset + "-" + endOffset + "]" +
-        " doc=" + NAMES.indexOf(docName) + ":'" + docName + "'" +
-        (docName.equals(substring) || substring == null ? "" : " substr="+substring)+
-        '}';
+    return "TestTag{"
+        + "["
+        + startOffset
+        + "-"
+        + endOffset
+        + "]"
+        + " doc="
+        + NAMES.indexOf(docName)
+        + ":'"
+        + docName
+        + "'"
+        + (docName.equals(substring) || substring == null ? "" : " substr=" + substring)
+        + '}';
   }
 
   @Override
@@ -238,7 +252,7 @@ public boolean equals(Object obj) {
 
   @Override
   public int hashCode() {
-    return startOffset;//cheesy but acceptable
+    return startOffset; // cheesy but acceptable
   }
 
   @Override
@@ -246,7 +260,7 @@ public int compareTo(TestTag that) {
     return new CompareToBuilder()
         .append(this.startOffset, that.startOffset)
         .append(this.endOffset, that.endOffset)
-        .append(this.docName,that.docName)
+        .append(this.docName, that.docName)
         .toComparison();
   }
 }
diff --git a/solr/core/src/test/org/apache/solr/handler/tagger/TaggingAttributeTest.java b/solr/core/src/test/org/apache/solr/handler/tagger/TaggingAttributeTest.java
index 39c78286713..a05842733ee 100644
--- a/solr/core/src/test/org/apache/solr/handler/tagger/TaggingAttributeTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/tagger/TaggingAttributeTest.java
@@ -26,9 +26,8 @@
 import org.junit.Test;
 
 /**
- * Test the {@link TaggerRequestHandler} with
- * a Analyzer chain that does use the {@link TaggingAttribute}. See the test
- * configuration under 'taggingattribute'.
+ * Test the {@link TaggerRequestHandler} with a Analyzer chain that does use the {@link
+ * TaggingAttribute}. See the test configuration under 'taggingattribute'.
  */
 public class TaggingAttributeTest extends TaggerTestCase {
 
@@ -38,36 +37,47 @@ public static void beforeClass() throws Exception {
   }
 
   /**
-   * Whole matching, no sub-tags. Links only words with > 3 letters.
-   * Because of that "San" is not used to start tags
-   *
+   * Whole matching, no sub-tags. Links only words with > 3 letters. Because of that "San" is not
+   * used to start tags
    */
   @Test
   public void testTaggingAttribute() throws Exception {
-    baseParams.set("field", "name_tagAttribute"); // has WordLengthTaggingFilter using the TaggingAttribute
+    baseParams.set(
+        "field", "name_tagAttribute"); // has WordLengthTaggingFilter using the TaggingAttribute
     // this test is based on the longest dominant right test, so we use the
     // the same TagClusterReducer setting
     baseParams.set("overlaps", "LONGEST_DOMINANT_RIGHT");
 
-    buildNames("in", "San", "in San", "Francisco", "San Francisco",
-        "San Francisco State College", "College of California",
-        "Clayton", "Clayton North", "North Carolina");
+    buildNames(
+        "in",
+        "San",
+        "in San",
+        "Francisco",
+        "San Francisco",
+        "San Francisco State College",
+        "College of California",
+        "Clayton",
+        "Clayton North",
+        "North Carolina");
 
-    assertTags("He lived in San Francisco.",
-        //"in", "San Francisco"); //whis would be expected without taggable
-        "Francisco");// this are the expected results with taggable
+    assertTags(
+        "He lived in San Francisco.",
+        // "in", "San Francisco"); //whis would be expected without taggable
+        "Francisco"); // this are the expected results with taggable
 
-    assertTags("He enrolled in San Francisco State College of California",
-        //"in", "San Francisco State College"); //without taggable enabled
-        "Francisco", "College of California");// With taggable
-    //NOTE this also tests that started tags are advanced for non-taggable
+    assertTags(
+        "He enrolled in San Francisco State College of California",
+        // "in", "San Francisco State College"); //without taggable enabled
+        "Francisco",
+        "College of California"); // With taggable
+    // NOTE this also tests that started tags are advanced for non-taggable
     // tokens, as otherwise 'College of California' would not be
     // suggested.
 
-    assertTags("He lived in Clayton North Carolina",
-        //"in", "Clayton", "North Carolina");
-        "Clayton", "North Carolina");
-
+    assertTags(
+        "He lived in Clayton North Carolina",
+        // "in", "Clayton", "North Carolina");
+        "Clayton",
+        "North Carolina");
   }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/handler/tagger/WordLengthTaggingFilter.java b/solr/core/src/test/org/apache/solr/handler/tagger/WordLengthTaggingFilter.java
index 237a8b82c39..599954fb980 100644
--- a/solr/core/src/test/org/apache/solr/handler/tagger/WordLengthTaggingFilter.java
+++ b/solr/core/src/test/org/apache/solr/handler/tagger/WordLengthTaggingFilter.java
@@ -23,27 +23,25 @@
 package org.apache.solr.handler.tagger;
 
 import java.io.IOException;
-
 import org.apache.lucene.analysis.TokenFilter;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
 
 /**
- * Simple TokenFilter that lookup only Tokens with more as the parsed number
- * of chars.<p>
- * NOTE:This implementation is only intended to be used as an example
- * and for unit testing the {@link TaggingAttribute} feature. Typically
- * implementations will be based on NLP results (e.g. using POS tags or
- * detected Named Entities).
- * <p>
- * Example Usage:<p>
- * Currently the usage requires to modify the Analyzer as defined by the
- * <code>indexedField</code>. An alternative would be to allow the configuration
- * of a special FieldType in the schema.xml and use this Analyzer for processing
- * the text sent to the request.<p>
- * While the current solution is fine for direct API usage, defining the
- * Analyzer in the schema.xml would be better suitable for using this feature
- * with the {@link TaggerRequestHandler}.
+ * Simple TokenFilter that lookup only Tokens with more as the parsed number of chars.
+ *
+ * <p>NOTE:This implementation is only intended to be used as an example and for unit testing
+ * the {@link TaggingAttribute} feature. Typically implementations will be based on NLP results
+ * (e.g. using POS tags or detected Named Entities).
+ *
+ * <p>Example Usage:
+ *
+ * <p>Currently the usage requires to modify the Analyzer as defined by the <code>indexedField
+ * </code>. An alternative would be to allow the configuration of a special FieldType in the
+ * schema.xml and use this Analyzer for processing the text sent to the request.
+ *
+ * <p>While the current solution is fine for direct API usage, defining the Analyzer in the
+ * schema.xml would be better suitable for using this feature with the {@link TaggerRequestHandler}.
  *
  * <pre class="prettyprint">
  *     Analyzer analyzer = req.getSchema().getField(indexedField).getType().getAnalyzer();
@@ -63,29 +61,28 @@
  */
 public class WordLengthTaggingFilter extends TokenFilter {
 
-  /**
-   * The default minimum length is 3
-   */
+  /** The default minimum length is 3 */
   public static final int DEFAULT_MIN_LENGTH = 3;
+
   private final TaggingAttribute lookupAtt = addAttribute(TaggingAttribute.class);
   private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
   private int minLength;
 
   /**
-   * TokenFilter only marks tokens to be looked up with equals or more as
-   * {@link #DEFAULT_MIN_LENGTH} characters
+   * TokenFilter that only marks tokens with {@link #DEFAULT_MIN_LENGTH} or more characters to be
+   * looked up
    */
   public WordLengthTaggingFilter(TokenStream input) {
     this(input, null);
   }
 
   /**
-   * TokenFilter only marks tokens to be looked up with equals or more characters
-   * as the parsed minimum.
+   * TokenFilter that only marks tokens with at least the parsed minimum number of characters to
+   * be looked up.
    *
-   * @param input     the TokenStream to consume tokens from
-   * @param minLength The minimum length to lookup a Token. null
-   *                  or <= 0 to use the #DEFAULT_MIN_LENGTH
+   * @param input the TokenStream to consume tokens from
+   * @param minLength The minimum length to lookup a Token. null or <= 0 to use the
+   *     #DEFAULT_MIN_LENGTH
    */
   public WordLengthTaggingFilter(TokenStream input, Integer minLength) {
     super(input);
@@ -106,5 +103,4 @@ public final boolean incrementToken() throws IOException {
       return false;
     }
   }
-
 }
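
As an illustration, not part of the patch itself: the filter above only flips the TaggingAttribute
flag on tokens that reach the configured minimum length. Below is a minimal, self-contained sketch
of driving it directly; it assumes Lucene's WhitespaceTokenizer, the isTaggable() accessor on the
TaggingAttribute interface from this package, and an invented input string with a hypothetical
minLength of 4.

    import java.io.StringReader;
    import org.apache.lucene.analysis.core.WhitespaceTokenizer;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

    // assumed to live in org.apache.solr.handler.tagger, next to the filter under test
    public class WordLengthTaggingFilterSketch {
      public static void main(String[] args) throws Exception {
        WhitespaceTokenizer tokenizer = new WhitespaceTokenizer();
        tokenizer.setReader(new StringReader("he lived in San Francisco"));
        // hypothetical minLength of 4: "he", "in", and "San" should not be marked taggable
        try (WordLengthTaggingFilter filter = new WordLengthTaggingFilter(tokenizer, 4)) {
          CharTermAttribute term = filter.getAttribute(CharTermAttribute.class);
          TaggingAttribute taggable = filter.getAttribute(TaggingAttribute.class);
          filter.reset();
          while (filter.incrementToken()) {
            System.out.println(term + " taggable=" + taggable.isTaggable());
          }
          filter.end();
        }
      }
    }
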
diff --git a/solr/core/src/test/org/apache/solr/handler/tagger/WordLengthTaggingFilterFactory.java b/solr/core/src/test/org/apache/solr/handler/tagger/WordLengthTaggingFilterFactory.java
index 89b725900a5..75e1e782a28 100644
--- a/solr/core/src/test/org/apache/solr/handler/tagger/WordLengthTaggingFilterFactory.java
+++ b/solr/core/src/test/org/apache/solr/handler/tagger/WordLengthTaggingFilterFactory.java
@@ -24,9 +24,8 @@
 
 import java.lang.invoke.MethodHandles;
 import java.util.Map;
-
-import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.TokenFilterFactory;
+import org.apache.lucene.analysis.TokenStream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -53,7 +52,6 @@ public WordLengthTaggingFilterFactory(Map args) {
         minLength = Integer.parseInt(value.toString());
       } catch (NumberFormatException e) {
         log.warn("Unable to parse minLength from value 'minLength=\"{}\"'", value);
-
       }
     }
     if (minLength <= 0) {
@@ -74,5 +72,4 @@ public WordLengthTaggingFilterFactory() {
   public TokenStream create(TokenStream input) {
     return new WordLengthTaggingFilter(input, minLength);
   }
-
 }
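
For illustration only (not in the patch): the factory can also be wired up programmatically with
the Map constructor and create() shown in the hunks above. The args map and the minLength value of
4 are invented for this sketch.

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.core.WhitespaceTokenizer;

    class WordLengthTaggingFilterFactorySketch {
      static TokenStream taggedStream() {
        Map<String, String> args = new HashMap<>();
        // parsed with Integer.parseInt in the factory; <= 0 falls back to DEFAULT_MIN_LENGTH
        args.put("minLength", "4");
        WordLengthTaggingFilterFactory factory = new WordLengthTaggingFilterFactory(args);
        return factory.create(new WhitespaceTokenizer()); // wraps any TokenStream
      }
    }
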
diff --git a/solr/core/src/test/org/apache/solr/handler/tagger/XmlInterpolationTest.java b/solr/core/src/test/org/apache/solr/handler/tagger/XmlInterpolationTest.java
index e43212119bd..82d0bbf48f1 100644
--- a/solr/core/src/test/org/apache/solr/handler/tagger/XmlInterpolationTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/tagger/XmlInterpolationTest.java
@@ -22,8 +22,6 @@
 
 package org.apache.solr.handler.tagger;
 
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
 import java.io.IOException;
 import java.io.Reader;
 import java.io.StringReader;
@@ -31,7 +29,8 @@
 import java.util.Collections;
 import java.util.List;
 import java.util.Locale;
-
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
 import org.apache.commons.io.IOUtils;
 import org.apache.lucene.analysis.charfilter.HTMLStripCharFilter;
 import org.apache.lucene.analysis.core.WhitespaceTokenizer;
@@ -49,7 +48,6 @@ public class XmlInterpolationTest extends TaggerTestCase {
 
   private static DocumentBuilder xmlDocBuilder;
 
-
   @BeforeClass
   public static void beforeClass() throws Exception {
     DocumentBuilderFactory xmlDocBuilderFactory = DocumentBuilderFactory.newInstance();
@@ -82,7 +80,7 @@ public void test() throws Exception {
     assertXmlTag("before start end after", true);
     assertXmlTag("before start end after", true);
     assertXmlTag("before start end after", true);
-    assertXmlTag("before start end after", true);//adjacent tags
+    assertXmlTag("before start end after", true); // adjacent tags
     assertXmlTag("before  start  end after", true);
     assertXmlTag("before  start    end   after", true);
 
@@ -124,56 +122,57 @@ protected void validateXml(String xml) throws Exception {
     xmlDocBuilder.parse(new InputSource(new StringReader(xml)));
   }
 
-
   @Test
   public void testLuceneHtmlFilterBehavior() {
     String docText;
 
-    //Close tag adjacent to start & end results in end offset including the close tag. LUCENE-5734
+    // Close tag adjacent to start & end results in end offset including the close tag. LUCENE-5734
     docText = "start end";
-    assertArrayEquals(tagExpect(docText, "start", "end"), analyzeTagOne(docText, "start", "end"));
+    assertArrayEquals(
+        tagExpect(docText, "start", "end"), analyzeTagOne(docText, "start", "end"));
 
-    //Space after "end" means offset doesn't include 
+    // Space after "end" means offset doesn't include 
     docText = "start end ";
     assertArrayEquals(tagExpect(docText, "start", "end"), analyzeTagOne(docText, "start", "end"));
 
-    //Matches entity at end
+    // Matches entity at end
     final String endStr = String.format(Locale.ROOT, "en&#x%02x;", (int) 'd');
     docText = "start " + endStr + "";
     assertArrayEquals(tagExpect(docText, "start", endStr), analyzeTagOne(docText, "start", "end"));
-    //... and at start
+    // ... and at start
     final String startStr = String.format(Locale.ROOT, "&#x%02x;tart", (int) 's');
     docText = "" + startStr + " end";
     assertArrayEquals(tagExpect(docText, startStr, "end"), analyzeTagOne(docText, "start", "end"));
 
-    //Test ignoring proc instructions & comments. Note: doesn't expand the entity to "start".
-    docText = ""
             + "]>&start;";
-    assertArrayEquals(new int[]{-1, -1}, analyzeTagOne(docText, "start", "start"));
+    assertArrayEquals(new int[] {-1, -1}, analyzeTagOne(docText, "start", "start"));
 
-    //Test entity behavior
-    docText =                " — – & &foo;   a b";
-    assertArrayEquals(new String[]{"—", "–", "&", "&foo;", "\u00A0", "a", "b"},
-            analyzeReturnTokens(docText));
+    // Test entity behavior
+    docText = " — – & &foo;   a b";
+    assertArrayEquals(
+        new String[] {"—", "–", "&", "&foo;", "\u00A0", "a", "b"}, analyzeReturnTokens(docText));
 
-    //Observe offset adjustment of trailing entity to end tag
+    // Observe offset adjustment of trailing entity to end tag
     docText = "foo bar";
     assertArrayEquals(tagExpect(docText, "foo", "foo"), analyzeTagOne(docText, "foo", "foo"));
   }
 
   private String insertAnchorAtOffsets(String docText, int startOffset, int endOffset, String id) {
-    String insertStart = "";// (normally we'd escape id)
+    String insertStart = ""; // (normally we'd escape id)
     String insertEnd = "";
     return docText.substring(0, startOffset)
-            + insertStart
-            + docText.substring(startOffset, endOffset)
-            + insertEnd
-            + docText.substring(endOffset);
+        + insertStart
+        + docText.substring(startOffset, endOffset)
+        + insertEnd
+        + docText.substring(endOffset);
   }
 
   private int[] tagExpect(String docText, String start, String end) {
-    return new int[]{docText.indexOf(start), docText.indexOf(end) + end.length()};
+    return new int[] {docText.indexOf(start), docText.indexOf(end) + end.length()};
   }
 
   private int[] analyzeTagOne(String docText, String start, String end) {
@@ -189,8 +188,7 @@ private int[] analyzeTagOne(String docText, String start, String end) {
       ts.reset();
       while (ts.incrementToken()) {
         final String termString = termAttribute.toString();
-        if (termString.equals(start))
-          result[0] = offsetAttribute.startOffset();
+        if (termString.equals(start)) result[0] = offsetAttribute.startOffset();
         if (termString.equals(end)) {
           result[1] = offsetAttribute.endOffset();
           return result;
@@ -208,8 +206,8 @@ private int[] analyzeTagOne(String docText, String start, String end) {
   private String[] analyzeReturnTokens(String docText) {
     List<String> result = new ArrayList<>();
 
-    Reader filter = new HTMLStripCharFilter(new StringReader(docText),
-            Collections.singleton("unescaped"));
+    Reader filter =
+        new HTMLStripCharFilter(new StringReader(docText), Collections.singleton("unescaped"));
     WhitespaceTokenizer ts = new WhitespaceTokenizer();
     final CharTermAttribute termAttribute = ts.addAttribute(CharTermAttribute.class);
     try {
@@ -226,5 +224,4 @@ private String[] analyzeReturnTokens(String docText) {
     }
     return result.toArray(new String[result.size()]);
   }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/highlight/DummyHighlighter.java b/solr/core/src/test/org/apache/solr/highlight/DummyHighlighter.java
index 2b445e448c5..f3f467c48cc 100644
--- a/solr/core/src/test/org/apache/solr/highlight/DummyHighlighter.java
+++ b/solr/core/src/test/org/apache/solr/highlight/DummyHighlighter.java
@@ -16,22 +16,20 @@
  */
 package org.apache.solr.highlight;
 
+import java.io.IOException;
 import org.apache.lucene.search.Query;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.search.DocList;
 
-import java.io.IOException;
-
 public class DummyHighlighter extends SolrHighlighter {
 
   @Override
-  public NamedList<Object> doHighlighting(DocList docs, Query query,
-      SolrQueryRequest req, String[] defaultFields) throws IOException {
+  public NamedList<Object> doHighlighting(
+      DocList docs, Query query, SolrQueryRequest req, String[] defaultFields) throws IOException {
     NamedList<Object> fragments = new SimpleOrderedMap<>();
     fragments.add("dummy", "thing1");
     return fragments;
   }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/highlight/FastVectorHighlighterTest.java b/solr/core/src/test/org/apache/solr/highlight/FastVectorHighlighterTest.java
index e4975b07e1b..c1ad1b89a68 100644
--- a/solr/core/src/test/org/apache/solr/highlight/FastVectorHighlighterTest.java
+++ b/solr/core/src/test/org/apache/solr/highlight/FastVectorHighlighterTest.java
@@ -29,31 +29,31 @@ public class FastVectorHighlighterTest extends SolrTestCaseJ4 {
 
   @BeforeClass
   public static void beforeClass() throws Exception {
-    initCore("solrconfig.xml","schema.xml");
+    initCore("solrconfig.xml", "schema.xml");
   }
 
   @Test
-  public void testConfig(){
+  public void testConfig() {
     DefaultSolrHighlighter highlighter = getHighlighter();
 
     // Make sure we loaded one fragListBuilder
-    SolrFragListBuilder solrFlbNull = highlighter.fragListBuilders.get( null );
-    SolrFragListBuilder solrFlbEmpty = highlighter.fragListBuilders.get( "" );
-    SolrFragListBuilder solrFlbSimple = highlighter.fragListBuilders.get( "simple" );
-    assertSame( solrFlbNull, solrFlbEmpty );
-    assertTrue( solrFlbNull instanceof SimpleFragListBuilder );
-    assertTrue( solrFlbSimple instanceof SimpleFragListBuilder );
-        
+    SolrFragListBuilder solrFlbNull = highlighter.fragListBuilders.get(null);
+    SolrFragListBuilder solrFlbEmpty = highlighter.fragListBuilders.get("");
+    SolrFragListBuilder solrFlbSimple = highlighter.fragListBuilders.get("simple");
+    assertSame(solrFlbNull, solrFlbEmpty);
+    assertTrue(solrFlbNull instanceof SimpleFragListBuilder);
+    assertTrue(solrFlbSimple instanceof SimpleFragListBuilder);
+
     // Make sure we loaded two fragmentsBuilders
-    SolrFragmentsBuilder solrFbNull = highlighter.fragmentsBuilders.get( null );
-    SolrFragmentsBuilder solrFbEmpty = highlighter.fragmentsBuilders.get( "" );
-    SolrFragmentsBuilder solrFbSimple = highlighter.fragmentsBuilders.get( "simple" );
-    SolrFragmentsBuilder solrFbSO = highlighter.fragmentsBuilders.get( "scoreOrder" );
-    assertSame( solrFbNull, solrFbEmpty );
-    assertTrue( solrFbNull instanceof SimpleFragmentsBuilder );
-    assertTrue( solrFbSimple instanceof SimpleFragmentsBuilder );
-    assertTrue( solrFbSO instanceof ScoreOrderFragmentsBuilder );
-    
+    SolrFragmentsBuilder solrFbNull = highlighter.fragmentsBuilders.get(null);
+    SolrFragmentsBuilder solrFbEmpty = highlighter.fragmentsBuilders.get("");
+    SolrFragmentsBuilder solrFbSimple = highlighter.fragmentsBuilders.get("simple");
+    SolrFragmentsBuilder solrFbSO = highlighter.fragmentsBuilders.get("scoreOrder");
+    assertSame(solrFbNull, solrFbEmpty);
+    assertTrue(solrFbNull instanceof SimpleFragmentsBuilder);
+    assertTrue(solrFbSimple instanceof SimpleFragmentsBuilder);
+    assertTrue(solrFbSO instanceof ScoreOrderFragmentsBuilder);
+
     // Make sure we loaded two boundaryScanners
     SolrBoundaryScanner solrBsNull = highlighter.boundaryScanners.get(null);
     SolrBoundaryScanner solrBsEmpty = highlighter.boundaryScanners.get("");
@@ -67,30 +67,35 @@ public void testConfig(){
 
   @Test
   public void test() {
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("hl", "true");
     args.put("hl.fl", "tv_text");
     args.put("hl.snippets", "2");
-    args.put("hl.tag.pre", ""); //... and let post default to . This is just a test.
+    args.put("hl.tag.pre", ""); // ... and let post default to . This is just a test.
     if (random().nextBoolean()) {
       args.put("hl.useFastVectorHighlighter", "true"); // old way
     } else {
       args.put("hl.method", "fastVector"); // the new way
     }
-    
-    assertU(adoc("tv_text", "basic fast vector highlighter test", 
-                 "id", "1"));
+
+    assertU(
+        adoc(
+            "tv_text", "basic fast vector highlighter test",
+            "id", "1"));
     assertU(commit());
     assertU(optimize());
-    assertQ("Basic summarization",
+    assertQ(
+        "Basic summarization",
         req(new MapSolrParams(args), "q", "tv_text:vector"),
-            "//lst[@name='highlighting']/lst[@name='1']",
-            "//lst[@name='1']/arr[@name='tv_text']/str[.='basic fast vector highlighter test']"
-            );
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//lst[@name='1']/arr[@name='tv_text']/str[.='basic fast vector highlighter test']");
   }
 
   private static DefaultSolrHighlighter getHighlighter() {
-    var hl = (HighlightComponent) h.getCore().getSearchComponents().get(HighlightComponent.COMPONENT_NAME);
-    return (DefaultSolrHighlighter) hl.getHighlighter(new MapSolrParams(Map.of(HighlightParams.METHOD, "fastVector")));
+    var hl =
+        (HighlightComponent)
+            h.getCore().getSearchComponents().get(HighlightComponent.COMPONENT_NAME);
+    return (DefaultSolrHighlighter)
+        hl.getHighlighter(new MapSolrParams(Map.of(HighlightParams.METHOD, "fastVector")));
   }
 }
diff --git a/solr/core/src/test/org/apache/solr/highlight/HighlighterConfigTest.java b/solr/core/src/test/org/apache/solr/highlight/HighlighterConfigTest.java
index ec9ae9e8cbf..29d2d96ff2e 100644
--- a/solr/core/src/test/org/apache/solr/highlight/HighlighterConfigTest.java
+++ b/solr/core/src/test/org/apache/solr/highlight/HighlighterConfigTest.java
@@ -18,11 +18,10 @@
 
 import java.lang.invoke.MethodHandles;
 import java.util.HashMap;
-
 import java.util.Map;
+import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.params.MapSolrParams;
 import org.apache.solr.handler.component.HighlightComponent;
-import org.apache.solr.SolrTestCaseJ4;
 import org.junit.BeforeClass;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -52,12 +51,12 @@ public void tearDown() throws Exception {
 
   public void testConfig() {
     SolrHighlighter highlighter = getHighlighter();
-    log.info( "highlighter" );
+    log.info("highlighter");
 
-    assertTrue( highlighter instanceof DummyHighlighter );
+    assertTrue(highlighter instanceof DummyHighlighter);
 
     // check to see that doHighlight is called from the DummyHighlighter
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("hl", "true");
     args.put("hl.method", "original");
     args.put("df", "t_text");
@@ -66,16 +65,16 @@ public void testConfig() {
     assertU(adoc("t_text", "a long day's night", "id", "1"));
     assertU(commit());
     assertU(optimize());
-    assertQ("Basic summarization",
-            req(new MapSolrParams(args), "q", "long"),
-            "//lst[@name='highlighting']/str[@name='dummy']"
-            );
+    assertQ(
+        "Basic summarization",
+        req(new MapSolrParams(args), "q", "long"),
+        "//lst[@name='highlighting']/str[@name='dummy']");
   }
 
   private static SolrHighlighter getHighlighter() {
-    var hl = (HighlightComponent) h.getCore().getSearchComponents().get(HighlightComponent.COMPONENT_NAME);
+    var hl =
+        (HighlightComponent)
+            h.getCore().getSearchComponents().get(HighlightComponent.COMPONENT_NAME);
     return hl.getHighlighter(new MapSolrParams(Map.of("hl.method", "original")));
   }
 }
-
-
diff --git a/solr/core/src/test/org/apache/solr/highlight/HighlighterMaxOffsetTest.java b/solr/core/src/test/org/apache/solr/highlight/HighlighterMaxOffsetTest.java
index 5be67b9a2b8..efcc3d709c1 100644
--- a/solr/core/src/test/org/apache/solr/highlight/HighlighterMaxOffsetTest.java
+++ b/solr/core/src/test/org/apache/solr/highlight/HighlighterMaxOffsetTest.java
@@ -21,9 +21,7 @@
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-/**
- * Tests the max offset limit of highlighter
- */
+/** Tests the max offset limit of highlighter */
 public class HighlighterMaxOffsetTest extends SolrTestCaseJ4 {
 
   @BeforeClass
@@ -38,20 +36,20 @@ public void purgeDocs() throws Exception {
 
   public void testIndexedMultiValuedButSingleValuePassed() throws Exception {
 
-    assertU(adoc("indexed_multiValued", SHORT_TEXT,
-                 "id", "DOC1"));
+    assertU(adoc("indexed_multiValued", SHORT_TEXT, "id", "DOC1"));
 
     assertU(commit());
     assertHighlighting("indexed_multiValued");
-
   }
 
   @Test
   public void testIndexedMultiValued() throws Exception {
 
-    assertU(adoc("indexed_multiValued", REALY_LONG_TEXT,
-                 "indexed_multiValued", SHORT_TEXT,
-                 "id", "DOC1"));
+    assertU(
+        adoc(
+            "indexed_multiValued", REALY_LONG_TEXT,
+            "indexed_multiValued", SHORT_TEXT,
+            "id", "DOC1"));
 
     assertU(commit());
     assertHighlighting("indexed_multiValued");
@@ -60,8 +58,7 @@ public void testIndexedMultiValued() throws Exception {
   @Test
   public void testIndexedSingleValued() throws Exception {
 
-    assertU(adoc("indexed_singleValued", SHORT_TEXT,
-                 "id", "DOC1"));
+    assertU(adoc("indexed_singleValued", SHORT_TEXT, "id", "DOC1"));
 
     assertU(commit());
     assertHighlighting("indexed_singleValued");
@@ -70,9 +67,8 @@ public void testIndexedSingleValued() throws Exception {
   @Test
   public void testNonIndexedSingleValued() throws Exception {
 
-    assertU(adoc("non_indexed_singleValued", SHORT_TEXT,
-                 "id", "DOC1"));
-    
+    assertU(adoc("non_indexed_singleValued", SHORT_TEXT, "id", "DOC1"));
+
     assertU(commit());
     assertHighlighting("non_indexed_singleValued");
   }
@@ -80,9 +76,11 @@ public void testNonIndexedSingleValued() throws Exception {
   @Test
   public void testNonIndexedMultiValued() throws Exception {
 
-    assertU(adoc("non_indexed_multiValued", REALY_LONG_TEXT,
-                 "non_indexed_multiValued", SHORT_TEXT,
-                 "id", "DOC1"));
+    assertU(
+        adoc(
+            "non_indexed_multiValued", REALY_LONG_TEXT,
+            "non_indexed_multiValued", SHORT_TEXT,
+            "id", "DOC1"));
 
     assertU(commit());
     assertHighlighting("non_indexed_multiValued");
@@ -91,29 +89,36 @@ public void testNonIndexedMultiValued() throws Exception {
   @Test
   public void testText_general_rev() throws Exception {
 
-    assertU(adoc("content", REALY_LONG_TEXT,
-                 "content", SHORT_TEXT,
-                 "id", "DOC1"));
+    assertU(
+        adoc(
+            "content", REALY_LONG_TEXT,
+            "content", SHORT_TEXT,
+            "id", "DOC1"));
 
     assertU(commit());
     assertHighlighting("content");
   }
 
   private static void assertHighlighting(String field) throws Exception {
-    assertQ(req("q", "id:DOC1", 
-                "df", "content",
-                "hl.q", "JPEG",
-                "indent", "true", 
-                "hl", "true", 
-                "hl.fl", field, 
-                "hl.simple.pre", "", 
-                "hl.simple.post", ""),
-            "//lst[@name='highlighting']/lst[@name='DOC1']/arr[@name='"+field+"']/str[contains(., 'jpeg')]");
+    assertQ(
+        req(
+            "q", "id:DOC1",
+            "df", "content",
+            "hl.q", "JPEG",
+            "indent", "true",
+            "hl", "true",
+            "hl.fl", field,
+            "hl.simple.pre", "",
+            "hl.simple.post", ""),
+        "//lst[@name='highlighting']/lst[@name='DOC1']/arr[@name='"
+            + field
+            + "']/str[contains(., 'jpeg')]");
   }
 
-  private static String REALY_LONG_TEXT = "Green TRC=0.0, 0.0000763, 0.0001526, 0.0002289, 0.0003052, 0.0003815, 0.0004578, 0.0005341, 0.0006104, 0.0006867, 0.000763, 0.0008392, 0.0009003, 0.0009766, 0.0010529, 0.0011292, 0.0012055, 0.0012818, 0.0013581, 0.0014343, 0.0015106, 0.0015869, 0.0016632, 0.0017395, 0.0018158, 0.0018921, 0.0019684, 0.0020447, 0.002121, 0.0021973, 0.0022736, 0.0023499, 0.0024262, 0.0025025, 0.0025788, 0.0026551, 0.0027161, 0.0027924, 0.0028687, 0.002945, 0.0030213, 0.0030976, 0.0031739, 0.0032502, 0.0033417, 0.003418, 0.0034943, 0.0035859, 0.0036622, 0.0037537, 0.00383, 0.0039216, 0.0040131, 0.0041047, 0.0041962, 0.0042878, 0.0043793, 0.0044709, 0.0045624, 0.0046693, 0.0047608, 0.0048524, 0.0049592, 0.005066, 0.0051575, 0.0052644, 0.0053712, 0.005478, 0.0055848, 0.0056916, 0.0057984, 0.0059052, 0.0060273, 0.0061341, 0.0062562, 0.006363, 0.0064851, 0.0066072, 0.0067292, 0.0068513, 0.0069734, 0.0070954, 0.0072175, 0.0073396, 0.0074617, 0.007599, 0.0077211, 0.0078584, 0.0079957, 0.0081178, 0.0082551, 0.0083925, 0.0085298, 0.0086671, 0.0088045, 0.008957, 0.0090944, 0.0092317, 0.0093843, 0.0095369, 0.0096742, 0.0098268, 0.0099794, 0.010132, 0.0102846, 0.0104372, 0.0105898, 0.0107576, 0.0109102, 0.0110628, 0.0112306, 0.0113985, 0.0115511, 0.0117189, 0.0118868, 0.0120546, 0.0122225, 0.0124056, 0.0125734, 0.0127413, 0.0129244, 0.0130922, 0.0132753, 0.0134585, 0.0136416, 0.0138247, 0.0140078, 0.0141909, 0.014374, 0.0145571, 0.0147555, 0.0149386, 0.0151369, 0.0153201, 0.0155184, 0.0157168, 0.0159152, 0.0161135, 0.0163119, 0.0165255, 0.0167239, 0.0169223, 0.0171359, 0.0173495, 0.0175479, 0.0177615, 0.0179751, 0.0181888, 0.0184024, 0.018616, 0.0188449, 0.0190585, 0.0192874, 0.019501, 0.0197299, 0.0199588, 0.0201877, 0.0204166, 0.0206455, 0.0208743, 0.0211032, 0.0213474, 0.0215763, 0.0218204, 0.0220645, 0.0222934, 0.0225376, 0.0227817, 0.0230259, 0.0232853, 0.0235294, 0.0237736, 0.024033, 0.0242771, 0.0245365, 0.0247959, 0.0250553, 0.0253147, 0.0255741, 0.0258335, 0.0261082, 0.0263676, 0.026627, 0.0269017, 0.0271763, 0.027451, 0.0277256, 0.0280003, 0.028275, 0.0285496, 0.0288243, 0.0291142, 0.0293889, 0.0296788, 0.0299687, 0.0302586, 0.0305486, 0.0308385, 0.0311284, 0.0314183, 0.0317235, 0.0320134, 0.0323186, 0.0326238, 0.032929, 0.0332341, 0.0335393, 0.0338445, 0.0341497, 0.0344549, 0.0347753, 0.0350805, 0.0354009, 0.0357214, 0.0360418, 0.0363622, 0.0366827, 0.0370031, 0.0373388, 0.0376593, 0.037995, 0.0383154, 0.0386511, 0.0389868, 0.0393225, 0.0396582, 0.0399939, 0.0403449, 0.0406806, 0.0410315, 0.0413825, 0.0417182, 0.0420691, 0.0424201, 0.042771, 0.0431373, 0.0434882, 0.0438392, 0.0442054, 0.0445716, 0.0449226, 0.0452888, 0.045655, 0.0460212, 0.0464027, 0.0467689, 0.0471504, 0.0475166, 0.0478981, 0.0482795, 0.048661, 0.0490425, 0.049424, 0.0498054, 0.0501869, 0.0505837, 0.0509804, 0.0513619, 0.0517586, 0.0521553, 0.0525521, 0.0529488, 0.0533608, 0.0537575, 0.0541695, 0.0545663, 0.0549783, 0.0553902, 0.0558022, 0.0562142, 0.0566262, 0.0570535, 0.0574655, 0.0578927, 0.05832, 0.058732, 0.0591592, 0.0595865, 0.060029, 0.0604562, 0.0608835, 0.061326, 0.0617533, 0.0621958, 0.0626383, 0.0630808, 0.0635233, 0.0639811, 0.0644236, 0.0648661, 0.0653239, 0.0657816, 0.0662394, 0.0666972, 0.067155, 0.0676127, 0.0680705, 0.0685435, 0.0690013, 0.0694743, 0.0699474, 0.0704204, 0.0708934, 0.0713664, 0.0718395, 0.0723278, 0.0728008, 0.0732891, 0.0737774, 0.0742657, 0.0747539, 0.0752422, 0.0757305, 0.0762188, 0.0767224, 0.0772259, 0.0777142, 0.0782177, 0.0787213, 0.0792401, 0.0797436, 
0.0802472, 0.080766, 0.0812696, 0.0817884, 0.0823072, 0.082826, 0.0833448, 0.0838636, 0.0843977, 0.0849165, 0.0854505, 0.0859846, 0.0865187, 0.0870527, 0.0875868, 0.0881209, 0.0886549, 0.0892042, 0.0897536, 0.0902876, 0.090837, 0.0913863, 0.0919356, 0.0925002, 0.0930495, 0.0936141, 0.0941634, 0.094728, 0.0952926, 0.0958572, 0.0964218, 0.0970016, 0.0975662, 0.098146, 0.0987106, 0.0992905, 0.0998703, 0.1004501, 0.10103, 0.1016251, 0.1022049, 0.1028, 0.1033799, 0.103975, 0.1045701, 0.1051652, 0.1057755, 0.1063706, 0.106981, 0.1075761, 0.1081865, 0.1087968, 0.1094072, 0.1100175, 0.1106279, 0.1112535, 0.1118639, 0.1124895, 0.1131151, 0.1137407, 0.1143664, 0.114992, 0.1156176, 0.1162585, 0.1168841, 0.117525, 0.1181659, 0.1188067, 0.1194476, 0.1200885, 0.1207446, 0.1213855, 0.1220417, 0.1226978, 0.1233539, 0.1240101, 0.1246662, 0.1253223, 0.1259937, 0.1266499, 0.1273213, 0.1279927, 0.1286641, 0.1293355, 0.1300069, 0.1306935, 0.1313649, 0.1320516, 0.1327382, 0.1334096, 0.1341115, 0.1347982, 0.1354849, 0.1361868, 0.1368734, 0.1375753, 0.1382773, 0.1389792, 0.1396811, 0.140383, 0.1411002, 0.1418021, 0.1425193, 0.1432364, 0.1439536, 0.1446708, 0.145388, 0.1461204, 0.1468376, 0.14757, 0.1483024, 0.1490349, 0.1497673, 0.1504997, 0.1512322, 0.1519799, 0.1527123, 0.15346, 0.1542077, 0.1549554, 0.1557031, 0.1564508, 0.1572137, 0.1579767, 0.1587243, 0.1594873, 0.1602502, 0.1610132, 0.1617914, 0.1625544, 0.1633326, 0.1640955, 0.1648737, 0.1656519, 0.1664302, 0.1672236, 0.1680018, 0.1687953, 0.1695735, 0.170367, 0.1711604, 0.1719539, 0.1727474, 0.1735561, 0.1743496, 0.1751583, 0.175967, 0.1767758, 0.1775845, 0.1783932, 0.1792172, 0.1800259, 0.1808499, 0.1816739, 0.1824826, 0.1833219, 0.1841459, 0.1849699, 0.1858091, 0.1866331, 0.1874723, 0.1883116, 0.1891508, 0.1900053, 0.1908446, 0.1916838, 0.1925383, 0.1933928, 0.1942473, 0.1951019, 0.1959564, 0.1968261, 0.1976806, 0.1985504, 0.1994202, 0.2002899, 0.2011597, 0.2020294, 0.2028992, 0.2037842, 0.2046693, 0.205539, 0.206424, 0.2073243, 0.2082094, 0.2090944, 0.2099947, 0.2108949, 0.21178, 0.2126802, 0.2135958, 0.2144961, 0.2153964, 0.2163119, 0.2172274, 0.2181277, 0.2190585, 0.2199741, 0.2208896, 0.2218051, 0.2227359, 0.2236667, 0.2245975, 0.2255283, 0.2264591, 0.2273899, 0.228336, 0.2292821, 0.2302129, 0.2311589, 0.232105, 0.2330663, 0.2340124, 0.2349737, 0.2359197, 0.2368811, 0.2378424, 0.2388037, 0.239765, 0.2407416, 0.2417029, 0.2426795, 0.2436561, 0.2446326, 0.2456092, 0.2466011, 0.2475776, 0.2485695, 0.249546, 0.2505379, 0.2515297, 0.2525368, 0.2535286, 0.2545357, 0.2555276, 0.2565347, 0.2575418, 0.2585489, 0.259556, 0.2605783, 0.2615854, 0.2626078, 0.2636301, 0.2646525, 0.2656748, 0.2667124, 0.2677348, 0.2687724, 0.26981, 0.2708324, 0.2718853, 0.2729229, 0.2739605, 0.2750134, 0.276051, 0.2771038, 0.2781567, 0.2792248, 0.2802777, 0.2813306, 0.2823987, 0.2834668, 0.284535, 0.2856031, 0.2866712, 0.2877394, 0.2888228, 0.2899062, 0.2909743, 0.2920577, 0.2931563, 0.2942397, 0.2953231, 0.2964218, 0.2975204, 0.2986191, 0.2997177, 0.3008164, 0.301915, 0.3030289, 0.3041428, 0.3052567, 0.3063706, 0.3074846, 0.3085985, 0.3097124, 0.3108415, 0.3119707, 0.3130999, 0.314229, 0.3153582, 0.3165026, 0.3176318, 0.3187762, 0.3199207, 0.3210651, 0.3222095, 0.3233539, 0.3245136, 0.3256733, 0.3268177, 0.3279774, 0.3291371, 0.330312, 0.3314717, 0.3326467, 0.3338216, 0.3349966, 0.3361715, 0.3373465, 0.3385214, 0.3397116, 0.3408865, 0.3420768, 0.343267, 0.3444724, 0.3456626, 0.3468528, 0.3480583, 0.3492638, 0.3504692, 0.3516747, 0.3528801, 0.3541009, 0.3553063, 0.356527, 
0.3577478, 0.3589685, 0.3601892, 0.3614252, 0.3626459, 0.3638819, 0.3651179, 0.3663539, 0.3675898, 0.3688411, 0.3700771, 0.3713283, 0.3725795, 0.3738308, 0.375082, 0.3763333, 0.3775998, 0.378851, 0.3801175, 0.381384, 0.3826505, 0.3839322, 0.3851987, 0.3864805, 0.387747, 0.3890288, 0.3903105, 0.3916075, 0.3928893, 0.3941863, 0.3954681, 0.3967651, 0.3980621, 0.3993744, 0.4006714, 0.4019837, 0.4032807, 0.404593, 0.4059052, 0.4072175, 0.4085451, 0.4098573, 0.4111849, 0.4125124, 0.4138399, 0.4151675, 0.416495, 0.4178378, 0.4191806, 0.4205234, 0.4218662, 0.423209, 0.4245518, 0.4259098, 0.4272526, 0.4286107, 0.4299687, 0.4313268, 0.4326848, 0.4340581, 0.4354314, 0.4367895, 0.4381628, 0.4395514, 0.4409247, 0.442298, 0.4436866, 0.4450752, 0.4464637, 0.4478523, 0.4492409, 0.4506447, 0.4520333, 0.4534371, 0.4548409, 0.4562448, 0.4576486, 0.4590677, 0.4604715, 0.4618906, 0.4633097, 0.4647288, 0.4661631, 0.4675822, 0.4690166, 0.4704356, 0.47187, 0.4733043, 0.4747539, 0.4761883, 0.4776379, 0.4790875, 0.4805371, 0.4819867, 0.4834363, 0.4848859, 0.4863508, 0.4878157, 0.4892805, 0.4907454, 0.4922103, 0.4936904, 0.4951553, 0.4966354, 0.4981155, 0.4995956, 0.501091, 0.5025711, 0.5040665, 0.5055467, 0.507042, 0.5085527, 0.5100481, 0.5115435, 0.5130541, 0.5145647, 0.5160754, 0.517586, 0.5190967, 0.5206226, 0.5221485, 0.5236591, 0.525185, 0.5267262, 0.5282521, 0.529778, 0.5313191, 0.5328603, 0.5344015, 0.5359426, 0.537499, 0.5390402, 0.5405966, 0.542153, 0.5437095, 0.5452659, 0.5468223, 0.548394, 0.5499657, 0.5515373, 0.553109, 0.5546807, 0.5562524, 0.5578393, 0.5594263, 0.5610132, 0.5626001, 0.5641871, 0.565774, 0.5673762, 0.5689784, 0.5705806, 0.5721828, 0.573785, 0.5754025, 0.5770047, 0.5786221, 0.5802396, 0.581857, 0.5834897, 0.5851072, 0.5867399, 0.5883726, 0.5900053, 0.5916381, 0.5932708, 0.5949187, 0.5965667, 0.5982147, 0.5998627, 0.6015106, 0.6031586, 0.6048219, 0.6064851, 0.6081483, 0.6098116, 0.6114748, 0.6131533, 0.6148165, 0.616495, 0.6181735, 0.619852, 0.6215457, 0.6232242, 0.624918, 0.6266117, 0.6283055, 0.6299992, 0.631693, 0.633402, 0.635111, 0.63682, 0.638529, 0.640238, 0.6419471, 0.6436713, 0.6453956, 0.6471199, 0.6488441, 0.6505684, 0.6523079, 0.6540322, 0.6557717, 0.6575113, 0.6592508, 0.6610056, 0.6627451, 0.6644999, 0.6662547, 0.6680095, 0.6697642, 0.6715343, 0.6732891, 0.6750591, 0.6768292, 0.6785992, 0.6803845, 0.6821546, 0.6839399, 0.6857252, 0.6875105, 0.6892958, 0.6910811, 0.6928817, 0.6946822, 0.6964675, 0.6982834, 0.7000839, 0.7018845, 0.7037003, 0.7055161, 0.707332, 0.7091478, 0.7109636, 0.7127947, 0.7146105, 0.7164416, 0.7182727, 0.720119, 0.7219501, 0.7237964, 0.7256275, 0.7274739, 0.7293355, 0.7311818, 0.7330282, 0.7348898, 0.7367514, 0.738613, 0.7404746, 0.7423514, 0.744213, 0.7460899, 0.7479667, 0.7498436, 0.7517205, 0.7536126, 0.7554894, 0.7573816, 0.7592737, 0.7611658, 0.7630732, 0.7649653, 0.7668727, 0.76878, 0.7706874, 0.7725948, 0.7745174, 0.7764248, 0.7783474, 0.7802701, 0.7821927, 0.7841306, 0.7860533, 0.7879911, 0.789929, 0.7918669, 0.7938048, 0.795758, 0.7976959, 0.799649, 0.8016022, 0.8035554, 0.8055238, 0.8074769, 0.8094453, 0.8114137, 0.8133822, 0.8153506, 0.8173342, 0.8193179, 0.8212863, 0.82327, 0.8252689, 0.8272526, 0.8292515, 0.8312352, 0.8332341, 0.8352331, 0.8372473, 0.8392462, 0.8412604, 0.8432746, 0.8452888, 0.847303, 0.8493172, 0.8513466, 0.8533761, 0.8554055, 0.857435, 0.8594644, 0.8614939, 0.8635386, 0.8655833, 0.867628, 0.8696727, 0.8717327, 0.8737774, 0.8758373, 0.8778973, 0.8799573, 0.8820325, 0.8840925, 0.8861677, 0.8882429, 0.8903182, 0.8923934, 
0.8944839, 0.8965591, 0.8986496, 0.9007401, 0.9028305, 0.9049363, 0.9070268, 0.9091325, 0.9112383, 0.913344, 0.915465, 0.9175708, 0.9196918, 0.9218128, 0.9239338, 0.9260548, 0.9281758, 0.930312, 0.9324483, 0.9345846, 0.9367208, 0.9388571, 0.9410086, 0.9431601, 0.9453117, 0.9474632, 0.9496147, 0.9517815, 0.953933, 0.9560998, 0.9582666, 0.9604334, 0.9626154, 0.9647822, 0.9669642, 0.9691463, 0.9713283, 0.9735256, 0.9757076, 0.9779049, 0.9801022, 0.9822995, 0.9844968, 0.9867094, 0.988922, 0.9911345, 0.9933471, 0.9955596, 0.9977722, 1.0 "
-      + "Rewed TRC=0.0, 0.0000763, 0.0001526, 0.0002289, 0.0003052, 0.0003815, 0.0004578, 0.0005341, 0.0006104, 0.0006867, 0.000763, 0.0008392, 0.0009003, 0.0009766, 0.0010529, 0.0011292, 0.0012055, 0.0012818, 0.0013581, 0.0014343, 0.0015106, 0.0015869, 0.0016632, 0.0017395, 0.0018158, 0.0018921, 0.0019684, 0.0020447, 0.002121, 0.0021973, 0.0022736, 0.0023499, 0.0024262, 0.0025025, 0.0025788, 0.0026551, 0.0027161, 0.0027924, 0.0028687, 0.002945, 0.0030213, 0.0030976, 0.0031739, 0.0032502, 0.0033417, 0.003418, 0.0034943, 0.0035859, 0.0036622, 0.0037537, 0.00383, 0.0039216, 0.0040131, 0.0041047, 0.0041962, 0.0042878, 0.0043793, 0.0044709, 0.0045624, 0.0046693, 0.0047608, 0.0048524, 0.0049592, 0.005066, 0.0051575, 0.0052644, 0.0053712, 0.005478, 0.0055848, 0.0056916, 0.0057984, 0.0059052, 0.0060273, 0.0061341, 0.0062562, 0.006363, 0.0064851, 0.0066072, 0.0067292, 0.0068513, 0.0069734, 0.0070954, 0.0072175, 0.0073396, 0.0074617, 0.007599, 0.0077211, 0.0078584, 0.0079957, 0.0081178, 0.0082551, 0.0083925, 0.0085298, 0.0086671, 0.0088045, 0.008957, 0.0090944, 0.0092317, 0.0093843, 0.0095369, 0.0096742, 0.0098268, 0.0099794, 0.010132, 0.0102846, 0.0104372, 0.0105898, 0.0107576, 0.0109102, 0.0110628, 0.0112306, 0.0113985, 0.0115511, 0.0117189, 0.0118868, 0.0120546, 0.0122225, 0.0124056, 0.0125734, 0.0127413, 0.0129244, 0.0130922, 0.0132753, 0.0134585, 0.0136416, 0.0138247, 0.0140078, 0.0141909, 0.014374, 0.0145571, 0.0147555, 0.0149386, 0.0151369, 0.0153201, 0.0155184, 0.0157168, 0.0159152, 0.0161135, 0.0163119, 0.0165255, 0.0167239, 0.0169223, 0.0171359, 0.0173495, 0.0175479, 0.0177615, 0.0179751, 0.0181888, 0.0184024, 0.018616, 0.0188449, 0.0190585, 0.0192874, 0.019501, 0.0197299, 0.0199588, 0.0201877, 0.0204166, 0.0206455, 0.0208743, 0.0211032, 0.0213474, 0.0215763, 0.0218204, 0.0220645, 0.0222934, 0.0225376, 0.0227817, 0.0230259, 0.0232853, 0.0235294, 0.0237736, 0.024033, 0.0242771, 0.0245365, 0.0247959, 0.0250553, 0.0253147, 0.0255741, 0.0258335, 0.0261082, 0.0263676, 0.026627, 0.0269017, 0.0271763, 0.027451, 0.0277256, 0.0280003, 0.028275, 0.0285496, 0.0288243, 0.0291142, 0.0293889, 0.0296788, 0.0299687, 0.0302586, 0.0305486, 0.0308385, 0.0311284, 0.0314183, 0.0317235, 0.0320134, 0.0323186, 0.0326238, 0.032929, 0.0332341, 0.0335393, 0.0338445, 0.0341497, 0.0344549, 0.0347753, 0.0350805, 0.0354009, 0.0357214, 0.0360418, 0.0363622, 0.0366827, 0.0370031, 0.0373388, 0.0376593, 0.037995, 0.0383154, 0.0386511, 0.0389868, 0.0393225, 0.0396582, 0.0399939, 0.0403449, 0.0406806, 0.0410315, 0.0413825, 0.0417182, 0.0420691, 0.0424201, 0.042771, 0.0431373, 0.0434882, 0.0438392, 0.0442054, 0.0445716, 0.0449226, 0.0452888, 0.045655, 0.0460212, 0.0464027, 0.0467689, 0.0471504, 0.0475166, 0.0478981, 0.0482795, 0.048661, 0.0490425, 0.049424, 0.0498054, 0.0501869, 0.0505837, 0.0509804, 0.0513619, 0.0517586, 0.0521553, 0.0525521, 0.0529488, 0.0533608, 0.0537575, 0.0541695, 0.0545663, 0.0549783, 0.0553902, 0.0558022, 0.0562142, 0.0566262, 0.0570535, 0.0574655, 0.0578927, 0.05832, 0.058732, 0.0591592, 0.0595865, 0.060029, 0.0604562, 0.0608835, 0.061326, 0.0617533, 0.0621958, 0.0626383, 0.0630808, 0.0635233, 0.0639811, 0.0644236, 0.0648661, 0.0653239, 0.0657816, 0.0662394, 0.0666972, 0.067155, 0.0676127, 0.0680705, 0.0685435, 0.0690013, 0.0694743, 0.0699474, 0.0704204, 0.0708934, 0.0713664, 0.0718395, 0.0723278, 0.0728008, 0.0732891, 0.0737774, 0.0742657, 0.0747539, 0.0752422, 0.0757305, 0.0762188, 0.0767224, 0.0772259, 0.0777142, 0.0782177, 0.0787213, 0.0792401, 0.0797436, 0.0802472, 0.080766, 0.0812696, 
0.0817884, 0.0823072, 0.082826, 0.0833448, 0.0838636, 0.0843977, 0.0849165, 0.0854505, 0.0859846, 0.0865187, 0.0870527, 0.0875868, 0.0881209, 0.0886549, 0.0892042, 0.0897536, 0.0902876, 0.090837, 0.0913863, 0.0919356, 0.0925002, 0.0930495, 0.0936141, 0.0941634, 0.094728, 0.0952926, 0.0958572, 0.0964218, 0.0970016, 0.0975662, 0.098146, 0.0987106, 0.0992905, 0.0998703, 0.1004501, 0.10103, 0.1016251, 0.1022049, 0.1028, 0.1033799, 0.103975, 0.1045701, 0.1051652, 0.1057755, 0.1063706, 0.106981, 0.1075761, 0.1081865, 0.1087968, 0.1094072, 0.1100175, 0.1106279, 0.1112535, 0.1118639, 0.1124895, 0.1131151, 0.1137407, 0.1143664, 0.114992, 0.1156176, 0.1162585, 0.1168841, 0.117525, 0.1181659, 0.1188067, 0.1194476, 0.1200885, 0.1207446, 0.1213855, 0.1220417, 0.1226978, 0.1233539, 0.1240101, 0.1246662, 0.1253223, 0.1259937, 0.1266499, 0.1273213, 0.1279927, 0.1286641, 0.1293355, 0.1300069, 0.1306935, 0.1313649, 0.1320516, 0.1327382, 0.1334096, 0.1341115, 0.1347982, 0.1354849, 0.1361868, 0.1368734, 0.1375753, 0.1382773, 0.1389792, 0.1396811, 0.140383, 0.1411002, 0.1418021, 0.1425193, 0.1432364, 0.1439536, 0.1446708, 0.145388, 0.1461204, 0.1468376, 0.14757, 0.1483024, 0.1490349, 0.1497673, 0.1504997, 0.1512322, 0.1519799, 0.1527123, 0.15346, 0.1542077, 0.1549554, 0.1557031, 0.1564508, 0.1572137, 0.1579767, 0.1587243, 0.1594873, 0.1602502, 0.1610132, 0.1617914, 0.1625544, 0.1633326, 0.1640955, 0.1648737, 0.1656519, 0.1664302, 0.1672236, 0.1680018, 0.1687953, 0.1695735, 0.170367, 0.1711604, 0.1719539, 0.1727474, 0.1735561, 0.1743496, 0.1751583, 0.175967, 0.1767758, 0.1775845, 0.1783932, 0.1792172, 0.1800259, 0.1808499, 0.1816739, 0.1824826, 0.1833219, 0.1841459, 0.1849699, 0.1858091, 0.1866331, 0.1874723, 0.1883116, 0.1891508, 0.1900053, 0.1908446, 0.1916838, 0.1925383, 0.1933928, 0.1942473, 0.1951019, 0.1959564, 0.1968261, 0.1976806, 0.1985504, 0.1994202, 0.2002899, 0.2011597, 0.2020294, 0.2028992, 0.2037842, 0.2046693, 0.205539, 0.206424, 0.2073243, 0.2082094, 0.2090944, 0.2099947, 0.2108949, 0.21178, 0.2126802, 0.2135958, 0.2144961, 0.2153964, 0.2163119, 0.2172274, 0.2181277, 0.2190585, 0.2199741, 0.2208896, 0.2218051, 0.2227359, 0.2236667, 0.2245975, 0.2255283, 0.2264591, 0.2273899, 0.228336, 0.2292821, 0.2302129, 0.2311589, 0.232105, 0.2330663, 0.2340124, 0.2349737, 0.2359197, 0.2368811, 0.2378424, 0.2388037, 0.239765, 0.2407416, 0.2417029, 0.2426795, 0.2436561, 0.2446326, 0.2456092, 0.2466011, 0.2475776, 0.2485695, 0.249546, 0.2505379, 0.2515297, 0.2525368, 0.2535286, 0.2545357, 0.2555276, 0.2565347, 0.2575418, 0.2585489, 0.259556, 0.2605783, 0.2615854, 0.2626078, 0.2636301, 0.2646525, 0.2656748, 0.2667124, 0.2677348, 0.2687724, 0.26981, 0.2708324, 0.2718853, 0.2729229, 0.2739605, 0.2750134, 0.276051, 0.2771038, 0.2781567, 0.2792248, 0.2802777, 0.2813306, 0.2823987, 0.2834668, 0.284535, 0.2856031, 0.2866712, 0.2877394, 0.2888228, 0.2899062, 0.2909743, 0.2920577, 0.2931563, 0.2942397, 0.2953231, 0.2964218, 0.2975204, 0.2986191, 0.2997177, 0.3008164, 0.301915, 0.3030289, 0.3041428, 0.3052567, 0.3063706, 0.3074846, 0.3085985, 0.3097124, 0.3108415, 0.3119707, 0.3130999, 0.314229, 0.3153582, 0.3165026, 0.3176318, 0.3187762, 0.3199207, 0.3210651, 0.3222095, 0.3233539, 0.3245136, 0.3256733, 0.3268177, 0.3279774, 0.3291371, 0.330312, 0.3314717, 0.3326467, 0.3338216, 0.3349966, 0.3361715, 0.3373465, 0.3385214, 0.3397116, 0.3408865, 0.3420768, 0.343267, 0.3444724, 0.3456626, 0.3468528, 0.3480583, 0.3492638, 0.3504692, 0.3516747, 0.3528801, 0.3541009, 0.3553063, 0.356527, 0.3577478, 0.3589685, 0.3601892, 
0.3614252, 0.3626459, 0.3638819, 0.3651179, 0.3663539, 0.3675898, 0.3688411, 0.3700771, 0.3713283, 0.3725795, 0.3738308, 0.375082, 0.3763333, 0.3775998, 0.378851, 0.3801175, 0.381384, 0.3826505, 0.3839322, 0.3851987, 0.3864805, 0.387747, 0.3890288, 0.3903105, 0.3916075, 0.3928893, 0.3941863, 0.3954681, 0.3967651, 0.3980621, 0.3993744, 0.4006714, 0.4019837, 0.4032807, 0.404593, 0.4059052, 0.4072175, 0.4085451, 0.4098573, 0.4111849, 0.4125124, 0.4138399, 0.4151675, 0.416495, 0.4178378, 0.4191806, 0.4205234, 0.4218662, 0.423209, 0.4245518, 0.4259098, 0.4272526, 0.4286107, 0.4299687, 0.4313268, 0.4326848, 0.4340581, 0.4354314, 0.4367895, 0.4381628, 0.4395514, 0.4409247, 0.442298, 0.4436866, 0.4450752, 0.4464637, 0.4478523, 0.4492409, 0.4506447, 0.4520333, 0.4534371, 0.4548409, 0.4562448, 0.4576486, 0.4590677, 0.4604715, 0.4618906, 0.4633097, 0.4647288, 0.4661631, 0.4675822, 0.4690166, 0.4704356, 0.47187, 0.4733043, 0.4747539, 0.4761883, 0.4776379, 0.4790875, 0.4805371, 0.4819867, 0.4834363, 0.4848859, 0.4863508, 0.4878157, 0.4892805, 0.4907454, 0.4922103, 0.4936904, 0.4951553, 0.4966354, 0.4981155, 0.4995956, 0.501091, 0.5025711, 0.5040665, 0.5055467, 0.507042, 0.5085527, 0.5100481, 0.5115435, 0.5130541, 0.5145647, 0.5160754, 0.517586, 0.5190967, 0.5206226, 0.5221485, 0.5236591, 0.525185, 0.5267262, 0.5282521, 0.529778, 0.5313191, 0.5328603, 0.5344015, 0.5359426, 0.537499, 0.5390402, 0.5405966, 0.542153, 0.5437095, 0.5452659, 0.5468223, 0.548394, 0.5499657, 0.5515373, 0.553109, 0.5546807, 0.5562524, 0.5578393, 0.5594263, 0.5610132, 0.5626001, 0.5641871, 0.565774, 0.5673762, 0.5689784, 0.5705806, 0.5721828, 0.573785, 0.5754025, 0.5770047, 0.5786221, 0.5802396, 0.581857, 0.5834897, 0.5851072, 0.5867399, 0.5883726, 0.5900053, 0.5916381, 0.5932708, 0.5949187, 0.5965667, 0.5982147, 0.5998627, 0.6015106, 0.6031586, 0.6048219, 0.6064851, 0.6081483, 0.6098116, 0.6114748, 0.6131533, 0.6148165, 0.616495, 0.6181735, 0.619852, 0.6215457, 0.6232242, 0.624918, 0.6266117, 0.6283055, 0.6299992, 0.631693, 0.633402, 0.635111, 0.63682, 0.638529, 0.640238, 0.6419471, 0.6436713, 0.6453956, 0.6471199, 0.6488441, 0.6505684, 0.6523079, 0.6540322, 0.6557717, 0.6575113, 0.6592508, 0.6610056, 0.6627451, 0.6644999, 0.6662547, 0.6680095, 0.6697642, 0.6715343, 0.6732891, 0.6750591, 0.6768292, 0.6785992, 0.6803845, 0.6821546, 0.6839399, 0.6857252, 0.6875105, 0.6892958, 0.6910811, 0.6928817, 0.6946822, 0.6964675, 0.6982834, 0.7000839, 0.7018845, 0.7037003, 0.7055161, 0.707332, 0.7091478, 0.7109636, 0.7127947, 0.7146105, 0.7164416, 0.7182727, 0.720119, 0.7219501, 0.7237964, 0.7256275, 0.7274739, 0.7293355, 0.7311818, 0.7330282, 0.7348898, 0.7367514, 0.738613, 0.7404746, 0.7423514, 0.744213, 0.7460899, 0.7479667, 0.7498436, 0.7517205, 0.7536126, 0.7554894, 0.7573816, 0.7592737, 0.7611658, 0.7630732, 0.7649653, 0.7668727, 0.76878, 0.7706874, 0.7725948, 0.7745174, 0.7764248, 0.7783474, 0.7802701, 0.7821927, 0.7841306, 0.7860533, 0.7879911, 0.789929, 0.7918669, 0.7938048, 0.795758, 0.7976959, 0.799649, 0.8016022, 0.8035554, 0.8055238, 0.8074769, 0.8094453, 0.8114137, 0.8133822, 0.8153506, 0.8173342, 0.8193179, 0.8212863, 0.82327, 0.8252689, 0.8272526, 0.8292515, 0.8312352, 0.8332341, 0.8352331, 0.8372473, 0.8392462, 0.8412604, 0.8432746, 0.8452888, 0.847303, 0.8493172, 0.8513466, 0.8533761, 0.8554055, 0.857435, 0.8594644, 0.8614939, 0.8635386, 0.8655833, 0.867628, 0.8696727, 0.8717327, 0.8737774, 0.8758373, 0.8778973, 0.8799573, 0.8820325, 0.8840925, 0.8861677, 0.8882429, 0.8903182, 0.8923934, 0.8944839, 0.8965591, 0.8986496, 
0.9007401, 0.9028305, 0.9049363, 0.9070268, 0.9091325, 0.9112383, 0.913344, 0.915465, 0.9175708, 0.9196918, 0.9218128, 0.9239338, 0.9260548, 0.9281758, 0.930312, 0.9324483, 0.9345846, 0.9367208, 0.9388571, 0.9410086, 0.9431601, 0.9453117, 0.9474632, 0.9496147, 0.9517815, 0.953933, 0.9560998, 0.9582666, 0.9604334, 0.9626154, 0.9647822, 0.9669642, 0.9691463, 0.9713283, 0.9735256, 0.9757076, 0.9779049, 0.9801022, 0.9822995, 0.9844968, 0.9867094, 0.988922, 0.9911345, 0.9933471, 0.9955596, 0.9977722, 1.0";
-
-  private static String SHORT_TEXT = "Comments=CREATOR: gd-jpeg v1.0 (using IJG JPEG v62), quality = 100 Component 1=Y component: Quantization table 0, Sampling factors 2 horiz/2 vert Component 2=Cb component: Quantization table 1, Sampling factors 1 horiz/1 vert Component 3=Cr component: Quantization table 1, Sampling factors 1 horiz/1 vert Compression Type=Baseline Content-Length=832251 Content-Type=image/jpeg Data Precision=8 bits Image Height=1200 pixels Image Width=1600 pixels Jpeg Comment=CREATOR: gd-jpeg v1.0 (using IJG JPEG v62), quality = 100 Number of Components=3 Resolution Units=none X Resolution=1 dot Y Resolution=1 dot comment=CREATOR: gd-jpeg v1.0 (using IJG JPEG v62), quality = 100 resourceName=9.jpg tiff:BitsPerSample=8 tiff:ImageLength=1200 tiff:ImageWidth=1600 w:comments=CREATOR: gd-jpeg v1.0 (using IJG JPEG v62), quality = 100";
+  private static String REALY_LONG_TEXT =
+      "Green TRC=0.0, 0.0000763, 0.0001526, 0.0002289, 0.0003052, 0.0003815, 0.0004578, 0.0005341, 0.0006104, 0.0006867, 0.000763, 0.0008392, 0.0009003, 0.0009766, 0.0010529, 0.0011292, 0.0012055, 0.0012818, 0.0013581, 0.0014343, 0.0015106, 0.0015869, 0.0016632, 0.0017395, 0.0018158, 0.0018921, 0.0019684, 0.0020447, 0.002121, 0.0021973, 0.0022736, 0.0023499, 0.0024262, 0.0025025, 0.0025788, 0.0026551, 0.0027161, 0.0027924, 0.0028687, 0.002945, 0.0030213, 0.0030976, 0.0031739, 0.0032502, 0.0033417, 0.003418, 0.0034943, 0.0035859, 0.0036622, 0.0037537, 0.00383, 0.0039216, 0.0040131, 0.0041047, 0.0041962, 0.0042878, 0.0043793, 0.0044709, 0.0045624, 0.0046693, 0.0047608, 0.0048524, 0.0049592, 0.005066, 0.0051575, 0.0052644, 0.0053712, 0.005478, 0.0055848, 0.0056916, 0.0057984, 0.0059052, 0.0060273, 0.0061341, 0.0062562, 0.006363, 0.0064851, 0.0066072, 0.0067292, 0.0068513, 0.0069734, 0.0070954, 0.0072175, 0.0073396, 0.0074617, 0.007599, 0.0077211, 0.0078584, 0.0079957, 0.0081178, 0.0082551, 0.0083925, 0.0085298, 0.0086671, 0.0088045, 0.008957, 0.0090944, 0.0092317, 0.0093843, 0.0095369, 0.0096742, 0.0098268, 0.0099794, 0.010132, 0.0102846, 0.0104372, 0.0105898, 0.0107576, 0.0109102, 0.0110628, 0.0112306, 0.0113985, 0.0115511, 0.0117189, 0.0118868, 0.0120546, 0.0122225, 0.0124056, 0.0125734, 0.0127413, 0.0129244, 0.0130922, 0.0132753, 0.0134585, 0.0136416, 0.0138247, 0.0140078, 0.0141909, 0.014374, 0.0145571, 0.0147555, 0.0149386, 0.0151369, 0.0153201, 0.0155184, 0.0157168, 0.0159152, 0.0161135, 0.0163119, 0.0165255, 0.0167239, 0.0169223, 0.0171359, 0.0173495, 0.0175479, 0.0177615, 0.0179751, 0.0181888, 0.0184024, 0.018616, 0.0188449, 0.0190585, 0.0192874, 0.019501, 0.0197299, 0.0199588, 0.0201877, 0.0204166, 0.0206455, 0.0208743, 0.0211032, 0.0213474, 0.0215763, 0.0218204, 0.0220645, 0.0222934, 0.0225376, 0.0227817, 0.0230259, 0.0232853, 0.0235294, 0.0237736, 0.024033, 0.0242771, 0.0245365, 0.0247959, 0.0250553, 0.0253147, 0.0255741, 0.0258335, 0.0261082, 0.0263676, 0.026627, 0.0269017, 0.0271763, 0.027451, 0.0277256, 0.0280003, 0.028275, 0.0285496, 0.0288243, 0.0291142, 0.0293889, 0.0296788, 0.0299687, 0.0302586, 0.0305486, 0.0308385, 0.0311284, 0.0314183, 0.0317235, 0.0320134, 0.0323186, 0.0326238, 0.032929, 0.0332341, 0.0335393, 0.0338445, 0.0341497, 0.0344549, 0.0347753, 0.0350805, 0.0354009, 0.0357214, 0.0360418, 0.0363622, 0.0366827, 0.0370031, 0.0373388, 0.0376593, 0.037995, 0.0383154, 0.0386511, 0.0389868, 0.0393225, 0.0396582, 0.0399939, 0.0403449, 0.0406806, 0.0410315, 0.0413825, 0.0417182, 0.0420691, 0.0424201, 0.042771, 0.0431373, 0.0434882, 0.0438392, 0.0442054, 0.0445716, 0.0449226, 0.0452888, 0.045655, 0.0460212, 0.0464027, 0.0467689, 0.0471504, 0.0475166, 0.0478981, 0.0482795, 0.048661, 0.0490425, 0.049424, 0.0498054, 0.0501869, 0.0505837, 0.0509804, 0.0513619, 0.0517586, 0.0521553, 0.0525521, 0.0529488, 0.0533608, 0.0537575, 0.0541695, 0.0545663, 0.0549783, 0.0553902, 0.0558022, 0.0562142, 0.0566262, 0.0570535, 0.0574655, 0.0578927, 0.05832, 0.058732, 0.0591592, 0.0595865, 0.060029, 0.0604562, 0.0608835, 0.061326, 0.0617533, 0.0621958, 0.0626383, 0.0630808, 0.0635233, 0.0639811, 0.0644236, 0.0648661, 0.0653239, 0.0657816, 0.0662394, 0.0666972, 0.067155, 0.0676127, 0.0680705, 0.0685435, 0.0690013, 0.0694743, 0.0699474, 0.0704204, 0.0708934, 0.0713664, 0.0718395, 0.0723278, 0.0728008, 0.0732891, 0.0737774, 0.0742657, 0.0747539, 0.0752422, 0.0757305, 0.0762188, 0.0767224, 0.0772259, 0.0777142, 0.0782177, 0.0787213, 0.0792401, 0.0797436, 0.0802472, 0.080766, 0.0812696, 0.0817884, 
0.0823072, 0.082826, 0.0833448, 0.0838636, 0.0843977, 0.0849165, 0.0854505, 0.0859846, 0.0865187, 0.0870527, 0.0875868, 0.0881209, 0.0886549, 0.0892042, 0.0897536, 0.0902876, 0.090837, 0.0913863, 0.0919356, 0.0925002, 0.0930495, 0.0936141, 0.0941634, 0.094728, 0.0952926, 0.0958572, 0.0964218, 0.0970016, 0.0975662, 0.098146, 0.0987106, 0.0992905, 0.0998703, 0.1004501, 0.10103, 0.1016251, 0.1022049, 0.1028, 0.1033799, 0.103975, 0.1045701, 0.1051652, 0.1057755, 0.1063706, 0.106981, 0.1075761, 0.1081865, 0.1087968, 0.1094072, 0.1100175, 0.1106279, 0.1112535, 0.1118639, 0.1124895, 0.1131151, 0.1137407, 0.1143664, 0.114992, 0.1156176, 0.1162585, 0.1168841, 0.117525, 0.1181659, 0.1188067, 0.1194476, 0.1200885, 0.1207446, 0.1213855, 0.1220417, 0.1226978, 0.1233539, 0.1240101, 0.1246662, 0.1253223, 0.1259937, 0.1266499, 0.1273213, 0.1279927, 0.1286641, 0.1293355, 0.1300069, 0.1306935, 0.1313649, 0.1320516, 0.1327382, 0.1334096, 0.1341115, 0.1347982, 0.1354849, 0.1361868, 0.1368734, 0.1375753, 0.1382773, 0.1389792, 0.1396811, 0.140383, 0.1411002, 0.1418021, 0.1425193, 0.1432364, 0.1439536, 0.1446708, 0.145388, 0.1461204, 0.1468376, 0.14757, 0.1483024, 0.1490349, 0.1497673, 0.1504997, 0.1512322, 0.1519799, 0.1527123, 0.15346, 0.1542077, 0.1549554, 0.1557031, 0.1564508, 0.1572137, 0.1579767, 0.1587243, 0.1594873, 0.1602502, 0.1610132, 0.1617914, 0.1625544, 0.1633326, 0.1640955, 0.1648737, 0.1656519, 0.1664302, 0.1672236, 0.1680018, 0.1687953, 0.1695735, 0.170367, 0.1711604, 0.1719539, 0.1727474, 0.1735561, 0.1743496, 0.1751583, 0.175967, 0.1767758, 0.1775845, 0.1783932, 0.1792172, 0.1800259, 0.1808499, 0.1816739, 0.1824826, 0.1833219, 0.1841459, 0.1849699, 0.1858091, 0.1866331, 0.1874723, 0.1883116, 0.1891508, 0.1900053, 0.1908446, 0.1916838, 0.1925383, 0.1933928, 0.1942473, 0.1951019, 0.1959564, 0.1968261, 0.1976806, 0.1985504, 0.1994202, 0.2002899, 0.2011597, 0.2020294, 0.2028992, 0.2037842, 0.2046693, 0.205539, 0.206424, 0.2073243, 0.2082094, 0.2090944, 0.2099947, 0.2108949, 0.21178, 0.2126802, 0.2135958, 0.2144961, 0.2153964, 0.2163119, 0.2172274, 0.2181277, 0.2190585, 0.2199741, 0.2208896, 0.2218051, 0.2227359, 0.2236667, 0.2245975, 0.2255283, 0.2264591, 0.2273899, 0.228336, 0.2292821, 0.2302129, 0.2311589, 0.232105, 0.2330663, 0.2340124, 0.2349737, 0.2359197, 0.2368811, 0.2378424, 0.2388037, 0.239765, 0.2407416, 0.2417029, 0.2426795, 0.2436561, 0.2446326, 0.2456092, 0.2466011, 0.2475776, 0.2485695, 0.249546, 0.2505379, 0.2515297, 0.2525368, 0.2535286, 0.2545357, 0.2555276, 0.2565347, 0.2575418, 0.2585489, 0.259556, 0.2605783, 0.2615854, 0.2626078, 0.2636301, 0.2646525, 0.2656748, 0.2667124, 0.2677348, 0.2687724, 0.26981, 0.2708324, 0.2718853, 0.2729229, 0.2739605, 0.2750134, 0.276051, 0.2771038, 0.2781567, 0.2792248, 0.2802777, 0.2813306, 0.2823987, 0.2834668, 0.284535, 0.2856031, 0.2866712, 0.2877394, 0.2888228, 0.2899062, 0.2909743, 0.2920577, 0.2931563, 0.2942397, 0.2953231, 0.2964218, 0.2975204, 0.2986191, 0.2997177, 0.3008164, 0.301915, 0.3030289, 0.3041428, 0.3052567, 0.3063706, 0.3074846, 0.3085985, 0.3097124, 0.3108415, 0.3119707, 0.3130999, 0.314229, 0.3153582, 0.3165026, 0.3176318, 0.3187762, 0.3199207, 0.3210651, 0.3222095, 0.3233539, 0.3245136, 0.3256733, 0.3268177, 0.3279774, 0.3291371, 0.330312, 0.3314717, 0.3326467, 0.3338216, 0.3349966, 0.3361715, 0.3373465, 0.3385214, 0.3397116, 0.3408865, 0.3420768, 0.343267, 0.3444724, 0.3456626, 0.3468528, 0.3480583, 0.3492638, 0.3504692, 0.3516747, 0.3528801, 0.3541009, 0.3553063, 0.356527, 0.3577478, 0.3589685, 0.3601892, 0.3614252, 
0.3626459, 0.3638819, 0.3651179, 0.3663539, 0.3675898, 0.3688411, 0.3700771, 0.3713283, 0.3725795, 0.3738308, 0.375082, 0.3763333, 0.3775998, 0.378851, 0.3801175, 0.381384, 0.3826505, 0.3839322, 0.3851987, 0.3864805, 0.387747, 0.3890288, 0.3903105, 0.3916075, 0.3928893, 0.3941863, 0.3954681, 0.3967651, 0.3980621, 0.3993744, 0.4006714, 0.4019837, 0.4032807, 0.404593, 0.4059052, 0.4072175, 0.4085451, 0.4098573, 0.4111849, 0.4125124, 0.4138399, 0.4151675, 0.416495, 0.4178378, 0.4191806, 0.4205234, 0.4218662, 0.423209, 0.4245518, 0.4259098, 0.4272526, 0.4286107, 0.4299687, 0.4313268, 0.4326848, 0.4340581, 0.4354314, 0.4367895, 0.4381628, 0.4395514, 0.4409247, 0.442298, 0.4436866, 0.4450752, 0.4464637, 0.4478523, 0.4492409, 0.4506447, 0.4520333, 0.4534371, 0.4548409, 0.4562448, 0.4576486, 0.4590677, 0.4604715, 0.4618906, 0.4633097, 0.4647288, 0.4661631, 0.4675822, 0.4690166, 0.4704356, 0.47187, 0.4733043, 0.4747539, 0.4761883, 0.4776379, 0.4790875, 0.4805371, 0.4819867, 0.4834363, 0.4848859, 0.4863508, 0.4878157, 0.4892805, 0.4907454, 0.4922103, 0.4936904, 0.4951553, 0.4966354, 0.4981155, 0.4995956, 0.501091, 0.5025711, 0.5040665, 0.5055467, 0.507042, 0.5085527, 0.5100481, 0.5115435, 0.5130541, 0.5145647, 0.5160754, 0.517586, 0.5190967, 0.5206226, 0.5221485, 0.5236591, 0.525185, 0.5267262, 0.5282521, 0.529778, 0.5313191, 0.5328603, 0.5344015, 0.5359426, 0.537499, 0.5390402, 0.5405966, 0.542153, 0.5437095, 0.5452659, 0.5468223, 0.548394, 0.5499657, 0.5515373, 0.553109, 0.5546807, 0.5562524, 0.5578393, 0.5594263, 0.5610132, 0.5626001, 0.5641871, 0.565774, 0.5673762, 0.5689784, 0.5705806, 0.5721828, 0.573785, 0.5754025, 0.5770047, 0.5786221, 0.5802396, 0.581857, 0.5834897, 0.5851072, 0.5867399, 0.5883726, 0.5900053, 0.5916381, 0.5932708, 0.5949187, 0.5965667, 0.5982147, 0.5998627, 0.6015106, 0.6031586, 0.6048219, 0.6064851, 0.6081483, 0.6098116, 0.6114748, 0.6131533, 0.6148165, 0.616495, 0.6181735, 0.619852, 0.6215457, 0.6232242, 0.624918, 0.6266117, 0.6283055, 0.6299992, 0.631693, 0.633402, 0.635111, 0.63682, 0.638529, 0.640238, 0.6419471, 0.6436713, 0.6453956, 0.6471199, 0.6488441, 0.6505684, 0.6523079, 0.6540322, 0.6557717, 0.6575113, 0.6592508, 0.6610056, 0.6627451, 0.6644999, 0.6662547, 0.6680095, 0.6697642, 0.6715343, 0.6732891, 0.6750591, 0.6768292, 0.6785992, 0.6803845, 0.6821546, 0.6839399, 0.6857252, 0.6875105, 0.6892958, 0.6910811, 0.6928817, 0.6946822, 0.6964675, 0.6982834, 0.7000839, 0.7018845, 0.7037003, 0.7055161, 0.707332, 0.7091478, 0.7109636, 0.7127947, 0.7146105, 0.7164416, 0.7182727, 0.720119, 0.7219501, 0.7237964, 0.7256275, 0.7274739, 0.7293355, 0.7311818, 0.7330282, 0.7348898, 0.7367514, 0.738613, 0.7404746, 0.7423514, 0.744213, 0.7460899, 0.7479667, 0.7498436, 0.7517205, 0.7536126, 0.7554894, 0.7573816, 0.7592737, 0.7611658, 0.7630732, 0.7649653, 0.7668727, 0.76878, 0.7706874, 0.7725948, 0.7745174, 0.7764248, 0.7783474, 0.7802701, 0.7821927, 0.7841306, 0.7860533, 0.7879911, 0.789929, 0.7918669, 0.7938048, 0.795758, 0.7976959, 0.799649, 0.8016022, 0.8035554, 0.8055238, 0.8074769, 0.8094453, 0.8114137, 0.8133822, 0.8153506, 0.8173342, 0.8193179, 0.8212863, 0.82327, 0.8252689, 0.8272526, 0.8292515, 0.8312352, 0.8332341, 0.8352331, 0.8372473, 0.8392462, 0.8412604, 0.8432746, 0.8452888, 0.847303, 0.8493172, 0.8513466, 0.8533761, 0.8554055, 0.857435, 0.8594644, 0.8614939, 0.8635386, 0.8655833, 0.867628, 0.8696727, 0.8717327, 0.8737774, 0.8758373, 0.8778973, 0.8799573, 0.8820325, 0.8840925, 0.8861677, 0.8882429, 0.8903182, 0.8923934, 0.8944839, 0.8965591, 0.8986496, 0.9007401, 
0.9028305, 0.9049363, 0.9070268, 0.9091325, 0.9112383, 0.913344, 0.915465, 0.9175708, 0.9196918, 0.9218128, 0.9239338, 0.9260548, 0.9281758, 0.930312, 0.9324483, 0.9345846, 0.9367208, 0.9388571, 0.9410086, 0.9431601, 0.9453117, 0.9474632, 0.9496147, 0.9517815, 0.953933, 0.9560998, 0.9582666, 0.9604334, 0.9626154, 0.9647822, 0.9669642, 0.9691463, 0.9713283, 0.9735256, 0.9757076, 0.9779049, 0.9801022, 0.9822995, 0.9844968, 0.9867094, 0.988922, 0.9911345, 0.9933471, 0.9955596, 0.9977722, 1.0 "
+          + "Rewed TRC=0.0, 0.0000763, 0.0001526, 0.0002289, 0.0003052, 0.0003815, 0.0004578, 0.0005341, 0.0006104, 0.0006867, 0.000763, 0.0008392, 0.0009003, 0.0009766, 0.0010529, 0.0011292, 0.0012055, 0.0012818, 0.0013581, 0.0014343, 0.0015106, 0.0015869, 0.0016632, 0.0017395, 0.0018158, 0.0018921, 0.0019684, 0.0020447, 0.002121, 0.0021973, 0.0022736, 0.0023499, 0.0024262, 0.0025025, 0.0025788, 0.0026551, 0.0027161, 0.0027924, 0.0028687, 0.002945, 0.0030213, 0.0030976, 0.0031739, 0.0032502, 0.0033417, 0.003418, 0.0034943, 0.0035859, 0.0036622, 0.0037537, 0.00383, 0.0039216, 0.0040131, 0.0041047, 0.0041962, 0.0042878, 0.0043793, 0.0044709, 0.0045624, 0.0046693, 0.0047608, 0.0048524, 0.0049592, 0.005066, 0.0051575, 0.0052644, 0.0053712, 0.005478, 0.0055848, 0.0056916, 0.0057984, 0.0059052, 0.0060273, 0.0061341, 0.0062562, 0.006363, 0.0064851, 0.0066072, 0.0067292, 0.0068513, 0.0069734, 0.0070954, 0.0072175, 0.0073396, 0.0074617, 0.007599, 0.0077211, 0.0078584, 0.0079957, 0.0081178, 0.0082551, 0.0083925, 0.0085298, 0.0086671, 0.0088045, 0.008957, 0.0090944, 0.0092317, 0.0093843, 0.0095369, 0.0096742, 0.0098268, 0.0099794, 0.010132, 0.0102846, 0.0104372, 0.0105898, 0.0107576, 0.0109102, 0.0110628, 0.0112306, 0.0113985, 0.0115511, 0.0117189, 0.0118868, 0.0120546, 0.0122225, 0.0124056, 0.0125734, 0.0127413, 0.0129244, 0.0130922, 0.0132753, 0.0134585, 0.0136416, 0.0138247, 0.0140078, 0.0141909, 0.014374, 0.0145571, 0.0147555, 0.0149386, 0.0151369, 0.0153201, 0.0155184, 0.0157168, 0.0159152, 0.0161135, 0.0163119, 0.0165255, 0.0167239, 0.0169223, 0.0171359, 0.0173495, 0.0175479, 0.0177615, 0.0179751, 0.0181888, 0.0184024, 0.018616, 0.0188449, 0.0190585, 0.0192874, 0.019501, 0.0197299, 0.0199588, 0.0201877, 0.0204166, 0.0206455, 0.0208743, 0.0211032, 0.0213474, 0.0215763, 0.0218204, 0.0220645, 0.0222934, 0.0225376, 0.0227817, 0.0230259, 0.0232853, 0.0235294, 0.0237736, 0.024033, 0.0242771, 0.0245365, 0.0247959, 0.0250553, 0.0253147, 0.0255741, 0.0258335, 0.0261082, 0.0263676, 0.026627, 0.0269017, 0.0271763, 0.027451, 0.0277256, 0.0280003, 0.028275, 0.0285496, 0.0288243, 0.0291142, 0.0293889, 0.0296788, 0.0299687, 0.0302586, 0.0305486, 0.0308385, 0.0311284, 0.0314183, 0.0317235, 0.0320134, 0.0323186, 0.0326238, 0.032929, 0.0332341, 0.0335393, 0.0338445, 0.0341497, 0.0344549, 0.0347753, 0.0350805, 0.0354009, 0.0357214, 0.0360418, 0.0363622, 0.0366827, 0.0370031, 0.0373388, 0.0376593, 0.037995, 0.0383154, 0.0386511, 0.0389868, 0.0393225, 0.0396582, 0.0399939, 0.0403449, 0.0406806, 0.0410315, 0.0413825, 0.0417182, 0.0420691, 0.0424201, 0.042771, 0.0431373, 0.0434882, 0.0438392, 0.0442054, 0.0445716, 0.0449226, 0.0452888, 0.045655, 0.0460212, 0.0464027, 0.0467689, 0.0471504, 0.0475166, 0.0478981, 0.0482795, 0.048661, 0.0490425, 0.049424, 0.0498054, 0.0501869, 0.0505837, 0.0509804, 0.0513619, 0.0517586, 0.0521553, 0.0525521, 0.0529488, 0.0533608, 0.0537575, 0.0541695, 0.0545663, 0.0549783, 0.0553902, 0.0558022, 0.0562142, 0.0566262, 0.0570535, 0.0574655, 0.0578927, 0.05832, 0.058732, 0.0591592, 0.0595865, 0.060029, 0.0604562, 0.0608835, 0.061326, 0.0617533, 0.0621958, 0.0626383, 0.0630808, 0.0635233, 0.0639811, 0.0644236, 0.0648661, 0.0653239, 0.0657816, 0.0662394, 0.0666972, 0.067155, 0.0676127, 0.0680705, 0.0685435, 0.0690013, 0.0694743, 0.0699474, 0.0704204, 0.0708934, 0.0713664, 0.0718395, 0.0723278, 0.0728008, 0.0732891, 0.0737774, 0.0742657, 0.0747539, 0.0752422, 0.0757305, 0.0762188, 0.0767224, 0.0772259, 0.0777142, 0.0782177, 0.0787213, 0.0792401, 0.0797436, 0.0802472, 0.080766, 0.0812696, 
0.0817884, 0.0823072, 0.082826, 0.0833448, 0.0838636, 0.0843977, 0.0849165, 0.0854505, 0.0859846, 0.0865187, 0.0870527, 0.0875868, 0.0881209, 0.0886549, 0.0892042, 0.0897536, 0.0902876, 0.090837, 0.0913863, 0.0919356, 0.0925002, 0.0930495, 0.0936141, 0.0941634, 0.094728, 0.0952926, 0.0958572, 0.0964218, 0.0970016, 0.0975662, 0.098146, 0.0987106, 0.0992905, 0.0998703, 0.1004501, 0.10103, 0.1016251, 0.1022049, 0.1028, 0.1033799, 0.103975, 0.1045701, 0.1051652, 0.1057755, 0.1063706, 0.106981, 0.1075761, 0.1081865, 0.1087968, 0.1094072, 0.1100175, 0.1106279, 0.1112535, 0.1118639, 0.1124895, 0.1131151, 0.1137407, 0.1143664, 0.114992, 0.1156176, 0.1162585, 0.1168841, 0.117525, 0.1181659, 0.1188067, 0.1194476, 0.1200885, 0.1207446, 0.1213855, 0.1220417, 0.1226978, 0.1233539, 0.1240101, 0.1246662, 0.1253223, 0.1259937, 0.1266499, 0.1273213, 0.1279927, 0.1286641, 0.1293355, 0.1300069, 0.1306935, 0.1313649, 0.1320516, 0.1327382, 0.1334096, 0.1341115, 0.1347982, 0.1354849, 0.1361868, 0.1368734, 0.1375753, 0.1382773, 0.1389792, 0.1396811, 0.140383, 0.1411002, 0.1418021, 0.1425193, 0.1432364, 0.1439536, 0.1446708, 0.145388, 0.1461204, 0.1468376, 0.14757, 0.1483024, 0.1490349, 0.1497673, 0.1504997, 0.1512322, 0.1519799, 0.1527123, 0.15346, 0.1542077, 0.1549554, 0.1557031, 0.1564508, 0.1572137, 0.1579767, 0.1587243, 0.1594873, 0.1602502, 0.1610132, 0.1617914, 0.1625544, 0.1633326, 0.1640955, 0.1648737, 0.1656519, 0.1664302, 0.1672236, 0.1680018, 0.1687953, 0.1695735, 0.170367, 0.1711604, 0.1719539, 0.1727474, 0.1735561, 0.1743496, 0.1751583, 0.175967, 0.1767758, 0.1775845, 0.1783932, 0.1792172, 0.1800259, 0.1808499, 0.1816739, 0.1824826, 0.1833219, 0.1841459, 0.1849699, 0.1858091, 0.1866331, 0.1874723, 0.1883116, 0.1891508, 0.1900053, 0.1908446, 0.1916838, 0.1925383, 0.1933928, 0.1942473, 0.1951019, 0.1959564, 0.1968261, 0.1976806, 0.1985504, 0.1994202, 0.2002899, 0.2011597, 0.2020294, 0.2028992, 0.2037842, 0.2046693, 0.205539, 0.206424, 0.2073243, 0.2082094, 0.2090944, 0.2099947, 0.2108949, 0.21178, 0.2126802, 0.2135958, 0.2144961, 0.2153964, 0.2163119, 0.2172274, 0.2181277, 0.2190585, 0.2199741, 0.2208896, 0.2218051, 0.2227359, 0.2236667, 0.2245975, 0.2255283, 0.2264591, 0.2273899, 0.228336, 0.2292821, 0.2302129, 0.2311589, 0.232105, 0.2330663, 0.2340124, 0.2349737, 0.2359197, 0.2368811, 0.2378424, 0.2388037, 0.239765, 0.2407416, 0.2417029, 0.2426795, 0.2436561, 0.2446326, 0.2456092, 0.2466011, 0.2475776, 0.2485695, 0.249546, 0.2505379, 0.2515297, 0.2525368, 0.2535286, 0.2545357, 0.2555276, 0.2565347, 0.2575418, 0.2585489, 0.259556, 0.2605783, 0.2615854, 0.2626078, 0.2636301, 0.2646525, 0.2656748, 0.2667124, 0.2677348, 0.2687724, 0.26981, 0.2708324, 0.2718853, 0.2729229, 0.2739605, 0.2750134, 0.276051, 0.2771038, 0.2781567, 0.2792248, 0.2802777, 0.2813306, 0.2823987, 0.2834668, 0.284535, 0.2856031, 0.2866712, 0.2877394, 0.2888228, 0.2899062, 0.2909743, 0.2920577, 0.2931563, 0.2942397, 0.2953231, 0.2964218, 0.2975204, 0.2986191, 0.2997177, 0.3008164, 0.301915, 0.3030289, 0.3041428, 0.3052567, 0.3063706, 0.3074846, 0.3085985, 0.3097124, 0.3108415, 0.3119707, 0.3130999, 0.314229, 0.3153582, 0.3165026, 0.3176318, 0.3187762, 0.3199207, 0.3210651, 0.3222095, 0.3233539, 0.3245136, 0.3256733, 0.3268177, 0.3279774, 0.3291371, 0.330312, 0.3314717, 0.3326467, 0.3338216, 0.3349966, 0.3361715, 0.3373465, 0.3385214, 0.3397116, 0.3408865, 0.3420768, 0.343267, 0.3444724, 0.3456626, 0.3468528, 0.3480583, 0.3492638, 0.3504692, 0.3516747, 0.3528801, 0.3541009, 0.3553063, 0.356527, 0.3577478, 0.3589685, 0.3601892, 
0.3614252, 0.3626459, 0.3638819, 0.3651179, 0.3663539, 0.3675898, 0.3688411, 0.3700771, 0.3713283, 0.3725795, 0.3738308, 0.375082, 0.3763333, 0.3775998, 0.378851, 0.3801175, 0.381384, 0.3826505, 0.3839322, 0.3851987, 0.3864805, 0.387747, 0.3890288, 0.3903105, 0.3916075, 0.3928893, 0.3941863, 0.3954681, 0.3967651, 0.3980621, 0.3993744, 0.4006714, 0.4019837, 0.4032807, 0.404593, 0.4059052, 0.4072175, 0.4085451, 0.4098573, 0.4111849, 0.4125124, 0.4138399, 0.4151675, 0.416495, 0.4178378, 0.4191806, 0.4205234, 0.4218662, 0.423209, 0.4245518, 0.4259098, 0.4272526, 0.4286107, 0.4299687, 0.4313268, 0.4326848, 0.4340581, 0.4354314, 0.4367895, 0.4381628, 0.4395514, 0.4409247, 0.442298, 0.4436866, 0.4450752, 0.4464637, 0.4478523, 0.4492409, 0.4506447, 0.4520333, 0.4534371, 0.4548409, 0.4562448, 0.4576486, 0.4590677, 0.4604715, 0.4618906, 0.4633097, 0.4647288, 0.4661631, 0.4675822, 0.4690166, 0.4704356, 0.47187, 0.4733043, 0.4747539, 0.4761883, 0.4776379, 0.4790875, 0.4805371, 0.4819867, 0.4834363, 0.4848859, 0.4863508, 0.4878157, 0.4892805, 0.4907454, 0.4922103, 0.4936904, 0.4951553, 0.4966354, 0.4981155, 0.4995956, 0.501091, 0.5025711, 0.5040665, 0.5055467, 0.507042, 0.5085527, 0.5100481, 0.5115435, 0.5130541, 0.5145647, 0.5160754, 0.517586, 0.5190967, 0.5206226, 0.5221485, 0.5236591, 0.525185, 0.5267262, 0.5282521, 0.529778, 0.5313191, 0.5328603, 0.5344015, 0.5359426, 0.537499, 0.5390402, 0.5405966, 0.542153, 0.5437095, 0.5452659, 0.5468223, 0.548394, 0.5499657, 0.5515373, 0.553109, 0.5546807, 0.5562524, 0.5578393, 0.5594263, 0.5610132, 0.5626001, 0.5641871, 0.565774, 0.5673762, 0.5689784, 0.5705806, 0.5721828, 0.573785, 0.5754025, 0.5770047, 0.5786221, 0.5802396, 0.581857, 0.5834897, 0.5851072, 0.5867399, 0.5883726, 0.5900053, 0.5916381, 0.5932708, 0.5949187, 0.5965667, 0.5982147, 0.5998627, 0.6015106, 0.6031586, 0.6048219, 0.6064851, 0.6081483, 0.6098116, 0.6114748, 0.6131533, 0.6148165, 0.616495, 0.6181735, 0.619852, 0.6215457, 0.6232242, 0.624918, 0.6266117, 0.6283055, 0.6299992, 0.631693, 0.633402, 0.635111, 0.63682, 0.638529, 0.640238, 0.6419471, 0.6436713, 0.6453956, 0.6471199, 0.6488441, 0.6505684, 0.6523079, 0.6540322, 0.6557717, 0.6575113, 0.6592508, 0.6610056, 0.6627451, 0.6644999, 0.6662547, 0.6680095, 0.6697642, 0.6715343, 0.6732891, 0.6750591, 0.6768292, 0.6785992, 0.6803845, 0.6821546, 0.6839399, 0.6857252, 0.6875105, 0.6892958, 0.6910811, 0.6928817, 0.6946822, 0.6964675, 0.6982834, 0.7000839, 0.7018845, 0.7037003, 0.7055161, 0.707332, 0.7091478, 0.7109636, 0.7127947, 0.7146105, 0.7164416, 0.7182727, 0.720119, 0.7219501, 0.7237964, 0.7256275, 0.7274739, 0.7293355, 0.7311818, 0.7330282, 0.7348898, 0.7367514, 0.738613, 0.7404746, 0.7423514, 0.744213, 0.7460899, 0.7479667, 0.7498436, 0.7517205, 0.7536126, 0.7554894, 0.7573816, 0.7592737, 0.7611658, 0.7630732, 0.7649653, 0.7668727, 0.76878, 0.7706874, 0.7725948, 0.7745174, 0.7764248, 0.7783474, 0.7802701, 0.7821927, 0.7841306, 0.7860533, 0.7879911, 0.789929, 0.7918669, 0.7938048, 0.795758, 0.7976959, 0.799649, 0.8016022, 0.8035554, 0.8055238, 0.8074769, 0.8094453, 0.8114137, 0.8133822, 0.8153506, 0.8173342, 0.8193179, 0.8212863, 0.82327, 0.8252689, 0.8272526, 0.8292515, 0.8312352, 0.8332341, 0.8352331, 0.8372473, 0.8392462, 0.8412604, 0.8432746, 0.8452888, 0.847303, 0.8493172, 0.8513466, 0.8533761, 0.8554055, 0.857435, 0.8594644, 0.8614939, 0.8635386, 0.8655833, 0.867628, 0.8696727, 0.8717327, 0.8737774, 0.8758373, 0.8778973, 0.8799573, 0.8820325, 0.8840925, 0.8861677, 0.8882429, 0.8903182, 0.8923934, 0.8944839, 0.8965591, 0.8986496, 
0.9007401, 0.9028305, 0.9049363, 0.9070268, 0.9091325, 0.9112383, 0.913344, 0.915465, 0.9175708, 0.9196918, 0.9218128, 0.9239338, 0.9260548, 0.9281758, 0.930312, 0.9324483, 0.9345846, 0.9367208, 0.9388571, 0.9410086, 0.9431601, 0.9453117, 0.9474632, 0.9496147, 0.9517815, 0.953933, 0.9560998, 0.9582666, 0.9604334, 0.9626154, 0.9647822, 0.9669642, 0.9691463, 0.9713283, 0.9735256, 0.9757076, 0.9779049, 0.9801022, 0.9822995, 0.9844968, 0.9867094, 0.988922, 0.9911345, 0.9933471, 0.9955596, 0.9977722, 1.0";
 
+  private static String SHORT_TEXT =
+      "Comments=CREATOR: gd-jpeg v1.0 (using IJG JPEG v62), quality = 100 Component 1=Y component: Quantization table 0, Sampling factors 2 horiz/2 vert Component 2=Cb component: Quantization table 1, Sampling factors 1 horiz/1 vert Component 3=Cr component: Quantization table 1, Sampling factors 1 horiz/1 vert Compression Type=Baseline Content-Length=832251 Content-Type=image/jpeg Data Precision=8 bits Image Height=1200 pixels Image Width=1600 pixels Jpeg Comment=CREATOR: gd-jpeg v1.0 (using IJG JPEG v62), quality = 100 Number of Components=3 Resolution Units=none X Resolution=1 dot Y Resolution=1 dot comment=CREATOR: gd-jpeg v1.0 (using IJG JPEG v62), quality = 100 resourceName=9.jpg tiff:BitsPerSample=8 tiff:ImageLength=1200 tiff:ImageWidth=1600 w:comments=CREATOR: gd-jpeg v1.0 (using IJG JPEG v62), quality = 100";
 }
diff --git a/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java b/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java
index 979f1899a85..5536d6a98e5 100644
--- a/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java
+++ b/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java
@@ -24,7 +24,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
@@ -49,50 +48,49 @@
 
 public class HighlighterTest extends SolrTestCaseJ4 {
 
-  private static String LONG_TEXT = "a long days night this should be a piece of text which is is is is is is is is is is is is is is is is is is is " +
-          "is is is is is isis is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is " +
-          "is is is is is is is is is is is is is " +
-          "is is is is is is is is is is is is is is is is is is is is sufficiently lengthly to produce multiple fragments which are not concatenated " +
-          "at all--we want two disjoint long fragments.";
+  private static String LONG_TEXT =
+      "a long days night this should be a piece of text which is is is is is is is is is is is is is is is is is is is "
+          + "is is is is is isis is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is "
+          + "is is is is is is is is is is is is is "
+          + "is is is is is is is is is is is is is is is is is is is is sufficiently lengthly to produce multiple fragments which are not concatenated "
+          + "at all--we want two disjoint long fragments.";
 
   @BeforeClass
   public static void beforeClass() throws Exception {
-    initCore("solrconfig.xml","schema.xml");
+    initCore("solrconfig.xml", "schema.xml");
   }
- 
+
   @After
-  @Override 
+  @Override
   public void tearDown() throws Exception {
     // if you override setUp or tearDown, you better call
     // the super classes version
     clearIndex();
     super.tearDown();
   }
-  
+
   @Test
-  public void testConfig()
-  {
+  public void testConfig() {
     DefaultSolrHighlighter highlighter = (DefaultSolrHighlighter) getHighlighter();
 
     // Make sure we loaded the one formatter
-    SolrFormatter fmt1 = highlighter.formatters.get( null );
-    SolrFormatter fmt2 = highlighter.formatters.get( "" );
-    assertSame( fmt1, fmt2 );
-    assertTrue( fmt1 instanceof HtmlFormatter );
-    
-    
+    SolrFormatter fmt1 = highlighter.formatters.get(null);
+    SolrFormatter fmt2 = highlighter.formatters.get("");
+    assertSame(fmt1, fmt2);
+    assertTrue(fmt1 instanceof HtmlFormatter);
+
     // Make sure we loaded the one formatter
-    SolrFragmenter gap = highlighter.fragmenters.get( "gap" );
-    SolrFragmenter regex = highlighter.fragmenters.get( "regex" );
-    SolrFragmenter frag = highlighter.fragmenters.get( null );
-    assertSame( gap, frag );
+    SolrFragmenter gap = highlighter.fragmenters.get("gap");
+    SolrFragmenter regex = highlighter.fragmenters.get("regex");
+    SolrFragmenter frag = highlighter.fragmenters.get(null);
+    assertSame(gap, frag);
     assertTrue(gap instanceof GapFragmenter);
     assertTrue(regex instanceof RegexFragmenter);
   }
 
   @Test
   public void testMergeContiguous() throws Exception {
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put(HighlightParams.HIGHLIGHT, "true");
     args.put("df", "t_text");
     args.put(HighlightParams.FIELDS, "");
@@ -100,88 +98,90 @@ public void testMergeContiguous() throws Exception {
     args.put(HighlightParams.FRAGSIZE, String.valueOf(40));
     args.put(HighlightParams.MERGE_CONTIGUOUS_FRAGMENTS, "true");
     args.put(HighlightParams.METHOD, "original"); // test works; no complaints
-    
-    String input = "this is some long text.  It has the word long in many places.  In fact, it has long on some different fragments.  " +
-            "Let us see what happens to long in this case.";
-    String gold = "this is some long text.  It has the word long in many places.  In fact, it has long on some different fragments.  " +
-            "Let us see what happens to long in this case.";
+
+    String input =
+        "this is some long text.  It has the word long in many places.  In fact, it has long on some different fragments.  "
+            + "Let us see what happens to long in this case.";
+    String gold =
+        "this is some long text.  It has the word long in many places.  In fact, it has long on some different fragments.  "
+            + "Let us see what happens to long in this case.";
     assertU(adoc("t_text", input, "id", "1"));
     assertU(commit());
     assertU(optimize());
-    assertQ("Merge Contiguous",
+    assertQ(
+        "Merge Contiguous",
         req(args, "q", "t_text:long"),
-            "//lst[@name='highlighting']/lst[@name='1']",
-            "//lst[@name='1']/arr[@name='t_text']/str[.='" + gold + "']"
-            );
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//lst[@name='1']/arr[@name='t_text']/str[.='" + gold + "']");
     args.put("f.t_text." + HighlightParams.MERGE_CONTIGUOUS_FRAGMENTS, "true");
     assertU(adoc("t_text", input, "id", "1"));
     assertU(commit());
     assertU(optimize());
-    assertQ("Merge Contiguous",
+    assertQ(
+        "Merge Contiguous",
         req(args, "q", "t_text:long"),
-            "//lst[@name='highlighting']/lst[@name='1']",
-            "//lst[@name='1']/arr[@name='t_text']/str[.='" + gold + "']"
-            );
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//lst[@name='1']/arr[@name='t_text']/str[.='" + gold + "']");
 
     args.put(HighlightParams.MERGE_CONTIGUOUS_FRAGMENTS, "false");
     args.put("f.t_text." + HighlightParams.MERGE_CONTIGUOUS_FRAGMENTS, "false");
-    
-    assertQ("Merge Contiguous",
+
+    assertQ(
+        "Merge Contiguous",
         req(args, "q", "t_text:long"),
         "//lst[@name='highlighting']/lst[@name='1']",
         "//lst[@name='1']/arr[@name='t_text']/str[.='this is some long text.  It has']",
         "//lst[@name='1']/arr[@name='t_text']/str[.=' the word long in many places.  In fact, it has']",
         "//lst[@name='1']/arr[@name='t_text']/str[.=' long on some different fragments.  Let us']",
-        "//lst[@name='1']/arr[@name='t_text']/str[.=' see what happens to long in this case.']"
-    );
+        "//lst[@name='1']/arr[@name='t_text']/str[.=' see what happens to long in this case.']");
   }
 
   @Test
   public void testTermVecHighlight() {
 
     // do summarization using term vectors
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("hl", "true");
     args.put("hl.fl", "tv_text");
     args.put("hl.snippets", "2");
-    
-    assertU(adoc("tv_text", LONG_TEXT, 
-                 "id", "1"));
+
+    assertU(adoc("tv_text", LONG_TEXT, "id", "1"));
     assertU(commit());
     assertU(optimize());
-    assertQ("Basic summarization",
-            req(args, "q", "tv_text:long"),
-            "//lst[@name='highlighting']/lst[@name='1']",
-            "//lst[@name='1']/arr[@name='tv_text']/str[.='a long days night this should be a piece of text which']",
-            "//arr[@name='tv_text']/str[.=' long fragments.']"
-            );
+    assertQ(
+        "Basic summarization",
+        req(args, "q", "tv_text:long"),
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//lst[@name='1']/arr[@name='tv_text']/str[.='a long days night this should be a piece of text which']",
+        "//arr[@name='tv_text']/str[.=' long fragments.']");
   }
 
   @Test
   public void testTermVectorWithoutOffsetsHighlight() {
 
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("hl", "true");
     args.put("hl.method", "original");
     args.put("hl.fl", "tv_no_off_text");
-    
+
     assertU(adoc("tv_no_off_text", "Crackerjack Cameron", "id", "1"));
     assertU(commit());
     assertU(optimize());
 
-    assertQ("Fields with term vectors switched on but no offsets should be correctly highlighted",
+    assertQ(
+        "Fields with term vectors switched on but no offsets should be correctly highlighted",
         req(args, "q", "tv_no_off_text:cameron"),
-            "//arr[@name='tv_no_off_text']/str[.='Crackerjack Cameron']");
-
+        "//arr[@name='tv_no_off_text']/str[.='Crackerjack Cameron']");
   }
-  
+
   @Test
   public void testOffsetWindowTokenFilter() throws Exception {
-    String[] multivalued = { "a b c d", "e f g", "h", "i j k l m n" };
+    String[] multivalued = {"a b c d", "e f g", "h", "i j k l m n"};
     try (Analyzer a1 = new WhitespaceAnalyzer()) {
       TokenStream tokenStream = a1.tokenStream("", "a b c d e f g h i j k l m n");
 
-      try (DefaultSolrHighlighter.OffsetWindowTokenFilter tots = new DefaultSolrHighlighter.OffsetWindowTokenFilter(tokenStream)) {
+      try (DefaultSolrHighlighter.OffsetWindowTokenFilter tots =
+          new DefaultSolrHighlighter.OffsetWindowTokenFilter(tokenStream)) {
         for (String v : multivalued) {
           TokenStream ts1 = tots.advanceToNextWindowOfLength(v.length());
           ts1.reset();
@@ -204,22 +204,24 @@ public void testOffsetWindowTokenFilter() throws Exception {
   public void testTermVecMultiValuedHighlight() throws Exception {
 
     // do summarization using term vectors on multivalued field
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("hl", "true");
     args.put("hl.fl", "tv_mv_text");
     args.put("hl.snippets", "2");
-    
-    assertU(adoc("tv_mv_text", LONG_TEXT, 
-                 "tv_mv_text", LONG_TEXT, 
-                 "id", "1"));
+
+    assertU(
+        adoc(
+            "tv_mv_text", LONG_TEXT,
+            "tv_mv_text", LONG_TEXT,
+            "id", "1"));
     assertU(commit());
     assertU(optimize());
-    assertQ("Basic summarization",
+    assertQ(
+        "Basic summarization",
         req(args, "q", "tv_mv_text:long"),
-            "//lst[@name='highlighting']/lst[@name='1']",
-            "//lst[@name='1']/arr[@name='tv_mv_text']/str[.='a long days night this should be a piece of text which']",
-            "//arr[@name='tv_mv_text']/str[.=' long fragments.']"
-            );
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//lst[@name='1']/arr[@name='tv_mv_text']/str[.='a long days night this should be a piece of text which']",
+        "//arr[@name='tv_mv_text']/str[.=' long fragments.']");
   }
 
   // Variant of testTermVecMultiValuedHighlight to make sure that
@@ -229,30 +231,32 @@ public void testTermVecMultiValuedHighlight() throws Exception {
   public void testTermVecMultiValuedHighlight2() throws Exception {
 
     // do summarization using term vectors on multivalued field
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("hl", "true");
     args.put("hl.fl", "tv_mv_text");
     args.put("hl.snippets", "2");
 
     String shortText = "short";
-    assertU(adoc("tv_mv_text", shortText,
-                 "tv_mv_text", LONG_TEXT,
-                 "id", "1"));
+    assertU(
+        adoc(
+            "tv_mv_text", shortText,
+            "tv_mv_text", LONG_TEXT,
+            "id", "1"));
     assertU(commit());
     assertU(optimize());
-    assertQ("Basic summarization",
+    assertQ(
+        "Basic summarization",
         req(args, "q", "tv_mv_text:long"),
-            "//lst[@name='highlighting']/lst[@name='1']",
-            "//lst[@name='1']/arr[@name='tv_mv_text']/str[.='a long days night this should be a piece of text which']",
-            "//arr[@name='tv_mv_text']/str[.=' long fragments.']"
-            );
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//lst[@name='1']/arr[@name='tv_mv_text']/str[.='a long days night this should be a piece of text which']",
+        "//arr[@name='tv_mv_text']/str[.=' long fragments.']");
   }
 
   @Test
   public void testDisMaxHighlight() {
 
     // same test run through dismax handler
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("hl", "true");
     args.put("hl.fl", "tv_text");
     args.put("qf", "tv_text");
@@ -262,256 +266,287 @@ public void testDisMaxHighlight() {
     assertU(adoc("tv_text", "a long day's night", "id", "1"));
     assertU(commit());
     assertU(optimize());
-    assertQ("Basic summarization",
+    assertQ(
+        "Basic summarization",
         req(args, "q", "long"),
         "//lst[@name='highlighting']/lst[@name='1']",
-        "//lst[@name='1']/arr[@name='tv_text']/str"
-    );
-    
+        "//lst[@name='1']/arr[@name='tv_text']/str");
+
     // try the same thing without a q param
-    assertQ("Should not explode...", // q.alt should return everything
+    assertQ(
+        "Should not explode...", // q.alt should return everything
         req(args), // empty query
-        "//result[@numFound='1']"
-        );
+        "//result[@numFound='1']");
   }
 
   @Test
   public void testMultiValueAnalysisHighlight() {
 
     // do summarization using re-analysis of the field
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("hl", "true");
     args.put("hl.fl", "textgap");
     args.put("df", "textgap");
-    
-    assertU(adoc("textgap", "first entry hasnt queryword",
-        "textgap", "second entry has queryword long",
-        "id", "1"));
+
+    assertU(
+        adoc(
+            "textgap",
+            "first entry hasnt queryword",
+            "textgap",
+            "second entry has queryword long",
+            "id",
+            "1"));
     assertU(commit());
     assertU(optimize());
-    assertQ("Basic summarization",
-            req(args, "q", "long"),
-            "//lst[@name='highlighting']/lst[@name='1']",
-            "//lst[@name='1']/arr[@name='textgap']/str"
-            );
-
+    assertQ(
+        "Basic summarization",
+        req(args, "q", "long"),
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//lst[@name='1']/arr[@name='textgap']/str");
   }
-  
+
   @Test
   public void testMultiValueBestFragmentHighlight() {
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("hl", "true");
     args.put("hl.fl", "textgap");
     args.put("df", "textgap");
-    
-    assertU(adoc("textgap", "first entry has one word foo", 
-        "textgap", "second entry has both words foo bar",
-        "id", "1"));
+
+    assertU(
+        adoc(
+            "textgap",
+            "first entry has one word foo",
+            "textgap",
+            "second entry has both words foo bar",
+            "id",
+            "1"));
     assertU(commit());
     assertU(optimize());
-    assertQ("Best fragment summarization",
+    assertQ(
+        "Best fragment summarization",
         req(args, "q", "foo bar"),
         "//lst[@name='highlighting']/lst[@name='1']",
-        "//lst[@name='1']/arr[@name='textgap']/str[.=\'second entry has both words foo bar\']"
-    );
+        "//lst[@name='1']/arr[@name='textgap']/str[.=\'second entry has both words foo bar\']");
   }
 
-
   @Test
   public void testPreserveMulti() throws Exception {
-    assertU(adoc("id","1", "cat", "electronics", "cat", "monitor"));
+    assertU(adoc("id", "1", "cat", "electronics", "cat", "monitor"));
     assertU(commit());
 
-    assertJQ(req("q", "cat:monitor", "hl", "true", "hl.fl", "cat", "hl.snippets", "2", "f.cat.hl.preserveMulti", "true"),
-        "/highlighting/1/cat==['electronics','monitor']"
-    );
+    assertJQ(
+        req(
+            "q",
+            "cat:monitor",
+            "hl",
+            "true",
+            "hl.fl",
+            "cat",
+            "hl.snippets",
+            "2",
+            "f.cat.hl.preserveMulti",
+            "true"),
+        "/highlighting/1/cat==['electronics','monitor']");
 
     // No match still lists all snippets?
-    assertJQ(req("q", "id:1 OR cat:duuuude", "hl", "true", "hl.fl", "cat", "hl.snippets", "2", "f.cat.hl.preserveMulti", "true"),
-        "/highlighting/1/cat==['electronics','monitor']"
-    );
+    assertJQ(
+        req(
+            "q",
+            "id:1 OR cat:duuuude",
+            "hl",
+            "true",
+            "hl.fl",
+            "cat",
+            "hl.snippets",
+            "2",
+            "f.cat.hl.preserveMulti",
+            "true"),
+        "/highlighting/1/cat==['electronics','monitor']");
   }
 
   @Test
   public void testDefaultFieldHighlight() {
 
     // do summarization using re-analysis of the field
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("hl", "true");
     args.put("df", "t_text");
     args.put("hl.fl", "");
-    
+
     assertU(adoc("t_text", "a long day's night", "id", "1"));
     assertU(commit());
     assertU(optimize());
-    assertQ("Basic summarization",
+    assertQ(
+        "Basic summarization",
         req(args, "q", "long"),
         "//lst[@name='highlighting']/lst[@name='1']",
-        "//lst[@name='1']/arr[@name='t_text']/str"
-    );
-
+        "//lst[@name='1']/arr[@name='t_text']/str");
   }
 
-
   @Test
   public void testHighlightDisabled() {
 
     // ensure highlighting can be explicitly disabled
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("hl", "false");
     args.put("hl.fl", "t_text");
-    
+
     assertU(adoc("t_text", "a long day's night", "id", "1"));
     assertU(commit());
     assertU(optimize());
-    assertQ("Basic summarization",
-            req(args, "q", "t_text:long"), "not(//lst[@name='highlighting'])");
-
+    assertQ(
+        "Basic summarization", req(args, "q", "t_text:long"), "not(//lst[@name='highlighting'])");
   }
 
   @Test
   public void testTwoFieldHighlight() {
 
     // do summarization using re-analysis of the field
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("hl", "true");
     args.put("hl.fl", "t_text tv_text");
-    
-    assertU(adoc("t_text", "a long day's night", "id", "1",
-                 "tv_text", "a long night's day"));
+
+    assertU(adoc("t_text", "a long day's night", "id", "1", "tv_text", "a long night's day"));
     assertU(commit());
     assertU(optimize());
-    assertQ("Basic summarization",
-            req(args, "q", "t_text:long"),
-            "//lst[@name='highlighting']/lst[@name='1']",
-            "//lst[@name='1']/arr[@name='t_text']/str",
-            "//lst[@name='1']/arr[@name='tv_text']/str"
-            );
+    assertQ(
+        "Basic summarization",
+        req(args, "q", "t_text:long"),
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//lst[@name='1']/arr[@name='t_text']/str",
+        "//lst[@name='1']/arr[@name='tv_text']/str");
   }
-  
+
   @Test
-  public void testFieldMatch()
-  {
-     assertU(adoc("t_text1", "random words for highlighting tests", "id", "1",
-           "t_text2", "more random words for second field"));
-     assertU(commit());
-     assertU(optimize());
-     
-     HashMap<String,String> args = new HashMap<>();
-     args.put("hl", "true");
-     args.put("hl.fl", "t_text1 t_text2");
-
-     // default should highlight both random and words in both fields
-     assertQ("Test Default",
-           req(args, "q", "t_text1:random OR t_text2:words"),
-           "//lst[@name='highlighting']/lst[@name='1']",
-           "//lst[@name='1']/arr[@name='t_text1']/str[.='random words for highlighting tests']",
-           "//lst[@name='1']/arr[@name='t_text2']/str[.='more random words for second field']"
-           );
-     
-     // requireFieldMatch=true - highlighting should only occur if term matched in that field
-     args.put("hl.requireFieldMatch", "true");
-     assertQ("Test RequireFieldMatch",
-         req(args, "q", "t_text1:random OR t_text2:words"),
-         "//lst[@name='highlighting']/lst[@name='1']",
-         "//lst[@name='1']/arr[@name='t_text1']/str[.='random words for highlighting tests']",
-         "//lst[@name='1']/arr[@name='t_text2']/str[.='more random words for second field']"
-     );
-
-     // test case for un-optimized index
-     assertU(adoc("t_text1", "random words for highlighting tests", "id", "2",
-         "t_text2", "more random words for second field"));
-     assertU(delI("1"));
-     assertU(commit());
-     assertQ("Test RequireFieldMatch on un-optimized index",
-           req(args, "q", "t_text1:random OR t_text2:words"),
-           "//lst[@name='highlighting']/lst[@name='2']",
-           "//lst[@name='2']/arr[@name='t_text1']/str[.='random words for highlighting tests']",
-           "//lst[@name='2']/arr[@name='t_text2']/str[.='more random words for second field']"
-           );
+  public void testFieldMatch() {
+    assertU(
+        adoc(
+            "t_text1",
+            "random words for highlighting tests",
+            "id",
+            "1",
+            "t_text2",
+            "more random words for second field"));
+    assertU(commit());
+    assertU(optimize());
+
+    HashMap<String, String> args = new HashMap<>();
+    args.put("hl", "true");
+    args.put("hl.fl", "t_text1 t_text2");
+
+    // default should highlight both random and words in both fields
+    assertQ(
+        "Test Default",
+        req(args, "q", "t_text1:random OR t_text2:words"),
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//lst[@name='1']/arr[@name='t_text1']/str[.='random words for highlighting tests']",
+        "//lst[@name='1']/arr[@name='t_text2']/str[.='more random words for second field']");
+
+    // requireFieldMatch=true - highlighting should only occur if term matched in that field
+    args.put("hl.requireFieldMatch", "true");
+    assertQ(
+        "Test RequireFieldMatch",
+        req(args, "q", "t_text1:random OR t_text2:words"),
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//lst[@name='1']/arr[@name='t_text1']/str[.='random words for highlighting tests']",
+        "//lst[@name='1']/arr[@name='t_text2']/str[.='more random words for second field']");
+
+    // test case for un-optimized index
+    assertU(
+        adoc(
+            "t_text1",
+            "random words for highlighting tests",
+            "id",
+            "2",
+            "t_text2",
+            "more random words for second field"));
+    assertU(delI("1"));
+    assertU(commit());
+    assertQ(
+        "Test RequireFieldMatch on un-optimized index",
+        req(args, "q", "t_text1:random OR t_text2:words"),
+        "//lst[@name='highlighting']/lst[@name='2']",
+        "//lst[@name='2']/arr[@name='t_text1']/str[.='random words for highlighting tests']",
+        "//lst[@name='2']/arr[@name='t_text2']/str[.='more random words for second field']");
   }
 
   @Test
   public void testCustomSimpleFormatterHighlight() {
 
     // do summarization using a custom formatter
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("hl", "true");
     args.put("hl.fl", "t_text");
-    args.put("hl.simple.pre","");
+    args.put("hl.simple.pre", "");
     args.put("hl.simple.post", "");
-    
+
     assertU(adoc("t_text", "a long days night", "id", "1"));
     assertU(commit());
     assertU(optimize());
-    assertQ("Basic summarization",
+    assertQ(
+        "Basic summarization",
         req(args, "q", "t_text:long"),
         "//lst[@name='highlighting']/lst[@name='1']",
-        "//lst[@name='1']/arr[@name='t_text']/str[.='a long days night']"
-    );
-    
+        "//lst[@name='1']/arr[@name='t_text']/str[.='a long days night']");
+
     // test a per-field override
     args.put("f.t_text.hl.simple.pre", "");
     args.put("f.t_text.hl.simple.post", "");
-    assertQ("Basic summarization",
-          req(args, "q", "t_text:long"),
-          "//lst[@name='highlighting']/lst[@name='1']",
-          "//lst[@name='1']/arr[@name='t_text']/str[.='a long days night']"
-          );
-    
+    assertQ(
+        "Basic summarization",
+        req(args, "q", "t_text:long"),
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//lst[@name='1']/arr[@name='t_text']/str[.='a long days night']");
   }
 
   @Test
   public void testLongFragment() {
 
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("hl", "true");
     args.put("hl.fl", "tv_text");
-    
 
-    String text = 
-      "junit: [mkdir] Created dir: /home/klaas/worio/backend/trunk/build-src/solr-nightly/build/test-results [junit] Running org.apache.solr.BasicFunctionalityTest [junit] Tests run: 7, Failures: 0, Errors: 0, Time elapsed: 5.36 sec [junit] Running org.apache.solr.ConvertedLegacyTest [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 8.268 sec [junit] Running org.apache.solr.DisMaxRequestHandlerTest [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 1.56 sec [junit] Running org.apache.solr.HighlighterTest [junit] Tests run: 7, Failures: 0, Errors: 0, Time elapsed: 4.979 sec [junit] Running org.apache.solr.OutputWriterTest [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.797 sec [junit] Running org.apache.solr.SampleTest [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 1.021 sec [junit] Running org.apache.solr.analysis.TestBufferedTokenStream [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.05 sec [junit] Running org.apache.solr.analysis.TestRemoveDuplicatesTokenFilter [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 0.054 sec [junit] Running org.apache.solr.analysis.TestSynonymFilter [junit] Tests run: 6, Failures: 0, Errors: 0, Time elapsed: 0.081 sec [junit] Running org.apache.solr.analysis.TestWordDelimiterFilter [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 1.714 sec [junit] Running org.apache.solr.search.TestDocSet [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.788 sec [junit] Running org.apache.solr.util.SolrPluginUtilsTest [junit] Tests run: 5, Failures: 0, Errors: 0, Time elapsed: 3.519 sec [junit] Running org.apache.solr.util.TestOpenBitSet [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.533 sec";
+    String text =
+        "junit: [mkdir] Created dir: /home/klaas/worio/backend/trunk/build-src/solr-nightly/build/test-results [junit] Running org.apache.solr.BasicFunctionalityTest [junit] Tests run: 7, Failures: 0, Errors: 0, Time elapsed: 5.36 sec [junit] Running org.apache.solr.ConvertedLegacyTest [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 8.268 sec [junit] Running org.apache.solr.DisMaxRequestHandlerTest [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 1.56 sec [junit] Running org.apache.solr.HighlighterTest [junit] Tests run: 7, Failures: 0, Errors: 0, Time elapsed: 4.979 sec [junit] Running org.apache.solr.OutputWriterTest [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.797 sec [junit] Running org.apache.solr.SampleTest [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 1.021 sec [junit] Running org.apache.solr.analysis.TestBufferedTokenStream [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.05 sec [junit] Running org.apache.solr.analysis.TestRemoveDuplicatesTokenFilter [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 0.054 sec [junit] Running org.apache.solr.analysis.TestSynonymFilter [junit] Tests run: 6, Failures: 0, Errors: 0, Time elapsed: 0.081 sec [junit] Running org.apache.solr.analysis.TestWordDelimiterFilter [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 1.714 sec [junit] Running org.apache.solr.search.TestDocSet [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.788 sec [junit] Running org.apache.solr.util.SolrPluginUtilsTest [junit] Tests run: 5, Failures: 0, Errors: 0, Time elapsed: 3.519 sec [junit] Running org.apache.solr.util.TestOpenBitSet [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.533 sec";
     assertU(adoc("tv_text", text, "id", "1"));
     assertU(commit());
     assertU(optimize());
-    assertQ("Basic summarization",
-            req(args, "q", "tv_text:dir"),
-            "//lst[@name='highlighting']/lst[@name='1']",
-            "//lst[@name='1']/arr[@name='tv_text']/str"
-            );
+    assertQ(
+        "Basic summarization",
+        req(args, "q", "tv_text:dir"),
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//lst[@name='1']/arr[@name='tv_text']/str");
   }
 
   @Test
   public void testMaxChars() {
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("fl", "id score");
     args.put("hl", "true");
     args.put("hl.snippets", "10");
     final String field = random().nextBoolean() ? "t_text" : "tv_text";
     args.put("hl.fl", field);
-    
 
     assertU(adoc(field, LONG_TEXT, "id", "1"));
     assertU(commit());
 
-    assertQ("token at start of text",
-            req(args, "q", field + ":disjoint"),
-            "//lst[@name='highlighting']/lst[@name='1']",
-            "//lst[@name='1']/arr[count(str)=1]"
-            );
+    assertQ(
+        "token at start of text",
+        req(args, "q", field + ":disjoint"),
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//lst[@name='1']/arr[count(str)=1]");
     args.put("hl.maxAnalyzedChars", "20");
-    assertQ("token at end of text",
+    assertQ(
+        "token at end of text",
         req(args, "q", field + ":disjoint"),
         "//lst[@name='highlighting']/lst[@name='1']",
-        "//lst[@name='1'][not(*)]"
-    );
+        "//lst[@name='1'][not(*)]");
     args.put("hl.maxAnalyzedChars", "-1");
-    assertQ("token at start of text",
+    assertQ(
+        "token at start of text",
         req(args, "q", field + ":disjoint"),
         "//lst[@name='highlighting']/lst[@name='1']",
-        "//lst[@name='1']/arr[count(str)=1]"
-    );
-
+        "//lst[@name='1']/arr[count(str)=1]");
   }
 
   // Test multi-valued together with hl.maxAnalyzedChars
@@ -519,26 +554,32 @@ public void testMaxChars() {
   public void testMultiValuedMaxAnalyzedChars() throws Exception {
     String shortText = "some short blah blah blah blah";
     final String field = random().nextBoolean() ? "tv_mv_text" : "textgap"; // term vecs or not
-    assertU(adoc(field, shortText,
-        field, LONG_TEXT,
-        "id", "1"));
+    assertU(adoc(field, shortText, field, LONG_TEXT, "id", "1"));
     assertU(commit());
 
-    assertQ(req("q", field + ":(short OR long)",
-            "indent", "on",
-            "hl", "true",
-            "hl.fl", field,
-            "hl.snippets", "2",
-            "hl.maxAnalyzedChars", "8"),
+    assertQ(
+        req(
+            "q",
+            field + ":(short OR long)",
+            "indent",
+            "on",
+            "hl",
+            "true",
+            "hl.fl",
+            field,
+            "hl.snippets",
+            "2",
+            "hl.maxAnalyzedChars",
+            "8"),
         "//lst[@name='highlighting']/lst[@name='1']/arr[count(*)=1]",
         "//lst[@name='1']/arr/str[1][.='some short']"
-        //"//lst[@name='1']/arr/str[2][.='a long days']"
-    );
+        // "//lst[@name='1']/arr/str[2][.='a long days']"
+        );
   }
 
   @Test
   public void testRegexFragmenter() {
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("fl", "id score");
     args.put("hl", "true");
     args.put("hl.snippets", "10");
@@ -546,194 +587,203 @@ public void testRegexFragmenter() {
     args.put("hl.fragmenter", "regex");
     args.put("hl.regex.pattern", "[-\\w ,\"']{20,200}");
     args.put("hl.regex.slop", ".9");
-    
-    String t = "This is an example of a sentence. Another example \"sentence\" with " +
-      "special characters\nand a line-break! Miscellaneous character like ^ are " +
-      "unknowns and end up being bad example s of sentences? I wonder how " +
-      "slashes/other punctuation fare in these examples?";
+
+    String t =
+        "This is an example of a sentence. Another example \"sentence\" with "
+            + "special characters\nand a line-break! Miscellaneous character like ^ are "
+            + "unknowns and end up being bad example s of sentences? I wonder how "
+            + "slashes/other punctuation fare in these examples?";
     assertU(adoc("t_text", t, "id", "1"));
     assertU(commit());
     assertU(optimize());
-    assertQ("regex fragmenter",
+    assertQ(
+        "regex fragmenter",
         req(args, "q", "t_text:example"),
-            "//lst[@name='highlighting']/lst[@name='1']",
-            "//arr/str[.='This is an example of a sentence']",
-            "//arr/str[.='. Another example \"sentence\" with special characters\nand a line-break']",
-            "//arr/str[.=' ^ are unknowns and end up being bad example s of sentences']",
-            "//arr/str[.='/other punctuation fare in these examples?']"
-            );
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//arr/str[.='This is an example of a sentence']",
+        "//arr/str[.='. Another example \"sentence\" with special characters\nand a line-break']",
+        "//arr/str[.=' ^ are unknowns and end up being bad example s of sentences']",
+        "//arr/str[.='/other punctuation fare in these examples?']");
     // try with some punctuation included
     args.put("hl.regex.pattern", "[-\\w ,^/\\n\"']{20,200}");
-    assertQ("regex fragmenter 2",
-            req(args, "q", "t_text:example"),
-            "//lst[@name='highlighting']/lst[@name='1']",
-            "//arr/str[.='This is an example of a sentence']",
-            "//arr/str[.='. Another example \"sentence\" with special characters\nand a line-break']",
-            "//arr/str[.='! Miscellaneous character like ^ are unknowns and end up being bad example s of sentences']",
-            "//arr/str[.='? I wonder how slashes/other punctuation fare in these examples?']"
-            );
+    assertQ(
+        "regex fragmenter 2",
+        req(args, "q", "t_text:example"),
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//arr/str[.='This is an example of a sentence']",
+        "//arr/str[.='. Another example \"sentence\" with special characters\nand a line-break']",
+        "//arr/str[.='! Miscellaneous character like ^ are unknowns and end up being bad example s of sentences']",
+        "//arr/str[.='? I wonder how slashes/other punctuation fare in these examples?']");
   }
-  
+
   @Test
   public void testVariableFragsize() {
-     assertU(adoc("tv_text", "a long days night this should be a piece of text which is is is is is is is is is is is is is is is is is is is is is is is is isis is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is sufficiently lengthly to produce multiple fragments which are not concatenated at all", 
-           "id", "1"));
-     assertU(commit());
-     assertU(optimize());
-
-     // default length
-     HashMap<String,String> args = new HashMap<>();
-     args.put("hl", "true");
-     args.put("hl.fl", "tv_text");
-     assertQ("Basic summarization",
-           req(args, "q", "tv_text:long"),
-           "//lst[@name='highlighting']/lst[@name='1']",
-           "//lst[@name='1']/arr[@name='tv_text']/str[.='a long days night this should be a piece of text which']"
-           );
-     
-     // 25
-     args.put("hl.fragsize","25");
-     assertQ("Basic summarization",
-         req(args, "q", "tv_text:long"),
-           "//lst[@name='highlighting']/lst[@name='1']",
-           "//lst[@name='1']/arr[@name='tv_text']/str[.='a long days night']"
-           );
-     
-     // 0 - NullFragmenter
-     args.put("hl.fragsize","0");
-     assertQ("Basic summarization",
-         req(args, "q", "tv_text:long"),
-           "//lst[@name='highlighting']/lst[@name='1']",
-           "//lst[@name='1']/arr[@name='tv_text']/str[.='a long days night this should be a piece of text which is is is is is is is is is is is is is is is is is is is is is is is is isis is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is sufficiently lengthly to produce multiple fragments which are not concatenated at all']"
-           );
+    assertU(
+        adoc(
+            "tv_text",
+            "a long days night this should be a piece of text which is is is is is is is is is is is is is is is is is is is is is is is is isis is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is sufficiently lengthly to produce multiple fragments which are not concatenated at all",
+            "id",
+            "1"));
+    assertU(commit());
+    assertU(optimize());
+
+    // default length
+    HashMap<String, String> args = new HashMap<>();
+    args.put("hl", "true");
+    args.put("hl.fl", "tv_text");
+    assertQ(
+        "Basic summarization",
+        req(args, "q", "tv_text:long"),
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//lst[@name='1']/arr[@name='tv_text']/str[.='a long days night this should be a piece of text which']");
+
+    // 25
+    args.put("hl.fragsize", "25");
+    assertQ(
+        "Basic summarization",
+        req(args, "q", "tv_text:long"),
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//lst[@name='1']/arr[@name='tv_text']/str[.='a long days night']");
+
+    // 0 - NullFragmenter
+    args.put("hl.fragsize", "0");
+    assertQ(
+        "Basic summarization",
+        req(args, "q", "tv_text:long"),
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//lst[@name='1']/arr[@name='tv_text']/str[.='a long days night this should be a piece of text which is is is is is is is is is is is is is is is is is is is is is is is is isis is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is is sufficiently lengthly to produce multiple fragments which are not concatenated at all']");
   }
 
   @Test
   public void testAlternateSummary() {
-     //long document
-     assertU(adoc("tv_text", "keyword is only here",
-                  "t_text", "a piece of text to be substituted",
-                  "id", "1",
-                  "foo_t","hi"));
-     assertU(commit());
-     assertU(optimize());
+    // long document
+    assertU(
+        adoc(
+            "tv_text", "keyword is only here",
+            "t_text", "a piece of text to be substituted",
+            "id", "1",
+            "foo_t", "hi"));
+    assertU(commit());
+    assertU(optimize());
 
     // do summarization
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("hl", "true");
-    args.put("hl.fragsize","0");
+    args.put("hl.fragsize", "0");
     args.put("hl.fl", "t_text");
 
     // no alternate
-    assertQ("Alternate summarization",
+    assertQ(
+        "Alternate summarization",
         req(args, "q", "tv_text:keyword"),
-            "//lst[@name='highlighting']/lst[@name='1']",
-            "//lst[@name='highlighting']/lst[@name='1' and count(*)=0]"
-            );
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//lst[@name='highlighting']/lst[@name='1' and count(*)=0]");
 
     // with an alternate
     args.put("hl.alternateField", "foo_t");
-    assertQ("Alternate summarization",
+    assertQ(
+        "Alternate summarization",
         req(args, "q", "tv_text:keyword"),
-            "//lst[@name='highlighting']/lst[@name='1' and count(*)=1]",
-            "//lst[@name='highlighting']/lst[@name='1']/arr[@name='t_text']/str[.='hi']"
-            );
+        "//lst[@name='highlighting']/lst[@name='1' and count(*)=1]",
+        "//lst[@name='highlighting']/lst[@name='1']/arr[@name='t_text']/str[.='hi']");
 
     // with an alternate + max length
     args.put("hl.alternateField", "t_text");
     args.put("hl.maxAlternateFieldLength", "15");
-    assertQ("Alternate summarization",
+    assertQ(
+        "Alternate summarization",
         req(args, "q", "tv_text:keyword"),
-            "//lst[@name='highlighting']/lst[@name='1' and count(*)=1]",
-            "//lst[@name='highlighting']/lst[@name='1']/arr[@name='t_text']/str[.='a piece of text']"
-            );
+        "//lst[@name='highlighting']/lst[@name='1' and count(*)=1]",
+        "//lst[@name='highlighting']/lst[@name='1']/arr[@name='t_text']/str[.='a piece of text']");
 
     // with a non-existing alternate field + max length
     args.put("hl.alternateField", "NonExistingField");
     args.put("hl.maxAlternateFieldLength", "15");
-    assertQ("Alternate summarization",
+    assertQ(
+        "Alternate summarization",
         req(args, "q", "tv_text:keyword"),
-            "//lst[@name='highlighting']/lst[@name='1' and count(*)=1]",
-            "//lst[@name='highlighting']/lst[@name='1']/arr[@name='t_text']/str[.='a piece of text']"
-            );
+        "//lst[@name='highlighting']/lst[@name='1' and count(*)=1]",
+        "//lst[@name='highlighting']/lst[@name='1']/arr[@name='t_text']/str[.='a piece of text']");
   }
 
   @Test
   public void testAlternateSummaryWithHighlighting() {
-     //long document
-     assertU(adoc("tv_text", "keyword is only here, tv_text alternate field",
-                  "t_text", "a piece of text to be substituted",
-                  "other_t", "keyword",
-                  "id", "1",
-                  "foo_t","hi"));
-     assertU(commit());
-     assertU(optimize());
+    // long document
+    assertU(
+        adoc(
+            "tv_text", "keyword is only here, tv_text alternate field",
+            "t_text", "a piece of text to be substituted",
+            "other_t", "keyword",
+            "id", "1",
+            "foo_t", "hi"));
+    assertU(commit());
+    assertU(optimize());
 
     // Prove that hl.highlightAlternate is default true and respects maxAlternateFieldLength
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("hl", "true");
-    args.put("hl.fragsize","0");
+    args.put("hl.fragsize", "0");
     args.put("hl.fl", "t_text");
     args.put("hl.simple.pre", "");
     args.put("hl.simple.post", "");
     args.put("hl.alternateField", "tv_text");
     args.put("hl.maxAlternateFieldLength", "39");
-    assertQ("Alternate summarization with highlighting",
+    assertQ(
+        "Alternate summarization with highlighting",
         req(args, "q", "tv_text:keyword"),
-            "//lst[@name='highlighting']/lst[@name='1' and count(*)=1]",
-            "//lst[@name='highlighting']/lst[@name='1']/arr[@name='t_text']/str[.='keyword is only here, tv_text']"
-            );
+        "//lst[@name='highlighting']/lst[@name='1' and count(*)=1]",
+        "//lst[@name='highlighting']/lst[@name='1']/arr[@name='t_text']/str[.='keyword is only here, tv_text']");
 
-    // Query on other field than hl or alternate. Still we get the highlighted snippet from alternate
-    assertQ("Alternate summarization with highlighting, query other field",
+    // Query on other field than hl or alternate. Still we get the highlighted snippet from
+    // alternate
+    assertQ(
+        "Alternate summarization with highlighting, query other field",
         req(args, "q", "other_t:keyword"),
-            "//lst[@name='highlighting']/lst[@name='1' and count(*)=1]",
-            "//lst[@name='highlighting']/lst[@name='1']/arr[@name='t_text']/str[.='keyword is only here, tv_text']"
-            );
+        "//lst[@name='highlighting']/lst[@name='1' and count(*)=1]",
+        "//lst[@name='highlighting']/lst[@name='1']/arr[@name='t_text']/str[.='keyword is only here, tv_text']");
 
     // With hl.requireFieldMatch, will not highlight but fall back to plain-text alternate
     args.put("hl.requireFieldMatch", "true");
-    assertQ("Alternate summarization with highlighting, requireFieldMatch",
+    assertQ(
+        "Alternate summarization with highlighting, requireFieldMatch",
         req(args, "q", "other_t:keyword"),
-            "//lst[@name='highlighting']/lst[@name='1' and count(*)=1]",
-            "//lst[@name='highlighting']/lst[@name='1']/arr[@name='t_text']/str[.='keyword is only here, tv_text alternate']"
-            );
+        "//lst[@name='highlighting']/lst[@name='1' and count(*)=1]",
+        "//lst[@name='highlighting']/lst[@name='1']/arr[@name='t_text']/str[.='keyword is only here, tv_text alternate']");
     args.put("hl.requireFieldMatch", "false");
 
-
     // Works with field specific params, overriding maxAlternateFieldLength to return everything
     args.remove("hl.alternateField");
     args.put("f.t_text.hl.alternateField", "tv_text");
     args.put("f.t_text.hl.maxAlternateFieldLength", "0");
-    assertQ("Alternate summarization with highlighting",
+    assertQ(
+        "Alternate summarization with highlighting",
         req(args, "q", "tv_text:keyword"),
-            "//lst[@name='highlighting']/lst[@name='1' and count(*)=1]",
-            "//lst[@name='highlighting']/lst[@name='1']/arr[@name='t_text']/str[.='keyword is only here, tv_text alternate field']"
-            );
+        "//lst[@name='highlighting']/lst[@name='1' and count(*)=1]",
+        "//lst[@name='highlighting']/lst[@name='1']/arr[@name='t_text']/str[.='keyword is only here, tv_text alternate field']");
 
     // Prove fallback highlighting works also with FVH
     args.put("hl.method", "fastVector");
     args.put("hl.tag.pre", "");
     args.put("hl.tag.post", "");
     args.put("f.t_text.hl.maxAlternateFieldLength", "18");
-    assertQ("Alternate summarization with highlighting using FVH",
+    assertQ(
+        "Alternate summarization with highlighting using FVH",
         req(args, "q", "tv_text:keyword"),
-            "//lst[@name='highlighting']/lst[@name='1' and count(*)=1]",
-        "//lst[@name='highlighting']/lst[@name='1']/arr[@name='t_text']/str[.='keyword is only here']"
-            );
+        "//lst[@name='highlighting']/lst[@name='1' and count(*)=1]",
+        "//lst[@name='highlighting']/lst[@name='1']/arr[@name='t_text']/str[.='keyword is only here']");
 
     // Prove it is possible to turn off highlighting of alternate field
     args.put("hl.highlightAlternate", "false");
-    assertQ("Alternate summarization without highlighting",
+    assertQ(
+        "Alternate summarization without highlighting",
         req(args, "q", "tv_text:keyword"),
-            "//lst[@name='highlighting']/lst[@name='1' and count(*)=1]",
-            "//lst[@name='highlighting']/lst[@name='1']/arr[@name='t_text']/str[.='keyword is only he']"
-            );
+        "//lst[@name='highlighting']/lst[@name='1' and count(*)=1]",
+        "//lst[@name='highlighting']/lst[@name='1']/arr[@name='t_text']/str[.='keyword is only he']");
   }
 
   @Test
   public void testPhraseHighlighter() {
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("hl", "true");
     args.put("hl.fl", "t_text");
     args.put("hl.fragsize", "40");
@@ -741,48 +791,59 @@ public void testPhraseHighlighter() {
     args.put("hl.usePhraseHighlighter", "false");
 
     // String borrowed from Lucene's HighlighterTest
-    String t = "This piece of text refers to Kennedy at the beginning then has a longer piece of text that is very long in the middle and finally ends with another reference to Kennedy";
-    
+    String t =
+        "This piece of text refers to Kennedy at the beginning then has a longer piece of text that is very long in the middle and finally ends with another reference to Kennedy";
+
     assertU(adoc("t_text", t, "id", "1"));
     assertU(commit());
     assertU(optimize());
-    
-    String oldHighlight1 = "//lst[@name='1']/arr[@name='t_text']/str[.='This piece of text refers to Kennedy']";
-    String oldHighlight2 = "//lst[@name='1']/arr[@name='t_text']/str[.=' at the beginning then has a longer piece of text']";
-    String oldHighlight3 = "//lst[@name='1']/arr[@name='t_text']/str[.=' with another reference to Kennedy']";
-    String newHighlight1 = "//lst[@name='1']/arr[@name='t_text']/str[.='This piece of text refers to Kennedy']";
-  
+
+    String oldHighlight1 =
+        "//lst[@name='1']/arr[@name='t_text']/str[.='This piece of text refers to Kennedy']";
+    String oldHighlight2 =
+        "//lst[@name='1']/arr[@name='t_text']/str[.=' at the beginning then has a longer piece of text']";
+    String oldHighlight3 =
+        "//lst[@name='1']/arr[@name='t_text']/str[.=' with another reference to Kennedy']";
+    String newHighlight1 =
+        "//lst[@name='1']/arr[@name='t_text']/str[.='This piece of text refers to Kennedy']";
+
     // check if old functionality is still the same
-    assertQ("Phrase highlighting - old",
+    assertQ(
+        "Phrase highlighting - old",
         req(args, "q", "t_text:\"text refers\""),
         "//lst[@name='highlighting']/lst[@name='1']",
-        oldHighlight1, oldHighlight2, oldHighlight3
-        );
+        oldHighlight1,
+        oldHighlight2,
+        oldHighlight3);
 
-    assertQ("Phrase highlighting - old",
+    assertQ(
+        "Phrase highlighting - old",
         req(args, "q", "t_text:text refers"),
         "//lst[@name='highlighting']/lst[@name='1']",
-        oldHighlight1, oldHighlight2, oldHighlight3
-        );
-    
+        oldHighlight1,
+        oldHighlight2,
+        oldHighlight3);
+
     // now check if Lucene-794 highlighting works as expected
     args.put("hl.usePhraseHighlighter", "true");
 
     // check phrase highlighting
-    assertQ("Phrase highlighting - Lucene-794",
+    assertQ(
+        "Phrase highlighting - Lucene-794",
         req(args, "q", "t_text:\"text refers\""),
         "//lst[@name='highlighting']/lst[@name='1']",
-        newHighlight1
-        );
+        newHighlight1);
 
     // non phrase queries should be highlighted as they were before this fix
-    assertQ("Phrase highlighting - Lucene-794",
+    assertQ(
+        "Phrase highlighting - Lucene-794",
         req(args, "q", "t_text:text refers"),
         "//lst[@name='highlighting']/lst[@name='1']",
-        oldHighlight1, oldHighlight2, oldHighlight3
-        );
+        oldHighlight1,
+        oldHighlight2,
+        oldHighlight3);
   }
-  
+
   @Test
   public void testGetHighlightFields() {
    HashMap<String, String> args = new HashMap<>();
@@ -790,37 +851,36 @@ public void testGetHighlightFields() {
     args.put("hl", "true");
     args.put("hl.fl", "t*");
 
-    assertU(adoc("id", "0", "title", "test", // static stored
-        "text", "test", // static not stored
-        "foo_s", "test", // dynamic stored
-        "foo_sI", "test", // dynamic not stored
-        "bar_s", "test", // dynamic stored
-        "bar_sI", "test", // dynamic not stored
-        "weight", "1.0")); // stored but not text
+    assertU(
+        adoc(
+            "id", "0", "title", "test", // static stored
+            "text", "test", // static not stored
+            "foo_s", "test", // dynamic stored
+            "foo_sI", "test", // dynamic not stored
+            "bar_s", "test", // dynamic stored
+            "bar_sI", "test", // dynamic not stored
+            "weight", "1.0")); // stored but not text
     assertU(commit());
     assertU(optimize());
 
     SolrQueryRequest request = req(args, "q", "test");
     SolrHighlighter highlighter = getHighlighter();
-    List<String> highlightFieldNames = Arrays.asList(highlighter
-        .getHighlightFields(null, request, new String[] {}));
-    assertTrue("Expected to highlight on field \"title\"", highlightFieldNames
-        .contains("title"));
-    assertFalse("Expected to not highlight on field \"text\"",
-        highlightFieldNames.contains("text"));
-    assertFalse("Expected to not highlight on field \"weight\"",
-        highlightFieldNames.contains("weight"));
+    List<String> highlightFieldNames =
+        Arrays.asList(highlighter.getHighlightFields(null, request, new String[] {}));
+    assertTrue("Expected to highlight on field \"title\"", highlightFieldNames.contains("title"));
+    assertFalse(
+        "Expected to not highlight on field \"text\"", highlightFieldNames.contains("text"));
+    assertFalse(
+        "Expected to not highlight on field \"weight\"", highlightFieldNames.contains("weight"));
     request.close();
 
     args.put("hl.fl", "foo_*");
     request = req(args, "q", "test");
     highlighter = getHighlighter();
-    highlightFieldNames = Arrays.asList(highlighter.getHighlightFields(null,
-        request, new String[] {}));
-    assertEquals("Expected one field to highlight on", 1, highlightFieldNames
-        .size());
-    assertEquals("Expected to highlight on field \"foo_s\"", "foo_s",
-        highlightFieldNames.get(0));
+    highlightFieldNames =
+        Arrays.asList(highlighter.getHighlightFields(null, request, new String[] {}));
+    assertEquals("Expected one field to highlight on", 1, highlightFieldNames.size());
+    assertEquals("Expected to highlight on field \"foo_s\"", "foo_s", highlightFieldNames.get(0));
     request.close();
 
     // SOLR-5127
@@ -831,9 +891,9 @@ public void testGetHighlightFields() {
     highlightedSetExpected.add("bar_s");
     try (var localRequest = req(args, "q", "test")) {
       highlighter = getHighlighter();
-      final Set<String> highlightedSetActual = new HashSet<String>(
-          Arrays.asList(highlighter.getHighlightFields(null,
-              localRequest, new String[] {})));
+      final Set<String> highlightedSetActual =
+          new HashSet<String>(
+              Arrays.asList(highlighter.getHighlightFields(null, localRequest, new String[] {})));
       assertEquals(highlightedSetExpected, highlightedSetActual);
     }
 
@@ -841,16 +901,12 @@ public void testGetHighlightFields() {
     args.put("hl.fl", "title, text"); // comma then space
     request = req(args, "q", "test");
     highlighter = getHighlighter();
-    highlightFieldNames = Arrays.asList(highlighter.getHighlightFields(null,
-        request, new String[] {}));
-    assertEquals("Expected one field to highlight on", 2, highlightFieldNames
-        .size());
-    assertTrue("Expected to highlight on field \"title\"",
-        highlightFieldNames.contains("title"));
-    assertTrue("Expected to highlight on field \"text\"",
-        highlightFieldNames.contains("text"));
-    assertFalse("Expected to not highlight on field \"\"",
-        highlightFieldNames.contains(""));
+    highlightFieldNames =
+        Arrays.asList(highlighter.getHighlightFields(null, request, new String[] {}));
+    assertEquals("Expected one field to highlight on", 2, highlightFieldNames.size());
+    assertTrue("Expected to highlight on field \"title\"", highlightFieldNames.contains("title"));
+    assertTrue("Expected to highlight on field \"text\"", highlightFieldNames.contains("text"));
+    assertFalse("Expected to not highlight on field \"\"", highlightFieldNames.contains(""));
 
     request.close();
   }
@@ -859,288 +915,447 @@ public void testGetHighlightFields() {
   public void testDefaultFieldPrefixWildcardHighlight() {
 
     // do summarization using re-analysis of the field
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("hl", "true");
     args.put("df", "t_text");
     args.put("hl.fl", "");
     args.put("hl.usePhraseHighlighter", "true");
     args.put("hl.highlightMultiTerm", "true");
-    
+
     assertU(adoc("t_text", "a long day's night", "id", "1"));
     assertU(commit());
     assertU(optimize());
-    assertQ("Basic summarization",
+    assertQ(
+        "Basic summarization",
         req(args, "q", "lon*"),
-            "//lst[@name='highlighting']/lst[@name='1']",
-            "//lst[@name='1']/arr[@name='t_text']/str"
-            );
-
+        "//lst[@name='highlighting']/lst[@name='1']",
+        "//lst[@name='1']/arr[@name='t_text']/str");
   }
 
   @Test
   public void testDefaultFieldNonPrefixWildcardHighlight() {
 
     // do summarization using re-analysis of the field
-    HashMap<String,String> args = new HashMap<>();
+    HashMap<String, String> args = new HashMap<>();
     args.put("hl", "true");
     args.put("df", "t_text");
     args.put("hl.fl", "");
     args.put("hl.usePhraseHighlighter", "true");
     args.put("hl.highlightMultiTerm", "true");
-    
+
     assertU(adoc("t_text", "a long day's night", "id", "1"));
     assertU(commit());
     assertU(optimize());
-    assertQ("Basic summarization",
-        req(args, "q", "l*g",
+    assertQ(
+        "Basic summarization",
+        req(
+            args,
+            "q",
+            "l*g",
             "//lst[@name='highlighting']/lst[@name='1']",
-            "//lst[@name='1']/arr[@name='t_text']/str"
-            ));
-
+            "//lst[@name='1']/arr[@name='t_text']/str"));
   }
-  
+
   public void testSubwordWildcardHighlight() {
     assertU(adoc("subword", "lorem PowerShot.com ipsum", "id", "1"));
     assertU(commit());
-    assertQ("subword wildcard highlighting", 
-            req("q", "subword:pow*", "hl", "true", "hl.fl", "subword"),
-            "//lst[@name='highlighting']/lst[@name='1']" +
-            "/arr[@name='subword']/str='lorem PowerShot.com ipsum'");
+    assertQ(
+        "subword wildcard highlighting",
+        req("q", "subword:pow*", "hl", "true", "hl.fl", "subword"),
+        "//lst[@name='highlighting']/lst[@name='1']"
+            + "/arr[@name='subword']/str='lorem PowerShot.com ipsum'");
   }
 
   public void testSubwordWildcardHighlightWithTermOffsets() {
     assertU(adoc("subword_offsets", "lorem PowerShot.com ipsum", "id", "1"));
     assertU(commit());
-    assertQ("subword wildcard highlighting", 
-            req("q", "subword_offsets:pow*", "hl", "true", "hl.fl", "subword_offsets"),
-            "//lst[@name='highlighting']/lst[@name='1']" +
-            "/arr[@name='subword_offsets']/str='lorem PowerShot.com ipsum'");
+    assertQ(
+        "subword wildcard highlighting",
+        req("q", "subword_offsets:pow*", "hl", "true", "hl.fl", "subword_offsets"),
+        "//lst[@name='highlighting']/lst[@name='1']"
+            + "/arr[@name='subword_offsets']/str='lorem PowerShot.com ipsum'");
   }
-  
+
   public void testSubwordWildcardHighlightWithTermOffsets2() {
     assertU(adoc("subword_offsets", "lorem PowerShot ipsum", "id", "1"));
     assertU(commit());
-    assertQ("subword wildcard highlighting",
-            req("q", "subword_offsets:pow*", "hl", "true", "hl.fl", "subword_offsets"),
-            "//lst[@name='highlighting']/lst[@name='1']" +
-            "/arr[@name='subword_offsets']/str='lorem PowerShot ipsum'");
+    assertQ(
+        "subword wildcard highlighting",
+        req("q", "subword_offsets:pow*", "hl", "true", "hl.fl", "subword_offsets"),
+        "//lst[@name='highlighting']/lst[@name='1']"
+            + "/arr[@name='subword_offsets']/str='lorem PowerShot ipsum'");
   }
-  
+
   public void testHlQParameter() {
-    assertU(adoc("title", "Apache Software Foundation", "t_text", "apache software foundation", "id", "1"));
+    assertU(
+        adoc(
+            "title",
+            "Apache Software Foundation",
+            "t_text",
+            "apache software foundation",
+            "id",
+            "1"));
     assertU(commit());
-    assertQ("hl.q parameter overrides q parameter", 
+    assertQ(
+        "hl.q parameter overrides q parameter",
         req("q", "title:Apache", "hl", "true", "hl.fl", "title", "hl.q", "title:Software"),
-        "//lst[@name='highlighting']/lst[@name='1']" +
-        "/arr[@name='title']/str='Apache Software Foundation'");
-    assertQ("hl.q parameter overrides q parameter", 
-        req("q", "title:Apache", "hl", "true", "hl.fl", "title", "hl.q", "{!v=$qq}", "qq", "title:Foundation"),
-        "//lst[@name='highlighting']/lst[@name='1']" +
-        "/arr[@name='title']/str='Apache Software Foundation'");
-    assertQ("hl.q parameter uses localparam parser definition correctly",
-        req("q", "Apache", "defType", "edismax", "qf", "title t_text", "hl", "true", "hl.fl", "title", "hl.q", "{!edismax}Software", "hl.qparser", "lucene"),
-        "//lst[@name='highlighting']/lst[@name='1']" +
-            "/arr[@name='title']/str='Apache Software Foundation'");
-    assertQ("hl.q parameter uses defType correctly",
-        req("q", "Apache", "defType", "edismax", "qf", "title t_text", "hl", "true", "hl.fl", "title", "hl.q", "Software"),
-        "//lst[@name='highlighting']/lst[@name='1']" +
-        "/arr[@name='title']/str='Apache Software Foundation'");
-    assertQ("hl.q parameter uses hl.qparser param correctly",
-        req("q", "t_text:Apache", "qf", "title t_text", "hl", "true", "hl.fl", "title", "hl.q", "Software", "hl.qparser", "edismax"),
-        "//lst[@name='highlighting']/lst[@name='1']" +
-            "/arr[@name='title']/str='Apache Software Foundation'");
+        "//lst[@name='highlighting']/lst[@name='1']"
+            + "/arr[@name='title']/str='Apache Software Foundation'");
+    assertQ(
+        "hl.q parameter overrides q parameter",
+        req(
+            "q",
+            "title:Apache",
+            "hl",
+            "true",
+            "hl.fl",
+            "title",
+            "hl.q",
+            "{!v=$qq}",
+            "qq",
+            "title:Foundation"),
+        "//lst[@name='highlighting']/lst[@name='1']"
+            + "/arr[@name='title']/str='Apache Software Foundation'");
+    assertQ(
+        "hl.q parameter uses localparam parser definition correctly",
+        req(
+            "q",
+            "Apache",
+            "defType",
+            "edismax",
+            "qf",
+            "title t_text",
+            "hl",
+            "true",
+            "hl.fl",
+            "title",
+            "hl.q",
+            "{!edismax}Software",
+            "hl.qparser",
+            "lucene"),
+        "//lst[@name='highlighting']/lst[@name='1']"
+            + "/arr[@name='title']/str='Apache Software Foundation'");
+    assertQ(
+        "hl.q parameter uses defType correctly",
+        req(
+            "q",
+            "Apache",
+            "defType",
+            "edismax",
+            "qf",
+            "title t_text",
+            "hl",
+            "true",
+            "hl.fl",
+            "title",
+            "hl.q",
+            "Software"),
+        "//lst[@name='highlighting']/lst[@name='1']"
+            + "/arr[@name='title']/str='Apache Software Foundation'");
+    assertQ(
+        "hl.q parameter uses hl.qparser param correctly",
+        req(
+            "q",
+            "t_text:Apache",
+            "qf",
+            "title t_text",
+            "hl",
+            "true",
+            "hl.fl",
+            "title",
+            "hl.q",
+            "Software",
+            "hl.qparser",
+            "edismax"),
+        "//lst[@name='highlighting']/lst[@name='1']"
+            + "/arr[@name='title']/str='Apache Software Foundation'");
   }
 
   public void testHlQEdismaxParameter() {
     assertU(adoc("title", "Apache Software Foundation", "id", "1"));
     assertU(commit());
-    assertQ("hl.q parameter overrides q parameter",
-        req("q", "title:Apache", "hl", "true", "hl.fl", "title", "hl.q", "{!edismax qf=title v=Software}"),
-        "//lst[@name='highlighting']/lst[@name='1']" +
-            "/arr[@name='title']/str='Apache Software Foundation'");
-    assertQ("hl.q parameter overrides q parameter",
-        req("q", "title:Apache", "hl", "true", "hl.fl", "title", "hl.q", "{!v=$qq}", "qq", "title:Foundation"),
-        "//lst[@name='highlighting']/lst[@name='1']" +
-            "/arr[@name='title']/str='Apache Software Foundation'");
+    assertQ(
+        "hl.q parameter overrides q parameter",
+        req(
+            "q",
+            "title:Apache",
+            "hl",
+            "true",
+            "hl.fl",
+            "title",
+            "hl.q",
+            "{!edismax qf=title v=Software}"),
+        "//lst[@name='highlighting']/lst[@name='1']"
+            + "/arr[@name='title']/str='Apache Software Foundation'");
+    assertQ(
+        "hl.q parameter overrides q parameter",
+        req(
+            "q",
+            "title:Apache",
+            "hl",
+            "true",
+            "hl.fl",
+            "title",
+            "hl.q",
+            "{!v=$qq}",
+            "qq",
+            "title:Foundation"),
+        "//lst[@name='highlighting']/lst[@name='1']"
+            + "/arr[@name='title']/str='Apache Software Foundation'");
   }
 
   @Test
   public void testMaxMvParams() {
-    assertU(adoc("title", "Apache Software Foundation", "id", "1000",
-        "lower", "gap1 target",
-        "lower", "gap2 target",
-        "lower", "gap3 nothing",
-        "lower", "gap4 nothing",
-        "lower", "gap5 target",
-        "lower", "gap6 target",
-        "lower", "gap7 nothing",
-        "lower", "gap8 nothing",
-        "lower", "gap9 target",
-        "lower", "gap10 target"));
+    assertU(
+        adoc(
+            "title",
+            "Apache Software Foundation",
+            "id",
+            "1000",
+            "lower",
+            "gap1 target",
+            "lower",
+            "gap2 target",
+            "lower",
+            "gap3 nothing",
+            "lower",
+            "gap4 nothing",
+            "lower",
+            "gap5 target",
+            "lower",
+            "gap6 target",
+            "lower",
+            "gap7 nothing",
+            "lower",
+            "gap8 nothing",
+            "lower",
+            "gap9 target",
+            "lower",
+            "gap10 target"));
 
     assertU(commit());
 
     // First ensure we can count all six
-    assertQ("Counting all MV pairs failed",
+    assertQ(
+        "Counting all MV pairs failed",
         req(
-            "q", "id:1000",
-            HighlightParams.HIGHLIGHT, "true",
-            HighlightParams.FIELDS, "lower",
-            HighlightParams.Q, "target",
-            HighlightParams.SNIPPETS, "100"
-        ),
-        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=6]"
-    );
+            "q",
+            "id:1000",
+            HighlightParams.HIGHLIGHT,
+            "true",
+            HighlightParams.FIELDS,
+            "lower",
+            HighlightParams.Q,
+            "target",
+            HighlightParams.SNIPPETS,
+            "100"),
+        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=6]");
 
     // NOTE: These tests seem repeated, but we're testing for off-by-one errors
 
     // Now we should see exactly 2 by limiting the number of values searched to 4
-    assertQ("Off by one by going too far",
+    assertQ(
+        "Off by one by going too far",
         req(
-            "q", "id:1000",
-            HighlightParams.HIGHLIGHT, "true",
-            HighlightParams.FIELDS, "lower",
-            HighlightParams.Q, "target",
-            HighlightParams.SNIPPETS, "100",
-            HighlightParams.MAX_MULTIVALUED_TO_EXAMINE, "4"
-        ),
-        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=2]"
-    );
+            "q",
+            "id:1000",
+            HighlightParams.HIGHLIGHT,
+            "true",
+            HighlightParams.FIELDS,
+            "lower",
+            HighlightParams.Q,
+            "target",
+            HighlightParams.SNIPPETS,
+            "100",
+            HighlightParams.MAX_MULTIVALUED_TO_EXAMINE,
+            "4"),
+        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=2]");
 
     // Does 0 work?
-    assertQ("Off by one by going too far",
+    assertQ(
+        "Off by one by going too far",
         req(
-            "q", "id:1000",
-            HighlightParams.HIGHLIGHT, "true",
-            HighlightParams.FIELDS, "lower",
-            HighlightParams.Q, "target",
-            HighlightParams.SNIPPETS, "100",
-            HighlightParams.MAX_MULTIVALUED_TO_EXAMINE, "0"
-        ),
-        "//lst[@name='highlighting']/lst[@name='1000' and count(child::*) = 0]"
-    );
+            "q",
+            "id:1000",
+            HighlightParams.HIGHLIGHT,
+            "true",
+            HighlightParams.FIELDS,
+            "lower",
+            HighlightParams.Q,
+            "target",
+            HighlightParams.SNIPPETS,
+            "100",
+            HighlightParams.MAX_MULTIVALUED_TO_EXAMINE,
+            "0"),
+        "//lst[@name='highlighting']/lst[@name='1000' and count(child::*) = 0]");
 
     // Now we should see exactly 2 by limiting the number of values searched to 2
-    assertQ("Off by one by not going far enough",
+    assertQ(
+        "Off by one by not going far enough",
         req(
-            "q", "id:1000",
-            HighlightParams.HIGHLIGHT, "true",
-            HighlightParams.FIELDS, "lower",
-            HighlightParams.Q, "target",
-            HighlightParams.SNIPPETS, "100",
-            HighlightParams.MAX_MULTIVALUED_TO_EXAMINE, "2"
-        ),
-        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=2]"
-    );
+            "q",
+            "id:1000",
+            HighlightParams.HIGHLIGHT,
+            "true",
+            HighlightParams.FIELDS,
+            "lower",
+            HighlightParams.Q,
+            "target",
+            HighlightParams.SNIPPETS,
+            "100",
+            HighlightParams.MAX_MULTIVALUED_TO_EXAMINE,
+            "2"),
+        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=2]");
 
     // Now we should see exactly 1 by limiting the number of values searched to 1
-    assertQ("Not counting exactly 1",
+    assertQ(
+        "Not counting exactly 1",
         req(
-            "q", "id:1000",
-            HighlightParams.HIGHLIGHT, "true",
-            HighlightParams.FIELDS, "lower",
-            HighlightParams.Q, "target",
-            HighlightParams.SNIPPETS, "100",
-            HighlightParams.MAX_MULTIVALUED_TO_EXAMINE, "1"
-        ),
-        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=1]"
-    );
+            "q",
+            "id:1000",
+            HighlightParams.HIGHLIGHT,
+            "true",
+            HighlightParams.FIELDS,
+            "lower",
+            HighlightParams.Q,
+            "target",
+            HighlightParams.SNIPPETS,
+            "100",
+            HighlightParams.MAX_MULTIVALUED_TO_EXAMINE,
+            "1"),
+        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=1]");
 
     // Now we should see exactly 4 by limiting the number of values found to 4
-    assertQ("Matching 4 should exactly match 4",
+    assertQ(
+        "Matching 4 should exactly match 4",
         req(
-            "q", "id:1000",
-            HighlightParams.HIGHLIGHT, "true",
-            HighlightParams.FIELDS, "lower",
-            HighlightParams.Q, "target",
-            HighlightParams.SNIPPETS, "100",
-            HighlightParams.MAX_MULTIVALUED_TO_MATCH, "4"
-        ),
-        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=4]"
-    );
+            "q",
+            "id:1000",
+            HighlightParams.HIGHLIGHT,
+            "true",
+            HighlightParams.FIELDS,
+            "lower",
+            HighlightParams.Q,
+            "target",
+            HighlightParams.SNIPPETS,
+            "100",
+            HighlightParams.MAX_MULTIVALUED_TO_MATCH,
+            "4"),
+        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=4]");
 
     // But if hl.preserveMulti=true then we should see 6 snippets even though 2 didn't match
-    assertQ("hl.preserveMulti",
+    assertQ(
+        "hl.preserveMulti",
         req(
-            "q", "id:1000",
-            HighlightParams.HIGHLIGHT, "true",
-            HighlightParams.FIELDS, "lower",
-            HighlightParams.Q, "target",
-            HighlightParams.SNIPPETS, "100",
-            HighlightParams.MAX_MULTIVALUED_TO_MATCH, "4",
-            HighlightParams.PRESERVE_MULTI, "true"
-        ),
-        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=6]"
-    );
+            "q",
+            "id:1000",
+            HighlightParams.HIGHLIGHT,
+            "true",
+            HighlightParams.FIELDS,
+            "lower",
+            HighlightParams.Q,
+            "target",
+            HighlightParams.SNIPPETS,
+            "100",
+            HighlightParams.MAX_MULTIVALUED_TO_MATCH,
+            "4",
+            HighlightParams.PRESERVE_MULTI,
+            "true"),
+        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=6]");
 
     // Now we should see exactly 6 by limiting the number of values found to 6
-    assertQ("Matching 6 should exactly search them all",
+    assertQ(
+        "Matching 6 should exactly search them all",
         req(
-            "q", "id:1000",
-            HighlightParams.HIGHLIGHT, "true",
-            HighlightParams.FIELDS, "lower",
-            HighlightParams.Q, "target",
-            HighlightParams.SNIPPETS, "100",
-            HighlightParams.MAX_MULTIVALUED_TO_MATCH, "6"
-        ),
-        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=6]"
-    );
+            "q",
+            "id:1000",
+            HighlightParams.HIGHLIGHT,
+            "true",
+            HighlightParams.FIELDS,
+            "lower",
+            HighlightParams.Q,
+            "target",
+            HighlightParams.SNIPPETS,
+            "100",
+            HighlightParams.MAX_MULTIVALUED_TO_MATCH,
+            "6"),
+        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=6]");
 
     // Now we should see exactly 1 by limiting the number of values found to 1
-    assertQ("Matching 6 should exactly match them all",
+    assertQ(
+        "Matching 6 should exactly match them all",
         req(
-            "q", "id:1000",
-            HighlightParams.HIGHLIGHT, "true",
-            HighlightParams.FIELDS, "lower",
-            HighlightParams.Q, "target",
-            HighlightParams.SNIPPETS, "100",
-            HighlightParams.MAX_MULTIVALUED_TO_MATCH, "1"
-        ),
-        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=1]"
-    );
+            "q",
+            "id:1000",
+            HighlightParams.HIGHLIGHT,
+            "true",
+            HighlightParams.FIELDS,
+            "lower",
+            HighlightParams.Q,
+            "target",
+            HighlightParams.SNIPPETS,
+            "100",
+            HighlightParams.MAX_MULTIVALUED_TO_MATCH,
+            "1"),
+        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=1]");
 
     // Now we should see exactly 0 by limiting the number of values found to 0
-    assertQ("Matching 6 should exactly match them all",
+    assertQ(
+        "Matching 6 should exactly match them all",
         req(
-            "q", "id:1000",
-            HighlightParams.HIGHLIGHT, "true",
-            HighlightParams.FIELDS, "lower",
-            HighlightParams.Q, "target",
-            HighlightParams.SNIPPETS, "100",
-            HighlightParams.MAX_MULTIVALUED_TO_MATCH, "0"
-        ),
-        "//lst[@name='highlighting']/lst[@name='1000' and count(child::*) = 0]"
-    );
-
+            "q",
+            "id:1000",
+            HighlightParams.HIGHLIGHT,
+            "true",
+            HighlightParams.FIELDS,
+            "lower",
+            HighlightParams.Q,
+            "target",
+            HighlightParams.SNIPPETS,
+            "100",
+            HighlightParams.MAX_MULTIVALUED_TO_MATCH,
+            "0"),
+        "//lst[@name='highlighting']/lst[@name='1000' and count(child::*) = 0]");
 
     // Should bail at the first parameter matched.
-    assertQ("Matching 6 should exactly match them all",
+    assertQ(
+        "Matching 6 should exactly match them all",
         req(
-            "q", "id:1000",
-            HighlightParams.HIGHLIGHT, "true",
-            HighlightParams.FIELDS, "lower",
-            HighlightParams.Q, "target",
-            HighlightParams.SNIPPETS, "100",
-            HighlightParams.MAX_MULTIVALUED_TO_MATCH, "2",
-            HighlightParams.MAX_MULTIVALUED_TO_EXAMINE, "10"
-        ),
-        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=2]"
-    );
+            "q",
+            "id:1000",
+            HighlightParams.HIGHLIGHT,
+            "true",
+            HighlightParams.FIELDS,
+            "lower",
+            HighlightParams.Q,
+            "target",
+            HighlightParams.SNIPPETS,
+            "100",
+            HighlightParams.MAX_MULTIVALUED_TO_MATCH,
+            "2",
+            HighlightParams.MAX_MULTIVALUED_TO_EXAMINE,
+            "10"),
+        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=2]");
 
     // Should bail at the first parameter matched.
-    assertQ("Matching 6 should exactly match them all",
+    assertQ(
+        "Matching 6 should exactly match them all",
         req(
-            "q", "id:1000",
-            HighlightParams.HIGHLIGHT, "true",
-            HighlightParams.FIELDS, "lower",
-            HighlightParams.Q, "target",
-            HighlightParams.SNIPPETS, "100",
-            HighlightParams.MAX_MULTIVALUED_TO_MATCH, "10",
-            HighlightParams.MAX_MULTIVALUED_TO_EXAMINE, "2"
-        ),
-        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=2]"
-    );
-
+            "q",
+            "id:1000",
+            HighlightParams.HIGHLIGHT,
+            "true",
+            HighlightParams.FIELDS,
+            "lower",
+            HighlightParams.Q,
+            "target",
+            HighlightParams.SNIPPETS,
+            "100",
+            HighlightParams.MAX_MULTIVALUED_TO_MATCH,
+            "10",
+            HighlightParams.MAX_MULTIVALUED_TO_EXAMINE,
+            "2"),
+        "//lst[@name='highlighting']/lst[@name='1000']/arr[@name='lower' and count(*)=2]");
   }
 
   @Test
@@ -1151,25 +1366,30 @@ public void payloadFilteringSpanQuery() throws IOException {
     assertU(adoc("id", "0", FIELD_NAME, "word|7 word|2"));
     assertU(commit());
 
-    //We search at a lower level than typical Solr tests because there's no QParser for payloads
+    // We search at a lower level than typical Solr tests because there's no QParser for payloads
 
-    //Create query matching this payload
-    Query query = new SpanPayloadCheckQuery(new SpanTermQuery(new Term(FIELD_NAME, "word")),
-        Collections.singletonList(new BytesRef(new byte[]{0, 0, 0, 7})));//bytes for integer 7
+    // Create query matching this payload
+    Query query =
+        new SpanPayloadCheckQuery(
+            new SpanTermQuery(new Term(FIELD_NAME, "word")),
+            Collections.singletonList(
+                new BytesRef(new byte[] {0, 0, 0, 7}))); // bytes for integer 7
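+    // The doc is "word|7 word|2", so only the first "word" occurrence carries payload 7 and matches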
 
-    //invoke highlight component... the hard way
+    // invoke highlight component... the hard way
     final SearchComponent hlComp = h.getCore().getSearchComponent("highlight");
-    SolrQueryRequest req = req("hl", "true", "hl.fl", FIELD_NAME, HighlightParams.USE_PHRASE_HIGHLIGHTER, "true");
+    SolrQueryRequest req =
+        req("hl", "true", "hl.fl", FIELD_NAME, HighlightParams.USE_PHRASE_HIGHLIGHTER, "true");
     try {
       SolrQueryResponse resp = new SolrQueryResponse();
       ResponseBuilder rb = new ResponseBuilder(req, resp, Collections.singletonList(hlComp));
       rb.setHighlightQuery(query);
       rb.setResults(req.getSearcher().getDocListAndSet(query, (DocSet) null, null, 0, 1));
-      //highlight:
+      // highlight:
       hlComp.prepare(rb);
       hlComp.process(rb);
-      //inspect response
-      final String[] snippets = (String[]) resp.getValues().findRecursive("highlighting", "0", FIELD_NAME);
+      // inspect response
+      final String[] snippets =
+          (String[]) resp.getValues().findRecursive("highlighting", "0", FIELD_NAME);
       assertEquals("word|7 word|2", snippets[0]);
     } finally {
       req.close();
@@ -1177,7 +1397,9 @@ public void payloadFilteringSpanQuery() throws IOException {
   }
 
   private static SolrHighlighter getHighlighter() {
-    var hl = (HighlightComponent) h.getCore().getSearchComponents().get(HighlightComponent.COMPONENT_NAME);
+    var hl =
+        (HighlightComponent)
+            h.getCore().getSearchComponents().get(HighlightComponent.COMPONENT_NAME);
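+    // hl.method=original pins these tests to the original Highlighter implementation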
     return hl.getHighlighter(new MapSolrParams(Map.of("hl.method", "original")));
   }
 
diff --git a/solr/core/src/test/org/apache/solr/highlight/HighlighterWithoutStoredIdTest.java b/solr/core/src/test/org/apache/solr/highlight/HighlighterWithoutStoredIdTest.java
index 97df83bf8b8..20d8cdc84b7 100644
--- a/solr/core/src/test/org/apache/solr/highlight/HighlighterWithoutStoredIdTest.java
+++ b/solr/core/src/test/org/apache/solr/highlight/HighlighterWithoutStoredIdTest.java
@@ -32,5 +32,4 @@ public static void afterClassProps() {
     System.clearProperty("solr.tests.id.stored");
     System.clearProperty("solr.tests.id.docValues");
   }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/highlight/TestUnifiedSolrHighlighter.java b/solr/core/src/test/org/apache/solr/highlight/TestUnifiedSolrHighlighter.java
index 07b6d307856..9d52314775d 100644
--- a/solr/core/src/test/org/apache/solr/highlight/TestUnifiedSolrHighlighter.java
+++ b/solr/core/src/test/org/apache/solr/highlight/TestUnifiedSolrHighlighter.java
@@ -22,16 +22,17 @@
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
-/** Tests for the UnifiedHighlighter Solr plugin **/
+/** Tests for the UnifiedHighlighter Solr plugin * */
 public class TestUnifiedSolrHighlighter extends SolrTestCaseJ4 {
-  
+
   @BeforeClass
   public static void beforeClass() throws Exception {
     System.setProperty("filterCache.enabled", "false");
     System.setProperty("queryResultCache.enabled", "false");
-    System.setProperty("documentCache.enabled", "true"); // this is why we use this particular solrconfig
+    System.setProperty(
+        "documentCache.enabled", "true"); // this is why we use this particular solrconfig
     initCore("solrconfig-cache-enable-disable.xml", "schema-unifiedhighlight.xml");
-    
+
     // test our config is sane, just to be sure:
 
     // 'text' and 'text3' should have offsets, 'text2' should not
@@ -40,6 +41,7 @@ public static void beforeClass() throws Exception {
     assertTrue(schema.getField("text3").storeOffsetsWithPositions());
     assertFalse(schema.getField("text2").storeOffsetsWithPositions());
   }
+
   @AfterClass
   public static void afterClass() {
     System.clearProperty("filterCache.enabled");
@@ -48,13 +50,31 @@ public static void afterClass() {
     System.clearProperty("solr.tests.id.stored");
     System.clearProperty("solr.tests.id.docValues");
   }
-  
+
   @Override
   public void setUp() throws Exception {
     super.setUp();
     clearIndex();
-    assertU(adoc("text", "document one", "text2", "document one", "text3", "crappy document", "id", "101"));
-    assertU(adoc("text", "second document", "text2", "second document", "text3", "crappier document", "id", "102"));
+    assertU(
+        adoc(
+            "text",
+            "document one",
+            "text2",
+            "document one",
+            "text3",
+            "crappy document",
+            "id",
+            "101"));
+    assertU(
+        adoc(
+            "text",
+            "second document",
+            "text2",
+            "second document",
+            "text3",
+            "crappier document",
+            "id",
+            "102"));
     assertU(commit());
   }
 
@@ -63,7 +83,8 @@ public static SolrQueryRequest req(String... params) {
   }
 
   public void testSimple() {
-    assertQ("simplest test", 
+    assertQ(
+        "simplest test",
         req("q", "text:document", "sort", "id asc", "hl", "true"),
         "count(//lst[@name='highlighting']/*)=2",
         "//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/str='document one'",
@@ -71,22 +92,54 @@ public void testSimple() {
   }
 
   public void testImpossibleOffsetSource() {
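+    // "text2" is indexed without offsets (asserted in beforeClass), so requesting the postings
+    // offset source for it must fail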
-    IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
-      h.query(req("q", "text2:document", "hl.offsetSource", "postings",
-          "hl.fl", "text2", "sort", "id asc", "hl", "true"));
-    });
+    IllegalArgumentException e =
+        expectThrows(
+            IllegalArgumentException.class,
+            () -> {
+              h.query(
+                  req(
+                      "q",
+                      "text2:document",
+                      "hl.offsetSource",
+                      "postings",
+                      "hl.fl",
+                      "text2",
+                      "sort",
+                      "id asc",
+                      "hl",
+                      "true"));
+            });
     assertTrue("Should warn no offsets", e.getMessage().contains("indexed without offsets"));
-
   }
 
   public void testMultipleSnippetsReturned() {
     clearIndex();
-    assertU(adoc("text", "Document snippet one. Intermediate sentence. Document snippet two.",
-        "text2", "document one", "text3", "crappy document", "id", "101"));
+    assertU(
+        adoc(
+            "text",
+            "Document snippet one. Intermediate sentence. Document snippet two.",
+            "text2",
+            "document one",
+            "text3",
+            "crappy document",
+            "id",
+            "101"));
     assertU(commit());
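+    // hl.fragsize=-1 disables length-based fragment sizing, so snippets align with SENTENCE breaks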
-    assertQ("multiple snippets test",
-        req("q", "text:document", "sort", "id asc", "hl", "true", "hl.snippets", "2", "hl.bs.type", "SENTENCE",
-            "hl.fragsize", "-1"),
+    assertQ(
+        "multiple snippets test",
+        req(
+            "q",
+            "text:document",
+            "sort",
+            "id asc",
+            "hl",
+            "true",
+            "hl.snippets",
+            "2",
+            "hl.bs.type",
+            "SENTENCE",
+            "hl.fragsize",
+            "-1"),
         "count(//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/*)=2",
         "//lst[@name='highlighting']/lst[@name='101']/arr/str[1]='Document snippet one. '",
         "//lst[@name='highlighting']/lst[@name='101']/arr/str[2]='Document snippet two.'");
@@ -94,10 +147,19 @@ public void testMultipleSnippetsReturned() {
 
   public void testStrictPhrasesEnabledByDefault() {
     clearIndex();
-    assertU(adoc("text", "Strict phrases should be enabled for phrases",
-        "text2", "document one", "text3", "crappy document", "id", "101"));
+    assertU(
+        adoc(
+            "text",
+            "Strict phrases should be enabled for phrases",
+            "text2",
+            "document one",
+            "text3",
+            "crappy document",
+            "id",
+            "101"));
     assertU(commit());
-    assertQ("strict phrase handling",
+    assertQ(
+        "strict phrase handling",
         req("q", "text:\"strict phrases\"", "sort", "id asc", "hl", "true"),
         "count(//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/*)=1",
         "//lst[@name='highlighting']/lst[@name='101']/arr/str[1]='Strict phrases should be enabled for phrases'");
@@ -105,21 +167,47 @@ public void testStrictPhrasesEnabledByDefault() {
 
   public void testStrictPhrasesCanBeDisabled() {
     clearIndex();
-    assertU(adoc("text", "Strict phrases should be disabled for phrases",
-        "text2", "document one", "text3", "crappy document", "id", "101"));
+    assertU(
+        adoc(
+            "text",
+            "Strict phrases should be disabled for phrases",
+            "text2",
+            "document one",
+            "text3",
+            "crappy document",
+            "id",
+            "101"));
     assertU(commit());
-    assertQ("strict phrase handling",
-        req("q", "text:\"strict phrases\"", "sort", "id asc", "hl", "true", "hl.usePhraseHighlighter", "false"),
+    assertQ(
+        "strict phrase handling",
+        req(
+            "q",
+            "text:\"strict phrases\"",
+            "sort",
+            "id asc",
+            "hl",
+            "true",
+            "hl.usePhraseHighlighter",
+            "false"),
         "count(//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/*)=1",
         "//lst[@name='highlighting']/lst[@name='101']/arr/str[1]='Strict phrases should be disabled for phrases'");
   }
 
   public void testMultiTermQueryEnabledByDefault() {
     clearIndex();
-    assertU(adoc("text", "Aviary Avenue document",
-        "text2", "document one", "text3", "crappy document", "id", "101"));
+    assertU(
+        adoc(
+            "text",
+            "Aviary Avenue document",
+            "text2",
+            "document one",
+            "text3",
+            "crappy document",
+            "id",
+            "101"));
     assertU(commit());
-    assertQ("multi term query handling",
+    assertQ(
+        "multi term query handling",
         req("q", "text:av*", "sort", "id asc", "hl", "true"),
         "count(//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/*)=1",
         "//lst[@name='highlighting']/lst[@name='101']/arr/str[1]='Aviary Avenue document'");
@@ -127,48 +215,70 @@ public void testMultiTermQueryEnabledByDefault() {
 
   public void testMultiTermQueryCanBeDisabled() {
     clearIndex();
-    assertU(adoc("text", "Aviary Avenue document",
-        "text2", "document one", "text3", "crappy document", "id", "101"));
+    assertU(
+        adoc(
+            "text",
+            "Aviary Avenue document",
+            "text2",
+            "document one",
+            "text3",
+            "crappy document",
+            "id",
+            "101"));
     assertU(commit());
-    assertQ("multi term query handling",
+    assertQ(
+        "multi term query handling",
         req("q", "text:av*", "sort", "id asc", "hl", "true", "hl.highlightMultiTerm", "false"),
         "count(//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/*)=0");
   }
 
   public void testPagination() {
-    assertQ("pagination test", 
+    assertQ(
+        "pagination test",
         req("q", "text:document", "sort", "id asc", "hl", "true", "rows", "1", "start", "1"),
         "count(//lst[@name='highlighting']/*)=1",
         "//lst[@name='highlighting']/lst[@name='102']/arr[@name='text']/str='second document'");
   }
-  
+
   public void testEmptySnippet() {
-    assertQ("null snippet test", 
-      req("q", "text:one OR *:*", "sort", "id asc", "hl", "true"),
+    assertQ(
+        "null snippet test",
+        req("q", "text:one OR *:*", "sort", "id asc", "hl", "true"),
         "count(//lst[@name='highlighting']/*)=2",
         "//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/str='document one'",
         "count(//lst[@name='highlighting']/lst[@name='102']/arr[@name='text']/*)=0");
   }
-  
+
   public void testDefaultSummary() {
-    assertQ("null snippet test", 
-      req("q", "text:one OR *:*", "sort", "id asc", "hl", "true", "hl.defaultSummary", "true"),
+    assertQ(
+        "null snippet test",
+        req("q", "text:one OR *:*", "sort", "id asc", "hl", "true", "hl.defaultSummary", "true"),
         "count(//lst[@name='highlighting']/*)=2",
         "//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/str='document one'",
         "//lst[@name='highlighting']/lst[@name='102']/arr[@name='text']/str='second document'");
   }
-  
+
   public void testDifferentField() {
-    assertQ("highlighting text3", 
+    assertQ(
+        "highlighting text3",
         req("q", "text3:document", "sort", "id asc", "hl", "true", "hl.fl", "text3"),
         "count(//lst[@name='highlighting']/*)=2",
         "//lst[@name='highlighting']/lst[@name='101']/arr[@name='text3']/str='crappy document'",
         "//lst[@name='highlighting']/lst[@name='102']/arr[@name='text3']/str='crappier document'");
   }
-  
+
   public void testTwoFields() {
-    assertQ("highlighting text and text3", 
-        req("q", "text:document text3:document", "sort", "id asc", "hl", "true", "hl.fl", "text,text3"),
+    assertQ(
+        "highlighting text and text3",
+        req(
+            "q",
+            "text:document text3:document",
+            "sort",
+            "id asc",
+            "hl",
+            "true",
+            "hl.fl",
+            "text,text3"),
         "count(//lst[@name='highlighting']/*)=2",
         "//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/str='document one'",
         "//lst[@name='highlighting']/lst[@name='101']/arr[@name='text3']/str='crappy document'",
@@ -178,10 +288,19 @@ public void testTwoFields() {
 
   // SOLR-5127
   public void testMultipleFieldsViaWildcard() {
-    assertQ("highlighting text and text3*",
-        req("q", (random().nextBoolean() ? "text:document text3:document" : "text3:document text:document"),
-            "sort", "id asc", "hl", "true",
-            "hl.fl", (random().nextBoolean() ? "text,text3*" : "text3*,text")),
+    assertQ(
+        "highlighting text and text3*",
+        req(
+            "q",
+            (random().nextBoolean()
+                ? "text:document text3:document"
+                : "text3:document text:document"),
+            "sort",
+            "id asc",
+            "hl",
+            "true",
+            "hl.fl",
+            (random().nextBoolean() ? "text,text3*" : "text3*,text")),
         "count(//lst[@name='highlighting']/*)=2",
         "//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/str='document one'",
         "//lst[@name='highlighting']/lst[@name='101']/arr[@name='text3']/str='crappy document'",
@@ -190,126 +309,265 @@ public void testMultipleFieldsViaWildcard() {
   }
 
   public void testTags() {
-    assertQ("different pre/post tags", 
-        req("q", "text:document", "sort", "id asc", "hl", "true", "hl.tag.pre", "[", "hl.tag.post", "]"),
+    assertQ(
+        "different pre/post tags",
+        req(
+            "q",
+            "text:document",
+            "sort",
+            "id asc",
+            "hl",
+            "true",
+            "hl.tag.pre",
+            "[",
+            "hl.tag.post",
+            "]"),
         "count(//lst[@name='highlighting']/*)=2",
         "//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/str='[document] one'",
         "//lst[@name='highlighting']/lst[@name='102']/arr[@name='text']/str='second [document]'");
   }
 
   public void testUsingSimplePrePostTags() {
-    assertQ("different pre/post tags",
-        req("q", "text:document", "sort", "id asc", "hl", "true", "hl.simple.pre", "[", "hl.simple.post", "]"),
+    assertQ(
+        "different pre/post tags",
+        req(
+            "q",
+            "text:document",
+            "sort",
+            "id asc",
+            "hl",
+            "true",
+            "hl.simple.pre",
+            "[",
+            "hl.simple.post",
+            "]"),
         "count(//lst[@name='highlighting']/*)=2",
         "//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/str='[document] one'",
         "//lst[@name='highlighting']/lst[@name='102']/arr[@name='text']/str='second [document]'");
   }
 
   public void testUsingSimplePrePostTagsPerField() {
-    assertQ("different pre/post tags",
-        req("q", "text:document", "sort", "id asc", "hl", "true", "f.text.hl.simple.pre", "[", "f.text.hl.simple.post", "]"),
+    assertQ(
+        "different pre/post tags",
+        req(
+            "q",
+            "text:document",
+            "sort",
+            "id asc",
+            "hl",
+            "true",
+            "f.text.hl.simple.pre",
+            "[",
+            "f.text.hl.simple.post",
+            "]"),
         "count(//lst[@name='highlighting']/*)=2",
         "//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/str='[document] one'",
         "//lst[@name='highlighting']/lst[@name='102']/arr[@name='text']/str='second [document]'");
   }
 
   public void testTagsPerField() {
-    assertQ("highlighting text and text3", 
-        req("q", "text:document text3:document", "sort", "id asc", "hl", "true", "hl.fl", "text,text3", "f.text3.hl.tag.pre", "[", "f.text3.hl.tag.post", "]"),
+    assertQ(
+        "highlighting text and text3",
+        req(
+            "q",
+            "text:document text3:document",
+            "sort",
+            "id asc",
+            "hl",
+            "true",
+            "hl.fl",
+            "text,text3",
+            "f.text3.hl.tag.pre",
+            "[",
+            "f.text3.hl.tag.post",
+            "]"),
         "count(//lst[@name='highlighting']/*)=2",
         "//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/str='document one'",
         "//lst[@name='highlighting']/lst[@name='101']/arr[@name='text3']/str='crappy [document]'",
         "//lst[@name='highlighting']/lst[@name='102']/arr[@name='text']/str='second document'",
         "//lst[@name='highlighting']/lst[@name='102']/arr[@name='text3']/str='crappier [document]'");
   }
-  
+
   public void testBreakIteratorWord() {
-    assertQ("different breakiterator", 
-        req("q", "text:document", "sort", "id asc", "hl", "true", "hl.bs.type", "WORD", "hl.fragsize", "-1"),
+    assertQ(
+        "different breakiterator",
+        req(
+            "q",
+            "text:document",
+            "sort",
+            "id asc",
+            "hl",
+            "true",
+            "hl.bs.type",
+            "WORD",
+            "hl.fragsize",
+            "-1"),
         "count(//lst[@name='highlighting']/*)=2",
         "//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/str='document'",
         "//lst[@name='highlighting']/lst[@name='102']/arr[@name='text']/str='document'");
   }
-  
+
   public void testBreakIteratorWhole() {
-    assertU(adoc("text", "Document one has a first sentence. Document two has a second sentence.", "id", "103"));
+    assertU(
+        adoc(
+            "text",
+            "Document one has a first sentence. Document two has a second sentence.",
+            "id",
+            "103"));
     assertU(commit());
-    assertQ("WHOLE breakiterator",
-        req("q", "text:document", "sort", "id asc", "hl", "true", "hl.bs.type", "WHOLE", "hl.fragsize", "-1"),
+    assertQ(
+        "WHOLE breakiterator",
+        req(
+            "q",
+            "text:document",
+            "sort",
+            "id asc",
+            "hl",
+            "true",
+            "hl.bs.type",
+            "WHOLE",
+            "hl.fragsize",
+            "-1"),
         "//lst[@name='highlighting']/lst[@name='103']/arr[@name='text']/str='Document one has a first sentence. Document two has a second sentence.'");
-    assertQ("hl.fragsize 0 is equivalent to WHOLE",
+    assertQ(
+        "hl.fragsize 0 is equivalent to WHOLE",
         req("q", "text:document", "sort", "id asc", "hl", "true", "hl.fragsize", "0"),
         "//lst[@name='highlighting']/lst[@name='103']/arr[@name='text']/str='Document one has a first sentence. Document two has a second sentence.'");
   }
-  
+
   public void testBreakIteratorCustom() {
-    assertU(adoc("text", "This document contains # special characters, while the other document contains the same # special character.", "id", "103"));
-    assertU(adoc("text", "While the other document contains the same # special character.", "id", "104"));
+    assertU(
+        adoc(
+            "text",
+            "This document contains # special characters, while the other document contains the same # special character.",
+            "id",
+            "103"));
+    assertU(
+        adoc(
+            "text",
+            "While the other document contains the same # special character.",
+            "id",
+            "104"));
     assertU(commit());
     // Set hl.fragAlignRatio because this test was written when it had a middle default
-    String[] defParams = {"q", "text:document", "sort", "id asc", "hl", "true", "hl.method", "unified", "hl.bs.type", "SEPARATOR","hl.bs.separator","#", "hl.fragAlignRatio", "0.5"};
-    assertQ("CUSTOM breakiterator",
-        req(defParams,"hl.fragsize", "-1"),
+    String[] defParams = {
+      "q",
+      "text:document",
+      "sort",
+      "id asc",
+      "hl",
+      "true",
+      "hl.method",
+      "unified",
+      "hl.bs.type",
+      "SEPARATOR",
+      "hl.bs.separator",
+      "#",
+      "hl.fragAlignRatio",
+      "0.5"
+    };
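+    // hl.bs.type=SEPARATOR breaks fragments at each occurrence of hl.bs.separator ("#")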
+    assertQ(
+        "CUSTOM breakiterator",
+        req(defParams, "hl.fragsize", "-1"),
         "//lst[@name='highlighting']/lst[@name='103']/arr[@name='text']/str='This document contains #'");
-    assertQ("different breakiterator",
-        req(defParams,"hl.fragsize", "-1"),
+    assertQ(
+        "different breakiterator",
+        req(defParams, "hl.fragsize", "-1"),
         "//lst[@name='highlighting']/lst[@name='104']/arr[@name='text']/str='While the other document contains the same #'");
 
-    assertQ("CUSTOM breakiterator with fragsize 70 minimum",
-        req(defParams,"hl.fragsize", "70", "hl.fragsizeIsMinimum", "true"),
+    assertQ(
+        "CUSTOM breakiterator with fragsize 70 minimum",
+        req(defParams, "hl.fragsize", "70", "hl.fragsizeIsMinimum", "true"),
         "//lst[@name='highlighting']/lst[@name='103']/arr[@name='text']/str='This document contains # special characters, while the other document contains the same #'");
-    assertQ("CUSTOM breakiterator with fragsize 70 avg",
-        req(defParams,"hl.fragsize", "70", "hl.fragsizeIsMinimum", "false"),
+    assertQ(
+        "CUSTOM breakiterator with fragsize 70 avg",
+        req(defParams, "hl.fragsize", "70", "hl.fragsizeIsMinimum", "false"),
         "//lst[@name='highlighting']/lst[@name='103']/arr[@name='text']/str='This document contains #'");
-    assertQ("CUSTOM breakiterator with fragsize 90 avg",
-        req(defParams,"hl.fragsize", "90", "hl.fragsizeIsMinimum", "false"),
+    assertQ(
+        "CUSTOM breakiterator with fragsize 90 avg",
+        req(defParams, "hl.fragsize", "90", "hl.fragsizeIsMinimum", "false"),
         "//lst[@name='highlighting']/lst[@name='103']/arr[@name='text']/str='This document contains #'");
-    assertQ("CUSTOM breakiterator with fragsize 100 avg",
-        req(defParams,"hl.fragsize", "100", "hl.fragsizeIsMinimum", "false"),
+    assertQ(
+        "CUSTOM breakiterator with fragsize 100 avg",
+        req(defParams, "hl.fragsize", "100", "hl.fragsizeIsMinimum", "false"),
         "//lst[@name='highlighting']/lst[@name='103']/arr[@name='text']/str='This document contains # special characters, while the other document contains the same #'");
   }
 
   public void testFragsize() {
-    // test default is 70... so make a sentence that is a little less (closer to 70 than end of text)
+    // test default is 70... so make a sentence that is a little less (closer to 70 than end of
+    // text)
     clearIndex();
-    assertU(adoc("id", "10", "text", "This is a sentence just under seventy chars in length blah blah. Next sentence is here."));
+    assertU(
+        adoc(
+            "id",
+            "10",
+            "text",
+            "This is a sentence just under seventy chars in length blah blah. Next sentence is here."));
     assertU(commit());
     // Set hl.fragAlignRatio because this test was written when it had a middle default
-    String[] defParams = {"q", "text:seventy", "hl", "true", "hl.method", "unified", "hl.fragAlignRatio", "0.5"};
-    assertQ("default fragsize",
+    String[] defParams = {
+      "q", "text:seventy", "hl", "true", "hl.method", "unified", "hl.fragAlignRatio", "0.5"
+    };
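+    // hl.fragsizeIsMinimum=true treats hl.fragsize as a lower bound on fragment length;
+    // when false, hl.fragsize is a target the fragments are sized toward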
+    assertQ(
+        "default fragsize",
         req(defParams, "hl.fragsizeIsMinimum", "true"),
         "//lst[@name='highlighting']/lst[@name='10']/arr[@name='text']/str='This is a sentence just under seventy chars in length blah blah. Next sentence is here.'");
-    assertQ("default fragsize",
+    assertQ(
+        "default fragsize",
         req(defParams, "hl.fragsizeIsMinimum", "true", "hl.fragsize", "60"),
         "//lst[@name='highlighting']/lst[@name='10']/arr[@name='text']/str='This is a sentence just under seventy chars in length blah blah. '");
-    assertQ("smaller fragsize",
+    assertQ(
+        "smaller fragsize",
         req(defParams, "hl.fragsizeIsMinimum", "false"),
         "//lst[@name='highlighting']/lst[@name='10']/arr[@name='text']/str='This is a sentence just under seventy chars in length blah blah. '");
-    assertQ("default fragsize",
+    assertQ(
+        "default fragsize",
         req(defParams, "hl.fragsize", "90", "hl.fragsizeIsMinimum", "false"),
         "//lst[@name='highlighting']/lst[@name='10']/arr[@name='text']/str='This is a sentence just under seventy chars in length blah blah. Next sentence is here.'");
   }
-  
+
   public void testEncoder() {
     assertU(adoc("text", "Document one has a first sentence.", "id", "103"));
     assertU(commit());
-    assertQ("html escaped", 
+    assertQ(
+        "html escaped",
         req("q", "text:document", "sort", "id asc", "hl", "true", "hl.encoder", "html"),
         "//lst[@name='highlighting']/lst[@name='103']/arr[@name='text']/str='Document one has a first <i>sentence</i>.'");
   }
 
   public void testRangeQuery() {
-    assertQ(req("q", "id:101", "hl", "true", "hl.q", "text:[dob TO doe]"),
+    assertQ(
+        req("q", "id:101", "hl", "true", "hl.q", "text:[dob TO doe]"),
         "count(//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/*)=1");
   }
 
   public void testRequireFieldMatch() {
-    // We highlight on field text3 (hl.fl), but our query only references the "text" field. Nonetheless, the query word
-    //  "document" is found in all fields here.
+    // We highlight on field text3 (hl.fl), but our query only references the "text" field.
+    // Nonetheless, the query word "document" is found in all fields here.
 
-    assertQ(req("q", "id:101", "hl", "true", "hl.q", "text:document", "hl.fl", "text3"), //hl.requireFieldMatch is false by default
+    assertQ(
+        req(
+            "q",
+            "id:101",
+            "hl",
+            "true",
+            "hl.q",
+            "text:document",
+            "hl.fl",
+            "text3"), // hl.requireFieldMatch is false by default
         "count(//lst[@name='highlighting']/lst[@name='101']/arr[@name='text3']/*)=1");
-    assertQ(req("q", "id:101", "hl", "true", "hl.q", "text:document", "hl.fl", "text3", "hl.requireFieldMatch", "true"),
+    assertQ(
+        req(
+            "q",
+            "id:101",
+            "hl",
+            "true",
+            "hl.q",
+            "text:document",
+            "hl.fl",
+            "text3",
+            "hl.requireFieldMatch",
+            "true"),
         "count(//lst[@name='highlighting']/lst[@name='101']/arr[@name='text3']/*)=0");
   }
 
@@ -317,7 +575,8 @@ public void testWeightMatchesDisabled() {
     clearIndex();
     assertU(adoc("text", "alpha bravo charlie", "id", "101"));
     assertU(commit());
-    assertQ("weight matches disabled, phrase highlights separately",
+    assertQ(
+        "weight matches disabled, phrase highlights separately",
         req("q", "text:\"alpha bravo\"", "hl", "true", "hl.weightMatches", "false"),
         "count(//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/*)=1",
         "//lst[@name='highlighting']/lst[@name='101']/arr/str[1]='alpha bravo charlie'");
@@ -325,13 +584,15 @@ public void testWeightMatchesDisabled() {
 
   // LUCENE-8492
   public void testSurroundQParser() {
-    assertQ(req("q", "{!surround df=text}2w(second, document)", "hl", "true", "hl.fl", "text"),
+    assertQ(
+        req("q", "{!surround df=text}2w(second, document)", "hl", "true", "hl.fl", "text"),
         "count(//lst[@name='highlighting']/lst[@name='102']/arr[@name='text']/*)=1");
   }
 
   // LUCENE-7757
   public void testComplexPhraseQParser() {
-    assertQ(req("q", "{!complexphrase df=text}(\"sec* doc*\")", "hl", "true", "hl.fl", "text"),
+    assertQ(
+        req("q", "{!complexphrase df=text}(\"sec* doc*\")", "hl", "true", "hl.fl", "text"),
         "count(//lst[@name='highlighting']/lst[@name='102']/arr[@name='text']/*)=1");
   }
 
@@ -349,9 +610,6 @@ public void testDontReturnEmptyHighlights() throws Exception {
             "hl.fl", "text, text2",
             "sort", "id asc",
             "hl", "true"),
-        "highlighting=={\n"
-            + "    '103':{\n"
-            + "      'text':['third document']}}}");
+        "highlighting=={\n" + "    '103':{\n" + "      'text':['third document']}}}");
   }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/highlight/TestUnifiedSolrHighlighterWithoutStoredId.java b/solr/core/src/test/org/apache/solr/highlight/TestUnifiedSolrHighlighterWithoutStoredId.java
index e7fb6cd320d..a0f48615349 100644
--- a/solr/core/src/test/org/apache/solr/highlight/TestUnifiedSolrHighlighterWithoutStoredId.java
+++ b/solr/core/src/test/org/apache/solr/highlight/TestUnifiedSolrHighlighterWithoutStoredId.java
@@ -16,11 +16,10 @@
  */
 package org.apache.solr.highlight;
 
-
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
-/** Tests for the UnifiedHighlighter Solr plugin **/
+/** Tests for the UnifiedHighlighter Solr plugin * */
 public class TestUnifiedSolrHighlighterWithoutStoredId extends TestUnifiedSolrHighlighter {
 
   @BeforeClass
diff --git a/solr/core/src/test/org/apache/solr/index/DummyMergePolicyFactory.java b/solr/core/src/test/org/apache/solr/index/DummyMergePolicyFactory.java
index 30a52a08e36..c936244c432 100644
--- a/solr/core/src/test/org/apache/solr/index/DummyMergePolicyFactory.java
+++ b/solr/core/src/test/org/apache/solr/index/DummyMergePolicyFactory.java
@@ -20,14 +20,13 @@
 import org.apache.solr.schema.IndexSchema;
 
 /**
- * Dummy implementation of {@link org.apache.solr.index.MergePolicyFactory}
- * which doesn't have a suitable public constructor and thus is expected to
- * fail if used within Solr.
+ * Dummy implementation of {@link org.apache.solr.index.MergePolicyFactory} which doesn't have a
+ * suitable public constructor and thus is expected to fail if used within Solr.
  */
 class DummyMergePolicyFactory extends LogByteSizeMergePolicyFactory {
 
-  private DummyMergePolicyFactory(SolrResourceLoader resourceLoader, MergePolicyFactoryArgs args, IndexSchema schema) {
+  private DummyMergePolicyFactory(
+      SolrResourceLoader resourceLoader, MergePolicyFactoryArgs args, IndexSchema schema) {
     super(resourceLoader, args, schema);
   }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/index/TestSlowCompositeReaderWrapper.java b/solr/core/src/test/org/apache/solr/index/TestSlowCompositeReaderWrapper.java
index 0f474c92b8c..32d52f5d3a0 100644
--- a/solr/core/src/test/org/apache/solr/index/TestSlowCompositeReaderWrapper.java
+++ b/solr/core/src/test/org/apache/solr/index/TestSlowCompositeReaderWrapper.java
@@ -20,7 +20,6 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
-
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.SortedDocValuesField;
@@ -55,21 +54,25 @@ public void testCoreListenerOnSlowCompositeReaderWrapper() throws IOException {
 
     final IndexReader reader = DirectoryReader.open(w.w.getDirectory());
     final LeafReader leafReader = SlowCompositeReaderWrapper.wrap(reader);
-    
+
     final int numListeners = TestUtil.nextInt(random(), 1, 10);
     final List<IndexReader.ClosedListener> listeners = new ArrayList<>();
     AtomicInteger counter = new AtomicInteger(numListeners);
-    
+
     for (int i = 0; i < numListeners; ++i) {
-      CountCoreListener listener = new CountCoreListener(counter, leafReader.getCoreCacheHelper().getKey());
+      CountCoreListener listener =
+          new CountCoreListener(counter, leafReader.getCoreCacheHelper().getKey());
       listeners.add(listener);
       leafReader.getCoreCacheHelper().addClosedListener(listener);
     }
     for (int i = 0; i < 100; ++i) {
-      leafReader.getCoreCacheHelper().addClosedListener(listeners.get(random().nextInt(listeners.size())));
+      leafReader
+          .getCoreCacheHelper()
+          .addClosedListener(listeners.get(random().nextInt(listeners.size())));
     }
     assertEquals(numListeners, counter.get());
-    // make sure listeners are registered on the wrapped reader and that closing any of them has the same effect
+    // make sure listeners are registered on the wrapped reader and that closing any of them has the
+    // same effect
     if (random().nextBoolean()) {
       reader.close();
     } else {
@@ -94,12 +97,13 @@ public void onClose(IndexReader.CacheKey coreCacheKey) {
       assertSame(this.coreCacheKey, coreCacheKey);
       count.decrementAndGet();
     }
-
   }
 
   public void testOrdMapsAreCached() throws Exception {
     Directory dir = newDirectory();
-    RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE));
+    RandomIndexWriter w =
+        new RandomIndexWriter(
+            random(), dir, newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE));
     Document doc = new Document();
     doc.add(new SortedDocValuesField("sorted", new BytesRef("a")));
     doc.add(new SortedSetDocValuesField("sorted_set", new BytesRef("b")));
@@ -113,11 +117,13 @@ public void testOrdMapsAreCached() throws Exception {
     w.addDocument(doc);
     IndexReader reader = w.getReader();
     assertTrue(reader.leaves().size() > 1);
-    SlowCompositeReaderWrapper slowWrapper = (SlowCompositeReaderWrapper) SlowCompositeReaderWrapper.wrap(reader);
+    SlowCompositeReaderWrapper slowWrapper =
+        (SlowCompositeReaderWrapper) SlowCompositeReaderWrapper.wrap(reader);
     assertEquals(0, slowWrapper.cachedOrdMaps.size());
     assertEquals(MultiSortedDocValues.class, slowWrapper.getSortedDocValues("sorted").getClass());
     assertEquals(1, slowWrapper.cachedOrdMaps.size());
-    assertEquals(MultiSortedSetDocValues.class, slowWrapper.getSortedSetDocValues("sorted_set").getClass());
+    assertEquals(
+        MultiSortedSetDocValues.class, slowWrapper.getSortedSetDocValues("sorted_set").getClass());
     assertEquals(2, slowWrapper.cachedOrdMaps.size());
     reader.close();
     w.close();
@@ -126,7 +132,9 @@ public void testOrdMapsAreCached() throws Exception {
 
   public void testTermsAreCached() throws IOException {
     Directory dir = newDirectory();
-    RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE));
+    RandomIndexWriter w =
+        new RandomIndexWriter(
+            random(), dir, newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE));
     Document doc = new Document();
     doc.add(new TextField("text", "hello world", Field.Store.NO));
     w.addDocument(doc);
@@ -137,12 +145,13 @@ public void testTermsAreCached() throws IOException {
 
     IndexReader reader = w.getReader();
     assertTrue(reader.leaves().size() > 1);
-    SlowCompositeReaderWrapper slowWrapper = (SlowCompositeReaderWrapper) SlowCompositeReaderWrapper.wrap(reader);
+    SlowCompositeReaderWrapper slowWrapper =
+        (SlowCompositeReaderWrapper) SlowCompositeReaderWrapper.wrap(reader);
     assertEquals(0, slowWrapper.cachedTerms.size());
     assertEquals(MultiTerms.class, slowWrapper.terms("text").getClass());
     assertEquals(1, slowWrapper.cachedTerms.size());
     assertNull(slowWrapper.terms("bogusField"));
-    assertEquals(1, slowWrapper.cachedTerms.size());//bogus field isn't cached
+    assertEquals(1, slowWrapper.cachedTerms.size()); // bogus field isn't cached
     reader.close();
     w.close();
     dir.close();
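Note: the test above leans on the reader's core-cache contract, and it re-registers the same listener objects many times yet still expects each to fire exactly once. A minimal sketch of the same pattern in isolation (assuming "dir" holds a multi-segment index; the wrapper forwards core closed-listeners to the reader it wraps, and the test relies on duplicate registrations of one listener collapsing):

    IndexReader reader = DirectoryReader.open(dir);
    LeafReader leaf = SlowCompositeReaderWrapper.wrap(reader);
    AtomicInteger closes = new AtomicInteger();
    IndexReader.ClosedListener listener = key -> closes.incrementAndGet();
    leaf.getCoreCacheHelper().addClosedListener(listener);
    leaf.getCoreCacheHelper().addClosedListener(listener); // duplicate registration
    reader.close();
    // the CountCoreListener above relies on this firing once per listener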
diff --git a/solr/core/src/test/org/apache/solr/index/WrapperMergePolicyFactoryTest.java b/solr/core/src/test/org/apache/solr/index/WrapperMergePolicyFactoryTest.java
index 699c4df4d61..d217ceca8b5 100644
--- a/solr/core/src/test/org/apache/solr/index/WrapperMergePolicyFactoryTest.java
+++ b/solr/core/src/test/org/apache/solr/index/WrapperMergePolicyFactoryTest.java
@@ -38,7 +38,8 @@ public void testReturnsDefaultMergePolicyIfNoneSpecified() {
   public void testFailsIfNoClassSpecifiedForWrappedPolicy() {
     final MergePolicyFactoryArgs args = new MergePolicyFactoryArgs();
     args.add(WrapperMergePolicyFactory.WRAPPED_PREFIX, "foo");
-    expectThrows(IllegalArgumentException.class,
+    expectThrows(
+        IllegalArgumentException.class,
         () -> new DefaultingWrapperMergePolicyFactory(resourceLoader, args, null).getMergePolicy());
   }
 
@@ -52,27 +53,35 @@ public void testProperlyInitializesWrappedMergePolicy() {
     args.add("test.class", TieredMergePolicyFactory.class.getName());
     args.add("test.maxMergeAtOnce", testMaxMergeAtOnce);
     args.add("test.maxMergedSegmentMB", testMaxMergedSegmentMB);
-    MergePolicyFactory mpf = new DefaultingWrapperMergePolicyFactory(resourceLoader, args, null) {
-      @Override
-      protected MergePolicy getDefaultWrappedMergePolicy() {
-        throw new IllegalStateException("Should not have reached here!");
-      }
-    };
+    MergePolicyFactory mpf =
+        new DefaultingWrapperMergePolicyFactory(resourceLoader, args, null) {
+          @Override
+          protected MergePolicy getDefaultWrappedMergePolicy() {
+            throw new IllegalStateException("Should not have reached here!");
+          }
+        };
     final MergePolicy mp = mpf.getMergePolicy();
     assertSame(mp.getClass(), TieredMergePolicy.class);
-    final TieredMergePolicy tmp = (TieredMergePolicy)mp;
+    final TieredMergePolicy tmp = (TieredMergePolicy) mp;
     assertEquals("maxMergeAtOnce", testMaxMergeAtOnce, tmp.getMaxMergeAtOnce());
     assertEquals("maxMergedSegmentMB", testMaxMergedSegmentMB, tmp.getMaxMergedSegmentMB(), 0.0d);
   }
 
   public void testUpgradeIndexMergePolicyFactory() {
     final int N = 10;
-    final Double wrappingNoCFSRatio = random().nextBoolean() ? null : random().nextInt(N+1)/((double)N); // must be: 0.0 <= value <= 1.0
-    final Double wrappedNoCFSRatio  = random().nextBoolean() ? null : random().nextInt(N+1)/((double)N); // must be: 0.0 <= value <= 1.0
+    final Double wrappingNoCFSRatio =
+        random().nextBoolean()
+            ? null
+            : random().nextInt(N + 1) / ((double) N); // must be: 0.0 <= value <= 1.0
+    final Double wrappedNoCFSRatio =
+        random().nextBoolean()
+            ? null
+            : random().nextInt(N + 1) / ((double) N); // must be: 0.0 <= value <= 1.0
     implTestUpgradeIndexMergePolicyFactory(wrappingNoCFSRatio, wrappedNoCFSRatio);
   }
 
-  private void implTestUpgradeIndexMergePolicyFactory(Double wrappingNoCFSRatio, Double wrappedNoCFSRatio) {
+  private void implTestUpgradeIndexMergePolicyFactory(
+      Double wrappingNoCFSRatio, Double wrappedNoCFSRatio) {
     final MergePolicyFactoryArgs args = new MergePolicyFactoryArgs();
     if (wrappingNoCFSRatio != null) {
       args.add("noCFSRatio", wrappingNoCFSRatio); // noCFSRatio for the wrapping merge policy
@@ -86,33 +95,48 @@ private void implTestUpgradeIndexMergePolicyFactory(Double wrappingNoCFSRatio, D
     MergePolicyFactory mpf;
     try {
       mpf = new UpgradeIndexMergePolicyFactory(resourceLoader, args, null);
-      assertFalse("Should only reach here if wrapping and wrapped args don't overlap!",
+      assertFalse(
+          "Should only reach here if wrapping and wrapped args don't overlap!",
           (wrappingNoCFSRatio != null && wrappedNoCFSRatio != null));
 
-      for (int ii=1; ii<=2; ++ii) { // it should be okay to call getMergePolicy() more than once
+      for (int ii = 1; ii <= 2; ++ii) { // it should be okay to call getMergePolicy() more than once
         final MergePolicy mp = mpf.getMergePolicy();
         if (wrappingNoCFSRatio != null) {
-          assertEquals("#"+ii+" wrappingNoCFSRatio", wrappingNoCFSRatio.doubleValue(), mp.getNoCFSRatio(), 0.0d);
+          assertEquals(
+              "#" + ii + " wrappingNoCFSRatio",
+              wrappingNoCFSRatio.doubleValue(),
+              mp.getNoCFSRatio(),
+              0.0d);
         }
         if (wrappedNoCFSRatio != null) {
-          assertEquals("#"+ii+" wrappedNoCFSRatio", wrappedNoCFSRatio.doubleValue(), mp.getNoCFSRatio(), 0.0d);
+          assertEquals(
+              "#" + ii + " wrappedNoCFSRatio",
+              wrappedNoCFSRatio.doubleValue(),
+              mp.getNoCFSRatio(),
+              0.0d);
         }
         assertSame(mp.getClass(), UpgradeIndexMergePolicy.class);
       }
 
     } catch (IllegalArgumentException iae) {
-      assertEquals("Wrapping and wrapped merge policy args overlap! [noCFSRatio]", iae.getMessage());
-      assertTrue("Should only reach here if wrapping and wrapped args do overlap!",
+      assertEquals(
+          "Wrapping and wrapped merge policy args overlap! [noCFSRatio]", iae.getMessage());
+      assertTrue(
+          "Should only reach here if wrapping and wrapped args do overlap!",
           (wrappingNoCFSRatio != null && wrappedNoCFSRatio != null));
     }
   }
 
   private static class DefaultingWrapperMergePolicyFactory extends WrapperMergePolicyFactory {
 
-    DefaultingWrapperMergePolicyFactory(SolrResourceLoader resourceLoader, MergePolicyFactoryArgs wrapperArgs, IndexSchema schema) {
+    DefaultingWrapperMergePolicyFactory(
+        SolrResourceLoader resourceLoader, MergePolicyFactoryArgs wrapperArgs, IndexSchema schema) {
       super(resourceLoader, wrapperArgs, schema);
       if (!args.keys().isEmpty()) {
-        throw new IllegalArgumentException("All arguments should have been claimed by the wrapped policy but some ("+args+") remain.");
+        throw new IllegalArgumentException(
+            "All arguments should have been claimed by the wrapped policy but some ("
+                + args
+                + ") remain.");
       }
     }
 
@@ -125,7 +149,5 @@ protected MergePolicy getDefaultWrappedMergePolicy() {
     protected MergePolicy getMergePolicyInstance(MergePolicy wrappedMP) {
       return getWrappedMergePolicy();
     }
-
   }
-
 }
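Note: the prefix convention exercised by this test is worth spelling out. A minimal sketch (assuming the args mirror the <mergePolicyFactory> configuration in solrconfig.xml): the wrapper claims WRAPPED_PREFIX, strips the chosen prefix off the remaining keys, and hands those to the wrapped factory, so nothing should be left unclaimed:

    MergePolicyFactoryArgs args = new MergePolicyFactoryArgs();
    args.add(WrapperMergePolicyFactory.WRAPPED_PREFIX, "test");
    args.add("test.class", TieredMergePolicyFactory.class.getName());
    args.add("test.maxMergeAtOnce", 10);
    // After construction the wrapped TieredMergePolicy sees maxMergeAtOnce=10;
    // an unprefixed leftover key triggers the IllegalArgumentException above.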
diff --git a/solr/core/src/test/org/apache/solr/internal/csv/CSVParserTest.java b/solr/core/src/test/org/apache/solr/internal/csv/CSVParserTest.java
index e2a35c8be69..7047b3ffe78 100644
--- a/solr/core/src/test/org/apache/solr/internal/csv/CSVParserTest.java
+++ b/solr/core/src/test/org/apache/solr/internal/csv/CSVParserTest.java
@@ -20,26 +20,23 @@
 import java.io.Reader;
 import java.io.StringReader;
 import java.util.Arrays;
-
 import junit.framework.TestCase;
 
 /**
  * CSVParserTest
  *
- * The test are organized in three different sections:
- * The 'setter/getter' section, the lexer section and finally the parser 
- * section. In case a test fails, you should follow a top-down approach for 
- * fixing a potential bug (it's likely that the parser itself fails if the lexer
- * has problems...).
+ * <p>The test are organized in three different sections: The 'setter/getter' section, the lexer
+ * section and finally the parser section. In case a test fails, you should follow a top-down
+ * approach for fixing a potential bug (it's likely that the parser itself fails if the lexer has
+ * problems...).
  */
 public class CSVParserTest extends TestCase {
-
-  /**
-   * TestCSVParser.
-   */
+
+  /** TestCSVParser. */
   static class TestCSVParser extends CSVParser {
     /**
      * Test parser to investigate the type of the internal Token.
+     *
      * @param in a Reader
      */
     TestCSVParser(Reader in) {
@@ -50,8 +47,8 @@ static class TestCSVParser extends CSVParser {
       super(in, strategy);
     }
     /**
-     * Calls super.nextToken() and prints out a String representation of token
-     * type and content.
+     * Calls super.nextToken() and prints out a String representation of token type and content.
+     *
      * @return String representation of token type and content
      * @throws IOException like {@link CSVParser#nextToken()}
      */
@@ -60,11 +57,11 @@ public String testNextToken() throws IOException {
       return Integer.toString(t.type) + ";" + t.content + ";";
     }
   }
-
+
   // ======================================================
   //   lexer tests
   // ======================================================
-
+
   // Single line (without comment)
   public void testNextToken1() throws IOException {
     String code = "abc,def, hijk, lmnop, qrst,uv ,wxy ,z , ,";
@@ -78,25 +75,33 @@ public void testNextToken1() throws IOException {
     assertEquals(CSVParser.TT_TOKEN + ";wxy;", parser.testNextToken());
     assertEquals(CSVParser.TT_TOKEN + ";z;", parser.testNextToken());
     assertEquals(CSVParser.TT_TOKEN + ";;", parser.testNextToken());
-    assertEquals(CSVParser.TT_EOF + ";;", parser.testNextToken());
+    assertEquals(CSVParser.TT_EOF + ";;", parser.testNextToken());
   }
-
+
   // multiline including comments (and empty lines)
   public void testNextToken2() throws IOException {
     /* file: 1,2,3,
      *       a,b x,c
      *
-     *       # this is a comment
+     *       # this is a comment
      *       d,e,
-     *
+     *
      */
     String code = "1,2,3,\na,b x,c\n#foo\n\nd,e,\n\n";
-    CSVStrategy strategy = new CSVStrategy
-      (',', '"', '#', CSVStrategy.ESCAPE_DISABLED, true, true, false, true, CSVStrategy.DEFAULT_PRINTER_NEWLINE);
+    CSVStrategy strategy =
+        new CSVStrategy(
+            ',',
+            '"',
+            '#',
+            CSVStrategy.ESCAPE_DISABLED,
+            true,
+            true,
+            false,
+            true,
+            CSVStrategy.DEFAULT_PRINTER_NEWLINE);
     TestCSVParser parser = new TestCSVParser(new StringReader(code), strategy);
-
     assertEquals(CSVParser.TT_TOKEN + ";1;", parser.testNextToken());
     assertEquals(CSVParser.TT_TOKEN + ";2;", parser.testNextToken());
     assertEquals(CSVParser.TT_TOKEN + ";3;", parser.testNextToken());
@@ -108,19 +113,27 @@ public void testNextToken2() throws IOException {
     assertEquals(CSVParser.TT_TOKEN + ";d;", parser.testNextToken());
     assertEquals(CSVParser.TT_TOKEN + ";e;", parser.testNextToken());
     assertEquals(CSVParser.TT_EORECORD + ";;", parser.testNextToken());
-    assertEquals(CSVParser.TT_EOF + ";;", parser.testNextToken());
-    assertEquals(CSVParser.TT_EOF + ";;", parser.testNextToken());
-
+    assertEquals(CSVParser.TT_EOF + ";;", parser.testNextToken());
+    assertEquals(CSVParser.TT_EOF + ";;", parser.testNextToken());
   }
-
+
   // simple token with escaping
   public void testNextToken3() throws IOException {
     /* file: a,\,,b
      *       \,,
      */
     String code = "a,\\,,b\n\\,,";
-    CSVStrategy strategy = new CSVStrategy
-      (',', '"', '#', CSVStrategy.ESCAPE_DISABLED, true, true, false, true, CSVStrategy.DEFAULT_PRINTER_NEWLINE);
+    CSVStrategy strategy =
+        new CSVStrategy(
+            ',',
+            '"',
+            '#',
+            CSVStrategy.ESCAPE_DISABLED,
+            true,
+            true,
+            false,
+            true,
+            CSVStrategy.DEFAULT_PRINTER_NEWLINE);
     TestCSVParser parser = new TestCSVParser(new StringReader(code), strategy);
     assertEquals(CSVParser.TT_TOKEN + ";a;", parser.testNextToken());
@@ -133,46 +146,42 @@ public void testNextToken3() throws IOException {
     assertEquals(CSVParser.TT_TOKEN + ";;", parser.testNextToken());
     assertEquals(CSVParser.TT_EOF + ";;", parser.testNextToken());
   }
-
+
   // encapsulator tokenizer (sinle line)
   public void testNextToken4() throws IOException {
     /* file: a,"foo",b
      *       a, " foo",b
      *       a,"foo " ,b // whitespace after closing encapsulator
      *       a, " foo " ,b
-     */
-    String code =
-      "a,\"foo\",b\na, \" foo\",b\na,\"foo \" ,b\na, \" foo \" ,b";
-    TestCSVParser parser = new TestCSVParser(new StringReader(code));
-    assertEquals(CSVParser.TT_TOKEN + ";a;", parser.testNextToken());
-    assertEquals(CSVParser.TT_TOKEN + ";foo;", parser.testNextToken());
-    assertEquals(CSVParser.TT_EORECORD + ";b;", parser.testNextToken());
-    assertEquals(CSVParser.TT_TOKEN + ";a;", parser.testNextToken());
-    assertEquals(CSVParser.TT_TOKEN + "; foo;", parser.testNextToken());
-    assertEquals(CSVParser.TT_EORECORD + ";b;", parser.testNextToken());
-    assertEquals(CSVParser.TT_TOKEN + ";a;", parser.testNextToken());
-    assertEquals(CSVParser.TT_TOKEN + ";foo ;", parser.testNextToken());
-    assertEquals(CSVParser.TT_EORECORD + ";b;", parser.testNextToken());
-    assertEquals(CSVParser.TT_TOKEN + ";a;", parser.testNextToken());
-    assertEquals(CSVParser.TT_TOKEN + "; foo ;", parser.testNextToken());
-// assertEquals(CSVParser.TT_EORECORD + ";b;", parser.testNextToken());
-    assertEquals(CSVParser.TT_EOF + ";b;", parser.testNextToken());
+     */
+    String code = "a,\"foo\",b\na, \" foo\",b\na,\"foo \" ,b\na, \" foo \" ,b";
+    TestCSVParser parser = new TestCSVParser(new StringReader(code));
+    assertEquals(CSVParser.TT_TOKEN + ";a;", parser.testNextToken());
+    assertEquals(CSVParser.TT_TOKEN + ";foo;", parser.testNextToken());
+    assertEquals(CSVParser.TT_EORECORD + ";b;", parser.testNextToken());
+    assertEquals(CSVParser.TT_TOKEN + ";a;", parser.testNextToken());
+    assertEquals(CSVParser.TT_TOKEN + "; foo;", parser.testNextToken());
+    assertEquals(CSVParser.TT_EORECORD + ";b;", parser.testNextToken());
+    assertEquals(CSVParser.TT_TOKEN + ";a;", parser.testNextToken());
+    assertEquals(CSVParser.TT_TOKEN + ";foo ;", parser.testNextToken());
+    assertEquals(CSVParser.TT_EORECORD + ";b;", parser.testNextToken());
+    assertEquals(CSVParser.TT_TOKEN + ";a;", parser.testNextToken());
+    assertEquals(CSVParser.TT_TOKEN + "; foo ;", parser.testNextToken());
+    // assertEquals(CSVParser.TT_EORECORD + ";b;", parser.testNextToken());
+    assertEquals(CSVParser.TT_EOF + ";b;", parser.testNextToken());
   }
-
+
   // encapsulator tokenizer (multi line, delimiter in string)
-  public void testNextToken5() throws IOException {
-    String code =
-      "a,\"foo\n\",b\n\"foo\n baar ,,,\"\n\"\n\t \n\"";
+  public void testNextToken5() throws IOException {
+    String code = "a,\"foo\n\",b\n\"foo\n baar ,,,\"\n\"\n\t \n\"";
     TestCSVParser parser = new TestCSVParser(new StringReader(code));
     assertEquals(CSVParser.TT_TOKEN + ";a;", parser.testNextToken());
     assertEquals(CSVParser.TT_TOKEN + ";foo\n;", parser.testNextToken());
     assertEquals(CSVParser.TT_EORECORD + ";b;", parser.testNextToken());
-    assertEquals(CSVParser.TT_EORECORD + ";foo\n baar ,,,;",
-        parser.testNextToken());
+    assertEquals(CSVParser.TT_EORECORD + ";foo\n baar ,,,;", parser.testNextToken());
     assertEquals(CSVParser.TT_EOF + ";\n\t \n;",
         parser.testNextToken());
-  }
-
+  }
+
   // change delimiters, comment, encapsulater
   public void testNextToken6() throws IOException {
     /* file: a;'b and \' more
@@ -181,30 +190,29 @@ public void testNextToken6() throws IOException {
      *       ;;
      */
     String code = "a;'b and '' more\n'\n!comment;;;;\n;;";
-    TestCSVParser parser = new TestCSVParser(new StringReader(code), new CSVStrategy(';', '\'', '!'));
+    TestCSVParser parser =
+        new TestCSVParser(new StringReader(code), new CSVStrategy(';', '\'', '!'));
     assertEquals(CSVParser.TT_TOKEN + ";a;", parser.testNextToken());
-    assertEquals(
-        CSVParser.TT_EORECORD + ";b and ' more\n;",
-        parser.testNextToken());
+    assertEquals(CSVParser.TT_EORECORD + ";b and ' more\n;", parser.testNextToken());
   }
-
-
+
   // ======================================================
   //   parser tests
   // ======================================================
-
-  String code =
-    "a,b,c,d\n"
-    + " a , b , 1 2 \n"
-    + "\"foo baar\", b,\n"
-    // + " \"foo\n,,\n\"\",,\n\\\"\",d,e\n";
-    + " \"foo\n,,\n\"\",,\n\"\"\",d,e\n"; // changed to use standard CSV escaping
+
+  String code =
+      "a,b,c,d\n"
+          + " a , b , 1 2 \n"
+          + "\"foo baar\", b,\n"
+          // + " \"foo\n,,\n\"\",,\n\\\"\",d,e\n";
+          + " \"foo\n,,\n\"\",,\n\"\"\",d,e\n"; // changed to use standard CSV escaping
   String[][] res = {
     {"a", "b", "c", "d"},
-    {"a", "b", "1 2"},
-    {"foo baar", "b", ""},
+    {"a", "b", "1 2"},
+    {"foo baar", "b", ""},
     {"foo\n,,\n\",,\n\"", "d", "e"}
   };
+
   public void testGetLine() throws IOException {
     CSVParser parser = new CSVParser(new StringReader(code));
     String[] tmp = null;
@@ -215,7 +223,7 @@ public void testGetLine() throws IOException {
     tmp = parser.getLine();
     assertTrue(tmp == null);
   }
-
+
   public void testNextValue() throws IOException {
     CSVParser parser = new CSVParser(new StringReader(code));
     String tmp = null;
@@ -226,23 +234,23 @@ public void testNextValue() throws IOException {
       }
     }
     tmp = parser.nextValue();
-    assertTrue(tmp == null);
+    assertTrue(tmp == null);
   }
-
+
   public void testGetAllValues() throws IOException {
     CSVParser parser = new CSVParser(new StringReader(code));
     String[][] tmp = parser.getAllValues();
     assertEquals(res.length, tmp.length);
     assertTrue(tmp.length > 0);
     for (int i = 0; i < res.length; i++) {
-      assertTrue(Arrays.equals(res[i], tmp[i]));
+      assertTrue(Arrays.equals(res[i], tmp[i]));
     }
   }
-
+
   public void testExcelStrategy1() throws IOException {
-    String code =
-      "value1,value2,value3,value4\r\na,b,c,d\r\n x,,,"
-      + "\r\n\r\n\"\"\"hello\"\"\",\" \"\"world\"\"\",\"abc\ndef\",\r\n";
+    String code =
+        "value1,value2,value3,value4\r\na,b,c,d\r\n x,,,"
+            + "\r\n\r\n\"\"\"hello\"\"\",\" \"\"world\"\"\",\"abc\ndef\",\r\n";
     String[][] res = {
       {"value1", "value2", "value3", "value4"},
       {"a", "b", "c", "d"},
@@ -255,10 +263,10 @@ public void testExcelStrategy1() throws IOException {
     assertEquals(res.length, tmp.length);
     assertTrue(tmp.length > 0);
     for (int i = 0; i < res.length; i++) {
-      assertTrue(Arrays.equals(res[i], tmp[i]));
+      assertTrue(Arrays.equals(res[i], tmp[i]));
     }
   }
-
+
   public void testExcelStrategy2() throws Exception {
     String code = "foo,baar\r\n\r\nhello,\r\n\r\nworld,\r\n";
     String[][] res = {
@@ -273,24 +281,24 @@ public void testExcelStrategy2() throws Exception {
     assertEquals(res.length, tmp.length);
     assertTrue(tmp.length > 0);
     for (int i = 0; i < res.length; i++) {
-      assertTrue(Arrays.equals(res[i], tmp[i]));
+      assertTrue(Arrays.equals(res[i], tmp[i]));
     }
   }
-
+
   public void testEndOfFileBehaviourExcel() throws Exception {
     String[] codes = {
-        "hello,\r\n\r\nworld,\r\n",
"hello,\r\n\r\nworld,", - "hello,\r\n\r\nworld,\"\"\r\n", - "hello,\r\n\r\nworld,\"\"", - "hello,\r\n\r\nworld,\n", - "hello,\r\n\r\nworld,", - "hello,\r\n\r\nworld,\"\"\n", - "hello,\r\n\r\nworld,\"\"" - }; + "hello,\r\n\r\nworld,\r\n", + "hello,\r\n\r\nworld,", + "hello,\r\n\r\nworld,\"\"\r\n", + "hello,\r\n\r\nworld,\"\"", + "hello,\r\n\r\nworld,\n", + "hello,\r\n\r\nworld,", + "hello,\r\n\r\nworld,\"\"\n", + "hello,\r\n\r\nworld,\"\"" + }; String[][] res = { {"hello", ""}, - {""}, // ExcelStrategy does not ignore empty lines + {""}, // ExcelStrategy does not ignore empty lines {"world", ""} }; String code; @@ -305,20 +313,20 @@ public void testEndOfFileBehaviourExcel() throws Exception { } } } - + public void testEndOfFileBehaviorCSV() throws Exception { String[] codes = { - "hello,\r\n\r\nworld,\r\n", - "hello,\r\n\r\nworld,", - "hello,\r\n\r\nworld,\"\"\r\n", - "hello,\r\n\r\nworld,\"\"", - "hello,\r\n\r\nworld,\n", - "hello,\r\n\r\nworld,", - "hello,\r\n\r\nworld,\"\"\n", - "hello,\r\n\r\nworld,\"\"" - }; + "hello,\r\n\r\nworld,\r\n", + "hello,\r\n\r\nworld,", + "hello,\r\n\r\nworld,\"\"\r\n", + "hello,\r\n\r\nworld,\"\"", + "hello,\r\n\r\nworld,\n", + "hello,\r\n\r\nworld,", + "hello,\r\n\r\nworld,\"\"\n", + "hello,\r\n\r\nworld,\"\"" + }; String[][] res = { - {"hello", ""}, // CSV Strategy ignores empty lines + {"hello", ""}, // CSV Strategy ignores empty lines {"world", ""} }; String code; @@ -333,17 +341,14 @@ public void testEndOfFileBehaviorCSV() throws Exception { } } } - + public void testEmptyLineBehaviourExcel() throws Exception { String[] codes = { - "hello,\r\n\r\n\r\n", - "hello,\n\n\n", - "hello,\"\"\r\n\r\n\r\n", - "hello,\"\"\n\n\n" - }; + "hello,\r\n\r\n\r\n", "hello,\n\n\n", "hello,\"\"\r\n\r\n\r\n", "hello,\"\"\n\n\n" + }; String[][] res = { {"hello", ""}, - {""}, // ExcelStrategy does not ignore empty lines + {""}, // ExcelStrategy does not ignore empty lines {""} }; String code; @@ -358,16 +363,13 @@ public void testEmptyLineBehaviourExcel() throws Exception { } } } - + public void testEmptyLineBehaviourCSV() throws Exception { String[] codes = { - "hello,\r\n\r\n\r\n", - "hello,\n\n\n", - "hello,\"\"\r\n\r\n\r\n", - "hello,\"\"\n\n\n" - }; + "hello,\r\n\r\n\r\n", "hello,\n\n\n", "hello,\"\"\r\n\r\n\r\n", "hello,\"\"\n\n\n" + }; String[][] res = { - {"hello", ""} // CSV Strategy ignores empty lines + {"hello", ""} // CSV Strategy ignores empty lines }; String code; for (int codeIndex = 0; codeIndex < codes.length; codeIndex++) { @@ -381,38 +383,38 @@ public void testEmptyLineBehaviourCSV() throws Exception { } } } - + public void OLDtestBackslashEscaping() throws IOException { String code = - "one,two,three\n" - + "on\\\"e,two\n" - + "on\"e,two\n" - + "one,\"tw\\\"o\"\n" - + "one,\"t\\,wo\"\n" - + "one,two,\"th,ree\"\n" - + "\"a\\\\\"\n" - + "a\\,b\n" - + "\"a\\\\,b\""; + "one,two,three\n" + + "on\\\"e,two\n" + + "on\"e,two\n" + + "one,\"tw\\\"o\"\n" + + "one,\"t\\,wo\"\n" + + "one,two,\"th,ree\"\n" + + "\"a\\\\\"\n" + + "a\\,b\n" + + "\"a\\\\,b\""; String[][] res = { - { "one", "two", "three" }, - { "on\\\"e", "two" }, - { "on\"e", "two" }, - { "one", "tw\"o" }, - { "one", "t\\,wo" }, // backslash in quotes only escapes a delimiter (",") - { "one", "two", "th,ree" }, - { "a\\\\" }, // backslash in quotes only escapes a delimiter (",") - { "a\\", "b" }, // a backslash must be returnd - { "a\\\\,b" } // backslash in quotes only escapes a delimiter (",") - }; + {"one", "two", "three"}, + {"on\\\"e", "two"}, + {"on\"e", "two"}, + {"one", "tw\"o"}, + {"one", 
"t\\,wo"}, // backslash in quotes only escapes a delimiter (",") + {"one", "two", "th,ree"}, + {"a\\\\"}, // backslash in quotes only escapes a delimiter (",") + {"a\\", "b"}, // a backslash must be returnd + {"a\\\\,b"} // backslash in quotes only escapes a delimiter (",") + }; CSVParser parser = new CSVParser(new StringReader(code)); String[][] tmp = parser.getAllValues(); assertEquals(res.length, tmp.length); assertTrue(tmp.length > 0); for (int i = 0; i < res.length; i++) { - assertTrue(Arrays.equals(res[i], tmp[i])); + assertTrue(Arrays.equals(res[i], tmp[i])); } } - + public void testBackslashEscaping() throws IOException { // To avoid confusion over the need for escaping chars in java code, @@ -420,32 +422,33 @@ public void testBackslashEscaping() throws IOException { // quote as the encapsulator. String code = - "one,two,three\n" // 0 - + "'',''\n" // 1) empty encapsulators - + "/',/'\n" // 2) single encapsulators - + "'/'','/''\n" // 3) single encapsulators encapsulated via escape - + "'''',''''\n" // 4) single encapsulators encapsulated via doubling - + "/,,/,\n" // 5) separator escaped - + "//,//\n" // 6) escape escaped - + "'//','//'\n" // 7) escape escaped in encapsulation - + " 8 , \"quoted \"\" /\" // string\" \n" // don't eat spaces - + "9, /\n \n" // escaped newline - + ""; + "one,two,three\n" // 0 + + "'',''\n" // 1) empty encapsulators + + "/',/'\n" // 2) single encapsulators + + "'/'','/''\n" // 3) single encapsulators encapsulated via escape + + "'''',''''\n" // 4) single encapsulators encapsulated via doubling + + "/,,/,\n" // 5) separator escaped + + "//,//\n" // 6) escape escaped + + "'//','//'\n" // 7) escape escaped in encapsulation + + " 8 , \"quoted \"\" /\" // string\" \n" // don't eat spaces + + "9, /\n \n" // escaped newline + + ""; String[][] res = { - { "one", "two", "three" }, // 0 - { "", "" }, // 1 - { "'", "'" }, // 2 - { "'", "'" }, // 3 - { "'", "'" }, // 4 - { ",", "," }, // 5 - { "/", "/" }, // 6 - { "/", "/" }, // 7 - { " 8 ", " \"quoted \"\" \" / string\" " }, - { "9", " \n " }, - }; - + {"one", "two", "three"}, // 0 + {"", ""}, // 1 + {"'", "'"}, // 2 + {"'", "'"}, // 3 + {"'", "'"}, // 4 + {",", ","}, // 5 + {"/", "/"}, // 6 + {"/", "/"}, // 7 + {" 8 ", " \"quoted \"\" \" / string\" "}, + {"9", " \n "}, + }; - CSVStrategy strategy = new CSVStrategy(',','\'',CSVStrategy.COMMENTS_DISABLED,'/',false,false,true,true,"\n"); + CSVStrategy strategy = + new CSVStrategy( + ',', '\'', CSVStrategy.COMMENTS_DISABLED, '/', false, false, true, true, "\n"); CSVParser parser = new CSVParser(new StringReader(code), strategy); String[][] tmp = parser.getAllValues(); @@ -461,20 +464,29 @@ public void testBackslashEscaping2() throws IOException { // We will test with a forward slash as the escape char, and a single // quote as the encapsulator. 
-    String code = ""
-        + " , , \n" // 1)
-        + " \t , , \n" // 2)
-        + " // , /, , /,\n" // 3)
-        + "";
+    String code =
+        ""
+            + " , , \n" // 1)
+            + " \t , , \n" // 2)
+            + " // , /, , /,\n" // 3)
+            + "";
     String[][] res = {
-      { " ", " ", " " }, // 1
-      { " \t ", " ", " " }, // 2
-      { " / ", " , ", " ," }, //3
-    };
-
+      {" ", " ", " "}, // 1
+      {" \t ", " ", " "}, // 2
+      {" / ", " , ", " ,"}, // 3
+    };

-    CSVStrategy strategy = new CSVStrategy
-        (',', CSVStrategy.ENCAPSULATOR_DISABLED, CSVStrategy.COMMENTS_DISABLED, '/', false, false, true, true, "\n");
+    CSVStrategy strategy =
+        new CSVStrategy(
+            ',',
+            CSVStrategy.ENCAPSULATOR_DISABLED,
+            CSVStrategy.COMMENTS_DISABLED,
+            '/',
+            false,
+            false,
+            true,
+            true,
+            "\n");

     CSVParser parser = new CSVParser(new StringReader(code), strategy);
     String[][] tmp = parser.getAllValues();
@@ -483,21 +495,20 @@ public void testBackslashEscaping2() throws IOException {
     if (!CSVPrinterTest.equals(res, tmp)) {
       assertTrue(false);
     }
-
   }
-
   public void testDefaultStrategy() throws IOException {

-    String code = ""
-        + "a,b\n" // 1)
-        + "\"\n\",\" \"\n" // 2)
-        + "\"\",#\n" // 2)
+    String code =
+        ""
+            + "a,b\n" // 1)
+            + "\"\n\",\" \"\n" // 2)
+            + "\"\",#\n" // 2)
         ;
     String[][] res = {
-      { "a", "b" },
-      { "\n", " " },
-      { "", "#" },
+      {"a", "b"},
+      {"\n", " "},
+      {"", "#"},
     };

     CSVStrategy strategy = CSVStrategy.DEFAULT_STRATEGY;
@@ -512,12 +523,12 @@ public void testDefaultStrategy() throws IOException {
     }

     String[][] res_comments = {
-      { "a", "b" },
-      { "\n", " " },
-      { ""},
+      {"a", "b"},
+      {"\n", " "},
+      {""},
     };

-    strategy = new CSVStrategy(',','"','#');
+    strategy = new CSVStrategy(',', '"', '#');
     parser = new CSVParser(new StringReader(code), strategy);
     tmp = parser.getAllValues();
@@ -526,57 +537,65 @@ public void testDefaultStrategy() throws IOException {
     }
   }

+  public void testUnicodeEscape() throws IOException {
+    String code = "abc,\\u0070\\u0075\\u0062\\u006C\\u0069\\u0063";
+    CSVStrategy strategy =
+        new CSVStrategy(
+            ',',
+            '"',
+            CSVStrategy.COMMENTS_DISABLED,
+            CSVStrategy.ESCAPE_DISABLED,
+            true,
+            true,
+            true,
+            true,
+            CSVStrategy.DEFAULT_PRINTER_NEWLINE);
+    CSVParser parser = new CSVParser(new StringReader(code), strategy);
+    String[] data = parser.getLine();
+    assertEquals(2, data.length);
+    assertEquals("abc", data[0]);
+    assertEquals("public", data[1]);
+  }

-  public void testUnicodeEscape() throws IOException {
-    String code = "abc,\\u0070\\u0075\\u0062\\u006C\\u0069\\u0063";
-    CSVStrategy strategy = new CSVStrategy
-        (',', '"', CSVStrategy.COMMENTS_DISABLED, CSVStrategy.ESCAPE_DISABLED, true, true, true, true, CSVStrategy.DEFAULT_PRINTER_NEWLINE);
-    CSVParser parser = new CSVParser(new StringReader(code), strategy);
-    String[] data = parser.getLine();
-    assertEquals(2, data.length);
-    assertEquals("abc", data[0]);
-    assertEquals("public", data[1]);
-  }
-
-  public void testCarriageReturnLineFeedEndings() throws IOException {
-    String code = "foo\r\nbaar,\r\nhello,world\r\n,kanu";
-    CSVParser parser = new CSVParser(new StringReader(code));
-    String[][] data = parser.getAllValues();
-    assertEquals(4, data.length);
-  }
-
-  public void testIgnoreEmptyLines() throws IOException {
-    String code = "\nfoo,baar\n\r\n,\n\n,world\r\n\n";
-    //String code = "world\r\n\n";
-    //String code = "foo;baar\r\n\r\nhello;\r\n\r\nworld;\r\n";
-    CSVParser parser = new CSVParser(new StringReader(code));
-    String[][] data = parser.getAllValues();
-    assertEquals(3, data.length);
-  }
-
-  public void testLineTokenConsistency() throws IOException {
-    String code =
"\nfoo,baar\n\r\n,\n\n,world\r\n\n"; - CSVParser parser = new CSVParser(new StringReader(code)); - String[][] data = parser.getAllValues(); - parser = new CSVParser(new StringReader(code)); - CSVParser parser1 = new CSVParser(new StringReader(code)); - for (int i = 0; i < data.length; i++) { - assertTrue(Arrays.equals(parser1.getLine(), data[i])); - for (int j = 0; j < data[i].length; j++) { - assertEquals(parser.nextValue(), data[i][j]); - } + public void testCarriageReturnLineFeedEndings() throws IOException { + String code = "foo\r\nbaar,\r\nhello,world\r\n,kanu"; + CSVParser parser = new CSVParser(new StringReader(code)); + String[][] data = parser.getAllValues(); + assertEquals(4, data.length); + } + + public void testIgnoreEmptyLines() throws IOException { + String code = "\nfoo,baar\n\r\n,\n\n,world\r\n\n"; + // String code = "world\r\n\n"; + // String code = "foo;baar\r\n\r\nhello;\r\n\r\nworld;\r\n"; + CSVParser parser = new CSVParser(new StringReader(code)); + String[][] data = parser.getAllValues(); + assertEquals(3, data.length); + } + + public void testLineTokenConsistency() throws IOException { + String code = "\nfoo,baar\n\r\n,\n\n,world\r\n\n"; + CSVParser parser = new CSVParser(new StringReader(code)); + String[][] data = parser.getAllValues(); + parser = new CSVParser(new StringReader(code)); + CSVParser parser1 = new CSVParser(new StringReader(code)); + for (int i = 0; i < data.length; i++) { + assertTrue(Arrays.equals(parser1.getLine(), data[i])); + for (int j = 0; j < data[i].length; j++) { + assertEquals(parser.nextValue(), data[i][j]); } } + } - // From SANDBOX-153 - public void testDelimiterIsWhitespace() throws IOException { - String code = "one\ttwo\t\tfour \t five\t six"; - TestCSVParser parser = new TestCSVParser(new StringReader(code), CSVStrategy.TDF_STRATEGY); - assertEquals(CSVParser.TT_TOKEN + ";one;", parser.testNextToken()); - assertEquals(CSVParser.TT_TOKEN + ";two;", parser.testNextToken()); - assertEquals(CSVParser.TT_TOKEN + ";;", parser.testNextToken()); - assertEquals(CSVParser.TT_TOKEN + ";four;", parser.testNextToken()); - assertEquals(CSVParser.TT_TOKEN + ";five;", parser.testNextToken()); - assertEquals(CSVParser.TT_EOF + ";six;", parser.testNextToken()); - } + // From SANDBOX-153 + public void testDelimiterIsWhitespace() throws IOException { + String code = "one\ttwo\t\tfour \t five\t six"; + TestCSVParser parser = new TestCSVParser(new StringReader(code), CSVStrategy.TDF_STRATEGY); + assertEquals(CSVParser.TT_TOKEN + ";one;", parser.testNextToken()); + assertEquals(CSVParser.TT_TOKEN + ";two;", parser.testNextToken()); + assertEquals(CSVParser.TT_TOKEN + ";;", parser.testNextToken()); + assertEquals(CSVParser.TT_TOKEN + ";four;", parser.testNextToken()); + assertEquals(CSVParser.TT_TOKEN + ";five;", parser.testNextToken()); + assertEquals(CSVParser.TT_EOF + ";six;", parser.testNextToken()); + } } diff --git a/solr/core/src/test/org/apache/solr/internal/csv/CSVPrinterTest.java b/solr/core/src/test/org/apache/solr/internal/csv/CSVPrinterTest.java index 199717bf659..6a3a4c6768e 100644 --- a/solr/core/src/test/org/apache/solr/internal/csv/CSVPrinterTest.java +++ b/solr/core/src/test/org/apache/solr/internal/csv/CSVPrinterTest.java @@ -20,14 +20,11 @@ import java.io.StringReader; import java.io.StringWriter; import java.util.Random; - import junit.framework.TestCase; -/** - * CSVPrinterTest - */ +/** CSVPrinterTest */ public class CSVPrinterTest extends TestCase { - + String lineSeparator = "\n"; public void testPrinter1() throws IOException { @@ 
@@ -70,18 +67,25 @@ public void testExcelPrinter2() throws IOException {
     assertEquals("\"a,b\",b" + lineSeparator, sw.toString());
   }
-
-
   public void testRandom() throws Exception {
-    int iter=10000;
+    int iter = 10000;
     strategy = CSVStrategy.DEFAULT_STRATEGY;
     doRandom(iter);
     strategy = CSVStrategy.EXCEL_STRATEGY;
     doRandom(iter);

     // Strategy for MySQL
-    strategy = new CSVStrategy
-        ('\t', CSVStrategy.ENCAPSULATOR_DISABLED, CSVStrategy.COMMENTS_DISABLED,'\\',false, false, false, false, "\n");
+    strategy =
+        new CSVStrategy(
+            '\t',
+            CSVStrategy.ENCAPSULATOR_DISABLED,
+            CSVStrategy.COMMENTS_DISABLED,
+            '\\',
+            false,
+            false,
+            false,
+            false,
+            "\n");
     doRandom(iter);
   }

@@ -89,20 +93,20 @@ public void testRandom() throws Exception {
   CSVStrategy strategy;

   public void doRandom(int iter) throws Exception {
-    for (int i=0; i<iter; i++) {
+    for (int i = 0; i < iter; i++) {
-      if (ch <= ' ' || ch >=128) {
-        sb.append("(").append((int)ch).append(")");
+      if (ch <= ' ' || ch >= 128) {
+        sb.append("(").append((int) ch).append(")");
       } else {
         sb.append(ch);
       }
@@ -170,26 +175,45 @@ public String randStr() {
     int sz = r.nextInt(20);
     // sz = r.nextInt(3);
     char[] buf = new char[sz];
-    for (int i=0; i<sz; i++) {
+    for (int i = 0; i < sz; i++) {
diff --git a/solr/core/src/test/org/apache/solr/internal/csv/CSVStrategyTest.java b/solr/core/src/test/org/apache/solr/internal/csv/CSVStrategyTest.java
--- a/solr/core/src/test/org/apache/solr/internal/csv/CSVStrategyTest.java
+++ b/solr/core/src/test/org/apache/solr/internal/csv/CSVStrategyTest.java
+ * <p>The test are organized in three different sections: The 'setter/getter' section, the lexer
+ * section and finally the strategy section. In case a test fails, you should follow a top-down
+ * approach for fixing a potential bug (it's likely that the strategy itself fails if the lexer has
+ * problems...).
  */
 public class CSVStrategyTest extends TestCase {
@@ -53,26 +52,25 @@ public void testSetCSVStrategy() {
     assertEquals(strategy.getDelimiter(), ',');
     assertEquals(strategy.getEncapsulator(), '"');
     assertEquals(strategy.getCommentStart(), CSVStrategy.COMMENTS_DISABLED);
-    assertEquals(true, strategy.getIgnoreLeadingWhitespaces());
+    assertEquals(true, strategy.getIgnoreLeadingWhitespaces());
     assertEquals(false, strategy.getUnicodeEscapeInterpretation());
-    assertEquals(true, strategy.getIgnoreEmptyLines());
+    assertEquals(true, strategy.getIgnoreEmptyLines());

     // explicit csv settings
     assertEquals(strategy.getDelimiter(), ',');
     assertEquals(strategy.getEncapsulator(), '"');
     assertEquals(strategy.getCommentStart(), CSVStrategy.COMMENTS_DISABLED);
-    assertEquals(true, strategy.getIgnoreLeadingWhitespaces());
+    assertEquals(true, strategy.getIgnoreLeadingWhitespaces());
     assertEquals(false, strategy.getUnicodeEscapeInterpretation());
-    assertEquals(true, strategy.getIgnoreEmptyLines());
+    assertEquals(true, strategy.getIgnoreEmptyLines());
   }
-
+
   public void testSetExcelStrategy() {
     CSVStrategy strategy = CSVStrategy.EXCEL_STRATEGY;
     assertEquals(strategy.getDelimiter(), ',');
     assertEquals(strategy.getEncapsulator(), '"');
     assertEquals(strategy.getCommentStart(), CSVStrategy.COMMENTS_DISABLED);
-    assertEquals(false, strategy.getIgnoreLeadingWhitespaces());
+    assertEquals(false, strategy.getIgnoreLeadingWhitespaces());
     assertEquals(false, strategy.getUnicodeEscapeInterpretation());
     assertEquals(false, strategy.getIgnoreEmptyLines());
   }
-
-}
+}
diff --git a/solr/core/src/test/org/apache/solr/internal/csv/CSVUtilsTest.java b/solr/core/src/test/org/apache/solr/internal/csv/CSVUtilsTest.java
index 7be19d34d17..89018cf1bcd 100644
--- a/solr/core/src/test/org/apache/solr/internal/csv/CSVUtilsTest.java
+++ b/solr/core/src/test/org/apache/solr/internal/csv/CSVUtilsTest.java
@@ -17,134 +17,130 @@
 package org.apache.solr.internal.csv;

 import java.io.IOException;
-
 import junit.framework.TestCase;

-/**
- * CSVUtilsTest
- */
+/** CSVUtilsTest */
 public class CSVUtilsTest extends TestCase {
-
+
   // ======================================================
   //   static parser tests
   // ======================================================

   public void testParse1() throws IOException {
-    String[][] data = CSVUtils.parse("abc\ndef");
-    assertEquals(2, data.length);
-    assertEquals(1, data[0].length);
-    assertEquals(1, data[1].length);
-    assertEquals("abc", data[0][0]);
-    assertEquals("def", data[1][0]);
-  }
+    String[][] data = CSVUtils.parse("abc\ndef");
+    assertEquals(2, data.length);
+    assertEquals(1, data[0].length);
+    assertEquals(1, data[1].length);
+    assertEquals("abc", data[0][0]);
+    assertEquals("def", data[1][0]);
+  }

-  public void testParse2() throws IOException {
-    String[][] data = CSVUtils.parse("abc,def,\"ghi,jkl\"\ndef");
-    assertEquals(2, data.length);
-    assertEquals(3, data[0].length);
-    assertEquals(1, data[1].length);
-    assertEquals("abc", data[0][0]);
-    assertEquals("def", data[0][1]);
-    assertEquals("ghi,jkl", data[0][2]);
-    assertEquals("def", data[1][0]);
-  }
+  public void testParse2() throws IOException {
+    String[][] data = CSVUtils.parse("abc,def,\"ghi,jkl\"\ndef");
+    assertEquals(2, data.length);
+    assertEquals(3, data[0].length);
+    assertEquals(1, data[1].length);
+    assertEquals("abc", data[0][0]);
+    assertEquals("def", data[0][1]);
+    assertEquals("ghi,jkl", data[0][2]);
+    assertEquals("def", data[1][0]);
+  }

-  public void testParse3() throws IOException {
-    String[][] data = CSVUtils.parse("abc,\"def\nghi\"\njkl");
-    assertEquals(2, data.length);
-    assertEquals(2, data[0].length);
-    assertEquals(1, data[1].length);
-    assertEquals("abc", data[0][0]);
-    assertEquals("def\nghi", data[0][1]);
-    assertEquals("jkl", data[1][0]);
-  }
+  public void testParse3() throws IOException {
+    String[][] data = CSVUtils.parse("abc,\"def\nghi\"\njkl");
+    assertEquals(2, data.length);
+    assertEquals(2, data[0].length);
+    assertEquals(1, data[1].length);
+    assertEquals("abc", data[0][0]);
+    assertEquals("def\nghi", data[0][1]);
+    assertEquals("jkl", data[1][0]);
+  }

-  public void testParse4() throws IOException {
-    String[][] data = CSVUtils.parse("abc,\"def\\\\nghi\"\njkl");
-    assertEquals(2, data.length);
-    assertEquals(2, data[0].length);
-    assertEquals(1, data[1].length);
-    assertEquals("abc", data[0][0]);
-    // an escape char in quotes only escapes a delimiter, not itself
-    assertEquals("def\\\\nghi", data[0][1]);
-    assertEquals("jkl", data[1][0]);
-  }
+  public void testParse4() throws IOException {
+    String[][] data = CSVUtils.parse("abc,\"def\\\\nghi\"\njkl");
+    assertEquals(2, data.length);
+    assertEquals(2, data[0].length);
+    assertEquals(1, data[1].length);
+    assertEquals("abc", data[0][0]);
+    // an escape char in quotes only escapes a delimiter, not itself
+    assertEquals("def\\\\nghi", data[0][1]);
+    assertEquals("jkl", data[1][0]);
+  }

-  public void testParse5() throws IOException {
-    String[][] data = CSVUtils.parse("abc,def\\nghi\njkl");
-    assertEquals(2, data.length);
-    assertEquals(2, data[0].length);
-    assertEquals(1, data[1].length);
-    assertEquals("abc", data[0][0]);
-    assertEquals("def\\nghi", data[0][1]);
-    assertEquals("jkl", data[1][0]);
-  }
-
-  public void testParse6() throws IOException {
-    String[][] data = CSVUtils.parse("");
-    // default strategy is CSV, which ignores empty lines
-    assertEquals(0, data.length);
-  }
-
-  public void testParse7() throws IOException {
-    boolean io = false;
-    try {
-      CSVUtils.parse(null);
-    } catch (IllegalArgumentException e) {
-      io = true;
-    }
-    assertTrue(io);
-  }
-
-  public void testParseLine1() throws IOException {
-    String[] data = CSVUtils.parseLine("abc,def,ghi");
-    assertEquals(3, data.length);
-    assertEquals("abc", data[0]);
-    assertEquals("def", data[1]);
-    assertEquals("ghi", data[2]);
-  }
+  public void testParse5() throws IOException {
+    String[][] data = CSVUtils.parse("abc,def\\nghi\njkl");
+    assertEquals(2, data.length);
+    assertEquals(2, data[0].length);
+    assertEquals(1, data[1].length);
+    assertEquals("abc", data[0][0]);
+    assertEquals("def\\nghi", data[0][1]);
+    assertEquals("jkl", data[1][0]);
+  }

-  public void testParseLine2() throws IOException {
-    String[] data = CSVUtils.parseLine("abc,def,ghi\n");
-    assertEquals(3, data.length);
-    assertEquals("abc", data[0]);
-    assertEquals("def", data[1]);
-    assertEquals("ghi", data[2]);
-  }
+  public void testParse6() throws IOException {
+    String[][] data = CSVUtils.parse("");
+    // default strategy is CSV, which ignores empty lines
+    assertEquals(0, data.length);
+  }

-  public void testParseLine3() throws IOException {
-    String[] data = CSVUtils.parseLine("abc,\"def,ghi\"");
-    assertEquals(2, data.length);
-    assertEquals("abc", data[0]);
-    assertEquals("def,ghi", data[1]);
+  public void testParse7() throws IOException {
+    boolean io = false;
+    try {
+      CSVUtils.parse(null);
+    } catch (IllegalArgumentException e) {
+      io = true;
     }
+    assertTrue(io);
+  }

-  public void testParseLine4() throws IOException {
-    String[] data = CSVUtils.parseLine("abc,\"def\nghi\"");
-    assertEquals(2, data.length);
-    assertEquals("abc", data[0]);
-    assertEquals("def\nghi", data[1]);
-  }
-
-  public void testParseLine5() throws IOException {
-    String[] data = CSVUtils.parseLine("");
-    assertEquals(0, data.length);
-    // assertEquals("", data[0]);
-  }
-
-  public void testParseLine6() throws IOException {
-    boolean io = false;
-    try {
-      CSVUtils.parseLine(null);
-    } catch (IllegalArgumentException e) {
-      io = true;
-    }
-    assertTrue(io);
-  }
-
-  public void testParseLine7() throws IOException {
-    String[] res = CSVUtils.parseLine("");
-    assertNotNull(res);
-    assertEquals(0, res.length);
+  public void testParseLine1() throws IOException {
+    String[] data = CSVUtils.parseLine("abc,def,ghi");
+    assertEquals(3, data.length);
+    assertEquals("abc", data[0]);
+    assertEquals("def", data[1]);
+    assertEquals("ghi", data[2]);
+  }
+
+  public void testParseLine2() throws IOException {
+    String[] data = CSVUtils.parseLine("abc,def,ghi\n");
+    assertEquals(3, data.length);
+    assertEquals("abc", data[0]);
+    assertEquals("def", data[1]);
+    assertEquals("ghi", data[2]);
+  }
+
+  public void testParseLine3() throws IOException {
+    String[] data = CSVUtils.parseLine("abc,\"def,ghi\"");
+    assertEquals(2, data.length);
+    assertEquals("abc", data[0]);
+    assertEquals("def,ghi", data[1]);
+  }
+
+  public void testParseLine4() throws IOException {
+    String[] data = CSVUtils.parseLine("abc,\"def\nghi\"");
+    assertEquals(2, data.length);
+    assertEquals("abc", data[0]);
+    assertEquals("def\nghi", data[1]);
+  }
+
+  public void testParseLine5() throws IOException {
+    String[] data = CSVUtils.parseLine("");
+    assertEquals(0, data.length);
+    // assertEquals("", data[0]);
+  }
+
+  public void testParseLine6() throws IOException {
+    boolean io = false;
+    try {
+      CSVUtils.parseLine(null);
+    } catch (IllegalArgumentException e) {
+      io = true;
     }
-
+    assertTrue(io);
+  }
+
+  public void testParseLine7() throws IOException {
+    String[] res = CSVUtils.parseLine("");
+    assertNotNull(res);
+    assertEquals(0, res.length);
+  }
 }
diff --git a/solr/core/src/test/org/apache/solr/internal/csv/CharBufferTest.java b/solr/core/src/test/org/apache/solr/internal/csv/CharBufferTest.java
index 10e58f46a37..9fb65ed013b 100644
--- a/solr/core/src/test/org/apache/solr/internal/csv/CharBufferTest.java
+++ b/solr/core/src/test/org/apache/solr/internal/csv/CharBufferTest.java
@@ -20,166 +20,166 @@
 import org.apache.solr.SolrTestCaseJ4;

 public class CharBufferTest extends TestCase {
-    public void testCreate() {
-        CharBuffer cb = new CharBuffer();
-        assertEquals(0, cb.length());
-        SolrTestCaseJ4.expectThrows(IllegalArgumentException.class, () -> new CharBuffer(0));
-        cb = new CharBuffer(128);
-        assertEquals(0, cb.length());
-    }
-
-    public void testAppendChar() {
-        CharBuffer cb = new CharBuffer(1);
-        String expected = "";
-        for (char c = 'a'; c < 'z'; c++) {
-            cb.append(c);
-            expected += c;
-            assertEquals(expected, cb.toString());
-            assertEquals(expected.length(), cb.length());
-        }
-    }
-
-    public void testAppendCharArray() {
-        CharBuffer cb = new CharBuffer(1);
-        char[] abcd = "abcd".toCharArray();
-        String expected = "";
-        for (int i=0; i<10; i++) {
-            cb.append(abcd);
-            expected += "abcd";
-            assertEquals(expected, cb.toString());
-            assertEquals(4*(i+1), cb.length());
-        }
-    }
-
-    public void testAppendString() {
-        CharBuffer cb = new CharBuffer(1);
-        String abcd = "abcd";
-        String expected = "";
-        for (int i=0; i<10; i++) {
-            cb.append(abcd);
-            expected += abcd;
-            assertEquals(expected, cb.toString());
-            assertEquals(4*(i+1), cb.length());
-        }
-    }
-
-    public void testAppendStringBuffer() {
-        CharBuffer cb = new CharBuffer(1);
-        StringBuffer abcd = new StringBuffer("abcd");
-        String expected = "";
-        for (int i=0; i<10; i++) {
-            cb.append(abcd);
-            expected += "abcd";
-            assertEquals(expected, cb.toString());
-            assertEquals(4*(i+1), cb.length());
-        }
-    }
-
-    public void testAppendCharBuffer() {
-        CharBuffer cb = new CharBuffer(1);
-        CharBuffer abcd = new CharBuffer(17);
-        abcd.append("abcd");
-        String expected = "";
-        for (int i=0; i<10; i++) {
-            cb.append(abcd);
-            expected += "abcd";
-            assertEquals(expected, cb.toString());
-            assertEquals(4*(i+1), cb.length());
-        }
-    }
-
-    public void testShrink() {
-        String data = "123456789012345678901234567890";
-
-        CharBuffer cb = new CharBuffer(data.length() + 100);
-        assertEquals(data.length() + 100, cb.capacity());
-        cb.append(data);
-        assertEquals(data.length() + 100, cb.capacity());
-        assertEquals(data.length(), cb.length());
-        cb.shrink();
-        assertEquals(data.length(), cb.capacity());
-        assertEquals(data.length(), cb.length());
-        assertEquals(data, cb.toString());
-    }
-
-    //-- the following test cases have been adapted from the HttpComponents project
-    //-- written by Oleg Kalnichevski
-
-    public void testSimpleAppend() throws Exception {
-        CharBuffer buffer = new CharBuffer(16);
-        assertEquals(16, buffer.capacity());
-        assertEquals(0, buffer.length());
-        char[] b1 = buffer.getCharacters();
-        assertNotNull(b1);
-        assertEquals(0, b1.length);
-        assertEquals(0, buffer.length());
-
-        char[] tmp = new char[] { '1', '2', '3', '4'};
-        buffer.append(tmp);
-        assertEquals(16, buffer.capacity());
-        assertEquals(4, buffer.length());
-
-        char[] b2 = buffer.getCharacters();
-        assertNotNull(b2);
-        assertEquals(4, b2.length);
-        for (int i = 0; i < tmp.length; i++) {
-            assertEquals(tmp[i], b2[i]);
-        }
-        assertEquals("1234", buffer.toString());
-
-        buffer.clear();
-        assertEquals(16, buffer.capacity());
-        assertEquals(0, buffer.length());
+  public void testCreate() {
+    CharBuffer cb = new CharBuffer();
+    assertEquals(0, cb.length());
+
+    SolrTestCaseJ4.expectThrows(IllegalArgumentException.class, () -> new CharBuffer(0));
+    cb = new CharBuffer(128);
+    assertEquals(0, cb.length());
+  }
+
+  public void testAppendChar() {
+    CharBuffer cb = new CharBuffer(1);
+    String expected = "";
+    for (char c = 'a'; c < 'z'; c++) {
+      cb.append(c);
+      expected += c;
+      assertEquals(expected, cb.toString());
+      assertEquals(expected.length(), cb.length());
     }
-
-    public void testAppendString2() throws Exception {
-        CharBuffer buffer = new CharBuffer(8);
-        buffer.append("stuff");
-        buffer.append(" and more stuff");
-        assertEquals("stuff and more stuff", buffer.toString());
+  }
+
+  public void testAppendCharArray() {
+    CharBuffer cb = new CharBuffer(1);
+    char[] abcd = "abcd".toCharArray();
+    String expected = "";
+    for (int i = 0; i < 10; i++) {
+      cb.append(abcd);
+      expected += "abcd";
+      assertEquals(expected, cb.toString());
+      assertEquals(4 * (i + 1), cb.length());
     }
-
-    public void testAppendNull() throws Exception {
-        CharBuffer buffer = new CharBuffer(8);
-
-        buffer.append((StringBuffer)null);
-        assertEquals("", buffer.toString());
-
-        buffer.append((String)null);
-        assertEquals("", buffer.toString());
-
-        buffer.append((CharBuffer)null);
-        assertEquals("", buffer.toString());
-
-        buffer.append((char[])null);
-        assertEquals("", buffer.toString());
+  }
+
+  public void testAppendString() {
+    CharBuffer cb = new CharBuffer(1);
+    String abcd = "abcd";
+    String expected = "";
+    for (int i = 0; i < 10; i++) {
+      cb.append(abcd);
+      expected += abcd;
+      assertEquals(expected, cb.toString());
+      assertEquals(4 * (i + 1), cb.length());
     }
-
-    public void testAppendCharArrayBuffer() throws Exception {
-        CharBuffer buffer1 = new CharBuffer(8);
-        buffer1.append(" and more stuff");
-        CharBuffer buffer2 = new CharBuffer(8);
-        buffer2.append("stuff");
-        buffer2.append(buffer1);
-        assertEquals("stuff and more stuff", buffer2.toString());
+  }
+
+  public void testAppendStringBuffer() {
+    CharBuffer cb = new CharBuffer(1);
+    StringBuffer abcd = new StringBuffer("abcd");
+    String expected = "";
+    for (int i = 0; i < 10; i++) {
+      cb.append(abcd);
+      expected += "abcd";
+      assertEquals(expected, cb.toString());
+      assertEquals(4 * (i + 1), cb.length());
     }
-
-    public void testAppendSingleChar() throws Exception {
-        CharBuffer buffer = new CharBuffer(4);
-        buffer.append('1');
-        buffer.append('2');
-        buffer.append('3');
-        buffer.append('4');
-        buffer.append('5');
-        buffer.append('6');
-        assertEquals("123456", buffer.toString());
+  }
+
+  public void testAppendCharBuffer() {
+    CharBuffer cb = new CharBuffer(1);
+    CharBuffer abcd = new CharBuffer(17);
+    abcd.append("abcd");
+    String expected = "";
+    for (int i = 0; i < 10; i++) {
+      cb.append(abcd);
+      expected += "abcd";
+      assertEquals(expected, cb.toString());
+      assertEquals(4 * (i + 1), cb.length());
     }
-
-    public void testProvideCapacity() throws Exception {
-        CharBuffer buffer = new CharBuffer(4);
-        buffer.provideCapacity(2);
-        assertEquals(4, buffer.capacity());
-        buffer.provideCapacity(8);
-        assertTrue(buffer.capacity() >= 8);
+  }
+
+  public void testShrink() {
+    String data = "123456789012345678901234567890";
+
+    CharBuffer cb = new CharBuffer(data.length() + 100);
+    assertEquals(data.length() + 100, cb.capacity());
+    cb.append(data);
+    assertEquals(data.length() + 100, cb.capacity());
+    assertEquals(data.length(), cb.length());
+    cb.shrink();
+    assertEquals(data.length(), cb.capacity());
+    assertEquals(data.length(), cb.length());
+    assertEquals(data, cb.toString());
+  }
+
+  // -- the following test cases have been
+  // -- adapted from the HttpComponents project
+  // -- written by Oleg Kalnichevski
+
+  public void testSimpleAppend() throws Exception {
+    CharBuffer buffer = new CharBuffer(16);
+    assertEquals(16, buffer.capacity());
+    assertEquals(0, buffer.length());
+    char[] b1 = buffer.getCharacters();
+    assertNotNull(b1);
+    assertEquals(0, b1.length);
+    assertEquals(0, buffer.length());
+
+    char[] tmp = new char[] {'1', '2', '3', '4'};
+    buffer.append(tmp);
+    assertEquals(16, buffer.capacity());
+    assertEquals(4, buffer.length());
+
+    char[] b2 = buffer.getCharacters();
+    assertNotNull(b2);
+    assertEquals(4, b2.length);
+    for (int i = 0; i < tmp.length; i++) {
+      assertEquals(tmp[i], b2[i]);
     }
+    assertEquals("1234", buffer.toString());
+
+    buffer.clear();
+    assertEquals(16, buffer.capacity());
+    assertEquals(0, buffer.length());
+  }
+
+  public void testAppendString2() throws Exception {
+    CharBuffer buffer = new CharBuffer(8);
+    buffer.append("stuff");
+    buffer.append(" and more stuff");
+    assertEquals("stuff and more stuff", buffer.toString());
+  }
+
+  public void testAppendNull() throws Exception {
+    CharBuffer buffer = new CharBuffer(8);
+
+    buffer.append((StringBuffer) null);
+    assertEquals("", buffer.toString());
+
+    buffer.append((String) null);
+    assertEquals("", buffer.toString());
+
+    buffer.append((CharBuffer) null);
+    assertEquals("", buffer.toString());
+
+    buffer.append((char[]) null);
+    assertEquals("", buffer.toString());
+  }
+
+  public void testAppendCharArrayBuffer() throws Exception {
+    CharBuffer buffer1 = new CharBuffer(8);
+    buffer1.append(" and more stuff");
+    CharBuffer buffer2 = new CharBuffer(8);
+    buffer2.append("stuff");
+    buffer2.append(buffer1);
+    assertEquals("stuff and more stuff", buffer2.toString());
+  }
+
+  public void testAppendSingleChar() throws Exception {
+    CharBuffer buffer = new CharBuffer(4);
+    buffer.append('1');
+    buffer.append('2');
+    buffer.append('3');
+    buffer.append('4');
+    buffer.append('5');
+    buffer.append('6');
+    assertEquals("123456", buffer.toString());
+  }
+
+  public void testProvideCapacity() throws Exception {
+    CharBuffer buffer = new CharBuffer(4);
+    buffer.provideCapacity(2);
+    assertEquals(4, buffer.capacity());
+    buffer.provideCapacity(8);
+    assertTrue(buffer.capacity() >= 8);
+  }
 }
diff --git a/solr/core/src/test/org/apache/solr/internal/csv/ExtendedBufferedReaderTest.java b/solr/core/src/test/org/apache/solr/internal/csv/ExtendedBufferedReaderTest.java
index 804c36b94a8..07d78c5f48d 100644
--- a/solr/core/src/test/org/apache/solr/internal/csv/ExtendedBufferedReaderTest.java
+++ b/solr/core/src/test/org/apache/solr/internal/csv/ExtendedBufferedReaderTest.java
@@ -18,26 +18,22 @@

 import java.io.StringReader;
 import java.util.Arrays;
-
 import junit.framework.TestCase;

-/**
- * ExtendedBufferedReaderTest
- *
- */
+/** ExtendedBufferedReaderTest */
 public class ExtendedBufferedReaderTest extends TestCase {

   // ======================================================
   //   the test cases
   // ======================================================
-
+
   public void testConstructors() {
     ExtendedBufferedReader br = new ExtendedBufferedReader(new StringReader(""));
-    br = new ExtendedBufferedReader(new StringReader(""), 10);
+    br = new ExtendedBufferedReader(new StringReader(""), 10);
   }
-
+
   public void testReadLookahead1() throws Exception {
-
+
     assertEquals(ExtendedBufferedReader.END_OF_STREAM, getEBR("").read());
     ExtendedBufferedReader br = getEBR("1\n2\r3\n");
     assertEquals('1', br.lookAhead());
@@ -48,29 +44,29 @@ public void testReadLookahead1() throws
Exception {
     assertEquals(0, br.getLineNumber());
     assertEquals('\n', br.lookAhead());
     assertEquals(0, br.getLineNumber());
-    assertEquals('1', br.readAgain());
+    assertEquals('1', br.readAgain());
     assertEquals('\n', br.read());
     assertEquals(1, br.getLineNumber());
     assertEquals('\n', br.readAgain());
     assertEquals(1, br.getLineNumber());
-
+
     assertEquals('2', br.lookAhead());
     assertEquals(1, br.getLineNumber());
     assertEquals('\n', br.readAgain());
     assertEquals(1, br.getLineNumber());
     assertEquals('2', br.read());
     assertEquals('2', br.readAgain());
-
+
     assertEquals('\r', br.lookAhead());
     assertEquals('2', br.readAgain());
     assertEquals('\r', br.read());
     assertEquals('\r', br.readAgain());
-
+
     assertEquals('3', br.lookAhead());
     assertEquals('\r', br.readAgain());
     assertEquals('3', br.read());
     assertEquals('3', br.readAgain());
-
+
     assertEquals('\n', br.lookAhead());
     assertEquals(1, br.getLineNumber());
     assertEquals('3', br.readAgain());
@@ -78,25 +74,23 @@ public void testReadLookahead1() throws Exception {
     assertEquals(2, br.getLineNumber());
     assertEquals('\n', br.readAgain());
     assertEquals(2, br.getLineNumber());
-
+
     assertEquals(ExtendedBufferedReader.END_OF_STREAM, br.lookAhead());
     assertEquals('\n', br.readAgain());
     assertEquals(ExtendedBufferedReader.END_OF_STREAM, br.read());
     assertEquals(ExtendedBufferedReader.END_OF_STREAM, br.readAgain());
     assertEquals(ExtendedBufferedReader.END_OF_STREAM, br.read());
     assertEquals(ExtendedBufferedReader.END_OF_STREAM, br.lookAhead());
-
   }

   public void testReadLookahead2() throws Exception {
     char[] ref = new char[5];
-    char[] res = new char[5];
-
+    char[] res = new char[5];
+
     ExtendedBufferedReader br = getEBR("");
     assertEquals(0, br.read(res, 0, 0));
-    assertTrue(Arrays.equals(res, ref));
-
+    assertTrue(Arrays.equals(res, ref));
+
     br = getEBR("abcdefg");
     ref[0] = 'a';
     ref[1] = 'b';
@@ -104,27 +98,26 @@ public void testReadLookahead2() throws Exception {
     assertEquals(3, br.read(res, 0, 3));
     assertTrue(Arrays.equals(res, ref));
     assertEquals('c', br.readAgain());
-
+
     assertEquals('d', br.lookAhead());
     ref[4] = 'd';
     assertEquals(1, br.read(res, 4, 1));
     assertTrue(Arrays.equals(res, ref));
     assertEquals('d', br.readAgain());
-
   }
-
+
   public void testMarkSupported() {
     assertFalse(getEBR("foo").markSupported());
   }
-
+
   public void testReadLine() throws Exception {
     ExtendedBufferedReader br = getEBR("");
     assertTrue(br.readLine() == null);
-
+
     br = getEBR("\n");
     assertTrue(br.readLine().equals(""));
     assertTrue(br.readLine() == null);
-
+
     br = getEBR("foo\n\nhello");
     assertEquals(0, br.getLineNumber());
     assertTrue(br.readLine().equals("foo"));
@@ -135,7 +128,7 @@ public void testReadLine() throws Exception {
     assertEquals(3, br.getLineNumber());
     assertTrue(br.readLine() == null);
     assertEquals(3, br.getLineNumber());
-
+
     br = getEBR("foo\n\nhello");
     assertEquals('f', br.read());
     assertEquals('o', br.lookAhead());
@@ -148,8 +141,7 @@ public void testReadLine() throws Exception {
     assertTrue(br.readLine().equals("hello"));
     assertTrue(br.readLine() == null);
     assertEquals(3, br.getLineNumber());
-
-
+
     br = getEBR("foo\rbaar\r\nfoo");
     assertTrue(br.readLine().equals("foo"));
     assertEquals('b', br.lookAhead());
@@ -158,16 +150,16 @@ public void testReadLine() throws Exception {
     assertTrue(br.readLine().equals("foo"));
     assertTrue(br.readLine() == null);
   }
-
+
   public void testSkip0() throws Exception {
-
+
     ExtendedBufferedReader br = getEBR("");
     assertEquals(0, br.skip(0));
     assertEquals(0, br.skip(1));
-
+
     br = getEBR("");
     assertEquals(0, br.skip(1));
-
+
getEBR("abcdefg"); assertEquals(0, br.skip(0)); assertEquals('a', br.lookAhead()); @@ -180,13 +172,13 @@ public void testSkip0() throws Exception { assertEquals('f', br.lookAhead()); assertEquals(2, br.skip(5)); assertTrue(br.readLine() == null); - + br = getEBR("12345"); assertEquals(5, br.skip(5)); - assertTrue (br.lookAhead() == ExtendedBufferedReader.END_OF_STREAM); + assertTrue(br.lookAhead() == ExtendedBufferedReader.END_OF_STREAM); } - - public void testSkipUntil() throws Exception { + + public void testSkipUntil() throws Exception { ExtendedBufferedReader br = getEBR(""); assertEquals(0, br.skipUntil(';')); br = getEBR("ABCDEF,GHL,,MN"); @@ -199,7 +191,7 @@ public void testSkipUntil() throws Exception { br.skip(1); assertEquals(2, br.skipUntil(',')); } - + public void testReadUntil() throws Exception { ExtendedBufferedReader br = getEBR(""); assertTrue(br.readUntil(';').equals("")); @@ -213,7 +205,7 @@ public void testReadUntil() throws Exception { br.skip(1); assertTrue(br.readUntil(',').equals("MN")); } - + private ExtendedBufferedReader getEBR(String s) { return new ExtendedBufferedReader(new StringReader(s)); } diff --git a/solr/core/src/test/org/apache/solr/legacy/TestLegacyField.java b/solr/core/src/test/org/apache/solr/legacy/TestLegacyField.java index fec08ee935a..69348fcee6e 100644 --- a/solr/core/src/test/org/apache/solr/legacy/TestLegacyField.java +++ b/solr/core/src/test/org/apache/solr/legacy/TestLegacyField.java @@ -17,7 +17,6 @@ package org.apache.solr.legacy; import java.io.StringReader; - import org.apache.lucene.analysis.CannedTokenStream; import org.apache.lucene.analysis.Token; import org.apache.lucene.document.Field; @@ -25,12 +24,13 @@ import org.apache.solr.SolrTestCase; public class TestLegacyField extends SolrTestCase { - + public void testLegacyDoubleField() throws Exception { - Field fields[] = new Field[] { - new LegacyDoubleField("foo", 5d, Field.Store.NO), - new LegacyDoubleField("foo", 5d, Field.Store.YES) - }; + Field fields[] = + new Field[] { + new LegacyDoubleField("foo", 5d, Field.Store.NO), + new LegacyDoubleField("foo", 5d, Field.Store.YES) + }; for (Field field : fields) { trySetByteValue(field); @@ -44,16 +44,17 @@ public void testLegacyDoubleField() throws Exception { trySetShortValue(field); trySetStringValue(field); trySetTokenStreamValue(field); - + assertEquals(6d, field.numericValue().doubleValue(), 0.0d); } } - + public void testLegacyFloatField() throws Exception { - Field fields[] = new Field[] { - new LegacyFloatField("foo", 5f, Field.Store.NO), - new LegacyFloatField("foo", 5f, Field.Store.YES) - }; + Field fields[] = + new Field[] { + new LegacyFloatField("foo", 5f, Field.Store.NO), + new LegacyFloatField("foo", 5f, Field.Store.YES) + }; for (Field field : fields) { trySetByteValue(field); @@ -67,16 +68,17 @@ public void testLegacyFloatField() throws Exception { trySetShortValue(field); trySetStringValue(field); trySetTokenStreamValue(field); - + assertEquals(6f, field.numericValue().floatValue(), 0.0f); } } - + public void testLegacyIntField() throws Exception { - Field fields[] = new Field[] { - new LegacyIntField("foo", 5, Field.Store.NO), - new LegacyIntField("foo", 5, Field.Store.YES) - }; + Field fields[] = + new Field[] { + new LegacyIntField("foo", 5, Field.Store.NO), + new LegacyIntField("foo", 5, Field.Store.YES) + }; for (Field field : fields) { trySetByteValue(field); @@ -90,16 +92,17 @@ public void testLegacyIntField() throws Exception { trySetShortValue(field); trySetStringValue(field); trySetTokenStreamValue(field); 
- + assertEquals(6, field.numericValue().intValue()); } } - + public void testLegacyLongField() throws Exception { - Field fields[] = new Field[] { - new LegacyLongField("foo", 5L, Field.Store.NO), - new LegacyLongField("foo", 5L, Field.Store.YES) - }; + Field fields[] = + new Field[] { + new LegacyLongField("foo", 5L, Field.Store.NO), + new LegacyLongField("foo", 5L, Field.Store.YES) + }; for (Field field : fields) { trySetByteValue(field); @@ -113,74 +116,96 @@ public void testLegacyLongField() throws Exception { trySetShortValue(field); trySetStringValue(field); trySetTokenStreamValue(field); - + assertEquals(6L, field.numericValue().longValue()); } } - + private void trySetByteValue(Field f) { - expectThrows(IllegalArgumentException.class, () -> { - f.setByteValue((byte) 10); - }); + expectThrows( + IllegalArgumentException.class, + () -> { + f.setByteValue((byte) 10); + }); } private void trySetBytesValue(Field f) { - expectThrows(IllegalArgumentException.class, () -> { - f.setBytesValue(new byte[] { 5, 5 }); - }); + expectThrows( + IllegalArgumentException.class, + () -> { + f.setBytesValue(new byte[] {5, 5}); + }); } - + private void trySetBytesRefValue(Field f) { - expectThrows(IllegalArgumentException.class, () -> { - f.setBytesValue(new BytesRef("bogus")); - }); + expectThrows( + IllegalArgumentException.class, + () -> { + f.setBytesValue(new BytesRef("bogus")); + }); } - + private void trySetDoubleValue(Field f) { - expectThrows(IllegalArgumentException.class, () -> { - f.setDoubleValue(Double.MAX_VALUE); - }); + expectThrows( + IllegalArgumentException.class, + () -> { + f.setDoubleValue(Double.MAX_VALUE); + }); } - + private void trySetIntValue(Field f) { - expectThrows(IllegalArgumentException.class, () -> { - f.setIntValue(Integer.MAX_VALUE); - }); + expectThrows( + IllegalArgumentException.class, + () -> { + f.setIntValue(Integer.MAX_VALUE); + }); } - + private void trySetLongValue(Field f) { - expectThrows(IllegalArgumentException.class, () -> { - f.setLongValue(Long.MAX_VALUE); - }); + expectThrows( + IllegalArgumentException.class, + () -> { + f.setLongValue(Long.MAX_VALUE); + }); } - + private void trySetFloatValue(Field f) { - expectThrows(IllegalArgumentException.class, () -> { - f.setFloatValue(Float.MAX_VALUE); - }); + expectThrows( + IllegalArgumentException.class, + () -> { + f.setFloatValue(Float.MAX_VALUE); + }); } - + private void trySetReaderValue(Field f) { - expectThrows(IllegalArgumentException.class, () -> { - f.setReaderValue(new StringReader("BOO!")); - }); + expectThrows( + IllegalArgumentException.class, + () -> { + f.setReaderValue(new StringReader("BOO!")); + }); } - + private void trySetShortValue(Field f) { - expectThrows(IllegalArgumentException.class, () -> { - f.setShortValue(Short.MAX_VALUE); - }); + expectThrows( + IllegalArgumentException.class, + () -> { + f.setShortValue(Short.MAX_VALUE); + }); } - + private void trySetStringValue(Field f) { - expectThrows(IllegalArgumentException.class, () -> { - f.setStringValue("BOO!"); - }); + expectThrows( + IllegalArgumentException.class, + () -> { + f.setStringValue("BOO!"); + }); } - + private void trySetTokenStreamValue(Field f) { - expectThrows(IllegalArgumentException.class, () -> { - f.setTokenStream(new CannedTokenStream(new Token("foo", 0, 3))); - }); + expectThrows( + IllegalArgumentException.class, + () -> { + f.setTokenStream(new CannedTokenStream(new Token("foo", 0, 3))); + }); } } diff --git a/solr/core/src/test/org/apache/solr/legacy/TestLegacyFieldReuse.java 
b/solr/core/src/test/org/apache/solr/legacy/TestLegacyFieldReuse.java index af8dc7c99ac..746a6aa4dda 100644 --- a/solr/core/src/test/org/apache/solr/legacy/TestLegacyFieldReuse.java +++ b/solr/core/src/test/org/apache/solr/legacy/TestLegacyFieldReuse.java @@ -16,9 +16,7 @@ */ package org.apache.solr.legacy; - import java.io.IOException; - import org.apache.lucene.analysis.BaseTokenStreamTestCase; import org.apache.lucene.analysis.CannedTokenStream; import org.apache.lucene.analysis.Token; @@ -28,14 +26,16 @@ /** test tokenstream reuse by DefaultIndexingChain */ public class TestLegacyFieldReuse extends BaseTokenStreamTestCase { - + public void testNumericReuse() throws IOException { LegacyIntField legacyIntField = new LegacyIntField("foo", 5, Field.Store.NO); - + // passing null TokenStream ts = legacyIntField.tokenStream(null, null); assertTrue(ts instanceof LegacyNumericTokenStream); - assertEquals(LegacyNumericUtils.PRECISION_STEP_DEFAULT_32, ((LegacyNumericTokenStream)ts).getPrecisionStep()); + assertEquals( + LegacyNumericUtils.PRECISION_STEP_DEFAULT_32, + ((LegacyNumericTokenStream) ts).getPrecisionStep()); assertNumericContents(5, ts); // now reuse previous stream @@ -43,14 +43,14 @@ public void testNumericReuse() throws IOException { TokenStream ts2 = legacyIntField.tokenStream(null, ts); assertSame(ts, ts2); assertNumericContents(20, ts); - + // pass a bogus stream and ensure it's still ok legacyIntField = new LegacyIntField("foo", 2343, Field.Store.NO); TokenStream bogus = new CannedTokenStream(new Token("bogus", 0, 5)); ts = legacyIntField.tokenStream(null, bogus); assertNotSame(bogus, ts); assertNumericContents(2343, ts); - + // pass another bogus stream (numeric, but different precision step!) legacyIntField = new LegacyIntField("foo", 42, Field.Store.NO); assert 3 != LegacyNumericUtils.PRECISION_STEP_DEFAULT; @@ -59,7 +59,7 @@ public void testNumericReuse() throws IOException { assertNotSame(bogus, ts); assertNumericContents(42, ts); } - + private void assertNumericContents(int value, TokenStream ts) throws IOException { assertTrue(ts instanceof LegacyNumericTokenStream); LegacyNumericTermAttribute numericAtt = ts.getAttribute(LegacyNumericTermAttribute.class); diff --git a/solr/core/src/test/org/apache/solr/legacy/TestLegacyNumericUtils.java b/solr/core/src/test/org/apache/solr/legacy/TestLegacyNumericUtils.java index aa350e9bbac..d10146326ef 100644 --- a/solr/core/src/test/org/apache/solr/legacy/TestLegacyNumericUtils.java +++ b/solr/core/src/test/org/apache/solr/legacy/TestLegacyNumericUtils.java @@ -16,12 +16,10 @@ */ package org.apache.solr.legacy; - import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.Random; - import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.LongBitSet; @@ -34,15 +32,20 @@ public void testLongConversionAndOrdering() throws Exception { // generate a series of encoded longs, each numerical one bigger than the one before BytesRefBuilder last = new BytesRefBuilder(); BytesRefBuilder act = new BytesRefBuilder(); - for (long l=-100000L; l<100000L; l++) { + for (long l = -100000L; l < 100000L; l++) { LegacyNumericUtils.longToPrefixCoded(l, 0, act); - if (last!=null) { + if (last != null) { // test if smaller - assertTrue("actual bigger than last (BytesRef)", last.get().compareTo(act.get()) < 0 ); - assertTrue("actual bigger than last (as String)", last.get().utf8ToString().compareTo(act.get().utf8ToString()) < 0 ); + assertTrue("actual bigger 
than last (BytesRef)", last.get().compareTo(act.get()) < 0); + assertTrue( + "actual bigger than last (as String)", + last.get().utf8ToString().compareTo(act.get().utf8ToString()) < 0); } // test is back and forward conversion works - assertEquals("forward and back conversion should generate same long", l, LegacyNumericUtils.prefixCodedToLong(act.get())); + assertEquals( + "forward and back conversion should generate same long", + l, + LegacyNumericUtils.prefixCodedToLong(act.get())); // next step last.copyBytes(act); } @@ -52,112 +55,183 @@ public void testIntConversionAndOrdering() throws Exception { // generate a series of encoded ints, each numerical one bigger than the one before BytesRefBuilder act = new BytesRefBuilder(); BytesRefBuilder last = new BytesRefBuilder(); - for (int i=-100000; i<100000; i++) { + for (int i = -100000; i < 100000; i++) { LegacyNumericUtils.intToPrefixCoded(i, 0, act); - if (last!=null) { + if (last != null) { // test if smaller - assertTrue("actual bigger than last (BytesRef)", last.get().compareTo(act.get()) < 0 ); - assertTrue("actual bigger than last (as String)", last.get().utf8ToString().compareTo(act.get().utf8ToString()) < 0 ); + assertTrue("actual bigger than last (BytesRef)", last.get().compareTo(act.get()) < 0); + assertTrue( + "actual bigger than last (as String)", + last.get().utf8ToString().compareTo(act.get().utf8ToString()) < 0); } // test is back and forward conversion works - assertEquals("forward and back conversion should generate same int", i, LegacyNumericUtils.prefixCodedToInt(act.get())); + assertEquals( + "forward and back conversion should generate same int", + i, + LegacyNumericUtils.prefixCodedToInt(act.get())); // next step last.copyBytes(act.get()); } } public void testLongSpecialValues() throws Exception { - long[] vals=new long[]{ - Long.MIN_VALUE, Long.MIN_VALUE+1, Long.MIN_VALUE+2, -5003400000000L, - -4000L, -3000L, -2000L, -1000L, -1L, 0L, 1L, 10L, 300L, 50006789999999999L, Long.MAX_VALUE-2, Long.MAX_VALUE-1, Long.MAX_VALUE - }; + long[] vals = + new long[] { + Long.MIN_VALUE, + Long.MIN_VALUE + 1, + Long.MIN_VALUE + 2, + -5003400000000L, + -4000L, + -3000L, + -2000L, + -1000L, + -1L, + 0L, + 1L, + 10L, + 300L, + 50006789999999999L, + Long.MAX_VALUE - 2, + Long.MAX_VALUE - 1, + Long.MAX_VALUE + }; BytesRefBuilder[] prefixVals = new BytesRefBuilder[vals.length]; - - for (int i=0; i { - LegacyNumericUtils.prefixCodedToInt(prefixVals[index].get()); - }); + expectThrows( + NumberFormatException.class, + () -> { + LegacyNumericUtils.prefixCodedToInt(prefixVals[index].get()); + }); } - + // check sort order (prefixVals should be ascending) - for (int i=1; i { - LegacyNumericUtils.prefixCodedToLong(prefixVals[index].get()); - }); + expectThrows( + NumberFormatException.class, + () -> { + LegacyNumericUtils.prefixCodedToLong(prefixVals[index].get()); + }); } - + // check sort order (prefixVals should be ascending) - for (int i=1; i plusInf); + assertTrue( + "Double not sorted correctly: " + + nan + + ", long repr: " + + sortable + + ", positive inf.: " + + plusInf, + sortable > plusInf); } } - + public void testFloats() throws Exception { - float[] vals=new float[]{ - Float.NEGATIVE_INFINITY, -2.3E25f, -1.0E15f, -1.0f, -1.0E-1f, -1.0E-2f, -0.0f, - +0.0f, 1.0E-2f, 1.0E-1f, 1.0f, 1.0E15f, 2.3E25f, Float.POSITIVE_INFINITY, Float.NaN - }; - int[] intVals=new int[vals.length]; - + float[] vals = + new float[] { + Float.NEGATIVE_INFINITY, + -2.3E25f, + -1.0E15f, + -1.0f, + -1.0E-1f, + -1.0E-2f, + -0.0f, + +0.0f, + 1.0E-2f, + 1.0E-1f, + 
1.0f, + 1.0E15f, + 2.3E25f, + Float.POSITIVE_INFINITY, + Float.NaN + }; + int[] intVals = new int[vals.length]; + // check forward and back conversion - for (int i=0; i plusInf); + assertTrue( + "Float not sorted correctly: " + + nan + + ", int repr: " + + sortable + + ", positive inf.: " + + plusInf, + sortable > plusInf); } } - // INFO: Tests for trieCodeLong()/trieCodeInt() not needed because implicitely tested by range filter tests - + // INFO: Tests for trieCodeLong()/trieCodeInt() not needed because implicitly tested by range + // filter tests + /** Note: The neededBounds Iterable must be unsigned (easier understanding what's happening) */ - private void assertLongRangeSplit(final long lower, final long upper, int precisionStep, - final boolean useBitSet, final Iterable expectedBounds, final Iterable expectedShifts - ) { + private void assertLongRangeSplit( + final long lower, + final long upper, + int precisionStep, + final boolean useBitSet, + final Iterable expectedBounds, + final Iterable expectedShifts) { // Cannot use FixedBitSet since the range could be long: - final LongBitSet bits=useBitSet ? new LongBitSet(upper-lower+1) : null; + final LongBitSet bits = useBitSet ? new LongBitSet(upper - lower + 1) : null; final Iterator neededBounds = (expectedBounds == null) ? null : expectedBounds.iterator(); - final Iterator neededShifts = (expectedShifts == null) ? null : expectedShifts.iterator(); - - LegacyNumericUtils.splitLongRange(new LegacyNumericUtils.LongRangeBuilder() { - @Override - public void addRange(long min, long max, int shift) { - assertTrue("min, max should be inside bounds", min >= lower && min <= upper && max >= lower && max <= upper); - if (useBitSet) for (long l = min; l <= max; l++) { - assertFalse("ranges should not overlap", bits.getAndSet(l - lower)); - // extra exit condition to prevent overflow on MAX_VALUE - if (l == max) break; - } - if (neededBounds == null || neededShifts == null) - return; - // make unsigned longs for easier display and understanding - min ^= 0x8000000000000000L; - max ^= 0x8000000000000000L; - //System.out.println("0x"+Long.toHexString(min>>>shift)+"L,0x"+Long.toHexString(max>>>shift)+"L)/*shift="+shift+"*/,"); - assertEquals("shift", neededShifts.next().intValue(), shift); - assertEquals("inner min bound", neededBounds.next().longValue(), min >>> shift); - assertEquals("inner max bound", neededBounds.next().longValue(), max >>> shift); - } - }, precisionStep, lower, upper); - + final Iterator neededShifts = + (expectedShifts == null) ?
null : expectedShifts.iterator(); + + LegacyNumericUtils.splitLongRange( + new LegacyNumericUtils.LongRangeBuilder() { + @Override + public void addRange(long min, long max, int shift) { + assertTrue( + "min, max should be inside bounds", + min >= lower && min <= upper && max >= lower && max <= upper); + if (useBitSet) + for (long l = min; l <= max; l++) { + assertFalse("ranges should not overlap", bits.getAndSet(l - lower)); + // extra exit condition to prevent overflow on MAX_VALUE + if (l == max) break; + } + if (neededBounds == null || neededShifts == null) return; + // make unsigned longs for easier display and understanding + min ^= 0x8000000000000000L; + max ^= 0x8000000000000000L; + // System.out.println("0x"+Long.toHexString(min>>>shift)+"L,0x"+Long.toHexString(max>>>shift)+"L)/*shift="+shift+"*/,"); + assertEquals("shift", neededShifts.next().intValue(), shift); + assertEquals("inner min bound", neededBounds.next().longValue(), min >>> shift); + assertEquals("inner max bound", neededBounds.next().longValue(), max >>> shift); + } + }, + precisionStep, + lower, + upper); + if (useBitSet) { // after flipping all bits in the range, the cardinality should be zero - bits.flip(0,upper-lower+1); + bits.flip(0, upper - lower + 1); assertEquals("The sub-range concenated should match the whole range", 0, bits.cardinality()); } } - + /** LUCENE-2541: LegacyNumericRangeQuery errors with endpoints near long min and max values */ public void testLongExtremeValues() throws Exception { // upper end extremes - assertLongRangeSplit(Long.MAX_VALUE, Long.MAX_VALUE, 1, true, Arrays.asList( - 0xffffffffffffffffL,0xffffffffffffffffL - ), Arrays.asList( - 0 - )); - assertLongRangeSplit(Long.MAX_VALUE, Long.MAX_VALUE, 2, true, Arrays.asList( - 0xffffffffffffffffL,0xffffffffffffffffL - ), Arrays.asList( - 0 - )); - assertLongRangeSplit(Long.MAX_VALUE, Long.MAX_VALUE, 4, true, Arrays.asList( - 0xffffffffffffffffL,0xffffffffffffffffL - ), Arrays.asList( - 0 - )); - assertLongRangeSplit(Long.MAX_VALUE, Long.MAX_VALUE, 6, true, Arrays.asList( - 0xffffffffffffffffL,0xffffffffffffffffL - ), Arrays.asList( - 0 - )); - assertLongRangeSplit(Long.MAX_VALUE, Long.MAX_VALUE, 8, true, Arrays.asList( - 0xffffffffffffffffL,0xffffffffffffffffL - ), Arrays.asList( - 0 - )); - assertLongRangeSplit(Long.MAX_VALUE, Long.MAX_VALUE, 64, true, Arrays.asList( - 0xffffffffffffffffL,0xffffffffffffffffL - ), Arrays.asList( - 0 - )); - - assertLongRangeSplit(Long.MAX_VALUE-0xfL, Long.MAX_VALUE, 4, true, Arrays.asList( - 0xfffffffffffffffL,0xfffffffffffffffL - ), Arrays.asList( - 4 - )); - assertLongRangeSplit(Long.MAX_VALUE-0x10L, Long.MAX_VALUE, 4, true, Arrays.asList( - 0xffffffffffffffefL,0xffffffffffffffefL, - 0xfffffffffffffffL,0xfffffffffffffffL - ), Arrays.asList( - 0, 4 - )); + assertLongRangeSplit( + Long.MAX_VALUE, + Long.MAX_VALUE, + 1, + true, + Arrays.asList(0xffffffffffffffffL, 0xffffffffffffffffL), + Arrays.asList(0)); + assertLongRangeSplit( + Long.MAX_VALUE, + Long.MAX_VALUE, + 2, + true, + Arrays.asList(0xffffffffffffffffL, 0xffffffffffffffffL), + Arrays.asList(0)); + assertLongRangeSplit( + Long.MAX_VALUE, + Long.MAX_VALUE, + 4, + true, + Arrays.asList(0xffffffffffffffffL, 0xffffffffffffffffL), + Arrays.asList(0)); + assertLongRangeSplit( + Long.MAX_VALUE, + Long.MAX_VALUE, + 6, + true, + Arrays.asList(0xffffffffffffffffL, 0xffffffffffffffffL), + Arrays.asList(0)); + assertLongRangeSplit( + Long.MAX_VALUE, + Long.MAX_VALUE, + 8, + true, + Arrays.asList(0xffffffffffffffffL, 0xffffffffffffffffL), + Arrays.asList(0)); 
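+    // precisionStep=64 disables range splitting, so even this single-value range at the upper
+    // extreme is expected to come back as one bound pair at shift 0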
+ assertLongRangeSplit( + Long.MAX_VALUE, + Long.MAX_VALUE, + 64, + true, + Arrays.asList(0xffffffffffffffffL, 0xffffffffffffffffL), + Arrays.asList(0)); + + assertLongRangeSplit( + Long.MAX_VALUE - 0xfL, + Long.MAX_VALUE, + 4, + true, + Arrays.asList(0xfffffffffffffffL, 0xfffffffffffffffL), + Arrays.asList(4)); + assertLongRangeSplit( + Long.MAX_VALUE - 0x10L, + Long.MAX_VALUE, + 4, + true, + Arrays.asList( + 0xffffffffffffffefL, 0xffffffffffffffefL, + 0xfffffffffffffffL, 0xfffffffffffffffL), + Arrays.asList(0, 4)); // lower end extremes - assertLongRangeSplit(Long.MIN_VALUE, Long.MIN_VALUE, 1, true, Arrays.asList( - 0x0000000000000000L,0x0000000000000000L - ), Arrays.asList( - 0 - )); - assertLongRangeSplit(Long.MIN_VALUE, Long.MIN_VALUE, 2, true, Arrays.asList( - 0x0000000000000000L,0x0000000000000000L - ), Arrays.asList( - 0 - )); - assertLongRangeSplit(Long.MIN_VALUE, Long.MIN_VALUE, 4, true, Arrays.asList( - 0x0000000000000000L,0x0000000000000000L - ), Arrays.asList( - 0 - )); - assertLongRangeSplit(Long.MIN_VALUE, Long.MIN_VALUE, 6, true, Arrays.asList( - 0x0000000000000000L,0x0000000000000000L - ), Arrays.asList( - 0 - )); - assertLongRangeSplit(Long.MIN_VALUE, Long.MIN_VALUE, 8, true, Arrays.asList( - 0x0000000000000000L,0x0000000000000000L - ), Arrays.asList( - 0 - )); - assertLongRangeSplit(Long.MIN_VALUE, Long.MIN_VALUE, 64, true, Arrays.asList( - 0x0000000000000000L,0x0000000000000000L - ), Arrays.asList( - 0 - )); - - assertLongRangeSplit(Long.MIN_VALUE, Long.MIN_VALUE+0xfL, 4, true, Arrays.asList( - 0x000000000000000L,0x000000000000000L - ), Arrays.asList( - 4 - )); - assertLongRangeSplit(Long.MIN_VALUE, Long.MIN_VALUE+0x10L, 4, true, Arrays.asList( - 0x0000000000000010L,0x0000000000000010L, - 0x000000000000000L,0x000000000000000L - ), Arrays.asList( - 0, 4 - )); + assertLongRangeSplit( + Long.MIN_VALUE, + Long.MIN_VALUE, + 1, + true, + Arrays.asList(0x0000000000000000L, 0x0000000000000000L), + Arrays.asList(0)); + assertLongRangeSplit( + Long.MIN_VALUE, + Long.MIN_VALUE, + 2, + true, + Arrays.asList(0x0000000000000000L, 0x0000000000000000L), + Arrays.asList(0)); + assertLongRangeSplit( + Long.MIN_VALUE, + Long.MIN_VALUE, + 4, + true, + Arrays.asList(0x0000000000000000L, 0x0000000000000000L), + Arrays.asList(0)); + assertLongRangeSplit( + Long.MIN_VALUE, + Long.MIN_VALUE, + 6, + true, + Arrays.asList(0x0000000000000000L, 0x0000000000000000L), + Arrays.asList(0)); + assertLongRangeSplit( + Long.MIN_VALUE, + Long.MIN_VALUE, + 8, + true, + Arrays.asList(0x0000000000000000L, 0x0000000000000000L), + Arrays.asList(0)); + assertLongRangeSplit( + Long.MIN_VALUE, + Long.MIN_VALUE, + 64, + true, + Arrays.asList(0x0000000000000000L, 0x0000000000000000L), + Arrays.asList(0)); + + assertLongRangeSplit( + Long.MIN_VALUE, + Long.MIN_VALUE + 0xfL, + 4, + true, + Arrays.asList(0x000000000000000L, 0x000000000000000L), + Arrays.asList(4)); + assertLongRangeSplit( + Long.MIN_VALUE, + Long.MIN_VALUE + 0x10L, + 4, + true, + Arrays.asList( + 0x0000000000000010L, 0x0000000000000010L, + 0x000000000000000L, 0x000000000000000L), + Arrays.asList(0, 4)); } - + public void testRandomSplit() throws Exception { long num = (long) atLeast(10); - for (long i=0; i < num; i++) { + for (long i = 0; i < num; i++) { executeOneRandomSplit(random()); } } - + private void executeOneRandomSplit(final Random random) throws Exception { long lower = randomLong(random); - long len = random.nextInt(16384*1024); // not too large bitsets, else OOME! 
+ long len = random.nextInt(16384 * 1024); // not too large bitsets, else OOME! while (lower + len < lower) { // overflow lower >>= 1; } assertLongRangeSplit(lower, lower + len, random.nextInt(64) + 1, true, null, null); } - + private long randomLong(final Random random) { long val; - switch(random.nextInt(4)) { + switch (random.nextInt(4)) { case 0: - val = 1L << (random.nextInt(63)); // patterns like 0x000000100000 (-1 yields patterns like 0x0000fff) + val = + 1L + << (random.nextInt( + 63)); // patterns like 0x000000100000 (-1 yields patterns like 0x0000fff) break; case 1: val = -1L << (random.nextInt(63)); // patterns like 0xfffff00000 @@ -375,196 +526,196 @@ private long randomLong(final Random random) { val = random.nextLong(); } - val += random.nextInt(5)-2; + val += random.nextInt(5) - 2; if (random.nextBoolean()) { - if (random.nextBoolean()) val += random.nextInt(100)-50; + if (random.nextBoolean()) val += random.nextInt(100) - 50; if (random.nextBoolean()) val = ~val; - if (random.nextBoolean()) val = val<<1; - if (random.nextBoolean()) val = val>>>1; + if (random.nextBoolean()) val = val << 1; + if (random.nextBoolean()) val = val >>> 1; } return val; } - + public void testSplitLongRange() throws Exception { // a hard-coded "standard" range - assertLongRangeSplit(-5000L, 9500L, 4, true, Arrays.asList( - 0x7fffffffffffec78L,0x7fffffffffffec7fL, - 0x8000000000002510L,0x800000000000251cL, - 0x7fffffffffffec8L, 0x7fffffffffffecfL, - 0x800000000000250L, 0x800000000000250L, - 0x7fffffffffffedL, 0x7fffffffffffefL, - 0x80000000000020L, 0x80000000000024L, - 0x7ffffffffffffL, 0x8000000000001L - ), Arrays.asList( - 0, 0, - 4, 4, - 8, 8, - 12 - )); - + assertLongRangeSplit( + -5000L, + 9500L, + 4, + true, + Arrays.asList( + 0x7fffffffffffec78L, 0x7fffffffffffec7fL, + 0x8000000000002510L, 0x800000000000251cL, + 0x7fffffffffffec8L, 0x7fffffffffffecfL, + 0x800000000000250L, 0x800000000000250L, + 0x7fffffffffffedL, 0x7fffffffffffefL, + 0x80000000000020L, 0x80000000000024L, + 0x7ffffffffffffL, 0x8000000000001L), + Arrays.asList(0, 0, 4, 4, 8, 8, 12)); + // the same with no range splitting - assertLongRangeSplit(-5000L, 9500L, 64, true, Arrays.asList( - 0x7fffffffffffec78L,0x800000000000251cL - ), Arrays.asList( - 0 - )); - + assertLongRangeSplit( + -5000L, + 9500L, + 64, + true, + Arrays.asList(0x7fffffffffffec78L, 0x800000000000251cL), + Arrays.asList(0)); + // this tests optimized range splitting, if one of the inner bounds // is also the bound of the next lower precision, it should be used completely - assertLongRangeSplit(0L, 1024L+63L, 4, true, Arrays.asList( - 0x800000000000040L, 0x800000000000043L, - 0x80000000000000L, 0x80000000000003L - ), Arrays.asList( - 4, 8 - )); - - // the full long range should only consist of a lowest precision range; no bitset testing here, as too much memory needed :-) - assertLongRangeSplit(Long.MIN_VALUE, Long.MAX_VALUE, 8, false, Arrays.asList( - 0x00L,0xffL - ), Arrays.asList( - 56 - )); + assertLongRangeSplit( + 0L, + 1024L + 63L, + 4, + true, + Arrays.asList( + 0x800000000000040L, 0x800000000000043L, + 0x80000000000000L, 0x80000000000003L), + Arrays.asList(4, 8)); + + // the full long range should only consist of a lowest precision range; no bitset testing here, + // as too much memory needed :-) + assertLongRangeSplit( + Long.MIN_VALUE, Long.MAX_VALUE, 8, false, Arrays.asList(0x00L, 0xffL), Arrays.asList(56)); // the same with precisionStep=4 - assertLongRangeSplit(Long.MIN_VALUE, Long.MAX_VALUE, 4, false, Arrays.asList( - 0x0L,0xfL - ), Arrays.asList( 
- 60 - )); + assertLongRangeSplit( + Long.MIN_VALUE, Long.MAX_VALUE, 4, false, Arrays.asList(0x0L, 0xfL), Arrays.asList(60)); // the same with precisionStep=2 - assertLongRangeSplit(Long.MIN_VALUE, Long.MAX_VALUE, 2, false, Arrays.asList( - 0x0L,0x3L - ), Arrays.asList( - 62 - )); + assertLongRangeSplit( + Long.MIN_VALUE, Long.MAX_VALUE, 2, false, Arrays.asList(0x0L, 0x3L), Arrays.asList(62)); // the same with precisionStep=1 - assertLongRangeSplit(Long.MIN_VALUE, Long.MAX_VALUE, 1, false, Arrays.asList( - 0x0L,0x1L - ), Arrays.asList( - 63 - )); + assertLongRangeSplit( + Long.MIN_VALUE, Long.MAX_VALUE, 1, false, Arrays.asList(0x0L, 0x1L), Arrays.asList(63)); // a inverse range should produce no sub-ranges - assertLongRangeSplit(9500L, -5000L, 4, false, Collections.emptyList(), Collections.emptyList()); + assertLongRangeSplit( + 9500L, -5000L, 4, false, Collections.emptyList(), Collections.emptyList()); // a 0-length range should reproduce the range itself - assertLongRangeSplit(9500L, 9500L, 4, false, Arrays.asList( - 0x800000000000251cL,0x800000000000251cL - ), Arrays.asList( - 0 - )); + assertLongRangeSplit( + 9500L, + 9500L, + 4, + false, + Arrays.asList(0x800000000000251cL, 0x800000000000251cL), + Arrays.asList(0)); } /** Note: The neededBounds Iterable must be unsigned (easier understanding what's happening) */ - private void assertIntRangeSplit(final int lower, final int upper, int precisionStep, - final boolean useBitSet, final Iterable expectedBounds, final Iterable expectedShifts - ) { - final FixedBitSet bits=useBitSet ? new FixedBitSet(upper-lower+1) : null; - final Iterator neededBounds = (expectedBounds == null) ? null : expectedBounds.iterator(); - final Iterator neededShifts = (expectedShifts == null) ? null : expectedShifts.iterator(); - - LegacyNumericUtils.splitIntRange(new LegacyNumericUtils.IntRangeBuilder() { - @Override - public void addRange(int min, int max, int shift) { - assertTrue("min, max should be inside bounds", min >= lower && min <= upper && max >= lower && max <= upper); - if (useBitSet) for (int i = min; i <= max; i++) { - assertFalse("ranges should not overlap", bits.getAndSet(i - lower)); - // extra exit condition to prevent overflow on MAX_VALUE - if (i == max) break; - } - if (neededBounds == null) - return; - // make unsigned ints for easier display and understanding - min ^= 0x80000000; - max ^= 0x80000000; - //System.out.println("0x"+Integer.toHexString(min>>>shift)+",0x"+Integer.toHexString(max>>>shift)+")/*shift="+shift+"*/,"); - assertEquals("shift", neededShifts.next().intValue(), shift); - assertEquals("inner min bound", neededBounds.next().intValue(), min >>> shift); - assertEquals("inner max bound", neededBounds.next().intValue(), max >>> shift); - } - }, precisionStep, lower, upper); - + private void assertIntRangeSplit( + final int lower, + final int upper, + int precisionStep, + final boolean useBitSet, + final Iterable expectedBounds, + final Iterable expectedShifts) { + final FixedBitSet bits = useBitSet ? new FixedBitSet(upper - lower + 1) : null; + final Iterator neededBounds = + (expectedBounds == null) ? null : expectedBounds.iterator(); + final Iterator neededShifts = + (expectedShifts == null) ? 
null : expectedShifts.iterator(); + + LegacyNumericUtils.splitIntRange( + new LegacyNumericUtils.IntRangeBuilder() { + @Override + public void addRange(int min, int max, int shift) { + assertTrue( + "min, max should be inside bounds", + min >= lower && min <= upper && max >= lower && max <= upper); + if (useBitSet) + for (int i = min; i <= max; i++) { + assertFalse("ranges should not overlap", bits.getAndSet(i - lower)); + // extra exit condition to prevent overflow on MAX_VALUE + if (i == max) break; + } + if (neededBounds == null) return; + // make unsigned ints for easier display and understanding + min ^= 0x80000000; + max ^= 0x80000000; + // System.out.println("0x"+Integer.toHexString(min>>>shift)+",0x"+Integer.toHexString(max>>>shift)+")/*shift="+shift+"*/,"); + assertEquals("shift", neededShifts.next().intValue(), shift); + assertEquals("inner min bound", neededBounds.next().intValue(), min >>> shift); + assertEquals("inner max bound", neededBounds.next().intValue(), max >>> shift); + } + }, + precisionStep, + lower, + upper); + if (useBitSet) { // after flipping all bits in the range, the cardinality should be zero - bits.flip(0, upper-lower+1); + bits.flip(0, upper - lower + 1); assertEquals("The sub-range concenated should match the whole range", 0, bits.cardinality()); } } - + public void testSplitIntRange() throws Exception { // a hard-coded "standard" range - assertIntRangeSplit(-5000, 9500, 4, true, Arrays.asList( - 0x7fffec78,0x7fffec7f, - 0x80002510,0x8000251c, - 0x7fffec8, 0x7fffecf, - 0x8000250, 0x8000250, - 0x7fffed, 0x7fffef, - 0x800020, 0x800024, - 0x7ffff, 0x80001 - ), Arrays.asList( - 0, 0, - 4, 4, - 8, 8, - 12 - )); - + assertIntRangeSplit( + -5000, + 9500, + 4, + true, + Arrays.asList( + 0x7fffec78, 0x7fffec7f, + 0x80002510, 0x8000251c, + 0x7fffec8, 0x7fffecf, + 0x8000250, 0x8000250, + 0x7fffed, 0x7fffef, + 0x800020, 0x800024, + 0x7ffff, 0x80001), + Arrays.asList(0, 0, 4, 4, 8, 8, 12)); + // the same with no range splitting - assertIntRangeSplit(-5000, 9500, 32, true, Arrays.asList( - 0x7fffec78,0x8000251c - ), Arrays.asList( - 0 - )); - + assertIntRangeSplit( + -5000, 9500, 32, true, Arrays.asList(0x7fffec78, 0x8000251c), Arrays.asList(0)); + // this tests optimized range splitting, if one of the inner bounds // is also the bound of the next lower precision, it should be used completely - assertIntRangeSplit(0, 1024+63, 4, true, Arrays.asList( - 0x8000040, 0x8000043, - 0x800000, 0x800003 - ), Arrays.asList( - 4, 8 - )); - - // the full int range should only consist of a lowest precision range; no bitset testing here, as too much memory needed :-) - assertIntRangeSplit(Integer.MIN_VALUE, Integer.MAX_VALUE, 8, false, Arrays.asList( - 0x00,0xff - ), Arrays.asList( - 24 - )); + assertIntRangeSplit( + 0, + 1024 + 63, + 4, + true, + Arrays.asList( + 0x8000040, 0x8000043, + 0x800000, 0x800003), + Arrays.asList(4, 8)); + + // the full int range should only consist of a lowest precision range; no bitset testing here, + // as too much memory needed :-) + assertIntRangeSplit( + Integer.MIN_VALUE, + Integer.MAX_VALUE, + 8, + false, + Arrays.asList(0x00, 0xff), + Arrays.asList(24)); // the same with precisionStep=4 - assertIntRangeSplit(Integer.MIN_VALUE, Integer.MAX_VALUE, 4, false, Arrays.asList( - 0x0,0xf - ), Arrays.asList( - 28 - )); + assertIntRangeSplit( + Integer.MIN_VALUE, Integer.MAX_VALUE, 4, false, Arrays.asList(0x0, 0xf), Arrays.asList(28)); // the same with precisionStep=2 - assertIntRangeSplit(Integer.MIN_VALUE, Integer.MAX_VALUE, 2, false, Arrays.asList( - 
0x0,0x3 - ), Arrays.asList( - 30 - )); + assertIntRangeSplit( + Integer.MIN_VALUE, Integer.MAX_VALUE, 2, false, Arrays.asList(0x0, 0x3), Arrays.asList(30)); // the same with precisionStep=1 - assertIntRangeSplit(Integer.MIN_VALUE, Integer.MAX_VALUE, 1, false, Arrays.asList( - 0x0,0x1 - ), Arrays.asList( - 31 - )); + assertIntRangeSplit( + Integer.MIN_VALUE, Integer.MAX_VALUE, 1, false, Arrays.asList(0x0, 0x1), Arrays.asList(31)); // a inverse range should produce no sub-ranges - assertIntRangeSplit(9500, -5000, 4, false, Collections.emptyList(), Collections.emptyList()); + assertIntRangeSplit( + 9500, -5000, 4, false, Collections.emptyList(), Collections.emptyList()); // a 0-length range should reproduce the range itself - assertIntRangeSplit(9500, 9500, 4, false, Arrays.asList( - 0x8000251c,0x8000251c - ), Arrays.asList( - 0 - )); + assertIntRangeSplit( + 9500, 9500, 4, false, Arrays.asList(0x8000251c, 0x8000251c), Arrays.asList(0)); } - } diff --git a/solr/core/src/test/org/apache/solr/legacy/TestLegacyTerms.java b/solr/core/src/test/org/apache/solr/legacy/TestLegacyTerms.java index 2e2f38d7632..64f2f78d1fc 100644 --- a/solr/core/src/test/org/apache/solr/legacy/TestLegacyTerms.java +++ b/solr/core/src/test/org/apache/solr/legacy/TestLegacyTerms.java @@ -33,14 +33,14 @@ public void testEmptyIntFieldMinMax() throws Exception { assertNull(LegacyNumericUtils.getMinInt(EMPTY_TERMS)); assertNull(LegacyNumericUtils.getMaxInt(EMPTY_TERMS)); } - + public void testIntFieldMinMax() throws Exception { Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir); int numDocs = atLeast(100); int minValue = Integer.MAX_VALUE; int maxValue = Integer.MIN_VALUE; - for(int i=0;iupper) { - int a=lower; lower=upper; upper=a; + int lower = random().nextInt(Integer.MAX_VALUE); + int upper = random().nextInt(Integer.MAX_VALUE); + if (lower > upper) { + int a = lower; + lower = upper; + upper = a; } - TermRangeQuery cq=TermRangeQuery.newStringRange("asc", format.format(lower), format.format(upper), true, true); - LegacyNumericRangeQuery tq= LegacyNumericRangeQuery.newIntRange("trie", lower, upper, true, true); + TermRangeQuery cq = + TermRangeQuery.newStringRange( + "asc", format.format(lower), format.format(upper), true, true); + LegacyNumericRangeQuery tq = + LegacyNumericRangeQuery.newIntRange("trie", lower, upper, true, true); TopScoreDocCollector trCollector = TopScoreDocCollector.create(1, Integer.MAX_VALUE); TopScoreDocCollector nrCollector = TopScoreDocCollector.create(1, Integer.MAX_VALUE); searcher.search(cq, trCollector); searcher.search(tq, nrCollector); TopDocs trTopDocs = trCollector.topDocs(); TopDocs nrTopDocs = nrCollector.topDocs(); - assertEquals("Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal", trTopDocs.totalHits.value, nrTopDocs.totalHits.value ); + assertEquals( + "Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal", + trTopDocs.totalHits.value, + nrTopDocs.totalHits.value); } reader.close(); directory.close(); } - } diff --git a/solr/core/src/test/org/apache/solr/legacy/TestNumericRangeQuery32.java b/solr/core/src/test/org/apache/solr/legacy/TestNumericRangeQuery32.java index 1b904fe1d93..a79548bb59b 100644 --- a/solr/core/src/test/org/apache/solr/legacy/TestNumericRangeQuery32.java +++ b/solr/core/src/test/org/apache/solr/legacy/TestNumericRangeQuery32.java @@ -16,7 +16,6 @@ */ package org.apache.solr.legacy; - import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; 
import org.apache.lucene.document.Field; @@ -44,24 +43,27 @@ public class TestNumericRangeQuery32 extends SolrTestCase { // distance of entries private static int distance; // shift the starting of the values to the left, to also have negative values: - private static final int startOffset = - 1 << 15; + private static final int startOffset = -1 << 15; // number of docs to generate for testing private static int noDocs; - + private static Directory directory = null; private static IndexReader reader = null; private static IndexSearcher searcher = null; - + @BeforeClass public static void beforeClass() throws Exception { noDocs = atLeast(4096); distance = (1 << 30) / noDocs; directory = newDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(random(), directory, - newIndexWriterConfig(new MockAnalyzer(random())) - .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000)) - .setMergePolicy(newLogMergePolicy())); - + RandomIndexWriter writer = + new RandomIndexWriter( + random(), + directory, + newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000)) + .setMergePolicy(newLogMergePolicy())); + final LegacyFieldType storedInt = new LegacyFieldType(LegacyIntField.TYPE_NOT_STORED); storedInt.setStored(true); storedInt.freeze(); @@ -89,41 +91,46 @@ public static void beforeClass() throws Exception { final LegacyFieldType unstoredInt2 = new LegacyFieldType(unstoredInt); unstoredInt2.setNumericPrecisionStep(2); - LegacyIntField - field8 = new LegacyIntField("field8", 0, storedInt8), - field4 = new LegacyIntField("field4", 0, storedInt4), - field2 = new LegacyIntField("field2", 0, storedInt2), - fieldNoTrie = new LegacyIntField("field"+Integer.MAX_VALUE, 0, storedIntNone), - ascfield8 = new LegacyIntField("ascfield8", 0, unstoredInt8), - ascfield4 = new LegacyIntField("ascfield4", 0, unstoredInt4), - ascfield2 = new LegacyIntField("ascfield2", 0, unstoredInt2); - + LegacyIntField field8 = new LegacyIntField("field8", 0, storedInt8), + field4 = new LegacyIntField("field4", 0, storedInt4), + field2 = new LegacyIntField("field2", 0, storedInt2), + fieldNoTrie = new LegacyIntField("field" + Integer.MAX_VALUE, 0, storedIntNone), + ascfield8 = new LegacyIntField("ascfield8", 0, unstoredInt8), + ascfield4 = new LegacyIntField("ascfield4", 0, unstoredInt4), + ascfield2 = new LegacyIntField("ascfield2", 0, unstoredInt2); + Document doc = new Document(); // add fields, that have a distance to test general functionality - doc.add(field8); doc.add(field4); doc.add(field2); doc.add(fieldNoTrie); - // add ascending fields with a distance of 1, beginning at -noDocs/2 to test the correct splitting of range and inclusive/exclusive - doc.add(ascfield8); doc.add(ascfield4); doc.add(ascfield2); - + doc.add(field8); + doc.add(field4); + doc.add(field2); + doc.add(fieldNoTrie); + // add ascending fields with a distance of 1, beginning at -noDocs/2 to test the correct + // splitting of range and inclusive/exclusive + doc.add(ascfield8); + doc.add(ascfield4); + doc.add(ascfield2); + // Add a series of noDocs docs with increasing int values - for (int l=0; l q = LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, true); - for (byte i=0; i<2; i++) { - TopFieldCollector collector = TopFieldCollector.create(Sort.INDEXORDER, noDocs, Integer.MAX_VALUE); + String field = "field" + precisionStep; + int count = 3000; + int lower = (distance * 3 / 2) + startOffset, upper = lower + count * distance + (distance / 3); + LegacyNumericRangeQuery q = 
+ LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, true); + for (byte i = 0; i < 2; i++) { + TopFieldCollector collector = + TopFieldCollector.create(Sort.INDEXORDER, noDocs, Integer.MAX_VALUE); String type; switch (i) { case 0: @@ -170,11 +182,17 @@ private void testRange(int precisionStep) throws Exception { TopDocs topDocs = collector.topDocs(); ScoreDoc[] sd = topDocs.scoreDocs; assertNotNull(sd); - assertEquals("Score doc count"+type, count, sd.length ); - Document doc=searcher.doc(sd[0].doc); - assertEquals("First doc"+type, 2*distance+startOffset, doc.getField(field).numericValue().intValue()); - doc=searcher.doc(sd[sd.length-1].doc); - assertEquals("Last doc"+type, (1+count)*distance+startOffset, doc.getField(field).numericValue().intValue()); + assertEquals("Score doc count" + type, count, sd.length); + Document doc = searcher.doc(sd[0].doc); + assertEquals( + "First doc" + type, + 2 * distance + startOffset, + doc.getField(field).numericValue().intValue()); + doc = searcher.doc(sd[sd.length - 1].doc); + assertEquals( + "Last doc" + type, + (1 + count) * distance + startOffset, + doc.getField(field).numericValue().intValue()); } } @@ -182,215 +200,257 @@ private void testRange(int precisionStep) throws Exception { public void testRange_8bit() throws Exception { testRange(8); } - + @Test public void testRange_4bit() throws Exception { testRange(4); } - + @Test public void testRange_2bit() throws Exception { testRange(2); } - + @Test public void testOneMatchQuery() throws Exception { - LegacyNumericRangeQuery q = LegacyNumericRangeQuery.newIntRange("ascfield8", 8, 1000, 1000, true, true); + LegacyNumericRangeQuery q = + LegacyNumericRangeQuery.newIntRange("ascfield8", 8, 1000, 1000, true, true); TopDocs topDocs = searcher.search(q, noDocs); ScoreDoc[] sd = topDocs.scoreDocs; assertNotNull(sd); - assertEquals("Score doc count", 1, sd.length ); + assertEquals("Score doc count", 1, sd.length); } - + private void testLeftOpenRange(int precisionStep) throws Exception { - String field="field"+precisionStep; - int count=3000; - int upper=(count-1)*distance + (distance/3) + startOffset; - LegacyNumericRangeQuery q= LegacyNumericRangeQuery.newIntRange(field, precisionStep, null, upper, true, true); + String field = "field" + precisionStep; + int count = 3000; + int upper = (count - 1) * distance + (distance / 3) + startOffset; + LegacyNumericRangeQuery q = + LegacyNumericRangeQuery.newIntRange(field, precisionStep, null, upper, true, true); TopDocs topDocs = searcher.search(q, noDocs, Sort.INDEXORDER); ScoreDoc[] sd = topDocs.scoreDocs; assertNotNull(sd); - assertEquals("Score doc count", count, sd.length ); - Document doc=searcher.doc(sd[0].doc); + assertEquals("Score doc count", count, sd.length); + Document doc = searcher.doc(sd[0].doc); assertEquals("First doc", startOffset, doc.getField(field).numericValue().intValue()); - doc=searcher.doc(sd[sd.length-1].doc); - assertEquals("Last doc", (count-1)*distance+startOffset, doc.getField(field).numericValue().intValue()); - - q= LegacyNumericRangeQuery.newIntRange(field, precisionStep, null, upper, false, true); + doc = searcher.doc(sd[sd.length - 1].doc); + assertEquals( + "Last doc", + (count - 1) * distance + startOffset, + doc.getField(field).numericValue().intValue()); + + q = LegacyNumericRangeQuery.newIntRange(field, precisionStep, null, upper, false, true); topDocs = searcher.search(q, noDocs, Sort.INDEXORDER); sd = topDocs.scoreDocs; assertNotNull(sd); - assertEquals("Score doc count", count, sd.length ); 
- doc=searcher.doc(sd[0].doc); + assertEquals("Score doc count", count, sd.length); + doc = searcher.doc(sd[0].doc); assertEquals("First doc", startOffset, doc.getField(field).numericValue().intValue()); - doc=searcher.doc(sd[sd.length-1].doc); - assertEquals("Last doc", (count-1)*distance+startOffset, doc.getField(field).numericValue().intValue()); + doc = searcher.doc(sd[sd.length - 1].doc); + assertEquals( + "Last doc", + (count - 1) * distance + startOffset, + doc.getField(field).numericValue().intValue()); } - + @Test public void testLeftOpenRange_8bit() throws Exception { testLeftOpenRange(8); } - + @Test public void testLeftOpenRange_4bit() throws Exception { testLeftOpenRange(4); } - + @Test public void testLeftOpenRange_2bit() throws Exception { testLeftOpenRange(2); } - + private void testRightOpenRange(int precisionStep) throws Exception { - String field="field"+precisionStep; - int count=3000; - int lower=(count-1)*distance + (distance/3) +startOffset; - LegacyNumericRangeQuery q= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, null, true, true); - TopFieldCollector collector = TopFieldCollector.create(Sort.INDEXORDER, noDocs, Integer.MAX_VALUE); + String field = "field" + precisionStep; + int count = 3000; + int lower = (count - 1) * distance + (distance / 3) + startOffset; + LegacyNumericRangeQuery q = + LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, null, true, true); + TopFieldCollector collector = + TopFieldCollector.create(Sort.INDEXORDER, noDocs, Integer.MAX_VALUE); searcher.search(q, collector); TopDocs topDocs = collector.topDocs(); ScoreDoc[] sd = topDocs.scoreDocs; assertNotNull(sd); - assertEquals("Score doc count", noDocs-count, sd.length ); - Document doc=searcher.doc(sd[0].doc); - assertEquals("First doc", count*distance+startOffset, doc.getField(field).numericValue().intValue()); - doc=searcher.doc(sd[sd.length-1].doc); - assertEquals("Last doc", (noDocs-1)*distance+startOffset, doc.getField(field).numericValue().intValue()); + assertEquals("Score doc count", noDocs - count, sd.length); + Document doc = searcher.doc(sd[0].doc); + assertEquals( + "First doc", count * distance + startOffset, doc.getField(field).numericValue().intValue()); + doc = searcher.doc(sd[sd.length - 1].doc); + assertEquals( + "Last doc", + (noDocs - 1) * distance + startOffset, + doc.getField(field).numericValue().intValue()); - q= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, null, true, false); + q = LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, null, true, false); collector = TopFieldCollector.create(Sort.INDEXORDER, noDocs, Integer.MAX_VALUE); searcher.search(q, collector); topDocs = collector.topDocs(); sd = topDocs.scoreDocs; assertNotNull(sd); - assertEquals("Score doc count", noDocs-count, sd.length ); - doc=searcher.doc(sd[0].doc); - assertEquals("First doc", count*distance+startOffset, doc.getField(field).numericValue().intValue() ); - doc=searcher.doc(sd[sd.length-1].doc); - assertEquals("Last doc", (noDocs-1)*distance+startOffset, doc.getField(field).numericValue().intValue() ); + assertEquals("Score doc count", noDocs - count, sd.length); + doc = searcher.doc(sd[0].doc); + assertEquals( + "First doc", count * distance + startOffset, doc.getField(field).numericValue().intValue()); + doc = searcher.doc(sd[sd.length - 1].doc); + assertEquals( + "Last doc", + (noDocs - 1) * distance + startOffset, + doc.getField(field).numericValue().intValue()); } - + @Test public void testRightOpenRange_8bit() throws 
Exception { testRightOpenRange(8); } - + @Test public void testRightOpenRange_4bit() throws Exception { testRightOpenRange(4); } - + @Test public void testRightOpenRange_2bit() throws Exception { testRightOpenRange(2); } - + @Test public void testInfiniteValues() throws Exception { Directory dir = newDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(random(), dir, - newIndexWriterConfig(new MockAnalyzer(random()))); + RandomIndexWriter writer = + new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); doc.add(new LegacyFloatField("float", Float.NEGATIVE_INFINITY, Field.Store.NO)); doc.add(new LegacyIntField("int", Integer.MIN_VALUE, Field.Store.NO)); writer.addDocument(doc); - + doc = new Document(); doc.add(new LegacyFloatField("float", Float.POSITIVE_INFINITY, Field.Store.NO)); doc.add(new LegacyIntField("int", Integer.MAX_VALUE, Field.Store.NO)); writer.addDocument(doc); - + doc = new Document(); doc.add(new LegacyFloatField("float", 0.0f, Field.Store.NO)); doc.add(new LegacyIntField("int", 0, Field.Store.NO)); writer.addDocument(doc); - + for (float f : TestLegacyNumericUtils.FLOAT_NANs) { doc = new Document(); doc.add(new LegacyFloatField("float", f, Field.Store.NO)); writer.addDocument(doc); } - + writer.close(); - + IndexReader r = DirectoryReader.open(dir); IndexSearcher s = newSearcher(r); - - Query q= LegacyNumericRangeQuery.newIntRange("int", null, null, true, true); + + Query q = LegacyNumericRangeQuery.newIntRange("int", null, null, true, true); TopDocs topDocs = s.search(q, 10); - assertEquals("Score doc count", 3, topDocs.scoreDocs.length ); - - q= LegacyNumericRangeQuery.newIntRange("int", null, null, false, false); + assertEquals("Score doc count", 3, topDocs.scoreDocs.length); + + q = LegacyNumericRangeQuery.newIntRange("int", null, null, false, false); topDocs = s.search(q, 10); - assertEquals("Score doc count", 3, topDocs.scoreDocs.length ); + assertEquals("Score doc count", 3, topDocs.scoreDocs.length); - q= LegacyNumericRangeQuery.newIntRange("int", Integer.MIN_VALUE, Integer.MAX_VALUE, true, true); + q = + LegacyNumericRangeQuery.newIntRange( + "int", Integer.MIN_VALUE, Integer.MAX_VALUE, true, true); topDocs = s.search(q, 10); - assertEquals("Score doc count", 3, topDocs.scoreDocs.length ); - - q= LegacyNumericRangeQuery.newIntRange("int", Integer.MIN_VALUE, Integer.MAX_VALUE, false, false); + assertEquals("Score doc count", 3, topDocs.scoreDocs.length); + + q = + LegacyNumericRangeQuery.newIntRange( + "int", Integer.MIN_VALUE, Integer.MAX_VALUE, false, false); topDocs = s.search(q, 10); - assertEquals("Score doc count", 1, topDocs.scoreDocs.length ); + assertEquals("Score doc count", 1, topDocs.scoreDocs.length); - q= LegacyNumericRangeQuery.newFloatRange("float", null, null, true, true); + q = LegacyNumericRangeQuery.newFloatRange("float", null, null, true, true); topDocs = s.search(q, 10); - assertEquals("Score doc count", 3, topDocs.scoreDocs.length ); + assertEquals("Score doc count", 3, topDocs.scoreDocs.length); - q= LegacyNumericRangeQuery.newFloatRange("float", null, null, false, false); + q = LegacyNumericRangeQuery.newFloatRange("float", null, null, false, false); topDocs = s.search(q, 10); - assertEquals("Score doc count", 3, topDocs.scoreDocs.length ); + assertEquals("Score doc count", 3, topDocs.scoreDocs.length); - q= LegacyNumericRangeQuery.newFloatRange("float", Float.NEGATIVE_INFINITY, Float.POSITIVE_INFINITY, true, true); + q = + LegacyNumericRangeQuery.newFloatRange( + 
"float", Float.NEGATIVE_INFINITY, Float.POSITIVE_INFINITY, true, true); topDocs = s.search(q, 10); - assertEquals("Score doc count", 3, topDocs.scoreDocs.length ); + assertEquals("Score doc count", 3, topDocs.scoreDocs.length); - q= LegacyNumericRangeQuery.newFloatRange("float", Float.NEGATIVE_INFINITY, Float.POSITIVE_INFINITY, false, false); + q = + LegacyNumericRangeQuery.newFloatRange( + "float", Float.NEGATIVE_INFINITY, Float.POSITIVE_INFINITY, false, false); topDocs = s.search(q, 10); - assertEquals("Score doc count", 1, topDocs.scoreDocs.length ); + assertEquals("Score doc count", 1, topDocs.scoreDocs.length); - q= LegacyNumericRangeQuery.newFloatRange("float", Float.NaN, Float.NaN, true, true); + q = LegacyNumericRangeQuery.newFloatRange("float", Float.NaN, Float.NaN, true, true); topDocs = s.search(q, 10); - assertEquals("Score doc count", TestLegacyNumericUtils.FLOAT_NANs.length, topDocs.scoreDocs.length ); + assertEquals( + "Score doc count", TestLegacyNumericUtils.FLOAT_NANs.length, topDocs.scoreDocs.length); r.close(); dir.close(); } - + private void testRangeSplit(int precisionStep) throws Exception { - String field="ascfield"+precisionStep; + String field = "ascfield" + precisionStep; // 10 random tests int num = TestUtil.nextInt(random(), 10, 20); - for (int i =0; i< num; i++) { - int lower=(int)(random().nextDouble()*noDocs - noDocs/2); - int upper=(int)(random().nextDouble()*noDocs - noDocs/2); - if (lower>upper) { - int a=lower; lower=upper; upper=a; + for (int i = 0; i < num; i++) { + int lower = (int) (random().nextDouble() * noDocs - noDocs / 2); + int upper = (int) (random().nextDouble() * noDocs - noDocs / 2); + if (lower > upper) { + int a = lower; + lower = upper; + upper = a; } // test inclusive range - Query tq= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, true); + Query tq = + LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, true); TopScoreDocCollector collector = TopScoreDocCollector.create(1, Integer.MAX_VALUE); searcher.search(tq, collector); TopDocs tTopDocs = collector.topDocs(); - assertEquals("Returned count of range query must be equal to inclusive range length", upper-lower+1, tTopDocs.totalHits.value ); + assertEquals( + "Returned count of range query must be equal to inclusive range length", + upper - lower + 1, + tTopDocs.totalHits.value); // test exclusive range - tq= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, false, false); + tq = LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, false, false); collector = TopScoreDocCollector.create(1, Integer.MAX_VALUE); searcher.search(tq, collector); tTopDocs = collector.topDocs(); - assertEquals("Returned count of range query must be equal to exclusive range length", Math.max(upper-lower-1, 0), tTopDocs.totalHits.value ); + assertEquals( + "Returned count of range query must be equal to exclusive range length", + Math.max(upper - lower - 1, 0), + tTopDocs.totalHits.value); // test left exclusive range - tq= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, false, true); + tq = LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, false, true); collector = TopScoreDocCollector.create(1, Integer.MAX_VALUE); searcher.search(tq, collector); tTopDocs = collector.topDocs(); - assertEquals("Returned count of range query must be equal to half exclusive range length", upper-lower, tTopDocs.totalHits.value ); + assertEquals( + "Returned count of range query must be 
equal to half exclusive range length", + upper - lower, + tTopDocs.totalHits.value); // test right exclusive range - tq= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, false); + tq = LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, false); collector = TopScoreDocCollector.create(1, Integer.MAX_VALUE); searcher.search(tq, collector); tTopDocs = collector.topDocs(); - assertEquals("Returned count of range query must be equal to half exclusive range length", upper-lower, tTopDocs.totalHits.value ); + assertEquals( + "Returned count of range query must be equal to half exclusive range length", + upper - lower, + tTopDocs.totalHits.value); } } @@ -398,83 +458,92 @@ private void testRangeSplit(int precisionStep) throws Exception { public void testRangeSplit_8bit() throws Exception { testRangeSplit(8); } - + @Test public void testRangeSplit_4bit() throws Exception { testRangeSplit(4); } - + @Test public void testRangeSplit_2bit() throws Exception { testRangeSplit(2); } - + /** we fake a float test using int2float conversion of LegacyNumericUtils */ private void testFloatRange(int precisionStep) throws Exception { - final String field="ascfield"+precisionStep; - final int lower=-1000, upper=+2000; - - Query tq= LegacyNumericRangeQuery.newFloatRange(field, precisionStep, - NumericUtils.sortableIntToFloat(lower), NumericUtils.sortableIntToFloat(upper), true, true); + final String field = "ascfield" + precisionStep; + final int lower = -1000, upper = +2000; + + Query tq = + LegacyNumericRangeQuery.newFloatRange( + field, + precisionStep, + NumericUtils.sortableIntToFloat(lower), + NumericUtils.sortableIntToFloat(upper), + true, + true); TopScoreDocCollector collector = TopScoreDocCollector.create(1, Integer.MAX_VALUE); searcher.search(tq, collector); TopDocs tTopDocs = collector.topDocs(); - assertEquals("Returned count of range query must be equal to inclusive range length", upper-lower+1, tTopDocs.totalHits.value ); + assertEquals( + "Returned count of range query must be equal to inclusive range length", + upper - lower + 1, + tTopDocs.totalHits.value); } @Test public void testFloatRange_8bit() throws Exception { testFloatRange(8); } - + @Test public void testFloatRange_4bit() throws Exception { testFloatRange(4); } - + @Test public void testFloatRange_2bit() throws Exception { testFloatRange(2); } - + @Test public void testEqualsAndHash() throws Exception { QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newIntRange("test1", 4, 10, 20, true, true)); - QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newIntRange("test2", 4, 10, 20, false, true)); - QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newIntRange("test3", 4, 10, 20, true, false)); - QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newIntRange("test4", 4, 10, 20, false, false)); - QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newIntRange("test5", 4, 10, null, true, true)); - QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newIntRange("test6", 4, null, 20, true, true)); - QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newIntRange("test7", 4, null, null, true, true)); + QueryUtils.checkHashEquals( + LegacyNumericRangeQuery.newIntRange("test2", 4, 10, 20, false, true)); + QueryUtils.checkHashEquals( + LegacyNumericRangeQuery.newIntRange("test3", 4, 10, 20, true, false)); + QueryUtils.checkHashEquals( + LegacyNumericRangeQuery.newIntRange("test4", 4, 10, 20, false, false)); + QueryUtils.checkHashEquals( + LegacyNumericRangeQuery.newIntRange("test5", 4, 10, 
null, true, true)); + QueryUtils.checkHashEquals( + LegacyNumericRangeQuery.newIntRange("test6", 4, null, 20, true, true)); + QueryUtils.checkHashEquals( + LegacyNumericRangeQuery.newIntRange("test7", 4, null, null, true, true)); QueryUtils.checkEqual( - LegacyNumericRangeQuery.newIntRange("test8", 4, 10, 20, true, true), - LegacyNumericRangeQuery.newIntRange("test8", 4, 10, 20, true, true) - ); + LegacyNumericRangeQuery.newIntRange("test8", 4, 10, 20, true, true), + LegacyNumericRangeQuery.newIntRange("test8", 4, 10, 20, true, true)); QueryUtils.checkUnequal( - LegacyNumericRangeQuery.newIntRange("test9", 4, 10, 20, true, true), - LegacyNumericRangeQuery.newIntRange("test9", 8, 10, 20, true, true) - ); + LegacyNumericRangeQuery.newIntRange("test9", 4, 10, 20, true, true), + LegacyNumericRangeQuery.newIntRange("test9", 8, 10, 20, true, true)); QueryUtils.checkUnequal( - LegacyNumericRangeQuery.newIntRange("test10a", 4, 10, 20, true, true), - LegacyNumericRangeQuery.newIntRange("test10b", 4, 10, 20, true, true) - ); + LegacyNumericRangeQuery.newIntRange("test10a", 4, 10, 20, true, true), + LegacyNumericRangeQuery.newIntRange("test10b", 4, 10, 20, true, true)); QueryUtils.checkUnequal( - LegacyNumericRangeQuery.newIntRange("test11", 4, 10, 20, true, true), - LegacyNumericRangeQuery.newIntRange("test11", 4, 20, 10, true, true) - ); + LegacyNumericRangeQuery.newIntRange("test11", 4, 10, 20, true, true), + LegacyNumericRangeQuery.newIntRange("test11", 4, 20, 10, true, true)); QueryUtils.checkUnequal( - LegacyNumericRangeQuery.newIntRange("test12", 4, 10, 20, true, true), - LegacyNumericRangeQuery.newIntRange("test12", 4, 10, 20, false, true) - ); + LegacyNumericRangeQuery.newIntRange("test12", 4, 10, 20, true, true), + LegacyNumericRangeQuery.newIntRange("test12", 4, 10, 20, false, true)); QueryUtils.checkUnequal( - LegacyNumericRangeQuery.newIntRange("test13", 4, 10, 20, true, true), - LegacyNumericRangeQuery.newFloatRange("test13", 4, 10f, 20f, true, true) - ); - // the following produces a hash collision, because Long and Integer have the same hashcode, so only test equality: + LegacyNumericRangeQuery.newIntRange("test13", 4, 10, 20, true, true), + LegacyNumericRangeQuery.newFloatRange("test13", 4, 10f, 20f, true, true)); + // the following produces a hash collision, because Long and Integer have the same hashcode, so + // only test equality: Query q1 = LegacyNumericRangeQuery.newIntRange("test14", 4, 10, 20, true, true); Query q2 = LegacyNumericRangeQuery.newLongRange("test14", 4, 10L, 20L, true, true); assertFalse(q1.equals(q2)); assertFalse(q2.equals(q1)); } - } diff --git a/solr/core/src/test/org/apache/solr/legacy/TestNumericRangeQuery64.java b/solr/core/src/test/org/apache/solr/legacy/TestNumericRangeQuery64.java index dca9f2c26dc..92d81956e8c 100644 --- a/solr/core/src/test/org/apache/solr/legacy/TestNumericRangeQuery64.java +++ b/solr/core/src/test/org/apache/solr/legacy/TestNumericRangeQuery64.java @@ -16,7 +16,6 @@ */ package org.apache.solr.legacy; - import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; @@ -44,23 +43,26 @@ public class TestNumericRangeQuery64 extends SolrTestCase { // distance of entries private static long distance; // shift the starting of the values to the left, to also have negative values: - private static final long startOffset = - 1L << 31; + private static final long startOffset = -1L << 31; // number of docs to generate for testing private static int noDocs; - + private static 
Directory directory = null; private static IndexReader reader = null; private static IndexSearcher searcher = null; - + @BeforeClass public static void beforeClass() throws Exception { noDocs = atLeast(4096); distance = (1L << 60) / noDocs; directory = newDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(random(), directory, - newIndexWriterConfig(new MockAnalyzer(random())) - .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000)) - .setMergePolicy(newLogMergePolicy())); + RandomIndexWriter writer = + new RandomIndexWriter( + random(), + directory, + newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000)) + .setMergePolicy(newLogMergePolicy())); final LegacyFieldType storedLong = new LegacyFieldType(LegacyLongField.TYPE_NOT_STORED); storedLong.setStored(true); @@ -95,33 +97,40 @@ public static void beforeClass() throws Exception { final LegacyFieldType unstoredLong2 = new LegacyFieldType(unstoredLong); unstoredLong2.setNumericPrecisionStep(2); - LegacyLongField - field8 = new LegacyLongField("field8", 0L, storedLong8), - field6 = new LegacyLongField("field6", 0L, storedLong6), - field4 = new LegacyLongField("field4", 0L, storedLong4), - field2 = new LegacyLongField("field2", 0L, storedLong2), - fieldNoTrie = new LegacyLongField("field"+Integer.MAX_VALUE, 0L, storedLongNone), - ascfield8 = new LegacyLongField("ascfield8", 0L, unstoredLong8), - ascfield6 = new LegacyLongField("ascfield6", 0L, unstoredLong6), - ascfield4 = new LegacyLongField("ascfield4", 0L, unstoredLong4), - ascfield2 = new LegacyLongField("ascfield2", 0L, unstoredLong2); + LegacyLongField field8 = new LegacyLongField("field8", 0L, storedLong8), + field6 = new LegacyLongField("field6", 0L, storedLong6), + field4 = new LegacyLongField("field4", 0L, storedLong4), + field2 = new LegacyLongField("field2", 0L, storedLong2), + fieldNoTrie = new LegacyLongField("field" + Integer.MAX_VALUE, 0L, storedLongNone), + ascfield8 = new LegacyLongField("ascfield8", 0L, unstoredLong8), + ascfield6 = new LegacyLongField("ascfield6", 0L, unstoredLong6), + ascfield4 = new LegacyLongField("ascfield4", 0L, unstoredLong4), + ascfield2 = new LegacyLongField("ascfield2", 0L, unstoredLong2); Document doc = new Document(); // add fields, that have a distance to test general functionality - doc.add(field8); doc.add(field6); doc.add(field4); doc.add(field2); doc.add(fieldNoTrie); - // add ascending fields with a distance of 1, beginning at -noDocs/2 to test the correct splitting of range and inclusive/exclusive - doc.add(ascfield8); doc.add(ascfield6); doc.add(ascfield4); doc.add(ascfield2); - + doc.add(field8); + doc.add(field6); + doc.add(field4); + doc.add(field2); + doc.add(fieldNoTrie); + // add ascending fields with a distance of 1, beginning at -noDocs/2 to test the correct + // splitting of range and inclusive/exclusive + doc.add(ascfield8); + doc.add(ascfield6); + doc.add(ascfield4); + doc.add(ascfield2); + // Add a series of noDocs docs with increasing long values, by updating the fields - for (int l=0; l<noDocs; l++) { - long val=distance*l+startOffset; + for (int l = 0; l < noDocs; l++) { + long val = distance * l + startOffset; field8.setLongValue(val); field6.setLongValue(val); field4.setLongValue(val); field2.setLongValue(val); fieldNoTrie.setLongValue(val); - val=l-(noDocs/2); + val = l - (noDocs / 2); ascfield8.setLongValue(val); ascfield6.setLongValue(val); ascfield4.setLongValue(val); ascfield2.setLongValue(val); writer.addDocument(doc); } reader = writer.getReader(); searcher = newSearcher(reader); writer.close(); } private void testRange(int precisionStep) throws Exception { - String field="field"+precisionStep; - int count=3000; - long lower=(distance*3/2)+startOffset, upper=lower + count*distance + (distance/3); - LegacyNumericRangeQuery<Long> q = LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, true); - for (byte i=0; i<2; i++) { - TopFieldCollector collector = TopFieldCollector.create(Sort.INDEXORDER, noDocs, Integer.MAX_VALUE); + String field = "field" + precisionStep; + int count = 3000; + long lower = (distance * 3 / 2) + startOffset, + upper = lower + count * distance + (distance / 3); + LegacyNumericRangeQuery<Long> q = + LegacyNumericRangeQuery.newLongRange(field, precisionStep, 
lower, upper, true, true); + for (byte i = 0; i < 2; i++) { + TopFieldCollector collector = + TopFieldCollector.create(Sort.INDEXORDER, noDocs, Integer.MAX_VALUE); String type; switch (i) { case 0: @@ -179,11 +194,17 @@ private void testRange(int precisionStep) throws Exception { TopDocs topDocs = collector.topDocs(); ScoreDoc[] sd = topDocs.scoreDocs; assertNotNull(sd); - assertEquals("Score doc count"+type, count, sd.length ); - Document doc=searcher.doc(sd[0].doc); - assertEquals("First doc"+type, 2*distance+startOffset, doc.getField(field).numericValue().longValue() ); - doc=searcher.doc(sd[sd.length-1].doc); - assertEquals("Last doc"+type, (1+count)*distance+startOffset, doc.getField(field).numericValue().longValue() ); + assertEquals("Score doc count" + type, count, sd.length); + Document doc = searcher.doc(sd[0].doc); + assertEquals( + "First doc" + type, + 2 * distance + startOffset, + doc.getField(field).numericValue().longValue()); + doc = searcher.doc(sd[sd.length - 1].doc); + assertEquals( + "Last doc" + type, + (1 + count) * distance + startOffset, + doc.getField(field).numericValue().longValue()); } } @@ -191,227 +212,268 @@ private void testRange(int precisionStep) throws Exception { public void testRange_8bit() throws Exception { testRange(8); } - + @Test public void testRange_6bit() throws Exception { testRange(6); } - + @Test public void testRange_4bit() throws Exception { testRange(4); } - + @Test public void testRange_2bit() throws Exception { testRange(2); } - + @Test public void testOneMatchQuery() throws Exception { - LegacyNumericRangeQuery<Long> q = LegacyNumericRangeQuery.newLongRange("ascfield8", 8, 1000L, 1000L, true, true); + LegacyNumericRangeQuery<Long> q = + LegacyNumericRangeQuery.newLongRange("ascfield8", 8, 1000L, 1000L, true, true); TopDocs topDocs = searcher.search(q, noDocs); ScoreDoc[] sd = topDocs.scoreDocs; assertNotNull(sd); - assertEquals("Score doc count", 1, sd.length ); + assertEquals("Score doc count", 1, sd.length); } - + private void testLeftOpenRange(int precisionStep) throws Exception { - String field="field"+precisionStep; - int count=3000; - long upper=(count-1)*distance + (distance/3) + startOffset; - LegacyNumericRangeQuery<Long> q= LegacyNumericRangeQuery.newLongRange(field, precisionStep, null, upper, true, true); + String field = "field" + precisionStep; + int count = 3000; + long upper = (count - 1) * distance + (distance / 3) + startOffset; + LegacyNumericRangeQuery<Long> q = + LegacyNumericRangeQuery.newLongRange(field, precisionStep, null, upper, true, true); TopDocs topDocs = searcher.search(q, noDocs, Sort.INDEXORDER); ScoreDoc[] sd = topDocs.scoreDocs; assertNotNull(sd); - assertEquals("Score doc count", count, sd.length ); - Document doc=searcher.doc(sd[0].doc); - assertEquals("First doc", startOffset, doc.getField(field).numericValue().longValue() ); - doc=searcher.doc(sd[sd.length-1].doc); - assertEquals("Last doc", (count-1)*distance+startOffset, doc.getField(field).numericValue().longValue() ); + assertEquals("Score doc count", count, sd.length); + Document doc = searcher.doc(sd[0].doc); + assertEquals("First doc", startOffset, doc.getField(field).numericValue().longValue()); + doc = searcher.doc(sd[sd.length - 1].doc); + assertEquals( + "Last doc", + (count - 1) * distance + startOffset, + doc.getField(field).numericValue().longValue()); - q= LegacyNumericRangeQuery.newLongRange(field, precisionStep, null, upper, false, true); + q = LegacyNumericRangeQuery.newLongRange(field, precisionStep, null, upper, false, true); topDocs = searcher.search(q, noDocs, Sort.INDEXORDER); sd = topDocs.scoreDocs; assertNotNull(sd); - assertEquals("Score doc count", count, sd.length ); - doc=searcher.doc(sd[0].doc); - assertEquals("First doc", startOffset, doc.getField(field).numericValue().longValue() ); - doc=searcher.doc(sd[sd.length-1].doc); - assertEquals("Last doc", (count-1)*distance+startOffset, doc.getField(field).numericValue().longValue() ); + assertEquals("Score doc count", count, sd.length); + doc = searcher.doc(sd[0].doc); + assertEquals("First doc", startOffset, doc.getField(field).numericValue().longValue()); + doc = searcher.doc(sd[sd.length - 1].doc); + assertEquals( + "Last doc", + (count - 1) * distance + startOffset, + doc.getField(field).numericValue().longValue()); } - + @Test public void testLeftOpenRange_8bit() throws Exception { testLeftOpenRange(8); } - + @Test public void testLeftOpenRange_6bit() throws Exception { testLeftOpenRange(6); } - + @Test public void testLeftOpenRange_4bit() throws Exception { testLeftOpenRange(4); } - + @Test public void testLeftOpenRange_2bit() throws Exception { testLeftOpenRange(2); } - + private void testRightOpenRange(int precisionStep) throws Exception { - String field="field"+precisionStep; - int count=3000; - long lower=(count-1)*distance + (distance/3) +startOffset; - LegacyNumericRangeQuery<Long> q= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, null, true, true); + String field = "field" + precisionStep; + int count = 3000; + long lower = (count - 1) * distance + (distance / 3) + startOffset; + LegacyNumericRangeQuery<Long> q = + LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, null, true, true); TopDocs topDocs = searcher.search(q, noDocs, Sort.INDEXORDER); ScoreDoc[] sd = topDocs.scoreDocs; assertNotNull(sd); - assertEquals("Score doc count", noDocs-count, sd.length ); - Document doc=searcher.doc(sd[0].doc); - assertEquals("First doc", count*distance+startOffset, doc.getField(field).numericValue().longValue() ); - doc=searcher.doc(sd[sd.length-1].doc); - assertEquals("Last doc", (noDocs-1)*distance+startOffset, doc.getField(field).numericValue().longValue() ); + assertEquals("Score doc count", noDocs - count, sd.length); + Document doc = searcher.doc(sd[0].doc); + assertEquals( + "First doc", + count * distance + startOffset, + doc.getField(field).numericValue().longValue()); + doc = searcher.doc(sd[sd.length - 1].doc); + assertEquals( + "Last doc", + (noDocs - 1) * distance + startOffset, + doc.getField(field).numericValue().longValue()); - q= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, null, true, false); + q = LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, null, true, false); topDocs = searcher.search(q, noDocs, Sort.INDEXORDER); sd = topDocs.scoreDocs; assertNotNull(sd); - assertEquals("Score doc count", noDocs-count, sd.length ); - doc=searcher.doc(sd[0].doc); - assertEquals("First doc", count*distance+startOffset, doc.getField(field).numericValue().longValue() ); - doc=searcher.doc(sd[sd.length-1].doc); - assertEquals("Last doc", (noDocs-1)*distance+startOffset, doc.getField(field).numericValue().longValue() ); + assertEquals("Score doc count", noDocs - count, sd.length); + doc = searcher.doc(sd[0].doc); + assertEquals( + "First doc", + count * distance + startOffset, + doc.getField(field).numericValue().longValue()); + doc = searcher.doc(sd[sd.length - 1].doc); + assertEquals( + "Last doc", + (noDocs - 1) * distance + startOffset, + doc.getField(field).numericValue().longValue()); } - + @Test 
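// A quick sketch of the half-open ranges the two helpers above exercise: passing null as a
// bound to LegacyNumericRangeQuery leaves that side of the range unbounded, and the inclusive
// flag next to a null bound has no effect (field name "price" and precision step 8 below are
// hypothetical, not from this patch):
//
//   Query atLeast = LegacyNumericRangeQuery.newLongRange("price", 8, 100L, null, true, false);   // value >= 100
//   Query atMost = LegacyNumericRangeQuery.newLongRange("price", 8, null, 100L, false, true);    // value <= 100
//   Query hasField = LegacyNumericRangeQuery.newLongRange("price", 8, null, null, false, false); // any value
//
// which matches what testLeftOpenRange/testRightOpenRange assert against the ascending data.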
public void testRightOpenRange_8bit() throws Exception { testRightOpenRange(8); } - + @Test public void testRightOpenRange_6bit() throws Exception { testRightOpenRange(6); } - + @Test public void testRightOpenRange_4bit() throws Exception { testRightOpenRange(4); } - + @Test public void testRightOpenRange_2bit() throws Exception { testRightOpenRange(2); } - + @Test public void testInfiniteValues() throws Exception { Directory dir = newDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(random(), dir, - newIndexWriterConfig(new MockAnalyzer(random()))); + RandomIndexWriter writer = + new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); doc.add(new LegacyDoubleField("double", Double.NEGATIVE_INFINITY, Field.Store.NO)); doc.add(new LegacyLongField("long", Long.MIN_VALUE, Field.Store.NO)); writer.addDocument(doc); - + doc = new Document(); doc.add(new LegacyDoubleField("double", Double.POSITIVE_INFINITY, Field.Store.NO)); doc.add(new LegacyLongField("long", Long.MAX_VALUE, Field.Store.NO)); writer.addDocument(doc); - + doc = new Document(); doc.add(new LegacyDoubleField("double", 0.0, Field.Store.NO)); doc.add(new LegacyLongField("long", 0L, Field.Store.NO)); writer.addDocument(doc); - + for (double d : TestLegacyNumericUtils.DOUBLE_NANs) { doc = new Document(); doc.add(new LegacyDoubleField("double", d, Field.Store.NO)); writer.addDocument(doc); } - + writer.close(); - + IndexReader r = DirectoryReader.open(dir); IndexSearcher s = newSearcher(r); - - Query q= LegacyNumericRangeQuery.newLongRange("long", null, null, true, true); + + Query q = LegacyNumericRangeQuery.newLongRange("long", null, null, true, true); TopDocs topDocs = s.search(q, 10); - assertEquals("Score doc count", 3, topDocs.scoreDocs.length ); - - q= LegacyNumericRangeQuery.newLongRange("long", null, null, false, false); + assertEquals("Score doc count", 3, topDocs.scoreDocs.length); + + q = LegacyNumericRangeQuery.newLongRange("long", null, null, false, false); topDocs = s.search(q, 10); - assertEquals("Score doc count", 3, topDocs.scoreDocs.length ); + assertEquals("Score doc count", 3, topDocs.scoreDocs.length); - q= LegacyNumericRangeQuery.newLongRange("long", Long.MIN_VALUE, Long.MAX_VALUE, true, true); + q = LegacyNumericRangeQuery.newLongRange("long", Long.MIN_VALUE, Long.MAX_VALUE, true, true); topDocs = s.search(q, 10); - assertEquals("Score doc count", 3, topDocs.scoreDocs.length ); - - q= LegacyNumericRangeQuery.newLongRange("long", Long.MIN_VALUE, Long.MAX_VALUE, false, false); + assertEquals("Score doc count", 3, topDocs.scoreDocs.length); + + q = LegacyNumericRangeQuery.newLongRange("long", Long.MIN_VALUE, Long.MAX_VALUE, false, false); topDocs = s.search(q, 10); - assertEquals("Score doc count", 1, topDocs.scoreDocs.length ); + assertEquals("Score doc count", 1, topDocs.scoreDocs.length); - q= LegacyNumericRangeQuery.newDoubleRange("double", null, null, true, true); + q = LegacyNumericRangeQuery.newDoubleRange("double", null, null, true, true); topDocs = s.search(q, 10); - assertEquals("Score doc count", 3, topDocs.scoreDocs.length ); + assertEquals("Score doc count", 3, topDocs.scoreDocs.length); - q= LegacyNumericRangeQuery.newDoubleRange("double", null, null, false, false); + q = LegacyNumericRangeQuery.newDoubleRange("double", null, null, false, false); topDocs = s.search(q, 10); - assertEquals("Score doc count", 3, topDocs.scoreDocs.length ); + assertEquals("Score doc count", 3, topDocs.scoreDocs.length); - q= 
LegacyNumericRangeQuery.newDoubleRange("double", Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, true, true); + q = + LegacyNumericRangeQuery.newDoubleRange( + "double", Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, true, true); topDocs = s.search(q, 10); - assertEquals("Score doc count", 3, topDocs.scoreDocs.length ); + assertEquals("Score doc count", 3, topDocs.scoreDocs.length); - q= LegacyNumericRangeQuery.newDoubleRange("double", Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, false, false); + q = + LegacyNumericRangeQuery.newDoubleRange( + "double", Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, false, false); topDocs = s.search(q, 10); - assertEquals("Score doc count", 1, topDocs.scoreDocs.length ); + assertEquals("Score doc count", 1, topDocs.scoreDocs.length); - q= LegacyNumericRangeQuery.newDoubleRange("double", Double.NaN, Double.NaN, true, true); + q = LegacyNumericRangeQuery.newDoubleRange("double", Double.NaN, Double.NaN, true, true); topDocs = s.search(q, 10); - assertEquals("Score doc count", TestLegacyNumericUtils.DOUBLE_NANs.length, topDocs.scoreDocs.length ); + assertEquals( + "Score doc count", TestLegacyNumericUtils.DOUBLE_NANs.length, topDocs.scoreDocs.length); r.close(); dir.close(); } - + private void testRangeSplit(int precisionStep) throws Exception { - String field="ascfield"+precisionStep; + String field = "ascfield" + precisionStep; // 10 random tests int num = TestUtil.nextInt(random(), 10, 20); for (int i = 0; i < num; i++) { - long lower=(long)(random().nextDouble()*noDocs - noDocs/2); - long upper=(long)(random().nextDouble()*noDocs - noDocs/2); - if (lower>upper) { - long a=lower; lower=upper; upper=a; + long lower = (long) (random().nextDouble() * noDocs - noDocs / 2); + long upper = (long) (random().nextDouble() * noDocs - noDocs / 2); + if (lower > upper) { + long a = lower; + lower = upper; + upper = a; } // test inclusive range - Query tq= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, true); + Query tq = + LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, true); TopScoreDocCollector collector = TopScoreDocCollector.create(1, Integer.MAX_VALUE); searcher.search(tq, collector); TopDocs tTopDocs = collector.topDocs(); - assertEquals("Returned count of range query must be equal to inclusive range length", upper-lower+1, tTopDocs.totalHits.value ); + assertEquals( + "Returned count of range query must be equal to inclusive range length", + upper - lower + 1, + tTopDocs.totalHits.value); // test exclusive range - tq= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, false, false); + tq = LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, false, false); collector = TopScoreDocCollector.create(1, Integer.MAX_VALUE); searcher.search(tq, collector); tTopDocs = collector.topDocs(); - assertEquals("Returned count of range query must be equal to exclusive range length", Math.max(upper-lower-1, 0), tTopDocs.totalHits.value ); + assertEquals( + "Returned count of range query must be equal to exclusive range length", + Math.max(upper - lower - 1, 0), + tTopDocs.totalHits.value); // test left exclusive range - tq= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, false, true); + tq = LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, false, true); collector = TopScoreDocCollector.create(1, Integer.MAX_VALUE); searcher.search(tq, collector); tTopDocs = collector.topDocs(); - assertEquals("Returned 
count of range query must be equal to half exclusive range length", upper-lower, tTopDocs.totalHits.value ); + assertEquals( + "Returned count of range query must be equal to half exclusive range length", + upper - lower, + tTopDocs.totalHits.value); // test right exclusive range - tq= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, false); + tq = LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, false); collector = TopScoreDocCollector.create(1, Integer.MAX_VALUE); searcher.search(tq, collector); tTopDocs = collector.topDocs(); - assertEquals("Returned count of range query must be equal to half exclusive range length", upper-lower, tTopDocs.totalHits.value ); + assertEquals( + "Returned count of range query must be equal to half exclusive range length", + upper - lower, + tTopDocs.totalHits.value); } } @@ -419,88 +481,98 @@ private void testRangeSplit(int precisionStep) throws Exception { public void testRangeSplit_8bit() throws Exception { testRangeSplit(8); } - + @Test public void testRangeSplit_6bit() throws Exception { testRangeSplit(6); } - + @Test public void testRangeSplit_4bit() throws Exception { testRangeSplit(4); } - + @Test public void testRangeSplit_2bit() throws Exception { testRangeSplit(2); } - + /** we fake a double test using long2double conversion of LegacyNumericUtils */ private void testDoubleRange(int precisionStep) throws Exception { - final String field="ascfield"+precisionStep; - final long lower=-1000L, upper=+2000L; - - Query tq= LegacyNumericRangeQuery.newDoubleRange(field, precisionStep, - NumericUtils.sortableLongToDouble(lower), NumericUtils.sortableLongToDouble(upper), true, true); + final String field = "ascfield" + precisionStep; + final long lower = -1000L, upper = +2000L; + + Query tq = + LegacyNumericRangeQuery.newDoubleRange( + field, + precisionStep, + NumericUtils.sortableLongToDouble(lower), + NumericUtils.sortableLongToDouble(upper), + true, + true); TopScoreDocCollector collector = TopScoreDocCollector.create(1, Integer.MAX_VALUE); searcher.search(tq, collector); TopDocs tTopDocs = collector.topDocs(); - assertEquals("Returned count of range query must be equal to inclusive range length", upper-lower+1, tTopDocs.totalHits.value ); + assertEquals( + "Returned count of range query must be equal to inclusive range length", + upper - lower + 1, + tTopDocs.totalHits.value); } @Test public void testDoubleRange_8bit() throws Exception { testDoubleRange(8); } - + @Test public void testDoubleRange_6bit() throws Exception { testDoubleRange(6); } - + @Test public void testDoubleRange_4bit() throws Exception { testDoubleRange(4); } - + @Test public void testDoubleRange_2bit() throws Exception { testDoubleRange(2); } - + @Test public void testEqualsAndHash() throws Exception { - QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newLongRange("test1", 4, 10L, 20L, true, true)); - QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newLongRange("test2", 4, 10L, 20L, false, true)); - QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newLongRange("test3", 4, 10L, 20L, true, false)); - QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newLongRange("test4", 4, 10L, 20L, false, false)); - QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newLongRange("test5", 4, 10L, null, true, true)); - QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newLongRange("test6", 4, null, 20L, true, true)); - QueryUtils.checkHashEquals(LegacyNumericRangeQuery.newLongRange("test7", 4, null, null, true, true)); + 
QueryUtils.checkHashEquals( + LegacyNumericRangeQuery.newLongRange("test1", 4, 10L, 20L, true, true)); + QueryUtils.checkHashEquals( + LegacyNumericRangeQuery.newLongRange("test2", 4, 10L, 20L, false, true)); + QueryUtils.checkHashEquals( + LegacyNumericRangeQuery.newLongRange("test3", 4, 10L, 20L, true, false)); + QueryUtils.checkHashEquals( + LegacyNumericRangeQuery.newLongRange("test4", 4, 10L, 20L, false, false)); + QueryUtils.checkHashEquals( + LegacyNumericRangeQuery.newLongRange("test5", 4, 10L, null, true, true)); + QueryUtils.checkHashEquals( + LegacyNumericRangeQuery.newLongRange("test6", 4, null, 20L, true, true)); + QueryUtils.checkHashEquals( + LegacyNumericRangeQuery.newLongRange("test7", 4, null, null, true, true)); QueryUtils.checkEqual( - LegacyNumericRangeQuery.newLongRange("test8", 4, 10L, 20L, true, true), - LegacyNumericRangeQuery.newLongRange("test8", 4, 10L, 20L, true, true) - ); + LegacyNumericRangeQuery.newLongRange("test8", 4, 10L, 20L, true, true), + LegacyNumericRangeQuery.newLongRange("test8", 4, 10L, 20L, true, true)); QueryUtils.checkUnequal( - LegacyNumericRangeQuery.newLongRange("test9", 4, 10L, 20L, true, true), - LegacyNumericRangeQuery.newLongRange("test9", 8, 10L, 20L, true, true) - ); + LegacyNumericRangeQuery.newLongRange("test9", 4, 10L, 20L, true, true), + LegacyNumericRangeQuery.newLongRange("test9", 8, 10L, 20L, true, true)); QueryUtils.checkUnequal( - LegacyNumericRangeQuery.newLongRange("test10a", 4, 10L, 20L, true, true), - LegacyNumericRangeQuery.newLongRange("test10b", 4, 10L, 20L, true, true) - ); + LegacyNumericRangeQuery.newLongRange("test10a", 4, 10L, 20L, true, true), + LegacyNumericRangeQuery.newLongRange("test10b", 4, 10L, 20L, true, true)); QueryUtils.checkUnequal( - LegacyNumericRangeQuery.newLongRange("test11", 4, 10L, 20L, true, true), - LegacyNumericRangeQuery.newLongRange("test11", 4, 20L, 10L, true, true) - ); + LegacyNumericRangeQuery.newLongRange("test11", 4, 10L, 20L, true, true), + LegacyNumericRangeQuery.newLongRange("test11", 4, 20L, 10L, true, true)); QueryUtils.checkUnequal( - LegacyNumericRangeQuery.newLongRange("test12", 4, 10L, 20L, true, true), - LegacyNumericRangeQuery.newLongRange("test12", 4, 10L, 20L, false, true) - ); + LegacyNumericRangeQuery.newLongRange("test12", 4, 10L, 20L, true, true), + LegacyNumericRangeQuery.newLongRange("test12", 4, 10L, 20L, false, true)); QueryUtils.checkUnequal( - LegacyNumericRangeQuery.newLongRange("test13", 4, 10L, 20L, true, true), - LegacyNumericRangeQuery.newFloatRange("test13", 4, 10f, 20f, true, true) - ); - // difference to int range is tested in TestNumericRangeQuery32 + LegacyNumericRangeQuery.newLongRange("test13", 4, 10L, 20L, true, true), + LegacyNumericRangeQuery.newFloatRange("test13", 4, 10f, 20f, true, true)); + // difference to int range is tested in TestNumericRangeQuery32 } } diff --git a/solr/core/src/test/org/apache/solr/legacy/TestNumericTokenStream.java b/solr/core/src/test/org/apache/solr/legacy/TestNumericTokenStream.java index 4cdabba08d5..e05c664973b 100644 --- a/solr/core/src/test/org/apache/solr/legacy/TestNumericTokenStream.java +++ b/solr/core/src/test/org/apache/solr/legacy/TestNumericTokenStream.java @@ -16,15 +16,14 @@ */ package org.apache.solr.legacy; - -import org.apache.lucene.util.AttributeImpl; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; -import org.apache.lucene.analysis.tokenattributes.TypeAttribute; -import 
org.apache.solr.legacy.LegacyNumericTokenStream.LegacyNumericTermAttributeImpl; import org.apache.lucene.analysis.BaseTokenStreamTestCase; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.CharTermAttributeImpl; +import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; +import org.apache.lucene.analysis.tokenattributes.TypeAttribute; +import org.apache.lucene.util.AttributeImpl; +import org.apache.lucene.util.BytesRef; +import org.apache.solr.legacy.LegacyNumericTokenStream.LegacyNumericTermAttributeImpl; @Deprecated public class TestNumericTokenStream extends BaseTokenStreamTestCase { @@ -34,21 +33,33 @@ public class TestNumericTokenStream extends BaseTokenStreamTestCase { public void testLongStream() throws Exception { @SuppressWarnings("resource") - final LegacyNumericTokenStream stream=new LegacyNumericTokenStream().setLongValue(lvalue); + final LegacyNumericTokenStream stream = new LegacyNumericTokenStream().setLongValue(lvalue); final TermToBytesRefAttribute bytesAtt = stream.getAttribute(TermToBytesRefAttribute.class); assertNotNull(bytesAtt); final TypeAttribute typeAtt = stream.getAttribute(TypeAttribute.class); assertNotNull(typeAtt); - final LegacyNumericTokenStream.LegacyNumericTermAttribute numericAtt = stream.getAttribute(LegacyNumericTokenStream.LegacyNumericTermAttribute.class); + final LegacyNumericTokenStream.LegacyNumericTermAttribute numericAtt = + stream.getAttribute(LegacyNumericTokenStream.LegacyNumericTermAttribute.class); assertNotNull(numericAtt); stream.reset(); assertEquals(64, numericAtt.getValueSize()); - for (int shift=0; shift<64; shift+= LegacyNumericUtils.PRECISION_STEP_DEFAULT) { + for (int shift = 0; shift < 64; shift += LegacyNumericUtils.PRECISION_STEP_DEFAULT) { assertTrue("New token is available", stream.incrementToken()); assertEquals("Shift value wrong", shift, numericAtt.getShift()); - assertEquals("Term is incorrectly encoded", lvalue & ~((1L << shift) - 1L), LegacyNumericUtils.prefixCodedToLong(bytesAtt.getBytesRef())); - assertEquals("Term raw value is incorrectly encoded", lvalue & ~((1L << shift) - 1L), numericAtt.getRawValue()); - assertEquals("Type incorrect", (shift == 0) ? LegacyNumericTokenStream.TOKEN_TYPE_FULL_PREC : LegacyNumericTokenStream.TOKEN_TYPE_LOWER_PREC, typeAtt.type()); + assertEquals( + "Term is incorrectly encoded", + lvalue & ~((1L << shift) - 1L), + LegacyNumericUtils.prefixCodedToLong(bytesAtt.getBytesRef())); + assertEquals( + "Term raw value is incorrectly encoded", + lvalue & ~((1L << shift) - 1L), + numericAtt.getRawValue()); + assertEquals( + "Type incorrect", + (shift == 0) + ? 
LegacyNumericTokenStream.TOKEN_TYPE_FULL_PREC + : LegacyNumericTokenStream.TOKEN_TYPE_LOWER_PREC, + typeAtt.type()); } assertFalse("More tokens available", stream.incrementToken()); stream.end(); @@ -57,96 +68,123 @@ public void testLongStream() throws Exception { public void testIntStream() throws Exception { @SuppressWarnings("resource") - final LegacyNumericTokenStream stream=new LegacyNumericTokenStream().setIntValue(ivalue); + final LegacyNumericTokenStream stream = new LegacyNumericTokenStream().setIntValue(ivalue); final TermToBytesRefAttribute bytesAtt = stream.getAttribute(TermToBytesRefAttribute.class); assertNotNull(bytesAtt); final TypeAttribute typeAtt = stream.getAttribute(TypeAttribute.class); assertNotNull(typeAtt); - final LegacyNumericTokenStream.LegacyNumericTermAttribute numericAtt = stream.getAttribute(LegacyNumericTokenStream.LegacyNumericTermAttribute.class); + final LegacyNumericTokenStream.LegacyNumericTermAttribute numericAtt = + stream.getAttribute(LegacyNumericTokenStream.LegacyNumericTermAttribute.class); assertNotNull(numericAtt); stream.reset(); assertEquals(32, numericAtt.getValueSize()); - for (int shift=0; shift<32; shift+= LegacyNumericUtils.PRECISION_STEP_DEFAULT) { + for (int shift = 0; shift < 32; shift += LegacyNumericUtils.PRECISION_STEP_DEFAULT) { assertTrue("New token is available", stream.incrementToken()); assertEquals("Shift value wrong", shift, numericAtt.getShift()); - assertEquals("Term is incorrectly encoded", ivalue & ~((1 << shift) - 1), LegacyNumericUtils.prefixCodedToInt(bytesAtt.getBytesRef())); - assertEquals("Term raw value is incorrectly encoded", ((long) ivalue) & ~((1L << shift) - 1L), numericAtt.getRawValue()); - assertEquals("Type incorrect", (shift == 0) ? LegacyNumericTokenStream.TOKEN_TYPE_FULL_PREC : LegacyNumericTokenStream.TOKEN_TYPE_LOWER_PREC, typeAtt.type()); + assertEquals( + "Term is incorrectly encoded", + ivalue & ~((1 << shift) - 1), + LegacyNumericUtils.prefixCodedToInt(bytesAtt.getBytesRef())); + assertEquals( + "Term raw value is incorrectly encoded", + ((long) ivalue) & ~((1L << shift) - 1L), + numericAtt.getRawValue()); + assertEquals( + "Type incorrect", + (shift == 0) + ? 
LegacyNumericTokenStream.TOKEN_TYPE_FULL_PREC + : LegacyNumericTokenStream.TOKEN_TYPE_LOWER_PREC, + typeAtt.type()); } assertFalse("More tokens available", stream.incrementToken()); stream.end(); stream.close(); } - + public void testNotInitialized() throws Exception { - final LegacyNumericTokenStream stream=new LegacyNumericTokenStream(); - - expectThrows(IllegalStateException.class, () -> { - stream.reset(); - }); + final LegacyNumericTokenStream stream = new LegacyNumericTokenStream(); + + expectThrows( + IllegalStateException.class, + () -> { + stream.reset(); + }); + + expectThrows( + IllegalStateException.class, + () -> { + stream.incrementToken(); + }); - expectThrows(IllegalStateException.class, () -> { - stream.incrementToken(); - }); - stream.close(); } - + public static interface TestAttribute extends CharTermAttribute {} + public static class TestAttributeImpl extends CharTermAttributeImpl implements TestAttribute {} - + public void testCTA() throws Exception { - final LegacyNumericTokenStream stream=new LegacyNumericTokenStream(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - stream.addAttribute(CharTermAttribute.class); - }); + final LegacyNumericTokenStream stream = new LegacyNumericTokenStream(); + IllegalArgumentException e = + expectThrows( + IllegalArgumentException.class, + () -> { + stream.addAttribute(CharTermAttribute.class); + }); assertTrue(e.getMessage().startsWith("LegacyNumericTokenStream does not support")); - e = expectThrows(IllegalArgumentException.class, () -> { - stream.addAttribute(TestAttribute.class); - }); + e = + expectThrows( + IllegalArgumentException.class, + () -> { + stream.addAttribute(TestAttribute.class); + }); assertTrue(e.getMessage().startsWith("LegacyNumericTokenStream does not support")); stream.close(); } - + /** LUCENE-7027 */ public void testCaptureStateAfterExhausted() throws Exception { // default precstep - try (LegacyNumericTokenStream stream=new LegacyNumericTokenStream()) { + try (LegacyNumericTokenStream stream = new LegacyNumericTokenStream()) { // int stream.setIntValue(ivalue); stream.reset(); - while (stream.incrementToken()); + while (stream.incrementToken()) + ; stream.captureState(); stream.end(); stream.captureState(); // long stream.setLongValue(lvalue); stream.reset(); - while (stream.incrementToken()); + while (stream.incrementToken()) + ; stream.captureState(); stream.end(); stream.captureState(); } // huge precstep - try (LegacyNumericTokenStream stream=new LegacyNumericTokenStream(Integer.MAX_VALUE)) { + try (LegacyNumericTokenStream stream = new LegacyNumericTokenStream(Integer.MAX_VALUE)) { // int stream.setIntValue(ivalue); stream.reset(); - while (stream.incrementToken()); + while (stream.incrementToken()) + ; stream.captureState(); stream.end(); stream.captureState(); // long stream.setLongValue(lvalue); stream.reset(); - while (stream.incrementToken()); + while (stream.incrementToken()) + ; stream.captureState(); stream.end(); stream.captureState(); } } - + public void testAttributeClone() throws Exception { LegacyNumericTermAttributeImpl att = new LegacyNumericTermAttributeImpl(); att.init(lvalue, 64, 8, 0); // set some value, to make getBytesRef() work @@ -154,7 +192,7 @@ public void testAttributeClone() throws Exception { assertNotSame(att.getBytesRef(), copy.getBytesRef()); LegacyNumericTermAttributeImpl copy2 = assertCopyIsEqual(att); assertNotSame(att.getBytesRef(), copy2.getBytesRef()); - + // LUCENE-7027 test att.init(lvalue, 64, 8, 64); // Exhausted TokenStream 
-> should return empty BytesRef assertEquals(new BytesRef(), att.getBytesRef()); @@ -165,7 +203,7 @@ public void testAttributeClone() throws Exception { assertEquals(new BytesRef(), copy2.getBytesRef()); assertNotSame(att.getBytesRef(), copy2.getBytesRef()); } - + public static <T extends AttributeImpl> T assertCloneIsEqual(T att) { @SuppressWarnings("unchecked") T clone = (T) att.clone(); @@ -182,5 +220,4 @@ public static <T extends AttributeImpl> T assertCopyIsEqual(T att) throws Except assertEquals("Copied instance's hashcode must be equal", att.hashCode(), copy.hashCode()); return copy; } - } diff --git a/solr/core/src/test/org/apache/solr/logging/TestLogWatcher.java b/solr/core/src/test/org/apache/solr/logging/TestLogWatcher.java index 4532c539c4e..6c7d9149647 100644 --- a/solr/core/src/test/org/apache/solr/logging/TestLogWatcher.java +++ b/solr/core/src/test/org/apache/solr/logging/TestLogWatcher.java @@ -16,6 +16,10 @@ */ package org.apache.solr.logging; +import java.lang.invoke.MethodHandles; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; @@ -26,11 +30,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.lang.invoke.MethodHandles; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.TimeUnit; - public class TestLogWatcher extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private LogWatcherConfig config; @@ -49,8 +48,9 @@ public void before() { public void testLog4jWatcher() throws InterruptedException { LogWatcher watcher = null; int lim = random().nextInt(3) + 2; - // Every time through this loop, insure that, of all the test messages that have been logged, only the current - // test message is present. NOTE: there may be log messages from the superclass the first time around. + // Every time through this loop, ensure that, of all the test messages that have been logged, + // only the current test message is present. NOTE: there may be log messages from the superclass + // the first time around. 
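// The check that comment describes, sketched standalone (watcherHistory() stands in for
// however the test reads the watcher's current messages; it is hypothetical, not a real API):
//
//   List<String> history = watcherHistory();
//   assertTrue(history.contains(msg));        // the message logged in this iteration
//   for (String old : oldMessages) {
//     assertFalse(history.contains(old));     // messages from earlier iterations must be gone
//   }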
List<String> oldMessages = new ArrayList<>(lim); for (int idx = 0; idx < lim; ++idx) { @@ -96,7 +96,10 @@ public void testLog4jWatcher() throws InterruptedException { System.out.println(" " + oldMsg); } - fail("Did not find expected message state, dumped current watcher's messages above, last message added: '" + msg + "'"); + fail( + "Did not find expected message state, dumped current watcher's messages above, last message added: '" + + msg + + "'"); } oldMessages.add(msg); } diff --git a/solr/core/src/test/org/apache/solr/metrics/JvmMetricsTest.java b/solr/core/src/test/org/apache/solr/metrics/JvmMetricsTest.java index 786261210f4..d09a4d4149e 100644 --- a/solr/core/src/test/org/apache/solr/metrics/JvmMetricsTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/JvmMetricsTest.java @@ -16,42 +16,32 @@ */ package org.apache.solr.metrics; +import com.codahale.metrics.Gauge; +import com.codahale.metrics.Metric; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; import java.util.Map; - -import com.codahale.metrics.Gauge; -import com.codahale.metrics.Metric; import org.apache.solr.SolrJettyTestBase; import org.apache.solr.core.NodeConfig; import org.apache.solr.core.SolrXmlConfig; import org.junit.BeforeClass; import org.junit.Test; -/** - * Test {@link OperatingSystemMetricSet} and proper JVM metrics registration. - */ +/** Test {@link OperatingSystemMetricSet} and proper JVM metrics registration. */ public class JvmMetricsTest extends SolrJettyTestBase { - static final String[] STRING_OS_METRICS = { - "arch", - "name", - "version" - }; - static final String[] NUMERIC_OS_METRICS = { - "availableProcessors", - "systemLoadAverage" - }; + static final String[] STRING_OS_METRICS = {"arch", "name", "version"}; + static final String[] NUMERIC_OS_METRICS = {"availableProcessors", "systemLoadAverage"}; static final String[] BUFFER_METRICS = { - "direct.Count", - "direct.MemoryUsed", - "direct.TotalCapacity", - "mapped.Count", - "mapped.MemoryUsed", - "mapped.TotalCapacity" + "direct.Count", + "direct.MemoryUsed", + "direct.TotalCapacity", + "mapped.Count", + "mapped.MemoryUsed", + "mapped.TotalCapacity" }; @BeforeClass @@ -65,16 +55,16 @@ public void testOperatingSystemMetricSet() throws Exception { Map<String, Metric> metrics = set.getMetrics(); assertTrue(metrics.size() > 0); for (String metric : NUMERIC_OS_METRICS) { - Gauge gauge = (Gauge)metrics.get(metric); + Gauge gauge = (Gauge) metrics.get(metric); assertNotNull(metric, gauge); - double value = ((Number)gauge.getValue()).doubleValue(); + double value = ((Number) gauge.getValue()).doubleValue(); // SystemLoadAverage on Windows may be -1.0 assertTrue("unexpected value of " + metric + ": " + value, value >= 0 || value == -1.0); } for (String metric : STRING_OS_METRICS) { - Gauge gauge = (Gauge)metrics.get(metric); + Gauge gauge = (Gauge) metrics.get(metric); assertNotNull(metric, gauge); - String value = (String)gauge.getValue(); + String value = (String) gauge.getValue(); assertNotNull(value); assertFalse(value.isEmpty()); } @@ -89,7 +79,7 @@ public void testAltBufferPoolMetricSet() throws Exception { assertNotNull(name, metrics.get(name)); Object g = metrics.get(name); assertTrue(g instanceof Gauge); - Object v = ((Gauge)g).getValue(); + Object v = ((Gauge) g).getValue(); assertTrue(v instanceof Long); } } @@ -101,17 +91,20 @@ public void testSystemProperties() throws Exception { System.setProperty("basicauth", "foo:bar"); } SolrMetricManager metricManager = jetty.getCoreContainer().getMetricManager(); - Map<String,Metric> metrics = metricManager.registry("solr.jvm").getMetrics(); - MetricsMap map = (MetricsMap)((SolrMetricManager.GaugeWrapper)metrics.get("system.properties")).getGauge(); + Map<String, Metric> metrics = metricManager.registry("solr.jvm").getMetrics(); + MetricsMap map = + (MetricsMap) ((SolrMetricManager.GaugeWrapper) metrics.get("system.properties")).getGauge(); assertNotNull(map); - Map<String,Object> values = map.getValue(); - System.getProperties().forEach((k, v) -> { - if (NodeConfig.NodeConfigBuilder.DEFAULT_HIDDEN_SYS_PROPS.contains(k)) { - assertNull("hidden property " + k + " present!", values.get(k)); - } else { - assertEquals(v, values.get(String.valueOf(k))); - } - }); + Map<String, Object> values = map.getValue(); + System.getProperties() + .forEach( + (k, v) -> { + if (NodeConfig.NodeConfigBuilder.DEFAULT_HIDDEN_SYS_PROPS.contains(k)) { + assertNull("hidden property " + k + " present!", values.get(k)); + } else { + assertEquals(v, values.get(String.valueOf(k))); + } + }); } @Test @@ -121,29 +114,46 @@ public void testHiddenSysProps() throws Exception { // default config String solrXml = Files.readString(home.resolve("solr.xml"), StandardCharsets.UTF_8); NodeConfig config = SolrXmlConfig.fromString(home, solrXml); - NodeConfig.NodeConfigBuilder.DEFAULT_HIDDEN_SYS_PROPS.forEach(s -> { - assertTrue(s, config.getMetricsConfig().getHiddenSysProps().contains(s)); - }); + NodeConfig.NodeConfigBuilder.DEFAULT_HIDDEN_SYS_PROPS.forEach( + s -> { + assertTrue(s, config.getMetricsConfig().getHiddenSysProps().contains(s)); + }); // custom config solrXml = Files.readString(home.resolve("solr-hiddensysprops.xml"), StandardCharsets.UTF_8); NodeConfig config2 = SolrXmlConfig.fromString(home, solrXml); - Arrays.asList("foo", "bar", "baz").forEach(s -> { - assertTrue(s, config2.getMetricsConfig().getHiddenSysProps().contains(s)); - }); + Arrays.asList("foo", "bar", "baz") + .forEach( + s -> { + assertTrue(s, config2.getMetricsConfig().getHiddenSysProps().contains(s)); + }); } @Test public void testSetupJvmMetrics() throws Exception { SolrMetricManager metricManager = jetty.getCoreContainer().getMetricManager(); - Map<String,Metric> metrics = metricManager.registry("solr.jvm").getMetrics(); + Map<String, Metric> metrics = metricManager.registry("solr.jvm").getMetrics(); assertTrue(metrics.size() > 0); - assertTrue(metrics.toString(), metrics.entrySet().stream().filter(e -> e.getKey().startsWith("buffers.")).count() > 0); - assertTrue(metrics.toString(), metrics.entrySet().stream().filter(e -> e.getKey().startsWith("classes.")).count() > 0); - assertTrue(metrics.toString(), metrics.entrySet().stream().filter(e -> e.getKey().startsWith("os.")).count() > 0); - assertTrue(metrics.toString(), metrics.entrySet().stream().filter(e -> e.getKey().startsWith("gc.")).count() > 0); - assertTrue(metrics.toString(), metrics.entrySet().stream().filter(e -> e.getKey().startsWith("memory.")).count() > 0); - assertTrue(metrics.toString(), metrics.entrySet().stream().filter(e -> e.getKey().startsWith("threads.")).count() > 0); - assertTrue(metrics.toString(), metrics.entrySet().stream().filter(e -> e.getKey().startsWith("system.")).count() > 0); + assertTrue( + metrics.toString(), + metrics.entrySet().stream().filter(e -> e.getKey().startsWith("buffers.")).count() > 0); + assertTrue( + metrics.toString(), + metrics.entrySet().stream().filter(e -> e.getKey().startsWith("classes.")).count() > 0); + assertTrue( + metrics.toString(), + metrics.entrySet().stream().filter(e -> e.getKey().startsWith("os.")).count() > 0); + assertTrue( 
metrics.toString(), + metrics.entrySet().stream().filter(e -> e.getKey().startsWith("gc.")).count() > 0); + assertTrue( + metrics.toString(), + metrics.entrySet().stream().filter(e -> e.getKey().startsWith("memory.")).count() > 0); + assertTrue( + metrics.toString(), + metrics.entrySet().stream().filter(e -> e.getKey().startsWith("threads.")).count() > 0); + assertTrue( + metrics.toString(), + metrics.entrySet().stream().filter(e -> e.getKey().startsWith("system.")).count() > 0); } } diff --git a/solr/core/src/test/org/apache/solr/metrics/MetricsConfigTest.java b/solr/core/src/test/org/apache/solr/metrics/MetricsConfigTest.java index 2cefedf3858..444829e993b 100644 --- a/solr/core/src/test/org/apache/solr/metrics/MetricsConfigTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/MetricsConfigTest.java @@ -16,17 +16,16 @@ */ package org.apache.solr.metrics; -import java.io.File; -import java.io.InputStream; -import java.util.Map; -import java.util.Properties; - import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import com.codahale.metrics.Clock; import com.codahale.metrics.ExponentiallyDecayingReservoir; import com.codahale.metrics.Reservoir; import com.codahale.metrics.SlidingTimeWindowReservoir; import com.codahale.metrics.UniformReservoir; +import java.io.File; +import java.io.InputStream; +import java.util.Map; +import java.util.Properties; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.core.NodeConfig; import org.apache.solr.core.SolrXmlConfig; @@ -37,12 +36,9 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -/** - * - */ +/** */ public class MetricsConfigTest extends SolrTestCaseJ4 { - @Rule - public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); + @Rule public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); // tmp dir, cleaned up automatically. 
private static File solrHome = null; @@ -60,14 +56,15 @@ public static void cleanupLoader() throws Exception { @Test public void testDefaults() throws Exception { NodeConfig cfg = loadNodeConfig("solr-metricsconfig.xml"); - SolrMetricManager mgr = new SolrMetricManager(cfg.getSolrResourceLoader(), cfg.getMetricsConfig()); + SolrMetricManager mgr = + new SolrMetricManager(cfg.getSolrResourceLoader(), cfg.getMetricsConfig()); assertTrue(mgr.getCounterSupplier() instanceof MetricSuppliers.DefaultCounterSupplier); assertTrue(mgr.getMeterSupplier() instanceof MetricSuppliers.DefaultMeterSupplier); assertTrue(mgr.getTimerSupplier() instanceof MetricSuppliers.DefaultTimerSupplier); assertTrue(mgr.getHistogramSupplier() instanceof MetricSuppliers.DefaultHistogramSupplier); - Clock clk = ((MetricSuppliers.DefaultTimerSupplier)mgr.getTimerSupplier()).clk; + Clock clk = ((MetricSuppliers.DefaultTimerSupplier) mgr.getTimerSupplier()).clk; assertTrue(clk instanceof Clock.UserTimeClock); - Reservoir rsv = ((MetricSuppliers.DefaultTimerSupplier)mgr.getTimerSupplier()).getReservoir(); + Reservoir rsv = ((MetricSuppliers.DefaultTimerSupplier) mgr.getTimerSupplier()).getReservoir(); assertTrue(rsv instanceof ExponentiallyDecayingReservoir); } @@ -78,14 +75,15 @@ public void testCustomReservoir() throws Exception { System.setProperty("histogram.window", "600"); System.setProperty("histogram.reservoir", SlidingTimeWindowReservoir.class.getName()); NodeConfig cfg = loadNodeConfig("solr-metricsconfig.xml"); - SolrMetricManager mgr = new SolrMetricManager(cfg.getSolrResourceLoader(), cfg.getMetricsConfig()); + SolrMetricManager mgr = + new SolrMetricManager(cfg.getSolrResourceLoader(), cfg.getMetricsConfig()); assertTrue(mgr.getCounterSupplier() instanceof MetricSuppliers.DefaultCounterSupplier); assertTrue(mgr.getMeterSupplier() instanceof MetricSuppliers.DefaultMeterSupplier); assertTrue(mgr.getTimerSupplier() instanceof MetricSuppliers.DefaultTimerSupplier); assertTrue(mgr.getHistogramSupplier() instanceof MetricSuppliers.DefaultHistogramSupplier); - Reservoir rsv = ((MetricSuppliers.DefaultTimerSupplier)mgr.getTimerSupplier()).getReservoir(); + Reservoir rsv = ((MetricSuppliers.DefaultTimerSupplier) mgr.getTimerSupplier()).getReservoir(); assertTrue(rsv instanceof UniformReservoir); - rsv = ((MetricSuppliers.DefaultHistogramSupplier)mgr.getHistogramSupplier()).getReservoir(); + rsv = ((MetricSuppliers.DefaultHistogramSupplier) mgr.getHistogramSupplier()).getReservoir(); assertTrue(rsv instanceof SlidingTimeWindowReservoir); } @@ -96,24 +94,26 @@ public void testCustomSupplier() throws Exception { System.setProperty("timer.class", MockTimerSupplier.class.getName()); System.setProperty("histogram.class", MockHistogramSupplier.class.getName()); NodeConfig cfg = loadNodeConfig("solr-metricsconfig.xml"); - SolrMetricManager mgr = new SolrMetricManager(cfg.getSolrResourceLoader(), cfg.getMetricsConfig()); + SolrMetricManager mgr = + new SolrMetricManager(cfg.getSolrResourceLoader(), cfg.getMetricsConfig()); assertTrue(mgr.getCounterSupplier() instanceof MockCounterSupplier); assertTrue(mgr.getMeterSupplier() instanceof MockMeterSupplier); assertTrue(mgr.getTimerSupplier() instanceof MockTimerSupplier); assertTrue(mgr.getHistogramSupplier() instanceof MockHistogramSupplier); // assert setter-based configuration - MockCounterSupplier mockCounterSupplier = ((MockCounterSupplier)mgr.getCounterSupplier()); + MockCounterSupplier mockCounterSupplier = ((MockCounterSupplier) mgr.getCounterSupplier()); assertEquals("bar", 
mockCounterSupplier.foo); - MockMeterSupplier mockMeterSupplier = ((MockMeterSupplier)mgr.getMeterSupplier()); + MockMeterSupplier mockMeterSupplier = ((MockMeterSupplier) mgr.getMeterSupplier()); assertEquals("bar", mockMeterSupplier.foo); - MockTimerSupplier mockTimerSupplier = ((MockTimerSupplier)mgr.getTimerSupplier()); + MockTimerSupplier mockTimerSupplier = ((MockTimerSupplier) mgr.getTimerSupplier()); assertEquals(true, mockTimerSupplier.boolParam); assertEquals("strParam", mockTimerSupplier.strParam); assertEquals(-100, mockTimerSupplier.intParam); // assert PluginInfoInitialized-based configuration - MockHistogramSupplier mockHistogramSupplier = ((MockHistogramSupplier)mgr.getHistogramSupplier()); + MockHistogramSupplier mockHistogramSupplier = + ((MockHistogramSupplier) mgr.getHistogramSupplier()); assertNotNull(mockHistogramSupplier.info); } @@ -121,18 +121,19 @@ public void testCustomSupplier() throws Exception { public void testDisabledMetrics() throws Exception { System.setProperty("metricsEnabled", "false"); NodeConfig cfg = loadNodeConfig("solr-metricsconfig.xml"); - SolrMetricManager mgr = new SolrMetricManager(cfg.getSolrResourceLoader(), cfg.getMetricsConfig()); + SolrMetricManager mgr = + new SolrMetricManager(cfg.getSolrResourceLoader(), cfg.getMetricsConfig()); assertTrue(mgr.getCounterSupplier() instanceof MetricSuppliers.NoOpCounterSupplier); assertTrue(mgr.getMeterSupplier() instanceof MetricSuppliers.NoOpMeterSupplier); assertTrue(mgr.getTimerSupplier() instanceof MetricSuppliers.NoOpTimerSupplier); assertTrue(mgr.getHistogramSupplier() instanceof MetricSuppliers.NoOpHistogramSupplier); - } @Test public void testMissingValuesConfig() throws Exception { NodeConfig cfg = loadNodeConfig("solr-metricsconfig1.xml"); - SolrMetricManager mgr = new SolrMetricManager(cfg.getSolrResourceLoader(), cfg.getMetricsConfig()); + SolrMetricManager mgr = + new SolrMetricManager(cfg.getSolrResourceLoader(), cfg.getMetricsConfig()); assertEquals("nullNumber", null, mgr.nullNumber()); assertEquals("notANumber", -1, mgr.notANumber()); assertEquals("nullNumber", "", mgr.nullString()); @@ -144,6 +145,6 @@ public void testMissingValuesConfig() throws Exception { private NodeConfig loadNodeConfig(String config) throws Exception { InputStream is = MetricsConfigTest.class.getResourceAsStream("/solr/" + config); - return SolrXmlConfig.fromInputStream(TEST_PATH(), is, new Properties()); //TODO pass in props + return SolrXmlConfig.fromInputStream(TEST_PATH(), is, new Properties()); // TODO pass in props } } diff --git a/solr/core/src/test/org/apache/solr/metrics/MetricsDisabledCloudTest.java b/solr/core/src/test/org/apache/solr/metrics/MetricsDisabledCloudTest.java index fcd1be14d4e..3c626a8f460 100644 --- a/solr/core/src/test/org/apache/solr/metrics/MetricsDisabledCloudTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/MetricsDisabledCloudTest.java @@ -25,17 +25,15 @@ import org.junit.BeforeClass; import org.junit.Test; -/** - * - */ +/** */ public class MetricsDisabledCloudTest extends SolrCloudTestCase { @BeforeClass public static void startCluster() throws Exception { System.setProperty("metricsEnabled", "false"); configureCluster(2).configure(); - CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection("test", - "config", 1, 2); + CollectionAdminRequest.Create create = + CollectionAdminRequest.createCollection("test", "config", 1, 2); } @Test @@ -43,25 +41,39 @@ public void testBasic() throws Exception { NodeConfig cfg = 
cluster.getRandomJetty(random()).getCoreContainer().getNodeConfig(); MetricsConfig metricsConfig = cfg.getMetricsConfig(); assertFalse("metrics should be disabled", metricsConfig.isEnabled()); - SolrMetricManager metricManager = cluster.getRandomJetty(random()).getCoreContainer().getMetricManager(); - assertTrue("wrong type of supplier: " + metricManager.getCounterSupplier(), + SolrMetricManager metricManager = + cluster.getRandomJetty(random()).getCoreContainer().getMetricManager(); + assertTrue( + "wrong type of supplier: " + metricManager.getCounterSupplier(), metricManager.getCounterSupplier() instanceof MetricSuppliers.NoOpCounterSupplier); - assertTrue("wrong type of supplier: " + metricManager.getHistogramSupplier(), + assertTrue( + "wrong type of supplier: " + metricManager.getHistogramSupplier(), metricManager.getHistogramSupplier() instanceof MetricSuppliers.NoOpHistogramSupplier); - assertTrue("wrong type of supplier: " + metricManager.getTimerSupplier(), + assertTrue( + "wrong type of supplier: " + metricManager.getTimerSupplier(), metricManager.getTimerSupplier() instanceof MetricSuppliers.NoOpTimerSupplier); - assertTrue("wrong type of supplier: " + metricManager.getMeterSupplier(), + assertTrue( + "wrong type of supplier: " + metricManager.getMeterSupplier(), metricManager.getMeterSupplier() instanceof MetricSuppliers.NoOpMeterSupplier); for (String registryName : metricManager.registryNames()) { if (!registryName.startsWith("solr.core.")) { continue; } MetricRegistry registry = metricManager.registry(registryName); - registry.getMetrics().forEach((name, metric) -> { - assertTrue("should be NoOpMetric but was: " + name + "=" + - metric + "(" + metric.getClass() + ")", - metric instanceof MetricSuppliers.NoOpMetric); - }); + registry + .getMetrics() + .forEach( + (name, metric) -> { + assertTrue( + "should be NoOpMetric but was: " + + name + + "=" + + metric + + "(" + + metric.getClass() + + ")", + metric instanceof MetricSuppliers.NoOpMetric); + }); } } diff --git a/solr/core/src/test/org/apache/solr/metrics/MockCounterSupplier.java b/solr/core/src/test/org/apache/solr/metrics/MockCounterSupplier.java index d6ca4079ef7..702a390d73e 100644 --- a/solr/core/src/test/org/apache/solr/metrics/MockCounterSupplier.java +++ b/solr/core/src/test/org/apache/solr/metrics/MockCounterSupplier.java @@ -19,9 +19,7 @@ import com.codahale.metrics.Counter; import com.codahale.metrics.MetricRegistry; -/** - * - */ +/** */ public class MockCounterSupplier implements MetricRegistry.MetricSupplier { public String foo; diff --git a/solr/core/src/test/org/apache/solr/metrics/MockHistogramSupplier.java b/solr/core/src/test/org/apache/solr/metrics/MockHistogramSupplier.java index 3df72af470e..4bba36842cc 100644 --- a/solr/core/src/test/org/apache/solr/metrics/MockHistogramSupplier.java +++ b/solr/core/src/test/org/apache/solr/metrics/MockHistogramSupplier.java @@ -22,10 +22,9 @@ import org.apache.solr.core.PluginInfo; import org.apache.solr.util.plugin.PluginInfoInitialized; -/** - * - */ -public class MockHistogramSupplier implements MetricRegistry.MetricSupplier, PluginInfoInitialized { +/** */ +public class MockHistogramSupplier + implements MetricRegistry.MetricSupplier, PluginInfoInitialized { public PluginInfo info; @Override diff --git a/solr/core/src/test/org/apache/solr/metrics/MockMeterSupplier.java b/solr/core/src/test/org/apache/solr/metrics/MockMeterSupplier.java index c035244ac9d..eca764d6e31 100644 --- a/solr/core/src/test/org/apache/solr/metrics/MockMeterSupplier.java +++ 
b/solr/core/src/test/org/apache/solr/metrics/MockMeterSupplier.java @@ -19,9 +19,7 @@ import com.codahale.metrics.Meter; import com.codahale.metrics.MetricRegistry; -/** - * - */ +/** */ public class MockMeterSupplier implements MetricRegistry.MetricSupplier<Meter> { public String foo; diff --git a/solr/core/src/test/org/apache/solr/metrics/MockTimerSupplier.java b/solr/core/src/test/org/apache/solr/metrics/MockTimerSupplier.java index aa400d942af..599abc7ef89 100644 --- a/solr/core/src/test/org/apache/solr/metrics/MockTimerSupplier.java +++ b/solr/core/src/test/org/apache/solr/metrics/MockTimerSupplier.java @@ -19,9 +19,7 @@ import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; -/** - * - */ +/** */ public class MockTimerSupplier implements MetricRegistry.MetricSupplier<Timer> { public boolean boolParam; public String strParam; diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrCoreMetricManagerTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrCoreMetricManagerTest.java index 141537b9fcf..f82440f6bdc 100644 --- a/solr/core/src/test/org/apache/solr/metrics/SolrCoreMetricManagerTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/SolrCoreMetricManagerTest.java @@ -16,15 +16,14 @@ */ package org.apache.solr.metrics; +import com.codahale.metrics.Counter; +import com.codahale.metrics.Metric; +import com.codahale.metrics.MetricRegistry; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.Random; import java.util.stream.Collectors; - -import com.codahale.metrics.Counter; -import com.codahale.metrics.Metric; -import com.codahale.metrics.MetricRegistry; import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.CoreAdminParams; @@ -65,14 +64,16 @@ public void testRegisterMetrics() { String scope = SolrMetricTestUtils.getRandomScope(random); SolrInfoBean.Category category = SolrMetricTestUtils.getRandomCategory(random); Map<String, Counter> metrics = SolrMetricTestUtils.getRandomMetrics(random); - SolrMetricProducer producer = SolrMetricTestUtils.getProducerOf(metricManager, category, scope, metrics); + SolrMetricProducer producer = + SolrMetricTestUtils.getProducerOf(metricManager, category, scope, metrics); try { coreMetricManager.registerMetricProducer(scope, producer); assertNotNull(scope); assertNotNull(category); assertRegistered(scope, metrics, coreMetricManager); } catch (final IllegalArgumentException e) { - assertTrue("expected at least one null but got: scope="+scope+", category="+category, + assertTrue( + "expected at least one null but got: scope=" + scope + ", category=" + category, (scope == null || category == null)); assertRegistered(scope, new HashMap<>(), coreMetricManager); } @@ -88,11 +89,13 @@ public void testRegisterMetricsWithReplacements() { int iterations = TestUtil.nextInt(random, 0, MAX_ITERATIONS); for (int i = 0; i < iterations; ++i) { - Map<String, Counter> metrics = SolrMetricTestUtils.getRandomMetricsWithReplacements(random, registered); + Map<String, Counter> metrics = + SolrMetricTestUtils.getRandomMetricsWithReplacements(random, registered); if (metrics.isEmpty()) { continue; } - SolrMetricProducer producer = SolrMetricTestUtils.getProducerOf(metricManager, category, scope, metrics); + SolrMetricProducer producer = + SolrMetricTestUtils.getProducerOf(metricManager, category, scope, metrics); coreMetricManager.registerMetricProducer(scope, producer); registered.putAll(metrics); assertRegistered(scope, registered, coreMetricManager); @@ -116,34 +119,47 @@ public void
testLoadReporter() throws Exception { if (shouldDefineConfigurable) attrs.put("configurable", configurable); boolean shouldDefinePlugin = random.nextBoolean(); - PluginInfo pluginInfo = shouldDefinePlugin ? new PluginInfo(TestUtil.randomUnicodeString(random), attrs) : null; + PluginInfo pluginInfo = + shouldDefinePlugin ? new PluginInfo(TestUtil.randomUnicodeString(random), attrs) : null; try { - metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore(), - pluginInfo, coreMetricManager.getTag()); + metricManager.loadReporter( + coreMetricManager.getRegistryName(), + coreMetricManager.getCore(), + pluginInfo, + coreMetricManager.getTag()); assertNotNull(pluginInfo); - Map<String, SolrMetricReporter> reporters = metricManager.getReporters(coreMetricManager.getRegistryName()); - assertTrue("reporters.size should be > 0, but was + " + reporters.size(), reporters.size() > 0); - assertNotNull("reporter " + reporterName + " not present among " + reporters, reporters.get(taggedName)); - assertTrue("wrong reporter class: " + reporters.get(taggedName), reporters.get(taggedName) instanceof MockMetricReporter); + Map<String, SolrMetricReporter> reporters = + metricManager.getReporters(coreMetricManager.getRegistryName()); + assertTrue( + "reporters.size should be > 0, but was " + reporters.size(), reporters.size() > 0); + assertNotNull( + "reporter " + reporterName + " not present among " + reporters, + reporters.get(taggedName)); + assertTrue( + "wrong reporter class: " + reporters.get(taggedName), + reporters.get(taggedName) instanceof MockMetricReporter); } catch (IllegalArgumentException e) { assertTrue(pluginInfo == null || attrs.get("configurable") == null); assertNull(metricManager.getReporters(coreMetricManager.getRegistryName()).get(taggedName)); } } - private void assertRegistered(String scope, Map<String, Counter> newMetrics, SolrCoreMetricManager coreMetricManager) { + private void assertRegistered( + String scope, Map<String, Counter> newMetrics, SolrCoreMetricManager coreMetricManager) { if (scope == null || newMetrics == null) { return; } String filter = "." + scope + "."; MetricRegistry registry = metricManager.registry(coreMetricManager.getRegistryName()); - assertEquals(newMetrics.size(), registry.getMetrics(). - keySet().stream().filter(s -> s.contains(filter)).count()); - - Map registeredMetrics = registry.getMetrics(). - entrySet().stream().filter(e -> e.getKey() != null && e.getKey().contains(filter)).
- collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue())); + assertEquals( + newMetrics.size(), + registry.getMetrics().keySet().stream().filter(s -> s.contains(filter)).count()); + + Map registeredMetrics = + registry.getMetrics().entrySet().stream() + .filter(e -> e.getKey() != null && e.getKey().contains(filter)) + .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue())); for (Map.Entry entry : registeredMetrics.entrySet()) { String name = entry.getKey(); Metric expectedMetric = entry.getValue(); diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java index 916afbf3906..54f1f55d92f 100644 --- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java @@ -17,14 +17,13 @@ package org.apache.solr.metrics; +import com.codahale.metrics.Counter; +import com.codahale.metrics.Metric; +import com.codahale.metrics.MetricRegistry; import java.util.HashMap; import java.util.Map; import java.util.Random; import java.util.Set; - -import com.codahale.metrics.Counter; -import com.codahale.metrics.Metric; -import com.codahale.metrics.MetricRegistry; import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.util.NamedList; @@ -48,10 +47,12 @@ public void testSwapRegistries() throws Exception { String toName = "to-" + TestUtil.randomSimpleString(r, 1, 10); // register test metrics for (Map.Entry entry : metrics1.entrySet()) { - metricManager.registerMetric(null, fromName, entry.getValue(), false, entry.getKey(), "metrics1"); + metricManager.registerMetric( + null, fromName, entry.getValue(), false, entry.getKey(), "metrics1"); } for (Map.Entry entry : metrics2.entrySet()) { - metricManager.registerMetric(null, toName, entry.getValue(), false, entry.getKey(), "metrics2"); + metricManager.registerMetric( + null, toName, entry.getValue(), false, entry.getKey(), "metrics2"); } assertEquals(metrics1.size(), metricManager.registry(fromName).getMetrics().size()); assertEquals(metrics2.size(), metricManager.registry(toName).getMetrics().size()); @@ -96,7 +97,11 @@ public void testRegisterAll() throws Exception { // this should re-register everything, and no errors metricManager.registerAll(registryName, mr, SolrMetricManager.ResolutionStrategy.REPLACE); // this should produce error - expectThrows(IllegalArgumentException.class, () -> metricManager.registerAll(registryName, mr, SolrMetricManager.ResolutionStrategy.ERROR)); + expectThrows( + IllegalArgumentException.class, + () -> + metricManager.registerAll( + registryName, mr, SolrMetricManager.ResolutionStrategy.ERROR)); } @Test @@ -109,13 +114,16 @@ public void testClearMetrics() throws Exception { String registryName = TestUtil.randomSimpleString(r, 1, 10); for (Map.Entry entry : metrics.entrySet()) { - metricManager.registerMetric(null, registryName, entry.getValue(), false, entry.getKey(), "foo", "bar"); + metricManager.registerMetric( + null, registryName, entry.getValue(), false, entry.getKey(), "foo", "bar"); } for (Map.Entry entry : metrics.entrySet()) { - metricManager.registerMetric(null, registryName, entry.getValue(), false, entry.getKey(), "foo", "baz"); + metricManager.registerMetric( + null, registryName, entry.getValue(), false, entry.getKey(), "foo", "baz"); } for (Map.Entry entry : metrics.entrySet()) { - metricManager.registerMetric(null, registryName, entry.getValue(), false, entry.getKey(), 
"foo"); + metricManager.registerMetric( + null, registryName, entry.getValue(), false, entry.getKey(), "foo"); } assertEquals(metrics.size() * 3, metricManager.registry(registryName).getMetrics().size()); @@ -170,7 +178,8 @@ public void testRegistryName() throws Exception { result = SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, result); assertEquals("solr.core." + name + ".collection1", result); // try it with already prefixed name but with additional segments - result = SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, result, "shard1", "replica1"); + result = + SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, result, "shard1", "replica1"); assertEquals("solr.core." + name + ".collection1.shard1.replica1", result); } @@ -180,56 +189,64 @@ public void testReporters() throws Exception { try (SolrResourceLoader loader = new SolrResourceLoader(createTempDir())) { SolrMetricManager metricManager = new SolrMetricManager(); - PluginInfo[] plugins = new PluginInfo[] { - createPluginInfo("universal_foo", null, null), - createPluginInfo("multigroup_foo", "jvm, node, core", null), - createPluginInfo("multiregistry_foo", null, "solr.node, solr.core.collection1"), - createPluginInfo("specific_foo", null, "solr.core.collection1"), - createPluginInfo("node_foo", "node", null), - createPluginInfo("core_foo", "core", null) - }; + PluginInfo[] plugins = + new PluginInfo[] { + createPluginInfo("universal_foo", null, null), + createPluginInfo("multigroup_foo", "jvm, node, core", null), + createPluginInfo("multiregistry_foo", null, "solr.node, solr.core.collection1"), + createPluginInfo("specific_foo", null, "solr.core.collection1"), + createPluginInfo("node_foo", "node", null), + createPluginInfo("core_foo", "core", null) + }; String tag = "xyz"; metricManager.loadReporters(plugins, loader, null, null, tag, SolrInfoBean.Group.node); - Map reporters = metricManager.getReporters - (SolrMetricManager.getRegistryName(SolrInfoBean.Group.node)); - + Map reporters = + metricManager.getReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.node)); + assertEquals(4, reporters.size()); assertTrue(reporters.containsKey("universal_foo@" + tag)); assertTrue(reporters.containsKey("multigroup_foo@" + tag)); assertTrue(reporters.containsKey("node_foo@" + tag)); assertTrue(reporters.containsKey("multiregistry_foo@" + tag)); - - metricManager.loadReporters(plugins, loader, null, null, tag, SolrInfoBean.Group.core, "collection1"); - reporters = metricManager.getReporters - (SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, "collection1")); - + + metricManager.loadReporters( + plugins, loader, null, null, tag, SolrInfoBean.Group.core, "collection1"); + reporters = + metricManager.getReporters( + SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, "collection1")); + assertEquals(5, reporters.size()); assertTrue(reporters.containsKey("universal_foo@" + tag)); assertTrue(reporters.containsKey("multigroup_foo@" + tag)); assertTrue(reporters.containsKey("specific_foo@" + tag)); assertTrue(reporters.containsKey("core_foo@" + tag)); assertTrue(reporters.containsKey("multiregistry_foo@" + tag)); - + metricManager.loadReporters(plugins, loader, null, null, tag, SolrInfoBean.Group.jvm); - reporters = metricManager.getReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.jvm)); - + reporters = + metricManager.getReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.jvm)); + assertEquals(2, reporters.size()); assertTrue(reporters.containsKey("universal_foo@" + tag)); 
assertTrue(reporters.containsKey("multigroup_foo@" + tag)); - + metricManager.removeRegistry("solr.jvm"); - reporters = metricManager.getReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.jvm)); - + reporters = + metricManager.getReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.jvm)); + assertEquals(0, reporters.size()); - + metricManager.removeRegistry("solr.node"); - reporters = metricManager.getReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.node)); - + reporters = + metricManager.getReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.node)); + assertEquals(0, reporters.size()); - + metricManager.removeRegistry("solr.core.collection1"); - reporters = metricManager.getReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, "collection1")); - + reporters = + metricManager.getReporters( + SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, "collection1")); - + assertEquals(0, reporters.size()); } } @@ -240,7 +257,7 @@ public void testDefaultCloudReporterPeriodUnchanged() throws Exception { } private PluginInfo createPluginInfo(String name, String group, String registry) { - Map<String,String> attrs = new HashMap<>(); + Map<String, String> attrs = new HashMap<>(); attrs.put("name", name); attrs.put("class", MockMetricReporter.class.getName()); if (group != null) { diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricReporterTest.java index 7d255a3843e..bdb996297a0 100644 --- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricReporterTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricReporterTest.java @@ -19,7 +19,6 @@ import java.util.HashMap; import java.util.Map; import java.util.Random; - import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.CoreAdminParams; diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricTestUtils.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricTestUtils.java index 1f5f614fe8e..12ce35affcd 100644 --- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricTestUtils.java +++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricTestUtils.java @@ -16,34 +16,38 @@ */ package org.apache.solr.metrics; +import com.codahale.metrics.Counter; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import java.util.Random; - -import com.codahale.metrics.Counter; import org.apache.lucene.util.TestUtil; import org.apache.solr.core.SolrInfoBean; public final class SolrMetricTestUtils { - private static final int MAX_ITERATIONS = 100; - private static final SolrInfoBean.Category CATEGORIES[] = SolrInfoBean.Category.values(); + private static final int MAX_ITERATIONS = 100; + private static final SolrInfoBean.Category CATEGORIES[] = SolrInfoBean.Category.values(); public static String getRandomScope(Random random) { return getRandomScope(random, random.nextBoolean()); } public static String getRandomScope(Random random, boolean shouldDefineScope) { - return shouldDefineScope ? TestUtil.randomSimpleString(random, 5, 10) : null; // must be simple string for JMX publishing + return shouldDefineScope + ?
TestUtil.randomSimpleString(random, 5, 10) + : null; // must be simple string for JMX publishing } public static SolrInfoBean.Category getRandomCategory(Random random) { return getRandomCategory(random, random.nextBoolean()); } - public static SolrInfoBean.Category getRandomCategory(Random random, boolean shouldDefineCategory) { - return shouldDefineCategory ? CATEGORIES[TestUtil.nextInt(random, 0, CATEGORIES.length - 1)] : null; + public static SolrInfoBean.Category getRandomCategory( + Random random, boolean shouldDefineCategory) { + return shouldDefineCategory + ? CATEGORIES[TestUtil.nextInt(random, 0, CATEGORIES.length - 1)] + : null; } public static Map getRandomMetrics(Random random) { @@ -56,16 +60,19 @@ public static Map getRandomMetrics(Random random, boolean shoul public static final String SUFFIX = "_testing"; - public static Map getRandomMetricsWithReplacements(Random random, Map existing) { + public static Map getRandomMetricsWithReplacements( + Random random, Map existing) { HashMap metrics = new HashMap<>(); ArrayList existingKeys = new ArrayList<>(existing.keySet()); int numMetrics = TestUtil.nextInt(random, 1, MAX_ITERATIONS); for (int i = 0; i < numMetrics; ++i) { boolean shouldReplaceMetric = !existing.isEmpty() && random.nextBoolean(); - String name = shouldReplaceMetric - ? existingKeys.get(TestUtil.nextInt(random, 0, existingKeys.size() - 1)) - : TestUtil.randomSimpleString(random, 5, 10) + SUFFIX; // must be simple string for JMX publishing + String name = + shouldReplaceMetric + ? existingKeys.get(TestUtil.nextInt(random, 0, existingKeys.size() - 1)) + : TestUtil.randomSimpleString(random, 5, 10) + + SUFFIX; // must be simple string for JMX publishing Counter counter = new Counter(); counter.inc(random.nextLong()); @@ -75,9 +82,14 @@ public static Map getRandomMetricsWithReplacements(Random rando return metrics; } - public static SolrMetricProducer getProducerOf(SolrMetricManager metricManager, SolrInfoBean.Category category, String scope, Map metrics) { + public static SolrMetricProducer getProducerOf( + SolrMetricManager metricManager, + SolrInfoBean.Category category, + String scope, + Map metrics) { return new SolrMetricProducer() { SolrMetricsContext solrMetricsContext; + @Override public void initializeMetrics(SolrMetricsContext parentContext, String scope) { this.solrMetricsContext = parentContext.getChildContext(this); @@ -99,11 +111,14 @@ public SolrMetricsContext getSolrMetricsContext() { @Override public String toString() { - return "SolrMetricProducer.of{" + - "\ncategory=" + category + - "\nscope=" + scope + - "\nmetrics=" + metrics + - "\n}"; + return "SolrMetricProducer.of{" + + "\ncategory=" + + category + + "\nscope=" + + scope + + "\nmetrics=" + + metrics + + "\n}"; } }; } diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java index f527cc858b1..8bb3b415989 100644 --- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java @@ -17,16 +17,15 @@ package org.apache.solr.metrics; +import com.codahale.metrics.Gauge; +import com.codahale.metrics.Metric; +import com.codahale.metrics.MetricRegistry; +import com.codahale.metrics.Timer; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.Map; import java.util.Random; - -import com.codahale.metrics.Gauge; -import com.codahale.metrics.Metric; 
-import com.codahale.metrics.MetricRegistry; -import com.codahale.metrics.Timer; import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.core.CoreContainer; @@ -51,8 +50,12 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 { private static final String SPECIFIC = "specific"; private static final String MULTIGROUP = "multigroup"; private static final String MULTIREGISTRY = "multiregistry"; - private static final String[] INITIAL_REPORTERS = {REPORTER_NAMES[0], REPORTER_NAMES[1], UNIVERSAL, SPECIFIC, MULTIGROUP, MULTIREGISTRY}; - private static final String[] RENAMED_REPORTERS = {REPORTER_NAMES[0], REPORTER_NAMES[1], UNIVERSAL, MULTIGROUP}; + private static final String[] INITIAL_REPORTERS = { + REPORTER_NAMES[0], REPORTER_NAMES[1], UNIVERSAL, SPECIFIC, MULTIGROUP, MULTIREGISTRY + }; + private static final String[] RENAMED_REPORTERS = { + REPORTER_NAMES[0], REPORTER_NAMES[1], UNIVERSAL, MULTIGROUP + }; private static final SolrInfoBean.Category HANDLER_CATEGORY = SolrInfoBean.Category.QUERY; private CoreContainer cc; @@ -61,7 +64,8 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 { private int jmxReporter; private void assertTagged(Map reporters, String name) { - assertTrue("Reporter '" + name + "' missing in " + reporters, reporters.containsKey(name + "@" + tag)); + assertTrue( + "Reporter '" + name + "' missing in " + reporters, reporters.containsKey(name + "@" + tag)); } @Before @@ -70,18 +74,26 @@ public void beforeTest() throws Exception { // define these properties, they are used in solrconfig.xml System.setProperty("solr.test.sys.prop1", "propone"); System.setProperty("solr.test.sys.prop2", "proptwo"); - String solrXml = Files.readString(home.resolve("solr-metricreporter.xml"), StandardCharsets.UTF_8); + String solrXml = + Files.readString(home.resolve("solr-metricreporter.xml"), StandardCharsets.UTF_8); NodeConfig cfg = SolrXmlConfig.fromString(home, solrXml); - cc = createCoreContainer(cfg, new TestHarness.TestCoresLocator - (DEFAULT_TEST_CORENAME, initAndGetDataDir().getAbsolutePath(), - "solrconfig.xml", "schema.xml")); - + cc = + createCoreContainer( + cfg, + new TestHarness.TestCoresLocator( + DEFAULT_TEST_CORENAME, + initAndGetDataDir().getAbsolutePath(), + "solrconfig.xml", + "schema.xml")); + h.coreName = DEFAULT_TEST_CORENAME; jmxReporter = JmxUtil.findFirstMBeanServer() != null ? 1 : 0; metricManager = cc.getMetricManager(); tag = h.getCore().getCoreMetricManager().getTag(); - // initially there are more reporters, because two of them are added via a matching collection name - Map reporters = metricManager.getReporters("solr.core." + DEFAULT_TEST_CORENAME); + // initially there are more reporters, because two of them are added via a matching collection + // name + Map reporters = + metricManager.getReporters("solr.core." 
+ DEFAULT_TEST_CORENAME); assertEquals(INITIAL_REPORTERS.length + jmxReporter, reporters.size()); for (String r : INITIAL_REPORTERS) { assertTagged(reporters, r); @@ -95,16 +107,24 @@ public void beforeTest() throws Exception { assertEquals(10 + jmxReporter, plugins.length); reporters = metricManager.getReporters("solr.node"); assertEquals(4 + jmxReporter, reporters.size()); - assertTrue("Reporter '" + REPORTER_NAMES[0] + "' missing in solr.node", reporters.containsKey(REPORTER_NAMES[0])); - assertTrue("Reporter '" + UNIVERSAL + "' missing in solr.node", reporters.containsKey(UNIVERSAL)); - assertTrue("Reporter '" + MULTIGROUP + "' missing in solr.node", reporters.containsKey(MULTIGROUP)); - assertTrue("Reporter '" + MULTIREGISTRY + "' missing in solr.node", reporters.containsKey(MULTIREGISTRY)); + assertTrue( + "Reporter '" + REPORTER_NAMES[0] + "' missing in solr.node", + reporters.containsKey(REPORTER_NAMES[0])); + assertTrue( + "Reporter '" + UNIVERSAL + "' missing in solr.node", reporters.containsKey(UNIVERSAL)); + assertTrue( + "Reporter '" + MULTIGROUP + "' missing in solr.node", reporters.containsKey(MULTIGROUP)); + assertTrue( + "Reporter '" + MULTIREGISTRY + "' missing in solr.node", + reporters.containsKey(MULTIREGISTRY)); SolrMetricReporter reporter = reporters.get(REPORTER_NAMES[0]); - assertTrue("Reporter " + reporter + " is not an instance of " + MockMetricReporter.class.getName(), - reporter instanceof MockMetricReporter); + assertTrue( + "Reporter " + reporter + " is not an instance of " + MockMetricReporter.class.getName(), + reporter instanceof MockMetricReporter); reporter = reporters.get(UNIVERSAL); - assertTrue("Reporter " + reporter + " is not an instance of " + MockMetricReporter.class.getName(), - reporter instanceof MockMetricReporter); + assertTrue( + "Reporter " + reporter + " is not an instance of " + MockMetricReporter.class.getName(), + reporter instanceof MockMetricReporter); } @After @@ -112,9 +132,10 @@ public void afterTest() throws Exception { if (null == metricManager) { return; // test failed to init, nothing to cleanup } - + SolrCoreMetricManager coreMetricManager = h.getCore().getCoreMetricManager(); - Map reporters = metricManager.getReporters(coreMetricManager.getRegistryName()); + Map reporters = + metricManager.getReporters(coreMetricManager.getRegistryName()); Gauge gauge = (Gauge) coreMetricManager.getRegistry().getMetrics().get("CORE.indexDir"); assertNotNull(gauge.getValue()); @@ -126,7 +147,8 @@ public void afterTest() throws Exception { for (String reporterName : RENAMED_REPORTERS) { SolrMetricReporter reporter = reporters.get(reporterName + "@" + tag); MockMetricReporter mockReporter = (MockMetricReporter) reporter; - assertTrue("Reporter " + reporterName + " was not closed: " + mockReporter, mockReporter.didClose); + assertTrue( + "Reporter " + reporterName + " was not closed: " + mockReporter, mockReporter.didClose); } } @@ -134,7 +156,8 @@ public void afterTest() throws Exception { public void testConfigureReporter() throws Exception { Random random = random(); - String metricName = SolrMetricManager.mkName(METRIC_NAME, HANDLER_CATEGORY.toString(), HANDLER_NAME); + String metricName = + SolrMetricManager.mkName(METRIC_NAME, HANDLER_CATEGORY.toString(), HANDLER_NAME); SolrCoreMetricManager coreMetricManager = h.getCore().getCoreMetricManager(); Timer timer = metricManager.timer(null, coreMetricManager.getRegistryName(), metricName); @@ -147,7 +170,8 @@ public void testConfigureReporter() throws Exception { long finalCount = 
timer.getCount(); assertEquals("metric counter incorrect", iterations, finalCount - initialCount); - Map reporters = metricManager.getReporters(coreMetricManager.getRegistryName()); + Map reporters = + metricManager.getReporters(coreMetricManager.getRegistryName()); assertEquals(RENAMED_REPORTERS.length + jmxReporter, reporters.size()); // SPECIFIC and MULTIREGISTRY were skipped because they were @@ -158,16 +182,24 @@ public void testConfigureReporter() throws Exception { assertTrue(reporter instanceof MockMetricReporter); MockMetricReporter mockReporter = (MockMetricReporter) reporter; - assertTrue("Reporter " + reporterName + " was not initialized: " + mockReporter, mockReporter.didInit); - assertTrue("Reporter " + reporterName + " was not validated: " + mockReporter, mockReporter.didValidate); - assertFalse("Reporter " + reporterName + " was incorrectly closed: " + mockReporter, mockReporter.didClose); + assertTrue( + "Reporter " + reporterName + " was not initialized: " + mockReporter, + mockReporter.didInit); + assertTrue( + "Reporter " + reporterName + " was not validated: " + mockReporter, + mockReporter.didValidate); + assertFalse( + "Reporter " + reporterName + " was incorrectly closed: " + mockReporter, + mockReporter.didClose); } } @Test public void testCoreContainerMetrics() throws Exception { String registryName = SolrMetricManager.getRegistryName(SolrInfoBean.Group.node); - assertTrue(cc.getMetricManager().registryNames().toString(), cc.getMetricManager().registryNames().contains(registryName)); + assertTrue( + cc.getMetricManager().registryNames().toString(), + cc.getMetricManager().registryNames().contains(registryName)); MetricRegistry registry = cc.getMetricManager().registry(registryName); Map metrics = registry.getMetrics(); assertTrue(metrics.containsKey("CONTAINER.cores.loaded")); @@ -181,7 +213,7 @@ public void testCoreContainerMetrics() throws Exception { assertTrue(metrics.containsKey("CONTAINER.fs.coreRoot.path")); assertTrue(metrics.containsKey("CONTAINER.version.specification")); assertTrue(metrics.containsKey("CONTAINER.version.implementation")); - Gauge g = (Gauge)metrics.get("CONTAINER.fs.path"); + Gauge g = (Gauge) metrics.get("CONTAINER.fs.path"); assertEquals(g.getValue(), cc.getSolrHome()); } } diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/MockMetricReporter.java b/solr/core/src/test/org/apache/solr/metrics/reporters/MockMetricReporter.java index f815c7c694f..e2830552ce3 100644 --- a/solr/core/src/test/org/apache/solr/metrics/reporters/MockMetricReporter.java +++ b/solr/core/src/test/org/apache/solr/metrics/reporters/MockMetricReporter.java @@ -16,12 +16,11 @@ */ package org.apache.solr.metrics.reporters; +import com.codahale.metrics.Metric; +import com.codahale.metrics.MetricRegistry; import java.io.IOException; import java.util.Locale; import java.util.NoSuchElementException; - -import com.codahale.metrics.Metric; -import com.codahale.metrics.MetricRegistry; import org.apache.solr.metrics.SolrMetricManager; import org.apache.solr.metrics.SolrMetricReporter; @@ -54,7 +53,8 @@ protected void validate() throws IllegalStateException { throw new IllegalStateException("MockMetricReporter::configurable not defined."); } if (period < 1) { - throw new IllegalStateException("Init argument 'period' is in time unit 'seconds' and must be at least 1."); + throw new IllegalStateException( + "Init argument 'period' is in time unit 'seconds' and must be at least 1."); } } @@ -74,8 +74,14 @@ public Metric reportMetric(String metricName) throws 
NoSuchElementException { @Override public String toString() { - return String.format(Locale.ENGLISH, "[%s@%s: configurable = %s, didInit = %b, didValidate = %b, didClose = %b]", - getClass().getName(), Integer.toHexString(hashCode()), configurable, didInit, didValidate, didClose); - + return String.format( + Locale.ENGLISH, + "[%s@%s: configurable = %s, didInit = %b, didValidate = %b, didClose = %b]", + getClass().getName(), + Integer.toHexString(hashCode()), + configurable, + didInit, + didValidate, + didClose); } } diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrGraphiteReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrGraphiteReporterTest.java index c5bcde2c492..d9a320ad7d8 100644 --- a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrGraphiteReporterTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrGraphiteReporterTest.java @@ -27,7 +27,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; - import org.apache.commons.io.FileUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.core.CoreContainer; @@ -39,14 +38,12 @@ import org.apache.solr.util.TestHarness; import org.junit.Test; -/** - * - */ +/** */ public class SolrGraphiteReporterTest extends SolrTestCaseJ4 { @Test public void testReporter() throws Exception { - int jmxReporter = JmxUtil.findFirstMBeanServer() != null ? 1: 0; + int jmxReporter = JmxUtil.findFirstMBeanServer() != null ? 1 : 0; Path home = Paths.get(TEST_HOME()); // define these properties, they are used in solrconfig.xml System.setProperty("solr.test.sys.prop1", "propone"); @@ -58,12 +55,19 @@ public void testReporter() throws Exception { Thread.sleep(1000); // define the port where MockGraphite is running System.setProperty("mock-graphite-port", String.valueOf(mock.port)); - String solrXml = FileUtils.readFileToString(Paths.get(home.toString(), "solr-graphitereporter.xml").toFile(), "UTF-8"); + String solrXml = + FileUtils.readFileToString( + Paths.get(home.toString(), "solr-graphitereporter.xml").toFile(), "UTF-8"); NodeConfig cfg = SolrXmlConfig.fromString(home, solrXml); - CoreContainer cc = createCoreContainer(cfg, new TestHarness.TestCoresLocator - (DEFAULT_TEST_CORENAME, initAndGetDataDir().getAbsolutePath(), - "solrconfig.xml", "schema.xml")); - + CoreContainer cc = + createCoreContainer( + cfg, + new TestHarness.TestCoresLocator( + DEFAULT_TEST_CORENAME, + initAndGetDataDir().getAbsolutePath(), + "solrconfig.xml", + "schema.xml")); + h.coreName = DEFAULT_TEST_CORENAME; SolrMetricManager metricManager = cc.getMetricManager(); Map reporters = metricManager.getReporters("solr.node"); @@ -98,8 +102,8 @@ public void run() { while (!stop) { try { Socket s = server.accept(); - BufferedReader br = new BufferedReader( - new InputStreamReader(s.getInputStream(), StandardCharsets.UTF_8)); + BufferedReader br = + new BufferedReader(new InputStreamReader(s.getInputStream(), StandardCharsets.UTF_8)); String line; while ((line = br.readLine()) != null) { lines.add(line); @@ -117,5 +121,4 @@ public void close() throws Exception { } } } - } diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterCloudTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterCloudTest.java index 5cff7737ceb..8b935508ad4 100644 --- a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterCloudTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterCloudTest.java @@ -16,16 +16,15 @@ */ 
package org.apache.solr.metrics.reporters; -import javax.management.MBeanServer; -import javax.management.ObjectInstance; -import javax.management.Query; -import javax.management.QueryExp; import java.lang.invoke.MethodHandles; import java.lang.management.ManagementFactory; import java.util.HashSet; import java.util.Map; import java.util.Set; - +import javax.management.MBeanServer; +import javax.management.ObjectInstance; +import javax.management.Query; +import javax.management.QueryExp; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -41,9 +40,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * - */ +/** */ public class SolrJmxReporterCloudTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -54,17 +51,15 @@ public class SolrJmxReporterCloudTest extends SolrCloudTestCase { public static void setupCluster() throws Exception { // make sure there's an MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer(); - configureCluster(1) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(1).addConfig("conf", configset("cloud-minimal")).configure(); CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 1) .process(cluster.getSolrClient()); } + @AfterClass public static void releaseMBeanServer() { mBeanServer = null; } - @Test public void testJmxReporter() throws Exception { @@ -84,36 +79,49 @@ public void testJmxReporter() throws Exception { SolrMetricManager manager = runner.getCoreContainer().getMetricManager(); for (String registry : manager.registryNames()) { Map reporters = manager.getReporters(registry); - long jmxReporters = reporters.entrySet().stream().filter(e -> e.getValue() instanceof SolrJmxReporter).count(); - reporters.forEach((k, v) -> { - if (!(v instanceof SolrJmxReporter)) { - return; - } - if (!((SolrJmxReporter)v).getDomain().startsWith("solr.core")) { - return; - } - if (!((SolrJmxReporter)v).isActive()) { - return; - } - QueryExp exp = Query.eq(Query.attr(JmxMetricsReporter.INSTANCE_TAG), Query.value(Integer.toHexString(v.hashCode()))); - Set beans = mBeanServer.queryMBeans(null, exp); - if (((SolrJmxReporter) v).isStarted() && beans.isEmpty() && jmxReporters < 2) { - if (log.isInfoEnabled()) { - log.info("DocCollection: {}", getCollectionState(COLLECTION)); - } - fail("JMX reporter " + k + " for registry " + registry + " failed to register any beans!"); - } else { - Set categories = new HashSet<>(); - beans.forEach(bean -> { - String cat = bean.getObjectName().getKeyProperty("category"); - if (cat != null) { - categories.add(cat); + long jmxReporters = + reporters.entrySet().stream() + .filter(e -> e.getValue() instanceof SolrJmxReporter) + .count(); + reporters.forEach( + (k, v) -> { + if (!(v instanceof SolrJmxReporter)) { + return; + } + if (!((SolrJmxReporter) v).getDomain().startsWith("solr.core")) { + return; + } + if (!((SolrJmxReporter) v).isActive()) { + return; + } + QueryExp exp = + Query.eq( + Query.attr(JmxMetricsReporter.INSTANCE_TAG), + Query.value(Integer.toHexString(v.hashCode()))); + Set beans = mBeanServer.queryMBeans(null, exp); + if (((SolrJmxReporter) v).isStarted() && beans.isEmpty() && jmxReporters < 2) { + if (log.isInfoEnabled()) { + log.info("DocCollection: {}", getCollectionState(COLLECTION)); + } + fail( + "JMX reporter " + + k + + " for registry " + + 
registry + + " failed to register any beans!"); + } else { + Set categories = new HashSet<>(); + beans.forEach( + bean -> { + String cat = bean.getObjectName().getKeyProperty("category"); + if (cat != null) { + categories.add(cat); + } + }); + log.info("Registered categories: {}", categories); + assertTrue("Too few categories: " + categories, categories.size() > 5); } }); - log.info("Registered categories: {}", categories); - assertTrue("Too few categories: " + categories, categories.size() > 5); - } - }); } } } diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java index b4fedf98211..fa84f7c1218 100644 --- a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java @@ -16,20 +16,18 @@ */ package org.apache.solr.metrics.reporters; -import javax.management.InstanceNotFoundException; -import javax.management.MBeanServer; -import javax.management.MBeanServerFactory; -import javax.management.ObjectInstance; -import javax.management.ObjectName; - +import com.codahale.metrics.Counter; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Random; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; - -import com.codahale.metrics.Counter; +import javax.management.InstanceNotFoundException; +import javax.management.MBeanServer; +import javax.management.MBeanServerFactory; +import javax.management.ObjectInstance; +import javax.management.ObjectName; import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.CoreAdminParams; @@ -54,6 +52,7 @@ public class SolrJmxReporterTest extends SolrTestCaseJ4 { /** The MbeanServer started by the test, that we expect the SolrJmxReporter to find by agentId */ private static MBeanServer TEST_MBEAN_SERVER; + private static String PREFIX; private String domain; @@ -67,6 +66,7 @@ public static void init() throws Exception { TEST_MBEAN_SERVER = MBeanServerFactory.createMBeanServer(); PREFIX = getSimpleClassName() + "-"; } + @AfterClass public static void shutdown() throws Exception { if (null != TEST_MBEAN_SERVER) { @@ -86,29 +86,34 @@ public void beforeTest() throws Exception { coreMetricManager = core.getCoreMetricManager(); metricManager = core.getCoreContainer().getMetricManager(); PluginInfo pluginInfo = createReporterPluginInfo(rootName, true); - metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore(), - pluginInfo, coreMetricManager.getTag()); - - Map reporters = metricManager.getReporters(coreMetricManager.getRegistryName()); + metricManager.loadReporter( + coreMetricManager.getRegistryName(), + coreMetricManager.getCore(), + pluginInfo, + coreMetricManager.getTag()); + + Map reporters = + metricManager.getReporters(coreMetricManager.getRegistryName()); assertTrue("reporters.size should be > 0, but was + " + reporters.size(), reporters.size() > 0); String reporterName = pluginInfo.name; String taggedName = reporterName + "@" + coreMetricManager.getTag(); - assertNotNull("reporter " + taggedName + " not present among " + reporters, reporters.get(taggedName)); - assertTrue("wrong reporter class: " + reporters.get(taggedName), reporters.get(taggedName) instanceof SolrJmxReporter); + assertNotNull( + "reporter " + taggedName + " not present among " + reporters, reporters.get(taggedName)); + 
assertTrue( + "wrong reporter class: " + reporters.get(taggedName), + reporters.get(taggedName) instanceof SolrJmxReporter); SolrJmxReporter reporter = (SolrJmxReporter) reporters.get(taggedName); assertNotNull("MBean server not found on reporter", reporter.getMBeanServer()); - assertEquals("Wrong MBeanServer found on reporter", - TEST_MBEAN_SERVER, - reporter.getMBeanServer()); + assertEquals( + "Wrong MBeanServer found on reporter", TEST_MBEAN_SERVER, reporter.getMBeanServer()); } private PluginInfo createReporterPluginInfo(String rootName, boolean enabled) { Random random = random(); String className = SolrJmxReporter.class.getName(); String reporterName = PREFIX + TestUtil.randomSimpleString(random, 5, 10); - - + Map attrs = new HashMap<>(); attrs.put(FieldType.CLASS_NAME, className); attrs.put(CoreAdminParams.NAME, reporterName); @@ -116,12 +121,14 @@ private PluginInfo createReporterPluginInfo(String rootName, boolean enabled) { attrs.put("enabled", enabled); try { - String agentId = (String) TEST_MBEAN_SERVER.getAttribute - (new ObjectName("JMImplementation:type=MBeanServerDelegate"), - "MBeanServerId"); + String agentId = + (String) + TEST_MBEAN_SERVER.getAttribute( + new ObjectName("JMImplementation:type=MBeanServerDelegate"), "MBeanServerId"); attrs.put("agentId", agentId); } catch (Exception e) { - throw new RuntimeException("Unable to determine agentId of MBeanServer: " + e.getMessage(), e); + throw new RuntimeException( + "Unable to determine agentId of MBeanServer: " + e.getMessage(), e); } boolean shouldOverrideDomain = random.nextBoolean(); if (shouldOverrideDomain) { @@ -137,16 +144,16 @@ public void afterTest() throws Exception { if (null == metricManager) { return; // test failed to init, nothing to cleanup } - + metricManager.closeReporters(coreMetricManager.getRegistryName()); // sanity check there are no longer any registered beans in our domain.... - assertEquals(Collections.emptySet(), - TEST_MBEAN_SERVER.queryMBeans(ObjectName.getInstance(domain + ":*"), null)); + assertEquals( + Collections.emptySet(), + TEST_MBEAN_SERVER.queryMBeans(ObjectName.getInstance(domain + ":*"), null)); coreMetricManager.close(); deleteCore(); - } @Test @@ -159,15 +166,22 @@ public void testReportMetrics() throws Exception { int iterations = TestUtil.nextInt(random, 0, MAX_ITERATIONS); for (int i = 0; i < iterations; ++i) { - Map metrics = SolrMetricTestUtils.getRandomMetricsWithReplacements(random, registered); - SolrMetricProducer producer = SolrMetricTestUtils.getProducerOf(metricManager, category, scope, metrics); + Map metrics = + SolrMetricTestUtils.getRandomMetricsWithReplacements(random, registered); + SolrMetricProducer producer = + SolrMetricTestUtils.getProducerOf(metricManager, category, scope, metrics); coreMetricManager.registerMetricProducer(scope, producer); registered.putAll(metrics); - //waitForListener(); + // waitForListener(); Set objects = TEST_MBEAN_SERVER.queryMBeans(null, null); - assertEquals(registered.size(), objects.stream(). 
- filter(o -> scope.equals(o.getObjectName().getKeyProperty("scope")) && - rootName.equals(o.getObjectName().getDomain())).count()); + assertEquals( + registered.size(), + objects.stream() + .filter( + o -> + scope.equals(o.getObjectName().getKeyProperty("scope")) + && rootName.equals(o.getObjectName().getDomain())) + .count()); } } @@ -178,45 +192,63 @@ public void testReloadCore() throws Exception { String scope = PREFIX + SolrMetricTestUtils.getRandomScope(random, true); SolrInfoBean.Category category = SolrMetricTestUtils.getRandomCategory(random, true); Map metrics = SolrMetricTestUtils.getRandomMetrics(random, true); - SolrMetricProducer producer = SolrMetricTestUtils.getProducerOf(metricManager, category, scope, metrics); + SolrMetricProducer producer = + SolrMetricTestUtils.getProducerOf(metricManager, category, scope, metrics); coreMetricManager.registerMetricProducer(scope, producer); Set objects = TEST_MBEAN_SERVER.queryMBeans(null, null); - assertEquals(metrics.size(), objects.stream(). - filter(o -> scope.equals(o.getObjectName().getKeyProperty("scope")) && - o.getObjectName().getDomain().equals(rootName)).count()); + assertEquals( + metrics.size(), + objects.stream() + .filter( + o -> + scope.equals(o.getObjectName().getKeyProperty("scope")) + && o.getObjectName().getDomain().equals(rootName)) + .count()); h.getCoreContainer().reload(h.getCore().getName()); PluginInfo pluginInfo = createReporterPluginInfo(rootName, true); - metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore(), - pluginInfo, String.valueOf(coreMetricManager.getCore().hashCode())); + metricManager.loadReporter( + coreMetricManager.getRegistryName(), + coreMetricManager.getCore(), + pluginInfo, + String.valueOf(coreMetricManager.getCore().hashCode())); coreMetricManager.registerMetricProducer(scope, producer); objects = TEST_MBEAN_SERVER.queryMBeans(null, null); - assertEquals(metrics.size(), objects.stream(). 
- filter(o -> scope.equals(o.getObjectName().getKeyProperty("scope")) && - rootName.equals(o.getObjectName().getDomain())).count()); + assertEquals( + metrics.size(), + objects.stream() + .filter( + o -> + scope.equals(o.getObjectName().getKeyProperty("scope")) + && rootName.equals(o.getObjectName().getDomain())) + .count()); } @Test public void testClosedCore() throws Exception { - Set objects = TEST_MBEAN_SERVER.queryMBeans(new ObjectName("*:category=CORE,name=indexDir,*"), null); + Set objects = + TEST_MBEAN_SERVER.queryMBeans(new ObjectName("*:category=CORE,name=indexDir,*"), null); assertEquals("Unexpected number of indexDir beans: " + objects.toString(), 1, objects.size()); final ObjectInstance inst = objects.iterator().next(); final AtomicBoolean running = new AtomicBoolean(true); try { - new Thread(() -> { - while (running.get()) { - try { - Object value = TEST_MBEAN_SERVER.getAttribute(inst.getObjectName(), "Value"); - assertNotNull(value); - } catch (InstanceNotFoundException x) { - // no longer present - break; - } catch (Exception e) { - fail("Unexpected error retrieving attribute: " + e); - } - } - }, "TestMBeanThread").start(); + new Thread( + () -> { + while (running.get()) { + try { + Object value = TEST_MBEAN_SERVER.getAttribute(inst.getObjectName(), "Value"); + assertNotNull(value); + } catch (InstanceNotFoundException x) { + // no longer present + break; + } catch (Exception e) { + fail("Unexpected error retrieving attribute: " + e); + } + } + }, + "TestMBeanThread") + .start(); // This should be enough time for the Thread.sleep(500); @@ -226,7 +258,8 @@ public void testClosedCore() throws Exception { h.getCoreContainer().unload(h.getCore().getName()); Thread.sleep(2000); - objects = TEST_MBEAN_SERVER.queryMBeans(new ObjectName("*:category=CORE,name=indexDir,*"), null); + objects = + TEST_MBEAN_SERVER.queryMBeans(new ObjectName("*:category=CORE,name=indexDir,*"), null); assertEquals("Unexpected number of beans after core closed: " + objects, 0, objects.size()); } @@ -234,31 +267,48 @@ public void testClosedCore() throws Exception { public void testEnabled() throws Exception { String root1 = PREFIX + TestUtil.randomSimpleString(random(), 5, 10); PluginInfo pluginInfo1 = createReporterPluginInfo(root1, true); - metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore(), - pluginInfo1, coreMetricManager.getTag()); + metricManager.loadReporter( + coreMetricManager.getRegistryName(), + coreMetricManager.getCore(), + pluginInfo1, + coreMetricManager.getTag()); String root2 = PREFIX + TestUtil.randomSimpleString(random(), 5, 10); assertFalse(root2.equals(root1)); PluginInfo pluginInfo2 = createReporterPluginInfo(root2, false); - metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore(), - pluginInfo2, coreMetricManager.getTag()); - - Map reporters = metricManager.getReporters(coreMetricManager.getRegistryName()); + metricManager.loadReporter( + coreMetricManager.getRegistryName(), + coreMetricManager.getCore(), + pluginInfo2, + coreMetricManager.getTag()); + + Map reporters = + metricManager.getReporters(coreMetricManager.getRegistryName()); assertTrue(reporters.containsKey(pluginInfo1.name + "@" + coreMetricManager.getTag())); assertTrue(reporters.containsKey(pluginInfo2.name + "@" + coreMetricManager.getTag())); String scope = PREFIX + SolrMetricTestUtils.getRandomScope(random(), true); SolrInfoBean.Category category = SolrMetricTestUtils.getRandomCategory(random(), true); Map metrics = 
SolrMetricTestUtils.getRandomMetrics(random(), true); - SolrMetricProducer producer = SolrMetricTestUtils.getProducerOf(metricManager, category, scope, metrics); + SolrMetricProducer producer = + SolrMetricTestUtils.getProducerOf(metricManager, category, scope, metrics); coreMetricManager.registerMetricProducer(scope, producer); Set objects = TEST_MBEAN_SERVER.queryMBeans(null, null); - assertEquals(metrics.size(), objects.stream(). - filter(o -> scope.equals(o.getObjectName().getKeyProperty("scope")) && - root1.equals(o.getObjectName().getDomain())).count()); - assertEquals(0, objects.stream(). - filter(o -> scope.equals(o.getObjectName().getKeyProperty("scope")) && - root2.equals(o.getObjectName().getDomain())).count()); + assertEquals( + metrics.size(), + objects.stream() + .filter( + o -> + scope.equals(o.getObjectName().getKeyProperty("scope")) + && root1.equals(o.getObjectName().getDomain())) + .count()); + assertEquals( + 0, + objects.stream() + .filter( + o -> + scope.equals(o.getObjectName().getKeyProperty("scope")) + && root2.equals(o.getObjectName().getDomain())) + .count()); } - } diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java index 65b6a090d9d..fb57ca0d785 100644 --- a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrSlf4jReporterTest.java @@ -21,7 +21,6 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.Map; - import org.apache.commons.io.FileUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrDocumentList; @@ -37,9 +36,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * - */ +/** */ public class SolrSlf4jReporterTest extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -51,12 +48,19 @@ public void testReporter() throws Exception { System.setProperty("solr.test.sys.prop1", "propone"); System.setProperty("solr.test.sys.prop2", "proptwo"); - String solrXml = FileUtils.readFileToString(Paths.get(home.toString(), "solr-slf4jreporter.xml").toFile(), "UTF-8"); + String solrXml = + FileUtils.readFileToString( + Paths.get(home.toString(), "solr-slf4jreporter.xml").toFile(), "UTF-8"); NodeConfig cfg = SolrXmlConfig.fromString(home, solrXml); - CoreContainer cc = createCoreContainer(cfg, new TestHarness.TestCoresLocator - (DEFAULT_TEST_CORENAME, initAndGetDataDir().getAbsolutePath(), - "solrconfig.xml", "schema.xml")); - + CoreContainer cc = + createCoreContainer( + cfg, + new TestHarness.TestCoresLocator( + DEFAULT_TEST_CORENAME, + initAndGetDataDir().getAbsolutePath(), + "solrconfig.xml", + "schema.xml")); + h.coreName = DEFAULT_TEST_CORENAME; SolrMetricManager metricManager = cc.getMetricManager(); Map reporters = metricManager.getReporters("solr.node"); @@ -78,7 +82,8 @@ public void testReporter() throws Exception { do { Thread.sleep(1000); cnt--; - active = ((SolrSlf4jReporter)reporter1).isActive() && ((SolrSlf4jReporter)reporter2).isActive(); + active = + ((SolrSlf4jReporter) reporter1).isActive() && ((SolrSlf4jReporter) reporter2).isActive(); } while (!active && cnt > 0); if (!active) { fail("One or more reporters didn't become active in 20 seconds"); @@ -100,7 +105,7 @@ public void testReporter() throws Exception { } private static void ensureLoggingConfiguredAppropriately() throws Exception { - if (! 
log.isInfoEnabled()) { + if (!log.isInfoEnabled()) { fail("Test requires that log-level is at-least INFO, but INFO is disabled"); } } diff --git a/solr/core/src/test/org/apache/solr/parser/SolrQueryParserBaseTest.java b/solr/core/src/test/org/apache/solr/parser/SolrQueryParserBaseTest.java index 75e11749ba2..3a0cbf1276a 100644 --- a/solr/core/src/test/org/apache/solr/parser/SolrQueryParserBaseTest.java +++ b/solr/core/src/test/org/apache/solr/parser/SolrQueryParserBaseTest.java @@ -16,6 +16,14 @@ */ package org.apache.solr.parser; +import static org.apache.solr.SolrTestCaseJ4.assumeWorkingMockito; +import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doReturn; + +import java.util.Arrays; +import java.util.List; import org.apache.lucene.queryparser.charstream.CharStream; import org.apache.lucene.search.Query; import org.apache.solr.common.SolrException; @@ -29,110 +37,95 @@ import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; -import java.util.Arrays; -import java.util.List; - -import static org.apache.solr.SolrTestCaseJ4.assumeWorkingMockito; -import static org.junit.Assert.assertEquals; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doReturn; - @RunWith(MockitoJUnitRunner.class) public class SolrQueryParserBaseTest { - @BeforeClass - public static void setUpClass() { - assumeWorkingMockito(); - } - - private static final String DEFAULT_FIELD_NAME = "TestDefaultFieldname"; - - private static class MockSolrQueryParser extends SolrQueryParserBase { - public void ReInit(CharStream stream) { - } - - public Query TopLevelQuery(String field) { - return null; - } - } - - @Mock - private QParser qParser; - @Mock - private QParser subQParser; - @Mock - private SolrQueryRequest solrQueryRequest; - @Mock - private Query query; - @Mock - private IndexSchema indexSchema; - - private MockSolrQueryParser solrQueryParser; - - @Before - public void setUp() throws Exception { - solrQueryParser = new MockSolrQueryParser(); - } - - private void initQParser() { - doReturn(indexSchema).when(solrQueryRequest).getSchema(); - doReturn(solrQueryRequest).when(qParser).getReq(); - } - - @Test - public void testInitHappyCases() { - initQParser(); - solrQueryParser.init(null, qParser); - solrQueryParser.init(DEFAULT_FIELD_NAME, qParser); - } + @BeforeClass + public static void setUpClass() { + assumeWorkingMockito(); + } - @Test(expected = SolrException.class) - public void testInitBadDefaultField() { - solrQueryParser.init("", qParser); - } + private static final String DEFAULT_FIELD_NAME = "TestDefaultFieldname"; - @Test(expected = SolrException.class) - public void testInitNullQParser() { - solrQueryParser.init(DEFAULT_FIELD_NAME, null); - } - - @Test(expected = SolrException.class) - public void testInitNullQParserReq() { - solrQueryParser.init(DEFAULT_FIELD_NAME, qParser); - } - - @Test(expected = SolrException.class) - public void testInitNullQParserReqSchema() { - doReturn(solrQueryRequest).when(qParser).getReq(); - solrQueryParser.init(DEFAULT_FIELD_NAME, qParser); - } - - @Test - public void testGetField() { - initQParser(); - solrQueryParser.init(DEFAULT_FIELD_NAME, qParser); - assertEquals(DEFAULT_FIELD_NAME, solrQueryParser.getField(null)); - assertEquals(DEFAULT_FIELD_NAME, solrQueryParser.getField("")); - final String nonNullFieldName = "testFieldName"; - 
assertEquals(nonNullFieldName, solrQueryParser.getField(nonNullFieldName)); - } + private static class MockSolrQueryParser extends SolrQueryParserBase { + public void ReInit(CharStream stream) {} - @Test - public void testGetMagicFieldQuery() throws Exception { - String magicField = "_val_"; - String magicFieldSubParser = SolrQueryParserBase.MagicFieldName.get(magicField).subParser; - initQParser(); - solrQueryParser.init(DEFAULT_FIELD_NAME, qParser); - solrQueryParser.setAllowSubQueryParsing(true); - doReturn(query).when(subQParser).getQuery(); - doReturn(subQParser).when(qParser).subQuery(anyString(), eq(magicFieldSubParser)); - - String queryText = "queryText"; - List queryTerms = Arrays.asList("query", "terms"); - boolean quoted = true; //value doesn't matter for this test - boolean raw = true; //value doesn't matter for this test - assertEquals(query, solrQueryParser.getFieldQuery(magicField, queryText, quoted, raw)); - assertEquals(query, solrQueryParser.getFieldQuery(magicField, queryTerms, raw)); + public Query TopLevelQuery(String field) { + return null; } + } + + @Mock private QParser qParser; + @Mock private QParser subQParser; + @Mock private SolrQueryRequest solrQueryRequest; + @Mock private Query query; + @Mock private IndexSchema indexSchema; + + private MockSolrQueryParser solrQueryParser; + + @Before + public void setUp() throws Exception { + solrQueryParser = new MockSolrQueryParser(); + } + + private void initQParser() { + doReturn(indexSchema).when(solrQueryRequest).getSchema(); + doReturn(solrQueryRequest).when(qParser).getReq(); + } + + @Test + public void testInitHappyCases() { + initQParser(); + solrQueryParser.init(null, qParser); + solrQueryParser.init(DEFAULT_FIELD_NAME, qParser); + } + + @Test(expected = SolrException.class) + public void testInitBadDefaultField() { + solrQueryParser.init("", qParser); + } + + @Test(expected = SolrException.class) + public void testInitNullQParser() { + solrQueryParser.init(DEFAULT_FIELD_NAME, null); + } + + @Test(expected = SolrException.class) + public void testInitNullQParserReq() { + solrQueryParser.init(DEFAULT_FIELD_NAME, qParser); + } + + @Test(expected = SolrException.class) + public void testInitNullQParserReqSchema() { + doReturn(solrQueryRequest).when(qParser).getReq(); + solrQueryParser.init(DEFAULT_FIELD_NAME, qParser); + } + + @Test + public void testGetField() { + initQParser(); + solrQueryParser.init(DEFAULT_FIELD_NAME, qParser); + assertEquals(DEFAULT_FIELD_NAME, solrQueryParser.getField(null)); + assertEquals(DEFAULT_FIELD_NAME, solrQueryParser.getField("")); + final String nonNullFieldName = "testFieldName"; + assertEquals(nonNullFieldName, solrQueryParser.getField(nonNullFieldName)); + } + + @Test + public void testGetMagicFieldQuery() throws Exception { + String magicField = "_val_"; + String magicFieldSubParser = SolrQueryParserBase.MagicFieldName.get(magicField).subParser; + initQParser(); + solrQueryParser.init(DEFAULT_FIELD_NAME, qParser); + solrQueryParser.setAllowSubQueryParsing(true); + doReturn(query).when(subQParser).getQuery(); + doReturn(subQParser).when(qParser).subQuery(anyString(), eq(magicFieldSubParser)); + + String queryText = "queryText"; + List queryTerms = Arrays.asList("query", "terms"); + boolean quoted = true; // value doesn't matter for this test + boolean raw = true; // value doesn't matter for this test + assertEquals(query, solrQueryParser.getFieldQuery(magicField, queryText, quoted, raw)); + assertEquals(query, solrQueryParser.getFieldQuery(magicField, queryTerms, raw)); + } } 
diff --git a/solr/core/src/test/org/apache/solr/pkg/TestPackages.java b/solr/core/src/test/org/apache/solr/pkg/TestPackages.java index 4c91daa2ab9..1b8ac54026c 100644 --- a/solr/core/src/test/org/apache/solr/pkg/TestPackages.java +++ b/solr/core/src/test/org/apache/solr/pkg/TestPackages.java @@ -17,6 +17,23 @@ package org.apache.solr.pkg; +import static org.apache.solr.common.cloud.ZkStateReader.SOLR_PKGS_PATH; +import static org.apache.solr.common.params.CommonParams.JAVABIN; +import static org.apache.solr.common.params.CommonParams.WT; +import static org.apache.solr.core.TestSolrConfigHandler.getFileContent; +import static org.apache.solr.filestore.TestDistribPackageStore.*; + +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; import org.apache.commons.codec.digest.DigestUtils; import org.apache.lucene.analysis.core.WhitespaceTokenizerFactory; import org.apache.lucene.analysis.pattern.PatternReplaceCharFilterFactory; @@ -61,24 +78,6 @@ import org.junit.Before; import org.junit.Test; -import java.io.FileOutputStream; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.Arrays; -import java.util.Collections; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.Callable; -import java.util.zip.ZipEntry; -import java.util.zip.ZipOutputStream; - -import static org.apache.solr.common.cloud.ZkStateReader.SOLR_PKGS_PATH; -import static org.apache.solr.common.params.CommonParams.JAVABIN; -import static org.apache.solr.common.params.CommonParams.WT; -import static org.apache.solr.core.TestSolrConfigHandler.getFileContent; -import static org.apache.solr.filestore.TestDistribPackageStore.*; - @LogLevel("org.apache.solr.pkg.PackageLoader=DEBUG;org.apache.solr.pkg.PackageAPI=DEBUG") public class TestPackages extends SolrCloudTestCase { @@ -106,565 +105,606 @@ public void tearDown() throws Exception { } public static class ConfigPlugin implements ReflectMapWriter { - @JsonProperty - public String name; + @JsonProperty public String name; @JsonProperty("class") public String klass; } - @Test public void testCoreReloadingPlugin() throws Exception { - String FILE1 = "/mypkg/runtimelibs.jar"; - String COLLECTION_NAME = "testCoreReloadingPluginColl"; - byte[] derFile = readFile("cryptokeys/pub_key512.der"); - uploadKey(derFile, PackageStoreAPI.KEYS_DIR+"/pub_key512.der", cluster); - postFileAndWait(cluster, "runtimecode/runtimelibs.jar.bin", FILE1, - "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ=="); - - Package.AddVersion add = new Package.AddVersion(); - add.version = "1.0"; - add.pkg = "mypkg"; - add.files = Arrays.asList(new String[]{FILE1}); - V2Request req = new V2Request.Builder("/cluster/package") - .forceV2(true) - .withMethod(SolrRequest.METHOD.POST) - .withPayload(Collections.singletonMap("add", add)) - .build(); - - req.process(cluster.getSolrClient()); - TestDistribPackageStore.assertResponseValues(10, - () -> new V2Request.Builder("/cluster/package"). 
- withMethod(SolrRequest.METHOD.GET) - .build().process(cluster.getSolrClient()), - Map.of( - ":result:packages:mypkg[0]:version", "1.0", - ":result:packages:mypkg[0]:files[0]", FILE1 - )); - - CollectionAdminRequest - .createCollection(COLLECTION_NAME, "conf", 2, 2) - .process(cluster.getSolrClient()); - cluster.waitForActiveCollection(COLLECTION_NAME, 2, 4); - - verifyComponent(cluster.getSolrClient(), COLLECTION_NAME, "query", "filterCache", add.pkg, add.version); - - - add.version = "2.0"; - req.process(cluster.getSolrClient()); - TestDistribPackageStore.assertResponseValues(10, - () -> new V2Request.Builder("/cluster/package"). - withMethod(SolrRequest.METHOD.GET) - .build().process(cluster.getSolrClient()), - Map.of( - ":result:packages:mypkg[1]:version", "2.0", - ":result:packages:mypkg[1]:files[0]", FILE1 - )); - new UpdateRequest().commit(cluster.getSolrClient(), COLLECTION_NAME); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "query", "filterCache", - "mypkg", "2.0" ); + String FILE1 = "/mypkg/runtimelibs.jar"; + String COLLECTION_NAME = "testCoreReloadingPluginColl"; + byte[] derFile = readFile("cryptokeys/pub_key512.der"); + uploadKey(derFile, PackageStoreAPI.KEYS_DIR + "/pub_key512.der", cluster); + postFileAndWait( + cluster, + "runtimecode/runtimelibs.jar.bin", + FILE1, + "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ=="); + + Package.AddVersion add = new Package.AddVersion(); + add.version = "1.0"; + add.pkg = "mypkg"; + add.files = Arrays.asList(new String[] {FILE1}); + V2Request req = + new V2Request.Builder("/cluster/package") + .forceV2(true) + .withMethod(SolrRequest.METHOD.POST) + .withPayload(Collections.singletonMap("add", add)) + .build(); + + req.process(cluster.getSolrClient()); + TestDistribPackageStore.assertResponseValues( + 10, + () -> + new V2Request.Builder("/cluster/package") + .withMethod(SolrRequest.METHOD.GET) + .build() + .process(cluster.getSolrClient()), + Map.of( + ":result:packages:mypkg[0]:version", + "1.0", + ":result:packages:mypkg[0]:files[0]", + FILE1)); + + CollectionAdminRequest.createCollection(COLLECTION_NAME, "conf", 2, 2) + .process(cluster.getSolrClient()); + cluster.waitForActiveCollection(COLLECTION_NAME, 2, 4); + + verifyComponent( + cluster.getSolrClient(), COLLECTION_NAME, "query", "filterCache", add.pkg, add.version); + + add.version = "2.0"; + req.process(cluster.getSolrClient()); + TestDistribPackageStore.assertResponseValues( + 10, + () -> + new V2Request.Builder("/cluster/package") + .withMethod(SolrRequest.METHOD.GET) + .build() + .process(cluster.getSolrClient()), + Map.of( + ":result:packages:mypkg[1]:version", + "2.0", + ":result:packages:mypkg[1]:files[0]", + FILE1)); + new UpdateRequest().commit(cluster.getSolrClient(), COLLECTION_NAME); + + verifyComponent( + cluster.getSolrClient(), COLLECTION_NAME, "query", "filterCache", "mypkg", "2.0"); } @Test public void testPluginLoading() throws Exception { - String FILE1 = "/mypkg/runtimelibs.jar"; - String FILE2 = "/mypkg/runtimelibs_v2.jar"; - String FILE3 = "/mypkg/runtimelibs_v3.jar"; - String URP1 = "/mypkg/testurpv1.jar"; - String URP2 = "/mypkg/testurpv2.jar"; - String EXPR1 = "/mypkg/expressible.jar"; - String COLLECTION_NAME = "testPluginLoadingColl"; - byte[] derFile = readFile("cryptokeys/pub_key512.der"); - uploadKey(derFile, PackageStoreAPI.KEYS_DIR+"/pub_key512.der", cluster); - postFileAndWait(cluster, "runtimecode/runtimelibs.jar.bin", FILE1, - 
"L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ=="); - - postFileAndWait(cluster, "runtimecode/testurp_v1.jar.bin", URP1, - "h6UmMzuPqu4hQFGLBMJh/6kDSEXpJlgLsQDXx0KuxXWkV5giilRP57K3towiJRh2J+rqihqIghNCi3YgzgUnWQ=="); - - postFileAndWait(cluster, "runtimecode/expressible.jar.bin", EXPR1, - "ZOT11arAiPmPZYOHzqodiNnxO9pRyRozWZEBX8XGjU1/HJptFnZK+DI7eXnUtbNaMcbXE2Ze8hh4M/eGyhY8BQ=="); - - Package.AddVersion add = new Package.AddVersion(); - add.version = "1.0"; - add.pkg = "mypkg"; - add.files = Arrays.asList(new String[]{FILE1, URP1, EXPR1}); - V2Request req = new V2Request.Builder("/cluster/package") - .forceV2(true) - .withMethod(SolrRequest.METHOD.POST) - .withPayload(Collections.singletonMap("add", add)) - .build(); - - req.process(cluster.getSolrClient()); - - - CollectionAdminRequest - .createCollection(COLLECTION_NAME, "conf", 2, 2) - .process(cluster.getSolrClient()); - cluster.waitForActiveCollection(COLLECTION_NAME, 2, 4); - - TestDistribPackageStore.assertResponseValues(10, - () -> new V2Request.Builder("/cluster/package"). - withMethod(SolrRequest.METHOD.GET) - .build().process(cluster.getSolrClient()), - Map.of( - ":result:packages:mypkg[0]:version", "1.0", - ":result:packages:mypkg[0]:files[0]", FILE1 - )); - Map plugins = new LinkedHashMap<>(); - ConfigPlugin p = new ConfigPlugin(); - p.klass = "mypkg:org.apache.solr.core.RuntimeLibReqHandler"; - p.name = "/runtime"; - plugins.put("create-requesthandler", p); - - p = new ConfigPlugin(); - p.klass = "mypkg:org.apache.solr.core.RuntimeLibSearchComponent"; - p.name = "get"; - plugins.put("create-searchcomponent", p); - - p = new ConfigPlugin(); - p.klass = "mypkg:org.apache.solr.core.RuntimeLibResponseWriter"; - p.name = "json1"; - plugins.put("create-queryResponseWriter", p); - - p = new ConfigPlugin(); - p.klass = "mypkg:org.apache.solr.update.TestVersionedURP"; - p.name = "myurp"; - plugins.put("create-updateProcessor", p); - - p = new ConfigPlugin(); - p.klass = "mypkg:org.apache.solr.client.solrj.io.stream.metrics.MinCopyMetric"; - p.name = "mincopy"; - plugins.put("create-expressible", p); - - - V2Request v2r = new V2Request.Builder( "/c/"+COLLECTION_NAME+ "/config") - .withMethod(SolrRequest.METHOD.POST) - .withPayload(plugins) - .forceV2(true) - .build(); - cluster.getSolrClient().request(v2r); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "queryResponseWriter", "json1", - "mypkg", "1.0" ); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "searchComponent", "get", - "mypkg", "1.0" ); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "requestHandler", "/runtime", - "mypkg", "1.0" ); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "updateProcessor", "myurp", - "mypkg", "1.0" ); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "expressible", "mincopy", - "mypkg", "1.0" ); - - TestDistribPackageStore.assertResponseValues(10, - cluster.getSolrClient() , - new GenericSolrRequest(SolrRequest.METHOD.GET, - "/stream", new MapSolrParams(Map.of("collection", COLLECTION_NAME, - WT, JAVABIN, - "action", "plugins" - ))), Map.of( - ":plugins:mincopy", "org.apache.solr.client.solrj.io.stream.metrics.MinCopyMetric" - )); - - UpdateRequest ur = new UpdateRequest(); - ur.add(new SolrInputDocument("id", "1")); - ur.setParam("processor", "myurp"); - ur.process(cluster.getSolrClient(), COLLECTION_NAME); - cluster.getSolrClient().commit(COLLECTION_NAME, true, true); - - QueryResponse result = cluster.getSolrClient() - 
.query(COLLECTION_NAME, new SolrQuery( "id:1")); - - assertEquals("Version 1", result.getResults().get(0).getFieldValue("TestVersionedURP.Ver_s")); - - executeReq( "/" + COLLECTION_NAME + "/runtime?wt=javabin", cluster.getRandomJetty(random()), - Utils.JAVABINCONSUMER, - Map.of("class", "org.apache.solr.core.RuntimeLibReqHandler")); - - executeReq( "/" + COLLECTION_NAME + "/get?wt=json", cluster.getRandomJetty(random()), - Utils.JSONCONSUMER, - Map.of("Version","1")); - - - executeReq( "/" + COLLECTION_NAME + "/runtime?wt=json1", cluster.getRandomJetty(random()), - Utils.JSONCONSUMER, - Map.of("wt", "org.apache.solr.core.RuntimeLibResponseWriter")); - - //now upload the second jar - postFileAndWait(cluster, "runtimecode/runtimelibs_v2.jar.bin", FILE2, - "j+Rflxi64tXdqosIhbusqi6GTwZq8znunC/dzwcWW0/dHlFGKDurOaE1Nz9FSPJuXbHkVLj638yZ0Lp1ssnoYA=="); - - postFileAndWait(cluster, "runtimecode/testurp_v2.jar.bin", URP2, - "P/ptFXRvQMd4oKPvadSpd+A9ffwY3gcex5GVFVRy3df0/OF8XT5my8rQz7FZva+2ORbWxdXS8NKwNrbPVHLGXw=="); - //add the version using package API - add.version = "1.1"; - add.files = Arrays.asList(new String[]{FILE2,URP2, EXPR1}); - req.process(cluster.getSolrClient()); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "queryResponseWriter", "json1", - "mypkg", "1.1" ); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "searchComponent", "get", - "mypkg", "1.1" ); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "requestHandler", "/runtime", - "mypkg", "1.1" ); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "updateProcessor", "myurp", - "mypkg", "1.1" ); - - - executeReq( "/" + COLLECTION_NAME + "/get?wt=json", cluster.getRandomJetty(random()), - Utils.JSONCONSUMER, - Map.of( "Version","2")); - - - //now upload the third jar - postFileAndWait(cluster, "runtimecode/runtimelibs_v3.jar.bin", FILE3, - "a400n4T7FT+2gM0SC6+MfSOExjud8MkhTSFylhvwNjtWwUgKdPFn434Wv7Qc4QEqDVLhQoL3WqYtQmLPti0G4Q=="); - - add.version = "2.1"; - add.files = Arrays.asList(new String[]{FILE3, URP2, EXPR1}); - req.process(cluster.getSolrClient()); - - //now let's verify that the classes are updated - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "queryResponseWriter", "json1", - "mypkg", "2.1" ); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "searchComponent", "get", - "mypkg", "2.1" ); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "requestHandler", "/runtime", - "mypkg", "2.1" ); - - executeReq( "/" + COLLECTION_NAME + "/runtime?wt=json", cluster.getRandomJetty(random()), - Utils.JSONCONSUMER, - Map.of("Version","2")); - - //insert a doc with urp - ur = new UpdateRequest(); - ur.add(new SolrInputDocument("id", "2")); - ur.setParam("processor", "myurp"); - ur.process(cluster.getSolrClient(), COLLECTION_NAME); - cluster.getSolrClient().commit(COLLECTION_NAME, true, true); - - result = cluster.getSolrClient() - .query(COLLECTION_NAME, new SolrQuery( "id:2")); - - assertEquals("Version 2", result.getResults().get(0).getFieldValue("TestVersionedURP.Ver_s")); - - - Package.DelVersion delVersion = new Package.DelVersion(); - delVersion.pkg = "mypkg"; - delVersion.version = "1.0"; - V2Request delete = new V2Request.Builder("/cluster/package") - .withMethod(SolrRequest.METHOD.POST) - .forceV2(true) - .withPayload(Collections.singletonMap("delete", delVersion)) - .build(); - delete.process(cluster.getSolrClient()); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "queryResponseWriter", "json1", - "mypkg", "2.1" ); 
- - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "searchComponent", "get", - "mypkg", "2.1" ); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "requestHandler", "/runtime", - "mypkg", "2.1" ); - - // now remove the hughest version. So, it will roll back to the next highest one - delVersion.version = "2.1"; - delete.process(cluster.getSolrClient()); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "queryResponseWriter", "json1", - "mypkg", "1.1" ); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "searchComponent", "get", - "mypkg", "1.1" ); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "requestHandler", "/runtime", - "mypkg", "1.1" ); - - ModifiableSolrParams params = new ModifiableSolrParams(); - params.add("collection", COLLECTION_NAME); - new GenericSolrRequest(SolrRequest.METHOD.POST, "/config/params", params ){ - @Override - public RequestWriter.ContentWriter getContentWriter(String expectedType) { - return new RequestWriter.StringPayloadContentWriter("{set:{PKG_VERSIONS:{mypkg : '1.1'}}}", - ClientUtils.TEXT_JSON); - } - }.process(cluster.getSolrClient()) ; - - add.version = "2.1"; - add.files = Arrays.asList(new String[]{FILE3, URP2, EXPR1}); - req.process(cluster.getSolrClient()); - - //the collections mypkg is set to use version 1.1 - //so no upgrade - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "queryResponseWriter", "json1", - "mypkg", "1.1" ); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "searchComponent", "get", - "mypkg", "1.1" ); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "requestHandler", "/runtime", - "mypkg", "1.1" ); - - new GenericSolrRequest(SolrRequest.METHOD.POST, "/config/params", params ){ - @Override - public RequestWriter.ContentWriter getContentWriter(String expectedType) { - return new RequestWriter.StringPayloadContentWriter("{set:{PKG_VERSIONS:{mypkg : '2.1'}}}", - ClientUtils.TEXT_JSON); - } - }.process(cluster.getSolrClient()) ; - - //now, let's force every collection using 'mypkg' to refresh - //so that it uses version 2.1 - new V2Request.Builder("/cluster/package") - .withMethod(SolrRequest.METHOD.POST) - .withPayload("{refresh : mypkg}") - .forceV2(true) - .build() - .process(cluster.getSolrClient()); - - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "queryResponseWriter", "json1", - "mypkg", "2.1" ); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "searchComponent", "get", - "mypkg", "2.1" ); - - verifyComponent(cluster.getSolrClient(), - COLLECTION_NAME, "requestHandler", "/runtime", - "mypkg", "2.1" ); - - plugins.clear(); - p = new ConfigPlugin(); - p.name = "/rt_2"; - p.klass = "mypkg:"+ C.class.getName(); - plugins.put("create-requesthandler", p); - - p = new ConfigPlugin(); - p.name = "qp1"; - p.klass = "mypkg:"+ C2.class.getName(); - plugins.put("create-queryparser", p); - - v2r = new V2Request.Builder( "/c/"+COLLECTION_NAME+ "/config") - .withMethod(SolrRequest.METHOD.POST) - .withPayload(plugins) - .forceV2(true) - .build(); - cluster.getSolrClient().request(v2r); - assertTrue(C.informCalled); - assertTrue(C2.informCalled); - - //we create a new node. This node does not have the packages. But it should download it from another node - JettySolrRunner jetty = cluster.startJettySolrRunner(); - //create a new replica for this collection. 
it should end up - CollectionAdminRequest.addReplicaToShard(COLLECTION_NAME, "shard1") - .setNrtReplicas(1) - .setNode(jetty.getNodeName()) - .process(cluster.getSolrClient()); - cluster.waitForActiveCollection(COLLECTION_NAME, 2, 5); - checkAllNodesForFile(cluster,FILE3, - Map.of(":files:" + FILE3 + ":name", "runtimelibs_v3.jar"), - false); + String FILE1 = "/mypkg/runtimelibs.jar"; + String FILE2 = "/mypkg/runtimelibs_v2.jar"; + String FILE3 = "/mypkg/runtimelibs_v3.jar"; + String URP1 = "/mypkg/testurpv1.jar"; + String URP2 = "/mypkg/testurpv2.jar"; + String EXPR1 = "/mypkg/expressible.jar"; + String COLLECTION_NAME = "testPluginLoadingColl"; + byte[] derFile = readFile("cryptokeys/pub_key512.der"); + uploadKey(derFile, PackageStoreAPI.KEYS_DIR + "/pub_key512.der", cluster); + postFileAndWait( + cluster, + "runtimecode/runtimelibs.jar.bin", + FILE1, + "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ=="); + + postFileAndWait( + cluster, + "runtimecode/testurp_v1.jar.bin", + URP1, + "h6UmMzuPqu4hQFGLBMJh/6kDSEXpJlgLsQDXx0KuxXWkV5giilRP57K3towiJRh2J+rqihqIghNCi3YgzgUnWQ=="); + + postFileAndWait( + cluster, + "runtimecode/expressible.jar.bin", + EXPR1, + "ZOT11arAiPmPZYOHzqodiNnxO9pRyRozWZEBX8XGjU1/HJptFnZK+DI7eXnUtbNaMcbXE2Ze8hh4M/eGyhY8BQ=="); + + Package.AddVersion add = new Package.AddVersion(); + add.version = "1.0"; + add.pkg = "mypkg"; + add.files = Arrays.asList(new String[] {FILE1, URP1, EXPR1}); + V2Request req = + new V2Request.Builder("/cluster/package") + .forceV2(true) + .withMethod(SolrRequest.METHOD.POST) + .withPayload(Collections.singletonMap("add", add)) + .build(); + + req.process(cluster.getSolrClient()); + + CollectionAdminRequest.createCollection(COLLECTION_NAME, "conf", 2, 2) + .process(cluster.getSolrClient()); + cluster.waitForActiveCollection(COLLECTION_NAME, 2, 4); + + TestDistribPackageStore.assertResponseValues( + 10, + () -> + new V2Request.Builder("/cluster/package") + .withMethod(SolrRequest.METHOD.GET) + .build() + .process(cluster.getSolrClient()), + Map.of( + ":result:packages:mypkg[0]:version", + "1.0", + ":result:packages:mypkg[0]:files[0]", + FILE1)); + Map plugins = new LinkedHashMap<>(); + ConfigPlugin p = new ConfigPlugin(); + p.klass = "mypkg:org.apache.solr.core.RuntimeLibReqHandler"; + p.name = "/runtime"; + plugins.put("create-requesthandler", p); + + p = new ConfigPlugin(); + p.klass = "mypkg:org.apache.solr.core.RuntimeLibSearchComponent"; + p.name = "get"; + plugins.put("create-searchcomponent", p); + + p = new ConfigPlugin(); + p.klass = "mypkg:org.apache.solr.core.RuntimeLibResponseWriter"; + p.name = "json1"; + plugins.put("create-queryResponseWriter", p); + + p = new ConfigPlugin(); + p.klass = "mypkg:org.apache.solr.update.TestVersionedURP"; + p.name = "myurp"; + plugins.put("create-updateProcessor", p); + + p = new ConfigPlugin(); + p.klass = "mypkg:org.apache.solr.client.solrj.io.stream.metrics.MinCopyMetric"; + p.name = "mincopy"; + plugins.put("create-expressible", p); + + V2Request v2r = + new V2Request.Builder("/c/" + COLLECTION_NAME + "/config") + .withMethod(SolrRequest.METHOD.POST) + .withPayload(plugins) + .forceV2(true) + .build(); + cluster.getSolrClient().request(v2r); + + verifyComponent( + cluster.getSolrClient(), COLLECTION_NAME, "queryResponseWriter", "json1", "mypkg", "1.0"); + + verifyComponent( + cluster.getSolrClient(), COLLECTION_NAME, "searchComponent", "get", "mypkg", "1.0"); + + verifyComponent( + cluster.getSolrClient(), COLLECTION_NAME, "requestHandler", "/runtime", 
"mypkg", "1.0"); + + verifyComponent( + cluster.getSolrClient(), COLLECTION_NAME, "updateProcessor", "myurp", "mypkg", "1.0"); + + verifyComponent( + cluster.getSolrClient(), COLLECTION_NAME, "expressible", "mincopy", "mypkg", "1.0"); + + TestDistribPackageStore.assertResponseValues( + 10, + cluster.getSolrClient(), + new GenericSolrRequest( + SolrRequest.METHOD.GET, + "/stream", + new MapSolrParams( + Map.of("collection", COLLECTION_NAME, WT, JAVABIN, "action", "plugins"))), + Map.of(":plugins:mincopy", "org.apache.solr.client.solrj.io.stream.metrics.MinCopyMetric")); + + UpdateRequest ur = new UpdateRequest(); + ur.add(new SolrInputDocument("id", "1")); + ur.setParam("processor", "myurp"); + ur.process(cluster.getSolrClient(), COLLECTION_NAME); + cluster.getSolrClient().commit(COLLECTION_NAME, true, true); + + QueryResponse result = cluster.getSolrClient().query(COLLECTION_NAME, new SolrQuery("id:1")); + + assertEquals("Version 1", result.getResults().get(0).getFieldValue("TestVersionedURP.Ver_s")); + + executeReq( + "/" + COLLECTION_NAME + "/runtime?wt=javabin", + cluster.getRandomJetty(random()), + Utils.JAVABINCONSUMER, + Map.of("class", "org.apache.solr.core.RuntimeLibReqHandler")); + + executeReq( + "/" + COLLECTION_NAME + "/get?wt=json", + cluster.getRandomJetty(random()), + Utils.JSONCONSUMER, + Map.of("Version", "1")); + + executeReq( + "/" + COLLECTION_NAME + "/runtime?wt=json1", + cluster.getRandomJetty(random()), + Utils.JSONCONSUMER, + Map.of("wt", "org.apache.solr.core.RuntimeLibResponseWriter")); + + // now upload the second jar + postFileAndWait( + cluster, + "runtimecode/runtimelibs_v2.jar.bin", + FILE2, + "j+Rflxi64tXdqosIhbusqi6GTwZq8znunC/dzwcWW0/dHlFGKDurOaE1Nz9FSPJuXbHkVLj638yZ0Lp1ssnoYA=="); + + postFileAndWait( + cluster, + "runtimecode/testurp_v2.jar.bin", + URP2, + "P/ptFXRvQMd4oKPvadSpd+A9ffwY3gcex5GVFVRy3df0/OF8XT5my8rQz7FZva+2ORbWxdXS8NKwNrbPVHLGXw=="); + // add the version using package API + add.version = "1.1"; + add.files = Arrays.asList(new String[] {FILE2, URP2, EXPR1}); + req.process(cluster.getSolrClient()); + + verifyComponent( + cluster.getSolrClient(), COLLECTION_NAME, "queryResponseWriter", "json1", "mypkg", "1.1"); + + verifyComponent( + cluster.getSolrClient(), COLLECTION_NAME, "searchComponent", "get", "mypkg", "1.1"); + + verifyComponent( + cluster.getSolrClient(), COLLECTION_NAME, "requestHandler", "/runtime", "mypkg", "1.1"); + + verifyComponent( + cluster.getSolrClient(), COLLECTION_NAME, "updateProcessor", "myurp", "mypkg", "1.1"); + + executeReq( + "/" + COLLECTION_NAME + "/get?wt=json", + cluster.getRandomJetty(random()), + Utils.JSONCONSUMER, + Map.of("Version", "2")); + + // now upload the third jar + postFileAndWait( + cluster, + "runtimecode/runtimelibs_v3.jar.bin", + FILE3, + "a400n4T7FT+2gM0SC6+MfSOExjud8MkhTSFylhvwNjtWwUgKdPFn434Wv7Qc4QEqDVLhQoL3WqYtQmLPti0G4Q=="); + + add.version = "2.1"; + add.files = Arrays.asList(new String[] {FILE3, URP2, EXPR1}); + req.process(cluster.getSolrClient()); + + // now let's verify that the classes are updated + verifyComponent( + cluster.getSolrClient(), COLLECTION_NAME, "queryResponseWriter", "json1", "mypkg", "2.1"); + + verifyComponent( + cluster.getSolrClient(), COLLECTION_NAME, "searchComponent", "get", "mypkg", "2.1"); + + verifyComponent( + cluster.getSolrClient(), COLLECTION_NAME, "requestHandler", "/runtime", "mypkg", "2.1"); + + executeReq( + "/" + COLLECTION_NAME + "/runtime?wt=json", + cluster.getRandomJetty(random()), + Utils.JSONCONSUMER, + Map.of("Version", "2")); + + // insert a 
doc with urp
+    ur = new UpdateRequest();
+    ur.add(new SolrInputDocument("id", "2"));
+    ur.setParam("processor", "myurp");
+    ur.process(cluster.getSolrClient(), COLLECTION_NAME);
+    cluster.getSolrClient().commit(COLLECTION_NAME, true, true);
+
+    result = cluster.getSolrClient().query(COLLECTION_NAME, new SolrQuery("id:2"));
+
+    assertEquals("Version 2", result.getResults().get(0).getFieldValue("TestVersionedURP.Ver_s"));
+
+    Package.DelVersion delVersion = new Package.DelVersion();
+    delVersion.pkg = "mypkg";
+    delVersion.version = "1.0";
+    V2Request delete =
+        new V2Request.Builder("/cluster/package")
+            .withMethod(SolrRequest.METHOD.POST)
+            .forceV2(true)
+            .withPayload(Collections.singletonMap("delete", delVersion))
+            .build();
+    delete.process(cluster.getSolrClient());
+
+    verifyComponent(
+        cluster.getSolrClient(), COLLECTION_NAME, "queryResponseWriter", "json1", "mypkg", "2.1");
+
+    verifyComponent(
+        cluster.getSolrClient(), COLLECTION_NAME, "searchComponent", "get", "mypkg", "2.1");
+
+    verifyComponent(
+        cluster.getSolrClient(), COLLECTION_NAME, "requestHandler", "/runtime", "mypkg", "2.1");
+
+    // now remove the highest version, so it rolls back to the next highest one
+    delVersion.version = "2.1";
+    delete.process(cluster.getSolrClient());
+
+    verifyComponent(
+        cluster.getSolrClient(), COLLECTION_NAME, "queryResponseWriter", "json1", "mypkg", "1.1");
+
+    verifyComponent(
+        cluster.getSolrClient(), COLLECTION_NAME, "searchComponent", "get", "mypkg", "1.1");
+
+    verifyComponent(
+        cluster.getSolrClient(), COLLECTION_NAME, "requestHandler", "/runtime", "mypkg", "1.1");
+
+    ModifiableSolrParams params = new ModifiableSolrParams();
+    params.add("collection", COLLECTION_NAME);
+    new GenericSolrRequest(SolrRequest.METHOD.POST, "/config/params", params) {
+      @Override
+      public RequestWriter.ContentWriter getContentWriter(String expectedType) {
+        return new RequestWriter.StringPayloadContentWriter(
+            "{set:{PKG_VERSIONS:{mypkg : '1.1'}}}", ClientUtils.TEXT_JSON);
+      }
+    }.process(cluster.getSolrClient());
+
+    add.version = "2.1";
+    add.files = Arrays.asList(new String[] {FILE3, URP2, EXPR1});
+    req.process(cluster.getSolrClient());
+
+    // the collection's mypkg is set to use version 1.1
+    // so no upgrade
+
+    verifyComponent(
+        cluster.getSolrClient(), COLLECTION_NAME, "queryResponseWriter", "json1", "mypkg", "1.1");
+
+    verifyComponent(
+        cluster.getSolrClient(), COLLECTION_NAME, "searchComponent", "get", "mypkg", "1.1");
+
+    verifyComponent(
+        cluster.getSolrClient(), COLLECTION_NAME, "requestHandler", "/runtime", "mypkg", "1.1");
+
+    new GenericSolrRequest(SolrRequest.METHOD.POST, "/config/params", params) {
+      @Override
+      public RequestWriter.ContentWriter getContentWriter(String expectedType) {
+        return new RequestWriter.StringPayloadContentWriter(
+            "{set:{PKG_VERSIONS:{mypkg : '2.1'}}}", ClientUtils.TEXT_JSON);
+      }
+    }.process(cluster.getSolrClient());
+
+    // now, let's force every collection using 'mypkg' to refresh
+    // so that it uses version 2.1
+    new V2Request.Builder("/cluster/package")
+        .withMethod(SolrRequest.METHOD.POST)
+        .withPayload("{refresh : mypkg}")
+        .forceV2(true)
+        .build()
+        .process(cluster.getSolrClient());
+
+    verifyComponent(
+        cluster.getSolrClient(), COLLECTION_NAME, "queryResponseWriter", "json1", "mypkg", "2.1");
+
+    verifyComponent(
+        cluster.getSolrClient(), COLLECTION_NAME, "searchComponent", "get", "mypkg", "2.1");
+
+    verifyComponent(
+        cluster.getSolrClient(), COLLECTION_NAME, "requestHandler", "/runtime", "mypkg", "2.1");
+
+    plugins.clear();
+    p = new ConfigPlugin();
+    p.name = "/rt_2";
+    p.klass = "mypkg:" + C.class.getName();
+    plugins.put("create-requesthandler", p);
+
+    p = new ConfigPlugin();
+    p.name = "qp1";
+    p.klass = "mypkg:" + C2.class.getName();
+    plugins.put("create-queryparser", p);
+
+    v2r =
+        new V2Request.Builder("/c/" + COLLECTION_NAME + "/config")
+            .withMethod(SolrRequest.METHOD.POST)
+            .withPayload(plugins)
+            .forceV2(true)
+            .build();
+    cluster.getSolrClient().request(v2r);
+    assertTrue(C.informCalled);
+    assertTrue(C2.informCalled);
+
+    // we create a new node. This node does not have the packages. But it should download them from
+    // another node
+    JettySolrRunner jetty = cluster.startJettySolrRunner();
+    // create a new replica for this collection. it should end up on the new node
+    CollectionAdminRequest.addReplicaToShard(COLLECTION_NAME, "shard1")
+        .setNrtReplicas(1)
+        .setNode(jetty.getNodeName())
+        .process(cluster.getSolrClient());
+    cluster.waitForActiveCollection(COLLECTION_NAME, 2, 5);
+    checkAllNodesForFile(
+        cluster, FILE3, Map.of(":files:" + FILE3 + ":name", "runtimelibs_v3.jar"), false);
  }

+
  @SuppressWarnings("unchecked")
-  private void executeReq(String uri, JettySolrRunner jetty, Utils.InputStreamConsumer parser, Map expected) throws Exception {
-    try(HttpSolrClient client = (HttpSolrClient) jetty.newClient()){
-      TestDistribPackageStore.assertResponseValues(10,
+  private void executeReq(
+      String uri,
+      JettySolrRunner jetty,
+      Utils.InputStreamConsumer parser,
+      Map expected)
+      throws Exception {
+    try (HttpSolrClient client = (HttpSolrClient) jetty.newClient()) {
+      TestDistribPackageStore.assertResponseValues(
+          10,
          () -> {
-            Object o = Utils.executeGET(client.getHttpClient(),
-                jetty.getBaseUrl() + uri, parser);
-            if(o instanceof NavigableObject) return (NavigableObject) o;
-            if(o instanceof Map) return new MapWriterMap((Map) o);
+            Object o = Utils.executeGET(client.getHttpClient(), jetty.getBaseUrl() + uri, parser);
+            if (o instanceof NavigableObject) return (NavigableObject) o;
+            if (o instanceof Map) return new MapWriterMap((Map) o);
            throw new RuntimeException("Unknown response");
-          }, expected);
-
+          },
+          expected);
    }
  }

-  private void verifyComponent(SolrClient client, String COLLECTION_NAME,
-      String componentType, String componentName, String pkg, String version) throws Exception {
-    SolrParams params = new MapSolrParams(Map.of("collection", COLLECTION_NAME,
-        WT, JAVABIN,
-        "componentName", componentName,
-        "meta", "true"));
-
-    GenericSolrRequest req1 = new GenericSolrRequest(SolrRequest.METHOD.GET,
-        "/config/" + componentType, params);
-    TestDistribPackageStore.assertResponseValues(10,
+  private void verifyComponent(
+      SolrClient client,
+      String COLLECTION_NAME,
+      String componentType,
+      String componentName,
+      String pkg,
+      String version)
+      throws Exception {
+    SolrParams params =
+        new MapSolrParams(
+            Map.of(
+                "collection",
+                COLLECTION_NAME,
+                WT,
+                JAVABIN,
+                "componentName",
+                componentName,
+                "meta",
+                "true"));
+
+    GenericSolrRequest req1 =
+        new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/" + componentType, params);
+    TestDistribPackageStore.assertResponseValues(
+        10,
        client,
-        req1, Map.of(
+        req1,
+        Map.of(
            ":config:" + componentType + ":" + componentName + ":_packageinfo_:package", pkg,
-            ":config:" + componentType + ":" + componentName + ":_packageinfo_:version", version
-        ));
+            ":config:" + componentType + ":" + componentName + ":_packageinfo_:version", version));
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testAPI() throws Exception {
-    String errPath =
"/error/details[0]/errorMessages[0]"; - String FILE1 = "/mypkg/v.0.12/jar_a.jar"; - String FILE2 = "/mypkg/v.0.12/jar_b.jar"; - String FILE3 = "/mypkg/v.0.13/jar_a.jar"; - - Package.AddVersion add = new Package.AddVersion(); - add.version = "0.12"; - add.pkg = "test_pkg"; - add.files = List.of(FILE1, FILE2); - V2Request req = new V2Request.Builder("/cluster/package") - .forceV2(true) - .withMethod(SolrRequest.METHOD.POST) - .withPayload(Collections.singletonMap("add", add)) - .build(); - - //the files is not yet there. The command should fail with error saying "No such file" - expectError(req, cluster.getSolrClient(), errPath, "No such file:"); - - - //post the jar file. No signature is sent - postFileAndWait(cluster, "runtimecode/runtimelibs.jar.bin", FILE1, null); - - - add.files = Collections.singletonList(FILE1); - expectError(req, cluster.getSolrClient(), errPath, - FILE1 + " has no signature"); - //now we upload the keys - byte[] derFile = readFile("cryptokeys/pub_key512.der"); - uploadKey(derFile, PackageStoreAPI.KEYS_DIR+"/pub_key512.der", cluster); - //and upload the same file with a different name but it has proper signature - postFileAndWait(cluster, "runtimecode/runtimelibs.jar.bin", FILE2, - "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ=="); - // with correct signature - //after uploading the file, let's delete the keys to see if we get proper error message - add.files = Collections.singletonList(FILE2); - /*expectError(req, cluster.getSolrClient(), errPath, - "ZooKeeper does not have any public keys");*/ - - //Now lets' put the keys back - - //this time we have a file with proper signature, public keys are in ZK - // so the add {} command should succeed - req.process(cluster.getSolrClient()); - - //Now verify the data in ZK - TestDistribPackageStore.assertResponseValues(1, - () -> new MapWriterMap((Map) Utils.fromJSON(cluster.getZkClient().getData(SOLR_PKGS_PATH, - null, new Stat(), true))), - Map.of( - ":packages:test_pkg[0]:version", "0.12", - ":packages:test_pkg[0]:files[0]", FILE2 - )); - - //post a new jar with a proper signature - postFileAndWait(cluster, "runtimecode/runtimelibs_v2.jar.bin", FILE3, - "j+Rflxi64tXdqosIhbusqi6GTwZq8znunC/dzwcWW0/dHlFGKDurOaE1Nz9FSPJuXbHkVLj638yZ0Lp1ssnoYA=="); - - - //this time we are adding the second version of the package (0.13) - add.version = "0.13"; - add.pkg = "test_pkg"; - add.files = Collections.singletonList(FILE3); - - //this request should succeed - req.process(cluster.getSolrClient()); - //no verify the data (/packages.json) in ZK - TestDistribPackageStore.assertResponseValues(1, - () -> new MapWriterMap((Map) Utils.fromJSON(cluster.getZkClient().getData(SOLR_PKGS_PATH, - null, new Stat(), true))), - Map.of( - ":packages:test_pkg[1]:version", "0.13", - ":packages:test_pkg[1]:files[0]", FILE3 - )); - - //Now we will just delete one version - Package.DelVersion delVersion = new Package.DelVersion(); - delVersion.version = "0.1";//this version does not exist - delVersion.pkg = "test_pkg"; - req = new V2Request.Builder("/cluster/package") - .forceV2(true) - .withMethod(SolrRequest.METHOD.POST) - .withPayload(Collections.singletonMap("delete", delVersion)) - .build(); - - //we are expecting an error - expectError(req, cluster.getSolrClient(), errPath, "No such version:"); - - delVersion.version = "0.12";//correct version. 
Should succeed
-    req.process(cluster.getSolrClient());
-    //Verify with ZK that the data is correct
-    TestDistribPackageStore.assertResponseValues(1,
-        () -> new MapWriterMap((Map) Utils.fromJSON(cluster.getZkClient().getData(SOLR_PKGS_PATH,
-            null, new Stat(), true))),
-        Map.of(
-            ":packages:test_pkg[0]:version", "0.13",
-            ":packages:test_pkg[0]:files[0]", FILE3
-        ));
-
-    //So far we have been verifying the details with ZK directly
-    //use the package read API to verify with each node that it has the correct data
-    for (JettySolrRunner jetty : cluster.getJettySolrRunners()) {
-      String path = jetty.getBaseUrl().toString().replace("/solr", "/api") + "/cluster/package?wt=javabin";
-      TestDistribPackageStore.assertResponseValues(10, new Callable() {
-        @Override
-        public NavigableObject call() throws Exception {
-          try (HttpSolrClient solrClient = (HttpSolrClient) jetty.newClient()) {
-            return (NavigableObject) Utils.executeGET(solrClient.getHttpClient(), path, Utils.JAVABINCONSUMER);
+    String errPath = "/error/details[0]/errorMessages[0]";
+    String FILE1 = "/mypkg/v.0.12/jar_a.jar";
+    String FILE2 = "/mypkg/v.0.12/jar_b.jar";
+    String FILE3 = "/mypkg/v.0.13/jar_a.jar";
+
+    Package.AddVersion add = new Package.AddVersion();
+    add.version = "0.12";
+    add.pkg = "test_pkg";
+    add.files = List.of(FILE1, FILE2);
+    V2Request req =
+        new V2Request.Builder("/cluster/package")
+            .forceV2(true)
+            .withMethod(SolrRequest.METHOD.POST)
+            .withPayload(Collections.singletonMap("add", add))
+            .build();
+
+    // the files are not there yet. The command should fail with an error saying "No such file"
+    expectError(req, cluster.getSolrClient(), errPath, "No such file:");
+
+    // post the jar file. No signature is sent
+    postFileAndWait(cluster, "runtimecode/runtimelibs.jar.bin", FILE1, null);
+
+    add.files = Collections.singletonList(FILE1);
+    expectError(req, cluster.getSolrClient(), errPath, FILE1 + " has no signature");
+    // now we upload the keys
+    byte[] derFile = readFile("cryptokeys/pub_key512.der");
+    uploadKey(derFile, PackageStoreAPI.KEYS_DIR + "/pub_key512.der", cluster);
+    // and upload the same file with a different name, this time with a proper signature
+    postFileAndWait(
+        cluster,
+        "runtimecode/runtimelibs.jar.bin",
+        FILE2,
+        "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ==");
+    // with correct signature
+    // after uploading the file, let's delete the keys to see if we get a proper error message
+    add.files = Collections.singletonList(FILE2);
+    /*expectError(req, cluster.getSolrClient(), errPath,
+    "ZooKeeper does not have any public keys");*/
+
+    // Now let's put the keys back
+
+    // this time we have a file with proper signature, public keys are in ZK
+    // so the add {} command should succeed
+    req.process(cluster.getSolrClient());
+
+    // Now verify the data in ZK
+    TestDistribPackageStore.assertResponseValues(
+        1,
+        () ->
+            new MapWriterMap(
+                (Map)
+                    Utils.fromJSON(
+                        cluster.getZkClient().getData(SOLR_PKGS_PATH, null, new Stat(), true))),
+        Map.of(":packages:test_pkg[0]:version", "0.12", ":packages:test_pkg[0]:files[0]", FILE2));
+
+    // post a new jar with a proper signature
+    postFileAndWait(
+        cluster,
+        "runtimecode/runtimelibs_v2.jar.bin",
+        FILE3,
+        "j+Rflxi64tXdqosIhbusqi6GTwZq8znunC/dzwcWW0/dHlFGKDurOaE1Nz9FSPJuXbHkVLj638yZ0Lp1ssnoYA==");
+
+    // this time we are adding the second version of the package (0.13)
+    add.version = "0.13";
+    add.pkg = "test_pkg";
+    add.files = Collections.singletonList(FILE3);
+
+    // this request should succeed
+    req.process(cluster.getSolrClient());
+    // now verify the data (/packages.json) in ZK
+    TestDistribPackageStore.assertResponseValues(
+        1,
+        () ->
+            new MapWriterMap(
+                (Map)
+                    Utils.fromJSON(
+                        cluster.getZkClient().getData(SOLR_PKGS_PATH, null, new Stat(), true))),
+        Map.of(":packages:test_pkg[1]:version", "0.13", ":packages:test_pkg[1]:files[0]", FILE3));
+
+    // Now we will just delete one version
+    Package.DelVersion delVersion = new Package.DelVersion();
+    delVersion.version = "0.1"; // this version does not exist
+    delVersion.pkg = "test_pkg";
+    req =
+        new V2Request.Builder("/cluster/package")
+            .forceV2(true)
+            .withMethod(SolrRequest.METHOD.POST)
+            .withPayload(Collections.singletonMap("delete", delVersion))
+            .build();
+
+    // we are expecting an error
+    expectError(req, cluster.getSolrClient(), errPath, "No such version:");
+
+    delVersion.version = "0.12"; // correct version. Should succeed
+    req.process(cluster.getSolrClient());
+    // Verify with ZK that the data is correct
+    TestDistribPackageStore.assertResponseValues(
+        1,
+        () ->
+            new MapWriterMap(
+                (Map)
+                    Utils.fromJSON(
+                        cluster.getZkClient().getData(SOLR_PKGS_PATH, null, new Stat(), true))),
+        Map.of(":packages:test_pkg[0]:version", "0.13", ":packages:test_pkg[0]:files[0]", FILE3));
+
+    // So far we have been verifying the details with ZK directly
+    // use the package read API to verify with each node that it has the correct data
+    for (JettySolrRunner jetty : cluster.getJettySolrRunners()) {
+      String path =
+          jetty.getBaseUrl().toString().replace("/solr", "/api") + "/cluster/package?wt=javabin";
+      TestDistribPackageStore.assertResponseValues(
+          10,
+          new Callable() {
+            @Override
+            public NavigableObject call() throws Exception {
+              try (HttpSolrClient solrClient = (HttpSolrClient) jetty.newClient()) {
+                return (NavigableObject)
+                    Utils.executeGET(solrClient.getHttpClient(), path, Utils.JAVABINCONSUMER);
+              }
            }
-          }
-        }
-      }, Map.of(
-          ":result:packages:test_pkg[0]:version", "0.13",
-          ":result:packages:test_pkg[0]:files[0]", FILE3
-      ));
-    }
+          },
+          Map.of(
+              ":result:packages:test_pkg[0]:version",
+              "0.13",
+              ":result:packages:test_pkg[0]:files[0]",
+              FILE3));
+    }
  }
-  public static class C extends RequestHandlerBase implements SolrCoreAware {
+
+  public static class C extends RequestHandlerBase implements SolrCoreAware {
    static boolean informCalled = false;

    @Override
    public void inform(SolrCore core) {
      informCalled = true;
-
    }

    @Override
-    public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) {
-
-    }
+    public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) {}

    @Override
    public String getDescription() {
@@ -680,15 +720,14 @@ public Name getPermissionName(AuthorizationContext request) {
  public static class C2 extends QParserPlugin implements ResourceLoaderAware {
    static boolean informCalled = false;
-
    @Override
    public void inform(ResourceLoader loader) throws IOException {
      informCalled = true;
-
    }

    @Override
-    public QParser createParser(String qstr, SolrParams localParams, SolrParams params, SolrQueryRequest req) {
+    public QParser createParser(
+        String qstr, SolrParams localParams, SolrParams params, SolrQueryRequest req) {
      return null;
    }
  }
@@ -698,108 +737,134 @@ public void testSchemaPlugins() throws Exception {
    String COLLECTION_NAME = "testSchemaLoadingColl";
    System.setProperty("managed.schema.mutable", "true");

-    String FILE1 = "/schemapkg/schema-plugins.jar";
-    byte[] derFile = readFile("cryptokeys/pub_key512.der");
-    uploadKey(derFile, PackageStoreAPI.KEYS_DIR+"/pub_key512.der",
cluster); - postFileAndWait(cluster, "runtimecode/schema-plugins.jar.bin", FILE1, - "U+AdO/jgY3DtMpeFRGoTQk72iA5g/qjPvdQYPGBaXB5+ggcTZk4FoIWiueB0bwGJ8Mg3V/elxOqEbD2JR8R0tA=="); - - String FILE2 = "/schemapkg/payload-component.jar"; - postFileAndWait(cluster, "runtimecode/payload-component.jar.bin", FILE2, - "gI6vYUDmSXSXmpNEeK1cwqrp4qTeVQgizGQkd8A4Prx2K8k7c5QlXbcs4lxFAAbbdXz9F4esBqTCiLMjVDHJ5Q=="); - - Package.AddVersion add = new Package.AddVersion(); - add.version = "1.0"; - add.pkg = "schemapkg"; - add.files = Arrays.asList(FILE1,FILE2); - V2Request req = new V2Request.Builder("/cluster/package") - .forceV2(true) - .withMethod(SolrRequest.METHOD.POST) - .withPayload(Collections.singletonMap("add", add)) - .build(); - req.process(cluster.getSolrClient()); - - TestDistribPackageStore.assertResponseValues(10, - () -> new V2Request.Builder("/cluster/package"). - withMethod(SolrRequest.METHOD.GET) - .build().process(cluster.getSolrClient()), - Map.of( - ":result:packages:schemapkg[0]:version", "1.0", - ":result:packages:schemapkg[0]:files[0]", FILE1 - )); - - CollectionAdminRequest - .createCollection(COLLECTION_NAME, "conf1", 2, 2) - .process(cluster.getSolrClient()); - cluster.waitForActiveCollection(COLLECTION_NAME, 2, 4); - - verifySchemaComponent(cluster.getSolrClient(), COLLECTION_NAME, "/schema/fieldtypes/myNewTextFieldWithAnalyzerClass", - Map.of(":fieldType:analyzer:charFilters[0]:_packageinfo_:version" ,"1.0", - ":fieldType:analyzer:tokenizer:_packageinfo_:version","1.0", - ":fieldType:_packageinfo_:version","1.0")); - - add = new Package.AddVersion(); - add.version = "2.0"; - add.pkg = "schemapkg"; - add.files = Arrays.asList(FILE1); - req = new V2Request.Builder("/cluster/package") - .forceV2(true) - .withMethod(SolrRequest.METHOD.POST) - .withPayload(Collections.singletonMap("add", add)) - .build(); - req.process(cluster.getSolrClient()); - - TestDistribPackageStore.assertResponseValues(10, - () -> new V2Request.Builder("/cluster/package"). 
- withMethod(SolrRequest.METHOD.GET) - .build().process(cluster.getSolrClient()), - Map.of( - ":result:packages:schemapkg[1]:version", "2.0", - ":result:packages:schemapkg[1]:files[0]", FILE1 - )); - - verifySchemaComponent(cluster.getSolrClient(), COLLECTION_NAME, "/schema/fieldtypes/myNewTextFieldWithAnalyzerClass", - Map.of(":fieldType:analyzer:charFilters[0]:_packageinfo_:version" ,"2.0", - ":fieldType:analyzer:tokenizer:_packageinfo_:version","2.0", - ":fieldType:_packageinfo_:version","2.0")); + String FILE1 = "/schemapkg/schema-plugins.jar"; + byte[] derFile = readFile("cryptokeys/pub_key512.der"); + uploadKey(derFile, PackageStoreAPI.KEYS_DIR + "/pub_key512.der", cluster); + postFileAndWait( + cluster, + "runtimecode/schema-plugins.jar.bin", + FILE1, + "U+AdO/jgY3DtMpeFRGoTQk72iA5g/qjPvdQYPGBaXB5+ggcTZk4FoIWiueB0bwGJ8Mg3V/elxOqEbD2JR8R0tA=="); + + String FILE2 = "/schemapkg/payload-component.jar"; + postFileAndWait( + cluster, + "runtimecode/payload-component.jar.bin", + FILE2, + "gI6vYUDmSXSXmpNEeK1cwqrp4qTeVQgizGQkd8A4Prx2K8k7c5QlXbcs4lxFAAbbdXz9F4esBqTCiLMjVDHJ5Q=="); + + Package.AddVersion add = new Package.AddVersion(); + add.version = "1.0"; + add.pkg = "schemapkg"; + add.files = Arrays.asList(FILE1, FILE2); + V2Request req = + new V2Request.Builder("/cluster/package") + .forceV2(true) + .withMethod(SolrRequest.METHOD.POST) + .withPayload(Collections.singletonMap("add", add)) + .build(); + req.process(cluster.getSolrClient()); + + TestDistribPackageStore.assertResponseValues( + 10, + () -> + new V2Request.Builder("/cluster/package") + .withMethod(SolrRequest.METHOD.GET) + .build() + .process(cluster.getSolrClient()), + Map.of( + ":result:packages:schemapkg[0]:version", + "1.0", + ":result:packages:schemapkg[0]:files[0]", + FILE1)); + + CollectionAdminRequest.createCollection(COLLECTION_NAME, "conf1", 2, 2) + .process(cluster.getSolrClient()); + cluster.waitForActiveCollection(COLLECTION_NAME, 2, 4); + + verifySchemaComponent( + cluster.getSolrClient(), + COLLECTION_NAME, + "/schema/fieldtypes/myNewTextFieldWithAnalyzerClass", + Map.of( + ":fieldType:analyzer:charFilters[0]:_packageinfo_:version", + "1.0", + ":fieldType:analyzer:tokenizer:_packageinfo_:version", + "1.0", + ":fieldType:_packageinfo_:version", + "1.0")); + + add = new Package.AddVersion(); + add.version = "2.0"; + add.pkg = "schemapkg"; + add.files = Arrays.asList(FILE1); + req = + new V2Request.Builder("/cluster/package") + .forceV2(true) + .withMethod(SolrRequest.METHOD.POST) + .withPayload(Collections.singletonMap("add", add)) + .build(); + req.process(cluster.getSolrClient()); + + TestDistribPackageStore.assertResponseValues( + 10, + () -> + new V2Request.Builder("/cluster/package") + .withMethod(SolrRequest.METHOD.GET) + .build() + .process(cluster.getSolrClient()), + Map.of( + ":result:packages:schemapkg[1]:version", + "2.0", + ":result:packages:schemapkg[1]:files[0]", + FILE1)); + + verifySchemaComponent( + cluster.getSolrClient(), + COLLECTION_NAME, + "/schema/fieldtypes/myNewTextFieldWithAnalyzerClass", + Map.of( + ":fieldType:analyzer:charFilters[0]:_packageinfo_:version", + "2.0", + ":fieldType:analyzer:tokenizer:_packageinfo_:version", + "2.0", + ":fieldType:_packageinfo_:version", + "2.0")); } - private void verifySchemaComponent(SolrClient client, String COLLECTION_NAME, String path, - Map expected) throws Exception { - SolrParams params = new MapSolrParams(Map.of("collection", COLLECTION_NAME, - WT, JAVABIN, - "meta", "true")); - - GenericSolrRequest req = new 
GenericSolrRequest(SolrRequest.METHOD.GET,path
-        , params);
-    TestDistribPackageStore.assertResponseValues(10,
-        client,
-        req, expected);
+
+  private void verifySchemaComponent(
+      SolrClient client, String COLLECTION_NAME, String path, Map expected)
+      throws Exception {
+    SolrParams params =
+        new MapSolrParams(Map.of("collection", COLLECTION_NAME, WT, JAVABIN, "meta", "true"));
+
+    GenericSolrRequest req = new GenericSolrRequest(SolrRequest.METHOD.GET, path, params);
+    TestDistribPackageStore.assertResponseValues(10, client, req, expected);
  }

-  public static void postFileAndWait(MiniSolrCloudCluster cluster, String fname, String path, String sig) throws Exception {
+  public static void postFileAndWait(
+      MiniSolrCloudCluster cluster, String fname, String path, String sig) throws Exception {
    ByteBuffer fileContent = getFileContent(fname);
    @SuppressWarnings("ByteBufferBackingArray") // this is the result of a call to wrap()
    String sha512 = DigestUtils.sha512Hex(fileContent.array());

-    TestDistribPackageStore.postFile(cluster.getSolrClient(),
-        fileContent,
-        path, sig);// has file, but no signature
+    TestDistribPackageStore.postFile(
+        cluster.getSolrClient(), fileContent, path, sig); // has file, but no signature

-    TestDistribPackageStore.checkAllNodesForFile(cluster, path, Map.of(
-        ":files:" + path + ":sha512",
-        sha512
-    ), false);
+    TestDistribPackageStore.checkAllNodesForFile(
+        cluster, path, Map.of(":files:" + path + ":sha512", sha512), false);
  }

-  private void expectError(V2Request req, SolrClient client, String errPath, String expectErrorMsg) throws IOException, SolrServerException {
+  private void expectError(V2Request req, SolrClient client, String errPath, String expectErrorMsg)
+      throws IOException, SolrServerException {
    try {
      req.process(client);
      fail("should have failed with message : " + expectErrorMsg);
    } catch (BaseHttpSolrClient.RemoteExecutionException e) {
      String msg = e.getMetaData()._getStr(errPath, "");
-      assertTrue("should have failed with message: " + expectErrorMsg + "actual message : " + msg,
-          msg.contains(expectErrorMsg)
-      );
+      assertTrue(
+          "should have failed with message: " + expectErrorMsg + ", actual message: " + msg,
+          msg.contains(expectErrorMsg));
    }
  }
@@ -822,7 +887,6 @@ public static void main(String[] args) throws Exception {
    persistZip("/tmp/x.jar", MyPatternReplaceCharFilterFactory.class, MyTextField.class, MyWhitespaceTokenizerFactory.class);
  }*/

-
  public static ByteBuffer persistZip(String loc, Class... classes) throws IOException {
    ByteBuffer jar = generateZip(classes);
    try (FileOutputStream fos = new FileOutputStream(loc)) {
@@ -839,7 +903,8 @@ public static ByteBuffer generateZip(Class... classes) throws IOException {
    for (Class c : classes) {
      String path = c.getName().replace('.', '/').concat(".class");
      ZipEntry entry = new ZipEntry(path);
-      ByteBuffer b = SimplePostTool.inputStreamToByteArray(c.getClassLoader().getResourceAsStream(path));
+      ByteBuffer b =
+          SimplePostTool.inputStreamToByteArray(c.getClassLoader().getResourceAsStream(path));
      zipOut.putNextEntry(entry);
      zipOut.write(b.array(), b.arrayOffset(), b.limit());
      zipOut.closeEntry();
@@ -847,5 +912,4 @@ public static ByteBuffer generateZip(Class...
classes) throws IOException { } return bos.getByteBuffer(); } - } diff --git a/solr/core/src/test/org/apache/solr/request/RegexBytesRefFilterTest.java b/solr/core/src/test/org/apache/solr/request/RegexBytesRefFilterTest.java index 888d05d029c..041b5012d54 100644 --- a/solr/core/src/test/org/apache/solr/request/RegexBytesRefFilterTest.java +++ b/solr/core/src/test/org/apache/solr/request/RegexBytesRefFilterTest.java @@ -30,5 +30,4 @@ public void testSubstringBytesRefFilter() { assertFalse(filter.test(new BytesRef("qux"))); assertFalse(filter.test(new BytesRef("quxfoo"))); } - } diff --git a/solr/core/src/test/org/apache/solr/request/SimpleFacetsTest.java b/solr/core/src/test/org/apache/solr/request/SimpleFacetsTest.java index 6d04b90cff9..28016a7df26 100644 --- a/solr/core/src/test/org/apache/solr/request/SimpleFacetsTest.java +++ b/solr/core/src/test/org/apache/solr/request/SimpleFacetsTest.java @@ -16,6 +16,8 @@ */ package org.apache.solr.request; +import static org.apache.solr.common.util.Utils.fromJSONString; + import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Arrays; @@ -24,7 +26,6 @@ import java.util.List; import java.util.Locale; import java.util.Map; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; @@ -44,9 +45,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.common.util.Utils.fromJSONString; - - public class SimpleFacetsTest extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -54,17 +52,17 @@ public class SimpleFacetsTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); - initCore("solrconfig.xml","schema.xml"); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); + initCore("solrconfig.xml", "schema.xml"); createIndex(); } static int random_commit_percent = 30; - static int random_dupe_percent = 25; // some duplicates in the index to create deleted docs + static int random_dupe_percent = 25; // some duplicates in the index to create deleted docs static void randomCommit(int percent_chance) { - if (random().nextInt(100) <= percent_chance) - assertU(commit()); + if (random().nextInt(100) <= percent_chance) assertU(commit()); } static ArrayList pendingDocs = new ArrayList<>(); @@ -72,30 +70,29 @@ static void randomCommit(int percent_chance) { // committing randomly gives different looking segments each time static void add_doc(String... 
fieldsAndValues) { do { - //do our own copy-field: + // do our own copy-field: List fieldsAndValuesList = new ArrayList<>(Arrays.asList(fieldsAndValues)); int idx = fieldsAndValuesList.indexOf("a_tdt"); if (idx >= 0) { fieldsAndValuesList.add("a_drf"); - fieldsAndValuesList.add(fieldsAndValuesList.get(idx + 1));//copy + fieldsAndValuesList.add(fieldsAndValuesList.get(idx + 1)); // copy } idx = fieldsAndValuesList.indexOf("bday"); if (idx >= 0) { fieldsAndValuesList.add("bday_drf"); - fieldsAndValuesList.add(fieldsAndValuesList.get(idx + 1));//copy + fieldsAndValuesList.add(fieldsAndValuesList.get(idx + 1)); // copy } fieldsAndValues = fieldsAndValuesList.toArray(new String[fieldsAndValuesList.size()]); - pendingDocs.add(fieldsAndValues); + pendingDocs.add(fieldsAndValues); } while (random().nextInt(100) <= random_dupe_percent); // assertU(adoc(fieldsAndValues)); // randomCommit(random_commit_percent); } - static void createIndex() throws Exception { - doEmptyFacetCounts(); // try on empty index + doEmptyFacetCounts(); // try on empty index indexSimpleFacetCounts(); indexDateFacets(); @@ -114,66 +111,138 @@ static void createIndex() throws Exception { } static void indexSimpleFacetCounts() { - add_doc("id", "42", - "range_facet_f", "35.3", - "range_facet_f1", "35.3", - "trait_s", "Tool", "trait_s", "Obnoxious", - "name", "Zapp Brannigan", - "foo_s","A", "foo_s","B", - "range_facet_mv_f", "1.0", - "range_facet_mv_f", "2.5", - "range_facet_mv_f", "3.7", - "range_facet_mv_f", "3.3" - ); - add_doc("id", "43" , - "range_facet_f", "28.789", - "range_facet_f1", "28.789", - "title", "Democratic Order of Planets", - "foo_s","A", "foo_s","B", - "range_facet_mv_f", "3.0", - "range_facet_mv_f", "7.5", - "range_facet_mv_f", "12.0" - ); - add_doc("id", "44", - "range_facet_f", "15.97", - "range_facet_f1", "15.97", - "trait_s", "Tool", - "name", "The Zapper", - "foo_s","A", "foo_s","B", "foo_s","C", - "range_facet_mv_f", "0.0", - "range_facet_mv_f", "5", - "range_facet_mv_f", "74" - ); - add_doc("id", "45", - "range_facet_f", "30.0", - "range_facet_f1", "30.0", - "trait_s", "Chauvinist", - "title", "25 star General", - "foo_s","A", "foo_s","B", - "range_facet_mv_f_f", "12.0", - "range_facet_mv_f", "212.452", - "range_facet_mv_f", "32.77", - "range_facet_mv_f", "0.123" - ); - add_doc("id", "46", - "range_facet_f", "20.0", - "range_facet_f1", "20.0", - "trait_s", "Obnoxious", - "subject", "Defeated the pacifists of the Gandhi nebula", - "foo_s","A", "foo_s","B", - "range_facet_mv_f", "123.0", - "range_facet_mv_f", "2.0", - "range_facet_mv_f", "7.3", - "range_facet_mv_f", "0.123" - ); - add_doc("id", "47", - "range_facet_f", "28.62", - "range_facet_f1", "28.62", - "trait_s", "Pig", - "text", "line up and fly directly at the enemy death cannons, clogging them with wreckage!", - "zerolen_s","", - "foo_s","A", "foo_s","B", "foo_s","C" - ); + add_doc( + "id", + "42", + "range_facet_f", + "35.3", + "range_facet_f1", + "35.3", + "trait_s", + "Tool", + "trait_s", + "Obnoxious", + "name", + "Zapp Brannigan", + "foo_s", + "A", + "foo_s", + "B", + "range_facet_mv_f", + "1.0", + "range_facet_mv_f", + "2.5", + "range_facet_mv_f", + "3.7", + "range_facet_mv_f", + "3.3"); + add_doc( + "id", + "43", + "range_facet_f", + "28.789", + "range_facet_f1", + "28.789", + "title", + "Democratic Order of Planets", + "foo_s", + "A", + "foo_s", + "B", + "range_facet_mv_f", + "3.0", + "range_facet_mv_f", + "7.5", + "range_facet_mv_f", + "12.0"); + add_doc( + "id", + "44", + "range_facet_f", + "15.97", + "range_facet_f1", + "15.97", + 
"trait_s", + "Tool", + "name", + "The Zapper", + "foo_s", + "A", + "foo_s", + "B", + "foo_s", + "C", + "range_facet_mv_f", + "0.0", + "range_facet_mv_f", + "5", + "range_facet_mv_f", + "74"); + add_doc( + "id", + "45", + "range_facet_f", + "30.0", + "range_facet_f1", + "30.0", + "trait_s", + "Chauvinist", + "title", + "25 star General", + "foo_s", + "A", + "foo_s", + "B", + "range_facet_mv_f_f", + "12.0", + "range_facet_mv_f", + "212.452", + "range_facet_mv_f", + "32.77", + "range_facet_mv_f", + "0.123"); + add_doc( + "id", + "46", + "range_facet_f", + "20.0", + "range_facet_f1", + "20.0", + "trait_s", + "Obnoxious", + "subject", + "Defeated the pacifists of the Gandhi nebula", + "foo_s", + "A", + "foo_s", + "B", + "range_facet_mv_f", + "123.0", + "range_facet_mv_f", + "2.0", + "range_facet_mv_f", + "7.3", + "range_facet_mv_f", + "0.123"); + add_doc( + "id", + "47", + "range_facet_f", + "28.62", + "range_facet_f1", + "28.62", + "trait_s", + "Pig", + "text", + "line up and fly directly at the enemy death cannons, clogging them with wreckage!", + "zerolen_s", + "", + "foo_s", + "A", + "foo_s", + "B", + "foo_s", + "C"); add_doc("id", "101", "myfield_s", "foo"); add_doc("id", "102", "myfield_s", "bar"); } @@ -188,105 +257,159 @@ static void indexSimpleGroupedFacetCounts() { public void testDvMethodNegativeFloatRangeFacet() throws Exception { String field = "negative_num_f1_dv"; - assertTrue("Unexpected schema configuration", h.getCore().getLatestSchema().getField(field).hasDocValues()); - assertEquals("Unexpected schema configuration", NumberType.FLOAT, h.getCore().getLatestSchema().getField(field).getType().getNumberType()); - assertFalse("Unexpected schema configuration", h.getCore().getLatestSchema().getField(field).multiValued()); - - final String[] commonParams = { - "q", "*:*", "facet", "true", "facet.range.start", "-2", "facet.range.end", "0", "facet.range.gap", "2" + assertTrue( + "Unexpected schema configuration", + h.getCore().getLatestSchema().getField(field).hasDocValues()); + assertEquals( + "Unexpected schema configuration", + NumberType.FLOAT, + h.getCore().getLatestSchema().getField(field).getType().getNumberType()); + assertFalse( + "Unexpected schema configuration", + h.getCore().getLatestSchema().getField(field).multiValued()); + + final String[] commonParams = { + "q", + "*:*", + "facet", + "true", + "facet.range.start", + "-2", + "facet.range.end", + "0", + "facet.range.gap", + "2" }; - final String countAssertion - = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='%s']/lst[@name='counts']/int[@name='-2.0'][.='1']"; + final String countAssertion = + "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='%s']/lst[@name='counts']/int[@name='-2.0'][.='1']"; assertU(adoc("id", "10001", field, "-1.0")); assertU(commit()); - assertQ(req(commonParams, "facet.range", field, "facet.range.method", "filter"), - String.format(Locale.ROOT, countAssertion, field) - ); - assertQ(req(commonParams, "facet.range", field, "facet.range.method", "dv"), - String.format(Locale.ROOT, countAssertion, field) - ); + assertQ( + req(commonParams, "facet.range", field, "facet.range.method", "filter"), + String.format(Locale.ROOT, countAssertion, field)); + assertQ( + req(commonParams, "facet.range", field, "facet.range.method", "dv"), + String.format(Locale.ROOT, countAssertion, field)); } - public void testDefaultsAndAppends() throws Exception { // all defaults - assertQ( req("indent","true", "q","*:*", "rows","0", "facet","true", "qt","/search-facet-def") - // only one 
default facet.field - ,"//lst[@name='facet_fields']/lst[@name='foo_s']" - ,"count(//lst[@name='facet_fields']/lst[@name='foo_s'])=1" - ,"count(//lst[@name='facet_fields']/lst)=1" - // only one default facet.query - ,"//lst[@name='facet_queries']/int[@name='foo_s:bar']" - ,"count(//lst[@name='facet_queries']/int[@name='foo_s:bar'])=1" - ,"count(//lst[@name='facet_queries']/int)=1" - ); + assertQ( + req("indent", "true", "q", "*:*", "rows", "0", "facet", "true", "qt", "/search-facet-def") + // only one default facet.field + , + "//lst[@name='facet_fields']/lst[@name='foo_s']", + "count(//lst[@name='facet_fields']/lst[@name='foo_s'])=1", + "count(//lst[@name='facet_fields']/lst)=1" + // only one default facet.query + , + "//lst[@name='facet_queries']/int[@name='foo_s:bar']", + "count(//lst[@name='facet_queries']/int[@name='foo_s:bar'])=1", + "count(//lst[@name='facet_queries']/int)=1"); // override default & pre-pend to appends - assertQ( req("indent","true", "q","*:*", "rows","0", "facet","true", "qt","/search-facet-def", - "facet.field", "bar_s", - "facet.query", "bar_s:yak" - ) - // override single default facet.field - ,"//lst[@name='facet_fields']/lst[@name='bar_s']" - ,"count(//lst[@name='facet_fields']/lst[@name='bar_s'])=1" - ,"count(//lst[@name='facet_fields']/lst)=1" - // add an additional facet.query - ,"//lst[@name='facet_queries']/int[@name='foo_s:bar']" - ,"//lst[@name='facet_queries']/int[@name='bar_s:yak']" - ,"count(//lst[@name='facet_queries']/int[@name='foo_s:bar'])=1" - ,"count(//lst[@name='facet_queries']/int[@name='bar_s:yak'])=1" - ,"count(//lst[@name='facet_queries']/int)=2" - ); + assertQ( + req( + "indent", + "true", + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "qt", + "/search-facet-def", + "facet.field", + "bar_s", + "facet.query", + "bar_s:yak") + // override single default facet.field + , + "//lst[@name='facet_fields']/lst[@name='bar_s']", + "count(//lst[@name='facet_fields']/lst[@name='bar_s'])=1", + "count(//lst[@name='facet_fields']/lst)=1" + // add an additional facet.query + , + "//lst[@name='facet_queries']/int[@name='foo_s:bar']", + "//lst[@name='facet_queries']/int[@name='bar_s:yak']", + "count(//lst[@name='facet_queries']/int[@name='foo_s:bar'])=1", + "count(//lst[@name='facet_queries']/int[@name='bar_s:yak'])=1", + "count(//lst[@name='facet_queries']/int)=2"); } public void testInvariants() throws Exception { // no matter if we try to use facet.field or facet.query, results shouldn't change - for (String ff : new String[] { "facet.field", "bogus" }) { - for (String fq : new String[] { "facet.query", "bogus" }) { - assertQ( req("indent","true", "q", "*:*", "rows","0", "facet","true", - "qt","/search-facet-invariants", - ff, "bar_s", - fq, "bar_s:yak") - // only one invariant facet.field - ,"//lst[@name='facet_fields']/lst[@name='foo_s']" - ,"count(//lst[@name='facet_fields']/lst[@name='foo_s'])=1" - ,"count(//lst[@name='facet_fields']/lst)=1" - // only one invariant facet.query - ,"//lst[@name='facet_queries']/int[@name='foo_s:bar']" - ,"count(//lst[@name='facet_queries']/int[@name='foo_s:bar'])=1" - ,"count(//lst[@name='facet_queries']/int)=1" - ); + for (String ff : new String[] {"facet.field", "bogus"}) { + for (String fq : new String[] {"facet.query", "bogus"}) { + assertQ( + req( + "indent", + "true", + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "qt", + "/search-facet-invariants", + ff, + "bar_s", + fq, + "bar_s:yak") + // only one invariant facet.field + , + "//lst[@name='facet_fields']/lst[@name='foo_s']", + 
"count(//lst[@name='facet_fields']/lst[@name='foo_s'])=1", + "count(//lst[@name='facet_fields']/lst)=1" + // only one invariant facet.query + , + "//lst[@name='facet_queries']/int[@name='foo_s:bar']", + "count(//lst[@name='facet_queries']/int[@name='foo_s:bar'])=1", + "count(//lst[@name='facet_queries']/int)=1"); } } } @Test public void testCachingBigTerms() throws Exception { - assertQ( req("indent","true", "q", "id_i1:[42 TO 47]", - "facet", "true", - "facet.field", "foo_s" // big terms should cause foo_s:A to be cached - ), - "*[count(//doc)=6]" - ); + assertQ( + req( + "indent", + "true", + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "facet.field", + "foo_s" // big terms should cause foo_s:A to be cached + ), + "*[count(//doc)=6]"); // now use the cached term as a filter to make sure deleted docs are accounted for - assertQ( req("indent","true", "fl","id", "q", "foo_s:B", - "facet", "true", - "facet.field", "foo_s", - "fq","foo_s:A" - ), - "*[count(//doc)=6]" - ); - - + assertQ( + req( + "indent", + "true", + "fl", + "id", + "q", + "foo_s:B", + "facet", + "true", + "facet.field", + "foo_s", + "fq", + "foo_s:A"), + "*[count(//doc)=6]"); } - @Test public void testSimpleGroupedQueryRangeFacets() throws Exception { - // for the purposes of our test data, it shouldn't matter + // for the purposes of our test data, it shouldn't matter // if we use facet.limit -100, -1, or 100 ... // our set of values is small enough either way testSimpleGroupedQueryRangeFacets("-100"); @@ -304,10 +427,8 @@ private void testSimpleGroupedQueryRangeFacets(String facetLimit) { "group.field", "hotel_s1", "facet", "true", "facet.limit", facetLimit, - "facet.query", "airport_s1:ams" - ), - "//lst[@name='facet_queries']/int[@name='airport_s1:ams'][.='2']" - ); + "facet.query", "airport_s1:ams"), + "//lst[@name='facet_queries']/int[@name='airport_s1:ams'][.='2']"); /* Testing facet.query using tagged filter query and exclusion */ assertQ( req( @@ -319,10 +440,8 @@ private void testSimpleGroupedQueryRangeFacets(String facetLimit) { "group.field", "hotel_s1", "facet", "true", "facet.limit", facetLimit, - "facet.query", "{!ex=dus}airport_s1:ams" - ), - "//lst[@name='facet_queries']/int[@name='{!ex=dus}airport_s1:ams'][.='2']" - ); + "facet.query", "{!ex=dus}airport_s1:ams"), + "//lst[@name='facet_queries']/int[@name='{!ex=dus}airport_s1:ams'][.='2']"); assertQ( req( "q", "*:*", @@ -335,15 +454,13 @@ private void testSimpleGroupedQueryRangeFacets(String facetLimit) { "facet.range", "duration_i1", "facet.range.start", "5", "facet.range.end", "11", - "facet.range.gap", "1" - ), + "facet.range.gap", "1"), "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='5'][.='2']", "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='6'][.='0']", "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='7'][.='0']", "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='8'][.='0']", "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='9'][.='0']", - "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='10'][.='2']" - ); + "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='10'][.='2']"); /* Testing facet.range using tagged filter query and exclusion */ assertQ( req( @@ -358,17 +475,16 @@ private void testSimpleGroupedQueryRangeFacets(String facetLimit) { "facet.range", "{!ex=dus}duration_i1", "facet.range.start", 
"5", "facet.range.end", "11", - "facet.range.gap", "1" - ), + "facet.range.gap", "1"), "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='5'][.='2']", "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='6'][.='0']", "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='7'][.='0']", "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='8'][.='0']", "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='9'][.='0']", - "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='10'][.='2']" - ); - - // repeat the same query using DV method. This is not supported and the query should use filter method instead + "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='10'][.='2']"); + + // repeat the same query using DV method. This is not supported and the query should use filter + // method instead assertQ( req( "q", "*:*", @@ -383,24 +499,23 @@ private void testSimpleGroupedQueryRangeFacets(String facetLimit) { "facet.range.start", "5", "facet.range.end", "11", "facet.range.gap", "1", - "facet.range.method", FacetRangeMethod.DV.toString() - ), + "facet.range.method", FacetRangeMethod.DV.toString()), "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='5'][.='2']", "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='6'][.='0']", "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='7'][.='0']", "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='8'][.='0']", "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='9'][.='0']", - "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='10'][.='2']" - ); + "//lst[@name='facet_ranges']/lst[@name='duration_i1']/lst[@name='counts']/int[@name='10'][.='2']"); } @Test public void testSimpleGroupedFacets() throws Exception { - assumeFalse("SOLR-10844: group.facet doesn't play nice with points *OR* DocValues", - Boolean.getBoolean(NUMERIC_DOCVALUES_SYSPROP) || Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)); - - - // for the purposes of our test data, it shouldn't matter + assumeFalse( + "SOLR-10844: group.facet doesn't play nice with points *OR* DocValues", + Boolean.getBoolean(NUMERIC_DOCVALUES_SYSPROP) + || Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)); + + // for the purposes of our test data, it shouldn't matter // if we use facet.limit -100, -1, or 100 ... 
// our set of values is small enough either way testSimpleGroupedFacets("100"); @@ -408,101 +523,92 @@ public void testSimpleGroupedFacets() throws Exception { testSimpleGroupedFacets("-5"); testSimpleGroupedFacets("-1"); } - + private void testSimpleGroupedFacets(String facetLimit) throws Exception { assertQ( "Return 5 docs with id range 1937 till 1940", - req("id_i1:[2000 TO 2004]"), - "*[count(//doc)=5]" - ); + req("id_i1:[2000 TO 2004]"), + "*[count(//doc)=5]"); assertQ( "Return two facet counts for field airport_a and duration_i1", - req( - "q", "*:*", - "fq", "id_i1:[2000 TO 2004]", - "group", "true", - "group.facet", "true", - "group.field", "hotel_s1", - "facet", "true", - "facet.limit", facetLimit, - "facet.field", "airport_s1", - "facet.field", "duration_i1" - ), + req( + "q", "*:*", + "fq", "id_i1:[2000 TO 2004]", + "group", "true", + "group.facet", "true", + "group.field", "hotel_s1", + "facet", "true", + "facet.limit", facetLimit, + "facet.field", "airport_s1", + "facet.field", "duration_i1"), "//lst[@name='facet_fields']/lst[@name='airport_s1']", "*[count(//lst[@name='airport_s1']/int)=2]", "//lst[@name='airport_s1']/int[@name='ams'][.='2']", "//lst[@name='airport_s1']/int[@name='dus'][.='1']", - "//lst[@name='facet_fields']/lst[@name='duration_i1']", "*[count(//lst[@name='duration_i1']/int)=2]", "//lst[@name='duration_i1']/int[@name='5'][.='2']", - "//lst[@name='duration_i1']/int[@name='10'][.='2']" - ); + "//lst[@name='duration_i1']/int[@name='10'][.='2']"); assertQ( "Return one facet count for field airport_a using facet.offset", - req( - "q", "*:*", - "fq", "id_i1:[2000 TO 2004]", - "group", "true", - "group.facet", "true", - "group.field", "hotel_s1", - "facet", "true", - "facet.offset", "1", - "facet.limit", facetLimit, - "facet.field", "airport_s1" - ), + req( + "q", "*:*", + "fq", "id_i1:[2000 TO 2004]", + "group", "true", + "group.facet", "true", + "group.field", "hotel_s1", + "facet", "true", + "facet.offset", "1", + "facet.limit", facetLimit, + "facet.field", "airport_s1"), "//lst[@name='facet_fields']/lst[@name='airport_s1']", "*[count(//lst[@name='airport_s1']/int)=1]", - "//lst[@name='airport_s1']/int[@name='dus'][.='1']" - ); + "//lst[@name='airport_s1']/int[@name='dus'][.='1']"); assertQ( "Return two facet counts for field airport_a with fq", - req( - "q", "*:*", - "fq", "id_i1:[2000 TO 2004]", - "fq", "duration_i1:5", - "group", "true", - "group.facet", "true", - "group.field", "hotel_s1", - "facet", "true", - "facet.limit", facetLimit, - "facet.field", "airport_s1" - ), + req( + "q", "*:*", + "fq", "id_i1:[2000 TO 2004]", + "fq", "duration_i1:5", + "group", "true", + "group.facet", "true", + "group.field", "hotel_s1", + "facet", "true", + "facet.limit", facetLimit, + "facet.field", "airport_s1"), "//lst[@name='facet_fields']/lst[@name='airport_s1']", "*[count(//lst[@name='airport_s1']/int)=2]", "//lst[@name='airport_s1']/int[@name='ams'][.='2']", - "//lst[@name='airport_s1']/int[@name='dus'][.='0']" - ); + "//lst[@name='airport_s1']/int[@name='dus'][.='0']"); assertQ( "Return one facet count for field airport_s1 with prefix a", - req( - "q", "*:*", - "fq", "id_i1:[2000 TO 2004]", - "group", "true", - "group.facet", "true", - "group.field", "hotel_s1", - "facet", "true", - "facet.field", "airport_s1", - "facet.limit", facetLimit, - "facet.prefix", "a" - ), + req( + "q", "*:*", + "fq", "id_i1:[2000 TO 2004]", + "group", "true", + "group.facet", "true", + "group.field", "hotel_s1", + "facet", "true", + "facet.field", "airport_s1", + "facet.limit", facetLimit, + 
"facet.prefix", "a"), "//lst[@name='facet_fields']/lst[@name='airport_s1']", "*[count(//lst[@name='airport_s1']/int)=1]", - "//lst[@name='airport_s1']/int[@name='ams'][.='2']" - ); - - SolrException e = expectThrows(SolrException.class, () -> { - h.query( - req( - "q", "*:*", - "fq", "id_i1:[2000 TO 2004]", - "group.facet", "true", - "facet", "true", - "facet.field", "airport_s1", - "facet.prefix", "a" - ) - ); - }); + "//lst[@name='airport_s1']/int[@name='ams'][.='2']"); + + SolrException e = + expectThrows( + SolrException.class, + () -> { + h.query( + req( + "q", "*:*", + "fq", "id_i1:[2000 TO 2004]", + "group.facet", "true", + "facet", "true", + "facet.field", "airport_s1", + "facet.prefix", "a")); + }); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); } @@ -513,29 +619,39 @@ public void testEmptyFacetCounts() throws Exception { // static so we can try both with and without an empty index static void doEmptyFacetCounts() throws Exception { - doEmptyFacetCounts("empty_t", new String[]{null, "myprefix",""}); - doEmptyFacetCounts("empty_i", new String[]{null}); - doEmptyFacetCounts("empty_f", new String[]{null}); - doEmptyFacetCounts("empty_s", new String[]{null, "myprefix",""}); - doEmptyFacetCounts("empty_d", new String[]{null}); + doEmptyFacetCounts("empty_t", new String[] {null, "myprefix", ""}); + doEmptyFacetCounts("empty_i", new String[] {null}); + doEmptyFacetCounts("empty_f", new String[] {null}); + doEmptyFacetCounts("empty_s", new String[] {null, "myprefix", ""}); + doEmptyFacetCounts("empty_d", new String[] {null}); } - + static void doEmptyFacetCounts(String field, String[] prefixes) throws Exception { SchemaField sf = h.getCore().getLatestSchema().getField(field); String response = JQ(req("q", "*:*")); Map rsp = (Map) fromJSONString(response); - Long numFound = (Long)(((Map)rsp.get("response")).get("numFound")); - - ModifiableSolrParams params = params("q","*:*", "facet.mincount","1","rows","0", "facet","true", "facet.field","{!key=myalias}"+field); - - String[] methods = {null, "fc","enum","fcs", "uif"}; + Long numFound = (Long) (((Map) rsp.get("response")).get("numFound")); + + ModifiableSolrParams params = + params( + "q", + "*:*", + "facet.mincount", + "1", + "rows", + "0", + "facet", + "true", + "facet.field", + "{!key=myalias}" + field); + + String[] methods = {null, "fc", "enum", "fcs", "uif"}; if (sf.multiValued() || sf.getType().multiValuedFieldCache()) { - methods = new String[]{null, "fc","enum", "uif"}; + methods = new String[] {null, "fc", "enum", "uif"}; } - prefixes = prefixes==null ? new String[]{null} : prefixes; - + prefixes = prefixes == null ? new String[] {null} : prefixes; for (String method : methods) { if (method == null) { @@ -556,50 +672,52 @@ static void doEmptyFacetCounts(String field, String[] prefixes) throws Exception } else { params.set("facet.missing", missing); } - - String expected = missing==null ? "[]" : "[null," + numFound + "]"; - - assertJQ(req(params), - "/facet_counts/facet_fields/myalias==" + expected); + + String expected = missing == null ? 
"[]" : "[null," + numFound + "]"; + + assertJQ(req(params), "/facet_counts/facet_fields/myalias==" + expected); } } } } - @Test public void testFacetMatches() { - final String[][] uifSwitch = new String[][] { - new String[]{"f.trait_s.facet.method", "uif"}, - new String[]{"facet.method", "uif"} - }; - final String[] none = new String[]{}; + final String[][] uifSwitch = + new String[][] { + new String[] {"f.trait_s.facet.method", "uif"}, + new String[] {"facet.method", "uif"} + }; + final String[] none = new String[] {}; for (String[] aSwitch : uifSwitch) { - for(String[] methodParam : new String[][]{ none, aSwitch}) { - assertQ("check facet.match filters facets returned", - req(methodParam - , "q", "id:[42 TO 47]" - , "facet", "true" - , "facet.field", "trait_s" - , "facet.matches", ".*o.*" - ) - , "*[count(//doc)=6]" - - , "//lst[@name='facet_counts']/lst[@name='facet_queries']" - - , "//lst[@name='facet_counts']/lst[@name='facet_fields']" - , "//lst[@name='facet_fields']/lst[@name='trait_s']" - , "*[count(//lst[@name='trait_s']/int)=2]" - , "//lst[@name='trait_s']/int[@name='Tool'][.='2']" - , "//lst[@name='trait_s']/int[@name='Obnoxious'][.='2']" - ); + for (String[] methodParam : new String[][] {none, aSwitch}) { + assertQ( + "check facet.match filters facets returned", + req( + methodParam, + "q", + "id:[42 TO 47]", + "facet", + "true", + "facet.field", + "trait_s", + "facet.matches", + ".*o.*"), + "*[count(//doc)=6]", + "//lst[@name='facet_counts']/lst[@name='facet_queries']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']", + "//lst[@name='facet_fields']/lst[@name='trait_s']", + "*[count(//lst[@name='trait_s']/int)=2]", + "//lst[@name='trait_s']/int[@name='Tool'][.='2']", + "//lst[@name='trait_s']/int[@name='Obnoxious'][.='2']"); } } } @Test public void testFacetMissing() { - SolrParams commonParams = params("q", "foo_s:A", "rows", "0", "facet", "true", "facet.missing", "true"); + SolrParams commonParams = + params("q", "foo_s:A", "rows", "0", "facet", "true", "facet.missing", "true"); // with facet.limit!=0 and facet.missing=true assertQ( @@ -608,8 +726,7 @@ public void testFacetMissing() { "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='trait_s']", "*[count(//lst[@name='trait_s']/int)=2]", "//lst[@name='trait_s']/int[@name='Obnoxious'][.='2']", - "//lst[@name='trait_s']/int[.='1']" - ); + "//lst[@name='trait_s']/int[.='1']"); // with facet.limit=0 and facet.missing=true assertQ( @@ -617,8 +734,7 @@ public void testFacetMissing() { "//lst[@name='facet_counts']/lst[@name='facet_fields']", "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='trait_s']", "*[count(//lst[@name='trait_s']/int)=1]", - "//lst[@name='trait_s']/int[.='1']" - ); + "//lst[@name='trait_s']/int[.='1']"); // facet.method=enum assertQ( @@ -626,17 +742,23 @@ public void testFacetMissing() { "//lst[@name='facet_counts']/lst[@name='facet_fields']", "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='trait_s']", "*[count(//lst[@name='trait_s']/int)=1]", - "//lst[@name='trait_s']/int[.='1']" - ); + "//lst[@name='trait_s']/int[.='1']"); assertQ( - req(commonParams, "facet.field", "trait_s", "facet.limit", "0", "facet.mincount", "1", - "facet.method", "uif"), + req( + commonParams, + "facet.field", + "trait_s", + "facet.limit", + "0", + "facet.mincount", + "1", + "facet.method", + "uif"), "//lst[@name='facet_counts']/lst[@name='facet_fields']", "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='trait_s']", "*[count(//lst[@name='trait_s']/int)=1]", - 
"//lst[@name='trait_s']/int[.='1']" - ); + "//lst[@name='trait_s']/int[.='1']"); // facet.method=fcs assertQ( @@ -644,356 +766,460 @@ public void testFacetMissing() { "//lst[@name='facet_counts']/lst[@name='facet_fields']", "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='trait_s']", "*[count(//lst[@name='trait_s']/int)=1]", - "//lst[@name='trait_s']/int[.='1']" - ); + "//lst[@name='trait_s']/int[.='1']"); // facet.missing=true on numeric field assertQ( - req(commonParams, "facet.field", "range_facet_f", "facet.limit", "1", "facet.mincount", "1"), + req( + commonParams, + "facet.field", + "range_facet_f", + "facet.limit", + "1", + "facet.mincount", + "1"), "//lst[@name='facet_counts']/lst[@name='facet_fields']", "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='range_facet_f']", "*[count(//lst[@name='range_facet_f']/int)=2]", - "//lst[@name='range_facet_f']/int[.='0']" - ); + "//lst[@name='range_facet_f']/int[.='0']"); // facet.limit=0 assertQ( - req(commonParams, "facet.field", "range_facet_f", "facet.limit", "0", "facet.mincount", "1"), + req( + commonParams, + "facet.field", + "range_facet_f", + "facet.limit", + "0", + "facet.mincount", + "1"), "//lst[@name='facet_counts']/lst[@name='facet_fields']", "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='range_facet_f']", "*[count(//lst[@name='range_facet_f']/int)=1]", - "//lst[@name='range_facet_f']/int[.='0']" - ); + "//lst[@name='range_facet_f']/int[.='0']"); } @Test public void testSimpleFacetCounts() { - - assertQ("standard request handler returns all matches", - req("id_i1:[42 TO 47]"), - "*[count(//doc)=6]" - ); - - assertQ("filter results using fq", - req("q","id_i1:[42 TO 46]", - "fq", "id_i1:[43 TO 47]"), - "*[count(//doc)=4]" - ); - - assertQ("don't filter results using blank fq", - req("q","id_i1:[42 TO 46]", - "fq", " "), - "*[count(//doc)=5]" - ); - - assertQ("filter results using multiple fq params", - req("q","id_i1:[42 TO 46]", - "fq", "trait_s:Obnoxious", - "fq", "id_i1:[43 TO 47]"), - "*[count(//doc)=1]" - ); - - final String[] uifSwitch = new String[]{(random().nextBoolean() ? 
"":"f.trait_s.")+"facet.method", "uif"}; - final String[] none = new String[]{}; - - for(String[] methodParam : new String[][]{ none, uifSwitch}){ - assertQ("check counts for facet queries", - req(methodParam - ,"q", "id_i1:[42 TO 47]" - ,"facet", "true" - ,"facet.query", "trait_s:Obnoxious" - ,"facet.query", "id_i1:[42 TO 45]" - ,"facet.query", "id_i1:[43 TO 47]" - ,"facet.field", "trait_s" - ) - ,"*[count(//doc)=6]" - - ,"//lst[@name='facet_counts']/lst[@name='facet_queries']" - ,"//lst[@name='facet_queries']/int[@name='trait_s:Obnoxious'][.='2']" - ,"//lst[@name='facet_queries']/int[@name='id_i1:[42 TO 45]'][.='4']" - ,"//lst[@name='facet_queries']/int[@name='id_i1:[43 TO 47]'][.='5']" - - ,"//lst[@name='facet_counts']/lst[@name='facet_fields']" - ,"//lst[@name='facet_fields']/lst[@name='trait_s']" - ,"*[count(//lst[@name='trait_s']/int)=4]" - ,"//lst[@name='trait_s']/int[@name='Tool'][.='2']" - ,"//lst[@name='trait_s']/int[@name='Obnoxious'][.='2']" - ,"//lst[@name='trait_s']/int[@name='Pig'][.='1']" - ); - - assertQ("check multi-select facets with naming", - req(methodParam, "q", "id_i1:[42 TO 47]" - ,"facet", "true" - ,"facet.query", "{!ex=1}trait_s:Obnoxious" - ,"facet.query", "{!ex=2 key=foo}id_i1:[42 TO 45]" // tag=2 same as 1 - ,"facet.query", "{!ex=3,4 key=bar}id_i1:[43 TO 47]" // tag=3,4 don't exist - ,"facet.field", "{!ex=3,1}trait_s" // 3,1 same as 1 - ,"fq", "{!tag=1,2}id:47" // tagged as 1 and 2 - ) - ,"*[count(//doc)=1]" - - ,"//lst[@name='facet_counts']/lst[@name='facet_queries']" - ,"//lst[@name='facet_queries']/int[@name='{!ex=1}trait_s:Obnoxious'][.='2']" - ,"//lst[@name='facet_queries']/int[@name='foo'][.='4']" - ,"//lst[@name='facet_queries']/int[@name='bar'][.='1']" - - ,"//lst[@name='facet_counts']/lst[@name='facet_fields']" - ,"//lst[@name='facet_fields']/lst[@name='trait_s']" - ,"*[count(//lst[@name='trait_s']/int)=4]" - ,"//lst[@name='trait_s']/int[@name='Tool'][.='2']" - ,"//lst[@name='trait_s']/int[@name='Obnoxious'][.='2']" - ,"//lst[@name='trait_s']/int[@name='Pig'][.='1']" - ); + + assertQ( + "standard request handler returns all matches", + req("id_i1:[42 TO 47]"), + "*[count(//doc)=6]"); + + assertQ( + "filter results using fq", + req( + "q", "id_i1:[42 TO 46]", + "fq", "id_i1:[43 TO 47]"), + "*[count(//doc)=4]"); + + assertQ( + "don't filter results using blank fq", + req( + "q", "id_i1:[42 TO 46]", + "fq", " "), + "*[count(//doc)=5]"); + + assertQ( + "filter results using multiple fq params", + req( + "q", "id_i1:[42 TO 46]", + "fq", "trait_s:Obnoxious", + "fq", "id_i1:[43 TO 47]"), + "*[count(//doc)=1]"); + + final String[] uifSwitch = + new String[] {(random().nextBoolean() ? 
"" : "f.trait_s.") + "facet.method", "uif"}; + final String[] none = new String[] {}; + + for (String[] methodParam : new String[][] {none, uifSwitch}) { + assertQ( + "check counts for facet queries", + req( + methodParam, + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "facet.query", + "trait_s:Obnoxious", + "facet.query", + "id_i1:[42 TO 45]", + "facet.query", + "id_i1:[43 TO 47]", + "facet.field", + "trait_s"), + "*[count(//doc)=6]", + "//lst[@name='facet_counts']/lst[@name='facet_queries']", + "//lst[@name='facet_queries']/int[@name='trait_s:Obnoxious'][.='2']", + "//lst[@name='facet_queries']/int[@name='id_i1:[42 TO 45]'][.='4']", + "//lst[@name='facet_queries']/int[@name='id_i1:[43 TO 47]'][.='5']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']", + "//lst[@name='facet_fields']/lst[@name='trait_s']", + "*[count(//lst[@name='trait_s']/int)=4]", + "//lst[@name='trait_s']/int[@name='Tool'][.='2']", + "//lst[@name='trait_s']/int[@name='Obnoxious'][.='2']", + "//lst[@name='trait_s']/int[@name='Pig'][.='1']"); + + assertQ( + "check multi-select facets with naming", + req( + methodParam, + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "facet.query", + "{!ex=1}trait_s:Obnoxious", + "facet.query", + "{!ex=2 key=foo}id_i1:[42 TO 45]" // tag=2 same as 1 + , + "facet.query", + "{!ex=3,4 key=bar}id_i1:[43 TO 47]" // tag=3,4 don't exist + , + "facet.field", + "{!ex=3,1}trait_s" // 3,1 same as 1 + , + "fq", + "{!tag=1,2}id:47" // tagged as 1 and 2 + ), + "*[count(//doc)=1]", + "//lst[@name='facet_counts']/lst[@name='facet_queries']", + "//lst[@name='facet_queries']/int[@name='{!ex=1}trait_s:Obnoxious'][.='2']", + "//lst[@name='facet_queries']/int[@name='foo'][.='4']", + "//lst[@name='facet_queries']/int[@name='bar'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']", + "//lst[@name='facet_fields']/lst[@name='trait_s']", + "*[count(//lst[@name='trait_s']/int)=4]", + "//lst[@name='trait_s']/int[@name='Tool'][.='2']", + "//lst[@name='trait_s']/int[@name='Obnoxious'][.='2']", + "//lst[@name='trait_s']/int[@name='Pig'][.='1']"); } // test excluding main query - assertQ(req("q", "{!tag=main}id:43" - ,"facet", "true" - ,"facet.query", "{!key=foo}id:42" - ,"facet.query", "{!ex=main key=bar}id:42" // only matches when we exclude main query - ) - ,"//lst[@name='facet_queries']/int[@name='foo'][.='0']" - ,"//lst[@name='facet_queries']/int[@name='bar'][.='1']" - ); - - for(String[] methodParam : new String[][]{ none, uifSwitch}){ - assertQ("check counts for applied facet queries using filtering (fq)", - req(methodParam - ,"q", "id_i1:[42 TO 47]" - ,"facet", "true" - ,"fq", "id_i1:[42 TO 45]" - ,"facet.field", "trait_s" - ,"facet.query", "id_i1:[42 TO 45]" - ,"facet.query", "id_i1:[43 TO 47]" - ) - ,"*[count(//doc)=4]" - ,"//lst[@name='facet_counts']/lst[@name='facet_queries']" - ,"//lst[@name='facet_queries']/int[@name='id_i1:[42 TO 45]'][.='4']" - ,"//lst[@name='facet_queries']/int[@name='id_i1:[43 TO 47]'][.='3']" - ,"*[count(//lst[@name='trait_s']/int)=4]" - ,"//lst[@name='trait_s']/int[@name='Tool'][.='2']" - ,"//lst[@name='trait_s']/int[@name='Obnoxious'][.='1']" - ,"//lst[@name='trait_s']/int[@name='Chauvinist'][.='1']" - ,"//lst[@name='trait_s']/int[@name='Pig'][.='0']" - ); - - assertQ("check counts with facet.zero=false&facet.missing=true using fq", - req(methodParam - ,"q", "id_i1:[42 TO 47]" - ,"facet", "true" - ,"facet.zeros", "false" - ,"f.trait_s.facet.missing", "true" - ,"fq", "id_i1:[42 TO 45]" - ,"facet.field", "trait_s" - ) - ,"*[count(//doc)=4]" - 
,"*[count(//lst[@name='trait_s']/int)=4]" - ,"//lst[@name='trait_s']/int[@name='Tool'][.='2']" - ,"//lst[@name='trait_s']/int[@name='Obnoxious'][.='1']" - ,"//lst[@name='trait_s']/int[@name='Chauvinist'][.='1']" - ,"//lst[@name='trait_s']/int[not(@name)][.='1']" - ); - - assertQ("check counts with facet.mincount=1&facet.missing=true using fq", - req(methodParam - ,"q", "id_i1:[42 TO 47]" - ,"facet", "true" - ,"facet.mincount", "1" - ,"f.trait_s.facet.missing", "true" - ,"fq", "id_i1:[42 TO 45]" - ,"facet.field", "trait_s" - ) - ,"*[count(//doc)=4]" - ,"*[count(//lst[@name='trait_s']/int)=4]" - ,"//lst[@name='trait_s']/int[@name='Tool'][.='2']" - ,"//lst[@name='trait_s']/int[@name='Obnoxious'][.='1']" - ,"//lst[@name='trait_s']/int[@name='Chauvinist'][.='1']" - ,"//lst[@name='trait_s']/int[not(@name)][.='1']" - ); - - assertQ("check counts with facet.mincount=2&facet.missing=true using fq", - req(methodParam - ,"q", "id_i1:[42 TO 47]" - ,"facet", "true" - ,"facet.mincount", "2" - ,"f.trait_s.facet.missing", "true" - ,"fq", "id_i1:[42 TO 45]" - ,"facet.field", "trait_s" - ) - ,"*[count(//doc)=4]" - ,"*[count(//lst[@name='trait_s']/int)=2]" - ,"//lst[@name='trait_s']/int[@name='Tool'][.='2']" - ,"//lst[@name='trait_s']/int[not(@name)][.='1']" - ); - - assertQ("check sorted paging", - req(methodParam - ,"q", "id_i1:[42 TO 47]" - ,"facet", "true" - ,"fq", "id_i1:[42 TO 45]" - ,"facet.field", "trait_s" - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","4" - ) - ,"*[count(//lst[@name='trait_s']/int)=4]" - ,"//lst[@name='trait_s']/int[@name='Tool'][.='2']" - ,"//lst[@name='trait_s']/int[@name='Obnoxious'][.='1']" - ,"//lst[@name='trait_s']/int[@name='Chauvinist'][.='1']" - ,"//lst[@name='trait_s']/int[@name='Pig'][.='0']" - ); + assertQ( + req( + "q", + "{!tag=main}id:43", + "facet", + "true", + "facet.query", + "{!key=foo}id:42", + "facet.query", + "{!ex=main key=bar}id:42" // only matches when we exclude main query + ), + "//lst[@name='facet_queries']/int[@name='foo'][.='0']", + "//lst[@name='facet_queries']/int[@name='bar'][.='1']"); + + for (String[] methodParam : new String[][] {none, uifSwitch}) { + assertQ( + "check counts for applied facet queries using filtering (fq)", + req( + methodParam, + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fq", + "id_i1:[42 TO 45]", + "facet.field", + "trait_s", + "facet.query", + "id_i1:[42 TO 45]", + "facet.query", + "id_i1:[43 TO 47]"), + "*[count(//doc)=4]", + "//lst[@name='facet_counts']/lst[@name='facet_queries']", + "//lst[@name='facet_queries']/int[@name='id_i1:[42 TO 45]'][.='4']", + "//lst[@name='facet_queries']/int[@name='id_i1:[43 TO 47]'][.='3']", + "*[count(//lst[@name='trait_s']/int)=4]", + "//lst[@name='trait_s']/int[@name='Tool'][.='2']", + "//lst[@name='trait_s']/int[@name='Obnoxious'][.='1']", + "//lst[@name='trait_s']/int[@name='Chauvinist'][.='1']", + "//lst[@name='trait_s']/int[@name='Pig'][.='0']"); + + assertQ( + "check counts with facet.zero=false&facet.missing=true using fq", + req( + methodParam, + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "facet.zeros", + "false", + "f.trait_s.facet.missing", + "true", + "fq", + "id_i1:[42 TO 45]", + "facet.field", + "trait_s"), + "*[count(//doc)=4]", + "*[count(//lst[@name='trait_s']/int)=4]", + "//lst[@name='trait_s']/int[@name='Tool'][.='2']", + "//lst[@name='trait_s']/int[@name='Obnoxious'][.='1']", + "//lst[@name='trait_s']/int[@name='Chauvinist'][.='1']", + "//lst[@name='trait_s']/int[not(@name)][.='1']"); + + assertQ( + "check counts with 
facet.mincount=1&facet.missing=true using fq", + req( + methodParam, + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "facet.mincount", + "1", + "f.trait_s.facet.missing", + "true", + "fq", + "id_i1:[42 TO 45]", + "facet.field", + "trait_s"), + "*[count(//doc)=4]", + "*[count(//lst[@name='trait_s']/int)=4]", + "//lst[@name='trait_s']/int[@name='Tool'][.='2']", + "//lst[@name='trait_s']/int[@name='Obnoxious'][.='1']", + "//lst[@name='trait_s']/int[@name='Chauvinist'][.='1']", + "//lst[@name='trait_s']/int[not(@name)][.='1']"); + + assertQ( + "check counts with facet.mincount=2&facet.missing=true using fq", + req( + methodParam, + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "facet.mincount", + "2", + "f.trait_s.facet.missing", + "true", + "fq", + "id_i1:[42 TO 45]", + "facet.field", + "trait_s"), + "*[count(//doc)=4]", + "*[count(//lst[@name='trait_s']/int)=2]", + "//lst[@name='trait_s']/int[@name='Tool'][.='2']", + "//lst[@name='trait_s']/int[not(@name)][.='1']"); + + assertQ( + "check sorted paging", + req( + methodParam, + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fq", + "id_i1:[42 TO 45]", + "facet.field", + "trait_s", + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "4"), + "*[count(//lst[@name='trait_s']/int)=4]", + "//lst[@name='trait_s']/int[@name='Tool'][.='2']", + "//lst[@name='trait_s']/int[@name='Obnoxious'][.='1']", + "//lst[@name='trait_s']/int[@name='Chauvinist'][.='1']", + "//lst[@name='trait_s']/int[@name='Pig'][.='0']"); // check that the default sort is by count - assertQ("check sorted paging", - req(methodParam, "q", "id_i1:[42 TO 47]" - ,"facet", "true" - ,"fq", "id_i1:[42 TO 45]" - ,"facet.field", "trait_s" - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","3" - ) - ,"*[count(//lst[@name='trait_s']/int)=3]" - ,"//int[1][@name='Tool'][.='2']" - ,"//int[2][@name='Chauvinist'][.='1']" - ,"//int[3][@name='Obnoxious'][.='1']" - ); + assertQ( + "check sorted paging", + req( + methodParam, + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fq", + "id_i1:[42 TO 45]", + "facet.field", + "trait_s", + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "3"), + "*[count(//lst[@name='trait_s']/int)=3]", + "//int[1][@name='Tool'][.='2']", + "//int[2][@name='Chauvinist'][.='1']", + "//int[3][@name='Obnoxious'][.='1']"); // // check that legacy facet.sort=true/false works // - assertQ(req(methodParam, "q", "id_i1:[42 TO 47]" - ,"facet", "true" - ,"fq", "id_i1:[42 TO 45]" - ,"facet.field", "trait_s" - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","3" - ,"facet.sort","true" // true means sort-by-count - ) - ,"*[count(//lst[@name='trait_s']/int)=3]" - ,"//int[1][@name='Tool'][.='2']" - ,"//int[2][@name='Chauvinist'][.='1']" - ,"//int[3][@name='Obnoxious'][.='1']" - ); - - assertQ(req(methodParam, "q", "id_i1:[42 TO 47]" - ,"facet", "true" - ,"fq", "id_i1:[42 TO 45]" - ,"facet.field", "trait_s" - ,"facet.mincount","1" - ,"facet.offset","0" - ,"facet.limit","3" - ,"facet.sort","false" // false means sort by index order - ) - ,"*[count(//lst[@name='trait_s']/int)=3]" - ,"//int[1][@name='Chauvinist'][.='1']" - ,"//int[2][@name='Obnoxious'][.='1']" - ,"//int[3][@name='Tool'][.='2']" - ); + assertQ( + req( + methodParam, + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fq", + "id_i1:[42 TO 45]", + "facet.field", + "trait_s", + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "3", + "facet.sort", + "true" // true means sort-by-count + ), + "*[count(//lst[@name='trait_s']/int)=3]", +
"//int[1][@name='Tool'][.='2']", + "//int[2][@name='Chauvinist'][.='1']", + "//int[3][@name='Obnoxious'][.='1']"); + + assertQ( + req( + methodParam, + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fq", + "id_i1:[42 TO 45]", + "facet.field", + "trait_s", + "facet.mincount", + "1", + "facet.offset", + "0", + "facet.limit", + "3", + "facet.sort", + "false" // false means sort by index order + ), + "*[count(//lst[@name='trait_s']/int)=3]", + "//int[1][@name='Chauvinist'][.='1']", + "//int[2][@name='Obnoxious'][.='1']", + "//int[3][@name='Tool'][.='2']"); } - for(String method : new String[]{ "fc","uif"}){ - assertQ(req("q", "id_i1:[42 TO 47]" - ,"facet", "true" - ,"fq", "id_i1:[42 TO 45]" - ,"facet.field", "zerolen_s" - ,(random().nextBoolean() ? "":"f.zerolen_s.")+"facet.method", method - ) - ,"*[count(//lst[@name='zerolen_s']/int[@name=''])=1]" - ); + for (String method : new String[] {"fc", "uif"}) { + assertQ( + req( + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fq", + "id_i1:[42 TO 45]", + "facet.field", + "zerolen_s", + (random().nextBoolean() ? "" : "f.zerolen_s.") + "facet.method", + method), + "*[count(//lst[@name='zerolen_s']/int[@name=''])=1]"); } - assertQ("a facet.query that analyzes to no query shoud not NPE", - req("q", "*:*", + assertQ( + "a facet.query that analyzes to no query should not NPE", + req( + "q", "*:*", "facet", "true", - "facet.query", "{!field key=k f=lengthfilt}a"),//2 char minimum - "//lst[@name='facet_queries']/int[@name='k'][.='0']" - ); + "facet.query", "{!field key=k f=lengthfilt}a"), // 2 char minimum + "//lst[@name='facet_queries']/int[@name='k'][.='0']"); } public void testBehaviorEquivilenceOfUninvertibleFalse() throws Exception { // NOTE: mincount=0 affects method detection/coercion, so we include permutations of it - - { + + { // an "uninvertible=false" field is not be facetable using the "default" method, // or any explicit method other then "enum". // // it should behave the same as any attempt (using any method) at faceting on // and "indexed=false docValues=false" field -- returning no buckets.
- + final List<SolrParams> paramSets = new ArrayList<>(); for (String min : Arrays.asList("0", "1")) { for (String f : Arrays.asList("trait_s_not_uninvert", "trait_s_not_indexed_sS")) { paramSets.add(params("facet.field", "{!key=x}" + f)); for (String method : Arrays.asList("fc", "fcs", "uif")) { - paramSets.add(params("facet.field", "{!key=x}" + f, - "facet.mincount", min, - "facet.method", method)); - paramSets.add(params("facet.field", "{!key=x}" + f, - "facet.mincount", min, - "facet.method", method)); + paramSets.add( + params( + "facet.field", "{!key=x}" + f, + "facet.mincount", min, + "facet.method", method)); + paramSets.add( + params( + "facet.field", "{!key=x}" + f, + "facet.mincount", min, + "facet.method", method)); } } - paramSets.add(params("facet.field", "{!key=x}trait_s_not_indexed_sS", - "facet.mincount", min, - "facet.method", "enum")); + paramSets.add( + params( + "facet.field", "{!key=x}trait_s_not_indexed_sS", + "facet.mincount", min, + "facet.method", "enum")); } for (SolrParams p : paramSets) { // "empty" results should be the same regardless of mincount - assertQ("expect no buckets when field is not-indexed or not-uninvertible", - req(p - ,"rows","0" - ,"q", "id_i1:[42 TO 47]" - ,"fq", "id_i1:[42 TO 45]" - ,"facet", "true" - ) - ,"//*[@numFound='4']" - ,"*[count(//lst[@name='x'])=1]" - ,"*[count(//lst[@name='x']/int)=0]" - ); + assertQ( + "expect no buckets when field is not-indexed or not-uninvertible", + req(p, "rows", "0", "q", "id_i1:[42 TO 47]", "fq", "id_i1:[42 TO 45]", "facet", "true"), + "//*[@numFound='4']", + "*[count(//lst[@name='x'])=1]", + "*[count(//lst[@name='x']/int)=0]"); } - } - - { - // the only way to facet on an "uninvertible=false" field is to explicitly request facet.method=enum - // in which case it should behave consistently with it's copyField source & equivilent docValues field - // (using any method for either of them) + + { + // the only way to facet on an "uninvertible=false" field is to explicitly request + // facet.method=enum in which case it should behave consistently with its copyField source & + // equivalent docValues field (using any method for either of them) final List<SolrParams> paramSets = new ArrayList<>(); for (String min : Arrays.asList("0", "1")) { - paramSets.add(params("facet.field", "{!key=x}trait_s_not_uninvert", - "facet.method", "enum")); + paramSets.add( + params( + "facet.field", "{!key=x}trait_s_not_uninvert", + "facet.method", "enum")); for (String okField : Arrays.asList("trait_s", "trait_s_not_uninvert_dv")) { paramSets.add(params("facet.field", "{!key=x}" + okField)); - for (String method : Arrays.asList("enum","fc", "fcs", "uif")) { - paramSets.add(params("facet.field", "{!key=x}" + okField, - "facet.method", method)); + for (String method : Arrays.asList("enum", "fc", "fcs", "uif")) { + paramSets.add(params("facet.field", "{!key=x}" + okField, "facet.method", method)); } } for (SolrParams p : paramSets) { - assertQ("check counts for applied facet queries using filtering (fq)", - req(p - ,"rows","0" - ,"q", "id_i1:[42 TO 47]" - ,"fq", "id_i1:[42 TO 45]" - ,"facet", "true" - ,"facet.mincount", min - ) - ,"//*[@numFound='4']" - ,"*[count(//lst[@name='x'])=1]" - ,"*[count(//lst[@name='x']/int)="+("0".equals(min) ? "4]" : "3]") - ,"//lst[@name='x']/int[@name='Tool'][.='2']" - ,"//lst[@name='x']/int[@name='Obnoxious'][.='1']" - ,"//lst[@name='x']/int[@name='Chauvinist'][.='1']" - ,"count(//lst[@name='x']/int[@name='Pig'][.='0'])=" + ("0".equals(min) ?
"1" : "0") - ); + assertQ( + "check counts for applied facet queries using filtering (fq)", + req( + p, + "rows", + "0", + "q", + "id_i1:[42 TO 47]", + "fq", + "id_i1:[42 TO 45]", + "facet", + "true", + "facet.mincount", + min), + "//*[@numFound='4']", + "*[count(//lst[@name='x'])=1]", + "*[count(//lst[@name='x']/int)=" + ("0".equals(min) ? "4]" : "3]"), + "//lst[@name='x']/int[@name='Tool'][.='2']", + "//lst[@name='x']/int[@name='Obnoxious'][.='1']", + "//lst[@name='x']/int[@name='Chauvinist'][.='1']", + "count(//lst[@name='x']/int[@name='Pig'][.='0'])=" + ("0".equals(min) ? "1" : "0")); } } } } - public static void indexDateFacets() { final String i = "id"; final String f = "bday"; @@ -1001,17 +1227,17 @@ public static void indexDateFacets() { final String ooo = "00:00:00.000Z"; final String xxx = "15:15:15.155Z"; - //note: add_doc duplicates bday to bday_drf and a_tdt to a_drf (date range field) - add_doc(i, "201", f, "1976-07-04T12:08:56.235Z", ff, "1900-01-01T"+ooo); - add_doc(i, "202", f, "1976-07-05T00:00:00.000Z", ff, "1976-07-01T"+ooo); - add_doc(i, "203", f, "1976-07-15T00:07:57.890Z", ff, "1976-07-04T"+ooo); - add_doc(i, "204", f, "1976-07-21T00:07:57.890Z", ff, "1976-07-05T"+ooo); - add_doc(i, "205", f, "1976-07-13T12:12:25.255Z", ff, "1976-07-05T"+xxx); - add_doc(i, "206", f, "1976-07-03T17:01:23.456Z", ff, "1976-07-07T"+ooo); - add_doc(i, "207", f, "1976-07-12T12:12:25.255Z", ff, "1976-07-13T"+ooo); - add_doc(i, "208", f, "1976-07-15T15:15:15.155Z", ff, "1976-07-13T"+xxx); - add_doc(i, "209", f, "1907-07-12T13:13:23.235Z", ff, "1976-07-15T"+xxx); - add_doc(i, "2010", f, "1976-07-03T11:02:45.678Z", ff, "2000-01-01T"+ooo); + // note: add_doc duplicates bday to bday_drf and a_tdt to a_drf (date range field) + add_doc(i, "201", f, "1976-07-04T12:08:56.235Z", ff, "1900-01-01T" + ooo); + add_doc(i, "202", f, "1976-07-05T00:00:00.000Z", ff, "1976-07-01T" + ooo); + add_doc(i, "203", f, "1976-07-15T00:07:57.890Z", ff, "1976-07-04T" + ooo); + add_doc(i, "204", f, "1976-07-21T00:07:57.890Z", ff, "1976-07-05T" + ooo); + add_doc(i, "205", f, "1976-07-13T12:12:25.255Z", ff, "1976-07-05T" + xxx); + add_doc(i, "206", f, "1976-07-03T17:01:23.456Z", ff, "1976-07-07T" + ooo); + add_doc(i, "207", f, "1976-07-12T12:12:25.255Z", ff, "1976-07-13T" + ooo); + add_doc(i, "208", f, "1976-07-15T15:15:15.155Z", ff, "1976-07-13T" + xxx); + add_doc(i, "209", f, "1907-07-12T13:13:23.235Z", ff, "1976-07-15T" + xxx); + add_doc(i, "2010", f, "1976-07-03T11:02:45.678Z", ff, "2000-01-01T" + ooo); add_doc(i, "2011", f, "1907-07-12T12:12:25.255Z"); add_doc(i, "2012", f, "2007-07-30T07:07:07.070Z"); add_doc(i, "2013", f, "1976-07-30T22:22:22.222Z"); @@ -1022,7 +1248,7 @@ public static void indexDateFacets() { public void testTrieDateRangeFacets() { helpTestDateFacets("bday", FacetRangeMethod.FILTER); } - + @Test public void testTrieDateRangeFacetsDocValues() { helpTestDateFacets("bday", FacetRangeMethod.DV); @@ -1038,206 +1264,269 @@ private void helpTestDateFacets(final String fieldName, final FacetRangeMethod r final String b = "facet_ranges"; final String f = fieldName; final String c = "/lst[@name='counts']"; - final String pre = "//lst[@name='"+b+"']/lst[@name='"+f+"']" + c; + final String pre = "//lst[@name='" + b + "']/lst[@name='" + f + "']" + c; final String meta = pre + "/../"; - // range faceting defaults to including only lower endpoint // doc exists with value @ 00:00:00.000 on July5 final String jul4 = "[.='1' ]"; - assertQ("check counts for month of facet by day", - req( "q", "*:*" - ,"rows", "0" - 
,"facet", "true" - ,p, f - ,p+".start", "1976-07-01T00:00:00.000Z" - ,p+".end", "1976-07-01T00:00:00.000Z+1MONTH" - ,p+".gap", "+1DAY" - ,p+".other", "all" - ,p+".method", rangeFacetMethod.toString() //This only applies to range faceting, won't be use for date faceting - ) - ,"*[count("+pre+"/int)=31]" - ,pre+"/int[@name='1976-07-01T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-02T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-03T00:00:00Z'][.='2' ]" - ,pre+"/int[@name='1976-07-04T00:00:00Z']" + jul4 - ,pre+"/int[@name='1976-07-05T00:00:00Z'][.='2' ]" - ,pre+"/int[@name='1976-07-06T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-07T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-08T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-09T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-10T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-11T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-12T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-13T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-14T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-15T00:00:00Z'][.='2' ]" - ,pre+"/int[@name='1976-07-16T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-17T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-18T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-19T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-21T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-22T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-23T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-24T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-25T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-26T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-27T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-28T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-29T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-30T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-31T00:00:00Z'][.='0']" - - ,meta+"/int[@name='before' ][.='2']" - ,meta+"/int[@name='after' ][.='1']" - ,meta+"/int[@name='between'][.='11']" - - ); - - assertQ("check counts for month of facet by day with global mincount = 1", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,p, f - ,p+".start", "1976-07-01T00:00:00.000Z" - ,p+".end", "1976-07-01T00:00:00.000Z+1MONTH" - ,p+".gap", "+1DAY" - ,p+".other", "all" - ,"facet.mincount", "1" - ) - ,"*[count("+pre+"/int)=8]" - ,pre+"/int[@name='1976-07-03T00:00:00Z'][.='2' ]" - ,pre+"/int[@name='1976-07-04T00:00:00Z']" + jul4 - ,pre+"/int[@name='1976-07-05T00:00:00Z'][.='2' ]" - ,pre+"/int[@name='1976-07-12T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-13T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-15T00:00:00Z'][.='2' ]" - ,pre+"/int[@name='1976-07-21T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-30T00:00:00Z'][.='1' ]" - ,meta+"/int[@name='before' ][.='2']" - ,meta+"/int[@name='after' ][.='1']" - ,meta+"/int[@name='between'][.='11']" - ); - - assertQ("check counts for month of facet by day with field mincount = 1", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,p, f - ,p+".start", "1976-07-01T00:00:00.000Z" - ,p+".end", "1976-07-01T00:00:00.000Z+1MONTH" - ,p+".gap", "+1DAY" - ,p+".other", "all" - ,"f." 
+ f + ".facet.mincount", "2" - ) - ,"*[count("+pre+"/int)=3]" - ,pre+"/int[@name='1976-07-03T00:00:00Z'][.='2' ]" - ,pre - ,pre+"/int[@name='1976-07-05T00:00:00Z'][.='2' ]" - ,pre+"/int[@name='1976-07-15T00:00:00Z'][.='2' ]" - ,meta+"/int[@name='before' ][.='2']" - ,meta+"/int[@name='after' ][.='1']" - ,meta+"/int[@name='between'][.='11']" - ); - - assertQ("check before is not inclusive of upper bound by default", - req("q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,p, f - ,p+".start", "1976-07-05T00:00:00.000Z" - ,p+".end", "1976-07-07T00:00:00.000Z" - ,p+".gap", "+1DAY" - ,p+".other", "all" - ) - ,"*[count("+pre+"/int)=2]" - ,pre+"/int[@name='1976-07-05T00:00:00Z'][.='2' ]" - ,pre+"/int[@name='1976-07-06T00:00:00Z'][.='0']" - - ,meta+"/int[@name='before' ][.='5']" - ); - assertQ("check after is not inclusive of lower bound by default (for dates)", - req("q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,p, f - ,p+".start", "1976-07-03T00:00:00.000Z" - ,p+".end", "1976-07-05T00:00:00.000Z" - ,p+".gap", "+1DAY" - ,p+".other", "all" - ) - ,"*[count("+pre+"/int)=2]" - ,pre+"/int[@name='1976-07-03T00:00:00Z'][.='2' ]" - ,pre+"/int[@name='1976-07-04T00:00:00Z']" + jul4 - - ,meta+"/int[@name='after' ][.='9']" - ); - - - assertQ("check hardend=false", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,p, f - ,p+".start", "1976-07-01T00:00:00.000Z" - ,p+".end", "1976-07-13T00:00:00.000Z" - ,p+".gap", "+5DAYS" - ,p+".other", "all" - ,p+".hardend","false" - ) - ,"*[count("+pre+"/int)=3]" - ,pre+"/int[@name='1976-07-01T00:00:00Z'][.='5' ]" - ,pre+"/int[@name='1976-07-06T00:00:00Z'][.='0' ]" - ,pre+"/int[@name='1976-07-11T00:00:00Z'][.='4' ]" - - ,meta+"/int[@name='before' ][.='2']" - ,meta+"/int[@name='after' ][.='3']" - ,meta+"/int[@name='between'][.='9']" - ); - - assertQ("check hardend=true", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,p, f - ,p+".start", "1976-07-01T00:00:00.000Z" - ,p+".end", "1976-07-13T00:00:00.000Z" - ,p+".gap", "+5DAYS" - ,p+".other", "all" - ,p+".hardend","true" - ) - ,"*[count("+pre+"/int)=3]" - ,pre+"/int[@name='1976-07-01T00:00:00Z'][.='5' ]" - ,pre+"/int[@name='1976-07-06T00:00:00Z'][.='0' ]" - ,pre+"/int[@name='1976-07-11T00:00:00Z'][.='1' ]" - - ,meta+"/int[@name='before' ][.='2']" - ,meta+"/int[@name='after' ][.='6']" - ,meta+"/int[@name='between'][.='6']" - ); - - //Fixed by SOLR-9080 related to the Gregorian Change Date - assertQ("check BC era", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,p, f - ,p+".start", "-0200-01-01T00:00:00Z" // BC - ,p+".end", "+0200-01-01T00:00:00Z" // AD - ,p+".gap", "+100YEARS" - ,p+".other", "all" - ) - ,pre+"/int[@name='-0200-01-01T00:00:00Z'][.='0']" - ,pre+"/int[@name='-0100-01-01T00:00:00Z'][.='0']" - ,pre+"/int[@name='0000-01-01T00:00:00Z'][.='0']" - ,pre+"/int[@name='0100-01-01T00:00:00Z'][.='0']" - ,meta+"/int[@name='before' ][.='0']" - ,meta+"/int[@name='after' ][.='14']" - ,meta+"/int[@name='between'][.='0']" - - ); + assertQ( + "check counts for month of facet by day", + req( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + p, + f, + p + ".start", + "1976-07-01T00:00:00.000Z", + p + ".end", + "1976-07-01T00:00:00.000Z+1MONTH", + p + ".gap", + "+1DAY", + p + ".other", + "all", + p + ".method", + rangeFacetMethod + .toString() // This only applies to range faceting, won't be used for date faceting + ), + "*[count(" + pre + "/int)=31]", + pre + "/int[@name='1976-07-01T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-02T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-03T00:00:00Z'][.='2' ]", + pre
+ "/int[@name='1976-07-04T00:00:00Z']" + jul4, + pre + "/int[@name='1976-07-05T00:00:00Z'][.='2' ]", + pre + "/int[@name='1976-07-06T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-07T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-08T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-09T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-10T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-11T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-12T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-13T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-14T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-15T00:00:00Z'][.='2' ]", + pre + "/int[@name='1976-07-16T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-17T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-18T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-19T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-21T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-22T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-23T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-24T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-25T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-26T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-27T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-28T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-29T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-30T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-31T00:00:00Z'][.='0']", + meta + "/int[@name='before' ][.='2']", + meta + "/int[@name='after' ][.='1']", + meta + "/int[@name='between'][.='11']"); + + assertQ( + "check counts for month of facet by day with global mincount = 1", + req( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + p, + f, + p + ".start", + "1976-07-01T00:00:00.000Z", + p + ".end", + "1976-07-01T00:00:00.000Z+1MONTH", + p + ".gap", + "+1DAY", + p + ".other", + "all", + "facet.mincount", + "1"), + "*[count(" + pre + "/int)=8]", + pre + "/int[@name='1976-07-03T00:00:00Z'][.='2' ]", + pre + "/int[@name='1976-07-04T00:00:00Z']" + jul4, + pre + "/int[@name='1976-07-05T00:00:00Z'][.='2' ]", + pre + "/int[@name='1976-07-12T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-13T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-15T00:00:00Z'][.='2' ]", + pre + "/int[@name='1976-07-21T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-30T00:00:00Z'][.='1' ]", + meta + "/int[@name='before' ][.='2']", + meta + "/int[@name='after' ][.='1']", + meta + "/int[@name='between'][.='11']"); + + assertQ( + "check counts for month of facet by day with field mincount = 2", + req( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + p, + f, + p + ".start", + "1976-07-01T00:00:00.000Z", + p + ".end", + "1976-07-01T00:00:00.000Z+1MONTH", + p + ".gap", + "+1DAY", + p + ".other", + "all", + "f."
+ f + ".facet.mincount", + "2"), + "*[count(" + pre + "/int)=3]", + pre + "/int[@name='1976-07-03T00:00:00Z'][.='2' ]", + pre, + pre + "/int[@name='1976-07-05T00:00:00Z'][.='2' ]", + pre + "/int[@name='1976-07-15T00:00:00Z'][.='2' ]", + meta + "/int[@name='before' ][.='2']", + meta + "/int[@name='after' ][.='1']", + meta + "/int[@name='between'][.='11']"); + assertQ( + "check before is not inclusive of upper bound by default", + req( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + p, + f, + p + ".start", + "1976-07-05T00:00:00.000Z", + p + ".end", + "1976-07-07T00:00:00.000Z", + p + ".gap", + "+1DAY", + p + ".other", + "all"), + "*[count(" + pre + "/int)=2]", + pre + "/int[@name='1976-07-05T00:00:00Z'][.='2' ]", + pre + "/int[@name='1976-07-06T00:00:00Z'][.='0']", + meta + "/int[@name='before' ][.='5']"); + assertQ( + "check after is not inclusive of lower bound by default (for dates)", + req( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + p, + f, + p + ".start", + "1976-07-03T00:00:00.000Z", + p + ".end", + "1976-07-05T00:00:00.000Z", + p + ".gap", + "+1DAY", + p + ".other", + "all"), + "*[count(" + pre + "/int)=2]", + pre + "/int[@name='1976-07-03T00:00:00Z'][.='2' ]", + pre + "/int[@name='1976-07-04T00:00:00Z']" + jul4, + meta + "/int[@name='after' ][.='9']"); + + assertQ( + "check hardend=false", + req( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + p, + f, + p + ".start", + "1976-07-01T00:00:00.000Z", + p + ".end", + "1976-07-13T00:00:00.000Z", + p + ".gap", + "+5DAYS", + p + ".other", + "all", + p + ".hardend", + "false"), + "*[count(" + pre + "/int)=3]", + pre + "/int[@name='1976-07-01T00:00:00Z'][.='5' ]", + pre + "/int[@name='1976-07-06T00:00:00Z'][.='0' ]", + pre + "/int[@name='1976-07-11T00:00:00Z'][.='4' ]", + meta + "/int[@name='before' ][.='2']", + meta + "/int[@name='after' ][.='3']", + meta + "/int[@name='between'][.='9']"); + + assertQ( + "check hardend=true", + req( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + p, + f, + p + ".start", + "1976-07-01T00:00:00.000Z", + p + ".end", + "1976-07-13T00:00:00.000Z", + p + ".gap", + "+5DAYS", + p + ".other", + "all", + p + ".hardend", + "true"), + "*[count(" + pre + "/int)=3]", + pre + "/int[@name='1976-07-01T00:00:00Z'][.='5' ]", + pre + "/int[@name='1976-07-06T00:00:00Z'][.='0' ]", + pre + "/int[@name='1976-07-11T00:00:00Z'][.='1' ]", + meta + "/int[@name='before' ][.='2']", + meta + "/int[@name='after' ][.='6']", + meta + "/int[@name='between'][.='6']"); + + // Fixed by SOLR-9080 related to the Gregorian Change Date + assertQ( + "check BC era", + req( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + p, + f, + p + ".start", + "-0200-01-01T00:00:00Z" // BC + , + p + ".end", + "+0200-01-01T00:00:00Z" // AD + , + p + ".gap", + "+100YEARS", + p + ".other", + "all"), + pre + "/int[@name='-0200-01-01T00:00:00Z'][.='0']", + pre + "/int[@name='-0100-01-01T00:00:00Z'][.='0']", + pre + "/int[@name='0000-01-01T00:00:00Z'][.='0']", + pre + "/int[@name='0100-01-01T00:00:00Z'][.='0']", + meta + "/int[@name='before' ][.='0']", + meta + "/int[@name='after' ][.='14']", + meta + "/int[@name='between'][.='0']"); } @Test @@ -1250,313 +1539,426 @@ public void testDateRangeFieldDateRangeFacetsWithIncludeOption() { helpTestDateRangeFacetsWithIncludeOption("a_drf"); } - /** Similar to helpTestDateFacets, but for different fields with test data - exactly on boundary marks */ + /** + * Similar to helpTestDateFacets, but for different fields with test data exactly on boundary + * marks + */ private void 
helpTestDateRangeFacetsWithIncludeOption(final String fieldName) { final String p = "facet.range"; final String b = "facet_ranges"; final String f = fieldName; final String c = "/lst[@name='counts']"; - final String pre = "//lst[@name='"+b+"']/lst[@name='"+f+"']" + c; + final String pre = "//lst[@name='" + b + "']/lst[@name='" + f + "']" + c; final String meta = pre + "/../"; - assertQ("checking counts for lower", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,p, f - ,p+".start", "1976-07-01T00:00:00.000Z" - ,p+".end", "1976-07-16T00:00:00.000Z" - ,p+".gap", "+1DAY" - ,p+".other", "all" - ,p+".include", "lower" - ) - // 15 days + pre+post+inner = 18 - ,"*[count("+pre+"/int)=15]" - ,pre+"/int[@name='1976-07-01T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-02T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-03T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-04T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-05T00:00:00Z'][.='2' ]" - ,pre+"/int[@name='1976-07-06T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-07T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-08T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-09T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-10T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-11T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-12T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-13T00:00:00Z'][.='2' ]" - ,pre+"/int[@name='1976-07-14T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-15T00:00:00Z'][.='1' ]" - // - ,meta+"/int[@name='before' ][.='1']" - ,meta+"/int[@name='after' ][.='1']" - ,meta+"/int[@name='between'][.='8']" - ); - - assertQ("checking counts for upper", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,p, f - ,p+".start", "1976-07-01T00:00:00.000Z" - ,p+".end", "1976-07-16T00:00:00.000Z" - ,p+".gap", "+1DAY" - ,p+".other", "all" - ,p+".include", "upper" - ) - // 15 days + pre+post+inner = 18 - ,"*[count("+pre+"/int)=15]" - ,pre+"/int[@name='1976-07-01T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-02T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-03T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-04T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-05T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-06T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-07T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-08T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-09T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-10T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-11T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-12T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-13T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-14T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-15T00:00:00Z'][.='1' ]" - // - ,meta+"/int[@name='before' ][.='2']" - ,meta+"/int[@name='after' ][.='1']" - ,meta+"/int[@name='between'][.='7']" - ); - - assertQ("checking counts for lower & upper", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,p, f - ,p+".start", "1976-07-01T00:00:00.000Z" - ,p+".end", "1976-07-16T00:00:00.000Z" - ,p+".gap", "+1DAY" - ,p+".other", "all" - ,p+".include", "lower" - ,p+".include", "upper" - ) - // 15 days + pre+post+inner = 18 - ,"*[count("+pre+"/int)=15]" - ,pre+"/int[@name='1976-07-01T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-02T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-03T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-04T00:00:00Z'][.='2' ]" - ,pre+"/int[@name='1976-07-05T00:00:00Z'][.='2' ]" - ,pre+"/int[@name='1976-07-06T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-07T00:00:00Z'][.='1' ]" - 
,pre+"/int[@name='1976-07-08T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-09T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-10T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-11T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-12T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-13T00:00:00Z'][.='2' ]" - ,pre+"/int[@name='1976-07-14T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-15T00:00:00Z'][.='1' ]" - // - ,meta+"/int[@name='before' ][.='1']" - ,meta+"/int[@name='after' ][.='1']" - ,meta+"/int[@name='between'][.='8']" - ); - - assertQ("checking counts for upper & edge", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,p, f - ,p+".start", "1976-07-01T00:00:00.000Z" - ,p+".end", "1976-07-16T00:00:00.000Z" - ,p+".gap", "+1DAY" - ,p+".other", "all" - ,p+".include", "upper" - ,p+".include", "edge" - ) - // 15 days + pre+post+inner = 18 - ,"*[count("+pre+"/int)=15]" - ,pre+"/int[@name='1976-07-01T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-02T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-03T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-04T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-05T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-06T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-07T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-08T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-09T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-10T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-11T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-12T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-13T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-14T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-15T00:00:00Z'][.='1' ]" - // - ,meta+"/int[@name='before' ][.='1']" - ,meta+"/int[@name='after' ][.='1']" - ,meta+"/int[@name='between'][.='8']" - ); - - assertQ("checking counts for upper & outer", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,p, f - ,p+".start", "1976-07-01T00:00:00.000Z" - ,p+".end", "1976-07-13T00:00:00.000Z" // smaller now - ,p+".gap", "+1DAY" - ,p+".other", "all" - ,p+".include", "upper" - ,p+".include", "outer" - ) - // 12 days + pre+post+inner = 15 - ,"*[count("+pre+"/int)=12]" - ,pre+"/int[@name='1976-07-01T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-02T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-03T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-04T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-05T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-06T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-07T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-08T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-09T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-10T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-11T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-12T00:00:00Z'][.='1' ]" - // - ,meta+"/int[@name='before' ][.='2']" - ,meta+"/int[@name='after' ][.='4']" - ,meta+"/int[@name='between'][.='5']" - ); - - assertQ("checking counts for lower & edge", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,p, f - ,p+".start", "1976-07-01T00:00:00.000Z" - ,p+".end", "1976-07-13T00:00:00.000Z" // smaller now - ,p+".gap", "+1DAY" - ,p+".other", "all" - ,p+".include", "lower" - ,p+".include", "edge" - ) - // 12 days + pre+post+inner = 15 - ,"*[count("+pre+"/int)=12]" - ,pre+"/int[@name='1976-07-01T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-02T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-03T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-04T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-05T00:00:00Z'][.='2' ]" - 
,pre+"/int[@name='1976-07-06T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-07T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-08T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-09T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-10T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-11T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-12T00:00:00Z'][.='1' ]" - // - ,meta+"/int[@name='before' ][.='1']" - ,meta+"/int[@name='after' ][.='3']" - ,meta+"/int[@name='between'][.='6']" - ); - - assertQ("checking counts for lower & outer", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,p, f - ,p+".start", "1976-07-01T00:00:00.000Z" - ,p+".end", "1976-07-13T00:00:00.000Z" // smaller now - ,p+".gap", "+1DAY" - ,p+".other", "all" - ,p+".include", "lower" - ,p+".include", "outer" - ) - // 12 days + pre+post+inner = 15 - ,"*[count("+pre+"/int)=12]" - ,pre+"/int[@name='1976-07-01T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-02T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-03T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-04T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-05T00:00:00Z'][.='2' ]" - ,pre+"/int[@name='1976-07-06T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-07T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-08T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-09T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-10T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-11T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-12T00:00:00Z'][.='0']" - // - ,meta+"/int[@name='before' ][.='2']" - ,meta+"/int[@name='after' ][.='4']" - ,meta+"/int[@name='between'][.='5']" - ); - - assertQ("checking counts for lower & edge & outer", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,p, f - ,p+".start", "1976-07-01T00:00:00.000Z" - ,p+".end", "1976-07-13T00:00:00.000Z" // smaller now - ,p+".gap", "+1DAY" - ,p+".other", "all" - ,p+".include", "lower" - ,p+".include", "edge" - ,p+".include", "outer" - ) - // 12 days + pre+post+inner = 15 - ,"*[count("+pre+"/int)=12]" - ,pre+"/int[@name='1976-07-01T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-02T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-03T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-04T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-05T00:00:00Z'][.='2' ]" - ,pre+"/int[@name='1976-07-06T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-07T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-08T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-09T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-10T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-11T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-12T00:00:00Z'][.='1' ]" - // - ,meta+"/int[@name='before' ][.='2']" - ,meta+"/int[@name='after' ][.='4']" - ,meta+"/int[@name='between'][.='6']" - ); - - assertQ("checking counts for all", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,p, f - ,p+".start", "1976-07-01T00:00:00.000Z" - ,p+".end", "1976-07-13T00:00:00.000Z" // smaller now - ,p+".gap", "+1DAY" - ,p+".other", "all" - ,p+".include", "all" - ) - // 12 days + pre+post+inner = 15 - ,"*[count("+pre+"/int)=12]" - ,pre+"/int[@name='1976-07-01T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-02T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-03T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-04T00:00:00Z'][.='2' ]" - ,pre+"/int[@name='1976-07-05T00:00:00Z'][.='2' ]" - ,pre+"/int[@name='1976-07-06T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-07T00:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-08T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-09T00:00:00Z'][.='0']" - 
,pre+"/int[@name='1976-07-10T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-11T00:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-12T00:00:00Z'][.='1' ]" - // - ,meta+"/int[@name='before' ][.='2']" - ,meta+"/int[@name='after' ][.='4']" - ,meta+"/int[@name='between'][.='6']" - ); + assertQ( + "checking counts for lower", + req( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + p, + f, + p + ".start", + "1976-07-01T00:00:00.000Z", + p + ".end", + "1976-07-16T00:00:00.000Z", + p + ".gap", + "+1DAY", + p + ".other", + "all", + p + ".include", + "lower") + // 15 days + pre+post+inner = 18 + , + "*[count(" + pre + "/int)=15]", + pre + "/int[@name='1976-07-01T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-02T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-03T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-04T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-05T00:00:00Z'][.='2' ]", + pre + "/int[@name='1976-07-06T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-07T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-08T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-09T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-10T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-11T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-12T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-13T00:00:00Z'][.='2' ]", + pre + "/int[@name='1976-07-14T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-15T00:00:00Z'][.='1' ]" + // + , + meta + "/int[@name='before' ][.='1']", + meta + "/int[@name='after' ][.='1']", + meta + "/int[@name='between'][.='8']"); + + assertQ( + "checking counts for upper", + req( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + p, + f, + p + ".start", + "1976-07-01T00:00:00.000Z", + p + ".end", + "1976-07-16T00:00:00.000Z", + p + ".gap", + "+1DAY", + p + ".other", + "all", + p + ".include", + "upper") + // 15 days + pre+post+inner = 18 + , + "*[count(" + pre + "/int)=15]", + pre + "/int[@name='1976-07-01T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-02T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-03T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-04T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-05T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-06T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-07T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-08T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-09T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-10T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-11T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-12T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-13T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-14T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-15T00:00:00Z'][.='1' ]" + // + , + meta + "/int[@name='before' ][.='2']", + meta + "/int[@name='after' ][.='1']", + meta + "/int[@name='between'][.='7']"); + + assertQ( + "checking counts for lower & upper", + req( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + p, + f, + p + ".start", + "1976-07-01T00:00:00.000Z", + p + ".end", + "1976-07-16T00:00:00.000Z", + p + ".gap", + "+1DAY", + p + ".other", + "all", + p + ".include", + "lower", + p + ".include", + "upper") + // 15 days + pre+post+inner = 18 + , + "*[count(" + pre + "/int)=15]", + pre + "/int[@name='1976-07-01T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-02T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-03T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-04T00:00:00Z'][.='2' ]", + pre + "/int[@name='1976-07-05T00:00:00Z'][.='2' ]", + pre + 
"/int[@name='1976-07-06T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-07T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-08T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-09T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-10T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-11T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-12T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-13T00:00:00Z'][.='2' ]", + pre + "/int[@name='1976-07-14T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-15T00:00:00Z'][.='1' ]" + // + , + meta + "/int[@name='before' ][.='1']", + meta + "/int[@name='after' ][.='1']", + meta + "/int[@name='between'][.='8']"); + + assertQ( + "checking counts for upper & edge", + req( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + p, + f, + p + ".start", + "1976-07-01T00:00:00.000Z", + p + ".end", + "1976-07-16T00:00:00.000Z", + p + ".gap", + "+1DAY", + p + ".other", + "all", + p + ".include", + "upper", + p + ".include", + "edge") + // 15 days + pre+post+inner = 18 + , + "*[count(" + pre + "/int)=15]", + pre + "/int[@name='1976-07-01T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-02T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-03T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-04T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-05T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-06T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-07T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-08T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-09T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-10T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-11T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-12T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-13T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-14T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-15T00:00:00Z'][.='1' ]" + // + , + meta + "/int[@name='before' ][.='1']", + meta + "/int[@name='after' ][.='1']", + meta + "/int[@name='between'][.='8']"); + + assertQ( + "checking counts for upper & outer", + req( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + p, + f, + p + ".start", + "1976-07-01T00:00:00.000Z", + p + ".end", + "1976-07-13T00:00:00.000Z" // smaller now + , + p + ".gap", + "+1DAY", + p + ".other", + "all", + p + ".include", + "upper", + p + ".include", + "outer") + // 12 days + pre+post+inner = 15 + , + "*[count(" + pre + "/int)=12]", + pre + "/int[@name='1976-07-01T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-02T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-03T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-04T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-05T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-06T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-07T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-08T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-09T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-10T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-11T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-12T00:00:00Z'][.='1' ]" + // + , + meta + "/int[@name='before' ][.='2']", + meta + "/int[@name='after' ][.='4']", + meta + "/int[@name='between'][.='5']"); + + assertQ( + "checking counts for lower & edge", + req( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + p, + f, + p + ".start", + "1976-07-01T00:00:00.000Z", + p + ".end", + "1976-07-13T00:00:00.000Z" // smaller now + , + p + ".gap", + "+1DAY", + p + ".other", + "all", + p + ".include", + "lower", + p + ".include", + "edge") + // 12 days + pre+post+inner = 15 + , + 
"*[count(" + pre + "/int)=12]", + pre + "/int[@name='1976-07-01T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-02T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-03T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-04T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-05T00:00:00Z'][.='2' ]", + pre + "/int[@name='1976-07-06T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-07T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-08T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-09T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-10T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-11T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-12T00:00:00Z'][.='1' ]" + // + , + meta + "/int[@name='before' ][.='1']", + meta + "/int[@name='after' ][.='3']", + meta + "/int[@name='between'][.='6']"); + + assertQ( + "checking counts for lower & outer", + req( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + p, + f, + p + ".start", + "1976-07-01T00:00:00.000Z", + p + ".end", + "1976-07-13T00:00:00.000Z" // smaller now + , + p + ".gap", + "+1DAY", + p + ".other", + "all", + p + ".include", + "lower", + p + ".include", + "outer") + // 12 days + pre+post+inner = 15 + , + "*[count(" + pre + "/int)=12]", + pre + "/int[@name='1976-07-01T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-02T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-03T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-04T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-05T00:00:00Z'][.='2' ]", + pre + "/int[@name='1976-07-06T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-07T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-08T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-09T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-10T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-11T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-12T00:00:00Z'][.='0']" + // + , + meta + "/int[@name='before' ][.='2']", + meta + "/int[@name='after' ][.='4']", + meta + "/int[@name='between'][.='5']"); + + assertQ( + "checking counts for lower & edge & outer", + req( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + p, + f, + p + ".start", + "1976-07-01T00:00:00.000Z", + p + ".end", + "1976-07-13T00:00:00.000Z" // smaller now + , + p + ".gap", + "+1DAY", + p + ".other", + "all", + p + ".include", + "lower", + p + ".include", + "edge", + p + ".include", + "outer") + // 12 days + pre+post+inner = 15 + , + "*[count(" + pre + "/int)=12]", + pre + "/int[@name='1976-07-01T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-02T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-03T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-04T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-05T00:00:00Z'][.='2' ]", + pre + "/int[@name='1976-07-06T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-07T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-08T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-09T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-10T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-11T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-12T00:00:00Z'][.='1' ]" + // + , + meta + "/int[@name='before' ][.='2']", + meta + "/int[@name='after' ][.='4']", + meta + "/int[@name='between'][.='6']"); + + assertQ( + "checking counts for all", + req( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + p, + f, + p + ".start", + "1976-07-01T00:00:00.000Z", + p + ".end", + "1976-07-13T00:00:00.000Z" // smaller now + , + p + ".gap", + "+1DAY", + p + ".other", + "all", + p + ".include", + "all") + // 12 days + pre+post+inner = 15 + , + 
"*[count(" + pre + "/int)=12]", + pre + "/int[@name='1976-07-01T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-02T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-03T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-04T00:00:00Z'][.='2' ]", + pre + "/int[@name='1976-07-05T00:00:00Z'][.='2' ]", + pre + "/int[@name='1976-07-06T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-07T00:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-08T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-09T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-10T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-11T00:00:00Z'][.='0']", + pre + "/int[@name='1976-07-12T00:00:00Z'][.='1' ]" + // + , + meta + "/int[@name='before' ][.='2']", + meta + "/int[@name='after' ][.='4']", + meta + "/int[@name='between'][.='6']"); } @Test @@ -1569,98 +1971,128 @@ private void helpTestDateRangeFacetsWithTz(final String fieldName) { final String b = "facet_ranges"; final String f = fieldName; final String c = "/lst[@name='counts']"; - final String pre = "//lst[@name='"+b+"']/lst[@name='"+f+"']" + c; + final String pre = "//lst[@name='" + b + "']/lst[@name='" + f + "']" + c; final String meta = pre + "/../"; final String TZ = "America/Los_Angeles"; - assumeTrue("Test requires JVM to know about about TZ: " + TZ, - TimeZoneUtils.KNOWN_TIMEZONE_IDS.contains(TZ)); - - assertQ("checking facet counts for fixed now, using TZ: " + TZ, - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,"NOW", "205078333000" // 1976-07-01T14:12:13.000Z - ,"TZ", TZ - ,p, f - ,p+".start", "NOW/MONTH" - ,p+".end", "NOW/MONTH+15DAYS" - ,p+".gap", "+1DAY" - ,p+".other", "all" - ,p+".include", "lower" - ) - // 15 days + pre+post+inner = 18 - ,"*[count("+pre+"/int)=15]" - ,pre+"/int[@name='1976-07-01T07:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-02T07:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-03T07:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-04T07:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-05T07:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-06T07:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-07T07:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-08T07:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-09T07:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-10T07:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-11T07:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-12T07:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-13T07:00:00Z'][.='1' ]" - ,pre+"/int[@name='1976-07-14T07:00:00Z'][.='0']" - ,pre+"/int[@name='1976-07-15T07:00:00Z'][.='1' ]" - // - ,meta+"/int[@name='before' ][.='2']" - ,meta+"/int[@name='after' ][.='1']" - ,meta+"/int[@name='between'][.='7']" - ); + assumeTrue( + "Test requires JVM to know about about TZ: " + TZ, + TimeZoneUtils.KNOWN_TIMEZONE_IDS.contains(TZ)); + + assertQ( + "checking facet counts for fixed now, using TZ: " + TZ, + req( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "NOW", + "205078333000" // 1976-07-01T14:12:13.000Z + , + "TZ", + TZ, + p, + f, + p + ".start", + "NOW/MONTH", + p + ".end", + "NOW/MONTH+15DAYS", + p + ".gap", + "+1DAY", + p + ".other", + "all", + p + ".include", + "lower") + // 15 days + pre+post+inner = 18 + , + "*[count(" + pre + "/int)=15]", + pre + "/int[@name='1976-07-01T07:00:00Z'][.='0']", + pre + "/int[@name='1976-07-02T07:00:00Z'][.='0']", + pre + "/int[@name='1976-07-03T07:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-04T07:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-05T07:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-06T07:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-07T07:00:00Z'][.='0']", 
+ pre + "/int[@name='1976-07-08T07:00:00Z'][.='0']", + pre + "/int[@name='1976-07-09T07:00:00Z'][.='0']", + pre + "/int[@name='1976-07-10T07:00:00Z'][.='0']", + pre + "/int[@name='1976-07-11T07:00:00Z'][.='0']", + pre + "/int[@name='1976-07-12T07:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-13T07:00:00Z'][.='1' ]", + pre + "/int[@name='1976-07-14T07:00:00Z'][.='0']", + pre + "/int[@name='1976-07-15T07:00:00Z'][.='1' ]" + // + , + meta + "/int[@name='before' ][.='2']", + meta + "/int[@name='after' ][.='1']", + meta + "/int[@name='between'][.='7']"); // NOTE: the counts should all be zero, what we really care about // is that the computed lower bounds take into account DST change - assertQ("checking facet counts arround DST change for TZ: " + TZ, - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,"NOW", "1288606136000" // 2010-11-01T10:08:56.235Z - ,"TZ", TZ - ,p, f - ,p+".start", "NOW/MONTH" - ,p+".end", "NOW/MONTH+15DAYS" - ,p+".gap", "+1DAY" - ,p+".other", "all" - ,p+".include", "lower" - ) - // 15 days + pre+post+inner = 18 - ,"*[count("+pre+"/int)=15]" - ,pre+"/int[@name='2010-11-01T07:00:00Z'][.='0']" - ,pre+"/int[@name='2010-11-02T07:00:00Z'][.='0']" - ,pre+"/int[@name='2010-11-03T07:00:00Z'][.='0']" - ,pre+"/int[@name='2010-11-04T07:00:00Z'][.='0']" - ,pre+"/int[@name='2010-11-05T07:00:00Z'][.='0']" - ,pre+"/int[@name='2010-11-06T07:00:00Z'][.='0']" - ,pre+"/int[@name='2010-11-07T07:00:00Z'][.='0']" - ,pre+"/int[@name='2010-11-08T08:00:00Z'][.='0']" // BOOM! - ,pre+"/int[@name='2010-11-09T08:00:00Z'][.='0']" - ,pre+"/int[@name='2010-11-10T08:00:00Z'][.='0']" - ,pre+"/int[@name='2010-11-11T08:00:00Z'][.='0']" - ,pre+"/int[@name='2010-11-12T08:00:00Z'][.='0']" - ,pre+"/int[@name='2010-11-13T08:00:00Z'][.='0']" - ,pre+"/int[@name='2010-11-14T08:00:00Z'][.='0']" - ,pre+"/int[@name='2010-11-15T08:00:00Z'][.='0']" - ); - + assertQ( + "checking facet counts arround DST change for TZ: " + TZ, + req( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "NOW", + "1288606136000" // 2010-11-01T10:08:56.235Z + , + "TZ", + TZ, + p, + f, + p + ".start", + "NOW/MONTH", + p + ".end", + "NOW/MONTH+15DAYS", + p + ".gap", + "+1DAY", + p + ".other", + "all", + p + ".include", + "lower") + // 15 days + pre+post+inner = 18 + , + "*[count(" + pre + "/int)=15]", + pre + "/int[@name='2010-11-01T07:00:00Z'][.='0']", + pre + "/int[@name='2010-11-02T07:00:00Z'][.='0']", + pre + "/int[@name='2010-11-03T07:00:00Z'][.='0']", + pre + "/int[@name='2010-11-04T07:00:00Z'][.='0']", + pre + "/int[@name='2010-11-05T07:00:00Z'][.='0']", + pre + "/int[@name='2010-11-06T07:00:00Z'][.='0']", + pre + "/int[@name='2010-11-07T07:00:00Z'][.='0']", + pre + "/int[@name='2010-11-08T08:00:00Z'][.='0']" // BOOM! 
+ , + pre + "/int[@name='2010-11-09T08:00:00Z'][.='0']", + pre + "/int[@name='2010-11-10T08:00:00Z'][.='0']", + pre + "/int[@name='2010-11-11T08:00:00Z'][.='0']", + pre + "/int[@name='2010-11-12T08:00:00Z'][.='0']", + pre + "/int[@name='2010-11-13T08:00:00Z'][.='0']", + pre + "/int[@name='2010-11-14T08:00:00Z'][.='0']", + pre + "/int[@name='2010-11-15T08:00:00Z'][.='0']"); } @Test public void testNumericRangeFacetsTrieFloat() { helpTestFractionalNumberRangeFacets("range_facet_f"); } + @Test public void testNumericRangeFacetsTrieDouble() { helpTestFractionalNumberRangeFacets("range_facet_d"); } - + @Test public void testNumericRangeFacetsTrieFloatDocValues() { helpTestFractionalNumberRangeFacets("range_facet_f", FacetRangeMethod.DV); } + @Test public void testNumericRangeFacetsTrieDoubleDocValues() { helpTestFractionalNumberRangeFacets("range_facet_d", FacetRangeMethod.DV); @@ -1670,274 +2102,298 @@ public void testNumericRangeFacetsTrieDoubleDocValues() { public void testNumericRangeFacetsOverflowTrieDouble() { helpTestNumericRangeFacetsDoubleOverflow("range_facet_d", FacetRangeMethod.FILTER); } - + @Test public void testNumericRangeFacetsOverflowTrieDoubleDocValue() { helpTestNumericRangeFacetsDoubleOverflow("range_facet_d", FacetRangeMethod.DV); } - private void helpTestNumericRangeFacetsDoubleOverflow(final String fieldName, final FacetRangeMethod method) { + private void helpTestNumericRangeFacetsDoubleOverflow( + final String fieldName, final FacetRangeMethod method) { final String f = fieldName; - final String pre = "//lst[@name='facet_ranges']/lst[@name='"+f+"']/lst[@name='counts']"; + final String pre = "//lst[@name='facet_ranges']/lst[@name='" + f + "']/lst[@name='counts']"; final String meta = pre + "/../"; String start = "0.0"; - String gap = Double.toString(Float.MAX_VALUE ); + String gap = Double.toString(Float.MAX_VALUE); String end = Double.toString(((double) Float.MAX_VALUE) * 3D); String mid = Double.toString(((double) Float.MAX_VALUE) * 2D); - assertQ(f+": checking counts for lower", - req( "q", "id_i1:[30 TO 60]" - ,"rows", "0" - ,"facet", "true" - ,"facet.range", f - ,"facet.range.method", method.toString() - ,"facet.range.start", start - ,"facet.range.end", end - ,"facet.range.gap", gap - ,"facet.range.other", "all" - ,"facet.range.include", "lower" - ) - ,"*[count("+pre+"/int)=3]" - ,pre+"/int[@name='"+start+"'][.='6' ]" - ,pre+"/int[@name='"+mid+"'][.='0' ]" - // - ,meta+"/double[@name='end' ][.='"+end+"']" - ,meta+"/int[@name='before' ][.='0']" - ,meta+"/int[@name='after' ][.='0']" - ,meta+"/int[@name='between'][.='6']" - ); - } - + assertQ( + f + ": checking counts for lower", + req( + "q", + "id_i1:[30 TO 60]", + "rows", + "0", + "facet", + "true", + "facet.range", + f, + "facet.range.method", + method.toString(), + "facet.range.start", + start, + "facet.range.end", + end, + "facet.range.gap", + gap, + "facet.range.other", + "all", + "facet.range.include", + "lower"), + "*[count(" + pre + "/int)=3]", + pre + "/int[@name='" + start + "'][.='6' ]", + pre + "/int[@name='" + mid + "'][.='0' ]" + // + , + meta + "/double[@name='end' ][.='" + end + "']", + meta + "/int[@name='before' ][.='0']", + meta + "/int[@name='after' ][.='0']", + meta + "/int[@name='between'][.='6']"); + } + private void helpTestFractionalNumberRangeFacets(final String fieldName) { helpTestFractionalNumberRangeFacets(fieldName, FacetRangeMethod.FILTER); } - private void helpTestFractionalNumberRangeFacets(final String fieldName, FacetRangeMethod method) { + + private void 
helpTestFractionalNumberRangeFacets( + final String fieldName, FacetRangeMethod method) { final String f = fieldName; - final String pre = "//lst[@name='facet_ranges']/lst[@name='"+f+"']/lst[@name='counts']"; + final String pre = "//lst[@name='facet_ranges']/lst[@name='" + f + "']/lst[@name='counts']"; final String meta = pre + "/../"; - assertQ(f+": checking counts for lower", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,"facet.range", f - ,"facet.range.method", method.toString() - ,"facet.range.start", "10" - ,"facet.range.end", "50" - ,"facet.range.gap", "10" - ,"facet.range.other", "all" - ,"facet.range.include", "lower" - ) - ,"*[count("+pre+"/int)=4]" - ,pre+"/int[@name='10.0'][.='1' ]" - ,pre+"/int[@name='20.0'][.='3' ]" - ,pre+"/int[@name='30.0'][.='2' ]" - ,pre+"/int[@name='40.0'][.='0' ]" - // - ,meta+"/int[@name='before' ][.='0']" - ,meta+"/int[@name='after' ][.='0']" - ,meta+"/int[@name='between'][.='6']" - ); - - assertQ(f + ":checking counts for upper", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,"facet.range", f - ,"facet.range.method", method.toString() - ,"facet.range.start", "10" - ,"facet.range.end", "50" - ,"facet.range.gap", "10" - ,"facet.range.other", "all" - ,"facet.range.include", "upper" - ) - ,"*[count("+pre+"/int)=4]" - ,pre+"/int[@name='10.0'][.='2' ]" - ,pre+"/int[@name='20.0'][.='3' ]" - ,pre+"/int[@name='30.0'][.='1' ]" - ,pre+"/int[@name='40.0'][.='0' ]" - // - ,meta+"/int[@name='before' ][.='0']" - ,meta+"/int[@name='after' ][.='0']" - ,meta+"/int[@name='between'][.='6']" - ); - - assertQ(f + ":checking counts for lower & upper", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,"facet.range", f - ,"facet.range.method", method.toString() - ,"facet.range.start", "10" - ,"facet.range.end", "50" - ,"facet.range.gap", "10" - ,"facet.range.other", "all" - ,"facet.range.include", "upper" - ,"facet.range.include", "lower" - ) - ,"*[count("+pre+"/int)=4]" - ,pre+"/int[@name='10.0'][.='2' ]" - ,pre+"/int[@name='20.0'][.='4' ]" - ,pre+"/int[@name='30.0'][.='2' ]" - ,pre+"/int[@name='40.0'][.='0' ]" - // - ,meta+"/int[@name='before' ][.='0']" - ,meta+"/int[@name='after' ][.='0']" - ,meta+"/int[@name='between'][.='6']" - ); - - assertQ(f + ": checking counts for upper & edge", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,"facet.range", f - ,"facet.range.method", method.toString() - ,"facet.range.start", "20" - ,"facet.range.end", "50" - ,"facet.range.gap", "10" - ,"facet.range.other", "all" - ,"facet.range.include", "upper" - ,"facet.range.include", "edge" - ) - ,"*[count("+pre+"/int)=3]" - ,pre+"/int[@name='20.0'][.='4' ]" - ,pre+"/int[@name='30.0'][.='1' ]" - ,pre+"/int[@name='40.0'][.='0' ]" - // - ,meta+"/int[@name='before' ][.='1']" - ,meta+"/int[@name='after' ][.='0']" - ,meta+"/int[@name='between'][.='5']" - ); - - assertQ(f + ": checking counts for upper & outer", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,"facet.range", f - ,"facet.range.method", method.toString() - ,"facet.range.start", "10" - ,"facet.range.end", "30" - ,"facet.range.gap", "10" - ,"facet.range.other", "all" - ,"facet.range.include", "upper" - ,"facet.range.include", "outer" - ) - ,"*[count("+pre+"/int)=2]" - ,pre+"/int[@name='10.0'][.='2' ]" - ,pre+"/int[@name='20.0'][.='3' ]" - // - ,meta+"/int[@name='before' ][.='0']" - ,meta+"/int[@name='after' ][.='2']" - ,meta+"/int[@name='between'][.='5']" - ); - - assertQ(f + ": checking counts for lower & edge", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,"facet.range", f - 
,"facet.range.method", method.toString() - ,"facet.range.start", "10" - ,"facet.range.end", "30" - ,"facet.range.gap", "10" - ,"facet.range.other", "all" - ,"facet.range.include", "lower" - ,"facet.range.include", "edge" - ) - ,"*[count("+pre+"/int)=2]" - ,pre+"/int[@name='10.0'][.='1' ]" - ,pre+"/int[@name='20.0'][.='4' ]" - // - ,meta+"/int[@name='before' ][.='0']" - ,meta+"/int[@name='after' ][.='1']" - ,meta+"/int[@name='between'][.='5']" - ); - - assertQ(f + ": checking counts for lower & outer", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,"facet.range", f - ,"facet.range.method", method.toString() - ,"facet.range.start", "20" - ,"facet.range.end", "40" - ,"facet.range.gap", "10" - ,"facet.range.other", "all" - ,"facet.range.include", "lower" - ,"facet.range.include", "outer" - ) - ,"*[count("+pre+"/int)=2]" - ,pre+"/int[@name='20.0'][.='3' ]" - ,pre+"/int[@name='30.0'][.='2' ]" - // - ,meta+"/int[@name='before' ][.='2']" - ,meta+"/int[@name='after' ][.='0']" - ,meta+"/int[@name='between'][.='5']" - ); - - assertQ(f + ": checking counts for lower & edge & outer", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,"facet.range", f - ,"facet.range.method", method.toString() - ,"facet.range.start", "20" - ,"facet.range.end", "35.3" - ,"facet.range.gap", "10" - ,"facet.range.other", "all" - ,"facet.range.hardend", "true" - ,"facet.range.include", "lower" - ,"facet.range.include", "edge" - ,"facet.range.include", "outer" - ) - ,"*[count("+pre+"/int)=2]" - ,pre+"/int[@name='20.0'][.='3' ]" - ,pre+"/int[@name='30.0'][.='2' ]" - // - ,meta+"/int[@name='before' ][.='2']" - ,meta+"/int[@name='after' ][.='1']" - ,meta+"/int[@name='between'][.='5']" - ); - - assertQ(f + ": checking counts for include all", - req( "q", "*:*" - ,"rows", "0" - ,"facet", "true" - ,"facet.range", f - ,"facet.range.method", method.toString() - ,"facet.range.start", "20" - ,"facet.range.end", "35.3" - ,"facet.range.gap", "10" - ,"facet.range.other", "all" - ,"facet.range.hardend", "true" - ,"facet.range.include", "all" - ) - ,"*[count("+pre+"/int)=2]" - ,pre+"/int[@name='20.0'][.='4' ]" - ,pre+"/int[@name='30.0'][.='2' ]" - // - ,meta+"/int[@name='before' ][.='2']" - ,meta+"/int[@name='after' ][.='1']" - ,meta+"/int[@name='between'][.='5']" - ); + assertQ( + f + ": checking counts for lower", + req( + "q", "*:*", + "rows", "0", + "facet", "true", + "facet.range", f, + "facet.range.method", method.toString(), + "facet.range.start", "10", + "facet.range.end", "50", + "facet.range.gap", "10", + "facet.range.other", "all", + "facet.range.include", "lower"), + "*[count(" + pre + "/int)=4]", + pre + "/int[@name='10.0'][.='1' ]", + pre + "/int[@name='20.0'][.='3' ]", + pre + "/int[@name='30.0'][.='2' ]", + pre + "/int[@name='40.0'][.='0' ]" + // + , + meta + "/int[@name='before' ][.='0']", + meta + "/int[@name='after' ][.='0']", + meta + "/int[@name='between'][.='6']"); + + assertQ( + f + ":checking counts for upper", + req( + "q", "*:*", + "rows", "0", + "facet", "true", + "facet.range", f, + "facet.range.method", method.toString(), + "facet.range.start", "10", + "facet.range.end", "50", + "facet.range.gap", "10", + "facet.range.other", "all", + "facet.range.include", "upper"), + "*[count(" + pre + "/int)=4]", + pre + "/int[@name='10.0'][.='2' ]", + pre + "/int[@name='20.0'][.='3' ]", + pre + "/int[@name='30.0'][.='1' ]", + pre + "/int[@name='40.0'][.='0' ]" + // + , + meta + "/int[@name='before' ][.='0']", + meta + "/int[@name='after' ][.='0']", + meta + "/int[@name='between'][.='6']"); + + assertQ( + f + 
":checking counts for lower & upper", + req( + "q", "*:*", + "rows", "0", + "facet", "true", + "facet.range", f, + "facet.range.method", method.toString(), + "facet.range.start", "10", + "facet.range.end", "50", + "facet.range.gap", "10", + "facet.range.other", "all", + "facet.range.include", "upper", + "facet.range.include", "lower"), + "*[count(" + pre + "/int)=4]", + pre + "/int[@name='10.0'][.='2' ]", + pre + "/int[@name='20.0'][.='4' ]", + pre + "/int[@name='30.0'][.='2' ]", + pre + "/int[@name='40.0'][.='0' ]" + // + , + meta + "/int[@name='before' ][.='0']", + meta + "/int[@name='after' ][.='0']", + meta + "/int[@name='between'][.='6']"); + + assertQ( + f + ": checking counts for upper & edge", + req( + "q", "*:*", + "rows", "0", + "facet", "true", + "facet.range", f, + "facet.range.method", method.toString(), + "facet.range.start", "20", + "facet.range.end", "50", + "facet.range.gap", "10", + "facet.range.other", "all", + "facet.range.include", "upper", + "facet.range.include", "edge"), + "*[count(" + pre + "/int)=3]", + pre + "/int[@name='20.0'][.='4' ]", + pre + "/int[@name='30.0'][.='1' ]", + pre + "/int[@name='40.0'][.='0' ]" + // + , + meta + "/int[@name='before' ][.='1']", + meta + "/int[@name='after' ][.='0']", + meta + "/int[@name='between'][.='5']"); + + assertQ( + f + ": checking counts for upper & outer", + req( + "q", "*:*", + "rows", "0", + "facet", "true", + "facet.range", f, + "facet.range.method", method.toString(), + "facet.range.start", "10", + "facet.range.end", "30", + "facet.range.gap", "10", + "facet.range.other", "all", + "facet.range.include", "upper", + "facet.range.include", "outer"), + "*[count(" + pre + "/int)=2]", + pre + "/int[@name='10.0'][.='2' ]", + pre + "/int[@name='20.0'][.='3' ]" + // + , + meta + "/int[@name='before' ][.='0']", + meta + "/int[@name='after' ][.='2']", + meta + "/int[@name='between'][.='5']"); + + assertQ( + f + ": checking counts for lower & edge", + req( + "q", "*:*", + "rows", "0", + "facet", "true", + "facet.range", f, + "facet.range.method", method.toString(), + "facet.range.start", "10", + "facet.range.end", "30", + "facet.range.gap", "10", + "facet.range.other", "all", + "facet.range.include", "lower", + "facet.range.include", "edge"), + "*[count(" + pre + "/int)=2]", + pre + "/int[@name='10.0'][.='1' ]", + pre + "/int[@name='20.0'][.='4' ]" + // + , + meta + "/int[@name='before' ][.='0']", + meta + "/int[@name='after' ][.='1']", + meta + "/int[@name='between'][.='5']"); + + assertQ( + f + ": checking counts for lower & outer", + req( + "q", "*:*", + "rows", "0", + "facet", "true", + "facet.range", f, + "facet.range.method", method.toString(), + "facet.range.start", "20", + "facet.range.end", "40", + "facet.range.gap", "10", + "facet.range.other", "all", + "facet.range.include", "lower", + "facet.range.include", "outer"), + "*[count(" + pre + "/int)=2]", + pre + "/int[@name='20.0'][.='3' ]", + pre + "/int[@name='30.0'][.='2' ]" + // + , + meta + "/int[@name='before' ][.='2']", + meta + "/int[@name='after' ][.='0']", + meta + "/int[@name='between'][.='5']"); + + assertQ( + f + ": checking counts for lower & edge & outer", + req( + "q", "*:*", + "rows", "0", + "facet", "true", + "facet.range", f, + "facet.range.method", method.toString(), + "facet.range.start", "20", + "facet.range.end", "35.3", + "facet.range.gap", "10", + "facet.range.other", "all", + "facet.range.hardend", "true", + "facet.range.include", "lower", + "facet.range.include", "edge", + "facet.range.include", "outer"), + "*[count(" + pre + "/int)=2]", + pre + 
"/int[@name='20.0'][.='3' ]", + pre + "/int[@name='30.0'][.='2' ]" + // + , + meta + "/int[@name='before' ][.='2']", + meta + "/int[@name='after' ][.='1']", + meta + "/int[@name='between'][.='5']"); + + assertQ( + f + ": checking counts for include all", + req( + "q", "*:*", + "rows", "0", + "facet", "true", + "facet.range", f, + "facet.range.method", method.toString(), + "facet.range.start", "20", + "facet.range.end", "35.3", + "facet.range.gap", "10", + "facet.range.other", "all", + "facet.range.hardend", "true", + "facet.range.include", "all"), + "*[count(" + pre + "/int)=2]", + pre + "/int[@name='20.0'][.='4' ]", + pre + "/int[@name='30.0'][.='2' ]" + // + , + meta + "/int[@name='before' ][.='2']", + meta + "/int[@name='after' ][.='1']", + meta + "/int[@name='between'][.='5']"); } @Test public void testNumericRangeFacetsTrieInt() { helpTestWholeNumberRangeFacets("id_i1"); } + @Test public void testNumericRangeFacetsTrieLong() { helpTestWholeNumberRangeFacets("range_facet_l"); } - + @Test public void testNumericRangeFacetsTrieIntDocValues() { helpTestWholeNumberRangeFacets("id_i1", FacetRangeMethod.DV); } - + @Test public void testNumericRangeFacetsTrieLongDocValues() { helpTestWholeNumberRangeFacets("range_facet_l", FacetRangeMethod.DV); @@ -1947,288 +2403,333 @@ public void testNumericRangeFacetsTrieLongDocValues() { public void testNumericRangeFacetsOverflowTrieLong() { helpTestNumericRangeFacetsLongOverflow("range_facet_l", FacetRangeMethod.FILTER); } - + @Test public void testNumericRangeFacetsOverflowTrieLongDocValues() { helpTestNumericRangeFacetsLongOverflow("range_facet_l", FacetRangeMethod.DV); } - private void helpTestNumericRangeFacetsLongOverflow(final String fieldName, final FacetRangeMethod method) { + private void helpTestNumericRangeFacetsLongOverflow( + final String fieldName, final FacetRangeMethod method) { final String f = fieldName; - final String pre = "//lst[@name='facet_ranges']/lst[@name='"+f+"']/lst[@name='counts']"; + final String pre = "//lst[@name='facet_ranges']/lst[@name='" + f + "']/lst[@name='counts']"; final String meta = pre + "/../"; String start = "0"; - String gap = Long.toString(Integer.MAX_VALUE ); - String end = Long.toString( ((long)Integer.MAX_VALUE) * 3L ); - String mid = Long.toString(((long)Integer.MAX_VALUE) * 2L ); - - assertQ(f+": checking counts for lower", - req( "q", "id_i1:[30 TO 60]" - ,"rows", "0" - ,"facet", "true" - ,"facet.range", f - ,"facet.range.method", method.toString() - ,"facet.range.start", start - ,"facet.range.end", end - ,"facet.range.gap", gap - ,"facet.range.other", "all" - ,"facet.range.include", "lower" - ) - ,"*[count("+pre+"/int)=3]" - ,pre+"/int[@name='"+start+"'][.='6' ]" - ,pre+"/int[@name='"+mid+"'][.='0' ]" - // - ,meta+"/long[@name='end' ][.='"+end+"']" - ,meta+"/int[@name='before' ][.='0']" - ,meta+"/int[@name='after' ][.='0']" - ,meta+"/int[@name='between'][.='6']" - ); - } - + String gap = Long.toString(Integer.MAX_VALUE); + String end = Long.toString(((long) Integer.MAX_VALUE) * 3L); + String mid = Long.toString(((long) Integer.MAX_VALUE) * 2L); + + assertQ( + f + ": checking counts for lower", + req( + "q", + "id_i1:[30 TO 60]", + "rows", + "0", + "facet", + "true", + "facet.range", + f, + "facet.range.method", + method.toString(), + "facet.range.start", + start, + "facet.range.end", + end, + "facet.range.gap", + gap, + "facet.range.other", + "all", + "facet.range.include", + "lower"), + "*[count(" + pre + "/int)=3]", + pre + "/int[@name='" + start + "'][.='6' ]", + pre + "/int[@name='" + mid + 
"'][.='0' ]" + // + , + meta + "/long[@name='end' ][.='" + end + "']", + meta + "/int[@name='before' ][.='0']", + meta + "/int[@name='after' ][.='0']", + meta + "/int[@name='between'][.='6']"); + } + private void helpTestWholeNumberRangeFacets(final String fieldName) { helpTestWholeNumberRangeFacets(fieldName, FacetRangeMethod.FILTER); } - + private void helpTestWholeNumberRangeFacets(final String fieldName, FacetRangeMethod method) { // the float test covers a lot of the weird edge cases // here we just need some basic sanity checking of the parsing final String f = fieldName; - final String pre = "//lst[@name='facet_ranges']/lst[@name='"+f+"']/lst[@name='counts']"; + final String pre = "//lst[@name='facet_ranges']/lst[@name='" + f + "']/lst[@name='counts']"; final String meta = pre + "/../"; - assertQ(f+": checking counts for lower", - req( "q", "id_i1:[30 TO 60]" - ,"rows", "0" - ,"facet", "true" - ,"facet.range", f - ,"facet.range.method", method.toString() - ,"facet.range.start", "35" - ,"facet.range.end", "50" - ,"facet.range.gap", "5" - ,"facet.range.other", "all" - ,"facet.range.include", "lower" - ) - ,"*[count("+pre+"/int)=3]" - ,pre+"/int[@name='35'][.='0' ]" - ,pre+"/int[@name='40'][.='3' ]" - ,pre+"/int[@name='45'][.='3' ]" - // - ,meta+"/int[@name='before' ][.='0']" - ,meta+"/int[@name='after' ][.='0']" - ,meta+"/int[@name='between'][.='6']" - ); - - assertQ(f + ":checking counts for upper", - req( "q", "id_i1:[30 TO 60]" - ,"rows", "0" - ,"facet", "true" - ,"facet.range", f - ,"facet.range.method", method.toString() - ,"facet.range.start", "35" - ,"facet.range.end", "50" - ,"facet.range.gap", "5" - ,"facet.range.other", "all" - ,"facet.range.include", "upper" - ) - ,"*[count("+pre+"/int)=3]" - ,pre+"/int[@name='35'][.='0' ]" - ,pre+"/int[@name='40'][.='4' ]" - ,pre+"/int[@name='45'][.='2' ]" - // - ,meta+"/int[@name='before' ][.='0']" - ,meta+"/int[@name='after' ][.='0']" - ,meta+"/int[@name='between'][.='6']" - ); - + assertQ( + f + ": checking counts for lower", + req( + "q", "id_i1:[30 TO 60]", + "rows", "0", + "facet", "true", + "facet.range", f, + "facet.range.method", method.toString(), + "facet.range.start", "35", + "facet.range.end", "50", + "facet.range.gap", "5", + "facet.range.other", "all", + "facet.range.include", "lower"), + "*[count(" + pre + "/int)=3]", + pre + "/int[@name='35'][.='0' ]", + pre + "/int[@name='40'][.='3' ]", + pre + "/int[@name='45'][.='3' ]" + // + , + meta + "/int[@name='before' ][.='0']", + meta + "/int[@name='after' ][.='0']", + meta + "/int[@name='between'][.='6']"); + + assertQ( + f + ":checking counts for upper", + req( + "q", "id_i1:[30 TO 60]", + "rows", "0", + "facet", "true", + "facet.range", f, + "facet.range.method", method.toString(), + "facet.range.start", "35", + "facet.range.end", "50", + "facet.range.gap", "5", + "facet.range.other", "all", + "facet.range.include", "upper"), + "*[count(" + pre + "/int)=3]", + pre + "/int[@name='35'][.='0' ]", + pre + "/int[@name='40'][.='4' ]", + pre + "/int[@name='45'][.='2' ]" + // + , + meta + "/int[@name='before' ][.='0']", + meta + "/int[@name='after' ][.='0']", + meta + "/int[@name='between'][.='6']"); } static void indexFacetSingleValued() { - indexFacets("40","t_s1"); + indexFacets("40", "t_s1"); } @Test public void testFacetSingleValued() { doFacets("t_s1"); } + @Test public void testFacetSingleValuedFcs() { - doFacets("t_s1","facet.method","fcs"); + doFacets("t_s1", "facet.method", "fcs"); } static void indexFacets(String idPrefix, String f) { - add_doc("id", idPrefix+"1", f, "A"); 
- add_doc("id", idPrefix+"2", f, "B"); - add_doc("id", idPrefix+"3", f, "C"); - add_doc("id", idPrefix+"4", f, "C"); - add_doc("id", idPrefix+"5", f, "D"); - add_doc("id", idPrefix+"6", f, "E"); - add_doc("id", idPrefix+"7", f, "E"); - add_doc("id", idPrefix+"8", f, "E"); - add_doc("id", idPrefix+"9", f, "F"); - add_doc("id", idPrefix+"10", f, "G"); - add_doc("id", idPrefix+"11", f, "G"); - add_doc("id", idPrefix+"12", f, "G"); - add_doc("id", idPrefix+"13", f, "G"); - add_doc("id", idPrefix+"14", f, "G"); + add_doc("id", idPrefix + "1", f, "A"); + add_doc("id", idPrefix + "2", f, "B"); + add_doc("id", idPrefix + "3", f, "C"); + add_doc("id", idPrefix + "4", f, "C"); + add_doc("id", idPrefix + "5", f, "D"); + add_doc("id", idPrefix + "6", f, "E"); + add_doc("id", idPrefix + "7", f, "E"); + add_doc("id", idPrefix + "8", f, "E"); + add_doc("id", idPrefix + "9", f, "F"); + add_doc("id", idPrefix + "10", f, "G"); + add_doc("id", idPrefix + "11", f, "G"); + add_doc("id", idPrefix + "12", f, "G"); + add_doc("id", idPrefix + "13", f, "G"); + add_doc("id", idPrefix + "14", f, "G"); } public void doFacets(String f, String... params) { - String pre = "//lst[@name='"+f+"']"; - String notc = "id:[* TO *] -"+f+":C"; - - - assertQ("check counts for unlimited facet", - req(params, "q", "id:[* TO *]", "indent","true" - ,"facet", "true" - ,"facet.field", f - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=7]" - - ,pre+"/int[@name='G'][.='5']" - ,pre+"/int[@name='E'][.='3']" - ,pre+"/int[@name='C'][.='2']" - - ,pre+"/int[@name='A'][.='1']" - ,pre+"/int[@name='B'][.='1']" - ,pre+"/int[@name='D'][.='1']" - ,pre+"/int[@name='F'][.='1']" - ); - - assertQ("check counts for facet with generous limit", - req(params, "q", "id:[* TO *]" - ,"facet", "true" - ,"facet.limit", "100" - ,"facet.field", f - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=7]" - - ,pre+"/int[1][@name='G'][.='5']" - ,pre+"/int[2][@name='E'][.='3']" - ,pre+"/int[3][@name='C'][.='2']" - - ,pre+"/int[@name='A'][.='1']" - ,pre+"/int[@name='B'][.='1']" - ,pre+"/int[@name='D'][.='1']" - ,pre+"/int[@name='F'][.='1']" - ); - - assertQ("check counts for limited facet", - req(params, "q", "id:[* TO *]" - ,"facet", "true" - ,"facet.limit", "2" - ,"facet.field", f - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=2]" - - ,pre+"/int[1][@name='G'][.='5']" - ,pre+"/int[2][@name='E'][.='3']" - ); - - assertQ("check offset", - req(params, "q", "id:[* TO *]" - ,"facet", "true" - ,"facet.offset", "1" - ,"facet.limit", "1" - ,"facet.field", f - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=1]" - - ,pre+"/int[1][@name='E'][.='3']" - ); - - assertQ("test sorted facet paging with zero (don't count in limit)", - req(params, "q", "id:[* TO *]" - ,"fq",notc - ,"facet", "true" - ,"facet.field", f - ,"facet.mincount","1" - ,"facet.offset","0" - ,"facet.limit","6" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=6]" - ,pre+"/int[1][@name='G'][.='5']" - ,pre+"/int[2][@name='E'][.='3']" - ,pre+"/int[3][@name='A'][.='1']" - ,pre+"/int[4][@name='B'][.='1']" - ,pre+"/int[5][@name='D'][.='1']" - ,pre+"/int[6][@name='F'][.='1']" - ); - - assertQ("test sorted facet paging with zero (test offset correctness)", - req(params, "q", "id:[* TO *]" - ,"fq",notc - ,"facet", "true" - ,"facet.field", f - ,"facet.mincount","1" - ,"facet.offset","3" - ,"facet.limit","2" - ,"facet.sort","count" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=2]" - ,pre+"/int[1][@name='B'][.='1']" - ,pre+"/int[2][@name='D'][.='1']" - ); - - assertQ("test facet unsorted paging", - 
req(params, "q", "id:[* TO *]" - ,"fq",notc - ,"facet", "true" - ,"facet.field", f - ,"facet.mincount","1" - ,"facet.offset","0" - ,"facet.limit","6" - ,"facet.sort","index" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=6]" - ,pre+"/int[1][@name='A'][.='1']" - ,pre+"/int[2][@name='B'][.='1']" - ,pre+"/int[3][@name='D'][.='1']" - ,pre+"/int[4][@name='E'][.='3']" - ,pre+"/int[5][@name='F'][.='1']" - ,pre+"/int[6][@name='G'][.='5']" - ); - - assertQ("test facet unsorted paging", - req(params, "q", "id:[* TO *]" - ,"fq",notc - ,"facet", "true" - ,"facet.field", f - ,"facet.mincount","1" - ,"facet.offset","3" - ,"facet.limit","2" - ,"facet.sort","index" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=2]" - ,pre+"/int[1][@name='E'][.='3']" - ,pre+"/int[2][@name='F'][.='1']" - ); - - assertQ("test facet unsorted paging, mincount=2", - req(params, "q", "id:[* TO *]" - ,"fq",notc - ,"facet", "true" - ,"facet.field", f - ,"facet.mincount","2" - ,"facet.offset","1" - ,"facet.limit","2" - ,"facet.sort","index" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=1]" - ,pre+"/int[1][@name='G'][.='5']" - ); - } + String pre = "//lst[@name='" + f + "']"; + String notc = "id:[* TO *] -" + f + ":C"; + + assertQ( + "check counts for unlimited facet", + req(params, "q", "id:[* TO *]", "indent", "true", "facet", "true", "facet.field", f), + "*[count(//lst[@name='facet_fields']/lst/int)=7]", + pre + "/int[@name='G'][.='5']", + pre + "/int[@name='E'][.='3']", + pre + "/int[@name='C'][.='2']", + pre + "/int[@name='A'][.='1']", + pre + "/int[@name='B'][.='1']", + pre + "/int[@name='D'][.='1']", + pre + "/int[@name='F'][.='1']"); + assertQ( + "check counts for facet with generous limit", + req(params, "q", "id:[* TO *]", "facet", "true", "facet.limit", "100", "facet.field", f), + "*[count(//lst[@name='facet_fields']/lst/int)=7]", + pre + "/int[1][@name='G'][.='5']", + pre + "/int[2][@name='E'][.='3']", + pre + "/int[3][@name='C'][.='2']", + pre + "/int[@name='A'][.='1']", + pre + "/int[@name='B'][.='1']", + pre + "/int[@name='D'][.='1']", + pre + "/int[@name='F'][.='1']"); + + assertQ( + "check counts for limited facet", + req(params, "q", "id:[* TO *]", "facet", "true", "facet.limit", "2", "facet.field", f), + "*[count(//lst[@name='facet_fields']/lst/int)=2]", + pre + "/int[1][@name='G'][.='5']", + pre + "/int[2][@name='E'][.='3']"); + + assertQ( + "check offset", + req( + params, + "q", + "id:[* TO *]", + "facet", + "true", + "facet.offset", + "1", + "facet.limit", + "1", + "facet.field", + f), + "*[count(//lst[@name='facet_fields']/lst/int)=1]", + pre + "/int[1][@name='E'][.='3']"); + + assertQ( + "test sorted facet paging with zero (don't count in limit)", + req( + params, + "q", + "id:[* TO *]", + "fq", + notc, + "facet", + "true", + "facet.field", + f, + "facet.mincount", + "1", + "facet.offset", + "0", + "facet.limit", + "6"), + "*[count(//lst[@name='facet_fields']/lst/int)=6]", + pre + "/int[1][@name='G'][.='5']", + pre + "/int[2][@name='E'][.='3']", + pre + "/int[3][@name='A'][.='1']", + pre + "/int[4][@name='B'][.='1']", + pre + "/int[5][@name='D'][.='1']", + pre + "/int[6][@name='F'][.='1']"); + + assertQ( + "test sorted facet paging with zero (test offset correctness)", + req( + params, + "q", + "id:[* TO *]", + "fq", + notc, + "facet", + "true", + "facet.field", + f, + "facet.mincount", + "1", + "facet.offset", + "3", + "facet.limit", + "2", + "facet.sort", + "count"), + "*[count(//lst[@name='facet_fields']/lst/int)=2]", + pre + "/int[1][@name='B'][.='1']", + pre + 
"/int[2][@name='D'][.='1']"); + + assertQ( + "test facet unsorted paging", + req( + params, + "q", + "id:[* TO *]", + "fq", + notc, + "facet", + "true", + "facet.field", + f, + "facet.mincount", + "1", + "facet.offset", + "0", + "facet.limit", + "6", + "facet.sort", + "index"), + "*[count(//lst[@name='facet_fields']/lst/int)=6]", + pre + "/int[1][@name='A'][.='1']", + pre + "/int[2][@name='B'][.='1']", + pre + "/int[3][@name='D'][.='1']", + pre + "/int[4][@name='E'][.='3']", + pre + "/int[5][@name='F'][.='1']", + pre + "/int[6][@name='G'][.='5']"); + + assertQ( + "test facet unsorted paging", + req( + params, + "q", + "id:[* TO *]", + "fq", + notc, + "facet", + "true", + "facet.field", + f, + "facet.mincount", + "1", + "facet.offset", + "3", + "facet.limit", + "2", + "facet.sort", + "index"), + "*[count(//lst[@name='facet_fields']/lst/int)=2]", + pre + "/int[1][@name='E'][.='3']", + pre + "/int[2][@name='F'][.='1']"); + + assertQ( + "test facet unsorted paging, mincount=2", + req( + params, + "q", + "id:[* TO *]", + "fq", + notc, + "facet", + "true", + "facet.field", + f, + "facet.mincount", + "2", + "facet.offset", + "1", + "facet.limit", + "2", + "facet.sort", + "index"), + "*[count(//lst[@name='facet_fields']/lst/int)=1]", + pre + "/int[1][@name='G'][.='5']"); + } static void indexFacetPrefixMultiValued() { - indexFacetPrefix("50","t_s","","ignore_s"); + indexFacetPrefix("50", "t_s", "", "ignore_s"); } @Test public void testFacetPrefixMultiValued() { - doFacetPrefix("t_s", null, "", "facet.method","enum"); + doFacetPrefix("t_s", null, "", "facet.method", "enum"); doFacetPrefix("t_s", null, "", "facet.method", "enum", "facet.enum.cache.minDf", "3"); doFacetPrefix("t_s", null, "", "facet.method", "enum", "facet.enum.cache.minDf", "100"); doFacetPrefix("t_s", null, "", "facet.method", "fc"); @@ -2238,60 +2739,83 @@ public void testFacetPrefixMultiValued() { } @Test - public void testFacetExistsShouldThrowExceptionForMincountGreaterThanOne () throws Exception { + public void testFacetExistsShouldThrowExceptionForMincountGreaterThanOne() throws Exception { final String f = "t_s"; final List msg = Arrays.asList("facet.mincount", "facet.exists", f); Collections.shuffle(msg, random()); - assertQEx("checking global method or per field", msg.get(0), - req("q", "id:[* TO *]" - ,"indent","on" - ,"facet","true" - , random().nextBoolean() ? "facet.exists": "f."+f+".facet.exists", "true" - ,"facet.field", f - , random().nextBoolean() ? "facet.mincount" : "f."+f+".facet.mincount" , - "" + (2+random().nextInt(Integer.MAX_VALUE-2)) - ) - , ErrorCode.BAD_REQUEST); - - assertQ("overriding per field", - req("q", "id:[* TO *]" - ,"indent","on" - ,"facet","true" - ,"facet.exists", "true" - ,"f."+f+".facet.exists", "false" - ,"facet.field", f - ,"facet.mincount",""+(2+random().nextInt(Integer.MAX_VALUE-2)) - ), - "//lst[@name='facet_fields']/lst[@name='"+f+"']"); - - assertQ("overriding per field", - req("q", "id:[* TO *]" - ,"indent","on" - ,"facet","true" - ,"facet.exists", "true" - ,"facet.field", f - ,"facet.mincount",""+(2+random().nextInt(Integer.MAX_VALUE-2)) - ,"f."+f+".facet.mincount", random().nextBoolean() ? "0":"1" - ), - "//lst[@name='facet_fields']/lst[@name='"+f+"']"); - + assertQEx( + "checking global method or per field", + msg.get(0), + req( + "q", + "id:[* TO *]", + "indent", + "on", + "facet", + "true", + random().nextBoolean() ? "facet.exists" : "f." + f + ".facet.exists", + "true", + "facet.field", + f, + random().nextBoolean() ? "facet.mincount" : "f." 
+ f + ".facet.mincount", + "" + (2 + random().nextInt(Integer.MAX_VALUE - 2))), + ErrorCode.BAD_REQUEST); + + assertQ( + "overriding per field", + req( + "q", + "id:[* TO *]", + "indent", + "on", + "facet", + "true", + "facet.exists", + "true", + "f." + f + ".facet.exists", + "false", + "facet.field", + f, + "facet.mincount", + "" + (2 + random().nextInt(Integer.MAX_VALUE - 2))), + "//lst[@name='facet_fields']/lst[@name='" + f + "']"); + + assertQ( + "overriding per field", + req( + "q", + "id:[* TO *]", + "indent", + "on", + "facet", + "true", + "facet.exists", + "true", + "facet.field", + f, + "facet.mincount", + "" + (2 + random().nextInt(Integer.MAX_VALUE - 2)), + "f." + f + ".facet.mincount", + random().nextBoolean() ? "0" : "1"), + "//lst[@name='facet_fields']/lst[@name='" + f + "']"); } static void indexFacetPrefixSingleValued() { - indexFacetPrefix("60","tt_s1","","ignore_s"); + indexFacetPrefix("60", "tt_s1", "", "ignore_s"); } @Test public void testFacetPrefixSingleValued() { doFacetPrefix("tt_s1", null, ""); } - + @Test public void testFacetPrefixSingleValuedFcs() { - doFacetPrefix("tt_s1", null, "", "facet.method","fcs"); - doFacetPrefix("tt_s1", "{!threads=0}", "", "facet.method","fcs"); // direct execution - doFacetPrefix("tt_s1", "{!threads=-1}", "", "facet.method","fcs"); // default / unlimited threads - doFacetPrefix("tt_s1", "{!threads=2}", "", "facet.method","fcs"); // specific number of threads + doFacetPrefix("tt_s1", null, "", "facet.method", "fcs"); + doFacetPrefix("tt_s1", "{!threads=0}", "", "facet.method", "fcs"); // direct execution + doFacetPrefix( + "tt_s1", "{!threads=-1}", "", "facet.method", "fcs"); // default / unlimited threads + doFacetPrefix("tt_s1", "{!threads=2}", "", "facet.method", "fcs"); // specific number of threads } @Test @@ -2302,75 +2826,101 @@ public void testFacetExclude() { } private void doFacetExclude(String f, String g, String termSuffix, String... 
params) { - String indent="on"; - String pre = "//lst[@name='"+f+"']"; - - final SolrQueryRequest req = req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", f - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.excludeTerms","B,BBB"+termSuffix - ); - - assertQ("test facet.exclude", - req - ,"*[count(//lst[@name='facet_fields']/lst/int)=10]" - ,pre+"/int[1][@name='BBB'][.='3']" - ,pre+"/int[2][@name='CCC'][.='3']" - ,pre+"/int[3][@name='CCC"+termSuffix+"'][.='3']" - ,pre+"/int[4][@name='BB'][.='2']" - ,pre+"/int[5][@name='BB"+termSuffix+"'][.='2']" - ,pre+"/int[6][@name='CC'][.='2']" - ,pre+"/int[7][@name='CC"+termSuffix+"'][.='2']" - ,pre+"/int[8][@name='AAA'][.='1']" - ,pre+"/int[9][@name='AAA"+termSuffix+"'][.='1']" - ,pre+"/int[10][@name='B"+termSuffix+"'][.='1']" - ); - - final SolrQueryRequest groupReq = req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", f - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.excludeTerms","B,BBB"+termSuffix - ,"group","true" - ,"group.field",g - ,"group.facet","true" - ,"facet.missing","true" - ); - - assertQ("test facet.exclude for grouped facets", - groupReq - ,"*[count(//lst[@name='facet_fields']/lst/int)=11]" - ,pre+"/int[1][@name='CCC'][.='3']" - ,pre+"/int[2][@name='CCC"+termSuffix+"'][.='3']" - ,pre+"/int[3][@name='BBB'][.='2']" - ,pre+"/int[4][@name='AAA'][.='1']" - ,pre+"/int[5][@name='AAA"+termSuffix+"'][.='1']" - ,pre+"/int[6][@name='B"+termSuffix+"'][.='1']" - ,pre+"/int[7][@name='BB'][.='1']" - ,pre+"/int[8][@name='BB"+termSuffix+"'][.='1']" - ,pre+"/int[9][@name='CC'][.='1']" - ,pre+"/int[10][@name='CC"+termSuffix+"'][.='1']" - ,pre+"/int[11][.='1']" - ); + String indent = "on"; + String pre = "//lst[@name='" + f + "']"; + + final SolrQueryRequest req = + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + f, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.excludeTerms", + "B,BBB" + termSuffix); + + assertQ( + "test facet.exclude", + req, + "*[count(//lst[@name='facet_fields']/lst/int)=10]", + pre + "/int[1][@name='BBB'][.='3']", + pre + "/int[2][@name='CCC'][.='3']", + pre + "/int[3][@name='CCC" + termSuffix + "'][.='3']", + pre + "/int[4][@name='BB'][.='2']", + pre + "/int[5][@name='BB" + termSuffix + "'][.='2']", + pre + "/int[6][@name='CC'][.='2']", + pre + "/int[7][@name='CC" + termSuffix + "'][.='2']", + pre + "/int[8][@name='AAA'][.='1']", + pre + "/int[9][@name='AAA" + termSuffix + "'][.='1']", + pre + "/int[10][@name='B" + termSuffix + "'][.='1']"); + + final SolrQueryRequest groupReq = + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + f, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.excludeTerms", + "B,BBB" + termSuffix, + "group", + "true", + "group.field", + g, + "group.facet", + "true", + "facet.missing", + "true"); + + assertQ( + "test facet.exclude for grouped facets", + groupReq, + "*[count(//lst[@name='facet_fields']/lst/int)=11]", + pre + "/int[1][@name='CCC'][.='3']", + pre + "/int[2][@name='CCC" + termSuffix + "'][.='3']", + pre + "/int[3][@name='BBB'][.='2']", + pre + "/int[4][@name='AAA'][.='1']", + pre + "/int[5][@name='AAA" + termSuffix + "'][.='1']", + pre + "/int[6][@name='B" + termSuffix + "'][.='1']", + pre + 
"/int[7][@name='BB'][.='1']", + pre + "/int[8][@name='BB" + termSuffix + "'][.='1']", + pre + "/int[9][@name='CC'][.='1']", + pre + "/int[10][@name='CC" + termSuffix + "'][.='1']", + pre + "/int[11][.='1']"); ModifiableSolrParams modifiableSolrParams = new ModifiableSolrParams(groupReq.getParams()); modifiableSolrParams.set("facet.limit", "0"); groupReq.setParams(modifiableSolrParams); - assertQ("test facet.exclude for grouped facets with facet.limit=0, facet.missing=true", - groupReq - ,"*[count(//lst[@name='facet_fields']/lst/int)=1]" - ,pre+"/int[.='1']" - ); + assertQ( + "test facet.exclude for grouped facets with facet.limit=0, facet.missing=true", + groupReq, + "*[count(//lst[@name='facet_fields']/lst/int)=1]", + pre + "/int[.='1']"); } @Test @@ -2382,9 +2932,25 @@ public void testFacetContainsAndExclude() { if (ignoreCase) { contains = randomizeStringCasing(contains); groupContains = randomizeStringCasing(groupContains); - doFacetContainsAndExclude("contains_s1", "contains_group_s1", "Astra", contains, groupContains, "facet.method", method, "facet.contains.ignoreCase", "true"); + doFacetContainsAndExclude( + "contains_s1", + "contains_group_s1", + "Astra", + contains, + groupContains, + "facet.method", + method, + "facet.contains.ignoreCase", + "true"); } else { - doFacetContainsAndExclude("contains_s1", "contains_group_s1", "Astra", contains, groupContains, "facet.method", method); + doFacetContainsAndExclude( + "contains_s1", + "contains_group_s1", + "Astra", + contains, + groupContains, + "facet.method", + method); } } } @@ -2409,1162 +2975,1878 @@ private String randomizeStringCasing(String str) { return new String(characters); } - private void doFacetContainsAndExclude(String f, String g, String termSuffix, String contains, String groupContains, String... params) { - String indent="on"; - String pre = "//lst[@name='"+f+"']"; - - final SolrQueryRequest req = req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", f - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.contains",contains - ,"facet.excludeTerms","BBB"+termSuffix - ); - - assertQ("test facet.contains with facet.exclude", - req - ,"*[count(//lst[@name='facet_fields']/lst/int)=2]" - ,pre+"/int[1][@name='BB"+termSuffix+"'][.='2']" - ,pre+"/int[2][@name='B"+termSuffix+"'][.='1']" - ); - - final SolrQueryRequest groupReq = req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", f - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.contains",groupContains - ,"facet.excludeTerms","AAA"+termSuffix - ,"group","true" - ,"group.field",g - ,"group.facet","true" - ); - - assertQ("test facet.contains with facet.exclude for grouped facets", - groupReq - ,"*[count(//lst[@name='facet_fields']/lst/int)=5]" - ,pre+"/int[1][@name='CCC"+termSuffix+"'][.='3']" - ,pre+"/int[2][@name='BBB"+termSuffix+"'][.='2']" - ,pre+"/int[3][@name='B"+termSuffix+"'][.='1']" - ,pre+"/int[4][@name='BB"+termSuffix+"'][.='1']" - ,pre+"/int[5][@name='CC"+termSuffix+"'][.='1']" - ); - } - + private void doFacetContainsAndExclude( + String f, + String g, + String termSuffix, + String contains, + String groupContains, + String... 
params) { + String indent = "on"; + String pre = "//lst[@name='" + f + "']"; + + final SolrQueryRequest req = + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + f, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.contains", + contains, + "facet.excludeTerms", + "BBB" + termSuffix); + + assertQ( + "test facet.contains with facet.exclude", + req, + "*[count(//lst[@name='facet_fields']/lst/int)=2]", + pre + "/int[1][@name='BB" + termSuffix + "'][.='2']", + pre + "/int[2][@name='B" + termSuffix + "'][.='1']"); + + final SolrQueryRequest groupReq = + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + f, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.contains", + groupContains, + "facet.excludeTerms", + "AAA" + termSuffix, + "group", + "true", + "group.field", + g, + "group.facet", + "true"); + + assertQ( + "test facet.contains with facet.exclude for grouped facets", + groupReq, + "*[count(//lst[@name='facet_fields']/lst/int)=5]", + pre + "/int[1][@name='CCC" + termSuffix + "'][.='3']", + pre + "/int[2][@name='BBB" + termSuffix + "'][.='2']", + pre + "/int[3][@name='B" + termSuffix + "'][.='1']", + pre + "/int[4][@name='BB" + termSuffix + "'][.='1']", + pre + "/int[5][@name='CC" + termSuffix + "'][.='1']"); + } + @Test - //@Ignore("SOLR-8466 - facet.method=uif ignores facet.contains") + // @Ignore("SOLR-8466 - facet.method=uif ignores facet.contains") public void testFacetContainsUif() { - doFacetContains("contains_s1", "contains_group_s1", "Astra", "BAst", "Ast", "facet.method", "uif"); + doFacetContains( + "contains_s1", "contains_group_s1", "Astra", "BAst", "Ast", "facet.method", "uif"); doFacetPrefix("contains_s1", null, "Astra", "facet.method", "uif", "facet.contains", "Ast"); - doFacetPrefix("contains_s1", null, "Astra", "facet.method", "uif", "facet.contains", "aST", "facet.contains.ignoreCase", "true"); + doFacetPrefix( + "contains_s1", + null, + "Astra", + "facet.method", + "uif", + "facet.contains", + "aST", + "facet.contains.ignoreCase", + "true"); } static void indexFacetContains() { - indexFacetPrefix("70","contains_s1","","contains_group_s1"); - indexFacetPrefix("80","contains_s1","Astra","contains_group_s1"); + indexFacetPrefix("70", "contains_s1", "", "contains_group_s1"); + indexFacetPrefix("80", "contains_s1", "Astra", "contains_group_s1"); } - + @Test public void testFacetContains() { - doFacetContains("contains_s1", "contains_group_s1", "Astra", "BAst", "Ast", "facet.method", "enum"); - doFacetContains("contains_s1", "contains_group_s1", "Astra", "BAst", "Ast", "facet.method", "fcs"); - doFacetContains("contains_s1", "contains_group_s1", "Astra", "BAst", "Ast", "facet.method", "fc"); - doFacetContains("contains_s1", "contains_group_s1", "Astra", "bAst", "ast", "facet.method", "enum", "facet.contains.ignoreCase", "true"); - doFacetContains("contains_s1", "contains_group_s1", "Astra", "baSt", "ast", "facet.method", "fcs", "facet.contains.ignoreCase", "true"); - doFacetContains("contains_s1", "contains_group_s1", "Astra", "basT", "ast", "facet.method", "fc", "facet.contains.ignoreCase", "true"); + doFacetContains( + "contains_s1", "contains_group_s1", "Astra", "BAst", "Ast", "facet.method", "enum"); + doFacetContains( + "contains_s1", "contains_group_s1", "Astra", "BAst", "Ast", "facet.method", "fcs"); + doFacetContains( + "contains_s1", 
"contains_group_s1", "Astra", "BAst", "Ast", "facet.method", "fc"); + doFacetContains( + "contains_s1", + "contains_group_s1", + "Astra", + "bAst", + "ast", + "facet.method", + "enum", + "facet.contains.ignoreCase", + "true"); + doFacetContains( + "contains_s1", + "contains_group_s1", + "Astra", + "baSt", + "ast", + "facet.method", + "fcs", + "facet.contains.ignoreCase", + "true"); + doFacetContains( + "contains_s1", + "contains_group_s1", + "Astra", + "basT", + "ast", + "facet.method", + "fc", + "facet.contains.ignoreCase", + "true"); doFacetPrefix("contains_s1", null, "Astra", "facet.method", "enum", "facet.contains", "Ast"); doFacetPrefix("contains_s1", null, "Astra", "facet.method", "fcs", "facet.contains", "Ast"); doFacetPrefix("contains_s1", null, "Astra", "facet.method", "fc", "facet.contains", "Ast"); - doFacetPrefix("contains_s1", null, "Astra", "facet.method", "enum", "facet.contains", "aSt", "facet.contains.ignoreCase", "true"); - doFacetPrefix("contains_s1", null, "Astra", "facet.method", "fcs", "facet.contains", "asT", "facet.contains.ignoreCase", "true"); - doFacetPrefix("contains_s1", null, "Astra", "facet.method", "fc", "facet.contains", "aST", "facet.contains.ignoreCase", "true"); + doFacetPrefix( + "contains_s1", + null, + "Astra", + "facet.method", + "enum", + "facet.contains", + "aSt", + "facet.contains.ignoreCase", + "true"); + doFacetPrefix( + "contains_s1", + null, + "Astra", + "facet.method", + "fcs", + "facet.contains", + "asT", + "facet.contains.ignoreCase", + "true"); + doFacetPrefix( + "contains_s1", + null, + "Astra", + "facet.method", + "fc", + "facet.contains", + "aST", + "facet.contains.ignoreCase", + "true"); doFacetExistsPrefix("contains_s1", null, "Astra", "facet.contains", "Ast"); } static void indexFacetPrefix(String idPrefix, String f, String termSuffix, String g) { - add_doc("id", idPrefix+"1", f, "AAA"+termSuffix, g, "A"); - add_doc("id", idPrefix+"2", f, "B"+termSuffix, g, "A"); - add_doc("id", idPrefix+"3", f, "BB"+termSuffix, g, "B"); - add_doc("id", idPrefix+"4", f, "BB"+termSuffix, g, "B"); - add_doc("id", idPrefix+"5", f, "BBB"+termSuffix, g, "B"); - add_doc("id", idPrefix+"6", f, "BBB"+termSuffix, g, "B"); - add_doc("id", idPrefix+"7", f, "BBB"+termSuffix, g, "C"); - add_doc("id", idPrefix+"8", f, "CC"+termSuffix, g, "C"); - add_doc("id", idPrefix+"9", f, "CC"+termSuffix, g, "C"); - add_doc("id", idPrefix+"10", f, "CCC"+termSuffix, g, "C"); - add_doc("id", idPrefix+"11", f, "CCC"+termSuffix, g, "D"); - add_doc("id", idPrefix+"12", f, "CCC"+termSuffix, g, "E"); + add_doc("id", idPrefix + "1", f, "AAA" + termSuffix, g, "A"); + add_doc("id", idPrefix + "2", f, "B" + termSuffix, g, "A"); + add_doc("id", idPrefix + "3", f, "BB" + termSuffix, g, "B"); + add_doc("id", idPrefix + "4", f, "BB" + termSuffix, g, "B"); + add_doc("id", idPrefix + "5", f, "BBB" + termSuffix, g, "B"); + add_doc("id", idPrefix + "6", f, "BBB" + termSuffix, g, "B"); + add_doc("id", idPrefix + "7", f, "BBB" + termSuffix, g, "C"); + add_doc("id", idPrefix + "8", f, "CC" + termSuffix, g, "C"); + add_doc("id", idPrefix + "9", f, "CC" + termSuffix, g, "C"); + add_doc("id", idPrefix + "10", f, "CCC" + termSuffix, g, "C"); + add_doc("id", idPrefix + "11", f, "CCC" + termSuffix, g, "D"); + add_doc("id", idPrefix + "12", f, "CCC" + termSuffix, g, "E"); assertU(commit()); } public void doFacetPrefix(String f, String local, String termSuffix, String... params) { - String indent="on"; - String pre = "//lst[@name='"+f+"']"; - String lf = local==null ? 
f : local+f; - - - assertQ("test facet.prefix middle, exact match first term", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.prefix","B" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=3]" - ,pre+"/int[1][@name='BBB"+termSuffix+"'][.='3']" - ,pre+"/int[2][@name='BB"+termSuffix+"'][.='2']" - ,pre+"/int[3][@name='B"+termSuffix+"'][.='1']" - ); - - assertQ("test facet.prefix middle, exact match first term, unsorted", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","index" - ,"facet.prefix","B" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=3]" - ,pre+"/int[1][@name='B"+termSuffix+"'][.='1']" - ,pre+"/int[2][@name='BB"+termSuffix+"'][.='2']" - ,pre+"/int[3][@name='BBB"+termSuffix+"'][.='3']" - ); - - assertQ("test facet.prefix middle, paging", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","1" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.prefix","B" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=2]" - ,pre+"/int[1][@name='BB"+termSuffix+"'][.='2']" - ,pre+"/int[2][@name='B"+termSuffix+"'][.='1']" - ); - - assertQ("test facet.prefix middle, paging", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","1" - ,"facet.limit","1" - ,"facet.sort","count" - ,"facet.prefix","B" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=1]" - ,pre+"/int[1][@name='BB"+termSuffix+"'][.='2']" - ); - - assertQ("test facet.prefix middle, paging", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","1" - ,"facet.limit","1" - ,"facet.sort","count" - ,"facet.prefix","B" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=1]" - ,pre+"/int[1][@name='BB"+termSuffix+"'][.='2']" - ); - - assertQ("test facet.prefix end, not exact match", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.prefix","C" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=2]" - ,pre+"/int[1][@name='CCC"+termSuffix+"'][.='3']" - ,pre+"/int[2][@name='CC"+termSuffix+"'][.='2']" - ); - - assertQ("test facet.prefix end, exact match", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.prefix","CC" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=2]" - ,pre+"/int[1][@name='CCC"+termSuffix+"'][.='3']" - ,pre+"/int[2][@name='CC"+termSuffix+"'][.='2']" - ); - - assertQ("test facet.prefix past end", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.prefix","X" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=0]" - ); - - assertQ("test facet.prefix past end", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","1" - ,"facet.limit","-1" - ,"facet.sort","count" - ,"facet.prefix","X" - ) - 
,"*[count(//lst[@name='facet_fields']/lst/int)=0]" - ); - - assertQ("test facet.prefix at start, exact match", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.prefix","AAA" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=1]" - ,pre+"/int[1][@name='AAA"+termSuffix+"'][.='1']" - ); - assertQ("test facet.prefix at Start, not exact match", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.prefix","AA" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=1]" - ,pre+"/int[1][@name='AAA"+termSuffix+"'][.='1']" - ); - assertQ("test facet.prefix at Start, not exact match", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.prefix","AA" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=1]" - ,pre+"/int[1][@name='AAA"+termSuffix+"'][.='1']" - ); - assertQ("test facet.prefix before start", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.prefix","999" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=0]" - ); - - assertQ("test facet.prefix before start", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","2" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.prefix","999" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=0]" - ); + String indent = "on"; + String pre = "//lst[@name='" + f + "']"; + String lf = local == null ? 
f : local + f; + + assertQ( + "test facet.prefix middle, exact match first term", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "B"), + "*[count(//lst[@name='facet_fields']/lst/int)=3]", + pre + "/int[1][@name='BBB" + termSuffix + "'][.='3']", + pre + "/int[2][@name='BB" + termSuffix + "'][.='2']", + pre + "/int[3][@name='B" + termSuffix + "'][.='1']"); + + assertQ( + "test facet.prefix middle, exact match first term, unsorted", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "index", + "facet.prefix", + "B"), + "*[count(//lst[@name='facet_fields']/lst/int)=3]", + pre + "/int[1][@name='B" + termSuffix + "'][.='1']", + pre + "/int[2][@name='BB" + termSuffix + "'][.='2']", + pre + "/int[3][@name='BBB" + termSuffix + "'][.='3']"); + + assertQ( + "test facet.prefix middle, paging", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "1", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "B"), + "*[count(//lst[@name='facet_fields']/lst/int)=2]", + pre + "/int[1][@name='BB" + termSuffix + "'][.='2']", + pre + "/int[2][@name='B" + termSuffix + "'][.='1']"); + + assertQ( + "test facet.prefix middle, paging", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "1", + "facet.limit", + "1", + "facet.sort", + "count", + "facet.prefix", + "B"), + "*[count(//lst[@name='facet_fields']/lst/int)=1]", + pre + "/int[1][@name='BB" + termSuffix + "'][.='2']"); + + assertQ( + "test facet.prefix middle, paging", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "1", + "facet.limit", + "1", + "facet.sort", + "count", + "facet.prefix", + "B"), + "*[count(//lst[@name='facet_fields']/lst/int)=1]", + pre + "/int[1][@name='BB" + termSuffix + "'][.='2']"); + + assertQ( + "test facet.prefix end, not exact match", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "C"), + "*[count(//lst[@name='facet_fields']/lst/int)=2]", + pre + "/int[1][@name='CCC" + termSuffix + "'][.='3']", + pre + "/int[2][@name='CC" + termSuffix + "'][.='2']"); + + assertQ( + "test facet.prefix end, exact match", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "CC"), + "*[count(//lst[@name='facet_fields']/lst/int)=2]", + pre + "/int[1][@name='CCC" + termSuffix + "'][.='3']", + pre + "/int[2][@name='CC" + termSuffix + "'][.='2']"); + + assertQ( + "test facet.prefix past end", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "X"), + 
"*[count(//lst[@name='facet_fields']/lst/int)=0]"); + + assertQ( + "test facet.prefix past end", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "1", + "facet.limit", + "-1", + "facet.sort", + "count", + "facet.prefix", + "X"), + "*[count(//lst[@name='facet_fields']/lst/int)=0]"); + + assertQ( + "test facet.prefix at start, exact match", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "AAA"), + "*[count(//lst[@name='facet_fields']/lst/int)=1]", + pre + "/int[1][@name='AAA" + termSuffix + "'][.='1']"); + assertQ( + "test facet.prefix at Start, not exact match", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "AA"), + "*[count(//lst[@name='facet_fields']/lst/int)=1]", + pre + "/int[1][@name='AAA" + termSuffix + "'][.='1']"); + assertQ( + "test facet.prefix at Start, not exact match", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "AA"), + "*[count(//lst[@name='facet_fields']/lst/int)=1]", + pre + "/int[1][@name='AAA" + termSuffix + "'][.='1']"); + assertQ( + "test facet.prefix before start", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "999"), + "*[count(//lst[@name='facet_fields']/lst/int)=0]"); + + assertQ( + "test facet.prefix before start", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "2", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "999"), + "*[count(//lst[@name='facet_fields']/lst/int)=0]"); // test offset beyond what is collected internally in queue assertQ( - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", lf - ,"facet.mincount","3" - ,"facet.offset","5" - ,"facet.limit","10" - ,"facet.sort","count" - ,"facet.prefix","CC" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=0]" - ); + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + lf, + "facet.mincount", + "3", + "facet.offset", + "5", + "facet.limit", + "10", + "facet.sort", + "count", + "facet.prefix", + "CC"), + "*[count(//lst[@name='facet_fields']/lst/int)=0]"); } public void doFacetExistsPrefix(String f, String local, String termSuffix, String... params) { - String indent="on"; - String pre = "//lst[@name='"+f+"']"; - String lf = local==null ? 
f : local+f; - - assertQ("test field facet.method", - req(params, "q", "id:[* TO *]" - ,"indent", indent - ,"facet", "true" - ,"f."+lf+".facet.exists", "true" - ,"facet.field", lf - ,"facet.mincount", "0" - ,"facet.offset", "0" - ,"facet.limit", "100" - ,"facet.sort", "count" - ,"facet.prefix", "B" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=3]" - ,pre+"/int[1][@name='B"+termSuffix+"'][.='1']" - ,pre+"/int[2][@name='BB"+termSuffix+"'][.='1']" - ,pre+"/int[3][@name='BBB"+termSuffix+"'][.='1']" - ); - - assertQ("test facet.prefix middle, exact match first term", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.exists", "true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.prefix","B" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=3]" - ,pre+"/int[1][@name='B"+termSuffix+"'][.='1']" - ,pre+"/int[2][@name='BB"+termSuffix+"'][.='1']" - ,pre+"/int[3][@name='BBB"+termSuffix+"'][.='1']" - ); - - assertQ("test facet.prefix middle, exact match first term, unsorted", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.exists", "true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","index" - ,"facet.prefix","B" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=3]" - ,pre+"/int[1][@name='B"+termSuffix+"'][.='1']" - ,pre+"/int[2][@name='BB"+termSuffix+"'][.='1']" - ,pre+"/int[3][@name='BBB"+termSuffix+"'][.='1']" - ); - - assertQ("test facet.prefix middle, paging", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.exists", "true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","1" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.prefix","B" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=2]" - ,pre+"/int[1][@name='BB"+termSuffix+"'][.='1']" - ,pre+"/int[2][@name='BBB"+termSuffix+"'][.='1']" - ); - - assertQ("test facet.prefix middle, paging", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.exists", "true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","1" - ,"facet.limit","1" - ,"facet.sort","count" - ,"facet.prefix","B" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=1]" - ,pre+"/int[1][@name='BB"+termSuffix+"'][.='1']" - ); - - assertQ("test facet.prefix end, not exact match", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.exists", "true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.prefix","C" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=2]" - ,pre+"/int[1][@name='CC"+termSuffix+"'][.='1']" - ,pre+"/int[2][@name='CCC"+termSuffix+"'][.='1']" - ); - - assertQ("test facet.prefix end, exact match", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.exists", "true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.prefix","CC" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=2]" - ,pre+"/int[1][@name='CC"+termSuffix+"'][.='1']" - ,pre+"/int[2][@name='CCC"+termSuffix+"'][.='1']" - ); - - assertQ("test facet.prefix past end", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.exists", "true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.prefix","X" - ) - 
,"*[count(//lst[@name='facet_fields']/lst/int)=0]" - ); - - assertQ("test facet.prefix past end", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.exists", "true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","1" - ,"facet.limit","-1" - ,"facet.sort","count" - ,"facet.prefix","X" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=0]" - ); - - assertQ("test facet.prefix at start, exact match", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.exists", "true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.prefix","AAA" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=1]" - ,pre+"/int[1][@name='AAA"+termSuffix+"'][.='1']" - ); - assertQ("test facet.prefix at Start, not exact match", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.exists", "true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.prefix","AA" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=1]" - ,pre+"/int[1][@name='AAA"+termSuffix+"'][.='1']" - ); - assertQ("test facet.prefix before start", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.exists", "true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.prefix","999" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=0]" - ); - - assertQ("test facet.prefix before start", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.exists", "true" - ,"facet.field", lf - ,"facet.mincount","0" - ,"facet.offset","2" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.prefix","999" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=0]" - ); + String indent = "on"; + String pre = "//lst[@name='" + f + "']"; + String lf = local == null ? f : local + f; + + assertQ( + "test field facet.method", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "f." 
+ lf + ".facet.exists", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "B"), + "*[count(//lst[@name='facet_fields']/lst/int)=3]", + pre + "/int[1][@name='B" + termSuffix + "'][.='1']", + pre + "/int[2][@name='BB" + termSuffix + "'][.='1']", + pre + "/int[3][@name='BBB" + termSuffix + "'][.='1']"); + + assertQ( + "test facet.prefix middle, exact match first term", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.exists", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "B"), + "*[count(//lst[@name='facet_fields']/lst/int)=3]", + pre + "/int[1][@name='B" + termSuffix + "'][.='1']", + pre + "/int[2][@name='BB" + termSuffix + "'][.='1']", + pre + "/int[3][@name='BBB" + termSuffix + "'][.='1']"); + + assertQ( + "test facet.prefix middle, exact match first term, unsorted", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.exists", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "index", + "facet.prefix", + "B"), + "*[count(//lst[@name='facet_fields']/lst/int)=3]", + pre + "/int[1][@name='B" + termSuffix + "'][.='1']", + pre + "/int[2][@name='BB" + termSuffix + "'][.='1']", + pre + "/int[3][@name='BBB" + termSuffix + "'][.='1']"); + + assertQ( + "test facet.prefix middle, paging", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.exists", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "1", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "B"), + "*[count(//lst[@name='facet_fields']/lst/int)=2]", + pre + "/int[1][@name='BB" + termSuffix + "'][.='1']", + pre + "/int[2][@name='BBB" + termSuffix + "'][.='1']"); + + assertQ( + "test facet.prefix middle, paging", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.exists", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "1", + "facet.limit", + "1", + "facet.sort", + "count", + "facet.prefix", + "B"), + "*[count(//lst[@name='facet_fields']/lst/int)=1]", + pre + "/int[1][@name='BB" + termSuffix + "'][.='1']"); + + assertQ( + "test facet.prefix end, not exact match", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.exists", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "C"), + "*[count(//lst[@name='facet_fields']/lst/int)=2]", + pre + "/int[1][@name='CC" + termSuffix + "'][.='1']", + pre + "/int[2][@name='CCC" + termSuffix + "'][.='1']"); + + assertQ( + "test facet.prefix end, exact match", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.exists", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "CC"), + "*[count(//lst[@name='facet_fields']/lst/int)=2]", + pre + "/int[1][@name='CC" + termSuffix + "'][.='1']", + pre + "/int[2][@name='CCC" + termSuffix + "'][.='1']"); + + assertQ( + "test facet.prefix past end", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + 
"facet.exists", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "X"), + "*[count(//lst[@name='facet_fields']/lst/int)=0]"); + + assertQ( + "test facet.prefix past end", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.exists", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "1", + "facet.limit", + "-1", + "facet.sort", + "count", + "facet.prefix", + "X"), + "*[count(//lst[@name='facet_fields']/lst/int)=0]"); + + assertQ( + "test facet.prefix at start, exact match", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.exists", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "AAA"), + "*[count(//lst[@name='facet_fields']/lst/int)=1]", + pre + "/int[1][@name='AAA" + termSuffix + "'][.='1']"); + assertQ( + "test facet.prefix at Start, not exact match", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.exists", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "AA"), + "*[count(//lst[@name='facet_fields']/lst/int)=1]", + pre + "/int[1][@name='AAA" + termSuffix + "'][.='1']"); + assertQ( + "test facet.prefix before start", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.exists", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "999"), + "*[count(//lst[@name='facet_fields']/lst/int)=0]"); + + assertQ( + "test facet.prefix before start", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.exists", + "true", + "facet.field", + lf, + "facet.mincount", + "0", + "facet.offset", + "2", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.prefix", + "999"), + "*[count(//lst[@name='facet_fields']/lst/int)=0]"); // test offset beyond what is collected internally in queue assertQ( - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.exists", "true" - ,"facet.field", lf - ,"facet.mincount","1" - ,"facet.offset","5" - ,"facet.limit","10" - ,"facet.sort","count" - ,"facet.prefix","CC" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=0]" - ); - } - - public void doFacetContains(String f, String g, String termSuffix, String contains, String groupContains, String... 
params) { - String indent="on"; - String pre = "//lst[@name='"+f+"']"; - - assertQ("test facet.contains", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", f - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.contains",contains - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=3]" - ,pre+"/int[1][@name='BBB"+termSuffix+"'][.='3']" - ,pre+"/int[2][@name='BB"+termSuffix+"'][.='2']" - ,pre+"/int[3][@name='B"+termSuffix+"'][.='1']" - ); - - assertQ("test facet.contains for grouped facets", - req(params, "q", "id:[* TO *]" - ,"indent",indent - ,"facet","true" - ,"facet.field", f - ,"facet.mincount","0" - ,"facet.offset","0" - ,"facet.limit","100" - ,"facet.sort","count" - ,"facet.contains",groupContains - ,"group","true" - ,"group.field",g - ,"group.facet","true" - ) - ,"*[count(//lst[@name='facet_fields']/lst/int)=6]" - ,pre+"/int[1][@name='CCC"+termSuffix+"'][.='3']" - ,pre+"/int[2][@name='BBB"+termSuffix+"'][.='2']" - ,pre+"/int[3][@name='AAA"+termSuffix+"'][.='1']" - ,pre+"/int[4][@name='B"+termSuffix+"'][.='1']" - ,pre+"/int[5][@name='BB"+termSuffix+"'][.='1']" - ,pre+"/int[6][@name='CC"+termSuffix+"'][.='1']" - ); - } - - /** - * kind of an absurd test because if there is an infinite loop, it - * would never finish -- but at least it ensures that if one of - * these requests return, they return an error + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.exists", + "true", + "facet.field", + lf, + "facet.mincount", + "1", + "facet.offset", + "5", + "facet.limit", + "10", + "facet.sort", + "count", + "facet.prefix", + "CC"), + "*[count(//lst[@name='facet_fields']/lst/int)=0]"); + } + + public void doFacetContains( + String f, + String g, + String termSuffix, + String contains, + String groupContains, + String... 
params) { + String indent = "on"; + String pre = "//lst[@name='" + f + "']"; + + assertQ( + "test facet.contains", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + f, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.contains", + contains), + "*[count(//lst[@name='facet_fields']/lst/int)=3]", + pre + "/int[1][@name='BBB" + termSuffix + "'][.='3']", + pre + "/int[2][@name='BB" + termSuffix + "'][.='2']", + pre + "/int[3][@name='B" + termSuffix + "'][.='1']"); + + assertQ( + "test facet.contains for grouped facets", + req( + params, + "q", + "id:[* TO *]", + "indent", + indent, + "facet", + "true", + "facet.field", + f, + "facet.mincount", + "0", + "facet.offset", + "0", + "facet.limit", + "100", + "facet.sort", + "count", + "facet.contains", + groupContains, + "group", + "true", + "group.field", + g, + "group.facet", + "true"), + "*[count(//lst[@name='facet_fields']/lst/int)=6]", + pre + "/int[1][@name='CCC" + termSuffix + "'][.='3']", + pre + "/int[2][@name='BBB" + termSuffix + "'][.='2']", + pre + "/int[3][@name='AAA" + termSuffix + "'][.='1']", + pre + "/int[4][@name='B" + termSuffix + "'][.='1']", + pre + "/int[5][@name='BB" + termSuffix + "'][.='1']", + pre + "/int[6][@name='CC" + termSuffix + "'][.='1']"); + } + + /** + * kind of an absurd test because if there is an infinite loop, it would never finish -- but at + * least it ensures that if one of these requests returns, it returns an error */ public void testRangeFacetInfiniteLoopDetection() { for (String field : new String[] {"foo_f", "foo_d", "foo_i"}) { - assertQEx("no zero gap error: " + field, - req("q", "*:*", - "facet", "true", - "facet.range", field, - "facet.range.start", "23", - "facet.range.gap", "0", - "facet.range.end", "100"), - 400); + assertQEx( + "no zero gap error: " + field, + req( + "q", "*:*", + "facet", "true", + "facet.range", field, + "facet.range.start", "23", + "facet.range.gap", "0", + "facet.range.end", "100"), + 400); } String field = "foo_dt"; - assertQEx("no zero gap error for facet.range: " + field, - req("q", "*:*", - "facet", "true", - "facet.range", field, - "facet.range.start", "NOW", - "facet.range.gap", "+0DAYS", - "facet.range.end", "NOW+10DAY"), - 400); + assertQEx( + "no zero gap error for facet.range: " + field, + req( + "q", "*:*", + "facet", "true", + "facet.range", field, + "facet.range.start", "NOW", + "facet.range.gap", "+0DAYS", + "facet.range.end", "NOW+10DAY"), + 400); field = "foo_f"; - assertQEx("no float underflow error: " + field, - req("q", "*:*", - "facet", "true", - "facet.range", field, - "facet.range.start", "100000000000", - "facet.range.end", "100000086200", - "facet.range.gap", "2160"), - 400); + assertQEx( + "no float underflow error: " + field, + req( + "q", "*:*", + "facet", "true", + "facet.range", field, + "facet.range.start", "100000000000", + "facet.range.end", "100000086200", + "facet.range.gap", "2160"), + 400); field = "foo_d"; - assertQEx("no double underflow error: " + field, - req("q", "*:*", - "facet", "true", - "facet.range", field, - "facet.range.start", "9900000000000", - "facet.range.end", "9900000086200", - "facet.range.gap", "0.0003"), - 400); - } - + assertQEx( + "no double underflow error: " + field, + req( + "q", "*:*", + "facet", "true", + "facet.range", field, + "facet.range.start", "9900000000000", + "facet.range.end", "9900000086200", + "facet.range.gap", "0.0003"), + 400); + } + public void testRangeQueryHardEndParamFilter() {
doTestRangeQueryHardEndParam("range_facet_l", FacetRangeMethod.FILTER); } - + public void testRangeQueryHardEndParamDv() { doTestRangeQueryHardEndParam("range_facet_l", FacetRangeMethod.DV); } - + private void doTestRangeQueryHardEndParam(String field, FacetRangeMethod method) { - assertQ("Test facet.range.hardend", - req("q", "id_i1:[42 TO 47]" - ,"facet","true" - ,"fl","id," + field - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ,"facet.range.gap","5" - ,"facet.range.hardend", "false" - ,"facet.range.other", "after" - ) - ,"*[count(//lst[@name='facet_ranges']/lst)=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts']/int)=1]" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts']/int[@name='43'][.='5']" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/long[@name='end'][.='48']" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='0']" - ); - - assertQ("Test facet.range.hardend", - req("q", "id_i1:[42 TO 47]" - ,"facet","true" - ,"fl","id," + field - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ,"facet.range.gap","5" - ,"facet.range.hardend", "true" - ,"facet.range.other", "after" - ) - ,"*[count(//lst[@name='facet_ranges']/lst)=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts']/int)=1]" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts']/int[@name='43'][.='2']" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/long[@name='end'][.='45']" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='3']" - ); - - } - + assertQ( + "Test facet.range.hardend", + req( + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fl", + "id," + field, + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "5", + "facet.range.hardend", + "false", + "facet.range.other", + "after"), + "*[count(//lst[@name='facet_ranges']/lst)=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + + field + + "']/lst[@name='counts']/int)=1]", + "//lst[@name='facet_ranges']/lst[@name='" + + field + + "']/lst[@name='counts']/int[@name='43'][.='5']", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/long[@name='end'][.='48']", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='0']"); + + assertQ( + "Test facet.range.hardend", + req( + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fl", + "id," + field, + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "5", + "facet.range.hardend", + "true", + "facet.range.other", + "after"), + "*[count(//lst[@name='facet_ranges']/lst)=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]", + 
"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + + field + + "']/lst[@name='counts']/int)=1]", + "//lst[@name='facet_ranges']/lst[@name='" + + field + + "']/lst[@name='counts']/int[@name='43'][.='2']", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/long[@name='end'][.='45']", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='3']"); + } + public void testRangeQueryOtherParamFilter() { doTestRangeQueryOtherParam("range_facet_l", FacetRangeMethod.FILTER); } - + public void testRangeQueryOtherParamDv() { doTestRangeQueryOtherParam("range_facet_l", FacetRangeMethod.DV); } - + private void doTestRangeQueryOtherParam(String field, FacetRangeMethod method) { - - assertQ("Test facet.range.other", - req("q", "id_i1:[42 TO 47]" - ,"facet","true" - ,"fl","id," + field - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ,"facet.range.gap","1" - ,"facet.range.other", FacetRangeOther.BEFORE.toString() - ) - ,"*[count(//lst[@name='facet_ranges']/lst)=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts']/int)=2]" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='before'][.='1']" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='after'])=0]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='between'])=0]" - ); - - assertQ("Test facet.range.other", - req("q", "id_i1:[42 TO 47]" - ,"facet","true" - ,"fl","id," + field - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ,"facet.range.gap","1" - ,"facet.range.other", FacetRangeOther.AFTER.toString() - ) - ,"*[count(//lst[@name='facet_ranges']/lst)=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts']/int)=2]" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='3']" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='between'])=0]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='before'])=0]" - ); - - assertQ("Test facet.range.other", - req("q", "id_i1:[42 TO 47]" - ,"facet","true" - ,"fl","id," + field - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ,"facet.range.gap","1" - ,"facet.range.other",FacetRangeOther.BETWEEN.toString() - ) - ,"*[count(//lst[@name='facet_ranges']/lst)=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts']/int)=2]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='after'])=0]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='before'])=0]" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='between'][.='2']" - ); - - assertQ("Test facet.range.other", - req("q", 
"id_i1:[42 TO 47]" - ,"facet","true" - ,"fl","id," + field - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ,"facet.range.gap","1" - ,"facet.range.other",FacetRangeOther.NONE.toString() - ) - ,"*[count(//lst[@name='facet_ranges']/lst)=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts']/int)=2]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='after'])=0]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='before'])=0]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='between'])=0]" - ); + + assertQ( + "Test facet.range.other", + req( + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fl", + "id," + field, + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "1", + "facet.range.other", + FacetRangeOther.BEFORE.toString()), + "*[count(//lst[@name='facet_ranges']/lst)=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + + field + + "']/lst[@name='counts']/int)=2]", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='before'][.='1']", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='after'])=0]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='between'])=0]"); + + assertQ( + "Test facet.range.other", + req( + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fl", + "id," + field, + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "1", + "facet.range.other", + FacetRangeOther.AFTER.toString()), + "*[count(//lst[@name='facet_ranges']/lst)=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + + field + + "']/lst[@name='counts']/int)=2]", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='3']", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='between'])=0]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='before'])=0]"); + + assertQ( + "Test facet.range.other", + req( + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fl", + "id," + field, + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "1", + "facet.range.other", + FacetRangeOther.BETWEEN.toString()), + "*[count(//lst[@name='facet_ranges']/lst)=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + + field + + "']/lst[@name='counts']/int)=2]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='after'])=0]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='before'])=0]", + 
"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='between'][.='2']"); + + assertQ( + "Test facet.range.other", + req( + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fl", + "id," + field, + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "1", + "facet.range.other", + FacetRangeOther.NONE.toString()), + "*[count(//lst[@name='facet_ranges']/lst)=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + + field + + "']/lst[@name='counts']/int)=2]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='after'])=0]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='before'])=0]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='between'])=0]"); // these should have equivalent behavior (multivalued 'other' param: top level vs local) - for (SolrQueryRequest req : new SolrQueryRequest[] { - req("q", "id_i1:[42 TO 47]" - ,"facet","true" - ,"fl","id," + field - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ,"facet.range.gap","1" - ,"facet.range.other",FacetRangeOther.BEFORE.toString() - ,"facet.range.other",FacetRangeOther.AFTER.toString()), - req("q", "id_i1:[42 TO 47]" - ,"facet","true" - ,"fl","id," + field - ,"facet.range", "{!facet.range.other=before facet.range.other=after}" + field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ,"facet.range.gap","1") }) { - - assertQ("Test facet.range.other: " + req.toString(), req - ,"*[count(//lst[@name='facet_ranges']/lst)=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts']/int)=2]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='between'])=0]" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='3']" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='before'][.='1']" - ); + for (SolrQueryRequest req : + new SolrQueryRequest[] { + req( + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fl", + "id," + field, + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "1", + "facet.range.other", + FacetRangeOther.BEFORE.toString(), + "facet.range.other", + FacetRangeOther.AFTER.toString()), + req( + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fl", + "id," + field, + "facet.range", + "{!facet.range.other=before facet.range.other=after}" + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "1") + }) { + + assertQ( + "Test facet.range.other: " + req.toString(), + req, + "*[count(//lst[@name='facet_ranges']/lst)=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + + field + + "']/lst[@name='counts']/int)=2]", + 
"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='between'])=0]", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='3']", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='before'][.='1']"); } - - assertQ("Test facet.range.other", - req("q", "id_i1:[42 TO 47]" - ,"facet","true" - ,"fl","id," + field - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ,"facet.range.gap","1" - ,"facet.range.other",FacetRangeOther.BEFORE.toString() - ,"facet.range.other",FacetRangeOther.AFTER.toString() - ,"facet.range.other",FacetRangeOther.NONE.toString() - ) - ,"*[count(//lst[@name='facet_ranges']/lst)=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts']/int)=2]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='between'])=0]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='after'])=0]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='before'])=0]" - ); - - assertQ("Test facet.range.other", - req("q", "id_i1:[42 TO 47]" - ,"facet","true" - ,"fl","id," + field - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ,"facet.range.gap","1" - ,"facet.range.other",FacetRangeOther.ALL.toString() - ,"facet.range.include", FacetRangeInclude.LOWER.toString() - ) - ,"*[count(//lst[@name='facet_ranges']/lst)=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts']/int)=2]" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='between'][.='2']" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='3']" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='before'][.='1']" - ); - - assertQ("Test facet.range.other", - req("q", "id_i1:[42 TO 47]" - ,"facet","true" - ,"fl","id," + field - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ,"facet.range.gap","1" - ,"facet.range.other",FacetRangeOther.ALL.toString() - ,"facet.range.include", FacetRangeInclude.UPPER.toString() - ) - ,"*[count(//lst[@name='facet_ranges']/lst)=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts']/int)=2]" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='between'][.='2']" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='2']" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='before'][.='2']" - ); - - assertQ("Test facet.range.other", - req("q", "id_i1:[42 TO 47]" - ,"facet","true" - ,"fl","id," + field - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ,"facet.range.gap","1" - ,"facet.range.other",FacetRangeOther.ALL.toString() - ,"facet.range.include", FacetRangeInclude.EDGE.toString() - ) - 
,"*[count(//lst[@name='facet_ranges']/lst)=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts']/int)=2]" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='between'][.='3']" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='2']" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='before'][.='1']" - ); - - assertQ("Test facet.range.other", - req("q", "id_i1:[42 TO 47]" - ,"facet","true" - ,"fl","id," + field - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ,"facet.range.gap","1" - ,"facet.range.other", FacetRangeOther.ALL.toString() - ,"facet.range.include", FacetRangeInclude.OUTER.toString() - ) - ,"*[count(//lst[@name='facet_ranges']/lst)=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts']/int)=2]" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='between'][.='1']" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='3']" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='before'][.='2']" - ); - - assertQ("Test facet.range.other", - req("q", "id_i1:[12345 TO 12345]" - ,"facet","true" - ,"fl","id," + field - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ,"facet.range.gap","1" - ,"facet.range.other", FacetRangeOther.ALL.toString() - ) - ,"*[count(//lst[@name='facet_ranges']/lst)=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='between'][.='0']" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='0']" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='before'][.='0']" - ); - - assertQ("Test facet.range.other", - req("q", "id_i1:[42 TO 47]" - ,"facet","true" - ,"fl","id," + field - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ,"facet.range.gap","10" - ,"facet.range.other", FacetRangeOther.ALL.toString() - ) - ,"*[count(//lst[@name='facet_ranges']/lst)=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]" - ,"*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts']/int)=1]" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='between'][.='5']" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='0']" - ,"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='before'][.='1']" - ); - + + assertQ( + "Test facet.range.other", + req( + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fl", + "id," + field, + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "1", + "facet.range.other", + FacetRangeOther.BEFORE.toString(), + 
"facet.range.other", + FacetRangeOther.AFTER.toString(), + "facet.range.other", + FacetRangeOther.NONE.toString()), + "*[count(//lst[@name='facet_ranges']/lst)=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + + field + + "']/lst[@name='counts']/int)=2]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='between'])=0]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='after'])=0]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='before'])=0]"); + + assertQ( + "Test facet.range.other", + req( + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fl", + "id," + field, + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "1", + "facet.range.other", + FacetRangeOther.ALL.toString(), + "facet.range.include", + FacetRangeInclude.LOWER.toString()), + "*[count(//lst[@name='facet_ranges']/lst)=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + + field + + "']/lst[@name='counts']/int)=2]", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='between'][.='2']", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='3']", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='before'][.='1']"); + + assertQ( + "Test facet.range.other", + req( + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fl", + "id," + field, + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "1", + "facet.range.other", + FacetRangeOther.ALL.toString(), + "facet.range.include", + FacetRangeInclude.UPPER.toString()), + "*[count(//lst[@name='facet_ranges']/lst)=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + + field + + "']/lst[@name='counts']/int)=2]", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='between'][.='2']", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='2']", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='before'][.='2']"); + + assertQ( + "Test facet.range.other", + req( + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fl", + "id," + field, + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "1", + "facet.range.other", + FacetRangeOther.ALL.toString(), + "facet.range.include", + FacetRangeInclude.EDGE.toString()), + "*[count(//lst[@name='facet_ranges']/lst)=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + + field + + "']/lst[@name='counts']/int)=2]", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='between'][.='3']", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='2']", + 
"//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='before'][.='1']"); + + assertQ( + "Test facet.range.other", + req( + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fl", + "id," + field, + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "1", + "facet.range.other", + FacetRangeOther.ALL.toString(), + "facet.range.include", + FacetRangeInclude.OUTER.toString()), + "*[count(//lst[@name='facet_ranges']/lst)=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + + field + + "']/lst[@name='counts']/int)=2]", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='between'][.='1']", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='3']", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='before'][.='2']"); + + assertQ( + "Test facet.range.other", + req( + "q", + "id_i1:[12345 TO 12345]", + "facet", + "true", + "fl", + "id," + field, + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "1", + "facet.range.other", + FacetRangeOther.ALL.toString()), + "*[count(//lst[@name='facet_ranges']/lst)=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='between'][.='0']", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='0']", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='before'][.='0']"); + + assertQ( + "Test facet.range.other", + req( + "q", + "id_i1:[42 TO 47]", + "facet", + "true", + "fl", + "id," + field, + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "10", + "facet.range.other", + FacetRangeOther.ALL.toString()), + "*[count(//lst[@name='facet_ranges']/lst)=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + field + "']/lst[@name='counts'])=1]", + "*[count(//lst[@name='facet_ranges']/lst[@name='" + + field + + "']/lst[@name='counts']/int)=1]", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='between'][.='5']", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='after'][.='0']", + "//lst[@name='facet_ranges']/lst[@name='" + field + "']/int[@name='before'][.='1']"); } public void testGroupFacetErrors() { - ModifiableSolrParams params = params("q", "*:*", "group", "true", "group.query", "myfield_s:*", - "facet", "true", "group.facet", "true"); + ModifiableSolrParams params = + params( + "q", + "*:*", + "group", + "true", + "group.query", + "myfield_s:*", + "facet", + "true", + "group.facet", + "true"); // with facet.field - SolrException ex = expectThrows(SolrException.class, () -> { - h.query(req(params, "facet.field", "myfield_s")); - }); + SolrException ex = + expectThrows( + SolrException.class, + () -> { + h.query(req(params, "facet.field", "myfield_s")); + }); assertEquals(ErrorCode.BAD_REQUEST.code, ex.code()); assertTrue(ex.getMessage().contains("Specify the group.field as parameter or local 
parameter")); // with facet.query - ex = expectThrows(SolrException.class, () -> { - h.query(req(params, "facet.query", "myfield_s:*")); - }); + ex = + expectThrows( + SolrException.class, + () -> { + h.query(req(params, "facet.query", "myfield_s:*")); + }); assertEquals(ErrorCode.BAD_REQUEST.code, ex.code()); assertTrue(ex.getMessage().contains("Specify the group.field as parameter or local parameter")); // with facet.range - ex = expectThrows(SolrException.class, () -> h.query(req(params, "facet.range", "range_facet_l", - "facet.range.start", "43", "facet.range.end", "450", "facet.range.gap", "10")) - ); + ex = + expectThrows( + SolrException.class, + () -> + h.query( + req( + params, + "facet.range", + "range_facet_l", + "facet.range.start", + "43", + "facet.range.end", + "450", + "facet.range.gap", + "10"))); assertEquals(ErrorCode.BAD_REQUEST.code, ex.code()); assertTrue(ex.getMessage().contains("Specify the group.field as parameter or local parameter")); // with facet.interval - ex = expectThrows(SolrException.class, () -> h.query(req(params, "facet.interval", "range_facet_l", - "f.range_facet_l.facet.interval.set", "(43,60]")) - ); + ex = + expectThrows( + SolrException.class, + () -> + h.query( + req( + params, + "facet.interval", + "range_facet_l", + "f.range_facet_l.facet.interval.set", + "(43,60]"))); assertEquals(ErrorCode.BAD_REQUEST.code, ex.code()); assertTrue(ex.getMessage().contains("Interval Faceting can't be used with group.facet")); } - + public void testRangeFacetingBadRequest() { String field = "range_facet_l"; ignoreException("."); try { - for (FacetRangeMethod method:FacetRangeMethod.values()) { - assertQEx("Test facet.range bad requests", + for (FacetRangeMethod method : FacetRangeMethod.values()) { + assertQEx( + "Test facet.range bad requests", "range facet 'end' comes before 'start'", - req("q", "*:*" - ,"facet","true" - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","45" - ,"facet.range.end","43" - ,"facet.range.gap","10" - ), - ErrorCode.BAD_REQUEST - ); - - assertQEx("Test facet.range bad requests", + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "45", + "facet.range.end", + "43", + "facet.range.gap", + "10"), + ErrorCode.BAD_REQUEST); + + assertQEx( + "Test facet.range bad requests", "range facet infinite loop (is gap negative? 
did the math overflow?)", - req("q", "*:*" - ,"facet","true" - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ,"facet.range.gap","-1" - ), - ErrorCode.BAD_REQUEST - ); - - assertQEx("Test facet.range bad requests", + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "-1"), + ErrorCode.BAD_REQUEST); + + assertQEx( + "Test facet.range bad requests", "range facet infinite loop: gap is either zero, or too small relative start/end and caused underflow", - req("q", "*:*" - ,"facet","true" - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ,"facet.range.gap","0" - ), - ErrorCode.BAD_REQUEST - ); - - assertQEx("Test facet.range bad requests", + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "0"), + ErrorCode.BAD_REQUEST); + + assertQEx( + "Test facet.range bad requests", "Missing required parameter", - req("q", "*:*" - ,"facet","true" - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.end","45" - ,"facet.range.gap","5" - ), - ErrorCode.BAD_REQUEST - ); - assertQEx("Test facet.range bad requests", + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.end", + "45", + "facet.range.gap", + "5"), + ErrorCode.BAD_REQUEST); + assertQEx( + "Test facet.range bad requests", "Missing required parameter", - req("q", "*:*" - ,"facet","true" - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.gap","5" - ), - ErrorCode.BAD_REQUEST - ); - assertQEx("Test facet.range bad requests", + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.gap", + "5"), + ErrorCode.BAD_REQUEST); + assertQEx( + "Test facet.range bad requests", "Missing required parameter", - req("q", "*:*" - ,"facet","true" - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ), - ErrorCode.BAD_REQUEST - ); - assertQEx("Test facet.range bad requests", + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45"), + ErrorCode.BAD_REQUEST); + assertQEx( + "Test facet.range bad requests", "Unable to range facet on field", - req("q", "*:*" - ,"facet","true" - ,"facet.range", "contains_s1" - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ,"facet.range.gap","5" - ), - ErrorCode.BAD_REQUEST - ); - assertQEx("Test facet.range bad requests", + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + "contains_s1", + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "5"), + ErrorCode.BAD_REQUEST); + assertQEx( + "Test facet.range bad requests", "foo is not a valid method for range faceting", - req("q", "*:*" - ,"facet","true" - ,"facet.range", field - ,"facet.range.method", "foo" - ,"facet.range.start","43" - ,"facet.range.end","45" - 
,"facet.range.gap","5" - ), - ErrorCode.BAD_REQUEST - ); - - assertQEx("Test facet.range bad requests", + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + field, + "facet.range.method", + "foo", + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "5"), + ErrorCode.BAD_REQUEST); + + assertQEx( + "Test facet.range bad requests", "foo is not a valid type of for range 'include' information", - req("q", "*:*" - ,"facet","true" - ,"facet.range", field - ,"facet.range.method", method.toString() - ,"facet.range.start","43" - ,"facet.range.end","45" - ,"facet.range.gap","5" - ,"facet.range.include", "foo" - ), - ErrorCode.BAD_REQUEST - ); + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + field, + "facet.range.method", + method.toString(), + "facet.range.start", + "43", + "facet.range.end", + "45", + "facet.range.gap", + "5", + "facet.range.include", + "foo"), + ErrorCode.BAD_REQUEST); } } finally { resetExceptionIgnores(); } - } - + @SuppressWarnings("unchecked") public void testRangeFacetFilterVsDocValuesRandom() throws Exception { for (int i = 0; i < atLeast(100); i++) { ModifiableSolrParams params = null; - int fieldType = i%3; + int fieldType = i % 3; switch (fieldType) { - case 0: params = getRandomParamsDate(); break; - case 1: params = getRandomParamsInt(); break; - case 2: params = getRandomParamsFloat(); break; + case 0: + params = getRandomParamsDate(); + break; + case 1: + params = getRandomParamsInt(); + break; + case 2: + params = getRandomParamsFloat(); + break; } String field = params.get("facet.range"); params.add("q", getRandomQuery()); - - + params.add("facet", "true"); if (random().nextBoolean()) { params.add("facet.range.method", FacetRangeMethod.FILTER.toString()); } - + NamedList rangeFacetsFilter; NamedList rangeFacetsDv; - + SolrQueryRequest req = req(params); log.info("Using Params: {}", params); try { SolrQueryResponse rsp = h.queryAndResponse("", req); - rangeFacetsFilter = (NamedList) ((NamedList) rsp.getValues().get("facet_counts")).get("facet_ranges"); + rangeFacetsFilter = + (NamedList) + ((NamedList) rsp.getValues().get("facet_counts")).get("facet_ranges"); } finally { req.close(); } @@ -3572,33 +4854,45 @@ public void testRangeFacetFilterVsDocValuesRandom() throws Exception { req = req(params); try { SolrQueryResponse rsp = h.queryAndResponse("", req); - rangeFacetsDv = (NamedList) ((NamedList) rsp.getValues().get("facet_counts")).get("facet_ranges"); + rangeFacetsDv = + (NamedList) + ((NamedList) rsp.getValues().get("facet_counts")).get("facet_ranges"); } finally { req.close(); } - + assertNotNull(rangeFacetsFilter.get(field)); assertNotNull(rangeFacetsDv.get(field)); - - assertSameResults("Different results obtained when using 'filter' and 'dv' methods for Range Facets using params." - + params + "\n" + "Filter:" + rangeFacetsFilter + "\n DV: " + rangeFacetsDv, - (NamedList)rangeFacetsFilter.get(field), (NamedList)rangeFacetsDv.get(field)); + + assertSameResults( + "Different results obtained when using 'filter' and 'dv' methods for Range Facets using params." 
+ + params + + "\n" + + "Filter:" + + rangeFacetsFilter + + "\n DV: " + + rangeFacetsDv, + (NamedList) rangeFacetsFilter.get(field), + (NamedList) rangeFacetsDv.get(field)); } - } - public void testFacetPrefixWithFacetThreads() throws Exception { - assertQ("Test facet.prefix with facet.thread", - req("q", "id_i1:[101 TO 102]" - ,"facet","true" - ,"facet.field", "{!key=key1 facet.prefix=foo}myfield_s" - ,"facet.field", "{!key=key2 facet.prefix=bar}myfield_s" - ,"facet.threads", "1" - ) - ,"*[count(//lst[@name='facet_fields']/lst[@name='key1']/int[@name='foo'])=1]" - ,"*[count(//lst[@name='facet_fields']/lst[@name='key2']/int[@name='bar'])=1]" - ); - + public void testFacetPrefixWithFacetThreads() throws Exception { + assertQ( + "Test facet.prefix with facet.thread", + req( + "q", + "id_i1:[101 TO 102]", + "facet", + "true", + "facet.field", + "{!key=key1 facet.prefix=foo}myfield_s", + "facet.field", + "{!key=key2 facet.prefix=bar}myfield_s", + "facet.threads", + "1"), + "*[count(//lst[@name='facet_fields']/lst[@name='key1']/int[@name='foo'])=1]", + "*[count(//lst[@name='facet_fields']/lst[@name='key2']/int[@name='bar'])=1]"); } private String getRandomQuery() { @@ -3609,14 +4903,14 @@ private String getRandomQuery() { values[0] = random().nextInt(3000); values[1] = random().nextInt(3000); Arrays.sort(values); - return String.format(Locale.ROOT, "id_i1:[%d TO %d]", values[0], values[1]); + return String.format(Locale.ROOT, "id_i1:[%d TO %d]", values[0], values[1]); } - - private void assertSameResults(String message, - NamedList rangeFacetsFilter, NamedList rangeFacetsDv) { - assertEquals(message + " Different number of elements.", rangeFacetsFilter.size(), rangeFacetsDv.size()); - for (Map.Entry entry:rangeFacetsFilter) { + private void assertSameResults( + String message, NamedList rangeFacetsFilter, NamedList rangeFacetsDv) { + assertEquals( + message + " Different number of elements.", rangeFacetsFilter.size(), rangeFacetsDv.size()); + for (Map.Entry entry : rangeFacetsFilter) { if (entry.getKey().equals("counts")) { continue; } @@ -3628,20 +4922,25 @@ private void assertSameResults(String message, } assertNotNull("Null counts: " + rangeFacetsFilter, rangeFacetsFilter.get("counts")); assertNotNull("Null counts: " + rangeFacetsDv, rangeFacetsDv.get("counts")); - assertEquals(message + "Different counts", rangeFacetsFilter.get("counts"), rangeFacetsDv.get("counts")); + assertEquals( + message + "Different counts", rangeFacetsFilter.get("counts"), rangeFacetsDv.get("counts")); } private ModifiableSolrParams getRandomParamsInt() { - String field = new String[]{"range_facet_l_dv", "range_facet_i_dv", "range_facet_l", "duration_i1", "id_i1"}[random().nextInt(5)]; + String field = + new String[] { + "range_facet_l_dv", "range_facet_i_dv", "range_facet_l", "duration_i1", "id_i1" + } + [random().nextInt(5)]; ModifiableSolrParams params = new ModifiableSolrParams(); Integer[] values = new Integer[2]; do { - values[0] = random().nextInt(3000) * (random().nextBoolean()?-1:1); - values[1] = random().nextInt(3000) * (random().nextBoolean()?-1:1); + values[0] = random().nextInt(3000) * (random().nextBoolean() ? -1 : 1); + values[1] = random().nextInt(3000) * (random().nextBoolean() ? 
-1 : 1); } while (values[0].equals(values[1])); Arrays.sort(values); long gapNum = Math.max(1, random().nextInt(3000)); - + params.add(FacetParams.FACET_RANGE_START, String.valueOf(values[0])); params.add(FacetParams.FACET_RANGE_END, String.valueOf(values[1])); params.add(FacetParams.FACET_RANGE_GAP, String.format(Locale.ROOT, "+%d", gapNum)); @@ -3649,18 +4948,28 @@ private ModifiableSolrParams getRandomParamsInt() { params.add(FacetParams.FACET_RANGE, field); return params; } - + private ModifiableSolrParams getRandomParamsFloat() { - String field = new String[]{"range_facet_d_dv", "range_facet_f_dv", "range_facet_d", "range_facet_f", "range_facet_mv_f", "range_facet_f1", "range_facet_f1_dv"}[random().nextInt(7)]; + String field = + new String[] { + "range_facet_d_dv", + "range_facet_f_dv", + "range_facet_d", + "range_facet_f", + "range_facet_mv_f", + "range_facet_f1", + "range_facet_f1_dv" + } + [random().nextInt(7)]; ModifiableSolrParams params = new ModifiableSolrParams(); Float[] values = new Float[2]; do { - values[0] = random().nextFloat() * 3000 * (random().nextBoolean()?-1:1); - values[1] = random().nextFloat() * 3000 * (random().nextBoolean()?-1:1); + values[0] = random().nextFloat() * 3000 * (random().nextBoolean() ? -1 : 1); + values[1] = random().nextFloat() * 3000 * (random().nextBoolean() ? -1 : 1); } while (values[0].equals(values[1])); Arrays.sort(values); float gapNum = Math.max(1, random().nextFloat() * 3000); - + params.add(FacetParams.FACET_RANGE_START, String.valueOf(values[0])); params.add(FacetParams.FACET_RANGE_END, String.valueOf(values[1])); params.add(FacetParams.FACET_RANGE_GAP, String.format(Locale.ROOT, "+%f", gapNum)); @@ -3668,33 +4977,44 @@ private ModifiableSolrParams getRandomParamsFloat() { params.add(FacetParams.FACET_RANGE, field); return params; } - - private final static String[] DATE_GAP_UNITS = new String[]{"SECONDS", "MINUTES", "HOURS", "DAYS", "MONTHS", "YEARS"}; - + + private static final String[] DATE_GAP_UNITS = + new String[] {"SECONDS", "MINUTES", "HOURS", "DAYS", "MONTHS", "YEARS"}; + private ModifiableSolrParams getRandomParamsDate() { - String field = new String[]{"range_facet_dt_dv", "a_tdt", "bday"}[random().nextInt(3)]; + String field = new String[] {"range_facet_dt_dv", "a_tdt", "bday"}[random().nextInt(3)]; ModifiableSolrParams params = new ModifiableSolrParams(); Date[] dates = new Date[2]; do { - dates[0] = new Date((long)(random().nextDouble()*(new Date().getTime()) * (random().nextBoolean()?-1:1))); - dates[1] = new Date((long)(random().nextDouble()*(new Date().getTime()) * (random().nextBoolean()?-1:1))); + dates[0] = + new Date( + (long) + (random().nextDouble() + * (new Date().getTime()) + * (random().nextBoolean() ? -1 : 1))); + dates[1] = + new Date( + (long) + (random().nextDouble() + * (new Date().getTime()) + * (random().nextBoolean() ? 
-1 : 1))); } while (dates[0].equals(dates[1])); Arrays.sort(dates); - long dateDiff = (dates[1].getTime() - dates[0].getTime())/1000; + long dateDiff = (dates[1].getTime() - dates[0].getTime()) / 1000; String gapUnit; if (dateDiff < 1000) { gapUnit = DATE_GAP_UNITS[random().nextInt(DATE_GAP_UNITS.length)]; - } else if (dateDiff < 10000){ + } else if (dateDiff < 10000) { gapUnit = DATE_GAP_UNITS[1 + random().nextInt(DATE_GAP_UNITS.length - 1)]; - } else if (dateDiff < 100000){ + } else if (dateDiff < 100000) { gapUnit = DATE_GAP_UNITS[2 + random().nextInt(DATE_GAP_UNITS.length - 2)]; - } else if (dateDiff < 1000000){ + } else if (dateDiff < 1000000) { gapUnit = DATE_GAP_UNITS[3 + random().nextInt(DATE_GAP_UNITS.length - 3)]; } else { gapUnit = DATE_GAP_UNITS[4 + random().nextInt(DATE_GAP_UNITS.length - 4)]; } int gapNum = random().nextInt(100) + 1; - + params.add(FacetParams.FACET_RANGE_START, dates[0].toInstant().toString()); params.add(FacetParams.FACET_RANGE_END, dates[1].toInstant().toString()); params.add(FacetParams.FACET_RANGE_GAP, String.format(Locale.ROOT, "+%d%s", gapNum, gapUnit)); @@ -3703,18 +5023,21 @@ private ModifiableSolrParams getRandomParamsDate() { return params; } - private void addCommonRandomRangeParams(ModifiableSolrParams params) { for (int i = 0; i < random().nextInt(2); i++) { - params.add(FacetParams.FACET_RANGE_OTHER, FacetRangeOther.values()[random().nextInt(FacetRangeOther.values().length)].toString()); + params.add( + FacetParams.FACET_RANGE_OTHER, + FacetRangeOther.values()[random().nextInt(FacetRangeOther.values().length)].toString()); } if (random().nextBoolean()) { - params.add(FacetParams.FACET_RANGE_INCLUDE, FacetRangeInclude.values()[random().nextInt(FacetRangeInclude.values().length)].toString()); + params.add( + FacetParams.FACET_RANGE_INCLUDE, + FacetRangeInclude.values()[random().nextInt(FacetRangeInclude.values().length)] + .toString()); } if (random().nextBoolean()) { params.add(FacetParams.FACET_MINCOUNT, String.valueOf(random().nextInt(10))); } params.add(FacetParams.FACET_RANGE_HARD_END, String.valueOf(random().nextBoolean())); } - } diff --git a/solr/core/src/test/org/apache/solr/request/SubstringBytesRefFilterTest.java b/solr/core/src/test/org/apache/solr/request/SubstringBytesRefFilterTest.java index db24312e0bb..d18201c443e 100644 --- a/solr/core/src/test/org/apache/solr/request/SubstringBytesRefFilterTest.java +++ b/solr/core/src/test/org/apache/solr/request/SubstringBytesRefFilterTest.java @@ -18,13 +18,12 @@ import java.util.ArrayList; import java.util.List; - import org.apache.lucene.util.BytesRef; import org.apache.solr.SolrTestCase; import org.junit.Test; public class SubstringBytesRefFilterTest extends SolrTestCase { - + @Test public void testSubstringBytesRefFilter() { final List substrings = new ArrayList<>(4); @@ -47,5 +46,4 @@ public void testSubstringBytesRefFilter() { assertFalse(filter.test(new BytesRef("qux"))); } - -} +} diff --git a/solr/core/src/test/org/apache/solr/request/TestFacetMethods.java b/solr/core/src/test/org/apache/solr/request/TestFacetMethods.java index bc451f5d3d7..3694f029222 100644 --- a/solr/core/src/test/org/apache/solr/request/TestFacetMethods.java +++ b/solr/core/src/test/org/apache/solr/request/TestFacetMethods.java @@ -18,7 +18,6 @@ package org.apache.solr.request; import java.util.Arrays; - import org.apache.solr.SolrTestCase; import org.apache.solr.request.SimpleFacets.FacetMethod; import org.apache.solr.schema.BoolField; @@ -31,31 +30,34 @@ public class TestFacetMethods extends SolrTestCase { // 
TODO - make these public in FieldProperties?
- protected final static int MULTIVALUED = 0x00000200;
- protected final static int DOC_VALUES = 0x00008000;
- protected final static int UNINVERTIBLE = 0b10000000000000000000;
+ protected static final int MULTIVALUED = 0x00000200;
+ protected static final int DOC_VALUES = 0x00008000;
+ protected static final int UNINVERTIBLE = 0b10000000000000000000;
- protected static boolean propsMatch( int x, int y ) {
+ protected static boolean propsMatch(int x, int y) {
return (x & y) != 0;
}
@Test
public void testNumericSingleValuedDV() {
- for (int props : Arrays.asList(DOC_VALUES ^ UNINVERTIBLE,
- DOC_VALUES)) {
+ for (int props : Arrays.asList(DOC_VALUES ^ UNINVERTIBLE, DOC_VALUES)) {
SchemaField field = new SchemaField("field", new TrieIntField(), props, null);
// default is FCS, can't use ENUM due to trie-field terms, FC rewrites to FCS for efficiency
for (int mincount : Arrays.asList(0, 1)) { // behavior should be independent of mincount
assertEquals(FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, null, mincount));
- assertEquals(FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.ENUM, mincount));
- assertEquals(FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.FC, mincount));
- assertEquals(FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.FCS, mincount));
-
+ assertEquals(
+ FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.ENUM, mincount));
+ assertEquals(
+ FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.FC, mincount));
+ assertEquals(
+ FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.FCS, mincount));
+
// UIF only allowed if field is UNINVERTIBLE
- assertEquals(propsMatch(props, UNINVERTIBLE) ? FacetMethod.UIF : FacetMethod.FCS,
- SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, 0));
+ assertEquals(
+ propsMatch(props, UNINVERTIBLE) ? FacetMethod.UIF : FacetMethod.FCS,
+ SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, 0));
}
}
}
@@ -63,20 +65,24 @@ public void testNumericSingleValuedDV() {
@Test
public void testNumericMultiValuedDV() {
- for (int props : Arrays.asList(DOC_VALUES ^ MULTIVALUED ^ UNINVERTIBLE,
- DOC_VALUES ^ MULTIVALUED)) {
+ for (int props :
+ Arrays.asList(DOC_VALUES ^ MULTIVALUED ^ UNINVERTIBLE, DOC_VALUES ^ MULTIVALUED)) {
SchemaField field = new SchemaField("field", new TrieIntField(), props, null);
// default value is FC
for (int mincount : Arrays.asList(0, 1)) { // behavior should be independent of mincount
assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, null, mincount));
- assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.ENUM, mincount));
- assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FCS, mincount));
- assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FC, mincount));
-
+ assertEquals(
+ FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.ENUM, mincount));
+ assertEquals(
+ FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FCS, mincount));
+ assertEquals(
+ FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FC, mincount));
+
// UIF only allowed if field is UNINVERTIBLE
- assertEquals(propsMatch(props, UNINVERTIBLE) ? FacetMethod.UIF : FacetMethod.FC,
- SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, mincount));
+ assertEquals(
+ propsMatch(props, UNINVERTIBLE) ?
FacetMethod.UIF : FacetMethod.FC, + SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, mincount)); } } } @@ -84,59 +90,68 @@ public void testNumericMultiValuedDV() { @Test public void testNumericSingleValuedNoDV() { - for (int props : Arrays.asList(0 ^ UNINVERTIBLE, - 0)) { + for (int props : Arrays.asList(0 ^ UNINVERTIBLE, 0)) { SchemaField field = new SchemaField("field", new TrieIntField(), props, null); // FCS is used by default for most requested methods other then UIF -- regardless of mincount for (int mincount : Arrays.asList(0, 1)) { assertEquals(FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, null, mincount)); - assertEquals(FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.ENUM, mincount)); - assertEquals(FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.FC, mincount)); - assertEquals(FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.FCS, mincount)); + assertEquals( + FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.ENUM, mincount)); + assertEquals( + FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.FC, mincount)); + assertEquals( + FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.FCS, mincount)); } // UIF allowed only if UNINVERTIBLE *AND* mincount > 0 assertEquals(FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, 0)); - assertEquals(propsMatch(props, UNINVERTIBLE) ? FacetMethod.UIF : FacetMethod.FCS, - SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, 1)); + assertEquals( + propsMatch(props, UNINVERTIBLE) ? FacetMethod.UIF : FacetMethod.FCS, + SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, 1)); } } @Test public void testNumericMultiValuedNoDV() { - for (int props : Arrays.asList(MULTIVALUED ^ UNINVERTIBLE, - MULTIVALUED)) { + for (int props : Arrays.asList(MULTIVALUED ^ UNINVERTIBLE, MULTIVALUED)) { SchemaField field = new SchemaField("field", new TrieIntField(), props, null); // FC is used by default for most requested methods other then UIF -- regardless of mincount for (int mincount : Arrays.asList(0, 1)) { assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, null, mincount)); - assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.ENUM, mincount)); - assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FC, mincount)); - assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FCS, mincount)); + assertEquals( + FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.ENUM, mincount)); + assertEquals( + FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FC, mincount)); + assertEquals( + FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FCS, mincount)); } // UIF allowed only if UNINVERTIBLE *AND* mincount > 0 assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, 0)); - assertEquals(propsMatch(props, UNINVERTIBLE) ? FacetMethod.UIF : FacetMethod.FC, - SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, 1)); + assertEquals( + propsMatch(props, UNINVERTIBLE) ? 
FacetMethod.UIF : FacetMethod.FC, + SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, 1)); } } @Test public void testStringSingleValuedDV() { - for (int props : Arrays.asList(DOC_VALUES ^ UNINVERTIBLE, - DOC_VALUES)) { + for (int props : Arrays.asList(DOC_VALUES ^ UNINVERTIBLE, DOC_VALUES)) { SchemaField field = new SchemaField("field", new StrField(), props, null); // default is FC, otherwise just uses the passed-in method as is unless UIF... for (int mincount : Arrays.asList(0, 1)) { // behavior should be independent of mincount assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, null, mincount)); - assertEquals(FacetMethod.ENUM, SimpleFacets.selectFacetMethod(field, FacetMethod.ENUM, mincount)); - assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FC, mincount)); - assertEquals(FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.FCS, mincount)); + assertEquals( + FacetMethod.ENUM, SimpleFacets.selectFacetMethod(field, FacetMethod.ENUM, mincount)); + assertEquals( + FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FC, mincount)); + assertEquals( + FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.FCS, mincount)); // UIF only allowed if field is UNINVERTIBLE - assertEquals(propsMatch(props, UNINVERTIBLE) ? FacetMethod.UIF : FacetMethod.FCS, - SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, mincount)); + assertEquals( + propsMatch(props, UNINVERTIBLE) ? FacetMethod.UIF : FacetMethod.FCS, + SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, mincount)); } } } @@ -144,19 +159,23 @@ public void testStringSingleValuedDV() { @Test public void testStringMultiValuedDV() { - for (int props : Arrays.asList(MULTIVALUED ^ DOC_VALUES ^ UNINVERTIBLE, - MULTIVALUED ^ DOC_VALUES)) { + for (int props : + Arrays.asList(MULTIVALUED ^ DOC_VALUES ^ UNINVERTIBLE, MULTIVALUED ^ DOC_VALUES)) { SchemaField field = new SchemaField("field", new StrField(), props, null); // default is FC, can't use FCS because of multivalues... for (int mincount : Arrays.asList(0, 1)) { // behavior should be independent of mincount assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, null, mincount)); - assertEquals(FacetMethod.ENUM, SimpleFacets.selectFacetMethod(field, FacetMethod.ENUM, mincount)); - assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FC, mincount)); - assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FCS, mincount)); + assertEquals( + FacetMethod.ENUM, SimpleFacets.selectFacetMethod(field, FacetMethod.ENUM, mincount)); + assertEquals( + FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FC, mincount)); + assertEquals( + FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FCS, mincount)); // UIF only allowed if field is UNINVERTIBLE - assertEquals(propsMatch(props, UNINVERTIBLE) ? FacetMethod.UIF : FacetMethod.FC, - SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, mincount)); + assertEquals( + propsMatch(props, UNINVERTIBLE) ? 
FacetMethod.UIF : FacetMethod.FC, + SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, mincount)); } } } @@ -164,42 +183,48 @@ public void testStringMultiValuedDV() { @Test public void testStringSingleValuedNoDV() { - for (int props : Arrays.asList(0 ^ UNINVERTIBLE, - 0)) { + for (int props : Arrays.asList(0 ^ UNINVERTIBLE, 0)) { SchemaField field = new SchemaField("field", new StrField(), props, null); // default is FC, otherwise just uses the passed-in method as is unless UIF... for (int mincount : Arrays.asList(0, 1)) { // behavior should be independent of mincount assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, null, mincount)); - assertEquals(FacetMethod.ENUM, SimpleFacets.selectFacetMethod(field, FacetMethod.ENUM, mincount)); - assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FC, mincount)); - assertEquals(FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.FCS, mincount)); + assertEquals( + FacetMethod.ENUM, SimpleFacets.selectFacetMethod(field, FacetMethod.ENUM, mincount)); + assertEquals( + FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FC, mincount)); + assertEquals( + FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.FCS, mincount)); } // UIF allowed only if UNINVERTIBLE *AND* mincount > 0 assertEquals(FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, 0)); - assertEquals(propsMatch(props, UNINVERTIBLE) ? FacetMethod.UIF : FacetMethod.FCS, - SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, 1)); + assertEquals( + propsMatch(props, UNINVERTIBLE) ? FacetMethod.UIF : FacetMethod.FCS, + SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, 1)); } } @Test public void testStringMultiValuedNoDV() { - for (int props : Arrays.asList(MULTIVALUED ^ UNINVERTIBLE, - MULTIVALUED)) { + for (int props : Arrays.asList(MULTIVALUED ^ UNINVERTIBLE, MULTIVALUED)) { SchemaField field = new SchemaField("field", new StrField(), props, null); // default is FC, can't use FCS because of multivalues... for (int mincount : Arrays.asList(0, 1)) { // behavior should be independent of mincount assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, null, mincount)); - assertEquals(FacetMethod.ENUM, SimpleFacets.selectFacetMethod(field, FacetMethod.ENUM, mincount)); - assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FC, mincount)); - assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FCS, mincount)); + assertEquals( + FacetMethod.ENUM, SimpleFacets.selectFacetMethod(field, FacetMethod.ENUM, mincount)); + assertEquals( + FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FC, mincount)); + assertEquals( + FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.FCS, mincount)); } // UIF allowed only if UNINVERTIBLE *AND* mincount > 0 assertEquals(FacetMethod.FC, SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, 0)); - assertEquals(propsMatch(props, UNINVERTIBLE) ? FacetMethod.UIF : FacetMethod.FC, - SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, 1)); + assertEquals( + propsMatch(props, UNINVERTIBLE) ? 
FacetMethod.UIF : FacetMethod.FC, + SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, 1)); } } @@ -207,31 +232,36 @@ public void testStringMultiValuedNoDV() { public void testBooleanDefaults() { // BoolField defaults to ENUM - for (int props : Arrays.asList(0 ^ UNINVERTIBLE, - 0)) { + for (int props : Arrays.asList(0 ^ UNINVERTIBLE, 0)) { SchemaField field = new SchemaField("field", new BoolField(), props, null); assertEquals(SimpleFacets.FacetMethod.ENUM, SimpleFacets.selectFacetMethod(field, null, 0)); assertEquals(SimpleFacets.FacetMethod.ENUM, SimpleFacets.selectFacetMethod(field, null, 1)); } } - + @Test public void testPointFields() { // Methods other than FCS are not currently supported for PointFields - for (int props : Arrays.asList(MULTIVALUED ^ DOC_VALUES ^ UNINVERTIBLE, - MULTIVALUED ^ DOC_VALUES, - MULTIVALUED ^ UNINVERTIBLE, - UNINVERTIBLE, - MULTIVALUED, - DOC_VALUES, - 0)) { + for (int props : + Arrays.asList( + MULTIVALUED ^ DOC_VALUES ^ UNINVERTIBLE, + MULTIVALUED ^ DOC_VALUES, + MULTIVALUED ^ UNINVERTIBLE, + UNINVERTIBLE, + MULTIVALUED, + DOC_VALUES, + 0)) { SchemaField field = new SchemaField("foo", new IntPointField(), props, null); for (int mincount : Arrays.asList(0, 1)) { assertEquals(FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, null, mincount)); - assertEquals(FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.ENUM, mincount)); - assertEquals(FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.FC, mincount)); - assertEquals(FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.FCS, mincount)); - assertEquals(FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, mincount)); + assertEquals( + FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.ENUM, mincount)); + assertEquals( + FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.FC, mincount)); + assertEquals( + FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.FCS, mincount)); + assertEquals( + FacetMethod.FCS, SimpleFacets.selectFacetMethod(field, FacetMethod.UIF, mincount)); } } } diff --git a/solr/core/src/test/org/apache/solr/request/TestFaceting.java b/solr/core/src/test/org/apache/solr/request/TestFaceting.java index 0e9284e0fd1..2021aaab01c 100644 --- a/solr/core/src/test/org/apache/solr/request/TestFaceting.java +++ b/solr/core/src/test/org/apache/solr/request/TestFaceting.java @@ -20,7 +20,6 @@ import java.util.List; import java.util.Locale; import java.util.Random; - import org.apache.lucene.index.DocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.Term; @@ -35,15 +34,14 @@ import org.junit.BeforeClass; import org.junit.Test; -/** - * - */ +/** */ public class TestFaceting extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); - initCore("solrconfig.xml","schema11.xml"); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); + initCore("solrconfig.xml", "schema11.xml"); } @Override @@ -62,29 +60,31 @@ public void tearDown() throws Exception { String t(int tnum) { return String.format(Locale.ROOT, "%08d", tnum); } - + void createIndex(int nTerms) { assertU(delQ("*:*")); - for (int i=0; i 0) { + assertEquals(size > 0, te.seekCeil(new BytesRef("000")) != TermsEnum.SeekStatus.END); assertEquals(0, 
te.ord()); assertEquals(t(0), te.term().utf8ToString()); } - if (size>0) { + if (size > 0) { // test seeking by term number - for (int i=0; i fields = new ArrayList<>(); @@ -287,14 +320,34 @@ public void testTrieFields() { assertU(adoc(fields.toArray(new String[0]))); assertU(commit()); for (String suffix : suffixes) { - for (String facetMethod : new String[] {FacetParams.FACET_METHOD_enum, FacetParams.FACET_METHOD_fc, FacetParams.FACET_METHOD_fcs, FacetParams.FACET_METHOD_uif}) { - for (String facetSort : new String[] {FacetParams.FACET_SORT_COUNT, FacetParams.FACET_SORT_INDEX}) { + for (String facetMethod : + new String[] { + FacetParams.FACET_METHOD_enum, + FacetParams.FACET_METHOD_fc, + FacetParams.FACET_METHOD_fcs, + FacetParams.FACET_METHOD_uif + }) { + for (String facetSort : + new String[] {FacetParams.FACET_SORT_COUNT, FacetParams.FACET_SORT_INDEX}) { for (String value : new String[] {"42", "43"}) { // match or not final String field = "f_" + suffix; final int num_constraints = ("42".equals(value)) ? 1 : 0; - assertQ("field=" + field + ",method=" + facetMethod + ",sort=" + facetSort, - req("q", field + ":" + value, FacetParams.FACET, "true", FacetParams.FACET_FIELD, field, FacetParams.FACET_MINCOUNT, "1", FacetParams.FACET_SORT, facetSort, FacetParams.FACET_METHOD, facetMethod), - "*[count(//lst[@name='" + field + "']/int)="+num_constraints+"]"); + assertQ( + "field=" + field + ",method=" + facetMethod + ",sort=" + facetSort, + req( + "q", + field + ":" + value, + FacetParams.FACET, + "true", + FacetParams.FACET_FIELD, + field, + FacetParams.FACET_MINCOUNT, + "1", + FacetParams.FACET_SORT, + facetSort, + FacetParams.FACET_METHOD, + facetMethod), + "*[count(//lst[@name='" + field + "']/int)=" + num_constraints + "]"); } } } @@ -308,25 +361,73 @@ public void testFacetSortWithMinCount() { assertU(adoc("id", "3", "f_td", "-1.218")); assertU(commit()); - assertQ(req("q", "*:*", FacetParams.FACET, "true", FacetParams.FACET_FIELD, "f_td", "f.f_td.facet.sort", FacetParams.FACET_SORT_INDEX), + assertQ( + req( + "q", + "*:*", + FacetParams.FACET, + "true", + FacetParams.FACET_FIELD, + "f_td", + "f.f_td.facet.sort", + FacetParams.FACET_SORT_INDEX), "*[count(//lst[@name='f_td']/int)=3]", "//lst[@name='facet_fields']/lst[@name='f_td']/int[1][@name='-420.126']", "//lst[@name='facet_fields']/lst[@name='f_td']/int[2][@name='-285.672']", "//lst[@name='facet_fields']/lst[@name='f_td']/int[3][@name='-1.218']"); - - assertQ(req("q", "*:*", FacetParams.FACET, "true", FacetParams.FACET_FIELD, "f_td", "f.f_td.facet.sort", FacetParams.FACET_SORT_INDEX, FacetParams.FACET_MINCOUNT, "1", FacetParams.FACET_METHOD, FacetParams.FACET_METHOD_fc), + + assertQ( + req( + "q", + "*:*", + FacetParams.FACET, + "true", + FacetParams.FACET_FIELD, + "f_td", + "f.f_td.facet.sort", + FacetParams.FACET_SORT_INDEX, + FacetParams.FACET_MINCOUNT, + "1", + FacetParams.FACET_METHOD, + FacetParams.FACET_METHOD_fc), "*[count(//lst[@name='f_td']/int)=3]", "//lst[@name='facet_fields']/lst[@name='f_td']/int[1][@name='-420.126']", "//lst[@name='facet_fields']/lst[@name='f_td']/int[2][@name='-285.672']", "//lst[@name='facet_fields']/lst[@name='f_td']/int[3][@name='-1.218']"); - assertQ(req("q", "*:*", FacetParams.FACET, "true", FacetParams.FACET_FIELD, "f_td", "f.f_td.facet.sort", FacetParams.FACET_SORT_INDEX, FacetParams.FACET_MINCOUNT, "1", FacetParams.FACET_METHOD, FacetParams.FACET_METHOD_uif), + assertQ( + req( + "q", + "*:*", + FacetParams.FACET, + "true", + FacetParams.FACET_FIELD, + "f_td", + "f.f_td.facet.sort", + 
FacetParams.FACET_SORT_INDEX, + FacetParams.FACET_MINCOUNT, + "1", + FacetParams.FACET_METHOD, + FacetParams.FACET_METHOD_uif), "*[count(//lst[@name='f_td']/int)=3]", "//lst[@name='facet_fields']/lst[@name='f_td']/int[1][@name='-420.126']", "//lst[@name='facet_fields']/lst[@name='f_td']/int[2][@name='-285.672']", "//lst[@name='facet_fields']/lst[@name='f_td']/int[3][@name='-1.218']"); - - assertQ(req("q", "*:*", FacetParams.FACET, "true", FacetParams.FACET_FIELD, "f_td", "f.f_td.facet.sort", FacetParams.FACET_SORT_INDEX, FacetParams.FACET_MINCOUNT, "1", "indent", "true"), + + assertQ( + req( + "q", + "*:*", + FacetParams.FACET, + "true", + FacetParams.FACET_FIELD, + "f_td", + "f.f_td.facet.sort", + FacetParams.FACET_SORT_INDEX, + FacetParams.FACET_MINCOUNT, + "1", + "indent", + "true"), "*[count(//lst[@name='f_td']/int)=3]", "//lst[@name='facet_fields']/lst[@name='f_td']/int[1][@name='-420.126']", "//lst[@name='facet_fields']/lst[@name='f_td']/int[2][@name='-285.672']", @@ -335,21 +436,49 @@ public void testFacetSortWithMinCount() { @Test public void testFacetSortWithMinCount0() { - assumeFalse("facet.mincount=0 doesn't work with point fields (SOLR-11174) or single valued DV", - Boolean.getBoolean(NUMERIC_POINTS_SYSPROP) || Boolean.getBoolean(NUMERIC_DOCVALUES_SYSPROP)); - + assumeFalse( + "facet.mincount=0 doesn't work with point fields (SOLR-11174) or single valued DV", + Boolean.getBoolean(NUMERIC_POINTS_SYSPROP) + || Boolean.getBoolean(NUMERIC_DOCVALUES_SYSPROP)); + assertU(adoc("id", "1", "f_td", "-420.126")); assertU(adoc("id", "2", "f_td", "-285.672")); assertU(adoc("id", "3", "f_td", "-1.218")); assertU(commit()); - assertQ(req("q", "id:1.0", FacetParams.FACET, "true", FacetParams.FACET_FIELD, "f_td", "f.f_td.facet.sort", FacetParams.FACET_SORT_INDEX, FacetParams.FACET_MINCOUNT, "0", FacetParams.FACET_METHOD, FacetParams.FACET_METHOD_fc), + assertQ( + req( + "q", + "id:1.0", + FacetParams.FACET, + "true", + FacetParams.FACET_FIELD, + "f_td", + "f.f_td.facet.sort", + FacetParams.FACET_SORT_INDEX, + FacetParams.FACET_MINCOUNT, + "0", + FacetParams.FACET_METHOD, + FacetParams.FACET_METHOD_fc), "*[count(//lst[@name='f_td']/int)=3]", "//lst[@name='facet_fields']/lst[@name='f_td']/int[1][@name='-420.126']", "//lst[@name='facet_fields']/lst[@name='f_td']/int[2][@name='-285.672']", "//lst[@name='facet_fields']/lst[@name='f_td']/int[3][@name='-1.218']"); - assertQ(req("q", "id:1.0", FacetParams.FACET, "true", FacetParams.FACET_FIELD, "f_td", "f.f_td.facet.sort", FacetParams.FACET_SORT_INDEX, FacetParams.FACET_MINCOUNT, "0", FacetParams.FACET_METHOD, FacetParams.FACET_METHOD_uif), + assertQ( + req( + "q", + "id:1.0", + FacetParams.FACET, + "true", + FacetParams.FACET_FIELD, + "f_td", + "f.f_td.facet.sort", + FacetParams.FACET_SORT_INDEX, + FacetParams.FACET_MINCOUNT, + "0", + FacetParams.FACET_METHOD, + FacetParams.FACET_METHOD_uif), "*[count(//lst[@name='f_td']/int)=3]", "//lst[@name='facet_fields']/lst[@name='f_td']/int[1][@name='-420.126']", "//lst[@name='facet_fields']/lst[@name='f_td']/int[2][@name='-285.672']", @@ -358,233 +487,292 @@ public void testFacetSortWithMinCount0() { @Test public void testFacetOverPointFieldWithMinCount0() { - String field = "f_" + new String[]{"i","l","f","d"}[random().nextInt(4)] + "_p"; - String expectedWarning = "Raising facet.mincount from 0 to 1, because field " + field + " is Points-based."; - SolrQueryRequest req = req("q", "id:1.0", - FacetParams.FACET, "true", - FacetParams.FACET_FIELD, field, - FacetParams.FACET_MINCOUNT, "0"); - assertQ(req - , 
"/response/lst[@name='responseHeader']/arr[@name='warnings']/str[.='" + expectedWarning + "']"); - - field = "f_" + new String[]{"is","ls","fs","ds"}[random().nextInt(4)] + "_p"; - expectedWarning = "Raising facet.mincount from 0 to 1, because field " + field + " is Points-based."; - req = req("q", "id:1.0", - FacetParams.FACET, "true", - FacetParams.FACET_FIELD, field, - FacetParams.FACET_MINCOUNT, "0"); - assertQ(req - , "/response/lst[@name='responseHeader']/arr[@name='warnings']/str[.='" + expectedWarning + "']"); + String field = "f_" + new String[] {"i", "l", "f", "d"}[random().nextInt(4)] + "_p"; + String expectedWarning = + "Raising facet.mincount from 0 to 1, because field " + field + " is Points-based."; + SolrQueryRequest req = + req( + "q", + "id:1.0", + FacetParams.FACET, + "true", + FacetParams.FACET_FIELD, + field, + FacetParams.FACET_MINCOUNT, + "0"); + assertQ( + req, + "/response/lst[@name='responseHeader']/arr[@name='warnings']/str[.='" + + expectedWarning + + "']"); + + field = "f_" + new String[] {"is", "ls", "fs", "ds"}[random().nextInt(4)] + "_p"; + expectedWarning = + "Raising facet.mincount from 0 to 1, because field " + field + " is Points-based."; + req = + req( + "q", + "id:1.0", + FacetParams.FACET, + "true", + FacetParams.FACET_FIELD, + field, + FacetParams.FACET_MINCOUNT, + "0"); + assertQ( + req, + "/response/lst[@name='responseHeader']/arr[@name='warnings']/str[.='" + + expectedWarning + + "']"); } public void testSimpleFacetCountsWithMultipleConfigurationsForSameField() { - clearIndex(); - String fname = "trait_ss"; - assertU(adoc("id", "42", - fname, "Tool", - fname, "Obnoxious", - "name_s", "Zapp Brannigan")); - - assertU(adoc("id", "43" , - "title_s", "Democratic Order of Planets")); - assertU(commit()); - - assertU(adoc("id", "44", - fname, "Tool", - "name_s", "The Zapper")); - - assertU(adoc("id", "45", - fname, "Chauvinist", - "title_s", "25 star General")); - - assertU(adoc("id", "46", - fname, "Obnoxious", - "subject_s", "Defeated the pacifists of the Gandhi nebula")); - - assertU(commit()); - - assertU(adoc("id", "47", - fname, "Pig", - "text_t", "line up and fly directly at the enemy death cannons, clogging them with wreckage!")); - assertU(commit()); - - for(String [] methodParam: new String[][]{ new String[]{}, new String []{"facet.method", "uif"}}) { - assertQ("checking facets when one has missing=true&mincount=2 and the other has missing=false&mincount=0", - req(methodParam - , "q", "id:[42 TO 47]" - ,"facet", "true" - ,"facet.zeros", "false" - ,"fq", "id:[42 TO 45]" - ,"facet.field", "{!key=foo " + - "facet.mincount=0 "+ - "facet.missing=false "+ - "}"+fname - ,"facet.field", "{!key=bar " + - "facet.mincount=2 "+ - "facet.missing=true "+ - "}"+fname - ) - ,"*[count(//doc)=4]" - ,"*[count(//lst[@name='foo']/int)=4]" - ,"*[count(//lst[@name='bar']/int)=2]" - ,"//lst[@name='foo']/int[@name='Tool'][.='2']" - ,"//lst[@name='foo']/int[@name='Obnoxious'][.='1']" - ,"//lst[@name='foo']/int[@name='Chauvinist'][.='1']" - ,"//lst[@name='foo']/int[@name='Pig'][.='0']" - ,"//lst[@name='foo']/int[@name='Tool'][.='2']" - ,"//lst[@name='bar']/int[not(@name)][.='1']" - ); - - assertQforUIF("checking facets when one has missing=true&mincount=2 and the other has missing=false&mincount=0", - req(methodParam - ,"q", "id:[42 TO 47]" - ,"facet", "true" - ,"facet.zeros", "false" - ,"fq", "id:[42 TO 45]" - ,"facet.field", "{!key=foo " + - "facet.prefix=Too "+ - "}"+fname - ,"facet.field", "{!key=bar " + - "facet.limit=2 "+ - "facet.sort=false "+ - "}"+fname - ) - 
,"*[count(//doc)=4]" - ,"*[count(//lst[@name='foo']/int)=1]" - ,"*[count(//lst[@name='bar']/int)=2]" - ,"//lst[@name='foo']/int[@name='Tool'][.='2']" - ,"//lst[@name='bar']/int[@name='Chauvinist'][.='1']" - ,"//lst[@name='bar']/int[@name='Obnoxious'][.='1']" - ); - - assertQ("localparams in one facet variant should not affect defaults in another: facet.sort vs facet.missing", - req(methodParam - ,"q", "id:[42 TO 47]" - ,"rows","0" - ,"facet", "true" - ,"fq", "id:[42 TO 45]" - ,"facet.field", "{!key=foo " + - "facet.sort=index" + - "}"+fname - ,"facet.field", "{!key=bar " + - "facet.missing=true" + - "}"+fname - ) - // foo is in index order w/o missing - ,"*[count(//lst[@name='foo']/int)=4]" - ,"//lst[@name='foo']/int[1][@name='Chauvinist'][.='1']" - ,"//lst[@name='foo']/int[2][@name='Obnoxious'][.='1']" - ,"//lst[@name='foo']/int[3][@name='Pig'][.='0']" - ,"//lst[@name='foo']/int[4][@name='Tool'][.='2']" - // bar is in count order by default and includes missing - ,"*[count(//lst[@name='bar']/int)=5]" - ,"//lst[@name='bar']/int[1][@name='Tool'][.='2']" - // don't assume tie breaker for slots 3 & 4, behavior undefined? - ,"//lst[@name='bar']/int[4][@name='Pig'][.='0']" - ,"//lst[@name='bar']/int[5][not(@name)][.='1']" - ); - - assertQ("localparams in one facet variant should not affect defaults in another: facet.mincount", - req(methodParam - ,"q", "id:[42 TO 47]" - ,"rows","0" - ,"facet", "true" - ,"fq", "id:[42 TO 45]" - ,"facet.field", "{!key=foo " + - "facet.mincount=2" + - "}"+fname - ,"facet.field", "{!key=bar}"+fname - ) - // only Tool for foo - ,"*[count(//lst[@name='foo']/int)=1]" - ,"//lst[@name='foo']/int[1][@name='Tool'][.='2']" - // all for bar - ,"*[count(//lst[@name='bar']/int)=4]" - ,"//lst[@name='bar']/int[1][@name='Tool'][.='2']" - // don't assume tie breaker for slots 3 & 4, behavior undefined? - ,"//lst[@name='bar']/int[4][@name='Pig'][.='0']" - ); - - assertQ("localparams in one facet variant should not affect defaults in another: facet.missing", - req(methodParam - ,"q", "id:[42 TO 47]" - ,"rows","0" - ,"facet", "true" - ,"fq", "id:[42 TO 45]" - ,"facet.field", "{!key=foo " + - "facet.missing=true" + - "}"+fname - ,"facet.field", "{!key=bar}"+fname - ) - // foo includes missing - ,"*[count(//lst[@name='foo']/int)=5]" - ,"//lst[@name='foo']/int[1][@name='Tool'][.='2']" - // don't assume tie breaker for slots 3 & 4, behavior undefined? - ,"//lst[@name='foo']/int[4][@name='Pig'][.='0']" - ,"//lst[@name='foo']/int[5][not(@name)][.='1']" - // bar does not - ,"*[count(//lst[@name='bar']/int)=4]" - ,"//lst[@name='bar']/int[1][@name='Tool'][.='2']" - // don't assume tie breaker for slots 3 & 4, behavior undefined? 
- ,"//lst[@name='bar']/int[4][@name='Pig'][.='0']" - ); - - assertQforUIF("checking facets when local facet.prefix param used after regular/raw field faceting", - req(methodParam - ,"q", "*:*" - ,"facet", "true" - ,"facet.field", fname - ,"facet.field", "{!key=foo " + - "facet.prefix=T "+ - "}"+fname - ) - ,"*[count(//doc)=6]" - ,"*[count(//lst[@name='" + fname + "']/int)=4]" - ,"*[count(//lst[@name='foo']/int)=1]" - ,"//lst[@name='foo']/int[@name='Tool'][.='2']" - ); - - assertQforUIF("checking facets when local facet.prefix param used before regular/raw field faceting", - req(methodParam - ,"q", "*:*" - ,"facet", "true" - ,"facet.field", "{!key=foo " + - "facet.prefix=T "+ - "}"+fname - ,"facet.field", fname - ) - ,"*[count(//doc)=6]" - ,"*[count(//lst[@name='" + fname + "']/int)=4]" - ,"*[count(//lst[@name='foo']/int)=1]" - ,"//lst[@name='foo']/int[@name='Tool'][.='2']" - ); - } + clearIndex(); + String fname = "trait_ss"; + assertU(adoc("id", "42", fname, "Tool", fname, "Obnoxious", "name_s", "Zapp Brannigan")); - final String foo_range_facet = "{!key=foo facet.range.gap=2}val_i"; - final String val_range_facet = "val_i"; - for (boolean toggle : new boolean[] { true, false }) { - assertQ("local gap param mixed w/raw range faceting: " + toggle, - req("q", "*:*" - ,"facet", "true" - ,"rows", "0" - ,"facet.range.start", "0" - ,"facet.range.end", "10" - ,"facet.range.gap", "1" - ,"facet.range", (toggle ? foo_range_facet : val_range_facet) - ,"facet.range", (toggle ? val_range_facet : foo_range_facet) - ) - ,"*[count(//lst[@name='val_i']/lst[@name='counts']/int)=10]" - ,"*[count(//lst[@name='foo']/lst[@name='counts']/int)=5]" - ); - } + assertU( + adoc( + "id", "43", + "title_s", "Democratic Order of Planets")); + assertU(commit()); - clearIndex(); - assertU(commit()); + assertU(adoc("id", "44", fname, "Tool", "name_s", "The Zapper")); + + assertU(adoc("id", "45", fname, "Chauvinist", "title_s", "25 star General")); + + assertU( + adoc( + "id", + "46", + fname, + "Obnoxious", + "subject_s", + "Defeated the pacifists of the Gandhi nebula")); + + assertU(commit()); + + assertU( + adoc( + "id", + "47", + fname, + "Pig", + "text_t", + "line up and fly directly at the enemy death cannons, clogging them with wreckage!")); + assertU(commit()); + + for (String[] methodParam : + new String[][] {new String[] {}, new String[] {"facet.method", "uif"}}) { + assertQ( + "checking facets when one has missing=true&mincount=2 and the other has missing=false&mincount=0", + req( + methodParam, + "q", + "id:[42 TO 47]", + "facet", + "true", + "facet.zeros", + "false", + "fq", + "id:[42 TO 45]", + "facet.field", + "{!key=foo " + "facet.mincount=0 " + "facet.missing=false " + "}" + fname, + "facet.field", + "{!key=bar " + "facet.mincount=2 " + "facet.missing=true " + "}" + fname), + "*[count(//doc)=4]", + "*[count(//lst[@name='foo']/int)=4]", + "*[count(//lst[@name='bar']/int)=2]", + "//lst[@name='foo']/int[@name='Tool'][.='2']", + "//lst[@name='foo']/int[@name='Obnoxious'][.='1']", + "//lst[@name='foo']/int[@name='Chauvinist'][.='1']", + "//lst[@name='foo']/int[@name='Pig'][.='0']", + "//lst[@name='foo']/int[@name='Tool'][.='2']", + "//lst[@name='bar']/int[not(@name)][.='1']"); + + assertQforUIF( + "checking facets when one has missing=true&mincount=2 and the other has missing=false&mincount=0", + req( + methodParam, + "q", + "id:[42 TO 47]", + "facet", + "true", + "facet.zeros", + "false", + "fq", + "id:[42 TO 45]", + "facet.field", + "{!key=foo " + "facet.prefix=Too " + "}" + fname, + "facet.field", + 
"{!key=bar " + "facet.limit=2 " + "facet.sort=false " + "}" + fname), + "*[count(//doc)=4]", + "*[count(//lst[@name='foo']/int)=1]", + "*[count(//lst[@name='bar']/int)=2]", + "//lst[@name='foo']/int[@name='Tool'][.='2']", + "//lst[@name='bar']/int[@name='Chauvinist'][.='1']", + "//lst[@name='bar']/int[@name='Obnoxious'][.='1']"); + + assertQ( + "localparams in one facet variant should not affect defaults in another: facet.sort vs facet.missing", + req( + methodParam, + "q", + "id:[42 TO 47]", + "rows", + "0", + "facet", + "true", + "fq", + "id:[42 TO 45]", + "facet.field", + "{!key=foo " + "facet.sort=index" + "}" + fname, + "facet.field", + "{!key=bar " + "facet.missing=true" + "}" + fname) + // foo is in index order w/o missing + , + "*[count(//lst[@name='foo']/int)=4]", + "//lst[@name='foo']/int[1][@name='Chauvinist'][.='1']", + "//lst[@name='foo']/int[2][@name='Obnoxious'][.='1']", + "//lst[@name='foo']/int[3][@name='Pig'][.='0']", + "//lst[@name='foo']/int[4][@name='Tool'][.='2']" + // bar is in count order by default and includes missing + , + "*[count(//lst[@name='bar']/int)=5]", + "//lst[@name='bar']/int[1][@name='Tool'][.='2']" + // don't assume tie breaker for slots 3 & 4, behavior undefined? + , + "//lst[@name='bar']/int[4][@name='Pig'][.='0']", + "//lst[@name='bar']/int[5][not(@name)][.='1']"); + + assertQ( + "localparams in one facet variant should not affect defaults in another: facet.mincount", + req( + methodParam, + "q", + "id:[42 TO 47]", + "rows", + "0", + "facet", + "true", + "fq", + "id:[42 TO 45]", + "facet.field", + "{!key=foo " + "facet.mincount=2" + "}" + fname, + "facet.field", + "{!key=bar}" + fname) + // only Tool for foo + , + "*[count(//lst[@name='foo']/int)=1]", + "//lst[@name='foo']/int[1][@name='Tool'][.='2']" + // all for bar + , + "*[count(//lst[@name='bar']/int)=4]", + "//lst[@name='bar']/int[1][@name='Tool'][.='2']" + // don't assume tie breaker for slots 3 & 4, behavior undefined? + , + "//lst[@name='bar']/int[4][@name='Pig'][.='0']"); + + assertQ( + "localparams in one facet variant should not affect defaults in another: facet.missing", + req( + methodParam, + "q", + "id:[42 TO 47]", + "rows", + "0", + "facet", + "true", + "fq", + "id:[42 TO 45]", + "facet.field", + "{!key=foo " + "facet.missing=true" + "}" + fname, + "facet.field", + "{!key=bar}" + fname) + // foo includes missing + , + "*[count(//lst[@name='foo']/int)=5]", + "//lst[@name='foo']/int[1][@name='Tool'][.='2']" + // don't assume tie breaker for slots 3 & 4, behavior undefined? + , + "//lst[@name='foo']/int[4][@name='Pig'][.='0']", + "//lst[@name='foo']/int[5][not(@name)][.='1']" + // bar does not + , + "*[count(//lst[@name='bar']/int)=4]", + "//lst[@name='bar']/int[1][@name='Tool'][.='2']" + // don't assume tie breaker for slots 3 & 4, behavior undefined? 
+ , + "//lst[@name='bar']/int[4][@name='Pig'][.='0']"); + + assertQforUIF( + "checking facets when local facet.prefix param used after regular/raw field faceting", + req( + methodParam, + "q", + "*:*", + "facet", + "true", + "facet.field", + fname, + "facet.field", + "{!key=foo " + "facet.prefix=T " + "}" + fname), + "*[count(//doc)=6]", + "*[count(//lst[@name='" + fname + "']/int)=4]", + "*[count(//lst[@name='foo']/int)=1]", + "//lst[@name='foo']/int[@name='Tool'][.='2']"); + + assertQforUIF( + "checking facets when local facet.prefix param used before regular/raw field faceting", + req( + methodParam, + "q", + "*:*", + "facet", + "true", + "facet.field", + "{!key=foo " + "facet.prefix=T " + "}" + fname, + "facet.field", + fname), + "*[count(//doc)=6]", + "*[count(//lst[@name='" + fname + "']/int)=4]", + "*[count(//lst[@name='foo']/int)=1]", + "//lst[@name='foo']/int[@name='Tool'][.='2']"); + } + + final String foo_range_facet = "{!key=foo facet.range.gap=2}val_i"; + final String val_range_facet = "val_i"; + for (boolean toggle : new boolean[] {true, false}) { + assertQ( + "local gap param mixed w/raw range faceting: " + toggle, + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.range.start", + "0", + "facet.range.end", + "10", + "facet.range.gap", + "1", + "facet.range", + (toggle ? foo_range_facet : val_range_facet), + "facet.range", + (toggle ? val_range_facet : foo_range_facet)), + "*[count(//lst[@name='val_i']/lst[@name='counts']/int)=10]", + "*[count(//lst[@name='foo']/lst[@name='counts']/int)=5]"); + } + + clearIndex(); + assertU(commit()); } - - private void assertQforUIF(String message, SolrQueryRequest request, String ... tests) { + + private void assertQforUIF(String message, SolrQueryRequest request, String... tests) { // handle any differences for uif here, like skipping unsupported options - assertQ(message,request, tests); + assertQ(message, request, tests); } private void add50ocs() { @@ -600,307 +788,508 @@ private void add50ocs() { String f7 = (idx % 9 == 0) ? "seven_9" : "seven_1"; String f8 = (idx % 10 == 0) ? "eight_10" : "eight_1"; String f9 = (idx % 11 == 0) ? "nine_11" : "nine_1"; - assertU(adoc("id", Integer.toString(idx), - "f0_ws", f0, - "f1_ws", f1, - "f2_ws", f2, - "f3_ws", f3, - "f4_ws", f4, - "f5_ws", f5, - "f6_ws", f6, - "f7_ws", f7, - "f8_ws", f8, - "f9_ws", f9 - )); + assertU( + adoc( + "id", + Integer.toString(idx), + "f0_ws", + f0, + "f1_ws", + f1, + "f2_ws", + f2, + "f3_ws", + f3, + "f4_ws", + f4, + "f5_ws", + f5, + "f6_ws", + f6, + "f7_ws", + f7, + "f8_ws", + f8, + "f9_ws", + f9)); } assertU(commit()); - } @Test public void testThreadWait() throws Exception { add50ocs(); - String[] methodParam = random().nextBoolean() ? new String[]{} : new String[]{"facet.method","uif"} ; - - // All I really care about here is the chance to fire off a bunch of threads to the UnIninvertedField.get method - // to insure that we get into/out of the lock. Again, it's not entirely deterministic, but it might catch bad - // stuff occasionally... 
- assertQ("check threading, more threads than fields", - req(methodParam - , "q", "id:*", "indent", "true", "fl", "id", "rows", "1" - , "facet", "true" - , "facet.field", "f0_ws" - , "facet.field", "f0_ws" - , "facet.field", "f0_ws" - , "facet.field", "f0_ws" - , "facet.field", "f0_ws" - , "facet.field", "f1_ws" - , "facet.field", "f1_ws" - , "facet.field", "f1_ws" - , "facet.field", "f1_ws" - , "facet.field", "f1_ws" - , "facet.field", "f2_ws" - , "facet.field", "f2_ws" - , "facet.field", "f2_ws" - , "facet.field", "f2_ws" - , "facet.field", "f2_ws" - , "facet.field", "f3_ws" - , "facet.field", "f3_ws" - , "facet.field", "f3_ws" - , "facet.field", "f3_ws" - , "facet.field", "f3_ws" - , "facet.field", "f4_ws" - , "facet.field", "f4_ws" - , "facet.field", "f4_ws" - , "facet.field", "f4_ws" - , "facet.field", "f4_ws" - , "facet.field", "f5_ws" - , "facet.field", "f5_ws" - , "facet.field", "f5_ws" - , "facet.field", "f5_ws" - , "facet.field", "f5_ws" - , "facet.field", "f6_ws" - , "facet.field", "f6_ws" - , "facet.field", "f6_ws" - , "facet.field", "f6_ws" - , "facet.field", "f6_ws" - , "facet.field", "f7_ws" - , "facet.field", "f7_ws" - , "facet.field", "f7_ws" - , "facet.field", "f7_ws" - , "facet.field", "f7_ws" - , "facet.field", "f8_ws" - , "facet.field", "f8_ws" - , "facet.field", "f8_ws" - , "facet.field", "f8_ws" - , "facet.field", "f8_ws" - , "facet.field", "f9_ws" - , "facet.field", "f9_ws" - , "facet.field", "f9_ws" - , "facet.field", "f9_ws" - , "facet.field", "f9_ws" - , "facet.threads", "1000" - , "facet.limit", "-1" - ) - , "*[count(//lst[@name='facet_fields']/lst)=10]" - , "*[count(//lst[@name='facet_fields']/lst/int)=20]" - ); - + String[] methodParam = + random().nextBoolean() ? new String[] {} : new String[] {"facet.method", "uif"}; + + // All I really care about here is the chance to fire off a bunch of threads to the + // UnIninvertedField.get method to insure that we get into/out of the lock. Again, it's not + // entirely deterministic, but it might catch bad stuff occasionally... 
+ assertQ( + "check threading, more threads than fields", + req( + methodParam, + "q", + "id:*", + "indent", + "true", + "fl", + "id", + "rows", + "1", + "facet", + "true", + "facet.field", + "f0_ws", + "facet.field", + "f0_ws", + "facet.field", + "f0_ws", + "facet.field", + "f0_ws", + "facet.field", + "f0_ws", + "facet.field", + "f1_ws", + "facet.field", + "f1_ws", + "facet.field", + "f1_ws", + "facet.field", + "f1_ws", + "facet.field", + "f1_ws", + "facet.field", + "f2_ws", + "facet.field", + "f2_ws", + "facet.field", + "f2_ws", + "facet.field", + "f2_ws", + "facet.field", + "f2_ws", + "facet.field", + "f3_ws", + "facet.field", + "f3_ws", + "facet.field", + "f3_ws", + "facet.field", + "f3_ws", + "facet.field", + "f3_ws", + "facet.field", + "f4_ws", + "facet.field", + "f4_ws", + "facet.field", + "f4_ws", + "facet.field", + "f4_ws", + "facet.field", + "f4_ws", + "facet.field", + "f5_ws", + "facet.field", + "f5_ws", + "facet.field", + "f5_ws", + "facet.field", + "f5_ws", + "facet.field", + "f5_ws", + "facet.field", + "f6_ws", + "facet.field", + "f6_ws", + "facet.field", + "f6_ws", + "facet.field", + "f6_ws", + "facet.field", + "f6_ws", + "facet.field", + "f7_ws", + "facet.field", + "f7_ws", + "facet.field", + "f7_ws", + "facet.field", + "f7_ws", + "facet.field", + "f7_ws", + "facet.field", + "f8_ws", + "facet.field", + "f8_ws", + "facet.field", + "f8_ws", + "facet.field", + "f8_ws", + "facet.field", + "f8_ws", + "facet.field", + "f9_ws", + "facet.field", + "f9_ws", + "facet.field", + "f9_ws", + "facet.field", + "f9_ws", + "facet.field", + "f9_ws", + "facet.threads", + "1000", + "facet.limit", + "-1"), + "*[count(//lst[@name='facet_fields']/lst)=10]", + "*[count(//lst[@name='facet_fields']/lst/int)=20]"); } @Test public void testMultiThreadedFacets() throws Exception { add50ocs(); - - String[] methodParam = random().nextBoolean() ? 
new String[]{} : new String[]{"facet.method","uif"} ; - - assertQ("check no threading, threads == 0", - req(methodParam - , "q", "id:*", "indent", "true", "fl", "id", "rows", "1" - , "facet", "true" - , "facet.field", "f0_ws" - , "facet.field", "f1_ws" - , "facet.field", "f2_ws" - , "facet.field", "f3_ws" - , "facet.field", "f4_ws" - , "facet.field", "f5_ws" - , "facet.field", "f6_ws" - , "facet.field", "f7_ws" - , "facet.field", "f8_ws" - , "facet.field", "f9_ws" - , "facet.threads", "0" - , "facet.limit", "-1" - ) - , "*[count(//lst[@name='facet_fields']/lst)=10]" - , "*[count(//lst[@name='facet_fields']/lst/int)=20]" - , "//lst[@name='f0_ws']/int[@name='zero_1'][.='25']" - , "//lst[@name='f0_ws']/int[@name='zero_2'][.='25']" - , "//lst[@name='f1_ws']/int[@name='one_1'][.='33']" - , "//lst[@name='f1_ws']/int[@name='one_3'][.='17']" - , "//lst[@name='f2_ws']/int[@name='two_1'][.='37']" - , "//lst[@name='f2_ws']/int[@name='two_4'][.='13']" - , "//lst[@name='f3_ws']/int[@name='three_1'][.='40']" - , "//lst[@name='f3_ws']/int[@name='three_5'][.='10']" - , "//lst[@name='f4_ws']/int[@name='four_1'][.='41']" - , "//lst[@name='f4_ws']/int[@name='four_6'][.='9']" - , "//lst[@name='f5_ws']/int[@name='five_1'][.='42']" - , "//lst[@name='f5_ws']/int[@name='five_7'][.='8']" - , "//lst[@name='f6_ws']/int[@name='six_1'][.='43']" - , "//lst[@name='f6_ws']/int[@name='six_8'][.='7']" - , "//lst[@name='f7_ws']/int[@name='seven_1'][.='44']" - , "//lst[@name='f7_ws']/int[@name='seven_9'][.='6']" - , "//lst[@name='f8_ws']/int[@name='eight_1'][.='45']" - , "//lst[@name='f8_ws']/int[@name='eight_10'][.='5']" - , "//lst[@name='f9_ws']/int[@name='nine_1'][.='45']" - , "//lst[@name='f9_ws']/int[@name='nine_11'][.='5']" - - ); - - h.getCore().withSearcher(currentSearcher -> { - - SortedSetDocValues ui0 = DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f0_ws"); - SortedSetDocValues ui1 = DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f1_ws"); - SortedSetDocValues ui2 = DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f2_ws"); - SortedSetDocValues ui3 = DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f3_ws"); - SortedSetDocValues ui4 = DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f4_ws"); - SortedSetDocValues ui5 = DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f5_ws"); - SortedSetDocValues ui6 = DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f6_ws"); - SortedSetDocValues ui7 = DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f7_ws"); - SortedSetDocValues ui8 = DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f8_ws"); - SortedSetDocValues ui9 = DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f9_ws"); - - assertQ("check threading, more threads than fields", - req(methodParam - ,"q", "id:*", "indent", "true", "fl", "id", "rows", "1" - , "facet", "true" - , "facet.field", "f0_ws" - , "facet.field", "f1_ws" - , "facet.field", "f2_ws" - , "facet.field", "f3_ws" - , "facet.field", "f4_ws" - , "facet.field", "f5_ws" - , "facet.field", "f6_ws" - , "facet.field", "f7_ws" - , "facet.field", "f8_ws" - , "facet.field", "f9_ws" - , "facet.threads", "1000" - , "facet.limit", "-1" - ) - , "*[count(//lst[@name='facet_fields']/lst)=10]" - , "*[count(//lst[@name='facet_fields']/lst/int)=20]" - , "//lst[@name='f0_ws']/int[@name='zero_1'][.='25']" - , "//lst[@name='f0_ws']/int[@name='zero_2'][.='25']" - , "//lst[@name='f1_ws']/int[@name='one_1'][.='33']" - , 
"//lst[@name='f1_ws']/int[@name='one_3'][.='17']" - , "//lst[@name='f2_ws']/int[@name='two_1'][.='37']" - , "//lst[@name='f2_ws']/int[@name='two_4'][.='13']" - , "//lst[@name='f3_ws']/int[@name='three_1'][.='40']" - , "//lst[@name='f3_ws']/int[@name='three_5'][.='10']" - , "//lst[@name='f4_ws']/int[@name='four_1'][.='41']" - , "//lst[@name='f4_ws']/int[@name='four_6'][.='9']" - , "//lst[@name='f5_ws']/int[@name='five_1'][.='42']" - , "//lst[@name='f5_ws']/int[@name='five_7'][.='8']" - , "//lst[@name='f6_ws']/int[@name='six_1'][.='43']" - , "//lst[@name='f6_ws']/int[@name='six_8'][.='7']" - , "//lst[@name='f7_ws']/int[@name='seven_1'][.='44']" - , "//lst[@name='f7_ws']/int[@name='seven_9'][.='6']" - , "//lst[@name='f8_ws']/int[@name='eight_1'][.='45']" - , "//lst[@name='f8_ws']/int[@name='eight_10'][.='5']" - , "//lst[@name='f9_ws']/int[@name='nine_1'][.='45']" - , "//lst[@name='f9_ws']/int[@name='nine_11'][.='5']" - - ); - assertQ("check threading, fewer threads than fields", - req(methodParam - ,"q", "id:*", "indent", "true", "fl", "id", "rows", "1" - , "facet", "true" - , "facet.field", "f0_ws" - , "facet.field", "f1_ws" - , "facet.field", "f2_ws" - , "facet.field", "f3_ws" - , "facet.field", "f4_ws" - , "facet.field", "f5_ws" - , "facet.field", "f6_ws" - , "facet.field", "f7_ws" - , "facet.field", "f8_ws" - , "facet.field", "f9_ws" - , "facet.threads", "3" - , "facet.limit", "-1" - ) - , "*[count(//lst[@name='facet_fields']/lst)=10]" - , "*[count(//lst[@name='facet_fields']/lst/int)=20]" - , "//lst[@name='f0_ws']/int[@name='zero_1'][.='25']" - , "//lst[@name='f0_ws']/int[@name='zero_2'][.='25']" - , "//lst[@name='f1_ws']/int[@name='one_1'][.='33']" - , "//lst[@name='f1_ws']/int[@name='one_3'][.='17']" - , "//lst[@name='f2_ws']/int[@name='two_1'][.='37']" - , "//lst[@name='f2_ws']/int[@name='two_4'][.='13']" - , "//lst[@name='f3_ws']/int[@name='three_1'][.='40']" - , "//lst[@name='f3_ws']/int[@name='three_5'][.='10']" - , "//lst[@name='f4_ws']/int[@name='four_1'][.='41']" - , "//lst[@name='f4_ws']/int[@name='four_6'][.='9']" - , "//lst[@name='f5_ws']/int[@name='five_1'][.='42']" - , "//lst[@name='f5_ws']/int[@name='five_7'][.='8']" - , "//lst[@name='f6_ws']/int[@name='six_1'][.='43']" - , "//lst[@name='f6_ws']/int[@name='six_8'][.='7']" - , "//lst[@name='f7_ws']/int[@name='seven_1'][.='44']" - , "//lst[@name='f7_ws']/int[@name='seven_9'][.='6']" - , "//lst[@name='f8_ws']/int[@name='eight_1'][.='45']" - , "//lst[@name='f8_ws']/int[@name='eight_10'][.='5']" - , "//lst[@name='f9_ws']/int[@name='nine_1'][.='45']" - , "//lst[@name='f9_ws']/int[@name='nine_11'][.='5']" - - ); - - // After this all, the uninverted fields should be exactly the same as they were the first time, even if we - // blast a whole bunch of identical fields at the facet code. - // The way fetching the uninverted field is written, all this is really testing is if the cache is working. - // It's NOT testing whether the pending/sleep is actually functioning, I had to do that by hand since I don't - // see how to make sure that uninverting the field multiple times actually happens to hit the wait state. 
- assertQ("check threading, more threads than fields", - req(methodParam - ,"q", "id:*", "indent", "true", "fl", "id", "rows", "1" - , "facet", "true" - , "facet.field", "f0_ws" - , "facet.field", "f0_ws" - , "facet.field", "f0_ws" - , "facet.field", "f0_ws" - , "facet.field", "f0_ws" - , "facet.field", "f1_ws" - , "facet.field", "f1_ws" - , "facet.field", "f1_ws" - , "facet.field", "f1_ws" - , "facet.field", "f1_ws" - , "facet.field", "f2_ws" - , "facet.field", "f2_ws" - , "facet.field", "f2_ws" - , "facet.field", "f2_ws" - , "facet.field", "f2_ws" - , "facet.field", "f3_ws" - , "facet.field", "f3_ws" - , "facet.field", "f3_ws" - , "facet.field", "f3_ws" - , "facet.field", "f3_ws" - , "facet.field", "f4_ws" - , "facet.field", "f4_ws" - , "facet.field", "f4_ws" - , "facet.field", "f4_ws" - , "facet.field", "f4_ws" - , "facet.field", "f5_ws" - , "facet.field", "f5_ws" - , "facet.field", "f5_ws" - , "facet.field", "f5_ws" - , "facet.field", "f5_ws" - , "facet.field", "f6_ws" - , "facet.field", "f6_ws" - , "facet.field", "f6_ws" - , "facet.field", "f6_ws" - , "facet.field", "f6_ws" - , "facet.field", "f7_ws" - , "facet.field", "f7_ws" - , "facet.field", "f7_ws" - , "facet.field", "f7_ws" - , "facet.field", "f7_ws" - , "facet.field", "f8_ws" - , "facet.field", "f8_ws" - , "facet.field", "f8_ws" - , "facet.field", "f8_ws" - , "facet.field", "f8_ws" - , "facet.field", "f9_ws" - , "facet.field", "f9_ws" - , "facet.field", "f9_ws" - , "facet.field", "f9_ws" - , "facet.field", "f9_ws" - , "facet.threads", "1000" - , "facet.limit", "-1" - ) - , "*[count(//lst[@name='facet_fields']/lst)=10]" - , "*[count(//lst[@name='facet_fields']/lst/int)=20]" - ); - return null; - }); + + String[] methodParam = + random().nextBoolean() ? new String[] {} : new String[] {"facet.method", "uif"}; + + assertQ( + "check no threading, threads == 0", + req( + methodParam, + "q", + "id:*", + "indent", + "true", + "fl", + "id", + "rows", + "1", + "facet", + "true", + "facet.field", + "f0_ws", + "facet.field", + "f1_ws", + "facet.field", + "f2_ws", + "facet.field", + "f3_ws", + "facet.field", + "f4_ws", + "facet.field", + "f5_ws", + "facet.field", + "f6_ws", + "facet.field", + "f7_ws", + "facet.field", + "f8_ws", + "facet.field", + "f9_ws", + "facet.threads", + "0", + "facet.limit", + "-1"), + "*[count(//lst[@name='facet_fields']/lst)=10]", + "*[count(//lst[@name='facet_fields']/lst/int)=20]", + "//lst[@name='f0_ws']/int[@name='zero_1'][.='25']", + "//lst[@name='f0_ws']/int[@name='zero_2'][.='25']", + "//lst[@name='f1_ws']/int[@name='one_1'][.='33']", + "//lst[@name='f1_ws']/int[@name='one_3'][.='17']", + "//lst[@name='f2_ws']/int[@name='two_1'][.='37']", + "//lst[@name='f2_ws']/int[@name='two_4'][.='13']", + "//lst[@name='f3_ws']/int[@name='three_1'][.='40']", + "//lst[@name='f3_ws']/int[@name='three_5'][.='10']", + "//lst[@name='f4_ws']/int[@name='four_1'][.='41']", + "//lst[@name='f4_ws']/int[@name='four_6'][.='9']", + "//lst[@name='f5_ws']/int[@name='five_1'][.='42']", + "//lst[@name='f5_ws']/int[@name='five_7'][.='8']", + "//lst[@name='f6_ws']/int[@name='six_1'][.='43']", + "//lst[@name='f6_ws']/int[@name='six_8'][.='7']", + "//lst[@name='f7_ws']/int[@name='seven_1'][.='44']", + "//lst[@name='f7_ws']/int[@name='seven_9'][.='6']", + "//lst[@name='f8_ws']/int[@name='eight_1'][.='45']", + "//lst[@name='f8_ws']/int[@name='eight_10'][.='5']", + "//lst[@name='f9_ws']/int[@name='nine_1'][.='45']", + "//lst[@name='f9_ws']/int[@name='nine_11'][.='5']"); + + h.getCore() + .withSearcher( + currentSearcher -> { + SortedSetDocValues 
ui0 = + DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f0_ws"); + SortedSetDocValues ui1 = + DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f1_ws"); + SortedSetDocValues ui2 = + DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f2_ws"); + SortedSetDocValues ui3 = + DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f3_ws"); + SortedSetDocValues ui4 = + DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f4_ws"); + SortedSetDocValues ui5 = + DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f5_ws"); + SortedSetDocValues ui6 = + DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f6_ws"); + SortedSetDocValues ui7 = + DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f7_ws"); + SortedSetDocValues ui8 = + DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f8_ws"); + SortedSetDocValues ui9 = + DocValues.getSortedSet(currentSearcher.getSlowAtomicReader(), "f9_ws"); + + assertQ( + "check threading, more threads than fields", + req( + methodParam, + "q", + "id:*", + "indent", + "true", + "fl", + "id", + "rows", + "1", + "facet", + "true", + "facet.field", + "f0_ws", + "facet.field", + "f1_ws", + "facet.field", + "f2_ws", + "facet.field", + "f3_ws", + "facet.field", + "f4_ws", + "facet.field", + "f5_ws", + "facet.field", + "f6_ws", + "facet.field", + "f7_ws", + "facet.field", + "f8_ws", + "facet.field", + "f9_ws", + "facet.threads", + "1000", + "facet.limit", + "-1"), + "*[count(//lst[@name='facet_fields']/lst)=10]", + "*[count(//lst[@name='facet_fields']/lst/int)=20]", + "//lst[@name='f0_ws']/int[@name='zero_1'][.='25']", + "//lst[@name='f0_ws']/int[@name='zero_2'][.='25']", + "//lst[@name='f1_ws']/int[@name='one_1'][.='33']", + "//lst[@name='f1_ws']/int[@name='one_3'][.='17']", + "//lst[@name='f2_ws']/int[@name='two_1'][.='37']", + "//lst[@name='f2_ws']/int[@name='two_4'][.='13']", + "//lst[@name='f3_ws']/int[@name='three_1'][.='40']", + "//lst[@name='f3_ws']/int[@name='three_5'][.='10']", + "//lst[@name='f4_ws']/int[@name='four_1'][.='41']", + "//lst[@name='f4_ws']/int[@name='four_6'][.='9']", + "//lst[@name='f5_ws']/int[@name='five_1'][.='42']", + "//lst[@name='f5_ws']/int[@name='five_7'][.='8']", + "//lst[@name='f6_ws']/int[@name='six_1'][.='43']", + "//lst[@name='f6_ws']/int[@name='six_8'][.='7']", + "//lst[@name='f7_ws']/int[@name='seven_1'][.='44']", + "//lst[@name='f7_ws']/int[@name='seven_9'][.='6']", + "//lst[@name='f8_ws']/int[@name='eight_1'][.='45']", + "//lst[@name='f8_ws']/int[@name='eight_10'][.='5']", + "//lst[@name='f9_ws']/int[@name='nine_1'][.='45']", + "//lst[@name='f9_ws']/int[@name='nine_11'][.='5']"); + assertQ( + "check threading, fewer threads than fields", + req( + methodParam, + "q", + "id:*", + "indent", + "true", + "fl", + "id", + "rows", + "1", + "facet", + "true", + "facet.field", + "f0_ws", + "facet.field", + "f1_ws", + "facet.field", + "f2_ws", + "facet.field", + "f3_ws", + "facet.field", + "f4_ws", + "facet.field", + "f5_ws", + "facet.field", + "f6_ws", + "facet.field", + "f7_ws", + "facet.field", + "f8_ws", + "facet.field", + "f9_ws", + "facet.threads", + "3", + "facet.limit", + "-1"), + "*[count(//lst[@name='facet_fields']/lst)=10]", + "*[count(//lst[@name='facet_fields']/lst/int)=20]", + "//lst[@name='f0_ws']/int[@name='zero_1'][.='25']", + "//lst[@name='f0_ws']/int[@name='zero_2'][.='25']", + "//lst[@name='f1_ws']/int[@name='one_1'][.='33']", + "//lst[@name='f1_ws']/int[@name='one_3'][.='17']", + 
"//lst[@name='f2_ws']/int[@name='two_1'][.='37']", + "//lst[@name='f2_ws']/int[@name='two_4'][.='13']", + "//lst[@name='f3_ws']/int[@name='three_1'][.='40']", + "//lst[@name='f3_ws']/int[@name='three_5'][.='10']", + "//lst[@name='f4_ws']/int[@name='four_1'][.='41']", + "//lst[@name='f4_ws']/int[@name='four_6'][.='9']", + "//lst[@name='f5_ws']/int[@name='five_1'][.='42']", + "//lst[@name='f5_ws']/int[@name='five_7'][.='8']", + "//lst[@name='f6_ws']/int[@name='six_1'][.='43']", + "//lst[@name='f6_ws']/int[@name='six_8'][.='7']", + "//lst[@name='f7_ws']/int[@name='seven_1'][.='44']", + "//lst[@name='f7_ws']/int[@name='seven_9'][.='6']", + "//lst[@name='f8_ws']/int[@name='eight_1'][.='45']", + "//lst[@name='f8_ws']/int[@name='eight_10'][.='5']", + "//lst[@name='f9_ws']/int[@name='nine_1'][.='45']", + "//lst[@name='f9_ws']/int[@name='nine_11'][.='5']"); + + // After this all, the uninverted fields should be exactly the same as they were the + // first time, even if we blast a whole bunch of identical fields at the facet code. + // The way fetching the uninverted field is written, all this is really testing is if + // the cache is working. It's NOT testing whether the pending/sleep is actually + // functioning, I had to do that by hand since I don't see how to make sure that + // uninverting the field multiple times actually happens to hit the wait state. + assertQ( + "check threading, more threads than fields", + req( + methodParam, + "q", + "id:*", + "indent", + "true", + "fl", + "id", + "rows", + "1", + "facet", + "true", + "facet.field", + "f0_ws", + "facet.field", + "f0_ws", + "facet.field", + "f0_ws", + "facet.field", + "f0_ws", + "facet.field", + "f0_ws", + "facet.field", + "f1_ws", + "facet.field", + "f1_ws", + "facet.field", + "f1_ws", + "facet.field", + "f1_ws", + "facet.field", + "f1_ws", + "facet.field", + "f2_ws", + "facet.field", + "f2_ws", + "facet.field", + "f2_ws", + "facet.field", + "f2_ws", + "facet.field", + "f2_ws", + "facet.field", + "f3_ws", + "facet.field", + "f3_ws", + "facet.field", + "f3_ws", + "facet.field", + "f3_ws", + "facet.field", + "f3_ws", + "facet.field", + "f4_ws", + "facet.field", + "f4_ws", + "facet.field", + "f4_ws", + "facet.field", + "f4_ws", + "facet.field", + "f4_ws", + "facet.field", + "f5_ws", + "facet.field", + "f5_ws", + "facet.field", + "f5_ws", + "facet.field", + "f5_ws", + "facet.field", + "f5_ws", + "facet.field", + "f6_ws", + "facet.field", + "f6_ws", + "facet.field", + "f6_ws", + "facet.field", + "f6_ws", + "facet.field", + "f6_ws", + "facet.field", + "f7_ws", + "facet.field", + "f7_ws", + "facet.field", + "f7_ws", + "facet.field", + "f7_ws", + "facet.field", + "f7_ws", + "facet.field", + "f8_ws", + "facet.field", + "f8_ws", + "facet.field", + "f8_ws", + "facet.field", + "f8_ws", + "facet.field", + "f8_ws", + "facet.field", + "f9_ws", + "facet.field", + "f9_ws", + "facet.field", + "f9_ws", + "facet.field", + "f9_ws", + "facet.field", + "f9_ws", + "facet.threads", + "1000", + "facet.limit", + "-1"), + "*[count(//lst[@name='facet_fields']/lst)=10]", + "*[count(//lst[@name='facet_fields']/lst/int)=20]"); + return null; + }); } @Test @@ -914,30 +1303,53 @@ public void testListedTermCounts() throws Exception { assertU(commit()); // order is the same as in facet.field, when no facet.sort specified - assertQ(req("q", "*:*", FacetParams.FACET, "true", FacetParams.FACET_FIELD, "{!terms=Book3,Book2,Book1}title_ws"), + assertQ( + req( + "q", + "*:*", + FacetParams.FACET, + "true", + FacetParams.FACET_FIELD, + "{!terms=Book3,Book2,Book1}title_ws"), 
"//lst[@name='facet_fields']/lst[@name='title_ws']/int[1][@name='Book3']", "//lst[@name='facet_fields']/lst[@name='title_ws']/int[2][@name='Book2']", "//lst[@name='facet_fields']/lst[@name='title_ws']/int[3][@name='Book1']"); // order is by counts, when facet.sort by count specified - assertQ(req("q", "*:*", FacetParams.FACET, "true", FacetParams.FACET_FIELD, "{!terms=Book3,Book2,Book1}title_ws", - "facet.sort", FacetParams.FACET_SORT_COUNT), + assertQ( + req( + "q", + "*:*", + FacetParams.FACET, + "true", + FacetParams.FACET_FIELD, + "{!terms=Book3,Book2,Book1}title_ws", + "facet.sort", + FacetParams.FACET_SORT_COUNT), "//lst[@name='facet_fields']/lst[@name='title_ws']/int[1][@name='Book2']", "//lst[@name='facet_fields']/lst[@name='title_ws']/int[2][@name='Book1']", "//lst[@name='facet_fields']/lst[@name='title_ws']/int[3][@name='Book3']"); // order is by index, when facet.sort by index specified - assertQ(req("q", "*:*", FacetParams.FACET, "true", FacetParams.FACET_FIELD, "{!terms=Book3,Book2,Book1}title_ws", - "facet.sort", FacetParams.FACET_SORT_INDEX), + assertQ( + req( + "q", + "*:*", + FacetParams.FACET, + "true", + FacetParams.FACET_FIELD, + "{!terms=Book3,Book2,Book1}title_ws", + "facet.sort", + FacetParams.FACET_SORT_INDEX), "//lst[@name='facet_fields']/lst[@name='title_ws']/int[1][@name='Book1']", "//lst[@name='facet_fields']/lst[@name='title_ws']/int[2][@name='Book2']", "//lst[@name='facet_fields']/lst[@name='title_ws']/int[3][@name='Book3']"); } - + @Test public void testFacetCountsWithMinExactCount() throws Exception { final int NUM_DOCS = 20; - for (int i = 0; i < NUM_DOCS ; i++) { + for (int i = 0; i < NUM_DOCS; i++) { assertU(adoc("id", String.valueOf(i), "title_ws", "Book1")); assertU(commit()); } @@ -945,16 +1357,18 @@ public void testFacetCountsWithMinExactCount() throws Exception { params.set("q", "title_ws:Book1"); params.set(FacetParams.FACET, "true"); params.set(FacetParams.FACET_FIELD, "title_ws"); - assertQ(req(params), - "//lst[@name='facet_fields']/lst[@name='title_ws']/int[1][@name='Book1'][.='20']" - ,"//*[@numFoundExact='true']" - ,"//*[@numFound='" + NUM_DOCS + "']"); - - // It doesn't matter if we request minExactCount, when requesting facets, the numFound value is precise - assertQ(req(params, CommonParams.MIN_EXACT_COUNT, "2", CommonParams.ROWS, "2"), - "//lst[@name='facet_fields']/lst[@name='title_ws']/int[1][@name='Book1'][.='20']" - ,"//*[@numFoundExact='true']" - ,"//*[@numFound='" + NUM_DOCS + "']"); + assertQ( + req(params), + "//lst[@name='facet_fields']/lst[@name='title_ws']/int[1][@name='Book1'][.='20']", + "//*[@numFoundExact='true']", + "//*[@numFound='" + NUM_DOCS + "']"); + + // It doesn't matter if we request minExactCount, when requesting facets, the numFound value is + // precise + assertQ( + req(params, CommonParams.MIN_EXACT_COUNT, "2", CommonParams.ROWS, "2"), + "//lst[@name='facet_fields']/lst[@name='title_ws']/int[1][@name='Book1'][.='20']", + "//*[@numFoundExact='true']", + "//*[@numFound='" + NUM_DOCS + "']"); } } - diff --git a/solr/core/src/test/org/apache/solr/request/TestIntervalFaceting.java b/solr/core/src/test/org/apache/solr/request/TestIntervalFaceting.java index a581264c92b..d47b042ff8f 100644 --- a/solr/core/src/test/org/apache/solr/request/TestIntervalFaceting.java +++ b/solr/core/src/test/org/apache/solr/request/TestIntervalFaceting.java @@ -24,7 +24,6 @@ import java.util.HashSet; import java.util.Locale; import java.util.Set; - import org.apache.lucene.util.BytesRef; import org.apache.solr.SolrTestCaseJ4; import 
org.apache.solr.client.solrj.SolrClient; @@ -50,14 +49,16 @@ public class TestIntervalFaceting extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - private final static long DATE_START_TIME_RANDOM_TEST = 1499797224224L; - private final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", Locale.ROOT); - + + private static final long DATE_START_TIME_RANDOM_TEST = 1499797224224L; + private final SimpleDateFormat dateFormat = + new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", Locale.ROOT); + @BeforeClass public static void beforeTests() throws Exception { // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); initCore("solrconfig-basic.xml", "schema-docValuesFaceting.xml"); } @@ -98,7 +99,8 @@ public void testMultiValueFields() { assertIntervalQuery("test_ss_dv", "[hello,hello]", "0"); assertIntervalQuery("test_ss_dv", "[dog,dog]", "1"); assertIntervalQuery("test_ss_dv", "[cat,cat]", "2"); - assertIntervalQuery("test_ss_dv", "[*,*]", "2", "[*,cat)", "1", "[cat,dog)", "2", "[dog,*)", "1"); + assertIntervalQuery( + "test_ss_dv", "[*,*]", "2", "[*,cat)", "1", "[cat,dog)", "2", "[dog,*)", "1"); } @Test @@ -118,7 +120,7 @@ public void testMultipleSegments() { assertU(commit()); int i = 11; while (getNumberOfReaders() < 2 && i < 20) { - //try to get more than one segment + // try to get more than one segment assertU(adoc("id", String.valueOf(i), "test_i_dv", String.valueOf(i))); assertU(commit()); i++; @@ -131,7 +133,6 @@ public void testMultipleSegments() { } assertIntervalQueriesString("test_s_dv"); - } @Test @@ -160,7 +161,6 @@ private int getNumberOfReaders() { } catch (IOException e) { throw new RuntimeException(e); } - } @Test @@ -180,26 +180,62 @@ public void testBasic() { assertIntervalQueriesString("test_s_dv"); // error cases - assertQEx("missing beginning of range", - req("fl", "test_s_dv", "q", "*:*", "facet", "true", "facet.interval", "test_s_dv", - "f.test_s_dv.facet.interval.set", "bird,bird]"), - SolrException.ErrorCode.BAD_REQUEST - ); - assertQEx("only separator is escaped", - req("fl", "test_s_dv", "q", "*:*", "facet", "true", "facet.interval", "test_s_dv", - "f.test_s_dv.facet.interval.set", "(bird\\,turtle]"), - SolrException.ErrorCode.BAD_REQUEST - ); - assertQEx("missing separator", - req("fl", "test_s_dv", "q", "*:*", "facet", "true", "facet.interval", "test_s_dv", - "f.test_s_dv.facet.interval.set", "(bird]"), - SolrException.ErrorCode.BAD_REQUEST - ); - assertQEx("missing end of range", - req("fl", "test_s_dv", "q", "*:*", "facet", "true", "facet.interval", "test_s_dv", - "f.test_s_dv.facet.interval.set", "(bird,turtle"), - SolrException.ErrorCode.BAD_REQUEST - ); + assertQEx( + "missing beginning of range", + req( + "fl", + "test_s_dv", + "q", + "*:*", + "facet", + "true", + "facet.interval", + "test_s_dv", + "f.test_s_dv.facet.interval.set", + "bird,bird]"), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "only separator is escaped", + req( + "fl", + "test_s_dv", + "q", + "*:*", + "facet", + "true", + "facet.interval", + "test_s_dv", + "f.test_s_dv.facet.interval.set", + "(bird\\,turtle]"), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "missing separator", + req( + "fl", + "test_s_dv", + "q", + "*:*", + "facet", + "true", + "facet.interval", 
+ "test_s_dv", + "f.test_s_dv.facet.interval.set", + "(bird]"), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "missing end of range", + req( + "fl", + "test_s_dv", + "q", + "*:*", + "facet", + "true", + "facet.interval", + "test_s_dv", + "f.test_s_dv.facet.interval.set", + "(bird,turtle"), + SolrException.ErrorCode.BAD_REQUEST); } @Test @@ -216,16 +252,27 @@ public void testMultipleFields() { assertU(adoc("id", "10")); assertU(commit()); - assertQ(req("q", "*:*", "facet", "true", "facet.interval", "test_s_dv", - "facet.interval", "test_l_dv", "f.test_s_dv.facet.interval.set", "[cat,dog]", - "f.test_l_dv.facet.interval.set", "[3,6]", - "f.test_l_dv.facet.interval.set", "[5,9]"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.interval", + "test_s_dv", + "facet.interval", + "test_l_dv", + "f.test_s_dv.facet.interval.set", + "[cat,dog]", + "f.test_l_dv.facet.interval.set", + "[3,6]", + "f.test_l_dv.facet.interval.set", + "[5,9]"), "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[cat,dog]'][.=5]", "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[3,6]'][.=4]", "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[5,9]'][.=5]"); - } - + @Test public void testWithFieldCache() { assertU(adoc("id", "1", "test_s", "dog", "test_l", "1")); @@ -240,14 +287,25 @@ public void testWithFieldCache() { assertU(adoc("id", "10")); assertU(commit()); - assertQ(req("q", "*:*", "facet", "true", "facet.interval", "test_s", - "facet.interval", "test_l", "f.test_s.facet.interval.set", "[cat,dog]", - "f.test_l.facet.interval.set", "[3,6]", - "f.test_l.facet.interval.set", "[5,9]"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.interval", + "test_s", + "facet.interval", + "test_l", + "f.test_s.facet.interval.set", + "[cat,dog]", + "f.test_l.facet.interval.set", + "[3,6]", + "f.test_l.facet.interval.set", + "[5,9]"), "//lst[@name='facet_intervals']/lst[@name='test_s']/int[@name='[cat,dog]'][.=5]", "//lst[@name='facet_intervals']/lst[@name='test_l']/int[@name='[3,6]'][.=4]", "//lst[@name='facet_intervals']/lst[@name='test_l']/int[@name='[5,9]'][.=5]"); - } @Test @@ -256,26 +314,58 @@ public void testRandom() throws Exception { // All field values will be a number between 0 and cardinality int cardinality = 10000; // Fields to use for interval faceting - String[] fields = new String[]{ - "test_s_dv", "test_i_dv", "test_l_dv", "test_f_dv", "test_d_dv", "test_dt_dv", - "test_ss_dv", "test_is_dv", "test_fs_dv", "test_ls_dv", "test_ds_dv", "test_dts_dv", "test_s", "test_i", - "test_l", "test_f", "test_d", "test_dt", "test_ss", "test_is", "test_fs", "test_ls", "test_ds", "test_dts", - "test_i_p", "test_is_p", "test_l_p", "test_ls_p", "test_f_p", "test_fs_p", "test_d_p", "test_ds_p", "test_dts_p" + String[] fields = + new String[] { + "test_s_dv", + "test_i_dv", + "test_l_dv", + "test_f_dv", + "test_d_dv", + "test_dt_dv", + "test_ss_dv", + "test_is_dv", + "test_fs_dv", + "test_ls_dv", + "test_ds_dv", + "test_dts_dv", + "test_s", + "test_i", + "test_l", + "test_f", + "test_d", + "test_dt", + "test_ss", + "test_is", + "test_fs", + "test_ls", + "test_ds", + "test_dts", + "test_i_p", + "test_is_p", + "test_l_p", + "test_ls_p", + "test_f_p", + "test_fs_p", + "test_d_p", + "test_ds_p", + "test_dts_p" }; for (int i = 0; i < atLeast(500); i++) { if (random().nextInt(50) == 0) { - //have some empty docs + // have some empty docs assertU(adoc("id", String.valueOf(i))); continue; } if (random().nextInt(100) == 0 && i > 0) { - //delete some docs + 
// delete some docs assertU(delI(String.valueOf(i - 1))); } String[] docFields = new String[(random().nextInt(5)) * 12 + 14]; docFields[0] = "id"; - docFields[1] = String.valueOf(i * (random().nextBoolean()?1:-1)); // in the queries we do positive and negative + docFields[1] = + String.valueOf( + i * (random().nextBoolean() ? 1 : -1)); // in the queries we do positive and negative docFields[2] = "test_s"; docFields[3] = String.valueOf(randomInt(cardinality)); docFields[4] = "test_i"; @@ -312,11 +402,11 @@ public void testRandom() throws Exception { for (int i = 0; i < atLeast(10000); i++) { doTestQuery(cardinality, fields); } - } long randomMs(int cardinality) { - return DATE_START_TIME_RANDOM_TEST + random().nextInt(cardinality) * 1000 * (random().nextBoolean()?1:-1); + return DATE_START_TIME_RANDOM_TEST + + random().nextInt(cardinality) * 1000 * (random().nextBoolean() ? 1 : -1); } double raondomDouble(int cardinality) { @@ -331,7 +421,7 @@ long randomMs(int cardinality) { while (d.isNaN()) { d = random().nextDouble(); } - return d * cardinality * (random().nextBoolean()?1:-1); + return d * cardinality * (random().nextBoolean() ? 1 : -1); } float randomFloat(int cardinality) { @@ -346,7 +436,7 @@ float randomFloat(int cardinality) { while (f.isNaN()) { f = random().nextFloat(); } - return f * cardinality * (random().nextBoolean()?1:-1); + return f * cardinality * (random().nextBoolean() ? 1 : -1); } int randomInt(int cardinality) { @@ -355,9 +445,9 @@ int randomInt(int cardinality) { if (num == 0) return Integer.MAX_VALUE; if (num == 1) return Integer.MIN_VALUE; } - return random().nextInt(cardinality) * (random().nextBoolean()?1:-1); + return random().nextInt(cardinality) * (random().nextBoolean() ? 1 : -1); } - + long randomLong(int cardinality) { if (rarely()) { int num = random().nextInt(2); @@ -368,13 +458,13 @@ long randomLong(int cardinality) { } /** - * Executes one query using interval faceting and compares with the same query using - * facet query with the same range + * Executes one query using interval faceting and compares with the same query using facet query + * with the same range */ @SuppressWarnings("unchecked") private void doTestQuery(int cardinality, String[] fields) throws Exception { - String[] startOptions = new String[]{"(", "["}; - String[] endOptions = new String[]{")", "]"}; + String[] startOptions = new String[] {"(", "["}; + String[] endOptions = new String[] {")", "]"}; ModifiableSolrParams params = new ModifiableSolrParams(); if (rarely()) { params.set("q", "*:*"); @@ -384,33 +474,67 @@ private void doTestQuery(int cardinality, String[] fields) throws Exception { params.set("q", "id:[" + qRange[0] + " TO " + qRange[1] + "]"); } params.set("facet", "true"); - String field = pickRandom(fields); //choose from any of the fields + String field = pickRandom(fields); // choose from any of the fields params.set("facet.interval", field); // number of intervals for (int i = 0; i < 1 + random().nextInt(20); i++) { String[] interval = getRandomRange(cardinality, field); String open = pickRandom(startOptions); String close = pickRandom(endOptions); - params.add("f." + field + ".facet.interval.set", open + interval[0] + "," + interval[1] + close); - params.add("facet.query", field + ":" + open.replace('(', '{') + interval[0] + " TO " + interval[1] + close.replace(')', '}')); + params.add( + "f." 
+ field + ".facet.interval.set", open + interval[0] + "," + interval[1] + close); + params.add( + "facet.query", + field + + ":" + + open.replace('(', '{') + + interval[0] + + " TO " + + interval[1] + + close.replace(')', '}')); } SolrQueryRequest req = req(params); try { SolrQueryResponse rsp = h.queryAndResponse("", req); - NamedList facetQueries = (NamedList) ((NamedList) rsp.getValues().get("facet_counts")).get("facet_queries"); - NamedList facetIntervals = (NamedList) ((NamedList) ((NamedList) rsp.getValues().get("facet_counts")) - .get("facet_intervals")).get(field); - assertEquals("Responses don't have the same number of facets: \n" + facetQueries + "\n" + facetIntervals, - facetQueries.size(), getCountDistinctIntervals(facetIntervals)); + NamedList facetQueries = + (NamedList) + ((NamedList) rsp.getValues().get("facet_counts")).get("facet_queries"); + NamedList facetIntervals = + (NamedList) + ((NamedList) + ((NamedList) rsp.getValues().get("facet_counts")) + .get("facet_intervals")) + .get(field); + assertEquals( + "Responses don't have the same number of facets: \n" + + facetQueries + + "\n" + + facetIntervals, + facetQueries.size(), + getCountDistinctIntervals(facetIntervals)); for (int i = 0; i < facetIntervals.size(); i++) { - assertEquals("Interval did not match: " + field + ": " + facetIntervals.getName(i) + "\nResponse: " + rsp.getValues().get("facet_counts"), - facetQueries.get(field + ":" + facetIntervals.getName(i).replace(",", " TO ").replace('(', '{').replace(')', '}')).toString(), + assertEquals( + "Interval did not match: " + + field + + ": " + + facetIntervals.getName(i) + + "\nResponse: " + + rsp.getValues().get("facet_counts"), + facetQueries + .get( + field + + ":" + + facetIntervals + .getName(i) + .replace(",", " TO ") + .replace('(', '{') + .replace(')', '}')) + .toString(), facetIntervals.getVal(i).toString()); } } finally { req.close(); } - } private int getCountDistinctIntervals(NamedList facetIntervals) { @@ -422,14 +546,12 @@ private int getCountDistinctIntervals(NamedList facetIntervals) { } /** - * Returns a random range. It's guaranteed that the first - * number will be lower than the second. The range could have values greater than "max", - * for example [Integer/Long/Float/Double].[MIN/MAX_VALUE,POSITIVE/NEGATIVE_INFINITY] - * If the fieldName is "test_s_dv" or "test_ss_dv" (the - * two fields used for Strings), the comparison will be done - * alphabetically - * If the field is a Date, a date range will be returned - * The range could also contain "*" as beginning and/or end of the range + * Returns a random range. It's guaranteed that the first number will be lower than the second. 
+ * The range could have values greater than "max", for example + * [Integer/Long/Float/Double].[MIN/MAX_VALUE,POSITIVE/NEGATIVE_INFINITY] If the fieldName is + * "test_s_dv" or "test_ss_dv" (the two fields used for Strings), the comparison will be done + * alphabetically If the field is a Date, a date range will be returned The range could also + * contain "*" as beginning and/or end of the range */ private String[] getRandomRange(int max, String fieldName) { Number[] values = new Number[2]; @@ -463,7 +585,6 @@ private String[] getRandomRange(int max, String fieldName) { break; default: throw new AssertionError("Unexpected number type"); - } Arrays.sort(values); } @@ -491,36 +612,160 @@ private String[] getRandomRange(int max, String fieldName) { @Test public void testParse() throws SyntaxError { - assertInterval("test_l_dv", "(0,2)", new long[]{1}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); - assertInterval("test_l_dv", "(0,2]", new long[]{1, 2}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{3, Integer.MAX_VALUE, Long.MAX_VALUE}); - assertInterval("test_l_dv", "[0,2]", new long[]{0, 1, 2}, new long[]{-1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{3, Integer.MAX_VALUE, Long.MAX_VALUE}); - assertInterval("test_l_dv", "[0,2)", new long[]{0, 1}, new long[]{-1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); - - assertInterval("test_l_dv", "(0,*)", new long[]{1, 2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}, new long[]{-1, 0, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{}); - assertInterval("test_l_dv", "(*,2)", new long[]{-1, Integer.MIN_VALUE, Long.MIN_VALUE, 0, 1}, new long[]{}, new long[]{2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); - assertInterval("test_l_dv", "(*,*)", new long[]{-1, Integer.MIN_VALUE, Long.MIN_VALUE, 0, 1, 2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}, new long[]{}, new long[]{}); - - assertInterval("test_l_dv", "[0,*]", new long[]{0, 1, 2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}, new long[]{-1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{}); - assertInterval("test_l_dv", "[*,2]", new long[]{-1, Integer.MIN_VALUE, Long.MIN_VALUE, 0, 1, 2}, new long[]{}, new long[]{3, Integer.MAX_VALUE, Long.MAX_VALUE}); - assertInterval("test_l_dv", "[*,*]", new long[]{-1, Integer.MIN_VALUE, Long.MIN_VALUE, 0, 1, 2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}, new long[]{}, new long[]{}); - - assertInterval("test_l_dv", "(2,2)", new long[]{}, new long[]{2, 1, 0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{3, Integer.MAX_VALUE, Long.MAX_VALUE}); - assertInterval("test_l_dv", "(0,0)", new long[]{}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{1, 2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); - - assertInterval("test_l_dv", "(0," + Long.MAX_VALUE + "]", new long[]{1, 2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{}); - assertInterval("test_l_dv", "(0," + Long.MAX_VALUE + ")", new long[]{1, 2, 3, Integer.MAX_VALUE}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{Long.MAX_VALUE}); - assertInterval("test_l_dv", "(" + Long.MIN_VALUE + ",0)", new long[]{-1, Integer.MIN_VALUE}, new long[]{Long.MIN_VALUE}, new long[]{1, 2, Integer.MAX_VALUE, Long.MAX_VALUE}); - assertInterval("test_l_dv", "[" + Long.MIN_VALUE + ",0)", new long[]{-1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{}, new long[]{1, 2, Integer.MAX_VALUE, Long.MAX_VALUE}); - assertInterval("test_l_dv", "[" + 
Long.MIN_VALUE + "," + Long.MAX_VALUE + "]", new long[]{-1, Integer.MIN_VALUE, Long.MIN_VALUE, 1, 2, Integer.MAX_VALUE, Long.MAX_VALUE}, new long[]{}, new long[]{}); - assertInterval("test_l_dv", "(" + Long.MIN_VALUE + "," + Long.MAX_VALUE + ")", new long[]{-1, Integer.MIN_VALUE, 1, 2, Integer.MAX_VALUE}, new long[]{Long.MIN_VALUE}, new long[]{Long.MAX_VALUE}); - - assertInterval("test_l_dv", "( 0,2)", new long[]{1}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); - assertInterval("test_l_dv", "( 0,2)", new long[]{1}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); - assertInterval("test_l_dv", "(0, 2)", new long[]{1}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); - assertInterval("test_l_dv", "( 0 , 2 )", new long[]{1}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); - assertInterval("test_l_dv", " ( 0 , 2 ) ", new long[]{1}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); - - assertInterval("test_l_dv", "[-1,1]", new long[]{-1, 0, 1}, new long[]{-2, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); + assertInterval( + "test_l_dv", + "(0,2)", + new long[] {1}, + new long[] {0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, + new long[] {2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); + assertInterval( + "test_l_dv", + "(0,2]", + new long[] {1, 2}, + new long[] {0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, + new long[] {3, Integer.MAX_VALUE, Long.MAX_VALUE}); + assertInterval( + "test_l_dv", + "[0,2]", + new long[] {0, 1, 2}, + new long[] {-1, Integer.MIN_VALUE, Long.MIN_VALUE}, + new long[] {3, Integer.MAX_VALUE, Long.MAX_VALUE}); + assertInterval( + "test_l_dv", + "[0,2)", + new long[] {0, 1}, + new long[] {-1, Integer.MIN_VALUE, Long.MIN_VALUE}, + new long[] {2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); + + assertInterval( + "test_l_dv", + "(0,*)", + new long[] {1, 2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}, + new long[] {-1, 0, Integer.MIN_VALUE, Long.MIN_VALUE}, + new long[] {}); + assertInterval( + "test_l_dv", + "(*,2)", + new long[] {-1, Integer.MIN_VALUE, Long.MIN_VALUE, 0, 1}, + new long[] {}, + new long[] {2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); + assertInterval( + "test_l_dv", + "(*,*)", + new long[] { + -1, Integer.MIN_VALUE, Long.MIN_VALUE, 0, 1, 2, 3, Integer.MAX_VALUE, Long.MAX_VALUE + }, + new long[] {}, + new long[] {}); + + assertInterval( + "test_l_dv", + "[0,*]", + new long[] {0, 1, 2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}, + new long[] {-1, Integer.MIN_VALUE, Long.MIN_VALUE}, + new long[] {}); + assertInterval( + "test_l_dv", + "[*,2]", + new long[] {-1, Integer.MIN_VALUE, Long.MIN_VALUE, 0, 1, 2}, + new long[] {}, + new long[] {3, Integer.MAX_VALUE, Long.MAX_VALUE}); + assertInterval( + "test_l_dv", + "[*,*]", + new long[] { + -1, Integer.MIN_VALUE, Long.MIN_VALUE, 0, 1, 2, 3, Integer.MAX_VALUE, Long.MAX_VALUE + }, + new long[] {}, + new long[] {}); + + assertInterval( + "test_l_dv", + "(2,2)", + new long[] {}, + new long[] {2, 1, 0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, + new long[] {3, Integer.MAX_VALUE, Long.MAX_VALUE}); + assertInterval( + "test_l_dv", + "(0,0)", + new long[] {}, + new long[] {0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, + new long[] {1, 2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); + + assertInterval( + "test_l_dv", + "(0," + 
Long.MAX_VALUE + "]", + new long[] {1, 2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}, + new long[] {0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, + new long[] {}); + assertInterval( + "test_l_dv", + "(0," + Long.MAX_VALUE + ")", + new long[] {1, 2, 3, Integer.MAX_VALUE}, + new long[] {0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, + new long[] {Long.MAX_VALUE}); + assertInterval( + "test_l_dv", + "(" + Long.MIN_VALUE + ",0)", + new long[] {-1, Integer.MIN_VALUE}, + new long[] {Long.MIN_VALUE}, + new long[] {1, 2, Integer.MAX_VALUE, Long.MAX_VALUE}); + assertInterval( + "test_l_dv", + "[" + Long.MIN_VALUE + ",0)", + new long[] {-1, Integer.MIN_VALUE, Long.MIN_VALUE}, + new long[] {}, + new long[] {1, 2, Integer.MAX_VALUE, Long.MAX_VALUE}); + assertInterval( + "test_l_dv", + "[" + Long.MIN_VALUE + "," + Long.MAX_VALUE + "]", + new long[] {-1, Integer.MIN_VALUE, Long.MIN_VALUE, 1, 2, Integer.MAX_VALUE, Long.MAX_VALUE}, + new long[] {}, + new long[] {}); + assertInterval( + "test_l_dv", + "(" + Long.MIN_VALUE + "," + Long.MAX_VALUE + ")", + new long[] {-1, Integer.MIN_VALUE, 1, 2, Integer.MAX_VALUE}, + new long[] {Long.MIN_VALUE}, + new long[] {Long.MAX_VALUE}); + + assertInterval( + "test_l_dv", + "( 0,2)", + new long[] {1}, + new long[] {0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, + new long[] {2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); + assertInterval( + "test_l_dv", + "( 0,2)", + new long[] {1}, + new long[] {0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, + new long[] {2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); + assertInterval( + "test_l_dv", + "(0, 2)", + new long[] {1}, + new long[] {0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, + new long[] {2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); + assertInterval( + "test_l_dv", + "( 0 , 2 )", + new long[] {1}, + new long[] {0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, + new long[] {2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); + assertInterval( + "test_l_dv", + " ( 0 , 2 ) ", + new long[] {1}, + new long[] {0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, + new long[] {2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); + + assertInterval( + "test_l_dv", + "[-1,1]", + new long[] {-1, 0, 1}, + new long[] {-2, Integer.MIN_VALUE, Long.MIN_VALUE}, + new long[] {2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}); assertStringInterval("test_s_dv", "[A,B]", "A", "B"); assertStringInterval("test_s_dv", "[A,b]", "A", "b"); @@ -530,11 +775,11 @@ public void testParse() throws SyntaxError { assertStringInterval("test_s_dv", "[\"A\",B]", "\"A\"", "B"); assertStringInterval("test_s_dv", "[A B C,B]", "A B C", "B"); assertStringInterval("test_s_dv", "[ A B C ,B]", "A B C", "B"); -// These two are currently not possible -// assertStringInterval("test_s_dv", "[\\ A B C ,B]", " A B C", "B"); -// assertStringInterval("test_s_dv", "[\\*,B]", "*", "B"); - - //invalid intervals + // These two are currently not possible + // assertStringInterval("test_s_dv", "[\\ A B C ,B]", " A B C", "B"); + // assertStringInterval("test_s_dv", "[\\*,B]", "*", "B"); + + // invalid intervals assertBadInterval("test_l_dv", "0,2)", "Invalid start character"); assertBadInterval("test_l_dv", "{0,2)", "Invalid start character"); assertBadInterval("test_l_dv", "(0,2", "Invalid end character"); @@ -553,7 +798,7 @@ public void testParse() throws SyntaxError { assertBadInterval("test_s_dv", "A,B)", "Invalid start character"); assertBadInterval("test_s_dv", "(B,A)", "Start is higher than end in interval for key"); assertBadInterval("test_s_dv", "(a,B)", "Start is higher than end in interval for key"); - + assertIntervalKey("test_s_dv", 
"[A,B]", "[A,B]"); assertIntervalKey("test_s_dv", "(A,*]", "(A,*]"); assertIntervalKey("test_s_dv", "{!}(A,*]", "(A,*]"); @@ -571,8 +816,7 @@ public void testParse() throws SyntaxError { assertIntervalKey("test_s_dv", "{!key='\"A,B\"'}(A,B)", "\"A,B\""); assertIntervalKey("test_s_dv", "{!key='A..B'}(A,B)", "A..B"); assertIntervalKey("test_s_dv", "{!key='A TO B'}(A,B)", "A TO B"); - - + assertU(adoc("id", "1", "test_s_dv", "dog", "test_l_dv", "1")); assertU(adoc("id", "2", "test_s_dv", "cat", "test_l_dv", "2")); assertU(adoc("id", "3", "test_s_dv", "bird", "test_l_dv", "3")); @@ -584,150 +828,240 @@ public void testParse() throws SyntaxError { assertU(adoc("id", "9", "test_s_dv", "cat", "test_l_dv", "9")); assertU(adoc("id", "10")); assertU(commit()); - + // facet.interval not set - assertQ(req("q", "*:*", "facet", "true", - "f.test_s_dv.facet.interval.set", "[cat,dog]", - "f.test_l_dv.facet.interval.set", "[3,6]", - "f.test_l_dv.facet.interval.set", "[5,9]"), - "count(//lst[@name='facet_intervals']/lst)=0"); - + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "f.test_s_dv.facet.interval.set", + "[cat,dog]", + "f.test_l_dv.facet.interval.set", + "[3,6]", + "f.test_l_dv.facet.interval.set", + "[5,9]"), + "count(//lst[@name='facet_intervals']/lst)=0"); + // facet.interval only on one of the fields - assertQ(req("q", "*:*", "facet", "true", - "facet.interval", "test_s_dv", - "f.test_s_dv.facet.interval.set", "[cat,dog]", - "f.test_l_dv.facet.interval.set", "[3,6]", - "f.test_l_dv.facet.interval.set", "[5,9]"), - "count(//lst[@name='facet_intervals']/lst)=1", - "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[cat,dog]'][.=5]"); - + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.interval", + "test_s_dv", + "f.test_s_dv.facet.interval.set", + "[cat,dog]", + "f.test_l_dv.facet.interval.set", + "[3,6]", + "f.test_l_dv.facet.interval.set", + "[5,9]"), + "count(//lst[@name='facet_intervals']/lst)=1", + "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[cat,dog]'][.=5]"); + // existing fields in facet.interval with no intervals defined - assertQEx("Unexpected exception", + assertQEx( + "Unexpected exception", "Missing required parameter: f.test_l_dv.facet.interval.set (or default: facet.interval.set)", - req("q", "*:*", "facet", "true", - "facet.interval", "test_s_dv", - "facet.interval", "test_l_dv", - "f.test_s_dv.facet.interval.set", "[cat,dog]"), + req( + "q", + "*:*", + "facet", + "true", + "facet.interval", + "test_s_dv", + "facet.interval", + "test_l_dv", + "f.test_s_dv.facet.interval.set", + "[cat,dog]"), SolrException.ErrorCode.BAD_REQUEST); - + // use of facet.interval.set - assertQ(req("q", "*:*", "facet", "true", - "facet.interval", "test_s_dv", - "facet.interval", "test_l_dv", - "facet.interval.set", "[1,2]"), - "count(//lst[@name='facet_intervals']/lst)=2", - "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[1,2]'][.=0]", - "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[1,2]'][.=2]" - ); - - // multiple facet.interval.set - assertQ(req("q", "*:*", "facet", "true", - "facet.interval", "test_s_dv", - "facet.interval", "test_l_dv", - "facet.interval.set", "[1,2]", - "facet.interval.set", "[2,3]", - "facet.interval.set", "[3,4]"), - "count(//lst[@name='facet_intervals']/lst)=2", - "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[1,2]'][.=0]", - "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[2,3]'][.=0]", - 
"//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[3,4]'][.=0]", - "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[1,2]'][.=2]", - "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[2,3]'][.=2]", - "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[3,4]'][.=2]" - ); - + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.interval", + "test_s_dv", + "facet.interval", + "test_l_dv", + "facet.interval.set", + "[1,2]"), + "count(//lst[@name='facet_intervals']/lst)=2", + "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[1,2]'][.=0]", + "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[1,2]'][.=2]"); + + // multiple facet.interval.set + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.interval", + "test_s_dv", + "facet.interval", + "test_l_dv", + "facet.interval.set", + "[1,2]", + "facet.interval.set", + "[2,3]", + "facet.interval.set", + "[3,4]"), + "count(//lst[@name='facet_intervals']/lst)=2", + "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[1,2]'][.=0]", + "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[2,3]'][.=0]", + "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[3,4]'][.=0]", + "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[1,2]'][.=2]", + "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[2,3]'][.=2]", + "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[3,4]'][.=2]"); + // use of facet.interval.set and override - assertQ(req("q", "*:*", "facet", "true", - "facet.interval", "test_s_dv", - "facet.interval", "test_l_dv", - "facet.interval.set", "[1,2]", - "f.test_l_dv.facet.interval.set", "[3,4]", - "f.test_l_dv.facet.interval.set", "[4,5]"), - "count(//lst[@name='facet_intervals']/lst)=2", - "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[1,2]'][.=0]", - "count(//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int)=2", // interval [1,2] not present - "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[3,4]'][.=2]", - "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[4,5]'][.=2]" - ); - - assertQ(req("q", "*:*", "facet", "true", - "facet.interval", "test_s_dv", - "facet.interval", "test_l_dv", - "facet.interval.set", "[1,2]", - "facet.interval.set", "[2,3]", - "facet.interval.set", "[3,4]", - "f.test_s_dv.facet.interval.set", "[cat,dog]"), - "count(//lst[@name='facet_intervals']/lst)=2", - "count(//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int)=1", // only [cat,dog] in test_s_dv - "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[cat,dog]'][.=5]", - "count(//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int)=3", - "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[1,2]'][.=2]", - "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[2,3]'][.=2]", - "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[3,4]'][.=2]" - ); - + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.interval", + "test_s_dv", + "facet.interval", + "test_l_dv", + "facet.interval.set", + "[1,2]", + "f.test_l_dv.facet.interval.set", + "[3,4]", + "f.test_l_dv.facet.interval.set", + "[4,5]"), + "count(//lst[@name='facet_intervals']/lst)=2", + "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[1,2]'][.=0]", + "count(//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int)=2", // interval [1,2] not + // present + 
"//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[3,4]'][.=2]", + "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[4,5]'][.=2]"); + + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.interval", + "test_s_dv", + "facet.interval", + "test_l_dv", + "facet.interval.set", + "[1,2]", + "facet.interval.set", + "[2,3]", + "facet.interval.set", + "[3,4]", + "f.test_s_dv.facet.interval.set", + "[cat,dog]"), + "count(//lst[@name='facet_intervals']/lst)=2", + "count(//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int)=1", // only [cat,dog] in + // test_s_dv + "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[cat,dog]'][.=5]", + "count(//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int)=3", + "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[1,2]'][.=2]", + "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[2,3]'][.=2]", + "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[3,4]'][.=2]"); + // use of facet.interval.set with wrong field type - assertQEx("Unexpected Exception", + assertQEx( + "Unexpected Exception", "Invalid start interval", - req("q", "*:*", "facet", "true", - "facet.interval", "test_l_dv", - "f.test_l_dv.facet.interval.set", "[cat,dog]"), + req( + "q", + "*:*", + "facet", + "true", + "facet.interval", + "test_l_dv", + "f.test_l_dv.facet.interval.set", + "[cat,dog]"), SolrException.ErrorCode.BAD_REQUEST); - } - private void assertStringInterval(String fieldName, String intervalStr, - String expectedStart, String expectedEnd) throws SyntaxError { + private void assertStringInterval( + String fieldName, String intervalStr, String expectedStart, String expectedEnd) + throws SyntaxError { SchemaField f = h.getCore().getLatestSchema().getField(fieldName); FacetInterval interval = new FacetInterval(f, intervalStr, new ModifiableSolrParams()); - assertEquals("Expected start " + expectedStart + " but found " + f.getType().toObject(f, interval.start), - interval.start, new BytesRef(f.getType().toInternal(expectedStart))); + assertEquals( + "Expected start " + expectedStart + " but found " + f.getType().toObject(f, interval.start), + interval.start, + new BytesRef(f.getType().toInternal(expectedStart))); - assertEquals("Expected end " + expectedEnd + " but found " + f.getType().toObject(f, interval.end), - interval.end, new BytesRef(f.getType().toInternal(expectedEnd))); + assertEquals( + "Expected end " + expectedEnd + " but found " + f.getType().toObject(f, interval.end), + interval.end, + new BytesRef(f.getType().toInternal(expectedEnd))); } private void assertBadInterval(String fieldName, String intervalStr, String errorMsg) { SchemaField f = h.getCore().getLatestSchema().getField(fieldName); - SyntaxError e = expectThrows(SyntaxError.class, () -> new FacetInterval(f, intervalStr, new ModifiableSolrParams())); - assertTrue("Unexpected error message for interval String: " + intervalStr + ": " + - e.getMessage(), e.getMessage().contains(errorMsg)); + SyntaxError e = + expectThrows( + SyntaxError.class, () -> new FacetInterval(f, intervalStr, new ModifiableSolrParams())); + assertTrue( + "Unexpected error message for interval String: " + intervalStr + ": " + e.getMessage(), + e.getMessage().contains(errorMsg)); } - private void assertInterval(String fieldName, String intervalStr, long[] included, long[] lowerThanStart, long[] graterThanEnd) throws SyntaxError { + private void assertInterval( + String fieldName, + String intervalStr, + long[] included, + long[] 
lowerThanStart,
+      long[] greaterThanEnd)
+      throws SyntaxError {
     SchemaField f = h.getCore().getLatestSchema().getField(fieldName);
     FacetInterval interval = new FacetInterval(f, intervalStr, new ModifiableSolrParams());
     for (long l : included) {
-      assertEquals("Value " + l + " should be INCLUDED for interval" + interval,
-          IntervalCompareResult.INCLUDED, interval.includes(l));
+      assertEquals(
+          "Value " + l + " should be INCLUDED for interval " + interval,
+          IntervalCompareResult.INCLUDED,
+          interval.includes(l));
     }
     for (long l : lowerThanStart) {
-      assertEquals("Value " + l + " should be LOWER_THAN_START for inteval " + interval,
-          IntervalCompareResult.LOWER_THAN_START, interval.includes(l));
+      assertEquals(
+          "Value " + l + " should be LOWER_THAN_START for interval " + interval,
+          IntervalCompareResult.LOWER_THAN_START,
+          interval.includes(l));
     }
     for (long l : greaterThanEnd) {
-      assertEquals("Value " + l + " should be GRATER_THAN_END for inteval " + interval,
-          IntervalCompareResult.GREATER_THAN_END, interval.includes(l));
+      assertEquals(
+          "Value " + l + " should be GREATER_THAN_END for interval " + interval,
+          IntervalCompareResult.GREATER_THAN_END,
+          interval.includes(l));
     }
-  }
-
-  private void assertIntervalKey(String fieldName, String intervalStr,
-      String expectedKey, String...params) throws SyntaxError {
-    assert (params.length&1)==0:"Params must have an even number of elements";
+  }
+
+  private void assertIntervalKey(
+      String fieldName, String intervalStr, String expectedKey, String... params)
+      throws SyntaxError {
+    assert (params.length & 1) == 0 : "Params must have an even number of elements";
     SchemaField f = h.getCore().getLatestSchema().getField(fieldName);
     ModifiableSolrParams solrParams = new ModifiableSolrParams();
-    for (int i = 0; i < params.length - 1;) {
-      solrParams.set(params[i], params[i+1]);
-      i+=2;
+    for (int i = 0; i < params.length - 1; ) {
+      solrParams.set(params[i], params[i + 1]);
+      i += 2;
     }
     FacetInterval interval = new FacetInterval(f, intervalStr, solrParams);
-
-    assertEquals("Expected key " + expectedKey + " but found " + interval.getKey(),
-        expectedKey, interval.getKey());
+
+    assertEquals(
+        "Expected key " + expectedKey + " but found " + interval.getKey(),
+        expectedKey,
+        interval.getKey());
   }
-
+
   public void testChangeKey() {
     assertU(adoc("id", "1", "test_s_dv", "dog"));
     assertU(adoc("id", "2", "test_s_dv", "cat"));
@@ -740,32 +1074,68 @@ public void testChangeKey() {
     assertU(adoc("id", "9", "test_s_dv", "cat"));
     assertU(adoc("id", "10"));
     assertU(commit());
-
-    assertQ(req("q", "*:*", "facet", "true", "facet.interval", "test_s_dv",
-        "f.test_s_dv.facet.interval.set", "{!key=foo}[bird,bird]",
-        "f.test_s_dv.facet.interval.set", "{!key='bar'}(bird,dog)"),
+
+    assertQ(
+        req(
+            "q",
+            "*:*",
+            "facet",
+            "true",
+            "facet.interval",
+            "test_s_dv",
+            "f.test_s_dv.facet.interval.set",
+            "{!key=foo}[bird,bird]",
+            "f.test_s_dv.facet.interval.set",
+            "{!key='bar'}(bird,dog)"),
        "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='foo'][.=1]",
        "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='bar'][.=3]");
-
-    assertQ(req("q", "*:*", "facet", "true", "facet.interval", "test_s_dv",
-        "f.test_s_dv.facet.interval.set", "{!key=Birds}[bird,bird]",
-        "f.test_s_dv.facet.interval.set", "{!key='foo bar'}(bird,dog)"),
+
+    assertQ(
+        req(
+            "q",
+            "*:*",
+            "facet",
+            "true",
+            "facet.interval",
+            "test_s_dv",
+            "f.test_s_dv.facet.interval.set",
+            "{!key=Birds}[bird,bird]",
+            "f.test_s_dv.facet.interval.set",
+            "{!key='foo bar'}(bird,dog)"),
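+        // (note: the {!key=...} local param replaces the raw interval string as the
+        // label under facet_intervals, so the XPaths below look up 'Birds' and
+        // 'foo bar' rather than the interval definitions themselves)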
"//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='Birds'][.=1]", "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='foo bar'][.=3]"); - - assertQ(req("q", "*:*", "facet", "true", "facet.interval", "test_s_dv", - "f.test_s_dv.facet.interval.set", "{!key=$p}[bird,bird]", - "p", "foo bar"), + + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.interval", + "test_s_dv", + "f.test_s_dv.facet.interval.set", + "{!key=$p}[bird,bird]", + "p", + "foo bar"), "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='foo bar'][.=1]"); - - assertQ(req("q", "*:*", "facet", "true", "facet.interval", "test_s_dv", - "f.test_s_dv.facet.interval.set", "{!key='[bird,\\}'}[bird,*]", - "f.test_s_dv.facet.interval.set", "{!key='\\{bird,dog\\}'}(bird,dog)", - "f.test_s_dv.facet.interval.set", "{!key='foo'}(bird,dog})"), + + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.interval", + "test_s_dv", + "f.test_s_dv.facet.interval.set", + "{!key='[bird,\\}'}[bird,*]", + "f.test_s_dv.facet.interval.set", + "{!key='\\{bird,dog\\}'}(bird,dog)", + "f.test_s_dv.facet.interval.set", + "{!key='foo'}(bird,dog})"), "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[bird,}'][.=9]", "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='{bird,dog}'][.=3]", "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='foo'][.=7]"); - } @Test @@ -936,10 +1306,10 @@ public void testIntFieldsMultipleSegments() { assertU(adoc("id", "10")); assertU(adoc("id", "11", "test_i_dv", "10")); assertU(commit()); - + int i = 12; while (getNumberOfReaders() < 2 && i < 20) { - //try to get more than one segment + // try to get more than one segment assertU(adoc("id", String.valueOf(i), "test_s_dv", String.valueOf(i))); assertU(commit()); i++; @@ -971,10 +1341,18 @@ public void testIntMultivaluedFields() { assertIntervalQueriesNumeric("test_is_dv"); - assertU(adoc("id", "1", "test_is_dv", "0", "test_is_dv", "1", "test_is_dv", "2", "test_is_dv", "3")); - assertU(adoc("id", "2", "test_is_dv", "1", "test_is_dv", "2", "test_is_dv", "3", "test_is_dv", "4")); - assertU(adoc("id", "3", "test_is_dv", "2", "test_is_dv", "3", "test_is_dv", "4", "test_is_dv", "5")); - assertU(adoc("id", "4", "test_is_dv", "3", "test_is_dv", "4", "test_is_dv", "5", "test_is_dv", "6")); + assertU( + adoc( + "id", "1", "test_is_dv", "0", "test_is_dv", "1", "test_is_dv", "2", "test_is_dv", "3")); + assertU( + adoc( + "id", "2", "test_is_dv", "1", "test_is_dv", "2", "test_is_dv", "3", "test_is_dv", "4")); + assertU( + adoc( + "id", "3", "test_is_dv", "2", "test_is_dv", "3", "test_is_dv", "4", "test_is_dv", "5")); + assertU( + adoc( + "id", "4", "test_is_dv", "3", "test_is_dv", "4", "test_is_dv", "5", "test_is_dv", "6")); assertU(commit()); assertIntervalQuery("test_is_dv", "[1,3]", "4"); @@ -1010,7 +1388,6 @@ public void testDateFields() { assertU(adoc("id", "5", "test_dt_dv", "NOW")); assertU(commit()); assertIntervalQuery("test_dt_dv", "[NOW/DAY-1DAY,NOW+2DAY]", "1"); - } @Test @@ -1042,23 +1419,34 @@ public void testWithDeletedDocs() { assertIntervalQueriesString("test_s_dv"); } - + @Test public void testChangeFieldKey() { assertU(adoc("id", "1", "test_s_dv", "dog", "test_l_dv", "1")); assertU(adoc("id", "2", "test_s_dv", "cat", "test_l_dv", "2")); assertU(commit()); - assertQ(req("q", "*:*", "facet", "true", "facet.interval", "{!key=foo}test_s_dv", - "facet.interval", "{!key=bar}test_l_dv", "f.test_s_dv.facet.interval.set", "[cat,dog]", - "f.test_l_dv.facet.interval.set", "[0,1]", 
- "f.test_l_dv.facet.interval.set", "[2,*]"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.interval", + "{!key=foo}test_s_dv", + "facet.interval", + "{!key=bar}test_l_dv", + "f.test_s_dv.facet.interval.set", + "[cat,dog]", + "f.test_l_dv.facet.interval.set", + "[0,1]", + "f.test_l_dv.facet.interval.set", + "[2,*]"), "//lst[@name='facet_intervals']/lst[@name='foo']/int[@name='[cat,dog]'][.=2]", "//lst[@name='facet_intervals']/lst[@name='bar']/int[@name='[0,1]'][.=1]", "//lst[@name='facet_intervals']/lst[@name='bar']/int[@name='[2,*]'][.=1]"); } - - + @Test public void testFilterExclusion() { assertU(adoc("id", "1", "test_s_dv", "dog")); @@ -1073,48 +1461,83 @@ public void testFilterExclusion() { assertU(adoc("id", "10")); assertU(commit()); - assertQ(req("q", "*:*", "facet", "true", "facet.interval", "test_s_dv", "rows", "0", - "f.test_s_dv.facet.interval.set", "[a,d]", - "f.test_s_dv.facet.interval.set", "[d,z]"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.interval", + "test_s_dv", + "rows", + "0", + "f.test_s_dv.facet.interval.set", + "[a,d]", + "f.test_s_dv.facet.interval.set", + "[d,z]"), "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[a,d]'][.=4]", "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[d,z]'][.=5]"); - - assertQ(req("q", "*:*", "facet", "true", "facet.interval", "test_s_dv", "rows", "0", - "f.test_s_dv.facet.interval.set", "[a,d]", - "f.test_s_dv.facet.interval.set", "[d,z]", - "fq", "test_s_dv:dog"), + + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.interval", + "test_s_dv", + "rows", + "0", + "f.test_s_dv.facet.interval.set", + "[a,d]", + "f.test_s_dv.facet.interval.set", + "[d,z]", + "fq", + "test_s_dv:dog"), "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[a,d]'][.=0]", "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[d,z]'][.=4]"); - - assertQ(req("q", "*:*", "facet", "true", "facet.interval", "{!ex=dogs}test_s_dv", "rows", "0", - "f.test_s_dv.facet.interval.set", "[a,d]", - "f.test_s_dv.facet.interval.set", "[d,z]", - "fq", "{!tag='dogs'}test_s_dv:dog"), + + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.interval", + "{!ex=dogs}test_s_dv", + "rows", + "0", + "f.test_s_dv.facet.interval.set", + "[a,d]", + "f.test_s_dv.facet.interval.set", + "[d,z]", + "fq", + "{!tag='dogs'}test_s_dv:dog"), "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[a,d]'][.=4]", "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[d,z]'][.=5]"); } - + @Test - public void testSolrJ() throws Exception { + public void testSolrJ() throws Exception { assertU(adoc("id", "1", "test_i_dv", "0")); assertU(adoc("id", "2", "test_i_dv", "1")); assertU(adoc("id", "3", "test_i_dv", "2")); assertU(commit()); - + // Don't close this client, it would shutdown the CoreContainer @SuppressWarnings("resource") SolrClient client = new EmbeddedSolrServer(h.getCoreContainer(), h.coreName); - + SolrQuery q = new SolrQuery(); q.setQuery("*:*"); - q.addIntervalFacets("test_i_dv", new String[]{"[0,1]","[2,*]"}); + q.addIntervalFacets("test_i_dv", new String[] {"[0,1]", "[2,*]"}); QueryResponse response = client.query(q); assertEquals(1, response.getIntervalFacets().size()); assertEquals("test_i_dv", response.getIntervalFacets().get(0).getField()); assertEquals(2, response.getIntervalFacets().get(0).getIntervals().size()); assertEquals("[0,1]", response.getIntervalFacets().get(0).getIntervals().get(0).getKey()); assertEquals("[2,*]", 
response.getIntervalFacets().get(0).getIntervals().get(1).getKey());
-
+
     q = new SolrQuery();
     q.setQuery("*:*");
     q.setFacet(true);
@@ -1127,10 +1550,7 @@ public void testSolrJ() throws Exception {
     assertEquals(2, response.getIntervalFacets().get(0).getIntervals().size());
     assertEquals("first", response.getIntervalFacets().get(0).getIntervals().get(0).getKey());
     assertEquals("second", response.getIntervalFacets().get(0).getIntervals().get(1).getKey());
-
   }
-
-
   private void assertIntervalQueriesNumeric(String field) {
     assertIntervalQuery(field, "[0,1]", "2");
@@ -1138,11 +1558,12 @@ private void assertIntervalQueriesNumeric(String field) {
     assertIntervalQuery(field, "[0,2)", "2");
     assertIntervalQuery(field, "(0,2]", "2");
     assertIntervalQuery(field, "[*,5]", "6");
-    assertIntervalQuery(field, "[*,3)", "3", "[2,5)", "3", "[6,8)", "2", "[3,*]", "7", "[10,10]", "1", "[10,10]", "1", "[10,10]", "1");
+    assertIntervalQuery(
+        field, "[*,3)", "3", "[2,5)", "3", "[6,8)", "2", "[3,*]", "7", "[10,10]", "1", "[10,10]",
+        "1", "[10,10]", "1");
     assertIntervalQuery(field, "(5,*]", "4", "[5,5]", "1", "(*,5)", "5");
     assertIntervalQuery(field, "[5,5]", "1", "(*,5)", "5", "(5,*]", "4");
     assertIntervalQuery(field, "(5,*]", "4", "(*,5)", "5", "[5,5]", "1");
-
   }

   private void assertIntervalQueriesString(String field) {
@@ -1151,28 +1572,43 @@ private void assertIntervalQueriesString(String field) {
     assertIntervalQuery(field, "[bird,dog)", "3");
     assertIntervalQuery(field, "(bird,turtle]", "6");
     assertIntervalQuery(field, "[*,bird]", "3");
-    assertIntervalQuery(field, "[*,bird)", "2", "[bird,cat)", "1", "[cat,dog)", "2", "[dog,*]", "4");
+    assertIntervalQuery(
+        field, "[*,bird)", "2", "[bird,cat)", "1", "[cat,dog)", "2", "[dog,*]", "4");
     assertIntervalQuery(field, "[*,*]", "9", "[*,dog)", "5", "[*,dog]", "8", "[dog,*]", "4");
-    assertIntervalQuery(field, field + ":dog", 3, "[*,*]", "3", "[*,dog)", "0", "[*,dog]", "3", "[dog,*]", "3", "[bird,cat]", "0");
+    assertIntervalQuery(
+        field,
+        field + ":dog",
+        3,
+        "[*,*]",
+        "3",
+        "[*,dog)",
+        "0",
+        "[*,dog]",
+        "3",
+        "[dog,*]",
+        "3",
+        "[bird,cat]",
+        "0");
     assertIntervalQuery(field, "(*,dog)", "5", "[dog, dog]", "3", "(dog,*)", "1");
     assertIntervalQuery(field, "[dog, dog]", "3", "(dog,*)", "1", "(*,dog)", "5");
     assertIntervalQuery(field, "(dog,*)", "1", "(*,dog)", "5", "[dog, dog]", "3");
   }

   /**
-   * Will run a match all query, and ask for interval facets in the specified field.
-   * The intervals facet are indicated in the intervals parameter, followed
-   * by the expected count result. For example:
-   * assertIntervalQuery("my_string_field", "[0,10]", "3", "(20,*), "12");
+   * Runs a match-all query and asks for interval facets on the specified field. The interval
+   * facets are given in the intervals parameter, each interval followed by its expected
+   * count. For example:
+   * assertIntervalQuery("my_string_field", "[0,10]", "3", "(20,*)", "12");
    *
-   * @param field The field in which the interval facet should be asked
+   * @param field the field on which the interval facets should be requested
    * @param intervals a variable array of intervals followed by the expected count (also a string)
    */
   private void assertIntervalQuery(String field, String... intervals) {
     assertIntervalQuery(field, "*:*", -1, intervals);
   }

-  private void assertIntervalQuery(String field, String query, int resultCount, String... intervals) {
+  private void assertIntervalQuery(
+      String field, String query, int resultCount, String... intervals) {
     assert (intervals.length & 1) == 0;
     int idx = 0;
     String[] params = new String[intervals.length + 6];
@@ -1191,13 +1627,27 @@ private void assertIntervalQuery(String field, String query, int resultCount, St
     String[] tests = new String[intervals.length / 2 + (resultCount > 0 ? 1 : 0)];
     idx = 0;
     for (int i = 0; i < intervals.length; i += 2) {
-      tests[idx++] = "//lst[@name='facet_intervals']/lst[@name='" + field + "']/int[@name='" + intervals[i] + "'][.=" + intervals[i + 1] + "]";
+      tests[idx++] =
+          "//lst[@name='facet_intervals']/lst[@name='"
+              + field
+              + "']/int[@name='"
+              + intervals[i]
+              + "'][.="
+              + intervals[i + 1]
+              + "]";
     }
     if (resultCount >= 0) {
       tests[idx++] = "//*[@numFound='" + resultCount + "']";
     }
-    assertQ("Unexpected facet iterval count. Field:" + field + ", Intervals: " + Arrays.toString(intervals) + "Query: " + query,
-        req(params), tests);
+    assertQ(
+        "Unexpected facet interval count. Field: "
+            + field
+            + ", Intervals: "
+            + Arrays.toString(intervals)
+            + ". Query: "
+            + query,
+        req(params),
+        tests);
   }
 }
diff --git a/solr/core/src/test/org/apache/solr/request/TestRemoteStreaming.java b/solr/core/src/test/org/apache/solr/request/TestRemoteStreaming.java
index 7389228e2ff..1a63020b4b5 100644
--- a/solr/core/src/test/org/apache/solr/request/TestRemoteStreaming.java
+++ b/solr/core/src/test/org/apache/solr/request/TestRemoteStreaming.java
@@ -25,7 +25,6 @@
 import java.net.URL;
 import java.net.URLEncoder;
 import java.nio.charset.StandardCharsets;
-
 import org.apache.commons.io.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.SolrJettyTestBase;
@@ -43,32 +42,28 @@
 import org.junit.BeforeClass;
 import org.junit.Test;

-/**
- * See SOLR-2854.
- */
-@SuppressSSL // does not yet work with ssl yet - uses raw java.net.URL API rather than HttpClient
+/** See SOLR-2854. */
+@SuppressSSL // does not work with ssl yet - uses raw java.net.URL API rather than HttpClient
 public class TestRemoteStreaming extends SolrJettyTestBase {
   private static File solrHomeDirectory;
-
+
   @BeforeClass
   public static void beforeTest() throws Exception {
-    //this one has handleSelect=true which a test here needs
+    // this one has handleSelect=true which a test here needs
     solrHomeDirectory = createTempDir(LuceneTestCase.getTestClass().getSimpleName()).toFile();
     setupJettyTestHome(solrHomeDirectory, "collection1");
     createAndStartJetty(solrHomeDirectory.getAbsolutePath());
   }

   @AfterClass
-  public static void afterTest() throws Exception {
-
-  }
+  public static void afterTest() throws Exception {}

   @Before
   public void doBefore() throws IOException, SolrServerException {
-    //add document and commit, and ensure it's there
+    // add document and commit, and ensure it's there
     SolrClient client = getSolrClient();
     SolrInputDocument doc = new SolrInputDocument();
-    doc.addField( "id", "1234" );
+    doc.addField("id", "1234");
     client.add(doc);
     client.commit();
     assertTrue(searchFindsIt());
@@ -83,12 +78,15 @@ public void testMakeDeleteAllUrl() throws Exception {
   @Test
   public void testStreamUrl() throws Exception {
     HttpSolrClient client = (HttpSolrClient) getSolrClient();
-    String streamUrl = client.getBaseURL()+"/select?q=*:*&fl=id&wt=csv";
+    String streamUrl = client.getBaseURL() + "/select?q=*:*&fl=id&wt=csv";

-    String getUrl = client.getBaseURL()+"/debug/dump?wt=xml&stream.url="+URLEncoder.encode(streamUrl,"UTF-8");
+    String getUrl =
+        client.getBaseURL()
+            + "/debug/dump?wt=xml&stream.url="
+            + URLEncoder.encode(streamUrl, "UTF-8");
     String content = getUrlForString(getUrl);
     assertTrue(content.contains("1234"));
-    //System.out.println(content);
+    // System.out.println(content);
   }

   private String getUrlForString(String getUrl) throws IOException {
@@ -110,22 +108,24 @@ private String getUrlForString(String getUrl) throws IOException {
   @Test
   public void testNoUrlAccess() throws Exception {
     SolrQuery query = new SolrQuery();
-    query.setQuery( "*:*" );//for anything
-    query.add("stream.url",makeDeleteAllUrl());
+    query.setQuery("*:*"); // for anything
+    query.add("stream.url", makeDeleteAllUrl());
     SolrException se = expectThrows(SolrException.class, () -> getSolrClient().query(query));
     assertSame(ErrorCode.BAD_REQUEST, ErrorCode.getErrorCode(se.code()));
   }
-
+
  /** Composes a URL that, if fetched, will delete all the data.
*/ private String makeDeleteAllUrl() throws UnsupportedEncodingException { HttpSolrClient client = (HttpSolrClient) getSolrClient(); String deleteQuery = "*:*"; - return client.getBaseURL()+"/update?commit=true&stream.body="+ URLEncoder.encode(deleteQuery, "UTF-8"); + return client.getBaseURL() + + "/update?commit=true&stream.body=" + + URLEncoder.encode(deleteQuery, "UTF-8"); } private boolean searchFindsIt() throws SolrServerException, IOException { SolrQuery query = new SolrQuery(); - query.setQuery( "id:1234" ); + query.setQuery("id:1234"); QueryResponse rsp = getSolrClient().query(query); return rsp.getResults().getNumFound() != 0; } diff --git a/solr/core/src/test/org/apache/solr/request/TestSolrRequestInfo.java b/solr/core/src/test/org/apache/solr/request/TestSolrRequestInfo.java index 0b1573b5da6..088fdf9f345 100644 --- a/solr/core/src/test/org/apache/solr/request/TestSolrRequestInfo.java +++ b/solr/core/src/test/org/apache/solr/request/TestSolrRequestInfo.java @@ -20,7 +20,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.response.SolrQueryResponse; @@ -28,53 +27,55 @@ public class TestSolrRequestInfo extends SolrTestCaseJ4 { - @BeforeClass - public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema11.xml"); - } + @BeforeClass + public static void beforeClass() throws Exception { + initCore("solrconfig.xml", "schema11.xml"); + } - public void testCloseHookTwice(){ - final SolrRequestInfo info = new SolrRequestInfo( - new LocalSolrQueryRequest(h.getCore(), params()), - new SolrQueryResponse()); - AtomicInteger counter = new AtomicInteger(); - info.addCloseHook(counter::incrementAndGet); - SolrRequestInfo.setRequestInfo(info); - SolrRequestInfo.setRequestInfo(info); - SolrRequestInfo.clearRequestInfo(); - assertNotNull(SolrRequestInfo.getRequestInfo()); - SolrRequestInfo.clearRequestInfo(); - assertEquals("hook should be closed only once", 1, counter.get()); - assertNull(SolrRequestInfo.getRequestInfo()); - } + public void testCloseHookTwice() { + final SolrRequestInfo info = + new SolrRequestInfo( + new LocalSolrQueryRequest(h.getCore(), params()), new SolrQueryResponse()); + AtomicInteger counter = new AtomicInteger(); + info.addCloseHook(counter::incrementAndGet); + SolrRequestInfo.setRequestInfo(info); + SolrRequestInfo.setRequestInfo(info); + SolrRequestInfo.clearRequestInfo(); + assertNotNull(SolrRequestInfo.getRequestInfo()); + SolrRequestInfo.clearRequestInfo(); + assertEquals("hook should be closed only once", 1, counter.get()); + assertNull(SolrRequestInfo.getRequestInfo()); + } - public void testThreadPool() throws InterruptedException { - final SolrRequestInfo info = new SolrRequestInfo( - new LocalSolrQueryRequest(h.getCore(), params()), - new SolrQueryResponse()); - AtomicInteger counter = new AtomicInteger(); + public void testThreadPool() throws InterruptedException { + final SolrRequestInfo info = + new SolrRequestInfo( + new LocalSolrQueryRequest(h.getCore(), params()), new SolrQueryResponse()); + AtomicInteger counter = new AtomicInteger(); - SolrRequestInfo.setRequestInfo(info); - ExecutorUtil.MDCAwareThreadPoolExecutor pool = new ExecutorUtil.MDCAwareThreadPoolExecutor(1, 1, 1, - TimeUnit.SECONDS, new ArrayBlockingQueue<>(1)); - AtomicBoolean run = new AtomicBoolean(false); - pool.execute(() -> { - final SolrRequestInfo poolInfo = 
SolrRequestInfo.getRequestInfo(); - assertSame(info, poolInfo); - info.addCloseHook(counter::incrementAndGet); - run.set(true); + SolrRequestInfo.setRequestInfo(info); + ExecutorUtil.MDCAwareThreadPoolExecutor pool = + new ExecutorUtil.MDCAwareThreadPoolExecutor( + 1, 1, 1, TimeUnit.SECONDS, new ArrayBlockingQueue<>(1)); + AtomicBoolean run = new AtomicBoolean(false); + pool.execute( + () -> { + final SolrRequestInfo poolInfo = SolrRequestInfo.getRequestInfo(); + assertSame(info, poolInfo); + info.addCloseHook(counter::incrementAndGet); + run.set(true); }); - if (random().nextBoolean()) { - pool.shutdown(); - } else { - pool.shutdownNow(); - } - SolrRequestInfo.clearRequestInfo(); - SolrRequestInfo.reset(); - - pool.awaitTermination(1, TimeUnit.MINUTES); - assertTrue(run.get()); - assertEquals("hook should be closed only once", 1, counter.get()); - assertNull(SolrRequestInfo.getRequestInfo()); + if (random().nextBoolean()) { + pool.shutdown(); + } else { + pool.shutdownNow(); } + SolrRequestInfo.clearRequestInfo(); + SolrRequestInfo.reset(); + + pool.awaitTermination(1, TimeUnit.MINUTES); + assertTrue(run.get()); + assertEquals("hook should be closed only once", 1, counter.get()); + assertNull(SolrRequestInfo.getRequestInfo()); + } } diff --git a/solr/core/src/test/org/apache/solr/request/TestStreamBody.java b/solr/core/src/test/org/apache/solr/request/TestStreamBody.java index 181a0080e00..8d9852336ea 100644 --- a/solr/core/src/test/org/apache/solr/request/TestStreamBody.java +++ b/solr/core/src/test/org/apache/solr/request/TestStreamBody.java @@ -16,11 +16,12 @@ */ package org.apache.solr.request; +import static org.apache.solr.core.TestSolrConfigHandler.runConfigCommand; + import java.io.File; import java.lang.invoke.MethodHandles; import java.util.SortedMap; import java.util.TreeMap; - import org.apache.commons.io.FileUtils; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.request.QueryRequest; @@ -35,14 +36,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.core.TestSolrConfigHandler.runConfigCommand; - public class TestStreamBody extends RestTestBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final String collection = "collection1"; private static final String confDir = collection + "/conf"; - + @Before public void before() throws Exception { File tmpSolrHome = createTempDir().toFile(); @@ -53,11 +52,17 @@ public void before() throws Exception { System.setProperty("managed.schema.mutable", "true"); System.setProperty("enable.update.log", "false"); - createJettyAndHarness(tmpSolrHome.getAbsolutePath(), "solrconfig-minimal.xml", "schema-rest.xml", - "/solr", true, extraServlets); + createJettyAndHarness( + tmpSolrHome.getAbsolutePath(), + "solrconfig-minimal.xml", + "schema-rest.xml", + "/solr", + true, + extraServlets); if (random().nextBoolean()) { log.info("These tests are run with V2 API"); - restTestHarness.setServerProvider(() -> jetty.getBaseUrl().toString() + "/____v2/cores/" + DEFAULT_TEST_CORENAME); + restTestHarness.setServerProvider( + () -> jetty.getBaseUrl().toString() + "/____v2/cores/" + DEFAULT_TEST_CORENAME); } } @@ -82,23 +87,26 @@ public void after() throws Exception { public void testQtUpdateFails() throws Exception { enableStreamBody(true); SolrQuery query = new SolrQuery(); - query.setQuery( "*:*" );//for anything - query.add("echoHandler","true"); - //sneaky sneaky - query.add("qt","/update"); - 
query.add(CommonParams.STREAM_BODY,"*:*"); + query.setQuery("*:*"); // for anything + query.add("echoHandler", "true"); + // sneaky sneaky + query.add("qt", "/update"); + query.add(CommonParams.STREAM_BODY, "*:*"); - QueryRequest queryRequest = new QueryRequest(query) { - @Override - public String getPath() { //don't let superclass substitute qt for the path - return "/select"; - } - }; + QueryRequest queryRequest = + new QueryRequest(query) { + @Override + public String getPath() { // don't let superclass substitute qt for the path + return "/select"; + } + }; try { queryRequest.process(getSolrClient()); fail(); } catch (SolrException se) { - assertTrue(se.getMessage(), se.getMessage().contains("Bad contentType for search handler :text/xml")); + assertTrue( + se.getMessage(), + se.getMessage().contains("Bad contentType for search handler :text/xml")); } } @@ -106,16 +114,18 @@ public String getPath() { //don't let superclass substitute qt for the path @Test public void testStreamBodyDefaultAndConfigApi() throws Exception { SolrQuery query = new SolrQuery(); - query.add(CommonParams.STREAM_BODY,"*:*"); - query.add("commit","true"); + query.add(CommonParams.STREAM_BODY, "*:*"); + query.add("commit", "true"); - QueryRequest queryRequest = new QueryRequest(query) { - @Override - public String getPath() { //don't let superclass substitute qt for the path - return "/update"; - } - }; - SolrException se = expectThrows(SolrException.class, () -> queryRequest.process(getSolrClient())); + QueryRequest queryRequest = + new QueryRequest(query) { + @Override + public String getPath() { // don't let superclass substitute qt for the path + return "/update"; + } + }; + SolrException se = + expectThrows(SolrException.class, () -> queryRequest.process(getSolrClient())); assertTrue(se.getMessage(), se.getMessage().contains("Stream Body is disabled")); enableStreamBody(true); queryRequest.process(getSolrClient()); @@ -124,7 +134,10 @@ public String getPath() { //don't let superclass substitute qt for the path // Enables/disables stream.body through Config API private void enableStreamBody(boolean enable) throws Exception { RestTestHarness harness = restTestHarness; - String payload = "{ 'set-property' : { 'requestDispatcher.requestParsers.enableStreamBody':" + enable + "} }"; + String payload = + "{ 'set-property' : { 'requestDispatcher.requestParsers.enableStreamBody':" + + enable + + "} }"; runConfigCommand(harness, "/config?wt=json", payload); } } diff --git a/solr/core/src/test/org/apache/solr/request/TestUnInvertedFieldException.java b/solr/core/src/test/org/apache/solr/request/TestUnInvertedFieldException.java index b035826cd9c..6aebf820f6c 100644 --- a/solr/core/src/test/org/apache/solr/request/TestUnInvertedFieldException.java +++ b/solr/core/src/test/org/apache/solr/request/TestUnInvertedFieldException.java @@ -26,7 +26,6 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; - import org.apache.lucene.index.Term; import org.apache.lucene.util.NamedThreadFactory; import org.apache.lucene.util.TestUtil; @@ -43,7 +42,7 @@ public class TestUnInvertedFieldException extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema11.xml"); + initCore("solrconfig.xml", "schema11.xml"); } private int numTerms; @@ -64,16 +63,16 @@ public void tearDown() throws Exception { String t(int tnum) { return String.format(Locale.ROOT, "%08d", tnum); } - + void createIndex(int nTerms) { 
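     // wipes the index, then (re)indexes nTerms docs so the field has a known set of
     // distinct terms for UnInvertedField to uninvert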
     assertU(delQ("*:*"));
-    for (int i=0; i<nTerms; i++) {
+    for (int i = 0; i < nTerms; i++) {
@@ ... @@ public void testConcurrentInit() throws Exception {
     List<Callable<UnInvertedField>> initCallables = new ArrayList<>();
-    for (int i=0;i< TestUtil.nextInt(random(), 10, 30);i++) {
-      initCallables.add(()-> UnInvertedField.getUnInvertedField(proto.field(), searcher));
+    for (int i = 0; i < TestUtil.nextInt(random(), 10, 30); i++) {
+      initCallables.add(() -> UnInvertedField.getUnInvertedField(proto.field(), searcher));
     }
-    final ThreadPoolExecutor pool = new MDCAwareThreadPoolExecutor(3,
-        TestUtil.nextInt(random(), 3, 6), 10, TimeUnit.MILLISECONDS,
-        new LinkedBlockingQueue(), new NamedThreadFactory(getClass().getSimpleName()));
+    final ThreadPoolExecutor pool =
+        new MDCAwareThreadPoolExecutor(
+            3,
+            TestUtil.nextInt(random(), 3, 6),
+            10,
+            TimeUnit.MILLISECONDS,
+            new LinkedBlockingQueue(),
+            new NamedThreadFactory(getClass().getSimpleName()));
     try {
       TestInjection.uifOutOfMemoryError = true;
       if (assertsAreEnabled) { // if they aren't, we check that injection is disabled in live
-        List<Future<UnInvertedField>> futures = initCallables.stream().map((c) -> pool.submit(c))
-            .collect(Collectors.toList());
+        List<Future<UnInvertedField>> futures =
+            initCallables.stream().map((c) -> pool.submit(c)).collect(Collectors.toList());
         for (Future<UnInvertedField> uifuture : futures) {
           ExecutionException injection = assertThrows(ExecutionException.class, uifuture::get);
           Throwable root = SolrException.getRootCause(injection);
@@ -103,8 +107,8 @@ public void testConcurrentInit() throws Exception {
         TestInjection.uifOutOfMemoryError = false;
       }
       UnInvertedField prev = null;
-      List<Future<UnInvertedField>> futures = initCallables.stream().map((c) -> pool.submit(c))
-          .collect(Collectors.toList());
+      List<Future<UnInvertedField>> futures =
+          initCallables.stream().map((c) -> pool.submit(c)).collect(Collectors.toList());
       for (Future<UnInvertedField> uifuture : futures) {
         final UnInvertedField uif = uifuture.get();
         assertNotNull(uif);
@@ -121,4 +125,3 @@ public void testConcurrentInit() throws Exception {
     }
   }
 }
-
diff --git a/solr/core/src/test/org/apache/solr/request/TestWriterPerf.java b/solr/core/src/test/org/apache/solr/request/TestWriterPerf.java
index ed869b067f7..4738017f3a9 100644
--- a/solr/core/src/test/org/apache/solr/request/TestWriterPerf.java
+++ b/solr/core/src/test/org/apache/solr/request/TestWriterPerf.java
@@ -23,20 +23,18 @@
 import java.lang.invoke.MethodHandles;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
-
+import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.ResponseParser;
 import org.apache.solr.client.solrj.impl.BinaryResponseParser;
 import org.apache.solr.client.solrj.impl.XMLResponseParser;
 import org.apache.solr.response.BinaryQueryResponseWriter;
 import org.apache.solr.response.QueryResponseWriter;
 import org.apache.solr.response.SolrQueryResponse;
-import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.util.RTimer;
 import org.junit.BeforeClass;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-

 public class TestWriterPerf extends SolrTestCaseJ4 {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -44,11 +42,14 @@ public class TestWriterPerf extends SolrTestCaseJ4 {
   @BeforeClass
   public static void beforeClass() throws Exception {
     // we need DVs on point fields to compute stats & facets
-    if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true");
+    if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP))
+      System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true");
     initCore("solrconfig-functionquery.xml", "schema11.xml");
   }
-
-  public String getCoreName() { return "basic"; }
+
+  public String getCoreName() {
+    return
"basic"; + } @Override public void setUp() throws Exception { @@ -56,6 +57,7 @@ public void setUp() throws Exception { // the super classes version super.setUp(); } + @Override public void tearDown() throws Exception { // if you override setUp or tearDown, you better call @@ -68,7 +70,6 @@ public void tearDown() throws Exception { String i1 = "f_i"; String tag = "f_ss"; - void index(Object... olst) { ArrayList lst = new ArrayList<>(); for (Object o : olst) lst.add(o.toString()); @@ -76,56 +77,121 @@ void index(Object... olst) { } void makeIndex() { - index(id,1, i1, 100,t1,"now is the time for all good men", tag,"patriotic"); - index(id,2, i1, 50 ,t1,"to come to the aid of their country.", tag,"patriotic",tag,"country",tag,"nation",tag,"speeches"); - index(id,3, i1, 2 ,t1,"how now brown cow", tag,"cow",tag,"jersey"); - index(id,4, i1, -100 ,t1,"the quick fox jumped over the lazy dog",tag,"fox",tag,"dog",tag,"quick",tag,"slow",tag,"lazy"); - index(id,5, i1, 50 ,t1,"the quick fox jumped way over the lazy dog",tag,"fox",tag,"dog"); - index(id,6, i1, -60 ,t1,"humpty dumpy sat on a wall",tag,"humpty",tag,"dumpty"); - index(id,7, i1, 123 ,t1,"humpty dumpy had a great fall",tag,"accidents"); - index(id,8, i1, 876 ,t1,"all the kings horses and all the kings men",tag,"king",tag,"horses",tag,"trouble"); - index(id,9, i1, 7 ,t1,"couldn't put humpty together again",tag,"humpty",tag,"broken"); - index(id,10, i1, 4321 ,t1,"this too shall pass",tag,"1",tag,"2",tag,"infinity"); - index(id,11, i1, 33 ,t1,"An eye for eye only ends up making the whole world blind.",tag,"ouch",tag,"eye",tag,"peace",tag,"world"); - index(id,12, i1, 379 ,t1,"Great works are performed, not by strength, but by perseverance.",tag,"herculese",tag,"strong",tag,"stubborn"); + index(id, 1, i1, 100, t1, "now is the time for all good men", tag, "patriotic"); + index( + id, + 2, + i1, + 50, + t1, + "to come to the aid of their country.", + tag, + "patriotic", + tag, + "country", + tag, + "nation", + tag, + "speeches"); + index(id, 3, i1, 2, t1, "how now brown cow", tag, "cow", tag, "jersey"); + index( + id, + 4, + i1, + -100, + t1, + "the quick fox jumped over the lazy dog", + tag, + "fox", + tag, + "dog", + tag, + "quick", + tag, + "slow", + tag, + "lazy"); + index(id, 5, i1, 50, t1, "the quick fox jumped way over the lazy dog", tag, "fox", tag, "dog"); + index(id, 6, i1, -60, t1, "humpty dumpy sat on a wall", tag, "humpty", tag, "dumpty"); + index(id, 7, i1, 123, t1, "humpty dumpy had a great fall", tag, "accidents"); + index( + id, + 8, + i1, + 876, + t1, + "all the kings horses and all the kings men", + tag, + "king", + tag, + "horses", + tag, + "trouble"); + index(id, 9, i1, 7, t1, "couldn't put humpty together again", tag, "humpty", tag, "broken"); + index(id, 10, i1, 4321, t1, "this too shall pass", tag, "1", tag, "2", tag, "infinity"); + index( + id, + 11, + i1, + 33, + t1, + "An eye for eye only ends up making the whole world blind.", + tag, + "ouch", + tag, + "eye", + tag, + "peace", + tag, + "world"); + index( + id, + 12, + i1, + 379, + t1, + "Great works are performed, not by strength, but by perseverance.", + tag, + "herculese", + tag, + "strong", + tag, + "stubborn"); assertU(optimize()); } - /** make sure to close req after you are done using the response */ public SolrQueryResponse getResponse(SolrQueryRequest req) throws Exception { SolrQueryResponse rsp = new SolrQueryResponse(); - h.getCore().execute(h.getCore().getRequestHandler(null),req,rsp); + h.getCore().execute(h.getCore().getRequestHandler(null), req, rsp); if 
(rsp.getException() != null) {
       throw rsp.getException();
     }
     return rsp;
   }

-
   void doPerf(String writerName, SolrQueryRequest req, int encIter, int decIter) throws Exception {
     SolrQueryResponse rsp = getResponse(req);
     QueryResponseWriter w = h.getCore().getQueryResponseWriter(writerName);
-
-    ByteArrayOutputStream out=null;
+    ByteArrayOutputStream out = null;

     System.gc();
     RTimer timer = new RTimer();
-    for (int i=0; i<encIter; i++) {
+    for (int i = 0; i < encIter; i++) {
diff --git a/solr/core/src/test/org/apache/solr/request/macro/TestMacroExpander.java b/solr/core/src/test/org/apache/solr/request/macro/TestMacroExpander.java
--- a/solr/core/src/test/org/apache/solr/request/macro/TestMacroExpander.java
+++ b/solr/core/src/test/org/apache/solr/request/macro/TestMacroExpander.java
@@ ... @@ public void testExamples() {
-    final Map<String,String[]> testParams = new HashMap();
+    final Map<String, String[]> testParams = new HashMap();
     final MacroExpander me;
     // example behavior unaffected by absence or value of failOnMissingParams
     if (random().nextBoolean()) {
@@ -40,59 +37,59 @@ public void testExamples() {
       me = new MacroExpander(testParams, failOnMissingParams);
     }

-    //default examples: https://cwiki.apache.org/confluence/display/solr/Parameter+Substitution
+    // default examples: https://cwiki.apache.org/confluence/display/solr/Parameter+Substitution
     // and http://yonik.com/solr-query-parameter-substitution/

-    //using params
+    // using params
     String[] lowParams = {"50"};
-    testParams.put("low",lowParams);
+    testParams.put("low", lowParams);
     String[] highParams = {"100"};
-    testParams.put("high",highParams);
+    testParams.put("high", highParams);
     String testQuery = "q=popularity:[ ${low} TO ${high} ]";
     assertEquals("q=popularity:[ 50 TO 100 ]", me.expand(testQuery));

-    //using default values
+    // using default values
     testQuery = "q=popularity:[ ${low:10} TO ${high:20} ]";
     assertEquals("q=popularity:[ 50 TO 100 ]", me.expand(testQuery));
     testParams.clear();
     assertEquals("q=popularity:[ 10 TO 20 ]", me.expand(testQuery));

-    //multiple levels of substitutions
+    // multiple levels of substitutions
     testQuery = "q=${pop_query}";
     String[] popQueryParams = {"${pop_field}:[ ${low} TO ${high} ] AND inStock:true"};
     String[] popFieldParams = {"popularity"};
-    testParams.put("low",lowParams);
-    testParams.put("high",highParams);
-    testParams.put("pop_query",popQueryParams);
-    testParams.put("pop_field",popFieldParams);
+    testParams.put("low", lowParams);
+    testParams.put("high", highParams);
+    testParams.put("pop_query", popQueryParams);
+    testParams.put("pop_field", popFieldParams);
     assertEquals("q=popularity:[ 50 TO 100 ] AND inStock:true", me.expand(testQuery));

-    //end default examples
+    // end default examples
   }

   @Test
   public void testOnMissingParams() {
-    final Map<String,String[]> testParams = new HashMap();
+    final Map<String, String[]> testParams = new HashMap();
     final MacroExpander meSkipOnMissingParams = new MacroExpander(testParams);
     final MacroExpander meFailOnMissingParams = new MacroExpander(testParams, true);

     final String low = "50";
     final String high = "100";
-    testParams.put("low", new String[]{ low });
-    testParams.put("high", new String[]{ high });
+    testParams.put("low", new String[] {low});
+    testParams.put("high", new String[] {high});
     final String testQuery = "q=popularity:[ ${low} TO ${high} ]";

-    //when params all present the expansion results match
-    final String expandedQuery = "q=popularity:[ "+low+" TO "+high+" ]";
+    // when params all present the expansion results match
+    final String expandedQuery = "q=popularity:[ " + low + " TO " + high + " ]";
     assertEquals(expandedQuery, meSkipOnMissingParams.expand(testQuery));
     assertEquals(expandedQuery, meFailOnMissingParams.expand(testQuery));

-    //when param(s) missing and have no default the expansion results differ
+    // when param(s) missing and have no default the expansion results differ
     final String expandedLow;
     final String expandedHigh;
     if (random().nextBoolean()) { // keep low
@@ -107,7 +104,8 @@ public void testOnMissingParams() {
testOnMissingParams() {
       expandedHigh = "";
       testParams.remove("high");
     }
-    assertEquals("q=popularity:[ "+expandedLow+" TO "+expandedHigh+" ]",
+    assertEquals(
+        "q=popularity:[ " + expandedLow + " TO " + expandedHigh + " ]",
         meSkipOnMissingParams.expand(testQuery));
     if (testParams.size() < 2) { // at least one of the two parameters missing
       assertEquals(null, meFailOnMissingParams.expand(testQuery));
@@ -116,11 +114,12 @@ public void testMap() { // see SOLR-9740, the second fq param was being dropped.
-    final Map<String,String[]> request = new HashMap<>();
+    final Map<String, String[]> request = new HashMap<>();
     request.put("fq", new String[] {"zero", "${one_ref}", "two", "${three_ref}"});
-    request.put("expr", new String[] {"${one_ref}"}); // expr is for streaming expressions, no replacement by default
-    request.put("one_ref",new String[] {"one"});
-    request.put("three_ref",new String[] {"three"});
+    // expr is for streaming expressions, no replacement by default
+    request.put("expr", new String[] {"${one_ref}"});
+    request.put("one_ref", new String[] {"one"});
+    request.put("three_ref", new String[] {"three"});
     Map<String, String[]> expanded = MacroExpander.expand(request);
     assertEquals("zero", expanded.get("fq")[0]);
     assertEquals("one", expanded.get("fq")[1]);
@@ -132,14 +131,15 @@ public void testMap() {
   // see SOLR-9740, the second fq param was being dropped.
   @Test
   public void testMapExprExpandOn() {
-    final Map<String,String[]> request = new HashMap<>();
+    final Map<String, String[]> request = new HashMap<>();
     request.put("fq", new String[] {"zero", "${one_ref}", "two", "${three_ref}"});
-    request.put("expr", new String[] {"${one_ref}"}); // expr is for streaming expressions, no replacement by default
-    request.put("one_ref",new String[] {"one"});
-    request.put("three_ref",new String[] {"three"});
+    // expr is for streaming expressions, no replacement by default
+    request.put("expr", new String[] {"${one_ref}"});
+    request.put("one_ref", new String[] {"one"});
+    request.put("three_ref", new String[] {"three"});
     // I believe that so long as this is sure to be reset before the end of the test we should
     // be fine with respect to other tests.
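// For readers of the next hunk: the StreamingExpressionMacros system property is
// the switch that lets ${...} macros inside the expr parameter be expanded; they
// are skipped by default, per the comment above. A hedged sketch of the
// save/set/restore pattern the surrounding comment asks for; the finally block
// is an assumption here, since the reset itself is outside this hunk:
//
//   String saved = System.getProperty("StreamingExpressionMacros", "false");
//   System.setProperty("StreamingExpressionMacros", "true");
//   try {
//     Map<String, String[]> expanded = MacroExpander.expand(request);
//     // with the property on, expanded.get("expr")[0] should be "one", not "${one_ref}"
//   } finally {
//     System.setProperty("StreamingExpressionMacros", saved);
//   }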
-    String oldVal = System.getProperty("StreamingExpressionMacros","false");
+    String oldVal = System.getProperty("StreamingExpressionMacros", "false");
     System.setProperty("StreamingExpressionMacros", "true");
     try {
       Map<String, String[]> expanded = MacroExpander.expand(request);
@@ -155,7 +155,7 @@ public void testMapExprExpandOn() {
   @Test
   public void testUnbalanced() { // SOLR-13181
-    final Map<String,String[]> request = Collections.singletonMap("answer", new String[]{ "42" });
+    final Map<String, String[]> request = Collections.singletonMap("answer", new String[] {"42"});
     final MacroExpander meSkipOnMissingParams = new MacroExpander(request);
     final MacroExpander meFailOnMissingParams = new MacroExpander(request, true);
     assertEquals("${noClose", meSkipOnMissingParams.expand("${noClose"));
diff --git a/solr/core/src/test/org/apache/solr/request/macro/TestMacros.java b/solr/core/src/test/org/apache/solr/request/macro/TestMacros.java
index 77d7d8c7021..b80551f5df3 100644
--- a/solr/core/src/test/org/apache/solr/request/macro/TestMacros.java
+++ b/solr/core/src/test/org/apache/solr/request/macro/TestMacros.java
@@ -20,70 +20,80 @@
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-
 public class TestMacros extends SolrTestCaseJ4 {
   @BeforeClass
   public static void beforeTests() throws Exception {
-    initCore("solrconfig-tlog.xml","schema_latest.xml");
+    initCore("solrconfig-tlog.xml", "schema_latest.xml");
   }
 
   @Test
   public void testMacros() throws Exception {
-    assertU(add(doc("id", "1", "val_s", "aaa", "val_i","123")));
-    assertU(add(doc("id", "2", "val_s", "bbb", "val_i","456")));
+    assertU(add(doc("id", "1", "val_s", "aaa", "val_i", "123")));
+    assertU(add(doc("id", "2", "val_s", "bbb", "val_i", "456")));
     assertU(commit());
 
+    assertJQ(req("fl", "id", "q", "id:${id}", "id", "1"), "/response/docs==[{'id':'1'}]");
+    assertJQ(req("fl", "id", "q", "${idquery}", "idquery", "id:1"), "/response/docs==[{'id':'1'}]");
-    assertJQ(req("fl","id", "q", "id:${id}", "id","1") , "/response/docs==[{'id':'1'}]" );
-
-    assertJQ(req("fl","id", "q", "${idquery}", "idquery","id:1") , "/response/docs==[{'id':'1'}]" );
-
-    assertJQ(req("fl","id", "q", "${fname}:${fval}", "fname","id", "fval","2") , "/response/docs==[{'id':'2'}]" );
+    assertJQ(
+        req("fl", "id", "q", "${fname}:${fval}", "fname", "id", "fval", "2"),
+        "/response/docs==[{'id':'2'}]");
 
     // test macro expansion in keys...
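// Worked illustration of the key-expansion case exercised below, derived from
// the request parameters in the following assertion (the arrows are annotation,
// not code). With p=param, macros are expanded in parameter names as well as
// values:
//
//   field${p}=val_s   becomes   fieldparam=val_s
//   value${p}=aaa     becomes   valueparam=aaa
//
// so the local-params query {!term f=$fieldparam v=$valueparam} dereferences the
// expanded names and ends up as a term query for val_s:aaa, matching doc 1.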
- assertJQ(req("fl","id", "q", "{!term f=$fieldparam v=$valueparam}", "field${p}","val_s", "value${p}", "aaa", "p","param", "echoParams","ALL") - , "/response/docs==[{'id':'1'}]" - ); + assertJQ( + req( + "fl", + "id", + "q", + "{!term f=$fieldparam v=$valueparam}", + "field${p}", + "val_s", + "value${p}", + "aaa", + "p", + "param", + "echoParams", + "ALL"), + "/response/docs==[{'id':'1'}]"); // test disabling expansion - assertJQ(req("fl","id", "q", "id:\"${id}\"", "id","1", "expandMacros","false") - , "/response/docs==[]" - ); + assertJQ( + req("fl", "id", "q", "id:\"${id}\"", "id", "1", "expandMacros", "false"), + "/response/docs==[]"); // test multiple levels in values - assertJQ(req("fl","id", "q", "${idquery}", "idquery","${a}${b}", "a","val${fieldpostfix}:", "b","${fieldval}", "fieldpostfix","_s", "fieldval","bbb") - , "/response/docs==[{'id':'2'}]" - ); + assertJQ( + req( + "fl", + "id", + "q", + "${idquery}", + "idquery", + "${a}${b}", + "a", + "val${fieldpostfix}:", + "b", + "${fieldval}", + "fieldpostfix", + "_s", + "fieldval", + "bbb"), + "/response/docs==[{'id':'2'}]"); // test defaults - assertJQ(req("fl","id", "q", "val_s:${val:aaa}") - , "/response/docs==[{'id':'1'}]" - ); + assertJQ(req("fl", "id", "q", "val_s:${val:aaa}"), "/response/docs==[{'id':'1'}]"); // test defaults with value present - assertJQ(req("fl","id", "q", "val_s:${val:aaa}", "val","bbb") - , "/response/docs==[{'id':'2'}]" - ); + assertJQ( + req("fl", "id", "q", "val_s:${val:aaa}", "val", "bbb"), "/response/docs==[{'id':'2'}]"); // test zero length default value - assertJQ(req("fl","id", "q", "val_s:${missing:}aaa") - , "/response/docs==[{'id':'1'}]" - ); + assertJQ(req("fl", "id", "q", "val_s:${missing:}aaa"), "/response/docs==[{'id':'1'}]"); // test missing value - assertJQ(req("fl","id", "q", "val_s:${missing}aaa") - , "/response/docs==[{'id':'1'}]" - ); - + assertJQ(req("fl", "id", "q", "val_s:${missing}aaa"), "/response/docs==[{'id':'1'}]"); } - } diff --git a/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java b/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java index b62ba7eefd2..9af64108cfc 100644 --- a/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java +++ b/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java @@ -24,7 +24,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; - import org.apache.solr.JSONTestUtil; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrDocument; @@ -38,13 +37,11 @@ import org.junit.BeforeClass; import org.junit.Test; -/** Test some aspects of JSON/python writer output (very incomplete) - * - */ +/** Test some aspects of JSON/python writer output (very incomplete) */ public class JSONWriterTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); } private void jsonEq(String expected, String received) { @@ -52,10 +49,10 @@ private void jsonEq(String expected, String received) { received = received.trim(); assertEquals(expected, received); } - + @Test public void testTypes() throws IOException { - SolrQueryRequest req = req("q", "dummy", "indent","off"); + SolrQueryRequest req = req("q", "dummy", "indent", "off"); SolrQueryResponse rsp = new SolrQueryResponse(); QueryResponseWriter w = new PythonResponseWriter(); @@ -80,13 +77,14 @@ public void testTypes() throws IOException { @Test public void testJSON() throws IOException { - final String[] 
namedListStyles = new String[] { - JsonTextWriter.JSON_NL_FLAT, - JsonTextWriter.JSON_NL_MAP, - JsonTextWriter.JSON_NL_ARROFARR, - JsonTextWriter.JSON_NL_ARROFMAP, - JsonTextWriter.JSON_NL_ARROFNTV, - }; + final String[] namedListStyles = + new String[] { + JsonTextWriter.JSON_NL_FLAT, + JsonTextWriter.JSON_NL_MAP, + JsonTextWriter.JSON_NL_ARROFARR, + JsonTextWriter.JSON_NL_ARROFMAP, + JsonTextWriter.JSON_NL_ARROFNTV, + }; for (final String namedListStyle : namedListStyles) { implTestJSON(namedListStyle); } @@ -94,19 +92,20 @@ public void testJSON() throws IOException { } private void implTestJSON(final String namedListStyle) throws IOException { - SolrQueryRequest req = req("wt","json","json.nl",namedListStyle, "indent", "off"); + SolrQueryRequest req = req("wt", "json", "json.nl", namedListStyle, "indent", "off"); SolrQueryResponse rsp = new SolrQueryResponse(); JSONResponseWriter w = new JSONResponseWriter(); StringWriter buf = new StringWriter(); NamedList nl = new NamedList<>(); - nl.add("data1", "he\u2028llo\u2029!"); // make sure that 2028 and 2029 are both escaped (they are illegal in javascript) + // make sure that 2028 and 2029 are both escaped (they are illegal in javascript) + nl.add("data1", "he\u2028llo\u2029!"); nl.add(null, 42); nl.add(null, null); rsp.add("nl", nl); - rsp.add("byte", Byte.valueOf((byte)-3)); - rsp.add("short", Short.valueOf((short)-4)); + rsp.add("byte", Byte.valueOf((byte) -3)); + rsp.add("short", Short.valueOf((short) -4)); rsp.add("bytes", "abc".getBytes(StandardCharsets.UTF_8)); w.write(buf, req, rsp); @@ -121,22 +120,25 @@ private void implTestJSON(final String namedListStyle) throws IOException { } else if (namedListStyle == JSONWriter.JSON_NL_ARROFMAP) { expectedNLjson = "\"nl\":[{\"data1\":\"he\\u2028llo\\u2029!\"},42,null]"; } else if (namedListStyle == JSONWriter.JSON_NL_ARROFNTV) { - expectedNLjson = "\"nl\":[{\"name\":\"data1\",\"type\":\"str\",\"value\":\"he\\u2028llo\\u2029!\"}," + - "{\"name\":null,\"type\":\"int\",\"value\":42}," + - "{\"name\":null,\"type\":\"null\",\"value\":null}]"; + expectedNLjson = + "\"nl\":[{\"name\":\"data1\",\"type\":\"str\",\"value\":\"he\\u2028llo\\u2029!\"}," + + "{\"name\":null,\"type\":\"int\",\"value\":42}," + + "{\"name\":null,\"type\":\"null\",\"value\":null}]"; } else { expectedNLjson = null; - fail("unexpected namedListStyle="+namedListStyle); + fail("unexpected namedListStyle=" + namedListStyle); } - jsonEq("{"+expectedNLjson+",\"byte\":-3,\"short\":-4,\"bytes\":\"YWJj\"}", buf.toString()); + jsonEq("{" + expectedNLjson + ",\"byte\":-3,\"short\":-4,\"bytes\":\"YWJj\"}", buf.toString()); req.close(); } @Test public void testJSONSolrDocument() throws Exception { - SolrQueryRequest req = req(CommonParams.WT,"json", - CommonParams.FL,"id,score,_children_,path"); + SolrQueryRequest req = + req( + CommonParams.WT, "json", + CommonParams.FL, "id,score,_children_,path"); SolrQueryResponse rsp = new SolrQueryResponse(); JSONResponseWriter w = new JSONResponseWriter(); @@ -172,17 +174,17 @@ public void testJSONSolrDocument() throws Exception { w.write(buf, req, rsp); String result = buf.toString(); - assertFalse("response contains unexpected fields: " + result, - result.contains("hello") || - result.contains("\"subject\"") || - result.contains("\"title\"")); - assertTrue("response doesn't contain expected fields: " + result, - result.contains("\"id\"") && - result.contains("\"score\"") && result.contains("_children_")); - - String expectedResult = "{'response':{'numFound':1,'start':0,'maxScore':0.7, 
'numFoundExact':true,'docs':[{'id':'1', 'score':'0.7'," + - " '_children_':{'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'2', 'score':'0.4', 'path':['a>b', 'a>b>c']}] }}] }}"; - String error = JSONTestUtil.match(result, "=="+expectedResult); + assertFalse( + "response contains unexpected fields: " + result, + result.contains("hello") || result.contains("\"subject\"") || result.contains("\"title\"")); + assertTrue( + "response doesn't contain expected fields: " + result, + result.contains("\"id\"") && result.contains("\"score\"") && result.contains("_children_")); + + String expectedResult = + "{'response':{'numFound':1,'start':0,'maxScore':0.7, 'numFoundExact':true,'docs':[{'id':'1', 'score':'0.7'," + + " '_children_':{'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'2', 'score':'0.4', 'path':['a>b', 'a>b>c']}] }}] }}"; + String error = JSONTestUtil.match(result, "==" + expectedResult); assertNull("response validation failed with error: " + error, error); req.close(); @@ -203,21 +205,25 @@ public void testArrntvWriterOverridesAllWrites() { methodsExpectedNotOverridden.add("writeMapOpener"); methodsExpectedNotOverridden.add("writeMapSeparator"); methodsExpectedNotOverridden.add("writeMapCloser"); - methodsExpectedNotOverridden.add("public default void org.apache.solr.common.util.JsonTextWriter.writeArray(java.lang.String,java.util.List,boolean) throws java.io.IOException"); + methodsExpectedNotOverridden.add( + "public default void org.apache.solr.common.util.JsonTextWriter.writeArray(java.lang.String,java.util.List,boolean) throws java.io.IOException"); methodsExpectedNotOverridden.add("writeArrayOpener"); methodsExpectedNotOverridden.add("writeArraySeparator"); methodsExpectedNotOverridden.add("writeArrayCloser"); - methodsExpectedNotOverridden.add("public default void org.apache.solr.common.util.JsonTextWriter.writeMap(org.apache.solr.common.MapWriter) throws java.io.IOException"); - methodsExpectedNotOverridden.add("public default void org.apache.solr.common.util.JsonTextWriter.writeIterator(org.apache.solr.common.IteratorWriter) throws java.io.IOException"); - methodsExpectedNotOverridden.add("public default void org.apache.solr.common.util.JsonTextWriter.writeJsonIter(java.util.Iterator,boolean) throws java.io.IOException"); + methodsExpectedNotOverridden.add( + "public default void org.apache.solr.common.util.JsonTextWriter.writeMap(org.apache.solr.common.MapWriter) throws java.io.IOException"); + methodsExpectedNotOverridden.add( + "public default void org.apache.solr.common.util.JsonTextWriter.writeIterator(org.apache.solr.common.IteratorWriter) throws java.io.IOException"); + methodsExpectedNotOverridden.add( + "public default void org.apache.solr.common.util.JsonTextWriter.writeJsonIter(java.util.Iterator,boolean) throws java.io.IOException"); final Class subClass = JSONResponseWriter.ArrayOfNameTypeValueJSONWriter.class; final Class superClass = subClass.getSuperclass(); List allSuperClassMethods = new ArrayList<>(); for (Method method : superClass.getDeclaredMethods()) allSuperClassMethods.add(method); - for (Method method : JsonTextWriter.class.getDeclaredMethods()) allSuperClassMethods.add(method); - + for (Method method : JsonTextWriter.class.getDeclaredMethods()) + allSuperClassMethods.add(method); for (final Method superClassMethod : allSuperClassMethods) { final String methodName = superClassMethod.getName(); @@ -229,13 +235,15 @@ public void testArrntvWriterOverridesAllWrites() { if (Modifier.isStatic(modifiers)) continue; if 
(Modifier.isPrivate(modifiers)) continue; - final boolean expectOverridden = !methodsExpectedNotOverridden.contains(methodName) - && !methodsExpectedNotOverridden.contains(methodFullName); + final boolean expectOverridden = + !methodsExpectedNotOverridden.contains(methodName) + && !methodsExpectedNotOverridden.contains(methodFullName); try { final Method subClassMethod = getDeclaredMethodInClasses(superClassMethod, subClass); if (expectOverridden) { - assertEquals("getReturnType() difference", + assertEquals( + "getReturnType() difference", superClassMethod.getReturnType(), subClassMethod.getReturnType()); } else { @@ -245,12 +253,16 @@ public void testArrntvWriterOverridesAllWrites() { if (expectOverridden) { fail(subClass + " needs to override '" + superClassMethod + "'"); } else { - assertTrue(methodName+" not found in remaining "+methodsExpectedNotOverridden, methodsExpectedNotOverridden.remove(methodName)|| methodsExpectedNotOverridden.remove(methodFullName)); + assertTrue( + methodName + " not found in remaining " + methodsExpectedNotOverridden, + methodsExpectedNotOverridden.remove(methodName) + || methodsExpectedNotOverridden.remove(methodFullName)); } } } - assertTrue("methodsExpected NotOverridden but NotFound instead: "+methodsExpectedNotOverridden, + assertTrue( + "methodsExpected NotOverridden but NotFound instead: " + methodsExpectedNotOverridden, methodsExpectedNotOverridden.isEmpty()); } @@ -264,30 +276,31 @@ public void testArrntvWriterLacksMethodsOfItsOwn() { // only own private method of its own if (subClassMethod.getName().equals("ifNeededWriteTypeAndValueKey")) continue; try { - final Method superClassMethod = getDeclaredMethodInClasses( subClassMethod,superClass, JsonTextWriter.class); + final Method superClassMethod = + getDeclaredMethodInClasses(subClassMethod, superClass, JsonTextWriter.class); - assertEquals("getReturnType() difference", - subClassMethod.getReturnType(), - superClassMethod.getReturnType()); + assertEquals( + "getReturnType() difference", + subClassMethod.getReturnType(), + superClassMethod.getReturnType()); } catch (NoSuchMethodException e) { - fail(subClass + " should not have '" + subClassMethod + "' method of its own"); + fail(subClass + " should not have '" + subClassMethod + "' method of its own"); } } } - private Method getDeclaredMethodInClasses(Method subClassMethod, Class... classes) throws NoSuchMethodException { + private Method getDeclaredMethodInClasses(Method subClassMethod, Class... 
classes) + throws NoSuchMethodException { for (int i = 0; i < classes.length; i++) { Class klass = classes[i]; try { return klass.getDeclaredMethod( - subClassMethod.getName(), - subClassMethod.getParameterTypes()); + subClassMethod.getName(), subClassMethod.getParameterTypes()); } catch (NoSuchMethodException e) { - if(i==classes.length-1) throw e; + if (i == classes.length - 1) throw e; } } - throw new NoSuchMethodException(subClassMethod.toString()); - + throw new NoSuchMethodException(subClassMethod.toString()); } @Test @@ -300,5 +313,4 @@ public void testConstantsUnchanged() { assertEquals("arrntv", JSONWriter.JSON_NL_ARROFNTV); assertEquals("json.wrf", JSONWriter.JSON_WRAPPER_FUNCTION); } - } diff --git a/solr/core/src/test/org/apache/solr/response/SmileWriterTest.java b/solr/core/src/test/org/apache/solr/response/SmileWriterTest.java index 864325ee973..9e56119e10c 100644 --- a/solr/core/src/test/org/apache/solr/response/SmileWriterTest.java +++ b/solr/core/src/test/org/apache/solr/response/SmileWriterTest.java @@ -16,6 +16,14 @@ */ package org.apache.solr.response; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.BinaryNode; +import com.fasterxml.jackson.databind.node.BooleanNode; +import com.fasterxml.jackson.databind.node.NullNode; +import com.fasterxml.jackson.databind.node.NumericNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.fasterxml.jackson.dataformat.smile.SmileFactory; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -28,15 +36,6 @@ import java.util.List; import java.util.Map; import java.util.Random; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.BinaryNode; -import com.fasterxml.jackson.databind.node.BooleanNode; -import com.fasterxml.jackson.databind.node.NullNode; -import com.fasterxml.jackson.databind.node.NumericNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.fasterxml.jackson.dataformat.smile.SmileFactory; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; @@ -44,8 +43,8 @@ import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.Utils; -import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.LocalSolrQueryRequest; +import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.search.ReturnFields; import org.apache.solr.search.SolrReturnFields; import org.junit.BeforeClass; @@ -58,7 +57,7 @@ public class SmileWriterTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); } @Test @@ -70,7 +69,7 @@ public void testTypes() throws IOException { rsp.add("data3", Float.POSITIVE_INFINITY); SmileResponseWriter smileResponseWriter = new SmileResponseWriter(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); - smileResponseWriter.write(baos,req,rsp); + smileResponseWriter.write(baos, req, rsp); @SuppressWarnings({"rawtypes"}) Map m = (Map) decodeSmile(new ByteArrayInputStream(baos.toByteArray())); CharArr out = new CharArr(); @@ -78,7 +77,7 @@ public void testTypes() throws IOException { jsonWriter.setIndentSize(-1); // indentation by default 
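// Context for the assertion below: the noggit JSONWriter used here renders
// Float.NaN, Float.NEGATIVE_INFINITY and Float.POSITIVE_INFINITY as the bare
// tokens NaN, -Infinity and Infinity, so the expected string contains them
// unquoted. That output is not strict JSON; most parsers accept such tokens
// only as an opt-in extension (Jackson, for instance, gates them behind its
// ALLOW_NON_NUMERIC_NUMBERS feature).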
jsonWriter.write(m); String s = new String(Utils.toUTF8(out), StandardCharsets.UTF_8); - assertEquals(s , "{\"data1\":NaN,\"data2\":-Infinity,\"data3\":Infinity}"); + assertEquals(s, "{\"data1\":NaN,\"data2\":-Infinity,\"data3\":Infinity}"); req.close(); } @@ -86,18 +85,19 @@ public void testTypes() throws IOException { @Test @SuppressWarnings({"unchecked"}) public void testJSON() throws IOException { - SolrQueryRequest req = req("wt","json","json.nl","arrarr"); + SolrQueryRequest req = req("wt", "json", "json.nl", "arrarr"); SolrQueryResponse rsp = new SolrQueryResponse(); SmileResponseWriter w = new SmileResponseWriter(); ByteArrayOutputStream buf = new ByteArrayOutputStream(); NamedList nl = new NamedList<>(); - nl.add("data1", "he\u2028llo\u2029!"); // make sure that 2028 and 2029 are both escaped (they are illegal in javascript) + // make sure that 2028 and 2029 are both escaped (they are illegal in javascript) + nl.add("data1", "he\u2028llo\u2029!"); nl.add(null, 42); rsp.add("nl", nl); - rsp.add("byte", Byte.valueOf((byte)-3)); - rsp.add("short", Short.valueOf((short)-4)); + rsp.add("byte", Byte.valueOf((byte) -3)); + rsp.add("short", Short.valueOf((short) -4)); String expected = "{\"nl\":[[\"data1\",\"he\\u2028llo\\u2029!\"],[null,42]],byte:-3,short:-4}"; w.write(buf, req, rsp); @SuppressWarnings({"rawtypes"}) @@ -110,8 +110,7 @@ public void testJSON() throws IOException { @Test public void testJSONSolrDocument() throws IOException { - SolrQueryRequest req = req(CommonParams.WT,"json", - CommonParams.FL,"id,score"); + SolrQueryRequest req = req(CommonParams.WT, "json", CommonParams.FL, "id,score"); SolrQueryResponse rsp = new SolrQueryResponse(); SmileResponseWriter w = new SmileResponseWriter(); @@ -151,33 +150,32 @@ public void testJSONSolrDocument() throws IOException { req.close(); } - @Test @SuppressWarnings({"unchecked", "rawtypes"}) public void test10Docs() throws IOException { SolrQueryResponse response = new SolrQueryResponse(); SolrDocumentList l = constructSolrDocList(response); ByteArrayOutputStream baos = new ByteArrayOutputStream(); - new SmileResponseWriter().write(baos, new LocalSolrQueryRequest(null, new ModifiableSolrParams()), response); + new SmileResponseWriter() + .write(baos, new LocalSolrQueryRequest(null, new ModifiableSolrParams()), response); byte[] bytes = baos.toByteArray(); Map m = (Map) decodeSmile(new ByteArrayInputStream(bytes, 0, bytes.length)); m = (Map) m.get("results"); List lst = (List) m.get("docs"); - assertEquals(lst.size(),10); + assertEquals(lst.size(), 10); for (int i = 0; i < lst.size(); i++) { m = (Map) lst.get(i); SolrDocument d = new SolrDocument(); d.putAll(m); compareSolrDocument(l.get(i), d); } - } @SuppressWarnings({"unchecked"}) public static SolrDocumentList constructSolrDocList(SolrQueryResponse response) { SolrDocumentList l = new SolrDocumentList(); - for(int i=0;i<10; i++){ + for (int i = 0; i < 10; i++) { l.add(sampleDoc(random(), i)); } @@ -204,30 +202,29 @@ public static SolrDocument sampleDoc(Random r, int bufnum) { sdoc.put("level", r.nextInt(101)); sdoc.put("education_level", r.nextInt(10)); // higher level of reuse for string values - sdoc.put("state", "S"+r.nextInt(50)); - sdoc.put("country", "Country"+r.nextInt(20)); - sdoc.put("some_boolean", ""+r.nextBoolean()); - sdoc.put("another_boolean", ""+r.nextBoolean()); + sdoc.put("state", "S" + r.nextInt(50)); + sdoc.put("country", "Country" + r.nextInt(20)); + sdoc.put("some_boolean", "" + r.nextBoolean()); + sdoc.put("another_boolean", "" + r.nextBoolean()); return 
sdoc; } // common-case ascii static String str(Random r, int sz) { StringBuffer sb = new StringBuffer(sz); - for (int i=0; i> it = ((ObjectNode)value).fields(); + if (value instanceof ObjectNode) { + Iterator> it = ((ObjectNode) value).fields(); Map result = new LinkedHashMap<>(); - while(it.hasNext()){ + while (it.hasNext()) { Map.Entry e = it.next(); - result.put(e.getKey(),getVal(e.getValue())); + result.put(e.getKey(), getVal(e.getValue())); } return result; } if (value instanceof ArrayNode) { - ArrayList result = new ArrayList(); + ArrayList result = new ArrayList(); Iterator it = ((ArrayNode) value).elements(); while (it.hasNext()) { result.add(getVal(it.next())); } return result; - } - if(value instanceof BinaryNode) { + if (value instanceof BinaryNode) { return ((BinaryNode) value).binaryValue(); } return value.textValue(); } - - } diff --git a/solr/core/src/test/org/apache/solr/response/TestBinaryResponseWriter.java b/solr/core/src/test/org/apache/solr/response/TestBinaryResponseWriter.java index 99a11fdc03e..8d68c590bc0 100644 --- a/solr/core/src/test/org/apache/solr/response/TestBinaryResponseWriter.java +++ b/solr/core/src/test/org/apache/solr/response/TestBinaryResponseWriter.java @@ -23,7 +23,6 @@ import java.util.Locale; import java.util.Map; import java.util.UUID; - import org.apache.lucene.document.StoredField; import org.apache.lucene.document.TextField; import org.apache.lucene.util.BytesRef; @@ -45,12 +44,10 @@ /** * Test for BinaryResponseWriter * - * * @since solr 1.4 */ public class TestBinaryResponseWriter extends SolrTestCaseJ4 { - @BeforeClass public static void beforeClass() throws Exception { System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_ @@ -60,25 +57,30 @@ public static void beforeClass() throws Exception { public void testBytesRefWriting() { compareStringFormat("ThisIsUTF8String"); compareStringFormat("Thailand (ประเทศไทย)"); - compareStringFormat("LIVE: सबरीमाला मंदिर के पास पहुंची दो महिलाएं, जमकर हो रहा विरोध-प्रदर्शन"); + compareStringFormat( + "LIVE: सबरीमाला मंदिर के पास पहुंची दो महिलाएं, जमकर हो रहा विरोध-प्रदर्शन"); } public void testJavabinCodecWithCharSeq() throws IOException { SolrDocument document = new SolrDocument(); document.put("id", "1"); String text = "नए लुक में धमाल मचाने आ रहे हैं MS Dhoni, कुछ यूं दिखाया हेलीकॉप्टर शॉट"; - document.put("desc", new StoredField("desc", new ByteArrayUtf8CharSequence(text) { - }, TextField.TYPE_STORED)); + document.put( + "desc", + new StoredField("desc", new ByteArrayUtf8CharSequence(text) {}, TextField.TYPE_STORED)); NamedList nl = new NamedList<>(); nl.add("doc1", document); SimplePostTool.BAOS baos = new SimplePostTool.BAOS(); new JavaBinCodec(new BinaryResponseWriter.Resolver(null, null)).marshal(nl, baos); ByteBuffer byteBuffer = baos.getByteBuffer(); - NamedList result = (NamedList) new JavaBinCodec().unmarshal(new ByteArrayInputStream(byteBuffer.array(), byteBuffer.arrayOffset(), byteBuffer.limit())); + NamedList result = + (NamedList) + new JavaBinCodec() + .unmarshal( + new ByteArrayInputStream( + byteBuffer.array(), byteBuffer.arrayOffset(), byteBuffer.limit())); assertEquals(text, result._get("doc1/desc", null)); - - } private void compareStringFormat(String input) { @@ -92,26 +94,28 @@ private void compareStringFormat(String input) { } } - /** - * Tests known types implementation by asserting correct encoding/decoding of UUIDField - */ + /** Tests known types implementation by asserting correct encoding/decoding of UUIDField */ public void testUUID() 
throws Exception { String s = UUID.randomUUID().toString().toLowerCase(Locale.ROOT); assertU(adoc("id", "101", "uuid", s)); assertU(commit()); LocalSolrQueryRequest req = lrf.makeRequest("q", "*:*"); SolrQueryResponse rsp = h.queryAndResponse(req.getParams().get(CommonParams.QT), req); - BinaryQueryResponseWriter writer = (BinaryQueryResponseWriter) h.getCore().getQueryResponseWriter("javabin"); + BinaryQueryResponseWriter writer = + (BinaryQueryResponseWriter) h.getCore().getQueryResponseWriter("javabin"); ByteArrayOutputStream baos = new ByteArrayOutputStream(); writer.write(baos, req, rsp); NamedList res; try (JavaBinCodec jbc = new JavaBinCodec()) { res = (NamedList) jbc.unmarshal(new ByteArrayInputStream(baos.toByteArray())); - } + } SolrDocumentList docs = (SolrDocumentList) res.get("response"); for (Object doc : docs) { SolrDocument document = (SolrDocument) doc; - assertEquals("Returned object must be a string", "java.lang.String", document.getFieldValue("uuid").getClass().getName()); + assertEquals( + "Returned object must be a string", + "java.lang.String", + document.getFieldValue("uuid").getClass().getName()); assertEquals("Wrong UUID string returned", s, document.getFieldValue("uuid")); } @@ -134,15 +138,17 @@ public void testOmitHeader() throws Exception { } public void testResolverSolrDocumentPartialFields() throws Exception { - LocalSolrQueryRequest req = lrf.makeRequest("q", "*:*", - "fl", "id,xxx,ddd_s"); + LocalSolrQueryRequest req = + lrf.makeRequest( + "q", "*:*", + "fl", "id,xxx,ddd_s"); SolrDocument in = new SolrDocument(); in.addField("id", 345); in.addField("aaa_s", "aaa"); in.addField("bbb_s", "bbb"); in.addField("ccc_s", "ccc"); in.addField("ddd_s", "ddd"); - in.addField("eee_s", "eee"); + in.addField("eee_s", "eee"); Resolver r = new Resolver(req, new SolrReturnFields(req)); Object o = r.resolve(in, new JavaBinCodec()); @@ -152,15 +158,11 @@ public void testResolverSolrDocumentPartialFields() throws Exception { SolrDocument out = new SolrDocument(); for (Map.Entry e : in) { - if(r.isWritable(e.getKey())) out.put(e.getKey(),e.getValue()); - + if (r.isWritable(e.getKey())) out.put(e.getKey(), e.getValue()); } assertTrue("id not found", out.getFieldNames().contains("id")); assertTrue("ddd_s not found", out.getFieldNames().contains("ddd_s")); - assertEquals("Wrong number of fields found", - 2, out.getFieldNames().size()); + assertEquals("Wrong number of fields found", 2, out.getFieldNames().size()); req.close(); - } - } diff --git a/solr/core/src/test/org/apache/solr/response/TestCSVResponseWriter.java b/solr/core/src/test/org/apache/solr/response/TestCSVResponseWriter.java index b26c8de9a14..4472ff57034 100644 --- a/solr/core/src/test/org/apache/solr/response/TestCSVResponseWriter.java +++ b/solr/core/src/test/org/apache/solr/response/TestCSVResponseWriter.java @@ -22,7 +22,6 @@ import java.util.Date; import java.util.List; import java.util.stream.Collectors; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; @@ -35,284 +34,471 @@ public class TestCSVResponseWriter extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_ - initCore("solrconfig.xml","schema12.xml"); + initCore("solrconfig.xml", "schema12.xml"); createIndex(); } public static void createIndex() { - assertU(adoc("id","1", "foo_i","-1", "foo_s","hi", "foo_l","12345678987654321", "foo_b","false", 
"foo_f","1.414","foo_d","-1.0E300","foo_dt","2000-01-02T03:04:05Z")); - assertU(adoc("id","2", "v_ss","hi", "v_ss","there", "v2_ss","nice", "v2_ss","output", "shouldbeunstored","foo")); - assertU(adoc("id","3", "shouldbeunstored","foo", "foo_l", "1")); - assertU(adoc("id","4", "amount_c", "1.50,EUR")); - assertU(adoc("id","5", "store", "12.434,-134.1")); - assertU(adoc("id","6", "pubyear_ii", "123", "store_iis", "12", "price_ff", "1.3")); + assertU( + adoc( + "id", + "1", + "foo_i", + "-1", + "foo_s", + "hi", + "foo_l", + "12345678987654321", + "foo_b", + "false", + "foo_f", + "1.414", + "foo_d", + "-1.0E300", + "foo_dt", + "2000-01-02T03:04:05Z")); + assertU( + adoc( + "id", + "2", + "v_ss", + "hi", + "v_ss", + "there", + "v2_ss", + "nice", + "v2_ss", + "output", + "shouldbeunstored", + "foo")); + assertU(adoc("id", "3", "shouldbeunstored", "foo", "foo_l", "1")); + assertU(adoc("id", "4", "amount_c", "1.50,EUR")); + assertU(adoc("id", "5", "store", "12.434,-134.1")); + assertU(adoc("id", "6", "pubyear_ii", "123", "store_iis", "12", "price_ff", "1.3")); assertU(commit()); } - @Test public void testCSVOutput() throws Exception { // test our basic types,and that fields come back in the requested order - assertEquals("id,foo_s,foo_i,foo_l,foo_b,foo_f,foo_d,foo_dt\n1,hi,-1,12345678987654321,false,1.414,-1.0E300,2000-01-02T03:04:05Z\n" - , h.query(req("q","id:1", "wt","csv", "fl","id,foo_s,foo_i,foo_l,foo_b,foo_f,foo_d,foo_dt"))); + assertEquals( + "id,foo_s,foo_i,foo_l,foo_b,foo_f,foo_d,foo_dt\n1,hi,-1,12345678987654321,false,1.414,-1.0E300,2000-01-02T03:04:05Z\n", + h.query( + req("q", "id:1", "wt", "csv", "fl", "id,foo_s,foo_i,foo_l,foo_b,foo_f,foo_d,foo_dt"))); // test retrieving score, csv.header - assertEquals("1,0.0,hi\n" - , h.query(req("q","id:1^0", "wt","csv", "csv.header","false", "fl","id,score,foo_s"))); + assertEquals( + "1,0.0,hi\n", + h.query(req("q", "id:1^0", "wt", "csv", "csv.header", "false", "fl", "id,score,foo_s"))); // test multivalued - assertEquals("2,\"hi,there\"\n" - , h.query(req("q","id:2", "wt","csv", "csv.header","false", "fl","id,v_ss"))); - + assertEquals( + "2,\"hi,there\"\n", + h.query(req("q", "id:2", "wt", "csv", "csv.header", "false", "fl", "id,v_ss"))); + // test separator change - assertEquals("2|\"hi|there\"\n" - , h.query(req("q","id:2", "wt","csv", "csv.header","false", "csv.separator","|", "fl","id,v_ss"))); + assertEquals( + "2|\"hi|there\"\n", + h.query( + req( + "q", + "id:2", + "wt", + "csv", + "csv.header", + "false", + "csv.separator", + "|", + "fl", + "id,v_ss"))); // test mv separator change - assertEquals("2,hi|there\n" - , h.query(req("q","id:2", "wt","csv", "csv.header","false", "csv.mv.separator","|", "fl","id,v_ss"))); + assertEquals( + "2,hi|there\n", + h.query( + req( + "q", + "id:2", + "wt", + "csv", + "csv.header", + "false", + "csv.mv.separator", + "|", + "fl", + "id,v_ss"))); // test mv separator change for a single field - assertEquals("2,hi|there,nice:output\n" - , h.query(req("q","id:2", "wt","csv", "csv.header","false", "csv.mv.separator","|", "f.v2_ss.csv.separator",":", "fl","id,v_ss,v2_ss"))); + assertEquals( + "2,hi|there,nice:output\n", + h.query( + req( + "q", + "id:2", + "wt", + "csv", + "csv.header", + "false", + "csv.mv.separator", + "|", + "f.v2_ss.csv.separator", + ":", + "fl", + "id,v_ss,v2_ss"))); // test csv field for polyfield (currency) SOLR-3959 - assertEquals("4,\"1.50\\,EUR\"\n" - , h.query(req("q","id:4", "wt","csv", "csv.header","false", "fl","id,amount_c"))); - + assertEquals( + "4,\"1.50\\,EUR\"\n", + 
h.query(req("q", "id:4", "wt", "csv", "csv.header", "false", "fl", "id,amount_c"))); + // test csv field for polyfield (latlon) SOLR-3959 - assertEquals("5,\"12.434\\,-134.1\"\n" - , h.query(req("q","id:5", "wt","csv", "csv.header","false", "fl","id,store")) ); + assertEquals( + "5,\"12.434\\,-134.1\"\n", + h.query(req("q", "id:5", "wt", "csv", "csv.header", "false", "fl", "id,store"))); // test retrieving fields from index - String result = h.query(req("q","*:*", "wt","csv", "csv.header","true", "fl","*,score")); - for (String field : "id,foo_s,foo_i,foo_l,foo_b,foo_f,foo_d,foo_dt,v_ss,v2_ss,score".split(",")) { + String result = h.query(req("q", "*:*", "wt", "csv", "csv.header", "true", "fl", "*,score")); + for (String field : + "id,foo_s,foo_i,foo_l,foo_b,foo_f,foo_d,foo_dt,v_ss,v2_ss,score".split(",")) { assertTrue(result.indexOf(field) >= 0); } // test null values - assertEquals("2,,hi|there\n" - , h.query(req("q","id:2", "wt","csv", "csv.header","false", "csv.mv.separator","|", "fl","id,foo_s,v_ss"))); + assertEquals( + "2,,hi|there\n", + h.query( + req( + "q", + "id:2", + "wt", + "csv", + "csv.header", + "false", + "csv.mv.separator", + "|", + "fl", + "id,foo_s,v_ss"))); // test alternate null value - assertEquals("2,NULL,hi|there\n" - , h.query(req("q","id:2", "wt","csv", "csv.header","false", "csv.mv.separator","|", "csv.null","NULL","fl","id,foo_s,v_ss"))); + assertEquals( + "2,NULL,hi|there\n", + h.query( + req( + "q", + "id:2", + "wt", + "csv", + "csv.header", + "false", + "csv.mv.separator", + "|", + "csv.null", + "NULL", + "fl", + "id,foo_s,v_ss"))); // test alternate newline - assertEquals("2,\"hi,there\"\r\n" - , h.query(req("q","id:2", "wt","csv", "csv.header","false", "csv.newline","\r\n", "fl","id,v_ss"))); + assertEquals( + "2,\"hi,there\"\r\n", + h.query( + req( + "q", + "id:2", + "wt", + "csv", + "csv.header", + "false", + "csv.newline", + "\r\n", + "fl", + "id,v_ss"))); // test alternate encapsulator - assertEquals("2,'hi,there'\n" - , h.query(req("q","id:2", "wt","csv", "csv.header","false", "csv.encapsulator","'", "fl","id,v_ss"))); + assertEquals( + "2,'hi,there'\n", + h.query( + req( + "q", + "id:2", + "wt", + "csv", + "csv.header", + "false", + "csv.encapsulator", + "'", + "fl", + "id,v_ss"))); // test using escape instead of encapsulator - assertEquals("2,hi\\,there\n" - , h.query(req("q","id:2", "wt","csv", "csv.header","false", "csv.escape","\\", "fl","id,v_ss"))); + assertEquals( + "2,hi\\,there\n", + h.query( + req( + "q", + "id:2", + "wt", + "csv", + "csv.header", + "false", + "csv.escape", + "\\", + "fl", + "id,v_ss"))); // test multiple lines - assertEquals("1,,hi\n2,\"hi,there\",\n" - , h.query(req("q","id:[1 TO 2]", "wt","csv", "csv.header","false", "fl","id,v_ss,foo_s"))); + assertEquals( + "1,,hi\n2,\"hi,there\",\n", + h.query( + req("q", "id:[1 TO 2]", "wt", "csv", "csv.header", "false", "fl", "id,v_ss,foo_s"))); // test SOLR-2970 not returning non-stored fields by default. 
Compare sorted list - assertEquals(sortHeader("amount_c,store,v_ss,foo_b,v2_ss,foo_f,foo_i,foo_d,foo_s,foo_dt,id,foo_l," + - "pubyear_ii,store_iis\n"), - sortHeader(h.query(req("q","id:3", "wt","csv", "csv.header","true", "fl","*", "rows","0")))); - + assertEquals( + sortHeader( + "amount_c,store,v_ss,foo_b,v2_ss,foo_f,foo_i,foo_d,foo_s,foo_dt,id,foo_l," + + "pubyear_ii,store_iis\n"), + sortHeader( + h.query(req("q", "id:3", "wt", "csv", "csv.header", "true", "fl", "*", "rows", "0")))); // now test SolrDocumentList SolrDocument d = new SolrDocument(); SolrDocument d1 = d; - d.addField("id","1"); - d.addField("foo_i",-1); - d.addField("foo_s","hi"); - d.addField("foo_l","12345678987654321L"); - d.addField("foo_b",false); - d.addField("foo_f",1.414f); - d.addField("foo_d",-1.0E300); + d.addField("id", "1"); + d.addField("foo_i", -1); + d.addField("foo_s", "hi"); + d.addField("foo_l", "12345678987654321L"); + d.addField("foo_b", false); + d.addField("foo_f", 1.414f); + d.addField("foo_d", -1.0E300); d.addField("foo_dt", new Date(Instant.parse("2000-01-02T03:04:05Z").toEpochMilli())); d.addField("score", "2.718"); d = new SolrDocument(); SolrDocument d2 = d; - d.addField("id","2"); - d.addField("v_ss","hi"); - d.addField("v_ss","there"); - d.addField("v2_ss","nice"); - d.addField("v2_ss","output"); + d.addField("id", "2"); + d.addField("v_ss", "hi"); + d.addField("v_ss", "there"); + d.addField("v2_ss", "nice"); + d.addField("v2_ss", "output"); d.addField("score", "89.83"); - d.addField("shouldbeunstored","foo"); + d.addField("shouldbeunstored", "foo"); SolrDocumentList sdl = new SolrDocumentList(); sdl.add(d1); sdl.add(d2); - - SolrQueryRequest req = req("q","*:*"); + + SolrQueryRequest req = req("q", "*:*"); SolrQueryResponse rsp = new SolrQueryResponse(); rsp.addResponse(sdl); QueryResponseWriter w = new CSVResponseWriter(); - - rsp.setReturnFields( new SolrReturnFields("id,foo_s", req) ); + + rsp.setReturnFields(new SolrReturnFields("id,foo_s", req)); StringWriter buf = new StringWriter(); w.write(buf, req, rsp); assertEquals("id,foo_s\n1,hi\n2,\n", buf.toString()); // try scores - rsp.setReturnFields( new SolrReturnFields("id,score,foo_s", req) ); + rsp.setReturnFields(new SolrReturnFields("id,score,foo_s", req)); buf = new StringWriter(); w.write(buf, req, rsp); assertEquals("id,score,foo_s\n1,2.718,hi\n2,89.83,\n", buf.toString()); // get field values from docs... should be ordered and not include score unless requested - rsp.setReturnFields( new SolrReturnFields("*", req) ); + rsp.setReturnFields(new SolrReturnFields("*", req)); buf = new StringWriter(); w.write(buf, req, rsp); - assertEquals("id,foo_i,foo_s,foo_l,foo_b,foo_f,foo_d,foo_dt,v_ss,v2_ss\n" + - "1,-1,hi,12345678987654321L,false,1.414,-1.0E300,2000-01-02T03:04:05Z,,\n" + - "2,,,,,,,,\"hi,there\",\"nice,output\"\n", - buf.toString()); - - - // get field values and scores - just check that the scores are there... we don't guarantee where - rsp.setReturnFields( new SolrReturnFields("*,score", req) ); + assertEquals( + "id,foo_i,foo_s,foo_l,foo_b,foo_f,foo_d,foo_dt,v_ss,v2_ss\n" + + "1,-1,hi,12345678987654321L,false,1.414,-1.0E300,2000-01-02T03:04:05Z,,\n" + + "2,,,,,,,,\"hi,there\",\"nice,output\"\n", + buf.toString()); + + // get field values and scores - just check that the scores are there... 
we don't guarantee + // where + rsp.setReturnFields(new SolrReturnFields("*,score", req)); buf = new StringWriter(); w.write(buf, req, rsp); String s = buf.toString(); - assertTrue(s.indexOf("score") >=0 && s.indexOf("2.718") > 0 && s.indexOf("89.83") > 0 ); - + assertTrue(s.indexOf("score") >= 0 && s.indexOf("2.718") > 0 && s.indexOf("89.83") > 0); + // Test field globs - rsp.setReturnFields( new SolrReturnFields("id,foo*", req) ); + rsp.setReturnFields(new SolrReturnFields("id,foo*", req)); buf = new StringWriter(); w.write(buf, req, rsp); - assertEquals("id,foo_i,foo_s,foo_l,foo_b,foo_f,foo_d,foo_dt\n" + - "1,-1,hi,12345678987654321L,false,1.414,-1.0E300,2000-01-02T03:04:05Z\n" + - "2,,,,,,,\n", - buf.toString()); + assertEquals( + "id,foo_i,foo_s,foo_l,foo_b,foo_f,foo_d,foo_dt\n" + + "1,-1,hi,12345678987654321L,false,1.414,-1.0E300,2000-01-02T03:04:05Z\n" + + "2,,,,,,,\n", + buf.toString()); - rsp.setReturnFields( new SolrReturnFields("id,*_d*", req) ); + rsp.setReturnFields(new SolrReturnFields("id,*_d*", req)); buf = new StringWriter(); w.write(buf, req, rsp); - assertEquals("id,foo_d,foo_dt\n" + - "1,-1.0E300,2000-01-02T03:04:05Z\n" + - "2,,\n", - buf.toString()); + assertEquals( + "id,foo_d,foo_dt\n" + "1,-1.0E300,2000-01-02T03:04:05Z\n" + "2,,\n", buf.toString()); // Test function queries - rsp.setReturnFields( new SolrReturnFields("sum(1,1),id,exists(foo_i),div(9,1),foo_f", req) ); + rsp.setReturnFields(new SolrReturnFields("sum(1,1),id,exists(foo_i),div(9,1),foo_f", req)); buf = new StringWriter(); w.write(buf, req, rsp); - assertEquals("\"sum(1,1)\",id,exists(foo_i),\"div(9,1)\",foo_f\n" + - "\"\",1,,,1.414\n" + - "\"\",2,,,\n", + assertEquals( + "\"sum(1,1)\",id,exists(foo_i),\"div(9,1)\",foo_f\n" + "\"\",1,,,1.414\n" + "\"\",2,,,\n", buf.toString()); // Test transformers - rsp.setReturnFields( new SolrReturnFields("mydocid:[docid],[explain]", req) ); + rsp.setReturnFields(new SolrReturnFields("mydocid:[docid],[explain]", req)); buf = new StringWriter(); w.write(buf, req, rsp); - assertEquals("mydocid,[explain]\n" + - "\"\",\n" + - "\"\",\n", - buf.toString()); + assertEquals("mydocid,[explain]\n" + "\"\",\n" + "\"\",\n", buf.toString()); req.close(); } - @Test public void testPseudoFields() throws Exception { // Use Pseudo Field - assertEquals("1,hi", - h.query(req("q","id:1", "wt","csv", "csv.header","false", "fl","XXX:id,foo_s")).trim()); - - String txt = h.query(req("q","id:1", "wt","csv", "csv.header","true", "fl","XXX:id,YYY:[docid],FOO:foo_s")); + assertEquals( + "1,hi", + h.query(req("q", "id:1", "wt", "csv", "csv.header", "false", "fl", "XXX:id,foo_s")).trim()); + + String txt = + h.query( + req( + "q", + "id:1", + "wt", + "csv", + "csv.header", + "true", + "fl", + "XXX:id,YYY:[docid],FOO:foo_s")); String[] lines = txt.split("\n"); assertEquals(2, lines.length); - assertEquals("XXX,YYY,FOO", lines[0] ); - assertEquals("1,0,hi", lines[1] ); - - //assertions specific to multiple pseudofields functions like abs, div, exists, etc.. (SOLR-5423) - String funcText = h.query(req("q","*", "wt","csv", "csv.header","true", "fl","XXX:id,YYY:exists(foo_i),exists(shouldbeunstored)")); + assertEquals("XXX,YYY,FOO", lines[0]); + assertEquals("1,0,hi", lines[1]); + + // assertions specific to multiple pseudofields functions like abs, div, exists, etc.. 
+ // (SOLR-5423) + String funcText = + h.query( + req( + "q", + "*", + "wt", + "csv", + "csv.header", + "true", + "fl", + "XXX:id,YYY:exists(foo_i),exists(shouldbeunstored)")); String[] funcLines = funcText.split("\n"); assertEquals(7, funcLines.length); - assertEquals("XXX,YYY,exists(shouldbeunstored)", funcLines[0] ); - assertEquals("1,true,false", funcLines[1] ); - assertEquals("3,false,true", funcLines[3] ); - - - //assertions specific to single function without alias (SOLR-5423) - String singleFuncText = h.query(req("q","*", "wt","csv", "csv.header","true", "fl","exists(shouldbeunstored),XXX:id")); + assertEquals("XXX,YYY,exists(shouldbeunstored)", funcLines[0]); + assertEquals("1,true,false", funcLines[1]); + assertEquals("3,false,true", funcLines[3]); + + // assertions specific to single function without alias (SOLR-5423) + String singleFuncText = + h.query( + req( + "q", + "*", + "wt", + "csv", + "csv.header", + "true", + "fl", + "exists(shouldbeunstored),XXX:id")); String[] singleFuncLines = singleFuncText.split("\n"); assertEquals(7, singleFuncLines.length); - assertEquals("exists(shouldbeunstored),XXX", singleFuncLines[0] ); - assertEquals("false,1", singleFuncLines[1] ); - assertEquals("true,3", singleFuncLines[3] ); + assertEquals("exists(shouldbeunstored),XXX", singleFuncLines[0]); + assertEquals("false,1", singleFuncLines[1]); + assertEquals("true,3", singleFuncLines[3]); // pseudo-fields with * in fl - txt = h.query(req("q","id:4", "wt","csv", "csv.header","true", "fl","*,YYY:[docid],FOO:amount_c")); + txt = + h.query( + req( + "q", + "id:4", + "wt", + "csv", + "csv.header", + "true", + "fl", + "*,YYY:[docid],FOO:amount_c")); lines = txt.split("\n"); assertEquals(2, lines.length); - assertEquals(sortHeader("foo_i,foo_l,FOO,foo_s,store,store_iis," + - "v2_ss,pubyear_ii,foo_dt,foo_b,YYY,foo_d,id,amount_c,foo_f,v_ss"), sortHeader(lines[0])); + assertEquals( + sortHeader( + "foo_i,foo_l,FOO,foo_s,store,store_iis," + + "v2_ss,pubyear_ii,foo_dt,foo_b,YYY,foo_d,id,amount_c,foo_f,v_ss"), + sortHeader(lines[0])); } @Test public void testForDVEnabledFields() throws Exception { // for dv enabled and useDocValueAsStored=true // returns pubyear_ii, store_iis but not price_ff - String singleFuncText = h.query(req("q","id:6", "wt","csv", "csv.header","true")); - String sortedHeader = sortHeader("amount_c,store,v_ss,foo_b,v2_ss,foo_f,foo_i,foo_d,foo_s,foo_dt,id,foo_l," + - "pubyear_ii,store_iis"); + String singleFuncText = h.query(req("q", "id:6", "wt", "csv", "csv.header", "true")); + String sortedHeader = + sortHeader( + "amount_c,store,v_ss,foo_b,v2_ss,foo_f,foo_i,foo_d,foo_s,foo_dt,id,foo_l," + + "pubyear_ii,store_iis"); String[] singleFuncLines = singleFuncText.split("\n"); assertEquals(2, singleFuncLines.length); assertEquals(sortedHeader, sortHeader(singleFuncLines[0])); - List actualVal = Arrays.stream(singleFuncLines[1].trim().split(",")) - .filter(val -> !val.trim().isEmpty() && !val.trim().equals("\"\"")) - .collect(Collectors.toList()); + List actualVal = + Arrays.stream(singleFuncLines[1].trim().split(",")) + .filter(val -> !val.trim().isEmpty() && !val.trim().equals("\"\"")) + .collect(Collectors.toList()); assertEquals(3, actualVal.size()); assertTrue(actualVal.containsAll(Arrays.asList("6", "123", "12"))); // explicit fl=* - singleFuncText = h.query(req("q","id:6", "wt","csv", "csv.header","true", "fl", "*")); - sortedHeader = sortHeader("amount_c,store,v_ss,foo_b,v2_ss,foo_f,foo_i,foo_d,foo_s,foo_dt,id,foo_l," + - "pubyear_ii,store_iis"); + singleFuncText = 
h.query(req("q", "id:6", "wt", "csv", "csv.header", "true", "fl", "*")); + sortedHeader = + sortHeader( + "amount_c,store,v_ss,foo_b,v2_ss,foo_f,foo_i,foo_d,foo_s,foo_dt,id,foo_l," + + "pubyear_ii,store_iis"); singleFuncLines = singleFuncText.split("\n"); assertEquals(2, singleFuncLines.length); assertEquals(sortedHeader, sortHeader(singleFuncLines[0])); - actualVal = Arrays.stream(singleFuncLines[1].trim().split(",")) - .filter(val -> !val.trim().isEmpty() && !val.trim().equals("\"\"")) - .collect(Collectors.toList()); + actualVal = + Arrays.stream(singleFuncLines[1].trim().split(",")) + .filter(val -> !val.trim().isEmpty() && !val.trim().equals("\"\"")) + .collect(Collectors.toList()); assertEquals(3, actualVal.size()); assertTrue(actualVal.containsAll(Arrays.asList("6", "123", "12"))); // explicit price_ff - singleFuncText = h.query(req("q","id:6", "wt","csv", "csv.header","true", "fl", "price_ff")); + singleFuncText = h.query(req("q", "id:6", "wt", "csv", "csv.header", "true", "fl", "price_ff")); singleFuncLines = singleFuncText.split("\n"); assertEquals(2, singleFuncLines.length); assertEquals("price_ff", singleFuncLines[0]); assertEquals("1.3", singleFuncLines[1]); // explicit price_ff with fl=* - singleFuncText = h.query(req("q","id:6", "wt","csv", "csv.header","true", "fl", "*,price_ff")); - sortedHeader = sortHeader("amount_c,store,v_ss,foo_b,v2_ss,foo_f,foo_i,foo_d,foo_s,foo_dt,id,foo_l," + - "pubyear_ii,store_iis,price_ff"); + singleFuncText = + h.query(req("q", "id:6", "wt", "csv", "csv.header", "true", "fl", "*,price_ff")); + sortedHeader = + sortHeader( + "amount_c,store,v_ss,foo_b,v2_ss,foo_f,foo_i,foo_d,foo_s,foo_dt,id,foo_l," + + "pubyear_ii,store_iis,price_ff"); singleFuncLines = singleFuncText.split("\n"); assertEquals(2, singleFuncLines.length); assertEquals(sortedHeader, sortHeader(singleFuncLines[0])); - actualVal = Arrays.stream(singleFuncLines[1].trim().split(",")) - .filter(val -> !val.trim().isEmpty() && !val.trim().equals("\"\"")) - .collect(Collectors.toList()); + actualVal = + Arrays.stream(singleFuncLines[1].trim().split(",")) + .filter(val -> !val.trim().isEmpty() && !val.trim().equals("\"\"")) + .collect(Collectors.toList()); assertEquals(4, actualVal.size()); assertTrue(actualVal.containsAll(Arrays.asList("6", "123", "12", "1.3"))); } - /* * Utility method to sort a comma separated list of strings, for easier comparison regardless of platform */ private String sortHeader(String input) { - String[] output = input.trim().split(","); + String[] output = input.trim().split(","); Arrays.sort(output); return Arrays.toString(output); } - } diff --git a/solr/core/src/test/org/apache/solr/response/TestCustomDocTransformer.java b/solr/core/src/test/org/apache/solr/response/TestCustomDocTransformer.java index 7a13a03efb8..4714913f20d 100644 --- a/solr/core/src/test/org/apache/solr/response/TestCustomDocTransformer.java +++ b/solr/core/src/test/org/apache/solr/response/TestCustomDocTransformer.java @@ -17,7 +17,6 @@ package org.apache.solr.response; import java.io.IOException; - import org.apache.lucene.index.IndexableField; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrDocument; @@ -34,7 +33,7 @@ public class TestCustomDocTransformer extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-doctransformers.xml","schema.xml"); + initCore("solrconfig-doctransformers.xml", "schema.xml"); } @After @@ -47,19 +46,20 @@ public void cleanup() throws Exception { public void 
testCustomTransformer() throws Exception { // Build a simple index int max = 10; - for(int i=0; i -// -// + // + // + // // multiple valued field - assertU(adoc("id","H.A", "srpt_geohash","POINT( 1 2 )")); - assertU(adoc("id","H.B", "srpt_geohash","POINT( 1 2 )", - "srpt_geohash","POINT( 3 4 )")); - assertU(adoc("id","H.C", "srpt_geohash","LINESTRING (30 10, 10 30, 40 40)")); - - assertU(adoc("id","Q.A", "srpt_quad","POINT( 1 2 )")); - assertU(adoc("id","Q.B", "srpt_quad","POINT( 1 2 )", - "srpt_quad","POINT( 3 4 )")); - assertU(adoc("id","Q.C", "srpt_quad","LINESTRING (30 10, 10 30, 40 40)")); - - assertU(adoc("id","P.A", "srpt_packedquad","POINT( 1 2 )")); - assertU(adoc("id","P.B", "srpt_packedquad","POINT( 1 2 )", - "srpt_packedquad","POINT( 3 4 )")); - assertU(adoc("id","P.C", "srpt_packedquad","LINESTRING (30 10, 10 30, 40 40)")); - - + assertU(adoc("id", "H.A", "srpt_geohash", "POINT( 1 2 )")); + assertU(adoc("id", "H.B", "srpt_geohash", "POINT( 1 2 )", "srpt_geohash", "POINT( 3 4 )")); + assertU(adoc("id", "H.C", "srpt_geohash", "LINESTRING (30 10, 10 30, 40 40)")); + + assertU(adoc("id", "Q.A", "srpt_quad", "POINT( 1 2 )")); + assertU(adoc("id", "Q.B", "srpt_quad", "POINT( 1 2 )", "srpt_quad", "POINT( 3 4 )")); + assertU(adoc("id", "Q.C", "srpt_quad", "LINESTRING (30 10, 10 30, 40 40)")); + + assertU(adoc("id", "P.A", "srpt_packedquad", "POINT( 1 2 )")); + assertU( + adoc("id", "P.B", "srpt_packedquad", "POINT( 1 2 )", "srpt_packedquad", "POINT( 3 4 )")); + assertU(adoc("id", "P.C", "srpt_packedquad", "LINESTRING (30 10, 10 30, 40 40)")); + // single valued field - assertU(adoc("id","R.A", "srptgeom","POINT( 1 2 )")); + assertU(adoc("id", "R.A", "srptgeom", "POINT( 1 2 )")); // non-spatial field - assertU(adoc("id","S.X", "str_shape","POINT( 1 2 )")); - assertU(adoc("id","S.A", "str_shape","{\"type\":\"Point\",\"coordinates\":[1,2]}")); - + assertU(adoc("id", "S.X", "str_shape", "POINT( 1 2 )")); + assertU(adoc("id", "S.A", "str_shape", "{\"type\":\"Point\",\"coordinates\":[1,2]}")); assertU(commit()); } @SuppressWarnings({"unchecked"}) - protected Map readJSON(String json) { + protected Map readJSON(String json) { try { return jsonmapper.readValue(json, Map.class); - } - catch(Exception ex) { + } catch (Exception ex) { log.warn("Unable to read GeoJSON From: {}", json); log.warn("Error", ex); fail("Unable to parse JSON GeoJSON Response"); } - return null; + return null; } - + @SuppressWarnings({"unchecked"}) - protected Map getFirstFeatureGeometry(Map json) - { - Map rsp = (Map)json.get("response"); + protected Map getFirstFeatureGeometry(Map json) { + Map rsp = (Map) json.get("response"); assertEquals("FeatureCollection", rsp.get("type")); - List vals = (List)rsp.get("features"); + List vals = (List) rsp.get("features"); assertEquals(1, vals.size()); - Map feature = (Map)vals.get(0); + Map feature = (Map) vals.get(0); assertEquals("Feature", feature.get("type")); - return (Map)feature.get("geometry"); + return (Map) feature.get("geometry"); } @Test public void testRequestExceptions() throws Exception { - + // Make sure we select the field try { - h.query(req( - "q","*:*", - "wt","geojson", - "fl","*")); + h.query( + req( + "q", "*:*", + "wt", "geojson", + "fl", "*")); fail("should Require a parameter to select the field"); + } catch (SolrException ex) { } - catch(SolrException ex) {} - // non-spatial fields *must* be stored as JSON try { - h.query(req( - "q","id:S.X", - "wt","geojson", - "fl","*", - "geojson.field", "str_shape")); + h.query( + req( + "q", "id:S.X", + "wt", 
"geojson", + "fl", "*", + "geojson.field", "str_shape")); fail("should complain about bad shape config"); + } catch (SolrException ex) { } - catch(SolrException ex) {} - } @Test public void testGeoJSONAtRoot() throws Exception { - + // Try reading the whole resposne - String json = h.query(req( - "q","*:*", - "wt","geojson", - "rows","2", - "fl","*", - "geojson.field", "srptgeom", - "indent","true")); - + String json = + h.query( + req( + "q", "*:*", + "wt", "geojson", + "rows", "2", + "fl", "*", + "geojson.field", "srptgeom", + "indent", "true")); + // Check that we have a normal solr response with 'responseHeader' and 'response' - Map rsp = readJSON(json); + Map rsp = readJSON(json); assertNotNull(rsp.get("responseHeader")); assertNotNull(rsp.get("response")); - - json = h.query(req( - "q","*:*", - "wt","geojson", - "rows","2", - "fl","*", - "omitHeader", "true", - "geojson.field", "srptgeom", - "indent","true")); - + + json = + h.query( + req( + "q", "*:*", + "wt", "geojson", + "rows", "2", + "fl", "*", + "omitHeader", "true", + "geojson.field", "srptgeom", + "indent", "true")); + // Check that we have a normal solr response with 'responseHeader' and 'response' rsp = readJSON(json); assertNull(rsp.get("responseHeader")); @@ -163,65 +158,69 @@ public void testGeoJSONAtRoot() throws Exception { assertEquals("FeatureCollection", rsp.get("type")); assertNotNull(rsp.get("features")); } - + @Test public void testGeoJSONOutput() throws Exception { - + // Try reading the whole resposne - readJSON(h.query(req( - "q","*:*", - "wt","geojson", - "fl","*", - "geojson.field", "srpt_geohash", - "indent","true"))); - + readJSON( + h.query( + req( + "q", "*:*", + "wt", "geojson", + "fl", "*", + "geojson.field", "srpt_geohash", + "indent", "true"))); + // Multivalued Valued Point - Map json = readJSON(h.query(req( - "q","id:H.B", - "wt","geojson", - "fl","*", - "geojson.field", "srpt_geohash", - "indent","true"))); - - Map geo = getFirstFeatureGeometry(json); + Map json = + readJSON( + h.query( + req( + "q", "id:H.B", + "wt", "geojson", + "fl", "*", + "geojson.field", "srpt_geohash", + "indent", "true"))); + + Map geo = getFirstFeatureGeometry(json); assertEquals( // NOTE: not actual JSON, it is Map.toString()! "{type=GeometryCollection, geometries=[" - + "{type=Point, coordinates=[1, 2]}, " - + "{type=Point, coordinates=[3, 4]}]}", ""+geo); - - + + "{type=Point, coordinates=[1, 2]}, " + + "{type=Point, coordinates=[3, 4]}]}", + "" + geo); + // Check the same value encoded on different field types - String[][] check = new String[][] { - { "id:H.A", "srpt_geohash" }, - { "id:Q.A", "srpt_quad" }, - { "id:P.A", "srpt_packedquad" }, - { "id:R.A", "srptgeom" }, - { "id:S.A", "str_shape" }, - }; - - for(String[] args : check) { - json = readJSON(h.query(req( - "q",args[0], - "wt","geojson", - "fl","*", - "geojson.field", args[1]))); - + String[][] check = + new String[][] { + {"id:H.A", "srpt_geohash"}, + {"id:Q.A", "srpt_quad"}, + {"id:P.A", "srpt_packedquad"}, + {"id:R.A", "srptgeom"}, + {"id:S.A", "str_shape"}, + }; + + for (String[] args : check) { + json = + readJSON( + h.query(req("q", args[0], "wt", "geojson", "fl", "*", "geojson.field", args[1]))); + geo = getFirstFeatureGeometry(json); - assertEquals( - "Error reading point from: "+args[1] + " ("+args[0]+")", - // NOTE: not actual JSON, it is Map.toString()! - "{type=Point, coordinates=[1, 2]}", ""+geo); + assertEquals( + "Error reading point from: " + args[1] + " (" + args[0] + ")", + // NOTE: not actual JSON, it is Map.toString()! 
+ "{type=Point, coordinates=[1, 2]}", + "" + geo); } } - + @SuppressWarnings({"unchecked"}) - protected Map readFirstDoc(String json) - { + protected Map readFirstDoc(String json) { @SuppressWarnings({"rawtypes"}) - List docs = (List)((Map)readJSON(json).get("response")).get("docs"); - return (Map)docs.get(0); + List docs = (List) ((Map) readJSON(json).get("response")).get("docs"); + return (Map) docs.get(0); } - + public static String normalizeMapToJSON(String val) { val = val.replace("\"", ""); // remove quotes val = val.replace(':', '='); @@ -231,51 +230,43 @@ public static String normalizeMapToJSON(String val) { @Test public void testTransformToAllFormats() throws Exception { - + String wkt = "POINT( 1 2 )"; SupportedFormats fmts = SpatialContext.GEO.getFormats(); Shape shape = fmts.read(wkt); - - String[] check = new String[] { - "srpt_geohash", - "srpt_geohash", - "srpt_quad", - "srpt_packedquad", - "srptgeom", - // "str_shape", // NEEDS TO BE A SpatialField! - }; - - String[] checkFormats = new String[] { - "GeoJSON", - "WKT", - "POLY" - }; - - for(String field : check) { + + String[] check = + new String[] { + "srpt_geohash", "srpt_geohash", "srpt_quad", "srpt_packedquad", "srptgeom", + // "str_shape", // NEEDS TO BE A SpatialField! + }; + + String[] checkFormats = new String[] {"GeoJSON", "WKT", "POLY"}; + + for (String field : check) { // Add a document with the given field - assertU(adoc("id","test", - field, wkt)); + assertU(adoc("id", "test", field, wkt)); assertU(commit()); - - - for(String fmt : checkFormats) { - String json = h.query(req( - "q","id:test", - "wt","json", - "indent", "true", - "fl","xxx:[geo f="+field+" w="+fmt+"]" - )); - - Map doc = readFirstDoc(json); + + for (String fmt : checkFormats) { + String json = + h.query( + req( + "q", "id:test", + "wt", "json", + "indent", "true", + "fl", "xxx:[geo f=" + field + " w=" + fmt + "]")); + + Map doc = readFirstDoc(json); Object v = doc.get("xxx"); String expect = fmts.getWriter(fmt).toString(shape); - - if(!(v instanceof String)) { + + if (!(v instanceof String)) { v = normalizeMapToJSON(v.toString()); expect = normalizeMapToJSON(expect); } - - assertEquals("Bad result: "+field+"/"+fmt, expect, v.toString()); + + assertEquals("Bad result: " + field + "/" + fmt, expect, v.toString()); } } } diff --git a/solr/core/src/test/org/apache/solr/response/TestGraphMLResponseWriter.java b/solr/core/src/test/org/apache/solr/response/TestGraphMLResponseWriter.java index 7f68cd2837b..58478b924a2 100644 --- a/solr/core/src/test/org/apache/solr/response/TestGraphMLResponseWriter.java +++ b/solr/core/src/test/org/apache/solr/response/TestGraphMLResponseWriter.java @@ -18,20 +18,19 @@ package org.apache.solr.response; import java.io.StringWriter; -import java.util.Map; -import java.util.HashMap; -import java.util.List; import java.util.ArrayList; +import java.util.HashMap; import java.util.Iterator; - +import java.util.List; +import java.util.Map; import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.comp.StreamComparator; import org.apache.solr.client.solrj.io.graph.Traversal; -import org.apache.solr.client.solrj.io.stream.TupleStream; import org.apache.solr.client.solrj.io.stream.StreamContext; -import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; -import org.apache.solr.client.solrj.io.Tuple; +import org.apache.solr.client.solrj.io.stream.TupleStream; import org.apache.solr.client.solrj.io.stream.expr.Explanation; +import 
org.apache.solr.client.solrj.io.stream.expr.StreamFactory; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.util.BaseTestHarness; import org.junit.BeforeClass; @@ -41,17 +40,18 @@ public class TestGraphMLResponseWriter extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_ - initCore("solrconfig.xml","schema12.xml"); + initCore("solrconfig.xml", "schema12.xml"); } @Test @SuppressWarnings({"unchecked"}) public void testGraphMLOutput() throws Exception { - SolrQueryRequest request = req("blah", "blah"); // Just need a request to attach the stream and traversal to. + // Just need a request to attach the stream and traversal to. + SolrQueryRequest request = req("blah", "blah"); SolrQueryResponse response = new SolrQueryResponse(); @SuppressWarnings({"rawtypes"}) Map context = request.getContext(); - TupleStream stream = new TestStream(); //Simulates a GatherNodesStream + TupleStream stream = new TestStream(); // Simulates a GatherNodesStream Traversal traversal = new Traversal(); context.put("traversal", traversal); context.put("stream", stream); @@ -61,30 +61,32 @@ public void testGraphMLOutput() throws Exception { graphMLResponseWriter.write(writer, request, response); String graphML = writer.toString(); - //Validate the nodes - String error = BaseTestHarness.validateXPath(graphML, - "//graph/node[1][@id ='bill']", - "//graph/node[2][@id ='jim']", - "//graph/node[3][@id ='max']"); - if(error != null) { + // Validate the nodes + String error = + BaseTestHarness.validateXPath( + graphML, + "//graph/node[1][@id ='bill']", + "//graph/node[2][@id ='jim']", + "//graph/node[3][@id ='max']"); + if (error != null) { throw new Exception(error); } - //Validate the edges - error = BaseTestHarness.validateXPath(graphML, - "//graph/edge[1][@source ='jim']", - "//graph/edge[1][@target ='bill']", - "//graph/edge[2][@source ='max']", - "//graph/edge[2][@target ='bill']", - "//graph/edge[3][@source ='max']", - "//graph/edge[3][@target ='jim']", - "//graph/edge[4][@source ='jim']", - "//graph/edge[4][@target ='max']" - ); - - if(error != null) { + // Validate the edges + error = + BaseTestHarness.validateXPath( + graphML, + "//graph/edge[1][@source ='jim']", + "//graph/edge[1][@target ='bill']", + "//graph/edge[2][@source ='max']", + "//graph/edge[2][@target ='bill']", + "//graph/edge[3][@source ='max']", + "//graph/edge[3][@target ='jim']", + "//graph/edge[4][@source ='jim']", + "//graph/edge[4][@target ='max']"); + + if (error != null) { throw new Exception(error); } - } @SuppressWarnings({"unchecked"}) @@ -93,7 +95,7 @@ private static class TestStream extends TupleStream { private Iterator tuples; public TestStream() { - //Create some nodes + // Create some nodes List testTuples = new ArrayList<>(); @SuppressWarnings({"rawtypes"}) Map m1 = new HashMap(); @@ -128,13 +130,9 @@ public StreamComparator getStreamSort() { return null; } - public void close() { - - } - - public void open() { + public void close() {} - } + public void open() {} public List children() { return null; @@ -142,7 +140,7 @@ public List children() { @SuppressWarnings({"unchecked"}) public Tuple read() { - if(tuples.hasNext()) { + if (tuples.hasNext()) { return tuples.next(); } else { @SuppressWarnings({"rawtypes"}) @@ -152,13 +150,10 @@ public Tuple read() { } } - public void setStreamContext(StreamContext streamContext) { - - } + public void setStreamContext(StreamContext streamContext) {} 
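+ // Test-only stub: nothing in this test ever requests a stream explanation, so the null returned by toExplanation below is safe.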
public Explanation toExplanation(StreamFactory factory) { return null; } - } } diff --git a/solr/core/src/test/org/apache/solr/response/TestJavabinTupleStreamParser.java b/solr/core/src/test/org/apache/solr/response/TestJavabinTupleStreamParser.java index 6cd662e37c1..a97c9d45096 100644 --- a/solr/core/src/test/org/apache/solr/response/TestJavabinTupleStreamParser.java +++ b/solr/core/src/test/org/apache/solr/response/TestJavabinTupleStreamParser.java @@ -17,6 +17,7 @@ package org.apache.solr.response; +import static org.apache.solr.response.SmileWriterTest.constructSolrDocList; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -25,7 +26,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.comp.StreamComparator; @@ -41,36 +41,36 @@ import org.apache.solr.common.util.SimpleOrderedMap; import org.apache.solr.common.util.Utils; -import static org.apache.solr.response.SmileWriterTest.constructSolrDocList; - public class TestJavabinTupleStreamParser extends SolrTestCaseJ4 { public void testKnown() throws IOException { - String payload = "{\n" + - " \"responseHeader\":{\n" + - " \"zkConnected\":true,\n" + - " \"status\":0,\n" + - " \"QTime\":46},\n" + - " \"response\":{\n" + - " \"numFound\":2,\n" + - " \"start\":0,\n" + - " \"docs\":[\n" + - " {\n" + - " \"id\":\"2\",\n" + - " \"a_s\":\"hello2\",\n" + - " \"a_i\":2,\n" + - " \"a_f\":0.0},\n" + - " {\n" + - " \"id\":\"3\",\n" + - " \"a_s\":\"hello3\",\n" + - " \"a_i\":3,\n" + - " \"a_f\":3.0}]}}"; + String payload = + "{\n" + + " \"responseHeader\":{\n" + + " \"zkConnected\":true,\n" + + " \"status\":0,\n" + + " \"QTime\":46},\n" + + " \"response\":{\n" + + " \"numFound\":2,\n" + + " \"start\":0,\n" + + " \"docs\":[\n" + + " {\n" + + " \"id\":\"2\",\n" + + " \"a_s\":\"hello2\",\n" + + " \"a_i\":2,\n" + + " \"a_f\":0.0},\n" + + " {\n" + + " \"id\":\"3\",\n" + + " \"a_s\":\"hello3\",\n" + + " \"a_i\":3,\n" + + " \"a_f\":3.0}]}}"; @SuppressWarnings({"rawtypes"}) SimpleOrderedMap nl = convert2OrderedMap((Map) Utils.fromJSONString(payload)); byte[] bytes = serialize(nl); - try (JavabinTupleStreamParser parser = new JavabinTupleStreamParser(new ByteArrayInputStream(bytes), true)) { + try (JavabinTupleStreamParser parser = + new JavabinTupleStreamParser(new ByteArrayInputStream(bytes), true)) { Map map = parser.next(); assertEquals("2", map.get("id")); map = parser.next(); @@ -79,19 +79,18 @@ public void testKnown() throws IOException { map = parser.next(); assertNull(map); } - } @SuppressWarnings({"unchecked", "rawtypes"}) public SimpleOrderedMap convert2OrderedMap(Map m) { SimpleOrderedMap result = new SimpleOrderedMap<>(); - m.forEach((k, v) -> { - if (v instanceof List) v = ((List) v).iterator(); - if (v instanceof Map) v = convert2OrderedMap((Map) v); - result.add((String) k, v); - }); + m.forEach( + (k, v) -> { + if (v instanceof List) v = ((List) v).iterator(); + if (v instanceof Map) v = convert2OrderedMap((Map) v); + result.add((String) k, v); + }); return result; - } public void testSimple() throws IOException { @@ -101,48 +100,46 @@ public void testSimple() throws IOException { l.add(Map.of("id", 3, "f", 1.0f, "s", "Some str 3")); l.add(Map.of("EOF", true, "RESPONSE_TIME", 206, "sleepMillis", 1000)); Iterator> iterator = l.iterator(); - TupleStream tupleStream = new TupleStream() { - @Override - public void setStreamContext(StreamContext context) { - - } - - 
@Override - public List children() { - return null; - } - - @Override - public void open() throws IOException { - } - - @Override - public void close() throws IOException { - } - - @Override - public Tuple read() throws IOException { - if (iterator.hasNext()) return new Tuple(iterator.next()); - else return null; - } - - @Override - public StreamComparator getStreamSort() { - return null; - } - - @Override - public Explanation toExplanation(StreamFactory factory) throws IOException { - return new StreamExplanation(getStreamNodeId().toString()) - .withFunctionName("Dummy") - .withImplementingClass(this.getClass().getName()) - .withExpressionType(Explanation.ExpressionType.STREAM_SOURCE) - .withExpression("--non-expressible--"); - } - }; + TupleStream tupleStream = + new TupleStream() { + @Override + public void setStreamContext(StreamContext context) {} + + @Override + public List children() { + return null; + } + + @Override + public void open() throws IOException {} + + @Override + public void close() throws IOException {} + + @Override + public Tuple read() throws IOException { + if (iterator.hasNext()) return new Tuple(iterator.next()); + else return null; + } + + @Override + public StreamComparator getStreamSort() { + return null; + } + + @Override + public Explanation toExplanation(StreamFactory factory) throws IOException { + return new StreamExplanation(getStreamNodeId().toString()) + .withFunctionName("Dummy") + .withImplementingClass(this.getClass().getName()) + .withExpressionType(Explanation.ExpressionType.STREAM_SOURCE) + .withExpression("--non-expressible--"); + } + }; byte[] bytes = serialize(tupleStream); - JavabinTupleStreamParser parser = new JavabinTupleStreamParser(new ByteArrayInputStream(bytes), true); + JavabinTupleStreamParser parser = + new JavabinTupleStreamParser(new ByteArrayInputStream(bytes), true); @SuppressWarnings({"rawtypes"}) Map m = parser.next(); assertEquals(1L, m.get("id")); @@ -174,7 +171,8 @@ public Explanation toExplanation(StreamFactory factory) throws IOException { public void testSolrDocumentList() throws IOException { SolrQueryResponse response = new SolrQueryResponse(); SolrDocumentList l = constructSolrDocList(response); - try (JavaBinCodec jbc = new JavaBinCodec(); ByteArrayOutputStream baos = new ByteArrayOutputStream()) { + try (JavaBinCodec jbc = new JavaBinCodec(); + ByteArrayOutputStream baos = new ByteArrayOutputStream()) { jbc.marshal(response.getValues(), baos); } byte[] bytes = serialize(response.getValues()); @@ -183,17 +181,18 @@ public void testSolrDocumentList() throws IOException { } List list = new ArrayList<>(); Map m = null; - try (JavabinTupleStreamParser parser = new JavabinTupleStreamParser(new ByteArrayInputStream(bytes), false)) { + try (JavabinTupleStreamParser parser = + new JavabinTupleStreamParser(new ByteArrayInputStream(bytes), false)) { while ((m = parser.next()) != null) { list.add(m); } } assertEquals(l.size(), list.size()); - for(int i =0;i) list.get(i))); + for (int i = 0; i < list.size(); i++) { + compareSolrDocument(l.get(i), new SolrDocument((Map) list.get(i))); } - } + @SuppressWarnings({"unchecked"}) public static byte[] serialize(Object o) throws IOException { SolrQueryResponse response = new SolrQueryResponse(); diff --git a/solr/core/src/test/org/apache/solr/response/TestPHPSerializedResponseWriter.java b/solr/core/src/test/org/apache/solr/response/TestPHPSerializedResponseWriter.java index e3837438831..df370c26ce4 100644 --- 
a/solr/core/src/test/org/apache/solr/response/TestPHPSerializedResponseWriter.java +++ b/solr/core/src/test/org/apache/solr/response/TestPHPSerializedResponseWriter.java @@ -20,22 +20,18 @@ import java.io.StringWriter; import java.util.Arrays; import java.util.LinkedHashMap; - import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; +import org.apache.solr.request.SolrQueryRequest; import org.junit.BeforeClass; import org.junit.Test; -/** - * Basic PHPS tests based on JSONWriterTest - * - */ +/** Basic PHPS tests based on JSONWriterTest */ public class TestPHPSerializedResponseWriter extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); } @Test @@ -49,15 +45,14 @@ public void testSimple() throws IOException { rsp.add("data2", 42); rsp.add("data3", true); w.write(buf, req, rsp); - assertEquals("a:3:{s:5:\"data1\";s:5:\"hello\";s:5:\"data2\";i:42;s:5:\"data3\";b:1;}", - buf.toString()); + assertEquals( + "a:3:{s:5:\"data1\";s:5:\"hello\";s:5:\"data2\";i:42;s:5:\"data3\";b:1;}", buf.toString()); req.close(); } - @Test public void testSolrDocuments() throws IOException { - SolrQueryRequest req = req("q","*:*"); + SolrQueryRequest req = req("q", "*:*"); SolrQueryResponse rsp = new SolrQueryResponse(); QueryResponseWriter w = new PHPSerializedResponseWriter(); StringWriter buf = new StringWriter(); @@ -65,38 +60,38 @@ public void testSolrDocuments() throws IOException { SolrDocument d = new SolrDocument(); SolrDocument d1 = d; - d.addField("id","1"); - d.addField("data1","hello"); - d.addField("data2",42); - d.addField("data3",true); + d.addField("id", "1"); + d.addField("data1", "hello"); + d.addField("data2", 42); + d.addField("data3", true); - // multivalued fields: + // multivalued fields: // extremely odd edge case: value is a map - // we use LinkedHashMap because we are doing a string comparison + // we use LinkedHashMap because we are doing a string comparison // later and we need predictible ordering - LinkedHashMap nl = new LinkedHashMap<>(); + LinkedHashMap nl = new LinkedHashMap<>(); nl.put("data4.1", "hashmap"); nl.put("data4.2", "hello"); - d.addField("data4",nl); - // array value - d.addField("data5",Arrays.asList("data5.1", "data5.2", "data5.3")); + d.addField("data4", nl); + // array value + d.addField("data5", Arrays.asList("data5.1", "data5.2", "data5.3")); // adding one more document to test array indexes d = new SolrDocument(); SolrDocument d2 = d; - d.addField("id","2"); + d.addField("id", "2"); SolrDocumentList sdl = new SolrDocumentList(); sdl.add(d1); sdl.add(d2); rsp.addResponse(sdl); - + w.write(buf, req, rsp); - assertEquals("a:1:{s:8:\"response\";a:4:{s:8:\"numFound\";i:0;s:5:\"start\";i:0;s:13:\"numFoundExact\";b:1;s:4:\"docs\";a:2:{i:0;a:6:{s:2:\"id\";s:1:\"1\";s:5:\"data1\";s:5:\"hello\";s:5:\"data2\";i:42;s:5:\"data3\";b:1;s:5:\"data4\";a:2:{s:7:\"data4.1\";s:7:\"hashmap\";s:7:\"data4.2\";s:5:\"hello\";}s:5:\"data5\";a:3:{i:0;s:7:\"data5.1\";i:1;s:7:\"data5.2\";i:2;s:7:\"data5.3\";}}i:1;a:1:{s:2:\"id\";s:1:\"2\";}}}}", - buf.toString()); + assertEquals( + 
"a:1:{s:8:\"response\";a:4:{s:8:\"numFound\";i:0;s:5:\"start\";i:0;s:13:\"numFoundExact\";b:1;s:4:\"docs\";a:2:{i:0;a:6:{s:2:\"id\";s:1:\"1\";s:5:\"data1\";s:5:\"hello\";s:5:\"data2\";i:42;s:5:\"data3\";b:1;s:5:\"data4\";a:2:{s:7:\"data4.1\";s:7:\"hashmap\";s:7:\"data4.2\";s:5:\"hello\";}s:5:\"data5\";a:3:{i:0;s:7:\"data5.1\";i:1;s:7:\"data5.2\";i:2;s:7:\"data5.3\";}}i:1;a:1:{s:2:\"id\";s:1:\"2\";}}}}", + buf.toString()); req.close(); } - } diff --git a/solr/core/src/test/org/apache/solr/response/TestPushWriter.java b/solr/core/src/test/org/apache/solr/response/TestPushWriter.java index 6281e0fee64..bbfb4500d85 100644 --- a/solr/core/src/test/org/apache/solr/response/TestPushWriter.java +++ b/solr/core/src/test/org/apache/solr/response/TestPushWriter.java @@ -17,6 +17,7 @@ package org.apache.solr.response; +import static java.util.Collections.singletonMap; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -24,7 +25,6 @@ import java.lang.invoke.MethodHandles; import java.nio.charset.StandardCharsets; import java.util.Map; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.IteratorWriter; import org.apache.solr.common.MapWriter; @@ -38,19 +38,19 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static java.util.Collections.singletonMap; - public class TestPushWriter extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - @SuppressWarnings({"unchecked"}) public void testStandardResponse() throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); Map m; try (OutputStreamWriter osw = new OutputStreamWriter(baos, StandardCharsets.UTF_8)) { - JSONWriter pw = new JSONWriter(osw, - new LocalSolrQueryRequest(null, new ModifiableSolrParams()), new SolrQueryResponse()); + JSONWriter pw = + new JSONWriter( + osw, + new LocalSolrQueryRequest(null, new ModifiableSolrParams()), + new SolrQueryResponse()); writeData(null, pw); osw.flush(); if (log.isInfoEnabled()) { @@ -60,7 +60,7 @@ public void testStandardResponse() throws IOException { checkValues(m); } - try (JavaBinCodec jbc = new JavaBinCodec(baos= new ByteArrayOutputStream(), null)) { + try (JavaBinCodec jbc = new JavaBinCodec(baos = new ByteArrayOutputStream(), null)) { writeData(jbc); m = (Map) Utils.fromJavabin(baos.toByteArray()); } @@ -70,55 +70,77 @@ public void testStandardResponse() throws IOException { public void testXmlWriter() throws Exception { try (ByteArrayOutputStream baos = new ByteArrayOutputStream(); OutputStreamWriter osw = new OutputStreamWriter(baos, StandardCharsets.UTF_8)) { - XMLWriter xml = new XMLWriter(osw, - new LocalSolrQueryRequest(null, new ModifiableSolrParams()), new SolrQueryResponse()); + XMLWriter xml = + new XMLWriter( + osw, + new LocalSolrQueryRequest(null, new ModifiableSolrParams()), + new SolrQueryResponse()); writeData(null, xml); osw.flush(); if (log.isInfoEnabled()) { log.info("{}", new String(baos.toByteArray(), StandardCharsets.UTF_8)); } String response = new String(baos.toByteArray(), StandardCharsets.UTF_8); - BaseTestHarness.validateXPath(response, + BaseTestHarness.validateXPath( + response, "/lst/lst[@name='responseHeader']/int[@name='status'][.=1]", "/lst/lst[@name='response']/int[@name=numFound][.=10]", "/lst/lst[@name='response']/arr[@name='docs'][0]/lst/int[@name='id'][.=1]", "/lst/lst[@name='response']/arr[@name='docs'][1]/lst/int[@name='id'][.=2]", - "/lst/lst[@name='response']/arr[@name='docs'][2]/lst/int[@name='id'][.=3]" - ); + 
"/lst/lst[@name='response']/arr[@name='docs'][2]/lst/int[@name='id'][.=3]"); } } protected void checkValues(Map m) { - assertEquals(0, ((Number)Utils.getObjectByPath(m, true, "responseHeader/status")).intValue()); - assertEquals(10, ((Number)Utils.getObjectByPath(m, true, "response/numFound")).intValue()); - assertEquals(1, ((Number)Utils.getObjectByPath(m, true, "response/docs[0]/id")).intValue()); - assertEquals(2, ((Number)Utils.getObjectByPath(m, true, "response/docs[1]/id")).intValue()); - assertEquals(3, ((Number)Utils.getObjectByPath(m, true, "response/docs[2]/id")).intValue()); + assertEquals(0, ((Number) Utils.getObjectByPath(m, true, "responseHeader/status")).intValue()); + assertEquals(10, ((Number) Utils.getObjectByPath(m, true, "response/numFound")).intValue()); + assertEquals(1, ((Number) Utils.getObjectByPath(m, true, "response/docs[0]/id")).intValue()); + assertEquals(2, ((Number) Utils.getObjectByPath(m, true, "response/docs[1]/id")).intValue()); + assertEquals(3, ((Number) Utils.getObjectByPath(m, true, "response/docs[2]/id")).intValue()); } protected void writeData(PushWriter pw) throws IOException { - pw.writeMap(m -> { - m.put("responseHeader", singletonMap("status", 0)) - .put("response", (MapWriter) m1 -> { - m1.put("numFound", 10) - .put("docs", (IteratorWriter) w -> { - w.add((MapWriter) m3 -> m3.put("id", 1)) - .add(singletonMap("id", 2)) - .add(singletonMap("id", 3)); - }); }); }); + pw.writeMap( + m -> { + m.put("responseHeader", singletonMap("status", 0)) + .put( + "response", + (MapWriter) + m1 -> { + m1.put("numFound", 10) + .put( + "docs", + (IteratorWriter) + w -> { + w.add((MapWriter) m3 -> m3.put("id", 1)) + .add(singletonMap("id", 2)) + .add(singletonMap("id", 3)); + }); + }); + }); pw.close(); } protected void writeData(String name, TextWriter pw) throws IOException { - pw.writeMap(name, m -> { - m.put("responseHeader", singletonMap("status", 0)) - .put("response", (MapWriter) m1 -> { - m1.put("numFound", 10) - .put("docs", (IteratorWriter) w -> { - w.add((MapWriter) m3 -> m3.put("id", 1)) - .add(singletonMap("id", 2)) - .add(singletonMap("id", 3)); - }); }); }); + pw.writeMap( + name, + m -> { + m.put("responseHeader", singletonMap("status", 0)) + .put( + "response", + (MapWriter) + m1 -> { + m1.put("numFound", 10) + .put( + "docs", + (IteratorWriter) + w -> { + w.add((MapWriter) m3 -> m3.put("id", 1)) + .add(singletonMap("id", 2)) + .add(singletonMap("id", 3)); + }); + }); + }); pw.close(); } } diff --git a/solr/core/src/test/org/apache/solr/response/TestRawResponseWriter.java b/solr/core/src/test/org/apache/solr/response/TestRawResponseWriter.java index 79c350267ea..072fcbabe30 100644 --- a/solr/core/src/test/org/apache/solr/response/TestRawResponseWriter.java +++ b/solr/core/src/test/org/apache/solr/response/TestRawResponseWriter.java @@ -21,21 +21,18 @@ import java.io.IOException; import java.io.StringWriter; import java.nio.charset.StandardCharsets; - import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.impl.BinaryResponseParser; import org.apache.solr.common.util.ContentStreamBase.ByteArrayStream; import org.apache.solr.common.util.ContentStreamBase.StringStream; import org.apache.solr.common.util.NamedList; -import org.junit.BeforeClass; import org.junit.AfterClass; +import org.junit.BeforeClass; -/** - * Tests the {@link RawResponseWriter} behavior, in particular when dealing with "base" writer - */ +/** Tests the {@link RawResponseWriter} behavior, in particular when dealing with 
"base" writer */ public class TestRawResponseWriter extends SolrTestCaseJ4 { - + private static RawResponseWriter writerXmlBase; private static RawResponseWriter writerJsonBase; private static RawResponseWriter writerBinBase; @@ -49,16 +46,15 @@ public static void setupCoreAndWriters() throws Exception { // QueryResponseWriters' constructed programmatically, // but we do use this core for managing the life cycle of the requests // we spin up. - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); writerNoBase = newRawResponseWriter(null); /* defaults to standard writer as base */ writerXmlBase = newRawResponseWriter("xml"); writerJsonBase = newRawResponseWriter("json"); writerBinBase = newRawResponseWriter("javabin"); - allWriters = new RawResponseWriter[] { - writerXmlBase, writerJsonBase, writerBinBase, writerNoBase - }; + allWriters = + new RawResponseWriter[] {writerXmlBase, writerJsonBase, writerBinBase, writerNoBase}; } @AfterClass @@ -72,10 +68,10 @@ public static void cleanupWriters() throws Exception { } /** - * Regardless of base writer, the bytes in should be the same as the bytes out - * when response is a raw ContentStream written to an OutputStream + * Regardless of base writer, the bytes in should be the same as the bytes out when response is a + * raw ContentStream written to an OutputStream */ - public void testRawBinaryContentStream() throws IOException { + public void testRawBinaryContentStream() throws IOException { SolrQueryResponse rsp = new SolrQueryResponse(); byte[] data = new byte[TestUtil.nextInt(random(), 10, 2048)]; random().nextBytes(data); @@ -83,7 +79,7 @@ public void testRawBinaryContentStream() throws IOException { stream.setContentType(TestUtil.randomSimpleString(random())); rsp.add(RawResponseWriter.CONTENT, stream); - + for (RawResponseWriter writer : allWriters) { assertEquals(stream.getContentType(), writer.getContentType(req(), rsp)); ByteArrayOutputStream out = new ByteArrayOutputStream(); @@ -93,17 +89,17 @@ public void testRawBinaryContentStream() throws IOException { } /** - * Regardless of base writer, the String in should be the same as the String out - * when response is a raw ContentStream written to a Writer (or OutputStream) + * Regardless of base writer, the String in should be the same as the String out when response is + * a raw ContentStream written to a Writer (or OutputStream) */ - public void testRawStringContentStream() throws IOException { + public void testRawStringContentStream() throws IOException { SolrQueryResponse rsp = new SolrQueryResponse(); String data = TestUtil.randomUnicodeString(random()); StringStream stream = new StringStream(data); stream.setContentType(TestUtil.randomSimpleString(random())); rsp.add(RawResponseWriter.CONTENT, stream); - + for (RawResponseWriter writer : allWriters) { assertEquals(stream.getContentType(), writer.getContentType(req(), rsp)); @@ -119,9 +115,7 @@ public void testRawStringContentStream() throws IOException { } } - /** - * When no real ContentStream is specified, each base writer should be used for formatting - */ + /** When no real ContentStream is specified, each base writer should be used for formatting */ public void testStructuredDataViaBaseWriters() throws IOException { SolrQueryResponse rsp = new SolrQueryResponse(); // Don't send a ContentStream back, this will fall back to the configured base writer. 
@@ -129,21 +123,22 @@ public void testStructuredDataViaBaseWriters() throws IOException { rsp.add(RawResponseWriter.CONTENT, "test"); rsp.add("foo", "bar"); - // check Content-Type against each writer + // check Content-Type against each writer assertEquals("application/xml; charset=UTF-8", writerNoBase.getContentType(req(), rsp)); assertEquals("application/xml; charset=UTF-8", writerXmlBase.getContentType(req(), rsp)); assertEquals("application/json; charset=UTF-8", writerJsonBase.getContentType(req(), rsp)); - assertEquals("application/octet-stream", writerBinBase.getContentType(req(), rsp)); + assertEquals("application/octet-stream", writerBinBase.getContentType(req(), rsp)); // check response against each writer // xml & none (default behavior same as XML) - String xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + - "<response>\n" + - "\n" + - "<str name=\"content\">test</str>\n" + - "<str name=\"foo\">bar</str>\n" + - "</response>\n"; + String xml = + "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + + "<response>\n" + + "\n" + + "<str name=\"content\">test</str>\n" + + "<str name=\"foo\">bar</str>\n" + + "</response>\n"; StringWriter xmlSout = new StringWriter(); writerXmlBase.write(xmlSout, req(), rsp); assertEquals(xml, xmlSout.toString()); @@ -159,9 +154,7 @@ assertEquals(xml, noneBout.toString(StandardCharsets.UTF_8.toString())); // json - String json = "{\n" + - " \"content\":\"test\",\n" + - " \"foo\":\"bar\"}\n"; + String json = "{\n" + " \"content\":\"test\",\n" + " \"foo\":\"bar\"}\n"; StringWriter jsonSout = new StringWriter(); writerJsonBase.write(jsonSout, req(), rsp); assertEquals(json, jsonSout.toString()); @@ -173,19 +166,19 @@ ByteArrayOutputStream bytes = new ByteArrayOutputStream(); writerBinBase.write(bytes, req(), rsp); BinaryResponseParser parser = new BinaryResponseParser(); - NamedList out = parser.processResponse - (new ByteArrayInputStream(bytes.toByteArray()), /* encoding irrelevant */ null); + NamedList out = + parser.processResponse( + new ByteArrayInputStream(bytes.toByteArray()), /* encoding irrelevant */ null); assertEquals(RawResponseWriter.CONTENT, out.getName(0)); assertEquals("test", out.getVal(0)); assertEquals("foo", out.getName(1)); assertEquals("bar", out.getVal(1)); - } /** - * Generates a new {@link RawResponseWriter} wrapping the specified baseWriter name - * (which much either be an implicitly defined response writer, or one explicitly - * configured in solrconfig.xml) + * Generates a new {@link RawResponseWriter} wrapping the specified baseWriter name (which must + * either be an implicitly defined response writer, or one explicitly configured in + * solrconfig.xml) * * @param baseWriter null or the name of a valid base writer */ @@ -198,5 +191,4 @@ private static RawResponseWriter newRawResponseWriter(String baseWriter) { writer.init(initArgs); return writer; } - } diff --git a/solr/core/src/test/org/apache/solr/response/TestRawTransformer.java b/solr/core/src/test/org/apache/solr/response/TestRawTransformer.java index 28fa1c2f03f..bcae13a38de 100644 --- a/solr/core/src/test/org/apache/solr/response/TestRawTransformer.java +++ b/solr/core/src/test/org/apache/solr/response/TestRawTransformer.java @@ -16,6 +16,15 @@ */ package org.apache.solr.response; +import java.io.File; +import java.lang.invoke.MethodHandles; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Properties; +import java.util.regex.Pattern; import org.apache.commons.io.FileUtils; import org.apache.solr.SolrTestCaseJ4; import 
org.apache.solr.client.solrj.embedded.JettySolrRunner; @@ -33,20 +42,10 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.io.File; -import java.lang.invoke.MethodHandles; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Properties; -import java.util.regex.Pattern; - /** * Tests Raw JSON output for fields when used with and without the unique key field. * - * See SOLR-7993 + *
+ * <p>
See SOLR-7993 */ public class TestRawTransformer extends SolrCloudTestCase { @@ -54,6 +53,7 @@ public class TestRawTransformer extends SolrCloudTestCase { /** A basic client for operations at the cloud level, default collection will be set */ private static JettySolrRunner JSR; + private static HttpSolrClient CLIENT; @BeforeClass @@ -76,14 +76,20 @@ private static void initStandalone() throws Exception { final File collDir = new File(homeDir, "collection1"); final File confDir = collDir.toPath().resolve("conf").toFile(); confDir.mkdirs(); - FileUtils.copyFile(new File(SolrTestCaseJ4.TEST_HOME(), "solr.xml"), new File(homeDir, "solr.xml")); + FileUtils.copyFile( + new File(SolrTestCaseJ4.TEST_HOME(), "solr.xml"), new File(homeDir, "solr.xml")); String src_dir = TEST_HOME() + "/collection1/conf"; - FileUtils.copyFile(new File(src_dir, "schema_latest.xml"), - new File(confDir, "schema.xml")); - FileUtils.copyFile(new File(src_dir, "solrconfig-minimal.xml"), - new File(confDir, "solrconfig.xml")); - for (String file : new String[] {"solrconfig.snippet.randomindexconfig.xml", - "stopwords.txt", "synonyms.txt", "protwords.txt", "currency.xml"}) { + FileUtils.copyFile(new File(src_dir, "schema_latest.xml"), new File(confDir, "schema.xml")); + FileUtils.copyFile( + new File(src_dir, "solrconfig-minimal.xml"), new File(confDir, "solrconfig.xml")); + for (String file : + new String[] { + "solrconfig.snippet.randomindexconfig.xml", + "stopwords.txt", + "synonyms.txt", + "protwords.txt", + "currency.xml" + }) { FileUtils.copyFile(new File(src_dir, file), new File(confDir, file)); } Files.createFile(collDir.toPath().resolve("core.properties")); @@ -97,22 +103,23 @@ private static void initCloud() throws Exception { final Path configDir = Paths.get(TEST_HOME(), "collection1", "conf"); final int numNodes = 3; - MiniSolrCloudCluster cloud = configureCluster(numNodes).addConfig(configName, configDir).configure(); + MiniSolrCloudCluster cloud = + configureCluster(numNodes).addConfig(configName, configDir).configure(); Map collectionProperties = new LinkedHashMap<>(); collectionProperties.put("config", "solrconfig-minimal.xml"); collectionProperties.put("schema", "schema_latest.xml"); CloudSolrClient cloudSolrClient = cloud.getSolrClient(); CollectionAdminRequest.createCollection("collection1", configName, numNodes, 1) - .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) - .setProperties(collectionProperties) - .process(cloudSolrClient); + .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) + .setProperties(collectionProperties) + .process(cloudSolrClient); JSR = cloud.getRandomJetty(random()); } @AfterClass - private static void afterClass() throws Exception{ + private static void afterClass() throws Exception { if (JSR != null) { JSR.stop(); } @@ -128,6 +135,7 @@ public void cleanup() throws Exception { } private static final int MAX = 10; + private static void initIndex() throws Exception { // Build a simple index // TODO: why are we indexing 10 docs here? Wouldn't one suffice? 
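+ // Note: each doc below also carries two values in the multiValued "links" field, which the multiValued assertions in the tests rely on.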
@@ -135,8 +143,12 @@ private static void initIndex() throws Exception { SolrInputDocument sdoc = new SolrInputDocument(); sdoc.addField("id", i); // below are single-valued fields - sdoc.addField("subject", "{poffL:[{offL:[{oGUID:\"79D5A31D-B3E4-4667-B812-09DF4336B900\",oID:\"OO73XRX\",prmryO:1,oRank:1,addTp:\"Office\",addCd:\"AA4GJ5T\",ad1:\"102 S 3rd St Ste 100\",city:\"Carson City\",st:\"MI\",zip:\"48811\",lat:43.176885,lng:-84.842919,phL:[\"(989) 584-1308\"],faxL:[\"(989) 584-6453\"]}]}]}"); - sdoc.addField("author", "sometrivialxml"); + sdoc.addField( + "subject", + "{poffL:[{offL:[{oGUID:\"79D5A31D-B3E4-4667-B812-09DF4336B900\",oID:\"OO73XRX\",prmryO:1,oRank:1,addTp:\"Office\",addCd:\"AA4GJ5T\",ad1:\"102 S 3rd St Ste 100\",city:\"Carson City\",st:\"MI\",zip:\"48811\",lat:43.176885,lng:-84.842919,phL:[\"(989) 584-1308\"],faxL:[\"(989) 584-6453\"]}]}]}"); + sdoc.addField( + "author", + "sometrivialxml"); // below are multiValued fields sdoc.addField("links", "{an_array:[1,2,3]}"); sdoc.addField("links", "{an_array:[4,5,6]}"); @@ -145,72 +157,130 @@ private static void initIndex() throws Exception { CLIENT.add("collection1", sdoc); } CLIENT.commit("collection1"); - assertEquals(MAX, CLIENT.query("collection1", new ModifiableSolrParams(Map.of("q", new String[]{"*:*"}))).getResults().getNumFound()); + assertEquals( + MAX, + CLIENT + .query("collection1", new ModifiableSolrParams(Map.of("q", new String[] {"*:*"}))) + .getResults() + .getNumFound()); } @Test public void testXmlTransformer() throws Exception { - QueryRequest req = new QueryRequest(new ModifiableSolrParams( - Map.of("q", new String[]{"*:*"}, "fl", new String[]{"author:[xml],content_type:[xml]"}, "wt", new String[]{"xml"}) - )); + QueryRequest req = + new QueryRequest( + new ModifiableSolrParams( + Map.of( + "q", + new String[] {"*:*"}, + "fl", + new String[] {"author:[xml],content_type:[xml]"}, + "wt", + new String[] {"xml"}))); req.setResponseParser(XML_NOOP_RESPONSE_PARSER); - String strResponse = (String) CLIENT.request(req,"collection1").get("response"); - assertTrue("response does not contain raw XML encoding: " + strResponse, - strResponse.contains("sometrivialxml")); - assertTrue("response (multiValued) does not contain raw XML encoding: " + strResponse, - Pattern.compile("\\s*one\\s*two\\s*").matcher(strResponse).find()); - - req = new QueryRequest(new ModifiableSolrParams( - Map.of("q", new String[]{"*:*"}, "fl", new String[]{"author,content_type"}, "wt", new String[]{"xml"}) - )); + String strResponse = (String) CLIENT.request(req, "collection1").get("response"); + assertTrue( + "response does not contain raw XML encoding: " + strResponse, + strResponse.contains( + "sometrivialxml")); + assertTrue( + "response (multiValued) does not contain raw XML encoding: " + strResponse, + Pattern.compile( + "\\s*one\\s*two\\s*") + .matcher(strResponse) + .find()); + + req = + new QueryRequest( + new ModifiableSolrParams( + Map.of( + "q", + new String[] {"*:*"}, + "fl", + new String[] {"author,content_type"}, + "wt", + new String[] {"xml"}))); req.setResponseParser(XML_NOOP_RESPONSE_PARSER); strResponse = (String) CLIENT.request(req, "collection1").get("response"); - assertTrue("response does not contain escaped XML encoding: " + strResponse, - strResponse.contains("<root><child1")); - assertTrue("response (multiValued) does not contain escaped XML encoding: " + strResponse, - Pattern.compile("\\s*<root>").matcher(strResponse).find()); - - req = new QueryRequest(new ModifiableSolrParams( - Map.of("q", new String[]{"*:*"}, "fl", 
new String[]{"author:[xml],content_type:[xml]"}, "wt", new String[]{"json"}) - )); + assertTrue( + "response does not contain escaped XML encoding: " + strResponse, + strResponse.contains("<root><child1")); + assertTrue( + "response (multiValued) does not contain escaped XML encoding: " + strResponse, + Pattern.compile("\\s*<root>") + .matcher(strResponse) + .find()); + + req = + new QueryRequest( + new ModifiableSolrParams( + Map.of( + "q", + new String[] {"*:*"}, + "fl", + new String[] {"author:[xml],content_type:[xml]"}, + "wt", + new String[] {"json"}))); req.setResponseParser(JSON_NOOP_RESPONSE_PARSER); strResponse = (String) CLIENT.request(req, "collection1").get("response"); - assertTrue("unexpected serialization of XML field value in JSON response: " + strResponse, - strResponse.contains("\"author\":\"some")); - assertTrue("unexpected (multiValued) serialization of XML field value in JSON response: " + strResponse, - strResponse.contains("\"content_type\":[\"one")); + assertTrue( + "unexpected serialization of XML field value in JSON response: " + strResponse, + strResponse.contains("\"author\":\"some")); + assertTrue( + "unexpected (multiValued) serialization of XML field value in JSON response: " + + strResponse, + strResponse.contains("\"content_type\":[\"one")); } @Test public void testJsonTransformer() throws Exception { - QueryRequest req = new QueryRequest(new ModifiableSolrParams( - Map.of("q", new String[]{"*:*"}, "fl", new String[]{"subject:[json],links:[json]"}, "wt", new String[]{"json"}) - )); + QueryRequest req = + new QueryRequest( + new ModifiableSolrParams( + Map.of( + "q", + new String[] {"*:*"}, + "fl", + new String[] {"subject:[json],links:[json]"}, + "wt", + new String[] {"json"}))); req.setResponseParser(JSON_NOOP_RESPONSE_PARSER); - String strResponse = (String) CLIENT.request(req,"collection1").get("response"); - assertTrue("response does not contain right JSON encoding: " + strResponse, + String strResponse = (String) CLIENT.request(req, "collection1").get("response"); + assertTrue( + "response does not contain right JSON encoding: " + strResponse, strResponse.contains("\"subject\":{poffL:[{offL:[{oGUID:\"7")); - assertTrue("response (multiValued) does not contain right JSON encoding: " + strResponse, - Pattern.compile("\"links\":\\[\\{an_array:\\[1,2,3]},\\s*\\{an_array:\\[4,5,6]}]").matcher(strResponse).find()); + assertTrue( + "response (multiValued) does not contain right JSON encoding: " + strResponse, + Pattern.compile("\"links\":\\[\\{an_array:\\[1,2,3]},\\s*\\{an_array:\\[4,5,6]}]") + .matcher(strResponse) + .find()); - req = new QueryRequest(new ModifiableSolrParams( - Map.of("q", new String[]{"*:*"}, "fl", new String[]{"id", "subject,links"}, "wt", new String[]{"json"}) - )); + req = + new QueryRequest( + new ModifiableSolrParams( + Map.of( + "q", + new String[] {"*:*"}, + "fl", + new String[] {"id", "subject,links"}, + "wt", + new String[] {"json"}))); req.setResponseParser(JSON_NOOP_RESPONSE_PARSER); strResponse = (String) CLIENT.request(req, "collection1").get("response"); - assertTrue("response does not contain right JSON encoding: " + strResponse, + assertTrue( + "response does not contain right JSON encoding: " + strResponse, strResponse.contains("subject\":\"")); - assertTrue("response (multiValued) does not contain right JSON encoding: " + strResponse, - strResponse.contains("\"links\":[\"")); + assertTrue( + "response (multiValued) does not contain right JSON encoding: " + strResponse, + strResponse.contains("\"links\":[\"")); } private 
static final NoOpResponseParser XML_NOOP_RESPONSE_PARSER = new NoOpResponseParser(); - private static final NoOpResponseParser JSON_NOOP_RESPONSE_PARSER = new NoOpResponseParser() { - @Override - public String getWriterType() { - return "json"; - } - }; - + private static final NoOpResponseParser JSON_NOOP_RESPONSE_PARSER = + new NoOpResponseParser() { + @Override + public String getWriterType() { + return "json"; + } + }; } - diff --git a/solr/core/src/test/org/apache/solr/response/TestRetrieveFieldsOptimizer.java b/solr/core/src/test/org/apache/solr/response/TestRetrieveFieldsOptimizer.java index f63c76baa18..f7d81b5c0af 100644 --- a/solr/core/src/test/org/apache/solr/response/TestRetrieveFieldsOptimizer.java +++ b/solr/core/src/test/org/apache/solr/response/TestRetrieveFieldsOptimizer.java @@ -17,6 +17,13 @@ package org.apache.solr.response; +import static junit.framework.Assert.fail; +import static org.apache.lucene.util.LuceneTestCase.random; +import static org.apache.solr.search.SolrReturnFields.FIELD_SOURCES.ALL_FROM_DV; +import static org.apache.solr.search.SolrReturnFields.FIELD_SOURCES.ALL_FROM_STORED; +import static org.apache.solr.search.SolrReturnFields.FIELD_SOURCES.MIXED_SOURCES; + +import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -35,8 +42,6 @@ import java.util.TreeSet; import java.util.stream.Collectors; import java.util.stream.Stream; - -import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import org.apache.lucene.index.FieldInfo; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrDocument; @@ -62,22 +67,16 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -import static junit.framework.Assert.fail; -import static org.apache.lucene.util.LuceneTestCase.random; -import static org.apache.solr.search.SolrReturnFields.FIELD_SOURCES.ALL_FROM_STORED; -import static org.apache.solr.search.SolrReturnFields.FIELD_SOURCES.MIXED_SOURCES; -import static org.apache.solr.search.SolrReturnFields.FIELD_SOURCES.ALL_FROM_DV; - public class TestRetrieveFieldsOptimizer extends SolrTestCaseJ4 { - @Rule - public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); + @Rule public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); @BeforeClass public static void initManagedSchemaCore() throws Exception { // This testing approach means no schema file or per-test temp solr-home! System.setProperty("managed.schema.mutable", "true"); - System.setProperty("managed.schema.resourceName", "schema-one-field-no-dynamic-field-unique-key.xml"); + System.setProperty( + "managed.schema.resourceName", "schema-one-field-no-dynamic-field-unique-key.xml"); System.setProperty("enable.update.log", "false"); initCore("solrconfig-managed-schema.xml", "ignoredSchemaName"); @@ -105,7 +104,7 @@ public static void initManagedSchemaCore() throws Exception { static FieldHolder fieldsHolder = new FieldHolder(); static Map>> allFieldValuesInput = new HashMap<>(); - //TODO, how to generalize? + // TODO, how to generalize? @SuppressWarnings({"unchecked"}) private static void setupAllFields() throws IOException { @@ -117,10 +116,12 @@ private static void setupAllFields() throws IOException { // We need our special id fields to find the docs later. 
typesHolder.addFieldType(schema, idNotStoredDv, RetrieveFieldType.TEST_TYPE.STRING); - fieldsToAdd.put(idNotStoredDv, map("stored", "false", "docValues", "true", "multiValued", "false")); + fieldsToAdd.put( + idNotStoredDv, map("stored", "false", "docValues", "true", "multiValued", "false")); typesHolder.addFieldType(schema, idStoredNotDv, RetrieveFieldType.TEST_TYPE.STRING); - fieldsToAdd.put(idStoredNotDv, map("stored", "true", "docValues", "false", "multiValued", "false")); + fieldsToAdd.put( + idStoredNotDv, map("stored", "true", "docValues", "false", "multiValued", "false")); for (RetrieveFieldType.TEST_TYPE type : RetrieveFieldType.solrClassMap.keySet()) { // We happen to be naming the fields and types identically. @@ -163,21 +164,23 @@ private static void setupAllFields() throws IOException { addDocWithAllFields(idx); } assertU(commit()); - // Now we need to massage the expected values returned based on the docValues type 'cause it's weird. + // Now we need to massage the expected values returned based on the docValues type 'cause it's + // weird. final RefCounted refCounted = h.getCore().getNewestSearcher(true); try { - //static Map>> + // static Map>> for (Map> docFieldsEnt : allFieldValuesInput.values()) { for (Map.Entry> oneField : docFieldsEnt.entrySet()) { RetrieveField field = fieldsHolder.getTestField(oneField.getKey()); - field.expectedValsAsStrings(refCounted.get().getSlowAtomicReader().getFieldInfos().fieldInfo(field.name), + field.expectedValsAsStrings( + refCounted.get().getSlowAtomicReader().getFieldInfos().fieldInfo(field.name), oneField.getValue()); } } } finally { refCounted.decref(); } - } + } static void addDocWithAllFields(int idx) { @@ -214,18 +217,19 @@ public void testDocFetcher() throws Exception { Thread threads[] = new Thread[numThreads]; for (int idx = 0; idx < numThreads; idx++) { - threads[idx] = new Thread() { - @Override - public void run() { - try { - checkFetchSources(ALL_FROM_DV); - checkFetchSources(ALL_FROM_STORED); - checkFetchSources(MIXED_SOURCES); - } catch (Exception e) { - fail("Failed with exception " + e.getMessage()); - } - } - }; + threads[idx] = + new Thread() { + @Override + public void run() { + try { + checkFetchSources(ALL_FROM_DV); + checkFetchSources(ALL_FROM_STORED); + checkFetchSources(MIXED_SOURCES); + } catch (Exception e) { + fail("Failed with exception " + e.getMessage()); + } + } + }; threads[idx].start(); } for (int idx = 0; idx < numThreads; idx++) { @@ -235,9 +239,10 @@ public void run() { @SuppressWarnings({"unchecked", "rawtypes"}) private void checkFetchSources(SolrReturnFields.FIELD_SOURCES source) throws Exception { - String flAll = fieldsHolder.allFields.stream() - .map(RetrieveField::getName) // This will call testField.getName() - .collect(Collectors.joining(",")); + String flAll = + fieldsHolder.allFields.stream() + .map(RetrieveField::getName) // This will call testField.getName() + .collect(Collectors.joining(",")); List toCheck = new ArrayList<>(); String idField = idNotStoredDv + ","; @@ -259,11 +264,13 @@ private void checkFetchSources(SolrReturnFields.FIELD_SOURCES source) throws Exc // MultiValued fields are _always_ read from stored data. toCheck.removeAll(fieldsHolder.multiValuedFields); - // At this point, toCheck should be only singleValued fields. Adding in even a single multiValued field should - // read stuff from stored. 
- String fl = idField + toCheck.stream() - .map(RetrieveField::getName) // This will call testField.getName() - .collect(Collectors.joining(",")); + // At this point, toCheck should be only singleValued fields. Adding in even a single + // multiValued field should read stuff from stored. + String fl = + idField + + toCheck.stream() + .map(RetrieveField::getName) // This will call testField.getName() + .collect(Collectors.joining(",")); // Even a single multiValued and stored field should cause stored fields to be visited. @@ -281,11 +288,15 @@ private void checkFetchSources(SolrReturnFields.FIELD_SOURCES source) throws Exc case 2: List toCheckPlusMv = new ArrayList<>(toCheck); - toCheckPlusMv.add(fieldsHolder.storedMvFields.get(random().nextInt(fieldsHolder.storedMvFields.size()))); - - String flWithMv = idField + toCheckPlusMv.stream() - .map(RetrieveField::getName) // This will call testField.getName() - .collect(Collectors.joining(",")); + toCheckPlusMv.add( + fieldsHolder.storedMvFields.get( + random().nextInt(fieldsHolder.storedMvFields.size()))); + + String flWithMv = + idField + + toCheckPlusMv.stream() + .map(RetrieveField::getName) // This will call testField.getName() + .collect(Collectors.joining(",")); if (source == ALL_FROM_STORED) { check(flWithMv, ALL_FROM_STORED); } else { @@ -302,7 +313,8 @@ private void checkFetchSources(SolrReturnFields.FIELD_SOURCES source) throws Exc // 1> we got all the values from the place we expected. // 2> all the values we expect are actually returned. // - // NOTE: multiValued fields are _NOT_ fetched from docValues by design so we don't have to worry about set semantics + // NOTE: multiValued fields are _NOT_ fetched from docValues by design so we don't have to worry + // about set semantics // private void check(String flIn, SolrReturnFields.FIELD_SOURCES source) throws Exception { Set setDedupe = new HashSet<>(Arrays.asList(flIn.split(","))); @@ -313,13 +325,16 @@ private void check(String flIn, SolrReturnFields.FIELD_SOURCES source) throws Ex SolrQueryRequest req = lrf.makeRequest("q", "*:*", CommonParams.FL, fl); SolrQueryResponse rsp = h.queryAndResponse("", req); - BinaryQueryResponseWriter writer = (BinaryQueryResponseWriter) core.getQueryResponseWriter("javabin"); + BinaryQueryResponseWriter writer = + (BinaryQueryResponseWriter) core.getQueryResponseWriter("javabin"); ByteArrayOutputStream baos = new ByteArrayOutputStream(); writer.write(baos, req, rsp); // This is really the main point! - assertEquals("We didn't get the values from the expected places! ", - source, ((SolrReturnFields) rsp.returnFields).getFieldSources()); + assertEquals( + "We didn't get the values from the expected places! ", + source, + ((SolrReturnFields) rsp.returnFields).getFieldSources()); @SuppressWarnings({"rawtypes"}) NamedList res; @@ -329,10 +344,13 @@ private void check(String flIn, SolrReturnFields.FIELD_SOURCES source) throws Ex SolrDocumentList docs = (SolrDocumentList) res.get("response"); for (Object doc : docs) { SolrDocument sdoc = (SolrDocument) doc; - // Check that every (and only) the fields in the fl param were fetched and the values are as expected. - // Since each doc has the same fields, we don't need to find the special doc. + // Check that every (and only) the fields in the fl param were fetched and the values are as + // expected. Since each doc has the same fields, we don't need to find the special doc. 
String[] requestedFields = fl.split(","); - assertEquals("Should have exactly as many fields as requested, ", sdoc.getFieldNames().size(), requestedFields.length); + assertEquals( + "Should have exactly as many fields as requested, ", + sdoc.getFieldNames().size(), + requestedFields.length); String id = (String) sdoc.get(idNotStoredDv); if (id == null) { @@ -343,12 +361,17 @@ private void check(String flIn, SolrReturnFields.FIELD_SOURCES source) throws Ex Object[] docVals = sdoc.getFieldValues(field).toArray(); RetrieveField testField = fieldsHolder.getTestField(field); List expectedVals = expected.get(field); - assertEquals("Returned fields should have the expected number of entries", docVals.length, expectedVals.size()); + assertEquals( + "Returned fields should have the expected number of entries", + docVals.length, + expectedVals.size()); for (int idx = 0; idx < docVals.length; ++idx) { - assertEquals("Values should be identical and exactly in order. ", expectedVals.get(idx), testField.getValAsString(docVals[idx])); + assertEquals( + "Values should be identical and exactly in order. ", + expectedVals.get(idx), + testField.getValAsString(docVals[idx])); } } - } req.close(); } @@ -382,25 +405,36 @@ class RetrieveFieldType { final String solrTypeClass; static enum TEST_TYPE { - TINT, TLONG, TFLOAT, TDOUBLE, TDATE, - PINT, PLONG, PFLOAT, PDOUBLE, PDATE, - STRING, BOOL + TINT, + TLONG, + TFLOAT, + TDOUBLE, + TDATE, + PINT, + PLONG, + PFLOAT, + PDOUBLE, + PDATE, + STRING, + BOOL } - static final Map solrClassMap = Collections.unmodifiableMap(Stream.of( - new SimpleEntry<>(TEST_TYPE.TINT, "solr.TrieIntField"), - new SimpleEntry<>(TEST_TYPE.TLONG, "solr.TrieLongField"), - new SimpleEntry<>(TEST_TYPE.TFLOAT, "solr.TrieFloatField"), - new SimpleEntry<>(TEST_TYPE.TDOUBLE, "solr.TrieDoubleField"), - new SimpleEntry<>(TEST_TYPE.TDATE, "solr.TrieDateField"), - new SimpleEntry<>(TEST_TYPE.PINT, "solr.IntPointField"), - new SimpleEntry<>(TEST_TYPE.PLONG, "solr.LongPointField"), - new SimpleEntry<>(TEST_TYPE.PFLOAT, "solr.FloatPointField"), - new SimpleEntry<>(TEST_TYPE.PDOUBLE, "solr.DoublePointField"), - new SimpleEntry<>(TEST_TYPE.PDATE, "solr.DatePointField"), - new SimpleEntry<>(TEST_TYPE.STRING, "solr.StrField"), - new SimpleEntry<>(TEST_TYPE.BOOL, "solr.BoolField")) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); + static final Map solrClassMap = + Collections.unmodifiableMap( + Stream.of( + new SimpleEntry<>(TEST_TYPE.TINT, "solr.TrieIntField"), + new SimpleEntry<>(TEST_TYPE.TLONG, "solr.TrieLongField"), + new SimpleEntry<>(TEST_TYPE.TFLOAT, "solr.TrieFloatField"), + new SimpleEntry<>(TEST_TYPE.TDOUBLE, "solr.TrieDoubleField"), + new SimpleEntry<>(TEST_TYPE.TDATE, "solr.TrieDateField"), + new SimpleEntry<>(TEST_TYPE.PINT, "solr.IntPointField"), + new SimpleEntry<>(TEST_TYPE.PLONG, "solr.LongPointField"), + new SimpleEntry<>(TEST_TYPE.PFLOAT, "solr.FloatPointField"), + new SimpleEntry<>(TEST_TYPE.PDOUBLE, "solr.DoublePointField"), + new SimpleEntry<>(TEST_TYPE.PDATE, "solr.DatePointField"), + new SimpleEntry<>(TEST_TYPE.STRING, "solr.StrField"), + new SimpleEntry<>(TEST_TYPE.BOOL, "solr.BoolField")) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); RetrieveFieldType(IndexSchema schema, String name, TEST_TYPE type) { this.name = name; @@ -484,14 +518,13 @@ class RetrieveField { this.type = type; this.schemaField = schema.newField(name, type, opts); this.testFieldType = TestRetrieveFieldsOptimizer.typesHolder.getTestType(type); - } String 
getValAsString(Object val) { FieldType fieldType = schemaField.getType(); - //Why do mutliValued date fields get here as Strings whereas single-valued fields are Dates? + // Why do multiValued date fields get here as Strings whereas single-valued fields are Dates? // Why do BoolFields sometimes get here as "F" or "T"? if (val instanceof String) { if (fieldType instanceof TrieDateField || fieldType instanceof DatePointField) { @@ -587,11 +620,11 @@ List getValsForField() { fail("Found no case for field " + name + " type " + type); break; } - // There are tricky cases with multiValued fields that are sometimes fetched from docValues that obey set - // semantics so be sure we include at least one duplicate in a multValued field sometimes + // There are tricky cases with multiValued fields that are sometimes fetched from docValues that + // obey set semantics so be sure we include at least one duplicate in a multiValued field + // sometimes if (random().nextBoolean() && valsAsStrings.size() > 1) { valsAsStrings.add(valsAsStrings.get(random().nextInt(valsAsStrings.size()))); - } return valsAsStrings; @@ -599,7 +632,7 @@ List getValsForField() { void expectedValsAsStrings(final FieldInfo info, List valsAsStrings) { if (schemaField.stored() || schemaField.multiValued() == false) { - return ; + return; } switch (info.getDocValuesType()) { @@ -623,7 +656,6 @@ void expectedValsAsStrings(final FieldInfo info, List valsAsStrings) { switch (testFieldType.getSolrTypeClass()) { case "solr.TrieIntField": case "solr.TrieLongField": - Collections.sort(valsAsStrings, Comparator.comparingInt(Integer::parseInt)); break; case "solr.IntPointField": @@ -651,4 +683,3 @@ void expectedValsAsStrings(final FieldInfo info, List valsAsStrings) { } } } - diff --git a/solr/core/src/test/org/apache/solr/response/TestSolrQueryResponse.java b/solr/core/src/test/org/apache/solr/response/TestSolrQueryResponse.java index f658158e393..5a34f3b7875 100644 --- a/solr/core/src/test/org/apache/solr/response/TestSolrQueryResponse.java +++ b/solr/core/src/test/org/apache/solr/response/TestSolrQueryResponse.java @@ -21,7 +21,6 @@ import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; - import org.apache.solr.SolrTestCase; import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SimpleOrderedMap; @@ -30,7 +29,7 @@ import org.junit.Test; public class TestSolrQueryResponse extends SolrTestCase { - + @Test public void testName() throws Exception { assertEquals("SolrQueryResponse.NAME value changed", "response", SolrQueryResponse.NAME); @@ -38,14 +37,18 @@ public void testName() throws Exception { @Test public void testResponseHeaderPartialResults() throws Exception { - assertEquals("SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY value changed", - "partialResults", SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY); + assertEquals( + "SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY value changed", + "partialResults", + SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY); } @Test public void testResponseHeaderSegmentTerminatedEarly() throws Exception { - assertEquals("SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY value changed", - "segmentTerminatedEarly", SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY); + assertEquals( + "SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY value changed", + "segmentTerminatedEarly", + SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY); } @Test @@ -60,13 +63,13 @@ public void testValues() 
diff --git a/solr/core/src/test/org/apache/solr/response/TestSolrQueryResponse.java b/solr/core/src/test/org/apache/solr/response/TestSolrQueryResponse.java
index f658158e393..5a34f3b7875 100644
--- a/solr/core/src/test/org/apache/solr/response/TestSolrQueryResponse.java
+++ b/solr/core/src/test/org/apache/solr/response/TestSolrQueryResponse.java
@@ -21,7 +21,6 @@
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Map.Entry;
-
 import org.apache.solr.SolrTestCase;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
@@ -30,7 +29,7 @@
 import org.junit.Test;

 public class TestSolrQueryResponse extends SolrTestCase {
-
+
   @Test
   public void testName() throws Exception {
     assertEquals("SolrQueryResponse.NAME value changed", "response", SolrQueryResponse.NAME);
@@ -38,14 +37,18 @@ public void testName() throws Exception {

   @Test
   public void testResponseHeaderPartialResults() throws Exception {
-    assertEquals("SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY value changed",
-        "partialResults", SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY);
+    assertEquals(
+        "SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY value changed",
+        "partialResults",
+        SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY);
   }

   @Test
   public void testResponseHeaderSegmentTerminatedEarly() throws Exception {
-    assertEquals("SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY value changed",
-        "segmentTerminatedEarly", SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY);
+    assertEquals(
+        "SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY value changed",
+        "segmentTerminatedEarly",
+        SolrQueryResponse.RESPONSE_HEADER_SEGMENT_TERMINATED_EARLY_KEY);
   }

   @Test
@@ -60,13 +63,13 @@ public void testValues() throws Exception {
     response.add("key2", "value2");
     {
       @SuppressWarnings({"unchecked"})
-      final Iterator<Map.Entry<String,Object>> it = response.getValues().iterator();
+      final Iterator<Map.Entry<String, Object>> it = response.getValues().iterator();
       assertTrue(it.hasNext());
-      final Map.Entry<String,Object> entry1 = it.next();
+      final Map.Entry<String, Object> entry1 = it.next();
       assertEquals("key1", entry1.getKey());
       assertEquals("value1", entry1.getValue());
       assertTrue(it.hasNext());
-      final Map.Entry<String,Object> entry2 = it.next();
+      final Map.Entry<String, Object> entry2 = it.next();
       assertEquals("key2", entry2.getKey());
       assertEquals("value2", entry2.getValue());
       assertFalse(it.hasNext());
@@ -77,8 +80,8 @@ public void testValues() throws Exception {
   public void testResponse() throws Exception {
     final SolrQueryResponse response = new SolrQueryResponse();
     assertEquals("response initial value", null, response.getResponse());
-    final Object newValue = (random().nextBoolean()
-        ? (random().nextBoolean() ? "answer" : Integer.valueOf(42)) : null);
+    final Object newValue =
+        (random().nextBoolean() ? (random().nextBoolean() ? "answer" : Integer.valueOf(42)) : null);
     response.addResponse(newValue);
     assertEquals("response new value", newValue, response.getResponse());
   }
@@ -91,9 +94,9 @@ public void testToLog() throws Exception {
     // initially empty, then add something
     response.addToLog("key1", "value1");
     {
-      final Iterator<Map.Entry<String,Object>> it = response.getToLog().iterator();
+      final Iterator<Map.Entry<String, Object>> it = response.getToLog().iterator();
       assertTrue(it.hasNext());
-      final Map.Entry<String,Object> entry1 = it.next();
+      final Map.Entry<String, Object> entry1 = it.next();
       assertEquals("key1", entry1.getKey());
       assertEquals("value1", entry1.getValue());
       assertFalse(it.hasNext());
@@ -103,13 +106,13 @@ public void testToLog() throws Exception {
     // and then add something else
     response.addToLog("key2", "value2");
     {
-      final Iterator<Map.Entry<String,Object>> it = response.getToLog().iterator();
+      final Iterator<Map.Entry<String, Object>> it = response.getToLog().iterator();
       assertTrue(it.hasNext());
-      final Map.Entry<String,Object> entry1 = it.next();
+      final Map.Entry<String, Object> entry1 = it.next();
       assertEquals("key1", entry1.getKey());
       assertEquals("value1", entry1.getValue());
       assertTrue(it.hasNext());
-      final Map.Entry<String,Object> entry2 = it.next();
+      final Map.Entry<String, Object> entry2 = it.next();
       assertEquals("key2", entry2.getKey());
       assertEquals("value2", entry2.getValue());
       assertFalse(it.hasNext());
@@ -122,11 +125,15 @@ public void testToLog() throws Exception {
   public void testReturnFields() throws Exception {
     final SolrQueryResponse response = new SolrQueryResponse();
     final ReturnFields defaultReturnFields = new SolrReturnFields();
-    assertEquals("returnFields initial value", defaultReturnFields.toString(), response.getReturnFields().toString());
-    final SolrReturnFields newValue = new SolrReturnFields((random().nextBoolean()
-        ? SolrReturnFields.SCORE : "value"), null);
+    assertEquals(
+        "returnFields initial value",
+        defaultReturnFields.toString(),
+        response.getReturnFields().toString());
+    final SolrReturnFields newValue =
+        new SolrReturnFields((random().nextBoolean() ? SolrReturnFields.SCORE : "value"), null);
     response.setReturnFields(newValue);
-    assertEquals("returnFields new value", newValue.toString(), response.getReturnFields().toString());
+    assertEquals(
+        "returnFields new value", newValue.toString(), response.getReturnFields().toString());
   }
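
Note: the testAddHttpHeader/testSetHttpHeader/testRemoveHttpHeader hunks that follow pin down a small multimap contract: add appends another value under an existing key, set collapses the key to exactly one value, and remove pops the first remaining value. A rough sketch of that contract (a hypothetical class, not the actual SolrQueryResponse internals):

    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    class HeadersSketch {
      private final Map<String, List<String>> map = new LinkedHashMap<>();

      void add(String k, String v) { // append; earlier values survive
        map.computeIfAbsent(k, x -> new ArrayList<>()).add(v);
      }

      void set(String k, String v) { // replace all values for the key
        List<String> one = new ArrayList<>();
        one.add(v);
        map.put(k, one);
      }

      String removeFirst(String k) { // pop the first value, null when exhausted
        List<String> vals = map.get(k);
        if (vals == null || vals.isEmpty()) return null;
        String v = vals.remove(0);
        if (vals.isEmpty()) map.remove(k);
        return v;
      }
    }
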

   @Test
@@ -134,7 +141,7 @@ public void testAddHttpHeader() {
     SolrQueryResponse response = new SolrQueryResponse();
     Iterator<Entry<String, String>> it = response.httpHeaders();
     assertFalse(it.hasNext());
-
+
     response.addHttpHeader("key1", "value1");
     it = response.httpHeaders();
     assertTrue(it.hasNext());
@@ -142,7 +149,7 @@ public void testAddHttpHeader() {
     assertEquals("key1", entry.getKey());
     assertEquals("value1", entry.getValue());
     assertFalse(it.hasNext());
-
+
     response.addHttpHeader("key1", "value2");
     it = response.httpHeaders();
     assertTrue(it.hasNext());
@@ -154,7 +161,7 @@ public void testAddHttpHeader() {
     assertEquals("key1", entry.getKey());
     assertEquals("value2", entry.getValue());
     assertFalse(it.hasNext());
-
+
     response.addHttpHeader("key2", "value2");
     it = response.httpHeaders();
     assertTrue(it.hasNext());
@@ -171,13 +178,13 @@ public void testAddHttpHeader() {
     assertEquals("value2", entry.getValue());
     assertFalse(it.hasNext());
   }
-
+
   @Test
   public void testSetHttpHeader() {
     SolrQueryResponse response = new SolrQueryResponse();
     Iterator<Entry<String, String>> it = response.httpHeaders();
     assertFalse(it.hasNext());
-
+
     response.setHttpHeader("key1", "value1");
     it = response.httpHeaders();
     assertTrue(it.hasNext());
@@ -185,7 +192,7 @@ public void testSetHttpHeader() {
     assertEquals("key1", entry.getKey());
     assertEquals("value1", entry.getValue());
     assertFalse(it.hasNext());
-
+
     response.setHttpHeader("key1", "value2");
     it = response.httpHeaders();
     assertTrue(it.hasNext());
@@ -193,7 +200,7 @@ public void testSetHttpHeader() {
     assertEquals("key1", entry.getKey());
     assertEquals("value2", entry.getValue());
     assertFalse(it.hasNext());
-
+
     response.addHttpHeader("key1", "value3");
     response.setHttpHeader("key1", "value4");
     it = response.httpHeaders();
@@ -202,7 +209,7 @@ public void testSetHttpHeader() {
     assertEquals("key1", entry.getKey());
     assertEquals("value4", entry.getValue());
     assertFalse(it.hasNext());
-
+
     response.setHttpHeader("key2", "value5");
     it = response.httpHeaders();
     assertTrue(it.hasNext());
@@ -215,7 +222,7 @@ public void testSetHttpHeader() {
     assertEquals("value5", entry.getValue());
     assertFalse(it.hasNext());
   }
-
+
   @Test
   public void testRemoveHttpHeader() {
     SolrQueryResponse response = new SolrQueryResponse();
@@ -225,7 +232,7 @@ public void testRemoveHttpHeader() {
     assertTrue(response.httpHeaders().hasNext());
     assertEquals("value1", response.removeHttpHeader("key1"));
     assertFalse(response.httpHeaders().hasNext());
-
+
     response.addHttpHeader("key1", "value2");
     response.addHttpHeader("key1", "value3");
     response.addHttpHeader("key2", "value4");
@@ -235,9 +242,8 @@ public void testRemoveHttpHeader() {
     assertEquals("value3", response.removeHttpHeader("key1"));
     assertNull(response.removeHttpHeader("key1"));
     assertEquals("key2", response.httpHeaders().next().getKey());
-
   }
-
+
   @Test
   public void testRemoveHttpHeaders() {
     SolrQueryResponse response = new SolrQueryResponse();
@@ -247,22 +253,25 @@ public void testRemoveHttpHeaders() {
     assertTrue(response.httpHeaders().hasNext());
     assertEquals(Arrays.asList("value1"), response.removeHttpHeaders("key1"));
     assertFalse(response.httpHeaders().hasNext());
-
+
     response.addHttpHeader("key1", "value2");
     response.addHttpHeader("key1", "value3");
     response.addHttpHeader("key2", "value4");
assertTrue(response.httpHeaders().hasNext()); - assertEquals(Arrays.asList(new String[]{"value2", "value3"}), response.removeHttpHeaders("key1")); + assertEquals( + Arrays.asList(new String[] {"value2", "value3"}), response.removeHttpHeaders("key1")); assertNull(response.removeHttpHeaders("key1")); assertEquals("key2", response.httpHeaders().next().getKey()); } - + @Test public void testException() throws Exception { final SolrQueryResponse response = new SolrQueryResponse(); assertEquals("exception initial value", null, response.getException()); - final Exception newValue = (random().nextBoolean() - ? (random().nextBoolean() ? new ArithmeticException() : new IOException()) : null); + final Exception newValue = + (random().nextBoolean() + ? (random().nextBoolean() ? new ArithmeticException() : new IOException()) + : null); response.setException(newValue); assertEquals("exception new value", newValue, response.getException()); } @@ -311,10 +320,5 @@ public void testConvertToHEADStyleResponse() throws Exception { assertEquals("key1", entry.getKey()); assertEquals("value1", entry.getValue()); assertFalse(it.hasNext()); - - - - } - } diff --git a/solr/core/src/test/org/apache/solr/response/transform/TestChildDocTransformer.java b/solr/core/src/test/org/apache/solr/response/transform/TestChildDocTransformer.java index 48f2816d22e..5ce5202aa27 100644 --- a/solr/core/src/test/org/apache/solr/response/transform/TestChildDocTransformer.java +++ b/solr/core/src/test/org/apache/solr/response/transform/TestChildDocTransformer.java @@ -16,9 +16,10 @@ */ package org.apache.solr.response.transform; +import static org.hamcrest.core.StringContains.containsString; + import java.util.Collection; import java.util.Iterator; - import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrDocument; @@ -30,8 +31,6 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.hamcrest.core.StringContains.containsString; - public class TestChildDocTransformer extends SolrTestCaseJ4 { private static String ID_FIELD = "id"; @@ -39,7 +38,7 @@ public class TestChildDocTransformer extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema-root.xml"); // *not* the "nest" schema + initCore("solrconfig.xml", "schema-root.xml"); // *not* the "nest" schema } @After @@ -50,13 +49,13 @@ public void cleanup() throws Exception { @Test public void testParentFilter() throws Exception { - for(int i=0; i { - h.query(req("q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "id, subject,[child parentFilter=\"subject:bleh\" childFilter=\"title:bar\" limit=2]")); - }); + assertQ( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "id, subject,[child childFilter=\"title:foo\"]"), + test2); + + assertQ( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "id, subject,[child childFilter=\"title:bar\" limit=2]"), + test3); + + SolrException e = + expectThrows( + SolrException.class, + () -> { + h.query( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "id, subject,[child parentFilter=\"subject:bleh\" childFilter=\"title:bar\" limit=2]")); + }); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); - assertThat(e.getMessage(), - containsString("Parent filter 'QueryBitSetProducer(subject:bleh)' doesn't match any parent documents")); - - e = expectThrows(SolrException.class, () -> { - h.query(req("q", "*:*", "fq", 
"subject:\"parentDocument\" ", - "fl", "id, subject,[child parentFilter=e childFilter=\"title:bar\" limit=2]")); - }); + assertThat( + e.getMessage(), + containsString( + "Parent filter 'QueryBitSetProducer(subject:bleh)' doesn't match any parent documents")); + + e = + expectThrows( + SolrException.class, + () -> { + h.query( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "id, subject,[child parentFilter=e childFilter=\"title:bar\" limit=2]")); + }); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); - assertThat(e.getMessage(), - containsString("Parent filter 'QueryBitSetProducer(text:e)' doesn't match any parent documents")); - - e = expectThrows(SolrException.class, () -> { - h.query(req("q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "id, subject,[child parentFilter=\"\"]")); - }); + assertThat( + e.getMessage(), + containsString( + "Parent filter 'QueryBitSetProducer(text:e)' doesn't match any parent documents")); + + e = + expectThrows( + SolrException.class, + () -> { + h.query( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "id, subject,[child parentFilter=\"\"]")); + }); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); assertThat(e.getMessage(), containsString("Invalid Parent filter '', resolves to null")); } - + private void testSubQueryXML() { String test1[]; { - final String subqueryPath = "/result[@name='children'][@numFound='6']"; - test1 = new String[] { - "//*[@numFound='1']", - "/response/result/doc[1]" + subqueryPath + "/doc[1]/str[@name='id']='2'" , - "/response/result/doc[1]" + subqueryPath + "/doc[2]/str[@name='id']='3'" , - "/response/result/doc[1]" + subqueryPath + "/doc[3]/str[@name='id']='4'" , - "/response/result/doc[1]" + subqueryPath + "/doc[4]/str[@name='id']='5'" , - "/response/result/doc[1]" + subqueryPath + "/doc[5]/str[@name='id']='6'" , - "/response/result/doc[1]" + subqueryPath + "/doc[6]/str[@name='id']='7'"}; + final String subqueryPath = "/result[@name='children'][@numFound='6']"; + test1 = + new String[] { + "//*[@numFound='1']", + "/response/result/doc[1]" + subqueryPath + "/doc[1]/str[@name='id']='2'", + "/response/result/doc[1]" + subqueryPath + "/doc[2]/str[@name='id']='3'", + "/response/result/doc[1]" + subqueryPath + "/doc[3]/str[@name='id']='4'", + "/response/result/doc[1]" + subqueryPath + "/doc[4]/str[@name='id']='5'", + "/response/result/doc[1]" + subqueryPath + "/doc[5]/str[@name='id']='6'", + "/response/result/doc[1]" + subqueryPath + "/doc[6]/str[@name='id']='7'" + }; } - assertQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "*,children:[subquery]", - "children.q","{!child of=subject:parentDocument}{!terms f=id v=$row.id}", - "children.rows","10"), + assertQ( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "*,children:[subquery]", + "children.q", + "{!child of=subject:parentDocument}{!terms f=id v=$row.id}", + "children.rows", + "10"), test1); - String test2[] = new String[] { - "//*[@numFound='1']", - "/response/result/doc[1]/result[@name='children'][@numFound='3']/doc[1]/str[@name='id']='2'" , - "/response/result/doc[1]/result[@name='children'][@numFound='3']/doc[2]/str[@name='id']='4'" , - "/response/result/doc[1]/result[@name='children'][@numFound='3']/doc[3]/str[@name='id']='6'" }; - - assertQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "*,children:[subquery]", - "children.q","{!child of=subject:parentDocument}{!terms f=id v=$row.id}", - "children.rows","10", - 
"children.fq","title:foo" - ), test2); - - - String test3[] = new String[] { - "//*[@numFound='1']", - "/response/result/doc[1]/result[@name='children'][@numFound='3']/doc[1]/str[@name='id']='3'" , - "/response/result/doc[1]/result[@name='children'][@numFound='3']/doc[2]/str[@name='id']='5'" }; - - - assertQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "*,children:[subquery]", - "children.q","{!child of=subject:parentDocument}{!terms f=id v=$row.id}", - "children.rows","2", - "children.fq","title:bar", - "children.sort","_docid_ asc" - ), test3); + String test2[] = + new String[] { + "//*[@numFound='1']", + "/response/result/doc[1]/result[@name='children'][@numFound='3']/doc[1]/str[@name='id']='2'", + "/response/result/doc[1]/result[@name='children'][@numFound='3']/doc[2]/str[@name='id']='4'", + "/response/result/doc[1]/result[@name='children'][@numFound='3']/doc[3]/str[@name='id']='6'" + }; + + assertQ( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "*,children:[subquery]", + "children.q", + "{!child of=subject:parentDocument}{!terms f=id v=$row.id}", + "children.rows", + "10", + "children.fq", + "title:foo"), + test2); + + String test3[] = + new String[] { + "//*[@numFound='1']", + "/response/result/doc[1]/result[@name='children'][@numFound='3']/doc[1]/str[@name='id']='3'", + "/response/result/doc[1]/result[@name='children'][@numFound='3']/doc[2]/str[@name='id']='5'" + }; + + assertQ( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "*,children:[subquery]", + "children.q", + "{!child of=subject:parentDocument}{!terms f=id v=$row.id}", + "children.rows", + "2", + "children.fq", + "title:bar", + "children.sort", + "_docid_ asc"), + test3); } - + private void testSubQueryJSON() throws Exception { - String[] test1 = new String[] { - "/response/docs/[0]/children/docs/[0]/id=='2'", - "/response/docs/[0]/children/docs/[1]/id=='3'", - "/response/docs/[0]/children/docs/[2]/id=='4'", - "/response/docs/[0]/children/docs/[3]/id=='5'", - "/response/docs/[0]/children/docs/[4]/id=='6'", - "/response/docs/[0]/children/docs/[5]/id=='7'" - }; - - String[] test2 = new String[] { - "/response/docs/[0]/children/docs/[0]/id=='2'", - "/response/docs/[0]/children/docs/[1]/id=='4'", - "/response/docs/[0]/children/docs/[2]/id=='6'" - }; - - String[] test3 = new String[] { - "/response/docs/[0]/children/docs/[0]/id=='3'", - "/response/docs/[0]/children/docs/[1]/id=='5'" - }; - - - assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "*,children:[subquery]", - "children.q","{!child of=subject:parentDocument}{!terms f=id v=$row.id}", - "children.rows","10"), test1); - - assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "*,children:[subquery]", - "children.q","{!child of=subject:parentDocument}{!terms f=id v=$row.id}", - "children.rows","10", - "children.fq","title:foo"), test2); - - assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "*,children:[subquery]", - "children.q","{!child of=subject:parentDocument}{!terms f=id v=$row.id}", - "children.rows","2", - "children.fq","title:bar", - "children.sort","_docid_ asc"), test3); + String[] test1 = + new String[] { + "/response/docs/[0]/children/docs/[0]/id=='2'", + "/response/docs/[0]/children/docs/[1]/id=='3'", + "/response/docs/[0]/children/docs/[2]/id=='4'", + "/response/docs/[0]/children/docs/[3]/id=='5'", + "/response/docs/[0]/children/docs/[4]/id=='6'", + "/response/docs/[0]/children/docs/[5]/id=='7'" + }; + + String[] test2 = + new String[] { 
+ "/response/docs/[0]/children/docs/[0]/id=='2'", + "/response/docs/[0]/children/docs/[1]/id=='4'", + "/response/docs/[0]/children/docs/[2]/id=='6'" + }; + + String[] test3 = + new String[] { + "/response/docs/[0]/children/docs/[0]/id=='3'", + "/response/docs/[0]/children/docs/[1]/id=='5'" + }; + + assertJQ( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "*,children:[subquery]", + "children.q", + "{!child of=subject:parentDocument}{!terms f=id v=$row.id}", + "children.rows", + "10"), + test1); + + assertJQ( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "*,children:[subquery]", + "children.q", + "{!child of=subject:parentDocument}{!terms f=id v=$row.id}", + "children.rows", + "10", + "children.fq", + "title:foo"), + test2); + + assertJQ( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "*,children:[subquery]", + "children.q", + "{!child of=subject:parentDocument}{!terms f=id v=$row.id}", + "children.rows", + "2", + "children.fq", + "title:bar", + "children.sort", + "_docid_ asc"), + test3); } private void testChildDoctransformerJSON() throws Exception { - String[] test1 = new String[] { - "/response/docs/[0]/_childDocuments_/[0]/id=='2'", - "/response/docs/[0]/_childDocuments_/[1]/id=='3'", - "/response/docs/[0]/_childDocuments_/[2]/id=='4'", - "/response/docs/[0]/_childDocuments_/[3]/id=='5'", - "/response/docs/[0]/_childDocuments_/[4]/id=='6'", - "/response/docs/[0]/_childDocuments_/[5]/id=='7'" - }; - - String[] test2 = new String[] { - "/response/docs/[0]/_childDocuments_/[0]/id=='2'", - "/response/docs/[0]/_childDocuments_/[1]/id=='4'", - "/response/docs/[0]/_childDocuments_/[2]/id=='6'" - }; - - String[] test3 = new String[] { - "/response/docs/[0]/_childDocuments_/[0]/id=='3'", - "/response/docs/[0]/_childDocuments_/[1]/id=='5'" - }; - - assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "*,[child]"), test1); - - assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "id, subject,[child childFilter=\"title:foo\"]"), test2); - - assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "id, subject,[child childFilter=\"title:bar\" limit=3]"), test3); + String[] test1 = + new String[] { + "/response/docs/[0]/_childDocuments_/[0]/id=='2'", + "/response/docs/[0]/_childDocuments_/[1]/id=='3'", + "/response/docs/[0]/_childDocuments_/[2]/id=='4'", + "/response/docs/[0]/_childDocuments_/[3]/id=='5'", + "/response/docs/[0]/_childDocuments_/[4]/id=='6'", + "/response/docs/[0]/_childDocuments_/[5]/id=='7'" + }; + + String[] test2 = + new String[] { + "/response/docs/[0]/_childDocuments_/[0]/id=='2'", + "/response/docs/[0]/_childDocuments_/[1]/id=='4'", + "/response/docs/[0]/_childDocuments_/[2]/id=='6'" + }; + + String[] test3 = + new String[] { + "/response/docs/[0]/_childDocuments_/[0]/id=='3'", + "/response/docs/[0]/_childDocuments_/[1]/id=='5'" + }; + + assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", "fl", "*,[child]"), test1); + + assertJQ( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "id, subject,[child childFilter=\"title:foo\"]"), + test2); + + assertJQ( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "id, subject,[child childFilter=\"title:bar\" limit=3]"), + test3); } private void testChildDocNonStoredDVFields() throws Exception { - String[] test1 = new String[] { - "/response/docs/[0]/_childDocuments_/[0]/intDvoDefault==42", - "/response/docs/[0]/_childDocuments_/[1]/intDvoDefault==42", - 
"/response/docs/[0]/_childDocuments_/[2]/intDvoDefault==42", - "/response/docs/[0]/_childDocuments_/[3]/intDvoDefault==42", - "/response/docs/[0]/_childDocuments_/[4]/intDvoDefault==42", - "/response/docs/[0]/_childDocuments_/[5]/intDvoDefault==42" - }; - - String[] test2 = new String[] { - "/response/docs/[0]/_childDocuments_/[0]/intDvoDefault==42", - "/response/docs/[0]/_childDocuments_/[1]/intDvoDefault==42", - "/response/docs/[0]/_childDocuments_/[2]/intDvoDefault==42" - }; - - String[] test3 = new String[] { - "/response/docs/[0]/_childDocuments_/[0]/intDvoDefault==42", - "/response/docs/[0]/_childDocuments_/[1]/intDvoDefault==42" - }; - - assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "*,[child]"), test1); - - assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "intDvoDefault, subject,[child childFilter=\"title:foo\"]"), test2); - - assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "intDvoDefault, subject,[child childFilter=\"title:bar\" limit=2]"), test3); - + String[] test1 = + new String[] { + "/response/docs/[0]/_childDocuments_/[0]/intDvoDefault==42", + "/response/docs/[0]/_childDocuments_/[1]/intDvoDefault==42", + "/response/docs/[0]/_childDocuments_/[2]/intDvoDefault==42", + "/response/docs/[0]/_childDocuments_/[3]/intDvoDefault==42", + "/response/docs/[0]/_childDocuments_/[4]/intDvoDefault==42", + "/response/docs/[0]/_childDocuments_/[5]/intDvoDefault==42" + }; + + String[] test2 = + new String[] { + "/response/docs/[0]/_childDocuments_/[0]/intDvoDefault==42", + "/response/docs/[0]/_childDocuments_/[1]/intDvoDefault==42", + "/response/docs/[0]/_childDocuments_/[2]/intDvoDefault==42" + }; + + String[] test3 = + new String[] { + "/response/docs/[0]/_childDocuments_/[0]/intDvoDefault==42", + "/response/docs/[0]/_childDocuments_/[1]/intDvoDefault==42" + }; + + assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", "fl", "*,[child]"), test1); + + assertJQ( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "intDvoDefault, subject,[child childFilter=\"title:foo\"]"), + test2); + + assertJQ( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "intDvoDefault, subject,[child childFilter=\"title:bar\" limit=2]"), + test3); } private void testChildReturnFields() throws Exception { - assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "*,[child fl=\"intDvoDefault,child_fl:[value v='child_fl_test']\"]"), + assertJQ( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "*,[child fl=\"intDvoDefault,child_fl:[value v='child_fl_test']\"]"), "/response/docs/[0]/intDefault==42", "/response/docs/[0]/_childDocuments_/[0]/intDvoDefault==42", "/response/docs/[0]/_childDocuments_/[0]/child_fl=='child_fl_test'"); - try(SolrQueryRequest req = req("q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "intDefault,[child fl=\"intDvoDefault, [docid]\"]")) { - BasicResultContext res = (BasicResultContext) h.queryAndResponse("/select", req).getResponse(); + try (SolrQueryRequest req = + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "intDefault,[child fl=\"intDvoDefault, [docid]\"]")) { + BasicResultContext res = + (BasicResultContext) h.queryAndResponse("/select", req).getResponse(); Iterator docsStreamer = res.getProcessedDocuments(); while (docsStreamer.hasNext()) { SolrDocument doc = docsStreamer.next(); - assertFalse("root docs should not contain fields specified in child return fields", doc.containsKey("intDvoDefault")); - 
assertTrue("root docs should contain fields specified in query return fields", doc.containsKey("intDefault")); + assertFalse( + "root docs should not contain fields specified in child return fields", + doc.containsKey("intDvoDefault")); + assertTrue( + "root docs should contain fields specified in query return fields", + doc.containsKey("intDefault")); Collection childDocs = doc.getChildDocuments(); - for(SolrDocument childDoc: childDocs) { + for (SolrDocument childDoc : childDocs) { assertEquals("child doc should only have 2 keys", 2, childDoc.keySet().size()); - assertTrue("child docs should contain fields specified in child return fields", childDoc.containsKey("intDvoDefault")); - assertEquals("child docs should contain fields specified in child return fields", - 42, childDoc.getFieldValue("intDvoDefault")); - assertTrue("child docs should contain fields specified in child return fields", childDoc.containsKey("[docid]")); + assertTrue( + "child docs should contain fields specified in child return fields", + childDoc.containsKey("intDvoDefault")); + assertEquals( + "child docs should contain fields specified in child return fields", + 42, + childDoc.getFieldValue("intDvoDefault")); + assertTrue( + "child docs should contain fields specified in child return fields", + childDoc.containsKey("[docid]")); } } } @@ -315,10 +481,10 @@ private void createSimpleIndex() { SolrInputDocument parentDocument = new SolrInputDocument(); parentDocument.addField(ID_FIELD, "1"); parentDocument.addField("subject", "parentDocument"); - for(int i=0; i< 6; i++) { + for (int i = 0; i < 6; i++) { SolrInputDocument childDocument = new SolrInputDocument(); - childDocument.addField(ID_FIELD, Integer.toString(i+2)); - if(i%2==0) { + childDocument.addField(ID_FIELD, Integer.toString(i + 2)); + if (i % 2 == 0) { childDocument.addField("title", "foo"); } else { childDocument.addField("title", "bar"); @@ -336,14 +502,13 @@ private void createSimpleIndex() { assertQ(req("q", "*:*"), "//*[@numFound='" + 7 + "']"); } - private static void createIndex(String[] titleVals) { String[] parentIDS = new String[] {"1", "4"}; String[] childDocIDS = new String[] {"2", "5"}; String[] grandChildIDS = new String[] {"3", "6"}; - for(int i=0; i< parentIDS.length; i++) { + for (int i = 0; i < parentIDS.length; i++) { SolrInputDocument parentDocument = new SolrInputDocument(); parentDocument.addField(ID_FIELD, parentIDS[i]); parentDocument.addField("subject", "parentDocument"); @@ -372,134 +537,193 @@ private static void createIndex(String[] titleVals) { } assertU(commit()); - assertQ(req("q", "*:*"), "//*[@numFound='" + (parentIDS.length + childDocIDS.length + grandChildIDS.length) + "']"); - + assertQ( + req("q", "*:*"), + "//*[@numFound='" + (parentIDS.length + childDocIDS.length + grandChildIDS.length) + "']"); } private void testParentFilterJSON() throws Exception { - String[] tests = new String[] { - "/response/docs/[0]/id=='1'", - "/response/docs/[0]/_childDocuments_/[0]/id=='2'", - "/response/docs/[0]/_childDocuments_/[0]/cat=='childDocument'", - "/response/docs/[0]/_childDocuments_/[0]/title=='" + titleVals[0] + "'", - "/response/docs/[1]/id=='4'", - "/response/docs/[1]/_childDocuments_/[0]/id=='5'", - "/response/docs/[1]/_childDocuments_/[0]/cat=='childDocument'", - "/response/docs/[1]/_childDocuments_/[0]/title=='" + titleVals[1] + "'" - }; - - - assertJQ(req("q", "*:*", - "sort", "id asc", - "fq", "subject:\"parentDocument\" ", - "fl", "*,[child childFilter='cat:childDocument' parentFilter=\"subject:parentDocument\"]"), - 
tests); - - assertJQ(req("q", "*:*", - "sort", "id asc", - "fq", "subject:\"parentDocument\" ", - "fl", "id, cat, title, [child childFilter='cat:childDocument' parentFilter=\"subject:parentDocument\"]"), - tests); + String[] tests = + new String[] { + "/response/docs/[0]/id=='1'", + "/response/docs/[0]/_childDocuments_/[0]/id=='2'", + "/response/docs/[0]/_childDocuments_/[0]/cat=='childDocument'", + "/response/docs/[0]/_childDocuments_/[0]/title=='" + titleVals[0] + "'", + "/response/docs/[1]/id=='4'", + "/response/docs/[1]/_childDocuments_/[0]/id=='5'", + "/response/docs/[1]/_childDocuments_/[0]/cat=='childDocument'", + "/response/docs/[1]/_childDocuments_/[0]/title=='" + titleVals[1] + "'" + }; + + assertJQ( + req( + "q", "*:*", + "sort", "id asc", + "fq", "subject:\"parentDocument\" ", + "fl", + "*,[child childFilter='cat:childDocument' parentFilter=\"subject:parentDocument\"]"), + tests); + + assertJQ( + req( + "q", "*:*", + "sort", "id asc", + "fq", "subject:\"parentDocument\" ", + "fl", + "id, cat, title, [child childFilter='cat:childDocument' parentFilter=\"subject:parentDocument\"]"), + tests); // shows if parentFilter matches all docs, then there are effectively no child docs - assertJQ(req("q", "*:*", - "sort", "id asc", - "fq", "subject:\"parentDocument\" ", - "fl", "id,[child childFilter='cat:childDocument' parentFilter=\"*:*\"]"), - "/response==" + - "{'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'}]}"); - + assertJQ( + req( + "q", + "*:*", + "sort", + "id asc", + "fq", + "subject:\"parentDocument\" ", + "fl", + "id,[child childFilter='cat:childDocument' parentFilter=\"*:*\"]"), + "/response==" + + "{'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'}]}"); } - + private void testSubQueryParentFilterJSON() throws Exception { - String[] tests = new String[] { - "/response/docs/[0]/id=='1'", - "/response/docs/[0]/children/docs/[0]/id=='2'", - "/response/docs/[0]/children/docs/[0]/cat=='childDocument'", - "/response/docs/[0]/children/docs/[0]/title=='" + titleVals[0] + "'", - "/response/docs/[1]/id=='4'", - "/response/docs/[1]/children/docs/[0]/id=='5'", - "/response/docs/[1]/children/docs/[0]/cat=='childDocument'", - "/response/docs/[1]/children/docs/[0]/title=='" + titleVals[1] + "'" - }; - - - assertJQ(req( - "q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "*,children:[subquery]", - "sort", "id asc", - "children.q","{!child of=subject:parentDocument}{!terms f=id v=$row.id}", - "children.fq","cat:childDocument", - "children.sort","_docid_ asc"), - tests); - assertJQ(req( - "q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "id,children:[subquery]", - "sort", "id asc", - "children.q","{!child of=subject:parentDocument}{!terms f=id v=$row.id}", - "children.fq","cat:childDocument", - "children.sort","_docid_ asc"), - tests); + String[] tests = + new String[] { + "/response/docs/[0]/id=='1'", + "/response/docs/[0]/children/docs/[0]/id=='2'", + "/response/docs/[0]/children/docs/[0]/cat=='childDocument'", + "/response/docs/[0]/children/docs/[0]/title=='" + titleVals[0] + "'", + "/response/docs/[1]/id=='4'", + "/response/docs/[1]/children/docs/[0]/id=='5'", + "/response/docs/[1]/children/docs/[0]/cat=='childDocument'", + "/response/docs/[1]/children/docs/[0]/title=='" + titleVals[1] + "'" + }; + + assertJQ( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "*,children:[subquery]", + "sort", + "id asc", + "children.q", + "{!child of=subject:parentDocument}{!terms f=id v=$row.id}", + "children.fq", + 
"cat:childDocument", + "children.sort", + "_docid_ asc"), + tests); + assertJQ( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "id,children:[subquery]", + "sort", + "id asc", + "children.q", + "{!child of=subject:parentDocument}{!terms f=id v=$row.id}", + "children.fq", + "cat:childDocument", + "children.sort", + "_docid_ asc"), + tests); } private void testParentFilterXML() { - String tests[] = new String[] { - "//*[@numFound='2']", - "/response/result/doc[1]/str[@name='id']='1'" , - "/response/result/doc[1]/doc[1]/str[@name='id']='2'" , - "/response/result/doc[1]/doc[1]/str[@name='cat']='childDocument'" , - "/response/result/doc[1]/doc[1]/str[@name='title']='" + titleVals[0] + "'" , - "/response/result/doc[2]/str[@name='id']='4'" , - "/response/result/doc[2]/doc[1]/str[@name='id']='5'", - "/response/result/doc[2]/doc[1]/str[@name='cat']='childDocument'", - "/response/result/doc[2]/doc[1]/str[@name='title']='" + titleVals[1] + "'"}; - - assertQ(req("q", "*:*", - "sort", "id asc", - "fq", "subject:\"parentDocument\" ", - "fl", "*,[child childFilter='cat:childDocument']"), - tests); - - assertQ(req("q", "*:*", - "sort", "id asc", - "fq", "subject:\"parentDocument\" ", - "fl", "id, cat, title, [child childFilter='cat:childDocument']"), - tests); + String tests[] = + new String[] { + "//*[@numFound='2']", + "/response/result/doc[1]/str[@name='id']='1'", + "/response/result/doc[1]/doc[1]/str[@name='id']='2'", + "/response/result/doc[1]/doc[1]/str[@name='cat']='childDocument'", + "/response/result/doc[1]/doc[1]/str[@name='title']='" + titleVals[0] + "'", + "/response/result/doc[2]/str[@name='id']='4'", + "/response/result/doc[2]/doc[1]/str[@name='id']='5'", + "/response/result/doc[2]/doc[1]/str[@name='cat']='childDocument'", + "/response/result/doc[2]/doc[1]/str[@name='title']='" + titleVals[1] + "'" + }; + + assertQ( + req( + "q", "*:*", + "sort", "id asc", + "fq", "subject:\"parentDocument\" ", + "fl", "*,[child childFilter='cat:childDocument']"), + tests); + + assertQ( + req( + "q", "*:*", + "sort", "id asc", + "fq", "subject:\"parentDocument\" ", + "fl", "id, cat, title, [child childFilter='cat:childDocument']"), + tests); } private void testSubQueryParentFilterXML() { - String tests[] = new String[] { - "//*[@numFound='2']", - "/response/result/doc[1]/str[@name='id']='1'" , - "/response/result/doc[1]/result[@name='children'][@numFound=1]/doc[1]/str[@name='id']='2'" , - "/response/result/doc[1]/result[@name='children'][@numFound=1]/doc[1]/str[@name='cat']='childDocument'" , - "/response/result/doc[1]/result[@name='children'][@numFound=1]/doc[1]/str[@name='title']='" + titleVals[0] + "'" , - "/response/result/doc[2]/str[@name='id']='4'" , - "/response/result/doc[2]/result[@name='children'][@numFound=1]/doc[1]/str[@name='id']='5'", - "/response/result/doc[2]/result[@name='children'][@numFound=1]/doc[1]/str[@name='cat']='childDocument'", - "/response/result/doc[2]/result[@name='children'][@numFound=1]/doc[1]/str[@name='title']='" + titleVals[1] + "'"}; - - assertQ(req( - "q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "*,children:[subquery]", - "sort", "id asc", - "children.q","{!child of=subject:parentDocument}{!terms f=id v=$row.id}", - "children.fq","cat:childDocument", - "children.sort","_docid_ asc" - ), - tests); - - assertQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", - "fl", "id,children:[subquery]", - "sort", "id asc", - "children.q","{!child of=subject:parentDocument}{!terms f=id v=$row.id}", - "children.fq","cat:childDocument", - 
"children.sort","_docid_ asc"), - tests); + String tests[] = + new String[] { + "//*[@numFound='2']", + "/response/result/doc[1]/str[@name='id']='1'", + "/response/result/doc[1]/result[@name='children'][@numFound=1]/doc[1]/str[@name='id']='2'", + "/response/result/doc[1]/result[@name='children'][@numFound=1]/doc[1]/str[@name='cat']='childDocument'", + "/response/result/doc[1]/result[@name='children'][@numFound=1]/doc[1]/str[@name='title']='" + + titleVals[0] + + "'", + "/response/result/doc[2]/str[@name='id']='4'", + "/response/result/doc[2]/result[@name='children'][@numFound=1]/doc[1]/str[@name='id']='5'", + "/response/result/doc[2]/result[@name='children'][@numFound=1]/doc[1]/str[@name='cat']='childDocument'", + "/response/result/doc[2]/result[@name='children'][@numFound=1]/doc[1]/str[@name='title']='" + + titleVals[1] + + "'" + }; + + assertQ( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "*,children:[subquery]", + "sort", + "id asc", + "children.q", + "{!child of=subject:parentDocument}{!terms f=id v=$row.id}", + "children.fq", + "cat:childDocument", + "children.sort", + "_docid_ asc"), + tests); + + assertQ( + req( + "q", + "*:*", + "fq", + "subject:\"parentDocument\" ", + "fl", + "id,children:[subquery]", + "sort", + "id asc", + "children.q", + "{!child of=subject:parentDocument}{!terms f=id v=$row.id}", + "children.fq", + "cat:childDocument", + "children.sort", + "_docid_ asc"), + tests); } - } diff --git a/solr/core/src/test/org/apache/solr/response/transform/TestChildDocTransformerHierarchy.java b/solr/core/src/test/org/apache/solr/response/transform/TestChildDocTransformerHierarchy.java index f1355238ef2..04345c1c79e 100644 --- a/solr/core/src/test/org/apache/solr/response/transform/TestChildDocTransformerHierarchy.java +++ b/solr/core/src/test/org/apache/solr/response/transform/TestChildDocTransformerHierarchy.java @@ -16,13 +16,12 @@ */ package org.apache.solr.response.transform; +import com.google.common.collect.Iterables; import java.util.Collection; import java.util.Iterator; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; - -import com.google.common.collect.Iterables; import org.apache.lucene.index.IndexableField; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrDocument; @@ -44,19 +43,22 @@ public class TestChildDocTransformerHierarchy extends SolrTestCaseJ4 { private static final String[] fieldsToRemove = {"_nest_parent_", "_nest_path_", "_root_"}; private static final int sumOfDocsPerNestedDocument = 8; private static final int numberOfDocsPerNestedTest = 10; - private static final String fqToExcludeNonTestedDocs = "{!frange l=0}id_i"; // filter documents that were created for random segments to ensure the transformer works with multiple segments. + // filter documents that were created for random segments to ensure the transformer works with + // multiple segments. 
+ private static final String fqToExcludeNonTestedDocs = "{!frange l=0}id_i"; @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig-minimal.xml", "schema-nest.xml"); // use "nest" schema - if(random().nextBoolean()) { - idCounter.set(-100); // start docIDs at -100 for these random docs we don't care about (all less than 0) + if (random().nextBoolean()) { + // start docIDs at -100 for these random docs we don't care about (all less than 0) + idCounter.set(-100); // create random segments final int numOfDocs = 10; - for(int i = 0; i < numOfDocs; ++i) { + for (int i = 0; i < numOfDocs; ++i) { updateJ(generateDocHierarchy(i), null); - if(random().nextBoolean()) { + if (random().nextBoolean()) { assertU(commit()); } } @@ -79,46 +81,65 @@ public void after() throws Exception { public void testNonTrivialChildFilter() throws Exception { // just check we don't throw an exception. This used to throw before SOLR-15152 assertQ( - req( - "q", - "*:*", - "sort", - "id asc", - "fl", - "*, _nest_path_, [child childFilter='type_s:Regular OR type_s:Chocolate']", - "fq", - fqToExcludeNonTestedDocs)); + req( + "q", + "*:*", + "sort", + "id asc", + "fl", + "*, _nest_path_, [child childFilter='type_s:Regular OR type_s:Chocolate']", + "fq", + fqToExcludeNonTestedDocs)); } @Test public void testParentFilterJSON() throws Exception { indexSampleData(numberOfDocsPerNestedTest); - String[] tests = new String[] { - "/response/docs/[0]/type_s==donut", - "/response/docs/[0]/toppings/[0]/type_s==Regular", - "/response/docs/[0]/toppings/[1]/type_s==Chocolate", - "/response/docs/[0]/toppings/[0]/ingredients/[0]/name_s==cocoa", - "/response/docs/[0]/toppings/[1]/ingredients/[1]/name_s==cocoa", - "/response/docs/[0]/lonely/test_s==testing", - "/response/docs/[0]/lonely/lonelyGrandChild/test2_s==secondTest", - }; - - try(SolrQueryRequest req = req("q", "type_s:donut", "sort", "id asc", - "fl", "*, _nest_path_, [child]", "fq", fqToExcludeNonTestedDocs)) { - BasicResultContext res = (BasicResultContext) h.queryAndResponse("/select", req).getResponse(); + String[] tests = + new String[] { + "/response/docs/[0]/type_s==donut", + "/response/docs/[0]/toppings/[0]/type_s==Regular", + "/response/docs/[0]/toppings/[1]/type_s==Chocolate", + "/response/docs/[0]/toppings/[0]/ingredients/[0]/name_s==cocoa", + "/response/docs/[0]/toppings/[1]/ingredients/[1]/name_s==cocoa", + "/response/docs/[0]/lonely/test_s==testing", + "/response/docs/[0]/lonely/lonelyGrandChild/test2_s==secondTest", + }; + + try (SolrQueryRequest req = + req( + "q", + "type_s:donut", + "sort", + "id asc", + "fl", + "*, _nest_path_, [child]", + "fq", + fqToExcludeNonTestedDocs)) { + BasicResultContext res = + (BasicResultContext) h.queryAndResponse("/select", req).getResponse(); Iterator docsStreamer = res.getProcessedDocuments(); while (docsStreamer.hasNext()) { SolrDocument doc = docsStreamer.next(); cleanSolrDocumentFields(doc); int currDocId = Integer.parseInt((doc.getFirstValue("id")).toString()); - assertEquals("queried docs are not equal to expected output for id: " + currDocId, fullNestedDocTemplate(currDocId), doc.toString()); + assertEquals( + "queried docs are not equal to expected output for id: " + currDocId, + fullNestedDocTemplate(currDocId), + doc.toString()); } } - assertJQ(req("q", "type_s:donut", - "sort", "id asc", - "fl", "*, _nest_path_, [child]", - "fq", fqToExcludeNonTestedDocs), + assertJQ( + req( + "q", + "type_s:donut", + "sort", + "id asc", + "fl", + "*, _nest_path_, [child]", + "fq", + 
fqToExcludeNonTestedDocs), tests); } @@ -126,9 +147,18 @@ public void testParentFilterJSON() throws Exception { public void testParentFilterLimitJSON() throws Exception { indexSampleData(numberOfDocsPerNestedTest); - try(SolrQueryRequest req = req("q", "type_s:donut", "sort", "id asc", "fl", "id, type_s, toppings, _nest_path_, [child childFilter='{!field f=_nest_path_}/toppings' limit=1]", - "fq", fqToExcludeNonTestedDocs)) { - BasicResultContext res = (BasicResultContext) h.queryAndResponse("/select", req).getResponse(); + try (SolrQueryRequest req = + req( + "q", + "type_s:donut", + "sort", + "id asc", + "fl", + "id, type_s, toppings, _nest_path_, [child childFilter='{!field f=_nest_path_}/toppings' limit=1]", + "fq", + fqToExcludeNonTestedDocs)) { + BasicResultContext res = + (BasicResultContext) h.queryAndResponse("/select", req).getResponse(); Iterator docsStreamer = res.getProcessedDocuments(); while (docsStreamer.hasNext()) { SolrDocument doc = docsStreamer.next(); @@ -138,30 +168,38 @@ public void testParentFilterLimitJSON() throws Exception { } } - assertJQ(req("q", "type_s:donut", - "sort", "id asc", - "fl", "*, [child limit=1]", - "fq", fqToExcludeNonTestedDocs), + assertJQ( + req( + "q", + "type_s:donut", + "sort", + "id asc", + "fl", + "*, [child limit=1]", + "fq", + fqToExcludeNonTestedDocs), "/response/docs/[0]/type_s==donut", "/response/docs/[0]/lonely/test_s==testing", "/response/docs/[0]/lonely/lonelyGrandChild/test2_s==secondTest", - // "!" (negate): don't find toppings. The "limit" kept us from reaching these, which follow lonely. - "!/response/docs/[0]/toppings/[0]/type_s==Regular" - ); + // "!" (negate): don't find toppings. The "limit" kept us from reaching these, which follow + // lonely. + "!/response/docs/[0]/toppings/[0]/type_s==Regular"); } @Test public void testWithDeletedChildren() throws Exception { // add a doc to create another segment final String addNonTestedDoc = - "{\n" + - "\"add\": {\n" + - "\"doc\": {\n" + - "\"id\": " + -1000 + ", \n" + - "\"type_s\": \"cake\", \n" + - "}\n" + - "}\n" + - "}"; + "{\n" + + "\"add\": {\n" + + "\"doc\": {\n" + + "\"id\": " + + -1000 + + ", \n" + + "\"type_s\": \"cake\", \n" + + "}\n" + + "}\n" + + "}"; if (random().nextBoolean()) { updateJ(addNonTestedDoc, null); @@ -173,9 +211,18 @@ public void testWithDeletedChildren() throws Exception { assertU(delQ("_nest_path_:\\/toppings")); assertU(commit()); - try(SolrQueryRequest req = req("q", "type_s:donut", "sort", "id asc", "fl", "id, type_s, toppings, _nest_path_, [child childFilter='_nest_path_:\\\\/toppings' limit=1]", - "fq", fqToExcludeNonTestedDocs)) { - BasicResultContext res = (BasicResultContext) h.queryAndResponse("/select", req).getResponse(); + try (SolrQueryRequest req = + req( + "q", + "type_s:donut", + "sort", + "id asc", + "fl", + "id, type_s, toppings, _nest_path_, [child childFilter='_nest_path_:\\\\/toppings' limit=1]", + "fq", + fqToExcludeNonTestedDocs)) { + BasicResultContext res = + (BasicResultContext) h.queryAndResponse("/select", req).getResponse(); Iterator docsStreamer = res.getProcessedDocuments(); while (docsStreamer.hasNext()) { SolrDocument doc = docsStreamer.next(); @@ -185,39 +232,63 @@ public void testWithDeletedChildren() throws Exception { } } - assertJQ(req("q", "type_s:donut", - "sort", "id asc", - "fl", "*, [child limit=1]", - "fq", fqToExcludeNonTestedDocs), + assertJQ( + req( + "q", + "type_s:donut", + "sort", + "id asc", + "fl", + "*, [child limit=1]", + "fq", + fqToExcludeNonTestedDocs), "/response/docs/[0]/type_s==donut", 
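
Note: the negated "!" assertions above document the limit subtlety: per the test's own comment, [child limit=N] budgets the descendants in document order, so the "lonely" child (indexed before "toppings") uses up the budget and the toppings never appear. The two fl shapes involved, as plain strings taken from these tests:

    // budget of one descendant: "lonely" (indexed first) is kept, toppings are cut off
    String flLimitOnly = "*, [child limit=1]";
    // narrow to the toppings branch first, then apply the limit within those matches
    String flPathAndLimit =
        "id, type_s, toppings, _nest_path_, "
            + "[child childFilter='{!field f=_nest_path_}/toppings' limit=1]";
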
"/response/docs/[0]/lonely/test_s==testing", "/response/docs/[0]/lonely/lonelyGrandChild/test2_s==secondTest", - // "!" (negate): don't find toppings. The "limit" kept us from reaching these, which follow lonely. - "!/response/docs/[0]/toppings/[0]/type_s==Regular" - ); + // "!" (negate): don't find toppings. The "limit" kept us from reaching these, which follow + // lonely. + "!/response/docs/[0]/toppings/[0]/type_s==Regular"); } @Test public void testChildFilterLimitJSON() throws Exception { indexSampleData(numberOfDocsPerNestedTest); - try(SolrQueryRequest req = req("q", "type_s:donut", "sort", "id asc", "fl", "*, _nest_path_, " + - "[child limit='1' childFilter='toppings/type_s:Regular']", "fq", fqToExcludeNonTestedDocs)) { - BasicResultContext res = (BasicResultContext) h.queryAndResponse("/select", req).getResponse(); + try (SolrQueryRequest req = + req( + "q", + "type_s:donut", + "sort", + "id asc", + "fl", + "*, _nest_path_, " + "[child limit='1' childFilter='toppings/type_s:Regular']", + "fq", + fqToExcludeNonTestedDocs)) { + BasicResultContext res = + (BasicResultContext) h.queryAndResponse("/select", req).getResponse(); Iterator docsStreamer = res.getProcessedDocuments(); while (docsStreamer.hasNext()) { SolrDocument doc = docsStreamer.next(); cleanSolrDocumentFields(doc); assertFalse("root doc should not have anonymous child docs", doc.hasChildDocuments()); assertEquals("should only have 1 child doc", 1, doc.getFieldValues("toppings").size()); - assertEquals("should be of type_s:Regular", "Regular", ((SolrDocument) doc.getFirstValue("toppings")).getFieldValue("type_s")); + assertEquals( + "should be of type_s:Regular", + "Regular", + ((SolrDocument) doc.getFirstValue("toppings")).getFieldValue("type_s")); } } - assertJQ(req("q", "type_s:donut", - "sort", "id asc", - "fl", "id, type_s, toppings, _nest_path_, [child limit='10' childFilter='toppings/type_s:Regular']", - "fq", fqToExcludeNonTestedDocs), + assertJQ( + req( + "q", + "type_s:donut", + "sort", + "id asc", + "fl", + "id, type_s, toppings, _nest_path_, [child limit='10' childFilter='toppings/type_s:Regular']", + "fq", + fqToExcludeNonTestedDocs), "/response/docs/[0]/type_s==donut", "/response/docs/[0]/toppings/[0]/type_s==Regular"); } @@ -226,80 +297,129 @@ public void testChildFilterLimitJSON() throws Exception { public void testExactPath() throws Exception { indexSampleData(2); String[] tests = { - "/response/numFound==4", - "/response/docs/[0]/_nest_path_=='/toppings#0'", - "/response/docs/[1]/_nest_path_=='/toppings#0'", - "/response/docs/[2]/_nest_path_=='/toppings#1'", - "/response/docs/[3]/_nest_path_=='/toppings#1'", + "/response/numFound==4", + "/response/docs/[0]/_nest_path_=='/toppings#0'", + "/response/docs/[1]/_nest_path_=='/toppings#0'", + "/response/docs/[2]/_nest_path_=='/toppings#1'", + "/response/docs/[3]/_nest_path_=='/toppings#1'", }; - assertJQ(req("q", "_nest_path_:*toppings", - "sort", "_nest_path_ asc", - "fl", "*, id_i, _nest_path_", - "fq", fqToExcludeNonTestedDocs), + assertJQ( + req( + "q", + "_nest_path_:*toppings", + "sort", + "_nest_path_ asc", + "fl", + "*, id_i, _nest_path_", + "fq", + fqToExcludeNonTestedDocs), tests); - assertJQ(req("q", "+_nest_path_:\"/toppings\"", - "sort", "_nest_path_ asc", - "fl", "*, _nest_path_", - "fq", fqToExcludeNonTestedDocs), + assertJQ( + req( + "q", + "+_nest_path_:\"/toppings\"", + "sort", + "_nest_path_ asc", + "fl", + "*, _nest_path_", + "fq", + fqToExcludeNonTestedDocs), tests); } @Test public void testChildFilterJSON() throws Exception { 
indexSampleData(numberOfDocsPerNestedTest); - String[] tests = new String[] { - "/response/docs/[0]/type_s==donut", - "/response/docs/[0]/toppings/[0]/type_s==Regular", - }; - - assertJQ(req("q", "type_s:donut", - "sort", "id asc", - "fl", "*,[child childFilter='toppings/type_s:Regular']", - "fq", fqToExcludeNonTestedDocs), + String[] tests = + new String[] { + "/response/docs/[0]/type_s==donut", "/response/docs/[0]/toppings/[0]/type_s==Regular", + }; + + assertJQ( + req( + "q", + "type_s:donut", + "sort", + "id asc", + "fl", + "*,[child childFilter='toppings/type_s:Regular']", + "fq", + fqToExcludeNonTestedDocs), tests); } @Test public void testGrandChildFilterJSON() throws Exception { indexSampleData(numberOfDocsPerNestedTest); - String[] tests = new String[] { - "/response/docs/[0]/type_s==donut", - "/response/docs/[0]/toppings/[0]/ingredients/[0]/name_s==cocoa" - }; - - try(SolrQueryRequest req = req("q", "type_s:donut", "sort", "id asc", - "fl", "*,[child childFilter='toppings/ingredients/name_s:cocoa'],", "fq", fqToExcludeNonTestedDocs)) { - BasicResultContext res = (BasicResultContext) h.queryAndResponse("/select", req).getResponse(); + String[] tests = + new String[] { + "/response/docs/[0]/type_s==donut", + "/response/docs/[0]/toppings/[0]/ingredients/[0]/name_s==cocoa" + }; + + try (SolrQueryRequest req = + req( + "q", + "type_s:donut", + "sort", + "id asc", + "fl", + "*,[child childFilter='toppings/ingredients/name_s:cocoa'],", + "fq", + fqToExcludeNonTestedDocs)) { + BasicResultContext res = + (BasicResultContext) h.queryAndResponse("/select", req).getResponse(); Iterator docsStreamer = res.getProcessedDocuments(); while (docsStreamer.hasNext()) { SolrDocument doc = docsStreamer.next(); cleanSolrDocumentFields(doc); int currDocId = Integer.parseInt((doc.getFirstValue("id")).toString()); - assertEquals("queried docs are not equal to expected output for id: " + currDocId, grandChildDocTemplate(currDocId), doc.toString()); + assertEquals( + "queried docs are not equal to expected output for id: " + currDocId, + grandChildDocTemplate(currDocId), + doc.toString()); } } // test full path - assertJQ(req("q", "type_s:donut", - "sort", "id asc", - "fl", "*,[child childFilter='toppings/ingredients/name_s:cocoa']", - "fq", fqToExcludeNonTestedDocs), + assertJQ( + req( + "q", + "type_s:donut", + "sort", + "id asc", + "fl", + "*,[child childFilter='toppings/ingredients/name_s:cocoa']", + "fq", + fqToExcludeNonTestedDocs), tests); // test partial path - assertJQ(req("q", "type_s:donut", - "sort", "id asc", - "fl", "*,[child childFilter='ingredients/name_s:cocoa']", - "fq", fqToExcludeNonTestedDocs), + assertJQ( + req( + "q", + "type_s:donut", + "sort", + "id asc", + "fl", + "*,[child childFilter='ingredients/name_s:cocoa']", + "fq", + fqToExcludeNonTestedDocs), tests); // test absolute path - assertJQ(req("q", "type_s:donut", - "sort", "id asc", - "fl", "*,[child childFilter='/toppings/ingredients/name_s:cocoa']", - "fq", fqToExcludeNonTestedDocs), + assertJQ( + req( + "q", + "type_s:donut", + "sort", + "id asc", + "fl", + "*,[child childFilter='/toppings/ingredients/name_s:cocoa']", + "fq", + fqToExcludeNonTestedDocs), tests); } @@ -309,44 +429,69 @@ public void testNestPathTransformerMatches() throws Exception { // test partial path // should not match any child docs - assertQ(req("q", "type_s:donut", - "sort", "id asc", - "fl", "*,[child childFilter='redients/name_s:cocoa']", - "fq", fqToExcludeNonTestedDocs), - "//result/doc/str[@name='type_s'][.='donut']", 
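
Note: testGrandChildFilterJSON above deliberately spells the same childFilter three ways; with the nest schema the filter is resolved against _nest_path_, so the full, partial, and absolute forms all match the same grandchildren:

    // all three select the same grandchildren under /toppings/ingredients
    String full = "*,[child childFilter='toppings/ingredients/name_s:cocoa']";
    String partial = "*,[child childFilter='ingredients/name_s:cocoa']";
    String absolute = "*,[child childFilter='/toppings/ingredients/name_s:cocoa']";
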
"not(//result/doc/arr[@name='toppings'])" - ); + assertQ( + req( + "q", + "type_s:donut", + "sort", + "id asc", + "fl", + "*,[child childFilter='redients/name_s:cocoa']", + "fq", + fqToExcludeNonTestedDocs), + "//result/doc/str[@name='type_s'][.='donut']", + "not(//result/doc/arr[@name='toppings'])"); } @Test public void testSingularChildFilterJSON() throws Exception { indexSampleData(numberOfDocsPerNestedTest); - String[] tests = new String[] { - "/response/docs/[0]/type_s==cake", - "/response/docs/[0]/lonely/test_s==testing", - "/response/docs/[0]/lonely/lonelyGrandChild/test2_s==secondTest" - }; - - assertJQ(req("q", "type_s:cake", - "sort", "id asc", - "fl", "*,[child childFilter='lonely/lonelyGrandChild/test2_s:secondTest']", - "fq", fqToExcludeNonTestedDocs), + String[] tests = + new String[] { + "/response/docs/[0]/type_s==cake", + "/response/docs/[0]/lonely/test_s==testing", + "/response/docs/[0]/lonely/lonelyGrandChild/test2_s==secondTest" + }; + + assertJQ( + req( + "q", + "type_s:cake", + "sort", + "id asc", + "fl", + "*,[child childFilter='lonely/lonelyGrandChild/test2_s:secondTest']", + "fq", + fqToExcludeNonTestedDocs), tests); } @Test public void testNonRootChildren() throws Exception { indexSampleData(numberOfDocsPerNestedTest); - assertJQ(req("q", "test_s:testing", - "sort", "id asc", - "fl", "*,[child childFilter='lonely/lonelyGrandChild/test2_s:secondTest']", - "fq", fqToExcludeNonTestedDocs), + assertJQ( + req( + "q", + "test_s:testing", + "sort", + "id asc", + "fl", + "*,[child childFilter='lonely/lonelyGrandChild/test2_s:secondTest']", + "fq", + fqToExcludeNonTestedDocs), "/response/docs/[0]/test_s==testing", "/response/docs/[0]/lonelyGrandChild/test2_s==secondTest"); - assertJQ(req("q", "type_s:Chocolate", - "sort", "id asc", - "fl", "*,[child]", - "fq", fqToExcludeNonTestedDocs), + assertJQ( + req( + "q", + "type_s:Chocolate", + "sort", + "id asc", + "fl", + "*,[child]", + "fq", + fqToExcludeNonTestedDocs), "/response/docs/[0]/type_s==Chocolate", "/response/docs/[0]/ingredients/[0]/name_s==cocoa", "/response/docs/[0]/ingredients/[1]/name_s==cocoa"); @@ -354,40 +499,55 @@ public void testNonRootChildren() throws Exception { @Test public void testExceptionThrownWParentFilter() throws Exception { - expectThrows(SolrException.class, + expectThrows( + SolrException.class, "Exception was not thrown when parentFilter param was passed to ChildDocTransformer using a nested schema", - () -> assertJQ(req("q", "test_s:testing", - "sort", "id asc", - "fl", "*,[child childFilter='lonely/lonelyGrandChild/test2_s:secondTest' parentFilter='_nest_path_:\"lonely/\"']", - "fq", fqToExcludeNonTestedDocs), - "/response/docs/[0]/test_s==testing", - "/response/docs/[0]/lonelyGrandChild/test2_s==secondTest") - ); + () -> + assertJQ( + req( + "q", + "test_s:testing", + "sort", + "id asc", + "fl", + "*,[child childFilter='lonely/lonelyGrandChild/test2_s:secondTest' parentFilter='_nest_path_:\"lonely/\"']", + "fq", + fqToExcludeNonTestedDocs), + "/response/docs/[0]/test_s==testing", + "/response/docs/[0]/lonelyGrandChild/test2_s==secondTest")); } @Test public void testNoChildren() throws Exception { final String addDocWoChildren = - "{\n" + - "\"add\": {\n" + - "\"doc\": {\n" + - "\"id\": " + id() + ", \n" + - "\"type_s\": \"cake\", \n" + - "}\n" + - "}\n" + - "}"; + "{\n" + + "\"add\": {\n" + + "\"doc\": {\n" + + "\"id\": " + + id() + + ", \n" + + "\"type_s\": \"cake\", \n" + + "}\n" + + "}\n" + + "}"; updateJ(addDocWoChildren, null); assertU(commit()); - assertJQ(req("q", "type_s:cake", - 
"sort", "id asc", - "fl", "*,[child childFilter='lonely/lonelyGrandChild/test2_s:secondTest']", - "fq", fqToExcludeNonTestedDocs), + assertJQ( + req( + "q", + "type_s:cake", + "sort", + "id asc", + "fl", + "*,[child childFilter='lonely/lonelyGrandChild/test2_s:secondTest']", + "fq", + fqToExcludeNonTestedDocs), "/response/docs/[0]/type_s==cake"); } private void indexSampleData(int numDocs) throws Exception { - for(int i = 0; i < numDocs; ++i) { + for (int i = 0; i < numDocs; ++i) { updateJ(generateDocHierarchy(i), null); } assertU(commit()); @@ -399,14 +559,15 @@ private static int id() { @SuppressWarnings({"unchecked"}) private static void cleanSolrDocumentFields(SolrDocument input) { - for(String fieldName: fieldsToRemove) { + for (String fieldName : fieldsToRemove) { input.removeFields(fieldName); } - for(Map.Entry field: input) { + for (Map.Entry field : input) { Object val = field.getValue(); - if(val instanceof Collection) { - Object newVals = ((Collection) val).stream().map((item) -> (cleanIndexableField(item))) - .collect(Collectors.toList()); + if (val instanceof Collection) { + Object newVals = + ((Collection) val) + .stream().map((item) -> (cleanIndexableField(item))).collect(Collectors.toList()); input.setField(field.getKey(), newVals); continue; } @@ -415,63 +576,129 @@ private static void cleanSolrDocumentFields(SolrDocument input) { } private static Object cleanIndexableField(Object field) { - if(field instanceof IndexableField) { + if (field instanceof IndexableField) { return ((IndexableField) field).stringValue(); - } else if(field instanceof SolrDocument) { + } else if (field instanceof SolrDocument) { cleanSolrDocumentFields((SolrDocument) field); } return field; } private static String grandChildDocTemplate(int id) { - final int docNum = id / sumOfDocsPerNestedDocument; // the index of docs sent to solr in the AddUpdateCommand. e.g. first doc is 0 - return - "SolrDocument{id="+ id + ", type_s=" + types[docNum % types.length] + ", name_s=" + names[docNum % names.length] + ", " + - "toppings=[" + - "SolrDocument{id=" + (id + 3) + ", type_s=Regular, " + - "ingredients=[SolrDocument{id=" + (id + 4) + ", name_s=cocoa}]}, " + - "SolrDocument{id=" + (id + 5) + ", type_s=Chocolate, " + - "ingredients=[SolrDocument{id=" + (id + 6) + ", name_s=cocoa}, SolrDocument{id=" + (id + 7) + ", name_s=cocoa}]}]}"; + final int docNum = + id / sumOfDocsPerNestedDocument; // the index of docs sent to solr in the AddUpdateCommand. + // e.g. first doc is 0 + return "SolrDocument{id=" + + id + + ", type_s=" + + types[docNum % types.length] + + ", name_s=" + + names[docNum % names.length] + + ", " + + "toppings=[" + + "SolrDocument{id=" + + (id + 3) + + ", type_s=Regular, " + + "ingredients=[SolrDocument{id=" + + (id + 4) + + ", name_s=cocoa}]}, " + + "SolrDocument{id=" + + (id + 5) + + ", type_s=Chocolate, " + + "ingredients=[SolrDocument{id=" + + (id + 6) + + ", name_s=cocoa}, SolrDocument{id=" + + (id + 7) + + ", name_s=cocoa}]}]}"; } private static String fullNestedDocTemplate(int id) { - final int docNum = id / sumOfDocsPerNestedDocument; // the index of docs sent to solr in the AddUpdateCommand. e.g. first doc is 0 + final int docNum = + id / sumOfDocsPerNestedDocument; // the index of docs sent to solr in the AddUpdateCommand. + // e.g. first doc is 0 boolean doubleIngredient = docNum % 2 == 0; - String currIngredient = doubleIngredient ? 
ingredients[1]: ingredientsCycler.next(); - return - "SolrDocument{id=" + id + ", type_s=" + types[docNum % types.length] + ", name_s=" + names[docNum % names.length] + ", " + - "lonely=SolrDocument{id=" + (id + 1) + ", test_s=testing, " + - "lonelyGrandChild=SolrDocument{id=" + (id + 2) + ", test2_s=secondTest}}, " + - "toppings=[" + - "SolrDocument{id=" + (id + 3) + ", type_s=Regular, " + - "ingredients=[SolrDocument{id=" + (id + 4) + ", name_s=" + currIngredient + "}]}, " + - "SolrDocument{id=" + (id + 5) + ", type_s=Chocolate, " + - "ingredients=[SolrDocument{id=" + (id + 6) + ", name_s=cocoa}, SolrDocument{id=" + (id + 7) + ", name_s=cocoa}]}]}"; + String currIngredient = doubleIngredient ? ingredients[1] : ingredientsCycler.next(); + return "SolrDocument{id=" + + id + + ", type_s=" + + types[docNum % types.length] + + ", name_s=" + + names[docNum % names.length] + + ", " + + "lonely=SolrDocument{id=" + + (id + 1) + + ", test_s=testing, " + + "lonelyGrandChild=SolrDocument{id=" + + (id + 2) + + ", test2_s=secondTest}}, " + + "toppings=[" + + "SolrDocument{id=" + + (id + 3) + + ", type_s=Regular, " + + "ingredients=[SolrDocument{id=" + + (id + 4) + + ", name_s=" + + currIngredient + + "}]}, " + + "SolrDocument{id=" + + (id + 5) + + ", type_s=Chocolate, " + + "ingredients=[SolrDocument{id=" + + (id + 6) + + ", name_s=cocoa}, SolrDocument{id=" + + (id + 7) + + ", name_s=cocoa}]}]}"; } private static String generateDocHierarchy(int sequence) { boolean doubleIngredient = sequence % 2 == 0; - String currIngredient = doubleIngredient ? ingredients[1]: ingredientsCycler.next(); - return "{\n" + - "\"add\": {\n" + - "\"doc\": {\n" + - "\"id\": " + id() + ", \n" + - "\"type_s\": \"" + types[sequence % types.length] + "\", \n" + - "\"lonely\": {\"id\": " + id() + ", \"test_s\": \"testing\", \"lonelyGrandChild\": {\"id\": " + id() + ", \"test2_s\": \"secondTest\"}}, \n" + - "\"name_s\": " + names[sequence % names.length] + - "\"toppings\": [ \n" + - "{\"id\": " + id() + ", \"type_s\":\"Regular\"," + - "\"ingredients\": [{\"id\": " + id() + "," + - "\"name_s\": \"" + currIngredient + "\"}]" + - "},\n" + - "{\"id\": " + id() + ", \"type_s\":\"Chocolate\"," + - "\"ingredients\": [{\"id\": " + id() + "," + - "\"name_s\": \"" + ingredients[1] + "\"}," + - "{\"id\": " + id() + ",\n" + "\"name_s\": \"" + ingredients[1] +"\"" + - "}]" + - "}]\n" + - "}\n" + - "}\n" + - "}"; + String currIngredient = doubleIngredient ? 
ingredients[1] : ingredientsCycler.next(); + return "{\n" + + "\"add\": {\n" + + "\"doc\": {\n" + + "\"id\": " + + id() + + ", \n" + + "\"type_s\": \"" + + types[sequence % types.length] + + "\", \n" + + "\"lonely\": {\"id\": " + + id() + + ", \"test_s\": \"testing\", \"lonelyGrandChild\": {\"id\": " + + id() + + ", \"test2_s\": \"secondTest\"}}, \n" + + "\"name_s\": " + + names[sequence % names.length] + + "\"toppings\": [ \n" + + "{\"id\": " + + id() + + ", \"type_s\":\"Regular\"," + + "\"ingredients\": [{\"id\": " + + id() + + "," + + "\"name_s\": \"" + + currIngredient + + "\"}]" + + "},\n" + + "{\"id\": " + + id() + + ", \"type_s\":\"Chocolate\"," + + "\"ingredients\": [{\"id\": " + + id() + + "," + + "\"name_s\": \"" + + ingredients[1] + + "\"}," + + "{\"id\": " + + id() + + ",\n" + + "\"name_s\": \"" + + ingredients[1] + + "\"" + + "}]" + + "}]\n" + + "}\n" + + "}\n" + + "}"; } } diff --git a/solr/core/src/test/org/apache/solr/response/transform/TestExplainDocTransformer.java b/solr/core/src/test/org/apache/solr/response/transform/TestExplainDocTransformer.java index 39c7ca37688..5c72602602a 100644 --- a/solr/core/src/test/org/apache/solr/response/transform/TestExplainDocTransformer.java +++ b/solr/core/src/test/org/apache/solr/response/transform/TestExplainDocTransformer.java @@ -17,7 +17,6 @@ package org.apache.solr.response.transform; - import org.apache.solr.SolrTestCaseJ4; import org.junit.After; import org.junit.BeforeClass; @@ -27,18 +26,73 @@ public class TestExplainDocTransformer extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); - assertU(add(doc("id", "1", "name_s", "john", "title_s", "Director", "dept_s","Engineering", - "text_t","These guys develop stuff"))); - assertU(add(doc("id", "2", "name_s", "mark", "title_s", "VP", "dept_s","Marketing", - "text_t","These guys make you look good"))); - assertU(add(doc("id", "3", "name_s", "nancy", "title_s", "MTS", "dept_s","Sales", - "text_t","These guys sell stuff"))); - assertU(add(doc("id", "4", "name_s", "dave", "title_s", "MTS", "dept_s","Support", - "text_t","These guys help customers"))); - assertU(add(doc("id", "5", "name_s", "tina", "title_s", "VP", "dept_s","Engineering", - "text_t","These guys develop stuff"))); + assertU( + add( + doc( + "id", + "1", + "name_s", + "john", + "title_s", + "Director", + "dept_s", + "Engineering", + "text_t", + "These guys develop stuff"))); + assertU( + add( + doc( + "id", + "2", + "name_s", + "mark", + "title_s", + "VP", + "dept_s", + "Marketing", + "text_t", + "These guys make you look good"))); + assertU( + add( + doc( + "id", + "3", + "name_s", + "nancy", + "title_s", + "MTS", + "dept_s", + "Sales", + "text_t", + "These guys sell stuff"))); + assertU( + add( + doc( + "id", + "4", + "name_s", + "dave", + "title_s", + "MTS", + "dept_s", + "Support", + "text_t", + "These guys help customers"))); + assertU( + add( + doc( + "id", + "5", + "name_s", + "tina", + "title_s", + "VP", + "dept_s", + "Engineering", + "text_t", + "These guys develop stuff"))); assertU(commit()); } @@ -50,26 +104,37 @@ public void cleanup() throws Exception { @Test public void testStyle() throws Exception { - // this doesn't validate the explain response but checks if explain response is returned in expected format + // this doesn't validate the explain response but checks if explain response is returned in + // expected format // when not style is passed then default style should be used - 
assertQ(req("q", "*:*", "fl", "id,[explain]"), "//result/doc[1]/str[@name='id'][.='1']", + assertQ( + req("q", "*:*", "fl", "id,[explain]"), + "//result/doc[1]/str[@name='id'][.='1']", "boolean(//result/doc[1]/str[@name='[explain]'])"); // doc transformer defined in solrconfig without style - assertQ(req("q", "*:*", "fl", "id,[explain1]"), "//result/doc[1]/str[@name='id'][.='1']", + assertQ( + req("q", "*:*", "fl", "id,[explain1]"), + "//result/doc[1]/str[@name='id'][.='1']", "boolean(//result/doc[1]/str[@name='[explain1]'])"); // doc transformer defined in solrconfig with style=nl - assertQ(req("q", "*:*", "fl", "id,[explainNL]"), "//result/doc[1]/str[@name='id'][.='1']", + assertQ( + req("q", "*:*", "fl", "id,[explainNL]"), + "//result/doc[1]/str[@name='id'][.='1']", "boolean(//result/doc[1]/lst[@name='[explainNL]'])"); // doc transformer defined in solrconfig with style=nl - assertQ(req("q", "*:*", "fl", "id,[explainText]"), "//result/doc[1]/str[@name='id'][.='1']", + assertQ( + req("q", "*:*", "fl", "id,[explainText]"), + "//result/doc[1]/str[@name='id'][.='1']", "boolean(//result/doc[1]/str[@name='[explainText]'])"); // passing style as parameter at runtime - assertQ(req("q", "*:*", "fl", "id,[explainText style=nl]"), "//result/doc[1]/str[@name='id'][.='1']", + assertQ( + req("q", "*:*", "fl", "id,[explainText style=nl]"), + "//result/doc[1]/str[@name='id'][.='1']", "boolean(//result/doc[1]/lst[@name='[explainText]'])"); } } diff --git a/solr/core/src/test/org/apache/solr/response/transform/TestSubQueryTransformer.java b/solr/core/src/test/org/apache/solr/response/transform/TestSubQueryTransformer.java index a9e5fa923de..5d9af928c45 100644 --- a/solr/core/src/test/org/apache/solr/response/transform/TestSubQueryTransformer.java +++ b/solr/core/src/test/org/apache/solr/response/transform/TestSubQueryTransformer.java @@ -24,26 +24,7 @@ import java.util.Map; import java.util.Random; import java.util.StringTokenizer; - import org.apache.commons.io.output.ByteArrayOutputStream; - -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; @@ -70,499 +51,905 @@ public static void beforeTests() throws Exception { initCore("solrconfig-basic.xml", "schema-docValuesJoin.xml"); peopleMultiplier = atLeast(1); deptMultiplier = atLeast(1); - - int id=0; - for (int p=0; p < peopleMultiplier; p++){ - assertU(add(doc("id", ""+id++,"name_s", "john", "title_s", "Director", - "dept_ss_dv","Engineering", - "dept_i", "0", - "dept_is", "0"))); - assertU(add(doc("id", ""+id++,"name_s", "mark", "title_s", "VP", - "dept_ss_dv","Marketing", - "dept_i", "1", - "dept_is", "1"))); - assertU(add(doc("id", ""+id++,"name_s", "nancy", "title_s", "MTS", - "dept_ss_dv","Sales", - "dept_i", "2", - "dept_is", "2"))); - assertU(add(doc("id", ""+id++,"name_s", "dave", "title_s", "MTS", - "dept_ss_dv","Support", "dept_ss_dv","Engineering", - "dept_i", "3", - "dept_is", "3", "dept_is", "0"))); - assertU(add(doc("id", ""+id++,"name_s", "tina", "title_s", "VP", - "dept_ss_dv","Engineering", - "dept_i", "0", - "dept_is", "0"))); - + + int id = 0; + for (int p = 0; p < peopleMultiplier; p++) { + assertU( + add( + doc( + "id", + "" + id++, + "name_s", + "john", + "title_s", + "Director", + "dept_ss_dv", + "Engineering", + "dept_i", + "0", + "dept_is", + "0"))); + assertU( + add( + doc( + "id", + "" + id++, + "name_s", + "mark", + "title_s", + "VP", + "dept_ss_dv", + "Marketing", + "dept_i", + "1", + "dept_is", + "1"))); + assertU( + add( + doc( + "id", + "" + id++, + "name_s", + "nancy", + "title_s", + "MTS", + "dept_ss_dv", + "Sales", + "dept_i", + "2", + "dept_is", + "2"))); + assertU( + add( + doc( + "id", + "" + id++, + "name_s", + "dave", + "title_s", + "MTS", + "dept_ss_dv", + "Support", + "dept_ss_dv", + "Engineering", + "dept_i", + "3", + "dept_is", + "3", + "dept_is", + "0"))); + assertU( + add( + doc( + "id", + "" + id++, + "name_s", + "tina", + "title_s", + "VP", + "dept_ss_dv", + "Engineering", + "dept_i", + "0", + "dept_is", + "0"))); + if (rarely()) { assertU(commit("softCommit", "true")); } } - - for (int d=0; d < deptMultiplier; d++){ - assertU(add(doc("id",""+id, "id_i",""+id++, - "dept_id_s", "Engineering", "text_t","These guys develop stuff", "salary_i_dv", "1000", - "dept_id_i", "0"))); - assertU(add(doc("id",""+id++,"id_i",""+id++, - "dept_id_s", "Marketing", "text_t","These guys make you look good","salary_i_dv", "1500", - "dept_id_i", "1"))); - assertU(add(doc("id",""+id, "id_i",""+id++, - "dept_id_s", "Sales", "text_t","These guys sell stuff","salary_i_dv", "1600", - "dept_id_i", "2"))); - assertU(add(doc("id",""+id,"id_i",""+id++, - "dept_id_s", "Support", "text_t","These guys help customers","salary_i_dv", "800", - "dept_id_i", "3"))); - + + for (int d = 0; d < deptMultiplier; d++) { + assertU( + add( + doc( + "id", + "" + id, + "id_i", + "" + id++, + "dept_id_s", + "Engineering", + "text_t", + "These guys develop stuff", + "salary_i_dv", + "1000", + "dept_id_i", + "0"))); + assertU( + add( + doc( + "id", + "" + id++, + "id_i", + "" + id++, + "dept_id_s", + "Marketing", + "text_t", + "These guys make you look good", + "salary_i_dv", + "1500", + "dept_id_i", + "1"))); + assertU( + add( + doc( + "id", + "" + id, + "id_i", + "" + id++, + "dept_id_s", + "Sales", + "text_t", + "These guys sell stuff", + "salary_i_dv", + "1600", + "dept_id_i", + "2"))); + assertU( + add( + doc( + "id", + "" + id, + "id_i", + "" + id++, + "dept_id_s", + "Support", + "text_t", + "These guys help customers", + "salary_i_dv", + 
"800", + "dept_id_i", + "3"))); + if (rarely()) { assertU(commit("softCommit", "true")); } } assertU(commit()); - } - @Test public void testJohnOrNancySingleField() throws Exception { - //System.out.println("p "+peopleMultiplier+" d "+deptMultiplier); - assertQ("subq1.fl is limited to single field", - req("q","name_s:(john nancy)", "indent","true", - "fl","dept_ss_dv,name_s_dv,depts:[subquery]", - "rows","" + (2 * peopleMultiplier), - "depts.q","{!term f=dept_id_s v=$row.dept_ss_dv}", - "depts.fl","text_t", - "depts.indent","true", - "depts.rows",""+deptMultiplier), - "count(//result/doc/str[@name='name_s_dv'][.='john']/../result[@name='depts'][@numFound='" + - deptMultiplier+ "']/doc/str[@name='text_t'][.='These guys develop stuff'])="+ - (peopleMultiplier * deptMultiplier), - "count(//result/doc/str[@name='name_s_dv'][.='nancy']/../result[@name='depts'][@numFound='" + - deptMultiplier+ "']/doc/str[@name='text_t'][.='These guys sell stuff'])="+ - (peopleMultiplier * deptMultiplier), + // System.out.println("p "+peopleMultiplier+" d "+deptMultiplier); + assertQ( + "subq1.fl is limited to single field", + req( + "q", + "name_s:(john nancy)", + "indent", + "true", + "fl", + "dept_ss_dv,name_s_dv,depts:[subquery]", + "rows", + "" + (2 * peopleMultiplier), + "depts.q", + "{!term f=dept_id_s v=$row.dept_ss_dv}", + "depts.fl", + "text_t", + "depts.indent", + "true", + "depts.rows", + "" + deptMultiplier), + "count(//result/doc/str[@name='name_s_dv'][.='john']/../result[@name='depts'][@numFound='" + + deptMultiplier + + "']/doc/str[@name='text_t'][.='These guys develop stuff'])=" + + (peopleMultiplier * deptMultiplier), + "count(//result/doc/str[@name='name_s_dv'][.='nancy']/../result[@name='depts'][@numFound='" + + deptMultiplier + + "']/doc/str[@name='text_t'][.='These guys sell stuff'])=" + + (peopleMultiplier * deptMultiplier), "count((//result/doc/str[@name='name_s_dv'][.='john']/..)[1]/result[@name='depts']/doc[1]/*)=1", - "count((//result/doc/str[@name='name_s_dv'][.='john']/..)[1]/result[@name='depts']/doc["+ deptMultiplier+ "]/*)=1", - "count((//result/doc/str[@name='name_s_dv'][.='john']/..)["+ peopleMultiplier +"]/result[@name='depts'][@numFound='" + - deptMultiplier+ "']/doc[1]/*)=1", - "count((//result/doc/str[@name='name_s_dv'][.='john']/..)["+ peopleMultiplier +"]/result[@name='depts'][@numFound='" + - deptMultiplier+ "']/doc["+ deptMultiplier+ "]/*)=1" - - ); - } - - final String[] johnAndNancyParams = new String[]{"q","name_s:(john nancy)", "indent","true", - "fl","dept_ss_dv,name_s_dv,depts:[subquery]", - "fl","dept_i_dv,depts_i:[subquery]", - "rows","" + (2 * peopleMultiplier), - "depts.q","{!term f=dept_id_s v=$row.dept_ss_dv}", - "depts.fl","text_t", - "depts.indent","true", - "depts.rows",""+deptMultiplier, - - "depts_i.q","{!term f=dept_id_i v=$row.dept_i_dv}", - "depts_i.fl","text_t", // multi val subquery param check - "depts_i.fl","dept_id_s_dv", - "depts_i.indent","true", - "depts_i.rows",""+deptMultiplier}; - + "count((//result/doc/str[@name='name_s_dv'][.='john']/..)[1]/result[@name='depts']/doc[" + + deptMultiplier + + "]/*)=1", + "count((//result/doc/str[@name='name_s_dv'][.='john']/..)[" + + peopleMultiplier + + "]/result[@name='depts'][@numFound='" + + deptMultiplier + + "']/doc[1]/*)=1", + "count((//result/doc/str[@name='name_s_dv'][.='john']/..)[" + + peopleMultiplier + + "]/result[@name='depts'][@numFound='" + + deptMultiplier + + "']/doc[" + + deptMultiplier + + "]/*)=1"); + } + + final String[] johnAndNancyParams = + new String[] { + "q", + "name_s:(john 
nancy)", + "indent", + "true", + "fl", + "dept_ss_dv,name_s_dv,depts:[subquery]", + "fl", + "dept_i_dv,depts_i:[subquery]", + "rows", + "" + (2 * peopleMultiplier), + "depts.q", + "{!term f=dept_id_s v=$row.dept_ss_dv}", + "depts.fl", + "text_t", + "depts.indent", + "true", + "depts.rows", + "" + deptMultiplier, + "depts_i.q", + "{!term f=dept_id_i v=$row.dept_i_dv}", + "depts_i.fl", + "text_t", // multi val subquery param check + "depts_i.fl", + "dept_id_s_dv", + "depts_i.indent", + "true", + "depts_i.rows", + "" + deptMultiplier + }; + @Test public void testTwoSubQueriesAndByNumberWithTwoFields() throws Exception { final SolrQueryRequest johnOrNancyTwoFL = req(johnAndNancyParams); - - assertQ("call subquery twice a row, once by number, with two fls via multival params", + + assertQ( + "call subquery twice a row, once by number, with two fls via multival params", johnOrNancyTwoFL, - "count(//result/doc/str[@name='name_s_dv'][.='john']/../result[@name='depts']/doc/str[@name='text_t'][.='These guys develop stuff'])="+ - (peopleMultiplier * deptMultiplier), - "count(//result/doc/str[@name='name_s_dv'][.='john']/../result[@name='depts_i']/doc/str[@name='dept_id_s_dv'][.='Engineering'])="+ - (peopleMultiplier * deptMultiplier), - "count(//result/doc/str[@name='name_s_dv'][.='nancy']/../result[@name='depts_i']/doc/str[@name='text_t'][.='These guys sell stuff'])="+ - (peopleMultiplier * deptMultiplier), - "count(//result/doc/str[@name='name_s_dv'][.='nancy']/../result[@name='depts_i']/doc/str[@name='dept_id_s_dv'][.='Sales'])="+ - (peopleMultiplier * deptMultiplier), - "count((//result/doc/str[@name='name_s_dv'][.='john']/..)["+ peopleMultiplier +"]/result[@name='depts_i']/doc["+ deptMultiplier+ "]/str[@name='dept_id_s_dv'][.='Engineering'])=1", - "count((//result/doc/str[@name='name_s_dv'][.='john']/..)["+ peopleMultiplier +"]/result[@name='depts_i']/doc["+ deptMultiplier+ "]/str[@name='text_t'][.='These guys develop stuff'])=1" - ); + "count(//result/doc/str[@name='name_s_dv'][.='john']/../result[@name='depts']/doc/str[@name='text_t'][.='These guys develop stuff'])=" + + (peopleMultiplier * deptMultiplier), + "count(//result/doc/str[@name='name_s_dv'][.='john']/../result[@name='depts_i']/doc/str[@name='dept_id_s_dv'][.='Engineering'])=" + + (peopleMultiplier * deptMultiplier), + "count(//result/doc/str[@name='name_s_dv'][.='nancy']/../result[@name='depts_i']/doc/str[@name='text_t'][.='These guys sell stuff'])=" + + (peopleMultiplier * deptMultiplier), + "count(//result/doc/str[@name='name_s_dv'][.='nancy']/../result[@name='depts_i']/doc/str[@name='dept_id_s_dv'][.='Sales'])=" + + (peopleMultiplier * deptMultiplier), + "count((//result/doc/str[@name='name_s_dv'][.='john']/..)[" + + peopleMultiplier + + "]/result[@name='depts_i']/doc[" + + deptMultiplier + + "]/str[@name='dept_id_s_dv'][.='Engineering'])=1", + "count((//result/doc/str[@name='name_s_dv'][.='john']/..)[" + + peopleMultiplier + + "]/result[@name='depts_i']/doc[" + + deptMultiplier + + "]/str[@name='text_t'][.='These guys develop stuff'])=1"); } - + @Test public void testRowsStartForSubqueryAndScores() throws Exception { - - String johnDeptsIds = h.query(req(new String[]{"q","{!join from=dept_ss_dv to=dept_id_s}name_s:john", - "wt","csv", - "csv.header","false", - "fl","id", - "rows",""+deptMultiplier, - "sort", "id_i desc" - })); - - ArrayList deptIds = Collections.list( - new StringTokenizer( johnDeptsIds)); - - final int a = random().nextInt(deptMultiplier+1); - final int b = random().nextInt(deptMultiplier+1); - final int start 
= Math.min(a, b) ; - final int toIndex = Math.max(a, b) ; - List<Object> expectIds = deptIds.subList(start , toIndex); + + String johnDeptsIds = + h.query( + req( + new String[] { + "q", + "{!join from=dept_ss_dv to=dept_id_s}name_s:john", + "wt", + "csv", + "csv.header", + "false", + "fl", + "id", + "rows", + "" + deptMultiplier, + "sort", + "id_i desc" + })); + + ArrayList<Object> deptIds = Collections.list(new StringTokenizer(johnDeptsIds)); + + final int a = random().nextInt(deptMultiplier + 1); + final int b = random().nextInt(deptMultiplier + 1); + final int start = Math.min(a, b); + final int toIndex = Math.max(a, b); + List<Object> expectIds = deptIds.subList(start, toIndex); ArrayList<String> assertions = new ArrayList<>(); // count((//result/doc/str[@name='name_s_dv'][.='john']/../result[@name='depts'])[1]/doc/str[@name='id']) - // random().nextInt(peopleMultiplier); - assertions.add("count((//result/doc/str[@name='name_s_dv'][.='john']/.." - + "/result[@name='depts'][@numFound='"+deptMultiplier+"'][@start='"+start+"'])["+ - (random().nextInt(peopleMultiplier)+1) - +"]/doc/str[@name='id'])=" +(toIndex-start)); - - // System.out.println(expectIds); - - for (int i=0; i< expectIds.size(); i++) { + // random().nextInt(peopleMultiplier); + assertions.add( + "count((//result/doc/str[@name='name_s_dv'][.='john']/.." + + "/result[@name='depts'][@numFound='" + + deptMultiplier + + "'][@start='" + + start + + "'])[" + + (random().nextInt(peopleMultiplier) + 1) + + "]/doc/str[@name='id'])=" + + (toIndex - start)); + + // System.out.println(expectIds); + + for (int i = 0; i < expectIds.size(); i++) { // (//result/doc/str[@name='name_s_dv'][.='john']/../result[@name='depts'])[1]/doc[1]/str[@name='id']='15' - String ithDoc = "(//result/doc/str[@name='name_s_dv'][.='john']/.." - + "/result[@name='depts'][@numFound='"+deptMultiplier+"'][@start='"+start+"'])["+ - (random().nextInt(peopleMultiplier)+1) + - "]/doc[" +(i+1)+ "]"; - assertions.add(ithDoc+"/str[@name='id'][.='"+expectIds.get(i)+"']"); + String ithDoc = + "(//result/doc/str[@name='name_s_dv'][.='john']/.."
+ + "/result[@name='depts'][@numFound='" + + deptMultiplier + + "'][@start='" + + start + + "'])[" + + (random().nextInt(peopleMultiplier) + 1) + + "]/doc[" + + (i + 1) + + "]"; + assertions.add(ithDoc + "/str[@name='id'][.='" + expectIds.get(i) + "']"); // let's test scores right there - assertions.add(ithDoc+"/float[@name='score'][.='"+expectIds.get(i)+".0']"); - + assertions.add(ithDoc + "/float[@name='score'][.='" + expectIds.get(i) + ".0']"); } - - String[] john = new String[]{"q","name_s:john", "indent","true", - "fl","dept_ss_dv,name_s_dv,depts:[subquery]", - "rows","" + (2 * peopleMultiplier), - "depts.q","+{!term f=dept_id_s v=$row.dept_ss_dv}^=0 _val_:id_i", - "depts.fl","id", - "depts.fl","score", - "depts.indent","true", - "depts.rows",""+(toIndex-start), - "depts.start",""+start}; - - assertQ(req(john), assertions.toArray(new String[]{})); + + String[] john = + new String[] { + "q", + "name_s:john", + "indent", + "true", + "fl", + "dept_ss_dv,name_s_dv,depts:[subquery]", + "rows", + "" + (2 * peopleMultiplier), + "depts.q", + "+{!term f=dept_id_s v=$row.dept_ss_dv}^=0 _val_:id_i", + "depts.fl", + "id", + "depts.fl", + "score", + "depts.indent", + "true", + "depts.rows", + "" + (toIndex - start), + "depts.start", + "" + start + }; + + assertQ(req(john), assertions.toArray(new String[] {})); } - + @Test public void testThreeLevel() throws Exception { - List asserts = new ArrayList<>(); + List asserts = new ArrayList<>(); // dave works in both dept, get his coworkers from both - for (String dept : new String[] {"Engineering", "Support"}) { //dept_id_s_dv">Engineering - - ArrayList deptWorkers = Collections.list( - new StringTokenizer( h.query(req( - "q","dept_ss_dv:"+dept ,//dept_id_i_dv - "wt","csv", - "csv.header","false", - "fl","name_s_dv", - "rows",""+peopleMultiplier*3, // dave has three coworkers in two depts - "sort", "id desc" - )))); - // System.out.println(deptWorkers); - + for (String dept : new String[] {"Engineering", "Support"}) { // dept_id_s_dv">Engineering + + ArrayList deptWorkers = + Collections.list( + new StringTokenizer( + h.query( + req( + "q", + "dept_ss_dv:" + dept, // dept_id_i_dv + "wt", + "csv", + "csv.header", + "false", + "fl", + "name_s_dv", + "rows", + "" + peopleMultiplier * 3, // dave has three coworkers in two depts + "sort", + "id desc")))); + // System.out.println(deptWorkers); + // looping dave clones - for (int p : new int []{1, peopleMultiplier}) { + for (int p : new int[] {1, peopleMultiplier}) { // looping dept clones - for (int d : new int []{1, deptMultiplier}) { + for (int d : new int[] {1, deptMultiplier}) { // looping coworkers int wPos = 1; for (Object mate : deptWorkers) { // (/response/result/doc/str[@name='name_s_dv'][.='dave']/..)[1] // /result[@name='subq1']/doc/str[@name='dept_id_s_dv'][.='Engineering']/.. 
// /result[@name='neighbours']/doc/str[@name='name_s_dv'][.='tina'] - asserts.add("((/response/result/doc/str[@name='name_s_dv'][.='dave']/..)["+p+"]"+ - "/result[@name='subq1']/doc/str[@name='dept_id_s_dv'][.='"+dept+"']/..)["+ d +"]"+ - "/result[@name='neighbours']/doc[" + wPos + "]/str[@name='name_s_dv'][.='"+ mate+"']"); - wPos ++; + asserts.add( + "((/response/result/doc/str[@name='name_s_dv'][.='dave']/..)[" + + p + + "]" + + "/result[@name='subq1']/doc/str[@name='dept_id_s_dv'][.='" + + dept + + "']/..)[" + + d + + "]" + + "/result[@name='neighbours']/doc[" + + wPos + + "]/str[@name='name_s_dv'][.='" + + mate + + "']"); + wPos++; } - } } } - //System.out.println(asserts); - assertQ("dave works at both dept with other folks", - // System.out.println(h.query( - req(new String[]{"q","name_s:dave", "indent","true", - "fl","dept_ss_dv,name_s_dv,subq1:[subquery]", - "rows","" + peopleMultiplier, - "subq1.q","{!terms f=dept_id_s v=$row.dept_ss_dv}", - "subq1.fl","dept_id_i_dv,text_t,dept_id_s_dv,neighbours:[subquery]", - "subq1.indent","true", - "subq1.rows",""+(deptMultiplier*2), - "subq1.neighbours.q",//flipping via numbers - random().nextBoolean() ? - "{!terms f=dept_ss_dv v=$row.dept_id_s_dv}" - : "{!terms f=dept_is v=$row.dept_id_i_dv}", - "subq1.neighbours.fl", "name_s_dv" , - "subq1.neighbours.rows", ""+peopleMultiplier*3}, - "subq1.neighbours.sort", "id desc")//, - ,asserts.toArray(new String[]{}) - // ) - ); - + // System.out.println(asserts); + assertQ( + "dave works at both dept with other folks", + // System.out.println(h.query( + req( + new String[] { + "q", + "name_s:dave", + "indent", + "true", + "fl", + "dept_ss_dv,name_s_dv,subq1:[subquery]", + "rows", + "" + peopleMultiplier, + "subq1.q", + "{!terms f=dept_id_s v=$row.dept_ss_dv}", + "subq1.fl", + "dept_id_i_dv,text_t,dept_id_s_dv,neighbours:[subquery]", + "subq1.indent", + "true", + "subq1.rows", + "" + (deptMultiplier * 2), + "subq1.neighbours.q", // flipping via numbers + random().nextBoolean() + ? 
"{!terms f=dept_ss_dv v=$row.dept_id_s_dv}" + : "{!terms f=dept_is v=$row.dept_id_i_dv}", + "subq1.neighbours.fl", + "name_s_dv", + "subq1.neighbours.rows", + "" + peopleMultiplier * 3 + }, + "subq1.neighbours.sort", + "id desc") // , + , + asserts.toArray(new String[] {}) + // ) + ); } - + @Test public void testNoExplicitName() throws Exception { - String[] john = new String[]{"q","name_s:john", "indent","true", - "fl","name_s_dv," - + "[subquery]", - "rows","" + (2 * peopleMultiplier), - "depts.q","+{!term f=dept_id_s v=$row.dept_ss_dv}^=0 _val_:id_i", - "depts.fl","id", - "depts.fl","score", - "depts.indent","true", - "depts.rows",""+deptMultiplier, - "depts.start","0"}; - + String[] john = + new String[] { + "q", + "name_s:john", + "indent", + "true", + "fl", + "name_s_dv," + "[subquery]", + "rows", + "" + (2 * peopleMultiplier), + "depts.q", + "+{!term f=dept_id_s v=$row.dept_ss_dv}^=0 _val_:id_i", + "depts.fl", + "id", + "depts.fl", + "score", + "depts.indent", + "true", + "depts.rows", + "" + deptMultiplier, + "depts.start", + "0" + }; + assertQEx("no prefix, no subquery", req(john), ErrorCode.BAD_REQUEST); - - - assertQEx("no prefix, no subsubquery", - req("q","name_s:john", "indent","true", - "fl","name_s_dv," - + "depts:[subquery]", - "rows","" + (2 * peopleMultiplier), - "depts.q","+{!term f=dept_id_s v=$row.dept_ss_dv}^=0 _val_:id_i", - "depts.fl","id", - "depts.fl","score", - "depts.fl","[subquery]",// <- here is a trouble - "depts.indent","true", - "depts.rows",""+deptMultiplier, - "depts.start","0"), ErrorCode.BAD_REQUEST); + + assertQEx( + "no prefix, no subsubquery", + req( + "q", + "name_s:john", + "indent", + "true", + "fl", + "name_s_dv," + "depts:[subquery]", + "rows", + "" + (2 * peopleMultiplier), + "depts.q", + "+{!term f=dept_id_s v=$row.dept_ss_dv}^=0 _val_:id_i", + "depts.fl", + "id", + "depts.fl", + "score", + "depts.fl", + "[subquery]", // <- here is a trouble + "depts.indent", + "true", + "depts.rows", + "" + deptMultiplier, + "depts.start", + "0"), + ErrorCode.BAD_REQUEST); } - + @Test public void testDupePrefix() throws Exception { - assertQEx("subquery name clash", req(new String[]{"q","name_s:(john nancy)", "indent","true", - "fl","name_s_dv,depts:[subquery]", - "fl","depts:[subquery]", - "rows","" + (2 * peopleMultiplier), - "depts.q","{!term f=dept_id_s v=$row.dept_ss_dv}", - "depts.fl","text_t", - "depts.indent","true", - "depts.rows",""+deptMultiplier, - - "depts_i.q","{!term f=dept_id_i v=$depts_i.row.dept_i_dv}", - "depts_i.fl","text_t", // multi val subquery param check - "depts_i.fl","dept_id_s_dv", - "depts_i.indent","true", - "depts_i.rows",""+deptMultiplier} - ), ErrorCode.BAD_REQUEST); + assertQEx( + "subquery name clash", + req( + new String[] { + "q", + "name_s:(john nancy)", + "indent", + "true", + "fl", + "name_s_dv,depts:[subquery]", + "fl", + "depts:[subquery]", + "rows", + "" + (2 * peopleMultiplier), + "depts.q", + "{!term f=dept_id_s v=$row.dept_ss_dv}", + "depts.fl", + "text_t", + "depts.indent", + "true", + "depts.rows", + "" + deptMultiplier, + "depts_i.q", + "{!term f=dept_id_i v=$depts_i.row.dept_i_dv}", + "depts_i.fl", + "text_t", // multi val subquery param check + "depts_i.fl", + "dept_id_s_dv", + "depts_i.indent", + "true", + "depts_i.rows", + "" + deptMultiplier + }), + ErrorCode.BAD_REQUEST); } - + @Test public void testJustJohnJson() throws Exception { - + final SolrQueryRequest johnTwoFL = req(johnAndNancyParams); ModifiableSolrParams params = new ModifiableSolrParams(johnTwoFL.getParams()); - 
params.set("q","name_s:john"); + params.set("q", "name_s:john"); johnTwoFL.setParams(params); - assertJQ(johnTwoFL, + assertJQ( + johnTwoFL, "/response/docs/[0]/depts/docs/[0]=={text_t:\"These guys develop stuff\"}", - "/response/docs/[" + (peopleMultiplier-1) + "]/depts/docs/[" + (deptMultiplier-1) + "]=={text_t:\"These guys develop stuff\"}", - - "/response/docs/[0]/depts_i/docs/[0]=={dept_id_s_dv:\"Engineering\", text_t:\"These guys develop stuff\"}",// seem like key order doesn't matter , well - "/response/docs/[" + (peopleMultiplier-1) + "]/depts_i/docs/[" + (deptMultiplier-1) + "]==" + "/response/docs/[" + + (peopleMultiplier - 1) + + "]/depts/docs/[" + + (deptMultiplier - 1) + + "]=={text_t:\"These guys develop stuff\"}", + "/response/docs/[0]/depts_i/docs/[0]=={dept_id_s_dv:\"Engineering\", text_t:\"These guys develop stuff\"}", // seem like key order doesn't matter , well + "/response/docs/[" + + (peopleMultiplier - 1) + + "]/depts_i/docs/[" + + (deptMultiplier - 1) + + "]==" + "{text_t:\"These guys develop stuff\", dept_id_s_dv:\"Engineering\"}"); } - + @SuppressWarnings("unchecked") @Test public void testJustJohnJavabin() throws Exception { final SolrQueryRequest johnTwoFL = req(johnAndNancyParams); ModifiableSolrParams params = new ModifiableSolrParams(johnTwoFL.getParams()); - params.set("q","name_s:john"); - params.set("wt","javabin"); - + params.set("q", "name_s:john"); + params.set("wt", "javabin"); + johnTwoFL.setParams(params); - + final NamedList unmarshalled; SolrCore core = johnTwoFL.getCore(); SolrQueryResponse rsp = new SolrQueryResponse(); SolrRequestInfo.setRequestInfo(new SolrRequestInfo(johnTwoFL, rsp)); - SolrQueryResponse response = h.queryAndResponse( - johnTwoFL.getParams().get(CommonParams.QT), johnTwoFL); + SolrQueryResponse response = + h.queryAndResponse(johnTwoFL.getParams().get(CommonParams.QT), johnTwoFL); - BinaryQueryResponseWriter responseWriter = (BinaryQueryResponseWriter) core.getQueryResponseWriter(johnTwoFL); + BinaryQueryResponseWriter responseWriter = + (BinaryQueryResponseWriter) core.getQueryResponseWriter(johnTwoFL); ByteArrayOutputStream bytes = new ByteArrayOutputStream(); responseWriter.write(bytes, johnTwoFL, response); try (JavaBinCodec jbc = new JavaBinCodec()) { - unmarshalled = (NamedList) jbc.unmarshal( - new ByteArrayInputStream(bytes.toByteArray())); + unmarshalled = + (NamedList) jbc.unmarshal(new ByteArrayInputStream(bytes.toByteArray())); } johnTwoFL.close(); SolrRequestInfo.clearRequestInfo(); - - SolrDocumentList resultDocs = (SolrDocumentList)(unmarshalled.get("response")); - - Map engText = new HashMap<>(); - engText.put("text_t", "These guys develop stuff"); - - Map engId = new HashMap<>(); - engId.put("text_t", "These guys develop stuff"); - engId.put("dept_id_s_dv", "Engineering"); - - for (int docNum : new int []{0, peopleMultiplier-1}) { - SolrDocument employeeDoc = resultDocs.get(docNum); - assertEquals("john", employeeDoc.getFieldValue("name_s_dv")); - for (String subResult : new String []{"depts", "depts_i"}) { - - SolrDocumentList subDoc = (SolrDocumentList)employeeDoc.getFieldValue(subResult); - for (int deptNum : new int []{0, deptMultiplier-1}) { - SolrDocument deptDoc = subDoc.get(deptNum); - Object expectedDept = (subResult.equals("depts") ? 
engText : engId); - assertTrue( "" + expectedDept + " equals to " + deptDoc, - expectedDept.equals(deptDoc)); - } + + SolrDocumentList resultDocs = (SolrDocumentList) (unmarshalled.get("response")); + + Map<String, String> engText = new HashMap<>(); + engText.put("text_t", "These guys develop stuff"); + + Map<String, String> engId = new HashMap<>(); + engId.put("text_t", "These guys develop stuff"); + engId.put("dept_id_s_dv", "Engineering"); + + for (int docNum : new int[] {0, peopleMultiplier - 1}) { + SolrDocument employeeDoc = resultDocs.get(docNum); + assertEquals("john", employeeDoc.getFieldValue("name_s_dv")); + for (String subResult : new String[] {"depts", "depts_i"}) { + + SolrDocumentList subDoc = (SolrDocumentList) employeeDoc.getFieldValue(subResult); + for (int deptNum : new int[] {0, deptMultiplier - 1}) { + SolrDocument deptDoc = subDoc.get(deptNum); + Object expectedDept = (subResult.equals("depts") ? engText : engId); + assertTrue("" + expectedDept + " equals to " + deptDoc, expectedDept.equals(deptDoc)); + } } } } - + @Test public void testExceptionPropagation() throws Exception { - final SolrQueryRequest r = req("q","name_s:dave", "indent","true", - "fl","depts:[subquery]", - "rows","" + ( peopleMultiplier), - "depts.q","{!lucene}(", - "depts.fl","text_t", - "depts.indent","true", - "depts.rows",""+(deptMultiplier*2), - "depts.logParamsList","q,fl,rows,subq1.row.dept_ss_dv"); - - // System.out.println(h.query(r)); - - assertQEx("wrong subquery", - r, + final SolrQueryRequest r = + req( + "q", + "name_s:dave", + "indent", + "true", + "fl", + "depts:[subquery]", + "rows", + "" + (peopleMultiplier), + "depts.q", + "{!lucene}(", + "depts.fl", + "text_t", + "depts.indent", + "true", + "depts.rows", + "" + (deptMultiplier * 2), + "depts.logParamsList", + "q,fl,rows,subq1.row.dept_ss_dv"); + + // System.out.println(h.query(r)); + + assertQEx("wrong subquery", r, ErrorCode.BAD_REQUEST); + + assertQEx( + "", + req( + "q", + "name_s:dave", + "indent", + "true", + "fl", + "depts:[subquery]", + "rows", + "1", + "depts.q", + "{!lucene}", + "depts.fl", + "text_t", + "depts.indent", + "true", + "depts.rows", + "NAN", + "depts.logParamsList", + "q,fl,rows,subq1.row.dept_ss_dv"), ErrorCode.BAD_REQUEST); - - assertQEx( "", req("q","name_s:dave", "indent","true", - "fl","depts:[subquery]", - "rows","1", - "depts.q","{!lucene}", - "depts.fl","text_t", - "depts.indent","true", - "depts.rows","NAN", - "depts.logParamsList","q,fl,rows,subq1.row.dept_ss_dv"), - ErrorCode.BAD_REQUEST); } - + @Test public void testMultiValue() throws Exception { - - String [] happyPathAsserts = new String[]{ - "count(//result/doc/str[@name='name_s_dv'][.='dave']/../result[@name='subq1']/doc/str[@name='text_t'][.='These guys develop stuff'])="+ - (peopleMultiplier * deptMultiplier), - "count(//result/doc/str[@name='name_s_dv'][.='dave']/../result[@name='subq1']/doc/str[@name='text_t'][.='These guys help customers'])="+ - (peopleMultiplier * deptMultiplier), - "//result[@numFound="+peopleMultiplier+"]"}; + + String[] happyPathAsserts = + new String[] { + "count(//result/doc/str[@name='name_s_dv'][.='dave']/../result[@name='subq1']/doc/str[@name='text_t'][.='These guys develop stuff'])=" + + (peopleMultiplier * deptMultiplier), + "count(//result/doc/str[@name='name_s_dv'][.='dave']/../result[@name='subq1']/doc/str[@name='text_t'][.='These guys help customers'])=" + + (peopleMultiplier * deptMultiplier), + "//result[@numFound=" + peopleMultiplier + "]" + }; Random random1 = random(); - - assertQ("dave works at both, whether we set a default 
separator or both", - req(new String[]{"q","name_s:dave", "indent","true", - "fl", (random().nextBoolean() ? "name_s_dv,dept_ss_dv" : "*") + - ",subq1:[subquery " +((random1.nextBoolean() ? "" : "separator=,"))+"]", - "rows","" + peopleMultiplier, - "subq1.q","{!terms f=dept_id_s v=$row.dept_ss_dv "+((random1.nextBoolean() ? "" : "separator=,"))+"}", - "subq1.fl","text_t", - "subq1.indent","true", - "subq1.rows",""+(deptMultiplier*2), - "subq1.logParamsList","q,fl,rows,row.dept_ss_dv"}), - happyPathAsserts - ); - - assertQ("even via numbers", - req("q","name_s:dave", "indent","true", - "fl","dept_is_dv,name_s_dv,subq1:[subquery]", - "rows","" + ( peopleMultiplier), - "subq1.q","{!terms f=dept_id_i v=$row.dept_is_dv}", - "subq1.fl","text_t", - "subq1.indent","true", - "subq1.rows",""+(deptMultiplier*2)), - happyPathAsserts - ); - - - assertQ("even if we set a separator both", - req("q","name_s:dave", "indent","true", - "fl","dept_ss_dv,name_s_dv,name_s_dv,subq1:[subquery separator=\" \"]", - "rows","" + ( peopleMultiplier), - "subq1.q","{!terms f=dept_id_s v=$row.dept_ss_dv separator=\" \"}", - "subq1.fl","text_t", - "subq1.indent","true", - "subq1.rows",""+(deptMultiplier*2)), - happyPathAsserts - ); - - String [] noMatchAtSubQ = new String[] { - "count(//result/doc/str[@name='name_s_dv'][.='dave']/../result[@name='subq1'][@numFound=0])="+ - (peopleMultiplier), - "//result[@numFound="+peopleMultiplier+"]" }; - - assertQ("different separators, no match", - req("q","name_s:dave", "indent","true", - "fl","dept_ss_dv,name_s_dv,subq1:[subquery]", - "rows","" + ( peopleMultiplier), - "subq1.q","{!terms f=dept_id_s v=$row.dept_ss_dv separator=\" \"}", - "subq1.fl","text_t", - "subq1.indent","true", - "subq1.rows",""+(deptMultiplier*2)), - noMatchAtSubQ - ); - - assertQ("and no matter where", - req("q","name_s:dave", "indent","true", - "fl","dept_ss_dv,name_s_dv,subq1:[subquery separator=\" \"]", - "rows","" + ( peopleMultiplier), - "subq1.q","{!terms f=dept_id_s v=$row.dept_ss_dv}", - "subq1.fl","text_t", - "subq1.indent","true", - "subq1.rows",""+(deptMultiplier*2)), - noMatchAtSubQ - ); - - assertQ("setting a wrong parser gets you nowhere", - req("q","name_s:dave", "indent","true", - "fl","dept_ss_dv,name_s_dv,subq1:[subquery]", - "rows","" + ( peopleMultiplier), - "subq1.q","{!term f=dept_id_s v=$row.dept_ss_dv}", - "subq1.fl","text_t", - "subq1.indent","true", - "subq1.rows",""+(deptMultiplier*2)), - noMatchAtSubQ - ); - - assertQ("but it luckily works with default query parser, but it's not really reliable", - req("q","name_s:dave", "indent","true", - "fl","dept_ss_dv,name_s_dv,subq1:[subquery separator=\" \"]", - "rows","" + ( peopleMultiplier), - "subq1.q","{!lucene df=dept_id_s v=$row.dept_ss_dv}", - "subq1.fl","text_t", - "subq1.indent","true", - "subq1.rows",""+(deptMultiplier*2)), - happyPathAsserts - ); - - assertQ("even lucene qp can't help at any separator but space", - req("q","name_s:dave", "indent","true", - "fl","dept_ss_dv,name_s_dv," - + "subq1:[subquery "+(random().nextBoolean() ? "" : "separator=" +((random().nextBoolean() ? "" : ",")))+"]", - "rows","" + ( peopleMultiplier), - "subq1.q","{!lucene df=dept_id_s v=$row.dept_ss_dv}", - "subq1.fl","text_t", - "subq1.indent","true", - "subq1.rows",""+(deptMultiplier*2)), - noMatchAtSubQ - ); + + assertQ( + "dave works at both, whether we set a default separator or both", + req( + new String[] { + "q", + "name_s:dave", + "indent", + "true", + "fl", + (random().nextBoolean() ? 
"name_s_dv,dept_ss_dv" : "*") + + ",subq1:[subquery " + + ((random1.nextBoolean() ? "" : "separator=,")) + + "]", + "rows", + "" + peopleMultiplier, + "subq1.q", + "{!terms f=dept_id_s v=$row.dept_ss_dv " + + ((random1.nextBoolean() ? "" : "separator=,")) + + "}", + "subq1.fl", + "text_t", + "subq1.indent", + "true", + "subq1.rows", + "" + (deptMultiplier * 2), + "subq1.logParamsList", + "q,fl,rows,row.dept_ss_dv" + }), + happyPathAsserts); + + assertQ( + "even via numbers", + req( + "q", + "name_s:dave", + "indent", + "true", + "fl", + "dept_is_dv,name_s_dv,subq1:[subquery]", + "rows", + "" + (peopleMultiplier), + "subq1.q", + "{!terms f=dept_id_i v=$row.dept_is_dv}", + "subq1.fl", + "text_t", + "subq1.indent", + "true", + "subq1.rows", + "" + (deptMultiplier * 2)), + happyPathAsserts); + + assertQ( + "even if we set a separator both", + req( + "q", + "name_s:dave", + "indent", + "true", + "fl", + "dept_ss_dv,name_s_dv,name_s_dv,subq1:[subquery separator=\" \"]", + "rows", + "" + (peopleMultiplier), + "subq1.q", + "{!terms f=dept_id_s v=$row.dept_ss_dv separator=\" \"}", + "subq1.fl", + "text_t", + "subq1.indent", + "true", + "subq1.rows", + "" + (deptMultiplier * 2)), + happyPathAsserts); + + String[] noMatchAtSubQ = + new String[] { + "count(//result/doc/str[@name='name_s_dv'][.='dave']/../result[@name='subq1'][@numFound=0])=" + + (peopleMultiplier), + "//result[@numFound=" + peopleMultiplier + "]" + }; + + assertQ( + "different separators, no match", + req( + "q", + "name_s:dave", + "indent", + "true", + "fl", + "dept_ss_dv,name_s_dv,subq1:[subquery]", + "rows", + "" + (peopleMultiplier), + "subq1.q", + "{!terms f=dept_id_s v=$row.dept_ss_dv separator=\" \"}", + "subq1.fl", + "text_t", + "subq1.indent", + "true", + "subq1.rows", + "" + (deptMultiplier * 2)), + noMatchAtSubQ); + + assertQ( + "and no matter where", + req( + "q", + "name_s:dave", + "indent", + "true", + "fl", + "dept_ss_dv,name_s_dv,subq1:[subquery separator=\" \"]", + "rows", + "" + (peopleMultiplier), + "subq1.q", + "{!terms f=dept_id_s v=$row.dept_ss_dv}", + "subq1.fl", + "text_t", + "subq1.indent", + "true", + "subq1.rows", + "" + (deptMultiplier * 2)), + noMatchAtSubQ); + + assertQ( + "setting a wrong parser gets you nowhere", + req( + "q", + "name_s:dave", + "indent", + "true", + "fl", + "dept_ss_dv,name_s_dv,subq1:[subquery]", + "rows", + "" + (peopleMultiplier), + "subq1.q", + "{!term f=dept_id_s v=$row.dept_ss_dv}", + "subq1.fl", + "text_t", + "subq1.indent", + "true", + "subq1.rows", + "" + (deptMultiplier * 2)), + noMatchAtSubQ); + + assertQ( + "but it luckily works with default query parser, but it's not really reliable", + req( + "q", + "name_s:dave", + "indent", + "true", + "fl", + "dept_ss_dv,name_s_dv,subq1:[subquery separator=\" \"]", + "rows", + "" + (peopleMultiplier), + "subq1.q", + "{!lucene df=dept_id_s v=$row.dept_ss_dv}", + "subq1.fl", + "text_t", + "subq1.indent", + "true", + "subq1.rows", + "" + (deptMultiplier * 2)), + happyPathAsserts); + + assertQ( + "even lucene qp can't help at any separator but space", + req( + "q", + "name_s:dave", + "indent", + "true", + "fl", + "dept_ss_dv,name_s_dv," + + "subq1:[subquery " + + (random().nextBoolean() + ? "" + : "separator=" + ((random().nextBoolean() ? 
"" : ","))) + + "]", + "rows", + "" + (peopleMultiplier), + "subq1.q", + "{!lucene df=dept_id_s v=$row.dept_ss_dv}", + "subq1.fl", + "text_t", + "subq1.indent", + "true", + "subq1.rows", + "" + (deptMultiplier * 2)), + noMatchAtSubQ); } static String[] daveMultiValueSearchParams(Random random, int peopleMult, int deptMult) { - return new String[]{"q","name_s:dave", "indent","true", - "fl",(random().nextBoolean() ? "name_s_dv" : "*")+ //"dept_ss_dv, - ",subq1:[subquery " - +((random.nextBoolean() ? "" : "separator=,"))+"]", - "rows","" + peopleMult, - "subq1.q","{!terms f=dept_id_s v=$row.dept_ss_dv "+((random.nextBoolean() ? "" : "separator=,"))+"}", - "subq1.fl","text_t", - "subq1.indent","true", - "subq1.rows",""+(deptMult*2), - "subq1.logParamsList","q,fl,rows,row.dept_ss_dv"}; + return new String[] { + "q", + "name_s:dave", + "indent", + "true", + "fl", + (random().nextBoolean() ? "name_s_dv" : "*") + + // "dept_ss_dv, + ",subq1:[subquery " + + ((random.nextBoolean() ? "" : "separator=,")) + + "]", + "rows", + "" + peopleMult, + "subq1.q", + "{!terms f=dept_id_s v=$row.dept_ss_dv " + + ((random.nextBoolean() ? "" : "separator=,")) + + "}", + "subq1.fl", + "text_t", + "subq1.indent", + "true", + "subq1.rows", + "" + (deptMult * 2), + "subq1.logParamsList", + "q,fl,rows,row.dept_ss_dv" + }; } } diff --git a/solr/core/src/test/org/apache/solr/response/transform/TestSubQueryTransformerCrossCore.java b/solr/core/src/test/org/apache/solr/response/transform/TestSubQueryTransformerCrossCore.java index 5a0356c6d21..8b350606cce 100644 --- a/solr/core/src/test/org/apache/solr/response/transform/TestSubQueryTransformerCrossCore.java +++ b/solr/core/src/test/org/apache/solr/response/transform/TestSubQueryTransformerCrossCore.java @@ -16,6 +16,7 @@ */ package org.apache.solr.response.transform; +import com.google.common.collect.ImmutableMap; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.core.CoreContainer; @@ -26,8 +27,6 @@ import org.junit.BeforeClass; import org.junit.Test; -import com.google.common.collect.ImmutableMap; - public class TestSubQueryTransformerCrossCore extends SolrTestCaseJ4 { private static SolrCore fromCore; @@ -35,32 +34,134 @@ public class TestSubQueryTransformerCrossCore extends SolrTestCaseJ4 { @BeforeClass public static void beforeTests() throws Exception { System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_ - initCore("solrconfig-basic.xml","schema-docValuesJoin.xml"); + initCore("solrconfig-basic.xml", "schema-docValuesJoin.xml"); final CoreContainer coreContainer = h.getCoreContainer(); - fromCore = coreContainer.create("fromCore", //FileSystems.getDefault().getPath( TEST_HOME()), ImmutableMap.of("config","solrconfig-basic.xml","schema","schema-docValuesJoin.xml" - ImmutableMap.of("configSet", "minimal") - ); - assertU(add(doc("id", "1","name_s", "john", "title_s", "Director", "dept_ss_dv","Engineering", - "text_t","These guys develop stuff"))); - assertU(add(doc("id", "2","name_s", "mark", "title_s", "VP", "dept_ss_dv","Marketing", - "text_t","These guys make you look good"))); - assertU(add(doc("id", "3","name_s", "nancy", "title_s", "MTS", "dept_ss_dv","Sales", - "text_t","These guys sell stuff"))); - assertU(add(doc("id", "4","name_s", "dave", "title_s", "MTS", "dept_ss_dv","Support", "dept_ss_dv","Engineering" - , "text_t","These guys help customers"))); - assertU(add(doc("id", "5","name_s", "tina", "title_s", "VP", "dept_ss_dv","Engineering", - "text_t","These guys 
develop stuff"))); + fromCore = + coreContainer.create( + "fromCore", // FileSystems.getDefault().getPath( TEST_HOME()), + // ImmutableMap.of("config","solrconfig-basic.xml","schema","schema-docValuesJoin.xml" + ImmutableMap.of("configSet", "minimal")); + assertU( + add( + doc( + "id", + "1", + "name_s", + "john", + "title_s", + "Director", + "dept_ss_dv", + "Engineering", + "text_t", + "These guys develop stuff"))); + assertU( + add( + doc( + "id", + "2", + "name_s", + "mark", + "title_s", + "VP", + "dept_ss_dv", + "Marketing", + "text_t", + "These guys make you look good"))); + assertU( + add( + doc( + "id", + "3", + "name_s", + "nancy", + "title_s", + "MTS", + "dept_ss_dv", + "Sales", + "text_t", + "These guys sell stuff"))); + assertU( + add( + doc( + "id", + "4", + "name_s", + "dave", + "title_s", + "MTS", + "dept_ss_dv", + "Support", + "dept_ss_dv", + "Engineering", + "text_t", + "These guys help customers"))); + assertU( + add( + doc( + "id", + "5", + "name_s", + "tina", + "title_s", + "VP", + "dept_ss_dv", + "Engineering", + "text_t", + "These guys develop stuff"))); assertU(commit()); - update(fromCore, add(doc("id","10", "dept_id_s", "Engineering", "text_t","These guys develop stuff", "salary_i_dv", "1000"))); - update(fromCore, add(doc("id","11", "dept_id_s", "Marketing", "text_t","These guys make you look good","salary_i_dv", "1500"))); - update(fromCore, add(doc("id","12", "dept_id_s", "Sales", "text_t","These guys sell stuff","salary_i_dv", "1600"))); - update(fromCore, add(doc("id","13", "dept_id_s", "Support", "text_t","These guys help customers","salary_i_dv", "800"))); + update( + fromCore, + add( + doc( + "id", + "10", + "dept_id_s", + "Engineering", + "text_t", + "These guys develop stuff", + "salary_i_dv", + "1000"))); + update( + fromCore, + add( + doc( + "id", + "11", + "dept_id_s", + "Marketing", + "text_t", + "These guys make you look good", + "salary_i_dv", + "1500"))); + update( + fromCore, + add( + doc( + "id", + "12", + "dept_id_s", + "Sales", + "text_t", + "These guys sell stuff", + "salary_i_dv", + "1600"))); + update( + fromCore, + add( + doc( + "id", + "13", + "dept_id_s", + "Support", + "text_t", + "These guys help customers", + "salary_i_dv", + "800"))); update(fromCore, commit()); } - public static String update(SolrCore core, String xml) throws Exception { DirectSolrConnection connection = new DirectSolrConnection(core); SolrRequestHandler handler = core.getRequestHandler("/update"); @@ -69,59 +170,76 @@ public static String update(SolrCore core, String xml) throws Exception { @Test public void testSameCoreSingleField() throws Exception { - assertQ("subq1.fl is limited to single field", - req("q","name_s:john", - "fl","*,depts:[subquery fromIndex=fromCore]", - "depts.q","{!term f=dept_id_s v=$row.dept_ss_dv}", - "depts.fl","text_t"), + assertQ( + "subq1.fl is limited to single field", + req( + "q", "name_s:john", + "fl", "*,depts:[subquery fromIndex=fromCore]", + "depts.q", "{!term f=dept_id_s v=$row.dept_ss_dv}", + "depts.fl", "text_t"), "//result/doc/str[@name='name_s_dv'][.='john']/../result[@name='depts']/doc/str[@name='text_t'][.='These guys develop stuff']", - "count(//result/doc/str[@name='name_s_dv'][.='john']/../result[@name='depts']/doc/*)=1");// only text_t + "count(//result/doc/str[@name='name_s_dv'][.='john']/../result[@name='depts']/doc/*)=1"); // only text_t } @Test public void testAbsentCore() throws Exception { - assertQEx("from index not exist", - req("q","name_s:dave", - "fl","*,depts:[subquery fromIndex=fromCore2]", - 
"depts.q","{!term f=dept_id_s v=$row.dept_ss_dv}", - "depts.fl","text_t"), - SolrException.ErrorCode.BAD_REQUEST - ); - + assertQEx( + "from index not exist", + req( + "q", "name_s:dave", + "fl", "*,depts:[subquery fromIndex=fromCore2]", + "depts.q", "{!term f=dept_id_s v=$row.dept_ss_dv}", + "depts.fl", "text_t"), + SolrException.ErrorCode.BAD_REQUEST); } - + @Test public void testCrossCoreSubQueryTransformer() throws Exception { - - assertQ("make sure request is parsed in this core", - req("q","name_s:john", - "fl","*,depts:[subquery]", + + assertQ( + "make sure request is parsed in this core", + req( + "q", + "name_s:john", + "fl", + "*,depts:[subquery]", // text is tokenized and can be found, despite there is no substitution magic - "depts.q","{!field f=text_t}These guys"), - "//result/doc/str[@name='name_s_dv'][.='john']/../result[@name='depts']/doc" - ); - - assertQ("make sure request is parsed in that core", - req("q","name_s:john", - "fl","*,depts:[subquery fromIndex=fromCore]", + "depts.q", + "{!field f=text_t}These guys"), + "//result/doc/str[@name='name_s_dv'][.='john']/../result[@name='depts']/doc"); + + assertQ( + "make sure request is parsed in that core", + req( + "q", + "name_s:john", + "fl", + "*,depts:[subquery fromIndex=fromCore]", // text is NOT tokenized and can NOT be found - "depts.q","{!field f=text_t}These guys"), - "count(//result/doc/str[@name='name_s_dv'][.='john']/../result[@name='depts']/doc)=0" - ); + "depts.q", + "{!field f=text_t}These guys"), + "count(//result/doc/str[@name='name_s_dv'][.='john']/../result[@name='depts']/doc)=0"); - assertQ("make sure request is parsed in that core", - req("q","-name_s:dave", "indent", "true", - "fl","*,depts:[subquery fromIndex=fromCore]", - // stored text (text_t is string in minimal configset) can be found as - "depts.q","{!field f=text_t v=$row.text_t}", - "depts.fl", "dept_id_s" ), - "//result/doc/str[@name='name_s_dv'][.='john']/.." - + "/result[@name='depts']/doc/str[@name='dept_id_s'][.='Engineering']", - "//result/doc/str[@name='name_s_dv'][.='tina']/.." - + "/result[@name='depts']/doc/str[@name='dept_id_s'][.='Engineering']", - "//result/doc/str[@name='name_s_dv'][.='mark']/.." - + "/result[@name='depts']/doc/str[@name='dept_id_s'][.='Marketing']" - ); + assertQ( + "make sure request is parsed in that core", + req( + "q", + "-name_s:dave", + "indent", + "true", + "fl", + "*,depts:[subquery fromIndex=fromCore]", + // stored text (text_t is string in minimal configset) can be found as + "depts.q", + "{!field f=text_t v=$row.text_t}", + "depts.fl", + "dept_id_s"), + "//result/doc/str[@name='name_s_dv'][.='john']/.." + + "/result[@name='depts']/doc/str[@name='dept_id_s'][.='Engineering']", + "//result/doc/str[@name='name_s_dv'][.='tina']/.." + + "/result[@name='depts']/doc/str[@name='dept_id_s'][.='Engineering']", + "//result/doc/str[@name='name_s_dv'][.='mark']/.." 
+ + "/result[@name='depts']/doc/str[@name='dept_id_s'][.='Marketing']"); } @AfterClass diff --git a/solr/core/src/test/org/apache/solr/response/transform/TestSubQueryTransformerDistrib.java b/solr/core/src/test/org/apache/solr/response/transform/TestSubQueryTransformerDistrib.java index e85afe685f0..1e604d4f470 100644 --- a/solr/core/src/test/org/apache/solr/response/transform/TestSubQueryTransformerDistrib.java +++ b/solr/core/src/test/org/apache/solr/response/transform/TestSubQueryTransformerDistrib.java @@ -28,7 +28,6 @@ import java.util.List; import java.util.Map; import java.util.Random; - import org.apache.solr.JSONTestUtil; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -48,32 +47,30 @@ @org.apache.solr.SolrTestCaseJ4.SuppressSSL() public class TestSubQueryTransformerDistrib extends SolrCloudTestCase { - + private static final String support = "These guys help customers"; private static final String engineering = "These guys develop stuff"; - final static String people = "people"; - final static String depts = "departments"; + static final String people = "people"; + static final String depts = "departments"; private static boolean differentUniqueId; - + @BeforeClass public static void setupCluster() throws Exception { - + differentUniqueId = random().nextBoolean(); final Path configDir = TEST_COLL1_CONF(); String configName = "solrCloudCollectionConfig"; int nodeCount = 5; - configureCluster(nodeCount) - .addConfig(configName, configDir) - .configure(); - + configureCluster(nodeCount).addConfig(configName, configDir).configure(); + Map collectionProperties = new HashMap<>(); - collectionProperties.put("config", "solrconfig-doctransformers.xml" ); - collectionProperties.put("schema", "schema-docValuesJoin.xml"); + collectionProperties.put("config", "solrconfig-doctransformers.xml"); + collectionProperties.put("schema", "schema-docValuesJoin.xml"); int shards = 2; - int replicas = 2 ; + int replicas = 2; CollectionAdminRequest.createCollection(people, configName, shards, replicas) .withProperty("config", "solrconfig-doctransformers.xml") .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) @@ -83,152 +80,282 @@ public static void setupCluster() throws Exception { CollectionAdminRequest.createCollection(depts, configName, shards, replicas) .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) .withProperty("config", "solrconfig-doctransformers.xml") - .withProperty("schema", - differentUniqueId ? "schema-minimal-with-another-uniqkey.xml": - "schema-docValuesJoin.xml") + .withProperty( + "schema", + differentUniqueId + ? "schema-minimal-with-another-uniqkey.xml" + : "schema-docValuesJoin.xml") .process(cluster.getSolrClient()); CloudSolrClient client = cluster.getSolrClient(); client.setDefaultCollection(people); - + ZkStateReader zkStateReader = client.getZkStateReader(); AbstractDistribZkTestBase.waitForRecoveriesToFinish(people, zkStateReader, true, true, 30); - + AbstractDistribZkTestBase.waitForRecoveriesToFinish(depts, zkStateReader, false, true, 30); } - - + @SuppressWarnings("serial") @Test public void test() throws Exception { int peopleMultiplier = atLeast(1); int deptMultiplier = atLeast(1); - + createIndex(people, peopleMultiplier, depts, deptMultiplier); - + Random random1 = random(); - - final ModifiableSolrParams params = params( - new String[]{"q","name_s:dave", "indent","true", - "fl","*,depts:[subquery "+((random1.nextBoolean() ? 
"" : "separator=,"))+"]", - "rows","" + peopleMultiplier, - "depts.q","{!terms f=dept_id_s v=$row.dept_ss_dv "+((random1.nextBoolean() ? "" : "separator=,"))+"}", - "depts.fl","text_t"+(differentUniqueId?",id:notid":""), - "depts.sort", "dept_id_i desc", - "depts.indent","true", - "depts.collection","departments", - differentUniqueId ? "depts.distrib.singlePass":"notnecessary","true", - "depts.rows",""+(deptMultiplier*2), - "depts.logParamsList","q,fl,rows,row.dept_ss_dv", - random().nextBoolean()?"depts.wt":"whatever",anyWt(), - random().nextBoolean()?"wt":"whatever",anyWt()}); + + final ModifiableSolrParams params = + params( + new String[] { + "q", + "name_s:dave", + "indent", + "true", + "fl", + "*,depts:[subquery " + ((random1.nextBoolean() ? "" : "separator=,")) + "]", + "rows", + "" + peopleMultiplier, + "depts.q", + "{!terms f=dept_id_s v=$row.dept_ss_dv " + + ((random1.nextBoolean() ? "" : "separator=,")) + + "}", + "depts.fl", + "text_t" + (differentUniqueId ? ",id:notid" : ""), + "depts.sort", + "dept_id_i desc", + "depts.indent", + "true", + "depts.collection", + "departments", + differentUniqueId ? "depts.distrib.singlePass" : "notnecessary", + "true", + "depts.rows", + "" + (deptMultiplier * 2), + "depts.logParamsList", + "q,fl,rows,row.dept_ss_dv", + random().nextBoolean() ? "depts.wt" : "whatever", + anyWt(), + random().nextBoolean() ? "wt" : "whatever", + anyWt() + }); final SolrDocumentList hits; { final QueryRequest qr = new QueryRequest(params); - final QueryResponse rsp = new QueryResponse(); - rsp.setResponse(cluster.getSolrClient().request(qr, people+","+depts)); + final QueryResponse rsp = new QueryResponse(); + rsp.setResponse(cluster.getSolrClient().request(qr, people + "," + depts)); hits = rsp.getResults(); - + assertEquals(peopleMultiplier, hits.getNumFound()); - + int engineerCount = 0; int supportCount = 0; - - for (int res : new int [] {0, (peopleMultiplier-1) /2, peopleMultiplier-1}) { + + for (int res : new int[] {0, (peopleMultiplier - 1) / 2, peopleMultiplier - 1}) { SolrDocument doc = hits.get(res); assertEquals("dave", doc.getFieldValue("name_s_dv")); SolrDocumentList relDepts = (SolrDocumentList) doc.getFieldValue("depts"); - assertEquals("dave works in both depts "+rsp, - deptMultiplier * 2, relDepts.getNumFound()); - for (int deptN = 0 ; deptN < relDepts.getNumFound(); deptN++ ) { + assertEquals("dave works in both depts " + rsp, deptMultiplier * 2, relDepts.getNumFound()); + for (int deptN = 0; deptN < relDepts.getNumFound(); deptN++) { SolrDocument deptDoc = relDepts.get(deptN); String actual = (String) deptDoc.get("text_t"); - assertTrue(deptDoc + "should be either "+engineering +" or "+support, - (engineering.equals(actual) && ++engineerCount>0) || - (support.equals(actual) && ++supportCount>0)); + assertTrue( + deptDoc + "should be either " + engineering + " or " + support, + (engineering.equals(actual) && ++engineerCount > 0) + || (support.equals(actual) && ++supportCount > 0)); } } - assertEquals(hits.toString(), engineerCount, supportCount); + assertEquals(hits.toString(), engineerCount, supportCount); } params.set("wt", "json"); - final URL node = new URL(cluster.getRandomJetty(random()).getBaseUrl().toString() - +"/"+people+"/select"+params.toQueryString()); + final URL node = + new URL( + cluster.getRandomJetty(random()).getBaseUrl().toString() + + "/" + + people + + "/select" + + params.toQueryString()); - try(final InputStream jsonResponse = node.openStream()){ + try (final InputStream jsonResponse = node.openStream()) { final 
ByteArrayOutputStream outBuffer = new ByteArrayOutputStream(); jsonResponse.transferTo(outBuffer); - final Object expected = ((SolrDocumentList) hits.get(0).getFieldValue("depts")).get(0).get("text_t"); - final String err = JSONTestUtil.match("/response/docs/[0]/depts/docs/[0]/text_t" - ,outBuffer.toString(StandardCharsets.UTF_8), - "\""+expected+"\""); - assertNull(err,err); + final Object expected = + ((SolrDocumentList) hits.get(0).getFieldValue("depts")).get(0).get("text_t"); + final String err = + JSONTestUtil.match( + "/response/docs/[0]/depts/docs/[0]/text_t", + outBuffer.toString(StandardCharsets.UTF_8), + "\"" + expected + "\""); + assertNull(err, err); } - } private String anyWt() { - String[] wts = new String[]{"javabin","xml","json"}; + String[] wts = new String[] {"javabin", "xml", "json"}; return wts[random().nextInt(wts.length)]; } - private void createIndex(String people, int peopleMultiplier, String depts, int deptMultiplier) throws SolrServerException, IOException { - - int id=0; + + int id = 0; List peopleDocs = new ArrayList<>(); - for (int p=0; p < peopleMultiplier; p++){ - - peopleDocs.add(add(doc("id", ""+id++,"name_s", "john", "title_s", "Director", - "dept_ss_dv","Engineering", - "dept_i", "0", - "dept_is", "0"))); - peopleDocs.add(add(doc("id", ""+id++,"name_s", "mark", "title_s", "VP", - "dept_ss_dv","Marketing", - "dept_i", "1", - "dept_is", "1"))); - peopleDocs.add(add(doc("id", ""+id++,"name_s", "nancy", "title_s", "MTS", - "dept_ss_dv","Sales", - "dept_i", "2", - "dept_is", "2"))); - peopleDocs.add(add(doc("id", ""+id++,"name_s", "dave", "title_s", "MTS", - "dept_ss_dv","Support", "dept_ss_dv","Engineering", - "dept_i", "3", - "dept_is", "3", "dept_is", "0"))); - peopleDocs.add(add(doc("id", ""+id++,"name_s", "tina", "title_s", "VP", - "dept_ss_dv","Engineering", - "dept_i", "0", - "dept_is", "0"))); + for (int p = 0; p < peopleMultiplier; p++) { + + peopleDocs.add( + add( + doc( + "id", + "" + id++, + "name_s", + "john", + "title_s", + "Director", + "dept_ss_dv", + "Engineering", + "dept_i", + "0", + "dept_is", + "0"))); + peopleDocs.add( + add( + doc( + "id", + "" + id++, + "name_s", + "mark", + "title_s", + "VP", + "dept_ss_dv", + "Marketing", + "dept_i", + "1", + "dept_is", + "1"))); + peopleDocs.add( + add( + doc( + "id", + "" + id++, + "name_s", + "nancy", + "title_s", + "MTS", + "dept_ss_dv", + "Sales", + "dept_i", + "2", + "dept_is", + "2"))); + peopleDocs.add( + add( + doc( + "id", + "" + id++, + "name_s", + "dave", + "title_s", + "MTS", + "dept_ss_dv", + "Support", + "dept_ss_dv", + "Engineering", + "dept_i", + "3", + "dept_is", + "3", + "dept_is", + "0"))); + peopleDocs.add( + add( + doc( + "id", + "" + id++, + "name_s", + "tina", + "title_s", + "VP", + "dept_ss_dv", + "Engineering", + "dept_i", + "0", + "dept_is", + "0"))); } addDocs(people, peopleDocs); List deptsDocs = new ArrayList<>(); - String deptIdField = differentUniqueId? 
"notid":"id"; - for (int d=0; d < deptMultiplier; d++) { - deptsDocs.add(add(doc(deptIdField,""+id++, "dept_id_s", "Engineering", "text_t",engineering, "salary_i_dv", "1000", - "dept_id_i", "0"))); - deptsDocs.add(add(doc(deptIdField,""+id++, "dept_id_s", "Marketing", "text_t","These guys make you look good","salary_i_dv", "1500", - "dept_id_i", "1"))); - deptsDocs.add(add(doc(deptIdField,""+id++, "dept_id_s", "Sales", "text_t","These guys sell stuff","salary_i_dv", "1600", - "dept_id_i", "2"))); - deptsDocs.add(add(doc(deptIdField,""+id++, "dept_id_s", "Support", "text_t",support,"salary_i_dv", "800", - "dept_id_i", "3"))); - + String deptIdField = differentUniqueId ? "notid" : "id"; + for (int d = 0; d < deptMultiplier; d++) { + deptsDocs.add( + add( + doc( + deptIdField, + "" + id++, + "dept_id_s", + "Engineering", + "text_t", + engineering, + "salary_i_dv", + "1000", + "dept_id_i", + "0"))); + deptsDocs.add( + add( + doc( + deptIdField, + "" + id++, + "dept_id_s", + "Marketing", + "text_t", + "These guys make you look good", + "salary_i_dv", + "1500", + "dept_id_i", + "1"))); + deptsDocs.add( + add( + doc( + deptIdField, + "" + id++, + "dept_id_s", + "Sales", + "text_t", + "These guys sell stuff", + "salary_i_dv", + "1600", + "dept_id_i", + "2"))); + deptsDocs.add( + add( + doc( + deptIdField, + "" + id++, + "dept_id_s", + "Support", + "text_t", + support, + "salary_i_dv", + "800", + "dept_id_i", + "3"))); } addDocs(depts, deptsDocs); } - private void addDocs(String collection, List docs) throws SolrServerException, IOException { + private void addDocs(String collection, List docs) + throws SolrServerException, IOException { StringBuilder upd = new StringBuilder(""); - + upd.append("*:*"); - - for (Iterator iterator = docs.iterator(); iterator.hasNext();) { - String add = iterator.next(); + + for (Iterator iterator = docs.iterator(); iterator.hasNext(); ) { + String add = iterator.next(); upd.append(add); if (rarely()) { upd.append(commit("softCommit", "true")); @@ -238,10 +365,10 @@ private void addDocs(String collection, List docs) throws SolrServerExce upd.append(commit("softCommit", "false")); } upd.append(""); - + ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update"); - req.addContentStream(new ContentStreamBase.StringStream(upd.toString(),"text/xml")); - + req.addContentStream(new ContentStreamBase.StringStream(upd.toString(), "text/xml")); + cluster.getSolrClient().request(req, collection); upd.setLength("".length()); } diff --git a/solr/core/src/test/org/apache/solr/rest/SolrRestletTestBase.java b/solr/core/src/test/org/apache/solr/rest/SolrRestletTestBase.java index 354d316c0b8..480c2885708 100644 --- a/solr/core/src/test/org/apache/solr/rest/SolrRestletTestBase.java +++ b/solr/core/src/test/org/apache/solr/rest/SolrRestletTestBase.java @@ -15,29 +15,27 @@ * limitations under the License. */ package org.apache.solr.rest; -import org.apache.solr.util.RestTestBase; -import org.eclipse.jetty.servlet.ServletHolder; -import org.junit.BeforeClass; import java.nio.file.Path; import java.util.Properties; import java.util.SortedMap; import java.util.TreeMap; +import org.apache.solr.util.RestTestBase; +import org.eclipse.jetty.servlet.ServletHolder; +import org.junit.BeforeClass; /** - * Base class for Solr Rest-oriented API tests. Creates jetty and test harness - * with solrconfig.xml and schema-rest.xml. + * Base class for Solr Rest-oriented API tests. Creates jetty and test harness with solrconfig.xml + * and schema-rest.xml. 
* - * Use RestTestBase instead if you need to specialize the solrconfig, - * the schema, or jetty/test harness creation; otherwise you'll get - * imbalanced SolrIndexSearcher closes/opens and a suite-level failure - * for a zombie thread. + *
Use RestTestBase instead if you need to specialize the solrconfig, the schema, or jetty/test + * harness creation; otherwise you'll get imbalanced SolrIndexSearcher closes/opens and a + * suite-level failure for a zombie thread. */ -abstract public class SolrRestletTestBase extends RestTestBase { +public abstract class SolrRestletTestBase extends RestTestBase { /** - * Creates test harness, including "extra" servlets for all - * Solr Restlet Application subclasses. + * Creates test harness, including "extra" servlets for all Solr Restlet Application subclasses. */ @BeforeClass public static void init() throws Exception { @@ -48,7 +46,7 @@ public static void init() throws Exception { System.setProperty("coreRootDirectory", coresDir.toString()); System.setProperty("configSetBaseDir", TEST_HOME()); - final SortedMap extraServlets = new TreeMap<>(); + final SortedMap extraServlets = new TreeMap<>(); Properties props = new Properties(); props.setProperty("name", DEFAULT_TEST_CORENAME); @@ -57,6 +55,7 @@ public static void init() throws Exception { props.setProperty("configSet", "collection1"); writeCoreProperties(coresDir.resolve("core"), props, "SolrRestletTestBase"); - createJettyAndHarness(TEST_HOME(), "solrconfig.xml", "schema-rest.xml", "/solr", true, extraServlets); + createJettyAndHarness( + TEST_HOME(), "solrconfig.xml", "schema-rest.xml", "/solr", true, extraServlets); } } diff --git a/solr/core/src/test/org/apache/solr/rest/TestManagedResource.java b/solr/core/src/test/org/apache/solr/rest/TestManagedResource.java index 2c2aeb66ed2..c2c59fcd280 100644 --- a/solr/core/src/test/org/apache/solr/rest/TestManagedResource.java +++ b/solr/core/src/test/org/apache/solr/rest/TestManagedResource.java @@ -29,7 +29,6 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; - import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.SuppressForbidden; import org.apache.solr.SolrTestCaseJ4; @@ -40,32 +39,31 @@ import org.apache.solr.rest.ManagedResourceStorage.StorageIO; import org.junit.Test; -/** - * Tests {@link ManagedResource} functionality. - */ +/** Tests {@link ManagedResource} functionality. 
*/ public class TestManagedResource extends SolrTestCaseJ4 { /** - * Mock class that acts like an analysis component that depends on - * data managed by a ManagedResource + * Mock class that acts like an analysis component that depends on data managed by a + * ManagedResource */ private static class MockAnalysisComponent implements ManagedResourceObserver { - + private boolean wasNotified = false; @SuppressWarnings("unchecked") @Override - public void onManagedResourceInitialized(NamedList args, ManagedResource res) throws SolrException { + public void onManagedResourceInitialized(NamedList args, ManagedResource res) + throws SolrException { assertEquals("someVal", args.get("someArg")); - assertTrue(res instanceof ManagedTestResource); - ManagedTestResource testRes = (ManagedTestResource)res; - List data = (List)testRes.managedData; + assertTrue(res instanceof ManagedTestResource); + ManagedTestResource testRes = (ManagedTestResource) res; + List data = (List) testRes.managedData; assertTrue(data.contains("1")); assertTrue(data.contains("2")); assertTrue(data.contains("3")); - + wasNotified = true; } } @@ -73,9 +71,9 @@ public void onManagedResourceInitialized(NamedList args, ManagedResource res) private class ManagedTestResource extends ManagedResource { private Object managedData; - - private ManagedTestResource(String resourceId, SolrResourceLoader loader, - StorageIO storageIO) throws SolrException { + + private ManagedTestResource(String resourceId, SolrResourceLoader loader, StorageIO storageIO) + throws SolrException { super(resourceId, loader, storageIO); } @@ -94,7 +92,7 @@ protected void onManagedDataLoadedFromStorage(NamedList managedInitArgs, Obje assertEquals(arg3List, managedInitArgs.get("arg3")); assertEquals(18L, managedInitArgs.get("arg4")); assertEquals(0.9, managedInitArgs.get("arg5")); - Map arg6map = new LinkedHashMap<>(2); + Map arg6map = new LinkedHashMap<>(2); arg6map.put("uno", 1L); arg6map.put("dos", 2L); assertEquals(arg6map, managedInitArgs.get("arg6")); @@ -102,10 +100,9 @@ protected void onManagedDataLoadedFromStorage(NamedList managedInitArgs, Obje this.managedData = managedData; } - // NOTE: These methods are better tested from the REST API // so they are stubbed out here and not used in this test - + @Override protected Object applyUpdatesToManagedData(Object updates) { return null; @@ -115,15 +112,13 @@ protected Object applyUpdatesToManagedData(Object updates) { public void doDeleteChild(BaseSolrResource endpoint, String childId) {} @Override - public void doGet(BaseSolrResource endpoint, String childId) {} + public void doGet(BaseSolrResource endpoint, String childId) {} } - /** - * Implements a Java serialization based storage format. - */ + /** Implements a Java serialization based storage format. 
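The observer contract that MockAnalysisComponent implements above is deliberately small: one callback, fired after the resource has loaded its managed data. A minimal standalone implementation, assuming only the callback signature shown in this file, looks like the following sketch (class name and printed fields are illustrative):

import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.rest.ManagedResource;
import org.apache.solr.rest.ManagedResourceObserver;

public class LoggingObserver implements ManagedResourceObserver {
  @Override
  public void onManagedResourceInitialized(NamedList<?> args, ManagedResource res)
      throws SolrException {
    // args carries the resource's initArgs from storage (e.g. "someArg" in the test)
    System.out.println("managed resource initialized; someArg=" + args.get("someArg"));
  }
}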
*/ @SuppressForbidden(reason = "XXX: security hole") private static class SerializableStorage extends ManagedResourceStorage { - + SerializableStorage(StorageIO storageIO, SolrResourceLoader loader) { super(storageIO, loader); } @@ -148,19 +143,20 @@ public Object load(String resourceId) throws IOException { if (ois != null) { try { ois.close(); - } catch (Exception ignore){} + } catch (Exception ignore) { + } } } - return serialized; + return serialized; } @SuppressForbidden(reason = "XXX: security hole") @Override public void store(String resourceId, Object toStore) throws IOException { if (!(toStore instanceof Serializable)) - throw new IOException("Instance of "+ - toStore.getClass().getName()+" is not Serializable!"); - + throw new IOException( + "Instance of " + toStore.getClass().getName() + " is not Serializable!"); + String storedId = getStoredResourceId(resourceId); ObjectOutputStream oos = null; try { @@ -171,57 +167,59 @@ public void store(String resourceId, Object toStore) throws IOException { if (oos != null) { try { oos.close(); - } catch (Exception ignore){} + } catch (Exception ignore) { + } } - } + } } @Override public String getStoredResourceId(String resourceId) { - return resourceId.replace('/','_')+".bin"; + return resourceId.replace('/', '_') + ".bin"; } - } - + } + private class CustomStorageFormatResource extends ManagedTestResource { - private CustomStorageFormatResource(String resourceId, SolrResourceLoader loader, - StorageIO storageIO) throws SolrException { + private CustomStorageFormatResource( + String resourceId, SolrResourceLoader loader, StorageIO storageIO) throws SolrException { super(resourceId, loader, storageIO); } - + @Override - protected ManagedResourceStorage createStorage(StorageIO storageIO, SolrResourceLoader loader) - throws SolrException - { - return new SerializableStorage(storageIO, loader); + protected ManagedResourceStorage createStorage(StorageIO storageIO, SolrResourceLoader loader) + throws SolrException { + return new SerializableStorage(storageIO, loader); } } /** - * Tests managed data storage to and loading from {@link ManagedResourceStorage.InMemoryStorageIO}. + * Tests managed data storage to and loading from {@link + * ManagedResourceStorage.InMemoryStorageIO}. 
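The close-and-ignore blocks in SerializableStorage above are emulating try-with-resources by hand. A behaviorally equivalent sketch over plain streams (not part of the patch; the Solr-specific stream plumbing is elided) is considerably shorter:

import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;

class SerializableStorageSketch {
  // Reads a single serialized object; the stream is closed even on failure.
  Object load(InputStream in) throws IOException {
    try (ObjectInputStream ois = new ObjectInputStream(in)) {
      return ois.readObject();
    } catch (ClassNotFoundException e) {
      throw new IOException(e);
    }
  }

  // Writes a single serialized object, again without a manual finally block.
  void store(OutputStream out, Object toStore) throws IOException {
    try (ObjectOutputStream oos = new ObjectOutputStream(out)) {
      oos.writeObject(toStore);
    }
  }
}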
*/ @SuppressWarnings("unchecked") @Test public void testLoadingAndStoringOfManagedData() throws Exception { String resourceId = "/config/test/foo"; String storedResourceId = "_config_test_foo.json"; - + MockAnalysisComponent observer = new MockAnalysisComponent(); - List observers = - Arrays.asList((ManagedResourceObserver)observer); - - // put some data in the storage impl so that we can test + List observers = Arrays.asList((ManagedResourceObserver) observer); + + // put some data in the storage impl so that we can test // initialization of managed data from storage - String storedJson = "{'initArgs':{'someArg':'someVal', 'arg2':true, 'arg3':['one','two','three']," - + " 'arg4':18, 'arg5':0.9, 'arg6':{ 'uno':1, 'dos':2}},'" - + ManagedResource.MANAGED_JSON_LIST_FIELD+"':['1','2','3']}"; - ManagedResourceStorage.InMemoryStorageIO storageIO = + String storedJson = + "{'initArgs':{'someArg':'someVal', 'arg2':true, 'arg3':['one','two','three']," + + " 'arg4':18, 'arg5':0.9, 'arg6':{ 'uno':1, 'dos':2}},'" + + ManagedResource.MANAGED_JSON_LIST_FIELD + + "':['1','2','3']}"; + ManagedResourceStorage.InMemoryStorageIO storageIO = new ManagedResourceStorage.InMemoryStorageIO(); storageIO.storage.put(storedResourceId, new BytesRef(json(storedJson))); - - ManagedTestResource res = + + ManagedTestResource res = new ManagedTestResource(resourceId, new SolrResourceLoader(Paths.get("./")), storageIO); res.loadManagedDataAndNotify(observers); - + assertTrue("Observer was not notified by ManagedResource!", observer.wasNotified); // now update the managed data (as if it came from the REST API) @@ -229,35 +227,34 @@ public void testLoadingAndStoringOfManagedData() throws Exception { updatedData.add("1"); updatedData.add("2"); updatedData.add("3"); - updatedData.add("4"); + updatedData.add("4"); res.storeManagedData(updatedData); - Map jsonObject = - (Map) Utils.fromJSONString(storageIO.storage.get(storedResourceId).utf8ToString()); - List jsonList = - (List)jsonObject.get(ManagedResource.MANAGED_JSON_LIST_FIELD); - - assertTrue("Managed data was not updated correctly!", jsonList.contains("4")); + Map jsonObject = + (Map) + Utils.fromJSONString(storageIO.storage.get(storedResourceId).utf8ToString()); + List jsonList = (List) jsonObject.get(ManagedResource.MANAGED_JSON_LIST_FIELD); + + assertTrue("Managed data was not updated correctly!", jsonList.contains("4")); } - + /** - * The ManagedResource storage framework allows the end developer to use a different - * storage format other than JSON, as demonstrated by this test. + * The ManagedResource storage framework allows the end developer to use a different storage + * format other than JSON, as demonstrated by this test. 
*/ @SuppressWarnings("rawtypes") @Test public void testCustomStorageFormat() throws Exception { String resourceId = "/schema/test/foo"; String storedResourceId = "_schema_test_foo.bin"; - + MockAnalysisComponent observer = new MockAnalysisComponent(); - List observers = - Arrays.asList((ManagedResourceObserver)observer); - - // put some data in the storage impl so that we can test + List observers = Arrays.asList((ManagedResourceObserver) observer); + + // put some data in the storage impl so that we can test // initialization of managed data from storage - Map storedData = new HashMap<>(); - Map initArgs = new HashMap<>(); + Map storedData = new HashMap<>(); + Map initArgs = new HashMap<>(); // {'initArgs':{'someArg':'someVal', 'arg2':true, 'arg3':['one','two','three'], // 'arg4':18, 'arg5':0.9, 'arg6':{ 'uno':1, 'dos':2 }},'" @@ -267,7 +264,7 @@ public void testCustomStorageFormat() throws Exception { initArgs.put("arg3", arg3list); initArgs.put("arg4", 18L); initArgs.put("arg5", 0.9); - Map arg6map = new HashMap<>(); + Map arg6map = new HashMap<>(); arg6map.put("uno", 1L); arg6map.put("dos", 2L); initArgs.put("arg6", arg6map); @@ -278,14 +275,15 @@ public void testCustomStorageFormat() throws Exception { managedList.add("2"); managedList.add("3"); storedData.put(ManagedResource.MANAGED_JSON_LIST_FIELD, managedList); - ManagedResourceStorage.InMemoryStorageIO storageIO = + ManagedResourceStorage.InMemoryStorageIO storageIO = new ManagedResourceStorage.InMemoryStorageIO(); - storageIO.storage.put(storedResourceId, ser2bytes((Serializable)storedData)); - - CustomStorageFormatResource res = - new CustomStorageFormatResource(resourceId, new SolrResourceLoader(Paths.get("./")), storageIO); + storageIO.storage.put(storedResourceId, ser2bytes((Serializable) storedData)); + + CustomStorageFormatResource res = + new CustomStorageFormatResource( + resourceId, new SolrResourceLoader(Paths.get("./")), storageIO); res.loadManagedDataAndNotify(observers); - + assertTrue("Observer was not notified by ManagedResource!", observer.wasNotified); // now store some data (as if it came from the REST API) @@ -293,22 +291,19 @@ public void testCustomStorageFormat() throws Exception { updatedData.add("1"); updatedData.add("2"); updatedData.add("3"); - updatedData.add("4"); + updatedData.add("4"); res.storeManagedData(updatedData); - + Object stored = res.storage.load(resourceId); assertNotNull(stored); assertTrue(stored instanceof Map); - Map storedMap = (Map)stored; + Map storedMap = (Map) stored; assertNotNull(storedMap.get("initArgs")); - List storedList = (List)storedMap.get(ManagedResource.MANAGED_JSON_LIST_FIELD); + List storedList = (List) storedMap.get(ManagedResource.MANAGED_JSON_LIST_FIELD); assertTrue(storedList.contains("4")); } - - /** - * Converts the given Serializable object to bytes - */ + /** Converts the given Serializable object to bytes */ @SuppressForbidden(reason = "XXX: security hole") private BytesRef ser2bytes(Serializable ser) throws Exception { ByteArrayOutputStream out = new ByteArrayOutputStream(); @@ -321,9 +316,10 @@ private BytesRef ser2bytes(Serializable ser) throws Exception { if (oos != null) { try { oos.close(); - } catch (Exception ignore){} + } catch (Exception ignore) { + } } - } - return new BytesRef(out.toByteArray()); + } + return new BytesRef(out.toByteArray()); } } diff --git a/solr/core/src/test/org/apache/solr/rest/TestManagedResourceStorage.java b/solr/core/src/test/org/apache/solr/rest/TestManagedResourceStorage.java index 061d31c2ab4..77636f531e1 100644 --- 
a/solr/core/src/test/org/apache/solr/rest/TestManagedResourceStorage.java +++ b/solr/core/src/test/org/apache/solr/rest/TestManagedResourceStorage.java @@ -15,6 +15,7 @@ * limitations under the License. */ package org.apache.solr.rest; + import java.io.File; import java.nio.file.Paths; import java.util.ArrayList; @@ -22,7 +23,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.solr.cloud.AbstractZkTestCase; import org.apache.solr.common.util.NamedList; @@ -33,19 +33,14 @@ import org.apache.solr.rest.ManagedResourceStorage.ZooKeeperStorageIO; import org.junit.Test; -/** - * Depends on ZK for testing ZooKeeper backed storage logic. - */ +/** Depends on ZK for testing ZooKeeper backed storage logic. */ @Slow -// commented 4-Sep-2018 @LuceneTestCase.BadApple(bugUrl = "https://issues.apache.org/jira/browse/SOLR-6443") public class TestManagedResourceStorage extends AbstractZkTestCase { - /** - * Runs persisted managed resource creation and update tests on Zookeeper storage. - */ + /** Runs persisted managed resource creation and update tests on Zookeeper storage. */ @Test public void testZkBasedJsonStorage() throws Exception { - + // test using ZooKeeper assertTrue("Not using ZooKeeper", h.getCoreContainer().isZooKeeperAware()); SolrResourceLoader loader = new SolrResourceLoader(Paths.get("./")); @@ -61,10 +56,7 @@ public void testZkBasedJsonStorage() throws Exception { } } - - /** - * Runs persisted managed resource creation and update tests on JSON storage. - */ + /** Runs persisted managed resource creation and update tests on JSON storage. */ @Test public void testFileBasedJsonStorage() throws Exception { File instanceDir = createTempDir("json-storage").toFile(); @@ -82,57 +74,58 @@ public void testFileBasedJsonStorage() throws Exception { } /** - * Called from tests for each storage type to run creation and update tests - * on a persisted managed resource. + * Called from tests for each storage type to run creation and update tests on a persisted managed + * resource. 
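For readers who want the storage round trip without the test scaffolding: the sketch below drives FileStorageIO plus JsonStorage standalone, using only APIs that appear in this diff; the temp directory, resource id, and stored data are illustrative.

import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.rest.ManagedResourceStorage;
import org.apache.solr.rest.ManagedResourceStorage.JsonStorage;

public class JsonStorageSketch {
  public static void main(String[] args) throws Exception {
    SolrResourceLoader loader = new SolrResourceLoader(Paths.get("./"));
    NamedList<String> initArgs = new NamedList<>();
    initArgs.add(
        ManagedResourceStorage.STORAGE_DIR_INIT_ARG,
        Files.createTempDirectory("json-storage").toString());

    ManagedResourceStorage.StorageIO storageIO = new ManagedResourceStorage.FileStorageIO();
    storageIO.configure(loader, initArgs);

    JsonStorage jsonStorage = new JsonStorage(storageIO, loader);
    Map<String, Object> toStore = new HashMap<>();
    toStore.put("someArg", "someVal");
    jsonStorage.store("/test/foo", toStore); // persists _test_foo.json on disk
    Object roundTripped = jsonStorage.load("/test/foo"); // a Map parsed back from JSON
    System.out.println(roundTripped);
  }
}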
*/ @SuppressWarnings("unchecked") private void doStorageTests(SolrResourceLoader loader, StorageIO storageIO) throws Exception { String resourceId = "/test/foo"; - + JsonStorage jsonStorage = new JsonStorage(storageIO, loader); - - Map managedInitArgs = new HashMap<>(); - managedInitArgs.put("ignoreCase","true"); + + Map managedInitArgs = new HashMap<>(); + managedInitArgs.put("ignoreCase", "true"); managedInitArgs.put("dontIgnoreCase", "false"); - + List managedList = new ArrayList<>(); // we need a mutable List for this test - managedList.addAll(Arrays.asList("a","b","c","d","e")); - - Map toStore = new HashMap<>(); + managedList.addAll(Arrays.asList("a", "b", "c", "d", "e")); + + Map toStore = new HashMap<>(); toStore.put(ManagedResource.INIT_ARGS_JSON_FIELD, managedInitArgs); toStore.put(ManagedResource.MANAGED_JSON_LIST_FIELD, managedList); - + jsonStorage.store(resourceId, toStore); - + String storedResourceId = jsonStorage.getStoredResourceId(resourceId); - assertTrue(storedResourceId+" file not found!", storageIO.exists(storedResourceId)); - + assertTrue(storedResourceId + " file not found!", storageIO.exists(storedResourceId)); + Object fromStorage = jsonStorage.load(resourceId); - assertNotNull(fromStorage); - - Map storedMap = (Map)fromStorage; - Map storedArgs = (Map)storedMap.get(ManagedResource.INIT_ARGS_JSON_FIELD); + assertNotNull(fromStorage); + + Map storedMap = (Map) fromStorage; + Map storedArgs = + (Map) storedMap.get(ManagedResource.INIT_ARGS_JSON_FIELD); assertNotNull(storedArgs); assertEquals("true", storedArgs.get("ignoreCase")); - List storedList = (List)storedMap.get(ManagedResource.MANAGED_JSON_LIST_FIELD); + List storedList = (List) storedMap.get(ManagedResource.MANAGED_JSON_LIST_FIELD); assertNotNull(storedList); assertTrue(storedList.size() == managedList.size()); - assertTrue(storedList.contains("a")); - + assertTrue(storedList.contains("a")); + // now verify you can update existing data managedInitArgs.put("anotherArg", "someValue"); managedList.add("f"); - jsonStorage.store(resourceId, toStore); + jsonStorage.store(resourceId, toStore); fromStorage = jsonStorage.load(resourceId); - assertNotNull(fromStorage); - - storedMap = (Map)fromStorage; - storedArgs = (Map)storedMap.get(ManagedResource.INIT_ARGS_JSON_FIELD); + assertNotNull(fromStorage); + + storedMap = (Map) fromStorage; + storedArgs = (Map) storedMap.get(ManagedResource.INIT_ARGS_JSON_FIELD); assertNotNull(storedArgs); assertEquals("someValue", storedArgs.get("anotherArg")); - storedList = (List)storedMap.get(ManagedResource.MANAGED_JSON_LIST_FIELD); + storedList = (List) storedMap.get(ManagedResource.MANAGED_JSON_LIST_FIELD); assertNotNull(storedList); assertTrue(storedList.size() == managedList.size()); - assertTrue(storedList.contains("e")); + assertTrue(storedList.contains("e")); } } diff --git a/solr/core/src/test/org/apache/solr/rest/TestRestManager.java b/solr/core/src/test/org/apache/solr/rest/TestRestManager.java index bfe1f119637..4f4b2f9b28e 100644 --- a/solr/core/src/test/org/apache/solr/rest/TestRestManager.java +++ b/solr/core/src/test/org/apache/solr/rest/TestRestManager.java @@ -19,7 +19,6 @@ import java.io.File; import java.nio.file.Paths; import java.util.Arrays; - import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.Utils; import org.apache.solr.core.SolrResourceLoader; @@ -28,113 +27,125 @@ import org.junit.Test; /** - * Tests {@link RestManager} functionality, including resource registration, - * and REST API requests and responses. 
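Distilled from the assertions interleaved through testRestManagerEndpoints below, the REST lifecycle of a managed word set is short; this sketch reuses the endpoint and helpers from the test itself (the condensed test class is illustrative):

import org.apache.solr.rest.SolrRestletTestBase;
import org.junit.Test;

public class ManagedWordSetLifecycleSketch extends SolrRestletTestBase {
  @Test
  public void testLifecycle() throws Exception {
    String endpoint = "/schema/analysis/protwords/english";
    // 1. register the resource
    assertJPut(endpoint, json("{ 'class':'solr.ManagedWordSetResource' }"),
        "/responseHeader/status==0");
    // 2. PUT a JSON array to add words; the managed list comes back sorted
    assertJPut(endpoint, "[\"this\",\"is\",\"a\",\"test\"]", "/responseHeader/status==0");
    assertJQ(endpoint, "/wordSet/managedList==['a','is','test','this']");
    // 3. initArgs may be updated in place (though ignoreCase true -> false is rejected)
    assertJPut(endpoint, json("{ 'initArgs':{ 'ignoreCase':'true' } }"),
        "/responseHeader/status==0");
    // 4. DELETE removes the resource and its stored data
    assertJDelete(endpoint, "/responseHeader/status==0");
  }
}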
+ * Tests {@link RestManager} functionality, including resource registration, and REST API requests + * and responses. */ public class TestRestManager extends SolrRestletTestBase { /** - * Tests {@link RestManager}'s responses to REST API requests on /config/managed - * and /schema/managed. Also tests {@link ManagedWordSetResource} functionality - * through the REST API. + * Tests {@link RestManager}'s responses to REST API requests on /config/managed and + * /schema/managed. Also tests {@link ManagedWordSetResource} functionality through the REST API. */ @Test public void testRestManagerEndpoints() throws Exception { // relies on these ManagedResources being activated in the schema-rest.xml used by this test - assertJQ("/schema/managed", - "/responseHeader/status==0"); + assertJQ("/schema/managed", "/responseHeader/status==0"); /* * TODO: can't assume these will be here unless schema-rest.xml includes these declarations - * + * "/managedResources/[0]/class=='org.apache.solr.rest.schema.analysis.ManagedWordSetResource'", "/managedResources/[0]/resourceId=='/schema/analysis/stopwords/english'", "/managedResources/[1]/class=='org.apache.solr.rest.schema.analysis.ManagedSynonymGraphFilterFactory$SynonymManager'", "/managedResources/[1]/resourceId=='/schema/analysis/synonyms/englishgraph'"); */ - + // no pre-existing managed config components -// assertJQ("/config/managed", "/managedResources==[]"); - + // assertJQ("/config/managed", "/managedResources==[]"); + // add a ManagedWordSetResource for managing protected words (for stemming) String newEndpoint = "/schema/analysis/protwords/english"; - - assertJPut(newEndpoint, json("{ 'class':'solr.ManagedWordSetResource' }"), "/responseHeader/status==0"); - - assertJQ("/schema/managed" - ,"/managedResources/[0]/class=='org.apache.solr.rest.schema.analysis.ManagedWordSetResource'" - ,"/managedResources/[0]/resourceId=='/schema/analysis/protwords/english'"); - + + assertJPut( + newEndpoint, + json("{ 'class':'solr.ManagedWordSetResource' }"), + "/responseHeader/status==0"); + + assertJQ( + "/schema/managed", + "/managedResources/[0]/class=='org.apache.solr.rest.schema.analysis.ManagedWordSetResource'", + "/managedResources/[0]/resourceId=='/schema/analysis/protwords/english'"); + // query the resource we just created assertJQ(newEndpoint, "/wordSet/managedList==[]"); - + // add some words to this new word list manager - assertJPut(newEndpoint, Utils.toJSONString(Arrays.asList("this", "is", "a", "test")), "/responseHeader/status==0"); + assertJPut( + newEndpoint, + Utils.toJSONString(Arrays.asList("this", "is", "a", "test")), + "/responseHeader/status==0"); - assertJQ(newEndpoint - ,"/wordSet/managedList==['a','is','test','this']" - ,"/wordSet/initArgs=={'ignoreCase':false}"); // make sure the default is serialized even if not specified + assertJQ( + newEndpoint, + "/wordSet/managedList==['a','is','test','this']", + "/wordSet/initArgs=={'ignoreCase':false}"); // make sure the default is serialized even if + // not specified // Test for case-sensitivity - "Test" lookup should fail assertJQ(newEndpoint + "/Test", "/responseHeader/status==404"); // Switch to case-insensitive - assertJPut(newEndpoint, json("{ 'initArgs':{ 'ignoreCase':'true' } }"), "/responseHeader/status==0"); + assertJPut( + newEndpoint, json("{ 'initArgs':{ 'ignoreCase':'true' } }"), "/responseHeader/status==0"); // Test for case-insensitivity - "Test" lookup should succeed assertJQ(newEndpoint + "/Test", "/responseHeader/status==0"); - // Switch to case-sensitive - this request should 
fail: changing ignoreCase from true to false is not permitted - assertJPut(newEndpoint, json("{ 'initArgs':{ 'ignoreCase':false } }"), "/responseHeader/status==400"); + // Switch to case-sensitive - this request should fail: changing ignoreCase from true to false + // is not permitted + assertJPut( + newEndpoint, json("{ 'initArgs':{ 'ignoreCase':false } }"), "/responseHeader/status==400"); // Test XML response format - assertQ(newEndpoint + "?wt=xml" - ,"/response/lst[@name='responseHeader']/int[@name='status']=0" - ,"/response/lst[@name='wordSet']/arr[@name='managedList']/str[1]='a'" - ,"/response/lst[@name='wordSet']/arr[@name='managedList']/str[2]='is'" - ,"/response/lst[@name='wordSet']/arr[@name='managedList']/str[3]='test'" - ,"/response/lst[@name='wordSet']/arr[@name='managedList']/str[4]='this'"); + assertQ( + newEndpoint + "?wt=xml", + "/response/lst[@name='responseHeader']/int[@name='status']=0", + "/response/lst[@name='wordSet']/arr[@name='managedList']/str[1]='a'", + "/response/lst[@name='wordSet']/arr[@name='managedList']/str[2]='is'", + "/response/lst[@name='wordSet']/arr[@name='managedList']/str[3]='test'", + "/response/lst[@name='wordSet']/arr[@name='managedList']/str[4]='this'"); // delete the one we created above assertJDelete(newEndpoint, "/responseHeader/status==0"); // make sure it's really gone -// assertJQ("/config/managed", "/managedResources==[]"); + // assertJQ("/config/managed", "/managedResources==[]"); } - + @Test public void testReloadFromPersistentStorage() throws Exception { SolrResourceLoader loader = new SolrResourceLoader(Paths.get("./")); File unitTestStorageDir = createTempDir("testRestManager").toFile(); - assertTrue(unitTestStorageDir.getAbsolutePath()+" is not a directory!", - unitTestStorageDir.isDirectory()); + assertTrue( + unitTestStorageDir.getAbsolutePath() + " is not a directory!", + unitTestStorageDir.isDirectory()); assertTrue(unitTestStorageDir.canRead()); assertTrue(unitTestStorageDir.canWrite()); NamedList ioInitArgs = new NamedList<>(); - ioInitArgs.add(ManagedResourceStorage.STORAGE_DIR_INIT_ARG, - unitTestStorageDir.getAbsolutePath()); - + ioInitArgs.add( + ManagedResourceStorage.STORAGE_DIR_INIT_ARG, unitTestStorageDir.getAbsolutePath()); + StorageIO storageIO = new ManagedResourceStorage.FileStorageIO(); storageIO.configure(loader, ioInitArgs); - + NamedList initArgs = new NamedList<>(); RestManager restManager = new RestManager(); restManager.init(loader, initArgs, storageIO); - + // verifies a RestManager can be reloaded from a previous RestManager's data RestManager restManager2 = new RestManager(); - restManager2.init(loader, initArgs, storageIO); + restManager2.init(loader, initArgs, storageIO); } @Test - public void testResolveResourceId () throws Exception { + public void testResolveResourceId() throws Exception { String path = "http://solr.apache.org/schema/analysis/synonyms/de"; String resourceId = RestManager.ManagedEndpoint.resolveResourceId(path); assertEquals(resourceId, "/schema/analysis/synonyms/de"); } @Test - public void testResolveResourceIdDecodeUrlEntities () throws Exception { + public void testResolveResourceIdDecodeUrlEntities() throws Exception { String path = "http://solr.apache.org/schema/analysis/synonyms/de/%C3%84ndern"; String resourceId = RestManager.ManagedEndpoint.resolveResourceId(path); assertEquals(resourceId, "/schema/analysis/synonyms/de/Ändern"); diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java 
b/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java index 960b83a6d90..44728128b02 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java @@ -16,6 +16,9 @@ */ package org.apache.solr.rest.schema; +import static org.apache.solr.common.util.Utils.fromJSONString; +import static org.hamcrest.Matchers.containsString; + import java.io.File; import java.lang.invoke.MethodHandles; import java.util.ArrayList; @@ -26,7 +29,6 @@ import java.util.Map; import java.util.Set; import java.util.function.Consumer; - import org.apache.commons.io.FileUtils; import org.apache.lucene.misc.SweetSpotSimilarity; import org.apache.lucene.search.similarities.BM25Similarity; @@ -51,14 +53,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.common.util.Utils.fromJSONString; -import static org.hamcrest.Matchers.containsString; - - public class TestBulkSchemaAPI extends RestTestBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private static File tmpSolrHome; @Before @@ -69,16 +66,22 @@ public void before() throws Exception { System.setProperty("managed.schema.mutable", "true"); System.setProperty("enable.update.log", "false"); - createJettyAndHarness(tmpSolrHome.getAbsolutePath(), "solrconfig-managed-schema.xml", "schema-rest.xml", - "/solr", true, null); + createJettyAndHarness( + tmpSolrHome.getAbsolutePath(), + "solrconfig-managed-schema.xml", + "schema-rest.xml", + "/solr", + true, + null); if (random().nextBoolean()) { log.info("These tests are run with V2 API"); - restTestHarness.setServerProvider(new RESTfulServerProvider() { - @Override - public String getBaseURL() { - return jetty.getBaseUrl().toString() + "/____v2/cores/" + DEFAULT_TEST_CORENAME; - } - }); + restTestHarness.setServerProvider( + new RESTfulServerProvider() { + @Override + public String getBaseURL() { + return jetty.getBaseUrl().toString() + "/____v2/cores/" + DEFAULT_TEST_CORENAME; + } + }); } } @@ -96,105 +99,111 @@ public void after() throws Exception { public void testMultipleAddFieldWithErrors() throws Exception { - String payload = "{\n" + - " 'add-field' : {\n" + - " 'name':'a1',\n" + - " 'type': 'string1',\n" + - " 'stored':true,\n" + - " 'indexed':false\n" + - " },\n" + - " 'add-field' : {\n" + - " 'type': 'string',\n" + - " 'stored':true,\n" + - " 'indexed':true\n" + - " }\n" + - " }"; + String payload = + "{\n" + + " 'add-field' : {\n" + + " 'name':'a1',\n" + + " 'type': 'string1',\n" + + " 'stored':true,\n" + + " 'indexed':false\n" + + " },\n" + + " 'add-field' : {\n" + + " 'type': 'string',\n" + + " 'stored':true,\n" + + " 'indexed':true\n" + + " }\n" + + " }"; String response = restTestHarness.post("/schema", json(payload)); @SuppressWarnings({"rawtypes"}) Map map = (Map) fromJSONString(response); @SuppressWarnings({"rawtypes"}) - Map error = (Map)map.get("error"); + Map error = (Map) map.get("error"); assertNotNull("No errors", error); @SuppressWarnings({"rawtypes"}) - List details = (List)error.get("details"); + List details = (List) error.get("details"); assertNotNull("No details", details); assertEquals("Wrong number of details", 2, details.size()); @SuppressWarnings({"rawtypes"}) - List firstErrorList = (List)((Map)details.get(0)).get("errorMessages"); + List firstErrorList = (List) ((Map) details.get(0)).get("errorMessages"); assertEquals(1, firstErrorList.size()); - assertTrue 
(((String)firstErrorList.get(0)).contains("Field 'a1': Field type 'string1' not found.\n")); + assertTrue( + ((String) firstErrorList.get(0)).contains("Field 'a1': Field type 'string1' not found.\n")); @SuppressWarnings({"rawtypes"}) - List secondErrorList = (List)((Map)details.get(1)).get("errorMessages"); + List secondErrorList = (List) ((Map) details.get(1)).get("errorMessages"); assertEquals(1, secondErrorList.size()); - assertTrue (((String)secondErrorList.get(0)).contains("is a required field")); + assertTrue(((String) secondErrorList.get(0)).contains("is a required field")); } - + public void testAnalyzerClass() throws Exception { - String addFieldTypeAnalyzerWithClass = "{\n" + - "'add-field-type' : {" + - " 'name' : 'myNewTextFieldWithAnalyzerClass',\n" + - " 'class':'solr.TextField',\n" + - " 'analyzer' : {\n" + - " 'luceneMatchVersion':'5.0.0',\n" + - " 'class':'org.apache.lucene.analysis.core.WhitespaceAnalyzer'\n"; + String addFieldTypeAnalyzerWithClass = + "{\n" + + "'add-field-type' : {" + + " 'name' : 'myNewTextFieldWithAnalyzerClass',\n" + + " 'class':'solr.TextField',\n" + + " 'analyzer' : {\n" + + " 'luceneMatchVersion':'5.0.0',\n" + + " 'class':'org.apache.lucene.analysis.core.WhitespaceAnalyzer'\n"; String charFilters = - " 'charFilters' : [{\n" + - " 'class':'solr.PatternReplaceCharFilterFactory',\n" + - " 'replacement':'$1$1',\n" + - " 'pattern':'([a-zA-Z])\\\\\\\\1+'\n" + - " }],\n"; - String tokenizer = - " 'tokenizer' : { 'class':'solr.WhitespaceTokenizerFactory' },\n"; - String filters = - " 'filters' : [{ 'class':'solr.ASCIIFoldingFilterFactory' }]\n"; - String suffix = - " }\n"+ - "}}"; - - String response = restTestHarness.post("/schema", - json(addFieldTypeAnalyzerWithClass + ',' + charFilters + tokenizer + filters + suffix)); + " 'charFilters' : [{\n" + + " 'class':'solr.PatternReplaceCharFilterFactory',\n" + + " 'replacement':'$1$1',\n" + + " 'pattern':'([a-zA-Z])\\\\\\\\1+'\n" + + " }],\n"; + String tokenizer = " 'tokenizer' : { 'class':'solr.WhitespaceTokenizerFactory' },\n"; + String filters = " 'filters' : [{ 'class':'solr.ASCIIFoldingFilterFactory' }]\n"; + String suffix = " }\n" + "}}"; + + String response = + restTestHarness.post( + "/schema", + json(addFieldTypeAnalyzerWithClass + ',' + charFilters + tokenizer + filters + suffix)); @SuppressWarnings({"rawtypes"}) Map map = (Map) fromJSONString(response); @SuppressWarnings({"rawtypes"}) - Map error = (Map)map.get("error"); + Map error = (Map) map.get("error"); assertNotNull("No errors", error); @SuppressWarnings({"rawtypes"}) - List details = (List)error.get("details"); + List details = (List) error.get("details"); assertNotNull("No details", details); assertEquals("Wrong number of details", 1, details.size()); @SuppressWarnings({"rawtypes"}) - List errorList = (List)((Map)details.get(0)).get("errorMessages"); + List errorList = (List) ((Map) details.get(0)).get("errorMessages"); assertEquals(1, errorList.size()); - assertTrue (((String)errorList.get(0)).contains - ("An analyzer with a class property may not define any char filters!")); + assertTrue( + ((String) errorList.get(0)) + .contains("An analyzer with a class property may not define any char filters!")); - response = restTestHarness.post("/schema", - json(addFieldTypeAnalyzerWithClass + ',' + tokenizer + filters + suffix)); + response = + restTestHarness.post( + "/schema", json(addFieldTypeAnalyzerWithClass + ',' + tokenizer + filters + suffix)); map = (Map) fromJSONString(response); - error = (Map)map.get("error"); + error = (Map) 
map.get("error"); assertNotNull("No errors", error); - details = (List)error.get("details"); + details = (List) error.get("details"); assertNotNull("No details", details); assertEquals("Wrong number of details", 1, details.size()); - errorList = (List)((Map)details.get(0)).get("errorMessages"); + errorList = (List) ((Map) details.get(0)).get("errorMessages"); assertEquals(1, errorList.size()); - assertTrue (((String)errorList.get(0)).contains - ("An analyzer with a class property may not define a tokenizer!")); + assertTrue( + ((String) errorList.get(0)) + .contains("An analyzer with a class property may not define a tokenizer!")); - response = restTestHarness.post("/schema", - json(addFieldTypeAnalyzerWithClass + ',' + filters + suffix)); + response = + restTestHarness.post( + "/schema", json(addFieldTypeAnalyzerWithClass + ',' + filters + suffix)); map = (Map) fromJSONString(response); - error = (Map)map.get("error"); + error = (Map) map.get("error"); assertNotNull("No errors", error); - details = (List)error.get("details"); + details = (List) error.get("details"); assertNotNull("No details", details); assertEquals("Wrong number of details", 1, details.size()); - errorList = (List)((Map)details.get(0)).get("errorMessages"); + errorList = (List) ((Map) details.get(0)).get("errorMessages"); assertEquals(1, errorList.size()); - assertTrue (((String)errorList.get(0)).contains - ("An analyzer with a class property may not define any filters!")); + assertTrue( + ((String) errorList.get(0)) + .contains("An analyzer with a class property may not define any filters!")); response = restTestHarness.post("/schema", json(addFieldTypeAnalyzerWithClass + suffix)); map = (Map) fromJSONString(response); @@ -203,27 +212,29 @@ public void testAnalyzerClass() throws Exception { map = getObj(restTestHarness, "myNewTextFieldWithAnalyzerClass", "fieldTypes"); assertNotNull(map); @SuppressWarnings({"rawtypes"}) - Map analyzer = (Map)map.get("analyzer"); - assertEquals("org.apache.lucene.analysis.core.WhitespaceAnalyzer", String.valueOf(analyzer.get("class"))); - + Map analyzer = (Map) map.get("analyzer"); + assertEquals( + "org.apache.lucene.analysis.core.WhitespaceAnalyzer", + String.valueOf(analyzer.get("class"))); } public void testAnalyzerByName() throws Exception { - String addFieldTypeAnalyzer = "{\n" + - "'add-field-type' : {" + - " 'name' : 'myNewTextField',\n" + - " 'class':'solr.TextField',\n" + - " 'analyzer' : {\n" + - " 'charFilters' : [{\n" + - " 'name':'patternReplace',\n" + - " 'replacement':'$1$1',\n" + - " 'pattern':'([a-zA-Z])\\\\\\\\1+'\n" + - " }],\n" + - " 'tokenizer' : { 'name':'whitespace' },\n" + - " 'filters' : [{ 'name':'asciiFolding' }]\n" + - " }\n"+ - "}}"; + String addFieldTypeAnalyzer = + "{\n" + + "'add-field-type' : {" + + " 'name' : 'myNewTextField',\n" + + " 'class':'solr.TextField',\n" + + " 'analyzer' : {\n" + + " 'charFilters' : [{\n" + + " 'name':'patternReplace',\n" + + " 'replacement':'$1$1',\n" + + " 'pattern':'([a-zA-Z])\\\\\\\\1+'\n" + + " }],\n" + + " 'tokenizer' : { 'name':'whitespace' },\n" + + " 'filters' : [{ 'name':'asciiFolding' }]\n" + + " }\n" + + "}}"; String response = restTestHarness.post("/schema", json(addFieldTypeAnalyzer)); @SuppressWarnings({"rawtypes"}) @@ -233,44 +244,47 @@ public void testAnalyzerByName() throws Exception { map = getObj(restTestHarness, "myNewTextField", "fieldTypes"); assertNotNull(map); @SuppressWarnings({"rawtypes"}) - Map analyzer = (Map)map.get("analyzer"); + Map analyzer = (Map) map.get("analyzer"); 
@SuppressWarnings({"rawtypes"}) - Map tokenizer = (Map)analyzer.get("tokenizer"); + Map tokenizer = (Map) analyzer.get("tokenizer"); @SuppressWarnings({"rawtypes"}) - List charFilters = (List)analyzer.get("charFilters"); + List charFilters = (List) analyzer.get("charFilters"); @SuppressWarnings({"rawtypes"}) - List tokenFilters = (List)analyzer.get("filters"); + List tokenFilters = (List) analyzer.get("filters"); assertEquals("whitespace", String.valueOf(tokenizer.get("name"))); - assertEquals("patternReplace", String.valueOf(((Map)charFilters.get(0)).get("name"))); - assertEquals("asciiFolding", String.valueOf(((Map)tokenFilters.get(0)).get("name"))); + assertEquals("patternReplace", String.valueOf(((Map) charFilters.get(0)).get("name"))); + assertEquals("asciiFolding", String.valueOf(((Map) tokenFilters.get(0)).get("name"))); } public void testAnalyzerByBogusName() throws Exception { - String addFieldTypeAnalyzer = "{\n" + - "'add-field-type' : {" + - " 'name' : 'myNewTextField',\n" + - " 'class':'solr.TextField',\n" + - " 'analyzer' : {\n" + - " 'tokenizer' : { 'name':'bogus' }\n" + - " }\n"+ - "}}"; + String addFieldTypeAnalyzer = + "{\n" + + "'add-field-type' : {" + + " 'name' : 'myNewTextField',\n" + + " 'class':'solr.TextField',\n" + + " 'analyzer' : {\n" + + " 'tokenizer' : { 'name':'bogus' }\n" + + " }\n" + + "}}"; String response = restTestHarness.post("/schema", json(addFieldTypeAnalyzer)); @SuppressWarnings({"rawtypes"}) Map map = (Map) fromJSONString(response); @SuppressWarnings({"rawtypes"}) - Map error = (Map)map.get("error"); + Map error = (Map) map.get("error"); assertNotNull("No errors", error); @SuppressWarnings({"rawtypes"}) - List details = (List)error.get("details"); + List details = (List) error.get("details"); assertNotNull("No details", details); assertEquals("Wrong number of details", 1, details.size()); @SuppressWarnings({"rawtypes"}) - List errorList = (List)((Map)details.get(0)).get("errorMessages"); + List errorList = (List) ((Map) details.get(0)).get("errorMessages"); assertEquals(1, errorList.size()); - assertTrue (((String)errorList.get(0)).contains - ("A SPI class of type org.apache.lucene.analysis.TokenizerFactory with name 'bogus' does not exist.")); + assertTrue( + ((String) errorList.get(0)) + .contains( + "A SPI class of type org.apache.lucene.analysis.TokenizerFactory with name 'bogus' does not exist.")); } public void testAddFieldMatchingExistingDynamicField() throws Exception { @@ -288,14 +302,17 @@ public void testAddFieldMatchingExistingDynamicField() throws Exception { map = getObj(harness, "boolean", "fieldTypes"); assertNotNull("'boolean' field type does not exist in the schema", map); - String payload = "{\n" + - " 'add-field' : {\n" + - " 'name':'" + newFieldName + "',\n" + - " 'type':'boolean',\n" + - " 'stored':true,\n" + - " 'indexed':true\n" + - " }\n" + - " }"; + String payload = + "{\n" + + " 'add-field' : {\n" + + " 'name':'" + + newFieldName + + "',\n" + + " 'type':'boolean',\n" + + " 'stored':true,\n" + + " 'indexed':true\n" + + " }\n" + + " }"; String response = harness.post("/schema", json(payload)); @@ -311,14 +328,17 @@ public void testAddIllegalDynamicField() throws Exception { String newFieldName = "illegal"; - String payload = "{\n" + - " 'add-dynamic-field' : {\n" + - " 'name':'" + newFieldName + "',\n" + - " 'type':'string',\n" + - " 'stored':true,\n" + - " 'indexed':true\n" + - " }\n" + - " }"; + String payload = + "{\n" + + " 'add-dynamic-field' : {\n" + + " 'name':'" + + newFieldName + + "',\n" + + " 'type':'string',\n" 
+ + " 'stored':true,\n" + + " 'indexed':true\n" + + " }\n" + + " }"; String response = harness.post("/schema", json(payload)); @SuppressWarnings({"rawtypes"}) @@ -336,14 +356,17 @@ public void testAddIllegalFields() throws Exception { // 1. Make sure you can't create a new field with an asterisk in its name String newFieldName = "asterisk*"; - String payload = "{\n" + - " 'add-field' : {\n" + - " 'name':'" + newFieldName + "',\n" + - " 'type':'string',\n" + - " 'stored':true,\n" + - " 'indexed':true\n" + - " }\n" + - "}"; + String payload = + "{\n" + + " 'add-field' : {\n" + + " 'name':'" + + newFieldName + + "',\n" + + " 'type':'string',\n" + + " 'stored':true,\n" + + " 'indexed':true\n" + + " }\n" + + "}"; String response = harness.post("/schema", json(payload)); Map map = (Map) fromJSONString(response); @@ -358,14 +381,17 @@ public void testAddIllegalFields() throws Exception { Map m = getObj(harness, newFieldName, "fields"); assertNotNull("'" + newFieldName + "' field does not exist in the schema", m); - payload = "{\n" + - " 'add-field' : {\n" + - " 'name':'" + newFieldName + "',\n" + - " 'type':'string',\n" + - " 'stored':true,\n" + - " 'indexed':true\n" + - " }\n" + - "}"; + payload = + "{\n" + + " 'add-field' : {\n" + + " 'name':'" + + newFieldName + + "',\n" + + " 'type':'string',\n" + + " 'stored':true,\n" + + " 'indexed':true\n" + + " }\n" + + "}"; response = harness.post("/schema", json(payload)); map = (Map) fromJSONString(response); @@ -390,14 +416,15 @@ public void testAddFieldWithExistingCatchallDynamicField() throws Exception { map = getObj(harness, "boolean", "fieldTypes"); assertNotNull("'boolean' field type does not exist in the schema", map); - String payload = "{\n" + - " 'add-dynamic-field' : {\n" + - " 'name':'*',\n" + - " 'type':'string',\n" + - " 'stored':true,\n" + - " 'indexed':true\n" + - " }\n" + - "}"; + String payload = + "{\n" + + " 'add-dynamic-field' : {\n" + + " 'name':'*',\n" + + " 'type':'string',\n" + + " 'stored':true,\n" + + " 'indexed':true\n" + + " }\n" + + "}"; String response = harness.post("/schema", json(payload)); @@ -407,14 +434,17 @@ public void testAddFieldWithExistingCatchallDynamicField() throws Exception { map = getObj(harness, "*", "dynamicFields"); assertNotNull("Dynamic field '*' is not in the schema", map); - payload = "{\n" + - " 'add-field' : {\n" + - " 'name':'" + newFieldName + "',\n" + - " 'type':'boolean',\n" + - " 'stored':true,\n" + - " 'indexed':true\n" + - " }\n" + - " }"; + payload = + "{\n" + + " 'add-field' : {\n" + + " 'name':'" + + newFieldName + + "',\n" + + " 'type':'boolean',\n" + + " 'stored':true,\n" + + " 'indexed':true\n" + + " }\n" + + " }"; response = harness.post("/schema", json(payload)); @@ -426,20 +456,20 @@ public void testAddFieldWithExistingCatchallDynamicField() throws Exception { } @SuppressWarnings({"unchecked", "rawtypes"}) - public void testMultipleCommands() throws Exception{ + public void testMultipleCommands() throws Exception { RestTestHarness harness = restTestHarness; Map m = getObj(harness, "wdf_nocase", "fields"); assertNotNull("'wdf_nocase' field does not exist in the schema", m); - + m = getObj(harness, "wdf_nocase", "fieldTypes"); assertNotNull("'wdf_nocase' field type does not exist in the schema", m); - + m = getObj(harness, "boolean", "fieldTypes"); assertNotNull("'boolean' field type does not exist in the schema", m); assertNull(m.get("sortMissingFirst")); - assertTrue((Boolean)m.get("sortMissingLast")); - + assertTrue((Boolean) m.get("sortMissingLast")); + m = getObj(harness, 
"name", "fields"); assertNotNull("'name' field does not exist in the schema", m); assertEquals("nametext", m.get("type")); @@ -455,151 +485,152 @@ public void testMultipleCommands() throws Exception{ List l = getSourceCopyFields(harness, "*_i"); Set s = new HashSet(); assertEquals(4, l.size()); - s.add(((Map)l.get(0)).get("dest")); - s.add(((Map)l.get(1)).get("dest")); + s.add(((Map) l.get(0)).get("dest")); + s.add(((Map) l.get(1)).get("dest")); s.add(((Map) l.get(2)).get("dest")); s.add(((Map) l.get(3)).get("dest")); assertTrue(s.contains("title")); assertTrue(s.contains("*_s")); - String payload = "{\n" + - " 'add-field' : {\n" + - " 'name':'a1',\n" + - " 'type': 'string',\n" + - " 'stored':true,\n" + - " 'indexed':false\n" + - " },\n" + - " 'add-field' : {\n" + - " 'name':'a2',\n" + - " 'type': 'string',\n" + - " 'stored':true,\n" + - " 'indexed':true,\n" + - " 'uninvertible':true,\n" + - " },\n" + - " 'add-dynamic-field' : {\n" + - " 'name' :'*_lol',\n" + - " 'type':'string',\n" + - " 'stored':true,\n" + - " 'indexed':true,\n" + - " 'uninvertible':false,\n" + - " },\n" + - " 'add-copy-field' : {\n" + - " 'source' :'a1',\n" + - " 'dest':['a2','hello_lol']\n" + - " },\n" + - " 'add-field-type' : {\n" + - " 'name' :'mystr',\n" + - " 'class' : 'solr.StrField',\n" + - " 'sortMissingLast':'true'\n" + - " },\n" + - " 'add-field-type' : {" + - " 'name' : 'myNewTxtField',\n" + - " 'class':'solr.TextField',\n" + - " 'positionIncrementGap':'100',\n" + - " 'indexAnalyzer' : {\n" + - " 'charFilters':[\n" + - " {\n" + - " 'class':'solr.PatternReplaceCharFilterFactory',\n" + - " 'replacement':'$1$1',\n" + - " 'pattern':'([a-zA-Z])\\\\\\\\1+'\n" + - " }\n" + - " ],\n" + - " 'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'},\n" + - " 'filters':[\n" + - " {\n" + - " 'class':'solr.WordDelimiterGraphFilterFactory',\n" + - " 'preserveOriginal':'0'\n" + - " },\n" + - " {\n" + - " 'class':'solr.StopFilterFactory',\n" + - " 'words':'stopwords.txt',\n" + - " 'ignoreCase':'true'\n" + - " },\n" + - " {'class':'solr.LowerCaseFilterFactory'},\n" + - " {'class':'solr.ASCIIFoldingFilterFactory'},\n" + - " {'class':'solr.KStemFilterFactory'},\n" + - " {'class':'solr.FlattenGraphFilterFactory'}\n" + - " ]\n" + - " },\n" + - " 'queryAnalyzer' : {\n" + - " 'charFilters':[\n" + - " {\n" + - " 'class':'solr.PatternReplaceCharFilterFactory',\n" + - " 'replacement':'$1$1',\n" + - " 'pattern':'([a-zA-Z])\\\\\\\\1+'\n" + - " }\n" + - " ],\n" + - " 'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'},\n" + - " 'filters':[\n" + - " {\n" + - " 'class':'solr.WordDelimiterGraphFilterFactory',\n" + - " 'preserveOriginal':'0'\n" + - " },\n" + - " {\n" + - " 'class':'solr.StopFilterFactory',\n" + - " 'words':'stopwords.txt',\n" + - " 'ignoreCase':'true'\n" + - " },\n" + - " {'class':'solr.LowerCaseFilterFactory'},\n" + - " {'class':'solr.ASCIIFoldingFilterFactory'},\n" + - " {'class':'solr.KStemFilterFactory'}\n" + - " ]\n" + - " }\n" + - " },\n"+ - " 'add-field' : {\n" + - " 'name':'a3',\n" + - " 'type': 'myNewTxtField',\n" + - " 'stored':true,\n" + - " 'indexed':true\n" + - " },\n" + - " 'add-field-type' : {" + - " 'name' : 'myWhitespaceTxtField',\n" + - " 'class':'solr.TextField',\n" + - " 'uninvertible':false,\n" + - " 'analyzer' : {'class' : 'org.apache.lucene.analysis.core.WhitespaceAnalyzer'}\n" + - " },\n"+ - " 'add-field' : {\n" + - " 'name':'a5',\n" + - " 'type': 'myWhitespaceTxtField',\n" + - " 'stored':true\n" + - " },\n" + - " 'add-field-type' : {" + - " 'name' : 'mySimField',\n" + - " 
'class':'solr.TextField',\n" + - " 'analyzer' : {'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'}},\n" + - " 'similarity' : {'class':'org.apache.lucene.misc.SweetSpotSimilarity'}\n" + - " },\n"+ - " 'add-field' : {\n" + - " 'name':'a4',\n" + - " 'type': 'mySimField',\n" + - " 'stored':true,\n" + - " 'indexed':true\n" + - " },\n" + - " 'delete-field' : {'name':'wdf_nocase'},\n" + - " 'delete-field-type' : {'name':'wdf_nocase'},\n" + - " 'delete-dynamic-field' : {'name':'*_tt'},\n" + - " 'delete-copy-field' : {'source':'a1', 'dest':'a2'},\n" + - " 'delete-copy-field' : {'source':'*_i', 'dest':['title', '*_s']},\n" + - " 'replace-field-type' : {\n" + - " 'name':'boolean',\n" + - " 'class':'solr.BoolField',\n" + - " 'sortMissingFirst':true\n" + - " },\n" + - " 'replace-field' : {\n" + - " 'name':'name',\n" + - " 'type':'string',\n" + - " 'indexed':true,\n" + - " 'stored':true\n" + - " },\n" + - " 'replace-dynamic-field' : {\n" + - " 'name':'attr_*',\n" + - " 'type':'string',\n" + - " 'indexed':true,\n" + - " 'stored':true,\n" + - " 'multiValued':true\n" + - " }\n" + - " }\n"; - + String payload = + "{\n" + + " 'add-field' : {\n" + + " 'name':'a1',\n" + + " 'type': 'string',\n" + + " 'stored':true,\n" + + " 'indexed':false\n" + + " },\n" + + " 'add-field' : {\n" + + " 'name':'a2',\n" + + " 'type': 'string',\n" + + " 'stored':true,\n" + + " 'indexed':true,\n" + + " 'uninvertible':true,\n" + + " },\n" + + " 'add-dynamic-field' : {\n" + + " 'name' :'*_lol',\n" + + " 'type':'string',\n" + + " 'stored':true,\n" + + " 'indexed':true,\n" + + " 'uninvertible':false,\n" + + " },\n" + + " 'add-copy-field' : {\n" + + " 'source' :'a1',\n" + + " 'dest':['a2','hello_lol']\n" + + " },\n" + + " 'add-field-type' : {\n" + + " 'name' :'mystr',\n" + + " 'class' : 'solr.StrField',\n" + + " 'sortMissingLast':'true'\n" + + " },\n" + + " 'add-field-type' : {" + + " 'name' : 'myNewTxtField',\n" + + " 'class':'solr.TextField',\n" + + " 'positionIncrementGap':'100',\n" + + " 'indexAnalyzer' : {\n" + + " 'charFilters':[\n" + + " {\n" + + " 'class':'solr.PatternReplaceCharFilterFactory',\n" + + " 'replacement':'$1$1',\n" + + " 'pattern':'([a-zA-Z])\\\\\\\\1+'\n" + + " }\n" + + " ],\n" + + " 'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'},\n" + + " 'filters':[\n" + + " {\n" + + " 'class':'solr.WordDelimiterGraphFilterFactory',\n" + + " 'preserveOriginal':'0'\n" + + " },\n" + + " {\n" + + " 'class':'solr.StopFilterFactory',\n" + + " 'words':'stopwords.txt',\n" + + " 'ignoreCase':'true'\n" + + " },\n" + + " {'class':'solr.LowerCaseFilterFactory'},\n" + + " {'class':'solr.ASCIIFoldingFilterFactory'},\n" + + " {'class':'solr.KStemFilterFactory'},\n" + + " {'class':'solr.FlattenGraphFilterFactory'}\n" + + " ]\n" + + " },\n" + + " 'queryAnalyzer' : {\n" + + " 'charFilters':[\n" + + " {\n" + + " 'class':'solr.PatternReplaceCharFilterFactory',\n" + + " 'replacement':'$1$1',\n" + + " 'pattern':'([a-zA-Z])\\\\\\\\1+'\n" + + " }\n" + + " ],\n" + + " 'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'},\n" + + " 'filters':[\n" + + " {\n" + + " 'class':'solr.WordDelimiterGraphFilterFactory',\n" + + " 'preserveOriginal':'0'\n" + + " },\n" + + " {\n" + + " 'class':'solr.StopFilterFactory',\n" + + " 'words':'stopwords.txt',\n" + + " 'ignoreCase':'true'\n" + + " },\n" + + " {'class':'solr.LowerCaseFilterFactory'},\n" + + " {'class':'solr.ASCIIFoldingFilterFactory'},\n" + + " {'class':'solr.KStemFilterFactory'}\n" + + " ]\n" + + " }\n" + + " },\n" + + " 'add-field' : {\n" + + " 'name':'a3',\n" + + " 'type': 
'myNewTxtField',\n" + + " 'stored':true,\n" + + " 'indexed':true\n" + + " },\n" + + " 'add-field-type' : {" + + " 'name' : 'myWhitespaceTxtField',\n" + + " 'class':'solr.TextField',\n" + + " 'uninvertible':false,\n" + + " 'analyzer' : {'class' : 'org.apache.lucene.analysis.core.WhitespaceAnalyzer'}\n" + + " },\n" + + " 'add-field' : {\n" + + " 'name':'a5',\n" + + " 'type': 'myWhitespaceTxtField',\n" + + " 'stored':true\n" + + " },\n" + + " 'add-field-type' : {" + + " 'name' : 'mySimField',\n" + + " 'class':'solr.TextField',\n" + + " 'analyzer' : {'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'}},\n" + + " 'similarity' : {'class':'org.apache.lucene.misc.SweetSpotSimilarity'}\n" + + " },\n" + + " 'add-field' : {\n" + + " 'name':'a4',\n" + + " 'type': 'mySimField',\n" + + " 'stored':true,\n" + + " 'indexed':true\n" + + " },\n" + + " 'delete-field' : {'name':'wdf_nocase'},\n" + + " 'delete-field-type' : {'name':'wdf_nocase'},\n" + + " 'delete-dynamic-field' : {'name':'*_tt'},\n" + + " 'delete-copy-field' : {'source':'a1', 'dest':'a2'},\n" + + " 'delete-copy-field' : {'source':'*_i', 'dest':['title', '*_s']},\n" + + " 'replace-field-type' : {\n" + + " 'name':'boolean',\n" + + " 'class':'solr.BoolField',\n" + + " 'sortMissingFirst':true\n" + + " },\n" + + " 'replace-field' : {\n" + + " 'name':'name',\n" + + " 'type':'string',\n" + + " 'indexed':true,\n" + + " 'stored':true\n" + + " },\n" + + " 'replace-dynamic-field' : {\n" + + " 'name':'attr_*',\n" + + " 'type':'string',\n" + + " 'indexed':true,\n" + + " 'stored':true,\n" + + " 'multiValued':true\n" + + " }\n" + + " }\n"; + String response = harness.post("/schema", json(payload)); Map map = (Map) fromJSONString(response); @@ -612,7 +643,7 @@ public void testMultipleCommands() throws Exception{ assertEquals(Boolean.TRUE, m.get("stored")); assertEquals(Boolean.FALSE, m.get("indexed")); - m = getObj(harness,"a2", "fields"); + m = getObj(harness, "a2", "fields"); assertNotNull("field a2 not created", m); assertEquals("string", m.get("type")); @@ -620,7 +651,7 @@ public void testMultipleCommands() throws Exception{ assertEquals(Boolean.TRUE, m.get("indexed")); assertEquals(Boolean.TRUE, m.get("uninvertible")); - m = getObj(harness,"*_lol", "dynamicFields"); + m = getObj(harness, "*_lol", "dynamicFields"); assertNotNull("field *_lol not created", m); assertEquals("string", m.get("type")); @@ -637,7 +668,7 @@ public void testMultipleCommands() throws Exception{ l = getSourceCopyFields(harness, "*_i"); s = new HashSet(); assertEquals(2, l.size()); - s.add(((Map)l.get(0)).get("dest")); + s.add(((Map) l.get(0)).get("dest")); s.add(((Map) l.get(1)).get("dest")); assertFalse(s.contains("title")); assertFalse(s.contains("*_s")); @@ -656,7 +687,7 @@ public void testMultipleCommands() throws Exception{ m = getObj(harness, "mySimField", "fieldTypes"); assertNotNull(m); - m = (Map)m.get("similarity"); + m = (Map) m.get("similarity"); assertNotNull(m); assertEquals(SweetSpotSimilarity.class.getName(), m.get("class")); @@ -664,7 +695,7 @@ public void testMultipleCommands() throws Exception{ assertNotNull("field a4 not created", m); assertEquals("mySimField", m.get("type")); assertFieldSimilarity("a4", SweetSpotSimilarity.class); - + m = getObj(harness, "myWhitespaceTxtField", "fieldTypes"); assertNotNull(m); assertEquals(Boolean.FALSE, m.get("uninvertible")); @@ -673,7 +704,8 @@ public void testMultipleCommands() throws Exception{ m = getObj(harness, "a5", "fields"); assertNotNull("field a5 not created", m); assertEquals("myWhitespaceTxtField", 
m.get("type")); - assertNull(m.get("uninvertible")); // inherited, but API shouldn't return w/o explicit showDefaults + // inherited, but API shouldn't return w/o explicit showDefaults + assertNull(m.get("uninvertible")); assertFieldSimilarity("a5", BM25Similarity.class); // unspecified, expect default m = getObj(harness, "wdf_nocase", "fields"); @@ -688,9 +720,10 @@ public void testMultipleCommands() throws Exception{ m = getObj(harness, "boolean", "fieldTypes"); assertNotNull("'boolean' field type does not exist in the schema", m); assertNull(m.get("sortMissingLast")); - assertTrue((Boolean)m.get("sortMissingFirst")); + assertTrue((Boolean) m.get("sortMissingFirst")); - m = getObj(harness, "bind", "fields"); // this field will be rebuilt when "boolean" field type is replaced + // this field will be rebuilt when "boolean" field type is replaced + m = getObj(harness, "bind", "fields"); assertNotNull("'bind' field does not exist in the schema", m); m = getObj(harness, "name", "fields"); @@ -716,12 +749,13 @@ public void testCopyFieldRules() throws Exception { List l = getSourceCopyFields(harness, "bleh_s"); assertTrue("'bleh_s' copyField rule exists in the schema", l.isEmpty()); - String payload = "{\n" + - " 'add-copy-field' : {\n" + - " 'source' :'bleh_s',\n" + - " 'dest':'name'\n" + - " }\n" + - " }\n"; + String payload = + "{\n" + + " 'add-copy-field' : {\n" + + " 'source' :'bleh_s',\n" + + " 'dest':'name'\n" + + " }\n" + + " }\n"; String response = harness.post("/schema", json(payload)); @SuppressWarnings({"rawtypes"}) @@ -730,16 +764,17 @@ public void testCopyFieldRules() throws Exception { l = getSourceCopyFields(harness, "bleh_s"); assertFalse("'bleh_s' copyField rule doesn't exist", l.isEmpty()); - assertEquals("bleh_s", ((Map)l.get(0)).get("source")); - assertEquals("name", ((Map)l.get(0)).get("dest")); + assertEquals("bleh_s", ((Map) l.get(0)).get("source")); + assertEquals("name", ((Map) l.get(0)).get("dest")); // delete copy field rule - payload = "{\n" + - " 'delete-copy-field' : {\n" + - " 'source' :'bleh_s',\n" + - " 'dest':'name'\n" + - " }\n" + - " }\n"; + payload = + "{\n" + + " 'delete-copy-field' : {\n" + + " 'source' :'bleh_s',\n" + + " 'dest':'name'\n" + + " }\n" + + " }\n"; response = harness.post("/schema", json(payload)); map = (Map) fromJSONString(response); @@ -748,12 +783,13 @@ public void testCopyFieldRules() throws Exception { assertTrue("'bleh_s' copyField rule exists in the schema", l.isEmpty()); // copy and delete with multiple destination - payload = "{\n" + - " 'add-copy-field' : {\n" + - " 'source' :'bleh_s',\n" + - " 'dest':['name','bind']\n" + - " }\n" + - " }\n"; + payload = + "{\n" + + " 'add-copy-field' : {\n" + + " 'source' :'bleh_s',\n" + + " 'dest':['name','bind']\n" + + " }\n" + + " }\n"; response = harness.post("/schema", json(payload)); map = (Map) fromJSONString(response); assertNull(response, map.get("error")); @@ -761,12 +797,13 @@ public void testCopyFieldRules() throws Exception { l = getSourceCopyFields(harness, "bleh_s"); assertEquals(2, l.size()); - payload = "{\n" + - " 'delete-copy-field' : {\n" + - " 'source' :'bleh_s',\n" + - " 'dest':['name','bind']\n" + - " }\n" + - " }\n"; + payload = + "{\n" + + " 'delete-copy-field' : {\n" + + " 'source' :'bleh_s',\n" + + " 'dest':['name','bind']\n" + + " }\n" + + " }\n"; response = harness.post("/schema", json(payload)); map = (Map) fromJSONString(response); @@ -781,20 +818,21 @@ public void testCopyFieldWithReplace() throws Exception { String newFieldName = "test_solr_14950"; // 
add-field-type - String addFieldTypeAnalyzer = "{\n" + - "'add-field-type' : {" + - " 'name' : 'myNewTextField',\n" + - " 'class':'solr.TextField',\n" + - " 'analyzer' : {\n" + - " 'charFilters' : [{\n" + - " 'name':'patternReplace',\n" + - " 'replacement':'$1$1',\n" + - " 'pattern':'([a-zA-Z])\\\\\\\\1+'\n" + - " }],\n" + - " 'tokenizer' : { 'name':'whitespace' },\n" + - " 'filters' : [{ 'name':'asciiFolding' }]\n" + - " }\n"+ - "}}"; + String addFieldTypeAnalyzer = + "{\n" + + "'add-field-type' : {" + + " 'name' : 'myNewTextField',\n" + + " 'class':'solr.TextField',\n" + + " 'analyzer' : {\n" + + " 'charFilters' : [{\n" + + " 'name':'patternReplace',\n" + + " 'replacement':'$1$1',\n" + + " 'pattern':'([a-zA-Z])\\\\\\\\1+'\n" + + " }],\n" + + " 'tokenizer' : { 'name':'whitespace' },\n" + + " 'filters' : [{ 'name':'asciiFolding' }]\n" + + " }\n" + + "}}"; String response = restTestHarness.post("/schema", json(addFieldTypeAnalyzer)); Map map = (Map) fromJSONString(response); @@ -803,14 +841,17 @@ public void testCopyFieldWithReplace() throws Exception { assertNotNull("'myNewTextField' field type does not exist in the schema", map); // add-field - String payload = "{\n" + - " 'add-field' : {\n" + - " 'name':'" + newFieldName + "',\n" + - " 'type':'myNewTextField',\n" + - " 'stored':true,\n" + - " 'indexed':true\n" + - " }\n" + - " }"; + String payload = + "{\n" + + " 'add-field' : {\n" + + " 'name':'" + + newFieldName + + "',\n" + + " 'type':'myNewTextField',\n" + + " 'stored':true,\n" + + " 'indexed':true\n" + + " }\n" + + " }"; response = harness.post("/schema", json(payload)); @@ -818,18 +859,21 @@ public void testCopyFieldWithReplace() throws Exception { assertNull(response, map.get("error")); Map m = getObj(harness, newFieldName, "fields"); - assertNotNull("'"+ newFieldName + "' field does not exist in the schema", m); + assertNotNull("'" + newFieldName + "' field does not exist in the schema", m); // add copy-field with explicit source and destination List l = getSourceCopyFields(harness, "bleh_s"); assertTrue("'bleh_s' copyField rule exists in the schema", l.isEmpty()); - payload = "{\n" + - " 'add-copy-field' : {\n" + - " 'source' :'bleh_s',\n" + - " 'dest':'"+ newFieldName + "'\n" + - " }\n" + - " }\n"; + payload = + "{\n" + + " 'add-copy-field' : {\n" + + " 'source' :'bleh_s',\n" + + " 'dest':'" + + newFieldName + + "'\n" + + " }\n" + + " }\n"; response = harness.post("/schema", json(payload)); map = (Map) fromJSONString(response); @@ -837,19 +881,20 @@ public void testCopyFieldWithReplace() throws Exception { l = getSourceCopyFields(harness, "bleh_s"); assertFalse("'bleh_s' copyField rule doesn't exist", l.isEmpty()); - assertEquals("bleh_s", ((Map)l.get(0)).get("source")); - assertEquals(newFieldName, ((Map)l.get(0)).get("dest")); + assertEquals("bleh_s", ((Map) l.get(0)).get("source")); + assertEquals(newFieldName, ((Map) l.get(0)).get("dest")); // replace-field-type - String replaceFieldTypeAnalyzer = "{\n" + - "'replace-field-type' : {" + - " 'name' : 'myNewTextField',\n" + - " 'class':'solr.TextField',\n" + - " 'analyzer' : {\n" + - " 'tokenizer' : { 'name':'whitespace' },\n" + - " 'filters' : [{ 'name':'asciiFolding' }]\n" + - " }\n"+ - "}}"; + String replaceFieldTypeAnalyzer = + "{\n" + + "'replace-field-type' : {" + + " 'name' : 'myNewTextField',\n" + + " 'class':'solr.TextField',\n" + + " 'analyzer' : {\n" + + " 'tokenizer' : { 'name':'whitespace' },\n" + + " 'filters' : [{ 'name':'asciiFolding' }]\n" + + " }\n" + + "}}"; response = restTestHarness.post("/schema", 
json(replaceFieldTypeAnalyzer)); map = (Map) fromJSONString(response); @@ -857,13 +902,13 @@ public void testCopyFieldWithReplace() throws Exception { map = getObj(restTestHarness, "myNewTextField", "fieldTypes"); assertNotNull(map); - Map analyzer = (Map)map.get("analyzer"); + Map analyzer = (Map) map.get("analyzer"); assertNull("'myNewTextField' shouldn't contain charFilters", analyzer.get("charFilters")); l = getSourceCopyFields(harness, "bleh_s"); assertFalse("'bleh_s' copyField rule doesn't exist", l.isEmpty()); - assertEquals("bleh_s", ((Map)l.get(0)).get("source")); - assertEquals(newFieldName, ((Map)l.get(0)).get("dest")); + assertEquals("bleh_s", ((Map) l.get(0)).get("source")); + assertEquals(newFieldName, ((Map) l.get(0)).get("dest")); // with replace-field String replaceField = "{'replace-field' : {'name':'" + newFieldName + "', 'type':'string'}}"; @@ -873,8 +918,8 @@ public void testCopyFieldWithReplace() throws Exception { l = getSourceCopyFields(harness, "bleh_s"); assertFalse("'bleh_s' copyField rule doesn't exist", l.isEmpty()); - assertEquals("bleh_s", ((Map)l.get(0)).get("source")); - assertEquals(newFieldName, ((Map)l.get(0)).get("dest")); + assertEquals("bleh_s", ((Map) l.get(0)).get("source")); + assertEquals(newFieldName, ((Map) l.get(0)).get("dest")); } @SuppressWarnings({"unchecked", "rawtypes"}) @@ -891,7 +936,8 @@ public void testDeleteAndReplace() throws Exception { assertNull("'NewFieldType' field type already exists in the schema", map); List list = getSourceCopyFields(harness, "NewField1"); - assertEquals("There is already a copy field with source 'NewField1' in the schema", 0, list.size()); + assertEquals( + "There is already a copy field with source 'NewField1' in the schema", 0, list.size()); map = getObj(harness, "NewDynamicField1*", "dynamicFields"); assertNull("Dynamic field 'NewDynamicField1*' already exists in the schema", map); @@ -899,23 +945,24 @@ public void testDeleteAndReplace() throws Exception { map = getObj(harness, "NewDynamicField2*", "dynamicFields"); assertNull("Dynamic field 'NewDynamicField2*' already exists in the schema", map); - String cmds = "{\n" + - " 'add-field-type': { 'name':'NewFieldType', 'class':'solr.StrField' },\n" + - " 'add-field': [{ 'name':'NewField1', 'type':'NewFieldType' },\n" + - " { 'name':'NewField2', 'type':'NewFieldType' },\n" + - " { 'name':'NewField3', 'type':'NewFieldType' },\n" + - " { 'name':'NewField4', 'type':'NewFieldType' }],\n" + - " 'add-dynamic-field': [{ 'name':'NewDynamicField1*', 'type':'NewFieldType' },\n" + - " { 'name':'NewDynamicField2*', 'type':'NewFieldType' },\n" + - " { 'name':'NewDynamicField3*', 'type':'NewFieldType' }],\n" + - " 'add-copy-field': [{'source':'NewField1', 'dest':['NewField2', 'NewDynamicField1A']},\n" + - " {'source':'NewDynamicField1*', 'dest':'NewField2' },\n" + - " {'source':'NewDynamicField2*', 'dest':'NewField2' },\n" + - " {'source':'NewDynamicField3*', 'dest':'NewField3' },\n" + - " {'source':'NewField4', 'dest':'NewField3' },\n" + - " {'source':'NewField4', 'dest':'NewField2', maxChars: 100 },\n" + - " {'source':'NewField4', 'dest':['NewField1'], maxChars: 3333 }]\n" + - "}\n"; + String cmds = + "{\n" + + " 'add-field-type': { 'name':'NewFieldType', 'class':'solr.StrField' },\n" + + " 'add-field': [{ 'name':'NewField1', 'type':'NewFieldType' },\n" + + " { 'name':'NewField2', 'type':'NewFieldType' },\n" + + " { 'name':'NewField3', 'type':'NewFieldType' },\n" + + " { 'name':'NewField4', 'type':'NewFieldType' }],\n" + + " 'add-dynamic-field': [{ 
'name':'NewDynamicField1*', 'type':'NewFieldType' },\n" + + " { 'name':'NewDynamicField2*', 'type':'NewFieldType' },\n" + + " { 'name':'NewDynamicField3*', 'type':'NewFieldType' }],\n" + + " 'add-copy-field': [{'source':'NewField1', 'dest':['NewField2', 'NewDynamicField1A']},\n" + + " {'source':'NewDynamicField1*', 'dest':'NewField2' },\n" + + " {'source':'NewDynamicField2*', 'dest':'NewField2' },\n" + + " {'source':'NewDynamicField3*', 'dest':'NewField3' },\n" + + " {'source':'NewField4', 'dest':'NewField3' },\n" + + " {'source':'NewField4', 'dest':'NewField2', maxChars: 100 },\n" + + " {'source':'NewField4', 'dest':['NewField1'], maxChars: 3333 }]\n" + + "}\n"; String response = harness.post("/schema", json(cmds)); @@ -940,7 +987,7 @@ public void testDeleteAndReplace() throws Exception { list = getSourceCopyFields(harness, "NewField1"); Set set = new HashSet(); for (Object obj : list) { - set.add(((Map)obj).get("dest")); + set.add(((Map) obj).get("dest")); } assertEquals(2, list.size()); assertTrue(set.contains("NewField2")); @@ -948,21 +995,21 @@ public void testDeleteAndReplace() throws Exception { list = getSourceCopyFields(harness, "NewDynamicField1*"); assertEquals(1, list.size()); - assertEquals("NewField2", ((Map)list.get(0)).get("dest")); + assertEquals("NewField2", ((Map) list.get(0)).get("dest")); list = getSourceCopyFields(harness, "NewDynamicField2*"); assertEquals(1, list.size()); - assertEquals("NewField2", ((Map)list.get(0)).get("dest")); + assertEquals("NewField2", ((Map) list.get(0)).get("dest")); list = getSourceCopyFields(harness, "NewDynamicField3*"); assertEquals(1, list.size()); - assertEquals("NewField3", ((Map)list.get(0)).get("dest")); + assertEquals("NewField3", ((Map) list.get(0)).get("dest")); list = getSourceCopyFields(harness, "NewField4"); assertEquals(3, list.size()); map.clear(); - for (Object obj : list) { - map.put(((Map)obj).get("dest"), ((Map)obj).get("maxChars")); + for (Object obj : list) { + map.put(((Map) obj).get("dest"), ((Map) obj).get("maxChars")); } assertTrue(map.containsKey("NewField1")); assertEquals(3333L, map.get("NewField1")); @@ -976,23 +1023,30 @@ public void testDeleteAndReplace() throws Exception { map = (Map) fromJSONString(response); Object errors = map.get("error"); assertNotNull(errors); - assertTrue(errors.toString().contains("Can't delete 'NewFieldType' because it's the field type of ")); + assertTrue( + errors.toString().contains("Can't delete 'NewFieldType' because it's the field type of ")); cmds = "{'delete-field' : {'name':'NewField1'}}"; response = harness.post("/schema", json(cmds)); map = (Map) fromJSONString(response); errors = map.get("error"); assertNotNull(errors); - assertTrue(errors.toString().contains - ("Can't delete field 'NewField1' because it's referred to by at least one copy field directive")); + assertTrue( + errors + .toString() + .contains( + "Can't delete field 'NewField1' because it's referred to by at least one copy field directive")); cmds = "{'delete-field' : {'name':'NewField2'}}"; response = harness.post("/schema", json(cmds)); map = (Map) fromJSONString(response); errors = map.get("error"); assertNotNull(errors); - assertTrue(errors.toString().contains - ("Can't delete field 'NewField2' because it's referred to by at least one copy field directive")); + assertTrue( + errors + .toString() + .contains( + "Can't delete field 'NewField2' because it's referred to by at least one copy field directive")); cmds = "{'replace-field' : {'name':'NewField1', 'type':'string'}}"; response = 
harness.post("/schema", json(cmds)); @@ -1002,7 +1056,7 @@ public void testDeleteAndReplace() throws Exception { list = getSourceCopyFields(harness, "NewField1"); set = new HashSet(); for (Object obj : list) { - set.add(((Map)obj).get("dest")); + set.add(((Map) obj).get("dest")); } assertEquals(2, list.size()); assertTrue(set.contains("NewField2")); @@ -1013,8 +1067,11 @@ public void testDeleteAndReplace() throws Exception { map = (Map) fromJSONString(response); errors = map.get("error"); assertNotNull(errors); - assertTrue(errors.toString().contains - ("copyField dest :'NewDynamicField1A' is not an explicit field and doesn't match a dynamicField.")); + assertTrue( + errors + .toString() + .contains( + "copyField dest :'NewDynamicField1A' is not an explicit field and doesn't match a dynamicField.")); cmds = "{'replace-field' : {'name':'NewField2', 'type':'string'}}"; response = harness.post("/schema", json(cmds)); @@ -1025,7 +1082,7 @@ public void testDeleteAndReplace() throws Exception { list = getDestCopyFields(harness, "NewField2"); set = new HashSet(); for (Object obj : list) { - set.add(((Map)obj).get("source")); + set.add(((Map) obj).get("source")); } assertEquals(4, list.size()); assertTrue(set.contains("NewField1")); @@ -1048,7 +1105,8 @@ public void testDeleteAndReplace() throws Exception { map = (Map) fromJSONString(response); errors = map.get("error"); assertNull(errors); - // Make sure the copy field directives with destinations matching NewDynamicField1* are preserved + // Make sure the copy field directives with destinations matching NewDynamicField1* are + // preserved list = getDestCopyFields(harness, "NewDynamicField1A"); assertEquals(1, list.size()); assertEquals("NewField1", ((Map) list.get(0)).get("source")); @@ -1057,23 +1115,25 @@ public void testDeleteAndReplace() throws Exception { response = harness.post("/schema", json(cmds)); map = (Map) fromJSONString(response); assertNull(map.get("error")); - // Make sure the copy field directives with sources and destinations of type NewFieldType are preserved + // Make sure the copy field directives with sources and destinations of type NewFieldType are + // preserved list = getDestCopyFields(harness, "NewField3"); assertEquals(2, list.size()); set = new HashSet(); for (Object obj : list) { - set.add(((Map)obj).get("source")); + set.add(((Map) obj).get("source")); } assertTrue(set.contains("NewField4")); assertTrue(set.contains("NewDynamicField3*")); - cmds = "{\n" + - " 'delete-copy-field': [{'source':'NewField1', 'dest':['NewField2', 'NewDynamicField1A'] },\n" + - " {'source':'NewDynamicField1*', 'dest':'NewField2' },\n" + - " {'source':'NewDynamicField2*', 'dest':'NewField2' },\n" + - " {'source':'NewDynamicField3*', 'dest':'NewField3' },\n" + - " {'source':'NewField4', 'dest':['NewField1', 'NewField2', 'NewField3']}]\n" + - "}\n"; + cmds = + "{\n" + + " 'delete-copy-field': [{'source':'NewField1', 'dest':['NewField2', 'NewDynamicField1A'] },\n" + + " {'source':'NewDynamicField1*', 'dest':'NewField2' },\n" + + " {'source':'NewDynamicField2*', 'dest':'NewField2' },\n" + + " {'source':'NewDynamicField3*', 'dest':'NewField3' },\n" + + " {'source':'NewField4', 'dest':['NewField1', 'NewField2', 'NewField3']}]\n" + + "}\n"; response = harness.post("/schema", json(cmds)); map = (Map) fromJSONString(response); assertNull(map.get("error")); @@ -1087,20 +1147,22 @@ public void testDeleteAndReplace() throws Exception { assertEquals(0, list.size()); list = getSourceCopyFields(harness, "NewField4"); assertEquals(0, list.size()); - - 
cmds = "{'delete-field': [{'name':'NewField1'},{'name':'NewField2'},{'name':'NewField3'},{'name':'NewField4'}]}"; + + cmds = + "{'delete-field': [{'name':'NewField1'},{'name':'NewField2'},{'name':'NewField3'},{'name':'NewField4'}]}"; response = harness.post("/schema", json(cmds)); map = (Map) fromJSONString(response); assertNull(map.get("error")); - cmds = "{'delete-dynamic-field': [{'name':'NewDynamicField1*'}," + - " {'name':'NewDynamicField2*'},\n" + - " {'name':'NewDynamicField3*'}]\n" + - "}\n"; + cmds = + "{'delete-dynamic-field': [{'name':'NewDynamicField1*'}," + + " {'name':'NewDynamicField2*'},\n" + + " {'name':'NewDynamicField3*'}]\n" + + "}\n"; response = harness.post("/schema", json(cmds)); map = (Map) fromJSONString(response); assertNull(map.get("error")); - + cmds = "{'delete-field-type':{'name':'NewFieldType'}}"; response = harness.post("/schema", json(cmds)); map = (Map) fromJSONString(response); @@ -1110,20 +1172,27 @@ public void testDeleteAndReplace() throws Exception { public void testSortableTextFieldWithAnalyzer() throws Exception { String fieldTypeName = "sort_text_type"; String fieldName = "sort_text"; - String payload = "{\n" + - " 'add-field-type' : {" + - " 'name' : '" + fieldTypeName + "',\n" + - " 'stored':true,\n" + - " 'indexed':true\n" + - " 'maxCharsForDocValues':6\n" + - " 'class':'solr.SortableTextField',\n" + - " 'analyzer' : {'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'}},\n" + - " },\n"+ - " 'add-field' : {\n" + - " 'name':'" + fieldName + "',\n" + - " 'type': '"+fieldTypeName+"',\n" + - " }\n" + - "}\n"; + String payload = + "{\n" + + " 'add-field-type' : {" + + " 'name' : '" + + fieldTypeName + + "',\n" + + " 'stored':true,\n" + + " 'indexed':true\n" + + " 'maxCharsForDocValues':6\n" + + " 'class':'solr.SortableTextField',\n" + + " 'analyzer' : {'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'}},\n" + + " },\n" + + " 'add-field' : {\n" + + " 'name':'" + + fieldName + + "',\n" + + " 'type': '" + + fieldTypeName + + "',\n" + + " }\n" + + "}\n"; String response = restTestHarness.post("/schema", json(payload)); @@ -1135,16 +1204,23 @@ public void testSortableTextFieldWithAnalyzer() throws Exception { Map fields = getObj(restTestHarness, fieldName, "fields"); assertNotNull("field " + fieldName + " not created", fields); - assertEquals(0, - getSolrClient().add(Arrays.asList(sdoc("id","1",fieldName,"xxx aaa"), - sdoc("id","2",fieldName,"xxx bbb aaa"), - sdoc("id","3",fieldName,"xxx bbb zzz"))).getStatus()); - + assertEquals( + 0, + getSolrClient() + .add( + Arrays.asList( + sdoc("id", "1", fieldName, "xxx aaa"), + sdoc("id", "2", fieldName, "xxx bbb aaa"), + sdoc("id", "3", fieldName, "xxx bbb zzz"))) + .getStatus()); + assertEquals(0, getSolrClient().commit().getStatus()); { - SolrDocumentList docs = getSolrClient().query - (params("q",fieldName+":xxx","sort", fieldName + " asc, id desc")).getResults(); - + SolrDocumentList docs = + getSolrClient() + .query(params("q", fieldName + ":xxx", "sort", fieldName + " asc, id desc")) + .getResults(); + assertEquals(3L, docs.getNumFound()); assertEquals(3L, docs.size()); assertEquals("1", docs.get(0).getFieldValue("id")); @@ -1152,25 +1228,25 @@ public void testSortableTextFieldWithAnalyzer() throws Exception { assertEquals("2", docs.get(2).getFieldValue("id")); } { - SolrDocumentList docs = getSolrClient().query - (params("q",fieldName+":xxx", "sort", fieldName + " desc, id asc")).getResults(); - + SolrDocumentList docs = + getSolrClient() + .query(params("q", fieldName + ":xxx", "sort", fieldName 
+ " desc, id asc")) + .getResults(); + assertEquals(3L, docs.getNumFound()); assertEquals(3L, docs.size()); assertEquals("2", docs.get(0).getFieldValue("id")); assertEquals("3", docs.get(1).getFieldValue("id")); assertEquals("1", docs.get(2).getFieldValue("id")); } - } @Test public void testAddNewFieldAndQuery() throws Exception { - getSolrClient().add(Arrays.asList( - sdoc("id", "1", "term_s", "tux"))); + getSolrClient().add(Arrays.asList(sdoc("id", "1", "term_s", "tux"))); getSolrClient().commit(true, true); - Map attrs = new HashMap<>(); + Map attrs = new HashMap<>(); attrs.put("name", "newstringtestfield"); attrs.put("type", "string"); @@ -1186,11 +1262,14 @@ public void testAddNewFieldAndQuery() throws Exception { public void testDateRangeFieldDefaults() throws Exception { try (LogListener listener = LogListener.warn(AbstractSpatialPrefixTreeFieldType.class)) { // Add a default date range field and verify success - assertJPost("/schema", "{\n" - + " \"add-field-type\":{\n" - + " \"name\":\"dr-field1\",\n" - + " \"class\":\"solr.DateRangeField\"}\n" - + "}", "/responseHeader/status==0"); + assertJPost( + "/schema", + "{\n" + + " \"add-field-type\":{\n" + + " \"name\":\"dr-field1\",\n" + + " \"class\":\"solr.DateRangeField\"}\n" + + "}", + "/responseHeader/status==0"); assertJQ( "/schema/fieldtypes/dr-field1", "fieldType=={\n" @@ -1201,26 +1280,32 @@ public void testDateRangeFieldDefaults() throws Exception { assertEquals(0, listener.getCount()); // Add a date range field with redundant invariants - assertJPost("/schema", "{\n" - + " \"add-field-type\":{\n" - + " \"name\":\"dr-redundant\",\n" - + " \"omitNorms\":\"true\",\n" - + " \"termOffsets\":\"false\",\n" - + " \"class\":\"solr.DateRangeField\"}\n" - + "}", "/responseHeader/status==0"); + assertJPost( + "/schema", + "{\n" + + " \"add-field-type\":{\n" + + " \"name\":\"dr-redundant\",\n" + + " \"omitNorms\":\"true\",\n" + + " \"termOffsets\":\"false\",\n" + + " \"class\":\"solr.DateRangeField\"}\n" + + "}", + "/responseHeader/status==0"); assertEquals(2, listener.getCount()); assertThat(listener.pollMessage(), containsString("hardcoded behavior is omitNorms=true")); assertThat(listener.pollMessage(), containsString("hardcoded behavior is termOffsets=false")); // Add a date range field with violated invariants - assertJPost("/schema", "{\n" - + " \"add-field-type\":{\n" - + " \"name\":\"dr-invalid\",\n" - + " \"omitNorms\":\"false\",\n" - + " \"termOffsets\":\"true\",\n" - + " \"class\":\"solr.DateRangeField\"}\n" - + "}", "/responseHeader/status==400"); + assertJPost( + "/schema", + "{\n" + + " \"add-field-type\":{\n" + + " \"name\":\"dr-invalid\",\n" + + " \"omitNorms\":\"false\",\n" + + " \"termOffsets\":\"true\",\n" + + " \"class\":\"solr.DateRangeField\"}\n" + + "}", + "/responseHeader/status==400"); // could assert no more listener events but listener's close() gives us a better error message } @@ -1234,20 +1319,29 @@ public void testSimilarityParser() throws Exception { String fieldTypeName = "MySimilarityField"; String fieldName = "similarityTestField"; - String payload = "{\n" + - " 'add-field-type' : {" + - " 'name' : '" + fieldTypeName + "',\n" + - " 'class':'solr.TextField',\n" + - " 'analyzer' : {'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'}},\n" + - " 'similarity' : {'class':'org.apache.solr.search.similarities.BM25SimilarityFactory', 'k1':"+k1+", 'b':"+b+" }\n" + - " },\n"+ - " 'add-field' : {\n" + - " 'name':'" + fieldName + "',\n" + - " 'type': 'MySimilarityField',\n" + - " 'stored':true,\n" + - " 
'indexed':true\n" + - " }\n" + - "}\n"; + String payload = + "{\n" + + " 'add-field-type' : {" + + " 'name' : '" + + fieldTypeName + + "',\n" + + " 'class':'solr.TextField',\n" + + " 'analyzer' : {'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'}},\n" + + " 'similarity' : {'class':'org.apache.solr.search.similarities.BM25SimilarityFactory', 'k1':" + + k1 + + ", 'b':" + + b + + " }\n" + + " },\n" + + " 'add-field' : {\n" + + " 'name':'" + + fieldName + + "',\n" + + " 'type': 'MySimilarityField',\n" + + " 'stored':true,\n" + + " 'indexed':true\n" + + " }\n" + + "}\n"; String response = harness.post("/schema", json(payload)); @@ -1258,25 +1352,34 @@ public void testSimilarityParser() throws Exception { @SuppressWarnings({"rawtypes"}) Map fields = getObj(harness, fieldName, "fields"); assertNotNull("field " + fieldName + " not created", fields); - - assertFieldSimilarity(fieldName, BM25Similarity.class, - sim -> assertEquals("Unexpected k1", k1, sim.getK1(), .001), - sim -> assertEquals("Unexpected b", b, sim.getB(), .001)); + + assertFieldSimilarity( + fieldName, + BM25Similarity.class, + sim -> assertEquals("Unexpected k1", k1, sim.getK1(), .001), + sim -> assertEquals("Unexpected b", b, sim.getB(), .001)); final String independenceMeasure = "Saturated"; - final boolean discountOverlaps = false; - payload = "{\n" + - " 'replace-field-type' : {" + - " 'name' : '" + fieldTypeName + "',\n" + - " 'class':'solr.TextField',\n" + - " 'analyzer' : {'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'}},\n" + - " 'similarity' : {\n" + - " 'class':'org.apache.solr.search.similarities.DFISimilarityFactory',\n" + - " 'independenceMeasure':'" + independenceMeasure + "',\n" + - " 'discountOverlaps':" + discountOverlaps + "\n" + - " }\n" + - " }\n"+ - "}\n"; + final boolean discountOverlaps = false; + payload = + "{\n" + + " 'replace-field-type' : {" + + " 'name' : '" + + fieldTypeName + + "',\n" + + " 'class':'solr.TextField',\n" + + " 'analyzer' : {'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'}},\n" + + " 'similarity' : {\n" + + " 'class':'org.apache.solr.search.similarities.DFISimilarityFactory',\n" + + " 'independenceMeasure':'" + + independenceMeasure + + "',\n" + + " 'discountOverlaps':" + + discountOverlaps + + "\n" + + " }\n" + + " }\n" + + "}\n"; response = harness.post("/schema", json(payload)); @@ -1285,19 +1388,27 @@ public void testSimilarityParser() throws Exception { fields = getObj(harness, fieldName, "fields"); assertNotNull("field " + fieldName + " not created", fields); - assertFieldSimilarity(fieldName, DFISimilarity.class, - sim -> assertEquals("Unexpected independenceMeasure", independenceMeasure, sim.getIndependence().toString()), - sim -> assertEquals("Unexpected discountedOverlaps", discountOverlaps, sim.getDiscountOverlaps())); + assertFieldSimilarity( + fieldName, + DFISimilarity.class, + sim -> + assertEquals( + "Unexpected independenceMeasure", + independenceMeasure, + sim.getIndependence().toString()), + sim -> + assertEquals( + "Unexpected discountedOverlaps", discountOverlaps, sim.getDiscountOverlaps())); } @SuppressWarnings({"rawtypes"}) public static Map getObj(RestTestHarness restHarness, String fld, String key) throws Exception { Map map = getRespMap(restHarness); - List l = (List) ((Map)map.get("schema")).get(key); + List l = (List) ((Map) map.get("schema")).get(key); for (Object o : l) { - @SuppressWarnings({"rawtypes"})Map m = (Map) o; - if (fld.equals(m.get("name"))) - return m; + @SuppressWarnings({"rawtypes"}) + Map m = (Map) o; + if 
(fld.equals(m.get("name"))) return m; } return null; } @@ -1316,7 +1427,7 @@ public static Map getAsMap(String uri, RestTestHarness restHarness) throws Excep @SuppressWarnings({"unchecked", "rawtypes"}) public static List getSourceCopyFields(RestTestHarness harness, String src) throws Exception { Map map = getRespMap(harness); - List l = (List) ((Map)map.get("schema")).get("copyFields"); + List l = (List) ((Map) map.get("schema")).get("copyFields"); List result = new ArrayList(); for (Object o : l) { Map m = (Map) o; @@ -1328,7 +1439,7 @@ public static List getSourceCopyFields(RestTestHarness harness, String src) thro @SuppressWarnings({"unchecked", "rawtypes"}) public static List getDestCopyFields(RestTestHarness harness, String dest) throws Exception { Map map = getRespMap(harness); - List l = (List) ((Map)map.get("schema")).get("copyFields"); + List l = (List) ((Map) map.get("schema")).get("copyFields"); List result = new ArrayList(); for (Object o : l) { Map m = (Map) o; @@ -1338,31 +1449,35 @@ public static List getDestCopyFields(RestTestHarness harness, String dest) throw } /** - * whitebox checks the Similarity for the specified field according to {@link SolrCore#getLatestSchema} - * - * Executes each of the specified Similarity-accepting validators. + * whitebox checks the Similarity for the specified field according to {@link + * SolrCore#getLatestSchema} + * + *
<p>
Executes each of the specified Similarity-accepting validators. */ @SafeVarargs @SuppressWarnings({"unchecked", "varargs"}) - private static void assertFieldSimilarity(String fieldname, Class expected, Consumer... validators) { + private static void assertFieldSimilarity( + String fieldname, Class expected, Consumer... validators) { CoreContainer cc = jetty.getCoreContainer(); try (SolrCore core = cc.getCore("collection1")) { SimilarityFactory simfac = core.getLatestSchema().getSimilarityFactory(); assertNotNull(simfac); - assertTrue("test only works with SchemaSimilarityFactory", - simfac instanceof SchemaSimilarityFactory); - + assertTrue( + "test only works with SchemaSimilarityFactory", + simfac instanceof SchemaSimilarityFactory); + Similarity mainSim = core.getLatestSchema().getSimilarity(); assertNotNull(mainSim); - + // sanity check simfac vs sim in use - also verify infom called on simfac, otherwise exception assertEquals(mainSim, simfac.getSimilarity()); - - assertTrue("test only works with PerFieldSimilarityWrapper, SchemaSimilarityFactory redefined?", - mainSim instanceof PerFieldSimilarityWrapper); - Similarity fieldSim = ((PerFieldSimilarityWrapper)mainSim).get(fieldname); + + assertTrue( + "test only works with PerFieldSimilarityWrapper, SchemaSimilarityFactory redefined?", + mainSim instanceof PerFieldSimilarityWrapper); + Similarity fieldSim = ((PerFieldSimilarityWrapper) mainSim).get(fieldname); assertEquals("wrong sim for field=" + fieldname, expected, fieldSim.getClass()); - Arrays.asList(validators).forEach(v -> v.accept((T)fieldSim)); + Arrays.asList(validators).forEach(v -> v.accept((T) fieldSim)); } } } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestCopyFieldCollectionResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestCopyFieldCollectionResource.java index a444a8459fb..60f526a91cb 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestCopyFieldCollectionResource.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestCopyFieldCollectionResource.java @@ -15,94 +15,77 @@ * limitations under the License. 
*/ package org.apache.solr.rest.schema; + import org.apache.solr.rest.SolrRestletTestBase; import org.junit.Test; public class TestCopyFieldCollectionResource extends SolrRestletTestBase { @Test public void testXMLGetAllCopyFields() throws Exception { - assertQ("/schema/copyfields?indent=on&wt=xml", + assertQ( + "/schema/copyfields?indent=on&wt=xml", "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='src_sub_no_ast_i']" - +" and str[@name='dest'][.='title']]", - + + " and str[@name='dest'][.='title']]", "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='title']" - +" and str[@name='dest'][.='title_stemmed']" - +" and int[@name='maxChars'][.='200']]", - - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='title']" - +" and str[@name='dest'][.='dest_sub_no_ast_s']]", - + + " and str[@name='dest'][.='title_stemmed']" + + " and int[@name='maxChars'][.='200']]", + "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='title']" + + " and str[@name='dest'][.='dest_sub_no_ast_s']]", "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" - +" and str[@name='dest'][.='title']]", - - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" - +" and str[@name='dest'][.='*_s']]", - - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" - +" and str[@name='dest'][.='*_dest_sub_s']]", - + + " and str[@name='dest'][.='title']]", "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" - +" and str[@name='dest'][.='dest_sub_no_ast_s']]", - - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" - +" and str[@name='dest'][.='title']]", - - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" - +" and str[@name='dest'][.='*_s']]", - - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" - +" and str[@name='dest'][.='*_dest_sub_s']]", - + + " and str[@name='dest'][.='*_s']]", + "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" + + " and str[@name='dest'][.='*_dest_sub_s']]", + "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" + + " and str[@name='dest'][.='dest_sub_no_ast_s']]", "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" - +" and str[@name='dest'][.='dest_sub_no_ast_s']]", - - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='src_sub_no_ast_i']" - +" and str[@name='dest'][.='*_s']]", - - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='src_sub_no_ast_i']" - +" and str[@name='dest'][.='*_dest_sub_s']]", - - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='src_sub_no_ast_i']" - +" and str[@name='dest'][.='dest_sub_no_ast_s']]", - - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='title_*']" - +" and str[@name='dest'][.='text']]", - - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='title_*']" - +" and str[@name='dest'][.='*_s']]", - - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='title_*']" - +" and str[@name='dest'][.='*_dest_sub_s']]", - - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='title_*']" - +" and str[@name='dest'][.='dest_sub_no_ast_s']]"); + + " and str[@name='dest'][.='title']]", + "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" + + " and str[@name='dest'][.='*_s']]", + "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" + + " and str[@name='dest'][.='*_dest_sub_s']]", + 
"/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" + + " and str[@name='dest'][.='dest_sub_no_ast_s']]", + "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='src_sub_no_ast_i']" + + " and str[@name='dest'][.='*_s']]", + "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='src_sub_no_ast_i']" + + " and str[@name='dest'][.='*_dest_sub_s']]", + "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='src_sub_no_ast_i']" + + " and str[@name='dest'][.='dest_sub_no_ast_s']]", + "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='title_*']" + + " and str[@name='dest'][.='text']]", + "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='title_*']" + + " and str[@name='dest'][.='*_s']]", + "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='title_*']" + + " and str[@name='dest'][.='*_dest_sub_s']]", + "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='title_*']" + + " and str[@name='dest'][.='dest_sub_no_ast_s']]"); } @Test public void testGetAllCopyFields() throws Exception { - assertJQ("/schema/copyfields", - "/copyFields/[1]=={'source':'src_sub_no_ast_i','dest':'title'}", - "/copyFields/[7]=={'source':'title','dest':'dest_sub_no_ast_s'}", - - "/copyFields/[8]=={'source':'*_i','dest':'title'}", - "/copyFields/[9]=={'source':'*_i','dest':'*_s'}", - "/copyFields/[10]=={'source':'*_i','dest':'*_dest_sub_s'}", - "/copyFields/[11]=={'source':'*_i','dest':'dest_sub_no_ast_s'}", - - "/copyFields/[12]=={'source':'*_src_sub_i','dest':'title'}", - "/copyFields/[13]=={'source':'*_src_sub_i','dest':'*_s'}", - "/copyFields/[14]=={'source':'*_src_sub_i','dest':'*_dest_sub_s'}", - "/copyFields/[15]=={'source':'*_src_sub_i','dest':'dest_sub_no_ast_s'}", - - "/copyFields/[16]=={'source':'src_sub_no_ast_i','dest':'*_s'}", - "/copyFields/[17]=={'source':'src_sub_no_ast_i','dest':'*_dest_sub_s'}", - "/copyFields/[18]=={'source':'src_sub_no_ast_i','dest':'dest_sub_no_ast_s'}"); - + assertJQ( + "/schema/copyfields", + "/copyFields/[1]=={'source':'src_sub_no_ast_i','dest':'title'}", + "/copyFields/[7]=={'source':'title','dest':'dest_sub_no_ast_s'}", + "/copyFields/[8]=={'source':'*_i','dest':'title'}", + "/copyFields/[9]=={'source':'*_i','dest':'*_s'}", + "/copyFields/[10]=={'source':'*_i','dest':'*_dest_sub_s'}", + "/copyFields/[11]=={'source':'*_i','dest':'dest_sub_no_ast_s'}", + "/copyFields/[12]=={'source':'*_src_sub_i','dest':'title'}", + "/copyFields/[13]=={'source':'*_src_sub_i','dest':'*_s'}", + "/copyFields/[14]=={'source':'*_src_sub_i','dest':'*_dest_sub_s'}", + "/copyFields/[15]=={'source':'*_src_sub_i','dest':'dest_sub_no_ast_s'}", + "/copyFields/[16]=={'source':'src_sub_no_ast_i','dest':'*_s'}", + "/copyFields/[17]=={'source':'src_sub_no_ast_i','dest':'*_dest_sub_s'}", + "/copyFields/[18]=={'source':'src_sub_no_ast_i','dest':'dest_sub_no_ast_s'}"); } @Test public void testRestrictSource() throws Exception { - assertQ("/schema/copyfields/?wt=xml&source.fl=title,*_i,*_src_sub_i,src_sub_no_ast_i", + assertQ( + "/schema/copyfields/?wt=xml&source.fl=title,*_i,*_src_sub_i,src_sub_no_ast_i", "count(/response/arr[@name='copyFields']/lst) = 16", // 4 + 4 + 4 + 4 "count(/response/arr[@name='copyFields']/lst/str[@name='source'][.='title']) = 4", "count(/response/arr[@name='copyFields']/lst/str[@name='source'][.='*_i']) = 4", @@ -112,7 +95,8 @@ public void testRestrictSource() throws Exception { @Test public void testRestrictDest() throws Exception { - 
assertQ("/schema/copyfields/?wt=xml&dest.fl=title,*_s,*_dest_sub_s,dest_sub_no_ast_s", + assertQ( + "/schema/copyfields/?wt=xml&dest.fl=title,*_s,*_dest_sub_s,dest_sub_no_ast_s", "count(/response/arr[@name='copyFields']/lst) = 16", // 3 + 4 + 4 + 5 "count(/response/arr[@name='copyFields']/lst/str[@name='dest'][.='title']) = 3", "count(/response/arr[@name='copyFields']/lst/str[@name='dest'][.='*_s']) = 4", @@ -122,15 +106,13 @@ public void testRestrictDest() throws Exception { @Test public void testRestrictSourceAndDest() throws Exception { - assertQ("/schema/copyfields/?wt=xml&source.fl=title,*_i&dest.fl=title,dest_sub_no_ast_s", + assertQ( + "/schema/copyfields/?wt=xml&source.fl=title,*_i&dest.fl=title,dest_sub_no_ast_s", "count(/response/arr[@name='copyFields']/lst) = 3", - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='title']" + " and str[@name='dest'][.='dest_sub_no_ast_s']]", - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" + " and str[@name='dest'][.='title']]", - "/response/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" + " and str[@name='dest'][.='dest_sub_no_ast_s']]"); } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestDynamicFieldCollectionResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestDynamicFieldCollectionResource.java index 6734ca2fb69..dc2c57ca243 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestDynamicFieldCollectionResource.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestDynamicFieldCollectionResource.java @@ -15,49 +15,53 @@ * limitations under the License. */ package org.apache.solr.rest.schema; -import org.apache.solr.rest.SolrRestletTestBase; -import org.junit.Test; import java.io.IOException; +import org.apache.solr.rest.SolrRestletTestBase; +import org.junit.Test; public class TestDynamicFieldCollectionResource extends SolrRestletTestBase { @Test public void testGetAllDynamicFields() throws Exception { - assertQ("/schema/dynamicfields?indent=on&wt=xml", - "(/response/arr[@name='dynamicFields']/lst/str[@name='name'])[1] = '*_coordinate'", - "(/response/arr[@name='dynamicFields']/lst/str[@name='name'])[2] = 'ignored_*'", - "(/response/arr[@name='dynamicFields']/lst/str[@name='name'])[3] = '*_mfacet'"); + assertQ( + "/schema/dynamicfields?indent=on&wt=xml", + "(/response/arr[@name='dynamicFields']/lst/str[@name='name'])[1] = '*_coordinate'", + "(/response/arr[@name='dynamicFields']/lst/str[@name='name'])[2] = 'ignored_*'", + "(/response/arr[@name='dynamicFields']/lst/str[@name='name'])[3] = '*_mfacet'"); } @Test public void testGetTwoDynamicFields() throws IOException { - assertQ("/schema/dynamicfields?indent=on&wt=xml&fl=*_i,*_s", - "count(/response/arr[@name='dynamicFields']/lst/str[@name='name']) = 2", - "(/response/arr[@name='dynamicFields']/lst/str[@name='name'])[1] = '*_i'", - "(/response/arr[@name='dynamicFields']/lst/str[@name='name'])[2] = '*_s'"); + assertQ( + "/schema/dynamicfields?indent=on&wt=xml&fl=*_i,*_s", + "count(/response/arr[@name='dynamicFields']/lst/str[@name='name']) = 2", + "(/response/arr[@name='dynamicFields']/lst/str[@name='name'])[1] = '*_i'", + "(/response/arr[@name='dynamicFields']/lst/str[@name='name'])[2] = '*_s'"); } @Test public void testNotFoundDynamicFields() throws IOException { - assertQ("/schema/dynamicfields?indent=on&wt=xml&fl=*_not_in_there,this_one_isnt_either_*", - "count(/response/arr[@name='dynamicFields']) = 1", - "count(/response/arr[@name='dynamicfields']/lst/str[@name='name']) = 0"); + assertQ( + 
"/schema/dynamicfields?indent=on&wt=xml&fl=*_not_in_there,this_one_isnt_either_*", + "count(/response/arr[@name='dynamicFields']) = 1", + "count(/response/arr[@name='dynamicfields']/lst/str[@name='name']) = 0"); } @Test public void testJsonGetAllDynamicFields() throws Exception { - assertJQ("/schema/dynamicfields?indent=on", - "/dynamicFields/[0]/name=='*_coordinate'", - "/dynamicFields/[1]/name=='ignored_*'", - "/dynamicFields/[2]/name=='*_mfacet'"); + assertJQ( + "/schema/dynamicfields?indent=on", + "/dynamicFields/[0]/name=='*_coordinate'", + "/dynamicFields/[1]/name=='ignored_*'", + "/dynamicFields/[2]/name=='*_mfacet'"); } @Test public void testJsonGetTwoDynamicFields() throws Exception { - assertJQ("/schema/dynamicfields?indent=on&fl=*_i,*_s&wt=xml", // assertJQ will fix the wt param to be json - "/dynamicFields/[0]/name=='*_i'", - "/dynamicFields/[1]/name=='*_s'"); + // assertJQ will fix the wt param to be json + assertJQ( + "/schema/dynamicfields?indent=on&fl=*_i,*_s&wt=xml", + "/dynamicFields/[0]/name=='*_i'", + "/dynamicFields/[1]/name=='*_s'"); } - - } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestDynamicFieldResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestDynamicFieldResource.java index 79b1bf06d7c..4dcd6bc824d 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestDynamicFieldResource.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestDynamicFieldResource.java @@ -15,6 +15,7 @@ * limitations under the License. */ package org.apache.solr.rest.schema; + import org.apache.solr.rest.SolrRestletTestBase; import org.junit.Test; @@ -22,51 +23,54 @@ public class TestDynamicFieldResource extends SolrRestletTestBase { @Test public void testGetDynamicField() throws Exception { final boolean expectedDocValues = Boolean.getBoolean(NUMERIC_DOCVALUES_SYSPROP); - assertQ("/schema/dynamicfields/*_i?indent=on&wt=xml&showDefaults=on", - "count(/response/lst[@name='dynamicField']) = 1", - "/response/lst[@name='dynamicField']/str[@name='name'] = '*_i'", - "/response/lst[@name='dynamicField']/str[@name='type'] = 'int'", - "/response/lst[@name='dynamicField']/bool[@name='indexed'] = 'true'", - "/response/lst[@name='dynamicField']/bool[@name='stored'] = 'true'", - "/response/lst[@name='dynamicField']/bool[@name='docValues'] = '"+expectedDocValues+"'", - "/response/lst[@name='dynamicField']/bool[@name='termVectors'] = 'false'", - "/response/lst[@name='dynamicField']/bool[@name='termPositions'] = 'false'", - "/response/lst[@name='dynamicField']/bool[@name='termOffsets'] = 'false'", - "/response/lst[@name='dynamicField']/bool[@name='omitNorms'] = 'true'", - "/response/lst[@name='dynamicField']/bool[@name='omitTermFreqAndPositions'] = 'true'", - "/response/lst[@name='dynamicField']/bool[@name='omitPositions'] = 'false'", - "/response/lst[@name='dynamicField']/bool[@name='storeOffsetsWithPositions'] = 'false'", - "/response/lst[@name='dynamicField']/bool[@name='multiValued'] = 'false'", - "/response/lst[@name='dynamicField']/bool[@name='required'] = 'false'", - "/response/lst[@name='dynamicField']/bool[@name='tokenized'] = 'false'"); + assertQ( + "/schema/dynamicfields/*_i?indent=on&wt=xml&showDefaults=on", + "count(/response/lst[@name='dynamicField']) = 1", + "/response/lst[@name='dynamicField']/str[@name='name'] = '*_i'", + "/response/lst[@name='dynamicField']/str[@name='type'] = 'int'", + "/response/lst[@name='dynamicField']/bool[@name='indexed'] = 'true'", + "/response/lst[@name='dynamicField']/bool[@name='stored'] = 'true'", + 
"/response/lst[@name='dynamicField']/bool[@name='docValues'] = '" + expectedDocValues + "'", + "/response/lst[@name='dynamicField']/bool[@name='termVectors'] = 'false'", + "/response/lst[@name='dynamicField']/bool[@name='termPositions'] = 'false'", + "/response/lst[@name='dynamicField']/bool[@name='termOffsets'] = 'false'", + "/response/lst[@name='dynamicField']/bool[@name='omitNorms'] = 'true'", + "/response/lst[@name='dynamicField']/bool[@name='omitTermFreqAndPositions'] = 'true'", + "/response/lst[@name='dynamicField']/bool[@name='omitPositions'] = 'false'", + "/response/lst[@name='dynamicField']/bool[@name='storeOffsetsWithPositions'] = 'false'", + "/response/lst[@name='dynamicField']/bool[@name='multiValued'] = 'false'", + "/response/lst[@name='dynamicField']/bool[@name='required'] = 'false'", + "/response/lst[@name='dynamicField']/bool[@name='tokenized'] = 'false'"); } @Test public void testGetNotFoundDynamicField() throws Exception { - assertQ("/schema/dynamicfields/*not_in_there?indent=on&wt=xml", - "count(/response/lst[@name='dynamicField']) = 0", - "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", - "/response/lst[@name='error']/int[@name='code'] = '404'"); - } - + assertQ( + "/schema/dynamicfields/*not_in_there?indent=on&wt=xml", + "count(/response/lst[@name='dynamicField']) = 0", + "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", + "/response/lst[@name='error']/int[@name='code'] = '404'"); + } + @Test public void testJsonGetDynamicField() throws Exception { final boolean expectedDocValues = Boolean.getBoolean(NUMERIC_DOCVALUES_SYSPROP); - assertJQ("/schema/dynamicfields/*_i?indent=on&showDefaults=on", - "/dynamicField/name=='*_i'", - "/dynamicField/type=='int'", - "/dynamicField/indexed==true", - "/dynamicField/stored==true", - "/dynamicField/docValues=="+expectedDocValues, - "/dynamicField/termVectors==false", - "/dynamicField/termPositions==false", - "/dynamicField/termOffsets==false", - "/dynamicField/omitNorms==true", - "/dynamicField/omitTermFreqAndPositions==true", - "/dynamicField/omitPositions==false", - "/dynamicField/storeOffsetsWithPositions==false", - "/dynamicField/multiValued==false", - "/dynamicField/required==false", - "/dynamicField/tokenized==false"); + assertJQ( + "/schema/dynamicfields/*_i?indent=on&showDefaults=on", + "/dynamicField/name=='*_i'", + "/dynamicField/type=='int'", + "/dynamicField/indexed==true", + "/dynamicField/stored==true", + "/dynamicField/docValues==" + expectedDocValues, + "/dynamicField/termVectors==false", + "/dynamicField/termPositions==false", + "/dynamicField/termOffsets==false", + "/dynamicField/omitNorms==true", + "/dynamicField/omitTermFreqAndPositions==true", + "/dynamicField/omitPositions==false", + "/dynamicField/storeOffsetsWithPositions==false", + "/dynamicField/multiValued==false", + "/dynamicField/required==false", + "/dynamicField/tokenized==false"); } } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestFieldCollectionResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestFieldCollectionResource.java index 19d04209909..daf147dc2b8 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestFieldCollectionResource.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestFieldCollectionResource.java @@ -20,34 +20,34 @@ import java.util.List; import java.util.Map; import java.util.Set; - import org.apache.solr.client.solrj.request.schema.SchemaRequest; import org.apache.solr.rest.SolrRestletTestBase; import org.junit.Test; - public class 
TestFieldCollectionResource extends SolrRestletTestBase { @Test public void testXMLGetAllFields() throws Exception { - assertQ("/schema/fields?wt=xml", - "(/response/arr[@name='fields']/lst/str[@name='name'])[1] = 'HTMLstandardtok'", - "(/response/arr[@name='fields']/lst/str[@name='name'])[2] = 'HTMLwhitetok'", - "(/response/arr[@name='fields']/lst/str[@name='name'])[3] = '_version_'"); + assertQ( + "/schema/fields?wt=xml", + "(/response/arr[@name='fields']/lst/str[@name='name'])[1] = 'HTMLstandardtok'", + "(/response/arr[@name='fields']/lst/str[@name='name'])[2] = 'HTMLwhitetok'", + "(/response/arr[@name='fields']/lst/str[@name='name'])[3] = '_version_'"); } - @Test public void testGetAllFields() throws Exception { - assertJQ("/schema/fields", - "/fields/[0]/name=='HTMLstandardtok'", - "/fields/[1]/name=='HTMLwhitetok'", - "/fields/[2]/name=='_version_'"); + assertJQ( + "/schema/fields", + "/fields/[0]/name=='HTMLstandardtok'", + "/fields/[1]/name=='HTMLwhitetok'", + "/fields/[2]/name=='_version_'"); } @Test public void testXMLGetThreeFieldsDontIncludeDynamic() throws IOException { // - assertQ("/schema/fields?wt=xml&fl=id,_version_,price_i", + assertQ( + "/schema/fields?wt=xml&fl=id,_version_,price_i", "count(/response/arr[@name='fields']/lst/str[@name='name']) = 2", "(/response/arr[@name='fields']/lst/str[@name='name'])[1] = 'id'", "(/response/arr[@name='fields']/lst/str[@name='name'])[2] = '_version_'"); @@ -55,37 +55,42 @@ public void testXMLGetThreeFieldsDontIncludeDynamic() throws IOException { @Test public void testXMLGetThreeFieldsIncludeDynamic() throws IOException { - assertQ("/schema/fields?wt=xml&fl=id,_version_,price_i&includeDynamic=on", - + assertQ( + "/schema/fields?wt=xml&fl=id,_version_,price_i&includeDynamic=on", "count(/response/arr[@name='fields']/lst/str[@name='name']) = 3", - "(/response/arr[@name='fields']/lst/str[@name='name'])[1] = 'id'", - "(/response/arr[@name='fields']/lst/str[@name='name'])[2] = '_version_'", - "(/response/arr[@name='fields']/lst/str[@name='name'])[3] = 'price_i'", - "/response/arr[@name='fields']/lst[ str[@name='name']='price_i' " - +" and str[@name='dynamicBase']='*_i']"); + + " and str[@name='dynamicBase']='*_i']"); } + @Test public void testXMLNotFoundFields() throws IOException { - assertQ("/schema/fields?&wt=xml&fl=not_in_there,this_one_either", + assertQ( + "/schema/fields?&wt=xml&fl=not_in_there,this_one_either", "count(/response/arr[@name='fields']) = 1", "count(/response/arr[@name='fields']/lst/str[@name='name']) = 0"); } - @Test public void testGetAllFieldsIncludeDynamic() throws Exception { - List> fields = new SchemaRequest.Fields(params("includeDynamic", "true")) - .process(getSolrClient()) - .getFields(); - - Set lookingForNames = asSet("HTMLstandardtok", "HTMLwhitetok", "_version_", // static - "*_d", "*_f", "*_b", "*_t", "*_l"); // dynamic + List> fields = + new SchemaRequest.Fields(params("includeDynamic", "true")) + .process(getSolrClient()) + .getFields(); + + Set lookingForNames = + asSet( + "HTMLstandardtok", + "HTMLwhitetok", + "_version_", // static + "*_d", + "*_f", + "*_b", + "*_t", + "*_l"); // dynamic fields.stream().map(f -> f.get("name")).forEach(lookingForNames::remove); assertTrue(lookingForNames.toString(), lookingForNames.isEmpty()); } - } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestFieldResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestFieldResource.java index 81620ede3fb..941a61c43a2 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestFieldResource.java +++ 
b/solr/core/src/test/org/apache/solr/rest/schema/TestFieldResource.java @@ -15,39 +15,42 @@ * limitations under the License. */ package org.apache.solr.rest.schema; + import org.apache.solr.rest.SolrRestletTestBase; import org.junit.Test; public class TestFieldResource extends SolrRestletTestBase { @Test public void testGetField() throws Exception { - assertQ("/schema/fields/test_postv?indent=on&wt=xml&showDefaults=true", - "count(/response/lst[@name='field']) = 1", - "count(/response/lst[@name='field']/*) = 19", - "/response/lst[@name='field']/str[@name='name'] = 'test_postv'", - "/response/lst[@name='field']/str[@name='type'] = 'text'", - "/response/lst[@name='field']/bool[@name='indexed'] = 'true'", - "/response/lst[@name='field']/bool[@name='stored'] = 'true'", - "/response/lst[@name='field']/bool[@name='uninvertible'] = 'true'", - "/response/lst[@name='field']/bool[@name='docValues'] = 'false'", - "/response/lst[@name='field']/bool[@name='termVectors'] = 'true'", - "/response/lst[@name='field']/bool[@name='termPositions'] = 'true'", - "/response/lst[@name='field']/bool[@name='termPayloads'] = 'false'", - "/response/lst[@name='field']/bool[@name='termOffsets'] = 'false'", - "/response/lst[@name='field']/bool[@name='omitNorms'] = 'false'", - "/response/lst[@name='field']/bool[@name='omitTermFreqAndPositions'] = 'false'", - "/response/lst[@name='field']/bool[@name='omitPositions'] = 'false'", - "/response/lst[@name='field']/bool[@name='storeOffsetsWithPositions'] = 'false'", - "/response/lst[@name='field']/bool[@name='multiValued'] = 'false'", - "/response/lst[@name='field']/bool[@name='large'] = 'false'", - "/response/lst[@name='field']/bool[@name='required'] = 'false'", - "/response/lst[@name='field']/bool[@name='tokenized'] = 'true'", - "/response/lst[@name='field']/bool[@name='useDocValuesAsStored'] = 'true'"); + assertQ( + "/schema/fields/test_postv?indent=on&wt=xml&showDefaults=true", + "count(/response/lst[@name='field']) = 1", + "count(/response/lst[@name='field']/*) = 19", + "/response/lst[@name='field']/str[@name='name'] = 'test_postv'", + "/response/lst[@name='field']/str[@name='type'] = 'text'", + "/response/lst[@name='field']/bool[@name='indexed'] = 'true'", + "/response/lst[@name='field']/bool[@name='stored'] = 'true'", + "/response/lst[@name='field']/bool[@name='uninvertible'] = 'true'", + "/response/lst[@name='field']/bool[@name='docValues'] = 'false'", + "/response/lst[@name='field']/bool[@name='termVectors'] = 'true'", + "/response/lst[@name='field']/bool[@name='termPositions'] = 'true'", + "/response/lst[@name='field']/bool[@name='termPayloads'] = 'false'", + "/response/lst[@name='field']/bool[@name='termOffsets'] = 'false'", + "/response/lst[@name='field']/bool[@name='omitNorms'] = 'false'", + "/response/lst[@name='field']/bool[@name='omitTermFreqAndPositions'] = 'false'", + "/response/lst[@name='field']/bool[@name='omitPositions'] = 'false'", + "/response/lst[@name='field']/bool[@name='storeOffsetsWithPositions'] = 'false'", + "/response/lst[@name='field']/bool[@name='multiValued'] = 'false'", + "/response/lst[@name='field']/bool[@name='large'] = 'false'", + "/response/lst[@name='field']/bool[@name='required'] = 'false'", + "/response/lst[@name='field']/bool[@name='tokenized'] = 'true'", + "/response/lst[@name='field']/bool[@name='useDocValuesAsStored'] = 'true'"); } @Test public void testGetNotFoundField() throws Exception { - assertQ("/schema/fields/not_in_there?indent=on&wt=xml", + assertQ( + "/schema/fields/not_in_there?indent=on&wt=xml", 
"count(/response/lst[@name='field']) = 0", "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", "/response/lst[@name='error']/int[@name='code'] = '404'"); @@ -55,47 +58,48 @@ public void testGetNotFoundField() throws Exception { @Test public void testJsonGetField() throws Exception { - assertJQ("/schema/fields/test_postv?indent=on&showDefaults=true", - "/field/name=='test_postv'", - "/field/type=='text'", - "/field/indexed==true", - "/field/stored==true", - "/field/uninvertible==true", - "/field/docValues==false", - "/field/termVectors==true", - "/field/termPositions==true", - "/field/termOffsets==false", - "/field/termPayloads==false", - "/field/omitNorms==false", - "/field/omitTermFreqAndPositions==false", - "/field/omitPositions==false", - "/field/storeOffsetsWithPositions==false", - "/field/multiValued==false", - "/field/required==false", - "/field/tokenized==true"); + assertJQ( + "/schema/fields/test_postv?indent=on&showDefaults=true", + "/field/name=='test_postv'", + "/field/type=='text'", + "/field/indexed==true", + "/field/stored==true", + "/field/uninvertible==true", + "/field/docValues==false", + "/field/termVectors==true", + "/field/termPositions==true", + "/field/termOffsets==false", + "/field/termPayloads==false", + "/field/omitNorms==false", + "/field/omitTermFreqAndPositions==false", + "/field/omitPositions==false", + "/field/storeOffsetsWithPositions==false", + "/field/multiValued==false", + "/field/required==false", + "/field/tokenized==true"); } + @Test public void testGetFieldIncludeDynamic() throws Exception { - assertQ("/schema/fields/some_crazy_name_i?indent=on&wt=xml&includeDynamic=true", + assertQ( + "/schema/fields/some_crazy_name_i?indent=on&wt=xml&includeDynamic=true", "/response/lst[@name='field']/str[@name='name'] = 'some_crazy_name_i'", "/response/lst[@name='field']/str[@name='dynamicBase'] = '*_i'"); } - @Test public void testGetFieldDontShowDefaults() throws Exception { - String[] tests = { - "count(/response/lst[@name='field']) = 1", - "count(/response/lst[@name='field']/*) = 6", - "/response/lst[@name='field']/str[@name='name'] = 'id'", - "/response/lst[@name='field']/str[@name='type'] = 'string'", - "/response/lst[@name='field']/bool[@name='indexed'] = 'true'", - "/response/lst[@name='field']/bool[@name='stored'] = 'true'", - "/response/lst[@name='field']/bool[@name='multiValued'] = 'false'", - "/response/lst[@name='field']/bool[@name='required'] = 'true'" + String[] tests = { + "count(/response/lst[@name='field']) = 1", + "count(/response/lst[@name='field']/*) = 6", + "/response/lst[@name='field']/str[@name='name'] = 'id'", + "/response/lst[@name='field']/str[@name='type'] = 'string'", + "/response/lst[@name='field']/bool[@name='indexed'] = 'true'", + "/response/lst[@name='field']/bool[@name='stored'] = 'true'", + "/response/lst[@name='field']/bool[@name='multiValued'] = 'false'", + "/response/lst[@name='field']/bool[@name='required'] = 'true'" }; assertQ("/schema/fields/id?indent=on&wt=xml", tests); assertQ("/schema/fields/id?indent=on&wt=xml&showDefaults=false", tests); } - } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestFieldTypeCollectionResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestFieldTypeCollectionResource.java index 53cd1c0c860..b0651b3a9b5 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestFieldTypeCollectionResource.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestFieldTypeCollectionResource.java @@ -15,6 +15,7 @@ * limitations under the License. 
*/ package org.apache.solr.rest.schema; + import org.apache.solr.rest.SolrRestletTestBase; import org.junit.Test; @@ -22,17 +23,19 @@ public class TestFieldTypeCollectionResource extends SolrRestletTestBase { @Test public void testGetAllFieldTypes() throws Exception { - assertQ("/schema/fieldtypes?indent=on&wt=xml", - "(/response/arr[@name='fieldTypes']/lst/str[@name='name'])[1] = 'HTMLstandardtok'", - "(/response/arr[@name='fieldTypes']/lst/str[@name='name'])[2] = 'HTMLwhitetok'", - "(/response/arr[@name='fieldTypes']/lst/str[@name='name'])[3] = 'boolean'"); + assertQ( + "/schema/fieldtypes?indent=on&wt=xml", + "(/response/arr[@name='fieldTypes']/lst/str[@name='name'])[1] = 'HTMLstandardtok'", + "(/response/arr[@name='fieldTypes']/lst/str[@name='name'])[2] = 'HTMLwhitetok'", + "(/response/arr[@name='fieldTypes']/lst/str[@name='name'])[3] = 'boolean'"); } @Test public void testJsonGetAllFieldTypes() throws Exception { - assertJQ("/schema/fieldtypes?indent=on", - "/fieldTypes/[0]/name=='HTMLstandardtok'", - "/fieldTypes/[1]/name=='HTMLwhitetok'", - "/fieldTypes/[2]/name=='boolean'"); + assertJQ( + "/schema/fieldtypes?indent=on", + "/fieldTypes/[0]/name=='HTMLstandardtok'", + "/fieldTypes/[1]/name=='HTMLwhitetok'", + "/fieldTypes/[2]/name=='boolean'"); } } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestFieldTypeResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestFieldTypeResource.java index 0e4fe7b52fe..b89079d452a 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestFieldTypeResource.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestFieldTypeResource.java @@ -24,69 +24,72 @@ public class TestFieldTypeResource extends SolrRestletTestBase { public void testXMLGetFieldType() throws Exception { final String expectedFloatClass = RANDOMIZED_NUMERIC_FIELDTYPES.get(Float.class); final boolean expectedDocValues = Boolean.getBoolean(NUMERIC_DOCVALUES_SYSPROP); - assertQ("/schema/fieldtypes/float?wt=xml&showDefaults=true", - "count(/response/lst[@name='fieldType']) = 1", - "count(/response/lst[@name='fieldType']/*) = 18", - "/response/lst[@name='fieldType']/str[@name='name'] = 'float'", - "/response/lst[@name='fieldType']/str[@name='class'] = '"+expectedFloatClass+"'", - "/response/lst[@name='fieldType']/str[@name='precisionStep'] ='0'", - "/response/lst[@name='fieldType']/bool[@name='indexed'] = 'true'", - "/response/lst[@name='fieldType']/bool[@name='stored'] = 'true'", - "/response/lst[@name='fieldType']/bool[@name='uninvertible'] = 'true'", - "/response/lst[@name='fieldType']/bool[@name='docValues'] = '"+expectedDocValues+"'", - "/response/lst[@name='fieldType']/bool[@name='termVectors'] = 'false'", - "/response/lst[@name='fieldType']/bool[@name='termPositions'] = 'false'", - "/response/lst[@name='fieldType']/bool[@name='termOffsets'] = 'false'", - "/response/lst[@name='fieldType']/bool[@name='omitNorms'] = 'true'", - "/response/lst[@name='fieldType']/bool[@name='omitTermFreqAndPositions'] = 'true'", - "/response/lst[@name='fieldType']/bool[@name='omitPositions'] = 'false'", - "/response/lst[@name='fieldType']/bool[@name='storeOffsetsWithPositions'] = 'false'", - "/response/lst[@name='fieldType']/bool[@name='multiValued'] = 'false'", - "/response/lst[@name='fieldType']/bool[@name='large'] = 'false'", - "/response/lst[@name='fieldType']/bool[@name='tokenized'] = 'false'"); + assertQ( + "/schema/fieldtypes/float?wt=xml&showDefaults=true", + "count(/response/lst[@name='fieldType']) = 1", + "count(/response/lst[@name='fieldType']/*) = 18", + 
"/response/lst[@name='fieldType']/str[@name='name'] = 'float'", + "/response/lst[@name='fieldType']/str[@name='class'] = '" + expectedFloatClass + "'", + "/response/lst[@name='fieldType']/str[@name='precisionStep'] ='0'", + "/response/lst[@name='fieldType']/bool[@name='indexed'] = 'true'", + "/response/lst[@name='fieldType']/bool[@name='stored'] = 'true'", + "/response/lst[@name='fieldType']/bool[@name='uninvertible'] = 'true'", + "/response/lst[@name='fieldType']/bool[@name='docValues'] = '" + expectedDocValues + "'", + "/response/lst[@name='fieldType']/bool[@name='termVectors'] = 'false'", + "/response/lst[@name='fieldType']/bool[@name='termPositions'] = 'false'", + "/response/lst[@name='fieldType']/bool[@name='termOffsets'] = 'false'", + "/response/lst[@name='fieldType']/bool[@name='omitNorms'] = 'true'", + "/response/lst[@name='fieldType']/bool[@name='omitTermFreqAndPositions'] = 'true'", + "/response/lst[@name='fieldType']/bool[@name='omitPositions'] = 'false'", + "/response/lst[@name='fieldType']/bool[@name='storeOffsetsWithPositions'] = 'false'", + "/response/lst[@name='fieldType']/bool[@name='multiValued'] = 'false'", + "/response/lst[@name='fieldType']/bool[@name='large'] = 'false'", + "/response/lst[@name='fieldType']/bool[@name='tokenized'] = 'false'"); } @Test public void testXMLGetNotFoundFieldType() throws Exception { - assertQ("/schema/fieldtypes/not_in_there?wt=xml", - "count(/response/lst[@name='fieldtypes']) = 0", - "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", - "/response/lst[@name='error']/int[@name='code'] = '404'"); + assertQ( + "/schema/fieldtypes/not_in_there?wt=xml", + "count(/response/lst[@name='fieldtypes']) = 0", + "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", + "/response/lst[@name='error']/int[@name='code'] = '404'"); } @Test public void testJsonGetFieldType() throws Exception { final String expectedFloatClass = RANDOMIZED_NUMERIC_FIELDTYPES.get(Float.class); final boolean expectedDocValues = Boolean.getBoolean(NUMERIC_DOCVALUES_SYSPROP); - assertJQ("/schema/fieldtypes/float?showDefaults=on", - "/fieldType/name=='float'", - "/fieldType/class=='"+expectedFloatClass+"'", - "/fieldType/precisionStep=='0'", - "/fieldType/indexed==true", - "/fieldType/stored==true", - "/fieldType/uninvertible==true", - "/fieldType/docValues=="+expectedDocValues, - "/fieldType/termVectors==false", - "/fieldType/termPositions==false", - "/fieldType/termOffsets==false", - "/fieldType/omitNorms==true", - "/fieldType/omitTermFreqAndPositions==true", - "/fieldType/omitPositions==false", - "/fieldType/storeOffsetsWithPositions==false", - "/fieldType/multiValued==false", - "/fieldType/tokenized==false"); + assertJQ( + "/schema/fieldtypes/float?showDefaults=on", + "/fieldType/name=='float'", + "/fieldType/class=='" + expectedFloatClass + "'", + "/fieldType/precisionStep=='0'", + "/fieldType/indexed==true", + "/fieldType/stored==true", + "/fieldType/uninvertible==true", + "/fieldType/docValues==" + expectedDocValues, + "/fieldType/termVectors==false", + "/fieldType/termPositions==false", + "/fieldType/termOffsets==false", + "/fieldType/omitNorms==true", + "/fieldType/omitTermFreqAndPositions==true", + "/fieldType/omitPositions==false", + "/fieldType/storeOffsetsWithPositions==false", + "/fieldType/multiValued==false", + "/fieldType/tokenized==false"); } - + @Test public void testXMLGetFieldTypeDontShowDefaults() throws Exception { - assertQ("/schema/fieldtypes/teststop?wt=xml", - "count(/response/lst[@name='fieldType']/*) = 3", - 
"/response/lst[@name='fieldType']/str[@name='name'] = 'teststop'", - "/response/lst[@name='fieldType']/str[@name='class'] = 'solr.TextField'", - "/response/lst[@name='fieldType']/lst[@name='analyzer']/lst[@name='tokenizer']/str[@name='class'] = 'solr.LetterTokenizerFactory'", - "/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst/str[@name='class'][.='solr.LowerCaseFilterFactory']", - "/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst/str[@name='class'][.='solr.StopFilterFactory']", - "/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst/str[@name='words'][.='stopwords.txt']" - ); + assertQ( + "/schema/fieldtypes/teststop?wt=xml", + "count(/response/lst[@name='fieldType']/*) = 3", + "/response/lst[@name='fieldType']/str[@name='name'] = 'teststop'", + "/response/lst[@name='fieldType']/str[@name='class'] = 'solr.TextField'", + "/response/lst[@name='fieldType']/lst[@name='analyzer']/lst[@name='tokenizer']/str[@name='class'] = 'solr.LetterTokenizerFactory'", + "/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst/str[@name='class'][.='solr.LowerCaseFilterFactory']", + "/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst/str[@name='class'][.='solr.StopFilterFactory']", + "/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst/str[@name='words'][.='stopwords.txt']"); } } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaNameResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaNameResource.java index f6b94ac27ea..a890987a362 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaNameResource.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaNameResource.java @@ -15,14 +15,16 @@ * limitations under the License. */ package org.apache.solr.rest.schema; + import org.apache.solr.rest.SolrRestletTestBase; import org.junit.Test; public class TestSchemaNameResource extends SolrRestletTestBase { @Test public void testGetSchemaName() throws Exception { - assertQ("/schema/name?wt=xml", - "count(/response/str[@name='name']) = 1", - "/response/str[@name='name'][.='test-rest']"); + assertQ( + "/schema/name?wt=xml", + "count(/response/str[@name='name']) = 1", + "/response/str[@name='name'][.='test-rest']"); } } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaResource.java index b60092b62e3..bd52af0614a 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaResource.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaResource.java @@ -15,168 +15,136 @@ * limitations under the License. 
*/ package org.apache.solr.rest.schema; + import org.apache.solr.rest.SolrRestletTestBase; import org.junit.Test; public class TestSchemaResource extends SolrRestletTestBase { @Test public void testXMLResponse() throws Exception { - assertQ("/schema/?indent=on&wt=xml", // should work with or without trailing slash on '/schema/' path - - "count(/response/lst[@name='schema']/str[@name='name']) = 1", - "/response/lst[@name='schema']/str[@name='name'][.='test-rest']", - - "count(/response/lst[@name='schema']/float[@name='version']) = 1", - "/response/lst[@name='schema']/float[@name='version'][.='1.6']", - - "count(/response/lst[@name='schema']/str[@name='uniqueKey']) = 1", - "/response/lst[@name='schema']/str[@name='uniqueKey'][.='id']", - - "(/response/lst[@name='schema']/arr[@name='fieldTypes']/lst/str[@name='name'])[1] = 'HTMLstandardtok'", - "(/response/lst[@name='schema']/arr[@name='fieldTypes']/lst/str[@name='name'])[2] = 'HTMLwhitetok'", - "(/response/lst[@name='schema']/arr[@name='fieldTypes']/lst/str[@name='name'])[3] = 'boolean'", - - "(/response/lst[@name='schema']/arr[@name='fields']/lst/str[@name='name'])[1] = 'HTMLstandardtok'", - "(/response/lst[@name='schema']/arr[@name='fields']/lst/str[@name='name'])[2] = 'HTMLwhitetok'", - "(/response/lst[@name='schema']/arr[@name='fields']/lst/str[@name='name'])[3] = '_version_'", - - "(/response/lst[@name='schema']/arr[@name='dynamicFields']/lst/str[@name='name'])[1] = '*_coordinate'", - "(/response/lst[@name='schema']/arr[@name='dynamicFields']/lst/str[@name='name'])[2] = 'ignored_*'", - "(/response/lst[@name='schema']/arr[@name='dynamicFields']/lst/str[@name='name'])[3] = '*_mfacet'", - - "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='title']" - +" and str[@name='dest'][.='title_stemmed']" - +" and int[@name='maxChars'][.='200']]", - - "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='title']" - +" and str[@name='dest'][.='dest_sub_no_ast_s']]", - - "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" - +" and str[@name='dest'][.='title']]", - - "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" - +" and str[@name='dest'][.='*_s']]", - - "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" - +" and str[@name='dest'][.='*_dest_sub_s']]", - - "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" - +" and str[@name='dest'][.='dest_sub_no_ast_s']]", - - "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" - +" and str[@name='dest'][.='title']]", - - "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" - +" and str[@name='dest'][.='*_s']]", - - "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" - +" and str[@name='dest'][.='*_dest_sub_s']]", - - "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" - +" and str[@name='dest'][.='dest_sub_no_ast_s']]", - - "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='src_sub_no_ast_i']" - +" and str[@name='dest'][.='title']]", - - "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='src_sub_no_ast_i']" - +" and str[@name='dest'][.='*_s']]", - - "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='src_sub_no_ast_i']" - +" 
and str[@name='dest'][.='*_dest_sub_s']]", - - "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='src_sub_no_ast_i']" - +" and str[@name='dest'][.='dest_sub_no_ast_s']]", - - "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='title_*']" - +" and str[@name='dest'][.='text']]", - - "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='title_*']" - +" and str[@name='dest'][.='*_s']]", - - "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='title_*']" - +" and str[@name='dest'][.='*_dest_sub_s']]", - - "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='title_*']" - +" and str[@name='dest'][.='dest_sub_no_ast_s']]"); + // should work with or without trailing slash on '/schema/' path + assertQ( + "/schema/?indent=on&wt=xml", + "count(/response/lst[@name='schema']/str[@name='name']) = 1", + "/response/lst[@name='schema']/str[@name='name'][.='test-rest']", + "count(/response/lst[@name='schema']/float[@name='version']) = 1", + "/response/lst[@name='schema']/float[@name='version'][.='1.6']", + "count(/response/lst[@name='schema']/str[@name='uniqueKey']) = 1", + "/response/lst[@name='schema']/str[@name='uniqueKey'][.='id']", + "(/response/lst[@name='schema']/arr[@name='fieldTypes']/lst/str[@name='name'])[1] = 'HTMLstandardtok'", + "(/response/lst[@name='schema']/arr[@name='fieldTypes']/lst/str[@name='name'])[2] = 'HTMLwhitetok'", + "(/response/lst[@name='schema']/arr[@name='fieldTypes']/lst/str[@name='name'])[3] = 'boolean'", + "(/response/lst[@name='schema']/arr[@name='fields']/lst/str[@name='name'])[1] = 'HTMLstandardtok'", + "(/response/lst[@name='schema']/arr[@name='fields']/lst/str[@name='name'])[2] = 'HTMLwhitetok'", + "(/response/lst[@name='schema']/arr[@name='fields']/lst/str[@name='name'])[3] = '_version_'", + "(/response/lst[@name='schema']/arr[@name='dynamicFields']/lst/str[@name='name'])[1] = '*_coordinate'", + "(/response/lst[@name='schema']/arr[@name='dynamicFields']/lst/str[@name='name'])[2] = 'ignored_*'", + "(/response/lst[@name='schema']/arr[@name='dynamicFields']/lst/str[@name='name'])[3] = '*_mfacet'", + "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='title']" + + " and str[@name='dest'][.='title_stemmed']" + + " and int[@name='maxChars'][.='200']]", + "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='title']" + + " and str[@name='dest'][.='dest_sub_no_ast_s']]", + "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" + + " and str[@name='dest'][.='title']]", + "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" + + " and str[@name='dest'][.='*_s']]", + "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" + + " and str[@name='dest'][.='*_dest_sub_s']]", + "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='*_i']" + + " and str[@name='dest'][.='dest_sub_no_ast_s']]", + "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" + + " and str[@name='dest'][.='title']]", + "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" + + " and str[@name='dest'][.='*_s']]", + "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" + + " and str[@name='dest'][.='*_dest_sub_s']]", + 
"/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='*_src_sub_i']" + + " and str[@name='dest'][.='dest_sub_no_ast_s']]", + "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='src_sub_no_ast_i']" + + " and str[@name='dest'][.='title']]", + "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='src_sub_no_ast_i']" + + " and str[@name='dest'][.='*_s']]", + "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='src_sub_no_ast_i']" + + " and str[@name='dest'][.='*_dest_sub_s']]", + "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='src_sub_no_ast_i']" + + " and str[@name='dest'][.='dest_sub_no_ast_s']]", + "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='title_*']" + + " and str[@name='dest'][.='text']]", + "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='title_*']" + + " and str[@name='dest'][.='*_s']]", + "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='title_*']" + + " and str[@name='dest'][.='*_dest_sub_s']]", + "/response/lst[@name='schema']/arr[@name='copyFields']/lst[ str[@name='source'][.='title_*']" + + " and str[@name='dest'][.='dest_sub_no_ast_s']]"); } - @Test public void testJSONResponse() throws Exception { - assertJQ("/schema", // Should work with or without a trailing slash - - "/schema/name=='test-rest'", - "/schema/version==1.6", - "/schema/uniqueKey=='id'", - - "/schema/fieldTypes/[0]/name=='HTMLstandardtok'", - "/schema/fieldTypes/[1]/name=='HTMLwhitetok'", - "/schema/fieldTypes/[2]/name=='boolean'", - - "/schema/fields/[0]/name=='HTMLstandardtok'", - "/schema/fields/[1]/name=='HTMLwhitetok'", - "/schema/fields/[2]/name=='_version_'", - - "/schema/dynamicFields/[0]/name=='*_coordinate'", - "/schema/dynamicFields/[1]/name=='ignored_*'", - "/schema/dynamicFields/[2]/name=='*_mfacet'", - - "/schema/copyFields/[1]=={'source':'src_sub_no_ast_i','dest':'title'}", - - "/schema/copyFields/[7]=={'source':'title','dest':'dest_sub_no_ast_s'}", - "/schema/copyFields/[8]=={'source':'*_i','dest':'title'}", - "/schema/copyFields/[9]=={'source':'*_i','dest':'*_s'}", - "/schema/copyFields/[10]=={'source':'*_i','dest':'*_dest_sub_s'}", - "/schema/copyFields/[11]=={'source':'*_i','dest':'dest_sub_no_ast_s'}", - - "/schema/copyFields/[12]=={'source':'*_src_sub_i','dest':'title'}", - "/schema/copyFields/[13]=={'source':'*_src_sub_i','dest':'*_s'}", - "/schema/copyFields/[14]=={'source':'*_src_sub_i','dest':'*_dest_sub_s'}", - "/schema/copyFields/[15]=={'source':'*_src_sub_i','dest':'dest_sub_no_ast_s'}", - - "/schema/copyFields/[16]=={'source':'src_sub_no_ast_i','dest':'*_s'}", - "/schema/copyFields/[17]=={'source':'src_sub_no_ast_i','dest':'*_dest_sub_s'}", - "/schema/copyFields/[18]=={'source':'src_sub_no_ast_i','dest':'dest_sub_no_ast_s'}"); - + // Should work with or without a trailing slash + assertJQ( + "/schema", + "/schema/name=='test-rest'", + "/schema/version==1.6", + "/schema/uniqueKey=='id'", + "/schema/fieldTypes/[0]/name=='HTMLstandardtok'", + "/schema/fieldTypes/[1]/name=='HTMLwhitetok'", + "/schema/fieldTypes/[2]/name=='boolean'", + "/schema/fields/[0]/name=='HTMLstandardtok'", + "/schema/fields/[1]/name=='HTMLwhitetok'", + "/schema/fields/[2]/name=='_version_'", + "/schema/dynamicFields/[0]/name=='*_coordinate'", + "/schema/dynamicFields/[1]/name=='ignored_*'", + "/schema/dynamicFields/[2]/name=='*_mfacet'", + 
"/schema/copyFields/[1]=={'source':'src_sub_no_ast_i','dest':'title'}", + "/schema/copyFields/[7]=={'source':'title','dest':'dest_sub_no_ast_s'}", + "/schema/copyFields/[8]=={'source':'*_i','dest':'title'}", + "/schema/copyFields/[9]=={'source':'*_i','dest':'*_s'}", + "/schema/copyFields/[10]=={'source':'*_i','dest':'*_dest_sub_s'}", + "/schema/copyFields/[11]=={'source':'*_i','dest':'dest_sub_no_ast_s'}", + "/schema/copyFields/[12]=={'source':'*_src_sub_i','dest':'title'}", + "/schema/copyFields/[13]=={'source':'*_src_sub_i','dest':'*_s'}", + "/schema/copyFields/[14]=={'source':'*_src_sub_i','dest':'*_dest_sub_s'}", + "/schema/copyFields/[15]=={'source':'*_src_sub_i','dest':'dest_sub_no_ast_s'}", + "/schema/copyFields/[16]=={'source':'src_sub_no_ast_i','dest':'*_s'}", + "/schema/copyFields/[17]=={'source':'src_sub_no_ast_i','dest':'*_dest_sub_s'}", + "/schema/copyFields/[18]=={'source':'src_sub_no_ast_i','dest':'dest_sub_no_ast_s'}"); } @Test public void testSchemaXmlResponse() { - assertQ("/schema?wt=schema.xml", // should work with or without trailing slash on '/schema/' path - - "/schema/@name = 'test-rest'", - "/schema/@version = '1.6'", - "/schema/uniqueKey = 'id'", - - "(/schema/fieldType)[1]/@name = 'HTMLstandardtok'", - "(/schema/fieldType)[2]/@name = 'HTMLwhitetok'", - "(/schema/fieldType)[3]/@name = 'boolean'", - - "(/schema/field)[1]/@name = 'HTMLstandardtok'", - "(/schema/field)[2]/@name = 'HTMLwhitetok'", - "(/schema/field)[3]/@name = '_version_'", - - "(/schema/dynamicField)[1]/@name = '*_coordinate'", - "(/schema/dynamicField)[2]/@name = 'ignored_*'", - "(/schema/dynamicField)[3]/@name = '*_mfacet'", - - "/schema/copyField[@source='title'][@dest='title_stemmed'][@maxChars='200']", - "/schema/copyField[@source='title'][@dest='dest_sub_no_ast_s']", - "/schema/copyField[@source='*_i'][@dest='title']", - "/schema/copyField[@source='*_i'][@dest='*_s']", - "/schema/copyField[@source='*_i'][@dest='*_dest_sub_s']", - "/schema/copyField[@source='*_i'][@dest='dest_sub_no_ast_s']", - "/schema/copyField[@source='*_src_sub_i'][@dest='title']", - "/schema/copyField[@source='*_src_sub_i'][@dest='*_s']", - "/schema/copyField[@source='*_src_sub_i'][@dest='*_dest_sub_s']", - "/schema/copyField[@source='*_src_sub_i'][@dest='dest_sub_no_ast_s']", - "/schema/copyField[@source='src_sub_no_ast_i'][@dest='title']", - "/schema/copyField[@source='src_sub_no_ast_i'][@dest='*_s']", - "/schema/copyField[@source='src_sub_no_ast_i'][@dest='*_dest_sub_s']", - "/schema/copyField[@source='src_sub_no_ast_i'][@dest='dest_sub_no_ast_s']", - "/schema/copyField[@source='title_*'][@dest='text']", - "/schema/copyField[@source='title_*'][@dest='*_s']", - "/schema/copyField[@source='title_*'][@dest='*_dest_sub_s']", - "/schema/copyField[@source='title_*'][@dest='dest_sub_no_ast_s']"); + // should work with or without trailing slash on '/schema/' path + assertQ( + "/schema?wt=schema.xml", + "/schema/@name = 'test-rest'", + "/schema/@version = '1.6'", + "/schema/uniqueKey = 'id'", + "(/schema/fieldType)[1]/@name = 'HTMLstandardtok'", + "(/schema/fieldType)[2]/@name = 'HTMLwhitetok'", + "(/schema/fieldType)[3]/@name = 'boolean'", + "(/schema/field)[1]/@name = 'HTMLstandardtok'", + "(/schema/field)[2]/@name = 'HTMLwhitetok'", + "(/schema/field)[3]/@name = '_version_'", + "(/schema/dynamicField)[1]/@name = '*_coordinate'", + "(/schema/dynamicField)[2]/@name = 'ignored_*'", + "(/schema/dynamicField)[3]/@name = '*_mfacet'", + "/schema/copyField[@source='title'][@dest='title_stemmed'][@maxChars='200']", + 
"/schema/copyField[@source='title'][@dest='dest_sub_no_ast_s']", + "/schema/copyField[@source='*_i'][@dest='title']", + "/schema/copyField[@source='*_i'][@dest='*_s']", + "/schema/copyField[@source='*_i'][@dest='*_dest_sub_s']", + "/schema/copyField[@source='*_i'][@dest='dest_sub_no_ast_s']", + "/schema/copyField[@source='*_src_sub_i'][@dest='title']", + "/schema/copyField[@source='*_src_sub_i'][@dest='*_s']", + "/schema/copyField[@source='*_src_sub_i'][@dest='*_dest_sub_s']", + "/schema/copyField[@source='*_src_sub_i'][@dest='dest_sub_no_ast_s']", + "/schema/copyField[@source='src_sub_no_ast_i'][@dest='title']", + "/schema/copyField[@source='src_sub_no_ast_i'][@dest='*_s']", + "/schema/copyField[@source='src_sub_no_ast_i'][@dest='*_dest_sub_s']", + "/schema/copyField[@source='src_sub_no_ast_i'][@dest='dest_sub_no_ast_s']", + "/schema/copyField[@source='title_*'][@dest='text']", + "/schema/copyField[@source='title_*'][@dest='*_s']", + "/schema/copyField[@source='title_*'][@dest='*_dest_sub_s']", + "/schema/copyField[@source='title_*'][@dest='dest_sub_no_ast_s']"); } } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaSimilarityResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaSimilarityResource.java index 046ebb21ec6..636032e519f 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaSimilarityResource.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaSimilarityResource.java @@ -15,6 +15,7 @@ * limitations under the License. */ package org.apache.solr.rest.schema; + import org.apache.solr.rest.SolrRestletTestBase; import org.junit.Test; @@ -23,13 +24,12 @@ public class TestSchemaSimilarityResource extends SolrRestletTestBase { /** * NOTE: schema used by parent class doesn't define a global sim, so we get the implicit default * which causes the FQN of the class to be returned - * */ @Test public void testGetSchemaSimilarity() throws Exception { - assertQ("/schema/similarity?wt=xml", - "count(/response/lst[@name='similarity']) = 1", - "/response/lst[@name='similarity']/str[@name='class'][.='org.apache.solr.search.similarities.SchemaSimilarityFactory']"); + assertQ( + "/schema/similarity?wt=xml", + "count(/response/lst[@name='similarity']) = 1", + "/response/lst[@name='similarity']/str[@name='class'][.='org.apache.solr.search.similarities.SchemaSimilarityFactory']"); } } - diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaVersionResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaVersionResource.java index fdd38223a22..ddfc2d6b630 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaVersionResource.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaVersionResource.java @@ -15,15 +15,16 @@ * limitations under the License. 
*/ package org.apache.solr.rest.schema; + import org.apache.solr.rest.SolrRestletTestBase; import org.junit.Test; public class TestSchemaVersionResource extends SolrRestletTestBase { @Test public void testGetSchemaVersion() throws Exception { - assertQ("/schema/version?indent=on&wt=xml", - "count(/response/float[@name='version']) = 1", - "/response/float[@name='version'][.='1.6']"); + assertQ( + "/schema/version?indent=on&wt=xml", + "count(/response/float[@name='version']) = 1", + "/response/float[@name='version'][.='1.6']"); } } - diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestSerializedLuceneMatchVersion.java b/solr/core/src/test/org/apache/solr/rest/schema/TestSerializedLuceneMatchVersion.java index 4750b0fde70..c732ee1d263 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestSerializedLuceneMatchVersion.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestSerializedLuceneMatchVersion.java @@ -15,53 +15,52 @@ * limitations under the License. */ package org.apache.solr.rest.schema; + +import java.util.SortedMap; +import java.util.TreeMap; import org.apache.solr.util.RestTestBase; import org.eclipse.jetty.servlet.ServletHolder; import org.junit.BeforeClass; import org.junit.Test; -import java.util.SortedMap; -import java.util.TreeMap; - - public class TestSerializedLuceneMatchVersion extends RestTestBase { @BeforeClass public static void init() throws Exception { - final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>(); + final SortedMap<ServletHolder, String> extraServlets = new TreeMap<>(); - createJettyAndHarness(TEST_HOME(), "solrconfig-minimal.xml", "schema-rest-lucene-match-version.xml", - "/solr", true, extraServlets); + createJettyAndHarness( + TEST_HOME(), + "solrconfig-minimal.xml", + "schema-rest-lucene-match-version.xml", + "/solr", + true, + extraServlets); } @Test public void testExplicitLuceneMatchVersions() throws Exception { - assertQ("/schema/fieldtypes/explicitLuceneMatchVersions?indent=on&wt=xml&showDefaults=true", - "count(/response/lst[@name='fieldType']) = 1", - - "//lst[str[@name='class'][.='org.apache.solr.analysis.MockCharFilterFactory']]" - +" [str[@name='luceneMatchVersion'][.='4.0.0']]", - - "//lst[str[@name='class'][.='org.apache.solr.analysis.MockTokenizerFactory']]" - +" [str[@name='luceneMatchVersion'][.='4.0.0']]", - - "//lst[str[@name='class'][.='org.apache.solr.analysis.MockTokenFilterFactory']]" - +" [str[@name='luceneMatchVersion'][.='4.0.0']]"); + assertQ( + "/schema/fieldtypes/explicitLuceneMatchVersions?indent=on&wt=xml&showDefaults=true", + "count(/response/lst[@name='fieldType']) = 1", + "//lst[str[@name='class'][.='org.apache.solr.analysis.MockCharFilterFactory']]" + + " [str[@name='luceneMatchVersion'][.='4.0.0']]", + "//lst[str[@name='class'][.='org.apache.solr.analysis.MockTokenizerFactory']]" + + " [str[@name='luceneMatchVersion'][.='4.0.0']]", + "//lst[str[@name='class'][.='org.apache.solr.analysis.MockTokenFilterFactory']]" + + " [str[@name='luceneMatchVersion'][.='4.0.0']]"); } @Test public void testNoLuceneMatchVersions() throws Exception { - assertQ("/schema/fieldtypes/noLuceneMatchVersions?indent=on&wt=xml&showDefaults=true", - "count(/response/lst[@name='fieldType']) = 1", - - "//lst[str[@name='class'][.='org.apache.solr.analysis.MockCharFilterFactory']]" - +" [not(./str[@name='luceneMatchVersion'])]", - - "//lst[str[@name='class'][.='org.apache.solr.analysis.MockTokenizerFactory']]" - +" [not(./str[@name='luceneMatchVersion'])]", - - "//lst[str[@name='class'][.='org.apache.solr.analysis.MockTokenFilterFactory']]" - +" 
[not(./str[@name='luceneMatchVersion'])]"); + assertQ( + "/schema/fieldtypes/noLuceneMatchVersions?indent=on&wt=xml&showDefaults=true", + "count(/response/lst[@name='fieldType']) = 1", + "//lst[str[@name='class'][.='org.apache.solr.analysis.MockCharFilterFactory']]" + + " [not(./str[@name='luceneMatchVersion'])]", + "//lst[str[@name='class'][.='org.apache.solr.analysis.MockTokenizerFactory']]" + + " [not(./str[@name='luceneMatchVersion'])]", + "//lst[str[@name='class'][.='org.apache.solr.analysis.MockTokenFilterFactory']]" + + " [not(./str[@name='luceneMatchVersion'])]"); } - } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestUniqueKeyFieldResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestUniqueKeyFieldResource.java index e58d5aa3faf..9932c8166eb 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/TestUniqueKeyFieldResource.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/TestUniqueKeyFieldResource.java @@ -15,6 +15,7 @@ * limitations under the License. */ package org.apache.solr.rest.schema; + import org.apache.solr.rest.SolrRestletTestBase; import org.junit.Test; @@ -22,9 +23,9 @@ public class TestUniqueKeyFieldResource extends SolrRestletTestBase { @Test public void testGetUniqueKey() throws Exception { - assertQ("/schema/uniquekey?indent=on&wt=xml", - "count(/response/str[@name='uniqueKey']) = 1", - "/response/str[@name='uniqueKey'][.='id']"); + assertQ( + "/schema/uniquekey?indent=on&wt=xml", + "count(/response/str[@name='uniqueKey']) = 1", + "/response/str[@name='uniqueKey'][.='id']"); } } - diff --git a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedStopFilterFactory.java b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedStopFilterFactory.java index d41132534f2..f7edadc1e68 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedStopFilterFactory.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedStopFilterFactory.java @@ -20,7 +20,6 @@ import java.util.Arrays; import java.util.SortedMap; import java.util.TreeMap; - import org.apache.commons.io.FileUtils; import org.apache.solr.common.util.Utils; import org.apache.solr.util.RestTestBase; @@ -31,8 +30,10 @@ /** * Test the REST API for managing stop words, which is pretty basic: - * GET: returns the list of stop words or a single word if it exists - * PUT: add some words to the current list + * + *
<p>GET: returns the list of stop words or a single word if it exists + * + * <p>PUT: add some words to the current list */ public class TestManagedStopFilterFactory extends RestTestBase { private static File tmpSolrHome; @@ -47,13 +48,18 @@ public void before() throws Exception { tmpConfDir = new File(tmpSolrHome, confDir); FileUtils.copyDirectory(new File(TEST_HOME()), tmpSolrHome.getAbsoluteFile()); - final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>(); + final SortedMap<ServletHolder, String> extraServlets = new TreeMap<>(); System.setProperty("managed.schema.mutable", "true"); System.setProperty("enable.update.log", "false"); - createJettyAndHarness(tmpSolrHome.getAbsolutePath(), "solrconfig-managed-schema.xml", "schema-rest.xml", - "/solr", true, extraServlets); + createJettyAndHarness( + tmpSolrHome.getAbsolutePath(), + "solrconfig-managed-schema.xml", + "schema-rest.xml", + "/solr", + true, + extraServlets); } @After @@ -64,18 +70,16 @@ private void after() throws Exception { } System.clearProperty("managed.schema.mutable"); System.clearProperty("enable.update.log"); - + if (restTestHarness != null) { restTestHarness.close(); } restTestHarness = null; } - /** - * Test adding managed stopwords to an endpoint defined in the schema, - * then adding docs containing a stopword before and after removing - * the stopword from the managed stopwords set. + * Test adding managed stopwords to an endpoint defined in the schema, then adding docs containing + * a stopword before and after removing the stopword from the managed stopwords set. */ @Test public void testManagedStopwords() throws Exception { @@ -83,81 +87,86 @@ public void testManagedStopwords() throws Exception { //// TODO: This returns HTML vs JSON because the exception is thrown //// from the init method of ManagedEndpoint ... need a better solution // assertJQ("/schema/analysis/stopwords/bogus", "/error/code==404"); - + // this endpoint depends on at least one field type containing the following // declaration in the schema-rest.xml: - // <filter class="solr.ManagedStopFilterFactory" managed="english" /> + // <filter class="solr.ManagedStopFilterFactory" managed="english" /> // // String endpoint = "/schema/analysis/stopwords/english"; - + // test the initial GET request returns the default stopwords settings - assertJQ(endpoint, - "/wordSet/initArgs/ignoreCase==false", - "/wordSet/managedList==[]"); - + assertJQ(endpoint, "/wordSet/initArgs/ignoreCase==false", "/wordSet/managedList==[]"); + // add some stopwords and verify they were added - assertJPut(endpoint, - Utils.toJSONString(Arrays.asList("a", "an", "the")), - "/responseHeader/status==0"); - + assertJPut( + endpoint, Utils.toJSONString(Arrays.asList("a", "an", "the")), "/responseHeader/status==0"); + // test requesting a specific stop word that exists / does not exist assertJQ(endpoint + "/the", "/the=='the'"); // not exist - 404 assertJQ(endpoint + "/foo", "/error/code==404"); // wrong case - 404 assertJQ(endpoint + "/An", "/error/code==404"); - + // update the ignoreCase initArg to true and make sure case is ignored - String updateIgnoreCase = + String updateIgnoreCase = "{ 'initArgs':{ 'ignoreCase':true }, " - + "'managedList':['A','a','AN','an','THE','the','of','OF'] }"; + + "'managedList':['A','a','AN','an','THE','the','of','OF'] }"; assertJPut(endpoint, json(updateIgnoreCase), "/responseHeader/status==0"); - - assertJQ(endpoint, - "/wordSet/initArgs/ignoreCase==true", - "/wordSet/managedList==['a','an','of','the']"); - + + assertJQ( + endpoint, + "/wordSet/initArgs/ignoreCase==true", + "/wordSet/managedList==['a','an','of','the']"); + // verify ignoreCase applies when requesting a word assertJQ("/schema/analysis/stopwords/english/The", "/The=='the'"); // verify the resource supports XML writer type (wt) 
as well as JSON - assertQ(endpoint, - "count(/response/lst[@name='wordSet']/arr[@name='managedList']/*) = 4", - "(/response/lst[@name='wordSet']/arr[@name='managedList']/str)[1] = 'a'", - "(/response/lst[@name='wordSet']/arr[@name='managedList']/str)[2] = 'an'", - "(/response/lst[@name='wordSet']/arr[@name='managedList']/str)[3] = 'of'", - "(/response/lst[@name='wordSet']/arr[@name='managedList']/str)[4] = 'the'"); + assertQ( + endpoint, + "count(/response/lst[@name='wordSet']/arr[@name='managedList']/*) = 4", + "(/response/lst[@name='wordSet']/arr[@name='managedList']/str)[1] = 'a'", + "(/response/lst[@name='wordSet']/arr[@name='managedList']/str)[2] = 'an'", + "(/response/lst[@name='wordSet']/arr[@name='managedList']/str)[3] = 'of'", + "(/response/lst[@name='wordSet']/arr[@name='managedList']/str)[4] = 'the'"); - restTestHarness.reload(); // make the word set available + restTestHarness.reload(); // make the word set available String newFieldName = "managed_en_field"; // make sure the new field doesn't already exist - assertQ("/schema/fields/" + newFieldName + "?indent=on&wt=xml", - "count(/response/lst[@name='field']) = 0", - "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", - "/response/lst[@name='error']/int[@name='code'] = '404'"); + assertQ( + "/schema/fields/" + newFieldName + "?indent=on&wt=xml", + "count(/response/lst[@name='field']) = 0", + "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", + "/response/lst[@name='error']/int[@name='code'] = '404'"); // add the new field - assertJPost("/schema/fields", "{add-field : { name :managed_en_field, type : managed_en}}", - "/responseHeader/status==0"); + assertJPost( + "/schema/fields", + "{add-field : { name :managed_en_field, type : managed_en}}", + "/responseHeader/status==0"); // make sure the new field exists now - assertQ("/schema/fields/" + newFieldName + "?indent=on&wt=xml", - "count(/response/lst[@name='field']) = 1", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'"); + assertQ( + "/schema/fields/" + newFieldName + "?indent=on&wt=xml", + "count(/response/lst[@name='field']) = 1", + "/response/lst[@name='responseHeader']/int[@name='status'] = '0'"); assertU(adoc(newFieldName, "This is the one", "id", "6")); assertU(commit()); - assertQ("/select?q=" + newFieldName + ":This", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - "/response/result[@name='response'][@numFound='1']", - "/response/result[@name='response']/doc/str[@name='id'][.='6']"); + assertQ( + "/select?q=" + newFieldName + ":This", + "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", + "/response/result[@name='response'][@numFound='1']", + "/response/result[@name='response']/doc/str[@name='id'][.='6']"); - assertQ("/select?q=%7B%21raw%20f=" + newFieldName + "%7Dthe", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - "/response/result[@name='response'][@numFound='0']"); + assertQ( + "/select?q=%7B%21raw%20f=" + newFieldName + "%7Dthe", + "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", + "/response/result[@name='response'][@numFound='0']"); // verify delete works assertJDelete(endpoint + "/the", "/responseHeader/status==0"); @@ -166,9 +175,10 @@ public void testManagedStopwords() throws Exception { assertU(adoc(newFieldName, "This is the other one", "id", "7")); assertU(commit()); - assertQ("/select?q=%7B%21raw%20f=" + newFieldName + "%7Dthe", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - 
"/response/result[@name='response'][@numFound='0']"); + assertQ( + "/select?q=%7B%21raw%20f=" + newFieldName + "%7Dthe", + "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", + "/response/result[@name='response'][@numFound='0']"); restTestHarness.reload(); @@ -176,41 +186,37 @@ public void testManagedStopwords() throws Exception { assertU(adoc(newFieldName, "This is the other other one", "id", "8")); assertU(commit()); - assertQ("/select?q=%7B%21raw%20f=" + newFieldName + "%7Dthe", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - "/response/result[@name='response'][@numFound='1']", - "/response/result[@name='response']/doc/str[@name='id'][.='8']"); + assertQ( + "/select?q=%7B%21raw%20f=" + newFieldName + "%7Dthe", + "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", + "/response/result[@name='response'][@numFound='1']", + "/response/result[@name='response']/doc/str[@name='id'][.='8']"); + + assertJQ( + endpoint, "/wordSet/initArgs/ignoreCase==true", "/wordSet/managedList==['a','an','of']"); - assertJQ(endpoint, - "/wordSet/initArgs/ignoreCase==true", - "/wordSet/managedList==['a','an','of']"); - // should fail with 404 as foo doesn't exist assertJDelete(endpoint + "/foo", "/error/code==404"); } - /** - * Can we add and remove stopwords with umlauts - */ + /** Can we add and remove stopwords with umlauts */ @Test - public void testCanHandleDecodingAndEncodingForStopwords() throws Exception { + public void testCanHandleDecodingAndEncodingForStopwords() throws Exception { String endpoint = "/schema/analysis/stopwords/german"; - //initially it should not exist + // initially it should not exist assertJQ(endpoint + "/schön", "/error/code==404"); - //now we put a stopword with an umlaut - assertJPut(endpoint, - Utils.toJSONString(Arrays.asList("schön")), - "/responseHeader/status==0"); + // now we put a stopword with an umlaut + assertJPut(endpoint, Utils.toJSONString(Arrays.asList("schön")), "/responseHeader/status==0"); - //let's check if it exists + // let's check if it exists assertJQ(endpoint + "/schön", "/schön=='schön'"); - //now let's remove it + // now let's remove it assertJDelete(endpoint + "/schön", "/responseHeader/status==0"); - //and of it is unavailable again + // and of it is unavailable again assertJQ(endpoint + "/schön", "/error/code==404"); } } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java index 8740cec6ca8..cb03c7a34c6 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java @@ -16,6 +16,8 @@ */ package org.apache.solr.rest.schema.analysis; +import static org.apache.solr.common.util.Utils.toJSONString; + import java.io.File; import java.net.URLEncoder; import java.util.ArrayList; @@ -25,7 +27,6 @@ import java.util.Map; import java.util.SortedMap; import java.util.TreeMap; - import org.apache.commons.io.FileUtils; import org.apache.solr.util.RestTestBase; import org.eclipse.jetty.servlet.ServletHolder; @@ -33,26 +34,27 @@ import org.junit.Before; import org.junit.Test; -import static org.apache.solr.common.util.Utils.toJSONString; - public class TestManagedSynonymFilterFactory extends RestTestBase { - + private static File tmpSolrHome; - /** - * Setup to make the schema mutable - */ + /** Setup to make the schema mutable */ 
@Before public void before() throws Exception { tmpSolrHome = createTempDir().toFile(); FileUtils.copyDirectory(new File(TEST_HOME()), tmpSolrHome.getAbsoluteFile()); - final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>(); + final SortedMap<ServletHolder, String> extraServlets = new TreeMap<>(); System.setProperty("managed.schema.mutable", "true"); System.setProperty("enable.update.log", "false"); - createJettyAndHarness(tmpSolrHome.getAbsolutePath(), "solrconfig-managed-schema.xml", "schema-rest.xml", - "/solr", true, extraServlets); + createJettyAndHarness( + tmpSolrHome.getAbsolutePath(), + "solrconfig-managed-schema.xml", + "schema-rest.xml", + "/solr", + true, + extraServlets); } @After @@ -67,232 +69,212 @@ private void after() throws Exception { } System.clearProperty("managed.schema.mutable"); System.clearProperty("enable.update.log"); - + if (restTestHarness != null) { restTestHarness.close(); } restTestHarness = null; } - + @Test public void testManagedSynonyms() throws Exception { // this endpoint depends on at least one field type containing the following // declaration in the schema-rest.xml: - // + // // - // + // String endpoint = "/schema/analysis/synonyms/english"; - - assertJQ(endpoint, - "/synonymMappings/initArgs/ignoreCase==false", - "/synonymMappings/managedMap=={}"); - + + assertJQ( + endpoint, "/synonymMappings/initArgs/ignoreCase==false", "/synonymMappings/managedMap=={}"); + // put a new mapping into the synonyms - Map<String,List<String>> syns = new HashMap<>(); - syns.put("happy", Arrays.asList("glad","cheerful","joyful")); - assertJPut(endpoint, - toJSONString(syns), - "/responseHeader/status==0"); - - assertJQ(endpoint, - "/synonymMappings/managedMap/happy==['cheerful','glad','joyful']"); + Map<String, List<String>> syns = new HashMap<>(); + syns.put("happy", Arrays.asList("glad", "cheerful", "joyful")); + assertJPut(endpoint, toJSONString(syns), "/responseHeader/status==0"); + + assertJQ(endpoint, "/synonymMappings/managedMap/happy==['cheerful','glad','joyful']"); // request to a specific mapping - assertJQ(endpoint+"/happy", - "/happy==['cheerful','glad','joyful']"); + assertJQ(endpoint + "/happy", "/happy==['cheerful','glad','joyful']"); // does not exist - assertJQ(endpoint+"/sad", - "/error/code==404"); - + assertJQ(endpoint + "/sad", "/error/code==404"); + // verify the user can update the ignoreCase initArg - assertJPut(endpoint, - json("{ 'initArgs':{ 'ignoreCase':true } }"), - "responseHeader/status==0"); + assertJPut(endpoint, json("{ 'initArgs':{ 'ignoreCase':true } }"), "responseHeader/status==0"); + + assertJQ(endpoint, "/synonymMappings/initArgs/ignoreCase==true"); - assertJQ(endpoint, - "/synonymMappings/initArgs/ignoreCase==true"); - syns = new HashMap<>(); - syns.put("sad", Arrays.asList("unhappy")); - syns.put("SAD", Arrays.asList("bummed")); - assertJPut(endpoint, - toJSONString(syns), - "/responseHeader/status==0"); - - assertJQ(endpoint, - "/synonymMappings/managedMap/sad==['unhappy']"); - assertJQ(endpoint, - "/synonymMappings/managedMap/SAD==['bummed']"); - + syns.put("sad", Arrays.asList("unhappy")); + syns.put("SAD", Arrays.asList("bummed")); + assertJPut(endpoint, toJSONString(syns), "/responseHeader/status==0"); + + assertJQ(endpoint, "/synonymMappings/managedMap/sad==['unhappy']"); + assertJQ(endpoint, "/synonymMappings/managedMap/SAD==['bummed']"); + // expect a union of values when requesting the "sad" child - assertJQ(endpoint+"/sad", - "/sad==['bummed','unhappy']"); + assertJQ(endpoint + "/sad", "/sad==['bummed','unhappy']"); + // verify delete works - assertJDelete(endpoint+"/sad", - 
"/responseHeader/status==0"); - - assertJQ(endpoint, - "/synonymMappings/managedMap=={'happy':['cheerful','glad','joyful']}"); - + assertJDelete(endpoint + "/sad", "/responseHeader/status==0"); + + assertJQ(endpoint, "/synonymMappings/managedMap=={'happy':['cheerful','glad','joyful']}"); + // should fail with 404 as foo doesn't exist - assertJDelete(endpoint+"/foo", - "/error/code==404"); - + assertJDelete(endpoint + "/foo", "/error/code==404"); + // verify that a newly added synonym gets expanded on the query side after core reload - + String newFieldName = "managed_en_field"; // make sure the new field doesn't already exist - assertQ("/schema/fields/" + newFieldName + "?indent=on&wt=xml", - "count(/response/lst[@name='field']) = 0", - "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", - "/response/lst[@name='error']/int[@name='code'] = '404'"); + assertQ( + "/schema/fields/" + newFieldName + "?indent=on&wt=xml", + "count(/response/lst[@name='field']) = 0", + "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", + "/response/lst[@name='error']/int[@name='code'] = '404'"); // add the new field - assertJPost("/schema", "{ add-field : { name: managed_en_field, type : managed_en}}", - "/responseHeader/status==0"); + assertJPost( + "/schema", + "{ add-field : { name: managed_en_field, type : managed_en}}", + "/responseHeader/status==0"); // make sure the new field exists now - assertQ("/schema/fields/" + newFieldName + "?indent=on&wt=xml", - "count(/response/lst[@name='field']) = 1", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'"); + assertQ( + "/schema/fields/" + newFieldName + "?indent=on&wt=xml", + "count(/response/lst[@name='field']) = 1", + "/response/lst[@name='responseHeader']/int[@name='status'] = '0'"); // multi-term synonym logic - SOLR-10264 final String multiTermOrigin; final String multiTermSynonym; if (random().nextBoolean()) { - multiTermOrigin = "hansestadt hamburg"; + multiTermOrigin = "hansestadt hamburg"; multiTermSynonym = "hh"; } else { - multiTermOrigin = "hh"; + multiTermOrigin = "hh"; multiTermSynonym = "hansestadt hamburg"; } // multi-term logic similar to the angry/mad logic (angry ~ origin, mad ~ synonym) assertU(adoc(newFieldName, "I am a happy test today but yesterday I was angry", "id", "5150")); - assertU(adoc(newFieldName, multiTermOrigin+" is in North Germany.", "id", "040")); + assertU(adoc(newFieldName, multiTermOrigin + " is in North Germany.", "id", "040")); assertU(commit()); - assertQ("/select?q=" + newFieldName + ":angry", - "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - "/response/result[@name='response'][@numFound='1']", - "/response/result[@name='response']/doc/str[@name='id'][.='5150']"); - assertQ("/select?q=" + newFieldName + ":"+URLEncoder.encode(multiTermOrigin, "UTF-8"), + assertQ( + "/select?q=" + newFieldName + ":angry", + "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", + "/response/result[@name='response'][@numFound='1']", + "/response/result[@name='response']/doc/str[@name='id'][.='5150']"); + assertQ( + "/select?q=" + newFieldName + ":" + URLEncoder.encode(multiTermOrigin, "UTF-8"), "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", "/response/result[@name='response'][@numFound='1']", "/response/result[@name='response']/doc/str[@name='id'][.='040']"); - + // add a mapping that will expand a query for "mad" to match docs with "angry" syns = new HashMap<>(); - syns.put("mad", Arrays.asList("angry")); - assertJPut(endpoint, - 
toJSONString(syns), - "/responseHeader/status==0"); - - assertJQ(endpoint, - "/synonymMappings/managedMap/mad==['angry']"); + syns.put("mad", Arrays.asList("angry")); + assertJPut(endpoint, toJSONString(syns), "/responseHeader/status==0"); + + assertJQ(endpoint, "/synonymMappings/managedMap/mad==['angry']"); // add a mapping that will expand a query for "multi-term synonym" to match docs with "acronym" syns = new HashMap<>(); syns.put(multiTermSynonym, Arrays.asList(multiTermOrigin)); - assertJPut(endpoint, - toJSONString(syns), - "/responseHeader/status==0"); + assertJPut(endpoint, toJSONString(syns), "/responseHeader/status==0"); - assertJQ(endpoint+"/"+URLEncoder.encode(multiTermSynonym, "UTF-8"), - "/"+multiTermSynonym+"==['"+multiTermOrigin+"']"); + assertJQ( + endpoint + "/" + URLEncoder.encode(multiTermSynonym, "UTF-8"), + "/" + multiTermSynonym + "==['" + multiTermOrigin + "']"); - // should not match as the synonym mapping between mad and angry does not + // should not match as the synonym mapping between mad and angry does not // get applied until core reload - assertQ("/select?q=" + newFieldName + ":mad", + assertQ( + "/select?q=" + newFieldName + ":mad", "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", - "/response/result[@name='response'][@numFound='0']"); - + "/response/result[@name='response'][@numFound='0']"); + // should not match as the synonym mapping between "origin" and "synonym" // was not added before the document was indexed - assertQ("/select?q=" + newFieldName + ":("+URLEncoder.encode(multiTermSynonym, "UTF-8") + ")&sow=false", + assertQ( + "/select?q=" + + newFieldName + + ":(" + + URLEncoder.encode(multiTermSynonym, "UTF-8") + + ")&sow=false", "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", "/response/result[@name='response'][@numFound='0']"); restTestHarness.reload(); // now query for mad and we should see our test doc - assertQ("/select?q=" + newFieldName + ":mad", + assertQ( + "/select?q=" + newFieldName + ":mad", "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", "/response/result[@name='response'][@numFound='1']", - "/response/result[@name='response']/doc/str[@name='id'][.='5150']"); - + "/response/result[@name='response']/doc/str[@name='id'][.='5150']"); + // now query for "synonym" and we should see our test doc with "origin" - assertQ("/select?q=" + newFieldName + ":("+URLEncoder.encode(multiTermSynonym, "UTF-8") + ")&sow=false", + assertQ( + "/select?q=" + + newFieldName + + ":(" + + URLEncoder.encode(multiTermSynonym, "UTF-8") + + ")&sow=false", "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", "/response/result[@name='response'][@numFound='1']", "/response/result[@name='response']/doc/str[@name='id'][.='040']"); // test for SOLR-6015 syns = new HashMap<>(); - syns.put("mb", Arrays.asList("megabyte")); - assertJPut(endpoint, - toJSONString(syns), - "/responseHeader/status==0"); + syns.put("mb", Arrays.asList("megabyte")); + assertJPut(endpoint, toJSONString(syns), "/responseHeader/status==0"); - syns.put("MB", Arrays.asList("MiB", "Megabyte")); - assertJPut(endpoint, - toJSONString(syns), - "/responseHeader/status==0"); - - assertJQ(endpoint + "/MB", - "/MB==['Megabyte','MiB','megabyte']"); + syns.put("MB", Arrays.asList("MiB", "Megabyte")); + assertJPut(endpoint, toJSONString(syns), "/responseHeader/status==0"); + + assertJQ(endpoint + "/MB", "/MB==['Megabyte','MiB','megabyte']"); // test for SOLR-6878 - by default, expand is true, but only applies when sending in a list List 
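// ---------------------------------------------------------------------------
// Aside: managed resources are only re-read on core reload, which is why the
// test calls restTestHarness.reload() before the "mad" query can match.
// Outside the harness a CoreAdmin RELOAD does the same job; the URL and core
// name below are assumptions.
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.CoreAdminRequest;

public class ReloadSketch {
  public static void main(String[] args) throws Exception {
    try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr").build()) {
      // Equivalent to /admin/cores?action=RELOAD&core=collection1
      CoreAdminRequest.reloadCore("collection1", client);
    }
  }
}
// ---------------------------------------------------------------------------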
m2mSyns = new ArrayList<>(); m2mSyns.addAll(Arrays.asList("funny", "entertaining", "whimiscal", "jocular")); assertJPut(endpoint, toJSONString(m2mSyns), "/responseHeader/status==0"); - assertJQ(endpoint + "/funny", - "/funny==['entertaining','funny','jocular','whimiscal']"); - assertJQ(endpoint + "/entertaining", + assertJQ(endpoint + "/funny", "/funny==['entertaining','funny','jocular','whimiscal']"); + assertJQ( + endpoint + "/entertaining", "/entertaining==['entertaining','funny','jocular','whimiscal']"); - assertJQ(endpoint + "/jocular", - "/jocular==['entertaining','funny','jocular','whimiscal']"); - assertJQ(endpoint + "/whimiscal", - "/whimiscal==['entertaining','funny','jocular','whimiscal']"); + assertJQ(endpoint + "/jocular", "/jocular==['entertaining','funny','jocular','whimiscal']"); + assertJQ(endpoint + "/whimiscal", "/whimiscal==['entertaining','funny','jocular','whimiscal']"); } - /** - * Can we add and remove stopwords with umlauts - */ + /** Can we add and remove stopwords with umlauts */ @Test - public void testCanHandleDecodingAndEncodingForSynonyms() throws Exception { + public void testCanHandleDecodingAndEncodingForSynonyms() throws Exception { String endpoint = "/schema/analysis/synonyms/german"; - assertJQ(endpoint, - "/synonymMappings/initArgs/ignoreCase==false", - "/synonymMappings/managedMap=={}"); + assertJQ( + endpoint, "/synonymMappings/initArgs/ignoreCase==false", "/synonymMappings/managedMap=={}"); // does not exist - assertJQ(endpoint+"/fröhlich", - "/error/code==404"); + assertJQ(endpoint + "/fröhlich", "/error/code==404"); - Map> syns = new HashMap<>(); + Map> syns = new HashMap<>(); // now put a synonym syns.put("fröhlich", Arrays.asList("glücklick")); - assertJPut(endpoint, - toJSONString(syns), - "/responseHeader/status==0"); + assertJPut(endpoint, toJSONString(syns), "/responseHeader/status==0"); // and check if it exists - assertJQ(endpoint, - "/synonymMappings/managedMap/fröhlich==['glücklick']"); + assertJQ(endpoint, "/synonymMappings/managedMap/fröhlich==['glücklick']"); // verify delete works - assertJDelete(endpoint+"/fröhlich", - "/responseHeader/status==0"); - + assertJDelete(endpoint + "/fröhlich", "/responseHeader/status==0"); // was it really deleted? 
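// ---------------------------------------------------------------------------
// Aside: SOLR-6878 — PUTting a bare JSON list (rather than a map) registers a
// symmetric group: with the default expand=true, each term ends up mapped to
// the whole set, which is what the four assertJQ calls above verify. A small
// sketch of the two payload shapes, using the same Utils.toJSONString helper
// the test imports statically:
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.solr.common.util.Utils;

public class SynonymPayloadShapes {
  public static void main(String[] args) {
    Map<String, List<String>> oneWay = new HashMap<>();
    oneWay.put("mad", Arrays.asList("angry")); // map form: mad -> [angry] only
    // list form ("whimiscal" sic, matching the test data):
    List<String> group = Arrays.asList("funny", "entertaining", "whimiscal", "jocular");
    System.out.println(Utils.toJSONString(oneWay)); // {"mad":["angry"]}
    System.out.println(Utils.toJSONString(group)); // ["funny","entertaining",...]
  }
}
// ---------------------------------------------------------------------------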
- assertJDelete(endpoint+"/fröhlich", - "/error/code==404"); + assertJDelete(endpoint + "/fröhlich", "/error/code==404"); } } diff --git a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymGraphFilterFactory.java b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymGraphFilterFactory.java index 778b19acaa3..9d902bbed2f 100644 --- a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymGraphFilterFactory.java +++ b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymGraphFilterFactory.java @@ -17,6 +17,8 @@ package org.apache.solr.rest.schema.analysis; +import static org.apache.solr.common.util.Utils.toJSONString; + import java.io.File; import java.net.URLEncoder; import java.util.ArrayList; @@ -26,7 +28,6 @@ import java.util.Map; import java.util.SortedMap; import java.util.TreeMap; - import org.apache.commons.io.FileUtils; import org.apache.solr.util.RestTestBase; import org.eclipse.jetty.servlet.ServletHolder; @@ -34,27 +35,29 @@ import org.junit.Before; import org.junit.Test; -import static org.apache.solr.common.util.Utils.toJSONString; - -// See: https://issues.apache.org/jira/browse/SOLR-12028 Tests cannot remove files on Windows machines occasionally +// See: https://issues.apache.org/jira/browse/SOLR-12028 Tests cannot remove files on Windows +// machines occasionally public class TestManagedSynonymGraphFilterFactory extends RestTestBase { private static File tmpSolrHome; - /** - * Setup to make the schema mutable - */ + /** Setup to make the schema mutable */ @Before public void before() throws Exception { tmpSolrHome = createTempDir().toFile(); FileUtils.copyDirectory(new File(TEST_HOME()), tmpSolrHome.getAbsoluteFile()); - final SortedMap extraServlets = new TreeMap<>(); + final SortedMap extraServlets = new TreeMap<>(); System.setProperty("managed.schema.mutable", "true"); System.setProperty("enable.update.log", "false"); - createJettyAndHarness(tmpSolrHome.getAbsolutePath(), "solrconfig-managed-schema.xml", "schema-rest.xml", - "/solr", true, extraServlets); + createJettyAndHarness( + tmpSolrHome.getAbsolutePath(), + "solrconfig-managed-schema.xml", + "schema-rest.xml", + "/solr", + true, + extraServlets); } @After @@ -79,83 +82,70 @@ private void after() throws Exception { public void testManagedSynonyms() throws Exception { // this endpoint depends on at least one field type containing the following // declaration in the schema-rest.xml: - // + // // - // + // String endpoint = "/schema/analysis/synonyms/englishgraph"; - assertJQ(endpoint, - "/synonymMappings/initArgs/ignoreCase==false", - "/synonymMappings/managedMap=={}"); + assertJQ( + endpoint, "/synonymMappings/initArgs/ignoreCase==false", "/synonymMappings/managedMap=={}"); // put a new mapping into the synonyms - Map> syns = new HashMap<>(); - syns.put("happy", Arrays.asList("glad","cheerful","joyful")); - assertJPut(endpoint, - toJSONString(syns), - "/responseHeader/status==0"); + Map> syns = new HashMap<>(); + syns.put("happy", Arrays.asList("glad", "cheerful", "joyful")); + assertJPut(endpoint, toJSONString(syns), "/responseHeader/status==0"); - assertJQ(endpoint, - "/synonymMappings/managedMap/happy==['cheerful','glad','joyful']"); + assertJQ(endpoint, "/synonymMappings/managedMap/happy==['cheerful','glad','joyful']"); // request to a specific mapping - assertJQ(endpoint+"/happy", - "/happy==['cheerful','glad','joyful']"); + assertJQ(endpoint + "/happy", "/happy==['cheerful','glad','joyful']"); // does not exist - 
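// ---------------------------------------------------------------------------
// Aside: the German-synonym test depends on path segments like /fröhlich
// surviving the HTTP round trip. A one-line sketch of the UTF-8
// percent-encoding involved:
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class UmlautEncodingSketch {
  public static void main(String[] args) {
    // "fröhlich" -> "fr%C3%B6hlich"
    System.out.println(URLEncoder.encode("fröhlich", StandardCharsets.UTF_8));
  }
}
// ---------------------------------------------------------------------------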
assertJQ(endpoint+"/sad", - "/error/code==404"); + assertJQ(endpoint + "/sad", "/error/code==404"); // verify the user can update the ignoreCase initArg - assertJPut(endpoint, - json("{ 'initArgs':{ 'ignoreCase':true } }"), - "responseHeader/status==0"); + assertJPut(endpoint, json("{ 'initArgs':{ 'ignoreCase':true } }"), "responseHeader/status==0"); - assertJQ(endpoint, - "/synonymMappings/initArgs/ignoreCase==true"); + assertJQ(endpoint, "/synonymMappings/initArgs/ignoreCase==true"); syns = new HashMap<>(); syns.put("sad", Arrays.asList("unhappy")); syns.put("SAD", Arrays.asList("bummed")); - assertJPut(endpoint, - toJSONString(syns), - "/responseHeader/status==0"); + assertJPut(endpoint, toJSONString(syns), "/responseHeader/status==0"); - assertJQ(endpoint, - "/synonymMappings/managedMap/sad==['unhappy']"); - assertJQ(endpoint, - "/synonymMappings/managedMap/SAD==['bummed']"); + assertJQ(endpoint, "/synonymMappings/managedMap/sad==['unhappy']"); + assertJQ(endpoint, "/synonymMappings/managedMap/SAD==['bummed']"); // expect a union of values when requesting the "sad" child - assertJQ(endpoint+"/sad", - "/sad==['bummed','unhappy']"); + assertJQ(endpoint + "/sad", "/sad==['bummed','unhappy']"); // verify delete works - assertJDelete(endpoint+"/sad", - "/responseHeader/status==0"); + assertJDelete(endpoint + "/sad", "/responseHeader/status==0"); - assertJQ(endpoint, - "/synonymMappings/managedMap=={'happy':['cheerful','glad','joyful']}"); + assertJQ(endpoint, "/synonymMappings/managedMap=={'happy':['cheerful','glad','joyful']}"); // should fail with 404 as foo doesn't exist - assertJDelete(endpoint+"/foo", - "/error/code==404"); + assertJDelete(endpoint + "/foo", "/error/code==404"); // verify that a newly added synonym gets expanded on the query side after core reload String newFieldName = "managed_graph_en_field"; // make sure the new field doesn't already exist - assertQ("/schema/fields/" + newFieldName + "?indent=on&wt=xml", + assertQ( + "/schema/fields/" + newFieldName + "?indent=on&wt=xml", "count(/response/lst[@name='field']) = 0", "/response/lst[@name='responseHeader']/int[@name='status'] = '404'", "/response/lst[@name='error']/int[@name='code'] = '404'"); // add the new field - assertJPost("/schema", "{ add-field : { name: managed_graph_en_field, type : managed_graph_en}}", + assertJPost( + "/schema", + "{ add-field : { name: managed_graph_en_field, type : managed_graph_en}}", "/responseHeader/status==0"); // make sure the new field exists now - assertQ("/schema/fields/" + newFieldName + "?indent=on&wt=xml", + assertQ( + "/schema/fields/" + newFieldName + "?indent=on&wt=xml", "count(/response/lst[@name='field']) = 1", "/response/lst[@name='responseHeader']/int[@name='status'] = '0'"); @@ -163,23 +153,25 @@ public void testManagedSynonyms() throws Exception { final String multiTermOrigin; final String multiTermSynonym; if (random().nextBoolean()) { - multiTermOrigin = "hansestadt hamburg"; + multiTermOrigin = "hansestadt hamburg"; multiTermSynonym = "hh"; } else { - multiTermOrigin = "hh"; + multiTermOrigin = "hh"; multiTermSynonym = "hansestadt hamburg"; } // multi-term logic similar to the angry/mad logic (angry ~ origin, mad ~ synonym) assertU(adoc(newFieldName, "I am a happy test today but yesterday I was angry", "id", "5150")); - assertU(adoc(newFieldName, multiTermOrigin+" is in North Germany.", "id", "040")); + assertU(adoc(newFieldName, multiTermOrigin + " is in North Germany.", "id", "040")); assertU(commit()); - assertQ("/select?q=" + newFieldName + ":angry", + assertQ( + 
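// ---------------------------------------------------------------------------
// Aside: with ignoreCase=true the endpoint answers a child request by
// unioning every key that matches case-insensitively, which is why /sad
// returns ['bummed','unhappy'] above. A self-contained model of that lookup
// (an illustration of the observed behavior, not Solr's own implementation):
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;

public class IgnoreCaseLookupSketch {
  static TreeSet<String> lookup(Map<String, List<String>> managedMap, String term) {
    TreeSet<String> union = new TreeSet<>(); // sorted, like the JSON the test asserts
    for (Map.Entry<String, List<String>> e : managedMap.entrySet()) {
      if (e.getKey().equalsIgnoreCase(term)) {
        union.addAll(e.getValue());
      }
    }
    return union;
  }

  public static void main(String[] args) {
    Map<String, List<String>> managedMap = new HashMap<>();
    managedMap.put("sad", Arrays.asList("unhappy"));
    managedMap.put("SAD", Arrays.asList("bummed"));
    System.out.println(lookup(managedMap, "sad")); // [bummed, unhappy]
  }
}
// ---------------------------------------------------------------------------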
"/select?q=" + newFieldName + ":angry", "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", "/response/result[@name='response'][@numFound='1']", "/response/result[@name='response']/doc/str[@name='id'][.='5150']"); - assertQ("/select?q=" + newFieldName + ":"+URLEncoder.encode(multiTermOrigin, "UTF-8"), + assertQ( + "/select?q=" + newFieldName + ":" + URLEncoder.encode(multiTermOrigin, "UTF-8"), "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", "/response/result[@name='response'][@numFound='1']", "/response/result[@name='response']/doc/str[@name='id'][.='040']"); @@ -187,45 +179,53 @@ public void testManagedSynonyms() throws Exception { // add a mapping that will expand a query for "mad" to match docs with "angry" syns = new HashMap<>(); syns.put("mad", Arrays.asList("angry")); - assertJPut(endpoint, - toJSONString(syns), - "/responseHeader/status==0"); + assertJPut(endpoint, toJSONString(syns), "/responseHeader/status==0"); - assertJQ(endpoint, - "/synonymMappings/managedMap/mad==['angry']"); + assertJQ(endpoint, "/synonymMappings/managedMap/mad==['angry']"); // add a mapping that will expand a query for "multi-term synonym" to match docs with "acronym" syns = new HashMap<>(); syns.put(multiTermSynonym, Arrays.asList(multiTermOrigin)); - assertJPut(endpoint, - toJSONString(syns), - "/responseHeader/status==0"); + assertJPut(endpoint, toJSONString(syns), "/responseHeader/status==0"); - assertJQ(endpoint+"/"+URLEncoder.encode(multiTermSynonym, "UTF-8"), - "/"+multiTermSynonym+"==['"+multiTermOrigin+"']"); + assertJQ( + endpoint + "/" + URLEncoder.encode(multiTermSynonym, "UTF-8"), + "/" + multiTermSynonym + "==['" + multiTermOrigin + "']"); - // should not match as the synonym mapping between mad and angry does not + // should not match as the synonym mapping between mad and angry does not // get applied until core reload - assertQ("/select?q=" + newFieldName + ":mad", + assertQ( + "/select?q=" + newFieldName + ":mad", "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", "/response/result[@name='response'][@numFound='0']"); // should not match as the synonym mapping between "origin" and "synonym" // was not added before the document was indexed - assertQ("/select?q=" + newFieldName + ":("+URLEncoder.encode(multiTermSynonym, "UTF-8") + ")&sow=false", + assertQ( + "/select?q=" + + newFieldName + + ":(" + + URLEncoder.encode(multiTermSynonym, "UTF-8") + + ")&sow=false", "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", "/response/result[@name='response'][@numFound='0']"); restTestHarness.reload(); // now query for mad and we should see our test doc - assertQ("/select?q=" + newFieldName + ":mad", + assertQ( + "/select?q=" + newFieldName + ":mad", "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", "/response/result[@name='response'][@numFound='1']", "/response/result[@name='response']/doc/str[@name='id'][.='5150']"); // now query for "synonym" and we should see our test doc with "origin" - assertQ("/select?q=" + newFieldName + ":("+URLEncoder.encode(multiTermSynonym, "UTF-8") + ")&sow=false", + assertQ( + "/select?q=" + + newFieldName + + ":(" + + URLEncoder.encode(multiTermSynonym, "UTF-8") + + ")&sow=false", "/response/lst[@name='responseHeader']/int[@name='status'] = '0'", "/response/result[@name='response'][@numFound='1']", "/response/result[@name='response']/doc/str[@name='id'][.='040']"); @@ -233,143 +233,108 @@ public void testManagedSynonyms() throws Exception { // test for SOLR-6015 syns = new HashMap<>(); 
syns.put("mb", Arrays.asList("megabyte")); - assertJPut(endpoint, - toJSONString(syns), - "/responseHeader/status==0"); + assertJPut(endpoint, toJSONString(syns), "/responseHeader/status==0"); syns.put("MB", Arrays.asList("MiB", "Megabyte")); - assertJPut(endpoint, - toJSONString(syns), - "/responseHeader/status==0"); + assertJPut(endpoint, toJSONString(syns), "/responseHeader/status==0"); - assertJQ(endpoint + "/MB", - "/MB==['Megabyte','MiB','megabyte']"); + assertJQ(endpoint + "/MB", "/MB==['Megabyte','MiB','megabyte']"); // test for SOLR-6878 - by default, expand is true, but only applies when sending in a list List m2mSyns = new ArrayList<>(); m2mSyns.addAll(Arrays.asList("funny", "entertaining", "whimiscal", "jocular")); assertJPut(endpoint, toJSONString(m2mSyns), "/responseHeader/status==0"); - assertJQ(endpoint + "/funny", - "/funny==['entertaining','funny','jocular','whimiscal']"); - assertJQ(endpoint + "/entertaining", + assertJQ(endpoint + "/funny", "/funny==['entertaining','funny','jocular','whimiscal']"); + assertJQ( + endpoint + "/entertaining", "/entertaining==['entertaining','funny','jocular','whimiscal']"); - assertJQ(endpoint + "/jocular", - "/jocular==['entertaining','funny','jocular','whimiscal']"); - assertJQ(endpoint + "/whimiscal", - "/whimiscal==['entertaining','funny','jocular','whimiscal']"); + assertJQ(endpoint + "/jocular", "/jocular==['entertaining','funny','jocular','whimiscal']"); + assertJQ(endpoint + "/whimiscal", "/whimiscal==['entertaining','funny','jocular','whimiscal']"); } - /** - * Can we add and remove stopwords with umlauts - */ + /** Can we add and remove stopwords with umlauts */ @Test - public void testCanHandleDecodingAndEncodingForSynonyms() throws Exception { + public void testCanHandleDecodingAndEncodingForSynonyms() throws Exception { String endpoint = "/schema/analysis/synonyms/germangraph"; - assertJQ(endpoint, - "/synonymMappings/initArgs/ignoreCase==false", - "/synonymMappings/managedMap=={}"); + assertJQ( + endpoint, "/synonymMappings/initArgs/ignoreCase==false", "/synonymMappings/managedMap=={}"); // does not exist - assertJQ(endpoint+"/fröhlich", - "/error/code==404"); + assertJQ(endpoint + "/fröhlich", "/error/code==404"); - Map> syns = new HashMap<>(); + Map> syns = new HashMap<>(); // now put a synonym syns.put("fröhlich", Arrays.asList("glücklick")); - assertJPut(endpoint, - toJSONString(syns), - "/responseHeader/status==0"); + assertJPut(endpoint, toJSONString(syns), "/responseHeader/status==0"); // and check if it exists - assertJQ(endpoint, - "/synonymMappings/managedMap/fröhlich==['glücklick']"); + assertJQ(endpoint, "/synonymMappings/managedMap/fröhlich==['glücklick']"); // verify delete works - assertJDelete(endpoint+"/fröhlich", - "/responseHeader/status==0"); - + assertJDelete(endpoint + "/fröhlich", "/responseHeader/status==0"); // was it really deleted? 
- assertJDelete(endpoint+"/fröhlich", - "/error/code==404"); + assertJDelete(endpoint + "/fröhlich", "/error/code==404"); } - /** - * Can we add and single term synonyms with weight - */ + /** Can we add and single term synonyms with weight */ @Test - public void testManagedSynonyms_singleTermWithWeight_shouldHandleSynonym() throws Exception { + public void testManagedSynonyms_singleTermWithWeight_shouldHandleSynonym() throws Exception { String endpoint = "/schema/analysis/synonyms/englishgraph"; - assertJQ(endpoint, - "/synonymMappings/initArgs/ignoreCase==false", - "/synonymMappings/managedMap=={}"); + assertJQ( + endpoint, "/synonymMappings/initArgs/ignoreCase==false", "/synonymMappings/managedMap=={}"); // does not exist - assertJQ(endpoint+"/tiger", - "/error/code==404"); + assertJQ(endpoint + "/tiger", "/error/code==404"); - Map> syns = new HashMap<>(); + Map> syns = new HashMap<>(); // now put a synonym syns.put("tiger", Arrays.asList("tiger|1.0")); - assertJPut(endpoint, - toJSONString(syns), - "/responseHeader/status==0"); + assertJPut(endpoint, toJSONString(syns), "/responseHeader/status==0"); // and check if it exists - assertJQ(endpoint, - "/synonymMappings/managedMap/tiger==['tiger|1.0']"); + assertJQ(endpoint, "/synonymMappings/managedMap/tiger==['tiger|1.0']"); // verify delete works - assertJDelete(endpoint+"/tiger", - "/responseHeader/status==0"); - + assertJDelete(endpoint + "/tiger", "/responseHeader/status==0"); // was it really deleted? - assertJDelete(endpoint+"/tiger", - "/error/code==404"); + assertJDelete(endpoint + "/tiger", "/error/code==404"); } - /** - * Can we add multi term synonyms with weight - */ + /** Can we add multi term synonyms with weight */ @Test - public void testManagedSynonyms_multiTermWithWeight_shouldHandleSynonym() throws Exception { + public void testManagedSynonyms_multiTermWithWeight_shouldHandleSynonym() throws Exception { String endpoint = "/schema/analysis/synonyms/englishgraph"; - assertJQ(endpoint, - "/synonymMappings/initArgs/ignoreCase==false", - "/synonymMappings/managedMap=={}"); + assertJQ( + endpoint, "/synonymMappings/initArgs/ignoreCase==false", "/synonymMappings/managedMap=={}"); // does not exist - assertJQ(endpoint+"/tiger", - "/error/code==404"); + assertJQ(endpoint + "/tiger", "/error/code==404"); - Map> syns = new HashMap<>(); + Map> syns = new HashMap<>(); // now put a synonym List tigerSyonyms = Arrays.asList("tiger|1.0", "panthera tigris|0.9", "Shere Kan|0.8"); syns.put("tiger", tigerSyonyms); String jsonTigerSynonyms = toJSONString(syns); - assertJPut(endpoint, - jsonTigerSynonyms, - "/responseHeader/status==0"); + assertJPut(endpoint, jsonTigerSynonyms, "/responseHeader/status==0"); // and check if it exists - assertJQ(endpoint, + assertJQ( + endpoint, "/synonymMappings/managedMap/tiger==[\"Shere Kan|0.8\",\"panthera tigris|0.9\",\"tiger|1.0\"]"); // verify delete works - assertJDelete(endpoint+"/tiger", - "/responseHeader/status==0"); - + assertJDelete(endpoint + "/tiger", "/responseHeader/status==0"); // was it really deleted? 
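// ---------------------------------------------------------------------------
// Aside: every "invalid glob" case in BadCopyFieldTest trips the same rule —
// a copyField source/dest glob may contain at most one '*', and only as the
// first or last character. A restatement of that check (a model of the rule
// the error message describes, not Solr's own code):
public class CopyFieldGlobCheck {
  static boolean isValidGlob(String pattern) {
    int first = pattern.indexOf('*');
    if (first == -1) {
      return true; // no asterisk: must instead match an explicit/dynamic field
    }
    return first == pattern.lastIndexOf('*') // a single asterisk...
        && (first == 0 || first == pattern.length() - 1); // ...at either end
  }

  public static void main(String[] args) {
    System.out.println(isValidGlob("*_s")); // true
    System.out.println(isValidGlob("*too_many_asterisks*")); // false
    System.out.println(isValidGlob("misplaced_*_asterisk")); // false
  }
}
// ---------------------------------------------------------------------------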
- assertJDelete(endpoint+"/tiger", - "/error/code==404"); + assertJDelete(endpoint + "/tiger", "/error/code==404"); } } diff --git a/solr/core/src/test/org/apache/solr/schema/BadCopyFieldTest.java b/solr/core/src/test/org/apache/solr/schema/BadCopyFieldTest.java index 0cbcd478054..0224f306274 100644 --- a/solr/core/src/test/org/apache/solr/schema/BadCopyFieldTest.java +++ b/solr/core/src/test/org/apache/solr/schema/BadCopyFieldTest.java @@ -19,7 +19,8 @@ import org.apache.solr.core.AbstractBadConfigTestBase; /** - * SOLR-4650: copyField source with no asterisk should trigger an error if it doesn't match an explicit or dynamic field + * SOLR-4650: copyField source with no asterisk should trigger an error if it doesn't match an + * explicit or dynamic field */ public class BadCopyFieldTest extends AbstractBadConfigTestBase { @@ -28,30 +29,36 @@ private void doTest(final String schema, final String errString) throws Exceptio } public void testNonGlobCopyFieldSourceMatchingNothingShouldFail() throws Exception { - doTest("bad-schema-non-glob-copyfield-source-matching-nothing-should-fail-test.xml", - "copyField source :'matches_nothing' is not a glob and doesn't match any explicit field or dynamicField."); + doTest( + "bad-schema-non-glob-copyfield-source-matching-nothing-should-fail-test.xml", + "copyField source :'matches_nothing' is not a glob and doesn't match any explicit field or dynamicField."); } - private static final String INVALID_GLOB_MESSAGE = " is an invalid glob: either it contains more than one asterisk," - + " or the asterisk occurs neither at the start nor at the end."; - + private static final String INVALID_GLOB_MESSAGE = + " is an invalid glob: either it contains more than one asterisk," + + " or the asterisk occurs neither at the start nor at the end."; + public void testMultipleAsteriskCopyFieldSourceShouldFail() throws Exception { - doTest("bad-schema-multiple-asterisk-copyfield-source-should-fail-test.xml", - "copyField source :'*too_many_asterisks*'" + INVALID_GLOB_MESSAGE); + doTest( + "bad-schema-multiple-asterisk-copyfield-source-should-fail-test.xml", + "copyField source :'*too_many_asterisks*'" + INVALID_GLOB_MESSAGE); } public void testMisplacedAsteriskCopyFieldSourceShouldFail() throws Exception { - doTest("bad-schema-misplaced-asterisk-copyfield-source-should-fail-test.xml", - "copyField source :'misplaced_*_asterisk'" + INVALID_GLOB_MESSAGE); + doTest( + "bad-schema-misplaced-asterisk-copyfield-source-should-fail-test.xml", + "copyField source :'misplaced_*_asterisk'" + INVALID_GLOB_MESSAGE); } public void testMultipleAsteriskCopyFieldDestShouldFail() throws Exception { - doTest("bad-schema-multiple-asterisk-copyfield-dest-should-fail-test.xml", - "copyField dest :'*too_many_asterisks*'" + INVALID_GLOB_MESSAGE); + doTest( + "bad-schema-multiple-asterisk-copyfield-dest-should-fail-test.xml", + "copyField dest :'*too_many_asterisks*'" + INVALID_GLOB_MESSAGE); } public void testMisplacedAsteriskCopyFieldDestShouldFail() throws Exception { - doTest("bad-schema-misplaced-asterisk-copyfield-dest-should-fail-test.xml", - "copyField dest :'misplaced_*_asterisk'" + INVALID_GLOB_MESSAGE); + doTest( + "bad-schema-misplaced-asterisk-copyfield-dest-should-fail-test.xml", + "copyField dest :'misplaced_*_asterisk'" + INVALID_GLOB_MESSAGE); } } diff --git a/solr/core/src/test/org/apache/solr/schema/BadIndexSchemaTest.java b/solr/core/src/test/org/apache/solr/schema/BadIndexSchemaTest.java index 905868a031b..5dd9873ee83 100644 --- 
a/solr/core/src/test/org/apache/solr/schema/BadIndexSchemaTest.java +++ b/solr/core/src/test/org/apache/solr/schema/BadIndexSchemaTest.java @@ -20,8 +20,7 @@ public class BadIndexSchemaTest extends AbstractBadConfigTestBase { - private void doTest(final String schema, final String errString) - throws Exception { + private void doTest(final String schema, final String errString) throws Exception { assertConfigs("solrconfig-basic.xml", schema, errString); } @@ -40,6 +39,7 @@ public void testSevereErrorsForDuplicateFields() throws Exception { public void testSevereErrorsForDuplicateDynamicField() throws Exception { doTest("bad-schema-dup-dynamicField.xml", "_twice"); } + public void testSevereErrorsForUnsupportedAttributesOnDynamicField() throws Exception { doTest("bad-schema-dynamicfield-default-val.xml", "default"); doTest("bad-schema-dynamicfield-required.xml", "required"); @@ -55,78 +55,72 @@ public void testSevereErrorsForUnexpectedAnalyzer() throws Exception { } public void testUniqueKeyRules() throws Exception { - doTest("bad-schema-uniquekey-is-copyfield-dest.xml", - "can not be the dest of a copyField"); - doTest("bad-schema-uniquekey-uses-default.xml", - "can not be configured with a default value"); - doTest("bad-schema-uniquekey-multivalued.xml", - "can not be configured to be multivalued"); - doTest("bad-schema-uniquekey-uses-points.xml", - "can not be configured to use a Points based FieldType"); + doTest("bad-schema-uniquekey-is-copyfield-dest.xml", "can not be the dest of a copyField"); + doTest("bad-schema-uniquekey-uses-default.xml", "can not be configured with a default value"); + doTest("bad-schema-uniquekey-multivalued.xml", "can not be configured to be multivalued"); + doTest( + "bad-schema-uniquekey-uses-points.xml", + "can not be configured to use a Points based FieldType"); } public void testMultivaluedCurrency() throws Exception { - doTest("bad-schema-currency-ft-multivalued.xml", - "types can not be multiValued: currency"); - doTest("bad-schema-currency-multivalued.xml", - "fields can not be multiValued: money"); - doTest("bad-schema-currency-dynamic-multivalued.xml", - "fields can not be multiValued: *_c"); - doTest("bad-schema-currencyfieldtype-ft-multivalued.xml", + doTest("bad-schema-currency-ft-multivalued.xml", "types can not be multiValued: currency"); + doTest("bad-schema-currency-multivalued.xml", "fields can not be multiValued: money"); + doTest("bad-schema-currency-dynamic-multivalued.xml", "fields can not be multiValued: *_c"); + doTest( + "bad-schema-currencyfieldtype-ft-multivalued.xml", "types can not be multiValued: currency"); - doTest("bad-schema-currencyfieldtype-multivalued.xml", - "fields can not be multiValued: money"); - doTest("bad-schema-currencyfieldtype-dynamic-multivalued.xml", + doTest("bad-schema-currencyfieldtype-multivalued.xml", "fields can not be multiValued: money"); + doTest( + "bad-schema-currencyfieldtype-dynamic-multivalued.xml", "fields can not be multiValued: *_c"); } public void testCurrencyOERNoRates() throws Exception { - doTest("bad-schema-currency-ft-oer-norates.xml", - "ratesFileLocation"); - doTest("bad-schema-currencyfieldtype-ft-oer-norates.xml", - "ratesFileLocation"); + doTest("bad-schema-currency-ft-oer-norates.xml", "ratesFileLocation"); + doTest("bad-schema-currencyfieldtype-ft-oer-norates.xml", "ratesFileLocation"); } public void testCurrencyBogusCode() throws Exception { - doTest("bad-schema-currency-ft-bogus-default-code.xml", - "HOSS"); - doTest("bad-schema-currency-ft-bogus-code-in-xml.xml", - "HOSS"); - 
doTest("bad-schema-currencyfieldtype-ft-bogus-default-code.xml", - "HOSS"); - doTest("bad-schema-currencyfieldtype-ft-bogus-code-in-xml.xml", - "HOSS"); - } - + doTest("bad-schema-currency-ft-bogus-default-code.xml", "HOSS"); + doTest("bad-schema-currency-ft-bogus-code-in-xml.xml", "HOSS"); + doTest("bad-schema-currencyfieldtype-ft-bogus-default-code.xml", "HOSS"); + doTest("bad-schema-currencyfieldtype-ft-bogus-code-in-xml.xml", "HOSS"); + } + public void testCurrencyDisallowedSuffixParams() throws Exception { - doTest("bad-schema-currency-ft-code-suffix.xml", - "Unknown parameter(s)"); - doTest("bad-schema-currency-ft-amount-suffix.xml", - "Unknown parameter(s)"); + doTest("bad-schema-currency-ft-code-suffix.xml", "Unknown parameter(s)"); + doTest("bad-schema-currency-ft-amount-suffix.xml", "Unknown parameter(s)"); } - + public void testCurrencyBogusSuffixes() throws Exception { - doTest("bad-schema-currencyfieldtype-bogus-code-suffix.xml", - "Undefined dynamic field for codeStrSuffix"); - doTest("bad-schema-currencyfieldtype-bogus-amount-suffix.xml", - "Undefined dynamic field for amountLongSuffix"); - doTest("bad-schema-currencyfieldtype-wrong-code-ft.xml", - "Dynamic field for codeStrSuffix=\"_l\" must have type class of (or extending) StrField"); - doTest("bad-schema-currencyfieldtype-wrong-amount-ft.xml", - "Dynamic field for amountLongSuffix=\"_s\" must have type class extending LongValueFieldType"); - } - + doTest( + "bad-schema-currencyfieldtype-bogus-code-suffix.xml", + "Undefined dynamic field for codeStrSuffix"); + doTest( + "bad-schema-currencyfieldtype-bogus-amount-suffix.xml", + "Undefined dynamic field for amountLongSuffix"); + doTest( + "bad-schema-currencyfieldtype-wrong-code-ft.xml", + "Dynamic field for codeStrSuffix=\"_l\" must have type class of (or extending) StrField"); + doTest( + "bad-schema-currencyfieldtype-wrong-amount-ft.xml", + "Dynamic field for amountLongSuffix=\"_s\" must have type class extending LongValueFieldType"); + } + public void testCurrencyMissingSuffixes() throws Exception { - doTest("bad-schema-currencyfieldtype-missing-code-suffix.xml", + doTest( + "bad-schema-currencyfieldtype-missing-code-suffix.xml", "Missing required param codeStrSuffix"); - doTest("bad-schema-currencyfieldtype-missing-amount-suffix.xml", + doTest( + "bad-schema-currencyfieldtype-missing-amount-suffix.xml", "Missing required param amountLongSuffix"); } public void testPerFieldtypeSimButNoSchemaSimFactory() throws Exception { doTest("bad-schema-sim-global-vs-ft-mismatch.xml", "global similarity does not support it"); } - + public void testPerFieldtypePostingsFormatButNoSchemaCodecFactory() throws Exception { doTest("bad-schema-codec-global-vs-ft-mismatch.xml", "codec does not support"); } @@ -134,17 +128,18 @@ public void testPerFieldtypePostingsFormatButNoSchemaCodecFactory() throws Excep public void testDocValuesUnsupported() throws Exception { doTest("bad-schema-unsupported-docValues.xml", "does not support doc values"); } - + public void testRootTypeMissmatchWithUniqueKey() throws Exception { - doTest("bad-schema-uniquekey-diff-type-root.xml", - "using the exact same fieldType as the uniqueKey field (id) uses: string1"); + doTest( + "bad-schema-uniquekey-diff-type-root.xml", + "using the exact same fieldType as the uniqueKey field (id) uses: string1"); } - + public void testRootTypeDynamicMissmatchWithUniqueKey() throws Exception { // in this case, the core should load fine -- but we should get an error adding docs try { - 
initCore("solrconfig.xml","bad-schema-uniquekey-diff-type-dynamic-root.xml"); - assertFailedU("Unable to index docs with children", adoc(sdocWithChildren("1","-1"))); + initCore("solrconfig.xml", "bad-schema-uniquekey-diff-type-dynamic-root.xml"); + assertFailedU("Unable to index docs with children", adoc(sdocWithChildren("1", "-1"))); } finally { deleteCore(); } @@ -152,43 +147,45 @@ public void testRootTypeDynamicMissmatchWithUniqueKey() throws Exception { public void testSweetSpotSimBadConfig() throws Exception { doTest("bad-schema-sweetspot-both-tf.xml", "Can not mix"); - doTest("bad-schema-sweetspot-partial-baseline.xml", - "Overriding default baselineTf"); - doTest("bad-schema-sweetspot-partial-hyperbolic.xml", - "Overriding default hyperbolicTf"); - doTest("bad-schema-sweetspot-partial-norms.xml", - "Overriding default lengthNorm"); - } - + doTest("bad-schema-sweetspot-partial-baseline.xml", "Overriding default baselineTf"); + doTest("bad-schema-sweetspot-partial-hyperbolic.xml", "Overriding default hyperbolicTf"); + doTest("bad-schema-sweetspot-partial-norms.xml", "Overriding default lengthNorm"); + } + public void testBogusParameters() throws Exception { doTest("bad-schema-bogus-field-parameters.xml", "Invalid field property"); } - + public void testBogusAnalysisParameters() throws Exception { doTest("bad-schema-bogus-analysis-parameters.xml", "Unknown parameters"); } public void testSimDefaultFieldTypeHasNoExplicitSim() throws Exception { - doTest("bad-schema-sim-default-has-no-explicit-sim.xml", - "ft-has-no-sim"); + doTest("bad-schema-sim-default-has-no-explicit-sim.xml", "ft-has-no-sim"); } - + public void testSimDefaultFieldTypeDoesNotExist() throws Exception { - doTest("bad-schema-sim-default-does-not-exist.xml", - "ft-does-not-exist"); + doTest("bad-schema-sim-default-does-not-exist.xml", "ft-does-not-exist"); } public void testDefaultOperatorBanned() throws Exception { - doTest("bad-schema-default-operator.xml", - "default operator in schema (solrQueryParser/@defaultOperator) not supported"); + doTest( + "bad-schema-default-operator.xml", + "default operator in schema (solrQueryParser/@defaultOperator) not supported"); } public void testSchemaWithDefaultSearchField() throws Exception { - doTest("bad-schema-defaultsearchfield.xml", "Setting defaultSearchField in schema not supported since Solr 7"); + doTest( + "bad-schema-defaultsearchfield.xml", + "Setting defaultSearchField in schema not supported since Solr 7"); } public void testDateRangeFieldWithInvalidOptions() throws Exception { - doTest("bad-schema-daterangefield-type-options.xml", "FieldType DateRangeField is incompatible with omitNorms=false"); - doTest("bad-schema-daterangefield-instance-options.xml", "daterange_field of type DateRangeField is incompatible with omitNorms=false"); + doTest( + "bad-schema-daterangefield-type-options.xml", + "FieldType DateRangeField is incompatible with omitNorms=false"); + doTest( + "bad-schema-daterangefield-instance-options.xml", + "daterange_field of type DateRangeField is incompatible with omitNorms=false"); } } diff --git a/solr/core/src/test/org/apache/solr/schema/BooleanFieldTest.java b/solr/core/src/test/org/apache/solr/schema/BooleanFieldTest.java index d851d7147d4..824d13c45f6 100644 --- a/solr/core/src/test/org/apache/solr/schema/BooleanFieldTest.java +++ b/solr/core/src/test/org/apache/solr/schema/BooleanFieldTest.java @@ -31,40 +31,75 @@ public static void beforeClass() throws Exception { @Test public void testBoolField() { - // found an odd case when adding booleans 
to docValues and noticed that we didn't have any boolean - // specific tests. Only caught the odd case by accident so let's have a place for explicit tests + // found an odd case when adding booleans to docValues and noticed that we didn't have any + // boolean specific tests. Only caught the odd case by accident so let's have a place for + // explicit tests assertU(adoc("id", "0")); // missing - assertU(adoc("id", "1", "bind", "true", "bsto", "true", "bindsto", "true", "bindstom", "true", "bindstom", "false")); - assertU(adoc("id", "2", "bind", "false", "bsto", "false", "bindsto", "false", "bindstom", "false", "bindstom", "true")); + assertU( + adoc( + "id", + "1", + "bind", + "true", + "bsto", + "true", + "bindsto", + "true", + "bindstom", + "true", + "bindstom", + "false")); + assertU( + adoc( + "id", + "2", + "bind", + "false", + "bsto", + "false", + "bindsto", + "false", + "bindstom", + "false", + "bindstom", + "true")); assertU(adoc("id", "3", "bind", "false")); assertU(adoc("id", "4", "bsto", "false")); assertU(adoc("id", "5", "bindsto", "true")); assertU(adoc("id", "6", "bindstom", "true")); assertU(commit()); - assertQ(req("q", "*:*", "sort", "id asc", "fl", "id,bind,bsto,bindsto,bindstom") - ,"count(//result/doc[1]/bool[@name='bind'])=0" - ,"count(//result/doc[1]/bool[@name='bsto'])=0" - ,"count(//result/doc[1]/bool[@name='bindsto'])=0" - ,"count(//result/doc[2]/bool[@name='bind'])=0" - ,"count(//result/doc[3]/bool[@name='bind'])=0" - ,"//result/doc[2]/bool[@name='bsto'][.='true']" - ,"//result/doc[2]/bool[@name='bindsto'][.='true']" - ,"//result/doc[3]/bool[@name='bsto'][.='false']" - ,"//result/doc[3]/bool[@name='bindsto'][.='false']" - ,"//result/doc[2]/arr[@name='bindstom']/bool[1][.='true']" - ,"//result/doc[2]/arr[@name='bindstom']/bool[2][.='false']" - ,"//result/doc[3]/arr[@name='bindstom']/bool[1][.='false']" - ,"//result/doc[3]/arr[@name='bindstom']/bool[2][.='true']" + assertQ( + req("q", "*:*", "sort", "id asc", "fl", "id,bind,bsto,bindsto,bindstom"), + "count(//result/doc[1]/bool[@name='bind'])=0", + "count(//result/doc[1]/bool[@name='bsto'])=0", + "count(//result/doc[1]/bool[@name='bindsto'])=0", + "count(//result/doc[2]/bool[@name='bind'])=0", + "count(//result/doc[3]/bool[@name='bind'])=0", + "//result/doc[2]/bool[@name='bsto'][.='true']", + "//result/doc[2]/bool[@name='bindsto'][.='true']", + "//result/doc[3]/bool[@name='bsto'][.='false']", + "//result/doc[3]/bool[@name='bindsto'][.='false']", + "//result/doc[2]/arr[@name='bindstom']/bool[1][.='true']", + "//result/doc[2]/arr[@name='bindstom']/bool[2][.='false']", + "//result/doc[3]/arr[@name='bindstom']/bool[1][.='false']", + "//result/doc[3]/arr[@name='bindstom']/bool[2][.='true']"); - ); - // Make sure faceting is behaving. 
- assertQ(req("q", "*:*", "facet", "true", - "facet.field", "bind", - "facet.field", "bsto", - "facet.field", "bindsto", - "facet.field", "bindstom"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.field", + "bind", + "facet.field", + "bsto", + "facet.field", + "bindsto", + "facet.field", + "bindstom"), "//lst[@name='bind']/int[@name='false'][.='2']", "//lst[@name='bind']/int[@name='true'][.='1']", "//lst[@name='bsto'][not(node())]", @@ -74,5 +109,4 @@ public void testBoolField() { "//lst[@name='bindstom']/int[@name='false'][.='2']", "//lst[@name='bindstom']/int[@name='true'][.='3']"); } - } diff --git a/solr/core/src/test/org/apache/solr/schema/ChangedSchemaMergeTest.java b/solr/core/src/test/org/apache/solr/schema/ChangedSchemaMergeTest.java index bdc4b277b0f..8f373922cf3 100644 --- a/solr/core/src/test/org/apache/solr/schema/ChangedSchemaMergeTest.java +++ b/solr/core/src/test/org/apache/solr/schema/ChangedSchemaMergeTest.java @@ -21,7 +21,6 @@ import java.lang.invoke.MethodHandles; import java.nio.charset.StandardCharsets; import java.util.Properties; - import org.apache.commons.io.FileUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.ModifiableSolrParams; @@ -44,21 +43,22 @@ public class ChangedSchemaMergeTest extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - + public static Class simfac1; public static Class simfac2; - + @BeforeClass public static void beforeClass() throws Exception { simfac1 = LMJelinekMercerSimilarityFactory.class; simfac2 = SchemaSimilarityFactory.class; - + // sanity check our test... - assertTrue("Effectiveness of tets depends on SchemaSimilarityFactory being SolrCoreAware " + - "something changed in the impl and now major portions of this test are useless", - SolrCoreAware.class.isAssignableFrom(simfac2)); - + assertTrue( + "Effectiveness of tets depends on SchemaSimilarityFactory being SolrCoreAware " + + "something changed in the impl and now major portions of this test are useless", + SolrCoreAware.class.isAssignableFrom(simfac2)); + // randomize the order these similarities are used in the changed schemas // to help test proper initialization in both code paths if (random().nextBoolean()) { @@ -68,7 +68,7 @@ public static void beforeClass() throws Exception { } System.setProperty("solr.test.simfac1", simfac1.getName()); System.setProperty("solr.test.simfac2", simfac2.getName()); - + initCore(); } @@ -105,10 +105,9 @@ public void testSanityOfSchemaSimilarityFactoryInform() { broken.init(new ModifiableSolrParams()); // NO INFORM IllegalStateException e = expectThrows(IllegalStateException.class, broken::getSimilarity); - assertTrue("GOT: " + e.getMessage(), - e.getMessage().contains("SolrCoreAware.inform")); + assertTrue("GOT: " + e.getMessage(), e.getMessage().contains("SolrCoreAware.inform")); } - + @Test public void testOptimizeDiffSchemas() throws Exception { // load up a core (why not put it on disk?) 
@@ -116,7 +115,7 @@ public void testOptimizeDiffSchemas() throws Exception { try (SolrCore changed = cc.getCore("changed")) { assertSimilarity(changed, simfac1); - + // add some documents addDoc(changed, "id", "1", "which", "15", "text", "some stuff with which"); addDoc(changed, "id", "2", "which", "15", "text", "some stuff with which"); @@ -128,12 +127,13 @@ public void testOptimizeDiffSchemas() throws Exception { // write the new schema out and make it current FileUtils.writeStringToFile(schemaFile, withoutWhich, StandardCharsets.UTF_8); - IndexSchema iSchema = IndexSchemaFactory.buildIndexSchema("schema.xml", changed.getSolrConfig()); + IndexSchema iSchema = + IndexSchemaFactory.buildIndexSchema("schema.xml", changed.getSolrConfig()); changed.setLatestSchema(iSchema); - + assertSimilarity(changed, simfac2); // sanity check our sanity check - assertFalse("test is broken: both simfacs are the same", simfac1.equals(simfac2)); + assertFalse("test is broken: both simfacs are the same", simfac1.equals(simfac2)); addDoc(changed, "id", "1", "text", "some stuff without which"); addDoc(changed, "id", "5", "text", "some stuff without which"); @@ -141,8 +141,9 @@ public void testOptimizeDiffSchemas() throws Exception { changed.getUpdateHandler().commit(new CommitUpdateCommand(req, false)); changed.getUpdateHandler().commit(new CommitUpdateCommand(req, true)); } catch (Throwable e) { - log.error("Test exception, logging so not swallowed if there is a (finally) shutdown exception: " - , e); + log.error( + "Test exception, logging so not swallowed if there is a (finally) shutdown exception: ", + e); throw e; } finally { if (cc != null) cc.shutdown(); @@ -157,39 +158,42 @@ private static void assertSimilarity(SolrCore core, Class\n" + - " \n" + - " \n" + - " \n" + - " id\n" + - "\n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - - " \n" + - " \n" + - " " + - " " + - ""; - - private String withoutWhich = "\n" + - " \n" + - " \n" + - " id\n" + - "\n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " \n" + - " " + - " " + - ""; - - + private String withWhich = + "\n" + + " \n" + + " \n" + + " \n" + + " id\n" + + "\n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " " + + " " + + ""; + + private String withoutWhich = + "\n" + + " \n" + + " \n" + + " id\n" + + "\n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " \n" + + " " + + " " + + ""; } diff --git a/solr/core/src/test/org/apache/solr/schema/CopyFieldTest.java b/solr/core/src/test/org/apache/solr/schema/CopyFieldTest.java index 6ef47cc9f6a..81767b3ec44 100644 --- a/solr/core/src/test/org/apache/solr/schema/CopyFieldTest.java +++ b/solr/core/src/test/org/apache/solr/schema/CopyFieldTest.java @@ -18,7 +18,6 @@ import java.util.HashMap; import java.util.Map; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.MapSolrParams; @@ -29,224 +28,274 @@ import org.junit.Test; /** - * This is a simple test to make sure the CopyField works. - * It uses its own special schema file. + * This is a simple test to make sure the CopyField works. It uses its own special + * schema file. 
* * @since solr 1.4 */ public class CopyFieldTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema-copyfield-test.xml"); - } + initCore("solrconfig.xml", "schema-copyfield-test.xml"); + } @Test public void testCopyFieldSchemaFieldSchemaField() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - new CopyField(new SchemaField("source", new TextField()), null); - }); + IllegalArgumentException e = + expectThrows( + IllegalArgumentException.class, + () -> { + new CopyField(new SchemaField("source", new TextField()), null); + }); assertTrue(e.getLocalizedMessage().contains("can't be NULL")); - e = expectThrows(IllegalArgumentException.class, () -> { - new CopyField(null, new SchemaField("destination", new TextField())); - }); + e = + expectThrows( + IllegalArgumentException.class, + () -> { + new CopyField(null, new SchemaField("destination", new TextField())); + }); assertTrue(e.getLocalizedMessage().contains("can't be NULL")); - e = expectThrows(IllegalArgumentException.class, () -> { - new CopyField(null, null); - }); + e = + expectThrows( + IllegalArgumentException.class, + () -> { + new CopyField(null, null); + }); assertTrue(e.getLocalizedMessage().contains("can't be NULL")); } @Test public void testCopyFieldSchemaFieldSchemaFieldInt() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - new CopyField(null, new SchemaField("destination", new TextField()), 1000); - }); + IllegalArgumentException e = + expectThrows( + IllegalArgumentException.class, + () -> { + new CopyField(null, new SchemaField("destination", new TextField()), 1000); + }); assertTrue(e.getLocalizedMessage().contains("can't be NULL")); - e = expectThrows(IllegalArgumentException.class, () -> { - new CopyField(new SchemaField("source", new TextField()), null, 1000); - }); + e = + expectThrows( + IllegalArgumentException.class, + () -> { + new CopyField(new SchemaField("source", new TextField()), null, 1000); + }); assertTrue(e.getLocalizedMessage().contains("can't be NULL")); - e = expectThrows(IllegalArgumentException.class, () -> { - new CopyField(null, null, 1000); - }); + e = + expectThrows( + IllegalArgumentException.class, + () -> { + new CopyField(null, null, 1000); + }); assertTrue(e.getLocalizedMessage().contains("can't be NULL")); - e = expectThrows(IllegalArgumentException.class, () -> { - new CopyField(new SchemaField("source", new TextField()), - new SchemaField("destination", new TextField()), -1000); - }); + e = + expectThrows( + IllegalArgumentException.class, + () -> { + new CopyField( + new SchemaField("source", new TextField()), + new SchemaField("destination", new TextField()), + -1000); + }); assertTrue(e.getLocalizedMessage().contains("can't have a negative value")); - new CopyField(new SchemaField("source", new TextField()), - new SchemaField("destination", new TextField()), CopyField.UNLIMITED); + new CopyField( + new SchemaField("source", new TextField()), + new SchemaField("destination", new TextField()), + CopyField.UNLIMITED); } @Test public void testGetSource() { - final CopyField copyField = new CopyField(new SchemaField("source", - new TextField()), new SchemaField("destination", - new TextField()), 1000); + final CopyField copyField = + new CopyField( + new SchemaField("source", new TextField()), + new SchemaField("destination", new TextField()), + 1000); assertEquals("source", copyField.getSource().name); } @Test public void testGetDestination() 
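// ---------------------------------------------------------------------------
// Aside: a CopyField constructed with a maxChars limit truncates the copied
// value — the functionality test below expects the highlight field to hold
// "this is a simple test of " because schema-copyfield-test.xml evidently
// caps that copyField at 25 characters. A model of the truncation
// (CopyField.UNLIMITED, value 0, means no cap; treated here as any
// non-positive limit):
public class MaxCharsSketch {
  static String copyWithLimit(String source, int maxChars) {
    if (maxChars <= 0 || source.length() <= maxChars) {
      return source; // unlimited, or already short enough
    }
    return source.substring(0, maxChars);
  }

  public static void main(String[] args) {
    String text = "this is a simple test of the copy field functionality";
    System.out.println("[" + copyWithLimit(text, 25) + "]"); // [this is a simple test of ]
  }
}
// ---------------------------------------------------------------------------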
{ - final CopyField copyField = new CopyField(new SchemaField("source", - new TextField()), new SchemaField("destination", - new TextField()), 1000); + final CopyField copyField = + new CopyField( + new SchemaField("source", new TextField()), + new SchemaField("destination", new TextField()), + 1000); assertEquals("destination", copyField.getDestination().name); } @Test public void testGetMaxChars() { - final CopyField copyField = new CopyField(new SchemaField("source", - new TextField()), new SchemaField("destination", - new TextField()), 1000); + final CopyField copyField = + new CopyField( + new SchemaField("source", new TextField()), + new SchemaField("destination", new TextField()), + 1000); assertEquals(1000, copyField.getMaxChars()); } @Test - public void testCopyFieldFunctionality() - { - SolrCore core = h.getCore(); - assertU(adoc("id", "5", "title", "test copy field", "text_en", "this is a simple test of the copy field functionality")); - assertU(commit()); - - Map args = new HashMap<>(); - args.put( CommonParams.Q, "text_en:simple" ); - args.put( "indent", "true" ); - SolrQueryRequest req = new LocalSolrQueryRequest( core, new MapSolrParams( args) ); - - assertQ("Make sure they got in", req - ,"//*[@numFound='1']" - ,"//result/doc[1]/str[@name='id'][.='5']" - ); - - args = new HashMap<>(); - args.put( CommonParams.Q, "highlight:simple" ); - args.put( "indent", "true" ); - req = new LocalSolrQueryRequest( core, new MapSolrParams( args) ); - assertQ("dynamic source", req - ,"//*[@numFound='1']" - ,"//result/doc[1]/str[@name='id'][.='5']" - ,"//result/doc[1]/arr[@name='highlight']/str[.='this is a simple test of ']" - ); - - args = new HashMap<>(); - args.put( CommonParams.Q, "text_en:functionality" ); - args.put( "indent", "true" ); - req = new LocalSolrQueryRequest( core, new MapSolrParams( args) ); - assertQ("Make sure they got in", req - ,"//*[@numFound='1']"); - - args = new HashMap<>(); - args.put( CommonParams.Q, "highlight:functionality" ); - args.put( "indent", "true" ); - req = new LocalSolrQueryRequest( core, new MapSolrParams( args) ); - assertQ("dynamic source", req - ,"//*[@numFound='0']"); - } + public void testCopyFieldFunctionality() { + SolrCore core = h.getCore(); + assertU( + adoc( + "id", + "5", + "title", + "test copy field", + "text_en", + "this is a simple test of the copy field functionality")); + assertU(commit()); + + Map args = new HashMap<>(); + args.put(CommonParams.Q, "text_en:simple"); + args.put("indent", "true"); + SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); + + assertQ( + "Make sure they got in", + req, + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.='5']"); + + args = new HashMap<>(); + args.put(CommonParams.Q, "highlight:simple"); + args.put("indent", "true"); + req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); + assertQ( + "dynamic source", + req, + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.='5']", + "//result/doc[1]/arr[@name='highlight']/str[.='this is a simple test of ']"); + + args = new HashMap<>(); + args.put(CommonParams.Q, "text_en:functionality"); + args.put("indent", "true"); + req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); + assertQ("Make sure they got in", req, "//*[@numFound='1']"); + + args = new HashMap<>(); + args.put(CommonParams.Q, "highlight:functionality"); + args.put("indent", "true"); + req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); + assertQ("dynamic source", req, "//*[@numFound='0']"); + } @Test - public void 
testExplicitSourceGlob() - { + public void testExplicitSourceGlob() { SolrCore core = h.getCore(); IndexSchema schema = core.getLatestSchema(); - - assertTrue("schema should contain explicit field 'sku1'", schema.getFields().containsKey("sku1")); - assertTrue("schema should contain explicit field 'sku2'", schema.getFields().containsKey("sku2")); + + assertTrue( + "schema should contain explicit field 'sku1'", schema.getFields().containsKey("sku1")); + assertTrue( + "schema should contain explicit field 'sku2'", schema.getFields().containsKey("sku2")); assertNull("'sku*' should not be (or match) a dynamic field", schema.getDynamicPattern("sku*")); - - assertTrue("schema should contain dynamic field '*_s'", schema.getDynamicPattern("*_s").equals("*_s")); + + assertTrue( + "schema should contain dynamic field '*_s'", schema.getDynamicPattern("*_s").equals("*_s")); final String subsetPattern = "*_dest_sub_s"; final String dynamicPattern1 = schema.getDynamicPattern(subsetPattern); - assertTrue("'" + subsetPattern + "' should match dynamic field '*_s', but instead matches '" + dynamicPattern1 + "'", - dynamicPattern1.equals("*_s")); - + assertTrue( + "'" + + subsetPattern + + "' should match dynamic field '*_s', but instead matches '" + + dynamicPattern1 + + "'", + dynamicPattern1.equals("*_s")); + final String dest_sub_no_ast_s = "dest_sub_no_ast_s"; - assertFalse(schema.getFields().containsKey(dest_sub_no_ast_s)); // Should not be an explicit field + assertFalse( + schema.getFields().containsKey(dest_sub_no_ast_s)); // Should not be an explicit field final String dynamicPattern2 = schema.getDynamicPattern(dest_sub_no_ast_s); - assertTrue("'" + dest_sub_no_ast_s + "' should match dynamic field '*_s', but instead matches '" + dynamicPattern2 + "'", - dynamicPattern2.equals("*_s")); - + assertTrue( + "'" + + dest_sub_no_ast_s + + "' should match dynamic field '*_s', but instead matches '" + + dynamicPattern2 + + "'", + dynamicPattern2.equals("*_s")); + assertU(adoc("id", "5", "sku1", "10-1839ACX-93", "sku2", "AAM46")); assertU(commit()); - Map args = new HashMap<>(); - args.put( CommonParams.Q, "text:AAM46" ); - args.put( "indent", "true" ); - SolrQueryRequest req = new LocalSolrQueryRequest( core, new MapSolrParams( args) ); - assertQ("sku2 copied to text", req - ,"//*[@numFound='1']" - ,"//result/doc[1]/str[@name='id'][.='5']" - ); + Map args = new HashMap<>(); + args.put(CommonParams.Q, "text:AAM46"); + args.put("indent", "true"); + SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); + assertQ( + "sku2 copied to text", req, "//*[@numFound='1']", "//result/doc[1]/str[@name='id'][.='5']"); args = new HashMap<>(); - args.put( CommonParams.Q, "1_s:10-1839ACX-93" ); - args.put( "indent", "true" ); - req = new LocalSolrQueryRequest( core, new MapSolrParams( args) ); - assertQ("sku1 copied to dynamic dest *_s", req - ,"//*[@numFound='1']" - ,"//result/doc[1]/str[@name='id'][.='5']" - ,"//result/doc[1]/arr[@name='sku1']/str[.='10-1839ACX-93']" - ); + args.put(CommonParams.Q, "1_s:10-1839ACX-93"); + args.put("indent", "true"); + req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); + assertQ( + "sku1 copied to dynamic dest *_s", + req, + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.='5']", + "//result/doc[1]/arr[@name='sku1']/str[.='10-1839ACX-93']"); args = new HashMap<>(); - args.put( CommonParams.Q, "1_dest_sub_s:10-1839ACX-93" ); - args.put( "indent", "true" ); - req = new LocalSolrQueryRequest( core, new MapSolrParams( args) ); - assertQ("sku1 
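// ---------------------------------------------------------------------------
// Aside: getDynamicPattern in the glob tests resolves plain names and even
// narrower globs (e.g. "*_dest_sub_s") against dynamic-field patterns such
// as "*_s". A sketch of leading/trailing-asterisk matching (a model of the
// behavior the assertions document, not Solr's implementation):
public class DynamicPatternMatchSketch {
  static boolean matches(String dynamicPattern, String nameOrGlob) {
    if (dynamicPattern.startsWith("*")) {
      return nameOrGlob.endsWith(dynamicPattern.substring(1));
    }
    if (dynamicPattern.endsWith("*")) {
      return nameOrGlob.startsWith(dynamicPattern.substring(0, dynamicPattern.length() - 1));
    }
    return dynamicPattern.equals(nameOrGlob);
  }

  public static void main(String[] args) {
    System.out.println(matches("*_s", "dest_sub_no_ast_s")); // true: plain name
    System.out.println(matches("*_s", "*_dest_sub_s")); // true: subset glob
    System.out.println(matches("*_s", "sku1")); // false
  }
}
// ---------------------------------------------------------------------------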
copied to *_dest_sub_s (*_s subset pattern)", req - ,"//*[@numFound='1']"); + args.put(CommonParams.Q, "1_dest_sub_s:10-1839ACX-93"); + args.put("indent", "true"); + req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); + assertQ("sku1 copied to *_dest_sub_s (*_s subset pattern)", req, "//*[@numFound='1']"); args = new HashMap<>(); - args.put( CommonParams.Q, "dest_sub_no_ast_s:AAM46" ); - args.put( "indent", "true" ); - req = new LocalSolrQueryRequest( core, new MapSolrParams( args) ); - assertQ("sku2 copied to dest_sub_no_ast_s (*_s subset pattern no asterisk)", req - ,"//*[@numFound='1']"); + args.put(CommonParams.Q, "dest_sub_no_ast_s:AAM46"); + args.put("indent", "true"); + req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); + assertQ( + "sku2 copied to dest_sub_no_ast_s (*_s subset pattern no asterisk)", + req, + "//*[@numFound='1']"); } @Test - public void testSourceGlobMatchesNoDynamicOrExplicitField() - { - // SOLR-4650: copyField source globs should not have to match an explicit or dynamic field + public void testSourceGlobMatchesNoDynamicOrExplicitField() { + // SOLR-4650: copyField source globs should not have to match an explicit or dynamic field SolrCore core = h.getCore(); IndexSchema schema = core.getLatestSchema(); - assertNull("'testing123_*' should not be (or match) a dynamic or explicit field", schema.getFieldOrNull("testing123_*")); + assertNull( + "'testing123_*' should not be (or match) a dynamic or explicit field", + schema.getFieldOrNull("testing123_*")); - assertTrue("schema should contain dynamic field '*_s'", schema.getDynamicPattern("*_s").equals("*_s")); + assertTrue( + "schema should contain dynamic field '*_s'", schema.getDynamicPattern("*_s").equals("*_s")); assertU(adoc("id", "5", "sku1", "10-1839ACX-93", "testing123_s", "AAM46")); assertU(commit()); - Map args = new HashMap<>(); - args.put( CommonParams.Q, "text:AAM46" ); - args.put( "indent", "true" ); - SolrQueryRequest req = new LocalSolrQueryRequest( core, new MapSolrParams( args) ); - assertQ("sku2 copied to text", req - ,"//*[@numFound='1']" - ,"//result/doc[1]/str[@name='id'][.='5']" - ); + Map args = new HashMap<>(); + args.put(CommonParams.Q, "text:AAM46"); + args.put("indent", "true"); + SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); + assertQ( + "sku2 copied to text", req, "//*[@numFound='1']", "//result/doc[1]/str[@name='id'][.='5']"); } public void testCatchAllCopyField() { IndexSchema schema = h.getCore().getLatestSchema(); - assertNull("'*' should not be (or match) a dynamic field", - schema.getDynamicPattern("*")); - + assertNull("'*' should not be (or match) a dynamic field", schema.getDynamicPattern("*")); + assertU(adoc("id", "5", "sku1", "10-1839ACX-93", "testing123_s", "AAM46")); assertU(commit()); - for (String q : new String[] {"5", "10-1839ACX-93", "AAM46" }) { - assertQ(req("q","catchall_t:" + q) - ,"//*[@numFound='1']" - ,"//result/doc[1]/str[@name='id'][.='5']"); + for (String q : new String[] {"5", "10-1839ACX-93", "AAM46"}) { + assertQ( + req("q", "catchall_t:" + q), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.='5']"); } } } diff --git a/solr/core/src/test/org/apache/solr/schema/CurrencyFieldTypeTest.java b/solr/core/src/test/org/apache/solr/schema/CurrencyFieldTypeTest.java index c63a8607518..2f04c33436d 100644 --- a/solr/core/src/test/org/apache/solr/schema/CurrencyFieldTypeTest.java +++ b/solr/core/src/test/org/apache/solr/schema/CurrencyFieldTypeTest.java @@ -17,20 +17,18 @@ package 
org.apache.solr.schema; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import java.util.Arrays; import java.util.Currency; import java.util.List; import java.util.Random; import java.util.Set; - -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.index.IndexableField; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.SolrParams; import org.apache.solr.core.SolrCore; import org.apache.solr.util.RTimer; - import org.junit.Assume; import org.junit.BeforeClass; import org.junit.Ignore; @@ -40,28 +38,27 @@ public class CurrencyFieldTypeTest extends SolrTestCaseJ4 { private final String fieldName; private final Class expectedProviderClass; - - public CurrencyFieldTypeTest(String fieldName, Class expectedProviderClass) { + + public CurrencyFieldTypeTest( + String fieldName, Class expectedProviderClass) { this.fieldName = fieldName; this.expectedProviderClass = expectedProviderClass; } @ParametersFactory public static Iterable parameters() { - return Arrays.asList(new Object[][] { - {"amount", FileExchangeRateProvider.class}, // CurrencyField - {"mock_amount", MockExchangeRateProvider.class}, // CurrencyField - {"oer_amount", OpenExchangeRatesOrgProvider.class}, // CurrencyField - {"amount_CFT", FileExchangeRateProvider.class}, // CurrencyFieldType - {"mock_amount_CFT", MockExchangeRateProvider.class}, // CurrencyFieldType - {"oer_amount_CFT", OpenExchangeRatesOrgProvider.class} // CurrencyFieldType - }); + return Arrays.asList( + new Object[][] { + {"amount", FileExchangeRateProvider.class}, // CurrencyField + {"mock_amount", MockExchangeRateProvider.class}, // CurrencyField + {"oer_amount", OpenExchangeRatesOrgProvider.class}, // CurrencyField + {"amount_CFT", FileExchangeRateProvider.class}, // CurrencyFieldType + {"mock_amount_CFT", MockExchangeRateProvider.class}, // CurrencyFieldType + {"oer_amount_CFT", OpenExchangeRatesOrgProvider.class} // CurrencyFieldType + }); } - - /** - * "Assumes" that the specified list of currency codes are - * supported in this JVM - */ + + /** "Assumes" that the specified list of currency codes are supported in this JVM */ public static void assumeCurrencySupport(String... codes) { try { // each JDK might have a diff list of supported currencies, @@ -73,8 +70,8 @@ public static void assumeCurrencySupport(String... codes) { } catch (IllegalArgumentException e) { Assume.assumeNoException(e); } - } + @BeforeClass public static void beforeClass() throws Exception { assumeCurrencySupport("USD", "EUR", "MXN", "GBP", "JPY", "NOK"); @@ -89,11 +86,15 @@ public void testCurrencySchema() throws Exception { assertNotNull(amount); assertTrue(amount.isPolyField()); - CurrencyFieldType type = (CurrencyFieldType)amount.getType(); - String currencyDynamicField - = "*" + (type instanceof CurrencyField ? FieldType.POLY_FIELD_SEPARATOR : "") + type.fieldSuffixCurrency; - String amountDynamicField - = "*" + (type instanceof CurrencyField ? FieldType.POLY_FIELD_SEPARATOR : "") + type.fieldSuffixAmountRaw; + CurrencyFieldType type = (CurrencyFieldType) amount.getType(); + String currencyDynamicField = + "*" + + (type instanceof CurrencyField ? FieldType.POLY_FIELD_SEPARATOR : "") + + type.fieldSuffixCurrency; + String amountDynamicField = + "*" + + (type instanceof CurrencyField ? 
FieldType.POLY_FIELD_SEPARATOR : "") + + type.fieldSuffixAmountRaw; SchemaField[] dynFields = schema.getDynamicFieldPrototypes(); boolean seenCurrency = false; @@ -109,13 +110,16 @@ public void testCurrencySchema() throws Exception { } } - assertTrue("Didn't find the expected currency code dynamic field " + currencyDynamicField, seenCurrency); + assertTrue( + "Didn't find the expected currency code dynamic field " + currencyDynamicField, + seenCurrency); assertTrue("Didn't find the expected value dynamic field " + amountDynamicField, seenAmount); } @Test public void testCurrencyFieldType() throws Exception { - assumeTrue("This test is only applicable to the XML file based exchange rate provider", + assumeTrue( + "This test is only applicable to the XML file based exchange rate provider", expectedProviderClass.equals(FileExchangeRateProvider.class)); SolrCore core = h.getCore(); @@ -124,23 +128,25 @@ public void testCurrencyFieldType() throws Exception { assertNotNull(amount); assertTrue(fieldName + " is not a poly field", amount.isPolyField()); FieldType tmp = amount.getType(); - assertTrue(fieldName + " is not an instance of CurrencyFieldType", tmp instanceof CurrencyFieldType); + assertTrue( + fieldName + " is not an instance of CurrencyFieldType", tmp instanceof CurrencyFieldType); String currencyValue = "1.50,EUR"; List fields = amount.createFields(currencyValue); assertEquals(fields.size(), 3); // First field is currency code, second is value, third is stored. for (int i = 0; i < 3; i++) { - boolean hasValue = fields.get(i).readerValue() != null + boolean hasValue = + fields.get(i).readerValue() != null || fields.get(i).numericValue() != null || fields.get(i).stringValue() != null; assertTrue("Doesn't have a value: " + fields.get(i), hasValue); } assertEquals(schema.getFieldTypeByName("string").toExternal(fields.get(2)), "1.50,EUR"); - + // A few tests on the provider directly - ExchangeRateProvider p = ((CurrencyFieldType)tmp).getProvider(); + ExchangeRateProvider p = ((CurrencyFieldType) tmp).getProvider(); Set availableCurrencies = p.listAvailableCurrencies(); assertEquals(5, availableCurrencies.size()); assertTrue(p.reload()); @@ -149,14 +155,15 @@ public void testCurrencyFieldType() throws Exception { @Test public void testMockExchangeRateProvider() throws Exception { - assumeTrue("This test is only applicable to the mock exchange rate provider", + assumeTrue( + "This test is only applicable to the mock exchange rate provider", expectedProviderClass.equals(MockExchangeRateProvider.class)); - + SolrCore core = h.getCore(); IndexSchema schema = core.getLatestSchema(); SchemaField field = schema.getField(fieldName); FieldType fieldType = field.getType(); - ExchangeRateProvider provider = ((CurrencyFieldType)fieldType).getProvider(); + ExchangeRateProvider provider = ((CurrencyFieldType) fieldType).getProvider(); // A few tests on the provider directly assertEquals(3, provider.listAvailableCurrencies().size()); @@ -166,13 +173,14 @@ public void testMockExchangeRateProvider() throws Exception { @Test public void testCurrencyRangeSearch() throws Exception { - assumeTrue("This test is only applicable to the XML file based exchange rate provider", + assumeTrue( + "This test is only applicable to the XML file based exchange rate provider", expectedProviderClass.equals(FileExchangeRateProvider.class)); - + clearIndex(); final int emptyDocs = atLeast(50); // times 2 final int negDocs = atLeast(5); - + assertU(adoc("id", "0", fieldName, "0,USD")); // 0 // lots of docs w/o values for (int i = 
100; i <= 100 + emptyDocs; i++) { @@ -194,62 +202,50 @@ public void testCurrencyRangeSearch() throws Exception { assertU(commit()); - assertQ(req("fl", "*,score", "q", - fieldName+":[2.00,USD TO 5.00,USD]"), - "//*[@numFound='4']"); + assertQ(req("fl", "*,score", "q", fieldName + ":[2.00,USD TO 5.00,USD]"), "//*[@numFound='4']"); - assertQ(req("fl", "*,score", "q", - fieldName+":[0.50,USD TO 1.00,USD]"), - "//*[@numFound='1']"); + assertQ(req("fl", "*,score", "q", fieldName + ":[0.50,USD TO 1.00,USD]"), "//*[@numFound='1']"); - assertQ(req("fl", "*,score", "q", - fieldName+":[24.00,USD TO 25.00,USD]"), - "//*[@numFound='0']"); + assertQ( + req("fl", "*,score", "q", fieldName + ":[24.00,USD TO 25.00,USD]"), "//*[@numFound='0']"); // "GBP" currency code is 1/2 of a USD dollar, for testing. - assertQ(req("fl", "*,score", "q", - fieldName+":[0.50,GBP TO 1.00,GBP]"), - "//*[@numFound='2']"); + assertQ(req("fl", "*,score", "q", fieldName + ":[0.50,GBP TO 1.00,GBP]"), "//*[@numFound='2']"); // "EUR" currency code is 2.5X of a USD dollar, for testing. - assertQ(req("fl", "*,score", "q", - fieldName+":[24.00,EUR TO 25.00,EUR]"), - "//*[@numFound='1']"); + assertQ( + req("fl", "*,score", "q", fieldName + ":[24.00,EUR TO 25.00,EUR]"), "//*[@numFound='1']"); // Slight asymmetric rate should work. - assertQ(req("fl", "*,score", "q", - fieldName+":[24.99,EUR TO 25.01,EUR]"), - "//*[@numFound='1']"); + assertQ( + req("fl", "*,score", "q", fieldName + ":[24.99,EUR TO 25.01,EUR]"), "//*[@numFound='1']"); // Open ended ranges without currency - assertQ(req("fl", "*,score", "q", - fieldName+":[* TO *]"), - "//*[@numFound='" + (2 + 10 + negDocs) + "']"); + assertQ( + req("fl", "*,score", "q", fieldName + ":[* TO *]"), + "//*[@numFound='" + (2 + 10 + negDocs) + "']"); // Open ended ranges without currency - assertQ(req("fl", "*,score", "q", - fieldName+":*"), - "//*[@numFound='" + (2 + 10 + negDocs) + "']"); - + assertQ( + req("fl", "*,score", "q", fieldName + ":*"), "//*[@numFound='" + (2 + 10 + negDocs) + "']"); + // Open ended ranges with currency - assertQ(req("fl", "*,score", "q", - fieldName+":[*,EUR TO *,EUR]"), - "//*[@numFound='" + (2 + 10 + negDocs) + "']"); + assertQ( + req("fl", "*,score", "q", fieldName + ":[*,EUR TO *,EUR]"), + "//*[@numFound='" + (2 + 10 + negDocs) + "']"); // Open ended start range without currency - assertQ(req("fl", "*,score", "q", - fieldName+":[* TO 5,USD]"), - "//*[@numFound='" + (2 + 5 + negDocs) + "']"); + assertQ( + req("fl", "*,score", "q", fieldName + ":[* TO 5,USD]"), + "//*[@numFound='" + (2 + 5 + negDocs) + "']"); // Open ended start range with currency (currency for the * won't matter) - assertQ(req("fl", "*,score", "q", - fieldName+":[*,USD TO 5,USD]"), - "//*[@numFound='" + (2 + 5 + negDocs) + "']"); + assertQ( + req("fl", "*,score", "q", fieldName + ":[*,USD TO 5,USD]"), + "//*[@numFound='" + (2 + 5 + negDocs) + "']"); // Open ended end range - assertQ(req("fl", "*,score", "q", - fieldName+":[3 TO *]"), - "//*[@numFound='8']"); + assertQ(req("fl", "*,score", "q", fieldName + ":[3 TO *]"), "//*[@numFound='8']"); } @Test @@ -257,28 +253,29 @@ public void testBogusCurrency() throws Exception { ignoreException("HOSS"); // bogus currency - assertQEx("Expected exception for invalid currency", - req("fl", "*,score", "q", - fieldName+":[3,HOSS TO *]"), - 400); + assertQEx( + "Expected exception for invalid currency", + req("fl", "*,score", "q", fieldName + ":[3,HOSS TO *]"), + 400); } @Test public void testCurrencyPointQuery() throws Exception { - assumeTrue("This test 
is only applicable to the XML file based exchange rate provider", + assumeTrue( + "This test is only applicable to the XML file based exchange rate provider", expectedProviderClass.equals(FileExchangeRateProvider.class)); clearIndex(); assertU(adoc("id", "" + 1, fieldName, "10.00,USD")); assertU(adoc("id", "" + 2, fieldName, "15.00,MXN")); assertU(commit()); - assertQ(req("fl", "*,score", "q", fieldName+":10.00,USD"), "//str[@name='id']='1'"); - assertQ(req("fl", "*,score", "q", fieldName+":9.99,USD"), "//*[@numFound='0']"); - assertQ(req("fl", "*,score", "q", fieldName+":10.01,USD"), "//*[@numFound='0']"); - assertQ(req("fl", "*,score", "q", fieldName+":15.00,MXN"), "//str[@name='id']='2'"); - assertQ(req("fl", "*,score", "q", fieldName+":7.50,USD"), "//str[@name='id']='2'"); - assertQ(req("fl", "*,score", "q", fieldName+":7.49,USD"), "//*[@numFound='0']"); - assertQ(req("fl", "*,score", "q", fieldName+":7.51,USD"), "//*[@numFound='0']"); + assertQ(req("fl", "*,score", "q", fieldName + ":10.00,USD"), "//str[@name='id']='1'"); + assertQ(req("fl", "*,score", "q", fieldName + ":9.99,USD"), "//*[@numFound='0']"); + assertQ(req("fl", "*,score", "q", fieldName + ":10.01,USD"), "//*[@numFound='0']"); + assertQ(req("fl", "*,score", "q", fieldName + ":15.00,MXN"), "//str[@name='id']='2'"); + assertQ(req("fl", "*,score", "q", fieldName + ":7.50,USD"), "//str[@name='id']='2'"); + assertQ(req("fl", "*,score", "q", fieldName + ":7.49,USD"), "//*[@numFound='0']"); + assertQ(req("fl", "*,score", "q", fieldName + ":7.51,USD"), "//*[@numFound='0']"); } @Ignore @@ -290,22 +287,39 @@ public void testPerformance() throws Exception { for (int i = 1; i <= initDocs; i++) { assertU(adoc("id", "" + i, fieldName, (r.nextInt(10) + 1.00) + ",USD")); - if (i % 1000 == 0) - System.out.println(i); + if (i % 1000 == 0) System.out.println(i); } assertU(commit()); for (int i = 0; i < 1000; i++) { double lower = r.nextInt(10) + 1.00; - assertQ(req("fl", "*,score", "q", fieldName+":[" + lower + ",USD TO " + (lower + 10.00) + ",USD]"), "//*"); - assertQ(req("fl", "*,score", "q", fieldName+":[" + lower + ",EUR TO " + (lower + 10.00) + ",EUR]"), "//*"); + assertQ( + req( + "fl", + "*,score", + "q", + fieldName + ":[" + lower + ",USD TO " + (lower + 10.00) + ",USD]"), + "//*"); + assertQ( + req( + "fl", + "*,score", + "q", + fieldName + ":[" + lower + ",EUR TO " + (lower + 10.00) + ",EUR]"), + "//*"); } for (int j = 0; j < 3; j++) { final RTimer timer = new RTimer(); for (int i = 0; i < 1000; i++) { double lower = r.nextInt(10) + 1.00; - assertQ(req("fl", "*,score", "q", fieldName+":[" + lower + ",USD TO " + (lower + (9.99 - (j * 0.01))) + ",USD]"), "//*"); + assertQ( + req( + "fl", + "*,score", + "q", + fieldName + ":[" + lower + ",USD TO " + (lower + (9.99 - (j * 0.01))) + ",USD]"), + "//*"); } System.out.println(timer.getTime()); @@ -317,7 +331,13 @@ public void testPerformance() throws Exception { final RTimer timer = new RTimer(); for (int i = 0; i < 1000; i++) { double lower = r.nextInt(10) + 1.00; - assertQ(req("fl", "*,score", "q", fieldName+":[" + lower + ",EUR TO " + (lower + (9.99 - (j * 0.01))) + ",EUR]"), "//*"); + assertQ( + req( + "fl", + "*,score", + "q", + fieldName + ":[" + lower + ",EUR TO " + (lower + (9.99 - (j * 0.01))) + ",EUR]"), + "//*"); } System.out.println(timer.getTime()); @@ -326,7 +346,8 @@ public void testPerformance() throws Exception { @Test public void testCurrencySort() throws Exception { - assumeTrue("This test is only applicable to the XML file based exchange rate provider", + assumeTrue( + 
"This test is only applicable to the XML file based exchange rate provider", expectedProviderClass.equals(FileExchangeRateProvider.class)); clearIndex(); @@ -338,27 +359,32 @@ public void testCurrencySort() throws Exception { assertU(adoc("id", "" + 5, fieldName, "2.00,GBP")); assertU(commit()); - assertQ(req("fl", "*,score", "q", "*:*", "sort", fieldName+" desc", "limit", "1"), "//str[@name='id']='4'"); - assertQ(req("fl", "*,score", "q", "*:*", "sort", fieldName+" asc", "limit", "1"), "//str[@name='id']='3'"); + assertQ( + req("fl", "*,score", "q", "*:*", "sort", fieldName + " desc", "limit", "1"), + "//str[@name='id']='4'"); + assertQ( + req("fl", "*,score", "q", "*:*", "sort", fieldName + " asc", "limit", "1"), + "//str[@name='id']='3'"); } public void testExpectedProvider() { - SolrCore core = h.getCore(); - IndexSchema schema = core.getLatestSchema(); - SchemaField field = schema.getField(fieldName); - FieldType fieldType = field.getType(); - ExchangeRateProvider provider = ((CurrencyFieldType)fieldType).getProvider(); - assertEquals(expectedProviderClass, provider.getClass()); - } - + SolrCore core = h.getCore(); + IndexSchema schema = core.getLatestSchema(); + SchemaField field = schema.getField(fieldName); + FieldType fieldType = field.getType(); + ExchangeRateProvider provider = ((CurrencyFieldType) fieldType).getProvider(); + assertEquals(expectedProviderClass, provider.getClass()); + } + public void testFunctionUsage() throws Exception { - assumeTrue("This test is only applicable to the XML file based exchange rate provider", + assumeTrue( + "This test is only applicable to the XML file based exchange rate provider", expectedProviderClass.equals(FileExchangeRateProvider.class)); clearIndex(); for (int i = 1; i <= 8; i++) { // "GBP" currency code is 1/2 of a USD dollar, for testing. 
- assertU(adoc("id", "" + i, fieldName, (((float)i)/2) + ",GBP")); + assertU(adoc("id", "" + i, fieldName, (((float) i) / 2) + ",GBP")); } for (int i = 9; i <= 11; i++) { assertU(adoc("id", "" + i, fieldName, i + ",USD")); @@ -368,102 +394,113 @@ public void testFunctionUsage() throws Exception { // direct value source usage, gets "raw" form od default currency // default==USD, so raw==penies - assertQ(req("fl", "id,func:field($f)", - "f", fieldName, - "q", "id:5"), - "//*[@numFound='1']", - "//doc/float[@name='func' and .=500]"); - assertQ(req("fl", "id,func:field($f)", - "f", fieldName, - "q", "id:10"), - "//*[@numFound='1']", - "//doc/float[@name='func' and .=1000]"); - assertQ(req("fl", "id,score,"+fieldName, - "q", "{!frange u=500}"+fieldName) - ,"//*[@numFound='5']" - ,"//str[@name='id']='1'" - ,"//str[@name='id']='2'" - ,"//str[@name='id']='3'" - ,"//str[@name='id']='4'" - ,"//str[@name='id']='5'" - ); - assertQ(req("fl", "id,score,"+fieldName, - "q", "{!frange l=500 u=1000}"+fieldName) - ,"//*[@numFound='6']" - ,"//str[@name='id']='5'" - ,"//str[@name='id']='6'" - ,"//str[@name='id']='7'" - ,"//str[@name='id']='8'" - ,"//str[@name='id']='9'" - ,"//str[@name='id']='10'" - ); + assertQ( + req( + "fl", "id,func:field($f)", + "f", fieldName, + "q", "id:5"), + "//*[@numFound='1']", + "//doc/float[@name='func' and .=500]"); + assertQ( + req( + "fl", "id,func:field($f)", + "f", fieldName, + "q", "id:10"), + "//*[@numFound='1']", + "//doc/float[@name='func' and .=1000]"); + assertQ( + req( + "fl", "id,score," + fieldName, + "q", "{!frange u=500}" + fieldName), + "//*[@numFound='5']", + "//str[@name='id']='1'", + "//str[@name='id']='2'", + "//str[@name='id']='3'", + "//str[@name='id']='4'", + "//str[@name='id']='5'"); + assertQ( + req( + "fl", "id,score," + fieldName, + "q", "{!frange l=500 u=1000}" + fieldName), + "//*[@numFound='6']", + "//str[@name='id']='5'", + "//str[@name='id']='6'", + "//str[@name='id']='7'", + "//str[@name='id']='8'", + "//str[@name='id']='9'", + "//str[@name='id']='10'"); // use the currency function to convert to default (USD) - assertQ(req("fl", "id,func:currency($f)", - "f", fieldName, - "q", "id:10"), - "//*[@numFound='1']", - "//doc/float[@name='func' and .=10]"); - assertQ(req("fl", "id,func:currency($f)", - "f", fieldName, - "q", "id:5"), - "//*[@numFound='1']", - "//doc/float[@name='func' and .=5]"); - assertQ(req("fl", "id,score"+fieldName, - "f", fieldName, - "q", "{!frange u=5}currency($f)") - ,"//*[@numFound='5']" - ,"//str[@name='id']='1'" - ,"//str[@name='id']='2'" - ,"//str[@name='id']='3'" - ,"//str[@name='id']='4'" - ,"//str[@name='id']='5'" - ); - assertQ(req("fl", "id,score"+fieldName, - "f", fieldName, - "q", "{!frange l=5 u=10}currency($f)") - ,"//*[@numFound='6']" - ,"//str[@name='id']='5'" - ,"//str[@name='id']='6'" - ,"//str[@name='id']='7'" - ,"//str[@name='id']='8'" - ,"//str[@name='id']='9'" - ,"//str[@name='id']='10'" - ); - - // use the currency function to convert to MXN - assertQ(req("fl", "id,func:currency($f,MXN)", - "f", fieldName, - "q", "id:5"), - "//*[@numFound='1']", - "//doc/float[@name='func' and .=10]"); - assertQ(req("fl", "id,func:currency($f,MXN)", - "f", fieldName, - "q", "id:10"), - "//*[@numFound='1']", - "//doc/float[@name='func' and .=20]"); - assertQ(req("fl", "*,score,"+fieldName, - "f", fieldName, - "q", "{!frange u=10}currency($f,MXN)") - ,"//*[@numFound='5']" - ,"//str[@name='id']='1'" - ,"//str[@name='id']='2'" - ,"//str[@name='id']='3'" - ,"//str[@name='id']='4'" - ,"//str[@name='id']='5'" - ); - 
assertQ(req("fl", "*,score,"+fieldName, - "f", fieldName, - "q", "{!frange l=10 u=20}currency($f,MXN)") - ,"//*[@numFound='6']" - ,"//str[@name='id']='5'" - ,"//str[@name='id']='6'" - ,"//str[@name='id']='7'" - ,"//str[@name='id']='8'" - ,"//str[@name='id']='9'" - ,"//str[@name='id']='10'" - ); + assertQ( + req( + "fl", "id,func:currency($f)", + "f", fieldName, + "q", "id:10"), + "//*[@numFound='1']", + "//doc/float[@name='func' and .=10]"); + assertQ( + req( + "fl", "id,func:currency($f)", + "f", fieldName, + "q", "id:5"), + "//*[@numFound='1']", + "//doc/float[@name='func' and .=5]"); + assertQ( + req("fl", "id,score" + fieldName, "f", fieldName, "q", "{!frange u=5}currency($f)"), + "//*[@numFound='5']", + "//str[@name='id']='1'", + "//str[@name='id']='2'", + "//str[@name='id']='3'", + "//str[@name='id']='4'", + "//str[@name='id']='5'"); + assertQ( + req("fl", "id,score" + fieldName, "f", fieldName, "q", "{!frange l=5 u=10}currency($f)"), + "//*[@numFound='6']", + "//str[@name='id']='5'", + "//str[@name='id']='6'", + "//str[@name='id']='7'", + "//str[@name='id']='8'", + "//str[@name='id']='9'", + "//str[@name='id']='10'"); + // use the currency function to convert to MXN + assertQ( + req( + "fl", "id,func:currency($f,MXN)", + "f", fieldName, + "q", "id:5"), + "//*[@numFound='1']", + "//doc/float[@name='func' and .=10]"); + assertQ( + req( + "fl", "id,func:currency($f,MXN)", + "f", fieldName, + "q", "id:10"), + "//*[@numFound='1']", + "//doc/float[@name='func' and .=20]"); + assertQ( + req("fl", "*,score," + fieldName, "f", fieldName, "q", "{!frange u=10}currency($f,MXN)"), + "//*[@numFound='5']", + "//str[@name='id']='1'", + "//str[@name='id']='2'", + "//str[@name='id']='3'", + "//str[@name='id']='4'", + "//str[@name='id']='5'"); + assertQ( + req( + "fl", + "*,score," + fieldName, + "f", + fieldName, + "q", + "{!frange l=10 u=20}currency($f,MXN)"), + "//*[@numFound='6']", + "//str[@name='id']='5'", + "//str[@name='id']='6'", + "//str[@name='id']='7'", + "//str[@name='id']='8'", + "//str[@name='id']='9'", + "//str[@name='id']='10'"); } @Test @@ -473,228 +510,358 @@ public void testStringValue() throws Exception { assertEquals("3.14,GBP", new CurrencyValue(314, "GBP").strValue()); CurrencyValue currencyValue = new CurrencyValue(314, "XYZ"); - expectThrows(SolrException.class, currencyValue::strValue); + expectThrows(SolrException.class, currencyValue::strValue); } @Test public void testRangeFacet() throws Exception { - assumeTrue("This test is only applicable to the XML file based exchange rate provider " + - "because it excercies the asymetric exchange rates option it supports", - expectedProviderClass.equals(FileExchangeRateProvider.class)); - + assumeTrue( + "This test is only applicable to the XML file based exchange rate provider " + + "because it excercies the asymetric exchange rates option it supports", + expectedProviderClass.equals(FileExchangeRateProvider.class)); + clearIndex(); - + // NOTE: in our test conversions EUR uses an asynetric echange rate // these are the equivalent values when converting to: USD EUR GBP - assertU(adoc("id", "" + 1, fieldName, "10.00,USD")); // 10.00,USD 25.00,EUR 5.00,GBP - assertU(adoc("id", "" + 2, fieldName, "15.00,EUR")); // 7.50,USD 15.00,EUR 7.50,GBP - assertU(adoc("id", "" + 3, fieldName, "6.00,GBP")); // 12.00,USD 12.00,EUR 6.00,GBP - assertU(adoc("id", "" + 4, fieldName, "7.00,EUR")); // 3.50,USD 7.00,EUR 3.50,GBP - assertU(adoc("id", "" + 5, fieldName, "2,GBP")); // 4.00,USD 4.00,EUR 2.00,GBP + assertU(adoc("id", "" + 1, fieldName, 
"10.00,USD")); // 10.00,USD 25.00,EUR 5.00,GBP + assertU(adoc("id", "" + 2, fieldName, "15.00,EUR")); // 7.50,USD 15.00,EUR 7.50,GBP + assertU(adoc("id", "" + 3, fieldName, "6.00,GBP")); // 12.00,USD 12.00,EUR 6.00,GBP + assertU(adoc("id", "" + 4, fieldName, "7.00,EUR")); // 3.50,USD 7.00,EUR 3.50,GBP + assertU(adoc("id", "" + 5, fieldName, "2,GBP")); // 4.00,USD 4.00,EUR 2.00,GBP assertU(commit()); for (String suffix : Arrays.asList("", ",USD")) { - assertQ("Ensure that we get correct facet counts back in USD (explicit or implicit default) (facet.range)", - req("fl", "*,score", "q", "*:*", "rows", "0", "facet", "true", - "facet.range", fieldName, - "f." + fieldName + ".facet.range.start", "4.00" + suffix, - "f." + fieldName + ".facet.range.end", "11.00" + suffix, - "f." + fieldName + ".facet.range.gap", "1.00" + suffix, - "f." + fieldName + ".facet.range.other", "all") - ,"count(//lst[@name='counts']/int)=7" - ,"//lst[@name='counts']/int[@name='4.00,USD']='1'" - ,"//lst[@name='counts']/int[@name='5.00,USD']='0'" - ,"//lst[@name='counts']/int[@name='6.00,USD']='0'" - ,"//lst[@name='counts']/int[@name='7.00,USD']='1'" - ,"//lst[@name='counts']/int[@name='8.00,USD']='0'" - ,"//lst[@name='counts']/int[@name='9.00,USD']='0'" - ,"//lst[@name='counts']/int[@name='10.00,USD']='1'" - ,"//int[@name='after']='1'" - ,"//int[@name='before']='1'" - ,"//int[@name='between']='3'" - ); - assertQ("Ensure that we get correct facet counts back in USD (explicit or implicit default) (json.facet)", - req("fl", "*,score", "q", "*:*", "rows", "0", "json.facet", - "{ xxx : { type:range, field:" + fieldName + ", " + - " start:'4.00"+suffix+"', gap:'1.00"+suffix+"', end:'11.00"+suffix+"', other:all } }") - ,"count(//lst[@name='xxx']/arr[@name='buckets']/lst)=7" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='4.00,USD']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='5.00,USD']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='6.00,USD']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='7.00,USD']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='8.00,USD']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='9.00,USD']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='10.00,USD']]" - ,"//lst[@name='xxx']/lst[@name='before' ]/long[@name='count'][.='1']" - ,"//lst[@name='xxx']/lst[@name='after' ]/long[@name='count'][.='1']" - ,"//lst[@name='xxx']/lst[@name='between']/long[@name='count'][.='3']" - ); + assertQ( + "Ensure that we get correct facet counts back in USD (explicit or implicit default) (facet.range)", + req( + "fl", + "*,score", + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.range", + fieldName, + "f." + fieldName + ".facet.range.start", + "4.00" + suffix, + "f." + fieldName + ".facet.range.end", + "11.00" + suffix, + "f." + fieldName + ".facet.range.gap", + "1.00" + suffix, + "f." 
+ fieldName + ".facet.range.other", + "all"), + "count(//lst[@name='counts']/int)=7", + "//lst[@name='counts']/int[@name='4.00,USD']='1'", + "//lst[@name='counts']/int[@name='5.00,USD']='0'", + "//lst[@name='counts']/int[@name='6.00,USD']='0'", + "//lst[@name='counts']/int[@name='7.00,USD']='1'", + "//lst[@name='counts']/int[@name='8.00,USD']='0'", + "//lst[@name='counts']/int[@name='9.00,USD']='0'", + "//lst[@name='counts']/int[@name='10.00,USD']='1'", + "//int[@name='after']='1'", + "//int[@name='before']='1'", + "//int[@name='between']='3'"); + assertQ( + "Ensure that we get correct facet counts back in USD (explicit or implicit default) (json.facet)", + req( + "fl", + "*,score", + "q", + "*:*", + "rows", + "0", + "json.facet", + "{ xxx : { type:range, field:" + + fieldName + + ", " + + " start:'4.00" + + suffix + + "', gap:'1.00" + + suffix + + "', end:'11.00" + + suffix + + "', other:all } }"), + "count(//lst[@name='xxx']/arr[@name='buckets']/lst)=7", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='4.00,USD']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='5.00,USD']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='6.00,USD']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='7.00,USD']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='8.00,USD']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='9.00,USD']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='10.00,USD']]", + "//lst[@name='xxx']/lst[@name='before' ]/long[@name='count'][.='1']", + "//lst[@name='xxx']/lst[@name='after' ]/long[@name='count'][.='1']", + "//lst[@name='xxx']/lst[@name='between']/long[@name='count'][.='3']"); } - assertQ("Zero value as start range point + mincount (facet.range)", - req("fl", "*,score", "q", "*:*", "rows", "0", "facet", "true", "facet.mincount", "1", - "facet.range", fieldName, - "f." + fieldName + ".facet.range.start", "0,USD", - "f." + fieldName + ".facet.range.end", "11.00,USD", - "f." + fieldName + ".facet.range.gap", "1.00,USD", - "f." 
+ fieldName + ".facet.range.other", "all") - ,"count(//lst[@name='counts']/int)=4" - ,"//lst[@name='counts']/int[@name='3.00,USD']='1'" - ,"//lst[@name='counts']/int[@name='4.00,USD']='1'" - ,"//lst[@name='counts']/int[@name='7.00,USD']='1'" - ,"//lst[@name='counts']/int[@name='10.00,USD']='1'" - ,"//int[@name='before']='0'" - ,"//int[@name='after']='1'" - ,"//int[@name='between']='4'" - ); - assertQ("Zero value as start range point + mincount (json.facet)", - req("fl", "*,score", "q", "*:*", "rows", "0", "json.facet", - "{ xxx : { type:range, mincount:1, field:" + fieldName + - ", start:'0.00,USD', gap:'1.00,USD', end:'11.00,USD', other:all } }") - ,"count(//lst[@name='xxx']/arr[@name='buckets']/lst)=4" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='3.00,USD']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='4.00,USD']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='7.00,USD']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='10.00,USD']]" - ,"//lst[@name='xxx']/lst[@name='before' ]/long[@name='count'][.='0']" - ,"//lst[@name='xxx']/lst[@name='after' ]/long[@name='count'][.='1']" - ,"//lst[@name='xxx']/lst[@name='between']/long[@name='count'][.='4']" - ); - - // NOTE: because of asymetric EUR exchange rate, these buckets are diff then the similar looking USD based request above + assertQ( + "Zero value as start range point + mincount (facet.range)", + req( + "fl", + "*,score", + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.mincount", + "1", + "facet.range", + fieldName, + "f." + fieldName + ".facet.range.start", + "0,USD", + "f." + fieldName + ".facet.range.end", + "11.00,USD", + "f." + fieldName + ".facet.range.gap", + "1.00,USD", + "f." + fieldName + ".facet.range.other", + "all"), + "count(//lst[@name='counts']/int)=4", + "//lst[@name='counts']/int[@name='3.00,USD']='1'", + "//lst[@name='counts']/int[@name='4.00,USD']='1'", + "//lst[@name='counts']/int[@name='7.00,USD']='1'", + "//lst[@name='counts']/int[@name='10.00,USD']='1'", + "//int[@name='before']='0'", + "//int[@name='after']='1'", + "//int[@name='between']='4'"); + assertQ( + "Zero value as start range point + mincount (json.facet)", + req( + "fl", + "*,score", + "q", + "*:*", + "rows", + "0", + "json.facet", + "{ xxx : { type:range, mincount:1, field:" + + fieldName + + ", start:'0.00,USD', gap:'1.00,USD', end:'11.00,USD', other:all } }"), + "count(//lst[@name='xxx']/arr[@name='buckets']/lst)=4", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='3.00,USD']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='4.00,USD']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='7.00,USD']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='10.00,USD']]", + "//lst[@name='xxx']/lst[@name='before' ]/long[@name='count'][.='0']", + "//lst[@name='xxx']/lst[@name='after' ]/long[@name='count'][.='1']", + "//lst[@name='xxx']/lst[@name='between']/long[@name='count'][.='4']"); + + // NOTE: because of asymetric EUR exchange rate, these buckets are diff then the similar looking + // USD based request above // This request converts the values in each doc into EUR to decide what range buck it's in. 
- assertQ("Ensure that we get correct facet counts back in EUR (facet.range)", - req("fl", "*,score", "q", "*:*", "rows", "0", "facet", "true", - "facet.range", fieldName, - "f." + fieldName + ".facet.range.start", "8.00,EUR", - "f." + fieldName + ".facet.range.end", "22.00,EUR", - "f." + fieldName + ".facet.range.gap", "2.00,EUR", - "f." + fieldName + ".facet.range.other", "all" - ) - , "count(//lst[@name='counts']/int)=7" - , "//lst[@name='counts']/int[@name='8.00,EUR']='0'" - , "//lst[@name='counts']/int[@name='10.00,EUR']='0'" - , "//lst[@name='counts']/int[@name='12.00,EUR']='1'" - , "//lst[@name='counts']/int[@name='14.00,EUR']='1'" - , "//lst[@name='counts']/int[@name='16.00,EUR']='0'" - , "//lst[@name='counts']/int[@name='18.00,EUR']='0'" - , "//lst[@name='counts']/int[@name='20.00,EUR']='0'" - , "//int[@name='before']='2'" - , "//int[@name='after']='1'" - , "//int[@name='between']='2'" - ); - assertQ("Ensure that we get correct facet counts back in EUR (json.facet)", - req("fl", "*,score", "q", "*:*", "rows", "0", "json.facet", - "{ xxx : { type:range, field:" + fieldName + ", start:'8.00,EUR', gap:'2.00,EUR', end:'22.00,EUR', other:all } }") - ,"count(//lst[@name='xxx']/arr[@name='buckets']/lst)=7" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='8.00,EUR']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='10.00,EUR']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='12.00,EUR']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='14.00,EUR']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='16.00,EUR']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='18.00,EUR']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='20.00,EUR']]" - ,"//lst[@name='xxx']/lst[@name='before' ]/long[@name='count'][.='2']" - ,"//lst[@name='xxx']/lst[@name='after' ]/long[@name='count'][.='1']" - ,"//lst[@name='xxx']/lst[@name='between']/long[@name='count'][.='2']" - ); - - - // GBP has a symetric echange rate with USD, so these counts are *similar* to the USD based request above... - // but the asymetric EUR/USD rate means that when computing counts realtive to GBP the EUR based docs wind up in + assertQ( + "Ensure that we get correct facet counts back in EUR (facet.range)", + req( + "fl", + "*,score", + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.range", + fieldName, + "f." + fieldName + ".facet.range.start", + "8.00,EUR", + "f." + fieldName + ".facet.range.end", + "22.00,EUR", + "f." + fieldName + ".facet.range.gap", + "2.00,EUR", + "f." 
+ fieldName + ".facet.range.other", + "all"), + "count(//lst[@name='counts']/int)=7", + "//lst[@name='counts']/int[@name='8.00,EUR']='0'", + "//lst[@name='counts']/int[@name='10.00,EUR']='0'", + "//lst[@name='counts']/int[@name='12.00,EUR']='1'", + "//lst[@name='counts']/int[@name='14.00,EUR']='1'", + "//lst[@name='counts']/int[@name='16.00,EUR']='0'", + "//lst[@name='counts']/int[@name='18.00,EUR']='0'", + "//lst[@name='counts']/int[@name='20.00,EUR']='0'", + "//int[@name='before']='2'", + "//int[@name='after']='1'", + "//int[@name='between']='2'"); + assertQ( + "Ensure that we get correct facet counts back in EUR (json.facet)", + req( + "fl", + "*,score", + "q", + "*:*", + "rows", + "0", + "json.facet", + "{ xxx : { type:range, field:" + + fieldName + + ", start:'8.00,EUR', gap:'2.00,EUR', end:'22.00,EUR', other:all } }"), + "count(//lst[@name='xxx']/arr[@name='buckets']/lst)=7", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='8.00,EUR']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='10.00,EUR']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='12.00,EUR']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='14.00,EUR']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='16.00,EUR']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='18.00,EUR']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='20.00,EUR']]", + "//lst[@name='xxx']/lst[@name='before' ]/long[@name='count'][.='2']", + "//lst[@name='xxx']/lst[@name='after' ]/long[@name='count'][.='1']", + "//lst[@name='xxx']/lst[@name='between']/long[@name='count'][.='2']"); + + // GBP has a symetric echange rate with USD, so these counts are *similar* to the USD based + // request above... + // but the asymetric EUR/USD rate means that when computing counts realtive to GBP the EUR based + // docs wind up in // diff buckets - assertQ("Ensure that we get correct facet counts back in GBP (facet.range)", - req("fl", "*,score", "q", "*:*", "rows", "0", "facet", "true", - "facet.range", fieldName, - "f." + fieldName + ".facet.range.start", "2.00,GBP", - "f." + fieldName + ".facet.range.end", "5.50,GBP", - "f." + fieldName + ".facet.range.gap", "0.50,GBP", - "f." 
+ fieldName + ".facet.range.other", "all" - ) - , "count(//lst[@name='counts']/int)=7" - , "//lst[@name='counts']/int[@name='2.00,GBP']='1'" - , "//lst[@name='counts']/int[@name='2.50,GBP']='0'" - , "//lst[@name='counts']/int[@name='3.00,GBP']='0'" - , "//lst[@name='counts']/int[@name='3.50,GBP']='1'" - , "//lst[@name='counts']/int[@name='4.00,GBP']='0'" - , "//lst[@name='counts']/int[@name='4.50,GBP']='0'" - , "//lst[@name='counts']/int[@name='5.00,GBP']='1'" - , "//int[@name='before']='0'" - , "//int[@name='after']='2'" - , "//int[@name='between']='3'" - ); - assertQ("Ensure that we get correct facet counts back in GBP (json.facet)", - req("fl", "*,score", "q", "*:*", "rows", "0", "json.facet", - "{ xxx : { type:range, field:" + fieldName + ", start:'2.00,GBP', gap:'0.50,GBP', end:'5.50,GBP', other:all } }") - ,"count(//lst[@name='xxx']/arr[@name='buckets']/lst)=7" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='2.00,GBP']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='2.50,GBP']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='3.00,GBP']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='3.50,GBP']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='4.00,GBP']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='4.50,GBP']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='5.00,GBP']]" - ,"//lst[@name='xxx']/lst[@name='before' ]/long[@name='count'][.='0']" - ,"//lst[@name='xxx']/lst[@name='after' ]/long[@name='count'][.='2']" - ,"//lst[@name='xxx']/lst[@name='between']/long[@name='count'][.='3']" - ); - - assertQ("Ensure that we can set a gap in a currency other than the start and end currencies (facet.range)", - req("fl", "*,score", "q", "*:*", "rows", "0", "facet", "true", - "facet.range", fieldName, - "f." + fieldName + ".facet.range.start", "4.00,USD", - "f." + fieldName + ".facet.range.end", "11.00,USD", - "f." + fieldName + ".facet.range.gap", "0.50,GBP", - "f." 
+ fieldName + ".facet.range.other", "all" - ) - , "count(//lst[@name='counts']/int)=7" - , "//lst[@name='counts']/int[@name='4.00,USD']='1'" - , "//lst[@name='counts']/int[@name='5.00,USD']='0'" - , "//lst[@name='counts']/int[@name='6.00,USD']='0'" - , "//lst[@name='counts']/int[@name='7.00,USD']='1'" - , "//lst[@name='counts']/int[@name='8.00,USD']='0'" - , "//lst[@name='counts']/int[@name='9.00,USD']='0'" - , "//lst[@name='counts']/int[@name='10.00,USD']='1'" - , "//int[@name='before']='1'" - , "//int[@name='after']='1'" - , "//int[@name='between']='3'" - ); - assertQ("Ensure that we can set a gap in a currency other than the start and end currencies (json.facet)", - req("fl", "*,score", "q", "*:*", "rows", "0", "json.facet", - "{ xxx : { type:range, field:" + fieldName + ", start:'4.00,USD', gap:'0.50,GBP', end:'11.00,USD', other:all } }") - ,"count(//lst[@name='xxx']/arr[@name='buckets']/lst)=7" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='4.00,USD']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='5.00,USD']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='6.00,USD']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='7.00,USD']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='8.00,USD']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='9.00,USD']]" - ,"//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='10.00,USD']]" - - ,"//lst[@name='xxx']/lst[@name='before' ]/long[@name='count'][.='1']" - ,"//lst[@name='xxx']/lst[@name='after' ]/long[@name='count'][.='1']" - ,"//lst[@name='xxx']/lst[@name='between']/long[@name='count'][.='3']" - ); - - for (SolrParams facet : Arrays.asList(params("facet", "true", - "facet.range", fieldName, - "f." + fieldName + ".facet.range.start", "4.00,USD", - "f." + fieldName + ".facet.range.end", "11.00,EUR", - "f." + fieldName + ".facet.range.gap", "1.00,USD", - "f." + fieldName + ".facet.range.other", "all"), - params("json.facet", - "{ xxx : { type:range, field:" + fieldName + ", start:'4.00,USD', " + - " gap:'1.00,USD', end:'11.00,EUR', other:all } }"))) { - assertQEx("Ensure that we throw an error if we try to use different start and end currencies", - "Cannot compare CurrencyValues when their currencies are not equal", - req(facet, "q", "*:*"), - SolrException.ErrorCode.BAD_REQUEST); + assertQ( + "Ensure that we get correct facet counts back in GBP (facet.range)", + req( + "fl", + "*,score", + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.range", + fieldName, + "f." + fieldName + ".facet.range.start", + "2.00,GBP", + "f." + fieldName + ".facet.range.end", + "5.50,GBP", + "f." + fieldName + ".facet.range.gap", + "0.50,GBP", + "f." 
+ fieldName + ".facet.range.other", + "all"), + "count(//lst[@name='counts']/int)=7", + "//lst[@name='counts']/int[@name='2.00,GBP']='1'", + "//lst[@name='counts']/int[@name='2.50,GBP']='0'", + "//lst[@name='counts']/int[@name='3.00,GBP']='0'", + "//lst[@name='counts']/int[@name='3.50,GBP']='1'", + "//lst[@name='counts']/int[@name='4.00,GBP']='0'", + "//lst[@name='counts']/int[@name='4.50,GBP']='0'", + "//lst[@name='counts']/int[@name='5.00,GBP']='1'", + "//int[@name='before']='0'", + "//int[@name='after']='2'", + "//int[@name='between']='3'"); + assertQ( + "Ensure that we get correct facet counts back in GBP (json.facet)", + req( + "fl", + "*,score", + "q", + "*:*", + "rows", + "0", + "json.facet", + "{ xxx : { type:range, field:" + + fieldName + + ", start:'2.00,GBP', gap:'0.50,GBP', end:'5.50,GBP', other:all } }"), + "count(//lst[@name='xxx']/arr[@name='buckets']/lst)=7", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='2.00,GBP']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='2.50,GBP']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='3.00,GBP']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='3.50,GBP']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='4.00,GBP']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='4.50,GBP']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='5.00,GBP']]", + "//lst[@name='xxx']/lst[@name='before' ]/long[@name='count'][.='0']", + "//lst[@name='xxx']/lst[@name='after' ]/long[@name='count'][.='2']", + "//lst[@name='xxx']/lst[@name='between']/long[@name='count'][.='3']"); + + assertQ( + "Ensure that we can set a gap in a currency other than the start and end currencies (facet.range)", + req( + "fl", + "*,score", + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.range", + fieldName, + "f." + fieldName + ".facet.range.start", + "4.00,USD", + "f." + fieldName + ".facet.range.end", + "11.00,USD", + "f." + fieldName + ".facet.range.gap", + "0.50,GBP", + "f." 
+ fieldName + ".facet.range.other", + "all"), + "count(//lst[@name='counts']/int)=7", + "//lst[@name='counts']/int[@name='4.00,USD']='1'", + "//lst[@name='counts']/int[@name='5.00,USD']='0'", + "//lst[@name='counts']/int[@name='6.00,USD']='0'", + "//lst[@name='counts']/int[@name='7.00,USD']='1'", + "//lst[@name='counts']/int[@name='8.00,USD']='0'", + "//lst[@name='counts']/int[@name='9.00,USD']='0'", + "//lst[@name='counts']/int[@name='10.00,USD']='1'", + "//int[@name='before']='1'", + "//int[@name='after']='1'", + "//int[@name='between']='3'"); + assertQ( + "Ensure that we can set a gap in a currency other than the start and end currencies (json.facet)", + req( + "fl", + "*,score", + "q", + "*:*", + "rows", + "0", + "json.facet", + "{ xxx : { type:range, field:" + + fieldName + + ", start:'4.00,USD', gap:'0.50,GBP', end:'11.00,USD', other:all } }"), + "count(//lst[@name='xxx']/arr[@name='buckets']/lst)=7", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='4.00,USD']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='5.00,USD']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='6.00,USD']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='7.00,USD']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='8.00,USD']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='9.00,USD']]", + "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='10.00,USD']]", + "//lst[@name='xxx']/lst[@name='before' ]/long[@name='count'][.='1']", + "//lst[@name='xxx']/lst[@name='after' ]/long[@name='count'][.='1']", + "//lst[@name='xxx']/lst[@name='between']/long[@name='count'][.='3']"); + + for (SolrParams facet : + Arrays.asList( + params( + "facet", + "true", + "facet.range", + fieldName, + "f." + fieldName + ".facet.range.start", + "4.00,USD", + "f." + fieldName + ".facet.range.end", + "11.00,EUR", + "f." + fieldName + ".facet.range.gap", + "1.00,USD", + "f." 
+ fieldName + ".facet.range.other", + "all"), + params( + "json.facet", + "{ xxx : { type:range, field:" + + fieldName + + ", start:'4.00,USD', " + + " gap:'1.00,USD', end:'11.00,EUR', other:all } }"))) { + assertQEx( + "Ensure that we throw an error if we try to use different start and end currencies", + "Cannot compare CurrencyValues when their currencies are not equal", + req(facet, "q", "*:*"), + SolrException.ErrorCode.BAD_REQUEST); } } @Test public void testMockFieldType() throws Exception { - assumeTrue("This test is only applicable to the mock exchange rate provider", + assumeTrue( + "This test is only applicable to the mock exchange rate provider", expectedProviderClass.equals(MockExchangeRateProvider.class)); clearIndex(); @@ -704,15 +871,25 @@ public void testMockFieldType() throws Exception { assertU(adoc("id", "3", fieldName, "1.00,NOK")); assertU(commit()); - assertQ(req("fl", "*,score", "q", fieldName+":5.0,NOK"), "//*[@numFound='1']", "//str[@name='id']='1'"); - assertQ(req("fl", "*,score", "q", fieldName+":1.2,USD"), "//*[@numFound='1']", "//str[@name='id']='2'"); - assertQ(req("fl", "*,score", "q", fieldName+":0.2,USD"), "//*[@numFound='1']", "//str[@name='id']='3'"); - assertQ(req("fl", "*,score", "q", fieldName+":99,USD"), "//*[@numFound='0']"); + assertQ( + req("fl", "*,score", "q", fieldName + ":5.0,NOK"), + "//*[@numFound='1']", + "//str[@name='id']='1'"); + assertQ( + req("fl", "*,score", "q", fieldName + ":1.2,USD"), + "//*[@numFound='1']", + "//str[@name='id']='2'"); + assertQ( + req("fl", "*,score", "q", fieldName + ":0.2,USD"), + "//*[@numFound='1']", + "//str[@name='id']='3'"); + assertQ(req("fl", "*,score", "q", fieldName + ":99,USD"), "//*[@numFound='0']"); } @Test public void testAsymmetricPointQuery() throws Exception { - assumeTrue("This test is only applicable to the XML file based exchange rate provider", + assumeTrue( + "This test is only applicable to the XML file based exchange rate provider", expectedProviderClass.equals(FileExchangeRateProvider.class)); clearIndex(); @@ -720,9 +897,9 @@ public void testAsymmetricPointQuery() throws Exception { assertU(adoc("id", "" + 2, fieldName, "15.00,EUR")); assertU(commit()); - assertQ(req("fl", "*,score", "q", fieldName+":15.00,EUR"), "//str[@name='id']='2'"); - assertQ(req("fl", "*,score", "q", fieldName+":7.50,USD"), "//str[@name='id']='2'"); - assertQ(req("fl", "*,score", "q", fieldName+":7.49,USD"), "//*[@numFound='0']"); - assertQ(req("fl", "*,score", "q", fieldName+":7.51,USD"), "//*[@numFound='0']"); + assertQ(req("fl", "*,score", "q", fieldName + ":15.00,EUR"), "//str[@name='id']='2'"); + assertQ(req("fl", "*,score", "q", fieldName + ":7.50,USD"), "//str[@name='id']='2'"); + assertQ(req("fl", "*,score", "q", fieldName + ":7.49,USD"), "//*[@numFound='0']"); + assertQ(req("fl", "*,score", "q", fieldName + ":7.51,USD"), "//*[@numFound='0']"); } } diff --git a/solr/core/src/test/org/apache/solr/schema/CustomAnalyzerStrField.java b/solr/core/src/test/org/apache/solr/schema/CustomAnalyzerStrField.java index c09eecf4a40..6b02fb2dae6 100644 --- a/solr/core/src/test/org/apache/solr/schema/CustomAnalyzerStrField.java +++ b/solr/core/src/test/org/apache/solr/schema/CustomAnalyzerStrField.java @@ -18,18 +18,16 @@ import java.util.HashMap; import java.util.Random; - import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.core.KeywordTokenizerFactory; -import org.apache.lucene.analysis.TokenFilterFactory; import org.apache.lucene.analysis.CharFilterFactory; +import 
org.apache.lucene.analysis.TokenFilterFactory; +import org.apache.lucene.analysis.core.KeywordTokenizerFactory; import org.apache.lucene.util.LuceneTestCase; - import org.apache.solr.analysis.TokenizerChain; import org.apache.solr.handler.admin.LukeRequestHandlerTest; // jdoc /** - * A Test only custom FieldType that specifies null for various params when constructing + * A Test only custom FieldType that specifies null for various params when constructing * TokenizerChain instances to ensure that they are still well behaved. * * @see LukeRequestHandlerTest#testNullFactories @@ -42,15 +40,17 @@ public CustomAnalyzerStrField() { Random r = LuceneTestCase.random(); // two arg constructor - Analyzer a2 = new TokenizerChain - (new KeywordTokenizerFactory(new HashMap<>()), - r.nextBoolean() ? null : new TokenFilterFactory[0]); - + Analyzer a2 = + new TokenizerChain( + new KeywordTokenizerFactory(new HashMap<>()), + r.nextBoolean() ? null : new TokenFilterFactory[0]); + // three arg constructor - Analyzer a3 = new TokenizerChain - (r.nextBoolean() ? null : new CharFilterFactory[0], - new KeywordTokenizerFactory(new HashMap<>()), - r.nextBoolean() ? null : new TokenFilterFactory[0]); + Analyzer a3 = + new TokenizerChain( + r.nextBoolean() ? null : new CharFilterFactory[0], + new KeywordTokenizerFactory(new HashMap<>()), + r.nextBoolean() ? null : new TokenFilterFactory[0]); if (r.nextBoolean()) { indexAnalyzer = a2; diff --git a/solr/core/src/test/org/apache/solr/schema/DateFieldTest.java b/solr/core/src/test/org/apache/solr/schema/DateFieldTest.java index a3aa96a6208..b908fa1ecd7 100644 --- a/solr/core/src/test/org/apache/solr/schema/DateFieldTest.java +++ b/solr/core/src/test/org/apache/solr/schema/DateFieldTest.java @@ -20,7 +20,6 @@ import java.nio.file.Paths; import java.util.Collections; import java.util.Date; - import org.apache.lucene.index.IndexableField; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.util.ByteArrayUtf8CharSequence; @@ -28,37 +27,35 @@ public class DateFieldTest extends SolrTestCaseJ4 { private final String testInstanceDir = TEST_HOME() + File.separator + "collection1"; - private final String testConfHome = testInstanceDir + File.separator + "conf"+ File.separator; + private final String testConfHome = testInstanceDir + File.separator + "conf" + File.separator; private FieldType f = null; @Override - public void setUp() throws Exception { + public void setUp() throws Exception { super.setUp(); // set some system properties for use by tests System.setProperty("solr.test.sys.prop1", "propone"); System.setProperty("solr.test.sys.prop2", "proptwo"); - SolrConfig config = new SolrConfig - (Paths.get(testInstanceDir), testConfHome + "solrconfig.xml"); + SolrConfig config = new SolrConfig(Paths.get(testInstanceDir), testConfHome + "solrconfig.xml"); IndexSchema schema = IndexSchemaFactory.buildIndexSchema(testConfHome + "schema.xml", config); - f = Boolean.getBoolean(NUMERIC_POINTS_SYSPROP) - ? new DatePointField() : new TrieDateField(); - f.init(schema, Collections.emptyMap()); + f = Boolean.getBoolean(NUMERIC_POINTS_SYSPROP) ? 
new DatePointField() : new TrieDateField(); + f.init(schema, Collections.emptyMap()); } // NOTE: Many other tests were moved to DateMathParserTest public void testCreateField() { int props = FieldProperties.INDEXED ^ FieldProperties.STORED; - SchemaField sf = new SchemaField( "test", f, props, null ); + SchemaField sf = new SchemaField("test", f, props, null); // String - IndexableField out = f.createField(sf, "1995-12-31T23:59:59Z" ); - assertEquals(820454399000L, ((Date) f.toObject( out )).getTime() ); + IndexableField out = f.createField(sf, "1995-12-31T23:59:59Z"); + assertEquals(820454399000L, ((Date) f.toObject(out)).getTime()); // Date obj - out = f.createField(sf, new Date(820454399000L) ); - assertEquals(820454399000L, ((Date) f.toObject( out )).getTime() ); + out = f.createField(sf, new Date(820454399000L)); + assertEquals(820454399000L, ((Date) f.toObject(out)).getTime()); // Date math out = f.createField(sf, "1995-12-31T23:59:59.99Z+5MINUTES"); - assertEquals(820454699990L, ((Date) f.toObject( out )).getTime() ); + assertEquals(820454699990L, ((Date) f.toObject(out)).getTime()); } public void testToNativeType() { @@ -68,5 +65,4 @@ public void testToNativeType() { Date val = (Date) ft.toNativeType(charSequence); assertEquals("1995-12-31T23:59:59Z", val.toInstant().toString()); } - } diff --git a/solr/core/src/test/org/apache/solr/schema/DateRangeFieldTest.java b/solr/core/src/test/org/apache/solr/schema/DateRangeFieldTest.java index c7b935128b9..27aace77e4c 100644 --- a/solr/core/src/test/org/apache/solr/schema/DateRangeFieldTest.java +++ b/solr/core/src/test/org/apache/solr/schema/DateRangeFieldTest.java @@ -32,14 +32,19 @@ public void test() { assertU(adoc("id", "0", "dateRange", "[* TO *]")); assertU(adoc("id", "1", "dateRange", "2014-05-21T12:00:00.000Z")); assertU(adoc("id", "2", "dateRange", "[2000 TO 2014-05-21]")); - assertU(adoc("id", "3", "dateRange", "2020-05-21T12:00:00.000Z/DAY"));//DateMath syntax + assertU(adoc("id", "3", "dateRange", "2020-05-21T12:00:00.000Z/DAY")); // DateMath syntax assertU(commit()); - - //ensure stored value resolves datemath - assertQ(req("q", "id:1", "fl", "dateRange"), "//result/doc/arr[@name='dateRange']/str[.='2014-05-21T12:00:00Z']");//no 000 ms - assertQ(req("q", "id:2", "fl", "dateRange"), "//result/doc/arr[@name='dateRange']/str[.='[2000 TO 2014-05-21]']");//a range; same - assertQ(req("q", "id:3", "fl", "dateRange"), "//result/doc/arr[@name='dateRange']/str[.='2020-05-21T00:00:00Z']");//resolve datemath + // ensure stored value resolves datemath + assertQ( + req("q", "id:1", "fl", "dateRange"), + "//result/doc/arr[@name='dateRange']/str[.='2014-05-21T12:00:00Z']"); // no 000 ms + assertQ( + req("q", "id:2", "fl", "dateRange"), + "//result/doc/arr[@name='dateRange']/str[.='[2000 TO 2014-05-21]']"); // a range; same + assertQ( + req("q", "id:3", "fl", "dateRange"), + "//result/doc/arr[@name='dateRange']/str[.='2020-05-21T00:00:00Z']"); // resolve datemath String[] commonParams = {"q", "{!field f=dateRange op=$op v=$qq}", "sort", "id asc"}; assertQ(req(commonParams, "qq", "[* TO *]"), xpathMatches(0, 1, 2, 3)); @@ -49,10 +54,10 @@ public void test() { assertQ(req(commonParams, "qq", "[1999 TO 2001]", "op", "IsWithin"), xpathMatches()); assertQ(req(commonParams, "qq", "2014-05", "op", "IsWithin"), xpathMatches(1)); - assertQ(req("q", "dateRange:[1998 TO 2000}"), xpathMatches(0));//exclusive end, so we barely miss one doc - + // exclusive end, so we barely miss one doc + assertQ(req("q", "dateRange:[1998 TO 2000}"), xpathMatches(0)); - 
//show without local-params + // show without local-params assertQ(req("q", "dateRange:[* TO *]"), xpathMatches(0, 1, 2, 3)); assertQ(req("q", "dateRange:*"), xpathMatches(0, 1, 2, 3)); assertQ(req("q", "dateRange:\"2014-05-21T12:00:00.000Z\""), xpathMatches(0, 1, 2)); @@ -63,37 +68,66 @@ public void testBeforeGregorianChangeDate() { // GCD is the year 1582 assertU(delQ("*:*")); assertU(adoc("id", "0", "dateRange", "1500-01-01T00:00:00Z")); assertU(adoc("id", "1", "dateRange", "-1500-01-01T00:00:00Z")); // BC - assertU(adoc("id", "2", "dateRange", "1400-01-01T00:00:00Z/YEAR")); // date math of month or year can cause issues + assertU( + adoc( + "id", + "2", + "dateRange", + "1400-01-01T00:00:00Z/YEAR")); // date math of month or year can cause issues assertU(adoc("id", "3", "dateRange", "1300")); // the whole year of 1300 assertU(commit()); - //ensure round-trip toString - assertQ(req("q", "id:0", "fl", "dateRange"), "//result/doc/arr[@name='dateRange']/str[.='1500-01-01T00:00:00Z']"); - assertQ(req("q", "id:1", "fl", "dateRange"), "//result/doc/arr[@name='dateRange']/str[.='-1500-01-01T00:00:00Z']"); + // ensure round-trip toString + assertQ( + req("q", "id:0", "fl", "dateRange"), + "//result/doc/arr[@name='dateRange']/str[.='1500-01-01T00:00:00Z']"); + assertQ( + req("q", "id:1", "fl", "dateRange"), + "//result/doc/arr[@name='dateRange']/str[.='-1500-01-01T00:00:00Z']"); // note: fixed by SOLR-9080, would instead find "1399-01-09T00:00:00Z" - assertQ(req("q", "id:2", "fl", "dateRange"), "//result/doc/arr[@name='dateRange']/str[.='1400-01-01T00:00:00Z']"); - assertQ(req("q", "id:3", "fl", "dateRange"), "//result/doc/arr[@name='dateRange']/str[.='1300']"); - - //ensure range syntax works - assertQ(req("q", "dateRange:[1450-01-01T00:00:00Z TO 1499-12-31T23:59:59Z]"), xpathMatches());// before - assertQ(req("q", "dateRange:[1500-01-01T00:00:00Z TO 1500-01-01T00:00:00Z]"), xpathMatches(0));// spot on - assertQ(req("q", "dateRange:[1500-01-01T00:00:01Z TO 1550-01-01T00:00:00Z]"), xpathMatches());// after - - assertQ(req("q", "dateRange:[-1500-01-01T00:00:00Z TO -1500-01-01T00:00:00Z]"), xpathMatches(1)); + assertQ( + req("q", "id:2", "fl", "dateRange"), + "//result/doc/arr[@name='dateRange']/str[.='1400-01-01T00:00:00Z']"); + assertQ( + req("q", "id:3", "fl", "dateRange"), "//result/doc/arr[@name='dateRange']/str[.='1300']"); + + // ensure range syntax works + assertQ( + req("q", "dateRange:[1450-01-01T00:00:00Z TO 1499-12-31T23:59:59Z]"), + xpathMatches()); // before + assertQ( + req("q", "dateRange:[1500-01-01T00:00:00Z TO 1500-01-01T00:00:00Z]"), + xpathMatches(0)); // spot on + assertQ( + req("q", "dateRange:[1500-01-01T00:00:01Z TO 1550-01-01T00:00:00Z]"), + xpathMatches()); // after + + assertQ( + req("q", "dateRange:[-1500-01-01T00:00:00Z TO -1500-01-01T00:00:00Z]"), xpathMatches(1)); // do range queries in the vicinity of docId=3 val:"1300" - assertQ(req("q", "dateRange:[1299 TO 1299-12-31T23:59:59Z]"), xpathMatches());//adjacent - assertQ(req("q", "dateRange:[1299 TO 1300-01-01T00:00:00Z]"), xpathMatches(3));// expand + 1 sec + assertQ(req("q", "dateRange:[1299 TO 1299-12-31T23:59:59Z]"), xpathMatches()); // adjacent + assertQ( + req("q", "dateRange:[1299 TO 1300-01-01T00:00:00Z]"), xpathMatches(3)); // expand + 1 sec assertQ(req("q", "dateRange:1301"), xpathMatches()); // adjacent - assertQ(req("q", "dateRange:[1300-12-31T23:59:59Z TO 1301]"), xpathMatches(3)); // expand + 1 sec + assertQ( + req("q", "dateRange:[1300-12-31T23:59:59Z TO 1301]"), xpathMatches(3)); // expand + 1 sec } 
@Test public void testMultiValuedDateRanges() { assertU(delQ("*:*")); assertU(adoc("id", "0", "dateRange", "[2000 TO 2010]", "dateRange", "[2011 TO 2014]")); - assertU(adoc("id", "1", "dateRange", "[2000-01 TO 2010-10]", "dateRange", "[2010-11 TO 2014-12]")); - assertU(adoc("id", "2", "dateRange", "[2000-01-01 TO 2010-08-01]", "dateRange", "[2010-08-01 TO 2014-12-01]")); + assertU( + adoc("id", "1", "dateRange", "[2000-01 TO 2010-10]", "dateRange", "[2010-11 TO 2014-12]")); + assertU( + adoc( + "id", + "2", + "dateRange", + "[2000-01-01 TO 2010-08-01]", + "dateRange", + "[2010-08-01 TO 2014-12-01]")); assertU(adoc("id", "3", "dateRange", "[1990 TO 1995]", "dateRange", "[1997 TO 1999]")); assertU(commit()); @@ -104,22 +138,28 @@ public void testMultiValuedDateRanges() { assertQ(req(commonParams, "qq", "[2000 TO 2014]", "op", "Contains"), xpathMatches(0, 1)); assertQ(req(commonParams, "qq", "[2000 TO 2015]", "op", "Contains"), xpathMatches()); - assertQ(req(commonParams, "qq", "[2000-01 TO 2014-12]", "op", "IsWithin"), xpathMatches(0, 1, 2)); + assertQ( + req(commonParams, "qq", "[2000-01 TO 2014-12]", "op", "IsWithin"), xpathMatches(0, 1, 2)); assertQ(req(commonParams, "qq", "[2000 TO 2014-11]", "op", "IsWithin"), xpathMatches()); assertQ(req(commonParams, "qq", "[2000-01 TO 2014-12]", "op", "Contains"), xpathMatches(0, 1)); - assertQ(req(commonParams, "qq", "[2000-01-01 TO 2014-12-31]", "op", "IsWithin"), xpathMatches(0, 1, 2)); - assertQ(req(commonParams, "qq", "[2000-01-01 TO 2014-12-01]", "op", "Contains"), xpathMatches(0, 1, 2)); + assertQ( + req(commonParams, "qq", "[2000-01-01 TO 2014-12-31]", "op", "IsWithin"), + xpathMatches(0, 1, 2)); + assertQ( + req(commonParams, "qq", "[2000-01-01 TO 2014-12-01]", "op", "Contains"), + xpathMatches(0, 1, 2)); assertQ(req(commonParams, "qq", "[2000 TO 2000]", "op", "Contains"), xpathMatches(0, 1, 2)); assertQ(req(commonParams, "qq", "[2000 TO 2000]", "op", "Contains"), xpathMatches(0, 1, 2)); - assertQ(req(commonParams, "qq", "[1996-01-01 TO 1996-12-31]", "op", "Contains"), xpathMatches()); + assertQ( + req(commonParams, "qq", "[1996-01-01 TO 1996-12-31]", "op", "Contains"), xpathMatches()); } private String[] xpathMatches(int... docIds) { String[] tests = new String[docIds != null ? docIds.length + 1 : 1]; - tests[0] = "*[count(//doc)=" + (tests.length-1) + "]"; + tests[0] = "*[count(//doc)=" + (tests.length - 1) + "]"; if (docIds != null && docIds.length > 0) { int i = 1; for (int docId : docIds) { @@ -128,5 +168,4 @@ private String[] xpathMatches(int... 
docIds) { } return tests; } - } diff --git a/solr/core/src/test/org/apache/solr/schema/DenseVectorFieldTest.java b/solr/core/src/test/org/apache/solr/schema/DenseVectorFieldTest.java index 204a2452d05..ec8ea76dbb9 100644 --- a/solr/core/src/test/org/apache/solr/schema/DenseVectorFieldTest.java +++ b/solr/core/src/test/org/apache/solr/schema/DenseVectorFieldTest.java @@ -16,6 +16,9 @@ */ package org.apache.solr.schema; +import static org.hamcrest.core.Is.is; + +import java.util.Arrays; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; @@ -24,379 +27,425 @@ import org.junit.Assert; import org.junit.Test; -import java.util.Arrays; - -import static org.hamcrest.core.Is.is; - public class DenseVectorFieldTest extends AbstractBadConfigTestBase { - private DenseVectorField toTest = new DenseVectorField(); - - @Test - public void fieldTypeDefinition_badVectorDimension_shouldThrowException() throws Exception { - assertConfigs("solrconfig-basic.xml", "bad-schema-densevector-dimension.xml", - "For input string: \"4.6\""); + private DenseVectorField toTest = new DenseVectorField(); + + @Test + public void fieldTypeDefinition_badVectorDimension_shouldThrowException() throws Exception { + assertConfigs( + "solrconfig-basic.xml", + "bad-schema-densevector-dimension.xml", + "For input string: \"4.6\""); + } + + @Test + public void fieldTypeDefinition_nullVectorDimension_shouldThrowException() throws Exception { + assertConfigs( + "solrconfig-basic.xml", + "bad-schema-densevector-dimension-null.xml", + "the vector dimension is a mandatory parameter"); + } + + @Test + public void fieldTypeDefinition_badSimilarityDistance_shouldThrowException() throws Exception { + assertConfigs( + "solrconfig-basic.xml", + "bad-schema-densevector-similarity.xml", + "No enum constant org.apache.lucene.index.VectorSimilarityFunction.NOT_EXISTENT"); + } + + @Test + public void fieldDefinition_docValues_shouldThrowException() throws Exception { + assertConfigs( + "solrconfig-basic.xml", + "bad-schema-densevector-docvalues.xml", + "DenseVectorField fields can not have docValues: vector"); + } + + @Test + public void fieldDefinition_multiValued_shouldThrowException() throws Exception { + assertConfigs( + "solrconfig-basic.xml", + "bad-schema-densevector-multivalued.xml", + "DenseVectorField fields can not be multiValued: vector"); + } + + @Test + public void fieldTypeDefinition_nullSimilarityDistance_shouldUseDefaultSimilarityEuclidean() + throws Exception { + try { + initCore("solrconfig-basic.xml", "schema-densevector-similarity-null.xml"); + IndexSchema schema = h.getCore().getLatestSchema(); + + SchemaField vector = schema.getField("vector"); + assertNotNull(vector); + + DenseVectorField type = (DenseVectorField) vector.getType(); + MatcherAssert.assertThat( + type.getSimilarityFunction(), is(VectorSimilarityFunction.EUCLIDEAN)); + MatcherAssert.assertThat(type.getDimension(), is(4)); + + assertTrue(vector.indexed()); + assertTrue(vector.stored()); + } finally { + deleteCore(); } + } - @Test - public void fieldTypeDefinition_nullVectorDimension_shouldThrowException() throws Exception { - assertConfigs("solrconfig-basic.xml", "bad-schema-densevector-dimension-null.xml", - "the vector dimension is a mandatory parameter"); - } + @Test + public void fieldDefinition_correctConfiguration_shouldLoadSchemaField() throws Exception { + try { + initCore("solrconfig-basic.xml", "schema-densevector.xml"); + IndexSchema schema = 
h.getCore().getLatestSchema(); - @Test - public void fieldTypeDefinition_badSimilarityDistance_shouldThrowException() throws Exception { - assertConfigs("solrconfig-basic.xml", "bad-schema-densevector-similarity.xml", - "No enum constant org.apache.lucene.index.VectorSimilarityFunction.NOT_EXISTENT"); - } + SchemaField vector = schema.getField("vector"); + assertNotNull(vector); - @Test - public void fieldDefinition_docValues_shouldThrowException() throws Exception { - assertConfigs("solrconfig-basic.xml", "bad-schema-densevector-docvalues.xml", - "DenseVectorField fields can not have docValues: vector"); - } + DenseVectorField type = (DenseVectorField) vector.getType(); + MatcherAssert.assertThat(type.getSimilarityFunction(), is(VectorSimilarityFunction.COSINE)); + MatcherAssert.assertThat(type.getDimension(), is(4)); - @Test - public void fieldDefinition_multiValued_shouldThrowException() throws Exception { - assertConfigs("solrconfig-basic.xml", "bad-schema-densevector-multivalued.xml", - "DenseVectorField fields can not be multiValued: vector"); + assertTrue(vector.indexed()); + assertTrue(vector.stored()); + } finally { + deleteCore(); } - - @Test - public void fieldTypeDefinition_nullSimilarityDistance_shouldUseDefaultSimilarityEuclidean() throws Exception { - try { - initCore("solrconfig-basic.xml", "schema-densevector-similarity-null.xml"); - IndexSchema schema = h.getCore().getLatestSchema(); - - SchemaField vector = schema.getField("vector"); - assertNotNull(vector); - - DenseVectorField type = (DenseVectorField) vector.getType(); - MatcherAssert.assertThat(type.getSimilarityFunction(), is(VectorSimilarityFunction.EUCLIDEAN)); - MatcherAssert.assertThat(type.getDimension(), is(4)); - - assertTrue(vector.indexed()); - assertTrue(vector.stored()); - } finally { - deleteCore(); - } + } + + @Test + public void fieldDefinition_advancedCodecHyperParamer_shouldLoadSchemaField() throws Exception { + try { + initCore("solrconfig_codec.xml", "schema-densevector-codec-hyperparamer.xml"); + IndexSchema schema = h.getCore().getLatestSchema(); + + SchemaField vector = schema.getField("vector"); + assertNotNull(vector); + + DenseVectorField type1 = (DenseVectorField) vector.getType(); + MatcherAssert.assertThat(type1.getSimilarityFunction(), is(VectorSimilarityFunction.COSINE)); + MatcherAssert.assertThat(type1.getDimension(), is(4)); + MatcherAssert.assertThat(type1.getCodecFormat(), is("Lucene90HnswVectorsFormat")); + MatcherAssert.assertThat(type1.getHnswMaxConn(), is(10)); + MatcherAssert.assertThat(type1.getHnswBeamWidth(), is(40)); + + SchemaField vector2 = schema.getField("vector2"); + assertNotNull(vector2); + + DenseVectorField type2 = (DenseVectorField) vector2.getType(); + MatcherAssert.assertThat(type2.getSimilarityFunction(), is(VectorSimilarityFunction.COSINE)); + MatcherAssert.assertThat(type2.getDimension(), is(4)); + MatcherAssert.assertThat(type2.getCodecFormat(), is("Lucene90HnswVectorsFormat")); + MatcherAssert.assertThat(type2.getHnswMaxConn(), is(6)); + MatcherAssert.assertThat(type2.getHnswBeamWidth(), is(60)); + + SchemaField vectorDefault = schema.getField("vector_default"); + assertNotNull(vectorDefault); + + DenseVectorField typeDefault = (DenseVectorField) vectorDefault.getType(); + MatcherAssert.assertThat( + typeDefault.getSimilarityFunction(), is(VectorSimilarityFunction.COSINE)); + MatcherAssert.assertThat(typeDefault.getDimension(), is(4)); + assertNull(typeDefault.getCodecFormat()); + MatcherAssert.assertThat(typeDefault.getHnswMaxConn(), is(16)); + 
MatcherAssert.assertThat(typeDefault.getHnswBeamWidth(), is(100)); + } finally { + deleteCore(); } - - @Test - public void fieldDefinition_correctConfiguration_shouldLoadSchemaField() throws Exception { - try { - initCore("solrconfig-basic.xml", "schema-densevector.xml"); - IndexSchema schema = h.getCore().getLatestSchema(); - - SchemaField vector = schema.getField("vector"); - assertNotNull(vector); - - DenseVectorField type = (DenseVectorField) vector.getType(); - MatcherAssert.assertThat(type.getSimilarityFunction(), is(VectorSimilarityFunction.COSINE)); - MatcherAssert.assertThat(type.getDimension(), is(4)); - - assertTrue(vector.indexed()); - assertTrue(vector.stored()); - } finally { - deleteCore(); - } + } + + @Test + public void parseVector_NotAList_shouldThrowException() { + RuntimeException thrown = + Assert.assertThrows( + "Single string value should throw an exception", + SolrException.class, + () -> { + toTest.parseVector("string"); + }); + MatcherAssert.assertThat( + thrown.getMessage(), + is( + "incorrect vector format." + + " The expected format is an array :'[f1,f2..f3]' where each element f is a float")); + + thrown = + Assert.assertThrows( + "Single float value should throw an exception", + SolrException.class, + () -> { + toTest.parseVector(1.5f); + }); + MatcherAssert.assertThat( + thrown.getMessage(), + is( + "incorrect vector format." + + " The expected format is an array :'[f1,f2..f3]' where each element f is a float")); + } + + @Test + public void parseVector_notNumericList_shouldThrowException() { + toTest = new DenseVectorField(3); + + RuntimeException thrown = + Assert.assertThrows( + "Incorrect elements should throw an exception", + SolrException.class, + () -> { + toTest.parseVector( + Arrays.asList( + new DenseVectorField(3), new DenseVectorField(4), new DenseVectorField(5))); + }); + MatcherAssert.assertThat( + thrown.getMessage(), + is( + "incorrect vector format. The expected format is an array :'[f1,f2..f3]' where each element f is a float")); + } + + @Test + public void parseVector_incorrectVectorDimension_shouldThrowException() { + toTest = new DenseVectorField(3); + + RuntimeException thrown = + Assert.assertThrows( + "Incorrect vector dimension should throw an exception", + SolrException.class, + () -> { + toTest.parseVector(Arrays.asList(1.0f, 1.5f)); + }); + MatcherAssert.assertThat( + thrown.getMessage(), + is( + "incorrect vector dimension. The vector value has size 2 while it is expected a vector with size 3")); + } + + @Test + public void parseVector_incorrectElement_shouldThrowException() { + toTest = new DenseVectorField(3); + + RuntimeException thrown = + Assert.assertThrows( + "Incorrect elements should throw an exception", + SolrException.class, + () -> { + toTest.parseVector(Arrays.asList("1.0f", "string", "string2")); + }); + MatcherAssert.assertThat( + thrown.getMessage(), + is( + "incorrect vector element: 'string'. 
The expected format is:'[f1,f2..f3]' where each element f is a float")); + } + + /** + * The inputValue is an ArrayList with a type that depends on the loader used: - {@link + * org.apache.solr.handler.loader.XMLLoader}, {@link org.apache.solr.handler.loader.CSVLoader} + * produce an ArrayList of String + */ + @Test + public void parseVector_StringArrayList_shouldParseFloatArray() { + toTest = new DenseVectorField(3); + float[] expected = new float[] {1.1f, 2.2f, 3.3f}; + + MatcherAssert.assertThat(toTest.parseVector(Arrays.asList("1.1", "2.2", "3.3")), is(expected)); + } + + /** + * The inputValue is an ArrayList with a type that depends on the loader used: - {@link + * org.apache.solr.handler.loader.JsonLoader} produces an ArrayList of Double + */ + @Test + public void parseVector_DoubleArrayList_shouldParseFloatArray() { + toTest = new DenseVectorField(3); + float[] expected = new float[] {1.7f, 5.4f, 6.6f}; + + MatcherAssert.assertThat(toTest.parseVector(Arrays.asList(1.7d, 5.4d, 6.6d)), is(expected)); + } + + /** + * The inputValue is an ArrayList with a type that depends on the loader used: - {@link + * org.apache.solr.handler.loader.JavabinLoader} produces an ArrayList of Float + */ + @Test + public void parseVector_FloatArrayList_shouldParseFloatArray() { + toTest = new DenseVectorField(3); + float[] expected = new float[] {5.5f, 7.7f, 9.8f}; + + MatcherAssert.assertThat(toTest.parseVector(Arrays.asList(5.5f, 7.7f, 9.8f)), is(expected)); + } + + @Test + public void indexing_incorrectVectorFormat_shouldThrowException() throws Exception { + try { + initCore("solrconfig-basic.xml", "schema-densevector.xml"); + + assertFailedU(adoc("id", "0", "vector", "5.4")); + assertFailedU(adoc("id", "0", "vector", "string")); + } finally { + deleteCore(); } - - @Test - public void fieldDefinition_advancedCodecHyperParamer_shouldLoadSchemaField() throws Exception { - try { - initCore("solrconfig_codec.xml", "schema-densevector-codec-hyperparamer.xml"); - IndexSchema schema = h.getCore().getLatestSchema(); - - SchemaField vector = schema.getField("vector"); - assertNotNull(vector); - - DenseVectorField type1 = (DenseVectorField) vector.getType(); - MatcherAssert.assertThat(type1.getSimilarityFunction(), is(VectorSimilarityFunction.COSINE)); - MatcherAssert.assertThat(type1.getDimension(), is(4)); - MatcherAssert.assertThat(type1.getCodecFormat(), is("Lucene90HnswVectorsFormat")); - MatcherAssert.assertThat(type1.getHnswMaxConn(), is(10)); - MatcherAssert.assertThat(type1.getHnswBeamWidth(), is(40)); - - SchemaField vector2 = schema.getField("vector2"); - assertNotNull(vector2); - - DenseVectorField type2 = (DenseVectorField) vector2.getType(); - MatcherAssert.assertThat(type2.getSimilarityFunction(), is(VectorSimilarityFunction.COSINE)); - MatcherAssert.assertThat(type2.getDimension(), is(4)); - MatcherAssert.assertThat(type2.getCodecFormat(), is("Lucene90HnswVectorsFormat")); - MatcherAssert.assertThat(type2.getHnswMaxConn(), is(6)); - MatcherAssert.assertThat(type2.getHnswBeamWidth(), is(60)); - - SchemaField vectorDefault = schema.getField("vector_default"); - assertNotNull(vectorDefault); - - DenseVectorField typeDefault = (DenseVectorField) vectorDefault.getType(); - MatcherAssert.assertThat(typeDefault.getSimilarityFunction(), is(VectorSimilarityFunction.COSINE)); - MatcherAssert.assertThat(typeDefault.getDimension(), is(4)); - assertNull(typeDefault.getCodecFormat()); - MatcherAssert.assertThat(typeDefault.getHnswMaxConn(), is(16)); - MatcherAssert.assertThat(typeDefault.getHnswBeamWidth(),
is(100)); - } finally { - deleteCore(); - } + } + + @Test + public void indexing_inconsistentVectorDimension_shouldThrowException() throws Exception { + try { + // vectorDimension = 4 + initCore("solrconfig-basic.xml", "schema-densevector.xml"); + + SolrInputDocument toFailDoc1 = new SolrInputDocument(); + toFailDoc1.addField("id", "0"); + toFailDoc1.addField("vector", Arrays.asList(1, 2, 3)); + + SolrInputDocument toFailDoc2 = new SolrInputDocument(); + toFailDoc2.addField("id", "0"); + toFailDoc2.addField("vector", Arrays.asList(1, 2, 3, 4, 5)); + + assertFailedU(adoc(toFailDoc1)); + assertFailedU(adoc(toFailDoc2)); + } finally { + deleteCore(); } + } - @Test - public void parseVector_NotAList_shouldThrowException() { - RuntimeException thrown = Assert.assertThrows("Single string value should throw an exception", SolrException.class, () -> { - toTest.parseVector("string"); - }); - MatcherAssert.assertThat(thrown.getMessage(), is("incorrect vector format." + - " The expected format is an array :'[f1,f2..f3]' where each element f is a float")); - - thrown = Assert.assertThrows("Single float value should throw an exception", SolrException.class, () -> { - toTest.parseVector(1.5f); - }); - MatcherAssert.assertThat(thrown.getMessage(), is("incorrect vector format." + - " The expected format is an array :'[f1,f2..f3]' where each element f is a float")); - } + @Test + public void indexing_correctDocument_shouldBeIndexed() throws Exception { + try { + initCore("solrconfig-basic.xml", "schema-densevector.xml"); - @Test - public void parseVector_notNumericList_shouldThrowException() { - toTest = new DenseVectorField(3); + SolrInputDocument correctDoc = new SolrInputDocument(); + correctDoc.addField("id", "0"); + correctDoc.addField("vector", Arrays.asList(1, 2, 3, 4)); - RuntimeException thrown = Assert.assertThrows("Incorrect elements should throw an exception", SolrException.class, () -> { - toTest.parseVector(Arrays.asList(new DenseVectorField(3), new DenseVectorField(4), new DenseVectorField(5))); - }); - MatcherAssert.assertThat(thrown.getMessage(), is("incorrect vector format. The expected format is an array :'[f1,f2..f3]' where each element f is a float")); + assertU(adoc(correctDoc)); + } finally { + deleteCore(); } - - @Test - public void parseVector_incorrectVectorDimension_shouldThrowException() { - toTest = new DenseVectorField(3); - - RuntimeException thrown = Assert.assertThrows("Incorrect vector dimension should throw an exception", SolrException.class, () -> { - toTest.parseVector(Arrays.asList(1.0f, 1.5f)); - }); - MatcherAssert.assertThat(thrown.getMessage(), is("incorrect vector dimension. 
The vector value has size 2 while it is expected a vector with size 3")); + } + + @Test + public void query_storedField_shouldBeReturnedInResults() throws Exception { + try { + initCore("solrconfig-basic.xml", "schema-densevector.xml"); + + SolrInputDocument doc1 = new SolrInputDocument(); + doc1.addField("id", "0"); + doc1.addField("vector", Arrays.asList(1.1f, 2.1f, 3.1f, 4.1f)); + assertU(adoc(doc1)); + assertU(commit()); + + assertJQ( + req("q", "id:0", "fl", "vector"), "/response/docs/[0]=={'vector':[1.1,2.1,3.1,4.1]}"); + + assertQ( + req("q", "id:0", "fl", "vector"), + "*[count(//doc)=1]", + "//result/doc[1]/arr[@name=\"vector\"]/float[1][.='" + 1.1 + "']", + "//result/doc[1]/arr[@name=\"vector\"]/float[2][.='" + 2.1 + "']", + "//result/doc[1]/arr[@name=\"vector\"]/float[3][.='" + 3.1 + "']", + "//result/doc[1]/arr[@name=\"vector\"]/float[4][.='" + 4.1 + "']"); + } finally { + deleteCore(); } - - @Test - public void parseVector_incorrectElement_shouldThrowException() { - toTest = new DenseVectorField(3); - - RuntimeException thrown = Assert.assertThrows("Incorrect elements should throw an exception", SolrException.class, () -> { - toTest.parseVector(Arrays.asList("1.0f", "string", "string2")); - }); - MatcherAssert.assertThat(thrown.getMessage(), is("incorrect vector element: 'string'. The expected format is:'[f1,f2..f3]' where each element f is a float")); + } + + /** Not Supported */ + @Test + public void query_rangeSearch_shouldThrowException() throws Exception { + try { + initCore("solrconfig-basic.xml", "schema-densevector.xml"); + + assertQEx( + "Running Range queries on a dense vector field should raise an Exception", + "Cannot parse 'vector:[[1.0 2.0] TO [1.5 2.5]]'", + req("q", "vector:[[1.0 2.0] TO [1.5 2.5]]", "fl", "vector"), + SolrException.ErrorCode.BAD_REQUEST); + + assertQEx( + "Running Range queries on a dense vector field should raise an Exception", + "Range Queries are not supported for Dense Vector fields." + + " Please use the {!knn} query parser to run K nearest neighbors search queries.", + req("q", "vector:[1 TO 5]", "fl", "vector"), + SolrException.ErrorCode.BAD_REQUEST); + } finally { + deleteCore(); } - - /** - * The inputValue is an ArrayList with a type that dipends on the loader used: - * - {@link org.apache.solr.handler.loader.XMLLoader}, {@link org.apache.solr.handler.loader.CSVLoader} produces an ArrayList of String - */ - @Test - public void parseVector_StringArrayList_shouldParseFloatArray() { - toTest = new DenseVectorField(3); - float[] expected = new float[]{1.1f, 2.2f, 3.3f}; - - MatcherAssert.assertThat(toTest.parseVector(Arrays.asList("1.1", "2.2", "3.3")), is(expected)); + } + + /** Not Supported */ + @Test + public void query_existenceSearch_shouldThrowException() throws Exception { + try { + initCore("solrconfig-basic.xml", "schema-densevector.xml"); + + assertQEx( + "Running Existence queries on a dense vector field should raise an Exception", + "Range Queries are not supported for Dense Vector fields." 
+ + " Please use the {!knn} query parser to run K nearest neighbors search queries.", + req("q", "vector:[* TO *]", "fl", "vector"), + SolrException.ErrorCode.BAD_REQUEST); + } finally { + deleteCore(); } - - /** - * The inputValue is an ArrayList with a type that dipends on the loader used: - * - {@link org.apache.solr.handler.loader.JsonLoader} produces an ArrayList of Double - */ - @Test - public void parseVector_DoubleArrayList_shouldParseFloatArray() { - toTest = new DenseVectorField(3); - float[] expected = new float[]{1.7f, 5.4f, 6.6f}; - - MatcherAssert.assertThat(toTest.parseVector(Arrays.asList(1.7d, 5.4d, 6.6d)), is(expected)); + } + + /** Not Supported */ + @Test + public void query_fieldQuery_shouldThrowException() throws Exception { + try { + initCore("solrconfig-basic.xml", "schema-densevector.xml"); + + assertQEx( + "Running Field queries on a dense vector field should raise an Exception", + "Cannot parse 'vector:[1.0, 2.0, 3.0, 4.0]", + req("q", "vector:[1.0, 2.0, 3.0, 4.0]", "fl", "vector"), + SolrException.ErrorCode.BAD_REQUEST); + + assertQEx( + "Running Field queries on a dense vector field should raise an Exception", + "Field Queries are not supported for Dense Vector fields." + + " Please use the {!knn} query parser to run K nearest neighbors search queries.", + req("q", "vector:\"[1.0, 2.0, 3.0, 4.0]\"", "fl", "vector"), + SolrException.ErrorCode.BAD_REQUEST); + + assertQEx( + "Running Field queries on a dense vector field should raise an Exception", + "Field Queries are not supported for Dense Vector fields." + + " Please use the {!knn} query parser to run K nearest neighbors search queries.", + req("q", "vector:2.0", "fl", "vector"), + SolrException.ErrorCode.BAD_REQUEST); + } finally { + deleteCore(); } - - /** - * The inputValue is an ArrayList with a type that dipends on the loader used: - * - {@link org.apache.solr.handler.loader.JavabinLoader} produces an ArrayList of Float - */ - @Test - public void parseVector_FloatArrayList_shouldParseFloatArray() { - toTest = new DenseVectorField(3); - float[] expected = new float[]{5.5f, 7.7f, 9.8f}; - - MatcherAssert.assertThat(toTest.parseVector(Arrays.asList(5.5f, 7.7f, 9.8f)), is(expected)); + } + + /** Not Supported */ + @Test + public void query_sortByVectorField_shouldThrowException() throws Exception { + try { + initCore("solrconfig-basic.xml", "schema-densevector.xml"); + + assertQEx( + "Sort over vectors should raise an Exception", + "Cannot sort on a Dense Vector field", + req("q", "*:*", "sort", "vector desc"), + SolrException.ErrorCode.BAD_REQUEST); + } finally { + deleteCore(); } - - @Test - public void indexing_incorrectVectorFormat_shouldThrowException() throws Exception { - try { - initCore("solrconfig-basic.xml", "schema-densevector.xml"); - - assertFailedU(adoc("id", "0", "vector", "5.4")); - assertFailedU(adoc("id", "0", "vector", "string")); - } finally { - deleteCore(); - } - } - - @Test - public void indexing_inconsistentVectorDimension_shouldThrowException() throws Exception { - try { - //vectorDimension = 4 - initCore("solrconfig-basic.xml", "schema-densevector.xml"); - - SolrInputDocument toFailDoc1 = new SolrInputDocument(); - toFailDoc1.addField("id", "0"); - toFailDoc1.addField("vector", Arrays.asList(1, 2, 3)); - - SolrInputDocument toFailDoc2 = new SolrInputDocument(); - toFailDoc2.addField("id", "0"); - toFailDoc2.addField("vector", Arrays.asList(1, 2, 3, 4, 5)); - - assertFailedU(adoc(toFailDoc1)); - assertFailedU(adoc(toFailDoc2)); - } finally { - deleteCore(); - } - } - - @Test - public 
void indexing_correctDocument_shouldBeIndexed() throws Exception { - try { - initCore("solrconfig-basic.xml", "schema-densevector.xml"); - - SolrInputDocument correctDoc = new SolrInputDocument(); - correctDoc.addField("id", "0"); - correctDoc.addField("vector", Arrays.asList(1, 2, 3, 4)); - - assertU(adoc(correctDoc)); - } finally { - deleteCore(); - } - } - - @Test - public void query_storedField_shouldBeReturnedInResults() throws Exception { - try { - initCore("solrconfig-basic.xml", "schema-densevector.xml"); - - SolrInputDocument doc1 = new SolrInputDocument(); - doc1.addField("id", "0"); - doc1.addField("vector", Arrays.asList(1.1f, 2.1f, 3.1f, 4.1f)); - assertU(adoc(doc1)); - assertU(commit()); - - assertJQ(req("q","id:0", "fl","vector"), - "/response/docs/[0]=={'vector':[1.1,2.1,3.1,4.1]}"); - - assertQ(req("q", "id:0", "fl", "vector"), "*[count(//doc)=1]", - "//result/doc[1]/arr[@name=\"vector\"]/float[1][.='" + 1.1 + "']", - "//result/doc[1]/arr[@name=\"vector\"]/float[2][.='" + 2.1 + "']", - "//result/doc[1]/arr[@name=\"vector\"]/float[3][.='" + 3.1 + "']", - "//result/doc[1]/arr[@name=\"vector\"]/float[4][.='" + 4.1 + "']" - ); - } finally { - deleteCore(); - } - } - - /** - * Not Supported - */ - @Test - public void query_rangeSearch_shouldThrowException() throws Exception { - try { - initCore("solrconfig-basic.xml", "schema-densevector.xml"); - - assertQEx("Running Range queries on a dense vector field should raise an Exception", - "Cannot parse 'vector:[[1.0 2.0] TO [1.5 2.5]]'", - req("q", "vector:[[1.0 2.0] TO [1.5 2.5]]", "fl", "vector"), - SolrException.ErrorCode.BAD_REQUEST); - - assertQEx("Running Range queries on a dense vector field should raise an Exception", - "Range Queries are not supported for Dense Vector fields." + - " Please use the {!knn} query parser to run K nearest neighbors search queries.", - req("q", "vector:[1 TO 5]", "fl", "vector"), - SolrException.ErrorCode.BAD_REQUEST); - } finally { - deleteCore(); - } - } - - /** - * Not Supported - */ - @Test - public void query_existenceSearch_shouldThrowException() throws Exception { - try { - initCore("solrconfig-basic.xml", "schema-densevector.xml"); - - assertQEx("Running Existence queries on a dense vector field should raise an Exception", - "Range Queries are not supported for Dense Vector fields." + - " Please use the {!knn} query parser to run K nearest neighbors search queries.", - req("q", "vector:[* TO *]", "fl", "vector"), - SolrException.ErrorCode.BAD_REQUEST); - } finally { - deleteCore(); - } - } - - /** - * Not Supported - */ - @Test - public void query_fieldQuery_shouldThrowException() throws Exception { - try { - initCore("solrconfig-basic.xml", "schema-densevector.xml"); - - assertQEx("Running Field queries on a dense vector field should raise an Exception", - "Cannot parse 'vector:[1.0, 2.0, 3.0, 4.0]", - req("q", "vector:[1.0, 2.0, 3.0, 4.0]", "fl", "vector"), - SolrException.ErrorCode.BAD_REQUEST); - - assertQEx("Running Field queries on a dense vector field should raise an Exception", - "Field Queries are not supported for Dense Vector fields." + - " Please use the {!knn} query parser to run K nearest neighbors search queries.", - req("q", "vector:\"[1.0, 2.0, 3.0, 4.0]\"", "fl", "vector"), - SolrException.ErrorCode.BAD_REQUEST); - - assertQEx("Running Field queries on a dense vector field should raise an Exception", - "Field Queries are not supported for Dense Vector fields." 
+ - " Please use the {!knn} query parser to run K nearest neighbors search queries.", - req("q", "vector:2.0", "fl", "vector"), - SolrException.ErrorCode.BAD_REQUEST); - } finally { - deleteCore(); - } - } - - /** - * Not Supported - */ - @Test - public void query_sortByVectorField_shouldThrowException() throws Exception { - try { - initCore("solrconfig-basic.xml", "schema-densevector.xml"); - - assertQEx("Sort over vectors should raise an Exception", - "Cannot sort on a Dense Vector field", - req("q", "*:*", "sort", "vector desc"), - SolrException.ErrorCode.BAD_REQUEST); - } finally { - deleteCore(); - } - } - - /** - * Not Supported - */ - @Test - public void query_functionQueryUsage_shouldThrowException() throws Exception { - try { - initCore("solrconfig-basic.xml", "schema-densevector.xml"); - - assertQEx("Running Function queries on a dense vector field should raise an Exception", - "Function queries are not supported for Dense Vector fields.", - req("q", "*:*", "fl", "id,field(vector)"), - SolrException.ErrorCode.BAD_REQUEST); - } finally { - deleteCore(); - } + } + + /** Not Supported */ + @Test + public void query_functionQueryUsage_shouldThrowException() throws Exception { + try { + initCore("solrconfig-basic.xml", "schema-densevector.xml"); + + assertQEx( + "Running Function queries on a dense vector field should raise an Exception", + "Function queries are not supported for Dense Vector fields.", + req("q", "*:*", "fl", "id,field(vector)"), + SolrException.ErrorCode.BAD_REQUEST); + } finally { + deleteCore(); } + } } diff --git a/solr/core/src/test/org/apache/solr/schema/DocValuesMissingTest.java b/solr/core/src/test/org/apache/solr/schema/DocValuesMissingTest.java index 04d38fdf565..9cc943b15a5 100644 --- a/solr/core/src/test/org/apache/solr/schema/DocValuesMissingTest.java +++ b/solr/core/src/test/org/apache/solr/schema/DocValuesMissingTest.java @@ -20,107 +20,112 @@ import org.junit.BeforeClass; import org.junit.Test; -/** - * Tests things like sorting on docvalues with missing values - */ +/** Tests things like sorting on docvalues with missing values */ public class DocValuesMissingTest extends SolrTestCaseJ4 { - + @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig-basic.xml", "schema-docValuesMissing.xml"); } - + @Override public void setUp() throws Exception { super.setUp(); clearIndex(); assertU(commit()); } - + /** numeric default lucene sort (relative to presumed default value of 0) */ - private void checkSortMissingDefault(final String field, - final String negative, - final String positive) { - assertU(adoc("id", "0")); // missing - assertU(adoc("id", "1", field, negative)); - assertU(adoc("id", "2", field, positive)); - assertU(commit()); - assertQ(req("q", "*:*", "sort", field+" asc"), - "//result/doc[1]/str[@name='id'][.=1]", - "//result/doc[2]/str[@name='id'][.=0]", - "//result/doc[3]/str[@name='id'][.=2]"); - assertQ(req("q", "*:*", "sort", field+" desc"), - "//result/doc[1]/str[@name='id'][.=2]", - "//result/doc[2]/str[@name='id'][.=0]", - "//result/doc[3]/str[@name='id'][.=1]"); + private void checkSortMissingDefault( + final String field, final String negative, final String positive) { + assertU(adoc("id", "0")); // missing + assertU(adoc("id", "1", field, negative)); + assertU(adoc("id", "2", field, positive)); + assertU(commit()); + assertQ( + req("q", "*:*", "sort", field + " asc"), + "//result/doc[1]/str[@name='id'][.=1]", + "//result/doc[2]/str[@name='id'][.=0]", + "//result/doc[3]/str[@name='id'][.=2]"); + assertQ( + 
req("q", "*:*", "sort", field + " desc"), + "//result/doc[1]/str[@name='id'][.=2]", + "//result/doc[2]/str[@name='id'][.=0]", + "//result/doc[3]/str[@name='id'][.=1]"); } /** sort missing always first */ - private void checkSortMissingFirst(final String field, - final String low, - final String high) { + private void checkSortMissingFirst(final String field, final String low, final String high) { assertU(adoc("id", "0")); // missing assertU(adoc("id", "1", field, low)); assertU(adoc("id", "2", field, high)); assertU(commit()); - assertQ(req("q", "*:*", "sort", field+" asc"), - "//result/doc[1]/str[@name='id'][.=0]", - "//result/doc[2]/str[@name='id'][.=1]", - "//result/doc[3]/str[@name='id'][.=2]"); - assertQ(req("q", "*:*", "sort", field+" desc"), - "//result/doc[1]/str[@name='id'][.=0]", - "//result/doc[2]/str[@name='id'][.=2]", - "//result/doc[3]/str[@name='id'][.=1]"); + assertQ( + req("q", "*:*", "sort", field + " asc"), + "//result/doc[1]/str[@name='id'][.=0]", + "//result/doc[2]/str[@name='id'][.=1]", + "//result/doc[3]/str[@name='id'][.=2]"); + assertQ( + req("q", "*:*", "sort", field + " desc"), + "//result/doc[1]/str[@name='id'][.=0]", + "//result/doc[2]/str[@name='id'][.=2]", + "//result/doc[3]/str[@name='id'][.=1]"); } /** sort missing always last */ - private void checkSortMissingLast(final String field, - final String low, - final String high) { + private void checkSortMissingLast(final String field, final String low, final String high) { assertU(adoc("id", "0")); // missing assertU(adoc("id", "1", field, low)); assertU(adoc("id", "2", field, high)); assertU(commit()); - assertQ(req("q", "*:*", "sort", field+" asc"), - "//result/doc[1]/str[@name='id'][.=1]", - "//result/doc[2]/str[@name='id'][.=2]", - "//result/doc[3]/str[@name='id'][.=0]"); - assertQ(req("q", "*:*", "sort", field+" desc"), - "//result/doc[1]/str[@name='id'][.=2]", - "//result/doc[2]/str[@name='id'][.=1]", - "//result/doc[3]/str[@name='id'][.=0]"); - + assertQ( + req("q", "*:*", "sort", field + " asc"), + "//result/doc[1]/str[@name='id'][.=1]", + "//result/doc[2]/str[@name='id'][.=2]", + "//result/doc[3]/str[@name='id'][.=0]"); + assertQ( + req("q", "*:*", "sort", field + " desc"), + "//result/doc[1]/str[@name='id'][.=2]", + "//result/doc[2]/str[@name='id'][.=1]", + "//result/doc[3]/str[@name='id'][.=0]"); } /** function query based on missing */ - private void checkSortMissingFunction(final String field, - final String low, - final String high) { + private void checkSortMissingFunction(final String field, final String low, final String high) { assertU(adoc("id", "0")); // missing assertU(adoc("id", "1", field, low)); assertU(adoc("id", "2", field, high)); assertU(commit()); - assertQ(req("q", "*:*", "fl", "e:exists("+field+")", "sort", "id asc"), - "//result/doc[1]/bool[@name='e'][.='false']", - "//result/doc[2]/bool[@name='e'][.='true']", - "//result/doc[3]/bool[@name='e'][.='true']"); + assertQ( + req("q", "*:*", "fl", "e:exists(" + field + ")", "sort", "id asc"), + "//result/doc[1]/bool[@name='e'][.='false']", + "//result/doc[2]/bool[@name='e'][.='true']", + "//result/doc[3]/bool[@name='e'][.='true']"); } /** missing facet count */ - private void checkSortMissingFacet(final String field, - final String low, - final String high) { + private void checkSortMissingFacet(final String field, final String low, final String high) { assertU(adoc("id", "0")); // missing assertU(adoc("id", "1")); // missing assertU(adoc("id", "2", field, low)); assertU(adoc("id", "3", field, high)); assertU(commit()); - 
assertQ(req("q", "*:*", "facet", "true", "facet.field", field, - "facet.mincount", "1", "facet.missing", "true"), - "//lst[@name='facet_fields']/lst[@name='"+field+"']/int[@name='"+low+"'][.=1]", - "//lst[@name='facet_fields']/lst[@name='"+field+"']/int[@name='"+high+"'][.=1]", - "//lst[@name='facet_fields']/lst[@name='"+field+"']/int[.=2]"); + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.field", + field, + "facet.mincount", + "1", + "facet.missing", + "true"), + "//lst[@name='facet_fields']/lst[@name='" + field + "']/int[@name='" + low + "'][.=1]", + "//lst[@name='facet_fields']/lst[@name='" + field + "']/int[@name='" + high + "'][.=1]", + "//lst[@name='facet_fields']/lst[@name='" + field + "']/int[.=2]"); } /** float with default lucene sort (treats as 0) */ @@ -155,7 +160,7 @@ public void testFloatSortMissingLast() throws Exception { public void testDynFloatSortMissingLast() throws Exception { checkSortMissingLast("dyn_floatdv_missinglast", "-1.3", "4.2"); } - + /** float function query based on missing */ @Test public void testFloatMissingFunction() throws Exception { @@ -166,7 +171,7 @@ public void testFloatMissingFunction() throws Exception { public void testDynFloatMissingFunction() throws Exception { checkSortMissingFunction("dyn_floatdv", "-1.3", "4.2"); } - + /** float missing facet count */ @Test public void testFloatMissingFacet() throws Exception { @@ -188,7 +193,7 @@ public void testIntSort() throws Exception { public void testDynIntSort() throws Exception { checkSortMissingDefault("dyn_intdv", "-1", "4"); } - + /** int with sort missing always first */ @Test public void testIntSortMissingFirst() throws Exception { @@ -199,7 +204,7 @@ public void testIntSortMissingFirst() throws Exception { public void testDynIntSortMissingFirst() throws Exception { checkSortMissingFirst("dyn_intdv_missingfirst", "-1", "4"); } - + /** int with sort missing always last */ @Test public void testIntSortMissingLast() throws Exception { @@ -210,7 +215,7 @@ public void testIntSortMissingLast() throws Exception { public void testDynIntSortMissingLast() throws Exception { checkSortMissingLast("dyn_intdv_missinglast", "-1", "4"); } - + /** int function query based on missing */ @Test public void testIntMissingFunction() throws Exception { @@ -221,7 +226,7 @@ public void testIntMissingFunction() throws Exception { public void testDynIntMissingFunction() throws Exception { checkSortMissingFunction("dyn_intdv", "-1", "4"); } - + /** int missing facet count */ @Test public void testIntMissingFacet() throws Exception { @@ -232,7 +237,7 @@ public void testIntMissingFacet() throws Exception { public void testDynIntMissingFacet() throws Exception { checkSortMissingFacet("dyn_intdv", "-1", "4"); } - + /** double with default lucene sort (treats as 0) */ @Test public void testDoubleSort() throws Exception { @@ -243,7 +248,7 @@ public void testDoubleSort() throws Exception { public void testDynDoubleSort() throws Exception { checkSortMissingDefault("dyn_doubledv", "-1.3", "4.2"); } - + /** double with sort missing always first */ @Test public void testDoubleSortMissingFirst() throws Exception { @@ -265,7 +270,7 @@ public void testDoubleSortMissingLast() throws Exception { public void testDynDoubleSortMissingLast() throws Exception { checkSortMissingLast("dyn_doubledv_missinglast", "-1.3", "4.2"); } - + /** double function query based on missing */ @Test public void testDoubleMissingFunction() throws Exception { @@ -276,7 +281,7 @@ public void testDoubleMissingFunction() throws Exception { public 
void testDynDoubleMissingFunction() throws Exception { checkSortMissingFunction("dyn_doubledv", "-1.3", "4.2"); } - + /** double missing facet count */ @Test public void testDoubleMissingFacet() throws Exception { @@ -287,7 +292,7 @@ public void testDoubleMissingFacet() throws Exception { public void testDynDoubleMissingFacet() throws Exception { checkSortMissingFacet("dyn_doubledv", "-1.3", "4.2"); } - + /** long with default lucene sort (treats as 0) */ @Test public void testLongSort() throws Exception { @@ -320,7 +325,7 @@ public void testLongSortMissingLast() throws Exception { public void testDynLongSortMissingLast() throws Exception { checkSortMissingLast("dyn_longdv_missinglast", "-1", "4"); } - + /** long function query based on missing */ @Test public void testLongMissingFunction() throws Exception { @@ -331,7 +336,7 @@ public void testLongMissingFunction() throws Exception { public void testDynLongMissingFunction() throws Exception { checkSortMissingFunction("dyn_longdv", "-1", "4"); } - + /** long missing facet count */ @Test public void testLongMissingFacet() throws Exception { @@ -342,7 +347,7 @@ public void testLongMissingFacet() throws Exception { public void testDynLongMissingFacet() throws Exception { checkSortMissingFacet("dyn_longdv", "-1", "4"); } - + /** date with default lucene sort (treats as 1970) */ @Test public void testDateSort() throws Exception { @@ -353,81 +358,79 @@ public void testDateSort() throws Exception { public void testDynDateSort() throws Exception { checkSortMissingDefault("dyn_datedv", "1900-12-31T23:59:59.999Z", "2005-12-31T23:59:59.999Z"); } - + /** date with sort missing always first */ @Test public void testDateSortMissingFirst() throws Exception { - checkSortMissingFirst("datedv_missingfirst", - "1900-12-31T23:59:59.999Z", "2005-12-31T23:59:59.999Z"); + checkSortMissingFirst( + "datedv_missingfirst", "1900-12-31T23:59:59.999Z", "2005-12-31T23:59:59.999Z"); } /** dynamic date with sort missing always first */ @Test public void testDynDateSortMissingFirst() throws Exception { - checkSortMissingFirst("dyn_datedv_missingfirst", - "1900-12-31T23:59:59.999Z", "2005-12-31T23:59:59.999Z"); + checkSortMissingFirst( + "dyn_datedv_missingfirst", "1900-12-31T23:59:59.999Z", "2005-12-31T23:59:59.999Z"); } - + /** date with sort missing always last */ @Test public void testDateSortMissingLast() throws Exception { - checkSortMissingLast("datedv_missinglast", - "1900-12-31T23:59:59.999Z", "2005-12-31T23:59:59.999Z"); + checkSortMissingLast( + "datedv_missinglast", "1900-12-31T23:59:59.999Z", "2005-12-31T23:59:59.999Z"); } /** dynamic date with sort missing always last */ @Test public void testDynDateSortMissingLast() throws Exception { - checkSortMissingLast("dyn_datedv_missinglast", - "1900-12-31T23:59:59.999Z", "2005-12-31T23:59:59.999Z"); + checkSortMissingLast( + "dyn_datedv_missinglast", "1900-12-31T23:59:59.999Z", "2005-12-31T23:59:59.999Z"); } - + /** date function query based on missing */ @Test public void testDateMissingFunction() throws Exception { - checkSortMissingFunction("datedv", - "1900-12-31T23:59:59.999Z", "2005-12-31T23:59:59.999Z"); + checkSortMissingFunction("datedv", "1900-12-31T23:59:59.999Z", "2005-12-31T23:59:59.999Z"); } /** dynamic date function query based on missing */ @Test public void testDynDateMissingFunction() throws Exception { - checkSortMissingFunction("dyn_datedv", - "1900-12-31T23:59:59.999Z", "2005-12-31T23:59:59.999Z"); + checkSortMissingFunction("dyn_datedv", "1900-12-31T23:59:59.999Z", "2005-12-31T23:59:59.999Z"); } 
- + /** date missing facet count */ @Test public void testDateMissingFacet() throws Exception { - checkSortMissingFacet("datedv", - "1900-12-31T23:59:59.999Z", "2005-12-31T23:59:59.999Z"); + checkSortMissingFacet("datedv", "1900-12-31T23:59:59.999Z", "2005-12-31T23:59:59.999Z"); } /** dynamic date missing facet count */ @Test public void testDynDateMissingFacet() throws Exception { - checkSortMissingFacet("dyn_datedv", - "1900-12-31T23:59:59.999Z", "2005-12-31T23:59:59.999Z"); + checkSortMissingFacet("dyn_datedv", "1900-12-31T23:59:59.999Z", "2005-12-31T23:59:59.999Z"); } - + /** string (and dynamic string) with default lucene sort (treats as "") */ @Test public void testStringSort() throws Exception { - // note: cant use checkSortMissingDefault because + // note: can't use checkSortMissingDefault because // nothing sorts lower than the default of "" - for (String field : new String[] {"stringdv","dyn_stringdv"}) { + for (String field : new String[] {"stringdv", "dyn_stringdv"}) { assertU(adoc("id", "0")); // missing assertU(adoc("id", "1", field, "a")); assertU(adoc("id", "2", field, "z")); assertU(commit()); - assertQ(req("q", "*:*", "sort", field+" asc"), - "//result/doc[1]/str[@name='id'][.=0]", - "//result/doc[2]/str[@name='id'][.=1]", - "//result/doc[3]/str[@name='id'][.=2]"); - assertQ(req("q", "*:*", "sort", field+" desc"), - "//result/doc[1]/str[@name='id'][.=2]", - "//result/doc[2]/str[@name='id'][.=1]", - "//result/doc[3]/str[@name='id'][.=0]"); + assertQ( + req("q", "*:*", "sort", field + " asc"), + "//result/doc[1]/str[@name='id'][.=0]", + "//result/doc[2]/str[@name='id'][.=1]", + "//result/doc[3]/str[@name='id'][.=2]"); + assertQ( + req("q", "*:*", "sort", field + " desc"), + "//result/doc[1]/str[@name='id'][.=2]", + "//result/doc[2]/str[@name='id'][.=1]", + "//result/doc[3]/str[@name='id'][.=0]"); } } - + /** string with sort missing always first */ @Test public void testStringSortMissingFirst() throws Exception { @@ -438,7 +441,7 @@ public void testStringSortMissingFirst() throws Exception { public void testDynStringSortMissingFirst() throws Exception { checkSortMissingFirst("dyn_stringdv_missingfirst", "a", "z"); } - + /** string with sort missing always last */ @Test public void testStringSortMissingLast() throws Exception { @@ -460,7 +463,7 @@ public void testStringMissingFunction() throws Exception { public void testDynStringMissingFunction() throws Exception { checkSortMissingFunction("dyn_stringdv", "a", "z"); } - + /** string missing facet count */ @Test public void testStringMissingFacet() throws Exception { @@ -469,7 +472,18 @@ public void testStringMissingFacet() throws Exception { assertU(adoc("id", "2", "stringdv", "a")); assertU(adoc("id", "3", "stringdv", "z")); assertU(commit()); - assertQ(req("q", "*:*", "facet", "true", "facet.field", "stringdv", "facet.mincount", "1", "facet.missing", "true"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.field", + "stringdv", + "facet.mincount", + "1", + "facet.missing", + "true"), "//lst[@name='facet_fields']/lst[@name='stringdv']/int[@name='a'][.=1]", "//lst[@name='facet_fields']/lst[@name='stringdv']/int[@name='z'][.=1]", "//lst[@name='facet_fields']/lst[@name='stringdv']/int[.=2]"); @@ -478,19 +492,21 @@ public void testStringMissingFacet() throws Exception { /** bool (and dynamic bool) with default lucene sort (treats as "") */ @Test public void testBoolSort() throws Exception { - // note: cant use checkSortMissingDefault because + // note: can't use checkSortMissingDefault because // nothing sorts
lower than the default of "" and // bool fields are, at root, string fields. - for (String field : new String[] {"booldv","dyn_booldv"}) { + for (String field : new String[] {"booldv", "dyn_booldv"}) { assertU(adoc("id", "0")); // missing assertU(adoc("id", "1", field, "false")); assertU(adoc("id", "2", field, "true")); assertU(commit()); - assertQ(req("q", "*:*", "sort", field+" asc"), + assertQ( + req("q", "*:*", "sort", field + " asc"), "//result/doc[1]/str[@name='id'][.=0]", "//result/doc[2]/str[@name='id'][.=1]", "//result/doc[3]/str[@name='id'][.=2]"); - assertQ(req("q", "*:*", "sort", field+" desc"), + assertQ( + req("q", "*:*", "sort", field + " desc"), "//result/doc[1]/str[@name='id'][.=2]", "//result/doc[2]/str[@name='id'][.=1]", "//result/doc[3]/str[@name='id'][.=0]"); @@ -538,10 +554,20 @@ public void testBoolMissingFacet() throws Exception { assertU(adoc("id", "2", "booldv", "false")); assertU(adoc("id", "3", "booldv", "true")); assertU(commit()); - assertQ(req("q", "*:*", "facet", "true", "facet.field", "booldv", "facet.mincount", "1", "facet.missing", "true"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.field", + "booldv", + "facet.mincount", + "1", + "facet.missing", + "true"), "//lst[@name='facet_fields']/lst[@name='booldv']/int[@name='false'][.=1]", "//lst[@name='facet_fields']/lst[@name='booldv']/int[@name='true'][.=1]", "//lst[@name='facet_fields']/lst[@name='booldv']/int[.=2]"); } - } diff --git a/solr/core/src/test/org/apache/solr/schema/DocValuesMultiTest.java b/solr/core/src/test/org/apache/solr/schema/DocValuesMultiTest.java index 8697b264d61..b38a0f094a8 100644 --- a/solr/core/src/test/org/apache/solr/schema/DocValuesMultiTest.java +++ b/solr/core/src/test/org/apache/solr/schema/DocValuesMultiTest.java @@ -17,7 +17,6 @@ package org.apache.solr.schema; import java.io.IOException; - import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.LeafReader; @@ -30,21 +29,21 @@ import org.junit.Test; public class DocValuesMultiTest extends SolrTestCaseJ4 { - + @BeforeClass public static void beforeTests() throws Exception { initCore("solrconfig-basic.xml", "schema-docValuesMulti.xml"); - + // sanity check our schema meets our expectations final IndexSchema schema = h.getCore().getLatestSchema(); - for (String f : new String[] {"floatdv", "intdv", "doubledv", "longdv", "datedv", "stringdv", "booldv"}) { + for (String f : + new String[] {"floatdv", "intdv", "doubledv", "longdv", "datedv", "stringdv", "booldv"}) { final SchemaField sf = schema.getField(f); - assertTrue(f + " is not multiValued, test is useless, who changed the schema?", - sf.multiValued()); - assertFalse(f + " is indexed, test is useless, who changed the schema?", - sf.indexed()); - assertTrue(f + " has no docValues, test is useless, who changed the schema?", - sf.hasDocValues()); + assertTrue( + f + " is not multiValued, test is useless, who changed the schema?", sf.multiValued()); + assertFalse(f + " is indexed, test is useless, who changed the schema?", sf.indexed()); + assertTrue( + f + " has no docValues, test is useless, who changed the schema?", sf.hasDocValues()); } } @@ -56,12 +55,29 @@ public void setUp() throws Exception { @Test public void testDocValues() throws IOException { - final DocValuesType expectedNumericDvType = Boolean.getBoolean(NUMERIC_POINTS_SYSPROP) ?
- DocValuesType.SORTED_NUMERIC : DocValuesType.SORTED_SET; - - assertU(adoc("id", "1", "floatdv", "4.5", "intdv", "-1", "intdv", "3", - "stringdv", "value1", "stringdv", "value2", - "booldv", "false", "booldv", "true")); + final DocValuesType expectedNumericDvType = + Boolean.getBoolean(NUMERIC_POINTS_SYSPROP) + ? DocValuesType.SORTED_NUMERIC + : DocValuesType.SORTED_SET; + + assertU( + adoc( + "id", + "1", + "floatdv", + "4.5", + "intdv", + "-1", + "intdv", + "3", + "stringdv", + "value1", + "stringdv", + "value2", + "booldv", + "false", + "booldv", + "true")); assertU(commit()); try (SolrCore core = h.getCoreInc()) { final RefCounted searcherRef = core.openNewSearcher(true, true); @@ -87,15 +103,15 @@ public void testDocValues() throws IOException { assertEquals(1, dv.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, dv.nextOrd()); - } finally { searcherRef.decref(); } } } - - /** Tests the ability to do basic queries (without scoring, just match-only) on - * string docvalues fields that are not inverted (indexed "forward" only) + + /** + * Tests the ability to do basic queries (without scoring, just match-only) on string docvalues + * fields that are not inverted (indexed "forward" only) */ @Test public void testStringDocValuesMatch() throws Exception { @@ -105,45 +121,46 @@ public void testStringDocValuesMatch() throws Exception { assertU(adoc("id", "4", "stringdv", "car")); assertU(adoc("id", "5", "stringdv", "dog", "stringdv", "cat")); assertU(commit()); - + // string: termquery - assertQ(req("q", "stringdv:car", "sort", "id asc"), + assertQ( + req("q", "stringdv:car", "sort", "id asc"), "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=4]" - ); - + "//result/doc[1]/str[@name='id'][.=4]"); + // string: range query - assertQ(req("q", "stringdv:[b TO d]", "sort", "id asc"), + assertQ( + req("q", "stringdv:[b TO d]", "sort", "id asc"), "//*[@numFound='4']", "//result/doc[1]/str[@name='id'][.=1]", "//result/doc[2]/str[@name='id'][.=3]", "//result/doc[3]/str[@name='id'][.=4]", - "//result/doc[4]/str[@name='id'][.=5]" - ); - + "//result/doc[4]/str[@name='id'][.=5]"); + // string: prefix query - assertQ(req("q", "stringdv:c*", "sort", "id asc"), + assertQ( + req("q", "stringdv:c*", "sort", "id asc"), "//*[@numFound='3']", "//result/doc[1]/str[@name='id'][.=3]", "//result/doc[2]/str[@name='id'][.=4]", - "//result/doc[3]/str[@name='id'][.=5]" - ); - + "//result/doc[3]/str[@name='id'][.=5]"); + // string: wildcard query - assertQ(req("q", "stringdv:c?r", "sort", "id asc"), + assertQ( + req("q", "stringdv:c?r", "sort", "id asc"), "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=4]" - ); - + "//result/doc[1]/str[@name='id'][.=4]"); + // string: regexp query - assertQ(req("q", "stringdv:/c[a-b]r/", "sort", "id asc"), + assertQ( + req("q", "stringdv:/c[a-b]r/", "sort", "id asc"), "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=4]" - ); + "//result/doc[1]/str[@name='id'][.=4]"); } - /** Tests the ability to do basic queries (without scoring, just match-only) on - * boolean docvalues fields that are not inverted (indexed "forward" only) + /** + * Tests the ability to do basic queries (without scoring, just match-only) on boolean docvalues + * fields that are not inverted (indexed "forward" only) */ @Test public void testBoolDocValuesMatch() throws Exception { @@ -155,33 +172,33 @@ public void testBoolDocValuesMatch() throws Exception { assertU(commit()); // string: termquery - assertQ(req("q", "booldv:true", "sort", "id asc"), + assertQ( + req("q", 
"booldv:true", "sort", "id asc"), "//*[@numFound='3']", "//result/doc[1]/str[@name='id'][.=1]", "//result/doc[2]/str[@name='id'][.=3]", - "//result/doc[3]/str[@name='id'][.=5]" - ); + "//result/doc[3]/str[@name='id'][.=5]"); - // boolean: range query, - assertQ(req("q", "booldv:[false TO false]", "sort", "id asc"), + // boolean: range query, + assertQ( + req("q", "booldv:[false TO false]", "sort", "id asc"), "//*[@numFound='3']", "//result/doc[1]/str[@name='id'][.=2]", "//result/doc[2]/str[@name='id'][.=4]", "//result/doc[3]/str[@name='id'][.=5]"); - - assertQ(req("q", "*:*", "sort", "id asc", "rows", "10", "fl", "booldv"), + assertQ( + req("q", "*:*", "sort", "id asc", "rows", "10", "fl", "booldv"), "//result/doc[1]/arr[@name='booldv']/bool[1][.='true']", "//result/doc[2]/arr[@name='booldv']/bool[1][.='false']", "//result/doc[3]/arr[@name='booldv']/bool[1][.='true']", "//result/doc[4]/arr[@name='booldv']/bool[1][.='false']", "//result/doc[5]/arr[@name='booldv']/bool[1][.='false']", - "//result/doc[5]/arr[@name='booldv']/bool[2][.='true']" - ); - + "//result/doc[5]/arr[@name='booldv']/bool[2][.='true']"); } - /** Tests the ability to do basic queries (without scoring, just match-only) on - * float docvalues fields that are not inverted (indexed "forward" only) + /** + * Tests the ability to do basic queries (without scoring, just match-only) on float docvalues + * fields that are not inverted (indexed "forward" only) */ @Test public void testFloatDocValuesMatch() throws Exception { @@ -191,37 +208,38 @@ public void testFloatDocValuesMatch() throws Exception { assertU(adoc("id", "4", "floatdv", "3")); assertU(adoc("id", "5", "floatdv", "-0.5")); assertU(commit()); - + // float: termquery - assertQ(req("q", "floatdv:3", "sort", "id asc"), + assertQ( + req("q", "floatdv:3", "sort", "id asc"), "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.=3]", - "//result/doc[2]/str[@name='id'][.=4]" - ); - + "//result/doc[2]/str[@name='id'][.=4]"); + // float: rangequery - assertQ(req("q", "floatdv:[-1 TO 2.5]", "sort", "id asc"), - "//*[@numFound='3']", - "//result/doc[1]/str[@name='id'][.=1]", - "//result/doc[2]/str[@name='id'][.=3]", - "//result/doc[3]/str[@name='id'][.=5]" - ); + assertQ( + req("q", "floatdv:[-1 TO 2.5]", "sort", "id asc"), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.=1]", + "//result/doc[2]/str[@name='id'][.=3]", + "//result/doc[3]/str[@name='id'][.=5]"); // (neg) float: rangequery - assertQ(req("q", "floatdv:[-6 TO -4]", "sort", "id asc"), - "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=2]" - ); - + assertQ( + req("q", "floatdv:[-6 TO -4]", "sort", "id asc"), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.=2]"); + // (neg) float: termquery - assertQ(req("q", "floatdv:\"-5\"", "sort", "id asc"), - "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=2]" - ); + assertQ( + req("q", "floatdv:\"-5\"", "sort", "id asc"), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.=2]"); } - - /** Tests the ability to do basic queries (without scoring, just match-only) on - * double docvalues fields that are not inverted (indexed "forward" only) + + /** + * Tests the ability to do basic queries (without scoring, just match-only) on double docvalues + * fields that are not inverted (indexed "forward" only) */ @Test public void testDoubleDocValuesMatch() throws Exception { @@ -231,41 +249,58 @@ public void testDoubleDocValuesMatch() throws Exception { assertU(adoc("id", "4", "doubledv", "3")); assertU(adoc("id", "5", "doubledv", 
"-0.5")); assertU(commit()); - + // double: termquery - assertQ(req("q", "doubledv:3", "sort", "id asc"), + assertQ( + req("q", "doubledv:3", "sort", "id asc"), "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.=3]", - "//result/doc[2]/str[@name='id'][.=4]" - ); - + "//result/doc[2]/str[@name='id'][.=4]"); + // double: rangequery - assertQ(req("q", "doubledv:[-1 TO 2.5]", "sort", "id asc"), - "//*[@numFound='3']", - "//result/doc[1]/str[@name='id'][.=1]", - "//result/doc[2]/str[@name='id'][.=3]", - "//result/doc[3]/str[@name='id'][.=5]" - ); + assertQ( + req("q", "doubledv:[-1 TO 2.5]", "sort", "id asc"), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.=1]", + "//result/doc[2]/str[@name='id'][.=3]", + "//result/doc[3]/str[@name='id'][.=5]"); // (neg) double: rangequery - assertQ(req("q", "doubledv:[-6 TO -4]", "sort", "id asc"), - "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=2]" - ); - + assertQ( + req("q", "doubledv:[-6 TO -4]", "sort", "id asc"), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.=2]"); + // (neg) double: termquery - assertQ(req("q", "doubledv:\"-5\"", "sort", "id asc"), - "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=2]" - ); + assertQ( + req("q", "doubledv:\"-5\"", "sort", "id asc"), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.=2]"); } + @Test public void testDocValuesFacetingSimple() { - // this is the random test verbatim from DocValuesTest, so it populates with the default values defined in its schema. + // this is the random test verbatim from DocValuesTest, so it populates with the default values + // defined in its schema. for (int i = 0; i < 50; ++i) { - assertU(adoc("id", "" + i, "floatdv", "1", "intdv", "2", "doubledv", "3", "longdv", "4", - "datedv", "1995-12-31T23:59:59.999Z", - "stringdv", "abc", "booldv", "true")); + assertU( + adoc( + "id", + "" + i, + "floatdv", + "1", + "intdv", + "2", + "doubledv", + "3", + "longdv", + "4", + "datedv", + "1995-12-31T23:59:59.999Z", + "stringdv", + "abc", + "booldv", + "true")); } for (int i = 0; i < 50; ++i) { if (rarely()) { @@ -273,75 +308,415 @@ public void testDocValuesFacetingSimple() { } switch (i % 3) { case 0: - assertU(adoc("id", "1000" + i, "floatdv", "" + i, "intdv", "" + i, "doubledv", "" + i, "longdv", "" + i, - "datedv", (1900+i) + "-12-31T23:59:59.999Z", "stringdv", "abc" + i, "booldv", "true", "booldv", "true")); + assertU( + adoc( + "id", + "1000" + i, + "floatdv", + "" + i, + "intdv", + "" + i, + "doubledv", + "" + i, + "longdv", + "" + i, + "datedv", + (1900 + i) + "-12-31T23:59:59.999Z", + "stringdv", + "abc" + i, + "booldv", + "true", + "booldv", + "true")); break; case 1: - assertU(adoc("id", "1000" + i, "floatdv", "" + i, "intdv", "" + i, "doubledv", "" + i, "longdv", "" + i, - "datedv", (1900+i) + "-12-31T23:59:59.999Z", "stringdv", "abc" + i, "booldv", "false", "booldv", "false")); + assertU( + adoc( + "id", + "1000" + i, + "floatdv", + "" + i, + "intdv", + "" + i, + "doubledv", + "" + i, + "longdv", + "" + i, + "datedv", + (1900 + i) + "-12-31T23:59:59.999Z", + "stringdv", + "abc" + i, + "booldv", + "false", + "booldv", + "false")); break; case 2: - assertU(adoc("id", "1000" + i, "floatdv", "" + i, "intdv", "" + i, "doubledv", "" + i, "longdv", "" + i, - "datedv", (1900+i) + "-12-31T23:59:59.999Z", "stringdv", "abc" + i, "booldv", "true", "booldv", "false")); + assertU( + adoc( + "id", + "1000" + i, + "floatdv", + "" + i, + "intdv", + "" + i, + "doubledv", + "" + i, + "longdv", + "" + i, + "datedv", + (1900 + i) + 
"-12-31T23:59:59.999Z", + "stringdv", + "abc" + i, + "booldv", + "true", + "booldv", + "false")); break; } - - } assertU(commit()); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "longdv", "facet.sort", "count", "facet.limit", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "longdv", + "facet.sort", + "count", + "facet.limit", + "1"), "//lst[@name='longdv']/int[@name='4'][.='51']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "longdv", "facet.sort", "count", "facet.offset", "1", "facet.limit", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "longdv", + "facet.sort", + "count", + "facet.offset", + "1", + "facet.limit", + "1"), "//lst[@name='longdv']/int[@name='0'][.='1']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "longdv", "facet.sort", "index", "facet.offset", "33", "facet.limit", "1", "facet.mincount", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "longdv", + "facet.sort", + "index", + "facet.offset", + "33", + "facet.limit", + "1", + "facet.mincount", + "1"), "//lst[@name='longdv']/int[@name='33'][.='1']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "floatdv", "facet.sort", "count", "facet.limit", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "floatdv", + "facet.sort", + "count", + "facet.limit", + "1"), "//lst[@name='floatdv']/int[@name='1.0'][.='51']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "floatdv", "facet.sort", "count", "facet.offset", "1", "facet.limit", "-1", "facet.mincount", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "floatdv", + "facet.sort", + "count", + "facet.offset", + "1", + "facet.limit", + "-1", + "facet.mincount", + "1"), "//lst[@name='floatdv']/int[@name='0.0'][.='1']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "floatdv", "facet.sort", "index", "facet.offset", "33", "facet.limit", "1", "facet.mincount", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "floatdv", + "facet.sort", + "index", + "facet.offset", + "33", + "facet.limit", + "1", + "facet.mincount", + "1"), "//lst[@name='floatdv']/int[@name='33.0'][.='1']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "doubledv", "facet.sort", "count", "facet.limit", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "doubledv", + "facet.sort", + "count", + "facet.limit", + "1"), "//lst[@name='doubledv']/int[@name='3.0'][.='51']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "doubledv", "facet.sort", "count", "facet.offset", "1", "facet.limit", "-1", "facet.mincount", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "doubledv", + "facet.sort", + "count", + "facet.offset", + "1", + "facet.limit", + "-1", + "facet.mincount", + "1"), "//lst[@name='doubledv']/int[@name='0.0'][.='1']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "doubledv", "facet.sort", "index", "facet.offset", "33", "facet.limit", "1", "facet.mincount", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "doubledv", + "facet.sort", + "index", + "facet.offset", + "33", + 
"facet.limit", + "1", + "facet.mincount", + "1"), "//lst[@name='doubledv']/int[@name='33.0'][.='1']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "intdv", "facet.sort", "count", "facet.limit", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "intdv", + "facet.sort", + "count", + "facet.limit", + "1"), "//lst[@name='intdv']/int[@name='2'][.='51']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "intdv", "facet.sort", "count", "facet.offset", "1", "facet.limit", "-1", "facet.mincount", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "intdv", + "facet.sort", + "count", + "facet.offset", + "1", + "facet.limit", + "-1", + "facet.mincount", + "1"), "//lst[@name='intdv']/int[@name='0'][.='1']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "intdv", "facet.sort", "index", "facet.offset", "33", "facet.limit", "1", "facet.mincount", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "intdv", + "facet.sort", + "index", + "facet.offset", + "33", + "facet.limit", + "1", + "facet.mincount", + "1"), "//lst[@name='intdv']/int[@name='33'][.='1']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "datedv", "facet.sort", "count", "facet.limit", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "datedv", + "facet.sort", + "count", + "facet.limit", + "1"), "//lst[@name='datedv']/int[@name='1995-12-31T23:59:59.999Z'][.='50']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "datedv", "facet.sort", "count", "facet.offset", "1", "facet.limit", "-1", "facet.mincount", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "datedv", + "facet.sort", + "count", + "facet.offset", + "1", + "facet.limit", + "-1", + "facet.mincount", + "1"), "//lst[@name='datedv']/int[@name='1900-12-31T23:59:59.999Z'][.='1']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "datedv", "facet.sort", "index", "facet.offset", "33", "facet.limit", "1", "facet.mincount", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "datedv", + "facet.sort", + "index", + "facet.offset", + "33", + "facet.limit", + "1", + "facet.mincount", + "1"), "//lst[@name='datedv']/int[@name='1933-12-31T23:59:59.999Z'][.='1']"); - - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "stringdv", "facet.sort", "count", "facet.limit", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "stringdv", + "facet.sort", + "count", + "facet.limit", + "1"), "//lst[@name='stringdv']/int[@name='abc'][.='50']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "stringdv", "facet.sort", "count", "facet.offset", "1", "facet.limit", "-1", "facet.mincount", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "stringdv", + "facet.sort", + "count", + "facet.offset", + "1", + "facet.limit", + "-1", + "facet.mincount", + "1"), "//lst[@name='stringdv']/int[@name='abc1'][.='1']", "//lst[@name='stringdv']/int[@name='abc13'][.='1']", "//lst[@name='stringdv']/int[@name='abc19'][.='1']", - "//lst[@name='stringdv']/int[@name='abc49'][.='1']" - ); - - // Even though offseting by 33, the sort order is abc1 abc11....abc2 so it throws the position in the 
return list off. - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "stringdv", "facet.sort", "index", "facet.offset", "33", "facet.limit", "1", "facet.mincount", "1"), - "//lst[@name='stringdv']/int[@name='abc38'][.='1']"); + "//lst[@name='stringdv']/int[@name='abc49'][.='1']"); + // Even though offsetting by 33, the sort order is abc1 abc11....abc2 so it throws the position + // in the return list off. + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "stringdv", + "facet.sort", + "index", + "facet.offset", + "33", + "facet.limit", + "1", + "facet.mincount", + "1"), + "//lst[@name='stringdv']/int[@name='abc38'][.='1']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "booldv", "facet.sort", "count"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "booldv", + "facet.sort", + "count"), "//lst[@name='booldv']/int[@name='true'][.='83']", "//lst[@name='booldv']/int[@name='false'][.='33']"); - } } diff --git a/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java b/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java index a791da2f667..f0f03b535ee 100644 --- a/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java +++ b/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java @@ -24,7 +24,6 @@ import java.util.List; import java.util.function.Function; import java.util.function.Supplier; - import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.IndexableField; @@ -52,14 +51,14 @@ public static void beforeTests() throws Exception { // sanity check our schema meets our expectations final IndexSchema schema = h.getCore().getLatestSchema(); - for (String f : new String[] {"floatdv", "intdv", "doubledv", "longdv", "datedv", "stringdv", "booldv"}) { + for (String f : + new String[] {"floatdv", "intdv", "doubledv", "longdv", "datedv", "stringdv", "booldv"}) { final SchemaField sf = schema.getField(f); - assertFalse(f + " is multiValued, test is useless, who changed the schema?", - sf.multiValued()); - assertFalse(f + " is indexed, test is useless, who changed the schema?", - sf.indexed()); - assertTrue(f + " has no docValues, test is useless, who changed the schema?", - sf.hasDocValues()); + assertFalse( + f + " is multiValued, test is useless, who changed the schema?", sf.multiValued()); + assertFalse(f + " is indexed, test is useless, who changed the schema?", sf.indexed()); + assertTrue( + f + " has no docValues, test is useless, who changed the schema?", sf.hasDocValues()); } } @@ -112,20 +111,40 @@ public void testDocValues() throws IOException { final SchemaField longDv = schema.getField("longdv"); final SchemaField boolDv = schema.getField("booldv"); - FunctionValues values = floatDv.getType().getValueSource(floatDv, null).getValues(null, searcher.getSlowAtomicReader().leaves().get(0)); + FunctionValues values = + floatDv + .getType() + .getValueSource(floatDv, null) + .getValues(null, searcher.getSlowAtomicReader().leaves().get(0)); assertEquals(1f, values.floatVal(0), 0f); assertEquals(1f, values.objectVal(0)); - values = intDv.getType().getValueSource(intDv, null).getValues(null, searcher.getSlowAtomicReader().leaves().get(0)); + values = + intDv + .getType() + .getValueSource(intDv, null) + .getValues(null, searcher.getSlowAtomicReader().leaves().get(0)); assertEquals(2, values.intVal(0)); assertEquals(2, values.objectVal(0)); - values = 
doubleDv.getType().getValueSource(doubleDv, null).getValues(null, searcher.getSlowAtomicReader().leaves().get(0)); + values = + doubleDv + .getType() + .getValueSource(doubleDv, null) + .getValues(null, searcher.getSlowAtomicReader().leaves().get(0)); assertEquals(3d, values.doubleVal(0), 0d); assertEquals(3d, values.objectVal(0)); - values = longDv.getType().getValueSource(longDv, null).getValues(null, searcher.getSlowAtomicReader().leaves().get(0)); + values = + longDv + .getType() + .getValueSource(longDv, null) + .getValues(null, searcher.getSlowAtomicReader().leaves().get(0)); assertEquals(4L, values.longVal(0)); assertEquals(4L, values.objectVal(0)); - - values = boolDv.getType().getValueSource(boolDv, null).getValues(null, searcher.getSlowAtomicReader().leaves().get(0)); + + values = + boolDv + .getType() + .getValueSource(boolDv, null) + .getValues(null, searcher.getSlowAtomicReader().leaves().get(0)); assertEquals("true", values.strVal(0)); assertEquals(true, values.objectVal(0)); @@ -154,53 +173,109 @@ public void testDocValues() throws IOException { private void tstToObj(SchemaField sf, Object o) { List fields = sf.createFields(o); for (IndexableField field : fields) { - assertEquals( sf.getType().toObject(field), o); + assertEquals(sf.getType().toObject(field), o); } } @Test public void testDocValuesSorting() { - assertU(adoc("id", "1", "floatdv", "2", "intdv", "3", "doubledv", "4", "longdv", "5", "datedv", "1995-12-31T23:59:59.999Z", "stringdv", "b", "booldv", "true")); - assertU(adoc("id", "2", "floatdv", "5", "intdv", "4", "doubledv", "3", "longdv", "2", "datedv", "1997-12-31T23:59:59.999Z", "stringdv", "a", "booldv", "false")); - assertU(adoc("id", "3", "floatdv", "3", "intdv", "1", "doubledv", "2", "longdv", "1", "datedv", "1996-12-31T23:59:59.999Z", "stringdv", "c", "booldv", "true")); + assertU( + adoc( + "id", + "1", + "floatdv", + "2", + "intdv", + "3", + "doubledv", + "4", + "longdv", + "5", + "datedv", + "1995-12-31T23:59:59.999Z", + "stringdv", + "b", + "booldv", + "true")); + assertU( + adoc( + "id", + "2", + "floatdv", + "5", + "intdv", + "4", + "doubledv", + "3", + "longdv", + "2", + "datedv", + "1997-12-31T23:59:59.999Z", + "stringdv", + "a", + "booldv", + "false")); + assertU( + adoc( + "id", + "3", + "floatdv", + "3", + "intdv", + "1", + "doubledv", + "2", + "longdv", + "1", + "datedv", + "1996-12-31T23:59:59.999Z", + "stringdv", + "c", + "booldv", + "true")); assertU(adoc("id", "4")); assertU(commit()); - assertQ(req("q", "*:*", "sort", "floatdv desc", "rows", "1", "fl", "id"), + assertQ( + req("q", "*:*", "sort", "floatdv desc", "rows", "1", "fl", "id"), "//str[@name='id'][.='2']"); - assertQ(req("q", "*:*", "sort", "intdv desc", "rows", "1", "fl", "id"), - "//str[@name='id'][.='2']"); - assertQ(req("q", "*:*", "sort", "doubledv desc", "rows", "1", "fl", "id"), + assertQ( + req("q", "*:*", "sort", "intdv desc", "rows", "1", "fl", "id"), "//str[@name='id'][.='2']"); + assertQ( + req("q", "*:*", "sort", "doubledv desc", "rows", "1", "fl", "id"), "//str[@name='id'][.='1']"); - assertQ(req("q", "*:*", "sort", "longdv desc", "rows", "1", "fl", "id"), + assertQ( + req("q", "*:*", "sort", "longdv desc", "rows", "1", "fl", "id"), "//str[@name='id'][.='1']"); - assertQ(req("q", "*:*", "sort", "datedv desc", "rows", "1", "fl", "id,datedv"), + assertQ( + req("q", "*:*", "sort", "datedv desc", "rows", "1", "fl", "id,datedv"), "//str[@name='id'][.='2']", - "//result/doc[1]/date[@name='datedv'][.='1997-12-31T23:59:59.999Z']" - ); - assertQ(req("q", "*:*", "sort", 
"stringdv desc", "rows", "1", "fl", "id"), + "//result/doc[1]/date[@name='datedv'][.='1997-12-31T23:59:59.999Z']"); + assertQ( + req("q", "*:*", "sort", "stringdv desc", "rows", "1", "fl", "id"), "//str[@name='id'][.='4']"); - assertQ(req("q", "*:*", "sort", "floatdv asc", "rows", "1", "fl", "id"), + assertQ( + req("q", "*:*", "sort", "floatdv asc", "rows", "1", "fl", "id"), "//str[@name='id'][.='4']"); - assertQ(req("q", "*:*", "sort", "intdv asc", "rows", "1", "fl", "id"), - "//str[@name='id'][.='3']"); - assertQ(req("q", "*:*", "sort", "doubledv asc", "rows", "1", "fl", "id"), + assertQ( + req("q", "*:*", "sort", "intdv asc", "rows", "1", "fl", "id"), "//str[@name='id'][.='3']"); + assertQ( + req("q", "*:*", "sort", "doubledv asc", "rows", "1", "fl", "id"), "//str[@name='id'][.='3']"); - assertQ(req("q", "*:*", "sort", "longdv asc", "rows", "1", "fl", "id"), - "//str[@name='id'][.='3']"); - assertQ(req("q", "*:*", "sort", "datedv asc", "rows", "1", "fl", "id"), - "//str[@name='id'][.='1']"); - assertQ(req("q", "*:*", "sort", "stringdv asc", "rows", "1", "fl", "id"), + assertQ( + req("q", "*:*", "sort", "longdv asc", "rows", "1", "fl", "id"), "//str[@name='id'][.='3']"); + assertQ( + req("q", "*:*", "sort", "datedv asc", "rows", "1", "fl", "id"), "//str[@name='id'][.='1']"); + assertQ( + req("q", "*:*", "sort", "stringdv asc", "rows", "1", "fl", "id"), "//str[@name='id'][.='2']"); - assertQ(req("q", "*:*", "sort", "booldv asc", "rows", "10", "fl", "booldv,stringdv"), + assertQ( + req("q", "*:*", "sort", "booldv asc", "rows", "10", "fl", "booldv,stringdv"), "//result/doc[1]/bool[@name='booldv'][.='false']", "//result/doc[2]/bool[@name='booldv'][.='true']", "//result/doc[3]/bool[@name='booldv'][.='true']", - "//result/doc[4]/bool[@name='booldv'][.='true']" - ); - - + "//result/doc[4]/bool[@name='booldv'][.='true']"); } - + @Test public void testDocValuesSorting2() { assertU(adoc("id", "1", "doubledv", "12")); @@ -211,15 +286,15 @@ public void testDocValuesSorting2() { assertU(adoc("id", "6", "doubledv", "-5.123")); assertU(adoc("id", "7", "doubledv", "1.7976931348623157E308")); assertU(commit()); - assertQ(req("fl", "id", "q", "*:*", "sort", "doubledv asc"), + assertQ( + req("fl", "id", "q", "*:*", "sort", "doubledv asc"), "//result/doc[1]/str[@name='id'][.='6']", "//result/doc[2]/str[@name='id'][.='5']", "//result/doc[3]/str[@name='id'][.='3']", "//result/doc[4]/str[@name='id'][.='4']", "//result/doc[5]/str[@name='id'][.='1']", "//result/doc[6]/str[@name='id'][.='2']", - "//result/doc[7]/str[@name='id'][.='7']" - ); + "//result/doc[7]/str[@name='id'][.='7']"); } @Test @@ -233,119 +308,451 @@ public void testDocValuesFaceting() { } switch (i % 3) { case 0: - assertU(adoc("id", "1000" + i, "floatdv", "" + i, "intdv", "" + i, "doubledv", "" + i, "longdv", "" + i, - "datedv", (1900 + i) + "-12-31T23:59:59.999Z", "stringdv", "abc" + i, "booldv", "false")); + assertU( + adoc( + "id", + "1000" + i, + "floatdv", + "" + i, + "intdv", + "" + i, + "doubledv", + "" + i, + "longdv", + "" + i, + "datedv", + (1900 + i) + "-12-31T23:59:59.999Z", + "stringdv", + "abc" + i, + "booldv", + "false")); break; case 1: - assertU(adoc("id", "1000" + i, "floatdv", "" + i, "intdv", "" + i, "doubledv", "" + i, "longdv", "" + i, - "datedv", (1900 + i) + "-12-31T23:59:59.999Z", "stringdv", "abc" + i, "booldv", "true")); + assertU( + adoc( + "id", + "1000" + i, + "floatdv", + "" + i, + "intdv", + "" + i, + "doubledv", + "" + i, + "longdv", + "" + i, + "datedv", + (1900 + i) + "-12-31T23:59:59.999Z", + "stringdv", + 
"abc" + i, + "booldv", + "true")); break; case 2: - assertU(adoc("id", "1000" + i, "floatdv", "" + i, "intdv", "" + i, "doubledv", "" + i, "longdv", "" + i, - "datedv", (1900 + i) + "-12-31T23:59:59.999Z", "stringdv", "abc" + i)); + assertU( + adoc( + "id", + "1000" + i, + "floatdv", + "" + i, + "intdv", + "" + i, + "doubledv", + "" + i, + "longdv", + "" + i, + "datedv", + (1900 + i) + "-12-31T23:59:59.999Z", + "stringdv", + "abc" + i)); break; } } assertU(commit()); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "longdv", "facet.sort", "count", "facet.limit", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "longdv", + "facet.sort", + "count", + "facet.limit", + "1"), "//lst[@name='longdv']/int[@name='4'][.='51']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "longdv", "facet.sort", "count", "facet.offset", "1", "facet.limit", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "longdv", + "facet.sort", + "count", + "facet.offset", + "1", + "facet.limit", + "1"), "//lst[@name='longdv']/int[@name='0'][.='1']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "longdv", "facet.sort", "index", "facet.offset", "33", "facet.limit", "1", "facet.mincount", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "longdv", + "facet.sort", + "index", + "facet.offset", + "33", + "facet.limit", + "1", + "facet.mincount", + "1"), "//lst[@name='longdv']/int[@name='33'][.='1']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "floatdv", "facet.sort", "count", "facet.limit", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "floatdv", + "facet.sort", + "count", + "facet.limit", + "1"), "//lst[@name='floatdv']/int[@name='1.0'][.='51']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "floatdv", "facet.sort", "count", "facet.offset", "1", "facet.limit", "-1", "facet.mincount", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "floatdv", + "facet.sort", + "count", + "facet.offset", + "1", + "facet.limit", + "-1", + "facet.mincount", + "1"), "//lst[@name='floatdv']/int[@name='0.0'][.='1']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "floatdv", "facet.sort", "index", "facet.offset", "33", "facet.limit", "1", "facet.mincount", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "floatdv", + "facet.sort", + "index", + "facet.offset", + "33", + "facet.limit", + "1", + "facet.mincount", + "1"), "//lst[@name='floatdv']/int[@name='33.0'][.='1']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "doubledv", "facet.sort", "count", "facet.limit", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "doubledv", + "facet.sort", + "count", + "facet.limit", + "1"), "//lst[@name='doubledv']/int[@name='3.0'][.='51']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "doubledv", "facet.sort", "count", "facet.offset", "1", "facet.limit", "-1", "facet.mincount", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "doubledv", + "facet.sort", + "count", + "facet.offset", + "1", + "facet.limit", + "-1", + "facet.mincount", + "1"), "//lst[@name='doubledv']/int[@name='0.0'][.='1']"); 
- assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "doubledv", "facet.sort", "index", "facet.offset", "33", "facet.limit", "1", "facet.mincount", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "doubledv", + "facet.sort", + "index", + "facet.offset", + "33", + "facet.limit", + "1", + "facet.mincount", + "1"), "//lst[@name='doubledv']/int[@name='33.0'][.='1']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "intdv", "facet.sort", "count", "facet.limit", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "intdv", + "facet.sort", + "count", + "facet.limit", + "1"), "//lst[@name='intdv']/int[@name='2'][.='51']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "intdv", "facet.sort", "count", "facet.offset", "1", "facet.limit", "-1", "facet.mincount", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "intdv", + "facet.sort", + "count", + "facet.offset", + "1", + "facet.limit", + "-1", + "facet.mincount", + "1"), "//lst[@name='intdv']/int[@name='0'][.='1']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "intdv", "facet.sort", "index", "facet.offset", "33", "facet.limit", "1", "facet.mincount", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "intdv", + "facet.sort", + "index", + "facet.offset", + "33", + "facet.limit", + "1", + "facet.mincount", + "1"), "//lst[@name='intdv']/int[@name='33'][.='1']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "datedv", "facet.sort", "count", "facet.limit", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "datedv", + "facet.sort", + "count", + "facet.limit", + "1"), "//lst[@name='datedv']/int[@name='1995-12-31T23:59:59.999Z'][.='50']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "datedv", "facet.sort", "count", "facet.offset", "1", "facet.limit", "-1", "facet.mincount", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "datedv", + "facet.sort", + "count", + "facet.offset", + "1", + "facet.limit", + "-1", + "facet.mincount", + "1"), "//lst[@name='datedv']/int[@name='1900-12-31T23:59:59.999Z'][.='1']"); - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "datedv", "facet.sort", "index", "facet.offset", "33", "facet.limit", "1", "facet.mincount", "1"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "datedv", + "facet.sort", + "index", + "facet.offset", + "33", + "facet.limit", + "1", + "facet.mincount", + "1"), "//lst[@name='datedv']/int[@name='1933-12-31T23:59:59.999Z'][.='1']"); - assertQ(req("q", "booldv:true"), - "//*[@numFound='83']"); - - assertQ(req("q", "booldv:false"), - "//*[@numFound='17']"); - - assertQ(req("q", "*:*", "facet", "true", "rows", "0", "facet.field", "booldv", "facet.sort", "index", "facet.mincount", "1"), + assertQ(req("q", "booldv:true"), "//*[@numFound='83']"); + + assertQ(req("q", "booldv:false"), "//*[@numFound='17']"); + + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "rows", + "0", + "facet.field", + "booldv", + "facet.sort", + "index", + "facet.mincount", + "1"), "//lst[@name='booldv']/int[@name='false'][.='17']", "//lst[@name='booldv']/int[@name='true'][.='83']"); - } @Test public void testDocValuesStats() { for (int i = 0; i < 50; 
++i) { - assertU(adoc("id", "1000" + i, "floatdv", "" + i%2, "intdv", "" + i%3, "doubledv", "" + i%4, "longdv", "" + i%5, "datedv", (1900+i%6) + "-12-31T23:59:59.999Z", "stringdv", "abc" + i%7)); + assertU( + adoc( + "id", + "1000" + i, + "floatdv", + "" + i % 2, + "intdv", + "" + i % 3, + "doubledv", + "" + i % 4, + "longdv", + "" + i % 5, + "datedv", + (1900 + i % 6) + "-12-31T23:59:59.999Z", + "stringdv", + "abc" + i % 7)); if (rarely()) { assertU(commit()); // to have several segments } } assertU(commit()); - assertQ(req("q", "*:*", "stats", "true", "rows", "0", "stats.field", "stringdv"), + assertQ( + req("q", "*:*", "stats", "true", "rows", "0", "stats.field", "stringdv"), "//str[@name='min'][.='abc0']", "//str[@name='max'][.='abc6']", "//long[@name='count'][.='50']"); - assertQ(req("q", "*:*", "stats", "true", "rows", "0", "stats.field", "floatdv"), + assertQ( + req("q", "*:*", "stats", "true", "rows", "0", "stats.field", "floatdv"), "//double[@name='min'][.='0.0']", "//double[@name='max'][.='1.0']", "//long[@name='count'][.='50']", "//double[@name='sum'][.='25.0']", "//double[@name='mean'][.='0.5']"); - assertQ(req("q", "*:*", "stats", "true", "rows", "0", "stats.field", "intdv"), + assertQ( + req("q", "*:*", "stats", "true", "rows", "0", "stats.field", "intdv"), "//double[@name='min'][.='0.0']", "//double[@name='max'][.='2.0']", "//long[@name='count'][.='50']", "//double[@name='sum'][.='49.0']"); - assertQ(req("q", "*:*", "stats", "true", "rows", "0", "stats.field", "doubledv"), + assertQ( + req("q", "*:*", "stats", "true", "rows", "0", "stats.field", "doubledv"), "//double[@name='min'][.='0.0']", "//double[@name='max'][.='3.0']", "//long[@name='count'][.='50']", "//double[@name='sum'][.='73.0']"); - assertQ(req("q", "*:*", "stats", "true", "rows", "0", "stats.field", "longdv"), + assertQ( + req("q", "*:*", "stats", "true", "rows", "0", "stats.field", "longdv"), "//double[@name='min'][.='0.0']", "//double[@name='max'][.='4.0']", "//long[@name='count'][.='50']", "//double[@name='sum'][.='100.0']", "//double[@name='mean'][.='2.0']"); - assertQ(req("q", "*:*", "stats", "true", "rows", "0", "stats.field", "datedv"), + assertQ( + req("q", "*:*", "stats", "true", "rows", "0", "stats.field", "datedv"), "//date[@name='min'][.='1900-12-31T23:59:59.999Z']", "//date[@name='max'][.='1905-12-31T23:59:59.999Z']", "//long[@name='count'][.='50']"); - assertQ(req("q", "*:*", "stats", "true", "rows", "0", "stats.field", "floatdv", "stats.facet", "intdv"), + assertQ( + req( + "q", + "*:*", + "stats", + "true", + "rows", + "0", + "stats.field", + "floatdv", + "stats.facet", + "intdv"), "//lst[@name='intdv']/lst[@name='0']/long[@name='count'][.='17']", "//lst[@name='intdv']/lst[@name='1']/long[@name='count'][.='17']", "//lst[@name='intdv']/lst[@name='2']/long[@name='count'][.='16']"); - assertQ(req("q", "*:*", "stats", "true", "rows", "0", "stats.field", "floatdv", "stats.facet", "datedv"), + assertQ( + req( + "q", + "*:*", + "stats", + "true", + "rows", + "0", + "stats.field", + "floatdv", + "stats.facet", + "datedv"), "//lst[@name='datedv']/lst[@name='1900-12-31T23:59:59.999Z']/long[@name='count'][.='9']", "//lst[@name='datedv']/lst[@name='1901-12-31T23:59:59.999Z']/long[@name='count'][.='9']", "//lst[@name='datedv']/lst[@name='1902-12-31T23:59:59.999Z']/long[@name='count'][.='8']", @@ -353,210 +760,291 @@ public void testDocValuesStats() { "//lst[@name='datedv']/lst[@name='1904-12-31T23:59:59.999Z']/long[@name='count'][.='8']", 
"//lst[@name='datedv']/lst[@name='1905-12-31T23:59:59.999Z']/long[@name='count'][.='8']"); } - - /** Tests the ability to do basic queries (without scoring, just match-only) on - * docvalues fields that are not inverted (indexed "forward" only) + + /** + * Tests the ability to do basic queries (without scoring, just match-only) on docvalues fields + * that are not inverted (indexed "forward" only) */ @Test public void testDocValuesMatch() throws Exception { - assertU(adoc("id", "1", "floatdv", "2", "intdv", "3", "doubledv", "3.1", "longdv", "5", "datedv", "1995-12-31T23:59:59.999Z", "stringdv", "b", "booldv", "false")); - assertU(adoc("id", "2", "floatdv", "-5", "intdv", "4", "doubledv", "-4.3", "longdv", "2", "datedv", "1997-12-31T23:59:59.999Z", "stringdv", "a", "booldv", "true")); - assertU(adoc("id", "3", "floatdv", "3", "intdv", "1", "doubledv", "2.1", "longdv", "1", "datedv", "1996-12-31T23:59:59.999Z", "stringdv", "c", "booldv", "false")); - assertU(adoc("id", "4", "floatdv", "3", "intdv", "-1", "doubledv", "1.5", "longdv", "1", "datedv", "1996-12-31T23:59:59.999Z", "stringdv", "car")); + assertU( + adoc( + "id", + "1", + "floatdv", + "2", + "intdv", + "3", + "doubledv", + "3.1", + "longdv", + "5", + "datedv", + "1995-12-31T23:59:59.999Z", + "stringdv", + "b", + "booldv", + "false")); + assertU( + adoc( + "id", + "2", + "floatdv", + "-5", + "intdv", + "4", + "doubledv", + "-4.3", + "longdv", + "2", + "datedv", + "1997-12-31T23:59:59.999Z", + "stringdv", + "a", + "booldv", + "true")); + assertU( + adoc( + "id", + "3", + "floatdv", + "3", + "intdv", + "1", + "doubledv", + "2.1", + "longdv", + "1", + "datedv", + "1996-12-31T23:59:59.999Z", + "stringdv", + "c", + "booldv", + "false")); + assertU( + adoc( + "id", + "4", + "floatdv", + "3", + "intdv", + "-1", + "doubledv", + "1.5", + "longdv", + "1", + "datedv", + "1996-12-31T23:59:59.999Z", + "stringdv", + "car")); assertU(commit()); // string: termquery - assertQ(req("q", "stringdv:car", "sort", "id_i asc"), + assertQ( + req("q", "stringdv:car", "sort", "id_i asc"), "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=4]" - ); - + "//result/doc[1]/str[@name='id'][.=4]"); + // string: range query - assertQ(req("q", "stringdv:[b TO d]", "sort", "id_i asc"), + assertQ( + req("q", "stringdv:[b TO d]", "sort", "id_i asc"), "//*[@numFound='3']", "//result/doc[1]/str[@name='id'][.=1]", "//result/doc[2]/str[@name='id'][.=3]", - "//result/doc[3]/str[@name='id'][.=4]" - ); - + "//result/doc[3]/str[@name='id'][.=4]"); + // string: prefix query - assertQ(req("q", "stringdv:c*", "sort", "id_i asc"), + assertQ( + req("q", "stringdv:c*", "sort", "id_i asc"), "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.=3]", - "//result/doc[2]/str[@name='id'][.=4]" - ); - + "//result/doc[2]/str[@name='id'][.=4]"); + // string: wildcard query - assertQ(req("q", "stringdv:c?r", "sort", "id_i asc"), + assertQ( + req("q", "stringdv:c?r", "sort", "id_i asc"), "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=4]" - ); - + "//result/doc[1]/str[@name='id'][.=4]"); + // string: regexp query - assertQ(req("q", "stringdv:/c[a-b]r/", "sort", "id_i asc"), + assertQ( + req("q", "stringdv:/c[a-b]r/", "sort", "id_i asc"), "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=4]" - ); - + "//result/doc[1]/str[@name='id'][.=4]"); + // float: termquery - assertQ(req("q", "floatdv:3", "sort", "id_i asc"), + assertQ( + req("q", "floatdv:3", "sort", "id_i asc"), "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.=3]", - 
"//result/doc[2]/str[@name='id'][.=4]" - ); - + "//result/doc[2]/str[@name='id'][.=4]"); + // float: rangequery - assertQ(req("q", "floatdv:[2 TO 3]", "sort", "id_i asc"), + assertQ( + req("q", "floatdv:[2 TO 3]", "sort", "id_i asc"), "//*[@numFound='3']", "//result/doc[1]/str[@name='id'][.=1]", "//result/doc[2]/str[@name='id'][.=3]", - "//result/doc[3]/str[@name='id'][.=4]" - ); - + "//result/doc[3]/str[@name='id'][.=4]"); + // (neg) float: termquery - assertQ(req("q", "floatdv:\"-5\"", "sort", "id_i asc"), - "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=2]" - ); + assertQ( + req("q", "floatdv:\"-5\"", "sort", "id_i asc"), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.=2]"); // (neg) float: rangequery - assertQ(req("q", "floatdv:[-6 TO -4]", "sort", "id_i asc"), - "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=2]" - ); - + assertQ( + req("q", "floatdv:[-6 TO -4]", "sort", "id_i asc"), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.=2]"); + // (cross zero bounds) float: rangequery - assertQ(req("q", "floatdv:[-6 TO 2.1]", "sort", "id_i asc"), - "//*[@numFound='2']", - "//result/doc[1]/str[@name='id'][.=1]", - "//result/doc[2]/str[@name='id'][.=2]" - ); - + assertQ( + req("q", "floatdv:[-6 TO 2.1]", "sort", "id_i asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=1]", + "//result/doc[2]/str[@name='id'][.=2]"); + // int: termquery - assertQ(req("q", "intdv:1", "sort", "id_i asc"), - "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=3]" - ); - + assertQ( + req("q", "intdv:1", "sort", "id_i asc"), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.=3]"); + // int: rangequery - assertQ(req("q", "intdv:[3 TO 4]", "sort", "id_i asc"), - "//*[@numFound='2']", - "//result/doc[1]/str[@name='id'][.=1]", - "//result/doc[2]/str[@name='id'][.=2]" - ); - + assertQ( + req("q", "intdv:[3 TO 4]", "sort", "id_i asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=1]", + "//result/doc[2]/str[@name='id'][.=2]"); + // (neg) int: termquery - assertQ(req("q", "intdv:\"-1\"", "sort", "id_i asc"), - "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=4]" - ); - + assertQ( + req("q", "intdv:\"-1\"", "sort", "id_i asc"), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.=4]"); + // (neg) int: rangequery - assertQ(req("q", "intdv:[-1 TO 1]", "sort", "id_i asc"), - "//*[@numFound='2']", - "//result/doc[1]/str[@name='id'][.=3]", - "//result/doc[2]/str[@name='id'][.=4]" - ); + assertQ( + req("q", "intdv:[-1 TO 1]", "sort", "id_i asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=3]", + "//result/doc[2]/str[@name='id'][.=4]"); // long: termquery - assertQ(req("q", "longdv:1", "sort", "id_i asc"), + assertQ( + req("q", "longdv:1", "sort", "id_i asc"), "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.=3]", - "//result/doc[2]/str[@name='id'][.=4]" - ); - + "//result/doc[2]/str[@name='id'][.=4]"); + // long: rangequery - assertQ(req("q", "longdv:[1 TO 2]", "sort", "id_i asc"), + assertQ( + req("q", "longdv:[1 TO 2]", "sort", "id_i asc"), "//*[@numFound='3']", "//result/doc[1]/str[@name='id'][.=2]", "//result/doc[2]/str[@name='id'][.=3]", - "//result/doc[3]/str[@name='id'][.=4]" - ); - + "//result/doc[3]/str[@name='id'][.=4]"); + // double: termquery - assertQ(req("q", "doubledv:3.1", "sort", "id_i asc"), - "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=1]" - ); - + assertQ( + req("q", "doubledv:3.1", "sort", "id_i asc"), + "//*[@numFound='1']", + 
"//result/doc[1]/str[@name='id'][.=1]"); + // double: rangequery - assertQ(req("q", "doubledv:[2 TO 3.3]", "sort", "id_i asc"), - "//*[@numFound='2']", - "//result/doc[1]/str[@name='id'][.=1]", - "//result/doc[2]/str[@name='id'][.=3]" - ); - + assertQ( + req("q", "doubledv:[2 TO 3.3]", "sort", "id_i asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=1]", + "//result/doc[2]/str[@name='id'][.=3]"); + // (neg) double: termquery - assertQ(req("q", "doubledv:\"-4.3\"", "sort", "id_i asc"), - "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=2]" - ); - + assertQ( + req("q", "doubledv:\"-4.3\"", "sort", "id_i asc"), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.=2]"); + // (neg) double: rangequery - assertQ(req("q", "doubledv:[-6 TO -4]", "sort", "id_i asc"), - "//*[@numFound='1']", - "//result/doc[1]/str[@name='id'][.=2]" - ); - + assertQ( + req("q", "doubledv:[-6 TO -4]", "sort", "id_i asc"), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.=2]"); + // (cross zero bounds) double: rangequery - assertQ(req("q", "doubledv:[-6 TO 2.0]", "sort", "id_i asc"), - "//*[@numFound='2']", - "//result/doc[1]/str[@name='id'][.=2]", - "//result/doc[2]/str[@name='id'][.=4]" - ); + assertQ( + req("q", "doubledv:[-6 TO 2.0]", "sort", "id_i asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=2]", + "//result/doc[2]/str[@name='id'][.=4]"); // boolean basic queries: - assertQ(req("q", "booldv:false", "sort", "id_i asc"), + assertQ( + req("q", "booldv:false", "sort", "id_i asc"), "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.=1]", - "//result/doc[2]/str[@name='id'][.=3]" - ); + "//result/doc[2]/str[@name='id'][.=3]"); - assertQ(req("q", "booldv:true", "sort", "id_i asc"), + assertQ( + req("q", "booldv:true", "sort", "id_i asc"), "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.=2]", - "//result/doc[2]/str[@name='id'][.=4]" - ); - + "//result/doc[2]/str[@name='id'][.=4]"); } @Test public void testFloatAndDoubleRangeQueryRandom() throws Exception { String fieldName[] = new String[] {"floatdv", "doubledv"}; - - Number largestNegative[] = new Number[] {0f-Float.MIN_NORMAL, 0f-Double.MIN_NORMAL}; + + Number largestNegative[] = new Number[] {0f - Float.MIN_NORMAL, 0f - Double.MIN_NORMAL}; Number smallestPositive[] = new Number[] {Float.MIN_NORMAL, Double.MIN_NORMAL}; Number positiveInfinity[] = new Number[] {Float.POSITIVE_INFINITY, Double.POSITIVE_INFINITY}; Number negativeInfinity[] = new Number[] {Float.NEGATIVE_INFINITY, Double.NEGATIVE_INFINITY}; Number largestValue[] = new Number[] {Float.MAX_VALUE, Double.MAX_VALUE}; Number zero[] = new Number[] {0f, 0d}; - Function,Number> noNaN = (next) - -> { Number num; while (String.valueOf(num = next.get()).equals("NaN")); return num; }; - List> nextRandNoNaN = Arrays.asList( - () -> noNaN.apply(() -> Float.intBitsToFloat(random().nextInt())), - () -> noNaN.apply(() -> Double.longBitsToDouble(random().nextLong()))); - List> toSortableLong = Arrays.asList( - (num) -> (long)NumericUtils.floatToSortableInt(num.floatValue()), - (num) -> NumericUtils.doubleToSortableLong(num.doubleValue())); - - // Number minusZero[] = new Number[] {-0f, -0d}; // -0 == 0, so we should not treat them differently (and we should not guarantee that sign is preserved... 
we should be able to index both as 0) - - for (int i=0; i, Number> noNaN = + (next) -> { + Number num; + while (String.valueOf(num = next.get()).equals("NaN")) + ; + return num; + }; + List> nextRandNoNaN = + Arrays.asList( + () -> noNaN.apply(() -> Float.intBitsToFloat(random().nextInt())), + () -> noNaN.apply(() -> Double.longBitsToDouble(random().nextLong()))); + List> toSortableLong = + Arrays.asList( + (num) -> (long) NumericUtils.floatToSortableInt(num.floatValue()), + (num) -> NumericUtils.doubleToSortableLong(num.doubleValue())); + + // Number minusZero[] = new Number[] {-0f, -0d}; // -0 == 0, so we should not treat them + // differently (and we should not guarantee that sign is preserved... we should be able to index + // both as 0) + + for (int i = 0; i < fieldName.length; i++) { assertU(delQ("*:*")); commit(); - Number specialValues[] = new Number[] {largestNegative[i], smallestPositive[i], negativeInfinity[i], - largestValue[i], positiveInfinity[i], zero[i]}; + Number specialValues[] = + new Number[] { + largestNegative[i], + smallestPositive[i], + negativeInfinity[i], + largestValue[i], + positiveInfinity[i], + zero[i] + }; List values = new ArrayList<>(); int numDocs = 1 + random().nextInt(10); - for (int j=0; j tests = new ArrayList<>(); int counter = 0; - - for (int k=0; k=valSortable || !maxInclusive && maxSortable>valSortable || (max.equals("*") && val == positiveInfinity[i]))) { + + if ((minInclusive && minSortable <= valSortable + || !minInclusive && minSortable < valSortable + || (min.equals("*") && val == negativeInfinity[i])) + && (maxInclusive && maxSortable >= valSortable + || !maxInclusive && maxSortable > valSortable + || (max.equals("*") && val == positiveInfinity[i]))) { counter++; - tests.add("//result/doc["+counter+"]/str[@name='id'][.="+(k+1)+"]"); - tests.add("//result/doc["+counter+"]/float[@name='score'][.=1.0]"); + tests.add("//result/doc[" + counter + "]/str[@name='id'][.=" + (k + 1) + "]"); + tests.add("//result/doc[" + counter + "]/float[@name='score'][.=1.0]"); } } - tests.add(0, "//*[@numFound='"+counter+"']"); + tests.add(0, "//*[@numFound='" + counter + "']"); String testsArr[] = new String[tests.size()]; - for (int k=0; k enumStrs = ((AbstractEnumField)sf.getType()).getEnumMapping().enumStringToIntMap.keySet(); + Set enumStrs = + ((AbstractEnumField) sf.getType()).getEnumMapping().enumStringToIntMap.keySet(); assertTrue(enumStrs.size() > SolrQueryParser.TERMS_QUERY_THRESHOLD); Iterator enumStrIter = enumStrs.iterator(); - for (int i = 0 ; enumStrIter.hasNext() ; ++i) { + for (int i = 0; enumStrIter.hasNext(); ++i) { assertU(adoc("id", "" + i, FIELD_NAME, enumStrIter.next())); } assertU(commit()); - + StringBuilder builder = new StringBuilder(FIELD_NAME + ":("); enumStrs.forEach(v -> builder.append(v.replace(" ", "\\ ")).append(' ')); builder.append(')'); - + if (sf.indexed()) { // SolrQueryParser should also be generating a TermInSetQuery if indexed String setQuery = sf.getType().getSetQuery(null, sf, enumStrs).toString(); - if (sf.getType() instanceof EnumField) { // Trie field TermInSetQuery non-XML chars serialize with "#XX;" syntax + if (sf.getType() + instanceof + EnumField) { // Trie field TermInSetQuery non-XML chars serialize with "#XX;" syntax Pattern nonXMLCharPattern = Pattern.compile("[\u0000-\u0008\u000B\u000C\u000E-\u0019]"); StringBuffer munged = new StringBuffer(); Matcher matcher = nonXMLCharPattern.matcher(setQuery); while (matcher.find()) { - matcher.appendReplacement(munged, "#" + (int)matcher.group(0).charAt(0) + ";"); + 
matcher.appendReplacement(munged, "#" + (int) matcher.group(0).charAt(0) + ";"); } matcher.appendTail(munged); setQuery = munged.toString(); } - assertQ(req(CommonParams.DEBUG, CommonParams.QUERY, "q", "*:*", "fq", builder.toString(), - "fl", "id," + FIELD_NAME, "rows", "" + enumStrs.size(), "indent", "on"), + assertQ( + req( + CommonParams.DEBUG, + CommonParams.QUERY, + "q", + "*:*", + "fq", + builder.toString(), + "fl", + "id," + FIELD_NAME, + "rows", + "" + enumStrs.size(), + "indent", + "on"), "//*[@numFound='" + enumStrs.size() + "']", - "//*[@name='parsed_filter_queries']/str[normalize-space(.)=normalize-space('TermInSetQuery(" + setQuery + ")')]"); // fix [\r\n] problems + "//*[@name='parsed_filter_queries']/str[normalize-space(.)=normalize-space('TermInSetQuery(" + + setQuery + + ")')]"); // fix [\r\n] problems } else { // Won't use TermInSetQuery if the field is not indexed, but should match the same docs - assertQ(req(CommonParams.DEBUG, CommonParams.QUERY, "q", "*:*", "fq", builder.toString(), "fl", "id," + FIELD_NAME, "indent", "on"), + assertQ( + req( + CommonParams.DEBUG, + CommonParams.QUERY, + "q", + "*:*", + "fq", + builder.toString(), + "fl", + "id," + FIELD_NAME, + "indent", + "on"), "//*[@numFound='" + enumStrs.size() + "']"); } } @Test public void testMultivaluedSetQuery() throws Exception { - assumeFalse("Skipping testing of EnumFieldType without docValues, which is unsupported.", + assumeFalse( + "Skipping testing of EnumFieldType without docValues, which is unsupported.", System.getProperty("solr.tests.EnumFieldType").equals("solr.EnumFieldType") && System.getProperty("solr.tests.numeric.dv").equals("false")); - assumeFalse("Skipping testing of unindexed EnumField without docValues, which is unsupported.", + assumeFalse( + "Skipping testing of unindexed EnumField without docValues, which is unsupported.", System.getProperty("solr.tests.EnumFieldType").equals("solr.EnumField") && System.getProperty("solr.tests.EnumFieldTest.indexed").equals("false") && System.getProperty("solr.tests.numeric.dv").equals("false")); @@ -519,15 +619,16 @@ public void testMultivaluedSetQuery() throws Exception { clearIndex(); SchemaField sf = h.getCore().getLatestSchema().getField(MV_FIELD_NAME); - Set enumStrs = ((AbstractEnumField)sf.getType()).getEnumMapping().enumStringToIntMap.keySet(); + Set enumStrs = + ((AbstractEnumField) sf.getType()).getEnumMapping().enumStringToIntMap.keySet(); assertTrue(enumStrs.size() > SolrQueryParser.TERMS_QUERY_THRESHOLD); Iterator enumStrIter = enumStrs.iterator(); String prevEnumStr = "x18"; // wrap around - for (int i = 0 ; enumStrIter.hasNext() ; ++i) { + for (int i = 0; enumStrIter.hasNext(); ++i) { String thisEnumStr = enumStrIter.next(); assertU(adoc("id", "" + i, MV_FIELD_NAME, thisEnumStr, MV_FIELD_NAME, prevEnumStr)); - prevEnumStr = thisEnumStr; + prevEnumStr = thisEnumStr; } assertU(commit()); @@ -537,50 +638,78 @@ public void testMultivaluedSetQuery() throws Exception { if (sf.indexed()) { // SolrQueryParser should also be generating a TermInSetQuery if indexed String setQuery = sf.getType().getSetQuery(null, sf, enumStrs).toString(); - if (sf.getType() instanceof EnumField) { // Trie field TermInSetQuery non-XML chars serialize with "#XX;" syntax + // Trie field TermInSetQuery non-XML chars serialize with "#XX;" syntax + if (sf.getType() instanceof EnumField) { Pattern nonXMLCharPattern = Pattern.compile("[\u0000-\u0008\u000B\u000C\u000E-\u0019]"); StringBuffer munged = new StringBuffer(); Matcher matcher = 
nonXMLCharPattern.matcher(setQuery); while (matcher.find()) { - matcher.appendReplacement(munged, "#" + (int)matcher.group(0).charAt(0) + ";"); + matcher.appendReplacement(munged, "#" + (int) matcher.group(0).charAt(0) + ";"); } matcher.appendTail(munged); setQuery = munged.toString(); } - assertQ(req(CommonParams.DEBUG, CommonParams.QUERY, "q", "*:*", "fq", builder.toString(), - "fl", "id," + MV_FIELD_NAME, "rows", "" + enumStrs.size(), "indent", "on"), + assertQ( + req( + CommonParams.DEBUG, + CommonParams.QUERY, + "q", + "*:*", + "fq", + builder.toString(), + "fl", + "id," + MV_FIELD_NAME, + "rows", + "" + enumStrs.size(), + "indent", + "on"), "//*[@numFound='" + enumStrs.size() + "']", - "//*[@name='parsed_filter_queries']/str[normalize-space(.)=normalize-space('TermInSetQuery(" + setQuery + ")')]"); // fix [\r\n] problems + "//*[@name='parsed_filter_queries']/str[normalize-space(.)=normalize-space('TermInSetQuery(" + + setQuery + + ")')]"); // fix [\r\n] problems } else { // Won't use TermInSetQuery if the field is not indexed, but should match the same docs - assertQ(req(CommonParams.DEBUG, CommonParams.QUERY, "q", "*:*", "fq", builder.toString(), "fl", "id," + MV_FIELD_NAME, "indent", "on"), + assertQ( + req( + CommonParams.DEBUG, + CommonParams.QUERY, + "q", + "*:*", + "fq", + builder.toString(), + "fl", + "id," + MV_FIELD_NAME, + "indent", + "on"), "//*[@numFound='" + enumStrs.size() + "']"); } } @Test public void testEnumFieldTypeWithoutDocValues() throws Exception { - assumeTrue("Only testing EnumFieldType without docValues.", + assumeTrue( + "Only testing EnumFieldType without docValues.", System.getProperty("solr.tests.EnumFieldType").equals("solr.EnumFieldType") && System.getProperty("solr.tests.numeric.dv").equals("false")); try { deleteCore(); initCore("solrconfig-minimal.xml", "bad-schema-enums.xml"); - SolrException e = expectThrows(SolrException.class, - () -> assertU(adoc("id", "0", FIELD_NAME, "Not Available"))); + SolrException e = + expectThrows( + SolrException.class, () -> assertU(adoc("id", "0", FIELD_NAME, "Not Available"))); assertTrue(e.getMessage().contains("EnumFieldType requires docValues=\"true\"")); } finally { // put back the core expected by other tests deleteCore(); doInitCore(); } } - + @SuppressWarnings("unchecked") @Test public void testFacetEnumSearch() throws Exception { - assumeFalse("This requires docValues", - System.getProperty("solr.tests.numeric.dv").equals("false")); + assumeFalse( + "This requires docValues", System.getProperty("solr.tests.numeric.dv").equals("false")); clearIndex(); @@ -592,11 +721,17 @@ public void testFacetEnumSearch() throws Exception { assertU(adoc("id", "5", FIELD_NAME, "Critical")); assertU(commit()); - - final String jsonFacetParam = "{ " + FIELD_NAME + " : { type : terms, field : " + FIELD_NAME + ", "+ - "missing : true, exists : true, allBuckets : true, method : enum }}"; - assertQ(req("fl", "" + FIELD_NAME, "q", FIELD_NAME + ":*", "json.facet", jsonFacetParam), + final String jsonFacetParam = + "{ " + + FIELD_NAME + + " : { type : terms, field : " + + FIELD_NAME + + ", " + + "missing : true, exists : true, allBuckets : true, method : enum }}"; + + assertQ( + req("fl", "" + FIELD_NAME, "q", FIELD_NAME + ":*", "json.facet", jsonFacetParam), "//*[@name='facets']/long/text()=6", "//*[@name='allBuckets']/long/text()=6", "//*[@name='buckets']/lst[long[@name='count'][.='2']][str[@name='val'][.='Critical']]", @@ -605,11 +740,25 @@ public void testFacetEnumSearch() throws Exception { 
"//*[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='Medium']]", "//*[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='High']]"); - try (SolrQueryRequest req = req("fl", "" + FIELD_NAME, "q", FIELD_NAME + ":*", "json.facet", jsonFacetParam, "wt", "json")) { - SolrQueryResponse rsp = h.queryAndResponse(req.getParams().get(CommonParams.QT),req); - List> buckets = (List>) ((NamedList)((NamedList)rsp.getValues().get("facets")).get("severity")).get("buckets"); + try (SolrQueryRequest req = + req( + "fl", + "" + FIELD_NAME, + "q", + FIELD_NAME + ":*", + "json.facet", + jsonFacetParam, + "wt", + "json")) { + SolrQueryResponse rsp = h.queryAndResponse(req.getParams().get(CommonParams.QT), req); + List> buckets = + (List>) + ((NamedList) ((NamedList) rsp.getValues().get("facets")).get("severity")) + .get("buckets"); for (NamedList bucket : buckets) { - assertTrue("Bucket value must be instance of EnumFieldVale!", bucket.get("val") instanceof EnumFieldValue); + assertTrue( + "Bucket value must be instance of EnumFieldVale!", + bucket.get("val") instanceof EnumFieldValue); } } } diff --git a/solr/core/src/test/org/apache/solr/schema/ExternalFileFieldSortTest.java b/solr/core/src/test/org/apache/solr/schema/ExternalFileFieldSortTest.java index ee56edbee18..a32ea1cbdee 100644 --- a/solr/core/src/test/org/apache/solr/schema/ExternalFileFieldSortTest.java +++ b/solr/core/src/test/org/apache/solr/schema/ExternalFileFieldSortTest.java @@ -16,21 +16,20 @@ */ package org.apache.solr.schema; +import java.io.File; +import java.io.IOException; import org.apache.commons.io.FileUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.junit.Test; -import java.io.File; -import java.io.IOException; - public class ExternalFileFieldSortTest extends SolrTestCaseJ4 { static void updateExternalFile() throws IOException { final String testHome = SolrTestCaseJ4.getFile("solr/collection1").getParent(); String filename = "external_eff"; - FileUtils.copyFile(new File(testHome + "/" + filename), - new File(h.getCore().getDataDir() + "/" + filename)); + FileUtils.copyFile( + new File(testHome + "/" + filename), new File(h.getCore().getDataDir() + "/" + filename)); } private void addDocuments() { @@ -47,7 +46,8 @@ public void testSort() throws Exception { updateExternalFile(); addDocuments(); - assertQ("query", + assertQ( + "query", req("q", "*:*", "sort", "eff asc"), "//result/doc[position()=1]/str[.='3']", "//result/doc[position()=2]/str[.='1']", @@ -55,12 +55,13 @@ public void testSort() throws Exception { assertQ("test exists", req("q", "*:*", "sort", "exists(eff) desc")); } - + @Test public void testPointKeyFieldType() throws Exception { // This one should fail though, no "node" parameter specified - SolrException e = expectThrows(SolrException.class, - () -> initCore("solrconfig-basic.xml", "bad-schema-eff.xml")); + SolrException e = + expectThrows( + SolrException.class, () -> initCore("solrconfig-basic.xml", "bad-schema-eff.xml")); assertTrue(e.getMessage().contains("has a Point field type, which is not supported.")); } } diff --git a/solr/core/src/test/org/apache/solr/schema/IndexSchemaRuntimeFieldTest.java b/solr/core/src/test/org/apache/solr/schema/IndexSchemaRuntimeFieldTest.java index 3237b654c4c..504409ab6d0 100644 --- a/solr/core/src/test/org/apache/solr/schema/IndexSchemaRuntimeFieldTest.java +++ b/solr/core/src/test/org/apache/solr/schema/IndexSchemaRuntimeFieldTest.java @@ -28,7 +28,7 @@ public class 
IndexSchemaRuntimeFieldTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); } @Test @@ -41,31 +41,33 @@ public void testRuntimeFieldCreation() { SolrCore core = h.getCore(); IndexSchema schema = core.getLatestSchema(); final String fieldName = "runtimefield"; - SchemaField sf = new SchemaField( fieldName, schema.getFieldTypes().get( "string" ) ); - schema.getFields().put( fieldName, sf ); + SchemaField sf = new SchemaField(fieldName, schema.getFieldTypes().get("string")); + schema.getFields().put(fieldName, sf); // also register a new copy field (from our new field) - schema.registerCopyField( fieldName, "dynamic_runtime" ); + schema.registerCopyField(fieldName, "dynamic_runtime"); schema.refreshAnalyzers(); assertU(adoc("id", "10", "title", "test", fieldName, "aaa")); assertU(commit()); - SolrQuery query = new SolrQuery( fieldName+":aaa" ); - query.set( "indent", "true" ); - SolrQueryRequest req = new LocalSolrQueryRequest( core, query ); + SolrQuery query = new SolrQuery(fieldName + ":aaa"); + query.set("indent", "true"); + SolrQueryRequest req = new LocalSolrQueryRequest(core, query); - assertQ("Make sure they got in", req - ,"//*[@numFound='1']" - ,"//result/doc[1]/str[@name='id'][.='10']" - ); + assertQ( + "Make sure they got in", + req, + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.='10']"); // Check to see if our copy field made it out safely - query.setQuery( "dynamic_runtime:aaa" ); - assertQ("Make sure they got in", req - ,"//*[@numFound='1']" - ,"//result/doc[1]/str[@name='id'][.='10']" - ); + query.setQuery("dynamic_runtime:aaa"); + assertQ( + "Make sure they got in", + req, + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.='10']"); clearIndex(); } } diff --git a/solr/core/src/test/org/apache/solr/schema/IndexSchemaTest.java b/solr/core/src/test/org/apache/solr/schema/IndexSchemaTest.java index 6bfa14fe27f..aabf95a3145 100644 --- a/solr/core/src/test/org/apache/solr/schema/IndexSchemaTest.java +++ b/solr/core/src/test/org/apache/solr/schema/IndexSchemaTest.java @@ -16,7 +16,9 @@ */ package org.apache.solr.schema; - +import java.util.Date; +import java.util.HashMap; +import java.util.Map; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.solr.SolrTestCaseJ4; @@ -28,15 +30,10 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.util.Date; -import java.util.HashMap; -import java.util.Map; - - public class IndexSchemaTest extends SolrTestCaseJ4 { - final private static String solrConfigFileName = "solrconfig.xml"; - final private static String schemaFileName = "schema.xml"; + private static final String solrConfigFileName = "solrconfig.xml"; + private static final String schemaFileName = "schema.xml"; @BeforeClass public static void beforeClass() throws Exception { @@ -44,44 +41,42 @@ public static void beforeClass() throws Exception { } /** - * This test assumes the schema includes: - * <dynamicField name="dynamic_*" type="string" indexed="true" stored="true"/> - * <dynamicField name="*_dynamic" type="string" indexed="true" stored="true"/> + * This test assumes the schema includes: <dynamicField name="dynamic_*" type="string" + * indexed="true" stored="true"/> <dynamicField name="*_dynamic" type="string" + * indexed="true" stored="true"/> */ @Test - public void testDynamicCopy() - { + public void testDynamicCopy() { SolrCore core = h.getCore(); 
assertU(adoc("id", "10", "title", "test", "aaa_dynamic", "aaa")); assertU(commit()); - Map args = new HashMap<>(); - args.put( CommonParams.Q, "title:test" ); - args.put( "indent", "true" ); - SolrQueryRequest req = new LocalSolrQueryRequest( core, new MapSolrParams( args) ); + Map args = new HashMap<>(); + args.put(CommonParams.Q, "title:test"); + args.put("indent", "true"); + SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); - assertQ("Make sure they got in", req - ,"//*[@numFound='1']" - ,"//result/doc[1]/str[@name='id'][.='10']" - ); + assertQ( + "Make sure they got in", + req, + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.='10']"); args = new HashMap<>(); - args.put( CommonParams.Q, "aaa_dynamic:aaa" ); - args.put( "indent", "true" ); - req = new LocalSolrQueryRequest( core, new MapSolrParams( args) ); - assertQ("dynamic source", req - ,"//*[@numFound='1']" - ,"//result/doc[1]/str[@name='id'][.='10']" - ); + args.put(CommonParams.Q, "aaa_dynamic:aaa"); + args.put("indent", "true"); + req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); + assertQ("dynamic source", req, "//*[@numFound='1']", "//result/doc[1]/str[@name='id'][.='10']"); args = new HashMap<>(); - args.put( CommonParams.Q, "dynamic_aaa:aaa" ); - args.put( "indent", "true" ); - req = new LocalSolrQueryRequest( core, new MapSolrParams( args) ); - assertQ("dynamic destination", req - ,"//*[@numFound='1']" - ,"//result/doc[1]/str[@name='id'][.='10']" - ); + args.put(CommonParams.Q, "dynamic_aaa:aaa"); + args.put("indent", "true"); + req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); + assertQ( + "dynamic destination", + req, + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.='10']"); clearIndex(); } @@ -89,13 +84,13 @@ public void testDynamicCopy() public void testIsDynamicField() throws Exception { SolrCore core = h.getCore(); IndexSchema schema = core.getLatestSchema(); - assertFalse( schema.isDynamicField( "id" ) ); - assertTrue( schema.isDynamicField( "aaa_i" ) ); - assertFalse( schema.isDynamicField( "no_such_field" ) ); + assertFalse(schema.isDynamicField("id")); + assertTrue(schema.isDynamicField("aaa_i")); + assertFalse(schema.isDynamicField("no_such_field")); } @Test - public void testProperties() throws Exception{ + public void testProperties() throws Exception { SolrCore core = h.getCore(); IndexSchema schema = core.getLatestSchema(); assertFalse(schema.getField("id").multiValued()); @@ -103,18 +98,24 @@ public void testProperties() throws Exception{ final String dateClass = RANDOMIZED_NUMERIC_FIELDTYPES.get(Date.class); final boolean usingPoints = Boolean.getBoolean(NUMERIC_POINTS_SYSPROP); // Test TrieDate fields. The following asserts are expecting a field type defined as: - String expectedDefinition = ""; + String expectedDefinition = + ""; FieldType tdatedv = schema.getFieldType("foo_tdtdvs"); - assertTrue("Expecting a field type defined as " + expectedDefinition, - (usingPoints ? DatePointField.class : TrieDateField.class).isInstance(tdatedv)); - assertTrue("Expecting a field type defined as " + expectedDefinition, - tdatedv.hasProperty(FieldProperties.DOC_VALUES)); - assertTrue("Expecting a field type defined as " + expectedDefinition, - tdatedv.isMultiValued()); - if ( ! usingPoints ) { - assertEquals("Expecting a field type defined as " + expectedDefinition, - 6, ((TrieDateField)tdatedv).getPrecisionStep()); + assertTrue( + "Expecting a field type defined as " + expectedDefinition, + (usingPoints ? 
DatePointField.class : TrieDateField.class).isInstance(tdatedv)); + assertTrue( + "Expecting a field type defined as " + expectedDefinition, + tdatedv.hasProperty(FieldProperties.DOC_VALUES)); + assertTrue("Expecting a field type defined as " + expectedDefinition, tdatedv.isMultiValued()); + if (!usingPoints) { + assertEquals( + "Expecting a field type defined as " + expectedDefinition, + 6, + ((TrieDateField) tdatedv).getPrecisionStep()); } } diff --git a/solr/core/src/test/org/apache/solr/schema/IntPointPrefixActsAsRangeQueryFieldType.java b/solr/core/src/test/org/apache/solr/schema/IntPointPrefixActsAsRangeQueryFieldType.java index e9acdee8186..3112db56a1c 100644 --- a/solr/core/src/test/org/apache/solr/schema/IntPointPrefixActsAsRangeQueryFieldType.java +++ b/solr/core/src/test/org/apache/solr/schema/IntPointPrefixActsAsRangeQueryFieldType.java @@ -20,8 +20,9 @@ import org.apache.solr.search.QParser; /** - * Custom field type that overrides the prefix query behavior to map "X*" to [X TO Integer.MAX_VALUE]. - * * This is used for testing overridden prefix query for custom fields in TestOverriddenPrefixQueryForCustomFieldType + * Custom field type that overrides the prefix query behavior to map "X*" to [X TO + * Integer.MAX_VALUE]. This is used for testing overridden prefix query for custom fields in + * TestOverriddenPrefixQueryForCustomFieldType * * @see TrieIntPrefixActsAsRangeQueryFieldType */ @@ -30,5 +31,4 @@ public class IntPointPrefixActsAsRangeQueryFieldType extends IntPointField { public Query getPrefixQuery(QParser parser, SchemaField sf, String termStr) { return getRangeQuery(parser, sf, termStr, Integer.MAX_VALUE + "", true, false); } - } diff --git a/solr/core/src/test/org/apache/solr/schema/ManagedSchemaRoundRobinCloudTest.java b/solr/core/src/test/org/apache/solr/schema/ManagedSchemaRoundRobinCloudTest.java index fcee288f611..9cd45bcd9dc 100644 --- a/solr/core/src/test/org/apache/solr/schema/ManagedSchemaRoundRobinCloudTest.java +++ b/solr/core/src/test/org/apache/solr/schema/ManagedSchemaRoundRobinCloudTest.java @@ -22,7 +22,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; - import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -47,8 +46,13 @@ public static void setupCluster() throws Exception { configureCluster(NUM_SHARDS).addConfig(CONFIG, configset(CONFIG)).configure(); CollectionAdminRequest.createCollection(COLLECTION, CONFIG, NUM_SHARDS, 1) .process(cluster.getSolrClient()); - cluster.getSolrClient().waitForState(COLLECTION, DEFAULT_TIMEOUT, TimeUnit.SECONDS, - (n, c) -> DocCollection.isFullyActive(n, c, NUM_SHARDS, 1)); + cluster + .getSolrClient() + .waitForState( + COLLECTION, + DEFAULT_TIMEOUT, + TimeUnit.SECONDS, + (n, c) -> DocCollection.isFullyActive(n, c, NUM_SHARDS, 1)); } @AfterClass @@ -60,37 +64,42 @@ public static void clearSysProps() throws Exception { public void testAddFieldsRoundRobin() throws Exception { List clients = new ArrayList<>(NUM_SHARDS); try { - for (int shardNum = 0 ; shardNum < NUM_SHARDS ; ++shardNum) { - clients.add(getHttpSolrClient(cluster.getJettySolrRunners().get(shardNum).getBaseUrl().toString())); + for (int shardNum = 0; shardNum < NUM_SHARDS; ++shardNum) { + clients.add( + getHttpSolrClient(cluster.getJettySolrRunners().get(shardNum).getBaseUrl().toString())); } int shardNum = 0; - for (int fieldNum = 0 ; fieldNum < NUM_FIELDS_TO_ADD ; ++fieldNum) { -
addField(clients.get(shardNum), keyValueArrayToMap("name", FIELD_PREFIX + fieldNum, "type", "string")); - if (++shardNum == NUM_SHARDS) { + for (int fieldNum = 0; fieldNum < NUM_FIELDS_TO_ADD; ++fieldNum) { + addField( + clients.get(shardNum), + keyValueArrayToMap("name", FIELD_PREFIX + fieldNum, "type", "string")); + if (++shardNum == NUM_SHARDS) { shardNum = 0; } } } finally { - for (int shardNum = 0 ; shardNum < NUM_SHARDS ; ++shardNum) { + for (int shardNum = 0; shardNum < NUM_SHARDS; ++shardNum) { clients.get(shardNum).close(); } } } - private void addField(SolrClient client, Map field) throws Exception { - SchemaResponse.UpdateResponse addFieldResponse = new SchemaRequest.AddField(field).process(client, COLLECTION); + private void addField(SolrClient client, Map field) throws Exception { + SchemaResponse.UpdateResponse addFieldResponse = + new SchemaRequest.AddField(field).process(client, COLLECTION); assertNotNull(addFieldResponse); assertEquals(0, addFieldResponse.getStatus()); assertNull(addFieldResponse.getResponse().get("errors")); String fieldName = field.get("name").toString(); - SchemaResponse.FieldResponse fieldResponse = new SchemaRequest.Field(fieldName).process(client, COLLECTION); + SchemaResponse.FieldResponse fieldResponse = + new SchemaRequest.Field(fieldName).process(client, COLLECTION); assertNotNull(fieldResponse); assertEquals(0, fieldResponse.getStatus()); } - private Map keyValueArrayToMap(String... alternatingKeysAndValues) { - Map map = new HashMap<>(); - for (int i = 0 ; i < alternatingKeysAndValues.length ; i += 2) + private Map keyValueArrayToMap(String... alternatingKeysAndValues) { + Map map = new HashMap<>(); + for (int i = 0; i < alternatingKeysAndValues.length; i += 2) map.put(alternatingKeysAndValues[i], alternatingKeysAndValues[i + 1]); return map; } diff --git a/solr/core/src/test/org/apache/solr/schema/MyCrazyCustomField.java b/solr/core/src/test/org/apache/solr/schema/MyCrazyCustomField.java index 82f8f6cad85..a0228dbc393 100644 --- a/solr/core/src/test/org/apache/solr/schema/MyCrazyCustomField.java +++ b/solr/core/src/test/org/apache/solr/schema/MyCrazyCustomField.java @@ -17,7 +17,6 @@ package org.apache.solr.schema; import java.io.IOException; - import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; import org.apache.lucene.search.PrefixQuery; @@ -27,13 +26,12 @@ import org.apache.solr.search.QParser; /** - * Custom field that overrides the PrefixQuery behaviour to map queries such that: - * (foo* becomes bar*) and (bar* becomes foo*). - * This is used for testing overridden prefix query for custom fields in TestOverriddenPrefixQueryForCustomFieldType + * Custom field that overrides the PrefixQuery behaviour to map queries such that: (foo* becomes + * bar*) and (bar* becomes foo*). 
This is used for testing overridden prefix query for custom fields + * in TestOverriddenPrefixQueryForCustomFieldType */ public class MyCrazyCustomField extends TextField { - @Override public void write(TextResponseWriter writer, String name, IndexableField f) throws IOException { writer.writeStr(name, f.stringValue(), true); @@ -47,7 +45,7 @@ public SortField getSortField(final SchemaField field, final boolean reverse) { @Override public Query getPrefixQuery(QParser parser, SchemaField sf, String termStr) { - if(termStr.equals("foo")) { + if (termStr.equals("foo")) { termStr = "bar"; } else if (termStr.equals("bar")) { termStr = "foo"; diff --git a/solr/core/src/test/org/apache/solr/schema/NotRequiredUniqueKeyTest.java b/solr/core/src/test/org/apache/solr/schema/NotRequiredUniqueKeyTest.java index a21cb169064..8ac363a8505 100644 --- a/solr/core/src/test/org/apache/solr/schema/NotRequiredUniqueKeyTest.java +++ b/solr/core/src/test/org/apache/solr/schema/NotRequiredUniqueKeyTest.java @@ -22,28 +22,26 @@ import org.junit.Test; /** - * This is a simple test to make sure the unique key is not required - * when it is specified as 'false' - * - * It needs its own file so it can load a special schema file + * This is a simple test to make sure the unique key is not required when it is specified as 'false' + * + *
<p>
It needs its own file so it can load a special schema file */ public class NotRequiredUniqueKeyTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeTests() throws Exception { System.setProperty("enable.update.log", "false"); // usecase doesn't work with updateLog - initCore("solrconfig.xml","schema-not-required-unique-key.xml"); + initCore("solrconfig.xml", "schema-not-required-unique-key.xml"); } @Test - public void testSchemaLoading() - { + public void testSchemaLoading() { SolrCore core = h.getCore(); IndexSchema schema = core.getLatestSchema(); SchemaField uniqueKey = schema.getUniqueKeyField(); - - assertFalse( uniqueKey.isRequired() ); - - assertFalse( schema.getRequiredFields().contains( uniqueKey ) ); + + assertFalse(uniqueKey.isRequired()); + + assertFalse(schema.getRequiredFields().contains(uniqueKey)); } } diff --git a/solr/core/src/test/org/apache/solr/schema/NumericFieldsTest.java b/solr/core/src/test/org/apache/solr/schema/NumericFieldsTest.java index 17ded2f32e3..63a892c6fc9 100644 --- a/solr/core/src/test/org/apache/solr/schema/NumericFieldsTest.java +++ b/solr/core/src/test/org/apache/solr/schema/NumericFieldsTest.java @@ -21,14 +21,13 @@ import org.junit.BeforeClass; import org.junit.Test; - public class NumericFieldsTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig-basic.xml", "schema-numeric.xml"); } - static String[] types = new String[]{"int", "long", "float", "double", "date"}; + static String[] types = new String[] {"int", "long", "float", "double", "date"}; public static SolrInputDocument getDoc(String id, Integer number, String date) { SolrInputDocument doc = new SolrInputDocument(); @@ -59,182 +58,179 @@ public void testSortMissingFirstLast() { assertU(adoc(getDoc("-3", -3, "2011-01-01T00:00:00Z"))); assertU(adoc("id", "M2")); assertU(commit()); - + // 'normal' sorting. 
Missing Values are 0 String suffix = ""; for (String t : types) { if ("date".equals(t)) { - assertQ("Sorting Asc: " + t + suffix, + assertQ( + "Sorting Asc: " + t + suffix, req("fl", "id", "q", "*:*", "sort", (t + suffix) + " asc"), "//*[@numFound='5']", "//result/doc[1]/str[@name='id'][starts-with(.,'M')]", "//result/doc[2]/str[@name='id'][starts-with(.,'M')]", "//result/doc[3]/str[@name='id'][.='-3']", "//result/doc[4]/str[@name='id'][.='+4']", - "//result/doc[5]/str[@name='id'][.='+5']" - ); + "//result/doc[5]/str[@name='id'][.='+5']"); - assertQ("Sorting Desc: " + t + suffix, + assertQ( + "Sorting Desc: " + t + suffix, req("fl", "id", "q", "*:*", "sort", (t + suffix) + " desc"), "//*[@numFound='5']", "//result/doc[1]/str[@name='id'][.='+5']", "//result/doc[2]/str[@name='id'][.='+4']", "//result/doc[3]/str[@name='id'][.='-3']", "//result/doc[4]/str[@name='id'][starts-with(.,'M')]", - "//result/doc[5]/str[@name='id'][starts-with(.,'M')]" - ); - - assertQ("Sorting Asc w/secondary on id desc: " + t + suffix, + "//result/doc[5]/str[@name='id'][starts-with(.,'M')]"); + + assertQ( + "Sorting Asc w/secondary on id desc: " + t + suffix, req("fl", "id", "q", "*:*", "sort", (t + suffix) + " asc, id desc"), "//*[@numFound='5']", "//result/doc[1]/str[@name='id'][.='M2']", "//result/doc[2]/str[@name='id'][.='M1']", "//result/doc[3]/str[@name='id'][.='-3']", "//result/doc[4]/str[@name='id'][.='+4']", - "//result/doc[5]/str[@name='id'][.='+5']" - ); + "//result/doc[5]/str[@name='id'][.='+5']"); - assertQ("Sorting Desc w/secondary on id asc: " + t + suffix, + assertQ( + "Sorting Desc w/secondary on id asc: " + t + suffix, req("fl", "id", "q", "*:*", "sort", (t + suffix) + " desc, id asc"), "//*[@numFound='5']", "//result/doc[1]/str[@name='id'][.='+5']", "//result/doc[2]/str[@name='id'][.='+4']", "//result/doc[3]/str[@name='id'][.='-3']", "//result/doc[4]/str[@name='id'][.='M1']", - "//result/doc[5]/str[@name='id'][.='M2']" - ); + "//result/doc[5]/str[@name='id'][.='M2']"); } else { - assertQ("Sorting Asc: " + t + suffix, + assertQ( + "Sorting Asc: " + t + suffix, req("fl", "id", "q", "*:*", "sort", (t + suffix) + " asc"), "//*[@numFound='5']", "//result/doc[1]/str[@name='id'][.='-3']", "//result/doc[2]/str[@name='id'][starts-with(.,'M')]", "//result/doc[3]/str[@name='id'][starts-with(.,'M')]", "//result/doc[4]/str[@name='id'][.='+4']", - "//result/doc[5]/str[@name='id'][.='+5']" - ); + "//result/doc[5]/str[@name='id'][.='+5']"); - assertQ("Sorting Desc: " + t + suffix, + assertQ( + "Sorting Desc: " + t + suffix, req("fl", "id", "q", "*:*", "sort", (t + suffix) + " desc"), "//*[@numFound='5']", "//result/doc[1]/str[@name='id'][.='+5']", "//result/doc[2]/str[@name='id'][.='+4']", "//result/doc[3]/str[@name='id'][starts-with(.,'M')]", "//result/doc[4]/str[@name='id'][starts-with(.,'M')]", - "//result/doc[5]/str[@name='id'][.='-3']" - ); - - assertQ("Sorting Asc w/secondary on id desc: " + t + suffix, + "//result/doc[5]/str[@name='id'][.='-3']"); + + assertQ( + "Sorting Asc w/secondary on id desc: " + t + suffix, req("fl", "id", "q", "*:*", "sort", (t + suffix) + " asc, id desc"), "//*[@numFound='5']", "//result/doc[1]/str[@name='id'][.='-3']", "//result/doc[2]/str[@name='id'][.='M2']", "//result/doc[3]/str[@name='id'][.='M1']", "//result/doc[4]/str[@name='id'][.='+4']", - "//result/doc[5]/str[@name='id'][.='+5']" - ); + "//result/doc[5]/str[@name='id'][.='+5']"); - assertQ("Sorting Desc w/secondary on id asc: " + t + suffix, + assertQ( + "Sorting Desc w/secondary on id asc: " + t + suffix, req("fl", "id", 
"q", "*:*", "sort", (t + suffix) + " desc, id asc"), "//*[@numFound='5']", "//result/doc[1]/str[@name='id'][.='+5']", "//result/doc[2]/str[@name='id'][.='+4']", "//result/doc[3]/str[@name='id'][.='M1']", "//result/doc[4]/str[@name='id'][.='M2']", - "//result/doc[5]/str[@name='id'][.='-3']" - ); - + "//result/doc[5]/str[@name='id'][.='-3']"); } } - // sortMissingLast = true suffix = "_last"; for (String t : types) { - assertQ("Sorting Asc: " + t + suffix, + assertQ( + "Sorting Asc: " + t + suffix, req("fl", "id", "q", "*:*", "sort", (t + suffix) + " asc"), "//*[@numFound='5']", "//result/doc[1]/str[@name='id'][.='-3']", "//result/doc[2]/str[@name='id'][.='+4']", "//result/doc[3]/str[@name='id'][.='+5']", "//result/doc[4]/str[@name='id'][starts-with(.,'M')]", - "//result/doc[5]/str[@name='id'][starts-with(.,'M')]" - ); + "//result/doc[5]/str[@name='id'][starts-with(.,'M')]"); - assertQ("Sorting Desc: " + t + suffix, + assertQ( + "Sorting Desc: " + t + suffix, req("fl", "id", "q", "*:*", "sort", (t + suffix) + " desc"), "//*[@numFound='5']", "//result/doc[1]/str[@name='id'][.='+5']", "//result/doc[2]/str[@name='id'][.='+4']", "//result/doc[3]/str[@name='id'][.='-3']", "//result/doc[4]/str[@name='id'][starts-with(.,'M')]", - "//result/doc[5]/str[@name='id'][starts-with(.,'M')]" - ); + "//result/doc[5]/str[@name='id'][starts-with(.,'M')]"); - assertQ("Sorting Asc w/secondary on id desc: " + t + suffix, + assertQ( + "Sorting Asc w/secondary on id desc: " + t + suffix, req("fl", "id", "q", "*:*", "sort", (t + suffix) + " asc, id desc"), "//*[@numFound='5']", "//result/doc[1]/str[@name='id'][.='-3']", "//result/doc[2]/str[@name='id'][.='+4']", "//result/doc[3]/str[@name='id'][.='+5']", "//result/doc[4]/str[@name='id'][.='M2']", - "//result/doc[5]/str[@name='id'][.='M1']" - ); + "//result/doc[5]/str[@name='id'][.='M1']"); - assertQ("Sorting Desc w/secondary on id asc: " + t + suffix, + assertQ( + "Sorting Desc w/secondary on id asc: " + t + suffix, req("fl", "id", "q", "*:*", "sort", (t + suffix) + " desc, id asc"), "//*[@numFound='5']", "//result/doc[1]/str[@name='id'][.='+5']", "//result/doc[2]/str[@name='id'][.='+4']", "//result/doc[3]/str[@name='id'][.='-3']", "//result/doc[4]/str[@name='id'][.='M1']", - "//result/doc[5]/str[@name='id'][.='M2']" - ); + "//result/doc[5]/str[@name='id'][.='M2']"); } // sortMissingFirst = true suffix = "_first"; for (String t : types) { - assertQ("Sorting Asc: " + t + suffix, + assertQ( + "Sorting Asc: " + t + suffix, req("fl", "id", "q", "*:*", "sort", (t + suffix) + " asc"), "//*[@numFound='5']", "//result/doc[1]/str[@name='id'][starts-with(.,'M')]", "//result/doc[2]/str[@name='id'][starts-with(.,'M')]", "//result/doc[3]/str[@name='id'][.='-3']", "//result/doc[4]/str[@name='id'][.='+4']", - "//result/doc[5]/str[@name='id'][.='+5']" - ); + "//result/doc[5]/str[@name='id'][.='+5']"); - assertQ("Sorting Desc: " + t + suffix, + assertQ( + "Sorting Desc: " + t + suffix, req("fl", "id", "q", "*:*", "sort", (t + suffix) + " desc"), "//*[@numFound='5']", "//result/doc[1]/str[@name='id'][starts-with(.,'M')]", "//result/doc[2]/str[@name='id'][starts-with(.,'M')]", "//result/doc[3]/str[@name='id'][.='+5']", "//result/doc[4]/str[@name='id'][.='+4']", - "//result/doc[5]/str[@name='id'][.='-3']" - ); + "//result/doc[5]/str[@name='id'][.='-3']"); - assertQ("Sorting Asc w/secondary on id desc: " + t + suffix, + assertQ( + "Sorting Asc w/secondary on id desc: " + t + suffix, req("fl", "id", "q", "*:*", "sort", (t + suffix) + " asc, id desc"), "//*[@numFound='5']", 
"//result/doc[1]/str[@name='id'][.='M2']", "//result/doc[2]/str[@name='id'][.='M1']", "//result/doc[3]/str[@name='id'][.='-3']", "//result/doc[4]/str[@name='id'][.='+4']", - "//result/doc[5]/str[@name='id'][.='+5']" - ); + "//result/doc[5]/str[@name='id'][.='+5']"); - assertQ("Sorting Desc w/secondary on id asc: " + t + suffix, + assertQ( + "Sorting Desc w/secondary on id asc: " + t + suffix, req("fl", "id", "q", "*:*", "sort", (t + suffix) + " desc, id asc"), "//*[@numFound='5']", "//result/doc[1]/str[@name='id'][.='M1']", "//result/doc[2]/str[@name='id'][.='M2']", "//result/doc[3]/str[@name='id'][.='+5']", "//result/doc[4]/str[@name='id'][.='+4']", - "//result/doc[5]/str[@name='id'][.='-3']" - ); - + "//result/doc[5]/str[@name='id'][.='-3']"); } } } diff --git a/solr/core/src/test/org/apache/solr/schema/OpenExchangeRatesOrgProviderTest.java b/solr/core/src/test/org/apache/solr/schema/OpenExchangeRatesOrgProviderTest.java index 1cab50629be..7511ecb8b5d 100644 --- a/solr/core/src/test/org/apache/solr/schema/OpenExchangeRatesOrgProviderTest.java +++ b/solr/core/src/test/org/apache/solr/schema/OpenExchangeRatesOrgProviderTest.java @@ -15,9 +15,9 @@ * limitations under the License. */ package org.apache.solr.schema; + import java.util.HashMap; import java.util.Map; - import org.apache.lucene.util.ResourceLoader; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; @@ -26,49 +26,41 @@ import org.junit.Before; import org.junit.Test; -/** - * Tests currency field type. - */ +/** Tests currency field type. */ public class OpenExchangeRatesOrgProviderTest extends SolrTestCaseJ4 { - private final static long HARDCODED_TEST_TIMESTAMP = 1332070464L; + private static final long HARDCODED_TEST_TIMESTAMP = 1332070464L; OpenExchangeRatesOrgProvider oerp; ResourceLoader loader; - private final Map mockParams = new HashMap<>(); - + private final Map mockParams = new HashMap<>(); @Override @Before public void setUp() throws Exception { - CurrencyFieldTypeTest.assumeCurrencySupport - ("USD", "EUR", "MXN", "GBP", "JPY"); + CurrencyFieldTypeTest.assumeCurrencySupport("USD", "EUR", "MXN", "GBP", "JPY"); super.setUp(); - mockParams.put(OpenExchangeRatesOrgProvider.PARAM_RATES_FILE_LOCATION, - "open-exchange-rates.json"); + mockParams.put( + OpenExchangeRatesOrgProvider.PARAM_RATES_FILE_LOCATION, "open-exchange-rates.json"); oerp = new OpenExchangeRatesOrgProvider(); loader = new SolrResourceLoader(TEST_PATH().resolve("collection1")); } - + @Test public void testInit() throws Exception { oerp.init(mockParams); // don't inform, we don't want to hit any of these URLs - assertEquals("Wrong url", - "open-exchange-rates.json", oerp.ratesFileLocation); - assertEquals("Wrong default interval", (1440*60), oerp.refreshIntervalSeconds); + assertEquals("Wrong url", "open-exchange-rates.json", oerp.ratesFileLocation); + assertEquals("Wrong default interval", (1440 * 60), oerp.refreshIntervalSeconds); - Map params = new HashMap<>(); - params.put(OpenExchangeRatesOrgProvider.PARAM_RATES_FILE_LOCATION, - "http://foo.bar/baz"); + Map params = new HashMap<>(); + params.put(OpenExchangeRatesOrgProvider.PARAM_RATES_FILE_LOCATION, "http://foo.bar/baz"); params.put(OpenExchangeRatesOrgProvider.PARAM_REFRESH_INTERVAL, "100"); oerp.init(params); - assertEquals("Wrong param set url", - "http://foo.bar/baz", oerp.ratesFileLocation); - assertEquals("Wrong param interval", (100*60), oerp.refreshIntervalSeconds); - + assertEquals("Wrong param set url", "http://foo.bar/baz", oerp.ratesFileLocation); + 
assertEquals("Wrong param interval", (100 * 60), oerp.refreshIntervalSeconds); } @Test @@ -82,7 +74,7 @@ public void testList() { public void testGetExchangeRate() { oerp.init(mockParams); oerp.inform(loader); - assertEquals(81.29D, oerp.getExchangeRate("USD", "JPY"), 0.0D); + assertEquals(81.29D, oerp.getExchangeRate("USD", "JPY"), 0.0D); assertEquals("USD", oerp.rates.getBaseCurrency()); } @@ -100,18 +92,19 @@ public void testReload() { // modify the timestamp to be "current" then fetch a rate and ensure no reload final long currentTimestamp = System.currentTimeMillis() / 1000; oerp.rates.setTimestamp(currentTimestamp); - assertEquals(81.29D, oerp.getExchangeRate("USD", "JPY"), 0.0D); + assertEquals(81.29D, oerp.getExchangeRate("USD", "JPY"), 0.0D); assertEquals(currentTimestamp, oerp.rates.getTimestamp()); // roll back clock on timestamp and ensure rate fetch does reload oerp.rates.setTimestamp(currentTimestamp - (101 * 60)); - assertEquals(81.29D, oerp.getExchangeRate("USD", "JPY"), 0.0D); - assertEquals("timestamp wasn't reset to hardcoded value, indicating no reload", - HARDCODED_TEST_TIMESTAMP, oerp.rates.getTimestamp()); - + assertEquals(81.29D, oerp.getExchangeRate("USD", "JPY"), 0.0D); + assertEquals( + "timestamp wasn't reset to hardcoded value, indicating no reload", + HARDCODED_TEST_TIMESTAMP, + oerp.rates.getTimestamp()); } - @Test(expected=SolrException.class) + @Test(expected = SolrException.class) public void testNoInit() { oerp.getExchangeRate("ABC", "DEF"); assertTrue("Should have thrown exception if not initialized", false); diff --git a/solr/core/src/test/org/apache/solr/schema/PolyFieldTest.java b/solr/core/src/test/org/apache/solr/schema/PolyFieldTest.java index 317b542837f..70eb14db0c3 100644 --- a/solr/core/src/test/org/apache/solr/schema/PolyFieldTest.java +++ b/solr/core/src/test/org/apache/solr/schema/PolyFieldTest.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ package org.apache.solr.schema; + import java.util.Arrays; import java.util.List; - import org.apache.lucene.index.IndexableField; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.search.BooleanQuery; @@ -28,25 +28,21 @@ import org.junit.BeforeClass; import org.junit.Test; - -/** - * Test a whole slew of things related to PolyFields - */ +/** Test a whole slew of things related to PolyFields */ public class PolyFieldTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); } @Test public void testSchemaBasics() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); - SchemaField home = schema.getField("home"); assertNotNull(home); assertTrue(home.isPolyField()); - + String subFieldType = "double"; SchemaField[] dynFields = schema.getDynamicFieldPrototypes(); boolean seen = false; @@ -67,7 +63,7 @@ public void testSchemaBasics() throws Exception { home = schema.getField("home"); assertNotNull(home); - home = schema.getField("homed");//sub field suffix + home = schema.getField("homed"); // sub field suffix assertNotNull(home); assertTrue(home.isPolyField()); } @@ -83,31 +79,35 @@ public void testPointFieldType() throws Exception { assertTrue(tmp instanceof PointType); PointType pt = (PointType) tmp; assertEquals(pt.getDimension(), 2); - double[] xy = new double[]{35.0, -79.34}; + double[] xy = new double[] {35.0, -79.34}; String point = xy[0] + "," + xy[1]; List fields = home.createFields(point); assertNotNull(pt.getSubType()); - int expectdNumFields = 3;//If DV=false, we expect one field per dimension plus a stored field + int expectdNumFields = 3; // If DV=false, we expect one field per dimension plus a stored field if (pt.subField(home, 0, schema).hasDocValues()) { - expectdNumFields+=2; // If docValues=true, then we expect two more fields + expectdNumFields += 2; // If docValues=true, then we expect two more fields } - assertEquals("Unexpected fields created: " + Arrays.toString(fields.toArray()), expectdNumFields, fields.size()); - //first two/four fields contain the values, last one is just stored and contains the original + assertEquals( + "Unexpected fields created: " + Arrays.toString(fields.toArray()), + expectdNumFields, + fields.size()); + // first two/four fields contain the values, last one is just stored and contains the original for (int i = 0; i < expectdNumFields; i++) { - boolean hasValue = fields.get(i).binaryValue() != null - || fields.get(i).stringValue() != null - || fields.get(i).numericValue() != null; + boolean hasValue = + fields.get(i).binaryValue() != null + || fields.get(i).stringValue() != null + || fields.get(i).numericValue() != null; assertTrue("Doesn't have a value: " + fields.get(i), hasValue); } /*assertTrue("first field " + fields[0].tokenStreamValue() + " is not 35.0", pt.getSubType().toExternal(fields[0]).equals(String.valueOf(xy[0]))); assertTrue("second field is not -79.34", pt.getSubType().toExternal(fields[1]).equals(String.valueOf(xy[1]))); assertTrue("third field is not '35.0,-79.34'", pt.getSubType().toExternal(fields[2]).equals(point));*/ - home = schema.getField("home_ns"); assertNotNull(home); fields = home.createFields(point); - assertEquals(expectdNumFields - 1, fields.size(), 2);//one less field than with "home", since we aren't storing + // one less field than with "home", since we aren't storing + assertEquals(expectdNumFields - 1, fields.size(), 2); home = 
schema.getField("home_ns"); assertNotNull(home); @@ -129,35 +129,34 @@ public void testPointFieldType() throws Exception { @Test public void testSearching() throws Exception { for (int i = 0; i < 50; i++) { - assertU(adoc("id", "" + i, "home", i + "," + (i * 100), "homed", (i * 1000) + "," + (i * 10000))); + assertU( + adoc("id", "" + i, "home", i + "," + (i * 100), "homed", (i * 1000) + "," + (i * 10000))); } assertU(commit()); assertQ(req("fl", "*,score", "q", "*:*"), "//*[@numFound='50']"); - assertQ(req("fl", "*,score", "q", "home:1,100"), - "//*[@numFound='1']", - "//str[@name='home'][.='1,100']"); - assertQ(req("fl", "*,score", "q", "homed:1000,10000"), - "//*[@numFound='1']", - "//str[@name='homed'][.='1000,10000']"); - assertQ(req("fl", "*,score", "q", - "{!func}sqedist(home, vector(0, 0))"), - "\"//*[@numFound='50']\""); - assertQ(req("fl", "*,score", "q", - "{!func}dist(2, home, vector(0, 0))"), - "\"//*[@numFound='50']\""); - - assertQ(req("fl", "*,score", "q", - "home:[10,10000 TO 30,30000]"), - "\"//*[@numFound='3']\""); - assertQ(req("fl", "*,score", "q", - "homed:[1,1000 TO 2000,35000]"), - "\"//*[@numFound='2']\""); - //bad + assertQ( + req("fl", "*,score", "q", "home:1,100"), + "//*[@numFound='1']", + "//str[@name='home'][.='1,100']"); + assertQ( + req("fl", "*,score", "q", "homed:1000,10000"), + "//*[@numFound='1']", + "//str[@name='homed'][.='1000,10000']"); + assertQ( + req("fl", "*,score", "q", "{!func}sqedist(home, vector(0, 0))"), "\"//*[@numFound='50']\""); + assertQ( + req("fl", "*,score", "q", "{!func}dist(2, home, vector(0, 0))"), "\"//*[@numFound='50']\""); + + assertQ(req("fl", "*,score", "q", "home:[10,10000 TO 30,30000]"), "\"//*[@numFound='3']\""); + assertQ(req("fl", "*,score", "q", "homed:[1,1000 TO 2000,35000]"), "\"//*[@numFound='2']\""); + // bad ignoreException("dimension"); - assertQEx("Query should throw an exception due to incorrect dimensions", req("fl", "*,score", "q", - "homed:[1 TO 2000]"), SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Query should throw an exception due to incorrect dimensions", + req("fl", "*,score", "q", "homed:[1 TO 2000]"), + SolrException.ErrorCode.BAD_REQUEST); resetExceptionIgnores(); clearIndex(); } @@ -166,10 +165,10 @@ public void testSearching() throws Exception { public void testSearchDetails() throws Exception { SolrCore core = h.getCore(); IndexSchema schema = core.getLatestSchema(); - double[] xy = new double[]{35.0, -79.34}; + double[] xy = new double[] {35.0, -79.34}; String point = xy[0] + "," + xy[1]; - //How about some queries? - //don't need a parser for this path currently. This may change + // How about some queries? + // don't need a parser for this path currently. 
This may change assertU(adoc("id", "0", "home_ns", point)); assertU(commit()); SchemaField home = schema.getField("home_ns"); @@ -178,10 +177,9 @@ public void testSearchDetails() throws Exception { Query q = pt.getFieldQuery(null, home, point); assertNotNull(q); assertTrue(q instanceof BooleanQuery); - //should have two clauses, one for 35.0 and the other for -79.34 + // should have two clauses, one for 35.0 and the other for -79.34 BooleanQuery bq = (BooleanQuery) q; assertEquals(2, bq.clauses().size()); clearIndex(); } - } diff --git a/solr/core/src/test/org/apache/solr/schema/PreAnalyzedFieldManagedSchemaCloudTest.java b/solr/core/src/test/org/apache/solr/schema/PreAnalyzedFieldManagedSchemaCloudTest.java index b350883e31f..c44884b37b9 100644 --- a/solr/core/src/test/org/apache/solr/schema/PreAnalyzedFieldManagedSchemaCloudTest.java +++ b/solr/core/src/test/org/apache/solr/schema/PreAnalyzedFieldManagedSchemaCloudTest.java @@ -20,7 +20,6 @@ import java.util.HashMap; import java.util.Map; import java.util.concurrent.TimeUnit; - import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.schema.SchemaRequest; @@ -41,8 +40,13 @@ public static void setupCluster() throws Exception { configureCluster(2).addConfig(CONFIG, configset(CONFIG)).configure(); CollectionAdminRequest.createCollection(COLLECTION, CONFIG, 2, 1) .process(cluster.getSolrClient()); - cluster.getSolrClient().waitForState(COLLECTION, DEFAULT_TIMEOUT, TimeUnit.SECONDS, - (n, c) -> DocCollection.isFullyActive(n, c, 2, 1)); + cluster + .getSolrClient() + .waitForState( + COLLECTION, + DEFAULT_TIMEOUT, + TimeUnit.SECONDS, + (n, c) -> DocCollection.isFullyActive(n, c, 2, 1)); } @Test @@ -51,22 +55,22 @@ public void testAdd2Fields() throws Exception { addField(keyValueArrayToMap("name", "field2", "type", "string")); } - private void addField(Map field) throws Exception { + private void addField(Map field) throws Exception { CloudSolrClient client = cluster.getSolrClient(); UpdateResponse addFieldResponse = new SchemaRequest.AddField(field).process(client, COLLECTION); assertNotNull(addFieldResponse); assertEquals(0, addFieldResponse.getStatus()); assertNull(addFieldResponse.getResponse().get("errors")); - FieldResponse fieldResponse = new SchemaRequest.Field(field.get("name").toString()).process(client, COLLECTION); + FieldResponse fieldResponse = + new SchemaRequest.Field(field.get("name").toString()).process(client, COLLECTION); assertNotNull(fieldResponse); assertEquals(0, fieldResponse.getStatus()); } - private Map keyValueArrayToMap(String... alternatingKeysAndValues) { - Map map = new HashMap<>(); - for (int i = 0 ; i < alternatingKeysAndValues.length ; i += 2) + private Map keyValueArrayToMap(String... 
alternatingKeysAndValues) { + Map map = new HashMap<>(); + for (int i = 0; i < alternatingKeysAndValues.length; i += 2) map.put(alternatingKeysAndValues[i], alternatingKeysAndValues[i + 1]); return map; } } - diff --git a/solr/core/src/test/org/apache/solr/schema/PreAnalyzedFieldTest.java b/solr/core/src/test/org/apache/solr/schema/PreAnalyzedFieldTest.java index ae39ffa31c2..c8ca46458d1 100644 --- a/solr/core/src/test/org/apache/solr/schema/PreAnalyzedFieldTest.java +++ b/solr/core/src/test/org/apache/solr/schema/PreAnalyzedFieldTest.java @@ -18,7 +18,6 @@ import java.util.Collections; import java.util.HashMap; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; @@ -29,11 +28,11 @@ import org.junit.Test; public class PreAnalyzedFieldTest extends SolrTestCaseJ4 { - + private static final String[] valid = { - "1 one two three", // simple parsing - "1 one two three ", // spurious spaces - "1 one,s=123,e=128,i=22 two three,s=20,e=22,y=foobar", // attribs + "1 one two three", // simple parsing + "1 one two three ", // spurious spaces + "1 one,s=123,e=128,i=22 two three,s=20,e=22,y=foobar", // attribs "1 \\ one\\ \\,,i=22,a=\\, two\\=\n\r\t\\n,\\ =\\ \\", // escape madness "1 ,i=22 ,i=33,s=2,e=20 , ", // empty token text, non-empty attribs "1 =This is the stored part with \\= \n \\n \t \\t escapes.=one two three \u0001ąćęłńóśźż", // stored plus token stream @@ -41,7 +40,7 @@ public class PreAnalyzedFieldTest extends SolrTestCaseJ4 { "1 =this is a test.=", // stored + empty token stream "1 one,p=deadbeef two,p=0123456789abcdef three" // payloads }; - + private static final String[] validParsed = { "1 one,s=0,e=3 two,s=4,e=7 three,s=8,e=13", "1 one,s=1,e=4 two,s=6,e=9 three,s=12,e=17", @@ -60,7 +59,7 @@ public class PreAnalyzedFieldTest extends SolrTestCaseJ4 { "1 o,ne two", // missing escape "1 one t=wo", // missing escape "1 one,, two", // missing attribs, unescaped comma - "1 one,s ", // missing attrib value + "1 one,s ", // missing attrib value "1 one,s= val", // missing attrib value, unescaped space "1 one,s=,val", // unescaped comma "1 =", // unescaped equals @@ -68,24 +67,28 @@ public class PreAnalyzedFieldTest extends SolrTestCaseJ4 { "1 ===" // empty stored (ok), but unescaped = in token stream }; - private static final String validJson - = json("{'v':'1','str':'stored-value','tokens':[{'t':'a'},{'t':'b'},{'t':'c'}]}"); + private static final String validJson = + json("{'v':'1','str':'stored-value','tokens':[{'t':'a'},{'t':'b'},{'t':'c'}]}"); private static final String[] invalidJson = { - json("'v':'1','str':'stored-value','tokens':[{'t':'a'},{'t':'b'},{'t':'c'}]"), // missing enclosing object - json("{'str':'stored-value','tokens':[{'t':'a'},{'t':'b'},{'t':'c'}]}"), // missing version # - json("{'v':'2','str':'stored-value','tokens':[{'t':'a'},{'t':'b'},{'t':'c'}]}"), // invalid version # - json("{'v':'1','str':'stored-value','tokens':[{}]}"), // single token no attribs - json("{'v':'1','str':'stored-value','tokens':[{'t'}]}"), // missing attrib value + // missing enclosing object + json("'v':'1','str':'stored-value','tokens':[{'t':'a'},{'t':'b'},{'t':'c'}]"), + // missing version # + json("{'str':'stored-value','tokens':[{'t':'a'},{'t':'b'},{'t':'c'}]}"), + // invalid version # + json("{'v':'2','str':'stored-value','tokens':[{'t':'a'},{'t':'b'},{'t':'c'}]}"), + // single token no attribs + json("{'v':'1','str':'stored-value','tokens':[{}]}"), + // missing attrib value + 
json("{'v':'1','str':'stored-value','tokens':[{'t'}]}"), }; SchemaField field = null; - int props = - FieldProperties.INDEXED | FieldProperties.STORED; - + int props = FieldProperties.INDEXED | FieldProperties.STORED; + @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-minimal.xml","schema-preanalyzed.xml"); + initCore("solrconfig-minimal.xml", "schema-preanalyzed.xml"); } @Override @@ -93,20 +96,20 @@ public void setUp() throws Exception { super.setUp(); field = new SchemaField("content", new TextField(), props, null); } - + @Test public void testValidSimple() { PreAnalyzedField paf = new PreAnalyzedField(); // use Simple format - HashMap args = new HashMap<>(); + HashMap args = new HashMap<>(); args.put(PreAnalyzedField.PARSER_IMPL, SimplePreAnalyzedParser.class.getName()); paf.init(h.getCore().getLatestSchema(), args); PreAnalyzedParser parser = new SimplePreAnalyzedParser(); for (int i = 0; i < valid.length; i++) { String s = valid[i]; try { - Field f = (Field)paf.fromString(field, s); - //System.out.println(" - toString: '" + sb.toString() + "'"); + Field f = (Field) paf.fromString(field, s); + // System.out.println(" - toString: '" + sb.toString() + "'"); assertEquals(validParsed[i], parser.toFormattedString(f)); } catch (Exception e) { e.printStackTrace(); @@ -117,10 +120,18 @@ public void testValidSimple() { private String addTwoDocs(int firstId, String field) { return "\n" - + doc("id", Integer.toString(firstId), field, - json("{'v':'1','str':'document one','tokens':[{'t':'one'},{'t':'two'},{'t':'three','i':100}]}")) - + doc("id", Integer.toString(firstId + 1), field, - json("{'v':'1','str':'document two','tokens':[{'t':'eleven'},{'t':'twelve'},{'t':'thirteen','i':110}]}")) + + doc( + "id", + Integer.toString(firstId), + field, + json( + "{'v':'1','str':'document one','tokens':[{'t':'one'},{'t':'two'},{'t':'three','i':100}]}")) + + doc( + "id", + Integer.toString(firstId + 1), + field, + json( + "{'v':'1','str':'document two','tokens':[{'t':'eleven'},{'t':'twelve'},{'t':'thirteen','i':110}]}")) + "\n"; } @@ -128,32 +139,32 @@ private String addTwoDocs(int firstId, String field) { public void testIndexAndQueryNoSchemaAnalyzer() throws Exception { assertU(addTwoDocs(1, "pre_no_analyzer")); assertU(commit()); - assertQ(req("q", "id:(1 2)", "sort", "id asc") - ,"//result[@numFound='2']" - ,"//result/doc[1]/str[@name='id'][.='1']" - ,"//result/doc[1]/str[@name='pre_no_analyzer'][.='document one']" - ,"//result/doc[2]/str[@name='id'][.='2']" - ,"//result/doc[2]/str[@name='pre_no_analyzer'][.='document two']" - ); - assertQ(req("q", "{!field f='pre_no_analyzer'}{'v':'1','tokens':[{'t':'two'}]}") - ,"//result[@numFound='1']" - ); - assertQ(req("q", "{!field f='pre_no_analyzer'}{'v':'1','tokens':[{'t':'eleven'},{'t':'twelve'}]}") - ,"//result[@numFound='1']" - ); + assertQ( + req("q", "id:(1 2)", "sort", "id asc"), + "//result[@numFound='2']", + "//result/doc[1]/str[@name='id'][.='1']", + "//result/doc[1]/str[@name='pre_no_analyzer'][.='document one']", + "//result/doc[2]/str[@name='id'][.='2']", + "//result/doc[2]/str[@name='pre_no_analyzer'][.='document two']"); + assertQ( + req("q", "{!field f='pre_no_analyzer'}{'v':'1','tokens':[{'t':'two'}]}"), + "//result[@numFound='1']"); + assertQ( + req("q", "{!field f='pre_no_analyzer'}{'v':'1','tokens':[{'t':'eleven'},{'t':'twelve'}]}"), + "//result[@numFound='1']"); } @Test public void testIndexAndQueryWithSchemaAnalyzer() { assertU(addTwoDocs(3, "pre_with_analyzer")); assertU(commit()); - assertQ(req("q", 
"id:(3 4)", "sort", "id asc") - ,"//result[@numFound='2']" - ,"//result/doc[1]/str[@name='id'][.='3']" - ,"//result/doc[1]/str[@name='pre_with_analyzer'][.='document one']" - ,"//result/doc[2]/str[@name='id'][.='4']" - ,"//result/doc[2]/str[@name='pre_with_analyzer'][.='document two']" - ); + assertQ( + req("q", "id:(3 4)", "sort", "id asc"), + "//result[@numFound='2']", + "//result/doc[1]/str[@name='id'][.='3']", + "//result/doc[1]/str[@name='pre_with_analyzer'][.='document one']", + "//result/doc[2]/str[@name='id'][.='4']", + "//result/doc[2]/str[@name='pre_with_analyzer'][.='document two']"); assertQ(req("q", "pre_with_analyzer:(+two +three)"), "//result[@numFound='1']"); assertQ(req("q", "pre_with_analyzer:(+eleven +twelve)"), "//result[@numFound='1']"); } @@ -162,13 +173,13 @@ public void testIndexAndQueryWithSchemaAnalyzer() { public void testIndexAndQueryWithSchemaQueryAnalyzer() { assertU(addTwoDocs(5, "pre_with_query_analyzer")); assertU(commit()); - assertQ(req("q", "id:(5 6)", "sort", "id asc") - ,"//result[@numFound='2']" - ,"//result/doc[1]/str[@name='id'][.='5']" - ,"//result/doc[1]/str[@name='pre_with_query_analyzer'][.='document one']" - ,"//result/doc[2]/str[@name='id'][.='6']" - ,"//result/doc[2]/str[@name='pre_with_query_analyzer'][.='document two']" - ); + assertQ( + req("q", "id:(5 6)", "sort", "id asc"), + "//result[@numFound='2']", + "//result/doc[1]/str[@name='id'][.='5']", + "//result/doc[1]/str[@name='pre_with_query_analyzer'][.='document one']", + "//result/doc[2]/str[@name='id'][.='6']", + "//result/doc[2]/str[@name='pre_with_query_analyzer'][.='document two']"); assertQ(req("q", "pre_with_query_analyzer:one,two"), "//result[@numFound='1']"); assertQ(req("q", "pre_with_query_analyzer:eleven,twelve"), "//result[@numFound='1']"); } @@ -176,7 +187,7 @@ public void testIndexAndQueryWithSchemaQueryAnalyzer() { @Test public void testInvalidSimple() { PreAnalyzedField paf = new PreAnalyzedField(); - paf.init(h.getCore().getLatestSchema(), Collections.emptyMap()); + paf.init(h.getCore().getLatestSchema(), Collections.emptyMap()); for (String s : invalidSimple) { try { paf.fromString(field, s); @@ -191,7 +202,7 @@ public void testInvalidJson() throws Exception { PreAnalyzedField paf = new PreAnalyzedField(); paf.init(h.getCore().getLatestSchema(), Collections.emptyMap()); Analyzer preAnalyzer = paf.getIndexAnalyzer(); - for (String s: invalidJson) { + for (String s : invalidJson) { TokenStream stream = null; try { stream = preAnalyzer.tokenStream("dummy", s); @@ -215,24 +226,26 @@ public void testInvalidJson() throws Exception { stream.end(); stream.close(); } - - // "1 =test ąćęłńóśźż \u0001=one,i=22,s=123,e=128,p=deadbeef,y=word two,i=1,s=5,e=8,y=word three,i=1,s=20,e=22,y=foobar" - - private static final String jsonValid = "{\"v\":\"1\",\"str\":\"test ąćęłńóśźż\",\"tokens\":[" + - "{\"e\":128,\"i\":22,\"p\":\"DQ4KDQsODg8=\",\"s\":123,\"t\":\"one\",\"y\":\"word\"}," + - "{\"e\":8,\"i\":1,\"s\":5,\"t\":\"two\",\"y\":\"word\"}," + - "{\"e\":22,\"i\":1,\"s\":20,\"t\":\"three\",\"y\":\"foobar\"}" + - "]}"; - + + // "1 =test ąćęłńóśźż \u0001=one,i=22,s=123,e=128,p=deadbeef,y=word two,i=1,s=5,e=8,y=word + // three,i=1,s=20,e=22,y=foobar" + + private static final String jsonValid = + "{\"v\":\"1\",\"str\":\"test ąćęłńóśźż\",\"tokens\":[" + + "{\"e\":128,\"i\":22,\"p\":\"DQ4KDQsODg8=\",\"s\":123,\"t\":\"one\",\"y\":\"word\"}," + + "{\"e\":8,\"i\":1,\"s\":5,\"t\":\"two\",\"y\":\"word\"}," + + "{\"e\":22,\"i\":1,\"s\":20,\"t\":\"three\",\"y\":\"foobar\"}" + + "]}"; + @Test 
public void testParsers() throws Exception { PreAnalyzedField paf = new PreAnalyzedField(); // use Simple format - HashMap args = new HashMap<>(); + HashMap args = new HashMap<>(); args.put(PreAnalyzedField.PARSER_IMPL, SimplePreAnalyzedParser.class.getName()); paf.init(h.getCore().getLatestSchema(), args); { - Field f = (Field)paf.fromString(field, valid[0]); + Field f = (Field) paf.fromString(field, valid[0]); } // use JSON format @@ -240,11 +253,21 @@ public void testParsers() throws Exception { paf.init(h.getCore().getLatestSchema(), args); expectThrows(Exception.class, () -> paf.fromString(field, valid[0])); - byte[] deadbeef = new byte[]{(byte)0xd, (byte)0xe, (byte)0xa, (byte)0xd, (byte)0xb, (byte)0xe, (byte)0xe, (byte)0xf}; + byte[] deadbeef = + new byte[] { + (byte) 0xd, + (byte) 0xe, + (byte) 0xa, + (byte) 0xd, + (byte) 0xb, + (byte) 0xe, + (byte) 0xe, + (byte) 0xf + }; PreAnalyzedParser parser = new JsonPreAnalyzedParser(); { - Field f = (Field)paf.fromString(field, jsonValid); + Field f = (Field) paf.fromString(field, jsonValid); assertEquals(jsonValid, parser.toFormattedString(f)); } } diff --git a/solr/core/src/test/org/apache/solr/schema/PrimitiveFieldTypeTest.java b/solr/core/src/test/org/apache/solr/schema/PrimitiveFieldTypeTest.java index 17c0c29907c..810a695d0ca 100644 --- a/solr/core/src/test/org/apache/solr/schema/PrimitiveFieldTypeTest.java +++ b/solr/core/src/test/org/apache/solr/schema/PrimitiveFieldTypeTest.java @@ -21,22 +21,20 @@ import java.util.HashMap; import java.util.List; import java.util.Map; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.core.SolrConfig; import org.junit.Test; -/** - * Tests that defaults are set for Primitive (non-analyzed) fields - */ +/** Tests that defaults are set for Primitive (non-analyzed) fields */ public class PrimitiveFieldTypeTest extends SolrTestCaseJ4 { - private final String testConfHome = TEST_HOME() + File.separator + "collection1" + File.separator + "conf"+ File.separator; + private final String testConfHome = + TEST_HOME() + File.separator + "collection1" + File.separator + "conf" + File.separator; protected SolrConfig config; protected IndexSchema schema; - protected HashMap initMap; - + protected HashMap initMap; + @Override - public void setUp() throws Exception { + public void setUp() throws Exception { super.setUp(); // set some system properties for use by tests System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_ @@ -46,7 +44,7 @@ public void setUp() throws Exception { initMap = new HashMap<>(); config = new SolrConfig(TEST_PATH().resolve("collection1"), testConfHome + "solrconfig.xml"); } - + @Override public void tearDown() throws Exception { super.tearDown(); @@ -55,29 +53,29 @@ public void tearDown() throws Exception { @SuppressWarnings("deprecation") @Test public void testDefaultOmitNorms() throws Exception { - - final List> types - = Arrays.asList(TrieDateField.class, DatePointField.class, - TrieIntField.class, IntPointField.class, - TrieLongField.class, IntPointField.class, - TrieFloatField.class, FloatPointField.class, - TrieDoubleField.class, DoublePointField.class, - StrField.class, BoolField.class, - // Non-prims, omitNorms always defaults to false regardless of schema version... 
- TextField.class, BinaryField.class); - + + final List> types = + Arrays.asList( + TrieDateField.class, DatePointField.class, + TrieIntField.class, IntPointField.class, + TrieLongField.class, IntPointField.class, + TrieFloatField.class, FloatPointField.class, + TrieDoubleField.class, DoublePointField.class, + StrField.class, BoolField.class, + // Non-prims, omitNorms always defaults to false regardless of schema version... + TextField.class, BinaryField.class); + // *********************** // With schema version 1.4: // *********************** schema = IndexSchemaFactory.buildIndexSchema(testConfHome + "schema12.xml", config); - for (Class clazz : types) { FieldType ft = clazz.getConstructor().newInstance(); ft.init(schema, initMap); assertFalse(ft.getClass().getName(), ft.hasProperty(FieldType.OMIT_NORMS)); } - + // *********************** // With schema version 1.5 // *********************** @@ -86,16 +84,16 @@ public void testDefaultOmitNorms() throws Exception { for (Class clazz : types) { FieldType ft = clazz.getConstructor().newInstance(); ft.init(schema, initMap); - assertEquals(ft.getClass().getName(), - ft instanceof PrimitiveFieldType, - ft.hasProperty(FieldType.OMIT_NORMS)); + assertEquals( + ft.getClass().getName(), + ft instanceof PrimitiveFieldType, + ft.hasProperty(FieldType.OMIT_NORMS)); } - } - - public void testDateField() { + + public void testDateField() { schema = IndexSchemaFactory.buildIndexSchema(testConfHome + "schema15.xml", config); - + final TrieDateField tdt = new TrieDateField(); { final Map args = new HashMap<>(); @@ -116,7 +114,7 @@ public void testDateField() { args.put("docValues", "true"); pdt.setArgs(schema, args); } - + for (FieldType ft : Arrays.asList(tdt, pdt)) { assertTrue(ft.getClass().getName(), ft.hasProperty(FieldType.OMIT_NORMS)); assertTrue(ft.getClass().getName(), ft.hasProperty(FieldType.SORT_MISSING_LAST)); diff --git a/solr/core/src/test/org/apache/solr/schema/RankFieldTest.java b/solr/core/src/test/org/apache/solr/schema/RankFieldTest.java index 3763af15db1..f8f38430d99 100644 --- a/solr/core/src/test/org/apache/solr/schema/RankFieldTest.java +++ b/solr/core/src/test/org/apache/solr/schema/RankFieldTest.java @@ -18,9 +18,7 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; - import javax.xml.xpath.XPathConstants; - import org.apache.lucene.index.LeafReader; import org.apache.lucene.util.BytesRef; import org.apache.solr.SolrTestCaseJ4; @@ -31,257 +29,232 @@ import org.junit.Ignore; public class RankFieldTest extends SolrTestCaseJ4 { - + private static final String RANK_1 = "rank_1"; private static final String RANK_2 = "rank_2"; @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-minimal.xml","schema-rank-fields.xml"); + initCore("solrconfig-minimal.xml", "schema-rank-fields.xml"); } - + @Override public void setUp() throws Exception { clearIndex(); assertU(commit()); super.setUp(); } - + public void testInternalFieldName() { - assertEquals("RankField.INTERNAL_RANK_FIELD_NAME changed in an incompatible way", - "_rank_", RankField.INTERNAL_RANK_FIELD_NAME); + assertEquals( + "RankField.INTERNAL_RANK_FIELD_NAME changed in an incompatible way", + "_rank_", + RankField.INTERNAL_RANK_FIELD_NAME); } public void testBasic() { assertNotNull(h.getCore().getLatestSchema().getFieldOrNull(RANK_1)); - assertEquals(RankField.class, h.getCore().getLatestSchema().getField(RANK_1).getType().getClass()); + assertEquals( + RankField.class, 
h.getCore().getLatestSchema().getField(RANK_1).getType().getClass()); } - + public void testBadFormat() { try (ErrorLogMuter errors = ErrorLogMuter.substring("Expecting float")) { - assertFailedU(adoc( - "id", "1", - RANK_1, "foo" - )); + assertFailedU(adoc("id", "1", RANK_1, "foo")); - assertFailedU(adoc( - "id", "1", - RANK_1, "1.2.3" - )); + assertFailedU(adoc("id", "1", RANK_1, "1.2.3")); assertEquals(2, errors.getCount()); } - + try (ErrorLogMuter errors = ErrorLogMuter.substring("must be finite")) { - assertFailedU(adoc( - "id", "1", - RANK_1, Float.toString(Float.POSITIVE_INFINITY) - )); + assertFailedU(adoc("id", "1", RANK_1, Float.toString(Float.POSITIVE_INFINITY))); - assertFailedU(adoc( - "id", "1", - RANK_1, Float.toString(Float.NEGATIVE_INFINITY) - )); - - assertFailedU(adoc( - "id", "1", - RANK_1, Float.toString(Float.NaN) - )); + assertFailedU(adoc("id", "1", RANK_1, Float.toString(Float.NEGATIVE_INFINITY))); + + assertFailedU(adoc("id", "1", RANK_1, Float.toString(Float.NaN))); assertEquals(3, errors.getCount()); } - + try (ErrorLogMuter errors = ErrorLogMuter.substring("must be a positive")) { - assertFailedU(adoc( - "id", "1", - RANK_1, Float.toString(-0.0f) - )); + assertFailedU(adoc("id", "1", RANK_1, Float.toString(-0.0f))); - assertFailedU(adoc( - "id", "1", - RANK_1, Float.toString(-1f) - )); + assertFailedU(adoc("id", "1", RANK_1, Float.toString(-1f))); - assertFailedU(adoc( - "id", "1", - RANK_1, Float.toString(0.0f) - )); + assertFailedU(adoc("id", "1", RANK_1, Float.toString(0.0f))); assertEquals(3, errors.getCount()); } } - + public void testAddRandom() { - for (int i = 0 ; i < random().nextInt(TEST_NIGHTLY ? 10000 : 100); i++) { - assertU(adoc( - "id", String.valueOf(i), - RANK_1, Float.toString(random().nextFloat()) - )); + for (int i = 0; i < random().nextInt(TEST_NIGHTLY ? 
10000 : 100); i++) { + assertU(adoc("id", String.valueOf(i), RANK_1, Float.toString(random().nextFloat()))); } assertU(commit()); } - + public void testSkipEmpty() { - assertU(adoc( - "id", "1", - RANK_1, "" - )); + assertU(adoc("id", "1", RANK_1, "")); } - + public void testBasicAdd() throws IOException { - assertU(adoc( - "id", "testBasicAdd", - RANK_1, "1" - )); + assertU(adoc("id", "testBasicAdd", RANK_1, "1")); assertU(commit()); - //assert that the document made it in + // assert that the document made it in assertQ(req("q", "id:testBasicAdd"), "//*[@numFound='1']"); - h.getCore().withSearcher((searcher) -> { - LeafReader reader = searcher.getIndexReader().getContext().leaves().get(0).reader(); - // assert that the field made it in - assertNotNull(reader.getFieldInfos().fieldInfo(RankField.INTERNAL_RANK_FIELD_NAME)); - // assert that the feature made it in - assertTrue(reader.terms(RankField.INTERNAL_RANK_FIELD_NAME).iterator().seekExact(new BytesRef(RANK_1.getBytes(StandardCharsets.UTF_8)))); - return null; - }); + h.getCore() + .withSearcher( + (searcher) -> { + LeafReader reader = searcher.getIndexReader().getContext().leaves().get(0).reader(); + // assert that the field made it in + assertNotNull(reader.getFieldInfos().fieldInfo(RankField.INTERNAL_RANK_FIELD_NAME)); + // assert that the feature made it in + assertTrue( + reader + .terms(RankField.INTERNAL_RANK_FIELD_NAME) + .iterator() + .seekExact(new BytesRef(RANK_1.getBytes(StandardCharsets.UTF_8)))); + return null; + }); } - + public void testMultipleRankFields() throws IOException { - assertU(adoc( - "id", "testMultiValueAdd", - RANK_1, "1", - RANK_2, "2" - )); + assertU(adoc("id", "testMultiValueAdd", RANK_1, "1", RANK_2, "2")); assertU(commit()); - //assert that the document made it in + // assert that the document made it in assertQ(req("q", "id:testMultiValueAdd"), "//*[@numFound='1']"); - h.getCore().withSearcher((searcher) -> { - LeafReader reader = searcher.getIndexReader().getContext().leaves().get(0).reader(); - // assert that the field made it in - assertNotNull(reader.getFieldInfos().fieldInfo(RankField.INTERNAL_RANK_FIELD_NAME)); - // assert that the features made it in - assertTrue(reader.terms(RankField.INTERNAL_RANK_FIELD_NAME).iterator().seekExact(new BytesRef(RANK_2.getBytes(StandardCharsets.UTF_8)))); - assertTrue(reader.terms(RankField.INTERNAL_RANK_FIELD_NAME).iterator().seekExact(new BytesRef(RANK_1.getBytes(StandardCharsets.UTF_8)))); - return null; - }); + h.getCore() + .withSearcher( + (searcher) -> { + LeafReader reader = searcher.getIndexReader().getContext().leaves().get(0).reader(); + // assert that the field made it in + assertNotNull(reader.getFieldInfos().fieldInfo(RankField.INTERNAL_RANK_FIELD_NAME)); + // assert that the features made it in + assertTrue( + reader + .terms(RankField.INTERNAL_RANK_FIELD_NAME) + .iterator() + .seekExact(new BytesRef(RANK_2.getBytes(StandardCharsets.UTF_8)))); + assertTrue( + reader + .terms(RankField.INTERNAL_RANK_FIELD_NAME) + .iterator() + .seekExact(new BytesRef(RANK_1.getBytes(StandardCharsets.UTF_8)))); + return null; + }); } - + public void testSortFails() throws IOException { - assertU(adoc( - "id", "testSortFails", - RANK_1, "1" - )); + assertU(adoc("id", "testSortFails", RANK_1, "1")); assertU(commit()); - assertQEx("Can't sort on rank field", req( - "q", "id:testSortFails", - "sort", RANK_1 + " desc"), 400); + assertQEx( + "Can't sort on rank field", req("q", "id:testSortFails", "sort", RANK_1 + " desc"), 400); } - + @Ignore("We currently don't fail 
these kinds of requests with other field types") public void testFacetFails() throws IOException { - assertU(adoc( - "id", "testFacetFails", - RANK_1, "1" - )); + assertU(adoc("id", "testFacetFails", RANK_1, "1")); assertU(commit()); - assertQEx("Can't facet on rank field", req( - "q", "id:testFacetFails", - "facet", "true", - "facet.field", RANK_1), 400); + assertQEx( + "Can't facet on rank field", + req( + "q", "id:testFacetFails", + "facet", "true", + "facet.field", RANK_1), + 400); } - + public void testTermQuery() throws IOException { - assertU(adoc( - "id", "testTermQuery", - RANK_1, "1", - RANK_2, "1" - )); - assertU(adoc( - "id", "testTermQuery2", - RANK_1, "1" - )); + assertU(adoc("id", "testTermQuery", RANK_1, "1", RANK_2, "1")); + assertU(adoc("id", "testTermQuery2", RANK_1, "1")); assertU(commit()); assertQ(req("q", RANK_1 + ":*"), "//*[@numFound='2']"); assertQ(req("q", RANK_1 + ":[* TO *]"), "//*[@numFound='2']"); assertQ(req("q", RANK_2 + ":*"), "//*[@numFound='1']"); assertQ(req("q", RANK_2 + ":[* TO *]"), "//*[@numFound='1']"); - + assertQEx("Term queries not supported", req("q", RANK_1 + ":1"), 400); assertQEx("Range queries not supported", req("q", RANK_1 + ":[1 TO 10]"), 400); } - - + public void testResponseQuery() throws IOException { - assertU(adoc( - "id", "testResponseQuery", - RANK_1, "1" - )); + assertU(adoc("id", "testResponseQuery", RANK_1, "1")); assertU(commit()); // Ignore requests to retrieve rank - assertQ(req("q", RANK_1 + ":*", - "fl", "id," + RANK_1), + assertQ( + req("q", RANK_1 + ":*", "fl", "id," + RANK_1), "//*[@numFound='1']", "count(//result/doc[1]/str)=1"); } - + public void testRankQParserQuery() throws IOException { - assertU(adoc( - "id", "1", - "str_field", "foo", - RANK_1, "1", - RANK_2, "2" - )); - assertU(adoc( - "id", "2", - "str_field", "foo", - RANK_1, "2", - RANK_2, "1" - )); + assertU(adoc("id", "1", "str_field", "foo", RANK_1, "1", RANK_2, "2")); + assertU(adoc("id", "2", "str_field", "foo", RANK_1, "2", RANK_2, "1")); assertU(commit()); - assertQ(req("q", "str_field:foo _query_:{!rank f='" + RANK_1 + "' function='log' scalingFactor='1'}"), + assertQ( + req( + "q", + "str_field:foo _query_:{!rank f='" + RANK_1 + "' function='log' scalingFactor='1'}"), "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.='2']", "//result/doc[2]/str[@name='id'][.='1']"); - - assertQ(req("q", "str_field:foo _query_:{!rank f='" + RANK_2 + "' function='log' scalingFactor='1'}"), + + assertQ( + req( + "q", + "str_field:foo _query_:{!rank f='" + RANK_2 + "' function='log' scalingFactor='1'}"), "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.='1']", "//result/doc[2]/str[@name='id'][.='2']"); - - assertQ(req("q", "foo", - "defType", "dismax", - "qf", "str_field^10", - "bq", "{!rank f='" + RANK_1 + "' function='log' scalingFactor='1'}" - ), + + assertQ( + req( + "q", + "foo", + "defType", + "dismax", + "qf", + "str_field^10", + "bq", + "{!rank f='" + RANK_1 + "' function='log' scalingFactor='1'}"), "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.='2']", "//result/doc[2]/str[@name='id'][.='1']"); - - assertQ(req("q", "foo", - "defType", "dismax", - "qf", "str_field^10", - "bq", "{!rank f='" + RANK_2 + "' function='log' scalingFactor='1'}" - ), + + assertQ( + req( + "q", + "foo", + "defType", + "dismax", + "qf", + "str_field^10", + "bq", + "{!rank f='" + RANK_2 + "' function='log' scalingFactor='1'}"), "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.='1']", "//result/doc[2]/str[@name='id'][.='2']"); } - + public void 
testScoreChanges() throws Exception { - assertU(adoc( - "id", "1", - "str_field", "foo", - RANK_1, "1" - )); + assertU(adoc("id", "1", "str_field", "foo", RANK_1, "1")); assertU(commit()); - ModifiableSolrParams params = params("q", "foo", - "defType", "dismax", - "qf", "str_field^10", - "fl", "id,score", - "wt", "xml"); - - double scoreBefore = (Double) TestHarness.evaluateXPath(h.query(req(params)), "//result/doc[1]/float[@name='score']", XPathConstants.NUMBER); + ModifiableSolrParams params = + params( + "q", "foo", "defType", "dismax", "qf", "str_field^10", "fl", "id,score", "wt", "xml"); + + double scoreBefore = + (Double) + TestHarness.evaluateXPath( + h.query(req(params)), + "//result/doc[1]/float[@name='score']", + XPathConstants.NUMBER); params.add("bq", "{!rank f='" + RANK_1 + "' function='log' scalingFactor='1'}"); - double scoreAfter = (Double) TestHarness.evaluateXPath(h.query(req(params)), "//result/doc[1]/float[@name='score']", XPathConstants.NUMBER); + double scoreAfter = + (Double) + TestHarness.evaluateXPath( + h.query(req(params)), + "//result/doc[1]/float[@name='score']", + XPathConstants.NUMBER); assertNotEquals("Expecting score to change", scoreBefore, scoreAfter, 0f); - } - } diff --git a/solr/core/src/test/org/apache/solr/schema/RequiredFieldsTest.java b/solr/core/src/test/org/apache/solr/schema/RequiredFieldsTest.java index 6827002c3a0..765a2f47fc9 100644 --- a/solr/core/src/test/org/apache/solr/schema/RequiredFieldsTest.java +++ b/solr/core/src/test/org/apache/solr/schema/RequiredFieldsTest.java @@ -17,26 +17,24 @@ package org.apache.solr.schema; import java.util.Collection; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.core.SolrCore; import org.junit.BeforeClass; import org.junit.Test; -/** - * - */ + +/** */ public class RequiredFieldsTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema-required-fields.xml"); + initCore("solrconfig.xml", "schema-required-fields.xml"); } - + @Override public void setUp() throws Exception { super.setUp(); clearIndex(); } - + @Test public void testRequiredFieldsConfig() { SolrCore core = h.getCore(); @@ -44,99 +42,184 @@ public void testRequiredFieldsConfig() { SchemaField uniqueKey = schema.getUniqueKeyField(); // Make sure the uniqueKey is required - assertTrue( uniqueKey.isRequired() ); - assertTrue( schema.getRequiredFields().contains( uniqueKey ) ); - + assertTrue(uniqueKey.isRequired()); + assertTrue(schema.getRequiredFields().contains(uniqueKey)); + // we specified one required field, but all default valued fields are also required - Collection<SchemaField> requiredFields =schema.getRequiredFields(); + Collection<SchemaField> requiredFields = schema.getRequiredFields(); int numDefaultFields = schema.getFieldsWithDefaultValue().size(); - assertEquals( numDefaultFields+1+1, requiredFields.size()); // also the uniqueKey + assertEquals(numDefaultFields + 1 + 1, requiredFields.size()); // also the uniqueKey } - + @Test - public void testRequiredFieldsSingleAdd() { - SolrCore core = h.getCore(); + public void testRequiredFieldsSingleAdd() { + SolrCore core = h.getCore(); // Add a single document - assertU("adding document", - adoc("id", "529", "name", "document with id, name, and subject", "field_t", "what's inside?", "subject", "info")); + assertU( + "adding document", + adoc( + "id", + "529", + "name", + "document with id, name, and subject", + "field_t", + "what's inside?", + "subject", + "info")); assertU(commit()); - + // Check that it is in the index -
assertQ("should find one", req("id:529") ,"//result[@numFound=1]" ); + assertQ("should find one", req("id:529"), "//result[@numFound=1]"); // Add another document without the required subject field, which // has a configured defaultValue of "Stuff" - assertU("adding a doc without field w/ configured default", - adoc("id", "530", "name", "document with id and name", "field_t", "what's inside?")); + assertU( + "adding a doc without field w/ configured default", + adoc("id", "530", "name", "document with id and name", "field_t", "what's inside?")); assertU(commit()); // Add another document without a subject, which has a default in schema String subjectDefault = core.getLatestSchema().getField("subject").getDefaultValue(); assertNotNull("subject has no default value", subjectDefault); - assertQ("should find one with subject="+subjectDefault, req("id:530 subject:"+subjectDefault) ,"//result[@numFound=1]" ); + assertQ( + "should find one with subject=" + subjectDefault, + req("id:530 subject:" + subjectDefault), + "//result[@numFound=1]"); // Add another document without a required name, which has no default assertNull(core.getLatestSchema().getField("name").getDefaultValue()); ignoreException("missing required field"); - assertFailedU("adding doc without required field", - adoc("id", "531", "subject", "no name document", "field_t", "what's inside?") ); + assertFailedU( + "adding doc without required field", + adoc("id", "531", "subject", "no name document", "field_t", "what's inside?")); resetExceptionIgnores(); assertU(commit()); - + // Check to make sure this submission did not succeed - assertQ("should not find any", req("id:531") ,"//result[@numFound=0]" ); + assertQ("should not find any", req("id:531"), "//result[@numFound=0]"); } - + @Test public void testAddMultipleDocumentsWithErrors() { - //Add three documents at once to make sure the baseline succeeds - assertU("adding 3 documents", - "" +doc("id", "601", "name", "multiad one", "field_t", "what's inside?", "subject", "info") + - doc("id", "602", "name", "multiad two", "field_t", "what's inside?", "subject", "info") + - doc("id", "603", "name", "multiad three", "field_t", "what's inside?", "subject", "info") + - ""); + // Add three documents at once to make sure the baseline succeeds + assertU( + "adding 3 documents", + "" + + doc( + "id", "601", "name", "multiad one", "field_t", "what's inside?", "subject", "info") + + doc( + "id", "602", "name", "multiad two", "field_t", "what's inside?", "subject", "info") + + doc( + "id", + "603", + "name", + "multiad three", + "field_t", + "what's inside?", + "subject", + "info") + + ""); assertU(commit()); // Check that they are in the index - assertQ("should find three", req("name:multiad") ,"//result[@numFound=3]" ); - + assertQ("should find three", req("name:multiad"), "//result[@numFound=3]"); + // Add three documents at once, with the middle one missing a field that has a default - assertU("adding 3 docs, with 2nd one missing a field that has a default value", - "" +doc("id", "601", "name", "nosubject batch one", "field_t", "what's inside?", "subject", "info") + - doc("id", "602", "name", "nosubject batch two", "field_t", "what's inside?") + - doc("id", "603", "name", "nosubject batch three", "field_t", "what's inside?", "subject", "info") + - ""); + assertU( + "adding 3 docs, with 2nd one missing a field that has a default value", + "" + + doc( + "id", + "601", + "name", + "nosubject batch one", + "field_t", + "what's inside?", + "subject", + "info") + + doc("id", "602", "name", "nosubject 
batch two", "field_t", "what's inside?") + + doc( + "id", + "603", + "name", + "nosubject batch three", + "field_t", + "what's inside?", + "subject", + "info") + + ""); assertU(commit()); - + // Since the missing field had a devault value, // All three should have made it into the index - assertQ("should find three", req("name:nosubject") ,"//result[@numFound=3]" ); - + assertQ("should find three", req("name:nosubject"), "//result[@numFound=3]"); // Add three documents at once, with the middle with a bad field definition, // to establish the baselinie behavior for errors in a multi-ad submission - assertFailedU("adding 3 documents, with 2nd one with undefined field", - "" +doc("id", "801", "name", "baddef batch one", "field_t", "what's inside?", "subject", "info") + - doc("id", "802", "name", "baddef batch two", "missing_field_ignore_exception", "garbage") + - doc("id", "803", "name", "baddef batch three", "field_t", "what's inside?", "subject", "info") + - ""); - assertU(commit()); + assertFailedU( + "adding 3 documents, with 2nd one with undefined field", + "" + + doc( + "id", + "801", + "name", + "baddef batch one", + "field_t", + "what's inside?", + "subject", + "info") + + doc( + "id", + "802", + "name", + "baddef batch two", + "missing_field_ignore_exception", + "garbage") + + doc( + "id", + "803", + "name", + "baddef batch three", + "field_t", + "what's inside?", + "subject", + "info") + + ""); + assertU(commit()); // Check that only docs before the error should be in the index - assertQ("should find one", req("name:baddef") ,"//result[@numFound=1]" ); + assertQ("should find one", req("name:baddef"), "//result[@numFound=1]"); ignoreException("missing required field"); // Add three documents at once, with the middle one missing a required field that has no default - assertFailedU("adding 3 docs, with 2nd one missing required field", - "" +doc("id", "701", "name", "noname batch one", "field_t", "what's inside?", "subject", "info") + - doc("id", "702", "field_t", "what's inside?", "subject", "info") + - doc("id", "703", "name", "noname batch batch three", "field_t", "what's inside?", "subject", "info") + - ""); + assertFailedU( + "adding 3 docs, with 2nd one missing required field", + "" + + doc( + "id", + "701", + "name", + "noname batch one", + "field_t", + "what's inside?", + "subject", + "info") + + doc("id", "702", "field_t", "what's inside?", "subject", "info") + + doc( + "id", + "703", + "name", + "noname batch batch three", + "field_t", + "what's inside?", + "subject", + "info") + + ""); resetExceptionIgnores(); assertU(commit()); // Check that only docs before the error should be in the index - assertQ("should find one", req("name:noname") ,"//result[@numFound=1]" ); - } + assertQ("should find one", req("name:noname"), "//result[@numFound=1]"); + } } diff --git a/solr/core/src/test/org/apache/solr/schema/ResolveAnalyzerByNameTest.java b/solr/core/src/test/org/apache/solr/schema/ResolveAnalyzerByNameTest.java index e0ee0baf516..b64a098b82f 100644 --- a/solr/core/src/test/org/apache/solr/schema/ResolveAnalyzerByNameTest.java +++ b/solr/core/src/test/org/apache/solr/schema/ResolveAnalyzerByNameTest.java @@ -17,7 +17,6 @@ package org.apache.solr.schema; import java.util.List; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.util.SimpleOrderedMap; @@ -26,11 +25,9 @@ import org.junit.Test; /** - * This is a simple test to make sure the schema loads when - * provided analyzers resolve tokenizer/tokenfilter/charfilter 
factories by (SPI) name. - + * This is a simple test to make sure the schema loads when provided analyzers resolve + * tokenizer/tokenfilter/charfilter factories by (SPI) name. */ - public class ResolveAnalyzerByNameTest extends SolrTestCaseJ4 { @BeforeClass @@ -42,11 +39,11 @@ public static void beforeTests() throws Exception { public void testSchemaLoadingSimpleAnalyzer() { SolrCore core = h.getCore(); IndexSchema schema = core.getLatestSchema(); - assertTrue( schema.getFieldTypes().containsKey("text_ws") ); + assertTrue(schema.getFieldTypes().containsKey("text_ws")); @SuppressWarnings({"unchecked"}) SimpleOrderedMap<Object> analyzerProps = - (SimpleOrderedMap<Object>)schema.getFieldTypeByName("text_ws") - .getNamedPropertyValues(true).get("analyzer"); + (SimpleOrderedMap<Object>) + schema.getFieldTypeByName("text_ws").getNamedPropertyValues(true).get("analyzer"); checkTokenizerName(analyzerProps, "whitespace"); assertNotNull(schema.getFieldTypeByName("text_ws").getIndexAnalyzer()); @@ -57,21 +54,41 @@ public void testSchemaLoadingSimpleAnalyzer() { public void testSchemaLoadingComplexAnalyzer() { SolrCore core = h.getCore(); IndexSchema schema = core.getLatestSchema(); - assertTrue( schema.getFieldTypes().containsKey("text") ); + assertTrue(schema.getFieldTypes().containsKey("text")); @SuppressWarnings({"unchecked"}) SimpleOrderedMap<Object> indexAnalyzerProps = - (SimpleOrderedMap<Object>)schema.getFieldTypeByName("text") - .getNamedPropertyValues(true).get("indexAnalyzer"); + (SimpleOrderedMap<Object>) + schema.getFieldTypeByName("text").getNamedPropertyValues(true).get("indexAnalyzer"); checkTokenizerName(indexAnalyzerProps, "whitespace"); - checkTokenFilterNames(indexAnalyzerProps, new String[]{"stop", "wordDelimiterGraph", "lowercase", "keywordMarker", "porterStem", "removeDuplicates", "flattenGraph"}); + checkTokenFilterNames( + indexAnalyzerProps, + new String[] { + "stop", + "wordDelimiterGraph", + "lowercase", + "keywordMarker", + "porterStem", + "removeDuplicates", + "flattenGraph" + }); @SuppressWarnings({"unchecked"}) SimpleOrderedMap<Object> queryAnalyzerProps = - (SimpleOrderedMap<Object>)schema.getFieldTypeByName("text") - .getNamedPropertyValues(true).get("queryAnalyzer"); + (SimpleOrderedMap<Object>) + schema.getFieldTypeByName("text").getNamedPropertyValues(true).get("queryAnalyzer"); checkTokenizerName(queryAnalyzerProps, "whitespace"); - checkTokenFilterNames(queryAnalyzerProps, new String[]{"synonymGraph", "stop", "wordDelimiterGraph", "lowercase", "keywordMarker", "porterStem", "removeDuplicates"}); + checkTokenFilterNames( + queryAnalyzerProps, + new String[] { + "synonymGraph", + "stop", + "wordDelimiterGraph", + "lowercase", + "keywordMarker", + "porterStem", + "removeDuplicates" + }); assertNotNull(schema.getFieldTypeByName("text").getIndexAnalyzer()); assertNotNull(schema.getFieldTypeByName("text").getQueryAnalyzer()); @@ -81,13 +98,16 @@ public void testSchemaLoadingComplexAnalyzer() { public void testSchemaLoadingAnalyzerWithCharFilters() { SolrCore core = h.getCore(); IndexSchema schema = core.getLatestSchema(); - assertTrue( schema.getFieldTypes().containsKey("charfilthtmlmap") ); + assertTrue(schema.getFieldTypes().containsKey("charfilthtmlmap")); @SuppressWarnings({"unchecked"}) SimpleOrderedMap<Object> analyzerProps = - (SimpleOrderedMap<Object>)schema.getFieldTypeByName("charfilthtmlmap") - .getNamedPropertyValues(true).get("analyzer"); + (SimpleOrderedMap<Object>) + schema + .getFieldTypeByName("charfilthtmlmap") + .getNamedPropertyValues(true) + .get("analyzer"); checkTokenizerName(analyzerProps, "whitespace"); -
checkCharFilterNames(analyzerProps, new String[]{"htmlStrip", "mapping"}); + checkCharFilterNames(analyzerProps, new String[] {"htmlStrip", "mapping"}); assertNotNull(schema.getFieldTypeByName("charfilthtmlmap").getIndexAnalyzer()); assertNotNull(schema.getFieldTypeByName("charfilthtmlmap").getQueryAnalyzer()); @@ -115,14 +135,16 @@ public void testSchemaLoadingClassAndNameTokenFilter() throws Exception { private void checkTokenizerName(SimpleOrderedMap<Object> analyzerProps, String name) { @SuppressWarnings({"unchecked"}) - SimpleOrderedMap<Object> tokenizerProps = (SimpleOrderedMap<Object>)analyzerProps.get("tokenizer"); + SimpleOrderedMap<Object> tokenizerProps = + (SimpleOrderedMap<Object>) analyzerProps.get("tokenizer"); assertNull(tokenizerProps.get("class")); assertEquals(name, tokenizerProps.get("name")); } private void checkTokenFilterNames(SimpleOrderedMap<Object> analyzerProps, String[] names) { @SuppressWarnings({"unchecked"}) - List<SimpleOrderedMap<Object>> tokenFilterProps = (List<SimpleOrderedMap<Object>>)analyzerProps.get("filters"); + List<SimpleOrderedMap<Object>> tokenFilterProps = + (List<SimpleOrderedMap<Object>>) analyzerProps.get("filters"); assertEquals(names.length, tokenFilterProps.size()); for (int i = 0; i < names.length; i++) { assertNull(tokenFilterProps.get(i).get("class")); @@ -132,7 +154,8 @@ private void checkTokenFilterNames(SimpleOrderedMap<Object> analyzerProps, Strin private void checkCharFilterNames(SimpleOrderedMap<Object> analyzerProps, String[] names) { @SuppressWarnings({"unchecked"}) - List<SimpleOrderedMap<Object>> charFilterProps = (List<SimpleOrderedMap<Object>>)analyzerProps.get("charFilters"); + List<SimpleOrderedMap<Object>> charFilterProps = + (List<SimpleOrderedMap<Object>>) analyzerProps.get("charFilters"); assertEquals(names.length, charFilterProps.size()); for (int i = 0; i < names.length; i++) { assertNull(charFilterProps.get(i).get("class")); diff --git a/solr/core/src/test/org/apache/solr/schema/SchemaApiFailureTest.java b/solr/core/src/test/org/apache/solr/schema/SchemaApiFailureTest.java index 163432626ba..690df76c130 100644 --- a/solr/core/src/test/org/apache/solr/schema/SchemaApiFailureTest.java +++ b/solr/core/src/test/org/apache/solr/schema/SchemaApiFailureTest.java @@ -19,7 +19,6 @@ import java.util.Map; import java.util.concurrent.TimeUnit; - import org.apache.solr.client.solrj.impl.BaseHttpSolrClient; import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -40,24 +39,31 @@ public static void setupCluster() throws Exception { configureCluster(1).configure(); CollectionAdminRequest.createCollection(COLLECTION, 2, 1) // _default configset .process(cluster.getSolrClient()); - cluster.getSolrClient().waitForState(COLLECTION, DEFAULT_TIMEOUT, TimeUnit.SECONDS, - (n, c) -> DocCollection.isFullyActive(n, c, 2, 1)); + cluster + .getSolrClient() + .waitForState( + COLLECTION, + DEFAULT_TIMEOUT, + TimeUnit.SECONDS, + (n, c) -> DocCollection.isFullyActive(n, c, 2, 1)); } @Test - // commented 4-Sep-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 23-Aug-2018 public void testAddTheSameFieldTwice() throws Exception { CloudSolrClient client = cluster.getSolrClient(); - SchemaRequest.Update fieldAddition = new SchemaRequest.AddField - (Map.of("name","myfield", "type","string")); + SchemaRequest.Update fieldAddition = + new SchemaRequest.AddField(Map.of("name", "myfield", "type", "string")); SchemaResponse.UpdateResponse updateResponse = fieldAddition.process(client, COLLECTION); - BaseHttpSolrClient.RemoteExecutionException ex = expectThrows(BaseHttpSolrClient.RemoteExecutionException.class, - () -> fieldAddition.process(client, COLLECTION)); - - assertTrue("expected error message 'Field
'myfield' already exists'.",Utils.getObjectByPath(ex.getMetaData(), false, "error/details[0]/errorMessages[0]").toString().contains("Field 'myfield' already exists.") ); + BaseHttpSolrClient.RemoteExecutionException ex = + expectThrows( + BaseHttpSolrClient.RemoteExecutionException.class, + () -> fieldAddition.process(client, COLLECTION)); + assertTrue( + "expected error message 'Field 'myfield' already exists'.", + Utils.getObjectByPath(ex.getMetaData(), false, "error/details[0]/errorMessages[0]") + .toString() + .contains("Field 'myfield' already exists.")); } - - } diff --git a/solr/core/src/test/org/apache/solr/schema/SchemaVersionSpecificBehaviorTest.java b/solr/core/src/test/org/apache/solr/schema/SchemaVersionSpecificBehaviorTest.java index 67a629179e7..76c60f583ef 100644 --- a/solr/core/src/test/org/apache/solr/schema/SchemaVersionSpecificBehaviorTest.java +++ b/solr/core/src/test/org/apache/solr/schema/SchemaVersionSpecificBehaviorTest.java @@ -18,217 +18,254 @@ import org.apache.solr.SolrTestCaseJ4; - public class SchemaVersionSpecificBehaviorTest extends SolrTestCaseJ4 { public void testVersionBehavior() throws Exception { - for (float v : new float[] { 1.0F, 1.1F, 1.2F, 1.3F, 1.4F, 1.5F, 1.6F }) { + for (float v : new float[] {1.0F, 1.1F, 1.2F, 1.3F, 1.4F, 1.5F, 1.6F}) { try { final IndexSchema schema = initCoreUsingSchemaVersion(v); final String ver = String.valueOf(v); // check defaults for fields where neither the field nor the field type // have any properties set on them - for (String f : new String[] { "text", "xx_dyn_text", - "bool", "xx_dyn_bool", - "str", "xx_dyn_str", - "int", "xx_dyn_int"}) { + for (String f : + new String[] { + "text", "xx_dyn_text", + "bool", "xx_dyn_bool", + "str", "xx_dyn_str", + "int", "xx_dyn_int" + }) { SchemaField field = schema.getField(f); // 1.1: multiValued default changed - assertEquals(f + " field's multiValued is wrong for ver=" + ver, - (v < 1.1F), field.multiValued()); + assertEquals( + f + " field's multiValued is wrong for ver=" + ver, (v < 1.1F), field.multiValued()); - // 1.2: omitTermFreqAndPositions default changed + // 1.2: omitTermFreqAndPositions default changed // to true for non TextField - assertEquals(f + " field's type has wrong omitTfP for ver=" + ver, - ( v < 1.2F ? false : - ! (field.getType() instanceof TextField)), - field.omitTermFreqAndPositions()); + assertEquals( + f + " field's type has wrong omitTfP for ver=" + ver, + (v < 1.2F ? false : !(field.getType() instanceof TextField)), + field.omitTermFreqAndPositions()); // 1.4: autoGeneratePhraseQueries default changed to false if (field.getType() instanceof TextField) { TextField ft = (TextField) field.getType(); - assertEquals(f + " field's autoPhrase is wrong for ver=" + ver, - (v < 1.4F), ft.getAutoGeneratePhraseQueries()); + assertEquals( + f + " field's autoPhrase is wrong for ver=" + ver, + (v < 1.4F), + ft.getAutoGeneratePhraseQueries()); } // 1.5: omitNorms default changed to true for non TextField - assertEquals(f + " field's type has wrong omitNorm for ver=" + ver, - ( v < 1.5F ? false : - ! (field.getType() instanceof TextField)), - field.omitNorms()); - + assertEquals( + f + " field's type has wrong omitNorm for ver=" + ver, + (v < 1.5F ? false : !(field.getType() instanceof TextField)), + field.omitNorms()); + // 1.6: useDocValuesAsStored defaults to true - assertEquals(f + " field's type has wrong useDocValuesAsStored for ver=" + ver, - ( v < 1.6F ? 
false : true), - field.useDocValuesAsStored()); - + assertEquals( + f + " field's type has wrong useDocValuesAsStored for ver=" + ver, + (v < 1.6F ? false : true), + field.useDocValuesAsStored()); + // uninvertable defaults to true (for now) - assertEquals(f + " field's type has wrong uninvertable for ver=" + ver, - true, - field.isUninvertible()); + assertEquals( + f + " field's type has wrong uninvertable for ver=" + ver, + true, + field.isUninvertible()); } - // regardless of version, explicit multiValued values on field or type + // regardless of version, explicit multiValued values on field or type // should be correct - for (String f : new String[] { "multi_f", "multi_t", - "ft_multi_f", "ft_multi_t", - "xx_dyn_str_multi_f", - "xx_dyn_str_multi_t", - "xx_dyn_str_ft_multi_f", - "xx_dyn_str_ft_multi_t" }) { + for (String f : + new String[] { + "multi_f", + "multi_t", + "ft_multi_f", + "ft_multi_t", + "xx_dyn_str_multi_f", + "xx_dyn_str_multi_t", + "xx_dyn_str_ft_multi_f", + "xx_dyn_str_ft_multi_t" + }) { boolean expected = f.endsWith("multi_t"); SchemaField field = schema.getField(f); - assertEquals(f + " field's multiValued is wrong for ver=" + ver, - expected, field.multiValued()); + assertEquals( + f + " field's multiValued is wrong for ver=" + ver, expected, field.multiValued()); FieldType ft = field.getType(); if (f.contains("ft_multi")) { // sanity check that we really are inheriting from fieldtype - assertEquals(f + " field's multiValued doesn't match type for ver=" + ver, - expected, ft.isMultiValued()); + assertEquals( + f + " field's multiValued doesn't match type for ver=" + ver, + expected, + ft.isMultiValued()); } else { // for fields where the property is explicit, make sure // we aren't getting a false negative because someone changed the // schema and we're inheriting from fieldType - assertEquals(f + " field's type has wrong multiValued is wrong for ver=" + ver, - (v < 1.1F), ft.isMultiValued()); - + assertEquals( + f + " field's type has wrong multiValued is wrong for ver=" + ver, + (v < 1.1F), + ft.isMultiValued()); } } - - // regardless of version, explicit useDocValuesAsStored values on field or type + + // regardless of version, explicit useDocValuesAsStored values on field or type // should be correct - for (String f : new String[] { "ft_intdvas_f", "ft_intdvas_t", - "intdvas_f", "intdvas_t", - "xx_dyn_ft_intdvas_f", "xx_dyn_ft_intdvas_f", - "xx_dyn_intdvas_f", "xx_dyn_intdvas_f"}) { + for (String f : + new String[] { + "ft_intdvas_f", + "ft_intdvas_t", + "intdvas_f", + "intdvas_t", + "xx_dyn_ft_intdvas_f", + "xx_dyn_ft_intdvas_f", + "xx_dyn_intdvas_f", + "xx_dyn_intdvas_f" + }) { boolean expected = f.endsWith("dvas_t"); SchemaField field = schema.getField(f); - assertEquals(f + " field's useDocValuesAsStored is wrong for ver=" + ver, - expected, field.useDocValuesAsStored()); + assertEquals( + f + " field's useDocValuesAsStored is wrong for ver=" + ver, + expected, + field.useDocValuesAsStored()); FieldType ft = field.getType(); if (f.contains("ft_")) { // sanity check that we really are inheriting from fieldtype - assertEquals(f + " field's omitTfP doesn't match type for ver=" + ver, - expected, ft.hasProperty(FieldType.USE_DOCVALUES_AS_STORED)); + assertEquals( + f + " field's omitTfP doesn't match type for ver=" + ver, + expected, + ft.hasProperty(FieldType.USE_DOCVALUES_AS_STORED)); } else { // for fields where the property is explicit, make sure // we aren't getting a false negative because someone changed the // schema and we're inheriting from fieldType - 
assertEquals(f + " field's type has wrong useDocValuesAsStored for ver=" + ver, - ( v < 1.6F ? false : true), - ft.hasProperty(FieldType.USE_DOCVALUES_AS_STORED)); - + assertEquals( + f + " field's type has wrong useDocValuesAsStored for ver=" + ver, + (v < 1.6F ? false : true), + ft.hasProperty(FieldType.USE_DOCVALUES_AS_STORED)); } } - // regardless of version, explicit omitTfP values on field or type + // regardless of version, explicit omitTfP values on field or type // should be correct - for (String f : new String[] { "strTfP_f", "strTfP_t", - "txtTfP_f", "txtTfP_t", - "ft_strTfP_f", "ft_strTfP_t", - "ft_txtTfP_f", "ft_txtTfP_t", - "xx_dyn_strTfP_f", "xx_dyn_strTfP_t", - "xx_dyn_txtTfP_f", "xx_dyn_txtTfP_t", - "xx_dyn_ft_strTfP_f", "xx_dyn_ft_strTfP_t", - "xx_dyn_ft_txtTfP_f", "xx_dyn_ft_txtTfP_t" }) { + for (String f : + new String[] { + "strTfP_f", "strTfP_t", + "txtTfP_f", "txtTfP_t", + "ft_strTfP_f", "ft_strTfP_t", + "ft_txtTfP_f", "ft_txtTfP_t", + "xx_dyn_strTfP_f", "xx_dyn_strTfP_t", + "xx_dyn_txtTfP_f", "xx_dyn_txtTfP_t", + "xx_dyn_ft_strTfP_f", "xx_dyn_ft_strTfP_t", + "xx_dyn_ft_txtTfP_f", "xx_dyn_ft_txtTfP_t" + }) { boolean expected = f.endsWith("TfP_t"); SchemaField field = schema.getField(f); - assertEquals(f + " field's omitTfP is wrong for ver=" + ver, - expected, field.omitTermFreqAndPositions()); + assertEquals( + f + " field's omitTfP is wrong for ver=" + ver, + expected, + field.omitTermFreqAndPositions()); FieldType ft = field.getType(); if (f.contains("ft_")) { // sanity check that we really are inheriting from fieldtype - assertEquals(f + " field's omitTfP doesn't match type for ver=" + ver, - expected, ft.hasProperty(FieldType.OMIT_TF_POSITIONS)); + assertEquals( + f + " field's omitTfP doesn't match type for ver=" + ver, + expected, + ft.hasProperty(FieldType.OMIT_TF_POSITIONS)); } else { // for fields where the property is explicit, make sure // we aren't getting a false negative because someone changed the // schema and we're inheriting from fieldType - assertEquals(f + " field's type has wrong omitTfP for ver=" + ver, - ( v < 1.2F ? false : - ! (field.getType() instanceof TextField)), - ft.hasProperty(FieldType.OMIT_TF_POSITIONS)); - + assertEquals( + f + " field's type has wrong omitTfP for ver=" + ver, + (v < 1.2F ? 
false : !(field.getType() instanceof TextField)), + ft.hasProperty(FieldType.OMIT_TF_POSITIONS)); } } - // regardless of version, explicit autophrase values on type + // regardless of version, explicit autophrase values on type // should be correct - for (String f : new String[] { "ft_txt_phrase_f", "ft_txt_phrase_t", - "xx_dyn_ft_txt_phrase_f", - "xx_dyn_ft_txt_phrase_t" }) { + for (String f : + new String[] { + "ft_txt_phrase_f", + "ft_txt_phrase_t", + "xx_dyn_ft_txt_phrase_f", + "xx_dyn_ft_txt_phrase_t" + }) { boolean expected = f.endsWith("phrase_t"); FieldType ft = schema.getFieldType(f); - assertTrue("broken test, assert only valid on text fields: " + f, - ft instanceof TextField); - assertEquals(f + " field's autophrase is wrong for ver=" + ver, - expected, - ((TextField)ft).getAutoGeneratePhraseQueries() ); + assertTrue( + "broken test, assert only valid on text fields: " + f, ft instanceof TextField); + assertEquals( + f + " field's autophrase is wrong for ver=" + ver, + expected, + ((TextField) ft).getAutoGeneratePhraseQueries()); } - - // regardless of version, explicit multiValued values on field or type + + // regardless of version, explicit multiValued values on field or type // should be correct - for (String f : new String[] { "strnorm_f", "strnorm_t", - "txtnorm_f", "txtnorm_t", - "ft_strnorm_f", "ft_strnorm_t", - "ft_txtnorm_f", "ft_txtnorm_t", - "xx_dyn_strnorm_f", "xx_dyn_strnorm_t", - "xx_dyn_txtnorm_f", "xx_dyn_txtnorm_t", - "xx_dyn_ft_strnorm_f", "xx_dyn_ft_strnorm_t", - "xx_dyn_ft_txtnorm_f", "xx_dyn_ft_txtnorm_t" }) { + for (String f : + new String[] { + "strnorm_f", "strnorm_t", + "txtnorm_f", "txtnorm_t", + "ft_strnorm_f", "ft_strnorm_t", + "ft_txtnorm_f", "ft_txtnorm_t", + "xx_dyn_strnorm_f", "xx_dyn_strnorm_t", + "xx_dyn_txtnorm_f", "xx_dyn_txtnorm_t", + "xx_dyn_ft_strnorm_f", "xx_dyn_ft_strnorm_t", + "xx_dyn_ft_txtnorm_f", "xx_dyn_ft_txtnorm_t" + }) { boolean expected = f.endsWith("norm_t"); SchemaField field = schema.getField(f); - assertEquals(f + " field's omitNorm is wrong for ver=" + ver, - expected, field.omitNorms()); + assertEquals( + f + " field's omitNorm is wrong for ver=" + ver, expected, field.omitNorms()); FieldType ft = field.getType(); if (f.contains("ft_")) { // sanity check that we really are inheriting from fieldtype - assertEquals(f + " field's omitNorm doesn't match type for ver=" + ver, - expected, ft.hasProperty(FieldType.OMIT_NORMS)); + assertEquals( + f + " field's omitNorm doesn't match type for ver=" + ver, + expected, + ft.hasProperty(FieldType.OMIT_NORMS)); } else { // for fields where the property is explicit, make sure // we aren't getting a false negative because someone changed the // schema and we're inheriting from fieldType - assertEquals(f + " field's type has wrong omitNorm for ver=" + ver, - ( v < 1.5F ? false : - ! (field.getType() instanceof TextField)), - ft.hasProperty(FieldType.OMIT_NORMS)); - + assertEquals( + f + " field's type has wrong omitNorm for ver=" + ver, + (v < 1.5F ? 
false : !(field.getType() instanceof TextField)), + ft.hasProperty(FieldType.OMIT_NORMS)); } } - + } finally { deleteCore(); } } } - public IndexSchema initCoreUsingSchemaVersion(final float ver) - throws Exception { + public IndexSchema initCoreUsingSchemaVersion(final float ver) throws Exception { try { System.setProperty("solr.schema.test.ver", String.valueOf(ver)); - initCore( "solrconfig-basic.xml", "schema-behavior.xml" ); + initCore("solrconfig-basic.xml", "schema-behavior.xml"); IndexSchema s = h.getCore().getLatestSchema(); - assertEquals("Schema version not set correctly", - String.valueOf(ver), - String.valueOf(s.getVersion())); + assertEquals( + "Schema version not set correctly", String.valueOf(ver), String.valueOf(s.getVersion())); return s; } finally { System.clearProperty("solr.schema.test.ver"); } } - } diff --git a/solr/core/src/test/org/apache/solr/schema/SchemaWatcherTest.java b/solr/core/src/test/org/apache/solr/schema/SchemaWatcherTest.java index 729ec53c601..98fd661f3d9 100644 --- a/solr/core/src/test/org/apache/solr/schema/SchemaWatcherTest.java +++ b/solr/core/src/test/org/apache/solr/schema/SchemaWatcherTest.java @@ -17,6 +17,10 @@ package org.apache.solr.schema; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyZeroInteractions; + import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.schema.ZkIndexSchemaReader.SchemaWatcher; import org.apache.zookeeper.WatchedEvent; @@ -25,10 +29,6 @@ import org.junit.Before; import org.junit.Test; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; - public class SchemaWatcherTest { private ZkIndexSchemaReader mockSchemaReader; @@ -37,14 +37,15 @@ public class SchemaWatcherTest { @Before public void setUp() throws Exception { SolrTestCaseJ4.assumeWorkingMockito(); - + mockSchemaReader = mock(ZkIndexSchemaReader.class); schemaWatcher = new SchemaWatcher(mockSchemaReader); } @Test public void testProcess() throws Exception { - schemaWatcher.process(new WatchedEvent(EventType.NodeDataChanged, KeeperState.SyncConnected, "/test")); + schemaWatcher.process( + new WatchedEvent(EventType.NodeDataChanged, KeeperState.SyncConnected, "/test")); verify(mockSchemaReader).updateSchema(schemaWatcher, -1); } @@ -52,7 +53,8 @@ public void testProcess() throws Exception { public void testDiscardReaderReference() throws Exception { schemaWatcher.discardReaderReference(); - schemaWatcher.process(new WatchedEvent(EventType.NodeDataChanged, KeeperState.SyncConnected, "/test")); + schemaWatcher.process( + new WatchedEvent(EventType.NodeDataChanged, KeeperState.SyncConnected, "/test")); // after discardReaderReference, SchemaWatcher should no longer hold a ref to the reader verifyZeroInteractions(mockSchemaReader); } diff --git a/solr/core/src/test/org/apache/solr/schema/SpatialRPTFieldTypeTest.java b/solr/core/src/test/org/apache/solr/schema/SpatialRPTFieldTypeTest.java index 02e6d20861b..103dcd7371f 100644 --- a/solr/core/src/test/org/apache/solr/schema/SpatialRPTFieldTypeTest.java +++ b/solr/core/src/test/org/apache/solr/schema/SpatialRPTFieldTypeTest.java @@ -15,11 +15,11 @@ * limitations under the License. 
*/ package org.apache.solr.schema; + import java.io.File; import java.nio.file.Files; import java.util.HashMap; import java.util.Map; - import org.apache.commons.io.FileUtils; import org.apache.solr.common.SolrException; import org.apache.solr.core.AbstractBadConfigTestBase; @@ -40,11 +40,15 @@ private void initManagedSchemaCore() throws Exception { tmpSolrHome = createTempDir().toFile(); tmpConfDir = new File(tmpSolrHome, confDir); File testHomeConfDir = new File(TEST_HOME(), confDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "solrconfig-managed-schema.xml"), tmpConfDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "solrconfig-managed-schema.xml"), tmpConfDir); FileUtils.copyFileToDirectory(new File(testHomeConfDir, "solrconfig-basic.xml"), tmpConfDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "solrconfig.snippet.randomindexconfig.xml"), tmpConfDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema-one-field-no-dynamic-field.xml"), tmpConfDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema-one-field-no-dynamic-field-unique-key.xml"), tmpConfDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "solrconfig.snippet.randomindexconfig.xml"), tmpConfDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "schema-one-field-no-dynamic-field.xml"), tmpConfDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "schema-one-field-no-dynamic-field-unique-key.xml"), tmpConfDir); FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema-minimal.xml"), tmpConfDir); FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema_codec.xml"), tmpConfDir); FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema-bm25.xml"), tmpConfDir); @@ -76,17 +80,28 @@ public void testDistanceUnitsDegrees() throws Exception { assertU(commit()); String q; - q = "geo:{!geofilt score=distance filter=false sfield=geo pt="+QUERY_COORDINATES+" d=180}"; - assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_DEGREES+"']"); - - q = "geo:{!geofilt score=degrees filter=false sfield=geo pt="+QUERY_COORDINATES+" d=180}"; - assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_DEGREES+"']"); - - q = "geo:{!geofilt score=kilometers filter=false sfield=geo pt="+QUERY_COORDINATES+" d=180}"; - assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_KILOMETERS+"']"); - - q = "geo:{!geofilt score=miles filter=false sfield=geo pt="+QUERY_COORDINATES+" d=180}"; - assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_MILES+"']"); + q = "geo:{!geofilt score=distance filter=false sfield=geo pt=" + QUERY_COORDINATES + " d=180}"; + assertQ( + req("q", q, "fl", "*,score"), + "//result/doc/float[@name='score'][.='" + DISTANCE_DEGREES + "']"); + + q = "geo:{!geofilt score=degrees filter=false sfield=geo pt=" + QUERY_COORDINATES + " d=180}"; + assertQ( + req("q", q, "fl", "*,score"), + "//result/doc/float[@name='score'][.='" + DISTANCE_DEGREES + "']"); + + q = + "geo:{!geofilt score=kilometers filter=false sfield=geo pt=" + + QUERY_COORDINATES + + " d=180}"; + assertQ( + req("q", q, "fl", "*,score"), + "//result/doc/float[@name='score'][.='" + DISTANCE_KILOMETERS + "']"); + + q = "geo:{!geofilt score=miles filter=false sfield=geo pt=" + QUERY_COORDINATES + " d=180}"; + assertQ( + req("q", q, "fl", "*,score"), + "//result/doc/float[@name='score'][.='" + DISTANCE_MILES + "']"); } public 
void testDistanceUnitsKilometers() throws Exception { @@ -96,17 +111,28 @@ public void testDistanceUnitsKilometers() throws Exception { assertU(commit()); String q; - q = "geo:{!geofilt score=distance filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}"; - assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_KILOMETERS+"']"); - - q = "geo:{!geofilt score=degrees filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}"; - assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_DEGREES+"']"); - - q = "geo:{!geofilt score=kilometers filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}"; - assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_KILOMETERS+"']"); - - q = "geo:{!geofilt score=miles filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}"; - assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_MILES+"']"); + q = "geo:{!geofilt score=distance filter=false sfield=geo pt=" + QUERY_COORDINATES + " d=1000}"; + assertQ( + req("q", q, "fl", "*,score"), + "//result/doc/float[@name='score'][.='" + DISTANCE_KILOMETERS + "']"); + + q = "geo:{!geofilt score=degrees filter=false sfield=geo pt=" + QUERY_COORDINATES + " d=1000}"; + assertQ( + req("q", q, "fl", "*,score"), + "//result/doc/float[@name='score'][.='" + DISTANCE_DEGREES + "']"); + + q = + "geo:{!geofilt score=kilometers filter=false sfield=geo pt=" + + QUERY_COORDINATES + + " d=1000}"; + assertQ( + req("q", q, "fl", "*,score"), + "//result/doc/float[@name='score'][.='" + DISTANCE_KILOMETERS + "']"); + + q = "geo:{!geofilt score=miles filter=false sfield=geo pt=" + QUERY_COORDINATES + " d=1000}"; + assertQ( + req("q", q, "fl", "*,score"), + "//result/doc/float[@name='score'][.='" + DISTANCE_MILES + "']"); } public void testJunkValuesForDistanceUnits() throws Exception { @@ -117,18 +143,23 @@ public void testJunkValuesForDistanceUnits() throws Exception { public void testMaxDistErrConversion() throws Exception { deleteCore(); File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); - Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema + // Delete managed-schema.xml so it won't block parsing a new schema + Files.delete(managedSchemaFile.toPath()); System.setProperty("managed.schema.mutable", "true"); - initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath()); + initCore( + "solrconfig-managed-schema.xml", + "schema-one-field-no-dynamic-field.xml", + tmpSolrHome.getPath()); String fieldName = "new_text_field"; - assertNull("Field '" + fieldName + "' is present in the schema", + assertNull( + "Field '" + fieldName + "' is present in the schema", h.getCore().getLatestSchema().getFieldOrNull(fieldName)); IndexSchema oldSchema = h.getCore().getLatestSchema(); SpatialRecursivePrefixTreeFieldType rptFieldType = new SpatialRecursivePrefixTreeFieldType(); - Map<String,String> rptMap = new HashMap<String,String>(); + Map<String, String> rptMap = new HashMap<String, String>(); rptFieldType.setTypeName("location_rpt"); rptMap.put("geo", "true"); @@ -159,13 +190,10 @@ public void testGeoDistanceFunctionWithBackCompat() throws Exception { assertU(commit()); // geodist() should return in km - assertJQ(req("defType","func", - "q","geodist(3,4)", - "sfield","geo", - "fl","score") - , 1e-5 - ,"/response/docs/[0]/score==314.4033" - ); + assertJQ( + req("defType", "func", "q", "geodist(3,4)", "sfield", "geo", "fl", "score"), + 1e-5, +
"/response/docs/[0]/score==314.4033"); } public void testGeoDistanceFunctionWithKilometers() throws Exception { @@ -174,13 +202,10 @@ public void testGeoDistanceFunctionWithKilometers() throws Exception { assertU(adoc("str", "X", "geo", "1,2")); assertU(commit()); - assertJQ(req("defType","func", - "q","geodist(3,4)", - "sfield","geo", - "fl","score") - , 1e-5 - ,"/response/docs/[0]/score==314.4033" - ); + assertJQ( + req("defType", "func", "q", "geodist(3,4)", "sfield", "geo", "fl", "score"), + 1e-5, + "/response/docs/[0]/score==314.4033"); } public void testGeoDistanceFunctionWithMiles() throws Exception { @@ -189,21 +214,23 @@ public void testGeoDistanceFunctionWithMiles() throws Exception { assertU(adoc("str", "X", "geo", "1,2")); assertU(commit()); - assertJQ(req("defType","func", - "q","geodist(3,4)", - "sfield","geo", - "fl","score") - , 1e-5 - ,"/response/docs/[0]/score==195.36115" - ); + assertJQ( + req("defType", "func", "q", "geodist(3,4)", "sfield", "geo", "fl", "score"), + 1e-5, + "/response/docs/[0]/score==195.36115"); } public void testShapeToFromStringWKT() throws Exception { - setupRPTField("miles", "true", "WKT", random().nextBoolean() - ? new SpatialRecursivePrefixTreeFieldType() : new RptWithGeometrySpatialField()); + setupRPTField( + "miles", + "true", + "WKT", + random().nextBoolean() + ? new SpatialRecursivePrefixTreeFieldType() + : new RptWithGeometrySpatialField()); - AbstractSpatialFieldType ftype = (AbstractSpatialFieldType) - h.getCore().getLatestSchema().getField("geo").getType(); + AbstractSpatialFieldType ftype = + (AbstractSpatialFieldType) h.getCore().getLatestSchema().getField("geo").getType(); String wkt = "POINT (1 2)"; Shape shape = ftype.parseShape(wkt); @@ -211,17 +238,22 @@ public void testShapeToFromStringWKT() throws Exception { assertEquals(wkt, out); - //assert fails GeoJSON - expectThrows(SolrException.class, () -> ftype.parseShape("{\"type\":\"Point\",\"coordinates\":[1,2]}")); - + // assert fails GeoJSON + expectThrows( + SolrException.class, () -> ftype.parseShape("{\"type\":\"Point\",\"coordinates\":[1,2]}")); } public void testShapeToFromStringGeoJSON() throws Exception { - setupRPTField("miles", "true", "GeoJSON", random().nextBoolean() - ? new SpatialRecursivePrefixTreeFieldType() : new RptWithGeometrySpatialField()); + setupRPTField( + "miles", + "true", + "GeoJSON", + random().nextBoolean() + ? 
new SpatialRecursivePrefixTreeFieldType() + : new RptWithGeometrySpatialField()); - AbstractSpatialFieldType ftype = (AbstractSpatialFieldType) - h.getCore().getLatestSchema().getField("geo").getType(); + AbstractSpatialFieldType ftype = + (AbstractSpatialFieldType) h.getCore().getLatestSchema().getField("geo").getType(); String json = "{\"type\":\"Point\",\"coordinates\":[1,2]}"; Shape shape = ftype.parseShape(json); @@ -230,15 +262,21 @@ public void testShapeToFromStringGeoJSON() throws Exception { assertEquals(json, out); } - private void setupRPTField(String distanceUnits, String geo, String format, FieldType fieldType) throws Exception { + private void setupRPTField(String distanceUnits, String geo, String format, FieldType fieldType) + throws Exception { deleteCore(); File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); - Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema + // Delete managed-schema.xml so it won't block parsing a new schema + Files.delete(managedSchemaFile.toPath()); System.setProperty("managed.schema.mutable", "true"); - initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath()); + initCore( + "solrconfig-managed-schema.xml", + "schema-one-field-no-dynamic-field.xml", + tmpSolrHome.getPath()); String fieldName = "new_text_field"; - assertNull("Field '" + fieldName + "' is present in the schema", + assertNull( + "Field '" + fieldName + "' is present in the schema", h.getCore().getLatestSchema().getFieldOrNull(fieldName)); IndexSchema oldSchema = h.getCore().getLatestSchema(); @@ -246,12 +284,10 @@ private void setupRPTField(String distanceUnits, String geo, String format, Fiel if (fieldType == null) { fieldType = new SpatialRecursivePrefixTreeFieldType(); } - Map<String,String> rptMap = new HashMap<String,String>(); - if(distanceUnits!=null) - rptMap.put("distanceUnits", distanceUnits); - if(geo!=null) - rptMap.put("geo", geo); - if(format!=null) { + Map<String, String> rptMap = new HashMap<String, String>(); + if (distanceUnits != null) rptMap.put("distanceUnits", distanceUnits); + if (geo != null) rptMap.put("geo", geo); + if (format != null) { rptMap.put("format", format); } if (random().nextBoolean()) { @@ -260,7 +296,14 @@ private void setupRPTField(String distanceUnits, String geo, String format, Fiel fieldType.init(oldSchema, rptMap); fieldType.setTypeName("location_rpt"); - SchemaField newField = new SchemaField("geo", fieldType, SchemaField.STORED | SchemaField.INDEXED | SchemaField.OMIT_NORMS | SchemaField.OMIT_TF_POSITIONS, + SchemaField newField = + new SchemaField( + "geo", + fieldType, + SchemaField.STORED + | SchemaField.INDEXED + | SchemaField.OMIT_NORMS + | SchemaField.OMIT_TF_POSITIONS, null); IndexSchema newSchema = oldSchema.addField(newField); diff --git a/solr/core/src/test/org/apache/solr/schema/StrFieldAnalyzerTest.java b/solr/core/src/test/org/apache/solr/schema/StrFieldAnalyzerTest.java index b893cfbdd5e..e17b0f9b9c2 100644 --- a/solr/core/src/test/org/apache/solr/schema/StrFieldAnalyzerTest.java +++ b/solr/core/src/test/org/apache/solr/schema/StrFieldAnalyzerTest.java @@ -16,11 +16,10 @@ */ package org.apache.solr.schema; +import java.io.IOException; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.BaseTokenStreamTestCase; -import java.io.IOException; - public class StrFieldAnalyzerTest extends BaseTokenStreamTestCase { public void testOffsetSanity() throws IOException { Analyzer analyzer = new StrField().getIndexAnalyzer(); diff --git
a/solr/core/src/test/org/apache/solr/schema/SynonymTokenizerTest.java b/solr/core/src/test/org/apache/solr/schema/SynonymTokenizerTest.java index 5813b8e7c51..00e330d673d 100644 --- a/solr/core/src/test/org/apache/solr/schema/SynonymTokenizerTest.java +++ b/solr/core/src/test/org/apache/solr/schema/SynonymTokenizerTest.java @@ -22,11 +22,9 @@ import org.junit.Test; /** - * This is a simple test to make sure the schema loads when - * provided a tokenizerFactory that requires a match version - * + * This is a simple test to make sure the schema loads when provided a tokenizerFactory that + * requires a match version */ - public class SynonymTokenizerTest extends SolrTestCaseJ4 { @BeforeClass @@ -38,6 +36,6 @@ public static void beforeTests() throws Exception { public void testSchemaLoading() { SolrCore core = h.getCore(); IndexSchema schema = core.getLatestSchema(); - assertTrue( schema.getFieldTypes().containsKey("text_synonyms") ); + assertTrue(schema.getFieldTypes().containsKey("text_synonyms")); } } diff --git a/solr/core/src/test/org/apache/solr/schema/TestBinaryField.java b/solr/core/src/test/org/apache/solr/schema/TestBinaryField.java index 1ad7765c0fa..97d5437ccba 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestBinaryField.java +++ b/solr/core/src/test/org/apache/solr/schema/TestBinaryField.java @@ -16,6 +16,14 @@ */ package org.apache.solr.schema; +import java.io.File; +import java.io.OutputStreamWriter; +import java.io.Writer; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.util.List; +import java.util.Properties; import org.apache.commons.io.FileUtils; import org.apache.solr.SolrJettyTestBase; import org.apache.solr.SolrTestCaseJ4; @@ -29,15 +37,6 @@ import org.apache.solr.common.SolrInputDocument; import org.junit.BeforeClass; -import java.io.File; -import java.io.OutputStreamWriter; -import java.io.Writer; -import java.nio.ByteBuffer; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.util.List; -import java.util.Properties; - @SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776") public class TestBinaryField extends SolrJettyTestBase { @@ -54,17 +53,22 @@ public static void beforeTest() throws Exception { dataDir.mkdirs(); confDir.mkdirs(); - FileUtils.copyFile(new File(SolrTestCaseJ4.TEST_HOME(), "solr.xml"), new File(homeDir, "solr.xml")); + FileUtils.copyFile( + new File(SolrTestCaseJ4.TEST_HOME(), "solr.xml"), new File(homeDir, "solr.xml")); String src_dir = TEST_HOME() + "/collection1/conf"; - FileUtils.copyFile(new File(src_dir, "schema-binaryfield.xml"), - new File(confDir, "schema.xml")); - FileUtils.copyFile(new File(src_dir, "solrconfig-basic.xml"), - new File(confDir, "solrconfig.xml")); - FileUtils.copyFile(new File(src_dir, "solrconfig.snippet.randomindexconfig.xml"), - new File(confDir, "solrconfig.snippet.randomindexconfig.xml")); - - try (Writer w = new OutputStreamWriter(Files.newOutputStream(collDir.toPath().resolve("core.properties")), StandardCharsets.UTF_8)) { + FileUtils.copyFile( + new File(src_dir, "schema-binaryfield.xml"), new File(confDir, "schema.xml")); + FileUtils.copyFile( + new File(src_dir, "solrconfig-basic.xml"), new File(confDir, "solrconfig.xml")); + FileUtils.copyFile( + new File(src_dir, "solrconfig.snippet.randomindexconfig.xml"), + new File(confDir, "solrconfig.snippet.randomindexconfig.xml")); + + try (Writer w = + new OutputStreamWriter( + Files.newOutputStream(collDir.toPath().resolve("core.properties")), + 
StandardCharsets.UTF_8)) { Properties coreProps = new Properties(); coreProps.put("name", "collection1"); coreProps.store(w, ""); @@ -73,7 +77,6 @@ public static void beforeTest() throws Exception { createAndStartJetty(homeDir.getAbsolutePath()); } - public void testSimple() throws Exception { try (SolrClient client = getSolrClient()) { byte[] buf = new byte[10]; @@ -121,16 +124,13 @@ public void testSimple() throws Exception { assertEquals((byte) (i + 4), b); } - } else if (id == 3) { assertEquals(10, data.length); for (int i = 0; i < data.length; i++) { byte b = data[i]; assertEquals((byte) i, b); } - } - } for (Bean d : beans) { Integer id = Integer.parseInt(d.id); @@ -149,25 +149,19 @@ public void testSimple() throws Exception { assertEquals((byte) (i + 4), b); } - } else if (id == 3) { assertEquals(10, data.length); for (int i = 0; i < data.length; i++) { byte b = data[i]; assertEquals((byte) i, b); } - } - } } - - } - public static class Bean{ - @Field - String id; - @Field - byte [] data; } + public static class Bean { + @Field String id; + @Field byte[] data; + } } diff --git a/solr/core/src/test/org/apache/solr/schema/TestBulkSchemaConcurrent.java b/solr/core/src/test/org/apache/solr/schema/TestBulkSchemaConcurrent.java index df196d759d0..7aaed143e3e 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestBulkSchemaConcurrent.java +++ b/solr/core/src/test/org/apache/solr/schema/TestBulkSchemaConcurrent.java @@ -16,6 +16,9 @@ */ package org.apache.solr.schema; +import static org.apache.solr.rest.schema.TestBulkSchemaAPI.getObj; +import static org.apache.solr.rest.schema.TestBulkSchemaAPI.getSourceCopyFields; + import java.lang.invoke.MethodHandles; import java.nio.charset.StandardCharsets; import java.util.ArrayList; @@ -25,7 +28,6 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.cloud.AbstractFullDistribZkTestBase; import org.apache.solr.common.util.StrUtils; @@ -36,10 +38,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.rest.schema.TestBulkSchemaAPI.getObj; -import static org.apache.solr.rest.schema.TestBulkSchemaAPI.getSourceCopyFields; - -public class TestBulkSchemaConcurrent extends AbstractFullDistribZkTestBase { +public class TestBulkSchemaConcurrent extends AbstractFullDistribZkTestBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @BeforeClass @@ -62,23 +61,24 @@ public void test() throws Exception { @SuppressWarnings({"rawtypes"}) final List collectErrors = Collections.synchronizedList(new ArrayList<>()); - for (int i = 0 ; i < threadCount ; i++) { + for (int i = 0; i < threadCount; i++) { final int finalI = i; - threads[i] = new Thread(){ - @Override - public void run() { - @SuppressWarnings({"rawtypes"}) - ArrayList errs = new ArrayList(); - collectErrors.add(errs); - try { - invokeBulkAddCall(finalI, errs); - invokeBulkReplaceCall(finalI, errs); - invokeBulkDeleteCall(finalI, errs); - } catch (Exception e) { - e.printStackTrace(); - } - } - }; + threads[i] = + new Thread() { + @Override + public void run() { + @SuppressWarnings({"rawtypes"}) + ArrayList errs = new ArrayList(); + collectErrors.add(errs); + try { + invokeBulkAddCall(finalI, errs); + invokeBulkReplaceCall(finalI, errs); + invokeBulkDeleteCall(finalI, errs); + } catch (Exception e) { + e.printStackTrace(); + } + } + }; threads[i].start(); } @@ -87,8 +87,8 @@ public void run() { boolean success = true; - for 
(@SuppressWarnings({"rawtypes"})List e : collectErrors) { - if (e != null && !e.isEmpty()) { + for (@SuppressWarnings({"rawtypes"}) List e : collectErrors) { + if (e != null && !e.isEmpty()) { success = false; log.error("{}", e); } @@ -99,32 +99,33 @@ public void run() { @SuppressWarnings({"unchecked"}) private void invokeBulkAddCall(int seed, ArrayList errs) throws Exception { - String payload = "{\n" + - " 'add-field' : {\n" + - " 'name':'replaceFieldA',\n" + - " 'type': 'string',\n" + - " 'stored':true,\n" + - " 'indexed':false\n" + - " },\n" + - " 'add-dynamic-field' : {\n" + - " 'name' :'replaceDynamicField',\n" + - " 'type':'string',\n" + - " 'stored':true,\n" + - " 'indexed':true\n" + - " },\n" + - " 'add-copy-field' : {\n" + - " 'source' :'replaceFieldA',\n" + - " 'dest':['replaceDynamicCopyFieldDest']\n" + - " },\n" + - " 'add-field-type' : {\n" + - " 'name' :'myNewFieldTypeName',\n" + - " 'class' : 'solr.StrField',\n" + - " 'sortMissingLast':'true'\n" + - " }\n" + - " }"; + String payload = + "{\n" + + " 'add-field' : {\n" + + " 'name':'replaceFieldA',\n" + + " 'type': 'string',\n" + + " 'stored':true,\n" + + " 'indexed':false\n" + + " },\n" + + " 'add-dynamic-field' : {\n" + + " 'name' :'replaceDynamicField',\n" + + " 'type':'string',\n" + + " 'stored':true,\n" + + " 'indexed':true\n" + + " },\n" + + " 'add-copy-field' : {\n" + + " 'source' :'replaceFieldA',\n" + + " 'dest':['replaceDynamicCopyFieldDest']\n" + + " },\n" + + " 'add-field-type' : {\n" + + " 'name' :'myNewFieldTypeName',\n" + + " 'class' : 'solr.StrField',\n" + + " 'sortMissingLast':'true'\n" + + " }\n" + + " }"; String aField = "a" + seed; String dynamicFldName = "*_lol" + seed; - String dynamicCopyFldDest = "hello_lol"+seed; + String dynamicCopyFldDest = "hello_lol" + seed; String newFieldTypeName = "mystr" + seed; payload = payload.replace("replaceFieldA", aField); @@ -142,31 +143,36 @@ private void invokeBulkAddCall(int seed, ArrayList errs) throws Exceptio return; } - //get another node + // get another node Set errmessages = new HashSet<>(); RestTestHarness harness = randomRestTestHarness(r); try { long startTime = System.nanoTime(); long maxTimeoutMillis = 100000; - while (TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutMillis) { + while (TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) + < maxTimeoutMillis) { errmessages.clear(); @SuppressWarnings({"rawtypes"}) Map m = getObj(harness, aField, "fields"); if (m == null) errmessages.add(StrUtils.formatString("field {0} not created", aField)); - + m = getObj(harness, dynamicFldName, "dynamicFields"); - if (m == null) errmessages.add(StrUtils.formatString("dynamic field {0} not created", dynamicFldName)); + if (m == null) + errmessages.add(StrUtils.formatString("dynamic field {0} not created", dynamicFldName)); @SuppressWarnings({"rawtypes"}) List l = getSourceCopyFields(harness, aField); if (!checkCopyField(l, aField, dynamicCopyFldDest)) - errmessages.add(StrUtils.formatString("CopyField source={0},dest={1} not created", aField, dynamicCopyFldDest)); - + errmessages.add( + StrUtils.formatString( + "CopyField source={0},dest={1} not created", aField, dynamicCopyFldDest)); + m = getObj(harness, newFieldTypeName, "fieldTypes"); - if (m == null) errmessages.add(StrUtils.formatString("new type {0} not created", newFieldTypeName)); - + if (m == null) + errmessages.add(StrUtils.formatString("new type {0} not created", newFieldTypeName)); + if (errmessages.isEmpty()) break; - + 
Thread.sleep(10); } } finally { @@ -179,27 +185,28 @@ private void invokeBulkAddCall(int seed, ArrayList errs) throws Exceptio @SuppressWarnings({"unchecked"}) private void invokeBulkReplaceCall(int seed, ArrayList errs) throws Exception { - String payload = "{\n" + - " 'replace-field' : {\n" + - " 'name':'replaceFieldA',\n" + - " 'type': 'text',\n" + - " 'stored':true,\n" + - " 'indexed':true\n" + - " },\n" + - " 'replace-dynamic-field' : {\n" + - " 'name' :'replaceDynamicField',\n" + - " 'type':'text',\n" + - " 'stored':true,\n" + - " 'indexed':true\n" + - " },\n" + - " 'replace-field-type' : {\n" + - " 'name' :'myNewFieldTypeName',\n" + - " 'class' : 'solr.TextField'\n" + - " }\n" + - " }"; + String payload = + "{\n" + + " 'replace-field' : {\n" + + " 'name':'replaceFieldA',\n" + + " 'type': 'text',\n" + + " 'stored':true,\n" + + " 'indexed':true\n" + + " },\n" + + " 'replace-dynamic-field' : {\n" + + " 'name' :'replaceDynamicField',\n" + + " 'type':'text',\n" + + " 'stored':true,\n" + + " 'indexed':true\n" + + " },\n" + + " 'replace-field-type' : {\n" + + " 'name' :'myNewFieldTypeName',\n" + + " 'class' : 'solr.TextField'\n" + + " }\n" + + " }"; String aField = "a" + seed; String dynamicFldName = "*_lol" + seed; - String dynamicCopyFldDest = "hello_lol"+seed; + String dynamicCopyFldDest = "hello_lol" + seed; String newFieldTypeName = "mystr" + seed; payload = payload.replace("replaceFieldA", aField); @@ -216,28 +223,36 @@ private void invokeBulkReplaceCall(int seed, ArrayList errs) throws Exce return; } - //get another node + // get another node Set errmessages = new HashSet<>(); RestTestHarness harness = randomRestTestHarness(r); try { long startTime = System.nanoTime(); long maxTimeoutMillis = 100000; - while (TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutMillis) { + while (TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) + < maxTimeoutMillis) { errmessages.clear(); @SuppressWarnings({"rawtypes"}) Map m = getObj(harness, aField, "fields"); - if (m == null) errmessages.add(StrUtils.formatString("field {0} no longer present", aField)); + if (m == null) + errmessages.add(StrUtils.formatString("field {0} no longer present", aField)); m = getObj(harness, dynamicFldName, "dynamicFields"); - if (m == null) errmessages.add(StrUtils.formatString("dynamic field {0} no longer present", dynamicFldName)); + if (m == null) + errmessages.add( + StrUtils.formatString("dynamic field {0} no longer present", dynamicFldName)); @SuppressWarnings({"rawtypes"}) List l = getSourceCopyFields(harness, aField); if (!checkCopyField(l, aField, dynamicCopyFldDest)) - errmessages.add(StrUtils.formatString("CopyField source={0},dest={1} no longer present", aField, dynamicCopyFldDest)); + errmessages.add( + StrUtils.formatString( + "CopyField source={0},dest={1} no longer present", aField, dynamicCopyFldDest)); m = getObj(harness, newFieldTypeName, "fieldTypes"); - if (m == null) errmessages.add(StrUtils.formatString("new type {0} no longer present", newFieldTypeName)); + if (m == null) + errmessages.add( + StrUtils.formatString("new type {0} no longer present", newFieldTypeName)); if (errmessages.isEmpty()) break; @@ -253,23 +268,24 @@ private void invokeBulkReplaceCall(int seed, ArrayList errs) throws Exce @SuppressWarnings({"unchecked"}) private void invokeBulkDeleteCall(int seed, ArrayList errs) throws Exception { - String payload = "{\n" + - " 'delete-copy-field' : {\n" + - " 'source' :'replaceFieldA',\n" + - " 
'dest':['replaceDynamicCopyFieldDest']\n" + - " },\n" + - " 'delete-field' : {'name':'replaceFieldA'},\n" + - " 'delete-dynamic-field' : {'name' :'replaceDynamicField'},\n" + - " 'delete-field-type' : {'name' :'myNewFieldTypeName'}\n" + - " }"; + String payload = + "{\n" + + " 'delete-copy-field' : {\n" + + " 'source' :'replaceFieldA',\n" + + " 'dest':['replaceDynamicCopyFieldDest']\n" + + " },\n" + + " 'delete-field' : {'name':'replaceFieldA'},\n" + + " 'delete-dynamic-field' : {'name' :'replaceDynamicField'},\n" + + " 'delete-field-type' : {'name' :'myNewFieldTypeName'}\n" + + " }"; String aField = "a" + seed; String dynamicFldName = "*_lol" + seed; - String dynamicCopyFldDest = "hello_lol"+seed; + String dynamicCopyFldDest = "hello_lol" + seed; String newFieldTypeName = "mystr" + seed; payload = payload.replace("replaceFieldA", aField); payload = payload.replace("replaceDynamicField", dynamicFldName); - payload = payload.replace("replaceDynamicCopyFieldDest",dynamicCopyFldDest); + payload = payload.replace("replaceDynamicCopyFieldDest", dynamicCopyFldDest); payload = payload.replace("myNewFieldTypeName", newFieldTypeName); RestTestHarness publisher = randomRestTestHarness(r); @@ -282,28 +298,33 @@ private void invokeBulkDeleteCall(int seed, ArrayList errs) throws Excep return; } - //get another node + // get another node Set errmessages = new HashSet<>(); RestTestHarness harness = randomRestTestHarness(r); try { long startTime = System.nanoTime(); long maxTimeoutMillis = 100000; - while (TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutMillis) { + while (TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) + < maxTimeoutMillis) { errmessages.clear(); @SuppressWarnings({"rawtypes"}) Map m = getObj(harness, aField, "fields"); if (m != null) errmessages.add(StrUtils.formatString("field {0} still exists", aField)); m = getObj(harness, dynamicFldName, "dynamicFields"); - if (m != null) errmessages.add(StrUtils.formatString("dynamic field {0} still exists", dynamicFldName)); + if (m != null) + errmessages.add(StrUtils.formatString("dynamic field {0} still exists", dynamicFldName)); @SuppressWarnings({"rawtypes"}) List l = getSourceCopyFields(harness, aField); if (checkCopyField(l, aField, dynamicCopyFldDest)) - errmessages.add(StrUtils.formatString("CopyField source={0},dest={1} still exists", aField, dynamicCopyFldDest)); + errmessages.add( + StrUtils.formatString( + "CopyField source={0},dest={1} still exists", aField, dynamicCopyFldDest)); m = getObj(harness, newFieldTypeName, "fieldTypes"); - if (m != null) errmessages.add(StrUtils.formatString("new type {0} still exists", newFieldTypeName)); + if (m != null) + errmessages.add(StrUtils.formatString("new type {0} still exists", newFieldTypeName)); if (errmessages.isEmpty()) break; @@ -317,11 +338,11 @@ private void invokeBulkDeleteCall(int seed, ArrayList errs) throws Excep } } - private boolean checkCopyField(@SuppressWarnings({"rawtypes"})List l, String src, String dest) { + private boolean checkCopyField( + @SuppressWarnings({"rawtypes"}) List l, String src, String dest) { if (l == null) return false; - for (@SuppressWarnings({"rawtypes"})Map map : l) { - if (src.equals(map.get("source")) && dest.equals(map.get("dest"))) - return true; + for (@SuppressWarnings({"rawtypes"}) Map map : l) { + if (src.equals(map.get("source")) && dest.equals(map.get("dest"))) return true; } return false; } diff --git 
a/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchema.java b/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchema.java index 54c679d096a..ec131cc0243 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchema.java +++ b/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchema.java @@ -15,6 +15,10 @@ * limitations under the License. */ package org.apache.solr.schema; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.List; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.request.QueryRequest; @@ -27,10 +31,6 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.List; - public class TestCloudManagedSchema extends AbstractFullDistribZkTestBase { public TestCloudManagedSchema() { @@ -55,22 +55,24 @@ public void test() throws Exception { QueryRequest request = new QueryRequest(params); request.setPath("/admin/cores"); int which = r.nextInt(clients.size()); - HttpSolrClient client = (HttpSolrClient)clients.get(which); + HttpSolrClient client = (HttpSolrClient) clients.get(which); String previousBaseURL = client.getBaseURL(); // Strip /collection1 step from baseURL - requests fail otherwise client.setBaseURL(previousBaseURL.substring(0, previousBaseURL.lastIndexOf("/"))); NamedList namedListResponse = client.request(request); - client.setBaseURL(previousBaseURL); // Restore baseURL - NamedList status = (NamedList)namedListResponse.get("status"); - NamedList collectionStatus = (NamedList)status.getVal(0); - String collectionSchema = (String)collectionStatus.get(CoreAdminParams.SCHEMA); + client.setBaseURL(previousBaseURL); // Restore baseURL + NamedList status = (NamedList) namedListResponse.get("status"); + NamedList collectionStatus = (NamedList) status.getVal(0); + String collectionSchema = (String) collectionStatus.get(CoreAdminParams.SCHEMA); // Make sure the upgrade to managed schema happened - assertEquals("Schema resource name differs from expected name", "managed-schema.xml", collectionSchema); + assertEquals( + "Schema resource name differs from expected name", "managed-schema.xml", collectionSchema); SolrZkClient zkClient = new SolrZkClient(zkServer.getZkHost(), 30000); try { // Make sure "DO NOT EDIT" is in the content of the managed schema - String fileContent = getFileContentFromZooKeeper(zkClient, "/solr/configs/conf1/managed-schema.xml"); + String fileContent = + getFileContentFromZooKeeper(zkClient, "/solr/configs/conf1/managed-schema.xml"); assertTrue("Managed schema is missing", fileContent.contains("DO NOT EDIT")); // Make sure the original non-managed schema is no longer in ZooKeeper @@ -85,20 +87,26 @@ public void test() throws Exception { } } } - + private String getFileContentFromZooKeeper(SolrZkClient zkClient, String fileName) throws IOException, SolrServerException, KeeperException, InterruptedException { return (new String(zkClient.getData(fileName, null, null, true), StandardCharsets.UTF_8)); - } - protected final void assertFileNotInZooKeeper(SolrZkClient zkClient, String parent, String fileName) throws Exception { + + protected final void assertFileNotInZooKeeper( + SolrZkClient zkClient, String parent, String fileName) throws Exception { List kids = zkClient.getChildren(parent, null, true); for (String kid : kids) { if (kid.equalsIgnoreCase(fileName)) { - String rawContent = new 
String(zkClient.getData(fileName, null, null, true), StandardCharsets.UTF_8); - fail("File '" + fileName + "' was unexpectedly found in ZooKeeper. Content starts with '" - + rawContent.substring(0, 100) + " [...]'"); + String rawContent = + new String(zkClient.getData(fileName, null, null, true), StandardCharsets.UTF_8); + fail( + "File '" + + fileName + + "' was unexpectedly found in ZooKeeper. Content starts with '" + + rawContent.substring(0, 100) + + " [...]'"); } } } diff --git a/solr/core/src/test/org/apache/solr/schema/TestCloudSchemaless.java b/solr/core/src/test/org/apache/solr/schema/TestCloudSchemaless.java index 09b2a6daf7c..f2598d5539a 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestCloudSchemaless.java +++ b/solr/core/src/test/org/apache/solr/schema/TestCloudSchemaless.java @@ -22,7 +22,6 @@ import java.util.List; import java.util.SortedMap; import java.util.TreeMap; - import org.apache.solr.SolrTestCaseJ4.SuppressSSL; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -37,14 +36,14 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * Tests a schemaless collection configuration with SolrCloud - */ +/** Tests a schemaless collection configuration with SolrCloud */ @SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776") -// See: https://issues.apache.org/jira/browse/SOLR-12028 Tests cannot remove files on Windows machines occasionally +// See: https://issues.apache.org/jira/browse/SOLR-12028 Tests cannot remove files on Windows +// machines occasionally public class TestCloudSchemaless extends AbstractFullDistribZkTestBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private static final String SUCCESS_XPATH = "/response/lst[@name='responseHeader']/int[@name='status'][.='0']"; + private static final String SUCCESS_XPATH = + "/response/lst[@name='responseHeader']/int[@name='status'][.='0']"; @After public void teardDown() throws Exception { @@ -63,8 +62,8 @@ protected String getCloudSolrConfig() { } @Override - public SortedMap<ServletHolder,String> getExtraServlets() { - final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>(); + public SortedMap<ServletHolder, String> getExtraServlets() { + final SortedMap<ServletHolder, String> extraServlets = new TreeMap<>(); return extraServlets; } @@ -75,7 +74,7 @@ private String[] getExpectedFieldResponses(int numberOfDocs) { for (int i = 0; i < numberOfDocs; ++i) { String newFieldName = "newTestFieldInt" + i; expectedAddFields[1 + i] = - "/response/arr[@name='fields']/lst/str[@name='name'][.='" + newFieldName + "']"; + "/response/arr[@name='fields']/lst/str[@name='name'][.='" + newFieldName + "']"; } return expectedAddFields; } @@ -89,16 +88,21 @@ public void test() throws Exception { // First, add a bunch of documents in a single update with the same new field. // This tests that the replicas properly handle schema additions. 
- int slices = getCommonCloudSolrClient().getZkStateReader().getClusterState() - .getCollection("collection1").getActiveSlices().size(); + int slices = + getCommonCloudSolrClient() + .getZkStateReader() + .getClusterState() + .getCollection("collection1") + .getActiveSlices() + .size(); int trials = 50; // generate enough docs so that we can expect at least a doc per slice - int numDocsPerTrial = (int)(slices * (Math.log(slices) + 1)); + int numDocsPerTrial = (int) (slices * (Math.log(slices) + 1)); SolrClient randomClient = clients.get(random().nextInt(clients.size())); int docNumber = 0; for (int i = 0; i < trials; ++i) { List docs = new ArrayList<>(); - for (int j =0; j < numDocsPerTrial; ++j) { + for (int j = 0; j < numDocsPerTrial; ++j) { SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", Long.toHexString(Double.doubleToLongBits(random().nextDouble()))); doc.addField("newTestFieldInt" + docNumber++, "123"); @@ -110,22 +114,29 @@ public void test() throws Exception { } randomClient.commit(); - String [] expectedFields = getExpectedFieldResponses(docNumber); + String[] expectedFields = getExpectedFieldResponses(docNumber); // Check that all the fields were added - forAllRestTestHarnesses(client -> { - try { - String request = "/schema/fields?wt=xml"; - String response = client.query(request); - String result = BaseTestHarness.validateXPath(response, expectedFields); - if (result != null) { - String msg = "QUERY FAILED: xpath=" + result + " request=" + request + " response=" + response; - log.error(msg); - fail(msg); - } - } catch (Exception ex) { - fail("Caught exception: "+ex); - } - }); + forAllRestTestHarnesses( + client -> { + try { + String request = "/schema/fields?wt=xml"; + String response = client.query(request); + String result = BaseTestHarness.validateXPath(response, expectedFields); + if (result != null) { + String msg = + "QUERY FAILED: xpath=" + + result + + " request=" + + request + + " response=" + + response; + log.error(msg); + fail(msg); + } + } catch (Exception ex) { + fail("Caught exception: " + ex); + } + }); // Now, let's ensure that writing the same field with two different types fails int failTrials = 50; @@ -139,19 +150,26 @@ public void test() throws Exception { dateDoc.addField("longOrDateField" + i, "1995-12-31T23:59:59Z"); // randomize the order of the docs - List docs = random().nextBoolean()? Arrays.asList(intDoc, dateDoc): Arrays.asList(dateDoc, intDoc); - - SolrException ex = expectThrows(SolrException.class, () -> { - randomClient.add(docs); - randomClient.commit(); - }); + List docs = + random().nextBoolean() ? 
Arrays.asList(intDoc, dateDoc) : Arrays.asList(dateDoc, intDoc); + + SolrException ex = + expectThrows( + SolrException.class, + () -> { + randomClient.add(docs); + randomClient.commit(); + }); assertEquals(ErrorCode.BAD_REQUEST, ErrorCode.getErrorCode(ex.code())); - ex = expectThrows(SolrException.class, () -> { - CloudSolrClient cloudSolrClient = getCommonCloudSolrClient(); - cloudSolrClient.add(docs); - cloudSolrClient.commit(); - }); + ex = + expectThrows( + SolrException.class, + () -> { + CloudSolrClient cloudSolrClient = getCommonCloudSolrClient(); + cloudSolrClient.add(docs); + cloudSolrClient.commit(); + }); assertEquals(ErrorCode.BAD_REQUEST, ErrorCode.getErrorCode(ex.code())); } } diff --git a/solr/core/src/test/org/apache/solr/schema/TestCollationField.java b/solr/core/src/test/org/apache/solr/schema/TestCollationField.java index f33a902beac..aae742161a4 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestCollationField.java +++ b/solr/core/src/test/org/apache/solr/schema/TestCollationField.java @@ -21,22 +21,18 @@ import java.text.Collator; import java.text.RuleBasedCollator; import java.util.Locale; - - import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.solr.SolrTestCaseJ4; import org.junit.BeforeClass; -/** - * Tests {@link CollationField} with TermQueries, RangeQueries, and sort order. - */ +/** Tests {@link CollationField} with TermQueries, RangeQueries, and sort order. */ public class TestCollationField extends SolrTestCaseJ4 { - + @BeforeClass public static void beforeClass() throws Exception { String home = setupSolrHome(); - initCore("solrconfig.xml","schema.xml", home); + initCore("solrconfig.xml", "schema.xml", home); // add some docs assertU(adoc("id", "1", "text", "\u0633\u0627\u0628")); assertU(adoc("id", "2", "text", "I WİLL USE TURKİSH CASING")); @@ -52,36 +48,41 @@ public static void beforeClass() throws Exception { assertU(adoc("id", "12", "text", "\u0698\u0698")); assertU(commit()); } - + /** - * Ugly: but what to do? We want to test custom sort, which reads rules in as a resource. - * These are largish files, and jvm-specific (as our documentation says, you should always - * look out for jvm differences with collation). - * So it's preferable to create this file on-the-fly. + * Ugly: but what to do? We want to test custom sort, which reads rules in as a resource. These + * are largish files, and jvm-specific (as our documentation says, you should always look out for + * jvm differences with collation). So it's preferable to create this file on-the-fly. 
*/ public static String setupSolrHome() throws Exception { // make a solr home underneath the test's TEMP_DIR File tmpFile = createTempDir("collation1").toFile(); - + // make data and conf dirs new File(tmpFile, "data").mkdir(); File confDir = new File(tmpFile + "/collection1", "conf"); confDir.mkdirs(); - + // copy over configuration files - FileUtils.copyFile(getFile("solr/collection1/conf/solrconfig-basic.xml"), new File(confDir, "solrconfig.xml")); - FileUtils.copyFile(getFile("solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml"), new File(confDir, "solrconfig.snippet.randomindexconfig.xml")); - FileUtils.copyFile(getFile("solr/collection1/conf/schema-collate.xml"), new File(confDir, "schema.xml")); - + FileUtils.copyFile( + getFile("solr/collection1/conf/solrconfig-basic.xml"), new File(confDir, "solrconfig.xml")); + FileUtils.copyFile( + getFile("solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml"), + new File(confDir, "solrconfig.snippet.randomindexconfig.xml")); + FileUtils.copyFile( + getFile("solr/collection1/conf/schema-collate.xml"), new File(confDir, "schema.xml")); + // generate custom collation rules (DIN 5007-2), saving to customrules.dat - RuleBasedCollator baseCollator = (RuleBasedCollator) Collator.getInstance(new Locale("de", "DE")); + RuleBasedCollator baseCollator = + (RuleBasedCollator) Collator.getInstance(new Locale("de", "DE")); String DIN5007_2_tailorings = - "& ae , a\u0308 & AE , A\u0308"+ - "& oe , o\u0308 & OE , O\u0308"+ - "& ue , u\u0308 & UE , u\u0308"; + "& ae , a\u0308 & AE , A\u0308" + + "& oe , o\u0308 & OE , O\u0308" + + "& ue , u\u0308 & UE , u\u0308"; - RuleBasedCollator tailoredCollator = new RuleBasedCollator(baseCollator.getRules() + DIN5007_2_tailorings); + RuleBasedCollator tailoredCollator = + new RuleBasedCollator(baseCollator.getRules() + DIN5007_2_tailorings); String tailoredRules = tailoredCollator.getRules(); FileOutputStream os = new FileOutputStream(new File(confDir, "customrules.dat")); IOUtils.write(tailoredRules, os, "UTF-8"); @@ -90,106 +91,102 @@ public static String setupSolrHome() throws Exception { return tmpFile.getAbsolutePath(); } - /** - * Test termquery with german DIN 5007-1 primary strength. - * In this case, ö is equivalent to o (but not oe) + /** + * Test termquery with german DIN 5007-1 primary strength. In this case, ö is equivalent to o (but + * not oe) */ public void testBasicTermQuery() { - assertQ("Collated TQ: ", - req("fl", "id", "q", "sort_de:tone", "sort", "id asc" ), - "//*[@numFound='2']", - "//result/doc[1]/str[@name='id'][.=4]", - "//result/doc[2]/str[@name='id'][.=7]" - ); + assertQ( + "Collated TQ: ", + req("fl", "id", "q", "sort_de:tone", "sort", "id asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=4]", + "//result/doc[2]/str[@name='id'][.=7]"); } - - /** - * Test rangequery again with the DIN 5007-1 collator. - * We do a range query of tone .. tp, in binary order this - * would retrieve nothing due to case and accent differences. + + /** + * Test rangequery again with the DIN 5007-1 collator. We do a range query of tone .. tp, in + * binary order this would retrieve nothing due to case and accent differences. 
*/ public void testBasicRangeQuery() { - assertQ("Collated RangeQ: ", - req("fl", "id", "q", "sort_de:[tone TO tp]", "sort", "id asc" ), - "//*[@numFound='2']", - "//result/doc[1]/str[@name='id'][.=4]", - "//result/doc[2]/str[@name='id'][.=7]" - ); + assertQ( + "Collated RangeQ: ", + req("fl", "id", "q", "sort_de:[tone TO tp]", "sort", "id asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=4]", + "//result/doc[2]/str[@name='id'][.=7]"); } - - /** - * Test sort with a danish collator. ö is ordered after z - */ + + /** Test sort with a danish collator. ö is ordered after z */ public void testBasicSort() { - assertQ("Collated Sort: ", - req("fl", "id", "q", "sort_da:[tz TO töz]", "sort", "sort_da asc" ), - "//*[@numFound='2']", - "//result/doc[1]/str[@name='id'][.=11]", - "//result/doc[2]/str[@name='id'][.=4]" - ); + assertQ( + "Collated Sort: ", + req("fl", "id", "q", "sort_da:[tz TO töz]", "sort", "sort_da asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=11]", + "//result/doc[2]/str[@name='id'][.=4]"); } - - /** - * Test sort with an arabic collator. U+0633 is ordered after U+0698. - * With a binary collator, the range would also return nothing. + + /** + * Test sort with an arabic collator. U+0633 is ordered after U+0698. With a binary collator, the + * range would also return nothing. */ public void testArabicSort() { - assertQ("Collated Sort: ", - req("fl", "id", "q", "sort_ar:[\u0698 TO \u0633\u0633]", "sort", "sort_ar asc" ), - "//*[@numFound='2']", - "//result/doc[1]/str[@name='id'][.=12]", - "//result/doc[2]/str[@name='id'][.=1]" - ); + assertQ( + "Collated Sort: ", + req("fl", "id", "q", "sort_ar:[\u0698 TO \u0633\u0633]", "sort", "sort_ar asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=12]", + "//result/doc[2]/str[@name='id'][.=1]"); } - /** - * Test rangequery again with an Arabic collator. - * Binary order would normally order U+0633 in this range. + /** + * Test rangequery again with an Arabic collator. Binary order would normally order U+0633 in this + * range. */ public void testNegativeRangeQuery() { - assertQ("Collated RangeQ: ", - req("fl", "id", "q", "sort_ar:[\u062F TO \u0698]", "sort", "id asc" ), - "//*[@numFound='0']" - ); + assertQ( + "Collated RangeQ: ", + req("fl", "id", "q", "sort_ar:[\u062F TO \u0698]", "sort", "id asc"), + "//*[@numFound='0']"); } /** - * Test canonical decomposition with turkish primary strength. - * With this sort order, İ is the uppercase form of i, and I is the uppercase form of ı. - * We index a decomposed form of İ. + * Test canonical decomposition with turkish primary strength. With this sort order, İ is the + * uppercase form of i, and I is the uppercase form of ı. We index a decomposed form of İ. */ public void testCanonicalDecomposition() { - assertQ("Collated TQ: ", - req("fl", "id", "q", "sort_tr_canon:\"I Will Use Turkish Casıng\"", "sort", "id asc" ), - "//*[@numFound='3']", - "//result/doc[1]/str[@name='id'][.=2]", - "//result/doc[2]/str[@name='id'][.=3]", - "//result/doc[3]/str[@name='id'][.=5]" - ); + assertQ( + "Collated TQ: ", + req("fl", "id", "q", "sort_tr_canon:\"I Will Use Turkish Casıng\"", "sort", "id asc"), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.=2]", + "//result/doc[2]/str[@name='id'][.=3]", + "//result/doc[3]/str[@name='id'][.=5]"); } - + /** - * Test full decomposition with chinese identical strength. - * The full width form "Ｔｅｓｔｉｎｇ" is treated identical to "Testing" + * Test full decomposition with chinese identical strength. 
The full width form "Ｔｅｓｔｉｎｇ" is + * treated identical to "Testing" */ public void testFullDecomposition() { - assertQ("Collated TQ: ", - req("fl", "id", "q", "sort_zh_full:Testing", "sort", "id asc" ), - "//*[@numFound='2']", - "//result/doc[1]/str[@name='id'][.=6]", - "//result/doc[2]/str[@name='id'][.=8]" - ); + assertQ( + "Collated TQ: ", + req("fl", "id", "q", "sort_zh_full:Testing", "sort", "id asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=6]", + "//result/doc[2]/str[@name='id'][.=8]"); } - - /** - * Test termquery with custom collator (DIN 5007-2). - * In this case, ö is equivalent to oe (but not o) + + /** + * Test termquery with custom collator (DIN 5007-2). In this case, ö is equivalent to oe (but not + * o) */ public void testCustomCollation() { - assertQ("Collated TQ: ", - req("fl", "id", "q", "sort_custom:toene" ), - "//*[@numFound='2']", - "//result/doc/str[@name='id'][.=4]", - "//result/doc/str[@name='id'][.=10]" - ); + assertQ( + "Collated TQ: ", + req("fl", "id", "q", "sort_custom:toene"), + "//*[@numFound='2']", + "//result/doc/str[@name='id'][.=4]", + "//result/doc/str[@name='id'][.=10]"); } } diff --git a/solr/core/src/test/org/apache/solr/schema/TestCollationFieldDocValues.java b/solr/core/src/test/org/apache/solr/schema/TestCollationFieldDocValues.java index c950f673649..0187f60dfd6 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestCollationFieldDocValues.java +++ b/solr/core/src/test/org/apache/solr/schema/TestCollationFieldDocValues.java @@ -21,21 +21,18 @@ import java.text.Collator; import java.text.RuleBasedCollator; import java.util.Locale; - import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.solr.SolrTestCaseJ4; import org.junit.BeforeClass; -/** - * Tests {@link CollationField} with docvalues - */ +/** Tests {@link CollationField} with docvalues */ public class TestCollationFieldDocValues extends SolrTestCaseJ4 { - + @BeforeClass public static void beforeClass() throws Exception { String home = setupSolrHome(); - initCore("solrconfig.xml","schema.xml", home); + initCore("solrconfig.xml", "schema.xml", home); // add some docs assertU(adoc("id", "1", "text", "\u0633\u0627\u0628")); assertU(adoc("id", "2", "text", "I WİLL USE TURKİSH CASING")); @@ -51,36 +48,41 @@ public static void beforeClass() throws Exception { assertU(adoc("id", "12", "text", "\u0698\u0698")); assertU(commit()); } - + /** - * Ugly: but what to do? We want to test custom sort, which reads rules in as a resource. - * These are largish files, and jvm-specific (as our documentation says, you should always - * look out for jvm differences with collation). - * So it's preferable to create this file on-the-fly. + * Ugly: but what to do? We want to test custom sort, which reads rules in as a resource. These + * are largish files, and jvm-specific (as our documentation says, you should always look out for + * jvm differences with collation). So it's preferable to create this file on-the-fly. 
*/ public static String setupSolrHome() throws Exception { // make a solr home underneath the test's TEMP_DIR File tmpFile = createTempDir("collation1").toFile(); - + // make data and conf dirs new File(tmpFile, "data").mkdir(); File confDir = new File(tmpFile + "/collection1", "conf"); confDir.mkdirs(); - + // copy over configuration files - FileUtils.copyFile(getFile("solr/collection1/conf/solrconfig-basic.xml"), new File(confDir, "solrconfig.xml")); - FileUtils.copyFile(getFile("solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml"), new File(confDir, "solrconfig.snippet.randomindexconfig.xml")); - FileUtils.copyFile(getFile("solr/collection1/conf/schema-collate-dv.xml"), new File(confDir, "schema.xml")); - + FileUtils.copyFile( + getFile("solr/collection1/conf/solrconfig-basic.xml"), new File(confDir, "solrconfig.xml")); + FileUtils.copyFile( + getFile("solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml"), + new File(confDir, "solrconfig.snippet.randomindexconfig.xml")); + FileUtils.copyFile( + getFile("solr/collection1/conf/schema-collate-dv.xml"), new File(confDir, "schema.xml")); + // generate custom collation rules (DIN 5007-2), saving to customrules.dat - RuleBasedCollator baseCollator = (RuleBasedCollator) Collator.getInstance(new Locale("de", "DE")); + RuleBasedCollator baseCollator = + (RuleBasedCollator) Collator.getInstance(new Locale("de", "DE")); String DIN5007_2_tailorings = - "& ae , a\u0308 & AE , A\u0308"+ - "& oe , o\u0308 & OE , O\u0308"+ - "& ue , u\u0308 & UE , u\u0308"; + "& ae , a\u0308 & AE , A\u0308" + + "& oe , o\u0308 & OE , O\u0308" + + "& ue , u\u0308 & UE , u\u0308"; - RuleBasedCollator tailoredCollator = new RuleBasedCollator(baseCollator.getRules() + DIN5007_2_tailorings); + RuleBasedCollator tailoredCollator = + new RuleBasedCollator(baseCollator.getRules() + DIN5007_2_tailorings); String tailoredRules = tailoredCollator.getRules(); FileOutputStream os = new FileOutputStream(new File(confDir, "customrules.dat")); IOUtils.write(tailoredRules, os, "UTF-8"); @@ -89,106 +91,102 @@ public static String setupSolrHome() throws Exception { return tmpFile.getAbsolutePath(); } - /** - * Test termquery with german DIN 5007-1 primary strength. - * In this case, ö is equivalent to o (but not oe) + /** + * Test termquery with german DIN 5007-1 primary strength. In this case, ö is equivalent to o (but + * not oe) */ public void testBasicTermQuery() { - assertQ("Collated TQ: ", - req("fl", "id", "q", "sort_de:tone", "sort", "id asc" ), - "//*[@numFound='2']", - "//result/doc[1]/str[@name='id'][.=4]", - "//result/doc[2]/str[@name='id'][.=7]" - ); + assertQ( + "Collated TQ: ", + req("fl", "id", "q", "sort_de:tone", "sort", "id asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=4]", + "//result/doc[2]/str[@name='id'][.=7]"); } - - /** - * Test rangequery again with the DIN 5007-1 collator. - * We do a range query of tone .. tp, in binary order this - * would retrieve nothing due to case and accent differences. + + /** + * Test rangequery again with the DIN 5007-1 collator. We do a range query of tone .. tp, in + * binary order this would retrieve nothing due to case and accent differences. 
*/ public void testBasicRangeQuery() { - assertQ("Collated RangeQ: ", - req("fl", "id", "q", "sort_de:[tone TO tp]", "sort", "id asc" ), - "//*[@numFound='2']", - "//result/doc[1]/str[@name='id'][.=4]", - "//result/doc[2]/str[@name='id'][.=7]" - ); + assertQ( + "Collated RangeQ: ", + req("fl", "id", "q", "sort_de:[tone TO tp]", "sort", "id asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=4]", + "//result/doc[2]/str[@name='id'][.=7]"); } - - /** - * Test sort with a danish collator. ö is ordered after z - */ + + /** Test sort with a danish collator. ö is ordered after z */ public void testBasicSort() { - assertQ("Collated Sort: ", - req("fl", "id", "q", "sort_da:[tz TO töz]", "sort", "sort_da asc" ), - "//*[@numFound='2']", - "//result/doc[1]/str[@name='id'][.=11]", - "//result/doc[2]/str[@name='id'][.=4]" - ); + assertQ( + "Collated Sort: ", + req("fl", "id", "q", "sort_da:[tz TO töz]", "sort", "sort_da asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=11]", + "//result/doc[2]/str[@name='id'][.=4]"); } - - /** - * Test sort with an arabic collator. U+0633 is ordered after U+0698. - * With a binary collator, the range would also return nothing. + + /** + * Test sort with an arabic collator. U+0633 is ordered after U+0698. With a binary collator, the + * range would also return nothing. */ public void testArabicSort() { - assertQ("Collated Sort: ", - req("fl", "id", "q", "sort_ar:[\u0698 TO \u0633\u0633]", "sort", "sort_ar asc" ), - "//*[@numFound='2']", - "//result/doc[1]/str[@name='id'][.=12]", - "//result/doc[2]/str[@name='id'][.=1]" - ); + assertQ( + "Collated Sort: ", + req("fl", "id", "q", "sort_ar:[\u0698 TO \u0633\u0633]", "sort", "sort_ar asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=12]", + "//result/doc[2]/str[@name='id'][.=1]"); } - /** - * Test rangequery again with an Arabic collator. - * Binary order would normally order U+0633 in this range. + /** + * Test rangequery again with an Arabic collator. Binary order would normally order U+0633 in this + * range. */ public void testNegativeRangeQuery() { - assertQ("Collated RangeQ: ", - req("fl", "id", "q", "sort_ar:[\u062F TO \u0698]", "sort", "id asc" ), - "//*[@numFound='0']" - ); + assertQ( + "Collated RangeQ: ", + req("fl", "id", "q", "sort_ar:[\u062F TO \u0698]", "sort", "id asc"), + "//*[@numFound='0']"); } /** - * Test canonical decomposition with turkish primary strength. - * With this sort order, İ is the uppercase form of i, and I is the uppercase form of ı. - * We index a decomposed form of İ. + * Test canonical decomposition with turkish primary strength. With this sort order, İ is the + * uppercase form of i, and I is the uppercase form of ı. We index a decomposed form of İ. */ public void testCanonicalDecomposition() { - assertQ("Collated TQ: ", - req("fl", "id", "q", "sort_tr_canon:\"I Will Use Turkish Casıng\"", "sort", "id asc" ), - "//*[@numFound='3']", - "//result/doc[1]/str[@name='id'][.=2]", - "//result/doc[2]/str[@name='id'][.=3]", - "//result/doc[3]/str[@name='id'][.=5]" - ); + assertQ( + "Collated TQ: ", + req("fl", "id", "q", "sort_tr_canon:\"I Will Use Turkish Casıng\"", "sort", "id asc"), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.=2]", + "//result/doc[2]/str[@name='id'][.=3]", + "//result/doc[3]/str[@name='id'][.=5]"); } - + /** - * Test full decomposition with chinese identical strength. - * The full width form "Ｔｅｓｔｉｎｇ" is treated identical to "Testing" + * Test full decomposition with chinese identical strength. 
The full width form "Ｔｅｓｔｉｎｇ" is + * treated identical to "Testing" */ public void testFullDecomposition() { - assertQ("Collated TQ: ", - req("fl", "id", "q", "sort_zh_full:Testing", "sort", "id asc" ), - "//*[@numFound='2']", - "//result/doc[1]/str[@name='id'][.=6]", - "//result/doc[2]/str[@name='id'][.=8]" - ); + assertQ( + "Collated TQ: ", + req("fl", "id", "q", "sort_zh_full:Testing", "sort", "id asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=6]", + "//result/doc[2]/str[@name='id'][.=8]"); } - - /** - * Test termquery with custom collator (DIN 5007-2). - * In this case, ö is equivalent to oe (but not o) + + /** + * Test termquery with custom collator (DIN 5007-2). In this case, ö is equivalent to oe (but not + * o) */ public void testCustomCollation() { - assertQ("Collated TQ: ", - req("fl", "id", "q", "sort_custom:toene" ), - "//*[@numFound='2']", - "//result/doc/str[@name='id'][.=4]", - "//result/doc/str[@name='id'][.=10]" - ); + assertQ( + "Collated TQ: ", + req("fl", "id", "q", "sort_custom:toene"), + "//*[@numFound='2']", + "//result/doc/str[@name='id'][.=4]", + "//result/doc/str[@name='id'][.=10]"); } } diff --git a/solr/core/src/test/org/apache/solr/schema/TestManagedSchema.java b/solr/core/src/test/org/apache/solr/schema/TestManagedSchema.java index 42a5a79a321..740dd445d53 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestManagedSchema.java +++ b/solr/core/src/test/org/apache/solr/schema/TestManagedSchema.java @@ -15,6 +15,7 @@ * limitations under the License. */ package org.apache.solr.schema; + import java.io.File; import java.io.FileInputStream; import java.lang.invoke.MethodHandles; @@ -23,7 +24,6 @@ import java.util.HashMap; import java.util.Map; import java.util.regex.Pattern; - import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.solr.common.SolrException; @@ -54,17 +54,21 @@ private void initManagedSchemaCore() throws Exception { tmpSolrHome = createTempDir().toFile(); tmpConfDir = new File(tmpSolrHome, confDir); File testHomeConfDir = new File(TEST_HOME(), confDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "solrconfig-managed-schema.xml"), tmpConfDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "solrconfig-managed-schema.xml"), tmpConfDir); FileUtils.copyFileToDirectory(new File(testHomeConfDir, "solrconfig-basic.xml"), tmpConfDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "solrconfig-managed-schema-test.xml"), tmpConfDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "solrconfig.snippet.randomindexconfig.xml"), tmpConfDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema-one-field-no-dynamic-field.xml"), tmpConfDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema-one-field-no-dynamic-field-unique-key.xml"), tmpConfDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "solrconfig-managed-schema-test.xml"), tmpConfDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "solrconfig.snippet.randomindexconfig.xml"), tmpConfDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "schema-one-field-no-dynamic-field.xml"), tmpConfDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "schema-one-field-no-dynamic-field-unique-key.xml"), tmpConfDir); FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema-minimal.xml"), tmpConfDir); FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema_codec.xml"), tmpConfDir); 
FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema-bm25.xml"), tmpConfDir); - // initCore will trigger an upgrade to managed schema, since the solrconfig has // <schemaFactory class="ManagedIndexSchemaFactory" ... /> System.setProperty("managed.schema.mutable", "false"); @@ -108,8 +112,11 @@ public void testUpgradeThenRestartNonManaged() throws Exception { deleteCore(); // After upgrade to managed schema, fail to restart when solrconfig doesn't contain // <schemaFactory class="ManagedIndexSchemaFactory">...</schemaFactory> - assertConfigs("solrconfig-basic.xml", "schema-minimal.xml", tmpSolrHome.getPath(), - "Can't find resource 'schema-minimal.xml'"); + assertConfigs( + "solrconfig-basic.xml", + "schema-minimal.xml", + tmpSolrHome.getPath(), + "Can't find resource 'schema-minimal.xml'"); } public void testUpgradeThenRestartNonManagedAfterPuttingBackNonManagedSchema() throws Exception { @@ -120,7 +127,8 @@ public void testUpgradeThenRestartNonManagedAfterPuttingBackNonManagedSchema() t File upgradedOriginalSchemaFile = new File(tmpConfDir, "schema-minimal.xml.bak"); assertTrue(upgradedOriginalSchemaFile.exists()); - // After upgrade to managed schema, downgrading to non-managed should work after putting back the non-managed schema. + // After upgrade to managed schema, downgrading to non-managed should work after putting back + // the non-managed schema. FileUtils.moveFile(upgradedOriginalSchemaFile, nonManagedSchemaFile); initCore("solrconfig-basic.xml", "schema-minimal.xml", tmpSolrHome.getPath()); assertSchemaResource(collection, "schema-minimal.xml"); @@ -132,25 +140,31 @@ public void testDefaultSchemaFactory() throws Exception { final CoreContainer cores = h.getCoreContainer(); final CoreAdminHandler admin = new CoreAdminHandler(cores); - SolrQueryRequest request = req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.STATUS.toString()); + SolrQueryRequest request = + req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.STATUS.toString()); SolrQueryResponse response = new SolrQueryResponse(); admin.handleRequestBody(request, response); assertNull("Exception on create", response.getException()); assertSchemaResource(collection, "managed-schema.xml"); } - private void assertSchemaResource(String collection, String expectedSchemaResource) throws Exception { + private void assertSchemaResource(String collection, String expectedSchemaResource) + throws Exception { final CoreContainer cores = h.getCoreContainer(); final CoreAdminHandler admin = new CoreAdminHandler(cores); - SolrQueryRequest request = req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.STATUS.toString()); + SolrQueryRequest request = + req(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.STATUS.toString()); SolrQueryResponse response = new SolrQueryResponse(); admin.handleRequestBody(request, response); assertNull("Exception on create", response.getException()); NamedList responseValues = response.getValues(); - NamedList status = (NamedList)responseValues.get("status"); - NamedList collectionStatus = (NamedList)status.get(collection); - String collectionSchema = (String)collectionStatus.get(CoreAdminParams.SCHEMA); - assertEquals("Schema resource name differs from expected name", expectedSchemaResource, collectionSchema); + NamedList status = (NamedList) responseValues.get("status"); + NamedList collectionStatus = (NamedList) status.get(collection); + String collectionSchema = (String) collectionStatus.get(CoreAdminParams.SCHEMA); + assertEquals( + "Schema resource name differs from expected name", + expectedSchemaResource, + collectionSchema); } public void testAddFieldWhenNotMutable() throws 
Exception { @@ -161,7 +175,7 @@ public void testAddFieldWhenNotMutable() throws Exception { IndexSchema oldSchema = h.getCore().getLatestSchema(); String fieldName = "new_field"; String fieldType = "string"; - Map<String,?> options = Collections.emptyMap(); + Map<String, ?> options = Collections.emptyMap(); SchemaField newField = oldSchema.newField(fieldName, fieldType, options); IndexSchema newSchema = oldSchema.addField(newField); h.getCore().setLatestSchema(newSchema); @@ -172,8 +186,10 @@ public void testAddFieldWhenNotMutable() throws Exception { if (t.getMessage() != null && -1 != t.getMessage().indexOf(errString)) return; } // otherwise, rethrow it, possibly completely unrelated - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, - "Unexpected error, expected error matching: " + errString, e); + throw new SolrException( + SolrException.ErrorCode.SERVER_ERROR, + "Unexpected error, expected error matching: " + errString, + e); } finally { resetExceptionIgnores(); } @@ -183,16 +199,21 @@ public void testAddFieldPersistence() throws Exception { assertSchemaResource(collection, "managed-schema.xml"); deleteCore(); File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); - Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema + Files.delete( + managedSchemaFile + .toPath()); // Delete managed-schema.xml so it won't block parsing a new schema System.setProperty("managed.schema.mutable", "true"); - initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath()); + initCore( + "solrconfig-managed-schema.xml", + "schema-one-field-no-dynamic-field.xml", + tmpSolrHome.getPath()); assertTrue(managedSchemaFile.exists()); String managedSchemaContents = FileUtils.readFileToString(managedSchemaFile, "UTF-8"); assertFalse(managedSchemaContents.contains("\"new_field\"")); - Map<String,String> options = new HashMap<>(); + Map<String, String> options = new HashMap<>(); options.put("stored", "false"); IndexSchema oldSchema = h.getCore().getLatestSchema(); String fieldName = "new_field"; @@ -205,16 +226,22 @@ public void testAddFieldPersistence() throws Exception { FileInputStream stream = new FileInputStream(managedSchemaFile); managedSchemaContents = IOUtils.toString(stream, "UTF-8"); stream.close(); // Explicitly close so that Windows can delete this file - assertTrue(managedSchemaContents.contains("<field name=\"new_field\" type=\"string\" stored=\"false\"/>")); + assertTrue( + managedSchemaContents.contains( + "<field name=\"new_field\" type=\"string\" stored=\"false\"/>")); } public void testAddedFieldIndexableAndQueryable() throws Exception { assertSchemaResource(collection, "managed-schema.xml"); deleteCore(); File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); - Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema + // Delete managed-schema.xml so it won't block parsing a new schema + Files.delete(managedSchemaFile.toPath()); System.setProperty("managed.schema.mutable", "true"); - initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath()); + initCore( + "solrconfig-managed-schema.xml", + "schema-one-field-no-dynamic-field.xml", + tmpSolrHome.getPath()); assertTrue(managedSchemaFile.exists()); String managedSchemaContents = FileUtils.readFileToString(managedSchemaFile, "UTF-8"); @@ -233,15 +260,17 @@ public void testAddedFieldIndexableAndQueryable() throws Exception { if (t.getMessage() != null && -1 != t.getMessage().indexOf(errString)) return; } // otherwise, rethrow it, possibly completely unrelated - throw new 
SolrException(SolrException.ErrorCode.SERVER_ERROR, - "Unexpected error, expected error matching: " + errString, e); + throw new SolrException( + SolrException.ErrorCode.SERVER_ERROR, + "Unexpected error, expected error matching: " + errString, + e); } finally { resetExceptionIgnores(); } assertU(commit()); assertQ(req("new_field:thing1"), "//*[@numFound='0']"); - Map<String,String> options = new HashMap<>(); + Map<String, String> options = new HashMap<>(); options.put("stored", "false"); IndexSchema oldSchema = h.getCore().getLatestSchema(); String fieldName = "new_field"; @@ -256,19 +285,25 @@ public void testAddedFieldIndexableAndQueryable() throws Exception { assertQ(req("new_field:thing1"), "//*[@numFound='1']"); } - public void testAddFieldWhenItAlreadyExists() throws Exception{ + public void testAddFieldWhenItAlreadyExists() throws Exception { deleteCore(); File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); - Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema + // Delete managed-schema.xml so it won't block parsing a new schema + Files.delete(managedSchemaFile.toPath()); System.setProperty("managed.schema.mutable", "true"); - initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath()); + initCore( + "solrconfig-managed-schema.xml", + "schema-one-field-no-dynamic-field.xml", + tmpSolrHome.getPath()); - assertNotNull("Field 'str' is not present in the schema", h.getCore().getLatestSchema().getFieldOrNull("str")); + assertNotNull( + "Field 'str' is not present in the schema", + h.getCore().getLatestSchema().getFieldOrNull("str")); String errString = "Field 'str' already exists."; ignoreException(Pattern.quote(errString)); try { - Map<String,String> options = new HashMap<>(); + Map<String, String> options = new HashMap<>(); IndexSchema oldSchema = h.getCore().getLatestSchema(); String fieldName = "str"; String fieldType = "string"; @@ -282,21 +317,27 @@ public void testAddFieldWhenItAlreadyExists() throws Exception{ if (t.getMessage() != null && -1 != t.getMessage().indexOf(errString)) return; } // otherwise, rethrow it, possibly completely unrelated - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, - "Unexpected error, expected error matching: " + errString, e); + throw new SolrException( + SolrException.ErrorCode.SERVER_ERROR, + "Unexpected error, expected error matching: " + errString, + e); } finally { resetExceptionIgnores(); } } - public void testAddSameFieldTwice() throws Exception{ + public void testAddSameFieldTwice() throws Exception { deleteCore(); File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); - Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema + // Delete managed-schema.xml so it won't block parsing a new schema + Files.delete(managedSchemaFile.toPath()); System.setProperty("managed.schema.mutable", "true"); - initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath()); + initCore( + "solrconfig-managed-schema.xml", + "schema-one-field-no-dynamic-field.xml", + tmpSolrHome.getPath()); - Map<String,String> options = new HashMap<>(); + Map<String, String> options = new HashMap<>(); options.put("stored", "false"); IndexSchema oldSchema = h.getCore().getLatestSchema(); String fieldName = "new_field"; @@ -317,26 +358,34 @@ public void testAddSameFieldTwice() throws Exception{ if (t.getMessage() != null && -1 != t.getMessage().indexOf(errString)) return; } // otherwise, rethrow it, possibly completely unrelated 
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, - "Unexpected error, expected error matching: " + errString, e); + throw new SolrException( + SolrException.ErrorCode.SERVER_ERROR, + "Unexpected error, expected error matching: " + errString, + e); } finally { resetExceptionIgnores(); } } - public void testAddDynamicField() throws Exception{ + public void testAddDynamicField() throws Exception { deleteCore(); File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); - Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema + // Delete managed-schema.xml so it won't block parsing a new schema + Files.delete(managedSchemaFile.toPath()); System.setProperty("managed.schema.mutable", "true"); - initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath()); + initCore( + "solrconfig-managed-schema.xml", + "schema-one-field-no-dynamic-field.xml", + tmpSolrHome.getPath()); - assertNull("Field '*_s' is present in the schema", h.getCore().getLatestSchema().getFieldOrNull("*_s")); + assertNull( + "Field '*_s' is present in the schema", + h.getCore().getLatestSchema().getFieldOrNull("*_s")); String errString = "Can't add dynamic field '*_s'."; ignoreException(Pattern.quote(errString)); try { - Map<String,String> options = new HashMap<>(); + Map<String, String> options = new HashMap<>(); IndexSchema oldSchema = h.getCore().getLatestSchema(); String fieldName = "*_s"; String fieldType = "string"; @@ -350,8 +399,10 @@ public void testAddDynamicField() throws Exception{ if (t.getMessage() != null && -1 != t.getMessage().indexOf(errString)) return; } // otherwise, rethrow it, possibly completely unrelated - throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, - "Unexpected error, expected error matching: " + errString, e); + throw new SolrException( + SolrException.ErrorCode.SERVER_ERROR, + "Unexpected error, expected error matching: " + errString, + e); } finally { resetExceptionIgnores(); } @@ -360,19 +411,22 @@ public void testAddDynamicField() throws Exception{ public void testAddWithSchemaCodecFactory() throws Exception { deleteCore(); File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml"); - Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema + // Delete managed-schema.xml so it won't block parsing a new schema + Files.delete(managedSchemaFile.toPath()); System.setProperty("managed.schema.mutable", "true"); initCore("solrconfig-managed-schema.xml", "schema_codec.xml", tmpSolrHome.getPath()); String uniqueKey = "string_f"; - assertNotNull("Unique key field '" + uniqueKey + "' is not present in the schema", - h.getCore().getLatestSchema().getFieldOrNull(uniqueKey)); + assertNotNull( + "Unique key field '" + uniqueKey + "' is not present in the schema", + h.getCore().getLatestSchema().getFieldOrNull(uniqueKey)); String fieldName = "string_disk_new_field"; - assertNull("Field '" + fieldName + "' is present in the schema", - h.getCore().getLatestSchema().getFieldOrNull(fieldName)); + assertNull( + "Field '" + fieldName + "' is present in the schema", + h.getCore().getLatestSchema().getFieldOrNull(fieldName)); - Map<String,String> options = new HashMap<>(); + Map<String, String> options = new HashMap<>(); IndexSchema oldSchema = h.getCore().getLatestSchema(); String fieldType = "string_disk"; SchemaField newField = oldSchema.newField(fieldName, fieldType, options); @@ -388,19 +442,22 @@ public void 
   public void testAddWithSchemaSimilarityFactory() throws Exception {
     deleteCore();
     File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml");
-    Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema
+    // Delete managed-schema.xml so it won't block parsing a new schema
+    Files.delete(managedSchemaFile.toPath());
     System.setProperty("managed.schema.mutable", "true");
     initCore("solrconfig-managed-schema.xml", "schema-bm25.xml", tmpSolrHome.getPath());
     String uniqueKey = "id";
-    assertNotNull("Unique key field '" + uniqueKey + "' is not present in the schema",
+    assertNotNull(
+        "Unique key field '" + uniqueKey + "' is not present in the schema",
         h.getCore().getLatestSchema().getFieldOrNull(uniqueKey));

     String fieldName = "new_text_field";
-    assertNull("Field '" + fieldName + "' is present in the schema",
+    assertNull(
+        "Field '" + fieldName + "' is present in the schema",
         h.getCore().getLatestSchema().getFieldOrNull(fieldName));

-    Map<String,Object> options = new HashMap<>();
+    Map<String, Object> options = new HashMap<>();
     IndexSchema oldSchema = h.getCore().getLatestSchema();
     String fieldType = "text";
     SchemaField newField = oldSchema.newField(fieldName, fieldType, options);
@@ -417,15 +474,19 @@ public void testPersistUniqueKey() throws Exception {
     assertSchemaResource(collection, "managed-schema.xml");
     deleteCore();
     File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml");
-    Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema
+    // Delete managed-schema.xml so it won't block parsing a new schema
+    Files.delete(managedSchemaFile.toPath());
     System.setProperty("managed.schema.mutable", "true");
-    initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field-unique-key.xml", tmpSolrHome.getPath());
+    initCore(
+        "solrconfig-managed-schema.xml",
+        "schema-one-field-no-dynamic-field-unique-key.xml",
+        tmpSolrHome.getPath());
     assertTrue(managedSchemaFile.exists());
     String managedSchemaContents = FileUtils.readFileToString(managedSchemaFile, "UTF-8");
     assertFalse(managedSchemaContents.contains("\"new_field\""));

-    Map<String,Object> options = new HashMap<>();
+    Map<String, Object> options = new HashMap<>();
     options.put("stored", "false");
     IndexSchema oldSchema = h.getCore().getLatestSchema();
     assertEquals("str", oldSchema.getUniqueKeyField().getName());
@@ -444,7 +505,9 @@ public void testPersistUniqueKey() throws Exception {
     FileInputStream stream = new FileInputStream(managedSchemaFile);
     managedSchemaContents = IOUtils.toString(stream, "UTF-8");
     stream.close(); // Explicitly close so that Windows can delete this file
-    assertTrue(managedSchemaContents.contains("<field name=\"new_field\" type=\"text\" stored=\"false\"/>"));
+    assertTrue(
+        managedSchemaContents.contains(
+            "<field name=\"new_field\" type=\"text\" stored=\"false\"/>"));
     IndexSchema newNewSchema = h.getCore().getLatestSchema();
     assertNotNull(newNewSchema.getUniqueKeyField());
     assertEquals("str", newNewSchema.getUniqueKeyField().getName());
@@ -453,15 +516,20 @@ public void testPersistUniqueKey() throws Exception {

   public void testAddFieldThenReload() throws Exception {
     deleteCore();
     File managedSchemaFile = new File(tmpConfDir, "managed-schema.xml");
-    Files.delete(managedSchemaFile.toPath()); // Delete managed-schema.xml so it won't block parsing a new schema
+    // Delete managed-schema.xml so it won't block parsing a new schema
+    Files.delete(managedSchemaFile.toPath());
     System.setProperty("managed.schema.mutable", "true");
-    initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath());
+    initCore(
"solrconfig-managed-schema.xml", + "schema-one-field-no-dynamic-field.xml", + tmpSolrHome.getPath()); String fieldName = "new_text_field"; - assertNull("Field '" + fieldName + "' is present in the schema", + assertNull( + "Field '" + fieldName + "' is present in the schema", h.getCore().getLatestSchema().getFieldOrNull(fieldName)); - Map options = new HashMap<>(); + Map options = new HashMap<>(); IndexSchema oldSchema = h.getCore().getLatestSchema(); String fieldType = "text"; SchemaField newField = oldSchema.newField(fieldName, fieldType, options); diff --git a/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaAPI.java b/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaAPI.java index e0303e5c26b..50eba14edc2 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaAPI.java +++ b/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaAPI.java @@ -21,7 +21,6 @@ import java.lang.invoke.MethodHandles; import java.util.LinkedHashMap; import java.util.Map; - import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -43,7 +42,8 @@ public class TestManagedSchemaAPI extends SolrCloudTestCase { public static void createCluster() throws Exception { System.setProperty("managed.schema.mutable", "true"); configureCluster(2) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-managed").resolve("conf")) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-managed").resolve("conf")) .configure(); } @@ -52,7 +52,6 @@ public void test() throws Exception { String collection = "testschemaapi"; CollectionAdminRequest.createCollection(collection, "conf1", 1, 2) .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) - .process(cluster.getSolrClient()); testModifyField(collection); testReloadAndAddSimple(collection); @@ -65,7 +64,8 @@ private void testReloadAndAddSimple(String collection) throws IOException, SolrS String fieldName = "myNewField"; addStringField(fieldName, collection, cloudClient); - CollectionAdminRequest.Reload reloadRequest = CollectionAdminRequest.reloadCollection(collection); + CollectionAdminRequest.Reload reloadRequest = + CollectionAdminRequest.reloadCollection(collection); CollectionAdminResponse response = reloadRequest.process(cloudClient); assertEquals(0, response.getStatus()); assertTrue(response.isSuccess()); @@ -90,12 +90,15 @@ private void testAddFieldAndDocument(String collection) throws IOException, Solr cloudClient.request(ureq, collection); } - private void addStringField(String fieldName, String collection, CloudSolrClient cloudClient) throws IOException, SolrServerException { + private void addStringField(String fieldName, String collection, CloudSolrClient cloudClient) + throws IOException, SolrServerException { Map fieldAttributes = new LinkedHashMap<>(); fieldAttributes.put("name", fieldName); fieldAttributes.put("type", "string"); - SchemaRequest.AddField addFieldUpdateSchemaRequest = new SchemaRequest.AddField(fieldAttributes); - SchemaResponse.UpdateResponse addFieldResponse = addFieldUpdateSchemaRequest.process(cloudClient, collection); + SchemaRequest.AddField addFieldUpdateSchemaRequest = + new SchemaRequest.AddField(fieldAttributes); + SchemaResponse.UpdateResponse addFieldResponse = + addFieldUpdateSchemaRequest.process(cloudClient, collection); assertEquals(0, addFieldResponse.getStatus()); assertNull(addFieldResponse.getResponse().get("errors")); @@ 
@@ -113,21 +116,25 @@ private void testModifyField(String collection) throws Exception {
     String fieldName = "id";

     SchemaRequest.Field getFieldRequest = new SchemaRequest.Field(fieldName);
-    SchemaResponse.FieldResponse getFieldResponse = getFieldRequest.process(cloudClient, collection);
+    SchemaResponse.FieldResponse getFieldResponse =
+        getFieldRequest.process(cloudClient, collection);
     Map<String, Object> field = getFieldResponse.getField();
     field.put("uninvertible", false); // and because this field does not have docValues, can't sort.
     SchemaRequest.ReplaceField replaceRequest = new SchemaRequest.ReplaceField(field);
     SchemaResponse.UpdateResponse replaceResponse = replaceRequest.process(cloudClient, collection);
     assertNull(replaceResponse.getResponse().get("errors"));

-    CollectionAdminRequest.Reload reloadRequest = CollectionAdminRequest.reloadCollection(collection);
+    CollectionAdminRequest.Reload reloadRequest =
+        CollectionAdminRequest.reloadCollection(collection);
     CollectionAdminResponse response = reloadRequest.process(cloudClient);
     assertEquals(0, response.getStatus());
     assertTrue(response.isSuccess());

-    Exception e = expectThrows(Exception.class, () -> {
-      cloudClient.query(collection, params("q", "*:*", "sort", "id asc"));
-    });
+    Exception e =
+        expectThrows(
+            Exception.class,
+            () -> {
+              cloudClient.query(collection, params("q", "*:*", "sort", "id asc"));
+            });
     assertTrue("Should fail because needs docValues", e.getMessage().contains("docValues"));
   }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaThreadSafety.java b/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaThreadSafety.java
index bd21cda3fb0..23bd86e510b 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaThreadSafety.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaThreadSafety.java
@@ -17,13 +17,15 @@
 package org.apache.solr.schema;

+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
 import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Future;
 import java.util.concurrent.atomic.AtomicReference;
-
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.cloud.ZkController;
 import org.apache.solr.cloud.ZkSolrResourceLoader;
@@ -47,9 +49,6 @@
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;

-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
 public class TestManagedSchemaThreadSafety extends SolrTestCaseJ4 {

   private static final class SuspendingZkClient extends SolrZkClient {
@@ -59,9 +58,9 @@ private SuspendingZkClient(String zkServerAddress, int zkClientTimeout) {
       super(zkServerAddress, zkClientTimeout);
     }

-    boolean isSlowpoke(){
+    boolean isSlowpoke() {
       Thread youKnow;
-      if ((youKnow = slowpoke.get())!=null) {
+      if ((youKnow = slowpoke.get()) != null) {
         return youKnow == Thread.currentThread();
       } else {
         return slowpoke.compareAndSet(null, Thread.currentThread());
@@ -76,7 +75,7 @@ public byte[] getData(String path, Watcher watcher, Stat stat, boolean retryOnCo
         data = super.getData(path, watcher, stat, retryOnConnLoss);
       } catch (NoNodeException e) {
         if (isSlowpoke()) {
-          //System.out.println("suspending "+Thread.currentThread()+" on " + path);
+          // System.out.println("suspending "+Thread.currentThread()+" on " + path);
           Thread.sleep(500);
         }
         throw e;
@@ -108,7 +107,7 @@
   @LogLevel("org.apache.solr.common.cloud.SolrZkClient=debug")
   public void testThreadSafety() throws Exception {

-    final String configsetName = "managed-config";//
+    final String configsetName = "managed-config"; //

     try (SolrZkClient client = new SuspendingZkClient(zkServer.getZkHost(), 30000)) {
       // we can pick any to load configs, I suppose, but here we check
@@ -116,7 +115,7 @@ public void testThreadSafety() throws Exception {
     }

     ExecutorService executor = ExecutorUtil.newMDCAwareCachedThreadPool("threadpool");
-
+
     try (SolrZkClient raceJudge = new SuspendingZkClient(zkServer.getZkHost(), 30000)) {

       ZkController zkController = createZkController(raceJudge);
@@ -129,62 +128,65 @@ public void testThreadSafety() throws Exception {
       for (Future<?> future : futures) {
         future.get();
       }
-    }
-    finally {
+    } finally {
       ExecutorUtil.shutdownAndAwaitTermination(executor);
     }
   }

-  private ZkController createZkController(SolrZkClient client) throws KeeperException, InterruptedException {
+  private ZkController createZkController(SolrZkClient client)
+      throws KeeperException, InterruptedException {
     assumeWorkingMockito();
-
-    CoreContainer mockAlwaysUpCoreContainer = mock(CoreContainer.class,
-        Mockito.withSettings().defaultAnswer(Mockito.CALLS_REAL_METHODS));
-    when(mockAlwaysUpCoreContainer.isShutDown()).thenReturn(Boolean.FALSE); // Allow retry on session expiry
-
-
-    ZkController zkController = mock(ZkController.class,
-        Mockito.withSettings().defaultAnswer(Mockito.CALLS_REAL_METHODS));
+
+    CoreContainer mockAlwaysUpCoreContainer =
+        mock(CoreContainer.class, Mockito.withSettings().defaultAnswer(Mockito.CALLS_REAL_METHODS));
+    when(mockAlwaysUpCoreContainer.isShutDown())
+        .thenReturn(Boolean.FALSE); // Allow retry on session expiry
+
+    ZkController zkController =
+        mock(ZkController.class, Mockito.withSettings().defaultAnswer(Mockito.CALLS_REAL_METHODS));
     when(zkController.getCoreContainer()).thenReturn(mockAlwaysUpCoreContainer);
     when(zkController.getZkClient()).thenReturn(client);
-    Mockito.doAnswer(new Answer<Boolean>() {
-      volatile boolean sessionExpired=false;
-
-      @Override
-      public Boolean answer(InvocationOnMock invocation) throws Throwable {
-        String path = (String) invocation.getArguments()[0];
-        perhapsExpired();
-        Boolean exists = client.exists(path, true);
-        perhapsExpired();
-        return exists;
-      }
-
-      private void perhapsExpired() throws SessionExpiredException {
-        if (!sessionExpired && rarely()) {
-          sessionExpired = true;
-          throw new KeeperException.SessionExpiredException();
-        }
-      }
-    }).when(zkController).pathExists(Mockito.anyString());
+    Mockito.doAnswer(
+            new Answer<Boolean>() {
+              volatile boolean sessionExpired = false;
+
+              @Override
+              public Boolean answer(InvocationOnMock invocation) throws Throwable {
+                String path = (String) invocation.getArguments()[0];
+                perhapsExpired();
+                Boolean exists = client.exists(path, true);
+                perhapsExpired();
+                return exists;
+              }
+
+              private void perhapsExpired() throws SessionExpiredException {
+                if (!sessionExpired && rarely()) {
+                  sessionExpired = true;
+                  throw new KeeperException.SessionExpiredException();
+                }
+              }
+            })
+        .when(zkController)
+        .pathExists(Mockito.anyString());

     return zkController;
   }

   private Runnable indexSchemaLoader(String configsetName, final ZkController zkController) {
     return () -> {
       try {
-        SolrResourceLoader loader = new ZkSolrResourceLoader(loaderPath, configsetName, null, zkController);
-        SolrConfig solrConfig = SolrConfig.readFromResourceLoader(loader, "solrconfig.xml", true, null);
+        SolrResourceLoader loader =
+            new ZkSolrResourceLoader(loaderPath, configsetName, null, zkController);
+        SolrConfig solrConfig =
+            SolrConfig.readFromResourceLoader(loader, "solrconfig.xml", true, null);
         ManagedIndexSchemaFactory factory = new ManagedIndexSchemaFactory();
         factory.init(new NamedList<>());
         factory.create("schema.xml", solrConfig, null);
-      }
-      catch (Exception e) {
+      } catch (Exception e) {
         throw new RuntimeException(e);
       }
     };
   }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/schema/TestOmitPositions.java b/solr/core/src/test/org/apache/solr/schema/TestOmitPositions.java
index 0fcc9fee06f..cd891279375 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestOmitPositions.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestOmitPositions.java
@@ -22,34 +22,48 @@ public class TestOmitPositions extends SolrTestCaseJ4 {

   @BeforeClass
   public static void beforeClass() throws Exception {
-    initCore("solrconfig.xml","schema.xml");
+    initCore("solrconfig.xml", "schema.xml");
     // add some docs
-    assertU(adoc("id", "1", "nopositionstext", "this is a test this is only a test", "text", "just another test"));
-    assertU(adoc("id", "2", "nopositionstext", "test test test test test test test test test test test test test", "text", "have a nice day"));
+    assertU(
+        adoc(
+            "id",
+            "1",
+            "nopositionstext",
+            "this is a test this is only a test",
+            "text",
+            "just another test"));
+    assertU(
+        adoc(
+            "id",
+            "2",
+            "nopositionstext",
+            "test test test test test test test test test test test test test",
+            "text",
+            "have a nice day"));
     assertU(commit());
   }
-
+
   public void testFrequencies() {
     // doc 2 should be ranked above doc 1
-    assertQ("term query: ",
-        req("fl", "id", "q", "nopositionstext:test"),
-        "//*[@numFound='2']",
-        "//result/doc[1]/str[@name='id'][.=2]",
-        "//result/doc[2]/str[@name='id'][.=1]"
-    );
+    assertQ(
+        "term query: ",
+        req("fl", "id", "q", "nopositionstext:test"),
+        "//*[@numFound='2']",
+        "//result/doc[1]/str[@name='id'][.=2]",
+        "//result/doc[2]/str[@name='id'][.=1]");
   }
-
+
   public void testPositions() {
     // no results should be found:
     // lucene 3.x: silent failure
     // lucene 4.x: illegal state exception, field was indexed without positions
-
+
     ignoreException("was indexed without position data");
     try {
-      assertQ("phrase query: ",
-          req("fl", "id", "q", "nopositionstext:\"test test\""),
-          "//*[@numFound='0']"
-      );
+      assertQ(
+          "phrase query: ",
+          req("fl", "id", "q", "nopositionstext:\"test test\""),
+          "//*[@numFound='0']");
     } catch (Exception expected) {
       assertTrue(expected.getCause() instanceof IllegalStateException);
       // in lucene 4.0, queries don't silently fail
diff --git a/solr/core/src/test/org/apache/solr/schema/TestPointFields.java b/solr/core/src/test/org/apache/solr/schema/TestPointFields.java
index 3710e2cb91c..19ed5ff9d5e 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestPointFields.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestPointFields.java
@@ -16,6 +16,7 @@
  */
 package org.apache.solr.schema;

+import com.google.common.collect.ImmutableMap;
 import java.io.IOException;
 import java.math.BigDecimal;
 import java.math.BigInteger;
@@ -41,8 +42,6 @@
 import java.util.function.Supplier;
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;
-
-import com.google.common.collect.ImmutableMap;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.DoublePoint;
 import org.apache.lucene.document.FloatPoint;
@@ -76,22 +75,53 @@

 /** Tests for PointField functionality */
 public class TestPointFields extends SolrTestCaseJ4 {
-  // long overflow can occur in some date calculations if gaps are too large, so we limit to a million years BC & AD.
-  private static final long MIN_DATE_EPOCH_MILLIS = LocalDateTime.parse("-1000000-01-01T00:00:00").toInstant(ZoneOffset.ofHours(0)).toEpochMilli();
-  private static final long MAX_DATE_EPOCH_MILLIS = LocalDateTime.parse("+1000000-01-01T00:00:00").toInstant(ZoneOffset.ofHours(0)).toEpochMilli();
-
-  private static final String[] FIELD_SUFFIXES = new String[] {
-      "", "_dv", "_mv", "_mv_dv", "_ni", "_ni_dv", "_ni_dv_ns", "_ni_dv_ns_mv",
-      "_ni_mv", "_ni_mv_dv", "_ni_ns", "_ni_ns_mv", "_dv_ns", "_ni_ns_dv", "_dv_ns_mv",
-      "_smf", "_dv_smf", "_mv_smf", "_mv_dv_smf", "_ni_dv_smf", "_ni_mv_dv_smf",
-      "_sml", "_dv_sml", "_mv_sml", "_mv_dv_sml", "_ni_dv_sml", "_ni_mv_dv_sml"
-  };
+  // long overflow can occur in some date calculations if gaps are too large, so we limit to a
+  // million years BC & AD.
+  private static final long MIN_DATE_EPOCH_MILLIS =
+      LocalDateTime.parse("-1000000-01-01T00:00:00")
+          .toInstant(ZoneOffset.ofHours(0))
+          .toEpochMilli();
+  private static final long MAX_DATE_EPOCH_MILLIS =
+      LocalDateTime.parse("+1000000-01-01T00:00:00")
+          .toInstant(ZoneOffset.ofHours(0))
+          .toEpochMilli();
+
+  private static final String[] FIELD_SUFFIXES =
+      new String[] {
+        "",
+        "_dv",
+        "_mv",
+        "_mv_dv",
+        "_ni",
+        "_ni_dv",
+        "_ni_dv_ns",
+        "_ni_dv_ns_mv",
+        "_ni_mv",
+        "_ni_mv_dv",
+        "_ni_ns",
+        "_ni_ns_mv",
+        "_dv_ns",
+        "_ni_ns_dv",
+        "_dv_ns_mv",
+        "_smf",
+        "_dv_smf",
+        "_mv_smf",
+        "_mv_dv_smf",
+        "_ni_dv_smf",
+        "_ni_mv_dv_smf",
+        "_sml",
+        "_dv_sml",
+        "_mv_sml",
+        "_mv_dv_sml",
+        "_ni_dv_sml",
+        "_ni_mv_dv_sml"
+      };

   @BeforeClass
   public static void beforeClass() throws Exception {
-    initCore("solrconfig.xml","schema-point.xml");
+    initCore("solrconfig.xml", "schema-point.xml");
   }
-
+
   @Override
   @After
   public void tearDown() throws Exception {
@@ -99,7 +129,7 @@ public void tearDown() throws Exception {
     assertU(commit());
     super.tearDown();
   }
-
+
   @Test
   public void testIntPointFieldExactQuery() throws Exception {
     doTestIntPointFieldExactQuery("number_p_i", false);
@@ -110,13 +140,13 @@ public void testIntPointFieldExactQuery() throws Exception {
     doTestIntPointFieldExactQuery("number_p_i_ni_ns_dv", false);
     doTestIntPointFieldExactQuery("number_p_i_ni_mv_dv", false);
   }
-
+
   @Test
   public void testIntPointFieldNonSearchableExactQuery() throws Exception {
     doTestIntPointFieldExactQuery("number_p_i_ni", false, false);
     doTestIntPointFieldExactQuery("number_p_i_ni_ns", false, false);
   }
-
+
   @Test
   public void testIntPointFieldReturn() throws Exception {
     int numValues = 10 * RANDOM_MULTIPLIER;
@@ -125,22 +155,25 @@ public void testIntPointFieldReturn() throws Exception {
     doTestPointFieldReturn("number_p_i_dv_ns", "int", ints);
     doTestPointFieldReturn("number_p_i_ni", "int", ints);
   }
-
+
   @Test
   public void testIntPointFieldRangeQuery() throws Exception {
     doTestIntPointFieldRangeQuery("number_p_i", "int", false);
     doTestIntPointFieldRangeQuery("number_p_i_ni_ns_dv", "int", false);
     doTestIntPointFieldRangeQuery("number_p_i_dv", "int", false);
   }
-
+
   @Test
   public void testIntPointFieldNonSearchableRangeQuery() throws Exception {
-    doTestPointFieldNonSearchableRangeQuery("number_p_i_ni", toStringArray(getRandomInts(1, false)));
-    doTestPointFieldNonSearchableRangeQuery("number_p_i_ni_ns", toStringArray(getRandomInts(1, false)));
+    doTestPointFieldNonSearchableRangeQuery(
+        "number_p_i_ni", toStringArray(getRandomInts(1, false)));
+    doTestPointFieldNonSearchableRangeQuery(
+        "number_p_i_ni_ns", toStringArray(getRandomInts(1, false)));
     int numValues = 2 * RANDOM_MULTIPLIER;
-    doTestPointFieldNonSearchableRangeQuery("number_p_i_ni_ns_mv", toStringArray(getRandomInts(numValues, false)));
+    doTestPointFieldNonSearchableRangeQuery(
+        "number_p_i_ni_ns_mv", toStringArray(getRandomInts(numValues, false)));
   }
-
+
   @Test
   public void testIntPointFieldSortAndFunction() throws Exception {
@@ -148,17 +181,29 @@ public void testIntPointFieldSortAndFunction() throws Exception {
     final List<String> sequential = Arrays.asList("0", "1", "2", "3", "4", "5", "6", "7", "8", "9");
     final List<Integer> randomInts = getRandomInts(10, false);
     final List<Integer> randomIntsMissing = getRandomInts(10, true);
-
-    for (String r : Arrays.asList("*_p_i", "*_p_i_dv", "*_p_i_dv_ns", "*_p_i_ni_dv",
-        "*_p_i_ni_dv_ns", "*_p_i_ni_ns_dv")) {
+
+    for (String r :
+        Arrays.asList(
+            "*_p_i",
+            "*_p_i_dv",
+            "*_p_i_dv_ns",
+            "*_p_i_ni_dv",
+            "*_p_i_ni_dv_ns",
+            "*_p_i_ni_ns_dv")) {
       assertTrue(r, regexToTest.remove(r));
       String field = r.replace("*", "number");
       doTestPointFieldSort(field, sequential);
       doTestPointFieldSort(field, randomInts);
       doTestIntPointFunctionQuery(field);
     }
-    for (String r : Arrays.asList("*_p_i_smf", "*_p_i_dv_smf", "*_p_i_ni_dv_smf",
-        "*_p_i_sml", "*_p_i_dv_sml", "*_p_i_ni_dv_sml")) {
+    for (String r :
+        Arrays.asList(
+            "*_p_i_smf",
+            "*_p_i_dv_smf",
+            "*_p_i_ni_dv_smf",
+            "*_p_i_sml",
+            "*_p_i_dv_sml",
+            "*_p_i_ni_dv_sml")) {
       assertTrue(r, regexToTest.remove(r));
       String field = r.replace("*", "number");
       doTestPointFieldSort(field, sequential);
@@ -171,28 +216,34 @@ public void testIntPointFieldSortAndFunction() throws Exception {
       assertTrue(r, regexToTest.remove(r));
       String field = r.replace("*", "number");
       doTestPointFieldSortError(field, "w/o docValues", toStringArray(getRandomInts(1, false)));
-      doTestPointFieldFunctionQueryError(field, "w/o docValues", toStringArray(getRandomInts(1, false)));
+      doTestPointFieldFunctionQueryError(
+          field, "w/o docValues", toStringArray(getRandomInts(1, false)));
     }
-
+
     // multivalued, no docvalues
-    for (String r : Arrays.asList("*_p_i_mv", "*_p_i_ni_mv", "*_p_i_ni_ns_mv",
-        "*_p_i_mv_smf", "*_p_i_mv_sml")) {
-
+    for (String r :
+        Arrays.asList(
+            "*_p_i_mv", "*_p_i_ni_mv", "*_p_i_ni_ns_mv", "*_p_i_mv_smf", "*_p_i_mv_sml")) {
+
       assertTrue(r, regexToTest.remove(r));
       String field = r.replace("*", "number");
       doTestPointFieldSortError(field, "w/o docValues", toStringArray(getRandomInts(1, false)));
       int numValues = 2 * RANDOM_MULTIPLIER;
-      doTestPointFieldSortError(field, "w/o docValues", toStringArray(getRandomInts(numValues, false)));
-      doTestPointFieldFunctionQueryError(field, "multivalued", toStringArray(getRandomInts(1, false)));
-      doTestPointFieldFunctionQueryError(field, "multivalued", toStringArray(getRandomInts(numValues, false)));
+      doTestPointFieldSortError(
+          field, "w/o docValues", toStringArray(getRandomInts(numValues, false)));
+      doTestPointFieldFunctionQueryError(
+          field, "multivalued", toStringArray(getRandomInts(1, false)));
+      doTestPointFieldFunctionQueryError(
+          field, "multivalued", toStringArray(getRandomInts(numValues, false)));
     }

     // multivalued, w/ docValues
-    for (String r : Arrays.asList("*_p_i_ni_mv_dv", "*_p_i_ni_dv_ns_mv",
-        "*_p_i_dv_ns_mv", "*_p_i_mv_dv",
-        "*_p_i_mv_dv_smf", "*_p_i_ni_mv_dv_smf",
-        "*_p_i_mv_dv_sml", "*_p_i_ni_mv_dv_sml"
-        )) {
+    for (String r :
+        Arrays.asList(
+            "*_p_i_ni_mv_dv", "*_p_i_ni_dv_ns_mv",
+            "*_p_i_dv_ns_mv", "*_p_i_mv_dv",
+            "*_p_i_mv_dv_smf", "*_p_i_ni_mv_dv_smf",
+            "*_p_i_mv_dv_sml", "*_p_i_ni_mv_dv_sml")) {
       assertTrue(r, regexToTest.remove(r));
       String field = r.replace("*", "number");
@@ -203,19 +254,22 @@ public void testIntPointFieldSortAndFunction() throws Exception {

       // value source (w/o field(...,min|max)) usuage should still error...
       int numValues = 2 * RANDOM_MULTIPLIER;
-      doTestPointFieldFunctionQueryError(field, "multivalued", toStringArray(getRandomInts(1, false)));
-      doTestPointFieldFunctionQueryError(field, "multivalued", toStringArray(getRandomInts(numValues, false)));
+      doTestPointFieldFunctionQueryError(
+          field, "multivalued", toStringArray(getRandomInts(1, false)));
+      doTestPointFieldFunctionQueryError(
+          field, "multivalued", toStringArray(getRandomInts(numValues, false)));
     }
-
+
     assertEquals("Missing types in the test", Collections.emptySet(), regexToTest);
   }
-
+
   @Test
   public void testIntPointFieldFacetField() throws Exception {
     doTestPointFieldFacetField("number_p_i", "number_p_i_dv", getSequentialStringArrayWithInts(10));
     clearIndex();
     assertU(commit());
-    doTestPointFieldFacetField("number_p_i", "number_p_i_dv", toStringArray(getRandomInts(10, false)));
+    doTestPointFieldFacetField(
+        "number_p_i", "number_p_i_dv", toStringArray(getRandomInts(10, false)));
   }

   @Test
@@ -230,55 +284,131 @@ public void testIntPointFieldRangeFacet() throws Exception {
     do {
       values = getRandomInts(numValues, false);
       sortedValues = values.stream().sorted().collect(Collectors.toList());
-    } while ((max = sortedValues.get(sortedValues.size() - 1)) >= Integer.MAX_VALUE - numValues); // leave room for rounding
+    } while ((max = sortedValues.get(sortedValues.size() - 1))
+        >= Integer.MAX_VALUE - numValues); // leave room for rounding
     int min = sortedValues.get(0);
-    int gap = (int)(((long)(max + numValues) - (long)min) / (long)numBuckets);
+    int gap = (int) (((long) (max + numValues) - (long) min) / (long) numBuckets);
     int[] bucketCount = new int[numBuckets];
     int bucketNum = 0;
     int minBucketVal = min;
     for (Integer value : sortedValues) {
-      while (((long)value - (long)minBucketVal) >= (long)gap) {
+      while (((long) value - (long) minBucketVal) >= (long) gap) {
         ++bucketNum;
         minBucketVal += gap;
       }
       ++bucketCount[bucketNum];
     }

-    for (int i = 0 ; i < numValues ; i++) {
-      assertU(adoc("id", String.valueOf(i), docValuesField, String.valueOf(values.get(i)), nonDocValuesField, String.valueOf(values.get(i))));
+    for (int i = 0; i < numValues; i++) {
+      assertU(
+          adoc(
+              "id",
+              String.valueOf(i),
+              docValuesField,
+              String.valueOf(values.get(i)),
+              nonDocValuesField,
+              String.valueOf(values.get(i))));
     }
     assertU(commit());

     assertTrue(h.getCore().getLatestSchema().getField(docValuesField).hasDocValues());
-    assertTrue(h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField);
+    assertTrue(
+        h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField);
     String[] testStrings = new String[numBuckets + 1];
     testStrings[numBuckets] = "//*[@numFound='" + numValues + "']";
     minBucketVal = min;
-    for (int i = 0 ; i < numBuckets ; minBucketVal += gap, ++i) {
-      testStrings[i] = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField
-          + "']/lst[@name='counts']/int[@name='" + minBucketVal + "'][.='" + bucketCount[i] + "']";
-    }
-    assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, "facet.range.start", String.valueOf(min),
-        "facet.range.end", String.valueOf(max), "facet.range.gap", String.valueOf(gap)),
+    for (int i = 0; i < numBuckets; minBucketVal += gap, ++i) {
+      testStrings[i] =
"//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + + docValuesField + + "']/lst[@name='counts']/int[@name='" + + minBucketVal + + "'][.='" + + bucketCount[i] + + "']"; + } + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + docValuesField, + "facet.range.start", + String.valueOf(min), + "facet.range.end", + String.valueOf(max), + "facet.range.gap", + String.valueOf(gap)), testStrings); - assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, "facet.range.start", String.valueOf(min), - "facet.range.end", String.valueOf(max), "facet.range.gap", String.valueOf(gap), "facet.range.method", "dv"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + docValuesField, + "facet.range.start", + String.valueOf(min), + "facet.range.end", + String.valueOf(max), + "facet.range.gap", + String.valueOf(gap), + "facet.range.method", + "dv"), testStrings); assertFalse(h.getCore().getLatestSchema().getField(nonDocValuesField).hasDocValues()); - assertTrue(h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField); + assertTrue( + h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField); minBucketVal = min; - for (int i = 0 ; i < numBuckets ; minBucketVal += gap, ++i) { - testStrings[i] = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField - + "']/lst[@name='counts']/int[@name='" + minBucketVal + "'][.='" + bucketCount[i] + "']"; + for (int i = 0; i < numBuckets; minBucketVal += gap, ++i) { + testStrings[i] = + "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + + nonDocValuesField + + "']/lst[@name='counts']/int[@name='" + + minBucketVal + + "'][.='" + + bucketCount[i] + + "']"; } // Range Faceting with method = filter should work - assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, "facet.range.start", String.valueOf(min), - "facet.range.end", String.valueOf(max), "facet.range.gap", String.valueOf(gap), "facet.range.method", "filter"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + nonDocValuesField, + "facet.range.start", + String.valueOf(min), + "facet.range.end", + String.valueOf(max), + "facet.range.gap", + String.valueOf(gap), + "facet.range.method", + "filter"), testStrings); // this should actually use filter method instead of dv - assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, "facet.range.start", String.valueOf(min), - "facet.range.end", String.valueOf(max), "facet.range.gap", String.valueOf(gap), "facet.range.method", "dv"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + nonDocValuesField, + "facet.range.start", + String.valueOf(min), + "facet.range.end", + String.valueOf(max), + "facet.range.gap", + String.valueOf(gap), + "facet.range.method", + "dv"), testStrings); } @@ -286,11 +416,11 @@ public void testIntPointFieldRangeFacet() throws Exception { public void testIntPointStats() throws Exception { int numValues = 10 * RANDOM_MULTIPLIER; // don't produce numbers with exponents, since XPath comparison operators can't handle them - List values = getRandomInts(numValues, false, 9999999); + List values = getRandomInts(numValues, false, 9999999); // System.err.println(Arrays.toString(values.toArray(new Integer[values.size()]))); List sortedValues = values.stream().sorted().collect(Collectors.toList()); - double min = (double)sortedValues.get(0); - double max = 
-    double max = (double)sortedValues.get(sortedValues.size() - 1);
+    double min = (double) sortedValues.get(0);
+    double max = (double) sortedValues.get(sortedValues.size() - 1);

     String[] valArray = toStringArray(values);
     doTestPointStats("number_p_i", "number_p_i_dv", valArray, min, max, numValues, 1, 0D);
@@ -310,7 +440,7 @@ public void testIntPointFieldMultiValuedNonSearchableExactQuery() throws Excepti
     doTestPointFieldMultiValuedExactQuery("number_p_i_ni_mv", ints, false);
     doTestPointFieldMultiValuedExactQuery("number_p_i_ni_ns_mv", ints, false);
   }
-
+
   @Test
   public void testIntPointFieldMultiValuedReturn() throws Exception {
     String[] ints = toStringArray(getRandomInts(20, false));
@@ -318,27 +448,30 @@ public void testIntPointFieldMultiValuedReturn() throws Exception {
     doTestPointFieldMultiValuedReturn("number_p_i_ni_mv_dv", "int", ints);
     doTestPointFieldMultiValuedReturn("number_p_i_dv_ns_mv", "int", ints);
   }
-
+
   @Test
   public void testIntPointFieldMultiValuedRangeQuery() throws Exception {
-    String[] ints = toStringArray(getRandomInts(20, false).stream().sorted().collect(Collectors.toList()));
+    String[] ints =
+        toStringArray(getRandomInts(20, false).stream().sorted().collect(Collectors.toList()));
     doTestPointFieldMultiValuedRangeQuery("number_p_i_mv", "int", ints);
     doTestPointFieldMultiValuedRangeQuery("number_p_i_ni_mv_dv", "int", ints);
     doTestPointFieldMultiValuedRangeQuery("number_p_i_mv_dv", "int", ints);
   }
-
+
   @Test
   public void testIntPointFieldNotIndexed() throws Exception {
     String[] ints = toStringArray(getRandomInts(10, false));
     doTestFieldNotIndexed("number_p_i_ni", ints);
     doTestFieldNotIndexed("number_p_i_ni_mv", ints);
   }
-
-  //TODO MV SORT?
+
+  // TODO MV SORT?
   @Test
   public void testIntPointFieldMultiValuedFacetField() throws Exception {
-    doTestPointFieldMultiValuedFacetField("number_p_i_mv", "number_p_i_mv_dv", getSequentialStringArrayWithInts(20));
-    String[] randomSortedInts = toStringArray(getRandomInts(20, false).stream().sorted().collect(Collectors.toList()));
+    doTestPointFieldMultiValuedFacetField(
+        "number_p_i_mv", "number_p_i_mv_dv", getSequentialStringArrayWithInts(20));
+    String[] randomSortedInts =
+        toStringArray(getRandomInts(20, false).stream().sorted().collect(Collectors.toList()));
     doTestPointFieldMultiValuedFacetField("number_p_i_mv", "number_p_i_mv_dv", randomSortedInts);
   }

@@ -354,75 +487,159 @@ public void testIntPointFieldMultiValuedRangeFacet() throws Exception {
     do {
       values = getRandomInts(numValues, false);
       sortedValues = toAscendingPosVals(values, true);
-    } while ((max = sortedValues.get(sortedValues.size() - 1).val) >= Integer.MAX_VALUE - numValues); // leave room for rounding
+    } while ((max = sortedValues.get(sortedValues.size() - 1).val)
+        >= Integer.MAX_VALUE - numValues); // leave room for rounding
     int min = sortedValues.get(0).val;
-    int gap = (int)(((long)(max + numValues) - (long)min) / (long)numBuckets);
+    int gap = (int) (((long) (max + numValues) - (long) min) / (long) numBuckets);
     List<Set<Integer>> docIdBucket = new ArrayList<>(numBuckets);
-    for (int i = 0 ; i < numBuckets ; ++i) {
+    for (int i = 0; i < numBuckets; ++i) {
       docIdBucket.add(new HashSet<>());
     }
     int bucketNum = 0;
     int minBucketVal = min;
     for (PosVal<Integer> value : sortedValues) {
-      while ((long)value.val - (long)minBucketVal >= gap) {
+      while ((long) value.val - (long) minBucketVal >= gap) {
         ++bucketNum;
         minBucketVal += gap;
       }
-      docIdBucket.get(bucketNum).add(value.pos / 2); // each doc gets two consecutive values
-    }
-    for (int i = 0 ; i < numValues ; i += 2) {
-      assertU(adoc("id", String.valueOf(i / 2),
-          docValuesField, String.valueOf(values.get(i)),
-          docValuesField, String.valueOf(values.get(i + 1)),
-          nonDocValuesField, String.valueOf(values.get(i)),
-          nonDocValuesField, String.valueOf(values.get(i + 1))));
+      docIdBucket.get(bucketNum).add(value.pos / 2); // each doc gets two consecutive values
+    }
+    for (int i = 0; i < numValues; i += 2) {
+      assertU(
+          adoc(
+              "id",
+              String.valueOf(i / 2),
+              docValuesField,
+              String.valueOf(values.get(i)),
+              docValuesField,
+              String.valueOf(values.get(i + 1)),
+              nonDocValuesField,
+              String.valueOf(values.get(i)),
+              nonDocValuesField,
+              String.valueOf(values.get(i + 1))));
     }
     assertU(commit());

     assertTrue(h.getCore().getLatestSchema().getField(docValuesField).hasDocValues());
-    assertTrue(h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField);
+    assertTrue(
+        h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField);
     String[] testStrings = new String[numBuckets + 1];
     minBucketVal = min;
     testStrings[numBuckets] = "//*[@numFound='" + (numValues / 2) + "']";
-    for (int i = 0 ; i < numBuckets ; minBucketVal += gap, ++i) {
-      testStrings[i] = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField
-          + "']/lst[@name='counts']/int[@name='" + minBucketVal + "'][.='" + docIdBucket.get(i).size() + "']";
-    }
-
-    assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField,
-        "facet.range.start", String.valueOf(min), "facet.range.end", String.valueOf(max),
-        "facet.range.gap", String.valueOf(gap), "indent", "on"),
+    for (int i = 0; i < numBuckets; minBucketVal += gap, ++i) {
+      testStrings[i] =
+          "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='"
+              + docValuesField
+              + "']/lst[@name='counts']/int[@name='"
+              + minBucketVal
+              + "'][.='"
+              + docIdBucket.get(i).size()
+              + "']";
+    }
+
+    assertQ(
+        req(
+            "q",
+            "*:*",
+            "facet",
+            "true",
+            "facet.range",
+            docValuesField,
+            "facet.range.start",
+            String.valueOf(min),
+            "facet.range.end",
+            String.valueOf(max),
+            "facet.range.gap",
+            String.valueOf(gap),
+            "indent",
+            "on"),
         testStrings);
-    assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField,
-        "facet.range.start", String.valueOf(min), "facet.range.end", String.valueOf(max),
-        "facet.range.gap", String.valueOf(gap), "facet.range.method", "dv", "indent", "on"),
+    assertQ(
+        req(
+            "q",
+            "*:*",
+            "facet",
+            "true",
+            "facet.range",
+            docValuesField,
+            "facet.range.start",
+            String.valueOf(min),
+            "facet.range.end",
+            String.valueOf(max),
+            "facet.range.gap",
+            String.valueOf(gap),
+            "facet.range.method",
+            "dv",
+            "indent",
+            "on"),
         testStrings);

     assertFalse(h.getCore().getLatestSchema().getField(nonDocValuesField).hasDocValues());
-    assertTrue(h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField);
+    assertTrue(
+        h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField);
     minBucketVal = min;
-    for (int i = 0 ; i < numBuckets ; minBucketVal += gap, ++i) {
-      testStrings[i] = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField
-          + "']/lst[@name='counts']/int[@name='" + minBucketVal + "'][.='" + docIdBucket.get(i).size() + "']";
+    for (int i = 0; i < numBuckets; minBucketVal += gap, ++i) {
+      testStrings[i] =
+          "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='"
+              + nonDocValuesField
+              + "']/lst[@name='counts']/int[@name='"
+              + minBucketVal
+              + "'][.='"
+              + docIdBucket.get(i).size()
+              + "']";
     }
     // Range Faceting with method = filter should work
-    assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField,
-        "facet.range.start", String.valueOf(min), "facet.range.end", String.valueOf(max),
-        "facet.range.gap", String.valueOf(gap), "facet.range.method", "filter", "indent", "on"),
+    assertQ(
+        req(
+            "q",
+            "*:*",
+            "facet",
+            "true",
+            "facet.range",
+            nonDocValuesField,
+            "facet.range.start",
+            String.valueOf(min),
+            "facet.range.end",
+            String.valueOf(max),
+            "facet.range.gap",
+            String.valueOf(gap),
+            "facet.range.method",
+            "filter",
+            "indent",
+            "on"),
         testStrings);
     // this should actually use filter method instead of dv
-    assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField,
-        "facet.range.start", String.valueOf(min), "facet.range.end", String.valueOf(max),
-        "facet.range.gap", String.valueOf(gap), "facet.range.method", "dv", "indent", "on"),
+    assertQ(
+        req(
+            "q",
+            "*:*",
+            "facet",
+            "true",
+            "facet.range",
+            nonDocValuesField,
+            "facet.range.start",
+            String.valueOf(min),
+            "facet.range.end",
+            String.valueOf(max),
+            "facet.range.gap",
+            String.valueOf(gap),
+            "facet.range.method",
+            "dv",
+            "indent",
+            "on"),
         testStrings);
   }

   @Test
-  public void testIntPointMultiValuedFunctionQuery() throws Exception {
-    doTestPointMultiValuedFunctionQuery("number_p_i_mv", "number_p_i_mv_dv", "int", getSequentialStringArrayWithInts(20));
-    doTestPointMultiValuedFunctionQuery("number_p_i_mv", "number_p_i_mv_dv", "int",
+  public void testIntPointMultiValuedFunctionQuery() throws Exception {
+    doTestPointMultiValuedFunctionQuery(
+        "number_p_i_mv", "number_p_i_mv_dv", "int", getSequentialStringArrayWithInts(20));
+    doTestPointMultiValuedFunctionQuery(
+        "number_p_i_mv",
+        "number_p_i_mv_dv",
+        "int",
         toStringArray(getRandomInts(20, false).stream().sorted().collect(Collectors.toList())));
   }
-
+
   @Test
   public void testIntPointFieldsAtomicUpdates() throws Exception {
     if (!Boolean.getBoolean("enable.update.log")) {
@@ -432,7 +649,7 @@ public void testIntPointFieldsAtomicUpdates() throws Exception {
     doTestIntPointFieldsAtomicUpdates("number_p_i_dv");
     doTestIntPointFieldsAtomicUpdates("number_p_i_dv_ns");
   }
-
+
   @Test
   public void testMultiValuedIntPointFieldsAtomicUpdates() throws Exception {
     if (!Boolean.getBoolean("enable.update.log")) {
@@ -445,10 +662,13 @@ public void testMultiValuedIntPointFieldsAtomicUpdates() throws Exception {
   }

   private String[] toStringArray(List<?> list) {
-    return list.stream().map(String::valueOf).collect(Collectors.toList()).toArray(new String[list.size()]);
+    return list.stream()
+        .map(String::valueOf)
+        .collect(Collectors.toList())
+        .toArray(new String[list.size()]);
   }

-  private class PosVal <T extends Comparable<T>> {
+  private class PosVal<T extends Comparable<T>> {
     int pos;
     T val;
@@ -456,14 +676,18 @@ private class PosVal<T extends Comparable<T>> {
       this.pos = pos;
       this.val = val;
     }
+
     public String toString() {
       return "(" + pos + ": " + val.toString() + ")";
     }
   }

-  /** Primary sort by value, with nulls either first or last as specified, and then secondary sort by position. */
-  private <T extends Comparable<T>>
-  Comparator<PosVal<T>> getPosValComparator(final boolean ascending, final boolean nullsFirst) {
+  /**
+   * Primary sort by value, with nulls either first or last as specified, and then secondary sort by
+   * position.
+   */
+  private <T extends Comparable<T>> Comparator<PosVal<T>> getPosValComparator(
+      final boolean ascending, final boolean nullsFirst) {
     return (o1, o2) -> {
       if (o1.val == null) {
         if (o2.val == null) {
@@ -479,36 +703,46 @@ Comparator<PosVal<T>> getPosValComparator(final boolean ascending, final boolean
     };
   }

-  /**
-   * Primary ascending sort by value, with missing values (represented as null) either first or last as specified,
-   * and then secondary ascending sort by position.
+  /**
+   * Primary ascending sort by value, with missing values (represented as null) either first or last
+   * as specified, and then secondary ascending sort by position.
    */
-  private <T extends Comparable<T>> String[] toAscendingStringArray(List<T> list, boolean missingFirst) {
-    return toStringArray(toAscendingPosVals(list, missingFirst).stream().map(pv -> pv.val).collect(Collectors.toList()));
+  private <T extends Comparable<T>> String[] toAscendingStringArray(
+      List<T> list, boolean missingFirst) {
+    return toStringArray(
+        toAscendingPosVals(list, missingFirst).stream()
+            .map(pv -> pv.val)
+            .collect(Collectors.toList()));
   }

   /**
-   * Primary ascending sort by value, with missing values (represented as null) either first or last as specified,
-   * and then secondary ascending sort by position.
-   *
+   * Primary ascending sort by value, with missing values (represented as null) either first or last
+   * as specified, and then secondary ascending sort by position.
+   *
    * @return a list of the (originally) positioned values sorted as described above.
    */
-  private <T extends Comparable<T>> List<PosVal<T>> toAscendingPosVals(List<T> list, boolean missingFirst) {
-    List<PosVal<T>> posVals = IntStream.range(0, list.size())
-        .mapToObj(i -> new PosVal<>(i, list.get(i))).collect(Collectors.toList());
+  private <T extends Comparable<T>> List<PosVal<T>> toAscendingPosVals(
+      List<T> list, boolean missingFirst) {
+    List<PosVal<T>> posVals =
+        IntStream.range(0, list.size())
+            .mapToObj(i -> new PosVal<>(i, list.get(i)))
+            .collect(Collectors.toList());
     posVals.sort(getPosValComparator(true, missingFirst));
     return posVals;
   }

   /**
-   * Primary descending sort by value, with missing values (represented as null) either first or last as specified,
-   * and then secondary descending sort by position.
+   * Primary descending sort by value, with missing values (represented as null) either first or
+   * last as specified, and then secondary descending sort by position.
    *
    * @return a list of the (originally) positioned values sorted as described above.
    */
-  private <T extends Comparable<T>> List<PosVal<T>> toDescendingPosVals(List<T> list, boolean missingFirst) {
-    List<PosVal<T>> posVals = IntStream.range(0, list.size())
-        .mapToObj(i -> new PosVal<>(i, list.get(i))).collect(Collectors.toList());
+  private <T extends Comparable<T>> List<PosVal<T>> toDescendingPosVals(
+      List<T> list, boolean missingFirst) {
+    List<PosVal<T>> posVals =
+        IntStream.range(0, list.size())
+            .mapToObj(i -> new PosVal<>(i, list.get(i)))
+            .collect(Collectors.toList());
     posVals.sort(getPosValComparator(false, missingFirst));
     return posVals;
   }
@@ -519,7 +753,7 @@ public void testIntPointSetQuery() throws Exception {
     doTestSetQueries("number_p_i_mv", toStringArray(getRandomInts(20, false)), true);
     doTestSetQueries("number_p_i_ni_dv", toStringArray(getRandomInts(20, false)), false);
   }
-
+
   // DoublePointField

   @Test
@@ -533,13 +767,13 @@ public void testDoublePointFieldExactQuery() throws Exception {
     doTestFloatPointFieldExactQuery("number_p_d_ni_dv_ns", true);
     doTestFloatPointFieldExactQuery("number_p_d_ni_mv_dv", true);
   }
-
+
   @Test
   public void testDoublePointFieldNonSearchableExactQuery() throws Exception {
     doTestFloatPointFieldExactQuery("number_p_d_ni", false, true);
     doTestFloatPointFieldExactQuery("number_p_d_ni_ns", false, true);
   }
-
+
   @Test
   public void testDoublePointFieldReturn() throws Exception {
     int numValues = 10 * RANDOM_MULTIPLIER;
@@ -547,32 +781,41 @@ public void testDoublePointFieldReturn() throws Exception {
     doTestPointFieldReturn("number_p_d", "double", doubles);
     doTestPointFieldReturn("number_p_d_dv_ns", "double", doubles);
   }
-
+
   @Test
   public void testDoublePointFieldRangeQuery() throws Exception {
     doTestFloatPointFieldRangeQuery("number_p_d", "double", true);
     doTestFloatPointFieldRangeQuery("number_p_d_ni_ns_dv", "double", true);
     doTestFloatPointFieldRangeQuery("number_p_d_dv", "double", true);
   }
-
+
   @Test
   public void testDoubleFieldNonSearchableRangeQuery() throws Exception {
-    doTestPointFieldNonSearchableRangeQuery("number_p_d_ni", toStringArray(getRandomDoubles(1, false)));
-    doTestPointFieldNonSearchableRangeQuery("number_p_d_ni_ns", toStringArray(getRandomDoubles(1, false)));
+    doTestPointFieldNonSearchableRangeQuery(
+        "number_p_d_ni", toStringArray(getRandomDoubles(1, false)));
+    doTestPointFieldNonSearchableRangeQuery(
+        "number_p_d_ni_ns", toStringArray(getRandomDoubles(1, false)));
     int numValues = 2 * RANDOM_MULTIPLIER;
-    doTestPointFieldNonSearchableRangeQuery("number_p_d_ni_ns_mv", toStringArray(getRandomDoubles(numValues, false)));
+    doTestPointFieldNonSearchableRangeQuery(
+        "number_p_d_ni_ns_mv", toStringArray(getRandomDoubles(numValues, false)));
   }
-
-
+
   @Test
   public void testDoublePointFieldSortAndFunction() throws Exception {
     final SortedSet<String> regexToTest = dynFieldRegexesForType(DoublePointField.class);
-    final List<String> sequential = Arrays.asList("0.0", "1.0", "2.0", "3.0", "4.0", "5.0", "6.0", "7.0", "8.0", "9.0");
+    final List<String> sequential =
+        Arrays.asList("0.0", "1.0", "2.0", "3.0", "4.0", "5.0", "6.0", "7.0", "8.0", "9.0");
     List<Double> randomDoubles = getRandomDoubles(10, false);
     List<Double> randomDoublesMissing = getRandomDoubles(10, true);

-    for (String r : Arrays.asList("*_p_d", "*_p_d_dv", "*_p_d_dv_ns", "*_p_d_ni_dv",
-        "*_p_d_ni_dv_ns", "*_p_d_ni_ns_dv")) {
+    for (String r :
+        Arrays.asList(
+            "*_p_d",
+            "*_p_d_dv",
+            "*_p_d_dv_ns",
+            "*_p_d_ni_dv",
+            "*_p_d_ni_dv_ns",
+            "*_p_d_ni_ns_dv")) {
       assertTrue(r, regexToTest.remove(r));
       String field = r.replace("*", "number");
       doTestPointFieldSort(field, sequential);
@@ -580,26 +823,33 @@ public void testDoublePointFieldSortAndFunction() throws Exception {
       doTestDoublePointFunctionQuery(field);
     }

-    for (String r : Arrays.asList("*_p_d_smf", "*_p_d_dv_smf", "*_p_d_ni_dv_smf",
-        "*_p_d_sml", "*_p_d_dv_sml", "*_p_d_ni_dv_sml")) {
+    for (String r :
+        Arrays.asList(
+            "*_p_d_smf",
+            "*_p_d_dv_smf",
+            "*_p_d_ni_dv_smf",
+            "*_p_d_sml",
+            "*_p_d_dv_sml",
+            "*_p_d_ni_dv_sml")) {
       assertTrue(r, regexToTest.remove(r));
       String field = r.replace("*", "number");
       doTestPointFieldSort(field, sequential);
       doTestPointFieldSort(field, randomDoublesMissing);
       doTestDoublePointFunctionQuery(field);
     }
-
+
     for (String r : Arrays.asList("*_p_d_ni", "*_p_d_ni_ns")) {
       assertTrue(r, regexToTest.remove(r));
       String field = r.replace("*", "number");
       doTestPointFieldSortError(field, "w/o docValues", "42.34");
       doTestPointFieldFunctionQueryError(field, "w/o docValues", "42.34");
     }
-
+
     // multivalued, no docvalues
-    for (String r : Arrays.asList("*_p_d_mv", "*_p_d_ni_mv", "*_p_d_ni_ns_mv",
-        "*_p_d_mv_smf", "*_p_d_mv_sml")) {
-
+    for (String r :
+        Arrays.asList(
+            "*_p_d_mv", "*_p_d_ni_mv", "*_p_d_ni_ns_mv", "*_p_d_mv_smf", "*_p_d_mv_sml")) {
+
       assertTrue(r, regexToTest.remove(r));
       String field = r.replace("*", "number");
       doTestPointFieldSortError(field, "w/o docValues", "42.34");
@@ -607,33 +857,37 @@ public void testDoublePointFieldSortAndFunction() throws Exception {
       doTestPointFieldFunctionQueryError(field, "multivalued", "42.34");
       doTestPointFieldFunctionQueryError(field, "multivalued", "42.34", "66.6");
     }
-
+
     // multivalued, w/ docValues
-    for (String r : Arrays.asList("*_p_d_ni_mv_dv", "*_p_d_ni_dv_ns_mv",
-        "*_p_d_dv_ns_mv", "*_p_d_mv_dv",
-        "*_p_d_mv_dv_smf", "*_p_d_ni_mv_dv_smf",
-        "*_p_d_mv_dv_sml", "*_p_d_ni_mv_dv_sml")) {
+    for (String r :
+        Arrays.asList(
+            "*_p_d_ni_mv_dv", "*_p_d_ni_dv_ns_mv",
+            "*_p_d_dv_ns_mv", "*_p_d_mv_dv",
+            "*_p_d_mv_dv_smf", "*_p_d_ni_mv_dv_smf",
+            "*_p_d_mv_dv_sml", "*_p_d_ni_mv_dv_sml")) {
       assertTrue(r, regexToTest.remove(r));
       String field = r.replace("*", "number");
-
+
       // NOTE: only testing one value per doc here, but TestMinMaxOnMultiValuedField
       // covers this in more depth
       doTestPointFieldSort(field, sequential);
       doTestPointFieldSort(field, randomDoubles);
-
+
       // value source (w/o field(...,min|max)) usuage should still error...
       doTestPointFieldFunctionQueryError(field, "multivalued", "42.34");
       doTestPointFieldFunctionQueryError(field, "multivalued", "42.34", "66.6");
     }

     assertEquals("Missing types in the test", Collections.emptySet(), regexToTest);
   }
-
+
   @Test
   public void testDoublePointFieldFacetField() throws Exception {
-    doTestPointFieldFacetField("number_p_d", "number_p_d_dv", getSequentialStringArrayWithDoubles(10));
+    doTestPointFieldFacetField(
+        "number_p_d", "number_p_d_dv", getSequentialStringArrayWithDoubles(10));
     clearIndex();
     assertU(commit());
-    doTestPointFieldFacetField("number_p_d", "number_p_d_dv", toStringArray(getRandomDoubles(10, false)));
+    doTestPointFieldFacetField(
+        "number_p_d", "number_p_d_dv", toStringArray(getRandomDoubles(10, false)));
   }

   @Test
@@ -649,12 +903,20 @@ public void testDoublePointFieldRangeFacet() throws Exception {
       sortedValues = values.stream().sorted().collect(Collectors.toList());
       min = sortedValues.get(0);
       max = sortedValues.get(sortedValues.size() - 1);
-      buffer = BigDecimal.valueOf(max).subtract(BigDecimal.valueOf(min))
-          .divide(BigDecimal.valueOf(numValues / 2), RoundingMode.HALF_UP).doubleValue();
-      gap = BigDecimal.valueOf(max).subtract(BigDecimal.valueOf(min)).add(BigDecimal.valueOf(buffer * 2.0D))
-          .divide(BigDecimal.valueOf(numBuckets), RoundingMode.HALF_UP).doubleValue();
+      buffer =
+          BigDecimal.valueOf(max)
+              .subtract(BigDecimal.valueOf(min))
+              .divide(BigDecimal.valueOf(numValues / 2), RoundingMode.HALF_UP)
+              .doubleValue();
+      gap =
+          BigDecimal.valueOf(max)
+              .subtract(BigDecimal.valueOf(min))
+              .add(BigDecimal.valueOf(buffer * 2.0D))
+              .divide(BigDecimal.valueOf(numBuckets), RoundingMode.HALF_UP)
+              .doubleValue();
     } while (max >= Double.MAX_VALUE - buffer || min <= -Double.MAX_VALUE + buffer);
-    // System.err.println("min: " + min + " max: " + max + " gap: " + gap + " buffer: " + buffer);
+    // System.err.println("min: " + min + " max: " + max + " gap: " + gap + " buffer: " +
+    // buffer);
     int[] bucketCount = new int[numBuckets];
     int bucketNum = 0;
     double minBucketVal = min - buffer;
@@ -669,71 +931,146 @@ public void testDoublePointFieldRangeFacet() throws Exception {
       ++bucketCount[bucketNum];
     }

-    for (int i = 0 ; i < numValues ; i++) {
-      assertU(adoc("id", String.valueOf(i),
-          docValuesField, String.valueOf(values.get(i)), nonDocValuesField, String.valueOf(values.get(i))));
+    for (int i = 0; i < numValues; i++) {
+      assertU(
+          adoc(
+              "id",
+              String.valueOf(i),
+              docValuesField,
+              String.valueOf(values.get(i)),
+              nonDocValuesField,
+              String.valueOf(values.get(i))));
     }
     assertU(commit());

     String[] testStrings = new String[numBuckets + 1];
     testStrings[numBuckets] = "//*[@numFound='" + numValues + "']";
     minBucketVal = min - buffer;
-    for (int i = 0 ; i < numBuckets ; minBucketVal += gap, ++i) {
-      testStrings[i] = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField
-          + "']/lst[@name='counts']/int[@name='" + minBucketVal + "'][.='" + bucketCount[i] + "']";
-    }
-    assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, "facet.range.start", String.valueOf(min - buffer),
-        "facet.range.end", String.valueOf(max + buffer), "facet.range.gap", String.valueOf(gap)),
+    for (int i = 0; i < numBuckets; minBucketVal += gap, ++i) {
+      testStrings[i] =
+          "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='"
+              + docValuesField
+              + "']/lst[@name='counts']/int[@name='"
+              + minBucketVal
+              + "'][.='"
+              + bucketCount[i]
+              + "']";
+    }
+    assertQ(
+        req(
+            "q",
+            "*:*",
+            "facet",
+            "true",
+            "facet.range",
+            docValuesField,
+            "facet.range.start",
+            String.valueOf(min - buffer),
+            "facet.range.end",
+            String.valueOf(max + buffer),
+            "facet.range.gap",
+            String.valueOf(gap)),
         testStrings);
-    assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, "facet.range.start", String.valueOf(min - buffer),
-        "facet.range.end", String.valueOf(max + buffer), "facet.range.gap", String.valueOf(gap), "facet.range.method", "dv"),
+    assertQ(
+        req(
+            "q",
+            "*:*",
+            "facet",
+            "true",
+            "facet.range",
+            docValuesField,
+            "facet.range.start",
+            String.valueOf(min - buffer),
+            "facet.range.end",
+            String.valueOf(max + buffer),
+            "facet.range.gap",
+            String.valueOf(gap),
+            "facet.range.method",
+            "dv"),
         testStrings);

     minBucketVal = min - buffer;
-    for (int i = 0 ; i < numBuckets ; minBucketVal += gap, ++i) {
-      testStrings[i] = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField
-          + "']/lst[@name='counts']/int[@name='" + minBucketVal + "'][.='" + bucketCount[i] + "']";
+    for (int i = 0; i < numBuckets; minBucketVal += gap, ++i) {
+      testStrings[i] =
+          "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='"
+              + nonDocValuesField
+              + "']/lst[@name='counts']/int[@name='"
+              + minBucketVal
+              + "'][.='"
+              + bucketCount[i]
+              + "']";
     }
     // Range Faceting with method = filter should work
-    assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, "facet.range.start", String.valueOf(min - buffer),
-        "facet.range.end", String.valueOf(max + buffer), "facet.range.gap", String.valueOf(gap), "facet.range.method", "filter"),
+    assertQ(
+        req(
+            "q",
+            "*:*",
+            "facet",
+            "true",
+            "facet.range",
+            nonDocValuesField,
+            "facet.range.start",
+            String.valueOf(min - buffer),
+            "facet.range.end",
+            String.valueOf(max + buffer),
+            "facet.range.gap",
+            String.valueOf(gap),
+            "facet.range.method",
+            "filter"),
         testStrings);
     // this should actually use filter method instead of dv
-    assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, "facet.range.start", String.valueOf(min - buffer),
-        "facet.range.end", String.valueOf(max + buffer), "facet.range.gap", String.valueOf(gap), "facet.range.method", "dv"),
+    assertQ(
+        req(
+            "q",
+            "*:*",
+            "facet",
+            "true",
+            "facet.range",
+            nonDocValuesField,
+            "facet.range.start",
+            String.valueOf(min - buffer),
+            "facet.range.end",
+            String.valueOf(max + buffer),
+            "facet.range.gap",
+            String.valueOf(gap),
+            "facet.range.method",
+            "dv"),
         testStrings);
   }

   @Test
   public void testDoublePointStats() throws Exception {
     int numValues = 10 * RANDOM_MULTIPLIER;
-    // don't produce numbers with exponents, since XPath comparison operators can't handle them: 7 digits of precision
-    List<Float> values = getRandomInts(numValues, false, 9999999).stream()
-        .map(v -> (float)((double)v * Math.pow(10D, -1 * random().nextInt(8)))).collect(Collectors.toList());
+    // don't produce numbers with exponents, since XPath comparison operators can't handle them: 7
+    // digits of precision
+    List<Float> values =
+        getRandomInts(numValues, false, 9999999).stream()
+            .map(v -> (float) ((double) v * Math.pow(10D, -1 * random().nextInt(8))))
+            .collect(Collectors.toList());
     // System.err.println(Arrays.toString(values.toArray(new Float[values.size()])));
     List<Float> sortedValues = values.stream().sorted().collect(Collectors.toList());
-    double min = (double)sortedValues.get(0);
-    double max = (double)sortedValues.get(sortedValues.size() - 1);
+    double min = (double) sortedValues.get(0);
+    double max = (double) sortedValues.get(sortedValues.size() - 1);
valArray = toStringArray(values); doTestPointStats("number_p_d", "number_p_d_dv", valArray, min, max, numValues, 1, 1E-7D); doTestPointStats("number_p_d", "number_p_d_mv_dv", valArray, min, max, numValues, 1, 1E-7D); } - + @Test public void testDoublePointFieldMultiValuedExactQuery() throws Exception { String[] doubles = toStringArray(getRandomDoubles(20, false)); doTestPointFieldMultiValuedExactQuery("number_p_d_mv", doubles); doTestPointFieldMultiValuedExactQuery("number_p_d_ni_mv_dv", doubles); } - + @Test public void testDoublePointFieldMultiValuedNonSearchableExactQuery() throws Exception { String[] doubles = toStringArray(getRandomDoubles(20, false)); doTestPointFieldMultiValuedExactQuery("number_p_d_ni_mv", doubles, false); doTestPointFieldMultiValuedExactQuery("number_p_d_ni_ns_mv", doubles, false); } - + @Test public void testDoublePointFieldMultiValuedReturn() throws Exception { String[] doubles = toStringArray(getRandomDoubles(20, false)); @@ -741,19 +1078,22 @@ public void testDoublePointFieldMultiValuedReturn() throws Exception { doTestPointFieldMultiValuedReturn("number_p_d_ni_mv_dv", "double", doubles); doTestPointFieldMultiValuedReturn("number_p_d_dv_ns_mv", "double", doubles); } - + @Test public void testDoublePointFieldMultiValuedRangeQuery() throws Exception { - String[] doubles = toStringArray(getRandomDoubles(20, false).stream().sorted().collect(Collectors.toList())); + String[] doubles = + toStringArray(getRandomDoubles(20, false).stream().sorted().collect(Collectors.toList())); doTestPointFieldMultiValuedRangeQuery("number_p_d_mv", "double", doubles); doTestPointFieldMultiValuedRangeQuery("number_p_d_ni_mv_dv", "double", doubles); doTestPointFieldMultiValuedRangeQuery("number_p_d_mv_dv", "double", doubles); } - + @Test public void testDoublePointFieldMultiValuedFacetField() throws Exception { - doTestPointFieldMultiValuedFacetField("number_p_d_mv", "number_p_d_mv_dv", getSequentialStringArrayWithDoubles(20)); - doTestPointFieldMultiValuedFacetField("number_p_d_mv", "number_p_d_mv_dv", toStringArray(getRandomDoubles(20, false))); + doTestPointFieldMultiValuedFacetField( + "number_p_d_mv", "number_p_d_mv_dv", getSequentialStringArrayWithDoubles(20)); + doTestPointFieldMultiValuedFacetField( + "number_p_d_mv", "number_p_d_mv_dv", toStringArray(getRandomDoubles(20, false))); } @Test @@ -780,14 +1120,22 @@ public void testDoublePointFieldMultiValuedRangeFacet() throws Exception { sortedValues = toAscendingPosVals(values, true); min = sortedValues.get(0).val; max = sortedValues.get(sortedValues.size() - 1).val; - buffer = BigDecimal.valueOf(max).subtract(BigDecimal.valueOf(min)) - .divide(BigDecimal.valueOf(numValues / 2), RoundingMode.HALF_UP).doubleValue(); - gap = BigDecimal.valueOf(max).subtract(BigDecimal.valueOf(min)).add(BigDecimal.valueOf(buffer * 2.0D)) - .divide(BigDecimal.valueOf(numBuckets), RoundingMode.HALF_UP).doubleValue(); + buffer = + BigDecimal.valueOf(max) + .subtract(BigDecimal.valueOf(min)) + .divide(BigDecimal.valueOf(numValues / 2), RoundingMode.HALF_UP) + .doubleValue(); + gap = + BigDecimal.valueOf(max) + .subtract(BigDecimal.valueOf(min)) + .add(BigDecimal.valueOf(buffer * 2.0D)) + .divide(BigDecimal.valueOf(numBuckets), RoundingMode.HALF_UP) + .doubleValue(); } while (max >= Double.MAX_VALUE - buffer || min <= -Double.MAX_VALUE + buffer); - // System.err.println("min: " + min + " max: " + max + " gap: " + gap + " buffer: " + buffer); + // System.err.println("min: " + min + " max: " + max + " gap: " + gap + " buffer: " + + // buffer); List> 
docIdBucket = new ArrayList<>(numBuckets); - for (int i = 0 ; i < numBuckets ; ++i) { + for (int i = 0; i < numBuckets; ++i) { docIdBucket.add(new HashSet<>()); } int bucketNum = 0; @@ -800,56 +1148,138 @@ public void testDoublePointFieldMultiValuedRangeFacet() throws Exception { minBucketVal += gap; // System.err.println("bucketNum: " + bucketNum + " minBucketVal: " + minBucketVal); } - docIdBucket.get(bucketNum).add(value.pos / 2); // each doc gets two consecutive values - } - for (int i = 0 ; i < numValues ; i += 2) { - assertU(adoc("id", String.valueOf(i / 2), - docValuesField, String.valueOf(values.get(i)), - docValuesField, String.valueOf(values.get(i + 1)), - nonDocValuesField, String.valueOf(values.get(i)), - nonDocValuesField, String.valueOf(values.get(i + 1)))); + docIdBucket.get(bucketNum).add(value.pos / 2); // each doc gets two consecutive values + } + for (int i = 0; i < numValues; i += 2) { + assertU( + adoc( + "id", + String.valueOf(i / 2), + docValuesField, + String.valueOf(values.get(i)), + docValuesField, + String.valueOf(values.get(i + 1)), + nonDocValuesField, + String.valueOf(values.get(i)), + nonDocValuesField, + String.valueOf(values.get(i + 1)))); } assertU(commit()); String[] testStrings = new String[numBuckets + 1]; testStrings[numBuckets] = "//*[@numFound='" + (numValues / 2) + "']"; minBucketVal = min - buffer; - for (int i = 0 ; i < numBuckets ; minBucketVal += gap, ++i) { - testStrings[i] = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField - + "']/lst[@name='counts']/int[@name='" + minBucketVal + "'][.='" + docIdBucket.get(i).size() + "']"; - } - assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, - "facet.range.start", String.valueOf(min - buffer), "facet.range.end", String.valueOf(max + buffer), - "facet.range.gap", String.valueOf(gap), "indent", "on"), + for (int i = 0; i < numBuckets; minBucketVal += gap, ++i) { + testStrings[i] = + "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + + docValuesField + + "']/lst[@name='counts']/int[@name='" + + minBucketVal + + "'][.='" + + docIdBucket.get(i).size() + + "']"; + } + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + docValuesField, + "facet.range.start", + String.valueOf(min - buffer), + "facet.range.end", + String.valueOf(max + buffer), + "facet.range.gap", + String.valueOf(gap), + "indent", + "on"), testStrings); - assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, - "facet.range.start", String.valueOf(min - buffer), "facet.range.end", String.valueOf(max + buffer), - "facet.range.gap", String.valueOf(gap), "facet.range.method", "dv", "indent", "on"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + docValuesField, + "facet.range.start", + String.valueOf(min - buffer), + "facet.range.end", + String.valueOf(max + buffer), + "facet.range.gap", + String.valueOf(gap), + "facet.range.method", + "dv", + "indent", + "on"), testStrings); minBucketVal = min - buffer; - for (int i = 0 ; i < numBuckets ; minBucketVal += gap, ++i) { - testStrings[i] = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField - + "']/lst[@name='counts']/int[@name='" + minBucketVal + "'][.='" + docIdBucket.get(i).size() + "']"; + for (int i = 0; i < numBuckets; minBucketVal += gap, ++i) { + testStrings[i] = + "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + + nonDocValuesField + + "']/lst[@name='counts']/int[@name='" + + minBucketVal + + "'][.='" + 
+ docIdBucket.get(i).size() + + "']"; } // Range Faceting with method = filter should work - assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, - "facet.range.start", String.valueOf(min - buffer), "facet.range.end", String.valueOf(max + buffer), - "facet.range.gap", String.valueOf(gap), "facet.range.method", "filter", "indent", "on"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + nonDocValuesField, + "facet.range.start", + String.valueOf(min - buffer), + "facet.range.end", + String.valueOf(max + buffer), + "facet.range.gap", + String.valueOf(gap), + "facet.range.method", + "filter", + "indent", + "on"), testStrings); // this should actually use filter method instead of dv - assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, - "facet.range.start", String.valueOf(min - buffer), "facet.range.end", String.valueOf(max + buffer), - "facet.range.gap", String.valueOf(gap), "facet.range.method", "dv", "indent", "on"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + nonDocValuesField, + "facet.range.start", + String.valueOf(min - buffer), + "facet.range.end", + String.valueOf(max + buffer), + "facet.range.gap", + String.valueOf(gap), + "facet.range.method", + "dv", + "indent", + "on"), testStrings); } - + @Test public void testDoublePointMultiValuedFunctionQuery() throws Exception { - doTestPointMultiValuedFunctionQuery("number_p_d_mv", "number_p_d_mv_dv", "double", getSequentialStringArrayWithDoubles(20)); - doTestPointMultiValuedFunctionQuery("number_p_d_mv", "number_p_d_mv_dv", "double", toAscendingStringArray(getRandomFloats(20, false), true)); + doTestPointMultiValuedFunctionQuery( + "number_p_d_mv", "number_p_d_mv_dv", "double", getSequentialStringArrayWithDoubles(20)); + doTestPointMultiValuedFunctionQuery( + "number_p_d_mv", + "number_p_d_mv_dv", + "double", + toAscendingStringArray(getRandomFloats(20, false), true)); } - + @Test public void testDoublePointFieldsAtomicUpdates() throws Exception { if (!Boolean.getBoolean("enable.update.log")) { @@ -859,7 +1289,7 @@ public void testDoublePointFieldsAtomicUpdates() throws Exception { doTestDoublePointFieldsAtomicUpdates("number_p_d_dv"); doTestDoublePointFieldsAtomicUpdates("number_p_d_dv_ns"); } - + @Test public void testMultiValuedDoublePointFieldsAtomicUpdates() throws Exception { if (!Boolean.getBoolean("enable.update.log")) { @@ -870,49 +1300,46 @@ public void testMultiValuedDoublePointFieldsAtomicUpdates() throws Exception { doTestMultiValuedPointFieldsAtomicUpdates("number_p_d_ni_mv_dv", "double", doubles); doTestMultiValuedPointFieldsAtomicUpdates("number_p_d_dv_ns_mv", "double", doubles); } - + @Test public void testDoublePointFieldNotIndexed() throws Exception { String[] doubles = toStringArray(getRandomDoubles(10, false)); doTestFieldNotIndexed("number_p_d_ni", doubles); doTestFieldNotIndexed("number_p_d_ni_mv", doubles); } - - + private void doTestFloatPointFieldsAtomicUpdates(String field) throws Exception { float number1 = getRandomFloats(1, false).get(0); float number2; double inc1; - for ( ; ; ) { + for (; ; ) { number2 = getRandomFloats(1, false).get(0); - inc1 = (double)number2 - (double)number1; - if (Math.abs(inc1) < (double)Float.MAX_VALUE) { - number2 = number1 + (float)inc1; + inc1 = (double) number2 - (double) number1; + if (Math.abs(inc1) < (double) Float.MAX_VALUE) { + number2 = number1 + (float) inc1; break; } } assertU(adoc(sdoc("id", "1", field, String.valueOf(number1)))); assertU(commit()); - 
assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("inc", (float)inc1)))); + assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("inc", (float) inc1)))); assertU(commit()); - assertQ(req("q", "id:1"), - "//result/doc[1]/float[@name='" + field + "'][.='" + number2 + "']"); + assertQ(req("q", "id:1"), "//result/doc[1]/float[@name='" + field + "'][.='" + number2 + "']"); float number3 = getRandomFloats(1, false).get(0); assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("set", number3)))); assertU(commit()); - assertQ(req("q", "id:1"), - "//result/doc[1]/float[@name='" + field + "'][.='" + number3 + "']"); + assertQ(req("q", "id:1"), "//result/doc[1]/float[@name='" + field + "'][.='" + number3 + "']"); } private void doTestDoublePointFieldsAtomicUpdates(String field) throws Exception { double number1 = getRandomDoubles(1, false).get(0); double number2; BigDecimal inc1; - for ( ; ; ) { + for (; ; ) { number2 = getRandomDoubles(1, false).get(0); inc1 = BigDecimal.valueOf(number2).subtract(BigDecimal.valueOf(number1)); if (inc1.abs().compareTo(BigDecimal.valueOf(Double.MAX_VALUE)) <= 0) { @@ -926,15 +1353,13 @@ private void doTestDoublePointFieldsAtomicUpdates(String field) throws Exception assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("inc", inc1.doubleValue())))); assertU(commit()); - assertQ(req("q", "id:1"), - "//result/doc[1]/double[@name='" + field + "'][.='" + number2 + "']"); + assertQ(req("q", "id:1"), "//result/doc[1]/double[@name='" + field + "'][.='" + number2 + "']"); double number3 = getRandomDoubles(1, false).get(0); assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("set", number3)))); assertU(commit()); - assertQ(req("q", "id:1"), - "//result/doc[1]/double[@name='" + field + "'][.='" + number3 + "']"); + assertQ(req("q", "id:1"), "//result/doc[1]/double[@name='" + field + "'][.='" + number3 + "']"); } @Test @@ -943,7 +1368,7 @@ public void testDoublePointSetQuery() throws Exception { doTestSetQueries("number_p_d_mv", toStringArray(getRandomDoubles(20, false)), true); doTestSetQueries("number_p_d_ni_dv", toStringArray(getRandomDoubles(20, false)), false); } - + // Float @Test @@ -957,13 +1382,13 @@ public void testFloatPointFieldExactQuery() throws Exception { doTestFloatPointFieldExactQuery("number_p_f_ni_dv_ns", false); doTestFloatPointFieldExactQuery("number_p_f_ni_mv_dv", false); } - + @Test public void testFloatPointFieldNonSearchableExactQuery() throws Exception { doTestFloatPointFieldExactQuery("number_p_f_ni", false, false); doTestFloatPointFieldExactQuery("number_p_f_ni_ns", false, false); } - + @Test public void testFloatPointFieldReturn() throws Exception { int numValues = 10 * RANDOM_MULTIPLIER; @@ -971,31 +1396,41 @@ public void testFloatPointFieldReturn() throws Exception { doTestPointFieldReturn("number_p_f", "float", floats); doTestPointFieldReturn("number_p_f_dv_ns", "float", floats); } - + @Test public void testFloatPointFieldRangeQuery() throws Exception { doTestFloatPointFieldRangeQuery("number_p_f", "float", false); doTestFloatPointFieldRangeQuery("number_p_f_ni_ns_dv", "float", false); doTestFloatPointFieldRangeQuery("number_p_f_dv", "float", false); } - + @Test public void testFloatPointFieldNonSearchableRangeQuery() throws Exception { - doTestPointFieldNonSearchableRangeQuery("number_p_f_ni", toStringArray(getRandomFloats(1, false))); - doTestPointFieldNonSearchableRangeQuery("number_p_f_ni_ns", toStringArray(getRandomFloats(1, false))); + doTestPointFieldNonSearchableRangeQuery( + "number_p_f_ni", toStringArray(getRandomFloats(1, false))); + 
doTestPointFieldNonSearchableRangeQuery( + "number_p_f_ni_ns", toStringArray(getRandomFloats(1, false))); int numValues = 2 * RANDOM_MULTIPLIER; - doTestPointFieldNonSearchableRangeQuery("number_p_f_ni_ns_mv", toStringArray(getRandomFloats(numValues, false))); + doTestPointFieldNonSearchableRangeQuery( + "number_p_f_ni_ns_mv", toStringArray(getRandomFloats(numValues, false))); } - + @Test public void testFloatPointFieldSortAndFunction() throws Exception { final SortedSet<String> regexToTest = dynFieldRegexesForType(FloatPointField.class); - final List<String> sequential = Arrays.asList("0.0", "1.0", "2.0", "3.0", "4.0", "5.0", "6.0", "7.0", "8.0", "9.0"); + final List<String> sequential = + Arrays.asList("0.0", "1.0", "2.0", "3.0", "4.0", "5.0", "6.0", "7.0", "8.0", "9.0"); final List<Float> randomFloats = getRandomFloats(10, false); final List<Float> randomFloatsMissing = getRandomFloats(10, true); - - for (String r : Arrays.asList("*_p_f", "*_p_f_dv", "*_p_f_dv_ns", "*_p_f_ni_dv", - "*_p_f_ni_dv_ns", "*_p_f_ni_ns_dv")) { + + for (String r : + Arrays.asList( + "*_p_f", + "*_p_f_dv", + "*_p_f_dv_ns", + "*_p_f_ni_dv", + "*_p_f_ni_dv_ns", + "*_p_f_ni_ns_dv")) { assertTrue(r, regexToTest.remove(r)); String field = r.replace("*", "number"); doTestPointFieldSort(field, sequential); @@ -1003,26 +1438,33 @@ public void testFloatPointFieldSortAndFunction() throws Exception { doTestFloatPointFunctionQuery(field); } - for (String r : Arrays.asList("*_p_f_smf", "*_p_f_dv_smf", "*_p_f_ni_dv_smf", - "*_p_f_sml", "*_p_f_dv_sml", "*_p_f_ni_dv_sml")) { + for (String r : + Arrays.asList( + "*_p_f_smf", + "*_p_f_dv_smf", + "*_p_f_ni_dv_smf", + "*_p_f_sml", + "*_p_f_dv_sml", + "*_p_f_ni_dv_sml")) { assertTrue(r, regexToTest.remove(r)); String field = r.replace("*", "number"); doTestPointFieldSort(field, sequential); doTestPointFieldSort(field, randomFloatsMissing); doTestFloatPointFunctionQuery(field); } - + for (String r : Arrays.asList("*_p_f_ni", "*_p_f_ni_ns")) { assertTrue(r, regexToTest.remove(r)); String field = r.replace("*", "number"); doTestPointFieldSortError(field, "w/o docValues", "42.34"); doTestPointFieldFunctionQueryError(field, "w/o docValues", "42.34"); } - + // multivalued, no docvalues - for (String r : Arrays.asList("*_p_f_mv", "*_p_f_ni_mv", "*_p_f_ni_ns_mv", - "*_p_f_mv_smf", "*_p_f_mv_sml")) { - + for (String r : + Arrays.asList( + "*_p_f_mv", "*_p_f_ni_mv", "*_p_f_ni_ns_mv", "*_p_f_mv_smf", "*_p_f_mv_sml")) { + assertTrue(r, regexToTest.remove(r)); String field = r.replace("*", "number"); doTestPointFieldSortError(field, "w/o docValues", "42.34"); @@ -1032,10 +1474,12 @@ public void testFloatPointFieldSortAndFunction() throws Exception { } // multivalued, w/ docValues - for (String r : Arrays.asList("*_p_f_ni_mv_dv", "*_p_f_ni_dv_ns_mv", - "*_p_f_dv_ns_mv", "*_p_f_mv_dv", - "*_p_f_mv_dv_smf", "*_p_f_ni_mv_dv_smf", - "*_p_f_mv_dv_sml", "*_p_f_ni_mv_dv_sml")) { + for (String r : + Arrays.asList( + "*_p_f_ni_mv_dv", "*_p_f_ni_dv_ns_mv", + "*_p_f_dv_ns_mv", "*_p_f_mv_dv", + "*_p_f_mv_dv_smf", "*_p_f_ni_mv_dv_smf", + "*_p_f_mv_dv_sml", "*_p_f_ni_mv_dv_sml")) { assertTrue(r, regexToTest.remove(r)); String field = r.replace("*", "number"); @@ -1043,21 +1487,22 @@ public void testFloatPointFieldSortAndFunction() throws Exception { // covers this in more depth doTestPointFieldSort(field, sequential); doTestPointFieldSort(field, randomFloats); - + // value source (w/o field(...,min|max)) usage should still error...
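// Editor's sketch, not part of the original patch: the flip side of the error checks
// below. Reducing the multivalued field to a single value with field(f,min|max) is the
// supported usage; this assumes the class's req()/assertQ() helpers and the
// number_p_f_mv_dv field from the test schema, and the XPath is illustrative only.
assertQ(
    req("q", "*:*", "sort", "field(number_p_f_mv_dv,min) asc, id asc"),
    "//result[@name='response']");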
doTestPointFieldFunctionQueryError(field, "multivalued", "42.34"); doTestPointFieldFunctionQueryError(field, "multivalued", "42.34", "66.6"); - - } + } assertEquals("Missing types in the test", Collections.emptySet(), regexToTest); } - + @Test public void testFloatPointFieldFacetField() throws Exception { - doTestPointFieldFacetField("number_p_f", "number_p_f_dv", getSequentialStringArrayWithDoubles(10)); + doTestPointFieldFacetField( + "number_p_f", "number_p_f_dv", getSequentialStringArrayWithDoubles(10)); clearIndex(); assertU(commit()); - doTestPointFieldFacetField("number_p_f", "number_p_f_dv", toStringArray(getRandomFloats(10, false))); + doTestPointFieldFacetField( + "number_p_f", "number_p_f_dv", toStringArray(getRandomFloats(10, false))); } @Test @@ -1073,10 +1518,14 @@ public void testFloatPointFieldRangeFacet() throws Exception { sortedValues = values.stream().sorted().collect(Collectors.toList()); min = sortedValues.get(0); max = sortedValues.get(sortedValues.size() - 1); - buffer = (float)(((double)max - (double)min) / (double)numValues / 2.0D); - gap = (float)(((double)max + (double)buffer - (double)min + (double)buffer) / (double)numBuckets); - } while (max >= Float.MAX_VALUE - buffer || min <= -Float.MAX_VALUE + buffer); - // System.err.println("min: " + min + " max: " + max + " gap: " + gap + " buffer: " + buffer); + buffer = (float) (((double) max - (double) min) / (double) numValues / 2.0D); + gap = + (float) + (((double) max + (double) buffer - (double) min + (double) buffer) + / (double) numBuckets); + } while (max >= Float.MAX_VALUE - buffer || min <= -Float.MAX_VALUE + buffer); + // System.err.println("min: " + min + " max: " + max + " gap: " + gap + " buffer: " + + // buffer); int[] bucketCount = new int[numBuckets]; int bucketNum = 0; float minBucketVal = min - buffer; @@ -1091,75 +1540,152 @@ public void testFloatPointFieldRangeFacet() throws Exception { ++bucketCount[bucketNum]; } - for (int i = 0 ; i < numValues ; i++) { - assertU(adoc("id", String.valueOf(i), - docValuesField, String.valueOf(values.get(i)), nonDocValuesField, String.valueOf(values.get(i)))); + for (int i = 0; i < numValues; i++) { + assertU( + adoc( + "id", + String.valueOf(i), + docValuesField, + String.valueOf(values.get(i)), + nonDocValuesField, + String.valueOf(values.get(i)))); } assertU(commit()); assertTrue(h.getCore().getLatestSchema().getField(docValuesField).hasDocValues()); - assertTrue(h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField); + assertTrue( + h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField); String[] testStrings = new String[numBuckets + 1]; testStrings[numBuckets] = "//*[@numFound='" + numValues + "']"; minBucketVal = min - buffer; - for (int i = 0 ; i < numBuckets ; minBucketVal += gap, ++i) { - testStrings[i] = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField - + "']/lst[@name='counts']/int[@name='" + minBucketVal + "'][.='" + bucketCount[i] + "']"; - } - assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, "facet.range.start", String.valueOf(min - buffer), - "facet.range.end", String.valueOf(max + buffer), "facet.range.gap", String.valueOf(gap)), + for (int i = 0; i < numBuckets; minBucketVal += gap, ++i) { + testStrings[i] = + "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + + docValuesField + + "']/lst[@name='counts']/int[@name='" + + minBucketVal + + "'][.='" + + bucketCount[i] + + "']"; + } + assertQ( + req( + "q", 
+ "*:*", + "facet", + "true", + "facet.range", + docValuesField, + "facet.range.start", + String.valueOf(min - buffer), + "facet.range.end", + String.valueOf(max + buffer), + "facet.range.gap", + String.valueOf(gap)), testStrings); - assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, "facet.range.start", String.valueOf(min - buffer), - "facet.range.end", String.valueOf(max + buffer), "facet.range.gap", String.valueOf(gap), "facet.range.method", "dv"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + docValuesField, + "facet.range.start", + String.valueOf(min - buffer), + "facet.range.end", + String.valueOf(max + buffer), + "facet.range.gap", + String.valueOf(gap), + "facet.range.method", + "dv"), testStrings); assertFalse(h.getCore().getLatestSchema().getField(nonDocValuesField).hasDocValues()); - assertTrue(h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField); + assertTrue( + h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField); minBucketVal = min - buffer; - for (int i = 0 ; i < numBuckets ; minBucketVal += gap, ++i) { - testStrings[i] = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField - + "']/lst[@name='counts']/int[@name='" + minBucketVal + "'][.='" + bucketCount[i] + "']"; + for (int i = 0; i < numBuckets; minBucketVal += gap, ++i) { + testStrings[i] = + "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + + nonDocValuesField + + "']/lst[@name='counts']/int[@name='" + + minBucketVal + + "'][.='" + + bucketCount[i] + + "']"; } // Range Faceting with method = filter should work - assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, "facet.range.start", String.valueOf(min - buffer), - "facet.range.end", String.valueOf(max + buffer), "facet.range.gap", String.valueOf(gap), "facet.range.method", "filter"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + nonDocValuesField, + "facet.range.start", + String.valueOf(min - buffer), + "facet.range.end", + String.valueOf(max + buffer), + "facet.range.gap", + String.valueOf(gap), + "facet.range.method", + "filter"), testStrings); // this should actually use filter method instead of dv - assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, "facet.range.start", String.valueOf(min - buffer), - "facet.range.end", String.valueOf(max + buffer), "facet.range.gap", String.valueOf(gap), "facet.range.method", "dv"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + nonDocValuesField, + "facet.range.start", + String.valueOf(min - buffer), + "facet.range.end", + String.valueOf(max + buffer), + "facet.range.gap", + String.valueOf(gap), + "facet.range.method", + "dv"), testStrings); } @Test public void testFloatPointStats() throws Exception { int numValues = 10 * RANDOM_MULTIPLIER; - // don't produce numbers with exponents, since XPath comparison operators can't handle them: 7 digits of precision - List values = getRandomInts(numValues, false, 9999999).stream() - .map(v -> (float)((double)v * Math.pow(10D, -1 * random().nextInt(8)))).collect(Collectors.toList()); + // don't produce numbers with exponents, since XPath comparison operators can't handle them: 7 + // digits of precision + List values = + getRandomInts(numValues, false, 9999999).stream() + .map(v -> (float) ((double) v * Math.pow(10D, -1 * random().nextInt(8)))) + .collect(Collectors.toList()); // 
System.err.println(Arrays.toString(values.toArray(new Float[values.size()]))); List sortedValues = values.stream().sorted().collect(Collectors.toList()); - double min = (double)sortedValues.get(0); - double max = (double)sortedValues.get(sortedValues.size() - 1); + double min = (double) sortedValues.get(0); + double max = (double) sortedValues.get(sortedValues.size() - 1); String[] valArray = toStringArray(values); doTestPointStats("number_p_f", "number_p_f_dv", valArray, min, max, numValues, 1, 1E-7D); doTestPointStats("number_p_f", "number_p_f_mv_dv", valArray, min, max, numValues, 1, 1E-7D); } - + @Test public void testFloatPointFieldMultiValuedExactQuery() throws Exception { String[] floats = toStringArray(getRandomFloats(20, false)); doTestPointFieldMultiValuedExactQuery("number_p_f_mv", floats); doTestPointFieldMultiValuedExactQuery("number_p_f_ni_mv_dv", floats); } - + @Test public void testFloatPointFieldMultiValuedNonSearchableExactQuery() throws Exception { String[] floats = toStringArray(getRandomFloats(20, false)); doTestPointFieldMultiValuedExactQuery("number_p_f_ni_mv", floats, false); doTestPointFieldMultiValuedExactQuery("number_p_f_ni_ns_mv", floats, false); } - + @Test public void testFloatPointFieldMultiValuedReturn() throws Exception { String[] floats = toStringArray(getRandomFloats(20, false)); @@ -1167,15 +1693,16 @@ public void testFloatPointFieldMultiValuedReturn() throws Exception { doTestPointFieldMultiValuedReturn("number_p_f_ni_mv_dv", "float", floats); doTestPointFieldMultiValuedReturn("number_p_f_dv_ns_mv", "float", floats); } - + @Test public void testFloatPointFieldMultiValuedRangeQuery() throws Exception { - String[] floats = toStringArray(getRandomFloats(20, false).stream().sorted().collect(Collectors.toList())); + String[] floats = + toStringArray(getRandomFloats(20, false).stream().sorted().collect(Collectors.toList())); doTestPointFieldMultiValuedRangeQuery("number_p_f_mv", "float", floats); doTestPointFieldMultiValuedRangeQuery("number_p_f_ni_mv_dv", "float", floats); doTestPointFieldMultiValuedRangeQuery("number_p_f_mv_dv", "float", floats); } - + @Test public void testFloatPointFieldMultiValuedRangeFacet() throws Exception { String docValuesField = "number_p_f_mv_dv"; @@ -1183,13 +1710,13 @@ public void testFloatPointFieldMultiValuedRangeFacet() throws Exception { assertTrue(dvSchemaField.multiValued()); assertTrue(dvSchemaField.hasDocValues()); assertTrue(dvSchemaField.getType() instanceof PointField); - + String nonDocValuesField = "number_p_f_mv"; SchemaField nonDvSchemaField = h.getCore().getLatestSchema().getField(nonDocValuesField); assertTrue(nonDvSchemaField.multiValued()); assertFalse(nonDvSchemaField.hasDocValues()); assertTrue(nonDvSchemaField.getType() instanceof PointField); - + int numValues = 20 * RANDOM_MULTIPLIER; int numBuckets = numValues / 2; List values; @@ -1200,12 +1727,16 @@ public void testFloatPointFieldMultiValuedRangeFacet() throws Exception { sortedValues = toAscendingPosVals(values, true); min = sortedValues.get(0).val; max = sortedValues.get(sortedValues.size() - 1).val; - buffer = (float)(((double)max - (double)min) / (double)numValues / 2.0D); - gap = (float)(((double)max + (double)buffer - (double)min + (double)buffer) / (double)numBuckets); + buffer = (float) (((double) max - (double) min) / (double) numValues / 2.0D); + gap = + (float) + (((double) max + (double) buffer - (double) min + (double) buffer) + / (double) numBuckets); } while (max >= Float.MAX_VALUE - buffer || min <= -Float.MAX_VALUE + buffer); - // 
System.err.println("min: " + min + " max: " + max + " gap: " + gap + " buffer: " + buffer); + // System.err.println("min: " + min + " max: " + max + " gap: " + gap + " buffer: " + + // buffer); List> docIdBucket = new ArrayList<>(numBuckets); - for (int i = 0 ; i < numBuckets ; ++i) { + for (int i = 0; i < numBuckets; ++i) { docIdBucket.add(new HashSet<>()); } int bucketNum = 0; @@ -1218,67 +1749,152 @@ public void testFloatPointFieldMultiValuedRangeFacet() throws Exception { minBucketVal += gap; // System.err.println("bucketNum: " + bucketNum + " minBucketVal: " + minBucketVal); } - docIdBucket.get(bucketNum).add(value.pos / 2); // each doc gets two consecutive values - } - for (int i = 0 ; i < numValues ; i += 2) { - assertU(adoc("id", String.valueOf(i / 2), - docValuesField, String.valueOf(values.get(i)), - docValuesField, String.valueOf(values.get(i + 1)), - nonDocValuesField, String.valueOf(values.get(i)), - nonDocValuesField, String.valueOf(values.get(i + 1)))); + docIdBucket.get(bucketNum).add(value.pos / 2); // each doc gets two consecutive values + } + for (int i = 0; i < numValues; i += 2) { + assertU( + adoc( + "id", + String.valueOf(i / 2), + docValuesField, + String.valueOf(values.get(i)), + docValuesField, + String.valueOf(values.get(i + 1)), + nonDocValuesField, + String.valueOf(values.get(i)), + nonDocValuesField, + String.valueOf(values.get(i + 1)))); } assertU(commit()); assertTrue(h.getCore().getLatestSchema().getField(docValuesField).hasDocValues()); - assertTrue(h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField); + assertTrue( + h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField); String[] testStrings = new String[numBuckets + 1]; minBucketVal = min - buffer; testStrings[numBuckets] = "//*[@numFound='" + (numValues / 2) + "']"; - for (int i = 0 ; i < numBuckets ; minBucketVal += gap, ++i) { - testStrings[i] = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField - + "']/lst[@name='counts']/int[@name='" + minBucketVal + "'][.='" + docIdBucket.get(i).size() + "']"; - } - - assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, - "facet.range.start", String.valueOf(min - buffer), "facet.range.end", String.valueOf(max + buffer), - "facet.range.gap", String.valueOf(gap), "indent", "on"), + for (int i = 0; i < numBuckets; minBucketVal += gap, ++i) { + testStrings[i] = + "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + + docValuesField + + "']/lst[@name='counts']/int[@name='" + + minBucketVal + + "'][.='" + + docIdBucket.get(i).size() + + "']"; + } + + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + docValuesField, + "facet.range.start", + String.valueOf(min - buffer), + "facet.range.end", + String.valueOf(max + buffer), + "facet.range.gap", + String.valueOf(gap), + "indent", + "on"), testStrings); - assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, - "facet.range.start", String.valueOf(min - buffer), "facet.range.end", String.valueOf(max + buffer), - "facet.range.gap", String.valueOf(gap), "facet.range.method", "dv", "indent", "on"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + docValuesField, + "facet.range.start", + String.valueOf(min - buffer), + "facet.range.end", + String.valueOf(max + buffer), + "facet.range.gap", + String.valueOf(gap), + "facet.range.method", + "dv", + "indent", + "on"), testStrings); 
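// Editor's sketch, not part of the original patch: a standalone helper equivalent to
// the tally loop above that fills docIdBucket, showing which bucket the range-facet
// assertions expect a given value to land in. All names are illustrative.
private static int expectedBucket(float value, float min, float buffer, float gap) {
  int bucketNum = 0;
  float minBucketVal = min - buffer; // corresponds to facet.range.start
  // walk forward in gap-sized steps until value falls inside the current bucket
  while (value - minBucketVal >= gap) {
    ++bucketNum;
    minBucketVal += gap;
  }
  return bucketNum;
}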
assertFalse(h.getCore().getLatestSchema().getField(nonDocValuesField).hasDocValues()); - assertTrue(h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField); + assertTrue( + h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField); minBucketVal = min - buffer; - for (int i = 0 ; i < numBuckets ; minBucketVal += gap, ++i) { - testStrings[i] = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField - + "']/lst[@name='counts']/int[@name='" + minBucketVal + "'][.='" + docIdBucket.get(i).size() + "']"; + for (int i = 0; i < numBuckets; minBucketVal += gap, ++i) { + testStrings[i] = + "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + + nonDocValuesField + + "']/lst[@name='counts']/int[@name='" + + minBucketVal + + "'][.='" + + docIdBucket.get(i).size() + + "']"; } // Range Faceting with method = filter should work - assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, - "facet.range.start", String.valueOf(min - buffer), "facet.range.end", String.valueOf(max + buffer), - "facet.range.gap", String.valueOf(gap), "facet.range.method", "filter", "indent", "on"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + nonDocValuesField, + "facet.range.start", + String.valueOf(min - buffer), + "facet.range.end", + String.valueOf(max + buffer), + "facet.range.gap", + String.valueOf(gap), + "facet.range.method", + "filter", + "indent", + "on"), testStrings); // this should actually use filter method instead of dv - assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, - "facet.range.start", String.valueOf(min - buffer), "facet.range.end", String.valueOf(max + buffer), - "facet.range.gap", String.valueOf(gap), "facet.range.method", "dv", "indent", "on"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + nonDocValuesField, + "facet.range.start", + String.valueOf(min - buffer), + "facet.range.end", + String.valueOf(max + buffer), + "facet.range.gap", + String.valueOf(gap), + "facet.range.method", + "dv", + "indent", + "on"), testStrings); } - + @Test public void testFloatPointFieldMultiValuedFacetField() throws Exception { - doTestPointFieldMultiValuedFacetField("number_p_f_mv", "number_p_f_mv_dv", getSequentialStringArrayWithDoubles(20)); - doTestPointFieldMultiValuedFacetField("number_p_f_mv", "number_p_f_mv_dv", toStringArray(getRandomFloats(20, false))); + doTestPointFieldMultiValuedFacetField( + "number_p_f_mv", "number_p_f_mv_dv", getSequentialStringArrayWithDoubles(20)); + doTestPointFieldMultiValuedFacetField( + "number_p_f_mv", "number_p_f_mv_dv", toStringArray(getRandomFloats(20, false))); } - + @Test public void testFloatPointMultiValuedFunctionQuery() throws Exception { - doTestPointMultiValuedFunctionQuery("number_p_f_mv", "number_p_f_mv_dv", "float", getSequentialStringArrayWithDoubles(20)); - doTestPointMultiValuedFunctionQuery("number_p_f_mv", "number_p_f_mv_dv", "float", toAscendingStringArray(getRandomFloats(20, false), true)); + doTestPointMultiValuedFunctionQuery( + "number_p_f_mv", "number_p_f_mv_dv", "float", getSequentialStringArrayWithDoubles(20)); + doTestPointMultiValuedFunctionQuery( + "number_p_f_mv", + "number_p_f_mv_dv", + "float", + toAscendingStringArray(getRandomFloats(20, false), true)); } - - + @Test public void testFloatPointFieldsAtomicUpdates() throws Exception { if (!Boolean.getBoolean("enable.update.log")) { @@ -1288,7 +1904,7 @@ public void 
testFloatPointFieldsAtomicUpdates() throws Exception { doTestFloatPointFieldsAtomicUpdates("number_p_f_dv"); doTestFloatPointFieldsAtomicUpdates("number_p_f_dv_ns"); } - + @Test public void testMultiValuedFloatPointFieldsAtomicUpdates() throws Exception { if (!Boolean.getBoolean("enable.update.log")) { @@ -1306,16 +1922,16 @@ public void testFloatPointSetQuery() throws Exception { doTestSetQueries("number_p_f_mv", toStringArray(getRandomFloats(20, false)), true); doTestSetQueries("number_p_f_ni_dv", toStringArray(getRandomFloats(20, false)), false); } - + @Test public void testFloatPointFieldNotIndexed() throws Exception { String[] floats = toStringArray(getRandomFloats(10, false)); doTestFieldNotIndexed("number_p_f_ni", floats); doTestFieldNotIndexed("number_p_f_ni_mv", floats); } - + // Long - + @Test public void testLongPointFieldExactQuery() throws Exception { doTestIntPointFieldExactQuery("number_p_l", true); @@ -1327,13 +1943,13 @@ public void testLongPointFieldExactQuery() throws Exception { doTestIntPointFieldExactQuery("number_p_l_ni_dv_ns", true); doTestIntPointFieldExactQuery("number_p_l_ni_mv_dv", true); } - + @Test public void testLongPointFieldNonSearchableExactQuery() throws Exception { doTestIntPointFieldExactQuery("number_p_l_ni", true, false); doTestIntPointFieldExactQuery("number_p_l_ni_ns", true, false); } - + @Test public void testLongPointFieldReturn() throws Exception { int numValues = 10 * RANDOM_MULTIPLIER; @@ -1341,33 +1957,51 @@ public void testLongPointFieldReturn() throws Exception { doTestPointFieldReturn("number_p_l", "long", longs); doTestPointFieldReturn("number_p_l_dv_ns", "long", longs); } - + @Test public void testLongPointFieldRangeQuery() throws Exception { doTestIntPointFieldRangeQuery("number_p_l", "long", true); doTestIntPointFieldRangeQuery("number_p_l_ni_ns_dv", "long", true); doTestIntPointFieldRangeQuery("number_p_l_dv", "long", true); } - + @Test public void testLongPointFieldNonSearchableRangeQuery() throws Exception { - doTestPointFieldNonSearchableRangeQuery("number_p_l_ni", toStringArray(getRandomLongs(1, false))); - doTestPointFieldNonSearchableRangeQuery("number_p_l_ni_ns", toStringArray(getRandomLongs(1, false))); + doTestPointFieldNonSearchableRangeQuery( + "number_p_l_ni", toStringArray(getRandomLongs(1, false))); + doTestPointFieldNonSearchableRangeQuery( + "number_p_l_ni_ns", toStringArray(getRandomLongs(1, false))); int numValues = 2 * RANDOM_MULTIPLIER; - doTestPointFieldNonSearchableRangeQuery("number_p_l_ni_ns_mv", toStringArray(getRandomLongs(numValues, false))); + doTestPointFieldNonSearchableRangeQuery( + "number_p_l_ni_ns_mv", toStringArray(getRandomLongs(numValues, false))); } @Test public void testLongPointFieldSortAndFunction() throws Exception { final SortedSet regexToTest = dynFieldRegexesForType(LongPointField.class); - final List vals = Arrays.asList((long)Integer.MIN_VALUE, - 1L, 2L, 3L, 4L, 5L, 6L, 7L, - (long)Integer.MAX_VALUE, Long.MAX_VALUE); + final List vals = + Arrays.asList( + (long) Integer.MIN_VALUE, + 1L, + 2L, + 3L, + 4L, + 5L, + 6L, + 7L, + (long) Integer.MAX_VALUE, + Long.MAX_VALUE); final List randomLongs = getRandomLongs(10, false); final List randomLongsMissing = getRandomLongs(10, true); - - for (String r : Arrays.asList("*_p_l", "*_p_l_dv", "*_p_l_dv_ns", "*_p_l_ni_dv", - "*_p_l_ni_dv_ns", "*_p_l_ni_ns_dv")) { + + for (String r : + Arrays.asList( + "*_p_l", + "*_p_l_dv", + "*_p_l_dv_ns", + "*_p_l_ni_dv", + "*_p_l_ni_dv_ns", + "*_p_l_ni_ns_dv")) { assertTrue(r, regexToTest.remove(r)); String field = 
r.replace("*", "number"); doTestPointFieldSort(field, vals); @@ -1375,40 +2009,53 @@ public void testLongPointFieldSortAndFunction() throws Exception { doTestLongPointFunctionQuery(field); } - for (String r : Arrays.asList("*_p_l_smf", "*_p_l_dv_smf", "*_p_l_ni_dv_smf", - "*_p_l_sml", "*_p_l_dv_sml", "*_p_l_ni_dv_sml")) { + for (String r : + Arrays.asList( + "*_p_l_smf", + "*_p_l_dv_smf", + "*_p_l_ni_dv_smf", + "*_p_l_sml", + "*_p_l_dv_sml", + "*_p_l_ni_dv_sml")) { assertTrue(r, regexToTest.remove(r)); String field = r.replace("*", "number"); doTestPointFieldSort(field, vals); doTestPointFieldSort(field, randomLongsMissing); doTestLongPointFunctionQuery(field); } - + // no docvalues for (String r : Arrays.asList("*_p_l_ni", "*_p_l_ni_ns")) { assertTrue(r, regexToTest.remove(r)); String field = r.replace("*", "number"); doTestPointFieldSortError(field, "w/o docValues", toStringArray(getRandomLongs(1, false))); - doTestPointFieldFunctionQueryError(field, "w/o docValues", toStringArray(getRandomLongs(1, false))); + doTestPointFieldFunctionQueryError( + field, "w/o docValues", toStringArray(getRandomLongs(1, false))); } - + // multivalued, no docvalues - for (String r : Arrays.asList("*_p_l_mv", "*_p_l_ni_mv", "*_p_l_ni_ns_mv", - "*_p_l_mv_smf", "*_p_l_mv_sml")) { - + for (String r : + Arrays.asList( + "*_p_l_mv", "*_p_l_ni_mv", "*_p_l_ni_ns_mv", "*_p_l_mv_smf", "*_p_l_mv_sml")) { + assertTrue(r, regexToTest.remove(r)); String field = r.replace("*", "number"); doTestPointFieldSortError(field, "w/o docValues", toStringArray(getRandomLongs(1, false))); int numValues = 2 * RANDOM_MULTIPLIER; - doTestPointFieldSortError(field, "w/o docValues", toStringArray(getRandomLongs(numValues, false))); - doTestPointFieldFunctionQueryError(field, "multivalued", toStringArray(getRandomLongs(1, false))); - doTestPointFieldFunctionQueryError(field, "multivalued", toStringArray(getRandomLongs(numValues, false))); + doTestPointFieldSortError( + field, "w/o docValues", toStringArray(getRandomLongs(numValues, false))); + doTestPointFieldFunctionQueryError( + field, "multivalued", toStringArray(getRandomLongs(1, false))); + doTestPointFieldFunctionQueryError( + field, "multivalued", toStringArray(getRandomLongs(numValues, false))); } // multivalued, w/ docValues - for (String r : Arrays.asList("*_p_l_ni_mv_dv", "*_p_l_ni_dv_ns_mv", - "*_p_l_dv_ns_mv", "*_p_l_mv_dv", - "*_p_l_mv_dv_smf", "*_p_l_ni_mv_dv_smf", - "*_p_l_mv_dv_sml", "*_p_l_ni_mv_dv_sml")) { + for (String r : + Arrays.asList( + "*_p_l_ni_mv_dv", "*_p_l_ni_dv_ns_mv", + "*_p_l_dv_ns_mv", "*_p_l_mv_dv", + "*_p_l_mv_dv_smf", "*_p_l_ni_mv_dv_smf", + "*_p_l_mv_dv_sml", "*_p_l_ni_mv_dv_sml")) { assertTrue(r, regexToTest.remove(r)); String field = r.replace("*", "number"); @@ -1420,20 +2067,23 @@ public void testLongPointFieldSortAndFunction() throws Exception { // value source (w/o field(...,min|max)) usage should still error...
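// Editor's sketch, not part of the original patch: the shape of request the helpers
// below expect to fail. A bare value source over a multivalued point field should be
// rejected as a bad request; assumes SolrTestCaseJ4's assertQEx()/req() helpers and
// the number_p_l_mv_dv field from the test schema.
assertQEx(
    "bare value source on a multivalued point field should be rejected",
    req("q", "{!func}number_p_l_mv_dv", "fl", "id"),
    SolrException.ErrorCode.BAD_REQUEST);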
int numValues = 2 * RANDOM_MULTIPLIER; - doTestPointFieldFunctionQueryError(field, "multivalued", toStringArray(getRandomLongs(1, false))); - doTestPointFieldFunctionQueryError(field, "multivalued", toStringArray(getRandomLongs(numValues, false))); + doTestPointFieldFunctionQueryError( + field, "multivalued", toStringArray(getRandomLongs(1, false))); + doTestPointFieldFunctionQueryError( + field, "multivalued", toStringArray(getRandomLongs(numValues, false))); } assertEquals("Missing types in the test", Collections.emptySet(), regexToTest); } - + @Test public void testLongPointFieldFacetField() throws Exception { doTestPointFieldFacetField("number_p_l", "number_p_l_dv", getSequentialStringArrayWithInts(10)); clearIndex(); assertU(commit()); - doTestPointFieldFacetField("number_p_l", "number_p_l_dv", toStringArray(getRandomLongs(10, false))); + doTestPointFieldFacetField( + "number_p_l", "number_p_l_dv", toStringArray(getRandomLongs(10, false))); } - + @Test public void testLongPointFieldRangeFacet() throws Exception { String docValuesField = "number_p_l_dv"; @@ -1446,10 +2096,13 @@ public void testLongPointFieldRangeFacet() throws Exception { do { values = getRandomLongs(numValues, false); sortedValues = values.stream().sorted().collect(Collectors.toList()); - } while ((max = sortedValues.get(sortedValues.size() - 1)) >= Long.MAX_VALUE - numValues); // leave room for rounding + } while ((max = sortedValues.get(sortedValues.size() - 1)) + >= Long.MAX_VALUE - numValues); // leave room for rounding long min = sortedValues.get(0); - BigInteger bigIntGap = BigInteger.valueOf(max + numValues).subtract(BigInteger.valueOf(min)) - .divide(BigInteger.valueOf(numBuckets)); + BigInteger bigIntGap = + BigInteger.valueOf(max + numValues) + .subtract(BigInteger.valueOf(min)) + .divide(BigInteger.valueOf(numBuckets)); long gap = bigIntGap.longValueExact(); int[] bucketCount = new int[numBuckets]; int bucketNum = 0; @@ -1458,7 +2111,10 @@ public void testLongPointFieldRangeFacet() throws Exception { // System.err.println("bucketNum: " + bucketNum + " minBucketVal: " + minBucketVal); for (Long value : sortedValues) { // System.err.println("value: " + value); - while (BigInteger.valueOf(value).subtract(BigInteger.valueOf(minBucketVal)).compareTo(bigIntGap) > 0) { + while (BigInteger.valueOf(value) + .subtract(BigInteger.valueOf(minBucketVal)) + .compareTo(bigIntGap) + > 0) { ++bucketNum; minBucketVal += gap; // System.err.println("bucketNum: " + bucketNum + " minBucketVal: " + minBucketVal); @@ -1466,72 +2122,147 @@ public void testLongPointFieldRangeFacet() throws Exception { ++bucketCount[bucketNum]; } - for (int i = 0 ; i < numValues ; i++) { - assertU(adoc("id", String.valueOf(i), docValuesField, String.valueOf(values.get(i)), nonDocValuesField, String.valueOf(values.get(i)))); + for (int i = 0; i < numValues; i++) { + assertU( + adoc( + "id", + String.valueOf(i), + docValuesField, + String.valueOf(values.get(i)), + nonDocValuesField, + String.valueOf(values.get(i)))); } assertU(commit()); assertTrue(h.getCore().getLatestSchema().getField(docValuesField).hasDocValues()); - assertTrue(h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField); + assertTrue( + h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField); String[] testStrings = new String[numBuckets + 1]; testStrings[numBuckets] = "//*[@numFound='" + numValues + "']"; minBucketVal = min; - for (int i = 0 ; i < numBuckets ; minBucketVal += gap, ++i) { - testStrings[i] = 
"//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField - + "']/lst[@name='counts']/int[@name='" + minBucketVal + "'][.='" + bucketCount[i] + "']"; - } - assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, "facet.range.start", String.valueOf(min), - "facet.range.end", String.valueOf(max), "facet.range.gap", String.valueOf(gap)), + for (int i = 0; i < numBuckets; minBucketVal += gap, ++i) { + testStrings[i] = + "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + + docValuesField + + "']/lst[@name='counts']/int[@name='" + + minBucketVal + + "'][.='" + + bucketCount[i] + + "']"; + } + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + docValuesField, + "facet.range.start", + String.valueOf(min), + "facet.range.end", + String.valueOf(max), + "facet.range.gap", + String.valueOf(gap)), testStrings); - assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, "facet.range.start", String.valueOf(min), - "facet.range.end", String.valueOf(max), "facet.range.gap", String.valueOf(gap), "facet.range.method", "dv"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + docValuesField, + "facet.range.start", + String.valueOf(min), + "facet.range.end", + String.valueOf(max), + "facet.range.gap", + String.valueOf(gap), + "facet.range.method", + "dv"), testStrings); assertFalse(h.getCore().getLatestSchema().getField(nonDocValuesField).hasDocValues()); - assertTrue(h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField); + assertTrue( + h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField); minBucketVal = min; - for (int i = 0 ; i < numBuckets ; minBucketVal += gap, ++i) { - testStrings[i] = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField - + "']/lst[@name='counts']/int[@name='" + minBucketVal + "'][.='" + bucketCount[i] + "']"; + for (int i = 0; i < numBuckets; minBucketVal += gap, ++i) { + testStrings[i] = + "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + + nonDocValuesField + + "']/lst[@name='counts']/int[@name='" + + minBucketVal + + "'][.='" + + bucketCount[i] + + "']"; } // Range Faceting with method = filter should work - assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, "facet.range.start", String.valueOf(min), - "facet.range.end", String.valueOf(max), "facet.range.gap", String.valueOf(gap), "facet.range.method", "filter"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + nonDocValuesField, + "facet.range.start", + String.valueOf(min), + "facet.range.end", + String.valueOf(max), + "facet.range.gap", + String.valueOf(gap), + "facet.range.method", + "filter"), testStrings); // this should actually use filter method instead of dv - assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, "facet.range.start", String.valueOf(min), - "facet.range.end", String.valueOf(max), "facet.range.gap", String.valueOf(gap), "facet.range.method", "dv"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + nonDocValuesField, + "facet.range.start", + String.valueOf(min), + "facet.range.end", + String.valueOf(max), + "facet.range.gap", + String.valueOf(gap), + "facet.range.method", + "dv"), testStrings); } - + @Test public void testLongPointStats() throws Exception { int numValues = 10 * RANDOM_MULTIPLIER; // don't produce numbers with exponents, since XPath comparison operators 
can't handle them - List values = getRandomLongs(numValues, false, 9999999L); + List values = getRandomLongs(numValues, false, 9999999L); List sortedValues = values.stream().sorted().collect(Collectors.toList()); - double min = (double)sortedValues.get(0); - double max = (double)sortedValues.get(sortedValues.size() - 1); + double min = (double) sortedValues.get(0); + double max = (double) sortedValues.get(sortedValues.size() - 1); String[] valArray = toStringArray(values); doTestPointStats("number_p_l", "number_p_l_dv", valArray, min, max, numValues, 1, 0D); doTestPointStats("number_p_l", "number_p_l_mv_dv", valArray, min, max, numValues, 1, 0D); } - + @Test public void testLongPointFieldMultiValuedExactQuery() throws Exception { String[] ints = toStringArray(getRandomInts(20, false)); doTestPointFieldMultiValuedExactQuery("number_p_l_mv", ints); doTestPointFieldMultiValuedExactQuery("number_p_l_ni_mv_dv", ints); } - + @Test public void testLongPointFieldMultiValuedNonSearchableExactQuery() throws Exception { String[] longs = toStringArray(getRandomLongs(20, false)); doTestPointFieldMultiValuedExactQuery("number_p_l_ni_mv", longs, false); doTestPointFieldMultiValuedExactQuery("number_p_l_ni_ns_mv", longs, false); } - + @Test public void testLongPointFieldMultiValuedReturn() throws Exception { String[] longs = toStringArray(getRandomLongs(20, false)); @@ -1539,21 +2270,24 @@ public void testLongPointFieldMultiValuedReturn() throws Exception { doTestPointFieldMultiValuedReturn("number_p_l_ni_mv_dv", "long", longs); doTestPointFieldMultiValuedReturn("number_p_l_dv_ns_mv", "long", longs); } - + @Test public void testLongPointFieldMultiValuedRangeQuery() throws Exception { - String[] longs = toStringArray(getRandomLongs(20, false).stream().sorted().collect(Collectors.toList())); + String[] longs = + toStringArray(getRandomLongs(20, false).stream().sorted().collect(Collectors.toList())); doTestPointFieldMultiValuedRangeQuery("number_p_l_mv", "long", longs); doTestPointFieldMultiValuedRangeQuery("number_p_l_ni_mv_dv", "long", longs); doTestPointFieldMultiValuedRangeQuery("number_p_l_mv_dv", "long", longs); } - + @Test public void testLongPointFieldMultiValuedFacetField() throws Exception { - doTestPointFieldMultiValuedFacetField("number_p_l_mv", "number_p_l_mv_dv", getSequentialStringArrayWithInts(20)); - doTestPointFieldMultiValuedFacetField("number_p_l_mv", "number_p_l_mv_dv", toStringArray(getRandomLongs(20, false))); + doTestPointFieldMultiValuedFacetField( + "number_p_l_mv", "number_p_l_mv_dv", getSequentialStringArrayWithInts(20)); + doTestPointFieldMultiValuedFacetField( + "number_p_l_mv", "number_p_l_mv_dv", toStringArray(getRandomLongs(20, false))); } - + @Test public void testLongPointFieldMultiValuedRangeFacet() throws Exception { String docValuesField = "number_p_l_mv_dv"; @@ -1566,12 +2300,16 @@ public void testLongPointFieldMultiValuedRangeFacet() throws Exception { do { values = getRandomLongs(numValues, false); sortedValues = toAscendingPosVals(values, true); - } while ((max = sortedValues.get(sortedValues.size() - 1).val) >= Long.MAX_VALUE - numValues); // leave room for rounding + } while ((max = sortedValues.get(sortedValues.size() - 1).val) + >= Long.MAX_VALUE - numValues); // leave room for rounding long min = sortedValues.get(0).val; - long gap = BigInteger.valueOf(max + numValues).subtract(BigInteger.valueOf(min)) - .divide(BigInteger.valueOf(numBuckets)).longValueExact(); + long gap = + BigInteger.valueOf(max + numValues) + .subtract(BigInteger.valueOf(min)) + 
.divide(BigInteger.valueOf(numBuckets)) + .longValueExact(); List> docIdBucket = new ArrayList<>(numBuckets); - for (int i = 0 ; i < numBuckets ; ++i) { + for (int i = 0; i < numBuckets; ++i) { docIdBucket.add(new HashSet<>()); } int bucketNum = 0; @@ -1581,61 +2319,144 @@ public void testLongPointFieldMultiValuedRangeFacet() throws Exception { ++bucketNum; minBucketVal += gap; } - docIdBucket.get(bucketNum).add(value.pos / 2); // each doc gets two consecutive values - } - for (int i = 0 ; i < numValues ; i += 2) { - assertU(adoc("id", String.valueOf(i / 2), - docValuesField, String.valueOf(values.get(i)), - docValuesField, String.valueOf(values.get(i + 1)), - nonDocValuesField, String.valueOf(values.get(i)), - nonDocValuesField, String.valueOf(values.get(i + 1)))); + docIdBucket.get(bucketNum).add(value.pos / 2); // each doc gets two consecutive values + } + for (int i = 0; i < numValues; i += 2) { + assertU( + adoc( + "id", + String.valueOf(i / 2), + docValuesField, + String.valueOf(values.get(i)), + docValuesField, + String.valueOf(values.get(i + 1)), + nonDocValuesField, + String.valueOf(values.get(i)), + nonDocValuesField, + String.valueOf(values.get(i + 1)))); } assertU(commit()); assertTrue(h.getCore().getLatestSchema().getField(docValuesField).hasDocValues()); - assertTrue(h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField); + assertTrue( + h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField); String[] testStrings = new String[numBuckets + 1]; testStrings[numBuckets] = "//*[@numFound='" + (numValues / 2) + "']"; minBucketVal = min; - for (int i = 0 ; i < numBuckets ; minBucketVal += gap, ++i) { - testStrings[i] = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField - + "']/lst[@name='counts']/int[@name='" + minBucketVal + "'][.='" + docIdBucket.get(i).size() + "']"; - } - - assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, - "facet.range.start", String.valueOf(min), "facet.range.end", String.valueOf(max), - "facet.range.gap", String.valueOf(gap), "indent", "on"), + for (int i = 0; i < numBuckets; minBucketVal += gap, ++i) { + testStrings[i] = + "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + + docValuesField + + "']/lst[@name='counts']/int[@name='" + + minBucketVal + + "'][.='" + + docIdBucket.get(i).size() + + "']"; + } + + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + docValuesField, + "facet.range.start", + String.valueOf(min), + "facet.range.end", + String.valueOf(max), + "facet.range.gap", + String.valueOf(gap), + "indent", + "on"), testStrings); - assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, - "facet.range.start", String.valueOf(min), "facet.range.end", String.valueOf(max), - "facet.range.gap", String.valueOf(gap), "facet.range.method", "dv", "indent", "on"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + docValuesField, + "facet.range.start", + String.valueOf(min), + "facet.range.end", + String.valueOf(max), + "facet.range.gap", + String.valueOf(gap), + "facet.range.method", + "dv", + "indent", + "on"), testStrings); assertFalse(h.getCore().getLatestSchema().getField(nonDocValuesField).hasDocValues()); - assertTrue(h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField); + assertTrue( + h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField); minBucketVal = min; - for (int i = 0 ; i 
< numBuckets ; minBucketVal += gap, ++i) { - testStrings[i] = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField - + "']/lst[@name='counts']/int[@name='" + minBucketVal + "'][.='" + docIdBucket.get(i).size() + "']"; + for (int i = 0; i < numBuckets; minBucketVal += gap, ++i) { + testStrings[i] = + "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + + nonDocValuesField + + "']/lst[@name='counts']/int[@name='" + + minBucketVal + + "'][.='" + + docIdBucket.get(i).size() + + "']"; } // Range Faceting with method = filter should work - assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, - "facet.range.start", String.valueOf(min), "facet.range.end", String.valueOf(max), - "facet.range.gap", String.valueOf(gap), "facet.range.method", "filter", "indent", "on"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + nonDocValuesField, + "facet.range.start", + String.valueOf(min), + "facet.range.end", + String.valueOf(max), + "facet.range.gap", + String.valueOf(gap), + "facet.range.method", + "filter", + "indent", + "on"), testStrings); // this should actually use filter method instead of dv - assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, - "facet.range.start", String.valueOf(min), "facet.range.end", String.valueOf(max), - "facet.range.gap", String.valueOf(gap), "facet.range.method", "dv", "indent", "on"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + nonDocValuesField, + "facet.range.start", + String.valueOf(min), + "facet.range.end", + String.valueOf(max), + "facet.range.gap", + String.valueOf(gap), + "facet.range.method", + "dv", + "indent", + "on"), testStrings); } - + @Test public void testLongPointMultiValuedFunctionQuery() throws Exception { - doTestPointMultiValuedFunctionQuery("number_p_l_mv", "number_p_l_mv_dv", "long", getSequentialStringArrayWithInts(20)); - doTestPointMultiValuedFunctionQuery("number_p_l_mv", "number_p_l_mv_dv", "long", + doTestPointMultiValuedFunctionQuery( + "number_p_l_mv", "number_p_l_mv_dv", "long", getSequentialStringArrayWithInts(20)); + doTestPointMultiValuedFunctionQuery( + "number_p_l_mv", + "number_p_l_mv_dv", + "long", toStringArray(getRandomLongs(20, false).stream().sorted().collect(Collectors.toList()))); } - + @Test public void testLongPointFieldsAtomicUpdates() throws Exception { if (!Boolean.getBoolean("enable.update.log")) { @@ -1645,7 +2466,7 @@ public void testLongPointFieldsAtomicUpdates() throws Exception { doTestLongPointFieldsAtomicUpdates("number_p_l_dv"); doTestLongPointFieldsAtomicUpdates("number_p_l_dv_ns"); } - + @Test public void testMultiValuedLongPointFieldsAtomicUpdates() throws Exception { if (!Boolean.getBoolean("enable.update.log")) { @@ -1656,14 +2477,14 @@ public void testMultiValuedLongPointFieldsAtomicUpdates() throws Exception { doTestMultiValuedPointFieldsAtomicUpdates("number_p_l_ni_mv_dv", "long", longs); doTestMultiValuedPointFieldsAtomicUpdates("number_p_l_dv_ns_mv", "long", longs); } - + @Test public void testLongPointSetQuery() throws Exception { doTestSetQueries("number_p_l", toStringArray(getRandomLongs(20, false)), false); doTestSetQueries("number_p_l_mv", toStringArray(getRandomLongs(20, false)), true); doTestSetQueries("number_p_l_ni_dv", toStringArray(getRandomLongs(20, false)), false); } - + @Test public void testLongPointFieldNotIndexed() throws Exception { String[] longs = toStringArray(getRandomLongs(10, false)); @@ -1683,20 +2504,27 @@ private String 
getRandomDateMaybeWithMath() { } return date; } - + @Test public void testDatePointFieldExactQuery() throws Exception { String baseDate = getRandomDateMaybeWithMath(); - for (String field : Arrays.asList("number_p_dt","number_p_dt_mv","number_p_dt_dv", - "number_p_dt_mv_dv", "number_p_dt_ni_dv", "number_p_dt_ni_ns_dv", "number_p_dt_ni_mv_dv")) { + for (String field : + Arrays.asList( + "number_p_dt", + "number_p_dt_mv", + "number_p_dt_dv", + "number_p_dt_mv_dv", + "number_p_dt_ni_dv", + "number_p_dt_ni_ns_dv", + "number_p_dt_ni_mv_dv")) { doTestDatePointFieldExactQuery(field, baseDate); } } + @Test public void testDatePointFieldNonSearchableExactQuery() throws Exception { doTestDatePointFieldExactQuery("number_p_dt_ni", "1995-12-31T23:59:59Z", false); doTestDatePointFieldExactQuery("number_p_dt_ni_ns", "1995-12-31T23:59:59Z", false); - } @Test @@ -1712,13 +2540,16 @@ public void testDatePointFieldRangeQuery() throws Exception { doTestDatePointFieldRangeQuery("number_p_dt"); doTestDatePointFieldRangeQuery("number_p_dt_ni_ns_dv"); } - + @Test public void testDatePointFieldNonSearchableRangeQuery() throws Exception { - doTestPointFieldNonSearchableRangeQuery("number_p_dt_ni", toStringArray(getRandomInstants(1, false))); - doTestPointFieldNonSearchableRangeQuery("number_p_dt_ni_ns", toStringArray(getRandomInstants(1, false))); + doTestPointFieldNonSearchableRangeQuery( + "number_p_dt_ni", toStringArray(getRandomInstants(1, false))); + doTestPointFieldNonSearchableRangeQuery( + "number_p_dt_ni_ns", toStringArray(getRandomInstants(1, false))); int numValues = 2 * RANDOM_MULTIPLIER; - doTestPointFieldNonSearchableRangeQuery("number_p_dt_ni_ns_mv", toStringArray(getRandomInstants(numValues, false))); + doTestPointFieldNonSearchableRangeQuery( + "number_p_dt_ni_ns_mv", toStringArray(getRandomInstants(numValues, false))); } @Test @@ -1727,49 +2558,65 @@ public void testDatePointFieldSortAndFunction() throws Exception { final List sequential = Arrays.asList(getSequentialStringArrayWithDates(10)); final List randomDates = getRandomInstants(10, false); final List randomDatesMissing = getRandomInstants(10, true); - - for (String r : Arrays.asList("*_p_dt", "*_p_dt_dv", "*_p_dt_dv_ns", "*_p_dt_ni_dv", - "*_p_dt_ni_dv_ns", "*_p_dt_ni_ns_dv")) { + + for (String r : + Arrays.asList( + "*_p_dt", + "*_p_dt_dv", + "*_p_dt_dv_ns", + "*_p_dt_ni_dv", + "*_p_dt_ni_dv_ns", + "*_p_dt_ni_ns_dv")) { assertTrue(r, regexToTest.remove(r)); String field = r.replace("*", "number"); doTestPointFieldSort(field, sequential); doTestPointFieldSort(field, randomDates); doTestDatePointFunctionQuery(field); } - for (String r : Arrays.asList("*_p_dt_smf", "*_p_dt_dv_smf", "*_p_dt_ni_dv_smf", - "*_p_dt_sml", "*_p_dt_dv_sml", "*_p_dt_ni_dv_sml")) { + for (String r : + Arrays.asList( + "*_p_dt_smf", + "*_p_dt_dv_smf", + "*_p_dt_ni_dv_smf", + "*_p_dt_sml", + "*_p_dt_dv_sml", + "*_p_dt_ni_dv_sml")) { assertTrue(r, regexToTest.remove(r)); String field = r.replace("*", "number"); doTestPointFieldSort(field, sequential); doTestPointFieldSort(field, randomDatesMissing); doTestDatePointFunctionQuery(field); } - + for (String r : Arrays.asList("*_p_dt_ni", "*_p_dt_ni_ns")) { assertTrue(r, regexToTest.remove(r)); String field = r.replace("*", "number"); doTestPointFieldSortError(field, "w/o docValues", "1995-12-31T23:59:59Z"); doTestPointFieldFunctionQueryError(field, "w/o docValues", "1995-12-31T23:59:59Z"); } - + // multivalued, no docvalues - for (String r : Arrays.asList("*_p_dt_mv", "*_p_dt_ni_mv", "*_p_dt_ni_ns_mv", - "*_p_dt_mv_smf", 
"*_p_dt_mv_sml")) { - + for (String r : + Arrays.asList( + "*_p_dt_mv", "*_p_dt_ni_mv", "*_p_dt_ni_ns_mv", "*_p_dt_mv_smf", "*_p_dt_mv_sml")) { + assertTrue(r, regexToTest.remove(r)); String field = r.replace("*", "number"); doTestPointFieldSortError(field, "w/o docValues", "1995-12-31T23:59:59Z"); - doTestPointFieldSortError(field, "w/o docValues", "1995-12-31T23:59:59Z", "2000-12-31T23:59:59Z"); + doTestPointFieldSortError( + field, "w/o docValues", "1995-12-31T23:59:59Z", "2000-12-31T23:59:59Z"); doTestPointFieldFunctionQueryError(field, "multivalued", "1995-12-31T23:59:59Z"); - doTestPointFieldFunctionQueryError(field, "multivalued", "1995-12-31T23:59:59Z", "2000-12-31T23:59:59Z"); - + doTestPointFieldFunctionQueryError( + field, "multivalued", "1995-12-31T23:59:59Z", "2000-12-31T23:59:59Z"); } // multivalued, w/ docValues - for (String r : Arrays.asList("*_p_dt_ni_mv_dv", "*_p_dt_ni_dv_ns_mv", - "*_p_dt_dv_ns_mv", "*_p_dt_mv_dv", - "*_p_dt_mv_dv_smf", "*_p_dt_ni_mv_dv_smf", - "*_p_dt_mv_dv_sml", "*_p_dt_ni_mv_dv_sml")) { + for (String r : + Arrays.asList( + "*_p_dt_ni_mv_dv", "*_p_dt_ni_dv_ns_mv", + "*_p_dt_dv_ns_mv", "*_p_dt_mv_dv", + "*_p_dt_mv_dv_smf", "*_p_dt_ni_mv_dv_smf", + "*_p_dt_mv_dv_sml", "*_p_dt_ni_mv_dv_sml")) { assertTrue(r, regexToTest.remove(r)); String field = r.replace("*", "number"); @@ -1780,24 +2627,27 @@ public void testDatePointFieldSortAndFunction() throws Exception { // value source (w/o field(...,min|max)) usuage should still error... doTestPointFieldFunctionQueryError(field, "multivalued", "1995-12-31T23:59:59Z"); - doTestPointFieldFunctionQueryError(field, "multivalued", "1995-12-31T23:59:59Z", "2000-12-31T23:59:59Z"); - } + doTestPointFieldFunctionQueryError( + field, "multivalued", "1995-12-31T23:59:59Z", "2000-12-31T23:59:59Z"); + } assertEquals("Missing types in the test", Collections.emptySet(), regexToTest); } @Test public void testDatePointFieldFacetField() throws Exception { - doTestPointFieldFacetField("number_p_dt", "number_p_dt_dv", getSequentialStringArrayWithDates(10)); + doTestPointFieldFacetField( + "number_p_dt", "number_p_dt_dv", getSequentialStringArrayWithDates(10)); clearIndex(); assertU(commit()); - doTestPointFieldFacetField("number_p_dt", "number_p_dt_dv", toStringArray(getRandomInstants(10, false))); + doTestPointFieldFacetField( + "number_p_dt", "number_p_dt_dv", toStringArray(getRandomInstants(10, false))); } private static class DateGapCeiling { String calendarUnit = "MILLIS"; long inCalendarUnits; boolean negative = false; - + /** Maximize calendar unit size given initialGapMillis; performs ceiling on each conversion */ DateGapCeiling(long initialGapMillis) { negative = initialGapMillis < 0; @@ -1817,9 +2667,10 @@ private static class DateGapCeiling { if (inCalendarUnits >= 12L) { calendarUnit = "MONTHS"; inCalendarUnits = (inCalendarUnits + 11L) / 12L; - if ((inCalendarUnits * 16) >= 487) { // 487 = 365.25 / 12 * 16 (365.25 days/year, -ish) + // 487 = 365.25 / 12 * 16 (365.25 days/year, -ish) + if ((inCalendarUnits * 16) >= 487) { calendarUnit = "YEARS"; - inCalendarUnits = (16L * inCalendarUnits + 486) / 487L; + inCalendarUnits = (16L * inCalendarUnits + 486) / 487L; } } } @@ -1827,13 +2678,16 @@ private static class DateGapCeiling { } } } + @Override public String toString() { return (negative ? 
"-" : "+") + inCalendarUnits + calendarUnit; } - public long addTo(long millis) { // Instant.plus() doesn't work with estimated durations (MONTHS and YEARS) - LocalDateTime time = LocalDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.ofHours(0)); + public long addTo(long millis) { + // Instant.plus() doesn't work with estimated durations (MONTHS and YEARS) + LocalDateTime time = + LocalDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.ofHours(0)); if (negative) { time = time.minus(inCalendarUnits, DateMathParser.CALENDAR_UNITS.get(calendarUnit)); } else { @@ -1842,7 +2696,7 @@ public long addTo(long millis) { // Instant.plus() doesn't work with estimated return time.atZone(ZoneOffset.ofHours(0)).toInstant().toEpochMilli(); } } - + @Test public void testDatePointFieldRangeFacet() throws Exception { String docValuesField = "number_p_dt_dv"; @@ -1858,78 +2712,149 @@ public void testDatePointFieldRangeFacet() throws Exception { min = sortedValues.get(0); max = sortedValues.get(sortedValues.size() - 1); } while (max > MAX_DATE_EPOCH_MILLIS || min < MIN_DATE_EPOCH_MILLIS); - long initialGap = BigInteger.valueOf(max).subtract(BigInteger.valueOf(min)) - .divide(BigInteger.valueOf(numBuckets)).longValueExact(); - gap = new DateGapCeiling(BigInteger.valueOf(max + initialGap).subtract(BigInteger.valueOf(min)) // padding for rounding - .divide(BigInteger.valueOf(numBuckets)).longValueExact()); + long initialGap = + BigInteger.valueOf(max) + .subtract(BigInteger.valueOf(min)) + .divide(BigInteger.valueOf(numBuckets)) + .longValueExact(); + gap = + new DateGapCeiling( + BigInteger.valueOf(max + initialGap) + .subtract(BigInteger.valueOf(min)) // padding for rounding + .divide(BigInteger.valueOf(numBuckets)) + .longValueExact()); int[] bucketCount = new int[numBuckets]; int bucketNum = 0; long minBucketVal = min; - // System.err.println("min:" + Instant.ofEpochMilli(min) + " max: " + Instant.ofEpochMilli(max) + " gap: " + gap); - // System.err.println("bucketNum: " + bucketNum + " minBucketVal: " + Instant.ofEpochMilli(minBucketVal)); + // System.err.println("min:" + Instant.ofEpochMilli(min) + " max: " + + // Instant.ofEpochMilli(max) + " gap: " + gap); + // System.err.println("bucketNum: " + bucketNum + " minBucketVal: " + + // Instant.ofEpochMilli(minBucketVal)); for (long value : sortedValues) { // System.err.println("value: " + Instant.ofEpochMilli(value)); while (value >= gap.addTo(minBucketVal)) { ++bucketNum; minBucketVal = gap.addTo(minBucketVal); - // System.err.println("bucketNum: " + bucketNum + " minBucketVal: " + Instant.ofEpochMilli(minBucketVal)); + // System.err.println("bucketNum: " + bucketNum + " minBucketVal: " + + // Instant.ofEpochMilli(minBucketVal)); } ++bucketCount[bucketNum]; } - for (int i = 0 ; i < numValues ; i++) { - assertU(adoc("id", String.valueOf(i), docValuesField, Instant.ofEpochMilli(values.get(i)).toString(), - nonDocValuesField, Instant.ofEpochMilli(values.get(i)).toString())); + for (int i = 0; i < numValues; i++) { + assertU( + adoc( + "id", + String.valueOf(i), + docValuesField, + Instant.ofEpochMilli(values.get(i)).toString(), + nonDocValuesField, + Instant.ofEpochMilli(values.get(i)).toString())); } assertU(commit()); assertTrue(h.getCore().getLatestSchema().getField(docValuesField).hasDocValues()); - assertTrue(h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField); + assertTrue( + h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField); String[] testStrings = new 
String[numBuckets + 1]; testStrings[numBuckets] = "//*[@numFound='" + numValues + "']"; minBucketVal = min; - for (int i = 0 ; i < numBuckets ; ++i) { - testStrings[i] = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField - + "']/lst[@name='counts']/int[@name='" + Instant.ofEpochMilli(minBucketVal) - + "'][.='" + bucketCount[i] + "']"; + for (int i = 0; i < numBuckets; ++i) { + testStrings[i] = + "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + + docValuesField + + "']/lst[@name='counts']/int[@name='" + + Instant.ofEpochMilli(minBucketVal) + + "'][.='" + + bucketCount[i] + + "']"; minBucketVal = gap.addTo(minBucketVal); } long maxPlusGap = gap.addTo(max); - assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, - "facet.range.start", Instant.ofEpochMilli(min).toString(), - "facet.range.end", Instant.ofEpochMilli(maxPlusGap).toString(), - "facet.range.gap", gap.toString()), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + docValuesField, + "facet.range.start", + Instant.ofEpochMilli(min).toString(), + "facet.range.end", + Instant.ofEpochMilli(maxPlusGap).toString(), + "facet.range.gap", + gap.toString()), testStrings); - assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, - "facet.range.start", Instant.ofEpochMilli(min).toString(), - "facet.range.end", Instant.ofEpochMilli(maxPlusGap).toString(), - "facet.range.gap", gap.toString(), - "facet.range.method", "dv"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + docValuesField, + "facet.range.start", + Instant.ofEpochMilli(min).toString(), + "facet.range.end", + Instant.ofEpochMilli(maxPlusGap).toString(), + "facet.range.gap", + gap.toString(), + "facet.range.method", + "dv"), testStrings); assertFalse(h.getCore().getLatestSchema().getField(nonDocValuesField).hasDocValues()); - assertTrue(h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField); + assertTrue( + h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField); minBucketVal = min; - for (int i = 0 ; i < numBuckets ; ++i) { - testStrings[i] = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField - + "']/lst[@name='counts']/int[@name='" + Instant.ofEpochMilli(minBucketVal).toString() - + "'][.='" + bucketCount[i] + "']"; + for (int i = 0; i < numBuckets; ++i) { + testStrings[i] = + "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + + nonDocValuesField + + "']/lst[@name='counts']/int[@name='" + + Instant.ofEpochMilli(minBucketVal).toString() + + "'][.='" + + bucketCount[i] + + "']"; minBucketVal = gap.addTo(minBucketVal); } maxPlusGap = gap.addTo(max); // Range Faceting with method = filter should work - assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, - "facet.range.start", Instant.ofEpochMilli(min).toString(), - "facet.range.end", Instant.ofEpochMilli(maxPlusGap).toString(), - "facet.range.gap", gap.toString(), - "facet.range.method", "filter"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + nonDocValuesField, + "facet.range.start", + Instant.ofEpochMilli(min).toString(), + "facet.range.end", + Instant.ofEpochMilli(maxPlusGap).toString(), + "facet.range.gap", + gap.toString(), + "facet.range.method", + "filter"), testStrings); // this should actually use filter method instead of dv - assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, - 
"facet.range.start", Instant.ofEpochMilli(min).toString(), - "facet.range.end", Instant.ofEpochMilli(maxPlusGap).toString(), - "facet.range.gap", gap.toString(), - "facet.range.method", "dv"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + nonDocValuesField, + "facet.range.start", + Instant.ofEpochMilli(min).toString(), + "facet.range.end", + Instant.ofEpochMilli(maxPlusGap).toString(), + "facet.range.gap", + gap.toString(), + "facet.range.method", + "dv"), testStrings); } @@ -1953,7 +2878,7 @@ public void testDatePointFieldMultiValuedNonSearchableExactQuery() throws Except doTestPointFieldMultiValuedExactQuery("number_p_dt_ni_mv", dates, false); doTestPointFieldMultiValuedExactQuery("number_p_dt_ni_ns_mv", dates, false); } - + @Test public void testDatePointFieldMultiValuedReturn() throws Exception { String[] dates = toStringArray(getRandomInstants(20, false)); @@ -1964,15 +2889,18 @@ public void testDatePointFieldMultiValuedReturn() throws Exception { @Test public void testDatePointFieldMultiValuedRangeQuery() throws Exception { - String[] dates = toStringArray(getRandomInstants(20, false).stream().sorted().collect(Collectors.toList())); + String[] dates = + toStringArray(getRandomInstants(20, false).stream().sorted().collect(Collectors.toList())); doTestPointFieldMultiValuedRangeQuery("number_p_dt_mv", "date", dates); doTestPointFieldMultiValuedRangeQuery("number_p_dt_ni_mv_dv", "date", dates); } @Test public void testDatePointFieldMultiValuedFacetField() throws Exception { - doTestPointFieldMultiValuedFacetField("number_p_dt_mv", "number_p_dt_mv_dv", getSequentialStringArrayWithDates(20)); - doTestPointFieldMultiValuedFacetField("number_p_dt_mv", "number_p_dt_mv_dv", toStringArray(getRandomInstants(20, false))); + doTestPointFieldMultiValuedFacetField( + "number_p_dt_mv", "number_p_dt_mv_dv", getSequentialStringArrayWithDates(20)); + doTestPointFieldMultiValuedFacetField( + "number_p_dt_mv", "number_p_dt_mv_dv", toStringArray(getRandomInstants(20, false))); } @Test @@ -1982,7 +2910,7 @@ public void testDatePointFieldMultiValuedRangeFacet() throws Exception { assertTrue(dvSchemaField.multiValued()); assertTrue(dvSchemaField.hasDocValues()); assertTrue(dvSchemaField.getType() instanceof PointField); - + String nonDocValuesField = "number_p_dt_mv"; SchemaField nonDvSchemaField = h.getCore().getLatestSchema().getField(nonDocValuesField); assertTrue(nonDvSchemaField.multiValued()); @@ -2000,33 +2928,50 @@ public void testDatePointFieldMultiValuedRangeFacet() throws Exception { min = sortedValues.get(0).val; max = sortedValues.get(sortedValues.size() - 1).val; } while (max > MAX_DATE_EPOCH_MILLIS || min < MIN_DATE_EPOCH_MILLIS); - long initialGap = BigInteger.valueOf(max).subtract(BigInteger.valueOf(min)) - .divide(BigInteger.valueOf(numBuckets)).longValueExact(); - DateGapCeiling gap = new DateGapCeiling(BigInteger.valueOf(max + initialGap).subtract(BigInteger.valueOf(min)) // padding for rounding - .divide(BigInteger.valueOf(numBuckets)).longValueExact()); + long initialGap = + BigInteger.valueOf(max) + .subtract(BigInteger.valueOf(min)) + .divide(BigInteger.valueOf(numBuckets)) + .longValueExact(); + DateGapCeiling gap = + new DateGapCeiling( + BigInteger.valueOf(max + initialGap) + .subtract(BigInteger.valueOf(min)) // padding for rounding + .divide(BigInteger.valueOf(numBuckets)) + .longValueExact()); List> docIdBucket = new ArrayList<>(numBuckets); - for (int i = 0 ; i < numBuckets ; ++i) { + for (int i = 0; i < numBuckets; ++i) { docIdBucket.add(new 
HashSet<>()); } int bucketNum = 0; long minBucketVal = min; - // System.err.println("min:" + Instant.ofEpochMilli(min) + " max: " + Instant.ofEpochMilli(max) + " gap: " + gap); - // System.err.println("bucketNum: " + bucketNum + " minBucketVal: " + Instant.ofEpochMilli(minBucketVal)); + // System.err.println("min:" + Instant.ofEpochMilli(min) + " max: " + + // Instant.ofEpochMilli(max) + " gap: " + gap); + // System.err.println("bucketNum: " + bucketNum + " minBucketVal: " + + // Instant.ofEpochMilli(minBucketVal)); for (PosVal value : sortedValues) { // System.err.println("value: " + Instant.ofEpochMilli(value.val)); while (value.val >= gap.addTo(minBucketVal)) { ++bucketNum; minBucketVal = gap.addTo(minBucketVal); - // System.err.println("bucketNum: " + bucketNum + " minBucketVal: " + Instant.ofEpochMilli(minBucketVal)); + // System.err.println("bucketNum: " + bucketNum + " minBucketVal: " + + // Instant.ofEpochMilli(minBucketVal)); } - docIdBucket.get(bucketNum).add(value.pos / 2); // each doc gets two consecutive values - } - for (int i = 0 ; i < numValues ; i += 2) { - assertU(adoc("id", String.valueOf(i / 2), - docValuesField, Instant.ofEpochMilli(values.get(i)).toString(), - docValuesField, Instant.ofEpochMilli(values.get(i + 1)).toString(), - nonDocValuesField, Instant.ofEpochMilli(values.get(i)).toString(), - nonDocValuesField, Instant.ofEpochMilli(values.get(i + 1)).toString())); + docIdBucket.get(bucketNum).add(value.pos / 2); // each doc gets two consecutive values + } + for (int i = 0; i < numValues; i += 2) { + assertU( + adoc( + "id", + String.valueOf(i / 2), + docValuesField, + Instant.ofEpochMilli(values.get(i)).toString(), + docValuesField, + Instant.ofEpochMilli(values.get(i + 1)).toString(), + nonDocValuesField, + Instant.ofEpochMilli(values.get(i)).toString(), + nonDocValuesField, + Instant.ofEpochMilli(values.get(i + 1)).toString())); } assertU(commit()); @@ -2035,42 +2980,111 @@ public void testDatePointFieldMultiValuedRangeFacet() throws Exception { String[] testStrings = new String[numBuckets + 1]; testStrings[numBuckets] = "//*[@numFound='" + (numValues / 2) + "']"; minBucketVal = min; - for (int i = 0 ; i < numBuckets ; minBucketVal = gap.addTo(minBucketVal), ++i) { - testStrings[i] = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + docValuesField - + "']/lst[@name='counts']/int[@name='" + Instant.ofEpochMilli(minBucketVal) - + "'][.='" + docIdBucket.get(i).size() + "']"; - } - - assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, - "facet.range.start", minDate, "facet.range.end", maxDate, - "facet.range.gap", gap.toString(), "indent", "on"), + for (int i = 0; i < numBuckets; minBucketVal = gap.addTo(minBucketVal), ++i) { + testStrings[i] = + "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + + docValuesField + + "']/lst[@name='counts']/int[@name='" + + Instant.ofEpochMilli(minBucketVal) + + "'][.='" + + docIdBucket.get(i).size() + + "']"; + } + + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + docValuesField, + "facet.range.start", + minDate, + "facet.range.end", + maxDate, + "facet.range.gap", + gap.toString(), + "indent", + "on"), testStrings); - assertQ(req("q", "*:*", "facet", "true", "facet.range", docValuesField, - "facet.range.start", minDate, "facet.range.end", maxDate, - "facet.range.gap", gap.toString(), "facet.range.method", "dv", "indent", "on"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + docValuesField, + "facet.range.start", + minDate, 
+ "facet.range.end", + maxDate, + "facet.range.gap", + gap.toString(), + "facet.range.method", + "dv", + "indent", + "on"), testStrings); minBucketVal = min; - for (int i = 0 ; i < numBuckets ; minBucketVal = gap.addTo(minBucketVal), ++i) { - testStrings[i] = "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + nonDocValuesField - + "']/lst[@name='counts']/int[@name='" + Instant.ofEpochMilli(minBucketVal) - + "'][.='" + docIdBucket.get(i).size() + "']"; + for (int i = 0; i < numBuckets; minBucketVal = gap.addTo(minBucketVal), ++i) { + testStrings[i] = + "//lst[@name='facet_counts']/lst[@name='facet_ranges']/lst[@name='" + + nonDocValuesField + + "']/lst[@name='counts']/int[@name='" + + Instant.ofEpochMilli(minBucketVal) + + "'][.='" + + docIdBucket.get(i).size() + + "']"; } // Range Faceting with method = filter should work - assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, - "facet.range.start", minDate, "facet.range.end", maxDate, - "facet.range.gap", gap.toString(), "facet.range.method", "filter", "indent", "on"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + nonDocValuesField, + "facet.range.start", + minDate, + "facet.range.end", + maxDate, + "facet.range.gap", + gap.toString(), + "facet.range.method", + "filter", + "indent", + "on"), testStrings); // this should actually use filter method instead of dv - assertQ(req("q", "*:*", "facet", "true", "facet.range", nonDocValuesField, - "facet.range.start", minDate, "facet.range.end", maxDate, - "facet.range.gap", gap.toString(), "facet.range.method", "dv", "indent", "on"), + assertQ( + req( + "q", + "*:*", + "facet", + "true", + "facet.range", + nonDocValuesField, + "facet.range.start", + minDate, + "facet.range.end", + maxDate, + "facet.range.gap", + gap.toString(), + "facet.range.method", + "dv", + "indent", + "on"), testStrings); } @Test public void testDatePointMultiValuedFunctionQuery() throws Exception { - String[] dates = toStringArray(getRandomInstants(20, false).stream().sorted().collect(Collectors.toList())); + String[] dates = + toStringArray(getRandomInstants(20, false).stream().sorted().collect(Collectors.toList())); doTestPointMultiValuedFunctionQuery("number_p_dt_mv", "number_p_dt_mv_dv", "date", dates); } @@ -2089,8 +3103,11 @@ public void testMultiValuedDatePointFieldsAtomicUpdates() throws Exception { if (!Boolean.getBoolean("enable.update.log")) { return; } - List datesList = getRandomLongs(3, false, MAX_DATE_EPOCH_MILLIS) - .stream().map(Instant::ofEpochMilli).map(Object::toString).collect(Collectors.toList()); + List datesList = + getRandomLongs(3, false, MAX_DATE_EPOCH_MILLIS).stream() + .map(Instant::ofEpochMilli) + .map(Object::toString) + .collect(Collectors.toList()); String[] dates = datesList.toArray(new String[datesList.size()]); doTestMultiValuedPointFieldsAtomicUpdates("number_p_dt_mv", "date", dates); doTestMultiValuedPointFieldsAtomicUpdates("number_p_dt_ni_mv_dv", "date", dates); @@ -2103,71 +3120,100 @@ public void testDatePointSetQuery() throws Exception { doTestSetQueries("number_p_dt_mv", toStringArray(getRandomInstants(20, false)), true); doTestSetQueries("number_p_dt_ni_dv", toStringArray(getRandomInstants(20, false)), false); } - - + @Test public void testDatePointFieldNotIndexed() throws Exception { String[] dates = toStringArray(getRandomInstants(10, false)); doTestFieldNotIndexed("number_p_dt_ni", dates); doTestFieldNotIndexed("number_p_dt_ni_mv", dates); } - + @Test public void testIndexOrDocValuesQuery() throws Exception { - 
String[] fieldTypeNames = new String[] { "_p_i", "_p_l", "_p_d", "_p_f", "_p_dt" }; - FieldType[] fieldTypes = new FieldType[] - { new IntPointField(), new LongPointField(), new DoublePointField(), new FloatPointField(), new DatePointField() }; - String[] ints = toStringArray(getRandomInts(2, false).stream().sorted().collect(Collectors.toList())); - String[] longs = toStringArray(getRandomLongs(2, false).stream().sorted().collect(Collectors.toList())); - String[] doubles = toStringArray(getRandomDoubles(2, false).stream().sorted().collect(Collectors.toList())); - String[] floats = toStringArray(getRandomFloats(2, false).stream().sorted().collect(Collectors.toList())); - String[] dates = toStringArray(getRandomInstants(2, false).stream().sorted().collect(Collectors.toList())); - String[] min = new String[] { ints[0], longs[0], doubles[0], floats[0], dates[0] }; - String[] max = new String[] { ints[1], longs[1], doubles[1], floats[1], dates[1] }; + String[] fieldTypeNames = new String[] {"_p_i", "_p_l", "_p_d", "_p_f", "_p_dt"}; + FieldType[] fieldTypes = + new FieldType[] { + new IntPointField(), + new LongPointField(), + new DoublePointField(), + new FloatPointField(), + new DatePointField() + }; + String[] ints = + toStringArray(getRandomInts(2, false).stream().sorted().collect(Collectors.toList())); + String[] longs = + toStringArray(getRandomLongs(2, false).stream().sorted().collect(Collectors.toList())); + String[] doubles = + toStringArray(getRandomDoubles(2, false).stream().sorted().collect(Collectors.toList())); + String[] floats = + toStringArray(getRandomFloats(2, false).stream().sorted().collect(Collectors.toList())); + String[] dates = + toStringArray(getRandomInstants(2, false).stream().sorted().collect(Collectors.toList())); + String[] min = new String[] {ints[0], longs[0], doubles[0], floats[0], dates[0]}; + String[] max = new String[] {ints[1], longs[1], doubles[1], floats[1], dates[1]}; assert fieldTypeNames.length == fieldTypes.length && fieldTypeNames.length == max.length && fieldTypeNames.length == min.length; for (int i = 0; i < fieldTypeNames.length; i++) { SchemaField fieldIndexed = h.getCore().getLatestSchema().getField("foo_" + fieldTypeNames[i]); - SchemaField fieldIndexedAndDv = h.getCore().getLatestSchema().getField("foo_" + fieldTypeNames[i] + "_dv"); - SchemaField fieldIndexedMv = h.getCore().getLatestSchema().getField("foo_" + fieldTypeNames[i] + "_mv"); - SchemaField fieldIndexedAndDvMv = h.getCore().getLatestSchema().getField("foo_" + fieldTypeNames[i] + "_mv_dv"); - assertTrue(fieldTypes[i].getRangeQuery(null, fieldIndexed, min[i], max[i], true, true) instanceof PointRangeQuery); - assertTrue(fieldTypes[i].getRangeQuery(null, fieldIndexedAndDv, min[i], max[i], true, true) instanceof IndexOrDocValuesQuery); - assertTrue(fieldTypes[i].getRangeQuery(null, fieldIndexedMv, min[i], max[i], true, true) instanceof PointRangeQuery); - assertTrue(fieldTypes[i].getRangeQuery(null, fieldIndexedAndDvMv, min[i], max[i], true, true) instanceof IndexOrDocValuesQuery); - assertTrue(fieldTypes[i].getFieldQuery(null, fieldIndexed, min[i]) instanceof PointRangeQuery); - assertTrue(fieldTypes[i].getFieldQuery(null, fieldIndexedAndDv, min[i]) instanceof IndexOrDocValuesQuery); - assertTrue(fieldTypes[i].getFieldQuery(null, fieldIndexedMv, min[i]) instanceof PointRangeQuery); - assertTrue(fieldTypes[i].getFieldQuery(null, fieldIndexedAndDvMv, min[i]) instanceof IndexOrDocValuesQuery); - } - } - + SchemaField fieldIndexedAndDv = + h.getCore().getLatestSchema().getField("foo_" + 
fieldTypeNames[i] + "_dv"); + SchemaField fieldIndexedMv = + h.getCore().getLatestSchema().getField("foo_" + fieldTypeNames[i] + "_mv"); + SchemaField fieldIndexedAndDvMv = + h.getCore().getLatestSchema().getField("foo_" + fieldTypeNames[i] + "_mv_dv"); + assertTrue( + fieldTypes[i].getRangeQuery(null, fieldIndexed, min[i], max[i], true, true) + instanceof PointRangeQuery); + assertTrue( + fieldTypes[i].getRangeQuery(null, fieldIndexedAndDv, min[i], max[i], true, true) + instanceof IndexOrDocValuesQuery); + assertTrue( + fieldTypes[i].getRangeQuery(null, fieldIndexedMv, min[i], max[i], true, true) + instanceof PointRangeQuery); + assertTrue( + fieldTypes[i].getRangeQuery(null, fieldIndexedAndDvMv, min[i], max[i], true, true) + instanceof IndexOrDocValuesQuery); + assertTrue( + fieldTypes[i].getFieldQuery(null, fieldIndexed, min[i]) instanceof PointRangeQuery); + assertTrue( + fieldTypes[i].getFieldQuery(null, fieldIndexedAndDv, min[i]) + instanceof IndexOrDocValuesQuery); + assertTrue( + fieldTypes[i].getFieldQuery(null, fieldIndexedMv, min[i]) instanceof PointRangeQuery); + assertTrue( + fieldTypes[i].getFieldQuery(null, fieldIndexedAndDvMv, min[i]) + instanceof IndexOrDocValuesQuery); + } + } + public void testInternals() throws IOException { - String[] types = new String[]{"i", "l", "f", "d", "dt"}; - String[][] values = new String[][] { - toStringArray(getRandomInts(10, false)), - toStringArray(getRandomLongs(10, false)), - toStringArray(getRandomFloats(10, false)), - toStringArray(getRandomDoubles(10, false)), - toStringArray(getRandomInstants(10, false)) - }; + String[] types = new String[] {"i", "l", "f", "d", "dt"}; + String[][] values = + new String[][] { + toStringArray(getRandomInts(10, false)), + toStringArray(getRandomLongs(10, false)), + toStringArray(getRandomFloats(10, false)), + toStringArray(getRandomDoubles(10, false)), + toStringArray(getRandomInstants(10, false)) + }; assertEquals(types.length, values.length); Set typesTested = new HashSet<>(); - for (int i = 0 ; i < types.length ; ++i) { - for (String suffix:FIELD_SUFFIXES) { + for (int i = 0; i < types.length; ++i) { + for (String suffix : FIELD_SUFFIXES) { doTestInternals("number_p_" + types[i] + suffix, values[i]); typesTested.add("*_p_" + types[i] + suffix); } } - assertEquals("Missing types in the test", dynFieldRegexesForType(PointField.class), typesTested); + assertEquals( + "Missing types in the test", dynFieldRegexesForType(PointField.class), typesTested); } - + // Helper methods /** - * Given a FieldType, return the list of DynamicField 'regexes' for all declared - * DynamicFields that use that FieldType. + * Given a FieldType, return the list of DynamicField 'regexes' for all declared DynamicFields + * that use that FieldType. 
* * @see IndexSchema#getDynamicFields * @see DynamicField#getRegex @@ -2181,11 +3227,11 @@ private static SortedSet dynFieldRegexesForType(final Class List getRandomList(int length, boolean missingVals, Supplier randomVal) { List list = new ArrayList<>(length); - for (int i = 0 ; i < length ; ++i) { - T val = null; + for (int i = 0; i < length; ++i) { + T val = null; // Sometimes leave val as null when we're producing missing values if (missingVals == false || usually()) { val = randomVal.get(); @@ -2196,28 +3242,36 @@ private List getRandomList(int length, boolean missingVals, Supplier r } private List getRandomDoubles(int length, boolean missingVals) { - return getRandomList(length, missingVals, () -> { - Double d = Double.NaN; - while (d.isNaN()) { - d = Double.longBitsToDouble(random().nextLong()); - } - return d; - }); + return getRandomList( + length, + missingVals, + () -> { + Double d = Double.NaN; + while (d.isNaN()) { + d = Double.longBitsToDouble(random().nextLong()); + } + return d; + }); } private List getRandomFloats(int length, boolean missingVals) { - return getRandomList(length, missingVals, () -> { - Float f = Float.NaN; - while (f.isNaN()) { - f = Float.intBitsToFloat(random().nextInt()); - } - return f; - }); + return getRandomList( + length, + missingVals, + () -> { + Float f = Float.NaN; + while (f.isNaN()) { + f = Float.intBitsToFloat(random().nextInt()); + } + return f; + }); } private List getRandomInts(int length, boolean missingVals, int boundPosNeg) { assert boundPosNeg > 0L; - return getRandomList(length, missingVals, + return getRandomList( + length, + missingVals, () -> (random().nextBoolean() ? 1 : -1) * random().nextInt(boundPosNeg)); } @@ -2227,7 +3281,9 @@ private List getRandomInts(int length, boolean missingVals) { private List getRandomLongs(int length, boolean missingVals, long boundPosNeg) { assert boundPosNeg > 0L; - return getRandomList(length, missingVals, + return getRandomList( + length, + missingVals, () -> random().nextLong() % boundPosNeg); // see Random.nextInt(int bound) } @@ -2238,7 +3294,7 @@ private List getRandomLongs(int length, boolean missingVals) { private List getRandomInstants(int length, boolean missingVals) { return getRandomList(length, missingVals, () -> Instant.ofEpochMilli(random().nextLong())); } - + private String[] getSequentialStringArrayWithInts(int length) { String[] arr = new String[length]; for (int i = 0; i < length; i++) { @@ -2255,7 +3311,7 @@ private String[] getSequentialStringArrayWithDates(int length) { } return arr; } - + private String[] getSequentialStringArrayWithDoubles(int length) { String[] arr = new String[length]; for (int i = 0; i < length; i++) { @@ -2270,50 +3326,68 @@ private void doTestFieldNotIndexed(String field, String[] values) throws IOExcep SchemaField sf = h.getCore().getLatestSchema().getField(field); assertFalse("Field should be indexed=false", sf.indexed()); assertFalse("Field should be docValues=false", sf.hasDocValues()); - - for (int i=0; i < 10; i++) { + + for (int i = 0; i < 10; i++) { assertU(adoc("id", String.valueOf(i), field, values[i])); } assertU(commit()); assertQ(req("q", "*:*"), "//*[@numFound='10']"); - assertQ("Can't search on index=false docValues=false field", req("q", field + ":[* TO *]"), "//*[@numFound='0']"); - h.getCore().withSearcher(searcher -> { - IndexReader ir = searcher.getIndexReader(); - assertEquals("Field " + field + " should have no point values", 0, PointValues.size(ir, field)); - return null; - }); - } - - - private void 
doTestIntPointFieldExactQuery(final String field, final boolean testLong) throws Exception { + assertQ( + "Can't search on index=false docValues=false field", + req("q", field + ":[* TO *]"), + "//*[@numFound='0']"); + h.getCore() + .withSearcher( + searcher -> { + IndexReader ir = searcher.getIndexReader(); + assertEquals( + "Field " + field + " should have no point values", + 0, + PointValues.size(ir, field)); + return null; + }); + } + + private void doTestIntPointFieldExactQuery(final String field, final boolean testLong) + throws Exception { doTestIntPointFieldExactQuery(field, testLong, true); } private String getTestString(boolean searchable, int numFound) { return "//*[@numFound='" + (searchable ? Integer.toString(numFound) : "0") + "']"; } - + /** * @param field the field to use for indexing and searching against - * @param testLong set to true if "field" is expected to support long values, false if only integers - * @param searchable set to true if searches against "field" should succeed, false if field is only stored and searches should always get numFound=0 + * @param testLong set to true if "field" is expected to support long values, false if only + * integers + * @param searchable set to true if searches against "field" should succeed, false if field is + * only stored and searches should always get numFound=0 */ - private void doTestIntPointFieldExactQuery(final String field, final boolean testLong, final boolean searchable) throws Exception { + private void doTestIntPointFieldExactQuery( + final String field, final boolean testLong, final boolean searchable) throws Exception { int numValues = 10 * RANDOM_MULTIPLIER; - Map randCount = new HashMap<>(numValues); - String[] rand = testLong ? toStringArray(getRandomLongs(numValues, false)) - : toStringArray(getRandomInts(numValues, false)); - for (int i = 0 ; i < numValues ; i++) { + Map randCount = new HashMap<>(numValues); + String[] rand = + testLong + ? toStringArray(getRandomLongs(numValues, false)) + : toStringArray(getRandomInts(numValues, false)); + for (int i = 0; i < numValues; i++) { randCount.merge(rand[i], 1, (a, b) -> a + b); // count unique values assertU(adoc("id", String.valueOf(i), field, rand[i])); } assertU(commit()); - for (int i = 0 ; i < numValues ; i++) { - assertQ(req("q", field + ":" + (rand[i].startsWith("-") ? "\\" : "") + rand[i], - "fl", "id," + field), getTestString(searchable, randCount.get(rand[i]))); + for (int i = 0; i < numValues; i++) { + assertQ( + req( + "q", + field + ":" + (rand[i].startsWith("-") ? 
"\\" : "") + rand[i], + "fl", + "id," + field), + getTestString(searchable, randCount.get(rand[i]))); } - + StringBuilder builder = new StringBuilder(); for (String value : randCount.keySet()) { if (builder.length() != 0) { @@ -2324,42 +3398,58 @@ private void doTestIntPointFieldExactQuery(final String field, final boolean tes } builder.append(value); } - assertQ(req("debug", "true", "q", field + ":(" + builder.toString() + ")"), getTestString(searchable, numValues)); - - assertU(adoc("id", String.valueOf(Integer.MAX_VALUE), field, String.valueOf(Integer.MAX_VALUE))); + assertQ( + req("debug", "true", "q", field + ":(" + builder.toString() + ")"), + getTestString(searchable, numValues)); + + assertU( + adoc("id", String.valueOf(Integer.MAX_VALUE), field, String.valueOf(Integer.MAX_VALUE))); assertU(commit()); - assertQ(req("q", field + ":"+Integer.MAX_VALUE, "fl", "id, " + field), getTestString(searchable, 1)); - + assertQ( + req("q", field + ":" + Integer.MAX_VALUE, "fl", "id, " + field), + getTestString(searchable, 1)); + clearIndex(); assertU(commit()); } private void doTestPointFieldReturn(String field, String type, String[] values) throws Exception { SchemaField sf = h.getCore().getLatestSchema().getField(field); - assert sf.stored() || (sf.hasDocValues() && sf.useDocValuesAsStored()): - "Unexpected field definition for " + field; - for (int i=0; i < values.length; i++) { + assert sf.stored() || (sf.hasDocValues() && sf.useDocValuesAsStored()) + : "Unexpected field definition for " + field; + for (int i = 0; i < values.length; i++) { assertU(adoc("id", String.valueOf(i), field, values[i])); } // Check using RTG if (Boolean.getBoolean("enable.update.log")) { for (int i = 0; i < values.length; i++) { - assertQ(req("qt", "/get", "id", String.valueOf(i)), + assertQ( + req("qt", "/get", "id", String.valueOf(i)), "//doc/" + type + "[@name='" + field + "'][.='" + values[i] + "']"); } } assertU(commit()); String[] expected = new String[values.length + 1]; - expected[0] = "//*[@numFound='" + values.length + "']"; + expected[0] = "//*[@numFound='" + values.length + "']"; for (int i = 0; i < values.length; i++) { - expected[i + 1] = "//result/doc[str[@name='id']='" + i + "']/" + type + "[@name='" + field + "'][.='" + values[i] + "']"; + expected[i + 1] = + "//result/doc[str[@name='id']='" + + i + + "']/" + + type + + "[@name='" + + field + + "'][.='" + + values[i] + + "']"; } assertQ(req("q", "*:*", "fl", "id, " + field, "rows", String.valueOf(values.length)), expected); // Check using RTG if (Boolean.getBoolean("enable.update.log")) { for (int i = 0; i < values.length; i++) { - assertQ(req("qt", "/get", "id", String.valueOf(i)), + assertQ( + req("qt", "/get", "id", String.valueOf(i)), "//doc/" + type + "[@name='" + field + "'][.='" + values[i] + "']"); } } @@ -2367,7 +3457,8 @@ private void doTestPointFieldReturn(String field, String type, String[] values) assertU(commit()); } - private void doTestPointFieldNonSearchableRangeQuery(String fieldName, String... values) throws Exception { + private void doTestPointFieldNonSearchableRangeQuery(String fieldName, String... values) + throws Exception { for (int i = 9; i >= 0; i--) { SolrInputDocument doc = sdoc("id", String.valueOf(i)); for (String value : values) { @@ -2376,81 +3467,104 @@ private void doTestPointFieldNonSearchableRangeQuery(String fieldName, String... 
assertU(adoc(doc)); } assertU(commit()); - assertQ(req("q", fieldName + ":[* TO *]", "fl", "id, " + fieldName, "sort", "id asc"), - "//*[@numFound='0']"); + assertQ( + req("q", fieldName + ":[* TO *]", "fl", "id, " + fieldName, "sort", "id asc"), + "//*[@numFound='0']"); } - private void doTestIntPointFieldRangeQuery(String fieldName, String type, boolean testLong) throws Exception { + private void doTestIntPointFieldRangeQuery(String fieldName, String type, boolean testLong) + throws Exception { for (int i = 9; i >= 0; i--) { assertU(adoc("id", String.valueOf(i), fieldName, String.valueOf(i))); } assertU(commit()); - assertQ(req("q", fieldName + ":[0 TO 3]", "fl", "id, " + fieldName, "sort", "id asc"), + assertQ( + req("q", fieldName + ":[0 TO 3]", "fl", "id, " + fieldName, "sort", "id asc"), "//*[@numFound='4']", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='0']", "//result/doc[2]/" + type + "[@name='" + fieldName + "'][.='1']", "//result/doc[3]/" + type + "[@name='" + fieldName + "'][.='2']", "//result/doc[4]/" + type + "[@name='" + fieldName + "'][.='3']"); - - assertQ(req("q", fieldName + ":{0 TO 3]", "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req("q", fieldName + ":{0 TO 3]", "fl", "id, " + fieldName, "sort", "id asc"), "//*[@numFound='3']", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='1']", "//result/doc[2]/" + type + "[@name='" + fieldName + "'][.='2']", "//result/doc[3]/" + type + "[@name='" + fieldName + "'][.='3']"); - - assertQ(req("q", fieldName + ":[0 TO 3}", "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req("q", fieldName + ":[0 TO 3}", "fl", "id, " + fieldName, "sort", "id asc"), "//*[@numFound='3']", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='0']", "//result/doc[2]/" + type + "[@name='" + fieldName + "'][.='1']", "//result/doc[3]/" + type + "[@name='" + fieldName + "'][.='2']"); - - assertQ(req("q", fieldName + ":{0 TO 3}", "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req("q", fieldName + ":{0 TO 3}", "fl", "id, " + fieldName, "sort", "id asc"), "//*[@numFound='2']", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='1']", "//result/doc[2]/" + type + "[@name='" + fieldName + "'][.='2']"); - - assertQ(req("q", fieldName + ":{0 TO *}", "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req("q", fieldName + ":{0 TO *}", "fl", "id, " + fieldName, "sort", "id asc"), "//*[@numFound='9']", "0=count(//result/doc/" + type + "[@name='" + fieldName + "'][.='0'])", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='1']"); - - assertQ(req("q", fieldName + ":{* TO 3}", "fl", "id, " + fieldName, "sort", "id desc"), + + assertQ( + req("q", fieldName + ":{* TO 3}", "fl", "id, " + fieldName, "sort", "id desc"), "//*[@numFound='3']", "0=count(//result/doc/" + type + "[@name='" + fieldName + "'][.='3'])", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='2']", "//result/doc[2]/" + type + "[@name='" + fieldName + "'][.='1']", "//result/doc[3]/" + type + "[@name='" + fieldName + "'][.='0']"); - - assertQ(req("q", fieldName + ":[* TO 3}", "fl", "id, " + fieldName, "sort", "id desc"), + + assertQ( + req("q", fieldName + ":[* TO 3}", "fl", "id, " + fieldName, "sort", "id desc"), "//*[@numFound='3']", "0=count(//result/doc/" + type + "[@name='" + fieldName + "'][.='3'])", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='2']", "//result/doc[2]/" + type + "[@name='" + fieldName + "'][.='1']", "//result/doc[3]/" + type + "[@name='" + fieldName + "'][.='0']"); - - 
assertQ(req("q", fieldName + ":[* TO *}", "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req("q", fieldName + ":[* TO *}", "fl", "id, " + fieldName, "sort", "id asc"), "//*[@numFound='10']", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='0']", "//result/doc[10]/" + type + "[@name='" + fieldName + "'][.='9']"); - - assertQ(req("q", fieldName + ":[0 TO 1] OR " + fieldName + ":[8 TO 9]" , "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req( + "q", + fieldName + ":[0 TO 1] OR " + fieldName + ":[8 TO 9]", + "fl", + "id, " + fieldName, + "sort", + "id asc"), "//*[@numFound='4']", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='0']", "//result/doc[2]/" + type + "[@name='" + fieldName + "'][.='1']", "//result/doc[3]/" + type + "[@name='" + fieldName + "'][.='8']", "//result/doc[4]/" + type + "[@name='" + fieldName + "'][.='9']"); - - assertQ(req("q", fieldName + ":[0 TO 1] AND " + fieldName + ":[1 TO 2]" , "fl", "id, " + fieldName), + + assertQ( + req("q", fieldName + ":[0 TO 1] AND " + fieldName + ":[1 TO 2]", "fl", "id, " + fieldName), "//*[@numFound='1']", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='1']"); - - assertQ(req("q", fieldName + ":[0 TO 1] AND NOT " + fieldName + ":[1 TO 2]" , "fl", "id, " + fieldName), + + assertQ( + req( + "q", + fieldName + ":[0 TO 1] AND NOT " + fieldName + ":[1 TO 2]", + "fl", + "id, " + fieldName), "//*[@numFound='1']", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='0']"); clearIndex(); assertU(commit()); - + String[] arr; if (testLong) { arr = toAscendingStringArray(getRandomLongs(100, false), true); @@ -2462,92 +3576,233 @@ private void doTestIntPointFieldRangeQuery(String fieldName, String type, boolea } assertU(commit()); for (int i = 0; i < arr.length; i++) { - assertQ(req("q", fieldName + ":[" + arr[0] + " TO " + arr[i] + "]", "fl", "id, " + fieldName), + assertQ( + req("q", fieldName + ":[" + arr[0] + " TO " + arr[i] + "]", "fl", "id, " + fieldName), "//*[@numFound='" + (i + 1) + "']"); - assertQ(req("q", fieldName + ":{" + arr[0] + " TO " + arr[i] + "}", "fl", "id, " + fieldName), - "//*[@numFound='" + (Math.max(0, i-1)) + "']"); - assertQ(req("q", fieldName + ":[" + arr[0] + " TO " + arr[i] + "] AND " + fieldName + ":" + arr[0].replace("-", "\\-"), "fl", "id, " + fieldName), + assertQ( + req("q", fieldName + ":{" + arr[0] + " TO " + arr[i] + "}", "fl", "id, " + fieldName), + "//*[@numFound='" + (Math.max(0, i - 1)) + "']"); + assertQ( + req( + "q", + fieldName + + ":[" + + arr[0] + + " TO " + + arr[i] + + "] AND " + + fieldName + + ":" + + arr[0].replace("-", "\\-"), + "fl", + "id, " + fieldName), "//*[@numFound='1']"); } if (testLong) { - assertQ(req("q", fieldName + ":[" + Long.MIN_VALUE + " TO " + Long.MIN_VALUE + "}", "fl", "id, " + fieldName), + assertQ( + req( + "q", + fieldName + ":[" + Long.MIN_VALUE + " TO " + Long.MIN_VALUE + "}", + "fl", + "id, " + fieldName), "//*[@numFound='0']"); - assertQ(req("q", fieldName + ":{" + Long.MAX_VALUE + " TO " + Long.MAX_VALUE + "]", "fl", "id, " + fieldName), + assertQ( + req( + "q", + fieldName + ":{" + Long.MAX_VALUE + " TO " + Long.MAX_VALUE + "]", + "fl", + "id, " + fieldName), "//*[@numFound='0']"); } else { - assertQ(req("q", fieldName + ":[" + Integer.MIN_VALUE + " TO " + Integer.MIN_VALUE + "}", "fl", "id, " + fieldName), + assertQ( + req( + "q", + fieldName + ":[" + Integer.MIN_VALUE + " TO " + Integer.MIN_VALUE + "}", + "fl", + "id, " + fieldName), "//*[@numFound='0']"); - assertQ(req("q", fieldName + ":{" + 
Integer.MAX_VALUE + " TO " + Integer.MAX_VALUE + "]", "fl", "id, " + fieldName), + assertQ( + req( + "q", + fieldName + ":{" + Integer.MAX_VALUE + " TO " + Integer.MAX_VALUE + "]", + "fl", + "id, " + fieldName), "//*[@numFound='0']"); } } - - private void doTestPointFieldFacetField(String nonDocValuesField, String docValuesField, String[] numbers) throws Exception { + + private void doTestPointFieldFacetField( + String nonDocValuesField, String docValuesField, String[] numbers) throws Exception { assert numbers != null && numbers.length == 10; - + assertFalse(h.getCore().getLatestSchema().getField(docValuesField).multiValued()); assertTrue(h.getCore().getLatestSchema().getField(docValuesField).hasDocValues()); - assertTrue(h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField); - + assertTrue( + h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField); + for (int i = 0; i < 10; i++) { - assertU(adoc("id", String.valueOf(i), docValuesField, numbers[i], nonDocValuesField, numbers[i])); + assertU( + adoc("id", String.valueOf(i), docValuesField, numbers[i], nonDocValuesField, numbers[i])); } assertU(commit()); - assertQ(req("q", "*:*", "fl", "id, " + docValuesField, "facet", "true", "facet.field", docValuesField), + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + docValuesField, + "facet", + "true", + "facet.field", + docValuesField), "//*[@numFound='10']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + docValuesField +"']/int[@name='" + numbers[1] + "'][.='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + docValuesField +"']/int[@name='" + numbers[2] + "'][.='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + docValuesField +"']/int[@name='" + numbers[3] + "'][.='1']"); - + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + docValuesField + + "']/int[@name='" + + numbers[1] + + "'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + docValuesField + + "']/int[@name='" + + numbers[2] + + "'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + docValuesField + + "']/int[@name='" + + numbers[3] + + "'][.='1']"); + assertU(adoc("id", "10", docValuesField, numbers[1], nonDocValuesField, numbers[1])); - + assertU(commit()); - assertQ(req("q", "*:*", "fl", "id, " + docValuesField, "facet", "true", "facet.field", docValuesField), + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + docValuesField, + "facet", + "true", + "facet.field", + docValuesField), "//*[@numFound='11']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + docValuesField +"']/int[@name='" + numbers[1] + "'][.='2']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + docValuesField +"']/int[@name='" + numbers[2] + "'][.='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + docValuesField +"']/int[@name='" + numbers[3] + "'][.='1']"); - + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + docValuesField + + "']/int[@name='" + + numbers[1] + + "'][.='2']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + docValuesField + + "']/int[@name='" + + numbers[2] + + "'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + docValuesField + + "']/int[@name='" + + numbers[3] + + "'][.='1']"); + 
assertFalse(h.getCore().getLatestSchema().getField(nonDocValuesField).hasDocValues()); - assertTrue(h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField); - assertQEx("Expecting Exception", - "Can't facet on a PointField without docValues", - req("q", "*:*", "fl", "id, " + nonDocValuesField, "facet", "true", "facet.field", nonDocValuesField), + assertTrue( + h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField); + assertQEx( + "Expecting Exception", + "Can't facet on a PointField without docValues", + req( + "q", + "*:*", + "fl", + "id, " + nonDocValuesField, + "facet", + "true", + "facet.field", + nonDocValuesField), SolrException.ErrorCode.BAD_REQUEST); } - + private void doTestIntPointFunctionQuery(String field) throws Exception { assertTrue(h.getCore().getLatestSchema().getField(field).getType() instanceof PointField); int numVals = 10 * RANDOM_MULTIPLIER; List values = getRandomInts(numVals, false); - String assertNumFound = "//*[@numFound='" + numVals + "']"; + String assertNumFound = "//*[@numFound='" + numVals + "']"; String[] idAscXpathChecks = new String[numVals + 1]; String[] idAscNegXpathChecks = new String[numVals + 1]; idAscXpathChecks[0] = assertNumFound; idAscNegXpathChecks[0] = assertNumFound; - for (int i = 0 ; i < values.size() ; ++i) { - assertU(adoc("id", Character.valueOf((char)('A' + i)).toString(), field, String.valueOf(values.get(i)))); + for (int i = 0; i < values.size(); ++i) { + assertU( + adoc( + "id", + Character.valueOf((char) ('A' + i)).toString(), + field, + String.valueOf(values.get(i)))); // reminder: xpath array indexes start at 1 - idAscXpathChecks[i + 1] = "//result/doc[" + (1 + i) + "]/int[@name='field(" + field + ")'][.='" + values.get(i) + "']"; - idAscNegXpathChecks[i + 1] = "//result/doc[" + (1 + i) + "]/float[@name='product(-1," + field + ")'][.='" - + (-1.0f * (float)values.get(i)) + "']"; + idAscXpathChecks[i + 1] = + "//result/doc[" + + (1 + i) + + "]/int[@name='field(" + + field + + ")'][.='" + + values.get(i) + + "']"; + idAscNegXpathChecks[i + 1] = + "//result/doc[" + + (1 + i) + + "]/float[@name='product(-1," + + field + + ")'][.='" + + (-1.0f * (float) values.get(i)) + + "']"; } assertU(commit()); - assertQ(req("q", "*:*", "fl", "id, " + field + ", field(" + field + ")", "rows", String.valueOf(numVals), "sort", "id asc"), + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + field + ", field(" + field + ")", + "rows", + String.valueOf(numVals), + "sort", + "id asc"), idAscXpathChecks); - assertQ(req("q", "*:*", "fl", "id, " + field + ", product(-1," + field + ")", "rows", String.valueOf(numVals), "sort", "id asc"), + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + field + ", product(-1," + field + ")", + "rows", + String.valueOf(numVals), + "sort", + "id asc"), idAscNegXpathChecks); - List> ascNegPosVals - = toAscendingPosVals(values.stream().map(v -> -v).collect(Collectors.toList()), true); + List> ascNegPosVals = + toAscendingPosVals(values.stream().map(v -> -v).collect(Collectors.toList()), true); String[] ascNegXpathChecks = new String[numVals + 1]; ascNegXpathChecks[0] = assertNumFound; - for (int i = 0 ; i < ascNegPosVals.size() ; ++i) { + for (int i = 0; i < ascNegPosVals.size(); ++i) { PosVal posVal = ascNegPosVals.get(i); - ascNegXpathChecks[i + 1] - = "//result/doc[" + (1 + i) + "]/int[@name='" + field + "'][.='" + values.get(posVal.pos) + "']"; - } - assertQ(req("q", "*:*", "fl", "id, " + field, "rows", String.valueOf(numVals), "sort", "product(-1," + 
field + ") asc"), + ascNegXpathChecks[i + 1] = + "//result/doc[" + + (1 + i) + + "]/int[@name='" + + field + + "'][.='" + + values.get(posVal.pos) + + "']"; + } + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + field, + "rows", + String.valueOf(numVals), + "sort", + "product(-1," + field + ") asc"), ascNegXpathChecks); clearIndex(); @@ -2563,73 +3818,136 @@ private void doTestLongPointFunctionQuery(String field) throws Exception { String[] idAscNegXpathChecks = new String[numVals + 1]; idAscXpathChecks[0] = assertNumFound; idAscNegXpathChecks[0] = assertNumFound; - for (int i = 0 ; i < values.size() ; ++i) { - assertU(adoc("id", Character.valueOf((char)('A' + i)).toString(), field, String.valueOf(values.get(i)))); + for (int i = 0; i < values.size(); ++i) { + assertU( + adoc( + "id", + Character.valueOf((char) ('A' + i)).toString(), + field, + String.valueOf(values.get(i)))); // reminder: xpath array indexes start at 1 - idAscXpathChecks[i + 1] = "//result/doc[" + (1 + i) + "]/long[@name='field(" + field + ")'][.='" + values.get(i) + "']"; - idAscNegXpathChecks[i + 1] = "//result/doc[" + (1 + i) + "]/float[@name='product(-1," + field + ")'][.='" - + (-1.0f * (float)values.get(i)) + "']"; + idAscXpathChecks[i + 1] = + "//result/doc[" + + (1 + i) + + "]/long[@name='field(" + + field + + ")'][.='" + + values.get(i) + + "']"; + idAscNegXpathChecks[i + 1] = + "//result/doc[" + + (1 + i) + + "]/float[@name='product(-1," + + field + + ")'][.='" + + (-1.0f * (float) values.get(i)) + + "']"; } assertU(commit()); - assertQ(req("q", "*:*", "fl", "id, " + field + ", field(" + field + ")", "rows", String.valueOf(numVals), "sort", "id asc"), + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + field + ", field(" + field + ")", + "rows", + String.valueOf(numVals), + "sort", + "id asc"), idAscXpathChecks); - assertQ(req("q", "*:*", "fl", "id, " + field + ", product(-1," + field + ")", "rows", String.valueOf(numVals), "sort", "id asc"), + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + field + ", product(-1," + field + ")", + "rows", + String.valueOf(numVals), + "sort", + "id asc"), idAscNegXpathChecks); - List> ascNegPosVals - = toAscendingPosVals(values.stream().map(v -> -v).collect(Collectors.toList()), true); + List> ascNegPosVals = + toAscendingPosVals(values.stream().map(v -> -v).collect(Collectors.toList()), true); String[] ascNegXpathChecks = new String[numVals + 1]; ascNegXpathChecks[0] = assertNumFound; - for (int i = 0 ; i < ascNegPosVals.size() ; ++i) { + for (int i = 0; i < ascNegPosVals.size(); ++i) { PosVal posVal = ascNegPosVals.get(i); - ascNegXpathChecks[i + 1] - = "//result/doc[" + (1 + i) + "]/long[@name='" + field + "'][.='" + values.get(posVal.pos) + "']"; - } - assertQ(req("q", "*:*", "fl", "id, " + field, "rows", String.valueOf(numVals), "sort", "product(-1," + field + ") asc"), + ascNegXpathChecks[i + 1] = + "//result/doc[" + + (1 + i) + + "]/long[@name='" + + field + + "'][.='" + + values.get(posVal.pos) + + "']"; + } + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + field, + "rows", + String.valueOf(numVals), + "sort", + "product(-1," + field + ") asc"), ascNegXpathChecks); clearIndex(); assertU(commit()); } - /** - * Checks that the specified field can not be used as a value source, even if there are documents + /** + * Checks that the specified field can not be used as a value source, even if there are documents * with (all) the specified values in the index. 
* * @param field the field name to try and sort on * @param errSubStr substring to look for in the error msg - * @param values one or more values to put into the doc(s) in the index - may be more then one for multivalued fields + * @param values one or more values to put into the doc(s) in the index - may be more than one for + * multivalued fields */ - private void doTestPointFieldFunctionQueryError(String field, String errSubStr, String...values) throws Exception { + private void doTestPointFieldFunctionQueryError(String field, String errSubStr, String... values) + throws Exception { final int numDocs = atLeast(random(), 10); for (int i = 0; i < numDocs; i++) { SolrInputDocument doc = sdoc("id", String.valueOf(i)); - for (String v: values) { + for (String v : values) { doc.addField(field, v); } assertU(adoc(doc)); } - assertQEx("Should not be able to use field in function: " + field, errSubStr, - req("q", "*:*", "fl", "id", "fq", "{!frange l=0 h=100}product(-1, " + field + ")"), - SolrException.ErrorCode.BAD_REQUEST); - + assertQEx( + "Should not be able to use field in function: " + field, + errSubStr, + req("q", "*:*", "fl", "id", "fq", "{!frange l=0 h=100}product(-1, " + field + ")"), + SolrException.ErrorCode.BAD_REQUEST); + clearIndex(); assertU(commit()); - + // empty index should (also) give same error - assertQEx("Should not be able to use field in function: " + field, errSubStr, - req("q", "*:*", "fl", "id", "fq", "{!frange l=0 h=100}product(-1, " + field + ")"), - SolrException.ErrorCode.BAD_REQUEST); - - } - - - private void doTestPointStats(String field, String dvField, String[] numbers, double min, double max, int count, int missing, double delta) { - String minMin = String.valueOf(min - Math.abs(delta*min)); - String maxMin = String.valueOf(min + Math.abs(delta*min)); - String minMax = String.valueOf(max - Math.abs(delta*max)); - String maxMax = String.valueOf(max + Math.abs(delta*max)); + assertQEx( + "Should not be able to use field in function: " + field, + errSubStr, + req("q", "*:*", "fl", "id", "fq", "{!frange l=0 h=100}product(-1, " + field + ")"), + SolrException.ErrorCode.BAD_REQUEST); + } + + private void doTestPointStats( + String field, + String dvField, + String[] numbers, + double min, + double max, + int count, + int missing, + double delta) { + String minMin = String.valueOf(min - Math.abs(delta * min)); + String maxMin = String.valueOf(min + Math.abs(delta * min)); + String minMax = String.valueOf(max - Math.abs(delta * max)); + String maxMax = String.valueOf(max + Math.abs(delta * max)); for (int i = 0; i < numbers.length; i++) { assertU(adoc("id", String.valueOf(i), dvField, numbers[i], field, numbers[i])); } @@ -2637,81 +3955,128 @@ private void doTestPointStats(String field, String dvField, String[] numbers, do assertU(commit()); assertTrue(h.getCore().getLatestSchema().getField(dvField).hasDocValues()); assertTrue(h.getCore().getLatestSchema().getField(dvField).getType() instanceof PointField); - assertQ(req("q", "*:*", "fl", "id, " + dvField, "stats", "true", "stats.field", dvField), + assertQ( + req("q", "*:*", "fl", "id, " + dvField, "stats", "true", "stats.field", dvField), "//*[@numFound='" + (numbers.length + 1) + "']", - "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + dvField+ "']/double[@name='min'][.>=" + minMin + "]", - "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + dvField+ "']/double[@name='min'][.<=" + maxMin+ "]", - "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + dvField+
"']/double[@name='max'][.>=" + minMax + "]", - "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + dvField+ "']/double[@name='max'][.<=" + maxMax + "]", - "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + dvField+ "']/long[@name='count'][.='" + count + "']", - "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + dvField+ "']/long[@name='missing'][.='" + missing + "']"); - + "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + + dvField + + "']/double[@name='min'][.>=" + + minMin + + "]", + "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + + dvField + + "']/double[@name='min'][.<=" + + maxMin + + "]", + "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + + dvField + + "']/double[@name='max'][.>=" + + minMax + + "]", + "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + + dvField + + "']/double[@name='max'][.<=" + + maxMax + + "]", + "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + + dvField + + "']/long[@name='count'][.='" + + count + + "']", + "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + + dvField + + "']/long[@name='missing'][.='" + + missing + + "']"); + assertFalse(h.getCore().getLatestSchema().getField(field).hasDocValues()); assertTrue(h.getCore().getLatestSchema().getField(field).getType() instanceof PointField); - assertQEx("Expecting Exception", - "Can't calculate stats on a PointField without docValues", - req("q", "*:*", "fl", "id, " + field, "stats", "true", "stats.field", field), + assertQEx( + "Expecting Exception", + "Can't calculate stats on a PointField without docValues", + req("q", "*:*", "fl", "id, " + field, "stats", "true", "stats.field", field), SolrException.ErrorCode.BAD_REQUEST); } - - private void doTestPointFieldMultiValuedExactQuery(final String fieldName, final String[] numbers) throws Exception { + private void doTestPointFieldMultiValuedExactQuery(final String fieldName, final String[] numbers) + throws Exception { doTestPointFieldMultiValuedExactQuery(fieldName, numbers, true); } /** * @param fieldName the field to use for indexing and searching against * @param numbers list of 20 values to index in 10 docs (pairwise) - * @param searchable set to true if searches against "field" should succeed, false if field is only stored and searches should always get numFound=0 + * @param searchable set to true if searches against "field" should succeed, false if field is + * only stored and searches should always get numFound=0 */ - private void doTestPointFieldMultiValuedExactQuery(final String fieldName, final String[] numbers, - final boolean searchable) throws Exception { - + private void doTestPointFieldMultiValuedExactQuery( + final String fieldName, final String[] numbers, final boolean searchable) throws Exception { + final String MATCH_ONE = "//*[@numFound='" + (searchable ? "1" : "0") + "']"; final String MATCH_TWO = "//*[@numFound='" + (searchable ? 
"2" : "0") + "']"; - + assert numbers != null && numbers.length == 20; assertTrue(h.getCore().getLatestSchema().getField(fieldName).multiValued()); assertTrue(h.getCore().getLatestSchema().getField(fieldName).getType() instanceof PointField); - for (int i=0; i < 10; i++) { - assertU(adoc("id", String.valueOf(i), fieldName, numbers[i], fieldName, numbers[i+10])); + for (int i = 0; i < 10; i++) { + assertU(adoc("id", String.valueOf(i), fieldName, numbers[i], fieldName, numbers[i + 10])); } assertU(commit()); FieldType type = h.getCore().getLatestSchema().getField(fieldName).getType(); for (int i = 0; i < 20; i++) { if (type instanceof DatePointField) { - assertQ(req("q", fieldName + ":\"" + numbers[i] + "\""), - MATCH_ONE); + assertQ(req("q", fieldName + ":\"" + numbers[i] + "\""), MATCH_ONE); } else { - assertQ(req("q", fieldName + ":" + numbers[i].replace("-", "\\-")), - MATCH_ONE); + assertQ(req("q", fieldName + ":" + numbers[i].replace("-", "\\-")), MATCH_ONE); } } - + for (int i = 0; i < 20; i++) { if (type instanceof DatePointField) { - assertQ(req("q", fieldName + ":\"" + numbers[i] + "\"" + " OR " + fieldName + ":\"" + numbers[(i+1)%10]+"\""), - MATCH_TWO); + assertQ( + req( + "q", + fieldName + + ":\"" + + numbers[i] + + "\"" + + " OR " + + fieldName + + ":\"" + + numbers[(i + 1) % 10] + + "\""), + MATCH_TWO); } else { - assertQ(req("q", fieldName + ":" + numbers[i].replace("-", "\\-") + " OR " + fieldName + ":" + numbers[(i+1)%10].replace("-", "\\-")), - MATCH_TWO); + assertQ( + req( + "q", + fieldName + + ":" + + numbers[i].replace("-", "\\-") + + " OR " + + fieldName + + ":" + + numbers[(i + 1) % 10].replace("-", "\\-")), + MATCH_TWO); } } } - - private void doTestPointFieldMultiValuedReturn(String fieldName, String type, String[] numbers) throws Exception { + + private void doTestPointFieldMultiValuedReturn(String fieldName, String type, String[] numbers) + throws Exception { assert numbers != null && numbers.length == 20; assertTrue(h.getCore().getLatestSchema().getField(fieldName).multiValued()); assertTrue(h.getCore().getLatestSchema().getField(fieldName).getType() instanceof PointField); - for (int i=9; i >= 0; i--) { - assertU(adoc("id", String.valueOf(i), fieldName, numbers[i], fieldName, numbers[i+10])); + for (int i = 9; i >= 0; i--) { + assertU(adoc("id", String.valueOf(i), fieldName, numbers[i], fieldName, numbers[i + 10])); } // Check using RTG before commit if (Boolean.getBoolean("enable.update.log")) { for (int i = 0; i < 10; i++) { - assertQ(req("qt", "/get", "id", String.valueOf(i)), + assertQ( + req("qt", "/get", "id", String.valueOf(i)), "//doc/arr[@name='" + fieldName + "']/" + type + "[.='" + numbers[i] + "']", - "//doc/arr[@name='" + fieldName + "']/" + type + "[.='" + numbers[i+10] + "']", + "//doc/arr[@name='" + fieldName + "']/" + type + "[.='" + numbers[i + 10] + "']", "count(//doc/arr[@name='" + fieldName + "']/" + type + ")=2"); } } @@ -2719,33 +4084,60 @@ private void doTestPointFieldMultiValuedReturn(String fieldName, String type, St assertU(commit()); if (Boolean.getBoolean("enable.update.log")) { for (int i = 0; i < 10; i++) { - assertQ(req("qt", "/get", "id", String.valueOf(i)), + assertQ( + req("qt", "/get", "id", String.valueOf(i)), "//doc/arr[@name='" + fieldName + "']/" + type + "[.='" + numbers[i] + "']", - "//doc/arr[@name='" + fieldName + "']/" + type + "[.='" + numbers[i+10] + "']", + "//doc/arr[@name='" + fieldName + "']/" + type + "[.='" + numbers[i + 10] + "']", "count(//doc/arr[@name='" + fieldName + "']/" + type + ")=2"); } } String[] 
expected = new String[21]; - expected[0] = "//*[@numFound='10']"; + expected[0] = "//*[@numFound='10']"; for (int i = 1; i <= 10; i++) { - // checks for each doc's two values aren't next to eachother in array, but that doesn't matter for correctness - expected[i] = "//result/doc[" + i + "]/arr[@name='" + fieldName + "']/" + type + "[.='" + numbers[i-1] + "']"; - expected[i+10] = "//result/doc[" + i + "]/arr[@name='" + fieldName + "']/" + type + "[.='" + numbers[i + 9] + "']"; - } - assertQ(req("q", "*:*", "fl", "id, " + fieldName, "sort","id asc"), expected); - } - - private void doTestPointFieldMultiValuedRangeQuery(String fieldName, String type, String[] numbers) throws Exception { + // checks for each doc's two values aren't next to each other in array, but that doesn't matter + // for correctness + expected[i] = + "//result/doc[" + + i + + "]/arr[@name='" + + fieldName + + "']/" + + type + + "[.='" + + numbers[i - 1] + + "']"; + expected[i + 10] = + "//result/doc[" + + i + + "]/arr[@name='" + + fieldName + + "']/" + + type + + "[.='" + + numbers[i + 9] + + "']"; + } + assertQ(req("q", "*:*", "fl", "id, " + fieldName, "sort", "id asc"), expected); + } + + private void doTestPointFieldMultiValuedRangeQuery( + String fieldName, String type, String[] numbers) throws Exception { assert numbers != null && numbers.length == 20; SchemaField sf = h.getCore().getLatestSchema().getField(fieldName); assertTrue(sf.multiValued()); assertTrue(sf.getType() instanceof PointField); - for (int i=9; i >= 0; i--) { - assertU(adoc("id", String.valueOf(i), fieldName, numbers[i], fieldName, numbers[i+10])); + for (int i = 9; i >= 0; i--) { + assertU(adoc("id", String.valueOf(i), fieldName, numbers[i], fieldName, numbers[i + 10])); } assertU(commit()); - assertQ(req("q", String.format(Locale.ROOT, "%s:[%s TO %s]", fieldName, numbers[0], numbers[3]), - "fl", "id, " + fieldName, "sort", "id asc"), + assertQ( + req( + "q", + String.format(Locale.ROOT, "%s:[%s TO %s]", fieldName, numbers[0], numbers[3]), + "fl", + "id, " + fieldName, + "sort", + "id asc"), "//*[@numFound='4']", "//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[0] + "']", "//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[2][.='" + numbers[10] + "']", @@ -2755,146 +4147,371 @@ private void doTestPointFieldMultiValuedRangeQuery(String fieldName, String type "//result/doc[3]/arr[@name='" + fieldName + "']/" + type + "[2][.='" + numbers[12] + "']", "//result/doc[4]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[3] + "']", "//result/doc[4]/arr[@name='" + fieldName + "']/" + type + "[2][.='" + numbers[13] + "']"); - - assertQ(req("q", String.format(Locale.ROOT, "%s:{%s TO %s]", fieldName, numbers[0], numbers[3]), - "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req( + "q", + String.format(Locale.ROOT, "%s:{%s TO %s]", fieldName, numbers[0], numbers[3]), + "fl", + "id, " + fieldName, + "sort", + "id asc"), "//*[@numFound='3']", "//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[1] + "']", "//result/doc[2]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[2] + "']", "//result/doc[3]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[3] + "']"); - - assertQ(req("q", String.format(Locale.ROOT, "%s:[%s TO %s}", fieldName, numbers[0], numbers[3]), - "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req( + "q", + String.format(Locale.ROOT, "%s:[%s TO %s}", fieldName, numbers[0], numbers[3]), + "fl", + "id, " + fieldName, + "sort",
"id asc"), "//*[@numFound='3']", "//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[0] + "']", "//result/doc[2]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[1] + "']", "//result/doc[3]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[2] + "']"); - - assertQ(req("q", String.format(Locale.ROOT, "%s:{%s TO %s}", fieldName, numbers[0], numbers[3]), - "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req( + "q", + String.format(Locale.ROOT, "%s:{%s TO %s}", fieldName, numbers[0], numbers[3]), + "fl", + "id, " + fieldName, + "sort", + "id asc"), "//*[@numFound='2']", "//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[1] + "']", "//result/doc[2]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[2] + "']"); - assertQ(req("q", String.format(Locale.ROOT, "%s:{%s TO *}", fieldName, numbers[0]), - "fl", "id, " + fieldName, "sort", "id asc"), + assertQ( + req( + "q", + String.format(Locale.ROOT, "%s:{%s TO *}", fieldName, numbers[0]), + "fl", + "id, " + fieldName, + "sort", + "id asc"), "//*[@numFound='10']", "//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[0] + "']"); - - assertQ(req("q", String.format(Locale.ROOT, "%s:{%s TO *}", fieldName, numbers[10]), - "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req( + "q", + String.format(Locale.ROOT, "%s:{%s TO *}", fieldName, numbers[10]), + "fl", + "id, " + fieldName, + "sort", + "id asc"), "//*[@numFound='9']", "//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[1] + "']"); - - assertQ(req("q", String.format(Locale.ROOT, "%s:{* TO %s}", fieldName, numbers[3]), - "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req( + "q", + String.format(Locale.ROOT, "%s:{* TO %s}", fieldName, numbers[3]), + "fl", + "id, " + fieldName, + "sort", + "id asc"), "//*[@numFound='3']", "//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[0] + "']"); - - assertQ(req("q", String.format(Locale.ROOT, "%s:[* TO %s}", fieldName, numbers[3]), - "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req( + "q", + String.format(Locale.ROOT, "%s:[* TO %s}", fieldName, numbers[3]), + "fl", + "id, " + fieldName, + "sort", + "id asc"), "//*[@numFound='3']", "//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[0] + "']"); - - assertQ(req("q", fieldName + ":[* TO *}", "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req("q", fieldName + ":[* TO *}", "fl", "id, " + fieldName, "sort", "id asc"), "//*[@numFound='10']", "//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[0] + "']", "//result/doc[10]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[9] + "']"); - - assertQ(req("q", String.format(Locale.ROOT, "%s:[%s TO %s] OR %s:[%s TO %s]", fieldName, numbers[0], numbers[1], fieldName, numbers[8], numbers[9]), - "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req( + "q", + String.format( + Locale.ROOT, + "%s:[%s TO %s] OR %s:[%s TO %s]", + fieldName, + numbers[0], + numbers[1], + fieldName, + numbers[8], + numbers[9]), + "fl", + "id, " + fieldName, + "sort", + "id asc"), "//*[@numFound='4']", "//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[0] + "']", "//result/doc[2]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[1] + "']", "//result/doc[3]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[8] + "']", "//result/doc[4]/arr[@name='" + 
fieldName + "']/" + type + "[1][.='" + numbers[9] + "']"); - - assertQ(req("q", String.format(Locale.ROOT, "%s:[%s TO %s] OR %s:[%s TO %s]", fieldName, numbers[0], numbers[0], fieldName, numbers[10], numbers[10]), - "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req( + "q", + String.format( + Locale.ROOT, + "%s:[%s TO %s] OR %s:[%s TO %s]", + fieldName, + numbers[0], + numbers[0], + fieldName, + numbers[10], + numbers[10]), + "fl", + "id, " + fieldName, + "sort", + "id asc"), "//*[@numFound='1']", "//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[0] + "']"); - - if (sf.getType().getNumberType() == NumberType.FLOAT || sf.getType().getNumberType() == NumberType.DOUBLE) { + + if (sf.getType().getNumberType() == NumberType.FLOAT + || sf.getType().getNumberType() == NumberType.DOUBLE) { doTestDoubleFloatRangeLimits(fieldName, sf.getType().getNumberType() == NumberType.DOUBLE); } - } - private void doTestPointFieldMultiValuedFacetField(String nonDocValuesField, String dvFieldName, String[] numbers) throws Exception { + private void doTestPointFieldMultiValuedFacetField( + String nonDocValuesField, String dvFieldName, String[] numbers) throws Exception { assert numbers != null && numbers.length == 20; assertTrue(h.getCore().getLatestSchema().getField(dvFieldName).multiValued()); assertTrue(h.getCore().getLatestSchema().getField(dvFieldName).hasDocValues()); assertTrue(h.getCore().getLatestSchema().getField(dvFieldName).getType() instanceof PointField); - + for (int i = 0; i < 10; i++) { - assertU(adoc("id", String.valueOf(i), dvFieldName, numbers[i], dvFieldName, numbers[i + 10], - nonDocValuesField, numbers[i], nonDocValuesField, numbers[i + 10])); - if (rarely()) { - assertU(commit()); - } + assertU( + adoc( + "id", + String.valueOf(i), + dvFieldName, + numbers[i], + dvFieldName, + numbers[i + 10], + nonDocValuesField, + numbers[i], + nonDocValuesField, + numbers[i + 10])); + if (rarely()) { + assertU(commit()); + } } assertU(commit()); - - assertQ(req("q", "*:*", "fl", "id, " + dvFieldName, "facet", "true", "facet.field", dvFieldName), + + assertQ( + req("q", "*:*", "fl", "id, " + dvFieldName, "facet", "true", "facet.field", dvFieldName), "//*[@numFound='10']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[1] + "'][.='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[2] + "'][.='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[3] + "'][.='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[10] + "'][.='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[11] + "'][.='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[12] + "'][.='1']"); - + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + numbers[1] + + "'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + numbers[2] + + "'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + numbers[3] + + "'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + 
numbers[10] + + "'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + numbers[11] + + "'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + numbers[12] + + "'][.='1']"); + assertU(adoc("id", "10", dvFieldName, numbers[1], nonDocValuesField, numbers[1])); - + assertU(commit()); - assertQ(req("q", "*:*", "fl", "id, " + dvFieldName, "facet", "true", "facet.field", dvFieldName), + assertQ( + req("q", "*:*", "fl", "id, " + dvFieldName, "facet", "true", "facet.field", dvFieldName), "//*[@numFound='11']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[1] + "'][.='2']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[2] + "'][.='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[3] + "'][.='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[10] + "'][.='1']"); - - assertU(adoc("id", "10", dvFieldName, numbers[1], nonDocValuesField, numbers[1], dvFieldName, numbers[1], nonDocValuesField, numbers[1])); - assertU(commit()); - assertQ(req("q", "*:*", "fl", "id, " + dvFieldName, "facet", "true", "facet.field", dvFieldName, "facet.missing", "true"), + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + numbers[1] + + "'][.='2']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + numbers[2] + + "'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + numbers[3] + + "'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + numbers[10] + + "'][.='1']"); + + assertU( + adoc( + "id", + "10", + dvFieldName, + numbers[1], + nonDocValuesField, + numbers[1], + dvFieldName, + numbers[1], + nonDocValuesField, + numbers[1])); + assertU(commit()); + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + dvFieldName, + "facet", + "true", + "facet.field", + dvFieldName, + "facet.missing", + "true"), "//*[@numFound='11']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[1] + "'][.='2']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[2] + "'][.='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[3] + "'][.='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[10] + "'][.='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[not(@name)][.='0']" - ); - + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + numbers[1] + + "'][.='2']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + numbers[2] + + "'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + numbers[3] + + "'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + numbers[10] 
+ + "'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[not(@name)][.='0']"); + assertU(adoc("id", "10")); // add missing values assertU(commit()); - assertQ(req("q", "*:*", "fl", "id, " + dvFieldName, "facet", "true", "facet.field", dvFieldName, "facet.missing", "true"), + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + dvFieldName, + "facet", + "true", + "facet.field", + dvFieldName, + "facet.missing", + "true"), "//*[@numFound='11']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[1] + "'][.='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[2] + "'][.='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[3] + "'][.='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[10] + "'][.='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[not(@name)][.='1']" - ); - - assertQ(req("q", "*:*", "fl", "id, " + dvFieldName, "facet", "true", "facet.field", dvFieldName, "facet.mincount", "3"), + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + numbers[1] + + "'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + numbers[2] + + "'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + numbers[3] + + "'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + numbers[10] + + "'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[not(@name)][.='1']"); + + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + dvFieldName, + "facet", + "true", + "facet.field", + dvFieldName, + "facet.mincount", + "3"), "//*[@numFound='11']", - "count(//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int)=0"); - - assertQ(req("q", "id:0", "fl", "id, " + dvFieldName, "facet", "true", "facet.field", dvFieldName), + "count(//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int)=0"); + + assertQ( + req("q", "id:0", "fl", "id, " + dvFieldName, "facet", "true", "facet.field", dvFieldName), "//*[@numFound='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[0] + "'][.='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + numbers[10] + "'][.='1']", - "count(//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int)=2"); - + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + numbers[0] + + "'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + numbers[10] + + "'][.='1']", + "count(//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int)=2"); + assertFalse(h.getCore().getLatestSchema().getField(nonDocValuesField).hasDocValues()); - assertTrue(h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField); - assertQEx("Expecting Exception", - 
"Can't facet on a PointField without docValues", - req("q", "*:*", "fl", "id, " + nonDocValuesField, "facet", "true", "facet.field", nonDocValuesField), + assertTrue( + h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField); + assertQEx( + "Expecting Exception", + "Can't facet on a PointField without docValues", + req( + "q", + "*:*", + "fl", + "id, " + nonDocValuesField, + "facet", + "true", + "facet.field", + nonDocValuesField), SolrException.ErrorCode.BAD_REQUEST); clearIndex(); assertU(commit()); - + String smaller, larger; try { if (Long.parseLong(numbers[1]) < Long.parseLong(numbers[2])) { @@ -2914,7 +4531,8 @@ private void doTestPointFieldMultiValuedFacetField(String nonDocValuesField, Str larger = numbers[1]; } } catch (NumberFormatException e2) { - if (DateMathParser.parseMath(null, numbers[1]).getTime() < DateMathParser.parseMath(null, numbers[2]).getTime()) { + if (DateMathParser.parseMath(null, numbers[1]).getTime() + < DateMathParser.parseMath(null, numbers[2]).getTime()) { smaller = numbers[1]; larger = numbers[2]; } else { @@ -2923,41 +4541,86 @@ private void doTestPointFieldMultiValuedFacetField(String nonDocValuesField, Str } } } - + assertU(adoc("id", "1", dvFieldName, smaller, dvFieldName, larger)); assertU(adoc("id", "2", dvFieldName, larger)); assertU(commit()); - - assertQ(req("q", "*:*", "fl", "id, " + dvFieldName, "facet", "true", "facet.field", dvFieldName), + + assertQ( + req("q", "*:*", "fl", "id, " + dvFieldName, "facet", "true", "facet.field", dvFieldName), "//*[@numFound='2']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + larger + "'][.='2']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + smaller + "'][.='1']", - "count(//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int)=2"); - - assertQ(req("q", "*:*", "fl", "id, " + dvFieldName, "facet", "true", "facet.field", dvFieldName, "facet.sort", "index"), + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + larger + + "'][.='2']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + smaller + + "'][.='1']", + "count(//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int)=2"); + + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + dvFieldName, + "facet", + "true", + "facet.field", + dvFieldName, + "facet.sort", + "index"), "//*[@numFound='2']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='" + smaller +"'][.='1']", - "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int[@name='"+ larger + "'][.='2']", - "count(//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + dvFieldName +"']/int)=2"); - + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + smaller + + "'][.='1']", + "//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int[@name='" + + larger + + "'][.='2']", + "count(//lst[@name='facet_counts']/lst[@name='facet_fields']/lst[@name='" + + dvFieldName + + "']/int)=2"); + clearIndex(); assertU(commit()); - } - private void doTestPointMultiValuedFunctionQuery(String nonDocValuesField, String docValuesField, String type, String[] numbers) throws Exception { + private void 
doTestPointMultiValuedFunctionQuery( + String nonDocValuesField, String docValuesField, String type, String[] numbers) + throws Exception { assert numbers != null && numbers.length == 20; for (int i = 0; i < 10; i++) { - assertU(adoc("id", String.valueOf(i), docValuesField, numbers[i], docValuesField, numbers[i+10], - nonDocValuesField, numbers[i], nonDocValuesField, numbers[i+10])); + assertU( + adoc( + "id", + String.valueOf(i), + docValuesField, + numbers[i], + docValuesField, + numbers[i + 10], + nonDocValuesField, + numbers[i], + nonDocValuesField, + numbers[i + 10])); } assertU(commit()); assertTrue(h.getCore().getLatestSchema().getField(docValuesField).hasDocValues()); assertTrue(h.getCore().getLatestSchema().getField(docValuesField).multiValued()); - assertTrue(h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField); + assertTrue( + h.getCore().getLatestSchema().getField(docValuesField).getType() instanceof PointField); String function = "field(" + docValuesField + ", min)"; - - assertQ(req("q", "*:*", "fl", "id, " + docValuesField, "sort", function + " desc"), + + assertQ( + req("q", "*:*", "fl", "id, " + docValuesField, "sort", function + " desc"), "//*[@numFound='10']", "//result/doc[1]/str[@name='id'][.='9']", "//result/doc[2]/str[@name='id'][.='8']", @@ -2966,101 +4629,108 @@ private void doTestPointMultiValuedFunctionQuery(String nonDocValuesField, Strin assertFalse(h.getCore().getLatestSchema().getField(nonDocValuesField).hasDocValues()); assertTrue(h.getCore().getLatestSchema().getField(nonDocValuesField).multiValued()); - assertTrue(h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField); + assertTrue( + h.getCore().getLatestSchema().getField(nonDocValuesField).getType() instanceof PointField); function = "field(" + nonDocValuesField + ",min)"; - - assertQEx("Expecting Exception", - "sort param could not be parsed as a query", - req("q", "*:*", "fl", "id", "sort", function + " desc"), + + assertQEx( + "Expecting Exception", + "sort param could not be parsed as a query", + req("q", "*:*", "fl", "id", "sort", function + " desc"), SolrException.ErrorCode.BAD_REQUEST); - - assertQEx("Expecting Exception", - "docValues='true' is required to select 'min' value from multivalued field (" + nonDocValuesField + ") at query time", - req("q", "*:*", "fl", "id, " + function), + + assertQEx( + "Expecting Exception", + "docValues='true' is required to select 'min' value from multivalued field (" + + nonDocValuesField + + ") at query time", + req("q", "*:*", "fl", "id, " + function), SolrException.ErrorCode.BAD_REQUEST); - + function = "field(" + docValuesField + ",foo)"; - assertQEx("Expecting Exception", - "Multi-Valued field selector 'foo' not supported", - req("q", "*:*", "fl", "id, " + function), + assertQEx( + "Expecting Exception", + "Multi-Valued field selector 'foo' not supported", + req("q", "*:*", "fl", "id, " + function), SolrException.ErrorCode.BAD_REQUEST); } - private void doTestMultiValuedPointFieldsAtomicUpdates(String field, String type, String[] values) throws Exception { + private void doTestMultiValuedPointFieldsAtomicUpdates(String field, String type, String[] values) + throws Exception { assertEquals(3, values.length); assertU(adoc(sdoc("id", "1", field, String.valueOf(values[0])))); assertU(commit()); - - assertQ(req("q", "id:1"), + + assertQ( + req("q", "id:1"), "//result/doc[1]/arr[@name='" + field + "']/" + type + "[.='" + values[0] + "']", "count(//result/doc[1]/arr[@name='" + field + "']/" + 
type + ")=1"); assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("add", values[1])))); assertU(commit()); - assertQ(req("q", "id:1"), + assertQ( + req("q", "id:1"), "//result/doc[1]/arr[@name='" + field + "']/" + type + "[.='" + values[0] + "']", "//result/doc[1]/arr[@name='" + field + "']/" + type + "[.='" + values[1] + "']", "count(//result/doc[1]/arr[@name='" + field + "']/" + type + ")=2"); - + assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("remove", values[0])))); assertU(commit()); - - assertQ(req("q", "id:1"), + + assertQ( + req("q", "id:1"), "//result/doc[1]/arr[@name='" + field + "']/" + type + "[.='" + values[1] + "']", "count(//result/doc[1]/arr[@name='" + field + "']/" + type + ")=1"); - + assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("set", Arrays.asList(values))))); assertU(commit()); - - assertQ(req("q", "id:1"), + + assertQ( + req("q", "id:1"), "//result/doc[1]/arr[@name='" + field + "']/" + type + "[.='" + values[0] + "']", "//result/doc[1]/arr[@name='" + field + "']/" + type + "[.='" + values[1] + "']", "//result/doc[1]/arr[@name='" + field + "']/" + type + "[.='" + values[2] + "']", "count(//result/doc[1]/arr[@name='" + field + "']/" + type + ")=3"); - + assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("removeregex", ".*")))); assertU(commit()); - - assertQ(req("q", "id:1"), - "count(//result/doc[1]/arr[@name='" + field + "']/" + type + ")=0"); - + + assertQ(req("q", "id:1"), "count(//result/doc[1]/arr[@name='" + field + "']/" + type + ")=0"); } private void doTestIntPointFieldsAtomicUpdates(String field) throws Exception { int number1 = random().nextInt(); int number2; long inc1; - for ( ; ; ) { + for (; ; ) { number2 = random().nextInt(); inc1 = number2 - number1; - if (Math.abs(inc1) < (long)Integer.MAX_VALUE) { + if (Math.abs(inc1) < (long) Integer.MAX_VALUE) { break; } } assertU(adoc(sdoc("id", "1", field, String.valueOf(number1)))); assertU(commit()); - assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("inc", (int)inc1)))); + assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("inc", (int) inc1)))); assertU(commit()); - assertQ(req("q", "id:1"), - "//result/doc[1]/int[@name='" + field + "'][.='" + number2 + "']"); + assertQ(req("q", "id:1"), "//result/doc[1]/int[@name='" + field + "'][.='" + number2 + "']"); int number3 = random().nextInt(); assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("set", number3)))); assertU(commit()); - - assertQ(req("q", "id:1"), - "//result/doc[1]/int[@name='" + field + "'][.='" + number3 + "']"); + + assertQ(req("q", "id:1"), "//result/doc[1]/int[@name='" + field + "'][.='" + number3 + "']"); } private void doTestLongPointFieldsAtomicUpdates(String field) throws Exception { long number1 = random().nextLong(); long number2; BigInteger inc1; - for ( ; ; ) { + for (; ; ) { number2 = random().nextLong(); inc1 = BigInteger.valueOf(number2).subtract(BigInteger.valueOf(number1)); if (inc1.abs().compareTo(BigInteger.valueOf(Long.MAX_VALUE)) <= 0) { @@ -3073,39 +4743,46 @@ private void doTestLongPointFieldsAtomicUpdates(String field) throws Exception { assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("inc", inc1.longValueExact())))); assertU(commit()); - assertQ(req("q", "id:1"), - "//result/doc[1]/long[@name='" + field + "'][.='" + number2 + "']"); + assertQ(req("q", "id:1"), "//result/doc[1]/long[@name='" + field + "'][.='" + number2 + "']"); long number3 = random().nextLong(); assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("set", number3)))); assertU(commit()); - assertQ(req("q", "id:1"), - 
"//result/doc[1]/long[@name='" + field + "'][.='" + number3 + "']"); + assertQ(req("q", "id:1"), "//result/doc[1]/long[@name='" + field + "'][.='" + number3 + "']"); } - private void doTestFloatPointFieldExactQuery(final String field, boolean testDouble) throws Exception { + private void doTestFloatPointFieldExactQuery(final String field, boolean testDouble) + throws Exception { doTestFloatPointFieldExactQuery(field, true, testDouble); } /** * @param field the field to use for indexing and searching against - * @param searchable set to true if searches against "field" should succeed, false if field is only stored and searches should always get numFound=0 + * @param searchable set to true if searches against "field" should succeed, false if field is + * only stored and searches should always get numFound=0 */ - private void doTestFloatPointFieldExactQuery(final String field, final boolean searchable, final boolean testDouble) - throws Exception { + private void doTestFloatPointFieldExactQuery( + final String field, final boolean searchable, final boolean testDouble) throws Exception { int numValues = 10 * RANDOM_MULTIPLIER; - Map randCount = new HashMap<>(numValues); - String[] rand = testDouble ? toStringArray(getRandomDoubles(numValues, false)) - : toStringArray(getRandomFloats(numValues, false)); - for (int i = 0 ; i < numValues ; i++) { + Map randCount = new HashMap<>(numValues); + String[] rand = + testDouble + ? toStringArray(getRandomDoubles(numValues, false)) + : toStringArray(getRandomFloats(numValues, false)); + for (int i = 0; i < numValues; i++) { randCount.merge(rand[i], 1, (a, b) -> a + b); // count unique values assertU(adoc("id", String.valueOf(i), field, rand[i])); } assertU(commit()); - for (int i = 0 ; i < numValues ; i++) { - assertQ(req("q", field + ":" + (rand[i].startsWith("-") ? "\\" : "") + rand[i], - "fl", "id," + field), getTestString(searchable, randCount.get(rand[i]))); + for (int i = 0; i < numValues; i++) { + assertQ( + req( + "q", + field + ":" + (rand[i].startsWith("-") ? 
"\\" : "") + rand[i], + "fl", + "id," + field), + getTestString(searchable, randCount.get(rand[i]))); } StringBuilder builder = new StringBuilder(); @@ -3118,7 +4795,9 @@ private void doTestFloatPointFieldExactQuery(final String field, final boolean s } builder.append(value); } - assertQ(req("debug", "true", "q", field + ":(" + builder.toString() + ")"), getTestString(searchable, numValues)); + assertQ( + req("debug", "true", "q", field + ":(" + builder.toString() + ")"), + getTestString(searchable, numValues)); clearIndex(); assertU(commit()); @@ -3131,19 +4810,20 @@ private void doTestFloatPointFieldExactQuery(final String field, final boolean s * @param field name of field to sort on * @param values list of values in ascending order */ - private > void doTestPointFieldSort(String field, List values) throws Exception { + private > void doTestPointFieldSort(String field, List values) + throws Exception { assert values != null && 2 <= values.size(); - + final List docs = new ArrayList<>(values.size()); final String[] ascXpathChecks = new String[values.size() + 1]; final String[] descXpathChecks = new String[values.size() + 1]; ascXpathChecks[values.size()] = "//*[@numFound='" + values.size() + "']"; descXpathChecks[values.size()] = "//*[@numFound='" + values.size() + "']"; - + boolean missingFirst = field.endsWith("_sml") == false; - + List> ascendingPosVals = toAscendingPosVals(values, missingFirst); - for (int i = ascendingPosVals.size() - 1 ; i >= 0 ; --i) { + for (int i = ascendingPosVals.size() - 1; i >= 0; --i) { T value = ascendingPosVals.get(i).val; if (value == null) { docs.add(sdoc("id", String.valueOf(i))); // null => missing value @@ -3151,14 +4831,16 @@ private > void doTestPointFieldSort(String field, List> descendingPosVals = toDescendingPosVals - (ascendingPosVals.stream().map(pv->pv.val).collect(Collectors.toList()), missingFirst); - for (int i = descendingPosVals.size() - 1 ; i >= 0 ; --i) { - descXpathChecks[i]= "//result/doc[" + (i + 1) + "]/str[@name='id'][.='" + descendingPosVals.get(i).pos + "']"; + List> descendingPosVals = + toDescendingPosVals( + ascendingPosVals.stream().map(pv -> pv.val).collect(Collectors.toList()), missingFirst); + for (int i = descendingPosVals.size() - 1; i >= 0; --i) { + descXpathChecks[i] = + "//result/doc[" + (i + 1) + "]/str[@name='id'][.='" + descendingPosVals.get(i).pos + "']"; } - + // ensure doc add order doesn't affect results Collections.shuffle(docs, random()); for (SolrInputDocument doc : docs) { @@ -3166,110 +4848,124 @@ private > void doTestPointFieldSort(String field, List= 0; i--) { assertU(adoc("id", String.valueOf(i), fieldName, String.valueOf(i))); } assertU(commit()); - assertQ(req("q", fieldName + ":[0 TO 3]", "fl", "id, " + fieldName, "sort", "id asc"), + assertQ( + req("q", fieldName + ":[0 TO 3]", "fl", "id, " + fieldName, "sort", "id asc"), "//*[@numFound='4']", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='0.0']", "//result/doc[2]/" + type + "[@name='" + fieldName + "'][.='1.0']", "//result/doc[3]/" + type + "[@name='" + fieldName + "'][.='2.0']", "//result/doc[4]/" + type + "[@name='" + fieldName + "'][.='3.0']"); - - assertQ(req("q", fieldName + ":{0 TO 3]", "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req("q", fieldName + ":{0 TO 3]", "fl", "id, " + fieldName, "sort", "id asc"), "//*[@numFound='3']", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='1.0']", "//result/doc[2]/" + type + "[@name='" + fieldName + "'][.='2.0']", "//result/doc[3]/" + type + "[@name='" + fieldName 
+ "'][.='3.0']"); - - assertQ(req("q", fieldName + ":[0 TO 3}", "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req("q", fieldName + ":[0 TO 3}", "fl", "id, " + fieldName, "sort", "id asc"), "//*[@numFound='3']", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='0.0']", "//result/doc[2]/" + type + "[@name='" + fieldName + "'][.='1.0']", "//result/doc[3]/" + type + "[@name='" + fieldName + "'][.='2.0']"); - - assertQ(req("q", fieldName + ":{0 TO 3}", "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req("q", fieldName + ":{0 TO 3}", "fl", "id, " + fieldName, "sort", "id asc"), "//*[@numFound='2']", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='1.0']", "//result/doc[2]/" + type + "[@name='" + fieldName + "'][.='2.0']"); - - assertQ(req("q", fieldName + ":{0 TO *}", "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req("q", fieldName + ":{0 TO *}", "fl", "id, " + fieldName, "sort", "id asc"), "//*[@numFound='9']", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='1.0']"); - - assertQ(req("q", fieldName + ":{* TO 3}", "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req("q", fieldName + ":{* TO 3}", "fl", "id, " + fieldName, "sort", "id asc"), "//*[@numFound='3']", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='0.0']", "//result/doc[2]/" + type + "[@name='" + fieldName + "'][.='1.0']", "//result/doc[3]/" + type + "[@name='" + fieldName + "'][.='2.0']"); - - assertQ(req("q", fieldName + ":[* TO 3}", "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req("q", fieldName + ":[* TO 3}", "fl", "id, " + fieldName, "sort", "id asc"), "//*[@numFound='3']", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='0.0']", "//result/doc[2]/" + type + "[@name='" + fieldName + "'][.='1.0']", "//result/doc[3]/" + type + "[@name='" + fieldName + "'][.='2.0']"); - - assertQ(req("q", fieldName + ":[* TO *}", "fl", "id, " + fieldName, "sort", "id asc"), + + assertQ( + req("q", fieldName + ":[* TO *}", "fl", "id, " + fieldName, "sort", "id asc"), "//*[@numFound='10']", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='0.0']", "//result/doc[10]/" + type + "[@name='" + fieldName + "'][.='9.0']"); - - assertQ(req("q", fieldName + ":[0.9 TO 1.01]", "fl", "id, " + fieldName), + + assertQ( + req("q", fieldName + ":[0.9 TO 1.01]", "fl", "id, " + fieldName), "//*[@numFound='1']", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='1.0']"); - - assertQ(req("q", fieldName + ":{0.9 TO 1.01}", "fl", "id, " + fieldName), + + assertQ( + req("q", fieldName + ":{0.9 TO 1.01}", "fl", "id, " + fieldName), "//*[@numFound='1']", "//result/doc[1]/" + type + "[@name='" + fieldName + "'][.='1.0']"); - + clearIndex(); assertU(commit()); - + String[] arr; if (testDouble) { arr = toAscendingStringArray(getRandomDoubles(10, false), true); @@ -3281,22 +4977,25 @@ private void doTestFloatPointFieldRangeQuery(String fieldName, String type, bool } assertU(commit()); for (int i = 0; i < arr.length; i++) { - assertQ(req("q", fieldName + ":[" + arr[0] + " TO " + arr[i] + "]", "fl", "id, " + fieldName), + assertQ( + req("q", fieldName + ":[" + arr[0] + " TO " + arr[i] + "]", "fl", "id, " + fieldName), "//*[@numFound='" + (i + 1) + "']"); - assertQ(req("q", fieldName + ":{" + arr[0] + " TO " + arr[i] + "}", "fl", "id, " + fieldName), - "//*[@numFound='" + (Math.max(0, i-1)) + "']"); + assertQ( + req("q", fieldName + ":{" + arr[0] + " TO " + arr[i] + "}", "fl", "id, " + fieldName), + "//*[@numFound='" + (Math.max(0, i - 1)) +
"']); } doTestDoubleFloatRangeLimits(fieldName, testDouble); } - + private void doTestDoubleFloatRangeLimits(String fieldName, boolean testDouble) { - // POSITIVE/NEGATIVE_INFINITY toString is the same for Double and Float, it's OK to use this code for both cases + // POSITIVE/NEGATIVE_INFINITY toString is the same for Double and Float, it's OK to use this + // code for both cases String positiveInfinity = String.valueOf(Double.POSITIVE_INFINITY); String negativeInfinity = String.valueOf(Double.NEGATIVE_INFINITY); - String minVal = String.valueOf(testDouble?Double.MIN_VALUE:Float.MIN_VALUE); - String maxVal = String.valueOf(testDouble?Double.MAX_VALUE:Float.MAX_VALUE); + String minVal = String.valueOf(testDouble ? Double.MIN_VALUE : Float.MIN_VALUE); + String maxVal = String.valueOf(testDouble ? Double.MAX_VALUE : Float.MAX_VALUE); String negativeMinVal = "-" + minVal; - String negativeMaxVal = "-" + maxVal; + String negativeMaxVal = "-" + maxVal; clearIndex(); assertU(adoc("id", "1", fieldName, minVal)); assertU(adoc("id", "2", fieldName, maxVal)); @@ -3305,52 +5004,56 @@ private void doTestDoubleFloatRangeLimits(String fieldName, boolean testDouble) assertU(adoc("id", "5", fieldName, negativeMinVal)); assertU(adoc("id", "6", fieldName, negativeMaxVal)); assertU(commit()); - //negative to negative + // negative to negative assertAllInclusiveExclusiveVariations(fieldName, "*", "-1", 2, 2, 2, 2); assertAllInclusiveExclusiveVariations(fieldName, negativeInfinity, "-1", 1, 2, 1, 2); assertAllInclusiveExclusiveVariations(fieldName, negativeMaxVal, negativeMinVal, 0, 1, 1, 2); - //negative to cero + // negative to zero assertAllInclusiveExclusiveVariations(fieldName, "*", "-0.0f", 3, 3, 3, 3); assertAllInclusiveExclusiveVariations(fieldName, negativeInfinity, "-0.0f", 2, 3, 2, 3); assertAllInclusiveExclusiveVariations(fieldName, negativeMinVal, "-0.0f", 0, 1, 0, 1); - + assertAllInclusiveExclusiveVariations(fieldName, "*", "0", 3, 3, 3, 3); assertAllInclusiveExclusiveVariations(fieldName, negativeInfinity, "0", 2, 3, 2, 3); assertAllInclusiveExclusiveVariations(fieldName, negativeMinVal, "0", 0, 1, 0, 1); - //negative to positive + // negative to positive assertAllInclusiveExclusiveVariations(fieldName, "*", "1", 4, 4, 4, 4); assertAllInclusiveExclusiveVariations(fieldName, "-1", "*", 4, 4, 4, 4); assertAllInclusiveExclusiveVariations(fieldName, "-1", "1", 2, 2, 2, 2); assertAllInclusiveExclusiveVariations(fieldName, "*", "*", 6, 6, 6, 6); - + assertAllInclusiveExclusiveVariations(fieldName, "-1", positiveInfinity, 3, 3, 4, 4); assertAllInclusiveExclusiveVariations(fieldName, negativeInfinity, "1", 3, 4, 3, 4); - assertAllInclusiveExclusiveVariations(fieldName, negativeInfinity, positiveInfinity, 4, 5, 5, 6); - + assertAllInclusiveExclusiveVariations( + fieldName, negativeInfinity, positiveInfinity, 4, 5, 5, 6); + assertAllInclusiveExclusiveVariations(fieldName, negativeMinVal, minVal, 0, 1, 1, 2); assertAllInclusiveExclusiveVariations(fieldName, negativeMaxVal, maxVal, 2, 3, 3, 4); - //cero to positive + // zero to positive assertAllInclusiveExclusiveVariations(fieldName, "-0.0f", "*", 3, 3, 3, 3); assertAllInclusiveExclusiveVariations(fieldName, "-0.0f", positiveInfinity, 2, 2, 3, 3); assertAllInclusiveExclusiveVariations(fieldName, "-0.0f", minVal, 0, 0, 1, 1); - + assertAllInclusiveExclusiveVariations(fieldName, "0", "*", 3, 3, 3, 3); assertAllInclusiveExclusiveVariations(fieldName, "0", positiveInfinity, 2, 2, 3, 3); assertAllInclusiveExclusiveVariations(fieldName, "0", minVal, 0, 0, 1,
1); - //positive to positive + // positive to positive assertAllInclusiveExclusiveVariations(fieldName, "1", "*", 2, 2, 2, 2); assertAllInclusiveExclusiveVariations(fieldName, "1", positiveInfinity, 1, 1, 2, 2); assertAllInclusiveExclusiveVariations(fieldName, minVal, maxVal, 0, 1, 1, 2); - + // inverted limits assertAllInclusiveExclusiveVariations(fieldName, "1", "-1", 0, 0, 0, 0); - assertAllInclusiveExclusiveVariations(fieldName, positiveInfinity, negativeInfinity, 0, 0, 0, 0); + assertAllInclusiveExclusiveVariations( + fieldName, positiveInfinity, negativeInfinity, 0, 0, 0, 0); assertAllInclusiveExclusiveVariations(fieldName, minVal, negativeMinVal, 0, 0, 0, 0); - + // MatchNoDocs cases - assertAllInclusiveExclusiveVariations(fieldName, negativeInfinity, negativeInfinity, 0, 0, 0, 1); - assertAllInclusiveExclusiveVariations(fieldName, positiveInfinity, positiveInfinity, 0, 0, 0, 1); - + assertAllInclusiveExclusiveVariations( + fieldName, negativeInfinity, negativeInfinity, 0, 0, 0, 1); + assertAllInclusiveExclusiveVariations( + fieldName, positiveInfinity, positiveInfinity, 0, 0, 0, 1); + clearIndex(); assertU(adoc("id", "1", fieldName, "0.0")); assertU(adoc("id", "2", fieldName, "-0.0")); @@ -3363,19 +5066,26 @@ private void doTestDoubleFloatRangeLimits(String fieldName, boolean testDouble) assertAllInclusiveExclusiveVariations(fieldName, "-0.0f", "0", 0, 1, 1, 2); } - private void assertAllInclusiveExclusiveVariations(String fieldName, String min, String max, + private void assertAllInclusiveExclusiveVariations( + String fieldName, + String min, + String max, int countExclusiveExclusive, int countInclusiveExclusive, int countExclusiveInclusive, int countInclusiveInclusive) { - assertQ(req("q", fieldName + ":{" + min + " TO " + max + "}", "fl", "id, " + fieldName), - "//*[@numFound='" + countExclusiveExclusive +"']"); - assertQ(req("q", fieldName + ":[" + min + " TO " + max + "}", "fl", "id, " + fieldName), - "//*[@numFound='" + countInclusiveExclusive +"']"); - assertQ(req("q", fieldName + ":{" + min + " TO " + max + "]", "fl", "id, " + fieldName), - "//*[@numFound='" + countExclusiveInclusive +"']"); - assertQ(req("q", fieldName + ":[" + min + " TO " + max + "]", "fl", "id, " + fieldName), - "//*[@numFound='" + countInclusiveInclusive +"']"); + assertQ( + req("q", fieldName + ":{" + min + " TO " + max + "}", "fl", "id, " + fieldName), + "//*[@numFound='" + countExclusiveExclusive + "']"); + assertQ( + req("q", fieldName + ":[" + min + " TO " + max + "}", "fl", "id, " + fieldName), + "//*[@numFound='" + countInclusiveExclusive + "']"); + assertQ( + req("q", fieldName + ":{" + min + " TO " + max + "]", "fl", "id, " + fieldName), + "//*[@numFound='" + countExclusiveInclusive + "']"); + assertQ( + req("q", fieldName + ":[" + min + " TO " + max + "]", "fl", "id, " + fieldName), + "//*[@numFound='" + countInclusiveInclusive + "']"); } private void doTestFloatPointFunctionQuery(String field) throws Exception { @@ -3387,29 +5097,80 @@ private void doTestFloatPointFunctionQuery(String field) throws Exception { String[] idAscNegXpathChecks = new String[numVals + 1]; idAscXpathChecks[0] = assertNumFound; idAscNegXpathChecks[0] = assertNumFound; - for (int i = 0 ; i < values.size() ; ++i) { - assertU(adoc("id", Character.valueOf((char)('A' + i)).toString(), field, String.valueOf(values.get(i)))); + for (int i = 0; i < values.size(); ++i) { + assertU( + adoc( + "id", + Character.valueOf((char) ('A' + i)).toString(), + field, + String.valueOf(values.get(i)))); // reminder: xpath array indexes 
start at 1 - idAscXpathChecks[i + 1] = "//result/doc[" + (1 + i) + "]/float[@name='field(" + field + ")'][.='" + values.get(i) + "']"; - idAscNegXpathChecks[i + 1] = "//result/doc[" + (1 + i) + "]/float[@name='product(-1," + field + ")'][.='" - + (-1.0f * values.get(i)) + "']"; + idAscXpathChecks[i + 1] = + "//result/doc[" + + (1 + i) + + "]/float[@name='field(" + + field + + ")'][.='" + + values.get(i) + + "']"; + idAscNegXpathChecks[i + 1] = + "//result/doc[" + + (1 + i) + + "]/float[@name='product(-1," + + field + + ")'][.='" + + (-1.0f * values.get(i)) + + "']"; } assertU(commit()); - assertQ(req("q", "*:*", "fl", "id, " + field + ", field(" + field + ")", "rows", String.valueOf(numVals), "sort", "id asc"), + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + field + ", field(" + field + ")", + "rows", + String.valueOf(numVals), + "sort", + "id asc"), idAscXpathChecks); - assertQ(req("q", "*:*", "fl", "id, " + field + ", product(-1," + field + ")", "rows", String.valueOf(numVals), "sort", "id asc"), + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + field + ", product(-1," + field + ")", + "rows", + String.valueOf(numVals), + "sort", + "id asc"), idAscNegXpathChecks); - List<PosVal<Float>> ascNegPosVals - = toAscendingPosVals(values.stream().map(v -> -v).collect(Collectors.toList()), true); + List<PosVal<Float>> ascNegPosVals = + toAscendingPosVals(values.stream().map(v -> -v).collect(Collectors.toList()), true); String[] ascNegXpathChecks = new String[numVals + 1]; ascNegXpathChecks[0] = assertNumFound; - for (int i = 0 ; i < ascNegPosVals.size() ; ++i) { + for (int i = 0; i < ascNegPosVals.size(); ++i) { PosVal<Float> posVal = ascNegPosVals.get(i); - ascNegXpathChecks[i + 1] - = "//result/doc[" + (1 + i) + "]/float[@name='" + field + "'][.='" + values.get(posVal.pos) + "']"; - } - assertQ(req("q", "*:*", "fl", "id, " + field, "rows", String.valueOf(numVals), "sort", "product(-1," + field + ") asc"), + ascNegXpathChecks[i + 1] = + "//result/doc[" + + (1 + i) + + "]/float[@name='" + + field + + "'][.='" + + values.get(posVal.pos) + + "']"; + } + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + field, + "rows", + String.valueOf(numVals), + "sort", + "product(-1," + field + ") asc"), ascNegXpathChecks); clearIndex(); @@ -3419,43 +5180,100 @@ private void doTestFloatPointFunctionQuery(String field) throws Exception { private void doTestDoublePointFunctionQuery(String field) throws Exception { assertTrue(h.getCore().getLatestSchema().getField(field).getType() instanceof PointField); int numVals = 10 * RANDOM_MULTIPLIER; - // Restrict values to float range; otherwise conversion to float will cause truncation -> undefined results - List<Double> values = getRandomList(numVals, false, () -> { - Float f = Float.NaN; - while (f.isNaN()) { - f = Float.intBitsToFloat(random().nextInt()); - } - return f.doubleValue(); - }); + // Restrict values to float range; otherwise conversion to float will cause truncation -> + // undefined results + List<Double> values = + getRandomList( + numVals, + false, + () -> { + Float f = Float.NaN; + while (f.isNaN()) { + f = Float.intBitsToFloat(random().nextInt()); + } + return f.doubleValue(); + }); String assertNumFound = "//*[@numFound='" + numVals + "']"; String[] idAscXpathChecks = new String[numVals + 1]; String[] idAscNegXpathChecks = new String[numVals + 1]; idAscXpathChecks[0] = assertNumFound; idAscNegXpathChecks[0] = assertNumFound; - for (int i = 0 ; i < values.size() ; ++i) { - assertU(adoc("id", Character.valueOf((char)('A' + i)).toString(), field, String.valueOf(values.get(i)))); + for (int i = 0;
i < values.size(); ++i) { + assertU( + adoc( + "id", + Character.valueOf((char) ('A' + i)).toString(), + field, + String.valueOf(values.get(i)))); // reminder: xpath array indexes start at 1 - idAscXpathChecks[i + 1] = "//result/doc[" + (1 + i) + "]/double[@name='field(" + field + ")'][.='" + values.get(i) + "']"; - idAscNegXpathChecks[i + 1] = "//result/doc[" + (1 + i) + "]/float[@name='product(-1," + field + ")'][.='" - + (-1.0f * values.get(i).floatValue()) + "']"; + idAscXpathChecks[i + 1] = + "//result/doc[" + + (1 + i) + + "]/double[@name='field(" + + field + + ")'][.='" + + values.get(i) + + "']"; + idAscNegXpathChecks[i + 1] = + "//result/doc[" + + (1 + i) + + "]/float[@name='product(-1," + + field + + ")'][.='" + + (-1.0f * values.get(i).floatValue()) + + "']"; } assertU(commit()); - assertQ(req("q", "*:*", "fl", "id, " + field + ", field(" + field + ")", "rows", String.valueOf(numVals), "sort", "id asc"), + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + field + ", field(" + field + ")", + "rows", + String.valueOf(numVals), + "sort", + "id asc"), idAscXpathChecks); - assertQ(req("q", "*:*", "fl", "id, " + field + ", product(-1," + field + ")", "rows", String.valueOf(numVals), "sort", "id asc"), + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + field + ", product(-1," + field + ")", + "rows", + String.valueOf(numVals), + "sort", + "id asc"), idAscNegXpathChecks); // Intentionally use floats here to mimic server-side function sorting - List> ascNegPosVals - = toAscendingPosVals(values.stream().map(v -> -v.floatValue()).collect(Collectors.toList()), true); + List> ascNegPosVals = + toAscendingPosVals( + values.stream().map(v -> -v.floatValue()).collect(Collectors.toList()), true); String[] ascNegXpathChecks = new String[numVals + 1]; ascNegXpathChecks[0] = assertNumFound; - for (int i = 0 ; i < ascNegPosVals.size() ; ++i) { + for (int i = 0; i < ascNegPosVals.size(); ++i) { PosVal posVal = ascNegPosVals.get(i); - ascNegXpathChecks[i + 1] - = "//result/doc[" + (1 + i) + "]/double[@name='" + field + "'][.='" + values.get(posVal.pos) + "']"; - } - assertQ(req("q", "*:*", "fl", "id, " + field, "rows", String.valueOf(numVals), "sort", "product(-1," + field + ") asc"), + ascNegXpathChecks[i + 1] = + "//result/doc[" + + (1 + i) + + "]/double[@name='" + + field + + "'][.='" + + values.get(posVal.pos) + + "']"; + } + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + field, + "rows", + String.valueOf(numVals), + "sort", + "product(-1," + field + ") asc"), ascNegXpathChecks); clearIndex(); @@ -3467,19 +5285,25 @@ private void doTestSetQueries(String fieldName, String[] values, boolean multiVa assertU(adoc("id", String.valueOf(i), fieldName, values[i])); } assertU(commit()); - SchemaField sf = h.getCore().getLatestSchema().getField(fieldName); + SchemaField sf = h.getCore().getLatestSchema().getField(fieldName); assertTrue(sf.getType() instanceof PointField); - + for (int i = 0; i < values.length; i++) { - assertQ(req("q", "{!term f='" + fieldName + "'}" + values[i], "fl", "id," + fieldName), + assertQ( + req("q", "{!term f='" + fieldName + "'}" + values[i], "fl", "id," + fieldName), "//*[@numFound='1']"); } - + for (int i = 0; i < values.length; i++) { - assertQ(req("q", "{!terms f='" + fieldName + "'}" + values[i] + "," + values[(i + 1)%values.length], "fl", "id," + fieldName), + assertQ( + req( + "q", + "{!terms f='" + fieldName + "'}" + values[i] + "," + values[(i + 1) % values.length], + "fl", + "id," + fieldName), "//*[@numFound='2']"); } - + assertTrue(values.length > 
SolrQueryParser.TERMS_QUERY_THRESHOLD); int numTerms = SolrQueryParser.TERMS_QUERY_THRESHOLD + 1; StringBuilder builder = new StringBuilder(fieldName + ":("); @@ -3492,12 +5316,32 @@ private void doTestSetQueries(String fieldName, String[] values, boolean multiVa } builder.append(')'); if (sf.indexed()) { // SolrQueryParser should also be generating a PointInSetQuery if indexed - assertQ(req(CommonParams.DEBUG, CommonParams.QUERY, "q", "*:*", "fq", builder.toString(), "fl", "id," + fieldName), + assertQ( + req( + CommonParams.DEBUG, + CommonParams.QUERY, + "q", + "*:*", + "fq", + builder.toString(), + "fl", + "id," + fieldName), "//*[@numFound='" + numTerms + "']", - "//*[@name='parsed_filter_queries']/str[.='(" + getSetQueryToString(fieldName, values, numTerms) + ")']"); + "//*[@name='parsed_filter_queries']/str[.='(" + + getSetQueryToString(fieldName, values, numTerms) + + ")']"); } else { // Won't use PointInSetQuery if the field is not indexed, but should match the same docs - assertQ(req(CommonParams.DEBUG, CommonParams.QUERY, "q", "*:*", "fq", builder.toString(), "fl", "id," + fieldName), + assertQ( + req( + CommonParams.DEBUG, + CommonParams.QUERY, + "q", + "*:*", + "fq", + builder.toString(), + "fl", + "id," + fieldName), "//*[@numFound='" + numTerms + "']"); } @@ -3505,180 +5349,317 @@ private void doTestSetQueries(String fieldName, String[] values, boolean multiVa clearIndex(); assertU(commit()); for (int i = 0; i < values.length; i++) { - assertU(adoc("id", String.valueOf(i), fieldName, values[i], fieldName, values[(i+1)%values.length])); + assertU( + adoc( + "id", + String.valueOf(i), + fieldName, + values[i], + fieldName, + values[(i + 1) % values.length])); } assertU(commit()); for (int i = 0; i < values.length; i++) { - assertQ(req("q", "{!term f='" + fieldName + "'}" + values[i], "fl", "id," + fieldName), + assertQ( + req("q", "{!term f='" + fieldName + "'}" + values[i], "fl", "id," + fieldName), "//*[@numFound='2']"); } - + for (int i = 0; i < values.length; i++) { - assertQ(req("q", "{!terms f='" + fieldName + "'}" + values[i] + "," + values[(i + 1)%values.length], "fl", "id," + fieldName), + assertQ( + req( + "q", + "{!terms f='" + + fieldName + + "'}" + + values[i] + + "," + + values[(i + 1) % values.length], + "fl", + "id," + fieldName), "//*[@numFound='3']"); } } } - + private String getSetQueryToString(String fieldName, String[] values, int numTerms) { SchemaField sf = h.getCore().getLatestSchema().getField(fieldName); - return sf.getType().getSetQuery(null, sf, Arrays.asList(Arrays.copyOf(values, numTerms))).toString(); + return sf.getType() + .getSetQuery(null, sf, Arrays.asList(Arrays.copyOf(values, numTerms))) + .toString(); } - private void doTestDatePointFieldExactQuery(final String field, final String baseDate) throws Exception { + private void doTestDatePointFieldExactQuery(final String field, final String baseDate) + throws Exception { doTestDatePointFieldExactQuery(field, baseDate, true); } - + /** * @param field the field to use for indexing and searching against * @param baseDate basic value to use for indexing and searching - * @param searchable set to true if searches against "field" should succeed, false if field is only stored and searches should always get numFound=0 + * @param searchable set to true if searches against "field" should succeed, false if field is + * only stored and searches should always get numFound=0 */ - private void doTestDatePointFieldExactQuery(final String field, final String baseDate, final boolean searchable) throws Exception 
{ + private void doTestDatePointFieldExactQuery( + final String field, final String baseDate, final boolean searchable) throws Exception { final String MATCH_ONE = "//*[@numFound='" + (searchable ? "1" : "0") + "']"; final String MATCH_TWO = "//*[@numFound='" + (searchable ? "2" : "0") + "']"; - - for (int i=0; i < 10; i++) { - assertU(adoc("id", String.valueOf(i), field, String.format(Locale.ROOT, "%s+%dMINUTES", baseDate, i+1))); + + for (int i = 0; i < 10; i++) { + assertU( + adoc( + "id", + String.valueOf(i), + field, + String.format(Locale.ROOT, "%s+%dMINUTES", baseDate, i + 1))); } assertU(commit()); for (int i = 0; i < 10; i++) { - String date = String.format(Locale.ROOT, "%s+%dMINUTES", baseDate, i+1); - assertQ(req("q", field + ":\""+date+"\"", "fl", "id, " + field), - MATCH_ONE); + String date = String.format(Locale.ROOT, "%s+%dMINUTES", baseDate, i + 1); + assertQ(req("q", field + ":\"" + date + "\"", "fl", "id, " + field), MATCH_ONE); } for (int i = 0; i < 10; i++) { - String date1 = String.format(Locale.ROOT, "%s+%dMINUTES", baseDate, i+1); - String date2 = String.format(Locale.ROOT, "%s+%dMINUTES", baseDate, ((i+1)%10 + 1)); - assertQ(req("q", field + ":\"" + date1 + "\"" - + " OR " + field + ":\"" + date2 + "\""), - MATCH_TWO); + String date1 = String.format(Locale.ROOT, "%s+%dMINUTES", baseDate, i + 1); + String date2 = String.format(Locale.ROOT, "%s+%dMINUTES", baseDate, ((i + 1) % 10 + 1)); + assertQ( + req("q", field + ":\"" + date1 + "\"" + " OR " + field + ":\"" + date2 + "\""), + MATCH_TWO); } clearIndex(); assertU(commit()); } - + private void doTestDatePointFieldRangeQuery(String fieldName) throws Exception { String baseDate = "1995-12-31T10:59:59Z"; for (int i = 9; i >= 0; i--) { - assertU(adoc("id", String.valueOf(i), fieldName, String.format(Locale.ROOT, "%s+%dHOURS", baseDate, i))); + assertU( + adoc( + "id", + String.valueOf(i), + fieldName, + String.format(Locale.ROOT, "%s+%dHOURS", baseDate, i))); } assertU(commit()); - assertQ(req("q", fieldName + ":" + String.format(Locale.ROOT, "[%s+0HOURS TO %s+3HOURS]", baseDate, baseDate), - "fl", "id, " + fieldName, "sort", "id asc"), + assertQ( + req( + "q", + fieldName + + ":" + + String.format(Locale.ROOT, "[%s+0HOURS TO %s+3HOURS]", baseDate, baseDate), + "fl", + "id, " + fieldName, + "sort", + "id asc"), "//*[@numFound='4']", "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T10:59:59Z']", "//result/doc[2]/date[@name='" + fieldName + "'][.='1995-12-31T11:59:59Z']", "//result/doc[3]/date[@name='" + fieldName + "'][.='1995-12-31T12:59:59Z']", "//result/doc[4]/date[@name='" + fieldName + "'][.='1995-12-31T13:59:59Z']"); - assertQ(req("q", fieldName + ":" + String.format(Locale.ROOT, "{%s+0HOURS TO %s+3HOURS]", baseDate, baseDate), - "fl", "id, " + fieldName, "sort", "id asc"), + assertQ( + req( + "q", + fieldName + + ":" + + String.format(Locale.ROOT, "{%s+0HOURS TO %s+3HOURS]", baseDate, baseDate), + "fl", + "id, " + fieldName, + "sort", + "id asc"), "//*[@numFound='3']", "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T11:59:59Z']", "//result/doc[2]/date[@name='" + fieldName + "'][.='1995-12-31T12:59:59Z']", "//result/doc[3]/date[@name='" + fieldName + "'][.='1995-12-31T13:59:59Z']"); - assertQ(req("q", fieldName + ":"+ String.format(Locale.ROOT, "[%s+0HOURS TO %s+3HOURS}",baseDate,baseDate), - "fl", "id, " + fieldName, "sort", "id asc"), + assertQ( + req( + "q", + fieldName + + ":" + + String.format(Locale.ROOT, "[%s+0HOURS TO %s+3HOURS}", baseDate, baseDate), + "fl", + "id, " + 
fieldName, + "sort", + "id asc"), "//*[@numFound='3']", "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T10:59:59Z']", "//result/doc[2]/date[@name='" + fieldName + "'][.='1995-12-31T11:59:59Z']", "//result/doc[3]/date[@name='" + fieldName + "'][.='1995-12-31T12:59:59Z']"); - assertQ(req("q", fieldName + ":"+ String.format(Locale.ROOT, "{%s+0HOURS TO %s+3HOURS}",baseDate,baseDate), - "fl", "id, " + fieldName, "sort", "id asc"), + assertQ( + req( + "q", + fieldName + + ":" + + String.format(Locale.ROOT, "{%s+0HOURS TO %s+3HOURS}", baseDate, baseDate), + "fl", + "id, " + fieldName, + "sort", + "id asc"), "//*[@numFound='2']", "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T11:59:59Z']", "//result/doc[2]/date[@name='" + fieldName + "'][.='1995-12-31T12:59:59Z']"); - assertQ(req("q", fieldName + ":" + String.format(Locale.ROOT, "{%s+0HOURS TO *}",baseDate), - "fl", "id, " + fieldName, "sort", "id asc"), + assertQ( + req( + "q", + fieldName + ":" + String.format(Locale.ROOT, "{%s+0HOURS TO *}", baseDate), + "fl", + "id, " + fieldName, + "sort", + "id asc"), "//*[@numFound='9']", "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T11:59:59Z']"); - assertQ(req("q", fieldName + ":" + String.format(Locale.ROOT, "{* TO %s+3HOURS}",baseDate), - "fl", "id, " + fieldName, "sort", "id asc"), + assertQ( + req( + "q", + fieldName + ":" + String.format(Locale.ROOT, "{* TO %s+3HOURS}", baseDate), + "fl", + "id, " + fieldName, + "sort", + "id asc"), "//*[@numFound='3']", "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T10:59:59Z']"); - assertQ(req("q", fieldName + ":" + String.format(Locale.ROOT, "[* TO %s+3HOURS}",baseDate), - "fl", "id, " + fieldName, "sort", "id asc"), + assertQ( + req( + "q", + fieldName + ":" + String.format(Locale.ROOT, "[* TO %s+3HOURS}", baseDate), + "fl", + "id, " + fieldName, + "sort", + "id asc"), "//*[@numFound='3']", "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T10:59:59Z']"); - assertQ(req("q", fieldName + ":[* TO *}", "fl", "id, " + fieldName, "sort", "id asc"), + assertQ( + req("q", fieldName + ":[* TO *}", "fl", "id, " + fieldName, "sort", "id asc"), "//*[@numFound='10']", "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T10:59:59Z']", "//result/doc[10]/date[@name='" + fieldName + "'][.='1995-12-31T19:59:59Z']"); - assertQ(req("q", fieldName + ":" + String.format(Locale.ROOT, "[%s+0HOURS TO %s+1HOURS]",baseDate,baseDate) - + " OR " + fieldName + ":" + String.format(Locale.ROOT, "[%s+8HOURS TO %s+9HOURS]",baseDate,baseDate) , - "fl", "id, " + fieldName, "sort", "id asc"), + assertQ( + req( + "q", + fieldName + + ":" + + String.format(Locale.ROOT, "[%s+0HOURS TO %s+1HOURS]", baseDate, baseDate) + + " OR " + + fieldName + + ":" + + String.format(Locale.ROOT, "[%s+8HOURS TO %s+9HOURS]", baseDate, baseDate), + "fl", + "id, " + fieldName, + "sort", + "id asc"), "//*[@numFound='4']", "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T10:59:59Z']", "//result/doc[2]/date[@name='" + fieldName + "'][.='1995-12-31T11:59:59Z']", "//result/doc[3]/date[@name='" + fieldName + "'][.='1995-12-31T18:59:59Z']", "//result/doc[4]/date[@name='" + fieldName + "'][.='1995-12-31T19:59:59Z']"); - assertQ(req("q", fieldName + ":"+String.format(Locale.ROOT, "[%s+0HOURS TO %s+1HOURS]",baseDate,baseDate) - +" AND " + fieldName + ":"+String.format(Locale.ROOT, "[%s+1HOURS TO %s+2HOURS]",baseDate,baseDate) , "fl", "id, " + fieldName), + assertQ( + req( + "q", + fieldName + + ":" + + String.format(Locale.ROOT, 
"[%s+0HOURS TO %s+1HOURS]", baseDate, baseDate) + + " AND " + + fieldName + + ":" + + String.format(Locale.ROOT, "[%s+1HOURS TO %s+2HOURS]", baseDate, baseDate), + "fl", + "id, " + fieldName), "//*[@numFound='1']", "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T11:59:59Z']"); - assertQ(req("q", fieldName + ":"+String.format(Locale.ROOT, "[%s+0HOURS TO %s+1HOURS]",baseDate,baseDate) - +" AND NOT " + fieldName + ":"+String.format(Locale.ROOT, "[%s+1HOURS TO %s+2HOURS]",baseDate,baseDate) , "fl", "id, " + fieldName), + assertQ( + req( + "q", + fieldName + + ":" + + String.format(Locale.ROOT, "[%s+0HOURS TO %s+1HOURS]", baseDate, baseDate) + + " AND NOT " + + fieldName + + ":" + + String.format(Locale.ROOT, "[%s+1HOURS TO %s+2HOURS]", baseDate, baseDate), + "fl", + "id, " + fieldName), "//*[@numFound='1']", "//result/doc[1]/date[@name='" + fieldName + "'][.='1995-12-31T10:59:59Z']"); clearIndex(); assertU(commit()); - + String[] arr = toAscendingStringArray(getRandomInstants(100, false), true); - for (int i = 0 ; i < arr.length ; ++i) { + for (int i = 0; i < arr.length; ++i) { assertU(adoc("id", String.valueOf(i), fieldName, arr[i])); } assertU(commit()); - for (int i = 0 ; i < arr.length ; ++i) { - assertQ(req("q", fieldName + ":[" + arr[0] + " TO " + arr[i] + "]", "fl", "id," + fieldName), + for (int i = 0; i < arr.length; ++i) { + assertQ( + req("q", fieldName + ":[" + arr[0] + " TO " + arr[i] + "]", "fl", "id," + fieldName), "//*[@numFound='" + (i + 1) + "']"); - assertQ(req("q", fieldName + ":{" + arr[0] + " TO " + arr[i] + "}", "fl", "id, " + fieldName), - "//*[@numFound='" + (Math.max(0, i-1)) + "']"); - assertQ(req("q", fieldName + ":[" + arr[0] + " TO " + arr[i] + "] AND " + fieldName + ":\"" + arr[0] + "\"", "fl", "id, " + fieldName), + assertQ( + req("q", fieldName + ":{" + arr[0] + " TO " + arr[i] + "}", "fl", "id, " + fieldName), + "//*[@numFound='" + (Math.max(0, i - 1)) + "']"); + assertQ( + req( + "q", + fieldName + ":[" + arr[0] + " TO " + arr[i] + "] AND " + fieldName + ":\"" + arr[0] + + "\"", + "fl", + "id, " + fieldName), "//*[@numFound='1']"); } } private void doTestDatePointFunctionQuery(String field) { // This method is intentionally not randomized, because sorting by function happens - // at float precision, which causes ms(date) to give the same value for different dates. + // at float precision, which causes ms(date) to give the same value for different dates. 
// See https://issues.apache.org/jira/browse/SOLR-11825 final String baseDate = "1995-01-10T10:59:10Z"; for (int i = 9; i >= 0; i--) { - String date = String.format(Locale.ROOT, "%s+%dSECONDS", baseDate, i+1); + String date = String.format(Locale.ROOT, "%s+%dSECONDS", baseDate, i + 1); assertU(adoc("id", String.valueOf(i), field, date)); } assertU(commit()); assertTrue(h.getCore().getLatestSchema().getField(field).getType() instanceof DatePointField); - assertQ(req("q", "*:*", "fl", "id, " + field, "sort", "product(-1,ms(" + field + "," + baseDate +")) asc"), + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + field, + "sort", + "product(-1,ms(" + field + "," + baseDate + ")) asc"), "//*[@numFound='10']", "//result/doc[1]/date[@name='" + field + "'][.='1995-01-10T10:59:20Z']", "//result/doc[2]/date[@name='" + field + "'][.='1995-01-10T10:59:19Z']", "//result/doc[3]/date[@name='" + field + "'][.='1995-01-10T10:59:18Z']", "//result/doc[10]/date[@name='" + field + "'][.='1995-01-10T10:59:11Z']"); - assertQ(req("q", "*:*", "fl", "id, " + field + ", ms(" + field + ","+baseDate+")", "sort", "id asc"), + assertQ( + req( + "q", + "*:*", + "fl", + "id, " + field + ", ms(" + field + "," + baseDate + ")", + "sort", + "id asc"), "//*[@numFound='10']", "//result/doc[1]/float[@name='ms(" + field + "," + baseDate + ")'][.='1000.0']", "//result/doc[2]/float[@name='ms(" + field + "," + baseDate + ")'][.='2000.0']", "//result/doc[3]/float[@name='ms(" + field + "," + baseDate + ")'][.='3000.0']", "//result/doc[10]/float[@name='ms(" + field + "," + baseDate + ")'][.='10000.0']"); - assertQ(req("q", "*:*", "fl", "id, " + field + ", field(" + field + ")", "sort", "id asc"), + assertQ( + req("q", "*:*", "fl", "id, " + field + ", field(" + field + ")", "sort", "id asc"), "//*[@numFound='10']", "//result/doc[1]/date[@name='field(" + field + ")'][.='1995-01-10T10:59:11Z']", "//result/doc[2]/date[@name='field(" + field + ")'][.='1995-01-10T10:59:12Z']", @@ -3694,16 +5675,32 @@ private void doTestDatePointStats(String field, String dvField, String[] dates) assertU(commit()); assertTrue(h.getCore().getLatestSchema().getField(dvField).hasDocValues()); assertTrue(h.getCore().getLatestSchema().getField(dvField).getType() instanceof PointField); - assertQ(req("q", "*:*", "fl", "id, " + dvField, "stats", "true", "stats.field", dvField), + assertQ( + req("q", "*:*", "fl", "id, " + dvField, "stats", "true", "stats.field", dvField), "//*[@numFound='" + (dates.length + 1) + "']", - "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + dvField+ "']/date[@name='min'][.='" + dates[0] + "']", - "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + dvField+ "']/date[@name='max'][.='" + dates[dates.length-1] + "']", - "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + dvField+ "']/long[@name='count'][.='" + dates.length + "']", - "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + dvField+ "']/long[@name='missing'][.='1']"); + "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + + dvField + + "']/date[@name='min'][.='" + + dates[0] + + "']", + "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + + dvField + + "']/date[@name='max'][.='" + + dates[dates.length - 1] + + "']", + "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + + dvField + + "']/long[@name='count'][.='" + + dates.length + + "']", + "//lst[@name='stats']/lst[@name='stats_fields']/lst[@name='" + + dvField + + "']/long[@name='missing'][.='1']"); 
assertFalse(h.getCore().getLatestSchema().getField(field).hasDocValues()); assertTrue(h.getCore().getLatestSchema().getField(field).getType() instanceof PointField); - assertQEx("Expecting Exception", + assertQEx( + "Expecting Exception", "Can't calculate stats on a PointField without docValues", req("q", "*:*", "fl", "id, " + field, "stats", "true", "stats.field", field), SolrException.ErrorCode.BAD_REQUEST); @@ -3713,7 +5710,7 @@ private void doTestDatePointFieldsAtomicUpdates(String field) throws Exception { long millis1 = random().nextLong() % MAX_DATE_EPOCH_MILLIS; long millis2; DateGapCeiling gap; - for ( ; ; ) { + for (; ; ) { millis2 = random().nextLong() % MAX_DATE_EPOCH_MILLIS; gap = new DateGapCeiling(millis2 - millis1); millis2 = gap.addTo(millis1); // adjust millis2 to the closest +/-UNIT gap @@ -3724,73 +5721,101 @@ private void doTestDatePointFieldsAtomicUpdates(String field) throws Exception { assertU(adoc(sdoc("id", "1", field, date1))); assertU(commit()); - assertQ(req("q", "id:1"), - "//result/doc[1]/date[@name='" + field + "'][.='" + date1 + "']"); + assertQ(req("q", "id:1"), "//result/doc[1]/date[@name='" + field + "'][.='" + date1 + "']"); assertU(adoc(sdoc("id", "1", field, ImmutableMap.of("set", date1 + gap.toString())))); assertU(commit()); - assertQ(req("q", "id:1"), - "//result/doc[1]/date[@name='" + field + "'][.='" + date2 + "']"); + assertQ(req("q", "id:1"), "//result/doc[1]/date[@name='" + field + "'][.='" + date2 + "']"); } private void doTestInternals(String field, String[] values) throws IOException { assertTrue(h.getCore().getLatestSchema().getField(field).getType() instanceof PointField); - for (int i=0; i < 10; i++) { + for (int i = 0; i < 10; i++) { assertU(adoc("id", String.valueOf(i), field, values[i])); } assertU(commit()); SchemaField sf = h.getCore().getLatestSchema().getField(field); boolean ignoredField = !(sf.indexed() || sf.stored() || sf.hasDocValues()); - h.getCore().withSearcher(searcher -> { - DirectoryReader ir = searcher.getIndexReader(); - // our own SlowCompositeReader to check DocValues on disk w/o the UninvertingReader added by SolrIndexSearcher - final LeafReader leafReaderForCheckingDVs = SlowCompositeReaderWrapper.wrap(searcher.getRawReader()); - - if (sf.indexed()) { - assertEquals("Field " + field + " should have point values", 10, PointValues.size(ir, field)); - } else { - assertEquals("Field " + field + " should have no point values", 0, PointValues.size(ir, field)); - } - if (ignoredField) { - assertTrue("Field " + field + " should not have docValues", - DocValues.getSortedNumeric(leafReaderForCheckingDVs, field).nextDoc() == DocIdSetIterator.NO_MORE_DOCS); - assertTrue("Field " + field + " should not have docValues", - DocValues.getNumeric(leafReaderForCheckingDVs, field).nextDoc() == DocIdSetIterator.NO_MORE_DOCS); - assertTrue("Field " + field + " should not have docValues", - DocValues.getSorted(leafReaderForCheckingDVs, field).nextDoc() == DocIdSetIterator.NO_MORE_DOCS); - assertTrue("Field " + field + " should not have docValues", - DocValues.getBinary(leafReaderForCheckingDVs, field).nextDoc() == DocIdSetIterator.NO_MORE_DOCS); - } else { - if (sf.hasDocValues()) { - if (sf.multiValued()) { - assertFalse("Field " + field + " should have docValues", - DocValues.getSortedNumeric(leafReaderForCheckingDVs, field).nextDoc() == DocIdSetIterator.NO_MORE_DOCS); - } else { - assertFalse("Field " + field + " should have docValues", - DocValues.getNumeric(leafReaderForCheckingDVs, field).nextDoc() == 
DocIdSetIterator.NO_MORE_DOCS); - } - } else { - expectThrows(IllegalStateException.class, ()->DocValues.getSortedNumeric(leafReaderForCheckingDVs, field)); - expectThrows(IllegalStateException.class, ()->DocValues.getNumeric(leafReaderForCheckingDVs, field)); - } - expectThrows(IllegalStateException.class, ()->DocValues.getSorted(leafReaderForCheckingDVs, field)); - expectThrows(IllegalStateException.class, ()->DocValues.getBinary(leafReaderForCheckingDVs, field)); - } - for (LeafReaderContext leave:ir.leaves()) { - LeafReader reader = leave.reader(); - for (int i = 0; i < reader.numDocs(); i++) { - Document doc = reader.document(i); - if (sf.stored()) { - assertNotNull("Field " + field + " not found. Doc: " + doc, doc.get(field)); - } else { - assertNull(doc.get(field)); - } - } - } - return null; - }); + h.getCore() + .withSearcher( + searcher -> { + DirectoryReader ir = searcher.getIndexReader(); + // our own SlowCompositeReader to check DocValues on disk w/o the UninvertingReader + // added by SolrIndexSearcher + final LeafReader leafReaderForCheckingDVs = + SlowCompositeReaderWrapper.wrap(searcher.getRawReader()); + + if (sf.indexed()) { + assertEquals( + "Field " + field + " should have point values", + 10, + PointValues.size(ir, field)); + } else { + assertEquals( + "Field " + field + " should have no point values", + 0, + PointValues.size(ir, field)); + } + if (ignoredField) { + assertTrue( + "Field " + field + " should not have docValues", + DocValues.getSortedNumeric(leafReaderForCheckingDVs, field).nextDoc() + == DocIdSetIterator.NO_MORE_DOCS); + assertTrue( + "Field " + field + " should not have docValues", + DocValues.getNumeric(leafReaderForCheckingDVs, field).nextDoc() + == DocIdSetIterator.NO_MORE_DOCS); + assertTrue( + "Field " + field + " should not have docValues", + DocValues.getSorted(leafReaderForCheckingDVs, field).nextDoc() + == DocIdSetIterator.NO_MORE_DOCS); + assertTrue( + "Field " + field + " should not have docValues", + DocValues.getBinary(leafReaderForCheckingDVs, field).nextDoc() + == DocIdSetIterator.NO_MORE_DOCS); + } else { + if (sf.hasDocValues()) { + if (sf.multiValued()) { + assertFalse( + "Field " + field + " should have docValues", + DocValues.getSortedNumeric(leafReaderForCheckingDVs, field).nextDoc() + == DocIdSetIterator.NO_MORE_DOCS); + } else { + assertFalse( + "Field " + field + " should have docValues", + DocValues.getNumeric(leafReaderForCheckingDVs, field).nextDoc() + == DocIdSetIterator.NO_MORE_DOCS); + } + } else { + expectThrows( + IllegalStateException.class, + () -> DocValues.getSortedNumeric(leafReaderForCheckingDVs, field)); + expectThrows( + IllegalStateException.class, + () -> DocValues.getNumeric(leafReaderForCheckingDVs, field)); + } + expectThrows( + IllegalStateException.class, + () -> DocValues.getSorted(leafReaderForCheckingDVs, field)); + expectThrows( + IllegalStateException.class, + () -> DocValues.getBinary(leafReaderForCheckingDVs, field)); + } + for (LeafReaderContext leave : ir.leaves()) { + LeafReader reader = leave.reader(); + for (int i = 0; i < reader.numDocs(); i++) { + Document doc = reader.document(i); + if (sf.stored()) { + assertNotNull("Field " + field + " not found. 
Doc: " + doc, doc.get(field)); + } else { + assertNull(doc.get(field)); + } + } + } + return null; + }); clearIndex(); assertU(commit()); } @@ -3813,25 +5838,27 @@ public void testNonReturnable() throws Exception { doTestReturnNonStored("foo_p_f_ni_dv_ns", true, floats[0]); doTestReturnNonStored("foo_p_f_ni_ns_mv", false, floats); doTestReturnNonStored("foo_p_f_ni_dv_ns_mv", true, floats); - + String[] doubles = toStringArray(getRandomDoubles(2, false)); doTestReturnNonStored("foo_p_d_ni_ns", false, doubles[0]); doTestReturnNonStored("foo_p_d_ni_dv_ns", true, doubles[0]); doTestReturnNonStored("foo_p_d_ni_ns_mv", false, doubles); doTestReturnNonStored("foo_p_d_ni_dv_ns_mv", true, doubles); - String[] dates = new String[] { getRandomDateMaybeWithMath(), getRandomDateMaybeWithMath() }; + String[] dates = new String[] {getRandomDateMaybeWithMath(), getRandomDateMaybeWithMath()}; doTestReturnNonStored("foo_p_dt_ni_ns", false, dates[0]); doTestReturnNonStored("foo_p_dt_ni_dv_ns", true, dates[0]); doTestReturnNonStored("foo_p_dt_ni_ns_mv", false, dates); doTestReturnNonStored("foo_p_dt_ni_dv_ns_mv", true, dates); } - public void doTestReturnNonStored(final String fieldName, boolean shouldReturnFieldIfRequested, final String... values) throws Exception { + public void doTestReturnNonStored( + final String fieldName, boolean shouldReturnFieldIfRequested, final String... values) + throws Exception { final String RETURN_FIELD = "count(//doc/*[@name='" + fieldName + "'])=10"; final String DONT_RETURN_FIELD = "count(//doc/*[@name='" + fieldName + "'])=0"; assertFalse(h.getCore().getLatestSchema().getField(fieldName).stored()); - for (int i=0; i < 10; i++) { + for (int i = 0; i < 10; i++) { SolrInputDocument doc = sdoc("id", String.valueOf(i)); for (String value : values) { doc.addField(fieldName, value); @@ -3839,17 +5866,22 @@ public void doTestReturnNonStored(final String fieldName, boolean shouldReturnFi assertU(adoc(doc)); } assertU(commit()); - assertQ(req("q", "*:*", "rows", "100", "fl", "id," + fieldName), - "//*[@numFound='10']", - "count(//doc)=10", // exactly 10 docs in response - (shouldReturnFieldIfRequested?RETURN_FIELD:DONT_RETURN_FIELD)); // no field in any doc other then 'id' + assertQ( + req("q", "*:*", "rows", "100", "fl", "id," + fieldName), + "//*[@numFound='10']", + "count(//doc)=10", // exactly 10 docs in response + (shouldReturnFieldIfRequested + ? 
RETURN_FIELD + : DONT_RETURN_FIELD)); // no field in any doc other than 'id' - assertQ(req("q", "*:*", "rows", "100", "fl", "*"), + assertQ( + req("q", "*:*", "rows", "100", "fl", "*"), "//*[@numFound='10']", "count(//doc)=10", // exactly 10 docs in response DONT_RETURN_FIELD); // no field in any doc other then 'id' - assertQ(req("q", "*:*", "rows", "100"), + assertQ( + req("q", "*:*", "rows", "100"), "//*[@numFound='10']", "count(//doc)=10", // exactly 10 docs in response DONT_RETURN_FIELD); // no field in any doc other then 'id' @@ -3858,88 +5890,105 @@ public void doTestReturnNonStored(final String fieldName, boolean shouldReturnFi } public void testWhiteboxCreateFields() throws Exception { - String[] typeNames = new String[]{"i", "l", "f", "d", "dt"}; - Class<?>[] expectedClasses = new Class<?>[]{IntPoint.class, LongPoint.class, FloatPoint.class, DoublePoint.class, LongPoint.class}; - + String[] typeNames = new String[] {"i", "l", "f", "d", "dt"}; + Class<?>[] expectedClasses = + new Class<?>[] { + IntPoint.class, LongPoint.class, FloatPoint.class, DoublePoint.class, LongPoint.class + }; + Date dateToTest = new Date(); - Object[][] values = new Object[][] { - {42, "42"}, - {42, "42"}, - {42.123, "42.123"}, - {12345.6789, "12345.6789"}, - {dateToTest, new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.ROOT).format(dateToTest), "NOW"} // "NOW" won't be equal to the other dates - }; - + Object[][] values = + new Object[][] { + {42, "42"}, + {42, "42"}, + {42.123, "42.123"}, + {12345.6789, "12345.6789"}, + { + dateToTest, + new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.ROOT).format(dateToTest), + "NOW" + } // "NOW" won't be equal to the other dates + }; + Set<String> typesTested = new HashSet<>(); for (int i = 0; i < typeNames.length; i++) { - for (String suffix:FIELD_SUFFIXES) { - doWhiteboxCreateFields("whitebox_p_" + typeNames[i] + suffix, expectedClasses[i], values[i]); + for (String suffix : FIELD_SUFFIXES) { + doWhiteboxCreateFields( + "whitebox_p_" + typeNames[i] + suffix, expectedClasses[i], values[i]); typesTested.add("*_p_" + typeNames[i] + suffix); } } Set<String> typesToTest = new HashSet<>(); - for (DynamicField dynField:h.getCore().getLatestSchema().getDynamicFields()) { + for (DynamicField dynField : h.getCore().getLatestSchema().getDynamicFields()) { if (dynField.getPrototype().getType() instanceof PointField) { typesToTest.add(dynField.getRegex()); } } assertEquals("Missing types in the test", typesTested, typesToTest); } - - /** - * Calls {@link #callAndCheckCreateFields} on each of the specified values. - * This is a convinience method for testing the same fieldname with multiple inputs. + + /** + * Calls {@link #callAndCheckCreateFields} on each of the specified values. This is a convenience + * method for testing the same fieldname with multiple inputs. * * @see #callAndCheckCreateFields */ - private void doWhiteboxCreateFields(final String fieldName, final Class<?> pointType, final Object... values) throws Exception { - + private void doWhiteboxCreateFields( + final String fieldName, final Class<?> pointType, final Object... values) throws Exception { + for (Object value : values) { - // ideally we should require that all input values be diff forms of the same logical value - // (ie '"42"' vs 'new Integer(42)') and assert that each produces an equivalent list of IndexableField objects - // but that doesn't seem to work -- appears not all IndexableField classes override Object.equals? 
+ // ideally we should require that all input values be diff forms of the same logical value (ie + // '"42"' vs 'new Integer(42)') and assert that each produces an equivalent list of + // IndexableField objects but that doesn't seem to work -- appears not all IndexableField + // classes override Object.equals? final List<IndexableField> result = callAndCheckCreateFields(fieldName, pointType, value); assertNotNull(value + " => null", result); } } - - /** - * Calls {@link SchemaField#createFields} on the specified value for the specified field name, and asserts - * that the results match the SchemaField propeties, with an additional check that the pointType - * is included if and only if the SchemaField is "indexed" + /** + * Calls {@link SchemaField#createFields} on the specified value for the specified field name, and + * asserts that the results match the SchemaField properties, with an additional check that the + * pointType is included if and only if the SchemaField is "indexed" */ - private List<IndexableField> callAndCheckCreateFields(final String fieldName, final Class<?> pointType, final Object value) throws Exception { + private List<IndexableField> callAndCheckCreateFields( + final String fieldName, final Class<?> pointType, final Object value) throws Exception { final SchemaField sf = h.getCore().getLatestSchema().getField(fieldName); final List<IndexableField> results = sf.createFields(value); final Set<IndexableField> resultSet = new LinkedHashSet<>(results); - assertEquals("duplicates found in results? " + results.toString(), - results.size(), resultSet.size()); + assertEquals( + "duplicates found in results? " + results.toString(), results.size(), resultSet.size()); final Set<Class<? extends IndexableField>> resultClasses = new HashSet<>(); for (IndexableField f : results) { resultClasses.add(f.getClass()); - - if (!sf.hasDocValues() ) { - assertFalse(f.toString(), - (f instanceof NumericDocValuesField) || - (f instanceof SortedNumericDocValuesField)); + + if (!sf.hasDocValues()) { + assertFalse( + f.toString(), + (f instanceof NumericDocValuesField) || (f instanceof SortedNumericDocValuesField)); } } - assertEquals(fieldName + " stored? Result Fields: " + Arrays.toString(results.toArray()), - sf.stored(), resultClasses.contains(StoredField.class)); - assertEquals(fieldName + " indexed? Result Fields: " + Arrays.toString(results.toArray()), - sf.indexed(), resultClasses.contains(pointType)); + assertEquals( + fieldName + " stored? Result Fields: " + Arrays.toString(results.toArray()), + sf.stored(), + resultClasses.contains(StoredField.class)); + assertEquals( + fieldName + " indexed? Result Fields: " + Arrays.toString(results.toArray()), + sf.indexed(), + resultClasses.contains(pointType)); if (sf.multiValued()) { - assertEquals(fieldName + " docvalues? Result Fields: " + Arrays.toString(results.toArray()), - sf.hasDocValues(), resultClasses.contains(SortedNumericDocValuesField.class)); + assertEquals( + fieldName + " docvalues? Result Fields: " + Arrays.toString(results.toArray()), + sf.hasDocValues(), + resultClasses.contains(SortedNumericDocValuesField.class)); } else { - assertEquals(fieldName + " docvalues? Result Fields: " + Arrays.toString(results.toArray()), - sf.hasDocValues(), resultClasses.contains(NumericDocValuesField.class)); + assertEquals( + fieldName + " docvalues? 
Result Fields: " + Arrays.toString(results.toArray()), + sf.hasDocValues(), + resultClasses.contains(NumericDocValuesField.class)); } return results; } - - } diff --git a/solr/core/src/test/org/apache/solr/schema/TestSchemaField.java b/solr/core/src/test/org/apache/solr/schema/TestSchemaField.java index a7cd08ed893..eb48ac05ca2 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestSchemaField.java +++ b/solr/core/src/test/org/apache/solr/schema/TestSchemaField.java @@ -25,7 +25,7 @@ public class TestSchemaField extends SolrTestCaseJ4 { @BeforeClass public static void create() throws Exception { - initCore("solrconfig_codec.xml","schema_postingsformat.xml"); + initCore("solrconfig_codec.xml", "schema_postingsformat.xml"); } @Before @@ -39,15 +39,26 @@ public void testFieldTypes() { assertFieldTypeFormats("str_standard_simple", "Lucene84", "SimpleTextDocValuesFormat"); } - private void assertFieldTypeFormats(String fieldTypeName, String expectedPostingsFormat, String expectedDocValuesFormat) { + private void assertFieldTypeFormats( + String fieldTypeName, String expectedPostingsFormat, String expectedDocValuesFormat) { FieldType ft = h.getCore().getLatestSchema().getFieldTypeByName(fieldTypeName); assertNotNull("Field type " + fieldTypeName + " not found - schema got changed?", ft); - assertEquals("Field type " + ft.getTypeName() + " wrong " + FieldProperties.POSTINGS_FORMAT + assertEquals( + "Field type " + + ft.getTypeName() + + " wrong " + + FieldProperties.POSTINGS_FORMAT + " - schema got changed?", - expectedPostingsFormat, ft.getNamedPropertyValues(true).get(FieldProperties.POSTINGS_FORMAT)); - assertEquals("Field type " + ft.getTypeName() + " wrong " + FieldProperties.DOC_VALUES_FORMAT + expectedPostingsFormat, + ft.getNamedPropertyValues(true).get(FieldProperties.POSTINGS_FORMAT)); + assertEquals( + "Field type " + + ft.getTypeName() + + " wrong " + + FieldProperties.DOC_VALUES_FORMAT + " - schema got changed?", - expectedDocValuesFormat, ft.getNamedPropertyValues(true).get(FieldProperties.DOC_VALUES_FORMAT)); + expectedDocValuesFormat, + ft.getNamedPropertyValues(true).get(FieldProperties.DOC_VALUES_FORMAT)); } public void testFields() { @@ -71,14 +82,25 @@ public void testDynamicFields() { assertFieldFormats("any_simple", "Direct", "SimpleTextDocValuesFormat"); } - private void assertFieldFormats(String fieldName, String expectedPostingsFormat, String expectedDocValuesFormat) { + private void assertFieldFormats( + String fieldName, String expectedPostingsFormat, String expectedDocValuesFormat) { SchemaField f = h.getCore().getLatestSchema().getField(fieldName); assertNotNull("Field " + fieldName + " not found - schema got changed?", f); - assertEquals("Field " + f.getName() + " wrong " + FieldProperties.POSTINGS_FORMAT + assertEquals( + "Field " + + f.getName() + + " wrong " + + FieldProperties.POSTINGS_FORMAT + " - schema got changed?", - expectedPostingsFormat, f.getPostingsFormat()); - assertEquals("Field " + f.getName() + " wrong " + FieldProperties.DOC_VALUES_FORMAT + expectedPostingsFormat, + f.getPostingsFormat()); + assertEquals( + "Field " + + f.getName() + + " wrong " + + FieldProperties.DOC_VALUES_FORMAT + " - schema got changed?", - expectedDocValuesFormat, f.getDocValuesFormat()); + expectedDocValuesFormat, + f.getDocValuesFormat()); } } diff --git a/solr/core/src/test/org/apache/solr/schema/TestSchemaManager.java b/solr/core/src/test/org/apache/solr/schema/TestSchemaManager.java index d7a11c2cc16..d895460c2d5 100644 --- 
a/solr/core/src/test/org/apache/solr/schema/TestSchemaManager.java +++ b/solr/core/src/test/org/apache/solr/schema/TestSchemaManager.java @@ -16,6 +16,10 @@ */ package org.apache.solr.schema; +import java.io.IOException; +import java.io.StringReader; +import java.util.Arrays; +import java.util.List; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.util.CommandOperation; import org.apache.solr.core.SolrConfig; @@ -25,54 +29,51 @@ import org.junit.Ignore; import org.junit.Test; -import java.io.IOException; -import java.io.StringReader; -import java.util.Arrays; -import java.util.List; - public class TestSchemaManager extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema-tiny.xml"); + initCore("solrconfig.xml", "schema-tiny.xml"); } @Test public void testParsing() throws IOException { - String x = "{\n" + - " 'add-field' : {\n" + - " 'name':'a',\n" + - " 'type': 'string',\n" + - " 'stored':true,\n" + - " 'indexed':false\n" + - " },\n" + - " 'add-field' : {\n" + - " 'name':'b',\n" + - " 'type': 'string',\n" + - " 'stored':true,\n" + - " 'indexed':false\n" + - " }\n" + - "\n" + - "}"; + String x = + "{\n" + + " 'add-field' : {\n" + + " 'name':'a',\n" + + " 'type': 'string',\n" + + " 'stored':true,\n" + + " 'indexed':false\n" + + " },\n" + + " 'add-field' : {\n" + + " 'name':'b',\n" + + " 'type': 'string',\n" + + " 'stored':true,\n" + + " 'indexed':false\n" + + " }\n" + + "\n" + + "}"; List<CommandOperation> ops = CommandOperation.parse(new StringReader(json(x))); - assertEquals(2,ops.size()); - assertTrue( CommandOperation.captureErrors(ops).isEmpty()); + assertEquals(2, ops.size()); + assertTrue(CommandOperation.captureErrors(ops).isEmpty()); - x = " {'add-field' : [{\n" + - " 'name':'a1',\n" + - " 'type': 'string',\n" + - " 'stored':true,\n" + - " 'indexed':false\n" + - " },\n" + - " {\n" + - " 'name':'a2',\n" + - " 'type': 'string',\n" + - " 'stored':true,\n" + - " 'indexed':true\n" + - " }]\n" + - " }"; + x = + " {'add-field' : [{\n" + + " 'name':'a1',\n" + + " 'type': 'string',\n" + + " 'stored':true,\n" + + " 'indexed':false\n" + + " },\n" + + " {\n" + + " 'name':'a2',\n" + + " 'type': 'string',\n" + + " 'stored':true,\n" + + " 'indexed':true\n" + + " }]\n" + + " }"; ops = CommandOperation.parse(new StringReader(json(x))); - assertEquals(2,ops.size()); + assertEquals(2, ops.size()); assertTrue(CommandOperation.captureErrors(ops).isEmpty()); } @@ -80,7 +81,8 @@ public void testParsing() throws IOException { @Ignore public void testSchemaLoadingPerf() { SolrConfig config = TestHarness.createConfig(testSolrHome, "collection1", "solrconfig.xml"); - List<String> names = Arrays.asList("schema.xml", "schema11.xml", "schema12.xml", "schema15.xml"); + List<String> names = + Arrays.asList("schema.xml", "schema11.xml", "schema12.xml", "schema15.xml"); RTimer timer = new RTimer(); for (String name : names) { IndexSchema schema = IndexSchemaFactory.buildIndexSchema(name, config); diff --git a/solr/core/src/test/org/apache/solr/schema/TestSchemalessBufferedUpdates.java b/solr/core/src/test/org/apache/solr/schema/TestSchemalessBufferedUpdates.java index a4a9b2bcec9..9e3484b1dd4 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestSchemalessBufferedUpdates.java +++ b/solr/core/src/test/org/apache/solr/schema/TestSchemalessBufferedUpdates.java @@ -19,6 +19,14 @@ import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; 
+import java.util.Date; +import java.util.List; +import java.util.concurrent.Future; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; import org.apache.commons.io.FileUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrInputDocument; @@ -26,8 +34,9 @@ import org.apache.solr.request.SolrRequestInfo; import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.update.AddUpdateCommand; -import org.apache.solr.update.UpdateLog; import org.apache.solr.update.UpdateHandler; +import org.apache.solr.update.UpdateLog; +import org.apache.solr.update.processor.DistributedUpdateProcessor.DistribPhase; import org.apache.solr.update.processor.DistributedUpdateProcessorFactory; import org.apache.solr.update.processor.UpdateRequestProcessor; import org.apache.solr.update.processor.UpdateRequestProcessorChain; @@ -36,17 +45,6 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.concurrent.Future; -import java.util.concurrent.Semaphore; -import java.util.concurrent.TimeUnit; - -import org.apache.solr.update.processor.DistributedUpdateProcessor.DistribPhase; - public class TestSchemalessBufferedUpdates extends SolrTestCaseJ4 { // means that we've seen the leader and have version info (i.e. we are a non-leader replica) @@ -62,10 +60,16 @@ public static void beforeClass() throws Exception { File tmpSolrHome = createTempDir().toFile(); File tmpConfDir = new File(tmpSolrHome, confDir); File testHomeConfDir = new File(TEST_HOME(), confDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "solrconfig-schemaless.xml"), tmpConfDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema-add-schema-fields-update-processor.xml"), tmpConfDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "solrconfig.snippet.randomindexconfig.xml"), tmpConfDir); - initCore("solrconfig-schemaless.xml", "schema-add-schema-fields-update-processor.xml", tmpSolrHome.getPath()); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "solrconfig-schemaless.xml"), tmpConfDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "schema-add-schema-fields-update-processor.xml"), tmpConfDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "solrconfig.snippet.randomindexconfig.xml"), tmpConfDir); + initCore( + "solrconfig-schemaless.xml", + "schema-add-schema-fields-update-processor.xml", + tmpSolrHome.getPath()); } @Test @@ -73,13 +77,14 @@ public void test() throws Exception { TestInjection.skipIndexWriterCommitOnClose = true; final Semaphore logReplay = new Semaphore(0); final Semaphore logReplayFinish = new Semaphore(0); - UpdateLog.testing_logReplayHook = () -> { - try { - assertTrue(logReplay.tryAcquire(TIMEOUT, TimeUnit.SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }; + UpdateLog.testing_logReplayHook = + () -> { + try { + assertTrue(logReplay.tryAcquire(TIMEOUT, TimeUnit.SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }; UpdateLog.testing_logReplayFinishHook = logReplayFinish::release; SolrQueryRequest req = req(); @@ -90,19 +95,24 @@ public void test() throws Exception { assertEquals(UpdateLog.State.ACTIVE, ulog.getState()); // Invalid date will be normalized by ParseDateField URP - updateJ(jsonAdd(processAdd(sdoc("id","1", "f_dt","2017-01-04"))), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + 
jsonAdd(processAdd(sdoc("id", "1", "f_dt", "2017-01-04"))), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); assertU(commit()); assertJQ(req("q", "*:*"), "/response/numFound==1"); ulog.bufferUpdates(); assertEquals(UpdateLog.State.BUFFERING, ulog.getState()); - // If the ParseDateField URP isn't ahead of the DUP, then the date won't be normalized in the buffered tlog entry, - // and the doc won't be indexed on the replaying replica - a warning is logged as follows: + // If the ParseDateField URP isn't ahead of the DUP, then the date won't be normalized in the + // buffered tlog entry, and the doc won't be indexed on the replaying replica - a warning is + // logged as follows: // WARN [...] o.a.s.u.UpdateLog REYPLAY_ERR: IOException reading log // org.apache.solr.common.SolrException: Invalid Date String:'2017-01-05' // at org.apache.solr.util.DateMathParser.parseMath(DateMathParser.java:234) - updateJ(jsonAdd(processAdd(sdoc("id","2", "f_dt","2017-01-05"))), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(processAdd(sdoc("id", "2", "f_dt", "2017-01-05"))), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); Future<UpdateLog.RecoveryInfo> rinfoFuture = ulog.applyBufferedUpdates(); @@ -128,13 +138,13 @@ private SolrInputDocument processAdd(final SolrInputDocument docIn) throws IOExc UpdateRequestProcessorChain processorChain = h.getCore().getUpdateProcessingChain(UPDATE_CHAIN); assertNotNull("Undefined URP chain '" + UPDATE_CHAIN + "'", processorChain); - List<UpdateRequestProcessorFactory> factoriesUpToDUP = new ArrayList<>(); + List<UpdateRequestProcessorFactory> factoriesUpToDUP = new ArrayList<>(); for (UpdateRequestProcessorFactory urpFactory : processorChain.getProcessors()) { factoriesUpToDUP.add(urpFactory); - if (urpFactory.getClass().equals(DistributedUpdateProcessorFactory.class)) - break; + if (urpFactory.getClass().equals(DistributedUpdateProcessorFactory.class)) break; } - UpdateRequestProcessorChain chainUpToDUP = new UpdateRequestProcessorChain(factoriesUpToDUP, h.getCore()); + UpdateRequestProcessorChain chainUpToDUP = + new UpdateRequestProcessorChain(factoriesUpToDUP, h.getCore()); assertNotNull("URP chain '" + UPDATE_CHAIN + "'", chainUpToDUP); SolrQueryResponse rsp = new SolrQueryResponse(); SolrQueryRequest req = req(); @@ -145,10 +155,14 @@ private SolrInputDocument processAdd(final SolrInputDocument docIn) throws IOExc UpdateRequestProcessor processor = chainUpToDUP.createProcessor(req, rsp); processor.processAdd(cmd); if (cmd.solrDoc.get("f_dt").getValue() instanceof Date) { - // Non-JSON types (Date in this case) aren't handled properly in noggit-0.6. Although this is fixed in - // https://github.com/yonik/noggit/commit/ec3e732af7c9425e8f40297463cbe294154682b1 to call obj.toString(), - // Date::toString produces a Date representation that Solr doesn't like, so we convert using Instant::toString - cmd.solrDoc.get("f_dt").setValue(((Date) cmd.solrDoc.get("f_dt").getValue()).toInstant().toString()); + // Non-JSON types (Date in this case) aren't handled properly in noggit-0.6. 
Although this + // is fixed in + // https://github.com/yonik/noggit/commit/ec3e732af7c9425e8f40297463cbe294154682b1 to call + // obj.toString(), Date::toString produces a Date representation that Solr doesn't like, so + // we convert using Instant::toString + cmd.solrDoc + .get("f_dt") + .setValue(((Date) cmd.solrDoc.get("f_dt").getValue()).toInstant().toString()); } return cmd.solrDoc; } finally { diff --git a/solr/core/src/test/org/apache/solr/schema/TestSortableTextField.java b/solr/core/src/test/org/apache/solr/schema/TestSortableTextField.java index 571d6256ead..30988c167b1 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestSortableTextField.java +++ b/solr/core/src/test/org/apache/solr/schema/TestSortableTextField.java @@ -16,13 +16,13 @@ */ package org.apache.solr.schema; -import java.util.Arrays; +import static org.hamcrest.CoreMatchers.instanceOf; + import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Map; - import org.apache.commons.lang3.StringUtils; - import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; @@ -30,186 +30,202 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReader; import org.apache.lucene.util.TestUtil; - import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.client.solrj.SolrClient; +import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; -import org.apache.solr.client.solrj.SolrClient; -import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer; import org.apache.solr.search.SolrIndexSearcher; import org.apache.solr.util.RefCounted; - import org.junit.Before; import org.junit.BeforeClass; -import static org.hamcrest.CoreMatchers.instanceOf; public class TestSortableTextField extends SolrTestCaseJ4 { - protected static final String BIG_CONST - = StringUtils.repeat("x", SortableTextField.DEFAULT_MAX_CHARS_FOR_DOC_VALUES); - + protected static final String BIG_CONST = + StringUtils.repeat("x", SortableTextField.DEFAULT_MAX_CHARS_FOR_DOC_VALUES); + @BeforeClass public static void create() throws Exception { - initCore("solrconfig-minimal.xml","schema-sorting-text.xml"); - + initCore("solrconfig-minimal.xml", "schema-sorting-text.xml"); + // sanity check our fields & types... // these should all use docValues (either explicitly or implicitly)... 
- for (String n : Arrays.asList("keyword_stxt", - "whitespace_stxt", "whitespace_f_stxt", "whitespace_l_stxt")) { - + for (String n : + Arrays.asList( + "keyword_stxt", "whitespace_stxt", "whitespace_f_stxt", "whitespace_l_stxt")) { + FieldType ft = h.getCore().getLatestSchema().getFieldTypeByName(n); - assertEquals("type " + ft.getTypeName() + " should have docvalues - schema got changed?", - true, ft.getNamedPropertyValues(true).get("docValues")) ; + assertEquals( + "type " + ft.getTypeName() + " should have docvalues - schema got changed?", + true, + ft.getNamedPropertyValues(true).get("docValues")); } - for (String n : Arrays.asList("keyword_stxt", "keyword_dv_stxt", - "whitespace_stxt", "whitespace_nois_stxt", - "whitespace_f_stxt", "whitespace_l_stxt")) { - + for (String n : + Arrays.asList( + "keyword_stxt", "keyword_dv_stxt", + "whitespace_stxt", "whitespace_nois_stxt", + "whitespace_f_stxt", "whitespace_l_stxt")) { + SchemaField sf = h.getCore().getLatestSchema().getField(n); - assertTrue("field " + sf.getName() + " should have docvalues - schema got changed?", - sf.hasDocValues()) ; + assertTrue( + "field " + sf.getName() + " should have docvalues - schema got changed?", + sf.hasDocValues()); } { // this field should *NOT* have docValues .. should behave like a plain old TextField SchemaField sf = h.getCore().getLatestSchema().getField("whitespace_nodv_stxt"); - assertFalse("field " + sf.getName() + " should not have docvalues - schema got changed?", - sf.hasDocValues()) ; + assertFalse( + "field " + sf.getName() + " should not have docvalues - schema got changed?", + sf.hasDocValues()); } - } - + @Before public void cleanup() throws Exception { clearIndex(); } public void testSimple() throws Exception { - assertU(adoc("id","1", "whitespace_stxt", "how now brown cow ?", "whitespace_f_stxt", "aaa bbb")); - assertU(adoc("id","2", "whitespace_stxt", "how now brown dog ?", "whitespace_f_stxt", "bbb aaa")); - assertU(adoc("id","3", "whitespace_stxt", "how now brown cat ?", "whitespace_f_stxt", "xxx yyy")); - assertU(adoc("id","4", "whitespace_stxt", "dog and cat" /* no val for whitespace_f_stxt */)); - + assertU( + adoc("id", "1", "whitespace_stxt", "how now brown cow ?", "whitespace_f_stxt", "aaa bbb")); + assertU( + adoc("id", "2", "whitespace_stxt", "how now brown dog ?", "whitespace_f_stxt", "bbb aaa")); + assertU( + adoc("id", "3", "whitespace_stxt", "how now brown cat ?", "whitespace_f_stxt", "xxx yyy")); + assertU(adoc("id", "4", "whitespace_stxt", "dog and cat" /* no val for whitespace_f_stxt */)); + assertU(commit()); // search & sort // NOTE: even if the field is indexed=false, should still be able to sort on it - for (String sortf : Arrays.asList("whitespace_stxt", "whitespace_nois_stxt", "whitespace_plain_str")) { - assertQ(req("q", "whitespace_stxt:cat", "sort", sortf + " asc") - , "//*[@numFound='2']" - , "//result/doc[1]/str[@name='id'][.=4]" - , "//result/doc[2]/str[@name='id'][.=3]" - ); - assertQ(req("q", "whitespace_stxt:cat", "sort", sortf + " desc") - , "//*[@numFound='2']" - , "//result/doc[1]/str[@name='id'][.=3]" - , "//result/doc[2]/str[@name='id'][.=4]" - ); - assertQ(req("q", "whitespace_stxt:brown", "sort", sortf + " asc") - , "//*[@numFound='3']" - , "//result/doc[1]/str[@name='id'][.=3]" - , "//result/doc[2]/str[@name='id'][.=1]" - , "//result/doc[3]/str[@name='id'][.=2]" - ); - assertQ(req("q", "whitespace_stxt:brown", "sort", sortf + " desc") - , "//*[@numFound='3']" - , "//result/doc[1]/str[@name='id'][.=2]" - , "//result/doc[2]/str[@name='id'][.=1]" - 
, "//result/doc[3]/str[@name='id'][.=3]" - ); - + for (String sortf : + Arrays.asList("whitespace_stxt", "whitespace_nois_stxt", "whitespace_plain_str")) { + assertQ( + req("q", "whitespace_stxt:cat", "sort", sortf + " asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=4]", + "//result/doc[2]/str[@name='id'][.=3]"); + assertQ( + req("q", "whitespace_stxt:cat", "sort", sortf + " desc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=3]", + "//result/doc[2]/str[@name='id'][.=4]"); + assertQ( + req("q", "whitespace_stxt:brown", "sort", sortf + " asc"), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.=3]", + "//result/doc[2]/str[@name='id'][.=1]", + "//result/doc[3]/str[@name='id'][.=2]"); + assertQ( + req("q", "whitespace_stxt:brown", "sort", sortf + " desc"), + "//*[@numFound='3']", + "//result/doc[1]/str[@name='id'][.=2]", + "//result/doc[2]/str[@name='id'][.=1]", + "//result/doc[3]/str[@name='id'][.=3]"); + // we should still be able to search if docValues="false" (but sort on a diff field) - assertQ(req("q","whitespace_nodv_stxt:cat", "sort", sortf + " asc") - , "//*[@numFound='2']" - , "//result/doc[1]/str[@name='id'][.=4]" - , "//result/doc[2]/str[@name='id'][.=3]" - ); + assertQ( + req("q", "whitespace_nodv_stxt:cat", "sort", sortf + " asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=4]", + "//result/doc[2]/str[@name='id'][.=3]"); } - + // attempting to sort on docValues="false" field should give an error... - assertQEx("attempting to sort on docValues=false field should give an error", - "when docValues=\"false\"", - req("q","*:*", "sort", "whitespace_nodv_stxt asc"), - ErrorCode.BAD_REQUEST); + assertQEx( + "attempting to sort on docValues=false field should give an error", + "when docValues=\"false\"", + req("q", "*:*", "sort", "whitespace_nodv_stxt asc"), + ErrorCode.BAD_REQUEST); // sortMissing - whitespace_f_stxt copyField to whitespace_l_stxt - assertQ(req("q","*:*", "sort", "whitespace_f_stxt asc") - , "//*[@numFound='4']" - , "//result/doc[1]/str[@name='id'][.=4]" - , "//result/doc[2]/str[@name='id'][.=1]" - , "//result/doc[3]/str[@name='id'][.=2]" - , "//result/doc[4]/str[@name='id'][.=3]" - ); - assertQ(req("q","*:*", "sort", "whitespace_f_stxt desc") - , "//*[@numFound='4']" - , "//result/doc[1]/str[@name='id'][.=4]" - , "//result/doc[2]/str[@name='id'][.=3]" - , "//result/doc[3]/str[@name='id'][.=2]" - , "//result/doc[4]/str[@name='id'][.=1]" - ); - assertQ(req("q","*:*", "sort", "whitespace_l_stxt asc") - , "//*[@numFound='4']" - , "//result/doc[1]/str[@name='id'][.=1]" - , "//result/doc[2]/str[@name='id'][.=2]" - , "//result/doc[3]/str[@name='id'][.=3]" - , "//result/doc[4]/str[@name='id'][.=4]" - ); - assertQ(req("q","*:*", "sort", "whitespace_l_stxt desc") - , "//*[@numFound='4']" - , "//result/doc[1]/str[@name='id'][.=3]" - , "//result/doc[2]/str[@name='id'][.=2]" - , "//result/doc[3]/str[@name='id'][.=1]" - , "//result/doc[4]/str[@name='id'][.=4]" - ); + assertQ( + req("q", "*:*", "sort", "whitespace_f_stxt asc"), + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.=4]", + "//result/doc[2]/str[@name='id'][.=1]", + "//result/doc[3]/str[@name='id'][.=2]", + "//result/doc[4]/str[@name='id'][.=3]"); + assertQ( + req("q", "*:*", "sort", "whitespace_f_stxt desc"), + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.=4]", + "//result/doc[2]/str[@name='id'][.=3]", + "//result/doc[3]/str[@name='id'][.=2]", + "//result/doc[4]/str[@name='id'][.=1]"); + assertQ( + req("q", "*:*", "sort", 
"whitespace_l_stxt asc"), + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.=1]", + "//result/doc[2]/str[@name='id'][.=2]", + "//result/doc[3]/str[@name='id'][.=3]", + "//result/doc[4]/str[@name='id'][.=4]"); + assertQ( + req("q", "*:*", "sort", "whitespace_l_stxt desc"), + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.=3]", + "//result/doc[2]/str[@name='id'][.=2]", + "//result/doc[3]/str[@name='id'][.=1]", + "//result/doc[4]/str[@name='id'][.=4]"); } public void testSimpleSearchAndFacets() throws Exception { - assertU(adoc("id","1", "whitespace_stxt", "how now brown cow ?")); - assertU(adoc("id","2", "whitespace_stxt", "how now brown cow ?")); - assertU(adoc("id","3", "whitespace_stxt", "holy cow !")); - assertU(adoc("id","4", "whitespace_stxt", "dog and cat")); - + assertU(adoc("id", "1", "whitespace_stxt", "how now brown cow ?")); + assertU(adoc("id", "2", "whitespace_stxt", "how now brown cow ?")); + assertU(adoc("id", "3", "whitespace_stxt", "holy cow !")); + assertU(adoc("id", "4", "whitespace_stxt", "dog and cat")); + assertU(commit()); // NOTE: even if the field is indexed=false, should still be able to facet on it - for (String facet : Arrays.asList("whitespace_stxt", "whitespace_nois_stxt", - "whitespace_m_stxt", "whitespace_plain_str")) { - for (String search : Arrays.asList("whitespace_stxt", "whitespace_nodv_stxt", - "whitespace_m_stxt", "whitespace_plain_txt")) { + for (String facet : + Arrays.asList( + "whitespace_stxt", "whitespace_nois_stxt", + "whitespace_m_stxt", "whitespace_plain_str")) { + for (String search : + Arrays.asList( + "whitespace_stxt", "whitespace_nodv_stxt", + "whitespace_m_stxt", "whitespace_plain_txt")) { // facet.field - final String fpre = "//lst[@name='facet_fields']/lst[@name='"+facet+"']/"; - assertQ(req("q", search + ":cow", "rows", "0", - "facet.field", facet, "facet", "true") - , "//*[@numFound='3']" - , fpre + "int[@name='how now brown cow ?'][.=2]" - , fpre + "int[@name='holy cow !'][.=1]" - , fpre + "int[@name='dog and cat'][.=0]" - ); - + final String fpre = "//lst[@name='facet_fields']/lst[@name='" + facet + "']/"; + assertQ( + req("q", search + ":cow", "rows", "0", "facet.field", facet, "facet", "true"), + "//*[@numFound='3']", + fpre + "int[@name='how now brown cow ?'][.=2]", + fpre + "int[@name='holy cow !'][.=1]", + fpre + "int[@name='dog and cat'][.=0]"); + // json facet final String jpre = "//lst[@name='facets']/lst[@name='x']/arr[@name='buckets']/"; - assertQ(req("q", search + ":cow", "rows", "0", - "json.facet", "{x:{ type: terms, field:'" + facet + "', mincount:0 }}") - , "//*[@numFound='3']" - , jpre + "lst[str[@name='val'][.='how now brown cow ?']][long[@name='count'][.=2]]" - , jpre + "lst[str[@name='val'][.='holy cow !']][long[@name='count'][.=1]]" - , jpre + "lst[str[@name='val'][.='dog and cat']][long[@name='count'][.=0]]" - ); - + assertQ( + req( + "q", + search + ":cow", + "rows", + "0", + "json.facet", + "{x:{ type: terms, field:'" + facet + "', mincount:0 }}"), + "//*[@numFound='3']", + jpre + "lst[str[@name='val'][.='how now brown cow ?']][long[@name='count'][.=2]]", + jpre + "lst[str[@name='val'][.='holy cow !']][long[@name='count'][.=1]]", + jpre + "lst[str[@name='val'][.='dog and cat']][long[@name='count'][.=0]]"); } } } - public void testWhiteboxIndexReader() throws Exception { - assertU(adoc("id","1", - "whitespace_stxt", "how now brown cow ?", - "whitespace_m_stxt", "xxx", - "whitespace_m_stxt", "yyy", - "whitespace_f_stxt", "aaa bbb", - "keyword_stxt", "Blarggghhh!")); + assertU( + adoc( + 
"id", "1", + "whitespace_stxt", "how now brown cow ?", + "whitespace_m_stxt", "xxx", + "whitespace_m_stxt", "yyy", + "whitespace_f_stxt", "aaa bbb", + "keyword_stxt", "Blarggghhh!")); assertU(commit()); final RefCounted searcher = h.getCore().getNewestSearcher(false); @@ -217,54 +233,68 @@ public void testWhiteboxIndexReader() throws Exception { final LeafReader r = searcher.get().getSlowAtomicReader(); // common cases... - for (String field : Arrays.asList("keyword_stxt", "keyword_dv_stxt", - "whitespace_stxt", "whitespace_f_stxt", "whitespace_l_stxt")) { + for (String field : + Arrays.asList( + "keyword_stxt", + "keyword_dv_stxt", + "whitespace_stxt", + "whitespace_f_stxt", + "whitespace_l_stxt")) { assertNotNull("FieldInfos: " + field, r.getFieldInfos().fieldInfo(field)); - assertEquals("DocValuesType: " + field, - DocValuesType.SORTED, r.getFieldInfos().fieldInfo(field).getDocValuesType()); + assertEquals( + "DocValuesType: " + field, + DocValuesType.SORTED, + r.getFieldInfos().fieldInfo(field).getDocValuesType()); assertNotNull("DocValues: " + field, r.getSortedDocValues(field)); assertNotNull("Terms: " + field, r.terms(field)); - } - + // special cases... assertNotNull(r.getFieldInfos().fieldInfo("whitespace_nodv_stxt")); - assertEquals(DocValuesType.NONE, - r.getFieldInfos().fieldInfo("whitespace_nodv_stxt").getDocValuesType()); + assertEquals( + DocValuesType.NONE, + r.getFieldInfos().fieldInfo("whitespace_nodv_stxt").getDocValuesType()); assertNull(r.getSortedDocValues("whitespace_nodv_stxt")); assertNotNull(r.terms("whitespace_nodv_stxt")); - // + // assertNotNull(r.getFieldInfos().fieldInfo("whitespace_nois_stxt")); - assertEquals(DocValuesType.SORTED, - r.getFieldInfos().fieldInfo("whitespace_nois_stxt").getDocValuesType()); + assertEquals( + DocValuesType.SORTED, + r.getFieldInfos().fieldInfo("whitespace_nois_stxt").getDocValuesType()); assertNotNull(r.getSortedDocValues("whitespace_nois_stxt")); assertNull(r.terms("whitespace_nois_stxt")); // assertNotNull(r.getFieldInfos().fieldInfo("whitespace_m_stxt")); - assertEquals(DocValuesType.SORTED_SET, - r.getFieldInfos().fieldInfo("whitespace_m_stxt").getDocValuesType()); + assertEquals( + DocValuesType.SORTED_SET, + r.getFieldInfos().fieldInfo("whitespace_m_stxt").getDocValuesType()); assertNotNull(r.getSortedSetDocValues("whitespace_m_stxt")); assertNotNull(r.terms("whitespace_m_stxt")); - + } finally { if (null != searcher) { searcher.decref(); } } } - + public void testWhiteboxCreateFields() throws Exception { List values = null; // common case... - for (String field : Arrays.asList("keyword_stxt", "keyword_dv_stxt", - "whitespace_stxt", "whitespace_f_stxt", "whitespace_l_stxt")) { + for (String field : + Arrays.asList( + "keyword_stxt", + "keyword_dv_stxt", + "whitespace_stxt", + "whitespace_f_stxt", + "whitespace_l_stxt")) { values = createIndexableFields(field); assertEquals(field, 2, values.size()); assertThat(field, values.get(0), instanceOf(Field.class)); assertThat(field, values.get(1), instanceOf(SortedDocValuesField.class)); } - + // special cases... 
values = createIndexableFields("whitespace_nois_stxt"); assertEquals(1, values.size()); @@ -277,286 +307,342 @@ public void testWhiteboxCreateFields() throws Exception { values = createIndexableFields("whitespace_m_stxt"); assertEquals(2, values.size()); assertThat(values.get(0), instanceOf(Field.class)); - assertThat(values.get(1), instanceOf(SortedSetDocValuesField.class)); + assertThat(values.get(1), instanceOf(SortedSetDocValuesField.class)); } + private List createIndexableFields(String fieldName) { SchemaField sf = h.getCore().getLatestSchema().getField(fieldName); return sf.getType().createFields(sf, "dummy value"); } public void testMaxCharsSort() throws Exception { - assertU(adoc("id","1", "whitespace_stxt", "aaa bbb ccc ddd")); - assertU(adoc("id","2", "whitespace_stxt", "aaa bbb xxx yyy")); - assertU(adoc("id","3", "whitespace_stxt", "aaa bbb ccc xxx")); - assertU(adoc("id","4", "whitespace_stxt", "aaa")); + assertU(adoc("id", "1", "whitespace_stxt", "aaa bbb ccc ddd")); + assertU(adoc("id", "2", "whitespace_stxt", "aaa bbb xxx yyy")); + assertU(adoc("id", "3", "whitespace_stxt", "aaa bbb ccc xxx")); + assertU(adoc("id", "4", "whitespace_stxt", "aaa")); assertU(commit()); // all terms should be searchable in all fields, even if the docvalues are limited - for (String searchF : Arrays.asList("whitespace_stxt", "whitespace_plain_txt", - "whitespace_max3_stxt", "whitespace_max6_stxt", - "whitespace_max0_stxt", "whitespace_maxNeg_stxt")) { + for (String searchF : + Arrays.asList( + "whitespace_stxt", "whitespace_plain_txt", + "whitespace_max3_stxt", "whitespace_max6_stxt", + "whitespace_max0_stxt", "whitespace_maxNeg_stxt")) { // maxChars of 0 or neg should be equivalent to no max at all - for (String sortF : Arrays.asList("whitespace_stxt", "whitespace_plain_str", - "whitespace_max0_stxt", "whitespace_maxNeg_stxt")) { - - assertQ(req("q", searchF + ":ccc", "sort", sortF + " desc, id asc") - , "//*[@numFound='2']" - , "//result/doc[1]/str[@name='id'][.=3]" - , "//result/doc[2]/str[@name='id'][.=1]" - ); - - assertQ(req("q", searchF + ":ccc", "sort", sortF + " asc, id desc") - , "//*[@numFound='2']" - , "//result/doc[1]/str[@name='id'][.=1]" - , "//result/doc[2]/str[@name='id'][.=3]" - ); + for (String sortF : + Arrays.asList( + "whitespace_stxt", "whitespace_plain_str", + "whitespace_max0_stxt", "whitespace_maxNeg_stxt")) { + + assertQ( + req("q", searchF + ":ccc", "sort", sortF + " desc, id asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=3]", + "//result/doc[2]/str[@name='id'][.=1]"); + + assertQ( + req("q", searchF + ":ccc", "sort", sortF + " asc, id desc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=1]", + "//result/doc[2]/str[@name='id'][.=3]"); } } - + // sorting on a maxChars limited fields should force tie breaker for (String dir : Arrays.asList("asc", "desc")) { // for max3, dir shouldn't matter - should always tie.. 
- assertQ(req("q", "*:*", "sort", "whitespace_max3_stxt "+dir+", id desc") // max3, id desc - , "//*[@numFound='4']" - , "//result/doc[1]/str[@name='id'][.=4]" - , "//result/doc[2]/str[@name='id'][.=3]" - , "//result/doc[3]/str[@name='id'][.=2]" - , "//result/doc[4]/str[@name='id'][.=1]" - ); - assertQ(req("q", "*:*", "sort", "whitespace_max3_stxt "+dir+", id asc") // max3, id desc - , "//*[@numFound='4']" - , "//result/doc[1]/str[@name='id'][.=1]" - , "//result/doc[2]/str[@name='id'][.=2]" - , "//result/doc[3]/str[@name='id'][.=3]" - , "//result/doc[4]/str[@name='id'][.=4]" - ); + assertQ( + req("q", "*:*", "sort", "whitespace_max3_stxt " + dir + ", id desc") // max3, id desc + , + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.=4]", + "//result/doc[2]/str[@name='id'][.=3]", + "//result/doc[3]/str[@name='id'][.=2]", + "//result/doc[4]/str[@name='id'][.=1]"); + assertQ( + req("q", "*:*", "sort", "whitespace_max3_stxt " + dir + ", id asc") // max3, id desc + , + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.=1]", + "//result/doc[2]/str[@name='id'][.=2]", + "//result/doc[3]/str[@name='id'][.=3]", + "//result/doc[4]/str[@name='id'][.=4]"); } - assertQ(req("q", "*:*", "sort", "whitespace_max6_stxt asc, id desc") // max6 asc, id desc - , "//*[@numFound='4']" - , "//result/doc[1]/str[@name='id'][.=4]" // no tiebreaker needed - , "//result/doc[2]/str[@name='id'][.=3]" - , "//result/doc[3]/str[@name='id'][.=2]" - , "//result/doc[4]/str[@name='id'][.=1]" - ); - assertQ(req("q", "*:*", "sort", "whitespace_max6_stxt asc, id asc") // max6 asc, id desc - , "//*[@numFound='4']" - , "//result/doc[1]/str[@name='id'][.=4]" // no tiebreaker needed - , "//result/doc[2]/str[@name='id'][.=1]" - , "//result/doc[3]/str[@name='id'][.=2]" - , "//result/doc[4]/str[@name='id'][.=3]" - ); - assertQ(req("q", "*:*", "sort", "whitespace_max6_stxt desc, id desc") // max6 desc, id desc - , "//*[@numFound='4']" - , "//result/doc[1]/str[@name='id'][.=3]" - , "//result/doc[2]/str[@name='id'][.=2]" - , "//result/doc[3]/str[@name='id'][.=1]" - , "//result/doc[4]/str[@name='id'][.=4]" // no tiebreaker needed - ); - assertQ(req("q", "*:*", "sort", "whitespace_max6_stxt desc, id asc") // max6 desc, id desc - , "//*[@numFound='4']" - , "//result/doc[1]/str[@name='id'][.=1]" - , "//result/doc[2]/str[@name='id'][.=2]" - , "//result/doc[3]/str[@name='id'][.=3]" - , "//result/doc[4]/str[@name='id'][.=4]" // no tiebreaker needed - ); - + assertQ( + req("q", "*:*", "sort", "whitespace_max6_stxt asc, id desc") // max6 asc, id desc + , + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.=4]" // no tiebreaker needed + , + "//result/doc[2]/str[@name='id'][.=3]", + "//result/doc[3]/str[@name='id'][.=2]", + "//result/doc[4]/str[@name='id'][.=1]"); + assertQ( + req("q", "*:*", "sort", "whitespace_max6_stxt asc, id asc") // max6 asc, id desc + , + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.=4]" // no tiebreaker needed + , + "//result/doc[2]/str[@name='id'][.=1]", + "//result/doc[3]/str[@name='id'][.=2]", + "//result/doc[4]/str[@name='id'][.=3]"); + assertQ( + req("q", "*:*", "sort", "whitespace_max6_stxt desc, id desc") // max6 desc, id desc + , + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.=3]", + "//result/doc[2]/str[@name='id'][.=2]", + "//result/doc[3]/str[@name='id'][.=1]", + "//result/doc[4]/str[@name='id'][.=4]" // no tiebreaker needed + ); + assertQ( + req("q", "*:*", "sort", "whitespace_max6_stxt desc, id asc") // max6 desc, id desc + , + "//*[@numFound='4']", + 
"//result/doc[1]/str[@name='id'][.=1]", + "//result/doc[2]/str[@name='id'][.=2]", + "//result/doc[3]/str[@name='id'][.=3]", + "//result/doc[4]/str[@name='id'][.=4]" // no tiebreaker needed + ); + // sanity check that the default max is working.... - assertU(adoc("id","5", "whitespace_stxt", BIG_CONST + " aaa zzz")); - assertU(adoc("id","6", "whitespace_stxt", BIG_CONST + " bbb zzz ")); + assertU(adoc("id", "5", "whitespace_stxt", BIG_CONST + " aaa zzz")); + assertU(adoc("id", "6", "whitespace_stxt", BIG_CONST + " bbb zzz ")); assertU(commit()); - // for these fields, the tie breaker should be the only thing that matters, regardless of direction... + // for these fields, the tie breaker should be the only thing that matters, regardless of + // direction... for (String sortF : Arrays.asList("whitespace_stxt", "whitespace_nois_stxt")) { for (String dir : Arrays.asList("asc", "desc")) { - assertQ(req("q", "whitespace_stxt:zzz", "sort", sortF + " " + dir + ", id asc") - , "//*[@numFound='2']" - , "//result/doc[1]/str[@name='id'][.=5]" - , "//result/doc[2]/str[@name='id'][.=6]" - ); - assertQ(req("q", "whitespace_stxt:zzz", "sort", sortF + " " + dir + ", id desc") - , "//*[@numFound='2']" - , "//result/doc[1]/str[@name='id'][.=6]" - , "//result/doc[2]/str[@name='id'][.=5]" - ); + assertQ( + req("q", "whitespace_stxt:zzz", "sort", sortF + " " + dir + ", id asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=5]", + "//result/doc[2]/str[@name='id'][.=6]"); + assertQ( + req("q", "whitespace_stxt:zzz", "sort", sortF + " " + dir + ", id desc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.=6]", + "//result/doc[2]/str[@name='id'][.=5]"); } } } - /** - * test how various permutations of useDocValuesAsStored and maxCharsForDocValues interact - */ + /** test how various permutations of useDocValuesAsStored and maxCharsForDocValues interact */ public void testUseDocValuesAsStored() throws Exception { ignoreException("when useDocValuesAsStored=true \\(length="); - + // first things first... // unlike most field types, SortableTextField should default to useDocValuesAsStored==false // (check a handful that should have the default behavior) for (String n : Arrays.asList("keyword_stxt", "whitespace_max0_stxt", "whitespace_max6_stxt")) { { FieldType ft = h.getCore().getLatestSchema().getFieldTypeByName(n); - assertEquals("type " + ft.getTypeName() + " should not default to useDocValuesAsStored", - false, ft.useDocValuesAsStored()) ; + assertEquals( + "type " + ft.getTypeName() + " should not default to useDocValuesAsStored", + false, + ft.useDocValuesAsStored()); } { SchemaField sf = h.getCore().getLatestSchema().getField(n); - assertEquals("field " + sf.getName() + " should not default to useDocValuesAsStored", - false, sf.useDocValuesAsStored()) ; + assertEquals( + "field " + sf.getName() + " should not default to useDocValuesAsStored", + false, + sf.useDocValuesAsStored()); } } - + // but it should be possible to set useDocValuesAsStored=true explicitly on types... 
int num_types_found = 0; - for (Map.Entry entry : h.getCore().getLatestSchema().getFieldTypes().entrySet()) { + for (Map.Entry entry : + h.getCore().getLatestSchema().getFieldTypes().entrySet()) { if (entry.getKey().endsWith("_has_usedvs")) { num_types_found++; FieldType ft = entry.getValue(); - assertEquals("type " + ft.getTypeName() + " has unexpected useDocValuesAsStored value", - true, ft.useDocValuesAsStored()) ; + assertEquals( + "type " + ft.getTypeName() + " has unexpected useDocValuesAsStored value", + true, + ft.useDocValuesAsStored()); } } - assertEquals("sanity check: wrong number of *_has_usedvs types found -- schema changed?", - 2, num_types_found); + assertEquals( + "sanity check: wrong number of *_has_usedvs types found -- schema changed?", + 2, + num_types_found); - // ...and it should be possible to set/override useDocValuesAsStored=true on fields... int num_fields_found = 0; List xpaths = new ArrayList<>(42); - for (Map.Entry entry : h.getCore().getLatestSchema().getFields().entrySet()) { + for (Map.Entry entry : + h.getCore().getLatestSchema().getFields().entrySet()) { if (entry.getKey().endsWith("_usedvs")) { num_fields_found++; final SchemaField sf = entry.getValue(); final String name = sf.getName(); - + // some sanity check before we move on with the rest of our testing... assertFalse("schema change? field should not be stored=true: " + name, sf.stored()); final boolean usedvs = name.endsWith("_has_usedvs"); - assertTrue("schema change broke assumptions: field must be '*_has_usedvs' or '*_negates_usedvs': " + - name, usedvs ^ name.endsWith("_negates_usedvs")); + assertTrue( + "schema change broke assumptions: field must be '*_has_usedvs' or '*_negates_usedvs': " + + name, + usedvs ^ name.endsWith("_negates_usedvs")); final boolean max6 = name.startsWith("max6_"); - assertTrue("schema change broke assumptions: field must be 'max6_*' or 'max0_*': " + - name, max6 ^ name.startsWith("max0_")); - - assertEquals("Unexpected useDocValuesAsStored value for field: " + name, - usedvs, sf.useDocValuesAsStored()) ; - - final String docid = ""+num_fields_found; + assertTrue( + "schema change broke assumptions: field must be 'max6_*' or 'max0_*': " + name, + max6 ^ name.startsWith("max0_")); + + assertEquals( + "Unexpected useDocValuesAsStored value for field: " + name, + usedvs, + sf.useDocValuesAsStored()); + + final String docid = "" + num_fields_found; if (usedvs && max6) { // if useDocValuesAsStored==true and maxCharsForDocValues=N then longer values should fail - + final String doc = adoc("id", docid, name, "apple pear orange"); - SolrException ex = expectThrows(SolrException.class, () -> { assertU(doc); }); - for (String expect : Arrays.asList("field " + name, - "length=17", - "useDocValuesAsStored=true", - "maxCharsForDocValues=6")) { - assertTrue("exception must mention " + expect + ": " + ex.getMessage(), - ex.getMessage().contains(expect)); + SolrException ex = + expectThrows( + SolrException.class, + () -> { + assertU(doc); + }); + for (String expect : + Arrays.asList( + "field " + name, + "length=17", + "useDocValuesAsStored=true", + "maxCharsForDocValues=6")) { + assertTrue( + "exception must mention " + expect + ": " + ex.getMessage(), + ex.getMessage().contains(expect)); } } else { // otherwise (useDocValuesAsStored==false *OR* maxCharsForDocValues=0) any value // should be fine when adding a doc and we should be able to search for it later... 
final String val = docid + " apple pear orange " + BIG_CONST; assertU(adoc("id", docid, name, val)); - String doc_xpath = "//result/doc[str[@name='id'][.='"+docid+"']]"; - + String doc_xpath = "//result/doc[str[@name='id'][.='" + docid + "']]"; + if (usedvs) { // ...and if it *does* usedvs, then we should defnitely see our value when searching... - doc_xpath = doc_xpath + "[str[@name='"+name+"'][.='"+val+"']]"; + doc_xpath = doc_xpath + "[str[@name='" + name + "'][.='" + val + "']]"; } else { // ...but if not, then we should definitely not see any value for our field... - doc_xpath = doc_xpath + "[not(str[@name='"+name+"'])]"; + doc_xpath = doc_xpath + "[not(str[@name='" + name + "'])]"; } xpaths.add(doc_xpath); } } } - assertEquals("sanity check: wrong number of *_usedvs fields found -- schema changed?", - 6, num_fields_found); - + assertEquals( + "sanity check: wrong number of *_usedvs fields found -- schema changed?", + 6, + num_fields_found); + // check all our expected docs can be found (with the expected values) assertU(commit()); - xpaths.add("//*[@numFound='"+xpaths.size()+"']"); + xpaths.add("//*[@numFound='" + xpaths.size() + "']"); assertQ(req("q", "*:*", "fl", "*"), xpaths.toArray(new String[xpaths.size()])); } - - /** - * tests that a SortableTextField using KeywordTokenzier (w/docValues) behaves exactly the same as + * tests that a SortableTextField using KeywordTokenzier (w/docValues) behaves exactly the same as * StrFields that it's copied to for quering and sorting */ public void testRandomStrEquivalentBehavior() throws Exception { - final List test_fields = Arrays.asList("keyword_stxt", "keyword_dv_stxt", - "keyword_s_dv", "keyword_s"); - // we use embedded client instead of assertQ: we want to compare the responses from multiple requests - @SuppressWarnings("resource") final SolrClient client = new EmbeddedSolrServer(h.getCore()); - + final List test_fields = + Arrays.asList( + "keyword_stxt", "keyword_dv_stxt", + "keyword_s_dv", "keyword_s"); + // we use embedded client instead of assertQ: we want to compare the responses from multiple + // requests + @SuppressWarnings("resource") + final SolrClient client = new EmbeddedSolrServer(h.getCore()); + final int numDocs = atLeast(100); final int magicIdx = TestUtil.nextInt(random(), 1, numDocs); String magic = null; for (int i = 1; i <= numDocs; i++) { - // ideally we'd test all "realistic" unicode string, but EmbeddedSolrServer uses XML request writer - // and has no option to change this so ctrl-characters break the request + // ideally we'd test all "realistic" unicode string, but EmbeddedSolrServer uses XML request + // writer and has no option to change this so ctrl-characters break the request final String val = TestUtil.randomSimpleString(random(), 100); if (i == magicIdx) { magic = val; } - assertEquals(0, client.add(sdoc("id", ""+i, "keyword_stxt", val)).getStatus()); - + assertEquals(0, client.add(sdoc("id", "" + i, "keyword_stxt", val)).getStatus()); } assertNotNull(magic); - + assertEquals(0, client.commit().getStatus()); // query for magic term should match same doc regardless of field (reminder: keyword tokenizer) // (we need the filter in the unlikely event that magic value with randomly picked twice) for (String f : test_fields) { - - final SolrDocumentList results = client.query(params("q", "{!field f="+f+" v=$v}", - "v", magic, - "fq", "id:" + magicIdx )).getResults(); - assertEquals(f + ": Query ("+magic+") filtered by id: " + magicIdx + " ==> " + results, - 1L, results.getNumFound()); + + final 
SolrDocumentList results = + client + .query( + params( + "q", "{!field f=" + f + " v=$v}", + "v", magic, + "fq", "id:" + magicIdx)) + .getResults(); + assertEquals( + f + ": Query (" + magic + ") filtered by id: " + magicIdx + " ==> " + results, + 1L, + results.getNumFound()); final SolrDocument doc = results.get(0); - assertEquals(f + ": Query ("+magic+") filtered by id: " + magicIdx + " ==> " + doc, - ""+magicIdx, doc.getFieldValue("id")); - assertEquals(f + ": Query ("+magic+") filtered by id: " + magicIdx + " ==> " + doc, - magic, doc.getFieldValue(f)); + assertEquals( + f + ": Query (" + magic + ") filtered by id: " + magicIdx + " ==> " + doc, + "" + magicIdx, + doc.getFieldValue("id")); + assertEquals( + f + ": Query (" + magic + ") filtered by id: " + magicIdx + " ==> " + doc, + magic, + doc.getFieldValue(f)); } // do some random id range queries using all 3 fields for sorting. results should be identical final int numQ = atLeast(10); for (int i = 0; i < numQ; i++) { - final int hi = TestUtil.nextInt(random(), 1, numDocs-1); + final int hi = TestUtil.nextInt(random(), 1, numDocs - 1); final int lo = TestUtil.nextInt(random(), 1, hi); final boolean fwd = random().nextBoolean(); - + SolrDocumentList previous = null; String prevField = null; for (String f : test_fields) { - final SolrDocumentList results = client.query(params("q","id_i:["+lo+" TO "+hi+"]", - "sort", f + (fwd ? " asc" : " desc") + - // secondary on id for determinism - ", id asc") - ).getResults(); + final SolrDocumentList results = + client + .query( + params( + "q", "id_i:[" + lo + " TO " + hi + "]", + "sort", + f + + (fwd ? " asc" : " desc") + + + // secondary on id for determinism + ", id asc")) + .getResults(); assertEquals(results.toString(), (1L + hi - lo), results.getNumFound()); if (null != previous) { - assertEquals(prevField + " vs " + f, - previous.getNumFound(), results.getNumFound()); + assertEquals(prevField + " vs " + f, previous.getNumFound(), results.getNumFound()); for (int d = 0; d < results.size(); d++) { - assertEquals(prevField + " vs " + f + ": " + d, - previous.get(d).getFieldValue("id"), - results.get(d).getFieldValue("id")); - assertEquals(prevField + " vs " + f + ": " + d, - previous.get(d).getFieldValue(prevField), - results.get(d).getFieldValue(f)); - + assertEquals( + prevField + " vs " + f + ": " + d, + previous.get(d).getFieldValue("id"), + results.get(d).getFieldValue("id")); + assertEquals( + prevField + " vs " + f + ": " + d, + previous.get(d).getFieldValue(prevField), + results.get(d).getFieldValue(f)); } } previous = results; prevField = f; } } - } } diff --git a/solr/core/src/test/org/apache/solr/schema/TestTextField.java b/solr/core/src/test/org/apache/solr/schema/TestTextField.java index 9409908180a..beab46a027a 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestTextField.java +++ b/solr/core/src/test/org/apache/solr/schema/TestTextField.java @@ -25,26 +25,32 @@ import org.apache.solr.common.SolrException; import org.junit.Test; -/** - * Tests directly {@link org.apache.solr.schema.TextField} methods. - */ +/** Tests directly {@link org.apache.solr.schema.TextField} methods. */ public class TestTextField extends SolrTestCaseJ4 { @Test public void testAnalyzeMultiTerm() { // No terms provided by the StopFilter (stop word) for the multi-term part. - // This is supported. Check TextField.analyzeMultiTerm returns null (and does not throw an exception). 
- BytesRef termBytes = TextField.analyzeMultiTerm("field", "the", new StopAnalyzer(EnglishAnalyzer.ENGLISH_STOP_WORDS_SET)); + // This is supported. Check TextField.analyzeMultiTerm returns null (and does not throw an + // exception). + BytesRef termBytes = + TextField.analyzeMultiTerm( + "field", "the", new StopAnalyzer(EnglishAnalyzer.ENGLISH_STOP_WORDS_SET)); assertNull(termBytes); // One term provided by the WhitespaceTokenizer for the multi-term part. - // This is the regular case. Check TextField.analyzeMultiTerm returns it (and does not throw an exception). + // This is the regular case. Check TextField.analyzeMultiTerm returns it (and does not throw an + // exception). termBytes = TextField.analyzeMultiTerm("field", "Sol", new WhitespaceAnalyzer()); assertEquals("Sol", termBytes.utf8ToString()); // Two terms provided by the WhitespaceTokenizer for the multi-term part. // This is not allowed. Expect an exception. - SolrException exception = expectThrows(SolrException.class, () -> TextField.analyzeMultiTerm("field", "term1 term2", new WhitespaceAnalyzer())); - assertEquals("Unexpected error code", SolrException.ErrorCode.BAD_REQUEST.code, exception.code()); + SolrException exception = + expectThrows( + SolrException.class, + () -> TextField.analyzeMultiTerm("field", "term1 term2", new WhitespaceAnalyzer())); + assertEquals( + "Unexpected error code", SolrException.ErrorCode.BAD_REQUEST.code, exception.code()); } -} \ No newline at end of file +} diff --git a/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored.java b/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored.java index 6fb8deccb44..5bd49aafe8b 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored.java +++ b/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored.java @@ -16,11 +16,6 @@ */ package org.apache.solr.schema; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.xpath.XPath; -import javax.xml.xpath.XPathConstants; -import javax.xml.xpath.XPathFactory; import java.io.File; import java.io.InputStream; import java.nio.charset.StandardCharsets; @@ -32,7 +27,11 @@ import java.util.HashSet; import java.util.Set; import java.util.regex.Pattern; - +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathFactory; import org.apache.commons.io.FileUtils; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.TestUtil; @@ -45,16 +44,14 @@ import org.w3c.dom.NodeList; import org.xml.sax.InputSource; -/** - * Tests the useDocValuesAsStored functionality. - */ +/** Tests the useDocValuesAsStored functionality. 
*/ public class TestUseDocValuesAsStored extends AbstractBadConfigTestBase { private int id = 1; private static File tmpSolrHome; private static File tmpConfDir; - + private static final String collection = "collection1"; private static final String confDir = collection + "/conf"; @@ -64,24 +61,34 @@ public class TestUseDocValuesAsStored extends AbstractBadConfigTestBase { // http://www.w3.org/TR/2006/REC-xml-20060816/#charsets private static final String NON_XML_CHARS = "\u0000-\u0008\u000B-\u000C\u000E-\u001F\uFFFE\uFFFF"; - // Avoid single quotes (problematic in XPath literals) and carriage returns (XML roundtripping fails) + // Avoid single quotes (problematic in XPath literals) and carriage returns (XML roundtripping + // fails) private static final Pattern BAD_CHAR_PATTERN = Pattern.compile("[\'\r" + NON_XML_CHARS + "]"); private static final Pattern STORED_FIELD_NAME_PATTERN = Pattern.compile("_dv$"); static { - START_RANDOM_EPOCH_MILLIS = LocalDateTime.of(-11000, Month.JANUARY, 1, 0, 0)// BC - .toInstant(ZoneOffset.UTC).toEpochMilli(); - END_RANDOM_EPOCH_MILLIS = LocalDateTime.of(11000, Month.DECEMBER, 31, 23, 59, 59, 999_000_000) // AD, 5 digit year - .toInstant(ZoneOffset.UTC).toEpochMilli(); + START_RANDOM_EPOCH_MILLIS = + LocalDateTime.of(-11000, Month.JANUARY, 1, 0, 0) // BC + .toInstant(ZoneOffset.UTC) + .toEpochMilli(); + END_RANDOM_EPOCH_MILLIS = + LocalDateTime.of(11000, Month.DECEMBER, 31, 23, 59, 59, 999_000_000) // AD, 5 digit year + .toInstant(ZoneOffset.UTC) + .toEpochMilli(); try { DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder(); - InputStream stream = TestUseDocValuesAsStored.class.getResourceAsStream("/solr/collection1/conf/enumsConfig.xml"); - Document doc = builder.parse(new InputSource(IOUtils.getDecodingReader(stream, StandardCharsets.UTF_8))); + InputStream stream = + TestUseDocValuesAsStored.class.getResourceAsStream( + "/solr/collection1/conf/enumsConfig.xml"); + Document doc = + builder.parse(new InputSource(IOUtils.getDecodingReader(stream, StandardCharsets.UTF_8))); XPath xpath = XPathFactory.newInstance().newXPath(); - NodeList nodes = (NodeList)xpath.evaluate - ("/enumsConfig/enum[@name='severity']/value", doc, XPathConstants.NODESET); + NodeList nodes = + (NodeList) + xpath.evaluate( + "/enumsConfig/enum[@name='severity']/value", doc, XPathConstants.NODESET); SEVERITY = new String[nodes.getLength()]; - for (int i = 0 ; i < nodes.getLength() ; ++i) { + for (int i = 0; i < nodes.getLength(); ++i) { SEVERITY[i] = DOMUtil.getText(nodes.item(i)); } } catch (Exception e) { @@ -94,16 +101,20 @@ private void initManagedSchemaCore() throws Exception { tmpSolrHome = createTempDir().toFile(); tmpConfDir = new File(tmpSolrHome, confDir); File testHomeConfDir = new File(TEST_HOME(), confDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "solrconfig-managed-schema.xml"), tmpConfDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "solrconfig.snippet.randomindexconfig.xml"), tmpConfDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "solrconfig-managed-schema.xml"), tmpConfDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "solrconfig.snippet.randomindexconfig.xml"), tmpConfDir); FileUtils.copyFileToDirectory(new File(testHomeConfDir, "enumsConfig.xml"), tmpConfDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema-non-stored-docvalues.xml"), tmpConfDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "schema-non-stored-docvalues.xml"), 
tmpConfDir); // initCore will trigger an upgrade to managed schema, since the solrconfig has // System.setProperty("enable.update.log", "false"); System.setProperty("managed.schema.mutable", "true"); - initCore("solrconfig-managed-schema.xml", "schema-non-stored-docvalues.xml", tmpSolrHome.getPath()); + initCore( + "solrconfig-managed-schema.xml", "schema-non-stored-docvalues.xml", tmpSolrHome.getPath()); assertQ("sanity check", req("q", "*:*"), "//*[@numFound='0']"); } @@ -136,61 +147,96 @@ public void testOnEmptyIndex() throws Exception { assertJQ(req("q", "*:*", "fl", "*,test_nonstored_dv_str"), "/response/numFound==0"); assertU(commit()); - assertJQ(req("q", "*:*"), "/response/numFound==1", - "/response/docs==[" + - "{'id':'xyz','test_nonstored_dv_str':'xyz'}" - + "]"); - assertJQ(req("q", "*:*", "fl", "*"), "/response/numFound==1", - "/response/docs==[" + - "{'id':'xyz','test_nonstored_dv_str':'xyz'}" - + "]"); - assertJQ(req("q", "*:*", "fl", "test_nonstored_dv_str"), "/response/numFound==1", - "/response/docs==[" + - "{'test_nonstored_dv_str':'xyz'}" - + "]"); - assertJQ(req("q", "*:*", "fl", "*,test_nonstored_dv_str"), "/response/numFound==1", - "/response/docs==[" + - "{'id':'xyz','test_nonstored_dv_str':'xyz'}" - + "]"); + assertJQ( + req("q", "*:*"), + "/response/numFound==1", + "/response/docs==[" + "{'id':'xyz','test_nonstored_dv_str':'xyz'}" + "]"); + assertJQ( + req("q", "*:*", "fl", "*"), + "/response/numFound==1", + "/response/docs==[" + "{'id':'xyz','test_nonstored_dv_str':'xyz'}" + "]"); + assertJQ( + req("q", "*:*", "fl", "test_nonstored_dv_str"), + "/response/numFound==1", + "/response/docs==[" + "{'test_nonstored_dv_str':'xyz'}" + "]"); + assertJQ( + req("q", "*:*", "fl", "*,test_nonstored_dv_str"), + "/response/numFound==1", + "/response/docs==[" + "{'id':'xyz','test_nonstored_dv_str':'xyz'}" + "]"); assertU(adoc("id", "xyz")); assertU(commit()); - assertJQ(req("q", "*:*"), "/response/numFound==1", - "/response/docs==[" + - "{'id':'xyz'}" - + "]"); + assertJQ(req("q", "*:*"), "/response/numFound==1", "/response/docs==[" + "{'id':'xyz'}" + "]"); } - + @Test public void testDuplicateMultiValued() throws Exception { - doTest("strTF", dvStringFieldName(3,true,false), "str", "X", "X", "Y"); - doTest("strTT", dvStringFieldName(3,true,true), "str", "X", "X", "Y"); - doTest("strFF", dvStringFieldName(3,false,false), "str", "X", "X", "Y"); + doTest("strTF", dvStringFieldName(3, true, false), "str", "X", "X", "Y"); + doTest("strTT", dvStringFieldName(3, true, true), "str", "X", "X", "Y"); + doTest("strFF", dvStringFieldName(3, false, false), "str", "X", "X", "Y"); doTest("int", "test_is_dvo", "int", "42", "42", "-666"); doTest("float", "test_fs_dvo", "float", "4.2", "4.2", "-66.666"); - doTest("long", "test_ls_dvo", "long", "420", "420", "-6666666" ); + doTest("long", "test_ls_dvo", "long", "420", "420", "-6666666"); doTest("double", "test_ds_dvo", "double", "0.0042", "0.0042", "-6.6666E-5"); - doTest("date", "test_dts_dvo", "date", "2016-07-04T03:02:01Z", "2016-07-04T03:02:01Z", "1999-12-31T23:59:59Z" ); + doTest( + "date", + "test_dts_dvo", + "date", + "2016-07-04T03:02:01Z", + "2016-07-04T03:02:01Z", + "1999-12-31T23:59:59Z"); doTest("enum", "enums_dvo", "str", SEVERITY[0], SEVERITY[0], SEVERITY[1]); } @Test public void testRandomSingleAndMultiValued() throws Exception { - for (int c = 0 ; c < 10 * RANDOM_MULTIPLIER ; ++c) { + for (int c = 0; c < 10 * RANDOM_MULTIPLIER; ++c) { clearIndex(); int[] arity = new int[9]; - for (int a = 0 ; a < arity.length ; ++a) { + for (int a 
= 0; a < arity.length; ++a) { // Single-valued 50% of the time; other 50%: 2-10 values equally likely arity[a] = random().nextBoolean() ? 1 : TestUtil.nextInt(random(), 2, 10); } - doTest("check string value is correct", dvStringFieldName(arity[0], true, false), "str", nextValues(arity[0], "str")); - doTest("check int value is correct", "test_i" + plural(arity[1]) + "_dvo", "int", nextValues(arity[1], "int")); - doTest("check double value is correct", "test_d" + plural(arity[2]) + "_dvo", "double", nextValues(arity[2], "double")); - doTest("check long value is correct", "test_l" + plural(arity[3]) + "_dvo", "long", nextValues(arity[3], "long")); - doTest("check float value is correct", "test_f" + plural(arity[4]) + "_dvo", "float", nextValues(arity[4], "float")); - doTest("check date value is correct", "test_dt" + plural(arity[5]) + "_dvo", "date", nextValues(arity[5], "date")); - doTest("check stored and docValues value is correct", dvStringFieldName(arity[6], true, true), "str", nextValues(arity[6], "str")); - doTest("check non-stored and non-indexed is accessible", dvStringFieldName(arity[7], false, false), "str", nextValues(arity[7], "str")); + doTest( + "check string value is correct", + dvStringFieldName(arity[0], true, false), + "str", + nextValues(arity[0], "str")); + doTest( + "check int value is correct", + "test_i" + plural(arity[1]) + "_dvo", + "int", + nextValues(arity[1], "int")); + doTest( + "check double value is correct", + "test_d" + plural(arity[2]) + "_dvo", + "double", + nextValues(arity[2], "double")); + doTest( + "check long value is correct", + "test_l" + plural(arity[3]) + "_dvo", + "long", + nextValues(arity[3], "long")); + doTest( + "check float value is correct", + "test_f" + plural(arity[4]) + "_dvo", + "float", + nextValues(arity[4], "float")); + doTest( + "check date value is correct", + "test_dt" + plural(arity[5]) + "_dvo", + "date", + nextValues(arity[5], "date")); + doTest( + "check stored and docValues value is correct", + dvStringFieldName(arity[6], true, true), + "str", + nextValues(arity[6], "str")); + doTest( + "check non-stored and non-indexed is accessible", + dvStringFieldName(arity[7], false, false), + "str", + nextValues(arity[7], "str")); doTest("enumField", "enum" + plural(arity[8]) + "_dvo", "str", nextValues(arity[8], "enum")); } } @@ -204,35 +250,49 @@ private static boolean isStoredField(String fieldName) { } private String dvStringFieldName(int arity, boolean indexed, boolean stored) { - String base = "test_s" + (arity > 1 ? "s": ""); + String base = "test_s" + (arity > 1 ? "s" : ""); String suffix = ""; if (indexed && stored) suffix = "_dv"; - else if (indexed && ! stored) suffix = "_dvo"; - else if ( ! indexed && ! 
stored) suffix = "_dvo2"; + else if (indexed && !stored) suffix = "_dvo"; + else if (!indexed && !stored) suffix = "_dvo2"; else assertTrue("unsupported dv string field combination: stored and not indexed", false); return base + suffix; } private String[] nextValues(int arity, String valueType) throws Exception { String[] values = new String[arity]; - for (int i = 0 ; i < arity ; ++i) { + for (int i = 0; i < arity; ++i) { switch (valueType) { - case "int": values[i] = String.valueOf(random().nextInt()); break; - case "double": values[i] = String.valueOf(Double.longBitsToDouble(random().nextLong())); break; - case "long": values[i] = String.valueOf(random().nextLong()); break; - case "float": values[i] = String.valueOf(Float.intBitsToFloat(random().nextInt())); break; - case "enum": values[i] = SEVERITY[TestUtil.nextInt(random(), 0, SEVERITY.length - 1)]; break; - case "str": { - String str = TestUtil.randomRealisticUnicodeString(random()); - values[i] = BAD_CHAR_PATTERN.matcher(str).replaceAll("\uFFFD"); + case "int": + values[i] = String.valueOf(random().nextInt()); + break; + case "double": + values[i] = String.valueOf(Double.longBitsToDouble(random().nextLong())); + break; + case "long": + values[i] = String.valueOf(random().nextLong()); + break; + case "float": + values[i] = String.valueOf(Float.intBitsToFloat(random().nextInt())); break; - } - case "date": { - long epochMillis = TestUtil.nextLong(random(), START_RANDOM_EPOCH_MILLIS, END_RANDOM_EPOCH_MILLIS); - values[i] = Instant.ofEpochMilli(epochMillis).toString(); + case "enum": + values[i] = SEVERITY[TestUtil.nextInt(random(), 0, SEVERITY.length - 1)]; break; - } - default: throw new Exception("unknown type '" + valueType + "'"); + case "str": + { + String str = TestUtil.randomRealisticUnicodeString(random()); + values[i] = BAD_CHAR_PATTERN.matcher(str).replaceAll("\uFFFD"); + break; + } + case "date": + { + long epochMillis = + TestUtil.nextLong(random(), START_RANDOM_EPOCH_MILLIS, END_RANDOM_EPOCH_MILLIS); + values[i] = Instant.ofEpochMilli(epochMillis).toString(); + break; + } + default: + throw new Exception("unknown type '" + valueType + "'"); } } return values; @@ -243,8 +303,18 @@ public void testMultipleSearchResults() throws Exception { // Three documents with different numbers of values for a field assertU(adoc("id", "myid1", "test_is_dvo", "101", "test_is_dvo", "102", "test_is_dvo", "103")); assertU(adoc("id", "myid2", "test_is_dvo", "201", "test_is_dvo", "202")); - assertU(adoc("id", "myid3", "test_is_dvo", "301", "test_is_dvo", "302", - "test_is_dvo", "303", "test_is_dvo", "304")); + assertU( + adoc( + "id", + "myid3", + "test_is_dvo", + "301", + "test_is_dvo", + "302", + "test_is_dvo", + "303", + "test_is_dvo", + "304")); // Multivalued and singly valued fields in the same document assertU(adoc("id", "myid4", "test_s_dvo", "hello", "test_is_dvo", "401", "test_is_dvo", "402")); @@ -254,7 +324,8 @@ public void testMultipleSearchResults() throws Exception { assertU(adoc("id", "myid6", "nonstored_dv_str", "dont see me", "test_s_dvo", "hello")); assertU(commit()); - assertJQ(req("q", "id:myid*", "fl", "*"), + assertJQ( + req("q", "id:myid*", "fl", "*"), "/response/docs==[" + "{'id':'myid1','test_is_dvo':[101,102,103]}," + "{'id':'myid2','test_is_dvo':[201,202]}," @@ -264,7 +335,7 @@ public void testMultipleSearchResults() throws Exception { + "{'id':'myid6','test_s_dvo':'hello'}" + "]"); } - + @Test public void testUseDocValuesAsStoredFalse() throws Exception { SchemaField sf = 
h.getCore().getLatestSchema().getField("nonstored_dv_str"); @@ -274,33 +345,29 @@ public void testUseDocValuesAsStoredFalse() throws Exception { assertFalse(sf.stored()); assertU(adoc("id", "myid", "nonstored_dv_str", "dont see me")); assertU(commit()); - - assertJQ(req("q", "id:myid"), - "/response/docs==[" - + "{'id':'myid'}" - + "]"); - assertJQ(req("q", "id:myid", "fl", "*"), - "/response/docs==[" - + "{'id':'myid'}" - + "]"); - assertJQ(req("q", "id:myid", "fl", "id,nonstored_dv_*"), - "/response/docs==[" - + "{'id':'myid'}" - + "]"); - assertJQ(req("q", "id:myid", "fl", "id,nonstored_dv_str"), - "/response/docs==[" - + "{'id':'myid','nonstored_dv_str':'dont see me'}" - + "]"); + + assertJQ(req("q", "id:myid"), "/response/docs==[" + "{'id':'myid'}" + "]"); + assertJQ(req("q", "id:myid", "fl", "*"), "/response/docs==[" + "{'id':'myid'}" + "]"); + assertJQ( + req("q", "id:myid", "fl", "id,nonstored_dv_*"), + "/response/docs==[" + "{'id':'myid'}" + "]"); + assertJQ( + req("q", "id:myid", "fl", "id,nonstored_dv_str"), + "/response/docs==[" + "{'id':'myid','nonstored_dv_str':'dont see me'}" + "]"); } public void testManagedSchema() throws Exception { IndexSchema oldSchema = h.getCore().getLatestSchema(); StrField type = new StrField(); type.setTypeName("str"); - SchemaField falseDVASField = new SchemaField("false_dvas", type, - SchemaField.INDEXED | SchemaField.DOC_VALUES, null); - SchemaField trueDVASField = new SchemaField("true_dvas", type, - SchemaField.INDEXED | SchemaField.DOC_VALUES | SchemaField.USE_DOCVALUES_AS_STORED, null); + SchemaField falseDVASField = + new SchemaField("false_dvas", type, SchemaField.INDEXED | SchemaField.DOC_VALUES, null); + SchemaField trueDVASField = + new SchemaField( + "true_dvas", + type, + SchemaField.INDEXED | SchemaField.DOC_VALUES | SchemaField.USE_DOCVALUES_AS_STORED, + null); IndexSchema newSchema = oldSchema.addField(falseDVASField).addField(trueDVASField); h.getCore().setLatestSchema(newSchema); @@ -308,15 +375,14 @@ public void testManagedSchema() throws Exception { assertU(adoc("id", "myid1", "false_dvas", "101", "true_dvas", "102")); assertU(commit()); - assertJQ(req("q", "id:myid*", "fl", "*"), - "/response/docs==[" - + "{'id':'myid1', 'true_dvas':'102'}]"); + assertJQ( + req("q", "id:myid*", "fl", "*"), + "/response/docs==[" + "{'id':'myid1', 'true_dvas':'102'}]"); } private void doTest(String desc, String field, String type, String... value) { String id = "" + this.id++; - String[] xpaths = new String[value.length + 1]; if (value.length > 1) { @@ -336,10 +402,12 @@ private void doTest(String desc, String field, String type, String... value) { // Trie/String based Docvalues are sets, but stored values & Point DVs are ordered multisets, // so cardinality depends on the value source final int expectedCardinality = - (isStoredField(field) || (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP) - && ! field.startsWith("test_s"))) - ? value.length : valueSet.size(); - xpaths[value.length] = "*[count(//arr[@name='"+field+"']/"+type+")="+expectedCardinality+"]"; + (isStoredField(field) + || (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP) && !field.startsWith("test_s"))) + ? value.length + : valueSet.size(); + xpaths[value.length] = + "*[count(//arr[@name='" + field + "']/" + type + ")=" + expectedCardinality + "]"; assertU(adoc(fieldAndValues)); } else { @@ -367,64 +435,109 @@ private void doTest(String desc, String field, String type, String... 
value) { fl = "*"; assertQ(desc + ": " + fl, req("q", "*:*", "fl", fl), xpaths); - } - - // See SOLR-8740 for a discussion. This test is here to make sure we consciously change behavior of multiValued - // fields given that we can now return docValues fields. The behavior we've guaranteed in the past is that if - // multiValued fields are stored, they're returned in the document in the order they were added. + + // See SOLR-8740 for a discussion. This test is here to make sure we consciously change behavior + // of multiValued fields given that we can now return docValues fields. The behavior we've + // guaranteed in the past is that if multiValued fields are stored, they're returned in the + // document in the order they were added. // There are four new fieldTypes added: - // - // - // - // + // + // + // + // // - // If any of these tests break as a result of returning DocValues rather than stored values, make sure we reach some - // consensus that any breaks on back-compat are A Good Thing and that that behavior is carefully documented! + // If any of these tests break as a result of returning DocValues rather than stored values, make + // sure we reach some consensus that any breaks on back-compat are A Good Thing and that that + // behavior is carefully documented! @Test public void testMultivaluedOrdering() throws Exception { clearIndex(); - + // multiValued=true, docValues=true, stored=true. Should return in original order - assertU(adoc("id", "1", "test_mvt_dvt_st_str", "cccc", "test_mvt_dvt_st_str", "aaaa", "test_mvt_dvt_st_str", "bbbb")); - + assertU( + adoc( + "id", + "1", + "test_mvt_dvt_st_str", + "cccc", + "test_mvt_dvt_st_str", + "aaaa", + "test_mvt_dvt_st_str", + "bbbb")); + // multiValued=true, docValues=true, stored=false. Should return in sorted order - assertU(adoc("id", "2", "test_mvt_dvt_sf_str", "cccc", "test_mvt_dvt_sf_str", "aaaa", "test_mvt_dvt_sf_str", "bbbb")); - + assertU( + adoc( + "id", + "2", + "test_mvt_dvt_sf_str", + "cccc", + "test_mvt_dvt_sf_str", + "aaaa", + "test_mvt_dvt_sf_str", + "bbbb")); + // multiValued=true, docValues=false, stored=true. Should return in original order - assertU(adoc("id", "3", "test_mvt_dvf_st_str", "cccc", "test_mvt_dvf_st_str", "aaaa", "test_mvt_dvf_st_str", "bbbb")); - + assertU( + adoc( + "id", + "3", + "test_mvt_dvf_st_str", + "cccc", + "test_mvt_dvf_st_str", + "aaaa", + "test_mvt_dvf_st_str", + "bbbb")); + // multiValued=true, docValues=not specified, stored=true. Should return in original order - assertU(adoc("id", "4", "test_mvt_dvu_st_str", "cccc", "test_mvt_dvu_st_str", "aaaa", "test_mvt_dvu_st_str", "bbbb")); - + assertU( + adoc( + "id", + "4", + "test_mvt_dvu_st_str", + "cccc", + "test_mvt_dvu_st_str", + "aaaa", + "test_mvt_dvu_st_str", + "bbbb")); + assertU(commit()); - - assertJQ(req("q", "id:1", "fl", "test_mvt_dvt_st_str"), + + assertJQ( + req("q", "id:1", "fl", "test_mvt_dvt_st_str"), "/response/docs/[0]/test_mvt_dvt_st_str/[0]==cccc", "/response/docs/[0]/test_mvt_dvt_st_str/[1]==aaaa", "/response/docs/[0]/test_mvt_dvt_st_str/[2]==bbbb"); - // Currently, this test fails since stored=false. When SOLR-8740 is committed, it should not throw an exception + // Currently, this test fails since stored=false. When SOLR-8740 is committed, it should not + // throw an exception // and should succeed, returning the field in sorted order. 
try { - assertJQ(req("q", "id:2", "fl", "test_mvt_dvt_sf_str"), + assertJQ( + req("q", "id:2", "fl", "test_mvt_dvt_sf_str"), "/response/docs/[0]/test_mvt_dvt_sf_str/[0]==aaaa", "/response/docs/[0]/test_mvt_dvt_sf_str/[1]==bbbb", "/response/docs/[0]/test_mvt_dvt_sf_str/[2]==cccc"); } catch (Exception e) { - // do nothing until SOLR-8740 is committed. At that point this should not throw an exception. + // do nothing until SOLR-8740 is committed. At that point this should not throw an exception. // NOTE: I think the test is correct after 8740 so just remove the try/catch } - assertJQ(req("q", "id:3", "fl", "test_mvt_dvf_st_str"), + assertJQ( + req("q", "id:3", "fl", "test_mvt_dvf_st_str"), "/response/docs/[0]/test_mvt_dvf_st_str/[0]==cccc", "/response/docs/[0]/test_mvt_dvf_st_str/[1]==aaaa", "/response/docs/[0]/test_mvt_dvf_st_str/[2]==bbbb"); - assertJQ(req("q", "id:4", "fl", "test_mvt_dvu_st_str"), + assertJQ( + req("q", "id:4", "fl", "test_mvt_dvu_st_str"), "/response/docs/[0]/test_mvt_dvu_st_str/[0]==cccc", "/response/docs/[0]/test_mvt_dvu_st_str/[1]==aaaa", "/response/docs/[0]/test_mvt_dvu_st_str/[2]==bbbb"); - } } diff --git a/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored2.java b/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored2.java index cab4e27e93a..1f05641a6d1 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored2.java +++ b/solr/core/src/test/org/apache/solr/schema/TestUseDocValuesAsStored2.java @@ -18,7 +18,6 @@ import java.io.File; import java.util.Map; - import org.apache.commons.io.FileUtils; import org.apache.solr.common.util.Utils; import org.apache.solr.rest.schema.TestBulkSchemaAPI; @@ -27,10 +26,9 @@ import org.junit.After; import org.junit.Before; -/** - * Tests the useDocValuesAsStored functionality. - */ -// See: https://issues.apache.org/jira/browse/SOLR-12028 Tests cannot remove files on Windows machines occasionally +/** Tests the useDocValuesAsStored functionality. 
*/ +// See: https://issues.apache.org/jira/browse/SOLR-12028 Tests cannot remove files on Windows +// machines occasionally public class TestUseDocValuesAsStored2 extends RestTestBase { @Before @@ -41,8 +39,13 @@ public void before() throws Exception { System.setProperty("managed.schema.mutable", "true"); System.setProperty("enable.update.log", "false"); - createJettyAndHarness(tmpSolrHome.getAbsolutePath(), "solrconfig-managed-schema.xml", "schema-rest.xml", - "/solr", true, null); + createJettyAndHarness( + tmpSolrHome.getAbsolutePath(), + "solrconfig-managed-schema.xml", + "schema-rest.xml", + "/solr", + true, + null); } @After @@ -57,35 +60,35 @@ public void after() throws Exception { } restTestHarness = null; } - public void testSchemaAPI() throws Exception { RestTestHarness harness = restTestHarness; - String payload = "{\n" + - " 'add-field' : {\n" + - " 'name':'a1',\n" + - " 'type': 'string',\n" + - " 'stored':false,\n" + - " 'docValues':true,\n" + - " 'indexed':false\n" + - " },\n" + - " 'add-field' : {\n" + - " 'name':'a2',\n" + - " 'type': 'string',\n" + - " 'stored':false,\n" + - " 'useDocValuesAsStored':true,\n" + - " 'docValues':true,\n" + - " 'indexed':true\n" + - " },\n" + - " 'add-field' : {\n" + - " 'name':'a3',\n" + - " 'type': 'string',\n" + - " 'stored':false,\n" + - " 'useDocValuesAsStored':false,\n" + - " 'docValues':true,\n" + - " 'indexed':true\n" + - " }\n" + - " }\n"; + String payload = + "{\n" + + " 'add-field' : {\n" + + " 'name':'a1',\n" + + " 'type': 'string',\n" + + " 'stored':false,\n" + + " 'docValues':true,\n" + + " 'indexed':false\n" + + " },\n" + + " 'add-field' : {\n" + + " 'name':'a2',\n" + + " 'type': 'string',\n" + + " 'stored':false,\n" + + " 'useDocValuesAsStored':true,\n" + + " 'docValues':true,\n" + + " 'indexed':true\n" + + " },\n" + + " 'add-field' : {\n" + + " 'name':'a3',\n" + + " 'type': 'string',\n" + + " 'stored':false,\n" + + " 'useDocValuesAsStored':false,\n" + + " 'docValues':true,\n" + + " 'indexed':true\n" + + " }\n" + + " }\n"; String response = harness.post("/schema", json(payload)); @@ -98,12 +101,12 @@ public void testSchemaAPI() throws Exception { assertNull(m.get("useDocValuesAsStored")); // useDocValuesAsStored=true - m = TestBulkSchemaAPI.getObj(harness,"a2", "fields"); + m = TestBulkSchemaAPI.getObj(harness, "a2", "fields"); assertNotNull("field a2 not created", m); assertEquals(Boolean.TRUE, m.get("useDocValuesAsStored")); // useDocValuesAsStored=false - m = TestBulkSchemaAPI.getObj(harness,"a3", "fields"); + m = TestBulkSchemaAPI.getObj(harness, "a3", "fields"); assertNotNull("field a3 not created", m); assertEquals(Boolean.FALSE, m.get("useDocValuesAsStored")); @@ -111,63 +114,68 @@ public void testSchemaAPI() throws Exception { assertU(adoc("id", "myid1", "a1", "1", "a2", "2", "a3", "3")); assertU(commit()); - RestTestBase.assertJQ("/select?q=id:myid*&fl=*", - "/response/docs==[{'id':'myid1', 'a1':'1', 'a2':'2'}]"); + RestTestBase.assertJQ( + "/select?q=id:myid*&fl=*", "/response/docs==[{'id':'myid1', 'a1':'1', 'a2':'2'}]"); - RestTestBase.assertJQ("/select?q=id:myid*&fl=id,a1,a2,a3", + RestTestBase.assertJQ( + "/select?q=id:myid*&fl=id,a1,a2,a3", "/response/docs==[{'id':'myid1', 'a1':'1', 'a2':'2', 'a3':'3'}]"); - RestTestBase.assertJQ("/select?q=id:myid*&fl=a3", - "/response/docs==[{'a3':'3'}]"); + RestTestBase.assertJQ("/select?q=id:myid*&fl=a3", "/response/docs==[{'a3':'3'}]"); // this will return a3 because it is explicitly requested even if '*' is specified - RestTestBase.assertJQ("/select?q=id:myid*&fl=*,a3", + 
RestTestBase.assertJQ( + "/select?q=id:myid*&fl=*,a3", "/response/docs==[{'id':'myid1', 'a1':'1', 'a2':'2', 'a3':'3'}]"); - // this will not return a3 because the glob 'a*' will match only stored + useDocValuesAsStored=true fields - RestTestBase.assertJQ("/select?q=id:myid*&fl=id,a*", - "/response/docs==[{'id':'myid1', 'a1':'1', 'a2':'2'}]"); - + // this will not return a3 because the glob 'a*' will match only stored + + // useDocValuesAsStored=true fields + RestTestBase.assertJQ( + "/select?q=id:myid*&fl=id,a*", "/response/docs==[{'id':'myid1', 'a1':'1', 'a2':'2'}]"); + // Test replace-field // Explicitly set useDocValuesAsStored to false - payload = "{\n" + - " 'replace-field' : {\n" + - " 'name':'a1',\n" + - " 'type': 'string',\n" + - " 'stored':false,\n" + - " 'useDocValuesAsStored':false,\n" + - " 'docValues':true,\n" + - " 'indexed':false\n" + - " }}"; + payload = + "{\n" + + " 'replace-field' : {\n" + + " 'name':'a1',\n" + + " 'type': 'string',\n" + + " 'stored':false,\n" + + " 'useDocValuesAsStored':false,\n" + + " 'docValues':true,\n" + + " 'indexed':false\n" + + " }}"; response = harness.post("/schema", json(payload)); m = TestBulkSchemaAPI.getObj(harness, "a1", "fields"); assertNotNull("field a1 doesn't exist any more", m); assertEquals(Boolean.FALSE, m.get("useDocValuesAsStored")); // Explicitly set useDocValuesAsStored to true - payload = "{\n" + - " 'replace-field' : {\n" + - " 'name':'a1',\n" + - " 'type': 'string',\n" + - " 'stored':false,\n" + - " 'useDocValuesAsStored':true,\n" + - " 'docValues':true,\n" + - " 'indexed':false\n" + - " }}"; + payload = + "{\n" + + " 'replace-field' : {\n" + + " 'name':'a1',\n" + + " 'type': 'string',\n" + + " 'stored':false,\n" + + " 'useDocValuesAsStored':true,\n" + + " 'docValues':true,\n" + + " 'indexed':false\n" + + " }}"; response = harness.post("/schema", json(payload)); m = TestBulkSchemaAPI.getObj(harness, "a1", "fields"); assertNotNull("field a1 doesn't exist any more", m); assertEquals(Boolean.TRUE, m.get("useDocValuesAsStored")); // add a field which is stored as well as docvalues - payload = "{ 'add-field' : {\n" + - " 'name':'a4',\n" + - " 'type': 'string',\n" + - " 'stored':true,\n" + - " 'useDocValuesAsStored':true,\n" + - " 'docValues':true,\n" + - " 'indexed':true\n" + - " }}"; + payload = + "{ 'add-field' : {\n" + + " 'name':'a4',\n" + + " 'type': 'string',\n" + + " 'stored':true,\n" + + " 'useDocValuesAsStored':true,\n" + + " 'docValues':true,\n" + + " 'indexed':true\n" + + " }}"; response = harness.post("/schema", json(payload)); m = TestBulkSchemaAPI.getObj(harness, "a4", "fields"); assertNotNull("field a4 not found", m); @@ -176,8 +184,8 @@ public void testSchemaAPI() throws Exception { assertU(adoc("id", "myid1", "a1", "1", "a2", "2", "a3", "3", "a4", "4")); assertU(commit()); - RestTestBase.assertJQ("/select?q=id:myid*&fl=*", + RestTestBase.assertJQ( + "/select?q=id:myid*&fl=*", "/response/docs==[{'id':'myid1', 'a1':'1', 'a2':'2', 'a4':'4'}]"); - } } diff --git a/solr/core/src/test/org/apache/solr/schema/TrieIntPrefixActsAsRangeQueryFieldType.java b/solr/core/src/test/org/apache/solr/schema/TrieIntPrefixActsAsRangeQueryFieldType.java index 29fd51612d2..c67cb0a00c2 100644 --- a/solr/core/src/test/org/apache/solr/schema/TrieIntPrefixActsAsRangeQueryFieldType.java +++ b/solr/core/src/test/org/apache/solr/schema/TrieIntPrefixActsAsRangeQueryFieldType.java @@ -20,8 +20,9 @@ import org.apache.solr.search.QParser; /** - * Custom field type that overrides the prefix query behavior to map "X*" to [X TO Integer.MAX_VALUE]. 
- * * This is used for testing overridden prefix query for custom fields in TestOverriddenPrefixQueryForCustomFieldType + * Custom field type that overrides the prefix query behavior to map "X*" to [X TO + * Integer.MAX_VALUE]. This is used for testing overridden prefix query for custom fields in + * TestOverriddenPrefixQueryForCustomFieldType * * @see IntPointPrefixActsAsRangeQueryFieldType * @deprecated Trie fields are deprecated as of Solr 7.0 @@ -32,5 +33,4 @@ public class TrieIntPrefixActsAsRangeQueryFieldType extends TrieIntField { public Query getPrefixQuery(QParser parser, SchemaField sf, String termStr) { return getRangeQuery(parser, sf, termStr, Integer.MAX_VALUE + "", true, false); } - } diff --git a/solr/core/src/test/org/apache/solr/schema/WrappedIntPointField.java b/solr/core/src/test/org/apache/solr/schema/WrappedIntPointField.java index db421e580e9..b6d3959199a 100644 --- a/solr/core/src/test/org/apache/solr/schema/WrappedIntPointField.java +++ b/solr/core/src/test/org/apache/solr/schema/WrappedIntPointField.java @@ -22,9 +22,7 @@ import org.apache.lucene.search.DoubleValuesSource; import org.apache.lucene.search.SortField; -/** - * Custom field wrapping an int, to test sorting via a custom comparator. - */ +/** Custom field wrapping an int, to test sorting via a custom comparator. */ public class WrappedIntPointField extends IntPointField { /** static helper for re-use in sibling trie class */ public static SortField getSortField(final SortField superSort, final SchemaField field) { @@ -46,7 +44,7 @@ public SortField getSortField(final SchemaField field, final boolean reverse) { } private static DoubleValuesSource fromSortField(SortField field) { - switch(field.getType()) { + switch (field.getType()) { case INT: return DoubleValuesSource.fromIntField(field.getField()); case LONG: diff --git a/solr/core/src/test/org/apache/solr/schema/WrappedTrieIntField.java b/solr/core/src/test/org/apache/solr/schema/WrappedTrieIntField.java index f1d01fda4ca..3c2c24fd75e 100644 --- a/solr/core/src/test/org/apache/solr/schema/WrappedTrieIntField.java +++ b/solr/core/src/test/org/apache/solr/schema/WrappedTrieIntField.java @@ -20,6 +20,7 @@ /** * Custom field wrapping an int, to test sorting via a custom comparator.
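+ * Sorting delegates to the static helper in WrappedIntPointField (see its "re-use in sibling trie + * class" note), so the trie and point variants of this custom sort behave identically.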
+ * * @deprecated Trie fields are deprecated as of Solr 7.0 * @see WrappedIntPointField */ diff --git a/solr/core/src/test/org/apache/solr/search/AnalyticsMergeStrategyTest.java b/solr/core/src/test/org/apache/solr/search/AnalyticsMergeStrategyTest.java index 94d90e0fc46..169dc249c51 100644 --- a/solr/core/src/test/org/apache/solr/search/AnalyticsMergeStrategyTest.java +++ b/solr/core/src/test/org/apache/solr/search/AnalyticsMergeStrategyTest.java @@ -16,6 +16,8 @@ */ package org.apache.solr.search; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope; import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.response.QueryResponse; @@ -24,20 +26,15 @@ import org.junit.BeforeClass; import org.junit.Test; -import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; -import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope; - /** * Test for QueryComponent's distributed querying * * @see org.apache.solr.handler.component.QueryComponent */ - -@SolrTestCaseJ4.SuppressSSL(bugUrl="https://issues.apache.org/jira/browse/SOLR-8433") +@SolrTestCaseJ4.SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-8433") @ThreadLeakScope(Scope.NONE) public class AnalyticsMergeStrategyTest extends BaseDistributedSearchTestCase { - public AnalyticsMergeStrategyTest() { stress = 0; } @@ -52,25 +49,24 @@ public static void setUpBeforeClass() throws Exception { public void test() throws Exception { del("*:*"); - index_specific(0,"id","1", "sort_i", "5"); - index_specific(0,"id","2", "sort_i", "50"); - index_specific(1,"id","5", "sort_i", "4"); - index_specific(1,"id","6", "sort_i", "10"); - index_specific(0,"id","7", "sort_i", "1"); - index_specific(1,"id","8", "sort_i", "2"); - index_specific(2,"id","9", "sort_i", "1000"); - index_specific(2,"id","10", "sort_i", "1500"); - index_specific(2,"id","11", "sort_i", "1300"); - index_specific(1,"id","12", "sort_i", "15"); - index_specific(1,"id","13", "sort_i", "16"); + index_specific(0, "id", "1", "sort_i", "5"); + index_specific(0, "id", "2", "sort_i", "50"); + index_specific(1, "id", "5", "sort_i", "4"); + index_specific(1, "id", "6", "sort_i", "10"); + index_specific(0, "id", "7", "sort_i", "1"); + index_specific(1, "id", "8", "sort_i", "2"); + index_specific(2, "id", "9", "sort_i", "1000"); + index_specific(2, "id", "10", "sort_i", "1500"); + index_specific(2, "id", "11", "sort_i", "1300"); + index_specific(1, "id", "12", "sort_i", "15"); + index_specific(1, "id", "13", "sort_i", "16"); commit(); /* - * The count qparser plugin is pointing to AnalyticsTestQParserPlugin. This class defines a simple AnalyticsQuery and - * has two merge strategies. If the iterate local param is true then an InterativeMergeStrategy is used. - */ - + * The count qparser plugin is pointing to AnalyticsTestQParserPlugin. This class defines a simple AnalyticsQuery and + * has two merge strategies. If the iterate local param is true then an IterativeMergeStrategy is used.
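+ * The plain strategy simply sums each shard's 'mycount' value; the iterative strategy sums them + * and then issues a follow-up {!count base=<sum>} request back to every shard before producing + * the final merged count.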
+ */ ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); @@ -79,7 +75,7 @@ public void test() throws Exception { QueryResponse rsp = queryServer(params); assertCount(rsp, 11); - //Test IterativeMergeStrategy + // Test IterativeMergeStrategy params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("fq", "{!count iterate=true}"); @@ -99,10 +95,10 @@ private void assertCountOnly(QueryResponse rsp, int count) throws Exception { @SuppressWarnings({"rawtypes"}) NamedList response = rsp.getResponse(); @SuppressWarnings({"rawtypes"}) - NamedList analytics = (NamedList)response.get("analytics"); - Integer c = (Integer)analytics.get("mycount"); - if(c.intValue() != count) { - throw new Exception("Count is not correct:"+count+":"+c.intValue()); + NamedList analytics = (NamedList) response.get("analytics"); + Integer c = (Integer) analytics.get("mycount"); + if (c.intValue() != count) { + throw new Exception("Count is not correct:" + count + ":" + c.intValue()); } } @@ -110,15 +106,15 @@ private void assertCount(QueryResponse rsp, int count) throws Exception { @SuppressWarnings({"rawtypes"}) NamedList response = rsp.getResponse(); @SuppressWarnings({"rawtypes"}) - NamedList analytics = (NamedList)response.get("analytics"); - Integer c = (Integer)analytics.get("mycount"); - if(c.intValue() != count) { - throw new Exception("Count is not correct:"+count+":"+c.intValue()); + NamedList analytics = (NamedList) response.get("analytics"); + Integer c = (Integer) analytics.get("mycount"); + if (c.intValue() != count) { + throw new Exception("Count is not correct:" + count + ":" + c.intValue()); } long numFound = rsp.getResults().getNumFound(); - if(c.intValue() != numFound) { - throw new Exception("Count does not equal numFound:"+c.intValue()+":"+numFound); + if (c.intValue() != numFound) { + throw new Exception("Count does not equal numFound:" + c.intValue() + ":" + numFound); } } } diff --git a/solr/core/src/test/org/apache/solr/search/AnalyticsQueryTest.java b/solr/core/src/test/org/apache/solr/search/AnalyticsQueryTest.java index 59bf95c80d5..6c1474eacf5 100644 --- a/solr/core/src/test/org/apache/solr/search/AnalyticsQueryTest.java +++ b/solr/core/src/test/org/apache/solr/search/AnalyticsQueryTest.java @@ -39,33 +39,28 @@ public void setUp() throws Exception { assertU(commit()); } - @Test public void testAnalyticsQuery() throws Exception { - String[] doc = {"id","1", "sort_i", "100"}; + String[] doc = {"id", "1", "sort_i", "100"}; assertU(adoc(doc)); assertU(commit()); - String[] doc1 = {"id","2", "sort_i", "50"}; + String[] doc1 = {"id", "2", "sort_i", "50"}; assertU(adoc(doc1)); - - - String[] doc2 = {"id","3", "sort_i", "1000"}; + String[] doc2 = {"id", "3", "sort_i", "1000"}; assertU(adoc(doc2)); assertU(commit()); - String[] doc3 = {"id","4", "sort_i", "2000"}; + String[] doc3 = {"id", "4", "sort_i", "2000"}; assertU(adoc(doc3)); - - String[] doc4 = {"id","5", "sort_i", "2"}; + String[] doc4 = {"id", "5", "sort_i", "2"}; assertU(adoc(doc4)); assertU(commit()); - String[] doc5 = {"id","6", "sort_i","11"}; + String[] doc5 = {"id", "6", "sort_i", "11"}; assertU(adoc(doc5)); assertU(commit()); - ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); @@ -79,6 +74,5 @@ public void testAnalyticsQuery() throws Exception { params.add("fq", "{!count}"); assertQ(req(params), " //lst[@name='analytics']/int[@name='mycount'][.=2]"); - } } diff --git a/solr/core/src/test/org/apache/solr/search/AnalyticsTestQParserPlugin.java 
b/solr/core/src/test/org/apache/solr/search/AnalyticsTestQParserPlugin.java index b2a733f8f2a..72b96f83d32 100644 --- a/solr/core/src/test/org/apache/solr/search/AnalyticsTestQParserPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/AnalyticsTestQParserPlugin.java @@ -16,45 +16,42 @@ */ package org.apache.solr.search; -import org.apache.lucene.search.Query; +import java.io.IOException; +import java.util.List; +import java.util.concurrent.Future; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; import org.apache.solr.client.solrj.request.QueryRequest; - import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.handler.component.IterativeMergeStrategy; +import org.apache.solr.handler.component.MergeStrategy; import org.apache.solr.handler.component.ResponseBuilder; import org.apache.solr.handler.component.ShardRequest; -import org.apache.solr.request.SolrQueryRequest; -import org.apache.solr.handler.component.MergeStrategy; import org.apache.solr.handler.component.ShardResponse; - -import java.util.List; -import java.util.concurrent.Future; -import java.io.IOException; +import org.apache.solr.request.SolrQueryRequest; public class AnalyticsTestQParserPlugin extends QParserPlugin { - - public QParser createParser(String query, SolrParams localParams, SolrParams params, SolrQueryRequest req) { + public QParser createParser( + String query, SolrParams localParams, SolrParams params, SolrQueryRequest req) { return new TestAnalyticsQueryParser(query, localParams, params, req); } static class TestAnalyticsQueryParser extends QParser { - public TestAnalyticsQueryParser(String query, SolrParams localParams, SolrParams params, SolrQueryRequest req) { + public TestAnalyticsQueryParser( + String query, SolrParams localParams, SolrParams params, SolrQueryRequest req) { super(query, localParams, params, req); } public Query parse() throws SyntaxError { int base = localParams.getInt("base", 0); boolean iterate = localParams.getBool("iterate", false); - if(iterate) - return new TestAnalyticsQuery(base, new TestIterative()); - else - return new TestAnalyticsQuery(base, new TestAnalyticsMergeStrategy()); + if (iterate) return new TestAnalyticsQuery(base, new TestIterative()); + else return new TestAnalyticsQuery(base, new TestAnalyticsMergeStrategy()); } } @@ -92,9 +89,9 @@ public void finish() throws IOException { @SuppressWarnings({"rawtypes"}) NamedList analytics = new NamedList(); rb.rsp.add("analytics", analytics); - analytics.add("mycount", count+base); - if(this.delegate instanceof DelegatingCollector) { - ((DelegatingCollector)this.delegate).finish(); + analytics.add("mycount", count + base); + if (this.delegate instanceof DelegatingCollector) { + ((DelegatingCollector) this.delegate).finish(); } } } @@ -113,8 +110,7 @@ public int getCost() { return 100; } - public void handleMergeFields(ResponseBuilder rb, SolrIndexSearcher searcher) { - } + public void handleMergeFields(ResponseBuilder rb, SolrIndexSearcher searcher) {} @SuppressWarnings({"unchecked"}) public void merge(ResponseBuilder rb, ShardRequest shardRequest) { @@ -122,12 +118,12 @@ public void merge(ResponseBuilder rb, ShardRequest shardRequest) { @SuppressWarnings({"rawtypes"}) NamedList merged = new NamedList(); - for(ShardResponse shardResponse : shardRequest.responses) { + for (ShardResponse shardResponse : 
shardRequest.responses) { @SuppressWarnings({"rawtypes"}) NamedList response = shardResponse.getSolrResponse().getResponse(); @SuppressWarnings({"rawtypes"}) - NamedList analytics = (NamedList)response.get("analytics"); - Integer c = (Integer)analytics.get("mycount"); + NamedList analytics = (NamedList) response.get("analytics"); + Integer c = (Integer) analytics.get("mycount"); count += c.intValue(); } @@ -136,28 +132,27 @@ public void merge(ResponseBuilder rb, ShardRequest shardRequest) { } } - static class TestIterative extends IterativeMergeStrategy { + static class TestIterative extends IterativeMergeStrategy { @SuppressWarnings({"unchecked"}) public void process(ResponseBuilder rb, ShardRequest sreq) throws Exception { int count = 0; - for(ShardResponse shardResponse : sreq.responses) { + for (ShardResponse shardResponse : sreq.responses) { @SuppressWarnings({"rawtypes"}) NamedList response = shardResponse.getSolrResponse().getResponse(); @SuppressWarnings({"rawtypes"}) - NamedList analytics = (NamedList)response.get("analytics"); - Integer c = (Integer)analytics.get("mycount"); + NamedList analytics = (NamedList) response.get("analytics"); + Integer c = (Integer) analytics.get("mycount"); count += c.intValue(); } ModifiableSolrParams params = new ModifiableSolrParams(); params.add("distrib", "false"); - params.add("fq","{!count base="+count+"}"); - params.add("q","*:*"); - + params.add("fq", "{!count base=" + count + "}"); + params.add("q", "*:*"); /* - * Call back to all the shards in the response and process the result. + * Call back to all the shards in the response and process the result. */ QueryRequest request = new QueryRequest(params); @@ -165,11 +160,11 @@ public void process(ResponseBuilder rb, ShardRequest sreq) throws Exception { int nextCount = 0; - for(Future future : futures) { + for (Future future : futures) { QueryResponse response = future.get().getResponse(); @SuppressWarnings({"rawtypes"}) - NamedList analytics = (NamedList)response.getResponse().get("analytics"); - Integer c = (Integer)analytics.get("mycount"); + NamedList analytics = (NamedList) response.getResponse().get("analytics"); + Integer c = (Integer) analytics.get("mycount"); nextCount += c.intValue(); } diff --git a/solr/core/src/test/org/apache/solr/search/ApacheLuceneSolrNearQueryBuilder.java b/solr/core/src/test/org/apache/solr/search/ApacheLuceneSolrNearQueryBuilder.java index 01c1e63a42b..8ab8f146e8c 100644 --- a/solr/core/src/test/org/apache/solr/search/ApacheLuceneSolrNearQueryBuilder.java +++ b/solr/core/src/test/org/apache/solr/search/ApacheLuceneSolrNearQueryBuilder.java @@ -30,8 +30,8 @@ public class ApacheLuceneSolrNearQueryBuilder extends SolrSpanQueryBuilder { - public ApacheLuceneSolrNearQueryBuilder(String defaultField, Analyzer analyzer, - SolrQueryRequest req, SpanQueryBuilder spanFactory) { + public ApacheLuceneSolrNearQueryBuilder( + String defaultField, Analyzer analyzer, SolrQueryRequest req, SpanQueryBuilder spanFactory) { super(defaultField, analyzer, req, spanFactory); } @@ -41,14 +41,14 @@ public Query getQuery(Element e) throws ParserException { public SpanQuery getSpanQuery(Element e) throws ParserException { final String fieldName = DOMUtils.getAttributeWithInheritanceOrFail(e, "fieldName"); - final SpanQuery[] spanQueries = new SpanQuery[]{ - new SpanTermQuery(new Term(fieldName, "Apache")), - new SpanTermQuery(new Term(fieldName, "Lucene")), - new SpanTermQuery(new Term(fieldName, "Solr")) - }; + final SpanQuery[] spanQueries = + new SpanQuery[] { + new SpanTermQuery(new 
Term(fieldName, "Apache")), + new SpanTermQuery(new Term(fieldName, "Lucene")), + new SpanTermQuery(new Term(fieldName, "Solr")) + }; final int slop = 42; final boolean inOrder = false; return new SpanNearQuery(spanQueries, slop, inOrder); } - } diff --git a/solr/core/src/test/org/apache/solr/search/ChooseOneWordQueryBuilder.java b/solr/core/src/test/org/apache/solr/search/ChooseOneWordQueryBuilder.java index 3fd29edd679..fccccf77065 100644 --- a/solr/core/src/test/org/apache/solr/search/ChooseOneWordQueryBuilder.java +++ b/solr/core/src/test/org/apache/solr/search/ChooseOneWordQueryBuilder.java @@ -31,8 +31,8 @@ public class ChooseOneWordQueryBuilder extends SolrSpanQueryBuilder { - public ChooseOneWordQueryBuilder(String defaultField, Analyzer analyzer, SolrQueryRequest req, - SpanQueryBuilder spanFactory) { + public ChooseOneWordQueryBuilder( + String defaultField, Analyzer analyzer, SolrQueryRequest req, SpanQueryBuilder spanFactory) { super(defaultField, analyzer, req, spanFactory); } @@ -41,15 +41,14 @@ public Query getQuery(Element e) throws ParserException { } public SpanQuery getSpanQuery(Element e) throws ParserException { - return (SpanQuery)implGetQuery(e, true); + return (SpanQuery) implGetQuery(e, true); } public Query implGetQuery(Element e, boolean span) throws ParserException { Term term = null; final String fieldName = DOMUtils.getAttributeWithInheritanceOrFail(e, "fieldName"); for (Node node = e.getFirstChild(); node != null; node = node.getNextSibling()) { - if (node.getNodeType() == Node.ELEMENT_NODE && - node.getNodeName().equals("Word")) { + if (node.getNodeType() == Node.ELEMENT_NODE && node.getNodeName().equals("Word")) { final String word = DOMUtils.getNonBlankTextOrFail((Element) node); final Term t = new Term(fieldName, word); if (term == null || term.text().length() < t.text().length()) { diff --git a/solr/core/src/test/org/apache/solr/search/CurrencyRangeFacetCloudTest.java b/solr/core/src/test/org/apache/solr/search/CurrencyRangeFacetCloudTest.java index c54c7f12f5d..1e0cc430688 100644 --- a/solr/core/src/test/org/apache/solr/search/CurrencyRangeFacetCloudTest.java +++ b/solr/core/src/test/org/apache/solr/search/CurrencyRangeFacetCloudTest.java @@ -21,7 +21,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; - import org.apache.lucene.util.TestUtil; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -33,106 +32,122 @@ import org.apache.solr.common.params.CoreAdminParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.schema.CurrencyFieldTypeTest; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import org.junit.BeforeClass; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class CurrencyRangeFacetCloudTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - + private static final String COLLECTION = MethodHandles.lookup().lookupClass().getName(); private static final String CONF = COLLECTION + "_configSet"; - + private static String FIELD = null; // randomized private static final List STR_VALS = Arrays.asList("x0", "x1", "x2"); // NOTE: in our test conversions EUR uses an asynetric exchange rate - // these are the equivalent values relative to: USD EUR GBP - private static final List VALUES = Arrays.asList("10.00,USD", // 10.00,USD 25.00,EUR 5.00,GBP - "15.00,EUR", // 7.50,USD 15.00,EUR 7.50,GBP - "6.00,GBP", // 
12.00,USD 12.00,EUR 6.00,GBP - "7.00,EUR", // 3.50,USD 7.00,EUR 3.50,GBP - "2,GBP"); // 4.00,USD 4.00,EUR 2.00,GBP + // these are the equivalent values relative to: USD EUR GBP + private static final List<String> VALUES = + Arrays.asList( + "10.00,USD", // 10.00,USD 25.00,EUR 5.00,GBP + "15.00,EUR", // 7.50,USD 15.00,EUR 7.50,GBP + "6.00,GBP", // 12.00,USD 12.00,EUR 6.00,GBP + "7.00,EUR", // 3.50,USD 7.00,EUR 3.50,GBP + "2,GBP"); // 4.00,USD 4.00,EUR 2.00,GBP private static final int NUM_DOCS = STR_VALS.size() * VALUES.size(); - + @BeforeClass public static void setupCluster() throws Exception { CurrencyFieldTypeTest.assumeCurrencySupport("USD", "EUR", "MXN", "GBP", "JPY", "NOK"); FIELD = usually() ? "amount_CFT" : "amount"; - - final int numShards = TestUtil.nextInt(random(),1,5); + + final int numShards = TestUtil.nextInt(random(), 1, 5); final int numReplicas = 1; final int nodeCount = numShards * numReplicas; configureCluster(nodeCount) - .addConfig(CONF, Paths.get(TEST_HOME(), "collection1", "conf")) - .configure(); + .addConfig(CONF, Paths.get(TEST_HOME(), "collection1", "conf")) + .configure(); + + assertEquals( + 0, + (CollectionAdminRequest.createCollection(COLLECTION, CONF, numShards, numReplicas) + .setProperties( + Collections.singletonMap(CoreAdminParams.CONFIG, "solrconfig-minimal.xml")) + .process(cluster.getSolrClient())) + .getStatus()); - assertEquals(0, (CollectionAdminRequest.createCollection(COLLECTION, CONF, numShards, numReplicas) - .setProperties(Collections.singletonMap(CoreAdminParams.CONFIG, "solrconfig-minimal.xml")) - .process(cluster.getSolrClient())).getStatus()); - cluster.getSolrClient().setDefaultCollection(COLLECTION); - - for (int id = 0; id < NUM_DOCS; id++) { // we're indexing each Currency value in 3 docs, each with a diff 'x_s' field value - // use modulo to pick the values, so we don't add the docs in strict order of either VALUES of STR_VALS - // (that way if we want ot filter by id later, it's an independent variable) + + // we're indexing each Currency value in 3 docs, each with a diff 'x_s' field value + // use modulo to pick the values, so we don't add the docs in strict order of either VALUES or + // STR_VALS (that way if we want to filter by id later, it's an independent variable) + for (int id = 0; id < NUM_DOCS; id++) { final String x = STR_VALS.get(id % STR_VALS.size()); final String val = VALUES.get(id % VALUES.size()); - assertEquals(0, (new UpdateRequest().add(sdoc("id", "" + id, - "x_s", x, - FIELD, val)) - ).process(cluster.getSolrClient()).getStatus()); - + assertEquals( + 0, + (new UpdateRequest().add(sdoc("id", "" + id, "x_s", x, FIELD, val))) + .process(cluster.getSolrClient()) + .getStatus()); } assertEquals(0, cluster.getSolrClient().commit().getStatus()); } public void testSimpleRangeFacetsOfSymetricRates() throws Exception { for (boolean use_mincount : Arrays.asList(true, false)) { - + // exchange rates relative to USD... // - // for all of these permutations, the numDocs in each bucket that we get back should be the same - // (regardless of the any asymetric echanges ranges, or the currency used for the 'gap') because the - // start & end are always in USD. + // for all of these permutations, the numDocs in each bucket that we get back should be the + // same (regardless of any asymmetric exchange rates, or the currency used for the 'gap') + // because the start & end are always in USD.
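+ // (concretely, per the VALUES table above: 10.00,USD converts to 25.00,EUR, but 15.00,EUR + // converts back to only 7.50,USD, so bucket membership would shift if start/end were in EUR)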
// // NOTE: // - 0,1,2 are the *input* start,gap,end // - 3,4,5 are the *normalized* start,gap,end expected in the response - for (List args : Arrays.asList(// default currency is USD - Arrays.asList("4", "1.00", "11.0", - "4.00,USD", "1.00,USD", "11.00,USD"), - // explicit USD - Arrays.asList("4,USD", "1,USD", "11,USD", - "4.00,USD", "1.00,USD", "11.00,USD"), - // Gap can be in diff currency (but start/end must currently match) - Arrays.asList("4.00,USD", "000.50,GBP", "11,USD", - "4.00,USD", ".50,GBP", "11.00,USD"), - Arrays.asList("4.00,USD", "2,EUR", "11,USD", - "4.00,USD", "2.00,EUR", "11.00,USD"))) { - + for (List args : + Arrays.asList( // default currency is USD + Arrays.asList("4", "1.00", "11.0", "4.00,USD", "1.00,USD", "11.00,USD"), + // explicit USD + Arrays.asList("4,USD", "1,USD", "11,USD", "4.00,USD", "1.00,USD", "11.00,USD"), + // Gap can be in diff currency (but start/end must currently match) + Arrays.asList("4.00,USD", "000.50,GBP", "11,USD", "4.00,USD", ".50,GBP", "11.00,USD"), + Arrays.asList("4.00,USD", "2,EUR", "11,USD", "4.00,USD", "2.00,EUR", "11.00,USD"))) { + assertEquals(6, args.size()); // sanity check - + // first let's check facet.range - SolrQuery solrQuery = new SolrQuery("q", "*:*", "rows", "0", "facet", "true", "facet.range", FIELD, - "facet.mincount", (use_mincount ? "3" : "0"), - "f." + FIELD + ".facet.range.start", args.get(0), - "f." + FIELD + ".facet.range.gap", args.get(1), - "f." + FIELD + ".facet.range.end", args.get(2), - "f." + FIELD + ".facet.range.other", "all"); + SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.range", + FIELD, + "facet.mincount", + (use_mincount ? "3" : "0"), + "f." + FIELD + ".facet.range.start", + args.get(0), + "f." + FIELD + ".facet.range.gap", + args.get(1), + "f." + FIELD + ".facet.range.end", + args.get(2), + "f." + FIELD + ".facet.range.other", + "all"); QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { assertEquals(NUM_DOCS, rsp.getResults().getNumFound()); - + final String start = args.get(3); final String gap = args.get(4); final String end = args.get(5); - + @SuppressWarnings({"rawtypes"}) final List range_facets = rsp.getFacetRanges(); assertEquals(1, range_facets.size()); @@ -145,7 +160,7 @@ public void testSimpleRangeFacetsOfSymetricRates() throws Exception { assertEquals(3, result.getBefore()); assertEquals(3, result.getAfter()); assertEquals(9, result.getBetween()); - + @SuppressWarnings({"unchecked"}) List counts = result.getCounts(); if (use_mincount) { @@ -163,28 +178,46 @@ public void testSimpleRangeFacetsOfSymetricRates() throws Exception { assertEquals("bucket #" + i, (i == 0 || i == 3 || i == 6) ? 3 : 0, bucket.getCount()); } } - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } // same basic logic, w/json.facet - solrQuery = new SolrQuery("q", "*:*", "rows", "0", "json.facet", - "{ foo:{ type:range, field:"+FIELD+", mincount:"+(use_mincount ? 3 : 0)+", " + - " start:'"+args.get(0)+"', gap:'"+args.get(1)+"', end:'"+args.get(2)+"', other:all}}"); + solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{ foo:{ type:range, field:" + + FIELD + + ", mincount:" + + (use_mincount ? 
3 : 0) + + ", " + + " start:'" + + args.get(0) + + "', gap:'" + + args.get(1) + + "', end:'" + + args.get(2) + + "', other:all}}"); rsp = cluster.getSolrClient().query(solrQuery); try { assertEquals(NUM_DOCS, rsp.getResults().getNumFound()); - + @SuppressWarnings({"unchecked"}) - final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); - - assertEquals("before", 3L, ((NamedList)foo.get("before")).get("count")); - assertEquals("after", 3L, ((NamedList)foo.get("after")).get("count")); - assertEquals("between", 9L, ((NamedList)foo.get("between")).get("count")); - + final NamedList foo = + ((NamedList>) rsp.getResponse().get("facets")).get("foo"); + + assertEquals("before", 3L, ((NamedList) foo.get("before")).get("count")); + assertEquals("after", 3L, ((NamedList) foo.get("after")).get("count")); + assertEquals("between", 9L, ((NamedList) foo.get("between")).get("count")); + @SuppressWarnings({"unchecked", "rawtypes"}) final List buckets = (List) foo.get("buckets"); - + if (use_mincount) { assertEquals(3, buckets.size()); for (int i = 0; i < 3; i++) { @@ -199,28 +232,44 @@ public void testSimpleRangeFacetsOfSymetricRates() throws Exception { @SuppressWarnings({"rawtypes"}) NamedList bucket = buckets.get(i); assertEquals((4 + i) + ".00,USD", bucket.get("val")); - assertEquals("bucket #" + i, (i == 0 || i == 3 || i == 6) ? 3L : 0L, bucket.get("count")); + assertEquals( + "bucket #" + i, (i == 0 || i == 3 || i == 6) ? 3L : 0L, bucket.get("count")); } } - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } } - + public void testFacetRangeOfAsymetricRates() throws Exception { // facet.range: exchange rates relative to EUR... // // because of the asymetric echange rate, the counts for these buckets will be different // then if we just converted the EUR values to USD for (boolean use_mincount : Arrays.asList(true, false)) { - final SolrQuery solrQuery = new SolrQuery("q", "*:*", "rows", "0", "facet", "true", "facet.range", FIELD, - "facet.mincount", (use_mincount ? "3" : "0"), - "f." + FIELD + ".facet.range.start", "8,EUR", - "f." + FIELD + ".facet.range.gap", "2,EUR", - "f." + FIELD + ".facet.range.end", "22,EUR", - "f." + FIELD + ".facet.range.other", "all"); + final SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.range", + FIELD, + "facet.mincount", + (use_mincount ? "3" : "0"), + "f." + FIELD + ".facet.range.start", + "8,EUR", + "f." + FIELD + ".facet.range.gap", + "2,EUR", + "f." + FIELD + ".facet.range.end", + "22,EUR", + "f." + FIELD + ".facet.range.other", + "all"); final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { assertEquals(NUM_DOCS, rsp.getResults().getNumFound()); @@ -236,7 +285,7 @@ public void testFacetRangeOfAsymetricRates() throws Exception { assertEquals(6, result.getBefore()); assertEquals(3, result.getAfter()); assertEquals(6, result.getBetween()); - + @SuppressWarnings({"unchecked"}) List counts = result.getCounts(); if (use_mincount) { @@ -254,36 +303,48 @@ public void testFacetRangeOfAsymetricRates() throws Exception { assertEquals("bucket #" + i, (i == 2 || i == 3) ? 
3 : 0, bucket.getCount()); } } - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } - + public void testJsonFacetRangeOfAsymetricRates() throws Exception { // json.facet: exchange rates relative to EUR (same as testFacetRangeOfAsymetricRates) // // because of the asymetric echange rate, the counts for these buckets will be different // then if we just converted the EUR values to USD for (boolean use_mincount : Arrays.asList(true, false)) { - final SolrQuery solrQuery = new SolrQuery("q", "*:*", "rows", "0", "json.facet", - "{ foo:{ type:range, field:"+FIELD+", start:'8,EUR', " + - " mincount:"+(use_mincount ? 3 : 0)+", " + - " gap:'2,EUR', end:'22,EUR', other:all}}"); + final SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{ foo:{ type:range, field:" + + FIELD + + ", start:'8,EUR', " + + " mincount:" + + (use_mincount ? 3 : 0) + + ", " + + " gap:'2,EUR', end:'22,EUR', other:all}}"); final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { assertEquals(NUM_DOCS, rsp.getResults().getNumFound()); - + @SuppressWarnings({"unchecked"}) - final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); - - assertEquals("before", 6L, ((NamedList)foo.get("before")).get("count")); - assertEquals("after", 3L, ((NamedList)foo.get("after")).get("count")); - assertEquals("between", 6L, ((NamedList)foo.get("between")).get("count")); - + final NamedList foo = + ((NamedList>) rsp.getResponse().get("facets")).get("foo"); + + assertEquals("before", 6L, ((NamedList) foo.get("before")).get("count")); + assertEquals("after", 3L, ((NamedList) foo.get("after")).get("count")); + assertEquals("between", 6L, ((NamedList) foo.get("between")).get("count")); + @SuppressWarnings({"unchecked", "rawtypes"}) final List buckets = (List) foo.get("buckets"); - + if (use_mincount) { assertEquals(2, buckets.size()); for (int i = 0; i < 2; i++) { @@ -301,24 +362,40 @@ public void testJsonFacetRangeOfAsymetricRates() throws Exception { assertEquals("bucket #" + i, (i == 2 || i == 3) ? 3L : 0L, bucket.get("count")); } } - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } - + public void testFacetRangeCleanErrorOnMissmatchCurrency() { final String expected = "Cannot compare CurrencyValues when their currencies are not equal"; ignoreException(expected); - + // test to check clean error when start/end have diff currency (facet.range) - final SolrQuery solrQuery = new SolrQuery("q", "*:*", "rows", "0", "facet", "true", "facet.range", FIELD, - "f." + FIELD + ".facet.range.start", "0,EUR", - "f." + FIELD + ".facet.range.gap", "10,EUR", - "f." + FIELD + ".facet.range.end", "100,USD"); - final SolrException ex = expectThrows(SolrException.class, () -> { - final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); - }); + final SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "facet.range", + FIELD, + "f." + FIELD + ".facet.range.start", + "0,EUR", + "f." 
+ FIELD + ".facet.range.gap", + "10,EUR", + "f." + FIELD + ".facet.range.end", + "100,USD"); + final SolrException ex = + expectThrows( + SolrException.class, + () -> { + final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); + }); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); assertTrue(ex.getMessage(), ex.getMessage().contains(expected)); } @@ -326,18 +403,27 @@ public void testFacetRangeCleanErrorOnMissmatchCurrency() { public void testJsonFacetCleanErrorOnMissmatchCurrency() { final String expected = "Cannot compare CurrencyValues when their currencies are not equal"; ignoreException(expected); - + // test to check clean error when start/end have diff currency (json.facet) - final SolrQuery solrQuery = new SolrQuery("q", "*:*", "json.facet", - "{ x:{ type:range, field:"+FIELD+", " + - " start:'0,EUR', gap:'10,EUR', end:'100,USD' } }"); - final SolrException ex = expectThrows(SolrException.class, () -> { - final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); - }); + final SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "json.facet", + "{ x:{ type:range, field:" + + FIELD + + ", " + + " start:'0,EUR', gap:'10,EUR', end:'100,USD' } }"); + final SolrException ex = + expectThrows( + SolrException.class, + () -> { + final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); + }); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); assertTrue(ex.getMessage(), ex.getMessage().contains(expected)); } - + @Test public void testJsonRangeFacetWithSubFacet() throws Exception { @@ -345,32 +431,46 @@ public void testJsonRangeFacetWithSubFacet() throws Exception { // filter out the first 5 docs (by id) which should ensure that regardless of sharding: // - x2 being the top term for the 1st range bucket // - x0 being the top term for the 2nd range bucket - // - the 2nd term in each bucket may vary based on shard/doc placement, but the count will always be '1' - // ...and in many cases (based on the shard/doc placement) this will require refinement to backfill the top terms - final String filter = "id_i1:["+VALUES.size()+" TO *]"; + // - the 2nd term in each bucket may vary based on shard/doc placement, but the count will + // always be '1' + // ...and in many cases (based on the shard/doc placement) this will require refinement to + // backfill the top terms + final String filter = "id_i1:[" + VALUES.size() + " TO *]"; - // the *facet* results should be the same regardless of wether we filter via fq, or using a domain filter on the top facet + // the *facet* results should be the same regardless of wether we filter via fq, or using a + // domain filter on the top facet for (boolean use_domain : Arrays.asList(true, false)) { final String domain = use_domain ? "domain: { filter:'" + filter + "'}," : ""; // both of these options should produce same results since hardened:false is default final String end = random().nextBoolean() ? "end:'20,EUR'" : "end:'15,EUR'"; - - - final SolrQuery solrQuery = new SolrQuery("q", (use_domain ? "*:*" : filter), - "rows", "0", "json.facet", - "{ bar:{ type:range, field:"+FIELD+", " + domain + - " start:'0,EUR', gap:'10,EUR', "+end+", other:all " + - " facet: { foo:{ type:terms, field:x_s, " + - " refine:true, limit:2, overrequest:0" + - " } } } }"); + + final SolrQuery solrQuery = + new SolrQuery( + "q", + (use_domain ? 
"*:*" : filter), + "rows", + "0", + "json.facet", + "{ bar:{ type:range, field:" + + FIELD + + ", " + + domain + + " start:'0,EUR', gap:'10,EUR', " + + end + + ", other:all " + + " facet: { foo:{ type:terms, field:x_s, " + + " refine:true, limit:2, overrequest:0" + + " } } } }"); final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { - // this top level result count sanity check that should vary based on how we are filtering our facets... + // this top level result count sanity check that should vary based on how we are filtering + // our facets... assertEquals(use_domain ? 15 : 10, rsp.getResults().getNumFound()); @SuppressWarnings({"unchecked"}) - final NamedList bar = ((NamedList>)rsp.getResponse().get("facets")).get("bar"); + final NamedList bar = + ((NamedList>) rsp.getResponse().get("facets")).get("bar"); @SuppressWarnings({"unchecked"}) final List> bar_buckets = (List>) bar.get("buckets"); @SuppressWarnings({"unchecked"}) @@ -379,37 +479,42 @@ public void testJsonRangeFacetWithSubFacet() throws Exception { final NamedList between = (NamedList) bar.get("between"); @SuppressWarnings({"unchecked"}) final NamedList after = (NamedList) bar.get("after"); - + // sanity check our high level expectations... assertEquals("bar num buckets", 2, bar_buckets.size()); assertEquals("before count", 0L, before.get("count")); assertEquals("between count", 8L, between.get("count")); assertEquals("after count", 2L, after.get("count")); - + // drill into the various buckets... - + // before should have no subfacets since it's empty... assertNull("before has foo???", before.get("foo")); - + // our 2 range buckets & their sub facets... for (int i = 0; i < 2; i++) { final NamedList bucket = bar_buckets.get(i); assertEquals((i * 10) + ".00,EUR", bucket.get("val")); assertEquals("bucket #" + i, 4L, bucket.get("count")); @SuppressWarnings({"unchecked"}) - final List> foo_buckets = ((NamedList>>)bucket.get("foo")).get("buckets"); + final List> foo_buckets = + ((NamedList>>) bucket.get("foo")).get("buckets"); assertEquals("bucket #" + i + " foo num buckets", 2, foo_buckets.size()); - assertEquals("bucket #" + i + " foo top term", (0==i ? "x2" : "x0"), foo_buckets.get(0).get("val")); + assertEquals( + "bucket #" + i + " foo top term", + (0 == i ? "x2" : "x0"), + foo_buckets.get(0).get("val")); assertEquals("bucket #" + i + " foo top count", 2L, foo_buckets.get(0).get("count")); // NOTE: we can't make any assertions about the 2nd val.. // our limit + randomized sharding could result in either remaining term being picked. // but for either term, the count should be exactly the same... assertEquals("bucket #" + i + " foo 2nd count", 1L, foo_buckets.get(1).get("count")); } - + { // between... @SuppressWarnings({"unchecked"}) - final List> buckets = ((NamedList>>)between.get("foo")).get("buckets"); + final List> buckets = + ((NamedList>>) between.get("foo")).get("buckets"); assertEquals("between num buckets", 2, buckets.size()); // the counts should both be 3, and the term order should break the tie... assertEquals("between bucket top", "x0", buckets.get(0).get("val")); @@ -417,10 +522,11 @@ public void testJsonRangeFacetWithSubFacet() throws Exception { assertEquals("between bucket 2nd", "x2", buckets.get(1).get("val")); assertEquals("between bucket 2nd count", 3L, buckets.get(1).get("count")); } - + { // after... 
@SuppressWarnings({"unchecked"}) - final List> buckets = ((NamedList>>)after.get("foo")).get("buckets"); + final List> buckets = + ((NamedList>>) after.get("foo")).get("buckets"); assertEquals("after num buckets", 2, buckets.size()); // the counts should both be 1, and the term order should break the tie... assertEquals("after bucket top", "x1", buckets.get(0).get("val")); @@ -428,42 +534,56 @@ public void testJsonRangeFacetWithSubFacet() throws Exception { assertEquals("after bucket 2nd", "x2", buckets.get(1).get("val")); assertEquals("after bucket 2nd count", 1L, buckets.get(1).get("count")); } - - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } - + @Test public void testJsonRangeFacetAsSubFacet() throws Exception { // limit=1, overrequest=1, with refinement enabled - // filter out the first 5 docs (by id), which should ensure that 'x2' is the top bucket overall... + // filter out the first 5 docs (by id), which should ensure that 'x2' is the top bucket + // overall... // ...except in some rare sharding cases where it doesn't make it into the top 2 terms. - // - // So the filter also explicitly accepts all 'x2' docs -- ensuring we have enough matches containing that term for it - // to be enough of a candidate in phase#1, but for many shard arrangements it won't be returned by all shards resulting - // in refinement being neccessary to get the x_s:x2 sub-shard ranges from shard(s) where x_s:x2 is only tied for the - // (shard local) top term count and would lose the (index order) tie breaker with x_s:x0 or x_s:x1 - final String filter = "id_i1:["+VALUES.size()+" TO *] OR x_s:x2"; - - // the *facet* results should be the same regardless of wether we filter via fq, or using a domain filter on the top facet + // + // So the filter also explicitly accepts all 'x2' docs -- ensuring we have enough matches + // containing that term for it to be enough of a candidate in phase#1, but for many shard + // arrangements it won't be returned by all shards resulting in refinement being neccessary to + // get the x_s:x2 sub-shard ranges from shard(s) where x_s:x2 is only tied for the (shard local) + // top term count and would lose the (index order) tie breaker with x_s:x0 or x_s:x1 + final String filter = "id_i1:[" + VALUES.size() + " TO *] OR x_s:x2"; + + // the *facet* results should be the same regardless of wether we filter via fq, or using a + // domain filter on the top facet for (boolean use_domain : Arrays.asList(true, false)) { final String domain = use_domain ? "domain: { filter:'" + filter + "'}," : ""; - final SolrQuery solrQuery = new SolrQuery("q", (use_domain ? "*:*" : filter), - "rows", "0", "json.facet", - "{ foo:{ type:terms, field:x_s, refine:true, limit:1, overrequest:1, " + domain + - " facet: { bar:{ type:range, field:"+FIELD+", other:all, " + - " start:'8,EUR', gap:'2,EUR', end:'22,EUR' }} } }"); + final SolrQuery solrQuery = + new SolrQuery( + "q", + (use_domain ? 
"*:*" : filter), + "rows", + "0", + "json.facet", + "{ foo:{ type:terms, field:x_s, refine:true, limit:1, overrequest:1, " + + domain + + " facet: { bar:{ type:range, field:" + + FIELD + + ", other:all, " + + " start:'8,EUR', gap:'2,EUR', end:'22,EUR' }} } }"); final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { - // this top level result count sanity check that should vary based on how we are filtering our facets... + // this top level result count sanity check that should vary based on how we are filtering + // our facets... assertEquals(use_domain ? 15 : 11, rsp.getResults().getNumFound()); - + @SuppressWarnings({"unchecked"}) - final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); - + final NamedList foo = + ((NamedList>) rsp.getResponse().get("facets")).get("foo"); + // sanity check... // because of the facet limit, foo should only have 1 bucket // because of the fq, the val should be "x2" and the count=5 @@ -472,16 +592,16 @@ public void testJsonRangeFacetAsSubFacet() throws Exception { assertEquals(1, foo_buckets.size()); assertEquals("x2", foo_buckets.get(0).get("val")); assertEquals("foo bucket count", 5L, foo_buckets.get(0).get("count")); - + @SuppressWarnings({"unchecked"}) - final NamedList bar = (NamedList)foo_buckets.get(0).get("bar"); - + final NamedList bar = (NamedList) foo_buckets.get(0).get("bar"); + // these are the 'x2' specific counts, based on our fq... - - assertEquals("before", 2L, ((NamedList)bar.get("before")).get("count")); - assertEquals("after", 1L, ((NamedList)bar.get("after")).get("count")); - assertEquals("between", 2L, ((NamedList)bar.get("between")).get("count")); - + + assertEquals("before", 2L, ((NamedList) bar.get("before")).get("count")); + assertEquals("after", 1L, ((NamedList) bar.get("after")).get("count")); + assertEquals("between", 2L, ((NamedList) bar.get("between")).get("count")); + @SuppressWarnings({"unchecked", "rawtypes"}) final List buckets = (List) bar.get("buckets"); assertEquals(7, buckets.size()); @@ -492,10 +612,10 @@ public void testJsonRangeFacetAsSubFacet() throws Exception { // 12,EUR & 15,EUR are the 2 values that align with x2 docs assertEquals("bucket #" + i, (i == 2 || i == 3) ? 
1L : 0L, bucket.get("count")); } - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } - } diff --git a/solr/core/src/test/org/apache/solr/search/CursorMarkTest.java b/solr/core/src/test/org/apache/solr/search/CursorMarkTest.java index 9ae3fe49119..951cf3cbc4a 100644 --- a/solr/core/src/test/org/apache/solr/search/CursorMarkTest.java +++ b/solr/core/src/test/org/apache/solr/search/CursorMarkTest.java @@ -16,43 +16,44 @@ */ package org.apache.solr.search; +import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START; +import static org.hamcrest.core.StringContains.containsString; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.UUID; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; -import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.TestUtil; +import org.apache.solr.CursorPagingTest; +import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; +import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.schema.IndexSchema; import org.apache.solr.schema.SchemaField; -import org.apache.solr.request.SolrQueryRequest; -import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.CursorPagingTest; -import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START; - -import java.io.IOException; -import java.util.Arrays; -import java.util.ArrayList; -import java.util.List; -import java.util.Collection; -import java.util.Collections; -import java.util.UUID; - import org.junit.BeforeClass; -import static org.hamcrest.core.StringContains.containsString; /** * Primarily a test of parsing and serialization of the CursorMark values. * - * NOTE: this class Reuses some utilities from {@link CursorPagingTest} that assume the same schema and configs. + *

NOTE: this class Reuses some utilities from {@link CursorPagingTest} that assume the same + * schema and configs. * - * @see CursorPagingTest + * @see CursorPagingTest */ public class CursorMarkTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeTests() throws Exception { - System.setProperty("solr.test.useFilterForSortedQuery", Boolean.toString(random().nextBoolean())); + System.setProperty( + "solr.test.useFilterForSortedQuery", Boolean.toString(random().nextBoolean())); initCore(CursorPagingTest.TEST_SOLRCONFIG_NAME, CursorPagingTest.TEST_SCHEMAXML_NAME); } @@ -72,14 +73,17 @@ public void testNextCursorMark() throws IOException { assertEquals("next values not correct", nextValues, next.getSortValues()); assertEquals("next SortSpec not correct", ss, next.getSortSpec()); - SolrException e = expectThrows(SolrException.class, - "didn't fail on next with incorrect num of sortvalues", - () -> { - // append to our random sort string so we know it has wrong num clauses - final SortSpec otherSort = SortSpecParsing.parseSortSpec(randomSortString+",id asc", req); - CursorMark trash = previous.createNext(Arrays.asList - (buildRandomSortObjects(otherSort))); - }); + SolrException e = + expectThrows( + SolrException.class, + "didn't fail on next with incorrect num of sortvalues", + () -> { + // append to our random sort string so we know it has wrong num clauses + final SortSpec otherSort = + SortSpecParsing.parseSortSpec(randomSortString + ",id asc", req); + CursorMark trash = + previous.createNext(Arrays.asList(buildRandomSortObjects(otherSort))); + }); assertEquals(500, e.code()); assertThat(e.getMessage(), containsString("sort values != sort length")); } @@ -106,9 +110,9 @@ public void testInvalidUsage() { assertEquals(ErrorCode.BAD_REQUEST.code, e.code()); assertTrue(0 < e.getMessage().indexOf("uniqueKey")); } - + try { - final SortSpec ss = SortSpecParsing.parseSortSpec("_docid_ "+dir+", id desc", req); + final SortSpec ss = SortSpecParsing.parseSortSpec("_docid_ " + dir + ", id desc", req); final CursorMark totem = new CursorMark(schema, ss); fail("no failure from sort that includes _docid_: " + dir); } catch (SolrException e) { @@ -118,7 +122,6 @@ public void testInvalidUsage() { } } - public void testGarbageParsing() throws IOException { final SolrQueryRequest req = req(); final IndexSchema schema = req.getSchema(); @@ -157,7 +160,7 @@ public void testGarbageParsing() throws IOException { final SortSpec otherSort = SortSpecParsing.parseSortSpec("double desc, id asc", req); final CursorMark otherTotem = new CursorMark(schema, otherSort); otherTotem.setSortValues(Arrays.asList(buildRandomSortObjects(otherSort))); - + totem.parseSerializedTotem(otherTotem.getSerializedTotem()); fail("didn't fail on totem from incorrect sort (num clauses)"); } catch (SolrException e) { @@ -168,7 +171,7 @@ public void testGarbageParsing() throws IOException { public void testRoundTripParsing() throws IOException { - // for any valid SortSpec, and any legal values, we should be able to round + // for any valid SortSpec, and any legal values, we should be able to round // trip serialize the totem and get the same values back. 
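+ // (i.e. a cursorMark echoed back to a client must parse into exactly the same sort values, + // otherwise paging could not resume from where it left off)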
final Collection allFieldNames = getAllFieldNames(); @@ -177,8 +180,8 @@ public void testRoundTripParsing() throws IOException { final int numRandomSorts = atLeast(50); final int numRandomValIters = atLeast(10); for (int i = 0; i < numRandomSorts; i++) { - final SortSpec ss = SortSpecParsing.parseSortSpec - (CursorPagingTest.buildRandomSort(allFieldNames), req); + final SortSpec ss = + SortSpecParsing.parseSortSpec(CursorPagingTest.buildRandomSort(allFieldNames), req); final CursorMark totemIn = new CursorMark(schema, ss); final CursorMark totemOut = new CursorMark(schema, ss); @@ -210,7 +213,8 @@ private static Object[] buildRandomSortObjects(SortSpec ss) throws IOException { SchemaField sf = fields.get(i); if (null == sf) { // score or function - results[i] = (Float) random().nextFloat() * random().nextInt(); break; + results[i] = (Float) random().nextFloat() * random().nextInt(); + break; } else if (0 == TestUtil.nextInt(random(), 0, 7)) { // emulate missing value for doc results[i] = null; @@ -218,7 +222,7 @@ private static Object[] buildRandomSortObjects(SortSpec ss) throws IOException { final String fieldName = sf.getName(); assertNotNull(fieldName); - // Note: In some cases we build a human readable version of the sort value and then + // Note: In some cases we build a human readable version of the sort value and then // unmarshall it into the raw, real, sort values that are expected by the FieldTypes. // In other cases we just build the raw value to begin with because it's easier @@ -254,9 +258,8 @@ private static Object[] buildRandomSortObjects(SortSpec ss) throws IOException { } else { fail("fell through the rabbit hole, new field in schema? = " + fieldName); } - - results[i] = val; + results[i] = val; } } return results; @@ -276,19 +279,15 @@ private static Object getRandomCollation(SchemaField sf) throws IOException { } return val; } - - /** - * a list of the fields in the schema - excluding _version_ - */ + + /** a list of the fields in the schema - excluding _version_ */ private Collection getAllFieldNames() { ArrayList names = new ArrayList<>(37); for (String f : h.getCore().getLatestSchema().getFields().keySet()) { - if (! f.equals("_version_")) { + if (!f.equals("_version_")) { names.add(f); } } return Collections.unmodifiableCollection(names); } - - } diff --git a/solr/core/src/test/org/apache/solr/search/DelayingSearchComponent.java b/solr/core/src/test/org/apache/solr/search/DelayingSearchComponent.java index b7e8e4ca570..cff1bc6e396 100644 --- a/solr/core/src/test/org/apache/solr/search/DelayingSearchComponent.java +++ b/solr/core/src/test/org/apache/solr/search/DelayingSearchComponent.java @@ -18,14 +18,11 @@ import java.io.IOException; import java.util.concurrent.TimeUnit; - import org.apache.solr.handler.component.ResponseBuilder; import org.apache.solr.handler.component.SearchComponent; -/** - * Search component used to add delay to each request. - */ -public class DelayingSearchComponent extends SearchComponent{ +/** Search component used to add delay to each request. 
*/ +public class DelayingSearchComponent extends SearchComponent { @Override public void prepare(ResponseBuilder rb) throws IOException { @@ -34,9 +31,10 @@ public void prepare(ResponseBuilder rb) throws IOException { @Override public void process(ResponseBuilder rb) throws IOException { - final long totalSleepMillis = rb.req.getParams().getLong("sleep",0); + final long totalSleepMillis = rb.req.getParams().getLong("sleep", 0); if (totalSleepMillis > 0) { - final long totalSleepNanos = TimeUnit.NANOSECONDS.convert(totalSleepMillis, TimeUnit.MILLISECONDS); + final long totalSleepNanos = + TimeUnit.NANOSECONDS.convert(totalSleepMillis, TimeUnit.MILLISECONDS); final long startNanos = System.nanoTime(); try { // Thread.sleep() (and derivatives) are not garunteed to sleep the full amount: @@ -48,8 +46,8 @@ public void process(ResponseBuilder rb) throws IOException { // has exceeded in order to get their expected results, we would rather over-sleep // then under sleep) for (long sleepNanos = totalSleepNanos; - 0 < sleepNanos; - sleepNanos = totalSleepNanos - (System.nanoTime() - startNanos)) { + 0 < sleepNanos; + sleepNanos = totalSleepNanos - (System.nanoTime() - startNanos)) { TimeUnit.NANOSECONDS.sleep(sleepNanos); } } catch (InterruptedException e) { @@ -62,5 +60,4 @@ public void process(ResponseBuilder rb) throws IOException { public String getDescription() { return "SearchComponent used to add delay to each request"; } - } diff --git a/solr/core/src/test/org/apache/solr/search/DocSetPerf.java b/solr/core/src/test/org/apache/solr/search/DocSetPerf.java index 28e3e0c8cdb..cf15b171232 100644 --- a/solr/core/src/test/org/apache/solr/search/DocSetPerf.java +++ b/solr/core/src/test/org/apache/solr/search/DocSetPerf.java @@ -17,20 +17,18 @@ package org.apache.solr.search; import java.util.Random; - import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.SuppressForbidden; import org.apache.solr.util.RTimer; -/** - */ +/** */ public class DocSetPerf { // use test instead of assert since asserts may be turned off public static void test(boolean condition) { - if (!condition) { - throw new RuntimeException("test requestHandler: assertion failed!"); - } + if (!condition) { + throw new RuntimeException("test requestHandler: assertion failed!"); + } } static FixedBitSet bs; @@ -38,7 +36,7 @@ public static void test(boolean condition) { static int[] ids; // not unique static Random rand = getRandom(); - + @SuppressForbidden(reason = "No testcase, use of java.util.Random allowed") private static Random getRandom() { return new Random(); @@ -47,26 +45,26 @@ private static Random getRandom() { static void generate(int maxSize, int bitsToSet) { bs = new FixedBitSet(maxSize); ids = new int[bitsToSet]; - int count=0; - if (maxSize>0) { - for (int i=0; i 0) { + for (int i = 0; i < bitsToSet; i++) { + int id = rand.nextInt(maxSize); if (!bs.get(id)) { bs.set(id); - ids[count++]=id; + ids[count++] = id; } } } - bds = new BitDocSet(bs,bitsToSet); + bds = new BitDocSet(bs, bitsToSet); } public static void main(String[] args) { - String bsSize=args[0]; - boolean randSize=false; + String bsSize = args[0]; + boolean randSize = false; if (bsSize.endsWith("-")) { - bsSize=bsSize.substring(0,bsSize.length()-1); - randSize=true; + bsSize = bsSize.substring(0, bsSize.length() - 1); + randSize = true; } int bitSetSize = Integer.parseInt(bsSize); @@ -75,12 +73,12 @@ public static void main(String[] args) { String test = args[3].intern(); int iter = Integer.parseInt(args[4]); - long ret=0; + long ret = 0; 
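+ // 'ret' accumulates a value from each timed operation, keeping the benchmark work observable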
FixedBitSet[] sets = new FixedBitSet[numSets]; DocSet[] bset = new DocSet[numSets]; - for (int i=0; i qParsersTested = new HashSet<>(); - /** @see #testParserCoverage */ + /** + * @see #testParserCoverage + */ private static final Set valParsersTested = new HashSet<>(); - public void testDateMathParsingEquality() throws Exception { // regardless of parser, these should all be equivalent queries - assertQueryEquals - (null - ,"{!lucene}f_tdt:2013-09-11T00\\:00\\:00Z" - ,"{!lucene}f_tdt:2013-03-08T00\\:46\\:15Z/DAY+6MONTHS+3DAYS" - ,"{!lucene}f_tdt:\"2013-03-08T00:46:15Z/DAY+6MONTHS+3DAYS\"" - ,"{!field f=f_tdt}2013-03-08T00:46:15Z/DAY+6MONTHS+3DAYS" - ,"{!field f=f_tdt}2013-09-11T00:00:00Z" - ,"{!term f=f_tdt}2013-03-08T00:46:15Z/DAY+6MONTHS+3DAYS" - ,"{!term f=f_tdt}2013-09-11T00:00:00Z" - ); - + assertQueryEquals( + null, + "{!lucene}f_tdt:2013-09-11T00\\:00\\:00Z", + "{!lucene}f_tdt:2013-03-08T00\\:46\\:15Z/DAY+6MONTHS+3DAYS", + "{!lucene}f_tdt:\"2013-03-08T00:46:15Z/DAY+6MONTHS+3DAYS\"", + "{!field f=f_tdt}2013-03-08T00:46:15Z/DAY+6MONTHS+3DAYS", + "{!field f=f_tdt}2013-09-11T00:00:00Z", + "{!term f=f_tdt}2013-03-08T00:46:15Z/DAY+6MONTHS+3DAYS", + "{!term f=f_tdt}2013-09-11T00:00:00Z"); } + public void testQueryLucene() throws Exception { - assertQueryEquals("lucene", "{!lucene}apache solr", - "apache solr", "apache solr "); - assertQueryEquals("lucene", "+apache +solr", "apache AND solr", - " +apache +solr"); + assertQueryEquals( + "lucene", "{!lucene}apache solr", + "apache solr", "apache solr "); + assertQueryEquals("lucene", "+apache +solr", "apache AND solr", " +apache +solr"); } public void testQueryLuceneAllDocsWithField() throws Exception { - // for all "primative" types except for doubles/floats, 'foo:*' should be functionally equivilent to "foo:[* TO *]" - // whatever implementation/optimizations exist for one syntax, should exist for the other syntax as well - // (regardless of docValues, multivalued, etc...) + // for all "primitive" types except for doubles/floats, 'foo:*' should be functionally + // equivalent to "foo:[* TO *]" whatever implementation/optimizations exist for one syntax, + // should exist for the other syntax as well (regardless of docValues, multivalued, etc...)
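The "functionally equivalent" claim in the comment above is checked structurally rather than by running searches: each syntax is parsed and the resulting Query objects must compare equal. A minimal sketch of that check, reusing the req()/QParser/QueryUtils helpers this test already uses:

    // Both syntaxes must parse to Query objects that are equal under
    // Query.equals()/hashCode(); QueryUtils.checkEqual asserts exactly that.
    SolrQueryRequest req = req("df", "text");
    try {
      Query q1 = QParser.getParser("foo_i:*", "lucene", true, req).getQuery();
      Query q2 = QParser.getParser("foo_i:[* TO *]", "lucene", true, req).getQuery();
      QueryUtils.checkEqual(q1, q2);
    } finally {
      req.close();
    }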
+ for (String field : + Arrays.asList( + "foo_sI", + "foo_sS", + "foo_s1", + "foo_s", + "t_foo", + "tv_foo", + "tv_mv_foo", + "foo_b", + "foo_b_dvo", + "foo_i", + "foo_is", + "foo_i_dvo", + "foo_l", + "foo_l_dvo", + "foo_dt", + "foo_dt_dvo")) { assertQueryEquals("lucene", field + ":*", field + ":[* TO *]"); } } public void testQueryPrefix() throws Exception { - SolrQueryRequest req = req("myField","foo_s"); + SolrQueryRequest req = req("myField", "foo_s"); try { - assertQueryEquals("prefix", req, - "{!prefix f=$myField}asdf", - "{!prefix f=foo_s}asdf"); + assertQueryEquals("prefix", req, "{!prefix f=$myField}asdf", "{!prefix f=foo_s}asdf"); } finally { req.close(); } } public void testQueryBoost() throws Exception { - SolrQueryRequest req = req("df","foo_s","myBoost","sum(3,foo_i)"); + SolrQueryRequest req = req("df", "foo_s", "myBoost", "sum(3,foo_i)"); try { - assertQueryEquals("boost", req, - "{!boost b=$myBoost}asdf", - "{!boost b=$myBoost v=asdf}", - "{!boost b=sum(3,foo_i)}foo_s:asdf"); + assertQueryEquals( + "boost", + req, + "{!boost b=$myBoost}asdf", + "{!boost b=$myBoost v=asdf}", + "{!boost b=sum(3,foo_i)}foo_s:asdf"); } finally { req.close(); } @@ -135,32 +152,77 @@ public void testQueryBoost() throws Exception { public void testReRankQuery() throws Exception { final String defType = ReRankQParserPlugin.NAME; - SolrQueryRequest req = req("q", "*:*", - "rqq", "{!edismax}hello", - "rdocs", "20", - "rweight", "2", - "rows", "10", - "start", "0"); + SolrQueryRequest req = + req( + "q", "*:*", + "rqq", "{!edismax}hello", + "rdocs", "20", + "rweight", "2", + "rows", "10", + "start", "0"); try { - assertQueryEquals(defType, req, - "{!"+defType+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=$rdocs "+ReRankQParserPlugin.RERANK_WEIGHT+"=$rweight}", - "{!"+defType+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=20 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}"); + assertQueryEquals( + defType, + req, + "{!" + + defType + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=$rdocs " + + ReRankQParserPlugin.RERANK_WEIGHT + + "=$rweight}", + "{!" + + defType + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=20 " + + ReRankQParserPlugin.RERANK_WEIGHT + + "=2}"); } finally { req.close(); } - - req = req("qq", "*:*", - "rqq", "{!edismax}hello", - "rdocs", "20", - "rweight", "2", - "rows", "100", - "start", "50"); + req = + req( + "qq", + "*:*", + "rqq", + "{!edismax}hello", + "rdocs", + "20", + "rweight", + "2", + "rows", + "100", + "start", + "50"); try { - assertQueryEquals(defType, req, - "{!"+defType+" mainQuery=$qq "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=$rdocs "+ReRankQParserPlugin.RERANK_WEIGHT+"=$rweight}", - "{!"+defType+" mainQuery=$qq "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=20 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}"); + assertQueryEquals( + defType, + req, + "{!" + + defType + + " mainQuery=$qq " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=$rdocs " + + ReRankQParserPlugin.RERANK_WEIGHT + + "=$rweight}", + "{!" 
+ + defType + + " mainQuery=$qq " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=20 " + + ReRankQParserPlugin.RERANK_WEIGHT + + "=2}"); } finally { req.close(); @@ -186,7 +248,24 @@ public void testGraphTermsQuery() throws Exception { } public void testTlogitQuery() throws Exception { - SolrQueryRequest req = req("q", "*:*", "feature", "f", "terms","a,b,c", "weights", "100,200,300", "idfs","1,5,7","iteration","1", "outcome","a","positiveLabel","1"); + SolrQueryRequest req = + req( + "q", + "*:*", + "feature", + "f", + "terms", + "a,b,c", + "weights", + "100,200,300", + "idfs", + "1,5,7", + "iteration", + "1", + "outcome", + "a", + "positiveLabel", + "1"); try { assertQueryEquals("tlogit", req, "{!tlogit}"); } finally { @@ -195,7 +274,8 @@ public void testTlogitQuery() throws Exception { } public void testIGainQuery() throws Exception { - SolrQueryRequest req = req("q", "*:*", "outcome", "b", "positiveLabel", "1", "field", "x", "numTerms","200"); + SolrQueryRequest req = + req("q", "*:*", "outcome", "b", "positiveLabel", "1", "field", "x", "numTerms", "200"); try { assertQueryEquals("igain", req, "{!igain}"); } finally { @@ -206,33 +286,36 @@ public void testIGainQuery() throws Exception { public void testSignificantTermsQuery() throws Exception { SolrQueryRequest req = req("q", "*:*"); try { - assertQueryEquals(SignificantTermsQParserPlugin.NAME, - req, "{!"+SignificantTermsQParserPlugin.NAME+"}"); + assertQueryEquals( + SignificantTermsQParserPlugin.NAME, req, "{!" + SignificantTermsQParserPlugin.NAME + "}"); } finally { req.close(); } } public void testQuerySwitch() throws Exception { - SolrQueryRequest req = req("myXXX", "XXX", - "myField", "foo_s", - "myQ", "{!prefix f=$myField}asdf"); + SolrQueryRequest req = + req( + "myXXX", "XXX", + "myField", "foo_s", + "myQ", "{!prefix f=$myField}asdf"); try { - assertQueryEquals("switch", req, - "{!switch case.foo=XXX case.bar=zzz case.yak=qqq}foo", - "{!switch case.foo=qqq case.bar=XXX case.yak=zzz} bar ", - "{!switch case.foo=qqq case.bar=XXX case.yak=zzz v=' bar '}", - "{!switch default=XXX case.foo=qqq case.bar=zzz}asdf", - "{!switch default=$myXXX case.foo=qqq case.bar=zzz}asdf", - "{!switch case=XXX case.bar=zzz case.yak=qqq v=''}", - "{!switch case.bar=zzz case=XXX case.yak=qqq v=''}", - "{!switch case=XXX case.bar=zzz case.yak=qqq}", - "{!switch case=XXX case.bar=zzz case.yak=qqq} ", - "{!switch case=$myXXX case.bar=zzz case.yak=qqq} "); - - assertQueryEquals("switch", req, - "{!switch case.foo=$myQ case.bar=zzz case.yak=qqq}foo", - "{!query v=$myQ}"); + assertQueryEquals( + "switch", + req, + "{!switch case.foo=XXX case.bar=zzz case.yak=qqq}foo", + "{!switch case.foo=qqq case.bar=XXX case.yak=zzz} bar ", + "{!switch case.foo=qqq case.bar=XXX case.yak=zzz v=' bar '}", + "{!switch default=XXX case.foo=qqq case.bar=zzz}asdf", + "{!switch default=$myXXX case.foo=qqq case.bar=zzz}asdf", + "{!switch case=XXX case.bar=zzz case.yak=qqq v=''}", + "{!switch case.bar=zzz case=XXX case.yak=qqq v=''}", + "{!switch case=XXX case.bar=zzz case.yak=qqq}", + "{!switch case=XXX case.bar=zzz case.yak=qqq} ", + "{!switch case=$myXXX case.bar=zzz case.yak=qqq} "); + + assertQueryEquals( + "switch", req, "{!switch case.foo=$myQ case.bar=zzz case.yak=qqq}foo", "{!query v=$myQ}"); } finally { req.close(); } @@ -240,51 +323,54 @@ public void testQuerySwitch() throws Exception { public void testMatchAllDocsQueryXmlParser() throws Exception { final String type = "xmlparser"; - assertQueryEquals(type, - "{!"+type+"}", - 
"", - ""); + assertQueryEquals( + type, + "{!" + type + "}", + "", + ""); } public void testQueryDismax() throws Exception { - for (final String type : new String[]{"dismax","edismax"}) { - assertQueryEquals(type, "{!"+type+"}apache solr", - "apache solr", "apache solr", "apache solr "); - assertQueryEquals(type, "+apache +solr", "apache AND solr", - " +apache +solr"); + for (final String type : new String[] {"dismax", "edismax"}) { + assertQueryEquals( + type, "{!" + type + "}apache solr", "apache solr", "apache solr", "apache solr "); + assertQueryEquals(type, "+apache +solr", "apache AND solr", " +apache +solr"); } } + public void testField() throws Exception { - SolrQueryRequest req = req("myField","foo_s"); + SolrQueryRequest req = req("myField", "foo_s"); try { - assertQueryEquals("field", req, - "{!field f=$myField}asdf", - "{!field f=$myField v=asdf}", - "{!field f=foo_s}asdf"); + assertQueryEquals( + "field", + req, + "{!field f=$myField}asdf", + "{!field f=$myField v=asdf}", + "{!field f=foo_s}asdf"); } finally { req.close(); } } public void testQueryRaw() throws Exception { - SolrQueryRequest req = req("myField","foo_s"); + SolrQueryRequest req = req("myField", "foo_s"); try { - assertQueryEquals("raw", req, - "{!raw f=$myField}asdf", - "{!raw f=$myField v=asdf}", - "{!raw f=foo_s}asdf"); + assertQueryEquals( + "raw", req, "{!raw f=$myField}asdf", "{!raw f=$myField v=asdf}", "{!raw f=foo_s}asdf"); } finally { req.close(); } } public void testQueryTerm() throws Exception { - SolrQueryRequest req = req("myField","foo_s"); + SolrQueryRequest req = req("myField", "foo_s"); try { - assertQueryEquals("term", req, - "{!term f=$myField}asdf", - "{!term f=$myField v=asdf}", - "{!term f=foo_s}asdf"); + assertQueryEquals( + "term", + req, + "{!term f=$myField}asdf", + "{!term f=$myField v=asdf}", + "{!term f=foo_s}asdf"); } finally { req.close(); } @@ -292,29 +378,32 @@ public void testQueryTerm() throws Exception { @SuppressWarnings({"unchecked"}) public void testQueryCollapse() throws Exception { - SolrQueryRequest req = req("myField","foo_s1", - "g_sort","foo_s1 asc, foo_i desc"); + SolrQueryRequest req = + req( + "myField", "foo_s1", + "g_sort", "foo_s1 asc, foo_i desc"); try { - assertQueryEquals("collapse", req, - "{!collapse field=$myField}"); + assertQueryEquals("collapse", req, "{!collapse field=$myField}"); - assertQueryEquals("collapse", req, - "{!collapse field=$myField max=a}"); + assertQueryEquals("collapse", req, "{!collapse field=$myField max=a}"); - assertQueryEquals("collapse", req, - "{!collapse field=$myField min=a}", - "{!collapse field=$myField min=a nullPolicy=ignore}"); + assertQueryEquals( + "collapse", + req, + "{!collapse field=$myField min=a}", + "{!collapse field=$myField min=a nullPolicy=ignore}"); - assertQueryEquals("collapse", req, - "{!collapse field=$myField sort=$g_sort}", - "{!collapse field=$myField sort='foo_s1 asc, foo_i desc'}", - "{!collapse field=$myField sort=$g_sort nullPolicy=ignore}"); + assertQueryEquals( + "collapse", + req, + "{!collapse field=$myField sort=$g_sort}", + "{!collapse field=$myField sort='foo_s1 asc, foo_i desc'}", + "{!collapse field=$myField sort=$g_sort nullPolicy=ignore}"); - assertQueryEquals("collapse", req, - "{!collapse field=$myField max=a nullPolicy=expand}"); + assertQueryEquals("collapse", req, "{!collapse field=$myField max=a nullPolicy=expand}"); - //Add boosted documents to the request context. + // Add boosted documents to the request context. 
@SuppressWarnings({"rawtypes"}) Map context = req.getContext(); @SuppressWarnings({"rawtypes"}) @@ -323,23 +412,22 @@ public void testQueryCollapse() throws Exception { boosted.add("doc2"); context.put("BOOSTED", boosted); - assertQueryEquals("collapse", req, + assertQueryEquals( + "collapse", + req, "{!collapse field=$myField min=a}", "{!collapse field=$myField min=a nullPolicy=ignore}"); - } finally { req.close(); } } - public void testHash() throws Exception { - SolrQueryRequest req = req("partitionKeys","foo_s"); + SolrQueryRequest req = req("partitionKeys", "foo_s"); try { - assertQueryEquals("hash", req, - "{!hash workers=3 worker=0}"); + assertQueryEquals("hash", req, "{!hash workers=3 worker=0}"); } finally { req.close(); @@ -347,24 +435,27 @@ public void testHash() throws Exception { } public void testMinHash() throws Exception { - SolrQueryRequest req = req("q","apache lucene is a search library", - "df", "min_hash_analyzed"); + SolrQueryRequest req = req("q", "apache lucene is a search library", "df", "min_hash_analyzed"); try { - assertQueryEquals("min_hash", req, + assertQueryEquals( + "min_hash", + req, "{!min_hash field=\"min_hash_analysed\"}apache lucene is a search library"); } finally { req.close(); } } - + public void testRankQuery() throws Exception { SolrQueryRequest req = req("df", "foo_s"); try { - assertQueryEquals("rank", req, - "{!rank f='rank_1'}", - "{!rank f='rank_1' function='satu'}", - "{!rank f='rank_1' function='satu' weight=1}"); + assertQueryEquals( + "rank", + req, + "{!rank f='rank_1'}", + "{!rank f='rank_1' function='satu'}", + "{!rank f='rank_1' function='satu' weight=1}"); } finally { req.close(); } @@ -373,11 +464,13 @@ public void testRankQuery() throws Exception { public void testQueryNested() throws Exception { SolrQueryRequest req = req("df", "foo_s"); try { - assertQueryEquals("query", req, - "{!query defType=lucene}asdf", - "{!query v='foo_s:asdf'}", - "{!query}foo_s:asdf", - "{!query}asdf"); + assertQueryEquals( + "query", + req, + "{!query defType=lucene}asdf", + "{!query v='foo_s:asdf'}", + "{!query}foo_s:asdf", + "{!query}asdf"); } finally { req.close(); } @@ -385,40 +478,43 @@ public void testQueryNested() throws Exception { public void testQueryFunc() throws Exception { // more involved tests of specific functions in other methods - SolrQueryRequest req = req("myVar", "5", - "myField","foo_i", - "myInner","product(4,foo_i)"); + SolrQueryRequest req = + req( + "myVar", "5", + "myField", "foo_i", + "myInner", "product(4,foo_i)"); try { - assertQueryEquals("func", req, - "{!func}sum(4,5)", - "{!func}sum(4,$myVar)", - "sum(4,5)"); - assertQueryEquals("func", req, - "{!func}sum(1,2,3,4,5)", - "{!func}sum(1,2,3,4,$myVar)", - "sum(1,2,3,4,5)"); - assertQueryEquals("func", req, - "{!func}sum(4,$myInner)", - "{!func}sum(4,product(4,foo_i))", - "{!func}sum(4,product(4,$myField))", - "{!func}sum(4,product(4,field(foo_i)))"); + assertQueryEquals("func", req, "{!func}sum(4,5)", "{!func}sum(4,$myVar)", "sum(4,5)"); + assertQueryEquals( + "func", req, "{!func}sum(1,2,3,4,5)", "{!func}sum(1,2,3,4,$myVar)", "sum(1,2,3,4,5)"); + assertQueryEquals( + "func", + req, + "{!func}sum(4,$myInner)", + "{!func}sum(4,product(4,foo_i))", + "{!func}sum(4,product(4,$myField))", + "{!func}sum(4,product(4,field(foo_i)))"); } finally { req.close(); } } public void testQueryFrange() throws Exception { - SolrQueryRequest req = req("myVar", "5", - "low","0.2", - "high", "20.4", - "myField","foo_i", - "myInner","product(4,foo_i)"); + SolrQueryRequest req = + req( + "myVar",
"5", + "low", "0.2", + "high", "20.4", + "myField", "foo_i", + "myInner", "product(4,foo_i)"); try { // NOTE: unlike most queries, frange defaults to cost==100 - assertQueryEquals("frange", req, - "{!frange l=0.2 h=20.4}sum(4,5)", - "{!frange l=0.2 h=20.4 cost=100}sum(4,5)", - "{!frange l=$low h=$high}sum(4,$myVar)"); + assertQueryEquals( + "frange", + req, + "{!frange l=0.2 h=20.4}sum(4,5)", + "{!frange l=0.2 h=20.4 cost=100}sum(4,5)", + "{!frange l=$low h=$high}sum(4,$myVar)"); } finally { req.close(); } @@ -427,73 +523,87 @@ public void testQueryFrange() throws Exception { public void testQueryGeofilt() throws Exception { checkQuerySpatial("geofilt"); } + public void testQueryBbox() throws Exception { checkQuerySpatial("bbox"); } public void testLocalParamsWithRepeatingParam() throws Exception { - SolrQueryRequest req = req("q", "foo", - "bq", "111", - "bq", "222"); + SolrQueryRequest req = + req( + "q", "foo", + "bq", "111", + "bq", "222"); try { - assertQueryEquals("dismax", - req, - "{!dismax}foo", - "{!dismax bq=111 bq=222}foo", - "{!dismax bq=222 bq=111}foo"); + assertQueryEquals( + "dismax", + req, + "{!dismax}foo", + "{!dismax bq=111 bq=222}foo", + "{!dismax bq=222 bq=111}foo"); } finally { req.close(); } } private void checkQuerySpatial(final String type) throws Exception { - SolrQueryRequest req = req("myVar", "5", - "d","109", - "pt","10.312,-20.556", - "sfield","store"); + SolrQueryRequest req = + req( + "myVar", "5", + "d", "109", + "pt", "10.312,-20.556", + "sfield", "store"); try { - assertQueryEquals(type, req, - "{!"+type+" d=109}", - "{!"+type+" sfield=$sfield}", - "{!"+type+" sfield=store d=109}", - "{!"+type+" sfield=store d=$d pt=$pt}", - "{!"+type+" sfield=store d=$d pt=10.312,-20.556}", - "{!"+type+"}"); + assertQueryEquals( + type, + req, + "{!" + type + " d=109}", + "{!" + type + " sfield=$sfield}", + "{!" + type + " sfield=store d=109}", + "{!" + type + " sfield=store d=$d pt=$pt}", + "{!" + type + " sfield=store d=$d pt=10.312,-20.556}", + "{!" + type + "}"); // diff SpatialQueryable FieldTypes matter for determining final query - assertQueryEquals(type, req, - "{!"+type+" sfield=xy}", - "{!"+type+" sfield=xy d=109}", - "{!"+type+" sfield=xy d=$d pt=$pt}", - "{!"+type+" sfield=xy d=$d pt=10.312,-20.556}"); + assertQueryEquals( + type, + req, + "{!" + type + " sfield=xy}", + "{!" + type + " sfield=xy d=109}", + "{!" + type + " sfield=xy d=$d pt=$pt}", + "{!"
+ type + " sfield=xy d=$d pt=10.312,-20.556}"); } finally { req.close(); } } + public void testQueryJoin() throws Exception { - SolrQueryRequest req = req("myVar", "5", - "df","text", - "ff","foo_s", - "tt", "bar_s"); + SolrQueryRequest req = + req( + "myVar", "5", + "df", "text", + "ff", "foo_s", + "tt", "bar_s"); try { - assertQueryEquals("join", req, - "{!join from=foo_s to=bar_s}asdf", - "{!join from=$ff to=$tt}asdf", - "{!join from=$ff to='bar_s'}text:asdf"); + assertQueryEquals( + "join", + req, + "{!join from=foo_s to=bar_s}asdf", + "{!join from=$ff to=$tt}asdf", + "{!join from=$ff to='bar_s'}text:asdf"); } finally { req.close(); } } public void testQueryScoreJoin() throws Exception { - SolrQueryRequest req = req("myVar", "5", - "df", "text", - "ff", "foo_s", - "tt", "bar_s", - "scoreavg","avg"); + SolrQueryRequest req = + req("myVar", "5", "df", "text", "ff", "foo_s", "tt", "bar_s", "scoreavg", "avg"); try { - assertQueryEquals("join", req, + assertQueryEquals( + "join", + req, "{!join from=foo_s to=bar_s score=avg}asdf", "{!join from=$ff to=$tt score=Avg}asdf", "{!join from=$ff to='bar_s' score=$scoreavg}text:asdf"); @@ -503,172 +613,221 @@ public void testQueryScoreJoin() throws Exception { } public void testTerms() throws Exception { - assertQueryEquals("terms", "{!terms f=foo_i}10,20,30,-10,-20,-30", "{!terms f=foo_i}10,20,30,-10,-20,-30"); + assertQueryEquals( + "terms", "{!terms f=foo_i}10,20,30,-10,-20,-30", "{!terms f=foo_i}10,20,30,-10,-20,-30"); } public void testBlockJoin() throws Exception { - assertQueryEquals("parent", "{!parent which=foo_s:parent}dude", - "{!parent which=foo_s:parent}dude"); - assertQueryEquals("child", "{!child of=foo_s:parent}dude", - "{!child of=foo_s:parent}dude"); + assertQueryEquals( + "parent", "{!parent which=foo_s:parent}dude", "{!parent which=foo_s:parent}dude"); + assertQueryEquals("child", "{!child of=foo_s:parent}dude", "{!child of=foo_s:parent}dude"); // zero query case - assertQueryEquals(null, "{!parent which=foo_s:parent}", - "{!parent which=foo_s:parent}"); - assertQueryEquals(null, "{!child of=foo_s:parent}", - "{!child of=foo_s:parent}"); - assertQueryEquals(null, "{!parent which='+*:* -foo_s:parent'}", - "{!child of=foo_s:parent}"); - - try (SolrQueryRequest req = req("fq","bar_s:baz","fq","{!tag=fqban}bar_s:ban", - "ffq","bar_s:baz","ffq","{!tag=ffqban}bar_s:ban")) { - assertQueryEquals("filters", req, - "{!parent which=foo_s:parent param=$fq}foo_s:bar", - "{!parent which=foo_s:parent param=$ffq}foo_s:bar" // differently named params - ); - assertQueryEquals("filters", req, - "{!parent which=foo_s:parent param=$fq excludeTags=fqban}foo_s:bar", - "{!parent which=foo_s:parent param=$ffq excludeTags=ffqban}foo_s:bar" // differently named params - ); - - QueryUtils.checkUnequal(// parent filter is not an equal to child - QParser.getParser("{!child of=foo_s:parent}", req).getQuery(), - QParser.getParser("{!parent which=foo_s:parent}", req).getQuery()); + assertQueryEquals(null, "{!parent which=foo_s:parent}", "{!parent which=foo_s:parent}"); + assertQueryEquals(null, "{!child of=foo_s:parent}", "{!child of=foo_s:parent}"); + assertQueryEquals(null, "{!parent which='+*:* -foo_s:parent'}", "{!child of=foo_s:parent}"); + + try (SolrQueryRequest req = + req( + "fq", + "bar_s:baz", + "fq", + "{!tag=fqban}bar_s:ban", + "ffq", + "bar_s:baz", + "ffq", + "{!tag=ffqban}bar_s:ban")) { + assertQueryEquals( + "filters", + req, + "{!parent which=foo_s:parent param=$fq}foo_s:bar", + "{!parent which=foo_s:parent param=$ffq}foo_s:bar" // differently 
named params ); + assertQueryEquals( + "filters", + req, + "{!parent which=foo_s:parent param=$fq excludeTags=fqban}foo_s:bar", + "{!parent which=foo_s:parent param=$ffq excludeTags=ffqban}foo_s:bar" // differently named + // params + ); + + QueryUtils.checkUnequal( // parent filter is not equal to child + QParser.getParser("{!child of=foo_s:parent}", req).getQuery(), + QParser.getParser("{!parent which=foo_s:parent}", req).getQuery()); } // sanity check multiple ways of specifying _nest_path_ prefixes final String parent_path = "/aa/bb"; - try (SolrQueryRequest req = req("parent_filt", "(*:* -{!prefix f='_nest_path_' v='"+parent_path+"/'})", - "child_q", "(+foo +{!prefix f='_nest_path_' v='"+parent_path+"/'})", - "parent_q", "(+bar +{!field f='_nest_path_' v='"+parent_path+"'})")) { - - assertQueryEquals("parent", req, - - // using local params to refer to other query params using 'prefix' parser... - "{!parent which=$parent_filt v=$child_q}", - - // using 'inline' prefix query syntax... - // - // '/' has to be escaped other wise it will be treated as a regex query... - // ...and when used inside the 'which' param it has to be escaped *AGAIN* because of - // the "quoted" localparam evaluation layer... - // (and of course '\' escaping is the java syntax as well, we have to double it) - "{!parent which='*:* -_nest_path_:"+(parent_path + "/").replace("/","\\\\/") +"*'}" - + "(+foo +_nest_path_:" + (parent_path + "/").replace("/", "\\/") + "*)"); - - assertQueryEquals("child", req, - - // using local params to refer to other query params using 'prefix' parser... - "{!child of=$parent_filt v=$parent_q}", - - // using 'inline' prefix query syntax... - // - // '/' has to be escaped other wise it will be treated as a regex query... - // ...and when used inside the 'which' param it has to be escaped *AGAIN* because of - // the "quoted" localparam evaluation layer... - // (and of course '\' escaping is the java syntax as well, we have to double it) - "{!child of='*:* -_nest_path_:"+(parent_path + "/").replace("/","\\\\/") +"*'}" - + "(+bar +_nest_path_:" + parent_path.replace("/", "\\/") + ")"); - + try (SolrQueryRequest req = + req( + "parent_filt", "(*:* -{!prefix f='_nest_path_' v='" + parent_path + "/'})", + "child_q", "(+foo +{!prefix f='_nest_path_' v='" + parent_path + "/'})", + "parent_q", "(+bar +{!field f='_nest_path_' v='" + parent_path + "'})")) { + + assertQueryEquals( + "parent", + req, + + // using local params to refer to other query params using 'prefix' parser... + "{!parent which=$parent_filt v=$child_q}", + + // using 'inline' prefix query syntax... + // + // '/' has to be escaped otherwise it will be treated as a regex query... + // ...and when used inside the 'which' param it has to be escaped *AGAIN* because of + // the "quoted" localparam evaluation layer... + // (and of course '\' escaping is the Java syntax as well, we have to double it) + "{!parent which='*:* -_nest_path_:" + + (parent_path + "/").replace("/", "\\\\/") + + "*'}" + + "(+foo +_nest_path_:" + + (parent_path + "/").replace("/", "\\/") + + "*)"); + + assertQueryEquals( + "child", + req, + + // using local params to refer to other query params using 'prefix' parser... + "{!child of=$parent_filt v=$parent_q}", + + // using 'inline' prefix query syntax... + // + // '/' has to be escaped otherwise it will be treated as a regex query... + // ...and when used inside the 'which' param it has to be escaped *AGAIN* because of + // the "quoted" localparam evaluation layer...
+ // (and of course '\' escaping is the Java syntax as well, we have to double it) + "{!child of='*:* -_nest_path_:" + + (parent_path + "/").replace("/", "\\\\/") + + "*'}" + + "(+bar +_nest_path_:" + + parent_path.replace("/", "\\/") + + ")"); } } public void testFilters() throws Exception { - final SolrQueryRequest req = req( - "fq","bar_s:baz","fq","{!tag=fqban}bar_s:ban", - "ffq","{!tag=ffqbaz}bar_s:baz","ffq","{!tag=ffqban}bar_s:ban"); + final SolrQueryRequest req = + req( + "fq", + "bar_s:baz", + "fq", + "{!tag=fqban}bar_s:ban", + "ffq", + "{!tag=ffqbaz}bar_s:baz", + "ffq", + "{!tag=ffqban}bar_s:ban"); try { - assertQueryEquals("filters", req, - "{!filters param=$fq}foo_s:bar", - "{!filters param=$fq}foo_s:bar", - "{!filters param=$ffq}foo_s:bar" // differently named params - ); - assertQueryEquals("filters", req, - "{!filters param=$fq excludeTags=fqban}foo_s:bar", - "{!filters param=$ffq excludeTags=ffqban}foo_s:bar" - ); - assertQueryEquals("filters", req, - "{!filters excludeTags=top}{!tag=top v='foo_s:bar'}", - "{!filters param=$ffq excludeTags='ffqban,ffqbaz'}" - ); - QueryUtils.checkUnequal( - QParser.getParser("{!filters param=$fq}foo_s:bar", req).getQuery(), - QParser.getParser("{!filters param=$fq excludeTags=fqban}foo_s:bar", req).getQuery()); + assertQueryEquals( + "filters", + req, + "{!filters param=$fq}foo_s:bar", + "{!filters param=$fq}foo_s:bar", + "{!filters param=$ffq}foo_s:bar" // differently named params + ); + assertQueryEquals( + "filters", + req, + "{!filters param=$fq excludeTags=fqban}foo_s:bar", + "{!filters param=$ffq excludeTags=ffqban}foo_s:bar"); + assertQueryEquals( + "filters", + req, + "{!filters excludeTags=top}{!tag=top v='foo_s:bar'}", + "{!filters param=$ffq excludeTags='ffqban,ffqbaz'}"); + QueryUtils.checkUnequal( + QParser.getParser("{!filters param=$fq}foo_s:bar", req).getQuery(), + QParser.getParser("{!filters param=$fq excludeTags=fqban}foo_s:bar", req).getQuery()); } finally { req.close(); } } - public void testGraphQuery() throws Exception { - SolrQueryRequest req = req("from", "node_s", - "to","edge_s", - "traversalFilter","foo", - "returnOnlyLeaf","true", - "returnRoot","false", - "maxDepth","2", - "useAutn","false" - ); + SolrQueryRequest req = + req( + "from", + "node_s", + "to", + "edge_s", + "traversalFilter", + "foo", + "returnOnlyLeaf", + "true", + "returnRoot", + "false", + "maxDepth", + "2", + "useAutn", + "false"); // make sure all param substitution works for all args to graph query.
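Referring back to the _nest_path_ comments just above: the same path ends up escaped once for the inline prefix-query syntax and twice when it also passes through the quoted 'which'/'of' local param, which is why the Java string literals carry two and four backslashes respectively. A worked sketch of the two layers (using parent_path = "/aa/bb" as in the test; variable names here are illustrative):

    String parentPath = "/aa/bb";
    // one escaping layer, for the inline prefix query body: \/aa\/bb\/
    String inline = (parentPath + "/").replace("/", "\\/");
    // escaped again for the quoted 'which' local param: \\/aa\\/bb\\/
    String inWhich = (parentPath + "/").replace("/", "\\\\/");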
- assertQueryEquals("graph", req, - "{!graph from=node_s to=edge_s}*:*", - "{!graph from=$from to=$to}*:*"); + assertQueryEquals( + "graph", req, "{!graph from=node_s to=edge_s}*:*", "{!graph from=$from to=$to}*:*"); - assertQueryEquals("graph", req, + assertQueryEquals( + "graph", + req, "{!graph from=node_s to=edge_s traversalFilter=foo}*:*", "{!graph from=$from to=$to traversalFilter=$traversalFilter}*:*"); - assertQueryEquals("graph", req, + assertQueryEquals( + "graph", + req, "{!graph from=node_s to=edge_s traversalFilter=foo returnOnlyLeaf=true}*:*", "{!graph from=$from to=$to traversalFilter=$traversalFilter returnOnlyLeaf=$returnOnlyLeaf}*:*"); - assertQueryEquals("graph", req, + assertQueryEquals( + "graph", + req, "{!graph from=node_s to=edge_s traversalFilter=foo returnOnlyLeaf=true returnRoot=false}*:*", "{!graph from=$from to=$to traversalFilter=$traversalFilter returnOnlyLeaf=$returnOnlyLeaf returnRoot=$returnRoot}*:*"); - assertQueryEquals("graph", req, + assertQueryEquals( + "graph", + req, "{!graph from=node_s to=edge_s traversalFilter=foo returnOnlyLeaf=true returnRoot=false maxDepth=2}*:*", "{!graph from=$from to=$to traversalFilter=$traversalFilter returnOnlyLeaf=$returnOnlyLeaf returnRoot=$returnRoot maxDepth=$maxDepth}*:*"); - assertQueryEquals("graph", req, + assertQueryEquals( + "graph", + req, "{!graph from=node_s to=edge_s traversalFilter=foo returnOnlyLeaf=true returnRoot=false maxDepth=2 useAutn=false}*:*", "{!graph from=$from to=$to traversalFilter=$traversalFilter returnOnlyLeaf=$returnOnlyLeaf returnRoot=$returnRoot maxDepth=$maxDepth useAutn=$useAutn}*:*"); - } public void testQuerySurround() throws Exception { - assertQueryEquals("surround", "{!surround}and(apache,solr)", - "and(apache,solr)", "apache AND solr"); + assertQueryEquals( + "surround", "{!surround}and(apache,solr)", + "and(apache,solr)", "apache AND solr"); } public void testQueryComplexPhrase() throws Exception { - assertQueryEquals("complexphrase", "{!complexphrase df=text}\"jo* smith\"", - "text:\"jo* smith\""); - assertQueryEquals("complexphrase", "{!complexphrase df=title}\"jo* smith\"", - "title:\"jo* smith\""); + assertQueryEquals( + "complexphrase", "{!complexphrase df=text}\"jo* smith\"", "text:\"jo* smith\""); + assertQueryEquals( + "complexphrase", "{!complexphrase df=title}\"jo* smith\"", "title:\"jo* smith\""); } public void testFuncTestfunc() throws Exception { - assertFuncEquals("testfunc(foo_i)","testfunc(field(foo_i))"); + assertFuncEquals("testfunc(foo_i)", "testfunc(field(foo_i))"); assertFuncEquals("testfunc(23)"); - assertFuncEquals("testfunc(sum(23,foo_i))", - "testfunc(sum(23,field(foo_i)))"); + assertFuncEquals("testfunc(sum(23,foo_i))", "testfunc(sum(23,field(foo_i)))"); } + public void testFuncOrd() throws Exception { - assertFuncEquals("ord(foo_s)","ord(foo_s )"); + assertFuncEquals("ord(foo_s)", "ord(foo_s )"); } public void testFuncLiteral() throws Exception { - SolrQueryRequest req = req("someVar","a string"); + SolrQueryRequest req = req("someVar", "a string"); try { - assertFuncEquals(req, - "literal('a string')","literal(\"a string\")", - "literal($someVar)"); + assertFuncEquals(req, "literal('a string')", "literal(\"a string\")", "literal($someVar)"); } finally { req.close(); } } + public void testFuncRord() throws Exception { - assertFuncEquals("rord(foo_s)","rord(foo_s )"); + assertFuncEquals("rord(foo_s)", "rord(foo_s )"); } public void testFuncCscore() throws Exception { @@ -678,122 +837,107 @@ public void testFuncCscore() throws Exception { public void 
testFuncTop() throws Exception { assertFuncEquals("top(sum(3,foo_i))"); } + public void testFuncLinear() throws Exception { - SolrQueryRequest req = req("someVar","27"); + SolrQueryRequest req = req("someVar", "27"); try { - assertFuncEquals(req, - "linear(foo_i,$someVar,42)", - "linear(foo_i, 27, 42)"); + assertFuncEquals(req, "linear(foo_i,$someVar,42)", "linear(foo_i, 27, 42)"); } finally { req.close(); } } + public void testFuncRecip() throws Exception { - SolrQueryRequest req = req("someVar","27"); + SolrQueryRequest req = req("someVar", "27"); try { - assertFuncEquals(req, - "recip(foo_i,$someVar,42, 27 )", - "recip(foo_i, 27, 42,$someVar)"); + assertFuncEquals( + req, "recip(foo_i,$someVar,42, 27 )", "recip(foo_i, 27, 42,$someVar)"); } finally { req.close(); } } + public void testFuncScale() throws Exception { - SolrQueryRequest req = req("someVar","27"); + SolrQueryRequest req = req("someVar", "27"); try { - assertFuncEquals(req, - "scale(field(foo_i),$someVar,42)", - "scale(foo_i, 27, 42)"); + assertFuncEquals(req, "scale(field(foo_i),$someVar,42)", "scale(foo_i, 27, 42)"); } finally { req.close(); } } + public void testFuncDiv() throws Exception { assertFuncEquals("div(5,4)", "div(5, 4)"); - assertFuncEquals("div(foo_i,4)", "div(foo_i, 4)", - "div(field('foo_i'), 4)"); - assertFuncEquals("div(foo_i,sub(4,field('bar_i')))", - "div(field(foo_i), sub(4,bar_i))"); - + assertFuncEquals("div(foo_i,4)", "div(foo_i, 4)", "div(field('foo_i'), 4)"); + assertFuncEquals("div(foo_i,sub(4,field('bar_i')))", "div(field(foo_i), sub(4,bar_i))"); } + public void testFuncMod() throws Exception { assertFuncEquals("mod(5,4)", "mod(5, 4)"); - assertFuncEquals("mod(foo_i,4)", "mod(foo_i, 4)", - "mod(field('foo_i'), 4)"); - assertFuncEquals("mod(foo_i,sub(4,field('bar_i')))", - "mod(field(foo_i), sub(4,bar_i))"); + assertFuncEquals("mod(foo_i,4)", "mod(foo_i, 4)", "mod(field('foo_i'), 4)"); + assertFuncEquals("mod(foo_i,sub(4,field('bar_i')))", "mod(field(foo_i), sub(4,bar_i))"); } + public void testFuncMap() throws Exception { - assertFuncEquals("map(field(foo_i), 0, 45, 100)", - "map(foo_i, 0.0, 45, 100)"); + assertFuncEquals("map(field(foo_i), 0, 45, 100)", "map(foo_i, 0.0, 45, 100)"); } public void testFuncSum() throws Exception { assertFuncEquals("sum(5,4)", "add(5, 4)"); assertFuncEquals("sum(5,4,3,2,1)", "add(5, 4, 3, 2, 1)"); - assertFuncEquals("sum(foo_i,4)", "sum(foo_i, 4)", - "sum(field('foo_i'), 4)"); - assertFuncEquals("add(foo_i,sub(4,field('bar_i')))", - "sum(field(foo_i), sub(4,bar_i))"); - + assertFuncEquals("sum(foo_i,4)", "sum(foo_i, 4)", "sum(field('foo_i'), 4)"); + assertFuncEquals("add(foo_i,sub(4,field('bar_i')))", "sum(field(foo_i), sub(4,bar_i))"); } public void testFuncProduct() throws Exception { assertFuncEquals("product(5,4,3,2,1)", "mul(5, 4, 3, 2, 1)"); assertFuncEquals("product(5,4)", "mul(5, 4)"); - assertFuncEquals("product(foo_i,4)", "product(foo_i, 4)", - "product(field('foo_i'), 4)"); - assertFuncEquals("mul(foo_i,sub(4,field('bar_i')))", - "product(field(foo_i), sub(4,bar_i))"); - + assertFuncEquals("product(foo_i,4)", "product(foo_i, 4)", "product(field('foo_i'), 4)"); + assertFuncEquals("mul(foo_i,sub(4,field('bar_i')))", "product(field(foo_i), sub(4,bar_i))"); } + public void testFuncSub() throws Exception { assertFuncEquals("sub(5,4)", "sub(5, 4)"); assertFuncEquals("sub(foo_i,4)", "sub(foo_i, 4)"); assertFuncEquals("sub(foo_i,sum(4,bar_i))", "sub(foo_i, sum(4,bar_i))"); } + public void testFuncVector() throws Exception { assertFuncEquals("vector(5,4, 
field(foo_i))", "vector(5, 4, foo_i)"); assertFuncEquals("vector(foo_i,4)", "vector(foo_i, 4)"); assertFuncEquals("vector(foo_i,sum(4,bar_i))", "vector(foo_i, sum(4,bar_i))"); } + public void testFuncQuery() throws Exception { - SolrQueryRequest req = req("myQ","asdf"); + SolrQueryRequest req = req("myQ", "asdf"); try { - assertFuncEquals(req, - "query($myQ)", - "query($myQ,0)", - "query({!lucene v=$myQ},0)"); + assertFuncEquals(req, "query($myQ)", "query($myQ,0)", "query({!lucene v=$myQ},0)"); } finally { req.close(); } } + public void testFuncBoost() throws Exception { - SolrQueryRequest req = req("myQ","asdf"); + SolrQueryRequest req = req("myQ", "asdf"); try { - assertFuncEquals(req, - "boost($myQ,sum(4,5))", - "boost({!lucene v=$myQ},sum(4,5))"); + assertFuncEquals(req, "boost($myQ,sum(4,5))", "boost({!lucene v=$myQ},sum(4,5))"); } finally { req.close(); } } + public void testFuncJoindf() throws Exception { assertFuncEquals("joindf(foo,bar)"); } public void testFuncGeodist() throws Exception { String pt = "10.312,-20.556"; - try (SolrQueryRequest req = req("pt", pt, - "sfield", "store")) { + try (SolrQueryRequest req = req("pt", pt, "sfield", "store")) { - assertFuncEquals(req, - "geodist($pt)", - "geodist(" + pt + ")", - "geodist(" + pt + "," + pt + ")"); + assertFuncEquals( + req, "geodist($pt)", "geodist(" + pt + ")", "geodist(" + pt + "," + pt + ")"); - assertFuncEquals(req, - "geodist()"); + assertFuncEquals(req, "geodist()"); // geodist() does not support field names in its arguments sometimes // "geodist(store,$pt)", // "geodist(field(store),$pt)", @@ -803,146 +947,158 @@ public void testFuncGeodist() throws Exception { public void testFuncHsin() throws Exception { assertFuncEquals("hsin(45,true,0,0,45,45)"); } + public void testFuncGhhsin() throws Exception { - assertFuncEquals("ghhsin(45,id,'asdf')", - "ghhsin(45,field(id),'asdf')");// "id" is just a single-valued string field + assertFuncEquals( + "ghhsin(45,id,'asdf')", + "ghhsin(45,field(id),'asdf')"); // "id" is just a single-valued string field } + public void testFuncGeohash() throws Exception { assertFuncEquals("geohash(45,99)"); } + public void testFuncDist() throws Exception { - assertFuncEquals("dist(2,45,99,101,111)", - "dist(2,vector(45,99),vector(101,111))"); + assertFuncEquals("dist(2,45,99,101,111)", "dist(2,vector(45,99),vector(101,111))"); } + public void testFuncSqedist() throws Exception { - assertFuncEquals("sqedist(45,99,101,111)", - "sqedist(vector(45,99),vector(101,111))"); + assertFuncEquals("sqedist(45,99,101,111)", "sqedist(vector(45,99),vector(101,111))"); } + public void testFuncMin() throws Exception { assertFuncEquals("min(5,4,3,2,1)", "min(5, 4, 3, 2, 1)"); assertFuncEquals("min(foo_i,4)", "min(field('foo_i'), 4)"); - assertFuncEquals("min(foo_i,sub(4,field('bar_i')))", - "min(field(foo_i), sub(4,bar_i))"); + assertFuncEquals("min(foo_i,sub(4,field('bar_i')))", "min(field(foo_i), sub(4,bar_i))"); } + public void testFuncMax() throws Exception { assertFuncEquals("max(5,4,3,2,1)", "max(5, 4, 3, 2, 1)"); assertFuncEquals("max(foo_i,4)", "max(field('foo_i'), 4)"); - assertFuncEquals("max(foo_i,sub(4,field('bar_i')))", - "max(field(foo_i), sub(4,bar_i))"); + assertFuncEquals("max(foo_i,sub(4,field('bar_i')))", "max(field(foo_i), sub(4,bar_i))"); } public void testFuncMs() throws Exception { // Note ms() takes in field name, not field(...) 
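The note above about ms(), and the similar note in testFuncTerms below, mark a real split in the function grammar: ms(), docfreq(), termfreq(), ttf(), idf() and tf() take a raw field *name* (or date math) as their argument, while most numeric functions accept either a bare name or an explicit field(...) value source. A short sketch of the contrast, restating only equivalences this test already asserts:

    // ms() parses its argument as a date or a field name; quoting is optional.
    assertFuncEquals("ms(myDateField_dt)", "ms('myDateField_dt')");
    // Numeric functions additionally accept an explicit field(...) value source.
    assertFuncEquals("min(foo_i,4)", "min(field('foo_i'), 4)");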
assertFuncEquals("ms()", "ms(NOW)"); - assertFuncEquals("ms(2000-01-01T00:00:00Z)", - "ms('2000-01-01T00:00:00Z')"); - assertFuncEquals("ms(myDateField_dt)", - "ms('myDateField_dt')"); - assertFuncEquals("ms(2000-01-01T00:00:00Z,myDateField_dt)", - "ms('2000-01-01T00:00:00Z','myDateField_dt')"); - assertFuncEquals("ms(myDateField_dt, NOW)", - "ms('myDateField_dt', NOW)"); + assertFuncEquals("ms(2000-01-01T00:00:00Z)", "ms('2000-01-01T00:00:00Z')"); + assertFuncEquals("ms(myDateField_dt)", "ms('myDateField_dt')"); + assertFuncEquals( + "ms(2000-01-01T00:00:00Z,myDateField_dt)", "ms('2000-01-01T00:00:00Z','myDateField_dt')"); + assertFuncEquals("ms(myDateField_dt, NOW)", "ms('myDateField_dt', NOW)"); } + public void testFuncMathConsts() throws Exception { assertFuncEquals("pi()"); assertFuncEquals("e()"); } + public void testFuncTerms() throws Exception { - SolrQueryRequest req = req("myField","field_t","myTerm","my term"); + SolrQueryRequest req = req("myField", "field_t", "myTerm", "my term"); try { - for (final String type : new String[]{"docfreq","termfreq", - "totaltermfreq","ttf", - "idf","tf"}) { + for (final String type : + new String[] { + "docfreq", "termfreq", + "totaltermfreq", "ttf", + "idf", "tf" + }) { // NOTE: these functions take a field *name*, not a field(..) source - assertFuncEquals(req, - type + "('field_t','my term')", - type + "(field_t,'my term')", - type + "(field_t,$myTerm)", - type + "(field_t,$myTerm)", - type + "($myField,$myTerm)"); + assertFuncEquals( + req, + type + "('field_t','my term')", + type + "(field_t,'my term')", + type + "(field_t,$myTerm)", + type + "(field_t,$myTerm)", + type + "($myField,$myTerm)"); } // ttf is an alias for totaltermfreq - assertFuncEquals(req, - "ttf(field_t,'my term')", "ttf('field_t','my term')", - "totaltermfreq(field_t,'my term')"); + assertFuncEquals( + req, + "ttf(field_t,'my term')", + "ttf('field_t','my term')", + "totaltermfreq(field_t,'my term')"); } finally { req.close(); } } + public void testFuncSttf() throws Exception { // sttf is an alias for sumtotaltermfreq - assertFuncEquals("sttf(foo_t)", "sttf('foo_t')", - "sumtotaltermfreq(foo_t)", "sumtotaltermfreq('foo_t')"); + assertFuncEquals( + "sttf(foo_t)", "sttf('foo_t')", + "sumtotaltermfreq(foo_t)", "sumtotaltermfreq('foo_t')"); assertFuncEquals("sumtotaltermfreq('foo_t')"); } + public void testFuncNorm() throws Exception { - assertFuncEquals("norm(foo_t)","norm('foo_t')"); + assertFuncEquals("norm(foo_t)", "norm('foo_t')"); } + public void testFuncMaxdoc() throws Exception { assertFuncEquals("maxdoc()"); } + public void testFuncNumdocs() throws Exception { assertFuncEquals("numdocs()"); } public void testFuncBools() throws Exception { - SolrQueryRequest req = req("myTrue","true","myFalse","false"); + SolrQueryRequest req = req("myTrue", "true", "myFalse", "false"); try { - assertFuncEquals(req, "true","$myTrue"); - assertFuncEquals(req, "false","$myFalse"); + assertFuncEquals(req, "true", "$myTrue"); + assertFuncEquals(req, "false", "$myFalse"); } finally { req.close(); } } public void testFuncExists() throws Exception { - SolrQueryRequest req = req("myField","field_t","myQ","asdf"); + SolrQueryRequest req = req("myField", "field_t", "myQ", "asdf"); try { - assertFuncEquals(req, - "exists(field_t)", - "exists($myField)", - "exists(field('field_t'))", - "exists(field($myField))"); - assertFuncEquals(req, - "exists(query($myQ))", - "exists(query({!lucene v=$myQ}))"); + assertFuncEquals( + req, + "exists(field_t)", + "exists($myField)", + "exists(field('field_t'))",
+ "exists(field($myField))"); + assertFuncEquals(req, "exists(query($myQ))", "exists(query({!lucene v=$myQ}))"); } finally { req.close(); } } public void testFuncNot() throws Exception { - SolrQueryRequest req = req("myField","field_b", "myTrue","true"); + SolrQueryRequest req = req("myField", "field_b", "myTrue", "true"); try { assertFuncEquals(req, "not(true)", "not($myTrue)"); assertFuncEquals(req, "not(not(true))", "not(not($myTrue))"); - assertFuncEquals(req, - "not(field_b)", - "not($myField)", - "not(field('field_b'))", - "not(field($myField))"); - assertFuncEquals(req, - "not(exists(field_b))", - "not(exists($myField))", - "not(exists(field('field_b')))", - "not(exists(field($myField)))"); + assertFuncEquals( + req, "not(field_b)", "not($myField)", "not(field('field_b'))", "not(field($myField))"); + assertFuncEquals( + req, + "not(exists(field_b))", + "not(exists($myField))", + "not(exists(field('field_b')))", + "not(exists(field($myField)))"); } finally { req.close(); } } + public void testFuncDoubleValueBools() throws Exception { - SolrQueryRequest req = req("myField","field_b","myTrue","true"); + SolrQueryRequest req = req("myField", "field_b", "myTrue", "true"); try { - for (final String type : new String[]{"and","or","xor"}) { - assertFuncEquals(req, - type + "(field_b,true)", - type + "(field_b,$myTrue)", - type + "(field('field_b'),true)", - type + "(field($myField),$myTrue)", - type + "($myField,$myTrue)"); + for (final String type : new String[] {"and", "or", "xor"}) { + assertFuncEquals( + req, + type + "(field_b,true)", + type + "(field_b,$myTrue)", + type + "(field('field_b'),true)", + type + "(field($myField),$myTrue)", + type + "($myField,$myTrue)"); } } finally { req.close(); @@ -950,45 +1106,42 @@ public void testFuncDoubleValueBools() throws Exception { } public void testFuncIf() throws Exception { - SolrQueryRequest req = req("myBoolField","foo_b", - "myIntField","bar_i", - "myTrue","true"); + SolrQueryRequest req = + req( + "myBoolField", "foo_b", + "myIntField", "bar_i", + "myTrue", "true"); try { - assertFuncEquals(req, - "if(foo_b,bar_i,25)", - "if($myBoolField,bar_i,25)", - "if(field('foo_b'),$myIntField,25)", - "if(field($myBoolField),field('bar_i'),25)"); - assertFuncEquals(req, - "if(true,37,field($myIntField))", - "if($myTrue,37,$myIntField)"); + assertFuncEquals( + req, + "if(foo_b,bar_i,25)", + "if($myBoolField,bar_i,25)", + "if(field('foo_b'),$myIntField,25)", + "if(field($myBoolField),field('bar_i'),25)"); + assertFuncEquals(req, "if(true,37,field($myIntField))", "if($myTrue,37,$myIntField)"); } finally { req.close(); } } public void testFuncDef() throws Exception { - SolrQueryRequest req = req("myField","bar_f"); + SolrQueryRequest req = req("myField", "bar_f"); try { - assertFuncEquals(req, - "def(bar_f,25)", - "def($myField,25)", - "def(field('bar_f'),25)"); - assertFuncEquals(req, - "def(ceil(bar_f),25)", - "def(ceil($myField),25)", - "def(ceil(field('bar_f')),25)"); + assertFuncEquals(req, "def(bar_f,25)", "def($myField,25)", "def(field('bar_f'),25)"); + assertFuncEquals( + req, "def(ceil(bar_f),25)", "def(ceil($myField),25)", "def(ceil(field('bar_f')),25)"); } finally { req.close(); } } public void testFuncConcat() throws Exception { - SolrQueryRequest req = req("myField","bar_f","myOtherField","bar_t"); + SolrQueryRequest req = req("myField", "bar_f", "myOtherField", "bar_t"); try { - assertFuncEquals(req, + assertFuncEquals( + req, "concat(bar_f,bar_t)", "concat($myField,bar_t)", "concat(bar_f,$myOtherField)", @@ -1000,17 +1153,15 @@ public 
void testFuncConcat() throws Exception { } public void testFuncSingleValueMathFuncs() throws Exception { - SolrQueryRequest req = req("myVal","45", "myField","foo_i"); - for (final String func : new String[] {"abs","rad","deg","sqrt","cbrt", - "log","ln","exp","sin","cos","tan", - "asin","acos","atan", - "sinh","cosh","tanh", - "ceil","floor","rint"}) { + SolrQueryRequest req = req("myVal", "45", "myField", "foo_i"); + for (final String func : + new String[] { + "abs", "rad", "deg", "sqrt", "cbrt", "log", "ln", "exp", "sin", "cos", "tan", "asin", + "acos", "atan", "sinh", "cosh", "tanh", "ceil", "floor", "rint" + }) { try { - assertFuncEquals(req, - func + "(field(foo_i))", func + "(foo_i)", - func + "($myField)"); - assertFuncEquals(req, func + "(45)", func+ "($myVal)"); + assertFuncEquals(req, func + "(field(foo_i))", func + "(foo_i)", func + "($myField)"); + assertFuncEquals(req, func + "(45)", func + "($myVal)"); } finally { req.close(); } @@ -1018,16 +1169,13 @@ public void testFuncSingleValueMathFuncs() throws Exception { } public void testFuncDoubleValueMathFuncs() throws Exception { - SolrQueryRequest req = req("myVal","45", "myOtherVal", "27", - "myField","foo_i"); - for (final String func : new String[] {"pow","hypot","atan2"}) { + SolrQueryRequest req = req("myVal", "45", "myOtherVal", "27", "myField", "foo_i"); + for (final String func : new String[] {"pow", "hypot", "atan2"}) { try { - assertFuncEquals(req, - func + "(field(foo_i),$myVal)", func+"(foo_i,$myVal)", - func + "($myField,45)"); - assertFuncEquals(req, - func+"(45,$myOtherVal)", func+"($myVal,27)", - func+"($myVal,$myOtherVal)"); + assertFuncEquals( + req, func + "(field(foo_i),$myVal)", func + "(foo_i,$myVal)", func + "($myField,45)"); + assertFuncEquals( + req, func + "(45,$myOtherVal)", func + "($myVal,27)", func + "($myVal,$myOtherVal)"); } finally { req.close(); @@ -1036,64 +1184,64 @@ public void testFuncDoubleValueMathFuncs() throws Exception { } public void testFuncStrdist() throws Exception { - SolrQueryRequest req = req("myVal","zot", "myOtherVal", "yak", - "myField","foo_s1"); + SolrQueryRequest req = req("myVal", "zot", "myOtherVal", "yak", "myField", "foo_s1"); try { - assertFuncEquals(req, - "strdist(\"zot\",literal('yak'),edit)", - "strdist(literal(\"zot\"),'yak', edit )", - "strdist(literal($myVal),literal($myOtherVal),edit)"); - assertFuncEquals(req, - "strdist(\"zot\",literal($myOtherVal),ngram)", - "strdist(\"zot\",'yak', ngram, 2)"); - assertFuncEquals(req, - "strdist(field('foo_s1'),literal($myOtherVal),jw)", - "strdist(field($myField),\"yak\",jw)", - "strdist($myField,'yak', jw)"); + assertFuncEquals( + req, + "strdist(\"zot\",literal('yak'),edit)", + "strdist(literal(\"zot\"),'yak', edit )", + "strdist(literal($myVal),literal($myOtherVal),edit)"); + assertFuncEquals( + req, "strdist(\"zot\",literal($myOtherVal),ngram)", "strdist(\"zot\",'yak', ngram, 2)"); + assertFuncEquals( + req, + "strdist(field('foo_s1'),literal($myOtherVal),jw)", + "strdist(field($myField),\"yak\",jw)", + "strdist($myField,'yak', jw)"); } finally { req.close(); } } + public void testFuncField() throws Exception { - assertFuncEquals("field(\"foo_i\")", - "field('foo_i\')", - "foo_i"); + assertFuncEquals("field(\"foo_i\")", "field('foo_i\')", "foo_i"); // simple VS of single valued field should be same as asking for min/max on that field - assertFuncEquals("field(\"foo_i\")", - "field('foo_i',min)", - "field(foo_i,'min')", - "field('foo_i',max)", - "field(foo_i,'max')", - "foo_i"); + assertFuncEquals( + "field(\"foo_i\")", 
+ "field('foo_i',min)", + "field(foo_i,'min')", + "field('foo_i',max)", + "field(foo_i,'max')", + "foo_i"); // multivalued field with selector String multif = "multi_int_with_docvals"; SolrQueryRequest req = req("my_field", multif); // this test is only viable if it's a multivalued field, sanity check the schema - assertTrue(multif + " is no longer multivalued, who broke this schema?", - req.getSchema().getField(multif).multiValued()); - assertFuncEquals(req, - "field($my_field,'MIN')", - "field('"+multif+"',min)"); - assertFuncEquals(req, - "field($my_field,'max')", - "field('"+multif+"',Max)"); - + assertTrue( + multif + " is no longer multivalued, who broke this schema?", + req.getSchema().getField(multif).multiValued()); + assertFuncEquals(req, "field($my_field,'MIN')", "field('" + multif + "',min)"); + assertFuncEquals(req, "field($my_field,'max')", "field('" + multif + "',Max)"); } + public void testFuncCurrency() throws Exception { - assertFuncEquals("currency(\"amount\")", - "currency('amount\')", - "currency(amount)", - "currency(amount,USD)", - "currency('amount',USD)"); + assertFuncEquals( + "currency(\"amount\")", + "currency('amount\')", + "currency(amount)", + "currency(amount,USD)", + "currency('amount',USD)"); } + public void testFuncRelatedness() throws Exception { - SolrQueryRequest req = req("fore","foo_s:front", "back","foo_s:back"); + SolrQueryRequest req = req("fore", "foo_s:front", "back", "foo_s:back"); try { - assertFuncEquals(req, - "agg_relatedness({!query v='foo_s:front'}, {!query v='foo_s:back'})", - "agg_relatedness($fore, $back)"); + assertFuncEquals( + req, + "agg_relatedness({!query v='foo_s:front'}, {!query v='foo_s:back'})", + "agg_relatedness($fore, $back)"); } finally { req.close(); } @@ -1106,30 +1254,27 @@ public void testTestFuncs() throws Exception { // TODO: more tests public void testQueryMaxScore() throws Exception { - assertQueryEquals("maxscore", "{!maxscore}A OR B OR C", - "A OR B OR C"); - assertQueryEquals("maxscore", "{!maxscore}A AND B", - "A AND B"); - assertQueryEquals("maxscore", "{!maxscore}apache -solr", - "apache -solr", "apache -solr "); - assertQueryEquals("maxscore", "+apache +solr", "apache AND solr", - "+apache +solr"); + assertQueryEquals("maxscore", "{!maxscore}A OR B OR C", "A OR B OR C"); + assertQueryEquals("maxscore", "{!maxscore}A AND B", "A AND B"); + assertQueryEquals("maxscore", "{!maxscore}apache -solr", "apache -solr", "apache -solr "); + assertQueryEquals("maxscore", "+apache +solr", "apache AND solr", "+apache +solr"); } /** - * this test does not assert anything itself, it simply toggles a static - * boolean informing an @AfterClass method to assert that every default - * qparser and valuesource parser configured was recorded by - * assertQueryEquals and assertFuncEquals. + * this test does not assert anything itself, it simply toggles a static boolean informing + * an @AfterClass method to assert that every default qparser and valuesource parser configured + * was recorded by assertQueryEquals and assertFuncEquals. 
*/ public void testParserCoverage() { doAssertParserCoverage = true; } public void testQuerySimple() throws Exception { - SolrQueryRequest req = req("myField","foo_s"); + SolrQueryRequest req = req("myField", "foo_s"); try { - assertQueryEquals("simple", req, + assertQueryEquals( + "simple", + req, "{!simple f=$myField}asdf", "{!simple f=$myField v=asdf}", "{!simple f=foo_s}asdf"); @@ -1142,8 +1287,7 @@ public void testQueryMLT() throws Exception { assertU(adoc("id", "1", "lowerfilt", "sample data")); assertU(commit()); try { - assertQueryEquals("mlt", "{!mlt qf=lowerfilt}1", - "{!mlt qf=lowerfilt v=1}"); + assertQueryEquals("mlt", "{!mlt qf=lowerfilt}1", "{!mlt qf=lowerfilt v=1}"); } finally { delQ("*:*"); assertU(commit()); @@ -1156,25 +1300,24 @@ public void testQueryKNN() throws Exception { doc.addField("vector", Arrays.asList(1, 2, 3, 4)); assertU(adoc(doc)); assertU(commit()); - + try { - assertQueryEquals("knn", "{!knn f=vector}[1.0,2.0,3.0,4.0]", - "{!knn f=vector v=[1.0,2.0,3.0,4.0]}"); + assertQueryEquals( + "knn", "{!knn f=vector}[1.0,2.0,3.0,4.0]", "{!knn f=vector v=[1.0,2.0,3.0,4.0]}"); } finally { delQ("id:0"); assertU(commit()); } } - /** - * NOTE: defType is not only used to pick the parser, but also to record - * the parser being tested for coverage sanity checking + * NOTE: defType is not only used to pick the parser, but also to record the parser being tested + * for coverage sanity checking + * * @see #testParserCoverage * @see #assertQueryEquals */ - protected void assertQueryEquals(final String defType, - final String... inputs) throws Exception { + protected void assertQueryEquals(final String defType, final String... inputs) throws Exception { SolrQueryRequest req = req(new String[] {"df", "text"}); try { assertQueryEquals(defType, req, inputs); @@ -1184,16 +1327,15 @@ protected void assertQueryEquals(final String defType, } /** - * NOTE: defType is not only used to pick the parser, but, if non-null it is - * also to record the parser being tested for coverage sanity checking + * NOTE: defType is not only used to pick the parser, but, if non-null it is also to record the + * parser being tested for coverage sanity checking * * @see QueryUtils#check * @see QueryUtils#checkEqual * @see #testParserCoverage */ - protected void assertQueryEquals(final String defType, - final SolrQueryRequest req, - final String... inputs) throws Exception { + protected void assertQueryEquals( + final String defType, final SolrQueryRequest req, final String... inputs) throws Exception { if (null != defType) qParsersTested.add(defType); @@ -1201,7 +1343,7 @@ protected void assertQueryEquals(final String defType, try { SolrQueryResponse rsp = new SolrQueryResponse(); - SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req,rsp)); + SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp)); for (int i = 0; i < inputs.length; i++) { queries[i] = QParser.getParser(inputs[i], defType, true, req).getQuery(); } @@ -1221,8 +1363,8 @@ protected void assertQueryEquals(final String defType, } /** - * the function name for val parser coverage checking is extracted from - * the first input + * the function name for val parser coverage checking is extracted from the first input + * * @see #assertQueryEquals * @see #testParserCoverage */ @@ -1236,13 +1378,13 @@ protected void assertFuncEquals(final String... 
inputs) throws Exception { } /** - * the function name for val parser coverage checking is extracted from - * the first input + * the function name for val parser coverage checking is extracted from the first input + * * @see #assertQueryEquals * @see #testParserCoverage */ - protected void assertFuncEquals(final SolrQueryRequest req, - final String... inputs) throws Exception { + protected void assertFuncEquals(final SolrQueryRequest req, final String... inputs) + throws Exception { // pull out the function name final String funcName = (new StrParser(inputs[0])).getId(); valParsersTested.add(funcName); @@ -1250,7 +1392,6 @@ protected void assertFuncEquals(final SolrQueryRequest req, assertQueryEquals(FunctionQParserPlugin.NAME, req, inputs); } - public void testAggs() throws Exception { assertFuncEquals("agg(avg(foo_i))", "agg(avg(foo_i))"); assertFuncEquals("agg(avg(foo_i))", "agg_avg(foo_i)"); @@ -1284,75 +1425,79 @@ public void testCompares() throws Exception { assertFuncEquals("gte(foo_i,2)", "gte(foo_i,2)"); assertFuncEquals("eq(foo_i,2)", "eq(foo_i,2)"); - expectThrows(AssertionError.class, "expected error, functions are not equal", + expectThrows( + AssertionError.class, + "expected error, functions are not equal", () -> assertFuncEquals("eq(foo_i,2)", "lt(foo_i,2)")); } public void testChildField() throws Exception { final SolrQueryRequest req = req("q", "{!parent which=type_s1:parent}whatever_s1:foo"); try { - assertFuncEquals(req, - "childfield(name_s1,$q)", "childfield(name_s1,$q)"); + assertFuncEquals(req, "childfield(name_s1,$q)", "childfield(name_s1,$q)"); } finally { req.close(); } } public void testPayloadScoreQuery() throws Exception { - // There was a bug with PayloadScoreQuery's .equals() method that said two queries were equal with different includeSpanScore settings + // There was a bug with PayloadScoreQuery's .equals() method that said two queries were equal + // with different includeSpanScore settings - expectThrows(AssertionError.class, "queries should not have been equal", - () -> assertQueryEquals - ("payload_score" - , "{!payload_score f=foo_dpf v=query func=min includeSpanScore=false}" - , "{!payload_score f=foo_dpf v=query func=min includeSpanScore=true}" - ) - ); + expectThrows( + AssertionError.class, + "queries should not have been equal", + () -> + assertQueryEquals( + "payload_score", + "{!payload_score f=foo_dpf v=query func=min includeSpanScore=false}", + "{!payload_score f=foo_dpf v=query func=min includeSpanScore=true}")); } public void testPayloadCheckQuery() throws Exception { - expectThrows(AssertionError.class, "queries should not have been equal", - () -> assertQueryEquals - ("payload_check" - , "{!payload_check f=foo_dpf payloads=2}one" - , "{!payload_check f=foo_dpf payloads=2}two" - ) - ); + expectThrows( + AssertionError.class, + "queries should not have been equal", + () -> + assertQueryEquals( + "payload_check", + "{!payload_check f=foo_dpf payloads=2}one", + "{!payload_check f=foo_dpf payloads=2}two")); } public void testPayloadFunction() throws Exception { - SolrQueryRequest req = req("myField","bar_f"); + SolrQueryRequest req = req("myField", "bar_f"); try { - assertFuncEquals(req, - "payload(foo_dpf,some_term)", - "payload(foo_dpf,some_term)"); + assertFuncEquals(req, "payload(foo_dpf,some_term)", "payload(foo_dpf,some_term)"); } finally { req.close(); } } public void testBoolQuery() throws Exception { - assertQueryEquals("bool", - "{!bool must='{!lucene}foo_s:a' must='{!lucene}foo_s:b'}", - "{!bool must='{!lucene}foo_s:b' 
must='{!lucene}foo_s:a'}"); - assertQueryEquals("bool", - "{!bool must_not='{!lucene}foo_s:a' should='{!lucene}foo_s:b' " + - "must='{!lucene}foo_s:c' filter='{!lucene}foo_s:d' filter='{!lucene}foo_s:e'}", - "{!bool must='{!lucene}foo_s:c' filter='{!lucene}foo_s:d' " + - "must_not='{!lucene}foo_s:a' should='{!lucene}foo_s:b' filter='{!lucene}foo_s:e'}"); - - expectThrows(AssertionError.class, "queries should not have been equal", - () -> assertQueryEquals - ("bool" - , "{!bool must='{!lucene}foo_s:a'}" - , "{!bool should='{!lucene}foo_s:a'}" - ) - ); + assertQueryEquals( + "bool", + "{!bool must='{!lucene}foo_s:a' must='{!lucene}foo_s:b'}", + "{!bool must='{!lucene}foo_s:b' must='{!lucene}foo_s:a'}"); + assertQueryEquals( + "bool", + "{!bool must_not='{!lucene}foo_s:a' should='{!lucene}foo_s:b' " + + "must='{!lucene}foo_s:c' filter='{!lucene}foo_s:d' filter='{!lucene}foo_s:e'}", + "{!bool must='{!lucene}foo_s:c' filter='{!lucene}foo_s:d' " + + "must_not='{!lucene}foo_s:a' should='{!lucene}foo_s:b' filter='{!lucene}foo_s:e'}"); + + expectThrows( + AssertionError.class, + "queries should not have been equal", + () -> + assertQueryEquals( + "bool", "{!bool must='{!lucene}foo_s:a'}", "{!bool should='{!lucene}foo_s:a'}")); } public void testHashRangeQuery() throws Exception { - assertQueryEquals("hash_range", + assertQueryEquals( + "hash_range", "{!hash_range f=x_id l=107347968 u=214695935}", "{!hash_range l='107347968' u='214695935' f='x_id'}"); } diff --git a/solr/core/src/test/org/apache/solr/search/QueryParsingTest.java b/solr/core/src/test/org/apache/solr/search/QueryParsingTest.java index 2e695be1871..c423bda72ea 100644 --- a/solr/core/src/test/org/apache/solr/search/QueryParsingTest.java +++ b/solr/core/src/test/org/apache/solr/search/QueryParsingTest.java @@ -15,6 +15,7 @@ * limitations under the License. */ package org.apache.solr.search; + import org.apache.lucene.search.Query; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; @@ -22,32 +23,28 @@ import org.apache.solr.request.SolrQueryRequest; import org.junit.BeforeClass; -/** - * - * - **/ +/** */ public class QueryParsingTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); } /** - * Test that the main QParserPlugins people are likely to use - * as defaults fail with a consistent exception when the query string - * is either empty or null. + * Test that the main QParserPlugins people are likely to use as defaults fail with a consistent + * exception when the query string is either empty or null. 
+ * * @see SOLR-435 * @see SOLR-2001 */ public void testQParserEmptyInput() throws Exception { - + SolrQueryRequest req = req("df", "text"); - final String[] parsersTested = new String[] { - LuceneQParserPlugin.NAME, - DisMaxQParserPlugin.NAME, - ExtendedDismaxQParserPlugin.NAME - }; + final String[] parsersTested = + new String[] { + LuceneQParserPlugin.NAME, DisMaxQParserPlugin.NAME, ExtendedDismaxQParserPlugin.NAME + }; for (String defType : parsersTested) { for (String qstr : new String[] {null, ""}) { @@ -55,50 +52,48 @@ public void testQParserEmptyInput() throws Exception { try { parser = QParser.getParser(qstr, defType, req); } catch (Exception e) { - throw new RuntimeException("getParser excep using defType=" + - defType + " with qstr="+qstr, e); + throw new RuntimeException( + "getParser excep using defType=" + defType + " with qstr=" + qstr, e); } Query q = parser.parse(); - assertNull("expected no query",q); + assertNull("expected no query", q); } } } - + public void testLocalParamsWithModifiableSolrParams() throws Exception { ModifiableSolrParams target = new ModifiableSolrParams(); - QueryParsing.parseLocalParams("{!handler foo1=bar1 foo2=bar2 multi=loser multi=winner}", 0, target, new ModifiableSolrParams(), "{!", '}'); + QueryParsing.parseLocalParams( + "{!handler foo1=bar1 foo2=bar2 multi=loser multi=winner}", + 0, + target, + new ModifiableSolrParams(), + "{!", + '}'); assertEquals("bar1", target.get("foo1")); assertEquals("bar2", target.get("foo2")); - assertArrayEquals(new String[]{"loser", "winner"}, target.getParams("multi")); + assertArrayEquals(new String[] {"loser", "winner"}, target.getParams("multi")); } public void testLiteralFunction() throws Exception { - + final String NAME = FunctionQParserPlugin.NAME; SolrQueryRequest req = req("variable", "foobar"); - - assertNotNull(QParser.getParser - ("literal('a value')", - NAME, req).getQuery()); - assertNotNull(QParser.getParser - ("literal('a value')", - NAME, req).getQuery()); - assertNotNull(QParser.getParser - ("literal(\"a value\")", - NAME, req).getQuery()); - assertNotNull(QParser.getParser - ("literal($variable)", - NAME, req).getQuery()); - assertNotNull(QParser.getParser - ("strdist(\"a value\",literal('a value'),edit)", - NAME, req).getQuery()); + + assertNotNull(QParser.getParser("literal('a value')", NAME, req).getQuery()); + assertNotNull(QParser.getParser("literal('a value')", NAME, req).getQuery()); + assertNotNull(QParser.getParser("literal(\"a value\")", NAME, req).getQuery()); + assertNotNull(QParser.getParser("literal($variable)", NAME, req).getQuery()); + assertNotNull( + QParser.getParser("strdist(\"a value\",literal('a value'),edit)", NAME, req).getQuery()); } public void testGetQParser() throws Exception { // invalid defType - SolrException exception = expectThrows(SolrException.class, () -> h.query(req("q", "ad", "defType", "bleh"))); + SolrException exception = + expectThrows(SolrException.class, () -> h.query(req("q", "ad", "defType", "bleh"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, exception.code()); assertEquals("invalid query parser 'bleh' for query 'ad'", exception.getMessage()); @@ -113,16 +108,34 @@ public void testGetQParser() throws Exception { assertEquals("invalid query parser 'some' for query '{!some}'", exception.getMessage()); // invalid qparser with function queries - exception = expectThrows(SolrException.class, () -> h.query(req("q", "*:*", "defType", "edismax", "boost", "{!hmm}"))); + exception = + expectThrows( + SolrException.class, + () -> 
h.query(req("q", "*:*", "defType", "edismax", "boost", "{!hmm}"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, exception.code()); assertEquals("invalid query parser 'hmm' for query '{!hmm}'", exception.getMessage()); - exception = expectThrows(SolrException.class, () -> h.query(req("q", "*:*", "defType", "edismax", "boost", "query({!bleh v=ak})"))); + exception = + expectThrows( + SolrException.class, + () -> h.query(req("q", "*:*", "defType", "edismax", "boost", "query({!bleh v=ak})"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, exception.code()); assertEquals("invalid query parser 'bleh' for query '{!bleh v=ak}'", exception.getMessage()); - exception = expectThrows(SolrException.class, () -> - h.query(req("q", "*:*", "defType", "edismax", "boost", "query($qq)", "qq", "{!bleh v=a}"))); + exception = + expectThrows( + SolrException.class, + () -> + h.query( + req( + "q", + "*:*", + "defType", + "edismax", + "boost", + "query($qq)", + "qq", + "{!bleh v=a}"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, exception.code()); assertEquals("invalid query parser 'bleh' for query '{!bleh v=a}'", exception.getMessage()); @@ -132,7 +145,10 @@ public void testGetQParser() throws Exception { assertEquals("invalid query parser 'bleh' for query '{!bleh}'", exception.getMessage()); // with stats.field - exception = expectThrows(SolrException.class, () -> h.query(req("q", "*:*", "stats", "true", "stats.field", "{!bleh}"))); + exception = + expectThrows( + SolrException.class, + () -> h.query(req("q", "*:*", "stats", "true", "stats.field", "{!bleh}"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, exception.code()); assertEquals("invalid query parser 'bleh' for query '{!bleh}'", exception.getMessage()); } diff --git a/solr/core/src/test/org/apache/solr/search/RankQParserPluginTest.java b/solr/core/src/test/org/apache/solr/search/RankQParserPluginTest.java index 2e88ce28d92..4420fc00824 100644 --- a/solr/core/src/test/org/apache/solr/search/RankQParserPluginTest.java +++ b/solr/core/src/test/org/apache/solr/search/RankQParserPluginTest.java @@ -25,7 +25,6 @@ import static org.apache.solr.search.RankQParserPlugin.WEIGHT; import java.io.IOException; - import org.apache.lucene.search.Query; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.ModifiableSolrParams; @@ -37,222 +36,332 @@ import org.junit.BeforeClass; public class RankQParserPluginTest extends SolrTestCaseJ4 { - + @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig-minimal.xml", "schema-rank-fields.xml"); - } + } public void testParamCompatibility() { assertEquals("RankQParserPlugin.NAME changed in an incompatible way", "rank", NAME); assertEquals("RankQParserPlugin.FIELD changed in an incompatible way", "f", FIELD); assertEquals("RankQParserPlugin.FUNCTION changed in an incompatible way", "function", FUNCTION); assertEquals("RankQParserPlugin.PIVOT changed in an incompatible way", "pivot", PIVOT); - assertEquals("RankQParserPlugin.SCALING_FACTOR changed in an incompatible way", "scalingFactor", SCALING_FACTOR); + assertEquals( + "RankQParserPlugin.SCALING_FACTOR changed in an incompatible way", + "scalingFactor", + SCALING_FACTOR); assertEquals("RankQParserPlugin.WEIGHT changed in an incompatible way", "weight", WEIGHT); assertEquals("RankQParserPlugin.EXPONENT changed in an incompatible way", "exponent", EXPONENT); } - + public void testCreateParser() throws IOException { try (RankQParserPlugin rankQPPlugin = new RankQParserPlugin()) { - 
QParser parser = rankQPPlugin.createParser("", new ModifiableSolrParams(), null, req()); + QParser parser = rankQPPlugin.createParser("", new ModifiableSolrParams(), null, req()); assertNotNull(parser); assertTrue(parser instanceof RankQParser); } } - + public void testSyntaxErrors() throws IOException, SyntaxError { - assertSyntaxError("No Field", "Field can't be empty", () -> - getRankQParser(new ModifiableSolrParams(), null, req()).parse()); - assertSyntaxError("Field empty", "Field can't be empty", () -> - getRankQParser( - params(FIELD, ""), null, req()).parse()); - assertSyntaxError("Field doesn't exist", "Field \"foo\" not found", () -> - getRankQParser( - params(FIELD, "foo"), null, req()).parse()); - assertSyntaxError("ID is not a feature field", "Field \"id\" is not a RankField", () -> - getRankQParser( - params(FIELD, "id"), null, req()).parse()); + assertSyntaxError( + "No Field", + "Field can't be empty", + () -> getRankQParser(new ModifiableSolrParams(), null, req()).parse()); + assertSyntaxError( + "Field empty", + "Field can't be empty", + () -> getRankQParser(params(FIELD, ""), null, req()).parse()); + assertSyntaxError( + "Field doesn't exist", + "Field \"foo\" not found", + () -> getRankQParser(params(FIELD, "foo"), null, req()).parse()); + assertSyntaxError( + "ID is not a feature field", + "Field \"id\" is not a RankField", + () -> getRankQParser(params(FIELD, "id"), null, req()).parse()); } - + public void testBadLogParameters() throws IOException, SyntaxError { - assertSyntaxError("Expecting bad weight", "weight must be in", () -> - getRankQParser( - params(FIELD, "rank_1", - FUNCTION, "log", - WEIGHT, "0"), null, req()).parse()); - assertSyntaxError("Expecting bad scaling factor", "scalingFactor must be", () -> - getRankQParser( - params(FIELD, "rank_1", - FUNCTION, "log", - SCALING_FACTOR, "0"), null, req()).parse()); + assertSyntaxError( + "Expecting bad weight", + "weight must be in", + () -> + getRankQParser( + params( + FIELD, "rank_1", + FUNCTION, "log", + WEIGHT, "0"), + null, + req()) + .parse()); + assertSyntaxError( + "Expecting bad scaling factor", + "scalingFactor must be", + () -> + getRankQParser( + params( + FIELD, "rank_1", + FUNCTION, "log", + SCALING_FACTOR, "0"), + null, + req()) + .parse()); } - + public void testBadSaturationParameters() throws IOException, SyntaxError { - assertSyntaxError("Expecting a pivot value", "A pivot value", () -> - getRankQParser( - params(FIELD, "rank_1", - FUNCTION, "satu", - WEIGHT, "2"), null, req()).parse()); - assertSyntaxError("Expecting bad weight", "weight must be in", () -> - getRankQParser( - params(FIELD, "rank_1", - FUNCTION, "satu", - PIVOT, "1", - WEIGHT, "-1"), null, req()).parse()); + assertSyntaxError( + "Expecting a pivot value", + "A pivot value", + () -> + getRankQParser( + params( + FIELD, "rank_1", + FUNCTION, "satu", + WEIGHT, "2"), + null, + req()) + .parse()); + assertSyntaxError( + "Expecting bad weight", + "weight must be in", + () -> + getRankQParser( + params( + FIELD, "rank_1", + FUNCTION, "satu", + PIVOT, "1", + WEIGHT, "-1"), + null, + req()) + .parse()); } - + public void testBadSigmoidParameters() throws IOException, SyntaxError { - assertSyntaxError("Expecting missing pivot", "A pivot value", () -> - getRankQParser( - params(FIELD, "rank_1", - FUNCTION, "sigm", - EXPONENT, "1"), null, req()).parse()); - assertSyntaxError("Expecting missing exponent", "An exponent value", () -> - getRankQParser( - params(FIELD, "rank_1", - FUNCTION, "sigm", - PIVOT, "1"), null, req()).parse()); - 
assertSyntaxError("Expecting bad weight", "weight must be in", () -> - getRankQParser( - params(FIELD, "rank_1", - FUNCTION, "sigm", - PIVOT, "1", - EXPONENT, "1", - WEIGHT, "-1"), null, req()).parse()); - assertSyntaxError("Expecting bad pivot", "pivot must be", () -> - getRankQParser( - params(FIELD, "rank_1", - FUNCTION, "sigm", - PIVOT, "0", - EXPONENT, "1"), null, req()).parse()); - assertSyntaxError("Expecting bad exponent", "exp must be", () -> - getRankQParser( - params(FIELD, "rank_1", - FUNCTION, "sigm", - PIVOT, "1", - EXPONENT, "0"), null, req()).parse()); + assertSyntaxError( + "Expecting missing pivot", + "A pivot value", + () -> + getRankQParser( + params( + FIELD, "rank_1", + FUNCTION, "sigm", + EXPONENT, "1"), + null, + req()) + .parse()); + assertSyntaxError( + "Expecting missing exponent", + "An exponent value", + () -> + getRankQParser( + params( + FIELD, "rank_1", + FUNCTION, "sigm", + PIVOT, "1"), + null, + req()) + .parse()); + assertSyntaxError( + "Expecting bad weight", + "weight must be in", + () -> + getRankQParser( + params( + FIELD, "rank_1", + FUNCTION, "sigm", + PIVOT, "1", + EXPONENT, "1", + WEIGHT, "-1"), + null, + req()) + .parse()); + assertSyntaxError( + "Expecting bad pivot", + "pivot must be", + () -> + getRankQParser( + params( + FIELD, "rank_1", + FUNCTION, "sigm", + PIVOT, "0", + EXPONENT, "1"), + null, + req()) + .parse()); + assertSyntaxError( + "Expecting bad exponent", + "exp must be", + () -> + getRankQParser( + params( + FIELD, "rank_1", + FUNCTION, "sigm", + PIVOT, "1", + EXPONENT, "0"), + null, + req()) + .parse()); } - + public void testUnknownFunction() throws IOException, SyntaxError { - assertSyntaxError("Expecting bad function", "Unknown function in rank query: \"foo\"", () -> - getRankQParser( - params(FIELD, "rank_1", - FUNCTION, "foo"), null, req()).parse()); + assertSyntaxError( + "Expecting bad function", + "Unknown function in rank query: \"foo\"", + () -> + getRankQParser( + params( + FIELD, "rank_1", + FUNCTION, "foo"), + null, + req()) + .parse()); } - + public void testParseLog() throws IOException, SyntaxError { - assertValidRankQuery(expectedFeatureQueryToString("rank_1", expectedLogToString(1), 1), - params(FIELD, "rank_1", - FUNCTION, "log", - SCALING_FACTOR, "1", - WEIGHT, "1")); - - assertValidRankQuery(expectedFeatureQueryToString("rank_1", expectedLogToString(2.5f), 1), - params(FIELD, "rank_1", - FUNCTION, "log", - SCALING_FACTOR, "2.5", - WEIGHT, "1")); - - assertValidRankQuery(expectedFeatureQueryToString("rank_1", expectedLogToString(1), 2.5f), - params(FIELD, "rank_1", - FUNCTION, "log", - SCALING_FACTOR, "1", - WEIGHT, "2.5")); - - assertValidRankQuery(expectedFeatureQueryToString("rank_1", expectedLogToString(1), 2.5f), - params(FIELD, "rank_1", - FUNCTION, "Log", //use different case - SCALING_FACTOR, "1", - WEIGHT, "2.5")); + assertValidRankQuery( + expectedFeatureQueryToString("rank_1", expectedLogToString(1), 1), + params( + FIELD, "rank_1", + FUNCTION, "log", + SCALING_FACTOR, "1", + WEIGHT, "1")); + + assertValidRankQuery( + expectedFeatureQueryToString("rank_1", expectedLogToString(2.5f), 1), + params( + FIELD, "rank_1", + FUNCTION, "log", + SCALING_FACTOR, "2.5", + WEIGHT, "1")); + + assertValidRankQuery( + expectedFeatureQueryToString("rank_1", expectedLogToString(1), 2.5f), + params( + FIELD, "rank_1", + FUNCTION, "log", + SCALING_FACTOR, "1", + WEIGHT, "2.5")); + + assertValidRankQuery( + expectedFeatureQueryToString("rank_1", expectedLogToString(1), 2.5f), + params( + FIELD, "rank_1", + FUNCTION, 
"Log", // use different case + SCALING_FACTOR, "1", + WEIGHT, "2.5")); } - + public void testParseSigm() throws IOException, SyntaxError { - assertValidRankQuery(expectedFeatureQueryToString("rank_1", expectedSigmoidToString(1.5f, 2f), 1), - params(FIELD, "rank_1", - FUNCTION, "sigm", - PIVOT, "1.5", - EXPONENT, "2", - WEIGHT, "1")); - - assertValidRankQuery(expectedFeatureQueryToString("rank_1", expectedSigmoidToString(1.5f, 2f), 2), - params(FIELD, "rank_1", - FUNCTION, "sigm", - PIVOT, "1.5", - EXPONENT, "2", - WEIGHT, "2")); + assertValidRankQuery( + expectedFeatureQueryToString("rank_1", expectedSigmoidToString(1.5f, 2f), 1), + params( + FIELD, "rank_1", + FUNCTION, "sigm", + PIVOT, "1.5", + EXPONENT, "2", + WEIGHT, "1")); + + assertValidRankQuery( + expectedFeatureQueryToString("rank_1", expectedSigmoidToString(1.5f, 2f), 2), + params( + FIELD, "rank_1", + FUNCTION, "sigm", + PIVOT, "1.5", + EXPONENT, "2", + WEIGHT, "2")); } public void testParseSatu() throws IOException, SyntaxError { - - assertValidRankQuery(expectedFeatureQueryToString("rank_1", expectedSaturationToString(1.5f), 1), - params(FIELD, "rank_1", - FUNCTION, "satu", - PIVOT, "1.5", - WEIGHT, "1")); - - assertValidRankQuery(expectedFeatureQueryToString("rank_1", expectedSaturationToString(1.5f), 2), - params(FIELD, "rank_1", - FUNCTION, "satu", - PIVOT, "1.5", - WEIGHT, "2")); - - assertValidRankQuery(expectedFeatureQueryToString("rank_1", expectedSaturationToString(null), 1), - params(FIELD, "rank_1", - FUNCTION, "satu")); - - assertValidRankQuery(expectedFeatureQueryToString("rank_1", expectedSaturationToString(null), 1), - params(FIELD, "rank_1", - FUNCTION, "satu", - WEIGHT, "1")); - - assertValidRankQuery(expectedFeatureQueryToString("rank_1", expectedSaturationToString(1.5f), 1), - params(FIELD, "rank_1", - FUNCTION, "satu", - PIVOT, "1.5")); + + assertValidRankQuery( + expectedFeatureQueryToString("rank_1", expectedSaturationToString(1.5f), 1), + params( + FIELD, "rank_1", + FUNCTION, "satu", + PIVOT, "1.5", + WEIGHT, "1")); + + assertValidRankQuery( + expectedFeatureQueryToString("rank_1", expectedSaturationToString(1.5f), 2), + params( + FIELD, "rank_1", + FUNCTION, "satu", + PIVOT, "1.5", + WEIGHT, "2")); + + assertValidRankQuery( + expectedFeatureQueryToString("rank_1", expectedSaturationToString(null), 1), + params( + FIELD, "rank_1", + FUNCTION, "satu")); + + assertValidRankQuery( + expectedFeatureQueryToString("rank_1", expectedSaturationToString(null), 1), + params( + FIELD, "rank_1", + FUNCTION, "satu", + WEIGHT, "1")); + + assertValidRankQuery( + expectedFeatureQueryToString("rank_1", expectedSaturationToString(1.5f), 1), + params( + FIELD, "rank_1", + FUNCTION, "satu", + PIVOT, "1.5")); } - + public void testParseDefault() throws IOException, SyntaxError { - assertValidRankQuery(expectedFeatureQueryToString("rank_1", expectedSaturationToString(null), 1), + assertValidRankQuery( + expectedFeatureQueryToString("rank_1", expectedSaturationToString(null), 1), params(FIELD, "rank_1")); } - - private void assertValidRankQuery(String expctedToString, SolrParams localParams) throws IOException, SyntaxError { + + private void assertValidRankQuery(String expctedToString, SolrParams localParams) + throws IOException, SyntaxError { QParser parser = getRankQParser(localParams, null, req()); Query q = parser.parse(); assertNotNull(q); assertThat(q.toString(), CoreMatchers.equalTo(expctedToString)); } - + private String expectedFeatureQueryToString(String fieldName, String function, float boost) { - String 
featureQueryStr = "FeatureQuery(field=" + RankField.INTERNAL_RANK_FIELD_NAME + ", feature=" + fieldName + ", function=" + function + ")"; + String featureQueryStr = + "FeatureQuery(field=" + + RankField.INTERNAL_RANK_FIELD_NAME + + ", feature=" + + fieldName + + ", function=" + + function + + ")"; if (boost == 1f) { return featureQueryStr; } return "(" + featureQueryStr + ")^" + boost; } - + private String expectedLogToString(float scalingFactor) { return "LogFunction(scalingFactor=" + scalingFactor + ")"; } - + private String expectedSigmoidToString(float pivot, float exp) { return "SigmoidFunction(pivot=" + pivot + ", a=" + exp + ")"; } - + private String expectedSaturationToString(Float pivot) { return "SaturationFunction(pivot=" + pivot + ")"; } - - private void assertSyntaxError(String assertionMsg, String expectedExceptionMsg, ThrowingRunnable runnable) { + + private void assertSyntaxError( + String assertionMsg, String expectedExceptionMsg, ThrowingRunnable runnable) { SyntaxError se = expectThrows(SyntaxError.class, assertionMsg, runnable); assertThat(se.getMessage(), CoreMatchers.containsString(expectedExceptionMsg)); } - - private RankQParser getRankQParser(SolrParams localParams, SolrParams params, SolrQueryRequest req) throws IOException { + + private RankQParser getRankQParser( + SolrParams localParams, SolrParams params, SolrQueryRequest req) throws IOException { try (RankQParserPlugin rankQPPlugin = new RankQParserPlugin()) { return (RankQParser) rankQPPlugin.createParser("", localParams, params, req); } } - } diff --git a/solr/core/src/test/org/apache/solr/search/RankQueryTest.java b/solr/core/src/test/org/apache/solr/search/RankQueryTest.java index af7ad4aeb7c..21ba5369274 100644 --- a/solr/core/src/test/org/apache/solr/search/RankQueryTest.java +++ b/solr/core/src/test/org/apache/solr/search/RankQueryTest.java @@ -39,75 +39,71 @@ public void setUp() throws Exception { assertU(commit()); } - @Test public void testPluggableCollector() throws Exception { - String[] doc = {"id","1", "sort_i", "100"}; + String[] doc = {"id", "1", "sort_i", "100"}; assertU(adoc(doc)); assertU(commit()); - String[] doc1 = {"id","2", "sort_i", "50"}; + String[] doc1 = {"id", "2", "sort_i", "50"}; assertU(adoc(doc1)); - - - String[] doc2 = {"id","3", "sort_i", "1000"}; + String[] doc2 = {"id", "3", "sort_i", "1000"}; assertU(adoc(doc2)); assertU(commit()); - String[] doc3 = {"id","4", "sort_i", "2000"}; + String[] doc3 = {"id", "4", "sort_i", "2000"}; assertU(adoc(doc3)); - - String[] doc4 = {"id","5", "sort_i", "2"}; + String[] doc4 = {"id", "5", "sort_i", "2"}; assertU(adoc(doc4)); assertU(commit()); - String[] doc5 = {"id","6", "sort_i","11"}; + String[] doc5 = {"id", "6", "sort_i", "11"}; assertU(adoc(doc5)); assertU(commit()); - ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("rq", "{!rank}"); - params.add("sort","sort_i asc"); + params.add("sort", "sort_i asc"); - assertQ(req(params), "*[count(//doc)=6]", + assertQ( + req(params), + "*[count(//doc)=6]", "//result/doc[1]/str[@name='id'][.='4']", "//result/doc[2]/str[@name='id'][.='3']", "//result/doc[3]/str[@name='id'][.='1']", "//result/doc[4]/str[@name='id'][.='2']", "//result/doc[5]/str[@name='id'][.='6']", - "//result/doc[6]/str[@name='id'][.='5']" - ); + "//result/doc[6]/str[@name='id'][.='5']"); params = new ModifiableSolrParams(); params.add("q", "{!edismax bf=$bff}*:*"); params.add("bff", "field(sort_i)"); params.add("rq", "{!rank collector=1}"); - assertQ(req(params), "*[count(//doc)=6]", 
+ assertQ( + req(params), + "*[count(//doc)=6]", "//result/doc[6]/str[@name='id'][.='4']", "//result/doc[5]/str[@name='id'][.='3']", "//result/doc[4]/str[@name='id'][.='1']", "//result/doc[3]/str[@name='id'][.='2']", "//result/doc[2]/str[@name='id'][.='6']", - "//result/doc[1]/str[@name='id'][.='5']" - ); - + "//result/doc[1]/str[@name='id'][.='5']"); params = new ModifiableSolrParams(); params.add("q", "*:*"); - params.add("sort","sort_i asc"); + params.add("sort", "sort_i asc"); - assertQ(req(params), "*[count(//doc)=6]", + assertQ( + req(params), + "*[count(//doc)=6]", "//result/doc[6]/str[@name='id'][.='4']", "//result/doc[5]/str[@name='id'][.='3']", "//result/doc[4]/str[@name='id'][.='1']", "//result/doc[3]/str[@name='id'][.='2']", "//result/doc[2]/str[@name='id'][.='6']", - "//result/doc[1]/str[@name='id'][.='5']" - ); - + "//result/doc[1]/str[@name='id'][.='5']"); } } diff --git a/solr/core/src/test/org/apache/solr/search/RankQueryTestPlugin.java b/solr/core/src/test/org/apache/solr/search/RankQueryTestPlugin.java index 1aeaf91eaaa..266243e6021 100644 --- a/solr/core/src/test/org/apache/solr/search/RankQueryTestPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/RankQueryTestPlugin.java @@ -25,7 +25,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; - import org.apache.lucene.index.DocValues; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReaderContext; @@ -66,17 +65,17 @@ import org.apache.solr.schema.IndexSchema; import org.apache.solr.schema.SchemaField; - public class RankQueryTestPlugin extends QParserPlugin { - - public QParser createParser(String query, SolrParams localParams, SolrParams params, SolrQueryRequest req) { + public QParser createParser( + String query, SolrParams localParams, SolrParams params, SolrQueryRequest req) { return new TestRankQueryParser(query, localParams, params, req); } static class TestRankQueryParser extends QParser { - public TestRankQueryParser(String query, SolrParams localParams, SolrParams params, SolrQueryRequest req) { + public TestRankQueryParser( + String query, SolrParams localParams, SolrParams params, SolrQueryRequest req) { super(query, localParams, params, req); } @@ -95,27 +94,26 @@ static class TestRankQuery extends RankQuery { private Query q; public int hashCode() { - return collector+q.hashCode(); + return collector + q.hashCode(); } public boolean equals(Object o) { - if(o instanceof TestRankQuery) { - TestRankQuery trq = (TestRankQuery)o; + if (o instanceof TestRankQuery) { + TestRankQuery trq = (TestRankQuery) o; - return (trq.q.equals(q) && trq.collector == collector) ; + return (trq.q.equals(q) && trq.collector == collector); } return false; } - public Weight createWeight(IndexSearcher indexSearcher, ScoreMode scoreMode, float boost) throws IOException{ + public Weight createWeight(IndexSearcher indexSearcher, ScoreMode scoreMode, float boost) + throws IOException { return q.createWeight(indexSearcher, scoreMode, boost); } @Override - public void visit(QueryVisitor visitor) { - - } + public void visit(QueryVisitor visitor) {} @Override public String toString(String field) { @@ -132,18 +130,15 @@ public TestRankQuery(int collector, int mergeStrategy) { this.mergeStrategy = mergeStrategy; } - public TopDocsCollector getTopDocsCollector(int len, QueryCommand cmd, IndexSearcher searcher) { - if(collector == 0) - return new TestCollector(null); - else - return new TestCollector1(null); + public TopDocsCollector getTopDocsCollector( + int len, QueryCommand 
cmd, IndexSearcher searcher) { + if (collector == 0) return new TestCollector(null); + else return new TestCollector1(null); } public MergeStrategy getMergeStrategy() { - if(mergeStrategy == 0) - return new TestMergeStrategy(); - else - return new TestMergeStrategy1(); + if (mergeStrategy == 0) return new TestMergeStrategy(); + else return new TestMergeStrategy1(); } } @@ -161,57 +156,53 @@ public boolean handlesMergeFields() { return false; } - public void handleMergeFields(ResponseBuilder rb, SolrIndexSearcher searcher) { - - } + public void handleMergeFields(ResponseBuilder rb, SolrIndexSearcher searcher) {} @SuppressWarnings({"unchecked"}) public void merge(ResponseBuilder rb, ShardRequest sreq) { // id to shard mapping, to eliminate any accidental dups - HashMap uniqueDoc = new HashMap<>(); - + HashMap uniqueDoc = new HashMap<>(); NamedList shardInfo = null; - if(rb.req.getParams().getBool(ShardParams.SHARDS_INFO, false)) { + if (rb.req.getParams().getBool(ShardParams.SHARDS_INFO, false)) { shardInfo = new SimpleOrderedMap<>(); - rb.rsp.getValues().add(ShardParams.SHARDS_INFO,shardInfo); + rb.rsp.getValues().add(ShardParams.SHARDS_INFO, shardInfo); } IndexSchema schema = rb.req.getSchema(); SchemaField uniqueKeyField = schema.getUniqueKeyField(); long numFound = 0; - Float maxScore=null; + Float maxScore = null; boolean partialResults = false; List shardDocs = new ArrayList<>(); for (ShardResponse srsp : sreq.responses) { SolrDocumentList docs = null; - if(shardInfo!=null) { + if (shardInfo != null) { SimpleOrderedMap nl = new SimpleOrderedMap<>(); if (srsp.getException() != null) { Throwable t = srsp.getException(); - if(t instanceof SolrServerException) { - t = ((SolrServerException)t).getCause(); + if (t instanceof SolrServerException) { + t = ((SolrServerException) t).getCause(); } - nl.add("error", t.toString() ); + nl.add("error", t.toString()); StringWriter trace = new StringWriter(); t.printStackTrace(new PrintWriter(trace)); - nl.add("trace", trace.toString() ); + nl.add("trace", trace.toString()); if (srsp.getShardAddress() != null) { nl.add("shardAddress", srsp.getShardAddress()); } - } - else { - docs = (SolrDocumentList)srsp.getSolrResponse().getResponse().get("response"); + } else { + docs = (SolrDocumentList) srsp.getSolrResponse().getResponse().get("response"); nl.add("numFound", docs.getNumFound()); nl.add("maxScore", docs.getMaxScore()); nl.add("shardAddress", srsp.getShardAddress()); } - if(srsp.getSolrResponse()!=null) { + if (srsp.getSolrResponse() != null) { nl.add("time", srsp.getSolrResponse().getElapsedTime()); } @@ -224,22 +215,24 @@ public void merge(ResponseBuilder rb, ShardRequest sreq) { } if (docs == null) { // could have been initialized in the shards info block above - docs = (SolrDocumentList)srsp.getSolrResponse().getResponse().get("response"); + docs = (SolrDocumentList) srsp.getSolrResponse().getResponse().get("response"); } - NamedList responseHeader = (NamedList)srsp.getSolrResponse().getResponse().get("responseHeader"); - if (responseHeader != null && Boolean.TRUE.equals(responseHeader.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))) { + NamedList responseHeader = + (NamedList) srsp.getSolrResponse().getResponse().get("responseHeader"); + if (responseHeader != null + && Boolean.TRUE.equals( + responseHeader.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))) { partialResults = true; } // calculate global maxScore and numDocsFound if (docs.getMaxScore() != null) { - maxScore = maxScore==null ? 
docs.getMaxScore() : Math.max(maxScore, docs.getMaxScore()); + maxScore = maxScore == null ? docs.getMaxScore() : Math.max(maxScore, docs.getMaxScore()); } numFound += docs.getNumFound(); - - for (int i=0; i { - if (o1.score < o2.score) { - return 1; - } else if (o1.score > o2.score) { - return -1; - } else { - return 0; //To change body of implemented methods use File | Settings | File Templates. - } - }); + Collections.sort( + shardDocs, + (o1, o2) -> { + if (o1.score < o2.score) { + return 1; + } else if (o1.score > o2.score) { + return -1; + } else { + return 0; + } + }); int resultSize = shardDocs.size(); - Map resultIds = new HashMap<>(); - for (int i=0; i resultIds = new HashMap<>(); + for (int i = 0; i < shardDocs.size(); i++) { ShardDoc shardDoc = shardDocs.get(i); shardDoc.positionInResponse = i; // Need the toString() for correlation with other lists that must @@ -301,11 +296,11 @@ public void merge(ResponseBuilder rb, ShardRequest sreq) { rb.rsp.addToLog("hits", numFound); SolrDocumentList responseDocs = new SolrDocumentList(); - if (maxScore!=null) responseDocs.setMaxScore(maxScore); + if (maxScore != null) responseDocs.setMaxScore(maxScore); responseDocs.setNumFound(numFound); responseDocs.setStart(0); // size appropriately - for (int i=0; i sortVals = new NamedList<>(); // order is important for the sort fields IndexReaderContext topReaderContext = searcher.getTopReaderContext(); List leaves = topReaderContext.leaves(); LeafReaderContext currentLeaf = null; - if (leaves.size()==1) { + if (leaves.size() == 1) { // if there is a single segment, use that subReader and avoid looking up each time currentLeaf = leaves.get(0); - leaves=null; + leaves = null; } DocList docList = rb.getResults().docList; @@ -361,8 +359,8 @@ public void handleMergeFields(ResponseBuilder rb, SolrIndexSearcher searcher) th final float[] scores = new float[nDocs]; // doc scores, parallel to sortedIds DocList docs = rb.getResults().docList; DocIterator it = docs.iterator(); - for (int i=0; i schemaFields = sortSpec.getSchemaFields(); for (int fld = 0; fld < schemaFields.size(); fld++) { SchemaField schemaField = schemaFields.get(fld); - FieldType ft = null == schemaField? null : schemaField.getType(); + FieldType ft = null == schemaField ? 
null : schemaField.getType(); SortField sortField = sortFields[fld]; SortField.Type type = sortField.getType(); // :TODO: would be simpler to always serialize every position of SortField[] - if (type==SortField.Type.SCORE || type==SortField.Type.DOC) continue; + if (type == SortField.Type.SCORE || type == SortField.Type.DOC) continue; FieldComparator comparator = null; LeafFieldComparator leafComparator = null; @@ -408,8 +407,8 @@ protected int compare(int i, int j) { for (int i = 0; i < sortedIds.length; ++i) { long idAndPos = sortedIds[i]; float score = scores[i]; - int doc = (int)(idAndPos >>> 32); - int position = (int)idAndPos; + int doc = (int) (idAndPos >>> 32); + int position = (int) idAndPos; if (leaves != null) { idx = ReaderUtil.subIndex(doc, leaves); @@ -421,11 +420,11 @@ protected int compare(int i, int j) { } if (comparator == null) { - comparator = sortField.getComparator(1,0); + comparator = sortField.getComparator(1, 0); leafComparator = comparator.getLeafComparator(currentLeaf); } - doc -= currentLeaf.docBase; // adjust for what segment this is in + doc -= currentLeaf.docBase; // adjust for what segment this is in leafComparator.setScorer(new ScoreAndDoc(doc, score)); leafComparator.copy(0, doc); Object val = comparator.value(0); @@ -465,49 +464,47 @@ public float score() { public void merge(ResponseBuilder rb, ShardRequest sreq) { // id to shard mapping, to eliminate any accidental dups - HashMap uniqueDoc = new HashMap<>(); - + HashMap uniqueDoc = new HashMap<>(); NamedList shardInfo = null; - if(rb.req.getParams().getBool(ShardParams.SHARDS_INFO, false)) { + if (rb.req.getParams().getBool(ShardParams.SHARDS_INFO, false)) { shardInfo = new SimpleOrderedMap<>(); - rb.rsp.getValues().add(ShardParams.SHARDS_INFO,shardInfo); + rb.rsp.getValues().add(ShardParams.SHARDS_INFO, shardInfo); } IndexSchema schema = rb.req.getSchema(); SchemaField uniqueKeyField = schema.getUniqueKeyField(); long numFound = 0; - Float maxScore=null; + Float maxScore = null; boolean partialResults = false; List shardDocs = new ArrayList<>(); for (ShardResponse srsp : sreq.responses) { SolrDocumentList docs = null; - if(shardInfo!=null) { + if (shardInfo != null) { SimpleOrderedMap nl = new SimpleOrderedMap<>(); if (srsp.getException() != null) { Throwable t = srsp.getException(); - if(t instanceof SolrServerException) { - t = ((SolrServerException)t).getCause(); + if (t instanceof SolrServerException) { + t = ((SolrServerException) t).getCause(); } - nl.add("error", t.toString() ); + nl.add("error", t.toString()); StringWriter trace = new StringWriter(); t.printStackTrace(new PrintWriter(trace)); - nl.add("trace", trace.toString() ); + nl.add("trace", trace.toString()); if (srsp.getShardAddress() != null) { nl.add("shardAddress", srsp.getShardAddress()); } - } - else { - docs = (SolrDocumentList)srsp.getSolrResponse().getResponse().get("response"); + } else { + docs = (SolrDocumentList) srsp.getSolrResponse().getResponse().get("response"); nl.add("numFound", docs.getNumFound()); nl.add("maxScore", docs.getMaxScore()); nl.add("shardAddress", srsp.getShardAddress()); } - if(srsp.getSolrResponse()!=null) { + if (srsp.getSolrResponse() != null) { nl.add("time", srsp.getSolrResponse().getElapsedTime()); } @@ -520,17 +517,20 @@ public void merge(ResponseBuilder rb, ShardRequest sreq) { } if (docs == null) { // could have been initialized in the shards info block above - docs = (SolrDocumentList)srsp.getSolrResponse().getResponse().get("response"); + docs = (SolrDocumentList) 
srsp.getSolrResponse().getResponse().get("response"); } - NamedList responseHeader = (NamedList)srsp.getSolrResponse().getResponse().get("responseHeader"); - if (responseHeader != null && Boolean.TRUE.equals(responseHeader.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))) { + NamedList responseHeader = + (NamedList) srsp.getSolrResponse().getResponse().get("responseHeader"); + if (responseHeader != null + && Boolean.TRUE.equals( + responseHeader.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))) { partialResults = true; } // calculate global maxScore and numDocsFound if (docs.getMaxScore() != null) { - maxScore = maxScore==null ? docs.getMaxScore() : Math.max(maxScore, docs.getMaxScore()); + maxScore = maxScore == null ? docs.getMaxScore() : Math.max(maxScore, docs.getMaxScore()); } numFound += docs.getNumFound(); @@ -538,13 +538,14 @@ public void merge(ResponseBuilder rb, ShardRequest sreq) { Sort sort = ss.getSort(); @SuppressWarnings({"rawtypes"}) - NamedList sortFieldValues = (NamedList)(srsp.getSolrResponse().getResponse().get("merge_values")); + NamedList sortFieldValues = + (NamedList) (srsp.getSolrResponse().getResponse().get("merge_values")); @SuppressWarnings({"rawtypes"}) NamedList unmarshalledSortFieldValues = unmarshalSortValues(ss, sortFieldValues, schema); @SuppressWarnings({"rawtypes"}) - List lst = (List)unmarshalledSortFieldValues.getVal(0); + List lst = (List) unmarshalledSortFieldValues.getVal(0); - for (int i=0; i { - if (o1.score < o2.score) { - return 1; - } else if (o1.score > o2.score) { - return -1; - } else { - return 0; //To change body of implemented methods use File | Settings | File Templates. - } - }); + Collections.sort( + shardDocs, + (o1, o2) -> { + if (o1.score < o2.score) { + return 1; + } else if (o1.score > o2.score) { + return -1; + } else { + return 0; + } + }); int resultSize = shardDocs.size(); - Map resultIds = new HashMap<>(); - for (int i=0; i resultIds = new HashMap<>(); + for (int i = 0; i < shardDocs.size(); i++) { ShardDoc shardDoc = shardDocs.get(i); shardDoc.positionInResponse = i; // Need the toString() for correlation with other lists that must @@ -602,11 +605,11 @@ public void merge(ResponseBuilder rb, ShardRequest sreq) { rb.rsp.addToLog("hits", numFound); SolrDocumentList responseDocs = new SolrDocumentList(); - if (maxScore!=null) responseDocs.setMaxScore(maxScore); + if (maxScore != null) responseDocs.setMaxScore(maxScore); responseDocs.setNumFound(numFound); responseDocs.setStart(0); // size appropriately - for (int i=0; i(); if (0 == sortFieldValues.size()) return unmarshalledSortValsPerField; @@ -636,14 +640,14 @@ private NamedList unmarshalSortValues(SortSpec sortSpec, final SortField.Type type = sortField.getType(); // :TODO: would be simpler to always serialize every position of SortField[] - if (type==SortField.Type.SCORE || type==SortField.Type.DOC) continue; + if (type == SortField.Type.SCORE || type == SortField.Type.DOC) continue; final String sortFieldName = sortField.getField(); final String valueFieldName = sortFieldValues.getName(marshalledFieldNum); assert sortFieldName.equals(valueFieldName) : "sortFieldValues name key does not match expected SortField.getField"; - List sortVals = (List)sortFieldValues.getVal(marshalledFieldNum); + List sortVals = (List) sortFieldValues.getVal(marshalledFieldNum); final SchemaField schemaField = schemaFields.get(sortFieldNum); if (null == schemaField) { @@ -662,7 +666,6 @@ private NamedList unmarshalSortValues(SortSpec sortSpec, } } - static class TestCollector 
extends TopDocsCollector { private List list = new ArrayList<>(); @@ -676,10 +679,10 @@ public LeafCollector getLeafCollector(LeafReaderContext context) throws IOExcept final int base = context.docBase; final NumericDocValues values = DocValues.getNumeric(context.reader(), "sort_i"); return new LeafCollector() { - + @Override public void setScorer(Scorable scorer) throws IOException {} - + public void collect(int doc) throws IOException { long value; if (values.advanceExact(doc)) { @@ -687,7 +690,7 @@ public void collect(int doc) throws IOException { } else { value = 0; } - list.add(new ScoreDoc(doc+base, (float) value)); + list.add(new ScoreDoc(doc + base, (float) value)); } }; } @@ -697,17 +700,19 @@ public int topDocsSize() { } public TopDocs topDocs() { - Collections.sort(list, new Comparator<>() { - public int compare(ScoreDoc s1, ScoreDoc s2) { - if (s1.score == s2.score) { - return 0; - } else if (s1.score < s2.score) { - return 1; - } else { - return -1; - } - } - }); + Collections.sort( + list, + new Comparator<>() { + public int compare(ScoreDoc s1, ScoreDoc s2) { + if (s1.score == s2.score) { + return 0; + } else if (s1.score < s2.score) { + return 1; + } else { + return -1; + } + } + }); ScoreDoc[] scoreDocs = list.toArray(new ScoreDoc[list.size()]); return new TopDocs(new TotalHits(list.size(), TotalHits.Relation.EQUAL_TO), scoreDocs); } @@ -719,7 +724,7 @@ public TopDocs topDocs(int start, int len) { public int getTotalHits() { return list.size(); } - + @Override public ScoreMode scoreMode() { return ScoreMode.COMPLETE; @@ -738,16 +743,16 @@ public TestCollector1(PriorityQueue pq) { public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { final int base = context.docBase; return new LeafCollector() { - + Scorable scorer; - + @Override public void setScorer(Scorable scorer) throws IOException { this.scorer = scorer; } - + public void collect(int doc) throws IOException { - list.add(new ScoreDoc(doc+base, scorer.score())); + list.add(new ScoreDoc(doc + base, scorer.score())); } }; } @@ -757,17 +762,19 @@ public int topDocsSize() { } public TopDocs topDocs() { - Collections.sort(list, new Comparator<>() { - public int compare(ScoreDoc s1, ScoreDoc s2) { - if (s1.score == s2.score) { - return 0; - } else if (s1.score > s2.score) { - return 1; - } else { - return -1; - } - } - }); + Collections.sort( + list, + new Comparator<>() { + public int compare(ScoreDoc s1, ScoreDoc s2) { + if (s1.score == s2.score) { + return 0; + } else if (s1.score > s2.score) { + return 1; + } else { + return -1; + } + } + }); ScoreDoc[] scoreDocs = list.toArray(new ScoreDoc[list.size()]); return new TopDocs(new TotalHits(list.size(), TotalHits.Relation.EQUAL_TO), scoreDocs); } @@ -779,11 +786,10 @@ public TopDocs topDocs(int start, int len) { public int getTotalHits() { return list.size(); } - + @Override public ScoreMode scoreMode() { return ScoreMode.COMPLETE; } } - } diff --git a/solr/core/src/test/org/apache/solr/search/ReturnFieldsTest.java b/solr/core/src/test/org/apache/solr/search/ReturnFieldsTest.java index 3e5f787830e..00284f11801 100644 --- a/solr/core/src/test/org/apache/solr/search/ReturnFieldsTest.java +++ b/solr/core/src/test/org/apache/solr/search/ReturnFieldsTest.java @@ -16,27 +16,25 @@ */ package org.apache.solr.search; -import org.apache.lucene.util.TestUtil; -import org.apache.lucene.document.Document; import static org.apache.lucene.document.Field.Store; -import org.apache.lucene.document.StringField; - -import org.apache.solr.SolrTestCaseJ4; -import 
org.apache.solr.common.SolrDocument; -import org.apache.solr.response.transform.*; import static org.apache.solr.response.DocsStreamer.convertLuceneDocToSolrDoc; -import org.apache.solr.schema.IndexSchema; - -import org.junit.BeforeClass; -import org.junit.Test; import java.lang.reflect.Method; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; -import java.util.Locale; import java.util.List; +import java.util.Locale; import java.util.Random; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.StringField; +import org.apache.lucene.util.TestUtil; +import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.common.SolrDocument; +import org.apache.solr.response.transform.*; +import org.apache.solr.schema.IndexSchema; +import org.junit.BeforeClass; +import org.junit.Test; public class ReturnFieldsTest extends SolrTestCaseJ4 { @@ -47,9 +45,9 @@ public static void beforeClass() throws Exception { System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_ initCore("solrconfig.xml", "schema12.xml"); String v = "how now brown cow"; - assertU(adoc("id","1", "text",v, "text_np", v, "#foo_s", v)); + assertU(adoc("id", "1", "text", v, "text_np", v, "#foo_s", v)); v = "now cow"; - assertU(adoc("id","2", "text",v, "text_np", v)); + assertU(adoc("id", "2", "text", v, "text_np", v)); assertU(commit()); } @@ -57,40 +55,40 @@ public static void beforeClass() throws Exception { public void testCopyRename() throws Exception { // original - assertQ(req("q","id:1", "fl","id") - ,"//*[@numFound='1'] " - ,"*[count(//doc/str)=1] " - ,"*//doc[1]/str[1][.='1'] " - ); - + assertQ( + req("q", "id:1", "fl", "id"), + "//*[@numFound='1'] ", + "*[count(//doc/str)=1] ", + "*//doc[1]/str[1][.='1'] "); + // rename - assertQ(req("q","id:1", "fl","xxx:id") - ,"//*[@numFound='1'] " - ,"*[count(//doc/str)=1] " - ,"*//doc[1]/str[1][.='1'] " - ); + assertQ( + req("q", "id:1", "fl", "xxx:id"), + "//*[@numFound='1'] ", + "*[count(//doc/str)=1] ", + "*//doc[1]/str[1][.='1'] "); // original and copy - assertQ(req("q","id:1", "fl","id,xxx:id") - ,"//*[@numFound='1'] " - ,"*[count(//doc/str)=2] " - ,"*//doc[1]/str[1][.='1'] " - ,"*//doc[1]/str[2][.='1'] " - ); - assertQ(req("q","id:1", "fl","xxx:id,id") - ,"//*[@numFound='1'] " - ,"*[count(//doc/str)=2] " - ,"*//doc[1]/str[1][.='1'] " - ,"*//doc[1]/str[2][.='1'] " - ); + assertQ( + req("q", "id:1", "fl", "id,xxx:id"), + "//*[@numFound='1'] ", + "*[count(//doc/str)=2] ", + "*//doc[1]/str[1][.='1'] ", + "*//doc[1]/str[2][.='1'] "); + assertQ( + req("q", "id:1", "fl", "xxx:id,id"), + "//*[@numFound='1'] ", + "*[count(//doc/str)=2] ", + "*//doc[1]/str[1][.='1'] ", + "*//doc[1]/str[2][.='1'] "); // two copies - assertQ(req("q","id:1", "fl","xxx:id,yyy:id") - ,"//*[@numFound='1'] " - ,"*[count(//doc/str)=2] " - ,"*//doc[1]/str[1][.='1'] " - ,"*//doc[1]/str[2][.='1'] " - ); + assertQ( + req("q", "id:1", "fl", "xxx:id,yyy:id"), + "//*[@numFound='1'] ", + "*[count(//doc/str)=2] ", + "*//doc[1]/str[1][.='1'] ", + "*//doc[1]/str[2][.='1'] "); } @Test @@ -103,192 +101,206 @@ public void testToString() { } final ReturnFields rf1 = new SolrReturnFields(); - final String rf1ToString = "SolrReturnFields=(globs=[]" - +",fields=[]" - +",okFieldNames=[]" - +",reqFieldNames=null" - +",transformer=null,wantsScore=false,wantsAllFields=true)"; + final String rf1ToString = + "SolrReturnFields=(globs=[]" + + ",fields=[]" + + ",okFieldNames=[]" + + ",reqFieldNames=null" + + 
",transformer=null,wantsScore=false,wantsAllFields=true)"; assertEquals(rf1ToString, rf1.toString()); - final ReturnFields rf2 = new SolrReturnFields( - req("fl", SolrReturnFields.SCORE)); - final String rf2ToStringA = "SolrReturnFields=(globs=[]" - +",fields=["+SolrReturnFields.SCORE+"]" - +",okFieldNames=[null, "+SolrReturnFields.SCORE+"]" - +",reqFieldNames=["+SolrReturnFields.SCORE+"]" - +",transformer=score,wantsScore=true,wantsAllFields=false)"; - final String rf2ToStringB = "SolrReturnFields=(globs=[]" - +",fields=["+SolrReturnFields.SCORE+"]" - +",okFieldNames=["+SolrReturnFields.SCORE+", null]" - +",reqFieldNames=["+SolrReturnFields.SCORE+"]" - +",transformer=score,wantsScore=true,wantsAllFields=false)"; - assertTrue( - rf2ToStringA.equals(rf2.toString()) || - rf2ToStringB.equals(rf2.toString())); + final ReturnFields rf2 = new SolrReturnFields(req("fl", SolrReturnFields.SCORE)); + final String rf2ToStringA = + "SolrReturnFields=(globs=[]" + + ",fields=[" + + SolrReturnFields.SCORE + + "]" + + ",okFieldNames=[null, " + + SolrReturnFields.SCORE + + "]" + + ",reqFieldNames=[" + + SolrReturnFields.SCORE + + "]" + + ",transformer=score,wantsScore=true,wantsAllFields=false)"; + final String rf2ToStringB = + "SolrReturnFields=(globs=[]" + + ",fields=[" + + SolrReturnFields.SCORE + + "]" + + ",okFieldNames=[" + + SolrReturnFields.SCORE + + ", null]" + + ",reqFieldNames=[" + + SolrReturnFields.SCORE + + "]" + + ",transformer=score,wantsScore=true,wantsAllFields=false)"; + assertTrue(rf2ToStringA.equals(rf2.toString()) || rf2ToStringB.equals(rf2.toString())); } @Test public void testSeparators() { - ReturnFields rf = new SolrReturnFields( req("fl", "id name test subject score") ); - assertTrue( rf.wantsScore() ); - assertTrue( rf.wantsField( "id" ) ); - assertTrue( rf.wantsField( "name" ) ); - assertTrue( rf.wantsField( "test" ) ); - assertTrue( rf.wantsField( "subject" ) ); - assertTrue( rf.wantsField( "score" ) ); - assertFalse( rf.wantsAllFields() ); - assertFalse( rf.wantsField( "xxx" ) ); - assertTrue( rf.getTransformer() instanceof ScoreAugmenter); - - rf = new SolrReturnFields( req("fl", "id,name,test,subject,score") ); - assertTrue( rf.wantsScore() ); - assertTrue( rf.wantsField( "id" ) ); - assertTrue( rf.wantsField( "name" ) ); - assertTrue( rf.wantsField( "test" ) ); - assertTrue( rf.wantsField( "subject" ) ); - assertTrue( rf.wantsField( "score" ) ); - assertFalse( rf.wantsAllFields() ); - assertFalse( rf.wantsField( "xxx" ) ); - assertTrue( rf.getTransformer() instanceof ScoreAugmenter); - - rf = new SolrReturnFields( req("fl", "id,name test,subject score") ); - assertTrue( rf.wantsScore() ); - assertTrue( rf.wantsField( "id" ) ); - assertTrue( rf.wantsField( "name" ) ); - assertTrue( rf.wantsField( "test" ) ); - assertTrue( rf.wantsField( "subject" ) ); - assertTrue( rf.wantsField( "score" ) ); - assertFalse( rf.wantsAllFields() ); - assertFalse( rf.wantsField( "xxx" ) ); - assertTrue( rf.getTransformer() instanceof ScoreAugmenter); - - rf = new SolrReturnFields( req("fl", "id, name test , subject,score") ); - assertTrue( rf.wantsScore() ); - assertTrue( rf.wantsField( "id" ) ); - assertTrue( rf.wantsField( "name" ) ); - assertTrue( rf.wantsField( "test" ) ); - assertTrue( rf.wantsField( "subject" ) ); - assertTrue( rf.wantsField( "score" ) ); - assertFalse( rf.wantsAllFields() ); - assertFalse( rf.wantsField( "xxx" ) ); - assertTrue( rf.getTransformer() instanceof ScoreAugmenter); + ReturnFields rf = new SolrReturnFields(req("fl", "id name test subject score")); + 
assertTrue(rf.wantsScore()); + assertTrue(rf.wantsField("id")); + assertTrue(rf.wantsField("name")); + assertTrue(rf.wantsField("test")); + assertTrue(rf.wantsField("subject")); + assertTrue(rf.wantsField("score")); + assertFalse(rf.wantsAllFields()); + assertFalse(rf.wantsField("xxx")); + assertTrue(rf.getTransformer() instanceof ScoreAugmenter); + + rf = new SolrReturnFields(req("fl", "id,name,test,subject,score")); + assertTrue(rf.wantsScore()); + assertTrue(rf.wantsField("id")); + assertTrue(rf.wantsField("name")); + assertTrue(rf.wantsField("test")); + assertTrue(rf.wantsField("subject")); + assertTrue(rf.wantsField("score")); + assertFalse(rf.wantsAllFields()); + assertFalse(rf.wantsField("xxx")); + assertTrue(rf.getTransformer() instanceof ScoreAugmenter); + + rf = new SolrReturnFields(req("fl", "id,name test,subject score")); + assertTrue(rf.wantsScore()); + assertTrue(rf.wantsField("id")); + assertTrue(rf.wantsField("name")); + assertTrue(rf.wantsField("test")); + assertTrue(rf.wantsField("subject")); + assertTrue(rf.wantsField("score")); + assertFalse(rf.wantsAllFields()); + assertFalse(rf.wantsField("xxx")); + assertTrue(rf.getTransformer() instanceof ScoreAugmenter); + + rf = new SolrReturnFields(req("fl", "id, name test , subject,score")); + assertTrue(rf.wantsScore()); + assertTrue(rf.wantsField("id")); + assertTrue(rf.wantsField("name")); + assertTrue(rf.wantsField("test")); + assertTrue(rf.wantsField("subject")); + assertTrue(rf.wantsField("score")); + assertFalse(rf.wantsAllFields()); + assertFalse(rf.wantsField("xxx")); + assertTrue(rf.getTransformer() instanceof ScoreAugmenter); } @Test public void testWilcards() { - ReturnFields rf = new SolrReturnFields( req("fl", "*") ); - assertFalse( rf.wantsScore() ); - assertTrue( rf.wantsField( "xxx" ) ); - assertTrue( rf.wantsAllFields() ); - assertNull( rf.getTransformer() ); - - rf = new SolrReturnFields( req("fl", " * ") ); - assertFalse( rf.wantsScore() ); - assertTrue( rf.wantsField( "xxx" ) ); - assertTrue( rf.wantsAllFields() ); - assertNull( rf.getTransformer() ); + ReturnFields rf = new SolrReturnFields(req("fl", "*")); + assertFalse(rf.wantsScore()); + assertTrue(rf.wantsField("xxx")); + assertTrue(rf.wantsAllFields()); + assertNull(rf.getTransformer()); + + rf = new SolrReturnFields(req("fl", " * ")); + assertFalse(rf.wantsScore()); + assertTrue(rf.wantsField("xxx")); + assertTrue(rf.wantsAllFields()); + assertNull(rf.getTransformer()); // Check that we want wildcards - rf = new SolrReturnFields( req("fl", "id,aaa*,*bbb") ); - assertTrue( rf.wantsField( "id" ) ); - assertTrue( rf.wantsField( "aaaxxx" ) ); + rf = new SolrReturnFields(req("fl", "id,aaa*,*bbb")); + assertTrue(rf.wantsField("id")); + assertTrue(rf.wantsField("aaaxxx")); assertFalse(rf.wantsField("xxxaaa")); - assertTrue( rf.wantsField( "xxxbbb" ) ); + assertTrue(rf.wantsField("xxxbbb")); assertFalse(rf.wantsField("bbbxxx")); - assertFalse( rf.wantsField( "aa" ) ); - assertFalse( rf.wantsField( "bb" ) ); + assertFalse(rf.wantsField("aa")); + assertFalse(rf.wantsField("bb")); } @Test public void testManyParameters() { - ReturnFields rf = new SolrReturnFields( req("fl", "id name", "fl", "test subject", "fl", "score") ); - assertTrue( rf.wantsScore() ); - assertTrue( rf.wantsField( "id" ) ); - assertTrue( rf.wantsField( "name" ) ); - assertTrue( rf.wantsField( "test" ) ); - assertTrue( rf.wantsField( "subject" ) ); - assertTrue( rf.wantsField( "score" ) ); - assertFalse( rf.wantsAllFields() ); - assertFalse( rf.wantsField( "xxx" ) ); - assertTrue( 
rf.getTransformer() instanceof ScoreAugmenter); + ReturnFields rf = + new SolrReturnFields(req("fl", "id name", "fl", "test subject", "fl", "score")); + assertTrue(rf.wantsScore()); + assertTrue(rf.wantsField("id")); + assertTrue(rf.wantsField("name")); + assertTrue(rf.wantsField("test")); + assertTrue(rf.wantsField("subject")); + assertTrue(rf.wantsField("score")); + assertFalse(rf.wantsAllFields()); + assertFalse(rf.wantsField("xxx")); + assertTrue(rf.getTransformer() instanceof ScoreAugmenter); } @Test public void testFunctions() { - ReturnFields rf = new SolrReturnFields( req("fl", "exists(text),id,sum(1,1)") ); + ReturnFields rf = new SolrReturnFields(req("fl", "exists(text),id,sum(1,1)")); assertFalse(rf.wantsScore()); - assertTrue( rf.wantsField( "id" ) ); - assertTrue( rf.wantsField( "sum(1,1)" )); - assertTrue( rf.wantsField( "exists(text)" )); - assertFalse( rf.wantsAllFields() ); - assertFalse( rf.wantsField( "xxx" ) ); - assertTrue( rf.getTransformer() instanceof DocTransformers); - DocTransformers transformers = (DocTransformers)rf.getTransformer(); + assertTrue(rf.wantsField("id")); + assertTrue(rf.wantsField("sum(1,1)")); + assertTrue(rf.wantsField("exists(text)")); + assertFalse(rf.wantsAllFields()); + assertFalse(rf.wantsField("xxx")); + assertTrue(rf.getTransformer() instanceof DocTransformers); + DocTransformers transformers = (DocTransformers) rf.getTransformer(); assertEquals("exists(text)", transformers.getTransformer(0).getName()); assertEquals("sum(1,1)", transformers.getTransformer(1).getName()); } @Test public void testTransformers() { - ReturnFields rf = new SolrReturnFields( req("fl", "[explain]") ); - assertFalse( rf.wantsScore() ); + ReturnFields rf = new SolrReturnFields(req("fl", "[explain]")); + assertFalse(rf.wantsScore()); assertTrue(rf.wantsField("[explain]")); assertFalse(rf.wantsField("id")); assertFalse(rf.wantsAllFields()); - assertEquals( "[explain]", rf.getTransformer().getName() ); + assertEquals("[explain]", rf.getTransformer().getName()); - rf = new SolrReturnFields( req("fl", "[shard],id") ); - assertFalse( rf.wantsScore() ); + rf = new SolrReturnFields(req("fl", "[shard],id")); + assertFalse(rf.wantsScore()); assertTrue(rf.wantsField("[shard]")); assertTrue(rf.wantsField("id")); assertFalse(rf.wantsField("xxx")); assertFalse(rf.wantsAllFields()); - assertEquals( "[shard]", rf.getTransformer().getName() ); + assertEquals("[shard]", rf.getTransformer().getName()); - rf = new SolrReturnFields( req("fl", "[docid]") ); - assertFalse( rf.wantsScore() ); + rf = new SolrReturnFields(req("fl", "[docid]")); + assertFalse(rf.wantsScore()); assertTrue(rf.wantsField("[docid]")); - assertFalse( rf.wantsField( "id" ) ); + assertFalse(rf.wantsField("id")); assertFalse(rf.wantsField("xxx")); assertFalse(rf.wantsAllFields()); - assertEquals( "[docid]", rf.getTransformer().getName() ); + assertEquals("[docid]", rf.getTransformer().getName()); - rf = new SolrReturnFields( req("fl", "mydocid:[docid]") ); - assertFalse( rf.wantsScore() ); + rf = new SolrReturnFields(req("fl", "mydocid:[docid]")); + assertFalse(rf.wantsScore()); assertTrue(rf.wantsField("mydocid")); - assertFalse( rf.wantsField( "id" ) ); + assertFalse(rf.wantsField("id")); assertFalse(rf.wantsField("xxx")); assertFalse(rf.wantsAllFields()); - assertEquals( "mydocid", rf.getTransformer().getName() ); + assertEquals("mydocid", rf.getTransformer().getName()); - rf = new SolrReturnFields( req("fl", "[docid][shard]") ); - assertFalse( rf.wantsScore() ); + rf = new SolrReturnFields(req("fl", 
"[docid][shard]")); + assertFalse(rf.wantsScore()); assertTrue(rf.wantsField("[docid]")); assertTrue(rf.wantsField("[shard]")); assertFalse(rf.wantsField("xxx")); assertFalse(rf.wantsAllFields()); - assertTrue( rf.getTransformer() instanceof DocTransformers); - assertEquals(2, ((DocTransformers)rf.getTransformer()).size()); + assertTrue(rf.getTransformer() instanceof DocTransformers); + assertEquals(2, ((DocTransformers) rf.getTransformer()).size()); - rf = new SolrReturnFields( req("fl", "[xxxxx]") ); - assertFalse( rf.wantsScore() ); + rf = new SolrReturnFields(req("fl", "[xxxxx]")); + assertFalse(rf.wantsScore()); assertTrue(rf.wantsField("[xxxxx]")); - assertFalse( rf.wantsField( "id" ) ); + assertFalse(rf.wantsField("id")); assertFalse(rf.wantsAllFields()); assertNull(rf.getTransformer()); // Don't return 'store' just because it is required by the transformer - rf = new SolrReturnFields( req("fl", "[geo f=store]") ); - assertFalse( rf.wantsScore() ); + rf = new SolrReturnFields(req("fl", "[geo f=store]")); + assertFalse(rf.wantsScore()); assertTrue(rf.wantsField("[geo]")); - assertFalse( rf.wantsField( "store" ) ); + assertFalse(rf.wantsField("store")); assertFalse(rf.wantsAllFields()); assertNotNull(rf.getTransformer()); } @Test public void testAliases() { - ReturnFields rf = new SolrReturnFields( req("fl", "newId:id newName:name newTest:test newSubject:subject") ); + ReturnFields rf = + new SolrReturnFields(req("fl", "newId:id newName:name newTest:test newSubject:subject")); assertTrue(rf.wantsField("id")); assertTrue(rf.wantsField("name")); assertTrue(rf.wantsField("test")); @@ -300,7 +312,9 @@ public void testAliases() { assertFalse(rf.wantsField("xxx")); assertFalse(rf.wantsAllFields()); - rf = new SolrReturnFields( req("fl", "newId:id newName:name newTest:test newSubject:subject score") ); + rf = + new SolrReturnFields( + req("fl", "newId:id newName:name newTest:test newSubject:subject score")); assertTrue(rf.wantsField("id")); assertTrue(rf.wantsField("name")); assertTrue(rf.wantsField("test")); @@ -311,8 +325,8 @@ public void testAliases() { assertTrue(rf.wantsField("newSubject")); assertFalse(rf.wantsField("xxx")); assertFalse(rf.wantsAllFields()); - assertTrue( rf.getTransformer() instanceof DocTransformers); - assertEquals(5, ((DocTransformers)rf.getTransformer()).size()); // 4 rename and score + assertTrue(rf.getTransformer() instanceof DocTransformers); + assertEquals(5, ((DocTransformers) rf.getTransformer()).size()); // 4 rename and score } // hyphens in field names are not supported in all contexts, but we wanted @@ -367,21 +381,19 @@ public void testFunkyFieldNames() { assertFalse(rf.wantsField("xxx")); assertFalse(rf.wantsAllFields()); - assertQ(req("q","id:1", "fl","#foo_s", "fl","id") - ,"//*[@numFound='1'] " - ,"//str[@name='id'][.='1']" - ,"//arr[@name='#foo_s']/str[.='how now brown cow']" - ); - + assertQ( + req("q", "id:1", "fl", "#foo_s", "fl", "id"), + "//*[@numFound='1'] ", + "//str[@name='id'][.='1']", + "//arr[@name='#foo_s']/str[.='how now brown cow']"); } /** - * Whitebox verification that the conversion from lucene {@link Document} to {@link SolrDocument} respects - * the {@link ReturnFields} and doesn't unneccessarily convert Fields that aren't needed. - *
- * <p>
- * This is important because {@link SolrDocumentFetcher} may return additional fields
- * (lazy or otherwise) if the document has been cached.
- * </p>
+ * Whitebox verification that the conversion from lucene {@link Document} to {@link SolrDocument}
+ * respects the {@link ReturnFields} and doesn't unnecessarily convert Fields that aren't needed.
+ *
+ * <p>
This is important because {@link SolrDocumentFetcher} may return additional fields (lazy or + * otherwise) if the document has been cached. */ public void testWhiteboxSolrDocumentConversion() { final IndexSchema schema = h.getCore().getLatestSchema(); @@ -393,62 +405,63 @@ public void testWhiteboxSolrDocumentConversion() { // uses the schema for multivalued-ness) final Document docIn = new Document(); final StringBuilder allFieldNames = new StringBuilder(); - docIn.add(new StringField("id","bar",Store.YES)); + docIn.add(new StringField("id", "bar", Store.YES)); allFieldNames.append("id"); - docIn.add(new StringField("store","42",Store.YES)); + docIn.add(new StringField("store", "42", Store.YES)); allFieldNames.append(",store"); - docIn.add(new StringField("subword","bar",Store.YES)); // single value in multi-value field + docIn.add(new StringField("subword", "bar", Store.YES)); // single value in multi-value field allFieldNames.append(",subword"); - docIn.add(new StringField("uniq","xxx",Store.YES)); - docIn.add(new StringField("uniq","yyy",Store.YES)); // multi-value in multi-valued field + docIn.add(new StringField("uniq", "xxx", Store.YES)); + docIn.add(new StringField("uniq", "yyy", Store.YES)); // multi-value in multi-valued field allFieldNames.append(",uniq"); for (int i = 0; i < 20; i++) { final String foo = "foo_" + i + "_s1"; allFieldNames.append(",").append(foo); - docIn.add(new StringField(foo, "bar"+i, Store.YES)); + docIn.add(new StringField(foo, "bar" + i, Store.YES)); } // output should only have a single field - docOut = convertLuceneDocToSolrDoc(docIn, schema, new SolrReturnFields(req("fl","id"))); + docOut = convertLuceneDocToSolrDoc(docIn, schema, new SolrReturnFields(req("fl", "id"))); assertEquals(docOut.toString(), 1, docOut.size()); - assertEquals(docOut.toString(), - Collections.singleton("id"), - docOut.getFieldNames()); + assertEquals(docOut.toString(), Collections.singleton("id"), docOut.getFieldNames()); assertTrue(docOut.toString(), docOut.get("id") instanceof StringField); // output should only have the few specified fields // behavior should be ultimately be consistent for all of these ReturnField instances // (aliasing, extra requested by transformer, or otherwise) - for (ReturnFields rf : Arrays.asList - (new SolrReturnFields(req("fl","id,subword,store,uniq,foo_2_s1")), - new SolrReturnFields(req("fl","id,xxx:[geo f=store],uniq,foo_2_s1,subword")), - new SolrReturnFields(req("fl","id,xxx:subword,uniq,yyy:foo_2_s1,[geo f=store]")))) { + for (ReturnFields rf : + Arrays.asList( + new SolrReturnFields(req("fl", "id,subword,store,uniq,foo_2_s1")), + new SolrReturnFields(req("fl", "id,xxx:[geo f=store],uniq,foo_2_s1,subword")), + new SolrReturnFields(req("fl", "id,xxx:subword,uniq,yyy:foo_2_s1,[geo f=store]")))) { docOut = convertLuceneDocToSolrDoc(docIn, schema, rf); - final String debug = rf.toString() + " => " +docOut.toString(); + final String debug = rf.toString() + " => " + docOut.toString(); assertEquals(debug, 5, docOut.size()); - assertEquals(debug, - new HashSet(Arrays.asList("id","subword","uniq","foo_2_s1","store")), - docOut.getFieldNames()); + assertEquals( + debug, + new HashSet(Arrays.asList("id", "subword", "uniq", "foo_2_s1", "store")), + docOut.getFieldNames()); assertTrue(debug, docOut.get("id") instanceof StringField); assertTrue(debug, docOut.get("store") instanceof StringField); assertTrue(debug, docOut.get("foo_2_s1") instanceof StringField); assertTrue(debug, docOut.get("subword") instanceof List); assertTrue(debug, docOut.get("uniq") 
instanceof List); } - + // all source fields should be in the output // behavior should be ultimately be consistent for all of these ReturnField instances // (globbing or requesting more fields then doc has) - for (ReturnFields rf : Arrays.asList - (new SolrReturnFields(), + for (ReturnFields rf : + Arrays.asList( + new SolrReturnFields(), new SolrReturnFields(req()), - new SolrReturnFields(req("fl","*")), - new SolrReturnFields(req("fl","*,score")), - new SolrReturnFields(req("fl","id,subword,uniq,foo_*,store_*")), - new SolrReturnFields(req("fl",allFieldNames+",bogus1,bogus2,bogus3")))) { - + new SolrReturnFields(req("fl", "*")), + new SolrReturnFields(req("fl", "*,score")), + new SolrReturnFields(req("fl", "id,subword,uniq,foo_*,store_*")), + new SolrReturnFields(req("fl", allFieldNames + ",bogus1,bogus2,bogus3")))) { + docOut = convertLuceneDocToSolrDoc(docIn, schema, rf); - final String debug = rf.toString() + " => " +docOut.toString(); + final String debug = rf.toString() + " => " + docOut.toString(); assertEquals(debug, 24, docOut.size()); assertTrue(debug, docOut.get("id") instanceof StringField); assertTrue(debug, docOut.get("store") instanceof StringField); @@ -458,10 +471,8 @@ public void testWhiteboxSolrDocumentConversion() { assertTrue(debug, docOut.get("foo_" + i + "_s1") instanceof StringField); } } - } - public void testWhitespace() { Random r = random(); final int iters = atLeast(30); @@ -470,25 +481,27 @@ public void testWhitespace() { final boolean aliasId = r.nextBoolean(); final boolean aliasFoo = r.nextBoolean(); - final String id = randomWhitespace(r, 0, 3) + - (aliasId ? "aliasId:" : "") + - "id" + - randomWhitespace(r, 1, 3); - final String foo_i = randomWhitespace(r, 0, 3) + - (aliasFoo ? "aliasFoo:" : "") + - "foo_i" + - randomWhitespace(r, 0, 3); + final String id = + randomWhitespace(r, 0, 3) + + (aliasId ? "aliasId:" : "") + + "id" + + randomWhitespace(r, 1, 3); + final String foo_i = + randomWhitespace(r, 0, 3) + + (aliasFoo ? "aliasFoo:" : "") + + "foo_i" + + randomWhitespace(r, 0, 3); final String fl = id + (r.nextBoolean() ? "" : ",") + foo_i; ReturnFields rf = new SolrReturnFields(req("fl", fl)); - assertFalse("score ("+fl+")", rf.wantsScore()); + assertFalse("score (" + fl + ")", rf.wantsScore()); - assertTrue("id ("+fl+")", rf.wantsField("id")); - assertTrue("foo_i ("+fl+")", rf.wantsField("foo_i")); + assertTrue("id (" + fl + ")", rf.wantsField("id")); + assertTrue("foo_i (" + fl + ")", rf.wantsField("foo_i")); - assertEquals("aliasId ("+fl+")", aliasId, rf.wantsField("aliasId")); - assertEquals("aliasFoo ("+fl+")", aliasFoo, rf.wantsField("aliasFoo")); + assertEquals("aliasId (" + fl + ")", aliasId, rf.wantsField("aliasId")); + assertEquals("aliasFoo (" + fl + ")", aliasFoo, rf.wantsField("aliasFoo")); assertFalse(rf.wantsField("xxx")); assertFalse(rf.wantsAllFields()); @@ -496,66 +509,77 @@ public void testWhitespace() { } /** List of characters that match {@link Character#isWhitespace} */ - private static final char[] WHITESPACE_CHARACTERS = new char[] { - // :TODO: is this list exhaustive? - '\u0009', - '\n', - '\u000B', - '\u000C', - '\r', - '\u001C', - '\u001D', - '\u001E', - '\u001F', - '\u0020', - // '\u0085', failed sanity check? - '\u1680', - // '\u180E', no longer whitespace in Unicode 7.0 (Java 9)! 
- '\u2000', - '\u2001', - '\u2002', - '\u2003', - '\u2004', - '\u2005', - '\u2006', - '\u2008', - '\u2009', - '\u200A', - '\u2028', - '\u2029', - '\u205F', - '\u3000', - }; + private static final char[] WHITESPACE_CHARACTERS = + new char[] { + // :TODO: is this list exhaustive? + '\u0009', + '\n', + '\u000B', + '\u000C', + '\r', + '\u001C', + '\u001D', + '\u001E', + '\u001F', + '\u0020', + // '\u0085', failed sanity check? + '\u1680', + // '\u180E', no longer whitespace in Unicode 7.0 (Java 9)! + '\u2000', + '\u2001', + '\u2002', + '\u2003', + '\u2004', + '\u2005', + '\u2006', + '\u2008', + '\u2009', + '\u200A', + '\u2028', + '\u2029', + '\u205F', + '\u3000', + }; static { // if the JVM/unicode can redefine whitespace once (LUCENE-6760), it might happen again // in the future. if that happens, fail early with a clear msg, even if java asserts // (used in randomWhitespace) are disabled - + for (int offset = 0; offset < WHITESPACE_CHARACTERS.length; offset++) { char c = WHITESPACE_CHARACTERS[offset]; - if (! Character.isWhitespace(c) ) { - fail(String.format(Locale.ENGLISH, "Not really whitespace? New JVM/Unicode definitions? WHITESPACE_CHARACTERS[%d] is '\\u%04X'", offset, (int) c)); + if (!Character.isWhitespace(c)) { + fail( + String.format( + Locale.ENGLISH, + "Not really whitespace? 
WHITESPACE_CHARACTERS[%d] is '\\u%04X'", + offset, + (int) c); out.append(c); } return out.toString(); } - } diff --git a/solr/core/src/test/org/apache/solr/search/SignificantTermsQParserPluginTest.java b/solr/core/src/test/org/apache/solr/search/SignificantTermsQParserPluginTest.java index b8cdd014f56..cc64c320e50 100644 --- a/solr/core/src/test/org/apache/solr/search/SignificantTermsQParserPluginTest.java +++ b/solr/core/src/test/org/apache/solr/search/SignificantTermsQParserPluginTest.java @@ -17,6 +17,13 @@ package org.apache.solr.search; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; import org.apache.commons.io.FileUtils; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrInputDocument; @@ -34,117 +41,117 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.Map; - public class SignificantTermsQParserPluginTest extends SolrTestCaseJ4 { - @BeforeClass - public static void setUpCore() throws Exception { - String tmpSolrHome = createTempDir().toFile().getAbsolutePath(); - FileUtils.copyDirectory(new File(TEST_HOME()), new File(tmpSolrHome).getAbsoluteFile()); - initCore("solrconfig.xml", "schema.xml", new File(tmpSolrHome).getAbsolutePath()); - } - - /** - * Test the backwards compatibility for a typo in the SignificantTermsQParserPlugin. It will fail if the backwards - * compatibility is broken. - */ - @Test - public void testQParserBackwardsCompatibility() { - assertEquals("significantTerms", SignificantTermsQParserPlugin.NAME); - assertEquals(SignificantTermsQParserPlugin.class, - QParserPlugin.standardPlugins.get(SignificantTermsQParserPlugin.NAME).getClass()); + @BeforeClass + public static void setUpCore() throws Exception { + String tmpSolrHome = createTempDir().toFile().getAbsolutePath(); + FileUtils.copyDirectory(new File(TEST_HOME()), new File(tmpSolrHome).getAbsoluteFile()); + initCore("solrconfig.xml", "schema.xml", new File(tmpSolrHome).getAbsolutePath()); + } + + /** + * Test the backwards compatibility for a typo in the SignificantTermsQParserPlugin. It will fail + * if the backwards compatibility is broken. 
+ */ + @Test + public void testQParserBackwardsCompatibility() { + assertEquals("significantTerms", SignificantTermsQParserPlugin.NAME); + assertEquals( + SignificantTermsQParserPlugin.class, + QParserPlugin.standardPlugins.get(SignificantTermsQParserPlugin.NAME).getClass()); + } + + @Test + public void testEmptyCollectionDoesNotThrow() throws Exception { + SolrCore emptyCore = h.getCore(); + QParserPlugin qParserPlugin = + QParserPlugin.standardPlugins.get(SignificantTermsQParserPlugin.NAME); + Map params = new HashMap<>(); + params.put("field", "cat"); + QParser parser = + qParserPlugin.createParser( + "", new MapSolrParams(params), new MapSolrParams(new HashMap<>()), null); + AnalyticsQuery query = (AnalyticsQuery) parser.parse(); + SolrQueryResponse resp = new SolrQueryResponse(); + + RefCounted searcher = emptyCore.getSearcher(); + try { + DelegatingCollector analyticsCollector = + query.getAnalyticsCollector( + new ResponseBuilder(null, resp, Collections.emptyList()), searcher.get()); + assertNotNull(analyticsCollector); + analyticsCollector.finish(); + LinkedHashMap expectedValues = new LinkedHashMap<>(); + expectedValues.put("numDocs", 0); + expectedValues.put("sterms", new ArrayList()); + expectedValues.put("scores", new ArrayList()); + expectedValues.put("docFreq", new ArrayList()); + expectedValues.put("queryDocFreq", new ArrayList()); + assertEquals(expectedValues, resp.getValues().get("significantTerms")); + } finally { + searcher.decref(); } - - @Test - public void testEmptyCollectionDoesNotThrow() throws Exception { - SolrCore emptyCore = h.getCore(); - QParserPlugin qParserPlugin = QParserPlugin.standardPlugins.get(SignificantTermsQParserPlugin.NAME); - Map params = new HashMap<>(); - params.put("field", "cat"); - QParser parser = qParserPlugin.createParser("", new MapSolrParams(params), new MapSolrParams(new HashMap<>()), null); - AnalyticsQuery query = (AnalyticsQuery) parser.parse(); - SolrQueryResponse resp = new SolrQueryResponse(); - - RefCounted searcher = emptyCore.getSearcher(); - try { - DelegatingCollector analyticsCollector = query.getAnalyticsCollector(new ResponseBuilder(null, resp, Collections.emptyList()), searcher.get()); - assertNotNull(analyticsCollector); - analyticsCollector.finish(); - LinkedHashMap expectedValues = new LinkedHashMap<>(); - expectedValues.put("numDocs", 0); - expectedValues.put("sterms", new ArrayList()); - expectedValues.put("scores", new ArrayList()); - expectedValues.put("docFreq", new ArrayList()); - expectedValues.put("queryDocFreq", new ArrayList()); - assertEquals(expectedValues, resp.getValues().get("significantTerms")); - } finally { - searcher.decref(); - } - - } - - @Test - public void testCollectionWithDocuments() throws Exception { - SolrCore dataCore = h.getCore(); - addTestDocs(dataCore); - - QParserPlugin qParserPlugin = QParserPlugin.standardPlugins.get(SignificantTermsQParserPlugin.NAME); - Map params = new HashMap<>(); - params.put("field", "cat"); - QParser parser = qParserPlugin.createParser("", new MapSolrParams(params), new MapSolrParams(new HashMap<>()), null); - AnalyticsQuery query = (AnalyticsQuery) parser.parse(); - SolrQueryResponse resp = new SolrQueryResponse(); - - ResponseBuilder responseBuilder = new ResponseBuilder(null, resp, Collections.emptyList()); - RefCounted searcher = dataCore.getSearcher(); - try { - - DelegatingCollector analyticsCollector = query.getAnalyticsCollector(responseBuilder, searcher.get()); - assertNotNull(analyticsCollector); - analyticsCollector.finish(); - - LinkedHashMap 
expectedValues = new LinkedHashMap<>(); - expectedValues.put("numDocs", 1); - expectedValues.put("sterms", new ArrayList()); - expectedValues.put("scores", new ArrayList()); - expectedValues.put("docFreq", new ArrayList()); - expectedValues.put("queryDocFreq", new ArrayList()); - - assertEquals(expectedValues, resp.getValues().get("significantTerms")); - - } finally { - searcher.decref(); - } - - deleteTestDocs(dataCore); - } - - private void addTestDocs(SolrCore core) throws IOException { - SolrQueryRequest coreReq = new LocalSolrQueryRequest(core, new ModifiableSolrParams()); - AddUpdateCommand cmd = new AddUpdateCommand(coreReq); - cmd.solrDoc = new SolrInputDocument(); - cmd.solrDoc.addField("id", "1"); - cmd.solrDoc.addField("cat", "foo"); - core.getUpdateHandler().addDoc(cmd); - - core.getUpdateHandler().commit(new CommitUpdateCommand(coreReq, true)); - coreReq.close(); - } - - private void deleteTestDocs(SolrCore core) throws IOException { - SolrQueryRequest coreReq = new LocalSolrQueryRequest(core, new ModifiableSolrParams()); - DeleteUpdateCommand cmd = new DeleteUpdateCommand(coreReq); - cmd.id = "1"; - core.getUpdateHandler().delete(cmd); - core.getUpdateHandler().commit(new CommitUpdateCommand(coreReq, true)); - coreReq.close(); + } + + @Test + public void testCollectionWithDocuments() throws Exception { + SolrCore dataCore = h.getCore(); + addTestDocs(dataCore); + + QParserPlugin qParserPlugin = + QParserPlugin.standardPlugins.get(SignificantTermsQParserPlugin.NAME); + Map params = new HashMap<>(); + params.put("field", "cat"); + QParser parser = + qParserPlugin.createParser( + "", new MapSolrParams(params), new MapSolrParams(new HashMap<>()), null); + AnalyticsQuery query = (AnalyticsQuery) parser.parse(); + SolrQueryResponse resp = new SolrQueryResponse(); + + ResponseBuilder responseBuilder = new ResponseBuilder(null, resp, Collections.emptyList()); + RefCounted searcher = dataCore.getSearcher(); + try { + + DelegatingCollector analyticsCollector = + query.getAnalyticsCollector(responseBuilder, searcher.get()); + assertNotNull(analyticsCollector); + analyticsCollector.finish(); + + LinkedHashMap expectedValues = new LinkedHashMap<>(); + expectedValues.put("numDocs", 1); + expectedValues.put("sterms", new ArrayList()); + expectedValues.put("scores", new ArrayList()); + expectedValues.put("docFreq", new ArrayList()); + expectedValues.put("queryDocFreq", new ArrayList()); + + assertEquals(expectedValues, resp.getValues().get("significantTerms")); + + } finally { + searcher.decref(); } + deleteTestDocs(dataCore); + } + + private void addTestDocs(SolrCore core) throws IOException { + SolrQueryRequest coreReq = new LocalSolrQueryRequest(core, new ModifiableSolrParams()); + AddUpdateCommand cmd = new AddUpdateCommand(coreReq); + cmd.solrDoc = new SolrInputDocument(); + cmd.solrDoc.addField("id", "1"); + cmd.solrDoc.addField("cat", "foo"); + core.getUpdateHandler().addDoc(cmd); + + core.getUpdateHandler().commit(new CommitUpdateCommand(coreReq, true)); + coreReq.close(); + } + + private void deleteTestDocs(SolrCore core) throws IOException { + SolrQueryRequest coreReq = new LocalSolrQueryRequest(core, new ModifiableSolrParams()); + DeleteUpdateCommand cmd = new DeleteUpdateCommand(coreReq); + cmd.id = "1"; + core.getUpdateHandler().delete(cmd); + core.getUpdateHandler().commit(new CommitUpdateCommand(coreReq, true)); + coreReq.close(); + } } diff --git a/solr/core/src/test/org/apache/solr/search/SolrIndexSearcherTest.java 
b/solr/core/src/test/org/apache/solr/search/SolrIndexSearcherTest.java index 330acfc3138..484e16fe466 100644 --- a/solr/core/src/test/org/apache/solr/search/SolrIndexSearcherTest.java +++ b/solr/core/src/test/org/apache/solr/search/SolrIndexSearcherTest.java @@ -17,7 +17,6 @@ package org.apache.solr.search; import java.io.IOException; - import org.apache.lucene.index.Term; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; @@ -33,31 +32,38 @@ import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.Weight; -import org.apache.solr.handler.component.MergeStrategy; import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.handler.component.MergeStrategy; import org.junit.Before; import org.junit.BeforeClass; public class SolrIndexSearcherTest extends SolrTestCaseJ4 { - private final static int NUM_DOCS = 200; + private static final int NUM_DOCS = 200; @BeforeClass public static void setUpClass() throws Exception { initCore("solrconfig.xml", "schema.xml"); - for (int i = 0 ; i < NUM_DOCS ; i ++) { - assertU(adoc("id", String.valueOf(i), - "field1_s", "foo", - "field2_s", String.valueOf(i % 2), - "field3_i_dvo", String.valueOf(i), - "field4_t", numbersTo(i))); + for (int i = 0; i < NUM_DOCS; i++) { + assertU( + adoc( + "id", + String.valueOf(i), + "field1_s", + "foo", + "field2_s", + String.valueOf(i % 2), + "field3_i_dvo", + String.valueOf(i), + "field4_t", + numbersTo(i))); } assertU(commit()); } - + private static String numbersTo(int i) { StringBuilder numbers = new StringBuilder(); - for (int j = 0; j <= i ; j++) { + for (int j = 0; j <= i; j++) { numbers.append(String.valueOf(j) + " "); } return numbers.toString(); @@ -65,198 +71,236 @@ private static String numbersTo(int i) { @Before public void setUp() throws Exception { - assertU(adoc("id", "1", - "field1_s", "foo", - "field2_s", "1", - "field3_i_dvo", "1", - "field4_t", numbersTo(1))); + assertU( + adoc( + "id", + "1", + "field1_s", + "foo", + "field2_s", + "1", + "field3_i_dvo", + "1", + "field4_t", + numbersTo(1))); assertU(commit()); super.setUp(); } - + public void testMinExactCountLongValue() { - assertQ("test query on empty index", - req("q", "field1_s:foo", + assertQ( + "test query on empty index", + req( + "q", "field1_s:foo", "minExactCount", Long.toString(10L * Integer.MAX_VALUE), - "rows", "2") - ,"//*[@numFoundExact='true']" - ,"//*[@numFound='" + NUM_DOCS + "']" - ); + "rows", "2"), + "//*[@numFoundExact='true']", + "//*[@numFound='" + NUM_DOCS + "']"); } - + public void testMinExactCount() { - assertQ("minExactCount is lower than numFound,should produce approximated results", - req("q", "field1_s:foo", - "minExactCount", "2", - "rows", "2") - ,"//*[@numFoundExact='false']" - ,"//*[@numFound<='" + NUM_DOCS + "']" - ); - assertQ("minExactCount is higher than numFound,should produce exact results", - req("q", "field1_s:foo", + assertQ( + "minExactCount is lower than numFound,should produce approximated results", + req( + "q", "field1_s:foo", + "minExactCount", "2", + "rows", "2"), + "//*[@numFoundExact='false']", + "//*[@numFound<='" + NUM_DOCS + "']"); + assertQ( + "minExactCount is higher than numFound,should produce exact results", + req( + "q", "field1_s:foo", "minExactCount", "200", - "rows", "2") - ,"//*[@numFoundExact='true']" - ,"//*[@numFound='" + NUM_DOCS + "']" - ); + "rows", "2"), + "//*[@numFoundExact='true']", + "//*[@numFound='" + NUM_DOCS + "']"); } - - private void assertMatchesEqual(int 
expectedCount, SolrIndexSearcher searcher, QueryCommand cmd) throws IOException { + + private void assertMatchesEqual(int expectedCount, SolrIndexSearcher searcher, QueryCommand cmd) + throws IOException { QueryResult qr = new QueryResult(); searcher.search(qr, cmd); assertEquals(expectedCount, qr.getDocList().matches()); assertEquals(TotalHits.Relation.EQUAL_TO, qr.getDocList().hitCountRelation()); } - - private QueryResult assertMatchesGreaterThan(int expectedCount, SolrIndexSearcher searcher, QueryCommand cmd) throws IOException { + + private QueryResult assertMatchesGreaterThan( + int expectedCount, SolrIndexSearcher searcher, QueryCommand cmd) throws IOException { QueryResult qr = new QueryResult(); searcher.search(qr, cmd); - assertTrue("Expecting returned matches to be greater than " + expectedCount + " but got " + qr.getDocList().matches(), + assertTrue( + "Expecting returned matches to be greater than " + + expectedCount + + " but got " + + qr.getDocList().matches(), expectedCount >= qr.getDocList().matches()); assertEquals(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, qr.getDocList().hitCountRelation()); return qr; } - + public void testLowMinExactCountGeneratesApproximation() throws IOException { - h.getCore().withSearcher(searcher -> { - QueryCommand cmd = createBasicQueryCommand(NUM_DOCS / 2, 10, "field1_s", "foo"); - assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); - return null; - }); - - h.getCore().withSearcher(searcher -> { - QueryCommand cmd = createBasicQueryCommand(1, 1, "field2_s", "1"); - assertMatchesGreaterThan(NUM_DOCS/2, searcher, cmd); - return null; - }); + h.getCore() + .withSearcher( + searcher -> { + QueryCommand cmd = createBasicQueryCommand(NUM_DOCS / 2, 10, "field1_s", "foo"); + assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); + return null; + }); + + h.getCore() + .withSearcher( + searcher -> { + QueryCommand cmd = createBasicQueryCommand(1, 1, "field2_s", "1"); + assertMatchesGreaterThan(NUM_DOCS / 2, searcher, cmd); + return null; + }); } public void testHighMinExactCountGeneratesExactCount() throws IOException { - h.getCore().withSearcher(searcher -> { - QueryCommand cmd = createBasicQueryCommand(NUM_DOCS, 10, "field1_s", "foo"); - assertMatchesEqual(NUM_DOCS, searcher, cmd); - return null; - }); - - h.getCore().withSearcher(searcher -> { - QueryCommand cmd = createBasicQueryCommand(NUM_DOCS, 10, "field2_s", "1"); - assertMatchesEqual(NUM_DOCS/2, searcher, cmd); - return null; - }); + h.getCore() + .withSearcher( + searcher -> { + QueryCommand cmd = createBasicQueryCommand(NUM_DOCS, 10, "field1_s", "foo"); + assertMatchesEqual(NUM_DOCS, searcher, cmd); + return null; + }); + + h.getCore() + .withSearcher( + searcher -> { + QueryCommand cmd = createBasicQueryCommand(NUM_DOCS, 10, "field2_s", "1"); + assertMatchesEqual(NUM_DOCS / 2, searcher, cmd); + return null; + }); } - - public void testLowMinExactCountWithQueryResultCache() throws IOException { - h.getCore().withSearcher(searcher -> { - QueryCommand cmd = createBasicQueryCommand(NUM_DOCS / 2, 10, "field1_s", "foo"); - cmd.clearFlags(SolrIndexSearcher.NO_CHECK_QCACHE | SolrIndexSearcher.NO_SET_QCACHE); - searcher.search(new QueryResult(), cmd); - assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); - return null; - }); + h.getCore() + .withSearcher( + searcher -> { + QueryCommand cmd = createBasicQueryCommand(NUM_DOCS / 2, 10, "field1_s", "foo"); + cmd.clearFlags(SolrIndexSearcher.NO_CHECK_QCACHE | SolrIndexSearcher.NO_SET_QCACHE); + searcher.search(new QueryResult(), cmd); + 
assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); + return null; + }); } - + public void testHighMinExactCountWithQueryResultCache() throws IOException { - h.getCore().withSearcher(searcher -> { - QueryCommand cmd = createBasicQueryCommand(NUM_DOCS, 2, "field1_s", "foo"); - cmd.clearFlags(SolrIndexSearcher.NO_CHECK_QCACHE | SolrIndexSearcher.NO_SET_QCACHE); - searcher.search(new QueryResult(), cmd); - assertMatchesEqual(NUM_DOCS, searcher, cmd); - return null; - }); + h.getCore() + .withSearcher( + searcher -> { + QueryCommand cmd = createBasicQueryCommand(NUM_DOCS, 2, "field1_s", "foo"); + cmd.clearFlags(SolrIndexSearcher.NO_CHECK_QCACHE | SolrIndexSearcher.NO_SET_QCACHE); + searcher.search(new QueryResult(), cmd); + assertMatchesEqual(NUM_DOCS, searcher, cmd); + return null; + }); } - + public void testMinExactCountMoreRows() throws IOException { - h.getCore().withSearcher(searcher -> { - QueryCommand cmd = createBasicQueryCommand(2, NUM_DOCS, "field1_s", "foo"); - assertMatchesEqual(NUM_DOCS, searcher, cmd); - return null; - }); + h.getCore() + .withSearcher( + searcher -> { + QueryCommand cmd = createBasicQueryCommand(2, NUM_DOCS, "field1_s", "foo"); + assertMatchesEqual(NUM_DOCS, searcher, cmd); + return null; + }); } - + public void testMinExactCountMatchWithDocSet() throws IOException { - h.getCore().withSearcher(searcher -> { - QueryCommand cmd = createBasicQueryCommand(2, 2, "field1_s", "foo"); - assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); - - cmd.setNeedDocSet(true); - assertMatchesEqual(NUM_DOCS, searcher, cmd); - return null; - }); + h.getCore() + .withSearcher( + searcher -> { + QueryCommand cmd = createBasicQueryCommand(2, 2, "field1_s", "foo"); + assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); + + cmd.setNeedDocSet(true); + assertMatchesEqual(NUM_DOCS, searcher, cmd); + return null; + }); } - + public void testMinExactCountWithMaxScoreRequested() throws IOException { - h.getCore().withSearcher(searcher -> { - QueryCommand cmd = createBasicQueryCommand(2, 2, "field1_s", "foo"); - cmd.setFlags(SolrIndexSearcher.GET_SCORES); - QueryResult qr = assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); - assertNotEquals(Float.NaN, qr.getDocList().maxScore()); - return null; - }); + h.getCore() + .withSearcher( + searcher -> { + QueryCommand cmd = createBasicQueryCommand(2, 2, "field1_s", "foo"); + cmd.setFlags(SolrIndexSearcher.GET_SCORES); + QueryResult qr = assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); + assertNotEquals(Float.NaN, qr.getDocList().maxScore()); + return null; + }); } - + public void testReranking() throws Exception { float fixedScore = 1.23f; - for (boolean doFilter : new boolean[]{ false, true }) { - for (boolean doSort : new boolean[]{ false, true }) { - for (int getDocSetFlag : new int[]{ 0, SolrIndexSearcher.GET_DOCSET }) { - implTestReranking(doFilter, doSort, getDocSetFlag, null); // don't fix score i.e. no re-ranking - implTestReranking(doFilter, doSort, getDocSetFlag, fixedScore); // fix score to be non-zero and non-one + for (boolean doFilter : new boolean[] {false, true}) { + for (boolean doSort : new boolean[] {false, true}) { + for (int getDocSetFlag : new int[] {0, SolrIndexSearcher.GET_DOCSET}) { + implTestReranking( + doFilter, doSort, getDocSetFlag, null); // don't fix score i.e. 
no re-ranking + implTestReranking( + doFilter, doSort, getDocSetFlag, fixedScore); // fix score to be non-zero and non-one fixedScore *= 2; } } } } - private void implTestReranking(boolean doFilter, boolean doSort, int getDocSetFlag, Float fixedScore) throws Exception { - h.getCore().withSearcher(searcher -> { - - final QueryCommand cmd = new QueryCommand(); - cmd.setFlags(SolrIndexSearcher.GET_SCORES | getDocSetFlag); - - if (doSort) { - cmd.setSort(new Sort(SortField.FIELD_SCORE, new SortField("id", SortField.Type.STRING))); - } - - if (doFilter) { - cmd.setFilterList(new TermQuery(new Term("field4_t", Integer.toString(NUM_DOCS - 1)))); - } - - cmd.setQuery(new TermQuery(new Term("field1_s", "foo"))); - - final float expectedScore; - if (fixedScore == null) { - expectedScore = 1f; - } else { - expectedScore = fixedScore.floatValue(); - cmd.setQuery(new FixedScoreReRankQuery(cmd.getQuery(), expectedScore)); - } - - final QueryResult qr = new QueryResult(); - searcher.search(qr, cmd); - - // check score for the first document - final DocIterator iter = qr.getDocList().iterator(); - iter.next(); - assertEquals(expectedScore, iter.score(), 0); - - return null; - }); - + private void implTestReranking( + boolean doFilter, boolean doSort, int getDocSetFlag, Float fixedScore) throws Exception { + h.getCore() + .withSearcher( + searcher -> { + final QueryCommand cmd = new QueryCommand(); + cmd.setFlags(SolrIndexSearcher.GET_SCORES | getDocSetFlag); + + if (doSort) { + cmd.setSort( + new Sort(SortField.FIELD_SCORE, new SortField("id", SortField.Type.STRING))); + } + + if (doFilter) { + cmd.setFilterList( + new TermQuery(new Term("field4_t", Integer.toString(NUM_DOCS - 1)))); + } + + cmd.setQuery(new TermQuery(new Term("field1_s", "foo"))); + + final float expectedScore; + if (fixedScore == null) { + expectedScore = 1f; + } else { + expectedScore = fixedScore.floatValue(); + cmd.setQuery(new FixedScoreReRankQuery(cmd.getQuery(), expectedScore)); + } + + final QueryResult qr = new QueryResult(); + searcher.search(qr, cmd); + + // check score for the first document + final DocIterator iter = qr.getDocList().iterator(); + iter.next(); + assertEquals(expectedScore, iter.score(), 0); + + return null; + }); } private static final class FixedScoreReRankQuery extends RankQuery { private Query q; - final private float fixedScore; + private final float fixedScore; public FixedScoreReRankQuery(Query q, float fixedScore) { this.q = q; this.fixedScore = fixedScore; } - public Weight createWeight(IndexSearcher indexSearcher, ScoreMode scoreMode, float boost) throws IOException { + public Weight createWeight(IndexSearcher indexSearcher, ScoreMode scoreMode, float boost) + throws IOException { return q.createWeight(indexSearcher, scoreMode, boost); } @@ -281,21 +325,29 @@ public String toString(String field) { } @Override - public TopDocsCollector getTopDocsCollector(int len, QueryCommand cmd, IndexSearcher searcher) throws IOException { - return new ReRankCollector(len, len, new Rescorer() { - @Override - public TopDocs rescore(IndexSearcher searcher, TopDocs firstPassTopDocs, int topN) { - for (ScoreDoc scoreDoc : firstPassTopDocs.scoreDocs) { - scoreDoc.score = fixedScore; - } - return firstPassTopDocs; - } - - @Override - public Explanation explain(IndexSearcher searcher, Explanation firstPassExplanation, int docID) { - return firstPassExplanation; - } - }, cmd, searcher, null); + public TopDocsCollector getTopDocsCollector( + int len, QueryCommand cmd, IndexSearcher searcher) throws IOException { + return 
new ReRankCollector( + len, + len, + new Rescorer() { + @Override + public TopDocs rescore(IndexSearcher searcher, TopDocs firstPassTopDocs, int topN) { + for (ScoreDoc scoreDoc : firstPassTopDocs.scoreDocs) { + scoreDoc.score = fixedScore; + } + return firstPassTopDocs; + } + + @Override + public Explanation explain( + IndexSearcher searcher, Explanation firstPassExplanation, int docID) { + return firstPassExplanation; + } + }, + cmd, + searcher, + null); } @Override @@ -310,67 +362,79 @@ public RankQuery wrap(Query q) { } public void testMinExactWithFilters() throws Exception { - - h.getCore().withSearcher(searcher -> { - //Sanity Check - No Filter - QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0"); - assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); - return null; - }); - - - h.getCore().withSearcher(searcher -> { - QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0"); - Query filterQuery = new TermQuery(new Term("field4_t", Integer.toString(NUM_DOCS - 1))); - cmd.setFilterList(filterQuery); - assertNull(searcher.getProcessedFilter(null, cmd.getFilterList()).postFilter); - assertMatchesEqual(1, searcher, cmd); - return null; - }); + + h.getCore() + .withSearcher( + searcher -> { + // Sanity Check - No Filter + QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0"); + assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); + return null; + }); + + h.getCore() + .withSearcher( + searcher -> { + QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0"); + Query filterQuery = + new TermQuery(new Term("field4_t", Integer.toString(NUM_DOCS - 1))); + cmd.setFilterList(filterQuery); + assertNull(searcher.getProcessedFilter(null, cmd.getFilterList()).postFilter); + assertMatchesEqual(1, searcher, cmd); + return null; + }); } - + public void testMinExactWithPostFilters() throws Exception { - h.getCore().withSearcher(searcher -> { - //Sanity Check - No Filter - QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0"); - assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); - return null; - }); - - - h.getCore().withSearcher(searcher -> { - QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0"); - MockPostFilter filterQuery = new MockPostFilter(1, 101); - cmd.setFilterList(filterQuery); - assertNotNull(searcher.getProcessedFilter(null, cmd.getFilterList()).postFilter); - assertMatchesEqual(1, searcher, cmd); - return null; - }); - - h.getCore().withSearcher(searcher -> { - QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0"); - MockPostFilter filterQuery = new MockPostFilter(100, 101); - cmd.setFilterList(filterQuery); - assertNotNull(searcher.getProcessedFilter(null, cmd.getFilterList()).postFilter); - assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); - return null; - }); - + h.getCore() + .withSearcher( + searcher -> { + // Sanity Check - No Filter + QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0"); + assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); + return null; + }); + + h.getCore() + .withSearcher( + searcher -> { + QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0"); + MockPostFilter filterQuery = new MockPostFilter(1, 101); + cmd.setFilterList(filterQuery); + assertNotNull(searcher.getProcessedFilter(null, cmd.getFilterList()).postFilter); + assertMatchesEqual(1, searcher, cmd); + return null; + }); + + h.getCore() + .withSearcher( + searcher -> { + QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0"); + MockPostFilter filterQuery = new MockPostFilter(100, 101); + 
cmd.setFilterList(filterQuery); + assertNotNull(searcher.getProcessedFilter(null, cmd.getFilterList()).postFilter); + assertMatchesGreaterThan(NUM_DOCS, searcher, cmd); + return null; + }); } - + public void testMinExactWithPostFilterThatChangesScoreMode() throws Exception { - h.getCore().withSearcher(searcher -> { - QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0"); - // Use ScoreMode.COMPLETE for the PostFilter - MockPostFilter filterQuery = new MockPostFilter(NUM_DOCS * 10, 101, ScoreMode.COMPLETE); - cmd.setFilterList(filterQuery); - assertNotNull(searcher.getProcessedFilter(null, cmd.getFilterList()).postFilter); - assertMatchesEqual(NUM_DOCS, searcher, cmd); - return null; - }); + h.getCore() + .withSearcher( + searcher -> { + QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0"); + // Use ScoreMode.COMPLETE for the PostFilter + MockPostFilter filterQuery = + new MockPostFilter(NUM_DOCS * 10, 101, ScoreMode.COMPLETE); + cmd.setFilterList(filterQuery); + assertNotNull(searcher.getProcessedFilter(null, cmd.getFilterList()).postFilter); + assertMatchesEqual(NUM_DOCS, searcher, cmd); + return null; + }); } - private QueryCommand createBasicQueryCommand(int minExactCount, int length, String field, String q) { + private QueryCommand createBasicQueryCommand( + int minExactCount, int length, String field, String q) { QueryCommand cmd = new QueryCommand(); cmd.setMinExactCount(minExactCount); cmd.setLen(length); @@ -378,15 +442,15 @@ private QueryCommand createBasicQueryCommand(int minExactCount, int length, Stri cmd.setQuery(new TermQuery(new Term(field, q))); return cmd; } - - private final static class MockPostFilter extends TermQuery implements PostFilter { - + + private static final class MockPostFilter extends TermQuery implements PostFilter { + private final int cost; private final int maxDocsToCollect; private final ScoreMode scoreMode; - + public MockPostFilter(int maxDocsToCollect, int cost, ScoreMode scoreMode) { - super(new Term("foo", "bar"));//The term won't really be used. just the collector + super(new Term("foo", "bar")); // The term won't really be used. 
just the collector assert cost > 100; this.cost = cost; this.maxDocsToCollect = maxDocsToCollect; @@ -396,10 +460,12 @@ public MockPostFilter(int maxDocsToCollect, int cost, ScoreMode scoreMode) { public MockPostFilter(int maxDocsToCollect, int cost) { this(maxDocsToCollect, cost, null); } - + @Override - public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { - throw new UnsupportedOperationException("This class is only intended to be used as a PostFilter"); + public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) + throws IOException { + throw new UnsupportedOperationException( + "This class is only intended to be used as a PostFilter"); } @Override @@ -422,13 +488,14 @@ public void setCost(int cost) {} public DelegatingCollector getFilterCollector(IndexSearcher searcher) { return new DelegatingCollector() { private int collected = 0; + @Override public void collect(int doc) throws IOException { if (++collected <= maxDocsToCollect) { super.collect(doc); } } - + @Override public ScoreMode scoreMode() { if (scoreMode != null) { @@ -438,6 +505,5 @@ public ScoreMode scoreMode() { } }; } - } } diff --git a/solr/core/src/test/org/apache/solr/search/SortSpecParsingTest.java b/solr/core/src/test/org/apache/solr/search/SortSpecParsingTest.java index 25bcb7e1955..62074b3add7 100644 --- a/solr/core/src/test/org/apache/solr/search/SortSpecParsingTest.java +++ b/solr/core/src/test/org/apache/solr/search/SortSpecParsingTest.java @@ -15,6 +15,8 @@ * limitations under the License. */ package org.apache.solr.search; + +import java.util.List; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.solr.SolrTestCaseJ4; @@ -24,16 +26,11 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.util.List; - -/** - * - * - **/ +/** */ public class SortSpecParsingTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); } private static SortSpec doParseSortSpec(String sortSpec, SolrQueryRequest req) { @@ -51,7 +48,7 @@ public void testSort() throws Exception { SolrQueryRequest req = req(); sort = doParseSortSpec("score desc", req).getSort(); - assertNull("sort", sort);//only 1 thing in the list, no Sort specified + assertNull("sort", sort); // only 1 thing in the list, no Sort specified spec = doParseSortSpec("score desc", req); assertNotNull("spec", spec); @@ -97,7 +94,7 @@ public void testSort() throws Exception { assertEquals(flds[1].getType(), SortField.Type.LONG); assertEquals(flds[1].getField(), "bday"); assertEquals(flds[1].getReverse(), false); - //order aliases + // order aliases sort = doParseSortSpec("weight top,bday asc", req).getSort(); flds = sort.getSort(); assertEquals(flds[0].getType(), SortField.Type.FLOAT); @@ -115,50 +112,56 @@ public void testSort() throws Exception { assertEquals(flds[1].getField(), "bday"); assertEquals(flds[1].getReverse(), false); - //test weird spacing + // test weird spacing sort = doParseSortSpec("weight DESC, bday asc", req).getSort(); flds = sort.getSort(); assertEquals(flds[0].getType(), SortField.Type.FLOAT); assertEquals(flds[0].getField(), "weight"); assertEquals(flds[1].getField(), "bday"); assertEquals(flds[1].getType(), SortField.Type.LONG); - //handles trailing commas + // handles trailing commas sort = doParseSortSpec("weight desc,", req).getSort(); flds = sort.getSort(); 
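
(Aside, not part of the patch: a minimal sketch of the Lucene Sort that a spec such as "weight top,bday asc" resolves to, assuming "weight" is a float field and "bday" a long field as in the test schema, with "top" as Solr's legacy alias for "desc". The class name is hypothetical; the assertions that follow check exactly these type/field/reverse triples.)

import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;

public class SortSpecSketch {
  public static void main(String[] args) {
    // "weight top" => reverse=true (descending); "bday asc" => reverse=false.
    Sort sort =
        new Sort(
            new SortField("weight", SortField.Type.FLOAT, true),
            new SortField("bday", SortField.Type.LONG, false));
    System.out.println(sort);
  }
}
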
assertEquals(flds[0].getType(), SortField.Type.FLOAT); assertEquals(flds[0].getField(), "weight"); - //test functions + // test functions sort = SortSpecParsing.parseSortSpec("pow(weight, 2) desc", req).getSort(); flds = sort.getSort(); assertEquals(flds[0].getType(), SortField.Type.REWRITEABLE); - //Not thrilled about the fragility of string matching here, but... - //the value sources get wrapped, so the out field is different than the input + // Not thrilled about the fragility of string matching here, but... + // the value sources get wrapped, so the out field is different than the input assertEquals(flds[0].getField(), "pow(float(weight),const(2))"); - - //test functions (more deep) - sort = SortSpecParsing.parseSortSpec("sum(product(r_f1,sum(d_f1,t_f1,1.0)),a_f1) asc", req).getSort(); + + // test functions (more deep) + sort = + SortSpecParsing.parseSortSpec("sum(product(r_f1,sum(d_f1,t_f1,1.0)),a_f1) asc", req) + .getSort(); flds = sort.getSort(); assertEquals(flds[0].getType(), SortField.Type.REWRITEABLE); - assertEquals(flds[0].getField(), "sum(product(float(r_f1),sum(float(d_f1),float(t_f1),const(1.0))),float(a_f1))"); + assertEquals( + flds[0].getField(), + "sum(product(float(r_f1),sum(float(d_f1),float(t_f1),const(1.0))),float(a_f1))"); - sort = SortSpecParsing.parseSortSpec("pow(weight, 2.0) desc", req).getSort(); + sort = + SortSpecParsing.parseSortSpec("pow(weight, 2.0) desc", req) + .getSort(); flds = sort.getSort(); assertEquals(flds[0].getType(), SortField.Type.REWRITEABLE); - //Not thrilled about the fragility of string matching here, but... - //the value sources get wrapped, so the out field is different than the input + // Not thrilled about the fragility of string matching here, but... + // the value sources get wrapped, so the out field is different than the input assertEquals(flds[0].getField(), "pow(float(weight),const(2.0))"); - - spec = SortSpecParsing.parseSortSpec("pow(weight, 2.0) desc, weight desc, bday asc", req); + spec = + SortSpecParsing.parseSortSpec("pow(weight, 2.0) desc, weight desc, bday asc", req); flds = spec.getSort().getSort(); List schemaFlds = spec.getSchemaFields(); assertEquals(3, flds.length); assertEquals(3, schemaFlds.size()); assertEquals(flds[0].getType(), SortField.Type.REWRITEABLE); - //Not thrilled about the fragility of string matching here, but... - //the value sources get wrapped, so the out field is different than the input + // Not thrilled about the fragility of string matching here, but... 
+ // the value sources get wrapped, so the out field is different than the input assertEquals(flds[0].getField(), "pow(float(weight),const(2.0))"); assertNull(schemaFlds.get(0)); @@ -171,19 +174,21 @@ public void testSort() throws Exception { assertEquals(flds[2].getType(), SortField.Type.LONG); assertNotNull(schemaFlds.get(2)); assertEquals("bday", schemaFlds.get(2).getName()); - - //handles trailing commas + + // handles trailing commas sort = doParseSortSpec("weight desc,", req).getSort(); flds = sort.getSort(); assertEquals(flds[0].getType(), SortField.Type.FLOAT); assertEquals(flds[0].getField(), "weight"); - //Test literals in functions + // Test literals in functions sort = SortSpecParsing.parseSortSpec("strdist(foo_s1, \"junk\", jw) desc", req).getSort(); flds = sort.getSort(); assertEquals(flds[0].getType(), SortField.Type.REWRITEABLE); - //the value sources get wrapped, so the out field is different than the input - assertEquals(flds[0].getField(), "strdist(str(foo_s1),literal(junk), dist=org.apache.lucene.search.spell.JaroWinklerDistance)"); + // the value sources get wrapped, so the out field is different than the input + assertEquals( + flds[0].getField(), + "strdist(str(foo_s1),literal(junk), dist=org.apache.lucene.search.spell.JaroWinklerDistance)"); sort = doParseSortSpec("", req).getSort(); assertNull(sort); @@ -229,18 +234,18 @@ public void testBad() throws Exception { Sort sort; SolrQueryRequest req = req(); - //test some bad vals + // test some bad vals try { sort = doParseSortSpec("weight, desc", req).getSort(); assertTrue(false); } catch (SolrException e) { - //expected + // expected } try { sort = doParseSortSpec("w", req).getSort(); assertTrue(false); } catch (SolrException e) { - //expected + // expected } try { sort = doParseSortSpec("weight desc, bday", req).getSort(); @@ -249,21 +254,21 @@ public void testBad() throws Exception { } try { - //bad number of commas + // bad number of commas sort = SortSpecParsing.parseSortSpec("pow(weight,,2) desc, bday asc", req).getSort(); assertTrue(false); } catch (SolrException e) { } try { - //bad function + // bad function sort = SortSpecParsing.parseSortSpec("pow() desc, bday asc", req).getSort(); assertTrue(false); } catch (SolrException e) { } try { - //bad number of parens + // bad number of parens sort = SortSpecParsing.parseSortSpec("pow((weight,2) desc, bday asc", req).getSort(); assertTrue(false); } catch (SolrException e) { diff --git a/solr/core/src/test/org/apache/solr/search/SpatialFilterTest.java b/solr/core/src/test/org/apache/solr/search/SpatialFilterTest.java index 84b7d1f74b0..aa5c4114a8f 100644 --- a/solr/core/src/test/org/apache/solr/search/SpatialFilterTest.java +++ b/solr/core/src/test/org/apache/solr/search/SpatialFilterTest.java @@ -15,15 +15,12 @@ * limitations under the License. 
*/ package org.apache.solr.search; + import org.apache.solr.SolrTestCaseJ4; import org.junit.BeforeClass; import org.junit.Test; - -/** - * - * - **/ +/** */ public class SpatialFilterTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { @@ -47,18 +44,18 @@ private void setupDocs(String fieldName) { assertU(adoc("id", "13", fieldName, "-89.9,-130")); assertU(commit()); } - + @Test public void testPoints() throws Exception { String fieldName = "home"; setupDocs(fieldName); - //Try some edge cases + // Try some edge cases checkHits(fieldName, "1,1", 100, 5, 3, 4, 5, 6, 7); checkHits(fieldName, "0,179.8", 200, 5, 3, 4, 8, 10, 12); checkHits(fieldName, "89.8, 50", 200, 9); - //try some normal cases + // try some normal cases checkHits(fieldName, "33.0,-80.0", 300, 12); - //large distance + // large distance checkHits(fieldName, "33.0,-80.0", 5000, 13); } @@ -66,24 +63,24 @@ public void testPoints() throws Exception { public void testLatLonType() throws Exception { String fieldName = "store"; setupDocs(fieldName); - //Try some edge cases + // Try some edge cases checkHits(fieldName, "1,1", 175, 3, 5, 6, 7); checkHits(fieldName, "0,179.8", 200, 2, 8, 9); - checkHits(fieldName, "89.8, 50", 200, 2, 10, 11);//this goes over the north pole - checkHits(fieldName, "-89.8, 50", 200, 2, 12, 13);//this goes over the south pole - //try some normal cases + checkHits(fieldName, "89.8, 50", 200, 2, 10, 11); // this goes over the north pole + checkHits(fieldName, "-89.8, 50", 200, 2, 12, 13); // this goes over the south pole + // try some normal cases checkHits(fieldName, "33.0,-80.0", 300, 2); - //large distance + // large distance checkHits(fieldName, "1,1", 5000, 3, 5, 6, 7); - //Because we are generating a box based on the west/east longitudes and the south/north latitudes, which then - //translates to a range query, which is slightly more inclusive. Thus, even though 0.0 is 15.725 kms away, - //it will be included, b/c of the box calculation. + // Because we are generating a box based on the west/east longitudes and the south/north + // latitudes, which then translates to a range query, which is slightly more inclusive. Thus, + // even though 0.0 is 15.725 kms away, it will be included, b/c of the box calculation. 
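
(Aside, not part of the patch: a worked check of the bounding-box comment above, using a minimal haversine sketch. The class name and the mean earth radius of 6371.0087 km are assumptions for illustration. It shows (0,0) lying roughly 15.7 km from (0.1,0.1), outside the 15 km radius, yet inside the roughly +/-0.135 degree lat/lon box that a 15 km radius expands to, which is why the range-query filter still matches it.)

public class BoundingBoxVsHaversine {
  // Mean earth radius in km; an assumption for this sketch.
  static final double EARTH_RADIUS_KM = 6371.0087;

  static double haversineKm(double lat1, double lon1, double lat2, double lon2) {
    double dLat = Math.toRadians(lat2 - lat1);
    double dLon = Math.toRadians(lon2 - lon1);
    double a =
        Math.pow(Math.sin(dLat / 2), 2)
            + Math.cos(Math.toRadians(lat1))
                * Math.cos(Math.toRadians(lat2))
                * Math.pow(Math.sin(dLon / 2), 2);
    return 2 * EARTH_RADIUS_KM * Math.asin(Math.sqrt(a));
  }

  public static void main(String[] args) {
    // Prints roughly 15.7, close to the "15.725 kms" cited in the comment above;
    // the small difference comes from the choice of earth radius.
    System.out.println(haversineKm(0.1, 0.1, 0.0, 0.0));
  }
}
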
checkHits(fieldName, false, "0.1,0.1", 15, 2, 5, 6); - //try some more + // try some more clearIndex(); assertU(adoc("id", "14", fieldName, "0,5")); assertU(adoc("id", "15", fieldName, "0,15")); - //3000KM from 0,0, see http://www.movable-type.co.uk/scripts/latlong.html + // 3000KM from 0,0, see http://www.movable-type.co.uk/scripts/latlong.html assertU(adoc("id", "16", fieldName, "18.71111,19.79750")); assertU(adoc("id", "17", fieldName, "44.043900,-95.436643")); assertU(commit()); @@ -94,35 +91,35 @@ public void testLatLonType() throws Exception { checkHits(fieldName, "0,0", 3001, 3, 14, 15, 16); checkHits(fieldName, "0,0", 3000.1, 3, 14, 15, 16); - //really fine grained distance and reflects some of the vagaries of how we are calculating the box + // really fine grained distance and reflects some of the vagaries of how we are calculating the + // box checkHits(fieldName, "43.517030,-96.789603", 109, 0); - // falls outside of the real distance, but inside the bounding box + // falls outside of the real distance, but inside the bounding box checkHits(fieldName, true, "43.517030,-96.789603", 110, 0); checkHits(fieldName, false, "43.517030,-96.789603", 110, 1, 17); - - + // Tests SOLR-2829 String fieldNameHome = "home"; String fieldNameWork = "work"; clearIndex(); - assertU(adoc("id", "1", fieldNameHome, "52.67,7.30", fieldNameWork,"48.60,11.61")); + assertU(adoc("id", "1", fieldNameHome, "52.67,7.30", fieldNameWork, "48.60,11.61")); assertU(commit()); checkHits(fieldNameHome, "52.67,7.30", 1, 1); checkHits(fieldNameWork, "48.60,11.61", 1, 1); checkHits(fieldNameWork, "52.67,7.30", 1, 0); checkHits(fieldNameHome, "48.60,11.61", 1, 0); - } - private void checkHits(String fieldName, String pt, double distance, int count, int ... docIds) { + private void checkHits(String fieldName, String pt, double distance, int count, int... docIds) { checkHits(fieldName, true, pt, distance, count, docIds); } - private void checkHits(String fieldName, boolean exact, String pt, double distance, int count, int ... docIds) { - String [] tests = new String[docIds != null && docIds.length > 0 ? docIds.length + 1 : 1]; + private void checkHits( + String fieldName, boolean exact, String pt, double distance, int count, int... docIds) { + String[] tests = new String[docIds != null && docIds.length > 0 ? docIds.length + 1 : 1]; tests[0] = "*[count(//doc)=" + count + "]"; if (docIds != null && docIds.length > 0) { int i = 1; @@ -135,58 +132,97 @@ private void checkHits(String fieldName, boolean exact, String pt, double distan int postFilterCount = DelegatingCollector.setLastDelegateCount; // throw in a random into the main query to prevent most cache hits - assertQ(req("fl", "id", "q","*:* OR foo_i:" + random().nextInt(100), "rows", "1000", "fq", "{!"+method+" sfield=" +fieldName +"}", - "pt", pt, "d", String.valueOf(distance)), - tests); - assertEquals(postFilterCount, DelegatingCollector.setLastDelegateCount); // post filtering shouldn't be used - + assertQ( + req( + "fl", + "id", + "q", + "*:* OR foo_i:" + random().nextInt(100), + "rows", + "1000", + "fq", + "{!" 
+ method + " sfield=" + fieldName + "}", + "pt", + pt, + "d", + String.valueOf(distance)), + tests); + assertEquals( + postFilterCount, + DelegatingCollector.setLastDelegateCount); // post filtering shouldn't be used + // try uncached - assertQ(req("fl", "id", "q","*:* OR foo_i:" + random().nextInt(100), "rows", "1000", "fq", "{!"+method+" sfield=" +fieldName + " cache=false" + "}", - "pt", pt, "d", String.valueOf(distance)), + assertQ( + req( + "fl", + "id", + "q", + "*:* OR foo_i:" + random().nextInt(100), + "rows", + "1000", + "fq", + "{!" + method + " sfield=" + fieldName + " cache=false" + "}", + "pt", + pt, + "d", + String.valueOf(distance)), tests); - assertEquals(postFilterCount, DelegatingCollector.setLastDelegateCount); // post filtering shouldn't be used + assertEquals( + postFilterCount, + DelegatingCollector.setLastDelegateCount); // post filtering shouldn't be used // try post filtered for fields that support it if (fieldName.endsWith("ll")) { - assertQ(req("fl", "id", "q","*:* OR foo_i:" + random().nextInt(100)+100, "rows", "1000", "fq", "{!"+method+" sfield=" +fieldName + " cache=false cost=150" + "}", - "pt", pt, "d", String.valueOf(distance)), - tests); - assertEquals(postFilterCount + 1, DelegatingCollector.setLastDelegateCount); // post filtering *should* have been used - + assertQ( + req( + "fl", + "id", + "q", + "*:* OR foo_i:" + random().nextInt(100) + 100, + "rows", + "1000", + "fq", + "{!" + method + " sfield=" + fieldName + " cache=false cost=150" + "}", + "pt", + pt, + "d", + String.valueOf(distance)), + tests); + assertEquals( + postFilterCount + 1, + DelegatingCollector.setLastDelegateCount); // post filtering *should* have been used } } - - } /*public void testSpatialQParser() throws Exception { - ModifiableSolrParams local = new ModifiableSolrParams(); - local.add(CommonParams.FL, "home"); - ModifiableSolrParams params = new ModifiableSolrParams(); - params.add(SpatialParams.POINT, "5.0,5.0"); - params.add(SpatialParams.DISTANCE, "3"); - SolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), "", "", 0, 10, new HashMap()); - SpatialFilterQParserPlugin parserPlugin; - Query query; - - parserPlugin = new SpatialFilterQParserPlugin(); - QParser parser = parserPlugin.createParser("'foo'", local, params, req); - query = parser.parse(); - assertNotNull("Query is null", query); - assertTrue("query is not an instanceof " - + BooleanQuery.class, - query instanceof BooleanQuery); - local = new ModifiableSolrParams(); - local.add(CommonParams.FL, "x"); - params = new ModifiableSolrParams(); - params.add(SpatialParams.POINT, "5.0"); - params.add(SpatialParams.DISTANCE, "3"); - req = new LocalSolrQueryRequest(h.getCore(), "", "", 0, 10, new HashMap()); - parser = parserPlugin.createParser("'foo'", local, params, req); - query = parser.parse(); - assertNotNull("Query is null", query); - assertTrue(query.getClass() + " is not an instanceof " - + LegacyNumericRangeQuery.class, - query instanceof LegacyNumericRangeQuery); - req.close(); - }*/ + ModifiableSolrParams local = new ModifiableSolrParams(); + local.add(CommonParams.FL, "home"); + ModifiableSolrParams params = new ModifiableSolrParams(); + params.add(SpatialParams.POINT, "5.0,5.0"); + params.add(SpatialParams.DISTANCE, "3"); + SolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), "", "", 0, 10, new HashMap()); + SpatialFilterQParserPlugin parserPlugin; + Query query; + + parserPlugin = new SpatialFilterQParserPlugin(); + QParser parser = parserPlugin.createParser("'foo'", local, params, req); + query = 
parser.parse(); + assertNotNull("Query is null", query); + assertTrue("query is not an instanceof " + + BooleanQuery.class, + query instanceof BooleanQuery); + local = new ModifiableSolrParams(); + local.add(CommonParams.FL, "x"); + params = new ModifiableSolrParams(); + params.add(SpatialParams.POINT, "5.0"); + params.add(SpatialParams.DISTANCE, "3"); + req = new LocalSolrQueryRequest(h.getCore(), "", "", 0, 10, new HashMap()); + parser = parserPlugin.createParser("'foo'", local, params, req); + query = parser.parse(); + assertNotNull("Query is null", query); + assertTrue(query.getClass() + " is not an instanceof " + + LegacyNumericRangeQuery.class, + query instanceof LegacyNumericRangeQuery); + req.close(); + }*/ diff --git a/solr/core/src/test/org/apache/solr/search/TestAddFieldRealTimeGet.java b/solr/core/src/test/org/apache/solr/search/TestAddFieldRealTimeGet.java index aaa54406b8f..81f67c387ba 100644 --- a/solr/core/src/test/org/apache/solr/search/TestAddFieldRealTimeGet.java +++ b/solr/core/src/test/org/apache/solr/search/TestAddFieldRealTimeGet.java @@ -18,7 +18,6 @@ import java.io.File; import java.util.Collections; - import org.apache.commons.io.FileUtils; import org.apache.solr.schema.IndexSchema; import org.apache.solr.schema.SchemaField; @@ -42,7 +41,8 @@ private void initManagedSchemaCore() throws Exception { final String schemaFileName = "schema-id-and-version-fields-only.xml"; FileUtils.copyFileToDirectory(new File(testHomeConfDir, configFileName), tmpConfDir); FileUtils.copyFileToDirectory(new File(testHomeConfDir, schemaFileName), tmpConfDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "solrconfig.snippet.randomindexconfig.xml"), tmpConfDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "solrconfig.snippet.randomindexconfig.xml"), tmpConfDir); // initCore will trigger an upgrade to managed schema, since the solrconfig has // @@ -60,31 +60,38 @@ public void test() throws Exception { String newFieldValue = "xyz"; ignoreException("unknown field"); - assertFailedU("Should fail due to unknown field '" + newFieldName + "'", - adoc("id", "1", newFieldName, newFieldValue)); + assertFailedU( + "Should fail due to unknown field '" + newFieldName + "'", + adoc("id", "1", newFieldName, newFieldValue)); unIgnoreException("unknown field"); IndexSchema schema = h.getCore().getLatestSchema(); SchemaField newField = schema.newField(newFieldName, newFieldType, Collections.emptyMap()); IndexSchema newSchema = schema.addField(newField); h.getCore().setLatestSchema(newSchema); - - String newFieldKeyValue = "'" + newFieldName + "':'" + newFieldValue + "'"; + + String newFieldKeyValue = "'" + newFieldName + "':'" + newFieldValue + "'"; assertU(adoc("id", "1", newFieldName, newFieldValue)); - assertJQ(req("q","id:1"), - "/response/numFound==0"); - assertJQ(req("qt","/get", "id","1", "fl","id,"+newFieldName), - "=={'doc':{'id':'1'," + newFieldKeyValue + "}}"); - assertJQ(req("qt","/get","ids","1", "fl","id,"+newFieldName), - "=={'response':{'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'1'," + newFieldKeyValue + "}]}}"); + assertJQ(req("q", "id:1"), "/response/numFound==0"); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id," + newFieldName), + "=={'doc':{'id':'1'," + newFieldKeyValue + "}}"); + assertJQ( + req("qt", "/get", "ids", "1", "fl", "id," + newFieldName), + "=={'response':{'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'1'," + + newFieldKeyValue + + "}]}}"); assertU(commit()); - assertJQ(req("q","id:1"), - 
"/response/numFound==1"); - assertJQ(req("qt","/get", "id","1", "fl","id,"+newFieldName), + assertJQ(req("q", "id:1"), "/response/numFound==1"); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id," + newFieldName), "=={'doc':{'id':'1'," + newFieldKeyValue + "}}"); - assertJQ(req("qt","/get","ids","1", "fl","id,"+newFieldName), - "=={'response':{'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'1'," + newFieldKeyValue + "}]}}"); + assertJQ( + req("qt", "/get", "ids", "1", "fl", "id," + newFieldName), + "=={'response':{'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'1'," + + newFieldKeyValue + + "}]}}"); } } diff --git a/solr/core/src/test/org/apache/solr/search/TestBlockCollapse.java b/solr/core/src/test/org/apache/solr/search/TestBlockCollapse.java index ba15351cf1e..7548b527304 100644 --- a/solr/core/src/test/org/apache/solr/search/TestBlockCollapse.java +++ b/solr/core/src/test/org/apache/solr/search/TestBlockCollapse.java @@ -16,33 +16,31 @@ */ package org.apache.solr.search; +import static org.hamcrest.CoreMatchers.instanceOf; + import java.util.Arrays; import java.util.Collections; import java.util.List; -import java.util.stream.Stream; import java.util.stream.Collectors; - +import java.util.stream.Stream; import org.apache.lucene.search.Query; - import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrException; +import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.params.SolrParams; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrRequestInfo; import org.apache.solr.response.SolrQueryResponse; - import org.junit.After; import org.junit.BeforeClass; -import static org.hamcrest.CoreMatchers.instanceOf; - /** Test collapse functionality with hierarchical documents using 'block collapse' */ public class TestBlockCollapse extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); initCore("solrconfig-collapseqparser.xml", "schema15.xml"); } @@ -53,442 +51,561 @@ public void cleanup() throws Exception { } public void testPostFilterIntrospection() throws Exception { - final List fieldValueSelectors = Arrays.asList("sort='bar_i asc'", - "min=bar_i", - "max=bar_i", - "min='sum(bar_i, 42)'", - "max='sum(bar_i, 42)'"); - for (SolrParams p : Arrays.asList(params(), - // QEC boosting shouldn't impact what impl we get in any situation - params("qt", "/elevate", "elevateIds", "42"))) { - + final List fieldValueSelectors = + Arrays.asList( + "sort='bar_i asc'", + "min=bar_i", + "max=bar_i", + "min='sum(bar_i, 42)'", + "max='sum(bar_i, 42)'"); + for (SolrParams p : + Arrays.asList( + params(), + // QEC boosting shouldn't impact what impl we get in any situation + params("qt", "/elevate", "elevateIds", "42"))) { + try (SolrQueryRequest req = req()) { // non-block based collapse sitautions, regardless of nullPolicy... - for (String np : Arrays.asList("", " nullPolicy=ignore", " nullPolicy=expand", " nullPolicy=collapse", - // when policy is 'collapse' hint should be ignored... 
- " nullPolicy=collapse hint=block")) { - assertThat(parseAndBuildCollector("{!collapse field=foo_s1"+np+"}", req), - instanceOf(CollapsingQParserPlugin.OrdScoreCollector.class)); - assertThat(parseAndBuildCollector("{!collapse field=foo_i"+np+"}", req), - instanceOf(CollapsingQParserPlugin.IntScoreCollector.class)); + for (String np : + Arrays.asList( + "", + " nullPolicy=ignore", + " nullPolicy=expand", + " nullPolicy=collapse", + // when policy is 'collapse' hint should be ignored... + " nullPolicy=collapse hint=block")) { + assertThat( + parseAndBuildCollector("{!collapse field=foo_s1" + np + "}", req), + instanceOf(CollapsingQParserPlugin.OrdScoreCollector.class)); + assertThat( + parseAndBuildCollector("{!collapse field=foo_i" + np + "}", req), + instanceOf(CollapsingQParserPlugin.IntScoreCollector.class)); for (String selector : fieldValueSelectors) { - assertThat(parseAndBuildCollector("{!collapse field=foo_s1 " + selector + np + "}", req), - instanceOf(CollapsingQParserPlugin.OrdFieldValueCollector.class)); + assertThat( + parseAndBuildCollector("{!collapse field=foo_s1 " + selector + np + "}", req), + instanceOf(CollapsingQParserPlugin.OrdFieldValueCollector.class)); } for (String selector : fieldValueSelectors) { - assertThat(parseAndBuildCollector("{!collapse field=foo_i " + selector + np + "}", req), - instanceOf(CollapsingQParserPlugin.IntFieldValueCollector.class)); + assertThat( + parseAndBuildCollector("{!collapse field=foo_i " + selector + np + "}", req), + instanceOf(CollapsingQParserPlugin.IntFieldValueCollector.class)); } - + // anything with cscore() is (currently) off limits regardless of null policy or hint... - for (String selector : Arrays.asList(" min=sum(42,cscore())", - " max=cscore()")) { + for (String selector : Arrays.asList(" min=sum(42,cscore())", " max=cscore()")) { for (String hint : Arrays.asList("", " hint=block")) { - assertThat(parseAndBuildCollector("{!collapse field=_root_" + selector + np + hint + "}", req), - instanceOf(CollapsingQParserPlugin.OrdFieldValueCollector.class)); - assertThat(parseAndBuildCollector("{!collapse field=foo_s1" + selector + np + hint + "}", req), - instanceOf(CollapsingQParserPlugin.OrdFieldValueCollector.class)); - assertThat(parseAndBuildCollector("{!collapse field=foo_i" + selector + np + hint + "}", req), - instanceOf(CollapsingQParserPlugin.IntFieldValueCollector.class)); + assertThat( + parseAndBuildCollector( + "{!collapse field=_root_" + selector + np + hint + "}", req), + instanceOf(CollapsingQParserPlugin.OrdFieldValueCollector.class)); + assertThat( + parseAndBuildCollector( + "{!collapse field=foo_s1" + selector + np + hint + "}", req), + instanceOf(CollapsingQParserPlugin.OrdFieldValueCollector.class)); + assertThat( + parseAndBuildCollector( + "{!collapse field=foo_i" + selector + np + hint + "}", req), + instanceOf(CollapsingQParserPlugin.IntFieldValueCollector.class)); } } } - + // block based collectors as long as nullPolicy isn't collapse... 
for (String np : Arrays.asList("", " nullPolicy=ignore", " nullPolicy=expand")) { - assertThat(parseAndBuildCollector("{!collapse field=_root_"+np+"}", req), // implicit block collection on _root_ - instanceOf(CollapsingQParserPlugin.BlockOrdScoreCollector.class)); - assertThat(parseAndBuildCollector("{!collapse field=_root_ hint=top_fc"+np+"}", req), // top_fc shouldn't stop implicit block collection - instanceOf(CollapsingQParserPlugin.BlockOrdScoreCollector.class)); - assertThat(parseAndBuildCollector("{!collapse field=foo_s1 hint=block"+np+"}", req), - instanceOf(CollapsingQParserPlugin.BlockOrdScoreCollector.class)); - assertThat(parseAndBuildCollector("{!collapse field=foo_i hint=block"+np+"}", req), - instanceOf(CollapsingQParserPlugin.BlockIntScoreCollector.class)); + assertThat( + parseAndBuildCollector( + "{!collapse field=_root_" + np + "}", req), // implicit block collection on _root_ + instanceOf(CollapsingQParserPlugin.BlockOrdScoreCollector.class)); + assertThat( + parseAndBuildCollector( + "{!collapse field=_root_ hint=top_fc" + np + "}", + req), // top_fc shouldn't stop implicit block collection + instanceOf(CollapsingQParserPlugin.BlockOrdScoreCollector.class)); + assertThat( + parseAndBuildCollector("{!collapse field=foo_s1 hint=block" + np + "}", req), + instanceOf(CollapsingQParserPlugin.BlockOrdScoreCollector.class)); + assertThat( + parseAndBuildCollector("{!collapse field=foo_i hint=block" + np + "}", req), + instanceOf(CollapsingQParserPlugin.BlockIntScoreCollector.class)); for (String selector : fieldValueSelectors) { - assertThat(parseAndBuildCollector("{!collapse field=foo_s1 hint=block " + selector + np + "}", req), - instanceOf(CollapsingQParserPlugin.BlockOrdSortSpecCollector.class)); + assertThat( + parseAndBuildCollector( + "{!collapse field=foo_s1 hint=block " + selector + np + "}", req), + instanceOf(CollapsingQParserPlugin.BlockOrdSortSpecCollector.class)); } for (String selector : fieldValueSelectors) { - assertThat(parseAndBuildCollector("{!collapse field=foo_i hint=block " + selector + np + "}", req), - instanceOf(CollapsingQParserPlugin.BlockIntSortSpecCollector.class)); + assertThat( + parseAndBuildCollector( + "{!collapse field=foo_i hint=block " + selector + np + "}", req), + instanceOf(CollapsingQParserPlugin.BlockIntSortSpecCollector.class)); } } - } } - } - - /** - * Helper method for introspection testing + + /** + * Helper method for introspection testing + * * @see #testPostFilterIntrospection */ - private DelegatingCollector parseAndBuildCollector(final String input, final SolrQueryRequest req) throws Exception { + private DelegatingCollector parseAndBuildCollector(final String input, final SolrQueryRequest req) + throws Exception { try { final SolrQueryResponse rsp = new SolrQueryResponse(); - SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req,rsp)); - + SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp)); + final Query q = QParser.getParser(input, "lucene", true, req).getQuery(); assertTrue("Not a PostFilter: " + input, q instanceof PostFilter); - return ((PostFilter)q).getFilterCollector(req.getSearcher()); + return ((PostFilter) q).getFilterCollector(req.getSearcher()); } finally { SolrRequestInfo.clearRequestInfo(); } } - public void testEmptyIndex() throws Exception { // some simple sanity checks that collapse queries against empty indexes don't match any docs // (or throw any errors) - + doTestEmptyIndex(); - - assertU(adoc(dupFields(sdoc("id", "p1", - "block_i", 1, - "skus", sdocs(dupFields(sdoc("id", "p1s1", 
"block_i", 1, "txt_t", "a b c d e ", "num_i", 42)), - dupFields(sdoc("id", "p1s2", "block_i", 1, "txt_t", "a XX c d e ", "num_i", 10)), - dupFields(sdoc("id", "p1s3", "block_i", 1, "txt_t", "XX b XX XX e ", "num_i", 777)), - dupFields(sdoc("id", "p1s4", "block_i", 1, "txt_t", "a XX c d XX", "num_i", 6)) - ))))); + + assertU( + adoc( + dupFields( + sdoc( + "id", + "p1", + "block_i", + 1, + "skus", + sdocs( + dupFields( + sdoc( + "id", + "p1s1", + "block_i", + 1, + "txt_t", + "a b c d e ", + "num_i", + 42)), + dupFields( + sdoc( + "id", + "p1s2", + "block_i", + 1, + "txt_t", + "a XX c d e ", + "num_i", + 10)), + dupFields( + sdoc( + "id", + "p1s3", + "block_i", + 1, + "txt_t", + "XX b XX XX e ", + "num_i", + 777)), + dupFields( + sdoc( + "id", + "p1s4", + "block_i", + 1, + "txt_t", + "a XX c d XX", + "num_i", + 6))))))); assertU(commit()); assertU(delQ("_root_:p1")); // avoid *:* so we don't get low level deleteAll optimization assertU(commit()); - + doTestEmptyIndex(); - + clearIndex(); assertU(commit()); doTestEmptyIndex(); } - - /** @see #testEmptyIndex */ + + /** + * @see #testEmptyIndex + */ private void doTestEmptyIndex() throws Exception { - for (String opt : Arrays.asList(// no block collapse logic used (sanity checks) - "field=block_s1", - "field=block_i", - // block collapse used implicitly (ord) - "field=_root_", - "field=_root_ hint=top_fc", // top_fc hint shouldn't matter - // block collapse used explicitly (ord) - "field=_root_ hint=block", - "field=block_s1 hint=block", - // block collapse used explicitly (int) - "field=block_i hint=block" - )) { - for (String nullPolicy : Arrays.asList("", // ignore is default - " nullPolicy=ignore", - " nullPolicy=expand")) { + for (String opt : + Arrays.asList( // no block collapse logic used (sanity checks) + "field=block_s1", + "field=block_i", + // block collapse used implicitly (ord) + "field=_root_", + "field=_root_ hint=top_fc", // top_fc hint shouldn't matter + // block collapse used explicitly (ord) + "field=_root_ hint=block", + "field=block_s1 hint=block", + // block collapse used explicitly (int) + "field=block_i hint=block")) { + for (String nullPolicy : + Arrays.asList( + "", // ignore is default + " nullPolicy=ignore", + " nullPolicy=expand")) { for (String suffix : SELECTOR_FIELD_SUFFIXES) { - for (String headSelector : Arrays.asList("", // score is default - " max=asc" + suffix, - " min=desc" + suffix, - " sort='asc" + suffix + " desc'", - " sort='desc" +suffix + " asc'", - " max=sum(42,asc" + suffix + ")", - " min=sum(42,desc" + suffix + ")", - " max=sub(0,desc" + suffix + ")", - " min=sub(0,asc" + suffix + ")")) { - + for (String headSelector : + Arrays.asList( + "", // score is default + " max=asc" + suffix, + " min=desc" + suffix, + " sort='asc" + suffix + " desc'", + " sort='desc" + suffix + " asc'", + " max=sum(42,asc" + suffix + ")", + " min=sum(42,desc" + suffix + ")", + " max=sub(0,desc" + suffix + ")", + " min=sub(0,asc" + suffix + ")")) { + if (headSelector.endsWith("_l") && opt.endsWith("_i")) { // NOTE: this limitation doesn't apply to block collapse on int, // so we only check 'opt.endsWith' (if ends with block hint we're ok) - assertQEx("expected known limitation of using long for min/max selector when doing numeric collapse", - "min/max must be Int or Float", - req("q", "*:*", - "fq", "{!collapse " + opt + nullPolicy + headSelector + "}"), - SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "expected known limitation of using long for min/max selector when doing numeric collapse", + "min/max must be Int or 
Float", + req("q", "*:*", "fq", "{!collapse " + opt + nullPolicy + headSelector + "}"), + SolrException.ErrorCode.BAD_REQUEST); continue; } - assertQ(req("q", "*:*", - "fq", "{!collapse " + opt + nullPolicy + headSelector + "}", - "sort", "score desc, num_i asc") - , "*[count(//doc)=0]" - ); + assertQ( + req( + "q", "*:*", + "fq", "{!collapse " + opt + nullPolicy + headSelector + "}", + "sort", "score desc, num_i asc"), + "*[count(//doc)=0]"); } } } } } - - + public void testSimple() throws Exception { - - { // convert our docs to update commands, along with some commits, in a shuffled order and process all of them... - final List updates = Stream.concat(Stream.of(commit()), - makeBlockDocs().stream().map(doc -> adoc(doc))).collect(Collectors.toList()); + + { // convert our docs to update commands, along with some commits, in a shuffled order and + // process all of them... + final List updates = + Stream.concat(Stream.of(commit()), makeBlockDocs().stream().map(doc -> adoc(doc))) + .collect(Collectors.toList()); Collections.shuffle(updates, random()); for (String u : updates) { assertU(u); } assertU(commit()); } - - for (String opt : Arrays.asList(// no block collapse logic used (sanity checks) - "field=block_s1", - "field=block_i", - // block collapse used implicitly (ord) - "field=_root_", - "field=_root_ hint=top_fc", // top_fc hint shouldn't matter - // block collapse used explicitly (ord) - "field=_root_ hint=block", - "field=block_s1 hint=block", - // block collapse used explicitly (int) - "field=block_i hint=block" - )) { - - { // score based group head selection (default) - - // these permutations should all give the same results, since the queries don't match any docs in 'null' groups - // (because we don't have any in our index)... - for (String nullPolicy : Arrays.asList("", // ignore is default - " nullPolicy=ignore", - " nullPolicy=expand")) { - for (String q : Arrays.asList("txt_t:XX", // only child docs with XX match - "txt_t:* txt_t:XX", // all child docs match - "*:* txt_t:XX")) { // all docs match + + for (String opt : + Arrays.asList( // no block collapse logic used (sanity checks) + "field=block_s1", + "field=block_i", + // block collapse used implicitly (ord) + "field=_root_", + "field=_root_ hint=top_fc", // top_fc hint shouldn't matter + // block collapse used explicitly (ord) + "field=_root_ hint=block", + "field=block_s1 hint=block", + // block collapse used explicitly (int) + "field=block_i hint=block")) { + + { + // score based group head selection (default) these permutations should all give the same + // results, since the queries don't match any docs in 'null' groups (because we don't have + // any in our index)... + for (String nullPolicy : + Arrays.asList( + "", // ignore is default + " nullPolicy=ignore", + " nullPolicy=expand")) { + for (String q : + Arrays.asList( + "txt_t:XX", // only child docs with XX match + "txt_t:* txt_t:XX", // all child docs match + "*:* txt_t:XX")) { // all docs match // single score based collapse... 
- assertQ(req("q", q, - "fq", "{!collapse " + opt + nullPolicy + "}", - "sort", "score desc, num_i asc") - , "*[count(//doc)=3]" - , "//result/doc[1]/str[@name='id'][.='p2s4']" - , "//result/doc[2]/str[@name='id'][.='p3s1']" - , "//result/doc[3]/str[@name='id'][.='p1s3']" - ); + assertQ( + req( + "q", + q, + "fq", + "{!collapse " + opt + nullPolicy + "}", + "sort", + "score desc, num_i asc"), + "*[count(//doc)=3]", + "//result/doc[1]/str[@name='id'][.='p2s4']", + "//result/doc[2]/str[@name='id'][.='p3s1']", + "//result/doc[3]/str[@name='id'][.='p1s3']"); // same query, but boosting a diff p1 sku to change group head (and result order) - assertQ(req("q", q, - "qt", "/elevate", - "elevateIds", "p1s1", - "fq", "{!collapse " + opt + nullPolicy + "}", - "sort", "score desc, num_i asc") - , "*[count(//doc)=3]" - , "//result/doc[1]/str[@name='id'][.='p1s1']" - , "//result/doc[2]/str[@name='id'][.='p2s4']" - , "//result/doc[3]/str[@name='id'][.='p3s1']" - ); - + assertQ( + req( + "q", q, + "qt", "/elevate", + "elevateIds", "p1s1", + "fq", "{!collapse " + opt + nullPolicy + "}", + "sort", "score desc, num_i asc"), + "*[count(//doc)=3]", + "//result/doc[1]/str[@name='id'][.='p1s1']", + "//result/doc[2]/str[@name='id'][.='p2s4']", + "//result/doc[3]/str[@name='id'][.='p3s1']"); + // same query, but boosting multiple skus from p1 - assertQ(req("q", q, - "qt", "/elevate", - "elevateIds", "p1s1,p1s2", - "fq", "{!collapse " + opt + nullPolicy + "}", - "sort", "score desc, num_i asc") - , "*[count(//doc)=4]" - , "//result/doc[1]/str[@name='id'][.='p1s1']" - , "//result/doc[2]/str[@name='id'][.='p1s2']" - , "//result/doc[3]/str[@name='id'][.='p2s4']" - , "//result/doc[4]/str[@name='id'][.='p3s1']" - ); + assertQ( + req( + "q", q, + "qt", "/elevate", + "elevateIds", "p1s1,p1s2", + "fq", "{!collapse " + opt + nullPolicy + "}", + "sort", "score desc, num_i asc"), + "*[count(//doc)=4]", + "//result/doc[1]/str[@name='id'][.='p1s1']", + "//result/doc[2]/str[@name='id'][.='p1s2']", + "//result/doc[3]/str[@name='id'][.='p2s4']", + "//result/doc[4]/str[@name='id'][.='p3s1']"); } { // use func query to assert expected scores - assertQ(req("q", "{!func}sum(42, num_i)", - "fq", "{!collapse " + opt + nullPolicy + "}", - "fl","score,id", - "sort", "score desc, num_i asc") - , "*[count(//doc)=3]" - , "//result/doc[1][str[@name='id'][.='p3s3'] and float[@name='score'][.=1276.0]]" - , "//result/doc[2][str[@name='id'][.='p1s3'] and float[@name='score'][.=819.0]]" - , "//result/doc[3][str[@name='id'][.='p2s3'] and float[@name='score'][.=141.0]]" - ); + assertQ( + req( + "q", "{!func}sum(42, num_i)", + "fq", "{!collapse " + opt + nullPolicy + "}", + "fl", "score,id", + "sort", "score desc, num_i asc"), + "*[count(//doc)=3]", + "//result/doc[1][str[@name='id'][.='p3s3'] and float[@name='score'][.=1276.0]]", + "//result/doc[2][str[@name='id'][.='p1s3'] and float[@name='score'][.=819.0]]", + "//result/doc[3][str[@name='id'][.='p2s3'] and float[@name='score'][.=141.0]]"); // same query, but boosting a diff child to change group head (and result order) - assertQ(req("q", "{!func}sum(42, num_i)", - "qt", "/elevate", - "elevateIds", "p1s1", - "fq", "{!collapse " + opt + nullPolicy + "}", - "fl","score,id", - "sort", "score desc, num_i asc") - , "*[count(//doc)=3]" - , "//result/doc[1][str[@name='id'][.='p1s1'] and float[@name='score'][.=84.0]]" - , "//result/doc[2][str[@name='id'][.='p3s3'] and float[@name='score'][.=1276.0]]" - , "//result/doc[3][str[@name='id'][.='p2s3'] and float[@name='score'][.=141.0]]" - ); + assertQ( + req( + 
"q", "{!func}sum(42, num_i)", + "qt", "/elevate", + "elevateIds", "p1s1", + "fq", "{!collapse " + opt + nullPolicy + "}", + "fl", "score,id", + "sort", "score desc, num_i asc"), + "*[count(//doc)=3]", + "//result/doc[1][str[@name='id'][.='p1s1'] and float[@name='score'][.=84.0]]", + "//result/doc[2][str[@name='id'][.='p3s3'] and float[@name='score'][.=1276.0]]", + "//result/doc[3][str[@name='id'][.='p2s3'] and float[@name='score'][.=141.0]]"); // same query, but boosting multiple skus from p1 - assertQ(req("q", "{!func}sum(42, num_i)", - "qt", "/elevate", - "elevateIds", "p1s2,p1s1", - "fq", "{!collapse " + opt + nullPolicy + "}", - "fl","score,id", - "sort", "score desc, num_i asc") - , "*[count(//doc)=4]" - , "//result/doc[1][str[@name='id'][.='p1s2'] and float[@name='score'][.=52.0]]" - , "//result/doc[2][str[@name='id'][.='p1s1'] and float[@name='score'][.=84.0]]" - , "//result/doc[3][str[@name='id'][.='p3s3'] and float[@name='score'][.=1276.0]]" - , "//result/doc[4][str[@name='id'][.='p2s3'] and float[@name='score'][.=141.0]]" - ); + assertQ( + req( + "q", "{!func}sum(42, num_i)", + "qt", "/elevate", + "elevateIds", "p1s2,p1s1", + "fq", "{!collapse " + opt + nullPolicy + "}", + "fl", "score,id", + "sort", "score desc, num_i asc"), + "*[count(//doc)=4]", + "//result/doc[1][str[@name='id'][.='p1s2'] and float[@name='score'][.=52.0]]", + "//result/doc[2][str[@name='id'][.='p1s1'] and float[@name='score'][.=84.0]]", + "//result/doc[3][str[@name='id'][.='p3s3'] and float[@name='score'][.=1276.0]]", + "//result/doc[4][str[@name='id'][.='p2s3'] and float[@name='score'][.=141.0]]"); } } - - } // score - + } // score // sort and min/max based group head selection for (String suffix : SELECTOR_FIELD_SUFFIXES) { - // these permutations should all give the same results, since the queries don't match any docs in 'null' groups + // these permutations should all give the same results, since the queries don't match any + // docs in 'null' groups // (because we don't have any in our index)... - for (String nullPolicy : Arrays.asList("", // ignore is default - " nullPolicy=ignore", - " nullPolicy=expand")) { - + for (String nullPolicy : + Arrays.asList( + "", // ignore is default + " nullPolicy=ignore", + " nullPolicy=expand")) { + // queries that are relevancy based... 
- for (String selector : Arrays.asList(" sort='asc" + suffix + " asc'", - " sort='sum(asc" + suffix + ",42) asc'", - " max=desc" + suffix, - " min=asc" + suffix, - " min='sum(asc" + suffix + ", 42)'")) { - + for (String selector : + Arrays.asList( + " sort='asc" + suffix + " asc'", + " sort='sum(asc" + suffix + ",42) asc'", + " max=desc" + suffix, + " min=asc" + suffix, + " min='sum(asc" + suffix + ", 42)'")) { + if (selector.endsWith("_l") && opt.endsWith("_i")) { // NOTE: this limitation doesn't apply to block collapse on int, // so we only check 'opt.endsWith' (if ends with block hint we're ok) - assertQEx("expected known limitation of using long for min/max selector when doing numeric collapse", - "min/max must be Int or Float", - req("q", "*:*", - "fq", "{!collapse " + opt + nullPolicy + selector + "}"), - SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "expected known limitation of using long for min/max selector when doing numeric collapse", + "min/max must be Int or Float", + req("q", "*:*", "fq", "{!collapse " + opt + nullPolicy + selector + "}"), + SolrException.ErrorCode.BAD_REQUEST); continue; } - assertQ(req("q","txt_t:XX", - "fq", "{!collapse " + opt + selector + nullPolicy + "}", - "sort", "score desc, num_i asc") - , "*[count(//doc)=3]" - , "//result/doc[1]/str[@name='id'][.='p2s4']" - , "//result/doc[2]/str[@name='id'][.='p3s4']" - , "//result/doc[3]/str[@name='id'][.='p1s4']" - ); - assertQ(req("q","txt_t:* txt_t:XX", - "fq", "{!collapse " + opt + selector + nullPolicy + "}", - "sort", "score desc, num_i asc") - , "*[count(//doc)=3]" - , "//result/doc[1]/str[@name='id'][.='p3s4']" - , "//result/doc[2]/str[@name='id'][.='p1s4']" - , "//result/doc[3]/str[@name='id'][.='p2s2']" - ); + assertQ( + req( + "q", "txt_t:XX", + "fq", "{!collapse " + opt + selector + nullPolicy + "}", + "sort", "score desc, num_i asc"), + "*[count(//doc)=3]", + "//result/doc[1]/str[@name='id'][.='p2s4']", + "//result/doc[2]/str[@name='id'][.='p3s4']", + "//result/doc[3]/str[@name='id'][.='p1s4']"); + assertQ( + req( + "q", "txt_t:* txt_t:XX", + "fq", "{!collapse " + opt + selector + nullPolicy + "}", + "sort", "score desc, num_i asc"), + "*[count(//doc)=3]", + "//result/doc[1]/str[@name='id'][.='p3s4']", + "//result/doc[2]/str[@name='id'][.='p1s4']", + "//result/doc[3]/str[@name='id'][.='p2s2']"); // same query, but boosting skus to change group head (and result order) - assertQ(req("q","txt_t:* txt_t:XX", - "qt", "/elevate", - "elevateIds", "p2s3,p1s1", - "fq", "{!collapse " + opt + selector + nullPolicy + "}", - "sort", "score desc, num_i asc") - , "*[count(//doc)=3]" - , "//result/doc[1]/str[@name='id'][.='p2s3']" - , "//result/doc[2]/str[@name='id'][.='p1s1']" - , "//result/doc[3]/str[@name='id'][.='p3s4']" - ); + assertQ( + req( + "q", "txt_t:* txt_t:XX", + "qt", "/elevate", + "elevateIds", "p2s3,p1s1", + "fq", "{!collapse " + opt + selector + nullPolicy + "}", + "sort", "score desc, num_i asc"), + "*[count(//doc)=3]", + "//result/doc[1]/str[@name='id'][.='p2s3']", + "//result/doc[2]/str[@name='id'][.='p1s1']", + "//result/doc[3]/str[@name='id'][.='p3s4']"); // same query, but boosting multiple skus from p1 - assertQ(req("q","txt_t:* txt_t:XX", - "qt", "/elevate", - "elevateIds", "p2s3,p1s4,p1s3", - "fq", "{!collapse " + opt + selector + nullPolicy + "}", - "sort", "score desc, num_i asc") - , "*[count(//doc)=4]" - , "//result/doc[1]/str[@name='id'][.='p2s3']" - , "//result/doc[2]/str[@name='id'][.='p1s4']" - , "//result/doc[3]/str[@name='id'][.='p1s3']" - , 
"//result/doc[4]/str[@name='id'][.='p3s4']" - ); - - + assertQ( + req( + "q", "txt_t:* txt_t:XX", + "qt", "/elevate", + "elevateIds", "p2s3,p1s4,p1s3", + "fq", "{!collapse " + opt + selector + nullPolicy + "}", + "sort", "score desc, num_i asc"), + "*[count(//doc)=4]", + "//result/doc[1]/str[@name='id'][.='p2s3']", + "//result/doc[2]/str[@name='id'][.='p1s4']", + "//result/doc[3]/str[@name='id'][.='p1s3']", + "//result/doc[4]/str[@name='id'][.='p3s4']"); } - + // query use {!func} so we can assert expected scores - for (String selector : Arrays.asList(" sort='asc" + suffix + " desc'", - " sort='sum(asc" + suffix + ",42) desc'", - " min=desc" + suffix, - " max=asc" + suffix, - " min='sum(desc" + suffix + ", 42)'", - " max='sum(asc" + suffix + ", 42)'")) { - + for (String selector : + Arrays.asList( + " sort='asc" + suffix + " desc'", + " sort='sum(asc" + suffix + ",42) desc'", + " min=desc" + suffix, + " max=asc" + suffix, + " min='sum(desc" + suffix + ", 42)'", + " max='sum(asc" + suffix + ", 42)'")) { + if (selector.endsWith("_l") && opt.endsWith("_i")) { // NOTE: this limitation doesn't apply to block collapse on int, // so we only check 'opt.endsWith' (if ends with block hint we're ok) - assertQEx("expected known limitation of using long for min/max selector when doing numeric collapse", - "min/max must be Int or Float", - req("q", "*:*", - "fq", "{!collapse " + opt + nullPolicy + selector + "}"), - SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "expected known limitation of using long for min/max selector when doing numeric collapse", + "min/max must be Int or Float", + req("q", "*:*", "fq", "{!collapse " + opt + nullPolicy + selector + "}"), + SolrException.ErrorCode.BAD_REQUEST); continue; } - - assertQ(req("q", "{!func}sum(42, num_i)", - "fq", "{!collapse " + opt + selector + nullPolicy + "}", - "fl","score,id", - "sort", "num_i asc") - , "*[count(//doc)=3]" - , "//result/doc[1][str[@name='id'][.='p2s3'] and float[@name='score'][.=141.0]]" - , "//result/doc[2][str[@name='id'][.='p1s3'] and float[@name='score'][.=819.0]]" - , "//result/doc[3][str[@name='id'][.='p3s3'] and float[@name='score'][.=1276.0]]" - ); + + assertQ( + req( + "q", "{!func}sum(42, num_i)", + "fq", "{!collapse " + opt + selector + nullPolicy + "}", + "fl", "score,id", + "sort", "num_i asc"), + "*[count(//doc)=3]", + "//result/doc[1][str[@name='id'][.='p2s3'] and float[@name='score'][.=141.0]]", + "//result/doc[2][str[@name='id'][.='p1s3'] and float[@name='score'][.=819.0]]", + "//result/doc[3][str[@name='id'][.='p3s3'] and float[@name='score'][.=1276.0]]"); // same query, but boosting multiple skus from p1 - assertQ(req("q", "{!func}sum(42, num_i)", - "qt", "/elevate", - "elevateIds", "p1s2,p1s1", - "fq", "{!collapse " + opt + selector + nullPolicy + "}", - "fl","score,id", - "sort", "num_i asc") - , "*[count(//doc)=4]" - , "//result/doc[1][str[@name='id'][.='p1s2'] and float[@name='score'][.=52.0]]" - , "//result/doc[2][str[@name='id'][.='p1s1'] and float[@name='score'][.=84.0]]" - , "//result/doc[3][str[@name='id'][.='p2s3'] and float[@name='score'][.=141.0]]" - , "//result/doc[4][str[@name='id'][.='p3s3'] and float[@name='score'][.=1276.0]]" - ); - - + assertQ( + req( + "q", "{!func}sum(42, num_i)", + "qt", "/elevate", + "elevateIds", "p1s2,p1s1", + "fq", "{!collapse " + opt + selector + nullPolicy + "}", + "fl", "score,id", + "sort", "num_i asc"), + "*[count(//doc)=4]", + "//result/doc[1][str[@name='id'][.='p1s2'] and float[@name='score'][.=52.0]]", + "//result/doc[2][str[@name='id'][.='p1s1'] and 
float[@name='score'][.=84.0]]", + "//result/doc[3][str[@name='id'][.='p2s3'] and float[@name='score'][.=141.0]]", + "//result/doc[4][str[@name='id'][.='p3s3'] and float[@name='score'][.=1276.0]]"); } - - // queries are relevancy based, and score is used in collapse local param sort -- but not in top fl/sort - // (ie: help prove we setup 'needScores' correctly for collapse, even though top level query doesn't care) - for (String selector : Arrays.asList("", // implicit score ranking as sanity check - " sort='score desc'", - // unused tie breaker after score - " sort='score desc, sum(num_i,42) desc'", - // force score to be a tie breaker - " sort='sum(1.5,2.5) asc, score desc'")) { - assertQ(req("q", "*:* txt_t:XX", - "fq", "{!collapse " + opt + selector + nullPolicy + "}", - "fl", "id", - "sort", "num_i asc") - , "*[count(//doc)=3]" - , "//result/doc[1][str[@name='id'][.='p2s4']]" // 13 - , "//result/doc[2][str[@name='id'][.='p3s1']]" // 15 - , "//result/doc[3][str[@name='id'][.='p1s3']]" // 777 - ); + + // queries are relevancy based, and score is used in collapse local param sort -- but not + // in top fl/sort (ie: help prove we setup 'needScores' correctly for collapse, even + // though top level query doesn't care) + for (String selector : + Arrays.asList( + "", // implicit score ranking as sanity check + " sort='score desc'", + // unused tie breaker after score + " sort='score desc, sum(num_i,42) desc'", + // force score to be a tie breaker + " sort='sum(1.5,2.5) asc, score desc'")) { + assertQ( + req( + "q", "*:* txt_t:XX", + "fq", "{!collapse " + opt + selector + nullPolicy + "}", + "fl", "id", + "sort", "num_i asc"), + "*[count(//doc)=3]", + "//result/doc[1][str[@name='id'][.='p2s4']]" // 13 + , + "//result/doc[2][str[@name='id'][.='p3s1']]" // 15 + , + "//result/doc[3][str[@name='id'][.='p1s3']]" // 777 + ); // same query, but boosting multiple skus from p3 - // NOTE: this causes each boosted doc to be returned, but top level sort is not score, so QEC doesn't hijak order - assertQ(req("q", "*:* txt_t:XX", - "qt", "/elevate", - "elevateIds", "p3s3,p3s2", - "fq", "{!collapse " + opt + selector + nullPolicy + "}", - "fl", "id", - "sort", "num_i asc") - , "*[count(//doc)=4]" - , "//result/doc[1][str[@name='id'][.='p2s4']]" // 13 - , "//result/doc[2][str[@name='id'][.='p3s2']]" // 100 (boosted so treated as own group) - , "//result/doc[3][str[@name='id'][.='p1s3']]" // 777 - , "//result/doc[4][str[@name='id'][.='p3s3']]" // 1234 (boosted so treated as own group) - ); + // NOTE: this causes each boosted doc to be returned, but top level sort is not score, + // so QEC doesn't hijak order + assertQ( + req( + "q", "*:* txt_t:XX", + "qt", "/elevate", + "elevateIds", "p3s3,p3s2", + "fq", "{!collapse " + opt + selector + nullPolicy + "}", + "fl", "id", + "sort", "num_i asc"), + "*[count(//doc)=4]", + "//result/doc[1][str[@name='id'][.='p2s4']]" // 13 + , + "//result/doc[2][str[@name='id'][.='p3s2']]" // 100 (boosted so treated as own + // group) + , + "//result/doc[3][str[@name='id'][.='p1s3']]" // 777 + , + "//result/doc[4][str[@name='id'][.='p3s3']]" // 1234 (boosted so treated as own + // group) + ); // same query, w/forceElevation to change top level order - assertQ(req("q", "*:* txt_t:XX", - "qt", "/elevate", - "elevateIds", "p3s3,p3s2", - "forceElevation", "true", - "fq", "{!collapse " + opt + selector + nullPolicy + "}", - "fl", "id", - "sort", "num_i asc") - , "*[count(//doc)=4]" - , "//result/doc[1][str[@name='id'][.='p3s3']]" // 1234 (boosted so treated as own group) - , 
"//result/doc[2][str[@name='id'][.='p3s2']]" // 100 (boosted so treated as own group) - , "//result/doc[3][str[@name='id'][.='p2s4']]" // 13 - , "//result/doc[4][str[@name='id'][.='p1s3']]" // 777 - ); - - + assertQ( + req( + "q", "*:* txt_t:XX", + "qt", "/elevate", + "elevateIds", "p3s3,p3s2", + "forceElevation", "true", + "fq", "{!collapse " + opt + selector + nullPolicy + "}", + "fl", "id", + "sort", "num_i asc"), + "*[count(//doc)=4]", + "//result/doc[1][str[@name='id'][.='p3s3']]" // 1234 (boosted so treated as own + // group) + , + "//result/doc[2][str[@name='id'][.='p3s2']]" // 100 (boosted so treated as own + // group) + , + "//result/doc[3][str[@name='id'][.='p2s4']]" // 13 + , + "//result/doc[4][str[@name='id'][.='p1s3']]" // 777 + ); } } } @@ -496,336 +613,378 @@ public void testSimple() throws Exception { } public void testNullPolicyExpand() throws Exception { - - { // convert our docs + some docs w/o collapse fields, along with some commits, to update commands - // in a shuffled order and process all of them... - final List updates = Stream.concat(Stream.of(commit(), commit()), - Stream.concat(makeBlockDocs().stream(), - sdocs(dupFields(sdoc("id","z1", "num_i", 1)), - dupFields(sdoc("id","z2", "num_i", 2)), - dupFields(sdoc("id","z3", "num_i", 3)), - dupFields(sdoc("id","z100", "num_i", 100))).stream() - ).map(doc -> adoc(doc))).collect(Collectors.toList()); + + { // convert our docs + some docs w/o collapse fields, along with some commits, to update + // commands in a shuffled order and process all of them... + final List updates = + Stream.concat( + Stream.of(commit(), commit()), + Stream.concat( + makeBlockDocs().stream(), + sdocs( + dupFields(sdoc("id", "z1", "num_i", 1)), + dupFields(sdoc("id", "z2", "num_i", 2)), + dupFields(sdoc("id", "z3", "num_i", 3)), + dupFields(sdoc("id", "z100", "num_i", 100))) + .stream()) + .map(doc -> adoc(doc))) + .collect(Collectors.toList()); Collections.shuffle(updates, random()); for (String u : updates) { assertU(u); } assertU(commit()); } - - // NOTE: we don't try to collapse on '_root_' in this test, because then we'll get different results - // compared to our other collapse fields (because every doc has a _root_ field) - for (String opt : Arrays.asList(// no block collapse logic used (sanity checks) - "field=block_s1", - "field=block_i", - // block collapse used explicitly (ord) - "field=block_s1 hint=block", - // block collapse used explicitly (int) - "field=block_i hint=block" - )) { - + + // NOTE: we don't try to collapse on '_root_' in this test, because then we'll get different + // results compared to our other collapse fields (because every doc has a _root_ field) + for (String opt : + Arrays.asList( // no block collapse logic used (sanity checks) + "field=block_s1", + "field=block_i", + // block collapse used explicitly (ord) + "field=block_s1 hint=block", + // block collapse used explicitly (int) + "field=block_i hint=block")) { + { // score based group head selection (default) - assertQ(req("q", "*:* txt_t:XX", - "fq", "{!collapse " + opt + " nullPolicy=expand}", - "sort", "score desc, num_i asc") - , "*[count(//doc)=7]" - , "//result/doc[1]/str[@name='id'][.='p2s4']" - , "//result/doc[2]/str[@name='id'][.='p3s1']" - , "//result/doc[3]/str[@name='id'][.='p1s3']" - , "//result/doc[4]/str[@name='id'][.='z1']" - , "//result/doc[5]/str[@name='id'][.='z2']" - , "//result/doc[6]/str[@name='id'][.='z3']" - , "//result/doc[7]/str[@name='id'][.='z100']" - ); + assertQ( + req( + "q", "*:* txt_t:XX", + "fq", "{!collapse " + opt + " 
nullPolicy=expand}", + "sort", "score desc, num_i asc"), + "*[count(//doc)=7]", + "//result/doc[1]/str[@name='id'][.='p2s4']", + "//result/doc[2]/str[@name='id'][.='p3s1']", + "//result/doc[3]/str[@name='id'][.='p1s3']", + "//result/doc[4]/str[@name='id'][.='z1']", + "//result/doc[5]/str[@name='id'][.='z2']", + "//result/doc[6]/str[@name='id'][.='z3']", + "//result/doc[7]/str[@name='id'][.='z100']"); // same query, but boosting docs to change group heads (and result order) - assertQ(req("q", "*:* txt_t:XX", - "qt", "/elevate", - "elevateIds", "z2,p3s3", - "fq", "{!collapse " + opt + " nullPolicy=expand}", - "sort", "score desc, num_i asc") - , "*[count(//doc)=7]" - , "//result/doc[1]/str[@name='id'][.='z2']" - , "//result/doc[2]/str[@name='id'][.='p3s3']" - , "//result/doc[3]/str[@name='id'][.='p2s4']" - , "//result/doc[4]/str[@name='id'][.='p1s3']" - , "//result/doc[5]/str[@name='id'][.='z1']" - , "//result/doc[6]/str[@name='id'][.='z3']" - , "//result/doc[7]/str[@name='id'][.='z100']" - ); + assertQ( + req( + "q", "*:* txt_t:XX", + "qt", "/elevate", + "elevateIds", "z2,p3s3", + "fq", "{!collapse " + opt + " nullPolicy=expand}", + "sort", "score desc, num_i asc"), + "*[count(//doc)=7]", + "//result/doc[1]/str[@name='id'][.='z2']", + "//result/doc[2]/str[@name='id'][.='p3s3']", + "//result/doc[3]/str[@name='id'][.='p2s4']", + "//result/doc[4]/str[@name='id'][.='p1s3']", + "//result/doc[5]/str[@name='id'][.='z1']", + "//result/doc[6]/str[@name='id'][.='z3']", + "//result/doc[7]/str[@name='id'][.='z100']"); // use func query to assert expected scores - assertQ(req("q", "{!func}sum(42, num_i)", - "fq", "{!collapse " + opt + " nullPolicy=expand}", - "fl","score,id", - "sort", "score desc, num_i asc") - , "*[count(//doc)=7]" - , "//result/doc[1][str[@name='id'][.='p3s3'] and float[@name='score'][.=1276.0]]" - , "//result/doc[2][str[@name='id'][.='p1s3'] and float[@name='score'][.=819.0]]" - , "//result/doc[3][str[@name='id'][.='z100'] and float[@name='score'][.=142.0]]" - , "//result/doc[4][str[@name='id'][.='p2s3'] and float[@name='score'][.=141.0]]" - , "//result/doc[5][str[@name='id'][.='z3'] and float[@name='score'][.=45.0]]" - , "//result/doc[6][str[@name='id'][.='z2'] and float[@name='score'][.=44.0]]" - , "//result/doc[7][str[@name='id'][.='z1'] and float[@name='score'][.=43.0]]" - ); + assertQ( + req( + "q", "{!func}sum(42, num_i)", + "fq", "{!collapse " + opt + " nullPolicy=expand}", + "fl", "score,id", + "sort", "score desc, num_i asc"), + "*[count(//doc)=7]", + "//result/doc[1][str[@name='id'][.='p3s3'] and float[@name='score'][.=1276.0]]", + "//result/doc[2][str[@name='id'][.='p1s3'] and float[@name='score'][.=819.0]]", + "//result/doc[3][str[@name='id'][.='z100'] and float[@name='score'][.=142.0]]", + "//result/doc[4][str[@name='id'][.='p2s3'] and float[@name='score'][.=141.0]]", + "//result/doc[5][str[@name='id'][.='z3'] and float[@name='score'][.=45.0]]", + "//result/doc[6][str[@name='id'][.='z2'] and float[@name='score'][.=44.0]]", + "//result/doc[7][str[@name='id'][.='z1'] and float[@name='score'][.=43.0]]"); // same query, but boosting docs to change group heads (and result order) - assertQ(req("q", "{!func}sum(42, num_i)", - "qt", "/elevate", - "elevateIds", "p2s4,z2,p2s1", - "fq", "{!collapse " + opt + " nullPolicy=expand}", - "fl","score,id", - "sort", "score desc, num_i asc") - , "*[count(//doc)=8]" - , "//result/doc[1][str[@name='id'][.='p2s4'] and float[@name='score'][.=55.0]]" - , "//result/doc[2][str[@name='id'][.='z2'] and float[@name='score'][.=44.0]]" - , 
"//result/doc[3][str[@name='id'][.='p2s1'] and float[@name='score'][.=97.0]]" - , "//result/doc[4][str[@name='id'][.='p3s3'] and float[@name='score'][.=1276.0]]" - , "//result/doc[5][str[@name='id'][.='p1s3'] and float[@name='score'][.=819.0]]" - , "//result/doc[6][str[@name='id'][.='z100'] and float[@name='score'][.=142.0]]" - , "//result/doc[7][str[@name='id'][.='z3'] and float[@name='score'][.=45.0]]" - , "//result/doc[8][str[@name='id'][.='z1'] and float[@name='score'][.=43.0]]" - ); - - } // score - + assertQ( + req( + "q", "{!func}sum(42, num_i)", + "qt", "/elevate", + "elevateIds", "p2s4,z2,p2s1", + "fq", "{!collapse " + opt + " nullPolicy=expand}", + "fl", "score,id", + "sort", "score desc, num_i asc"), + "*[count(//doc)=8]", + "//result/doc[1][str[@name='id'][.='p2s4'] and float[@name='score'][.=55.0]]", + "//result/doc[2][str[@name='id'][.='z2'] and float[@name='score'][.=44.0]]", + "//result/doc[3][str[@name='id'][.='p2s1'] and float[@name='score'][.=97.0]]", + "//result/doc[4][str[@name='id'][.='p3s3'] and float[@name='score'][.=1276.0]]", + "//result/doc[5][str[@name='id'][.='p1s3'] and float[@name='score'][.=819.0]]", + "//result/doc[6][str[@name='id'][.='z100'] and float[@name='score'][.=142.0]]", + "//result/doc[7][str[@name='id'][.='z3'] and float[@name='score'][.=45.0]]", + "//result/doc[8][str[@name='id'][.='z1'] and float[@name='score'][.=43.0]]"); + } // score + // sort and min/max based group head selection for (String suffix : SELECTOR_FIELD_SUFFIXES) { - + // queries that are relevancy based... - for (String selector : Arrays.asList(" sort='asc" + suffix + " asc'", - " sort='sum(asc" + suffix + ",42) asc'", - " min=asc" + suffix, - " max=desc" + suffix, - " min='sum(asc" + suffix + ", 42)'", - " max='sum(desc" + suffix + ", 42)'")) { - + for (String selector : + Arrays.asList( + " sort='asc" + suffix + " asc'", + " sort='sum(asc" + suffix + ",42) asc'", + " min=asc" + suffix, + " max=desc" + suffix, + " min='sum(asc" + suffix + ", 42)'", + " max='sum(desc" + suffix + ", 42)'")) { + if (selector.endsWith("_l") && opt.endsWith("_i")) { // NOTE: this limitation doesn't apply to block collapse on int, // so we only check 'opt.endsWith' (if ends with block hint we're ok) - assertQEx("expected known limitation of using long for min/max selector when doing numeric collapse", - "min/max must be Int or Float", - req("q", "*:*", - "fq", "{!collapse " + opt + selector + " nullPolicy=expand}"), - SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "expected known limitation of using long for min/max selector when doing numeric collapse", + "min/max must be Int or Float", + req("q", "*:*", "fq", "{!collapse " + opt + selector + " nullPolicy=expand}"), + SolrException.ErrorCode.BAD_REQUEST); continue; } - - assertQ(req("q","num_i:* txt_t:* txt_t:XX", - "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", - "sort", "score desc, num_i asc") - , "*[count(//doc)=7]" - , "//result/doc[1]/str[@name='id'][.='p3s4']" - , "//result/doc[2]/str[@name='id'][.='p1s4']" - , "//result/doc[3]/str[@name='id'][.='p2s2']" - , "//result/doc[4]/str[@name='id'][.='z1']" - , "//result/doc[5]/str[@name='id'][.='z2']" - , "//result/doc[6]/str[@name='id'][.='z3']" - , "//result/doc[7]/str[@name='id'][.='z100']" - ); - assertQ(req("q","num_i:* txt_t:XX", - "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", - "sort", "num_i asc") - , "*[count(//doc)=7]" - , "//result/doc[1]/str[@name='id'][.='z1']" - , "//result/doc[2]/str[@name='id'][.='z2']" - , 
"//result/doc[3]/str[@name='id'][.='z3']" - , "//result/doc[4]/str[@name='id'][.='p3s4']" - , "//result/doc[5]/str[@name='id'][.='p1s4']" - , "//result/doc[6]/str[@name='id'][.='p2s2']" - , "//result/doc[7]/str[@name='id'][.='z100']" - ); + + assertQ( + req( + "q", "num_i:* txt_t:* txt_t:XX", + "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", + "sort", "score desc, num_i asc"), + "*[count(//doc)=7]", + "//result/doc[1]/str[@name='id'][.='p3s4']", + "//result/doc[2]/str[@name='id'][.='p1s4']", + "//result/doc[3]/str[@name='id'][.='p2s2']", + "//result/doc[4]/str[@name='id'][.='z1']", + "//result/doc[5]/str[@name='id'][.='z2']", + "//result/doc[6]/str[@name='id'][.='z3']", + "//result/doc[7]/str[@name='id'][.='z100']"); + assertQ( + req( + "q", "num_i:* txt_t:XX", + "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", + "sort", "num_i asc"), + "*[count(//doc)=7]", + "//result/doc[1]/str[@name='id'][.='z1']", + "//result/doc[2]/str[@name='id'][.='z2']", + "//result/doc[3]/str[@name='id'][.='z3']", + "//result/doc[4]/str[@name='id'][.='p3s4']", + "//result/doc[5]/str[@name='id'][.='p1s4']", + "//result/doc[6]/str[@name='id'][.='p2s2']", + "//result/doc[7]/str[@name='id'][.='z100']"); // same query, but boosting multiple docs - // NOTE: this causes each boosted doc to be returned, but top level sort is not score, so QEC doesn't hijak order - assertQ(req("q","num_i:* txt_t:XX", - "qt", "/elevate", - "elevateIds", "p3s3,z3,p3s1", - "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", - "sort", "num_i asc") - , "*[count(//doc)=8]" - , "//result/doc[1]/str[@name='id'][.='z1']" - , "//result/doc[2]/str[@name='id'][.='z2']" - , "//result/doc[3]/str[@name='id'][.='z3']" - , "//result/doc[4]/str[@name='id'][.='p1s4']" - , "//result/doc[5]/str[@name='id'][.='p2s2']" - , "//result/doc[6]/str[@name='id'][.='p3s1']" - , "//result/doc[7]/str[@name='id'][.='z100']" - , "//result/doc[8]/str[@name='id'][.='p3s3']" - ); + // NOTE: this causes each boosted doc to be returned, but top level sort is not score, so + // QEC doesn't hijak order + assertQ( + req( + "q", "num_i:* txt_t:XX", + "qt", "/elevate", + "elevateIds", "p3s3,z3,p3s1", + "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", + "sort", "num_i asc"), + "*[count(//doc)=8]", + "//result/doc[1]/str[@name='id'][.='z1']", + "//result/doc[2]/str[@name='id'][.='z2']", + "//result/doc[3]/str[@name='id'][.='z3']", + "//result/doc[4]/str[@name='id'][.='p1s4']", + "//result/doc[5]/str[@name='id'][.='p2s2']", + "//result/doc[6]/str[@name='id'][.='p3s1']", + "//result/doc[7]/str[@name='id'][.='z100']", + "//result/doc[8]/str[@name='id'][.='p3s3']"); // same query, w/forceElevation to change top level order - assertQ(req("q","num_i:* txt_t:XX", - "qt", "/elevate", - "elevateIds", "p3s3,z3,p3s1", - "forceElevation", "true", - "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", - "sort", "num_i asc") - , "*[count(//doc)=8]" - , "//result/doc[1]/str[@name='id'][.='p3s3']" - , "//result/doc[2]/str[@name='id'][.='z3']" - , "//result/doc[3]/str[@name='id'][.='p3s1']" - , "//result/doc[4]/str[@name='id'][.='z1']" - , "//result/doc[5]/str[@name='id'][.='z2']" - , "//result/doc[6]/str[@name='id'][.='p1s4']" - , "//result/doc[7]/str[@name='id'][.='p2s2']" - , "//result/doc[8]/str[@name='id'][.='z100']" - ); - + assertQ( + req( + "q", "num_i:* txt_t:XX", + "qt", "/elevate", + "elevateIds", "p3s3,z3,p3s1", + "forceElevation", "true", + "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", + "sort", "num_i asc"), + 
"*[count(//doc)=8]", + "//result/doc[1]/str[@name='id'][.='p3s3']", + "//result/doc[2]/str[@name='id'][.='z3']", + "//result/doc[3]/str[@name='id'][.='p3s1']", + "//result/doc[4]/str[@name='id'][.='z1']", + "//result/doc[5]/str[@name='id'][.='z2']", + "//result/doc[6]/str[@name='id'][.='p1s4']", + "//result/doc[7]/str[@name='id'][.='p2s2']", + "//result/doc[8]/str[@name='id'][.='z100']"); } // query uses {!func} so we can assert expected scores - for (String selector : Arrays.asList(" sort='asc" + suffix + " desc'", - " sort='sum(asc" + suffix + ",42) desc'", - " min=desc" + suffix, - " max=asc" + suffix, - " min='sum(desc" + suffix + ", 42)'", - " max='sum(asc" + suffix + ", 42)'")) { + for (String selector : + Arrays.asList( + " sort='asc" + suffix + " desc'", + " sort='sum(asc" + suffix + ",42) desc'", + " min=desc" + suffix, + " max=asc" + suffix, + " min='sum(desc" + suffix + ", 42)'", + " max='sum(asc" + suffix + ", 42)'")) { if (selector.endsWith("_l") && opt.endsWith("_i")) { // NOTE: this limitation doesn't apply to block collapse on int, // so we only check 'opt.endsWith' (if ends with block hint we're ok) - assertQEx("expected known limitation of using long for min/max selector when doing numeric collapse", - "min/max must be Int or Float", - req("q", "*:*", - "fq", "{!collapse " + opt + selector + " nullPolicy=expand}"), - SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "expected known limitation of using long for min/max selector when doing numeric collapse", + "min/max must be Int or Float", + req("q", "*:*", "fq", "{!collapse " + opt + selector + " nullPolicy=expand}"), + SolrException.ErrorCode.BAD_REQUEST); continue; } - - assertQ(req("q", "{!func}sum(42, num_i)", - "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", - "fl","score,id", - "sort", "num_i asc") - , "*[count(//doc)=7]" - , "//result/doc[1][str[@name='id'][.='z1'] and float[@name='score'][.=43.0]]" - , "//result/doc[2][str[@name='id'][.='z2'] and float[@name='score'][.=44.0]]" - , "//result/doc[3][str[@name='id'][.='z3'] and float[@name='score'][.=45.0]]" - , "//result/doc[4][str[@name='id'][.='p2s3'] and float[@name='score'][.=141.0]]" - , "//result/doc[5][str[@name='id'][.='z100'] and float[@name='score'][.=142.0]]" - , "//result/doc[6][str[@name='id'][.='p1s3'] and float[@name='score'][.=819.0]]" - , "//result/doc[7][str[@name='id'][.='p3s3'] and float[@name='score'][.=1276.0]]" - ); + + assertQ( + req( + "q", "{!func}sum(42, num_i)", + "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", + "fl", "score,id", + "sort", "num_i asc"), + "*[count(//doc)=7]", + "//result/doc[1][str[@name='id'][.='z1'] and float[@name='score'][.=43.0]]", + "//result/doc[2][str[@name='id'][.='z2'] and float[@name='score'][.=44.0]]", + "//result/doc[3][str[@name='id'][.='z3'] and float[@name='score'][.=45.0]]", + "//result/doc[4][str[@name='id'][.='p2s3'] and float[@name='score'][.=141.0]]", + "//result/doc[5][str[@name='id'][.='z100'] and float[@name='score'][.=142.0]]", + "//result/doc[6][str[@name='id'][.='p1s3'] and float[@name='score'][.=819.0]]", + "//result/doc[7][str[@name='id'][.='p3s3'] and float[@name='score'][.=1276.0]]"); // same query, but boosting multiple docs - // NOTE: this causes each boosted doc to be returned, but top level sort is not score, so QEC doesn't hijak order - assertQ(req("q", "{!func}sum(42, num_i)", - "qt", "/elevate", - "elevateIds", "p3s1,z3,p3s4", - "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", - "fl","score,id", - "sort", "num_i asc") - , 
"*[count(//doc)=8]" - , "//result/doc[1][str[@name='id'][.='z1'] and float[@name='score'][.=43.0]]" - , "//result/doc[2][str[@name='id'][.='z2'] and float[@name='score'][.=44.0]]" - , "//result/doc[3][str[@name='id'][.='z3'] and float[@name='score'][.=45.0]]" - , "//result/doc[4][str[@name='id'][.='p3s4'] and float[@name='score'][.=46.0]]" - , "//result/doc[5][str[@name='id'][.='p3s1'] and float[@name='score'][.=57.0]]" - , "//result/doc[6][str[@name='id'][.='p2s3'] and float[@name='score'][.=141.0]]" - , "//result/doc[7][str[@name='id'][.='z100'] and float[@name='score'][.=142.0]]" - , "//result/doc[8][str[@name='id'][.='p1s3'] and float[@name='score'][.=819.0]]" - ); + // NOTE: this causes each boosted doc to be returned, but top level sort is not score, so + // QEC doesn't hijak order + assertQ( + req( + "q", "{!func}sum(42, num_i)", + "qt", "/elevate", + "elevateIds", "p3s1,z3,p3s4", + "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", + "fl", "score,id", + "sort", "num_i asc"), + "*[count(//doc)=8]", + "//result/doc[1][str[@name='id'][.='z1'] and float[@name='score'][.=43.0]]", + "//result/doc[2][str[@name='id'][.='z2'] and float[@name='score'][.=44.0]]", + "//result/doc[3][str[@name='id'][.='z3'] and float[@name='score'][.=45.0]]", + "//result/doc[4][str[@name='id'][.='p3s4'] and float[@name='score'][.=46.0]]", + "//result/doc[5][str[@name='id'][.='p3s1'] and float[@name='score'][.=57.0]]", + "//result/doc[6][str[@name='id'][.='p2s3'] and float[@name='score'][.=141.0]]", + "//result/doc[7][str[@name='id'][.='z100'] and float[@name='score'][.=142.0]]", + "//result/doc[8][str[@name='id'][.='p1s3'] and float[@name='score'][.=819.0]]"); // same query, w/forceElevation to change top level order - assertQ(req("q", "{!func}sum(42, num_i)", - "qt", "/elevate", - "elevateIds", "p3s1,z3,p3s4", - "forceElevation", "true", - "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", - "fl","score,id", - "sort", "num_i asc") - , "*[count(//doc)=8]" - , "//result/doc[1][str[@name='id'][.='p3s1'] and float[@name='score'][.=57.0]]" - , "//result/doc[2][str[@name='id'][.='z3'] and float[@name='score'][.=45.0]]" - , "//result/doc[3][str[@name='id'][.='p3s4'] and float[@name='score'][.=46.0]]" - , "//result/doc[4][str[@name='id'][.='z1'] and float[@name='score'][.=43.0]]" - , "//result/doc[5][str[@name='id'][.='z2'] and float[@name='score'][.=44.0]]" - , "//result/doc[6][str[@name='id'][.='p2s3'] and float[@name='score'][.=141.0]]" - , "//result/doc[7][str[@name='id'][.='z100'] and float[@name='score'][.=142.0]]" - , "//result/doc[8][str[@name='id'][.='p1s3'] and float[@name='score'][.=819.0]]" - ); - + assertQ( + req( + "q", "{!func}sum(42, num_i)", + "qt", "/elevate", + "elevateIds", "p3s1,z3,p3s4", + "forceElevation", "true", + "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", + "fl", "score,id", + "sort", "num_i asc"), + "*[count(//doc)=8]", + "//result/doc[1][str[@name='id'][.='p3s1'] and float[@name='score'][.=57.0]]", + "//result/doc[2][str[@name='id'][.='z3'] and float[@name='score'][.=45.0]]", + "//result/doc[3][str[@name='id'][.='p3s4'] and float[@name='score'][.=46.0]]", + "//result/doc[4][str[@name='id'][.='z1'] and float[@name='score'][.=43.0]]", + "//result/doc[5][str[@name='id'][.='z2'] and float[@name='score'][.=44.0]]", + "//result/doc[6][str[@name='id'][.='p2s3'] and float[@name='score'][.=141.0]]", + "//result/doc[7][str[@name='id'][.='z100'] and float[@name='score'][.=142.0]]", + "//result/doc[8][str[@name='id'][.='p1s3'] and 
float[@name='score'][.=819.0]]"); } - - // queries are relevancy based, and score is used in collapse local param sort -- but not in top fl/sort - // (ie: help prove we setup 'needScores' correctly for collapse, even though top level query doesn't care) - for (String selector : Arrays.asList("", // implicit score ranking as sanity check - " sort='score desc'", - // unused tie breaker after score - " sort='score desc, sum(num_i,42) desc'", - // force score to be a tie breaker - " sort='sum(1.5,2.5) asc, score desc'")) { - - assertQ(req("q", "*:* txt_t:XX", - "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", - "fl", "id", - "sort", "num_i asc") - , "*[count(//doc)=7]" - , "//result/doc[1][str[@name='id'][.='z1']]" - , "//result/doc[2][str[@name='id'][.='z2']]" - , "//result/doc[3][str[@name='id'][.='z3']]" - , "//result/doc[4][str[@name='id'][.='p2s4']]" // 13 - , "//result/doc[5][str[@name='id'][.='p3s1']]" // 15 - , "//result/doc[6][str[@name='id'][.='z100']]" - , "//result/doc[7][str[@name='id'][.='p1s3']]" // 777 - ); - // same query, but boosting multiple docs - // NOTE: this causes each boosted doc to be returned, but top level sort is not score, so QEC doesn't hijak order - assertQ(req("q", "*:* txt_t:XX", - "qt", "/elevate", - "elevateIds", "p3s3,z3,p3s4", - "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", - "fl", "id", - "sort", "num_i asc") - , "*[count(//doc)=8]" - , "//result/doc[1][str[@name='id'][.='z1']]" - , "//result/doc[2][str[@name='id'][.='z2']]" - , "//result/doc[3][str[@name='id'][.='z3']]" - , "//result/doc[4][str[@name='id'][.='p3s4']]" // 4 - , "//result/doc[5][str[@name='id'][.='p2s4']]" // 13 - , "//result/doc[6][str[@name='id'][.='z100']]" - , "//result/doc[7][str[@name='id'][.='p1s3']]" // 777 - , "//result/doc[8][str[@name='id'][.='p3s3']]" // 1234 - ); - // same query, w/forceElevation to change top level order - assertQ(req("q", "*:* txt_t:XX", - "qt", "/elevate", - "elevateIds", "p3s3,z3,p3s4", - "forceElevation", "true", - "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", - "fl", "id", - "sort", "num_i asc") - , "*[count(//doc)=8]" - , "//result/doc[1][str[@name='id'][.='p3s3']]" // 1234 - , "//result/doc[2][str[@name='id'][.='z3']]" - , "//result/doc[3][str[@name='id'][.='p3s4']]" // 4 - , "//result/doc[4][str[@name='id'][.='z1']]" - , "//result/doc[5][str[@name='id'][.='z2']]" - , "//result/doc[6][str[@name='id'][.='p2s4']]" // 13 - , "//result/doc[7][str[@name='id'][.='z100']]" - , "//result/doc[8][str[@name='id'][.='p1s3']]" // 777 - ); + // queries are relevancy based, and score is used in collapse local param sort -- but not in + // top fl/sort (ie: help prove we setup 'needScores' correctly for collapse, even though top + // level query doesn't care) + for (String selector : + Arrays.asList( + "", // implicit score ranking as sanity check + " sort='score desc'", + // unused tie breaker after score + " sort='score desc, sum(num_i,42) desc'", + // force score to be a tie breaker + " sort='sum(1.5,2.5) asc, score desc'")) { + assertQ( + req( + "q", "*:* txt_t:XX", + "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", + "fl", "id", + "sort", "num_i asc"), + "*[count(//doc)=7]", + "//result/doc[1][str[@name='id'][.='z1']]", + "//result/doc[2][str[@name='id'][.='z2']]", + "//result/doc[3][str[@name='id'][.='z3']]", + "//result/doc[4][str[@name='id'][.='p2s4']]" // 13 + , + "//result/doc[5][str[@name='id'][.='p3s1']]" // 15 + , + "//result/doc[6][str[@name='id'][.='z100']]", + 
"//result/doc[7][str[@name='id'][.='p1s3']]" // 777 + ); + // same query, but boosting multiple docs + // NOTE: this causes each boosted doc to be returned, but top level sort is not score, so + // QEC doesn't hijak order + assertQ( + req( + "q", "*:* txt_t:XX", + "qt", "/elevate", + "elevateIds", "p3s3,z3,p3s4", + "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", + "fl", "id", + "sort", "num_i asc"), + "*[count(//doc)=8]", + "//result/doc[1][str[@name='id'][.='z1']]", + "//result/doc[2][str[@name='id'][.='z2']]", + "//result/doc[3][str[@name='id'][.='z3']]", + "//result/doc[4][str[@name='id'][.='p3s4']]" // 4 + , + "//result/doc[5][str[@name='id'][.='p2s4']]" // 13 + , + "//result/doc[6][str[@name='id'][.='z100']]", + "//result/doc[7][str[@name='id'][.='p1s3']]" // 777 + , + "//result/doc[8][str[@name='id'][.='p3s3']]" // 1234 + ); + // same query, w/forceElevation to change top level order + assertQ( + req( + "q", "*:* txt_t:XX", + "qt", "/elevate", + "elevateIds", "p3s3,z3,p3s4", + "forceElevation", "true", + "fq", "{!collapse " + opt + selector + " nullPolicy=expand}", + "fl", "id", + "sort", "num_i asc"), + "*[count(//doc)=8]", + "//result/doc[1][str[@name='id'][.='p3s3']]" // 1234 + , + "//result/doc[2][str[@name='id'][.='z3']]", + "//result/doc[3][str[@name='id'][.='p3s4']]" // 4 + , + "//result/doc[4][str[@name='id'][.='z1']]", + "//result/doc[5][str[@name='id'][.='z2']]", + "//result/doc[6][str[@name='id'][.='p2s4']]" // 13 + , + "//result/doc[7][str[@name='id'][.='z100']]", + "//result/doc[8][str[@name='id'][.='p1s3']]" // 777 + ); } - } // sort } } /** - * There is no reason why ExpandComponent should care if/when block collapse is used, - * this test just serves as a "future proofing" against the possibility that someone adds new expectations + * There is no reason why ExpandComponent should care if/when block collapse is used, this test + * just serves as a "future proofing" against the possibility that someone adds new expectations * to ExpandComponent of some side effect state that CollapseQParser should produce. * - * We don't bother testing _root_ field collapsing in this test, since it contains different field values - * then our other collapse fields. - * (and the other tests should adequeately prove that the block hueristics for _root_ collapsing work) + *

We don't bother testing _root_ field collapsing in this test, since it contains different + * field values than our other collapse fields. (and the other tests should adequately prove that + * the block heuristics for _root_ collapsing work) */ public void testBlockCollapseWithExpandComponent() throws Exception { - { // convert our docs + some docs w/o collapse fields, along with some commits, to update commands + { // convert our docs + some docs w/o collapse fields, along with some commits, to update + // commands // in a shuffled order and process all of them... - final List updates = Stream.concat(Stream.of(commit(), commit()), - Stream.concat(makeBlockDocs().stream(), - sdocs(dupFields(sdoc("id","z1", "num_i", 1)), - dupFields(sdoc("id","z2", "num_i", 2)), - dupFields(sdoc("id","z3", "num_i", 3))).stream() - ).map(doc -> adoc(doc))).collect(Collectors.toList()); + final List updates = + Stream.concat( + Stream.of(commit(), commit()), + Stream.concat( + makeBlockDocs().stream(), + sdocs( + dupFields(sdoc("id", "z1", "num_i", 1)), + dupFields(sdoc("id", "z2", "num_i", 2)), + dupFields(sdoc("id", "z3", "num_i", 3))) + .stream()) + .map(doc -> adoc(doc))) + .collect(Collectors.toList()); Collections.shuffle(updates, random()); for (String u : updates) { assertU(u); @@ -834,124 +993,171 @@ public void testBlockCollapseWithExpandComponent() throws Exception { } final String EX = "/response/lst[@name='expanded']/result"; - // we don't bother testing _root_ field collapsing, since it contains different field values then block_s1 - for (String opt : Arrays.asList(// no block collapse logic used (sanity checks) - "field=block_s1", - "field=block_i", - - // block collapse used explicitly (int) - "field=block_i hint=block", - - // block collapse used explicitly (ord) - "field=block_s1 hint=block" - )) { - - // these permutations should all give the same results, since the queries don't match any docs in 'null' groups - for (String nullPolicy : Arrays.asList("", // ignore is default - " nullPolicy=ignore", - " nullPolicy=expand")) { - + // we don't bother testing _root_ field collapsing, since it contains different field values + // than block_s1 + for (String opt : + Arrays.asList( // no block collapse logic used (sanity checks) + "field=block_s1", + "field=block_i", + + // block collapse used explicitly (int) + "field=block_i hint=block", + + // block collapse used explicitly (ord) + "field=block_s1 hint=block")) { + + // these permutations should all give the same results, since the queries don't match any docs + // in 'null' groups + for (String nullPolicy : + Arrays.asList( + "", // ignore is default + " nullPolicy=ignore", + " nullPolicy=expand")) { + // score based collapse with boost to change p1 group head - assertQ(req("q", "txt_t:XX", // only child docs with XX match - "expand", "true", - "qt", "/elevate", - "elevateIds", "p1s1", - "fl", "id", - "fq", "{!collapse " + opt + nullPolicy + "}", - "sort", "score desc, num_i asc") - , "*[count(/response/result/doc)=3]" - , "/response/result/doc[1]/str[@name='id'][.='p1s1']" - , "/response/result/doc[2]/str[@name='id'][.='p2s4']" - , "/response/result/doc[3]/str[@name='id'][.='p3s1']" - // - ,"*[count("+EX+")=count(/response/result/doc)]" // group per doc - // - ,"*[count("+EX+"[@name='-1']/doc)=3]" - ,EX+"[@name='-1']/doc[1]/str[@name='id'][.='p1s3']" - ,EX+"[@name='-1']/doc[2]/str[@name='id'][.='p1s4']" - ,EX+"[@name='-1']/doc[3]/str[@name='id'][.='p1s2']" - // - ,"*[count("+EX+"[@name='0']/doc)=2]" - 
,EX+"[@name='0']/doc[1]/str[@name='id'][.='p2s3']" - ,EX+"[@name='0']/doc[2]/str[@name='id'][.='p2s1']" - // - ,"*[count("+EX+"[@name='1']/doc)=2]" - ,EX+"[@name='1']/doc[1]/str[@name='id'][.='p3s4']" - ,EX+"[@name='1']/doc[2]/str[@name='id'][.='p3s3']" - ); + assertQ( + req( + "q", "txt_t:XX", // only child docs with XX match + "expand", "true", + "qt", "/elevate", + "elevateIds", "p1s1", + "fl", "id", + "fq", "{!collapse " + opt + nullPolicy + "}", + "sort", "score desc, num_i asc"), + "*[count(/response/result/doc)=3]", + "/response/result/doc[1]/str[@name='id'][.='p1s1']", + "/response/result/doc[2]/str[@name='id'][.='p2s4']", + "/response/result/doc[3]/str[@name='id'][.='p3s1']" + // + , + "*[count(" + EX + ")=count(/response/result/doc)]" // group per doc + // + , + "*[count(" + EX + "[@name='-1']/doc)=3]", + EX + "[@name='-1']/doc[1]/str[@name='id'][.='p1s3']", + EX + "[@name='-1']/doc[2]/str[@name='id'][.='p1s4']", + EX + "[@name='-1']/doc[3]/str[@name='id'][.='p1s2']" + // + , + "*[count(" + EX + "[@name='0']/doc)=2]", + EX + "[@name='0']/doc[1]/str[@name='id'][.='p2s3']", + EX + "[@name='0']/doc[2]/str[@name='id'][.='p2s1']" + // + , + "*[count(" + EX + "[@name='1']/doc)=2]", + EX + "[@name='1']/doc[1]/str[@name='id'][.='p3s4']", + EX + "[@name='1']/doc[2]/str[@name='id'][.='p3s3']"); } // nullPolicy=expand w/ func query to assert expected scores for (String suffix : SELECTOR_FIELD_SUFFIXES) { - for (String selector : Arrays.asList(" sort='asc" + suffix + " desc'", - " sort='sum(asc" + suffix + ",42) desc'", - " min=desc" + suffix, - " max=asc" + suffix, - " min='sum(desc" + suffix + ", 42)'", - " max='sum(asc" + suffix + ", 42)'")) { - assertQ(req("q", "{!func}sum(42, num_i)", - "expand", "true", - "fq", "{!collapse " + opt + " nullPolicy=expand}", - "fq", "num_i:[2 TO 13]", // NOTE: FQ!!!! - "fl","score,id", - "sort", "score desc, num_i asc") - , "*[count(/response/result/doc)=5]" - , "/response/result/doc[1][str[@name='id'][.='p2s4'] and float[@name='score'][.=55.0]]" - , "/response/result/doc[2][str[@name='id'][.='p1s2'] and float[@name='score'][.=52.0]]" - , "/response/result/doc[3][str[@name='id'][.='p3s4'] and float[@name='score'][.=46.0]]" - , "/response/result/doc[4][str[@name='id'][.='z3'] and float[@name='score'][.=45.0]]" - , "/response/result/doc[5][str[@name='id'][.='z2'] and float[@name='score'][.=44.0]]" - // - ,"*[count("+EX+")=2]" // groups w/o any other docs don't expand - // - ,"*[count("+EX+"[@name='-1']/doc)=1]" - ,EX+"[@name='-1']/doc[1][str[@name='id'][.='p1s4'] and float[@name='score'][.=48.0]]" - // - ,"*[count("+EX+"[@name='0']/doc)=1]" - ,EX+"[@name='0']/doc[1][str[@name='id'][.='p2s2'] and float[@name='score'][.=52.0]]" - // - // no "expand" docs for group '1' because no other docs match query - // no "expand" docs for nulls unless/until SOLR-14330 is implemented - ); + for (String selector : + Arrays.asList( + " sort='asc" + suffix + " desc'", + " sort='sum(asc" + suffix + ",42) desc'", + " min=desc" + suffix, + " max=asc" + suffix, + " min='sum(desc" + suffix + ", 42)'", + " max='sum(asc" + suffix + ", 42)'")) { + assertQ( + req( + "q", "{!func}sum(42, num_i)", + "expand", "true", + "fq", "{!collapse " + opt + " nullPolicy=expand}", + "fq", "num_i:[2 TO 13]", // NOTE: FQ!!!! 
+ "fl", "score,id", + "sort", "score desc, num_i asc"), + "*[count(/response/result/doc)=5]", + "/response/result/doc[1][str[@name='id'][.='p2s4'] and float[@name='score'][.=55.0]]", + "/response/result/doc[2][str[@name='id'][.='p1s2'] and float[@name='score'][.=52.0]]", + "/response/result/doc[3][str[@name='id'][.='p3s4'] and float[@name='score'][.=46.0]]", + "/response/result/doc[4][str[@name='id'][.='z3'] and float[@name='score'][.=45.0]]", + "/response/result/doc[5][str[@name='id'][.='z2'] and float[@name='score'][.=44.0]]" + // + , + "*[count(" + EX + ")=2]" // groups w/o any other docs don't expand + // + , + "*[count(" + EX + "[@name='-1']/doc)=1]", + EX + "[@name='-1']/doc[1][str[@name='id'][.='p1s4'] and float[@name='score'][.=48.0]]" + // + , + "*[count(" + EX + "[@name='0']/doc)=1]", + EX + "[@name='0']/doc[1][str[@name='id'][.='p2s2'] and float[@name='score'][.=52.0]]" + // + // no "expand" docs for group '1' because no other docs match query + // no "expand" docs for nulls unless/until SOLR-14330 is implemented + ); } } } } - /** - * returns a (new) list of the block based documents used in our test methods - */ + /** returns a (new) list of the block based documents used in our test methods */ protected static final List makeBlockDocs() { // NOTE: block_i and block_s1 will contain identical content so these need to be "numbers"... - // The specific numbers shouldn't matter (and we explicitly test '0' to confirm legacy bug/behavior + // The specific numbers shouldn't matter (and we explicitly test '0' to confirm legacy + // bug/behavior // of treating 0 as null is no longer a problem) ... final String A = "-1"; - final String B = "0"; + final String B = "0"; final String C = "1"; - return sdocs(dupFields(sdoc("id", "p1", - "block_i", A, - "skus", sdocs(dupFields(sdoc("id", "p1s1", "block_i", A, "txt_t", "a b c d e ", "num_i", 42)), - dupFields(sdoc("id", "p1s2", "block_i", A, "txt_t", "a XX c d e ", "num_i", 10)), - dupFields(sdoc("id", "p1s3", "block_i", A, "txt_t", "XX b XX XX e ", "num_i", 777)), - dupFields(sdoc("id", "p1s4", "block_i", A, "txt_t", "a XX c d XX", "num_i", 6)) - ))), - dupFields(sdoc("id", "p2", - "block_i", B, - "skus", sdocs(dupFields(sdoc("id", "p2s1", "block_i", B, "txt_t", "a XX c d e ", "num_i", 55)), - dupFields(sdoc("id", "p2s2", "block_i", B, "txt_t", "a b c d e ", "num_i", 10)), - dupFields(sdoc("id", "p2s3", "block_i", B, "txt_t", "XX b c XX e ", "num_i", 99)), - dupFields(sdoc("id", "p2s4", "block_i", B, "txt_t", "a XX XX d XX", "num_i", 13)) - ))), - dupFields(sdoc("id", "p3", - "block_i", C, - "skus", sdocs(dupFields(sdoc("id", "p3s1", "block_i", C, "txt_t", "a XX XX XX e ", "num_i", 15)), - dupFields(sdoc("id", "p3s2", "block_i", C, "txt_t", "a b c d e ", "num_i", 100)), - dupFields(sdoc("id", "p3s3", "block_i", C, "txt_t", "XX b c d e ", "num_i", 1234)), - dupFields(sdoc("id", "p3s4", "block_i", C, "txt_t", "a b XX d XX", "num_i", 4)) - )))); + return sdocs( + dupFields( + sdoc( + "id", + "p1", + "block_i", + A, + "skus", + sdocs( + dupFields( + sdoc("id", "p1s1", "block_i", A, "txt_t", "a b c d e ", "num_i", 42)), + dupFields( + sdoc("id", "p1s2", "block_i", A, "txt_t", "a XX c d e ", "num_i", 10)), + dupFields( + sdoc("id", "p1s3", "block_i", A, "txt_t", "XX b XX XX e ", "num_i", 777)), + dupFields( + sdoc("id", "p1s4", "block_i", A, "txt_t", "a XX c d XX", "num_i", 6))))), + dupFields( + sdoc( + "id", + "p2", + "block_i", + B, + "skus", + sdocs( + dupFields( + sdoc("id", "p2s1", "block_i", B, "txt_t", "a XX c d e ", "num_i", 55)), + 
dupFields( + sdoc("id", "p2s2", "block_i", B, "txt_t", "a b c d e ", "num_i", 10)), + dupFields( + sdoc("id", "p2s3", "block_i", B, "txt_t", "XX b c XX e ", "num_i", 99)), + dupFields( + sdoc( + "id", "p2s4", "block_i", B, "txt_t", "a XX XX d XX", "num_i", 13))))), + dupFields( + sdoc( + "id", + "p3", + "block_i", + C, + "skus", + sdocs( + dupFields( + sdoc("id", "p3s1", "block_i", C, "txt_t", "a XX XX XX e ", "num_i", 15)), + dupFields( + sdoc("id", "p3s2", "block_i", C, "txt_t", "a b c d e ", "num_i", 100)), + dupFields( + sdoc("id", "p3s3", "block_i", C, "txt_t", "XX b c d e ", "num_i", 1234)), + dupFields( + sdoc( + "id", "p3s4", "block_i", C, "txt_t", "a b XX d XX", "num_i", 4)))))); } - protected final static List SELECTOR_FIELD_SUFFIXES = Arrays.asList("_i", "_l", "_f"); + + protected static final List SELECTOR_FIELD_SUFFIXES = Arrays.asList("_i", "_l", "_f"); + protected static SolrInputDocument dupFields(final SolrInputDocument doc) { if (doc.getFieldNames().contains("block_i")) { doc.setField("block_s1", doc.getFieldValue("block_i")); @@ -959,7 +1165,7 @@ protected static SolrInputDocument dupFields(final SolrInputDocument doc) { // as num_i value increases, the asc_* fields increase // as num_i value increases, the desc_* fields decrease if (doc.getFieldNames().contains("num_i")) { - final int val = ((Integer)doc.getFieldValue("num_i")).intValue(); + final int val = ((Integer) doc.getFieldValue("num_i")).intValue(); for (String suffix : SELECTOR_FIELD_SUFFIXES) { doc.setField("asc" + suffix, val); doc.setField("desc" + suffix, 0 - val); diff --git a/solr/core/src/test/org/apache/solr/search/TestCaffeineCache.java b/solr/core/src/test/org/apache/solr/search/TestCaffeineCache.java index 08c19e19023..8a30e9198cc 100644 --- a/solr/core/src/test/org/apache/solr/search/TestCaffeineCache.java +++ b/solr/core/src/test/org/apache/solr/search/TestCaffeineCache.java @@ -16,6 +16,9 @@ */ package org.apache.solr.search; +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; +import com.github.benmanes.caffeine.cache.RemovalCause; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; @@ -26,8 +29,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; - -import com.github.benmanes.caffeine.cache.RemovalCause; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCase; @@ -35,12 +36,7 @@ import org.apache.solr.metrics.SolrMetricsContext; import org.junit.Test; -import com.github.benmanes.caffeine.cache.Cache; -import com.github.benmanes.caffeine.cache.Caffeine; - -/** - * Test for {@link CaffeineCache}. - */ +/** Test for {@link CaffeineCache}. 
*/ public class TestCaffeineCache extends SolrTestCase { SolrMetricManager metricManager = new SolrMetricManager(); @@ -75,7 +71,7 @@ public void testSimple() throws IOException { assertEquals(2L, nl.get("hits")); assertEquals(101L, nl.get("inserts")); - assertEquals(null, lfuCache.get(1)); // first item put in should be the first out + assertEquals(null, lfuCache.get(1)); // first item put in should be the first out // Test autowarming newLFUCache.init(params, initObj, regenerator); @@ -99,10 +95,8 @@ public void testSimple() throws IOException { @Test public void testTimeDecay() { - Cache cacheDecay = Caffeine.newBuilder() - .executor(Runnable::run) - .maximumSize(20) - .build(); + Cache cacheDecay = + Caffeine.newBuilder().executor(Runnable::run).maximumSize(20).build(); for (int i = 1; i < 21; i++) { cacheDecay.put(i, Integer.toString(i)); } @@ -120,8 +114,9 @@ public void testTimeDecay() { cacheDecay.put(24, "24"); cacheDecay.put(25, "25"); itemsDecay = cacheDecay.policy().eviction().get().hottest(10); - // 13 - 17 should be in cache, but 11 and 18 (among others) should not. Testing that elements before and - // after the ones with increased counts are removed, and all the increased count ones are still in the cache + // 13 - 17 should be in cache, but 11 and 18 (among others) should not. Testing that elements + // before and after the ones with increased counts are removed, and all the increased count ones + // are still in the cache assertNull(itemsDecay.get(11)); assertNull(itemsDecay.get(18)); assertNotNull(itemsDecay.get(13)); @@ -130,12 +125,11 @@ public void testTimeDecay() { assertNotNull(itemsDecay.get(16)); assertNotNull(itemsDecay.get(17)); - // Testing that all the elements in front of the ones with increased counts are gone for (int idx = 26; idx < 32; ++idx) { cacheDecay.put(idx, Integer.toString(idx)); } - //Surplus count should be at 0 + // Surplus count should be at 0 itemsDecay = cacheDecay.policy().eviction().get().hottest(10); assertNull(itemsDecay.get(20)); assertNull(itemsDecay.get(24)); @@ -151,14 +145,15 @@ public void testMaxIdleTime() throws Exception { int IDLE_TIME_SEC = 5; CountDownLatch removed = new CountDownLatch(1); AtomicReference removalCause = new AtomicReference<>(); - CaffeineCache cache = new CaffeineCache<>() { - @Override - public void onRemoval(String key, String value, RemovalCause cause) { - super.onRemoval(key, value, cause); - removalCause.set(cause); - removed.countDown(); - } - }; + CaffeineCache cache = + new CaffeineCache<>() { + @Override + public void onRemoval(String key, String value, RemovalCause cause) { + super.onRemoval(key, value, cause); + removalCause.set(cause); + removed.countDown(); + } + }; Map params = new HashMap<>(); params.put("size", "6"); params.put("maxIdleTime", "" + IDLE_TIME_SEC); @@ -182,49 +177,56 @@ public void testSetLimits() throws Exception { List removalCauses = new ArrayList<>(); List removedKeys = new ArrayList<>(); Set allKeys = new HashSet<>(); - CaffeineCache cache = new CaffeineCache<>() { - @Override - public Accountable put(String key, Accountable val) { - allKeys.add(key); - return super.put(key, val); - } + CaffeineCache cache = + new CaffeineCache<>() { + @Override + public Accountable put(String key, Accountable val) { + allKeys.add(key); + return super.put(key, val); + } - @Override - public void onRemoval(String key, Accountable value, RemovalCause cause) { - super.onRemoval(key, value, cause); - removalCauses.add(cause); - removedKeys.add(key); - removed.get().countDown(); - } - }; + 
@Override + public void onRemoval(String key, Accountable value, RemovalCause cause) { + super.onRemoval(key, value, cause); + removalCauses.add(cause); + removedKeys.add(key); + removed.get().countDown(); + } + }; Map params = new HashMap<>(); params.put("size", "5"); cache.init(params, null, new NoOpRegenerator()); for (int i = 0; i < 5; i++) { - cache.put("foo-" + i, new Accountable() { - @Override - public long ramBytesUsed() { - return 1024 * 1024; - } - }); + cache.put( + "foo-" + i, + new Accountable() { + @Override + public long ramBytesUsed() { + return 1024 * 1024; + } + }); } assertEquals(5, cache.size()); // no evictions yet assertEquals(2, removed.get().getCount()); - cache.put("abc1", new Accountable() { - @Override - public long ramBytesUsed() { - return 1; - } - }); - cache.put("abc2", new Accountable() { - @Override - public long ramBytesUsed() { - return 2; - } - }); + cache.put( + "abc1", + new Accountable() { + @Override + public long ramBytesUsed() { + return 1; + } + }); + cache.put( + "abc2", + new Accountable() { + @Override + public long ramBytesUsed() { + return 2; + } + }); boolean await = removed.get().await(30, TimeUnit.SECONDS); assertTrue("did not evict entries in in time", await); assertEquals(5, cache.size()); @@ -242,12 +244,14 @@ public long ramBytesUsed() { removedKeys.clear(); // trim down by item count cache.setMaxSize(3); - cache.put("abc3", new Accountable() { - @Override - public long ramBytesUsed() { - return 3; - } - }); + cache.put( + "abc3", + new Accountable() { + @Override + public long ramBytesUsed() { + return 3; + } + }); await = removed.get().await(30, TimeUnit.SECONDS); assertTrue("did not evict entries in in time", await); assertEquals(3, cache.size()); diff --git a/solr/core/src/test/org/apache/solr/search/TestCancellableCollector.java b/solr/core/src/test/org/apache/solr/search/TestCancellableCollector.java index a860d43d615..d31ca82a07c 100644 --- a/solr/core/src/test/org/apache/solr/search/TestCancellableCollector.java +++ b/solr/core/src/test/org/apache/solr/search/TestCancellableCollector.java @@ -104,7 +104,7 @@ private CancellableCollector buildCancellableCollector( } private void executeSearchTest( - IndexSearcher searcher, Query query, CancellableCollector cancellableCollector, int numHits) + IndexSearcher searcher, Query query, CancellableCollector cancellableCollector, int numHits) throws Exception { TopDocs topDocs = searcher.search(query, numHits); diff --git a/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java index 3d2e89d6744..289b74a7877 100644 --- a/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java @@ -16,6 +16,8 @@ */ package org.apache.solr.search; +import static org.hamcrest.core.StringContains.containsString; + import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -23,12 +25,11 @@ import java.util.Iterator; import java.util.List; import java.util.Set; -import java.util.stream.Stream; import java.util.stream.Collectors; - +import java.util.stream.Stream; import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrException; +import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.SolrParams; import 
org.apache.solr.search.CollapsingQParserPlugin.GroupHeadSelector; @@ -37,13 +38,12 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.hamcrest.core.StringContains.containsString; - public class TestCollapseQParserPlugin extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); initCore("solrconfig-collapseqparser.xml", "schema11.xml"); } @@ -66,124 +66,129 @@ public void testMultiSort() throws Exception { // assertU(adoc("id", "5", "group_s", "group2", "test_i", "5", "test_l", "10", "term_s", "YYYY")); assertU(commit()); - assertU(adoc("id", "6", "group_s", "group2", "test_i", "5", "test_l","1000")); - assertU(adoc("id", "7", "group_s", "group2", "test_i", "5", "test_l","1000", "term_s", "XXXX")); - assertU(adoc("id", "8", "group_s", "group2", "test_i", "10","test_l", "100")); + assertU(adoc("id", "6", "group_s", "group2", "test_i", "5", "test_l", "1000")); + assertU( + adoc("id", "7", "group_s", "group2", "test_i", "5", "test_l", "1000", "term_s", "XXXX")); + assertU(adoc("id", "8", "group_s", "group2", "test_i", "10", "test_l", "100")); assertU(commit()); - + ModifiableSolrParams params; - + // group heads are selected using the same sort that is then applied to the final groups params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("fq", "{!collapse field=group_s sort=$sort}"); params.add("sort", "test_i asc, test_l desc, id_i desc"); - assertQ(req(params) - , "*[count(//doc)=2]" - ,"//result/doc[1]/str[@name='id'][.='7']" - ,"//result/doc[2]/str[@name='id'][.='3']" - ); - + assertQ( + req(params), + "*[count(//doc)=2]", + "//result/doc[1]/str[@name='id'][.='7']", + "//result/doc[2]/str[@name='id'][.='3']"); + // group heads are selected using a complex sort, simpler sort used for final groups params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("fq", "{!collapse field=group_s sort='test_i asc, test_l desc, id_i desc'}"); params.add("sort", "id_i asc"); - assertQ(req(params) - , "*[count(//doc)=2]" - ,"//result/doc[1]/str[@name='id'][.='3']" - ,"//result/doc[2]/str[@name='id'][.='7']" - ); + assertQ( + req(params), + "*[count(//doc)=2]", + "//result/doc[1]/str[@name='id'][.='3']", + "//result/doc[2]/str[@name='id'][.='7']"); // diff up the sort directions, only first clause matters with our data params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("fq", "{!collapse field=group_s sort='test_i desc, test_l asc, id_i asc'}"); params.add("sort", "id_i desc"); - assertQ(req(params) - , "*[count(//doc)=2]" - ,"//result/doc[1]/str[@name='id'][.='8']" - ,"//result/doc[2]/str[@name='id'][.='4']" - ); - + assertQ( + req(params), + "*[count(//doc)=2]", + "//result/doc[1]/str[@name='id'][.='8']", + "//result/doc[2]/str[@name='id'][.='4']"); + // tie broken by index order params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("fq", "{!collapse field=group_s sort='test_l desc'}"); params.add("sort", "id_i desc"); - assertQ(req(params) - , "*[count(//doc)=2]" - ,"//result/doc[1]/str[@name='id'][.='6']" - ,"//result/doc[2]/str[@name='id'][.='2']" - ); + assertQ( + req(params), + "*[count(//doc)=2]", + "//result/doc[1]/str[@name='id'][.='6']", + "//result/doc[2]/str[@name='id'][.='2']"); // score, then tiebreakers; 
note top level sort by score ASCENDING (just for weirdness) params = new ModifiableSolrParams(); params.add("q", "*:* term_s:YYYY"); - params.add("fq", "{!collapse field=group_s sort='score desc, test_l desc, test_i asc, id_i asc'}"); + params.add( + "fq", "{!collapse field=group_s sort='score desc, test_l desc, test_i asc, id_i asc'}"); params.add("sort", "score asc"); - assertQ(req(params) - , "*[count(//doc)=2]" - ,"//result/doc[1]/str[@name='id'][.='2']" - ,"//result/doc[2]/str[@name='id'][.='5']" - ); + assertQ( + req(params), + "*[count(//doc)=2]", + "//result/doc[1]/str[@name='id'][.='2']", + "//result/doc[2]/str[@name='id'][.='5']"); // score, then tiebreakers; note no score in top level sort/fl to check needsScores logic params = new ModifiableSolrParams(); params.add("q", "*:* term_s:YYYY"); - params.add("fq", "{!collapse field=group_s sort='score desc, test_l desc, test_i asc, id_i asc'}"); + params.add( + "fq", "{!collapse field=group_s sort='score desc, test_l desc, test_i asc, id_i asc'}"); params.add("sort", "id_i desc"); - assertQ(req(params) - , "*[count(//doc)=2]" - ,"//result/doc[1]/str[@name='id'][.='5']" - ,"//result/doc[2]/str[@name='id'][.='2']" - ); - + assertQ( + req(params), + "*[count(//doc)=2]", + "//result/doc[1]/str[@name='id'][.='5']", + "//result/doc[2]/str[@name='id'][.='2']"); + // term_s desc -- term_s is missing from many docs, and uses sortMissingLast=true params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("fq", "{!collapse field=group_s sort='term_s desc, test_l asc'}"); params.add("sort", "id_i asc"); - assertQ(req(params) - , "*[count(//doc)=2]" - ,"//result/doc[1]/str[@name='id'][.='1']" - ,"//result/doc[2]/str[@name='id'][.='5']" - ); + assertQ( + req(params), + "*[count(//doc)=2]", + "//result/doc[1]/str[@name='id'][.='1']", + "//result/doc[2]/str[@name='id'][.='5']"); // term_s asc -- term_s is missing from many docs, and uses sortMissingLast=true params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("fq", "{!collapse field=group_s sort='term_s asc, test_l asc'}"); params.add("sort", "id_i asc"); - assertQ(req(params) - , "*[count(//doc)=2]" - ,"//result/doc[1]/str[@name='id'][.='1']" - ,"//result/doc[2]/str[@name='id'][.='7']" - ); + assertQ( + req(params), + "*[count(//doc)=2]", + "//result/doc[1]/str[@name='id'][.='1']", + "//result/doc[2]/str[@name='id'][.='7']"); // collapse on int field params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("fq", "{!collapse field=test_i sort='term_s asc, group_s asc'}"); params.add("sort", "id_i asc"); - assertQ(req(params) - , "*[count(//doc)=2]" - ,"//result/doc[1]/str[@name='id'][.='4']" - ,"//result/doc[2]/str[@name='id'][.='7']" - ); - + assertQ( + req(params), + "*[count(//doc)=2]", + "//result/doc[1]/str[@name='id'][.='4']", + "//result/doc[2]/str[@name='id'][.='7']"); + // collapse on term_s (very sparse) with nullPolicy=collapse params = new ModifiableSolrParams(); params.add("q", "*:*"); - params.add("fq", "{!collapse field=term_s nullPolicy=collapse sort='test_i asc, test_l desc, id_i asc'}"); + params.add( + "fq", + "{!collapse field=term_s nullPolicy=collapse sort='test_i asc, test_l desc, id_i asc'}"); params.add("sort", "test_l asc, id_i asc"); - assertQ(req(params) - , "*[count(//doc)=3]" - ,"//result/doc[1]/str[@name='id'][.='5']" - ,"//result/doc[2]/str[@name='id'][.='2']" - ,"//result/doc[3]/str[@name='id'][.='7']" - ); - + assertQ( + req(params), + "*[count(//doc)=3]", + "//result/doc[1]/str[@name='id'][.='5']", + 
"//result/doc[2]/str[@name='id'][.='2']", + "//result/doc[3]/str[@name='id'][.='7']"); + // sort local param + elevation params = new ModifiableSolrParams(); params.add("q", "*:*"); @@ -192,10 +197,11 @@ public void testMultiSort() throws Exception { params.add("qt", "/elevate"); params.add("forceElevation", "true"); params.add("elevateIds", "4"); - assertQ(req(params), - "*[count(//doc)=2]", - "//result/doc[1]/str[@name='id'][.='4']", - "//result/doc[2]/str[@name='id'][.='5']"); + assertQ( + req(params), + "*[count(//doc)=2]", + "//result/doc[1]/str[@name='id'][.='4']", + "//result/doc[2]/str[@name='id'][.='5']"); // params = new ModifiableSolrParams(); params.add("q", "*:*"); @@ -204,16 +210,16 @@ public void testMultiSort() throws Exception { params.add("qt", "/elevate"); params.add("forceElevation", "true"); params.add("elevateIds", "7"); - assertQ(req(params), - "*[count(//doc)=2]", - "//result/doc[1]/str[@name='id'][.='7']", - "//result/doc[2]/str[@name='id'][.='1']"); - + assertQ( + req(params), + "*[count(//doc)=2]", + "//result/doc[1]/str[@name='id'][.='7']", + "//result/doc[2]/str[@name='id'][.='1']"); } @Test public void testStringCollapse() throws Exception { - for (final String hint : new String[] {"", " hint="+CollapsingQParserPlugin.HINT_TOP_FC}) { + for (final String hint : new String[] {"", " hint=" + CollapsingQParserPlugin.HINT_TOP_FC}) { testCollapseQueries("group_s", hint, false); testCollapseQueries("group_s_dv", hint, false); } @@ -230,56 +236,63 @@ public void testNumericCollapse() throws Exception { @Test public void testFieldValueCollapseWithNegativeMinMax() throws Exception { - String[] doc = {"id","1", "group_i", "-1000", "test_i", "5", "test_l", "-10", "test_f", "2000.32"}; + String[] doc = { + "id", "1", "group_i", "-1000", "test_i", "5", "test_l", "-10", "test_f", "2000.32" + }; assertU(adoc(doc)); assertU(commit()); - String[] doc1 = {"id","2", "group_i", "-1000", "test_i", "50", "test_l", "-100", "test_f", "2000.33"}; + String[] doc1 = { + "id", "2", "group_i", "-1000", "test_i", "50", "test_l", "-100", "test_f", "2000.33" + }; assertU(adoc(doc1)); - String[] doc2 = {"id","3", "group_i", "-1000", "test_l", "100", "test_f", "200"}; + String[] doc2 = {"id", "3", "group_i", "-1000", "test_l", "100", "test_f", "200"}; assertU(adoc(doc2)); assertU(commit()); - String[] doc3 = {"id","4", "test_i", "500", "test_l", "1000", "test_f", "2000"}; + String[] doc3 = {"id", "4", "test_i", "500", "test_l", "1000", "test_f", "2000"}; assertU(adoc(doc3)); - String[] doc4 = {"id","5", "group_i", "-1000", "test_i", "4", "test_l", "10", "test_f", "2000.31"}; + String[] doc4 = { + "id", "5", "group_i", "-1000", "test_i", "4", "test_l", "10", "test_f", "2000.31" + }; assertU(adoc(doc4)); assertU(commit()); - String[] doc5 = {"id","6", "group_i", "-1000", "test_i", "10", "test_l", "100", "test_f", "-2000.12"}; + String[] doc5 = { + "id", "6", "group_i", "-1000", "test_i", "10", "test_l", "100", "test_f", "-2000.12" + }; assertU(adoc(doc5)); assertU(commit()); - String[] doc6 = {"id","7", "group_i", "-1000", "test_i", "8", "test_l", "-50", "test_f", "-100.2"}; + String[] doc6 = { + "id", "7", "group_i", "-1000", "test_i", "8", "test_l", "-50", "test_f", "-100.2" + }; assertU(adoc(doc6)); assertU(commit()); ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("fq", "{!collapse field=group_i min=test_f}"); - assertQ(req(params), "*[count(//doc)=1]", - "//result/doc[1]/str[@name='id'][.='6']"); + assertQ(req(params), "*[count(//doc)=1]", 
"//result/doc[1]/str[@name='id'][.='6']"); params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("fq", "{!collapse field=group_i max=test_f}"); - assertQ(req(params), "*[count(//doc)=1]", - "//result/doc[1]/str[@name='id'][.='2']"); - + assertQ(req(params), "*[count(//doc)=1]", "//result/doc[1]/str[@name='id'][.='2']"); } @Test // https://issues.apache.org/jira/browse/SOLR-9494 public void testNeedsScoreBugFixed() throws Exception { - String[] doc = {"id","1", "group_s", "xyz", "text_ws", "hello xxx world"}; + String[] doc = {"id", "1", "group_s", "xyz", "text_ws", "hello xxx world"}; assertU(adoc(doc)); assertU(commit()); - ModifiableSolrParams params = params( - "q", "{!surround df=text_ws} 2W(hello, world)", // a SpanQuery that matches - "fq", "{!collapse field=group_s}", // collapse on some field - // note: rows= whatever; doesn't matter - "facet", "true", // facet on something - "facet.field", "group_s" - ); + ModifiableSolrParams params = + params( + "q", "{!surround df=text_ws} 2W(hello, world)", // a SpanQuery that matches + "fq", "{!collapse field=group_s}", // collapse on some field + // note: rows= whatever; doesn't matter + "facet", "true", // facet on something + "facet.field", "group_s"); assertQ(req(params)); assertQ(req(params)); // fails *second* time! } @@ -290,11 +303,11 @@ public void testMergeBoost() throws Exception { Set boosted = new HashSet<>(); Set results = new HashSet<>(); - for(int i=0; i<200; i++) { + for (int i = 0; i < 200; i++) { boosted.add(random().nextInt(1000)); } - for(int i=0; i<200; i++) { + for (int i = 0; i < 200; i++) { results.add(random().nextInt(1000)); } @@ -303,47 +316,56 @@ public void testMergeBoost() throws Exception { Iterator boostIt = boosted.iterator(); int index = 0; - while(boostIt.hasNext()) { + while (boostIt.hasNext()) { boostedArray[index++] = boostIt.next(); } Iterator resultsIt = results.iterator(); index = 0; - while(resultsIt.hasNext()) { + while (resultsIt.hasNext()) { resultsArray[index++] = resultsIt.next(); } Arrays.sort(boostedArray); Arrays.sort(resultsArray); - CollapsingQParserPlugin.MergeBoost mergeBoost = new CollapsingQParserPlugin.MergeBoost(boostedArray); + CollapsingQParserPlugin.MergeBoost mergeBoost = + new CollapsingQParserPlugin.MergeBoost(boostedArray); List boostedResults = new ArrayList<>(); - for(int i=0; i controlResults = new ArrayList<>(); - for(int i=0; i -1) { + if (Arrays.binarySearch(boostedArray, result) > -1) { controlResults.add(result); } } - if(boostedResults.size() == controlResults.size()) { - for(int i=0; i { - h.query(req(params("q", "*:*", "fq", "{!collapse field="+group+hint+"}", "group", "true", "group.field", "id"))); - }); + SolrException ex = + expectThrows( + SolrException.class, + () -> { + h.query( + req( + params( + "q", + "*:*", + "fq", + "{!collapse field=" + group + hint + "}", + "group", + "true", + "group.field", + "id"))); + }); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); assertThat(ex.getMessage(), containsString("Can not use collapse with Grouping enabled")); @@ -840,16 +981,17 @@ private void testCollapseQueries(String group, String hint, boolean numeric) thr assertU(commit()); params = new ModifiableSolrParams(); params.add("q", "YYYY"); - params.add("fq", "{!collapse field="+group+hint+" nullPolicy=collapse}"); + params.add("fq", "{!collapse field=" + group + hint + " nullPolicy=collapse}"); params.add("defType", "edismax"); params.add("bf", "field(test_i)"); params.add("qf", "term_s"); params.add("qt", "/elevate"); - 
assertQ(req(params), "*[count(//doc)=3]", - "//result/doc[1]/str[@name='id'][.='3']", - "//result/doc[2]/str[@name='id'][.='6']", - "//result/doc[3]/str[@name='id'][.='7']"); - + assertQ( + req(params), + "*[count(//doc)=3]", + "//result/doc[1]/str[@name='id'][.='3']", + "//result/doc[2]/str[@name='id'][.='6']", + "//result/doc[3]/str[@name='id'][.='7']"); } @Test @@ -857,7 +999,9 @@ public void testMissingFieldParam() throws Exception { ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("fq", "{!collapse}"); - assertQEx("It should respond with a bad request when the 'field' param is missing", req(params), + assertQEx( + "It should respond with a bad request when the 'field' param is missing", + req(params), SolrException.ErrorCode.BAD_REQUEST); } @@ -867,31 +1011,42 @@ public void testEmptyCollection() throws Exception { String group = (random().nextBoolean() ? "group_s" : "group_s_dv"); // min-or-max is for CollapsingScoreCollector vs. CollapsingFieldValueCollector - String optional_min_or_max = (random().nextBoolean() ? "" : (random().nextBoolean() ? "min=field(test_i)" : "max=field(test_i)")); - + String optional_min_or_max = + (random().nextBoolean() + ? "" + : (random().nextBoolean() ? "min=field(test_i)" : "max=field(test_i)")); + ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); - params.add("fq", "{!collapse field="+group+" "+optional_min_or_max+"}"); + params.add("fq", "{!collapse field=" + group + " " + optional_min_or_max + "}"); assertQ(req(params), "*[count(//doc)=0]"); // if a field is uninvertible=false, it should behave the same as a field that is indexed=false // this is currently ok on fields that don't exist on any docs in the index for (String f : Arrays.asList("not_indexed_sS", "indexed_s_not_uninvert")) { for (String hint : Arrays.asList("", " hint=top_fc")) { - SolrException e = expectThrows(SolrException.class, - () -> h.query(req("q", "*:*", "fq", "{!collapse field="+f + hint +"}"))); + SolrException e = + expectThrows( + SolrException.class, + () -> h.query(req("q", "*:*", "fq", "{!collapse field=" + f + hint + "}"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); - assertTrue("unexpected Message: " + e.getMessage(), - e.getMessage().contains("Collapsing field '" + f + "' " + - "should be either docValues enabled or indexed with uninvertible enabled")); + assertTrue( + "unexpected Message: " + e.getMessage(), + e.getMessage() + .contains( + "Collapsing field '" + + f + + "' " + + "should be either docValues enabled or indexed with uninvertible enabled")); } } } public void testNoDocsHaveGroupField() throws Exception { // as unlikely as this test seems, it's important for the possibility that a segment exists w/o - // any live docs that have DocValues for the group field -- ie: every doc in segment is in null group. - + // any live docs that have DocValues for the group field -- ie: every doc in segment is in null + // group. 
+ assertU(adoc("id", "1", "group_s", "group1", "test_i", "5", "test_l", "10")); assertU(commit()); assertU(adoc("id", "2", "group_s", "group1", "test_i", "5", "test_l", "1000")); @@ -900,58 +1055,72 @@ public void testNoDocsHaveGroupField() throws Exception { // assertU(adoc("id", "5", "group_s", "group2", "test_i", "5", "test_l", "10", "term_s", "YYYY")); assertU(commit()); - assertU(adoc("id", "6", "group_s", "group2", "test_i", "5", "test_l","1000")); - assertU(adoc("id", "7", "group_s", "group2", "test_i", "5", "test_l","1000", "term_s", "XXXX")); - assertU(adoc("id", "8", "group_s", "group2", "test_i", "10","test_l", "100")); + assertU(adoc("id", "6", "group_s", "group2", "test_i", "5", "test_l", "1000")); + assertU( + adoc("id", "7", "group_s", "group2", "test_i", "5", "test_l", "1000", "term_s", "XXXX")); + assertU(adoc("id", "8", "group_s", "group2", "test_i", "10", "test_l", "100")); assertU(commit()); - + // none of these grouping fields are in any doc - for (String group : new String[] { - "field=bogus_s", "field=bogus_s_dv", - "field=bogus_s hint=top_fc", // alternative docvalues codepath w/ hint - "field=bogus_s_dv hint=top_fc", // alternative docvalues codepath w/ hint - "field=bogus_i", "field=bogus_tf" }) { - - // for any of these selectors, behavior of these checks should be consistent - for (String selector : new String[] { - "", " sort='score desc' ", - " min=test_i ", " max=test_i ", " sort='test_i asc' ", " sort='test_i desc' ", - " min=test_f ", " max=test_f ", " sort='test_f asc' ", " sort='test_f desc' ", - " sort='group_s asc' ", " sort='group_s desc' ", - // fields that don't exist - " min=bogus_sort_i ", " max=bogus_sort_i ", - " sort='bogus_sort_i asc' ", " sort='bogus_sort_i desc' ", - " sort='bogus_sort_s asc' ", " sort='bogus_sort_s desc' ", + for (String group : + new String[] { + "field=bogus_s", + "field=bogus_s_dv", + "field=bogus_s hint=top_fc", // alternative docvalues codepath w/ hint + "field=bogus_s_dv hint=top_fc", // alternative docvalues codepath w/ hint + "field=bogus_i", + "field=bogus_tf" }) { - - + + // for any of these selectors, behavior of these checks should be consistent + for (String selector : + new String[] { + "", + " sort='score desc' ", + " min=test_i ", + " max=test_i ", + " sort='test_i asc' ", + " sort='test_i desc' ", + " min=test_f ", + " max=test_f ", + " sort='test_f asc' ", + " sort='test_f desc' ", + " sort='group_s asc' ", + " sort='group_s desc' ", + // fields that don't exist + " min=bogus_sort_i ", + " max=bogus_sort_i ", + " sort='bogus_sort_i asc' ", + " sort='bogus_sort_i desc' ", + " sort='bogus_sort_s asc' ", + " sort='bogus_sort_s desc' ", + }) { + ModifiableSolrParams params = null; // w/default nullPolicy, no groups found params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("sort", "id_i desc"); - params.add("fq", "{!collapse "+group+" "+selector+"}"); + params.add("fq", "{!collapse " + group + " " + selector + "}"); assertQ(req(params), "*[count(//doc)=0]"); // w/nullPolicy=expand, every doc found params = new ModifiableSolrParams(); params.add("q", "*:*"); params.add("sort", "id_i desc"); - params.add("fq", "{!collapse field="+group+" nullPolicy=expand "+selector+"}"); - assertQ(req(params) - , "*[count(//doc)=8]" - ,"//result/doc[1]/str[@name='id'][.='8']" - ,"//result/doc[2]/str[@name='id'][.='7']" - ,"//result/doc[3]/str[@name='id'][.='6']" - ,"//result/doc[4]/str[@name='id'][.='5']" - ,"//result/doc[5]/str[@name='id'][.='4']" - ,"//result/doc[6]/str[@name='id'][.='3']" - 
,"//result/doc[7]/str[@name='id'][.='2']" - ,"//result/doc[8]/str[@name='id'][.='1']" - ); - - + params.add("fq", "{!collapse field=" + group + " nullPolicy=expand " + selector + "}"); + assertQ( + req(params), + "*[count(//doc)=8]", + "//result/doc[1]/str[@name='id'][.='8']", + "//result/doc[2]/str[@name='id'][.='7']", + "//result/doc[3]/str[@name='id'][.='6']", + "//result/doc[4]/str[@name='id'][.='5']", + "//result/doc[5]/str[@name='id'][.='4']", + "//result/doc[6]/str[@name='id'][.='3']", + "//result/doc[7]/str[@name='id'][.='2']", + "//result/doc[8]/str[@name='id'][.='1']"); } } } @@ -959,10 +1128,11 @@ public void testNoDocsHaveGroupField() throws Exception { public void testGroupHeadSelector() { GroupHeadSelector s; - expectThrows(SolrException.class, "no exception with multi criteria", - () -> GroupHeadSelector.build(params("sort", "foo_s asc", "min", "bar_s")) - ); - + expectThrows( + SolrException.class, + "no exception with multi criteria", + () -> GroupHeadSelector.build(params("sort", "foo_s asc", "min", "bar_s"))); + s = GroupHeadSelector.build(params("min", "foo_s")); assertEquals(GroupHeadSelectorType.MIN, s.type); assertEquals("foo_s", s.selectorText); @@ -981,121 +1151,174 @@ public void testGroupHeadSelector() { s = GroupHeadSelector.build(params("sort", "foo_s asc")); assertEquals(GroupHeadSelectorType.SORT, s.type); assertEquals("foo_s asc", s.selectorText); - assertEquals(GroupHeadSelector.build(params("sort", "foo_s asc")), - s); + assertEquals(GroupHeadSelector.build(params("sort", "foo_s asc")), s); assertFalse(s.equals(GroupHeadSelector.build(params("sort", "BAR_s asc")))); assertFalse(s.equals(GroupHeadSelector.build(params("min", "BAR_s")))); assertFalse(s.equals(GroupHeadSelector.build(params()))); - assertEquals(GroupHeadSelector.build(params("sort", "foo_s asc")).hashCode(), - GroupHeadSelector.build(params("sort", "foo_s asc", - "other", "stuff")).hashCode()); - + assertEquals( + GroupHeadSelector.build(params("sort", "foo_s asc")).hashCode(), + GroupHeadSelector.build( + params( + "sort", "foo_s asc", + "other", "stuff")) + .hashCode()); } @Test public void testForNotSupportedCases() { - String[] doc = {"id","3", "term_s", "YYYY", "test_ii", "5000", "test_l", "100", "test_f", "200", - "not_indexed_sS", "zzz", "indexed_s_not_uninvert", "zzz"}; + String[] doc = { + "id", + "3", + "term_s", + "YYYY", + "test_ii", + "5000", + "test_l", + "100", + "test_f", + "200", + "not_indexed_sS", + "zzz", + "indexed_s_not_uninvert", + "zzz" + }; assertU(adoc(doc)); assertU(commit()); // collapsing on multivalued field - assertQEx("Should Fail with Bad Request", "Collapsing not supported on multivalued fields", - req("q","*:*", "fq","{!collapse field=test_ii}"), SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Should Fail with Bad Request", + "Collapsing not supported on multivalued fields", + req("q", "*:*", "fq", "{!collapse field=test_ii}"), + SolrException.ErrorCode.BAD_REQUEST); // collapsing on unknown field - assertQEx("Should Fail with Bad Request", "org.apache.solr.search.SyntaxError: undefined field: \"bleh\"", - req("q","*:*", "fq","{!collapse field=bleh}"), SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Should Fail with Bad Request", + "org.apache.solr.search.SyntaxError: undefined field: \"bleh\"", + req("q", "*:*", "fq", "{!collapse field=bleh}"), + SolrException.ErrorCode.BAD_REQUEST); - // if a field is uninvertible=false, it should behave the same as a field that is indexed=false ... 
+ // if a field is uninvertible=false, it should behave the same as a field that is indexed=false // this also tests docValues=false along with indexed=false or univertible=false for (String f : Arrays.asList("not_indexed_sS", "indexed_s_not_uninvert")) { { - SolrException e = expectThrows(SolrException.class, - () -> h.query(req(params("q", "*:*", - "fq", "{!collapse field="+f+"}")))); + SolrException e = + expectThrows( + SolrException.class, + () -> h.query(req(params("q", "*:*", "fq", "{!collapse field=" + f + "}")))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); - assertTrue("unexpected Message: " + e.getMessage(), - e.getMessage().contains("Collapsing field '" + f + "' " + - "should be either docValues enabled or indexed with uninvertible enabled")); + assertTrue( + "unexpected Message: " + e.getMessage(), + e.getMessage() + .contains( + "Collapsing field '" + + f + + "' " + + "should be either docValues enabled or indexed with uninvertible enabled")); } { - SolrException e = expectThrows(SolrException.class, - () -> h.query(req("q", "*:*", "fq", "{!collapse field="+f+" hint=top_fc}"))); + SolrException e = + expectThrows( + SolrException.class, + () -> h.query(req("q", "*:*", "fq", "{!collapse field=" + f + " hint=top_fc}"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); - assertTrue("unexpected Message: " + e.getMessage(), - e.getMessage().contains("Collapsing field '" + f + "' " + - "should be either docValues enabled or indexed with uninvertible enabled")); + assertTrue( + "unexpected Message: " + e.getMessage(), + e.getMessage() + .contains( + "Collapsing field '" + + f + + "' " + + "should be either docValues enabled or indexed with uninvertible enabled")); } - } } @Test public void test64BitCollapseFieldException() { - assertQEx("Should Fail For collapsing on Long fields", "Collapsing field should be of either String, Int or Float type", - req("q", "*:*", "fq", "{!collapse field=group_l}"), SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Should Fail For collapsing on Long fields", + "Collapsing field should be of either String, Int or Float type", + req("q", "*:*", "fq", "{!collapse field=group_l}"), + SolrException.ErrorCode.BAD_REQUEST); - assertQEx("Should Fail For collapsing on Double fields", "Collapsing field should be of either String, Int or Float type", - req("q", "*:*", "fq", "{!collapse field=group_d}"), SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Should Fail For collapsing on Double fields", + "Collapsing field should be of either String, Int or Float type", + req("q", "*:*", "fq", "{!collapse field=group_d}"), + SolrException.ErrorCode.BAD_REQUEST); - assertQEx("Should Fail For collapsing on Date fields", "Collapsing field should be of either String, Int or Float type", - req("q", "*:*", "fq", "{!collapse field=group_dt}"), SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Should Fail For collapsing on Date fields", + "Collapsing field should be of either String, Int or Float type", + req("q", "*:*", "fq", "{!collapse field=group_dt}"), + SolrException.ErrorCode.BAD_REQUEST); } - + @Test public void testMinExactCountDisabledByCollapse() throws Exception { int numDocs = 10; String collapseFieldInt = "field_ti_dv"; String collapseFieldFloat = "field_tf_dv"; String collapseFieldString = "field_s_dv"; - for (int i = 0 ; i < numDocs ; i ++) { - assertU(adoc( - "id", String.valueOf(i), - "field_s", String.valueOf(i % 2), - collapseFieldInt, String.valueOf(i), - collapseFieldFloat, String.valueOf(i), - 
collapseFieldString, String.valueOf(i))); - assertU(commit()); + for (int i = 0; i < numDocs; i++) { + assertU( + adoc( + "id", + String.valueOf(i), + "field_s", + String.valueOf(i % 2), + collapseFieldInt, + String.valueOf(i), + collapseFieldFloat, + String.valueOf(i), + collapseFieldString, + String.valueOf(i))); + assertU(commit()); } - - for (String collapseField : Arrays.asList(collapseFieldInt, collapseFieldFloat, collapseFieldString)) { + + for (String collapseField : + Arrays.asList(collapseFieldInt, collapseFieldFloat, collapseFieldString)) { // all of our docs have a value in the collapse field(s) so the policy shouldn't matter... - for (String policy : Arrays.asList("", " nullPolicy=ignore", " nullPolicy=expand", " nullPolicy=collapse")) { - assertQ(req("q", "{!cache=false}field_s:1", - "rows", "1", - "minExactCount", "1", // collapse should force this to be ignored - // this collapse will end up creating a group for each matched doc - "fq", "{!collapse field=" + collapseField + policy + "}" - ) - , "//*[@numFoundExact='true']" - , "//*[@numFound='" + (numDocs/2) + "']" - ); + for (String policy : + Arrays.asList("", " nullPolicy=ignore", " nullPolicy=expand", " nullPolicy=collapse")) { + assertQ( + req( + "q", "{!cache=false}field_s:1", + "rows", "1", + "minExactCount", "1", // collapse should force this to be ignored + // this collapse will end up creating a group for each matched doc + "fq", "{!collapse field=" + collapseField + policy + "}"), + "//*[@numFoundExact='true']", + "//*[@numFound='" + (numDocs / 2) + "']"); } } } public void testNullGroupNumericVsStringCollapse() throws Exception { // NOTE: group_i and group_s will contain identical content so these need to be "numbers"... - // The specific numbers shouldn't matter (and we explicitly test '0' to confirm legacy bug/behavior - // of treating 0 as null is no longer a problem) ... + // The specific numbers shouldn't matter (and we explicitly test '0' to confirm legacy + // bug/behavior of treating 0 as null is no longer a problem) ... final String A = "-1"; - final String B = "0"; + final String B = "0"; final String C = "1"; - // Stub out our documents. From now on assume highest "id" of each group should be group head... - final List docs = sdocs - (sdoc("id", "0"), // null group - sdoc("id", "1", "group_i", A, "group_s", A), - sdoc("id", "2", "group_i", B, "group_s", B), - sdoc("id", "3", "group_i", B, "group_s", B), // B head - sdoc("id", "4"), // null group - sdoc("id", "5", "group_i", A, "group_s", A), - sdoc("id", "6", "group_i", C, "group_s", C), - sdoc("id", "7"), // null group // null head - sdoc("id", "8", "group_i", A, "group_s", A), // A head - sdoc("id", "9", "group_i", C, "group_s", C)); // C head + // Stub out our documents. From now on assume highest "id" of each group should be group + // head... + final List docs = + sdocs( + sdoc("id", "0"), // null group + sdoc("id", "1", "group_i", A, "group_s", A), + sdoc("id", "2", "group_i", B, "group_s", B), + sdoc("id", "3", "group_i", B, "group_s", B), // B head + sdoc("id", "4"), // null group + sdoc("id", "5", "group_i", A, "group_s", A), + sdoc("id", "6", "group_i", C, "group_s", C), + sdoc("id", "7"), // null group // null head + sdoc("id", "8", "group_i", A, "group_s", A), // A head + sdoc("id", "9", "group_i", C, "group_s", C)); // C head final List SELECTOR_FIELD_SUFFIXES = Arrays.asList("_i", "_l", "_f"); // add all the fields we'll be using as group head selectors... 
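
For readers following these assertions outside the test harness: the collapse filter exercised throughout this file is an ordinary fq with local params, so a SolrJ client composes it exactly the same way. Below is a minimal sketch, not part of the patch; the endpoint URL, the "products" collection, and the group_s/price_f pairing are illustrative assumptions rather than anything taken from the test schema.

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.Http2SolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;

public class CollapseQuerySketch {
  public static void main(String[] args) throws Exception {
    // Placeholder endpoint and collection name -- adjust for a real deployment.
    try (SolrClient client = new Http2SolrClient.Builder("http://localhost:8983/solr").build()) {
      SolrQuery q = new SolrQuery("*:*");
      // Keep one document per group_s value, choosing each group head by largest price_f;
      // nullPolicy=expand returns documents that have no group_s value as their own groups.
      q.addFilterQuery("{!collapse field=group_s max=price_f nullPolicy=expand}");
      q.setFields("id", "group_s", "price_f");
      QueryResponse rsp = client.query("products", q);
      rsp.getResults().forEach(doc -> System.out.println(doc.getFieldValue("id")));
    }
  }
}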
@@ -1103,177 +1326,311 @@ public void testNullGroupNumericVsStringCollapse() throws Exception { int desc = 0; for (SolrInputDocument doc : docs) { for (String type : SELECTOR_FIELD_SUFFIXES) { - doc.setField("asc" + type, asc); + doc.setField("asc" + type, asc); doc.setField("desc" + type, desc); } asc++; desc--; } - // convert our docs to update commands, along with some commits, in a shuffled order and process all of them... - final List<String> updates = Stream.concat(Stream.of(commit(), commit()), - docs.stream().map(doc -> adoc(doc))).collect(Collectors.toList()); + // convert our docs to update commands, along with some commits, in a shuffled order and process + // all of them... + final List<String> updates = + Stream.concat(Stream.of(commit(), commit()), docs.stream().map(doc -> adoc(doc))) + .collect(Collectors.toList()); Collections.shuffle(updates, random()); for (String u : updates) { assertU(u); } assertU(commit()); - // function based query for deterministic scores final String q = "{!func}sum(asc_i,42)"; - - // results should be the same regardless of wether we collapse on a string field or numeric field - // (docs have identicle group identifiers in both fields) - for (String f : Arrays.asList("group_i", - "group_s")) { - + + // results should be the same regardless of whether we collapse on a string field or numeric + // field (docs have identical group identifiers in both fields) + for (String f : Arrays.asList("group_i", "group_s")) { + // these group head selectors should all result in identical group heads for our query... for (String suffix : SELECTOR_FIELD_SUFFIXES) { - for (String selector : Arrays.asList("", - "max=asc" + suffix, - "min=desc" + suffix, - "sort='asc" + suffix + " desc'", - "sort='desc" +suffix + " asc'", - "max=sum(42,asc" + suffix + ")", - "min=sum(42,desc" + suffix + ")", - "max=sub(0,desc" + suffix + ")", - "min=sub(0,asc" + suffix + ")")) { - + for (String selector : + Arrays.asList( + "", + "max=asc" + suffix, + "min=desc" + suffix, + "sort='asc" + suffix + " desc'", + "sort='desc" + suffix + " asc'", + "max=sum(42,asc" + suffix + ")", + "min=sum(42,desc" + suffix + ")", + "max=sub(0,desc" + suffix + ")", + "min=sub(0,asc" + suffix + ")")) { + if (selector.endsWith("_l") && f.endsWith("_i")) { - assertQEx("expected known limitation of using long for min/max selector when doing numeric collapse", - "min/max must be Int or Float", - req("q", q, - "fq", "{!collapse field=" + f + " nullPolicy=ignore " + selector + "}"), - SolrException.ErrorCode.BAD_REQUEST); - - continue; + assertQEx( + "expected known limitation of using long for min/max selector when doing numeric collapse", + "min/max must be Int or Float", + req("q", q, "fq", "{!collapse field=" + f + " nullPolicy=ignore " + selector + "}"), + SolrException.ErrorCode.BAD_REQUEST); + + continue; } - - + // ignore nulls - assertQ(req(params("q", q, - "fq", "{!collapse field=" + f + " nullPolicy=ignore " + selector + "}")) - , "*[count(//doc)=3]" - ,"//result/doc[1]/str[@name='id'][.='9']" // group C - ,"//result/doc[2]/str[@name='id'][.='8']" // group A - ,"//result/doc[3]/str[@name='id'][.='3']" // group B - ); - assertQ(req(params("qt", "/elevate", "elevateIds", "1,5", - "q", q, - "fq", "{!collapse field=" + f + " nullPolicy=ignore " + selector + "}")) - , "*[count(//doc)=4]" - ,"//result/doc[1]/str[@name='id'][.='1']" // elevated, prevents group A - ,"//result/doc[2]/str[@name='id'][.='5']" // elevated, (also) prevents group A - ,"//result/doc[3]/str[@name='id'][.='9']" // group C - 
,"//result/doc[4]/str[@name='id'][.='3']" // group B - ); - assertQ(req(params("qt", "/elevate", "elevateIds", "0,7", - "q", q, - "fq", "{!collapse field=" + f + " nullPolicy=ignore " + selector + "}")) - , "*[count(//doc)=5]" - ,"//result/doc[1]/str[@name='id'][.='0']" // elevated (null) - ,"//result/doc[2]/str[@name='id'][.='7']" // elevated (null) - ,"//result/doc[3]/str[@name='id'][.='9']" // group C - ,"//result/doc[4]/str[@name='id'][.='8']" // group A - ,"//result/doc[5]/str[@name='id'][.='3']" // group B - ); - assertQ(req(params("qt", "/elevate", "elevateIds", "6,0", - "q", q, - "fq", "{!collapse field=" + f + " nullPolicy=ignore " + selector + "}")) - , "*[count(//doc)=4]" - ,"//result/doc[1]/str[@name='id'][.='6']" // elevated, prevents group C - ,"//result/doc[2]/str[@name='id'][.='0']" // elevated (null) - ,"//result/doc[3]/str[@name='id'][.='8']" // group A - ,"//result/doc[4]/str[@name='id'][.='3']" // group B - ); - + assertQ( + req( + params( + "q", + q, + "fq", + "{!collapse field=" + f + " nullPolicy=ignore " + selector + "}")), + "*[count(//doc)=3]", + "//result/doc[1]/str[@name='id'][.='9']" // group C + , + "//result/doc[2]/str[@name='id'][.='8']" // group A + , + "//result/doc[3]/str[@name='id'][.='3']" // group B + ); + assertQ( + req( + params( + "qt", + "/elevate", + "elevateIds", + "1,5", + "q", + q, + "fq", + "{!collapse field=" + f + " nullPolicy=ignore " + selector + "}")), + "*[count(//doc)=4]", + "//result/doc[1]/str[@name='id'][.='1']" // elevated, prevents group A + , + "//result/doc[2]/str[@name='id'][.='5']" // elevated, (also) prevents group A + , + "//result/doc[3]/str[@name='id'][.='9']" // group C + , + "//result/doc[4]/str[@name='id'][.='3']" // group B + ); + assertQ( + req( + params( + "qt", + "/elevate", + "elevateIds", + "0,7", + "q", + q, + "fq", + "{!collapse field=" + f + " nullPolicy=ignore " + selector + "}")), + "*[count(//doc)=5]", + "//result/doc[1]/str[@name='id'][.='0']" // elevated (null) + , + "//result/doc[2]/str[@name='id'][.='7']" // elevated (null) + , + "//result/doc[3]/str[@name='id'][.='9']" // group C + , + "//result/doc[4]/str[@name='id'][.='8']" // group A + , + "//result/doc[5]/str[@name='id'][.='3']" // group B + ); + assertQ( + req( + params( + "qt", + "/elevate", + "elevateIds", + "6,0", + "q", + q, + "fq", + "{!collapse field=" + f + " nullPolicy=ignore " + selector + "}")), + "*[count(//doc)=4]", + "//result/doc[1]/str[@name='id'][.='6']" // elevated, prevents group C + , + "//result/doc[2]/str[@name='id'][.='0']" // elevated (null) + , + "//result/doc[3]/str[@name='id'][.='8']" // group A + , + "//result/doc[4]/str[@name='id'][.='3']" // group B + ); + // collapse nulls - assertQ(req(params("q", q, - "fq", "{!collapse field=" + f + " nullPolicy=collapse " + selector + "}")) - , "*[count(//doc)=4]" - ,"//result/doc[1]/str[@name='id'][.='9']" // group C - ,"//result/doc[2]/str[@name='id'][.='8']" // group A - ,"//result/doc[3]/str[@name='id'][.='7']" // group null - ,"//result/doc[4]/str[@name='id'][.='3']" // group B - ); - assertQ(req(params("qt", "/elevate", "elevateIds", "1,5", - "q", q, - "fq", "{!collapse field=" + f + " nullPolicy=collapse " + selector + "}")) - , "*[count(//doc)=5]" - ,"//result/doc[1]/str[@name='id'][.='1']" // elevated, prevents group A - ,"//result/doc[2]/str[@name='id'][.='5']" // elevated, (also) prevents group A - ,"//result/doc[3]/str[@name='id'][.='9']" // group C - ,"//result/doc[4]/str[@name='id'][.='7']" // group null - ,"//result/doc[5]/str[@name='id'][.='3']" // group B - ); - 
assertQ(req(params("qt", "/elevate", "elevateIds", "0,7", - "q", q, - "fq", "{!collapse field=" + f + " nullPolicy=collapse " + selector + "}")) - , "*[count(//doc)=5]" - ,"//result/doc[1]/str[@name='id'][.='0']" // elevated (null) - ,"//result/doc[2]/str[@name='id'][.='7']" // elevated (null) - ,"//result/doc[3]/str[@name='id'][.='9']" // group C - ,"//result/doc[4]/str[@name='id'][.='8']" // group A - ,"//result/doc[5]/str[@name='id'][.='3']" // group B - ); - assertQ(req(params("qt", "/elevate", "elevateIds", "6,0", - "q", q, - "fq", "{!collapse field=" + f + " nullPolicy=collapse " + selector + "}")) - , "*[count(//doc)=4]" - ,"//result/doc[1]/str[@name='id'][.='6']" // elevated, prevents group C - ,"//result/doc[2]/str[@name='id'][.='0']" // elevated (null) - ,"//result/doc[3]/str[@name='id'][.='8']" // group A - ,"//result/doc[4]/str[@name='id'][.='3']" // group B - ); - + assertQ( + req( + params( + "q", + q, + "fq", + "{!collapse field=" + f + " nullPolicy=collapse " + selector + "}")), + "*[count(//doc)=4]", + "//result/doc[1]/str[@name='id'][.='9']" // group C + , + "//result/doc[2]/str[@name='id'][.='8']" // group A + , + "//result/doc[3]/str[@name='id'][.='7']" // group null + , + "//result/doc[4]/str[@name='id'][.='3']" // group B + ); + assertQ( + req( + params( + "qt", + "/elevate", + "elevateIds", + "1,5", + "q", + q, + "fq", + "{!collapse field=" + f + " nullPolicy=collapse " + selector + "}")), + "*[count(//doc)=5]", + "//result/doc[1]/str[@name='id'][.='1']" // elevated, prevents group A + , + "//result/doc[2]/str[@name='id'][.='5']" // elevated, (also) prevents group A + , + "//result/doc[3]/str[@name='id'][.='9']" // group C + , + "//result/doc[4]/str[@name='id'][.='7']" // group null + , + "//result/doc[5]/str[@name='id'][.='3']" // group B + ); + assertQ( + req( + params( + "qt", + "/elevate", + "elevateIds", + "0,7", + "q", + q, + "fq", + "{!collapse field=" + f + " nullPolicy=collapse " + selector + "}")), + "*[count(//doc)=5]", + "//result/doc[1]/str[@name='id'][.='0']" // elevated (null) + , + "//result/doc[2]/str[@name='id'][.='7']" // elevated (null) + , + "//result/doc[3]/str[@name='id'][.='9']" // group C + , + "//result/doc[4]/str[@name='id'][.='8']" // group A + , + "//result/doc[5]/str[@name='id'][.='3']" // group B + ); + assertQ( + req( + params( + "qt", + "/elevate", + "elevateIds", + "6,0", + "q", + q, + "fq", + "{!collapse field=" + f + " nullPolicy=collapse " + selector + "}")), + "*[count(//doc)=4]", + "//result/doc[1]/str[@name='id'][.='6']" // elevated, prevents group C + , + "//result/doc[2]/str[@name='id'][.='0']" // elevated (null) + , + "//result/doc[3]/str[@name='id'][.='8']" // group A + , + "//result/doc[4]/str[@name='id'][.='3']" // group B + ); + // expand nulls - assertQ(req(params("q", q, - "fq", "{!collapse field=" + f + " nullPolicy=expand " + selector + "}")) - , "*[count(//doc)=6]" - ,"//result/doc[1]/str[@name='id'][.='9']" // group C - ,"//result/doc[2]/str[@name='id'][.='8']" // group A - ,"//result/doc[3]/str[@name='id'][.='7']" // null - ,"//result/doc[4]/str[@name='id'][.='4']" // null - ,"//result/doc[5]/str[@name='id'][.='3']" // group B - ,"//result/doc[6]/str[@name='id'][.='0']" // null - ); - assertQ(req(params("qt", "/elevate", "elevateIds", "1,5", - "q", q, - "fq", "{!collapse field=" + f + " nullPolicy=expand " + selector + "}")) - , "*[count(//doc)=7]" - ,"//result/doc[1]/str[@name='id'][.='1']" // elevated, prevents group A - ,"//result/doc[2]/str[@name='id'][.='5']" // elevated, (also) prevents group A - 
,"//result/doc[3]/str[@name='id'][.='9']" // group C - ,"//result/doc[4]/str[@name='id'][.='7']" // null - ,"//result/doc[5]/str[@name='id'][.='4']" // null - ,"//result/doc[6]/str[@name='id'][.='3']" // group B - ,"//result/doc[7]/str[@name='id'][.='0']" // null - ); - assertQ(req(params("qt", "/elevate", "elevateIds", "0,7", - "q", q, - "fq", "{!collapse field=" + f + " nullPolicy=expand " + selector + "}")) - , "*[count(//doc)=6]" - ,"//result/doc[1]/str[@name='id'][.='0']" // elevated (null) - ,"//result/doc[2]/str[@name='id'][.='7']" // elevated (null) - ,"//result/doc[3]/str[@name='id'][.='9']" // group C - ,"//result/doc[4]/str[@name='id'][.='8']" // group A - ,"//result/doc[5]/str[@name='id'][.='4']" // null - ,"//result/doc[6]/str[@name='id'][.='3']" // group B - ); - assertQ(req(params("qt", "/elevate", "elevateIds", "6,0", - "q", q, - "fq", "{!collapse field=" + f + " nullPolicy=expand " + selector + "}")) - , "*[count(//doc)=6]" - ,"//result/doc[1]/str[@name='id'][.='6']" // elevated, prevents group C - ,"//result/doc[2]/str[@name='id'][.='0']" // elevated (null) - ,"//result/doc[3]/str[@name='id'][.='8']" // group A - ,"//result/doc[4]/str[@name='id'][.='7']" // null - ,"//result/doc[5]/str[@name='id'][.='4']" // null - ,"//result/doc[6]/str[@name='id'][.='3']" // group B - ); - + assertQ( + req( + params( + "q", + q, + "fq", + "{!collapse field=" + f + " nullPolicy=expand " + selector + "}")), + "*[count(//doc)=6]", + "//result/doc[1]/str[@name='id'][.='9']" // group C + , + "//result/doc[2]/str[@name='id'][.='8']" // group A + , + "//result/doc[3]/str[@name='id'][.='7']" // null + , + "//result/doc[4]/str[@name='id'][.='4']" // null + , + "//result/doc[5]/str[@name='id'][.='3']" // group B + , + "//result/doc[6]/str[@name='id'][.='0']" // null + ); + assertQ( + req( + params( + "qt", + "/elevate", + "elevateIds", + "1,5", + "q", + q, + "fq", + "{!collapse field=" + f + " nullPolicy=expand " + selector + "}")), + "*[count(//doc)=7]", + "//result/doc[1]/str[@name='id'][.='1']" // elevated, prevents group A + , + "//result/doc[2]/str[@name='id'][.='5']" // elevated, (also) prevents group A + , + "//result/doc[3]/str[@name='id'][.='9']" // group C + , + "//result/doc[4]/str[@name='id'][.='7']" // null + , + "//result/doc[5]/str[@name='id'][.='4']" // null + , + "//result/doc[6]/str[@name='id'][.='3']" // group B + , + "//result/doc[7]/str[@name='id'][.='0']" // null + ); + assertQ( + req( + params( + "qt", + "/elevate", + "elevateIds", + "0,7", + "q", + q, + "fq", + "{!collapse field=" + f + " nullPolicy=expand " + selector + "}")), + "*[count(//doc)=6]", + "//result/doc[1]/str[@name='id'][.='0']" // elevated (null) + , + "//result/doc[2]/str[@name='id'][.='7']" // elevated (null) + , + "//result/doc[3]/str[@name='id'][.='9']" // group C + , + "//result/doc[4]/str[@name='id'][.='8']" // group A + , + "//result/doc[5]/str[@name='id'][.='4']" // null + , + "//result/doc[6]/str[@name='id'][.='3']" // group B + ); + assertQ( + req( + params( + "qt", + "/elevate", + "elevateIds", + "6,0", + "q", + q, + "fq", + "{!collapse field=" + f + " nullPolicy=expand " + selector + "}")), + "*[count(//doc)=6]", + "//result/doc[1]/str[@name='id'][.='6']" // elevated, prevents group C + , + "//result/doc[2]/str[@name='id'][.='0']" // elevated (null) + , + "//result/doc[3]/str[@name='id'][.='8']" // group A + , + "//result/doc[4]/str[@name='id'][.='7']" // null + , + "//result/doc[5]/str[@name='id'][.='4']" // null + , + "//result/doc[6]/str[@name='id'][.='3']" // group B + ); } } } diff --git 
a/solr/core/src/test/org/apache/solr/search/TestComplexPhraseLeadingWildcard.java b/solr/core/src/test/org/apache/solr/search/TestComplexPhraseLeadingWildcard.java index ffccaca1d55..7434ed329dc 100644 --- a/solr/core/src/test/org/apache/solr/search/TestComplexPhraseLeadingWildcard.java +++ b/solr/core/src/test/org/apache/solr/search/TestComplexPhraseLeadingWildcard.java @@ -20,7 +20,7 @@ import org.junit.BeforeClass; import org.junit.Test; -public class TestComplexPhraseLeadingWildcard extends SolrTestCaseJ4 { +public class TestComplexPhraseLeadingWildcard extends SolrTestCaseJ4 { private static final String noReverseText = "three"; private static final String withOriginal = "one"; @@ -28,36 +28,35 @@ public class TestComplexPhraseLeadingWildcard extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema-reversed.xml"); + initCore("solrconfig.xml", "schema-reversed.xml"); assertU(doc123(1, "one ever")); assertU(doc123(2, "once forever")); - + assertU(doc123(7, "once slope forever")); assertU(doc123(8, "once again slope forever")); assertU(doc123(9, "forever once")); assertU(commit()); } - + @Test public void testReverseWithOriginal() throws Exception { checkField(withOriginal); - } // prefix query won't match without original tokens @Test public void testReverseWithoutOriginal() throws Exception { - assertQ( "prefix query doesn't work without original term", - req("q","{!complexphrase inOrder=true}\"on* for*\"", - "df",withoutOriginal), + assertQ( + "prefix query doesn't work without original term", + req("q", "{!complexphrase inOrder=true}\"on* for*\"", "df", withoutOriginal), expect()); - - assertQ("postfix query works fine even without original", - req("q","{!complexphrase inOrder=true}\"*nce *ver\"", - "df",withoutOriginal), + + assertQ( + "postfix query works fine even without original", + req("q", "{!complexphrase inOrder=true}\"*nce *ver\"", "df", withoutOriginal), expect("2")); } - + @Test public void testWithoutReverse() throws Exception { checkField(noReverseText); @@ -65,48 +64,42 @@ public void testWithoutReverse() throws Exception { private void checkField(String field) { assertQ( - req("q","{!complexphrase inOrder=true}\"on* *ver\"", - "df",field, - "indent","on", + req( + "q", "{!complexphrase inOrder=true}\"on* *ver\"", + "df", field, + "indent", "on", "debugQuery", "true"), - expect("1","2")); - - assertQ( - req("q","{!complexphrase inOrder=true}\"ON* *VER\"", - "df",field), - expect("1","2")); - + expect("1", "2")); + + assertQ(req("q", "{!complexphrase inOrder=true}\"ON* *VER\"", "df", field), expect("1", "2")); + + assertQ(req("q", "{!complexphrase inOrder=true}\"ON* *ver\"", "df", field), expect("1", "2")); + assertQ( - req("q","{!complexphrase inOrder=true}\"ON* *ver\"", - "df",field), - expect("1","2")); - + req("q", "{!complexphrase inOrder=true}\"on* *ver\"~1", "df", field), + expect("1", "2", "7")); + assertQ( - req("q","{!complexphrase inOrder=true}\"on* *ver\"~1", - "df",field), - expect("1","2","7")); - - assertQ("range works if reverse doesn't mess", - req("q","{!complexphrase inOrder=true}\"on* [* TO a]\"", - "df",field), + "range works if reverse doesn't mess", + req("q", "{!complexphrase inOrder=true}\"on* [* TO a]\"", "df", field), expect()); - assertQ("range works if reverse doesn't mess", - req("q","{!complexphrase inOrder=true}\"[on TO onZ] for*\"", - "df",field), + assertQ( + "range works if reverse doesn't mess", + req("q", "{!complexphrase inOrder=true}\"[on TO onZ] for*\"", "df", 
field), expect("2")); - } - - private static String doc123(int id, String text){ - return adoc("id",""+id, withOriginal, text, withoutOriginal, text, noReverseText, text); } - - private static String [] expect(String ...ids) { - String[] xpathes = new String[ids.length+1]; - xpathes[0]= "//result[@numFound=" +ids.length+ "]"; - int i=1; - for(String id : ids) { - xpathes[i++] = "//doc/str[@name='id' and text()='"+id+"']"; + + private static String doc123(int id, String text) { + return adoc("id", "" + id, withOriginal, text, withoutOriginal, text, noReverseText, text); + } + + private static String[] expect(String... ids) { + String[] xpathes = new String[ids.length + 1]; + xpathes[0] = "//result[@numFound=" + ids.length + "]"; + int i = 1; + for (String id : ids) { + xpathes[i++] = "//doc/str[@name='id' and text()='" + id + "']"; } return xpathes; } diff --git a/solr/core/src/test/org/apache/solr/search/TestComplexPhraseQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestComplexPhraseQParserPlugin.java index 1b5f5ee9592..d1617031101 100644 --- a/solr/core/src/test/org/apache/solr/search/TestComplexPhraseQParserPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/TestComplexPhraseQParserPlugin.java @@ -16,21 +16,20 @@ */ package org.apache.solr.search; +import java.util.HashMap; +import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.HighlightParams; -import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.util.TestHarness; import org.junit.BeforeClass; import org.junit.Test; -import java.util.HashMap; - public class TestComplexPhraseQParserPlugin extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema15.xml"); + initCore("solrconfig.xml", "schema15.xml"); } @Override @@ -52,47 +51,46 @@ public void testDefaultField() { assertU(commit()); assertU(optimize()); - assertQ(req("q", "{!complexphrase} \"john smith\"") - , "//result[@numFound='1']" - , "//doc[./str[@name='id']='1']" - ); - - assertQ(req("q", "{!complexphrase} \"j* smyth~\"") - , "//result[@numFound='2']" - , "//doc[./str[@name='id']='1']" - , "//doc[./str[@name='id']='2']" - ); - - assertQ(req("q", "{!complexphrase} \"(jo* -john) smith\"") - , "//result[@numFound='1']" - , "//doc[./str[@name='id']='2']" - ); - - assertQ(req("q", "{!complexphrase} \"jo* smith\"~2") - , "//result[@numFound='3']" - , "//doc[./str[@name='id']='1']" - , "//doc[./str[@name='id']='2']" - , "//doc[./str[@name='id']='3']" - ); - - assertQ(req("q", "{!complexphrase} \"jo* [sma TO smz]\"") - , "//result[@numFound='2']" - , "//doc[./str[@name='id']='1']" - , "//doc[./str[@name='id']='2']" - ); - - assertQ(req("q", "{!complexphrase} \"john\"") - , "//result[@numFound='2']" - , "//doc[./str[@name='id']='1']" - , "//doc[./str[@name='id']='3']" - ); - - assertQ(req("q", "{!complexphrase} \"(john johathon) smith\"") - , "//result[@numFound='2']" - , "//doc[./str[@name='id']='1']" - , "//doc[./str[@name='id']='2']" - ); - + assertQ( + req("q", "{!complexphrase} \"john smith\""), + "//result[@numFound='1']", + "//doc[./str[@name='id']='1']"); + + assertQ( + req("q", "{!complexphrase} \"j* smyth~\""), + "//result[@numFound='2']", + "//doc[./str[@name='id']='1']", + "//doc[./str[@name='id']='2']"); + + assertQ( + req("q", "{!complexphrase} \"(jo* -john) smith\""), + "//result[@numFound='1']", + "//doc[./str[@name='id']='2']"); + + assertQ( + req("q", 
"{!complexphrase} \"jo* smith\"~2"), + "//result[@numFound='3']", + "//doc[./str[@name='id']='1']", + "//doc[./str[@name='id']='2']", + "//doc[./str[@name='id']='3']"); + + assertQ( + req("q", "{!complexphrase} \"jo* [sma TO smz]\""), + "//result[@numFound='2']", + "//doc[./str[@name='id']='1']", + "//doc[./str[@name='id']='2']"); + + assertQ( + req("q", "{!complexphrase} \"john\""), + "//result[@numFound='2']", + "//doc[./str[@name='id']='1']", + "//doc[./str[@name='id']='3']"); + + assertQ( + req("q", "{!complexphrase} \"(john johathon) smith\""), + "//result[@numFound='2']", + "//doc[./str[@name='id']='1']", + "//doc[./str[@name='id']='2']"); } @Test @@ -102,8 +100,7 @@ public void test() { args.put(QueryParsing.DEFTYPE, ComplexPhraseQParserPlugin.NAME); args.put(CommonParams.FL, "id"); - TestHarness.LocalRequestFactory sumLRF = h.getRequestFactory( - "", 0, 200, args); + TestHarness.LocalRequestFactory sumLRF = h.getRequestFactory("", 0, 200, args); assertU(adoc("name", "john smith", "id", "1")); assertU(adoc("name", "johathon smith", "id", "2")); @@ -111,69 +108,69 @@ public void test() { assertU(commit()); assertU(optimize()); - assertQ("Simple multi-term still works", - sumLRF.makeRequest("name:\"john smith\""), - "//doc[./str[@name='id']='1']", - "//result[@numFound='1']" - ); - - assertQ(req("q", "{!complexphrase} name:\"john smith\""), - "//doc[./str[@name='id']='1']", - "//result[@numFound='1']" - ); - - - assertQ("wildcards and fuzzies are OK in phrases", - sumLRF.makeRequest("name:\"j* smyth~\""), - "//doc[./str[@name='id']='1']", - "//doc[./str[@name='id']='2']", - "//result[@numFound='2']" - ); - - assertQ("boolean logic works", - sumLRF.makeRequest("name:\"(jo* -john) smith\""), - "//doc[./str[@name='id']='2']", - "//result[@numFound='1']" - ); - - assertQ("position logic works", - sumLRF.makeRequest("name:\"jo* smith\"~2"), - "//doc[./str[@name='id']='1']", - "//doc[./str[@name='id']='2']", - "//doc[./str[@name='id']='3']", - "//result[@numFound='3']" - ); - - assertQ("range queries supported", - sumLRF.makeRequest("name:\"jo* [sma TO smz]\""), - "//doc[./str[@name='id']='1']", - "//doc[./str[@name='id']='2']", - "//result[@numFound='2']" - ); - - assertQ("Simple single-term still works", - sumLRF.makeRequest("name:\"john\""), - "//doc[./str[@name='id']='1']", - "//doc[./str[@name='id']='3']", - "//result[@numFound='2']" - ); - - assertQ("OR inside phrase works", - sumLRF.makeRequest("name:\"(john johathon) smith\""), - "//doc[./str[@name='id']='1']", - "//doc[./str[@name='id']='2']", - "//result[@numFound='2']" - ); - - assertQEx("don't parse subqueries", + assertQ( + "Simple multi-term still works", + sumLRF.makeRequest("name:\"john smith\""), + "//doc[./str[@name='id']='1']", + "//result[@numFound='1']"); + + assertQ( + req("q", "{!complexphrase} name:\"john smith\""), + "//doc[./str[@name='id']='1']", + "//result[@numFound='1']"); + + assertQ( + "wildcards and fuzzies are OK in phrases", + sumLRF.makeRequest("name:\"j* smyth~\""), + "//doc[./str[@name='id']='1']", + "//doc[./str[@name='id']='2']", + "//result[@numFound='2']"); + + assertQ( + "boolean logic works", + sumLRF.makeRequest("name:\"(jo* -john) smith\""), + "//doc[./str[@name='id']='2']", + "//result[@numFound='1']"); + + assertQ( + "position logic works", + sumLRF.makeRequest("name:\"jo* smith\"~2"), + "//doc[./str[@name='id']='1']", + "//doc[./str[@name='id']='2']", + "//doc[./str[@name='id']='3']", + "//result[@numFound='3']"); + + assertQ( + "range queries supported", + sumLRF.makeRequest("name:\"jo* [sma 
TO smz]\""), + "//doc[./str[@name='id']='1']", + "//doc[./str[@name='id']='2']", + "//result[@numFound='2']"); + + assertQ( + "Simple single-term still works", + sumLRF.makeRequest("name:\"john\""), + "//doc[./str[@name='id']='1']", + "//doc[./str[@name='id']='3']", + "//result[@numFound='2']"); + + assertQ( + "OR inside phrase works", + sumLRF.makeRequest("name:\"(john johathon) smith\""), + "//doc[./str[@name='id']='1']", + "//doc[./str[@name='id']='2']", + "//result[@numFound='2']"); + + assertQEx( + "don't parse subqueries", "SyntaxError", - sumLRF.makeRequest("_query_:\"{!prefix f=name v=smi}\""), SolrException.ErrorCode.BAD_REQUEST - ); - assertQEx("don't parse subqueries", + sumLRF.makeRequest("_query_:\"{!prefix f=name v=smi}\""), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "don't parse subqueries", "SyntaxError", - sumLRF.makeRequest("{!prefix f=name v=smi}"), SolrException.ErrorCode.BAD_REQUEST - ); - + sumLRF.makeRequest("{!prefix f=name v=smi}"), + SolrException.ErrorCode.BAD_REQUEST); } @Test @@ -189,9 +186,7 @@ public void testPhraseHighlighter() { args.put(HighlightParams.FRAGSIZE, String.valueOf(0)); args.put(HighlightParams.FIELDS, "name"); - - TestHarness.LocalRequestFactory sumLRF = h.getRequestFactory( - "", 0, 200, args); + TestHarness.LocalRequestFactory sumLRF = h.getRequestFactory("", 0, 200, args); assertU(adoc("name", "john smith smith john", "id", "1")); assertU(adoc("name", "johathon smith smith johathon", "id", "2")); @@ -199,90 +194,90 @@ public void testPhraseHighlighter() { assertU(commit()); assertU(optimize()); - assertQ("range queries supported", - sumLRF.makeRequest("name:[sma TO smz]"), - "//doc[./str[@name='id']='1']", - "//doc[./str[@name='id']='2']", - "//doc[./str[@name='id']='3']", - "//result[@numFound='3']" - ); - + assertQ( + "range queries supported", + sumLRF.makeRequest("name:[sma TO smz]"), + "//doc[./str[@name='id']='1']", + "//doc[./str[@name='id']='2']", + "//doc[./str[@name='id']='3']", + "//result[@numFound='3']"); sumLRF = h.getRequestFactory("", 0, 200, args); - assertQ("PhraseHighlighter=true Test", - sumLRF.makeRequest("name:\"(john johathon) smith\""), - "//lst[@name='highlighting']/lst[@name='1']", - "//lst[@name='1']/arr[@name='name']/str[.='john smith smith john']", - "//lst[@name='highlighting']/lst[@name='2']", - "//lst[@name='2']/arr[@name='name']/str[.='johathon smith smith johathon']" - ); - + assertQ( + "PhraseHighlighter=true Test", + sumLRF.makeRequest("name:\"(john johathon) smith\""), + "//lst[@name='highlighting']/lst[@name='1']", + "//lst[@name='1']/arr[@name='name']/str[.='john smith smith john']", + "//lst[@name='highlighting']/lst[@name='2']", + "//lst[@name='2']/arr[@name='name']/str[.='johathon smith smith johathon']"); args.put(HighlightParams.USE_PHRASE_HIGHLIGHTER, Boolean.FALSE.toString()); sumLRF = h.getRequestFactory("", 0, 200, args); - assertQ("PhraseHighlighter=false Test", - sumLRF.makeRequest("name:\"(john johathon) smith\""), - "//lst[@name='highlighting']/lst[@name='1']", - "//lst[@name='1']/arr[@name='name']/str[.='john smith smith john']", - "//lst[@name='highlighting']/lst[@name='2']", - "//lst[@name='2']/arr[@name='name']/str[.='johathon smith smith johathon']" - ); - - /* - assertQ("Highlight Plain Prefix Query Test", - sumLRF.makeRequest("name:jo*"), - "//lst[@name='highlighting']/lst[@name='1']", - "//lst[@name='1']/arr[@name='name']/str[.='john smith smith john']", - "//lst[@name='highlighting']/lst[@name='2']", - "//lst[@name='2']/arr[@name='name']/str[.='johathon smith smith 
johathon']", - "//lst[@name='highlighting']/lst[@name='3']", - "//lst[@name='3']/arr[@name='name']/str[.='john percival smith']" - ); - */ + assertQ( + "PhraseHighlighter=false Test", + sumLRF.makeRequest("name:\"(john johathon) smith\""), + "//lst[@name='highlighting']/lst[@name='1']", + "//lst[@name='1']/arr[@name='name']/str[.='john smith smith john']", + "//lst[@name='highlighting']/lst[@name='2']", + "//lst[@name='2']/arr[@name='name']/str[.='johathon smith smith johathon']"); + + /* + assertQ("Highlight Plain Prefix Query Test", + sumLRF.makeRequest("name:jo*"), + "//lst[@name='highlighting']/lst[@name='1']", + "//lst[@name='1']/arr[@name='name']/str[.='john smith smith john']", + "//lst[@name='highlighting']/lst[@name='2']", + "//lst[@name='2']/arr[@name='name']/str[.='johathon smith smith johathon']", + "//lst[@name='highlighting']/lst[@name='3']", + "//lst[@name='3']/arr[@name='name']/str[.='john percival smith']" + ); + */ } @Test public void testMultipleFields() { - assertU(adoc("text", "protein digest", "name", "dna rules", "id", "1")); - assertU(adoc("text", "digest protein", "name", "rna is the workhorse", "id", "2")); + assertU(adoc("text", "protein digest", "name", "dna rules", "id", "1")); + assertU(adoc("text", "digest protein", "name", "rna is the workhorse", "id", "2")); - assertU(adoc("text", "dna rules", "name", "protein digest", "id", "3")); + assertU(adoc("text", "dna rules", "name", "protein digest", "id", "3")); assertU(adoc("text", "dna really rules", "name", "digest protein", "id", "4")); assertU(commit()); assertU(optimize()); - assertQ(req("q", "{!complexphrase} name:\"protein digest\" AND text:\"dna rules\"") - , "//result[@numFound='1']" - , "//doc[./str[@name='id']='3']" - ); - - assertQ(req("q", "{!complexphrase} name:\"prot* dige*\" AND text:\"d* r*\"") - , "//result[@numFound='1']" - , "//doc[./str[@name='id']='3']" - ); - - assertQ(req("q", "{!complexphrase inOrder=\"false\"} name:\"dna* rule*\" AND text:\"prot* diges*\"") - , "//result[@numFound='1']" - , "//doc[./str[@name='id']='1']" - ); - - assertQ(req("q", "{!complexphrase inOrder=false} name:\"protein digest\" AND text:\"dna rules\"~2") - , "//result[@numFound='2']" - , "//doc[./str[@name='id']='3']" - , "//doc[./str[@name='id']='4']" - ); - - - assertQ(req("q", "{!complexphrase inOrder=\"true\"} name:\"protein digest\" AND text:\"dna rules\"") - , "//result[@numFound='1']" - , "//doc[./str[@name='id']='3']" - ); - + assertQ( + req("q", "{!complexphrase} name:\"protein digest\" AND text:\"dna rules\""), + "//result[@numFound='1']", + "//doc[./str[@name='id']='3']"); + + assertQ( + req("q", "{!complexphrase} name:\"prot* dige*\" AND text:\"d* r*\""), + "//result[@numFound='1']", + "//doc[./str[@name='id']='3']"); + + assertQ( + req( + "q", + "{!complexphrase inOrder=\"false\"} name:\"dna* rule*\" AND text:\"prot* diges*\""), + "//result[@numFound='1']", + "//doc[./str[@name='id']='1']"); + + assertQ( + req("q", "{!complexphrase inOrder=false} name:\"protein digest\" AND text:\"dna rules\"~2"), + "//result[@numFound='2']", + "//doc[./str[@name='id']='3']", + "//doc[./str[@name='id']='4']"); + + assertQ( + req( + "q", + "{!complexphrase inOrder=\"true\"} name:\"protein digest\" AND text:\"dna rules\""), + "//result[@numFound='1']", + "//doc[./str[@name='id']='3']"); } - @Test + @Test public void testUnorderedPhraseQuery() { assertU(adoc("text", "protein digest", "id", "1")); @@ -294,83 +289,71 @@ public void testUnorderedPhraseQuery() { assertU(commit()); assertU(optimize()); - /** - * ordered 
phrase query return only fist document - */ - assertQ(req("q", "{!complexphrase} \"protein digest\"") - , "//result[@numFound='1']" - , "//doc[./str[@name='id']='1']" - ); - - assertQ(req("q", "{!complexphrase} \"pro* di*\"") - , "//result[@numFound='1']" - , "//doc[./str[@name='id']='1']" - ); - - assertQ(req("q", "{!complexphrase} name:\"protein digest\"") - , "//result[@numFound='1']" - , "//doc[./str[@name='id']='3']" - ); - - assertQ(req("q", "{!complexphrase} name:\"pro* di*\"") - , "//result[@numFound='1']" - , "//doc[./str[@name='id']='3']" - ); - - /** - * unordered phrase query returns two documents. - */ - assertQ(req("q", "{!complexphrase inOrder=false} \"digest protein\"") - , "//result[@numFound='2']" - , "//doc[./str[@name='id']='1']" - , "//doc[./str[@name='id']='2']" - ); - - assertQ(req("q", "{!complexphrase inOrder=false} \"di* pro*\"") - , "//result[@numFound='2']" - , "//doc[./str[@name='id']='1']" - , "//doc[./str[@name='id']='2']" - ); - - assertQ(req("q", "{!complexphrase inOrder=false} name:\"digest protein\"") - , "//result[@numFound='2']" - , "//doc[./str[@name='id']='3']" - , "//doc[./str[@name='id']='4']" - ); - - assertQ(req("q", "{!complexphrase inOrder=false} name:\"di* pro*\"") - , "//result[@numFound='2']" - , "//doc[./str[@name='id']='3']" - , "//doc[./str[@name='id']='4']" - ); - - /** - * inOrder parameter can be defined with local params syntax. - */ - assertQ(req("q", "{!complexphrase inOrder=false} \"di* pro*\"") - , "//result[@numFound='2']" - , "//doc[./str[@name='id']='1']" - , "//doc[./str[@name='id']='2']" - ); - - - assertQ(req("q", "{!complexphrase inOrder=true} \"di* pro*\"") - , "//result[@numFound='1']" - ); - - /** - * inOrder and df parameters can be defined with local params syntax. - */ - assertQ(req("q", "{!complexphrase inOrder=false df=name} \"di* pro*\"") - , "//result[@numFound='2']" - , "//doc[./str[@name='id']='3']" - , "//doc[./str[@name='id']='4']" - ); + /** ordered phrase query returns only the first document */ + assertQ( + req("q", "{!complexphrase} \"protein digest\""), + "//result[@numFound='1']", + "//doc[./str[@name='id']='1']"); + + assertQ( + req("q", "{!complexphrase} \"pro* di*\""), + "//result[@numFound='1']", + "//doc[./str[@name='id']='1']"); + + assertQ( + req("q", "{!complexphrase} name:\"protein digest\""), + "//result[@numFound='1']", + "//doc[./str[@name='id']='3']"); + + assertQ( + req("q", "{!complexphrase} name:\"pro* di*\""), + "//result[@numFound='1']", + "//doc[./str[@name='id']='3']"); + + /** unordered phrase query returns two documents. */ + assertQ( + req("q", "{!complexphrase inOrder=false} \"digest protein\""), + "//result[@numFound='2']", + "//doc[./str[@name='id']='1']", + "//doc[./str[@name='id']='2']"); + + assertQ( + req("q", "{!complexphrase inOrder=false} \"di* pro*\""), + "//result[@numFound='2']", + "//doc[./str[@name='id']='1']", + "//doc[./str[@name='id']='2']"); + + assertQ( + req("q", "{!complexphrase inOrder=false} name:\"digest protein\""), + "//result[@numFound='2']", + "//doc[./str[@name='id']='3']", + "//doc[./str[@name='id']='4']"); + + assertQ( + req("q", "{!complexphrase inOrder=false} name:\"di* pro*\""), + "//result[@numFound='2']", + "//doc[./str[@name='id']='3']", + "//doc[./str[@name='id']='4']"); + + /** inOrder parameter can be defined with local params syntax. 
*/ + assertQ( + req("q", "{!complexphrase inOrder=false} \"di* pro*\""), + "//result[@numFound='2']", + "//doc[./str[@name='id']='1']", + "//doc[./str[@name='id']='2']"); + + assertQ(req("q", "{!complexphrase inOrder=true} \"di* pro*\""), "//result[@numFound='1']"); + + /** inOrder and df parameters can be defined with local params syntax. */ + assertQ( + req("q", "{!complexphrase inOrder=false df=name} \"di* pro*\""), + "//result[@numFound='2']", + "//doc[./str[@name='id']='3']", + "//doc[./str[@name='id']='4']"); } - /** - * the query "sulfur-reducing bacteria" was crashing due to the dash inside the phrase. - */ - @Test public void testHyphenInPhrase() { + /** the query "sulfur-reducing bacteria" was crashing due to the dash inside the phrase. */ + @Test + public void testHyphenInPhrase() { assertU(adoc("text", "sulfur-reducing bacteria", "id", "1")); assertU(adoc("text", "sulfur reducing bacteria", "id", "2")); @@ -381,16 +364,15 @@ public void testUnorderedPhraseQuery() { assertU(commit()); assertU(optimize()); - assertQ(req("q", "{!complexphrase} \"sulfur-reducing bacteria\"") - , "//result[@numFound='2']" - , "//doc[./str[@name='id']='1']" - , "//doc[./str[@name='id']='2']" - ); + assertQ( + req("q", "{!complexphrase} \"sulfur-reducing bacteria\""), + "//result[@numFound='2']", + "//doc[./str[@name='id']='1']", + "//doc[./str[@name='id']='2']"); - // the analysis for "name" currently does not break on "-" (only whitespace) and thus only matches one doc - assertQ(req("q", "{!complexphrase} name:\"sulfur-reducing bacteria\"") - , "//result[@numFound='1']" - ); + // the analysis for "name" currently does not break on "-" (only whitespace) and thus only + // matches one doc + assertQ( + req("q", "{!complexphrase} name:\"sulfur-reducing bacteria\""), "//result[@numFound='1']"); } } - diff --git a/solr/core/src/test/org/apache/solr/search/TestComponentsName.java b/solr/core/src/test/org/apache/solr/search/TestComponentsName.java index ce643d27e98..d965c59c201 100644 --- a/solr/core/src/test/org/apache/solr/search/TestComponentsName.java +++ b/solr/core/src/test/org/apache/solr/search/TestComponentsName.java @@ -25,48 +25,57 @@ import org.junit.BeforeClass; import org.junit.Test; -public class TestComponentsName extends SolrTestCaseJ4{ - +public class TestComponentsName extends SolrTestCaseJ4 { + @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-components-name.xml","schema.xml"); + initCore("solrconfig-components-name.xml", "schema.xml"); } - + @Override public void tearDown() throws Exception { super.tearDown(); assertU(delQ("*:*")); assertU((commit())); } - - + @Test public void testComponentsName() { assertU(adoc("id", "0", "name", "Zapp Brannigan")); assertU(adoc("id", "1", "name", "The Zapper")); assertU((commit())); - - assertQ("match all docs query", - req("q","*:*") - ,"//result[@numFound='2']", - "/response/str[@name='component1'][.='foo']", + + assertQ( + "match all docs query", + req("q", "*:*"), + "//result[@numFound='2']", + "/response/str[@name='component1'][.='foo']", "/response/str[@name='component2'][.='bar']"); - - assertQ("use debugQuery", - req("q","*:*", - "debugQuery", "true") - ,"//result[@numFound='2']", - "/response/str[@name='component1'][.='foo']", + + assertQ( + "use debugQuery", + req( + "q", "*:*", + "debugQuery", "true"), + "//result[@numFound='2']", + "/response/str[@name='component1'][.='foo']", "/response/str[@name='component2'][.='bar']", 
"/response/lst[@name='debug']/lst[@name='timing']/lst[@name='prepare']/lst[@name='component1']", - "/response/lst[@name='debug']/lst[@name='timing']/lst[@name='prepare']/lst[@name='" + QueryComponent.COMPONENT_NAME + "']", - "/response/lst[@name='debug']/lst[@name='timing']/lst[@name='prepare']/lst[@name='" + FacetComponent.COMPONENT_NAME + "']", - "/response/lst[@name='debug']/lst[@name='timing']/lst[@name='prepare']/lst[@name='" + MoreLikeThisComponent.COMPONENT_NAME + "']", - "/response/lst[@name='debug']/lst[@name='timing']/lst[@name='prepare']/lst[@name='" + StatsComponent.COMPONENT_NAME + "']", - "/response/lst[@name='debug']/lst[@name='timing']/lst[@name='prepare']/lst[@name='" + DebugComponent.COMPONENT_NAME + "']", + "/response/lst[@name='debug']/lst[@name='timing']/lst[@name='prepare']/lst[@name='" + + QueryComponent.COMPONENT_NAME + + "']", + "/response/lst[@name='debug']/lst[@name='timing']/lst[@name='prepare']/lst[@name='" + + FacetComponent.COMPONENT_NAME + + "']", + "/response/lst[@name='debug']/lst[@name='timing']/lst[@name='prepare']/lst[@name='" + + MoreLikeThisComponent.COMPONENT_NAME + + "']", + "/response/lst[@name='debug']/lst[@name='timing']/lst[@name='prepare']/lst[@name='" + + StatsComponent.COMPONENT_NAME + + "']", + "/response/lst[@name='debug']/lst[@name='timing']/lst[@name='prepare']/lst[@name='" + + DebugComponent.COMPONENT_NAME + + "']", "/response/lst[@name='debug']/lst[@name='timing']/lst[@name='prepare']/lst[@name='component2']"); } - } - - diff --git a/solr/core/src/test/org/apache/solr/search/TestCustomSort.java b/solr/core/src/test/org/apache/solr/search/TestCustomSort.java index 74b83011fa0..9aace572750 100644 --- a/solr/core/src/test/org/apache/solr/search/TestCustomSort.java +++ b/solr/core/src/test/org/apache/solr/search/TestCustomSort.java @@ -16,110 +16,290 @@ */ package org.apache.solr.search; +import java.nio.ByteBuffer; import org.apache.solr.SolrTestCaseJ4; import org.junit.BeforeClass; -import java.nio.ByteBuffer; - - -/** - * Test SortField.CUSTOM sorts - */ +/** Test SortField.CUSTOM sorts */ public class TestCustomSort extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema-custom-field.xml"); } - + public void testSortableBinary() throws Exception { clearIndex(); - assertU(adoc(sdoc("id", "1", "text", "a", "payload", ByteBuffer.wrap(new byte[] { 0x12, 0x62, 0x15 })))); // 2 - assertU(adoc(sdoc("id", "2", "text", "b", "payload", ByteBuffer.wrap(new byte[] { 0x25, 0x21, 0x16 })))); // 5 - assertU(adoc(sdoc("id", "3", "text", "a", "payload", ByteBuffer.wrap(new byte[] { 0x35, 0x32, 0x58 })))); // 8 - assertU(adoc(sdoc("id", "4", "text", "b", "payload", ByteBuffer.wrap(new byte[] { 0x25, 0x21, 0x15 })))); // 4 - assertU(adoc(sdoc("id", "5", "text", "a", "payload", ByteBuffer.wrap(new byte[] { 0x35, 0x35, 0x10, 0x00 })))); // 9 - assertU(adoc(sdoc("id", "6", "text", "c", "payload", ByteBuffer.wrap(new byte[] { 0x1a, 0x2b, 0x3c, 0x00, 0x00, 0x03 })))); // 3 - assertU(adoc(sdoc("id", "7", "text", "c", "payload", ByteBuffer.wrap(new byte[] { 0x00, 0x3c, 0x73 })))); // 1 - assertU(adoc(sdoc("id", "8", "text", "c", "payload", ByteBuffer.wrap(new byte[] { 0x59, 0x2d, 0x4d })))); // 11 - assertU(adoc(sdoc("id", "9", "text", "a", "payload", ByteBuffer.wrap(new byte[] { 0x39, 0x79, 0x7a })))); // 10 - assertU(adoc(sdoc("id", "10", "text", "b", "payload", ByteBuffer.wrap(new byte[] { 0x31, 0x39, 0x7c })))); // 6 - assertU(adoc(sdoc("id", "11", "text", "d", "payload", 
ByteBuffer.wrap(new byte[] { (byte)0xff, (byte)0xaf, (byte)0x9c })))); // 13 - assertU(adoc(sdoc("id", "12", "text", "d", "payload", ByteBuffer.wrap(new byte[] { 0x34, (byte)0xdd, 0x4d })))); // 7 - assertU(adoc(sdoc("id", "13", "text", "d", "payload", ByteBuffer.wrap(new byte[] { (byte)0x80, 0x11, 0x33 })))); // 12 + assertU( + adoc( + sdoc( + "id", + "1", + "text", + "a", + "payload", + ByteBuffer.wrap(new byte[] {0x12, 0x62, 0x15})))); // 2 + assertU( + adoc( + sdoc( + "id", + "2", + "text", + "b", + "payload", + ByteBuffer.wrap(new byte[] {0x25, 0x21, 0x16})))); // 5 + assertU( + adoc( + sdoc( + "id", + "3", + "text", + "a", + "payload", + ByteBuffer.wrap(new byte[] {0x35, 0x32, 0x58})))); // 8 + assertU( + adoc( + sdoc( + "id", + "4", + "text", + "b", + "payload", + ByteBuffer.wrap(new byte[] {0x25, 0x21, 0x15})))); // 4 + assertU( + adoc( + sdoc( + "id", + "5", + "text", + "a", + "payload", + ByteBuffer.wrap(new byte[] {0x35, 0x35, 0x10, 0x00})))); // 9 + assertU( + adoc( + sdoc( + "id", + "6", + "text", + "c", + "payload", + ByteBuffer.wrap(new byte[] {0x1a, 0x2b, 0x3c, 0x00, 0x00, 0x03})))); // 3 + assertU( + adoc( + sdoc( + "id", + "7", + "text", + "c", + "payload", + ByteBuffer.wrap(new byte[] {0x00, 0x3c, 0x73})))); // 1 + assertU( + adoc( + sdoc( + "id", + "8", + "text", + "c", + "payload", + ByteBuffer.wrap(new byte[] {0x59, 0x2d, 0x4d})))); // 11 + assertU( + adoc( + sdoc( + "id", + "9", + "text", + "a", + "payload", + ByteBuffer.wrap(new byte[] {0x39, 0x79, 0x7a})))); // 10 + assertU( + adoc( + sdoc( + "id", + "10", + "text", + "b", + "payload", + ByteBuffer.wrap(new byte[] {0x31, 0x39, 0x7c})))); // 6 + assertU( + adoc( + sdoc( + "id", + "11", + "text", + "d", + "payload", + ByteBuffer.wrap(new byte[] {(byte) 0xff, (byte) 0xaf, (byte) 0x9c})))); // 13 + assertU( + adoc( + sdoc( + "id", + "12", + "text", + "d", + "payload", + ByteBuffer.wrap(new byte[] {0x34, (byte) 0xdd, 0x4d})))); // 7 + assertU( + adoc( + sdoc( + "id", + "13", + "text", + "d", + "payload", + ByteBuffer.wrap(new byte[] {(byte) 0x80, 0x11, 0x33})))); // 12 assertU(commit()); - - assertQ(req("q", "*:*", "fl", "id_i", "sort", "payload asc", "rows", "20") - , "//result[@numFound='13']" // - , "//result/doc[int='7' and position()=1]" // 7 00 3c 73 - , "//result/doc[int='1' and position()=2]" // 1 12 62 15 - , "//result/doc[int='6' and position()=3]" // 6 1a 2b 3c 00 00 03 - , "//result/doc[int='4' and position()=4]" // 4 25 21 15 - , "//result/doc[int='2' and position()=5]" // 2 25 21 16 - , "//result/doc[int='10' and position()=6]" // 10 31 39 7c - , "//result/doc[int='12' and position()=7]" // 12 34 dd 4d - , "//result/doc[int='3' and position()=8]" // 3 35 32 58 - , "//result/doc[int='5' and position()=9]" // 5 35 35 10 00 - , "//result/doc[int='9' and position()=10]" // 9 39 79 7a - , "//result/doc[int='8' and position()=11]" // 8 59 2d 4d - , "//result/doc[int='13' and position()=12]" // 13 80 11 33 - , "//result/doc[int='11' and position()=13]"); // 11 ff af 9c - assertQ(req("q", "*:*", "fl", "id_i", "sort", "payload desc", "rows", "20") - , "//result[@numFound='13']" // - , "//result/doc[int='11' and position()=1]" // 11 ff af 9c - , "//result/doc[int='13' and position()=2]" // 13 80 11 33 - , "//result/doc[int='8' and position()=3]" // 8 59 2d 4d - , "//result/doc[int='9' and position()=4]" // 9 39 79 7a - , "//result/doc[int='5' and position()=5]" // 5 35 35 10 00 - , "//result/doc[int='3' and position()=6]" // 3 35 32 58 - , "//result/doc[int='12' and position()=7]" // 12 34 dd 4d - , 
"//result/doc[int='10' and position()=8]" // 10 31 39 7c - , "//result/doc[int='2' and position()=9]" // 2 25 21 16 - , "//result/doc[int='4' and position()=10]" // 4 25 21 15 - , "//result/doc[int='6' and position()=11]" // 6 1a 2b 3c 00 00 03 - , "//result/doc[int='1' and position()=12]" // 1 12 62 15 - , "//result/doc[int='7' and position()=13]"); // 7 00 3c 73 - assertQ(req("q", "text:a", "fl", "id_i", "sort", "payload asc", "rows", "20") - , "//result[@numFound='4']" // - , "//result/doc[int='1' and position()=1]" // 1 12 62 15 - , "//result/doc[int='3' and position()=2]" // 3 35 32 58 - , "//result/doc[int='5' and position()=3]" // 5 35 35 10 00 - , "//result/doc[int='9' and position()=4]"); // 9 39 79 7a - assertQ(req("q", "text:a", "fl", "id_i", "sort", "payload desc", "rows", "20") - , "//result[@numFound='4']" // - , "//result/doc[int='9' and position()=1]" // 9 39 79 7a - , "//result/doc[int='5' and position()=2]" // 5 35 35 10 00 - , "//result/doc[int='3' and position()=3]" // 3 35 32 58 - , "//result/doc[int='1' and position()=4]"); // 1 12 62 15 - assertQ(req("q", "text:b", "fl", "id_i", "sort", "payload asc", "rows", "20") - , "//result[@numFound='3']" // - , "//result/doc[int='4' and position()=1]" // 4 25 21 15 - , "//result/doc[int='2' and position()=2]" // 2 25 21 16 - , "//result/doc[int='10' and position()=3]"); // 10 31 39 7c - assertQ(req("q", "text:b", "fl", "id_i", "sort", "payload desc", "rows", "20") - , "//result[@numFound='3']" // - , "//result/doc[int='10' and position()=1]" // 10 31 39 7c - , "//result/doc[int='2' and position()=2]" // 2 25 21 16 - , "//result/doc[int='4' and position()=3]"); // 4 25 21 15 - assertQ(req("q", "text:c", "fl", "id_i", "sort", "payload asc", "rows", "20") - , "//result[@numFound='3']" // - , "//result/doc[int='7' and position()=1]" // 7 00 3c 73 - , "//result/doc[int='6' and position()=2]" // 6 1a 2b 3c 00 00 03 - , "//result/doc[int='8' and position()=3]"); // 8 59 2d 4d - assertQ(req("q", "text:c", "fl", "id_i", "sort", "payload desc", "rows", "20") - , "//result[@numFound='3']" // - , "//result/doc[int='8' and position()=1]" // 8 59 2d 4d - , "//result/doc[int='6' and position()=2]" // 6 1a 2b 3c 00 00 03 - , "//result/doc[int='7' and position()=3]"); // 7 00 3c 73 - assertQ(req("q", "text:d", "fl", "id_i", "sort", "payload asc", "rows", "20") - , "//result[@numFound='3']" // - , "//result/doc[int='12' and position()=1]" // 12 34 dd 4d - , "//result/doc[int='13' and position()=2]" // 13 80 11 33 - , "//result/doc[int='11' and position()=3]"); // 11 ff af 9c - assertQ(req("q", "text:d", "fl", "id_i", "sort", "payload desc", "rows", "20") - , "//result[@numFound='3']" // - , "//result/doc[int='11' and position()=1]" // 11 ff af 9c - , "//result/doc[int='13' and position()=2]" // 13 80 11 33 - , "//result/doc[int='12' and position()=3]"); // 12 34 dd 4d + + assertQ( + req("q", "*:*", "fl", "id_i", "sort", "payload asc", "rows", "20"), + "//result[@numFound='13']" // + , + "//result/doc[int='7' and position()=1]" // 7 00 3c 73 + , + "//result/doc[int='1' and position()=2]" // 1 12 62 15 + , + "//result/doc[int='6' and position()=3]" // 6 1a 2b 3c + // 00 00 03 + , + "//result/doc[int='4' and position()=4]" // 4 25 21 15 + , + "//result/doc[int='2' and position()=5]" // 2 25 21 16 + , + "//result/doc[int='10' and position()=6]" // 10 31 39 7c + , + "//result/doc[int='12' and position()=7]" // 12 34 dd 4d + , + "//result/doc[int='3' and position()=8]" // 3 35 32 58 + , + "//result/doc[int='5' and position()=9]" // 5 35 35 10 + // 00 
+ , + "//result/doc[int='9' and position()=10]" // 9 39 79 7a + , + "//result/doc[int='8' and position()=11]" // 8 59 2d 4d + , + "//result/doc[int='13' and position()=12]" // 13 80 11 33 + , + "//result/doc[int='11' and position()=13]"); // 11 ff af + // 9c + assertQ( + req("q", "*:*", "fl", "id_i", "sort", "payload desc", "rows", "20"), + "//result[@numFound='13']" // + , + "//result/doc[int='11' and position()=1]" // 11 ff af 9c + , + "//result/doc[int='13' and position()=2]" // 13 80 11 33 + , + "//result/doc[int='8' and position()=3]" // 8 59 2d 4d + , + "//result/doc[int='9' and position()=4]" // 9 39 79 7a + , + "//result/doc[int='5' and position()=5]" // 5 35 35 10 + // 00 + , + "//result/doc[int='3' and position()=6]" // 3 35 32 58 + , + "//result/doc[int='12' and position()=7]" // 12 34 dd 4d + , + "//result/doc[int='10' and position()=8]" // 10 31 39 7c + , + "//result/doc[int='2' and position()=9]" // 2 25 21 16 + , + "//result/doc[int='4' and position()=10]" // 4 25 21 15 + , + "//result/doc[int='6' and position()=11]" // 6 1a 2b 3c + // 00 00 03 + , + "//result/doc[int='1' and position()=12]" // 1 12 62 15 + , + "//result/doc[int='7' and position()=13]"); // 7 00 3c + // 73 + assertQ( + req("q", "text:a", "fl", "id_i", "sort", "payload asc", "rows", "20"), + "//result[@numFound='4']" // + , + "//result/doc[int='1' and position()=1]" // 1 12 62 15 + , + "//result/doc[int='3' and position()=2]" // 3 35 32 58 + , + "//result/doc[int='5' and position()=3]" // 5 35 35 10 + // 00 + , + "//result/doc[int='9' and position()=4]"); // 9 39 79 + // 7a + assertQ( + req("q", "text:a", "fl", "id_i", "sort", "payload desc", "rows", "20"), + "//result[@numFound='4']" // + , + "//result/doc[int='9' and position()=1]" // 9 39 79 7a + , + "//result/doc[int='5' and position()=2]" // 5 35 35 10 + // 00 + , + "//result/doc[int='3' and position()=3]" // 3 35 32 58 + , + "//result/doc[int='1' and position()=4]"); // 1 12 62 + // 15 + assertQ( + req("q", "text:b", "fl", "id_i", "sort", "payload asc", "rows", "20"), + "//result[@numFound='3']" // + , + "//result/doc[int='4' and position()=1]" // 4 25 21 15 + , + "//result/doc[int='2' and position()=2]" // 2 25 21 16 + , + "//result/doc[int='10' and position()=3]"); // 10 31 39 + // 7c + assertQ( + req("q", "text:b", "fl", "id_i", "sort", "payload desc", "rows", "20"), + "//result[@numFound='3']" // + , + "//result/doc[int='10' and position()=1]" // 10 31 39 7c + , + "//result/doc[int='2' and position()=2]" // 2 25 21 16 + , + "//result/doc[int='4' and position()=3]"); // 4 25 21 + // 15 + assertQ( + req("q", "text:c", "fl", "id_i", "sort", "payload asc", "rows", "20"), + "//result[@numFound='3']" // + , + "//result/doc[int='7' and position()=1]" // 7 00 3c 73 + , + "//result/doc[int='6' and position()=2]" // 6 1a 2b 3c + // 00 00 03 + , + "//result/doc[int='8' and position()=3]"); // 8 59 2d + // 4d + assertQ( + req("q", "text:c", "fl", "id_i", "sort", "payload desc", "rows", "20"), + "//result[@numFound='3']" // + , + "//result/doc[int='8' and position()=1]" // 8 59 2d 4d + , + "//result/doc[int='6' and position()=2]" // 6 1a 2b 3c + // 00 00 03 + , + "//result/doc[int='7' and position()=3]"); // 7 00 3c + // 73 + assertQ( + req("q", "text:d", "fl", "id_i", "sort", "payload asc", "rows", "20"), + "//result[@numFound='3']" // + , + "//result/doc[int='12' and position()=1]" // 12 34 dd 4d + , + "//result/doc[int='13' and position()=2]" // 13 80 11 33 + , + "//result/doc[int='11' and position()=3]"); // 11 ff af + // 9c + assertQ( + req("q", "text:d", 
"fl", "id_i", "sort", "payload desc", "rows", "20"), + "//result[@numFound='3']" // + , + "//result/doc[int='11' and position()=1]" // 11 ff af 9c + , + "//result/doc[int='13' and position()=2]" // 13 80 11 33 + , + "//result/doc[int='12' and position()=3]"); // 12 34 dd + // 4d } } diff --git a/solr/core/src/test/org/apache/solr/search/TestDocSet.java b/solr/core/src/test/org/apache/solr/search/TestDocSet.java index 02492fc8521..943d9a314d8 100644 --- a/solr/core/src/test/org/apache/solr/search/TestDocSet.java +++ b/solr/core/src/test/org/apache/solr/search/TestDocSet.java @@ -51,9 +51,7 @@ import org.apache.lucene.util.Version; import org.apache.solr.SolrTestCase; -/** - * - */ +/** */ public class TestDocSet extends SolrTestCase { Random rand; private Object IndexSearcher; @@ -69,27 +67,27 @@ public void collect(DocSet set, int maxDoc) { int smallSetSize = maxDoc >> 64 + 3; if (set.size() > 1) { if (random().nextBoolean()) { - smallSetSize = set.size() + random().nextInt(3) - 1; // test the bounds around smallSetSize + smallSetSize = set.size() + random().nextInt(3) - 1; // test the bounds around smallSetSize } } DocSetCollector collector = new DocSetCollector(smallSetSize, maxDoc); - for(DocIterator i1 = set.iterator(); i1.hasNext();) { + for (DocIterator i1 = set.iterator(); i1.hasNext(); ) { try { - collector.collect( i1.nextDoc() ); + collector.collect(i1.nextDoc()); } catch (IOException e) { - throw new RuntimeException(e); // should be impossible + throw new RuntimeException(e); // should be impossible } } DocSet result = collector.getDocSet(); - iter(set, result); // check that they are equal + iter(set, result); // check that they are equal } public FixedBitSet getRandomSet(int sz, int bitsToSet) { FixedBitSet bs = new FixedBitSet(sz); - if (sz==0) return bs; - for (int i=0; i=offset; i--) { + for (int i = end - 1; i >= offset; i--) { arr[i] = iter.nextDoc(); } - return new DocSlice(offset, len, arr, null, len*2, 100.0f, TotalHits.Relation.EQUAL_TO); + return new DocSlice(offset, len, arr, null, len * 2, 100.0f, TotalHits.Relation.EQUAL_TO); } - public DocSet getDocSet(FixedBitSet bs) { - switch(rand.nextInt(9)) { - case 0: case 1: case 2: case 3: return getBitDocSet(bs); - - case 4: return getIntDocSet(bs); - case 5: return getIntDocSet(bs); - case 6: return getIntDocSet(bs); - case 7: return getIntDocSet(bs); - case 8: return getIntDocSet(bs); + switch (rand.nextInt(9)) { + case 0: + case 1: + case 2: + case 3: + return getBitDocSet(bs); + + case 4: + return getIntDocSet(bs); + case 5: + return getIntDocSet(bs); + case 6: + return getIntDocSet(bs); + case 7: + return getIntDocSet(bs); + case 8: + return getIntDocSet(bs); } return null; } public void checkEqual(FixedBitSet bs, DocSet set) { - for (int i=0; i disiSupplier(final DocIdSet docs) { } @SafeVarargs - private static void populateDisis(NoThrowDocIdSetIterator[] disis, Supplier... suppliers) { + private static void populateDisis( + NoThrowDocIdSetIterator[] disis, Supplier... suppliers) { for (int i = 0; i < suppliers.length; i++) { DocIdSetIterator disi = suppliers[i].get(); disis[i] = disi == null ? 
null : new NoThrowDocIdSetIterator(disi); } } - private static void populateDocs(NoThrowDocIdSetIterator[] disis, int[] docs, ToIntFunction toDocId) throws IOException { + private static void populateDocs( + NoThrowDocIdSetIterator[] disis, int[] docs, ToIntFunction toDocId) + throws IOException { for (int i = 0; i < docs.length; i++) { docs[i] = toDocId.applyAsInt(disis[i]); } @@ -446,14 +458,16 @@ private static void assertAll(int expected, int[] docs) { } /** - * By wrapping exceptions (which we don't expect to have thrown in this context anyway), we allow for - * more transparent/readable inline functions. + * By wrapping exceptions (which we don't expect to have thrown in this context anyway), we allow + * for more transparent/readable inline functions. */ private static class NoThrowDocIdSetIterator extends DocIdSetIterator { private final DocIdSetIterator backing; + private NoThrowDocIdSetIterator(DocIdSetIterator backing) { this.backing = backing; } + @Override public int advance(int target) { try { @@ -462,14 +476,17 @@ public int advance(int target) { throw new RuntimeException(e); } } + @Override public long cost() { return backing.cost(); } + @Override public int docID() { return backing.docID(); } + @Override public int nextDoc() { try { @@ -481,7 +498,8 @@ public int nextDoc() { } @SafeVarargs - private void doTestIteratorEqual(Bits bits, Supplier... disiSuppliers) throws IOException { + private void doTestIteratorEqual(Bits bits, Supplier... disiSuppliers) + throws IOException { NoThrowDocIdSetIterator[] disis = new NoThrowDocIdSetIterator[disiSuppliers.length]; int[] docs = new int[disiSuppliers.length]; populateDisis(disis, disiSuppliers); @@ -493,7 +511,7 @@ private void doTestIteratorEqual(Bits bits, Supplier... disiSu // test for next() equivalence final int bitsLength = bits == null ? -1 : bits.length(); int bitsDoc = -1; - for(;;) { + for (; ; ) { populateDocs(disis, docs, (disi) -> disi.nextDoc()); final int expected = docs[0]; // arbitrarily pick the first as "expected" assertAll(expected, docs); @@ -502,22 +520,25 @@ private void doTestIteratorEqual(Bits bits, Supplier... disiSu while (++bitsDoc < expected && bitsDoc < bitsLength) { assertFalse(bits.get(bitsDoc)); } - if (expected==DocIdSetIterator.NO_MORE_DOCS) break; + if (expected == DocIdSetIterator.NO_MORE_DOCS) break; assertTrue(bits.get(expected)); } - for (int i=0; i<10; i++) { + for (int i = 0; i < 10; i++) { // test random skipTo() and next() populateDisis(disis, disiSuppliers); bitsDoc = -1; int doc = -1; - for (;;) { + for (; ; ) { final int target; if (rand.nextBoolean()) { target = doc + 1; populateDocs(disis, docs, (disi) -> disi.nextDoc()); } else { - target = doc + rand.nextInt(10) + 1; // keep in mind future edge cases like probing (increase if necessary) + target = + doc + + rand.nextInt(10) + + 1; // keep in mind future edge cases like probing (increase if necessary) populateDocs(disis, docs, (disi) -> disi.advance(target)); } @@ -528,28 +549,27 @@ private void doTestIteratorEqual(Bits bits, Supplier... disiSu for (int j = target; j < expected && j < bitsLength; j++) { assertFalse(bits.get(j)); } - if (expected==DocIdSetIterator.NO_MORE_DOCS) break; + if (expected == DocIdSetIterator.NO_MORE_DOCS) break; assertTrue(bits.get(expected)); doc = expected; } } } - /** - * Tests equivalence among {@link DocIdSetIterator} instances retrieved from {@link BitDocSet} and {@link SortedIntDocSet} - * implementations, via {@link DocSet#makeQuery()} and directly via {@link DocSet#iterator(LeafReaderContext)}. 
- * Also tests corresponding random-access {@link Bits} instances retrieved via {@link DocSet#makeQuery()}/ - * {@link DocIdSet#bits()}. + * Tests equivalence among {@link DocIdSetIterator} instances retrieved from {@link BitDocSet} and + * {@link SortedIntDocSet} implementations, via {@link DocSet#makeQuery()} and directly via {@link + * DocSet#iterator(LeafReaderContext)}. Also tests corresponding random-access {@link Bits} + * instances retrieved via {@link DocSet#makeQuery()}/ {@link DocIdSet#bits()}. */ public void doFilterTest(IndexReader reader) throws IOException { IndexReaderContext topLevelContext = reader.getContext(); - FixedBitSet bs = getRandomSet(reader.maxDoc(), rand.nextInt(reader.maxDoc()+1)); + FixedBitSet bs = getRandomSet(reader.maxDoc(), rand.nextInt(reader.maxDoc() + 1)); DocSet a = new BitDocSet(bs); DocSet b = getIntDocSet(bs); -// Query fa = a.makeQuery(); -// Query fb = b.makeQuery(); + // Query fa = a.makeQuery(); + // Query fb = b.makeQuery(); /* top level filters are no longer supported // test top-level @@ -561,29 +581,39 @@ public void doFilterTest(IndexReader reader) throws IOException { List leaves = topLevelContext.leaves(); // first test in-sequence sub readers for (LeafReaderContext readerContext : leaves) { - // there are various ways that disis can be retrieved for each leafReader; they should all be equivalent. - doTestIteratorEqual(getExpectedBits(a, readerContext), () -> a.iterator(readerContext), () -> b.iterator(readerContext)); - } + // there are various ways that disis can be retrieved for each leafReader; they should all be + // equivalent. + doTestIteratorEqual( + getExpectedBits(a, readerContext), + () -> a.iterator(readerContext), + () -> b.iterator(readerContext)); + } int nReaders = leaves.size(); // now test out-of-sequence sub readers - for (int i=0; i a.iterator(readerContext), () -> b.iterator(readerContext)); + doTestIteratorEqual( + getExpectedBits(a, readerContext), + () -> a.iterator(readerContext), + () -> b.iterator(readerContext)); } } public void testFilter() throws IOException { // keeping these numbers smaller help hit more edge cases - int maxSeg=4; - int maxDoc=5; // increase if future changes add more edge cases (like probing a certain distance in the bin search) - for (int i=0; i<5000; i++) { + int maxSeg = 4; + // increase if future changes add more edge cases (like probing a certain distance in the bin + // search) + int maxDoc = 5; + for (int i = 0; i < 5000; i++) { IndexReader r = dummyMultiReader(maxSeg, maxDoc); doFilterTest(r); } } - private DocIdSetIterator getDocIdSetIteratorFromQuery(DocSetQuery dsq, LeafReaderContext readerContext) throws IOException { + private DocIdSetIterator getDocIdSetIteratorFromQuery( + DocSetQuery dsq, LeafReaderContext readerContext) throws IOException { Scorer scorer = dsq.createWeight(null, ScoreMode.COMPLETE_NO_SCORES, 0).scorer(readerContext); return scorer != null ? 
scorer.iterator() : null; } diff --git a/solr/core/src/test/org/apache/solr/search/TestElisionMultitermQuery.java b/solr/core/src/test/org/apache/solr/search/TestElisionMultitermQuery.java index 8807fae950f..76e98479974 100644 --- a/solr/core/src/test/org/apache/solr/search/TestElisionMultitermQuery.java +++ b/solr/core/src/test/org/apache/solr/search/TestElisionMultitermQuery.java @@ -29,13 +29,13 @@ public String getCoreName() { @BeforeClass public static void beforeTests() throws Exception { initCore("solrconfig-basic.xml", "schema-folding.xml"); - + assertU(adoc("id", "1", "text_fr", "l'Auberge")); assertU(adoc("id", "2", "text_fr", "Auberge")); assertU(adoc("id", "3", "text_fr", "other")); assertU(commit()); } - + @Test public void testElisionMultitermQuery() { assertQ(req("q", "text_fr:auberge"), "//result[@numFound='2']"); @@ -47,5 +47,4 @@ public void testElisionMultitermQuery() { assertQ(req("q", "text_fr:l'aub*"), "//result[@numFound='2']"); assertQ(req("q", "text_fr:l'Aub*"), "//result[@numFound='2']"); } - } diff --git a/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java b/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java index 1703b35d612..a66a84d0874 100644 --- a/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java +++ b/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java @@ -16,6 +16,16 @@ */ package org.apache.solr.search; +import static org.apache.solr.util.QueryMatchers.booleanQuery; +import static org.apache.solr.util.QueryMatchers.boosted; +import static org.apache.solr.util.QueryMatchers.disjunctionOf; +import static org.apache.solr.util.QueryMatchers.phraseQuery; +import static org.apache.solr.util.QueryMatchers.stringQuery; +import static org.apache.solr.util.QueryMatchers.termQuery; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.containsString; + import java.util.Arrays; import java.util.HashSet; import java.util.List; @@ -23,7 +33,7 @@ import java.util.Random; import java.util.Set; import java.util.stream.Stream; - +import javax.xml.xpath.XPathConstants; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; @@ -45,18 +55,6 @@ import org.junit.BeforeClass; import org.junit.Test; -import javax.xml.xpath.XPathConstants; - -import static org.apache.solr.util.QueryMatchers.booleanQuery; -import static org.apache.solr.util.QueryMatchers.boosted; -import static org.apache.solr.util.QueryMatchers.disjunctionOf; -import static org.apache.solr.util.QueryMatchers.phraseQuery; -import static org.apache.solr.util.QueryMatchers.stringQuery; -import static org.apache.solr.util.QueryMatchers.termQuery; -import static org.hamcrest.Matchers.allOf; -import static org.hamcrest.Matchers.anyOf; -import static org.hamcrest.Matchers.containsString; - public class TestExtendedDismaxParser extends SolrTestCaseJ4 { @BeforeClass @@ -65,29 +63,34 @@ public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema12.xml"); index(); } - + public static void index() throws Exception { - assertU(adoc("id", "42", "trait_ss", "Tool", "trait_ss", "Obnoxious", - "name", "Zapp Brannigan")); - assertU(adoc("id", "43" , - "title", "Democratic Order op Planets")); - assertU(adoc("id", "44", "trait_ss", "Tool", - "name", "The Zapper")); - assertU(adoc("id", "45", "trait_ss", "Chauvinist", - "title", "25 star General")); - assertU(adoc("id", "46", - 
"trait_ss", "Obnoxious", - "subject", "Defeated the pacifists op the Gandhi nebula", - "t_special", "literal:colon value", - "movies_t", "first is Mission: Impossible, second is Terminator 2: Judgement Day. Terminator:3 ok...", - "foo_i", "8" - )); - assertU(adoc("id", "47", "trait_ss", "Pig", - "text", "line up and fly directly at the enemy death cannons, clogging them with wreckage!")); - assertU(adoc("id", "48", "text_sw", "this has gigabyte potential", "foo_i","100")); - assertU(adoc("id", "49", "text_sw", "start the big apple end", "foo_i","-100")); + assertU( + adoc("id", "42", "trait_ss", "Tool", "trait_ss", "Obnoxious", "name", "Zapp Brannigan")); + assertU(adoc("id", "43", "title", "Democratic Order op Planets")); + assertU(adoc("id", "44", "trait_ss", "Tool", "name", "The Zapper")); + assertU(adoc("id", "45", "trait_ss", "Chauvinist", "title", "25 star General")); + assertU( + adoc( + "id", "46", + "trait_ss", "Obnoxious", + "subject", "Defeated the pacifists op the Gandhi nebula", + "t_special", "literal:colon value", + "movies_t", + "first is Mission: Impossible, second is Terminator 2: Judgement Day. Terminator:3 ok...", + "foo_i", "8")); + assertU( + adoc( + "id", + "47", + "trait_ss", + "Pig", + "text", + "line up and fly directly at the enemy death cannons, clogging them with wreckage!")); + assertU(adoc("id", "48", "text_sw", "this has gigabyte potential", "foo_i", "100")); + assertU(adoc("id", "49", "text_sw", "start the big apple end", "foo_i", "-100")); assertU(adoc("id", "50", "text_sw", "start new big city end")); - assertU(adoc("id", "51", "store", "12.34,-56.78")); + assertU(adoc("id", "51", "store", "12.34,-56.78")); assertU(adoc("id", "52", "text_sw", "tekna theou klethomen")); assertU(adoc("id", "53", "text_sw", "nun tekna theou esmen")); assertU(adoc("id", "54", "text_sw", "phanera estin ta tekna tou theou")); @@ -112,8 +115,8 @@ public static void index() throws Exception { assertU(adoc("id", "72", "text_sw", "wifi ATM")); assertU(adoc("id", "73", "shingle23", "A B X D E")); assertU(adoc("id", "74", "isocharfilter", "niño")); -// assertU(adoc("id", "74", "text_pick_best", "tabby")); -// assertU(adoc("id", "74", "text_as_distinct", "persian")); + // assertU(adoc("id", "74", "text_pick_best", "tabby")); + // assertU(adoc("id", "74", "text_as_distinct", "persian")); assertU(commit()); } @@ -122,125 +125,134 @@ public static void index() throws Exception { public void testSyntax() throws Exception { for (String sow : Arrays.asList("true", "false")) { // a bare * should be treated as *:* - assertJQ(req("defType", "edismax", "q", "*", "df", "doesnotexist_s", "sow", sow) - , "/response/docs/[0]==" // make sure we get something... - ); - assertJQ(req("defType", "edismax", "q", "doesnotexist_s:*", "sow", sow) - , "/response/numFound==0" // nothing should be found - ); - assertJQ(req("defType", "edismax", "q", "doesnotexist_s:*", "sow", sow) - , "/response/numFound==0" // nothing should be found - ); - assertJQ(req("defType", "edismax", "q", "doesnotexist_s:( * * * )", "sow", sow) - , "/response/numFound==0" // nothing should be found - ); + assertJQ( + req("defType", "edismax", "q", "*", "df", "doesnotexist_s", "sow", sow), + "/response/docs/[0]==" // make sure we get something... 
+ ); + assertJQ( + req("defType", "edismax", "q", "doesnotexist_s:*", "sow", sow), + "/response/numFound==0" // nothing should be found + ); + assertJQ( + req("defType", "edismax", "q", "doesnotexist_s:*", "sow", sow), + "/response/numFound==0" // nothing should be found + ); + assertJQ( + req("defType", "edismax", "q", "doesnotexist_s:( * * * )", "sow", sow), + "/response/numFound==0" // nothing should be found + ); } } - public void testTrailingOperators() throws Exception { for (String sow : Arrays.asList("true", "false")) { // really just test that exceptions aren't thrown by // single + - - assertJQ(req("defType", "edismax", "q", "-", "sow", sow) , "/response=="); + assertJQ(req("defType", "edismax", "q", "-", "sow", sow), "/response==""); - assertJQ(req("defType", "edismax", "q", "+", "sow", sow) , "/response=="); + assertJQ(req("defType", "edismax", "q", "+", "sow", sow), "/response=="); - assertJQ(req("defType", "edismax", "q", "+ - +", "sow", sow) , "/response=="); + assertJQ(req("defType", "edismax", "q", "+ - +", "sow", sow), "/response=="); - assertJQ(req("defType", "edismax", "q", "- + -", "sow", sow) , "/response=="); + assertJQ(req("defType", "edismax", "q", "- + -", "sow", sow), "/response=="); - assertJQ(req("defType", "edismax", "q", "id:47 +", "sow", sow) , "/response/numFound==1"); + assertJQ(req("defType", "edismax", "q", "id:47 +", "sow", sow), "/response/numFound==1"); - assertJQ(req("defType", "edismax", "q", "id:47 -", "sow", sow) , "/response/numFound==1"); + assertJQ(req("defType", "edismax", "q", "id:47 -", "sow", sow), "/response/numFound==1"); Random r = random(); - for (int i=0; i<100; i++) { + for (int i = 0; i < 100; i++) { StringBuilder sb = new StringBuilder(); - for (int j=0; j h.query(req("uf", "fl=trait*,id", "defType", "edismax"))); + SolrException exception = + expectThrows( + SolrException.class, () -> h.query(req("uf", "fl=trait*,id", "defType", "edismax"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, exception.code()); - assertEquals("dynamic field name must start or end with *", - exception.getMessage()); + assertEquals("dynamic field name must start or end with *", exception.getMessage()); } finally { resetExceptionIgnores(); } @@ -734,24 +977,118 @@ public void testCyclicAliasing() throws Exception { try { ignoreException(".*Field aliases lead to a cycle.*"); - SolrException e = expectThrows(SolrException.class, "Simple cyclic aliasing not detected", - () -> h.query(req("defType","edismax", "q","blarg", "qf","who", "f.who.qf","name","f.name.qf","who"))); + SolrException e = + expectThrows( + SolrException.class, + "Simple cyclic aliasing not detected", + () -> + h.query( + req( + "defType", + "edismax", + "q", + "blarg", + "qf", + "who", + "f.who.qf", + "name", + "f.name.qf", + "who"))); assertCyclicDetectionErrorMessage(e); - e = expectThrows(SolrException.class, "Cyclic aliasing not detected", - () -> h.query(req("defType","edismax", "q","blarg", "qf","who", "f.who.qf","name","f.name.qf","myalias", "f.myalias.qf","who"))); + e = + expectThrows( + SolrException.class, + "Cyclic aliasing not detected", + () -> + h.query( + req( + "defType", + "edismax", + "q", + "blarg", + "qf", + "who", + "f.who.qf", + "name", + "f.name.qf", + "myalias", + "f.myalias.qf", + "who"))); assertCyclicDetectionErrorMessage(e); - e = expectThrows(SolrException.class, "Cyclic aliasing not detected", () -> h.query(req("defType","edismax", "q","blarg", "qf","field1", "f.field1.qf","field2 field3","f.field2.qf","field4 field5", "f.field4.qf","field5",
"f.field5.qf","field6", "f.field3.qf","field6"))); - assertFalse("This is not cyclic aliasing", e.getCause().getMessage().contains("Field aliases lead to a cycle")); - assertTrue("Should throw exception due to invalid field name", e.getCause().getMessage().contains("not a valid field name")); - - e = expectThrows(SolrException.class, "Cyclic alising not detected", - () -> h.query(req("defType","edismax", "q","blarg", "qf","field1", "f.field1.qf","field2 field3", "f.field2.qf","field4 field5", "f.field4.qf","field5", "f.field5.qf","field4"))); + e = + expectThrows( + SolrException.class, + "Cyclic aliasing not detected", + () -> + h.query( + req( + "defType", + "edismax", + "q", + "blarg", + "qf", + "field1", + "f.field1.qf", + "field2 field3", + "f.field2.qf", + "field4 field5", + "f.field4.qf", + "field5", + "f.field5.qf", + "field6", + "f.field3.qf", + "field6"))); + assertFalse( + "This is not cyclic aliasing", + e.getCause().getMessage().contains("Field aliases lead to a cycle")); + assertTrue( + "Should throw exception due to invalid field name", + e.getCause().getMessage().contains("not a valid field name")); + + e = + expectThrows( + SolrException.class, + "Cyclic alising not detected", + () -> + h.query( + req( + "defType", + "edismax", + "q", + "blarg", + "qf", + "field1", + "f.field1.qf", + "field2 field3", + "f.field2.qf", + "field4 field5", + "f.field4.qf", + "field5", + "f.field5.qf", + "field4"))); assertCyclicDetectionErrorMessage(e); - e = expectThrows(SolrException.class, "Cyclic alising not detected", - () -> h.query(req("defType","edismax", "q","who:(Zapp Pig)", "qf","text", "f.who.qf","name","f.name.qf","myalias", "f.myalias.qf","who"))); + e = + expectThrows( + SolrException.class, + "Cyclic alising not detected", + () -> + h.query( + req( + "defType", + "edismax", + "q", + "who:(Zapp Pig)", + "qf", + "text", + "f.who.qf", + "name", + "f.name.qf", + "myalias", + "f.myalias.qf", + "who"))); assertCyclicDetectionErrorMessage(e); } finally { resetExceptionIgnores(); @@ -771,73 +1108,87 @@ public void testOperatorsWithLiteralColons() { assertU(adoc("id", "147", "a_s", "AND", "a_s", "NOT")); assertU(commit()); - assertQ(req("q", "bogus:xxx AND text_s:yak", - "fl", "id", - "qf", "a_s b_s", - "defType", "edismax", - "mm", "0"), - "//*[@numFound='1']", - "//str[@name='id'][.='142']"); - - assertQ(req("q", "a_s:xxx AND text_s:yak", - "fl", "id", - "qf", "a_s b_s", - "defType", "edismax", - "mm", "0", - "uf", "text_s"), - "//*[@numFound='1']", - "//str[@name='id'][.='145']"); - - assertQ(req("q", "NOT bogus:xxx +text_s:yak", - "fl", "id", - "qf", "a_s b_s", - "defType", "edismax", - "mm", "0", - "debugQuery", "true"), - "//*[@numFound='2']", - "//str[@name='id'][.='144']", - "//str[@name='id'][.='145']"); - - assertQ(req("q", "NOT a_s:xxx +text_s:yak", - "fl", "id", - "qf", "a_s b_s", - "defType", "edismax", - "mm", "0", - "uf", "text_s"), - "//*[@numFound='2']", - "//str[@name='id'][.='142']", - "//str[@name='id'][.='144']"); - - assertQ(req("q", "+bogus:xxx yak", - "fl", "id", - "qf", "a_s b_s text_s", - "defType", "edismax", - "mm", "0"), - "//*[@numFound='2']", - "//str[@name='id'][.='142']", - "//str[@name='id'][.='143']"); - - assertQ(req("q", "+a_s:xxx yak", - "fl", "id", - "qf", "a_s b_s text_s", - "defType", "edismax", - "mm", "0", - "uf", "b_s"), - "//*[@numFound='2']", - "//str[@name='id'][.='145']", - "//str[@name='id'][.='146']"); + assertQ( + req( + "q", "bogus:xxx AND text_s:yak", + "fl", "id", + "qf", "a_s b_s", + "defType", "edismax", + "mm", "0"), + 
"//*[@numFound='1']", + "//str[@name='id'][.='142']"); + + assertQ( + req( + "q", "a_s:xxx AND text_s:yak", + "fl", "id", + "qf", "a_s b_s", + "defType", "edismax", + "mm", "0", + "uf", "text_s"), + "//*[@numFound='1']", + "//str[@name='id'][.='145']"); + + assertQ( + req( + "q", "NOT bogus:xxx +text_s:yak", + "fl", "id", + "qf", "a_s b_s", + "defType", "edismax", + "mm", "0", + "debugQuery", "true"), + "//*[@numFound='2']", + "//str[@name='id'][.='144']", + "//str[@name='id'][.='145']"); + + assertQ( + req( + "q", "NOT a_s:xxx +text_s:yak", + "fl", "id", + "qf", "a_s b_s", + "defType", "edismax", + "mm", "0", + "uf", "text_s"), + "//*[@numFound='2']", + "//str[@name='id'][.='142']", + "//str[@name='id'][.='144']"); + + assertQ( + req( + "q", "+bogus:xxx yak", + "fl", "id", + "qf", "a_s b_s text_s", + "defType", "edismax", + "mm", "0"), + "//*[@numFound='2']", + "//str[@name='id'][.='142']", + "//str[@name='id'][.='143']"); + + assertQ( + req( + "q", "+a_s:xxx yak", + "fl", "id", + "qf", "a_s b_s text_s", + "defType", "edismax", + "mm", "0", + "uf", "b_s"), + "//*[@numFound='2']", + "//str[@name='id'][.='145']", + "//str[@name='id'][.='146']"); } - + // test phrase fields including pf2 pf3 and phrase slop public void testPfPs() throws Exception { - assertU(adoc("id", "s0", "phrase_sw", "foo bar a b c", "boost_d", "1.0")); - assertU(adoc("id", "s1", "phrase_sw", "foo a bar b c", "boost_d", "2.0")); - assertU(adoc("id", "s2", "phrase_sw", "foo a b bar c", "boost_d", "3.0")); - assertU(adoc("id", "s3", "phrase_sw", "foo a b c bar", "boost_d", "4.0")); + assertU(adoc("id", "s0", "phrase_sw", "foo bar a b c", "boost_d", "1.0")); + assertU(adoc("id", "s1", "phrase_sw", "foo a bar b c", "boost_d", "2.0")); + assertU(adoc("id", "s2", "phrase_sw", "foo a b bar c", "boost_d", "3.0")); + assertU(adoc("id", "s3", "phrase_sw", "foo a b c bar", "boost_d", "4.0")); assertU(commit()); - assertQ("default order assumption wrong", - req("q", "foo bar", + assertQ( + "default order assumption wrong", + req( + "q", "foo bar", "qf", "phrase_sw", "bf", "boost_d", "fl", "score,*", @@ -845,53 +1196,65 @@ public void testPfPs() throws Exception { "//doc[1]/str[@name='id'][.='s3']", "//doc[2]/str[@name='id'][.='s2']", "//doc[3]/str[@name='id'][.='s1']", - "//doc[4]/str[@name='id'][.='s0']"); + "//doc[4]/str[@name='id'][.='s0']"); - assertQ("pf not working", - req("q", "foo bar", + assertQ( + "pf not working", + req( + "q", "foo bar", "qf", "phrase_sw", "pf", "phrase_sw^10", "fl", "score,*", "defType", "edismax"), "//doc[1]/str[@name='id'][.='s0']"); - - assertQ("pf2 not working", - req("q", "foo bar", - "qf", "phrase_sw", + + assertQ( + "pf2 not working", + req( + "q", "foo bar", + "qf", "phrase_sw", "pf2", "phrase_sw^10", - "fl", "score,*", + "fl", "score,*", "defType", "edismax"), - "//doc[1]/str[@name='id'][.='s0']"); + "//doc[1]/str[@name='id'][.='s0']"); - assertQ("pf3 not working", - req("q", "a b bar", - "qf", "phrase_sw", + assertQ( + "pf3 not working", + req( + "q", "a b bar", + "qf", "phrase_sw", "pf3", "phrase_sw^10", - "fl", "score,*", + "fl", "score,*", "defType", "edismax"), - "//doc[1]/str[@name='id'][.='s2']"); + "//doc[1]/str[@name='id'][.='s2']"); - assertQ("ps not working for pf2", - req("q", "bar foo", - "qf", "phrase_sw", + assertQ( + "ps not working for pf2", + req( + "q", "bar foo", + "qf", "phrase_sw", "pf2", "phrase_sw^10", - "ps", "2", - "fl", "score,*", + "ps", "2", + "fl", "score,*", "defType", "edismax"), - "//doc[1]/str[@name='id'][.='s0']"); + "//doc[1]/str[@name='id'][.='s0']"); - 
assertQ("ps not working for pf3", - req("q", "a bar foo", - "qf", "phrase_sw", + assertQ( + "ps not working for pf3", + req( + "q", "a bar foo", + "qf", "phrase_sw", "pf3", "phrase_sw^10", - "ps", "3", - "fl", "score,*", - "debugQuery", "true", + "ps", "3", + "fl", "score,*", + "debugQuery", "true", "defType", "edismax"), - "//doc[1]/str[@name='id'][.='s1']"); - - assertQ("ps/ps2/ps3 with default slop overrides not working", - req("q", "zzzz xxxx cccc vvvv", + "//doc[1]/str[@name='id'][.='s1']"); + + assertQ( + "ps/ps2/ps3 with default slop overrides not working", + req( + "q", "zzzz xxxx cccc vvvv", "qf", "phrase_sw", "pf", "phrase_sw~1^10 phrase_sw~2^20 phrase_sw^30", "pf2", "phrase_sw~2^22 phrase_sw^33", @@ -907,132 +1270,183 @@ public void testPfPs() throws Exception { "//str[@name='parsedquery'][contains(.,'(phrase_sw:\"cccc vvvv\"~2)^22.0')]", "//str[@name='parsedquery'][contains(.,'(phrase_sw:\"zzzz xxxx\"~3)^33.0')]", "//str[@name='parsedquery'][contains(.,'(phrase_sw:\"xxxx cccc\"~3)^33.0')]", - "//str[@name='parsedquery'][contains(.,'(phrase_sw:\"cccc vvvv\"~3)^33.0')]", + "//str[@name='parsedquery'][contains(.,'(phrase_sw:\"cccc vvvv\"~3)^33.0')]", "//str[@name='parsedquery'][contains(.,'(phrase_sw:\"zzzz xxxx cccc\"~2)^222.0')]", "//str[@name='parsedquery'][contains(.,'(phrase_sw:\"xxxx cccc vvvv\"~2)^222.0')]", "//str[@name='parsedquery'][contains(.,'(phrase_sw:\"zzzz xxxx cccc\"~3)^333.0')]", - "//str[@name='parsedquery'][contains(.,'(phrase_sw:\"xxxx cccc vvvv\"~3)^333.0')]" - ); + "//str[@name='parsedquery'][contains(.,'(phrase_sw:\"xxxx cccc vvvv\"~3)^333.0')]"); assertQ( "ps2 not working", - req("q", "bar foo", "qf", "phrase_sw", "pf2", "phrase_sw^10", "ps2", - "2", "fl", "score,*", "defType", "edismax"), + req( + "q", + "bar foo", + "qf", + "phrase_sw", + "pf2", + "phrase_sw^10", + "ps2", + "2", + "fl", + "score,*", + "defType", + "edismax"), "//doc[1]/str[@name='id'][.='s0']"); - + assertQ( "Specifying slop in pf2 param not working", - req("q", "bar foo", "qf", "phrase_sw", "pf2", "phrase_sw~2^10", - "fl", "score,*", "defType", "edismax"), + req( + "q", + "bar foo", + "qf", + "phrase_sw", + "pf2", + "phrase_sw~2^10", + "fl", + "score,*", + "defType", + "edismax"), "//doc[1]/str[@name='id'][.='s0']"); - + assertQ( "Slop in ps2 parameter should override ps", - req("q", "bar foo", "qf", "phrase_sw", "pf2", "phrase_sw^10", "ps", - "0", "ps2", "2", "fl", "score,*", "defType", - "edismax"), "//doc[1]/str[@name='id'][.='s0']"); + req( + "q", + "bar foo", + "qf", + "phrase_sw", + "pf2", + "phrase_sw^10", + "ps", + "0", + "ps2", + "2", + "fl", + "score,*", + "defType", + "edismax"), + "//doc[1]/str[@name='id'][.='s0']"); assertQ( "ps3 not working", - req("q", "a bar foo", "qf", "phrase_sw", "pf3", "phrase_sw^10", "ps3", - "3", "fl", "score,*", "defType", "edismax"), + req( + "q", + "a bar foo", + "qf", + "phrase_sw", + "pf3", + "phrase_sw^10", + "ps3", + "3", + "fl", + "score,*", + "defType", + "edismax"), "//doc[1]/str[@name='id'][.='s1']"); - + assertQ( "Specifying slop in pf3 param not working", - req("q", "a bar foo", "qf", "phrase_sw", "pf3", "phrase_sw~3^10", - "fl", "score,*", "defType", "edismax"), + req( + "q", + "a bar foo", + "qf", + "phrase_sw", + "pf3", + "phrase_sw~3^10", + "fl", + "score,*", + "defType", + "edismax"), "//doc[1]/str[@name='id'][.='s1']"); - - assertQ("ps2 should not override slop specified inline in pf2", - req("q", "zzzz xxxx cccc vvvv", + + assertQ( + "ps2 should not override slop specified inline in pf2", + req( + "q", "zzzz xxxx cccc vvvv", 
"qf", "phrase_sw", "pf2", "phrase_sw~2^22", "ps2", "4", "defType", "edismax", "debugQuery", "true"), - "//str[@name='parsedquery'][contains(.,'(phrase_sw:\"zzzz xxxx\"~2)^22.0')]" - ); - - String parsedquery = getParsedQuery(req("q", "aaaa bbbb cccc", - "qf", "phrase_sw phrase1_sw", - "pf2", "phrase_sw phrase1_sw", - "pf3", "phrase_sw phrase1_sw", - "defType", "edismax", - "debugQuery", "true")); - assertThat("phrase field queries spanning multiple fields should be within their own dismax queries", - parsedquery, anyOf( - containsString("(phrase_sw:\"aaaa bbbb\" | phrase1_sw:\"aaaa bbbb\")"), - containsString("(phrase1_sw:\"aaaa bbbb\" | phrase_sw:\"aaaa bbbb\")"))); - assertThat("phrase field queries spanning multiple fields should be within their own dismax queries", - parsedquery, anyOf( + "//str[@name='parsedquery'][contains(.,'(phrase_sw:\"zzzz xxxx\"~2)^22.0')]"); + + String parsedquery = + getParsedQuery( + req( + "q", + "aaaa bbbb cccc", + "qf", + "phrase_sw phrase1_sw", + "pf2", + "phrase_sw phrase1_sw", + "pf3", + "phrase_sw phrase1_sw", + "defType", + "edismax", + "debugQuery", + "true")); + assertThat( + "phrase field queries spanning multiple fields should be within their own dismax queries", + parsedquery, + anyOf( + containsString("(phrase_sw:\"aaaa bbbb\" | phrase1_sw:\"aaaa bbbb\")"), + containsString("(phrase1_sw:\"aaaa bbbb\" | phrase_sw:\"aaaa bbbb\")"))); + assertThat( + "phrase field queries spanning multiple fields should be within their own dismax queries", + parsedquery, + anyOf( containsString("(phrase_sw:\"bbbb cccc\" | phrase1_sw:\"bbbb cccc\")"), containsString("(phrase1_sw:\"bbbb cccc\" | phrase_sw:\"bbbb cccc\")"))); - assertThat("phrase field queries spanning multiple fields should be within their own dismax queries", - parsedquery, anyOf( + assertThat( + "phrase field queries spanning multiple fields should be within their own dismax queries", + parsedquery, + anyOf( containsString("(phrase_sw:\"aaaa bbbb cccc\" | phrase1_sw:\"aaaa bbbb cccc\")"), containsString("(phrase1_sw:\"aaaa bbbb cccc\" | phrase_sw:\"aaaa bbbb cccc\")"))); } - @Test - public void testWhitespaceCharacters() throws Exception { - assertU(adoc("id", "whitespaceChars", - "cat_s", "foo\nfoo")); - assertU(commit()); + @Test + public void testWhitespaceCharacters() throws Exception { + assertU(adoc("id", "whitespaceChars", "cat_s", "foo\nfoo")); + assertU(commit()); - assertQ(req("q", "(\"foo\nfoo\")", - "qf", "cat_s", - "defType", "edismax") - , "*[count(//doc)=1]"); + assertQ(req("q", "(\"foo\nfoo\")", "qf", "cat_s", "defType", "edismax"), "*[count(//doc)=1]"); - assertQ(req("q", "cat_s:[\"foo\nfoo\" TO \"foo\nfoo\"]", - "qf", "name", - "defType", "edismax") - , "*[count(//doc)=1]"); + assertQ( + req("q", "cat_s:[\"foo\nfoo\" TO \"foo\nfoo\"]", "qf", "name", "defType", "edismax"), + "*[count(//doc)=1]"); - assertQ(req("q", "cat_s:[ \"foo\nfoo\" TO \"foo\nfoo\"]", - "qf", "name", - "defType", "edismax") - , "*[count(//doc)=1]"); + assertQ( + req("q", "cat_s:[ \"foo\nfoo\" TO \"foo\nfoo\"]", "qf", "name", "defType", "edismax"), + "*[count(//doc)=1]"); - assertQ(req("q", "{!edismax qf=cat_s v='[\"foo\nfoo\" TO \"foo\nfoo\"]'}") - , "*[count(//doc)=1]"); + assertQ( + req("q", "{!edismax qf=cat_s v='[\"foo\nfoo\" TO \"foo\nfoo\"]'}"), "*[count(//doc)=1]"); - assertQ(req("q", "{!edismax qf=cat_s v='[ \"foo\nfoo\" TO \"foo\nfoo\"]'}") - , "*[count(//doc)=1]"); + assertQ( + req("q", "{!edismax qf=cat_s v='[ \"foo\nfoo\" TO \"foo\nfoo\"]'}"), "*[count(//doc)=1]"); + } - } + @Test + public void 
testDoubleQuoteCharacters() throws Exception { + assertU(adoc("id", "doubleQuote", "cat_s", "foo\"foo")); + assertU(commit()); - @Test - public void testDoubleQuoteCharacters() throws Exception { - assertU(adoc("id", "doubleQuote", - "cat_s", "foo\"foo")); - assertU(commit()); - - assertQ(req("q", "cat_s:[\"foo\\\"foo\" TO \"foo\\\"foo\"]", - "qf", "name", - "defType", "edismax") , "*[count(//doc)=1]"); - - assertQ(req("q", "cat_s:\"foo\\\"foo\"", - "qf", "name", - "defType", "edismax") , "*[count(//doc)=1]"); - - assertQ(req("q", "cat_s:foo\\\"foo", - "qf", "name", - "defType", "edismax") , "*[count(//doc)=1]"); - - assertQ(req("q", "cat_s:foo\"foo", - "qf", "name", - "defType", "edismax") , "*[count(//doc)=1]"); - } + assertQ( + req("q", "cat_s:[\"foo\\\"foo\" TO \"foo\\\"foo\"]", "qf", "name", "defType", "edismax"), + "*[count(//doc)=1]"); + + assertQ( + req("q", "cat_s:\"foo\\\"foo\"", "qf", "name", "defType", "edismax"), "*[count(//doc)=1]"); + + assertQ(req("q", "cat_s:foo\\\"foo", "qf", "name", "defType", "edismax"), "*[count(//doc)=1]"); + + assertQ(req("q", "cat_s:foo\"foo", "qf", "name", "defType", "edismax"), "*[count(//doc)=1]"); + } /** - * verify that all reserved characters are properly escaped when being set in - * {@link org.apache.solr.search.ExtendedDismaxQParser.Clause#val}. + * verify that all reserved characters are properly escaped when being set in {@link + * org.apache.solr.search.ExtendedDismaxQParser.Clause#val}. * * @see ExtendedDismaxQParser#splitIntoClauses(String, boolean) */ @@ -1041,623 +1455,767 @@ public void testEscapingOfReservedCharacters() throws Exception { // create a document that contains all reserved characters String allReservedCharacters = "!():^[]{}~*?\"+-\\|&/"; - assertU(adoc("id", "reservedChars", - "name", allReservedCharacters, - "cat_s", "foo/")); + assertU( + adoc( + "id", "reservedChars", + "name", allReservedCharacters, + "cat_s", "foo/")); assertU(commit()); - // the backslash needs to be manually escaped (the query parser sees the raw backslash as an escape of the subsequent - // character) + // the backslash needs to be manually escaped (the query parser sees the raw backslash as an + // escape of the subsequent character) String query = allReservedCharacters.replace("\\", "\\\\"); - // query for all those reserved characters.
This will fail to parse in the initial parse, + // meaning that the escaped query will then be used + assertQ( + "Escaping reserved characters", + req( + "q", query, "qf", "name", "mm", "100%", - "defType", "edismax") - , "*[count(//doc)=1]"); - + "defType", "edismax"), + "*[count(//doc)=1]"); + // Query string field 'cat_s' for special char / - causes SyntaxError without patch SOLR-3467 - assertQ("Escaping string with reserved / character", - req("q", "foo/", + assertQ( + "Escaping string with reserved / character", + req( + "q", "foo/", "qf", "cat_s", "mm", "100%", - "defType", "edismax") - , "*[count(//doc)=1]"); - + "defType", "edismax"), + "*[count(//doc)=1]"); + assertQ( - "Might be double-escaping a client-escaped colon", - req("q", "text_sw:(theos OR thistokenhasa\\:preescapedcolon OR theou)", "defType", "edismax", "qf", "id"), + "Might be double-escaping a client-escaped colon", + req( + "q", + "text_sw:(theos OR thistokenhasa\\:preescapedcolon OR theou)", + "defType", + "edismax", + "qf", + "id"), "*[count(//doc)=3]"); assertQ( - "Might be double-escaping a client-escaped colon", - req("q", "text_sw:(theos OR thistokenhasa\\:preescapedcolon OR theou)", "defType", "edismax", "qf", "text"), - "*[count(//doc)=3]"); - + "Might be double-escaping a client-escaped colon", + req( + "q", + "text_sw:(theos OR thistokenhasa\\:preescapedcolon OR theou)", + "defType", + "edismax", + "qf", + "text"), + "*[count(//doc)=3]"); } + /** Repeating some of test cases as direct calls to splitIntoClauses */ + @Test + public void testSplitIntoClauses() throws Exception { + String query = "(\"foo\nfoo\")"; + SolrQueryRequest request = req("q", query, "qf", "cat_s", "defType", "edismax"); + ExtendedDismaxQParser parser = + new ExtendedDismaxQParser(query, null, request.getParams(), request); + List clauses = parser.splitIntoClauses(query, false); + Assert.assertEquals(3, clauses.size()); + assertClause(clauses.get(0), "\\(", false, true); + assertClause(clauses.get(1), "foo\nfoo", true, false); + assertClause(clauses.get(2), "\\)", false, true); + + query = "cat_s:[\"foo\nfoo\" TO \"foo\nfoo\"]"; + request = req("q", query, "qf", "cat_s", "defType", "edismax"); + parser = new ExtendedDismaxQParser(query, null, request.getParams(), request); + clauses = parser.splitIntoClauses(query, false); + Assert.assertEquals(5, clauses.size()); + assertClause(clauses.get(0), "\\[", false, true, "cat_s"); + assertClause(clauses.get(1), "foo\nfoo", true, false); + assertClause(clauses.get(2), "TO", true, false); + assertClause(clauses.get(3), "foo\nfoo", true, false); + assertClause(clauses.get(4), "\\]", false, true); + + query = "cat_s:[ \"foo\nfoo\" TO \"foo\nfoo\"]"; + request = req("q", query, "qf", "cat_s", "defType", "edismax"); + parser = new ExtendedDismaxQParser(query, null, request.getParams(), request); + clauses = parser.splitIntoClauses(query, false); + Assert.assertEquals(5, clauses.size()); + assertClause(clauses.get(0), "\\[", true, true, "cat_s"); + assertClause(clauses.get(1), "foo\nfoo", true, false); + assertClause(clauses.get(2), "TO", true, false); + assertClause(clauses.get(3), "foo\nfoo", true, false); + assertClause(clauses.get(4), "\\]", false, true); - /** - * Repeating some of test cases as direct calls to splitIntoClauses - */ - @Test - public void testSplitIntoClauses() throws Exception { - String query = "(\"foo\nfoo\")"; - SolrQueryRequest request = req("q", query, - "qf", "cat_s", - "defType", "edismax"); - ExtendedDismaxQParser parser = new ExtendedDismaxQParser(query, null, 
request.getParams(), request); - List clauses = parser.splitIntoClauses(query, false); - Assert.assertEquals(3, clauses.size()); - assertClause(clauses.get(0), "\\(", false, true); - assertClause(clauses.get(1), "foo\nfoo", true, false); - assertClause(clauses.get(2), "\\)", false, true); - - query = "cat_s:[\"foo\nfoo\" TO \"foo\nfoo\"]"; - request = req("q", query, - "qf", "cat_s", - "defType", "edismax"); - parser = new ExtendedDismaxQParser(query, null, request.getParams(), request); - clauses = parser.splitIntoClauses(query, false); - Assert.assertEquals(5, clauses.size()); - assertClause(clauses.get(0), "\\[", false, true, "cat_s"); - assertClause(clauses.get(1), "foo\nfoo", true, false); - assertClause(clauses.get(2), "TO", true, false); - assertClause(clauses.get(3), "foo\nfoo", true, false); - assertClause(clauses.get(4), "\\]", false, true); - - query = "cat_s:[ \"foo\nfoo\" TO \"foo\nfoo\"]"; - request = req("q", query, - "qf", "cat_s", - "defType", "edismax"); - parser = new ExtendedDismaxQParser(query, null, request.getParams(), request); - clauses = parser.splitIntoClauses(query, false); - Assert.assertEquals(5, clauses.size()); - assertClause(clauses.get(0), "\\[", true, true, "cat_s"); - assertClause(clauses.get(1), "foo\nfoo", true, false); - assertClause(clauses.get(2), "TO", true, false); - assertClause(clauses.get(3), "foo\nfoo", true, false); - assertClause(clauses.get(4), "\\]", false, true); - - String allReservedCharacters = "!():^[]{}~*?\"+-\\|&/"; - // the backslash needs to be manually escaped (the query parser sees the raw backslash as an escape the subsequent - // character) - query = allReservedCharacters.replace("\\", "\\\\"); - - request = req("q", query, - "qf", "name", - "mm", "100%", - "defType", "edismax"); - - parser = new ExtendedDismaxQParser(query, null, request.getParams(), request); - clauses = parser.splitIntoClauses(query, false); - Assert.assertEquals(1, clauses.size()); - assertClause(clauses.get(0), "\\!\\(\\)\\:\\^\\[\\]\\{\\}\\~\\*\\?\\\"\\+\\-\\\\\\|\\&\\/", false, true); - - query = "foo/"; - request = req("q", query, - "qf", "name", - "mm", "100%", - "defType", "edismax"); - - parser = new ExtendedDismaxQParser(query, null, request.getParams(), request); - clauses = parser.splitIntoClauses(query, false); - Assert.assertEquals(1, clauses.size()); - assertClause(clauses.get(0), "foo\\/", false, true); - } + String allReservedCharacters = "!():^[]{}~*?\"+-\\|&/"; + // the backslash needs to be manually escaped (the query parser sees the raw backslash as an + // escape the subsequent character) + query = allReservedCharacters.replace("\\", "\\\\"); - private static void assertClause(ExtendedDismaxQParser.Clause clause, String value, boolean hasWhitespace, - boolean hasSpecialSyntax, String field) { - Assert.assertEquals(value, clause.val); - Assert.assertEquals(hasWhitespace, clause.hasWhitespace); - Assert.assertEquals(hasSpecialSyntax, clause.hasSpecialSyntax); - Assert.assertEquals(field, clause.field); - } + request = req("q", query, "qf", "name", "mm", "100%", "defType", "edismax"); - private static void assertClause(ExtendedDismaxQParser.Clause clause, String value, boolean hasWhitespace, - boolean hasSpecialSyntax) { - assertClause(clause, value, hasWhitespace, hasSpecialSyntax, null); + parser = new ExtendedDismaxQParser(query, null, request.getParams(), request); + clauses = parser.splitIntoClauses(query, false); + Assert.assertEquals(1, clauses.size()); + assertClause( + clauses.get(0), 
"\\!\\(\\)\\:\\^\\[\\]\\{\\}\\~\\*\\?\\\"\\+\\-\\\\\\|\\&\\/", false, true); - } + query = "foo/"; + request = req("q", query, "qf", "name", "mm", "100%", "defType", "edismax"); - /** - * SOLR-3589: Edismax parser does not honor mm parameter if analyzer splits a token - */ + parser = new ExtendedDismaxQParser(query, null, request.getParams(), request); + clauses = parser.splitIntoClauses(query, false); + Assert.assertEquals(1, clauses.size()); + assertClause(clauses.get(0), "foo\\/", false, true); + } + + private static void assertClause( + ExtendedDismaxQParser.Clause clause, + String value, + boolean hasWhitespace, + boolean hasSpecialSyntax, + String field) { + Assert.assertEquals(value, clause.val); + Assert.assertEquals(hasWhitespace, clause.hasWhitespace); + Assert.assertEquals(hasSpecialSyntax, clause.hasSpecialSyntax); + Assert.assertEquals(field, clause.field); + } + + private static void assertClause( + ExtendedDismaxQParser.Clause clause, + String value, + boolean hasWhitespace, + boolean hasSpecialSyntax) { + assertClause(clause, value, hasWhitespace, hasSpecialSyntax, null); + } + + /** SOLR-3589: Edismax parser does not honor mm parameter if analyzer splits a token */ public void testCJK() throws Exception { - assertQ("test cjk (disjunction)", - req("q", "大亚湾", + assertQ( + "test cjk (disjunction)", + req( + "q", "大亚湾", "qf", "standardtok", "mm", "0%", - "defType", "edismax") - , "*[count(//doc)=3]"); - assertQ("test cjk (minShouldMatch)", - req("q", "大亚湾", + "defType", "edismax"), + "*[count(//doc)=3]"); + assertQ( + "test cjk (minShouldMatch)", + req( + "q", "大亚湾", "qf", "standardtok", "mm", "67%", - "defType", "edismax") - , "*[count(//doc)=2]"); - assertQ("test cjk (conjunction)", - req("q", "大亚湾", + "defType", "edismax"), + "*[count(//doc)=2]"); + assertQ( + "test cjk (conjunction)", + req( + "q", "大亚湾", "qf", "standardtok", "mm", "100%", - "defType", "edismax") - , "*[count(//doc)=1]"); + "defType", "edismax"), + "*[count(//doc)=1]"); } - - /** - * test that minShouldMatch works with aliasing - * for implicit boolean queries - */ + + /** test that minShouldMatch works with aliasing for implicit boolean queries */ public void testCJKAliasing() throws Exception { // single field - assertQ("test cjk (aliasing+disjunction)", - req("q", "myalias:大亚湾", + assertQ( + "test cjk (aliasing+disjunction)", + req( + "q", "myalias:大亚湾", "f.myalias.qf", "standardtok", "mm", "0%", - "defType", "edismax") - , "*[count(//doc)=3]"); - assertQ("test cjk (aliasing+minShouldMatch)", - req("q", "myalias:大亚湾", + "defType", "edismax"), + "*[count(//doc)=3]"); + assertQ( + "test cjk (aliasing+minShouldMatch)", + req( + "q", "myalias:大亚湾", "f.myalias.qf", "standardtok", "mm", "67%", - "defType", "edismax") - , "*[count(//doc)=2]"); - assertQ("test cjk (aliasing+conjunction)", - req("q", "myalias:大亚湾", + "defType", "edismax"), + "*[count(//doc)=2]"); + assertQ( + "test cjk (aliasing+conjunction)", + req( + "q", "myalias:大亚湾", "f.myalias.qf", "standardtok", "mm", "100%", - "defType", "edismax") - , "*[count(//doc)=1]"); + "defType", "edismax"), + "*[count(//doc)=1]"); // multifield - assertQ("test cjk (aliasing+disjunction)", - req("q", "myalias:大亚湾", + assertQ( + "test cjk (aliasing+disjunction)", + req( + "q", "myalias:大亚湾", "f.myalias.qf", "standardtok HTMLstandardtok", "mm", "0%", - "defType", "edismax") - , "*[count(//doc)=6]"); - assertQ("test cjk (aliasing+minShouldMatch)", - req("q", "myalias:大亚湾", + "defType", "edismax"), + "*[count(//doc)=6]"); + assertQ( + "test cjk 
(aliasing+minShouldMatch)", + req( + "q", "myalias:大亚湾", "f.myalias.qf", "standardtok HTMLstandardtok", "mm", "67%", - "defType", "edismax") - , "*[count(//doc)=4]"); - assertQ("test cjk (aliasing+conjunction)", - req("q", "myalias:大亚湾", + "defType", "edismax"), + "*[count(//doc)=4]"); + assertQ( + "test cjk (aliasing+conjunction)", + req( + "q", "myalias:大亚湾", "f.myalias.qf", "standardtok HTMLstandardtok", "mm", "100%", - "defType", "edismax") - , "*[count(//doc)=2]"); + "defType", "edismax"), + "*[count(//doc)=2]"); } - + /** Test that we apply boosts correctly */ public void testCJKBoosts() throws Exception { - assertQ("test cjk (disjunction)", - req("q", "大亚湾", + assertQ( + "test cjk (disjunction)", + req( + "q", "大亚湾", "qf", "standardtok^2 HTMLstandardtok", "mm", "0%", - "defType", "edismax") - , "*[count(//doc)=6]", "//result/doc[1]/str[@name='id'][.='57']"); - assertQ("test cjk (minShouldMatch)", - req("q", "大亚湾", + "defType", "edismax"), + "*[count(//doc)=6]", + "//result/doc[1]/str[@name='id'][.='57']"); + assertQ( + "test cjk (minShouldMatch)", + req( + "q", "大亚湾", "qf", "standardtok^2 HTMLstandardtok", "mm", "67%", - "defType", "edismax") - , "*[count(//doc)=4]", "//result/doc[1]/str[@name='id'][.='57']"); - assertQ("test cjk (conjunction)", - req("q", "大亚湾", + "defType", "edismax"), + "*[count(//doc)=4]", + "//result/doc[1]/str[@name='id'][.='57']"); + assertQ( + "test cjk (conjunction)", + req( + "q", "大亚湾", "qf", "standardtok^2 HTMLstandardtok", "mm", "100%", - "defType", "edismax") - , "*[count(//doc)=2]", "//result/doc[1]/str[@name='id'][.='57']"); - + "defType", "edismax"), + "*[count(//doc)=2]", + "//result/doc[1]/str[@name='id'][.='57']"); + // now boost the other field - assertQ("test cjk (disjunction)", - req("q", "大亚湾", + assertQ( + "test cjk (disjunction)", + req( + "q", "大亚湾", "qf", "standardtok HTMLstandardtok^2", "mm", "0%", - "defType", "edismax") - , "*[count(//doc)=6]", "//result/doc[1]/str[@name='id'][.='60']"); - assertQ("test cjk (minShouldMatch)", - req("q", "大亚湾", + "defType", "edismax"), + "*[count(//doc)=6]", + "//result/doc[1]/str[@name='id'][.='60']"); + assertQ( + "test cjk (minShouldMatch)", + req( + "q", "大亚湾", "qf", "standardtok HTMLstandardtok^2", "mm", "67%", - "defType", "edismax") - , "*[count(//doc)=4]", "//result/doc[1]/str[@name='id'][.='60']"); - assertQ("test cjk (conjunction)", - req("q", "大亚湾", + "defType", "edismax"), + "*[count(//doc)=4]", + "//result/doc[1]/str[@name='id'][.='60']"); + assertQ( + "test cjk (conjunction)", + req( + "q", "大亚湾", "qf", "standardtok HTMLstandardtok^2", "mm", "100%", - "defType", "edismax") - , "*[count(//doc)=2]", "//result/doc[1]/str[@name='id'][.='60']"); + "defType", "edismax"), + "*[count(//doc)=2]", + "//result/doc[1]/str[@name='id'][.='60']"); } - - /** always apply minShouldMatch to the inner booleanqueries - * created from whitespace, as these are never structured lucene queries - * but only come from unstructured text */ + + /** + * always apply minShouldMatch to the inner booleanqueries created from whitespace, as these are + * never structured lucene queries but only come from unstructured text + */ public void testCJKStructured() throws Exception { - assertQ("test cjk (disjunction)", - req("q", "大亚湾 OR bogus", + assertQ( + "test cjk (disjunction)", + req( + "q", "大亚湾 OR bogus", "qf", "standardtok", "mm", "0%", - "defType", "edismax") - , "*[count(//doc)=3]"); - assertQ("test cjk (minShouldMatch)", - req("q", "大亚湾 OR bogus", + "defType", "edismax"), + "*[count(//doc)=3]"); + assertQ( + "test cjk 
(minShouldMatch)", + req( + "q", "大亚湾 OR bogus", "qf", "standardtok", "mm", "67%", - "defType", "edismax") - , "*[count(//doc)=2]"); - assertQ("test cjk (conjunction)", - req("q", "大亚湾 OR bogus", // +(((((standardtok:大 standardtok:亚 standardtok:湾)~3)) (standardtok:bogus))~2) + "defType", "edismax"), + "*[count(//doc)=2]"); + assertQ( + "test cjk (conjunction)", + req( + "q", "大亚湾 OR bogus", // +(((((standardtok:大 standardtok:亚 standardtok:湾)~3)) + // (standardtok:bogus))~2) "qf", "standardtok", "mm", "100%", - "defType", "edismax") - , "//*[@numFound='0']"); + "defType", "edismax"), + "//*[@numFound='0']"); } - + /** - * Test that we don't apply minShouldMatch to the inner boolean queries - * when there are synonyms (these are indicated by coordination factor) + * Test that we don't apply minShouldMatch to the inner boolean queries when there are synonyms + * (these are indicated by coordination factor) */ public void testSynonyms() throws Exception { // document only contains baraaa, but should still match. - assertQ("test synonyms", - req("q", "fooaaa", + assertQ( + "test synonyms", + req( + "q", "fooaaa", "qf", "text_sw", "mm", "100%", - "defType", "edismax") - , "*[count(//doc)=1]"); + "defType", "edismax"), + "*[count(//doc)=1]"); } - /** - * Test that the default operator and MM are interacting appropriately when both provided - */ + /** Test that the default operator and MM are interacting appropriately when both provided */ public void testDefaultOperatorWithMm() throws Exception { // Text we are searching // "line up and fly directly at the enemy death cannons, clogging them with wreckage!" - assertQ("test default operator with mm (AND + 0% => 0 hits)", - req("q", "(line notfound) OR notfound", + assertQ( + "test default operator with mm (AND + 0% => 0 hits)", + req( + "q", "(line notfound) OR notfound", "qf", "text", "q.op", "AND", "mm", "0%", - "defType", "edismax") - , "*[count(//doc)=0]"); - assertQ("test default operator with mm (OR + 0% => 1 hit)", - req("q", "line notfound OR notfound", + "defType", "edismax"), + "*[count(//doc)=0]"); + assertQ( + "test default operator with mm (OR + 0% => 1 hit)", + req( + "q", "line notfound OR notfound", "qf", "text", "q.op", "OR", "mm", "0%", - "defType", "edismax") - , "*[count(//doc)=1]"); - assertQ("test default operator with mm (OR + 100% => 0 hits)", - req("q", "line notfound OR notfound", + "defType", "edismax"), + "*[count(//doc)=1]"); + assertQ( + "test default operator with mm (OR + 100% => 0 hits)", + req( + "q", "line notfound OR notfound", "qf", "text", "q.op", "OR", "mm", "100%", - "defType", "edismax") - , "*[count(//doc)=0]"); - assertQ("test default operator with mm (OR + 35% => 1 hit)", - req("q", "line notfound notfound2 OR notfound", + "defType", "edismax"), + "*[count(//doc)=0]"); + assertQ( + "test default operator with mm (OR + 35% => 1 hit)", + req( + "q", "line notfound notfound2 OR notfound", "qf", "text", "q.op", "OR", "mm", "35%", - "defType", "edismax") - , "*[count(//doc)=1]"); - assertQ("test default operator with mm (OR + 75% => 0 hits)", - req("q", "line notfound notfound2 OR notfound3", + "defType", "edismax"), + "*[count(//doc)=1]"); + assertQ( + "test default operator with mm (OR + 75% => 0 hits)", + req( + "q", "line notfound notfound2 OR notfound3", "qf", "text", "q.op", "OR", "mm", "75%", - "defType", "edismax") - , "*[count(//doc)=0]"); - assertQ("test default operator with mm (AND + 0% => 1 hit)", - req("q", "(line enemy) OR notfound", + "defType", "edismax"), + "*[count(//doc)=0]"); + assertQ( + 
"test default operator with mm (AND + 0% => 1 hit)", + req( + "q", "(line enemy) OR notfound", "qf", "text", "q.op", "AND", "mm", "0%", - "defType", "edismax") - , "*[count(//doc)=1]"); - assertQ("test default operator with mm (AND + 50% => 1 hit)", - req("q", "(line enemy) OR (line notfound) OR (death cannons) OR (death notfound)", + "defType", "edismax"), + "*[count(//doc)=1]"); + assertQ( + "test default operator with mm (AND + 50% => 1 hit)", + req( + "q", "(line enemy) OR (line notfound) OR (death cannons) OR (death notfound)", "qf", "text", "q.op", "AND", "mm", "50%", - "defType", "edismax") - , "*[count(//doc)=1]"); - assertQ("test default operator with mm (AND + 75% => 0 hits)", - req("q", "(line enemy) OR (line notfound) OR (death cannons) OR (death notfound)", + "defType", "edismax"), + "*[count(//doc)=1]"); + assertQ( + "test default operator with mm (AND + 75% => 0 hits)", + req( + "q", "(line enemy) OR (line notfound) OR (death cannons) OR (death notfound)", "qf", "text", "q.op", "AND", "mm", "75%", - "defType", "edismax") - , "*[count(//doc)=0]"); + "defType", "edismax"), + "*[count(//doc)=0]"); } - - /** - * Test that minShouldMatch applies to Optional terms only - */ + + /** Test that minShouldMatch applies to Optional terms only */ public void testMinShouldMatchOptional() throws Exception { for (String sow : Arrays.asList("true", "false")) { - assertQ("test minShouldMatch (top level optional terms only)", - req("q", "stocks oil gold", // +(((text_sw:stock) (text_sw:oil) (text_sw:gold))~1) + assertQ( + "test minShouldMatch (top level optional terms only)", + req( + "q", "stocks oil gold", // +(((text_sw:stock) (text_sw:oil) (text_sw:gold))~1) "qf", "text_sw", "mm", "50%", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=4]"); - - assertQ("test minShouldMatch (top level optional terms only) local mm=50%", - req("q", "{!edismax qf=text_sw mm=50% sow=" + sow + " v='stocks oil gold'}") - , "*[count(//doc)=4]"); - - assertQ("test minShouldMatch (top level optional and negative terms mm=50%)", - req("q", "stocks oil gold -stockade", // +(((text_sw:stock) (text_sw:oil) (text_sw:gold) -(text_sw:stockad))~1) + "defType", "edismax"), + "*[count(//doc)=4]"); + + assertQ( + "test minShouldMatch (top level optional terms only) local mm=50%", + req("q", "{!edismax qf=text_sw mm=50% sow=" + sow + " v='stocks oil gold'}"), + "*[count(//doc)=4]"); + + assertQ( + "test minShouldMatch (top level optional and negative terms mm=50%)", + req( + "q", "stocks oil gold -stockade", // +(((text_sw:stock) (text_sw:oil) (text_sw:gold) + // -(text_sw:stockad))~1) "qf", "text_sw", "mm", "50%", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=3]"); - - assertQ("test minShouldMatch (top level optional and negative terms local mm=50%)", - req("q", "{!edismax qf=text_sw mm=50% sow=" + sow + " v='stocks oil gold -stockade'}") - , "*[count(//doc)=3]"); - - assertQ("test minShouldMatch (top level optional and negative terms mm=100%)", - req("q", "stocks gold -stockade", // +(((text_sw:stock) (text_sw:oil) (text_sw:gold) -(text_sw:stockad))~2) + "defType", "edismax"), + "*[count(//doc)=3]"); + + assertQ( + "test minShouldMatch (top level optional and negative terms local mm=50%)", + req("q", "{!edismax qf=text_sw mm=50% sow=" + sow + " v='stocks oil gold -stockade'}"), + "*[count(//doc)=3]"); + + assertQ( + "test minShouldMatch (top level optional and negative terms mm=100%)", + req( + "q", "stocks gold -stockade", // +(((text_sw:stock) (text_sw:oil) (text_sw:gold) + // -(text_sw:stockad))~2) "qf", 
"text_sw", "mm", "100%", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=1]"); + "defType", "edismax"), + "*[count(//doc)=1]"); - assertQ("test minShouldMatch (top level optional and negative terms local mm=100%)", - req("q", "{!edismax qf=text_sw mm=100% sow=" + sow + " v='stocks gold -stockade'}") - , "*[count(//doc)=1]"); + assertQ( + "test minShouldMatch (top level optional and negative terms local mm=100%)", + req("q", "{!edismax qf=text_sw mm=100% sow=" + sow + " v='stocks gold -stockade'}"), + "*[count(//doc)=1]"); - assertQ("test minShouldMatch (top level required terms only)", - req("q", "stocks AND oil", // +(+(text_sw:stock) +(text_sw:oil)) + assertQ( + "test minShouldMatch (top level required terms only)", + req( + "q", "stocks AND oil", // +(+(text_sw:stock) +(text_sw:oil)) "qf", "text_sw", "mm", "50%", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=1]"); + "defType", "edismax"), + "*[count(//doc)=1]"); - assertQ("test minShouldMatch (top level required terms only) local mm=50%)", - req("q", "{!edismax qf=text_sw mm=50% sow=" + sow + " v='stocks AND oil'}") - , "*[count(//doc)=1]"); + assertQ( + "test minShouldMatch (top level required terms only) local mm=50%)", + req("q", "{!edismax qf=text_sw mm=50% sow=" + sow + " v='stocks AND oil'}"), + "*[count(//doc)=1]"); - assertQ("test minShouldMatch (top level optional and required terms)", - req("q", "oil gold +stocks", // +(((text_sw:oil) (text_sw:gold) +(text_sw:stock))~1) + assertQ( + "test minShouldMatch (top level optional and required terms)", + req( + "q", "oil gold +stocks", // +(((text_sw:oil) (text_sw:gold) +(text_sw:stock))~1) "qf", "text_sw", "mm", "50%", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=3]"); + "defType", "edismax"), + "*[count(//doc)=3]"); - assertQ("test minShouldMatch (top level optional and required terms) local mm=50%)", - req("q", "{!edismax qf=text_sw mm=50% sow=" + sow + " v='oil gold +stocks'}") - , "*[count(//doc)=3]"); + assertQ( + "test minShouldMatch (top level optional and required terms) local mm=50%)", + req("q", "{!edismax qf=text_sw mm=50% sow=" + sow + " v='oil gold +stocks'}"), + "*[count(//doc)=3]"); - assertQ("test minShouldMatch (top level optional with explicit OR and parens)", - req("q", "(snake OR stocks) oil", + assertQ( + "test minShouldMatch (top level optional with explicit OR and parens)", + req( + "q", "(snake OR stocks) oil", "qf", "text_sw", "mm", "100%", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=2]"); + "defType", "edismax"), + "*[count(//doc)=2]"); - assertQ("test minShouldMatch (top level optional with explicit OR and parens) local mm=100%)", - req("q", "{!edismax qf=text_sw mm=100% sow=" + sow + " v='(snake OR stocks) oil'}") - , "*[count(//doc)=2]"); + assertQ( + "test minShouldMatch (top level optional with explicit OR and parens) local mm=100%)", + req("q", "{!edismax qf=text_sw mm=100% sow=" + sow + " v='(snake OR stocks) oil'}"), + "*[count(//doc)=2]"); // The results for these two appear odd, but are correct as per BooleanQuery processing. 
// See: http://searchhub.org/2011/12/28/why-not-and-or-and-not/ - // Non-parenthesis OR/AND precedence is not true to abstract boolean logic in solr when q.op = AND + // Non-parenthesis OR/AND precedence is not true to abstract boolean logic in solr when q.op = + // AND // and when q.op = OR all three clauses are top-level and optional so mm takes over - assertQ("test minShouldMatch (top level optional with explicit OR without parens)", - req("q", "snake OR stocks oil", + assertQ( + "test minShouldMatch (top level optional with explicit OR without parens)", + req( + "q", "snake OR stocks oil", "qf", "text_sw", "q.op", "OR", "mm", "100%", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=0]"); + "defType", "edismax"), + "*[count(//doc)=0]"); - assertQ("test minShouldMatch (top level optional with explicit OR without parens) local mm=100%)", - req("q", "{!edismax qf=text_sw q.op=OR mm=100% sow=" + sow + " v='snake OR stocks oil'}") - , "*[count(//doc)=0]"); + assertQ( + "test minShouldMatch (top level optional with explicit OR without parens) local mm=100%)", + req("q", "{!edismax qf=text_sw q.op=OR mm=100% sow=" + sow + " v='snake OR stocks oil'}"), + "*[count(//doc)=0]"); - assertQ("test minShouldMatch (top level optional with explicit OR without parens)", - req("q", "snake OR stocks oil", + assertQ( + "test minShouldMatch (top level optional with explicit OR without parens)", + req( + "q", "snake OR stocks oil", "qf", "text_sw", "q.op", "AND", "mm", "100%", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=0]"); + "defType", "edismax"), + "*[count(//doc)=0]"); - assertQ("test minShouldMatch (top level optional with explicit OR without parens) local mm=100%)", - req("q", "{!edismax qf=text_sw q.op=AND mm=100% sow=" + sow + " v='snake OR stocks oil'}") - , "*[count(//doc)=0]"); + assertQ( + "test minShouldMatch (top level optional with explicit OR without parens) local mm=100%)", + req( + "q", + "{!edismax qf=text_sw q.op=AND mm=100% sow=" + sow + " v='snake OR stocks oil'}"), + "*[count(//doc)=0]"); // SOLR-9174 - assertQ("test minShouldMatch=1<-1 with explicit OR, one impossible clause, and no explicit q.op", - req("q", "barbie OR (hair AND nonexistentword)", + assertQ( + "test minShouldMatch=1<-1 with explicit OR, one impossible clause, and no explicit q.op", + req( + "q", "barbie OR (hair AND nonexistentword)", "qf", "text_sw", "mm", "1<-1", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=3]"); - - assertQ("test local minShouldMatch=1<-1 with explicit OR, one impossible clause, and no explicit q.op", - req("q", "{!edismax qf=text_sw mm=1<-1 sow=" + sow + " v='barbie OR (hair AND nonexistentword)'}") - , "*[count(//doc)=3]"); + "defType", "edismax"), + "*[count(//doc)=3]"); + + assertQ( + "test local minShouldMatch=1<-1 with explicit OR, one impossible clause, and no explicit q.op", + req( + "q", + "{!edismax qf=text_sw mm=1<-1 sow=" + + sow + + " v='barbie OR (hair AND nonexistentword)'}"), + "*[count(//doc)=3]"); } } /* SOLR-8812 */ @Test public void testDefaultMM() throws Exception { - // Ensure MM is off when explicit operators (+/-/OR/NOT) are used and no explicit mm spec is specified. + // Ensure MM is off when explicit operators (+/-/OR/NOT) are used and no explicit mm spec is + // specified. 
for (String sow : Arrays.asList("true", "false")) { - assertQ("Explicit OR in query with no explicit mm and q.op=AND => mm = 0%", - req("q", "oil OR stocks", + assertQ( + "Explicit OR in query with no explicit mm and q.op=AND => mm = 0%", + req( + "q", "oil OR stocks", "qf", "text_sw", "q.op", "AND", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=4]"); - assertQ("Explicit 'or' in query with lowercaseOperators=true, no explicit mm and q.op=AND => mm = 0%", - req("q", "oil or stocks", + "defType", "edismax"), + "*[count(//doc)=4]"); + assertQ( + "Explicit 'or' in query with lowercaseOperators=true, no explicit mm and q.op=AND => mm = 0%", + req( + "q", "oil or stocks", "qf", "text_sw", "q.op", "AND", "lowercaseOperators", "true", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=4]"); - assertQ("Explicit OR in query with no explicit mm and no explicit q.op => mm = 0%", - req("q", "oil OR stocks", + "defType", "edismax"), + "*[count(//doc)=4]"); + assertQ( + "Explicit OR in query with no explicit mm and no explicit q.op => mm = 0%", + req( + "q", "oil OR stocks", "qf", "text_sw", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=4]"); - assertQ("No operator in query with no explicit mm and q.op=OR => mm = 0%", - req("q", "oil stocks", + "defType", "edismax"), + "*[count(//doc)=4]"); + assertQ( + "No operator in query with no explicit mm and q.op=OR => mm = 0%", + req( + "q", "oil stocks", "qf", "text_sw", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=4]"); - assertQ("No operator in query with no explicit mm and q.op=AND => mm = 100%", - req("q", "oil stocks", + "defType", "edismax"), + "*[count(//doc)=4]"); + assertQ( + "No operator in query with no explicit mm and q.op=AND => mm = 100%", + req( + "q", "oil stocks", "qf", "text_sw", "q.op", "AND", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=1]"); - assertQ("No operator in query with no explicit mm and q.op=OR => mm = 0%", - req("q", "oil stocks", + "defType", "edismax"), + "*[count(//doc)=1]"); + assertQ( + "No operator in query with no explicit mm and q.op=OR => mm = 0%", + req( + "q", "oil stocks", "qf", "text_sw", "q.op", "OR", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=4]"); + "defType", "edismax"), + "*[count(//doc)=4]"); - assertQ("Explicit '-' operator in query with no explicit mm and no explicit q.op => mm = 0%", - req("q", "hair ties -barbie", + assertQ( + "Explicit '-' operator in query with no explicit mm and no explicit q.op => mm = 0%", + req( + "q", "hair ties -barbie", "qf", "text_sw", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=3]"); - assertQ("Explicit NOT in query with no explicit mm and no explicit q.op => mm = 0%", - req("q", "hair ties NOT barbie", + "defType", "edismax"), + "*[count(//doc)=3]"); + assertQ( + "Explicit NOT in query with no explicit mm and no explicit q.op => mm = 0%", + req( + "q", "hair ties NOT barbie", "qf", "text_sw", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=3]"); + "defType", "edismax"), + "*[count(//doc)=3]"); - assertQ("Explicit '-' operator in query with no explicit mm and q.op=OR => mm = 0%", - req("q", "hair ties -barbie", + assertQ( + "Explicit '-' operator in query with no explicit mm and q.op=OR => mm = 0%", + req( + "q", "hair ties -barbie", "qf", "text_sw", "q.op", "OR", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=3]"); - assertQ("Explicit NOT in query with no explicit mm and q.op=OR => mm = 0%", - req("q", "hair ties NOT barbie", + "defType", "edismax"), + "*[count(//doc)=3]"); + assertQ( + 
"Explicit NOT in query with no explicit mm and q.op=OR => mm = 0%", + req( + "q", "hair ties NOT barbie", "qf", "text_sw", "q.op", "OR", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=3]"); + "defType", "edismax"), + "*[count(//doc)=3]"); - assertQ("Explicit '-' operator in query with no explicit mm and q.op=OR => mm = 0%", - req("q", "hair AND ties -barbie", + assertQ( + "Explicit '-' operator in query with no explicit mm and q.op=OR => mm = 0%", + req( + "q", "hair AND ties -barbie", "qf", "text_sw", "q.op", "OR", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=1]"); - assertQ("Explicit NOT in query with no explicit mm and q.op=OR => mm = 0%", - req("q", "hair AND ties -barbie", + "defType", "edismax"), + "*[count(//doc)=1]"); + assertQ( + "Explicit NOT in query with no explicit mm and q.op=OR => mm = 0%", + req( + "q", "hair AND ties -barbie", "qf", "text_sw", "q.op", "OR", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=1]"); + "defType", "edismax"), + "*[count(//doc)=1]"); - assertQ("No explicit non-AND operator in query with no explicit mm and q.op=OR => mm = 0%", - req("q", "hair AND ties barbie", + assertQ( + "No explicit non-AND operator in query with no explicit mm and q.op=OR => mm = 0%", + req( + "q", "hair AND ties barbie", "qf", "text_sw", "q.op", "OR", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=2]"); - assertQ("No explicit non-AND operator in query with no explicit mm and q.op=AND => mm = 100%", - req("q", "hair AND ties barbie", + "defType", "edismax"), + "*[count(//doc)=2]"); + assertQ( + "No explicit non-AND operator in query with no explicit mm and q.op=AND => mm = 100%", + req( + "q", "hair AND ties barbie", "qf", "text_sw", "q.op", "AND", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=1]"); - assertQ("No explicit non-AND operator in query with no explicit mm and no explicit q.op => mm = 0%", - req("q", "hair AND ties barbie", + "defType", "edismax"), + "*[count(//doc)=1]"); + assertQ( + "No explicit non-AND operator in query with no explicit mm and no explicit q.op => mm = 0%", + req( + "q", "hair AND ties barbie", "qf", "text_sw", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=2]"); - assertQ("No explicit non-AND operator in query with no explicit mm and no explicit q.op => mm = 0%", - req("q", "hair and ties barbie", + "defType", "edismax"), + "*[count(//doc)=2]"); + assertQ( + "No explicit non-AND operator in query with no explicit mm and no explicit q.op => mm = 0%", + req( + "q", "hair and ties barbie", "qf", "text_sw", "lowercaseOperators", "true", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=2]"); + "defType", "edismax"), + "*[count(//doc)=2]"); - assertQ("Explicit '-' operator in query with no explicit mm and q.op=AND => mm = 100%", - req("q", "hair ties -barbie", + assertQ( + "Explicit '-' operator in query with no explicit mm and q.op=AND => mm = 100%", + req( + "q", "hair ties -barbie", "qf", "text_sw", "q.op", "AND", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=1]"); - assertQ("Explicit NOT in query with no explicit mm and q.op=AND => mm = 100%", - req("q", "hair ties NOT barbie", + "defType", "edismax"), + "*[count(//doc)=1]"); + assertQ( + "Explicit NOT in query with no explicit mm and q.op=AND => mm = 100%", + req( + "q", "hair ties NOT barbie", "qf", "text_sw", "q.op", "AND", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=1]"); + "defType", "edismax"), + "*[count(//doc)=1]"); - assertQ("Explicit OR in query with no explicit mm and q.op=AND => mm = 0%", - req("q", 
"hair OR ties barbie", + assertQ( + "Explicit OR in query with no explicit mm and q.op=AND => mm = 0%", + req( + "q", "hair OR ties barbie", "qf", "text_sw", "q.op", "AND", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=3]"); - assertQ("Explicit OR in query with no explicit mm and q.op=OR => mm = 0%", - req("q", "hair OR ties barbie", + "defType", "edismax"), + "*[count(//doc)=3]"); + assertQ( + "Explicit OR in query with no explicit mm and q.op=OR => mm = 0%", + req( + "q", "hair OR ties barbie", "qf", "text_sw", "q.op", "OR", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=6]"); - assertQ("Explicit OR in query with no explicit mm and no explicit q.op => mm = 0%", - req("q", "hair OR ties barbie", + "defType", "edismax"), + "*[count(//doc)=6]"); + assertQ( + "Explicit OR in query with no explicit mm and no explicit q.op => mm = 0%", + req( + "q", "hair OR ties barbie", "qf", "text_sw", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=6]"); + "defType", "edismax"), + "*[count(//doc)=6]"); - assertQ("Explicit '+' operator in query with no explicit mm and q.op=AND => mm = 0%", - req("q", "hair ties +barbie", + assertQ( + "Explicit '+' operator in query with no explicit mm and q.op=AND => mm = 0%", + req( + "q", "hair ties +barbie", "qf", "text_sw", "q.op", "AND", "sow", sow, - "defType", "edismax") - , "*[count(//doc)=1]"); + "defType", "edismax"), + "*[count(//doc)=1]"); } } @@ -1668,31 +2226,33 @@ public void testEdismaxSimpleExtension() throws SyntaxError { params.set("qf_fr", "subject_fr title_fr^5"); params.set("qf_en", "subject_en title_en^5"); params.set("qf_es", "subject_es title_es^5"); - - MultilanguageQueryParser parser = new MultilanguageQueryParser("foo bar", new ModifiableSolrParams(), params, req(params)); + + MultilanguageQueryParser parser = + new MultilanguageQueryParser("foo bar", new ModifiableSolrParams(), params, req(params)); Query query = parser.parse(); assertNotNull(query); assertTrue(containsClause(query, "title", "foo", 5, false)); assertTrue(containsClause(query, "title", "bar", 5, false)); assertTrue(containsClause(query, "subject", "foo", 1, false)); assertTrue(containsClause(query, "subject", "bar", 1, false)); - + params.set("language", "es"); - parser = new MultilanguageQueryParser("foo bar", new ModifiableSolrParams(), params, req(params)); + parser = + new MultilanguageQueryParser("foo bar", new ModifiableSolrParams(), params, req(params)); query = parser.parse(); assertNotNull(query); assertTrue(containsClause(query, "title_es", "foo", 5, false)); assertTrue(containsClause(query, "title_es", "bar", 5, false)); assertTrue(containsClause(query, "subject_es", "foo", 1, false)); assertTrue(containsClause(query, "subject_es", "bar", 1, false)); - - FuzzyDismaxQParser parser2 = new FuzzyDismaxQParser("foo bar absence", new ModifiableSolrParams(), params, req(params)); + + FuzzyDismaxQParser parser2 = + new FuzzyDismaxQParser("foo bar absence", new ModifiableSolrParams(), params, req(params)); query = parser2.parse(); assertNotNull(query); assertTrue(containsClause(query, "title", "foo", 5, false)); assertTrue(containsClause(query, "title", "bar", 5, false)); assertTrue(containsClause(query, "title", "absence", 5, true)); - } @Test @@ -1700,68 +2260,135 @@ public void testSplitOnWhitespace_Basic() throws Exception { // The "text_sw" field has synonyms loaded from synonyms.txt // retrieve the single document containing literal "wifi" - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wifi", "sow","true") - , 
"/response/numFound==1" - , "/response/docs/[0]/id=='72'" - ); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wifi", "sow", "true"), + "/response/numFound==1", + "/response/docs/[0]/id=='72'"); // trigger the "wi fi => wifi" synonym - assertJQ(req("qf", "text_sw title", "defType","edismax", "q","wi fi", "sow","false") - , "/response/numFound==1" - , "/response/docs/[0]/id=='72'" - ); - assertJQ(req("qf", "text_sw title", "defType","edismax", "q","wi fi", "sow","true") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi") // default sow=false - , "/response/numFound==1" - , "/response/docs/[0]/id=='72'" - ); - - assertJQ(req("qf","text_sw title", "q","{!edismax sow=false}wi fi") - , "/response/numFound==1" - , "/response/docs/[0]/id=='72'" - ); - assertJQ(req("qf", "text_sw title", "q","{!edismax sow=true}wi fi") - , "/response/numFound==0" - ); - assertJQ(req("qf", "text_sw title", "q", "{!edismax}wi fi") // default sow=false - , "/response/numFound==1" - , "/response/docs/[0]/id=='72'" - ); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi fi", "sow", "false"), + "/response/numFound==1", + "/response/docs/[0]/id=='72'"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi fi", "sow", "true"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi fi") // default sow=false + , + "/response/numFound==1", + "/response/docs/[0]/id=='72'"); + + assertJQ( + req("qf", "text_sw title", "q", "{!edismax sow=false}wi fi"), + "/response/numFound==1", + "/response/docs/[0]/id=='72'"); + assertJQ(req("qf", "text_sw title", "q", "{!edismax sow=true}wi fi"), "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "q", "{!edismax}wi fi") // default sow=false + , + "/response/numFound==1", + "/response/docs/[0]/id=='72'"); String parsedquery; - parsedquery = getParsedQuery(req("qf", "name title", - "q", "barking curds of stigma", - "defType", "edismax", - "sow", "false", - "debugQuery", "true")); - assertThat(parsedquery, anyOf(containsString("((name:barking | title:barking))"), containsString("((title:barking | name:barking))"))); - assertThat(parsedquery, anyOf(containsString("((name:curds | title:curds))"), containsString("((title:curds | name:curds))"))); - assertThat(parsedquery, anyOf(containsString("((name:of | title:of))"), containsString("((title:of | name:of))"))); - assertThat(parsedquery, anyOf(containsString("((name:stigma | title:stigma))"), containsString("((title:stigma | name:stigma))"))); - parsedquery = getParsedQuery(req("qf", "name title", - "q", "barking curds of stigma", - "defType", "edismax", - "sow", "true", - "debugQuery", "true")); - assertThat(parsedquery, anyOf(containsString("((name:barking | title:barking))"), containsString("((title:barking | name:barking))"))); - assertThat(parsedquery, anyOf(containsString("((name:curds | title:curds))"), containsString("((title:curds | name:curds))"))); - assertThat(parsedquery, anyOf(containsString("((name:of | title:of))"), containsString("((title:of | name:of))"))); - assertThat(parsedquery, anyOf(containsString("((name:stigma | title:stigma))"), containsString("((title:stigma | name:stigma))"))); - parsedquery = getParsedQuery(req("qf", "name title", - "q", "barking curds of stigma", - "defType", "edismax", - "debugQuery", "true")); // Default sow=false - assertThat(parsedquery, anyOf(containsString("((name:barking | title:barking))"), containsString("((title:barking 
| name:barking))"))); - assertThat(parsedquery, anyOf(containsString("((name:curds | title:curds))"), containsString("((title:curds | name:curds))"))); - assertThat(parsedquery, anyOf(containsString("((name:of | title:of))"), containsString("((title:of | name:of))"))); - assertThat(parsedquery, anyOf(containsString("((name:stigma | title:stigma))"), containsString("((title:stigma | name:stigma))"))); + parsedquery = + getParsedQuery( + req( + "qf", + "name title", + "q", + "barking curds of stigma", + "defType", + "edismax", + "debugQuery", + "true")); // Default sow=false + assertThat( + parsedquery, + anyOf( + containsString("((name:barking | title:barking))"), + containsString("((title:barking | name:barking))"))); + assertThat( + parsedquery, + anyOf( + containsString("((name:curds | title:curds))"), + containsString("((title:curds | name:curds))"))); + assertThat( + parsedquery, + anyOf(containsString("((name:of | title:of))"), containsString("((title:of | name:of))"))); + assertThat( + parsedquery, + anyOf( + containsString("((name:stigma | title:stigma))"), + containsString("((title:stigma | name:stigma))"))); } private static String getParsedQuery(SolrQueryRequest request) throws Exception { String resp = h.query(request); - return (String) BaseTestHarness.evaluateXPath(resp, "//str[@name='parsedquery']/text()", XPathConstants.STRING); + return (String) + BaseTestHarness.evaluateXPath( + resp, "//str[@name='parsedquery']/text()", XPathConstants.STRING); } public void testSplitOnWhitespace_shouldRespectMinimumShouldMatch() { @@ -1775,81 +2402,225 @@ public void testSplitOnWhitespace_shouldRespectMinimumShouldMatch() { * i.e. a document to be a match must contain all the mm query terms in a single field at least once * See https://issues.apache.org/jira/browse/SOLR-12779 for additional details */ - assertQ(req("defType", "edismax", "mm", "100%", "q", "Terminator: 100", "qf", "movies_t foo_i", "sow", "true"), - nor); //no document contains both terms, in a field or in multiple fields - assertQ(req("defType", "edismax", "mm", "100%", "q", "Terminator: 100", 
"qf", "movies_t foo_i", "sow", "false"), - nor); //no document contains both terms in a field - - assertQ(req("defType", "edismax", "mm", "100%", "q", "Terminator: 8", "qf", "movies_t foo_i", "sow", "true"), - oner); //document 46 contains both terms, Terminator in movies_t and 8 in foo_i - assertQ(req("defType", "edismax", "mm", "100%", "q", "Terminator: 8", "qf", "movies_t foo_i", "sow", "false"), - nor); //no document contains both terms in a field - - assertQ(req("defType", "edismax", "mm", "100%", "q", "mission impossible Terminator: 8", "qf", "movies_t foo_i", "sow", "true"), - oner); //document 46 contains all terms, mission, impossible, Terminator in movies_t and 8 in foo_i - assertQ(req("defType", "edismax", "mm", "100%", "q", "mission impossible Terminator: 8", "qf", "movies_t foo_i", "sow", "false"), - nor); //no document contains all terms, in a field + assertQ( + req( + "defType", + "edismax", + "mm", + "100%", + "q", + "Terminator: 100", + "qf", + "movies_t foo_i", + "sow", + "true"), + nor); // no document contains both terms, in a field or in multiple field + assertQ( + req( + "defType", + "edismax", + "mm", + "100%", + "q", + "Terminator: 100", + "qf", + "movies_t foo_i", + "sow", + "false"), + nor); // no document contains both terms in a field + + assertQ( + req( + "defType", + "edismax", + "mm", + "100%", + "q", + "Terminator: 8", + "qf", + "movies_t foo_i", + "sow", + "true"), + oner); // document 46 contains both terms, Terminator in movies_t and 8 in foo_i + assertQ( + req( + "defType", + "edismax", + "mm", + "100%", + "q", + "Terminator: 8", + "qf", + "movies_t foo_i", + "sow", + "false"), + nor); // no document contains both terms in a field + + assertQ( + req( + "defType", + "edismax", + "mm", + "100%", + "q", + "mission impossible Terminator: 8", + "qf", + "movies_t foo_i", + "sow", + "true"), + oner); // document 46 contains all terms, mission, impossible, Terminator in movies_t and 8 + // in foo_i + assertQ( + req( + "defType", + "edismax", + "mm", + "100%", + "q", + "mission impossible Terminator: 8", + "qf", + "movies_t foo_i", + "sow", + "false"), + nor); // no document contains all terms, in a field } - + public void testSplitOnWhitespace_Different_Field_Analysis() throws Exception { - // When the *structure* of produced queries is different in each field, + // When the *structure* of produced queries is different in each field, // sow=true produces boolean-of-dismax query structure, // and sow=false produces dismax-of-boolean query structure. 
- String parsedquery = getParsedQuery(req("qf", "text_sw title", - "q", "olive the other", - "defType", "edismax", - "sow", "true", - "debugQuery", "true")); - assertThat(parsedquery, anyOf(containsString("((text_sw:oliv | title:olive))"), containsString("((title:olive | text_sw:oliv))"))); + String parsedquery = + getParsedQuery( + req( + "qf", + "text_sw title", + "q", + "olive the other", + "defType", + "edismax", + "sow", + "true", + "debugQuery", + "true")); + assertThat( + parsedquery, + anyOf( + containsString("((text_sw:oliv | title:olive))"), + containsString("((title:olive | text_sw:oliv))"))); assertThat(parsedquery, containsString("DisjunctionMaxQuery((title:the))")); - assertThat(parsedquery, anyOf(containsString("((text_sw:other | title:other))"), containsString("((title:other | text_sw:other))"))); - - parsedquery = getParsedQuery(req("qf", "text_sw title", - "q", "olive the other", - "defType", "edismax", - "sow", "false", - "debugQuery", "true")); - assertThat(parsedquery, anyOf( - containsString("(((text_sw:oliv text_sw:other) | (title:olive title:the title:other)))"), - containsString("(((title:olive title:the title:other) | (text_sw:oliv text_sw:other)))") - )); - - // When the fields' analysis produces different query structures, mm processing is always done on the boolean query. + assertThat( + parsedquery, + anyOf( + containsString("((text_sw:other | title:other))"), + containsString("((title:other | text_sw:other))"))); + + parsedquery = + getParsedQuery( + req( + "qf", + "text_sw title", + "q", + "olive the other", + "defType", + "edismax", + "sow", + "false", + "debugQuery", + "true")); + assertThat( + parsedquery, + anyOf( + containsString( + "(((text_sw:oliv text_sw:other) | (title:olive title:the title:other)))"), + containsString( + "(((title:olive title:the title:other) | (text_sw:oliv text_sw:other)))"))); + + // When the fields' analysis produces different query structures, mm processing is always done on the + // boolean query. // sow=true produces (boolean-of-dismax)~ query structure, // and sow=false produces dismax-of-(boolean)~ query structure. 
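// [Editor's sketch, not part of the patch] Where the mm coordination (~n) lands in the
// sow=false shape: inside each per-field boolean, so each field requires all of its own
// terms, matching the ~2/~3 assertions below. Illustrative Lucene code:
import java.util.Arrays;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class MmPlacement {
  static Query term(String field, String text) {
    return new TermQuery(new Term(field, text));
  }

  public static void main(String[] args) {
    BooleanQuery.Builder sw = new BooleanQuery.Builder();
    sw.add(term("text_sw", "oliv"), Occur.SHOULD);
    sw.add(term("text_sw", "other"), Occur.SHOULD);
    sw.setMinimumNumberShouldMatch(2); // (text_sw:oliv text_sw:other)~2

    BooleanQuery.Builder title = new BooleanQuery.Builder();
    title.add(term("title", "olive"), Occur.SHOULD);
    title.add(term("title", "the"), Occur.SHOULD);
    title.add(term("title", "other"), Occur.SHOULD);
    title.setMinimumNumberShouldMatch(3); // (title:olive title:the title:other)~3

    // The dismax wraps the two coordinated booleans: dismax-of-(boolean)~
    Query q = new DisjunctionMaxQuery(Arrays.asList(sw.build(), title.build()), 0f);
    System.out.println(q);
  }
}
// [end editor's sketch]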
- parsedquery = getParsedQuery(req("qf", "text_sw title", - "q", "olive the other", - "defType", "edismax", - "sow", "true", - "mm", "100%", - "debugQuery", "true")); - assertThat(parsedquery, anyOf(containsString("((text_sw:oliv | title:olive))"), containsString("((title:olive | text_sw:oliv))"))); + parsedquery = + getParsedQuery( + req( + "qf", + "text_sw title", + "q", + "olive the other", + "defType", + "edismax", + "sow", + "true", + "mm", + "100%", + "debugQuery", + "true")); + assertThat( + parsedquery, + anyOf( + containsString("((text_sw:oliv | title:olive))"), + containsString("((title:olive | text_sw:oliv))"))); assertThat(parsedquery, containsString("DisjunctionMaxQuery((title:the))")); - assertThat(parsedquery, anyOf(containsString("((text_sw:other | title:other))"), containsString("((title:other | text_sw:other))"))); + assertThat( + parsedquery, + anyOf( + containsString("((text_sw:other | title:other))"), + containsString("((title:other | text_sw:other))"))); assertThat(parsedquery, containsString("))~3")); - parsedquery = getParsedQuery(req("qf", "text_sw title", - "q", "olive the other", - "defType", "edismax", - "sow", "false", - "mm", "100%", - "debugQuery", "true")); - assertThat(parsedquery, anyOf( - containsString("(((text_sw:oliv text_sw:other)~2) | ((title:olive title:the title:other)~3)))"), - containsString("(((title:olive title:the title:other)~3) | ((text_sw:oliv text_sw:other)~2)))") - )); - - // When the *structure* of produced queries is the same in each field, - // sow=false/true produce the same boolean-of-dismax query structure + parsedquery = + getParsedQuery( + req( + "qf", + "text_sw title", + "q", + "olive the other", + "defType", + "edismax", + "sow", + "false", + "mm", + "100%", + "debugQuery", + "true")); + assertThat( + parsedquery, + anyOf( + containsString( + "(((text_sw:oliv text_sw:other)~2) | ((title:olive title:the title:other)~3)))"), + containsString( + "(((title:olive title:the title:other)~3) | ((text_sw:oliv text_sw:other)~2)))"))); + + // When the *structure* of produced queries is the same in each field, + // sow=false/true produce the same boolean-of-dismax query structure for (String sow : Arrays.asList("true", "false")) { - parsedquery = getParsedQuery(req("qf", "text_sw title", - "q", "olive blah other", - "defType", "edismax", - "sow", sow, - "debugQuery", "true")); - assertThat(parsedquery, anyOf(containsString("((text_sw:oliv | title:olive))"), containsString("((title:olive | text_sw:oliv))"))); - assertThat(parsedquery, anyOf(containsString("((text_sw:blah | title:blah))"), containsString("((title:blah | text_sw:blah))"))); - assertThat(parsedquery, anyOf(containsString("((text_sw:other | title:other))"), containsString("((title:other | text_sw:other))"))); + parsedquery = + getParsedQuery( + req( + "qf", + "text_sw title", + "q", + "olive blah other", + "defType", + "edismax", + "sow", + sow, + "debugQuery", + "true")); + assertThat( + parsedquery, + anyOf( + containsString("((text_sw:oliv | title:olive))"), + containsString("((title:olive | text_sw:oliv))"))); + assertThat( + parsedquery, + anyOf( + containsString("((text_sw:blah | title:blah))"), + containsString("((title:blah | text_sw:blah))"))); + assertThat( + parsedquery, + anyOf( + containsString("((text_sw:other | title:other))"), + containsString("((title:other | text_sw:other))"))); } } @@ -1857,241 +2628,359 @@ public void testOperatorsAndMultiWordSynonyms() throws Exception { // The "text_sw" field has synonyms loaded from synonyms.txt // retrieve the single 
document containing literal "wifi" - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wifi", "sow","true") - , "/response/numFound==1" - , "/response/docs/[0]/id=='72'" - ); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wifi", "sow", "true"), + "/response/numFound==1", + "/response/docs/[0]/id=='72'"); // trigger the "wi fi => wifi" synonym - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi", "sow","false") - , "/response/numFound==1" - , "/response/docs/[0]/id=='72'" - ); - - assertJQ(req("qf","text_sw title", "defType","edismax", "q","+wi fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","-wi fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","!wi fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi* fi", "sow","false") - , "/response/numFound==1" // matches because wi* matches "wifi" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","w? fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi~1 fi", "sow","false") - , "/response/numFound==4" // matches because wi~1 matches ti (stemmed "ties") - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi^2 fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi^=2 fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi +fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi -fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi !fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi*", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi?", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi~1", "sow","false") - , "/response/numFound==4" // matches because fi~1 matches ti (stemmed "ties") - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi^2", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi^=2", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","text_sw:wi fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi text_sw:fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","NOT wi fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi NOT fi", "sow","false") - , "/response/numFound==0" - ); - - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi AND ATM", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","ATM AND wi fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi && ATM", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","ATM && 
wi fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","(wi fi) AND ATM", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","ATM AND (wi fi)", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","(wi fi) && ATM", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","ATM && (wi fi)", "sow","false") - , "/response/numFound==1" - ); - - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi OR NotThereAtAll", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","NotThereAtAll OR wi fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi || NotThereAtAll", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","NotThereAtAll || wi fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","(wi fi) OR NotThereAtAll", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","NotThereAtAll OR (wi fi)", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","(wi fi) || NotThereAtAll", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","NotThereAtAll || (wi fi)", "sow","false") - , "/response/numFound==1" - ); - - assertJQ(req("qf","text_sw title", "defType","edismax", "q","\"wi\" fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi \"fi\"", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","(wi) fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi (fi)", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","/wi/ fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi /fi/", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","(wi fi)", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","+(wi fi)", "sow","false") - , "/response/numFound==1" - ); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi fi", "sow", "false"), + "/response/numFound==1", + "/response/docs/[0]/id=='72'"); + + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "+wi fi", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "-wi fi", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "!wi fi", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi* fi", "sow", "false"), + "/response/numFound==1" // matches because wi* matches "wifi" + ); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "w? 
fi", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi~1 fi", "sow", "false"), + "/response/numFound==4" // matches because wi~1 matches ti (stemmed "ties") + ); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi^2 fi", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi^=2 fi", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi +fi", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi -fi", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi !fi", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi fi*", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi fi?", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi fi~1", "sow", "false"), + "/response/numFound==4" // matches because fi~1 matches ti (stemmed "ties") + ); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi fi^2", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi fi^=2", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "text_sw:wi fi", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi text_sw:fi", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "NOT wi fi", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi NOT fi", "sow", "false"), + "/response/numFound==0"); + + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi fi AND ATM", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "ATM AND wi fi", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi fi && ATM", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "ATM && wi fi", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "(wi fi) AND ATM", "sow", "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "ATM AND (wi fi)", "sow", "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "(wi fi) && ATM", "sow", "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "ATM && (wi fi)", "sow", "false"), + "/response/numFound==1"); + + assertJQ( + req( + "qf", + "text_sw title", + "defType", + "edismax", + "q", + "wi fi OR NotThereAtAll", + "sow", + "false"), + "/response/numFound==0"); + assertJQ( + req( + "qf", + "text_sw title", + "defType", + "edismax", + "q", + "NotThereAtAll OR wi fi", + "sow", + "false"), + "/response/numFound==0"); + assertJQ( + req( + "qf", + "text_sw title", + "defType", + "edismax", + "q", + "wi fi || NotThereAtAll", + "sow", + "false"), + "/response/numFound==0"); + 
assertJQ( + req( + "qf", + "text_sw title", + "defType", + "edismax", + "q", + "NotThereAtAll || wi fi", + "sow", + "false"), + "/response/numFound==0"); + assertJQ( + req( + "qf", + "text_sw title", + "defType", + "edismax", + "q", + "(wi fi) OR NotThereAtAll", + "sow", + "false"), + "/response/numFound==1"); + assertJQ( + req( + "qf", + "text_sw title", + "defType", + "edismax", + "q", + "NotThereAtAll OR (wi fi)", + "sow", + "false"), + "/response/numFound==1"); + assertJQ( + req( + "qf", + "text_sw title", + "defType", + "edismax", + "q", + "(wi fi) || NotThereAtAll", + "sow", + "false"), + "/response/numFound==1"); + assertJQ( + req( + "qf", + "text_sw title", + "defType", + "edismax", + "q", + "NotThereAtAll || (wi fi)", + "sow", + "false"), + "/response/numFound==1"); + + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "\"wi\" fi", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi \"fi\"", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "(wi) fi", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi (fi)", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "/wi/ fi", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi /fi/", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "(wi fi)", "sow", "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "+(wi fi)", "sow", "false"), + "/response/numFound==1"); @SuppressWarnings({"rawtypes"}) Map all = (Map) Utils.fromJSONString(h.query(req("q", "*:*", "rows", "0", "wt", "json"))); - int totalDocs = Integer.parseInt(((Map)all.get("response")).get("numFound").toString()); + int totalDocs = Integer.parseInt(((Map) all.get("response")).get("numFound").toString()); int allDocsExceptOne = totalDocs - 1; - assertJQ(req("qf","text_sw title", "defType","edismax", "q","-(wi fi)", "sow","false") - , "/response/numFound==" + allDocsExceptOne // one doc contains "wifi" in the text_sw field - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","!(wi fi)", "sow","false") - , "/response/numFound==" + allDocsExceptOne // one doc contains "wifi" in the text_sw field - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","NOT (wi fi)", "sow","false") - , "/response/numFound==" + allDocsExceptOne // one doc contains "wifi" in the text_sw field - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","(wi fi)^2", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","(wi fi)^=2", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","text_sw:(wi fi)", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","+ATM wi fi", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","-ATM wi fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","-NotThereAtAll wi fi", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","!ATM wi fi", "sow","false") - , 
"/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","!NotThereAtAll wi fi", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","NOT ATM wi fi", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","NOT NotThereAtAll wi fi", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","AT* wi fi", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","AT? wi fi", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","\"ATM\" wi fi", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi +ATM", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi -ATM", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi -NotThereAtAll", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi !ATM", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi !NotThereAtAll", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi NOT ATM", "sow","false") - , "/response/numFound==0" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi NOT NotThereAtAll", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi AT*", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi AT?", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","wi fi \"ATM\"", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","\"wi fi\"~2", "sow","false") - , "/response/numFound==1" - ); - assertJQ(req("qf","text_sw title", "defType","edismax", "q","text_sw:\"wi fi\"", "sow","false") - , "/response/numFound==1" - ); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "-(wi fi)", "sow", "false"), + "/response/numFound==" + allDocsExceptOne // one doc contains "wifi" in the text_sw field + ); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "!(wi fi)", "sow", "false"), + "/response/numFound==" + allDocsExceptOne // one doc contains "wifi" in the text_sw field + ); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "NOT (wi fi)", "sow", "false"), + "/response/numFound==" + allDocsExceptOne // one doc contains "wifi" in the text_sw field + ); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "(wi fi)^2", "sow", "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "(wi fi)^=2", "sow", "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "text_sw:(wi fi)", "sow", "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "+ATM wi fi", "sow", "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "-ATM wi fi", "sow", "false"), + 
"/response/numFound==0"); + assertJQ( + req( + "qf", + "text_sw title", + "defType", + "edismax", + "q", + "-NotThereAtAll wi fi", + "sow", + "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "!ATM wi fi", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req( + "qf", + "text_sw title", + "defType", + "edismax", + "q", + "!NotThereAtAll wi fi", + "sow", + "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "NOT ATM wi fi", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req( + "qf", + "text_sw title", + "defType", + "edismax", + "q", + "NOT NotThereAtAll wi fi", + "sow", + "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "AT* wi fi", "sow", "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "AT? wi fi", "sow", "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "\"ATM\" wi fi", "sow", "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi fi +ATM", "sow", "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi fi -ATM", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req( + "qf", + "text_sw title", + "defType", + "edismax", + "q", + "wi fi -NotThereAtAll", + "sow", + "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi fi !ATM", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req( + "qf", + "text_sw title", + "defType", + "edismax", + "q", + "wi fi !NotThereAtAll", + "sow", + "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi fi NOT ATM", "sow", "false"), + "/response/numFound==0"); + assertJQ( + req( + "qf", + "text_sw title", + "defType", + "edismax", + "q", + "wi fi NOT NotThereAtAll", + "sow", + "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi fi AT*", "sow", "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi fi AT?", "sow", "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "wi fi \"ATM\"", "sow", "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "\"wi fi\"~2", "sow", "false"), + "/response/numFound==1"); + assertJQ( + req("qf", "text_sw title", "defType", "edismax", "q", "text_sw:\"wi fi\"", "sow", "false"), + "/response/numFound==1"); } public void testAutoGeneratePhraseQueries() throws Exception { @@ -2110,7 +2999,9 @@ public void testAutoGeneratePhraseQueries() throws Exception { for (SolrParams params : Arrays.asList(noSowParams, sowFalseParams)) { try (SolrQueryRequest req = req(params)) { - QParser qParser = QParser.getParser("text:grackle", "edismax", req); // "text" has autoGeneratePhraseQueries="true" + QParser qParser = + QParser.getParser( + "text:grackle", "edismax", req); // "text" has autoGeneratePhraseQueries="true" Query q = qParser.getQuery(); assertEquals("+((text:\"crow blackbird\" text:grackl))", q.toString()); } @@ -2122,91 +3013,119 @@ public void testAutoGeneratePhraseQueries() throws Exception { } for (SolrParams params : Arrays.asList(noSowParams, sowTrueParams, sowFalseParams)) { try 
(SolrQueryRequest req = req(params)) { - QParser qParser = QParser.getParser("text_sw:grackle", "edismax", req); // "text_sw" doesn't specify autoGeneratePhraseQueries => default false + QParser qParser = + QParser.getParser( + "text_sw:grackle", + "edismax", + req); // "text_sw" doesn't specify autoGeneratePhraseQueries => default false Query q = qParser.getQuery(); assertEquals("+(((+text_sw:crow +text_sw:blackbird) text_sw:grackl))", q.toString()); } } - Stream.of(noSowParams, sowTrueParams, sowFalseParams).forEach(p->p.add("qf", "text text_sw")); + Stream.of(noSowParams, sowTrueParams, sowFalseParams).forEach(p -> p.add("qf", "text text_sw")); for (SolrParams params : Arrays.asList(noSowParams, sowFalseParams)) { try (SolrQueryRequest req = req(params)) { QParser qParser = QParser.getParser("grackle", "edismax", req); Query q = qParser.getQuery(); - assertThat(q, booleanQuery(disjunctionOf( - stringQuery("(text:\"crow blackbird\" text:grackl)"), - stringQuery("((+text_sw:crow +text_sw:blackbird) text_sw:grackl)") - ), BooleanClause.Occur.MUST)); + assertThat( + q, + booleanQuery( + disjunctionOf( + stringQuery("(text:\"crow blackbird\" text:grackl)"), + stringQuery("((+text_sw:crow +text_sw:blackbird) text_sw:grackl)")), + BooleanClause.Occur.MUST)); qParser = QParser.getParser("grackle wi fi", "edismax", req); q = qParser.getQuery(); - assertThat(q, booleanQuery(disjunctionOf( - stringQuery("(text:\"crow blackbird\" text:grackl) text:wifi"), - stringQuery("((+text_sw:crow +text_sw:blackbird) text_sw:grackl) text_sw:wifi") - ), BooleanClause.Occur.MUST)); + assertThat( + q, + booleanQuery( + disjunctionOf( + stringQuery("(text:\"crow blackbird\" text:grackl) text:wifi"), + stringQuery( + "((+text_sw:crow +text_sw:blackbird) text_sw:grackl) text_sw:wifi")), + BooleanClause.Occur.MUST)); } } - + try (SolrQueryRequest req = req(sowTrueParams)) { QParser qParser = QParser.getParser("grackle", "edismax", req); Query q = qParser.getQuery(); - assertThat(q, booleanQuery(disjunctionOf( - stringQuery("text:\"crow blackbird\" text:grackl"), - stringQuery("((+text_sw:crow +text_sw:blackbird) text_sw:grackl)") - ), BooleanClause.Occur.MUST)); + assertThat( + q, + booleanQuery( + disjunctionOf( + stringQuery("text:\"crow blackbird\" text:grackl"), + stringQuery("((+text_sw:crow +text_sw:blackbird) text_sw:grackl)")), + BooleanClause.Occur.MUST)); qParser = QParser.getParser("grackle wi fi", "edismax", req); q = qParser.getQuery(); - assertThat(q, booleanQuery(booleanQuery( - disjunctionOf(termQuery("text", "wi"), termQuery("text_sw", "wi")), - disjunctionOf(termQuery("text", "fi"), termQuery("text_sw", "fi")), - disjunctionOf( - stringQuery("((+text_sw:crow +text_sw:blackbird) text_sw:grackl)"), - booleanQuery(phraseQuery("text", "crow blackbird"), termQuery("text", "grackl")) - ) - ), BooleanClause.Occur.MUST)); + assertThat( + q, + booleanQuery( + booleanQuery( + disjunctionOf(termQuery("text", "wi"), termQuery("text_sw", "wi")), + disjunctionOf(termQuery("text", "fi"), termQuery("text_sw", "fi")), + disjunctionOf( + stringQuery("((+text_sw:crow +text_sw:blackbird) text_sw:grackl)"), + booleanQuery( + phraseQuery("text", "crow blackbird"), termQuery("text", "grackl")))), + BooleanClause.Occur.MUST)); } } - + public void testSowFalseWithBoost() throws Exception { try (SolrQueryRequest req = req("sow", "false", "qf", "subject title")) { QParser qParser = QParser.getParser("one two", "edismax", req); Query q = qParser.getQuery(); - assertThat(q, booleanQuery(booleanQuery( - 
disjunctionOf(termQuery("title", "one"), termQuery("subject", "on")), - disjunctionOf(termQuery("title", "two"), termQuery("subject", "two")) - ), BooleanClause.Occur.MUST)); + assertThat( + q, + booleanQuery( + booleanQuery( + disjunctionOf(termQuery("title", "one"), termQuery("subject", "on")), + disjunctionOf(termQuery("title", "two"), termQuery("subject", "two"))), + BooleanClause.Occur.MUST)); } try (SolrQueryRequest req = req("sow", "false", "qf", "subject title^5")) { QParser qParser = QParser.getParser("one two", "edismax", req); Query q = qParser.getQuery(); - assertThat(q, booleanQuery(booleanQuery( - disjunctionOf(boosted("title", "one", 5), termQuery("subject", "on")), - disjunctionOf(boosted("title", "two", 5), termQuery("subject", "two")) - ), BooleanClause.Occur.MUST)); + assertThat( + q, + booleanQuery( + booleanQuery( + disjunctionOf(boosted("title", "one", 5), termQuery("subject", "on")), + disjunctionOf(boosted("title", "two", 5), termQuery("subject", "two"))), + BooleanClause.Occur.MUST)); } try (SolrQueryRequest req = req("sow", "false", "qf", "subject^3 title")) { QParser qParser = QParser.getParser("one two", "edismax", req); Query q = qParser.getQuery(); - assertThat(q, booleanQuery(booleanQuery( - disjunctionOf(termQuery("title", "one"), boosted("subject", "on", 3)), - disjunctionOf(termQuery("title", "two"), boosted("subject", "two", 3)) - ), BooleanClause.Occur.MUST)); + assertThat( + q, + booleanQuery( + booleanQuery( + disjunctionOf(termQuery("title", "one"), boosted("subject", "on", 3)), + disjunctionOf(termQuery("title", "two"), boosted("subject", "two", 3))), + BooleanClause.Occur.MUST)); } try (SolrQueryRequest req = req("sow", "false", "qf", "subject^10 title^20")) { QParser qParser = QParser.getParser("one two", "edismax", req); Query q = qParser.getQuery(); - assertThat(q, booleanQuery(booleanQuery( - disjunctionOf(boosted("title", "one", 20), boosted("subject", "on", 10)), - disjunctionOf(boosted("title", "two", 20), boosted("subject", "two", 10)) - ), BooleanClause.Occur.MUST)); + assertThat( + q, + booleanQuery( + booleanQuery( + disjunctionOf(boosted("title", "one", 20), boosted("subject", "on", 10)), + disjunctionOf(boosted("title", "two", 20), boosted("subject", "two", 10))), + BooleanClause.Occur.MUST)); } } - - private boolean containsClause(Query query, String field, String value, - int boost, boolean fuzzy) { + private boolean containsClause( + Query query, String field, String value, int boost, boolean fuzzy) { float queryBoost = 1f; if (query instanceof BoostQuery) { @@ -2215,52 +3134,54 @@ private boolean containsClause(Query query, String field, String value, queryBoost = bq.getBoost(); } - if(query instanceof BooleanQuery) { - return containsClause((BooleanQuery)query, field, value, boost, fuzzy); + if (query instanceof BooleanQuery) { + return containsClause((BooleanQuery) query, field, value, boost, fuzzy); } - if(query instanceof DisjunctionMaxQuery) { - return containsClause((DisjunctionMaxQuery)query, field, value, boost, fuzzy); + if (query instanceof DisjunctionMaxQuery) { + return containsClause((DisjunctionMaxQuery) query, field, value, boost, fuzzy); } if (boost != queryBoost) { return false; } - if(query instanceof TermQuery && !fuzzy) { - return containsClause((TermQuery)query, field, value); + if (query instanceof TermQuery && !fuzzy) { + return containsClause((TermQuery) query, field, value); } - if(query instanceof FuzzyQuery && fuzzy) { - return containsClause((FuzzyQuery)query, field, value); + if (query instanceof 
FuzzyQuery && fuzzy) { + return containsClause((FuzzyQuery) query, field, value); } return false; } private boolean containsClause(FuzzyQuery query, String field, String value) { - if(query.getTerm().field().equals(field) && - query.getTerm().bytes().utf8ToString().equals(value)) { + if (query.getTerm().field().equals(field) + && query.getTerm().bytes().utf8ToString().equals(value)) { return true; } return false; } - private boolean containsClause(BooleanQuery query, String field, String value, int boost, boolean fuzzy) { - for(BooleanClause clause:query) { - if(containsClause(clause.getQuery(), field, value, boost, fuzzy)) { + private boolean containsClause( + BooleanQuery query, String field, String value, int boost, boolean fuzzy) { + for (BooleanClause clause : query) { + if (containsClause(clause.getQuery(), field, value, boost, fuzzy)) { return true; } } return false; } - + private boolean containsClause(TermQuery query, String field, String value) { - if(query.getTerm().field().equals(field) && - query.getTerm().bytes().utf8ToString().equals(value)) { + if (query.getTerm().field().equals(field) + && query.getTerm().bytes().utf8ToString().equals(value)) { return true; } return false; } - - private boolean containsClause(DisjunctionMaxQuery query, String field, String value, int boost, boolean fuzzy) { - for(Query disjunct:query.getDisjuncts()) { - if(containsClause(disjunct, field, value, boost, fuzzy)) { + + private boolean containsClause( + DisjunctionMaxQuery query, String field, String value, int boost, boolean fuzzy) { + for (Query disjunct : query.getDisjuncts()) { + if (containsClause(disjunct, field, value, boost, fuzzy)) { return true; } } @@ -2269,51 +3190,47 @@ private boolean containsClause(DisjunctionMaxQuery query, String field, String v static class MultilanguageQueryParser extends ExtendedDismaxQParser { - public MultilanguageQueryParser(String qstr, SolrParams localParams, - SolrParams params, SolrQueryRequest req) { + public MultilanguageQueryParser( + String qstr, SolrParams localParams, SolrParams params, SolrQueryRequest req) { super(qstr, localParams, params, req); } - + @Override - protected ExtendedDismaxConfiguration createConfiguration(String qstr, - SolrParams localParams, SolrParams params, SolrQueryRequest req) { + protected ExtendedDismaxConfiguration createConfiguration( + String qstr, SolrParams localParams, SolrParams params, SolrQueryRequest req) { return new MultilanguageDismaxConfiguration(localParams, params, req); } - + class MultilanguageDismaxConfiguration extends ExtendedDismaxConfiguration { - public MultilanguageDismaxConfiguration(SolrParams localParams, - SolrParams params, SolrQueryRequest req) { + public MultilanguageDismaxConfiguration( + SolrParams localParams, SolrParams params, SolrQueryRequest req) { super(localParams, params, req); String language = params.get("language"); - if(language != null) { - super.queryFields = SolrPluginUtils.parseFieldBoosts(solrParams.getParams("qf_" + language)); + if (language != null) { + super.queryFields = + SolrPluginUtils.parseFieldBoosts(solrParams.getParams("qf_" + language)); } } - } - } - - static class FuzzyDismaxQParser extends ExtendedDismaxQParser { - + private static final float MIN_SIMILARITY = 0.75F; - public FuzzyDismaxQParser(String qstr, SolrParams localParams, - SolrParams params, SolrQueryRequest req) { + public FuzzyDismaxQParser( + String qstr, SolrParams localParams, SolrParams params, SolrQueryRequest req) { super(qstr, localParams, params, req); } - + @Override - protected 
ExtendedSolrQueryParser createEdismaxQueryParser(QParser qParser, - String field) { + protected ExtendedSolrQueryParser createEdismaxQueryParser(QParser qParser, String field) { return new FuzzyQueryParser(qParser, field); } - - class FuzzyQueryParser extends ExtendedSolrQueryParser{ - + + class FuzzyQueryParser extends ExtendedSolrQueryParser { + private Set<String> frequentlyMisspelledWords; public FuzzyQueryParser(QParser parser, String defaultField) { @@ -2322,44 +3239,56 @@ public FuzzyQueryParser(QParser parser, String defaultField) { frequentlyMisspelledWords.add("absence"); frequentlyMisspelledWords.add("absenc"); } - + @Override - protected Query getFieldQuery(String field, - String val, boolean quoted, boolean raw) throws SyntaxError { - if(frequentlyMisspelledWords.contains(val)) { + protected Query getFieldQuery(String field, String val, boolean quoted, boolean raw) + throws SyntaxError { + if (frequentlyMisspelledWords.contains(val)) { return getFuzzyQuery(field, val, MIN_SIMILARITY); } return super.getFieldQuery(field, val, quoted, raw); } - - /** + + /** * Handle multi-term queries by repacking boolean queries with frequently misspelled term * queries rewritten as fuzzy queries. - **/ + */ @Override - protected Query newFieldQuery(Analyzer analyzer, String field, String queryText, - boolean quoted, boolean fieldAutoGenPhraseQueries, - boolean fieldEnableGraphQueries, SynonymQueryStyle synonymQueryStyle) + protected Query newFieldQuery( + Analyzer analyzer, + String field, + String queryText, + boolean quoted, + boolean fieldAutoGenPhraseQueries, + boolean fieldEnableGraphQueries, + SynonymQueryStyle synonymQueryStyle) throws SyntaxError { - Query q = super.newFieldQuery - (analyzer, field, queryText, quoted, fieldAutoGenPhraseQueries, fieldEnableGraphQueries, synonymQueryStyle); + Query q = + super.newFieldQuery( + analyzer, + field, + queryText, + quoted, + fieldAutoGenPhraseQueries, + fieldEnableGraphQueries, + synonymQueryStyle); if (q instanceof BooleanQuery) { boolean rewrittenSubQ = false; // dirty flag: rebuild the repacked query?
BooleanQuery.Builder builder = newBooleanQuery(); - for (BooleanClause clause : ((BooleanQuery)q).clauses()) { + for (BooleanClause clause : ((BooleanQuery) q).clauses()) { Query subQ = clause.getQuery(); if (subQ instanceof TermQuery) { - Term subTerm = ((TermQuery)subQ).getTerm(); + Term subTerm = ((TermQuery) subQ).getTerm(); if (frequentlyMisspelledWords.contains(subTerm.text())) { rewrittenSubQ = true; Query fuzzySubQ = newFuzzyQuery(subTerm, MIN_SIMILARITY, getFuzzyPrefixLength()); clause = newBooleanClause(fuzzySubQ, clause.getOccur()); - } - } + } + } builder.add(clause); } if (rewrittenSubQ) { - builder.setMinimumNumberShouldMatch(((BooleanQuery)q).getMinimumNumberShouldMatch()); + builder.setMinimumNumberShouldMatch(((BooleanQuery) q).getMinimumNumberShouldMatch()); q = builder.build(); } } @@ -2380,17 +3309,27 @@ public void testShingleQueries() throws Exception { assertEquals("Synonym(shingle23:A_B shingle23:A_B_C) shingle23:B_C", q.toString()); } - assertJQ(req("df", "shingle23", "q", "A B C", "sow", "false") - , "/response/numFound==1" - ); + assertJQ(req("df", "shingle23", "q", "A B C", "sow", "false"), "/response/numFound==1"); } /** SOLR-11512 */ @Test public void killInfiniteRecursionParse() throws Exception { - SolrException exception = expectThrows(SolrException.class, () -> { - h.query(req("defType", "edismax", "q", "*", "qq", "{!edismax v=something}", "bq", "{!edismax v=$qq}")); - }); + SolrException exception = + expectThrows( + SolrException.class, + () -> { + h.query( + req( + "defType", + "edismax", + "q", + "*", + "qq", + "{!edismax v=something}", + "bq", + "{!edismax v=$qq}")); + }); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, exception.code()); assertTrue(exception.getMessage().contains("Infinite Recursion detected parsing query")); } @@ -2408,21 +3347,22 @@ public void testValidateQueryFields() throws Exception { // test valid field names String response = h.query(req(params)); - assertThat(response, allOf( - containsString("+(+DisjunctionMaxQuery(("), - containsString("title:olive"), - containsString("(subject:oliv)^3.0"), - containsString(" +DisjunctionMaxQuery(("), - containsString("title:other"), - containsString("(subject:other)^3.0") - )); + assertThat( + response, + allOf( + containsString("+(+DisjunctionMaxQuery(("), + containsString("title:olive"), + containsString("(subject:oliv)^3.0"), + containsString(" +DisjunctionMaxQuery(("), + containsString("title:other"), + containsString("(subject:other)^3.0"))); // test invalid field name params.set("qf", "subject^3 nosuchfield"); SolrException exception = expectThrows(SolrException.class, () -> h.query(req(params))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, exception.code()); - assertEquals("org.apache.solr.search.SyntaxError: Query Field 'nosuchfield' is not a valid field name", + assertEquals( + "org.apache.solr.search.SyntaxError: Query Field 'nosuchfield' is not a valid field name", exception.getMessage()); } - } diff --git a/solr/core/src/test/org/apache/solr/search/TestFieldSortValues.java b/solr/core/src/test/org/apache/solr/search/TestFieldSortValues.java index fcd9ee5f78e..5718f3d69af 100644 --- a/solr/core/src/test/org/apache/solr/search/TestFieldSortValues.java +++ b/solr/core/src/test/org/apache/solr/search/TestFieldSortValues.java @@ -19,20 +19,17 @@ import org.apache.solr.SolrTestCaseJ4; import org.junit.BeforeClass; - -/** - * Test QueryComponent.doFieldSortValues - */ +/** Test QueryComponent.doFieldSortValues */ public class TestFieldSortValues extends 
SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - System.setProperty("solr.tests.payload.fieldtype", - Boolean.getBoolean(NUMERIC_POINTS_SYSPROP) ? - "wrapped_point_int" : "wrapped_trie_int"); + System.setProperty( + "solr.tests.payload.fieldtype", + Boolean.getBoolean(NUMERIC_POINTS_SYSPROP) ? "wrapped_point_int" : "wrapped_trie_int"); initCore("solrconfig-minimal.xml", "schema-field-sort-values.xml"); } - + public void testCustomComparator() throws Exception { clearIndex(); assertU(adoc(sdoc("id", "1", "payload", "2"))); @@ -43,11 +40,12 @@ public void testCustomComparator() throws Exception { assertU(commit()); // payload is backed by a custom sort field which returns the payload value mod 3 - assertQ(req("q", "*:*", "fl", "id", "sort", "payload asc, id asc", "fsv", "true") - , "//result/doc[str='2' and position()=1]" - , "//result/doc[str='3' and position()=2]" - , "//result/doc[str='5' and position()=3]" - , "//result/doc[str='1' and position()=4]" - , "//result/doc[str='4' and position()=5]"); + assertQ( + req("q", "*:*", "fl", "id", "sort", "payload asc, id asc", "fsv", "true"), + "//result/doc[str='2' and position()=1]", + "//result/doc[str='3' and position()=2]", + "//result/doc[str='5' and position()=3]", + "//result/doc[str='1' and position()=4]", + "//result/doc[str='4' and position()=5]"); } } diff --git a/solr/core/src/test/org/apache/solr/search/TestFilteredDocIdSet.java b/solr/core/src/test/org/apache/solr/search/TestFilteredDocIdSet.java index 068d603da58..87afd9fce33 100644 --- a/solr/core/src/test/org/apache/solr/search/TestFilteredDocIdSet.java +++ b/solr/core/src/test/org/apache/solr/search/TestFilteredDocIdSet.java @@ -20,9 +20,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; - import junit.framework.Assert; - import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexReader; @@ -45,68 +43,70 @@ public class TestFilteredDocIdSet extends SolrTestCase { public void testFilteredDocIdSet() throws Exception { - final int maxdoc=10; - final DocIdSet innerSet = new DocIdSet() { + final int maxdoc = 10; + final DocIdSet innerSet = + new DocIdSet() { - @Override - public long ramBytesUsed() { - return 0L; - } + @Override + public long ramBytesUsed() { + return 0L; + } + + @Override + public DocIdSetIterator iterator() { + return new DocIdSetIterator() { + + int docid = -1; + + @Override + public int docID() { + return docid; + } + + @Override + public int nextDoc() { + docid++; + return docid < maxdoc ? docid : (docid = NO_MORE_DOCS); + } + + @Override + public int advance(int target) throws IOException { + return slowAdvance(target); + } + + @Override + public long cost() { + return 1; + } + }; + } + }; - @Override - public DocIdSetIterator iterator() { - return new DocIdSetIterator() { - - int docid = -1; - - @Override - public int docID() { - return docid; - } - - @Override - public int nextDoc() { - docid++; - return docid < maxdoc ? 
docid : (docid = NO_MORE_DOCS); - } - - @Override - public int advance(int target) throws IOException { - return slowAdvance(target); - } - - @Override - public long cost() { - return 1; - } - }; - } - }; - - DocIdSet filteredSet = new FilteredDocIdSet(innerSet){ - @Override - protected boolean match(int docid) { - return docid%2 == 0; //validate only even docids - } - }; + DocIdSet filteredSet = + new FilteredDocIdSet(innerSet) { + @Override + protected boolean match(int docid) { + return docid % 2 == 0; // validate only even docids + } + }; DocIdSetIterator iter = filteredSet.iterator(); ArrayList list = new ArrayList<>(); int doc = iter.advance(3); if (doc != DocIdSetIterator.NO_MORE_DOCS) { list.add(Integer.valueOf(doc)); - while((doc = iter.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { + while ((doc = iter.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { list.add(Integer.valueOf(doc)); } } int[] docs = new int[list.size()]; - int c=0; + int c = 0; Iterator intIter = list.iterator(); - while(intIter.hasNext()) { + while (intIter.hasNext()) { docs[c++] = intIter.next().intValue(); } - int[] answer = new int[]{4,6,8}; + int[] answer = new int[] {4, 6, 8}; boolean same = Arrays.equals(answer, docs); if (!same) { System.out.println("answer: " + Arrays.toString(answer)); @@ -114,7 +114,7 @@ protected boolean match(int docid) { fail(); } } - + public void testNullDocIdSet() throws Exception { // (historical note) Tests that if a Query produces a null DocIdSet, which is given to // IndexSearcher, everything works fine. This came up in LUCENE-1754. @@ -125,18 +125,19 @@ public void testNullDocIdSet() throws Exception { writer.addDocument(doc); IndexReader reader = writer.getReader(); writer.close(); - + // First verify the document is searchable. IndexSearcher searcher = newSearcher(reader); Assert.assertEquals(1, searcher.search(new MatchAllDocsQuery(), 10).totalHits.value); - + // Now search w/ a Query which returns a null Scorer DocSetQuery f = new DocSetQuery(DocSet.empty()); - Query filtered = new BooleanQuery.Builder() - .add(new MatchAllDocsQuery(), Occur.MUST) - .add(f, Occur.FILTER) - .build(); + Query filtered = + new BooleanQuery.Builder() + .add(new MatchAllDocsQuery(), Occur.MUST) + .add(f, Occur.FILTER) + .build(); Assert.assertEquals(0, searcher.search(filtered, 10).totalHits.value); reader.close(); dir.close(); @@ -156,51 +157,53 @@ public void testNullIteratorFilteredDocIdSet() throws Exception { Assert.assertEquals(1, searcher.search(new MatchAllDocsQuery(), 10).totalHits.value); // Now search w/ a Query which returns a null Scorer - Query f = new Query() { - @Override - public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { - return new Weight(this) { + Query f = + new Query() { + @Override + public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { + return new Weight(this) { + + @Override + public Explanation explain(LeafReaderContext context, int doc) { + return Explanation.match(0f, "No match on id " + doc); + } + + @Override + public Scorer scorer(LeafReaderContext leafReaderContext) { + return null; + } + + @Override + public boolean isCacheable(LeafReaderContext ctx) { + return false; + } + }; + } @Override - public Explanation explain(LeafReaderContext context, int doc) { - return Explanation.match(0f, "No match on id " + doc); + public String toString(String field) { + return "nullDocIdSetFilter"; } @Override - public Scorer scorer(LeafReaderContext leafReaderContext) { - return null; + public void 
visit(QueryVisitor queryVisitor) {} + + @Override + public boolean equals(Object other) { + return other == this; } @Override - public boolean isCacheable(LeafReaderContext ctx) { - return false; + public int hashCode() { + return System.identityHashCode(this); } }; - } - - @Override - public String toString(String field) { - return "nullDocIdSetFilter"; - } - - @Override - public void visit(QueryVisitor queryVisitor) {} - - @Override - public boolean equals(Object other) { - return other == this; - } - - @Override - public int hashCode() { - return System.identityHashCode(this); - } - }; - Query filtered = new BooleanQuery.Builder() - .add(new MatchAllDocsQuery(), Occur.MUST) - .add(f, Occur.FILTER) - .build(); + Query filtered = + new BooleanQuery.Builder() + .add(new MatchAllDocsQuery(), Occur.MUST) + .add(f, Occur.FILTER) + .build(); Assert.assertEquals(0, searcher.search(filtered, 10).totalHits.value); reader.close(); dir.close(); diff --git a/solr/core/src/test/org/apache/solr/search/TestFiltering.java b/solr/core/src/test/org/apache/solr/search/TestFiltering.java index b2ad9cf75c3..e193fb63e54 100644 --- a/solr/core/src/test/org/apache/solr/search/TestFiltering.java +++ b/solr/core/src/test/org/apache/solr/search/TestFiltering.java @@ -16,12 +16,10 @@ */ package org.apache.solr.search; - import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.List; import java.util.Locale; - import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Query; import org.apache.lucene.util.FixedBitSet; @@ -42,27 +40,36 @@ public class TestFiltering extends SolrTestCaseJ4 { @BeforeClass public static void beforeTests() throws Exception { System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_ - initCore("solrconfig.xml","schema_latest.xml"); + initCore("solrconfig.xml", "schema_latest.xml"); } @Test public void testLiveDocsSharing() throws Exception { clearIndex(); - for (int i=0; i<20; i++) { - for (int repeat=0; repeat < (i%5==0 ? 2 : 1); repeat++) { - assertU(adoc("id", Integer.toString(i), "foo_s", "foo", "val_i", Integer.toString(i), "val_s", Character.toString((char)('A' + i)))); + for (int i = 0; i < 20; i++) { + for (int repeat = 0; repeat < (i % 5 == 0 ? 
2 : 1); repeat++) { + assertU( + adoc( + "id", + Integer.toString(i), + "foo_s", + "foo", + "val_i", + Integer.toString(i), + "val_s", + Character.toString((char) ('A' + i)))); } } assertU(commit()); String[] queries = { - "foo_s:foo", - "foo_s:f*", - "*:*", - "id:[* TO *]", - "id:[0 TO 99]", - "val_i:[0 TO 20]", - "val_s:[A TO z]" + "foo_s:foo", + "foo_s:f*", + "*:*", + "id:[* TO *]", + "id:[0 TO 99]", + "val_i:[0 TO 20]", + "val_s:[A TO z]" }; SolrQueryRequest req = req(); @@ -70,30 +77,30 @@ public void testLiveDocsSharing() throws Exception { SolrIndexSearcher searcher = req.getSearcher(); DocSet live = null; - for (String qstr : queries) { + for (String qstr : queries) { Query q = QParser.getParser(qstr, null, req).getQuery(); // System.out.println("getting set for " + q); DocSet set = searcher.getDocSet(q); if (live == null) { live = searcher.getLiveDocSet(); } - assertTrue( set == live); + assertTrue(set == live); QueryCommand cmd = new QueryCommand(); - cmd.setQuery( QParser.getParser(qstr, null, req).getQuery() ); + cmd.setQuery(QParser.getParser(qstr, null, req).getQuery()); cmd.setLen(random().nextInt(30)); cmd.setNeedDocSet(true); QueryResult res = new QueryResult(); searcher.search(res, cmd); set = res.getDocSet(); - assertTrue( set == live ); + assertTrue(set == live); - cmd.setQuery( QParser.getParser(qstr + " OR id:0", null, req).getQuery() ); - cmd.setFilterList( QParser.getParser(qstr + " OR id:1", null, req).getQuery() ); + cmd.setQuery(QParser.getParser(qstr + " OR id:0", null, req).getQuery()); + cmd.setFilterList(QParser.getParser(qstr + " OR id:1", null, req).getQuery()); res = new QueryResult(); searcher.search(res, cmd); set = res.getDocSet(); - assertTrue( set == live ); + assertTrue(set == live); } } finally { @@ -101,99 +108,137 @@ public void testLiveDocsSharing() throws Exception { } } - public void testCaching() throws Exception { + public void testCaching() throws Exception { clearIndex(); - assertU(adoc("id","4", "val_i","1")); - assertU(adoc("id","1", "val_i","2")); - assertU(adoc("id","3", "val_i","3")); - assertU(adoc("id","2", "val_i","4")); + assertU(adoc("id", "4", "val_i", "1")); + assertU(adoc("id", "1", "val_i", "2")); + assertU(adoc("id", "3", "val_i", "3")); + assertU(adoc("id", "2", "val_i", "4")); assertU(commit()); int prevCount; // default cost uses post filtering (for frange) prevCount = DelegatingCollector.setLastDelegateCount; - assertJQ(req("q","*:*", "fq","{!frange l=2 u=3 cache=false}val_i") - ,"/response/numFound==2" - ); + assertJQ(req("q", "*:*", "fq", "{!frange l=2 u=3 cache=false}val_i"), "/response/numFound==2"); assertEquals(1, DelegatingCollector.setLastDelegateCount - prevCount); // The exact same query the second time will be cached by the queryCache prevCount = DelegatingCollector.setLastDelegateCount; - assertJQ(req("q","*:*", "fq","{!frange l=2 u=3 cache=false}val_i") - ,"/response/numFound==2" - ); + assertJQ(req("q", "*:*", "fq", "{!frange l=2 u=3 cache=false}val_i"), "/response/numFound==2"); assertEquals(0, DelegatingCollector.setLastDelegateCount - prevCount); // cache is true by default, even w/explicit low/high costs prevCount = DelegatingCollector.setLastDelegateCount; - assertJQ(req("q","*:*", "fq","{!frange l=2 u=4}val_i") - ,"/response/numFound==3" - ); + assertJQ(req("q", "*:*", "fq", "{!frange l=2 u=4}val_i"), "/response/numFound==3"); assertEquals(0, DelegatingCollector.setLastDelegateCount - prevCount); - assertJQ(req("q","*:*", "fq","{!frange l=2 u=4 cost=0}val_i") - ,"/response/numFound==3" - ); + 
assertJQ(req("q", "*:*", "fq", "{!frange l=2 u=4 cost=0}val_i"), "/response/numFound==3"); assertEquals(0, DelegatingCollector.setLastDelegateCount - prevCount); - assertJQ(req("q","*:*", "fq","{!frange l=2 u=4 cost=999}val_i") - ,"/response/numFound==3" - ); + assertJQ(req("q", "*:*", "fq", "{!frange l=2 u=4 cost=999}val_i"), "/response/numFound==3"); assertEquals(0, DelegatingCollector.setLastDelegateCount - prevCount); // no caching and explicitly low cost avoids post filtering prevCount = DelegatingCollector.setLastDelegateCount; - assertJQ(req("q","*:*", "fq","{!frange l=2 u=5 cache=false cost=0}val_i") - ,"/response/numFound==3" - ); + assertJQ( + req("q", "*:*", "fq", "{!frange l=2 u=5 cache=false cost=0}val_i"), + "/response/numFound==3"); assertEquals(0, DelegatingCollector.setLastDelegateCount - prevCount); - // now re-do the same tests w/ faceting on to get the full docset // default cost uses post filtering (for frange) prevCount = DelegatingCollector.setLastDelegateCount; - assertJQ(req("facet","true", "facet.field","id", "q","*:*", "fq","{!frange l=2 u=6 cache=false}val_i") - ,"/response/numFound==3" - ); + assertJQ( + req( + "facet", + "true", + "facet.field", + "id", + "q", + "*:*", + "fq", + "{!frange l=2 u=6 cache=false}val_i"), + "/response/numFound==3"); assertEquals(1, DelegatingCollector.setLastDelegateCount - prevCount); - // since we need the docset and the filter was not cached, the collector will need to be used again + // since we need the docset and the filter was not cached, the collector will need to be used + // again prevCount = DelegatingCollector.setLastDelegateCount; - assertJQ(req("facet","true", "facet.field","id", "q","*:*", "fq","{!frange l=2 u=6 cache=false}val_i") - ,"/response/numFound==3" - ); + assertJQ( + req( + "facet", + "true", + "facet.field", + "id", + "q", + "*:*", + "fq", + "{!frange l=2 u=6 cache=false}val_i"), + "/response/numFound==3"); assertEquals(1, DelegatingCollector.setLastDelegateCount - prevCount); // cache is true by default, even w/explicit low/high costs prevCount = DelegatingCollector.setLastDelegateCount; - assertJQ(req("facet","true", "facet.field","id", "q","*:*", "fq","{!frange l=2 u=7}val_i") - ,"/response/numFound==3" - ); + assertJQ( + req("facet", "true", "facet.field", "id", "q", "*:*", "fq", "{!frange l=2 u=7}val_i"), + "/response/numFound==3"); assertEquals(0, DelegatingCollector.setLastDelegateCount - prevCount); - assertJQ(req("facet","true", "facet.field","id", "q","*:*", "fq","{!frange l=2 u=7 cost=0}val_i") - ,"/response/numFound==3" - ); + assertJQ( + req( + "facet", + "true", + "facet.field", + "id", + "q", + "*:*", + "fq", + "{!frange l=2 u=7 cost=0}val_i"), + "/response/numFound==3"); assertEquals(0, DelegatingCollector.setLastDelegateCount - prevCount); - assertJQ(req("facet","true", "facet.field","id", "q","*:*", "fq","{!frange l=2 u=7 cost=999}val_i") - ,"/response/numFound==3" - ); + assertJQ( + req( + "facet", + "true", + "facet.field", + "id", + "q", + "*:*", + "fq", + "{!frange l=2 u=7 cost=999}val_i"), + "/response/numFound==3"); assertEquals(0, DelegatingCollector.setLastDelegateCount - prevCount); // no caching and explicitly low cost avoids post filtering prevCount = DelegatingCollector.setLastDelegateCount; - assertJQ(req("facet","true", "facet.field","id", "q","*:*", "fq","{!frange l=2 u=8 cache=false cost=0}val_i") - ,"/response/numFound==3" - ); + assertJQ( + req( + "facet", + "true", + "facet.field", + "id", + "q", + "*:*", + "fq", + "{!frange l=2 u=8 cache=false cost=0}val_i"), + 
"/response/numFound==3"); assertEquals(0, DelegatingCollector.setLastDelegateCount - prevCount); // test that offset works when not caching main query - assertJQ(req("q","{!cache=false}*:*", "start","2", "rows","1", "sort","val_i asc", "fl","val_i") - ,"/response/docs==[{'val_i':3}]" - ); - + assertJQ( + req( + "q", + "{!cache=false}*:*", + "start", + "2", + "rows", + "1", + "sort", + "val_i asc", + "fl", + "val_i"), + "/response/docs==[{'val_i':3}]"); } - static class Model { int indexSize; FixedBitSet answer; @@ -214,26 +259,30 @@ void clear() { static String f = "val_i"; static String f_s = "val_s"; + static String f_s(int i) { return String.format(Locale.ROOT, "%05d", i); } - - String rangeStr(String field, boolean negative, int l, int u, boolean cache, int cost, boolean exclude) { - String topLev=""; + String rangeStr( + String field, boolean negative, int l, int u, boolean cache, int cost, boolean exclude) { + String topLev = ""; if (!cache || exclude) { - topLev = "{!" + (cache || random().nextBoolean() ? " cache=" + cache : "") - + (cost != 0 ? " cost=" + cost : "") - + ((exclude) ? " tag=t" : "") + "}"; + topLev = + "{!" + + (cache || random().nextBoolean() ? " cache=" + cache : "") + + (cost != 0 ? " cost=" + cost : "") + + ((exclude) ? " tag=t" : "") + + "}"; } String q = field + ":"; String q2 = q; String lower1 = "[" + f_s(l); - String lower2 = l<=0 ? lower1 : ("{" + f_s(l-1)); + String lower2 = l <= 0 ? lower1 : ("{" + f_s(l - 1)); String upper1 = f_s(u) + "]"; - String upper2 = f_s(u+1) + "}"; + String upper2 = f_s(u + 1) + "}"; if (random().nextBoolean()) { q += lower1; @@ -254,7 +303,6 @@ String rangeStr(String field, boolean negative, int l, int u, boolean cache, int q2 += upper1; } - // String q = field + ":[" + f_s(l) + " TO " + f_s(u) + "]"; if (negative) { @@ -265,10 +313,10 @@ String rangeStr(String field, boolean negative, int l, int u, boolean cache, int // try some different query structures - important for testing different code paths switch (random().nextInt(5)) { case 0: - q = q + " OR id:RAND"+random().nextInt(); + q = q + " OR id:RAND" + random().nextInt(); break; case 1: - q = "id:RAND"+random().nextInt() + " OR " + q; + q = "id:RAND" + random().nextInt() + " OR " + q; break; case 2: q = "*:* AND " + q; @@ -285,19 +333,22 @@ String rangeStr(String field, boolean negative, int l, int u, boolean cache, int return topLev + q; } - String frangeStr(String field, boolean negative, int l, int u, boolean cache, int cost, boolean exclude) { + String frangeStr( + String field, boolean negative, int l, int u, boolean cache, int cost, boolean exclude) { - String topLev=""; + String topLev = ""; if (!cache || exclude) { - topLev = "" + (cache || random().nextBoolean() ? " cache="+cache : "") - + (cost!=0 ? " cost="+cost : "") - + ((exclude) ? " tag=t" : ""); + topLev = + "" + + (cache || random().nextBoolean() ? " cache=" + cache : "") + + (cost != 0 ? " cost=" + cost : "") + + ((exclude) ? " tag=t" : ""); } - String ret = "{!frange v="+field+" l="+l+" u="+u; + String ret = "{!frange v=" + field + " l=" + l + " u=" + u; if (negative) { ret = "-_query_:\"" + ret + "}\""; - if (topLev.length()>0) { + if (topLev.length() > 0) { ret = "{!" + topLev + "}" + ret; // add options at top level (can't be on frange) } } else { @@ -312,59 +363,67 @@ String makeRandomQuery(Model model, boolean mainQuery, boolean facetQuery) { boolean cache = random().nextBoolean(); int cost = cache ? 0 : random().nextBoolean() ? 
random().nextInt(200) : 0; boolean positive = random().nextBoolean(); - boolean exclude = facetQuery ? false : random().nextBoolean(); // can't exclude a facet query from faceting + boolean exclude = + facetQuery ? false : random().nextBoolean(); // can't exclude a facet query from faceting - FixedBitSet[] sets = facetQuery ? new FixedBitSet[]{model.facetQuery} : - (exclude ? new FixedBitSet[]{model.answer, model.facetQuery} : new FixedBitSet[]{model.answer, model.multiSelect, model.facetQuery}); + FixedBitSet[] sets = + facetQuery + ? new FixedBitSet[] {model.facetQuery} + : (exclude + ? new FixedBitSet[] {model.answer, model.facetQuery} + : new FixedBitSet[] {model.answer, model.multiSelect, model.facetQuery}); if (random().nextInt(100) < 60) { // frange - int l=0; - int u=0; + int l = 0; + int u = 0; if (positive) { // positive frange, make it big by taking the max of 4 tries - int n=-1; + int n = -1; - for (int i=0; i<4; i++) { + for (int i = 0; i < 4; i++) { int ll = random().nextInt(model.indexSize); - int uu = ll + ((ll==model.indexSize-1) ? 0 : random().nextInt(model.indexSize-l)); - if (uu-ll+1 > n) { - n = uu-ll+1; + int uu = ll + ((ll == model.indexSize - 1) ? 0 : random().nextInt(model.indexSize - l)); + if (uu - ll + 1 > n) { + n = uu - ll + 1; u = uu; l = ll; } } for (FixedBitSet set : sets) { - set.clear(0,l); + set.clear(0, l); if (u + 1 < model.indexSize) { - set.clear(u+1, model.indexSize); + set.clear(u + 1, model.indexSize); } } } else { // negative frange.. make it relatively small l = random().nextInt(model.indexSize); - u = Math.max(model.indexSize-1, l+random().nextInt(Math.max(model.indexSize / 10, 2))); + u = Math.max(model.indexSize - 1, l + random().nextInt(Math.max(model.indexSize / 10, 2))); for (FixedBitSet set : sets) { - int end = Math.min(u+1, set.length()); - set.clear(l,end); + int end = Math.min(u + 1, set.length()); + set.clear(l, end); } } String whichField = random().nextBoolean() ? f : f_s; - return random().nextBoolean() ? - frangeStr(f, !positive, l, u, cache, cost, exclude) // todo: frange doesn't work on the string field? - : rangeStr(whichField, !positive, l, u, cache, cost, exclude); + return random().nextBoolean() + ? frangeStr( + f, !positive, l, u, cache, cost, + exclude) // todo: frange doesn't work on the string field? + : rangeStr(whichField, !positive, l, u, cache, cost, exclude); } else { // term or boolean query int numWords = FixedBitSet.bits2words(model.indexSize); long[] psetBits = new long[numWords]; - for (int i=0; i= model.indexSize) break; - doc = pset.nextSetBit(doc+1); + for (int doc = -1; ; ) { + if (doc + 1 >= model.indexSize) break; + doc = pset.nextSetBit(doc + 1); if (doc == DocIdSetIterator.NO_MORE_DOCS) break; - sb.append((positive ? " ":" -") + f+":"+doc); + sb.append((positive ? " " : " -") + f + ":" + doc); } String ret = sb.toString(); - if (ret.length()==0) ret = (positive ? "":"-") + "id:99999999"; + if (ret.length() == 0) ret = (positive ? "" : "-") + "id:99999999"; if (!cache || exclude || random().nextBoolean()) { - ret = "{!cache=" + cache - + ((cost != 0) ? " cost="+cost : "") - + ((exclude) ? " tag=t" : "") - + "}" + ret; + ret = + "{!cache=" + + cache + + ((cost != 0) ? " cost=" + cost : "") + + ((exclude) ? 
" tag=t" : "") + + "}" + + ret; } return ret; @@ -406,18 +467,18 @@ String makeRandomQuery(Model model, boolean mainQuery, boolean facetQuery) { @Test public void testRandomFiltering() throws Exception { - int indexIter=5 * RANDOM_MULTIPLIER; - int queryIter=250 * RANDOM_MULTIPLIER; + int indexIter = 5 * RANDOM_MULTIPLIER; + int queryIter = 250 * RANDOM_MULTIPLIER; Model model = new Model(); - for (int iiter = 0; iiter params = new ArrayList<>(); - params.add("q"); params.add(makeRandomQuery(model, true, false)); + params.add("q"); + params.add(makeRandomQuery(model, true, false)); int nFilters = random().nextInt(5); - for (int i=0; i assertQ(req("q", "content_multi_bad:" + "abCD*")) - ); + Exception expected = + expectThrows( + Exception.class, + "Should throw exception when token evaluates to more than one term", + () -> assertQ(req("q", "content_multi_bad:" + "abCD*"))); assertTrue(expected.getCause() instanceof org.apache.solr.common.SolrException); } finally { resetExceptionIgnores(); } } + @Test public void testGreek() { assertQ(req("q", "content_greek:μαιο*"), "//result[@numFound='2']"); assertQ(req("q", "content_greek:ΜΆΪΟ*"), "//result[@numFound='2']"); assertQ(req("q", "content_greek:Μάϊο*"), "//result[@numFound='2']"); } + @Test public void testRussian() { assertQ(req("q", "content_russian:элЕктРомагн*тной"), "//result[@numFound='1']"); @@ -313,24 +347,26 @@ public void testRussian() { assertQ(req("q", "content_russian:Си*е"), "//result[@numFound='1']"); assertQ(req("q", "content_russian:эЛектромагнИт*"), "//result[@numFound='1']"); } - + public void testPersian() { assertQ(req("q", "content_persian:های*"), "//result[@numFound='1']"); } - + public void testArabic() { - assertQ(req("q", "content_arabic:روبرـــــــــــــــــــــــــــــــــت*"), "//result[@numFound='1']"); + assertQ( + req("q", "content_arabic:روبرـــــــــــــــــــــــــــــــــت*"), + "//result[@numFound='1']"); } - + public void testHindi() { assertQ(req("q", "content_hindi:हिन्दी*"), "//result[@numFound='1']"); assertQ(req("q", "content_hindi:आआ*"), "//result[@numFound='1']"); } - + public void testGerman() { assertQ(req("q", "content_german:weiß*"), "//result[@numFound='1']"); } - + public void testCJKWidth() { assertQ(req("q", "content_width:ヴィ*"), "//result[@numFound='1']"); } diff --git a/solr/core/src/test/org/apache/solr/search/TestGraphTermsQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestGraphTermsQParserPlugin.java index 1cb927d3644..b070be36e87 100644 --- a/solr/core/src/test/org/apache/solr/search/TestGraphTermsQParserPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/TestGraphTermsQParserPlugin.java @@ -24,8 +24,8 @@ import org.junit.BeforeClass; import org.junit.Test; -//We want codecs that support DocValues, and ones supporting blank/empty values. -@SuppressCodecs({"Appending","Lucene3x","Lucene40","Lucene41","Lucene42"}) +// We want codecs that support DocValues, and ones supporting blank/empty values. 
+@SuppressCodecs({"Appending", "Lucene3x", "Lucene40", "Lucene41", "Lucene42"}) public class TestGraphTermsQParserPlugin extends SolrTestCaseJ4 { @BeforeClass @@ -48,76 +48,93 @@ public void testQueries() throws Exception { String group = "group_s"; - String[] doc = {"id","1", "term_s", "YYYY", group, "1", "test_ti", "5", "test_tl", "10", "test_tf", "2000"}; + String[] doc = { + "id", "1", "term_s", "YYYY", group, "1", "test_ti", "5", "test_tl", "10", "test_tf", "2000" + }; assertU(adoc(doc)); - String[] doc1 = {"id","2", "term_s","YYYY", group, "1", "test_ti", "5", "test_tl", "100", "test_tf", "200"}; + String[] doc1 = { + "id", "2", "term_s", "YYYY", group, "1", "test_ti", "5", "test_tl", "100", "test_tf", "200" + }; assertU(adoc(doc1)); - String[] doc2 = {"id","3", "term_s", "YYYY", "test_ti", "5000", "test_tl", "100", "test_tf", "200"}; + String[] doc2 = { + "id", "3", "term_s", "YYYY", "test_ti", "5000", "test_tl", "100", "test_tf", "200" + }; assertU(adoc(doc2)); assertU(commit()); - String[] doc3 = {"id","4", "term_s", "YYYY", "test_ti", "500", "test_tl", "1000", "test_tf", "2000"}; + String[] doc3 = { + "id", "4", "term_s", "YYYY", "test_ti", "500", "test_tl", "1000", "test_tf", "2000" + }; assertU(adoc(doc3)); - String[] doc4 = {"id","5", "term_s", "YYYY", group, "2", "test_ti", "5", "test_tl", "10", "test_tf", "2000"}; + String[] doc4 = { + "id", "5", "term_s", "YYYY", group, "2", "test_ti", "5", "test_tl", "10", "test_tf", "2000" + }; assertU(adoc(doc4)); assertU(commit()); - String[] doc5 = {"id","6", "term_s","YYYY", group, "2", "test_ti", "10", "test_tl", "100", "test_tf", "200"}; + String[] doc5 = { + "id", "6", "term_s", "YYYY", group, "2", "test_ti", "10", "test_tl", "100", "test_tf", "200" + }; assertU(adoc(doc5)); assertU(commit()); - String[] doc6 = {"id","7", "term_s", "YYYY", group, "1", "test_ti", "10", "test_tl", "50", "test_tf", "300"}; + String[] doc6 = { + "id", "7", "term_s", "YYYY", group, "1", "test_ti", "10", "test_tl", "50", "test_tf", "300" + }; assertU(adoc(doc6)); assertU(commit()); ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "{!graphTerms f=group_s maxDocFreq=10}1,2"); params.add("sort", "id asc"); - assertQ(req(params, "indent", "on"), "*[count(//doc)=5]", + assertQ( + req(params, "indent", "on"), + "*[count(//doc)=5]", "//result/doc[1]/str[@name='id'][.='1']", "//result/doc[2]/str[@name='id'][.='2']", "//result/doc[3]/str[@name='id'][.='5']", "//result/doc[4]/str[@name='id'][.='6']", - "//result/doc[5]/str[@name='id'][.='7']" - ); + "//result/doc[5]/str[@name='id'][.='7']"); - //Test without maxDocFreq param. Should default to Integer.MAX_VALUE and match all terms. + // Test without maxDocFreq param. Should default to Integer.MAX_VALUE and match all terms. 
params = new ModifiableSolrParams(); params.add("q", "{!graphTerms f=group_s}1,2"); params.add("sort", "id asc"); - assertQ(req(params, "indent", "on"), "*[count(//doc)=5]", + assertQ( + req(params, "indent", "on"), + "*[count(//doc)=5]", "//result/doc[1]/str[@name='id'][.='1']", "//result/doc[2]/str[@name='id'][.='2']", "//result/doc[3]/str[@name='id'][.='5']", "//result/doc[4]/str[@name='id'][.='6']", - "//result/doc[5]/str[@name='id'][.='7']" - ); + "//result/doc[5]/str[@name='id'][.='7']"); params = new ModifiableSolrParams(); params.add("q", "{!graphTerms f=group_s maxDocFreq=1}1,2"); params.add("sort", "id asc"); - assertQ(req(params, "indent", "on"), "*[count(//doc)=0]" - ); + assertQ(req(params, "indent", "on"), "*[count(//doc)=0]"); - //Test with int field + // Test with int field params = new ModifiableSolrParams(); params.add("q", "{!graphTerms f=test_ti maxDocFreq=10}5,10"); params.add("sort", "id asc"); - assertQ(req(params, "indent", "on"), "*[count(//doc)=5]", + assertQ( + req(params, "indent", "on"), + "*[count(//doc)=5]", "//result/doc[1]/str[@name='id'][.='1']", "//result/doc[2]/str[@name='id'][.='2']", "//result/doc[3]/str[@name='id'][.='5']", "//result/doc[4]/str[@name='id'][.='6']", - "//result/doc[5]/str[@name='id'][.='7']" - ); + "//result/doc[5]/str[@name='id'][.='7']"); - //Test with int field + // Test with int field params = new ModifiableSolrParams(); params.add("q", "{!graphTerms f=test_ti maxDocFreq=2}5,10"); params.add("sort", "id asc"); - assertQ(req(params, "indent", "on"), "*[count(//doc)=2]", + assertQ( + req(params, "indent", "on"), + "*[count(//doc)=2]", "//result/doc[1]/str[@name='id'][.='6']", - "//result/doc[2]/str[@name='id'][.='7']" - ); + "//result/doc[2]/str[@name='id'][.='7']"); } } diff --git a/solr/core/src/test/org/apache/solr/search/TestHashQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestHashQParserPlugin.java index 3c17e286d81..cc7939f526e 100644 --- a/solr/core/src/test/org/apache/solr/search/TestHashQParserPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/TestHashQParserPlugin.java @@ -16,6 +16,10 @@ */ package org.apache.solr.search; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Random; +import java.util.Set; import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; @@ -25,12 +29,7 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Random; -import java.util.Set; - -@LuceneTestCase.SuppressCodecs({"Lucene3x", "Lucene40","Lucene41","Lucene42","Lucene45"}) +@LuceneTestCase.SuppressCodecs({"Lucene3x", "Lucene40", "Lucene41", "Lucene42", "Lucene45"}) public class TestHashQParserPlugin extends SolrTestCaseJ4 { @BeforeClass @@ -48,10 +47,9 @@ public void setUp() throws Exception { assertU(commit()); } - public int getCost(Random random) { int i = random.nextInt(2); - if(i == 0) { + if (i == 0) { return 200; } else { return 1; @@ -62,7 +60,7 @@ public int getCost(Random random) { public void testManyHashPartitions() throws Exception { ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); - params.add("fq", "{!hash worker=0 workers=2 cost="+getCost(random())+"}"); + params.add("fq", "{!hash worker=0 workers=2 cost=" + getCost(random()) + "}"); params.add("partitionKeys", "a_i,a_s,a_i,a_s"); params.add("wt", "xml"); String response = h.query(req(params)); @@ -70,7 +68,7 @@ public void testManyHashPartitions() throws 
Exception { params = new ModifiableSolrParams(); params.add("q", "*:*"); - params.add("fq", "{!hash worker=0 workers=2 cost="+getCost(random())+"}"); + params.add("fq", "{!hash worker=0 workers=2 cost=" + getCost(random()) + "}"); params.add("partitionKeys", "nonexistent"); params.add("wt", "xml"); ModifiableSolrParams finalParams = params; @@ -81,7 +79,7 @@ public void testManyHashPartitions() throws Exception { public void testLessWorkers() { ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); - params.add("fq", "{!hash worker=0 workers=1 cost="+getCost(random())+"}"); + params.add("fq", "{!hash worker=0 workers=1 cost=" + getCost(random()) + "}"); params.add("partitionKeys", "a_i"); params.add("wt", "xml"); ModifiableSolrParams finalParams = params; @@ -91,227 +89,224 @@ public void testLessWorkers() { @Test public void testHashPartitionWithEmptyValues() throws Exception { - assertU(adoc("id", "1", "a_s", "one", "a_i" , "1")); - assertU(adoc("id", "2", "a_s", "one", "a_i" , "1")); + assertU(adoc("id", "1", "a_s", "one", "a_i", "1")); + assertU(adoc("id", "2", "a_s", "one", "a_i", "1")); assertU(adoc("id", "3")); assertU(adoc("id", "4")); assertU(commit()); - //Test with string hash + // Test with string hash ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); - params.add("fq", "{!hash worker=0 workers=2 cost="+getCost(random())+"}"); + params.add("fq", "{!hash worker=0 workers=2 cost=" + getCost(random()) + "}"); params.add("partitionKeys", "a_s"); params.add("wt", "xml"); String response = h.query(req(params)); BaseTestHarness.validateXPath(response, "//*[@numFound='4']"); - //Test with int hash + // Test with int hash params = new ModifiableSolrParams(); params.add("q", "*:*"); - params.add("fq", "{!hash worker=0 workers=2 cost="+getCost(random())+"}"); + params.add("fq", "{!hash worker=0 workers=2 cost=" + getCost(random()) + "}"); params.add("partitionKeys", "a_i"); params.add("wt", "xml"); response = h.query(req(params)); BaseTestHarness.validateXPath(response, "//*[@numFound='4']"); } - @Test @SuppressWarnings({"unchecked", "rawtypes"}) public void testHashPartition() throws Exception { - Random random = random(); HashSet set = new HashSet(); - for (int i=0; i<50; i++) { + for (int i = 0; i < 50; i++) { int v = random.nextInt(1000000); String val = Integer.toString(v); - if(!set.contains(val)){ + if (!set.contains(val)) { set.add(val); String[] doc = {"id", val, "a_s", val, "a_i", val, "a_l", val}; assertU(adoc(doc)); - if(i % 10 == 0) - assertU(commit()); - + if (i % 10 == 0) assertU(commit()); } } assertU(commit()); - - //Test with 3 worker and String hash ID. + // Test with 3 worker and String hash ID. 
ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); - params.add("fq", "{!hash worker=0 workers=3 cost="+getCost(random)+"}"); + params.add("fq", "{!hash worker=0 workers=3 cost=" + getCost(random) + "}"); params.add("partitionKeys", "a_s"); - params.add("rows","50"); + params.add("rows", "50"); params.add("wt", "xml"); HashSet set1 = new HashSet(); String response = h.query(req(params)); Iterator it = set.iterator(); - while(it.hasNext()) { + while (it.hasNext()) { String s = it.next(); - String results = BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); - if(results == null) { + String results = + BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='" + s + "'])=1]"); + if (results == null) { set1.add(s); } } params = new ModifiableSolrParams(); params.add("q", "*:*"); - params.add("fq", "{!hash worker=1 workers=3 cost="+getCost(random)+"}"); + params.add("fq", "{!hash worker=1 workers=3 cost=" + getCost(random) + "}"); params.add("partitionKeys", "a_s"); - params.add("rows","50"); + params.add("rows", "50"); params.add("wt", "xml"); HashSet set2 = new HashSet(); response = h.query(req(params)); it = set.iterator(); - while(it.hasNext()) { + while (it.hasNext()) { String s = it.next(); - String results = BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); - if(results == null) { + String results = + BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='" + s + "'])=1]"); + if (results == null) { set2.add(s); } } - params = new ModifiableSolrParams(); params.add("q", "*:*"); - params.add("fq", "{!hash worker=2 workers=3 cost="+getCost(random)+"}"); + params.add("fq", "{!hash worker=2 workers=3 cost=" + getCost(random) + "}"); params.add("partitionKeys", "a_s"); - params.add("rows","50"); + params.add("rows", "50"); params.add("wt", "xml"); HashSet set3 = new HashSet(); response = h.query(req(params)); it = set.iterator(); - while(it.hasNext()) { + while (it.hasNext()) { String s = it.next(); - String results = BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); - if(results == null) { + String results = + BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='" + s + "'])=1]"); + if (results == null) { set3.add(s); } } - assert(set1.size() > 0); - assert(set2.size() > 0); - assert(set3.size() > 0); - assert(set1.size()+set2.size()+set3.size()==set.size()); + assert (set1.size() > 0); + assert (set2.size() > 0); + assert (set3.size() > 0); + assert (set1.size() + set2.size() + set3.size() == set.size()); assertNoOverLap(set1, set2); assertNoOverLap(set1, set3); assertNoOverLap(set2, set3); - - //Test with 2 workers and int partition Key - + // Test with 2 workers and int partition Key params = new ModifiableSolrParams(); params.add("q", "*:*"); - params.add("fq", "{!hash worker=0 workers=2 cost="+getCost(random)+"}"); + params.add("fq", "{!hash worker=0 workers=2 cost=" + getCost(random) + "}"); params.add("partitionKeys", "a_i"); - params.add("rows","50"); + params.add("rows", "50"); params.add("wt", "xml"); set1 = new HashSet(); response = h.query(req(params)); it = set.iterator(); - while(it.hasNext()) { + while (it.hasNext()) { String s = it.next(); - String results = BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); - if(results == null) { + String results = + BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='" + s + "'])=1]"); + if (results == null) { 
set1.add(s); } } params = new ModifiableSolrParams(); params.add("q", "*:*"); - params.add("fq", "{!hash worker=1 workers=2 cost="+getCost(random)+"}"); + params.add("fq", "{!hash worker=1 workers=2 cost=" + getCost(random) + "}"); params.add("partitionKeys", "a_i"); - params.add("rows","50"); + params.add("rows", "50"); params.add("wt", "xml"); set2 = new HashSet(); response = h.query(req(params)); it = set.iterator(); - while(it.hasNext()) { + while (it.hasNext()) { String s = it.next(); - String results = BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); - if(results == null) { + String results = + BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='" + s + "'])=1]"); + if (results == null) { set2.add(s); } } - assert(set1.size() > 0); - assert(set2.size() > 0); - assert(set1.size()+set2.size()==set.size()); + assert (set1.size() > 0); + assert (set2.size() > 0); + assert (set1.size() + set2.size() == set.size()); assertNoOverLap(set1, set2); - - //Test with 2 workers and compound partition Key - + // Test with 2 workers and compound partition Key params = new ModifiableSolrParams(); params.add("q", "*:*"); - params.add("fq", "{!hash worker=0 workers=2 cost="+getCost(random)+"}"); + params.add("fq", "{!hash worker=0 workers=2 cost=" + getCost(random) + "}"); params.add("partitionKeys", "a_s, a_i, a_l"); - params.add("rows","50"); + params.add("rows", "50"); params.add("wt", "xml"); set1 = new HashSet(); response = h.query(req(params)); it = set.iterator(); - while(it.hasNext()) { + while (it.hasNext()) { String s = it.next(); - String results = BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); - if(results == null) { + String results = + BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='" + s + "'])=1]"); + if (results == null) { set1.add(s); } } params = new ModifiableSolrParams(); params.add("q", "*:*"); - params.add("fq", "{!hash worker=1 workers=2 cost="+getCost(random)+"}"); + params.add("fq", "{!hash worker=1 workers=2 cost=" + getCost(random) + "}"); params.add("partitionKeys", "a_s, a_i, a_l"); - params.add("rows","50"); + params.add("rows", "50"); params.add("wt", "xml"); set2 = new HashSet(); response = h.query(req(params)); it = set.iterator(); - while(it.hasNext()) { + while (it.hasNext()) { String s = it.next(); - String results = BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='"+s+"'])=1]"); - if(results == null) { + String results = + BaseTestHarness.validateXPath(response, "*[count(//str[@name='id'][.='" + s + "'])=1]"); + if (results == null) { set2.add(s); } } - assert(set1.size() > 0); - assert(set2.size() > 0); - assert(set1.size()+set2.size()==set.size()); + assert (set1.size() > 0); + assert (set2.size() > 0); + assert (set1.size() + set2.size() == set.size()); assertNoOverLap(set1, set2); } - - private void assertNoOverLap(@SuppressWarnings({"rawtypes"})Set setA, - @SuppressWarnings({"rawtypes"})Set setB) throws Exception { + private void assertNoOverLap( + @SuppressWarnings({"rawtypes"}) Set setA, @SuppressWarnings({"rawtypes"}) Set setB) + throws Exception { @SuppressWarnings({"rawtypes"}) - Iterator it = setA.iterator(); - while(it.hasNext()) { + Iterator it = setA.iterator(); + while (it.hasNext()) { Object o = it.next(); - if(setB.contains(o)) { - throw new Exception("Overlapping sets for value:"+o.toString()); + if (setB.contains(o)) { + throw new Exception("Overlapping sets for value:" + o.toString()); } } } diff --git 
a/solr/core/src/test/org/apache/solr/search/TestIndexSearcher.java b/solr/core/src/test/org/apache/solr/search/TestIndexSearcher.java index 37c3c9657c1..fe256df9e42 100644 --- a/solr/core/src/test/org/apache/solr/search/TestIndexSearcher.java +++ b/solr/core/src/test/org/apache/solr/search/TestIndexSearcher.java @@ -16,6 +16,9 @@ */ package org.apache.solr.search; +import com.codahale.metrics.Gauge; +import com.codahale.metrics.Metric; +import com.google.common.collect.ImmutableMap; import java.io.IOException; import java.lang.reflect.Array; import java.util.Date; @@ -26,10 +29,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; - -import com.codahale.metrics.Gauge; -import com.codahale.metrics.Metric; -import com.google.common.collect.ImmutableMap; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReaderContext; @@ -62,9 +61,9 @@ public static void beforeClass() throws Exception { // depends on merges not happening when it doesn't expect systemSetPropertySolrTestsMergePolicyFactory(LogDocMergePolicyFactory.class.getName()); - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); } - + @AfterClass public static void afterClass() { systemClearPropertySolrTestsMergePolicyFactory(); @@ -75,7 +74,7 @@ public void setUp() throws Exception { System.getProperties().remove("tests.solr.useColdSearcher"); super.setUp(); } - + @Override public void tearDown() throws Exception { super.tearDown(); @@ -95,37 +94,37 @@ private String getStringVal(SolrQueryRequest sqr, String field, int doc) throws int idx = ReaderUtil.subIndex(doc, leaves); LeafReaderContext leaf = leaves.get(idx); FunctionValues vals = vs.getValues(context, leaf); - return vals.strVal(doc-leaf.docBase); + return vals.strVal(doc - leaf.docBase); } public void testReopen() throws Exception { - assertU(adoc("id","1", "v_t","Hello Dude", "v_s1","string1")); - assertU(adoc("id","2", "v_t","Hello Yonik", "v_s1","string2")); + assertU(adoc("id", "1", "v_t", "Hello Dude", "v_s1", "string1")); + assertU(adoc("id", "2", "v_t", "Hello Yonik", "v_s1", "string2")); assertU(commit()); - SolrQueryRequest sr1 = req("q","foo"); + SolrQueryRequest sr1 = req("q", "foo"); IndexReader r1 = sr1.getSearcher().getRawReader(); - String sval1 = getStringVal(sr1, "v_s1",0); + String sval1 = getStringVal(sr1, "v_s1", 0); assertEquals("string1", sval1); - assertU(adoc("id","3", "v_s1","{!literal}")); - assertU(adoc("id","4", "v_s1","other stuff")); + assertU(adoc("id", "3", "v_s1", "{!literal}")); + assertU(adoc("id", "4", "v_s1", "other stuff")); assertU(commit()); - SolrQueryRequest sr2 = req("q","foo"); + SolrQueryRequest sr2 = req("q", "foo"); IndexReader r2 = sr2.getSearcher().getRawReader(); // make sure the readers share the first segment // Didn't work w/ older versions of lucene2.9 going from segment -> multi assertEquals(r1.leaves().get(0).reader(), r2.leaves().get(0).reader()); - assertU(adoc("id","5", "v_f","3.14159")); - assertU(adoc("id","6", "v_f","8983", "v_s1","string6")); + assertU(adoc("id", "5", "v_f", "3.14159")); + assertU(adoc("id", "6", "v_f", "8983", "v_s1", "string6")); assertU(commit()); - SolrQueryRequest sr3 = req("q","foo"); + SolrQueryRequest sr3 = req("q", "foo"); IndexReader r3 = sr3.getSearcher().getRawReader(); // make sure the readers share segments // assertEquals(r1.getLeafReaders()[0], r3.getLeafReaders()[0]); @@ -133,7 +132,7
@@ public void testReopen() throws Exception { assertEquals(r2.leaves().get(1).reader(), r3.leaves().get(1).reader()); sr1.close(); - sr2.close(); + sr2.close(); // should currently be 1, but this could change depending on future index management int baseRefCount = r3.getRefCount(); @@ -141,20 +140,22 @@ public void testReopen() throws Exception { Map<String, Metric> metrics = h.getCore().getCoreMetricManager().getRegistry().getMetrics(); @SuppressWarnings({"unchecked"}) - Gauge<Date> g = (Gauge<Date>)metrics.get("SEARCHER.searcher.registeredAt"); + Gauge<Date> g = (Gauge<Date>) metrics.get("SEARCHER.searcher.registeredAt"); Date sr3SearcherRegAt = g.getValue(); assertU(commit()); // nothing has changed - SolrQueryRequest sr4 = req("q","foo"); - assertSame("nothing changed, searcher should be the same", - sr3.getSearcher(), sr4.getSearcher()); - assertEquals("nothing changed, searcher should not have been re-registered", - sr3SearcherRegAt, g.getValue()); + SolrQueryRequest sr4 = req("q", "foo"); + assertSame( + "nothing changed, searcher should be the same", sr3.getSearcher(), sr4.getSearcher()); + assertEquals( + "nothing changed, searcher should not have been re-registered", + sr3SearcherRegAt, + g.getValue()); IndexReader r4 = sr4.getSearcher().getRawReader(); // force an index change so the registered searcher won't be the one we are testing (and // then we should be able to test the refCount going all the way to 0 - assertU(adoc("id","7", "v_f","7574")); - assertU(commit()); + assertU(adoc("id", "7", "v_f", "7574")); + assertU(commit()); // test that reader didn't change assertSame(r3, r4); @@ -162,114 +163,130 @@ public void testReopen() throws Exception { sr3.close(); assertEquals(baseRefCount, r4.getRefCount()); sr4.close(); - assertEquals(baseRefCount-1, r4.getRefCount()); - + assertEquals(baseRefCount - 1, r4.getRefCount()); - SolrQueryRequest sr5 = req("q","foo"); + SolrQueryRequest sr5 = req("q", "foo"); IndexReaderContext rCtx5 = sr5.getSearcher().getTopReaderContext(); assertU(delI("1")); assertU(commit()); - SolrQueryRequest sr6 = req("q","foo"); + SolrQueryRequest sr6 = req("q", "foo"); IndexReaderContext rCtx6 = sr6.getSearcher().getTopReaderContext(); - assertEquals(1, rCtx6.leaves().get(0).reader().numDocs()); // only a single doc left in the first segment - assertTrue( !rCtx5.leaves().get(0).reader().equals(rCtx6.leaves().get(0).reader()) ); // readers now different + assertEquals( + 1, rCtx6.leaves().get(0).reader().numDocs()); // only a single doc left in the first segment + assertTrue( + !rCtx5 + .leaves() + .get(0) + .reader() + .equals(rCtx6.leaves().get(0).reader())); // readers now different sr5.close(); sr6.close(); } - // make sure we don't leak searchers (SOLR-3391) public void testCloses() { - assertU(adoc("id","1")); - assertU(commit("openSearcher","false")); // this was enough to trigger SOLR-3391 + assertU(adoc("id", "1")); + assertU(commit("openSearcher", "false")); // this was enough to trigger SOLR-3391 int maxDoc = random().nextInt(20) + 1; // test different combinations of commits - for (int i=0; i<100; i++) { + for (int i = 0; i < 100; i++) { if (random().nextInt(100) < 50) { String id = Integer.toString(random().nextInt(maxDoc)); - assertU(adoc("id",id)); + assertU(adoc("id", id)); } else { boolean soft = random().nextBoolean(); boolean optimize = random().nextBoolean(); boolean openSearcher = random().nextBoolean(); if (optimize) { - assertU(optimize("openSearcher",""+openSearcher, "softCommit",""+soft)); + assertU(optimize("openSearcher", "" + openSearcher, "softCommit", "" + soft)); }
else { - assertU(commit("openSearcher",""+openSearcher, "softCommit",""+soft)); + assertU(commit("openSearcher", "" + openSearcher, "softCommit", "" + soft)); } } } - } - + public void testSearcherListeners() throws Exception { MockSearchComponent.registerSlowSearcherListener = false; - + MockSearchComponent.registerFirstSearcherListener = false; MockSearchComponent.registerNewSearcherListener = false; createCoreAndValidateListeners(0, 0, 0, 0); - + MockSearchComponent.registerFirstSearcherListener = true; MockSearchComponent.registerNewSearcherListener = false; createCoreAndValidateListeners(1, 1, 1, 1); - + MockSearchComponent.registerFirstSearcherListener = true; MockSearchComponent.registerNewSearcherListener = true; createCoreAndValidateListeners(1, 1, 2, 1); } - - private void createCoreAndValidateListeners(int numTimesCalled, int numTimesCalledFirstSearcher, - int numTimesCalledAfterGetSearcher, int numTimesCalledFirstSearcherAfterGetSearcher) throws Exception { + + private void createCoreAndValidateListeners( + int numTimesCalled, + int numTimesCalledFirstSearcher, + int numTimesCalledAfterGetSearcher, + int numTimesCalledFirstSearcherAfterGetSearcher) + throws Exception { CoreContainer cores = h.getCoreContainer(); CoreDescriptor cd = h.getCore().getCoreDescriptor(); SolrCore newCore = null; // reset counters MockSearcherListener.numberOfTimesCalled = new AtomicInteger(); MockSearcherListener.numberOfTimesCalledFirstSearcher = new AtomicInteger(); - + try { // Create a new core, this should call all the firstSearcherListeners - newCore = cores.create("core1", cd.getInstanceDir(), ImmutableMap.of("config", "solrconfig-searcher-listeners1.xml"), false); - - //validate that the new core was created with the correct solrconfig + newCore = + cores.create( + "core1", + cd.getInstanceDir(), + ImmutableMap.of("config", "solrconfig-searcher-listeners1.xml"), + false); + + // validate that the new core was created with the correct solrconfig assertNotNull(newCore.getSearchComponent("mock")); assertEquals(MockSearchComponent.class, newCore.getSearchComponent("mock").getClass()); assertFalse(newCore.getSolrConfig().useColdSearcher); - + doQuery(newCore); - + assertEquals(numTimesCalled, MockSearcherListener.numberOfTimesCalled.get()); - assertEquals(numTimesCalledFirstSearcher, MockSearcherListener.numberOfTimesCalledFirstSearcher.get()); - + assertEquals( + numTimesCalledFirstSearcher, MockSearcherListener.numberOfTimesCalledFirstSearcher.get()); + addDummyDoc(newCore); - + // Open a new searcher, this should call the newSearcherListeners @SuppressWarnings("unchecked") Future[] future = (Future[]) Array.newInstance(Future.class, 1); newCore.getSearcher(true, false, future); future[0].get(); - + assertEquals(numTimesCalledAfterGetSearcher, MockSearcherListener.numberOfTimesCalled.get()); - assertEquals(numTimesCalledFirstSearcherAfterGetSearcher, MockSearcherListener.numberOfTimesCalledFirstSearcher.get()); - + assertEquals( + numTimesCalledFirstSearcherAfterGetSearcher, + MockSearcherListener.numberOfTimesCalledFirstSearcher.get()); + } finally { if (newCore != null) { cores.unload("core1"); } } } - + private void doQuery(SolrCore core) throws Exception { DirectSolrConnection connection = new DirectSolrConnection(core); ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); - assertTrue(connection.request("/select",params, null ).contains("<int name=\"status\">0</int>")); + assertTrue( + connection.request("/select", params, null).contains("<int name=\"status\">0</int>")); } public void testDontUseColdSearcher()
throws Exception { @@ -279,44 +296,51 @@ public void testDontUseColdSearcher() throws Exception { final AtomicBoolean querySucceeded = new AtomicBoolean(false); SlowSearcherListener.numberOfTimesCalled = new AtomicInteger(0); SlowSearcherListener.latch = new CountDownLatch(1); - + CoreContainer cores = h.getCoreContainer(); CoreDescriptor cd = h.getCore().getCoreDescriptor(); final SolrCore newCore; boolean coreCreated = false; try { // Create a new core, this should call all the firstSearcherListeners - newCore = cores.create("core1", cd.getInstanceDir(), ImmutableMap.of("config", "solrconfig-searcher-listeners1.xml"), false); + newCore = + cores.create( + "core1", + cd.getInstanceDir(), + ImmutableMap.of("config", "solrconfig-searcher-listeners1.xml"), + false); coreCreated = true; - - //validate that the new core was created with the correct solrconfig + + // validate that the new core was created with the correct solrconfig assertNotNull(newCore.getSearchComponent("mock")); assertEquals(MockSearchComponent.class, newCore.getSearchComponent("mock").getClass()); assertFalse(newCore.getSolrConfig().useColdSearcher); - - Thread t = new Thread() { - public void run() { - try { - doQuery(newCore); - querySucceeded.set(true); - } catch (Exception e) { - throw new RuntimeException(e); - } - }; - }; + + Thread t = + new Thread() { + public void run() { + try { + doQuery(newCore); + querySucceeded.set(true); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + ; + }; t.start(); - + if (System.getProperty(SYSPROP_NIGHTLY) != null) { // even if we wait here, the SearcherListener should not finish Thread.sleep(500); } - // validate that the searcher warmer didn't finish yet. + // validate that the searcher warmer didn't finish yet. assertEquals(0, SlowSearcherListener.numberOfTimesCalled.get()); assertFalse("Query should be waiting for warming to finish", querySucceeded.get()); - - // Let warmer finish + + // Let warmer finish SlowSearcherListener.latch.countDown(); - + // Validate that the query eventually succeeds for (int i = 0; i <= 1000; i++) { if (querySucceeded.get()) { @@ -327,15 +351,15 @@ public void run() { } Thread.sleep(10); } - + } finally { - + if (coreCreated) { cores.unload("core1"); } } } - + public void testUseColdSearcher() throws Exception { MockSearchComponent.registerFirstSearcherListener = false; MockSearchComponent.registerNewSearcherListener = false; @@ -343,8 +367,7 @@ public void testUseColdSearcher() throws Exception { final AtomicBoolean querySucceeded = new AtomicBoolean(false); SlowSearcherListener.numberOfTimesCalled = new AtomicInteger(0); SlowSearcherListener.latch = new CountDownLatch(1); - - + CoreContainer cores = h.getCoreContainer(); CoreDescriptor cd = h.getCore().getCoreDescriptor(); final SolrCore newCore; @@ -352,26 +375,33 @@ public void testUseColdSearcher() throws Exception { try { System.setProperty("tests.solr.useColdSearcher", "true"); // Create a new core, this should call all the firstSearcherListeners - newCore = cores.create("core1", cd.getInstanceDir(), ImmutableMap.of("config", "solrconfig-searcher-listeners1.xml"), false); + newCore = + cores.create( + "core1", + cd.getInstanceDir(), + ImmutableMap.of("config", "solrconfig-searcher-listeners1.xml"), + false); coreCreated = true; - - //validate that the new core was created with the correct solrconfig + + // validate that the new core was created with the correct solrconfig assertNotNull(newCore.getSearchComponent("mock")); assertEquals(MockSearchComponent.class, 
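The synchronization idiom in testDontUseColdSearcher, isolated for clarity: the slow first-searcher listener parks on a CountDownLatch, so a query issued from another thread must block until the latch is released (useColdSearcher=false means queries wait for warming). A minimal sketch of that shape, with the blocked doQuery(newCore) stood in by an await:

    CountDownLatch warming = new CountDownLatch(1);
    AtomicBoolean querySucceeded = new AtomicBoolean(false);

    Thread query =
        new Thread(
            () -> {
              try {
                warming.await(10, TimeUnit.SECONDS); // stands in for the blocked doQuery(newCore)
                querySucceeded.set(true);
              } catch (InterruptedException e) {
                throw new RuntimeException(e);
              }
            });
    query.start();

    assertFalse("query should still be blocked on warming", querySucceeded.get());
    warming.countDown(); // releases SlowSearcherListener in the real test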
newCore.getSearchComponent("mock").getClass()); assertTrue(newCore.getSolrConfig().useColdSearcher); - - Thread t = new Thread() { - public void run() { - try { - doQuery(newCore); - querySucceeded.set(true); - } catch (Exception e) { - throw new RuntimeException(e); - } - }; - }; + + Thread t = + new Thread() { + public void run() { + try { + doQuery(newCore); + querySucceeded.set(true); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + ; + }; t.start(); - + // validate that the query runs before the searcher warmer finishes for (int i = 0; i <= 1000; i++) { if (querySucceeded.get()) { @@ -382,16 +412,15 @@ public void run() { } Thread.sleep(10); } - + assertEquals(0, SlowSearcherListener.numberOfTimesCalled.get()); - + } finally { System.getProperties().remove("tests.solr.useColdSearcher"); if (coreCreated) { SlowSearcherListener.latch.countDown(); cores.unload("core1"); } - } } @@ -406,7 +435,7 @@ public static class MockSearchComponent extends SearchComponent implements SolrC static boolean registerFirstSearcherListener = false; static boolean registerNewSearcherListener = false; static boolean registerSlowSearcherListener = false; - + @Override public void prepare(ResponseBuilder rb) throws IOException {} @@ -430,11 +459,10 @@ public void inform(SolrCore core) { core.registerFirstSearcherListener(new SlowSearcherListener()); } } - } - + static class MockSearcherListener implements SolrEventListener { - + static AtomicInteger numberOfTimesCalled; static AtomicInteger numberOfTimesCalledFirstSearcher; @@ -445,20 +473,19 @@ public void postCommit() {} public void postSoftCommit() {} @Override - public void newSearcher(SolrIndexSearcher newSearcher, - SolrIndexSearcher currentSearcher) { + public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher currentSearcher) { numberOfTimesCalled.incrementAndGet(); if (currentSearcher == null) { numberOfTimesCalledFirstSearcher.incrementAndGet(); } } } - + static class SlowSearcherListener implements SolrEventListener { - + static AtomicInteger numberOfTimesCalled; static CountDownLatch latch; - + @Override public void postCommit() {} @@ -466,10 +493,10 @@ public void postCommit() {} public void postSoftCommit() {} @Override - public void newSearcher(SolrIndexSearcher newSearcher, - SolrIndexSearcher currentSearcher) { + public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher currentSearcher) { try { - assert currentSearcher == null: "SlowSearcherListener should only be used as FirstSearcherListener"; + assert currentSearcher == null + : "SlowSearcherListener should only be used as FirstSearcherListener"; // simulate a slow searcher listener latch.await(10, TimeUnit.SECONDS); } catch (InterruptedException e) { diff --git a/solr/core/src/test/org/apache/solr/search/TestInitQParser.java b/solr/core/src/test/org/apache/solr/search/TestInitQParser.java index d20c1d6c5eb..585a56027b8 100644 --- a/solr/core/src/test/org/apache/solr/search/TestInitQParser.java +++ b/solr/core/src/test/org/apache/solr/search/TestInitQParser.java @@ -21,8 +21,8 @@ import org.junit.Test; /** - * Checking QParser plugin initialization, failing with NPE during Solr startup. - * Ensures that query is working by registered in solrconfig.xml "fail" query parser. + * Checking QParser plugin initialization, failing with NPE during Solr startup. Ensures that query + * is working by registered in solrconfig.xml "fail" query parser. 
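Aside on testUseColdSearcher above: it flips the configuration through a system property so that useColdSearcher resolves to true, in which case the very first query is served from the cold searcher and SlowSearcherListener has made zero progress by the time the query returns. The set/cleanup pattern, condensed:

    System.setProperty("tests.solr.useColdSearcher", "true");
    try {
      // create the core and query immediately; first-searcher warming may still be running
    } finally {
      System.getProperties().remove("tests.solr.useColdSearcher");
    }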
*/ public class TestInitQParser extends SolrTestCaseJ4 { private static void createIndex() { @@ -31,7 +31,8 @@ private static void createIndex() { assertU(adoc("id", "1", "text", v, "text_np", v)); v = "now cow"; assertU(adoc("id", "2", "text", v, "text_np", v)); - assertU(adoc("id", "3", "foo_s", "a ' \" \\ {! ) } ( { z")); // A value filled with special chars + assertU( + adoc("id", "3", "foo_s", "a ' \" \\ {! ) } ( { z")); // A value filled with special chars assertU(adoc("id", "10", "qqq_s", "X")); assertU(adoc("id", "11", "www_s", "X")); @@ -39,7 +40,6 @@ private static void createIndex() { assertU(adoc("id", "13", "eee_s", "'balance'")); assertU(commit()); - } @Override @@ -54,9 +54,6 @@ public void setUp() throws Exception { @Test public void testQueryParserInit() throws Exception { // should query using registered fail (defType=fail) QParser and match only one doc - assertQ(req("q", "id:1", "indent", "true", "defType", "fail") - , "//*[@numFound='1']" - ); + assertQ(req("q", "id:1", "indent", "true", "defType", "fail"), "//*[@numFound='1']"); } - } diff --git a/solr/core/src/test/org/apache/solr/search/TestLegacyNumericRangeQueryBuilder.java b/solr/core/src/test/org/apache/solr/search/TestLegacyNumericRangeQueryBuilder.java index ffa083ffd2c..140ece6680e 100644 --- a/solr/core/src/test/org/apache/solr/search/TestLegacyNumericRangeQueryBuilder.java +++ b/solr/core/src/test/org/apache/solr/search/TestLegacyNumericRangeQueryBuilder.java @@ -16,28 +16,27 @@ */ package org.apache.solr.search; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import org.apache.lucene.queryparser.xml.ParserException; import org.apache.lucene.search.Query; import org.apache.solr.SolrTestCase; import org.apache.solr.legacy.LegacyNumericRangeQuery; -import org.apache.lucene.queryparser.xml.ParserException; import org.w3c.dom.Document; import org.xml.sax.SAXException; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; - public class TestLegacyNumericRangeQueryBuilder extends SolrTestCase { public void testGetFilterHandleNumericParseErrorStrict() throws Exception { LegacyNumericRangeQueryBuilder filterBuilder = new LegacyNumericRangeQueryBuilder(); - String xml = ""; + String xml = + ""; Document doc = getDocumentFromString(xml); try { filterBuilder.getQuery(doc.getDocumentElement()); @@ -47,11 +46,12 @@ public void testGetFilterHandleNumericParseErrorStrict() throws Exception { fail("Expected to throw " + ParserException.class); } - @SuppressWarnings({"unchecked","rawtypes"}) + @SuppressWarnings({"unchecked", "rawtypes"}) public void testGetFilterInt() throws Exception { LegacyNumericRangeQueryBuilder filterBuilder = new LegacyNumericRangeQueryBuilder(); - String xml = ""; + String xml = + ""; Document doc = getDocumentFromString(xml); Query filter = filterBuilder.getQuery(doc.getDocumentElement()); assertTrue(filter instanceof LegacyNumericRangeQuery); @@ -63,7 +63,8 @@ public void testGetFilterInt() throws Exception { assertTrue(numRangeFilter.includesMin()); assertTrue(numRangeFilter.includesMax()); - String xml2 = ""; + String xml2 = + 
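Editorial note on TestLegacyNumericRangeQueryBuilder: the string literals above and below read as empty (""), which is extraction damage, not the real source. The angle-bracketed XML elements inside the quotes were stripped when this diff was captured; the original attribute values are unrecoverable here and are left blank. For orientation only, each literal held a LegacyNumericRangeQuery element along these hypothetical lines (field name and bounds invented for illustration):

    // Illustrative shape only; the actual attribute values were lost in extraction.
    String xml =
        "<LegacyNumericRangeQuery fieldName='AGE' type='int' lowerTerm='-1' upperTerm='10'/>";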
""; Document doc2 = getDocumentFromString(xml2); Query filter2 = filterBuilder.getQuery(doc2.getDocumentElement()); assertTrue(filter2 instanceof LegacyNumericRangeQuery); @@ -76,11 +77,12 @@ public void testGetFilterInt() throws Exception { assertFalse(numRangeFilter2.includesMax()); } - @SuppressWarnings({"unchecked","rawtypes"}) + @SuppressWarnings({"unchecked", "rawtypes"}) public void testGetFilterLong() throws Exception { LegacyNumericRangeQueryBuilder filterBuilder = new LegacyNumericRangeQueryBuilder(); - String xml = ""; + String xml = + ""; Document doc = getDocumentFromString(xml); Query filter = filterBuilder.getQuery(doc.getDocumentElement()); assertTrue(filter instanceof LegacyNumericRangeQuery); @@ -91,7 +93,8 @@ public void testGetFilterLong() throws Exception { assertTrue(numRangeFilter.includesMin()); assertTrue(numRangeFilter.includesMax()); - String xml2 = ""; + String xml2 = + ""; Document doc2 = getDocumentFromString(xml2); Query filter2 = filterBuilder.getQuery(doc2.getDocumentElement()); assertTrue(filter2 instanceof LegacyNumericRangeQuery); @@ -104,11 +107,12 @@ public void testGetFilterLong() throws Exception { assertFalse(numRangeFilter2.includesMax()); } - @SuppressWarnings({"unchecked","rawtypes"}) + @SuppressWarnings({"unchecked", "rawtypes"}) public void testGetFilterDouble() throws Exception { LegacyNumericRangeQueryBuilder filterBuilder = new LegacyNumericRangeQueryBuilder(); - String xml = ""; + String xml = + ""; Document doc = getDocumentFromString(xml); Query filter = filterBuilder.getQuery(doc.getDocumentElement()); @@ -121,7 +125,8 @@ public void testGetFilterDouble() throws Exception { assertTrue(numRangeFilter.includesMin()); assertTrue(numRangeFilter.includesMax()); - String xml2 = ""; + String xml2 = + ""; Document doc2 = getDocumentFromString(xml2); Query filter2 = filterBuilder.getQuery(doc2.getDocumentElement()); assertTrue(filter2 instanceof LegacyNumericRangeQuery); @@ -134,11 +139,12 @@ public void testGetFilterDouble() throws Exception { assertFalse(numRangeFilter2.includesMax()); } - @SuppressWarnings({"unchecked","rawtypes"}) + @SuppressWarnings({"unchecked", "rawtypes"}) public void testGetFilterFloat() throws Exception { LegacyNumericRangeQueryBuilder filterBuilder = new LegacyNumericRangeQueryBuilder(); - String xml = ""; + String xml = + ""; Document doc = getDocumentFromString(xml); Query filter = filterBuilder.getQuery(doc.getDocumentElement()); @@ -151,7 +157,8 @@ public void testGetFilterFloat() throws Exception { assertTrue(numRangeFilter.includesMin()); assertTrue(numRangeFilter.includesMax()); - String xml2 = ""; + String xml2 = + ""; Document doc2 = getDocumentFromString(xml2); Query filter2 = filterBuilder.getQuery(doc2.getDocumentElement()); @@ -175,5 +182,4 @@ private static Document getDocumentFromString(String str) is.close(); return doc; } - } diff --git a/solr/core/src/test/org/apache/solr/search/TestMaxScoreQueryParser.java b/solr/core/src/test/org/apache/solr/search/TestMaxScoreQueryParser.java index 796e8c4c241..6c4aaeba0ea 100644 --- a/solr/core/src/test/org/apache/solr/search/TestMaxScoreQueryParser.java +++ b/solr/core/src/test/org/apache/solr/search/TestMaxScoreQueryParser.java @@ -16,6 +16,13 @@ */ package org.apache.solr.search; +import static org.hamcrest.Matchers.hasItem; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.stream.Collectors; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; 
import org.apache.lucene.search.BooleanQuery; @@ -33,14 +40,6 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.stream.Collectors; - -import static org.hamcrest.Matchers.hasItem; - public class TestMaxScoreQueryParser extends SolrTestCaseJ4 { Query q; BooleanClause[] clauses; @@ -90,12 +89,12 @@ public void testPureMax() { clauses = clauses(q); assertEquals(1, clauses.length); assertTrue(clauses[0].getQuery() instanceof DisjunctionMaxQuery); - assertEquals(0.0, ((DisjunctionMaxQuery) clauses[0].getQuery()).getTieBreakerMultiplier(), 1e-15); + assertEquals( + 0.0, ((DisjunctionMaxQuery) clauses[0].getQuery()).getTieBreakerMultiplier(), 1e-15); Collection qa = ((DisjunctionMaxQuery) clauses[0].getQuery()).getDisjuncts(); assertEquals(2, qa.size()); - final Collection qaStrings = qa.stream() - .map(q -> q.toString()) - .collect(Collectors.toList()); + final Collection qaStrings = + qa.stream().map(q -> q.toString()).collect(Collectors.toList()); org.hamcrest.MatcherAssert.assertThat(qaStrings, hasItem("text:foo")); } @@ -116,7 +115,8 @@ public void testTie() { clauses = clauses(q); assertEquals(1, clauses.length); assertTrue(clauses[0].getQuery() instanceof DisjunctionMaxQuery); - assertEquals(0.5, ((DisjunctionMaxQuery) clauses[0].getQuery()).getTieBreakerMultiplier(), 1e-15); + assertEquals( + 0.5, ((DisjunctionMaxQuery) clauses[0].getQuery()).getTieBreakerMultiplier(), 1e-15); } @Test @@ -132,11 +132,27 @@ public void testBoost() { assertEquals(2, clauses.length); assertTrue(clauses[0].getQuery() instanceof DisjunctionMaxQuery); DisjunctionMaxQuery dmq = ((DisjunctionMaxQuery) clauses[0].getQuery()); - Query fooClause = ((BooleanQuery)dmq.getDisjuncts().stream().filter(q -> q.toString().contains("foo")).findFirst().get()) - .clauses().iterator().next().getQuery(); + Query fooClause = + ((BooleanQuery) + dmq.getDisjuncts().stream() + .filter(q -> q.toString().contains("foo")) + .findFirst() + .get()) + .clauses() + .iterator() + .next() + .getQuery(); assertEquals(5.0, ((BoostQuery) fooClause).getBoost(), 1e-15); - Query barClause = ((BooleanQuery)dmq.getDisjuncts().stream().filter(q -> q.toString().contains("bar")).findFirst().get()) - .clauses().iterator().next().getQuery(); + Query barClause = + ((BooleanQuery) + dmq.getDisjuncts().stream() + .filter(q -> q.toString().contains("bar")) + .findFirst() + .get()) + .clauses() + .iterator() + .next() + .getQuery(); assertEquals(6.0, ((BoostQuery) barClause).getBoost(), 1e-15); assertEquals(7.0, ((BoostQuery) clauses[1].getQuery()).getBoost(), 1e-15); assertFalse(q instanceof BoostQuery); @@ -147,11 +163,27 @@ public void testBoost() { assertEquals(1, clauses.length); assertTrue(clauses[0].getQuery() instanceof DisjunctionMaxQuery); dmq = ((DisjunctionMaxQuery) clauses[0].getQuery()); - fooClause = ((BooleanQuery)dmq.getDisjuncts().stream().filter(q -> q.toString().contains("foo")).findFirst().get()) - .clauses().iterator().next().getQuery(); + fooClause = + ((BooleanQuery) + dmq.getDisjuncts().stream() + .filter(q -> q.toString().contains("foo")) + .findFirst() + .get()) + .clauses() + .iterator() + .next() + .getQuery(); assertEquals(2.0, ((BoostQuery) fooClause).getBoost(), 1e-15); - barClause = ((BooleanQuery)dmq.getDisjuncts().stream().filter(q -> q.toString().contains("bar")).findFirst().get()) - .clauses().iterator().next().getQuery(); + barClause = + ((BooleanQuery) + dmq.getDisjuncts().stream() + 
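The deeply indented stream chains in testBoost all do the same thing: pick the disjunct of a DisjunctionMaxQuery whose text contains a marker term, then unwrap its single BooleanClause to reach the BoostQuery. As a named helper (a sketch; the test keeps them inline):

    private static Query firstClauseOfDisjunctContaining(DisjunctionMaxQuery dmq, String marker) {
      BooleanQuery bq =
          (BooleanQuery)
              dmq.getDisjuncts().stream()
                  .filter(q -> q.toString().contains(marker))
                  .findFirst()
                  .get();
      return bq.clauses().iterator().next().getQuery();
    }

With that, each assertion reduces to assertEquals(5.0, ((BoostQuery) firstClauseOfDisjunctContaining(dmq, "foo")).getBoost(), 1e-15).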
.filter(q -> q.toString().contains("bar")) + .findFirst() + .get()) + .clauses() + .iterator() + .next() + .getQuery(); assertFalse(barClause instanceof BoostQuery); assertEquals(3.0, ((BoostQuery) q).getBoost(), 1e-15); } @@ -164,12 +196,14 @@ private Query parse(String q, String... params) { try { ModifiableSolrParams p = new ModifiableSolrParams(); ArrayList al = new ArrayList<>(Arrays.asList(params)); - while(al.size() >= 2) { + while (al.size() >= 2) { p.add(al.remove(0), al.remove(0)); } - return new MaxScoreQParser(q, p, new MapSolrParams(Collections.singletonMap("df", "text")), req(q)).parse(); + return new MaxScoreQParser( + q, p, new MapSolrParams(Collections.singletonMap("df", "text")), req(q)) + .parse(); } catch (SyntaxError syntaxError) { - fail("Failed with exception "+syntaxError.getMessage()); + fail("Failed with exception " + syntaxError.getMessage()); } fail("Parse failed"); return null; diff --git a/solr/core/src/test/org/apache/solr/search/TestMinHashQParser.java b/solr/core/src/test/org/apache/solr/search/TestMinHashQParser.java index 78027cb87b8..d8f678cb3f1 100644 --- a/solr/core/src/test/org/apache/solr/search/TestMinHashQParser.java +++ b/solr/core/src/test/org/apache/solr/search/TestMinHashQParser.java @@ -31,9 +31,7 @@ public class TestMinHashQParser extends SolrTestCaseJ4 { - /** - * Initializes core and does some sanity checking of schema - */ + /** Initializes core and does some sanity checking of schema */ @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig-minhash.xml", "schema-minhash.xml"); @@ -57,20 +55,32 @@ public void testBandSize() { assertEquals(5, MinHashQParser.computeBandSize(100, 0.2, 0.005)); } - @Test public void testAnalysedMinHash() { - assertU(adoc("id", "doc_1", "min_hash_analysed", "Min Hashing is great for spotted strings of exact matching words")); - assertU(adoc("id", "doc_2", "min_hash_analysed", "Min Hashing is great for rabbits who like to spot strings of exact matching words")); + assertU( + adoc( + "id", + "doc_1", + "min_hash_analysed", + "Min Hashing is great for spotted strings of exact matching words")); + assertU( + adoc( + "id", + "doc_2", + "min_hash_analysed", + "Min Hashing is great for rabbits who like to spot strings of exact matching words")); assertU(commit()); String gQuery = "*:*"; SolrQueryRequest qr = createRequest(gQuery); assertQ(qr, "//*[@numFound='2']"); - gQuery = "{!minhash field=\"min_hash_analysed\"}Min Hashing is great for spotted strings of exact matching words"; + gQuery = + "{!minhash field=\"min_hash_analysed\"}Min Hashing is great for spotted strings of exact matching words"; qr = createRequest(gQuery); - assertQ(qr, "//*[@numFound='2']", + assertQ( + qr, + "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.='doc_1']", "//result/doc[1]/float[@name='score'][.=512.0]", "//result/doc[2]/str[@name='id'][.='doc_2']", @@ -78,46 +88,78 @@ public void testAnalysedMinHash() { gQuery = "{!minhash field=\"min_hash_analysed\"}Min Hashing is great for"; qr = createRequest(gQuery); - assertQ(qr, "//*[@numFound='2']", + assertQ( + qr, + "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.='doc_1']", "//result/doc[1]/float[@name='score'][.=512.0]", "//result/doc[2]/str[@name='id'][.='doc_2']", "//result/doc[2]/float[@name='score'][.=512.0]"); - gQuery = "{!minhash field=\"min_hash_analysed\" sim=\"0.9\" tp=\"0.9\"}Min Hashing is great for spotted strings of exact matching words"; + gQuery = + "{!minhash field=\"min_hash_analysed\" sim=\"0.9\" tp=\"0.9\"}Min Hashing is great 
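Context for the computeBandSize assertions above: MinHash LSH splits the numHash signature values into bands of rows, and the classic estimate says two documents with Jaccard similarity s agree in at least one of b bands of r rows with probability 1 - (1 - s^r)^b. Whether computeBandSize uses exactly this closed form is not shown in this diff; the sketch below is the textbook model, for intuition only:

    // Textbook LSH banding estimate (assumption: not necessarily the exact
    // formula inside MinHashQParser.computeBandSize).
    static double matchProbability(double sim, int rowsPerBand, int bands) {
      return 1.0 - Math.pow(1.0 - Math.pow(sim, rowsPerBand), bands);
    }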
for spotted strings of exact matching words"; qr = createRequest(gQuery); - assertQ(qr, "//*[@numFound='2']", + assertQ( + qr, + "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.='doc_1']", "//result/doc[1]/float[@name='score'][.=23.0]", "//result/doc[2]/str[@name='id'][.='doc_2']", "//result/doc[2]/float[@name='score'][.=10.0]"); - gQuery = "{!minhash field=\"min_hash_analysed\" sim=\"0.9\"}Min Hashing is great for spotted strings of exact matching words"; + gQuery = + "{!minhash field=\"min_hash_analysed\" sim=\"0.9\"}Min Hashing is great for spotted strings of exact matching words"; qr = createRequest(gQuery); - assertQ(qr, "//*[@numFound='1']", + assertQ( + qr, + "//*[@numFound='1']", "//result/doc[1]/str[@name='id'][.='doc_1']", "//result/doc[1]/float[@name='score'][.=512.0]"); - gQuery = "{!minhash field=\"min_hash_analysed\" sim=\"0.9\" analyzer_field=\"min_hash_analysed\"}Min Hashing is great for spotted strings of exact matching words"; + gQuery = + "{!minhash field=\"min_hash_analysed\" sim=\"0.9\" analyzer_field=\"min_hash_analysed\"}Min Hashing is great for spotted strings of exact matching words"; qr = createRequest(gQuery); - assertQ(qr, "//*[@numFound='1']", + assertQ( + qr, + "//*[@numFound='1']", "//result/doc[1]/str[@name='id'][.='doc_1']", "//result/doc[1]/float[@name='score'][.=512.0]"); - gQuery = "{!minhash field=\"min_hash_analysed\" sim=\"0.9\" analyzer_field=\"min_hash_string\"}Min Hashing is great for spotted strings of exact matching words"; + gQuery = + "{!minhash field=\"min_hash_analysed\" sim=\"0.9\" analyzer_field=\"min_hash_string\"}Min Hashing is great for spotted strings of exact matching words"; qr = createRequest(gQuery); assertQ(qr, "//*[@numFound='0']"); } @Test public void testPreAnalysedMinHash() { - assertU(adoc("id", "doc_1", "min_hash_string", "HASH1", "min_hash_string", "HASH2", "min_hash_string", "HASH3")); - assertU(adoc("id", "doc_2", "min_hash_string", "HASH1", "min_hash_string", "HASH2", "min_hash_string", "HASH4")); + assertU( + adoc( + "id", + "doc_1", + "min_hash_string", + "HASH1", + "min_hash_string", + "HASH2", + "min_hash_string", + "HASH3")); + assertU( + adoc( + "id", + "doc_2", + "min_hash_string", + "HASH1", + "min_hash_string", + "HASH2", + "min_hash_string", + "HASH4")); assertU(commit()); String gQuery = "*:*"; SolrQueryRequest qr = createRequest(gQuery); - assertQ(qr, "//*[@numFound='2']", + assertQ( + qr, + "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.='doc_1']", "//result/doc[1]/float[@name='score'][.=1.0]", "//result/doc[2]/str[@name='id'][.='doc_2']", @@ -125,16 +167,19 @@ public void testPreAnalysedMinHash() { gQuery = "{!minhash field=\"min_hash_string\"}HASH1"; qr = createRequest(gQuery); - assertQ(qr, "//*[@numFound='2']", + assertQ( + qr, + "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.='doc_1']", "//result/doc[1]/float[@name='score'][.=1.0]", "//result/doc[2]/str[@name='id'][.='doc_2']", "//result/doc[2]/float[@name='score'][.=1.0]"); - gQuery = "{!minhash field=\"min_hash_string\" sep=\",\"}HASH1,HASH2,HASH3"; qr = createRequest(gQuery); - assertQ(qr, "//*[@numFound='2']", + assertQ( + qr, + "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.='doc_1']", "//result/doc[1]/float[@name='score'][.=3.0]", "//result/doc[2]/str[@name='id'][.='doc_2']", @@ -144,13 +189,33 @@ public void testPreAnalysedMinHash() { @Test public void testNestedQuery() { - assertU(adoc("id", "doc_1", "min_hash_string", "HASH1", "min_hash_string", "HASH2", "min_hash_string", "HASH3")); - assertU(adoc("id", 
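For reference, the local-params surface these minhash tests exercise: field (the minhash field), sim (minimum similarity to keep a match), tp (required true-positive rate), analyzer_field (borrow another field's analyzer for the query text), and sep (split pre-hashed input on a separator instead of analyzing it). A representative query from the assertions above:

    String gQuery =
        "{!minhash field=\"min_hash_analysed\" sim=\"0.9\" tp=\"0.9\"}"
            + "Min Hashing is great for spotted strings of exact matching words";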
"doc_2", "min_hash_string", "HASH1", "min_hash_string", "HASH2", "min_hash_string", "HASH4")); + assertU( + adoc( + "id", + "doc_1", + "min_hash_string", + "HASH1", + "min_hash_string", + "HASH2", + "min_hash_string", + "HASH3")); + assertU( + adoc( + "id", + "doc_2", + "min_hash_string", + "HASH1", + "min_hash_string", + "HASH2", + "min_hash_string", + "HASH4")); assertU(commit()); String gQuery = "*:*"; SolrQueryRequest qr = createRequest(gQuery); - assertQ(qr, "//*[@numFound='2']", + assertQ( + qr, + "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.='doc_1']", "//result/doc[1]/float[@name='score'][.=1.0]", "//result/doc[2]/str[@name='id'][.='doc_2']", @@ -158,22 +223,29 @@ public void testNestedQuery() { gQuery = "*:* AND _query_:{!minhash field=\"min_hash_string\" sep=\",\"}HASH3"; qr = createRequest(gQuery); - assertQ(qr, "//*[@numFound='1']", + assertQ( + qr, + "//*[@numFound='1']", "//result/doc[1]/str[@name='id'][.='doc_1']", "//result/doc[1]/float[@name='score'][.=2.0]"); - gQuery = "*:* AND _query_:{!minhash field=\"min_hash_string\" sep=\",\" sep=\"0.9\" tp=\"0.9\"}HASH3"; + gQuery = + "*:* AND _query_:{!minhash field=\"min_hash_string\" sep=\",\" sep=\"0.9\" tp=\"0.9\"}HASH3"; qr = createRequest(gQuery); - assertQ(qr, "//*[@numFound='1']", + assertQ( + qr, + "//*[@numFound='1']", "//result/doc[1]/str[@name='id'][.='doc_1']", "//result/doc[1]/float[@name='score'][.=2.0]"); - gQuery = "*:* AND _query_:{!minhash field=\"min_hash_string\" sep=\",\" sep=\"0.1\" tp=\"0.1\"}HASH3"; + gQuery = + "*:* AND _query_:{!minhash field=\"min_hash_string\" sep=\",\" sep=\"0.1\" tp=\"0.1\"}HASH3"; qr = createRequest(gQuery); - assertQ(qr, "//*[@numFound='1']", + assertQ( + qr, + "//*[@numFound='1']", "//result/doc[1]/str[@name='id'][.='doc_1']", "//result/doc[1]/float[@name='score'][.=2.0]"); - } @Test @@ -186,7 +258,9 @@ public void testBasic() { String gQuery = "*:*"; SolrQueryRequest qr = createRequest(gQuery); - assertQ(qr, "//*[@numFound='3']", + assertQ( + qr, + "//*[@numFound='3']", "//result/doc[1]/str[@name='id'][.='doc_1']", "//result/doc[1]/float[@name='score'][.=1.0]", "//result/doc[2]/str[@name='id'][.='doc_2']", @@ -194,10 +268,11 @@ public void testBasic() { "//result/doc[3]/str[@name='id'][.='doc_3']", "//result/doc[3]/float[@name='score'][.=1.0]"); - gQuery = "{!minhash field=\"min_hash_analysed\"}woof woof woof woof woof puff"; qr = createRequest(gQuery); - assertQ(qr, "//*[@numFound='3']", + assertQ( + qr, + "//*[@numFound='3']", "//result/doc[1]/str[@name='id'][.='doc_2']", "//result/doc[1]/float[@name='score'][.=512.0]", "//result/doc[2]/str[@name='id'][.='doc_1']", @@ -207,7 +282,9 @@ public void testBasic() { gQuery = "{!minhash field=\"min_hash_analysed\" sep=\",\"}℁팽徭聙↝ꇁ홱杯,跻\uF7E1ꠅ�찼薷\uE24Eꔾ"; qr = createRequest(gQuery); - assertQ(qr, "//*[@numFound='3']", + assertQ( + qr, + "//*[@numFound='3']", "//result/doc[1]/str[@name='id'][.='doc_2']", "//result/doc[1]/float[@name='score'][.=2.0]", "//result/doc[2]/str[@name='id'][.='doc_1']", @@ -217,19 +294,22 @@ public void testBasic() { gQuery = "{!minhash field=\"min_hash_analysed\" analyzer_field=\"min_hash_string\"}℁팽徭聙↝ꇁ홱杯"; qr = createRequest(gQuery); - assertQ(qr, "//*[@numFound='2']", + assertQ( + qr, + "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.='doc_1']", "//result/doc[1]/float[@name='score'][.=1.0]", "//result/doc[2]/str[@name='id'][.='doc_2']", "//result/doc[2]/float[@name='score'][.=1.0]"); - } - @Test public void test() { - String[] parts = new String[]{"one", "two", "three", "four", "five", 
"six", "seven", "eight", "nine", "ten"}; + String[] parts = + new String[] { + "one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten" + }; for (int i = 0; i < parts.length; i++) { StringBuilder builder = new StringBuilder(); @@ -274,10 +354,12 @@ public void test() { qr = createRequest(gQuery); assertQ(qr, "//*[@numFound='6']"); - - gQuery = "{!minhash field=\"min_hash_analysed\"}one two three four five six seven eight nine ten"; + gQuery = + "{!minhash field=\"min_hash_analysed\"}one two three four five six seven eight nine ten"; qr = createRequest(gQuery); - assertQ(qr, "//*[@numFound='21']", + assertQ( + qr, + "//*[@numFound='21']", "//result/doc[1]/str[@name='id'][.='doc_0_9']", "//result/doc[1]/float[@name='score'][.=512.0]", "//result/doc[2]/str[@name='id'][.='doc_1_8']", @@ -300,7 +382,6 @@ public void test() { "//result/doc[10]/float[@name='score'][.=207.0]", "//result/doc[11]/str[@name='id'][.='doc_0_5']", "//result/doc[11]/float[@name='score'][.=181.0]", - "//result/doc[12]/str[@name='id'][.='doc_5_4']", "//result/doc[12]/float[@name='score'][.=171.0]", "//result/doc[13]/str[@name='id'][.='doc_2_6']", @@ -321,7 +402,7 @@ public void test() { "//result/doc[20]/float[@name='score'][.=57.0]" // "//result/doc[21]/str[@name='id'][.='doc_0_8']", // "//result/doc[21]/float[@name='score'][.=341.0]" - ); + ); } @Test @@ -333,15 +414,19 @@ public void testBandsWrap() throws SyntaxError { par.add("sep", ","); par.add("debug", "false"); - QParser qparser = h.getCore().getQueryPlugin("minhash").createParser("1, 2, 3, 4, 5, 6, 7, 8, 9, 10", SolrParams.toSolrParams(par), null, null); + QParser qparser = + h.getCore() + .getQueryPlugin("minhash") + .createParser( + "1, 2, 3, 4, 5, 6, 7, 8, 9, 10", SolrParams.toSolrParams(par), null, null); Query query = qparser.getQuery(); - BooleanQuery bq = (BooleanQuery)query; + BooleanQuery bq = (BooleanQuery) query; assertEquals(4, bq.clauses().size()); - for(BooleanClause clause : bq.clauses()) { - assertEquals(3, ((BooleanQuery)((ConstantScoreQuery)clause.getQuery()).getQuery()) .clauses().size()); + for (BooleanClause clause : bq.clauses()) { + assertEquals( + 3, ((BooleanQuery) ((ConstantScoreQuery) clause.getQuery()).getQuery()).clauses().size()); } - } private SolrQueryRequest createRequest(String query) { diff --git a/solr/core/src/test/org/apache/solr/search/TestMissingGroups.java b/solr/core/src/test/org/apache/solr/search/TestMissingGroups.java index 321296cc05c..f86dcd04f6d 100644 --- a/solr/core/src/test/org/apache/solr/search/TestMissingGroups.java +++ b/solr/core/src/test/org/apache/solr/search/TestMissingGroups.java @@ -16,18 +16,15 @@ */ package org.apache.solr.search; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrInputDocument; -import org.apache.lucene.util.TestUtil; - -import org.junit.BeforeClass; import org.junit.After; - -import java.util.Set; -import java.util.HashSet; -import java.util.List; -import java.util.ArrayList; - +import org.junit.BeforeClass; /** Inspired by LUCENE-5790 */ public class TestMissingGroups extends SolrTestCaseJ4 { @@ -45,28 +42,27 @@ public void cleanup() throws Exception { public void testGroupsOnMissingValues() throws Exception { - final int numDocs = atLeast(500); // setup some key values for some random docs in our index // every other doc will have no values for these fields // NOTE: special values may be randomly 
assigned to the *same* docs final List specials = new ArrayList(7); - specials.add(new SpecialField(numDocs, "group_s1", "xxx","yyy")); - specials.add(new SpecialField(numDocs, "group_ti", "42","24")); - specials.add(new SpecialField(numDocs, "group_td", "34.56","12.78")); - specials.add(new SpecialField(numDocs, "group_tl", "66666666","999999999")); - specials.add(new SpecialField(numDocs, "group_tf", "56.78","78.45")); + specials.add(new SpecialField(numDocs, "group_s1", "xxx", "yyy")); + specials.add(new SpecialField(numDocs, "group_ti", "42", "24")); + specials.add(new SpecialField(numDocs, "group_td", "34.56", "12.78")); + specials.add(new SpecialField(numDocs, "group_tl", "66666666", "999999999")); + specials.add(new SpecialField(numDocs, "group_tf", "56.78", "78.45")); specials.add(new SpecialField(numDocs, "group_b", "true", "false")); - specials.add(new SpecialField(numDocs, "group_tdt", - "2009-05-10T03:30:00Z","1976-03-06T15:06:00Z")); - + specials.add( + new SpecialField(numDocs, "group_tdt", "2009-05-10T03:30:00Z", "1976-03-06T15:06:00Z")); + // build up our index of docs - + for (int i = 1; i < numDocs; i++) { // NOTE: start at 1, doc#0 is below... SolrInputDocument d = sdoc("id", i); if (SpecialField.special_docids.contains(i)) { - d.addField("special_s","special"); + d.addField("special_s", "special"); for (SpecialField f : specials) { if (f.docX == i) { d.addField(f.field, f.valueX); @@ -84,70 +80,113 @@ public void testGroupsOnMissingValues() throws Exception { } } // doc#0: at least one doc that is guaranteed not special and has no chance of being filtered - assertU(adoc(sdoc("id","0"))); + assertU(adoc(sdoc("id", "0"))); assertU(commit()); // sanity check - assertQ(req("q", "*:*"), "//result[@numFound="+numDocs+"]"); - + assertQ(req("q", "*:*"), "//result[@numFound=" + numDocs + "]"); + for (SpecialField special : specials) { // sanity checks - assertQ(req("q", "{!term f=" + special.field + "}" + special.valueX), - "//result[@numFound=1]"); - assertQ(req("q", "{!term f=" + special.field + "}" + special.valueY), - "//result[@numFound=1]"); + assertQ( + req("q", "{!term f=" + special.field + "}" + special.valueX), "//result[@numFound=1]"); + assertQ( + req("q", "{!term f=" + special.field + "}" + special.valueY), "//result[@numFound=1]"); // group on special field, and confirm all docs w/o group field get put into a single group - final String xpre = "//lst[@name='grouped']/lst[@name='"+special.field+"']"; - assertQ(req("q", (random().nextBoolean() ? "*:*" : "special_s:special id:[0 TO 400]"), - "fq", (random().nextBoolean() ? "*:*" : "-filter_b:"+random().nextBoolean()), - "group","true", - "group.field",special.field, - "group.ngroups", "true") - // basic grouping checks - , xpre + "/int[@name='ngroups'][.='3']" - , xpre + "/arr[@name='groups'][count(lst)=3]" - // sanity check one group is the missing values - , xpre + "/arr[@name='groups']/lst/null[@name='groupValue']" - // check we have the correct groups for the special values with a single doc - , xpre + "/arr[@name='groups']/lst/*[@name='groupValue'][.='"+special.valueX+"']/following-sibling::result[@name='doclist'][@numFound=1]/doc/str[@name='id'][.="+special.docX+"]" - , xpre + "/arr[@name='groups']/lst/*[@name='groupValue'][.='"+special.valueY+"']/following-sibling::result[@name='doclist'][@numFound=1]/doc/str[@name='id'][.="+special.docY+"]" - ); + final String xpre = "//lst[@name='grouped']/lst[@name='" + special.field + "']"; + assertQ( + req( + "q", + (random().nextBoolean() ? 
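The grouping tests that follow lean on one behavior: documents with no value in the group field all land in a single group whose groupValue is null, and group.ngroups counts that null group. Stripped of the randomized q/fq permutations, each check has this shape:

    assertQ(
        req("q", "*:*", "group", "true", "group.field", "group_s1", "group.ngroups", "true"),
        "//lst[@name='grouped']/lst[@name='group_s1']/int[@name='ngroups'][.='3']",
        "//lst[@name='grouped']/lst[@name='group_s1']/arr[@name='groups']/lst/null[@name='groupValue']");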
"*:*" : "special_s:special id:[0 TO 400]"), + "fq", + (random().nextBoolean() ? "*:*" : "-filter_b:" + random().nextBoolean()), + "group", + "true", + "group.field", + special.field, + "group.ngroups", + "true") + // basic grouping checks + , + xpre + "/int[@name='ngroups'][.='3']", + xpre + "/arr[@name='groups'][count(lst)=3]" + // sanity check one group is the missing values + , + xpre + "/arr[@name='groups']/lst/null[@name='groupValue']" + // check we have the correct groups for the special values with a single doc + , + xpre + + "/arr[@name='groups']/lst/*[@name='groupValue'][.='" + + special.valueX + + "']/following-sibling::result[@name='doclist'][@numFound=1]/doc/str[@name='id'][.=" + + special.docX + + "]", + xpre + + "/arr[@name='groups']/lst/*[@name='groupValue'][.='" + + special.valueY + + "']/following-sibling::result[@name='doclist'][@numFound=1]/doc/str[@name='id'][.=" + + special.docY + + "]"); // now do the same check, but exclude one special doc to force only 2 groups final int doc = random().nextBoolean() ? special.docX : special.docY; final Object val = (doc == special.docX) ? special.valueX : special.valueY; - assertQ(req("q", (random().nextBoolean() ? "*:*" : "special_s:special id:[0 TO 400]"), - "fq", (random().nextBoolean() ? "*:*" : "-filter_b:"+random().nextBoolean()), - "fq", "-id:" + ((doc == special.docX) ? special.docY : special.docX), - "group","true", - "group.field",special.field, - "group.ngroups", "true") - // basic grouping checks - , xpre + "/int[@name='ngroups'][.='2']" - , xpre + "/arr[@name='groups'][count(lst)=2]" - // sanity check one group is the missing values - , xpre + "/arr[@name='groups']/lst/null[@name='groupValue']" - // check we have the correct group for the special value with a single doc - , xpre + "/arr[@name='groups']/lst/*[@name='groupValue'][.='"+val+"']/following-sibling::result[@name='doclist'][@numFound=1]/doc/str[@name='id'][.="+doc+"]" - ); + assertQ( + req( + "q", + (random().nextBoolean() ? "*:*" : "special_s:special id:[0 TO 400]"), + "fq", + (random().nextBoolean() ? "*:*" : "-filter_b:" + random().nextBoolean()), + "fq", + "-id:" + ((doc == special.docX) ? special.docY : special.docX), + "group", + "true", + "group.field", + special.field, + "group.ngroups", + "true") + // basic grouping checks + , + xpre + "/int[@name='ngroups'][.='2']", + xpre + "/arr[@name='groups'][count(lst)=2]" + // sanity check one group is the missing values + , + xpre + "/arr[@name='groups']/lst/null[@name='groupValue']" + // check we have the correct group for the special value with a single doc + , + xpre + + "/arr[@name='groups']/lst/*[@name='groupValue'][.='" + + val + + "']/following-sibling::result[@name='doclist'][@numFound=1]/doc/str[@name='id'][.=" + + doc + + "]"); // one last check, exclude both docs and verify the only group is the missing value group - assertQ(req("q", (random().nextBoolean() ? "*:*" : "special_s:special id:[0 TO 400]"), - "fq", (random().nextBoolean() ? "*:*" : "-filter_b:"+random().nextBoolean()), - "fq", "-id:" + special.docX, - "fq", "-id:" + special.docY, - "group","true", - "group.field",special.field, - "group.ngroups", "true") - // basic grouping checks - , xpre + "/int[@name='ngroups'][.='1']" - , xpre + "/arr[@name='groups'][count(lst)=1]" - // the only group should be the missing values - , xpre + "/arr[@name='groups']/lst/null[@name='groupValue']" - ); - - } + assertQ( + req( + "q", + (random().nextBoolean() ? "*:*" : "special_s:special id:[0 TO 400]"), + "fq", + (random().nextBoolean() ? 
"*:*" : "-filter_b:" + random().nextBoolean()), + "fq", + "-id:" + special.docX, + "fq", + "-id:" + special.docY, + "group", + "true", + "group.field", + special.field, + "group.ngroups", + "true") + // basic grouping checks + , + xpre + "/int[@name='ngroups'][.='1']", + xpre + "/arr[@name='groups'][count(lst)=1]" + // the only group should be the missing values + , + xpre + "/arr[@name='groups']/lst/null[@name='groupValue']"); + } } private static final class SpecialField { @@ -167,13 +206,14 @@ public SpecialField(int numDocs, String field, Object valueX, Object valueY) { this.valueX = valueX; this.valueY = valueY; - this.docX = TestUtil.nextInt(random(),1,numDocs-1); - this.docY = (docX < (numDocs / 2)) - ? TestUtil.nextInt(random(),docX+1,numDocs-1) - : TestUtil.nextInt(random(),1,docX-1); + this.docX = TestUtil.nextInt(random(), 1, numDocs - 1); + this.docY = + (docX < (numDocs / 2)) + ? TestUtil.nextInt(random(), docX + 1, numDocs - 1) + : TestUtil.nextInt(random(), 1, docX - 1); special_docids.add(docX); special_docids.add(docY); } - } + } } diff --git a/solr/core/src/test/org/apache/solr/search/TestMultiWordSynonyms.java b/solr/core/src/test/org/apache/solr/search/TestMultiWordSynonyms.java index 4d9feedce86..cb98f514ffa 100644 --- a/solr/core/src/test/org/apache/solr/search/TestMultiWordSynonyms.java +++ b/solr/core/src/test/org/apache/solr/search/TestMultiWordSynonyms.java @@ -18,7 +18,6 @@ package org.apache.solr.search; import java.util.Arrays; - import org.apache.lucene.search.Query; import org.apache.solr.SolrTestCaseJ4; import org.junit.BeforeClass; @@ -33,14 +32,14 @@ public static void beforeClass() throws Exception { } private static void index() throws Exception { - assertU(adoc("id","1", "text","USA Today")); - assertU(adoc("id","2", "text","A dynamic US economy")); - assertU(adoc("id","3", "text","The United States of America's 50 states")); - assertU(adoc("id","4", "text","Party in the U.S.A.")); - assertU(adoc("id","5", "text","These United States")); + assertU(adoc("id", "1", "text", "USA Today")); + assertU(adoc("id", "2", "text", "A dynamic US economy")); + assertU(adoc("id", "3", "text", "The United States of America's 50 states")); + assertU(adoc("id", "4", "text", "Party in the U.S.A.")); + assertU(adoc("id", "5", "text", "These United States")); - assertU(adoc("id","6", "text","America United of States")); - assertU(adoc("id","7", "text","States United")); + assertU(adoc("id", "6", "text", "America United of States")); + assertU(adoc("id", "7", "text", "States United")); assertU(commit()); } @@ -48,52 +47,43 @@ private static void index() throws Exception { @Test public void testNonPhrase() throws Exception { // Don't split on whitespace (sow=false) - for (String q : Arrays.asList("US", "U.S.", "USA", "U.S.A.", "United States", "United States of America")) { + for (String q : + Arrays.asList("US", "U.S.", "USA", "U.S.A.", "United States", "United States of America")) { for (String defType : Arrays.asList("lucene", "edismax")) { - assertJQ(req("q", q, - "defType", defType, - "df", "text", - "sow", "false") - , "/response/numFound==7" - ); + assertJQ( + req("q", q, "defType", defType, "df", "text", "sow", "false"), "/response/numFound==7"); } } // Split on whitespace (sow=true) for (String q : Arrays.asList("US", "U.S.", "USA", "U.S.A.")) { for (String defType : Arrays.asList("lucene", "edismax")) { - assertJQ(req("q", q, - "defType", defType, - "df", "text", - "sow", "true") - , "/response/numFound==7" - ); + assertJQ( + req("q", q, "defType", defType, "df", 
"text", "sow", "true"), "/response/numFound==7"); } } for (String q : Arrays.asList("United States", "United States of America")) { for (String defType : Arrays.asList("lucene", "edismax")) { - assertJQ(req("q", q, - "defType", defType, - "df", "text", - "sow", "true") - , "/response/numFound==4" - ); + assertJQ( + req("q", q, "defType", defType, "df", "text", "sow", "true"), "/response/numFound==4"); } } } @Test public void testPhrase() throws Exception { - for (String q : Arrays.asList - ("\"US\"", "\"U.S.\"", "\"USA\"", "\"U.S.A.\"", "\"United States\"", "\"United States of America\"")) { + for (String q : + Arrays.asList( + "\"US\"", + "\"U.S.\"", + "\"USA\"", + "\"U.S.A.\"", + "\"United States\"", + "\"United States of America\"")) { for (String defType : Arrays.asList("lucene", "edismax")) { for (String sow : Arrays.asList("true", "false")) { - assertJQ(req("q", q, - "defType", defType, - "df", "text", - "sow", sow) - , "/response/numFound==5" - ); + assertJQ( + req("q", q, "defType", defType, "df", "text", "sow", sow), "/response/numFound==5"); } } } @@ -109,8 +99,10 @@ public void testPf() throws Exception { assertU(adoc("id", "13", "text", "foo a b c bar", "boost_d", "4.0")); assertU(commit()); - assertQ("default order assumption wrong", - req("q", "foo bar", + assertQ( + "default order assumption wrong", + req( + "q", "foo bar", "qf", "text", "bf", "boost_d", "fl", "score,*", @@ -120,8 +112,10 @@ public void testPf() throws Exception { "//doc[3]/str[@name='id'][.='11']", "//doc[4]/str[@name='id'][.='10']"); - assertQ("default order assumption wrong", - req("q", "foo tropical cyclone", + assertQ( + "default order assumption wrong", + req( + "q", "foo tropical cyclone", "qf", "text", "bf", "boost_d", "fl", "score,*", @@ -131,8 +125,10 @@ public void testPf() throws Exception { "//doc[3]/str[@name='id'][.='11']", "//doc[4]/str[@name='id'][.='10']"); - assertQ("pf not working", - req("q", "foo bar", + assertQ( + "pf not working", + req( + "q", "foo bar", "qf", "text", "pf", "text^10", "fl", "score,*", @@ -140,8 +136,10 @@ public void testPf() throws Exception { "defType", "edismax"), "//doc[1]/str[@name='id'][.='10']"); - assertQ("pf not working", - req("q", "foo tropical cyclone", + assertQ( + "pf not working", + req( + "q", "foo tropical cyclone", "qf", "text", "pf", "text^10", "fl", "score,*", @@ -149,8 +147,10 @@ public void testPf() throws Exception { "defType", "edismax"), "//doc[1]/str[@name='id'][.='10']"); - assertQ("pf2 not working", - req("q", "foo bar", + assertQ( + "pf2 not working", + req( + "q", "foo bar", "qf", "text", "pf2", "text^10", "fl", "score,*", @@ -158,8 +158,10 @@ public void testPf() throws Exception { "defType", "edismax"), "//doc[1]/str[@name='id'][.='10']"); - assertQ("pf3 not working", - req("q", "a b bar", + assertQ( + "pf3 not working", + req( + "q", "a b bar", "qf", "text", "pf3", "text^10", "fl", "score,*", @@ -167,8 +169,10 @@ public void testPf() throws Exception { "defType", "edismax"), "//doc[1]/str[@name='id'][.='12']"); - assertQ("pf3 not working", - req("q", "a b tropical cyclone", + assertQ( + "pf3 not working", + req( + "q", "a b tropical cyclone", "qf", "text", "pf3", "text^10", "fl", "score,*", @@ -176,8 +180,10 @@ public void testPf() throws Exception { "defType", "edismax"), "//doc[1]/str[@name='id'][.='12']"); - assertQ("ps not working for pf2", - req("q", "bar foo", + assertQ( + "ps not working for pf2", + req( + "q", "bar foo", "qf", "text", "pf2", "text^10", "ps", "2", @@ -185,8 +191,10 @@ public void testPf() throws Exception { 
"defType", "edismax"), "//doc[1]/str[@name='id'][.='10']"); - assertQ("ps not working for pf2", - req("q", "tropical cyclone foo", + assertQ( + "ps not working for pf2", + req( + "q", "tropical cyclone foo", "qf", "text", "pf2", "text^10", "ps", "2", @@ -198,14 +206,37 @@ public void testPf() throws Exception { @Test public void testPf3WithReordering() throws Exception { // test pf3 and phrase slop - assertU(adoc("id", "20", "text", "chicken 1 2 3 4 5 pig 1 2 3 4 5 anteater bunny cow", "boost_d", "1.0")); + assertU( + adoc( + "id", + "20", + "text", + "chicken 1 2 3 4 5 pig 1 2 3 4 5 anteater bunny cow", + "boost_d", + "1.0")); assertU(adoc("id", "21", "text", "chicken anteater pig bunny cow", "boost_d", "2.0")); - assertU(adoc("id", "22", "text", "chicken 1 2 3 4 5 anteater bunny 1 2 3 4 5 pig cow", "boost_d", "3.0")); - assertU(adoc("id", "23", "text", "chicken 1 2 3 4 5 anteater bunny cow 1 2 3 4 5 pig", "boost_d", "4.0")); + assertU( + adoc( + "id", + "22", + "text", + "chicken 1 2 3 4 5 anteater bunny 1 2 3 4 5 pig cow", + "boost_d", + "3.0")); + assertU( + adoc( + "id", + "23", + "text", + "chicken 1 2 3 4 5 anteater bunny cow 1 2 3 4 5 pig", + "boost_d", + "4.0")); assertU(commit()); - assertQ("ps not working for pf3", - req("q", "anteater chicken pig", + assertQ( + "ps not working for pf3", + req( + "q", "anteater chicken pig", "qf", "text", "bf", "boost_d", "pf3", "text^10", @@ -216,17 +247,40 @@ public void testPf3WithReordering() throws Exception { "//doc[1]/str[@name='id'][.='21']"); } - @Test + @Test public void testPf3WithoutReordering() throws Exception { // test pf3 and phrase slop - assertU(adoc("id", "20", "text", "anteater 1 2 3 4 5 pig 1 2 3 4 5 chicken bunny pig", "boost_d", "1.0")); + assertU( + adoc( + "id", + "20", + "text", + "anteater 1 2 3 4 5 pig 1 2 3 4 5 chicken bunny pig", + "boost_d", + "1.0")); assertU(adoc("id", "21", "text", "anteater 1 2 chicken 1 2 pig bunny cow", "boost_d", "2.0")); - assertU(adoc("id", "22", "text", "chicken 1 2 3 4 5 anteater bunny 1 2 3 4 5 pig cow", "boost_d", "3.0")); - assertU(adoc("id", "23", "text", "chicken 1 2 3 4 5 anteater bunny cow 1 2 3 4 5 pig", "boost_d", "4.0")); + assertU( + adoc( + "id", + "22", + "text", + "chicken 1 2 3 4 5 anteater bunny 1 2 3 4 5 pig cow", + "boost_d", + "3.0")); + assertU( + adoc( + "id", + "23", + "text", + "chicken 1 2 3 4 5 anteater bunny cow 1 2 3 4 5 pig", + "boost_d", + "4.0")); assertU(commit()); - assertQ("ps not working for pf3", - req("q", "anteater chicken pig", + assertQ( + "ps not working for pf3", + req( + "q", "anteater chicken pig", "qf", "text", "bf", "boost_d", "pf3", "text^10", @@ -237,27 +291,54 @@ public void testPf3WithoutReordering() throws Exception { "//doc[1]/str[@name='id'][.='21']"); } - public void testEdismaxQueryParsing_multiTermWithPf_shouldParseCorrectPhraseQueries() throws Exception { - Query q = QParser.getParser("foo a b bar","edismax",true, - req(params("sow", "false","qf", "text^10","pf", "text^10","pf2", "text^5","pf3", "text^8"))).getQuery(); - assertEquals("+(" + - "((text:foo)^10.0) ((text:a)^10.0) ((text:b)^10.0) (((+text:tropical +text:cyclone) text:bar)^10.0)) " + - "((text:\"foo a b tropical cyclone\" text:\"foo a b bar\")^10.0) " + - "(((text:\"foo a\")^5.0) ((text:\"a b\")^5.0) ((text:\"b tropical cyclone\" text:\"b bar\")^5.0)) " + - "(((text:\"foo a b\")^8.0) ((text:\"a b tropical cyclone\" text:\"a b bar\")^8.0))", q.toString()); + public void testEdismaxQueryParsing_multiTermWithPf_shouldParseCorrectPhraseQueries() + throws Exception { + Query 
q = + QParser.getParser( + "foo a b bar", + "edismax", + true, + req( + params( + "sow", "false", "qf", "text^10", "pf", "text^10", "pf2", "text^5", "pf3", + "text^8"))) + .getQuery(); + assertEquals( + "+(" + + "((text:foo)^10.0) ((text:a)^10.0) ((text:b)^10.0) (((+text:tropical +text:cyclone) text:bar)^10.0)) " + + "((text:\"foo a b tropical cyclone\" text:\"foo a b bar\")^10.0) " + + "(((text:\"foo a\")^5.0) ((text:\"a b\")^5.0) ((text:\"b tropical cyclone\" text:\"b bar\")^5.0)) " + + "(((text:\"foo a b\")^8.0) ((text:\"a b tropical cyclone\" text:\"a b bar\")^8.0))", + q.toString()); - q = QParser.getParser("tropical cyclone foo a b ","edismax",true, req(params("qf", "text^10","pf", "text^10","pf2", "text^5","pf3", "text^8"))).getQuery(); - assertEquals("+(" + - "((text:bar (+text:tropical +text:cyclone))^10.0) ((text:foo)^10.0) ((text:a)^10.0) ((text:b)^10.0)) " + - "((text:\"bar foo a b\" text:\"tropical cyclone foo a b\")^10.0) " + - "(((text:bar text:\"tropical cyclone\")^5.0) ((text:\"cyclone foo\")^5.0) ((text:\"foo a\")^5.0) ((text:\"a b\")^5.0)) " + - "(((text:\"bar foo\" text:\"tropical cyclone foo\")^8.0) ((text:\"cyclone foo a\")^8.0) ((text:\"foo a b\")^8.0))", q.toString()); + q = + QParser.getParser( + "tropical cyclone foo a b ", + "edismax", + true, + req(params("qf", "text^10", "pf", "text^10", "pf2", "text^5", "pf3", "text^8"))) + .getQuery(); + assertEquals( + "+(" + + "((text:bar (+text:tropical +text:cyclone))^10.0) ((text:foo)^10.0) ((text:a)^10.0) ((text:b)^10.0)) " + + "((text:\"bar foo a b\" text:\"tropical cyclone foo a b\")^10.0) " + + "(((text:bar text:\"tropical cyclone\")^5.0) ((text:\"cyclone foo\")^5.0) ((text:\"foo a\")^5.0) ((text:\"a b\")^5.0)) " + + "(((text:\"bar foo\" text:\"tropical cyclone foo\")^8.0) ((text:\"cyclone foo a\")^8.0) ((text:\"foo a b\")^8.0))", + q.toString()); - q = QParser.getParser("foo a b tropical cyclone","edismax",true, req(params("qf", "text^10","pf", "text^10","pf2", "text^5","pf3", "text^8"))).getQuery(); - assertEquals("+(" + - "((text:foo)^10.0) ((text:a)^10.0) ((text:b)^10.0) ((text:bar (+text:tropical +text:cyclone))^10.0)) " + - "((text:\"foo a b bar\" text:\"foo a b tropical cyclone\")^10.0) " + - "(((text:\"foo a\")^5.0) ((text:\"a b\")^5.0) ((text:\"b tropical\")^5.0) ((text:bar text:\"tropical cyclone\")^5.0)) " + - "(((text:\"foo a b\")^8.0) ((text:\"a b tropical\")^8.0) ((text:\"b bar\" text:\"b tropical cyclone\")^8.0))", q.toString()); + q = + QParser.getParser( + "foo a b tropical cyclone", + "edismax", + true, + req(params("qf", "text^10", "pf", "text^10", "pf2", "text^5", "pf3", "text^8"))) + .getQuery(); + assertEquals( + "+(" + + "((text:foo)^10.0) ((text:a)^10.0) ((text:b)^10.0) ((text:bar (+text:tropical +text:cyclone))^10.0)) " + + "((text:\"foo a b bar\" text:\"foo a b tropical cyclone\")^10.0) " + + "(((text:\"foo a\")^5.0) ((text:\"a b\")^5.0) ((text:\"b tropical\")^5.0) ((text:bar text:\"tropical cyclone\")^5.0)) " + + "(((text:\"foo a b\")^8.0) ((text:\"a b tropical\")^8.0) ((text:\"b bar\" text:\"b tropical cyclone\")^8.0))", + q.toString()); } } diff --git a/solr/core/src/test/org/apache/solr/search/TestNoOpRegenerator.java b/solr/core/src/test/org/apache/solr/search/TestNoOpRegenerator.java index 72af2cd284d..605260be30f 100644 --- a/solr/core/src/test/org/apache/solr/search/TestNoOpRegenerator.java +++ b/solr/core/src/test/org/apache/solr/search/TestNoOpRegenerator.java @@ -21,40 +21,44 @@ /** Tests that NoOpRegenerator does what it should */ public class TestNoOpRegenerator extends 
SolrTestCaseJ4 { - + @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig-noopregen.xml", "schema-minimal.xml"); } - + @SuppressWarnings("unchecked") public void testRegeneration() throws Exception { assertU(adoc("id", "1")); assertU(adoc("id", "2")); assertU(commit()); - + // add some items - h.getCore().withSearcher(searcher -> { - assertEquals(2, searcher.maxDoc()); - SolrCache cache = searcher.getCache("myPerSegmentCache"); - assertEquals(0, cache.size()); - cache.put("key1", "value1"); - cache.put("key2", "value2"); - assertEquals(2, cache.size()); - return null; - }); - + h.getCore() + .withSearcher( + searcher -> { + assertEquals(2, searcher.maxDoc()); + SolrCache cache = searcher.getCache("myPerSegmentCache"); + assertEquals(0, cache.size()); + cache.put("key1", "value1"); + cache.put("key2", "value2"); + assertEquals(2, cache.size()); + return null; + }); + // add a doc and commit: we should see our cached items still there assertU(adoc("id", "3")); assertU(commit()); - h.getCore().withSearcher(searcher -> { - assertEquals(3, searcher.maxDoc()); - SolrCache cache = searcher.getCache("myPerSegmentCache"); - assertEquals(2, cache.size()); - assertEquals("value1", cache.get("key1")); - assertEquals("value2", cache.get("key2")); - return null; - }); + h.getCore() + .withSearcher( + searcher -> { + assertEquals(3, searcher.maxDoc()); + SolrCache cache = searcher.getCache("myPerSegmentCache"); + assertEquals(2, cache.size()); + assertEquals("value1", cache.get("key1")); + assertEquals("value2", cache.get("key2")); + return null; + }); } } diff --git a/solr/core/src/test/org/apache/solr/search/TestOverriddenPrefixQueryForCustomFieldType.java b/solr/core/src/test/org/apache/solr/search/TestOverriddenPrefixQueryForCustomFieldType.java index b26603f7dc7..026854e14ca 100644 --- a/solr/core/src/test/org/apache/solr/search/TestOverriddenPrefixQueryForCustomFieldType.java +++ b/solr/core/src/test/org/apache/solr/search/TestOverriddenPrefixQueryForCustomFieldType.java @@ -16,6 +16,7 @@ */ package org.apache.solr.search; +import java.util.Random; import org.apache.lucene.search.*; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrInputDocument; @@ -26,20 +27,19 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.util.Random; - public class TestOverriddenPrefixQueryForCustomFieldType extends SolrTestCaseJ4 { - private static int[] counts= new int[2]; + private static int[] counts = new int[2]; private static int otherCounts; String[] otherTerms = {"this", "that", "those", "randomness"}; @BeforeClass public static void beforeClass() throws Exception { - System.setProperty("solr.tests.CustomIntFieldType", - (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP) - ? "solr.IntPointPrefixActsAsRangeQueryFieldType" - : "solr.TrieIntPrefixActsAsRangeQueryFieldType")); + System.setProperty( + "solr.tests.CustomIntFieldType", + (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP) + ? 
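Background on TestNoOpRegenerator above: when a commit opens a new searcher, autowarming normally recomputes cache entries; NoOpRegenerator instead carries entries over untouched, which is why key1/key2 survive the commit verbatim. The cache is presumably declared along these lines in solrconfig-noopregen.xml (not shown in this diff; attribute values illustrative):

    <cache name="myPerSegmentCache"
           class="solr.CaffeineCache"
           size="128"
           autowarmCount="128"
           regenerator="org.apache.solr.search.NoOpRegenerator"/>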
"solr.IntPointPrefixActsAsRangeQueryFieldType" + : "solr.TrieIntPrefixActsAsRangeQueryFieldType")); initCore("solrconfig-basic.xml", "schema-customfield.xml"); } @@ -51,21 +51,21 @@ public void setUp() throws Exception { super.setUp(); clearIndex(); assertU(commit()); - otherCounts=0; + otherCounts = 0; counts = new int[2]; } public void createIndex(int nDocs) { Random r = random(); - for (int i=0; i=-1]" - ,"//doc[count(*)=1]" - ); + for (String id : Arrays.asList("42", "99")) { + assertQ( + id + ": fl=[docid]", + req("qt", "/get", "id", id, "wt", "xml", "fl", "[docid]"), + "count(//doc)=1", + "//doc/int[@name='[docid]'][.>=-1]", + "//doc[count(*)=1]"); } } - + public void testAugmentersRTG() throws Exception { // behavior shouldn't matter if we are committed or uncommitted - for (String id : Arrays.asList("42","99")) { - for (SolrParams p : Arrays.asList - (params("fl","[docid],[shard],[explain],x_alias:[value v=10 t=int],abs(val_i)"), - params("fl","[docid],[shard],abs(val_i)","fl","[explain],x_alias:[value v=10 t=int]"), - params("fl","[docid],[shard]","fl","[explain],x_alias:[value v=10 t=int]","fl","abs(val_i)"), - params("fl","[docid]","fl","[shard]","fl","[explain]","fl","x_alias:[value v=10 t=int]","fl","abs(val_i)"))) { - assertQ(id + ": " + p, - req(p, "qt","/get","id",id, "wt","xml") - ,"count(//doc)=1" - ,"//doc/int[@name='[docid]'][.>=-1]" - ,"//doc/float[@name='abs(val_i)'][.='1.0']" - ,"//doc/str[@name='[shard]'][.='[not a shard request]']" - // RTG: [explain] should be missing (ignored) - ,"//doc/int[@name='x_alias'][.=10]" - - ,"//doc[count(*)=4]" - ); + for (String id : Arrays.asList("42", "99")) { + for (SolrParams p : + Arrays.asList( + params("fl", "[docid],[shard],[explain],x_alias:[value v=10 t=int],abs(val_i)"), + params( + "fl", "[docid],[shard],abs(val_i)", "fl", "[explain],x_alias:[value v=10 t=int]"), + params( + "fl", + "[docid],[shard]", + "fl", + "[explain],x_alias:[value v=10 t=int]", + "fl", + "abs(val_i)"), + params( + "fl", + "[docid]", + "fl", + "[shard]", + "fl", + "[explain]", + "fl", + "x_alias:[value v=10 t=int]", + "fl", + "abs(val_i)"))) { + assertQ( + id + ": " + p, + req(p, "qt", "/get", "id", id, "wt", "xml"), + "count(//doc)=1", + "//doc/int[@name='[docid]'][.>=-1]", + "//doc/float[@name='abs(val_i)'][.='1.0']", + "//doc/str[@name='[shard]'][.='[not a shard request]']" + // RTG: [explain] should be missing (ignored) + , + "//doc/int[@name='x_alias'][.=10]", + "//doc[count(*)=4]"); } } } public void testAugmentersAndExplicit() throws Exception { - for (SolrParams p : Arrays.asList(params("fl","id,[docid],[explain],x_alias:[value v=10 t=int]"), - params("fl","id","fl","[docid],[explain],x_alias:[value v=10 t=int]"), - params("fl","id","fl","[docid]","fl","[explain]","fl","x_alias:[value v=10 t=int]"))) { - assertQ(p.toString(), - req(p, "q","*:*", "rows", "1") - ,"//result[@numFound='5']" - ,"//result/doc/str[@name='id']" - ,"//result/doc/int[@name='[docid]']" - ,"//result/doc/str[@name='[explain]']" - ,"//result/doc/int[@name='x_alias'][.=10]" - - ,"//result/doc[count(*)=4]" - ); + for (SolrParams p : + Arrays.asList( + params("fl", "id,[docid],[explain],x_alias:[value v=10 t=int]"), + params("fl", "id", "fl", "[docid],[explain],x_alias:[value v=10 t=int]"), + params( + "fl", + "id", + "fl", + "[docid]", + "fl", + "[explain]", + "fl", + "x_alias:[value v=10 t=int]"))) { + assertQ( + p.toString(), + req(p, "q", "*:*", "rows", "1"), + "//result[@numFound='5']", + "//result/doc/str[@name='id']", + "//result/doc/int[@name='[docid]']", + 
"//result/doc/str[@name='[explain]']", + "//result/doc/int[@name='x_alias'][.=10]", + "//result/doc[count(*)=4]"); } } - + public void testAugmentersAndExplicitRTG() throws Exception { // behavior shouldn't matter if we are committed or uncommitted - for (String id : Arrays.asList("42","99")) { - for (SolrParams p : Arrays.asList - (params("fl","id,[docid],[explain],x_alias:[value v=10 t=int],abs(val_i)"), - params("fl","id,[docid],abs(val_i)","fl","[explain],x_alias:[value v=10 t=int]"), - params("fl","id","fl","[docid]","fl","[explain]","fl","x_alias:[value v=10 t=int]","fl","abs(val_i)"))) { - assertQ(id + ": " + p, - req(p, "qt","/get","id",id, "wt","xml") - ,"count(//doc)=1" - ,"//doc/str[@name='id']" - ,"//doc/int[@name='[docid]'][.>=-1]" - ,"//doc/float[@name='abs(val_i)'][.='1.0']" - // RTG: [explain] should be missing (ignored) - ,"//doc/int[@name='x_alias'][.=10]" - - ,"//doc[count(*)=4]" - ); + for (String id : Arrays.asList("42", "99")) { + for (SolrParams p : + Arrays.asList( + params("fl", "id,[docid],[explain],x_alias:[value v=10 t=int],abs(val_i)"), + params("fl", "id,[docid],abs(val_i)", "fl", "[explain],x_alias:[value v=10 t=int]"), + params( + "fl", + "id", + "fl", + "[docid]", + "fl", + "[explain]", + "fl", + "x_alias:[value v=10 t=int]", + "fl", + "abs(val_i)"))) { + assertQ( + id + ": " + p, + req(p, "qt", "/get", "id", id, "wt", "xml"), + "count(//doc)=1", + "//doc/str[@name='id']", + "//doc/int[@name='[docid]'][.>=-1]", + "//doc/float[@name='abs(val_i)'][.='1.0']" + // RTG: [explain] should be missing (ignored) + , + "//doc/int[@name='x_alias'][.=10]", + "//doc[count(*)=4]"); } } } public void testAugmentersAndScore() throws Exception { - assertQ(req("q","*:*", "rows", "1", - "fl","[docid],x_alias:[value v=10 t=int],score") - ,"//result[@numFound='5']" - ,"//result/doc/int[@name='[docid]']" - ,"//result/doc/int[@name='x_alias'][.=10]" - ,"//result/doc/float[@name='score']" - - ,"//result/doc[count(*)=3]" - ); - for (SolrParams p : Arrays.asList(params("fl","[docid],x_alias:[value v=10 t=int],[explain],score"), - params("fl","[docid]","fl","x_alias:[value v=10 t=int],[explain]","fl","score"), - params("fl","[docid]","fl","x_alias:[value v=10 t=int]","fl","[explain]","fl","score"))) { - - assertQ(p.toString(), - req(p, "q","*:*", "rows", "1") - ,"//result[@numFound='5']" - - ,"//result/doc/int[@name='[docid]']" - ,"//result/doc/int[@name='x_alias'][.=10]" - ,"//result/doc/str[@name='[explain]']" - ,"//result/doc/float[@name='score']" - - ,"//result/doc[count(*)=4]" - ); + assertQ( + req("q", "*:*", "rows", "1", "fl", "[docid],x_alias:[value v=10 t=int],score"), + "//result[@numFound='5']", + "//result/doc/int[@name='[docid]']", + "//result/doc/int[@name='x_alias'][.=10]", + "//result/doc/float[@name='score']", + "//result/doc[count(*)=3]"); + for (SolrParams p : + Arrays.asList( + params("fl", "[docid],x_alias:[value v=10 t=int],[explain],score"), + params("fl", "[docid]", "fl", "x_alias:[value v=10 t=int],[explain]", "fl", "score"), + params( + "fl", + "[docid]", + "fl", + "x_alias:[value v=10 t=int]", + "fl", + "[explain]", + "fl", + "score"))) { + + assertQ( + p.toString(), + req(p, "q", "*:*", "rows", "1"), + "//result[@numFound='5']", + "//result/doc/int[@name='[docid]']", + "//result/doc/int[@name='x_alias'][.=10]", + "//result/doc/str[@name='[explain]']", + "//result/doc/float[@name='score']", + "//result/doc[count(*)=4]"); } } - + public void testAugmentersAndScoreRTG() throws Exception { // if we use RTG (committed or otherwise) score should be ignored - for 
(String id : Arrays.asList("42","99")) { - assertQ(id, - req("qt","/get","id",id, "wt","xml", - "fl","x_alias:[value v=10 t=int],score,abs(val_i),[docid]") - ,"//doc/int[@name='[docid]'][.>=-1]" - ,"//doc/float[@name='abs(val_i)'][.='1.0']" - ,"//doc/int[@name='x_alias'][.=10]" - - ,"//doc[count(*)=3]" - ); - for (SolrParams p : Arrays.asList(params("fl","[docid],x_alias:[value v=10 t=int],[explain],score,abs(val_i)"), - params("fl","x_alias:[value v=10 t=int],[explain]","fl","[docid],score,abs(val_i)"), - params("fl","[docid]","fl","x_alias:[value v=10 t=int]","fl","[explain]","fl","score","fl","abs(val_i)"))) { - - assertQ(p.toString(), - req(p, "qt","/get","id",id, "wt","xml") - - ,"//doc/int[@name='[docid]']" // TODO - ,"//doc/float[@name='abs(val_i)'][.='1.0']" - ,"//doc/int[@name='x_alias'][.=10]" - // RTG: [explain] and score should be missing (ignored) - - ,"//doc[count(*)=3]" - ); + for (String id : Arrays.asList("42", "99")) { + assertQ( + id, + req( + "qt", + "/get", + "id", + id, + "wt", + "xml", + "fl", + "x_alias:[value v=10 t=int],score,abs(val_i),[docid]"), + "//doc/int[@name='[docid]'][.>=-1]", + "//doc/float[@name='abs(val_i)'][.='1.0']", + "//doc/int[@name='x_alias'][.=10]", + "//doc[count(*)=3]"); + for (SolrParams p : + Arrays.asList( + params("fl", "[docid],x_alias:[value v=10 t=int],[explain],score,abs(val_i)"), + params( + "fl", "x_alias:[value v=10 t=int],[explain]", "fl", "[docid],score,abs(val_i)"), + params( + "fl", + "[docid]", + "fl", + "x_alias:[value v=10 t=int]", + "fl", + "[explain]", + "fl", + "score", + "fl", + "abs(val_i)"))) { + + assertQ( + p.toString(), + req(p, "qt", "/get", "id", id, "wt", "xml"), + "//doc/int[@name='[docid]']" // TODO + , + "//doc/float[@name='abs(val_i)'][.='1.0']", + "//doc/int[@name='x_alias'][.=10]" + // RTG: [explain] and score should be missing (ignored) + + , + "//doc[count(*)=3]"); } } } @@ -681,68 +757,67 @@ public void testAugmentersGlobsExplicitAndScoreOhMy() throws Exception { Random random = random(); // NOTE: 'ssto' is the missing one - final List fl = Arrays.asList - ("id","[docid]","[explain]","score","val_*","subj*"); - + final List fl = Arrays.asList("id", "[docid]", "[explain]", "score", "val_*", "subj*"); + final int iters = atLeast(random, 10); - for (int i = 0; i< iters; i++) { - + for (int i = 0; i < iters; i++) { + Collections.shuffle(fl, random); - final SolrParams singleFl = params("q","*:*", "rows", "1","fl",String.join(",", fl)); - final ModifiableSolrParams multiFl = params("q","*:*", "rows", "1"); + final SolrParams singleFl = params("q", "*:*", "rows", "1", "fl", String.join(",", fl)); + final ModifiableSolrParams multiFl = params("q", "*:*", "rows", "1"); for (String item : fl) { - multiFl.add("fl",item); + multiFl.add("fl", item); } for (SolrParams p : Arrays.asList(singleFl, multiFl)) { - assertQ(p.toString(), - req(p) - ,"//result[@numFound='5']" - ,"//result/doc/str[@name='id']" - ,"//result/doc/float[@name='score']" - ,"//result/doc/str[@name='subject']" - ,"//result/doc/int[@name='val_i']" - ,"//result/doc/int[@name='[docid]']" - ,"//result/doc/str[@name='[explain]']" - - ,"//result/doc[count(*)=6]" - ); + assertQ( + p.toString(), + req(p), + "//result[@numFound='5']", + "//result/doc/str[@name='id']", + "//result/doc/float[@name='score']", + "//result/doc/str[@name='subject']", + "//result/doc/int[@name='val_i']", + "//result/doc/int[@name='[docid]']", + "//result/doc/str[@name='[explain]']", + "//result/doc[count(*)=6]"); } } } - + public void testAugmentersGlobsExplicitAndScoreOhMyRTG() 
throws Exception { Random random = random(); // NOTE: 'ssto' is the missing one - final List<String> fl = Arrays.asList - ("id","[explain]","score","val_*","subj*","abs(val_i)","[docid]"); - + final List<String> fl = + Arrays.asList("id", "[explain]", "score", "val_*", "subj*", "abs(val_i)", "[docid]"); + final int iters = atLeast(random, 10); - for (int i = 0; i< iters; i++) { - + for (int i = 0; i < iters; i++) { + Collections.shuffle(fl, random); - final SolrParams singleFl = params("fl",String.join(",", fl)); + final SolrParams singleFl = params("fl", String.join(",", fl)); final ModifiableSolrParams multiFl = params(); for (String item : fl) { - multiFl.add("fl",item); + multiFl.add("fl", item); } - // RTG behavior should be consistent, (committed or otherwise) - for (String id : Arrays.asList("42","99")) { + // RTG behavior should be consistent, (committed or otherwise) + for (String id : Arrays.asList("42", "99")) { for (SolrParams p : Arrays.asList(singleFl, multiFl)) { - assertQ(id + ": " + p, - req(p, "qt","/get","id",id, "wt","xml") - ,"count(//doc)=1" - ,"//doc/str[@name='id']" - ,"//doc/int[@name='[docid]'][.>=-1]" - ,"//doc/float[@name='abs(val_i)'][.='1.0']" - // RTG: [explain] and score should be missing (ignored) - ,"//doc/int[@name='val_i'][.=1]" - ,"//doc/str[@name='subject']" - ,"//doc[count(*)=5]" - ); + assertQ( + id + ": " + p, + req(p, "qt", "/get", "id", id, "wt", "xml"), + "count(//doc)=1", + "//doc/str[@name='id']", + "//doc/int[@name='[docid]'][.>=-1]", + "//doc/float[@name='abs(val_i)'][.='1.0']" + // RTG: [explain] and score should be missing (ignored) + , + "//doc/int[@name='val_i'][.=1]", + "//doc/str[@name='subject']", + "//doc[count(*)=5]"); } } } diff --git a/solr/core/src/test/org/apache/solr/search/TestQueryTypes.java b/solr/core/src/test/org/apache/solr/search/TestQueryTypes.java index 7f9abb5d3d8..aaff821fb53 100644 --- a/solr/core/src/test/org/apache/solr/search/TestQueryTypes.java +++ b/solr/core/src/test/org/apache/solr/search/TestQueryTypes.java @@ -16,9 +16,9 @@ */ package org.apache.solr.search; +import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.CommonParams; -import org.apache.solr.SolrTestCaseJ4; import org.junit.BeforeClass; import org.junit.Test; @@ -28,44 +28,59 @@ public class TestQueryTypes extends SolrTestCaseJ4 { public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema11.xml"); } - - public String getCoreName() { return "basic"; } - public void testQueryTypes() { - assertU(adoc("id","0")); - assertU(adoc("id","1", "v_t","Hello Dude")); - assertU(adoc("id","2", "v_t","Hello Yonik")); - assertU(adoc("id","3", "v_s","{!literal}")); - assertU(adoc("id","4", "v_s","other stuff")); - assertU(adoc("id","5", "v_f","3.14159")); - assertU(adoc("id","6", "v_f","8983")); - assertU(adoc("id","7", "v_f","1.5")); - assertU(adoc("id","8", "v_ti","5")); - assertU(adoc("id","9", "v_s","internal\"quote")); - assertU(adoc("id","10","text_no_analyzer","should just work")); + public String getCoreName() { + return "basic"; + } + public void testQueryTypes() { + assertU(adoc("id", "0")); + assertU(adoc("id", "1", "v_t", "Hello Dude")); + assertU(adoc("id", "2", "v_t", "Hello Yonik")); + assertU(adoc("id", "3", "v_s", "{!literal}")); + assertU(adoc("id", "4", "v_s", "other stuff")); + assertU(adoc("id", "5", "v_f", "3.14159")); + assertU(adoc("id", "6", "v_f", "8983")); + assertU(adoc("id", "7", "v_f", "1.5")); + assertU(adoc("id", "8", "v_ti", "5")); + assertU(adoc("id", "9", 
"v_s", "internal\"quote")); + assertU(adoc("id", "10", "text_no_analyzer", "should just work")); assertU(adoc("id", "200", "subject_t", "Sony Netzteil")); assertU(adoc("id", "201", "subject_t", "Other Netzteil")); assertU(adoc("id", "202", "subject_t", "Other Product")); - Object[] arr = new Object[] { - "id",999 - ,"v_s","wow dude" - ,"v_t","wow" - ,"v_ti",-1 - ,"v_tis",-1 - ,"v_tl",-1234567891234567890L - ,"v_tls",-1234567891234567890L - ,"v_tf",-2.0f - ,"v_tfs",-2.0f - ,"v_td",-2.0 - ,"v_tds",-2.0 - ,"v_tdt","2000-05-10T01:01:01Z" - ,"v_tdts","2002-08-26T01:01:01Z" - }; + Object[] arr = + new Object[] { + "id", + 999, + "v_s", + "wow dude", + "v_t", + "wow", + "v_ti", + -1, + "v_tis", + -1, + "v_tl", + -1234567891234567890L, + "v_tls", + -1234567891234567890L, + "v_tf", + -2.0f, + "v_tfs", + -2.0f, + "v_td", + -2.0, + "v_tds", + -2.0, + "v_tdt", + "2000-05-10T01:01:01Z", + "v_tdts", + "2002-08-26T01:01:01Z" + }; String[] sarr = new String[arr.length]; - for (int i=0; i assertQ("no match and no default", - req("q", "{!switch case.x=Dude case.z=Yonik}asdf") - , "//result[@numFound='BOGUS']") - ); - assertTrue("exp cause is wrong", - exp.getCause() instanceof SolrException); + RuntimeException exp = + expectThrows( + RuntimeException.class, + "Should have gotten an error w/o default", + () -> + assertQ( + "no match and no default", + req("q", "{!switch case.x=Dude case.z=Yonik}asdf"), + "//result[@numFound='BOGUS']")); + assertTrue("exp cause is wrong", exp.getCause() instanceof SolrException); SolrException e = (SolrException) exp.getCause(); assertEquals("error isn't user error", 400, e.code()); - assertTrue("Error doesn't include bad switch case: " + e.getMessage(), + assertTrue( + "Error doesn't include bad switch case: " + e.getMessage(), e.getMessage().contains("asdf")); } finally { resetExceptionIgnores(); } - // dismax query from std request handler - assertQ("test dismax query", - req("q","{!dismax}hello" - ,"qf","v_t" - ,"bf","sqrt(v_f)^100 log(sum(v_f,1))^50" - ,"bq","{!prefix f=v_t}he" - , CommonParams.DEBUG_QUERY,"on" - ) - ,"//result[@numFound='2']" - ); + assertQ( + "test dismax query", + req( + "q", + "{!dismax}hello", + "qf", + "v_t", + "bf", + "sqrt(v_f)^100 log(sum(v_f,1))^50", + "bq", + "{!prefix f=v_t}he", + CommonParams.DEBUG_QUERY, + "on"), + "//result[@numFound='2']"); // dismax query from std request handler, using local params - assertQ("test dismax query w/ local params", - req("q","{!dismax qf=v_t}hello" - ,"qf","v_f" - ) - ,"//result[@numFound='2']" - ); - - assertQ("test nested query", - req("q","_query_:\"{!query v=$q1}\"", "q1","{!prefix f=v_t}hel") - ,"//result[@numFound='2']" - ); - - assertQ("test nested nested query", - req("q","_query_:\"{!query v=$q1}\"", "q1","{!v=$q2}","q2","{!prefix f=v_t v=$qqq}","qqq","hel") - ,"//result[@numFound='2']" - ); - assertQ("Test text field with no analysis doesn't NPE with wildcards (SOLR-4318)", - req("q", "text_no_analyzer:should*"), "//result[@numFound='1']"); - - + assertQ( + "test dismax query w/ local params", + req( + "q", "{!dismax qf=v_t}hello", + "qf", "v_f"), + "//result[@numFound='2']"); + + assertQ( + "test nested query", + req("q", "_query_:\"{!query v=$q1}\"", "q1", "{!prefix f=v_t}hel"), + "//result[@numFound='2']"); + + assertQ( + "test nested nested query", + req( + "q", + "_query_:\"{!query v=$q1}\"", + "q1", + "{!v=$q2}", + "q2", + "{!prefix f=v_t v=$qqq}", + "qqq", + "hel"), + "//result[@numFound='2']"); + assertQ( + "Test text field with no analysis doesn't NPE with wildcards (SOLR-4318)", + 
req("q", "text_no_analyzer:should*"), + "//result[@numFound='1']"); } - + @Test public void testNumericBadRequests() { String[] suffixes = new String[50]; int fieldNum = 0; - for (String type:new String[]{"i", "l", "f", "d", "dt"}) { - for (String s:new String[]{"", "s"}) { - //Trie + for (String type : new String[] {"i", "l", "f", "d", "dt"}) { + for (String s : new String[] {"", "s"}) { + // Trie suffixes[fieldNum++] = "t" + type + s; suffixes[fieldNum++] = "t" + type + s + "_dv"; suffixes[fieldNum++] = "t" + type + s + "_ni_dv"; - - //Points + + // Points suffixes[fieldNum++] = type + s + "_p"; suffixes[fieldNum++] = type + s + "_ni_p"; } } - assertEquals(fieldNum,suffixes.length); - + assertEquals(fieldNum, suffixes.length); + String badNumber = "NOT_A_NUMBER"; - for (String suffix:suffixes) { + for (String suffix : suffixes) { // Numeric bad requests - assertQEx("Expecting exception for suffix: " + suffix, badNumber, req("q","{!term f=foo_" + suffix + "}" + badNumber), SolrException.ErrorCode.BAD_REQUEST); - assertQEx("Expecting exception for suffix: " + suffix, badNumber, req("q","{!terms f=foo_" + suffix + "}1 2 3 4 5 " + badNumber), SolrException.ErrorCode.BAD_REQUEST); - assertQEx("Expecting exception for suffix: " + suffix, badNumber, req("q","{!lucene}foo_" + suffix + ":" + badNumber), SolrException.ErrorCode.BAD_REQUEST); - assertQEx("Expecting exception for suffix: " + suffix, badNumber, req("q","{!field f=foo_" + suffix + "}" + badNumber), SolrException.ErrorCode.BAD_REQUEST); - assertQEx("Expecting exception for suffix: " + suffix, badNumber, req("q","{!maxscore}foo_" + suffix + ":" + badNumber), SolrException.ErrorCode.BAD_REQUEST); - assertQEx("Expecting exception for suffix: " + suffix, badNumber, - req("q","{!xmlparser}"), SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Expecting exception for suffix: " + suffix, + badNumber, + req("q", "{!term f=foo_" + suffix + "}" + badNumber), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Expecting exception for suffix: " + suffix, + badNumber, + req("q", "{!terms f=foo_" + suffix + "}1 2 3 4 5 " + badNumber), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Expecting exception for suffix: " + suffix, + badNumber, + req("q", "{!lucene}foo_" + suffix + ":" + badNumber), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Expecting exception for suffix: " + suffix, + badNumber, + req("q", "{!field f=foo_" + suffix + "}" + badNumber), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Expecting exception for suffix: " + suffix, + badNumber, + req("q", "{!maxscore}foo_" + suffix + ":" + badNumber), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Expecting exception for suffix: " + suffix, + badNumber, + req( + "q", + "{!xmlparser}"), + SolrException.ErrorCode.BAD_REQUEST); if (suffix.contains("_p")) { // prefix queries work in Trie fields - assertQEx("Expecting exception for suffix: " + suffix, "Can't run prefix queries on numeric fields", - req("q","{!prefix f=foo_" + suffix + "}NOT_A_NUMBER"), SolrException.ErrorCode.BAD_REQUEST); - assertQEx("Expecting exception for suffix: " + suffix, "Can't run prefix queries on numeric fields", - req("q","{!lucene}foo_" + suffix + ":123*"), SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Expecting exception for suffix: " + suffix, + "Can't run prefix queries on numeric fields", + req("q", "{!prefix f=foo_" + suffix + "}NOT_A_NUMBER"), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Expecting exception for suffix: " + suffix, + "Can't run prefix 
queries on numeric fields", + req("q", "{!lucene}foo_" + suffix + ":123*"), + SolrException.ErrorCode.BAD_REQUEST); } - - // Skipping: func, boost, raw, nested, frange, spatial*, join, surround, switch, parent, child, collapsing, - // complexphrase, rerank, export, mlt, hash, graph, graphTerms, igain, tlogit, significantTerms, payload* - // Maybe add: raw, join, parent, child, collapsing, graphTerms, igain, significantTerms, simple - } + // Skipping: func, boost, raw, nested, frange, spatial*, join, surround, switch, parent, + // child, collapsing, + // complexphrase, rerank, export, mlt, hash, graph, graphTerms, igain, tlogit, + // significantTerms, payload* + // Maybe add: raw, join, parent, child, collapsing, graphTerms, igain, significantTerms, + // simple + } } } diff --git a/solr/core/src/test/org/apache/solr/search/TestQueryUtils.java b/solr/core/src/test/org/apache/solr/search/TestQueryUtils.java index 8f6e72d5537..2a9a52a5022 100644 --- a/solr/core/src/test/org/apache/solr/search/TestQueryUtils.java +++ b/solr/core/src/test/org/apache/solr/search/TestQueryUtils.java @@ -16,19 +16,16 @@ */ package org.apache.solr.search; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.BooleanQuery; +import java.util.Collection; +import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.index.Term; +import org.apache.lucene.search.TermQuery; import org.apache.solr.SolrTestCaseJ4; import org.junit.BeforeClass; -import java.util.Collection; - -/** - * - */ +/** */ public class TestQueryUtils extends SolrTestCaseJ4 { @BeforeClass @@ -36,11 +33,11 @@ public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema.xml"); } - @Override public void setUp() throws Exception { super.setUp(); } + @Override public void tearDown() throws Exception { super.tearDown(); @@ -48,14 +45,15 @@ public void tearDown() throws Exception { public void positive(Query q) { assertFalse(QueryUtils.isNegative(q)); - assertTrue(QueryUtils.getAbs(q)==q); - Collection clauses = (q instanceof BooleanQuery) ? ((BooleanQuery)q).clauses() : null; + assertTrue(QueryUtils.getAbs(q) == q); + Collection clauses = + (q instanceof BooleanQuery) ? 
((BooleanQuery) q).clauses() : null; if (clauses != null) { if (clauses.size() != 0) { - assertTrue(QueryUtils.makeQueryable(q)==q); + assertTrue(QueryUtils.makeQueryable(q) == q); } } else { - assertTrue(QueryUtils.makeQueryable(q)==q); + assertTrue(QueryUtils.makeQueryable(q) == q); } } @@ -70,8 +68,8 @@ public void negative(Query q) { } public void testNegativeQueries() { - TermQuery tq = new TermQuery(new Term("hi","there")); - TermQuery tq2 = new TermQuery(new Term("wow","dude")); + TermQuery tq = new TermQuery(new Term("hi", "there")); + TermQuery tq2 = new TermQuery(new Term("wow", "dude")); BooleanQuery.Builder bq = new BooleanQuery.Builder(); positive(tq); @@ -82,24 +80,23 @@ public void testNegativeQueries() { positive(bq.build()); bq = new BooleanQuery.Builder(); - bq.add(tq,BooleanClause.Occur.MUST_NOT); + bq.add(tq, BooleanClause.Occur.MUST_NOT); negative(bq.build()); - bq.add(tq2,BooleanClause.Occur.MUST_NOT); + bq.add(tq2, BooleanClause.Occur.MUST_NOT); negative(bq.build()); - - String f = "name"; // name is whitespace tokenized - - assertU(adoc("id", "1", f, "A")); - assertU(adoc("id", "2", f, "B")); - assertU(adoc("id", "3", f, "C")); - assertU(adoc("id", "4", f, "C")); - assertU(adoc("id", "5", f, "D")); - assertU(adoc("id", "6", f, "E")); - assertU(adoc("id", "7", f, "E")); - assertU(adoc("id", "8", f, "E W")); - assertU(adoc("id", "9", f, "F W")); + String f = "name"; // name is whitespace tokenized + + assertU(adoc("id", "1", f, "A")); + assertU(adoc("id", "2", f, "B")); + assertU(adoc("id", "3", f, "C")); + assertU(adoc("id", "4", f, "C")); + assertU(adoc("id", "5", f, "D")); + assertU(adoc("id", "6", f, "E")); + assertU(adoc("id", "7", f, "E")); + assertU(adoc("id", "8", f, "E W")); + assertU(adoc("id", "9", f, "F W")); assertU(adoc("id", "10", f, "G W")); assertU(adoc("id", "11", f, "G X ")); assertU(adoc("id", "12", f, "G X Y")); @@ -109,180 +106,118 @@ public void testNegativeQueries() { assertU(adoc("id", "16", f, "G")); assertU(commit()); - assertQ("test negative base q matching nothing", - req("-qlkciyopsbgzyvkylsjhchghjrdf") - ,"//result[@numFound='16']" - ); + assertQ( + "test negative base q matching nothing", + req("-qlkciyopsbgzyvkylsjhchghjrdf"), + "//result[@numFound='16']"); - assertQ("test negative base q matching something", - req("-name:E") - ,"//result[@numFound='13']" - ); + assertQ("test negative base q matching something", req("-name:E"), "//result[@numFound='13']"); - assertQ("test negative base q with two terms", - req("-name:G -name:W") - ,"//result[@numFound='7']" - ); + assertQ( + "test negative base q with two terms", req("-name:G -name:W"), "//result[@numFound='7']"); - assertQ("test negative base q with three terms", - req("-name:G -name:W -name:E") - ,"//result[@numFound='5']" - ); + assertQ( + "test negative base q with three terms", + req("-name:G -name:W -name:E"), + "//result[@numFound='5']"); - assertQ("test negative boolean query", - req("-(name:G OR name:W)") - ,"//result[@numFound='7']" - ); + assertQ("test negative boolean query", req("-(name:G OR name:W)"), "//result[@numFound='7']"); - assertQ("test non negative q", - req("-name:G -name:W -name:E id:[* TO *]") - ,"//result[@numFound='5']" - ); + assertQ( + "test non negative q", + req("-name:G -name:W -name:E id:[* TO *]"), + "//result[@numFound='5']"); - assertQ("test non negative q", - req("-name:G -name:W -name:E +id:[* TO *]") - ,"//result[@numFound='5']" - ); + assertQ( + "test non negative q", + req("-name:G -name:W -name:E +id:[* TO *]"), + "//result[@numFound='5']"); 
// now for the filters... - assertQ("test negative base q matching nothing, with filters", - req("q","-qlkciyopsbgzyvkylsjhchghjrdf" - ,"fq","name:A" - ) - ,"//result[@numFound='1']" - ); - - assertQ("test negative filters", - req("q","name:A" - ,"fq","-name:A" - ) - ,"//result[@numFound='0']" - ); - assertQ("test negative filters", - req("q","name:A" - ,"fq","-name:A" - ) - ,"//result[@numFound='0']" - ); - assertQ("test negative filters", - req("q","-name:E" - ,"fq","name:E" - ) - ,"//result[@numFound='0']" - ); - assertQ("test negative filters", - req("q","-name:E" - ,"fq","name:W" - ) - ,"//result[@numFound='2']" - ); - assertQ("test negative filters", - req("q","-name:E" - ,"fq","name:W" - ) - ,"//result[@numFound='2']" - ); - assertQ("one pos filter, one neg", - req("q","-name:E" - ,"fq","name:W" - ,"fq","-name:G" - ) - ,"//result[@numFound='1']" - ); - assertQ("two neg filters", - req("q","-name:E" - ,"fq","-name:W" - ,"fq","-name:G" - ) - ,"//result[@numFound='5']" // ABCCD - ); - - assertQ("three neg filters", - req("q","-name:E" - ,"fq","-name:W" - ,"fq","-name:G" - ,"fq","-name:C" - ) - ,"//result[@numFound='3']" // ABD - ); - - assertQ("compound neg filters", - req("q","-name:E" - ,"fq","-name:W -name:G" - ,"fq","-name:C" - ) - ,"//result[@numFound='3']" // ABD - ); - - assertQ("compound neg filters", - req("q","-name:E" - ,"fq","-name:W -name:G -name:C" - ) - ,"//result[@numFound='3']" // ABD - ); - - assertQ("compound neg filters", - req("q","-name:E" - ,"fq","-(name:W name:G name:C)" - ) - ,"//result[@numFound='3']" // ABD - ); - - assertQ("three neg filters + pos", - req("q","-name:E" - ,"fq","-name:W" - ,"fq","-name:G" - ,"fq","-name:C" - ,"fq","name:G" - ) - ,"//result[@numFound='0']" - ); - assertQ("three neg filters + pos", - req("q","-name:E" - ,"fq","-name:W" - ,"fq","-name:G" - ,"fq","-name:C" - ,"fq","+id:1" - ) - ,"//result[@numFound='1']" // A - ); - assertQ("three neg filters + pos", - req("q","-name:E" - ,"fq","-name:W" - ,"fq","-name:G" - ,"fq","-name:C" - ,"fq","id:[* TO *]" - ) - ,"//result[@numFound='3']" // ABD - ); - - // QueryParser turns term queries on stopwords into a BooleanQuery with - // zero clauses. 
- assertQ("neg base query on stopword", - req("q","-text:stopworda") - ,"//result[@numFound='16']" // ABD - ); - - assertQ("negative filter on stopword", - req("q","id:[* TO *]" - ,"fq","-text:stopworda" - ) - ,"//result[@numFound='16']" // ABD - ); - assertQ("two negative filters on stopword", - req("q","id:[* TO *]" - ,"fq","-text:stopworda" - ,"fq","-text:stopworda" - ) - ,"//result[@numFound='16']" // ABD - ); - assertQ("compound negative filters with stopword", - req("q","id:[* TO *]" - ,"fq","-text:stopworda -id:1" - ) - ,"//result[@numFound='15']" // ABD - ); + assertQ( + "test negative base q matching nothing, with filters", + req("q", "-qlkciyopsbgzyvkylsjhchghjrdf", "fq", "name:A"), + "//result[@numFound='1']"); + + assertQ( + "test negative filters", req("q", "name:A", "fq", "-name:A"), "//result[@numFound='0']"); + assertQ( + "test negative filters", req("q", "name:A", "fq", "-name:A"), "//result[@numFound='0']"); + assertQ( + "test negative filters", req("q", "-name:E", "fq", "name:E"), "//result[@numFound='0']"); + assertQ( + "test negative filters", req("q", "-name:E", "fq", "name:W"), "//result[@numFound='2']"); + assertQ( + "test negative filters", req("q", "-name:E", "fq", "name:W"), "//result[@numFound='2']"); + assertQ( + "one pos filter, one neg", + req("q", "-name:E", "fq", "name:W", "fq", "-name:G"), + "//result[@numFound='1']"); + assertQ( + "two neg filters", + req("q", "-name:E", "fq", "-name:W", "fq", "-name:G"), + "//result[@numFound='5']" // ABCCD + ); + + assertQ( + "three neg filters", + req("q", "-name:E", "fq", "-name:W", "fq", "-name:G", "fq", "-name:C"), + "//result[@numFound='3']" // ABD + ); + + assertQ( + "compound neg filters", + req("q", "-name:E", "fq", "-name:W -name:G", "fq", "-name:C"), + "//result[@numFound='3']" // ABD + ); + + assertQ( + "compound neg filters", + req("q", "-name:E", "fq", "-name:W -name:G -name:C"), + "//result[@numFound='3']" // ABD + ); + + assertQ( + "compound neg filters", + req("q", "-name:E", "fq", "-(name:W name:G name:C)"), + "//result[@numFound='3']" // ABD + ); + + assertQ( + "three neg filters + pos", + req("q", "-name:E", "fq", "-name:W", "fq", "-name:G", "fq", "-name:C", "fq", "name:G"), + "//result[@numFound='0']"); + assertQ( + "three neg filters + pos", + req("q", "-name:E", "fq", "-name:W", "fq", "-name:G", "fq", "-name:C", "fq", "+id:1"), + "//result[@numFound='1']" // A + ); + assertQ( + "three neg filters + pos", + req("q", "-name:E", "fq", "-name:W", "fq", "-name:G", "fq", "-name:C", "fq", "id:[* TO *]"), + "//result[@numFound='3']" // ABD + ); + + // QueryParser turns term queries on stopwords into a BooleanQuery with + // zero clauses. 
+ assertQ( + "neg base query on stopword", req("q", "-text:stopworda"), "//result[@numFound='16']" // ABD + ); + + assertQ( + "negative filter on stopword", + req("q", "id:[* TO *]", "fq", "-text:stopworda"), + "//result[@numFound='16']" // ABD + ); + assertQ( + "two negative filters on stopword", + req("q", "id:[* TO *]", "fq", "-text:stopworda", "fq", "-text:stopworda"), + "//result[@numFound='16']" // ABD + ); + assertQ( + "compound negative filters with stopword", + req("q", "id:[* TO *]", "fq", "-text:stopworda -id:1"), + "//result[@numFound='15']" // ABD + ); } - - } diff --git a/solr/core/src/test/org/apache/solr/search/TestRTGBase.java b/solr/core/src/test/org/apache/solr/search/TestRTGBase.java index f12245a5cd1..9fc0942f119 100644 --- a/solr/core/src/test/org/apache/solr/search/TestRTGBase.java +++ b/solr/core/src/test/org/apache/solr/search/TestRTGBase.java @@ -16,6 +16,7 @@ */ package org.apache.solr.search; +import static org.apache.solr.update.processor.DistributedUpdateProcessor.DistribPhase; import java.io.IOException; import java.util.HashMap; @@ -23,7 +24,6 @@ import java.util.Map; import java.util.Random; import java.util.concurrent.ConcurrentHashMap; - import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.MultiBits; import org.apache.lucene.index.MultiTerms; @@ -36,15 +36,13 @@ import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.update.UpdateLog; -import static org.apache.solr.update.processor.DistributedUpdateProcessor.DistribPhase; - public class TestRTGBase extends SolrTestCaseJ4 { // means we've seen the leader and have version info (i.e. we are a non-leader replica) public static String FROM_LEADER = DistribPhase.FROMLEADER.toString(); - protected final ConcurrentHashMap model = new ConcurrentHashMap<>(); - protected Map committedModel = new HashMap<>(); + protected final ConcurrentHashMap model = new ConcurrentHashMap<>(); + protected Map committedModel = new HashMap<>(); protected long snapshotCount; protected long committedModelClock; protected volatile int lastId; @@ -60,14 +58,13 @@ protected void initModel(int ndocs) { syncArr = new Object[ndocs]; - for (int i=0; i 0) { // return a random number not equal to version - for (;;) { + for (; ; ) { long badVersion = rand.nextInt(); if (badVersion != version && badVersion != 0) return badVersion; } } // if the version does not exist, then we can only specify a positive version - for (;;) { - long badVersion = rand.nextInt() & 0x7fffffff; // mask off sign bit + for (; ; ) { + long badVersion = rand.nextInt() & 0x7fffffff; // mask off sign bit if (badVersion != 0) return badVersion; } } - protected List getLatestVersions() { - try (UpdateLog.RecentUpdates startingRecentUpdates = h.getCore().getUpdateHandler().getUpdateLog().getRecentUpdates()) { + try (UpdateLog.RecentUpdates startingRecentUpdates = + h.getCore().getUpdateHandler().getUpdateLog().getRecentUpdates()) { return startingRecentUpdates.getVersions(100); } } - - protected int getFirstMatch(IndexReader r, Term t) throws IOException { Terms terms = MultiTerms.getTerms(r, t.field()); if (terms == null) return -1; @@ -125,5 +120,4 @@ protected int getFirstMatch(IndexReader r, Term t) throws IOException { } return id == DocIdSetIterator.NO_MORE_DOCS ? 
-1 : id; } - } diff --git a/solr/core/src/test/org/apache/solr/search/TestRandomCollapseQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestRandomCollapseQParserPlugin.java index e384d84a278..d3e28e47af1 100644 --- a/solr/core/src/test/org/apache/solr/search/TestRandomCollapseQParserPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/TestRandomCollapseQParserPlugin.java @@ -16,11 +16,12 @@ */ package org.apache.solr.search; +import static org.hamcrest.core.StringContains.containsString; + import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; - import org.apache.lucene.util.TestUtil; import org.apache.solr.CursorPagingTest; import org.apache.solr.SolrTestCaseJ4; @@ -34,24 +35,25 @@ import org.junit.AfterClass; import org.junit.BeforeClass; -import static org.hamcrest.core.StringContains.containsString; - public class TestRandomCollapseQParserPlugin extends SolrTestCaseJ4 { /** Full SolrServer instance for arbitrary introspection of response data and adding fqs */ public static SolrClient SOLR; + public static List<String> ALL_SORT_FIELD_NAMES; public static List<String> ALL_COLLAPSE_FIELD_NAMES; - private static String[] NULL_POLICIES - = new String[] {CollapsingQParserPlugin.NullPolicy.IGNORE.getName(), - CollapsingQParserPlugin.NullPolicy.COLLAPSE.getName(), - CollapsingQParserPlugin.NullPolicy.EXPAND.getName()}; - + private static String[] NULL_POLICIES = + new String[] { + CollapsingQParserPlugin.NullPolicy.IGNORE.getName(), + CollapsingQParserPlugin.NullPolicy.COLLAPSE.getName(), + CollapsingQParserPlugin.NullPolicy.EXPAND.getName() + }; + @BeforeClass public static void buildIndexAndClient() throws Exception { initCore("solrconfig-minimal.xml", "schema-sorts.xml"); - + final int totalDocs = atLeast(500); for (int i = 1; i <= totalDocs; i++) { SolrInputDocument doc = CursorPagingTest.buildRandomDocument(i); @@ -60,23 +62,24 @@ public static void buildIndexAndClient() throws Exception { assertU(adoc(doc)); } assertU(commit()); - + // Don't close this client, it would shutdown the CoreContainer SOLR = new EmbeddedSolrServer(h.getCoreContainer(), h.coreName); - - ALL_SORT_FIELD_NAMES = CursorPagingTest.pruneAndDeterministicallySort - (h.getCore().getLatestSchema().getFields().keySet()); - + + ALL_SORT_FIELD_NAMES = + CursorPagingTest.pruneAndDeterministicallySort( + h.getCore().getLatestSchema().getFields().keySet()); + ALL_COLLAPSE_FIELD_NAMES = new ArrayList<String>(ALL_SORT_FIELD_NAMES.size()); for (String candidate : ALL_SORT_FIELD_NAMES) { if (candidate.startsWith("str") || candidate.startsWith("float") - || candidate.startsWith("int") ) { + || candidate.startsWith("int")) { ALL_COLLAPSE_FIELD_NAMES.add(candidate); } } } - + @AfterClass public static void cleanupStatics() throws Exception { deleteCore(); @@ -85,16 +88,15 @@ public static void cleanupStatics() throws Exception { } public void testEveryIsolatedSortFieldOnSingleGroup() throws Exception { - + for (String sortField : ALL_SORT_FIELD_NAMES) { for (String dir : Arrays.asList(" asc", " desc")) { - + final String sort = sortField + dir + ", id" + dir; // need id for tie breaker final String q = random().nextBoolean() ? 
"*:*" : CursorPagingTest.buildRandomQuery(); - final SolrParams sortedP = params("q", q, "rows", "1", - "sort", sort); - + final SolrParams sortedP = params("q", q, "rows", "1", "sort", sort); + final QueryResponse sortedRsp = SOLR.query(sortedP); // random data -- might be no docs matching our query @@ -103,59 +105,75 @@ public void testEveryIsolatedSortFieldOnSingleGroup() throws Exception { // check forced array resizing starting from 1 for (String p : Arrays.asList("{!collapse field=", "{!collapse size='1' field=")) { - for (String fq : Arrays.asList - (p + "same_for_all_docs sort='"+sort+"'}", + for (String fq : + Arrays.asList( + p + "same_for_all_docs sort='" + sort + "'}", // nullPolicy=expand shouldn't change anything since every doc has field - p + "same_for_all_docs sort='"+sort+"' nullPolicy=expand}", + p + "same_for_all_docs sort='" + sort + "' nullPolicy=expand}", // a field in no docs with nullPolicy=collapse should have same effect as // collapsing on a field in every doc - p + "not_in_any_docs sort='"+sort+"' nullPolicy=collapse}")) { + p + "not_in_any_docs sort='" + sort + "' nullPolicy=collapse}")) { final SolrParams collapseP = params("q", q, "rows", "1", "fq", fq); - + // since every doc is in the same group, collapse query should return exactly one doc final QueryResponse collapseRsp = SOLR.query(collapseP); - assertEquals("collapse should have produced exactly one doc: " + collapseP, - 1, collapseRsp.getResults().getNumFound()); + assertEquals( + "collapse should have produced exactly one doc: " + collapseP, + 1, + collapseRsp.getResults().getNumFound()); final SolrDocument groupHead = collapseRsp.getResults().get(0); - + // the group head from the collapse query should match the first doc of a simple sort - assertEquals(sortedP + " => " + firstDoc + " :VS: " + collapseP + " => " + groupHead, - firstDoc.getFieldValue("id"), groupHead.getFieldValue("id")); + assertEquals( + sortedP + " => " + firstDoc + " :VS: " + collapseP + " => " + groupHead, + firstDoc.getFieldValue("id"), + groupHead.getFieldValue("id")); } } } } } } - + public void testRandomCollpaseWithSort() throws Exception { - + final int numMainQueriesPerCollapseField = atLeast(5); - + for (String collapseField : ALL_COLLAPSE_FIELD_NAMES) { for (int i = 0; i < numMainQueriesPerCollapseField; i++) { final String topSort = CursorPagingTest.buildRandomSort(ALL_SORT_FIELD_NAMES); final String collapseSort = CursorPagingTest.buildRandomSort(ALL_SORT_FIELD_NAMES); - + final String q = random().nextBoolean() ? "*:*" : CursorPagingTest.buildRandomQuery(); - - final SolrParams mainP = params("q", q, "fl", "id,"+collapseField); - final String csize = random().nextBoolean() ? - "" : " size=" + TestUtil.nextInt(random(),1,10000); + final SolrParams mainP = params("q", q, "fl", "id," + collapseField); + + final String csize = + random().nextBoolean() ? "" : " size=" + TestUtil.nextInt(random(), 1, 10000); final String nullPolicy = randomNullPolicy(); - final String nullPs = nullPolicy.equals(CollapsingQParserPlugin.NullPolicy.IGNORE.getName()) - // ignore is default, randomly be explicit about it - ? (random().nextBoolean() ? "" : " nullPolicy=ignore") - : (" nullPolicy=" + nullPolicy); - - final SolrParams collapseP - = params("sort", topSort, - "rows", "200", - "fq", ("{!collapse" + csize + nullPs + - " field="+collapseField+" sort='"+collapseSort+"'}")); + final String nullPs = + nullPolicy.equals(CollapsingQParserPlugin.NullPolicy.IGNORE.getName()) + // ignore is default, randomly be explicit about it + ? 
(random().nextBoolean() ? "" : " nullPolicy=ignore") + : (" nullPolicy=" + nullPolicy); + + final SolrParams collapseP = + params( + "sort", + topSort, + "rows", + "200", + "fq", + ("{!collapse" + + csize + + nullPs + + " field=" + + collapseField + + " sort='" + + collapseSort + + "'}")); try { final QueryResponse mainRsp = SOLR.query(SolrParams.wrapDefaults(collapseP, mainP)); @@ -163,43 +181,60 @@ public void testRandomCollpaseWithSort() throws Exception { for (SolrDocument doc : mainRsp.getResults()) { final Object groupHeadId = doc.getFieldValue("id"); final Object collapseVal = doc.getFieldValue(collapseField); - + if (null == collapseVal) { if (nullPolicy.equals(CollapsingQParserPlugin.NullPolicy.EXPAND.getName())) { // nothing to check for this doc, it's in its own group continue; } - - assertFalse(groupHeadId + " has null collapseVal but nullPolicy==ignore; " + - "mainP: " + mainP + ", collapseP: " + collapseP, - nullPolicy.equals(CollapsingQParserPlugin.NullPolicy.IGNORE.getName())); + + assertFalse( + groupHeadId + + " has null collapseVal but nullPolicy==ignore; " + + "mainP: " + + mainP + + ", collapseP: " + + collapseP, + nullPolicy.equals(CollapsingQParserPlugin.NullPolicy.IGNORE.getName())); } - + // workaround for SOLR-8082... // // what's important is that we already did the collapsing on the *real* collapseField // to verify the groupHead returned is really the best our verification filter // on docs with that value in a different field containing the exact same values final String checkField = collapseField.replace("float_dv", "float"); - - final String checkFQ = ((null == collapseVal) - ? ("-" + checkField + ":[* TO *]") - : ("{!field f="+checkField+"}" + collapseVal.toString())); - - final SolrParams checkP = params("fq", checkFQ, - "rows", "1", - "sort", collapseSort); - + + final String checkFQ = + ((null == collapseVal) + ? ("-" + checkField + ":[* TO *]") + : ("{!field f=" + checkField + "}" + collapseVal.toString())); + + final SolrParams checkP = + params( + "fq", checkFQ, + "rows", "1", + "sort", collapseSort); + final QueryResponse checkRsp = SOLR.query(SolrParams.wrapDefaults(checkP, mainP)); - - assertTrue("not even 1 match for sanity check query? expected: " + doc, - ! checkRsp.getResults().isEmpty()); + + assertTrue( + "not even 1 match for sanity check query? expected: " + doc, + !checkRsp.getResults().isEmpty()); final SolrDocument firstMatch = checkRsp.getResults().get(0); final Object firstMatchId = firstMatch.getFieldValue("id"); - assertEquals("first match for filtered group '"+ collapseVal + - "' not matching expected group head ... " + - "mainP: " + mainP + ", collapseP: " + collapseP + ", checkP: " + checkP, - groupHeadId, firstMatchId); + assertEquals( + "first match for filtered group '" + + collapseVal + + "' not matching expected group head ... 
" + + "mainP: " + + mainP + + ", collapseP: " + + collapseP + + ", checkP: " + + checkP, + groupHeadId, + firstMatchId); } } catch (Exception e) { throw new RuntimeException("BUG using params: " + collapseP + " + " + mainP, e); @@ -207,31 +242,49 @@ public void testRandomCollpaseWithSort() throws Exception { } } } - + public void testParsedFilterQueryResponse() throws Exception { String nullPolicy = randomNullPolicy(); String groupHeadSort = "'_version_ asc'"; String collapseSize = "5000"; String collapseHint = "top_fc"; - String filterQuery = "{!collapse field=id sort=" + groupHeadSort + " nullPolicy=" + nullPolicy + " size=" + - collapseSize + " hint=" + collapseHint + "}"; + String filterQuery = + "{!collapse field=id sort=" + + groupHeadSort + + " nullPolicy=" + + nullPolicy + + " size=" + + collapseSize + + " hint=" + + collapseHint + + "}"; SolrParams solrParams = params("q", "*:*", "rows", "0", "debug", "true", "fq", filterQuery); QueryResponse response = SOLR.query(solrParams); // Query name is occurring twice, this should be handled in QueryParsing.toString - String expectedParsedFilterString = "CollapsingPostFilter(CollapsingPostFilter(field=id, " + - "nullPolicy=" + nullPolicy + ", GroupHeadSelector(selectorText=" + groupHeadSort.substring(1, - groupHeadSort.length() - 1) + ", type=SORT" + - "), hint=" + collapseHint + ", size=" + collapseSize + "))"; + String expectedParsedFilterString = + "CollapsingPostFilter(CollapsingPostFilter(field=id, " + + "nullPolicy=" + + nullPolicy + + ", GroupHeadSelector(selectorText=" + + groupHeadSort.substring(1, groupHeadSort.length() - 1) + + ", type=SORT" + + "), hint=" + + collapseHint + + ", size=" + + collapseSize + + "))"; List expectedParsedFilterQuery = Collections.singletonList(expectedParsedFilterString); assertEquals(expectedParsedFilterQuery, response.getDebugMap().get("parsed_filter_queries")); - assertEquals(Collections.singletonList(filterQuery), response.getDebugMap().get("filter_queries")); + assertEquals( + Collections.singletonList(filterQuery), response.getDebugMap().get("filter_queries")); } public void testNullPolicy() { String nullPolicy = "xyz"; String groupHeadSort = "'_version_ asc'"; - String filterQuery = "{!collapse field=id sort=" + groupHeadSort + " nullPolicy=" + nullPolicy + "}"; + String filterQuery = + "{!collapse field=id sort=" + groupHeadSort + " nullPolicy=" + nullPolicy + "}"; SolrParams solrParams = params("q", "*:*", "fq", filterQuery); SolrException e = expectThrows(SolrException.class, () -> SOLR.query(solrParams)); @@ -243,7 +296,6 @@ public void testNullPolicy() { } private String randomNullPolicy() { - return NULL_POLICIES[ TestUtil.nextInt(random(), 0, NULL_POLICIES.length-1) ]; + return NULL_POLICIES[TestUtil.nextInt(random(), 0, NULL_POLICIES.length - 1)]; } - } diff --git a/solr/core/src/test/org/apache/solr/search/TestRangeQuery.java b/solr/core/src/test/org/apache/solr/search/TestRangeQuery.java index d5259670510..9ec1d0e27fa 100644 --- a/solr/core/src/test/org/apache/solr/search/TestRangeQuery.java +++ b/solr/core/src/test/org/apache/solr/search/TestRangeQuery.java @@ -16,6 +16,7 @@ */ package org.apache.solr.search; +import com.carrotsearch.hppc.IntHashSet; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; @@ -28,8 +29,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; - -import com.carrotsearch.hppc.IntHashSet; import org.apache.lucene.util.TestUtil; import 
org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrInputDocument; @@ -51,9 +50,10 @@ import org.junit.Test; public class TestRangeQuery extends SolrTestCaseJ4 { - - private final static long DATE_START_TIME_RANDOM_TEST = 1499797224224L; - private final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", Locale.ROOT); + + private static final long DATE_START_TIME_RANDOM_TEST = 1499797224224L; + private final SimpleDateFormat dateFormat = + new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", Locale.ROOT); @BeforeClass public static void beforeClass() throws Exception { @@ -79,11 +79,11 @@ public void tearDown() throws Exception { } void addInt(SolrInputDocument doc, int l, int u, String... fields) { - int v=0; - if (0==l && l==u) { - v=random().nextInt(); + int v = 0; + if (0 == l && l == u) { + v = random().nextInt(); } else { - v=random().nextInt(u-l)+l; + v = random().nextInt(u - l) + l; } for (String field : fields) { doc.addField(field, v); @@ -95,9 +95,9 @@ interface DocProcessor { } public void createIndex(int nDocs, DocProcessor proc) { - for (int i=0; i norm_fields = new HashMap<>(); + Map<String, String[]> norm_fields = new HashMap<>(); norm_fields.put("foo_i", ints); norm_fields.put("foo_l", longs); norm_fields.put("foo_d", doubles); @@ -136,9 +144,8 @@ public void testRangeQueries() throws Exception { norm_fields.put("foo_s", strings); norm_fields.put("foo_dt", dates); - // fields that frange queries should work on - Map<String,String[]> frange_fields = new HashMap<>(); + Map<String, String[]> frange_fields = new HashMap<>(); frange_fields.put("foo_i", ints); frange_fields.put("foo_l", longs); frange_fields.put("foo_d", doubles); @@ -151,15 +158,15 @@ frange_fields.put("foo_s", strings); frange_fields.put("foo_dt", dates); - Map<String,String[]> all_fields = new HashMap<>(); + Map<String, String[]> all_fields = new HashMap<>(); all_fields.putAll(norm_fields); all_fields.putAll(frange_fields); - for (int j=0; j fields = new ArrayList<>(); fields.add("id"); - fields.add(""+j); - for (Map.Entry<String,String[]> entry : all_fields.entrySet()) { + fields.add("" + j); + for (Map.Entry<String, String[]> entry : all_fields.entrySet()) { fields.add(entry.getKey()); fields.add(entry.getValue()[j]); } @@ -173,63 +180,68 @@ assertQ(req("{!frange l=0 u=2}product(id_i,2)"), "*[count(//doc)=2]"); assertQ(req("{!frange l=100 u=102}sum(id_i,100)"), "*[count(//doc)=3]"); - - for (Map.Entry<String,String[]> entry : norm_fields.entrySet()) { + for (Map.Entry<String, String[]> entry : norm_fields.entrySet()) { String f = entry.getKey(); String[] v = entry.getValue(); - assertQ(req(f + ":[* TO *]" ), "*[count(//doc)=3]"); - assertQ(req(f + ":["+v[0]+" TO "+v[2]+"]"), "*[count(//doc)=3]"); - assertQ(req(f + ":["+v[1]+" TO "+v[2]+"]"), "*[count(//doc)=2]"); - assertQ(req(f + ":["+v[0]+" TO "+v[1]+"]"), "*[count(//doc)=2]"); - assertQ(req(f + ":["+v[0]+" TO "+v[0]+"]"), "*[count(//doc)=1]"); - assertQ(req(f + ":["+v[1]+" TO "+v[1]+"]"), "*[count(//doc)=1]"); - assertQ(req(f + ":["+v[2]+" TO "+v[2]+"]"), "*[count(//doc)=1]"); - assertQ(req(f + ":["+v[3]+" TO "+v[3]+"]"), "*[count(//doc)=0]"); - assertQ(req(f + ":["+v[4]+" TO "+v[4]+"]"), "*[count(//doc)=0]"); - - assertQ(req(f + ":{"+v[0]+" TO "+v[2]+"}"), "*[count(//doc)=1]"); - assertQ(req(f + ":{"+v[1]+" TO "+v[2]+"}"), "*[count(//doc)=0]"); - assertQ(req(f + ":{"+v[0]+" TO "+v[1]+"}"), "*[count(//doc)=0]"); - assertQ(req(f + ":{"+v[3]+" TO "+v[4]+"}"), "*[count(//doc)=3]"); + assertQ(req(f + ":[* TO *]"), "*[count(//doc)=3]"); + assertQ(req(f + ":[" + v[0] + " 
TO " + v[2] + "]"), "*[count(//doc)=3]"); + assertQ(req(f + ":[" + v[1] + " TO " + v[2] + "]"), "*[count(//doc)=2]"); + assertQ(req(f + ":[" + v[0] + " TO " + v[1] + "]"), "*[count(//doc)=2]"); + assertQ(req(f + ":[" + v[0] + " TO " + v[0] + "]"), "*[count(//doc)=1]"); + assertQ(req(f + ":[" + v[1] + " TO " + v[1] + "]"), "*[count(//doc)=1]"); + assertQ(req(f + ":[" + v[2] + " TO " + v[2] + "]"), "*[count(//doc)=1]"); + assertQ(req(f + ":[" + v[3] + " TO " + v[3] + "]"), "*[count(//doc)=0]"); + assertQ(req(f + ":[" + v[4] + " TO " + v[4] + "]"), "*[count(//doc)=0]"); + + assertQ(req(f + ":{" + v[0] + " TO " + v[2] + "}"), "*[count(//doc)=1]"); + assertQ(req(f + ":{" + v[1] + " TO " + v[2] + "}"), "*[count(//doc)=0]"); + assertQ(req(f + ":{" + v[0] + " TO " + v[1] + "}"), "*[count(//doc)=0]"); + assertQ(req(f + ":{" + v[3] + " TO " + v[4] + "}"), "*[count(//doc)=3]"); } - for (Map.Entry entry : frange_fields.entrySet()) { + for (Map.Entry entry : frange_fields.entrySet()) { String f = entry.getKey(); String[] v = entry.getValue(); - assertQ(req("{!frange}"+f ), "*[count(//doc)=3]"); - assertQ(req("{!frange" + " l="+v[0]+"}"+f ), "*[count(//doc)=3]"); - assertQ(req("{!frange" + " l="+v[1]+"}"+f ), "*[count(//doc)=2]"); - assertQ(req("{!frange" + " l="+v[2]+"}"+f ), "*[count(//doc)=1]"); - assertQ(req("{!frange" + " l="+v[3]+"}"+f ), "*[count(//doc)=3]"); - assertQ(req("{!frange" + " l="+v[4]+"}"+f ), "*[count(//doc)=0]"); - - assertQ(req("{!frange" + " u="+v[0]+"}"+f ), "*[count(//doc)=1]"); - assertQ(req("{!frange" + " u="+v[1]+"}"+f ), "*[count(//doc)=2]"); - assertQ(req("{!frange" + " u="+v[2]+"}"+f ), "*[count(//doc)=3]"); - assertQ(req("{!frange" + " u="+v[3]+"}"+f ), "*[count(//doc)=0]"); - assertQ(req("{!frange" + " u="+v[4]+"}"+f ), "*[count(//doc)=3]"); - - assertQ(req("{!frange incl=false" + " l="+v[0]+"}"+f ), "*[count(//doc)=2]"); - assertQ(req("{!frange incl=false" + " l="+v[1]+"}"+f ), "*[count(//doc)=1]"); - assertQ(req("{!frange incl=false" + " l="+v[2]+"}"+f ), "*[count(//doc)=0]"); - assertQ(req("{!frange incl=false" + " l="+v[3]+"}"+f ), "*[count(//doc)=3]"); - assertQ(req("{!frange incl=false" + " l="+v[4]+"}"+f ), "*[count(//doc)=0]"); - - assertQ(req("{!frange incu=false" + " u="+v[0]+"}"+f ), "*[count(//doc)=0]"); - assertQ(req("{!frange incu=false" + " u="+v[1]+"}"+f ), "*[count(//doc)=1]"); - assertQ(req("{!frange incu=false" + " u="+v[2]+"}"+f ), "*[count(//doc)=2]"); - assertQ(req("{!frange incu=false" + " u="+v[3]+"}"+f ), "*[count(//doc)=0]"); - assertQ(req("{!frange incu=false" + " u="+v[4]+"}"+f ), "*[count(//doc)=3]"); - - assertQ(req("{!frange incl=true incu=true" + " l=" +v[0] +" u="+v[2]+"}"+f ), "*[count(//doc)=3]"); - assertQ(req("{!frange incl=false incu=false" + " l=" +v[0] +" u="+v[2]+"}"+f ), "*[count(//doc)=1]"); - assertQ(req("{!frange incl=false incu=false" + " l=" +v[3] +" u="+v[4]+"}"+f ), "*[count(//doc)=3]"); + assertQ(req("{!frange}" + f), "*[count(//doc)=3]"); + assertQ(req("{!frange" + " l=" + v[0] + "}" + f), "*[count(//doc)=3]"); + assertQ(req("{!frange" + " l=" + v[1] + "}" + f), "*[count(//doc)=2]"); + assertQ(req("{!frange" + " l=" + v[2] + "}" + f), "*[count(//doc)=1]"); + assertQ(req("{!frange" + " l=" + v[3] + "}" + f), "*[count(//doc)=3]"); + assertQ(req("{!frange" + " l=" + v[4] + "}" + f), "*[count(//doc)=0]"); + + assertQ(req("{!frange" + " u=" + v[0] + "}" + f), "*[count(//doc)=1]"); + assertQ(req("{!frange" + " u=" + v[1] + "}" + f), "*[count(//doc)=2]"); + assertQ(req("{!frange" + " u=" + v[2] + "}" + f), "*[count(//doc)=3]"); 
+ assertQ(req("{!frange" + " u=" + v[3] + "}" + f), "*[count(//doc)=0]"); + assertQ(req("{!frange" + " u=" + v[4] + "}" + f), "*[count(//doc)=3]"); + + assertQ(req("{!frange incl=false" + " l=" + v[0] + "}" + f), "*[count(//doc)=2]"); + assertQ(req("{!frange incl=false" + " l=" + v[1] + "}" + f), "*[count(//doc)=1]"); + assertQ(req("{!frange incl=false" + " l=" + v[2] + "}" + f), "*[count(//doc)=0]"); + assertQ(req("{!frange incl=false" + " l=" + v[3] + "}" + f), "*[count(//doc)=3]"); + assertQ(req("{!frange incl=false" + " l=" + v[4] + "}" + f), "*[count(//doc)=0]"); + + assertQ(req("{!frange incu=false" + " u=" + v[0] + "}" + f), "*[count(//doc)=0]"); + assertQ(req("{!frange incu=false" + " u=" + v[1] + "}" + f), "*[count(//doc)=1]"); + assertQ(req("{!frange incu=false" + " u=" + v[2] + "}" + f), "*[count(//doc)=2]"); + assertQ(req("{!frange incu=false" + " u=" + v[3] + "}" + f), "*[count(//doc)=0]"); + assertQ(req("{!frange incu=false" + " u=" + v[4] + "}" + f), "*[count(//doc)=3]"); + + assertQ( + req("{!frange incl=true incu=true" + " l=" + v[0] + " u=" + v[2] + "}" + f), + "*[count(//doc)=3]"); + assertQ( + req("{!frange incl=false incu=false" + " l=" + v[0] + " u=" + v[2] + "}" + f), + "*[count(//doc)=1]"); + assertQ( + req("{!frange incl=false incu=false" + " l=" + v[3] + " u=" + v[4] + "}" + f), + "*[count(//doc)=3]"); } // now pick a random range to use to delete (some of) the docs... - + final boolean incl = random().nextBoolean(); final boolean incu = random().nextBoolean(); final int expected = 0 + (incl ? 0 : 1) + (incu ? 0 : 1); @@ -241,90 +253,109 @@ public void testRangeQueries() throws Exception { } else { // frange String field = randomKey(frange_fields); String[] values = frange_fields.get(field); - dbq = "{!frange incl=" + incl + " incu=" + incu + " l=" + values[0] + " u=" + values[2] + "}" + field; + dbq = + "{!frange incl=" + + incl + + " incu=" + + incu + + " l=" + + values[0] + + " u=" + + values[2] + + "}" + + field; } if (random().nextBoolean()) { // wrap in a BQ String field = randomKey(norm_fields); String value = norm_fields.get(field)[1]; // wraping shouldn't affect expected - dbq = "("+field+":\""+value+"\" OR " + dbq + ")"; - } - + dbq = "(" + field + ":\"" + value + "\" OR " + dbq + ")"; + } + assertU(delQ(dbq)); assertU(commit()); - assertQ(req("q","*:*","_trace_dbq",dbq), - "*[count(//doc)=" + expected + "]"); - + assertQ(req("q", "*:*", "_trace_dbq", dbq), "*[count(//doc)=" + expected + "]"); } @Test public void testRandomRangeQueries() throws Exception { - String handler=""; - final String[] fields = {"foo_s","foo_i","foo_l","foo_f","foo_d", - "foo_ti","foo_tl","foo_tf","foo_td" }; - + String handler = ""; + final String[] fields = { + "foo_s", "foo_i", "foo_l", "foo_f", "foo_d", "foo_ti", "foo_tl", "foo_tf", "foo_td" + }; + // NOTE: foo_s supports ranges, but for the arrays below we are only // interested in fields that support *equivalent* ranges -- strings // are not ordered the same as ints/longs, so we can't test the ranges // for equivilence across diff fields. 
// // fields that a normal range query will work correctly on - String[] norm_fields = {"foo_i","foo_l","foo_f","foo_d", - "foo_ti","foo_tl","foo_tf","foo_td" }; + String[] norm_fields = { + "foo_i", "foo_l", "foo_f", "foo_d", + "foo_ti", "foo_tl", "foo_tf", "foo_td" + }; // fields that a value source range query should work on - String[] frange_fields = {"foo_i","foo_l","foo_f","foo_d"}; + String[] frange_fields = {"foo_i", "foo_l", "foo_f", "foo_d"}; - final int l= -1 * atLeast(50); - final int u= atLeast(250); + final int l = -1 * atLeast(50); + final int u = atLeast(250); // sometimes a very small index, sometimes a very large index final int numDocs = random().nextBoolean() ? random().nextInt(50) : atLeast(1000); - createIndex(numDocs, doc -> { - // 10% of the docs have missing values - if (random().nextInt(10)!=0) addInt(doc, l,u, fields); - }); + createIndex( + numDocs, + doc -> { + // 10% of the docs have missing values + if (random().nextInt(10) != 0) addInt(doc, l, u, fields); + }); final int numIters = atLeast(1000); - for (int i=0; i < numIters; i++) { + for (int i = 0; i < numIters; i++) { int lower = TestUtil.nextInt(random(), 2 * l, u); int upper = TestUtil.nextInt(random(), lower, 2 * u); - boolean lowerMissing = random().nextInt(10)==1; - boolean upperMissing = random().nextInt(10)==1; + boolean lowerMissing = random().nextInt(10) == 1; + boolean upperMissing = random().nextInt(10) == 1; boolean inclusive = lowerMissing || upperMissing || random().nextBoolean(); - // lower=2; upper=2; inclusive=true; - // inclusive=true; lowerMissing=true; upperMissing=true; + // lower=2; upper=2; inclusive=true; + // inclusive=true; lowerMissing=true; upperMissing=true; List qs = new ArrayList<>(); for (String field : norm_fields) { - String q = field + ':' + (inclusive?'[':'{') - + (lowerMissing?"*":lower) + String q = + field + + ':' + + (inclusive ? '[' : '{') + + (lowerMissing ? "*" : lower) + " TO " - + (upperMissing?"*":upper) - + (inclusive?']':'}'); + + (upperMissing ? "*" : upper) + + (inclusive ? ']' : '}'); qs.add(q); } for (String field : frange_fields) { - String q = "{!frange v="+field - + (lowerMissing?"":(" l="+lower)) - + (upperMissing?"":(" u="+upper)) - + (inclusive?"":" incl=false") - + (inclusive?"":" incu=false") + String q = + "{!frange v=" + + field + + (lowerMissing ? "" : (" l=" + lower)) + + (upperMissing ? "" : (" u=" + upper)) + + (inclusive ? "" : " incl=false") + + (inclusive ? "" : " incu=false") + "}"; qs.add(q); } String lastQ = null; - SolrQueryResponse last=null; + SolrQueryResponse last = null; for (String q : qs) { // System.out.println("QUERY="+q); - SolrQueryRequest req = req("q",q,"rows",""+numDocs); + SolrQueryRequest req = req("q", q, "rows", "" + numDocs); SolrQueryResponse qr = h.queryAndResponse(handler, req); if (last != null) { - // we only test if the same docs matched since some queries will include factors like idf, etc. - DocList rA = ((ResultContext)qr.getResponse()).getDocList(); - DocList rB = ((ResultContext)last.getResponse()).getDocList(); - sameDocs(q + " vs " + lastQ, rA, rB ); + // we only test if the same docs matched since some queries will include factors like idf, + // etc. 
+ DocList rA = ((ResultContext) qr.getResponse()).getDocList(); + DocList rB = ((ResultContext) last.getResponse()).getDocList(); + sameDocs(q + " vs " + lastQ, rA, rB); } req.close(); last = qr; @@ -332,43 +363,48 @@ public void testRandomRangeQueries() throws Exception { } } - // now build some random queries (against *any* field) and validate that using it in a DBQ changes - // the index by the expected number of docs + // now build some random queries (against *any* field) and validate that using it in a DBQ + // changes the index by the expected number of docs long numDocsLeftInIndex = numDocs; - final int numDBQs= atLeast(10); - for (int i=0; i < numDBQs; i++) { + final int numDBQs = atLeast(10); + for (int i = 0; i < numDBQs; i++) { int lower = TestUtil.nextInt(random(), 2 * l, u); int upper = TestUtil.nextInt(random(), lower, 2 * u); - boolean lowerMissing = random().nextInt(10)==1; - boolean upperMissing = random().nextInt(10)==1; + boolean lowerMissing = random().nextInt(10) == 1; + boolean upperMissing = random().nextInt(10) == 1; boolean inclusive = lowerMissing || upperMissing || random().nextBoolean(); - + String dbq = null; if (random().nextBoolean()) { // regular range String field = fields[random().nextInt(fields.length)]; - dbq = field + ':' + (inclusive?'[':'{') - + (lowerMissing?"*":lower) - + " TO " - + (upperMissing?"*":upper) - + (inclusive?']':'}'); - } else { // frange + dbq = + field + + ':' + + (inclusive ? '[' : '{') + + (lowerMissing ? "*" : lower) + + " TO " + + (upperMissing ? "*" : upper) + + (inclusive ? ']' : '}'); + } else { // frange String field = frange_fields[random().nextInt(frange_fields.length)]; - dbq = "{!frange v="+field - + (lowerMissing?"":(" l="+lower)) - + (upperMissing?"":(" u="+upper)) - + (inclusive?"":" incl=false") - + (inclusive?"":" incu=false") - + "}"; + dbq = + "{!frange v=" + + field + + (lowerMissing ? "" : (" l=" + lower)) + + (upperMissing ? "" : (" u=" + upper)) + + (inclusive ? "" : " incl=false") + + (inclusive ? "" : " incu=false") + + "}"; } - try (SolrQueryRequest req = req("q",dbq,"rows","0")) { + try (SolrQueryRequest req = req("q", dbq, "rows", "0")) { SolrQueryResponse qr = h.queryAndResponse(handler, req); - numDocsLeftInIndex -= ((ResultContext)qr.getResponse()).getDocList().matches(); + numDocsLeftInIndex -= ((ResultContext) qr.getResponse()).getDocList().matches(); } assertU(delQ(dbq)); assertU(commit()); - try (SolrQueryRequest req = req("q","*:*","rows","0","_trace_after_dbq",dbq)) { + try (SolrQueryRequest req = req("q", "*:*", "rows", "0", "_trace_after_dbq", dbq)) { SolrQueryResponse qr = h.queryAndResponse(handler, req); - final long allDocsFound = ((ResultContext)qr.getResponse()).getDocList().matches(); + final long allDocsFound = ((ResultContext) qr.getResponse()).getDocList().matches(); assertEquals(dbq, numDocsLeftInIndex, allDocsFound); } } @@ -379,18 +415,27 @@ public void testRangeQueryWithFilterCache() throws Exception { // sometimes a very small index, sometimes a very large index // final int numDocs = random().nextBoolean() ? random().nextInt(50) : atLeast(1000); final int numDocs = 99; - createIndex(numDocs, doc -> { - addInt(doc, 0, 0, "foo_i"); - }); - - // ensure delay comes after createIndex - so we don't affect/count any cache warming from queries left over by other test methods - TestInjection.delayBeforeCreatingNewDocSet = TEST_NIGHTLY ? 
50 : 500; // Run more queries nightly, so use shorter delay - - final int MAX_QUERY_RANGE = 222; // Arbitrary number in the middle of the value range - final int QUERY_START = TEST_NIGHTLY ? 1 : MAX_QUERY_RANGE; // Either run queries for the full range, or just the last one + createIndex( + numDocs, + doc -> { + addInt(doc, 0, 0, "foo_i"); + }); + + // ensure delay comes after createIndex - so we don't affect/count any cache warming from + // queries left over by other test methods + TestInjection.delayBeforeCreatingNewDocSet = + TEST_NIGHTLY ? 50 : 500; // Run more queries nightly, so use shorter delay + + final int MAX_QUERY_RANGE = 222; // Arbitrary number in the middle of the value range + final int QUERY_START = + TEST_NIGHTLY + ? 1 + : MAX_QUERY_RANGE; // Either run queries for the full range, or just the last one final int NUM_QUERIES = TEST_NIGHTLY ? 101 : 10; - for (int j = QUERY_START ; j <= MAX_QUERY_RANGE; j++) { - ExecutorService queryService = ExecutorUtil.newMDCAwareFixedThreadPool(4, new SolrNamedThreadFactory("TestRangeQuery-" + j)); + for (int j = QUERY_START; j <= MAX_QUERY_RANGE; j++) { + ExecutorService queryService = + ExecutorUtil.newMDCAwareFixedThreadPool( + 4, new SolrNamedThreadFactory("TestRangeQuery-" + j)); try (SolrCore core = h.getCoreInc()) { SolrRequestHandler defaultHandler = core.getRequestHandler(""); @@ -398,23 +443,28 @@ public void testRangeQueryWithFilterCache() throws Exception { params.set("q", "*:*"); params.add("fq", "id:[0 TO " + j + "]"); // These should all come from FilterCache - // Regular: 10 threads with 4 executors would be enough for 3 waves, or approximately 1500ms of delay + // Regular: 10 threads with 4 executors would be enough for 3 waves, or approximately 1500ms + // of delay // Nightly: 101 threads with 4 executors is 26 waves, approximately 1300ms delay CountDownLatch atLeastOnceCompleted = new CountDownLatch(TEST_NIGHTLY ? 
30 : 1); for (int i = 0; i < NUM_QUERIES; i++) { - queryService.submit(() -> { - try (SolrQueryRequest req = req(params)) { - core.execute(defaultHandler, req, new SolrQueryResponse()); - } - atLeastOnceCompleted.countDown(); - }); + queryService.submit( + () -> { + try (SolrQueryRequest req = req(params)) { + core.execute(defaultHandler, req, new SolrQueryResponse()); + } + atLeastOnceCompleted.countDown(); + }); } queryService.shutdown(); // No more requests will be queued up atLeastOnceCompleted.await(); // Wait for the first batch of queries to complete - assertTrue(queryService.awaitTermination(1, TimeUnit.SECONDS)); // All queries after should be very fast + assertTrue( + queryService.awaitTermination( + 1, TimeUnit.SECONDS)); // All queries after should be very fast - assertEquals("Create only one DocSet outside of cache", 1, TestInjection.countDocSetDelays.get()); + assertEquals( + "Create only one DocSet outside of cache", 1, TestInjection.countDocSetDelays.get()); } TestInjection.countDocSetDelays.set(0); } @@ -422,30 +472,60 @@ public void testRangeQueryWithFilterCache() throws Exception { @Test public void testRangeQueryEndpointTO() throws Exception { - assertEquals("[to TO to]", QParser.getParser("[to TO to]", req("df", "text")).getQuery().toString("text")); - assertEquals("[to TO to]", QParser.getParser("[to TO TO]", req("df", "text")).getQuery().toString("text")); - assertEquals("[to TO to]", QParser.getParser("[TO TO to]", req("df", "text")).getQuery().toString("text")); - assertEquals("[to TO to]", QParser.getParser("[TO TO TO]", req("df", "text")).getQuery().toString("text")); - - assertEquals("[to TO to]", QParser.getParser("[\"TO\" TO \"TO\"]", req("df", "text")).getQuery().toString("text")); - assertEquals("[to TO to]", QParser.getParser("[\"TO\" TO TO]", req("df", "text")).getQuery().toString("text")); - assertEquals("[to TO to]", QParser.getParser("[TO TO \"TO\"]", req("df", "text")).getQuery().toString("text")); - - assertEquals("[to TO xx]", QParser.getParser("[to TO xx]", req("df", "text")).getQuery().toString("text")); - assertEquals("[to TO xx]", QParser.getParser("[\"TO\" TO xx]", req("df", "text")).getQuery().toString("text")); - assertEquals("[to TO xx]", QParser.getParser("[TO TO xx]", req("df", "text")).getQuery().toString("text")); - - assertEquals("[xx TO to]", QParser.getParser("[xx TO to]", req("df", "text")).getQuery().toString("text")); - assertEquals("[xx TO to]", QParser.getParser("[xx TO \"TO\"]", req("df", "text")).getQuery().toString("text")); - assertEquals("[xx TO to]", QParser.getParser("[xx TO TO]", req("df", "text")).getQuery().toString("text")); + assertEquals( + "[to TO to]", + QParser.getParser("[to TO to]", req("df", "text")).getQuery().toString("text")); + assertEquals( + "[to TO to]", + QParser.getParser("[to TO TO]", req("df", "text")).getQuery().toString("text")); + assertEquals( + "[to TO to]", + QParser.getParser("[TO TO to]", req("df", "text")).getQuery().toString("text")); + assertEquals( + "[to TO to]", + QParser.getParser("[TO TO TO]", req("df", "text")).getQuery().toString("text")); + + assertEquals( + "[to TO to]", + QParser.getParser("[\"TO\" TO \"TO\"]", req("df", "text")).getQuery().toString("text")); + assertEquals( + "[to TO to]", + QParser.getParser("[\"TO\" TO TO]", req("df", "text")).getQuery().toString("text")); + assertEquals( + "[to TO to]", + QParser.getParser("[TO TO \"TO\"]", req("df", "text")).getQuery().toString("text")); + + assertEquals( + "[to TO xx]", + QParser.getParser("[to TO xx]", req("df", 
"text")).getQuery().toString("text")); + assertEquals( + "[to TO xx]", + QParser.getParser("[\"TO\" TO xx]", req("df", "text")).getQuery().toString("text")); + assertEquals( + "[to TO xx]", + QParser.getParser("[TO TO xx]", req("df", "text")).getQuery().toString("text")); + + assertEquals( + "[xx TO to]", + QParser.getParser("[xx TO to]", req("df", "text")).getQuery().toString("text")); + assertEquals( + "[xx TO to]", + QParser.getParser("[xx TO \"TO\"]", req("df", "text")).getQuery().toString("text")); + assertEquals( + "[xx TO to]", + QParser.getParser("[xx TO TO]", req("df", "text")).getQuery().toString("text")); } @Test public void testRangeQueryRequiresTO() throws Exception { - assertEquals("{a TO b}", QParser.getParser("{A TO B}", req("df", "text")).getQuery().toString("text")); - assertEquals("[a TO b}", QParser.getParser("[A TO B}", req("df", "text")).getQuery().toString("text")); - assertEquals("{a TO b]", QParser.getParser("{A TO B]", req("df", "text")).getQuery().toString("text")); - assertEquals("[a TO b]", QParser.getParser("[A TO B]", req("df", "text")).getQuery().toString("text")); + assertEquals( + "{a TO b}", QParser.getParser("{A TO B}", req("df", "text")).getQuery().toString("text")); + assertEquals( + "[a TO b}", QParser.getParser("[A TO B}", req("df", "text")).getQuery().toString("text")); + assertEquals( + "{a TO b]", QParser.getParser("{A TO B]", req("df", "text")).getQuery().toString("text")); + assertEquals( + "[a TO b]", QParser.getParser("[A TO B]", req("df", "text")).getQuery().toString("text")); // " TO " is required between range endpoints expectThrows(SyntaxError.class, () -> QParser.getParser("{A B}", req("df", "text")).getQuery()); @@ -453,47 +533,140 @@ public void testRangeQueryRequiresTO() throws Exception { expectThrows(SyntaxError.class, () -> QParser.getParser("{A B]", req("df", "text")).getQuery()); expectThrows(SyntaxError.class, () -> QParser.getParser("[A B]", req("df", "text")).getQuery()); - expectThrows(SyntaxError.class, () -> QParser.getParser("{TO B}", req("df", "text")).getQuery()); - expectThrows(SyntaxError.class, () -> QParser.getParser("[TO B}", req("df", "text")).getQuery()); - expectThrows(SyntaxError.class, () -> QParser.getParser("{TO B]", req("df", "text")).getQuery()); - expectThrows(SyntaxError.class, () -> QParser.getParser("[TO B]", req("df", "text")).getQuery()); - - expectThrows(SyntaxError.class, () -> QParser.getParser("{A TO}", req("df", "text")).getQuery()); - expectThrows(SyntaxError.class, () -> QParser.getParser("[A TO}", req("df", "text")).getQuery()); - expectThrows(SyntaxError.class, () -> QParser.getParser("{A TO]", req("df", "text")).getQuery()); - expectThrows(SyntaxError.class, () -> QParser.getParser("[A TO]", req("df", "text")).getQuery()); + expectThrows( + SyntaxError.class, () -> QParser.getParser("{TO B}", req("df", "text")).getQuery()); + expectThrows( + SyntaxError.class, () -> QParser.getParser("[TO B}", req("df", "text")).getQuery()); + expectThrows( + SyntaxError.class, () -> QParser.getParser("{TO B]", req("df", "text")).getQuery()); + expectThrows( + SyntaxError.class, () -> QParser.getParser("[TO B]", req("df", "text")).getQuery()); + + expectThrows( + SyntaxError.class, () -> QParser.getParser("{A TO}", req("df", "text")).getQuery()); + expectThrows( + SyntaxError.class, () -> QParser.getParser("[A TO}", req("df", "text")).getQuery()); + expectThrows( + SyntaxError.class, () -> QParser.getParser("{A TO]", req("df", "text")).getQuery()); + expectThrows( + SyntaxError.class, () -> 
QParser.getParser("[A TO]", req("df", "text")).getQuery()); } @Test public void testCompareTypesRandomRangeQueries() throws Exception { int cardinality = 10000; - Map types = new HashMap<>(); //single and multivalued field types - Map typesMv = new HashMap<>(); // multivalued field types only - types.put(NumberType.INTEGER, new String[]{"ti", "ti_dv", "ti_ni_dv", "i_p", "i_ni_p", "i_ndv_p", "tis", "tis_dv", "tis_ni_dv", "is_p", "is_ni_p", "is_ndv_p"}); - types.put(NumberType.LONG, new String[]{"tl", "tl_dv", "tl_ni_dv", "l_p", "l_ni_p", "l_ndv_p", "tls", "tls_dv", "tls_ni_dv", "ls_p", "ls_ni_p", "ls_ndv_p"}); - types.put(NumberType.FLOAT, new String[]{"tf", "tf_dv", "tf_ni_dv", "f_p", "f_ni_p", "f_ndv_p", "tfs", "tfs_dv", "tfs_ni_dv", "fs_p", "fs_ni_p", "fs_ndv_p"}); - types.put(NumberType.DOUBLE, new String[]{"td", "td_dv", "td_ni_dv", "d_p", "d_ni_p", "d_ndv_p", "tds", "tds_dv", "tds_ni_dv", "ds_p", "ds_ni_p", "ds_ndv_p"}); - types.put(NumberType.DATE, new String[]{"tdt", "tdt_dv", "tdt_ni_dv", "dt_p", "dt_ni_p", "dt_ndv_p", "tdts", "tdts_dv", "tdts_ni_dv", "dts_p", "dts_ni_p", "dts_ndv_p"}); - typesMv.put(NumberType.INTEGER, new String[]{"tis", "tis_dv", "tis_ni_dv", "is_p", "is_ni_p", "is_ndv_p"}); - typesMv.put(NumberType.LONG, new String[]{"tls", "tls_dv", "tls_ni_dv", "ls_p", "ls_ni_p", "ls_ndv_p"}); - typesMv.put(NumberType.FLOAT, new String[]{"tfs", "tfs_dv", "tfs_ni_dv", "fs_p", "fs_ni_p", "fs_ndv_p"}); - typesMv.put(NumberType.DOUBLE, new String[]{"tds", "tds_dv", "tds_ni_dv", "ds_p", "ds_ni_p", "ds_ndv_p"}); - typesMv.put(NumberType.DATE, new String[]{"tdts", "tdts_dv", "tdts_ni_dv", "dts_p", "dts_ni_p", "dts_ndv_p"}); + Map types = new HashMap<>(); // single and multivalued field types + Map typesMv = new HashMap<>(); // multivalued field types only + types.put( + NumberType.INTEGER, + new String[] { + "ti", + "ti_dv", + "ti_ni_dv", + "i_p", + "i_ni_p", + "i_ndv_p", + "tis", + "tis_dv", + "tis_ni_dv", + "is_p", + "is_ni_p", + "is_ndv_p" + }); + types.put( + NumberType.LONG, + new String[] { + "tl", + "tl_dv", + "tl_ni_dv", + "l_p", + "l_ni_p", + "l_ndv_p", + "tls", + "tls_dv", + "tls_ni_dv", + "ls_p", + "ls_ni_p", + "ls_ndv_p" + }); + types.put( + NumberType.FLOAT, + new String[] { + "tf", + "tf_dv", + "tf_ni_dv", + "f_p", + "f_ni_p", + "f_ndv_p", + "tfs", + "tfs_dv", + "tfs_ni_dv", + "fs_p", + "fs_ni_p", + "fs_ndv_p" + }); + types.put( + NumberType.DOUBLE, + new String[] { + "td", + "td_dv", + "td_ni_dv", + "d_p", + "d_ni_p", + "d_ndv_p", + "tds", + "tds_dv", + "tds_ni_dv", + "ds_p", + "ds_ni_p", + "ds_ndv_p" + }); + types.put( + NumberType.DATE, + new String[] { + "tdt", + "tdt_dv", + "tdt_ni_dv", + "dt_p", + "dt_ni_p", + "dt_ndv_p", + "tdts", + "tdts_dv", + "tdts_ni_dv", + "dts_p", + "dts_ni_p", + "dts_ndv_p" + }); + typesMv.put( + NumberType.INTEGER, + new String[] {"tis", "tis_dv", "tis_ni_dv", "is_p", "is_ni_p", "is_ndv_p"}); + typesMv.put( + NumberType.LONG, + new String[] {"tls", "tls_dv", "tls_ni_dv", "ls_p", "ls_ni_p", "ls_ndv_p"}); + typesMv.put( + NumberType.FLOAT, + new String[] {"tfs", "tfs_dv", "tfs_ni_dv", "fs_p", "fs_ni_p", "fs_ndv_p"}); + typesMv.put( + NumberType.DOUBLE, + new String[] {"tds", "tds_dv", "tds_ni_dv", "ds_p", "ds_ni_p", "ds_ndv_p"}); + typesMv.put( + NumberType.DATE, + new String[] {"tdts", "tdts_dv", "tdts_ni_dv", "dts_p", "dts_ni_p", "dts_ndv_p"}); for (int i = 0; i < atLeast(500); i++) { if (random().nextInt(50) == 0) { - //have some empty docs + // have some empty docs assertU(adoc("id", String.valueOf(i))); continue; } if 
(random().nextInt(100) == 0 && i > 0) { - //delete some docs + // delete some docs assertU(delI(String.valueOf(i - 1))); } SolrInputDocument document = new SolrInputDocument(); document.setField("id", i); - for (Map.Entry entry:types.entrySet()) { + for (Map.Entry entry : types.entrySet()) { NumberType type = entry.getKey(); String val = null; List vals = null; @@ -520,15 +693,14 @@ public void testCompareTypesRandomRangeQueries() throws Exception { break; default: throw new AssertionError(); - } // SingleValue - for (String fieldSuffix:entry.getValue()) { + for (String fieldSuffix : entry.getValue()) { document.setField("field_sv_" + fieldSuffix, val); } // MultiValue - for (String fieldSuffix:typesMv.get(type)) { - for (String value:vals) { + for (String fieldSuffix : typesMv.get(type)) { + for (String value : vals) { document.addField("field_mv_" + fieldSuffix, value); } } @@ -552,22 +724,44 @@ public void testCompareTypesRandomRangeQueries() throws Exception { } private void doTestQuery(int cardinality, boolean mv, String[] types) throws Exception { - String[] startOptions = new String[]{"{", "["}; - String[] endOptions = new String[]{"}", "]"}; + String[] startOptions = new String[] {"{", "["}; + String[] endOptions = new String[] {"}", "]"}; String[] qRange = getRandomRange(cardinality, types[0]); String start = pickRandom(startOptions); String end = pickRandom(endOptions); long expectedHits = doRangeQuery(mv, start, end, types[0], qRange); for (int i = 1; i < types.length; i++) { - assertEquals("Unexpected results from query when comparing " + types[0] + " with " + types[i] + " and query: " + - start + qRange[0] + " TO " + qRange[1] + end + "\n", - expectedHits, doRangeQuery(mv, start, end, types[i], qRange)); + assertEquals( + "Unexpected results from query when comparing " + + types[0] + + " with " + + types[i] + + " and query: " + + start + + qRange[0] + + " TO " + + qRange[1] + + end + + "\n", + expectedHits, + doRangeQuery(mv, start, end, types[i], qRange)); } } - private long doRangeQuery(boolean mv, String start, String end, String field, String[] qRange) throws Exception { + private long doRangeQuery(boolean mv, String start, String end, String field, String[] qRange) + throws Exception { ModifiableSolrParams params = new ModifiableSolrParams(); - params.set("q", "field_" + (mv?"mv_":"sv_") + field + ":" + start + qRange[0] + " TO " + qRange[1] + end); + params.set( + "q", + "field_" + + (mv ? "mv_" : "sv_") + + field + + ":" + + start + + qRange[0] + + " TO " + + qRange[1] + + end); try (SolrQueryRequest req = req(params)) { return (long) h.queryAndResponse("", req).getToLog().get("hits"); } @@ -605,9 +799,8 @@ private String[] getRandomRange(int max, String fieldName) { break; default: throw new AssertionError("Unexpected number type"); - } - if (random().nextInt(100) >= 1) {// sometimes don't sort the values. Should result in 0 hits + if (random().nextInt(100) >= 1) { // sometimes don't sort the values. 
Should result in 0 hits Arrays.sort(values); } } @@ -633,7 +826,6 @@ private String[] getRandomRange(int max, String fieldName) { return stringValues; } - // Helper methods private String randomDate(int cardinality) { return dateFormat.format(new Date(randomMs(cardinality))); @@ -641,7 +833,7 @@ private String randomDate(int cardinality) { private List getRandomDates(int numValues, int cardinality) { List vals = new ArrayList<>(numValues); - for (int i = 0; i < numValues;i++) { + for (int i = 0; i < numValues; i++) { vals.add(randomDate(cardinality)); } return vals; @@ -649,31 +841,31 @@ private List getRandomDates(int numValues, int cardinality) { private List getRandomDoubles(int numValues, int cardinality) { List vals = new ArrayList<>(numValues); - for (int i = 0; i < numValues;i++) { + for (int i = 0; i < numValues; i++) { vals.add(randomDouble(cardinality)); } return vals; } - + private List getRandomFloats(int numValues, int cardinality) { List vals = new ArrayList<>(numValues); - for (int i = 0; i < numValues;i++) { + for (int i = 0; i < numValues; i++) { vals.add(randomFloat(cardinality)); } return vals; } - + private List getRandomInts(int numValues, int cardinality) { List vals = new ArrayList<>(numValues); - for (int i = 0; i < numValues;i++) { + for (int i = 0; i < numValues; i++) { vals.add(randomInt(cardinality)); } return vals; } - + private List getRandomLongs(int numValues, int cardinality) { List vals = new ArrayList<>(numValues); - for (int i = 0; i < numValues;i++) { + for (int i = 0; i < numValues; i++) { vals.add(randomLong(cardinality)); } return vals; @@ -681,14 +873,15 @@ private List getRandomLongs(int numValues, int cardinality) { List toStringList(List input) { List newList = new ArrayList<>(input.size()); - for (T element:input) { + for (T element : input) { newList.add(String.valueOf(element)); } return newList; } long randomMs(int cardinality) { - return DATE_START_TIME_RANDOM_TEST + random().nextInt(cardinality) * 1000 * (random().nextBoolean()?1:-1); + return DATE_START_TIME_RANDOM_TEST + + random().nextInt(cardinality) * 1000 * (random().nextBoolean() ? 1 : -1); } double randomDouble(int cardinality) { @@ -707,7 +900,7 @@ long randomMs(int cardinality) { while (d.isNaN()) { d = random().nextDouble(); } - return d * cardinality * (random().nextBoolean()?1:-1); + return d * cardinality * (random().nextBoolean() ? 1 : -1); } float randomFloat(int cardinality) { @@ -726,7 +919,7 @@ float randomFloat(int cardinality) { while (f.isNaN()) { f = random().nextFloat(); } - return f * cardinality * (random().nextBoolean()?1:-1); + return f * cardinality * (random().nextBoolean() ? 1 : -1); } int randomInt(int cardinality) { @@ -735,7 +928,7 @@ int randomInt(int cardinality) { if (num == 0) return Integer.MAX_VALUE; if (num == 1) return Integer.MIN_VALUE; } - return random().nextInt(cardinality) * (random().nextBoolean()?1:-1); + return random().nextInt(cardinality) * (random().nextBoolean() ? 1 : -1); } long randomLong(int cardinality) { @@ -764,8 +957,8 @@ static boolean sameDocs(String msg, DocList a, DocList b) { return true; } - private static ,Y> X randomKey(Map map) { - assert ! 
map.isEmpty(); + private static , Y> X randomKey(Map map) { + assert !map.isEmpty(); List sortedKeys = new ArrayList<>(map.keySet()); Collections.sort(sortedKeys); return sortedKeys.get(random().nextInt(sortedKeys.size())); diff --git a/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java index 0198efad6d7..5b00a9e4d05 100644 --- a/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java @@ -19,7 +19,6 @@ import java.util.Map; import java.util.stream.Collectors; import java.util.stream.IntStream; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.CommonParams; @@ -66,47 +65,77 @@ public void testReRankQueries() throws Exception { assertU(delQ("*:*")); assertU(commit()); - String[] doc = {"id","1", "term_s", "YYYY", "group_s", "group1", "test_ti", "5", "test_tl", "10", "test_tf", "2000"}; + String[] doc = { + "id", "1", "term_s", "YYYY", "group_s", "group1", "test_ti", "5", "test_tl", "10", "test_tf", + "2000" + }; assertU(adoc(doc)); assertU(commit()); - String[] doc1 = {"id","2", "term_s","YYYY", "group_s", "group1", "test_ti", "50", "test_tl", "100", "test_tf", "200"}; + String[] doc1 = { + "id", "2", "term_s", "YYYY", "group_s", "group1", "test_ti", "50", "test_tl", "100", + "test_tf", "200" + }; assertU(adoc(doc1)); - String[] doc2 = {"id","3", "term_s", "YYYY", "test_ti", "5000", "test_tl", "100", "test_tf", "200"}; + String[] doc2 = { + "id", "3", "term_s", "YYYY", "test_ti", "5000", "test_tl", "100", "test_tf", "200" + }; assertU(adoc(doc2)); assertU(commit()); - String[] doc3 = {"id","4", "term_s", "YYYY", "test_ti", "500", "test_tl", "1000", "test_tf", "2000"}; + String[] doc3 = { + "id", "4", "term_s", "YYYY", "test_ti", "500", "test_tl", "1000", "test_tf", "2000" + }; assertU(adoc(doc3)); - String[] doc4 = {"id","5", "term_s", "YYYY", "group_s", "group2", "test_ti", "4", "test_tl", "10", "test_tf", "2000"}; + String[] doc4 = { + "id", "5", "term_s", "YYYY", "group_s", "group2", "test_ti", "4", "test_tl", "10", "test_tf", + "2000" + }; assertU(adoc(doc4)); assertU(commit()); - String[] doc5 = {"id","6", "term_s","YYYY", "group_s", "group2", "test_ti", "10", "test_tl", "100", "test_tf", "200"}; + String[] doc5 = { + "id", "6", "term_s", "YYYY", "group_s", "group2", "test_ti", "10", "test_tl", "100", + "test_tf", "200" + }; assertU(adoc(doc5)); assertU(commit()); - - - ModifiableSolrParams params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=200}"); + params.add( + "rq", + "{!" 
+ + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=200}"); params.add("q", "term_s:YYYY"); params.add("rqq", "{!edismax bf=$bff}*:*"); params.add("bff", "field(test_ti)"); params.add("start", "0"); params.add("rows", "6"); params.add("df", "text"); - assertQ(req(params), "*[count(//doc)=6]", + assertQ( + req(params), + "*[count(//doc)=6]", "//result/doc[1]/str[@name='id'][.='3']", "//result/doc[2]/str[@name='id'][.='4']", "//result/doc[3]/str[@name='id'][.='2']", "//result/doc[4]/str[@name='id'][.='6']", "//result/doc[5]/str[@name='id'][.='1']", - "//result/doc[6]/str[@name='id'][.='5']" - ); + "//result/doc[6]/str[@name='id'][.='5']"); params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6}"); + params.add( + "rq", + "{!" + + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=6}"); params.add("q", "{!edismax bq=$bqq1}*:*"); params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60"); params.add("rqq", "{!edismax bq=$bqq2}*:*"); @@ -116,18 +145,27 @@ public void testReRankQueries() throws Exception { params.add("rows", "10"); params.add("df", "text"); - assertQ(req(params), "*[count(//doc)=6]", + assertQ( + req(params), + "*[count(//doc)=6]", "//result/doc[1]/str[@name='id'][.='2']", "//result/doc[2]/str[@name='id'][.='6']", "//result/doc[3]/str[@name='id'][.='5']", "//result/doc[4]/str[@name='id'][.='4']", "//result/doc[5]/str[@name='id'][.='3']", - "//result/doc[6]/str[@name='id'][.='1']" - ); + "//result/doc[6]/str[@name='id'][.='1']"); - //Test with sort by score. + // Test with sort by score. params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6}"); + params.add( + "rq", + "{!" + + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=6}"); params.add("q", "{!edismax bq=$bqq1}*:*"); params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60"); params.add("rqq", "{!edismax bq=$bqq2}*:*"); @@ -137,19 +175,27 @@ public void testReRankQueries() throws Exception { params.add("rows", "10"); params.add("sort", "score desc"); params.add("df", "text"); - assertQ(req(params), "*[count(//doc)=6]", + assertQ( + req(params), + "*[count(//doc)=6]", "//result/doc[1]/str[@name='id'][.='2']", "//result/doc[2]/str[@name='id'][.='6']", "//result/doc[3]/str[@name='id'][.='5']", "//result/doc[4]/str[@name='id'][.='4']", "//result/doc[5]/str[@name='id'][.='3']", - "//result/doc[6]/str[@name='id'][.='1']" - ); - + "//result/doc[6]/str[@name='id'][.='1']"); - //Test with compound sort. + // Test with compound sort. params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6}"); + params.add( + "rq", + "{!" 
+ + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=6}"); params.add("q", "{!edismax bq=$bqq1}*:*"); params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60"); params.add("rqq", "{!edismax bq=$bqq2}*:*"); @@ -160,19 +206,29 @@ public void testReRankQueries() throws Exception { params.add("sort", "score desc,test_ti asc"); params.add("df", "text"); - assertQ(req(params), "*[count(//doc)=6]", + assertQ( + req(params), + "*[count(//doc)=6]", "//result/doc[1]/str[@name='id'][.='2']", "//result/doc[2]/str[@name='id'][.='6']", "//result/doc[3]/str[@name='id'][.='5']", "//result/doc[4]/str[@name='id'][.='4']", "//result/doc[5]/str[@name='id'][.='3']", - "//result/doc[6]/str[@name='id'][.='1']" - ); - - - //Test with elevation - - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6 "+ReRankQParserPlugin.RERANK_WEIGHT+"=50}"); + "//result/doc[6]/str[@name='id'][.='1']"); + + // Test with elevation + + params.add( + "rq", + "{!" + + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=6 " + + ReRankQParserPlugin.RERANK_WEIGHT + + "=50}"); params.add("q", "{!edismax bq=$bqq1}*:*"); params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60"); params.add("rqq", "{!edismax bq=$bqq2}*:*"); @@ -182,20 +238,29 @@ public void testReRankQueries() throws Exception { params.add("rows", "10"); params.add("qt", "/elevate"); params.add("elevateIds", "1"); - assertQ(req(params), "*[count(//doc)=6]", + assertQ( + req(params), + "*[count(//doc)=6]", "//result/doc[1]/str[@name='id'][.='1']", "//result/doc[2]/str[@name='id'][.='2']", "//result/doc[3]/str[@name='id'][.='6']", "//result/doc[4]/str[@name='id'][.='5']", "//result/doc[5]/str[@name='id'][.='4']", - "//result/doc[6]/str[@name='id'][.='3']" - - ); + "//result/doc[6]/str[@name='id'][.='3']"); - - //Test TermQuery rqq + // Test TermQuery rqq params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}"); + params.add( + "rq", + "{!" + + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=6 " + + ReRankQParserPlugin.RERANK_WEIGHT + + "=2}"); params.add("q", "{!edismax bq=$bqq1}*:*"); params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60"); params.add("rqq", "test_ti:50^1000"); @@ -204,19 +269,29 @@ public void testReRankQueries() throws Exception { params.add("rows", "10"); params.add("df", "text"); - assertQ(req(params), "*[count(//doc)=6]", + assertQ( + req(params), + "*[count(//doc)=6]", "//result/doc[1]/str[@name='id'][.='2']", "//result/doc[2]/str[@name='id'][.='6']", "//result/doc[3]/str[@name='id'][.='5']", "//result/doc[4]/str[@name='id'][.='4']", "//result/doc[5]/str[@name='id'][.='3']", - "//result/doc[6]/str[@name='id'][.='1']" - ); - + "//result/doc[6]/str[@name='id'][.='1']"); - //Test Elevation + // Test Elevation params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}"); + params.add( + "rq", + "{!" 
+ + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=6 " + + ReRankQParserPlugin.RERANK_WEIGHT + + "=2}"); params.add("q", "{!edismax bq=$bqq1}*:*"); params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60"); params.add("rqq", "test_ti:50^1000"); @@ -226,42 +301,60 @@ public void testReRankQueries() throws Exception { params.add("qt", "/elevate"); params.add("elevateIds", "1,4"); - assertQ(req(params), "*[count(//doc)=6]", - "//result/doc[1]/str[@name='id'][.='1']", //Elevated - "//result/doc[2]/str[@name='id'][.='4']", //Elevated - "//result/doc[3]/str[@name='id'][.='2']", //Boosted during rerank. + assertQ( + req(params), + "*[count(//doc)=6]", + "//result/doc[1]/str[@name='id'][.='1']", // Elevated + "//result/doc[2]/str[@name='id'][.='4']", // Elevated + "//result/doc[3]/str[@name='id'][.='2']", // Boosted during rerank. "//result/doc[4]/str[@name='id'][.='6']", "//result/doc[5]/str[@name='id'][.='5']", - "//result/doc[6]/str[@name='id'][.='3']" - ); + "//result/doc[6]/str[@name='id'][.='3']"); - - //Test Elevation swapped + // Test Elevation swapped params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}"); + params.add( + "rq", + "{!" + + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=6 " + + ReRankQParserPlugin.RERANK_WEIGHT + + "=2}"); params.add("q", "{!edismax bq=$bqq1}*:*"); params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60"); params.add("rqq", "test_ti:50^1000"); params.add("fl", "id,score"); params.add("start", "0"); params.add("rows", "10"); - params.add("qt","/elevate"); + params.add("qt", "/elevate"); params.add("elevateIds", "4,1"); - assertQ(req(params), "*[count(//doc)=6]", - "//result/doc[1]/str[@name='id'][.='4']", //Elevated - "//result/doc[2]/str[@name='id'][.='1']", //Elevated - "//result/doc[3]/str[@name='id'][.='2']", //Boosted during rerank. + assertQ( + req(params), + "*[count(//doc)=6]", + "//result/doc[1]/str[@name='id'][.='4']", // Elevated + "//result/doc[2]/str[@name='id'][.='1']", // Elevated + "//result/doc[3]/str[@name='id'][.='2']", // Boosted during rerank. "//result/doc[4]/str[@name='id'][.='6']", "//result/doc[5]/str[@name='id'][.='5']", - "//result/doc[6]/str[@name='id'][.='3']" - ); - - - + "//result/doc[6]/str[@name='id'][.='3']"); params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=4 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}"); + params.add( + "rq", + "{!" 
 + + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=4 " + + ReRankQParserPlugin.RERANK_WEIGHT + + "=2}"); params.add("q", "{!edismax bq=$bqq1}*:*"); params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60"); params.add("rqq", "test_ti:50^1000"); @@ -271,18 +364,30 @@ params.add("qt", "/elevate"); params.add("elevateIds", "4,1"); - assertQ(req(params), "*[count(//doc)=6]", - "//result/doc[1]/str[@name='id'][.='4']", //Elevated - "//result/doc[2]/str[@name='id'][.='1']", //Elevated + assertQ( + req(params), + "*[count(//doc)=6]", + "//result/doc[1]/str[@name='id'][.='4']", // Elevated + "//result/doc[2]/str[@name='id'][.='1']", // Elevated "//result/doc[3]/str[@name='id'][.='6']", "//result/doc[4]/str[@name='id'][.='5']", "//result/doc[5]/str[@name='id'][.='3']", - "//result/doc[6]/str[@name='id'][.='2']" //Not in reRankeDocs - ); + "//result/doc[6]/str[@name='id'][.='2']" // Not in reRankDocs + ); - //Test Elevation with start beyond the rerank docs + // Test Elevation with start beyond the rerank docs params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=3 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}"); + params.add( + "rq", + "{!" + + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=3 " + + ReRankQParserPlugin.RERANK_WEIGHT + + "=2}"); params.add("q", "{!edismax bq=$bqq1}*:*"); params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60"); params.add("rqq", "test_ti:50^1000"); @@ -292,14 +397,26 @@ params.add("qt", "/elevate"); params.add("elevateIds", "4,1"); - assertQ(req(params), "*[count(//doc)=2]", + assertQ( + req(params), + "*[count(//doc)=2]", "//result/doc[1]/str[@name='id'][.='3']", - "//result/doc[2]/str[@name='id'][.='2']" //Was not in reRankDocs - ); + "//result/doc[2]/str[@name='id'][.='2']" // Was not in reRankDocs + ); - //Test Elevation with zero results + // Test Elevation with zero results params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=3 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}"); + params.add( + "rq", + "{!" + + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=3 " + + ReRankQParserPlugin.RERANK_WEIGHT + + "=2}"); params.add("q", "{!edismax bq=$bqq1}nada"); params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60"); params.add("rqq", "test_ti:50^1000"); @@ -311,11 +428,19 @@ params.add("rows", "5"); assertQ(req(params), "*[count(//doc)=0]"); - - - //Pass in reRankDocs lower then the length being collected. + // Pass in reRankDocs lower than the length being collected. params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=1 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}"); + params.add( + "rq", + "{!" 
+ + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=1 " + + ReRankQParserPlugin.RERANK_WEIGHT + + "=2}"); params.add("q", "{!edismax bq=$bqq1}*:*"); params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60"); params.add("rqq", "test_ti:50^1000"); @@ -323,17 +448,28 @@ public void testReRankQueries() throws Exception { params.add("start", "0"); params.add("rows", "10"); - assertQ(req(params), "*[count(//doc)=6]", + assertQ( + req(params), + "*[count(//doc)=6]", "//result/doc[1]/str[@name='id'][.='6']", "//result/doc[2]/str[@name='id'][.='5']", "//result/doc[3]/str[@name='id'][.='4']", "//result/doc[4]/str[@name='id'][.='3']", "//result/doc[5]/str[@name='id'][.='2']", - "//result/doc[6]/str[@name='id'][.='1']" - ); + "//result/doc[6]/str[@name='id'][.='1']"); params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=0 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}"); + params.add( + "rq", + "{!" + + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=0 " + + ReRankQParserPlugin.RERANK_WEIGHT + + "=2}"); params.add("q", "{!edismax bq=$bqq1}*:*"); params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60"); params.add("rqq", "test_ti:50^1000"); @@ -341,17 +477,28 @@ public void testReRankQueries() throws Exception { params.add("start", "0"); params.add("rows", "10"); - assertQ(req(params), "*[count(//doc)=6]", + assertQ( + req(params), + "*[count(//doc)=6]", "//result/doc[1]/str[@name='id'][.='6']", "//result/doc[2]/str[@name='id'][.='5']", "//result/doc[3]/str[@name='id'][.='4']", "//result/doc[4]/str[@name='id'][.='3']", "//result/doc[5]/str[@name='id'][.='2']", - "//result/doc[6]/str[@name='id'][.='1']" - ); + "//result/doc[6]/str[@name='id'][.='1']"); params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=2 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}"); + params.add( + "rq", + "{!" + + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=2 " + + ReRankQParserPlugin.RERANK_WEIGHT + + "=2}"); params.add("q", "{!edismax bq=$bqq1}*:*"); params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60"); params.add("rqq", "test_ti:4^1000"); @@ -359,18 +506,29 @@ public void testReRankQueries() throws Exception { params.add("start", "0"); params.add("rows", "10"); - assertQ(req(params), "*[count(//doc)=6]", + assertQ( + req(params), + "*[count(//doc)=6]", "//result/doc[1]/str[@name='id'][.='5']", "//result/doc[2]/str[@name='id'][.='6']", "//result/doc[3]/str[@name='id'][.='4']", "//result/doc[4]/str[@name='id'][.='3']", "//result/doc[5]/str[@name='id'][.='2']", - "//result/doc[6]/str[@name='id'][.='1']" - ); + "//result/doc[6]/str[@name='id'][.='1']"); - //Test reRankWeight of 0, reranking will have no effect. + // Test reRankWeight of 0, reranking will have no effect. params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6 "+ReRankQParserPlugin.RERANK_WEIGHT+"=0}"); + params.add( + "rq", + "{!" 
+ + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=6 " + + ReRankQParserPlugin.RERANK_WEIGHT + + "=0}"); params.add("q", "{!edismax bq=$bqq1}*:*"); params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60"); params.add("rqq", "test_ti:50^1000"); @@ -378,125 +536,186 @@ public void testReRankQueries() throws Exception { params.add("start", "0"); params.add("rows", "5"); - assertQ(req(params), "*[count(//doc)=5]", + assertQ( + req(params), + "*[count(//doc)=5]", "//result/doc[1]/str[@name='id'][.='6']", "//result/doc[2]/str[@name='id'][.='5']", "//result/doc[3]/str[@name='id'][.='4']", "//result/doc[4]/str[@name='id'][.='3']", - "//result/doc[5]/str[@name='id'][.='2']" - ); - - MetricsMap metrics = (MetricsMap)((SolrMetricManager.GaugeWrapper)h.getCore().getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.queryResultCache")).getGauge(); - Map stats = metrics.getValue(); + "//result/doc[5]/str[@name='id'][.='2']"); + + MetricsMap metrics = + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) + h.getCore() + .getCoreMetricManager() + .getRegistry() + .getMetrics() + .get("CACHE.searcher.queryResultCache")) + .getGauge(); + Map stats = metrics.getValue(); long inserts = (Long) stats.get("inserts"); assertTrue(inserts > 0); - //Test range query + // Test range query params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6}"); + params.add( + "rq", + "{!" + + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=6}"); params.add("q", "test_ti:[0 TO 2000]"); params.add("rqq", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60"); params.add("fl", "id,score"); params.add("start", "0"); params.add("rows", "6"); - assertQ(req(params), "*[count(//doc)=5]", + assertQ( + req(params), + "*[count(//doc)=5]", "//result/doc[1]/str[@name='id'][.='6']", "//result/doc[2]/str[@name='id'][.='5']", "//result/doc[3]/str[@name='id'][.='4']", "//result/doc[4]/str[@name='id'][.='2']", - "//result/doc[5]/str[@name='id'][.='1']" - ); - + "//result/doc[5]/str[@name='id'][.='1']"); stats = metrics.getValue(); long inserts1 = (Long) stats.get("inserts"); - //Last query was added to the cache + // Last query was added to the cache assertTrue(inserts1 > inserts); - //Run same query and see if it was cached. This tests the query result cache hit with rewritten queries + // Run same query and see if it was cached. This tests the query result cache hit with rewritten + // queries params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6}"); + params.add( + "rq", + "{!" 
+ + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=6}"); params.add("q", "test_ti:[0 TO 2000]"); params.add("rqq", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60"); params.add("fl", "id,score"); params.add("start", "0"); params.add("rows", "6"); - assertQ(req(params), "*[count(//doc)=5]", + assertQ( + req(params), + "*[count(//doc)=5]", "//result/doc[1]/str[@name='id'][.='6']", "//result/doc[2]/str[@name='id'][.='5']", "//result/doc[3]/str[@name='id'][.='4']", "//result/doc[4]/str[@name='id'][.='2']", - "//result/doc[5]/str[@name='id'][.='1']" - ); + "//result/doc[5]/str[@name='id'][.='1']"); stats = metrics.getValue(); long inserts2 = (Long) stats.get("inserts"); - //Last query was NOT added to the cache + // Last query was NOT added to the cache assertTrue(inserts1 == inserts2); - - //Test range query embedded in larger query + // Test range query embedded in larger query params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6}"); + params.add( + "rq", + "{!" + + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=6}"); // function query for predictible scores (relative to id) independent of similarity params.add("q", "{!func}id_i"); - // constant score for each clause (unique per doc) for predictible scores independent of similarity + // constant score for each clause (unique per doc) for predictible scores independent of + // similarity // NOTE: biased in favor of doc id == 2 params.add("rqq", "id:1^=10 id:2^=40 id:3^=30 id:4^=40 id:5^=50 id:6^=60"); params.add("fl", "id,score"); params.add("start", "0"); params.add("rows", "6"); - assertQ(req(params), "*[count(//doc)=6]", + assertQ( + req(params), + "*[count(//doc)=6]", "//result/doc[1]/str[@name='id'][.='6']", "//result/doc[2]/str[@name='id'][.='5']", "//result/doc[3]/str[@name='id'][.='4']", "//result/doc[4]/str[@name='id'][.='2']", // reranked out of orig order "//result/doc[5]/str[@name='id'][.='3']", - "//result/doc[6]/str[@name='id'][.='1']" - ); - + "//result/doc[6]/str[@name='id'][.='1']"); - //Test with start beyond reRankDocs + // Test with start beyond reRankDocs params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=3 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}"); + params.add( + "rq", + "{!" + + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=3 " + + ReRankQParserPlugin.RERANK_WEIGHT + + "=2}"); params.add("q", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60"); params.add("rqq", "id:1^1000"); params.add("fl", "id,score"); params.add("start", "4"); params.add("rows", "5"); - assertQ(req(params), "*[count(//doc)=2]", + assertQ( + req(params), + "*[count(//doc)=2]", "//result/doc[1]/str[@name='id'][.='2']", - "//result/doc[2]/str[@name='id'][.='1']" - ); + "//result/doc[2]/str[@name='id'][.='1']"); - - //Test ReRankDocs > docs returned + // Test ReRankDocs > docs returned params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=6 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}"); + params.add( + "rq", + "{!" 
 + + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=3 " + + ReRankQParserPlugin.RERANK_WEIGHT + + "=2}"); params.add("q", "term_s:NNNN"); params.add("rqq", "id:1^1000"); params.add("fl", "id,score"); @@ -504,7 +723,6 @@ params.add("rows", "5"); assertQ(req(params), "*[count(//doc)=0]"); - } @Test @@ -513,101 +731,172 @@ public void testOverRank() throws Exception { assertU(delQ("*:*")); assertU(commit()); - //Test the scenario that where we rank more documents then we return. + // Test the scenario where we rank more documents than we return. - String[] doc = {"id","1", "term_s", "YYYY", "group_s", "group1", "test_ti", "5", "test_tl", "10", "test_tf", "2000"}; + String[] doc = { + "id", "1", "term_s", "YYYY", "group_s", "group1", "test_ti", "5", "test_tl", "10", "test_tf", + "2000" + }; assertU(adoc(doc)); - String[] doc1 = {"id","2", "term_s","YYYY", "group_s", "group1", "test_ti", "50", "test_tl", "100", "test_tf", "200"}; + String[] doc1 = { + "id", "2", "term_s", "YYYY", "group_s", "group1", "test_ti", "50", "test_tl", "100", + "test_tf", "200" + }; assertU(adoc(doc1)); - String[] doc2 = {"id","3", "term_s", "YYYY", "test_ti", "5000", "test_tl", "100", "test_tf", "200"}; + String[] doc2 = { + "id", "3", "term_s", "YYYY", "test_ti", "5000", "test_tl", "100", "test_tf", "200" + }; assertU(adoc(doc2)); - String[] doc3 = {"id","4", "term_s", "YYYY", "test_ti", "500", "test_tl", "1000", "test_tf", "2000"}; + String[] doc3 = { + "id", "4", "term_s", "YYYY", "test_ti", "500", "test_tl", "1000", "test_tf", "2000" + }; assertU(adoc(doc3)); - - String[] doc4 = {"id","5", "term_s", "YYYY", "group_s", "group2", "test_ti", "4", "test_tl", "10", "test_tf", "2000"}; + String[] doc4 = { + "id", "5", "term_s", "YYYY", "group_s", "group2", "test_ti", "4", "test_tl", "10", "test_tf", + "2000" + }; assertU(adoc(doc4)); - String[] doc5 = {"id","6", "term_s","YYYY", "group_s", "group2", "test_ti", "10", "test_tl", "100", "test_tf", "200"}; + String[] doc5 = { + "id", "6", "term_s", "YYYY", "group_s", "group2", "test_ti", "10", "test_tl", "100", + "test_tf", "200" + }; assertU(adoc(doc5)); - String[] doc6 = {"id","7", "term_s", "YYYY", "group_s", "group1", "test_ti", "5", "test_tl", "10", "test_tf", "2000"}; + String[] doc6 = { + "id", "7", "term_s", "YYYY", "group_s", "group1", "test_ti", "5", "test_tl", "10", "test_tf", + "2000" + }; assertU(adoc(doc6)); - - String[] doc7 = {"id","8", "term_s","YYYY", "group_s", "group1", "test_ti", 
"50", "test_tl", "100", + "test_tf", "200" + }; assertU(adoc(doc7)); - String[] doc8 = {"id","9", "term_s", "YYYY", "test_ti", "5000", "test_tl", "100", "test_tf", "200"}; + String[] doc8 = { + "id", "9", "term_s", "YYYY", "test_ti", "5000", "test_tl", "100", "test_tf", "200" + }; assertU(adoc(doc8)); - String[] doc9 = {"id","10", "term_s", "YYYY", "test_ti", "500", "test_tl", "1000", "test_tf", "2000"}; + String[] doc9 = { + "id", "10", "term_s", "YYYY", "test_ti", "500", "test_tl", "1000", "test_tf", "2000" + }; assertU(adoc(doc9)); - String[] doc10 = {"id","11", "term_s", "YYYY", "group_s", "group2", "test_ti", "4", "test_tl", "10", "test_tf", "2000"}; + String[] doc10 = { + "id", "11", "term_s", "YYYY", "group_s", "group2", "test_ti", "4", "test_tl", "10", "test_tf", + "2000" + }; assertU(adoc(doc10)); assertU(commit()); - ModifiableSolrParams params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=11 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}"); + params.add( + "rq", + "{!" + + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=11 " + + ReRankQParserPlugin.RERANK_WEIGHT + + "=2}"); params.add("q", "{!edismax bq=$bqq1}*:*"); - params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60 id:7^70 id:8^80 id:9^90 id:10^100 id:11^110"); + params.add( + "bqq1", + "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60 id:7^70 id:8^80 id:9^90 id:10^100 id:11^110"); params.add("rqq", "test_ti:50^1000"); params.add("fl", "id,score"); params.add("start", "0"); params.add("rows", "2"); params.add("df", "text"); - assertQ(req(params), "*[count(//doc)=2]", + assertQ( + req(params), + "*[count(//doc)=2]", "//result/doc[1]/str[@name='id'][.='8']", - "//result/doc[2]/str[@name='id'][.='2']" - ); + "//result/doc[2]/str[@name='id'][.='2']"); - //Test Elevation + // Test Elevation params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=11 "+ReRankQParserPlugin.RERANK_WEIGHT+"=2}"); + params.add( + "rq", + "{!" + + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=11 " + + ReRankQParserPlugin.RERANK_WEIGHT + + "=2}"); params.add("q", "{!edismax bq=$bqq1}*:*"); - params.add("bqq1", "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60 id:7^70 id:8^80 id:9^90 id:10^100 id:11^110"); + params.add( + "bqq1", + "id:1^10 id:2^20 id:3^30 id:4^40 id:5^50 id:6^60 id:7^70 id:8^80 id:9^90 id:10^100 id:11^110"); params.add("rqq", "test_ti:50^1000"); params.add("fl", "id,score"); params.add("start", "0"); params.add("rows", "3"); - params.add("qt","/elevate"); + params.add("qt", "/elevate"); params.add("elevateIds", "1,4"); - assertQ(req(params), "*[count(//doc)=3]", - "//result/doc[1]/str[@name='id'][.='1']", //Elevated - "//result/doc[2]/str[@name='id'][.='4']", //Elevated - "//result/doc[3]/str[@name='id'][.='8']"); //Boosted during rerank. + assertQ( + req(params), + "*[count(//doc)=3]", + "//result/doc[1]/str[@name='id'][.='1']", // Elevated + "//result/doc[2]/str[@name='id'][.='4']", // Elevated + "//result/doc[3]/str[@name='id'][.='8']"); // Boosted during rerank. 
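+ // Only rows=3 are returned, but all reRankDocs=11 candidates are rescored, which is how doc 8 + // (test_ti=50) overtakes docs 9-11 despite their larger bqq1 boosts; assuming the plugin's + // usual constants, the rq above expands to {!rerank reRankQuery=$rqq reRankDocs=11 reRankWeight=2}.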
} @Test - public void testRerankQueryParsingShouldFailWithoutMandatoryReRankQueryParameter() throws Exception { + public void testRerankQueryParsingShouldFailWithoutMandatoryReRankQueryParameter() + throws Exception { assertU(delQ("*:*")); assertU(commit()); - String[] doc = {"id", "1", "term_s", "YYYY", "group_s", "group1", "test_ti", "5", "test_tl", "10", "test_tf", "2000"}; + String[] doc = { + "id", "1", "term_s", "YYYY", "group_s", "group1", "test_ti", "5", "test_tl", "10", "test_tf", + "2000" + }; assertU(adoc(doc)); assertU(commit()); - String[] doc1 = {"id", "2", "term_s", "YYYY", "group_s", "group1", "test_ti", "50", "test_tl", "100", "test_tf", "200"}; + String[] doc1 = { + "id", "2", "term_s", "YYYY", "group_s", "group1", "test_ti", "50", "test_tl", "100", + "test_tf", "200" + }; assertU(adoc(doc1)); assertU(commit()); ModifiableSolrParams params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rqq "+ReRankQParserPlugin.RERANK_DOCS+"=200}"); + params.add( + "rq", + "{!" + + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rqq " + + ReRankQParserPlugin.RERANK_DOCS + + "=200}"); params.add("q", "term_s:YYYY"); params.add("start", "0"); params.add("rows", "2"); ignoreException("reRankQuery parameter is mandatory"); - SolrException se = expectThrows(SolrException.class, "A syntax error should be thrown when "+ReRankQParserPlugin.RERANK_QUERY+" parameter is not specified", - () -> h.query(req(params)) - ); + SolrException se = + expectThrows( + SolrException.class, + "A syntax error should be thrown when " + + ReRankQParserPlugin.RERANK_QUERY + + " parameter is not specified", + () -> h.query(req(params))); assertTrue(se.code() == SolrException.ErrorCode.BAD_REQUEST.code); unIgnoreException("reRankQuery parameter is mandatory"); - } @Test @@ -616,10 +905,10 @@ public void testReRankQueriesWithDefType() throws Exception { assertU(delQ("*:*")); assertU(commit()); - final String[] doc1 = {"id","1"}; + final String[] doc1 = {"id", "1"}; assertU(adoc(doc1)); assertU(commit()); - final String[] doc2 = {"id","2"}; + final String[] doc2 = {"id", "2"}; assertU(adoc(doc2)); assertU(commit()); @@ -633,47 +922,58 @@ public void testReRankQueriesWithDefType() throws Exception { lessPreferrredDocId = "1"; } - for (final String defType : new String[] { - null, - LuceneQParserPlugin.NAME, - ExtendedDismaxQParserPlugin.NAME - }) { + for (final String defType : + new String[] {null, LuceneQParserPlugin.NAME, ExtendedDismaxQParserPlugin.NAME}) { final ModifiableSolrParams params = new ModifiableSolrParams(); - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=id:"+preferredDocId+"}"); + params.add( + "rq", + "{!" 
+ + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=id:" + + preferredDocId + + "}"); params.add("q", "*:*"); if (defType != null) { params.add(QueryParsing.DEFTYPE, defType); } - assertQ(req(params), "*[count(//doc)=2]", - "//result/doc[1]/str[@name='id'][.='"+preferredDocId+"']", - "//result/doc[2]/str[@name='id'][.='"+lessPreferrredDocId+"']" - ); + assertQ( + req(params), + "*[count(//doc)=2]", + "//result/doc[1]/str[@name='id'][.='" + preferredDocId + "']", + "//result/doc[2]/str[@name='id'][.='" + lessPreferrredDocId + "']"); } } - + @Test public void testMinExactCount() throws Exception { assertU(delQ("*:*")); assertU(commit()); - + int numDocs = 200; - - for (int i = 0 ; i < numDocs ; i ++) { - assertU(adoc( - "id", String.valueOf(i), - "id_p_i", String.valueOf(i), - "field_t", IntStream.range(0, numDocs).mapToObj(val -> Integer.toString(val)).collect(Collectors.joining(" ")))); + + for (int i = 0; i < numDocs; i++) { + assertU( + adoc( + "id", String.valueOf(i), + "id_p_i", String.valueOf(i), + "field_t", + IntStream.range(0, numDocs) + .mapToObj(val -> Integer.toString(val)) + .collect(Collectors.joining(" ")))); } assertU(commit()); - + ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "field_t:0"); params.add("start", "0"); params.add("rows", "10"); params.add("fl", "id,score"); params.add("sort", "score desc, id_p_i asc"); - assertQ(req(params), + assertQ( + req(params), "*[count(//doc)=10]", "//result[@numFound='" + numDocs + "']", "//result[@numFoundExact='true']", @@ -686,12 +986,20 @@ public void testMinExactCount() throws Exception { "//result/doc[7]/str[@name='id'][.='6']", "//result/doc[8]/str[@name='id'][.='7']", "//result/doc[9]/str[@name='id'][.='8']", - "//result/doc[10]/str[@name='id'][.='9']" - ); - - params.add("rq", "{!"+ReRankQParserPlugin.NAME+" "+ReRankQParserPlugin.RERANK_QUERY+"=$rrq "+ReRankQParserPlugin.RERANK_DOCS+"=20}"); + "//result/doc[10]/str[@name='id'][.='9']"); + + params.add( + "rq", + "{!" 
+ + ReRankQParserPlugin.NAME + + " " + + ReRankQParserPlugin.RERANK_QUERY + + "=$rrq " + + ReRankQParserPlugin.RERANK_DOCS + + "=20}"); params.add("rrq", "id:10"); - assertQ(req(params), + assertQ( + req(params), "*[count(//doc)=10]", "//result[@numFound='" + numDocs + "']", "//result[@numFoundExact='true']", @@ -704,11 +1012,11 @@ public void testMinExactCount() throws Exception { "//result/doc[7]/str[@name='id'][.='5']", "//result/doc[8]/str[@name='id'][.='6']", "//result/doc[9]/str[@name='id'][.='7']", - "//result/doc[10]/str[@name='id'][.='8']" - ); - + "//result/doc[10]/str[@name='id'][.='8']"); + params.add(CommonParams.MIN_EXACT_COUNT, "2"); - assertQ(req(params), + assertQ( + req(params), "*[count(//doc)=10]", "//result[@numFound<='" + numDocs + "']", "//result[@numFoundExact='false']", @@ -721,8 +1029,6 @@ public void testMinExactCount() throws Exception { "//result/doc[7]/str[@name='id'][.='5']", "//result/doc[8]/str[@name='id'][.='6']", "//result/doc[9]/str[@name='id'][.='7']", - "//result/doc[10]/str[@name='id'][.='8']" - ); + "//result/doc[10]/str[@name='id'][.='8']"); } - } diff --git a/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java b/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java index f1c5555268d..3ae14d3f363 100644 --- a/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java +++ b/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java @@ -16,6 +16,8 @@ */ package org.apache.solr.search; +import static org.apache.solr.core.SolrCore.verbose; +import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM; import java.util.ArrayList; import java.util.HashMap; @@ -24,7 +26,6 @@ import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; - import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.params.ModifiableSolrParams; @@ -35,126 +36,214 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.apache.solr.core.SolrCore.verbose; -import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM; - public class TestRealTimeGet extends TestRTGBase { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-tlog.xml","schema_latest.xml"); + initCore("solrconfig-tlog.xml", "schema_latest.xml"); } - @Test public void testGetRealtime() throws Exception { clearIndex(); assertU(commit()); - assertU(adoc("id","1", - "a_f","-1.5", "a_fd","-1.5", "a_fdS","-1.5", "a_fs","1.0","a_fs","2.5", "a_fds","1.0","a_fds","2.5", "a_fdsS","1.0","a_fdsS","2.5", - "a_d","-1.2E99", "a_dd","-1.2E99", "a_ddS","-1.2E99", "a_ds","1.0","a_ds","2.5", "a_dds","1.0","a_dds","2.5", "a_ddsS","1.0","a_ddsS","2.5", - "a_i","-1", "a_id","-1", "a_idS","-1", "a_is","1","a_is","2", "a_ids","1","a_ids","2", "a_idsS","1","a_idsS","2", - "a_l","-9999999999", "a_ld","-9999999999", "a_ldS","-9999999999", "a_ls","1","a_ls","9999999999", "a_lds","1","a_lds","9999999999", "a_ldsS","1","a_ldsS","9999999999", - "a_s", "abc", "a_sd", "bcd", "a_sdS", "cde", "a_ss","def","a_ss", "efg", "a_sds","fgh","a_sds","ghi", "a_sdsS","hij","a_sdsS","ijk", - "a_b", "false", "a_bd", "true", "a_bdS", "false", "a_bs","true","a_bs", "false", "a_bds","true","a_bds","false", "a_bdsS","true","a_bdsS","false" - )); - assertJQ(req("q","id:1") - ,"/response/numFound==0" - ); - assertJQ(req("qt","/get", "id","1", "fl","id, a_f,a_fd,a_fdS 
a_fs,a_fds,a_fdsS, " + - "a_d,a_dd,a_ddS, a_ds,a_dds,a_ddsS, a_i,a_id,a_idS a_is,a_ids,a_idsS, " + - "a_l,a_ld,a_ldS, a_ls,a_lds,a_ldsS, a_s,a_sd,a_sdS a_ss,a_sds,a_sdsS, " + - "a_b,a_bd,a_bdS, a_bs,a_bds,a_bdsS" - ) - ,"=={'doc':{'id':'1'" + - ", a_f:-1.5, a_fd:-1.5, a_fdS:-1.5, a_fs:[1.0,2.5], a_fds:[1.0,2.5],a_fdsS:[1.0,2.5]" + - ", a_d:-1.2E99, a_dd:-1.2E99, a_ddS:-1.2E99, a_ds:[1.0,2.5],a_dds:[1.0,2.5],a_ddsS:[1.0,2.5]" + - ", a_i:-1, a_id:-1, a_idS:-1, a_is:[1,2],a_ids:[1,2],a_idsS:[1,2]" + - ", a_l:-9999999999, a_ld:-9999999999, a_ldS:-9999999999, a_ls:[1,9999999999],a_lds:[1,9999999999],a_ldsS:[1,9999999999]" + - ", a_s:'abc', a_sd:'bcd', a_sdS:'cde', a_ss:['def','efg'],a_sds:['fgh','ghi'],a_sdsS:['hij','ijk']" + - ", a_b:false, a_bd:true, a_bdS:false, a_bs:[true,false],a_bds:[true,false],a_bdsS:[true,false]" + - " }}" - ); - assertJQ(req("qt","/get","ids","1", "fl","id") - ,"=={" + - " 'response':{'numFound':1,'start':0,'numFoundExact':true,'docs':[" + - " {" + - " 'id':'1'}]" + - " }}}" - ); + assertU( + adoc( + "id", + "1", + "a_f", + "-1.5", + "a_fd", + "-1.5", + "a_fdS", + "-1.5", + "a_fs", + "1.0", + "a_fs", + "2.5", + "a_fds", + "1.0", + "a_fds", + "2.5", + "a_fdsS", + "1.0", + "a_fdsS", + "2.5", + "a_d", + "-1.2E99", + "a_dd", + "-1.2E99", + "a_ddS", + "-1.2E99", + "a_ds", + "1.0", + "a_ds", + "2.5", + "a_dds", + "1.0", + "a_dds", + "2.5", + "a_ddsS", + "1.0", + "a_ddsS", + "2.5", + "a_i", + "-1", + "a_id", + "-1", + "a_idS", + "-1", + "a_is", + "1", + "a_is", + "2", + "a_ids", + "1", + "a_ids", + "2", + "a_idsS", + "1", + "a_idsS", + "2", + "a_l", + "-9999999999", + "a_ld", + "-9999999999", + "a_ldS", + "-9999999999", + "a_ls", + "1", + "a_ls", + "9999999999", + "a_lds", + "1", + "a_lds", + "9999999999", + "a_ldsS", + "1", + "a_ldsS", + "9999999999", + "a_s", + "abc", + "a_sd", + "bcd", + "a_sdS", + "cde", + "a_ss", + "def", + "a_ss", + "efg", + "a_sds", + "fgh", + "a_sds", + "ghi", + "a_sdsS", + "hij", + "a_sdsS", + "ijk", + "a_b", + "false", + "a_bd", + "true", + "a_bdS", + "false", + "a_bs", + "true", + "a_bs", + "false", + "a_bds", + "true", + "a_bds", + "false", + "a_bdsS", + "true", + "a_bdsS", + "false")); + assertJQ(req("q", "id:1"), "/response/numFound==0"); + assertJQ( + req( + "qt", + "/get", + "id", + "1", + "fl", + "id, a_f,a_fd,a_fdS a_fs,a_fds,a_fdsS, " + + "a_d,a_dd,a_ddS, a_ds,a_dds,a_ddsS, a_i,a_id,a_idS a_is,a_ids,a_idsS, " + + "a_l,a_ld,a_ldS, a_ls,a_lds,a_ldsS, a_s,a_sd,a_sdS a_ss,a_sds,a_sdsS, " + + "a_b,a_bd,a_bdS, a_bs,a_bds,a_bdsS"), + "=={'doc':{'id':'1'" + + ", a_f:-1.5, a_fd:-1.5, a_fdS:-1.5, a_fs:[1.0,2.5], a_fds:[1.0,2.5],a_fdsS:[1.0,2.5]" + + ", a_d:-1.2E99, a_dd:-1.2E99, a_ddS:-1.2E99, a_ds:[1.0,2.5],a_dds:[1.0,2.5],a_ddsS:[1.0,2.5]" + + ", a_i:-1, a_id:-1, a_idS:-1, a_is:[1,2],a_ids:[1,2],a_idsS:[1,2]" + + ", a_l:-9999999999, a_ld:-9999999999, a_ldS:-9999999999, a_ls:[1,9999999999],a_lds:[1,9999999999],a_ldsS:[1,9999999999]" + + ", a_s:'abc', a_sd:'bcd', a_sdS:'cde', a_ss:['def','efg'],a_sds:['fgh','ghi'],a_sdsS:['hij','ijk']" + + ", a_b:false, a_bd:true, a_bdS:false, a_bs:[true,false],a_bds:[true,false],a_bdsS:[true,false]" + + " }}"); + assertJQ( + req("qt", "/get", "ids", "1", "fl", "id"), + "=={" + + " 'response':{'numFound':1,'start':0,'numFoundExact':true,'docs':[" + + " {" + + " 'id':'1'}]" + + " }}}"); assertU(commit()); - assertJQ(req("q","id:1") - ,"/response/numFound==1" - ); - - // a cut-n-paste of the first big query, but this time it will be retrieved from the index rather than the transaction log - assertJQ(req("qt","/get", 
"id","1", "fl","id, a_f,a_fd,a_fdS a_fs,a_fds,a_fdsS, a_d,a_dd,a_ddS, a_ds,a_dds,a_ddsS, a_i,a_id,a_idS a_is,a_ids,a_idsS, a_l,a_ld,a_ldS a_ls,a_lds,a_ldsS") - ,"=={'doc':{'id':'1'" + - ", a_f:-1.5, a_fd:-1.5, a_fdS:-1.5, a_fs:[1.0,2.5], a_fds:[1.0,2.5],a_fdsS:[1.0,2.5]" + - ", a_d:-1.2E99, a_dd:-1.2E99, a_ddS:-1.2E99, a_ds:[1.0,2.5],a_dds:[1.0,2.5],a_ddsS:[1.0,2.5]" + - ", a_i:-1, a_id:-1, a_idS:-1, a_is:[1,2],a_ids:[1,2],a_idsS:[1,2]" + - ", a_l:-9999999999, a_ld:-9999999999, a_ldS:-9999999999, a_ls:[1,9999999999],a_lds:[1,9999999999],a_ldsS:[1,9999999999]" + - " }}" - ); - - assertJQ(req("qt","/get","id","1", "fl","id") - ,"=={'doc':{'id':'1'}}" - ); - assertJQ(req("qt","/get","ids","1", "fl","id") - ,"=={" + - " 'response':{'numFound':1,'start':0,'numFoundExact':true,'docs':[" + - " {" + - " 'id':'1'}]" + - " }}}" - ); + assertJQ(req("q", "id:1"), "/response/numFound==1"); + + // a cut-n-paste of the first big query, but this time it will be retrieved from the index + // rather than the transaction log + assertJQ( + req( + "qt", + "/get", + "id", + "1", + "fl", + "id, a_f,a_fd,a_fdS a_fs,a_fds,a_fdsS, a_d,a_dd,a_ddS, a_ds,a_dds,a_ddsS, a_i,a_id,a_idS a_is,a_ids,a_idsS, a_l,a_ld,a_ldS a_ls,a_lds,a_ldsS"), + "=={'doc':{'id':'1'" + + ", a_f:-1.5, a_fd:-1.5, a_fdS:-1.5, a_fs:[1.0,2.5], a_fds:[1.0,2.5],a_fdsS:[1.0,2.5]" + + ", a_d:-1.2E99, a_dd:-1.2E99, a_ddS:-1.2E99, a_ds:[1.0,2.5],a_dds:[1.0,2.5],a_ddsS:[1.0,2.5]" + + ", a_i:-1, a_id:-1, a_idS:-1, a_is:[1,2],a_ids:[1,2],a_idsS:[1,2]" + + ", a_l:-9999999999, a_ld:-9999999999, a_ldS:-9999999999, a_ls:[1,9999999999],a_lds:[1,9999999999],a_ldsS:[1,9999999999]" + + " }}"); + + assertJQ(req("qt", "/get", "id", "1", "fl", "id"), "=={'doc':{'id':'1'}}"); + assertJQ( + req("qt", "/get", "ids", "1", "fl", "id"), + "=={" + + " 'response':{'numFound':1,'start':0,'numFoundExact':true,'docs':[" + + " {" + + " 'id':'1'}]" + + " }}}"); assertU(delI("1")); - assertJQ(req("q","id:1") - ,"/response/numFound==1" - ); - assertJQ(req("qt","/get","id","1") - ,"=={'doc':null}" - ); - assertJQ(req("qt","/get","ids","1") - ,"=={'response':{'numFound':0,'start':0,'numFoundExact':true,'docs':[]}}" - ); - - - assertU(adoc("id","10")); - assertU(adoc("id","11")); - assertJQ(req("qt","/get","id","10", "fl","id") - ,"=={'doc':{'id':'10'}}" - ); + assertJQ(req("q", "id:1"), "/response/numFound==1"); + assertJQ(req("qt", "/get", "id", "1"), "=={'doc':null}"); + assertJQ( + req("qt", "/get", "ids", "1"), + "=={'response':{'numFound':0,'start':0,'numFoundExact':true,'docs':[]}}"); + + assertU(adoc("id", "10")); + assertU(adoc("id", "11")); + assertJQ(req("qt", "/get", "id", "10", "fl", "id"), "=={'doc':{'id':'10'}}"); assertU(delQ("id:10 foo_s:abcdef")); - assertJQ(req("qt","/get","id","10") - ,"=={'doc':null}" - ); - assertJQ(req("qt","/get","id","11", "fl","id") - ,"=={'doc':{'id':'11'}}" - ); + assertJQ(req("qt", "/get", "id", "10"), "=={'doc':null}"); + assertJQ(req("qt", "/get", "id", "11", "fl", "id"), "=={'doc':{'id':'11'}}"); // multivalued field - assertU(adoc("id","12", "val_ls","1", "val_ls","2")); - assertJQ(req("q","id:12") - ,"/response/numFound==0" - ); - assertJQ(req("qt","/get", "id","12", "fl","id,val_ls") - ,"=={'doc':{'id':'12', 'val_ls':[1,2]}}" - ); + assertU(adoc("id", "12", "val_ls", "1", "val_ls", "2")); + assertJQ(req("q", "id:12"), "/response/numFound==0"); + assertJQ( + req("qt", "/get", "id", "12", "fl", "id,val_ls"), "=={'doc':{'id':'12', 'val_ls':[1,2]}}"); assertU(commit()); - assertJQ(req("qt","/get", "id","12", "fl","id,val_ls") - 
,"=={'doc':{'id':'12', 'val_ls':[1,2]}}" - ); - assertJQ(req("q","id:12") - ,"/response/numFound==1" - ); - + assertJQ( + req("qt", "/get", "id", "12", "fl", "id,val_ls"), "=={'doc':{'id':'12', 'val_ls':[1,2]}}"); + assertJQ(req("q", "id:12"), "/response/numFound==1"); SolrQueryRequest req = req(); RefCounted realtimeHolder = req.getCore().getRealtimeSearcher(); @@ -166,143 +255,141 @@ public void testGetRealtime() throws Exception { assertU(adoc("id", "13")); // this should not need to open another realtime searcher - assertJQ(req("qt","/get","id","11", "fl","id", "fq","id:11") - ,"=={doc:{id:'11'}}" - ); + assertJQ(req("qt", "/get", "id", "11", "fl", "id", "fq", "id:11"), "=={doc:{id:'11'}}"); // assert that the same realtime searcher is still in effect (i.e. that we didn't // open a new searcher when we didn't have to). RefCounted realtimeHolder2 = req.getCore().getRealtimeSearcher(); - assertEquals(realtimeHolder.get(), realtimeHolder2.get()); // Autocommit could possibly cause this to fail? + assertEquals( + realtimeHolder.get(), + realtimeHolder2.get()); // Autocommit could possibly cause this to fail? realtimeHolder2.decref(); // filter most likely different segment - assertJQ(req("qt","/get","id","12", "fl","id", "fq","id:11") - ,"=={doc:null}" - ); + assertJQ(req("qt", "/get", "id", "12", "fl", "id", "fq", "id:11"), "=={doc:null}"); // filter most likely same different segment - assertJQ(req("qt","/get","id","12", "fl","id", "fq","id:13") - ,"=={doc:null}" - ); + assertJQ(req("qt", "/get", "id", "12", "fl", "id", "fq", "id:13"), "=={doc:null}"); - assertJQ(req("qt","/get","id","12", "fl","id", "fq","id:12") - ,"=={doc:{id:'12'}}" - ); + assertJQ(req("qt", "/get", "id", "12", "fl", "id", "fq", "id:12"), "=={doc:{id:'12'}}"); assertU(adoc("id", "14")); assertU(adoc("id", "15")); // id list, with some in index and some not, first id from index. Also test mutiple fq params. 
- assertJQ(req("qt","/get","ids","12,14,13,15", "fl","id", "fq","id:[10 TO 14]", "fq","id:[13 TO 19]") - ,"/response/docs==[{id:'14'},{id:'13'}]" - ); + assertJQ( + req( + "qt", + "/get", + "ids", + "12,14,13,15", + "fl", + "id", + "fq", + "id:[10 TO 14]", + "fq", + "id:[13 TO 19]"), + "/response/docs==[{id:'14'},{id:'13'}]"); assertU(adoc("id", "16")); assertU(adoc("id", "17")); // id list, with some in index and some not, first id from tlog - assertJQ(req("qt","/get","ids","17,16,15,14", "fl","id", "fq","id:[15 TO 16]") - ,"/response/docs==[{id:'16'},{id:'15'}]" - ); + assertJQ( + req("qt", "/get", "ids", "17,16,15,14", "fl", "id", "fq", "id:[15 TO 16]"), + "/response/docs==[{id:'16'},{id:'15'}]"); // more complex filter - assertJQ(req("qt","/get","ids","17,16,15,14", "fl","id", "fq","{!frange l=15 u=16}id") - ,"/response/docs==[{id:'16'},{id:'15'}]" - ); + assertJQ( + req("qt", "/get", "ids", "17,16,15,14", "fl", "id", "fq", "{!frange l=15 u=16}id"), + "/response/docs==[{id:'16'},{id:'15'}]"); // test with negative filter - assertJQ(req("qt","/get","ids","15,14", "fl","id", "fq","-id:15") - ,"/response/docs==[{id:'14'}]" - ); - assertJQ(req("qt","/get","ids","17,16,15,14", "fl","id", "fq","-id:[15 TO 17]") - ,"/response/docs==[{id:'14'}]" - ); + assertJQ( + req("qt", "/get", "ids", "15,14", "fl", "id", "fq", "-id:15"), + "/response/docs==[{id:'14'}]"); + assertJQ( + req("qt", "/get", "ids", "17,16,15,14", "fl", "id", "fq", "-id:[15 TO 17]"), + "/response/docs==[{id:'14'}]"); realtimeHolder.decref(); req.close(); - } - @Test public void testVersions() throws Exception { clearIndex(); assertU(commit()); - long version = addAndGetVersion(sdoc("id","1") , null); + long version = addAndGetVersion(sdoc("id", "1"), null); - assertJQ(req("q","id:1") - ,"/response/numFound==0" - ); + assertJQ(req("q", "id:1"), "/response/numFound==0"); // test version is there from rtg - assertJQ(req("qt","/get","id","1") - ,"=={'doc':{'id':'1','_version_':" + version + "}}" - ); + assertJQ(req("qt", "/get", "id", "1"), "=={'doc':{'id':'1','_version_':" + version + "}}"); // test version is there from the index assertU(commit()); - assertJQ(req("qt","/get","id","1") - ,"=={'doc':{'id':'1','_version_':" + version + "}}" - ); + assertJQ(req("qt", "/get", "id", "1"), "=={'doc':{'id':'1','_version_':" + version + "}}"); // simulate an update from the leader version += 10; - updateJ(jsonAdd(sdoc("id","1", "_version_",Long.toString(version))), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "1", "_version_", Long.toString(version))), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); // test version is there from rtg - assertJQ(req("qt","/get","id","1") - ,"=={'doc':{'id':'1','_version_':" + version + "}}" - ); + assertJQ(req("qt", "/get", "id", "1"), "=={'doc':{'id':'1','_version_':" + version + "}}"); // simulate reordering: test that a version less than that does not take affect - updateJ(jsonAdd(sdoc("id","1", "_version_",Long.toString(version - 1))), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "1", "_version_", Long.toString(version - 1))), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); // test that version hasn't changed - assertJQ(req("qt","/get","id","1") - ,"=={'doc':{'id':'1','_version_':" + version + "}}" - ); + assertJQ(req("qt", "/get", "id", "1"), "=={'doc':{'id':'1','_version_':" + version + "}}"); // simulate reordering: test that a delete w/ version less than that does not take affect // TODO: also allow passing version on delete instead of on URL? 
- updateJ(jsonDelId("1"), params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_",Long.toString(version - 1))); + updateJ( + jsonDelId("1"), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", Long.toString(version - 1))); // test that version hasn't changed - assertJQ(req("qt","/get","id","1") - ,"=={'doc':{'id':'1','_version_':" + version + "}}" - ); + assertJQ(req("qt", "/get", "id", "1"), "=={'doc':{'id':'1','_version_':" + version + "}}"); // make sure reordering detection also works after a commit assertU(commit()); // simulate reordering: test that a version less than that does not take affect - updateJ(jsonAdd(sdoc("id","1", "_version_",Long.toString(version - 1))), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "1", "_version_", Long.toString(version - 1))), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); // test that version hasn't changed - assertJQ(req("qt","/get","id","1") - ,"=={'doc':{'id':'1','_version_':" + version + "}}" - ); + assertJQ(req("qt", "/get", "id", "1"), "=={'doc':{'id':'1','_version_':" + version + "}}"); // simulate reordering: test that a delete w/ version less than that does not take affect - updateJ(jsonDelId("1"), params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_",Long.toString(version - 1))); + updateJ( + jsonDelId("1"), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", Long.toString(version - 1))); // test that version hasn't changed - assertJQ(req("qt","/get","id","1") - ,"=={'doc':{'id':'1','_version_':" + version + "}}" - ); + assertJQ(req("qt", "/get", "id", "1"), "=={'doc':{'id':'1','_version_':" + version + "}}"); // now simulate a normal delete from the leader version += 5; - updateJ(jsonDelId("1"), params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_",Long.toString(version))); + updateJ( + jsonDelId("1"), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", Long.toString(version))); // make sure a reordered add doesn't take affect. 
- updateJ(jsonAdd(sdoc("id","1", "_version_",Long.toString(version - 1))), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "1", "_version_", Long.toString(version - 1))), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); // test that it's still deleted - assertJQ(req("qt","/get","id","1") - ,"=={'doc':null}" - ); + assertJQ(req("qt", "/get", "id", "1"), "=={'doc':null}"); // test that we can remember the version of a delete after a commit assertU(commit()); @@ -311,38 +398,43 @@ public void testVersions() throws Exception { long version2 = deleteByQueryAndGetVersion("id:2", null); // test that it's still deleted - assertJQ(req("qt","/get","id","1") - ,"=={'doc':null}" - ); + assertJQ(req("qt", "/get", "id", "1"), "=={'doc':null}"); - version = addAndGetVersion(sdoc("id","2"), null); + version = addAndGetVersion(sdoc("id", "2"), null); version2 = deleteByQueryAndGetVersion("id:2", null); - assertTrue(Math.abs(version2) > version ); + assertTrue(Math.abs(version2) > version); // test that it's deleted - assertJQ(req("qt","/get","id","2") - ,"=={'doc':null}"); - + assertJQ(req("qt", "/get", "id", "2"), "=={'doc':null}"); version2 = Math.abs(version2) + 1000; - updateJ(jsonAdd(sdoc("id","3", "_version_",Long.toString(version2+100))), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","4", "_version_",Long.toString(version2+200))), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "3", "_version_", Long.toString(version2 + 100))), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "4", "_version_", Long.toString(version2 + 200))), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); // this should only affect id:3 so far - deleteByQueryAndGetVersion("id:(3 4 5 6)", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_",Long.toString(-(version2+150))) ); + deleteByQueryAndGetVersion( + "id:(3 4 5 6)", + params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", Long.toString(-(version2 + 150)))); - assertJQ(req("qt","/get","id","3"),"=={'doc':null}"); - assertJQ(req("qt","/get","id","4", "fl","id"),"=={'doc':{'id':'4'}}"); + assertJQ(req("qt", "/get", "id", "3"), "=={'doc':null}"); + assertJQ(req("qt", "/get", "id", "4", "fl", "id"), "=={'doc':{'id':'4'}}"); - updateJ(jsonAdd(sdoc("id","5", "_version_",Long.toString(version2+201))), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","6", "_version_",Long.toString(version2+101))), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "5", "_version_", Long.toString(version2 + 201))), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "6", "_version_", Long.toString(version2 + 101))), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); - // the DBQ should also have caused id:6 to be removed - assertJQ(req("qt","/get","id","5", "fl","id"),"=={'doc':{'id':'5'}}"); - assertJQ(req("qt","/get","id","6"),"=={'doc':null}"); + // the DBQ should also have caused id:6 to be removed + assertJQ(req("qt", "/get", "id", "5", "fl", "id"), "=={'doc':{'id':'5'}}"); + assertJQ(req("qt", "/get", "id", "6"), "=={'doc':null}"); assertU(commit()); - } @Test @@ -350,27 +442,43 @@ public void testOptimisticLocking() throws Exception { clearIndex(); assertU(commit()); - final long version = addAndGetVersion(sdoc("id","1") , null); + final long version = addAndGetVersion(sdoc("id", "1"), null); long version2; // try version added directly on doc - SolrException se = expectThrows(SolrException.class, "version should cause an error", 
- () -> addAndGetVersion(sdoc("id","1", "_version_", Long.toString(version-1)), null)); + SolrException se = + expectThrows( + SolrException.class, + "version should cause an error", + () -> addAndGetVersion(sdoc("id", "1", "_version_", Long.toString(version - 1)), null)); assertEquals("version should cause a conflict", 409, se.code()); // try version added as a parameter on the request - se = expectThrows(SolrException.class, "version should cause an error", - () -> addAndGetVersion(sdoc("id","1"), params("_version_", Long.toString(version-1)))); + se = + expectThrows( + SolrException.class, + "version should cause an error", + () -> + addAndGetVersion(sdoc("id", "1"), params("_version_", Long.toString(version - 1)))); assertEquals("version should cause a conflict", 409, se.code()); // try an add specifying a negative version - se = expectThrows(SolrException.class, "negative version should cause a conflict", - () -> addAndGetVersion(sdoc("id","1"), params("_version_", Long.toString(-version)))); + se = + expectThrows( + SolrException.class, + "negative version should cause a conflict", + () -> addAndGetVersion(sdoc("id", "1"), params("_version_", Long.toString(-version)))); assertEquals("version should cause a conflict", 409, se.code()); // try an add with a greater version - se = expectThrows(SolrException.class, "greater version should cause a conflict", - () -> addAndGetVersion(sdoc("id","1"), params("_version_", Long.toString(version+random().nextInt(1000)+1)))); + se = + expectThrows( + SolrException.class, + "greater version should cause a conflict", + () -> + addAndGetVersion( + sdoc("id", "1"), + params("_version_", Long.toString(version + random().nextInt(1000) + 1)))); assertEquals("version should cause a conflict", 409, se.code()); // @@ -378,42 +486,62 @@ public void testOptimisticLocking() throws Exception { // // try a delete with version on the request - se = expectThrows(SolrException.class, "version should cause an error", - () -> deleteAndGetVersion("1", params("_version_", Long.toString(version-1)))); + se = + expectThrows( + SolrException.class, + "version should cause an error", + () -> deleteAndGetVersion("1", params("_version_", Long.toString(version - 1)))); assertEquals("version should cause a conflict", 409, se.code()); // try a delete with a negative version - se = expectThrows(SolrException.class, "negative version should cause an error", - () -> deleteAndGetVersion("1", params("_version_", Long.toString(-version)))); + se = + expectThrows( + SolrException.class, + "negative version should cause an error", + () -> deleteAndGetVersion("1", params("_version_", Long.toString(-version)))); assertEquals("version should cause a conflict", 409, se.code()); // try a delete with a greater version - se = expectThrows(SolrException.class, "greater version should cause an error", - () -> deleteAndGetVersion("1", params("_version_", Long.toString(version+random().nextInt(1000)+1)))); + se = + expectThrows( + SolrException.class, + "greater version should cause an error", + () -> + deleteAndGetVersion( + "1", params("_version_", Long.toString(version + random().nextInt(1000) + 1)))); assertEquals("version should cause a conflict", 409, se.code()); // try a delete of a document that doesn't exist, specifying a specific version - se = expectThrows(SolrException.class, "document does not exist should cause an error", - () -> deleteAndGetVersion("I_do_not_exist", params("_version_", Long.toString(version)))); + se = + expectThrows( + SolrException.class, + "document does not 
exist should cause an error", + () -> + deleteAndGetVersion("I_do_not_exist", params("_version_", Long.toString(version)))); assertEquals("version should cause a conflict", 409, se.code()); - // try a delete of a document that doesn't exist, specifying that it should not version2 = deleteAndGetVersion("I_do_not_exist", params("_version_", Long.toString(-1))); assertTrue(version2 < 0); // overwrite the document - version2 = addAndGetVersion(sdoc("id","1", "_version_", Long.toString(version)), null); + version2 = addAndGetVersion(sdoc("id", "1", "_version_", Long.toString(version)), null); assertTrue(version2 > version); // overwriting the previous version should now fail - se = expectThrows(SolrException.class, "overwriting previous version should fail", - () -> addAndGetVersion(sdoc("id","1"), params("_version_", Long.toString(version)))); + se = + expectThrows( + SolrException.class, + "overwriting previous version should fail", + () -> addAndGetVersion(sdoc("id", "1"), params("_version_", Long.toString(version)))); assertEquals(409, se.code()); // deleting the previous version should now fail - se = expectThrows(SolrException.class, "deleting the previous version should now fail", - () -> deleteAndGetVersion("1", params("_version_", Long.toString(version)))); + se = + expectThrows( + SolrException.class, + "deleting the previous version should now fail", + () -> deleteAndGetVersion("1", params("_version_", Long.toString(version)))); assertEquals(409, se.code()); final long prevVersion = version2; @@ -422,60 +550,62 @@ public void testOptimisticLocking() throws Exception { version2 = deleteAndGetVersion("1", params("_version_", Long.toString(prevVersion))); // overwriting the previous existing doc should now fail (since it was deleted) - se = expectThrows(SolrException.class, "overwriting the previous existing doc should now fail (since it was deleted)", - () -> addAndGetVersion(sdoc("id","1"), params("_version_", Long.toString(prevVersion)))); + se = + expectThrows( + SolrException.class, + "overwriting the previous existing doc should now fail (since it was deleted)", + () -> + addAndGetVersion(sdoc("id", "1"), params("_version_", Long.toString(prevVersion)))); assertEquals(409, se.code()); // deleting the previous existing doc should now fail (since it was deleted) - se = expectThrows(SolrException.class, "deleting the previous existing doc should now fail (since it was deleted)", - () -> deleteAndGetVersion("1", params("_version_", Long.toString(prevVersion)))); + se = + expectThrows( + SolrException.class, + "deleting the previous existing doc should now fail (since it was deleted)", + () -> deleteAndGetVersion("1", params("_version_", Long.toString(prevVersion)))); assertEquals(409, se.code()); // overwriting a negative version should work - version2 = addAndGetVersion(sdoc("id","1"), params("_version_", Long.toString(-(prevVersion-1)))); + version2 = + addAndGetVersion(sdoc("id", "1"), params("_version_", Long.toString(-(prevVersion - 1)))); assertTrue(version2 > version); long lastVersion = version2; // sanity test that we see the right version via rtg - assertJQ(req("qt","/get","id","1") - ,"=={'doc':{'id':'1','_version_':" + lastVersion + "}}" - ); + assertJQ(req("qt", "/get", "id", "1"), "=={'doc':{'id':'1','_version_':" + lastVersion + "}}"); } - - /*** - @Test - public void testGetRealtime() throws Exception { - SolrQueryRequest sr1 = req("q","foo"); - IndexReader r1 = sr1.getCore().getRealtimeReader(); - - assertU(adoc("id","1")); - - IndexReader r2 = 
sr1.getCore().getRealtimeReader(); - assertNotSame(r1, r2); - int refcount = r2.getRefCount(); - - // make sure a new reader wasn't opened - IndexReader r3 = sr1.getCore().getRealtimeReader(); - assertSame(r2, r3); - assertEquals(refcount+1, r3.getRefCount()); - - assertU(commit()); - - // this is not critical, but currently a commit does not refresh the reader - // if nothing has changed - IndexReader r4 = sr1.getCore().getRealtimeReader(); - assertEquals(refcount+2, r4.getRefCount()); - - - r1.decRef(); - r2.decRef(); - r3.decRef(); - r4.decRef(); - sr1.close(); - } - ***/ - + // @Test + // public void testGetRealtime() throws Exception { + // SolrQueryRequest sr1 = req("q","foo"); + // IndexReader r1 = sr1.getCore().getRealtimeReader(); + // + // assertU(adoc("id","1")); + // + // IndexReader r2 = sr1.getCore().getRealtimeReader(); + // assertNotSame(r1, r2); + // int refcount = r2.getRefCount(); + // + // // make sure a new reader wasn't opened + // IndexReader r3 = sr1.getCore().getRealtimeReader(); + // assertSame(r2, r3); + // assertEquals(refcount+1, r3.getRefCount()); + // + // assertU(commit()); + // + // // this is not critical, but currently a commit does not refresh the reader + // // if nothing has changed + // IndexReader r4 = sr1.getCore().getRealtimeReader(); + // assertEquals(refcount+2, r4.getRefCount()); + // + // + // r1.decRef(); + // r2.decRef(); + // r3.decRef(); + // r4.decRef(); + // sr1.close(); + // } @Test public void testStressGetRealtime() throws Exception { @@ -485,26 +615,31 @@ public void testStressGetRealtime() throws Exception { // req().getCore().getUpdateHandler().getIndexWriterProvider().getIndexWriter(req().getCore()).setInfoStream(System.out); final int commitPercent = 5 + random().nextInt(20); - final int softCommitPercent = 30+random().nextInt(75); // what percent of the commits are soft - final int deletePercent = 4+random().nextInt(25); - final int deleteByQueryPercent = 1+random().nextInt(5); - final int optimisticPercent = 1+random().nextInt(50); // percent change that an update uses optimistic locking - final int optimisticCorrectPercent = 25+random().nextInt(70); // percent change that a version specified will be correct - final int filteredGetPercent = random().nextInt( random().nextInt(20)+1 ); // percent of time that a get will be filtered... we normally don't want too high. + // what percent of the commits are soft + final int softCommitPercent = 30 + random().nextInt(75); + final int deletePercent = 4 + random().nextInt(25); + final int deleteByQueryPercent = 1 + random().nextInt(5); + // percent chance that an update uses optimistic locking + final int optimisticPercent = 1 + random().nextInt(50); + // percent chance that a version specified will be correct + final int optimisticCorrectPercent = 25 + random().nextInt(70); + // percent of time that a get will be filtered... we normally don't want too high. + final int filteredGetPercent = random().nextInt(random().nextInt(20) + 1); final int ndocs = 5 + (random().nextBoolean() ? random().nextInt(25) : random().nextInt(200)); int nWriteThreads = 5 + random().nextInt(25); - final int maxConcurrentCommits = nWriteThreads; // number of committers at a time... 
+ final int maxConcurrentCommits = nWriteThreads; - // query variables + // query variables final int percentRealtimeQuery = 60; - final AtomicLong operations = new AtomicLong(50000); // number of query operations to perform in total + final AtomicLong operations = + new AtomicLong(50000); // number of query operations to perform in total int nReadThreads = 5 + random().nextInt(25); - verbose("commitPercent=", commitPercent); - verbose("softCommitPercent=",softCommitPercent); - verbose("deletePercent=",deletePercent); + verbose("softCommitPercent=", softCommitPercent); + verbose("deletePercent=", deletePercent); verbose("deleteByQueryPercent=", deleteByQueryPercent); verbose("ndocs=", ndocs); verbose("nWriteThreads=", nWriteThreads); @@ -513,253 +648,284 @@ public void testStressGetRealtime() throws Exception { verbose("maxConcurrentCommits=", maxConcurrentCommits); verbose("operations=", operations); - initModel(ndocs); final AtomicInteger numCommitting = new AtomicInteger(); List threads = new ArrayList<>(); - for (int i=0; i 0) { - int oper = rand.nextInt(100); - - if (oper < commitPercent) { - if (numCommitting.incrementAndGet() <= maxConcurrentCommits) { - Map newCommittedModel; - long version; - - synchronized(TestRealTimeGet.this) { - newCommittedModel = new HashMap<>(model); // take a snapshot - version = snapshotCount++; - verbose("took snapshot version=",version); - } + for (int i = 0; i < nWriteThreads; i++) { + Thread thread = + new Thread("WRITER" + i) { + Random rand = new Random(random().nextInt()); + + @Override + public void run() { + try { + while (operations.get() > 0) { + int oper = rand.nextInt(100); + + if (oper < commitPercent) { + if (numCommitting.incrementAndGet() <= maxConcurrentCommits) { + Map newCommittedModel; + long version; + + synchronized (TestRealTimeGet.this) { + newCommittedModel = new HashMap<>(model); // take a snapshot + version = snapshotCount++; + verbose("took snapshot version=", version); + } + + if (rand.nextInt(100) < softCommitPercent) { + verbose("softCommit start"); + assertU(TestHarness.commit("softCommit", "true")); + verbose("softCommit end"); + } else { + verbose("hardCommit start"); + assertU(commit()); + verbose("hardCommit end"); + } + + synchronized (TestRealTimeGet.this) { + // install this model snapshot only if it's newer than the current one + if (version >= committedModelClock) { + if (VERBOSE) { + verbose("installing new committedModel version=" + committedModelClock); + } + committedModel = newCommittedModel; + committedModelClock = version; + } + } + } + numCommitting.decrementAndGet(); + continue; + } - if (rand.nextInt(100) < softCommitPercent) { - verbose("softCommit start"); - assertU(TestHarness.commit("softCommit","true")); - verbose("softCommit end"); - } else { - verbose("hardCommit start"); - assertU(commit()); - verbose("hardCommit end"); - } + int id = rand.nextInt(ndocs); + Object sync = syncArr[id]; + + // set the lastId before we actually change it sometimes to try and + // uncover more race conditions between writing and reading + boolean before = rand.nextBoolean(); + if (before) { + lastId = id; + } - synchronized(TestRealTimeGet.this) { - // install this model snapshot only if it's newer than the current one - if (version >= committedModelClock) { - if (VERBOSE) { - verbose("installing new committedModel version="+committedModelClock); + // We can't concurrently update the same document and retain our invariants of + // increasing values since we can't guarantee what order the updates will be + // executed. 
Even with versions, we can't remove the sync because increasing + // versions does not mean increasing vals. + synchronized (sync) { + DocInfo info = model.get(id); + + long val = info.val; + long nextVal = Math.abs(val) + 1; + + if (oper < commitPercent + deletePercent) { + boolean opt = rand.nextInt() < optimisticPercent; + boolean correct = opt ? rand.nextInt() < optimisticCorrectPercent : false; + long badVersion = correct ? 0 : badVersion(rand, info.version); + + if (VERBOSE) { + if (!opt) { + verbose("deleting id", id, "val=", nextVal); + } else { + verbose( + "deleting id", + id, + "val=", + nextVal, + "existing_version=", + info.version, + (correct ? "" : (" bad_version=" + badVersion))); + } + } + + // assertU("" + id + ""); + Long version = null; + + if (opt) { + if (correct) { + version = + deleteAndGetVersion( + Integer.toString(id), + params("_version_", Long.toString(info.version))); + } else { + SolrException se = + expectThrows( + SolrException.class, + "should not get random version", + () -> + deleteAndGetVersion( + Integer.toString(id), + params("_version_", Long.toString(badVersion)))); + assertEquals(409, se.code()); + } + } else { + version = deleteAndGetVersion(Integer.toString(id), null); + } + + if (version != null) { + model.put(id, new DocInfo(version, -nextVal)); + } + + if (VERBOSE) { + verbose("deleting id", id, "val=", nextVal, "DONE"); + } + } else if (oper < commitPercent + deletePercent + deleteByQueryPercent) { + if (VERBOSE) { + verbose("deleteByQuery id ", id, "val=", nextVal); + } + + assertU("id:" + id + ""); + model.put(id, new DocInfo(-1L, -nextVal)); + if (VERBOSE) { + verbose("deleteByQuery id", id, "val=", nextVal, "DONE"); + } + } else { + boolean opt = rand.nextInt() < optimisticPercent; + boolean correct = opt ? rand.nextInt() < optimisticCorrectPercent : false; + long badVersion = correct ? 0 : badVersion(rand, info.version); + + if (VERBOSE) { + if (!opt) { + verbose("adding id", id, "val=", nextVal); + } else { + verbose( + "adding id", + id, + "val=", + nextVal, + "existing_version=", + info.version, + (correct ? 
"" : (" bad_version=" + badVersion))); + } + } + + Long version = null; + SolrInputDocument sd = + sdoc("id", Integer.toString(id), FIELD, Long.toString(nextVal)); + + if (opt) { + if (correct) { + version = + addAndGetVersion( + sd, params("_version_", Long.toString(info.version))); + } else { + SolrException se = + expectThrows( + SolrException.class, + "should not get bad version", + () -> + addAndGetVersion( + sd, params("_version_", Long.toString(badVersion)))); + assertEquals(409, se.code()); + } + } else { + version = addAndGetVersion(sd, null); + } + + if (version != null) { + model.put(id, new DocInfo(version, nextVal)); + } + + if (VERBOSE) { + verbose("adding id", id, "val=", nextVal, "DONE"); + } } - committedModel = newCommittedModel; - committedModelClock = version; + } // end sync + + if (!before) { + lastId = id; } } + } catch (Throwable e) { + operations.set(-1L); + throw new RuntimeException(e); } - numCommitting.decrementAndGet(); - continue; } + }; + threads.add(thread); + } - int id = rand.nextInt(ndocs); - Object sync = syncArr[id]; - - // set the lastId before we actually change it sometimes to try and - // uncover more race conditions between writing and reading - boolean before = rand.nextBoolean(); - if (before) { - lastId = id; - } + for (int i = 0; i < nReadThreads; i++) { + Thread thread = + new Thread("READER" + i) { + Random rand = new Random(random().nextInt()); - // We can't concurrently update the same document and retain our invariants of increasing values - // since we can't guarantee what order the updates will be executed. - // Even with versions, we can't remove the sync because increasing versions does not mean increasing vals. - synchronized (sync) { - DocInfo info = model.get(id); + @Override + public void run() { + try { + while (operations.decrementAndGet() >= 0) { + // bias toward a recently changed doc + int id = rand.nextInt(100) < 25 ? lastId : rand.nextInt(ndocs); - long val = info.val; - long nextVal = Math.abs(val)+1; + // when indexing, we update the index, then the model + // so when querying, we should first check the model, and then the index - if (oper < commitPercent + deletePercent) { - boolean opt = rand.nextInt() < optimisticPercent; - boolean correct = opt ? rand.nextInt() < optimisticCorrectPercent : false; - long badVersion = correct ? 0 : badVersion(rand, info.version); + boolean realTime = rand.nextInt(100) < percentRealtimeQuery; + DocInfo info; - if (VERBOSE) { - if (!opt) { - verbose("deleting id",id,"val=",nextVal); + if (realTime) { + info = model.get(id); } else { - verbose("deleting id",id,"val=",nextVal, "existing_version=",info.version, (correct ? 
"" : (" bad_version=" + badVersion))); + synchronized (TestRealTimeGet.this) { + info = committedModel.get(id); + } } - } - - // assertU("" + id + ""); - Long version = null; - if (opt) { - if (correct) { - version = deleteAndGetVersion(Integer.toString(id), params("_version_", Long.toString(info.version))); - } else { - SolrException se = expectThrows(SolrException.class, "should not get random version", - () -> deleteAndGetVersion(Integer.toString(id), params("_version_", Long.toString(badVersion)))); - assertEquals(409, se.code()); + if (VERBOSE) { + verbose("querying id", id); } - } else { - version = deleteAndGetVersion(Integer.toString(id), null); - } - if (version != null) { - model.put(id, new DocInfo(version, -nextVal)); - } - - if (VERBOSE) { - verbose("deleting id", id, "val=",nextVal,"DONE"); - } - } else if (oper < commitPercent + deletePercent + deleteByQueryPercent) { - if (VERBOSE) { - verbose("deleteByQuery id ",id, "val=",nextVal); - } - - assertU("id:" + id + ""); - model.put(id, new DocInfo(-1L, -nextVal)); - if (VERBOSE) { - verbose("deleteByQuery id",id, "val=",nextVal,"DONE"); - } - } else { - boolean opt = rand.nextInt() < optimisticPercent; - boolean correct = opt ? rand.nextInt() < optimisticCorrectPercent : false; - long badVersion = correct ? 0 : badVersion(rand, info.version); - - if (VERBOSE) { - if (!opt) { - verbose("adding id",id,"val=",nextVal); + boolean filteredOut = false; + SolrQueryRequest sreq; + if (realTime) { + ModifiableSolrParams p = + params("wt", "json", "qt", "/get", "ids", Integer.toString(id)); + if (rand.nextInt(100) < filteredGetPercent) { + int idToFilter = rand.nextBoolean() ? id : rand.nextInt(ndocs); + filteredOut = idToFilter != id; + p.add("fq", "id:" + idToFilter); + } + sreq = req(p); } else { - verbose("adding id",id,"val=",nextVal, "existing_version=",info.version, (correct ? "" : (" bad_version=" + badVersion))); + sreq = + req("wt", "json", "q", "id:" + Integer.toString(id), "omitHeader", "true"); } - } - - Long version = null; - SolrInputDocument sd = sdoc("id", Integer.toString(id), FIELD, Long.toString(nextVal)); - if (opt) { - if (correct) { - version = addAndGetVersion(sd, params("_version_", Long.toString(info.version))); + String response = h.query(sreq); + @SuppressWarnings({"rawtypes"}) + Map rsp = (Map) Utils.fromJSONString(response); + @SuppressWarnings({"rawtypes"}) + List doclist = (List) (((Map) rsp.get("response")).get("docs")); + if (doclist.size() == 0) { + // there's no info we can get back with a delete, so not much we can check + // without further synchronization. 
This is also correct when filteredOut==true } else { + assertEquals(1, doclist.size()); + long foundVal = (Long) (((Map) doclist.get(0)).get(FIELD)); + long foundVer = (Long) (((Map) doclist.get(0)).get("_version_")); + if (filteredOut + || foundVal < Math.abs(info.val) + || (foundVer == info.version + && foundVal != info.val)) { // if the version matches, the val must match + verbose("ERROR, id=", id, "found=", response, "model", info); + assertTrue(false); + } + } + } + } catch (Throwable e) { + operations.set(-1L); + throw new RuntimeException(e); } + } + }; threads.add(thread); } - - for (int i=0; i= 0) { - // bias toward a recently changed doc - int id = rand.nextInt(100) < 25 ? lastId : rand.nextInt(ndocs); - - // when indexing, we update the index, then the model - // so when querying, we should first check the model, and then the index - - boolean realTime = rand.nextInt(100) < percentRealtimeQuery; - DocInfo info; - - if (realTime) { - info = model.get(id); - } else { - synchronized(TestRealTimeGet.this) { - info = committedModel.get(id); - } - } - - if (VERBOSE) { - verbose("querying id", id); - } - - boolean filteredOut = false; - SolrQueryRequest sreq; - if (realTime) { - ModifiableSolrParams p = params("wt","json", "qt","/get", "ids",Integer.toString(id)); - if (rand.nextInt(100) < filteredGetPercent) { - int idToFilter = rand.nextBoolean() ? 
id : rand.nextInt(ndocs); - filteredOut = idToFilter != id; - p.add("fq", "id:"+idToFilter); - } - sreq = req(p); - } else { - sreq = req("wt","json", "q","id:"+Integer.toString(id), "omitHeader","true"); - } - - String response = h.query(sreq); - @SuppressWarnings({"rawtypes"}) - Map rsp = (Map) Utils.fromJSONString(response); - @SuppressWarnings({"rawtypes"}) - List doclist = (List)(((Map)rsp.get("response")).get("docs")); - if (doclist.size() == 0) { - // there's no info we can get back with a delete, so not much we can check without further synchronization - // This is also correct when filteredOut==true - } else { - assertEquals(1, doclist.size()); - long foundVal = (Long)(((Map)doclist.get(0)).get(FIELD)); - long foundVer = (Long)(((Map)doclist.get(0)).get("_version_")); - if (filteredOut || foundVal < Math.abs(info.val) - || (foundVer == info.version && foundVal != info.val) ) { // if the version matches, the val must - verbose("ERROR, id=", id, "found=",response,"model",info); - assertTrue(false); - } - } - } - } - catch (Throwable e) { - operations.set(-1L); - throw new RuntimeException(e); - } - } - }; - - threads.add(thread); - } - - for (Thread thread : threads) { thread.start(); } @@ -767,8 +933,5 @@ public void run() { for (Thread thread : threads) { thread.join(); } - } - - } diff --git a/solr/core/src/test/org/apache/solr/search/TestRecovery.java b/solr/core/src/test/org/apache/solr/search/TestRecovery.java index 6eac90a2a6f..593d6deacc4 100644 --- a/solr/core/src/test/org/apache/solr/search/TestRecovery.java +++ b/solr/core/src/test/org/apache/solr/search/TestRecovery.java @@ -16,7 +16,13 @@ */ package org.apache.solr.search; +import static org.apache.solr.search.TestRecovery.VersionProvider.getNextVersion; +import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM; +import com.codahale.metrics.Gauge; +import com.codahale.metrics.Meter; +import com.codahale.metrics.Metric; +import com.codahale.metrics.MetricRegistry; import java.io.File; import java.io.RandomAccessFile; import java.lang.invoke.MethodHandles; @@ -34,11 +40,6 @@ import java.util.concurrent.Future; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; - -import com.codahale.metrics.Gauge; -import com.codahale.metrics.Meter; -import com.codahale.metrics.Metric; -import com.codahale.metrics.MetricRegistry; import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.util.TimeSource; @@ -57,34 +58,33 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.search.TestRecovery.VersionProvider.getNextVersion; -import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM; - public class TestRecovery extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); // means that we've seen the leader and have version info (i.e. we are a non-leader replica) - private static String FROM_LEADER = DistribPhase.FROMLEADER.toString(); + private static String FROM_LEADER = DistribPhase.FROMLEADER.toString(); - private static int timeout=60; // acquire timeout in seconds. change this to a huge number when debugging to prevent threads from advancing. + // acquire timeout in seconds. change this to a huge number when debugging to prevent threads + // from advancing. 
+ private static int timeout = 60; // TODO: fix this test to not require FSDirectory static String savedFactory; - @Before public void beforeTest() throws Exception { savedFactory = System.getProperty("solr.DirectoryFactory"); System.setProperty("solr.directoryFactory", "org.apache.solr.core.MockFSDirectoryFactory"); - initCore("solrconfig-tlog.xml","schema15.xml"); - + initCore("solrconfig-tlog.xml", "schema15.xml"); + // validate that the schema was not changed to an unexpected state IndexSchema schema = h.getCore().getLatestSchema(); - assertTrue(schema.getFieldOrNull("_version_").hasDocValues() && !schema.getFieldOrNull("_version_").indexed() - && !schema.getFieldOrNull("_version_").stored()); - + assertTrue( + schema.getFieldOrNull("_version_").hasDocValues() + && !schema.getFieldOrNull("_version_").indexed() + && !schema.getFieldOrNull("_version_").stored()); } - + @After public void afterTest() { TestInjection.reset(); // do after every test, don't wait for AfterClass @@ -93,13 +93,14 @@ public void afterTest() { } else { System.setProperty("solr.directoryFactory", savedFactory); } - + deleteCore(); } private Map getMetrics() { SolrMetricManager manager = h.getCoreContainer().getMetricManager(); - MetricRegistry registry = manager.registry(h.getCore().getCoreMetricManager().getRegistryName()); + MetricRegistry registry = + manager.registry(h.getCore().getCoreMetricManager().getRegistryName()); return registry.getMetrics(); } @@ -111,13 +112,14 @@ public void stressLogReplay() throws Exception { final Semaphore logReplay = new Semaphore(0); final Semaphore logReplayFinish = new Semaphore(0); - UpdateLog.testing_logReplayHook = () -> { - try { - assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }; + UpdateLog.testing_logReplayHook = + () -> { + try { + assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }; UpdateLog.testing_logReplayFinishHook = logReplayFinish::release; clearIndex(); @@ -150,8 +152,10 @@ public void stressLogReplay() throws Exception { // delete by id ArrayList vals = new ArrayList<>(docIdToVal.values()); int val = vals.get(random().nextInt(vals.size())); - deleteByQueryAndGetVersion("val_i_dvo:"+val, null); - docIdToVal.entrySet().removeIf(integerIntegerEntry -> integerIntegerEntry.getValue() == val); + deleteByQueryAndGetVersion("val_i_dvo:" + val, null); + docIdToVal + .entrySet() + .removeIf(integerIntegerEntry -> integerIntegerEntry.getValue() == val); } else { // delete by query ArrayList ids = new ArrayList<>(docIdToVal.keySet()); @@ -163,17 +167,18 @@ public void stressLogReplay() throws Exception { h.close(); createCore(); - assertJQ(req("q","*:*") ,"/response/numFound==0"); + assertJQ(req("q", "*:*"), "/response/numFound==0"); // unblock recovery logReplay.release(Integer.MAX_VALUE); assertTrue(logReplayFinish.tryAcquire(timeout, TimeUnit.SECONDS)); assertU(commit()); - assertJQ(req("q","*:*") ,"/response/numFound=="+docIdToVal.size()); + assertJQ(req("q", "*:*"), "/response/numFound==" + docIdToVal.size()); for (Map.Entry entry : docIdToVal.entrySet()) { - assertJQ(req("q","id:"+entry.getKey(), "fl", "val_i_dvo") , + assertJQ( + req("q", "id:" + entry.getKey(), "fl", "val_i_dvo"), "/response/numFound==1", - "/response/docs==[{'val_i_dvo':"+entry.getValue()+"}]"); + "/response/docs==[{'val_i_dvo':" + entry.getValue() + "}]"); } } finally { UpdateLog.testing_logReplayHook = null; @@ -183,24 +188,24 @@ public 
void stressLogReplay() throws Exception { @Test public void testLogReplay() throws Exception { - + try { TestInjection.skipIndexWriterCommitOnClose = true; final Semaphore logReplay = new Semaphore(0); final Semaphore logReplayFinish = new Semaphore(0); - UpdateLog.testing_logReplayHook = () -> { - try { - assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }; + UpdateLog.testing_logReplayHook = + () -> { + try { + assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }; UpdateLog.testing_logReplayFinishHook = () -> logReplayFinish.release(); - clearIndex(); assertU(commit()); @@ -210,11 +215,13 @@ public void testLogReplay() throws Exception { versions.addFirst(addAndGetVersion(sdoc("id", "A12"), null)); versions.addFirst(deleteByQueryAndGetVersion("id:A11", null)); versions.addFirst(addAndGetVersion(sdoc("id", "A13"), null)); - versions.addFirst(addAndGetVersion(sdoc("id", "A12", "val_i_dvo", map("set", 1)), null)); // atomic update - versions.addFirst(addAndGetVersion(sdoc("id", "A12", "val_i_dvo", map("set", 2)), null)); // in-place update - assertJQ(req("q","*:*"),"/response/numFound==0"); + versions.addFirst( + addAndGetVersion(sdoc("id", "A12", "val_i_dvo", map("set", 1)), null)); // atomic update + versions.addFirst( + addAndGetVersion(sdoc("id", "A12", "val_i_dvo", map("set", 2)), null)); // in-place update + assertJQ(req("q", "*:*"), "/response/numFound==0"); - assertJQ(req("qt","/get", "getVersions",""+versions.size()) ,"/versions==" + versions); + assertJQ(req("qt", "/get", "getVersions", "" + versions.size()), "/versions==" + versions); h.close(); createCore(); @@ -226,50 +233,55 @@ public void testLogReplay() throws Exception { // verify that previous close didn't do a commit // recovery should be blocked by our hook - assertJQ(req("q","*:*") ,"/response/numFound==0"); + assertJQ(req("q", "*:*"), "/response/numFound==0"); // make sure we can still access versions after a restart - assertJQ(req("qt","/get", "getVersions",""+versions.size()),"/versions==" + versions); + assertJQ(req("qt", "/get", "getVersions", "" + versions.size()), "/versions==" + versions); - assertEquals(UpdateLog.State.REPLAYING, h.getCore().getUpdateHandler().getUpdateLog().getState()); + assertEquals( + UpdateLog.State.REPLAYING, h.getCore().getUpdateHandler().getUpdateLog().getState()); // check metrics @SuppressWarnings({"unchecked"}) - Gauge state = (Gauge)metrics.get("TLOG.state"); + Gauge state = (Gauge) metrics.get("TLOG.state"); assertEquals(UpdateLog.State.REPLAYING.ordinal(), state.getValue().intValue()); @SuppressWarnings({"unchecked"}) - Gauge replayingLogs = (Gauge)metrics.get("TLOG.replay.remaining.logs"); + Gauge replayingLogs = (Gauge) metrics.get("TLOG.replay.remaining.logs"); assertTrue(replayingLogs.getValue().intValue() > 0); @SuppressWarnings({"unchecked"}) - Gauge replayingDocs = (Gauge)metrics.get("TLOG.replay.remaining.bytes"); + Gauge replayingDocs = (Gauge) metrics.get("TLOG.replay.remaining.bytes"); assertTrue(replayingDocs.getValue().longValue() > 0); - Meter replayDocs = (Meter)metrics.get("TLOG.replay.ops"); + Meter replayDocs = (Meter) metrics.get("TLOG.replay.ops"); long initialOps = replayDocs.getCount(); // unblock recovery logReplay.release(1000); // make sure we can still access versions during recovery - assertJQ(req("qt","/get", "getVersions",""+versions.size()),"/versions==" + versions); + assertJQ(req("qt", "/get", 
"getVersions", "" + versions.size()), "/versions==" + versions); // wait until recovery has finished assertTrue(logReplayFinish.tryAcquire(timeout, TimeUnit.SECONDS)); - assertJQ(req("q","val_i_dvo:2") ,"/response/numFound==1"); // assert that in-place update is retained + assertJQ( + req("q", "val_i_dvo:2"), + "/response/numFound==1"); // assert that in-place update is retained - assertJQ(req("q","*:*") ,"/response/numFound==3"); + assertJQ(req("q", "*:*"), "/response/numFound==3"); assertEquals(7L, replayDocs.getCount() - initialOps); assertEquals(UpdateLog.State.ACTIVE.ordinal(), state.getValue().intValue()); // make sure we can still access versions after recovery - assertJQ(req("qt","/get", "getVersions",""+versions.size()) ,"/versions==" + versions); + assertJQ(req("qt", "/get", "getVersions", "" + versions.size()), "/versions==" + versions); - assertU(adoc("id","A2")); - assertU(adoc("id","A3")); + assertU(adoc("id", "A2")); + assertU(adoc("id", "A3")); assertU(delI("A2")); - assertU(adoc("id","A4")); + assertU(adoc("id", "A4")); - assertJQ(req("q","*:*") ,"/response/numFound==3"); - assertJQ(req("q","val_i_dvo:2") ,"/response/numFound==1"); // assert that in-place update is retained + assertJQ(req("q", "*:*"), "/response/numFound==3"); + assertJQ( + req("q", "val_i_dvo:2"), + "/response/numFound==1"); // assert that in-place update is retained h.close(); createCore(); @@ -278,8 +290,8 @@ public void testLogReplay() throws Exception { // wait until recovery has finished assertTrue(logReplayFinish.tryAcquire(timeout, TimeUnit.SECONDS)); - assertJQ(req("q","*:*") ,"/response/numFound==5"); - assertJQ(req("q","id:A2") ,"/response/numFound==0"); + assertJQ(req("q", "*:*"), "/response/numFound==5"); + assertJQ(req("q", "id:A2"), "/response/numFound==0"); // no updates, so insure that recovery does not run h.close(); @@ -288,18 +300,21 @@ public void testLogReplay() throws Exception { // Solr should kick this off now // h.getCore().getUpdateHandler().getUpdateLog().recoverFromLog(); - assertJQ(req("q","*:*") ,"/response/numFound==5"); - assertJQ(req("q","val_i_dvo:2") ,"/response/numFound==1"); // assert that in-place update is retained + assertJQ(req("q", "*:*"), "/response/numFound==5"); + assertJQ( + req("q", "val_i_dvo:2"), + "/response/numFound==1"); // assert that in-place update is retained Thread.sleep(100); - assertEquals(permits, logReplay.availablePermits()); // no updates, so insure that recovery didn't run + assertEquals( + permits, logReplay.availablePermits()); // no updates, so insure that recovery didn't run - assertEquals(UpdateLog.State.ACTIVE, h.getCore().getUpdateHandler().getUpdateLog().getState()); + assertEquals( + UpdateLog.State.ACTIVE, h.getCore().getUpdateHandler().getUpdateLog().getState()); } finally { UpdateLog.testing_logReplayHook = null; UpdateLog.testing_logReplayFinishHook = null; } - } @Test @@ -310,50 +325,57 @@ public void testNewDBQAndDocMatchingOldDBQDuringLogReplay() throws Exception { final Semaphore logReplay = new Semaphore(0); final Semaphore logReplayFinish = new Semaphore(0); - UpdateLog.testing_logReplayHook = () -> { - try { - assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }; + UpdateLog.testing_logReplayHook = + () -> { + try { + assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }; UpdateLog.testing_logReplayFinishHook = () -> logReplayFinish.release(); clearIndex(); assertU(commit()); 
- // because we're sending updates during log replay, we can't emulate replica logic -- we need to use - // normal updates like a leader / single-node instance would get. + // because we're sending updates during log replay, we can't emulate replica logic -- we need + // to use normal updates like a leader / single-node instance would get. // - // (In SolrCloud mode, when a replica run recoverFromLog, replica in this time period will have state = DOWN, - // so It won't receive any updates.) - - updateJ(jsonAdd(sdoc("id","B0")),params()); - updateJ(jsonAdd(sdoc("id","B1")),params()); // should be deleted by subsequent DBQ in tlog - updateJ(jsonAdd(sdoc("id","B2")),params()); // should be deleted by DBQ that arives during tlog replay - updateJ(jsonDelQ("id:B1 OR id:B3 OR id:B6"),params()); - updateJ(jsonAdd(sdoc("id","B3")),params()); // should *NOT* be deleted by previous DBQ in tlog - updateJ(jsonAdd(sdoc("id","B4")),params()); // should be deleted by DBQ that arives during tlog replay - updateJ(jsonAdd(sdoc("id","B5")),params()); - + // (In SolrCloud mode, when a replica runs recoverFromLog, the replica will have state = DOWN + // for this period, so it won't receive any updates.) + + updateJ(jsonAdd(sdoc("id", "B0")), params()); + updateJ(jsonAdd(sdoc("id", "B1")), params()); // should be deleted by subsequent DBQ in tlog + updateJ( + jsonAdd(sdoc("id", "B2")), + params()); // should be deleted by DBQ that arrives during tlog replay + updateJ(jsonDelQ("id:B1 OR id:B3 OR id:B6"), params()); + updateJ( + jsonAdd(sdoc("id", "B3")), params()); // should *NOT* be deleted by previous DBQ in tlog + updateJ( + jsonAdd(sdoc("id", "B4")), + params()); // should be deleted by DBQ that arrives during tlog replay + updateJ(jsonAdd(sdoc("id", "B5")), params()); + // sanity check no updates have been applied yet (just in tlog) - assertJQ(req("q","*:*"),"/response/numFound==0"); + assertJQ(req("q", "*:*"), "/response/numFound==0"); h.close(); createCore(); // (Attempts to) kick off recovery (which is currently blocked by semaphore) // verify that previous close didn't do a commit & that recovery should be blocked by our hook - assertJQ(req("q","*:*") ,"/response/numFound==0"); + assertJQ(req("q", "*:*"), "/response/numFound==0"); // begin recovery (first few items) - logReplay.release(TestUtil.nextInt(random(),1,6)); + logReplay.release(TestUtil.nextInt(random(), 1, 6)); // ... but before recover is completely unblocked/finished, have a *new* DBQ arrive - // that should delete some items we either have just replayed, or are about to replay (or maybe both)... - updateJ(jsonDelQ("id:B2 OR id:B4"),params()); + // ... but before recovery is completely unblocked/finished, have a *new* DBQ arrive + // that should delete some items we either have just replayed, or are about to replay (or + // maybe both)... 
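// A compact model of the version rule the DBQ assertions in these tests depend on: an
// operation is dropped when a higher-versioned operation for the same id has already been
// applied, so a delete-by-query carrying an older version must not erase a newer add.
// VersionedStore is a hypothetical illustration of that rule, not Solr's UpdateLog
// implementation.
import java.util.HashMap;
import java.util.Map;
import java.util.function.Predicate;

public class VersionedStore {
  final Map<String, Long> docVersions = new HashMap<>();

  // An add wins only if its version is newer than anything already seen for that id.
  void add(String id, long version) {
    docVersions.merge(id, version, Math::max);
  }

  // A DBQ removes only docs whose last applied version is older than its own.
  void deleteByQuery(Predicate<String> matches, long deleteVersion) {
    docVersions.entrySet().removeIf(e -> matches.test(e.getKey()) && e.getValue() < deleteVersion);
  }

  public static void main(String[] args) {
    VersionedStore store = new VersionedStore();
    store.add("B1", 1010); // replayed from the tlog
    store.add("B6", 1021); // re-added while the older tlog DBQ is still pending
    // The tlog DBQ (version 1017) replays *after* the newer add:
    store.deleteByQuery(id -> id.equals("B1") || id.equals("B6"), 1017);
    System.out.println(store.docVersions); // {B6=1021} -- B1 deleted, newer B6 retained
  }
}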
+ updateJ(jsonDelQ("id:B2 OR id:B4"), params()); // ...and re-add a doc that would have matched a DBQ already in the tlog // (which may/may-not have been replayed yet) - updateJ(jsonAdd(sdoc("id","B6")),params()); // should *NOT* be deleted by DBQ from tlog + updateJ(jsonAdd(sdoc("id", "B6")), params()); // should *NOT* be deleted by DBQ from tlog assertU(commit()); // now completely unblock recovery @@ -362,124 +384,190 @@ public void testNewDBQAndDocMatchingOldDBQDuringLogReplay() throws Exception { // wait until recovery has finished assertTrue(logReplayFinish.tryAcquire(timeout, TimeUnit.SECONDS)); - // verify only the expected docs are found, even with out of order DBQ and DBQ that arived during recovery - assertJQ(req("q", "*:*", "fl", "id", "sort", "id asc") - , "/response/docs==[{'id':'B0'}, {'id':'B3'}, {'id':'B5'}, {'id':'B6'}]"); - + // verify only the expected docs are found, even with an out-of-order DBQ and a DBQ that + // arrived during recovery + assertJQ( + req("q", "*:*", "fl", "id", "sort", "id asc"), + "/response/docs==[{'id':'B0'}, {'id':'B3'}, {'id':'B5'}, {'id':'B6'}]"); + } finally { UpdateLog.testing_logReplayHook = null; UpdateLog.testing_logReplayFinishHook = null; } - } @Test public void testLogReplayWithReorderedDBQ() throws Exception { - testLogReplayWithReorderedDBQWrapper(() -> { + testLogReplayWithReorderedDBQWrapper( + () -> { String v1010 = getNextVersion(); String v1015 = getNextVersion(); String v1017_del = "-" + getNextVersion(); String v1020 = getNextVersion(); - - updateJ(jsonAdd(sdoc("id", "RDBQ1_1", "_version_", v1010)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); - updateJ(jsonDelQ("id:RDBQ1_2"), params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", v1017_del)); // This should've arrived after the ver2 update - updateJ(jsonAdd(sdoc("id", "RDBQ1_2", "_version_", v1015)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); - updateJ(jsonAdd(sdoc("id", "RDBQ1_3", "_version_", v1020)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + + updateJ( + jsonAdd(sdoc("id", "RDBQ1_1", "_version_", v1010)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonDelQ("id:RDBQ1_2"), + params( + DISTRIB_UPDATE_PARAM, + FROM_LEADER, + "_version_", + v1017_del)); // This should've arrived after the ver2 update + updateJ( + jsonAdd(sdoc("id", "RDBQ1_2", "_version_", v1015)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "RDBQ1_3", "_version_", v1020)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); }, - () -> assertJQ(req("q", "*:*"), "/response/numFound==2") - ); + () -> assertJQ(req("q", "*:*"), "/response/numFound==2")); } @Test public void testLogReplayWithReorderedDBQByAsterixAndChildDocs() throws Exception { - testLogReplayWithReorderedDBQWrapper(() -> { + testLogReplayWithReorderedDBQWrapper( + () -> { String v1010 = getNextVersion(); String v1012 = getNextVersion(); String v1017_del = "-" + getNextVersion(); String v1018 = getNextVersion(); String v1020 = getNextVersion(); - + // 1010 - will be deleted - updateJ(jsonAdd(sdocWithChildren("RDBQ2_1", v1010)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdocWithChildren("RDBQ2_1", v1010)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); // 1018 - should be kept, including child docs - updateJ(jsonAdd(sdocWithChildren("RDBQ2_2", v1018)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdocWithChildren("RDBQ2_2", v1018)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); // 1017 - delete should affect only 1010 - updateJ(jsonDelQ("_root_:RDBQ2_1 _root_:RDBQ2_2 
id:RDBQ2_3 _root_:RDBQ2_4"), params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", v1017_del)); // This should've arrived after the ver2 update + updateJ( + jsonDelQ("_root_:RDBQ2_1 _root_:RDBQ2_2 id:RDBQ2_3 _root_:RDBQ2_4"), + params( + DISTRIB_UPDATE_PARAM, + FROM_LEADER, + "_version_", + v1017_del)); // This should've arrived after the ver2 update // 1012 - will be deleted - updateJ(jsonAdd(sdoc("id", "RDBQ2_3", "_version_", v1012)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "RDBQ2_3", "_version_", v1012)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); // 1020 - should be untouched - updateJ(jsonAdd(sdocWithChildren("RDBQ2_4", v1020)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdocWithChildren("RDBQ2_4", v1020)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); }, - () -> assertJQ(req("q", "*:*"), "/response/numFound==6") - ); + () -> assertJQ(req("q", "*:*"), "/response/numFound==6")); } @Test public void testLogReplayWithReorderedDBQByIdAndChildDocs() throws Exception { - testLogReplayWithReorderedDBQWrapper(() -> { + testLogReplayWithReorderedDBQWrapper( + () -> { String v1010 = getNextVersion(); String v1012 = getNextVersion(); String v1017_del = "-" + getNextVersion(); String v1018 = getNextVersion(); String v1020 = getNextVersion(); - + // 1010 - will be deleted - updateJ(jsonAdd(sdocWithChildren("RDBQ3_1", v1010)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdocWithChildren("RDBQ3_1", v1010)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); // 1018 - should be kept, including child docs - updateJ(jsonAdd(sdocWithChildren("RDBQ3_2", v1018)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdocWithChildren("RDBQ3_2", v1018)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); // 1017 - delete should affect only 1010 - updateJ(jsonDelQ("id:RDBQ3_1 id:RDBQ3_2 id:RDBQ3_3 id:RDBQ3_4"), params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", v1017_del)); // This should've arrived after the ver2 update + updateJ( + jsonDelQ("id:RDBQ3_1 id:RDBQ3_2 id:RDBQ3_3 id:RDBQ3_4"), + params( + DISTRIB_UPDATE_PARAM, + FROM_LEADER, + "_version_", + v1017_del)); // This should've arrived after the ver2 update // 1012 - will be deleted - updateJ(jsonAdd(sdoc("id", "RDBQ3_3", "_version_", v1012)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "RDBQ3_3", "_version_", v1012)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); // 1020 - should be untouched - updateJ(jsonAdd(sdocWithChildren("RDBQ3_4", v1020)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdocWithChildren("RDBQ3_4", v1020)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); }, - () -> assertJQ(req("q", "*:*"), "/response/numFound==8") // RDBQ3_2, RDBQ3_4 and 6 children docs (delete by id does not delete child docs) - ); + () -> + assertJQ( + req("q", "*:*"), + "/response/numFound==8") // RDBQ3_2, RDBQ3_4 and 6 children docs (delete by id does + // not delete child docs) + ); } @Test public void testLogReplayWithReorderedDBQInsertingChildnodes() throws Exception { - testLogReplayWithReorderedDBQWrapper(() -> { + testLogReplayWithReorderedDBQWrapper( + () -> { String v1013 = getNextVersion(); String v1017_del = "-" + getNextVersion(); - - updateJ(jsonDelQ("id:RDBQ4_2"), params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", v1017_del)); + + updateJ( + jsonDelQ("id:RDBQ4_2"), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", v1017_del)); // test doc: B1 // 1013 - will be inserted with 3 children - 
updateJ(jsonAdd(sdocWithChildren("RDBQ4_1", v1013, 3)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdocWithChildren("RDBQ4_1", v1013, 3)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); }, - () -> assertJQ(req("q", "*:*"), "/response/numFound==4") // RDBQ4_1 and RDBQ4_2, plus 2x 3 children - ); + () -> + assertJQ( + req("q", "*:*"), "/response/numFound==4") // RDBQ4_1 and RDBQ4_2, plus 2x 3 children + ); } - @Test public void testLogReplayWithReorderedDBQUpdateWithDifferentChildCount() throws Exception { - testLogReplayWithReorderedDBQWrapper(() -> { + testLogReplayWithReorderedDBQWrapper( + () -> { String v1011 = getNextVersion(); String v1012 = getNextVersion(); String v1013 = getNextVersion(); String v1018 = getNextVersion(); String v1019_del = "-" + getNextVersion(); - + // control // 1011 - will be inserted with 3 children as 1012 - updateJ(jsonAdd(sdocWithChildren("RDBQ5_1", v1011, 2)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdocWithChildren("RDBQ5_1", v1011, 2)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); // 1012 - this should be the final - updateJ(jsonAdd(sdocWithChildren("RDBQ5_1", v1012, 3)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdocWithChildren("RDBQ5_1", v1012, 3)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); // 1013 - will be inserted with 3 children as 1018 - updateJ(jsonAdd(sdocWithChildren("RDBQ5_2", v1013, 2)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); - updateJ(jsonDelQ("id:RDBQ5_3"), params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", v1019_del)); + updateJ( + jsonAdd(sdocWithChildren("RDBQ5_2", v1013, 2)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonDelQ("id:RDBQ5_3"), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", v1019_del)); // 1018 - this should be the final - updateJ(jsonAdd(sdocWithChildren("RDBQ5_2", v1018, 3)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdocWithChildren("RDBQ5_2", v1018, 3)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); }, - () -> assertJQ(req("q", "*:*"), "/response/numFound==8") // RDBQ5_1+3children+RDBQ5_2+3children - ); + () -> + assertJQ( + req("q", "*:*"), "/response/numFound==8") // RDBQ5_1+3children+RDBQ5_2+3children + ); } - private void testLogReplayWithReorderedDBQWrapper(ThrowingRunnable act, ThrowingRunnable assrt) throws Exception { + private void testLogReplayWithReorderedDBQWrapper(ThrowingRunnable act, ThrowingRunnable assrt) + throws Exception { try { @@ -487,24 +575,23 @@ private void testLogReplayWithReorderedDBQWrapper(ThrowingRunnable act, Throwing final Semaphore logReplay = new Semaphore(0); final Semaphore logReplayFinish = new Semaphore(0); - UpdateLog.testing_logReplayHook = () -> { - try { - assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }; + UpdateLog.testing_logReplayHook = + () -> { + try { + assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }; UpdateLog.testing_logReplayFinishHook = () -> logReplayFinish.release(); - clearIndex(); assertU(commit()); // Adding some documents act.run(); - assertJQ(req("q", "*:*"), "/response/numFound==0"); h.close(); @@ -531,7 +618,6 @@ private void testLogReplayWithReorderedDBQWrapper(ThrowingRunnable act, Throwing UpdateLog.testing_logReplayHook = null; UpdateLog.testing_logReplayFinishHook = null; } - } @Test @@ -541,17 +627,17 @@ public void testBuffering() throws Exception { final Semaphore 
logReplay = new Semaphore(0); final Semaphore logReplayFinish = new Semaphore(0); - UpdateLog.testing_logReplayHook = () -> { - try { - assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }; + UpdateLog.testing_logReplayHook = + () -> { + try { + assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }; UpdateLog.testing_logReplayFinishHook = logReplayFinish::release; - SolrQueryRequest req = req(); UpdateHandler uhandler = req.getCore().getUpdateHandler(); UpdateLog ulog = uhandler.getUpdateLog(); @@ -573,14 +659,14 @@ public void testBuffering() throws Exception { ulog.bufferUpdates(); assertEquals(UpdateLog.State.BUFFERING, ulog.getState()); @SuppressWarnings({"unchecked"}) - Gauge state = (Gauge)metrics.get("TLOG.state"); + Gauge state = (Gauge) metrics.get("TLOG.state"); assertEquals(UpdateLog.State.BUFFERING.ordinal(), state.getValue().intValue()); @SuppressWarnings({"unchecked"}) - Gauge bufferedOps = (Gauge)metrics.get("TLOG.buffered.ops"); + Gauge bufferedOps = (Gauge) metrics.get("TLOG.buffered.ops"); int initialOps = bufferedOps.getValue(); - Meter applyingBuffered = (Meter)metrics.get("TLOG.applyingBuffered.ops"); + Meter applyingBuffered = (Meter) metrics.get("TLOG.applyingBuffered.ops"); long initialApplyingOps = applyingBuffered.getCount(); - + String v3 = getNextVersion(); String v940_del = "-" + getNextVersion(); String v950_del = "-" + getNextVersion(); @@ -598,38 +684,43 @@ public void testBuffering() throws Exception { String v2060_del = "-" + getNextVersion(); String v3000_del = "-" + getNextVersion(); - String versionListFirstCheck = String.join(",", v2010_del, v1030, v1020, v1017_del, v1015, v1010); - String versionListSecondCheck = String.join(",", v3000_del, v1080, v1050, v1060, v940_del, v1040 ,v3, v2010_del, v1030, v1020, v1017_del, v1015, v1010); + String versionListFirstCheck = + String.join(",", v2010_del, v1030, v1020, v1017_del, v1015, v1010); + String versionListSecondCheck = + String.join( + ",", v3000_del, v1080, v1050, v1060, v940_del, v1040, v3, v2010_del, v1030, v1020, + v1017_del, v1015, v1010); // simulate updates from a leader - updateJ(jsonAdd(sdoc("id","B1", "_version_",v1010)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","B11", "_version_",v1015)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonDelQ("id:B1 id:B11 id:B2 id:B3"), params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_",v1017_del)); - updateJ(jsonAdd(sdoc("id","B2", "_version_",v1020)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","B3", "_version_",v1030)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - deleteAndGetVersion("B1", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_",v2010_del)); - - assertJQ(req("qt","/get", "getVersions","6") - ,"=={'versions':["+versionListFirstCheck+"]}" - ); + updateJ( + jsonAdd(sdoc("id", "B1", "_version_", v1010)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "B11", "_version_", v1015)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonDelQ("id:B1 id:B11 id:B2 id:B3"), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", v1017_del)); + updateJ( + jsonAdd(sdoc("id", "B2", "_version_", v1020)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "B3", "_version_", v1030)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + deleteAndGetVersion("B1", params(DISTRIB_UPDATE_PARAM, 
FROM_LEADER, "_version_", v2010_del)); + + assertJQ( + req("qt", "/get", "getVersions", "6"), "=={'versions':[" + versionListFirstCheck + "]}"); assertU(commit()); - assertJQ(req("qt","/get", "getVersions","6") - ,"=={'versions':["+versionListFirstCheck+"]}" - ); + assertJQ( + req("qt", "/get", "getVersions", "6"), "=={'versions':[" + versionListFirstCheck + "]}"); // updates should be buffered, so we should not see any results yet. - assertJQ(req("q", "*:*") - , "/response/numFound==0" - ); + assertJQ(req("q", "*:*"), "/response/numFound==0"); // real-time get should also not show anything (this could change in the future, // but it's currently used for validating version numbers too, so it would // be bad for updates to be visible if we're just buffering. - assertJQ(req("qt","/get", "id","B3") - ,"=={'doc':null}" - ); + assertJQ(req("qt", "/get", "id", "B3"), "=={'doc':null}"); assertEquals(6, bufferedOps.getValue().intValue() - initialOps); @@ -645,39 +736,51 @@ public void testBuffering() throws Exception { assertEquals(6L, applyingBuffered.getCount() - initialApplyingOps); - assertJQ(req("qt","/get", "getVersions","6") - ,"=={'versions':["+versionListFirstCheck+"]}" - ); + assertJQ( + req("qt", "/get", "getVersions", "6"), "=={'versions':[" + versionListFirstCheck + "]}"); - - assertJQ(req("q", "*:*") - , "/response/numFound==2" - ); + assertJQ(req("q", "*:*"), "/response/numFound==2"); // move back to recovering ulog.bufferUpdates(); assertEquals(UpdateLog.State.BUFFERING, ulog.getState()); - Long ver = getVer(req("qt","/get", "id","B3")); + Long ver = getVer(req("qt", "/get", "id", "B3")); assertEquals(Long.valueOf(v1030), ver); // add a reordered doc that shouldn't overwrite one in the index - updateJ(jsonAdd(sdoc("id","B3", "_version_",v3)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "B3", "_version_", v3)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); // reorder two buffered updates - updateJ(jsonAdd(sdoc("id","B4", "_version_",v1040)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - deleteAndGetVersion("B4", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_",v940_del)); // this update should not take affect - updateJ(jsonAdd(sdoc("id","B6", "_version_",v1060)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","B5", "_version_",v1050)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","B8", "_version_",v1080)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - - // test that delete by query is at least buffered along with everything else so it will delete the - // currently buffered id:8 (even if it doesn't currently support versioning) - updateJ("{\"delete\": { \"query\":\"id:B2 OR id:B8\" }}", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_",v3000_del)); - - assertJQ(req("qt","/get", "getVersions","13") - ,"=={'versions':[" + versionListSecondCheck + "]}" // the "3" appears because versions aren't checked while buffering - ); + updateJ( + jsonAdd(sdoc("id", "B4", "_version_", v1040)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + deleteAndGetVersion( + "B4", + params( + DISTRIB_UPDATE_PARAM, + FROM_LEADER, + "_version_", + v940_del)); // this update should not take effect + updateJ( + jsonAdd(sdoc("id", "B6", "_version_", v1060)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "B5", "_version_", v1050)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "B8", "_version_", v1080)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + + // test that delete by 
query is at least buffered along with everything else so it will delete + // the currently buffered id:8 (even if it doesn't currently support versioning) + updateJ( + "{\"delete\": { \"query\":\"id:B2 OR id:B8\" }}", + params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", v3000_del)); + + assertJQ( + req("qt", "/get", "getVersions", "13"), + "=={'versions':[" + + versionListSecondCheck + + "]}" // the "3" appears because versions aren't checked while buffering + ); logReplay.drainPermits(); rinfoFuture = ulog.applyBufferedUpdates(); @@ -688,28 +791,38 @@ public void testBuffering() throws Exception { logReplay.release(1); // now add another update - updateJ(jsonAdd(sdoc("id","B7", "_version_",v1070)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "B7", "_version_", v1070)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); // a reordered update that should be dropped - deleteAndGetVersion("B5", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_",v950_del)); + deleteAndGetVersion("B5", params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", v950_del)); - deleteAndGetVersion("B6", params(DISTRIB_UPDATE_PARAM,FROM_LEADER, "_version_",v2060_del)); + deleteAndGetVersion("B6", params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", v2060_del)); logReplay.release(1000); UpdateLog.RecoveryInfo recInfo = rinfoFuture.get(); - assertJQ(req("q", "*:*", "sort","id asc", "fl","id,_version_") - , "/response/docs==[" - + "{'id':'B3','_version_':"+v1030+"}" - + ",{'id':'B4','_version_':"+v1040+"}" - + ",{'id':'B5','_version_':"+v1050+"}" - + ",{'id':'B7','_version_':"+v1070+"}" - +"]" - ); + assertJQ( + req("q", "*:*", "sort", "id asc", "fl", "id,_version_"), + "/response/docs==[" + + "{'id':'B3','_version_':" + + v1030 + + "}" + + ",{'id':'B4','_version_':" + + v1040 + + "}" + + ",{'id':'B5','_version_':" + + v1050 + + "}" + + ",{'id':'B7','_version_':" + + v1070 + + "}" + + "]"); assertEquals(1, recInfo.deleteByQuery); - assertEquals(UpdateLog.State.ACTIVE, ulog.getState()); // leave each test method in a good state + assertEquals( + UpdateLog.State.ACTIVE, ulog.getState()); // leave each test method in a good state assertEquals(0, bufferedOps.getValue().intValue()); } finally { @@ -718,10 +831,8 @@ public void testBuffering() throws Exception { req().close(); } - } - @Test public void testDropBuffered() throws Exception { @@ -729,17 +840,17 @@ public void testDropBuffered() throws Exception { final Semaphore logReplay = new Semaphore(0); final Semaphore logReplayFinish = new Semaphore(0); - UpdateLog.testing_logReplayHook = () -> { - try { - assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }; + UpdateLog.testing_logReplayHook = + () -> { + try { + assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }; UpdateLog.testing_logReplayFinishHook = () -> logReplayFinish.release(); - SolrQueryRequest req = req(); UpdateHandler uhandler = req.getCore().getUpdateHandler(); UpdateLog ulog = uhandler.getUpdateLog(); @@ -760,7 +871,7 @@ public void testDropBuffered() throws Exception { String v302 = getNextVersion(); String v998 = getNextVersion(); String v999 = getNextVersion(); - + clearIndex(); assertU(commit()); @@ -775,95 +886,129 @@ public void testDropBuffered() throws Exception { assertEquals(UpdateLog.State.BUFFERING, ulog.getState()); // simulate updates from a leader - updateJ(jsonAdd(sdoc("id","C1", "_version_",v101)), 
params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","C2", "_version_",v102)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","C3", "_version_",v103)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "C1", "_version_", v101)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "C2", "_version_", v102)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "C3", "_version_", v103)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); assertTrue(ulog.dropBufferedUpdates()); ulog.bufferUpdates(); - updateJ(jsonAdd(sdoc("id", "C4", "_version_",v104)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id", "C5", "_version_",v105)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "C4", "_version_", v104)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "C5", "_version_", v105)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); logReplay.release(1000); rinfoFuture = ulog.applyBufferedUpdates(); UpdateLog.RecoveryInfo rinfo = rinfoFuture.get(); assertEquals(2, rinfo.adds); - assertJQ(req("qt","/get", "getVersions","2") - ,"=={'versions':["+v105+","+v104+"]}" - ); + assertJQ(req("qt", "/get", "getVersions", "2"), "=={'versions':[" + v105 + "," + v104 + "]}"); // this time add some docs first before buffering starts (so tlog won't be at pos 0) - updateJ(jsonAdd(sdoc("id","C100", "_version_",v200)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","C101", "_version_",v201)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "C100", "_version_", v200)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "C101", "_version_", v201)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); ulog.bufferUpdates(); - updateJ(jsonAdd(sdoc("id","C103", "_version_",v203)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","C104", "_version_",v204)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "C103", "_version_", v203)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "C104", "_version_", v204)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); assertTrue(ulog.dropBufferedUpdates()); ulog.bufferUpdates(); - updateJ(jsonAdd(sdoc("id","C105", "_version_",v205)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","C106", "_version_",v206)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "C105", "_version_", v205)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "C106", "_version_", v206)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); rinfoFuture = ulog.applyBufferedUpdates(); rinfo = rinfoFuture.get(); assertEquals(2, rinfo.adds); - assertJQ(req("q", "*:*", "sort","_version_ asc", "fl","id,_version_") - , "/response/docs==[" - + "{'id':'C4','_version_':"+v104+"}" - + ",{'id':'C5','_version_':"+v105+"}" - + ",{'id':'C100','_version_':"+v200+"}" - + ",{'id':'C101','_version_':"+v201+"}" - + ",{'id':'C105','_version_':"+v205+"}" - + ",{'id':'C106','_version_':"+v206+"}" - +"]" - ); + assertJQ( + req("q", "*:*", "sort", "_version_ asc", "fl", "id,_version_"), + "/response/docs==[" + + "{'id':'C4','_version_':" + + v104 + + "}" + + ",{'id':'C5','_version_':" + + v105 + + "}" + + ",{'id':'C100','_version_':" + + v200 + + "}" + + ",{'id':'C101','_version_':" + + v201 + + "}" + + ",{'id':'C105','_version_':" + + v205 + + "}" + + 
",{'id':'C106','_version_':" + + v206 + + "}" + + "]"); // Note that the v101->v103 are dropped, therefore it does not present in RTG - assertJQ(req("qt","/get", "getVersions","6") - ,"=={'versions':["+String.join(",",v206,v205,v201,v200,v105,v104)+"]}" - ); + assertJQ( + req("qt", "/get", "getVersions", "6"), + "=={'versions':[" + String.join(",", v206, v205, v201, v200, v105, v104) + "]}"); ulog.bufferUpdates(); assertEquals(UpdateLog.State.BUFFERING, ulog.getState()); - updateJ(jsonAdd(sdoc("id","C301", "_version_",v998)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","C302", "_version_",v999)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "C301", "_version_", v998)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "C302", "_version_", v999)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); assertTrue(ulog.dropBufferedUpdates()); // make sure we can overwrite with a lower version // TODO: is this functionality needed? - updateJ(jsonAdd(sdoc("id","C301", "_version_",v301)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","C302", "_version_",v302)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "C301", "_version_", v301)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "C302", "_version_", v302)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); assertU(commit()); - assertJQ(req("qt","/get", "getVersions","2") - ,"=={'versions':["+v302+","+v301+"]}" - ); - - assertJQ(req("q", "*:*", "sort","_version_ desc", "fl","id,_version_", "rows","2") - , "/response/docs==[" - + "{'id':'C302','_version_':"+v302+"}" - + ",{'id':'C301','_version_':"+v301+"}" - +"]" - ); - + assertJQ(req("qt", "/get", "getVersions", "2"), "=={'versions':[" + v302 + "," + v301 + "]}"); - updateJ(jsonAdd(sdoc("id","C2", "_version_",v302)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + assertJQ( + req("q", "*:*", "sort", "_version_ desc", "fl", "id,_version_", "rows", "2"), + "/response/docs==[" + + "{'id':'C302','_version_':" + + v302 + + "}" + + ",{'id':'C301','_version_':" + + v301 + + "}" + + "]"); + updateJ( + jsonAdd(sdoc("id", "C2", "_version_", v302)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); - - - assertEquals(UpdateLog.State.ACTIVE, ulog.getState()); // leave each test method in a good state + assertEquals( + UpdateLog.State.ACTIVE, ulog.getState()); // leave each test method in a good state } finally { UpdateLog.testing_logReplayHook = null; UpdateLog.testing_logReplayFinishHook = null; req().close(); } - } @Test @@ -873,17 +1018,17 @@ public void testBufferedMultipleCalls() throws Exception { final Semaphore logReplay = new Semaphore(0); final Semaphore logReplayFinish = new Semaphore(0); - UpdateLog.testing_logReplayHook = () -> { - try { - assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }; + UpdateLog.testing_logReplayHook = + () -> { + try { + assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }; UpdateLog.testing_logReplayFinishHook = () -> logReplayFinish.release(); - SolrQueryRequest req = req(); UpdateHandler uhandler = req.getCore().getUpdateHandler(); UpdateLog ulog = uhandler.getUpdateLog(); @@ -901,7 +1046,7 @@ public void testBufferedMultipleCalls() throws Exception { String v204 = getNextVersion(); String v205 = getNextVersion(); String v206 = getNextVersion(); - + clearIndex(); 
assertU(commit()); assertEquals(UpdateLog.State.ACTIVE, ulog.getState()); @@ -910,76 +1055,105 @@ public void testBufferedMultipleCalls() throws Exception { assertEquals(UpdateLog.State.BUFFERING, ulog.getState()); // simulate updates from a leader - updateJ(jsonAdd(sdoc("id","c1", "_version_",v101)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","c2", "_version_",v102)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","c3", "_version_",v103)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - - // call bufferUpdates again (this currently happens when recovery fails)... we should get a new starting point + updateJ( + jsonAdd(sdoc("id", "c1", "_version_", v101)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "c2", "_version_", v102)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "c3", "_version_", v103)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + + // call bufferUpdates again (this currently happens when recovery fails)... we should get a + // new starting point ulog.bufferUpdates(); assertEquals(UpdateLog.State.BUFFERING, ulog.getState()); - updateJ(jsonAdd(sdoc("id", "c4", "_version_",v104)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id", "c5", "_version_",v105)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "c4", "_version_", v104)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "c5", "_version_", v105)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); logReplay.release(1000); rinfoFuture = ulog.applyBufferedUpdates(); UpdateLog.RecoveryInfo rinfo = rinfoFuture.get(); assertEquals(2, rinfo.adds); - assertJQ(req("qt","/get", "getVersions","2") - ,"=={'versions':["+v105+","+v104+"]}" - ); + assertJQ(req("qt", "/get", "getVersions", "2"), "=={'versions':[" + v105 + "," + v104 + "]}"); - updateJ(jsonAdd(sdoc("id","c100", "_version_",v200)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","c101", "_version_",v201)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "c100", "_version_", v200)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "c101", "_version_", v201)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); ulog.bufferUpdates(); - updateJ(jsonAdd(sdoc("id","c103", "_version_",v203)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","c104", "_version_",v204)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - - // call bufferUpdates again (this currently happens when recovery fails)... we should get a new starting point + updateJ( + jsonAdd(sdoc("id", "c103", "_version_", v203)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "c104", "_version_", v204)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + + // call bufferUpdates again (this currently happens when recovery fails)... 
we should get a + // new starting point ulog.bufferUpdates(); - updateJ(jsonAdd(sdoc("id","c105", "_version_",v205)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","c106", "_version_",v206)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "c105", "_version_", v205)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "c106", "_version_", v206)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); rinfoFuture = ulog.applyBufferedUpdates(); rinfo = rinfoFuture.get(); assertEquals(2, rinfo.adds); - assertJQ(req("q", "*:*", "sort","_version_ asc", "fl","id,_version_") - , "/response/docs==[" - + "{'id':'c4','_version_':"+v104+"}" - + ",{'id':'c5','_version_':"+v105+"}" - + ",{'id':'c100','_version_':"+v200+"}" - + ",{'id':'c101','_version_':"+v201+"}" - + ",{'id':'c105','_version_':"+v205+"}" - + ",{'id':'c106','_version_':"+v206+"}" -+"" +"]" - ); - - assertJQ(req("qt","/get", "getVersions","6") - ,"=={'versions':["+String.join(",",v206,v205,v201,v200,v105,v104)+"]}" - ); - - assertEquals(UpdateLog.State.ACTIVE, ulog.getState()); // leave each test method in a good state + assertJQ( + req("q", "*:*", "sort", "_version_ asc", "fl", "id,_version_"), + "/response/docs==[" + + "{'id':'c4','_version_':" + + v104 + + "}" + + ",{'id':'c5','_version_':" + + v105 + + "}" + + ",{'id':'c100','_version_':" + + v200 + + "}" + + ",{'id':'c101','_version_':" + + v201 + + "}" + + ",{'id':'c105','_version_':" + + v205 + + "}" + + ",{'id':'c106','_version_':" + + v206 + + "}" + + "" + + "]"); + + assertJQ( + req("qt", "/get", "getVersions", "6"), + "=={'versions':[" + String.join(",", v206, v205, v201, v200, v105, v104) + "]}"); + + assertEquals( + UpdateLog.State.ACTIVE, ulog.getState()); // leave each test method in a good state } finally { UpdateLog.testing_logReplayHook = null; UpdateLog.testing_logReplayFinishHook = null; req().close(); } - } - // we need to make sure that the log is informed of a core reload @Test public void testReload() throws Exception { - long version = addAndGetVersion(sdoc("id","reload1") , null); + long version = addAndGetVersion(sdoc("id", "reload1"), null); h.reload(); - version = addAndGetVersion(sdoc("id","reload1", "_version_", Long.toString(version)), null); + version = addAndGetVersion(sdoc("id", "reload1", "_version_", Long.toString(version)), null); assertU(commit()); @@ -987,23 +1161,22 @@ public void testReload() throws Exception { // and we should go to the index to check the version. This indirectly tests that // the update log was informed of the reload. See SOLR-4858 - version = addAndGetVersion(sdoc("id","reload1", "_version_", Long.toString(version)), null); + version = addAndGetVersion(sdoc("id", "reload1", "_version_", Long.toString(version)), null); // a deleteByQuery currently forces open a new realtime reader via the update log. // This also tests that the update log was informed of the new update handler. deleteByQueryAndGetVersion("foo_t:hownowbrowncow", null); - version = addAndGetVersion(sdoc("id","reload1", "_version_", Long.toString(version)), null); + version = addAndGetVersion(sdoc("id", "reload1", "_version_", Long.toString(version)), null); // if the update log was not informed of the new update handler, then the old core will - // incorrectly be used for some of the operations above and opened searchers - // will never be closed. This used to cause the test framework to fail because of unclosed directory checks. 
- // SolrCore.openNewSearcher was modified to throw an error if the core is closed, resulting in - // a faster fail. + // incorrectly be used for some of the operations above and opened searchers will never be + // closed. This used to cause the test framework to fail because of unclosed directory checks. + // SolrCore.openNewSearcher was modified to throw an error if the core is closed, resulting in a + // faster fail. } - @Test public void testExistOldBufferLog() throws Exception { @@ -1018,7 +1191,7 @@ public void testExistOldBufferLog() throws Exception { String v102 = getNextVersion(); String v103 = getNextVersion(); String v117 = getNextVersion(); - + clearIndex(); assertU(commit()); @@ -1026,9 +1199,12 @@ public void testExistOldBufferLog() throws Exception { ulog.bufferUpdates(); // simulate updates from a leader - updateJ(jsonAdd(sdoc("id","Q1", "_version_",v101)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","Q2", "_version_",v102)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","Q3", "_version_",v103)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "Q1", "_version_", v101)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "Q2", "_version_", v102)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "Q3", "_version_", v103)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); assertEquals(UpdateLog.State.BUFFERING, ulog.getState()); req.close(); @@ -1040,7 +1216,8 @@ public void testExistOldBufferLog() throws Exception { ulog = uhandler.getUpdateLog(); // the core does not replay updates from buffer tlog on startup - assertTrue(ulog.existOldBufferLog()); // since we died while buffering, we should see this last + assertTrue( + ulog.existOldBufferLog()); // since we died while buffering, we should see this last // buffer tlog won't be removed on restart req.close(); @@ -1055,12 +1232,17 @@ public void testExistOldBufferLog() throws Exception { ulog.bufferUpdates(); ulog.applyBufferedUpdates(); - + TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); - timeout.waitFor("Timeout waiting for finish replay updates", + timeout.waitFor( + "Timeout waiting for finish replay updates", () -> h.getCore().getUpdateHandler().getUpdateLog().getState() == UpdateLog.State.ACTIVE); - - updateJ(jsonAdd(sdoc("id","Q7", "_version_",v117)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); // do another add to make sure flags are back to normal + + updateJ( + jsonAdd(sdoc("id", "Q7", "_version_", v117)), + params( + DISTRIB_UPDATE_PARAM, + FROM_LEADER)); // do another add to make sure flags are back to normal req.close(); h.close(); @@ -1068,57 +1250,55 @@ public void testExistOldBufferLog() throws Exception { req = req(); uhandler = req.getCore().getUpdateHandler(); - + UpdateLog updateLog = uhandler.getUpdateLog(); // TODO this can fail // assertFalse(updateLog.existOldBufferLog()); - - // Timeout for Q7 get replayed, because it was added on tlog, therefore it will be replayed on restart + + // Timeout for Q7 get replayed, because it was added on tlog, therefore it will be replayed on + // restart timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME); - timeout.waitFor("Timeout waiting for finish replay updates", + timeout.waitFor( + "Timeout waiting for finish replay updates", () -> h.getCore().getUpdateHandler().getUpdateLog().getState() == UpdateLog.State.ACTIVE); - - assertJQ(req("qt","/get", "id", "Q7") ,"/doc/id==Q7"); + + assertJQ(req("qt", 
"/get", "id", "Q7"), "/doc/id==Q7"); } finally { UpdateLog.testing_logReplayHook = null; UpdateLog.testing_logReplayFinishHook = null; req().close(); } - } - - // make sure that on a restart, versions don't start too low @Test public void testVersionsOnRestart() throws Exception { String v1 = getNextVersion(); String v2 = getNextVersion(); - + clearIndex(); assertU(commit()); - assertU(adoc("id","D1", "val_i",v1)); - assertU(adoc("id","D2", "val_i",v1)); + assertU(adoc("id", "D1", "val_i", v1)); + assertU(adoc("id", "D2", "val_i", v1)); assertU(commit()); - long D1Version1 = getVer(req("q","id:D1")); - long D2Version1 = getVer(req("q","id:D2")); + long D1Version1 = getVer(req("q", "id:D1")); + long D2Version1 = getVer(req("q", "id:D2")); h.close(); createCore(); - assertU(adoc("id","D1", "val_i",v2)); + assertU(adoc("id", "D1", "val_i", v2)); assertU(commit()); - long D1Version2 = getVer(req("q","id:D1")); - - assert(D1Version2 > D1Version1); + long D1Version2 = getVer(req("q", "id:D1")); - assertJQ(req("qt","/get", "getVersions","2") - ,"/versions==[" + D1Version2 + "," + D2Version1 + "]" - ); + assert (D1Version2 > D1Version1); + assertJQ( + req("qt", "/get", "getVersions", "2"), + "/versions==[" + D1Version2 + "," + D2Version1 + "]"); } // make sure that log isn't needlessly replayed after a clean close @@ -1127,29 +1307,29 @@ public void testCleanShutdown() throws Exception { final Semaphore logReplay = new Semaphore(0); final Semaphore logReplayFinish = new Semaphore(0); - UpdateLog.testing_logReplayHook = () -> { - try { - assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }; + UpdateLog.testing_logReplayHook = + () -> { + try { + assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }; UpdateLog.testing_logReplayFinishHook = () -> logReplayFinish.release(); - SolrQueryRequest req = req(); UpdateHandler uhandler = req.getCore().getUpdateHandler(); UpdateLog ulog = uhandler.getUpdateLog(); try { String v1 = getNextVersion(); - + clearIndex(); assertU(commit()); - assertU(adoc("id","E1", "val_i",v1)); - assertU(adoc("id","E2", "val_i",v1)); + assertU(adoc("id", "E1", "val_i", v1)); + assertU(adoc("id", "E2", "val_i", v1)); // set to a high enough number so this test won't hang on a bug logReplay.release(10); @@ -1158,7 +1338,7 @@ public void testCleanShutdown() throws Exception { createCore(); // make sure the docs got committed - assertJQ(req("q","*:*"),"/response/numFound==2"); + assertJQ(req("q", "*:*"), "/response/numFound==2"); // make sure no replay happened assertEquals(10, logReplay.availablePermits()); @@ -1170,11 +1350,10 @@ public void testCleanShutdown() throws Exception { req().close(); } } - - + private void addDocs(int nDocs, int start, LinkedList versions) throws Exception { - for (int i=0; i { - try { - assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }; + UpdateLog.testing_logReplayHook = + () -> { + try { + assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }; UpdateLog.testing_logReplayFinishHook = () -> logReplayFinish.release(); - clearIndex(); assertU(commit()); @@ -1219,17 +1398,27 @@ public void testRemoveOldLogs() throws Exception { LinkedList versions = new LinkedList<>(); int docsPerBatch = 3; - // we don't expect to reach numRecordsToKeep as yet, so the bottleneck 
is still number of logs to keep + // we don't expect to reach numRecordsToKeep as yet, so the bottleneck is still number of logs + // to keep int expectedToRetain = ulog.getMaxNumLogsToKeep() * docsPerBatch; int versExpected; - for (int i = 1; i <= ulog.getMaxNumLogsToKeep() + 2; i ++) { - addDocs(docsPerBatch, numIndexed, versions); numIndexed += docsPerBatch; - versExpected = Math.min(numIndexed, expectedToRetain + docsPerBatch); // not yet committed, so one more tlog could slip in - assertJQ(req("qt", "/get", "getVersions", "" + maxReq), "/versions==" + versions.subList(0, Math.min(maxReq, versExpected))); + for (int i = 1; i <= ulog.getMaxNumLogsToKeep() + 2; i++) { + addDocs(docsPerBatch, numIndexed, versions); + numIndexed += docsPerBatch; + versExpected = + Math.min( + numIndexed, + expectedToRetain + + docsPerBatch); // not yet committed, so one more tlog could slip in + assertJQ( + req("qt", "/get", "getVersions", "" + maxReq), + "/versions==" + versions.subList(0, Math.min(maxReq, versExpected))); assertU(commit()); versExpected = Math.min(numIndexed, expectedToRetain); - assertJQ(req("qt", "/get", "getVersions", "" + maxReq), "/versions==" + versions.subList(0, Math.min(maxReq, versExpected))); + assertJQ( + req("qt", "/get", "getVersions", "" + maxReq), + "/versions==" + versions.subList(0, Math.min(maxReq, versExpected))); assertEquals(Math.min(i, ulog.getMaxNumLogsToKeep()), ulog.getLogList(logDir).length); } @@ -1237,58 +1426,83 @@ public void testRemoveOldLogs() throws Exception { // about to commit a lot of docs, so numRecordsToKeep becomes the bottleneck expectedToRetain = ulog.getNumRecordsToKeep(); - addDocs(docsPerBatch, numIndexed, versions); numIndexed+=docsPerBatch; + addDocs(docsPerBatch, numIndexed, versions); + numIndexed += docsPerBatch; versExpected = Math.min(numIndexed, expectedToRetain); - assertJQ(req("qt", "/get", "getVersions", "" + maxReq), "/versions==" + versions.subList(0, Math.min(maxReq, versExpected))); + assertJQ( + req("qt", "/get", "getVersions", "" + maxReq), + "/versions==" + versions.subList(0, Math.min(maxReq, versExpected))); assertU(commit()); expectedToRetain = expectedToRetain - 1; // we lose a log entry due to the commit record versExpected = Math.min(numIndexed, expectedToRetain); - assertJQ(req("qt", "/get", "getVersions", "" + maxReq), "/versions==" + versions.subList(0, Math.min(maxReq, versExpected))); + assertJQ( + req("qt", "/get", "getVersions", "" + maxReq), + "/versions==" + versions.subList(0, Math.min(maxReq, versExpected))); // previous logs should be gone now assertEquals(1, ulog.getLogList(logDir).length); - addDocs(1, numIndexed, versions); numIndexed+=1; + addDocs(1, numIndexed, versions); + numIndexed += 1; h.close(); - createCore(); // trigger recovery, make sure that tlog reference handling is correct + createCore(); // trigger recovery, make sure that tlog reference handling is correct // test we can get versions while replay is happening - assertJQ(req("qt", "/get", "getVersions", "" + maxReq), "/versions==" + versions.subList(0, Math.min(maxReq, expectedToRetain))); + assertJQ( + req("qt", "/get", "getVersions", "" + maxReq), + "/versions==" + versions.subList(0, Math.min(maxReq, expectedToRetain))); logReplay.release(1000); assertTrue(logReplayFinish.tryAcquire(timeout, TimeUnit.SECONDS)); - expectedToRetain = expectedToRetain - 1; // we lose a log entry due to the commit record made by recovery - assertJQ(req("qt","/get", "getVersions",""+maxReq), "/versions==" + 
versions.subList(0,Math.min(maxReq,expectedToRetain))); + expectedToRetain = + expectedToRetain - 1; // we lose a log entry due to the commit record made by recovery + assertJQ( + req("qt", "/get", "getVersions", "" + maxReq), + "/versions==" + versions.subList(0, Math.min(maxReq, expectedToRetain))); docsPerBatch = ulog.getNumRecordsToKeep() + 20; // about to commit a lot of docs, so numRecordsToKeep becomes the bottleneck expectedToRetain = ulog.getNumRecordsToKeep(); - addDocs(docsPerBatch, numIndexed, versions); numIndexed+=docsPerBatch; - assertJQ(req("qt","/get", "getVersions",""+maxReq), "/versions==" + versions.subList(0,Math.min(maxReq,expectedToRetain))); + addDocs(docsPerBatch, numIndexed, versions); + numIndexed += docsPerBatch; + assertJQ( + req("qt", "/get", "getVersions", "" + maxReq), + "/versions==" + versions.subList(0, Math.min(maxReq, expectedToRetain))); assertU(commit()); expectedToRetain = expectedToRetain - 1; // we lose a log entry due to the commit record - assertJQ(req("qt","/get", "getVersions",""+maxReq), "/versions==" + versions.subList(0,Math.min(maxReq,expectedToRetain))); + assertJQ( + req("qt", "/get", "getVersions", "" + maxReq), + "/versions==" + versions.subList(0, Math.min(maxReq, expectedToRetain))); // previous logs should be gone now assertEquals(1, ulog.getLogList(logDir).length); // - // test that a corrupt tlog file doesn't stop us from coming up, or seeing versions before that tlog file. + // test that a corrupt tlog file doesn't stop us from coming up, or seeing versions before + // that tlog file. // - addDocs(1, numIndexed, new LinkedList()); // don't add this to the versions list because we are going to lose it... + addDocs( + 1, + numIndexed, + new LinkedList< + Long>()); // don't add this to the versions list because we are going to lose it... h.close(); files = ulog.getLogList(logDir); Arrays.sort(files); - try (RandomAccessFile raf = new RandomAccessFile(new File(logDir, files[files.length - 1]), "rw")) { - raf.writeChars("This is a trashed log file that really shouldn't work at all, but we'll see..."); + try (RandomAccessFile raf = + new RandomAccessFile(new File(logDir, files[files.length - 1]), "rw")) { + raf.writeChars( + "This is a trashed log file that really shouldn't work at all, but we'll see..."); } ignoreException("Failure to open existing"); createCore(); // we should still be able to get the list of versions (not including the trashed log file) - assertJQ(req("qt", "/get", "getVersions", "" + maxReq), "/versions==" + versions.subList(0, Math.min(maxReq, expectedToRetain))); + assertJQ( + req("qt", "/get", "getVersions", "" + maxReq), + "/versions==" + versions.subList(0, Math.min(maxReq, expectedToRetain))); resetExceptionIgnores(); } finally { @@ -1298,8 +1512,9 @@ public void testRemoveOldLogs() throws Exception { } // - // test that a partially written last tlog entry (that will cause problems for both reverse reading and for - // log replay) doesn't stop us from coming up, and from recovering the documents that were not cut off. + // test that a partially written last tlog entry (that will cause problems for both reverse + // reading and for log replay) doesn't stop us from coming up, and from recovering the documents + // that were not cut off. 
// @Test public void testTruncatedLog() throws Exception { @@ -1308,13 +1523,14 @@ public void testTruncatedLog() throws Exception { final Semaphore logReplay = new Semaphore(0); final Semaphore logReplayFinish = new Semaphore(0); - UpdateLog.testing_logReplayHook = () -> { - try { - assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }; + UpdateLog.testing_logReplayHook = + () -> { + try { + assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }; UpdateLog.testing_logReplayFinishHook = () -> logReplayFinish.release(); @@ -1324,26 +1540,29 @@ public void testTruncatedLog() throws Exception { clearIndex(); assertU(commit()); - assertU(adoc("id","F1")); - assertU(adoc("id","F2")); - assertU(adoc("id","F3")); + assertU(adoc("id", "F1")); + assertU(adoc("id", "F2")); + assertU(adoc("id", "F3")); h.close(); String[] files = ulog.getLogList(logDir); Arrays.sort(files); - try (RandomAccessFile raf = new RandomAccessFile(new File(logDir, files[files.length - 1]), "rw")) { - raf.seek(raf.length()); // seek to end + try (RandomAccessFile raf = + new RandomAccessFile(new File(logDir, files[files.length - 1]), "rw")) { + raf.seek(raf.length()); // seek to end raf.writeLong(0xffffffffffffffffL); - raf.writeChars("This should be appended to a good log file, representing a bad partially written record."); + raf.writeChars( + "This should be appended to a good log file, representing a bad partially written record."); } logReplay.release(1000); logReplayFinish.drainPermits(); - ignoreException("OutOfBoundsException"); // this is what the corrupted log currently produces... subject to change. + // this is what the corrupted log currently produces... subject to change. 
+ ignoreException("OutOfBoundsException"); createCore(); assertTrue(logReplayFinish.tryAcquire(timeout, TimeUnit.SECONDS)); resetExceptionIgnores(); - assertJQ(req("q","*:*") ,"/response/numFound==3"); + assertJQ(req("q", "*:*"), "/response/numFound==3"); // // Now test that the bad log file doesn't mess up retrieving latest versions @@ -1352,14 +1571,20 @@ public void testTruncatedLog() throws Exception { String v104 = getNextVersion(); String v105 = getNextVersion(); String v106 = getNextVersion(); - - updateJ(jsonAdd(sdoc("id","F4", "_version_",v104)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","F5", "_version_",v105)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","F6", "_version_",v106)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - // This currently skips the bad log file and also returns the version of the clearIndex (del *:*) + updateJ( + jsonAdd(sdoc("id", "F4", "_version_", v104)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "F5", "_version_", v105)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "F6", "_version_", v106)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + + // This currently skips the bad log file and also returns the version of the clearIndex (del + // *:*) // assertJQ(req("qt","/get", "getVersions","6"), "/versions==[106,105,104]"); - assertJQ(req("qt","/get", "getVersions","3"), "/versions==["+v106+","+v105+","+v104+"]"); + assertJQ( + req("qt", "/get", "getVersions", "3"), + "/versions==[" + v106 + "," + v105 + "," + v104 + "]"); } finally { UpdateLog.testing_logReplayHook = null; @@ -1367,7 +1592,6 @@ public void testTruncatedLog() throws Exception { } } - // // test that a corrupt tlog doesn't stop us from coming up // @@ -1382,28 +1606,28 @@ public void testCorruptLog() throws Exception { clearIndex(); assertU(commit()); - assertU(adoc("id","G1")); - assertU(adoc("id","G2")); - assertU(adoc("id","G3")); + assertU(adoc("id", "G1")); + assertU(adoc("id", "G2")); + assertU(adoc("id", "G3")); h.close(); - String[] files = ulog.getLogList(logDir); Arrays.sort(files); - try (RandomAccessFile raf = new RandomAccessFile(new File(logDir, files[files.length - 1]), "rw")) { + try (RandomAccessFile raf = + new RandomAccessFile(new File(logDir, files[files.length - 1]), "rw")) { long len = raf.length(); - raf.seek(0); // seek to start - raf.write(new byte[(int) len]); // zero out file + raf.seek(0); // seek to start + raf.write(new byte[(int) len]); // zero out file } - - ignoreException("Failure to open existing log file"); // this is what the corrupted log currently produces... subject to change. + // this is what the corrupted log currently produces... subject to change. 
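// The corruption recipe behind testTruncatedLog and testCorruptLog, reduced to a runnable
// sketch: append a bogus trailing record to fake a partially written entry, or zero the
// whole file to fake total corruption. The temp-file name is made up; only the
// RandomAccessFile calls mirror what the tests above do.
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

public class TlogCorruptionDemo {
  public static void main(String[] args) throws IOException {
    File log = File.createTempFile("tlog", ".bin");
    Files.write(log.toPath(), "a good record\n".getBytes(StandardCharsets.UTF_8));

    // testTruncatedLog style: keep the good bytes, append a garbage tail.
    try (RandomAccessFile raf = new RandomAccessFile(log, "rw")) {
      raf.seek(raf.length()); // seek to end, past the valid records
      raf.writeLong(0xffffffffffffffffL); // bogus marker/length word
      raf.writeChars("a bad, partially written record");
    }

    // testCorruptLog style: zero out the entire file.
    try (RandomAccessFile raf = new RandomAccessFile(log, "rw")) {
      long len = raf.length();
      raf.seek(0); // seek to start
      raf.write(new byte[(int) len]); // overwrite everything with zeros
    }
    System.out.println("corrupted " + log + " (" + log.length() + " bytes)");
  }
}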
+ ignoreException("Failure to open existing log file"); createCore(); resetExceptionIgnores(); // just make sure it responds - assertJQ(req("q","*:*") ,"/response/numFound==0"); + assertJQ(req("q", "*:*"), "/response/numFound==0"); // // Now test that the bad log file doesn't mess up retrieving latest versions @@ -1412,19 +1636,26 @@ public void testCorruptLog() throws Exception { String v105 = getNextVersion(); String v106 = getNextVersion(); - updateJ(jsonAdd(sdoc("id","G4", "_version_",v104)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","G5", "_version_",v105)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - updateJ(jsonAdd(sdoc("id","G6", "_version_",v106)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "G4", "_version_", v104)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "G5", "_version_", v105)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + updateJ( + jsonAdd(sdoc("id", "G6", "_version_", v106)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); - // This currently skips the bad log file and also returns the version of the clearIndex (del *:*) - assertJQ(req("qt","/get", "getVersions","3"), "/versions==["+v106+","+v105+","+v104+"]"); + // This currently skips the bad log file and also returns the version of the clearIndex (del + // *:*) + assertJQ( + req("qt", "/get", "getVersions", "3"), + "/versions==[" + v106 + "," + v105 + "," + v104 + "]"); assertU(commit()); - assertJQ(req("q","*:*") ,"/response/numFound==3"); + assertJQ(req("q", "*:*"), "/response/numFound==3"); // This messes up some other tests (on windows) if we don't remove the bad log. - // This *should* hopefully just be because the tests are too fragile and not because of real bugs - but it should be investigated further. + // This *should* hopefully just be because the tests are too fragile and not because of real + // bugs - but it should be investigated further. 
deleteLogs(); } finally { @@ -1433,9 +1664,8 @@ public void testCorruptLog() throws Exception { } } - - - // in rare circumstances, two logs can be left uncapped (lacking a commit at the end signifying that all the content in the log was committed) + // in rare circumstances, two logs can be left uncapped (lacking a commit at the end signifying + // that all the content in the log was committed) @Test public void testRecoveryMultipleLogs() throws Exception { try { @@ -1443,13 +1673,14 @@ public void testRecoveryMultipleLogs() throws Exception { final Semaphore logReplay = new Semaphore(0); final Semaphore logReplayFinish = new Semaphore(0); - UpdateLog.testing_logReplayHook = () -> { - try { - assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }; + UpdateLog.testing_logReplayHook = + () -> { + try { + assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }; UpdateLog.testing_logReplayFinishHook = () -> logReplayFinish.release(); @@ -1459,47 +1690,58 @@ public void testRecoveryMultipleLogs() throws Exception { clearIndex(); assertU(commit()); - assertU(adoc("id","AAAAAA")); - assertU(adoc("id","BBBBBB")); - assertU(adoc("id","CCCCCC")); + assertU(adoc("id", "AAAAAA")); + assertU(adoc("id", "BBBBBB")); + assertU(adoc("id", "CCCCCC")); h.close(); String[] files = ulog.getLogList(logDir); Arrays.sort(files); - String fname = files[files.length-1]; + String fname = files[files.length - 1]; byte[] content; try (RandomAccessFile raf = new RandomAccessFile(new File(logDir, fname), "rw")) { - raf.seek(raf.length()); // seek to end + raf.seek(raf.length()); // seek to end raf.writeLong(0xffffffffffffffffL); - raf.writeChars("This should be appended to a good log file, representing a bad partially written record."); + raf.writeChars( + "This should be appended to a good log file, representing a bad partially written record."); content = new byte[(int) raf.length()]; raf.seek(0); raf.readFully(content); } - // Now make a newer log file with just the IDs changed. NOTE: this may not work if log format changes too much! - findReplace("AAAAAA".getBytes(StandardCharsets.UTF_8), "aaaaaa".getBytes(StandardCharsets.UTF_8), content); - findReplace("BBBBBB".getBytes(StandardCharsets.UTF_8), "bbbbbb".getBytes(StandardCharsets.UTF_8), content); - findReplace("CCCCCC".getBytes(StandardCharsets.UTF_8), "cccccc".getBytes(StandardCharsets.UTF_8), content); + // Now make a newer log file with just the IDs changed. NOTE: this may not work if log format + // changes too much! + findReplace( + "AAAAAA".getBytes(StandardCharsets.UTF_8), + "aaaaaa".getBytes(StandardCharsets.UTF_8), + content); + findReplace( + "BBBBBB".getBytes(StandardCharsets.UTF_8), + "bbbbbb".getBytes(StandardCharsets.UTF_8), + content); + findReplace( + "CCCCCC".getBytes(StandardCharsets.UTF_8), + "cccccc".getBytes(StandardCharsets.UTF_8), + content); // WARNING... 
assumes format of .00000n where n is less than 9 long logNumber = Long.parseLong(fname.substring(fname.lastIndexOf(".") + 1)); - String fname2 = String.format(Locale.ROOT, - UpdateLog.LOG_FILENAME_PATTERN, - UpdateLog.TLOG_NAME, - logNumber + 1); + String fname2 = + String.format( + Locale.ROOT, UpdateLog.LOG_FILENAME_PATTERN, UpdateLog.TLOG_NAME, logNumber + 1); try (RandomAccessFile raf = new RandomAccessFile(new File(logDir, fname2), "rw")) { raf.write(content); } logReplay.release(1000); logReplayFinish.drainPermits(); - ignoreException("OutOfBoundsException"); // this is what the corrupted log currently produces... subject to change. + // this is what the corrupted log currently produces... subject to change. + ignoreException("OutOfBoundsException"); createCore(); assertTrue(logReplayFinish.tryAcquire(timeout, TimeUnit.SECONDS)); resetExceptionIgnores(); - assertJQ(req("q","*:*") ,"/response/numFound==6"); + assertJQ(req("q", "*:*"), "/response/numFound==6"); } finally { UpdateLog.testing_logReplayHook = null; @@ -1516,49 +1758,59 @@ public void testLogReplayWithInPlaceUpdatesAndDeletes() throws Exception { final Semaphore logReplay = new Semaphore(0); final Semaphore logReplayFinish = new Semaphore(0); - UpdateLog.testing_logReplayHook = () -> { - try { - assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }; + UpdateLog.testing_logReplayHook = + () -> { + try { + assertTrue(logReplay.tryAcquire(timeout, TimeUnit.SECONDS)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }; UpdateLog.testing_logReplayFinishHook = () -> logReplayFinish.release(); - clearIndex(); assertU(commit()); Deque versions = new ArrayDeque<>(); versions.addFirst(addAndGetVersion(sdoc("id", "A1"), null)); - + // DBQ of updated document using id versions.addFirst(addAndGetVersion(sdoc("id", "A2", "val_i_dvo", "1"), null)); - versions.addFirst(addAndGetVersion(sdoc("id", "A2", "val_i_dvo", map("set", 2)), null)); // in-place update + versions.addFirst( + addAndGetVersion(sdoc("id", "A2", "val_i_dvo", map("set", 2)), null)); // in-place update versions.addFirst(deleteByQueryAndGetVersion("id:A2", null)); // DBQ of updated document using updated value versions.addFirst(addAndGetVersion(sdoc("id", "A3", "val_i_dvo", "101"), null)); - versions.addFirst(addAndGetVersion(sdoc("id", "A3", "val_i_dvo", map("set", 102)), null)); // in-place update + versions.addFirst( + addAndGetVersion( + sdoc("id", "A3", "val_i_dvo", map("set", 102)), null)); // in-place update versions.addFirst(deleteByQueryAndGetVersion("val_i_dvo:102", null)); // DBQ using an intermediate update value (shouldn't delete anything) versions.addFirst(addAndGetVersion(sdoc("id", "A4", "val_i_dvo", "200"), null)); - versions.addFirst(addAndGetVersion(sdoc("id", "A4", "val_i_dvo", map("inc", "1")), null)); // in-place update - versions.addFirst(addAndGetVersion(sdoc("id", "A4", "val_i_dvo", map("inc", "1")), null)); // in-place update + versions.addFirst( + addAndGetVersion( + sdoc("id", "A4", "val_i_dvo", map("inc", "1")), null)); // in-place update + versions.addFirst( + addAndGetVersion( + sdoc("id", "A4", "val_i_dvo", map("inc", "1")), null)); // in-place update versions.addFirst(deleteByQueryAndGetVersion("val_i_dvo:201", null)); // DBI of updated document versions.addFirst(addAndGetVersion(sdoc("id", "A5", "val_i_dvo", "300"), null)); - versions.addFirst(addAndGetVersion(sdoc("id", "A5", "val_i_dvo", map("inc", "1")), null)); // in-place update - 
versions.addFirst(addAndGetVersion(sdoc("id", "A5", "val_i_dvo", map("inc", "1")), null)); // in-place update + versions.addFirst( + addAndGetVersion( + sdoc("id", "A5", "val_i_dvo", map("inc", "1")), null)); // in-place update + versions.addFirst( + addAndGetVersion( + sdoc("id", "A5", "val_i_dvo", map("inc", "1")), null)); // in-place update versions.addFirst(deleteAndGetVersion("A5", null)); - - assertJQ(req("q","*:*"),"/response/numFound==0"); - - assertJQ(req("qt","/get", "getVersions",""+versions.size()) ,"/versions==" + versions); + assertJQ(req("q", "*:*"), "/response/numFound==0"); + + assertJQ(req("qt", "/get", "getVersions", "" + versions.size()), "/versions==" + versions); h.close(); createCore(); @@ -1568,31 +1820,33 @@ public void testLogReplayWithInPlaceUpdatesAndDeletes() throws Exception { // verify that previous close didn't do a commit // recovery should be blocked by our hook - assertJQ(req("q","*:*") ,"/response/numFound==0"); + assertJQ(req("q", "*:*"), "/response/numFound==0"); // make sure we can still access versions after a restart - assertJQ(req("qt","/get", "getVersions",""+versions.size()),"/versions==" + versions); + assertJQ(req("qt", "/get", "getVersions", "" + versions.size()), "/versions==" + versions); // unblock recovery logReplay.release(1000); // make sure we can still access versions during recovery - assertJQ(req("qt","/get", "getVersions",""+versions.size()),"/versions==" + versions); + assertJQ(req("qt", "/get", "getVersions", "" + versions.size()), "/versions==" + versions); // wait until recovery has finished assertTrue(logReplayFinish.tryAcquire(timeout, TimeUnit.SECONDS)); - assertJQ(req("q","val_i_dvo:202") ,"/response/numFound==1"); // assert that in-place update is retained + assertJQ( + req("q", "val_i_dvo:202"), + "/response/numFound==1"); // assert that in-place update is retained - assertJQ(req("q","*:*") ,"/response/numFound==2"); - assertJQ(req("q","id:A2") ,"/response/numFound==0"); - assertJQ(req("q","id:A3") ,"/response/numFound==0"); - assertJQ(req("q","id:A4") ,"/response/numFound==1"); - assertJQ(req("q","id:A5") ,"/response/numFound==0"); + assertJQ(req("q", "*:*"), "/response/numFound==2"); + assertJQ(req("q", "id:A2"), "/response/numFound==0"); + assertJQ(req("q", "id:A3"), "/response/numFound==0"); + assertJQ(req("q", "id:A4"), "/response/numFound==1"); + assertJQ(req("q", "id:A5"), "/response/numFound==0"); // make sure we can still access versions after recovery - assertJQ(req("qt","/get", "getVersions",""+versions.size()) ,"/versions==" + versions); + assertJQ(req("qt", "/get", "getVersions", "" + versions.size()), "/versions==" + versions); - assertU(adoc("id","A10")); + assertU(adoc("id", "A10")); h.close(); createCore(); @@ -1601,13 +1855,13 @@ public void testLogReplayWithInPlaceUpdatesAndDeletes() throws Exception { // wait until recovery has finished assertTrue(logReplayFinish.tryAcquire(timeout, TimeUnit.SECONDS)); - assertJQ(req("q","*:*") ,"/response/numFound==3"); - assertJQ(req("q","id:A2") ,"/response/numFound==0"); - assertJQ(req("q","id:A3") ,"/response/numFound==0"); - assertJQ(req("q","id:A4") ,"/response/numFound==1"); - assertJQ(req("q","id:A5") ,"/response/numFound==0"); - assertJQ(req("q","id:A10"),"/response/numFound==1"); - + assertJQ(req("q", "*:*"), "/response/numFound==3"); + assertJQ(req("q", "id:A2"), "/response/numFound==0"); + assertJQ(req("q", "id:A3"), "/response/numFound==0"); + assertJQ(req("q", "id:A4"), "/response/numFound==1"); + assertJQ(req("q", "id:A5"), "/response/numFound==0"); + 
assertJQ(req("q", "id:A10"), "/response/numFound==1"); + // no updates, so insure that recovery does not run h.close(); int permits = logReplay.availablePermits(); @@ -1615,41 +1869,45 @@ public void testLogReplayWithInPlaceUpdatesAndDeletes() throws Exception { // Solr should kick this off now // h.getCore().getUpdateHandler().getUpdateLog().recoverFromLog(); - assertJQ(req("q","*:*") ,"/response/numFound==3"); - assertJQ(req("q","val_i_dvo:202") ,"/response/numFound==1"); // assert that in-place update is retained - assertJQ(req("q","id:A2") ,"/response/numFound==0"); - assertJQ(req("q","id:A3") ,"/response/numFound==0"); - assertJQ(req("q","id:A4") ,"/response/numFound==1"); - assertJQ(req("q","id:A5") ,"/response/numFound==0"); - assertJQ(req("q","id:A10"),"/response/numFound==1"); + assertJQ(req("q", "*:*"), "/response/numFound==3"); + assertJQ( + req("q", "val_i_dvo:202"), + "/response/numFound==1"); // assert that in-place update is retained + assertJQ(req("q", "id:A2"), "/response/numFound==0"); + assertJQ(req("q", "id:A3"), "/response/numFound==0"); + assertJQ(req("q", "id:A4"), "/response/numFound==1"); + assertJQ(req("q", "id:A5"), "/response/numFound==0"); + assertJQ(req("q", "id:A10"), "/response/numFound==1"); Thread.sleep(100); - assertEquals(permits, logReplay.availablePermits()); // no updates, so insure that recovery didn't run + assertEquals( + permits, logReplay.availablePermits()); // no updates, so insure that recovery didn't run - assertEquals(UpdateLog.State.ACTIVE, h.getCore().getUpdateHandler().getUpdateLog().getState()); + assertEquals( + UpdateLog.State.ACTIVE, h.getCore().getUpdateHandler().getUpdateLog().getState()); } finally { UpdateLog.testing_logReplayHook = null; UpdateLog.testing_logReplayFinishHook = null; } - } // NOTE: replacement must currently be same size private static void findReplace(byte[] from, byte[] to, byte[] data) { int idx = -from.length; - for(;;) { - idx = indexOf(from, data, idx + from.length); // skip over previous match + for (; ; ) { + idx = indexOf(from, data, idx + from.length); // skip over previous match if (idx < 0) break; - for (int i=0; i 0) { - doc = (Map)lst.get(0); + doc = (Map) lst.get(0); } } else if (rsp.containsKey("response")) { @SuppressWarnings({"rawtypes"}) - Map responseMap = (Map)rsp.get("response"); + Map responseMap = (Map) rsp.get("response"); @SuppressWarnings({"rawtypes"}) - List lst = (List)responseMap.get("docs"); + List lst = (List) responseMap.get("docs"); if (lst.size() > 0) { - doc = (Map)lst.get(0); + doc = (Map) lst.get(0); } } if (doc == null) return null; - return (Long)doc.get("_version_"); + return (Long) doc.get("_version_"); } - - static class VersionProvider{ + + static class VersionProvider { private static long version = 0; - + static String getNextVersion() { return Long.toString(version++); } } } - diff --git a/solr/core/src/test/org/apache/solr/search/TestReload.java b/solr/core/src/test/org/apache/solr/search/TestReload.java index 13d78fcc178..ecf88cfaeda 100644 --- a/solr/core/src/test/org/apache/solr/search/TestReload.java +++ b/solr/core/src/test/org/apache/solr/search/TestReload.java @@ -16,56 +16,54 @@ */ package org.apache.solr.search; +import java.util.Random; import org.junit.BeforeClass; import org.junit.Test; -import java.util.Random; public class TestReload extends TestRTGBase { @BeforeClass public static void beforeClass() throws Exception { // useFactory(null); // force FS directory - initCore("solrconfig-tlog.xml","schema15.xml"); + initCore("solrconfig-tlog.xml", 
"schema15.xml"); } @Test public void testGetRealtimeReload() throws Exception { clearIndex(); assertU(commit()); - long version = addAndGetVersion(sdoc("id","1") , null); + long version = addAndGetVersion(sdoc("id", "1"), null); - assertU(commit("softCommit","true")); // should cause a RTG searcher to be opened + assertU(commit("softCommit", "true")); // should cause a RTG searcher to be opened - assertJQ(req("qt","/get","id","1", "fl", "id,_version_") - ,"=={'doc':{'id':'1','_version_':" + version + "}}" - ); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id,_version_"), + "=={'doc':{'id':'1','_version_':" + version + "}}"); h.reload(); - assertJQ(req("qt","/get","id","1", "fl", "id,_version_") - ,"=={'doc':{'id':'1','_version_':" + version + "}}" - ); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id,_version_"), + "=={'doc':{'id':'1','_version_':" + version + "}}"); - assertU(commit("softCommit","true")); // open a normal (caching) NRT searcher - - assertJQ(req("q","id:1") - ,"/response/numFound==1" - ); + assertU(commit("softCommit", "true")); // open a normal (caching) NRT searcher + assertJQ(req("q", "id:1"), "/response/numFound==1"); Random rand = random(); int iter = atLeast(20); - for (int i=0; i 0) { - int oper = rand.nextInt(100); - - if (oper < commitPercent) { - if (areCommitting.compareAndSet(false, true)) { - Map newCommittedModel; - long version; - - synchronized (TestReloadDeadlock.this) { - newCommittedModel = new HashMap<>(model); // take a snapshot - version = snapshotCount++; - } - - ifVerbose("hardCommit start"); - assertU(commit()); - ifVerbose("hardCommit end"); - - synchronized (TestReloadDeadlock.this) { - // install this model snapshot only if it's newer than the current one - if (version >= committedModelClock) { - ifVerbose("installing new committedModel version=" + committedModelClock); - committedModel = newCommittedModel; - committedModelClock = version; + Thread thread = + new Thread("WRITER" + i) { + Random rand = new Random(random().nextInt()); + + @Override + public void run() { + try { + while (reloads.get() > 0) { + int oper = rand.nextInt(100); + + if (oper < commitPercent) { + if (areCommitting.compareAndSet(false, true)) { + Map newCommittedModel; + long version; + + synchronized (TestReloadDeadlock.this) { + newCommittedModel = new HashMap<>(model); // take a snapshot + version = snapshotCount++; + } + + ifVerbose("hardCommit start"); + assertU(commit()); + ifVerbose("hardCommit end"); + + synchronized (TestReloadDeadlock.this) { + // install this model snapshot only if it's newer than the current one + if (version >= committedModelClock) { + ifVerbose("installing new committedModel version=" + committedModelClock); + committedModel = newCommittedModel; + committedModelClock = version; + } + } + areCommitting.set(false); } + continue; } - areCommitting.set(false); - } - continue; - } + int id; - int id; - - if (rand.nextBoolean()) { - id = rand.nextInt(ndocs); - } else { - id = lastId; // reuse the last ID half of the time to force more race conditions - } + if (rand.nextBoolean()) { + id = rand.nextInt(ndocs); + } else { + id = lastId; // reuse the last ID half of the time to force more race conditions + } - // set the lastId before we actually change it sometimes to try and - // uncover more race conditions between writing and reading - boolean before = rand.nextBoolean(); - if (before) { - lastId = id; - } + // set the lastId before we actually change it sometimes to try and + // uncover more race conditions between writing and reading + 
boolean before = rand.nextBoolean(); + if (before) { + lastId = id; + } - DocInfo info = model.get(id); + DocInfo info = model.get(id); - long val = info.val; - long nextVal = Math.abs(val) + 1; + long val = info.val; + long nextVal = Math.abs(val) + 1; - long version = testVersion.incrementAndGet(); + long version = testVersion.incrementAndGet(); - // yield after getting the next version to increase the odds of updates happening out of order - if (rand.nextBoolean()) Thread.yield(); + // yield after getting the next version to increase the odds of updates happening + // out of order + if (rand.nextBoolean()) Thread.yield(); - if (oper < commitPercent + deleteByQueryPercent) { - deleteByQuery(id, nextVal, version); - } else { - addDoc(id, nextVal, version); - } + if (oper < commitPercent + deleteByQueryPercent) { + deleteByQuery(id, nextVal, version); + } else { + addDoc(id, nextVal, version); + } - if (!before) { - lastId = id; + if (!before) { + lastId = id; + } + } + } catch (Throwable e) { + reloads.set(-1L); + log.error("", e); + throw new RuntimeException(e); } } - } catch (Throwable e) { - reloads.set(-1L); - log.error("", e); - throw new RuntimeException(e); - } - } - }; + }; threads.add(thread); } @@ -175,7 +183,8 @@ public void run() { thread.start(); } - // The reload operation really doesn't need to happen from multiple threads, we just want it firing pretty often. + // The reload operation really doesn't need to happen from multiple threads, we just want it + // firing pretty often. while (reloads.get() > 0) { Thread.sleep(10 + random().nextInt(250)); reloads.decrementAndGet(); @@ -190,15 +199,25 @@ public void run() { fail("Shouldn't have sat around here this long waiting for the threads to join."); } for (Thread thread : threads) { // Probably a silly test, but what the heck. 
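Stepping back from the reindented writer-thread code above: its commit path uses a snapshot-plus-version-clock pattern so a committing thread never installs a stale view of the model. A sketch of just that pattern, with model, committedModel, snapshotCount, and committedModelClock standing in for the test's fields of the same names (the real code synchronizes on TestReloadDeadlock.this; lock is a stand-in here):

    Map<Integer, DocInfo> newCommittedModel;
    long version;
    synchronized (lock) {
      newCommittedModel = new HashMap<>(model); // snapshot the uncommitted model
      version = snapshotCount++;                // stamp it with a monotonic clock
    }
    assertU(commit()); // make the snapshot visible to searchers
    synchronized (lock) {
      if (version >= committedModelClock) {     // drop snapshots that lost the race
        committedModel = newCommittedModel;
        committedModelClock = version;
      }
    }

The areCommitting compare-and-set already serializes committers; the clock check is the belt-and-braces guard that the test's own comment ("install this model snapshot only if it's newer than the current one") describes.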
- assertFalse("All threads should be dead, but at least thread " + thread.getName() + " is not", thread.isAlive()); + assertFalse( + "All threads should be dead, but at least thread " + thread.getName() + " is not", + thread.isAlive()); } } private void addDoc(int id, long nextVal, long version) throws Exception { ifVerbose("adding id", id, "val=", nextVal, "version", version); - Long returnedVersion = addAndGetVersion(sdoc("id", Integer.toString(id), FIELD, Long.toString(nextVal), - "_version_", Long.toString(version)), params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + Long returnedVersion = + addAndGetVersion( + sdoc( + "id", + Integer.toString(id), + FIELD, + Long.toString(nextVal), + "_version_", + Long.toString(version)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); if (returnedVersion != null) { assertEquals(version, returnedVersion.longValue()); } @@ -217,8 +236,10 @@ private void addDoc(int id, long nextVal, long version) throws Exception { private void deleteByQuery(int id, long nextVal, long version) throws Exception { ifVerbose("deleteByQuery id", id, "val=", nextVal, "version", version); - Long returnedVersion = deleteByQueryAndGetVersion("id:" + Integer.toString(id), - params("_version_", Long.toString(-version), DISTRIB_UPDATE_PARAM, FROM_LEADER)); + Long returnedVersion = + deleteByQueryAndGetVersion( + "id:" + Integer.toString(id), + params("_version_", Long.toString(-version), DISTRIB_UPDATE_PARAM, FROM_LEADER)); // TODO: returning versions for these types of updates is redundant // but if we do return, they had better be equal diff --git a/solr/core/src/test/org/apache/solr/search/TestSearchPerf.java b/solr/core/src/test/org/apache/solr/search/TestSearchPerf.java index 08167b61ae4..7ebff71b0c5 100644 --- a/solr/core/src/test/org/apache/solr/search/TestSearchPerf.java +++ b/solr/core/src/test/org/apache/solr/search/TestSearchPerf.java @@ -16,37 +16,33 @@ */ package org.apache.solr.search; +import java.io.IOException; +import java.util.*; import org.apache.lucene.index.Term; import org.apache.lucene.search.*; import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.common.SolrInputDocument; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.response.SolrQueryResponse; -import org.apache.solr.update.processor.UpdateRequestProcessorChain; -import org.apache.solr.update.processor.UpdateRequestProcessor; import org.apache.solr.update.AddUpdateCommand; -import org.apache.solr.common.SolrInputDocument; +import org.apache.solr.update.processor.UpdateRequestProcessor; +import org.apache.solr.update.processor.UpdateRequestProcessorChain; import org.apache.solr.util.RTimer; import org.junit.BeforeClass; -import java.util.*; -import java.io.IOException; - -/** - * - */ +/** */ public class TestSearchPerf extends SolrTestCaseJ4 { - @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema11.xml"); } - @Override public void setUp() throws Exception { super.setUp(); } + @Override public void tearDown() throws Exception { super.tearDown(); @@ -56,24 +52,28 @@ String t(int tnum) { return String.format(Locale.ROOT, "%08d", tnum); } - Random r = new Random(0); // specific seed for reproducible perf testing + Random r = new Random(0); // specific seed for reproducible perf testing int nDocs; + void createIndex(int nDocs) { this.nDocs = nDocs; assertU(delQ("*:*")); - for (int i=0; i fieldSet = new HashSet<>(Arrays.asList(fields)); @@ -90,28 +90,27 @@ void createIndex2(int nDocs, String... 
fields) throws IOException { boolean foo8_s = fieldSet.contains("foo8_s"); boolean t10_100_ws = fieldSet.contains("t10_100_ws"); - - for (int i=0; iis in fact an underlying index change, we want to - * assert that a new searcher will in fact be opened. + *

Likewise, if there is in fact an underlying index change, we want to assert that a + * new searcher will in fact be opened. */ public class TestSearcherReuse extends SolrTestCaseJ4 { @@ -43,9 +42,8 @@ public class TestSearcherReuse extends SolrTestCaseJ4 { private static final String confPath = collection + "/conf"; /** - * We're using a Managed schema so we can confirm that opening a new searcher - * after a schema modification results in getting a new searcher with the new - * schema linked to it. + * We're using a Managed schema so we can confirm that opening a new searcher after a schema + * modification results in getting a new searcher with the new schema linked to it. */ @BeforeClass private static void setupTempDirAndCoreWithManagedSchema() throws Exception { @@ -54,15 +52,20 @@ private static void setupTempDirAndCoreWithManagedSchema() throws Exception { File confDir = new File(solrHome, confPath); File testHomeConfDir = new File(TEST_HOME(), confPath); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "solrconfig-managed-schema.xml"), confDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "solrconfig.snippet.randomindexconfig.xml"), confDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema-id-and-version-fields-only.xml"), confDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "solrconfig-managed-schema.xml"), confDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "solrconfig.snippet.randomindexconfig.xml"), confDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "schema-id-and-version-fields-only.xml"), confDir); // initCore will trigger an upgrade to managed schema, since the solrconfig has // System.setProperty("managed.schema.mutable", "true"); - initCore("solrconfig-managed-schema.xml", "schema-id-and-version-fields-only.xml", - solrHome.getPath()); + initCore( + "solrconfig-managed-schema.xml", + "schema-id-and-version-fields-only.xml", + solrHome.getPath()); } @AfterClass @@ -84,20 +87,21 @@ public void test() throws Exception { int numDocs = atLeast(1); for (int i = 1; i <= numDocs; i++) { // NOTE: starting at "1", we'll use id=0 later - assertU(adoc("id", ""+i)); + assertU(adoc("id", "" + i)); if (random().nextBoolean()) { assertU(commit()); } } - // with random merge policies, a regular commit can cause a segment to be flushed that can kick off a background merge - // that can cause a later commit to actually see changes and open a new searcher. This should not be possible with optimize + // with random merge policies, a regular commit can cause a segment to be flushed that can kick + // off a background merge that can cause a later commit to actually see changes and open a new + // searcher. 
This should not be possible with optimize assertU(optimize()); // seed a single query into the cache - assertQ(req("*:*"), "//*[@numFound='"+numDocs+"']"); + assertQ(req("*:*"), "//*[@numFound='" + numDocs + "']"); - final SolrQueryRequest baseReq = req("q","foo"); + final SolrQueryRequest baseReq = req("q", "foo"); try { // we make no index changes in this block, so the searcher should always be the same // NOTE: we *have* to call getSearcher() in advance, it's a delayed binding @@ -106,21 +110,21 @@ public void test() throws Exception { assertU(commit()); assertSearcherHasNotChanged(expectedSearcher); - assertU(commit("openSearcher","true")); + assertU(commit("openSearcher", "true")); assertSearcherHasNotChanged(expectedSearcher); - assertU(commit("softCommit","true")); + assertU(commit("softCommit", "true")); assertSearcherHasNotChanged(expectedSearcher); - assertU(commit("softCommit","true","openSearcher","true")); + assertU(commit("softCommit", "true", "openSearcher", "true")); assertSearcherHasNotChanged(expectedSearcher); assertU(delQ("id:match_no_documents")); - assertU(commit("softCommit","true","openSearcher","true")); + assertU(commit("softCommit", "true", "openSearcher", "true")); assertSearcherHasNotChanged(expectedSearcher); assertU(delI("0")); // no doc has this id, yet - assertU(commit("softCommit","true","openSearcher","true")); + assertU(commit("softCommit", "true", "openSearcher", "true")); assertSearcherHasNotChanged(expectedSearcher); } finally { @@ -130,7 +134,7 @@ public void test() throws Exception { // now do a variety of things that *should* always guarantee a new searcher SolrQueryRequest beforeReq; - beforeReq = req("q","foo"); + beforeReq = req("q", "foo"); try { // NOTE: we *have* to call getSearcher() in advance: delayed binding SolrIndexSearcher before = getMainSearcher(beforeReq); @@ -141,8 +145,8 @@ public void test() throws Exception { } finally { beforeReq.close(); } - - beforeReq = req("q","foo"); + + beforeReq = req("q", "foo"); try { // NOTE: we *have* to call getSearcher() in advance: delayed binding SolrIndexSearcher before = getMainSearcher(beforeReq); @@ -154,7 +158,7 @@ public void test() throws Exception { beforeReq.close(); } - beforeReq = req("q","foo"); + beforeReq = req("q", "foo"); try { // NOTE: we *have* to call getSearcher() in advance: delayed binding SolrIndexSearcher before = getMainSearcher(beforeReq); @@ -166,7 +170,7 @@ public void test() throws Exception { beforeReq.close(); } - beforeReq = req("q","foo"); + beforeReq = req("q", "foo"); try { // NOTE: we *have* to call getSearcher() in advance: delayed binding SolrIndexSearcher before = getMainSearcher(beforeReq); @@ -174,23 +178,23 @@ public void test() throws Exception { // create a new field & add it. assertTrue("schema not mutable", beforeReq.getSchema().isMutable()); ManagedIndexSchema oldSchema = (ManagedIndexSchema) beforeReq.getSchema(); - SchemaField newField = oldSchema.newField - ("hoss", "string", Collections.emptyMap()); + SchemaField newField = + oldSchema.newField("hoss", "string", Collections.emptyMap()); IndexSchema newSchema = oldSchema.addField(newField); h.getCore().setLatestSchema(newSchema); // sanity check, later asserts assume this - assertNotSame(oldSchema, newSchema); + assertNotSame(oldSchema, newSchema); // the schema has changed - but nothing has requested a new Searcher yet assertSearcherHasNotChanged(before); // only now should we get a new searcher... 
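The sequence being reflowed in this hunk is the interesting part of TestSearcherReuse: a managed schema is mutated live, and nothing observes the change until a new searcher is opened. Condensed, with the names taken from the test itself:

    ManagedIndexSchema oldSchema = (ManagedIndexSchema) beforeReq.getSchema();
    SchemaField newField = oldSchema.newField("hoss", "string", Collections.emptyMap());
    IndexSchema newSchema = oldSchema.addField(newField); // derives a new schema object
    h.getCore().setLatestSchema(newSchema);               // publish it on the core

    // existing searchers still hold oldSchema...
    assertSearcherHasNotChanged(before);
    // ...until an openSearcher commit binds newSchema to a fresh searcher
    assertU(commit("softCommit", "true", "openSearcher", "true"));
    assertSearcherHasChanged(before);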
- assertU(commit("softCommit","true","openSearcher","true")); + assertU(commit("softCommit", "true", "openSearcher", "true")); assertSearcherHasChanged(before); // sanity that opening the new searcher was useful to get new schema... - SolrQueryRequest afterReq = req("q","foo"); + SolrQueryRequest afterReq = req("q", "foo"); try { assertSame(newSchema, afterReq.getSchema()); assertSame(newSchema, getMainSearcher(afterReq).getSchema()); @@ -201,32 +205,28 @@ public void test() throws Exception { } finally { beforeReq.close(); } - } - - /** - * Helper method to get the searcher from a request, and assert that it's the main searcher - */ + + /** Helper method to get the searcher from a request, and assert that it's the main searcher */ public static SolrIndexSearcher getMainSearcher(SolrQueryRequest req) { SolrIndexSearcher s = req.getSearcher(); assertMainSearcher(s); return s; } - /** - * Sanity check that we didn't get a realtime (non-caching) searcher - */ + /** Sanity check that we didn't get a realtime (non-caching) searcher */ public static void assertMainSearcher(SolrIndexSearcher s) { - assertTrue("Searcher isn't 'main': " + s.toString(), - // TODO brittle, better solution? - s.toString().contains(" main{")); + assertTrue( + "Searcher isn't 'main': " + s.toString(), + // TODO brittle, better solution? + s.toString().contains(" main{")); assertTrue("Searcher is non-caching", s.isCachingEnabled()); } - + /** - * Given an existing searcher, creates a new SolrRequest, and verifies that the - * searcher in that request is not the same as the previous searcher -- - * cleaningly closing the new SolrRequest either way. + * Given an existing searcher, creates a new SolrRequest, and verifies that the searcher in that + * request is not the same as the previous searcher -- cleaningly closing the new + * SolrRequest either way. */ public static void assertSearcherHasChanged(SolrIndexSearcher previous) { SolrQueryRequest req = req("*:*"); @@ -239,9 +239,9 @@ public static void assertSearcherHasChanged(SolrIndexSearcher previous) { } /** - * Given an existing searcher, creates a new SolrRequest, and verifies that the - * searcher in that request is the same as the expected searcher -- cleaningly - * closing the new SolrRequest either way. + * Given an existing searcher, creates a new SolrRequest, and verifies that the searcher in that + * request is the same as the expected searcher -- cleaningly closing the new SolrRequest either + * way. 
*/ public static void assertSearcherHasNotChanged(SolrIndexSearcher expected) { SolrQueryRequest req = req("*:*"); @@ -252,5 +252,4 @@ public static void assertSearcherHasNotChanged(SolrIndexSearcher expected) { req.close(); } } - } diff --git a/solr/core/src/test/org/apache/solr/search/TestSimpleQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestSimpleQParserPlugin.java index 0ce6179ec62..c2d138e6a4d 100644 --- a/solr/core/src/test/org/apache/solr/search/TestSimpleQParserPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/TestSimpleQParserPlugin.java @@ -24,16 +24,37 @@ public class TestSimpleQParserPlugin extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-basic.xml","schema-simpleqpplugin.xml"); + initCore("solrconfig-basic.xml", "schema-simpleqpplugin.xml"); index(); } public static void index() throws Exception { - assertU(adoc("id", "42", "text0", "t0 t0 t0", "text1", "t0 t1 t2", "text-keyword0", "kw0 kw0 kw0")); - assertU(adoc("id", "43", "text0", "t0 t1 t2", "text1", "t3 t4 t5", "text-keyword0", "kw0 kw1 kw2")); - assertU(adoc("id", "44", "text0", "t0 t1 t1", "text1", "t6 t7 t8", "text-keyword0", "kw3 kw4 kw5")); - assertU(adoc("id", "45", "text0", "t0 t0 t1", "text1", "t9 t10 t11", "text-keyword0", "kw6 kw7 kw8")); - assertU(adoc("id", "46", "text0", "t1 t1 t1", "text1", "t12 t13 t14", "text-keyword0", "kw9 kw10 kw11")); + assertU( + adoc("id", "42", "text0", "t0 t0 t0", "text1", "t0 t1 t2", "text-keyword0", "kw0 kw0 kw0")); + assertU( + adoc("id", "43", "text0", "t0 t1 t2", "text1", "t3 t4 t5", "text-keyword0", "kw0 kw1 kw2")); + assertU( + adoc("id", "44", "text0", "t0 t1 t1", "text1", "t6 t7 t8", "text-keyword0", "kw3 kw4 kw5")); + assertU( + adoc( + "id", + "45", + "text0", + "t0 t0 t1", + "text1", + "t9 t10 t11", + "text-keyword0", + "kw6 kw7 kw8")); + assertU( + adoc( + "id", + "46", + "text0", + "t1 t1 t1", + "text1", + "t12 t13 t14", + "text-keyword0", + "kw9 kw10 kw11")); assertU(adoc("id", "47", "text0", "and", "text1", "+", "text-keyword0", "+")); assertU(adoc("id", "48", "text0", "not", "text1", "-", "text-keyword0", "-")); assertU(adoc("id", "49", "text0", "or", "text1", "|", "text-keyword0", "|")); @@ -52,13 +73,24 @@ public static void index() throws Exception { @Test public void testQueryFields() throws Exception { - assertJQ(req("defType", "simple", "qf", "text0^2 text1 text-keyword0", "q", "t3"), "/response/numFound==1"); - assertJQ(req("defType", "simple", "qf", "text0^3 text1^4 text-keyword0^0.55", "q", "t0"), "/response/numFound==4"); - assertJQ(req("defType", "simple", "qf", "text-keyword0^9.2", "q", "\"kw9 kw10 kw11\""), "/response/numFound==1"); - assertJQ(req("defType", "simple", "qf", "text-keyword0", "q", "kw9 kw10 kw11"), "/response/numFound==0"); - assertJQ(req("defType", "simple", "qf", "text1 text-keyword0", "q", "kw9"), "/response/numFound==0"); + assertJQ( + req("defType", "simple", "qf", "text0^2 text1 text-keyword0", "q", "t3"), + "/response/numFound==1"); + assertJQ( + req("defType", "simple", "qf", "text0^3 text1^4 text-keyword0^0.55", "q", "t0"), + "/response/numFound==4"); + assertJQ( + req("defType", "simple", "qf", "text-keyword0^9.2", "q", "\"kw9 kw10 kw11\""), + "/response/numFound==1"); + assertJQ( + req("defType", "simple", "qf", "text-keyword0", "q", "kw9 kw10 kw11"), + "/response/numFound==0"); + assertJQ( + req("defType", "simple", "qf", "text1 text-keyword0", "q", "kw9"), "/response/numFound==0"); assertJQ(req("defType", "simple", "qf", "text0", 
"q", "t2"), "/response/numFound==1"); - assertJQ(req("defType", "simple", "qf", "text0^1.1 text1^0.9", "q", "t2 t9 t12"), "/response/numFound==4"); + assertJQ( + req("defType", "simple", "qf", "text0^1.1 text1^0.9", "q", "t2 t9 t12"), + "/response/numFound==4"); } @Test @@ -67,144 +99,465 @@ public void testDefaultField() throws Exception { assertJQ(req("defType", "simple", "df", "text0", "q", "t3"), "/response/numFound==0"); assertJQ(req("defType", "simple", "df", "text1", "q", "t2 t9 t12"), "/response/numFound==3"); assertJQ(req("defType", "simple", "df", "text1", "q", "t3"), "/response/numFound==1"); - assertJQ(req("defType", "simple", "df", "text-keyword0", "q", "\"kw9 kw10 kw11\""), "/response/numFound==1"); - assertJQ(req("defType", "simple", "df", "text-keyword0", "q", "kw9 kw10 kw11"), "/response/numFound==0"); + assertJQ( + req("defType", "simple", "df", "text-keyword0", "q", "\"kw9 kw10 kw11\""), + "/response/numFound==1"); + assertJQ( + req("defType", "simple", "df", "text-keyword0", "q", "kw9 kw10 kw11"), + "/response/numFound==0"); } @Test public void testQueryFieldPriority() throws Exception { - assertJQ(req("defType", "simple", "qf", "text0^2 text1 text-keyword0", "df", "text0", "q", "t3"), "/response/numFound==1"); + assertJQ( + req("defType", "simple", "qf", "text0^2 text1 text-keyword0", "df", "text0", "q", "t3"), + "/response/numFound==1"); } @Test public void testOnlyAndOperatorEnabledDisabled() throws Exception { - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "+", - "q.operators", "NOT, OR, PHRASE, PREFIX, PRECEDENCE, ESCAPE, WHITESPACE"), "/response/numFound==1"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "-", - "q.operators", "NOT, OR, PHRASE, PREFIX, PRECEDENCE, ESCAPE, WHITESPACE"), "/response/numFound==0"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "+", - "q.operators", "AND"), "/response/numFound==0"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "-", - "q.operators", "AND"), "/response/numFound==1"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "+", + "q.operators", + "NOT, OR, PHRASE, PREFIX, PRECEDENCE, ESCAPE, WHITESPACE"), + "/response/numFound==1"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "-", + "q.operators", + "NOT, OR, PHRASE, PREFIX, PRECEDENCE, ESCAPE, WHITESPACE"), + "/response/numFound==0"); + assertJQ( + req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "+", "q.operators", "AND"), + "/response/numFound==0"); + assertJQ( + req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "-", "q.operators", "AND"), + "/response/numFound==1"); } @Test public void testOnlyNotOperatorEnabledDisabled() throws Exception { - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "-", - "q.operators", "AND, OR, PHRASE, PREFIX, PRECEDENCE, ESCAPE, WHITESPACE"), "/response/numFound==1"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "|", - "q.operators", "AND, OR, PHRASE, PREFIX, PRECEDENCE, ESCAPE, WHITESPACE"), "/response/numFound==0"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "-", - "q.operators", "NOT"), "/response/numFound==0"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "|", - "q.operators", "NOT"), "/response/numFound==1"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 
text-keyword0", + "q", + "-", + "q.operators", + "AND, OR, PHRASE, PREFIX, PRECEDENCE, ESCAPE, WHITESPACE"), + "/response/numFound==1"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "|", + "q.operators", + "AND, OR, PHRASE, PREFIX, PRECEDENCE, ESCAPE, WHITESPACE"), + "/response/numFound==0"); + assertJQ( + req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "-", "q.operators", "NOT"), + "/response/numFound==0"); + assertJQ( + req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "|", "q.operators", "NOT"), + "/response/numFound==1"); } @Test public void testOnlyOrOperatorEnabledDisabled() throws Exception { - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "|", - "q.operators", "AND, NOT, PHRASE, PREFIX, PRECEDENCE, ESCAPE, WHITESPACE"), "/response/numFound==1"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "\"", - "q.operators", "AND, NOT, PHRASE, PREFIX, PRECEDENCE, ESCAPE, WHITESPACE"), "/response/numFound==0"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "|", - "q.operators", "OR"), "/response/numFound==0"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "\"", - "q.operators", "OR"), "/response/numFound==1"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "|", + "q.operators", + "AND, NOT, PHRASE, PREFIX, PRECEDENCE, ESCAPE, WHITESPACE"), + "/response/numFound==1"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "\"", + "q.operators", + "AND, NOT, PHRASE, PREFIX, PRECEDENCE, ESCAPE, WHITESPACE"), + "/response/numFound==0"); + assertJQ( + req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "|", "q.operators", "OR"), + "/response/numFound==0"); + assertJQ( + req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "\"", "q.operators", "OR"), + "/response/numFound==1"); } @Test public void testOnlyPhraseOperatorEnabledDisabled() throws Exception { - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "\"", - "q.operators", "AND, NOT, OR, PREFIX, PRECEDENCE, ESCAPE, WHITESPACE"), "/response/numFound==1"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "|", - "q.operators", "AND, NOT, OR, PREFIX, PRECEDENCE, ESCAPE, WHITESPACE"), "/response/numFound==0"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "\"", - "q.operators", "PHRASE"), "/response/numFound==0"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "|", - "q.operators", "PHRASE"), "/response/numFound==1"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "\"", + "q.operators", + "AND, NOT, OR, PREFIX, PRECEDENCE, ESCAPE, WHITESPACE"), + "/response/numFound==1"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "|", + "q.operators", + "AND, NOT, OR, PREFIX, PRECEDENCE, ESCAPE, WHITESPACE"), + "/response/numFound==0"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "\"", + "q.operators", + "PHRASE"), + "/response/numFound==0"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "|", + "q.operators", + "PHRASE"), + "/response/numFound==1"); } @Test public void testOnlyPrefixOperatorEnabledDisabled() throws Exception { - assertJQ(req("defType", "simple", "qf", "text0 
text1 text-keyword0", "q", "t*", - "q.operators", "AND, NOT, OR, PHRASE, PRECEDENCE, ESCAPE, WHITESPACE"), "/response/numFound==1"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "(", - "q.operators", "AND, NOT, OR, PHRASE, PRECEDENCE, ESCAPE, WHITESPACE"), "/response/numFound==0"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "t*", - "q.operators", "PREFIX"), "/response/numFound==6"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "(", - "q.operators", "PREFIX"), "/response/numFound==1"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "t*", + "q.operators", + "AND, NOT, OR, PHRASE, PRECEDENCE, ESCAPE, WHITESPACE"), + "/response/numFound==1"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "(", + "q.operators", + "AND, NOT, OR, PHRASE, PRECEDENCE, ESCAPE, WHITESPACE"), + "/response/numFound==0"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "t*", + "q.operators", + "PREFIX"), + "/response/numFound==6"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "(", + "q.operators", + "PREFIX"), + "/response/numFound==1"); } @Test public void testOnlyPrecedenceOperatorEnabledDisabled() throws Exception { - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "(", - "q.operators", "AND, NOT, OR, PHRASE, PREFIX, ESCAPE, WHITESPACE"), "/response/numFound==1"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "\\", - "q.operators", "AND, NOT, OR, PHRASE, PREFIX, ESCAPE, WHITESPACE"), "/response/numFound==0"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "(", - "q.operators", "PRECEDENCE"), "/response/numFound==0"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "\\", - "q.operators", "PRECEDENCE"), "/response/numFound==1"); - - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", ")", - "q.operators", "AND, NOT, OR, PHRASE, PREFIX, ESCAPE, WHITESPACE"), "/response/numFound==1"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "\\", - "q.operators", "AND, NOT, OR, PHRASE, PREFIX, ESCAPE, WHITESPACE"), "/response/numFound==0"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", ")", - "q.operators", "PRECEDENCE"), "/response/numFound==0"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "\\", - "q.operators", "PRECEDENCE"), "/response/numFound==1"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "(", + "q.operators", + "AND, NOT, OR, PHRASE, PREFIX, ESCAPE, WHITESPACE"), + "/response/numFound==1"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "\\", + "q.operators", + "AND, NOT, OR, PHRASE, PREFIX, ESCAPE, WHITESPACE"), + "/response/numFound==0"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "(", + "q.operators", + "PRECEDENCE"), + "/response/numFound==0"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "\\", + "q.operators", + "PRECEDENCE"), + "/response/numFound==1"); + + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + ")", + "q.operators", + "AND, NOT, OR, PHRASE, PREFIX, ESCAPE, WHITESPACE"), + 
"/response/numFound==1"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "\\", + "q.operators", + "AND, NOT, OR, PHRASE, PREFIX, ESCAPE, WHITESPACE"), + "/response/numFound==0"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + ")", + "q.operators", + "PRECEDENCE"), + "/response/numFound==0"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "\\", + "q.operators", + "PRECEDENCE"), + "/response/numFound==1"); } @Test public void testOnlyEscapeOperatorEnabledDisabled() throws Exception { - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "\\", - "q.operators", "AND, NOT, OR, PHRASE, PREFIX, PRECEDENCE, WHITESPACE"), "/response/numFound==1"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "\n", - "q.operators", "AND, NOT, OR, PHRASE, PREFIX, PRECEDENCE, WHITESPACE"), "/response/numFound==0"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "\\", - "q.operators", "ESCAPE"), "/response/numFound==0"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "\n", - "q.operators", "ESCAPE"), "/response/numFound==1"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "\\", + "q.operators", + "AND, NOT, OR, PHRASE, PREFIX, PRECEDENCE, WHITESPACE"), + "/response/numFound==1"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "\n", + "q.operators", + "AND, NOT, OR, PHRASE, PREFIX, PRECEDENCE, WHITESPACE"), + "/response/numFound==0"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "\\", + "q.operators", + "ESCAPE"), + "/response/numFound==0"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "\n", + "q.operators", + "ESCAPE"), + "/response/numFound==1"); } @Test public void testOnlyWhitespaceOperatorEnabledDisabled() throws Exception { - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "\n", - "q.operators", "AND, NOT, OR, PHRASE, PREFIX, PRECEDENCE, ESCAPE"), "/response/numFound==1"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "\\", - "q.operators", "AND, NOT, OR, PHRASE, PREFIX, PRECEDENCE, ESCAPE"), "/response/numFound==0"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "\n", - "q.operators", "WHITESPACE"), "/response/numFound==0"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "\\", - "q.operators", "WHITESPACE"), "/response/numFound==1"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "\n", + "q.operators", + "AND, NOT, OR, PHRASE, PREFIX, PRECEDENCE, ESCAPE"), + "/response/numFound==1"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "\\", + "q.operators", + "AND, NOT, OR, PHRASE, PREFIX, PRECEDENCE, ESCAPE"), + "/response/numFound==0"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "\n", + "q.operators", + "WHITESPACE"), + "/response/numFound==0"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "\\", + "q.operators", + "WHITESPACE"), + "/response/numFound==1"); } @Test public void testArbitraryOperatorsEnabledDisabled() throws Exception { - assertJQ(req("defType", "simple", "qf", "text0 text1 
text-keyword0", "q", "kw0+kw1+kw2| \\ ", - "q.operators", "AND, NOT, OR, PHRASE"), "/response/numFound==1"); - assertJQ(req("defType", "simple", "qf", "text0 text1 text-keyword0", "q", "t1 + t2 \\", - "q.operators", "AND, WHITESPACE"), "/response/numFound==3"); - assertJQ(req("defType", "simple", "qf", "text0 text-keyword0", "q", "t0 + (-t1 -t2) |", - "q.operators", "AND, NOT, PRECEDENCE, WHITESPACE"), "/response/numFound==4"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "kw0+kw1+kw2| \\ ", + "q.operators", + "AND, NOT, OR, PHRASE"), + "/response/numFound==1"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text1 text-keyword0", + "q", + "t1 + t2 \\", + "q.operators", + "AND, WHITESPACE"), + "/response/numFound==3"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text0 text-keyword0", + "q", + "t0 + (-t1 -t2) |", + "q.operators", + "AND, NOT, PRECEDENCE, WHITESPACE"), + "/response/numFound==4"); } @Test public void testNoOperators() throws Exception { - assertJQ(req("defType", "simple", "qf", "text1 text-keyword0", "q", "kw0 kw1 kw2", - "q.operators", ""), "/response/numFound==1"); - assertJQ(req("defType", "simple", "qf", "text1", "q", "t1 t2 t3", - "q.operators", ""), "/response/numFound==2"); + assertJQ( + req( + "defType", + "simple", + "qf", + "text1 text-keyword0", + "q", + "kw0 kw1 kw2", + "q.operators", + ""), + "/response/numFound==1"); + assertJQ( + req("defType", "simple", "qf", "text1", "q", "t1 t2 t3", "q.operators", ""), + "/response/numFound==2"); } @Test public void testDefaultOperator() throws Exception { - assertJQ(req("defType", "simple", "qf", "text1 text-keyword0", "q", "t2 t3", - "q.op", "AND"), "/response/numFound==0"); - assertJQ(req("defType", "simple", "qf", "text0 text-keyword0", "q", "t0 t2", - "q.op", "AND"), "/response/numFound==1"); + assertJQ( + req("defType", "simple", "qf", "text1 text-keyword0", "q", "t2 t3", "q.op", "AND"), + "/response/numFound==0"); + assertJQ( + req("defType", "simple", "qf", "text0 text-keyword0", "q", "t0 t2", "q.op", "AND"), + "/response/numFound==1"); assertJQ(req("defType", "simple", "qf", "text1", "q", "t2 t3"), "/response/numFound==2"); } @@ -226,7 +579,6 @@ public void testFuzzyChain() throws Exception { public void testQueryAnalyzerIsUsed() throws Exception { // this should only match one doc, which was lower cased before being added - assertJQ(req("defType", "simple", "qf", "text-query0", "q", "HELLO"), - "/response/numFound==1"); + assertJQ(req("defType", "simple", "qf", "text-query0", "q", "HELLO"), "/response/numFound==1"); } } diff --git a/solr/core/src/test/org/apache/solr/search/TestSmileRequest.java b/solr/core/src/test/org/apache/solr/search/TestSmileRequest.java index 3b2ebbd474e..4f92e76f555 100644 --- a/solr/core/src/test/org/apache/solr/search/TestSmileRequest.java +++ b/solr/core/src/test/org/apache/solr/search/TestSmileRequest.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.io.InputStream; import java.util.Map; - import org.apache.solr.JSONTestUtil; import org.apache.solr.SolrTestCaseHS; import org.apache.solr.SolrTestCaseJ4; @@ -38,7 +37,7 @@ @SolrTestCaseJ4.SuppressSSL public class TestSmileRequest extends SolrTestCaseJ4 { - private static SolrTestCaseHS.SolrInstances servers; // for distributed testing + private static SolrTestCaseHS.SolrInstances servers; // for distributed testing @BeforeClass public static void beforeTests() throws Exception { @@ -67,28 +66,29 @@ public static void afterTests() throws Exception { public 
void testDistribJsonRequest() throws Exception { initServers(); SolrTestCaseHS.Client client = servers.getClient(random().nextInt()); - client.tester = new SolrTestCaseHS.Client.Tester() { - @Override - public void assertJQ(SolrClient client, SolrParams args, String... tests) throws Exception { - ((HttpSolrClient) client).setParser(SmileResponseParser.inst); - QueryRequest query = new QueryRequest(args); - String path = args.get("qt"); - if (path != null) { - query.setPath(path); - } - NamedList rsp = client.request(query); - @SuppressWarnings({"rawtypes"}) - Map m = rsp.asMap(5); - String jsonStr = Utils.toJSONString(m); - SolrTestCaseHS.matchJSON(jsonStr, tests); - } - }; + client.tester = + new SolrTestCaseHS.Client.Tester() { + @Override + public void assertJQ(SolrClient client, SolrParams args, String... tests) + throws Exception { + ((HttpSolrClient) client).setParser(SmileResponseParser.inst); + QueryRequest query = new QueryRequest(args); + String path = args.get("qt"); + if (path != null) { + query.setPath(path); + } + NamedList rsp = client.request(query); + @SuppressWarnings({"rawtypes"}) + Map m = rsp.asMap(5); + String jsonStr = Utils.toJSONString(m); + SolrTestCaseHS.matchJSON(jsonStr, tests); + } + }; client.queryDefaults().set("shards", servers.getShards()); TestJsonRequest.doJsonRequest(client, true); - } - //adding this to core adds the dependency on a few extra jars to our distribution. + // adding this to core adds the dependency on a few extra jars to our distribution. // So this is not added there public static class SmileResponseParser extends BinaryResponseParser { public static final SmileResponseParser inst = new SmileResponseParser(); @@ -106,9 +106,7 @@ public NamedList processResponse(InputStream body, String encoding) { return new NamedList(m); } catch (IOException e) { throw new RuntimeException(e); - } } - } } diff --git a/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java b/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java index 6268cfadac2..5e3610defcd 100644 --- a/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java +++ b/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java @@ -16,10 +16,9 @@ */ package org.apache.solr.search; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import java.text.ParseException; import java.util.Arrays; - -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.core.SolrCore; @@ -37,8 +36,9 @@ import org.locationtech.spatial4j.shape.Rectangle; /** - * Test Solr 4's new spatial capabilities from the new Lucene spatial module. Don't thoroughly test it here because - * Lucene spatial has its own tests. Some of these tests were ported from Solr 3 spatial tests. + * Test Solr 4's new spatial capabilities from the new Lucene spatial module. Don't thoroughly test + * it here because Lucene spatial has its own tests. Some of these tests were ported from Solr 3 + * spatial tests. 
*/ public class TestSolr4Spatial extends SolrTestCaseJ4 { @@ -52,9 +52,18 @@ public TestSolr4Spatial(String fieldName) { @ParametersFactory public static Iterable parameters() { - return Arrays.asList(new Object[][]{ - {"llp"}, {"llp_idx"}, {"llp_dv"}, {"srpt_geohash"}, {"srpt_quad"}, {"srpt_packedquad"}, {"bbox"}, {"pbbox"}, {"bbox_ndv"} - }); + return Arrays.asList( + new Object[][] { + {"llp"}, + {"llp_idx"}, + {"llp_dv"}, + {"srpt_geohash"}, + {"srpt_quad"}, + {"srpt_packedquad"}, + {"bbox"}, + {"pbbox"}, + {"bbox_ndv"} + }); } @BeforeClass @@ -72,20 +81,49 @@ public void setUp() throws Exception { @Test public void testBadShapeParse400() { - assertQEx(null, req( - "fl", "id," + fieldName, "q", "*:*", "rows", "1000", - "fq", "{!field f=" + fieldName + "}Intersects(NonexistentShape(89.9,-130 d=9))"), 400); - assertQEx(null, req( - "fl", "id," + fieldName, "q", "*:*", "rows", "1000", - "fq", "{!field f=" + fieldName + "}Intersects(NonexistentShape(89.9,-130 d=9"), 400);//missing parens - assertQEx(null, req( - "fl", "id," + fieldName, "q", "*:*", "rows", "1000", - "fq", "{!field f=" + fieldName + "}Intersectssss"), 400); + assertQEx( + null, + req( + "fl", + "id," + fieldName, + "q", + "*:*", + "rows", + "1000", + "fq", + "{!field f=" + fieldName + "}Intersects(NonexistentShape(89.9,-130 d=9))"), + 400); + assertQEx( + null, + req( + "fl", + "id," + fieldName, + "q", + "*:*", + "rows", + "1000", + "fq", + "{!field f=" + fieldName + "}Intersects(NonexistentShape(89.9,-130 d=9"), + 400); // missing parens + assertQEx( + null, + req( + "fl", + "id," + fieldName, + "q", + "*:*", + "rows", + "1000", + "fq", + "{!field f=" + fieldName + "}Intersectssss"), + 400); ignoreException("NonexistentShape"); - SolrException e = expectThrows(SolrException.class, "should throw exception on non existent shape", - () -> assertU(adoc("id", "-1", fieldName, "NonexistentShape")) - ); + SolrException e = + expectThrows( + SolrException.class, + "should throw exception on non existent shape", + () -> assertU(adoc("id", "-1", fieldName, "NonexistentShape"))); assertEquals(400, e.code()); unIgnoreException("NonexistentShape"); } @@ -107,33 +145,47 @@ private void setupDocs() { if (random().nextBoolean()) { assertU(commit()); } - assertU(adoc("id", "99"));//blank + assertU(adoc("id", "99")); // blank assertU(commit()); } @Test public void testIntersectFilter() throws Exception { setupDocs(); - - //Try some edge cases + + // Try some edge cases checkHits(fieldName, "1,1", 175, DistanceUtils.EARTH_MEAN_RADIUS_KM, 3, 5, 6, 7); checkHits(fieldName, "0,179.8", 200, DistanceUtils.EARTH_MEAN_RADIUS_KM, 2, 8, 9); - checkHits(fieldName, "89.8, 50", 200, DistanceUtils.EARTH_MEAN_RADIUS_KM, 2, 10, 11);//this goes over the north pole - checkHits(fieldName, "-89.8, 50", 200, DistanceUtils.EARTH_MEAN_RADIUS_KM, 2, 12, 13);//this goes over the south pole - //try some normal cases + checkHits( + fieldName, + "89.8, 50", + 200, + DistanceUtils.EARTH_MEAN_RADIUS_KM, + 2, + 10, + 11); // this goes over the north pole + checkHits( + fieldName, + "-89.8, 50", + 200, + DistanceUtils.EARTH_MEAN_RADIUS_KM, + 2, + 12, + 13); // this goes over the south pole + // try some normal cases checkHits(fieldName, "33.0,-80.0", 300, DistanceUtils.EARTH_MEAN_RADIUS_KM, 2); - //large distance + // large distance checkHits(fieldName, "1,1", 5000, DistanceUtils.EARTH_MEAN_RADIUS_KM, 3, 5, 6, 7); - //Because we are generating a box based on the west/east longitudes and the south/north latitudes, which then - //translates to a range query, which is 
slightly more inclusive. Thus, even though 0.0 is 15.725 kms away, - //it will be included, b/c of the box calculation. + // Because we are generating a box based on the west/east longitudes and the south/north + // latitudes, which then translates to a range query, which is slightly more inclusive. Thus, + // even though 0.0 is 15.725 kms away, it will be included, b/c of the box calculation. checkHits(fieldName, false, "0.1,0.1", 15, DistanceUtils.EARTH_MEAN_RADIUS_KM, 2, 5, 6); - //try some more + // try some more clearIndex(); assertU(adoc("id", "14", fieldName, "0,5")); assertU(adoc("id", "15", fieldName, "0,15")); - //3000KM from 0,0, see http://www.movable-type.co.uk/scripts/latlong.html + // 3000KM from 0,0, see http://www.movable-type.co.uk/scripts/latlong.html assertU(adoc("id", "16", fieldName, "18.71111,19.79750")); assertU(adoc("id", "17", fieldName, "44.043900,-95.436643")); assertU(commit()); @@ -144,32 +196,41 @@ public void testIntersectFilter() throws Exception { checkHits(fieldName, "0,0", 3001, DistanceUtils.EARTH_MEAN_RADIUS_KM, 3, 14, 15, 16); checkHits(fieldName, "0,0", 3000.1, DistanceUtils.EARTH_MEAN_RADIUS_KM, 3, 14, 15, 16); - //really fine grained distance and reflects some of the vagaries of how we are calculating the box + // really fine grained distance and reflects some of the vagaries of how we are calculating the + // box checkHits(fieldName, "43.517030,-96.789603", 109, DistanceUtils.EARTH_MEAN_RADIUS_KM, 0); - //falls outside of the real distance, but inside the bounding box - checkHits(fieldName, true, "43.517030,-96.789603", 110, DistanceUtils.EARTH_MEAN_RADIUS_KM, 0); - checkHits(fieldName, false, "43.517030,-96.789603", 110, DistanceUtils.EARTH_MEAN_RADIUS_KM, 1, 17); + // falls outside of the real distance, but inside the bounding box + checkHits(fieldName, true, "43.517030,-96.789603", 110, DistanceUtils.EARTH_MEAN_RADIUS_KM, 0); + checkHits( + fieldName, false, "43.517030,-96.789603", 110, DistanceUtils.EARTH_MEAN_RADIUS_KM, 1, 17); } @Test public void checkResultFormat() throws Exception { - //Check input and output format is the same - String IN = "89.9,-130";//lat,lon - String OUT = IN;//IDENTICAL! + // Check input and output format is the same + String IN = "89.9,-130"; // lat,lon + String OUT = IN; // IDENTICAL! assertU(adoc("id", "11", fieldName, IN)); assertU(commit()); - assertQ(req( - "fl", "id," + fieldName, "q", "*:*", "rows", "1000", - "fq", "{!bbox sfield=" + fieldName + " pt=" + IN + " d=9}"), + assertQ( + req( + "fl", + "id," + fieldName, + "q", + "*:*", + "rows", + "1000", + "fq", + "{!bbox sfield=" + fieldName + " pt=" + IN + " d=9}"), "//result/doc/*[@name='" + fieldName + "']//text()='" + OUT + "'"); } @Test public void checkQueryEmptyIndex() throws ParseException { - checkHits(fieldName, "0,0", 100, DistanceUtils.EARTH_MEAN_RADIUS_KM, 0);//doesn't error + checkHits(fieldName, "0,0", 100, DistanceUtils.EARTH_MEAN_RADIUS_KM, 0); // doesn't error } @Test @@ -177,84 +238,141 @@ public void testExistsQuery() { assertQ(req("q", fieldName + ":*", "fl", "id," + fieldName)); } - private void checkHits(String fieldName, String pt, double distKM, double sphereRadius, int count, int ... docIds) throws ParseException { + private void checkHits( + String fieldName, String pt, double distKM, double sphereRadius, int count, int... 
docIds) + throws ParseException { checkHits(fieldName, true, pt, distKM, sphereRadius, count, docIds); } private boolean isBBoxField(String fieldName) { - return fieldName.equalsIgnoreCase("bbox") + return fieldName.equalsIgnoreCase("bbox") || fieldName.equalsIgnoreCase("pbbox") - || fieldName.equalsIgnoreCase("bbox_ndv"); + || fieldName.equalsIgnoreCase("bbox_ndv"); } - - private void checkHits(String fieldName, boolean exact, String ptStr, double distKM, double sphereRadius, int count, int ... docIds) throws ParseException { + + private void checkHits( + String fieldName, + boolean exact, + String ptStr, + double distKM, + double sphereRadius, + int count, + int... docIds) + throws ParseException { if (exact && isBBoxField(fieldName)) { return; // bbox field only supports rectangular query } - String [] tests = new String[docIds != null && docIds.length > 0 ? docIds.length + 1 : 1]; - //test for presence of required ids first + String[] tests = new String[docIds != null && docIds.length > 0 ? docIds.length + 1 : 1]; + // test for presence of required ids first int i = 0; if (docIds != null && docIds.length > 0) { for (int docId : docIds) { tests[i++] = "//result/doc/*[@name='id'][.='" + docId + "']"; } } - //check total length last; maybe response includes ids it shouldn't. Nicer to check this last instead of first so - // that there may be a more specific detailed id to investigate. + // check total length last; maybe response includes ids it shouldn't. Nicer to check this last + // instead of first so that there may be a more specific detailed id to investigate. tests[i++] = "*[count(//doc)=" + count + "]"; - //Test using the Lucene spatial syntax + // Test using the Lucene spatial syntax { - //never actually need the score but lets test + // never actually need the score but lets test String score = randomScoreMode(); double distDEG = DistanceUtils.dist2Degrees(distKM, DistanceUtils.EARTH_MEAN_RADIUS_KM); Point point = SpatialUtils.parsePoint(ptStr, SpatialContext.GEO); - String circleStr = "BUFFER(POINT(" + point.getX()+" "+point.getY()+")," + distDEG + ")"; + String circleStr = "BUFFER(POINT(" + point.getX() + " " + point.getY() + ")," + distDEG + ")"; String shapeStr; if (exact) { shapeStr = circleStr; - } else {//bbox - //the GEO is an assumption + } else { // bbox + // the GEO is an assumption SpatialContext ctx = SpatialContext.GEO; Rectangle bbox = ctx.readShapeFromWkt(circleStr).getBoundingBox(); - shapeStr = "ENVELOPE(" + bbox.getMinX() + ", " + bbox.getMaxX() + - ", " + bbox.getMaxY() + ", " + bbox.getMinY() + ")"; + shapeStr = + "ENVELOPE(" + + bbox.getMinX() + + ", " + + bbox.getMaxX() + + ", " + + bbox.getMaxY() + + ", " + + bbox.getMinY() + + ")"; } - //FYI default distErrPct=0.025 works with the tests in this file - assertQ(req( - "fl", "id", "q","*:*", "rows", "1000", - "fq", "{!field f=" + fieldName + (score==null?"":" score="+score) - + "}Intersects(" + shapeStr + ")"), + // FYI default distErrPct=0.025 works with the tests in this file + assertQ( + req( + "fl", + "id", + "q", + "*:*", + "rows", + "1000", + "fq", + "{!field f=" + + fieldName + + (score == null ? "" : " score=" + score) + + "}Intersects(" + + shapeStr + + ")"), tests); } - //Test using geofilt + // Test using geofilt { - assertQ(req( - "fl", "id", "q", "*:*", "rows", "1000", - "fq", "{!" + (exact ? 
"geofilt" : "bbox") + " sfield=" + fieldName + " pt='" + ptStr + "' d=" + distKM + " sphere_radius=" + sphereRadius + "}"), + assertQ( + req( + "fl", + "id", + "q", + "*:*", + "rows", + "1000", + "fq", + "{!" + + (exact ? "geofilt" : "bbox") + + " sfield=" + + fieldName + + " pt='" + + ptStr + + "' d=" + + distKM + + " sphere_radius=" + + sphereRadius + + "}"), tests); } - } private String randomScoreMode() { - return canCalcDistance ? new String[]{null, "none","distance","recipDistance"}[random().nextInt(4)] : "none"; + return canCalcDistance + ? new String[] {null, "none", "distance", "recipDistance"}[random().nextInt(4)] + : "none"; } @Test public void testRangeSyntax() { setupDocs(); - //match docId 1 + // match docId 1 int docId = 1; int count = 1; - String score = randomScoreMode();//never actually need the score but lets test - assertQ(req( - "fl", "id", "q","*:*", "rows", "1000", // testing quotes in range too - "fq", "{! "+(score==null?"":" score="+score)+" df="+fieldName+"}[32,-80 TO \"33 , -79\"]"),//lower-left to upper-right - + String score = randomScoreMode(); // never actually need the score but lets test + assertQ( + req( + "fl", + "id", + "q", + "*:*", + "rows", + "1000", // testing quotes in range too + "fq", + "{! " + + (score == null ? "" : " score=" + score) + + " df=" + + fieldName + + "}[32,-80 TO \"33 , -79\"]"), // lower-left to upper-right "//result/doc/*[@name='id'][.='" + docId + "']", "*[count(//doc)=" + count + "]"); } @@ -268,129 +386,170 @@ public void testSort() throws Exception { assertU(commit()); // new segment } if (random().nextBoolean()) { - assertU(adoc("id", "999", fieldName, "70,70"));//far away from these queries; we filter it out + assertU( + adoc("id", "999", fieldName, "70,70")); // far away from these queries; we filter it out } else { assertU(adoc("id", "999")); // no data } assertU(commit()); - // geodist asc - assertJQ(req( - "q", radiusQuery(3, 4, 9, null, null), - "fl","id", - "sort","geodist() asc", - "sfield", fieldName, "pt", "3,4") - , 1e-3 - , "/response/docs/[0]/id=='100'" - , "/response/docs/[1]/id=='101'" - ); + assertJQ( + req( + "q", + radiusQuery(3, 4, 9, null, null), + "fl", + "id", + "sort", + "geodist() asc", + "sfield", + fieldName, + "pt", + "3,4"), + 1e-3, + "/response/docs/[0]/id=='100'", + "/response/docs/[1]/id=='101'"); // geodist desc (simply reverse the assertions) - assertJQ(req( - "q", radiusQuery(3, 4, 9, null, null), - "fl","id", - "sort","geodist() desc", // DESC - "sfield", fieldName, "pt", "3,4") - , 1e-3 - , "/response/docs/[0]/id=='101'" // FLIPPED - , "/response/docs/[1]/id=='100'" // FLIPPED - ); + assertJQ( + req( + "q", + radiusQuery(3, 4, 9, null, null), + "fl", + "id", + "sort", + "geodist() desc", // DESC + "sfield", + fieldName, + "pt", + "3,4"), + 1e-3, + "/response/docs/[0]/id=='101'" // FLIPPED + , + "/response/docs/[1]/id=='100'" // FLIPPED + ); // - // NOTE: the rest work via the score of the spatial query. Generally, you should use geodist() instead. + // NOTE: the rest work via the score of the spatial query. Generally, you should use geodist() + // instead. 
// - //test absence of score=distance means it doesn't score - assertJQ(req( - "q", radiusQuery(3, 4, 9, null, null), - "fl","id,score") - , 1e-9 - , "/response/docs/[0]/score==1.0" - , "/response/docs/[1]/score==1.0" - ); - - //score by distance - assertJQ(req( - "q", radiusQuery(3, 4, 9, "distance", null), - "fl","id,score", - "sort","score asc")//want ascending due to increasing distance - , 1e-3 - , "/response/docs/[0]/id=='100'" - , "/response/docs/[0]/score==2.827493" - , "/response/docs/[1]/id=='101'" - , "/response/docs/[1]/score==5.089807" - ); - //score by recipDistance - assertJQ(req( - "q", radiusQuery(3, 4, 9, "recipDistance", null), - "fl","id,score", - "sort","score desc")//want descending - , 1e-3 - , "/response/docs/[0]/id=='100'" - , "/response/docs/[0]/score==0.3099695" - , "/response/docs/[1]/id=='101'" - , "/response/docs/[1]/score==0.19970943" - ); - - //score by distance and don't filter - assertJQ(req( - //circle radius is small and shouldn't match either, but we disable filtering - "q", radiusQuery(3, 4, 0.000001, "distance", "false"), - "fl","id,score", - "sort","score asc")//want ascending due to increasing distance - , 1e-3 - , "/response/docs/[0]/id=='100'" - , "/response/docs/[0]/score==2.827493" - , "/response/docs/[1]/id=='101'" - , "/response/docs/[1]/score==5.089807" - ); - - //query again with the query point closer to #101, and check the new ordering - assertJQ(req( - "q", radiusQuery(4, 0, 9, "distance", null), - "fl","id,score", - "sort","score asc")//want ascending due to increasing distance - , 1e-4 - , "/response/docs/[0]/id=='101'" - , "/response/docs/[1]/id=='100'" - ); - - //use sort=query(...) - assertJQ(req( - "q","-id:999",//exclude that doc - "fl","id,score", - "sort","query($sortQuery) asc", //want ascending due to increasing distance - "sortQuery", radiusQuery(3, 4, 9, "distance", null)) - , 1e-4 - , "/response/docs/[0]/id=='100'" - , "/response/docs/[1]/id=='101'" ); - - //check reversed direction with query point closer to #101 - assertJQ(req( - "q","-id:999",//exclude that doc - "fl","id,score", - "sort","query($sortQuery) asc", //want ascending due to increasing distance - "sortQuery", radiusQuery(4, 0, 9, "distance", null)) - , 1e-4 - , "/response/docs/[0]/id=='101'" - , "/response/docs/[1]/id=='100'" ); + // test absence of score=distance means it doesn't score + assertJQ( + req("q", radiusQuery(3, 4, 9, null, null), "fl", "id,score"), + 1e-9, + "/response/docs/[0]/score==1.0", + "/response/docs/[1]/score==1.0"); + + // score by distance + assertJQ( + req( + "q", radiusQuery(3, 4, 9, "distance", null), + "fl", "id,score", + "sort", "score asc") // want ascending due to increasing distance + , + 1e-3, + "/response/docs/[0]/id=='100'", + "/response/docs/[0]/score==2.827493", + "/response/docs/[1]/id=='101'", + "/response/docs/[1]/score==5.089807"); + // score by recipDistance + assertJQ( + req( + "q", radiusQuery(3, 4, 9, "recipDistance", null), + "fl", "id,score", + "sort", "score desc") // want descending + , + 1e-3, + "/response/docs/[0]/id=='100'", + "/response/docs/[0]/score==0.3099695", + "/response/docs/[1]/id=='101'", + "/response/docs/[1]/score==0.19970943"); + + // score by distance and don't filter + assertJQ( + req( + // circle radius is small and shouldn't match either, but we disable filtering + "q", radiusQuery(3, 4, 0.000001, "distance", "false"), + "fl", "id,score", + "sort", "score asc") // want ascending due to increasing distance + , + 1e-3, + "/response/docs/[0]/id=='100'", + "/response/docs/[0]/score==2.827493", + 
"/response/docs/[1]/id=='101'", + "/response/docs/[1]/score==5.089807"); + + // query again with the query point closer to #101, and check the new ordering + assertJQ( + req( + "q", radiusQuery(4, 0, 9, "distance", null), + "fl", "id,score", + "sort", "score asc") // want ascending due to increasing distance + , + 1e-4, + "/response/docs/[0]/id=='101'", + "/response/docs/[1]/id=='100'"); + + // use sort=query(...) + assertJQ( + req( + "q", "-id:999", // exclude that doc + "fl", "id,score", + "sort", "query($sortQuery) asc", // want ascending due to increasing distance + "sortQuery", radiusQuery(3, 4, 9, "distance", null)), + 1e-4, + "/response/docs/[0]/id=='100'", + "/response/docs/[1]/id=='101'"); + + // check reversed direction with query point closer to #101 + assertJQ( + req( + "q", "-id:999", // exclude that doc + "fl", "id,score", + "sort", "query($sortQuery) asc", // want ascending due to increasing distance + "sortQuery", radiusQuery(4, 0, 9, "distance", null)), + 1e-4, + "/response/docs/[0]/id=='101'", + "/response/docs/[1]/id=='100'"); } private String radiusQuery(double lat, double lon, double dDEG, String score, String filter) { - //Choose between the Solr/Geofilt syntax, and the Lucene spatial module syntax + // Choose between the Solr/Geofilt syntax, and the Lucene spatial module syntax if (isBBoxField(fieldName) || random().nextBoolean()) { - //we cheat for bbox strategy which doesn't do radius, only rect. + // we cheat for bbox strategy which doesn't do radius, only rect. final String qparser = isBBoxField(fieldName) ? "bbox" : "geofilt"; - return "{!" + qparser + " " + - "sfield=" + fieldName + " " - + (score != null ? "score="+score : "") + " " - + (filter != null ? "filter="+filter : "") + " " - + "pt=" + lat + "," + lon + " d=" + (dDEG /* DistanceUtils.DEG_TO_KM*/) + "}"; + return "{!" + + qparser + + " " + + "sfield=" + + fieldName + + " " + + (score != null ? "score=" + score : "") + + " " + + (filter != null ? "filter=" + filter : "") + + " " + + "pt=" + + lat + + "," + + lon + + " d=" + + (dDEG /* DistanceUtils.DEG_TO_KM*/) + + "}"; } else { return "{! " - + (score != null ? "score="+score : "") + " " - + (filter != null ? "filter="+filter : "") + " " - + "}" + fieldName + ":\"Intersects(BUFFER(POINT(" + lon + " " + lat + ")," + dDEG + "))\""; + + (score != null ? "score=" + score : "") + + " " + + (filter != null ? 
"filter=" + filter : "") + + " " + + "}" + + fieldName + + ":\"Intersects(BUFFER(POINT(" + + lon + + " " + + lat + + ")," + + dDEG + + "))\""; } } @@ -399,44 +558,48 @@ public void testSortMultiVal() throws Exception { assumeTrue("dist sorting not supported on field " + fieldName, canCalcDistance); assumeFalse("Multivalue not supported for this field", isBBoxField(fieldName)); - assertU(adoc("id", "100", fieldName, "1,2"));//1 point - assertU(adoc("id", "101", fieldName, "4,-1", fieldName, "3,5"));//2 points, 2nd is pretty close to query point + assertU(adoc("id", "100", fieldName, "1,2")); // 1 point + assertU( + adoc( + "id", "101", fieldName, "4,-1", fieldName, + "3,5")); // 2 points, 2nd is pretty close to query point assertU(commit()); - assertJQ(req( - "q", radiusQuery(3, 4, 9, "distance", null), - "fl","id,score", - "sort","score asc")//want ascending due to increasing distance - , 1e-4 - , "/response/docs/[0]/id=='101'" - , "/response/docs/[0]/score==0.99862987"//dist to 3,5 - ); + assertJQ( + req( + "q", radiusQuery(3, 4, 9, "distance", null), + "fl", "id,score", + "sort", "score asc") // want ascending due to increasing distance + , + 1e-4, + "/response/docs/[0]/id=='101'", + "/response/docs/[0]/score==0.99862987" // dist to 3,5 + ); } @Test public void testBadScoreParam() throws Exception { - assertQEx("expect friendly error message", + assertQEx( + "expect friendly error message", "none", req(radiusQuery(0, 0, 0, "bogus", "false")), SolrException.ErrorCode.BAD_REQUEST); } - @Test public void testSpatialConfig() throws Exception { - try (SolrCore core = h.getCoreInc()) { + try (SolrCore core = h.getCoreInc()) { IndexSchema schema = core.getLatestSchema(); // BBox Config // Make sure the subfields are not stored - SchemaField sub = schema.getField("bbox"+BBoxStrategy.SUFFIX_MINX); + SchemaField sub = schema.getField("bbox" + BBoxStrategy.SUFFIX_MINX); assertFalse(sub.stored()); // Make sure solr field type is also not stored - BBoxField bbox = (BBoxField)schema.getField("bbox").getType(); + BBoxField bbox = (BBoxField) schema.getField("bbox").getType(); BBoxStrategy strategy = bbox.getStrategy("bbox"); assertFalse(strategy.getFieldType().stored()); } } - } diff --git a/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial2.java b/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial2.java index 807bac20031..b2474d72071 100644 --- a/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial2.java +++ b/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial2.java @@ -16,6 +16,7 @@ */ package org.apache.solr.search; +import com.carrotsearch.randomizedtesting.annotations.Repeat; import java.math.BigDecimal; import java.math.RoundingMode; import java.util.ArrayList; @@ -23,8 +24,6 @@ import java.util.Collections; import java.util.List; import java.util.stream.Collectors; - -import com.carrotsearch.randomizedtesting.annotations.Repeat; import org.apache.lucene.geo.GeoTestUtil; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrClient; @@ -45,7 +44,7 @@ import org.locationtech.spatial4j.distance.DistanceUtils; import org.locationtech.spatial4j.shape.Point; -//Unlike TestSolr4Spatial, not parametrized / not generic. +// Unlike TestSolr4Spatial, not parametrized / not generic. public class TestSolr4Spatial2 extends SolrTestCaseJ4 { @BeforeClass @@ -71,58 +70,86 @@ public void testQuadTreeRobustness() { @Test public void testBBox() throws Exception { String fieldName = random().nextBoolean() ? 
"bbox" : "bboxD_dynamic"; - assertU(adoc("id", "0"));//nothing + assertU(adoc("id", "0")); // nothing assertU(adoc("id", "1", fieldName, "ENVELOPE(-10, 20, 15, 10)")); - assertU(adoc("id", "2", fieldName, "ENVELOPE(22, 22, 10, 10)"));//pt + assertU(adoc("id", "2", fieldName, "ENVELOPE(22, 22, 10, 10)")); // pt assertU(commit()); - assertJQ(req("q", "{!field f="+fieldName+" filter=false score=overlapRatio " + - "queryTargetProportion=0.25}" + - "Intersects(ENVELOPE(10,25,12,10))", - "fl", "*,score", - "debug", "results"),//explain info + assertJQ( + req( + "q", + "{!field f=" + + fieldName + + " filter=false score=overlapRatio " + + "queryTargetProportion=0.25}" + + "Intersects(ENVELOPE(10,25,12,10))", + "fl", + "*,score", + "debug", + "results"), // explain info "/response/docs/[0]/id=='2'", "/response/docs/[0]/score==0.75]", "/response/docs/[1]/id=='1'", "/response/docs/[1]/score==0.26666668]", "/response/docs/[2]/id=='0'", "/response/docs/[2]/score==0.0", - - "/response/docs/[1]/" + fieldName + "=='ENVELOPE(-10, 20, 15, 10)'"//stored value + "/response/docs/[1]/" + fieldName + "=='ENVELOPE(-10, 20, 15, 10)'" // stored value ); - //minSideLength with point query - assertJQ(req("q", "{!field f="+fieldName+" filter=false score=overlapRatio " + - "queryTargetProportion=0.5 minSideLength=1}" + - "Intersects(ENVELOPE(0,0,12,12))",//pt - "fl", "id,score", - "debug", "results"),//explain info + // minSideLength with point query + assertJQ( + req( + "q", + "{!field f=" + + fieldName + + " filter=false score=overlapRatio " + + "queryTargetProportion=0.5 minSideLength=1}" + + "Intersects(ENVELOPE(0,0,12,12))", // pt + "fl", + "id,score", + "debug", + "results"), // explain info "/response/docs/[0]/id=='1'", - "/response/docs/[0]/score==0.50333333]"//just over 0.5 - ); - - //area2D - assertJQ(req("q", "{!field f=" + fieldName + " filter=false score=area2D}" + - "Intersects(ENVELOPE(0,0,12,12))",//pt - "fl", "id,score", - "debug", "results"),//explain info + "/response/docs/[0]/score==0.50333333]" // just over 0.5 + ); + + // area2D + assertJQ( + req( + "q", + "{!field f=" + + fieldName + + " filter=false score=area2D}" + + "Intersects(ENVELOPE(0,0,12,12))", // pt + "fl", + "id,score", + "debug", + "results"), // explain info "/response/docs/[0]/id=='1'", - "/response/docs/[0]/score==" + (30f * 5f) + "]"//150 - ); - //area (not 2D) - assertJQ(req("q", "{!field f=" + fieldName + " filter=false score=area}" + - "Intersects(ENVELOPE(0,0,12,12))",//pt - "fl", "id,score", - "debug", "results"),//explain info + "/response/docs/[0]/score==" + (30f * 5f) + "]" // 150 + ); + // area (not 2D) + assertJQ( + req( + "q", + "{!field f=" + + fieldName + + " filter=false score=area}" + + "Intersects(ENVELOPE(0,0,12,12))", // pt + "fl", + "id,score", + "debug", + "results"), // explain info "/response/docs/[0]/id=='1'", - "/response/docs/[0]/score==" + 146.39793f + "]"//a bit less than 150 - ); + "/response/docs/[0]/score==" + 146.39793f + "]" // a bit less than 150 + ); } @Test public void testBadScoreParam() throws Exception { String fieldName = "bbox"; - assertQEx("expect friendly error message", + assertQEx( + "expect friendly error message", "area2D", req("{!field f=" + fieldName + " filter=false score=bogus}Intersects(ENVELOPE(0,0,12,12))"), SolrException.ErrorCode.BAD_REQUEST); @@ -130,7 +157,7 @@ public void testBadScoreParam() throws Exception { @Test public void testRptWithGeometryField() throws Exception { - //note: fails with "srpt_geohash" because it's not as precise + // note: fails with "srpt_geohash" 
because it's not as precise final boolean testCache = true; final boolean testHeatmap = true; final boolean testPolygon = false; // default spatialContext doesn't handle this @@ -153,11 +180,13 @@ public void testRptWithGeometryGeo3dS2Field() throws Exception { testRptWithGeometryField("srptgeom_s2_geo3d", testCache, testHeatmap, testPolygon); } - @Test @Repeat(iterations = 10) + @Test + @Repeat(iterations = 10) public void testLLPDecodeIsStableAndPrecise() throws Exception { - // test that LatLonPointSpatialField decode of docValue will round-trip (re-index then re-decode) to the same value + // test that LatLonPointSpatialField decode of docValue will round-trip (re-index then + // re-decode) to the same value @SuppressWarnings({"resource", "IOResourceOpenedButNotSafelyClosed"}) - SolrClient client = new EmbeddedSolrServer(h.getCore());// do NOT close it; it will close Solr + SolrClient client = new EmbeddedSolrServer(h.getCore()); // do NOT close it; it will close Solr final String fld = "llp_1_dv_dvasst"; String ptOrig = GeoTestUtil.nextLatitude() + "," + GeoTestUtil.nextLongitude(); @@ -175,23 +204,35 @@ public void testLLPDecodeIsStableAndPrecise() throws Exception { // test that the representation is pretty accurate final Point ptOrigObj = SpatialUtils.parsePoint(ptOrig, SpatialContext.GEO); final Point ptDecodedObj = SpatialUtils.parsePoint(ptDecoded1, SpatialContext.GEO); - double deltaCentimeters = SpatialContext.GEO.calcDistance(ptOrigObj, ptDecodedObj) * DistanceUtils.DEG_TO_KM * 1000.0 * 100.0; - //See javadocs of LatLonDocValuesField for these constants - final Point absErrorPt = SpatialContext.GEO.getShapeFactory().pointXY(8.381903171539307E-8, 4.190951585769653E-8); - double deltaCentimetersMax - = SpatialContext.GEO.calcDistance(absErrorPt, 0,0) * DistanceUtils.DEG_TO_KM * 1000.0 * 100.0; - assertEquals(1.0420371840922256, deltaCentimetersMax, 0.0);// just so that we see it in black & white in the test - - //max found by trial & error. If we used 8 decimal places then we could get down to 1.04cm accuracy but then we - // lose the ability to round-trip -- 40 would become 39.99999997 (ugh). + double deltaCentimeters = + SpatialContext.GEO.calcDistance(ptOrigObj, ptDecodedObj) + * DistanceUtils.DEG_TO_KM + * 1000.0 + * 100.0; + // See javadocs of LatLonDocValuesField for these constants + final Point absErrorPt = + SpatialContext.GEO.getShapeFactory().pointXY(8.381903171539307E-8, 4.190951585769653E-8); + double deltaCentimetersMax = + SpatialContext.GEO.calcDistance(absErrorPt, 0, 0) + * DistanceUtils.DEG_TO_KM + * 1000.0 + * 100.0; + assertEquals( + 1.0420371840922256, + deltaCentimetersMax, + 0.0); // just so that we see it in black & white in the test + + // max found by trial & error. If we used 8 decimal places then we could get down to 1.04cm + // accuracy but then we lose the ability to round-trip -- 40 would become 39.99999997 (ugh). 
assertTrue("deltaCm too high: " + deltaCentimeters, deltaCentimeters < 1.41); // Pt(x=105.29894270124083,y=-0.4371673760042398) to Pt(x=105.2989428,y=-0.4371673) is 1.38568 } @Test public void testLatLonRetrieval() throws Exception { - final String ptHighPrecision = "40.2996543270,-74.0824956673"; - final String ptLossOfPrecision = "40.2996544,-74.0824957"; // rounded version of the one above, losing precision + final String ptHighPrecision = "40.2996543270,-74.0824956673"; + final String ptLossOfPrecision = + "40.2996544,-74.0824957"; // rounded version of the one above, losing precision // "_1" is single, "_N" is multiValued // "_dv" is docValues (otherwise not), "_dvasst" is useDocValuesAsStored (otherwise not) @@ -199,28 +240,37 @@ public void testLatLonRetrieval() throws Exception { // a random point using the number of decimal places we support for round-tripping. String randPointStr = - new BigDecimal(GeoTestUtil.nextLatitude()).setScale(7, RoundingMode.HALF_UP).stripTrailingZeros().toPlainString() + - "," + new BigDecimal(GeoTestUtil.nextLongitude()).setScale(7, RoundingMode.HALF_UP).stripTrailingZeros().toPlainString(); - - List combos = Arrays.asList( - new RetrievalCombo("llp_1_dv_st", ptHighPrecision), - new RetrievalCombo("llp_N_dv_st", Arrays.asList("-40,40", "-45,45")), - new RetrievalCombo("llp_N_dv_st", Arrays.asList("-40,40")), // multiValued but 1 value - - new RetrievalCombo("llp_1_dv_dvasst", ptHighPrecision, ptLossOfPrecision), - // this one comes back in a different order since it gets sorted low to high - new RetrievalCombo("llp_N_dv_dvasst", Arrays.asList("-40,40", "-45,45"), Arrays.asList("-45,45", "-40,40")), - new RetrievalCombo("llp_N_dv_dvasst", Arrays.asList(randPointStr)), // multiValued but 1 value - // edge cases. (note we sorted it as Lucene will internally) - new RetrievalCombo("llp_N_dv_dvasst", Arrays.asList( - "-90,180", "-90,-180", - "0,0", "0,180", "0,-180", - "90,0", "90,180", "90,-180")), - - new RetrievalCombo("llp_1_dv", ptHighPrecision, ptLossOfPrecision), - new RetrievalCombo("llp_N_dv", Arrays.asList("-45,45", "-40,40")) + new BigDecimal(GeoTestUtil.nextLatitude()) + .setScale(7, RoundingMode.HALF_UP) + .stripTrailingZeros() + .toPlainString() + + "," + + new BigDecimal(GeoTestUtil.nextLongitude()) + .setScale(7, RoundingMode.HALF_UP) + .stripTrailingZeros() + .toPlainString(); + + List combos = + Arrays.asList( + new RetrievalCombo("llp_1_dv_st", ptHighPrecision), + new RetrievalCombo("llp_N_dv_st", Arrays.asList("-40,40", "-45,45")), + new RetrievalCombo("llp_N_dv_st", Arrays.asList("-40,40")), // multiValued but 1 value + new RetrievalCombo("llp_1_dv_dvasst", ptHighPrecision, ptLossOfPrecision), + // this one comes back in a different order since it gets sorted low to high + new RetrievalCombo( + "llp_N_dv_dvasst", + Arrays.asList("-40,40", "-45,45"), + Arrays.asList("-45,45", "-40,40")), + new RetrievalCombo( + "llp_N_dv_dvasst", Arrays.asList(randPointStr)), // multiValued but 1 value + // edge cases. 
(note we sorted it as Lucene will internally) + new RetrievalCombo( + "llp_N_dv_dvasst", + Arrays.asList( + "-90,180", "-90,-180", "0,0", "0,180", "0,-180", "90,0", "90,180", "90,-180")), + new RetrievalCombo("llp_1_dv", ptHighPrecision, ptLossOfPrecision), + new RetrievalCombo("llp_N_dv", Arrays.asList("-45,45", "-40,40"))); - ); Collections.shuffle(combos, random()); // add and commit @@ -241,19 +291,29 @@ public void testLatLonRetrieval() throws Exception { List<String> assertJQsFlListed = new ArrayList<>(); List<String> assertJQsFlStar = new ArrayList<>(); for (RetrievalCombo combo : combos) { - String expect = "response/docs/[" + combo.id + "]/" + combo.fieldName + "==" + combo.expectReturnJSON; + String expect = + "response/docs/[" + combo.id + "]/" + combo.fieldName + "==" + combo.expectReturnJSON; assertJQsFlListed.add(expect); if (combo.fieldName.endsWith("_dv")) { - expect = "response/docs/[" + combo.id + "]=={'id':'" + combo.id + "'}"; // only the id, nothing else + expect = + "response/docs/[" + + combo.id + + "]=={'id':'" + + combo.id + + "'}"; // only the id, nothing else } assertJQsFlStar.add(expect); } // check - assertJQ(req("q","*:*", "sort", "id asc", - "fl","*"), - assertJQsFlStar.toArray(new String[0])); - assertJQ(req("q","*:*", "sort", "id asc", - "fl", "id," + combos.stream().map(c -> c.fieldName).collect(Collectors.joining(","))), + assertJQ(req("q", "*:*", "sort", "id asc", "fl", "*"), assertJQsFlStar.toArray(new String[0])); + assertJQ( + req( + "q", + "*:*", + "sort", + "id asc", + "fl", + "id," + combos.stream().map(c -> c.fieldName).collect(Collectors.joining(","))), assertJQsFlListed.toArray(new String[0])); } @@ -262,15 +322,22 @@ private static class RetrievalCombo { final int id = idCounter++; final String fieldName; final List<String> indexValues; - final String expectReturnJSON; //or null if not expected in response + final String expectReturnJSON; // or null if not expected in response + + RetrievalCombo(String fieldName, List<String> indexValues) { + this(fieldName, indexValues, indexValues); + } - RetrievalCombo(String fieldName, List<String> indexValues) { this(fieldName, indexValues, indexValues);} RetrievalCombo(String fieldName, List<String> indexValues, List<String> returnValues) { this.fieldName = fieldName; this.indexValues = indexValues; this.expectReturnJSON = returnValues.stream().collect(Collectors.joining("', '", "['", "']")); } - RetrievalCombo(String fieldName, String indexValue) { this(fieldName, indexValue, indexValue); } + + RetrievalCombo(String fieldName, String indexValue) { + this(fieldName, indexValue, indexValue); + } + RetrievalCombo(String fieldName, String indexValue, String returnValue) { this.fieldName = fieldName; this.indexValues = Collections.singletonList(indexValue); @@ -278,23 +345,39 @@ private static class RetrievalCombo { } } - private void testRptWithGeometryField(String fieldName, boolean testCache, boolean testHeatmap, boolean testPolygon) throws Exception { + private void testRptWithGeometryField( + String fieldName, boolean testCache, boolean testHeatmap, boolean testPolygon) + throws Exception { assertU(adoc("id", "0", fieldName, "ENVELOPE(-10, 20, 15, 10)")); - assertU(adoc("id", "1", fieldName, "BUFFER(POINT(-10 15), 5)"));//circle at top-left corner - assertU(optimize("maxSegments", "1"));// one segment. + assertU(adoc("id", "1", fieldName, "BUFFER(POINT(-10 15), 5)")); // circle at top-left corner + assertU(optimize("maxSegments", "1")); // one segment. assertU(commit()); - // Search to the edge but not quite touching the indexed envelope of id=0. 
It requires geom validation to - // eliminate id=0. id=1 is found and doesn't require validation. cache=false means no query cache. - final SolrQueryRequest sameReq = req( - "q", "{!cache=false field f=" + fieldName + "}Intersects(ENVELOPE(-20, -10.0001, 30, 15.0001))", - "sort", "id asc"); + // Search to the edge but not quite touching the indexed envelope of id=0. It requires geom + // validation to eliminate id=0. id=1 is found and doesn't require validation. cache=false + // means no query cache. + final SolrQueryRequest sameReq = + req( + "q", + "{!cache=false field f=" + + fieldName + + "}Intersects(ENVELOPE(-20, -10.0001, 30, 15.0001))", + "sort", + "id asc"); assertJQ(sameReq, "/response/numFound==1", "/response/docs/[0]/id=='1'"); if (testCache) { // The tricky thing is verifying the cache works correctly... - MetricsMap cacheMetrics = (MetricsMap) ((SolrMetricManager.GaugeWrapper)h.getCore().getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.perSegSpatialFieldCache_" + fieldName)).getGauge(); + MetricsMap cacheMetrics = + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) + h.getCore() + .getCoreMetricManager() + .getRegistry() + .getMetrics() + .get("CACHE.searcher.perSegSpatialFieldCache_" + fieldName)) + .getGauge(); assertEquals("1", cacheMetrics.getValue().get("cumulative_inserts").toString()); assertEquals("0", cacheMetrics.getValue().get("cumulative_hits").toString()); @@ -302,8 +385,7 @@ private void testRptWithGeometryField(String fieldName, boolean testCache, boole assertJQ(sameReq, "/response/numFound==1", "/response/docs/[0]/id=='1'"); assertEquals("1", cacheMetrics.getValue().get("cumulative_hits").toString()); - assertEquals("1 segment", - 1, getSearcher().getRawReader().leaves().size()); + assertEquals("1 segment", 1, getSearcher().getRawReader().leaves().size()); // Get key of first leaf reader -- this one contains the match for sure. Object leafKey1 = getFirstLeafReaderKey(); @@ -315,37 +397,62 @@ private void testRptWithGeometryField(String fieldName, boolean testCache, boole // can still find the same document assertJQ(sameReq, "/response/numFound==1", "/response/docs/[0]/id=='1'"); - // When there are new segments, we accumulate another hit. This tests the cache was not blown away on commit. - // (i.e. the cache instance is new but it should've been regenerated from the old one). - // Checking equality for the first reader's cache key indicates whether the cache should still be valid. + // When there are new segments, we accumulate another hit. This tests the cache was not blown + // away on commit. (i.e. the cache instance is new but it should've been regenerated from the + // old one). Checking equality for the first reader's cache key indicates whether the cache + // should still be valid. Object leafKey2 = getFirstLeafReaderKey(); - // get the current instance of metrics - the old one may not represent the current cache instance - cacheMetrics = (MetricsMap) ((SolrMetricManager.GaugeWrapper)h.getCore().getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.perSegSpatialFieldCache_" + fieldName)).getGauge(); - assertEquals(leafKey1.equals(leafKey2) ? 
"2" : "1", cacheMetrics.getValue().get("cumulative_hits").toString()); + // get the current instance of metrics - the old one may not represent the current cache + // instance + cacheMetrics = + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) + h.getCore() + .getCoreMetricManager() + .getRegistry() + .getMetrics() + .get("CACHE.searcher.perSegSpatialFieldCache_" + fieldName)) + .getGauge(); + assertEquals( + leafKey1.equals(leafKey2) ? "2" : "1", + cacheMetrics.getValue().get("cumulative_hits").toString()); } if (testHeatmap) { // Now try to see if heatmaps work: - assertJQ(req("q", "*:*", "facet", "true", FacetParams.FACET_HEATMAP, fieldName, "json.nl", "map"), + assertJQ( + req("q", "*:*", "facet", "true", FacetParams.FACET_HEATMAP, fieldName, "json.nl", "map"), "/facet_counts/facet_heatmaps/" + fieldName + "/minX==-180.0"); } if (testPolygon) { - String polygonWKT = "POLYGON((-11 12, -11 11, 10.5 12, -11 12))"; //right-angle triangle. Counter-clockwise order - assertJQ(req( - "q", "{!cache=false field f=" + fieldName + "}Intersects(" + polygonWKT + ")", - "sort", "id asc"), "/response/numFound==2"); - - assertU(adoc("id", "9", - fieldName, "POLYGON((" + // rectangle. Counter-clockwise order. - "-118.080201721669 54.5864541583249," + - "-118.080078279314 54.5864541583249," + - "-118.080078279314 54.5865258517606," + - "-118.080201721669 54.5865258517606," + - "-118.080201721669 54.5864541583249))" )); + String polygonWKT = + "POLYGON((-11 12, -11 11, 10.5 12, -11 12))"; // right-angle triangle. Counter-clockwise + // order + assertJQ( + req( + "q", + "{!cache=false field f=" + fieldName + "}Intersects(" + polygonWKT + ")", + "sort", + "id asc"), + "/response/numFound==2"); + + assertU( + adoc( + "id", + "9", + fieldName, + "POLYGON((" + + // rectangle. Counter-clockwise order. 
+ "-118.080201721669 54.5864541583249," + + "-118.080078279314 54.5864541583249," + + "-118.080078279314 54.5865258517606," + + "-118.080201721669 54.5865258517606," + + "-118.080201721669 54.5864541583249))")); assertU(commit()); // should NOT match - assertJQ(req("q", fieldName+":[55.0260828,-115.5085624 TO 55.02646,-115.507337]"), + assertJQ( + req("q", fieldName + ":[55.0260828,-115.5085624 TO 55.02646,-115.507337]"), "/response/numFound==0"); } } @@ -355,12 +462,11 @@ protected SolrIndexSearcher getSearcher() { return (SolrIndexSearcher) h.getCore().getInfoRegistry().get("searcher"); } - protected Object getFirstLeafReaderKey() { return getSearcher().getRawReader().leaves().get(0).reader().getCoreCacheHelper().getKey(); } - @Test// SOLR-8541 + @Test // SOLR-8541 public void testConstantScoreQueryWithFilterPartOnly() { final String[] doc1 = {"id", "1", "srptgeom", "56.9485,24.0980"}; assertU(adoc(doc1)); @@ -374,21 +480,23 @@ public void testConstantScoreQueryWithFilterPartOnly() { } @Test // SOLR-14802 - public void testGeodistSortPossibleWithLatLonPointSpatialFieldOrSpatialRecursivePrefixTreeField() throws Exception { + public void testGeodistSortPossibleWithLatLonPointSpatialFieldOrSpatialRecursivePrefixTreeField() + throws Exception { assertU(adoc("id", "1", "llp", "53.4721936,-2.24703", "srpt_quad", "53.425272,-2.322356")); assertU(commit()); - assertJQ(req( + assertJQ( + req( "q", "*:*", "fq", "{!geofilt}", "d", "50", "pt", "53.4721936,-2.24703", "sfield", "srpt_quad", - "sort", "min(geodist(),geodist(llp,53.4721936,-2.24703)) asc" - ), - "/response/docs/[0]/id=='1'"); + "sort", "min(geodist(),geodist(llp,53.4721936,-2.24703)) asc"), + "/response/docs/[0]/id=='1'"); - assertJQ(req( + assertJQ( + req( "q", "*:*", "fq", "{!geofilt}", "d", "50", @@ -396,53 +504,55 @@ public void testGeodistSortPossibleWithLatLonPointSpatialFieldOrSpatialRecursive "sfield", "srpt_quad", "sort", "min(geodist(),geodist(53.4721936,-2.24703,llp)) asc" // moved llp to the end ), - "/response/docs/[0]/id=='1'"); + "/response/docs/[0]/id=='1'"); - assertJQ(req( + assertJQ( + req( "q", "*:*", "fq", "{!geofilt}", "d", "50", "pt", "53.4721936,-2.24703", "sfield", "llp", // trying another field type - "sort", "min(geodist(),geodist(53.4721936,-2.24703,srpt_quad)) asc" - ), - "/response/docs/[0]/id=='1'"); + "sort", "min(geodist(),geodist(53.4721936,-2.24703,srpt_quad)) asc"), + "/response/docs/[0]/id=='1'"); } @Test // SOLR-14802 - public void testGeodistSortOrderCorrectWithLatLonPointSpatialFieldAndSpatialRecursivePrefixTreeField() throws Exception { + public void + testGeodistSortOrderCorrectWithLatLonPointSpatialFieldAndSpatialRecursivePrefixTreeField() + throws Exception { assertU(adoc("id", "1", "llp", "53.4721936,-2.24703", "srpt_quad", "53.4721936,-2.24703")); assertU(adoc("id", "2", "llp", "53.425272,-2.322356", "srpt_quad", "55.4721936,-2.24703")); assertU(commit()); - assertJQ(req( + assertJQ( + req( "q", "*:*", "fq", "{!geofilt}", "d", "50", "pt", "53.431669,-2.318720", "sfield", "srpt_quad", - "sort", "min(geodist(),geodist(llp,53.431669,-2.318720)) asc" - ), - "/response/docs/[0]/id=='2'"); + "sort", "min(geodist(),geodist(llp,53.431669,-2.318720)) asc"), + "/response/docs/[0]/id=='2'"); - assertJQ(req( + assertJQ( + req( "q", "*:*", "fq", "{!geofilt}", "d", "50", "pt", "53.4721936,-2.24703", "sfield", "srpt_quad", - "sort", "min(geodist(),geodist(53.4721936,-2.24703,llp)) asc" - ), - "/response/docs/[0]/id=='1'"); + "sort", "min(geodist(),geodist(53.4721936,-2.24703,llp)) asc"), + 
"/response/docs/[0]/id=='1'"); - assertJQ(req( + assertJQ( + req( "q", "*:*", "fq", "{!geofilt}", "d", "50", "pt", "55.4721936,-2.24703", "sfield", "srpt_quad", - "sort", "min(geodist(),geodist(55.4721936,-2.24703,llp)) asc" - ), - "/response/docs/[0]/id=='2'"); + "sort", "min(geodist(),geodist(55.4721936,-2.24703,llp)) asc"), + "/response/docs/[0]/id=='2'"); } } diff --git a/solr/core/src/test/org/apache/solr/search/TestSolrCachePerf.java b/solr/core/src/test/org/apache/solr/search/TestSolrCachePerf.java index 63111d1f401..c15a48c23fb 100644 --- a/solr/core/src/test/org/apache/solr/search/TestSolrCachePerf.java +++ b/solr/core/src/test/org/apache/solr/search/TestSolrCachePerf.java @@ -25,7 +25,6 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; - import org.apache.commons.math3.stat.descriptive.SummaryStatistics; import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.SolrTestCaseJ4; @@ -35,16 +34,12 @@ import org.junit.Test; import org.junit.runners.model.MultipleFailureException; -/** - * - */ +/** */ @LuceneTestCase.Slow public class TestSolrCachePerf extends SolrTestCaseJ4 { @SuppressWarnings({"unchecked", "rawtypes"}) - private static final Class[] IMPLS = new Class[] { - CaffeineCache.class - }; + private static final Class[] IMPLS = new Class[] {CaffeineCache.class}; private final int NUM_KEYS = 5000; private final String[] keys = new String[NUM_KEYS]; @@ -72,13 +67,19 @@ public void testGetPutCompute() throws Exception { doTestGetPutCompute(getPutRatio, getPutTime, threads, false); doTestGetPutCompute(computeRatio, computeTime, threads, true); } - computeRatio.forEach((type, computeStats) -> { - SummaryStatistics getPutStats = getPutRatio.get(type); - assertGreaterThanOrEqual( "Compute ratio should be higher or equal to get/put ratio", computeStats.getMean(), getPutStats.getMean(), 0.0001); - }); + computeRatio.forEach( + (type, computeStats) -> { + SummaryStatistics getPutStats = getPutRatio.get(type); + assertGreaterThanOrEqual( + "Compute ratio should be higher or equal to get/put ratio", + computeStats.getMean(), + getPutStats.getMean(), + 0.0001); + }); } - private void assertGreaterThanOrEqual(String message, double greater, double smaller, double delta) { + private void assertGreaterThanOrEqual( + String message, double greater, double smaller, double delta) { if (greater > smaller) { return; } else { @@ -91,7 +92,12 @@ private void assertGreaterThanOrEqual(String message, double greater, double sma static final String VALUE = "foo"; @SuppressWarnings({"rawtypes"}) - private void doTestGetPutCompute(Map ratioStats, Map timeStats, int numThreads, boolean useCompute) throws Exception { + private void doTestGetPutCompute( + Map ratioStats, + Map timeStats, + int numThreads, + boolean useCompute) + throws Exception { for (Class clazz : IMPLS) { SolrMetricManager metricManager = new SolrMetricManager(); @SuppressWarnings({"unchecked"}) @@ -103,38 +109,42 @@ private void doTestGetPutCompute(Map ratioStats, Map< cache.setState(SolrCache.State.LIVE); cache.initializeMetrics(new SolrMetricsContext(metricManager, "foo", "bar"), "foo"); AtomicBoolean stop = new AtomicBoolean(); - SummaryStatistics perImplRatio = ratioStats.computeIfAbsent(clazz.getSimpleName(), c -> new SummaryStatistics()); - SummaryStatistics perImplTime = timeStats.computeIfAbsent(clazz.getSimpleName(), c -> new SummaryStatistics()); + SummaryStatistics perImplRatio = + 
ratioStats.computeIfAbsent(clazz.getSimpleName(), c -> new SummaryStatistics()); + SummaryStatistics perImplTime = + timeStats.computeIfAbsent(clazz.getSimpleName(), c -> new SummaryStatistics()); CountDownLatch startLatch = new CountDownLatch(1); CountDownLatch stopLatch = new CountDownLatch(numThreads * NUM_KEYS); List runners = new ArrayList<>(); Set exceptions = ConcurrentHashMap.newKeySet(); for (int i = 0; i < numThreads; i++) { - Thread t = new Thread(() -> { - try { - startLatch.await(); - int ik = 0; - while (!stop.get()) { - String key = keys[ik % NUM_KEYS]; - ik++; - if (useCompute) { - String value = cache.computeIfAbsent(key, k -> VALUE); - assertNotNull(value); - } else { - String value = cache.get(key); - if (value == null) { - // increase a likelihood of context switch - Thread.yield(); - cache.put(key, VALUE); - } - } - Thread.yield(); - stopLatch.countDown(); // Does this need to be in a finally block? - } - } catch (InterruptedException | IOException e) { - exceptions.add(e); - } - }); + Thread t = + new Thread( + () -> { + try { + startLatch.await(); + int ik = 0; + while (!stop.get()) { + String key = keys[ik % NUM_KEYS]; + ik++; + if (useCompute) { + String value = cache.computeIfAbsent(key, k -> VALUE); + assertNotNull(value); + } else { + String value = cache.get(key); + if (value == null) { + // increase a likelihood of context switch + Thread.yield(); + cache.put(key, VALUE); + } + } + Thread.yield(); + stopLatch.countDown(); // Does this need to be in a finally block? + } + } catch (InterruptedException | IOException e) { + exceptions.add(e); + } + }); t.start(); runners.add(t); } @@ -146,14 +156,13 @@ private void doTestGetPutCompute(Map ratioStats, Map< for (Thread t : runners) { t.join(); } - if (! exceptions.isEmpty()) { + if (!exceptions.isEmpty()) { throw new MultipleFailureException(new ArrayList<>(exceptions)); } long stopTime = System.nanoTime(); Map metrics = cache.getSolrMetricsContext().getMetricsSnapshot(); - perImplRatio.addValue( - Double.parseDouble(String.valueOf(metrics.get("CACHE.foo.hitratio")))); - perImplTime.addValue((double)(stopTime - startTime)); + perImplRatio.addValue(Double.parseDouble(String.valueOf(metrics.get("CACHE.foo.hitratio")))); + perImplTime.addValue((double) (stopTime - startTime)); cache.close(); } } diff --git a/solr/core/src/test/org/apache/solr/search/TestSolrCoreParser.java b/solr/core/src/test/org/apache/solr/search/TestSolrCoreParser.java index def7a2135b1..016181794d3 100644 --- a/solr/core/src/test/org/apache/solr/search/TestSolrCoreParser.java +++ b/solr/core/src/test/org/apache/solr/search/TestSolrCoreParser.java @@ -43,15 +43,17 @@ public class TestSolrCoreParser extends SolrTestCaseJ4 { @BeforeClass public static void init() throws Exception { - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); } - + private SolrCoreParser solrCoreParser; private CoreParser solrCoreParser() { if (solrCoreParser == null) { final String defaultField = "contents"; - final Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, true, MockTokenFilter.ENGLISH_STOPSET); + final Analyzer analyzer = + new MockAnalyzer( + random(), MockTokenizer.WHITESPACE, true, MockTokenFilter.ENGLISH_STOPSET); solrCoreParser = new SolrCoreParser(defaultField, analyzer, req()); { final NamedList args = new NamedList<>(); @@ -73,10 +75,15 @@ private Query parseXmlString(String xml) throws IOException, ParserException { } private Query parseHandyQuery(String lhsXml, String rhsXml) throws IOException, 
ParserException { - final String xml = "<HandyQuery>" - + "<Left>" + lhsXml + "</Left>" - + "<Right>" + rhsXml + "</Right>" - + "</HandyQuery>"; + final String xml = + "<HandyQuery>" + + "<Left>" + + lhsXml + + "</Left>" + + "<Right>" + + rhsXml + + "</Right>" + + "</HandyQuery>"; return parseXmlString(xml); } @@ -92,13 +99,13 @@ public void testGoodbye() throws IOException, ParserException { public void testApacheLuceneSolr() throws IOException, ParserException { final String fieldName = "contents"; - final Query query = parseXmlString("<ApacheLuceneSolr fieldName='"+fieldName+"'/>"); + final Query query = parseXmlString("<ApacheLuceneSolr fieldName='" + fieldName + "'/>"); checkApacheLuceneSolr(query, fieldName); } private static void checkApacheLuceneSolr(Query query, String fieldName) { assertTrue(query instanceof SpanNearQuery); - final SpanNearQuery snq = (SpanNearQuery)query; + final SpanNearQuery snq = (SpanNearQuery) query; assertEquals(fieldName, snq.getField()); assertEquals(42, snq.getSlop()); assertFalse(snq.isInOrder()); @@ -114,7 +121,7 @@ public void testHandyQuery() throws IOException, ParserException { final String rhsXml = "<GoodbyeQuery/>"; final Query query = parseHandyQuery(lhsXml, rhsXml); assertTrue(query instanceof BooleanQuery); - final BooleanQuery bq = (BooleanQuery)query; + final BooleanQuery bq = (BooleanQuery) query; assertEquals(2, bq.clauses().size()); assertTrue(bq.clauses().get(0).getQuery() instanceof MatchAllDocsQuery); assertTrue(bq.clauses().get(1).getQuery() instanceof MatchNoDocsQuery); @@ -122,19 +129,21 @@ // test custom query (HandyQueryBuilder) wrapping a SpanQuery public void testHandySpanQuery() throws IOException, ParserException { - final String lhsXml = "<SpanOr fieldName='contents'>" - + "<SpanTerm>rain</SpanTerm>" - + "<SpanTerm>spain</SpanTerm>" - + "<SpanTerm>plain</SpanTerm>" - + "</SpanOr>"; - final String rhsXml = "<SpanOr fieldName='contents'>" - + "<SpanTerm>sunny</SpanTerm>" - + "<SpanTerm>sky</SpanTerm>" - + "</SpanOr>"; + final String lhsXml = + "<SpanOr fieldName='contents'>" + + "<SpanTerm>rain</SpanTerm>" + + "<SpanTerm>spain</SpanTerm>" + + "<SpanTerm>plain</SpanTerm>" + + "</SpanOr>"; + final String rhsXml = + "<SpanOr fieldName='contents'>" + + "<SpanTerm>sunny</SpanTerm>" + + "<SpanTerm>sky</SpanTerm>" + + "</SpanOr>"; final Query query = parseHandyQuery(lhsXml, rhsXml); - final BooleanQuery bq = (BooleanQuery)query; + final BooleanQuery bq = (BooleanQuery) query; assertEquals(2, bq.clauses().size()); - for (int ii=0; ii<bq.clauses().size(); ++ii) { + for (int ii = 0; ii < bq.clauses().size(); ++ii) { assertTrue(bq.clauses().get(ii).getQuery() instanceof SpanOrQuery); } } private static String composeChooseOneWordQueryXml(String fieldName, String... termTexts) { - final StringBuilder sb = new StringBuilder("<ChooseOneWord fieldName='"+fieldName+"'>"); + final StringBuilder sb = new StringBuilder("<ChooseOneWord fieldName='" + fieldName + "'>"); for (String termText : termTexts) { sb.append("<Word>").append(termText).append("</Word>"); } @@ -165,15 +174,24 @@ public void testCustomQueryWrapping() throws IOException, ParserException { final String fieldName = "contents"; final String[] randomTerms = new String[] {"bumble", "honey", "solitary"}; final String randomQuery = composeChooseOneWordQueryXml(fieldName, randomTerms); - final String apacheLuceneSolr = "<ApacheLuceneSolr fieldName='"+fieldName+"'/>"; + final String apacheLuceneSolr = "<ApacheLuceneSolr fieldName='" + fieldName + "'/>"; // the wrapping query final String parentQuery = (span ? "SpanOr" : "BooleanQuery"); final String subQueryPrefix = (span ? "" : "<Clause occurs='must'>"); final String subQuerySuffix = (span ? 
"" : ""); - final String xml = "<"+parentQuery+">" - + subQueryPrefix+randomQuery+subQuerySuffix - + subQueryPrefix+apacheLuceneSolr+subQuerySuffix - + ""; + final String xml = + "<" + + parentQuery + + ">" + + subQueryPrefix + + randomQuery + + subQuerySuffix + + subQueryPrefix + + apacheLuceneSolr + + subQuerySuffix + + ""; // the test final Query query = parseXmlString(xml); if (span) { @@ -184,22 +202,23 @@ public void testCustomQueryWrapping() throws IOException, ParserException { checkApacheLuceneSolr(soq.getClauses()[1], fieldName); } else { assertTrue(query instanceof BooleanQuery); - final BooleanQuery bq = (BooleanQuery)query; + final BooleanQuery bq = (BooleanQuery) query; assertEquals(2, bq.clauses().size()); checkChooseOneWordQuery(span, bq.clauses().get(0).getQuery(), fieldName, randomTerms); checkApacheLuceneSolr(bq.clauses().get(1).getQuery(), fieldName); } } - private static void checkChooseOneWordQuery(boolean span, Query query, String fieldName, String ... expectedTermTexts) { + private static void checkChooseOneWordQuery( + boolean span, Query query, String fieldName, String... expectedTermTexts) { final Term term; if (span) { assertTrue(query instanceof SpanTermQuery); - final SpanTermQuery stq = (SpanTermQuery)query; + final SpanTermQuery stq = (SpanTermQuery) query; term = stq.getTerm(); } else { assertTrue(query instanceof TermQuery); - final TermQuery tq = (TermQuery)query; + final TermQuery tq = (TermQuery) query; term = tq.getTerm(); } final String text = term.text(); @@ -208,7 +227,8 @@ private static void checkChooseOneWordQuery(boolean span, Query query, String fi foundExpected |= expected.equals(text); } assertEquals(fieldName, term.field()); - assertTrue("expected term text ("+text+") not found in ("+expectedTermTexts+")", foundExpected); + assertTrue( + "expected term text (" + text + ") not found in (" + expectedTermTexts + ")", + foundExpected); } - } diff --git a/solr/core/src/test/org/apache/solr/search/TestSolrFieldCacheBean.java b/solr/core/src/test/org/apache/solr/search/TestSolrFieldCacheBean.java index 49b754ff929..7dc8d4895b3 100644 --- a/solr/core/src/test/org/apache/solr/search/TestSolrFieldCacheBean.java +++ b/solr/core/src/test/org/apache/solr/search/TestSolrFieldCacheBean.java @@ -16,9 +16,11 @@ */ package org.apache.solr.search; +import java.lang.invoke.MethodHandles; +import java.util.Map; +import java.util.Random; import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; - import org.apache.solr.metrics.MetricsMap; import org.apache.solr.metrics.SolrMetricManager; import org.apache.solr.metrics.SolrMetricsContext; @@ -27,17 +29,13 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.lang.invoke.MethodHandles; -import java.util.Map; -import java.util.Random; - public class TestSolrFieldCacheBean extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema-minimal.xml"); + initCore("solrconfig.xml", "schema-minimal.xml"); } @Test @@ -75,11 +73,16 @@ private void assertEntryListIncluded(boolean checkJmx) { Random r = random(); String registryName = TestUtil.randomSimpleString(r, 1, 10); SolrMetricManager metricManager = h.getCoreContainer().getMetricManager(); - SolrMetricsContext solrMetricsContext = new SolrMetricsContext(metricManager, registryName, "foo"); + SolrMetricsContext solrMetricsContext = + new SolrMetricsContext(metricManager, 
registryName, "foo"); mbean.initializeMetrics(solrMetricsContext, null); - MetricsMap metricsMap = (MetricsMap)((SolrMetricManager.GaugeWrapper)metricManager.registry(registryName).getMetrics().get("CACHE.fieldCache")).getGauge(); + MetricsMap metricsMap = + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) + metricManager.registry(registryName).getMetrics().get("CACHE.fieldCache")) + .getGauge(); Map metrics = checkJmx ? metricsMap.getValue(true) : metricsMap.getValue(); - assertTrue(((Number)metrics.get("entries_count")).longValue() > 0); + assertTrue(((Number) metrics.get("entries_count")).longValue() > 0); assertNotNull(metrics.get("total_size")); assertNotNull(metrics.get("entry#0")); } @@ -89,11 +92,16 @@ private void assertEntryListNotIncluded(boolean checkJmx) { Random r = random(); String registryName = TestUtil.randomSimpleString(r, 1, 10); SolrMetricManager metricManager = h.getCoreContainer().getMetricManager(); - SolrMetricsContext solrMetricsContext = new SolrMetricsContext(metricManager, registryName, "foo"); + SolrMetricsContext solrMetricsContext = + new SolrMetricsContext(metricManager, registryName, "foo"); mbean.initializeMetrics(solrMetricsContext, null); - MetricsMap metricsMap = (MetricsMap)((SolrMetricManager.GaugeWrapper)metricManager.registry(registryName).getMetrics().get("CACHE.fieldCache")).getGauge(); + MetricsMap metricsMap = + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) + metricManager.registry(registryName).getMetrics().get("CACHE.fieldCache")) + .getGauge(); Map metrics = checkJmx ? metricsMap.getValue(true) : metricsMap.getValue(); - assertTrue(((Number)metrics.get("entries_count")).longValue() > 0); + assertTrue(((Number) metrics.get("entries_count")).longValue() > 0); assertNull(metrics.get("total_size")); assertNull(metrics.get("entry#0")); } diff --git a/solr/core/src/test/org/apache/solr/search/TestSolrJ.java b/solr/core/src/test/org/apache/solr/search/TestSolrJ.java index ede71b4ebb1..b7136afb76a 100644 --- a/solr/core/src/test/org/apache/solr/search/TestSolrJ.java +++ b/solr/core/src/test/org/apache/solr/search/TestSolrJ.java @@ -16,7 +16,11 @@ */ package org.apache.solr.search; - +import java.io.IOException; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Random; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; @@ -25,17 +29,10 @@ import org.apache.solr.common.SolrInputDocument; import org.apache.solr.util.RTimer; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.Random; - - public class TestSolrJ extends SolrTestCaseJ4 { public void testSolrJ() throws Exception { - // docs, producers, connections, sleep_time + // docs, producers, connections, sleep_time // main(new String[] {"1000000","4", "1", "0"}); // doCommitPerf(); @@ -59,7 +56,7 @@ public static void main(String[] args) throws Exception { ConcurrentUpdateSolrClient concurrentClient = null; // server = concurrentClient = new ConcurrentUpdateSolrServer(addr,32,8); - client = concurrentClient = getConcurrentUpdateSolrClient(addr,64,nConnections); + client = concurrentClient = getConcurrentUpdateSolrClient(addr, 64, nConnections); client.deleteByQuery("*:*"); client.commit(); @@ -70,27 +67,28 @@ public static void main(String[] args) throws Exception { Thread[] threads = new Thread[nProducers]; - for (int threadNum = 0; threadNum> 4)); - int golden = (int)2654435761L; + int 
golden = (int) 2654435761L; int h = docnum * golden; int n = (h & 0xff) + 1; @SuppressWarnings({"rawtypes"}) List lst = new ArrayList(n); - for (int i=0; i sowFalseParamsMap = new HashMap<>(); sowFalseParamsMap.put("sow", "false"); Map sowTrueParamsMap = new HashMap<>(); sowTrueParamsMap.put("sow", "true"); - List paramMaps = Arrays.asList - (new MapSolrParams(Collections.emptyMap()), // no sow param (i.e. the default sow value) - new MapSolrParams(sowFalseParamsMap), - new MapSolrParams(sowTrueParamsMap)); + List paramMaps = + Arrays.asList( + new MapSolrParams(Collections.emptyMap()), // no sow param (i.e. the default sow value) + new MapSolrParams(sowFalseParamsMap), + new MapSolrParams(sowTrueParamsMap)); for (MapSolrParams params : paramMaps) { // relevance query should not be a filter @@ -371,81 +353,92 @@ public void testAutoTerms() throws Exception { } // large relevancy query should use BooleanQuery - // TODO: we may decide that string fields shouldn't have relevance in the future... change to a text field w/o a stop filter if so - qParser = QParser.getParser("foo_s:(a b c d e f g h i j k l m n o p q r s t u v w x y z)", req); + // TODO: we may decide that string fields shouldn't have relevance in the future... change to + // a text field w/o a stop filter if so + qParser = + QParser.getParser("foo_s:(a b c d e f g h i j k l m n o p q r s t u v w x y z)", req); qParser.setParams(params); q = qParser.getQuery(); - assertEquals(26, ((BooleanQuery)q).clauses().size()); + assertEquals(26, ((BooleanQuery) q).clauses().size()); // large filter query should use TermsQuery - qParser = QParser.getParser("foo_s:(a b c d e f g h i j k l m n o p q r s t u v w x y z)", req); + qParser = + QParser.getParser("foo_s:(a b c d e f g h i j k l m n o p q r s t u v w x y z)", req); qParser.setIsFilter(true); // this may change in the future qParser.setParams(params); q = qParser.getQuery(); - assertEquals(26, ((TermInSetQuery)q).getTermData().size()); + assertEquals(26, ((TermInSetQuery) q).getTermData().size()); // large numeric filter query should use TermsQuery - qParser = QParser.getParser("foo_ti:(1 2 3 4 5 6 7 8 9 10 20 19 18 17 16 15 14 13 12 11)", req); + qParser = + QParser.getParser("foo_ti:(1 2 3 4 5 6 7 8 9 10 20 19 18 17 16 15 14 13 12 11)", req); qParser.setIsFilter(true); // this may change in the future qParser.setParams(params); q = qParser.getQuery(); if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) { - assertEquals(20, ((PointInSetQuery)q).getPackedPoints().size()); + assertEquals(20, ((PointInSetQuery) q).getPackedPoints().size()); } else { - assertEquals(20, ((TermInSetQuery)q).getTermData().size()); + assertEquals(20, ((TermInSetQuery) q).getTermData().size()); } // for point fields large filter query should use PointInSetQuery - qParser = QParser.getParser("foo_pi:(1 2 3 4 5 6 7 8 9 10 20 19 18 17 16 15 14 13 12 11)", req); + qParser = + QParser.getParser("foo_pi:(1 2 3 4 5 6 7 8 9 10 20 19 18 17 16 15 14 13 12 11)", req); qParser.setIsFilter(true); // this may change in the future qParser.setParams(params); q = qParser.getQuery(); assertTrue(q instanceof PointInSetQuery); - assertEquals(20, ((PointInSetQuery)q).getPackedPoints().size()); + assertEquals(20, ((PointInSetQuery) q).getPackedPoints().size()); // a filter() clause inside a relevancy query should be able to use a TermsQuery - qParser = QParser.getParser("foo_s:aaa filter(foo_s:(a b c d e f g h i j k l m n o p q r s t u v w x y z))", req); + qParser = + QParser.getParser( + "foo_s:aaa filter(foo_s:(a b c d e f g h i j k 
l m n o p q r s t u v w x y z))", req); qParser.setParams(params); q = qParser.getQuery(); - assertEquals(2, ((BooleanQuery)q).clauses().size()); - qq = ((BooleanQuery)q).clauses().get(0).getQuery(); + assertEquals(2, ((BooleanQuery) q).clauses().size()); + qq = ((BooleanQuery) q).clauses().get(0).getQuery(); if (qq instanceof TermQuery) { - qq = ((BooleanQuery)q).clauses().get(1).getQuery(); + qq = ((BooleanQuery) q).clauses().get(1).getQuery(); } if (qq instanceof FilterQuery) { - qq = ((FilterQuery)qq).getQuery(); + qq = ((FilterQuery) qq).getQuery(); } assertEquals(26, ((TermInSetQuery) qq).getTermData().size()); // test mixed boolean query, including quotes (which shouldn't matter) - qParser = QParser.getParser("foo_s:(a +aaa b -bbb c d e f bar_s:(qqq www) g h i j k l m n o p q r s t u v w x y z)", req); + qParser = + QParser.getParser( + "foo_s:(a +aaa b -bbb c d e f bar_s:(qqq www) g h i j k l m n o p q r s t u v w x y z)", + req); qParser.setIsFilter(true); // this may change in the future qParser.setParams(params); q = qParser.getQuery(); - assertEquals(4, ((BooleanQuery)q).clauses().size()); + assertEquals(4, ((BooleanQuery) q).clauses().size()); qq = null; - for (BooleanClause clause : ((BooleanQuery)q).clauses()) { + for (BooleanClause clause : ((BooleanQuery) q).clauses()) { qq = clause.getQuery(); if (qq instanceof TermInSetQuery) break; } - assertEquals(26, ((TermInSetQuery)qq).getTermData().size()); + assertEquals(26, ((TermInSetQuery) qq).getTermData().size()); - // test terms queries of two different fields (LUCENE-7637 changed to require all terms be in the same field) + // test terms queries of two different fields (LUCENE-7637 changed to require all terms be in + // the same field) StringBuilder sb = new StringBuilder(); - for (int i=0; i<17; i++) { - char letter = (char)('a'+i); + for (int i = 0; i < 17; i++) { + char letter = (char) ('a' + i); sb.append("foo_s:" + letter + " bar_s:" + letter + " "); } qParser = QParser.getParser(sb.toString(), req); qParser.setIsFilter(true); // this may change in the future qParser.setParams(params); q = qParser.getQuery(); - assertEquals(2, ((BooleanQuery)q).clauses().size()); - for (BooleanClause clause : ((BooleanQuery)q).clauses()) { + assertEquals(2, ((BooleanQuery) q).clauses().size()); + for (BooleanClause clause : ((BooleanQuery) q).clauses()) { qq = clause.getQuery(); - assertEquals(17, ((TermInSetQuery)qq).getTermData().size()); + assertEquals(17, ((TermInSetQuery) qq).getTermData().size()); } } req.close(); @@ -454,30 +447,30 @@ public void testAutoTerms() throws Exception { @Test public void testManyClauses_Solr() throws Exception { final String a = "1 a 2 b 3 c 10 d 11 12 "; // 10 terms - + // this should exceed our solrconfig.xml level (solr specific) maxBooleanClauses limit // even though it's not long enough to trip the Lucene level (global) limit final String too_long = "id:(" + a + a + a + a + a + ")"; final String expectedMsg = "Too many clauses"; ignoreException(expectedMsg); - SolrException e = expectThrows(SolrException.class, "expected SolrException", - () -> assertJQ(req("q", too_long), "/response/numFound==6")); + SolrException e = + expectThrows( + SolrException.class, + "expected SolrException", + () -> assertJQ(req("q", too_long), "/response/numFound==6")); assertThat(e.getMessage(), containsString(expectedMsg)); - + // but should still work as a filter query since TermsQuery can be used... 
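// A minimal sketch, assuming stock Lucene 9 APIs, of the mechanism behind the
// comment above and the assertions that follow: a flat disjunction built as one
// BooleanClause per term is counted against IndexSearcher's TooManyClauses
// limit, while the same terms collapsed into a single TermInSetQuery amount to
// one clause, which is why the over-long query still works in filter context.
// The class, helper, and field values below are illustrative only, not part of
// this patch.
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;

class ClauseLimitSketch {
  // Builds an OR over the given values for one field.
  static Query oredTerms(String field, List<String> values, boolean filterContext) {
    if (filterContext) {
      // Scoring is irrelevant in filter context, so every term fits into a
      // single query object and the clause limit never comes into play.
      List<BytesRef> terms = new ArrayList<>();
      for (String v : values) {
        terms.add(new BytesRef(v));
      }
      return new TermInSetQuery(field, terms);
    }
    // Relevancy context: one scoring SHOULD clause per term, so enough values
    // will eventually trip IndexSearcher.TooManyClauses.
    BooleanQuery.Builder b = new BooleanQuery.Builder();
    for (String v : values) {
      b.add(new TermQuery(new Term(field, v)), BooleanClause.Occur.SHOULD);
    }
    return b.build();
  }
}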
- assertJQ(req("q","*:*", "fq", too_long) - ,"/response/numFound==6"); - assertJQ(req("q","*:*", "fq", too_long, "sow", "false") - ,"/response/numFound==6"); - assertJQ(req("q","*:*", "fq", too_long, "sow", "true") - ,"/response/numFound==6"); + assertJQ(req("q", "*:*", "fq", too_long), "/response/numFound==6"); + assertJQ(req("q", "*:*", "fq", too_long, "sow", "false"), "/response/numFound==6"); + assertJQ(req("q", "*:*", "fq", too_long, "sow", "true"), "/response/numFound==6"); } - + @Test public void testManyClauses_Lucene() throws Exception { final int numZ = IndexSearcher.getMaxClauseCount(); - + final String a = "1 a 2 b 3 c 10 d 11 12 "; // 10 terms final StringBuilder sb = new StringBuilder("id:("); for (int i = 0; i < numZ; i++) { @@ -485,48 +478,44 @@ public void testManyClauses_Lucene() throws Exception { } sb.append(a); sb.append(")"); - + // this should trip the lucene level global BooleanQuery.getMaxClauseCount() limit, // causing a parsing error, before Solr even get's a chance to enforce it's lower level limit final String way_too_long = sb.toString(); final String expectedMsg = "too many boolean clauses"; ignoreException(expectedMsg); - SolrException e = expectThrows(SolrException.class, "expected SolrException", - () -> assertJQ(req("q", way_too_long), "/response/numFound==6")); + SolrException e = + expectThrows( + SolrException.class, + "expected SolrException", + () -> assertJQ(req("q", way_too_long), "/response/numFound==6")); assertThat(e.getMessage(), containsString(expectedMsg)); - + assertNotNull(e.getCause()); assertEquals(SyntaxError.class, e.getCause().getClass()); - + assertNotNull(e.getCause().getCause()); assertEquals(IndexSearcher.TooManyClauses.class, e.getCause().getCause().getClass()); // but should still work as a filter query since TermsQuery can be used... - assertJQ(req("q","*:*", "fq", way_too_long) - ,"/response/numFound==6"); - assertJQ(req("q","*:*", "fq", way_too_long, "sow", "false") - ,"/response/numFound==6"); - assertJQ(req("q","*:*", "fq", way_too_long, "sow", "true") - ,"/response/numFound==6"); + assertJQ(req("q", "*:*", "fq", way_too_long), "/response/numFound==6"); + assertJQ(req("q", "*:*", "fq", way_too_long, "sow", "false"), "/response/numFound==6"); + assertJQ(req("q", "*:*", "fq", way_too_long, "sow", "true"), "/response/numFound==6"); } @Test public void testComments() throws Exception { - assertJQ(req("q", "id:1 id:2 /* *:* */ id:3") - , "/response/numFound==3" - ); + assertJQ(req("q", "id:1 id:2 /* *:* */ id:3"), "/response/numFound==3"); // - assertJQ(req("q", "id:1 /**.*/") - , "/response/numFound==1" // if it matches more than one, it's being treated as a regex. - ); - + assertJQ( + req("q", "id:1 /**.*/"), + "/response/numFound==1" // if it matches more than one, it's being treated as a regex. + ); // don't match comment start in string - assertJQ(req("q", " \"/*\" id:1 id:2 \"*/\" id:3") - , "/response/numFound==3" - ); + assertJQ(req("q", " \"/*\" id:1 id:2 \"*/\" id:3"), "/response/numFound==3"); // don't match an end of comment within a string // assertJQ(req("q","id:1 id:2 /* \"*/\" *:* */ id:3") @@ -536,10 +525,8 @@ public void testComments() throws Exception { // can't do it */ ......... 
' // nested comments - assertJQ(req("q", "id:1 /* id:2 /* */ /* /**/ id:3 */ id:10 */ id:11") - , "/response/numFound==2" - ); - + assertJQ( + req("q", "id:1 /* id:2 /* */ /* /**/ id:3 */ id:10 */ id:11"), "/response/numFound==2"); } @Test @@ -547,139 +534,187 @@ public void testFilter() throws Exception { // normal test "solrconfig.xml" has autowarm set to 2... for (int i = 0; i < 10; i++) { - assertJQ(req("q", "*:* " + i, "fq", "filter(just_to_clear_the_cache) filter(id:10000" + i + ") filter(id:10001" + i + ")") - , "/response/numFound==0" - ); + assertJQ( + req( + "q", + "*:* " + i, + "fq", + "filter(just_to_clear_the_cache) filter(id:10000" + + i + + ") filter(id:10001" + + i + + ")"), + "/response/numFound==0"); } assertU(adoc("id", "777")); delI("777"); - assertU(commit()); // arg... commit no longer "commits" unless there has been a change. - - - final MetricsMap filterCacheStats = (MetricsMap)((SolrMetricManager.GaugeWrapper)h.getCore().getCoreMetricManager().getRegistry() - .getMetrics().get("CACHE.searcher.filterCache")).getGauge(); + assertU(commit()); // arg... commit no longer "commits" unless there has been a change. + + final MetricsMap filterCacheStats = + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) + h.getCore() + .getCoreMetricManager() + .getRegistry() + .getMetrics() + .get("CACHE.searcher.filterCache")) + .getGauge(); assertNotNull(filterCacheStats); - final MetricsMap queryCacheStats = (MetricsMap)((SolrMetricManager.GaugeWrapper)h.getCore().getCoreMetricManager().getRegistry() - .getMetrics().get("CACHE.searcher.queryResultCache")).getGauge(); + final MetricsMap queryCacheStats = + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) + h.getCore() + .getCoreMetricManager() + .getRegistry() + .getMetrics() + .get("CACHE.searcher.queryResultCache")) + .getGauge(); assertNotNull(queryCacheStats); - long inserts = (Long) filterCacheStats.getValue().get("inserts"); long hits = (Long) filterCacheStats.getValue().get("hits"); - assertJQ(req("q", "doesnotexist filter(id:1) filter(qqq_s:X) filter(abcdefg)") - , "/response/numFound==2" - ); + assertJQ( + req("q", "doesnotexist filter(id:1) filter(qqq_s:X) filter(abcdefg)"), + "/response/numFound==2"); inserts += 3; - assertEquals("wrong number of inserts", inserts, ((Long) filterCacheStats.getValue().get("inserts")).longValue()); - assertEquals("wrong number of hits", hits, ((Long) filterCacheStats.getValue().get("hits")).longValue()); + assertEquals( + "wrong number of inserts", + inserts, + ((Long) filterCacheStats.getValue().get("inserts")).longValue()); + assertEquals( + "wrong number of hits", hits, ((Long) filterCacheStats.getValue().get("hits")).longValue()); - assertJQ(req("q", "doesnotexist2 filter(id:1) filter(qqq_s:X) filter(abcdefg)") - , "/response/numFound==2" - ); + assertJQ( + req("q", "doesnotexist2 filter(id:1) filter(qqq_s:X) filter(abcdefg)"), + "/response/numFound==2"); hits += 3; - assertEquals("wrong number of inserts", inserts, ((Long) filterCacheStats.getValue().get("inserts")).longValue()); - assertEquals("wrong number of hits", hits, ((Long) filterCacheStats.getValue().get("hits")).longValue()); + assertEquals( + "wrong number of inserts", + inserts, + ((Long) filterCacheStats.getValue().get("inserts")).longValue()); + assertEquals( + "wrong number of hits", hits, ((Long) filterCacheStats.getValue().get("hits")).longValue()); // make sure normal "fq" parameters also hit the cache the same way - assertJQ(req("q", "doesnotexist3", "fq", "id:1", "fq", "qqq_s:X", "fq", "abcdefg") - , 
"/response/numFound==0" - ); + assertJQ( + req("q", "doesnotexist3", "fq", "id:1", "fq", "qqq_s:X", "fq", "abcdefg"), + "/response/numFound==0"); hits += 3; - assertEquals("wrong number of inserts", inserts, ((Long) filterCacheStats.getValue().get("inserts")).longValue()); - assertEquals("wrong number of hits", hits, ((Long) filterCacheStats.getValue().get("hits")).longValue()); + assertEquals( + "wrong number of inserts", + inserts, + ((Long) filterCacheStats.getValue().get("inserts")).longValue()); + assertEquals( + "wrong number of hits", hits, ((Long) filterCacheStats.getValue().get("hits")).longValue()); // try a query deeply nested in a FQ - assertJQ(req("q", "*:* doesnotexist4", "fq", "(id:* +(filter(id:1) filter(qqq_s:X) filter(abcdefg)) )") - , "/response/numFound==2" - ); - - inserts += 1; // +1 for top level fq + assertJQ( + req( + "q", + "*:* doesnotexist4", + "fq", + "(id:* +(filter(id:1) filter(qqq_s:X) filter(abcdefg)) )"), + "/response/numFound==2"); + + inserts += 1; // +1 for top level fq hits += 3; - assertEquals("wrong number of inserts", inserts, ((Long) filterCacheStats.getValue().get("inserts")).longValue()); - assertEquals("wrong number of hits", hits, ((Long) filterCacheStats.getValue().get("hits")).longValue()); + assertEquals( + "wrong number of inserts", + inserts, + ((Long) filterCacheStats.getValue().get("inserts")).longValue()); + assertEquals( + "wrong number of hits", hits, ((Long) filterCacheStats.getValue().get("hits")).longValue()); // retry the complex FQ and make sure hashCode/equals works as expected w/ filter queries - assertJQ(req("q", "*:* doesnotexist5", "fq", "(id:* +(filter(id:1) filter(qqq_s:X) filter(abcdefg)) )") - , "/response/numFound==2" - ); - - hits += 1; // top-level fq should have been found. - assertEquals("wrong number of inserts", inserts, ((Long) filterCacheStats.getValue().get("inserts")).longValue()); - assertEquals("wrong number of hits", hits, ((Long) filterCacheStats.getValue().get("hits")).longValue()); - + assertJQ( + req( + "q", + "*:* doesnotexist5", + "fq", + "(id:* +(filter(id:1) filter(qqq_s:X) filter(abcdefg)) )"), + "/response/numFound==2"); + + hits += 1; // top-level fq should have been found. + assertEquals( + "wrong number of inserts", + inserts, + ((Long) filterCacheStats.getValue().get("inserts")).longValue()); + assertEquals( + "wrong number of hits", hits, ((Long) filterCacheStats.getValue().get("hits")).longValue()); // try nested filter with multiple top-level args (i.e. 
a boolean query) - assertJQ(req("q", "*:* +filter(id:1 filter(qqq_s:X) abcdefg)") - , "/response/numFound==2" - ); + assertJQ(req("q", "*:* +filter(id:1 filter(qqq_s:X) abcdefg)"), "/response/numFound==2"); - hits += 1; // the inner filter + hits += 1; // the inner filter inserts += 1; // the outer filter - assertEquals("wrong number of inserts", inserts, ((Long) filterCacheStats.getValue().get("inserts")).longValue()); - assertEquals("wrong number of hits", hits, ((Long) filterCacheStats.getValue().get("hits")).longValue()); + assertEquals( + "wrong number of inserts", + inserts, + ((Long) filterCacheStats.getValue().get("inserts")).longValue()); + assertEquals( + "wrong number of hits", hits, ((Long) filterCacheStats.getValue().get("hits")).longValue()); // test the score for a filter, and that default score is 0 - assertJQ(req("q", "+filter(*:*) +filter(id:1)", "fl", "id,score", "sort", "id asc") - , "/response/docs/[0]/score==0.0" - ); + assertJQ( + req("q", "+filter(*:*) +filter(id:1)", "fl", "id,score", "sort", "id asc"), + "/response/docs/[0]/score==0.0"); - assertJQ(req("q", "+filter(*:*)^=10 +filter(id:1)", "fl", "id,score", "sort", "id asc") - , "/response/docs/[0]/score==10.0" - ); + assertJQ( + req("q", "+filter(*:*)^=10 +filter(id:1)", "fl", "id,score", "sort", "id asc"), + "/response/docs/[0]/score==10.0"); assertU(adoc("id", "40", "wdf_nocase", "just some text, don't want NPE")); assertU(commit()); // See SOLR-11555. If wdff removes all the characters, an NPE occurs. // try q and fq - assertJQ(req("q", "filter(wdf_nocase:&)", "fl", "id", "debug", "query") - , "/response/numFound==0" - ); - assertJQ(req("fq", "filter(wdf_nocase:.,)", "fl", "id", "debug", "query") - , "/response/numFound==0" - ); + assertJQ( + req("q", "filter(wdf_nocase:&)", "fl", "id", "debug", "query"), "/response/numFound==0"); + assertJQ( + req("fq", "filter(wdf_nocase:.,)", "fl", "id", "debug", "query"), "/response/numFound==0"); // Insure the same behavior as with bare clause, just not filter - assertJQ(req("q", "wdf_nocase:&", "fl", "id", "debug", "query") - , "/response/numFound==0" - ); - assertJQ(req("fq", "wdf_nocase:.,", "fl", "id", "debug", "query") - , "/response/numFound==0" - ); - + assertJQ(req("q", "wdf_nocase:&", "fl", "id", "debug", "query"), "/response/numFound==0"); + assertJQ(req("fq", "wdf_nocase:.,", "fl", "id", "debug", "query"), "/response/numFound==0"); } - @Test public void testRegex() throws Exception { // leading slash in a regex fixed by SOLR-8605 - assertJQ(req("q", "rrr_s:/\\/lead.*/", "fl","id") - , "/response/docs==[{id:'13'}]" - ); - + assertJQ(req("q", "rrr_s:/\\/lead.*/", "fl", "id"), "/response/docs==[{id:'13'}]"); } // parsing performance test - // Run from command line with ant test -Dtestcase=TestSolrQueryParser -Dtestmethod=testParsingPerformance -Dtests.asserts=false 2>/dev/null | grep QPS + // Run from command line with ant test -Dtestcase=TestSolrQueryParser + // -Dtestmethod=testParsingPerformance -Dtests.asserts=false 2>/dev/null | grep QPS @Test public void testParsingPerformance() throws Exception { - String[] args = {"-queries","100" ,"-iter","1000", "-clauses","100", "-format","term%d", "-seed","0"}; - args = new String[] {"-queries","1000" ,"-iter","2000", "-clauses","10", "-format","term%d", "-seed","0"}; - // args = new String[] {"-queries","1000" ,"-iter","1000000000", "-clauses","10", "-format","term%d", "-seed","0"}; + String[] args = { + "-queries", "100", "-iter", "1000", "-clauses", "100", "-format", "term%d", "-seed", "0" + }; + args = + new 
String[] { + "-queries", "1000", "-iter", "2000", "-clauses", "10", "-format", "term%d", "-seed", "0" + }; + // args = new String[] {"-queries","1000" ,"-iter","1000000000", "-clauses","10", + // "-format","term%d", "-seed","0"}; boolean assertOn = false; assert assertOn = true; if (assertOn) { - // System.out.println("WARNING! Assertions are enabled!!!! Will only execute small run. Change with -Dtests.asserts=false"); - args = new String[]{"-queries","10" ,"-iter","2", "-clauses","20", "-format","term%d", "-seed","0"}; + // System.out.println("WARNING! Assertions are enabled!!!! Will only execute small run. + // Change with -Dtests.asserts=false"); + args = + new String[] { + "-queries", "10", "-iter", "2", "-clauses", "20", "-format", "term%d", "-seed", "0" + }; } - int iter = 1000; int numQueries = 100; int maxClauses = 5; @@ -721,9 +756,10 @@ public void testParsingPerformance() throws Exception { sb.append(otherStuff).append(" "); - int nClauses = r.nextInt(maxClauses) + 1; // TODO: query parse can't parse () for some reason??? + int nClauses = + r.nextInt(maxClauses) + 1; // TODO: query parse can't parse () for some reason??? - for (int c = 0; c wifi" synonym - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - - assertJQ(req("df", "syn", "q", "wi fi", "sow", "true") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi fi") // default sow=false - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - - assertJQ(req("df", "syn", "q", "{!lucene sow=false}wi fi") - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - assertJQ(req("df", "syn", "q", "{!lucene sow=true}wi fi") - , "/response/numFound==0" - ); - - assertJQ(req("df", "syn", "q", "{!lucene}wi fi") // default sow=false - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); + assertJQ( + req( + "df", "syn", "q", "wifi", "sow", + "true") // retrieve the single document containing literal "wifi" + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + + assertJQ( + req("df", "syn", "q", "wi fi", "sow", "false") // trigger the "wi fi => wifi" synonym + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + + assertJQ(req("df", "syn", "q", "wi fi", "sow", "true"), "/response/numFound==0"); + assertJQ( + req("df", "syn", "q", "wi fi") // default sow=false + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + + assertJQ( + req("df", "syn", "q", "{!lucene sow=false}wi fi"), + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + assertJQ(req("df", "syn", "q", "{!lucene sow=true}wi fi"), "/response/numFound==0"); + + assertJQ( + req("df", "syn", "q", "{!lucene}wi fi") // default sow=false + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); } public void testSplitOnWhitespace_Comments() throws Exception { // The "syn" field has synonyms loaded from synonyms.txt - assertJQ(req("df", "syn", "q", "wifi", "sow", "true") // retrieve the single document containing literal "wifi" - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - assertJQ(req("df", "syn", "q", "wi fi", "sow", "false") // trigger the "wi fi => wifi" synonym - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - assertJQ(req("df", "syn", "q", "wi /* foo */ fi", "sow", "false") // trigger the "wi fi => wifi" synonym - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - assertJQ(req("df", "syn", "q", "wi /* foo */ /* bar */ fi", "sow", "false") // trigger the "wi fi => wifi" synonym - , "/response/numFound==1" - 
, "/response/docs/[0]/id=='20'" - ); - assertJQ(req("df", "syn", "q", " /* foo */ wi fi /* bar */", "sow", "false") // trigger the "wi fi => wifi" synonym - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - assertJQ(req("df", "syn", "q", " /* foo */ wi /* bar */ fi /* baz */", "sow", "false") // trigger the "wi fi => wifi" synonym - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - - assertJQ(req("df", "syn", "q", "wi fi", "sow", "true") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi /* foo */ fi", "sow", "true") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi /* foo */ /* bar */ fi", "sow", "true") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "/* foo */ wi fi /* bar */", "sow", "true") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "/* foo */ wi /* bar */ fi /* baz */", "sow", "true") - , "/response/numFound==0" - ); - - assertJQ(req("df", "syn", "q", "wi fi") // default sow=false - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - assertJQ(req("df", "syn", "q", "wi /* foo */ fi") // default sow=false - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - assertJQ(req("df", "syn", "q", "wi /* foo */ /* bar */ fi") // default sow=false - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - assertJQ(req("df", "syn", "q", " /* foo */ wi fi /* bar */") // default sow=false - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - assertJQ(req("df", "syn", "q", " /* foo */ wi /* bar */ fi /* baz */") // default sow=false - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - - - assertJQ(req("df", "syn", "q", "{!lucene sow=false}wi fi") - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - assertJQ(req("df", "syn", "q", "{!lucene sow=false}wi /* foo */ fi") - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - assertJQ(req("df", "syn", "q", "{!lucene sow=false}wi /* foo */ /* bar */ fi") - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - assertJQ(req("df", "syn", "q", "{!lucene sow=false}/* foo */ wi fi /* bar */") - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - assertJQ(req("df", "syn", "q", "{!lucene sow=false}/* foo */ wi /* bar */ fi /* baz */") - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - - assertJQ(req("df", "syn", "q", "{!lucene sow=true}wi fi") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "{!lucene sow=true}wi /* foo */ fi") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "{!lucene sow=true}wi /* foo */ /* bar */ fi") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "{!lucene sow=true}/* foo */ wi fi /* bar */") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "{!lucene sow=true}/* foo */ wi /* bar */ fi /* baz */") - , "/response/numFound==0" - ); - - assertJQ(req("df", "syn", "q", "{!lucene}wi fi") // default sow=false - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - assertJQ(req("df", "syn", "q", "{!lucene}wi /* foo */ fi") // default sow=false - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - assertJQ(req("df", "syn", "q", "{!lucene}wi /* foo */ /* bar */ fi") // default sow=false - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - assertJQ(req("df", "syn", "q", "{!lucene}/* foo */ wi fi /* bar */") // default sow=false - , "/response/numFound==1" - , 
"/response/docs/[0]/id=='20'" - ); - assertJQ(req("df", "syn", "q", "{!lucene}/* foo */ wi /* bar */ fi /* baz */") // default sow=false - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); + assertJQ( + req( + "df", "syn", "q", "wifi", "sow", + "true") // retrieve the single document containing literal "wifi" + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + assertJQ( + req("df", "syn", "q", "wi fi", "sow", "false") // trigger the "wi fi => wifi" synonym + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + assertJQ( + req( + "df", + "syn", + "q", + "wi /* foo */ fi", + "sow", + "false") // trigger the "wi fi => wifi" synonym + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + assertJQ( + req( + "df", + "syn", + "q", + "wi /* foo */ /* bar */ fi", + "sow", + "false") // trigger the "wi fi => wifi" synonym + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + assertJQ( + req( + "df", + "syn", + "q", + " /* foo */ wi fi /* bar */", + "sow", + "false") // trigger the "wi fi => wifi" synonym + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + assertJQ( + req( + "df", + "syn", + "q", + " /* foo */ wi /* bar */ fi /* baz */", + "sow", + "false") // trigger the "wi fi => wifi" synonym + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + + assertJQ(req("df", "syn", "q", "wi fi", "sow", "true"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "wi /* foo */ fi", "sow", "true"), "/response/numFound==0"); + assertJQ( + req("df", "syn", "q", "wi /* foo */ /* bar */ fi", "sow", "true"), "/response/numFound==0"); + assertJQ( + req("df", "syn", "q", "/* foo */ wi fi /* bar */", "sow", "true"), "/response/numFound==0"); + assertJQ( + req("df", "syn", "q", "/* foo */ wi /* bar */ fi /* baz */", "sow", "true"), + "/response/numFound==0"); + + assertJQ( + req("df", "syn", "q", "wi fi") // default sow=false + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + assertJQ( + req("df", "syn", "q", "wi /* foo */ fi") // default sow=false + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + assertJQ( + req("df", "syn", "q", "wi /* foo */ /* bar */ fi") // default sow=false + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + assertJQ( + req("df", "syn", "q", " /* foo */ wi fi /* bar */") // default sow=false + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + assertJQ( + req("df", "syn", "q", " /* foo */ wi /* bar */ fi /* baz */") // default sow=false + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + + assertJQ( + req("df", "syn", "q", "{!lucene sow=false}wi fi"), + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + assertJQ( + req("df", "syn", "q", "{!lucene sow=false}wi /* foo */ fi"), + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + assertJQ( + req("df", "syn", "q", "{!lucene sow=false}wi /* foo */ /* bar */ fi"), + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + assertJQ( + req("df", "syn", "q", "{!lucene sow=false}/* foo */ wi fi /* bar */"), + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + assertJQ( + req("df", "syn", "q", "{!lucene sow=false}/* foo */ wi /* bar */ fi /* baz */"), + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + + assertJQ(req("df", "syn", "q", "{!lucene sow=true}wi fi"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "{!lucene sow=true}wi /* foo */ fi"), "/response/numFound==0"); + assertJQ( + req("df", "syn", "q", "{!lucene 
sow=true}wi /* foo */ /* bar */ fi"), + "/response/numFound==0"); + assertJQ( + req("df", "syn", "q", "{!lucene sow=true}/* foo */ wi fi /* bar */"), + "/response/numFound==0"); + assertJQ( + req("df", "syn", "q", "{!lucene sow=true}/* foo */ wi /* bar */ fi /* baz */"), + "/response/numFound==0"); + + assertJQ( + req("df", "syn", "q", "{!lucene}wi fi") // default sow=false + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + assertJQ( + req("df", "syn", "q", "{!lucene}wi /* foo */ fi") // default sow=false + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + assertJQ( + req("df", "syn", "q", "{!lucene}wi /* foo */ /* bar */ fi") // default sow=false + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + assertJQ( + req("df", "syn", "q", "{!lucene}/* foo */ wi fi /* bar */") // default sow=false + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + assertJQ( + req("df", "syn", "q", "{!lucene}/* foo */ wi /* bar */ fi /* baz */") // default sow=false + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); } public void testOperatorsAndMultiWordSynonyms() throws Exception { // The "syn" field has synonyms loaded from synonyms.txt - assertJQ(req("df", "syn", "q", "wifi", "sow", "true") // retrieve the single document containing literal "wifi" - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - assertJQ(req("df", "syn", "q", "wi fi", "sow", "false") // trigger the "wi fi => wifi" synonym - , "/response/numFound==1" - , "/response/docs/[0]/id=='20'" - ); - - assertJQ(req("df", "syn", "q", "+wi fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "-wi fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "!wi fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi* fi", "sow", "false") // matches because wi* matches wifi - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "w? 
fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi~1 fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi^2 fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi^=2 fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi +fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi -fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi !fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi fi*", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi fi?", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi fi~1", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi fi^2", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi fi^=2", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "syn:wi fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi syn:fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "NOT wi fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi NOT fi", "sow", "false") - , "/response/numFound==0" - ); - - assertJQ(req("df", "syn", "q", "wi fi AND ATM", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "ATM AND wi fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi fi && ATM", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "ATM && wi fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "(wi fi) AND ATM", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "ATM AND (wi fi)", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "(wi fi) && ATM", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "ATM && (wi fi)", "sow", "false") - , "/response/numFound==1" - ); - - assertJQ(req("df", "syn", "q", "wi fi OR NotThereAtAll", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "NotThereAtAll OR wi fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi fi || NotThereAtAll", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "NotThereAtAll || wi fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "(wi fi) OR NotThereAtAll", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "NotThereAtAll OR (wi fi)", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "(wi fi) || NotThereAtAll", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "NotThereAtAll || (wi fi)", "sow", "false") - , "/response/numFound==1" - ); - - assertJQ(req("df", "syn", "q", "\"wi\" fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi \"fi\"", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "(wi) fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi (fi)", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "/wi/ fi", "sow", 
"false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi /fi/", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "(wi fi)", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "+(wi fi)", "sow", "false") - , "/response/numFound==1" - ); + assertJQ( + req( + "df", "syn", "q", "wifi", "sow", + "true") // retrieve the single document containing literal "wifi" + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + assertJQ( + req("df", "syn", "q", "wi fi", "sow", "false") // trigger the "wi fi => wifi" synonym + , + "/response/numFound==1", + "/response/docs/[0]/id=='20'"); + + assertJQ(req("df", "syn", "q", "+wi fi", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "-wi fi", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "!wi fi", "sow", "false"), "/response/numFound==0"); + assertJQ( + req("df", "syn", "q", "wi* fi", "sow", "false") // matches because wi* matches wifi + , + "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "w? fi", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "wi~1 fi", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "wi^2 fi", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "wi^=2 fi", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "wi +fi", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "wi -fi", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "wi !fi", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "wi fi*", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "wi fi?", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "wi fi~1", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "wi fi^2", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "wi fi^=2", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "syn:wi fi", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "wi syn:fi", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "NOT wi fi", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "wi NOT fi", "sow", "false"), "/response/numFound==0"); + + assertJQ(req("df", "syn", "q", "wi fi AND ATM", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "ATM AND wi fi", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "wi fi && ATM", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "ATM && wi fi", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "(wi fi) AND ATM", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "ATM AND (wi fi)", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "(wi fi) && ATM", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "ATM && (wi fi)", "sow", "false"), "/response/numFound==1"); + + assertJQ( + req("df", "syn", "q", "wi fi OR NotThereAtAll", "sow", "false"), "/response/numFound==0"); + assertJQ( + req("df", "syn", "q", "NotThereAtAll OR wi fi", "sow", "false"), "/response/numFound==0"); + assertJQ( + req("df", "syn", "q", "wi fi || NotThereAtAll", "sow", "false"), "/response/numFound==0"); + assertJQ( + req("df", 
"syn", "q", "NotThereAtAll || wi fi", "sow", "false"), "/response/numFound==0"); + assertJQ( + req("df", "syn", "q", "(wi fi) OR NotThereAtAll", "sow", "false"), "/response/numFound==1"); + assertJQ( + req("df", "syn", "q", "NotThereAtAll OR (wi fi)", "sow", "false"), "/response/numFound==1"); + assertJQ( + req("df", "syn", "q", "(wi fi) || NotThereAtAll", "sow", "false"), "/response/numFound==1"); + assertJQ( + req("df", "syn", "q", "NotThereAtAll || (wi fi)", "sow", "false"), "/response/numFound==1"); + + assertJQ(req("df", "syn", "q", "\"wi\" fi", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "wi \"fi\"", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "(wi) fi", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "wi (fi)", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "/wi/ fi", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "wi /fi/", "sow", "false"), "/response/numFound==0"); + assertJQ(req("df", "syn", "q", "(wi fi)", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "+(wi fi)", "sow", "false"), "/response/numFound==1"); @SuppressWarnings({"rawtypes"}) Map all = (Map) Utils.fromJSONString(h.query(req("q", "*:*", "rows", "0", "wt", "json"))); - int totalDocs = Integer.parseInt(((Map)all.get("response")).get("numFound").toString()); + int totalDocs = Integer.parseInt(((Map) all.get("response")).get("numFound").toString()); int allDocsExceptOne = totalDocs - 1; - assertJQ(req("df", "syn", "q", "-(wi fi)", "sow", "false") - , "/response/numFound==" + allDocsExceptOne // one doc contains "wifi" in the syn field - ); - assertJQ(req("df", "syn", "q", "!(wi fi)", "sow", "false") - , "/response/numFound==" + allDocsExceptOne // one doc contains "wifi" in the syn field - ); - assertJQ(req("df", "syn", "q", "NOT (wi fi)", "sow", "false") - , "/response/numFound==" + allDocsExceptOne // one doc contains "wifi" in the syn field - ); - assertJQ(req("df", "syn", "q", "(wi fi)^2", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "(wi fi)^=2", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "syn:(wi fi)", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "+ATM wi fi", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "-ATM wi fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "-NotThereAtAll wi fi", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "!ATM wi fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "!NotThereAtAll wi fi", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "NOT ATM wi fi", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "NOT NotThereAtAll wi fi", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "AT* wi fi", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "AT? 
wi fi", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "\"ATM\" wi fi", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "wi fi +ATM", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "wi fi -ATM", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi fi -NotThereAtAll", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "wi fi !ATM", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi fi !NotThereAtAll", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "wi fi NOT ATM", "sow", "false") - , "/response/numFound==0" - ); - assertJQ(req("df", "syn", "q", "wi fi NOT NotThereAtAll", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "wi fi AT*", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "wi fi AT?", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "wi fi \"ATM\"", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "\"wi fi\"~2", "sow", "false") - , "/response/numFound==1" - ); - assertJQ(req("df", "syn", "q", "syn:\"wi fi\"", "sow", "false") - , "/response/numFound==1" - ); + assertJQ( + req("df", "syn", "q", "-(wi fi)", "sow", "false"), + "/response/numFound==" + allDocsExceptOne // one doc contains "wifi" in the syn field + ); + assertJQ( + req("df", "syn", "q", "!(wi fi)", "sow", "false"), + "/response/numFound==" + allDocsExceptOne // one doc contains "wifi" in the syn field + ); + assertJQ( + req("df", "syn", "q", "NOT (wi fi)", "sow", "false"), + "/response/numFound==" + allDocsExceptOne // one doc contains "wifi" in the syn field + ); + assertJQ(req("df", "syn", "q", "(wi fi)^2", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "(wi fi)^=2", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "syn:(wi fi)", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "+ATM wi fi", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "-ATM wi fi", "sow", "false"), "/response/numFound==0"); + assertJQ( + req("df", "syn", "q", "-NotThereAtAll wi fi", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "!ATM wi fi", "sow", "false"), "/response/numFound==0"); + assertJQ( + req("df", "syn", "q", "!NotThereAtAll wi fi", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "NOT ATM wi fi", "sow", "false"), "/response/numFound==0"); + assertJQ( + req("df", "syn", "q", "NOT NotThereAtAll wi fi", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "AT* wi fi", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "AT? 
wi fi", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "\"ATM\" wi fi", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "wi fi +ATM", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "wi fi -ATM", "sow", "false"), "/response/numFound==0"); + assertJQ( + req("df", "syn", "q", "wi fi -NotThereAtAll", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "wi fi !ATM", "sow", "false"), "/response/numFound==0"); + assertJQ( + req("df", "syn", "q", "wi fi !NotThereAtAll", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "wi fi NOT ATM", "sow", "false"), "/response/numFound==0"); + assertJQ( + req("df", "syn", "q", "wi fi NOT NotThereAtAll", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "wi fi AT*", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "wi fi AT?", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "wi fi \"ATM\"", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "\"wi fi\"~2", "sow", "false"), "/response/numFound==1"); + assertJQ(req("df", "syn", "q", "syn:\"wi fi\"", "sow", "false"), "/response/numFound==1"); } @Test @@ -1178,7 +1138,8 @@ public void testAutoGeneratePhraseQueries() throws Exception { try (SolrQueryRequest req = req()) { for (SolrParams params : Arrays.asList(noSowParams, sowFalseParams)) { - QParser qParser = QParser.getParser("text:grackle", req); // "text" has autoGeneratePhraseQueries="true" + QParser qParser = + QParser.getParser("text:grackle", req); // "text" has autoGeneratePhraseQueries="true" qParser.setParams(sowFalseParams); Query q = qParser.getQuery(); assertEquals("(text:\"crow blackbird\" text:grackl)", q.toString()); @@ -1190,7 +1151,10 @@ public void testAutoGeneratePhraseQueries() throws Exception { assertEquals("text:\"crow blackbird\" text:grackl", q.toString()); for (SolrParams params : Arrays.asList(noSowParams, sowTrueParams, sowFalseParams)) { - qParser = QParser.getParser("text_sw:grackle", req); // "text_sw" doesn't specify autoGeneratePhraseQueries => default false + qParser = + QParser.getParser( + "text_sw:grackle", + req); // "text_sw" doesn't specify autoGeneratePhraseQueries => default false qParser.setParams(params); q = qParser.getQuery(); assertEquals("((+text_sw:crow +text_sw:blackbird) text_sw:grackl)", q.toString()); @@ -1209,406 +1173,619 @@ public void testShingleQueries() throws Exception { assertEquals("Synonym(shingle23:A_B shingle23:A_B_C) shingle23:B_C", q.toString()); } - assertJQ(req("df", "shingle23", "q", "A B C", "sow", "false") - , "/response/numFound==1" - ); + assertJQ(req("df", "shingle23", "q", "A B C", "sow", "false"), "/response/numFound==1"); } public void testSynonymQueryStyle() throws Exception { String field = "t_pick_best_foo"; Query q = QParser.getParser("tabby", req(params("df", field))).getQuery(); - assertThat(q, disjunctionOf( - termQuery(field, "cat"), - termQuery(field, "tabbi"), - termQuery(field, "anim"), - termQuery(field, "felin") - )); + assertThat( + q, + disjunctionOf( + termQuery(field, "cat"), + termQuery(field, "tabbi"), + termQuery(field, "anim"), + termQuery(field, "felin"))); field = "t_as_distinct_foo"; q = QParser.getParser("tabby", req(params("df", field))).getQuery(); - assertThat(q, booleanQuery( - termQuery(field,"cat"), - termQuery(field,"tabbi"), - termQuery(field,"anim"), - termQuery(field,"felin") - )); + assertThat( + q, + booleanQuery( 
+ termQuery(field, "cat"), + termQuery(field, "tabbi"), + termQuery(field, "anim"), + termQuery(field, "felin"))); /*confirm autoGeneratePhraseQueries always builds OR queries*/ - q = QParser.getParser("jeans", req(params("df", "t_as_distinct_foo", "sow", "false"))).getQuery(); + q = + QParser.getParser("jeans", req(params("df", "t_as_distinct_foo", "sow", "false"))) + .getQuery(); assertEquals("(t_as_distinct_foo:\"denim pant\" t_as_distinct_foo:jean)", q.toString()); field = "t_pick_best_foo"; - q = QParser.getParser("jeans", req(params("df", field, "sow", "false"))).getQuery(); - assertThat(q, booleanQuery(disjunctionOf( - termQuery(field, "jean"), - phraseQuery(field, "denim pant") - ))); + q = QParser.getParser("jeans", req(params("df", field, "sow", "false"))).getQuery(); + assertThat( + q, booleanQuery(disjunctionOf(termQuery(field, "jean"), phraseQuery(field, "denim pant")))); } - public void testSynonymsBoost_singleTermQuerySingleTermSynonyms_shouldParseBoostedQuery() throws Exception { - //tiger, tigre|0.9 + public void testSynonymsBoost_singleTermQuerySingleTermSynonyms_shouldParseBoostedQuery() + throws Exception { + // tiger, tigre|0.9 String field = "t_pick_best_boosted_foo"; Query q = QParser.getParser("tiger", req(params("df", field))).getQuery(); assertThat(q, disjunctionOf(termQuery(field, "tiger"), boosted(field, "tigre", 0.9f))); field = "t_as_distinct_boosted_foo"; q = QParser.getParser("tiger", req(params("df", "t_as_distinct_boosted_foo"))).getQuery(); - assertEquals("(t_as_distinct_boosted_foo:tigre)^0.9 t_as_distinct_boosted_foo:tiger", q.toString()); + assertEquals( + "(t_as_distinct_boosted_foo:tigre)^0.9 t_as_distinct_boosted_foo:tiger", q.toString()); field = "t_as_same_term_boosted_foo"; q = QParser.getParser("tiger", req(params("df", "t_as_same_term_boosted_foo"))).getQuery(); - assertEquals("Synonym(t_as_same_term_boosted_foo:tiger t_as_same_term_boosted_foo:tigre^0.9)", q.toString()); + assertEquals( + "Synonym(t_as_same_term_boosted_foo:tiger t_as_same_term_boosted_foo:tigre^0.9)", + q.toString()); - //lynx => lince|0.8, lynx_canadensis|0.9 + // lynx => lince|0.8, lynx_canadensis|0.9 field = "t_pick_best_boosted_foo"; q = QParser.getParser("lynx", req(params("df", field))).getQuery(); - assertThat(q, disjunctionOf(boosted(field, "lince", 0.8f), boosted(field, "lynx_canadensis", 0.9f))); + assertThat( + q, disjunctionOf(boosted(field, "lince", 0.8f), boosted(field, "lynx_canadensis", 0.9f))); field = "t_as_distinct_boosted_foo"; q = QParser.getParser("lynx", req(params("df", field))).getQuery(); - assertThat(q, booleanQuery( - boosted(field, "lince", 0.8f), - boosted(field, "lynx_canadensis", 0.9f) - )); + assertThat( + q, booleanQuery(boosted(field, "lince", 0.8f), boosted(field, "lynx_canadensis", 0.9f))); field = "t_as_same_term_boosted_foo"; q = QParser.getParser("lynx", req(params("df", field))).getQuery(); - assertEquals("Synonym(t_as_same_term_boosted_foo:lince^0.8 t_as_same_term_boosted_foo:lynx_canadensis^0.9)", q.toString()); + assertEquals( + "Synonym(t_as_same_term_boosted_foo:lince^0.8 t_as_same_term_boosted_foo:lynx_canadensis^0.9)", + q.toString()); } - public void testSynonymsBoost_singleTermQueryMultiTermSynonyms_shouldParseBoostedQuery() throws Exception { - //leopard, big cat|0.8, bagheera|0.9, panthera pardus|0.85 + public void testSynonymsBoost_singleTermQueryMultiTermSynonyms_shouldParseBoostedQuery() + throws Exception { + // leopard, big cat|0.8, bagheera|0.9, panthera pardus|0.85 String field = "t_pick_best_boosted_foo"; Query q = 
QParser.getParser("leopard", req(params("df", "t_pick_best_boosted_foo"))).getQuery(); - assertThat(q, booleanQuery(disjunctionOf( - termQuery(field, "leopard"), - boosted(phraseQuery(field, "big cat"), 0.8f), - boosted(phraseQuery(field, "panthera pardus"), 0.85f), - boosted(termQuery(field, "bagheera"), 0.9f) - ))); + assertThat( + q, + booleanQuery( + disjunctionOf( + termQuery(field, "leopard"), + boosted(phraseQuery(field, "big cat"), 0.8f), + boosted(phraseQuery(field, "panthera pardus"), 0.85f), + boosted(termQuery(field, "bagheera"), 0.9f)))); q = QParser.getParser("leopard", req(params("df", "t_as_distinct_boosted_foo"))).getQuery(); - assertEquals("((t_as_distinct_boosted_foo:\"big cat\")^0.8 (t_as_distinct_boosted_foo:bagheera)^0.9 (t_as_distinct_boosted_foo:\"panthera pardus\")^0.85 t_as_distinct_boosted_foo:leopard)", q.toString()); + assertEquals( + "((t_as_distinct_boosted_foo:\"big cat\")^0.8 (t_as_distinct_boosted_foo:bagheera)^0.9 (t_as_distinct_boosted_foo:\"panthera pardus\")^0.85 t_as_distinct_boosted_foo:leopard)", + q.toString()); q = QParser.getParser("leopard", req(params("df", "t_as_same_term_boosted_foo"))).getQuery(); - assertEquals("((t_as_same_term_boosted_foo:\"big cat\")^0.8 (t_as_same_term_boosted_foo:bagheera)^0.9 (t_as_same_term_boosted_foo:\"panthera pardus\")^0.85 t_as_same_term_boosted_foo:leopard)", q.toString()); + assertEquals( + "((t_as_same_term_boosted_foo:\"big cat\")^0.8 (t_as_same_term_boosted_foo:bagheera)^0.9 (t_as_same_term_boosted_foo:\"panthera pardus\")^0.85 t_as_same_term_boosted_foo:leopard)", + q.toString()); - //lion => panthera leo|0.9, simba leo|0.8, kimba|0.75 + // lion => panthera leo|0.9, simba leo|0.8, kimba|0.75 q = QParser.getParser("lion", req(params("df", "t_pick_best_boosted_foo"))).getQuery(); - assertThat(q, booleanQuery(disjunctionOf( - boosted(termQuery(field, "kimba"), 0.75f), - boosted(phraseQuery(field, "simba leo"), 0.8f), - boosted(phraseQuery(field, "panthera leo"), 0.9f) - ))); + assertThat( + q, + booleanQuery( + disjunctionOf( + boosted(termQuery(field, "kimba"), 0.75f), + boosted(phraseQuery(field, "simba leo"), 0.8f), + boosted(phraseQuery(field, "panthera leo"), 0.9f)))); q = QParser.getParser("lion", req(params("df", "t_as_distinct_boosted_foo"))).getQuery(); - assertEquals("((t_as_distinct_boosted_foo:\"panthera leo\")^0.9 (t_as_distinct_boosted_foo:\"simba leo\")^0.8 (t_as_distinct_boosted_foo:kimba)^0.75)", q.toString()); + assertEquals( + "((t_as_distinct_boosted_foo:\"panthera leo\")^0.9 (t_as_distinct_boosted_foo:\"simba leo\")^0.8 (t_as_distinct_boosted_foo:kimba)^0.75)", + q.toString()); q = QParser.getParser("lion", req(params("df", "t_as_same_term_boosted_foo"))).getQuery(); - assertEquals("((t_as_same_term_boosted_foo:\"panthera leo\")^0.9 (t_as_same_term_boosted_foo:\"simba leo\")^0.8 (t_as_same_term_boosted_foo:kimba)^0.75)", q.toString()); + assertEquals( + "((t_as_same_term_boosted_foo:\"panthera leo\")^0.9 (t_as_same_term_boosted_foo:\"simba leo\")^0.8 (t_as_same_term_boosted_foo:kimba)^0.75)", + q.toString()); } - public void testSynonymsBoost_multiTermQuerySingleTermSynonyms_shouldParseBoostedQuery() throws Exception { - //tiger, tigre|0.9 - //lynx => lince|0.8, lynx_canadensis|0.9 + public void testSynonymsBoost_multiTermQuerySingleTermSynonyms_shouldParseBoostedQuery() + throws Exception { + // tiger, tigre|0.9 + // lynx => lince|0.8, lynx_canadensis|0.9 String field = "t_pick_best_boosted_foo"; Query q = QParser.getParser("tiger lynx", req(params("df", field))).getQuery(); - 
assertThat(q, booleanQuery( - disjunctionOf( - boosted(field, "lince", 0.8f), - boosted(field, "lynx_canadensis", 0.9f) - ), - disjunctionOf( - termQuery(field, "tiger"), - boosted(field, "tigre", 0.9f) - ) - )); + assertThat( + q, + booleanQuery( + disjunctionOf(boosted(field, "lince", 0.8f), boosted(field, "lynx_canadensis", 0.9f)), + disjunctionOf(termQuery(field, "tiger"), boosted(field, "tigre", 0.9f)))); q = QParser.getParser("tiger lynx", req(params("df", "t_as_distinct_boosted_foo"))).getQuery(); - assertEquals("((t_as_distinct_boosted_foo:tigre)^0.9 t_as_distinct_boosted_foo:tiger)" + - " ((t_as_distinct_boosted_foo:lince)^0.8 (t_as_distinct_boosted_foo:lynx_canadensis)^0.9)", q.toString()); + assertEquals( + "((t_as_distinct_boosted_foo:tigre)^0.9 t_as_distinct_boosted_foo:tiger)" + + " ((t_as_distinct_boosted_foo:lince)^0.8 (t_as_distinct_boosted_foo:lynx_canadensis)^0.9)", + q.toString()); q = QParser.getParser("tiger lynx", req(params("df", "t_as_same_term_boosted_foo"))).getQuery(); - assertEquals("Synonym(t_as_same_term_boosted_foo:tiger t_as_same_term_boosted_foo:tigre^0.9)" + - " Synonym(t_as_same_term_boosted_foo:lince^0.8 t_as_same_term_boosted_foo:lynx_canadensis^0.9)", q.toString()); + assertEquals( + "Synonym(t_as_same_term_boosted_foo:tiger t_as_same_term_boosted_foo:tigre^0.9)" + + " Synonym(t_as_same_term_boosted_foo:lince^0.8 t_as_same_term_boosted_foo:lynx_canadensis^0.9)", + q.toString()); } - public void testSynonymsBoost_multiTermQueryMultiTermSynonyms_shouldParseBoostedQuery() throws Exception { - //leopard, big cat|0.8, bagheera|0.9, panthera pardus|0.85 - //lion => panthera leo|0.9, simba leo|0.8, kimba|0.75 + public void testSynonymsBoost_multiTermQueryMultiTermSynonyms_shouldParseBoostedQuery() + throws Exception { + // leopard, big cat|0.8, bagheera|0.9, panthera pardus|0.85 + // lion => panthera leo|0.9, simba leo|0.8, kimba|0.75 String field = "t_pick_best_boosted_foo"; Query q = QParser.getParser("leopard lion", req(params("df", field))).getQuery(); - assertThat(q, booleanQuery( - disjunctionOf( - termQuery(field, "leopard"), - boosted(phraseQuery(field, "big cat"), 0.8f), - boosted(phraseQuery(field, "panthera pardus"), 0.85f), - boosted(termQuery(field, "bagheera"), 0.9f) - ), - disjunctionOf( - boosted(termQuery(field, "kimba"), 0.75f), - boosted(phraseQuery(field, "simba leo"), 0.8f), - boosted(phraseQuery(field, "panthera leo"), 0.9f) - ) - )); - - q = QParser.getParser("leopard lion", req(params("df", "t_as_distinct_boosted_foo"))).getQuery(); - assertEquals("((t_as_distinct_boosted_foo:\"big cat\")^0.8 (t_as_distinct_boosted_foo:bagheera)^0.9 (t_as_distinct_boosted_foo:\"panthera pardus\")^0.85 t_as_distinct_boosted_foo:leopard)" + - " ((t_as_distinct_boosted_foo:\"panthera leo\")^0.9 (t_as_distinct_boosted_foo:\"simba leo\")^0.8 (t_as_distinct_boosted_foo:kimba)^0.75)", q.toString()); - - q = QParser.getParser("leopard lion", req(params("df", "t_as_same_term_boosted_foo"))).getQuery(); - assertEquals("((t_as_same_term_boosted_foo:\"big cat\")^0.8 (t_as_same_term_boosted_foo:bagheera)^0.9 (t_as_same_term_boosted_foo:\"panthera pardus\")^0.85 t_as_same_term_boosted_foo:leopard)" + - " ((t_as_same_term_boosted_foo:\"panthera leo\")^0.9 (t_as_same_term_boosted_foo:\"simba leo\")^0.8 (t_as_same_term_boosted_foo:kimba)^0.75)", q.toString()); - + assertThat( + q, + booleanQuery( + disjunctionOf( + termQuery(field, "leopard"), + boosted(phraseQuery(field, "big cat"), 0.8f), + boosted(phraseQuery(field, "panthera pardus"), 0.85f), + 
boosted(termQuery(field, "bagheera"), 0.9f)), + disjunctionOf( + boosted(termQuery(field, "kimba"), 0.75f), + boosted(phraseQuery(field, "simba leo"), 0.8f), + boosted(phraseQuery(field, "panthera leo"), 0.9f)))); + + q = + QParser.getParser("leopard lion", req(params("df", "t_as_distinct_boosted_foo"))) + .getQuery(); + assertEquals( + "((t_as_distinct_boosted_foo:\"big cat\")^0.8 (t_as_distinct_boosted_foo:bagheera)^0.9 (t_as_distinct_boosted_foo:\"panthera pardus\")^0.85 t_as_distinct_boosted_foo:leopard)" + + " ((t_as_distinct_boosted_foo:\"panthera leo\")^0.9 (t_as_distinct_boosted_foo:\"simba leo\")^0.8 (t_as_distinct_boosted_foo:kimba)^0.75)", + q.toString()); + + q = + QParser.getParser("leopard lion", req(params("df", "t_as_same_term_boosted_foo"))) + .getQuery(); + assertEquals( + "((t_as_same_term_boosted_foo:\"big cat\")^0.8 (t_as_same_term_boosted_foo:bagheera)^0.9 (t_as_same_term_boosted_foo:\"panthera pardus\")^0.85 t_as_same_term_boosted_foo:leopard)" + + " ((t_as_same_term_boosted_foo:\"panthera leo\")^0.9 (t_as_same_term_boosted_foo:\"simba leo\")^0.8 (t_as_same_term_boosted_foo:kimba)^0.75)", + q.toString()); } - public void testSynonymsBoost_singleConceptQuerySingleTermSynonym_shouldParseBoostedQuery() throws Exception { - //panthera pardus, leopard|0.6 + public void testSynonymsBoost_singleConceptQuerySingleTermSynonym_shouldParseBoostedQuery() + throws Exception { + // panthera pardus, leopard|0.6 String field = "t_pick_best_boosted_foo"; - Query q = QParser.getParser("panthera pardus story",req(params("df", field,"sow", "false"))).getQuery(); - assertThat(q, booleanQuery( - termQuery(field, "story"), - disjunctionOf( - boosted(field, "leopard", 0.6f), - phraseQuery(field, "panthera pardus") - ) - )); - - q = QParser.getParser("panthera pardus story", req(params("df", "t_as_distinct_boosted_foo","sow", "false"))).getQuery(); - assertEquals("((t_as_distinct_boosted_foo:leopard)^0.6 t_as_distinct_boosted_foo:\"panthera pardus\") t_as_distinct_boosted_foo:story", q.toString()); - - q = QParser.getParser("panthera pardus story", req(params("df", "t_as_same_term_boosted_foo","sow", "false"))).getQuery(); - assertEquals("((t_as_same_term_boosted_foo:leopard)^0.6 t_as_same_term_boosted_foo:\"panthera pardus\") t_as_same_term_boosted_foo:story", q.toString()); - - //panthera tigris => tiger|0.99 - q = QParser.getParser("panthera tigris story", req(params("df", "t_pick_best_boosted_foo","sow", "false"))).getQuery(); - assertEquals("(t_pick_best_boosted_foo:tiger)^0.99 t_pick_best_boosted_foo:story", q.toString()); - - q = QParser.getParser("panthera tigris story", req(params("df", "t_as_distinct_boosted_foo","sow", "false"))).getQuery(); - assertEquals("(t_as_distinct_boosted_foo:tiger)^0.99 t_as_distinct_boosted_foo:story", q.toString()); - - q = QParser.getParser("panthera tigris story", req(params("df", "t_as_same_term_boosted_foo","sow", "false"))).getQuery(); - assertEquals("(t_as_same_term_boosted_foo:tiger)^0.99 t_as_same_term_boosted_foo:story", q.toString()); + Query q = + QParser.getParser("panthera pardus story", req(params("df", field, "sow", "false"))) + .getQuery(); + assertThat( + q, + booleanQuery( + termQuery(field, "story"), + disjunctionOf(boosted(field, "leopard", 0.6f), phraseQuery(field, "panthera pardus")))); + + q = + QParser.getParser( + "panthera pardus story", + req(params("df", "t_as_distinct_boosted_foo", "sow", "false"))) + .getQuery(); + assertEquals( + "((t_as_distinct_boosted_foo:leopard)^0.6 t_as_distinct_boosted_foo:\"panthera pardus\") 
t_as_distinct_boosted_foo:story", + q.toString()); + + q = + QParser.getParser( + "panthera pardus story", + req(params("df", "t_as_same_term_boosted_foo", "sow", "false"))) + .getQuery(); + assertEquals( + "((t_as_same_term_boosted_foo:leopard)^0.6 t_as_same_term_boosted_foo:\"panthera pardus\") t_as_same_term_boosted_foo:story", + q.toString()); + + // panthera tigris => tiger|0.99 + q = + QParser.getParser( + "panthera tigris story", + req(params("df", "t_pick_best_boosted_foo", "sow", "false"))) + .getQuery(); + assertEquals( + "(t_pick_best_boosted_foo:tiger)^0.99 t_pick_best_boosted_foo:story", q.toString()); + + q = + QParser.getParser( + "panthera tigris story", + req(params("df", "t_as_distinct_boosted_foo", "sow", "false"))) + .getQuery(); + assertEquals( + "(t_as_distinct_boosted_foo:tiger)^0.99 t_as_distinct_boosted_foo:story", q.toString()); + + q = + QParser.getParser( + "panthera tigris story", + req(params("df", "t_as_same_term_boosted_foo", "sow", "false"))) + .getQuery(); + assertEquals( + "(t_as_same_term_boosted_foo:tiger)^0.99 t_as_same_term_boosted_foo:story", q.toString()); } - public void testSynonymsBoost_singleConceptQueryMultiTermSynonymWithMultipleBoost_shouldParseMultiplicativeBoostedQuery() throws Exception { - //panthera blytheae, oldest|0.5 ancient|0.9 panthera + public void + testSynonymsBoost_singleConceptQueryMultiTermSynonymWithMultipleBoost_shouldParseMultiplicativeBoostedQuery() + throws Exception { + // panthera blytheae, oldest|0.5 ancient|0.9 panthera String field = "t_pick_best_boosted_foo"; - Query q = QParser.getParser("panthera blytheae",req(params("df", field, "sow", "false"))).getQuery(); - assertThat(q, booleanQuery(disjunctionOf( - boosted(phraseQuery(field, "oldest ancient panthera"), 0.45f), - phraseQuery(field, "panthera blytheae") - ))); - - q = QParser.getParser("panthera blytheae", req(params("df", "t_as_distinct_boosted_foo","sow", "false"))).getQuery(); - assertEquals("((t_as_distinct_boosted_foo:\"oldest ancient panthera\")^0.45 t_as_distinct_boosted_foo:\"panthera blytheae\")", q.toString()); - - q = QParser.getParser("panthera blytheae", req(params("df", "t_as_same_term_boosted_foo","sow", "false"))).getQuery(); - assertEquals("((t_as_same_term_boosted_foo:\"oldest ancient panthera\")^0.45 t_as_same_term_boosted_foo:\"panthera blytheae\")", q.toString()); + Query q = + QParser.getParser("panthera blytheae", req(params("df", field, "sow", "false"))).getQuery(); + assertThat( + q, + booleanQuery( + disjunctionOf( + boosted(phraseQuery(field, "oldest ancient panthera"), 0.45f), + phraseQuery(field, "panthera blytheae")))); + + q = + QParser.getParser( + "panthera blytheae", req(params("df", "t_as_distinct_boosted_foo", "sow", "false"))) + .getQuery(); + assertEquals( + "((t_as_distinct_boosted_foo:\"oldest ancient panthera\")^0.45 t_as_distinct_boosted_foo:\"panthera blytheae\")", + q.toString()); + + q = + QParser.getParser( + "panthera blytheae", + req(params("df", "t_as_same_term_boosted_foo", "sow", "false"))) + .getQuery(); + assertEquals( + "((t_as_same_term_boosted_foo:\"oldest ancient panthera\")^0.45 t_as_same_term_boosted_foo:\"panthera blytheae\")", + q.toString()); } - public void testSynonymsBoost_singleConceptQueryMultiTermSynonyms_shouldParseBoostedQuery() throws Exception { - //snow leopard, panthera uncia|0.9, big cat|0.8, white_leopard|0.6 + public void testSynonymsBoost_singleConceptQueryMultiTermSynonyms_shouldParseBoostedQuery() + throws Exception { + // snow leopard, panthera uncia|0.9, big cat|0.8, 
white_leopard|0.6 String field = "t_pick_best_boosted_foo"; - Query q = QParser.getParser("snow leopard",req(params("df", field, "sow", "false"))).getQuery(); - assertThat(q, booleanQuery(disjunctionOf( // TODO why does this generate a single clause Boolean? - phraseQuery(field, "snow leopard"), - boosted(phraseQuery(field, "panthera uncia"), 0.9f), - boosted(phraseQuery(field, "big cat"), 0.8f), - boosted(field, "white_leopard", 0.6f) - ))); - - q = QParser.getParser("snow leopard", req(params("df", "t_as_distinct_boosted_foo","sow", "false"))).getQuery(); - assertEquals("((t_as_distinct_boosted_foo:\"panthera uncia\")^0.9 (t_as_distinct_boosted_foo:\"big cat\")^0.8 (t_as_distinct_boosted_foo:white_leopard)^0.6 t_as_distinct_boosted_foo:\"snow leopard\")", q.toString()); - - q = QParser.getParser("snow leopard", req(params("df", "t_as_same_term_boosted_foo","sow", "false"))).getQuery(); - assertEquals("((t_as_same_term_boosted_foo:\"panthera uncia\")^0.9 (t_as_same_term_boosted_foo:\"big cat\")^0.8 (t_as_same_term_boosted_foo:white_leopard)^0.6 t_as_same_term_boosted_foo:\"snow leopard\")", q.toString()); - - //panthera onca => jaguar|0.95, big cat|0.85, black panther|0.65 - q = QParser.getParser("panthera onca", req(params("df", "t_pick_best_boosted_foo","sow", "false"))).getQuery(); - assertThat(q, booleanQuery(disjunctionOf( - boosted(field, "jaguar", 0.95f), - boosted(phraseQuery(field, "big cat"), 0.85f), - boosted(phraseQuery(field, "black panther"), 0.65f) - ))); - - q = QParser.getParser("panthera onca", req(params("df", "t_as_distinct_boosted_foo","sow", "false"))).getQuery(); - assertEquals("((t_as_distinct_boosted_foo:jaguar)^0.95 (t_as_distinct_boosted_foo:\"big cat\")^0.85 (t_as_distinct_boosted_foo:\"black panther\")^0.65)", q.toString()); - - q = QParser.getParser("panthera onca", req(params("df", "t_as_same_term_boosted_foo","sow", "false"))).getQuery(); - assertEquals("((t_as_same_term_boosted_foo:jaguar)^0.95 (t_as_same_term_boosted_foo:\"big cat\")^0.85 (t_as_same_term_boosted_foo:\"black panther\")^0.65)", q.toString()); + Query q = + QParser.getParser("snow leopard", req(params("df", field, "sow", "false"))).getQuery(); + assertThat( + q, + booleanQuery( + disjunctionOf( // TODO why does this generate a single clause Boolean? 
+ phraseQuery(field, "snow leopard"), + boosted(phraseQuery(field, "panthera uncia"), 0.9f), + boosted(phraseQuery(field, "big cat"), 0.8f), + boosted(field, "white_leopard", 0.6f)))); + + q = + QParser.getParser( + "snow leopard", req(params("df", "t_as_distinct_boosted_foo", "sow", "false"))) + .getQuery(); + assertEquals( + "((t_as_distinct_boosted_foo:\"panthera uncia\")^0.9 (t_as_distinct_boosted_foo:\"big cat\")^0.8 (t_as_distinct_boosted_foo:white_leopard)^0.6 t_as_distinct_boosted_foo:\"snow leopard\")", + q.toString()); + + q = + QParser.getParser( + "snow leopard", req(params("df", "t_as_same_term_boosted_foo", "sow", "false"))) + .getQuery(); + assertEquals( + "((t_as_same_term_boosted_foo:\"panthera uncia\")^0.9 (t_as_same_term_boosted_foo:\"big cat\")^0.8 (t_as_same_term_boosted_foo:white_leopard)^0.6 t_as_same_term_boosted_foo:\"snow leopard\")", + q.toString()); + + // panthera onca => jaguar|0.95, big cat|0.85, black panther|0.65 + q = + QParser.getParser( + "panthera onca", req(params("df", "t_pick_best_boosted_foo", "sow", "false"))) + .getQuery(); + assertThat( + q, + booleanQuery( + disjunctionOf( + boosted(field, "jaguar", 0.95f), + boosted(phraseQuery(field, "big cat"), 0.85f), + boosted(phraseQuery(field, "black panther"), 0.65f)))); + + q = + QParser.getParser( + "panthera onca", req(params("df", "t_as_distinct_boosted_foo", "sow", "false"))) + .getQuery(); + assertEquals( + "((t_as_distinct_boosted_foo:jaguar)^0.95 (t_as_distinct_boosted_foo:\"big cat\")^0.85 (t_as_distinct_boosted_foo:\"black panther\")^0.65)", + q.toString()); + + q = + QParser.getParser( + "panthera onca", req(params("df", "t_as_same_term_boosted_foo", "sow", "false"))) + .getQuery(); + assertEquals( + "((t_as_same_term_boosted_foo:jaguar)^0.95 (t_as_same_term_boosted_foo:\"big cat\")^0.85 (t_as_same_term_boosted_foo:\"black panther\")^0.65)", + q.toString()); } - public void testSynonymsBoost_multiConceptQuerySingleTermSynonym_shouldParseBoostedQuery() throws Exception { - //panthera pardus, leopard|0.6 - //tiger, tigre|0.9 + public void testSynonymsBoost_multiConceptQuerySingleTermSynonym_shouldParseBoostedQuery() + throws Exception { + // panthera pardus, leopard|0.6 + // tiger, tigre|0.9 String field = "t_pick_best_boosted_foo"; - Query q = QParser.getParser("panthera pardus tiger",req(params("df", field,"sow", "false"))).getQuery(); - assertThat(q, booleanQuery( - disjunctionOf( - boosted(field, "leopard", 0.6f), - phraseQuery(field, "panthera pardus") - ), - disjunctionOf( - termQuery(field, "tiger"), - boosted(field, "tigre", 0.9f) - ) - )); - - q = QParser.getParser("panthera pardus tiger", req(params("df", "t_as_distinct_boosted_foo","sow", "false"))).getQuery(); - assertEquals("((t_as_distinct_boosted_foo:leopard)^0.6 t_as_distinct_boosted_foo:\"panthera pardus\") ((t_as_distinct_boosted_foo:tigre)^0.9 t_as_distinct_boosted_foo:tiger)", q.toString()); - - q = QParser.getParser("panthera pardus tiger", req(params("df", "t_as_same_term_boosted_foo","sow", "false"))).getQuery(); - assertEquals("((t_as_same_term_boosted_foo:leopard)^0.6 t_as_same_term_boosted_foo:\"panthera pardus\") Synonym(t_as_same_term_boosted_foo:tiger t_as_same_term_boosted_foo:tigre^0.9)", q.toString()); + Query q = + QParser.getParser("panthera pardus tiger", req(params("df", field, "sow", "false"))) + .getQuery(); + assertThat( + q, + booleanQuery( + disjunctionOf(boosted(field, "leopard", 0.6f), phraseQuery(field, "panthera pardus")), + disjunctionOf(termQuery(field, "tiger"), boosted(field, "tigre", 0.9f)))); + 
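Aside on the fixtures these assertions exercise: synonym entries such as "tiger, tigre|0.9" and "lynx => lince|0.8, lynx_canadensis|0.9" carry per-synonym weights that the analysis chain must preserve for the parser to emit the ^0.9-style boosts checked here. A minimal sketch of such a chain, assuming the t_*_boosted_foo field types pair SynonymGraphFilterFactory with Lucene's DelimitedBoostTokenFilterFactory (the config directory and synonyms file name below are illustrative, not taken from this patch):

    import java.nio.file.Paths;
    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.custom.CustomAnalyzer;

    public class BoostedSynonymAnalyzerSketch {
      public static Analyzer build() throws Exception {
        // "tiger" expands to "tigre|0.9" via the synonym graph; delimitedBoost then
        // strips the trailing "|0.9" and records it as a boost attribute, which the
        // query parser turns into the (field:tigre)^0.9 clauses asserted in the tests.
        return CustomAnalyzer.builder(Paths.get("conf")) // dir containing the synonyms file
            .withTokenizer("whitespace")
            .addTokenFilter("synonymGraph", "synonyms", "synonyms-boost.txt", "expand", "true")
            .addTokenFilter("delimitedBoost") // '|' is the default delimiter
            .build();
      }
    }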
+ q = + QParser.getParser( + "panthera pardus tiger", + req(params("df", "t_as_distinct_boosted_foo", "sow", "false"))) + .getQuery(); + assertEquals( + "((t_as_distinct_boosted_foo:leopard)^0.6 t_as_distinct_boosted_foo:\"panthera pardus\") ((t_as_distinct_boosted_foo:tigre)^0.9 t_as_distinct_boosted_foo:tiger)", + q.toString()); + + q = + QParser.getParser( + "panthera pardus tiger", + req(params("df", "t_as_same_term_boosted_foo", "sow", "false"))) + .getQuery(); + assertEquals( + "((t_as_same_term_boosted_foo:leopard)^0.6 t_as_same_term_boosted_foo:\"panthera pardus\") Synonym(t_as_same_term_boosted_foo:tiger t_as_same_term_boosted_foo:tigre^0.9)", + q.toString()); } - public void testSynonymsBoost_multiConceptsQueryMultiTermSynonyms_shouldParseBoostedQuery() throws Exception { - //snow leopard, panthera uncia|0.9, big cat|0.8, white_leopard|0.6 - //panthera onca => jaguar|0.95, big cat|0.85, black panther|0.65 + public void testSynonymsBoost_multiConceptsQueryMultiTermSynonyms_shouldParseBoostedQuery() + throws Exception { + // snow leopard, panthera uncia|0.9, big cat|0.8, white_leopard|0.6 + // panthera onca => jaguar|0.95, big cat|0.85, black panther|0.65 String field = "t_pick_best_boosted_foo"; - Query q = QParser.getParser("snow leopard panthera onca",req(params("df", field,"sow", "false"))).getQuery(); - assertThat(q, booleanQuery( - disjunctionOf( - boosted(phraseQuery(field, "panthera uncia"), 0.9f), - boosted(phraseQuery(field, "big cat"), 0.8f), - boosted(field, "white_leopard", 0.6f), - phraseQuery(field, "snow leopard") - ), - disjunctionOf( - boosted(field, "jaguar", 0.95f), - boosted(phraseQuery(field, "big cat"), 0.85f), - boosted(phraseQuery(field, "black panther"), 0.65f) - ) - )); - - q = QParser.getParser("snow leopard panthera onca", req(params("df", "t_as_distinct_boosted_foo","sow", "false"))).getQuery(); - assertEquals("((t_as_distinct_boosted_foo:\"panthera uncia\")^0.9 (t_as_distinct_boosted_foo:\"big cat\")^0.8 (t_as_distinct_boosted_foo:white_leopard)^0.6 t_as_distinct_boosted_foo:\"snow leopard\")" + - " ((t_as_distinct_boosted_foo:jaguar)^0.95 (t_as_distinct_boosted_foo:\"big cat\")^0.85 (t_as_distinct_boosted_foo:\"black panther\")^0.65)", q.toString()); - - q = QParser.getParser("snow leopard panthera onca", req(params("df", "t_as_same_term_boosted_foo","sow", "false"))).getQuery(); - assertEquals("((t_as_same_term_boosted_foo:\"panthera uncia\")^0.9 (t_as_same_term_boosted_foo:\"big cat\")^0.8 (t_as_same_term_boosted_foo:white_leopard)^0.6 t_as_same_term_boosted_foo:\"snow leopard\")" + - " ((t_as_same_term_boosted_foo:jaguar)^0.95 (t_as_same_term_boosted_foo:\"big cat\")^0.85 (t_as_same_term_boosted_foo:\"black panther\")^0.65)", q.toString()); - + Query q = + QParser.getParser("snow leopard panthera onca", req(params("df", field, "sow", "false"))) + .getQuery(); + assertThat( + q, + booleanQuery( + disjunctionOf( + boosted(phraseQuery(field, "panthera uncia"), 0.9f), + boosted(phraseQuery(field, "big cat"), 0.8f), + boosted(field, "white_leopard", 0.6f), + phraseQuery(field, "snow leopard")), + disjunctionOf( + boosted(field, "jaguar", 0.95f), + boosted(phraseQuery(field, "big cat"), 0.85f), + boosted(phraseQuery(field, "black panther"), 0.65f)))); + + q = + QParser.getParser( + "snow leopard panthera onca", + req(params("df", "t_as_distinct_boosted_foo", "sow", "false"))) + .getQuery(); + assertEquals( + "((t_as_distinct_boosted_foo:\"panthera uncia\")^0.9 (t_as_distinct_boosted_foo:\"big cat\")^0.8 (t_as_distinct_boosted_foo:white_leopard)^0.6 
t_as_distinct_boosted_foo:\"snow leopard\")" + + " ((t_as_distinct_boosted_foo:jaguar)^0.95 (t_as_distinct_boosted_foo:\"big cat\")^0.85 (t_as_distinct_boosted_foo:\"black panther\")^0.65)", + q.toString()); + + q = + QParser.getParser( + "snow leopard panthera onca", + req(params("df", "t_as_same_term_boosted_foo", "sow", "false"))) + .getQuery(); + assertEquals( + "((t_as_same_term_boosted_foo:\"panthera uncia\")^0.9 (t_as_same_term_boosted_foo:\"big cat\")^0.8 (t_as_same_term_boosted_foo:white_leopard)^0.6 t_as_same_term_boosted_foo:\"snow leopard\")" + + " ((t_as_same_term_boosted_foo:jaguar)^0.95 (t_as_same_term_boosted_foo:\"big cat\")^0.85 (t_as_same_term_boosted_foo:\"black panther\")^0.65)", + q.toString()); } - + public void testSynonymsBoost_edismaxBoost_shouldParseBoostedPhraseQuery() throws Exception { String field = "t_pick_best_boosted_foo"; - Query q = QParser.getParser("snow leopard lion","edismax",true, req(params("sow", "false","qf", field + "^10"))).getQuery(); - assertThat(q, booleanQuery(booleanQuery( - disjunctionOf(boosted(disjunctionOf( - phraseQuery(field, "snow leopard"), - boosted(phraseQuery(field, "big cat"), 0.8f), - boosted(phraseQuery(field, "panthera uncia"), 0.9f), - boosted(termQuery(field, "white_leopard"), 0.6f) - ), 10)), - disjunctionOf(boosted(disjunctionOf( - boosted(termQuery(field, "kimba"), 0.75f), - boosted(phraseQuery(field, "simba leo"), 0.8f), - boosted(phraseQuery(field, "panthera leo"), 0.9f) - ), 10))), - BooleanClause.Occur.MUST)); - - q = QParser.getParser("snow leopard lion","edismax",true, req(params("sow", "false","qf", "t_as_distinct_boosted_foo^10"))).getQuery(); - assertEquals("+(" + - "(((t_as_distinct_boosted_foo:\"panthera uncia\")^0.9 (t_as_distinct_boosted_foo:\"big cat\")^0.8 (t_as_distinct_boosted_foo:white_leopard)^0.6 t_as_distinct_boosted_foo:\"snow leopard\")^10.0)" + - " (((t_as_distinct_boosted_foo:\"panthera leo\")^0.9 (t_as_distinct_boosted_foo:\"simba leo\")^0.8 (t_as_distinct_boosted_foo:kimba)^0.75)^10.0))", q.toString()); - - q = QParser.getParser("snow leopard lion","edismax",true, req(params("sow", "false","qf", "t_as_same_term_boosted_foo^10"))).getQuery(); - assertEquals("+(" + - "(((t_as_same_term_boosted_foo:\"panthera uncia\")^0.9 (t_as_same_term_boosted_foo:\"big cat\")^0.8 (t_as_same_term_boosted_foo:white_leopard)^0.6 t_as_same_term_boosted_foo:\"snow leopard\")^10.0)" + - " (((t_as_same_term_boosted_foo:\"panthera leo\")^0.9 (t_as_same_term_boosted_foo:\"simba leo\")^0.8 (t_as_same_term_boosted_foo:kimba)^0.75)^10.0))", q.toString()); - + Query q = + QParser.getParser( + "snow leopard lion", + "edismax", + true, + req(params("sow", "false", "qf", field + "^10"))) + .getQuery(); + assertThat( + q, + booleanQuery( + booleanQuery( + disjunctionOf( + boosted( + disjunctionOf( + phraseQuery(field, "snow leopard"), + boosted(phraseQuery(field, "big cat"), 0.8f), + boosted(phraseQuery(field, "panthera uncia"), 0.9f), + boosted(termQuery(field, "white_leopard"), 0.6f)), + 10)), + disjunctionOf( + boosted( + disjunctionOf( + boosted(termQuery(field, "kimba"), 0.75f), + boosted(phraseQuery(field, "simba leo"), 0.8f), + boosted(phraseQuery(field, "panthera leo"), 0.9f)), + 10))), + BooleanClause.Occur.MUST)); + + q = + QParser.getParser( + "snow leopard lion", + "edismax", + true, + req(params("sow", "false", "qf", "t_as_distinct_boosted_foo^10"))) + .getQuery(); + assertEquals( + "+(" + + "(((t_as_distinct_boosted_foo:\"panthera uncia\")^0.9 (t_as_distinct_boosted_foo:\"big cat\")^0.8 
(t_as_distinct_boosted_foo:white_leopard)^0.6 t_as_distinct_boosted_foo:\"snow leopard\")^10.0)" + + " (((t_as_distinct_boosted_foo:\"panthera leo\")^0.9 (t_as_distinct_boosted_foo:\"simba leo\")^0.8 (t_as_distinct_boosted_foo:kimba)^0.75)^10.0))", + q.toString()); + + q = + QParser.getParser( + "snow leopard lion", + "edismax", + true, + req(params("sow", "false", "qf", "t_as_same_term_boosted_foo^10"))) + .getQuery(); + assertEquals( + "+(" + + "(((t_as_same_term_boosted_foo:\"panthera uncia\")^0.9 (t_as_same_term_boosted_foo:\"big cat\")^0.8 (t_as_same_term_boosted_foo:white_leopard)^0.6 t_as_same_term_boosted_foo:\"snow leopard\")^10.0)" + + " (((t_as_same_term_boosted_foo:\"panthera leo\")^0.9 (t_as_same_term_boosted_foo:\"simba leo\")^0.8 (t_as_same_term_boosted_foo:kimba)^0.75)^10.0))", + q.toString()); } public void testSynonymsBoost_phraseQueryMultiTermSynonymsBoost() throws Exception { - Query q = QParser.getParser("\"snow leopard lion\"", req(params("df", "t_pick_best_boosted_foo", "sow", "false"))).getQuery(); - assertEquals("(t_pick_best_boosted_foo:\"panthera uncia panthera leo\")^0.80999994 " + - "(t_pick_best_boosted_foo:\"panthera uncia simba leo\")^0.71999997 " + - "(t_pick_best_boosted_foo:\"panthera uncia kimba\")^0.67499995 " + - "(t_pick_best_boosted_foo:\"big cat panthera leo\")^0.71999997 " + - "(t_pick_best_boosted_foo:\"big cat simba leo\")^0.64000005 " + - "(t_pick_best_boosted_foo:\"big cat kimba\")^0.6 " + - "(t_pick_best_boosted_foo:\"white_leopard panthera leo\")^0.54 " + - "(t_pick_best_boosted_foo:\"white_leopard simba leo\")^0.48000002 " + - "(t_pick_best_boosted_foo:\"white_leopard kimba\")^0.45000002 " + - "(t_pick_best_boosted_foo:\"snow leopard panthera leo\")^0.9 " + - "(t_pick_best_boosted_foo:\"snow leopard simba leo\")^0.8 " + - "(t_pick_best_boosted_foo:\"snow leopard kimba\")^0.75", q.toString()); + Query q = + QParser.getParser( + "\"snow leopard lion\"", + req(params("df", "t_pick_best_boosted_foo", "sow", "false"))) + .getQuery(); + assertEquals( + "(t_pick_best_boosted_foo:\"panthera uncia panthera leo\")^0.80999994 " + + "(t_pick_best_boosted_foo:\"panthera uncia simba leo\")^0.71999997 " + + "(t_pick_best_boosted_foo:\"panthera uncia kimba\")^0.67499995 " + + "(t_pick_best_boosted_foo:\"big cat panthera leo\")^0.71999997 " + + "(t_pick_best_boosted_foo:\"big cat simba leo\")^0.64000005 " + + "(t_pick_best_boosted_foo:\"big cat kimba\")^0.6 " + + "(t_pick_best_boosted_foo:\"white_leopard panthera leo\")^0.54 " + + "(t_pick_best_boosted_foo:\"white_leopard simba leo\")^0.48000002 " + + "(t_pick_best_boosted_foo:\"white_leopard kimba\")^0.45000002 " + + "(t_pick_best_boosted_foo:\"snow leopard panthera leo\")^0.9 " + + "(t_pick_best_boosted_foo:\"snow leopard simba leo\")^0.8 " + + "(t_pick_best_boosted_foo:\"snow leopard kimba\")^0.75", + q.toString()); } public void testSynonymsBoost_phraseQueryMultiTermSynonymsMultipleBoost() throws Exception { - Query q = QParser.getParser("\"panthera blytheae lion\"", req(params("df", "t_pick_best_boosted_foo", "sow", "false"))).getQuery(); - assertEquals("(t_pick_best_boosted_foo:\"oldest ancient panthera panthera leo\")^0.40499997 " + - "(t_pick_best_boosted_foo:\"oldest ancient panthera simba leo\")^0.35999998 " + - "(t_pick_best_boosted_foo:\"oldest ancient panthera kimba\")^0.33749998 " + - "(t_pick_best_boosted_foo:\"panthera blytheae panthera leo\")^0.9 " + - "(t_pick_best_boosted_foo:\"panthera blytheae simba leo\")^0.8 " + - "(t_pick_best_boosted_foo:\"panthera blytheae kimba\")^0.75", 
q.toString()); + Query q = + QParser.getParser( + "\"panthera blytheae lion\"", + req(params("df", "t_pick_best_boosted_foo", "sow", "false"))) + .getQuery(); + assertEquals( + "(t_pick_best_boosted_foo:\"oldest ancient panthera panthera leo\")^0.40499997 " + + "(t_pick_best_boosted_foo:\"oldest ancient panthera simba leo\")^0.35999998 " + + "(t_pick_best_boosted_foo:\"oldest ancient panthera kimba\")^0.33749998 " + + "(t_pick_best_boosted_foo:\"panthera blytheae panthera leo\")^0.9 " + + "(t_pick_best_boosted_foo:\"panthera blytheae simba leo\")^0.8 " + + "(t_pick_best_boosted_foo:\"panthera blytheae kimba\")^0.75", + q.toString()); } public void testSynonymsBoost_BoostMissing_shouldAssignDefaultBoost() throws Exception { - //leopard, big cat|0.8, bagheera|0.9, panthera pardus|0.85 + // leopard, big cat|0.8, bagheera|0.9, panthera pardus|0.85 String field = "t_pick_best_boosted_foo"; Query q = QParser.getParser("leopard", req(params("df", field))).getQuery(); - assertThat(q, booleanQuery(disjunctionOf( - termQuery(field, "leopard"), - boosted(phraseQuery(field, "big cat"), 0.8f), - boosted(phraseQuery(field, "panthera pardus"), 0.85f), - boosted(termQuery(field, "bagheera"), 0.9f) - ))); + assertThat( + q, + booleanQuery( + disjunctionOf( + termQuery(field, "leopard"), + boosted(phraseQuery(field, "big cat"), 0.8f), + boosted(phraseQuery(field, "panthera pardus"), 0.85f), + boosted(termQuery(field, "bagheera"), 0.9f)))); q = QParser.getParser("leopard", req(params("df", "t_as_distinct_boosted_foo"))).getQuery(); - assertEquals("((t_as_distinct_boosted_foo:\"big cat\")^0.8 (t_as_distinct_boosted_foo:bagheera)^0.9 (t_as_distinct_boosted_foo:\"panthera pardus\")^0.85 t_as_distinct_boosted_foo:leopard)", q.toString()); + assertEquals( + "((t_as_distinct_boosted_foo:\"big cat\")^0.8 (t_as_distinct_boosted_foo:bagheera)^0.9 (t_as_distinct_boosted_foo:\"panthera pardus\")^0.85 t_as_distinct_boosted_foo:leopard)", + q.toString()); } @Test public void testBadRequestInSetQuery() throws SyntaxError { SolrQueryRequest req = req(); QParser qParser; - String[] fieldSuffix = new String[] { - "ti", "tf", "td", "tl", - "i", "f", "d", "l", - "is", "fs", "ds", "ls", - "i_dv", "f_dv", "d_dv", "l_dv", - "is_dv", "fs_dv", "ds_dv", "ls_dv", - "i_dvo", "f_dvo", "d_dvo", "l_dvo", - }; - - for (String suffix:fieldSuffix) { - //Good queries - qParser = QParser.getParser("foo_" + suffix + ":(1 2 3 4 5 6 7 8 9 10 20 19 18 17 16 15 14 13 12 25)", req); + String[] fieldSuffix = + new String[] { + "ti", "tf", "td", "tl", + "i", "f", "d", "l", + "is", "fs", "ds", "ls", + "i_dv", "f_dv", "d_dv", "l_dv", + "is_dv", "fs_dv", "ds_dv", "ls_dv", + "i_dvo", "f_dvo", "d_dvo", "l_dvo", + }; + + for (String suffix : fieldSuffix) { + // Good queries + qParser = + QParser.getParser( + "foo_" + suffix + ":(1 2 3 4 5 6 7 8 9 10 20 19 18 17 16 15 14 13 12 25)", req); qParser.setIsFilter(true); qParser.getQuery(); } - for (String suffix:fieldSuffix) { - qParser = QParser.getParser("foo_" + suffix + ":(1 2 3 4 5 6 7 8 9 10 20 19 18 17 16 15 14 13 12 NOT_A_NUMBER)", req); + for (String suffix : fieldSuffix) { + qParser = + QParser.getParser( + "foo_" + suffix + ":(1 2 3 4 5 6 7 8 9 10 20 19 18 17 16 15 14 13 12 NOT_A_NUMBER)", + req); qParser.setIsFilter(true); // this may change in the future SolrException e = expectThrows(SolrException.class, "Expecting exception", qParser::getQuery); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); - assertTrue("Unexpected exception: " + e.getMessage(), e.getMessage().contains("Invalid 
Number: NOT_A_NUMBER")); + assertTrue( + "Unexpected exception: " + e.getMessage(), + e.getMessage().contains("Invalid Number: NOT_A_NUMBER")); } - - } @Test public void testFieldExistsQueries() throws SyntaxError { SolrQueryRequest req = req(); - String[] fieldSuffix = new String[] { - "ti", "tf", "td", "tl", "tdt", // trie types - "pi", "pf", "pd", "pl", "pdt", // point types - "i", "f", "d", "l", "dt", "s", "b", // numeric types - "is", "fs", "ds", "ls", "dts", "ss", "bs", // multi-valued - "i_dv", "f_dv", "d_dv", "l_dv", "dt_dv", "s_dv", "b_dv", // numerics + docValues - "is_dv", "fs_dv", "ds_dv", "ls_dv", "dts_dv", "ss_dv", "bs_dv", // multi-docValues - "i_dvo", "f_dvo", "d_dvo", "l_dvo", "dt_dvo", // not indexed - "t", - "t_on", "b_norms", "s_norms", "dt_norms", "i_norms", "l_norms", "f_norms", "d_norms" - }; - String[] existenceQueries = new String[] { - "*", "[* TO *]" - }; + String[] fieldSuffix = + new String[] { + "ti", + "tf", + "td", + "tl", + "tdt", // trie types + "pi", + "pf", + "pd", + "pl", + "pdt", // point types + "i", + "f", + "d", + "l", + "dt", + "s", + "b", // numeric types + "is", + "fs", + "ds", + "ls", + "dts", + "ss", + "bs", // multi-valued + "i_dv", + "f_dv", + "d_dv", + "l_dv", + "dt_dv", + "s_dv", + "b_dv", // numerics + docValues + "is_dv", + "fs_dv", + "ds_dv", + "ls_dv", + "dts_dv", + "ss_dv", + "bs_dv", // multi-docValues + "i_dvo", + "f_dvo", + "d_dvo", + "l_dvo", + "dt_dvo", // not indexed + "t", + "t_on", + "b_norms", + "s_norms", + "dt_norms", + "i_norms", + "l_norms", + "f_norms", + "d_norms" + }; + String[] existenceQueries = new String[] {"*", "[* TO *]"}; for (String existenceQuery : existenceQueries) { for (String suffix : fieldSuffix) { @@ -1620,24 +1797,81 @@ public void testFieldExistsQueries() throws SyntaxError { SchemaField schemaField = indexSchema.getField(field); // Test float & double realNumber queries differently - if ("[* TO *]".equals(existenceQuery) && (schemaField.getType().getNumberType() == NumberType.DOUBLE || schemaField.getType().getNumberType() == NumberType.FLOAT)) { - assertFalse("For float and double fields \"" + query + "\" is not an existence query, so the query returned should not be a DocValuesFieldExistsQuery.", createdQuery instanceof DocValuesFieldExistsQuery); - assertFalse("For float and double fields \"" + query + "\" is not an existence query, so the query returned should not be a NormsFieldExistsQuery.", createdQuery instanceof NormsFieldExistsQuery); - assertFalse("For float and double fields \"" + query + "\" is not an existence query, so NaN should not be matched via a ConstantScoreQuery.", createdQuery instanceof ConstantScoreQuery); - assertFalse("For float and double fields\"" + query + "\" is not an existence query, so NaN should not be matched via a BooleanQuery (NaN and [* TO *]).", createdQuery instanceof BooleanQuery); + if ("[* TO *]".equals(existenceQuery) + && (schemaField.getType().getNumberType() == NumberType.DOUBLE + || schemaField.getType().getNumberType() == NumberType.FLOAT)) { + assertFalse( + "For float and double fields \"" + + query + + "\" is not an existence query, so the query returned should not be a DocValuesFieldExistsQuery.", + createdQuery instanceof DocValuesFieldExistsQuery); + assertFalse( + "For float and double fields \"" + + query + + "\" is not an existence query, so the query returned should not be a NormsFieldExistsQuery.", + createdQuery instanceof NormsFieldExistsQuery); + assertFalse( + "For float and double fields \"" + + query + + "\" is not an existence query, 
so NaN should not be matched via a ConstantScoreQuery.", + createdQuery instanceof ConstantScoreQuery); + assertFalse( + "For float and double fields\"" + + query + + "\" is not an existence query, so NaN should not be matched via a BooleanQuery (NaN and [* TO *]).", + createdQuery instanceof BooleanQuery); } else { if (schemaField.hasDocValues()) { - assertTrue("Field has docValues, so existence query \"" + query + "\" should return DocValuesFieldExistsQuery", createdQuery instanceof DocValuesFieldExistsQuery); - } else if (!schemaField.omitNorms() && !schemaField.getType().isPointField()) { //TODO: Remove !isPointField() for SOLR-14199 - assertTrue("Field has norms and no docValues, so existence query \"" + query + "\" should return NormsFieldExistsQuery", createdQuery instanceof NormsFieldExistsQuery); - } else if (schemaField.getType().getNumberType() == NumberType.DOUBLE || schemaField.getType().getNumberType() == NumberType.FLOAT) { - assertTrue("PointField with NaN values must include \"exists or NaN\" if the field doesn't have norms or docValues: \"" + query + "\".", createdQuery instanceof ConstantScoreQuery); - assertTrue("PointField with NaN values must include \"exists or NaN\" if the field doesn't have norms or docValues: \"" + query + "\".", ((ConstantScoreQuery)createdQuery).getQuery() instanceof BooleanQuery); - assertEquals("PointField with NaN values must include \"exists or NaN\" if the field doesn't have norms or docValues: \"" + query + "\". This boolean query must be an OR.", 1, ((BooleanQuery)((ConstantScoreQuery)createdQuery).getQuery()).getMinimumNumberShouldMatch()); - assertEquals("PointField with NaN values must include \"exists or NaN\" if the field doesn't have norms or docValues: \"" + query + "\". This boolean query must have 2 clauses.", 2, ((BooleanQuery)((ConstantScoreQuery)createdQuery).getQuery()).clauses().size()); + assertTrue( + "Field has docValues, so existence query \"" + + query + + "\" should return DocValuesFieldExistsQuery", + createdQuery instanceof DocValuesFieldExistsQuery); + } else if (!schemaField.omitNorms() + && !schemaField + .getType() + .isPointField()) { // TODO: Remove !isPointField() for SOLR-14199 + assertTrue( + "Field has norms and no docValues, so existence query \"" + + query + + "\" should return NormsFieldExistsQuery", + createdQuery instanceof NormsFieldExistsQuery); + } else if (schemaField.getType().getNumberType() == NumberType.DOUBLE + || schemaField.getType().getNumberType() == NumberType.FLOAT) { + assertTrue( + "PointField with NaN values must include \"exists or NaN\" if the field doesn't have norms or docValues: \"" + + query + + "\".", + createdQuery instanceof ConstantScoreQuery); + assertTrue( + "PointField with NaN values must include \"exists or NaN\" if the field doesn't have norms or docValues: \"" + + query + + "\".", + ((ConstantScoreQuery) createdQuery).getQuery() instanceof BooleanQuery); + assertEquals( + "PointField with NaN values must include \"exists or NaN\" if the field doesn't have norms or docValues: \"" + + query + + "\". This boolean query must be an OR.", + 1, + ((BooleanQuery) ((ConstantScoreQuery) createdQuery).getQuery()) + .getMinimumNumberShouldMatch()); + assertEquals( + "PointField with NaN values must include \"exists or NaN\" if the field doesn't have norms or docValues: \"" + + query + + "\". 
This boolean query must have 2 clauses.", + 2, + ((BooleanQuery) ((ConstantScoreQuery) createdQuery).getQuery()).clauses().size()); } else { - assertFalse("Field doesn't have docValues, so existence query \"" + query + "\" should not return DocValuesFieldExistsQuery", createdQuery instanceof DocValuesFieldExistsQuery); - assertFalse("Field doesn't have norms, so existence query \"" + query + "\" should not return NormsFieldExistsQuery", createdQuery instanceof NormsFieldExistsQuery); + assertFalse( + "Field doesn't have docValues, so existence query \"" + + query + + "\" should not return DocValuesFieldExistsQuery", + createdQuery instanceof DocValuesFieldExistsQuery); + assertFalse( + "Field doesn't have norms, so existence query \"" + + query + + "\" should not return NormsFieldExistsQuery", + createdQuery instanceof NormsFieldExistsQuery); } } } diff --git a/solr/core/src/test/org/apache/solr/search/TestSort.java b/solr/core/src/test/org/apache/solr/search/TestSort.java index 365eba90b85..96db219f150 100644 --- a/solr/core/src/test/org/apache/solr/search/TestSort.java +++ b/solr/core/src/test/org/apache/solr/search/TestSort.java @@ -24,7 +24,6 @@ import java.util.List; import java.util.Map; import java.util.Random; - import org.apache.lucene.analysis.core.SimpleAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; @@ -46,8 +45,8 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Sort; -import org.apache.lucene.search.SortField.Type; import org.apache.lucene.search.SortField; +import org.apache.lucene.search.SortField.Type; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopFieldCollector; import org.apache.lucene.search.Weight; @@ -70,7 +69,7 @@ public class TestSort extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema-minimal.xml"); + initCore("solrconfig.xml", "schema-minimal.xml"); } Random r; @@ -78,8 +77,8 @@ public static void beforeClass() throws Exception { int ndocs = 77; int iter = 50; int qiter = 1000; - int commitCount = ndocs/5 + 1; - int maxval = ndocs*2; + int commitCount = ndocs / 5 + 1; + int maxval = ndocs * 2; @Override public void setUp() throws Exception { @@ -94,7 +93,7 @@ static class MyDoc { @Override public String toString() { - return "{id=" +doc + " val1="+val + " val2="+val2 + "}"; + return "{id=" + doc + " val1=" + val + " val2=" + val2 + "}"; } } @@ -116,22 +115,27 @@ public void testRandomFieldNameSorts() throws Exception { names[j] = TestUtil.randomRealisticUnicodeString(r, 1, 100); // munge anything that might make this a function - names[j] = names[j].replaceFirst("\\{","\\}\\{"); - names[j] = names[j].replaceFirst("\\(","\\)\\("); - names[j] = names[j].replaceFirst("(\\\"|\\')","$1$1z"); - names[j] = names[j].replaceFirst("(\\d)","$1x"); + names[j] = names[j].replaceFirst("\\{", "\\}\\{"); + names[j] = names[j].replaceFirst("\\(", "\\)\\("); + names[j] = names[j].replaceFirst("(\\\"|\\')", "$1$1z"); + names[j] = names[j].replaceFirst("(\\d)", "$1x"); // eliminate pesky problem chars - names[j] = names[j].replaceAll("\\p{Cntrl}|\\p{javaWhitespace}",""); - + names[j] = names[j].replaceAll("\\p{Cntrl}|\\p{javaWhitespace}", ""); + if (0 == names[j].length()) { names[j] = null; } } // with luck this bad, never go to vegas // alternatively: if (null == names[j]) names[j] = "never_go_to_vegas"; - assertNotNull("Unable to generate a (non-blank) names["+j+"] 
after " - + nonBlankAttempts + " attempts", names[j]); + assertNotNull( + "Unable to generate a (non-blank) names[" + + j + + "] after " + + nonBlankAttempts + + " attempts", + names[j]); reverse[j] = r.nextBoolean(); @@ -140,7 +144,7 @@ public void testRandomFieldNameSorts() throws Exception { input.append(" "); input.append(reverse[j] ? "desc," : "asc,"); } - input.deleteCharAt(input.length()-1); + input.deleteCharAt(input.length() - 1); SortField[] sorts = null; List fields = null; try { @@ -150,61 +154,65 @@ public void testRandomFieldNameSorts() throws Exception { } catch (RuntimeException e) { throw new RuntimeException("Failed to parse sort: " + input, e); } - assertEquals("parsed sorts had unexpected size", - names.length, sorts.length); - assertEquals("parsed sort schema fields had unexpected size", - names.length, fields.size()); + assertEquals("parsed sorts had unexpected size", names.length, sorts.length); + assertEquals("parsed sort schema fields had unexpected size", names.length, fields.size()); for (int j = 0; j < names.length; j++) { - assertEquals("sorts["+j+"] had unexpected reverse: " + input, - reverse[j], sorts[j].getReverse()); + assertEquals( + "sorts[" + j + "] had unexpected reverse: " + input, reverse[j], sorts[j].getReverse()); final Type type = sorts[j].getType(); if (Type.SCORE.equals(type)) { - assertEquals("sorts["+j+"] is (unexpectedly) type score : " + input, - "score", names[j]); + assertEquals( + "sorts[" + j + "] is (unexpectedly) type score : " + input, "score", names[j]); } else if (Type.DOC.equals(type)) { - assertEquals("sorts["+j+"] is (unexpectedly) type doc : " + input, - "_docid_", names[j]); + assertEquals( + "sorts[" + j + "] is (unexpectedly) type doc : " + input, "_docid_", names[j]); } else if (Type.CUSTOM.equals(type) || Type.REWRITEABLE.equals(type)) { log.error("names[{}] : {}", j, names[j]); log.error("sorts[{}] : {}", j, sorts[j]); - fail("sorts["+j+"] resulted in a '" + type.toString() - + "', either sort parsing code is broken, or func/query " - + "semantics have gotten broader and munging in this test " - + "needs improved: " + input); + fail( + "sorts[" + + j + + "] resulted in a '" + + type.toString() + + "', either sort parsing code is broken, or func/query " + + "semantics have gotten broader and munging in this test " + + "needs improved: " + + input); } else { - assertEquals("sorts["+j+"] ("+type.toString()+ - ") had unexpected field in: " + input, - names[j], sorts[j].getField()); - assertEquals("fields["+j+"] ("+type.toString()+ - ") had unexpected name in: " + input, - names[j], fields.get(j).getName()); + assertEquals( + "sorts[" + j + "] (" + type.toString() + ") had unexpected field in: " + input, + names[j], + sorts[j].getField()); + assertEquals( + "fields[" + j + "] (" + type.toString() + ") had unexpected name in: " + input, + names[j], + fields.get(j).getName()); } } } } - public void testSort() throws Exception { Directory dir = new ByteBuffersDirectory(); Field f = new StringField("f", "0", Field.Store.NO); Field f2 = new StringField("f2", "0", Field.Store.NO); - for (int iterCnt = 0; iterCnt mapping = new HashMap<>(); + Map mapping = new HashMap<>(); mapping.put("f", UninvertingReader.Type.SORTED); mapping.put("f2", UninvertingReader.Type.SORTED); @@ -239,49 +246,52 @@ public void testSort() throws Exception { // System.out.println("segments="+searcher.getIndexReader().getSequentialSubReaders().length); assertTrue(reader.leaves().size() > 1); - for (int i=0; i>3)+1)+1; + int top = r.nextInt((ndocs >> 3) + 1) + 1; 
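Background for the permutations that follow (luceneSort, sortMissingLast, sortMissingFirst): they all reduce to Lucene's SortField.setMissingValue. A minimal sketch of that idiom, offered as an illustration rather than code from this patch (the field name "f" matches the test's):

    import org.apache.lucene.search.Sort;
    import org.apache.lucene.search.SortField;

    public class MissingValueSortSketch {
      public static Sort sortMissingLast() {
        // Ascending sort on string field "f"; documents with no value for "f"
        // land after every real value instead of the default first position.
        SortField byF = new SortField("f", SortField.Type.STRING, /* reverse= */ false);
        byF.setMissingValue(SortField.STRING_LAST); // STRING_FIRST gives the mirror case
        return new Sort(byF, new SortField(null, SortField.Type.SCORE)); // score breaks ties
      }
    }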
final boolean luceneSort = r.nextBoolean(); final boolean sortMissingLast = !luceneSort && r.nextBoolean(); final boolean sortMissingFirst = !luceneSort && !sortMissingLast; @@ -294,69 +304,79 @@ public int hashCode() { final boolean sortMissingFirst2 = !luceneSort2 && !sortMissingLast2; final boolean reverse2 = r.nextBoolean(); - if (r.nextBoolean()) sfields.add( new SortField(null, SortField.Type.SCORE)); + if (r.nextBoolean()) sfields.add(new SortField(null, SortField.Type.SCORE)); // hit both use-cases of sort-missing-last - sfields.add( getStringSortField("f", reverse, sortMissingLast, sortMissingFirst) ); + sfields.add(getStringSortField("f", reverse, sortMissingLast, sortMissingFirst)); if (secondary) { - sfields.add( getStringSortField("f2", reverse2, sortMissingLast2, sortMissingFirst2) ); + sfields.add(getStringSortField("f2", reverse2, sortMissingLast2, sortMissingFirst2)); } - if (r.nextBoolean()) sfields.add( new SortField(null, SortField.Type.SCORE)); + if (r.nextBoolean()) sfields.add(new SortField(null, SortField.Type.SCORE)); Sort sort = new Sort(sfields.toArray(new SortField[sfields.size()])); - final String nullRep = luceneSort || sortMissingFirst && !reverse || sortMissingLast && reverse ? "" : "zzz"; - final String nullRep2 = luceneSort2 || sortMissingFirst2 && !reverse2 || sortMissingLast2 && reverse2 ? "" : "zzz"; + final String nullRep = + luceneSort || sortMissingFirst && !reverse || sortMissingLast && reverse ? "" : "zzz"; + final String nullRep2 = + luceneSort2 || sortMissingFirst2 && !reverse2 || sortMissingLast2 && reverse2 + ? "" + : "zzz"; boolean scoreInOrder = r.nextBoolean(); - final TopFieldCollector topCollector = TopFieldCollector.create(sort, top, Integer.MAX_VALUE); + final TopFieldCollector topCollector = + TopFieldCollector.create(sort, top, Integer.MAX_VALUE); final List collectedDocs = new ArrayList<>(); // delegate and collect docs ourselves - Collector myCollector = new FilterCollector(topCollector) { + Collector myCollector = + new FilterCollector(topCollector) { - @Override - public LeafCollector getLeafCollector(LeafReaderContext context) - throws IOException { - final int docBase = context.docBase; - return new FilterLeafCollector(super.getLeafCollector(context)) { @Override - public void collect(int doc) throws IOException { - super.collect(doc); - collectedDocs.add(mydocs[docBase + doc]); + public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { + final int docBase = context.docBase; + return new FilterLeafCollector(super.getLeafCollector(context)) { + @Override + public void collect(int doc) throws IOException { + super.collect(doc); + collectedDocs.add(mydocs[docBase + doc]); + } + }; } }; - } - }; searcher.search(query, myCollector); - Collections.sort(collectedDocs, (o1, o2) -> { - String v1 = o1.val == null ? nullRep : o1.val; - String v2 = o2.val == null ? nullRep : o2.val; - int cmp = v1.compareTo(v2); - if (reverse) cmp = -cmp; - if (cmp != 0) return cmp; - - if (secondary) { - v1 = o1.val2 == null ? nullRep2 : o1.val2; - v2 = o2.val2 == null ? nullRep2 : o2.val2; - cmp = v1.compareTo(v2); - if (reverse2) cmp = -cmp; - } - - cmp = cmp == 0 ? o1.doc - o2.doc : cmp; - return cmp; - }); + Collections.sort( + collectedDocs, + (o1, o2) -> { + String v1 = o1.val == null ? nullRep : o1.val; + String v2 = o2.val == null ? nullRep : o2.val; + int cmp = v1.compareTo(v2); + if (reverse) cmp = -cmp; + if (cmp != 0) return cmp; + + if (secondary) { + v1 = o1.val2 == null ? 
nullRep2 : o1.val2;
+              v2 = o2.val2 == null ? nullRep2 : o2.val2;
+              cmp = v1.compareTo(v2);
+              if (reverse2) cmp = -cmp;
+            }
+            cmp = cmp == 0 ? o1.doc - o2.doc : cmp;
+            return cmp;
+          });

       TopDocs topDocs = topCollector.topDocs();
       ScoreDoc[] sdocs = topDocs.scoreDocs;
-      for (int j=0; jSOLR-5526
  * @see org.apache.solr.search.QParserPlugin#standardPlugins
- *
  */
 public class TestStandardQParsers extends SolrTestCase {
-  /**
-   * Field name of constant mandatory for query parser plugin.
-   */
+  /** Field name of constant mandatory for query parser plugin. */
   public static final String FIELD_NAME = "NAME";

   /**
-   * Test standard query parsers registered in {@link org.apache.solr.search.QParserPlugin#standardPlugins}
-   * have NAME field which is final, static, and matches the registered name.
+   * Test standard query parsers registered in {@link
+   * org.apache.solr.search.QParserPlugin#standardPlugins} have NAME field which is final, static,
+   * and matches the registered name.
    */
   @Test
   public void testRegisteredName() throws Exception {
@@ -52,7 +51,8 @@ public void testRegisteredName() throws Exception {

     for (Map.Entry<String, QParserPlugin> pair : QParserPlugin.standardPlugins.entrySet()) {
       String regName = pair.getKey();
-      Class<? extends QParserPlugin> clazz = pair.getValue().getClass();;
+      Class<? extends QParserPlugin> clazz = pair.getValue().getClass();

       Field nameField = clazz.getField(FIELD_NAME);
       int modifiers = nameField.getModifiers();
@@ -61,31 +61,29 @@ public void testRegisteredName() throws Exception {
       }
       if (!Modifier.isStatic(modifiers)) {
         notStatic.add(clazz.getName());
-      } else if (! regName.equals(nameField.get(null))) {
-        mismatch.add(regName +" != "+ nameField.get(null) +"("+ clazz.getName() +")");
+      } else if (!regName.equals(nameField.get(null))) {
+        mismatch.add(regName + " != " + nameField.get(null) + "(" + clazz.getName() + ")");
       }
     }

-    assertTrue("All standard QParsers must have final NAME, broken: " + notFinal,
-        notFinal.isEmpty());
-    assertTrue("All standard QParsers must have static NAME, broken: " + notStatic,
-        notStatic.isEmpty());
-    assertTrue("All standard QParsers must be registered using NAME, broken: " + mismatch,
-        mismatch.isEmpty());
+    assertTrue(
+        "All standard QParsers must have final NAME, broken: " + notFinal, notFinal.isEmpty());
+    assertTrue(
+        "All standard QParsers must have static NAME, broken: " + notStatic, notStatic.isEmpty());
+    assertTrue(
+        "All standard QParsers must be registered using NAME, broken: " + mismatch,
+        mismatch.isEmpty());

-    assertTrue("DEFAULT_QTYPE is not in the standard set of registered names: " +
-        QParserPlugin.DEFAULT_QTYPE,
+    assertTrue(
+        "DEFAULT_QTYPE is not in the standard set of registered names: "
+            + QParserPlugin.DEFAULT_QTYPE,
         QParserPlugin.standardPlugins.keySet().contains(QParserPlugin.DEFAULT_QTYPE));
-
   }

-  /**
-   * Test that "lucene" is the default query parser.
-   */
+  /** Test that "lucene" is the default query parser.
*/ @Test public void testDefaultQType() throws Exception { assertEquals(LuceneQParserPlugin.NAME, QParserPlugin.DEFAULT_QTYPE); assertEquals("lucene", LuceneQParserPlugin.NAME); } - } diff --git a/solr/core/src/test/org/apache/solr/search/TestStressLucene.java b/solr/core/src/test/org/apache/solr/search/TestStressLucene.java index af2ef1db1f1..a0f2055c8a7 100644 --- a/solr/core/src/test/org/apache/solr/search/TestStressLucene.java +++ b/solr/core/src/test/org/apache/solr/search/TestStressLucene.java @@ -16,6 +16,7 @@ */ package org.apache.solr.search; +import static org.apache.solr.core.SolrCore.verbose; import java.util.ArrayList; import java.util.HashMap; @@ -26,7 +27,6 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; - import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; @@ -40,31 +40,31 @@ import org.apache.lucene.store.Directory; import org.junit.Test; -import static org.apache.solr.core.SolrCore.verbose; - public class TestStressLucene extends TestRTGBase { // The purpose of this test is to roughly model how solr uses lucene DirectoryReader reader; + @Test public void testStressLuceneNRT() throws Exception { final int commitPercent = 5 + random().nextInt(20); - final int softCommitPercent = 30+random().nextInt(75); // what percent of the commits are soft - final int deletePercent = 4+random().nextInt(25); - final int deleteByQueryPercent = 1+random().nextInt(5); + final int softCommitPercent = 30 + random().nextInt(75); // what percent of the commits are soft + final int deletePercent = 4 + random().nextInt(25); + final int deleteByQueryPercent = 1 + random().nextInt(5); final int ndocs = 5 + (random().nextBoolean() ? random().nextInt(25) : random().nextInt(200)); int nWriteThreads = 5 + random().nextInt(25); final int maxConcurrentCommits = nWriteThreads; - final AtomicLong operations = new AtomicLong(100000); // number of query operations to perform in total + final AtomicLong operations = + new AtomicLong(100000); // number of query operations to perform in total int nReadThreads = 5 + random().nextInt(25); final boolean tombstones = random().nextBoolean(); final boolean syncCommits = random().nextBoolean(); verbose("commitPercent=", commitPercent); - verbose("softCommitPercent=",softCommitPercent); - verbose("deletePercent=",deletePercent); + verbose("softCommitPercent=", softCommitPercent); + verbose("deletePercent=", deletePercent); verbose("deleteByQueryPercent=", deleteByQueryPercent); verbose("ndocs=", ndocs); verbose("nWriteThreads=", nWriteThreads); @@ -80,7 +80,6 @@ public void testStressLuceneNRT() throws Exception { List threads = new ArrayList<>(); - final FieldType idFt = new FieldType(); idFt.setStored(true); idFt.setOmitNorms(true); @@ -90,19 +89,20 @@ public void testStressLuceneNRT() throws Exception { final FieldType ft2 = new FieldType(); ft2.setStored(true); - - // model how solr does locking - only allow one thread to do a hard commit at once, and only one thread to do a soft commit, but + // model how solr does locking - only allow one thread to do a hard commit at once, and only one + // thread to do a soft commit, but // a hard commit in progress does not stop a soft commit. final Lock hardCommitLock = syncCommits ? new ReentrantLock() : null; final Lock reopenLock = syncCommits ? 
new ReentrantLock() : null; - // RAMDirectory dir = new RAMDirectory(); - // final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new WhitespaceAnalyzer())); + // final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new + // WhitespaceAnalyzer())); Directory dir = newDirectory(); - final RandomIndexWriter writer = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random()))); + final RandomIndexWriter writer = + new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random()))); writer.setDoRandomForceMergeAssert(false); // writer.commit(); @@ -111,252 +111,269 @@ public void testStressLuceneNRT() throws Exception { // to only opening at the last commit point. reader = DirectoryReader.open(writer.w); - for (int i=0; i 0) { - int oper = rand.nextInt(100); + @Override + public void run() { + try { + while (operations.get() > 0) { + int oper = rand.nextInt(100); - if (oper < commitPercent) { - if (numCommitting.incrementAndGet() <= maxConcurrentCommits) { - Map newCommittedModel; - long version; - DirectoryReader oldReader; + if (oper < commitPercent) { + if (numCommitting.incrementAndGet() <= maxConcurrentCommits) { + Map newCommittedModel; + long version; + DirectoryReader oldReader; - boolean softCommit = rand.nextInt(100) < softCommitPercent; + boolean softCommit = rand.nextInt(100) < softCommitPercent; - if (!softCommit) { - // only allow one hard commit to proceed at once - if (hardCommitLock != null) hardCommitLock.lock(); - verbose("hardCommit start"); + if (!softCommit) { + // only allow one hard commit to proceed at once + if (hardCommitLock != null) hardCommitLock.lock(); + verbose("hardCommit start"); - writer.commit(); - } - - if (reopenLock != null) reopenLock.lock(); - - synchronized(globalLock) { - newCommittedModel = new HashMap<>(model); // take a snapshot - version = snapshotCount++; - oldReader = reader; - oldReader.incRef(); // increment the reference since we will use this for reopening - } - - if (!softCommit) { - // must commit after taking a snapshot of the model - // writer.commit(); - } + writer.commit(); + } - verbose("reopen start using", oldReader); + if (reopenLock != null) reopenLock.lock(); - DirectoryReader newReader; - if (softCommit) { - newReader = DirectoryReader.openIfChanged(oldReader, writer.w, true); - } else { - // will only open to last commit - newReader = DirectoryReader.openIfChanged(oldReader); - } + synchronized (globalLock) { + newCommittedModel = new HashMap<>(model); // take a snapshot + version = snapshotCount++; + oldReader = reader; + // increment the reference since we will use this for reopening + oldReader.incRef(); + } + if (!softCommit) { + // must commit after taking a snapshot of the model + // writer.commit(); + } - if (newReader == null) { - oldReader.incRef(); - newReader = oldReader; - } - oldReader.decRef(); + verbose("reopen start using", oldReader); - verbose("reopen result", newReader); + DirectoryReader newReader; + if (softCommit) { + newReader = DirectoryReader.openIfChanged(oldReader, writer.w, true); + } else { + // will only open to last commit + newReader = DirectoryReader.openIfChanged(oldReader); + } - synchronized(globalLock) { - assert newReader.getRefCount() > 0; - assert reader.getRefCount() > 0; + if (newReader == null) { + oldReader.incRef(); + newReader = oldReader; + } + oldReader.decRef(); + + verbose("reopen result", newReader); + + synchronized (globalLock) { + assert newReader.getRefCount() > 0; + assert reader.getRefCount() 
> 0; + + // install the new reader if it's newest (and check the current version + // since another reader may have already been installed) + if (newReader.getVersion() > reader.getVersion()) { + reader.decRef(); + reader = newReader; + + // install this snapshot only if it's newer than the current one + if (version >= committedModelClock) { + committedModel = newCommittedModel; + committedModelClock = version; + } + + } else { + // close if unused + newReader.decRef(); + } + } - // install the new reader if it's newest (and check the current version since another reader may have already been installed) - if (newReader.getVersion() > reader.getVersion()) { - reader.decRef(); - reader = newReader; + if (reopenLock != null) reopenLock.unlock(); - // install this snapshot only if it's newer than the current one - if (version >= committedModelClock) { - committedModel = newCommittedModel; - committedModelClock = version; + if (!softCommit) { + if (hardCommitLock != null) hardCommitLock.unlock(); } - - } else { - // close if unused - newReader.decRef(); } - + numCommitting.decrementAndGet(); + continue; } - if (reopenLock != null) reopenLock.unlock(); + int id = rand.nextInt(ndocs); + Object sync = syncArr[id]; - if (!softCommit) { - if (hardCommitLock != null) hardCommitLock.unlock(); + // set the lastId before we actually change it sometimes to try and + // uncover more race conditions between writing and reading + boolean before = rand.nextBoolean(); + if (before) { + lastId = id; } - } - numCommitting.decrementAndGet(); - continue; - } - - - int id = rand.nextInt(ndocs); - Object sync = syncArr[id]; + // We can't concurrently update the same document and retain our invariants of + // increasing values since we can't guarantee what order the updates will be + // executed. + synchronized (sync) { + DocInfo info = model.get(id); + long val = info.val; + long nextVal = Math.abs(val) + 1; + + if (oper < commitPercent + deletePercent) { + // add tombstone first + if (tombstones) { + Document d = new Document(); + d.add(new Field("id", "-" + Integer.toString(id), idFt)); + d.add(new Field(FIELD, Long.toString(nextVal), ft2)); + verbose("adding tombstone for id", id, "val=", nextVal); + writer.updateDocument(new Term("id", "-" + Integer.toString(id)), d); + } - // set the lastId before we actually change it sometimes to try and - // uncover more race conditions between writing and reading - boolean before = rand.nextBoolean(); - if (before) { - lastId = id; - } + verbose("deleting id", id, "val=", nextVal); + writer.deleteDocuments(new Term("id", Integer.toString(id))); + model.put(id, new DocInfo(0, -nextVal)); + verbose("deleting id", id, "val=", nextVal, "DONE"); + + } else if (oper < commitPercent + deletePercent + deleteByQueryPercent) { + // assertU("id:" + id + ""); + + // add tombstone first + if (tombstones) { + Document d = new Document(); + d.add(new Field("id", "-" + Integer.toString(id), idFt)); + d.add(new Field(FIELD, Long.toString(nextVal), ft2)); + verbose("adding tombstone for id", id, "val=", nextVal); + writer.updateDocument(new Term("id", "-" + Integer.toString(id)), d); + } - // We can't concurrently update the same document and retain our invariants of increasing values - // since we can't guarantee what order the updates will be executed. 
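The removed comment just above states the key invariant these writer threads depend on: updates to the same document must be serialized, or the monotonically increasing per-id values cannot be asserted later. A minimal standalone sketch of that per-id locking idea, with illustrative names (syncArr, model) that are not taken from the patch:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ThreadLocalRandom;

public class PerIdLockSketch {
  static final int NDOCS = 16;
  static final Object[] syncArr = new Object[NDOCS];
  static final ConcurrentHashMap<Integer, Long> model = new ConcurrentHashMap<>();

  static {
    for (int i = 0; i < NDOCS; i++) {
      syncArr[i] = new Object();
      model.put(i, 0L);
    }
  }

  // Serializes updates per id so each id's value only ever increases,
  // while writers touching different ids still run in parallel.
  static void update(int id) {
    synchronized (syncArr[id]) {
      long nextVal = Math.abs(model.get(id)) + 1;
      model.put(id, nextVal);
    }
  }

  public static void main(String[] args) throws InterruptedException {
    Thread[] writers = new Thread[4];
    for (int t = 0; t < writers.length; t++) {
      writers[t] = new Thread(() -> {
        for (int n = 0; n < 1000; n++) {
          update(ThreadLocalRandom.current().nextInt(NDOCS));
        }
      });
      writers[t].start();
    }
    for (Thread w : writers) {
      w.join();
    }
    // every value equals the number of updates its id received
    System.out.println(model);
  }
}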
- synchronized (sync) { - DocInfo info = model.get(id); - long val = info.val; - long nextVal = Math.abs(val)+1; - - if (oper < commitPercent + deletePercent) { - // add tombstone first - if (tombstones) { - Document d = new Document(); - d.add(new Field("id","-"+Integer.toString(id), idFt)); - d.add(new Field(FIELD, Long.toString(nextVal), ft2)); - verbose("adding tombstone for id",id,"val=",nextVal); - writer.updateDocument(new Term("id", "-"+Integer.toString(id)), d); - } + verbose("deleteByQuery", id, "val=", nextVal); + writer.deleteDocuments(new TermQuery(new Term("id", Integer.toString(id)))); + model.put(id, new DocInfo(0, -nextVal)); + verbose("deleteByQuery", id, "val=", nextVal, "DONE"); + } else { + // model.put(id, nextVal); // uncomment this and this test should fail. + + // assertU(adoc("id",Integer.toString(id), field, Long.toString(nextVal))); + Document d = new Document(); + d.add(new Field("id", Integer.toString(id), idFt)); + d.add(new Field(FIELD, Long.toString(nextVal), ft2)); + verbose("adding id", id, "val=", nextVal); + writer.updateDocument(new Term("id", Integer.toString(id)), d); + if (tombstones) { + // remove tombstone after new addition (this should be optional?) + verbose("deleting tombstone for id", id); + writer.deleteDocuments(new Term("id", "-" + Integer.toString(id))); + verbose("deleting tombstone for id", id, "DONE"); + } - verbose("deleting id",id,"val=",nextVal); - writer.deleteDocuments(new Term("id",Integer.toString(id))); - model.put(id, new DocInfo(0,-nextVal)); - verbose("deleting id",id,"val=",nextVal,"DONE"); - - } else if (oper < commitPercent + deletePercent + deleteByQueryPercent) { - //assertU("id:" + id + ""); - - // add tombstone first - if (tombstones) { - Document d = new Document(); - d.add(new Field("id","-"+Integer.toString(id), idFt)); - d.add(new Field(FIELD, Long.toString(nextVal), ft2)); - verbose("adding tombstone for id",id,"val=",nextVal); - writer.updateDocument(new Term("id", "-"+Integer.toString(id)), d); + model.put(id, new DocInfo(0, nextVal)); + verbose("adding id", id, "val=", nextVal, "DONE"); + } } - verbose("deleteByQuery",id,"val=",nextVal); - writer.deleteDocuments(new TermQuery(new Term("id", Integer.toString(id)))); - model.put(id, new DocInfo(0,-nextVal)); - verbose("deleteByQuery",id,"val=",nextVal,"DONE"); - } else { - // model.put(id, nextVal); // uncomment this and this test should fail. - - // assertU(adoc("id",Integer.toString(id), field, Long.toString(nextVal))); - Document d = new Document(); - d.add(new Field("id",Integer.toString(id), idFt)); - d.add(new Field(FIELD, Long.toString(nextVal), ft2)); - verbose("adding id",id,"val=",nextVal); - writer.updateDocument(new Term("id", Integer.toString(id)), d); - if (tombstones) { - // remove tombstone after new addition (this should be optional?) 
- verbose("deleting tombstone for id",id); - writer.deleteDocuments(new Term("id","-"+Integer.toString(id))); - verbose("deleting tombstone for id",id,"DONE"); + if (!before) { + lastId = id; } - - model.put(id, new DocInfo(0,nextVal)); - verbose("adding id",id,"val=",nextVal,"DONE"); } - } - - if (!before) { - lastId = id; + } catch (Exception ex) { + throw new RuntimeException(ex); } } - } catch (Exception ex) { - throw new RuntimeException(ex); - } - } - }; + }; threads.add(thread); } + for (int i = 0; i < nReadThreads; i++) { + Thread thread = + new Thread("READER" + i) { + Random rand = new Random(random().nextInt()); - for (int i=0; i= 0) { + // bias toward a recently changed doc + int id = rand.nextInt(100) < 25 ? lastId : rand.nextInt(ndocs); - @Override - public void run() { - try { - while (operations.decrementAndGet() >= 0) { - // bias toward a recently changed doc - int id = rand.nextInt(100) < 25 ? lastId : rand.nextInt(ndocs); + // when indexing, we update the index, then the model + // so when querying, we should first check the model, and then the index - // when indexing, we update the index, then the model - // so when querying, we should first check the model, and then the index + DocInfo info; + synchronized (globalLock) { + info = committedModel.get(id); + } + long val = info.val; - DocInfo info; - synchronized(globalLock) { - info = committedModel.get(id); - } - long val = info.val; + IndexReader r; + synchronized (globalLock) { + r = reader; + r.incRef(); + } - IndexReader r; - synchronized(globalLock) { - r = reader; - r.incRef(); - } + int docid = getFirstMatch(r, new Term("id", Integer.toString(id))); - int docid = getFirstMatch(r, new Term("id",Integer.toString(id))); + if (docid < 0 && tombstones) { + // if we couldn't find the doc, look for its tombstone + docid = getFirstMatch(r, new Term("id", "-" + Integer.toString(id))); + if (docid < 0) { + if (val == -1L) { + // expected... no doc was added yet + r.decRef(); + continue; + } + verbose( + "ERROR: Couldn't find a doc or tombstone for id", + id, + "using reader", + r, + "expected value", + val); + fail( + "No documents or tombstones found for id " + + id + + ", expected at least " + + val); + } + } - if (docid < 0 && tombstones) { - // if we couldn't find the doc, look for its tombstone - docid = getFirstMatch(r, new Term("id","-"+Integer.toString(id))); - if (docid < 0) { - if (val == -1L) { - // expected... 
no doc was added yet - r.decRef(); - continue; + if (docid < 0 && !tombstones) { + // nothing to do - we can't tell anything from a deleted doc without tombstones + } else { + if (docid < 0) { + verbose("ERROR: Couldn't find a doc for id", id, "using reader", r); + } + assertTrue(docid >= 0); // we should have found the document, or its tombstone + Document doc = r.document(docid); + long foundVal = Long.parseLong(doc.get(FIELD)); + if (foundVal < Math.abs(val)) { + verbose( + "ERROR: id", + id, + "model_val=", + val, + " foundVal=", + foundVal, + "reader=", + reader); + } + assertTrue(foundVal >= Math.abs(val)); } - verbose("ERROR: Couldn't find a doc or tombstone for id", id, "using reader",r,"expected value",val); - fail("No documents or tombstones found for id " + id + ", expected at least " + val); - } - } - if (docid < 0 && !tombstones) { - // nothing to do - we can't tell anything from a deleted doc without tombstones - } else { - if (docid < 0) { - verbose("ERROR: Couldn't find a doc for id", id, "using reader",r); - } - assertTrue(docid >= 0); // we should have found the document, or its tombstone - Document doc = r.document(docid); - long foundVal = Long.parseLong(doc.get(FIELD)); - if (foundVal < Math.abs(val)) { - verbose("ERROR: id",id,"model_val=",val," foundVal=",foundVal,"reader=",reader); + r.decRef(); } - assertTrue(foundVal >= Math.abs(val)); + } catch (Throwable e) { + operations.set(-1L); + throw new RuntimeException(e); } - - r.decRef(); } - } catch (Throwable e) { - operations.set(-1L); - throw new RuntimeException(e); - } - } - }; + }; threads.add(thread); } - for (Thread thread : threads) { thread.start(); } @@ -369,7 +386,4 @@ public void run() { reader.close(); dir.close(); } - - - } diff --git a/solr/core/src/test/org/apache/solr/search/TestStressRecovery.java b/solr/core/src/test/org/apache/solr/search/TestStressRecovery.java index 45676a0cd6a..c0e3f8f0ede 100644 --- a/solr/core/src/test/org/apache/solr/search/TestStressRecovery.java +++ b/solr/core/src/test/org/apache/solr/search/TestStressRecovery.java @@ -16,6 +16,8 @@ */ package org.apache.solr.search; +import static org.apache.solr.core.SolrCore.verbose; +import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM; import java.util.ArrayList; import java.util.HashMap; @@ -29,7 +31,6 @@ import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; - import org.apache.lucene.util.Constants; import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.common.util.Utils; @@ -42,50 +43,47 @@ import org.junit.Before; import org.junit.Test; -import static org.apache.solr.core.SolrCore.verbose; -import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM; - -@LuceneTestCase.AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 6-Sep-2018 +@LuceneTestCase.AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-12028") // 6-Sep-2018 // can fail due to NPE uncaught exception in stress thread, probably because of null core public class TestStressRecovery extends TestRTGBase { @Before public void beforeClass() throws Exception { - initCore("solrconfig-tlog.xml","schema15.xml"); + initCore("solrconfig-tlog.xml", "schema15.xml"); } - + @After public void afterClass() { deleteCore(); } - // This points to the live model when state is ACTIVE, but a snapshot of the // past when recovering. 
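The comment above introduces the pattern all of these stress tests share: a live in-memory model of expected values, plus a committed snapshot of it taken under a global lock and installed only if it is newer than the one already in place. A rough sketch of that bookkeeping, under assumed names (model, committedModel, snapshotCount):

import java.util.HashMap;
import java.util.Map;

class ModelSnapshotSketch {
  private final Object globalLock = new Object();
  private final Map<Integer, Long> model = new HashMap<>(); // live model (guarded by globalLock)
  private Map<Integer, Long> committedModel = new HashMap<>(); // snapshot seen by non-realtime reads
  private long snapshotCount = 0;
  private long committedModelClock = -1;

  void commit() {
    Map<Integer, Long> newCommittedModel;
    long version;
    synchronized (globalLock) {
      newCommittedModel = new HashMap<>(model); // take a snapshot
      version = snapshotCount++;
    }
    // ... the real index commit/reopen would happen here, outside the lock ...
    synchronized (globalLock) {
      // install only if newer: concurrent committers can finish out of order
      if (version >= committedModelClock) {
        committedModel = newCommittedModel;
        committedModelClock = version;
      }
    }
  }

  Long committedValue(int id) {
    synchronized (globalLock) {
      return committedModel.get(id);
    }
  }
}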
- volatile ConcurrentHashMap visibleModel; + volatile ConcurrentHashMap visibleModel; // This version simulates updates coming from the leader and sometimes being reordered // and tests the ability to buffer updates and apply them later @Test - // 12-Jun-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 04-May-2018 - // commented out on: 24-Dec-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 6-Sep-2018 public void testStressRecovery() throws Exception { assumeFalse("FIXME: This test is horribly slow sometimes on Windows!", Constants.WINDOWS); final int commitPercent = 5 + random().nextInt(10); - final int softCommitPercent = 30+random().nextInt(75); // what percent of the commits are soft - final int deletePercent = 4+random().nextInt(25); + final int softCommitPercent = 30 + random().nextInt(75); // what percent of the commits are soft + final int deletePercent = 4 + random().nextInt(25); final int deleteByQueryPercent = random().nextInt(5); final int ndocs = 5 + (random().nextBoolean() ? random().nextInt(25) : random().nextInt(200)); - int nWriteThreads = 2 + random().nextInt(10); // fewer write threads to give recovery thread more of a chance + int nWriteThreads = + 2 + random().nextInt(10); // fewer write threads to give recovery thread more of a chance final int maxConcurrentCommits = nWriteThreads; // query variables final int percentRealtimeQuery = 75; final int percentGetLatestVersions = random().nextInt(4); - final AtomicLong operations = new AtomicLong(atLeast(35)); // number of recovery loops to perform - int nReadThreads = 2 + random().nextInt(10); // fewer read threads to give writers more of a chance + final AtomicLong operations = + new AtomicLong(atLeast(35)); // number of recovery loops to perform + int nReadThreads = + 2 + random().nextInt(10); // fewer read threads to give writers more of a chance initModel(ndocs); @@ -93,261 +91,283 @@ public void testStressRecovery() throws Exception { List threads = new ArrayList<>(); - final AtomicLong testVersion = new AtomicLong(0); - final UpdateHandler uHandler = h.getCore().getUpdateHandler(); final UpdateLog uLog = uHandler.getUpdateLog(); final VersionInfo vInfo = uLog.getVersionInfo(); final Object stateChangeLock = new Object(); this.visibleModel = model; final Semaphore[] writePermissions = new Semaphore[nWriteThreads]; - for (int i=0; i 0) { - writePermission.acquire(); + @Override + public void run() { + try { + while (operations.get() > 0) { + writePermission.acquire(); - int oper = rand.nextInt(10); + int oper = rand.nextInt(10); - if (oper < commitPercent) { - if (numCommitting.incrementAndGet() <= maxConcurrentCommits) { - Map newCommittedModel; - long version; + if (oper < commitPercent) { + if (numCommitting.incrementAndGet() <= maxConcurrentCommits) { + Map newCommittedModel; + long version; - synchronized(globalLock) { - newCommittedModel = new HashMap<>(model); // take a snapshot - version = snapshotCount++; - } + synchronized (globalLock) { + newCommittedModel = new HashMap<>(model); // take a snapshot + version = snapshotCount++; + } - synchronized (stateChangeLock) { - // These commits won't take affect if we are in recovery mode, - // so change the version to -1 so we won't update our model. 
- if (uLog.getState() != UpdateLog.State.ACTIVE) version = -1; - if (rand.nextInt(100) < softCommitPercent) { - verbose("softCommit start"); - assertU(TestHarness.commit("softCommit","true")); - verbose("softCommit end"); - } else { - verbose("hardCommit start"); - assertU(commit()); - verbose("hardCommit end"); - } - } + synchronized (stateChangeLock) { + // These commits won't take affect if we are in recovery mode, + // so change the version to -1 so we won't update our model. + if (uLog.getState() != UpdateLog.State.ACTIVE) version = -1; + if (rand.nextInt(100) < softCommitPercent) { + verbose("softCommit start"); + assertU(TestHarness.commit("softCommit", "true")); + verbose("softCommit end"); + } else { + verbose("hardCommit start"); + assertU(commit()); + verbose("hardCommit end"); + } + } - synchronized(globalLock) { - // install this model snapshot only if it's newer than the current one - // install this model only if we are not in recovery mode. - if (version >= committedModelClock) { - if (VERBOSE) { - verbose("installing new committedModel version="+committedModelClock); + synchronized (globalLock) { + // install this model snapshot only if it's newer than the current one + // install this model only if we are not in recovery mode. + if (version >= committedModelClock) { + if (VERBOSE) { + verbose("installing new committedModel version=" + committedModelClock); + } + committedModel = newCommittedModel; + committedModelClock = version; + } } - committedModel = newCommittedModel; - committedModelClock = version; } + numCommitting.decrementAndGet(); + continue; } - } - numCommitting.decrementAndGet(); - continue; - } + int id; - int id; - - if (rand.nextBoolean()) { - id = rand.nextInt(ndocs); - } else { - id = lastId; // reuse the last ID half of the time to force more race conditions - } - - // set the lastId before we actually change it sometimes to try and - // uncover more race conditions between writing and reading - boolean before = rand.nextBoolean(); - if (before) { - lastId = id; - } + if (rand.nextBoolean()) { + id = rand.nextInt(ndocs); + } else { + id = lastId; // reuse the last ID half of the time to force more race conditions + } - DocInfo info = model.get(id); + // set the lastId before we actually change it sometimes to try and + // uncover more race conditions between writing and reading + boolean before = rand.nextBoolean(); + if (before) { + lastId = id; + } - long val = info.val; - long nextVal = Math.abs(val)+1; + DocInfo info = model.get(id); - // the version we set on the update should determine who wins - // These versions are not derived from the actual leader update handler hand hence this - // test may need to change depending on how we handle version numbers. - long version = testVersion.incrementAndGet(); + long val = info.val; + long nextVal = Math.abs(val) + 1; - // yield after getting the next version to increase the odds of updates happening out of order - if (rand.nextBoolean()) Thread.yield(); + // the version we set on the update should determine who wins + // These versions are not derived from the actual leader update handler hand hence + // this test may need to change depending on how we handle version numbers. 
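As the trailing comment notes, an externally assigned version decides which of two reordered updates wins, with negative versions standing in for deletes. A hypothetical distillation of the reconciliation rule the writer threads apply to the model:

final class VersionedDoc {
  final long version; // negative version encodes a delete (tombstone)
  final long val;

  VersionedDoc(long version, long val) {
    this.version = version;
    this.val = val;
  }

  // Out-of-order safe: the record with the larger absolute version wins,
  // regardless of the order in which the updates actually arrived.
  static VersionedDoc reconcile(VersionedDoc current, VersionedDoc incoming) {
    return Math.abs(incoming.version) > Math.abs(current.version) ? incoming : current;
  }
}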
+ long version = testVersion.incrementAndGet(); - if (oper < commitPercent + deletePercent) { - verbose("deleting id",id,"val=",nextVal,"version",version); + // yield after getting the next version to increase the odds of updates happening + // out of order + if (rand.nextBoolean()) Thread.yield(); - Long returnedVersion = deleteAndGetVersion(Integer.toString(id), params("_version_",Long.toString(-version), DISTRIB_UPDATE_PARAM,FROM_LEADER)); + if (oper < commitPercent + deletePercent) { + verbose("deleting id", id, "val=", nextVal, "version", version); - // TODO: returning versions for these types of updates is redundant - // but if we do return, they had better be equal - if (returnedVersion != null) { - assertEquals(-version, returnedVersion.longValue()); - } + Long returnedVersion = + deleteAndGetVersion( + Integer.toString(id), + params( + "_version_", + Long.toString(-version), + DISTRIB_UPDATE_PARAM, + FROM_LEADER)); - // only update model if the version is newer - synchronized (model) { - DocInfo currInfo = model.get(id); - if (Math.abs(version) > Math.abs(currInfo.version)) { - model.put(id, new DocInfo(version, -nextVal)); - } - } + // TODO: returning versions for these types of updates is redundant + // but if we do return, they had better be equal + if (returnedVersion != null) { + assertEquals(-version, returnedVersion.longValue()); + } - verbose("deleting id", id, "val=",nextVal,"version",version,"DONE"); - } else if (oper < commitPercent + deletePercent + deleteByQueryPercent) { + // only update model if the version is newer + synchronized (model) { + DocInfo currInfo = model.get(id); + if (Math.abs(version) > Math.abs(currInfo.version)) { + model.put(id, new DocInfo(version, -nextVal)); + } + } - verbose("deleteByQuery id",id,"val=",nextVal,"version",version); + verbose("deleting id", id, "val=", nextVal, "version", version, "DONE"); + } else if (oper < commitPercent + deletePercent + deleteByQueryPercent) { - Long returnedVersion = deleteByQueryAndGetVersion("id:"+Integer.toString(id), params("_version_",Long.toString(-version), DISTRIB_UPDATE_PARAM,FROM_LEADER)); + verbose("deleteByQuery id", id, "val=", nextVal, "version", version); - // TODO: returning versions for these types of updates is redundant - // but if we do return, they had better be equal - if (returnedVersion != null) { - assertEquals(-version, returnedVersion.longValue()); - } + Long returnedVersion = + deleteByQueryAndGetVersion( + "id:" + Integer.toString(id), + params( + "_version_", + Long.toString(-version), + DISTRIB_UPDATE_PARAM, + FROM_LEADER)); - // only update model if the version is newer - synchronized (model) { - DocInfo currInfo = model.get(id); - if (Math.abs(version) > Math.abs(currInfo.version)) { - model.put(id, new DocInfo(version, -nextVal)); - } - } + // TODO: returning versions for these types of updates is redundant + // but if we do return, they had better be equal + if (returnedVersion != null) { + assertEquals(-version, returnedVersion.longValue()); + } - verbose("deleteByQuery id", id, "val=",nextVal,"version",version,"DONE"); + // only update model if the version is newer + synchronized (model) { + DocInfo currInfo = model.get(id); + if (Math.abs(version) > Math.abs(currInfo.version)) { + model.put(id, new DocInfo(version, -nextVal)); + } + } - } else { - verbose("adding id", id, "val=", nextVal,"version",version); + verbose("deleteByQuery id", id, "val=", nextVal, "version", version, "DONE"); + + } else { + verbose("adding id", id, "val=", nextVal, "version", version); + + Long 
returnedVersion = + addAndGetVersion( + sdoc( + "id", + Integer.toString(id), + FIELD, + Long.toString(nextVal), + "_version_", + Long.toString(version)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + if (returnedVersion != null) { + assertEquals(version, returnedVersion.longValue()); + } - Long returnedVersion = addAndGetVersion(sdoc("id", Integer.toString(id), FIELD, Long.toString(nextVal), "_version_",Long.toString(version)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - if (returnedVersion != null) { - assertEquals(version, returnedVersion.longValue()); - } + // only update model if the version is newer + synchronized (model) { + DocInfo currInfo = model.get(id); + if (version > currInfo.version) { + model.put(id, new DocInfo(version, nextVal)); + } + } - // only update model if the version is newer - synchronized (model) { - DocInfo currInfo = model.get(id); - if (version > currInfo.version) { - model.put(id, new DocInfo(version, nextVal)); + if (VERBOSE) { + verbose("adding id", id, "val=", nextVal, "version", version, "DONE"); + } } - } + // } // end sync - if (VERBOSE) { - verbose("adding id", id, "val=", nextVal,"version",version,"DONE"); + if (!before) { + lastId = id; + } } - - } - // } // end sync - - if (!before) { - lastId = id; + } catch (Throwable e) { + operations.set(-1L); + throw new RuntimeException(e); } } - } catch (Throwable e) { - operations.set(-1L); - throw new RuntimeException(e); - } - } - }; + }; threads.add(thread); } + for (int i = 0; i < nReadThreads; i++) { + Thread thread = + new Thread("READER" + i) { + Random rand = new Random(random().nextInt()); - for (int i=0; i 0) { + // throttle reads (don't completely stop) + readPermission.tryAcquire(10, TimeUnit.MILLISECONDS); - @Override - public void run() { - try { - while (operations.get() > 0) { - // throttle reads (don't completely stop) - readPermission.tryAcquire(10, TimeUnit.MILLISECONDS); + // bias toward a recently changed doc + int id = rand.nextInt(100) < 25 ? lastId : rand.nextInt(ndocs); + // when indexing, we update the index, then the model + // so when querying, we should first check the model, and then the index - // bias toward a recently changed doc - int id = rand.nextInt(100) < 25 ? 
lastId : rand.nextInt(ndocs); + boolean realTime = rand.nextInt(100) < percentRealtimeQuery; + DocInfo info; - // when indexing, we update the index, then the model - // so when querying, we should first check the model, and then the index + if (realTime) { + info = visibleModel.get(id); + } else { + synchronized (globalLock) { + info = committedModel.get(id); + } + } - boolean realTime = rand.nextInt(100) < percentRealtimeQuery; - DocInfo info; + if (VERBOSE) { + verbose("querying id", id); + } + SolrQueryRequest sreq; + if (realTime) { + sreq = req("wt", "json", "qt", "/get", "ids", Integer.toString(id)); + } else { + sreq = + req("wt", "json", "q", "id:" + Integer.toString(id), "omitHeader", "true"); + } - if (realTime) { - info = visibleModel.get(id); - } else { - synchronized(globalLock) { - info = committedModel.get(id); + String response = h.query(sreq); + @SuppressWarnings({"rawtypes"}) + Map rsp = (Map) Utils.fromJSONString(response); + @SuppressWarnings({"rawtypes"}) + List doclist = (List) (((Map) rsp.get("response")).get("docs")); + if (doclist.size() == 0) { + // there's no info we can get back with a delete, so not much we can check + // without further synchronization + } else { + assertEquals(1, doclist.size()); + long foundVal = (Long) (((Map) doclist.get(0)).get(FIELD)); + long foundVer = (Long) (((Map) doclist.get(0)).get("_version_")); + if (foundVer < Math.abs(info.version) + || (foundVer == info.version + && foundVal != info.val)) { // if the version matches, the val must + verbose("ERROR, id=", id, "found=", response, "model", info); + assertTrue(false); + } + } } - } - - if (VERBOSE) { - verbose("querying id", id); - } - SolrQueryRequest sreq; - if (realTime) { - sreq = req("wt","json", "qt","/get", "ids",Integer.toString(id)); - } else { - sreq = req("wt","json", "q","id:"+Integer.toString(id), "omitHeader","true"); - } - - String response = h.query(sreq); - @SuppressWarnings({"rawtypes"}) - Map rsp = (Map) Utils.fromJSONString(response); - @SuppressWarnings({"rawtypes"}) - List doclist = (List)(((Map)rsp.get("response")).get("docs")); - if (doclist.size() == 0) { - // there's no info we can get back with a delete, so not much we can check without further synchronization - } else { - assertEquals(1, doclist.size()); - long foundVal = (Long)(((Map)doclist.get(0)).get(FIELD)); - long foundVer = (Long)(((Map)doclist.get(0)).get("_version_")); - if (foundVer < Math.abs(info.version) - || (foundVer == info.version && foundVal != info.val) ) { // if the version matches, the val must - verbose("ERROR, id=", id, "found=",response,"model",info); - assertTrue(false); + if (rand.nextInt(100) < percentGetLatestVersions) { + getLatestVersions(); + // TODO: some sort of validation that the latest version is >= to the latest + // version we added? } - } - } - - if (rand.nextInt(100) < percentGetLatestVersions) { - getLatestVersions(); - // TODO: some sort of validation that the latest version is >= to the latest version we added? 
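Further below, the recovery loop throttles individual writers by draining their Semaphore permits and later resumes them by topping the permit count back up; only the difference is released, since Semaphore.release() performs no overflow check. A small self-contained sketch of that throttle:

import java.util.concurrent.Semaphore;

public class ThrottleSketch {
  public static void main(String[] args) throws InterruptedException {
    Semaphore writePermission = new Semaphore(Integer.MAX_VALUE, false);

    writePermission.acquire(); // a writer takes one permit per operation

    writePermission.drainPermits(); // idle the writer: its next acquire() blocks

    // resume at full speed: release only the difference, because
    // Semaphore.release() does not guard against overflowing the permit count
    int neededPermits = Integer.MAX_VALUE - writePermission.availablePermits();
    if (neededPermits > 0) writePermission.release(neededPermits);

    System.out.println("permits restored: " + writePermission.availablePermits());
  }
}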
+ } catch (Throwable e) { + operations.set(-1L); + throw new RuntimeException(e); + } } - - } catch (Throwable e) { - operations.set(-1L); - throw new RuntimeException(e); - } - } - }; + }; threads.add(thread); } - for (Thread thread : threads) { thread.start(); } @@ -368,7 +388,7 @@ public void run() { assertTrue(uLog.getState() == UpdateLog.State.BUFFERING); // sometimes wait for a second to allow time for writers to write something - if (random().nextBoolean()) Thread.sleep(random().nextInt(10)+1); + if (random().nextBoolean()) Thread.sleep(random().nextInt(10) + 1); Future recoveryInfoF = uLog.applyBufferedUpdates(); if (recoveryInfoF != null) { @@ -378,12 +398,18 @@ public void run() { int cnt = 5000; while (recInfo == null) { try { - // wait a short period of time for recovery to complete (and to give a chance for more writers to concurrently add docs) + // wait a short period of time for recovery to complete (and to give a chance for more + // writers to concurrently add docs) cnt--; - recInfo = recoveryInfoF.get(random().nextInt(100/nWriteThreads), TimeUnit.MILLISECONDS); + recInfo = + recoveryInfoF.get(random().nextInt(100 / nWriteThreads), TimeUnit.MILLISECONDS); } catch (TimeoutException e) { // idle one more write thread - verbose("Operation",operations.get(),"Draining permits for write thread",writeThreadNumber); + verbose( + "Operation", + operations.get(), + "Draining permits for write thread", + writeThreadNumber); writePermissions[writeThreadNumber++].drainPermits(); if (writeThreadNumber >= nWriteThreads) { // if we hit the end, back up and give a few write permits @@ -407,23 +433,21 @@ public void run() { for (Semaphore writePerm : writePermissions) { // I don't think semaphores check for overflow, so we need to check mow many remain int neededPermits = Integer.MAX_VALUE - writePerm.availablePermits(); - if (neededPermits > 0) writePerm.release( neededPermits ); + if (neededPermits > 0) writePerm.release(neededPermits); } // put back readers at full blast and point back to live model visibleModel = model; int neededPermits = Integer.MAX_VALUE - readPermission.availablePermits(); - if (neededPermits > 0) readPermission.release( neededPermits ); + if (neededPermits > 0) readPermission.release(neededPermits); - verbose("ROUND=",operations.get()); + verbose("ROUND=", operations.get()); } while (operations.decrementAndGet() > 0); - verbose("bufferedAddsApplied=",bufferedAddsApplied); + verbose("bufferedAddsApplied=", bufferedAddsApplied); for (Thread thread : threads) { thread.join(); } - } - } diff --git a/solr/core/src/test/org/apache/solr/search/TestStressReorder.java b/solr/core/src/test/org/apache/solr/search/TestStressReorder.java index ed3106efb8d..7d214c411ef 100644 --- a/solr/core/src/test/org/apache/solr/search/TestStressReorder.java +++ b/solr/core/src/test/org/apache/solr/search/TestStressReorder.java @@ -16,6 +16,7 @@ */ package org.apache.solr.search; +import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM; import java.lang.invoke.MethodHandles; import java.util.ArrayList; @@ -25,7 +26,6 @@ import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; - import org.apache.solr.common.util.Utils; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.util.TestHarness; @@ -34,15 +34,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static 
org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM; - public class TestStressReorder extends TestRTGBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-tlog.xml","schema15.xml"); + initCore("solrconfig-tlog.xml", "schema15.xml"); } public static void verbose(Object... args) { @@ -50,7 +47,7 @@ public static void verbose(Object... args) { StringBuilder sb = new StringBuilder("VERBOSE:"); for (Object o : args) { sb.append(' '); - sb.append(o==null ? "(null)" : o.toString()); + sb.append(o == null ? "(null)" : o.toString()); } log.info("{}", sb); } @@ -62,38 +59,54 @@ public void testStressReorderVersions() throws Exception { assertU(commit()); final int commitPercent = 5 + random().nextInt(20); - final int softCommitPercent = 30+random().nextInt(75); // what percent of the commits are soft - final int deletePercent = 4+random().nextInt(25); + final int softCommitPercent = 30 + random().nextInt(75); // what percent of the commits are soft + final int deletePercent = 4 + random().nextInt(25); final int deleteByQueryPercent = random().nextInt(8); final int ndocs = 5 + (random().nextBoolean() ? random().nextInt(25) : random().nextInt(200)); int nWriteThreads = 5 + random().nextInt(25); final int maxConcurrentCommits = nWriteThreads; - // query variables + // query variables final int percentRealtimeQuery = 75; - final AtomicLong operations = new AtomicLong(50000); // number of query operations to perform in total + final AtomicLong operations = + new AtomicLong(50000); // number of query operations to perform in total int nReadThreads = 5 + random().nextInt(25); - - /** // testing - final int commitPercent = 5; - final int softCommitPercent = 100; // what percent of the commits are soft - final int deletePercent = 0; - final int deleteByQueryPercent = 50; - final int ndocs = 1; - int nWriteThreads = 2; - - final int maxConcurrentCommits = nWriteThreads; - - // query variables - final int percentRealtimeQuery = 101; - final AtomicLong operations = new AtomicLong(50000); // number of query operations to perform in total - int nReadThreads = 1; - **/ - - - verbose("commitPercent",commitPercent, "softCommitPercent",softCommitPercent, "deletePercent",deletePercent, "deleteByQueryPercent",deleteByQueryPercent - , "ndocs",ndocs,"nWriteThreads",nWriteThreads,"percentRealtimeQuery",percentRealtimeQuery,"operations",operations, "nReadThreads",nReadThreads); + // testing + // final int commitPercent = 5; + // final int softCommitPercent = 100; // what percent of the commits are soft + // final int deletePercent = 0; + // final int deleteByQueryPercent = 50; + // final int ndocs = 1; + // int nWriteThreads = 2; + // + // final int maxConcurrentCommits = nWriteThreads; + // + // // query variables + // final int percentRealtimeQuery = 101; + // final AtomicLong operations = new AtomicLong(50000); // number of query operations to + // perform in total + // int nReadThreads = 1; + + verbose( + "commitPercent", + commitPercent, + "softCommitPercent", + softCommitPercent, + "deletePercent", + deletePercent, + "deleteByQueryPercent", + deleteByQueryPercent, + "ndocs", + ndocs, + "nWriteThreads", + nWriteThreads, + "percentRealtimeQuery", + percentRealtimeQuery, + "operations", + operations, + "nReadThreads", + nReadThreads); initModel(ndocs); @@ -101,229 +114,253 @@ public void testStressReorderVersions() throws Exception { List threads 
= new ArrayList<>(); - final AtomicLong testVersion = new AtomicLong(0); - for (int i=0; i 0) { - int oper = rand.nextInt(100); - - if (oper < commitPercent) { - if (numCommitting.incrementAndGet() <= maxConcurrentCommits) { - Map newCommittedModel; - long version; - - synchronized(TestStressReorder.this) { - newCommittedModel = new HashMap<>(model); // take a snapshot - version = snapshotCount++; - } - - if (rand.nextInt(100) < softCommitPercent) { - verbose("softCommit start"); - assertU(TestHarness.commit("softCommit","true")); - verbose("softCommit end"); - } else { - verbose("hardCommit start"); - assertU(commit()); - verbose("hardCommit end"); - } - - synchronized(TestStressReorder.this) { - // install this model snapshot only if it's newer than the current one - if (version >= committedModelClock) { - if (VERBOSE) { - verbose("installing new committedModel version="+committedModelClock); + for (int i = 0; i < nWriteThreads; i++) { + Thread thread = + new Thread("WRITER" + i) { + Random rand = new Random(random().nextInt()); + + @Override + public void run() { + try { + while (operations.get() > 0) { + int oper = rand.nextInt(100); + + if (oper < commitPercent) { + if (numCommitting.incrementAndGet() <= maxConcurrentCommits) { + Map newCommittedModel; + long version; + + synchronized (TestStressReorder.this) { + newCommittedModel = new HashMap<>(model); // take a snapshot + version = snapshotCount++; + } + + if (rand.nextInt(100) < softCommitPercent) { + verbose("softCommit start"); + assertU(TestHarness.commit("softCommit", "true")); + verbose("softCommit end"); + } else { + verbose("hardCommit start"); + assertU(commit()); + verbose("hardCommit end"); + } + + synchronized (TestStressReorder.this) { + // install this model snapshot only if it's newer than the current one + if (version >= committedModelClock) { + if (VERBOSE) { + verbose("installing new committedModel version=" + committedModelClock); + } + committedModel = newCommittedModel; + committedModelClock = version; + } + } } - committedModel = newCommittedModel; - committedModelClock = version; + numCommitting.decrementAndGet(); + continue; } - } - } - numCommitting.decrementAndGet(); - continue; - } - - int id; + int id; - if (rand.nextBoolean()) { - id = rand.nextInt(ndocs); - } else { - id = lastId; // reuse the last ID half of the time to force more race conditions - } - - // set the lastId before we actually change it sometimes to try and - // uncover more race conditions between writing and reading - boolean before = rand.nextBoolean(); - if (before) { - lastId = id; - } + if (rand.nextBoolean()) { + id = rand.nextInt(ndocs); + } else { + id = lastId; // reuse the last ID half of the time to force more race conditions + } - DocInfo info = model.get(id); + // set the lastId before we actually change it sometimes to try and + // uncover more race conditions between writing and reading + boolean before = rand.nextBoolean(); + if (before) { + lastId = id; + } - long val = info.val; - long nextVal = Math.abs(val)+1; + DocInfo info = model.get(id); - // the version we set on the update should determine who wins - // These versions are not derived from the actual leader update handler hand hence this - // test may need to change depending on how we handle version numbers. 
- long version = testVersion.incrementAndGet(); + long val = info.val; + long nextVal = Math.abs(val) + 1; - // yield after getting the next version to increase the odds of updates happening out of order - if (rand.nextBoolean()) Thread.yield(); + // the version we set on the update should determine who wins + // These versions are not derived from the actual leader update handler hand hence + // this test may need to change depending on how we handle version numbers. + long version = testVersion.incrementAndGet(); - if (oper < commitPercent + deletePercent) { - verbose("deleting id",id,"val=",nextVal,"version",version); + // yield after getting the next version to increase the odds of updates happening + // out of order + if (rand.nextBoolean()) Thread.yield(); - Long returnedVersion = deleteAndGetVersion(Integer.toString(id), params("_version_",Long.toString(-version), DISTRIB_UPDATE_PARAM,FROM_LEADER)); + if (oper < commitPercent + deletePercent) { + verbose("deleting id", id, "val=", nextVal, "version", version); - // TODO: returning versions for these types of updates is redundant - // but if we do return, they had better be equal - if (returnedVersion != null) { - assertEquals(-version, returnedVersion.longValue()); - } + Long returnedVersion = + deleteAndGetVersion( + Integer.toString(id), + params( + "_version_", + Long.toString(-version), + DISTRIB_UPDATE_PARAM, + FROM_LEADER)); - // only update model if the version is newer - synchronized (model) { - DocInfo currInfo = model.get(id); - if (Math.abs(version) > Math.abs(currInfo.version)) { - model.put(id, new DocInfo(version, -nextVal)); - } - } + // TODO: returning versions for these types of updates is redundant + // but if we do return, they had better be equal + if (returnedVersion != null) { + assertEquals(-version, returnedVersion.longValue()); + } - verbose("deleting id", id, "val=",nextVal,"version",version,"DONE"); - } else if (oper < commitPercent + deletePercent + deleteByQueryPercent) { + // only update model if the version is newer + synchronized (model) { + DocInfo currInfo = model.get(id); + if (Math.abs(version) > Math.abs(currInfo.version)) { + model.put(id, new DocInfo(version, -nextVal)); + } + } - verbose("deleteByQuery id",id,"val=",nextVal,"version",version); + verbose("deleting id", id, "val=", nextVal, "version", version, "DONE"); + } else if (oper < commitPercent + deletePercent + deleteByQueryPercent) { - Long returnedVersion = deleteByQueryAndGetVersion("id:"+Integer.toString(id), params("_version_",Long.toString(-version), DISTRIB_UPDATE_PARAM,FROM_LEADER)); + verbose("deleteByQuery id", id, "val=", nextVal, "version", version); - // TODO: returning versions for these types of updates is redundant - // but if we do return, they had better be equal - if (returnedVersion != null) { - assertEquals(-version, returnedVersion.longValue()); - } + Long returnedVersion = + deleteByQueryAndGetVersion( + "id:" + Integer.toString(id), + params( + "_version_", + Long.toString(-version), + DISTRIB_UPDATE_PARAM, + FROM_LEADER)); - // only update model if the version is newer - synchronized (model) { - DocInfo currInfo = model.get(id); - if (Math.abs(version) > Math.abs(currInfo.version)) { - model.put(id, new DocInfo(version, -nextVal)); - } - } + // TODO: returning versions for these types of updates is redundant + // but if we do return, they had better be equal + if (returnedVersion != null) { + assertEquals(-version, returnedVersion.longValue()); + } - verbose("deleteByQuery id", id, 
"val=",nextVal,"version",version,"DONE"); + // only update model if the version is newer + synchronized (model) { + DocInfo currInfo = model.get(id); + if (Math.abs(version) > Math.abs(currInfo.version)) { + model.put(id, new DocInfo(version, -nextVal)); + } + } - } else { - verbose("adding id", id, "val=", nextVal,"version",version); + verbose("deleteByQuery id", id, "val=", nextVal, "version", version, "DONE"); + + } else { + verbose("adding id", id, "val=", nextVal, "version", version); + + Long returnedVersion = + addAndGetVersion( + sdoc( + "id", + Integer.toString(id), + FIELD, + Long.toString(nextVal), + "_version_", + Long.toString(version)), + params(DISTRIB_UPDATE_PARAM, FROM_LEADER)); + if (returnedVersion != null) { + assertEquals(version, returnedVersion.longValue()); + } - Long returnedVersion = addAndGetVersion(sdoc("id", Integer.toString(id), FIELD, Long.toString(nextVal), "_version_",Long.toString(version)), params(DISTRIB_UPDATE_PARAM,FROM_LEADER)); - if (returnedVersion != null) { - assertEquals(version, returnedVersion.longValue()); - } + // only update model if the version is newer + synchronized (model) { + DocInfo currInfo = model.get(id); + if (version > currInfo.version) { + model.put(id, new DocInfo(version, nextVal)); + } + } - // only update model if the version is newer - synchronized (model) { - DocInfo currInfo = model.get(id); - if (version > currInfo.version) { - model.put(id, new DocInfo(version, nextVal)); + if (VERBOSE) { + verbose("adding id", id, "val=", nextVal, "version", version, "DONE"); + } } - } + // } // end sync - if (VERBOSE) { - verbose("adding id", id, "val=", nextVal,"version",version,"DONE"); + if (!before) { + lastId = id; + } } - + } catch (Throwable e) { + operations.set(-1L); + log.error("", e); + throw new RuntimeException(e); } - // } // end sync - - if (!before) { - lastId = id; } - } - } catch (Throwable e) { - operations.set(-1L); - log.error("",e); - throw new RuntimeException(e); - } - } - }; + }; threads.add(thread); } + for (int i = 0; i < nReadThreads; i++) { + Thread thread = + new Thread("READER" + i) { + Random rand = new Random(random().nextInt()); + + @Override + public void run() { + try { + while (operations.decrementAndGet() >= 0) { + // bias toward a recently changed doc + int id = rand.nextInt(100) < 25 ? lastId : rand.nextInt(ndocs); + + // when indexing, we update the index, then the model + // so when querying, we should first check the model, and then the index + + boolean realTime = rand.nextInt(100) < percentRealtimeQuery; + DocInfo info; + + if (realTime) { + info = model.get(id); + } else { + synchronized (TestStressReorder.this) { + info = committedModel.get(id); + } + } - for (int i=0; i= 0) { - // bias toward a recently changed doc - int id = rand.nextInt(100) < 25 ? 
lastId : rand.nextInt(ndocs); - - // when indexing, we update the index, then the model - // so when querying, we should first check the model, and then the index - - boolean realTime = rand.nextInt(100) < percentRealtimeQuery; - DocInfo info; - - if (realTime) { - info = model.get(id); - } else { - synchronized(TestStressReorder.this) { - info = committedModel.get(id); - } - } - - if (VERBOSE) { - verbose("querying id", id); - } - SolrQueryRequest sreq; - if (realTime) { - sreq = req("wt","json", "qt","/get", "ids",Integer.toString(id)); - } else { - sreq = req("wt","json", "q","id:"+Integer.toString(id), "omitHeader","true"); - } + if (VERBOSE) { + verbose("querying id", id); + } + SolrQueryRequest sreq; + if (realTime) { + sreq = req("wt", "json", "qt", "/get", "ids", Integer.toString(id)); + } else { + sreq = + req("wt", "json", "q", "id:" + Integer.toString(id), "omitHeader", "true"); + } - String response = h.query(sreq); - @SuppressWarnings({"rawtypes"}) - Map rsp = (Map) Utils.fromJSONString(response); - @SuppressWarnings({"rawtypes"}) - List doclist = (List)(((Map)rsp.get("response")).get("docs")); - if (doclist.size() == 0) { - // there's no info we can get back with a delete, so not much we can check without further synchronization - } else { - assertEquals(1, doclist.size()); - long foundVal = (Long)(((Map)doclist.get(0)).get(FIELD)); - long foundVer = (Long)(((Map)doclist.get(0)).get("_version_")); - if (foundVer < Math.abs(info.version) - || (foundVer == info.version && foundVal != info.val) ) { // if the version matches, the val must - log.error("ERROR, id={} found={} model {}", id, response, info); - assertTrue(false); + String response = h.query(sreq); + @SuppressWarnings({"rawtypes"}) + Map rsp = (Map) Utils.fromJSONString(response); + @SuppressWarnings({"rawtypes"}) + List doclist = (List) (((Map) rsp.get("response")).get("docs")); + if (doclist.size() == 0) { + // there's no info we can get back with a delete, so not much we can check + // without further synchronization + } else { + assertEquals(1, doclist.size()); + long foundVal = (Long) (((Map) doclist.get(0)).get(FIELD)); + long foundVer = (Long) (((Map) doclist.get(0)).get("_version_")); + if (foundVer < Math.abs(info.version) + || (foundVer == info.version + && foundVal != info.val)) { // if the version matches, the val must + log.error("ERROR, id={} found={} model {}", id, response, info); + assertTrue(false); + } + } } + } catch (Throwable e) { + operations.set(-1L); + log.error("", e); + throw new RuntimeException(e); } } - } catch (Throwable e) { - operations.set(-1L); - log.error("",e); - throw new RuntimeException(e); - } - } - }; + }; threads.add(thread); } - for (Thread thread : threads) { thread.start(); } @@ -331,7 +368,5 @@ public void run() { for (Thread thread : threads) { thread.join(); } - } - } diff --git a/solr/core/src/test/org/apache/solr/search/TestStressUserVersions.java b/solr/core/src/test/org/apache/solr/search/TestStressUserVersions.java index b550b17f313..e4a8fb29202 100644 --- a/solr/core/src/test/org/apache/solr/search/TestStressUserVersions.java +++ b/solr/core/src/test/org/apache/solr/search/TestStressUserVersions.java @@ -16,7 +16,6 @@ */ package org.apache.solr.search; - import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.HashMap; @@ -25,7 +24,6 @@ import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; - import org.apache.solr.common.util.Utils; import 
org.apache.solr.request.SolrQueryRequest; import org.apache.solr.util.TestHarness; @@ -34,42 +32,40 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; - public class TestStressUserVersions extends TestRTGBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-externalversionconstraint.xml","schema15.xml"); + initCore("solrconfig-externalversionconstraint.xml", "schema15.xml"); } private static String vfield = "my_version_l"; private static String lfield = "live_b"; private static String dversion = "del_version"; - public static void verbose(Object... args) { // if (!log.isDebugEnabled()) return; StringBuilder sb = new StringBuilder("VERBOSE:"); for (Object o : args) { sb.append(' '); - sb.append(o==null ? "(null)" : o.toString()); + sb.append(o == null ? "(null)" : o.toString()); } log.info("{}", sb); } // This version simulates user versions sometimes being reordered. - // It should fail (and currently does) if optimistic concurrency is disabled (cmd.setVersion(currVersion)) - // in DocBasedVersionConstraintsProcessor. + // It should fail (and currently does) if optimistic concurrency is disabled + // (cmd.setVersion(currVersion)) in DocBasedVersionConstraintsProcessor. @Test public void testStressReorderVersions() throws Exception { clearIndex(); assertU(commit()); final int commitPercent = 5 + random().nextInt(20); - final int softCommitPercent = 30+random().nextInt(75); // what percent of the commits are soft - final int deletePercent = 4+random().nextInt(25); + final int softCommitPercent = 30 + random().nextInt(75); // what percent of the commits are soft + final int deletePercent = 4 + random().nextInt(25); final int deleteByQueryPercent = random().nextInt(8); final int ndocs = 5 + (random().nextBoolean() ? 
random().nextInt(25) : random().nextInt(200)); int nWriteThreads = 5 + random().nextInt(25); @@ -78,29 +74,45 @@ public void testStressReorderVersions() throws Exception { // query variables final int percentRealtimeQuery = 75; - final AtomicLong operations = new AtomicLong(10000); // number of query operations to perform in total - ramp up for a longer test + // number of query operations to perform in total - ramp up for a longer test + final AtomicLong operations = new AtomicLong(10000); int nReadThreads = 5 + random().nextInt(25); - - /** // testing - final int commitPercent = 5; - final int softCommitPercent = 100; // what percent of the commits are soft - final int deletePercent = 0; - final int deleteByQueryPercent = 50; - final int ndocs = 1; - int nWriteThreads = 2; - - final int maxConcurrentCommits = nWriteThreads; - - // query variables - final int percentRealtimeQuery = 101; - final AtomicLong operations = new AtomicLong(50000); // number of query operations to perform in total - int nReadThreads = 1; - **/ - - - verbose("commitPercent",commitPercent, "softCommitPercent",softCommitPercent, "deletePercent",deletePercent, "deleteByQueryPercent",deleteByQueryPercent - , "ndocs",ndocs,"nWriteThreads",nWriteThreads,"percentRealtimeQuery",percentRealtimeQuery,"operations",operations, "nReadThreads",nReadThreads); + // testing + // final int commitPercent = 5; + // final int softCommitPercent = 100; // what percent of the commits are soft + // final int deletePercent = 0; + // final int deleteByQueryPercent = 50; + // final int ndocs = 1; + // int nWriteThreads = 2; + // + // final int maxConcurrentCommits = nWriteThreads; + // + // // query variables + // final int percentRealtimeQuery = 101; + // final AtomicLong operations = new AtomicLong(50000); // number of query operations to + // perform in total + // int nReadThreads = 1; + + verbose( + "commitPercent", + commitPercent, + "softCommitPercent", + softCommitPercent, + "deletePercent", + deletePercent, + "deleteByQueryPercent", + deleteByQueryPercent, + "ndocs", + ndocs, + "nWriteThreads", + nWriteThreads, + "percentRealtimeQuery", + percentRealtimeQuery, + "operations", + operations, + "nReadThreads", + nReadThreads); initModel(ndocs); @@ -108,212 +120,223 @@ public void testStressReorderVersions() throws Exception { List threads = new ArrayList<>(); - final AtomicLong testVersion = new AtomicLong(0); - for (int i=0; i 0) { - int oper = rand.nextInt(100); + for (int i = 0; i < nWriteThreads; i++) { + Thread thread = + new Thread("WRITER" + i) { + Random rand = new Random(random().nextInt()); + + @Override + public void run() { + try { + while (operations.get() > 0) { + int oper = rand.nextInt(100); + + if (oper < commitPercent) { + if (numCommitting.incrementAndGet() <= maxConcurrentCommits) { + Map newCommittedModel; + long version; + + synchronized (TestStressUserVersions.this) { + newCommittedModel = new HashMap<>(model); // take a snapshot + version = snapshotCount++; + } - if (oper < commitPercent) { - if (numCommitting.incrementAndGet() <= maxConcurrentCommits) { - Map newCommittedModel; - long version; + if (rand.nextInt(100) < softCommitPercent) { + verbose("softCommit start"); + assertU(TestHarness.commit("softCommit", "true")); + verbose("softCommit end"); + } else { + verbose("hardCommit start"); + assertU(commit()); + verbose("hardCommit end"); + } - synchronized(TestStressUserVersions.this) { - newCommittedModel = new HashMap<>(model); // take a snapshot - version = snapshotCount++; + synchronized 
(TestStressUserVersions.this) { + // install this model snapshot only if it's newer than the current one + if (version >= committedModelClock) { + if (VERBOSE) { + verbose("installing new committedModel version=" + committedModelClock); + } + committedModel = newCommittedModel; + committedModelClock = version; + } + } + } + numCommitting.decrementAndGet(); + continue; } - if (rand.nextInt(100) < softCommitPercent) { - verbose("softCommit start"); - assertU(TestHarness.commit("softCommit","true")); - verbose("softCommit end"); + int id; + + if (rand.nextBoolean()) { + id = rand.nextInt(ndocs); } else { - verbose("hardCommit start"); - assertU(commit()); - verbose("hardCommit end"); + id = lastId; // reuse the last ID half of the time to force more race conditions } - synchronized(TestStressUserVersions.this) { - // install this model snapshot only if it's newer than the current one - if (version >= committedModelClock) { - if (VERBOSE) { - verbose("installing new committedModel version="+committedModelClock); - } - committedModel = newCommittedModel; - committedModelClock = version; - } + // set the lastId before we actually change it sometimes to try and + // uncover more race conditions between writing and reading + boolean before = rand.nextBoolean(); + if (before) { + lastId = id; } - } - numCommitting.decrementAndGet(); - continue; - } - - int id; + DocInfo info = model.get(id); - if (rand.nextBoolean()) { - id = rand.nextInt(ndocs); - } else { - id = lastId; // reuse the last ID half of the time to force more race conditions - } + long val = info.val; + long nextVal = Math.abs(val) + 1; - // set the lastId before we actually change it sometimes to try and - // uncover more race conditions between writing and reading - boolean before = rand.nextBoolean(); - if (before) { - lastId = id; - } + // the version we set on the update should determine who wins + // These versions are not derived from the actual leader update handler hand hence + // this test may need to change depending on how we handle version numbers. + long version = testVersion.incrementAndGet(); - DocInfo info = model.get(id); + // yield after getting the next version to increase the odds of updates happening + // out of order + if (rand.nextBoolean()) Thread.yield(); - long val = info.val; - long nextVal = Math.abs(val)+1; + if (oper < commitPercent + deletePercent) { + verbose("deleting id", id, "val=", nextVal, "version", version); - // the version we set on the update should determine who wins - // These versions are not derived from the actual leader update handler hand hence this - // test may need to change depending on how we handle version numbers. 
- long version = testVersion.incrementAndGet(); + Long returnedVersion = + deleteAndGetVersion( + Integer.toString(id), params(dversion, Long.toString(version))); - // yield after getting the next version to increase the odds of updates happening out of order - if (rand.nextBoolean()) Thread.yield(); + // only update model if the version is newer + synchronized (model) { + DocInfo currInfo = model.get(id); + if (Math.abs(version) > Math.abs(currInfo.version)) { + model.put(id, new DocInfo(version, -nextVal)); + } + } - if (oper < commitPercent + deletePercent) { - verbose("deleting id",id,"val=",nextVal,"version",version); + verbose("deleting id", id, "val=", nextVal, "version", version, "DONE"); - Long returnedVersion = deleteAndGetVersion(Integer.toString(id), params(dversion, Long.toString(version))); + } else { + verbose("adding id", id, "val=", nextVal, "version", version); + + Long returnedVersion = + addAndGetVersion( + sdoc( + "id", + Integer.toString(id), + FIELD, + Long.toString(nextVal), + vfield, + Long.toString(version)), + null); + + // only update model if the version is newer + synchronized (model) { + DocInfo currInfo = model.get(id); + if (version > currInfo.version) { + model.put(id, new DocInfo(version, nextVal)); + } + } - // only update model if the version is newer - synchronized (model) { - DocInfo currInfo = model.get(id); - if (Math.abs(version) > Math.abs(currInfo.version)) { - model.put(id, new DocInfo(version, -nextVal)); + if (VERBOSE) { + verbose("adding id", id, "val=", nextVal, "version", version, "DONE"); + } } - } - - verbose("deleting id", id, "val=",nextVal,"version",version,"DONE"); - - } else { - verbose("adding id", id, "val=", nextVal,"version",version); - - Long returnedVersion = addAndGetVersion(sdoc("id", Integer.toString(id), FIELD, Long.toString(nextVal), vfield, Long.toString(version)), null); + // } // end sync - // only update model if the version is newer - synchronized (model) { - DocInfo currInfo = model.get(id); - if (version > currInfo.version) { - model.put(id, new DocInfo(version, nextVal)); + if (!before) { + lastId = id; } } - - if (VERBOSE) { - verbose("adding id", id, "val=", nextVal,"version",version,"DONE"); - } - - } - // } // end sync - - if (!before) { - lastId = id; + } catch (Throwable e) { + operations.set(-1L); + log.error("", e); + throw new RuntimeException(e); } } - } catch (Throwable e) { - operations.set(-1L); - log.error("",e); - throw new RuntimeException(e); - } - } - }; + }; threads.add(thread); } + for (int i = 0; i < nReadThreads; i++) { + Thread thread = + new Thread("READER" + i) { + Random rand = new Random(random().nextInt()); - for (int i=0; i= 0) { - // bias toward a recently changed doc - int id = rand.nextInt(100) < 25 ? lastId : rand.nextInt(ndocs); + @Override + public void run() { + try { + while (operations.decrementAndGet() >= 0) { + // bias toward a recently changed doc + int id = rand.nextInt(100) < 25 ? 
lastId : rand.nextInt(ndocs); - // when indexing, we update the index, then the model - // so when querying, we should first check the model, and then the index + // when indexing, we update the index, then the model + // so when querying, we should first check the model, and then the index - boolean realTime = rand.nextInt(100) < percentRealtimeQuery; - DocInfo info; + boolean realTime = rand.nextInt(100) < percentRealtimeQuery; + DocInfo info; - if (realTime) { - info = model.get(id); - } else { - synchronized(TestStressUserVersions.this) { - info = committedModel.get(id); - } - } - - if (VERBOSE) { - verbose("querying id", id); - } - SolrQueryRequest sreq; - if (realTime) { - sreq = req("wt","json", "qt","/get", "ids",Integer.toString(id)); - } else { - sreq = req("wt","json", "q","id:"+Integer.toString(id), "omitHeader","true"); - } + if (realTime) { + info = model.get(id); + } else { + synchronized (TestStressUserVersions.this) { + info = committedModel.get(id); + } + } - String response = h.query(sreq); - @SuppressWarnings({"rawtypes"}) - Map rsp = (Map) Utils.fromJSONString(response); - @SuppressWarnings({"rawtypes"}) - List doclist = (List)(((Map)rsp.get("response")).get("docs")); - if (doclist.size() == 0) { - // there's no info we can get back with a delete, so not much we can check without further synchronization - } else { - assertEquals(1, doclist.size()); - boolean isLive = (Boolean)(((Map)doclist.get(0)).get(lfield)); - long foundVer = (Long)(((Map)doclist.get(0)).get(vfield)); - - if (isLive) { - long foundVal = (Long)(((Map)doclist.get(0)).get(FIELD)); - if (foundVer < Math.abs(info.version) - || (foundVer == info.version && foundVal != info.val) ) { // if the version matches, the val must - log.error("ERROR, id={} found={} model {}", id, response, info); - assertTrue(false); + if (VERBOSE) { + verbose("querying id", id); } - } else { - // if the doc is deleted (via tombstone), it shouldn't have a value on it. - assertNull( ((Map)doclist.get(0)).get(FIELD) ); + SolrQueryRequest sreq; + if (realTime) { + sreq = req("wt", "json", "qt", "/get", "ids", Integer.toString(id)); + } else { + sreq = + req("wt", "json", "q", "id:" + Integer.toString(id), "omitHeader", "true"); + } + + String response = h.query(sreq); + @SuppressWarnings({"rawtypes"}) + Map rsp = (Map) Utils.fromJSONString(response); + @SuppressWarnings({"rawtypes"}) + List doclist = (List) (((Map) rsp.get("response")).get("docs")); + if (doclist.size() == 0) { + // there's no info we can get back with a delete, so not much we can check + // without further synchronization + } else { + assertEquals(1, doclist.size()); + boolean isLive = (Boolean) (((Map) doclist.get(0)).get(lfield)); + long foundVer = (Long) (((Map) doclist.get(0)).get(vfield)); + + if (isLive) { + long foundVal = (Long) (((Map) doclist.get(0)).get(FIELD)); + if (foundVer < Math.abs(info.version) + || (foundVer == info.version + && foundVal != info.val)) { // if the version matches, the val must + log.error("ERROR, id={} found={} model {}", id, response, info); + assertTrue(false); + } + } else { + // if the doc is deleted (via tombstone), it shouldn't have a value on it. 
+ assertNull(((Map) doclist.get(0)).get(FIELD)); - if (foundVer < Math.abs(info.version)) { - log.error("ERROR, id={} found={} model {}", id, response, info); - assertTrue(false); + if (foundVer < Math.abs(info.version)) { + log.error("ERROR, id={} found={} model {}", id, response, info); + assertTrue(false); + } + } } } - + } catch (Throwable e) { + operations.set(-1L); + log.error("", e); + throw new RuntimeException(e); } } - } catch (Throwable e) { - operations.set(-1L); - log.error("",e); - throw new RuntimeException(e); - } - } - }; + }; threads.add(thread); } - for (Thread thread : threads) { thread.start(); } @@ -321,7 +344,5 @@ public void run() { for (Thread thread : threads) { thread.join(); } - } - } diff --git a/solr/core/src/test/org/apache/solr/search/TestStressVersions.java b/solr/core/src/test/org/apache/solr/search/TestStressVersions.java index ff3bf0c1b7a..497ab1e0bb4 100644 --- a/solr/core/src/test/org/apache/solr/search/TestStressVersions.java +++ b/solr/core/src/test/org/apache/solr/search/TestStressVersions.java @@ -16,6 +16,7 @@ */ package org.apache.solr.search; +import static org.apache.solr.core.SolrCore.verbose; import java.util.ArrayList; import java.util.HashMap; @@ -24,23 +25,19 @@ import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; - import org.apache.solr.common.util.Utils; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.util.TestHarness; import org.junit.BeforeClass; import org.junit.Test; -import static org.apache.solr.core.SolrCore.verbose; - public class TestStressVersions extends TestRTGBase { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-tlog.xml","schema15.xml"); + initCore("solrconfig-tlog.xml", "schema15.xml"); } - // This version doesn't synchronize on id to tell what update won, but instead uses versions @Test public void testStressGetRealtimeVersions() throws Exception { @@ -48,11 +45,13 @@ public void testStressGetRealtimeVersions() throws Exception { assertU(commit()); final int commitPercent = 5 + random().nextInt(20); - final int softCommitPercent = 30+random().nextInt(75); // what percent of the commits are soft - final int deletePercent = 4+random().nextInt(25); + final int softCommitPercent = 30 + random().nextInt(75); // what percent of the commits are soft + final int deletePercent = 4 + random().nextInt(25); final int deleteByQueryPercent = 1 + random().nextInt(5); - final int optimisticPercent = 1+random().nextInt(50); // percent change that an update uses optimistic locking - final int optimisticCorrectPercent = 25+random().nextInt(70); // percent change that a version specified will be correct + final int optimisticPercent = + 1 + random().nextInt(50); // percent chance that an update uses optimistic locking + final int optimisticCorrectPercent = + 25 + random().nextInt(70); // percent chance that a version specified will be correct final int ndocs = 5 + (random().nextBoolean() ? 
random().nextInt(25) : random().nextInt(200)); int nWriteThreads = 5 + random().nextInt(25); @@ -60,215 +59,218 @@ public void testStressGetRealtimeVersions() throws Exception { // query variables final int percentRealtimeQuery = 75; - final AtomicLong operations = new AtomicLong(50000); // number of query operations to perform in total + final AtomicLong operations = + new AtomicLong(50000); // number of query operations to perform in total int nReadThreads = 5 + random().nextInt(25); - - initModel(ndocs); final AtomicInteger numCommitting = new AtomicInteger(); List threads = new ArrayList<>(); - for (int i=0; i 0) { - int oper = rand.nextInt(100); - - if (oper < commitPercent) { - if (numCommitting.incrementAndGet() <= maxConcurrentCommits) { - Map newCommittedModel; - long version; - - synchronized(globalLock) { - newCommittedModel = new HashMap<>(model); // take a snapshot - version = snapshotCount++; - } + for (int i = 0; i < nWriteThreads; i++) { + Thread thread = + new Thread("WRITER" + i) { + Random rand = new Random(random().nextInt()); + + @Override + public void run() { + try { + while (operations.get() > 0) { + int oper = rand.nextInt(100); + + if (oper < commitPercent) { + if (numCommitting.incrementAndGet() <= maxConcurrentCommits) { + Map newCommittedModel; + long version; + + synchronized (globalLock) { + newCommittedModel = new HashMap<>(model); // take a snapshot + version = snapshotCount++; + } - if (rand.nextInt(100) < softCommitPercent) { - verbose("softCommit start"); - assertU(TestHarness.commit("softCommit","true")); - verbose("softCommit end"); - } else { - verbose("hardCommit start"); - assertU(commit()); - verbose("hardCommit end"); - } + if (rand.nextInt(100) < softCommitPercent) { + verbose("softCommit start"); + assertU(TestHarness.commit("softCommit", "true")); + verbose("softCommit end"); + } else { + verbose("hardCommit start"); + assertU(commit()); + verbose("hardCommit end"); + } - synchronized(globalLock) { - // install this model snapshot only if it's newer than the current one - if (version >= committedModelClock) { - if (VERBOSE) { - verbose("installing new committedModel version="+committedModelClock); + synchronized (globalLock) { + // install this model snapshot only if it's newer than the current one + if (version >= committedModelClock) { + if (VERBOSE) { + verbose("installing new committedModel version=" + committedModelClock); + } + committedModel = newCommittedModel; + committedModelClock = version; + } } - committedModel = newCommittedModel; - committedModelClock = version; } + numCommitting.decrementAndGet(); + continue; } - } - numCommitting.decrementAndGet(); - continue; - } - - - int id = rand.nextInt(ndocs); - Object sync = syncArr[id]; - - // set the lastId before we actually change it sometimes to try and - // uncover more race conditions between writing and reading - boolean before = rand.nextBoolean(); - if (before) { - lastId = id; - } - - // We can't concurrently update the same document and retain our invariants of increasing values - // since we can't guarantee what order the updates will be executed. - // Even with versions, we can't remove the sync because increasing versions does not mean increasing vals. 
- // - // NOTE: versioning means we can now remove the sync and tell what update "won" - // synchronized (sync) { - DocInfo info = model.get(id); - long val = info.val; - long nextVal = Math.abs(val)+1; + int id = rand.nextInt(ndocs); + Object sync = syncArr[id]; - if (oper < commitPercent + deletePercent) { - verbose("deleting id",id,"val=",nextVal); - - Long version = deleteAndGetVersion(Integer.toString(id), null); - assertTrue(version < 0); - - // only update model if the version is newer - synchronized (model) { - DocInfo currInfo = model.get(id); - if (Math.abs(version) > Math.abs(currInfo.version)) { - model.put(id, new DocInfo(version, -nextVal)); + // set the lastId before we actually change it sometimes to try and + // uncover more race conditions between writing and reading + boolean before = rand.nextBoolean(); + if (before) { + lastId = id; } - } - verbose("deleting id", id, "val=",nextVal,"DONE"); - } else if (oper < commitPercent + deletePercent + deleteByQueryPercent) { - verbose("deleteByQyery id",id,"val=",nextVal); + // We can't concurrently update the same document and retain our invariants of + // increasing values since we can't guarantee what order the updates will be + // executed. Even with versions, we can't remove the sync because increasing + // versions does not mean increasing vals. + // + // NOTE: versioning means we can now remove the sync and tell what update "won" + // synchronized (sync) { + DocInfo info = model.get(id); + + long val = info.val; + long nextVal = Math.abs(val) + 1; + + if (oper < commitPercent + deletePercent) { + verbose("deleting id", id, "val=", nextVal); + + Long version = deleteAndGetVersion(Integer.toString(id), null); + assertTrue(version < 0); + + // only update model if the version is newer + synchronized (model) { + DocInfo currInfo = model.get(id); + if (Math.abs(version) > Math.abs(currInfo.version)) { + model.put(id, new DocInfo(version, -nextVal)); + } + } - Long version = deleteByQueryAndGetVersion("id:"+Integer.toString(id), null); + verbose("deleting id", id, "val=", nextVal, "DONE"); - assertTrue(version < 0); + } else if (oper < commitPercent + deletePercent + deleteByQueryPercent) { + verbose("deleteByQuery id", id, "val=", nextVal); - // only update model if the version is newer - synchronized (model) { - DocInfo currInfo = model.get(id); - if (Math.abs(version) > Math.abs(currInfo.version)) { - model.put(id, new DocInfo(version, -nextVal)); - } - } + Long version = deleteByQueryAndGetVersion("id:" + Integer.toString(id), null); + assertTrue(version < 0); - verbose("deleteByQyery id", id, "val=",nextVal,"DONE"); - } else { - verbose("adding id", id, "val=", nextVal); + // only update model if the version is newer + synchronized (model) { + DocInfo currInfo = model.get(id); + if (Math.abs(version) > Math.abs(currInfo.version)) { + model.put(id, new DocInfo(version, -nextVal)); + } + } - // assertU(adoc("id",Integer.toString(id), field, Long.toString(nextVal))); - Long version = addAndGetVersion(sdoc("id", Integer.toString(id), FIELD, Long.toString(nextVal)), null); + verbose("deleteByQuery id", id, "val=", nextVal, "DONE"); - assertTrue(version > 0); + } else { + verbose("adding id", id, "val=", nextVal); + + // assertU(adoc("id",Integer.toString(id), field, Long.toString(nextVal))); + Long version = + addAndGetVersion( + sdoc("id", Integer.toString(id), FIELD, Long.toString(nextVal)), null); + assertTrue(version > 0); + + // only update model if the version is newer + synchronized (model) { + DocInfo currInfo = 
model.get(id); + if (version > currInfo.version) { + model.put(id, new DocInfo(version, nextVal)); + } + } - // only update model if the version is newer - synchronized (model) { - DocInfo currInfo = model.get(id); - if (version > currInfo.version) { - model.put(id, new DocInfo(version, nextVal)); + if (VERBOSE) { + verbose("adding id", id, "val=", nextVal, "DONE"); + } } - } + // } // end sync - if (VERBOSE) { - verbose("adding id", id, "val=", nextVal,"DONE"); + if (!before) { + lastId = id; + } } - - } - // } // end sync - - if (!before) { - lastId = id; + } catch (Throwable e) { + operations.set(-1L); + throw new RuntimeException(e); } } - } catch (Throwable e) { - operations.set(-1L); - throw new RuntimeException(e); - } - } - }; + }; threads.add(thread); } + for (int i = 0; i < nReadThreads; i++) { + Thread thread = + new Thread("READER" + i) { + Random rand = new Random(random().nextInt()); - for (int i=0; i= 0) { - // bias toward a recently changed doc - int id = rand.nextInt(100) < 25 ? lastId : rand.nextInt(ndocs); + @Override + public void run() { + try { + while (operations.decrementAndGet() >= 0) { + // bias toward a recently changed doc + int id = rand.nextInt(100) < 25 ? lastId : rand.nextInt(ndocs); - // when indexing, we update the index, then the model - // so when querying, we should first check the model, and then the index + // when indexing, we update the index, then the model + // so when querying, we should first check the model, and then the index - boolean realTime = rand.nextInt(100) < percentRealtimeQuery; - DocInfo info; + boolean realTime = rand.nextInt(100) < percentRealtimeQuery; + DocInfo info; - if (realTime) { - info = model.get(id); - } else { - synchronized(globalLock) { - info = committedModel.get(id); - } - } + if (realTime) { + info = model.get(id); + } else { + synchronized (globalLock) { + info = committedModel.get(id); + } + } - if (VERBOSE) { - verbose("querying id", id); - } - SolrQueryRequest sreq; - if (realTime) { - sreq = req("wt","json", "qt","/get", "ids",Integer.toString(id)); - } else { - sreq = req("wt","json", "q","id:"+Integer.toString(id), "omitHeader","true"); - } + if (VERBOSE) { + verbose("querying id", id); + } + SolrQueryRequest sreq; + if (realTime) { + sreq = req("wt", "json", "qt", "/get", "ids", Integer.toString(id)); + } else { + sreq = + req("wt", "json", "q", "id:" + Integer.toString(id), "omitHeader", "true"); + } - String response = h.query(sreq); - @SuppressWarnings({"rawtypes"}) - Map rsp = (Map) Utils.fromJSONString(response); - @SuppressWarnings({"rawtypes"}) - List doclist = (List)(((Map)rsp.get("response")).get("docs")); - if (doclist.size() == 0) { - // there's no info we can get back with a delete, so not much we can check without further synchronization - } else { - assertEquals(1, doclist.size()); - long foundVal = (Long)(((Map)doclist.get(0)).get(FIELD)); - long foundVer = (Long)(((Map)doclist.get(0)).get("_version_")); - if (foundVer < Math.abs(info.version) - || (foundVer == info.version && foundVal != info.val) ) { // if the version matches, the val must - verbose("ERROR, id=", id, "found=",response,"model",info); - assertTrue(false); + String response = h.query(sreq); + @SuppressWarnings({"rawtypes"}) + Map rsp = (Map) Utils.fromJSONString(response); + @SuppressWarnings({"rawtypes"}) + List doclist = (List) (((Map) rsp.get("response")).get("docs")); + if (doclist.size() == 0) { + // there's no info we can get back with a delete, so not much we can check + // without further synchronization + } else { + 
assertEquals(1, doclist.size()); + long foundVal = (Long) (((Map) doclist.get(0)).get(FIELD)); + long foundVer = (Long) (((Map) doclist.get(0)).get("_version_")); + if (foundVer < Math.abs(info.version) + || (foundVer == info.version + && foundVal != info.val)) { // if the version matches, the val must + verbose("ERROR, id=", id, "found=", response, "model", info); + assertTrue(false); + } + } } + } catch (Throwable e) { + operations.set(-1L); + throw new RuntimeException(e); } } - } catch (Throwable e) { - operations.set(-1L); - throw new RuntimeException(e); - } - } - }; + }; threads.add(thread); } - for (Thread thread : threads) { thread.start(); } @@ -276,8 +278,5 @@ public void run() { for (Thread thread : threads) { thread.join(); } - } - - } diff --git a/solr/core/src/test/org/apache/solr/search/TestSurroundQueryParser.java b/solr/core/src/test/org/apache/solr/search/TestSurroundQueryParser.java index bd1c513b859..fe661645be5 100644 --- a/solr/core/src/test/org/apache/solr/search/TestSurroundQueryParser.java +++ b/solr/core/src/test/org/apache/solr/search/TestSurroundQueryParser.java @@ -32,80 +32,75 @@ public void setUp() throws Exception { // if you override setUp or tearDown, you better call // the super classes version super.setUp(); - } - + } + @Override public void tearDown() throws Exception { // if you override setUp or tearDown, you better call // the super classes version super.tearDown(); } - + public void testQueryParser() { String v = "a b c d e a b c f g h i j k l m l k j z z z"; - assertU(adoc("id","1", "text",v, "text_np",v, "name",v)); - - v="abc abxy cde efg ef e "; - assertU(adoc("id","2", "text",v, "text_np",v)); - - v="1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 1001 1002 1003 1004 1005 1006 1007 1008 1009"; - assertU(adoc("id","3", "text",v, "text_np",v)); + assertU(adoc("id", "1", "text", v, "text_np", v, "name", v)); + + v = "abc abxy cde efg ef e "; + assertU(adoc("id", "2", "text", v, "text_np", v)); + + v = + "1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 1001 1002 1003 1004 1005 1006 1007 1008 1009"; + assertU(adoc("id", "3", "text", v, "text_np", v)); assertU(commit()); - - + // run through a series of syntax tests, not exhaustive yet String localP = "{!surround df=text}"; String t1; - t1 = localP+"1 N 2"; - assertQ(req("q", t1, "indent","true") - ,"//*[@numFound='1']"); + t1 = localP + "1 N 2"; + assertQ(req("q", t1, "indent", "true"), "//*[@numFound='1']"); // but ordered search should fail - t1 = localP +"2 W 1"; - assertQ(req("q", t1, "indent","true") - ,"//*[@numFound='0']"); + t1 = localP + "2 W 1"; + assertQ(req("q", t1, "indent", "true"), "//*[@numFound='0']"); // alternate syntax t1 = localP + "3n(a,e)"; - assertQ(req("q", t1, "indent","true") - ,"//*[@numFound='1']"); + assertQ(req("q", t1, "indent", "true"), "//*[@numFound='1']"); // wildcards - t1 =localP + "100* w 20"; - assertQ(req("q", t1, "indent","true") - ,"//*[@numFound='0']"); - t1 =localP + "100* n 20"; - assertQ(req("q", t1, "indent","true") - ,"//*[@numFound='1']"); + t1 = localP + "100* w 20"; + assertQ(req("q", t1, "indent", "true"), "//*[@numFound='0']"); + t1 = localP + "100* n 20"; + assertQ(req("q", t1, "indent", "true"), "//*[@numFound='1']"); // nested t1 = localP + "(1003 2n 1001) 3N 1006"; - assertQ(req("q", t1, "indent","true") - ,"//*[@numFound='1']"); + assertQ(req("q", t1, "indent", "true"), "//*[@numFound='1']"); // test highlighted response with ordered query and hl.usePhraseHighlighter=true - assertQ(req("q", "{!surround df=name}k w l", - "hl", "true", 
- "hl.fl", "name", - "hl.usePhraseHighlighter", "true") - ,"//*[@numFound='1']" - ,"//lst[@name='highlighting']/lst[@name='1']" - ,"//lst[@name='1']/arr[@name='name']/str[.='a b c d e a b c f g h i j k l m l k j z z z']"); + assertQ( + req( + "q", "{!surround df=name}k w l", + "hl", "true", + "hl.fl", "name", + "hl.usePhraseHighlighter", "true"), + "//*[@numFound='1']", + "//lst[@name='highlighting']/lst[@name='1']", + "//lst[@name='1']/arr[@name='name']/str[.='a b c d e a b c f g h i j k l m l k j z z z']"); // test highlighted response with ordered query and hl.usePhraseHighlighter=false // Note: UnifiedHighlighter doesn't support it because RewriteQuery doesn't implement visit(), // but it will work with usePhraseHighlighter (and thus weight.matches mode) // which is the default (as seen above). See SOLR-15962 - assertQ(req("q", "{!surround df=name}k w l", - "hl", "true", - "hl.method", "original", - "hl.fl", "name", - "hl.usePhraseHighlighter", "false") - ,"//*[@numFound='1']" - ,"//lst[@name='highlighting']/lst[@name='1']" - ,"//lst[@name='1']/arr[@name='name']/str[.='a b c d e a b c f g h i j k l m l k j z z z']"); + assertQ( + req( + "q", "{!surround df=name}k w l", + "hl", "true", + "hl.method", "original", + "hl.fl", "name", + "hl.usePhraseHighlighter", "false"), + "//*[@numFound='1']", + "//lst[@name='highlighting']/lst[@name='1']", + "//lst[@name='1']/arr[@name='name']/str[.='a b c d e a b c f g h i j k l m l k j z z z']"); } - - - } diff --git a/solr/core/src/test/org/apache/solr/search/TestTaskManagement.java b/solr/core/src/test/org/apache/solr/search/TestTaskManagement.java index 7330bc3b471..74a946d7651 100644 --- a/solr/core/src/test/org/apache/solr/search/TestTaskManagement.java +++ b/solr/core/src/test/org/apache/solr/search/TestTaskManagement.java @@ -16,6 +16,19 @@ */ package org.apache.solr.search; +import java.io.IOException; +import java.lang.invoke.MethodHandles; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionException; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeUnit; import org.apache.lucene.util.BytesRef; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrServerException; @@ -36,228 +49,226 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.lang.invoke.MethodHandles; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.CompletionException; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.TimeUnit; - public class TestTaskManagement extends SolrCloudTestCase { - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - - private static final String COLLECTION_NAME = "collection1"; + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private ExecutorService queryExecutor; - private ExecutorService cancelExecutor; + private static final String COLLECTION_NAME = "collection1"; - @BeforeClass - public static void setupCluster() throws Exception { - configureCluster(4) - 
.addConfig("conf", configset("cloud-minimal")) - .configure(); - } - - @Before - @Override - public void setUp() throws Exception { - super.setUp(); - - CollectionAdminRequest.createCollection(COLLECTION_NAME, "conf", 2, 1) - .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) - .process(cluster.getSolrClient()); - cluster.waitForActiveCollection(COLLECTION_NAME, 2, 2); - cluster.getSolrClient().setDefaultCollection(COLLECTION_NAME); - - queryExecutor = ExecutorUtil.newMDCAwareCachedThreadPool("TestTaskManagement-Query"); - cancelExecutor = ExecutorUtil.newMDCAwareCachedThreadPool("TestTaskManagement-Cancel"); - - List docs = new ArrayList<>(); - for (int i = 0; i < 100; i++) { - SolrInputDocument doc = new SolrInputDocument(); - doc.addField("id", i); - doc.addField("foo1_s", Integer.toString(i)); - doc.addField("foo2_s", Boolean.toString(i % 2 == 0)); - doc.addField("foo4_s", new BytesRef(Boolean.toString(i % 2 == 0))); - - docs.add(doc); - } - - cluster.getSolrClient().add(docs); - cluster.getSolrClient().commit(); - } + private ExecutorService queryExecutor; + private ExecutorService cancelExecutor; - @After - @Override - public void tearDown() throws Exception { - queryExecutor.shutdown(); - cancelExecutor.shutdown(); + @BeforeClass + public static void setupCluster() throws Exception { + configureCluster(4).addConfig("conf", configset("cloud-minimal")).configure(); + } - queryExecutor.awaitTermination(5, TimeUnit.SECONDS); - CollectionAdminRequest.deleteCollection(COLLECTION_NAME).process(cluster.getSolrClient()); + @Before + @Override + public void setUp() throws Exception { + super.setUp(); - super.tearDown(); - } + CollectionAdminRequest.createCollection(COLLECTION_NAME, "conf", 2, 1) + .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) + .process(cluster.getSolrClient()); + cluster.waitForActiveCollection(COLLECTION_NAME, 2, 2); + cluster.getSolrClient().setDefaultCollection(COLLECTION_NAME); - @Test - public void testNonExistentQuery() throws Exception { - ModifiableSolrParams params = new ModifiableSolrParams(); - params.set("queryUUID", "foobar"); + queryExecutor = ExecutorUtil.newMDCAwareCachedThreadPool("TestTaskManagement-Query"); + cancelExecutor = ExecutorUtil.newMDCAwareCachedThreadPool("TestTaskManagement-Cancel"); - GenericSolrRequest request = new GenericSolrRequest(SolrRequest.METHOD.GET, "/tasks/cancel", params); - NamedList queryResponse = cluster.getSolrClient().request(request); + List docs = new ArrayList<>(); + for (int i = 0; i < 100; i++) { + SolrInputDocument doc = new SolrInputDocument(); + doc.addField("id", i); + doc.addField("foo1_s", Integer.toString(i)); + doc.addField("foo2_s", Boolean.toString(i % 2 == 0)); + doc.addField("foo4_s", new BytesRef(Boolean.toString(i % 2 == 0))); - assertEquals("Query with queryID foobar not found", queryResponse.get("status")); - assertEquals(404, queryResponse.get("responseCode")); + docs.add(doc); } - @Test - public void testCancellationQuery() throws IOException, SolrServerException { - Set queryIdsSet = ConcurrentHashMap.newKeySet(); - Set notFoundIdsSet = ConcurrentHashMap.newKeySet(); - - List> queryFutures = new ArrayList<>(); - - for (int i = 0; i < 100; i++) { - queryFutures.add(executeQueryAsync(Integer.toString(i))); - } + cluster.getSolrClient().add(docs); + cluster.getSolrClient().commit(); + } - List> cancelFutures = new ArrayList<>(); + @After + @Override + public void tearDown() throws Exception { + queryExecutor.shutdown(); + cancelExecutor.shutdown(); - NamedList tasks = listTasks(); 
+ queryExecutor.awaitTermination(5, TimeUnit.SECONDS); + CollectionAdminRequest.deleteCollection(COLLECTION_NAME).process(cluster.getSolrClient()); - for (int i = 0; i < 100; i++) { - cancelFutures.add(cancelQuery(Integer.toString(i), 0, queryIdsSet, notFoundIdsSet)); - } + super.tearDown(); + } - cancelFutures.forEach(CompletableFuture::join); - queryFutures.forEach(CompletableFuture::join); + @Test + public void testNonExistentQuery() throws Exception { + ModifiableSolrParams params = new ModifiableSolrParams(); + params.set("queryUUID", "foobar"); - // There is a very small window where we can successfully cancel the query because QueryComponent will - // aggressively deregister, and even if we use DelayingSearchComponent these queries are not around - // assertFalse("Should have canceled at least one query", queryIdsSet.isEmpty()); - if (log.isInfoEnabled()) { - log.info("Cancelled {} queries", queryIdsSet.size()); - } + GenericSolrRequest request = + new GenericSolrRequest(SolrRequest.METHOD.GET, "/tasks/cancel", params); + NamedList queryResponse = cluster.getSolrClient().request(request); - assertEquals("Total query count did not match the expected value", - queryIdsSet.size() + notFoundIdsSet.size(), 100); - } - - @Test - public void testListCancellableQueries() throws Exception { - for (int i = 0; i < 50; i++) { - executeQueryAsync(Integer.toString(i)); - } + assertEquals("Query with queryID foobar not found", queryResponse.get("status")); + assertEquals(404, queryResponse.get("responseCode")); + } - NamedList result = listTasks(); + @Test + public void testCancellationQuery() throws IOException, SolrServerException { + Set queryIdsSet = ConcurrentHashMap.newKeySet(); + Set notFoundIdsSet = ConcurrentHashMap.newKeySet(); - Iterator> iterator = result.iterator(); + List> queryFutures = new ArrayList<>(); - Set presentQueryIDs = new HashSet<>(); + for (int i = 0; i < 100; i++) { + queryFutures.add(executeQueryAsync(Integer.toString(i))); + } - while (iterator.hasNext()) { - Map.Entry entry = iterator.next(); + List> cancelFutures = new ArrayList<>(); - presentQueryIDs.add(Integer.parseInt(entry.getKey())); - } + NamedList tasks = listTasks(); - assertThat(presentQueryIDs.size(), betweenInclusive(0, 50)); - for (int value : presentQueryIDs) { - assertThat(value, betweenInclusive(0, 49)); - } + for (int i = 0; i < 100; i++) { + cancelFutures.add(cancelQuery(Integer.toString(i), 0, queryIdsSet, notFoundIdsSet)); } - private static Matcher betweenInclusive(int lower, int upper) { - return Matchers.allOf(Matchers.greaterThanOrEqualTo(lower), Matchers.lessThanOrEqualTo(upper)); - } + cancelFutures.forEach(CompletableFuture::join); + queryFutures.forEach(CompletableFuture::join); - @SuppressWarnings("unchecked") - private NamedList listTasks() throws SolrServerException, IOException { - NamedList response = cluster.getSolrClient().request( - new GenericSolrRequest(SolrRequest.METHOD.GET, "/tasks/list", null)); - return (NamedList) response.get("taskList"); + // There is a very small window where we can successfully cancel the query because + // QueryComponent will aggressively deregister, and even if we use DelayingSearchComponent these + // queries are not around + // assertFalse("Should have canceled at least one query", queryIdsSet.isEmpty()); + if (log.isInfoEnabled()) { + log.info("Cancelled {} queries", queryIdsSet.size()); } - @Test - public void testCheckSpecificQueryStatus() throws Exception { - ModifiableSolrParams params = new ModifiableSolrParams(); - params.set("taskUUID", 
"25"); + assertEquals( + "Total query count did not match the expected value", + queryIdsSet.size() + notFoundIdsSet.size(), + 100); + } - GenericSolrRequest request = new GenericSolrRequest(SolrRequest.METHOD.GET, "/tasks/list", params); - NamedList queryResponse = cluster.getSolrClient().request(request); + @Test + public void testListCancellableQueries() throws Exception { + for (int i = 0; i < 50; i++) { + executeQueryAsync(Integer.toString(i)); + } - String result = (String) queryResponse.get("taskStatus"); + NamedList result = listTasks(); - assertTrue(result.contains("inactive")); - } + Iterator> iterator = result.iterator(); - private CompletableFuture cancelQuery(final String queryID, final int sleepTime, Set cancelledQueryIdsSet, - Set notFoundQueryIdSet) { - return CompletableFuture.runAsync(() -> { - ModifiableSolrParams params = new ModifiableSolrParams(); + Set presentQueryIDs = new HashSet<>(); - params.set("queryUUID", queryID); - SolrRequest request = new QueryRequest(params); - request.setPath("/tasks/cancel"); + while (iterator.hasNext()) { + Map.Entry entry = iterator.next(); - // Wait for some time to let the query start - try { - if (sleepTime > 0) { - Thread.sleep(sleepTime); - } - - try { - NamedList queryResponse; - - queryResponse = cluster.getSolrClient().request(request); - - int responseCode = (int) queryResponse.get("responseCode"); - - if (responseCode == 200 /* HTTP OK */) { - cancelledQueryIdsSet.add(Integer.parseInt(queryID)); - } else if (responseCode == 404 /* HTTP NOT FOUND */) { - notFoundQueryIdSet.add(Integer.parseInt(queryID)); - } - } catch (Exception e) { - throw new CompletionException(e); - } - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - throw new CompletionException(e); - } - }, cancelExecutor); + presentQueryIDs.add(Integer.parseInt(entry.getKey())); } - public void executeQuery(String queryId) throws Exception { - ModifiableSolrParams params = new ModifiableSolrParams(); - - params.set("q", "*:*"); - params.set("canCancel", "true"); + assertThat(presentQueryIDs.size(), betweenInclusive(0, 50)); + for (int value : presentQueryIDs) { + assertThat(value, betweenInclusive(0, 49)); + } + } + + private static Matcher betweenInclusive(int lower, int upper) { + return Matchers.allOf(Matchers.greaterThanOrEqualTo(lower), Matchers.lessThanOrEqualTo(upper)); + } + + @SuppressWarnings("unchecked") + private NamedList listTasks() throws SolrServerException, IOException { + NamedList response = + cluster + .getSolrClient() + .request(new GenericSolrRequest(SolrRequest.METHOD.GET, "/tasks/list", null)); + return (NamedList) response.get("taskList"); + } + + @Test + public void testCheckSpecificQueryStatus() throws Exception { + ModifiableSolrParams params = new ModifiableSolrParams(); + params.set("taskUUID", "25"); + + GenericSolrRequest request = + new GenericSolrRequest(SolrRequest.METHOD.GET, "/tasks/list", params); + NamedList queryResponse = cluster.getSolrClient().request(request); + + String result = (String) queryResponse.get("taskStatus"); + + assertTrue(result.contains("inactive")); + } + + private CompletableFuture cancelQuery( + final String queryID, + final int sleepTime, + Set cancelledQueryIdsSet, + Set notFoundQueryIdSet) { + return CompletableFuture.runAsync( + () -> { + ModifiableSolrParams params = new ModifiableSolrParams(); + + params.set("queryUUID", queryID); + SolrRequest request = new QueryRequest(params); + request.setPath("/tasks/cancel"); + + // Wait for some time to let the query start + try { + 
if (sleepTime > 0) { + Thread.sleep(sleepTime); + } - if (queryId != null) { - params.set("queryUUID", queryId); - } + try { + NamedList queryResponse; - SolrRequest request = new QueryRequest(params); + queryResponse = cluster.getSolrClient().request(request); - cluster.getSolrClient().request(request); - } + int responseCode = (int) queryResponse.get("responseCode"); - public CompletableFuture executeQueryAsync(String queryId) { - return CompletableFuture.runAsync(() -> { - try { - executeQuery(queryId); + if (responseCode == 200 /* HTTP OK */) { + cancelledQueryIdsSet.add(Integer.parseInt(queryID)); + } else if (responseCode == 404 /* HTTP NOT FOUND */) { + notFoundQueryIdSet.add(Integer.parseInt(queryID)); + } } catch (Exception e) { - throw new CompletionException(e); + throw new CompletionException(e); } - }, queryExecutor); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new CompletionException(e); + } + }, + cancelExecutor); + } + + public void executeQuery(String queryId) throws Exception { + ModifiableSolrParams params = new ModifiableSolrParams(); + + params.set("q", "*:*"); + params.set("canCancel", "true"); + + if (queryId != null) { + params.set("queryUUID", queryId); } + + SolrRequest request = new QueryRequest(params); + + cluster.getSolrClient().request(request); + } + + public CompletableFuture executeQueryAsync(String queryId) { + return CompletableFuture.runAsync( + () -> { + try { + executeQuery(queryId); + } catch (Exception e) { + throw new CompletionException(e); + } + }, + queryExecutor); + } } diff --git a/solr/core/src/test/org/apache/solr/search/TestTermQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestTermQParserPlugin.java index 829d01f5c9f..be31960528b 100644 --- a/solr/core/src/test/org/apache/solr/search/TestTermQParserPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/TestTermQParserPlugin.java @@ -28,14 +28,95 @@ public class TestTermQParserPlugin extends SolrTestCaseJ4 { public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema.xml"); - assertU(adoc("id","1", "author_s1", "Lev Grossman", "t_title", "The Magicians", "cat_s", "fantasy", "pubyear_i", "2009")); - assertU(adoc("id", "2", "author_s1", "Robert Jordan", "t_title", "The Eye of the World", "cat_s", "fantasy", "cat_s", "childrens", "pubyear_i", "1990")); - assertU(adoc("id", "3", "author_s1", "Robert Jordan", "t_title", "The Great Hunt", "cat_s", "fantasy", "cat_s", "childrens", "pubyear_i", "1990")); - assertU(adoc("id", "4", "author_s1", "N.K. Jemisin", "t_title", "The Fifth Season", "cat_s", "fantasy", "pubyear_i", "2015")); + assertU( + adoc( + "id", + "1", + "author_s1", + "Lev Grossman", + "t_title", + "The Magicians", + "cat_s", + "fantasy", + "pubyear_i", + "2009")); + assertU( + adoc( + "id", + "2", + "author_s1", + "Robert Jordan", + "t_title", + "The Eye of the World", + "cat_s", + "fantasy", + "cat_s", + "childrens", + "pubyear_i", + "1990")); + assertU( + adoc( + "id", + "3", + "author_s1", + "Robert Jordan", + "t_title", + "The Great Hunt", + "cat_s", + "fantasy", + "cat_s", + "childrens", + "pubyear_i", + "1990")); + assertU( + adoc( + "id", + "4", + "author_s1", + "N.K. Jemisin", + "t_title", + "The Fifth Season", + "cat_s", + "fantasy", + "pubyear_i", + "2015")); assertU(commit()); - assertU(adoc("id", "5", "author_s1", "Ursula K. Le Guin", "t_title", "The Dispossessed", "cat_s", "scifi", "pubyear_i", "1974")); - assertU(adoc("id", "6", "author_s1", "Ursula K. 
Le Guin", "t_title", "The Left Hand of Darkness", "cat_s", "scifi", "pubyear_i", "1969")); - assertU(adoc("id", "7", "author_s1", "Isaac Asimov", "t_title", "Foundation", "cat_s", "scifi", "pubyear_i", "1951")); + assertU( + adoc( + "id", + "5", + "author_s1", + "Ursula K. Le Guin", + "t_title", + "The Dispossessed", + "cat_s", + "scifi", + "pubyear_i", + "1974")); + assertU( + adoc( + "id", + "6", + "author_s1", + "Ursula K. Le Guin", + "t_title", + "The Left Hand of Darkness", + "cat_s", + "scifi", + "pubyear_i", + "1969")); + assertU( + adoc( + "id", + "7", + "author_s1", + "Isaac Asimov", + "t_title", + "Foundation", + "cat_s", + "scifi", + "pubyear_i", + "1951")); assertU(commit()); } @@ -45,51 +126,58 @@ public void testTextTermsQuery() { ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "{!term f=t_title}left"); params.add("sort", "id asc"); - assertQ(req(params, "indent", "on"), "*[count(//doc)=1]", - "//result/doc[1]/str[@name='id'][.='6']" - ); + assertQ( + req(params, "indent", "on"), "*[count(//doc)=1]", "//result/doc[1]/str[@name='id'][.='6']"); // Single term value params = new ModifiableSolrParams(); params.add("q", "{!term f=t_title}the"); params.add("sort", "id asc"); assertQ(req(params, "indent", "on"), "*[count(//doc)=0]"); } - + @Test public void testMissingField() { - assertQEx("Expecting bad request", "Missing field to query", req("q", "{!term}childrens"), SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Expecting bad request", + "Missing field to query", + req("q", "{!term}childrens"), + SolrException.ErrorCode.BAD_REQUEST); } @Test public void testTermsMethodEquivalency() { // Single-valued field ModifiableSolrParams params = new ModifiableSolrParams(); - params.add("q","{!term f=author_s1}Robert Jordan"); + params.add("q", "{!term f=author_s1}Robert Jordan"); params.add("sort", "id asc"); - assertQ(req(params, "indent", "on"), "*[count(//doc)=2]", + assertQ( + req(params, "indent", "on"), + "*[count(//doc)=2]", "//result/doc[1]/str[@name='id'][.='2']", - "//result/doc[2]/str[@name='id'][.='3']" - ); + "//result/doc[2]/str[@name='id'][.='3']"); // Multi-valued field params = new ModifiableSolrParams(); params.add("q", "{!term f=cat_s}childrens"); params.add("sort", "id asc"); - assertQ(req(params, "indent", "on"), "*[count(//doc)=2]", + assertQ( + req(params, "indent", "on"), + "*[count(//doc)=2]", "//result/doc[1]/str[@name='id'][.='2']", - "//result/doc[2]/str[@name='id'][.='3']" - ); + "//result/doc[2]/str[@name='id'][.='3']"); // Numeric field params = new ModifiableSolrParams(); params.add("q", "{!term f=pubyear_i}2009"); params.add("sort", "id asc"); - assertQ(req(params, "indent", "on"), "*[count(//doc)=1]", "//result/doc[1]/str[@name='id'][.='1']"); + assertQ( + req(params, "indent", "on"), "*[count(//doc)=1]", "//result/doc[1]/str[@name='id'][.='1']"); // Numeric field params = new ModifiableSolrParams(); params.add("q", "{!term f=pubyear_i}2009"); params.add("sort", "id asc"); - assertQ(req(params, "indent", "on"), "*[count(//doc)=1]", "//result/doc[1]/str[@name='id'][.='1']"); + assertQ( + req(params, "indent", "on"), "*[count(//doc)=1]", "//result/doc[1]/str[@name='id'][.='1']"); } } diff --git a/solr/core/src/test/org/apache/solr/search/TestTermsQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestTermsQParserPlugin.java index 39130ae92dd..aa250dba243 100644 --- a/solr/core/src/test/org/apache/solr/search/TestTermsQParserPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/TestTermsQParserPlugin.java @@ 
-28,14 +28,95 @@ public class TestTermsQParserPlugin extends SolrTestCaseJ4 { public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema.xml"); - assertU(adoc("id","1", "author_s1", "Lev Grossman", "t_title", "The Magicians", "cat_s", "fantasy", "pubyear_i", "2009")); - assertU(adoc("id", "2", "author_s1", "Robert Jordan", "t_title", "The Eye of the World", "cat_s", "fantasy", "cat_s", "childrens", "pubyear_i", "1990")); - assertU(adoc("id", "3", "author_s1", "Robert Jordan", "t_title", "The Great Hunt", "cat_s", "fantasy", "cat_s", "childrens", "pubyear_i", "1990")); - assertU(adoc("id", "4", "author_s1", "N.K. Jemisin", "t_title", "The Fifth Season", "cat_s", "fantasy", "pubyear_i", "2015")); + assertU( + adoc( + "id", + "1", + "author_s1", + "Lev Grossman", + "t_title", + "The Magicians", + "cat_s", + "fantasy", + "pubyear_i", + "2009")); + assertU( + adoc( + "id", + "2", + "author_s1", + "Robert Jordan", + "t_title", + "The Eye of the World", + "cat_s", + "fantasy", + "cat_s", + "childrens", + "pubyear_i", + "1990")); + assertU( + adoc( + "id", + "3", + "author_s1", + "Robert Jordan", + "t_title", + "The Great Hunt", + "cat_s", + "fantasy", + "cat_s", + "childrens", + "pubyear_i", + "1990")); + assertU( + adoc( + "id", + "4", + "author_s1", + "N.K. Jemisin", + "t_title", + "The Fifth Season", + "cat_s", + "fantasy", + "pubyear_i", + "2015")); assertU(commit()); - assertU(adoc("id", "5", "author_s1", "Ursula K. Le Guin", "t_title", "The Dispossessed", "cat_s", "scifi", "pubyear_i", "1974")); - assertU(adoc("id", "6", "author_s1", "Ursula K. Le Guin", "t_title", "The Left Hand of Darkness", "cat_s", "scifi", "pubyear_i", "1969")); - assertU(adoc("id", "7", "author_s1", "Isaac Asimov", "t_title", "Foundation", "cat_s", "scifi", "pubyear_i", "1951")); + assertU( + adoc( + "id", + "5", + "author_s1", + "Ursula K. Le Guin", + "t_title", + "The Dispossessed", + "cat_s", + "scifi", + "pubyear_i", + "1974")); + assertU( + adoc( + "id", + "6", + "author_s1", + "Ursula K. 
Le Guin", + "t_title", + "The Left Hand of Darkness", + "cat_s", + "scifi", + "pubyear_i", + "1969")); + assertU( + adoc( + "id", + "7", + "author_s1", + "Isaac Asimov", + "t_title", + "Foundation", + "cat_s", + "scifi", + "pubyear_i", + "1951")); assertU(commit()); } @@ -45,18 +126,18 @@ public void testTextTermsQuery() { ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "{!terms f=t_title}left"); params.add("sort", "id asc"); - assertQ(req(params, "indent", "on"), "*[count(//doc)=1]", - "//result/doc[1]/str[@name='id'][.='6']" - ); + assertQ( + req(params, "indent", "on"), "*[count(//doc)=1]", "//result/doc[1]/str[@name='id'][.='6']"); // Multiple term values params = new ModifiableSolrParams(); params.add("q", "{!terms f=t_title}left,hunt"); params.add("sort", "id asc"); - assertQ(req(params, "indent", "on"), "*[count(//doc)=2]", + assertQ( + req(params, "indent", "on"), + "*[count(//doc)=2]", "//result/doc[1]/str[@name='id'][.='3']", - "//result/doc[2]/str[@name='id'][.='6']" - ); + "//result/doc[2]/str[@name='id'][.='6']"); } @Test @@ -64,18 +145,23 @@ public void testTermsUsingNonDefaultSeparator() { ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "{!terms f=cat_s separator=|}childrens|scifi"); params.add("sort", "id asc"); - assertQ(req(params, "indent", "on"), "*[count(//doc)=5]", + assertQ( + req(params, "indent", "on"), + "*[count(//doc)=5]", "//result/doc[1]/str[@name='id'][.='2']", "//result/doc[2]/str[@name='id'][.='3']", "//result/doc[3]/str[@name='id'][.='5']", "//result/doc[4]/str[@name='id'][.='6']", - "//result/doc[5]/str[@name='id'][.='7']" - ); + "//result/doc[5]/str[@name='id'][.='7']"); } - + @Test public void testMissingField() { - assertQEx("Expecting bad request", "Missing field to query", req("q", "{!terms}childrens|scifi"), SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Expecting bad request", + "Missing field to query", + req("q", "{!terms}childrens|scifi"), + SolrException.ErrorCode.BAD_REQUEST); } class TermsParams { @@ -87,82 +173,97 @@ public TermsParams(String method, boolean cache) { this.cache = cache; } - public String buildQuery(String fieldName, String commaDelimitedTerms) { - return "{!terms f=" + fieldName + " method=" + method + " cache=" + cache + "}" + commaDelimitedTerms; + return "{!terms f=" + + fieldName + + " method=" + + method + + " cache=" + + cache + + "}" + + commaDelimitedTerms; } } @Test public void testTermsMethodEquivalency() { // Run queries with a variety of 'method' and postfilter options. 
- final TermsParams[] methods = new TermsParams[] { - new TermsParams("termsFilter", true), - new TermsParams("termsFilter", false), - new TermsParams("booleanQuery", true), - new TermsParams("booleanQuery", false), - new TermsParams("automaton", true), - new TermsParams("automaton", false), - new TermsParams("docValuesTermsFilter", true), - new TermsParams("docValuesTermsFilter", false), - new TermsParams("docValuesTermsFilterTopLevel", true), - new TermsParams("docValuesTermsFilterTopLevel", false), - new TermsParams("docValuesTermsFilterPerSegment", true), - new TermsParams("docValuesTermsFilterPerSegment", false) - }; + final TermsParams[] methods = + new TermsParams[] { + new TermsParams("termsFilter", true), + new TermsParams("termsFilter", false), + new TermsParams("booleanQuery", true), + new TermsParams("booleanQuery", false), + new TermsParams("automaton", true), + new TermsParams("automaton", false), + new TermsParams("docValuesTermsFilter", true), + new TermsParams("docValuesTermsFilter", false), + new TermsParams("docValuesTermsFilterTopLevel", true), + new TermsParams("docValuesTermsFilterTopLevel", false), + new TermsParams("docValuesTermsFilterPerSegment", true), + new TermsParams("docValuesTermsFilterPerSegment", false) + }; for (TermsParams method : methods) { // Single-valued field, single term value ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", method.buildQuery("author_s1", "Robert Jordan")); params.add("sort", "id asc"); - assertQ(req(params, "indent", "on"), "*[count(//doc)=2]", + assertQ( + req(params, "indent", "on"), + "*[count(//doc)=2]", "//result/doc[1]/str[@name='id'][.='2']", - "//result/doc[2]/str[@name='id'][.='3']" - ); + "//result/doc[2]/str[@name='id'][.='3']"); // Single-valued field, multiple term values params = new ModifiableSolrParams(); params.add("q", method.buildQuery("author_s1", "Robert Jordan,Isaac Asimov")); params.add("sort", "id asc"); - assertQ(req(params, "indent", "on"), "*[count(//doc)=3]", + assertQ( + req(params, "indent", "on"), + "*[count(//doc)=3]", "//result/doc[1]/str[@name='id'][.='2']", "//result/doc[2]/str[@name='id'][.='3']", - "//result/doc[3]/str[@name='id'][.='7']" - ); + "//result/doc[3]/str[@name='id'][.='7']"); // Multi-valued field, single term value params = new ModifiableSolrParams(); params.add("q", method.buildQuery("cat_s", "childrens")); params.add("sort", "id asc"); - assertQ(req(params, "indent", "on"), "*[count(//doc)=2]", + assertQ( + req(params, "indent", "on"), + "*[count(//doc)=2]", "//result/doc[1]/str[@name='id'][.='2']", - "//result/doc[2]/str[@name='id'][.='3']" - ); + "//result/doc[2]/str[@name='id'][.='3']"); // Multi-valued field, multiple term values params = new ModifiableSolrParams(); params.add("q", method.buildQuery("cat_s", "childrens,scifi")); params.add("sort", "id asc"); - assertQ(req(params, "indent", "on"), "*[count(//doc)=5]", + assertQ( + req(params, "indent", "on"), + "*[count(//doc)=5]", "//result/doc[1]/str[@name='id'][.='2']", "//result/doc[2]/str[@name='id'][.='3']", "//result/doc[3]/str[@name='id'][.='5']", "//result/doc[4]/str[@name='id'][.='6']", - "//result/doc[5]/str[@name='id'][.='7']" - ); + "//result/doc[5]/str[@name='id'][.='7']"); // Numeric field params = new ModifiableSolrParams(); params.add("q", method.buildQuery("pubyear_i", "2009")); params.add("sort", "id asc"); - // Test schema randomizes between Trie and Point. "terms" is supported for "trie" but not "Point" + // Test schema randomizes between Trie and Point. 
"terms" is supported for "trie" but not + // "Point" final String numericFieldType = System.getProperty("solr.tests.IntegerFieldType"); if (numericFieldType.contains("Point")) { assertQEx("Expected 'terms' query on PointField to fail", req(params, "indent", "on"), 400); } else { - assertQ(req(params, "indent", "on"), "*[count(//doc)=1]", "//result/doc[1]/str[@name='id'][.='1']"); + assertQ( + req(params, "indent", "on"), + "*[count(//doc)=1]", + "//result/doc[1]/str[@name='id'][.='1']"); } } } diff --git a/solr/core/src/test/org/apache/solr/search/TestTrieFacet.java b/solr/core/src/test/org/apache/solr/search/TestTrieFacet.java index ca0fd71fc6e..2bde49506ae 100644 --- a/solr/core/src/test/org/apache/solr/search/TestTrieFacet.java +++ b/solr/core/src/test/org/apache/solr/search/TestTrieFacet.java @@ -17,76 +17,80 @@ package org.apache.solr.search; import org.apache.lucene.util.TestUtil; - +import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.common.SolrInputDocument; import org.apache.solr.schema.SchemaField; import org.apache.solr.schema.TrieIntField; -import org.apache.solr.common.SolrInputDocument; -import org.apache.solr.SolrTestCaseJ4; - import org.junit.BeforeClass; @Deprecated public class TestTrieFacet extends SolrTestCaseJ4 { - final static int MIN_VALUE = 20; - final static int MAX_VALUE = 60; - - final static String TRIE_INT_P8_S_VALUED = "foo_ti1"; - final static String TRIE_INT_P8_M_VALUED = "foo_ti"; - - final static String TRIE_INT_P0_S_VALUED = "foo_i1"; - final static String TRIE_INT_P0_M_VALUED = "foo_i"; - - final static String[] M_VALUED = new String[] { TRIE_INT_P0_M_VALUED, TRIE_INT_P8_M_VALUED }; - final static String[] S_VALUED = new String[] { TRIE_INT_P0_S_VALUED, TRIE_INT_P8_S_VALUED }; - - final static String[] P0 = new String[] { TRIE_INT_P0_M_VALUED, TRIE_INT_P0_S_VALUED }; - final static String[] P8 = new String[] { TRIE_INT_P8_M_VALUED, TRIE_INT_P8_S_VALUED }; - + static final int MIN_VALUE = 20; + static final int MAX_VALUE = 60; + + static final String TRIE_INT_P8_S_VALUED = "foo_ti1"; + static final String TRIE_INT_P8_M_VALUED = "foo_ti"; + + static final String TRIE_INT_P0_S_VALUED = "foo_i1"; + static final String TRIE_INT_P0_M_VALUED = "foo_i"; + + static final String[] M_VALUED = new String[] {TRIE_INT_P0_M_VALUED, TRIE_INT_P8_M_VALUED}; + static final String[] S_VALUED = new String[] {TRIE_INT_P0_S_VALUED, TRIE_INT_P8_S_VALUED}; + + static final String[] P0 = new String[] {TRIE_INT_P0_M_VALUED, TRIE_INT_P0_S_VALUED}; + static final String[] P8 = new String[] {TRIE_INT_P8_M_VALUED, TRIE_INT_P8_S_VALUED}; + static int NUM_DOCS; private static TrieIntField assertCastFieldType(SchemaField f) { - assertTrue("who changed the schema? test isn't valid: " + f.getName(), - f.getType() instanceof TrieIntField); + assertTrue( + "who changed the schema? 
test isn't valid: " + f.getName(), + f.getType() instanceof TrieIntField); return (TrieIntField) f.getType(); } - + @BeforeClass public static void beforeClass() throws Exception { // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); - initCore("solrconfig-tlog.xml","schema.xml"); + initCore("solrconfig-tlog.xml", "schema.xml"); // don't break the test assertTrue("min value must be less then max value", MIN_VALUE < MAX_VALUE); assertTrue("min value must be greater then zero", 0 < MIN_VALUE); - + // sanity check no one breaks the schema out from under us... for (String f : M_VALUED) { SchemaField sf = h.getCore().getLatestSchema().getField(f); assertTrue("who changed the schema? test isn't valid: " + f, sf.multiValued()); } - + for (String f : S_VALUED) { SchemaField sf = h.getCore().getLatestSchema().getField(f); assertFalse("who changed the schema? test isn't valid: " + f, sf.multiValued()); } - if (! Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) { + if (!Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) { for (String f : P0) { SchemaField sf = h.getCore().getLatestSchema().getField(f); - assertEquals("who changed the schema? test isn't valid: " + f, - 0, assertCastFieldType(sf).getPrecisionStep()); + assertEquals( + "who changed the schema? test isn't valid: " + f, + 0, + assertCastFieldType(sf).getPrecisionStep()); } for (String f : P8) { SchemaField sf = h.getCore().getLatestSchema().getField(f); - assertEquals("who changed the schema? test isn't valid: " + f, - 8, assertCastFieldType(sf).getPrecisionStep()); + assertEquals( + "who changed the schema? 
test isn't valid: " + f, + 8, + assertCastFieldType(sf).getPrecisionStep()); } } - - // we don't need a lot of docs -- at least one failure only had ~1000 + + // we don't need a lot of docs -- at least one failure only had ~1000 NUM_DOCS = TestUtil.nextInt(random(), 200, 1500); { // ensure at least one doc has every valid value in the multivalued fields @@ -100,8 +104,8 @@ public static void beforeClass() throws Exception { } // randomized docs (note: starting at i=1) - for (int i=1; i < NUM_DOCS; i++) { - SolrInputDocument doc = sdoc("id", i+""); + for (int i = 1; i < NUM_DOCS; i++) { + SolrInputDocument doc = sdoc("id", i + ""); if (useField()) { int val = TestUtil.nextInt(random(), MIN_VALUE, MAX_VALUE); for (String f : S_VALUED) { @@ -112,7 +116,7 @@ public static void beforeClass() throws Exception { int numMulti = atLeast(1); while (0 < numMulti--) { int val = TestUtil.nextInt(random(), MIN_VALUE, MAX_VALUE); - for (String f: M_VALUED) { + for (String f : M_VALUED) { doc.addField(f, val); } } @@ -122,53 +126,69 @@ public static void beforeClass() throws Exception { assertU(commit()); } - /** - * Similar to usually() but we want it to happen just as often regardless - * of test multiplier and nightly status + /** + * Similar to usually() but we want it to happen just as often regardless of test multiplier and + * nightly status */ private static boolean useField() { - return 0 != TestUtil.nextInt(random(), 0, 9); + return 0 != TestUtil.nextInt(random(), 0, 9); } private static void doTestNoZeros(final String field, final String method) throws Exception { - assertQ("sanity check # docs in index: " + NUM_DOCS, - req("q", "*:*", "rows", "0") - ,"//result[@numFound="+NUM_DOCS+"]"); - assertQ("sanity check that no docs match 0 failed", - req("q", field+":0", "rows", "0") - ,"//result[@numFound=0]"); - assertQ("sanity check that no docs match [0 TO 0] failed", - req("q", field+":[0 TO 0]", "rows", "0") - ,"//result[@numFound=0]"); - - assertQ("*:* facet with mincount 0 found unexpected 0 value", - req("q", "*:*" - ,"rows", "0" - ,"indent","true" - ,"facet", "true" - ,"facet.field", field - ,"facet.limit", "-1" - ,"facet.mincount", "0" - ,"facet.method", method - ) - // trivial sanity check we're at least getting facet counts in output - ,"*[count(//lst[@name='facet_fields']/lst[@name='"+field+"']/int)!=0]" - // main point of test - ,"*[count(//lst[@name='facet_fields']/lst[@name='"+field+"']/int[@name='0'])=0]" - ); + assertQ( + "sanity check # docs in index: " + NUM_DOCS, + req("q", "*:*", "rows", "0"), + "//result[@numFound=" + NUM_DOCS + "]"); + assertQ( + "sanity check that no docs match 0 failed", + req("q", field + ":0", "rows", "0"), + "//result[@numFound=0]"); + assertQ( + "sanity check that no docs match [0 TO 0] failed", + req("q", field + ":[0 TO 0]", "rows", "0"), + "//result[@numFound=0]"); + + assertQ( + "*:* facet with mincount 0 found unexpected 0 value", + req( + "q", + "*:*", + "rows", + "0", + "indent", + "true", + "facet", + "true", + "facet.field", + field, + "facet.limit", + "-1", + "facet.mincount", + "0", + "facet.method", + method) + // trivial sanity check we're at least getting facet counts in output + , + "*[count(//lst[@name='facet_fields']/lst[@name='" + field + "']/int)!=0]" + // main point of test + , + "*[count(//lst[@name='facet_fields']/lst[@name='" + field + "']/int[@name='0'])=0]"); } // enum public void testSingleValuedTrieP0_enum() throws Exception { doTestNoZeros(TRIE_INT_P0_S_VALUED, "enum"); } + public void testMultiValuedTrieP0_enum() throws 
Exception { doTestNoZeros(TRIE_INT_P0_M_VALUED, "enum"); } + public void testSingleValuedTrieP8_enum() throws Exception { doTestNoZeros(TRIE_INT_P8_S_VALUED, "enum"); } + public void testMultiValuedTrieP8_enum() throws Exception { doTestNoZeros(TRIE_INT_P8_M_VALUED, "enum"); } @@ -177,12 +197,15 @@ public void testMultiValuedTrieP8_enum() throws Exception { public void testSingleValuedTrieP0_fc() throws Exception { doTestNoZeros(TRIE_INT_P0_S_VALUED, "fc"); } + public void testMultiValuedTrieP0_fc() throws Exception { doTestNoZeros(TRIE_INT_P0_M_VALUED, "fc"); } + public void testSingleValuedTrieP8_fc() throws Exception { doTestNoZeros(TRIE_INT_P8_S_VALUED, "fc"); } + public void testMultiValuedTrieP8_fc() throws Exception { doTestNoZeros(TRIE_INT_P8_M_VALUED, "fc"); } @@ -191,15 +214,16 @@ public void testMultiValuedTrieP8_fc() throws Exception { public void testSingleValuedTrieP0_fcs() throws Exception { doTestNoZeros(TRIE_INT_P0_S_VALUED, "fcs"); } + public void testMultiValuedTrieP0_fcs() throws Exception { doTestNoZeros(TRIE_INT_P0_M_VALUED, "fcs"); } + public void testSingleValuedTrieP8_fcs() throws Exception { doTestNoZeros(TRIE_INT_P8_S_VALUED, "fcs"); } + public void testMultiValuedTrieP8_fcs() throws Exception { doTestNoZeros(TRIE_INT_P8_M_VALUED, "fcs"); } - } - diff --git a/solr/core/src/test/org/apache/solr/search/TestValueSourceCache.java b/solr/core/src/test/org/apache/solr/search/TestValueSourceCache.java index 3b1c228d3e3..b45fafab7f6 100644 --- a/solr/core/src/test/org/apache/solr/search/TestValueSourceCache.java +++ b/solr/core/src/test/org/apache/solr/search/TestValueSourceCache.java @@ -33,7 +33,7 @@ public static void beforeClass() throws Exception { } static QParser _func; - + @AfterClass public static void afterClass() throws Exception { _func = null; @@ -44,7 +44,8 @@ Query getQuery(String query) throws SyntaxError { return _func.parse(); } - // This is actually also tested by the tests for val_d1 below, but the bug was reported against geodist()... + // This is actually also tested by the tests for val_d1 below, but the bug was reported against + // geodist()... 
@Test public void testGeodistSource() throws SyntaxError { Query q_home = getQuery("geodist(home, 45.0, 43.0)"); @@ -56,28 +57,26 @@ public void testGeodistSource() throws SyntaxError { @Test public void testNumerics() throws SyntaxError { - String[] templates = new String[]{ - "sum(#v0, #n0)", - "product(pow(#v0,#n0),#v1,#n1)", - "log(#v0)", - "log(sum(#n0,#v0,#v1,#n1))", - "scale(map(#v0,#n0,#n1,#n2),#n3,#n4)", - }; - String[] numbers = new String[]{ - "1,2,3,4,5", - "1.0,2.0,3.0,4.0,5.0", - "1,2.0,3,4.0,5", - "1.0,2,3.0,4,5.0", - "1000000,2000000,3000000,4000000,5000000" - }; - String[] types = new String[]{ - "val1_f1", - "val1_d1", - "val1_b1", - "val1_i1", - "val1_l1", - "val1_b1", - }; + String[] templates = + new String[] { + "sum(#v0, #n0)", + "product(pow(#v0,#n0),#v1,#n1)", + "log(#v0)", + "log(sum(#n0,#v0,#v1,#n1))", + "scale(map(#v0,#n0,#n1,#n2),#n3,#n4)", + }; + String[] numbers = + new String[] { + "1,2,3,4,5", + "1.0,2.0,3.0,4.0,5.0", + "1,2.0,3,4.0,5", + "1.0,2,3.0,4,5.0", + "1000000,2000000,3000000,4000000,5000000" + }; + String[] types = + new String[] { + "val1_f1", "val1_d1", "val1_b1", "val1_i1", "val1_l1", "val1_b1", + }; for (String template : templates) { for (String nums : numbers) { for (String type : types) { @@ -105,7 +104,7 @@ void tryQuerySameTypes(String template, String numbers, String type) throws Synt s3 = s3.replace(patV, type2).replace(patN, numParts[idx]); } - //SolrQueryRequest req1 = req( "q","*:*", "fq", s1); + // SolrQueryRequest req1 = req( "q","*:*", "fq", s1); Query q1 = getQuery(s1); Query q2 = getQuery(s2); @@ -114,8 +113,8 @@ void tryQuerySameTypes(String template, String numbers, String type) throws Synt QueryUtils.checkUnequal(q1, q3); } - // These should always and forever fail, and would have failed without the fixes for 2829, but why not make - // some more tests just in case??? + // These should always and forever fail, and would have failed without the fixes for 2829, but why + // not make some more tests just in case??? 
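
The property these templates exercise is that two function queries differing only in the bound field's type (or in their constants) must never compare equal, because Solr keys its query and filter caches on Query#equals/hashCode. A minimal, self-contained sketch of that invariant using Lucene's stock value sources -- the field name below matches this test's schema, while the class name and main() harness are illustrative assumptions:

import org.apache.lucene.queries.function.FunctionQuery;
import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;
import org.apache.lucene.queries.function.valuesource.FloatFieldSource;

public class ValueSourceEqualitySketch {
  public static void main(String[] args) {
    // Same field name, different concrete ValueSource type: if these compared
    // equal, a cached result computed for one type could be served for the
    // other -- the shape of the SOLR-2829 bug guarded against here.
    FunctionQuery asFloat = new FunctionQuery(new FloatFieldSource("val1_f1"));
    FunctionQuery asDouble = new FunctionQuery(new DoubleFieldSource("val1_f1"));
    if (asFloat.equals(asDouble)) {
      throw new AssertionError("distinct value sources must not be cache-equal");
    }
  }
}

FloatFieldSource and DoubleFieldSource implement equals()/hashCode() per concrete class, which is exactly what keeps the cache keys distinct.
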
void tryQueryDiffTypes(String template, String numbers, String[] types) throws SyntaxError { String s1 = template; String s2 = template; diff --git a/solr/core/src/test/org/apache/solr/search/TestXmlQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestXmlQParserPlugin.java index 3c4edae0edc..e522e5f5c42 100644 --- a/solr/core/src/test/org/apache/solr/search/TestXmlQParserPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/TestXmlQParserPlugin.java @@ -54,14 +54,15 @@ public void testGoodbyeQuery() throws Exception { @Test public void testHandyQuery() throws Exception { final int numDocs = random().nextInt(10); - final String q = ""; + final String q = + ""; implTestQuery(numDocs, q, numDocs); } public void implTestQuery(int numDocs, String q, int expectedCount) throws Exception { // add some documents - for (int ii=1; ii<=numDocs; ++ii) { - String[] doc = {"id",ii+"0"}; + for (int ii = 1; ii <= numDocs; ++ii) { + String[] doc = {"id", ii + "0"}; assertU(adoc(doc)); if (random().nextBoolean()) { assertU(commit()); @@ -72,7 +73,6 @@ public void implTestQuery(int numDocs, String q, int expectedCount) throws Excep ModifiableSolrParams params = new ModifiableSolrParams(); params.add("defType", "testxmlparser"); params.add("q", q); - assertQ(req(params), "*[count(//doc)="+expectedCount+"]"); + assertQ(req(params), "*[count(//doc)=" + expectedCount + "]"); } - } diff --git a/solr/core/src/test/org/apache/solr/search/facet/DistributedFacetSimpleRefinementLongTailTest.java b/solr/core/src/test/org/apache/solr/search/facet/DistributedFacetSimpleRefinementLongTailTest.java index 73d5d5190de..39f850b07b5 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/DistributedFacetSimpleRefinementLongTailTest.java +++ b/solr/core/src/test/org/apache/solr/search/facet/DistributedFacetSimpleRefinementLongTailTest.java @@ -19,7 +19,6 @@ import java.util.Arrays; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; - import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.common.params.SolrParams; @@ -27,55 +26,67 @@ import org.junit.Test; /** - * A test the demonstrates some of the expected behavior fo "long tail" terms when using refine:simple - *
- * <b>NOTE:</b> This test ignores the control collection (in single node mode, there is no
- * need for the overrequesting, all the data is local -- so comparisons with it wouldn't
- * be valid in the cases we are testing here)
- * </p>
- * <p>
- * <b>NOTE:</b> This test is heavily inspired by (and uses the same indexed documents) as
- * {@link org.apache.solr.handler.component.DistributedFacetPivotLongTailTest} -- however the behavior of
- * <code>refine:simple</code> is "simpler" then the refinement logic used by
- * <code>facet.pivot</code> so the assertions in this test vary from that test.
- * </p>
+ * A test that demonstrates some of the expected behavior of "long tail" terms when using
+ * <code>refine:simple</code>
+ *
+ * <p><b>NOTE:</b> This test ignores the control collection (in single node mode, there is no need
+ * for the overrequesting, all the data is local -- so comparisons with it wouldn't be valid in the
+ * cases we are testing here)
+ *
+ * <p><b>NOTE:</b> This test is heavily inspired by (and uses the same indexed documents) as {@link
+ * org.apache.solr.handler.component.DistributedFacetPivotLongTailTest} -- however the behavior of
+ * <code>refine:simple</code> is "simpler" than the refinement logic used by <code>facet.pivot
+ * </code> so the assertions in this test vary from that test.
 */ public class DistributedFacetSimpleRefinementLongTailTest extends BaseDistributedSearchTestCase { - private static List ALL_STATS = Arrays.asList("min", "max", "sum", "stddev", "avg", "sumsq", "unique", - "missing", "countvals", "percentile", "variance", "hll"); - + private static List ALL_STATS = + Arrays.asList( + "min", + "max", + "sum", + "stddev", + "avg", + "sumsq", + "unique", + "missing", + "countvals", + "percentile", + "variance", + "hll"); + private final String STAT_FIELD; private String ALL_STATS_JSON = ""; public DistributedFacetSimpleRefinementLongTailTest() { // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); STAT_FIELD = random().nextBoolean() ? "stat_is" : "stat_i"; for (String stat : ALL_STATS) { - String val = stat.equals("percentile")? STAT_FIELD+",90": STAT_FIELD; + String val = stat.equals("percentile") ? STAT_FIELD + ",90" : STAT_FIELD; ALL_STATS_JSON += stat + ":'" + stat + "(" + val + ")',"; } } - + @Test @ShardsFixed(num = 3) public void test() throws Exception { buildIndexes(clients, STAT_FIELD); commit(); - + sanityCheckIndividualShards(); checkRefinementAndOverrequesting(); checkSubFacetStats(); - } - public static void buildIndexes(final List clients, final String statField) throws Exception { + public static void buildIndexes(final List clients, final String statField) + throws Exception { assertEquals("This indexing code assumes exactly 3 shards/clients", 3, clients.size()); - + final AtomicInteger docNum = new AtomicInteger(); final SolrClient shard0 = clients.get(0); final SolrClient shard1 = clients.get(1); @@ -84,9 +95,9 @@ public static void buildIndexes(final List clients, final String sta // the 5 top foo_s terms have 100 docs each on every shard for (int i = 0; i < 100; i++) { for (int j = 0; j < 5; j++) { - shard0.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "aaa"+j, statField, j * 13 - i)); - shard1.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "aaa"+j, statField, j * 3 + i)); - shard2.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "aaa"+j, statField, i * 7 + j)); + shard0.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "aaa" + j, statField, j * 13 - i)); + shard1.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "aaa" + j, statField, j * 3 + i)); + shard2.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "aaa" + j, statField, i * 7 + j)); } } @@ -94,8 +105,8 @@ public static void buildIndexes(final List clients, final String sta // on both shard0 & shard1 ("bbb_") for (int i = 0; i < 50; i++) { for (int j = 0; j < 20; j++) { - shard0.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "bbb"+j, statField, 0)); - shard1.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "bbb"+j, statField, 1)); + shard0.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "bbb" + j, statField, 0)); + shard1.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "bbb" + j, statField, 1)); } // distracting term appears only on shard2 50 times shard2.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "junkA")); @@ -110,36 +121,47 @@ 
public static void buildIndexes(final List clients, final String sta // for sub-pivot, shard0 & shard1 have 6 docs each for "tailB" // but the top 5 terms are ccc(0-4) -- 7 on each shard // (4 docs each have junk terms) - String sub_term = (i < 35) ? "ccc"+(i % 5) : ((i < 41) ? "tailB" : "junkA"); - shard0.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "tail", "bar_s", sub_term, statField, i)); - shard1.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "tail", "bar_s", sub_term, statField, i)); + String sub_term = (i < 35) ? "ccc" + (i % 5) : ((i < 41) ? "tailB" : "junkA"); + shard0.add( + sdoc("id", docNum.incrementAndGet(), "foo_s", "tail", "bar_s", sub_term, statField, i)); + shard1.add( + sdoc("id", docNum.incrementAndGet(), "foo_s", "tail", "bar_s", sub_term, statField, i)); // shard2's top 5 sub-pivot terms are junk only it has with 8 docs each // and 5 docs that use "tailB" // NOTE: none of these get statField ! ! - sub_term = (i < 40) ? "junkB"+(i % 5) : "tailB"; + sub_term = (i < 40) ? "junkB" + (i % 5) : "tailB"; shard2.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "tail", "bar_s", sub_term)); } // really long tail uncommon foo_s terms on shard2 for (int i = 0; i < 30; i++) { - // NOTE: using "Z" here so these sort before bbb0 when they tie for '1' instance each on shard2 - shard2.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "ZZZ"+i)); + // NOTE: using "Z" here so these sort before bbb0 when they tie for '1' instance each on + // shard2 + shard2.add(sdoc("id", docNum.incrementAndGet(), "foo_s", "ZZZ" + i)); } - } @SuppressWarnings({"unchecked", "rawtypes"}) private void sanityCheckIndividualShards() throws Exception { // sanity check that our expectations about each shard (non-distrib) are correct - SolrParams req = params( "q", "*:*", "distrib", "false", "json.facet", - " { foo:{ type:terms, limit:10, field:foo_s, facet:{ bar:{ type:terms, limit:10, field:bar_s }}}}"); + SolrParams req = + params( + "q", + "*:*", + "distrib", + "false", + "json.facet", + " { foo:{ type:terms, limit:10, field:foo_s, facet:{ bar:{ type:terms, limit:10, field:bar_s }}}}"); List[] shardFooBuckets = new List[clients.size()]; for (int i = 0; i < clients.size(); i++) { - shardFooBuckets[i] = (List) - ((NamedList)clients.get(i).query( req ).getResponse().get("facets")).get("foo").get("buckets"); + shardFooBuckets[i] = + (List) + ((NamedList) clients.get(i).query(req).getResponse().get("facets")) + .get("foo") + .get("buckets"); } // top 5 same on all shards @@ -147,7 +169,7 @@ private void sanityCheckIndividualShards() throws Exception { assertEquals(10, shardFooBuckets[i].size()); for (int j = 0; j < 5; j++) { NamedList bucket = shardFooBuckets[i].get(j); - assertEquals(bucket.toString(), "aaa"+j, bucket.get("val")); + assertEquals(bucket.toString(), "aaa" + j, bucket.get("val")); assertEquals(bucket.toString(), 100L, bucket.get("count")); } } @@ -170,9 +192,12 @@ private void sanityCheckIndividualShards() throws Exception { assertTrue(bucket.toString(), bucket.get("val").toString().startsWith("ZZZ")); assertEquals(bucket.toString(), 1L, bucket.get("count")); } - + // check 'bar' sub buckets on "tail" from shard2 - { List bar_buckets = (List) ((NamedList) shardFooBuckets[2].get(6).get("bar")).get("buckets"); + { + List bar_buckets = + (List) + ((NamedList) shardFooBuckets[2].get(6).get("bar")).get("buckets"); assertEquals(6, bar_buckets.size()); for (int j = 0; j < 5; j++) { NamedList bucket = bar_buckets.get(j); @@ -189,12 +214,23 @@ private void sanityCheckIndividualShards() throws 
Exception { private void checkRefinementAndOverrequesting() throws Exception { // // distributed queries // // - { // w/o refinement, the default overrequest isn't enough to find the long 'tail' *OR* the correct count for 'bbb0'... - List foo_buckets = (List) - ((NamedList) - queryServer( params( "q", "*:*", "shards", getShardsString(), "json.facet", - "{ foo: { type:terms, refine:none, limit:6, field:foo_s } }" - ) ).getResponse().get("facets")).get("foo").get("buckets"); + { // w/o refinement, the default overrequest isn't enough to find the long 'tail' *OR* the + // correct count for 'bbb0'... + List foo_buckets = + (List) + ((NamedList) + queryServer( + params( + "q", + "*:*", + "shards", + getShardsString(), + "json.facet", + "{ foo: { type:terms, refine:none, limit:6, field:foo_s } }")) + .getResponse() + .get("facets")) + .get("foo") + .get("buckets"); assertEquals(6, foo_buckets.size()); for (int i = 0; i < 5; i++) { NamedList bucket = foo_buckets.get(i); @@ -205,18 +241,38 @@ private void checkRefinementAndOverrequesting() throws Exception { // this will be short the "+1" for the doc added to shard2... NamedList bucket = foo_buckets.get(5); assertTrue(bucket.toString(), bucket.get("val").equals("bbb0")); // 'tail' is missed - assertEquals(bucket.toString(), 100L, bucket.get("count")); // will not include the "+1" for the doc added to shard2 + assertEquals( + bucket.toString(), + 100L, + bucket.get("count")); // will not include the "+1" for the doc added to shard2 } // even if we enable refinement, we still won't find the long 'tail' ... // regardless of whether we use either the default overrequest, or disable overrequesting... - for (String over : Arrays.asList( "", "overrequest:0,")) { - List foo_buckets = (List) - ((NamedList) - queryServer( params( "q", "*:*", "shards", getShardsString(), "json.facet", - "{ foo: { type:terms, refine:simple, limit:6, "+ over +" field:foo_s, facet:{ " + ALL_STATS_JSON + - " bar: { type:terms, refine:simple, limit:6, "+ over +" field:bar_s, facet:{"+ALL_STATS_JSON+"}}}}}" - ) ).getResponse().get("facets")).get("foo").get("buckets"); + for (String over : Arrays.asList("", "overrequest:0,")) { + List foo_buckets = + (List) + ((NamedList) + queryServer( + params( + "q", + "*:*", + "shards", + getShardsString(), + "json.facet", + "{ foo: { type:terms, refine:simple, limit:6, " + + over + + " field:foo_s, facet:{ " + + ALL_STATS_JSON + + " bar: { type:terms, refine:simple, limit:6, " + + over + + " field:bar_s, facet:{" + + ALL_STATS_JSON + + "}}}}}")) + .getResponse() + .get("facets")) + .get("foo") + .get("buckets"); assertEquals(6, foo_buckets.size()); for (int i = 0; i < 5; i++) { NamedList bucket = foo_buckets.get(i); @@ -229,7 +285,7 @@ private void checkRefinementAndOverrequesting() throws Exception { assertEquals(bucket.toString(), 101L, bucket.get("count")); // ...and the stats under bbb0 should be correct to include the refinement assertEquals(ALL_STATS.size() + 3, bucket.size()); // val,count,facet - assertEquals(-2L, bucket.get("min")); // this min only exists on shard2 + assertEquals(-2L, bucket.get("min")); // this min only exists on shard2 assertEquals(1L, bucket.get("max")); assertEquals(101L, bucket.get("countvals")); assertEquals(0L, bucket.get("missing")); @@ -243,28 +299,42 @@ private void checkRefinementAndOverrequesting() throws Exception { assertEquals(3L, bucket.get("hll")); } - - // with a limit==6, we have to "overrequest >= 20" in order to ensure that 'tail' is included in the top 6 - // this is because of how the "simple" 
refinement process works: the "top buckets" are determined based - // on the info available in the first pass request. + // with a limit==6, we have to "overrequest >= 20" in order to ensure that 'tail' is included in + // the top 6; this is because of how the "simple" refinement process works: the "top buckets" are + // determined based on the info available in the first pass request. // - // Even though 'tail' is returned in the top6 for shard2, the cumulative total for 'bbb0' from shard0 and shard1 is - // high enough that the simple facet refinement ignores 'tail' because it assumes 'bbb0's final total will be greater. + // Even though 'tail' is returned in the top6 for shard2, the cumulative total for 'bbb0' from + // shard0 and shard1 is high enough that the simple facet refinement ignores 'tail' because it + // assumes 'bbb0's final total will be greater. // - // Meanwhile, for the sub-facet on 'bar', a limit==6 means we should correctly find 'tailB' as the top sub-term of 'tail', - // regardless of how much overrequest is used (or even if we don't have any refinement) since it's always in the top6... - for (String bar_opts : Arrays.asList( "refine:none,", - "refine:simple,", - "refine:none, overrequest:0,", - "refine:simple, overrequest:0," )) { - - - List buckets = (List) - ((NamedList) - queryServer( params( "q", "*:*", "shards", getShardsString(), "json.facet", - "{ foo: { type:terms, limit:6, overrequest:20, refine:simple, field:foo_s, facet:{ " + - " bar: { type:terms, limit:6, " + bar_opts + " field:bar_s }}}}" - ) ).getResponse().get("facets")).get("foo").get("buckets"); + // Meanwhile, for the sub-facet on 'bar', a limit==6 means we should correctly find 'tailB' as + // the top sub-term of 'tail', regardless of how much overrequest is used (or even if we don't + // have any refinement) since it's always in the top6... 
+ for (String bar_opts : + Arrays.asList( + "refine:none,", + "refine:simple,", + "refine:none, overrequest:0,", + "refine:simple, overrequest:0,")) { + + List buckets = + (List) + ((NamedList) + queryServer( + params( + "q", + "*:*", + "shards", + getShardsString(), + "json.facet", + "{ foo: { type:terms, limit:6, overrequest:20, refine:simple, field:foo_s, facet:{ " + + " bar: { type:terms, limit:6, " + + bar_opts + + " field:bar_s }}}}")) + .getResponse() + .get("facets")) + .get("foo") + .get("buckets"); assertEquals(6, buckets.size()); for (int i = 0; i < 5; i++) { @@ -288,21 +358,35 @@ private void checkRefinementAndOverrequesting() throws Exception { assertEquals(bucket.toString(), 14L, bucket.get("count")); } } - - // if we lower the limit on the sub-bucket to '5', overrequesting of at least 1 should still ensure - // that we get the correct top5 including "tailB" -- even w/o refinement - for (String bar_opts : Arrays.asList( "refine:none,", - "refine:simple,", - "refine:none, overrequest:1,", - "refine:simple, overrequest:1," )) { - - List buckets = (List) - ((NamedList) - queryServer( params( "q", "*:*", "shards", getShardsString(), "json.facet", - "{ foo: { type:terms, limit:6, overrequest:20, refine:simple, field:foo_s, facet:{ " + - " bar: { type:terms, limit:5, " + bar_opts + " field:bar_s }}}}" - ) ).getResponse().get("facets")).get("foo").get("buckets"); - + + // if we lower the limit on the sub-bucket to '5', overrequesting of at least 1 should still + // ensure that we get the correct top5 including "tailB" -- even w/o refinement + for (String bar_opts : + Arrays.asList( + "refine:none,", + "refine:simple,", + "refine:none, overrequest:1,", + "refine:simple, overrequest:1,")) { + + List buckets = + (List) + ((NamedList) + queryServer( + params( + "q", + "*:*", + "shards", + getShardsString(), + "json.facet", + "{ foo: { type:terms, limit:6, overrequest:20, refine:simple, field:foo_s, facet:{ " + + " bar: { type:terms, limit:5, " + + bar_opts + + " field:bar_s }}}}")) + .getResponse() + .get("facets")) + .get("foo") + .get("buckets"); + assertEquals(6, buckets.size()); for (int i = 0; i < 5; i++) { NamedList bucket = buckets.get(i); @@ -327,15 +411,27 @@ private void checkRefinementAndOverrequesting() throws Exception { // however: with a lower sub-facet limit==5, and overrequesting disabled, // we're going to miss out on tailB even if we have refinement - for (String bar_opts : Arrays.asList( "refine:none, overrequest:0,", - "refine:simple, overrequest:0," )) { - - List buckets = (List) - ((NamedList) - queryServer( params( "q", "*:*", "shards", getShardsString(), "json.facet", - "{ foo: { type:terms, limit:6, overrequest:20, refine:simple, field:foo_s, facet:{ " + - " bar: { type:terms, limit:5, " + bar_opts + " field:bar_s }}}}" - ) ).getResponse().get("facets")).get("foo").get("buckets"); + for (String bar_opts : + Arrays.asList("refine:none, overrequest:0,", "refine:simple, overrequest:0,")) { + + List buckets = + (List) + ((NamedList) + queryServer( + params( + "q", + "*:*", + "shards", + getShardsString(), + "json.facet", + "{ foo: { type:terms, limit:6, overrequest:20, refine:simple, field:foo_s, facet:{ " + + " bar: { type:terms, limit:5, " + + bar_opts + + " field:bar_s }}}}")) + .getResponse() + .get("facets")) + .get("foo") + .get("buckets"); assertEquals(6, buckets.size()); for (int i = 0; i < 5; i++) { @@ -355,35 +451,54 @@ private void checkRefinementAndOverrequesting() throws Exception { assertEquals(bucket.toString(), 14L, bucket.get("count")); } } - } - 
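
The knobs exercised throughout this method are ordinary json.facet parameters; a minimal standalone SolrJ sketch of the same request shape follows (the base URL and collection name are assumptions, while the facet JSON mirrors the strings used in this test):

import org.apache.solr.client.solrj.impl.Http2SolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.params.ModifiableSolrParams;

public class RefineOverrequestSketch {
  public static void main(String[] args) throws Exception {
    try (Http2SolrClient client =
        new Http2SolrClient.Builder("http://localhost:8983/solr").build()) {
      ModifiableSolrParams p = new ModifiableSolrParams();
      p.set("q", "*:*");
      p.set("rows", "0");
      // limit:6 alone misses the long-tail 'tail' term; overrequest:20 widens the
      // per-shard first pass enough for refine:simple to pick it up and refine it.
      p.set(
          "json.facet",
          "{ foo: { type:terms, field:foo_s, limit:6, overrequest:20, refine:simple } }");
      QueryResponse rsp = client.query("collection1", p);
      System.out.println(rsp.getResponse().get("facets"));
    }
  }
}

With refine:none instead of refine:simple, the same request returns the unrefined first-pass counts, which is the contrast the assertions above depend on.
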
private void checkSubFacetStats() throws Exception { + private void checkSubFacetStats() throws Exception { // Deep checking of some Facet stats - - // the assertions only care about the first 5 results of each facet, but to get the long tail more are needed - // from the sub-shards. results should be the same regardless of: "high limit" vs "low limit + high overrequest" + + // the assertions only care about the first 5 results of each facet, but to get the long tail + // more are needed from the sub-shards. results should be the same regardless of: "high limit" + // vs "low limit + high overrequest" checkSubFacetStats("refine:simple, limit: 100,"); checkSubFacetStats("refine:simple, overrequest: 100,"); // and the results shouldn't change if we explicitly disable refinement checkSubFacetStats("refine:none, limit: 100,"); checkSubFacetStats("refine:none, overrequest: 100,"); - } - + private void checkSubFacetStats(String extraJson) throws Exception { String commonJson = "type: terms, " + extraJson; @SuppressWarnings({"unchecked", "rawtypes"}) - NamedList all_facets = (NamedList) queryServer - ( params( "q", "*:*", "shards", getShardsString(), "rows" , "0", "json.facet", - "{ foo : { " + commonJson + " field: foo_s, facet: { " + - ALL_STATS_JSON + " bar: { " + commonJson + " field: bar_s, facet: { " + ALL_STATS_JSON + - // under bar, in addition to "ALL" simple stats, we also ask for skg... - ", skg : 'relatedness($skg_fore,$skg_back)' } } } } }", - "skg_fore", STAT_FIELD+":[0 TO 40]", "skg_back", STAT_FIELD+":[-10000 TO 10000]" - ) ).getResponse().get("facets"); - + NamedList all_facets = + (NamedList) + queryServer( + params( + "q", + "*:*", + "shards", + getShardsString(), + "rows", + "0", + "json.facet", + "{ foo : { " + + commonJson + + " field: foo_s, facet: { " + + ALL_STATS_JSON + + " bar: { " + + commonJson + + " field: bar_s, facet: { " + + ALL_STATS_JSON + + + // under bar, in addition to "ALL" simple stats, we also ask for skg... + ", skg : 'relatedness($skg_fore,$skg_back)' } } } } }", + "skg_fore", + STAT_FIELD + ":[0 TO 40]", + "skg_back", + STAT_FIELD + ":[-10000 TO 10000]")) + .getResponse() + .get("facets"); + assertNotNull(all_facets); @SuppressWarnings({"unchecked", "rawtypes"}) @@ -399,7 +514,7 @@ private void checkSubFacetStats(String extraJson) throws Exception { assertEquals(300L, aaa0_Bucket.get("countvals")); assertEquals(0L, aaa0_Bucket.get("missing")); assertEquals(34650.0D, aaa0_Bucket.get("sum")); - assertEquals(483.70000000000016D, (double)aaa0_Bucket.get("percentile"), 0.1E-7); + assertEquals(483.70000000000016D, (double) aaa0_Bucket.get("percentile"), 0.1E-7); assertEquals(115.5D, (double) aaa0_Bucket.get("avg"), 0.1E-7); assertEquals(1.674585E7D, (double) aaa0_Bucket.get("sumsq"), 0.1E-7); assertEquals(206.4493184076D, (double) aaa0_Bucket.get("stddev"), 0.1E-7); @@ -426,8 +541,8 @@ private void checkSubFacetStats(String extraJson) throws Exception { assertEquals(45L, tail_Bucket.get("hll")); @SuppressWarnings({"unchecked", "rawtypes"}) - List tail_bar_buckets = (List) ((NamedList)tail_Bucket.get("bar")).get("buckets"); - + List tail_bar_buckets = (List) ((NamedList) tail_Bucket.get("bar")).get("buckets"); + @SuppressWarnings({"rawtypes"}) NamedList tailB_Bucket = tail_bar_buckets.get(0); assertEquals(ALL_STATS.size() + 3, tailB_Bucket.size()); // val,count,skg ... 
NO SUB FACETS @@ -449,15 +564,13 @@ private void checkSubFacetStats(String extraJson) throws Exception { // check the SKG stats on our tailB bucket @SuppressWarnings({"rawtypes"}) NamedList tailB_skg = (NamedList) tailB_Bucket.get("skg"); - assertEquals(tailB_skg.toString(), - 3, tailB_skg.size()); - assertEquals(0.19990D, tailB_skg.get("relatedness")); - assertEquals(0.00334D, tailB_skg.get("foreground_popularity")); - assertEquals(0.00334D, tailB_skg.get("background_popularity")); - //assertEquals(12L, tailB_skg.get("foreground_count")); - //assertEquals(82L, tailB_skg.get("foreground_size")); - //assertEquals(12L, tailB_skg.get("background_count")); - //assertEquals(3591L, tailB_skg.get("background_size")); + assertEquals(tailB_skg.toString(), 3, tailB_skg.size()); + assertEquals(0.19990D, tailB_skg.get("relatedness")); + assertEquals(0.00334D, tailB_skg.get("foreground_popularity")); + assertEquals(0.00334D, tailB_skg.get("background_popularity")); + // assertEquals(12L, tailB_skg.get("foreground_count")); + // assertEquals(82L, tailB_skg.get("foreground_size")); + // assertEquals(12L, tailB_skg.get("background_count")); + // assertEquals(3591L, tailB_skg.get("background_size")); } - } diff --git a/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java b/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java index 4971b40ade4..d9636abc9ff 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java +++ b/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java @@ -19,16 +19,15 @@ import java.lang.invoke.MethodHandles; import java.lang.reflect.Array; import java.nio.file.Paths; -import java.util.Arrays; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; -import java.util.List; import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.stream.Collectors; - import org.apache.lucene.util.TestUtil; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -39,23 +38,22 @@ import org.apache.solr.common.params.CoreAdminParams; import org.apache.solr.common.params.FacetParams.FacetRangeOther; import org.apache.solr.common.util.NamedList; - +import org.junit.BeforeClass; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.junit.BeforeClass; - /** - * Builds a random index of a few simple fields, maintaining an in-memory model of the expected - * doc counts so that we can verify the results of range facets w/ nested field facets that need refinement. + * Builds a random index of a few simple fields, maintaining an in-memory model of the expected doc + * counts so that we can verify the results of range facets w/ nested field facets that need + * refinement. * - * The focus here is on stressing the cases where the document values fall direct only on the + *
+ * <p>The focus here is on stressing the cases where the document values fall exactly on the * range boundaries, and how the various "include" options affect refinement. */ public class RangeFacetCloudTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - + private static final String COLLECTION = MethodHandles.lookup().lookupClass().getName(); private static final String CONF = COLLECTION + "_configSet"; @@ -64,38 +62,44 @@ public class RangeFacetCloudTest extends SolrCloudTestCase { private static final int NUM_RANGE_VALUES = 6; private static final int TERM_VALUES_RANDOMIZER = 100; - private static final List SORTS = Arrays.asList("count desc", "count asc", "index asc", "index desc"); - + private static final List SORTS = + Arrays.asList("count desc", "count asc", "index asc", "index desc"); + private static final List> OTHERS = buildListOfFacetRangeOtherOptions(); - private static final List BEFORE_AFTER_BETWEEN - = Arrays.asList(FacetRangeOther.BEFORE, FacetRangeOther.AFTER, FacetRangeOther.BETWEEN); - + private static final List BEFORE_AFTER_BETWEEN = + Arrays.asList(FacetRangeOther.BEFORE, FacetRangeOther.AFTER, FacetRangeOther.BETWEEN); + /** - * the array indexes represent values in our numeric field, while the array values - * track the number of docs that will have that value. + * the array indexes represent values in our numeric field, while the array values track the + * number of docs that will have that value. */ private static final int[] RANGE_MODEL = new int[NUM_RANGE_VALUES]; /** - * the array indexes represent values in our numeric field, while the array values - * track the mapping from string field terms to facet counts for docs that have that numeric value + * the array indexes represent values in our numeric field, while the array values track the + * mapping from string field terms to facet counts for docs that have that numeric value */ @SuppressWarnings({"unchecked"}) - private static final Map[] TERM_MODEL = (Map[]) Array.newInstance(Map.class, NUM_RANGE_VALUES); - + private static final Map[] TERM_MODEL = + (Map[]) Array.newInstance(Map.class, NUM_RANGE_VALUES); + @BeforeClass public static void setupCluster() throws Exception { - final int numShards = TestUtil.nextInt(random(),1,5); + final int numShards = TestUtil.nextInt(random(), 1, 5); final int numReplicas = 1; final int nodeCount = numShards * numReplicas; configureCluster(nodeCount) - .addConfig(CONF, Paths.get(TEST_HOME(), "collection1", "conf")) - .configure(); + .addConfig(CONF, Paths.get(TEST_HOME(), "collection1", "conf")) + .configure(); + + assertEquals( + 0, + (CollectionAdminRequest.createCollection(COLLECTION, CONF, numShards, numReplicas) + .setProperties( + Collections.singletonMap(CoreAdminParams.CONFIG, "solrconfig-minimal.xml")) + .process(cluster.getSolrClient())) + .getStatus()); - assertEquals(0, (CollectionAdminRequest.createCollection(COLLECTION, CONF, numShards, numReplicas) - .setProperties(Collections.singletonMap(CoreAdminParams.CONFIG, "solrconfig-minimal.xml")) - .process(cluster.getSolrClient())).getStatus()); - cluster.getSolrClient().setDefaultCollection(COLLECTION); final int numDocs = atLeast(1000); @@ -112,16 +116,14 @@ public static void setupCluster() throws Exception { for (int id = 0; id < numDocs; id++) { final int rangeVal = random().nextInt(NUM_RANGE_VALUES); final String termVal = "x" + random().nextInt(maxTermId); - final SolrInputDocument doc = sdoc("id", ""+id, - INT_FIELD, ""+rangeVal, 
- STR_FIELD, termVal); + final SolrInputDocument doc = + sdoc("id", "" + id, INT_FIELD, "" + rangeVal, STR_FIELD, termVal); RANGE_MODEL[rangeVal]++; TERM_MODEL[rangeVal].merge(termVal, 1, Integer::sum); assertEquals(0, (new UpdateRequest().add(doc)).process(cluster.getSolrClient()).getStatus()); } assertEquals(0, cluster.getSolrClient().commit().getStatus()); - } public void testInclude_Lower() throws Exception { @@ -131,28 +133,47 @@ public void testInclude_Lower() throws Exception { for (EnumSet other : OTHERS) { final String otherStr = formatFacetRangeOther(other); for (String include : Arrays.asList(", include:lower", "")) { // same behavior - final SolrQuery solrQuery = new SolrQuery - ("q", "*:*", "rows", "0", "json.facet", - // exclude a single low value from our ranges - "{ foo:{ type:range, field:"+INT_FIELD+" start:1, end:5, gap:1"+otherStr+include+subFacet+" } }"); + final SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "json.facet", + // exclude a single low value from our ranges + "{ foo:{ type:range, field:" + + INT_FIELD + + " start:1, end:5, gap:1" + + otherStr + + include + + subFacet + + " } }"); final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { @SuppressWarnings({"unchecked"}) - final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + final NamedList foo = + ((NamedList>) rsp.getResponse().get("facets")).get("foo"); @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); assertEquals("num buckets", 4, buckets.size()); for (int i = 0; i < 4; i++) { - int expectedVal = i+1; - assertBucket("bucket#" + i, expectedVal, modelVals(expectedVal), subFacetLimit, buckets.get(i)); + int expectedVal = i + 1; + assertBucket( + "bucket#" + i, + expectedVal, + modelVals(expectedVal), + subFacetLimit, + buckets.get(i)); } - assertBeforeAfterBetween(other, modelVals(0), modelVals(5), modelVals(1,4), subFacetLimit, foo); + assertBeforeAfterBetween( + other, modelVals(0), modelVals(5), modelVals(1, 4), subFacetLimit, foo); - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } @@ -166,31 +187,46 @@ public void testInclude_Lower_Gap2() throws Exception { for (EnumSet other : OTHERS) { final String otherStr = formatFacetRangeOther(other); for (String include : Arrays.asList(", include:lower", "")) { // same behavior - final SolrQuery solrQuery = new SolrQuery - ("q", "*:*", "rows", "0", "json.facet", - "{ foo:{ type:range, field:"+INT_FIELD+" start:0, end:5, gap:2"+otherStr+include+subFacet+" } }"); - + final SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{ foo:{ type:range, field:" + + INT_FIELD + + " start:0, end:5, gap:2" + + otherStr + + include + + subFacet + + " } }"); + final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { @SuppressWarnings({"unchecked"}) - final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + final NamedList foo = + ((NamedList>) rsp.getResponse().get("facets")).get("foo"); @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); - + assertEquals("num buckets", 3, buckets.size()); - assertBucket("bucket#0", 0, modelVals(0,1), subFacetLimit, buckets.get(0)); - assertBucket("bucket#1", 
2, modelVals(2,3), subFacetLimit, buckets.get(1)); - assertBucket("bucket#2", 4, modelVals(4,5), subFacetLimit, buckets.get(2)); - - assertBeforeAfterBetween(other, emptyVals(), emptyVals(), modelVals(0,5), subFacetLimit, foo); - - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + assertBucket("bucket#0", 0, modelVals(0, 1), subFacetLimit, buckets.get(0)); + assertBucket("bucket#1", 2, modelVals(2, 3), subFacetLimit, buckets.get(1)); + assertBucket("bucket#2", 4, modelVals(4, 5), subFacetLimit, buckets.get(2)); + + assertBeforeAfterBetween( + other, emptyVals(), emptyVals(), modelVals(0, 5), subFacetLimit, foo); + + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } } } + public void testInclude_Lower_Gap2_hardend() throws Exception { for (boolean doSubFacet : Arrays.asList(false, true)) { final Integer subFacetLimit = pickSubFacetLimit(doSubFacet); @@ -198,27 +234,40 @@ public void testInclude_Lower_Gap2_hardend() throws Exception { for (EnumSet other : OTHERS) { final String otherStr = formatFacetRangeOther(other); for (String include : Arrays.asList(", include:lower", "")) { // same behavior - final SolrQuery solrQuery = new SolrQuery - ("q", "*:*", "rows", "0", "json.facet", - "{ foo:{ type:range, field:"+INT_FIELD+" start:0, end:5, gap:2, hardend:true" - + otherStr+include+subFacet+" } }"); - + final SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{ foo:{ type:range, field:" + + INT_FIELD + + " start:0, end:5, gap:2, hardend:true" + + otherStr + + include + + subFacet + + " } }"); + final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { @SuppressWarnings({"unchecked"}) - final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + final NamedList foo = + ((NamedList>) rsp.getResponse().get("facets")).get("foo"); @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); - + assertEquals("num buckets", 3, buckets.size()); - assertBucket("bucket#0", 0, modelVals(0,1), subFacetLimit, buckets.get(0)); - assertBucket("bucket#1", 2, modelVals(2,3), subFacetLimit, buckets.get(1)); + assertBucket("bucket#0", 0, modelVals(0, 1), subFacetLimit, buckets.get(0)); + assertBucket("bucket#1", 2, modelVals(2, 3), subFacetLimit, buckets.get(1)); assertBucket("bucket#2", 4, modelVals(4), subFacetLimit, buckets.get(2)); - - assertBeforeAfterBetween(other, emptyVals(), modelVals(5), modelVals(0,4), subFacetLimit, foo); - - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + + assertBeforeAfterBetween( + other, emptyVals(), modelVals(5), modelVals(0, 4), subFacetLimit, foo); + + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } @@ -227,105 +276,151 @@ public void testInclude_Lower_Gap2_hardend() throws Exception { public void testStatsWithOmitHeader() throws Exception { // SOLR-13509: no NPE should be thrown when only stats are specified with omitHeader=true - SolrQuery solrQuery = new SolrQuery("q", "*:*", "omitHeader", "true", - "json.facet", "{unique_foo:\"unique(" + STR_FIELD+ ")\"}"); + SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "omitHeader", 
+ "true", + "json.facet", + "{unique_foo:\"unique(" + STR_FIELD + ")\"}"); final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); // response shouldn't contain header as omitHeader is set to true assertNull(rsp.getResponseHeader()); } - + public void testInclude_Upper() throws Exception { for (boolean doSubFacet : Arrays.asList(false, true)) { final Integer subFacetLimit = pickSubFacetLimit(doSubFacet); final CharSequence subFacet = makeSubFacet(subFacetLimit); for (EnumSet other : OTHERS) { final String otherStr = formatFacetRangeOther(other); - final SolrQuery solrQuery = new SolrQuery - ("q", "*:*", "rows", "0", "json.facet", - // exclude a single high value from our ranges - "{ foo:{ type:range, field:"+INT_FIELD+" start:0, end:4, gap:1, include:upper"+otherStr+subFacet+" } }"); - + final SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "json.facet", + // exclude a single high value from our ranges + "{ foo:{ type:range, field:" + + INT_FIELD + + " start:0, end:4, gap:1, include:upper" + + otherStr + + subFacet + + " } }"); + final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { @SuppressWarnings({"unchecked"}) - final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + final NamedList foo = + ((NamedList>) rsp.getResponse().get("facets")).get("foo"); @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); - + assertEquals("num buckets", 4, buckets.size()); for (int i = 0; i < 4; i++) { - assertBucket("bucket#" + i, i, modelVals(i+1), subFacetLimit, buckets.get(i)); + assertBucket("bucket#" + i, i, modelVals(i + 1), subFacetLimit, buckets.get(i)); } - assertBeforeAfterBetween(other, modelVals(0), modelVals(5), modelVals(1,4), subFacetLimit, foo); - - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + assertBeforeAfterBetween( + other, modelVals(0), modelVals(5), modelVals(1, 4), subFacetLimit, foo); + + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } } + public void testInclude_Upper_Gap2() throws Exception { for (boolean doSubFacet : Arrays.asList(false, true)) { final Integer subFacetLimit = pickSubFacetLimit(doSubFacet); final CharSequence subFacet = makeSubFacet(subFacetLimit); for (EnumSet other : OTHERS) { final String otherStr = formatFacetRangeOther(other); - final SolrQuery solrQuery = new SolrQuery - ("q", "*:*", "rows", "0", "json.facet", - // exclude a single high value from our ranges - "{ foo:{ type:range, field:"+INT_FIELD+" start:0, end:4, gap:2, include:upper"+otherStr+subFacet+" } }"); - + final SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "json.facet", + // exclude a single high value from our ranges + "{ foo:{ type:range, field:" + + INT_FIELD + + " start:0, end:4, gap:2, include:upper" + + otherStr + + subFacet + + " } }"); + final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { @SuppressWarnings({"unchecked"}) - final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + final NamedList foo = + ((NamedList>) rsp.getResponse().get("facets")).get("foo"); @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); - + assertEquals("num buckets", 2, buckets.size()); - assertBucket("bucket#0", 0, modelVals(1,2), subFacetLimit, buckets.get(0)); - 
assertBucket("bucket#1", 2, modelVals(3,4), subFacetLimit, buckets.get(1)); - - assertBeforeAfterBetween(other, modelVals(0), modelVals(5), modelVals(1,4), subFacetLimit, foo); - - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + assertBucket("bucket#0", 0, modelVals(1, 2), subFacetLimit, buckets.get(0)); + assertBucket("bucket#1", 2, modelVals(3, 4), subFacetLimit, buckets.get(1)); + + assertBeforeAfterBetween( + other, modelVals(0), modelVals(5), modelVals(1, 4), subFacetLimit, foo); + + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } } - + public void testInclude_Edge() throws Exception { for (boolean doSubFacet : Arrays.asList(false, true)) { final Integer subFacetLimit = pickSubFacetLimit(doSubFacet); final CharSequence subFacet = makeSubFacet(subFacetLimit); for (EnumSet other : OTHERS) { final String otherStr = formatFacetRangeOther(other); - final SolrQuery solrQuery = new SolrQuery - ("q", "*:*", "rows", "0", "json.facet", - // exclude a single low/high value from our ranges - "{ foo:{ type:range, field:"+INT_FIELD+" start:1, end:4, gap:1, include:edge"+otherStr+subFacet+" } }"); - + final SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "json.facet", + // exclude a single low/high value from our ranges + "{ foo:{ type:range, field:" + + INT_FIELD + + " start:1, end:4, gap:1, include:edge" + + otherStr + + subFacet + + " } }"); + final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { @SuppressWarnings({"unchecked"}) - final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + final NamedList foo = + ((NamedList>) rsp.getResponse().get("facets")).get("foo"); @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); - + assertEquals("num buckets", 3, buckets.size()); - + assertBucket("bucket#0", 1, modelVals(1), subFacetLimit, buckets.get(0)); - + // middle bucket doesn't include lower or upper so it's empty assertBucket("bucket#1", 2, emptyVals(), subFacetLimit, buckets.get(1)); - + assertBucket("bucket#2", 3, modelVals(4), subFacetLimit, buckets.get(2)); - - assertBeforeAfterBetween(other, modelVals(0), modelVals(5), modelVals(1,4), subFacetLimit, foo); - - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + + assertBeforeAfterBetween( + other, modelVals(0), modelVals(5), modelVals(1, 4), subFacetLimit, foo); + + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } @@ -337,187 +432,262 @@ public void testInclude_EdgeLower() throws Exception { final CharSequence subFacet = makeSubFacet(subFacetLimit); for (EnumSet other : OTHERS) { final String otherStr = formatFacetRangeOther(other); - for (String include : Arrays.asList(", include:'edge,lower'", ", include:[edge,lower]")) { // same - final SolrQuery solrQuery = new SolrQuery - ("q", "*:*", "rows", "0", "json.facet", - // exclude a single low/high value from our ranges - "{ foo:{ type:range, field:"+INT_FIELD+" start:1, end:4, gap:1"+otherStr+include+subFacet+" } }"); - + for (String include : + Arrays.asList(", include:'edge,lower'", ", include:[edge,lower]")) { // same + final SolrQuery solrQuery = 
+ new SolrQuery( + "q", + "*:*", + "rows", + "0", + "json.facet", + // exclude a single low/high value from our ranges + "{ foo:{ type:range, field:" + + INT_FIELD + + " start:1, end:4, gap:1" + + otherStr + + include + + subFacet + + " } }"); + final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { @SuppressWarnings({"unchecked"}) - final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + final NamedList foo = + ((NamedList>) rsp.getResponse().get("facets")).get("foo"); @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); - + assertEquals("num buckets", 3, buckets.size()); - + assertBucket("bucket#0", 1, modelVals(1), subFacetLimit, buckets.get(0)); assertBucket("bucket#1", 2, modelVals(2), subFacetLimit, buckets.get(1)); - assertBucket("bucket#2", 3, modelVals(3,4), subFacetLimit, buckets.get(2)); - - assertBeforeAfterBetween(other, modelVals(0), modelVals(5), modelVals(1,4), subFacetLimit, foo); - - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + assertBucket("bucket#2", 3, modelVals(3, 4), subFacetLimit, buckets.get(2)); + + assertBeforeAfterBetween( + other, modelVals(0), modelVals(5), modelVals(1, 4), subFacetLimit, foo); + + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } } } - + public void testInclude_EdgeUpper() throws Exception { for (boolean doSubFacet : Arrays.asList(false, true)) { final Integer subFacetLimit = pickSubFacetLimit(doSubFacet); final CharSequence subFacet = makeSubFacet(subFacetLimit); for (EnumSet other : OTHERS) { final String otherStr = formatFacetRangeOther(other); - for (String include : Arrays.asList(", include:'edge,upper'", ", include:[edge,upper]")) { // same - final SolrQuery solrQuery = new SolrQuery - ("q", "*:*", "rows", "0", "json.facet", - // exclude a single low/high value from our ranges - "{ foo:{ type:range, field:"+INT_FIELD+" start:1, end:4, gap:1"+otherStr+include+subFacet+" } }"); - + for (String include : + Arrays.asList(", include:'edge,upper'", ", include:[edge,upper]")) { // same + final SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "json.facet", + // exclude a single low/high value from our ranges + "{ foo:{ type:range, field:" + + INT_FIELD + + " start:1, end:4, gap:1" + + otherStr + + include + + subFacet + + " } }"); + final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { @SuppressWarnings({"unchecked"}) - final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + final NamedList foo = + ((NamedList>) rsp.getResponse().get("facets")).get("foo"); @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); - + assertEquals("num buckets", 3, buckets.size()); - - assertBucket("bucket#0", 1, modelVals(1,2), subFacetLimit, buckets.get(0)); + + assertBucket("bucket#0", 1, modelVals(1, 2), subFacetLimit, buckets.get(0)); assertBucket("bucket#1", 2, modelVals(3), subFacetLimit, buckets.get(1)); assertBucket("bucket#2", 3, modelVals(4), subFacetLimit, buckets.get(2)); - - assertBeforeAfterBetween(other, modelVals(0), modelVals(5), modelVals(1,4), subFacetLimit, foo); - - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + + 
assertBeforeAfterBetween( + other, modelVals(0), modelVals(5), modelVals(1, 4), subFacetLimit, foo); + + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } } } - + public void testInclude_EdgeLowerUpper() throws Exception { for (boolean doSubFacet : Arrays.asList(false, true)) { final Integer subFacetLimit = pickSubFacetLimit(doSubFacet); final CharSequence subFacet = makeSubFacet(subFacetLimit); for (EnumSet other : OTHERS) { final String otherStr = formatFacetRangeOther(other); - for (String include : Arrays.asList(", include:'edge,lower,upper'", ", include:[edge,lower,upper]")) { // same - final SolrQuery solrQuery = new SolrQuery - ("q", "*:*", "rows", "0", "json.facet", - // exclude a single low/high value from our ranges - "{ foo:{ type:range, field:"+INT_FIELD+" start:1, end:4, gap:1"+otherStr+include+subFacet+" } }"); - + for (String include : + Arrays.asList(", include:'edge,lower,upper'", ", include:[edge,lower,upper]")) { // same + final SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "json.facet", + // exclude a single low/high value from our ranges + "{ foo:{ type:range, field:" + + INT_FIELD + + " start:1, end:4, gap:1" + + otherStr + + include + + subFacet + + " } }"); + final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { @SuppressWarnings({"unchecked"}) - final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + final NamedList foo = + ((NamedList>) rsp.getResponse().get("facets")).get("foo"); @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); - + assertEquals("num buckets", 3, buckets.size()); - - assertBucket("bucket#0", 1, modelVals(1,2), subFacetLimit, buckets.get(0)); - assertBucket("bucket#1", 2, modelVals(2,3), subFacetLimit, buckets.get(1)); - assertBucket("bucket#2", 3, modelVals(3,4), subFacetLimit, buckets.get(2)); - - assertBeforeAfterBetween(other, modelVals(0), modelVals(5), modelVals(1,4), subFacetLimit, foo); - - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + + assertBucket("bucket#0", 1, modelVals(1, 2), subFacetLimit, buckets.get(0)); + assertBucket("bucket#1", 2, modelVals(2, 3), subFacetLimit, buckets.get(1)); + assertBucket("bucket#2", 3, modelVals(3, 4), subFacetLimit, buckets.get(2)); + + assertBeforeAfterBetween( + other, modelVals(0), modelVals(5), modelVals(1, 4), subFacetLimit, foo); + + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } } } - + public void testInclude_All() throws Exception { for (boolean doSubFacet : Arrays.asList(false, true)) { final Integer subFacetLimit = pickSubFacetLimit(doSubFacet); final CharSequence subFacet = makeSubFacet(subFacetLimit); for (EnumSet other : OTHERS) { final String otherStr = formatFacetRangeOther(other); - for (String include : Arrays.asList(", include:'edge,lower,upper,outer'", - ", include:[edge,lower,upper,outer]", - ", include:all")) { // same - final SolrQuery solrQuery = new SolrQuery - ("q", "*:*", "rows", "0", "json.facet", - // exclude a single low/high value from our ranges - "{ foo:{ type:range, field:"+INT_FIELD+" start:1, end:4, gap:1"+otherStr+include+subFacet+" } }"); - + for (String include : + Arrays.asList( + ", include:'edge,lower,upper,outer'", 
+ ", include:[edge,lower,upper,outer]", + ", include:all")) { // same + final SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "json.facet", + // exclude a single low/high value from our ranges + "{ foo:{ type:range, field:" + + INT_FIELD + + " start:1, end:4, gap:1" + + otherStr + + include + + subFacet + + " } }"); + final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { @SuppressWarnings({"unchecked"}) - final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + final NamedList foo = + ((NamedList>) rsp.getResponse().get("facets")).get("foo"); @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); - + assertEquals("num buckets", 3, buckets.size()); - - assertBucket("bucket#0", 1, modelVals(1,2), subFacetLimit, buckets.get(0)); - assertBucket("bucket#1", 2, modelVals(2,3), subFacetLimit, buckets.get(1)); - assertBucket("bucket#2", 3, modelVals(3,4), subFacetLimit, buckets.get(2)); - - assertBeforeAfterBetween(other, modelVals(0,1), modelVals(4,5), modelVals(1,4), subFacetLimit, foo); - - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + + assertBucket("bucket#0", 1, modelVals(1, 2), subFacetLimit, buckets.get(0)); + assertBucket("bucket#1", 2, modelVals(2, 3), subFacetLimit, buckets.get(1)); + assertBucket("bucket#2", 3, modelVals(3, 4), subFacetLimit, buckets.get(2)); + + assertBeforeAfterBetween( + other, modelVals(0, 1), modelVals(4, 5), modelVals(1, 4), subFacetLimit, foo); + + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } } } - /** - * This test will also sanity check that mincount is working properly - */ + /** This test will also sanity check that mincount is working properly */ public void testInclude_All_Gap2() throws Exception { for (boolean doSubFacet : Arrays.asList(false, true)) { final Integer subFacetLimit = pickSubFacetLimit(doSubFacet); final CharSequence subFacet = makeSubFacet(subFacetLimit); for (EnumSet other : OTHERS) { final String otherStr = formatFacetRangeOther(other); - for (String include : Arrays.asList(", include:'edge,lower,upper,outer'", - ", include:[edge,lower,upper,outer]", - ", include:all")) { // same + for (String include : + Arrays.asList( + ", include:'edge,lower,upper,outer'", + ", include:[edge,lower,upper,outer]", + ", include:all")) { // same // we also want to sanity check that mincount doesn't bork anything, // so we're going to do the query twice: // 1) no mincount, keep track of which bucket has the highest count & what it was // 2) use that value as the mincount, assert that the other bucket isn't returned long mincount_to_use = -1; - Object expected_mincount_bucket_val = null; // HACK: use null to mean neither in case of tie + Object expected_mincount_bucket_val = + null; // HACK: use null to mean neither in case of tie // initial query, no mincount... 
- SolrQuery solrQuery = new SolrQuery - ("q", "*:*", "rows", "0", "json.facet", - // exclude a single low/high value from our ranges - "{ foo:{ type:range, field:"+INT_FIELD+", start:1, end:4, gap:2"+otherStr+include+subFacet+" } }"); - + SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "json.facet", + // exclude a single low/high value from our ranges + "{ foo:{ type:range, field:" + + INT_FIELD + + ", start:1, end:4, gap:2" + + otherStr + + include + + subFacet + + " } }"); + QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { @SuppressWarnings({"unchecked"}) - final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + final NamedList foo = + ((NamedList>) rsp.getResponse().get("facets")).get("foo"); @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); - + assertEquals("num buckets", 2, buckets.size()); - - assertBucket("bucket#0", 1, modelVals(1,3), subFacetLimit, buckets.get(0)); - assertBucket("bucket#1", 3, modelVals(3,5), subFacetLimit, buckets.get(1)); - - assertBeforeAfterBetween(other, modelVals(0,1), modelVals(5), modelVals(1,5), subFacetLimit, foo); + + assertBucket("bucket#0", 1, modelVals(1, 3), subFacetLimit, buckets.get(0)); + assertBucket("bucket#1", 3, modelVals(3, 5), subFacetLimit, buckets.get(1)); + + assertBeforeAfterBetween( + other, modelVals(0, 1), modelVals(5), modelVals(1, 5), subFacetLimit, foo); // if we've made it this far, then our buckets match the model // now use our buckets to pick a mincount to use based on the MIN(+1) count seen - long count0 = ((Number)buckets.get(0).get("count")).longValue(); - long count1 = ((Number)buckets.get(1).get("count")).longValue(); - + long count0 = ((Number) buckets.get(0).get("count")).longValue(); + long count1 = ((Number) buckets.get(1).get("count")).longValue(); + mincount_to_use = 1 + Math.min(count0, count1); if (count0 > count1) { expected_mincount_bucket_val = buckets.get(0).get("val"); @@ -525,21 +695,35 @@ public void testInclude_All_Gap2() throws Exception { expected_mincount_bucket_val = buckets.get(1).get("val"); } - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } // second query, using mincount... 
- solrQuery = new SolrQuery - ("q", "*:*", "rows", "0", "json.facet", - // exclude a single low/high value from our ranges, - "{ foo:{ type:range, field:"+INT_FIELD+", mincount:" + mincount_to_use + - ", start:1, end:4, gap:2"+otherStr+include+subFacet+" } }"); - + solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "json.facet", + // exclude a single low/high value from our ranges, + "{ foo:{ type:range, field:" + + INT_FIELD + + ", mincount:" + + mincount_to_use + + ", start:1, end:4, gap:2" + + otherStr + + include + + subFacet + + " } }"); + rsp = cluster.getSolrClient().query(solrQuery); try { @SuppressWarnings({"unchecked"}) - final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + final NamedList foo = + ((NamedList>) rsp.getResponse().get("facets")).get("foo"); @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); @@ -549,54 +733,72 @@ public void testInclude_All_Gap2() throws Exception { assertEquals("num buckets", 1, buckets.size()); final Object actualBucket = buckets.get(0); if (expected_mincount_bucket_val.equals(1)) { - assertBucket("bucket#0(0)", 1, modelVals(1,3), subFacetLimit, actualBucket); + assertBucket("bucket#0(0)", 1, modelVals(1, 3), subFacetLimit, actualBucket); } else { - assertBucket("bucket#0(1)", 3, modelVals(3,5), subFacetLimit, actualBucket); + assertBucket("bucket#0(1)", 3, modelVals(3, 5), subFacetLimit, actualBucket); } } - - // regardless of mincount, the before/after/between special buckets should always be returned - assertBeforeAfterBetween(other, modelVals(0,1), modelVals(5), modelVals(1,5), subFacetLimit, foo); - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + // regardless of mincount, the before/after/between special buckets should always be + // returned + assertBeforeAfterBetween( + other, modelVals(0, 1), modelVals(5), modelVals(1, 5), subFacetLimit, foo); + + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } } } - + public void testInclude_All_Gap2_hardend() throws Exception { for (boolean doSubFacet : Arrays.asList(false, true)) { final Integer subFacetLimit = pickSubFacetLimit(doSubFacet); final CharSequence subFacet = makeSubFacet(subFacetLimit); for (EnumSet other : OTHERS) { final String otherStr = formatFacetRangeOther(other); - for (String include : Arrays.asList(", include:'edge,lower,upper,outer'", - ", include:[edge,lower,upper,outer]", - ", include:all")) { // same - final SolrQuery solrQuery = new SolrQuery - ("q", "*:*", "rows", "0", "json.facet", - // exclude a single low/high value from our ranges - "{ foo:{ type:range, field:"+INT_FIELD+" start:1, end:4, gap:2, hardend:true" - + otherStr+include+subFacet+" } }"); - + for (String include : + Arrays.asList( + ", include:'edge,lower,upper,outer'", + ", include:[edge,lower,upper,outer]", + ", include:all")) { // same + final SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "json.facet", + // exclude a single low/high value from our ranges + "{ foo:{ type:range, field:" + + INT_FIELD + + " start:1, end:4, gap:2, hardend:true" + + otherStr + + include + + subFacet + + " } }"); + final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { @SuppressWarnings({"unchecked"}) - final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + 
final NamedList foo = + ((NamedList>) rsp.getResponse().get("facets")).get("foo"); @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); - + assertEquals("num buckets", 2, buckets.size()); - - assertBucket("bucket#0", 1, modelVals(1,3), subFacetLimit, buckets.get(0)); - assertBucket("bucket#1", 3, modelVals(3,4), subFacetLimit, buckets.get(1)); - - assertBeforeAfterBetween(other, modelVals(0,1), modelVals(4,5), modelVals(1,4), subFacetLimit, foo); - - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + + assertBucket("bucket#0", 1, modelVals(1, 3), subFacetLimit, buckets.get(0)); + assertBucket("bucket#1", 3, modelVals(3, 4), subFacetLimit, buckets.get(1)); + + assertBeforeAfterBetween( + other, modelVals(0, 1), modelVals(4, 5), modelVals(1, 4), subFacetLimit, foo); + + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } @@ -608,28 +810,46 @@ public void testRangeWithInterval() throws Exception { final Integer subFacetLimit = pickSubFacetLimit(doSubFacet); final CharSequence subFacet = makeSubFacet(subFacetLimit); for (boolean incUpper : Arrays.asList(false, true)) { - String incUpperStr = ",inclusive_to:"+incUpper; - final SolrQuery solrQuery = new SolrQuery - ("q", "*:*", "rows", "0", "json.facet", - "{ foo:{ type:range, field:" + INT_FIELD + " ranges:[{from:1, to:2"+ incUpperStr+ "}," + - "{from:2, to:3"+ incUpperStr +"},{from:3, to:4"+ incUpperStr +"},{from:4, to:5"+ incUpperStr+"}]" - + subFacet + " } }"); + String incUpperStr = ",inclusive_to:" + incUpper; + final SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{ foo:{ type:range, field:" + + INT_FIELD + + " ranges:[{from:1, to:2" + + incUpperStr + + "}," + + "{from:2, to:3" + + incUpperStr + + "},{from:3, to:4" + + incUpperStr + + "},{from:4, to:5" + + incUpperStr + + "}]" + + subFacet + + " } }"); final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { @SuppressWarnings({"unchecked"}) - final NamedList foo = ((NamedList>) rsp.getResponse().get("facets")).get("foo"); + final NamedList foo = + ((NamedList>) rsp.getResponse().get("facets")).get("foo"); @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); assertEquals("num buckets", 4, buckets.size()); for (int i = 0; i < 4; i++) { - String expectedVal = "[" + (i + 1) + "," + (i + 2) + (incUpper? "]": ")"); - ModelRange modelVals = incUpper? modelVals(i+1, i+2) : modelVals(i+1); + String expectedVal = "[" + (i + 1) + "," + (i + 2) + (incUpper ? "]" : ")"); + ModelRange modelVals = incUpper ? modelVals(i + 1, i + 2) : modelVals(i + 1); assertBucket("bucket#" + i, expectedVal, modelVals, subFacetLimit, buckets.get(i)); } } catch (AssertionError | RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } @@ -640,28 +860,46 @@ public void testRangeWithOldIntervalFormat() throws Exception { final Integer subFacetLimit = pickSubFacetLimit(doSubFacet); final CharSequence subFacet = makeSubFacet(subFacetLimit); for (boolean incUpper : Arrays.asList(false, true)) { - String incUpperStr = incUpper? 
"]\"":")\""; - final SolrQuery solrQuery = new SolrQuery - ("q", "*:*", "rows", "0", "json.facet", - "{ foo:{ type:range, field:" + INT_FIELD + " ranges:[{range:\"[1,2"+ incUpperStr+ "}," + - "{range:\"[2,3"+ incUpperStr +"},{range:\"[3,4"+ incUpperStr +"},{range:\"[4,5"+ incUpperStr+"}]" - + subFacet + " } }"); + String incUpperStr = incUpper ? "]\"" : ")\""; + final SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{ foo:{ type:range, field:" + + INT_FIELD + + " ranges:[{range:\"[1,2" + + incUpperStr + + "}," + + "{range:\"[2,3" + + incUpperStr + + "},{range:\"[3,4" + + incUpperStr + + "},{range:\"[4,5" + + incUpperStr + + "}]" + + subFacet + + " } }"); final QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { @SuppressWarnings({"unchecked"}) - final NamedList foo = ((NamedList>) rsp.getResponse().get("facets")).get("foo"); + final NamedList foo = + ((NamedList>) rsp.getResponse().get("facets")).get("foo"); @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); assertEquals("num buckets", 4, buckets.size()); for (int i = 0; i < 4; i++) { - String expectedVal = "[" + (i + 1) + "," + (i + 2) + (incUpper? "]": ")"); - ModelRange modelVals = incUpper? modelVals(i+1, i+2) : modelVals(i+1); + String expectedVal = "[" + (i + 1) + "," + (i + 2) + (incUpper ? "]" : ")"); + ModelRange modelVals = incUpper ? modelVals(i + 1, i + 2) : modelVals(i + 1); assertBucket("bucket#" + i, expectedVal, modelVals, subFacetLimit, buckets.get(i)); } } catch (AssertionError | RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } @@ -676,29 +914,37 @@ public void testIntervalWithMincount() throws Exception { Object expected_mincount_bucket_val = null; // without mincount - SolrQuery solrQuery = new SolrQuery( - "q", "*:*", "rows", "0", "json.facet", - "{ foo:{ type:range, field:" + INT_FIELD + " ranges:[{from:1, to:3},{from:3, to:5}]" + - subFacet + " } }" - ); + SolrQuery solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{ foo:{ type:range, field:" + + INT_FIELD + + " ranges:[{from:1, to:3},{from:3, to:5}]" + + subFacet + + " } }"); QueryResponse rsp = cluster.getSolrClient().query(solrQuery); try { @SuppressWarnings({"unchecked"}) - final NamedList foo = ((NamedList>)rsp.getResponse().get("facets")).get("foo"); + final NamedList foo = + ((NamedList>) rsp.getResponse().get("facets")).get("foo"); @SuppressWarnings({"unchecked"}) final List> buckets = (List>) foo.get("buckets"); assertEquals("num buckets", 2, buckets.size()); // upper is not included - assertBucket("bucket#0", "[1,3)", modelVals(1,2), subFacetLimit, buckets.get(0)); - assertBucket("bucket#1", "[3,5)", modelVals(3,4), subFacetLimit, buckets.get(1)); + assertBucket("bucket#0", "[1,3)", modelVals(1, 2), subFacetLimit, buckets.get(0)); + assertBucket("bucket#1", "[3,5)", modelVals(3, 4), subFacetLimit, buckets.get(1)); // if we've made it this far, then our buckets match the model // now use our buckets to pick a mincount to use based on the MIN(+1) count seen - long count0 = ((Number)buckets.get(0).get("count")).longValue(); - long count1 = ((Number)buckets.get(1).get("count")).longValue(); + long count0 = ((Number) buckets.get(0).get("count")).longValue(); + long count1 = ((Number) buckets.get(1).get("count")).longValue(); mincount_to_use = 1 + 
Math.min(count0, count1); if (count0 > count1) { @@ -707,21 +953,32 @@ public void testIntervalWithMincount() throws Exception { expected_mincount_bucket_val = buckets.get(1).get("val"); } - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } // with mincount - solrQuery = new SolrQuery( - "q", "*:*", "rows", "0", "json.facet", - "{ foo:{ type:range, field:" + INT_FIELD + " ranges:[{from:1, to:3},{from:3, to:5}]" + - ",mincount:" + mincount_to_use + subFacet + " } }" - ); + solrQuery = + new SolrQuery( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{ foo:{ type:range, field:" + + INT_FIELD + + " ranges:[{from:1, to:3},{from:3, to:5}]" + + ",mincount:" + + mincount_to_use + + subFacet + + " } }"); rsp = cluster.getSolrClient().query(solrQuery); try { @SuppressWarnings({"unchecked"}) - final NamedList<Object> foo = ((NamedList<NamedList<Object>>)rsp.getResponse().get("facets")).get("foo"); + final NamedList<Object> foo = + ((NamedList<NamedList<Object>>) rsp.getResponse().get("facets")).get("foo"); @SuppressWarnings({"unchecked"}) final List<NamedList<Object>> buckets = (List<NamedList<Object>>) foo.get("buckets"); @@ -731,13 +988,14 @@ public void testIntervalWithMincount() throws Exception { assertEquals("num buckets", 1, buckets.size()); final Object actualBucket = buckets.get(0); if (expected_mincount_bucket_val.equals("[1,3)")) { - assertBucket("bucket#0(0)", "[1,3)", modelVals(1,2), subFacetLimit, actualBucket); + assertBucket("bucket#0(0)", "[1,3)", modelVals(1, 2), subFacetLimit, actualBucket); } else { - assertBucket("bucket#0(1)", "[3,5)", modelVals(3,4), subFacetLimit, actualBucket); + assertBucket("bucket#0(1)", "[3,5)", modelVals(3, 4), subFacetLimit, actualBucket); } } - } catch (AssertionError|RuntimeException ae) { - throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); + } catch (AssertionError | RuntimeException ae) { + throw new AssertionError( + solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae); } } } @@ -746,18 +1004,24 @@ public void testIntervalWithMincount() throws Exception { * Helper method for validating a single 'bucket' from a Range facet. * * @param label to use in assertions - * @param expectedVal "val" to assert for this bucket, use null for special "buckets" like before, after, between. - * @param expectedRangeValues a range of the expected values in the numeric field whose cumulative counts should match this buckets "count" - * @param subFacetLimitUsed if null, then assert this bucket has no "bar" subfacet, otherwise assert expected term counts for each actual term, and sanity check the number terms returnd against the model and/or this limit. - * @param actualBucket the actual bucket returned from a query for all assertions to be conducted against. + * @param expectedVal "val" to assert for this bucket, use null for + * special "buckets" like before, after, between. + * @param expectedRangeValues a range of the expected values in the numeric field whose cumulative + * counts should match this bucket's "count" + * @param subFacetLimitUsed if null, then assert this bucket has no "bar" subfacet, + * otherwise assert expected term counts for each actual term, and sanity check the number of + * terms returned against the model and/or this limit.
+ * @param actualBucket the actual bucket returned from a query for all assertions to be conducted + * against. */ - private static void assertBucket(final String label, - final Object expectedVal, - final ModelRange expectedRangeValues, - final Integer subFacetLimitUsed, - final Object actualBucket) { + private static void assertBucket( + final String label, + final Object expectedVal, + final ModelRange expectedRangeValues, + final Integer subFacetLimitUsed, + final Object actualBucket) { try { - + assertNotNull("null bucket", actualBucket); assertNotNull("expectedRangeValues", expectedRangeValues); assertTrue("bucket is not a NamedList", actualBucket instanceof NamedList); @@ -767,34 +1031,37 @@ private static void assertBucket(final String label, if (null != expectedVal) { assertEquals("val", expectedVal, bucket.get("val")); } - + // figure out the model from our range of values... long expectedCount = 0; - List> toMerge = new ArrayList<>(NUM_RANGE_VALUES); + List> toMerge = new ArrayList<>(NUM_RANGE_VALUES); for (int i = expectedRangeValues.lower; i <= expectedRangeValues.upper; i++) { expectedCount += RANGE_MODEL[i]; toMerge.add(TERM_MODEL[i]); } assertEquals("count", expectedCount, bucket.get("count")); - + // merge the maps of our range values by summing the (int) values on key collisions - final Map expectedTermCounts = toMerge.stream() - .flatMap(m -> m.entrySet().stream()) - .collect(Collectors.toMap(Entry::getKey, (e -> e.getValue().longValue()), Long::sum)); + final Map expectedTermCounts = + toMerge.stream() + .flatMap(m -> m.entrySet().stream()) + .collect(Collectors.toMap(Entry::getKey, (e -> e.getValue().longValue()), Long::sum)); if (null == subFacetLimitUsed || 0 == expectedCount) { assertNull("unexpected subfacets", bucket.get("bar")); } else { @SuppressWarnings({"unchecked"}) - NamedList bar = ((NamedList)bucket.get("bar")); + NamedList bar = ((NamedList) bucket.get("bar")); assertNotNull("can't find subfacet 'bar'", bar); - final int numBucketsExpected = subFacetLimitUsed < 0 - ? expectedTermCounts.size() : Math.min(subFacetLimitUsed, expectedTermCounts.size()); + final int numBucketsExpected = + subFacetLimitUsed < 0 + ? expectedTermCounts.size() + : Math.min(subFacetLimitUsed, expectedTermCounts.size()); @SuppressWarnings({"unchecked"}) final List> subBuckets = (List>) bar.get("buckets"); - // we should either have filled out the expected limit, or + // we should either have filled out the expected limit, or assertEquals("num subfacet buckets", numBucketsExpected, subBuckets.size()); // assert sub-facet term counts for the subBuckets that do exist @@ -803,31 +1070,33 @@ private static void assertBucket(final String label, assertNotNull("subfacet bucket with null term: " + subBucket, term); final Long expectedTermCount = expectedTermCounts.get(term.toString()); assertNotNull("unexpected subfacet bucket: " + subBucket, expectedTermCount); - assertEquals("subfacet count for term: " + term, expectedTermCount, subBucket.get("count")); + assertEquals( + "subfacet count for term: " + term, expectedTermCount, subBucket.get("count")); } } - - } catch (AssertionError|RuntimeException ae) { + + } catch (AssertionError | RuntimeException ae) { throw new AssertionError(label + ": " + ae.getMessage(), ae); } } - + /** - * A convenience method for calling {@link #assertBucket} on the before/after/between buckets - * of a facet result, based on the {@link FacetRangeOther} specified for this facet. 
- * + * A convenience method for calling {@link #assertBucket} on the before/after/between buckets of a + * facet result, based on the {@link FacetRangeOther} specified for this facet. + * + * @see #assertBucket - * @see #buildListOfFacetRangeOtherOptions + * @see #buildListOfFacetRangeOtherOptions */ - private static void assertBeforeAfterBetween(final EnumSet<FacetRangeOther> other, - final ModelRange before, - final ModelRange after, - final ModelRange between, - final Integer subFacetLimitUsed, - final NamedList<Object> facet) { - //final String[] names = new String[] { "before", "after", "between" }; + private static void assertBeforeAfterBetween( + final EnumSet<FacetRangeOther> other, + final ModelRange before, + final ModelRange after, + final ModelRange between, + final Integer subFacetLimitUsed, + final NamedList<Object> facet) { + // final String[] names = new String[] { "before", "after", "between" }; assertEquals(3, BEFORE_AFTER_BETWEEN.size()); - final ModelRange[] expected = new ModelRange[] { before, after, between }; + final ModelRange[] expected = new ModelRange[] {before, after, between}; for (int i = 0; i < 3; i++) { FacetRangeOther key = BEFORE_AFTER_BETWEEN.get(i); String name = key.toString(); @@ -839,10 +1108,11 @@ private static void assertBeforeAfterBetween(final EnumSet<FacetRangeOther> othe } } - /** - * A little helper struct to make the method sig of {@link #assertBucket} more readable. - * If lower (or upper) is negative, then both must be negative and upper must be less then - * lower -- this indicate that the bucket should be empty. + /** + * A little helper struct to make the method sig of {@link #assertBucket} more readable. If lower + * (or upper) is negative, then both must be negative and upper must be less than lower -- this + * indicates that the bucket should be empty. + * + * @see #modelVals + * @see #emptyVals */ @@ -852,20 +1122,23 @@ private static final class ModelRange { /** Don't use, use the convenience methods */ public ModelRange(int lower, int upper) { if (lower < 0 || upper < 0) { - assert(lower < 0 && upper < lower); + assert (lower < 0 && upper < lower); } else { - assert(lower <= upper); + assert (lower <= upper); } this.lower = lower; this.upper = upper; } } + private static final ModelRange emptyVals() { return new ModelRange(-1, -100); } + private static final ModelRange modelVals(int value) { return modelVals(value, value); } + private static final ModelRange modelVals(int lower, int upper) { assertTrue(upper + " < " + lower, lower <= upper); assertTrue("negative lower", 0 <= lower); @@ -875,7 +1148,9 @@ private static final ModelRange modelVals(int lower, int upper) { /** randomized helper */ private static final Integer pickSubFacetLimit(final boolean doSubFacet) { - if (! doSubFacet) { return null; } + if (!doSubFacet) { + return null; + } int result = TestUtil.nextInt(random(), -10, atLeast(TERM_VALUES_RANDOMIZER)); return (result <= 0) ? -1 : result; } @@ -884,12 +1159,13 @@ private static final CharSequence makeSubFacet(final Integer subFacetLimit) { if (null == subFacetLimit) { return ""; } - final StringBuilder result = new StringBuilder(", facet:{ bar:{ type:terms, refine:true, field:"+STR_FIELD); + final StringBuilder result = + new StringBuilder(", facet:{ bar:{ type:terms, refine:true, field:" + STR_FIELD); // constrain overrequesting to stress refiement, but still test those codepaths final String overrequest = random().nextBoolean() ?
"0" : "1"; - + result.append(", overrequest:").append(overrequest).append(", limit:").append(subFacetLimit); - + // order should have no affect on our testing if (random().nextBoolean()) { result.append(", sort:'").append(SORTS.get(random().nextInt(SORTS.size()))).append("'"); @@ -898,22 +1174,25 @@ private static final CharSequence makeSubFacet(final Integer subFacetLimit) { return result; } - /** - * Helper for seeding the re-used static struct, and asserting no one changes the Enum w/o updating this test + /** + * Helper for seeding the re-used static struct, and asserting no one changes the Enum w/o + * updating this test * - * @see #assertBeforeAfterBetween + * @see #assertBeforeAfterBetween * @see #formatFacetRangeOther * @see #OTHERS */ private static final List> buildListOfFacetRangeOtherOptions() { - assertEquals("If someone adds to FacetRangeOther this method (and bulk of test) needs updated", - 5, EnumSet.allOf(FacetRangeOther.class).size()); - + assertEquals( + "If someone adds to FacetRangeOther this method (and bulk of test) needs updated", + 5, + EnumSet.allOf(FacetRangeOther.class).size()); + // we're not overly concerned about testing *EVERY* permutation, // we just want to make sure we test multiple code paths (some, all, "ALL", none) // - // NOTE: Don't mix "ALL" or "NONE" with other options so we don't have to make assertBeforeAfterBetween - // overly complicated + // NOTE: Don't mix "ALL" or "NONE" with other options so we don't have to make + // assertBeforeAfterBetween overly complicated ArrayList> results = new ArrayList<>(5); results.add(EnumSet.of(FacetRangeOther.ALL)); results.add(EnumSet.of(FacetRangeOther.BEFORE, FacetRangeOther.AFTER, FacetRangeOther.BETWEEN)); @@ -922,9 +1201,9 @@ private static final List> buildListOfFacetRangeOtherOp results.add(EnumSet.of(FacetRangeOther.NONE)); return results; } - - /** - * @see #assertBeforeAfterBetween + + /** + * @see #assertBeforeAfterBetween * @see #buildListOfFacetRangeOtherOptions */ private static final String formatFacetRangeOther(EnumSet other) { @@ -936,9 +1215,8 @@ private static final String formatFacetRangeOther(EnumSet other // two valid syntaxes to randomize between: // - a JSON list of items (conveniently the default toString of EnumSet), // - a single quoted string containing the comma separated list - val = val.replaceAll("\\[|\\]","'"); + val = val.replaceAll("\\[|\\]", "'"); } return ", other:" + val; } - } diff --git a/solr/core/src/test/org/apache/solr/search/facet/SpatialHeatmapFacetsTest.java b/solr/core/src/test/org/apache/solr/search/facet/SpatialHeatmapFacetsTest.java index ab3f0e068c0..94afeac277b 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/SpatialHeatmapFacetsTest.java +++ b/solr/core/src/test/org/apache/solr/search/facet/SpatialHeatmapFacetsTest.java @@ -16,10 +16,9 @@ */ package org.apache.solr.search.facet; +import com.carrotsearch.randomizedtesting.annotations.Repeat; import java.util.Arrays; import java.util.List; - -import com.carrotsearch.randomizedtesting.annotations.Repeat; import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrException; @@ -39,7 +38,7 @@ public static void beforeSuperClass() throws Exception { schemaString = "schema-spatial.xml"; configString = "solrconfig-basic.xml"; - //Strictly not necessary (set already in Ant & Maven) but your IDE might not have this set + // Strictly not necessary (set already in Ant & Maven) but your IDE might not have this set 
System.setProperty("java.awt.headless", "true"); } @@ -52,61 +51,135 @@ public void testClassicFacets() throws Exception { // AKA SimpleFacets handle.put("timestamp", SKIPVAL); handle.put("maxScore", SKIPVAL); - SolrParams baseParams = params("q", "*:*", "rows", "0", "facet", "true", FacetParams.FACET_HEATMAP, FIELD); + SolrParams baseParams = + params("q", "*:*", "rows", "0", "facet", "true", FacetParams.FACET_HEATMAP, FIELD); - final String testBox = "[\"50 50\" TO \"180 90\"]";//top-right somewhere on edge (whatever) + final String testBox = "[\"50 50\" TO \"180 90\"]"; // top-right somewhere on edge (whatever) - //----- First we test gridLevel derivation + // ----- First we test gridLevel derivation try { - getHmObj(query(params(baseParams, FacetParams.FACET_HEATMAP_GEOM, testBox, FacetParams.FACET_HEATMAP_DIST_ERR, "0"))).get("gridLevel"); + getHmObj( + query( + params( + baseParams, + FacetParams.FACET_HEATMAP_GEOM, + testBox, + FacetParams.FACET_HEATMAP_DIST_ERR, + "0"))) + .get("gridLevel"); fail(); } catch (SolrException e) { assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); } try { - getHmObj(query(params(baseParams, FacetParams.FACET_HEATMAP_GEOM, testBox, FacetParams.FACET_HEATMAP_DIST_ERR_PCT, "0"))).get("gridLevel"); + getHmObj( + query( + params( + baseParams, + FacetParams.FACET_HEATMAP_GEOM, + testBox, + FacetParams.FACET_HEATMAP_DIST_ERR_PCT, + "0"))) + .get("gridLevel"); fail(); } catch (SolrException e) { assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); } - // Monkeying with these params changes the gridLevel in different directions. We don't test the exact + // Monkeying with these params changes the gridLevel in different directions. We don't test the + // exact // computation here; that's not _that_ relevant, and is Lucene spatial's job (not Solr) any way. 
- assertEquals(7, getHmObj(query(params(baseParams, FacetParams.FACET_HEATMAP_GEOM, testBox))).get("gridLevel"));//default - assertEquals(3, getHmObj(query(params(baseParams, FacetParams.FACET_HEATMAP_GEOM, testBox, FacetParams.FACET_HEATMAP_LEVEL, "3"))).get("gridLevel")); - assertEquals(2, getHmObj(query(params(baseParams, FacetParams.FACET_HEATMAP_GEOM, testBox, FacetParams.FACET_HEATMAP_DIST_ERR, "100"))).get("gridLevel")); - //TODO test impact of distance units - assertEquals(9, getHmObj(query(params(baseParams, FacetParams.FACET_HEATMAP_GEOM, testBox, FacetParams.FACET_HEATMAP_DIST_ERR_PCT, "0.05"))).get("gridLevel")); - assertEquals(6, getHmObj(query(params(baseParams, FacetParams.FACET_HEATMAP_DIST_ERR_PCT, "0.10"))).get("gridLevel")); - - //test key output label doing 2 heatmaps with different settings on the same field + assertEquals( + 7, + getHmObj(query(params(baseParams, FacetParams.FACET_HEATMAP_GEOM, testBox))) + .get("gridLevel")); // default + assertEquals( + 3, + getHmObj( + query( + params( + baseParams, + FacetParams.FACET_HEATMAP_GEOM, + testBox, + FacetParams.FACET_HEATMAP_LEVEL, + "3"))) + .get("gridLevel")); + assertEquals( + 2, + getHmObj( + query( + params( + baseParams, + FacetParams.FACET_HEATMAP_GEOM, + testBox, + FacetParams.FACET_HEATMAP_DIST_ERR, + "100"))) + .get("gridLevel")); + // TODO test impact of distance units + assertEquals( + 9, + getHmObj( + query( + params( + baseParams, + FacetParams.FACET_HEATMAP_GEOM, + testBox, + FacetParams.FACET_HEATMAP_DIST_ERR_PCT, + "0.05"))) + .get("gridLevel")); + assertEquals( + 6, + getHmObj(query(params(baseParams, FacetParams.FACET_HEATMAP_DIST_ERR_PCT, "0.10"))) + .get("gridLevel")); + + // test key output label doing 2 heatmaps with different settings on the same field { - final ModifiableSolrParams params = params(baseParams, FacetParams.FACET_HEATMAP_DIST_ERR_PCT, "0.10"); + final ModifiableSolrParams params = + params(baseParams, FacetParams.FACET_HEATMAP_DIST_ERR_PCT, "0.10"); String courseFormat = random().nextBoolean() ? 
"png" : "ints2D"; - params.add(FacetParams.FACET_HEATMAP, "{!key=course " - + FacetParams.FACET_HEATMAP_LEVEL + "=2 " - + FacetParams.FACET_HEATMAP_FORMAT + "=" + courseFormat - + "}" + FIELD); + params.add( + FacetParams.FACET_HEATMAP, + "{!key=course " + + FacetParams.FACET_HEATMAP_LEVEL + + "=2 " + + FacetParams.FACET_HEATMAP_FORMAT + + "=" + + courseFormat + + "}" + + FIELD); final QueryResponse response = query(params); - assertEquals(6, getHmObj(response).get("gridLevel"));//same test as above - assertEquals(2, response.getResponse().findRecursive("facet_counts", "facet_heatmaps", "course", "gridLevel")); - assertTrue(((NamedList) response.getResponse().findRecursive("facet_counts", "facet_heatmaps", "course")) - .asMap(0).containsKey("counts_" + courseFormat)); + assertEquals(6, getHmObj(response).get("gridLevel")); // same test as above + assertEquals( + 2, + response + .getResponse() + .findRecursive("facet_counts", "facet_heatmaps", "course", "gridLevel")); + assertTrue( + ((NamedList) + response.getResponse().findRecursive("facet_counts", "facet_heatmaps", "course")) + .asMap(0) + .containsKey("counts_" + courseFormat)); } // ------ Index data - index("id", "0", FIELD, "ENVELOPE(100, 120, 80, 40)");// on right side - index("id", "1", FIELD, "ENVELOPE(-120, -110, 80, 20)");// on left side (outside heatmap) - index("id", "3", FIELD, "POINT(70 60)");//just left of BOX 0 - index("id", "4", FIELD, "POINT(91 89)");//just outside box 0 (above it) near pole, + index("id", "0", FIELD, "ENVELOPE(100, 120, 80, 40)"); // on right side + index("id", "1", FIELD, "ENVELOPE(-120, -110, 80, 20)"); // on left side (outside heatmap) + index("id", "3", FIELD, "POINT(70 60)"); // just left of BOX 0 + index("id", "4", FIELD, "POINT(91 89)"); // just outside box 0 (above it) near pole, commit(); // ----- Search // this test simply has some 0's, nulls, 1's and a 2 in there. 
- NamedList hmObj = getHmObj(query(params(baseParams, - FacetParams.FACET_HEATMAP_GEOM, "[\"50 20\" TO \"180 90\"]", - FacetParams.FACET_HEATMAP_LEVEL, "4"))); + NamedList hmObj = + getHmObj( + query( + params( + baseParams, + FacetParams.FACET_HEATMAP_GEOM, + "[\"50 20\" TO \"180 90\"]", + FacetParams.FACET_HEATMAP_LEVEL, + "4"))); List> counts = (List>) hmObj.get("counts_ints2D"); assertEquals( Arrays.asList( @@ -116,64 +189,83 @@ public void testClassicFacets() throws Exception { // AKA SimpleFacets Arrays.asList(0, 0, 1, 1, 0, 0), Arrays.asList(0, 0, 1, 1, 0, 0), null, - null - ), - counts - ); + null), + counts); // now this time we add a filter query and exclude it - QueryResponse response = query(params(baseParams, - "fq", "{!tag=excludeme}id:0", // filter to only be id:0 - FacetParams.FACET_HEATMAP, "{!ex=excludeme}" + FIELD, // exclude the filter - FacetParams.FACET_HEATMAP_GEOM, "[\"50 20\" TO \"180 90\"]", - FacetParams.FACET_HEATMAP_LEVEL, "4")); - assertEquals(1, response.getResults().getNumFound());// because of our 'fq' + QueryResponse response = + query( + params( + baseParams, + "fq", + "{!tag=excludeme}id:0", // filter to only be id:0 + FacetParams.FACET_HEATMAP, + "{!ex=excludeme}" + FIELD, // exclude the filter + FacetParams.FACET_HEATMAP_GEOM, + "[\"50 20\" TO \"180 90\"]", + FacetParams.FACET_HEATMAP_LEVEL, + "4")); + assertEquals(1, response.getResults().getNumFound()); // because of our 'fq' hmObj = getHmObj(response); counts = (List>) hmObj.get("counts_ints2D"); assertEquals( - Arrays.asList( // same counts as before + Arrays.asList( // same counts as before Arrays.asList(0, 0, 2, 1, 0, 0), Arrays.asList(0, 0, 1, 1, 0, 0), Arrays.asList(0, 1, 1, 1, 0, 0), Arrays.asList(0, 0, 1, 1, 0, 0), Arrays.asList(0, 0, 1, 1, 0, 0), null, - null - ), - counts - ); + null), + counts); // test using a circle input shape - hmObj = getHmObj(query(params(baseParams, - FacetParams.FACET_HEATMAP_GEOM, "BUFFER(POINT(110 40), 7)", - FacetParams.FACET_HEATMAP_LEVEL, "7"))); + hmObj = + getHmObj( + query( + params( + baseParams, + FacetParams.FACET_HEATMAP_GEOM, + "BUFFER(POINT(110 40), 7)", + FacetParams.FACET_HEATMAP_LEVEL, + "7"))); counts = (List>) hmObj.get("counts_ints2D"); assertEquals( Arrays.asList( - Arrays.asList(0, 1, 1, 1, 1, 1, 1, 0),//curved; we have a 0 - Arrays.asList(0, 1, 1, 1, 1, 1, 1, 0),//curved; we have a 0 - Arrays.asList(0, 1, 1, 1, 1, 1, 1, 0),//curved; we have a 0 + Arrays.asList(0, 1, 1, 1, 1, 1, 1, 0), // curved; we have a 0 + Arrays.asList(0, 1, 1, 1, 1, 1, 1, 0), // curved; we have a 0 + Arrays.asList(0, 1, 1, 1, 1, 1, 1, 0), // curved; we have a 0 Arrays.asList(1, 1, 1, 1, 1, 1, 1, 1), Arrays.asList(1, 1, 1, 1, 1, 1, 1, 1), Arrays.asList(1, 1, 1, 1, 1, 1, 1, 1), - null, null, null, null, null//no data here (below edge of rect 0) - ), - counts - ); + null, + null, + null, + null, + null // no data here (below edge of rect 0) + ), + counts); // Search in no-where ville and get null counts - assertNull(getHmObj(query(params(baseParams, FacetParams.FACET_HEATMAP_GEOM, "ENVELOPE(0, 10, -80, -90)"))).get("counts_ints2D")); - - Object v = getHmObj(query(params(baseParams, FacetParams.FACET_HEATMAP_FORMAT, "png"))).get("counts_png"); + assertNull( + getHmObj( + query( + params( + baseParams, FacetParams.FACET_HEATMAP_GEOM, "ENVELOPE(0, 10, -80, -90)"))) + .get("counts_ints2D")); + + Object v = + getHmObj(query(params(baseParams, FacetParams.FACET_HEATMAP_FORMAT, "png"))) + .get("counts_png"); assertTrue(v instanceof byte[]); - //simply test we can read the image + 
// simply test we can read the image assertNotNull(FacetHeatmap.PngHelper.readImage((byte[]) v)); - //good enough for this test method + // good enough for this test method } private ModifiableSolrParams params(SolrParams baseParams, String... moreParams) { final ModifiableSolrParams params = new ModifiableSolrParams(baseParams); - params.add(params(moreParams));//actually replaces + params.add(params(moreParams)); // actually replaces return params; } @@ -192,82 +284,166 @@ public void testJsonFacets() throws Exception { SolrParams baseParams = params("q", "*:*", "rows", "0"); - final String testBox = "[\"50 50\" TO \"180 90\"]";//top-right somewhere on edge (whatever) + final String testBox = "[\"50 50\" TO \"180 90\"]"; // top-right somewhere on edge (whatever) // ------ Index data - index("id", "0", FIELD, "ENVELOPE(100, 120, 80, 40)");// on right side - index("id", "1", FIELD, "ENVELOPE(-120, -110, 80, 20)");// on left side (outside heatmap) - index("id", "3", FIELD, "POINT(70 60)");//just left of BOX 0 - index("id", "4", FIELD, "POINT(91 89)");//just outside box 0 (above it) near pole, + index("id", "0", FIELD, "ENVELOPE(100, 120, 80, 40)"); // on right side + index("id", "1", FIELD, "ENVELOPE(-120, -110, 80, 20)"); // on left side (outside heatmap) + index("id", "3", FIELD, "POINT(70 60)"); // just left of BOX 0 + index("id", "4", FIELD, "POINT(91 89)"); // just outside box 0 (above it) near pole, commit(); - //----- Test gridLevel derivation + // ----- Test gridLevel derivation try { - query(params(baseParams, "json.facet", "{f1:{type:heatmap, f:" + FIELD + ", geom:'" + testBox + "', distErr:0}}")); + query( + params( + baseParams, + "json.facet", + "{f1:{type:heatmap, f:" + FIELD + ", geom:'" + testBox + "', distErr:0}}")); fail(); } catch (SolrException e) { assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); } try { - query(params(baseParams, "json.facet", "{f1:{type:heatmap, f:" + FIELD + ", geom:'" + testBox + "', distErrPct:0}}")); + query( + params( + baseParams, + "json.facet", + "{f1:{type:heatmap, f:" + FIELD + ", geom:'" + testBox + "', distErrPct:0}}")); fail(); } catch (SolrException e) { assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); } - // Monkeying with these params changes the gridLevel in different directions. We don't test the exact - // computation here; that's not _that_ relevant, and is Lucene spatial's job (not Solr) any way. - assertEquals(7, getHmObj(query(params(baseParams, - "json.facet", "{f1:{type:heatmap, f:" + FIELD + ", geom:'" + testBox + "'}}"))).get("gridLevel"));//default - assertEquals(3, getHmObj(query(params(baseParams, - "json.facet", "{f1:{type:heatmap, f:" + FIELD + ", geom:'" + testBox + "', gridLevel:3}}"))).get("gridLevel")); - assertEquals(2, getHmObj(query(params(baseParams, - "json.facet", "{f1:{type:heatmap, f:" + FIELD + ", geom:'" + testBox + "', distErr:100}}"))).get("gridLevel")); - //TODO test impact of distance units - assertEquals(9, getHmObj(query(params(baseParams, - "json.facet", "{f1:{type:heatmap, f:" + FIELD + ", geom:'" + testBox + "', distErrPct:0.05}}"))).get("gridLevel")); - assertEquals(6, getHmObj(query(params(baseParams, - "json.facet", "{f1:{type:heatmap, f:" + FIELD + ", distErrPct:0.10}}"))).get("gridLevel")); + // Monkeying with these params changes the gridLevel in different directions. We don't test the + // exact computation here; that's not _that_ relevant, and is Lucene spatial's job (not Solr) + // any way. 
+ assertEquals( + 7, + getHmObj( + query( + params( + baseParams, + "json.facet", + "{f1:{type:heatmap, f:" + FIELD + ", geom:'" + testBox + "'}}"))) + .get("gridLevel")); // default + assertEquals( + 3, + getHmObj( + query( + params( + baseParams, + "json.facet", + "{f1:{type:heatmap, f:" + + FIELD + + ", geom:'" + + testBox + + "', gridLevel:3}}"))) + .get("gridLevel")); + assertEquals( + 2, + getHmObj( + query( + params( + baseParams, + "json.facet", + "{f1:{type:heatmap, f:" + + FIELD + + ", geom:'" + + testBox + + "', distErr:100}}"))) + .get("gridLevel")); + // TODO test impact of distance units + assertEquals( + 9, + getHmObj( + query( + params( + baseParams, + "json.facet", + "{f1:{type:heatmap, f:" + + FIELD + + ", geom:'" + + testBox + + "', distErrPct:0.05}}"))) + .get("gridLevel")); + assertEquals( + 6, + getHmObj( + query( + params( + baseParams, + "json.facet", + "{f1:{type:heatmap, f:" + FIELD + ", distErrPct:0.10}}"))) + .get("gridLevel")); // ----- Search // this test simply has some 0's, nulls, 1's and a 2 in there. - NamedList hmObj = getHmObj(query(params(baseParams, - "json.facet", "{f1:{type:heatmap, f:" + FIELD + " geom:'[\"50 20\" TO \"180 90\"]', gridLevel:4}}"))); + NamedList hmObj = + getHmObj( + query( + params( + baseParams, + "json.facet", + "{f1:{type:heatmap, f:" + + FIELD + + " geom:'[\"50 20\" TO \"180 90\"]', gridLevel:4}}"))); List> counts = (List>) hmObj.get("counts_ints2D"); - List> expectedCounts1 = Arrays.asList( - Arrays.asList(0, 0, 2, 1, 0, 0), - Arrays.asList(0, 0, 1, 1, 0, 0), - Arrays.asList(0, 1, 1, 1, 0, 0), - Arrays.asList(0, 0, 1, 1, 0, 0), - Arrays.asList(0, 0, 1, 1, 0, 0), - null, - null - ); - assertEquals( expectedCounts1, counts); + List> expectedCounts1 = + Arrays.asList( + Arrays.asList(0, 0, 2, 1, 0, 0), + Arrays.asList(0, 0, 1, 1, 0, 0), + Arrays.asList(0, 1, 1, 1, 0, 0), + Arrays.asList(0, 0, 1, 1, 0, 0), + Arrays.asList(0, 0, 1, 1, 0, 0), + null, + null); + assertEquals(expectedCounts1, counts); // now this time we add a filter query and exclude it - QueryResponse response = query(params(baseParams, - "fq", "{!tag=excludeme}id:0", // filter to only be id:0 - "json.facet", "{f1:{type:heatmap, excludeTags:['excludeme'], f:" + FIELD + ", geom:'[\"50 20\" TO \"180 90\"]', gridLevel:4}}")); - - assertEquals(1, response.getResults().getNumFound());// because of our 'fq' + QueryResponse response = + query( + params( + baseParams, + "fq", + "{!tag=excludeme}id:0", // filter to only be id:0 + "json.facet", + "{f1:{type:heatmap, excludeTags:['excludeme'], f:" + + FIELD + + ", geom:'[\"50 20\" TO \"180 90\"]', gridLevel:4}}")); + + assertEquals(1, response.getResults().getNumFound()); // because of our 'fq' hmObj = getHmObj(response); counts = (List>) hmObj.get("counts_ints2D"); - assertEquals( expectedCounts1, counts); + assertEquals(expectedCounts1, counts); { - // impractical example but nonetheless encloses the points of both doc3 and doc4 (both of which are points) - final String jsonHeatmap = "facet:{hm:{type:heatmap, f:" + FIELD + ", geom:'MULTIPOINT(70 60, 91 89)', distErrPct:0.2}}"; - response = query(params(baseParams, - "json.facet", "{" + - "q1:{type:query, q:'id:3', " + jsonHeatmap + " }, " + - "q2:{type:query, q:'id:4', " + jsonHeatmap + " } " + - "}")); + // impractical example but nonetheless encloses the points of both doc3 and doc4 (both of + // which are points) + final String jsonHeatmap = + "facet:{hm:{type:heatmap, f:" + + FIELD + + ", geom:'MULTIPOINT(70 60, 91 89)', distErrPct:0.2}}"; + response = + query( + params( 
+ baseParams, + "json.facet", + "{" + + "q1:{type:query, q:'id:3', " + + jsonHeatmap + + " }, " + + "q2:{type:query, q:'id:4', " + + jsonHeatmap + + " } " + + "}")); { - final NamedList q1Res = (NamedList) response.getResponse().findRecursive("facets", "q1"); + final NamedList q1Res = + (NamedList) response.getResponse().findRecursive("facets", "q1"); assertEquals("1", q1Res.get("count").toString()); - final NamedList q2Res = (NamedList) response.getResponse().findRecursive("facets", "q2"); + final NamedList q2Res = + (NamedList) response.getResponse().findRecursive("facets", "q2"); assertEquals("1", q2Res.get("count").toString()); // essentially, these will differ only in the heatmap counts but otherwise will be the same assertNotNull(compare(q1Res, q2Res, flags, handle)); @@ -275,37 +451,61 @@ public void testJsonFacets() throws Exception { } // test using a circle input shape - hmObj = getHmObj(query(params(baseParams, - "json.facet", "{f1:{type:heatmap, f:" + FIELD + ", geom:'BUFFER(POINT(110 40), 7)', gridLevel:7}}"))); + hmObj = + getHmObj( + query( + params( + baseParams, + "json.facet", + "{f1:{type:heatmap, f:" + + FIELD + + ", geom:'BUFFER(POINT(110 40), 7)', gridLevel:7}}"))); counts = (List>) hmObj.get("counts_ints2D"); assertEquals( Arrays.asList( - Arrays.asList(0, 1, 1, 1, 1, 1, 1, 0),//curved; we have a 0 - Arrays.asList(0, 1, 1, 1, 1, 1, 1, 0),//curved; we have a 0 - Arrays.asList(0, 1, 1, 1, 1, 1, 1, 0),//curved; we have a 0 + Arrays.asList(0, 1, 1, 1, 1, 1, 1, 0), // curved; we have a 0 + Arrays.asList(0, 1, 1, 1, 1, 1, 1, 0), // curved; we have a 0 + Arrays.asList(0, 1, 1, 1, 1, 1, 1, 0), // curved; we have a 0 Arrays.asList(1, 1, 1, 1, 1, 1, 1, 1), Arrays.asList(1, 1, 1, 1, 1, 1, 1, 1), Arrays.asList(1, 1, 1, 1, 1, 1, 1, 1), - null, null, null, null, null//no data here (below edge of rect 0) - ), - counts - ); + null, + null, + null, + null, + null // no data here (below edge of rect 0) + ), + counts); // Search in no-where ville and get null counts - assertNull(getHmObj(query(params(baseParams, - "json.facet", "{f1:{type:heatmap, f:" + FIELD + ", geom:'ENVELOPE(0, 10, -80, -90)'}}"))).get("counts_ints2D")); - - Object v = getHmObj(query(params(baseParams, - "json.facet", "{f1:{type:heatmap, f:" + FIELD + ", format:png }}"))).get("counts_png"); + assertNull( + getHmObj( + query( + params( + baseParams, + "json.facet", + "{f1:{type:heatmap, f:" + FIELD + ", geom:'ENVELOPE(0, 10, -80, -90)'}}"))) + .get("counts_ints2D")); + + Object v = + getHmObj( + query( + params( + baseParams, + "json.facet", + "{f1:{type:heatmap, f:" + FIELD + ", format:png }}"))) + .get("counts_png"); assertTrue(v instanceof byte[]); - //simply test we can read the image + // simply test we can read the image assertNotNull(FacetHeatmap.PngHelper.readImage((byte[]) v)); - //good enough for this test method + // good enough for this test method } private NamedList getHmObj(QueryResponse response) { // classic faceting - final NamedList classicResp = (NamedList) response.getResponse().findRecursive("facet_counts", "facet_heatmaps", FIELD); + final NamedList classicResp = + (NamedList) + response.getResponse().findRecursive("facet_counts", "facet_heatmaps", FIELD); if (classicResp != null) { return classicResp; } @@ -316,7 +516,7 @@ private NamedList getHmObj(QueryResponse response) { @Test @Repeat(iterations = 3) public void testPng() { - //We test via round-trip randomized data: + // We test via round-trip randomized data: // Make random data int columns = random().nextInt(100) + 1; @@ -327,7 +527,7 
@@ public void testPng() { if (ri >= 0 && ri <= 3) { counts[i] = ri; // 0 thru 3 will be made common } else if (ri > 3) { - counts[i] = random().nextInt(Integer.MAX_VALUE); //lots of other possible values up to max + counts[i] = random().nextInt(Integer.MAX_VALUE); // lots of other possible values up to max } } // Round-trip @@ -342,8 +542,7 @@ public void testPng() { // Test equal assertEquals(counts.length, countsOut.length); for (int i = 0; i < countsOut.length; i++) { - assertEquals(counts[i], countsOut[i] - base);//back out the base input to prove we added + assertEquals(counts[i], countsOut[i] - base); // back out the base input to prove we added } } - } diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetJoinDomain.java b/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetJoinDomain.java index 7b2c277a110..71b5cdda167 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetJoinDomain.java +++ b/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetJoinDomain.java @@ -27,7 +27,6 @@ import java.util.Map; import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; - import org.apache.lucene.util.TestUtil; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; @@ -50,14 +49,13 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - *

<p> - * Tests randomized JSON Facets, sometimes using query 'join' domain transfers and/or domain 'filter' options - * </p> - * <p> - * The results of each facet constraint count will be compared with a verification query using an equivalent filter - * </p> - * +/** + * Tests randomized JSON Facets, sometimes using query 'join' domain transfers and/or domain + * 'filter' options + * + * <p>
The results of each facet constraint count will be compared with a verification query using an + * equivalent filter + * * @see TestCloudPivotFacet */ public class TestCloudJSONFacetJoinDomain extends SolrCloudTestCase { @@ -73,12 +71,16 @@ public class TestCloudJSONFacetJoinDomain extends SolrCloudTestCase { // NOTE: set to 'true' to see if refinement testing is adequate (should get fails occasionally) private static final boolean FORCE_DISABLE_REFINEMENT = false; - - /** Multivalued string field suffixes that can be randomized for testing diff facet/join code paths */ - private static final String[] STR_FIELD_SUFFIXES = new String[] { "_ss", "_sds", "_sdsS" }; - /** Multivalued int field suffixes that can be randomized for testing diff facet/join code paths */ - private static final String[] INT_FIELD_SUFFIXES = new String[] { "_is", "_ids", "_idsS" }; - + + /** + * Multivalued string field suffixes that can be randomized for testing diff facet/join code paths + */ + private static final String[] STR_FIELD_SUFFIXES = new String[] {"_ss", "_sds", "_sdsS"}; + /** + * Multivalued int field suffixes that can be randomized for testing diff facet/join code paths + */ + private static final String[] INT_FIELD_SUFFIXES = new String[] {"_is", "_ids", "_idsS"}; + /** A basic client for operations at the cloud level, default collection will be set */ private static CloudSolrClient CLOUD_CLIENT; /** One client per node */ @@ -87,23 +89,25 @@ public class TestCloudJSONFacetJoinDomain extends SolrCloudTestCase { @BeforeClass private static void createMiniSolrCloudCluster() throws Exception { // sanity check constants - assertTrue("bad test constants: some suffixes will never be tested", - (STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) && (INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM)); - + assertTrue( + "bad test constants: some suffixes will never be tested", + (STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) && (INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM)); + // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); - + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); + // multi replicas should not matter... final int repFactor = usually() ? 1 : 2; // ... but we definitely want to test multiple shards - final int numShards = TestUtil.nextInt(random(), 1, (usually() ? 2 :3)); + final int numShards = TestUtil.nextInt(random(), 1, (usually() ? 2 : 3)); final int numNodes = (numShards * repFactor); - + final String configName = DEBUG_LABEL + "_config-set"; final Path configDir = TEST_COLL1_CONF(); - + configureCluster(numNodes).addConfig(configName, configDir).configure(); - + Map collectionProperties = new LinkedHashMap<>(); collectionProperties.put("config", "solrconfig-tlog.xml"); collectionProperties.put("schema", "schema_latest.xml"); @@ -123,13 +127,13 @@ private static void createMiniSolrCloudCluster() throws Exception { final int numDocs = atLeast(100); for (int id = 0; id < numDocs; id++) { - SolrInputDocument doc = sdoc("id", ""+id); + SolrInputDocument doc = sdoc("id", "" + id); for (int fieldNum = 0; fieldNum < MAX_FIELD_NUM; fieldNum++) { // NOTE: some docs may have no value in a field final int numValsThisDoc = TestUtil.nextInt(random(), 0, (usually() ? 
3 : 6)); for (int v = 0; v < numValsThisDoc; v++) { final String fieldValue = randFieldValue(fieldNum); - + // for each fieldNum, there are actaully two fields: one string, and one integer doc.addField(field(STR_FIELD_SUFFIXES, fieldNum), fieldValue); doc.addField(field(INT_FIELD_SUFFIXES, fieldNum), fieldValue); @@ -137,18 +141,18 @@ private static void createMiniSolrCloudCluster() throws Exception { } CLOUD_CLIENT.add(doc); if (random().nextInt(100) < 1) { - CLOUD_CLIENT.commit(); // commit 1% of the time to create new segments + CLOUD_CLIENT.commit(); // commit 1% of the time to create new segments } if (random().nextInt(100) < 5) { - CLOUD_CLIENT.add(doc); // duplicate the doc 5% of the time to create deleted docs + CLOUD_CLIENT.add(doc); // duplicate the doc 5% of the time to create deleted docs } } CLOUD_CLIENT.commit(); } /** - * Given a (random) number, and a (static) array of possible suffixes returns a consistent field name that - * uses that number and one of hte specified suffixes in it's name. + * Given a (random) number, and a (static) array of possible suffixes returns a consistent field + * name that uses that number and one of the specified suffixes in its name. * * @see #STR_FIELD_SUFFIXES * @see #INT_FIELD_SUFFIXES @@ -157,22 +161,24 @@ private static void createMiniSolrCloudCluster() throws Exception { */ private static String field(final String[] suffixes, final int fieldNum) { assert fieldNum < MAX_FIELD_NUM; - + final String suffix = suffixes[fieldNum % suffixes.length]; return "field_" + fieldNum + suffix; } + private static String strfield(final int fieldNum) { return field(STR_FIELD_SUFFIXES, fieldNum); } + private static String intfield(final int fieldNum) { return field(INT_FIELD_SUFFIXES, fieldNum); } /** - * Given a (random) field number, returns a random (integer based) value for that field. - * NOTE: The number of unique values in each field is constant according to {@link #UNIQUE_FIELD_VALS} - * but the precise range of values will vary for each unique field number, such that cross field joins - * will match fewer documents based on how far apart the field numbers are. + * Given a (random) field number, returns a random (integer based) value for that field. NOTE: The + * number of unique values in each field is constant according to {@link #UNIQUE_FIELD_VALS} but + * the precise range of values will vary for each unique field number, such that cross + * field joins will match fewer documents based on how far apart the field numbers are. * * @see #UNIQUE_FIELD_VALS * @see #field @@ -181,7 +187,6 @@ private static String randFieldValue(final int fieldNum) { return "" + (fieldNum + TestUtil.nextInt(random(), 1, UNIQUE_FIELD_VALS)); } - @AfterClass private static void afterClass() throws Exception { if (null != CLOUD_CLIENT) { @@ -198,18 +203,27 @@ private static void afterClass() throws Exception { public void testMalformedGivesError() throws Exception { ignoreException(".*'join' domain change.*"); - - for (String join : Arrays.asList("bogus", - "{ }", - "{ from:null, to:foo_s }", - "{ from:foo_s }", - "{ from:foo_s, to:foo_s, bogus:'what what?' }", - "{ to:foo_s, bogus:'what what?'
}")) { - SolrException e = expectThrows(SolrException.class, () -> { - final SolrParams req = params("q", "*:*", "json.facet", - "{ x : { type:terms, field:x_s, domain: { join:"+join+" } } }"); - getRandClient(random()).request(new QueryRequest(req)); - }); + + for (String join : + Arrays.asList( + "bogus", + "{ }", + "{ from:null, to:foo_s }", + "{ from:foo_s }", + "{ from:foo_s, to:foo_s, bogus:'what what?' }", + "{ to:foo_s, bogus:'what what?' }")) { + SolrException e = + expectThrows( + SolrException.class, + () -> { + final SolrParams req = + params( + "q", + "*:*", + "json.facet", + "{ x : { type:terms, field:x_s, domain: { join:" + join + " } } }"); + getRandClient(random()).request(new QueryRequest(req)); + }); assertEquals(join + " -> " + e, SolrException.ErrorCode.BAD_REQUEST.code, e.code()); assertTrue(join + " -> " + e, e.getMessage().contains("'join' domain change")); } @@ -219,42 +233,65 @@ public void testJoinMethodSyntax() throws Exception { // 'method' value that doesn't exist at all { final String joinJson = "{from:foo, to:bar, method:invalidValue}"; - SolrException e = expectThrows(SolrException.class, () -> { - final SolrParams req = params("q", "*:*", "json.facet", - "{ x : { type:terms, field:x_s, domain: { join:"+joinJson+" } } }"); - getRandClient(random()).request(new QueryRequest(req)); - }); + SolrException e = + expectThrows( + SolrException.class, + () -> { + final SolrParams req = + params( + "q", + "*:*", + "json.facet", + "{ x : { type:terms, field:x_s, domain: { join:" + joinJson + " } } }"); + getRandClient(random()).request(new QueryRequest(req)); + }); assertEquals(joinJson + " -> " + e, SolrException.ErrorCode.BAD_REQUEST.code, e.code()); - assertTrue(joinJson + " -> " + e, e.getMessage().contains("join method 'invalidValue' not supported")); + assertTrue( + joinJson + " -> " + e, + e.getMessage().contains("join method 'invalidValue' not supported")); } // 'method' value that exists on joins generally but isn't supported for join domain transforms { final String joinJson = "{from:foo, to:bar, method:crossCollection}"; - SolrException e = expectThrows(SolrException.class, () -> { - final SolrParams req = params("q", "*:*", "json.facet", - "{ x : { type:terms, field:x_s, domain: { join:"+joinJson+" } } }"); - getRandClient(random()).request(new QueryRequest(req)); - }); + SolrException e = + expectThrows( + SolrException.class, + () -> { + final SolrParams req = + params( + "q", + "*:*", + "json.facet", + "{ x : { type:terms, field:x_s, domain: { join:" + joinJson + " } } }"); + getRandClient(random()).request(new QueryRequest(req)); + }); assertEquals(joinJson + " -> " + e, SolrException.ErrorCode.BAD_REQUEST.code, e.code()); - assertTrue(joinJson + " -> " + e, e.getMessage().contains("Join method crossCollection not supported")); + assertTrue( + joinJson + " -> " + e, + e.getMessage().contains("Join method crossCollection not supported")); } - // Valid, supported method value { - final String joinJson = "{from:" +strfield(1)+ ", to:"+strfield(1)+", method:index}"; - final SolrParams req = params("q", "*:*", "json.facet", "{ x : { type:terms, field:x_s, domain: { join:"+joinJson+" } } }"); - getRandClient(random()).request(new QueryRequest(req)); - // For the purposes of this test, we're not interested in the response so much as that Solr will accept a valid 'method' value + final String joinJson = "{from:" + strfield(1) + ", to:" + strfield(1) + ", method:index}"; + final SolrParams req = + params( + "q", + "*:*", + "json.facet", + "{ x : { type:terms, 
field:x_s, domain: { join:" + joinJson + " } } }"); + getRandClient(random()).request(new QueryRequest(req)); + // For the purposes of this test, we're not interested in the response so much as that Solr + // will accept a valid 'method' value } } public void testSanityCheckDomainMethods() throws Exception { - { + { final JoinDomain empty = new JoinDomain(null, null, null); assertEquals(null, empty.toJSONFacetParamValue()); - final SolrParams out = empty.applyDomainToQuery("safe_key", params("q","qqq")); + final SolrParams out = empty.applyDomainToQuery("safe_key", params("q", "qqq")); assertNotNull(out); assertEquals(null, out.get("safe_key")); assertEquals("qqq", out.get("q")); @@ -262,61 +299,71 @@ public void testSanityCheckDomainMethods() throws Exception { { final JoinDomain join = new JoinDomain("xxx", "yyy", null); assertEquals("domain:{join:{from:xxx,to:yyy}}", join.toJSONFacetParamValue().toString()); - final SolrParams out = join.applyDomainToQuery("safe_key", params("q","qqq")); + final SolrParams out = join.applyDomainToQuery("safe_key", params("q", "qqq")); assertNotNull(out); assertEquals("qqq", out.get("safe_key")); assertEquals("{!join from=xxx to=yyy v=$safe_key}", out.get("q")); - } { final JoinDomain filter = new JoinDomain(null, null, "zzz"); assertEquals("domain:{filter:'zzz'}", filter.toJSONFacetParamValue().toString()); - final SolrParams out = filter.applyDomainToQuery("safe_key", params("q","qqq")); + final SolrParams out = filter.applyDomainToQuery("safe_key", params("q", "qqq")); assertNotNull(out); assertEquals(null, out.get("safe_key")); assertEquals("zzz AND qqq", out.get("q")); } { final JoinDomain both = new JoinDomain("xxx", "yyy", "zzz"); - assertEquals("domain:{join:{from:xxx,to:yyy},filter:'zzz'}", both.toJSONFacetParamValue().toString()); - final SolrParams out = both.applyDomainToQuery("safe_key", params("q","qqq")); + assertEquals( + "domain:{join:{from:xxx,to:yyy},filter:'zzz'}", both.toJSONFacetParamValue().toString()); + final SolrParams out = both.applyDomainToQuery("safe_key", params("q", "qqq")); assertNotNull(out); assertEquals("qqq", out.get("safe_key")); assertEquals("zzz AND {!join from=xxx to=yyy v=$safe_key}", out.get("q")); } } - /** - * Test some small, hand crafted, but non-trivial queries that are - * easier to trace/debug then a pure random monstrosity. - * (ie: if something obvious gets broken, this test may fail faster and in a more obvious way then testRandom) + /** + * Test some small, hand crafted, but non-trivial queries that are easier to trace/debug then a + * pure random monstrosity. 
(ie: if something obvious gets broken, this test may fail faster and + * in a more obvious way then testRandom) */ public void testBespoke() throws Exception { { // sanity check our test methods can handle a query matching no docs - Map facets = new LinkedHashMap<>(); - TermFacet top = new TermFacet(strfield(9), new JoinDomain(strfield(5), strfield(9), strfield(9)+":[* TO *]")); - top.subFacets.put("sub", new TermFacet(strfield(11), new JoinDomain(strfield(8), strfield(8), null))); + Map facets = new LinkedHashMap<>(); + TermFacet top = + new TermFacet( + strfield(9), new JoinDomain(strfield(5), strfield(9), strfield(9) + ":[* TO *]")); + top.subFacets.put( + "sub", new TermFacet(strfield(11), new JoinDomain(strfield(8), strfield(8), null))); facets.put("empty_top", top); final AtomicInteger maxBuckets = new AtomicInteger(UNIQUE_FIELD_VALS); assertFacetCountsAreCorrect(maxBuckets, facets, strfield(7) + ":bogus"); - assertEquals("Empty search result shouldn't have found a single bucket", - UNIQUE_FIELD_VALS, maxBuckets.get()); + assertEquals( + "Empty search result shouldn't have found a single bucket", + UNIQUE_FIELD_VALS, + maxBuckets.get()); } - - { // sanity check our test methods can handle a query where a facet filter prevents any doc from having terms - Map facets = new LinkedHashMap<>(); + + { // sanity check our test methods can handle a query where a facet filter prevents any doc from + // having terms + Map facets = new LinkedHashMap<>(); TermFacet top = new TermFacet(strfield(9), new JoinDomain(null, null, "-*:*")); - top.subFacets.put("sub", new TermFacet(strfield(11), new JoinDomain(strfield(8), strfield(8), null))); + top.subFacets.put( + "sub", new TermFacet(strfield(11), new JoinDomain(strfield(8), strfield(8), null))); facets.put("filtered_top", top); final AtomicInteger maxBuckets = new AtomicInteger(UNIQUE_FIELD_VALS); assertFacetCountsAreCorrect(maxBuckets, facets, "*:*"); - assertEquals("Empty join filter shouldn't have found a single bucket", - UNIQUE_FIELD_VALS, maxBuckets.get()); + assertEquals( + "Empty join filter shouldn't have found a single bucket", + UNIQUE_FIELD_VALS, + maxBuckets.get()); } - - { // sanity check our test methods can handle a query where a facet filter prevents any doc from having sub-terms - Map facets = new LinkedHashMap<>(); + + { // sanity check our test methods can handle a query where a facet filter prevents any doc from + // having sub-terms + Map facets = new LinkedHashMap<>(); TermFacet top = new TermFacet(strfield(9), new JoinDomain(strfield(8), strfield(8), null)); top.subFacets.put("sub", new TermFacet(strfield(11), new JoinDomain(null, null, "-*:*"))); facets.put("filtered_top", top); @@ -324,116 +371,206 @@ public void testBespoke() throws Exception { assertFacetCountsAreCorrect(maxBuckets, facets, "*:*"); assertTrue("Didn't check a single bucket???", maxBuckets.get() < UNIQUE_FIELD_VALS); } - + { // strings - Map facets = new LinkedHashMap<>(); - TermFacet top = new TermFacet(strfield(9), new JoinDomain(strfield(5), strfield(9), strfield(9)+":[* TO *]")); - top.subFacets.put("facet_5", new TermFacet(strfield(11), new JoinDomain(strfield(8), strfield(8), null))); + Map facets = new LinkedHashMap<>(); + TermFacet top = + new TermFacet( + strfield(9), new JoinDomain(strfield(5), strfield(9), strfield(9) + ":[* TO *]")); + top.subFacets.put( + "facet_5", new TermFacet(strfield(11), new JoinDomain(strfield(8), strfield(8), null))); facets.put("facet_4", top); final AtomicInteger maxBuckets = new AtomicInteger(UNIQUE_FIELD_VALS * 
UNIQUE_FIELD_VALS); - assertFacetCountsAreCorrect(maxBuckets, facets, "("+strfield(7)+":16 OR "+strfield(9)+":16 OR "+strfield(6)+":19 OR "+strfield(0)+":11)"); - assertTrue("Didn't check a single bucket???", maxBuckets.get() < UNIQUE_FIELD_VALS * UNIQUE_FIELD_VALS); + assertFacetCountsAreCorrect( + maxBuckets, + facets, + "(" + + strfield(7) + + ":16 OR " + + strfield(9) + + ":16 OR " + + strfield(6) + + ":19 OR " + + strfield(0) + + ":11)"); + assertTrue( + "Didn't check a single bucket???", + maxBuckets.get() < UNIQUE_FIELD_VALS * UNIQUE_FIELD_VALS); } { // ints - Map facets = new LinkedHashMap<>(); + Map facets = new LinkedHashMap<>(); TermFacet top = new TermFacet(intfield(9), new JoinDomain(intfield(5), intfield(9), null)); facets.put("top", top); final AtomicInteger maxBuckets = new AtomicInteger(UNIQUE_FIELD_VALS * UNIQUE_FIELD_VALS); - assertFacetCountsAreCorrect(maxBuckets, facets, "("+intfield(7)+":16 OR "+intfield(3)+":13)"); - assertTrue("Didn't check a single bucket???", maxBuckets.get() < UNIQUE_FIELD_VALS * UNIQUE_FIELD_VALS); + assertFacetCountsAreCorrect( + maxBuckets, facets, "(" + intfield(7) + ":16 OR " + intfield(3) + ":13)"); + assertTrue( + "Didn't check a single bucket???", + maxBuckets.get() < UNIQUE_FIELD_VALS * UNIQUE_FIELD_VALS); } { // some domains with filter only, no actual join - Map facets = new LinkedHashMap<>(); - TermFacet top = new TermFacet(strfield(9), new JoinDomain(null, null, strfield(9)+":[* TO *]")); - top.subFacets.put("facet_5", new TermFacet(strfield(11), new JoinDomain(null, null, strfield(3)+":[* TO 5]"))); + Map facets = new LinkedHashMap<>(); + TermFacet top = + new TermFacet(strfield(9), new JoinDomain(null, null, strfield(9) + ":[* TO *]")); + top.subFacets.put( + "facet_5", + new TermFacet(strfield(11), new JoinDomain(null, null, strfield(3) + ":[* TO 5]"))); facets.put("top", top); final AtomicInteger maxBuckets = new AtomicInteger(UNIQUE_FIELD_VALS * UNIQUE_FIELD_VALS); - assertFacetCountsAreCorrect(maxBuckets, facets, "("+strfield(7)+":16 OR "+strfield(9)+":16 OR "+strfield(6)+":19 OR "+strfield(0)+":11)"); - assertTrue("Didn't check a single bucket???", maxBuckets.get() < UNIQUE_FIELD_VALS * UNIQUE_FIELD_VALS); - + assertFacetCountsAreCorrect( + maxBuckets, + facets, + "(" + + strfield(7) + + ":16 OR " + + strfield(9) + + ":16 OR " + + strfield(6) + + ":19 OR " + + strfield(0) + + ":11)"); + assertTrue( + "Didn't check a single bucket???", + maxBuckets.get() < UNIQUE_FIELD_VALS * UNIQUE_FIELD_VALS); } { // low limits, explicit refinement - Map facets = new LinkedHashMap<>(); - TermFacet top = new TermFacet(strfield(9), - new JoinDomain(strfield(5), strfield(9), strfield(9)+":[* TO *]"), - 5, 0, true); - top.subFacets.put("facet_5", new TermFacet(strfield(11), - new JoinDomain(strfield(8), strfield(8), null), - 10, 0, true)); + Map facets = new LinkedHashMap<>(); + TermFacet top = + new TermFacet( + strfield(9), + new JoinDomain(strfield(5), strfield(9), strfield(9) + ":[* TO *]"), + 5, + 0, + true); + top.subFacets.put( + "facet_5", + new TermFacet(strfield(11), new JoinDomain(strfield(8), strfield(8), null), 10, 0, true)); facets.put("facet_4", top); final AtomicInteger maxBuckets = new AtomicInteger(5 * 10); - assertFacetCountsAreCorrect(maxBuckets, facets, "("+strfield(7)+":6 OR "+strfield(9)+":6 OR "+strfield(6)+":19 OR "+strfield(0)+":11)"); + assertFacetCountsAreCorrect( + maxBuckets, + facets, + "(" + + strfield(7) + + ":6 OR " + + strfield(9) + + ":6 OR " + + strfield(6) + + ":19 OR " + + strfield(0) + + ":11)"); 
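The bespoke cases above hand-assemble nested join-domain facets; as a reading aid, here is a minimal SolrJ sketch of the request shape they produce (field names and facet keys are hypothetical stand-ins for the test's field(...) helpers, not values taken from this diff):

```java
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;

/** Illustrative sketch only; not part of the patched test. */
class JoinDomainFacetSketch {
  // A terms facet whose domain is rewritten by a join (from/to) plus a filter,
  // carrying one nested sub-facet that applies a second join.
  static QueryResponse facetWithJoinDomain(SolrClient client) throws Exception {
    SolrQuery q = new SolrQuery("*:*");
    q.setRows(0);
    q.set("json.facet",
        "{ facet_4 : { type:terms, field:field_9_ss,"
            + " domain: { join: { from:field_5_ss, to:field_9_ss },"
            + " filter:'field_9_ss:[* TO *]' },"
            + " facet: { facet_5 : { type:terms, field:field_11_ss,"
            + " domain: { join: { from:field_8_ss, to:field_8_ss } } } } } }");
    return client.query(q);
  }
}
```

The equivalent verification query, per testSanityCheckDomainMethods earlier in this file, ANDs the filter in front of the original q and wraps it in the {!join} parser ("zzz AND {!join from=xxx to=yyy v=$safe_key}"), which is exactly the rewrite applyDomainToQuery performs further down.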
assertTrue("Didn't check a single bucket???", maxBuckets.get() < 5 * 10); } - + { // low limit, high overrequest - Map facets = new LinkedHashMap<>(); - TermFacet top = new TermFacet(strfield(9), - new JoinDomain(strfield(5), strfield(9), strfield(9)+":[* TO *]"), - 5, UNIQUE_FIELD_VALS + 10, false); - top.subFacets.put("facet_5", new TermFacet(strfield(11), - new JoinDomain(strfield(8), strfield(8), null), - 10, UNIQUE_FIELD_VALS + 10, false)); + Map facets = new LinkedHashMap<>(); + TermFacet top = + new TermFacet( + strfield(9), + new JoinDomain(strfield(5), strfield(9), strfield(9) + ":[* TO *]"), + 5, + UNIQUE_FIELD_VALS + 10, + false); + top.subFacets.put( + "facet_5", + new TermFacet( + strfield(11), + new JoinDomain(strfield(8), strfield(8), null), + 10, + UNIQUE_FIELD_VALS + 10, + false)); facets.put("facet_4", top); final AtomicInteger maxBuckets = new AtomicInteger(5 * 10); - assertFacetCountsAreCorrect(maxBuckets, facets, "("+strfield(7)+":6 OR "+strfield(9)+":6 OR "+strfield(6)+":19 OR "+strfield(0)+":11)"); + assertFacetCountsAreCorrect( + maxBuckets, + facets, + "(" + + strfield(7) + + ":6 OR " + + strfield(9) + + ":6 OR " + + strfield(6) + + ":19 OR " + + strfield(0) + + ":11)"); assertTrue("Didn't check a single bucket???", maxBuckets.get() < 5 * 10); } - + { // low limit, low overrequest, explicit refinement - Map facets = new LinkedHashMap<>(); - TermFacet top = new TermFacet(strfield(9), - new JoinDomain(strfield(5), strfield(9), strfield(9)+":[* TO *]"), - 5, 7, true); - top.subFacets.put("facet_5", new TermFacet(strfield(11), - new JoinDomain(strfield(8), strfield(8), null), - 10, 7, true)); + Map facets = new LinkedHashMap<>(); + TermFacet top = + new TermFacet( + strfield(9), + new JoinDomain(strfield(5), strfield(9), strfield(9) + ":[* TO *]"), + 5, + 7, + true); + top.subFacets.put( + "facet_5", + new TermFacet(strfield(11), new JoinDomain(strfield(8), strfield(8), null), 10, 7, true)); facets.put("facet_4", top); final AtomicInteger maxBuckets = new AtomicInteger(5 * 10); - assertFacetCountsAreCorrect(maxBuckets, facets, "("+strfield(7)+":6 OR "+strfield(9)+":6 OR "+strfield(6)+":19 OR "+strfield(0)+":11)"); + assertFacetCountsAreCorrect( + maxBuckets, + facets, + "(" + + strfield(7) + + ":6 OR " + + strfield(9) + + ":6 OR " + + strfield(6) + + ":19 OR " + + strfield(0) + + ":11)"); assertTrue("Didn't check a single bucket???", maxBuckets.get() < 5 * 10); } - } public void testTheTestRandomRefineParam() { // sanity check that randomRefineParam never violates isRefinementNeeded - // (should be imposisble ... unless someone changes/breaks the randomization logic in the future) + // (should be imposisble ... 
unless someone changes/breaks the randomization logic in the + // future) final int numIters = atLeast(100); for (int iter = 0; iter < numIters; iter++) { final Integer limit = TermFacet.randomLimitParam(random()); final Integer overrequest = TermFacet.randomOverrequestParam(random()); final Boolean refine = TermFacet.randomRefineParam(random(), limit, overrequest); if (TermFacet.isRefinementNeeded(limit, overrequest)) { - assertEquals("limit: " + limit + ", overrequest: " + overrequest + ", refine: " + refine, - Boolean.TRUE, refine); + assertEquals( + "limit: " + limit + ", overrequest: " + overrequest + ", refine: " + refine, + Boolean.TRUE, + refine); } } } - + public void testTheTestTermFacetShouldFreakOutOnBadRefineOptions() { - expectThrows(AssertionError.class, () -> { - final TermFacet bogus = new TermFacet("foo", null, 5, 0, false); - }); + expectThrows( + AssertionError.class, + () -> { + final TermFacet bogus = new TermFacet("foo", null, 5, 0, false); + }); } public void testRandom() throws Exception { // we put a safety valve in place on the maximum number of buckets that we are willing to verify - // across *all* the queries that we do. - // that way if the randomized queries we build all have relatively small facets, so be it, but if - // we get a really big one early on, we can test as much as possible, skip other iterations. + // across *all* the queries that we do. that way if the randomized queries we build all have + // relatively small facets, so be it, but if we get a really big one early on, we can test as + // much as possible, skip other iterations. // - // (deeply nested facets may contain more buckets then the max, but we won't *check* all of them) + // (deeply nested facets may contain more buckets then the max, but we won't *check* all of + // them) final int maxBucketsAllowed = atLeast(2000); final AtomicInteger maxBucketsToCheck = new AtomicInteger(maxBucketsAllowed); - + final int numIters = atLeast(20); for (int iter = 0; iter < numIters && 0 < maxBucketsToCheck.get(); iter++) { - assertFacetCountsAreCorrect(maxBucketsToCheck, TermFacet.buildRandomFacets(), buildRandomQuery()); + assertFacetCountsAreCorrect( + maxBucketsToCheck, TermFacet.buildRandomFacets(), buildRandomQuery()); } assertTrue("Didn't check a single bucket???", maxBucketsToCheck.get() < maxBucketsAllowed); } @@ -445,7 +582,7 @@ public void testRandom() throws Exception { * @see #field */ private static String buildRandomQuery() { - if (0 == TestUtil.nextInt(random(), 0,10)) { + if (0 == TestUtil.nextInt(random(), 0, 10)) { return "*:*"; } final int numClauses = TestUtil.nextInt(random(), 3, 10); @@ -457,20 +594,21 @@ private static String buildRandomQuery() { } return "(" + String.join(" OR ", clauses) + ")"; } - + /** - * Given a set of (potentially nested) term facets, and a base query string, asserts that - * the actual counts returned when executing that query with those facets match the expected results + * Given a set of (potentially nested) term facets, and a base query string, asserts that the + * actual counts returned when executing that query with those facets match the expected results * of filtering on the equivalent facet terms+domain */ - private void assertFacetCountsAreCorrect(final AtomicInteger maxBucketsToCheck, - Map expected, - final String query) throws SolrServerException, IOException { + private void assertFacetCountsAreCorrect( + final AtomicInteger maxBucketsToCheck, Map expected, final String query) + throws SolrServerException, IOException { - final SolrParams 
baseParams = params("q", query, "rows","0"); - final SolrParams facetParams = params("json.facet", ""+TermFacet.toJSONFacetParamValue(expected)); + final SolrParams baseParams = params("q", query, "rows", "0"); + final SolrParams facetParams = + params("json.facet", "" + TermFacet.toJSONFacetParamValue(expected)); final SolrParams initParams = SolrParams.wrapAppended(facetParams, baseParams); - + log.info("Doing full run: {}", initParams); QueryResponse rsp = null; @@ -482,15 +620,16 @@ private void assertFacetCountsAreCorrect(final AtomicInteger maxBucketsToCheck, topNamedList = rsp.getResponse(); assertNotNull(initParams + " is null topNamedList?", topNamedList); } catch (Exception e) { - throw new RuntimeException("init query failed: " + initParams + ": " + - e.getMessage(), e); + throw new RuntimeException("init query failed: " + initParams + ": " + e.getMessage(), e); } try { @SuppressWarnings("unchecked") final NamedList facetResponse = (NamedList) topNamedList.get("facets"); assertNotNull("null facet results?", facetResponse); - assertEquals("numFound mismatch with top count?", - rsp.getResults().getNumFound(), ((Number)facetResponse.get("count")).longValue()); + assertEquals( + "numFound mismatch with top count?", + rsp.getResults().getNumFound(), + ((Number) facetResponse.get("count")).longValue()); if (0 == rsp.getResults().getNumFound()) { // when the query matches nothing, we should expect no top level facets expected = Collections.emptyMap(); @@ -499,20 +638,22 @@ private void assertFacetCountsAreCorrect(final AtomicInteger maxBucketsToCheck, } catch (AssertionError e) { throw new AssertionError(initParams + " ===> " + topNamedList + " --> " + e.getMessage(), e); } finally { - log.info("Ending full run"); + log.info("Ending full run"); } } - /** - * Recursive Helper method that walks the actual facet response, comparing the counts to the expected output - * based on the equivalent filters generated from the original TermFacet. + /** + * Recursive Helper method that walks the actual facet response, comparing the counts to the + * expected output based on the equivalent filters generated from the original TermFacet. 
*/ - private void assertFacetCountsAreCorrect(final AtomicInteger maxBucketsToCheck, - final Map expected, - final SolrParams baseParams, - final NamedList actualFacetResponse) throws SolrServerException, IOException { + private void assertFacetCountsAreCorrect( + final AtomicInteger maxBucketsToCheck, + final Map expected, + final SolrParams baseParams, + final NamedList actualFacetResponse) + throws SolrServerException, IOException { - for (Map.Entry entry : expected.entrySet()) { + for (Map.Entry entry : expected.entrySet()) { final String facetKey = entry.getKey(); final TermFacet facet = entry.getValue(); @SuppressWarnings("unchecked") @@ -524,48 +665,58 @@ private void assertFacetCountsAreCorrect(final AtomicInteger maxBucketsToCheck, if (buckets.isEmpty()) { // should only happen if the baseParams query does not match any docs with our field X - final long docsWithField = getRandClient(random()).query - (facet.applyValueConstraintAndDomain(baseParams, facetKey, "[* TO *]")).getResults().getNumFound(); - assertEquals(facetKey + " has no buckets, but docs in query exist with field: " + facet.field, - 0, docsWithField); + final long docsWithField = + getRandClient(random()) + .query(facet.applyValueConstraintAndDomain(baseParams, facetKey, "[* TO *]")) + .getResults() + .getNumFound(); + assertEquals( + facetKey + " has no buckets, but docs in query exist with field: " + facet.field, + 0, + docsWithField); } - + for (NamedList bucket : buckets) { final long count = ((Number) bucket.get("count")).longValue(); final String fieldVal = bucket.get("val").toString(); // int or stringified int // change our query to filter on the fieldVal, and wrap in the facet domain (if any) - final SolrParams verifyParams = facet.applyValueConstraintAndDomain(baseParams, facetKey, fieldVal); + final SolrParams verifyParams = + facet.applyValueConstraintAndDomain(baseParams, facetKey, fieldVal); // check the count for this bucket - assertEquals(facetKey + ": " + verifyParams, - count, getRandClient(random()).query(verifyParams).getResults().getNumFound()); + assertEquals( + facetKey + ": " + verifyParams, + count, + getRandClient(random()).query(verifyParams).getResults().getNumFound()); if (maxBucketsToCheck.decrementAndGet() <= 0) { return; } - + // recursively check subFacets - if (! facet.subFacets.isEmpty()) { + if (!facet.subFacets.isEmpty()) { assertFacetCountsAreCorrect(maxBucketsToCheck, facet.subFacets, verifyParams, bucket); } } } - assertTrue("facets have unexpected keys left over: " + actualFacetResponse, - // should alwasy be a count, maybe a 'val' if we're a subfacet - (actualFacetResponse.size() == expected.size() + 1) || - (actualFacetResponse.size() == expected.size() + 2)); + assertTrue( + "facets have unexpected keys left over: " + actualFacetResponse, + // should alwasy be a count, maybe a 'val' if we're a subfacet + (actualFacetResponse.size() == expected.size() + 1) + || (actualFacetResponse.size() == expected.size() + 2)); } - /** - * Trivial data structure for modeling a simple terms facet that can be written out as a json.facet param. + * Trivial data structure for modeling a simple terms facet that can be written out as a + * json.facet param. * - * Doesn't do any string escaping or quoting, so don't use whitespace or reserved json characters + *
<p>
Doesn't do any string escaping or quoting, so don't use whitespace or reserved json + * characters */ private static final class TermFacet { public final String field; - public final Map subFacets = new LinkedHashMap<>(); + public final Map subFacets = new LinkedHashMap<>(); public final JoinDomain domain; // may be null public final Integer limit; // may be null public final Integer overrequest; // may be null @@ -575,7 +726,9 @@ private static final class TermFacet { public TermFacet(String field, JoinDomain domain) { this(field, domain, UNIQUE_FIELD_VALS, 0, false); } - public TermFacet(String field, JoinDomain domain, Integer limit, Integer overrequest, Boolean refine) { + + public TermFacet( + String field, JoinDomain domain, Integer limit, Integer overrequest, Boolean refine) { assert null != field; this.field = field; this.domain = domain; @@ -583,22 +736,26 @@ public TermFacet(String field, JoinDomain domain, Integer limit, Integer overreq this.overrequest = overrequest; this.refine = refine; if (isRefinementNeeded(limit, overrequest)) { - assertEquals("Invalid refine param based on limit & overrequest: " + this.toString(), - Boolean.TRUE, refine); + assertEquals( + "Invalid refine param based on limit & overrequest: " + this.toString(), + Boolean.TRUE, + refine); } } - /** + /** * Returns new SolrParams that: + * *
<ul> - * <li>copy the original SolrParams</li> - * <li>modify/wrap the original "q" param to capture the domain change for this facet (if any)</li> - * <li>add a filter query against this field with the specified value</li> + * <li>copy the original SolrParams</li> + * <li>modify/wrap the original "q" param to capture the domain change for this facet (if any)</li> + * <li>add a filter query against this field with the specified value</li> * </ul>
- * + * * @see JoinDomain#applyDomainToQuery */ - public SolrParams applyValueConstraintAndDomain(SolrParams orig, String facetKey, String facetVal) { + public SolrParams applyValueConstraintAndDomain( + SolrParams orig, String facetKey, String facetVal) { // first wrap our original query in the domain if there is one... if (null != domain) { orig = domain.applyDomainToQuery(facetKey + "_q", orig); @@ -609,7 +766,7 @@ public SolrParams applyValueConstraintAndDomain(SolrParams orig, String facetKey return out; } - + /** * recursively generates the json.facet param value to use for testing this facet */ @@ -617,8 +774,9 @@ private CharSequence toJSONFacetParamValue() { final String limitStr = (null == limit) ? "" : (", limit:" + limit); final String overrequestStr = (null == overrequest) ? "" : (", overrequest:" + overrequest); final String refineStr = (null == refine) ? "" : ", refine:" + refine; - final StringBuilder sb = new StringBuilder("{ type:terms, field:" + field + limitStr + overrequestStr + refineStr); - if (! subFacets.isEmpty()) { + final StringBuilder sb = + new StringBuilder("{ type:terms, field:" + field + limitStr + overrequestStr + refineStr); + if (!subFacets.isEmpty()) { sb.append(", facet:"); sb.append(toJSONFacetParamValue(subFacets)); } @@ -631,12 +789,12 @@ private CharSequence toJSONFacetParamValue() { sb.append("}"); return sb; } - + /** - * Given a set of (possibly nested) facets, generates a suitable json.facet param value to - * use for testing them against in a solr request. + * Given a set of (possibly nested) facets, generates a suitable json.facet param + * value to use for testing them against in a solr request. */ - public static CharSequence toJSONFacetParamValue(Map facets) { + public static CharSequence toJSONFacetParamValue(Map facets) { assert null != facets; assert 0 < facets.size(); StringBuilder sb = new StringBuilder("{"); @@ -648,15 +806,16 @@ public static CharSequence toJSONFacetParamValue(Map facets) { sb.append("}"); return sb; } - + /** - * Factory method for generating some random (nested) facets. + * Factory method for generating some random (nested) facets. * - * For simplicity, each facet will have a unique key name, regardless of it's depth under other facets + *
<p>
For simplicity, each facet will have a unique key name, regardless of it's depth under + * other facets * * @see JoinDomain */ - public static Map buildRandomFacets() { + public static Map buildRandomFacets() { // for simplicity, use a unique facet key regardless of depth - simplifies verification AtomicInteger keyCounter = new AtomicInteger(0); final int maxDepth = TestUtil.nextInt(random(), 0, (usually() ? 2 : 3)); @@ -666,27 +825,29 @@ public static Map buildRandomFacets() { /** * picks a random value for the "limit" param, biased in favor of interesting test cases * - * @return a number to specify in the request, or null to specify nothing (trigger default behavior) + * @return a number to specify in the request, or null to specify nothing (trigger default + * behavior) * @see #UNIQUE_FIELD_VALS */ public static Integer randomLimitParam(Random r) { final int limit = 1 + r.nextInt(UNIQUE_FIELD_VALS * 2); if (limit >= UNIQUE_FIELD_VALS && r.nextBoolean()) { return -1; // unlimited - } else if (limit == DEFAULT_LIMIT && r.nextBoolean()) { + } else if (limit == DEFAULT_LIMIT && r.nextBoolean()) { return null; // sometimes, don't specify limit if it's the default } return limit; } - + /** * picks a random value for the "overrequest" param, biased in favor of interesting test cases * - * @return a number to specify in the request, or null to specify nothing (trigger default behavior) + * @return a number to specify in the request, or null to specify nothing (trigger default + * behavior) * @see #UNIQUE_FIELD_VALS */ public static Integer randomOverrequestParam(Random r) { - switch(r.nextInt(10)) { + switch (r.nextInt(10)) { case 0: case 1: case 2: @@ -696,23 +857,27 @@ public static Integer randomOverrequestParam(Random r) { case 5: return r.nextInt(UNIQUE_FIELD_VALS); // 20% ask for less them what's needed case 6: - return r.nextInt(Integer.MAX_VALUE); // 10%: completley random value, statisticaly more then enough - default: break; + return r.nextInt( + Integer.MAX_VALUE); // 10%: completley random value, statisticaly more then enough + default: + break; } // else.... either leave param unspecified (or redundently specify the -1 default) return r.nextBoolean() ? null : -1; } /** - * picks a random value for the "refine" param, that is garunteed to be suitable for - * the specified limit & overrequest params. + * picks a random value for the "refine" param, that is garunteed to be suitable for the + * specified limit & overrequest params. * - * @return a value to specify in the request, or null to specify nothing (trigger default behavior) + * @return a value to specify in the request, or null to specify nothing (trigger default + * behavior) * @see #randomLimitParam * @see #randomOverrequestParam * @see #UNIQUE_FIELD_VALS */ - public static Boolean randomRefineParam(Random r, Integer limitParam, Integer overrequestParam) { + public static Boolean randomRefineParam( + Random r, Integer limitParam, Integer overrequestParam) { if (isRefinementNeeded(limitParam, overrequestParam)) { return true; } @@ -724,11 +889,11 @@ public static Boolean randomRefineParam(Random r, Integer limitParam, Integer ov // explicitly or implicitly indicate refinement is not needed return r.nextBoolean() ? false : null; } - + /** - * Deterministicly identifies if the specified limit & overrequest params require - * a "refine:true" param be used in the the request, in order for the counts to be 100% accurate. 
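A condensed restatement of the rule isRefinementNeeded encodes, with worked numbers (the FORCE_DISABLE_REFINEMENT escape hatch shown in this diff is omitted, and the json.facet default limit of 10 is an assumption standing in for DEFAULT_LIMIT):

```java
// Sketch: refinement is required whenever phase #1 cannot be guaranteed to
// surface every one of the uniqueFieldVals values of a field.
static boolean refinementNeeded(Integer limitParam, Integer overrequestParam, int uniqueFieldVals) {
  final int limit = (null == limitParam) ? 10 : limitParam; // assumed DEFAULT_LIMIT
  final int overrequest = (null == overrequestParam) ? 0 : overrequestParam;
  return
      // negative overrequest means "use the default"; don't presume how much
      // that is, just check whether the limit alone covers every value
      (overrequest < 0 && limit < uniqueFieldVals)
      // an explicit overrequest is "enough" only if limit + overrequest
      // reaches the number of unique values per field
      || (overrequest >= 0 && (long) limit + overrequest < uniqueFieldVals);
}
// e.g. with 20 unique values: limit=5, overrequest=7  -> 5 + 7 = 12 < 20 -> refine required
//                             limit=5, overrequest=30 -> 5 + 30 = 35 >= 20 -> refine optional
```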
- * + * Deterministicly identifies if the specified limit & overrequest params require a + * "refine:true" param be used in the the request, in order for the counts to be 100% accurate. + * * @see #UNIQUE_FIELD_VALS */ public static boolean isRefinementNeeded(Integer limitParam, Integer overrequestParam) { @@ -736,71 +901,78 @@ public static boolean isRefinementNeeded(Integer limitParam, Integer overrequest if (FORCE_DISABLE_REFINEMENT) { return false; } - + // use the "effective" values if the params are null final int limit = null == limitParam ? DEFAULT_LIMIT : limitParam; final int overrequest = null == overrequestParam ? 0 : overrequestParam; return - // don't presume how much overrequest will be done by default, just check the limit - (overrequest < 0 && limit < UNIQUE_FIELD_VALS) - // if the user specified overrequest is not "enough" to get all unique values - || (overrequest >= 0 && (long)limit + overrequest < UNIQUE_FIELD_VALS); + // don't presume how much overrequest will be done by default, just check the limit + (overrequest < 0 && limit < UNIQUE_FIELD_VALS) + // if the user specified overrequest is not "enough" to get all unique values + || (overrequest >= 0 && (long) limit + overrequest < UNIQUE_FIELD_VALS); } - - /** + + /** * recursive helper method for building random facets * * @param keyCounter used to ensure every generated facet has a unique key name - * @param maxDepth max possible depth allowed for the recusion, a lower value may be used depending on how many facets are returned at the current level. + * @param maxDepth max possible depth allowed for the recusion, a lower value may be used + * depending on how many facets are returned at the current level. */ - private static Map buildRandomFacets(AtomicInteger keyCounter, int maxDepth) { - final int numFacets = Math.max(1, TestUtil.nextInt(random(), -1, 3)); // 3/5th chance of being '1' - Map results = new LinkedHashMap<>(); + private static Map buildRandomFacets( + AtomicInteger keyCounter, int maxDepth) { + final int numFacets = + Math.max(1, TestUtil.nextInt(random(), -1, 3)); // 3/5th chance of being '1' + Map results = new LinkedHashMap<>(); for (int i = 0; i < numFacets; i++) { final JoinDomain domain = JoinDomain.buildRandomDomain(); assert null != domain; final Integer limit = randomLimitParam(random()); final Integer overrequest = randomOverrequestParam(random()); - final TermFacet facet = new TermFacet(field(random().nextBoolean() ? STR_FIELD_SUFFIXES : INT_FIELD_SUFFIXES, - random().nextInt(MAX_FIELD_NUM)), - domain, limit, overrequest, - randomRefineParam(random(), limit, overrequest)); + final TermFacet facet = + new TermFacet( + field( + random().nextBoolean() ? 
STR_FIELD_SUFFIXES : INT_FIELD_SUFFIXES, + random().nextInt(MAX_FIELD_NUM)), + domain, + limit, + overrequest, + randomRefineParam(random(), limit, overrequest)); results.put("facet_" + keyCounter.incrementAndGet(), facet); if (0 < maxDepth) { // if we're going wide, don't go deep final int nextMaxDepth = Math.max(0, maxDepth - numFacets); - facet.subFacets.putAll(buildRandomFacets(keyCounter, TestUtil.nextInt(random(), 0, nextMaxDepth))); + facet.subFacets.putAll( + buildRandomFacets(keyCounter, TestUtil.nextInt(random(), 0, nextMaxDepth))); } } return results; } } - - /** - * Models a Domain Change which includes either a 'join' or a 'filter' or both - */ - private static final class JoinDomain { + /** Models a Domain Change which includes either a 'join' or a 'filter' or both */ + private static final class JoinDomain { public final String from; public final String to; public final String filter; // not bothering with more then 1 filter, not the point of the test - /** + /** * @param from left side of join field name, null if domain involves no joining * @param to right side of join field name, null if domain involves no joining * @param filter filter to apply to domain, null if domain involves no filtering */ - public JoinDomain(String from, String to, String filter) { - assert ! ((null == from) ^ (null == to)) : "if from is null, to must be null"; + public JoinDomain(String from, String to, String filter) { + assert !((null == from) ^ (null == to)) : "if from is null, to must be null"; this.from = from; this.to = to; this.filter = filter; } - /** - * @return the JSON string representing this domain for use in a facet param, or null if no domain should be used - * */ + /** + * @return the JSON string representing this domain for use in a facet param, or null if no + * domain should be used + */ public CharSequence toJSONFacetParamValue() { if (null == from && null == filter) { return null; @@ -808,11 +980,10 @@ public CharSequence toJSONFacetParamValue() { StringBuilder sb = new StringBuilder("domain:{"); if (null != from) { assert null != to; - sb. 
append("join:{from:").append(from).append(",to:").append(to).append("}"); - if (null != filter){ + sb.append("join:{from:").append(from).append(",to:").append(to).append("}"); + if (null != filter) { sb.append(","); } - } if (null != filter) { sb.append("filter:'").append(filter).append("'"); @@ -821,18 +992,20 @@ public CharSequence toJSONFacetParamValue() { return sb; } - /** - * Given some original SolrParams, returns new SolrParams where the original "q" param is wrapped - * as needed to apply the equivalent transformation to a query as this domain would to a facet + /** + * Given some original SolrParams, returns new SolrParams where the original "q" param is + * wrapped as needed to apply the equivalent transformation to a query as this domain would to a + * facet */ public SolrParams applyDomainToQuery(String safeKey, SolrParams in) { - assert null == in.get(safeKey); // shouldn't be possible if every facet uses a unique key string - + assert null + == in.get(safeKey); // shouldn't be possible if every facet uses a unique key string + String q = in.get("q"); final ModifiableSolrParams out = new ModifiableSolrParams(in); if (null != from) { out.set(safeKey, in.get("q")); - q = "{!join from="+from+" to="+to+" v=$"+safeKey+"}"; + q = "{!join from=" + from + " to=" + to + " v=$" + safeKey + "}"; } if (null != filter) { q = filter + " AND " + q; @@ -842,41 +1015,42 @@ public SolrParams applyDomainToQuery(String safeKey, SolrParams in) { } /** - * Factory method for creating a random domain change to use with a facet - may return an 'noop' JoinDomain, - * but will never return null. + * Factory method for creating a random domain change to use with a facet - may return an 'noop' + * JoinDomain, but will never return null. */ - public static JoinDomain buildRandomDomain() { + public static JoinDomain buildRandomDomain() { // use consistent type on both sides of join final String[] suffixes = random().nextBoolean() ? STR_FIELD_SUFFIXES : INT_FIELD_SUFFIXES; - + final boolean noJoin = random().nextBoolean(); String from = null; String to = null; - for (;;) { + for (; ; ) { if (noJoin) break; from = field(suffixes, random().nextInt(MAX_FIELD_NUM)); to = field(suffixes, random().nextInt(MAX_FIELD_NUM)); - // HACK: joined numeric point fields need docValues.. for now just skip _is fields if we are dealing with points. - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP) && (from.endsWith("_is") || to.endsWith("_is"))) - { - continue; + // HACK: joined numeric point fields need docValues.. for now just skip _is fields if we are + // dealing with points. + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP) + && (from.endsWith("_is") || to.endsWith("_is"))) { + continue; } break; } // keep it simple, only filter on string fields - not point of test final String filterField = strfield(random().nextInt(MAX_FIELD_NUM)); - - final String filter = random().nextBoolean() ? null : filterField+":[* TO *]"; + + final String filter = random().nextBoolean() ? 
null : filterField + ":[* TO *]"; return new JoinDomain(from, to, filter); } } - - /** - * returns a random SolrClient -- either a CloudSolrClient, or an HttpSolrClient pointed - * at a node in our cluster + + /** + * returns a random SolrClient -- either a CloudSolrClient, or an HttpSolrClient pointed at a node + * in our cluster */ public static SolrClient getRandClient(Random rand) { int numClients = CLIENTS.size(); @@ -887,9 +1061,7 @@ public static SolrClient getRandClient(Random rand) { public static void waitForRecoveriesToFinish(CloudSolrClient client) throws Exception { assert null != client.getDefaultCollection(); - AbstractDistribZkTestBase.waitForRecoveriesToFinish(client.getDefaultCollection(), - client.getZkStateReader(), - true, true, 330); + AbstractDistribZkTestBase.waitForRecoveriesToFinish( + client.getDefaultCollection(), client.getZkStateReader(), true, true, 330); } - } diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKG.java b/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKG.java index 9c93420442c..3a9773e21f6 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKG.java +++ b/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKG.java @@ -16,6 +16,9 @@ */ package org.apache.solr.search.facet; +import static org.apache.solr.search.facet.RelatednessAgg.computeRelatedness; +import static org.apache.solr.search.facet.RelatednessAgg.roundTo5Digits; + import java.io.IOException; import java.lang.invoke.MethodHandles; import java.nio.file.Path; @@ -28,7 +31,6 @@ import java.util.Map; import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.lucene.util.TestUtil; import org.apache.solr.client.solrj.SolrClient; @@ -52,30 +54,24 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.search.facet.RelatednessAgg.computeRelatedness; -import static org.apache.solr.search.facet.RelatednessAgg.roundTo5Digits; - -/** - *

- * A randomized test of nested facets using the relatedness() function, that asserts the - * accuracy the results for all the buckets returned using verification queries of the (expected) - * foreground & background queries based on the nested facet terms. - *

- * Note that unlike normal facet "count" verification, using a high limit + overrequest isn't a substitute - * for refinement in order to ensure accurate "skg" computation across shards. For that reason, this - * tests forces refine: true (unlike {@link TestCloudJSONFacetJoinDomain}) and specifies a - * domain: { 'query':'*:*' } for every facet, in order to guarantee that all shards - * participate in all facets, so that the popularity & relatedness values returned can be proven - * with validation requests. - *

- *

- * (Refinement alone is not enough. Using the '*:*' query as the facet domain is necessary to - * prevent situations where a single shardX may return candidate bucket with no child-buckets due to - * the normal facet intersections, but when refined on other shardY(s), can produce "high scoring" - * SKG child-buckets, which would then be missing the foreground/background "size" contributions from - * shardX. - *

- * +/** + * A randomized test of nested facets using the relatedness() function, that asserts + * the accuracy the results for all the buckets returned using verification queries of the + * (expected) foreground & background queries based on the nested facet terms. + * + *
<p>
Note that unlike normal facet "count" verification, using a high limit + overrequest isn't a + * substitute for refinement in order to ensure accurate "skg" computation across shards. For that + * reason, this tests forces refine: true (unlike {@link TestCloudJSONFacetJoinDomain}) + * and specifies a domain: { 'query':'*:*' } for every facet, in order to guarantee + * that all shards participate in all facets, so that the popularity & relatedness values + * returned can be proven with validation requests. + * + *
<p>
(Refinement alone is not enough. Using the '*:*' query as the facet domain is necessary to + * prevent situations where a single shardX may return candidate bucket with no child-buckets due to + * the normal facet intersections, but when refined on other shardY(s), can produce "high scoring" + * SKG child-buckets, which would then be missing the foreground/background "size" contributions + * from shardX. + * * @see TestCloudJSONFacetJoinDomain * @see TestCloudJSONFacetSKGEquiv */ @@ -91,19 +87,25 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase { private static final int MAX_FIELD_NUM = 15; private static final int UNIQUE_FIELD_VALS = 50; - /** Multivalued string field suffixes that can be randomized for testing diff facet/join code paths */ - private static final String[] MULTI_STR_FIELD_SUFFIXES = new String[] - { "_multi_ss", "_multi_sds", "_multi_sdsS" }; - /** Multivalued int field suffixes that can be randomized for testing diff facet/join code paths */ - private static final String[] MULTI_INT_FIELD_SUFFIXES = new String[] - { "_multi_is", "_multi_ids", "_multi_idsS" }; - - /** Single Valued string field suffixes that can be randomized for testing diff facet code paths */ - private static final String[] SOLO_STR_FIELD_SUFFIXES = new String[] - { "_solo_s", "_solo_sd", "_solo_sdS" }; + /** + * Multivalued string field suffixes that can be randomized for testing diff facet/join code paths + */ + private static final String[] MULTI_STR_FIELD_SUFFIXES = + new String[] {"_multi_ss", "_multi_sds", "_multi_sdsS"}; + /** + * Multivalued int field suffixes that can be randomized for testing diff facet/join code paths + */ + private static final String[] MULTI_INT_FIELD_SUFFIXES = + new String[] {"_multi_is", "_multi_ids", "_multi_idsS"}; + + /** + * Single Valued string field suffixes that can be randomized for testing diff facet code paths + */ + private static final String[] SOLO_STR_FIELD_SUFFIXES = + new String[] {"_solo_s", "_solo_sd", "_solo_sdS"}; /** Single Valued int field suffixes that can be randomized for testing diff facet code paths */ - private static final String[] SOLO_INT_FIELD_SUFFIXES = new String[] - { "_solo_i", "_solo_id", "_solo_idS" }; + private static final String[] SOLO_INT_FIELD_SUFFIXES = + new String[] {"_solo_i", "_solo_id", "_solo_idS"}; /** A basic client for operations at the cloud level, default collection will be set */ private static CloudSolrClient CLOUD_CLIENT; @@ -113,26 +115,28 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase { @BeforeClass private static void createMiniSolrCloudCluster() throws Exception { // sanity check constants - assertTrue("bad test constants: some suffixes will never be tested", - (MULTI_STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) && - (MULTI_INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM) && - (SOLO_STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) && - (SOLO_INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM)); - + assertTrue( + "bad test constants: some suffixes will never be tested", + (MULTI_STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) + && (MULTI_INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM) + && (SOLO_STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) + && (SOLO_INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM)); + // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); - + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); + // multi replicas should not matter... 
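Per the class javadoc above, every facet in this test carries refine:true and a domain: { query:'*:*' }; a minimal sketch of the resulting request shape (field values and the facet key are hypothetical):

```java
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;

/** Illustrative sketch only; not part of the patched test. */
class SkgRequestSketch {
  static QueryResponse skgFacet(SolrClient client) throws Exception {
    SolrQuery q = new SolrQuery("*:*");
    q.setRows(0);
    q.set("fore", "field_7_multi_ss:11"); // foreground set (hypothetical)
    q.set("back", "*:*");                 // background set: the whole collection
    q.set("json.facet",
        "{ top1 : { type:terms, field:field_9_multi_ss, limit:50, refine:true,"
            + " domain: { query:'*:*' }, sort:'skg desc',"
            + " facet: { skg : 'relatedness($fore,$back)' } } }");
    return client.query(q);
  }
}
```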
final int repFactor = usually() ? 1 : 2; // ... but we definitely want to test multiple shards - final int numShards = TestUtil.nextInt(random(), 1, (usually() ? 2 :3)); + final int numShards = TestUtil.nextInt(random(), 1, (usually() ? 2 : 3)); final int numNodes = (numShards * repFactor); - + final String configName = DEBUG_LABEL + "_config-set"; final Path configDir = TEST_COLL1_CONF(); - + configureCluster(numNodes).addConfig(configName, configDir).configure(); - + Map collectionProperties = new LinkedHashMap<>(); collectionProperties.put("config", "solrconfig-tlog.xml"); collectionProperties.put("schema", "schema_latest.xml"); @@ -152,24 +156,25 @@ private static void createMiniSolrCloudCluster() throws Exception { final int numDocs = atLeast(100); for (int id = 0; id < numDocs; id++) { - SolrInputDocument doc = sdoc("id", ""+id); + SolrInputDocument doc = sdoc("id", "" + id); for (int fieldNum = 0; fieldNum < MAX_FIELD_NUM; fieldNum++) { - // NOTE: we ensure every doc has at least one value in each field - // that way, if a term is returned for a parent there there is guaranteed to be at least one - // one term in the child facet as well. + // NOTE: we ensure every doc has at least one value in each field that way, if a term is + // returned for a parent there there is guaranteed to be at least one one term in the child + // facet as well. // // otherwise, we'd face the risk of a single shardX returning parentTermX as a top term for - // the parent facet, but having no child terms -- meanwhile on refinement another shardY that - // did *not* returned parentTermX in phase#1, could return some *new* child terms under + // the parent facet, but having no child terms -- meanwhile on refinement another shardY + // that did *not* returned parentTermX in phase#1, could return some *new* child terms under // parentTermX, but their stats would not include the bgCount from shardX. // // in normal operation, this is an edge case that isn't a big deal because the ratios & // relatedness scores are statistically approximate, but for the purpose of this test where - // we verify correctness via exactness we need all shards to contribute to the SKG statistics + // we verify correctness via exactness we need all shards to contribute to the SKG + // statistics final int numValsThisDoc = TestUtil.nextInt(random(), 1, (usually() ? 5 : 10)); for (int v = 0; v < numValsThisDoc; v++) { final String fieldValue = randFieldValue(fieldNum); - + // multi valued: one string, and one integer doc.addField(multiStrField(fieldNum), fieldValue); doc.addField(multiIntField(fieldNum), fieldValue); @@ -182,18 +187,18 @@ private static void createMiniSolrCloudCluster() throws Exception { } CLOUD_CLIENT.add(doc); if (random().nextInt(100) < 1) { - CLOUD_CLIENT.commit(); // commit 1% of the time to create new segments + CLOUD_CLIENT.commit(); // commit 1% of the time to create new segments } if (random().nextInt(100) < 5) { - CLOUD_CLIENT.add(doc); // duplicate the doc 5% of the time to create deleted docs + CLOUD_CLIENT.add(doc); // duplicate the doc 5% of the time to create deleted docs } } CLOUD_CLIENT.commit(); } /** - * Given a (random) number, and a (static) array of possible suffixes returns a consistent field name that - * uses that number and one of hte specified suffixes in it's name. + * Given a (random) number, and a (static) array of possible suffixes returns a consistent field + * name that uses that number and one of hte specified suffixes in it's name. 
* * @see #MULTI_STR_FIELD_SUFFIXES * @see #MULTI_INT_FIELD_SUFFIXES @@ -202,7 +207,7 @@ private static void createMiniSolrCloudCluster() throws Exception { */ private static String field(final String[] suffixes, final int fieldNum) { assert fieldNum < MAX_FIELD_NUM; - + final String suffix = suffixes[fieldNum % suffixes.length]; return "field_" + fieldNum + suffix; } @@ -224,10 +229,10 @@ private static String soloIntField(final int fieldNum) { } /** - * Given a (random) field number, returns a random (integer based) value for that field. - * NOTE: The number of unique values in each field is constant according to {@link #UNIQUE_FIELD_VALS} - * but the precise range of values will vary for each unique field number, such that cross field joins - * will match fewer documents based on how far apart the field numbers are. + * Given a (random) field number, returns a random (integer based) value for that field. NOTE: The + * number of unique values in each field is constant according to {@link #UNIQUE_FIELD_VALS} but + * the precise range of values will vary for each unique field number, such that cross + * field joins will match fewer documents based on how far apart the field numbers are. * * @see #UNIQUE_FIELD_VALS * @see #field @@ -236,7 +241,6 @@ private static String randFieldValue(final int fieldNum) { return "" + (fieldNum + TestUtil.nextInt(random(), 1, UNIQUE_FIELD_VALS)); } - @AfterClass private static void afterClass() throws Exception { if (null != CLOUD_CLIENT) { @@ -248,79 +252,87 @@ private static void afterClass() throws Exception { } CLIENTS.clear(); } - - /** - * Test some small, hand crafted, but non-trivial queries that are - * easier to trace/debug then a pure random monstrosity. - * (ie: if something obvious gets broken, this test may fail faster and in a more obvious way then testRandom) + + /** + * Test some small, hand crafted, but non-trivial queries that are easier to trace/debug then a + * pure random monstrosity. 
(ie: if something obvious gets broken, this test may fail faster and + * in a more obvious way then testRandom) */ public void testBespoke() throws Exception { { // trivial single level facet - Map facets = new LinkedHashMap<>(); + Map facets = new LinkedHashMap<>(); TermFacet top = new TermFacet(multiStrField(9), UNIQUE_FIELD_VALS, 0, null); facets.put("top1", top); final AtomicInteger maxBuckets = new AtomicInteger(UNIQUE_FIELD_VALS); - assertFacetSKGsAreCorrect(maxBuckets, facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*"); + assertFacetSKGsAreCorrect( + maxBuckets, facets, multiStrField(7) + ":11", multiStrField(5) + ":9", "*:*"); assertTrue("Didn't check a single bucket???", maxBuckets.get() < UNIQUE_FIELD_VALS); } - + { // trivial single level facet w/sorting on skg - Map facets = new LinkedHashMap<>(); + Map facets = new LinkedHashMap<>(); TermFacet top = new TermFacet(multiStrField(9), UNIQUE_FIELD_VALS, 0, "skg desc"); facets.put("top2", top); final AtomicInteger maxBuckets = new AtomicInteger(UNIQUE_FIELD_VALS); - assertFacetSKGsAreCorrect(maxBuckets, facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*"); + assertFacetSKGsAreCorrect( + maxBuckets, facets, multiStrField(7) + ":11", multiStrField(5) + ":9", "*:*"); assertTrue("Didn't check a single bucket???", maxBuckets.get() < UNIQUE_FIELD_VALS); } { // trivial single level facet w/ 2 diff ways to request "limit = (effectively) Infinite" // to sanity check refinement of buckets missing from other shard in both cases - + // NOTE that these two queries & facets *should* effectively identical given that the // very large limit value is big enough no shard will ever return that may terms, // but the "limit=-1" case it actually triggers slightly different code paths // because it causes FacetField.returnsPartial() to be "true" - for (int limit : new int[] { 999999999, -1 }) { - Map facets = new LinkedHashMap<>(); - facets.put("top_facet_limit__" + limit, new TermFacet(multiStrField(9), limit, 0, "skg desc")); + for (int limit : new int[] {999999999, -1}) { + Map facets = new LinkedHashMap<>(); + facets.put( + "top_facet_limit__" + limit, new TermFacet(multiStrField(9), limit, 0, "skg desc")); final AtomicInteger maxBuckets = new AtomicInteger(UNIQUE_FIELD_VALS); - assertFacetSKGsAreCorrect(maxBuckets, facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*"); + assertFacetSKGsAreCorrect( + maxBuckets, facets, multiStrField(7) + ":11", multiStrField(5) + ":9", "*:*"); assertTrue("Didn't check a single bucket???", maxBuckets.get() < UNIQUE_FIELD_VALS); } } { // allBuckets should have no impact... 
- for (Boolean allBuckets : Arrays.asList( null, false, true )) { - Map facets = new LinkedHashMap<>(); - facets.put("allb__" + allBuckets, new TermFacet(multiStrField(9), - map("allBuckets", allBuckets, - "sort", "skg desc"))); + for (Boolean allBuckets : Arrays.asList(null, false, true)) { + Map facets = new LinkedHashMap<>(); + facets.put( + "allb__" + allBuckets, + new TermFacet(multiStrField(9), map("allBuckets", allBuckets, "sort", "skg desc"))); final AtomicInteger maxBuckets = new AtomicInteger(UNIQUE_FIELD_VALS); - assertFacetSKGsAreCorrect(maxBuckets, facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*"); + assertFacetSKGsAreCorrect( + maxBuckets, facets, multiStrField(7) + ":11", multiStrField(5) + ":9", "*:*"); assertTrue("Didn't check a single bucket???", maxBuckets.get() < UNIQUE_FIELD_VALS); } } } - + public void testRandom() throws Exception { - // since the "cost" of verifying the stats for each bucket is so high (see TODO in verifySKGResults()) - // we put a safety valve in place on the maximum number of buckets that we are willing to verify - // across *all* the queries that we do. - // that way if the randomized queries we build all have relatively small facets, so be it, but if - // we get a really big one early on, we can test as much as possible, skip other iterations. + // since the "cost" of verifying the stats for each bucket is so high (see TODO in + // verifySKGResults()) we put a safety valve in place on the maximum number of buckets that we + // are willing to verify across *all* the queries that we do. that way if the randomized queries + // we build all have relatively small facets, so be it, but if we get a really big one early on, + // we can test as much as possible, skip other iterations. // - // (deeply nested facets may contain more buckets then the max, but we won't *check* all of them) + // (deeply nested facets may contain more buckets then the max, but we won't *check* all of + // them) final int maxBucketsAllowed = atLeast(2000); final AtomicInteger maxBucketsToCheck = new AtomicInteger(maxBucketsAllowed); - + final int numIters = atLeast(10); for (int iter = 0; iter < numIters && 0 < maxBucketsToCheck.get(); iter++) { - assertFacetSKGsAreCorrect(maxBucketsToCheck, TermFacet.buildRandomFacets(), - buildRandomQuery(), buildRandomQuery(), buildRandomQuery()); + assertFacetSKGsAreCorrect( + maxBucketsToCheck, + TermFacet.buildRandomFacets(), + buildRandomQuery(), + buildRandomQuery(), + buildRandomQuery()); } assertTrue("Didn't check a single bucket???", maxBucketsToCheck.get() < maxBucketsAllowed); - - } /** @@ -330,7 +342,7 @@ public void testRandom() throws Exception { * @see #field */ private static String buildRandomQuery() { - if (0 == TestUtil.nextInt(random(), 0,10)) { + if (0 == TestUtil.nextInt(random(), 0, 10)) { return "*:*"; } final int numClauses = TestUtil.nextInt(random(), 3, 10); @@ -347,25 +359,27 @@ private static String buildORQuery(String... clauses) { assert 0 < clauses.length; return "(" + String.join(" OR ", clauses) + ")"; } - + /** - * Given a set of term facets, and top level query strings, asserts that - * the SKG stats for each facet term returned when executing that query with those foreground/background - * queries match the expected results of executing the equivalent queries in isolation. 
+ * Given a set of term facets, and top level query strings, asserts that the SKG stats for each + * facet term returned when executing that query with those foreground/background queries match + * the expected results of executing the equivalent queries in isolation. * * @see #verifySKGResults */ - private void assertFacetSKGsAreCorrect(final AtomicInteger maxBucketsToCheck, - Map expected, - final String query, - final String foreQ, - final String backQ) throws SolrServerException, IOException { - final SolrParams baseParams = params("rows","0", "fore", foreQ, "back", backQ); - - final SolrParams facetParams = params("q", query, - "json.facet", ""+TermFacet.toJSONFacetParamValue(expected)); + private void assertFacetSKGsAreCorrect( + final AtomicInteger maxBucketsToCheck, + Map expected, + final String query, + final String foreQ, + final String backQ) + throws SolrServerException, IOException { + final SolrParams baseParams = params("rows", "0", "fore", foreQ, "back", backQ); + + final SolrParams facetParams = + params("q", query, "json.facet", "" + TermFacet.toJSONFacetParamValue(expected)); final SolrParams initParams = SolrParams.wrapAppended(facetParams, baseParams); - + log.info("Doing full run: {}", initParams); QueryResponse rsp = null; @@ -377,71 +391,84 @@ private void assertFacetSKGsAreCorrect(final AtomicInteger maxBucketsToCheck, topNamedList = rsp.getResponse(); assertNotNull(initParams + " is null topNamedList?", topNamedList); } catch (Exception e) { - throw new RuntimeException("init query failed: " + initParams + ": " + - e.getMessage(), e); + throw new RuntimeException("init query failed: " + initParams + ": " + e.getMessage(), e); } try { @SuppressWarnings("unchecked") final NamedList facetResponse = (NamedList) topNamedList.get("facets"); assertNotNull("null facet results?", facetResponse); - assertEquals("numFound mismatch with top count?", - rsp.getResults().getNumFound(), ((Number)facetResponse.get("count")).longValue()); + assertEquals( + "numFound mismatch with top count?", + rsp.getResults().getNumFound(), + ((Number) facetResponse.get("count")).longValue()); // Note: even if the query has numFound=0, our explicit background query domain should // still force facet results // (even if the background query matches nothing, that just means there will be no // buckets in those facets) assertFacetSKGsAreCorrect(maxBucketsToCheck, expected, baseParams, facetResponse); - + } catch (AssertionError e) { throw new AssertionError(initParams + " ===> " + topNamedList + " --> " + e.getMessage(), e); } finally { - log.info("Ending full run"); + log.info("Ending full run"); } } - /** - * Recursive helper method that walks the actual facet response, comparing the SKG results to - * the expected output based on the equivalent filters generated from the original TermFacet. + /** + * Recursive helper method that walks the actual facet response, comparing the SKG results to the + * expected output based on the equivalent filters generated from the original TermFacet. 
*/ @SuppressWarnings({"unchecked"}) - private void assertFacetSKGsAreCorrect(final AtomicInteger maxBucketsToCheck, - final Map expected, - final SolrParams baseParams, - final NamedList actualFacetResponse) throws SolrServerException, IOException { - - for (Map.Entry entry : expected.entrySet()) { + private void assertFacetSKGsAreCorrect( + final AtomicInteger maxBucketsToCheck, + final Map expected, + final SolrParams baseParams, + final NamedList actualFacetResponse) + throws SolrServerException, IOException { + + for (Map.Entry entry : expected.entrySet()) { final String facetKey = entry.getKey(); final TermFacet facet = entry.getValue(); - + final NamedList results = (NamedList) actualFacetResponse.get(facetKey); assertNotNull(facetKey + " key missing from: " + actualFacetResponse, results); if (null != results.get("allBuckets")) { // if the response includes an allBuckets bucket, then there must not be an skg value - + // 'skg' key must not exist in th allBuckets bucket - assertEquals(facetKey + " has skg in allBuckets: " + results.get("allBuckets"), - Collections.emptyList(), - ((NamedList)results.get("allBuckets")).getAll("skg")); + assertEquals( + facetKey + " has skg in allBuckets: " + results.get("allBuckets"), + Collections.emptyList(), + ((NamedList) results.get("allBuckets")).getAll("skg")); } final List> buckets = (List>) results.get("buckets"); assertNotNull(facetKey + " has null buckets: " + actualFacetResponse, buckets); if (buckets.isEmpty()) { // should only happen if the background query does not match any docs with field X - final long docsWithField = getNumFound(params("_trace", "noBuckets", - "rows", "0", - "q", facet.field+":[* TO *]", - "fq", baseParams.get("back"))); - - assertEquals(facetKey + " has no buckets, but docs in background exist with field: " + facet.field, - 0, docsWithField); + final long docsWithField = + getNumFound( + params( + "_trace", + "noBuckets", + "rows", + "0", + "q", + facet.field + ":[* TO *]", + "fq", + baseParams.get("back"))); + + assertEquals( + facetKey + " has no buckets, but docs in background exist with field: " + facet.field, + 0, + docsWithField); } - // NOTE: it's important that we do this depth first -- not just because it's the easiest way to do it, - // but because it means that our maxBucketsToCheck will ensure we do a lot of deep sub-bucket checking, - // not just all the buckets of the top level(s) facet(s) + // NOTE: it's important that we do this depth first -- not just because it's the easiest way + // to do it, but because it means that our maxBucketsToCheck will ensure we do a lot of deep + // sub-bucket checking, not just all the buckets of the top level(s) facet(s) for (NamedList bucket : buckets) { final String fieldVal = bucket.get("val").toString(); // int or stringified int @@ -449,42 +476,48 @@ private void assertFacetSKGsAreCorrect(final AtomicInteger maxBucketsToCheck, if (maxBucketsToCheck.decrementAndGet() <= 0) { return; } - - final SolrParams verifyParams = SolrParams.wrapAppended(baseParams, - params("fq", facet.field + ":" + fieldVal)); - + + final SolrParams verifyParams = + SolrParams.wrapAppended(baseParams, params("fq", facet.field + ":" + fieldVal)); + // recursively check subFacets - if (! facet.subFacets.isEmpty()) { + if (!facet.subFacets.isEmpty()) { assertFacetSKGsAreCorrect(maxBucketsToCheck, facet.subFacets, verifyParams, bucket); } } } - + { // make sure we don't have any facet keys we don't expect // a little hackish because subfacets have extra keys... 
final LinkedHashSet expectedKeys = new LinkedHashSet<>(expected.keySet()); expectedKeys.add("count"); - if (0 <= actualFacetResponse.indexOf("val",0)) { + if (0 <= actualFacetResponse.indexOf("val", 0)) { expectedKeys.add("val"); expectedKeys.add("skg"); } - assertEquals("Unexpected keys in facet response", - expectedKeys, actualFacetResponse.asShallowMap().keySet()); + assertEquals( + "Unexpected keys in facet response", + expectedKeys, + actualFacetResponse.asShallowMap().keySet()); } } /** - * Verifies that the popularity & relatedness values containined in a single SKG bucket - * match the expected values based on the facet field & bucket value, as well the existing + * Verifies that the popularity & relatedness values containined in a single SKG bucket match + * the expected values based on the facet field & bucket value, as well the existing * filterParams. - * + * * @see #assertFacetSKGsAreCorrect */ - private void verifySKGResults(String facetKey, TermFacet facet, SolrParams filterParams, - String fieldVal, NamedList bucket) - throws SolrServerException, IOException { - - final String bucketQ = facet.field+":"+fieldVal; + private void verifySKGResults( + String facetKey, + TermFacet facet, + SolrParams filterParams, + String fieldVal, + NamedList bucket) + throws SolrServerException, IOException { + + final String bucketQ = facet.field + ":" + fieldVal; @SuppressWarnings({"unchecked"}) final NamedList skgBucket = (NamedList) bucket.get("skg"); assertNotNull(facetKey + "/bucket:" + bucket.toString(), skgBucket); @@ -492,121 +525,130 @@ private void verifySKGResults(String facetKey, TermFacet facet, SolrParams filte // TODO: make this more efficient? // ideally we'd do a single query w/4 facet.queries, one for each count // but formatting the queries is a pain, currently we leverage the accumulated fq's - final long fgSize = getNumFound(SolrParams.wrapAppended(params("_trace", "fgSize", - "rows","0", - "q","{!query v=$fore}"), - filterParams)); - final long bgSize = getNumFound(params("_trace", "bgSize", - "rows","0", - "q", filterParams.get("back"))); - - final long fgCount = getNumFound(SolrParams.wrapAppended(params("_trace", "fgCount", - "rows","0", - "q","{!query v=$fore}", - "fq", bucketQ), - filterParams)); - final long bgCount = getNumFound(params("_trace", "bgCount", - "rows","0", - "q", bucketQ, - "fq", filterParams.get("back"))); - - assertEquals(facetKey + "/bucket:" + bucket + " => fgPop should be: " + fgCount + " / " + bgSize, - roundTo5Digits((double) fgCount / bgSize), - skgBucket.get("foreground_popularity")); - assertEquals(facetKey + "/bucket:" + bucket + " => bgPop should be: " + bgCount + " / " + bgSize, - roundTo5Digits((double) bgCount / bgSize), - skgBucket.get("background_popularity")); - assertEquals(facetKey + "/bucket:" + bucket + " => relatedness is wrong", - roundTo5Digits(computeRelatedness(fgCount, fgSize, bgCount, bgSize)), - skgBucket.get("relatedness")); - + final long fgSize = + getNumFound( + SolrParams.wrapAppended( + params( + "_trace", "fgSize", + "rows", "0", + "q", "{!query v=$fore}"), + filterParams)); + final long bgSize = + getNumFound( + params( + "_trace", "bgSize", + "rows", "0", + "q", filterParams.get("back"))); + + final long fgCount = + getNumFound( + SolrParams.wrapAppended( + params("_trace", "fgCount", "rows", "0", "q", "{!query v=$fore}", "fq", bucketQ), + filterParams)); + final long bgCount = + getNumFound( + params("_trace", "bgCount", "rows", "0", "q", bucketQ, "fq", filterParams.get("back"))); + + assertEquals( + facetKey + 
"/bucket:" + bucket + " => fgPop should be: " + fgCount + " / " + bgSize, + roundTo5Digits((double) fgCount / bgSize), + skgBucket.get("foreground_popularity")); + assertEquals( + facetKey + "/bucket:" + bucket + " => bgPop should be: " + bgCount + " / " + bgSize, + roundTo5Digits((double) bgCount / bgSize), + skgBucket.get("background_popularity")); + assertEquals( + facetKey + "/bucket:" + bucket + " => relatedness is wrong", + roundTo5Digits(computeRelatedness(fgCount, fgSize, bgCount, bgSize)), + skgBucket.get("relatedness")); } /** - * Trivial data structure for modeling a simple terms facet that can be written out as a json.facet param. + * Trivial data structure for modeling a simple terms facet that can be written out as a + * json.facet param. * - * Doesn't do any string escaping or quoting, so don't use whitespace or reserved json characters + *
+ * <p>
Doesn't do any string escaping or quoting, so don't use whitespace or reserved json + * characters */ private static final class TermFacet implements Writable { /** non-skg subfacets for use in verification */ - public final Map subFacets = new LinkedHashMap<>(); - - private final Map jsonData = new LinkedHashMap<>(); + public final Map subFacets = new LinkedHashMap<>(); + + private final Map jsonData = new LinkedHashMap<>(); public final String field; - /** + /** * @param field must be non null * @param options can set any of options used in a term facet other then field or (sub) facets */ - public TermFacet(final String field, final Map options) { + public TermFacet(final String field, final Map options) { assert null != field; this.field = field; - + jsonData.putAll(options); - + // we don't allow these to be overridden by options, so set them now... jsonData.put("type", "terms"); jsonData.put("field", field); - // see class javadocs for why we always use refine:true & the query:'*:*' domain for this test. + // see class javadocs for why we always use refine:true & the query:'*:*' domain for this + // test. jsonData.put("refine", true); - jsonData.put("domain", map("query","*:*")); - + jsonData.put("domain", map("query", "*:*")); } /** all params except field can be null */ public TermFacet(String field, Integer limit, Integer overrequest, String sort) { this(field, map("limit", limit, "overrequest", overrequest, "sort", sort)); } - + /** Simplified constructor asks for limit = # unique vals */ public TermFacet(String field) { - this(field, UNIQUE_FIELD_VALS, 0, "skg desc"); - + this(field, UNIQUE_FIELD_VALS, 0, "skg desc"); } + @Override public void write(JSONWriter writer) { // we need to include both our "real" subfacets, along with our SKG stat and 'processEmpty' // (we don't put these in 'subFacets' to help keep the verification code simpler - final Map sub = map("processEmpty", true, - "skg", "relatedness($fore,$back)"); + final Map sub = map("processEmpty", true, "skg", "relatedness($fore,$back)"); sub.putAll(subFacets); - - final Map out = map("facet", sub); + + final Map out = map("facet", sub); out.putAll(jsonData); - + writer.write(out); } /** - * Given a set of (possibly nested) facets, generates a suitable json.facet param value to - * use for testing them against in a solr request. + * Given a set of (possibly nested) facets, generates a suitable json.facet param + * value to use for testing them against in a solr request. */ - public static String toJSONFacetParamValue(final Map facets) { + public static String toJSONFacetParamValue(final Map facets) { assert null != facets; - assert ! facets.isEmpty(); - + assert !facets.isEmpty(); + // see class javadocs for why we always want processEmpty - final Map jsonData = map("processEmpty", true); + final Map jsonData = map("processEmpty", true); jsonData.putAll(facets); - + return JSONUtil.toJSON(jsonData, -1); // no newlines } /** - * Factory method for generating some random facets. + * Factory method for generating some random facets. * - * For simplicity, each facet will have a unique key name. + *
+ * <p>
For simplicity, each facet will have a unique key name. */ - public static Map buildRandomFacets() { + public static Map buildRandomFacets() { // for simplicity, use a unique facet key regardless of depth - simplifies verification // and le's us enforce a hard limit on the total number of facets in a request AtomicInteger keyCounter = new AtomicInteger(0); - + final int maxDepth = TestUtil.nextInt(random(), 0, (usually() ? 2 : 3)); return buildRandomFacets(keyCounter, maxDepth); } - + /** * picks a random field to facet on. * @@ -615,12 +657,17 @@ public static Map buildRandomFacets() { */ public static String randomFacetField(final Random r) { final int fieldNum = r.nextInt(MAX_FIELD_NUM); - switch(r.nextInt(4)) { - case 0: return multiStrField(fieldNum); - case 1: return multiIntField(fieldNum); - case 2: return soloStrField(fieldNum); - case 3: return soloIntField(fieldNum); - default: throw new RuntimeException("Broken case statement"); + switch (r.nextInt(4)) { + case 0: + return multiStrField(fieldNum); + case 1: + return multiIntField(fieldNum); + case 2: + return soloStrField(fieldNum); + case 3: + return soloIntField(fieldNum); + default: + throw new RuntimeException("Broken case statement"); } } @@ -631,41 +678,49 @@ public static String randomFacetField(final Random r) { */ public static Boolean randomPerSegParam(final Random r) { - switch(r.nextInt(4)) { - case 0: return true; - case 1: return false; - case 2: - case 3: return null; - default: throw new RuntimeException("Broken case statement"); + switch (r.nextInt(4)) { + case 0: + return true; + case 1: + return false; + case 2: + case 3: + return null; + default: + throw new RuntimeException("Broken case statement"); } } - + /** * picks a random value for the "prefix" param, biased in favor of interesting test cases * * @return a valid prefix value, may be null */ public static String randomPrefixParam(final Random r, final String facetField) { - + if (facetField.contains("multi_i") || facetField.contains("solo_i")) { // never used a prefix on a numeric field return null; } assert (facetField.contains("multi_s") || facetField.contains("solo_s")) - : "possible facet fields have changed, breaking test"; - - switch(r.nextInt(5)) { - case 0: return "2"; - case 1: return "3"; - case 2: - case 3: - case 4: return null; - default: throw new RuntimeException("Broken case statement"); + : "possible facet fields have changed, breaking test"; + + switch (r.nextInt(5)) { + case 0: + return "2"; + case 1: + return "3"; + case 2: + case 3: + case 4: + return null; + default: + throw new RuntimeException("Broken case statement"); } } - + /** - * picks a random value for the "prelim_sort" param, biased in favor of interesting test cases. + * picks a random value for the "prelim_sort" param, biased in favor of interesting test cases. * * @return a sort string (w/direction), or null to specify nothing (trigger default behavior) * @see #randomSortParam @@ -691,23 +746,27 @@ public static String randomSortParam(Random r) { // if this method is modified to produce new sorts, make sure to update // randomLimitParam to account for them if they are impacted by SOLR-12556 final String dir = random().nextBoolean() ? 
"asc" : "desc"; - switch(r.nextInt(4)) { - case 0: return null; - case 1: return "count " + dir; - case 2: return "skg " + dir; - case 3: return "index " + dir; - default: throw new RuntimeException("Broken case statement"); + switch (r.nextInt(4)) { + case 0: + return null; + case 1: + return "count " + dir; + case 2: + return "skg " + dir; + case 3: + return "index " + dir; + default: + throw new RuntimeException("Broken case statement"); } } /** * picks a random value for the "limit" param, biased in favor of interesting test cases * - *
<p>
- * NOTE: Due to SOLR-12556, we have to force an overrequest of "all" possible terms for
- * some sort values.
- * </p>
+ *
+ * <p>
NOTE: Due to SOLR-12556, we have to force an overrequest of "all" possible terms + * for some sort values. * - * @return a number to specify in the request, or null to specify nothing (trigger default behavior) + * @return a number to specify in the request, or null to specify nothing (trigger default + * behavior) * @see #UNIQUE_FIELD_VALS * @see #randomSortParam */ @@ -722,20 +781,21 @@ public static Integer randomLimitParam(Random r, final String sort) { final int limit = 1 + r.nextInt((int) (UNIQUE_FIELD_VALS * 1.5F)); if (limit >= UNIQUE_FIELD_VALS && r.nextBoolean()) { return -1; // unlimited - } else if (limit == DEFAULT_LIMIT && r.nextBoolean()) { + } else if (limit == DEFAULT_LIMIT && r.nextBoolean()) { return null; // sometimes, don't specify limit if it's the default } return limit; } - + /** * picks a random value for the "overrequest" param, biased in favor of interesting test cases. * - * @return a number to specify in the request, or null to specify nothing (trigger default behavior) + * @return a number to specify in the request, or null to specify nothing (trigger default + * behavior) * @see #UNIQUE_FIELD_VALS */ public static Integer randomOverrequestParam(Random r) { - switch(r.nextInt(10)) { + switch (r.nextInt(10)) { case 0: case 1: case 2: @@ -745,66 +805,76 @@ public static Integer randomOverrequestParam(Random r) { case 5: return r.nextInt(UNIQUE_FIELD_VALS); // 20% ask for less them what's needed case 6: - return r.nextInt(Integer.MAX_VALUE); // 10%: completley random value, statisticaly more then enough - default: break; + return r.nextInt( + Integer.MAX_VALUE); // 10%: completley random value, statisticaly more then enough + default: + break; } // else.... either leave param unspecified (or redundently specify the -1 default) return r.nextBoolean() ? null : -1; } - + /** - * picks a random value for the "allBuckets" param, biased in favor of interesting test cases. - * This bucket should be ignored by relatedness, but inclusion should not cause any problems - * (or change the results) - * - *
<p>
- * NOTE: allBuckets is meaningless in conjunction with the STREAM processor, so
- * this method always returns null if sort is index asc.
- * </p>
+ * picks a random value for the "allBuckets" param, biased in favor of interesting test cases. + * This bucket should be ignored by relatedness, but inclusion should not cause any problems (or + * change the results) * + *
+ * <p>
NOTE: allBuckets is meaningless in conjunction with the STREAM + * processor, so this method always returns null if sort is index asc. * * @return a Boolean, may be null */ public static Boolean randomAllBucketsParam(final Random r, final String sort) { - switch(r.nextInt(4)) { - case 0: return true; - case 1: return false; - case 2: - case 3: return null; - default: throw new RuntimeException("Broken case statement"); + switch (r.nextInt(4)) { + case 0: + return true; + case 1: + return false; + case 2: + case 3: + return null; + default: + throw new RuntimeException("Broken case statement"); } } - /** + /** * recursive helper method for building random facets * * @param keyCounter used to ensure every generated facet has a unique key name - * @param maxDepth max possible depth allowed for the recusion, a lower value may be used depending on how many facets are returned at the current level. + * @param maxDepth max possible depth allowed for the recusion, a lower value may be used + * depending on how many facets are returned at the current level. */ - private static Map buildRandomFacets(AtomicInteger keyCounter, int maxDepth) { - final int numFacets = Math.max(1, TestUtil.nextInt(random(), -1, 3)); // 3/5th chance of being '1' - Map results = new LinkedHashMap<>(); + private static Map buildRandomFacets( + AtomicInteger keyCounter, int maxDepth) { + final int numFacets = + Math.max(1, TestUtil.nextInt(random(), -1, 3)); // 3/5th chance of being '1' + Map results = new LinkedHashMap<>(); for (int i = 0; i < numFacets; i++) { - if (keyCounter.get() < 3) { // a hard limit on the total number of facets (regardless of depth) to reduce OOM risk - + if (keyCounter.get() + < 3) { // a hard limit on the total number of facets (regardless of depth) to reduce OOM + // risk + final String sort = randomSortParam(random()); final String facetField = randomFacetField(random()); - final TermFacet facet = new TermFacet(facetField, - map("sort", sort, - "prelim_sort", randomPrelimSortParam(random(), sort), - "limit", randomLimitParam(random(), sort), - "overrequest", randomOverrequestParam(random()), - "prefix", randomPrefixParam(random(), facetField), - "allBuckets", randomAllBucketsParam(random(), sort), - "perSeg", randomPerSegParam(random()))); - - - + final TermFacet facet = + new TermFacet( + facetField, + map( + "sort", sort, + "prelim_sort", randomPrelimSortParam(random(), sort), + "limit", randomLimitParam(random(), sort), + "overrequest", randomOverrequestParam(random()), + "prefix", randomPrefixParam(random(), facetField), + "allBuckets", randomAllBucketsParam(random(), sort), + "perSeg", randomPerSegParam(random()))); + results.put("facet_" + keyCounter.incrementAndGet(), facet); if (0 < maxDepth) { // if we're going wide, don't go deep final int nextMaxDepth = Math.max(0, maxDepth - numFacets); - facet.subFacets.putAll(buildRandomFacets(keyCounter, TestUtil.nextInt(random(), 0, nextMaxDepth))); + facet.subFacets.putAll( + buildRandomFacets(keyCounter, TestUtil.nextInt(random(), 0, nextMaxDepth))); } } } @@ -812,9 +882,9 @@ private static Map buildRandomFacets(AtomicInteger keyCounter, } } - /** - * returns a random SolrClient -- either a CloudSolrClient, or an HttpSolrClient pointed - * at a node in our cluster + /** + * returns a random SolrClient -- either a CloudSolrClient, or an HttpSolrClient pointed at a node + * in our cluster */ public static SolrClient getRandClient(Random rand) { int numClients = CLIENTS.size(); @@ -825,32 +895,31 @@ public static SolrClient getRandClient(Random 
rand) { /** * Uses a random SolrClient to execture a request and returns only the numFound + * * @see #getRandClient */ public static long getNumFound(final SolrParams req) throws SolrServerException, IOException { return getRandClient(random()).query(req).getResults().getNumFound(); } - + public static void waitForRecoveriesToFinish(CloudSolrClient client) throws Exception { assert null != client.getDefaultCollection(); - AbstractDistribZkTestBase.waitForRecoveriesToFinish(client.getDefaultCollection(), - client.getZkStateReader(), - true, true, 330); + AbstractDistribZkTestBase.waitForRecoveriesToFinish( + client.getDefaultCollection(), client.getZkStateReader(), true, true, 330); } - - /** helper macro: fails on null keys, skips pairs with null values */ - public static Map map(Object... pairs) { + + /** helper macro: fails on null keys, skips pairs with null values */ + public static Map map(Object... pairs) { if (0 != pairs.length % 2) throw new IllegalArgumentException("uneven number of arguments"); - final Map map = new LinkedHashMap<>(); - for (int i = 0; i < pairs.length; i+=2) { + final Map map = new LinkedHashMap<>(); + for (int i = 0; i < pairs.length; i += 2) { final Object key = pairs[i]; - final Object val = pairs[i+1]; + final Object val = pairs[i + 1]; if (null == key) throw new NullPointerException("arguemnt " + i); if (null == val) continue; - + map.put(key.toString(), val); } return map; } - } diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKGEquiv.java b/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKGEquiv.java index 607f9f6752b..cc73d97274c 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKGEquiv.java +++ b/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKGEquiv.java @@ -16,6 +16,9 @@ */ package org.apache.solr.search.facet; +import static org.apache.solr.search.facet.FacetField.FacetMethod; +import static org.apache.solr.search.facet.SlotAcc.SweepingCountSlotAcc.SWEEP_COLLECTION_DEBUG_KEY; + import java.io.IOException; import java.lang.invoke.MethodHandles; import java.nio.file.Path; @@ -29,7 +32,6 @@ import java.util.Map; import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; - import org.apache.lucene.util.TestUtil; import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.client.solrj.SolrClient; @@ -54,24 +56,18 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.search.facet.FacetField.FacetMethod; -import static org.apache.solr.search.facet.SlotAcc.SweepingCountSlotAcc.SWEEP_COLLECTION_DEBUG_KEY; - -/** - *
<p>
- * A randomized test of nested facets using the relatedness() function, that asserts the
- * results are consistent and equivalent regardless of what method (ie: FacetFieldProcessor)
- * and/or {@value RelatednessAgg#SWEEP_COLLECTION} option is requested.
- * </p>
- * <p>
- * This test is based on {@link TestCloudJSONFacetSKG} but does not
- * force refine: true nor specify a domain: { 'query':'*:*' } for every facet,
- * because this test does not attempt to prove the results with validation requests.
- * </p>
- * <p>
- * This test only concerns itself with the equivalency of results
- * </p>
- * +/** + * A randomized test of nested facets using the relatedness() function, that asserts + * the results are consistent and equivalent regardless of what method (ie: + * FacetFieldProcessor) and/or {@value RelatednessAgg#SWEEP_COLLECTION} option is + * requested. + * + *
+ * <p>
This test is based on {@link TestCloudJSONFacetSKG} but does not force + * refine: true nor specify a domain: { 'query':'*:*' } for every facet, because + * this test does not attempt to prove the results with validation requests. + * + *
+ * <p>
This test only concerns itself with the equivalency of results + * * @see TestCloudJSONFacetSKG */ public class TestCloudJSONFacetSKGEquiv extends SolrCloudTestCase { @@ -86,18 +82,20 @@ public class TestCloudJSONFacetSKGEquiv extends SolrCloudTestCase { private static final int UNIQUE_FIELD_VALS = 50; /** Multi-Valued string field suffixes that can be randomized for testing diff facet code paths */ - private static final String[] MULTI_STR_FIELD_SUFFIXES = new String[] - { "_multi_ss", "_multi_sds", "_multi_sdsS" }; + private static final String[] MULTI_STR_FIELD_SUFFIXES = + new String[] {"_multi_ss", "_multi_sds", "_multi_sdsS"}; /** Multi-Valued int field suffixes that can be randomized for testing diff facet code paths */ - private static final String[] MULTI_INT_FIELD_SUFFIXES = new String[] - { "_multi_is", "_multi_ids", "_multi_idsS" }; + private static final String[] MULTI_INT_FIELD_SUFFIXES = + new String[] {"_multi_is", "_multi_ids", "_multi_idsS"}; - /** Single Valued string field suffixes that can be randomized for testing diff facet code paths */ - private static final String[] SOLO_STR_FIELD_SUFFIXES = new String[] - { "_solo_s", "_solo_sd", "_solo_sdS" }; + /** + * Single Valued string field suffixes that can be randomized for testing diff facet code paths + */ + private static final String[] SOLO_STR_FIELD_SUFFIXES = + new String[] {"_solo_s", "_solo_sd", "_solo_sdS"}; /** Single Valued int field suffixes that can be randomized for testing diff facet code paths */ - private static final String[] SOLO_INT_FIELD_SUFFIXES = new String[] - { "_solo_i", "_solo_id", "_solo_idS" }; + private static final String[] SOLO_INT_FIELD_SUFFIXES = + new String[] {"_solo_i", "_solo_id", "_solo_idS"}; /** A basic client for operations at the cloud level, default collection will be set */ private static CloudSolrClient CLOUD_CLIENT; @@ -107,26 +105,28 @@ public class TestCloudJSONFacetSKGEquiv extends SolrCloudTestCase { @BeforeClass private static void createMiniSolrCloudCluster() throws Exception { // sanity check constants - assertTrue("bad test constants: some suffixes will never be tested", - (MULTI_STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) && - (MULTI_INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM) && - (SOLO_STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) && - (SOLO_INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM)); - + assertTrue( + "bad test constants: some suffixes will never be tested", + (MULTI_STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) + && (MULTI_INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM) + && (SOLO_STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) + && (SOLO_INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM)); + // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); - + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); + // multi replicas should not matter... final int repFactor = usually() ? 1 : 2; // ... but we definitely want to test multiple shards - final int numShards = TestUtil.nextInt(random(), 1, (usually() ? 2 :3)); + final int numShards = TestUtil.nextInt(random(), 1, (usually() ? 
2 : 3)); final int numNodes = (numShards * repFactor); - + final String configName = DEBUG_LABEL + "_config-set"; final Path configDir = TEST_COLL1_CONF(); - + configureCluster(numNodes).addConfig(configName, configDir).configure(); - + Map collectionProperties = new LinkedHashMap<>(); collectionProperties.put("config", "solrconfig-tlog.xml"); collectionProperties.put("schema", "schema_latest.xml"); @@ -146,7 +146,7 @@ private static void createMiniSolrCloudCluster() throws Exception { final int numDocs = atLeast(100); for (int id = 0; id < numDocs; id++) { - SolrInputDocument doc = sdoc("id", ""+id); + SolrInputDocument doc = sdoc("id", "" + id); // NOTE: for each fieldNum, there are actaully 4 fields: multi(str+int) + solo(str+int) for (int fieldNum = 0; fieldNum < MAX_FIELD_NUM; fieldNum++) { @@ -154,12 +154,13 @@ private static void createMiniSolrCloudCluster() throws Exception { final int numValsThisDoc = TestUtil.nextInt(random(), 0, (usually() ? 5 : 10)); for (int v = 0; v < numValsThisDoc; v++) { final String fieldValue = randFieldValue(fieldNum); - + // multi valued: one string, and one integer doc.addField(multiStrField(fieldNum), fieldValue); doc.addField(multiIntField(fieldNum), fieldValue); } - if (3 <= numValsThisDoc) { // use num values in multivalue to inform sparseness of single value + // use num values in multivalue to inform sparseness of single value + if (3 <= numValsThisDoc) { final String fieldValue = randFieldValue(fieldNum); doc.addField(soloStrField(fieldNum), fieldValue); doc.addField(soloIntField(fieldNum), fieldValue); @@ -167,21 +168,26 @@ private static void createMiniSolrCloudCluster() throws Exception { } CLOUD_CLIENT.add(doc); if (random().nextInt(100) < 1) { - CLOUD_CLIENT.commit(); // commit 1% of the time to create new segments + CLOUD_CLIENT.commit(); // commit 1% of the time to create new segments } if (random().nextInt(100) < 5) { - CLOUD_CLIENT.add(doc); // duplicate the doc 5% of the time to create deleted docs + CLOUD_CLIENT.add(doc); // duplicate the doc 5% of the time to create deleted docs } } CLOUD_CLIENT.commit(); - log.info("Created {} using numNodes={}, numShards={}, repFactor={}, numDocs={}", - COLLECTION_NAME, numNodes, numShards, repFactor, numDocs); + log.info( + "Created {} using numNodes={}, numShards={}, repFactor={}, numDocs={}", + COLLECTION_NAME, + numNodes, + numShards, + repFactor, + numDocs); } /** - * Given a (random) number, and a (static) array of possible suffixes returns a consistent field name that - * uses that number and one of hte specified suffixes in it's name. + * Given a (random) number, and a (static) array of possible suffixes returns a consistent field + * name that uses that number and one of hte specified suffixes in it's name. * * @see #MULTI_STR_FIELD_SUFFIXES * @see #MULTI_INT_FIELD_SUFFIXES @@ -190,7 +196,7 @@ private static void createMiniSolrCloudCluster() throws Exception { */ private static String field(final String[] suffixes, final int fieldNum) { assert fieldNum < MAX_FIELD_NUM; - + final String suffix = suffixes[fieldNum % suffixes.length]; return "field_" + fieldNum + suffix; } @@ -212,10 +218,10 @@ private static String soloIntField(final int fieldNum) { } /** - * Given a (random) field number, returns a random (integer based) value for that field. 
- * NOTE: The number of unique values in each field is constant according to {@link #UNIQUE_FIELD_VALS} - * but the precise range of values will vary for each unique field number, such that cross field joins - * will match fewer documents based on how far apart the field numbers are. + * Given a (random) field number, returns a random (integer based) value for that field. NOTE: The + * number of unique values in each field is constant according to {@link #UNIQUE_FIELD_VALS} but + * the precise range of values will vary for each unique field number, such that cross + * field joins will match fewer documents based on how far apart the field numbers are. * * @see #UNIQUE_FIELD_VALS * @see #field @@ -224,7 +230,6 @@ private static String randFieldValue(final int fieldNum) { return "" + (fieldNum + TestUtil.nextInt(random(), 1, UNIQUE_FIELD_VALS)); } - @AfterClass private static void afterClass() throws Exception { if (null != CLOUD_CLIENT) { @@ -236,114 +241,124 @@ private static void afterClass() throws Exception { } CLIENTS.clear(); } - + /** - * Sanity check that our method of varying the method param - * works and can be verified by inspecting the debug output of basic requests. + * Sanity check that our method of varying the method param works and can be verified + * by inspecting the debug output of basic requests. */ public void testWhiteboxSanityMethodProcessorDebug() throws Exception { // NOTE: json.facet debugging output can be wonky, particularly when dealing with cloud // so for these queries we keep it simple: // - only one "top" facet per request // - no refinement - // even with those constraints in place, a single facet can (may/sometimes?) produce multiple debug - // blocks - aparently due to shard merging? So... + // even with those constraints in place, a single facet can (may/sometimes?) produce multiple + // debug blocks - apparently due to shard merging? So... // - only inspect the "first" debug NamedList in the results // - + // simple individual facet that sorts on an skg stat... final TermFacet f = new TermFacet(soloStrField(9), 10, 0, "skg desc", null); - final Map facets = new LinkedHashMap<>(); + final Map facets = new LinkedHashMap<>(); facets.put("str", f); - - final SolrParams facetParams = params("rows","0", - "debug","true", // SOLR-14451 - // *:* is the only "safe" query for this test, - // to ensure we always have at least one bucket for every facet - // so we can be confident in getting the debug we expect... - "q", "*:*", - "fore", multiStrField(7)+":11", - "back", "*:*", - "json.facet", Facet.toJSONFacetParamValue(facets)); - - { // dv - final SolrParams params = SolrParams.wrapDefaults(params("method_val", "dv"), - facetParams); + + final SolrParams facetParams = + params( + "rows", "0", + "debug", "true", // SOLR-14451 + // *:* is the only "safe" query for this test, + // to ensure we always have at least one bucket for every facet + // so we can be confident in getting the debug we expect... 
+ "q", "*:*", + "fore", multiStrField(7) + ":11", + "back", "*:*", + "json.facet", Facet.toJSONFacetParamValue(facets)); + + { // dv + final SolrParams params = SolrParams.wrapDefaults(params("method_val", "dv"), facetParams); final NamedList debug = getFacetDebug(params); assertEquals(FacetFieldProcessorByArrayDV.class.getSimpleName(), debug.get("processor")); } { // dvhash - final SolrParams params = SolrParams.wrapDefaults(params("method_val", "dvhash"), - facetParams); + final SolrParams params = + SolrParams.wrapDefaults(params("method_val", "dvhash"), facetParams); final NamedList debug = getFacetDebug(params); assertEquals(FacetFieldProcessorByHashDV.class.getSimpleName(), debug.get("processor")); } } - - /** - * Sanity check that our method of varying the {@value RelatednessAgg#SWEEP_COLLECTION} in conjunction with the - * method params works and can be verified by inspecting the debug output of basic requests. + + /** + * Sanity check that our method of varying the {@value RelatednessAgg#SWEEP_COLLECTION} + * in conjunction with the method params works and can be verified by + * inspecting the debug output of basic requests. */ public void testWhiteboxSanitySweepDebug() throws Exception { // NOTE: json.facet debugging output can be wonky, particularly when dealing with cloud // so for these queries we keep it simple: // - only one "top" facet per request // - no refinement - // even with those constraints in place, a single facet can (may/sometimes?) produce multiple debug - // blocks - aparently due to shard merging? So... + // even with those constraints in place, a single facet can (may/sometimes?) produce multiple + // debug blocks - apparently due to shard merging? So... // - only inspect the "first" debug NamedList in the results // - - final SolrParams baseParams = params("rows","0", - "debug","true", // SOLR-14451 - // *:* is the only "safe" query for this test, - // to ensure we always have at least one bucket for every facet - // so we can be confident in getting the debug we expect... - "q", "*:*", - "fore", multiStrField(7)+":11", - "back", "*:*"); - + + final SolrParams baseParams = + params( + "rows", "0", + "debug", "true", // SOLR-14451 + // *:* is the only "safe" query for this test, + // to ensure we always have at least one bucket for every facet + // so we can be confident in getting the debug we expect... + "q", "*:*", + "fore", multiStrField(7) + ":11", + "back", "*:*"); + // simple individual facet that sorts on an skg stat... // // all results we test should be the same even if there is another 'skg_extra' stat, // it shouldn't be involved in the sweeping at all. 
- for (Facet extra : Arrays.asList(null, new RelatednessFacet(multiStrField(2)+":9", null))) { - // choose a single value string so we know both 'dv' (sweep) and 'dvhash' (no sweep) can be specified + for (Facet extra : Arrays.asList(null, new RelatednessFacet(multiStrField(2) + ":9", null))) { + // choose a single value string so we know both 'dv' (sweep) and 'dvhash' (no sweep) can be + // specified final TermFacet f = new TermFacet(soloStrField(9), 10, 0, "skg desc", null); if (null != extra) { f.subFacets.put("skg_extra", extra); } - final Map facets = new LinkedHashMap<>(); + final Map facets = new LinkedHashMap<>(); facets.put("str", f); - - final SolrParams facetParams - = SolrParams.wrapDefaults(params("method_val", "dv", - "json.facet", Facet.toJSONFacetParamValue(facets)), - baseParams); - + + final SolrParams facetParams = + SolrParams.wrapDefaults( + params("method_val", "dv", "json.facet", Facet.toJSONFacetParamValue(facets)), + baseParams); + // both default sweep option and explicit sweep should give same results... - for (SolrParams sweepParams : Arrays.asList(params(), - params("sweep_key", RelatednessAgg.SWEEP_COLLECTION, - "sweep_val", "true"))) { + for (SolrParams sweepParams : + Arrays.asList( + params(), + params("sweep_key", RelatednessAgg.SWEEP_COLLECTION, "sweep_val", "true"))) { final SolrParams params = SolrParams.wrapDefaults(sweepParams, facetParams); - + final NamedList debug = getFacetDebug(params); assertEquals(FacetFieldProcessorByArrayDV.class.getSimpleName(), debug.get("processor")); @SuppressWarnings("unchecked") - final NamedList sweep_debug = (NamedList) debug.get(SWEEP_COLLECTION_DEBUG_KEY); + final NamedList sweep_debug = + (NamedList) debug.get(SWEEP_COLLECTION_DEBUG_KEY); assertNotNull(sweep_debug); assertEquals("count", sweep_debug.get("base")); - assertEquals(Arrays.asList("skg!fg","skg!bg"), sweep_debug.get("accs")); + assertEquals(Arrays.asList("skg!fg", "skg!bg"), sweep_debug.get("accs")); assertEquals(Arrays.asList("skg"), sweep_debug.get("mapped")); } - { // 'dv' will always *try* to sweep, but disabling on stat should mean debug is mostly empty... - final SolrParams params = SolrParams.wrapDefaults(params("sweep_key", RelatednessAgg.SWEEP_COLLECTION, - "sweep_val", "false"), - facetParams); + { // 'dv' will always *try* to sweep, but disabling on stat should mean debug is mostly + // empty... + final SolrParams params = + SolrParams.wrapDefaults( + params("sweep_key", RelatednessAgg.SWEEP_COLLECTION, "sweep_val", "false"), + facetParams); final NamedList debug = getFacetDebug(params); assertEquals(FacetFieldProcessorByArrayDV.class.getSimpleName(), debug.get("processor")); @SuppressWarnings("unchecked") - final NamedList sweep_debug = (NamedList) debug.get(SWEEP_COLLECTION_DEBUG_KEY); + final NamedList sweep_debug = + (NamedList) debug.get(SWEEP_COLLECTION_DEBUG_KEY); assertNotNull(sweep_debug); assertEquals("count", sweep_debug.get("base")); assertEquals(Collections.emptyList(), sweep_debug.get("accs")); @@ -351,10 +366,13 @@ public void testWhiteboxSanitySweepDebug() throws Exception { } { // if we override 'dv' with 'hashdv' which doesn't sweep, our sweep debug should be empty, // even if the skg stat does ask for sweeping explicitly... 
- final SolrParams params = SolrParams.wrapDefaults(params("method_val", "dvhash", - "sweep_key", RelatednessAgg.SWEEP_COLLECTION, - "sweep_val", "true"), - facetParams); + final SolrParams params = + SolrParams.wrapDefaults( + params( + "method_val", "dvhash", + "sweep_key", RelatednessAgg.SWEEP_COLLECTION, + "sweep_val", "true"), + facetParams); final NamedList debug = getFacetDebug(params); assertEquals(FacetFieldProcessorByHashDV.class.getSimpleName(), debug.get("processor")); assertNull(debug.get(SWEEP_COLLECTION_DEBUG_KEY)); @@ -365,85 +383,92 @@ public void testWhiteboxSanitySweepDebug() throws Exception { // // all results we test should be the same even if there is another 'skg_extra' stat, // neither skg should be involved in the sweeping at all. - for (Facet extra : Arrays.asList(null, new RelatednessFacet(multiStrField(2)+":9", null))) { - // choose a single value string so we know both 'dv' (sweep) and 'dvhash' (no sweep) can be specified - final TermFacet f = new TermFacet(soloStrField(9), map("limit", 3, "overrequest", 0, - "sort", "skg desc", - "prelim_sort", "count asc")); + for (Facet extra : Arrays.asList(null, new RelatednessFacet(multiStrField(2) + ":9", null))) { + // choose a single value string so we know both 'dv' (sweep) and 'dvhash' (no sweep) can be + // specified + final TermFacet f = + new TermFacet( + soloStrField(9), + map("limit", 3, "overrequest", 0, "sort", "skg desc", "prelim_sort", "count asc")); if (null != extra) { f.subFacets.put("skg_extra", extra); } - final Map facets = new LinkedHashMap<>(); + final Map facets = new LinkedHashMap<>(); facets.put("str", f); - - final SolrParams facetParams - = SolrParams.wrapDefaults(params("method_val", "dv", - "json.facet", Facet.toJSONFacetParamValue(facets)), - baseParams); - - // default sweep as well as any explicit sweep=true/false values should give same results: no sweeping - for (SolrParams sweepParams : Arrays.asList(params(), - params("sweep_key", RelatednessAgg.SWEEP_COLLECTION, - "sweep_val", "false"), - params("sweep_key", RelatednessAgg.SWEEP_COLLECTION, - "sweep_val", "true"))) { + + final SolrParams facetParams = + SolrParams.wrapDefaults( + params("method_val", "dv", "json.facet", Facet.toJSONFacetParamValue(facets)), + baseParams); + + // default sweep as well as any explicit sweep=true/false values should give same results: no + // sweeping + for (SolrParams sweepParams : + Arrays.asList( + params(), + params("sweep_key", RelatednessAgg.SWEEP_COLLECTION, "sweep_val", "false"), + params("sweep_key", RelatednessAgg.SWEEP_COLLECTION, "sweep_val", "true"))) { final SolrParams params = SolrParams.wrapDefaults(sweepParams, facetParams); - + final NamedList debug = getFacetDebug(params); assertEquals(FacetFieldProcessorByArrayDV.class.getSimpleName(), debug.get("processor")); @SuppressWarnings("unchecked") - final NamedList sweep_debug = (NamedList) debug.get(SWEEP_COLLECTION_DEBUG_KEY); + final NamedList sweep_debug = + (NamedList) debug.get(SWEEP_COLLECTION_DEBUG_KEY); assertNotNull(sweep_debug); assertEquals("count", sweep_debug.get("base")); assertEquals(Collections.emptyList(), sweep_debug.get("accs")); assertEquals(Collections.emptyList(), sweep_debug.get("mapped")); } } - + { // single facet with infinite limit + multiple skgs... // this should trigger MultiAcc collection, causing sweeping on both skg functions // // all results we test should be the same even if there is another 'min' stat, // in each term facet. it shouldn't affect the sweeping/MultiAcc at all. 
- for (Facet extra : Arrays.asList(null, new SumFacet(multiIntField(2)))) { - final Map facets = new LinkedHashMap<>(); + for (Facet extra : Arrays.asList(null, new SumFacet(multiIntField(2)))) { + final Map facets = new LinkedHashMap<>(); final TermFacet facet = new TermFacet(soloStrField(9), -1, 0, "skg2 desc", null); - facet.subFacets.put("skg2", new RelatednessFacet(multiStrField(2)+":9", null)); + facet.subFacets.put("skg2", new RelatednessFacet(multiStrField(2) + ":9", null)); if (null != extra) { facet.subFacets.put("sum", extra); } facets.put("str", facet); - final SolrParams facetParams - = SolrParams.wrapDefaults(params("method_val", "dv", - "json.facet", Facet.toJSONFacetParamValue(facets)), - baseParams); - + final SolrParams facetParams = + SolrParams.wrapDefaults( + params("method_val", "dv", "json.facet", Facet.toJSONFacetParamValue(facets)), + baseParams); + // both default sweep option and explicit sweep should give same results... - for (SolrParams sweepParams : Arrays.asList(params(), - params("sweep_key", RelatednessAgg.SWEEP_COLLECTION, - "sweep_val", "true"))) { + for (SolrParams sweepParams : + Arrays.asList( + params(), + params("sweep_key", RelatednessAgg.SWEEP_COLLECTION, "sweep_val", "true"))) { final SolrParams params = SolrParams.wrapDefaults(sweepParams, facetParams); - + final NamedList debug = getFacetDebug(params); assertEquals(FacetFieldProcessorByArrayDV.class.getSimpleName(), debug.get("processor")); @SuppressWarnings("unchecked") - final NamedList sweep_debug = (NamedList) debug.get(SWEEP_COLLECTION_DEBUG_KEY); + final NamedList sweep_debug = + (NamedList) debug.get(SWEEP_COLLECTION_DEBUG_KEY); assertNotNull(sweep_debug); assertEquals("count", sweep_debug.get("base")); - assertEquals(Arrays.asList("skg!fg","skg!bg","skg2!fg","skg2!bg"), sweep_debug.get("accs")); - assertEquals(Arrays.asList("skg","skg2"), sweep_debug.get("mapped")); + assertEquals( + Arrays.asList("skg!fg", "skg!bg", "skg2!fg", "skg2!bg"), sweep_debug.get("accs")); + assertEquals(Arrays.asList("skg", "skg2"), sweep_debug.get("mapped")); } } } - + // nested facets that both sort on an skg stat // (set limit + overrequest tiny to keep multishard response managable) // // all results we test should be the same even if there is another 'skg_extra' stat, // in each term facet. they shouldn't be involved in the sweeping at all. 
- for (Facet extra : Arrays.asList(null, new RelatednessFacet(multiStrField(2)+":9", null))) { - // choose single value strings so we know both 'dv' (sweep) and 'dvhash' (no sweep) can be specified - // choose 'id' for the parent facet so we are garunteed some child facets + for (Facet extra : Arrays.asList(null, new RelatednessFacet(multiStrField(2) + ":9", null))) { + // choose single value strings so we know both 'dv' (sweep) and 'dvhash' (no sweep) can be + // specified choose 'id' for the parent facet so we are garunteed some child facets final TermFacet parent = new TermFacet("id", 1, 0, "skg desc", false); final TermFacet child = new TermFacet(soloStrField(7), 1, 0, "skg desc", false); parent.subFacets.put("child", child); @@ -451,35 +476,38 @@ public void testWhiteboxSanitySweepDebug() throws Exception { parent.subFacets.put("skg_extra", extra); child.subFacets.put("skg_extra", extra); } - final Map facets = new LinkedHashMap<>(); + final Map facets = new LinkedHashMap<>(); facets.put("parent", parent); - - final SolrParams facetParams - = SolrParams.wrapDefaults(params("method_val", "dv", - "json.facet", Facet.toJSONFacetParamValue(facets)), - baseParams); + + final SolrParams facetParams = + SolrParams.wrapDefaults( + params("method_val", "dv", "json.facet", Facet.toJSONFacetParamValue(facets)), + baseParams); // both default sweep option and explicit sweep should give same results... - for (SolrParams sweepParams : Arrays.asList(params(), - params("sweep_key", RelatednessAgg.SWEEP_COLLECTION, - "sweep_val", "true"))) { + for (SolrParams sweepParams : + Arrays.asList( + params(), + params("sweep_key", RelatednessAgg.SWEEP_COLLECTION, "sweep_val", "true"))) { final SolrParams params = SolrParams.wrapDefaults(sweepParams, facetParams); - + final NamedList parentDebug = getFacetDebug(params); assertEquals("id", parentDebug.get("field")); assertNotNull(parentDebug.get("sub-facet")); // may be multiples from diff shards, just use first one @SuppressWarnings("unchecked") - final NamedList childDebug = ((List>)parentDebug.get("sub-facet")).get(0); + final NamedList childDebug = + ((List>) parentDebug.get("sub-facet")).get(0); assertEquals(soloStrField(7), childDebug.get("field")); // these should all be true for both the parent and the child debug.. for (NamedList debug : Arrays.asList(parentDebug, childDebug)) { assertEquals(FacetFieldProcessorByArrayDV.class.getSimpleName(), debug.get("processor")); @SuppressWarnings("unchecked") - final NamedList sweep_debug = (NamedList) debug.get(SWEEP_COLLECTION_DEBUG_KEY); + final NamedList sweep_debug = + (NamedList) debug.get(SWEEP_COLLECTION_DEBUG_KEY); assertNotNull(sweep_debug); assertEquals("count", sweep_debug.get("base")); - assertEquals(Arrays.asList("skg!fg","skg!bg"), sweep_debug.get("accs")); + assertEquals(Arrays.asList("skg!fg", "skg!bg"), sweep_debug.get("accs")); assertEquals(Arrays.asList("skg"), sweep_debug.get("mapped")); } } @@ -487,9 +515,9 @@ public void testWhiteboxSanitySweepDebug() throws Exception { } /** - * returns the FIRST NamedList (under the implicit 'null' FacetQuery) in the "facet-trace" output - * of the request. Should not be used with multiple "top level" facets - * (the output is too confusing in cloud mode to be confident where/qhy each NamedList comes from) + * returns the FIRST NamedList (under the implicit 'null' FacetQuery) in the "facet-trace" + * output of the request. 
Should not be used with multiple "top level" facets (the output is too + * confusing in cloud mode to be confident where/qhy each NamedList comes from) */ private NamedList getFacetDebug(final SolrParams params) { try { @@ -497,167 +525,195 @@ private NamedList getFacetDebug(final SolrParams params) { assertNotNull(params + " is null rsp?", rsp); final NamedList topNamedList = rsp.getResponse(); assertNotNull(params + " is null topNamedList?", topNamedList); - + // skip past the (implicit) top Facet query to get it's "sub-facets" (the real facets)... @SuppressWarnings({"unchecked"}) final List> facetDebug = - (List>) topNamedList.findRecursive("debug", "facet-trace", "sub-facet"); + (List>) topNamedList.findRecursive("debug", "facet-trace", "sub-facet"); assertNotNull(topNamedList + " ... null facet debug?", facetDebug); assertFalse(topNamedList + " ... not even one facet debug?", facetDebug.isEmpty()); return facetDebug.get(0); } catch (Exception e) { - throw new RuntimeException("query failed: " + params + ": " + - e.getMessage(), e); - } - + throw new RuntimeException("query failed: " + params + ": " + e.getMessage(), e); + } } - /** - * Test some small, hand crafted, but non-trivial queries that are - * easier to trace/debug then a pure random monstrosity. - * (ie: if something obvious gets broken, this test may fail faster and in a more obvious way then testRandom) + /** + * Test some small, hand crafted, but non-trivial queries that are easier to trace/debug then a + * pure random monstrosity. (ie: if something obvious gets broken, this test may fail faster and + * in a more obvious way then testRandom) */ public void testBespoke() throws Exception { { // two trivial single level facets - Map facets = new LinkedHashMap<>(); + Map facets = new LinkedHashMap<>(); facets.put("str", new TermFacet(multiStrField(9), UNIQUE_FIELD_VALS, 0, null, null)); facets.put("int", new TermFacet(multiIntField(9), UNIQUE_FIELD_VALS, 0, null, null)); - assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*"); + assertFacetSKGsAreConsistent( + facets, multiStrField(7) + ":11", multiStrField(5) + ":9", "*:*"); } - + { // trivial single level facet w/sorting on skg and refinement explicitly disabled - Map facets = new LinkedHashMap<>(); + Map facets = new LinkedHashMap<>(); facets.put("xxx", new TermFacet(multiStrField(9), UNIQUE_FIELD_VALS, 0, "skg desc", false)); - assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*"); + assertFacetSKGsAreConsistent( + facets, multiStrField(7) + ":11", multiStrField(5) + ":9", "*:*"); } - + { // trivial single level facet w/ perSeg - Map facets = new LinkedHashMap<>(); - facets.put("xxx", new TermFacet(multiStrField(9), - map("perSeg", true))); - - assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*"); + Map facets = new LinkedHashMap<>(); + facets.put("xxx", new TermFacet(multiStrField(9), map("perSeg", true))); + + assertFacetSKGsAreConsistent( + facets, multiStrField(7) + ":11", multiStrField(5) + ":9", "*:*"); } - - { // trivial single level facet w/ prefix - Map facets = new LinkedHashMap<>(); - facets.put("xxx", new TermFacet(multiStrField(9), - Map.of("prefix", "2"))); - - - assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*"); + + { // trivial single level facet w/ prefix + Map facets = new LinkedHashMap<>(); + facets.put("xxx", new TermFacet(multiStrField(9), Map.of("prefix", "2"))); + + assertFacetSKGsAreConsistent( + 
        facets, multiStrField(7) + ":11", multiStrField(5) + ":9", "*:*");
    }
-
+
    { // trivial single level facet w/ 2 diff ways to request "limit = (effectively) Infinite"
      // to sanity check refinement of buckets missing from other shard in both cases
-
+
      // NOTE that these two queries & facets *should* be effectively identical given that the
      // very large limit value is big enough that no shard will ever return that many terms,
      // but the "limit=-1" case actually triggers slightly different code paths
      // because it causes FacetField.returnsPartial() to be "true"
-      for (int limit : new int[] { 999999999, -1 }) {
-        Map facets = new LinkedHashMap<>();
-        facets.put("top_facet_limit__" + limit, new TermFacet(multiStrField(9), limit, 0, "skg desc", true));
-        assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*");
+      for (int limit : new int[] {999999999, -1}) {
+        Map facets = new LinkedHashMap<>();
+        facets.put(
+            "top_facet_limit__" + limit,
+            new TermFacet(multiStrField(9), limit, 0, "skg desc", true));
+        assertFacetSKGsAreConsistent(
+            facets, multiStrField(7) + ":11", multiStrField(5) + ":9", "*:*");
      }
    }
-
+
    { // multi-valued facet field w/infinite limit and an extra (non-SKG / non-sweeping) stat
      final TermFacet xxx = new TermFacet(multiStrField(12), -1, 0, "count asc", false);
      xxx.subFacets.put("sum", new SumFacet(multiIntField(4)));
-      final Map facets = new LinkedHashMap<>();
+      final Map facets = new LinkedHashMap<>();
      facets.put("xxx", xxx);
-      assertFacetSKGsAreConsistent(facets,
-          buildORQuery(multiStrField(13) + ":26",
-              multiStrField(6) + ":33",
-              multiStrField(9) + ":24"),
-          buildORQuery(multiStrField(4) + ":27",
-              multiStrField(12) + ":18",
-              multiStrField(2) + ":28",
-              multiStrField(13) + ":50"),
-          "*:*");
+      assertFacetSKGsAreConsistent(
+          facets,
+          buildORQuery(
+              multiStrField(13) + ":26", multiStrField(6) + ":33", multiStrField(9) + ":24"),
+          buildORQuery(
+              multiStrField(4) + ":27",
+              multiStrField(12) + ":18",
+              multiStrField(2) + ":28",
+              multiStrField(13) + ":50"),
+          "*:*");
    }
  }
-
+
  public void testBespokeAllBuckets() throws Exception {
    { // single level facet w/sorting on skg and allBuckets
-      Map facets = new LinkedHashMap<>();
-      facets.put("xxx", new TermFacet(multiStrField(9), map("sort", "skg desc",
-          "allBuckets", true)));
-
-      assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*");
+      Map facets = new LinkedHashMap<>();
+      facets.put(
+          "xxx", new TermFacet(multiStrField(9), map("sort", "skg desc", "allBuckets", true)));
+
+      assertFacetSKGsAreConsistent(
+          facets, multiStrField(7) + ":11", multiStrField(5) + ":9", "*:*");
    }
  }
-
+
  public void testBespokePrefix() throws Exception {
-    { // trivial single level facet w/ prefix
-      Map facets = new LinkedHashMap<>();
-      facets.put("xxx", new TermFacet(multiStrField(9),
-          map("sort", "skg desc",
-              "limit", -1,
-              "prefix", "2")));
-
-      assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*");
+    { // trivial single level facet w/ prefix
+      Map facets = new LinkedHashMap<>();
+      facets.put(
+          "xxx",
+          new TermFacet(
+              multiStrField(9),
+              map(
+                  "sort", "skg desc",
+                  "limit", -1,
+                  "prefix", "2")));
+
+      assertFacetSKGsAreConsistent(
+          facets, multiStrField(7) + ":11", multiStrField(5) + ":9", "*:*");
    }
  }
-
-  /**
-   * Given a few explicit "structures" of requests, test many permutations of various params/options.
-   * This is more complex than {@link #testBespoke} but should still be easier to trace/debug than
-   * a pure random monstrosity.
+
+  /**
+   * Given a few explicit "structures" of requests, test many permutations of various
+   * params/options. This is more complex than {@link #testBespoke} but should still be easier to
+   * trace/debug than a pure random monstrosity.
   */
  public void testBespokeStructures() throws Exception {
    // we don't need to test every field, just make sure we test enough fields to hit every suffix..
-    final int maxFacetFieldNum = Collections.max(Arrays.asList(MULTI_STR_FIELD_SUFFIXES.length,
-        MULTI_INT_FIELD_SUFFIXES.length,
-        SOLO_STR_FIELD_SUFFIXES.length,
-        SOLO_INT_FIELD_SUFFIXES.length));
-
+    final int maxFacetFieldNum =
+        Collections.max(
+            Arrays.asList(
+                MULTI_STR_FIELD_SUFFIXES.length,
+                MULTI_INT_FIELD_SUFFIXES.length,
+                SOLO_STR_FIELD_SUFFIXES.length,
+                SOLO_INT_FIELD_SUFFIXES.length));
+
    for (int facetFieldNum = 0; facetFieldNum < maxFacetFieldNum; facetFieldNum++) {
-      for (String facetFieldName : Arrays.asList(soloStrField(facetFieldNum), multiStrField(facetFieldNum))) {
+      for (String facetFieldName :
+          Arrays.asList(soloStrField(facetFieldNum), multiStrField(facetFieldNum))) {
        for (int limit : Arrays.asList(10, -1)) {
          for (String sort : Arrays.asList("count desc", "skg desc", "index asc")) {
            for (Boolean refine : Arrays.asList(false, true)) {
              { // 1 additional (non-SKG / non-sweeping) stat
-                final TermFacet xxx = new TermFacet(facetFieldName, map("limit", limit,
-                    "overrequest", 0,
-                    "sort", sort,
-                    "refine", refine));
+                final TermFacet xxx =
+                    new TermFacet(
+                        facetFieldName,
+                        map(
+                            "limit", limit,
+                            "overrequest", 0,
+                            "sort", sort,
+                            "refine", refine));
                xxx.subFacets.put("sum", new SumFacet(soloIntField(3)));
-                final Map facets = new LinkedHashMap<>();
+                final Map facets = new LinkedHashMap<>();
                facets.put("xxx1", xxx);
-                assertFacetSKGsAreConsistent(facets,
-                    buildORQuery(multiStrField(11) + ":55",
-                        multiStrField(0) + ":46"),
-                    multiStrField(5)+":9", "*:*");
+                assertFacetSKGsAreConsistent(
+                    facets,
+                    buildORQuery(multiStrField(11) + ":55", multiStrField(0) + ":46"),
+                    multiStrField(5) + ":9",
+                    "*:*");
              }
              { // multiple SKGs
-                final TermFacet xxx = new TermFacet(facetFieldName, map("limit", limit,
-                    "overrequest", 0,
-                    "sort", sort,
-                    "refine", refine));
-                xxx.subFacets.put("skg2", new RelatednessFacet(multiStrField(2)+":9", "*:*"));
-                final Map facets = new LinkedHashMap<>();
+                final TermFacet xxx =
+                    new TermFacet(
+                        facetFieldName,
+                        map(
+                            "limit", limit,
+                            "overrequest", 0,
+                            "sort", sort,
+                            "refine", refine));
+                xxx.subFacets.put("skg2", new RelatednessFacet(multiStrField(2) + ":9", "*:*"));
+                final Map facets = new LinkedHashMap<>();
                facets.put("xxx2", xxx);
-                assertFacetSKGsAreConsistent(facets,
-                    buildORQuery(multiStrField(11) + ":55",
-                        multiStrField(0) + ":46"),
-                    multiStrField(5)+":9", "*:*");
+                assertFacetSKGsAreConsistent(
+                    facets,
+                    buildORQuery(multiStrField(11) + ":55", multiStrField(0) + ":46"),
+                    multiStrField(5) + ":9",
+                    "*:*");
              }
              { // multiple SKGs and multiple non-SKG / non-sweeping stats
-                final TermFacet xxx = new TermFacet(facetFieldName, map("limit", limit,
-                    "overrequest", 0,
-                    "sort", sort,
-                    "refine", refine));
+                final TermFacet xxx =
+                    new TermFacet(
+                        facetFieldName,
+                        map(
+                            "limit", limit,
+                            "overrequest", 0,
+                            "sort", sort,
+                            "refine", refine));
                xxx.subFacets.put("minAAA", new SumFacet(soloIntField(3)));
-                xxx.subFacets.put("skg2", new RelatednessFacet(multiStrField(2)+":9", "*:*"));
+                xxx.subFacets.put("skg2", new RelatednessFacet(multiStrField(2) + ":9", "*:*"));
                xxx.subFacets.put("minBBB", new SumFacet(soloIntField(2)));
-                final Map facets = new LinkedHashMap<>();
+                final Map facets = new LinkedHashMap<>();
                facets.put("xxx3", xxx);
-                assertFacetSKGsAreConsistent(facets,
-                    buildORQuery(multiStrField(11) + ":55",
-                        multiStrField(0) + ":46"),
-                    multiStrField(5)+":9", "*:*");
+                assertFacetSKGsAreConsistent(
+                    facets,
+                    buildORQuery(multiStrField(11) + ":55", multiStrField(0) + ":46"),
+                    multiStrField(5) + ":9",
+                    "*:*");
              }
            }
          }
        }
@@ -665,13 +721,16 @@ public void testBespokeStructures() throws Exception {
      }
    }
  }
-
+
  public void testRandom() throws Exception {
    final int numIters = atLeast(10);
    for (int iter = 0; iter < numIters; iter++) {
-      assertFacetSKGsAreConsistent(TermFacet.buildRandomFacets(),
-          buildRandomQuery(), buildRandomQuery(), buildRandomQuery());
+      assertFacetSKGsAreConsistent(
+          TermFacet.buildRandomFacets(),
+          buildRandomQuery(),
+          buildRandomQuery(),
+          buildRandomQuery());
    }
  }

@@ -682,7 +741,7 @@ public void testRandom() throws Exception {
   * @see #field
   */
  private static String buildRandomQuery() {
-    if (0 == TestUtil.nextInt(random(), 0,10)) {
+    if (0 == TestUtil.nextInt(random(), 0, 10)) {
      return "*:*";
    }
    final int numClauses = TestUtil.nextInt(random(), 3, 10);
@@ -703,49 +762,65 @@ private static String buildORQuery(String... clauses) {
    assert 0 < clauses.length;
    return "(" + String.join(" OR ", clauses) + ")";
  }
-
-
+
  /**
-   * Given a set of term facets and top level query strings, asserts that
-   * the results of these queries are identical even when varying the method_val param
-   * and when varying the {@value RelatednessAgg#SWEEP_COLLECTION} param; either by explicitly setting to
+   * Given a set of term facets and top level query strings, asserts that the results of these
+   * queries are identical even when varying the method_val param and when varying the
+   * {@value RelatednessAgg#SWEEP_COLLECTION} param; either by explicitly setting to
   * true or false or by changing the param key to not set it at all.
   */
-  private void assertFacetSKGsAreConsistent(final Map facets,
-      final String query,
-      final String foreQ,
-      final String backQ) throws SolrServerException, IOException {
-    final SolrParams basicParams = params("rows","0",
-        "q", query, "fore", foreQ, "back", backQ,
-        "json.facet", Facet.toJSONFacetParamValue(facets));
-
+  private void assertFacetSKGsAreConsistent(
+      final Map facets,
+      final String query,
+      final String foreQ,
+      final String backQ)
+      throws SolrServerException, IOException {
+    final SolrParams basicParams =
+        params(
+            "rows",
+            "0",
+            "q",
+            query,
+            "fore",
+            foreQ,
+            "back",
+            backQ,
+            "json.facet",
+            Facet.toJSONFacetParamValue(facets));
+
    log.info("Doing full run: {}", basicParams);
    try {
      // start by recording the results of the purely "default" behavior...
      final NamedList expected = getFacetResponse(basicParams);
-      // now loop over all permutations of processors and sweep values and compare them to the "default"...
+      // now loop over all permutations of processors and sweep values and compare them to the
+      // "default"...
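An editorial sketch to make the permutation loop concrete: the json.facet payload built by Facet.toJSONFacetParamValue carries the ${method_val:smart} and ${sweep_key:xxx}/${sweep_val:yyy} placeholders, so each iteration only has to swap plain request parameters. Assuming RelatednessAgg.SWEEP_COLLECTION resolves to the string "sweep_collection" (an assumption, not shown in this patch), one permutation effectively looks like:

    // Illustrative values only -- one pass of the comparison loop spelled out:
    ModifiableSolrParams options = new ModifiableSolrParams();
    options.add("method_val", "enum"); // fills ${method_val:smart} inside the facet JSON
    options.add("sweep_key", "sweep_collection"); // assumed value of RelatednessAgg.SWEEP_COLLECTION
    options.add("sweep_val", "true"); // turns the inert ${sweep_key:xxx} entry into a live option
    final NamedList actual = getFacetResponse(SolrParams.wrapAppended(options, basicParams));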
for (FacetMethod method : EnumSet.allOf(FacetMethod.class)) { for (Boolean sweep : Arrays.asList(true, false, null)) { - final ModifiableSolrParams options = params("method_val", method.toString().toLowerCase(Locale.ROOT)); + final ModifiableSolrParams options = + params("method_val", method.toString().toLowerCase(Locale.ROOT)); if (null != sweep) { options.add("sweep_key", RelatednessAgg.SWEEP_COLLECTION); options.add("sweep_val", sweep.toString()); } - - final NamedList actual = getFacetResponse(SolrParams.wrapAppended(options, basicParams)); - + + final NamedList actual = + getFacetResponse(SolrParams.wrapAppended(options, basicParams)); + // we can't rely on a trivial assertEquals() comparison... - // + // // the order of the sub-facet keys can change between // processors. (notably: method:enum vs method:smart when sort:"index asc") - // + // // NOTE: this doesn't ignore the order of the buckets, // it ignores the order of the keys in each bucket... - final String pathToMismatch = BaseDistributedSearchTestCase.compare - (expected, actual, 0, - Collections.singletonMap("buckets", BaseDistributedSearchTestCase.UNORDERED)); + final String pathToMismatch = + BaseDistributedSearchTestCase.compare( + expected, + actual, + 0, + Collections.singletonMap("buckets", BaseDistributedSearchTestCase.UNORDERED)); if (null != pathToMismatch) { log.error("{}: expected = {}", options, expected); log.error("{}: actual = {}", options, actual); @@ -756,13 +831,13 @@ private void assertFacetSKGsAreConsistent(final Map facets, } catch (AssertionError e) { throw new AssertionError(basicParams + " ===> " + e.getMessage(), e); } finally { - log.info("Ending full run"); + log.info("Ending full run"); } } - /** - * We ignore {@link QueryResponse#getJsonFacetingResponse()} because it isn't as useful for - * doing a "deep equals" comparison across requests + /** + * We ignore {@link QueryResponse#getJsonFacetingResponse()} because it isn't as useful for doing + * a "deep equals" comparison across requests */ private NamedList getFacetResponse(final SolrParams params) { try { @@ -772,68 +847,74 @@ private NamedList getFacetResponse(final SolrParams params) { assertNotNull(params + " is null topNamedList?", topNamedList); final NamedList facetResponse = (NamedList) topNamedList.get("facets"); assertNotNull("null facet results?", facetResponse); - assertEquals("numFound mismatch with top count?", - rsp.getResults().getNumFound(), ((Number)facetResponse.get("count")).longValue()); - + assertEquals( + "numFound mismatch with top count?", + rsp.getResults().getNumFound(), + ((Number) facetResponse.get("count")).longValue()); + return facetResponse; - + } catch (Exception e) { - throw new RuntimeException("query failed: " + params + ": " + - e.getMessage(), e); + throw new RuntimeException("query failed: " + params + ": " + e.getMessage(), e); } } private static interface Facet { // Mainly just a Marker Interface - + /** - * Given a set of (possibly nested) facets, generates a suitable json.facet param value to - * use for testing them against in a solr request. + * Given a set of (possibly nested) facets, generates a suitable json.facet param + * value to use for testing them against in a solr request. */ - public static String toJSONFacetParamValue(final Map facets) { + public static String toJSONFacetParamValue(final Map facets) { assert null != facets; - assert ! 
facets.isEmpty();
+      assert !facets.isEmpty();
      return JSONUtil.toJSON(facets, -1); // no newlines
    }
  }

-  /**
-   * trivial facet that is not SKG (and doesn't have any of its special behavior) for the purposes
+  /**
+   * trivial facet that is not SKG (and doesn't have any of its special behavior) for the purposes
   * of testing how TermFacet behaves with a mix of sub-facets.
   */
  private static final class SumFacet implements Facet {
    private final String field;
+
    public SumFacet(final String field) {
      this.field = field;
    }
+
    @Override
    public String toString() { // used in JSON by default
      return "sum(" + field + ")";
    }
+
    public static SumFacet buildRandom() {
      final int fieldNum = random().nextInt(MAX_FIELD_NUM);
      final boolean multi = random().nextBoolean();
      return new SumFacet(multi ? multiIntField(fieldNum) : soloIntField(fieldNum));
    }
  }
-
+
  /**
-   * Trivial data structure for modeling a simple relatedness() facet that can be written out as a json.facet param.
+   * Trivial data structure for modeling a simple relatedness() facet that can be
+   * written out as a json.facet param.
   *
-   * Doesn't do any string escaping or quoting, so don't use whitespace or reserved json characters
+   * <p>Doesn't do any string escaping or quoting, so don't use whitespace or reserved json
+   * characters
   *
-   * The specified fore/back queries will be wrapped in localparam syntax in the resulting json,
-   * unless they are 'null' in which case $fore and $back refs will be used
-   * in their place, and must be set as request params (this allows "random" facets to still easily
-   * trigger the "nested facets re-using the same fore/back set for SKG situation)
+   * <p>The specified fore/back queries will be wrapped in localparam syntax in the resulting json,
+   * unless they are 'null' in which case $fore and $back refs will be
+   * used in their place, and must be set as request params (this allows "random" facets to still
+   * easily trigger the "nested facets re-using the same fore/back set for SKG situation)
   *
-   * The JSON for all of these facets includes a ${sweep_key:xxx} (which will be ignored
-   * by default) and ${sweep_val:yyy} which may be set as params on each request to override the
-   * implicit default sweeping behavior of the underlying SKGAcc.
+   * <p>The JSON for all of these facets includes a ${sweep_key:xxx} (which will be
+   * ignored by default) and ${sweep_val:yyy} which may be set as params on each
+   * request to override the implicit default sweeping behavior of the underlying SKGAcc.
   */
  private static final class RelatednessFacet implements Facet, Writable {
-    public final Map jsonData = new LinkedHashMap<>();
+    public final Map jsonData = new LinkedHashMap<>();
+
    /** Assumes null for fore/back queries w/no options */
    public RelatednessFacet() {
      this(null, null, Collections.emptyMap());
@@ -842,117 +923,123 @@ public RelatednessFacet() {
    public RelatednessFacet(final String foreQ, final String backQ) {
      this(foreQ, backQ, Collections.emptyMap());
    }
-    public RelatednessFacet(final String foreQ, final String backQ,
-        final Map options) {
+
+    public RelatednessFacet(
+        final String foreQ, final String backQ, final Map options) {
      assert null != options;
-
-      final String f = null == foreQ ? "$fore" : "{!v='"+foreQ+"'}";
-      final String b = null == backQ ? "$back" : "{!v='"+backQ+"'}";
+
+      final String f = null == foreQ ? "$fore" : "{!v='" + foreQ + "'}";
+      final String b = null == backQ ? "$back" : "{!v='" + backQ + "'}";

      jsonData.putAll(options);
-
+
      // we don't allow these to be overridden by options, so set them now...
      jsonData.put("type", "func");
-      jsonData.put("func", "relatedness("+f+","+b+")");
-      jsonData.put("${sweep_key:xxx}","${sweep_val:yyy}");
+      jsonData.put("func", "relatedness(" + f + "," + b + ")");
+      jsonData.put("${sweep_key:xxx}", "${sweep_val:yyy}");
    }
+
    @Override
    public void write(JSONWriter writer) {
      writer.write(jsonData);
    }
-
+
    public static RelatednessFacet buildRandom() {
-      final Map options = new LinkedHashMap<>();
+      final Map options = new LinkedHashMap<>();
      if (random().nextBoolean()) {
        options.put("min_popularity", "0.001");
      }
-
+
      // bias this in favor of null fore/back since that's most realistic for typical nested facets
      final boolean simple = random().nextBoolean();
      final String fore = simple ? null : buildRandomORQuery(TestUtil.nextInt(random(), 1, 5));
      final String back = simple ? null : buildRandomORQuery(TestUtil.nextInt(random(), 1, 9));
-
+
      return new RelatednessFacet(fore, back, options);
    }
  }
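For orientation, a quick sketch of what this class emits on the wire (editorial illustration with hypothetical queries, not part of the patch). A facet built as new RelatednessFacet("foo_s:A", "*:*") serializes, via the jsonData entries set in the constructor above, to roughly:

    // JSONUtil.toJSON(jsonData) would yield, before Solr substitutes the ${...} templates:
    //   {"type":"func",
    //    "func":"relatedness({!v='foo_s:A'},{!v='*:*'})",
    //    "${sweep_key:xxx}":"${sweep_val:yyy}"}
    // The no-arg form instead falls back to the $fore/$back request-param refs
    // described in the javadoc.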

-
+
  /**
-   * Trivial data structure for modeling a simple terms facet that can be written out as a json.facet param.
-   * Since the point of this test is SKG, every TermFacet implicitly has one fixed "skg" subFacet, but that
-   * can be overridden by the caller
+   * Trivial data structure for modeling a simple terms facet that can be written out as a
+   * json.facet param. Since the point of this test is SKG, every TermFacet implicitly has one fixed
+   * "skg" subFacet, but that can be overridden by the caller
   *
-   * Doesn't do any string escaping or quoting, so don't use whitespace or reserved json characters
+   * <p>Doesn't do any string escaping or quoting, so don't use whitespace or reserved json
+   * characters
   *
-   * The resulting facets all specify a method of ${method_val:smart} which may be
-   * overridden via request params.
+   * <p>The resulting facets all specify a method of ${method_val:smart}
+   * which may be overridden via request params.
   */
  private static final class TermFacet implements Facet, Writable {
-    public final Map jsonData = new LinkedHashMap<>();
-    public final Map subFacets = new LinkedHashMap<>();
+    public final Map jsonData = new LinkedHashMap<>();
+    public final Map subFacets = new LinkedHashMap<>();

-    /**
+    /**
     * @param field must be non-null
     * @param options can set any of options used in a term facet other than field or (sub) facets
     */
-    public TermFacet(final String field, final Map options) {
+    public TermFacet(final String field, final Map options) {
      assert null != field;
-
+
      jsonData.put("method", "${method_val:smart}");
-
+
      jsonData.putAll(options);
      // we don't allow these to be overridden by options, so set them now...
      jsonData.put("type", "terms");
-      jsonData.put("field",field);
+      jsonData.put("field", field);
      jsonData.put("facet", subFacets);
-
+
      subFacets.put("skg", new RelatednessFacet());
    }

    /** all params except field can be null */
-    public TermFacet(String field, Integer limit, Integer overrequest, String sort, Boolean refine) {
+    public TermFacet(
+        String field, Integer limit, Integer overrequest, String sort, Boolean refine) {
      this(field, map("limit", limit, "overrequest", overrequest, "sort", sort, "refine", refine));
    }
-
+
    @Override
    public void write(JSONWriter writer) {
      writer.write(jsonData);
    }

-    /**
-     * Generates a random TermFacet that does not contain any random sub-facets
-     * beyond a single consistent "skg" stat
+    /**
+     * Generates a random TermFacet that does not contain any random sub-facets beyond a single
+     * consistent "skg" stat
     */
    public static TermFacet buildRandom() {
      final String sort = randomSortParam(random());
      final String facetField = randomFacetField(random());
-      return new TermFacet(facetField,
-          map("limit", randomLimitParam(random()),
-              "overrequest", randomOverrequestParam(random(), sort),
-              "prefix", randomPrefixParam(random(), facetField),
-              "perSeg", randomPerSegParam(random()),
-              "sort", sort,
-              "prelim_sort", randomPrelimSortParam(random(), sort),
-              "allBuckets", randomAllBucketsParam(random(), sort),
-              "refine", randomRefineParam(random())));
+      return new TermFacet(
+          facetField,
+          map(
+              "limit", randomLimitParam(random()),
+              "overrequest", randomOverrequestParam(random(), sort),
+              "prefix", randomPrefixParam(random(), facetField),
+              "perSeg", randomPerSegParam(random()),
+              "sort", sort,
+              "prelim_sort", randomPrelimSortParam(random(), sort),
+              "allBuckets", randomAllBucketsParam(random(), sort),
+              "refine", randomRefineParam(random())));
    }
-
+
    /**
-     * Factory method for generating some random facets.
+     * Factory method for generating some random facets.
     *
-     * For simplicity, each facet will have a unique key name.
+     * <p>For simplicity, each facet will have a unique key name.
     */
-    public static Map buildRandomFacets() {
+    public static Map buildRandomFacets() {
      // for simplicity, use a unique facet key regardless of depth - simplifies verification
      // and lets us enforce a hard limit on the total number of facets in a request
      AtomicInteger keyCounter = new AtomicInteger(0);
-
+
      final int maxDepth = TestUtil.nextInt(random(), 0, (usually() ? 2 : 3));
      return buildRandomFacets(keyCounter, maxDepth);
    }
-
+
    /**
     * picks a random field to facet on.
     *
@@ -961,34 +1048,41 @@ public static Map buildRandomFacets() {
     */
    public static String randomFacetField(final Random r) {
      final int fieldNum = r.nextInt(MAX_FIELD_NUM);
-      switch(r.nextInt(4)) {
-        case 0: return multiStrField(fieldNum);
-        case 1: return multiIntField(fieldNum);
-        case 2: return soloStrField(fieldNum);
-        case 3: return soloIntField(fieldNum);
-        default: throw new RuntimeException("Broken case statement");
+      switch (r.nextInt(4)) {
+        case 0:
+          return multiStrField(fieldNum);
+        case 1:
+          return multiIntField(fieldNum);
+        case 2:
+          return soloStrField(fieldNum);
+        case 3:
+          return soloIntField(fieldNum);
+        default:
+          throw new RuntimeException("Broken case statement");
      }
    }
-
+
    /**
     * picks a random value for the "allBuckets" param, biased in favor of interesting test cases
-     * This bucket should be ignored by relatedness, but inclusion should not cause any problems
-     * (or change the results)
+     * This bucket should be ignored by relatedness, but inclusion should not cause any problems (or
+     * change the results)
     *
-     * <p>
-     * NOTE: allBuckets is meaningless in conjunction with the STREAM processor, so
-     * this method always returns null if sort is index asc.
-     * </p>
+     *
+     * <p>NOTE: allBuckets is meaningless in conjunction with the STREAM
+     * processor, so this method always returns null if sort is index asc.
     *
     * @return a Boolean, may be null
     */
    public static Boolean randomAllBucketsParam(final Random r, final String sort) {
-      switch(r.nextInt(4)) {
-        case 0: return true;
-        case 1: return false;
-        case 2:
-        case 3: return null;
-        default: throw new RuntimeException("Broken case statement");
+      switch (r.nextInt(4)) {
+        case 0:
+          return true;
+        case 1:
+          return false;
+        case 2:
+        case 3:
+          return null;
+        default:
+          throw new RuntimeException("Broken case statement");
      }
    }
@@ -999,14 +1093,18 @@ public static Boolean randomAllBucketsParam(final Random r, final String sort) {
     */
    public static Boolean randomRefineParam(final Random r) {
-      switch(r.nextInt(3)) {
-        case 0: return null;
-        case 1: return true;
-        case 2: return false;
-        default: throw new RuntimeException("Broken case statement");
+      switch (r.nextInt(3)) {
+        case 0:
+          return null;
+        case 1:
+          return true;
+        case 2:
+          return false;
+        default:
+          throw new RuntimeException("Broken case statement");
      }
    }
-
+
    /**
     * picks a random value for the "perSeg" param, biased in favor of interesting test cases
     *
@@ -1014,42 +1112,50 @@ public static Boolean randomRefineParam(final Random r) {
     */
    public static Boolean randomPerSegParam(final Random r) {
-      switch(r.nextInt(4)) {
-        case 0: return true;
-        case 1: return false;
-        case 2:
-        case 3: return null;
-        default: throw new RuntimeException("Broken case statement");
+      switch (r.nextInt(4)) {
+        case 0:
+          return true;
+        case 1:
+          return false;
+        case 2:
+        case 3:
+          return null;
+        default:
+          throw new RuntimeException("Broken case statement");
      }
    }
-
+
    /**
     * picks a random value for the "prefix" param, biased in favor of interesting test cases
     *
     * @return a valid prefix value, may be null
     */
    public static String randomPrefixParam(final Random r, final String facetField) {
-
+
      if (facetField.contains("multi_i") || facetField.contains("solo_i")) {
        // never use a prefix on a numeric field
        return null;
      }
      assert (facetField.contains("multi_s") || facetField.contains("solo_s"))
-          : "possible facet fields have changed, breaking test";
-
-      switch(r.nextInt(5)) {
-        case 0: return "2";
-        case 1: return "3";
-        case 2:
-        case 3:
-        case 4: return null;
-        default: throw new RuntimeException("Broken case statement");
+          : "possible facet fields have changed, breaking test";
+
+      switch (r.nextInt(5)) {
+        case 0:
+          return "2";
+        case 1:
+          return "3";
+        case 2:
+        case 3:
+        case 4:
+          return null;
+        default:
+          throw new RuntimeException("Broken case statement");
      }
    }
-
+
    /**
-     * picks a random value for the "sort" param, biased in favor of interesting test cases.
-     * Assumes every TermFacet will have at least one "skg" stat
+     * picks a random value for the "sort" param, biased in favor of interesting test cases. Assumes
+     * every TermFacet will have at least one "skg" stat
     *
     * @return a sort string (w/direction), or null to specify nothing (trigger default behavior)
     * @see #randomAllBucketsParam
@@ -1058,16 +1164,21 @@ public static String randomPrefixParam(final Random r, final String facetField)
     */
    public static String randomSortParam(final Random r) {
      final String dir = random().nextBoolean() ? "asc" : "desc";
-      switch(r.nextInt(4)) {
-        case 0: return null;
-        case 1: return "count " + dir;
-        case 2: return "skg " + dir;
-        case 3: return "index " + dir;
-        default: throw new RuntimeException("Broken case statement");
+      switch (r.nextInt(4)) {
+        case 0:
+          return null;
+        case 1:
+          return "count " + dir;
+        case 2:
+          return "skg " + dir;
+        case 3:
+          return "index " + dir;
+        default:
+          throw new RuntimeException("Broken case statement");
      }
    }

    /**
-     * picks a random value for the "prelim_sort" param, biased in favor of interesting test cases.
+     * picks a random value for the "prelim_sort" param, biased in favor of interesting test cases.
     *
     * @return a sort string (w/direction), or null to specify nothing (trigger default behavior)
     * @see #randomSortParam
@@ -1082,34 +1193,36 @@ public static String randomPrelimSortParam(final Random r, final String sort) {
    /**
     * picks a random value for the "limit" param, biased in favor of interesting test cases
     *
-     * @return a number to specify in the request, or null to specify nothing (trigger default behavior)
+     * @return a number to specify in the request, or null to specify nothing (trigger default
+     *     behavior)
     * @see #UNIQUE_FIELD_VALS
     */
    public static Integer randomLimitParam(final Random r) {
      final int limit = 1 + r.nextInt((int) (UNIQUE_FIELD_VALS * 1.5F));
-
+
      if (1 == TestUtil.nextInt(random(), 0, 3)) {
        // bias in favor of just using default
        return null;
      }
-
+
      if (limit >= UNIQUE_FIELD_VALS && r.nextBoolean()) {
        return -1; // unlimited
      }
-
+
      return limit;
    }
-
+
    /**
     * picks a random value for the "overrequest" param, biased in favor of interesting test cases.
     *
-     * @return a number to specify in the request, or null to specify nothing (trigger default behavior)
+     * @return a number to specify in the request, or null to specify nothing (trigger default
+     *     behavior)
     * @see #UNIQUE_FIELD_VALS
     */
    public static Integer randomOverrequestParam(final Random r, final String sort) {
-      switch(r.nextInt(10)) {
+      switch (r.nextInt(10)) {
        case 0:
        case 1:
        case 2:
@@ -1119,40 +1232,48 @@ public static Integer randomOverrequestParam(final Random r, final String sort)
        case 5:
          return r.nextInt(UNIQUE_FIELD_VALS); // 20% ask for less than what's needed
        case 6:
-          return r.nextInt(Integer.MAX_VALUE); // 10%: completely random value, statistically more than enough
-        default: break;
+          return r.nextInt(
+              Integer.MAX_VALUE); // 10%: completely random value, statistically more than enough
+        default:
+          break;
      }
      // else.... either leave param unspecified (or redundantly specify the -1 default)
      return r.nextBoolean() ? null : -1;
    }

-    /**
+    /**
     * recursive helper method for building random facets
     *
     * @param keyCounter used to ensure every generated facet has a unique key name
-     * @param maxDepth max possible depth allowed for the recursion, a lower value may be used depending on how many facets are returned at the current level.
+     * @param maxDepth max possible depth allowed for the recursion, a lower value may be
+     *     used depending on how many facets are returned at the current level.
     */
-    private static Map buildRandomFacets(AtomicInteger keyCounter, int maxDepth) {
-      final int numFacets = Math.max(1, TestUtil.nextInt(random(), -1, 3)); // 3/5th chance of being '1'
-      Map results = new LinkedHashMap<>();
+    private static Map buildRandomFacets(
+        AtomicInteger keyCounter, int maxDepth) {
+      final int numFacets =
+          Math.max(1, TestUtil.nextInt(random(), -1, 3)); // 3/5th chance of being '1'
+      Map results = new LinkedHashMap<>();
      for (int i = 0; i < numFacets; i++) {
-        if (keyCounter.get() < 3) { // a hard limit on the total number of facets (regardless of depth) to reduce OOM risk
+        // a hard limit on the total number of facets (regardless of depth) to reduce OOM risk
+        if (keyCounter.get() < 3) {
          final TermFacet facet = TermFacet.buildRandom();
-
+
          results.put("facet_" + keyCounter.incrementAndGet(), facet);
          if (0 < maxDepth) { // if we're going wide, don't go deep
            final int nextMaxDepth = Math.max(0, maxDepth - numFacets);
-            facet.subFacets.putAll(buildRandomFacets(keyCounter, TestUtil.nextInt(random(), 0, nextMaxDepth)));
+            facet.subFacets.putAll(
+                buildRandomFacets(keyCounter, TestUtil.nextInt(random(), 0, nextMaxDepth)));
          }
-
+
          // we get one implicit RelatednessFacet automatically,
          // randomly add 1 or 2 more ... 3/5th chance of being '0'
-          final int numExtraSKGStats = Math.max(0, TestUtil.nextInt(random(), -2, 2));
+          final int numExtraSKGStats = Math.max(0, TestUtil.nextInt(random(), -2, 2));
          for (int skgId = 0; skgId < numExtraSKGStats; skgId++) {
            // sometimes we overwrite the trivial default "skg" with this one...
-            final String key = (0 == skgId && 0 == TestUtil.nextInt(random(), 0, 5)) ? "skg" : "skg" + skgId;
+            final String key =
+                (0 == skgId && 0 == TestUtil.nextInt(random(), 0, 5)) ? "skg" : "skg" + skgId;
            facet.subFacets.put(key, RelatednessFacet.buildRandom());
          }
@@ -1166,9 +1287,9 @@ private static Map buildRandomFacets(AtomicInteger keyCounter,
      }
    }
  }

-  /**
-   * returns a random SolrClient -- either a CloudSolrClient, or an HttpSolrClient pointed
-   * at a node in our cluster
+  /**
+   * returns a random SolrClient -- either a CloudSolrClient, or an HttpSolrClient pointed at a node
+   * in our cluster
   */
  public static SolrClient getRandClient(Random rand) {
    int numClients = CLIENTS.size();
@@ -1179,29 +1300,29 @@ public static SolrClient getRandClient(Random rand) {
  /**
   * Uses a random SolrClient to execute a request and returns only the numFound
+   *
   * @see #getRandClient
   */
  public static long getNumFound(final SolrParams req) throws SolrServerException, IOException {
    return getRandClient(random()).query(req).getResults().getNumFound();
  }
-
+
  public static void waitForRecoveriesToFinish(CloudSolrClient client) throws Exception {
    assert null != client.getDefaultCollection();
-    AbstractDistribZkTestBase.waitForRecoveriesToFinish(client.getDefaultCollection(),
-        client.getZkStateReader(),
-        true, true, 330);
+    AbstractDistribZkTestBase.waitForRecoveriesToFinish(
+        client.getDefaultCollection(), client.getZkStateReader(), true, true, 330);
  }

-  /** helper macro: fails on null keys, skips pairs with null values */
-  public static Map map(Object... pairs) {
+  /** helper macro: fails on null keys, skips pairs with null values */
+  public static Map map(Object...
pairs) { if (0 != pairs.length % 2) throw new IllegalArgumentException("uneven number of arguments"); - final Map map = new LinkedHashMap<>(); - for (int i = 0; i < pairs.length; i+=2) { + final Map map = new LinkedHashMap<>(); + for (int i = 0; i < pairs.length; i += 2) { final Object key = pairs[i]; - final Object val = pairs[i+1]; + final Object val = pairs[i + 1]; if (null == key) throw new NullPointerException("arguemnt " + i); if (null == val) continue; - + map.put(key.toString(), val); } return map; diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetErrors.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetErrors.java index f28a46ef7a9..ea83c64ac21 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetErrors.java +++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetErrors.java @@ -17,6 +17,8 @@ package org.apache.solr.search.facet; +import static org.hamcrest.core.StringContains.containsString; + import org.apache.solr.JSONTestUtil; import org.apache.solr.SolrTestCaseHS; import org.apache.solr.common.SolrException; @@ -25,12 +27,9 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.hamcrest.core.StringContains.containsString; - - public class TestJsonFacetErrors extends SolrTestCaseHS { - private static SolrInstances servers; // for distributed testing + private static SolrInstances servers; // for distributed testing @SuppressWarnings("deprecation") @BeforeClass @@ -39,14 +38,13 @@ public static void beforeTests() throws Exception { JSONTestUtil.failRepeatedKeys = true; // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); - initCore("solrconfig-tlog.xml","schema_latest.xml"); + initCore("solrconfig-tlog.xml", "schema_latest.xml"); } - /** - * Start all servers for cluster if they don't already exist - */ + /** Start all servers for cluster if they don't already exist */ public static void initServers() throws Exception { if (servers == null) { servers = new SolrInstances(3, "solrconfig-tlog.xml", "schema_latest.xml"); @@ -66,22 +64,63 @@ public static void afterTests() throws Exception { public void indexSimple(Client client) throws Exception { client.deleteByQuery("*:*", null); - client.add(sdoc("id", "1", "cat_s", "A", "where_s", "NY", "num_d", "4", "num_i", "2", - "num_is", "4", "num_is", "2", - "val_b", "true", "sparse_s", "one"), null); - client.add(sdoc("id", "2", "cat_s", "B", "where_s", "NJ", "num_d", "-9", "num_i", "-5", - "num_is", "-9", "num_is", "-5", - "val_b", "false"), null); + client.add( + sdoc( + "id", + "1", + "cat_s", + "A", + "where_s", + "NY", + "num_d", + "4", + "num_i", + "2", + "num_is", + "4", + "num_is", + "2", + "val_b", + "true", + "sparse_s", + "one"), + null); + client.add( + sdoc( + "id", "2", "cat_s", "B", "where_s", "NJ", "num_d", "-9", "num_i", "-5", "num_is", "-9", + "num_is", "-5", "val_b", "false"), + null); client.add(sdoc("id", "3"), null); client.commit(); - client.add(sdoc("id", "4", "cat_s", "A", "where_s", "NJ", "num_d", "2", "num_i", "3", - "num_is", "2", "num_is", "3"), null); - client.add(sdoc("id", "5", "cat_s", "B", "where_s", "NJ", "num_d", "11", "num_i", "7", - "num_is", "11", "num_is", "7", - "sparse_s", "two"),null); + client.add( + sdoc( + "id", "4", "cat_s", "A", "where_s", "NJ", "num_d", "2", "num_i", "3", "num_is", "2", 
+ "num_is", "3"), + null); + client.add( + sdoc( + "id", + "5", + "cat_s", + "B", + "where_s", + "NJ", + "num_d", + "11", + "num_i", + "7", + "num_is", + "11", + "num_is", + "7", + "sparse_s", + "two"), + null); client.commit(); - client.add(sdoc("id", "6", "cat_s", "B", "where_s", "NY", "num_d", "-5", "num_i", "-5", - "num_is", "-5"),null); + client.add( + sdoc( + "id", "6", "cat_s", "B", "where_s", "NY", "num_d", "-5", "num_i", "-5", "num_is", "-5"), + null); client.commit(); } @@ -93,12 +132,19 @@ public void testErrors() throws Exception { public void doTestErrors(Client client) throws Exception { client.deleteByQuery("*:*", null); - SolrException e = assertThrows(SolrException.class, () -> - client.testJQ(params("ignore_exception", "true", "q", "*:*" - , "json.facet", "{f:{type:ignore_exception_aaa, field:bbbbbb}}" - ) - )); - assertTrue( e.getMessage().contains("ignore_exception_aaa") ); + SolrException e = + assertThrows( + SolrException.class, + () -> + client.testJQ( + params( + "ignore_exception", + "true", + "q", + "*:*", + "json.facet", + "{f:{type:ignore_exception_aaa, field:bbbbbb}}"))); + assertTrue(e.getMessage().contains("ignore_exception_aaa")); } @Test @@ -108,39 +154,62 @@ public void testDomainErrors() throws Exception { indexSimple(client); // using assertQEx so that, status code and error message can be asserted - assertQEx("Should Fail as filter with qparser in domain becomes null", + assertQEx( + "Should Fail as filter with qparser in domain becomes null", "QParser yields null, perhaps unresolved parameter reference in: {!query v=$NOfilt}", - req("q", "*:*", "json.facet", "{cat_s:{type:terms,field:cat_s,domain:{filter:'{!query v=$NOfilt}'}}}"), - SolrException.ErrorCode.BAD_REQUEST - ); + req( + "q", + "*:*", + "json.facet", + "{cat_s:{type:terms,field:cat_s,domain:{filter:'{!query v=$NOfilt}'}}}"), + SolrException.ErrorCode.BAD_REQUEST); - assertQEx("Should Fail as filter in domain becomes null", + assertQEx( + "Should Fail as filter in domain becomes null", "QParser yields null, perhaps unresolved parameter reference in: {!v=$NOfilt}", - req("q", "*:*", "json.facet", "{cat_s:{type:terms,field:cat_s,domain:{filter:'{!v=$NOfilt}'}}}"), - SolrException.ErrorCode.BAD_REQUEST - ); + req( + "q", + "*:*", + "json.facet", + "{cat_s:{type:terms,field:cat_s,domain:{filter:'{!v=$NOfilt}'}}}"), + SolrException.ErrorCode.BAD_REQUEST); // when domain type is invalid - assertQEx("Should Fail as domain not of type map", + assertQEx( + "Should Fail as domain not of type map", "Expected Map for 'domain', received String=bleh , path=facet/cat_s", req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,domain:bleh}}"), SolrException.ErrorCode.BAD_REQUEST); // when domain = null, should not throw exception - assertQ("Should pass as no domain is specified", + assertQ( + "Should pass as no domain is specified", req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s}}")); // when blockChildren or blockParent is passed but not of string - assertQEx("Should Fail as blockChildren is of type map", + assertQEx( + "Should Fail as blockChildren is of type map", "Expected string type for param 'blockChildren' but got LinkedHashMap = {} , path=facet/cat_s", - req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,domain:{blockChildren:{}}}}"), + req( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{cat_s:{type:terms,field:cat_s,domain:{blockChildren:{}}}}"), SolrException.ErrorCode.BAD_REQUEST); - assertQEx("Should Fail as 
blockParent is of type map", + assertQEx( + "Should Fail as blockParent is of type map", "Expected string type for param 'blockParent' but got LinkedHashMap = {} , path=facet/cat_s", - req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,domain:{blockParent:{}}}}"), + req( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{cat_s:{type:terms,field:cat_s,domain:{blockParent:{}}}}"), SolrException.ErrorCode.BAD_REQUEST); - } @Test @@ -152,102 +221,181 @@ public void testRangeFacetsErrorCases() throws Exception { SolrParams params = params("q", "*:*", "rows", "0"); // invalid format for ranges - SolrException ex = expectThrows(SolrException.class, - () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i,start:-10,end:10,gap:2," + - "ranges:[{key:\"0-200\", to:200}]}}")) - ); + SolrException ex = + expectThrows( + SolrException.class, + () -> + h.query( + req( + params, + "json.facet", + "{price:{type :range, field : num_i,start:-10,end:10,gap:2," + + "ranges:[{key:\"0-200\", to:200}]}}"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); assertEquals("Cannot set gap/start/end and ranges params together", ex.getMessage()); - ex = expectThrows(SolrException.class, - () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + - "ranges:bleh}}")) - ); + ex = + expectThrows( + SolrException.class, + () -> + h.query( + req( + params, + "json.facet", + "{price:{type :range, field : num_i," + "ranges:bleh}}"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); assertTrue(ex.getMessage().contains("Expected List for ranges but got String")); - ex = expectThrows(SolrException.class, - () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + - "ranges:[bleh]}}")) - ); + ex = + expectThrows( + SolrException.class, + () -> + h.query( + req( + params, + "json.facet", + "{price:{type :range, field : num_i," + "ranges:[bleh]}}"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); assertTrue(ex.getMessage().contains("Expected Map for range but got String")); - ex = expectThrows(SolrException.class, - () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + - "ranges:[{from:0, to:200, inclusive_to:bleh}]}}")) - ); + ex = + expectThrows( + SolrException.class, + () -> + h.query( + req( + params, + "json.facet", + "{price:{type :range, field : num_i," + + "ranges:[{from:0, to:200, inclusive_to:bleh}]}}"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); - assertTrue(ex.getMessage().contains("Expected boolean type for param 'inclusive_to' but got String")); - - ex = expectThrows(SolrException.class, - () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + - "ranges:[{from:0, to:200, inclusive_from:bleh}]}}")) - ); + assertTrue( + ex.getMessage().contains("Expected boolean type for param 'inclusive_to' but got String")); + + ex = + expectThrows( + SolrException.class, + () -> + h.query( + req( + params, + "json.facet", + "{price:{type :range, field : num_i," + + "ranges:[{from:0, to:200, inclusive_from:bleh}]}}"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); - assertTrue(ex.getMessage().contains("Expected boolean type for param 'inclusive_from' but got String")); - - ex = expectThrows(SolrException.class, - () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + - "ranges:[{from:bleh, to:200}]}}")) - ); + assertTrue( + ex.getMessage() + .contains("Expected 
boolean type for param 'inclusive_from' but got String")); + + ex = + expectThrows( + SolrException.class, + () -> + h.query( + req( + params, + "json.facet", + "{price:{type :range, field : num_i," + "ranges:[{from:bleh, to:200}]}}"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); assertEquals("Can't parse value bleh for field: num_i", ex.getMessage()); - ex = expectThrows(SolrException.class, - () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + - "ranges:[{from:0, to:bleh}]}}")) - ); + ex = + expectThrows( + SolrException.class, + () -> + h.query( + req( + params, + "json.facet", + "{price:{type :range, field : num_i," + "ranges:[{from:0, to:bleh}]}}"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); assertEquals("Can't parse value bleh for field: num_i", ex.getMessage()); - ex = expectThrows(SolrException.class, - () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + - "ranges:[{from:200, to:0}]}}")) - ); + ex = + expectThrows( + SolrException.class, + () -> + h.query( + req( + params, + "json.facet", + "{price:{type :range, field : num_i," + "ranges:[{from:200, to:0}]}}"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); assertEquals("'from' is higher than 'to' in range for key: [200,0)", ex.getMessage()); // with old format - ex = expectThrows(SolrException.class, - () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + - "ranges:[{range:\"\"}]}}")) - ); + ex = + expectThrows( + SolrException.class, + () -> + h.query( + req( + params, + "json.facet", + "{price:{type :range, field : num_i," + "ranges:[{range:\"\"}]}}"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); assertTrue(ex.getMessage().contains("empty facet range")); - ex = expectThrows(SolrException.class, - () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + - "ranges:[{range:\"bl\"}]}}")) - ); + ex = + expectThrows( + SolrException.class, + () -> + h.query( + req( + params, + "json.facet", + "{price:{type :range, field : num_i," + "ranges:[{range:\"bl\"}]}}"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); assertTrue(ex.getMessage().contains("Invalid start character b in facet range bl")); - ex = expectThrows(SolrException.class, - () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + - "ranges:[{range:\"(bl\"}]}}")) - ); + ex = + expectThrows( + SolrException.class, + () -> + h.query( + req( + params, + "json.facet", + "{price:{type :range, field : num_i," + "ranges:[{range:\"(bl\"}]}}"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); assertTrue(ex.getMessage().contains("Invalid end character l in facet range (bl")); - ex = expectThrows(SolrException.class, - () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + - "ranges:[{range:\"(bleh,12)\"}]}}")) - ); + ex = + expectThrows( + SolrException.class, + () -> + h.query( + req( + params, + "json.facet", + "{price:{type :range, field : num_i," + + "ranges:[{range:\"(bleh,12)\"}]}}"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); assertEquals("Can't parse value bleh for field: num_i", ex.getMessage()); - ex = expectThrows(SolrException.class, - () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + - "ranges:[{range:\"(12,bleh)\"}]}}")) - ); + ex = + expectThrows( + SolrException.class, + () -> + h.query( + req( + params, + "json.facet", + 
"{price:{type :range, field : num_i," + + "ranges:[{range:\"(12,bleh)\"}]}}"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); assertEquals("Can't parse value bleh for field: num_i", ex.getMessage()); - ex = expectThrows(SolrException.class, - () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," + - "ranges:[{range:\"(200,12)\"}]}}")) - ); + ex = + expectThrows( + SolrException.class, + () -> + h.query( + req( + params, + "json.facet", + "{price:{type :range, field : num_i," + + "ranges:[{range:\"(200,12)\"}]}}"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); assertEquals("'start' is higher than 'end' in range for key: (200,12)", ex.getMessage()); } @@ -259,138 +407,221 @@ public void testOtherErrorCases() throws Exception { indexSimple(client); // test for sort - assertQEx("Should fail as sort is of type list", + assertQEx( + "Should fail as sort is of type list", "Expected string/map for 'sort', received ArrayList=[count desc]", - req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,sort:[\"count desc\"]}}"), + req( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{cat_s:{type:terms,field:cat_s,sort:[\"count desc\"]}}"), SolrException.ErrorCode.BAD_REQUEST); - assertQEx("Should fail as facet is not of type map", + assertQEx( + "Should fail as facet is not of type map", "Expected Map for 'facet', received ArrayList=[{}]", - req("q", "*:*", "rows", "0", "json.facet", "[{}]"), SolrException.ErrorCode.BAD_REQUEST); + req("q", "*:*", "rows", "0", "json.facet", "[{}]"), + SolrException.ErrorCode.BAD_REQUEST); - assertQEx("Should fail as queries is not of type map", + assertQEx( + "Should fail as queries is not of type map", "Expected Map for 'queries', received [{}]", - req("q", "*:*", "rows", "0", "json.queries", "[{}]"), SolrException.ErrorCode.BAD_REQUEST); + req("q", "*:*", "rows", "0", "json.queries", "[{}]"), + SolrException.ErrorCode.BAD_REQUEST); - assertQEx("Should fail as queries are null in JSON", + assertQEx( + "Should fail as queries are null in JSON", "Expected Map for 'queries', received null", - req("json", "{query:\"*:*\", queries:null}"), SolrException.ErrorCode.BAD_REQUEST); + req("json", "{query:\"*:*\", queries:null}"), + SolrException.ErrorCode.BAD_REQUEST); // range facets - assertQEx("Should fail as 'other' is of type Map", - "Expected list of string or comma separated string values for 'other', " + - "received LinkedHashMap={} , path=facet/f", - req("q", "*:*", "json.facet", "{f:{type:range, field:num_d, start:10, end:12, gap:1, other:{}}}"), + assertQEx( + "Should fail as 'other' is of type Map", + "Expected list of string or comma separated string values for 'other', " + + "received LinkedHashMap={} , path=facet/f", + req( + "q", + "*:*", + "json.facet", + "{f:{type:range, field:num_d, start:10, end:12, gap:1, other:{}}}"), SolrException.ErrorCode.BAD_REQUEST); - assertQEx("Should fail as 'include' is of type Map", - "Expected list of string or comma separated string values for 'include', " + - "received LinkedHashMap={} , path=facet/f", - req("q", "*:*", "json.facet", "{f:{type:range, field:num_d, start:10, end:12, gap:1, include:{}}}"), + assertQEx( + "Should fail as 'include' is of type Map", + "Expected list of string or comma separated string values for 'include', " + + "received LinkedHashMap={} , path=facet/f", + req( + "q", + "*:*", + "json.facet", + "{f:{type:range, field:num_d, start:10, end:12, gap:1, include:{}}}"), SolrException.ErrorCode.BAD_REQUEST); // 
missing start parameter - assertQEx("Should Fail with missing field error", + assertQEx( + "Should Fail with missing field error", "Missing required parameter: 'start' , path=facet/f", - req("q", "*:*", "json.facet", "{f:{type:range, field:num_d}}"), SolrException.ErrorCode.BAD_REQUEST); + req("q", "*:*", "json.facet", "{f:{type:range, field:num_d}}"), + SolrException.ErrorCode.BAD_REQUEST); // missing end parameter - assertQEx("Should Fail with missing field error", + assertQEx( + "Should Fail with missing field error", "Missing required parameter: 'end' , path=facet/f", req("q", "*:*", "json.facet", "{f:{type:range, field:num_d, start:10}}"), SolrException.ErrorCode.BAD_REQUEST); // missing gap parameter - assertQEx("Should Fail with missing field error", + assertQEx( + "Should Fail with missing field error", "Missing required parameter: 'gap' , path=facet/f", req("q", "*:*", "json.facet", "{f:{type:range, field:num_d, start:10, end:12}}"), SolrException.ErrorCode.BAD_REQUEST); // invalid value for facet field - assertQEx("Should Fail as args is of type long", + assertQEx( + "Should Fail as args is of type long", "Expected string/map for facet field, received Long=2 , path=facet/facet", - req("q", "*:*", "rows", "0", "json.facet.facet.field", "2"), SolrException.ErrorCode.BAD_REQUEST); + req("q", "*:*", "rows", "0", "json.facet.facet.field", "2"), + SolrException.ErrorCode.BAD_REQUEST); // invalid value for facet query - assertQEx("Should Fail as args is of type long for query", + assertQEx( + "Should Fail as args is of type long for query", "Expected string/map for facet query, received Long=2 , path=facet/facet", - req("q", "*:*", "rows", "0", "json.facet.facet.query", "2"), SolrException.ErrorCode.BAD_REQUEST); + req("q", "*:*", "rows", "0", "json.facet.facet.query", "2"), + SolrException.ErrorCode.BAD_REQUEST); // valid facet field - assertQ("Should pass as this is valid query", + assertQ( + "Should pass as this is valid query", req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s}}")); // invalid perSeg - assertQEx("Should fail as perSeg is not of type boolean", + assertQEx( + "Should fail as perSeg is not of type boolean", "Expected boolean type for param 'perSeg' but got Long = 2 , path=facet/cat_s", req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,perSeg:2}}"), SolrException.ErrorCode.BAD_REQUEST); - assertQEx("Should fail as sort is invalid", + assertQEx( + "Should fail as sort is invalid", "Invalid sort option 'bleh' for field 'cat_s'", req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,sort:bleh}}"), SolrException.ErrorCode.BAD_REQUEST); - assertQEx("Should fail as sort order is invalid", + assertQEx( + "Should fail as sort order is invalid", "Unknown Sort direction 'bleh'", - req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,sort:{count: bleh}}}"), + req( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{cat_s:{type:terms,field:cat_s,sort:{count: bleh}}}"), SolrException.ErrorCode.BAD_REQUEST); // test for prelim_sort - assertQEx("Should fail as prelim_sort is invalid", + assertQEx( + "Should fail as prelim_sort is invalid", "Invalid prelim_sort option 'bleh' for field 'cat_s'", - req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,prelim_sort:bleh}}"), + req( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{cat_s:{type:terms,field:cat_s,prelim_sort:bleh}}"), SolrException.ErrorCode.BAD_REQUEST); - assertQEx("Should fail as prelim_sort map is 
invalid", + assertQEx( + "Should fail as prelim_sort map is invalid", "Invalid prelim_sort option '{bleh=desc}' for field 'cat_s'", - req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,prelim_sort:{bleh:desc}}}"), + req( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{cat_s:{type:terms,field:cat_s,prelim_sort:{bleh:desc}}}"), SolrException.ErrorCode.BAD_REQUEST); // with nested facet - assertQEx("Should fail as prelim_sort is invalid", + assertQEx( + "Should fail as prelim_sort is invalid", "Invalid sort option 'bleh' for field 'id'", - req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,sort:bleh,facet:" + - "{bleh:\"unique(cat_s)\",id:{type:terms,field:id,sort:bleh}}}}"), + req( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{cat_s:{type:terms,field:cat_s,sort:bleh,facet:" + + "{bleh:\"unique(cat_s)\",id:{type:terms,field:id,sort:bleh}}}}"), SolrException.ErrorCode.BAD_REQUEST); - assertQ("Should pass as sort is proper", - req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,sort:bleh,facet:" + - "{bleh:\"unique(cat_s)\",id:{type:terms,field:id,sort:{bleh:desc},facet:{bleh:\"unique(id)\"}}}}}") - ); + assertQ( + "Should pass as sort is proper", + req( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{cat_s:{type:terms,field:cat_s,sort:bleh,facet:" + + "{bleh:\"unique(cat_s)\",id:{type:terms,field:id,sort:{bleh:desc},facet:{bleh:\"unique(id)\"}}}}}")); } @Test public void testAggErrors() { ignoreException("aggregation"); - SolrException e = expectThrows(SolrException.class, () -> { - h.query(req("q", "*:*", "json.facet", "{bleh:'div(2,4)'}")); - }); + SolrException e = + expectThrows( + SolrException.class, + () -> { + h.query(req("q", "*:*", "json.facet", "{bleh:'div(2,4)'}")); + }); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); - assertThat(e.getMessage(), - containsString("Expected multi-doc aggregation from 'div' but got per-doc function in input ('div(2,4)")); - - e = expectThrows(SolrException.class, () -> { - h.query(req("q", "*:*", "json.facet", "{b:'agg(div(2,4))'}")); - }); + assertThat( + e.getMessage(), + containsString( + "Expected multi-doc aggregation from 'div' but got per-doc function in input ('div(2,4)")); + + e = + expectThrows( + SolrException.class, + () -> { + h.query(req("q", "*:*", "json.facet", "{b:'agg(div(2,4))'}")); + }); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); - assertThat(e.getMessage(), - containsString("Expected multi-doc aggregation from 'div' but got per-doc function in input ('agg(div(2,4))")); - - e = expectThrows(SolrException.class, () -> { - h.query(req("q", "*:*", "json.facet", "{b:'agg(bleh(2,4))'}")); - }); + assertThat( + e.getMessage(), + containsString( + "Expected multi-doc aggregation from 'div' but got per-doc function in input ('agg(div(2,4))")); + + e = + expectThrows( + SolrException.class, + () -> { + h.query(req("q", "*:*", "json.facet", "{b:'agg(bleh(2,4))'}")); + }); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); - assertThat(e.getMessage(), - containsString("Unknown aggregation 'bleh' in input ('agg(bleh(2,4))")); - - e = expectThrows(SolrException.class, () -> { - h.query(req("q", "*:*", "json.facet", "{b:'bleh(2,4)'}")); - }); + assertThat( + e.getMessage(), containsString("Unknown aggregation 'bleh' in input ('agg(bleh(2,4))")); + + e = + expectThrows( + SolrException.class, + () -> { + h.query(req("q", "*:*", "json.facet", "{b:'bleh(2,4)'}")); + }); 
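A note on the distinction these four assertions exercise: div(2,4) parses as a per-document function source, and a json.facet stat must be a multi-doc aggregation, so the input is rejected whether or not it is wrapped in agg(...). A hedged sketch of the valid counterpart (num_i is a field populated by indexSimple above):

    // Wrapping the per-doc math in a real aggregation satisfies the parser:
    h.query(req("q", "*:*", "json.facet", "{b:'sum(div(num_i,2))'}"));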
assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); - assertThat(e.getMessage(), - containsString("Unknown aggregation 'bleh' in input ('bleh(2,4)")); + assertThat(e.getMessage(), containsString("Unknown aggregation 'bleh' in input ('bleh(2,4)")); resetExceptionIgnores(); } diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java index 225374594ce..141260829ad 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java +++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java @@ -21,9 +21,7 @@ import java.util.Arrays; import java.util.List; import java.util.Locale; - import org.apache.lucene.util.TestUtil; - import org.apache.solr.JSONTestUtil; import org.apache.solr.SolrTestCaseHS; import org.apache.solr.client.solrj.SolrClient; @@ -39,14 +37,15 @@ public class TestJsonFacetRefinement extends SolrTestCaseHS { - private static SolrInstances servers; // for distributed testing + private static SolrInstances servers; // for distributed testing @BeforeClass public static void beforeTests() throws Exception { systemSetPropertySolrDisableUrlAllowList("true"); // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); - + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); + JSONTestUtil.failRepeatedKeys = true; initCore("solrconfig-tlog.xml", "schema_latest.xml"); } @@ -67,7 +66,6 @@ public static void afterTests() throws Exception { systemClearPropertySolrDisableUrlAllowList(); } - // todo - pull up to test base class? public void matchJSON(String json, double delta, String... tests) throws Exception { for (String test : tests) { @@ -80,15 +78,12 @@ public void matchJSON(String json, double delta, String... tests) throws Excepti String err = JSONTestUtil.match(json, test, delta); if (err != null) { - throw new RuntimeException("JSON failed validation. error=" + err + - "\n expected =" + test + - "\n got = " + json - ); + throw new RuntimeException( + "JSON failed validation. error=" + err + "\n expected =" + test + "\n got = " + json); } } } - public void match(Object input, double delta, String... tests) throws Exception { for (String test : tests) { String err = null; @@ -103,32 +98,28 @@ public void match(Object input, double delta, String... tests) throws Exception } if (err != null) { - throw new RuntimeException("JSON failed validation. error=" + err + - "\n expected =" + test + - "\n got = " + input - ); + throw new RuntimeException( + "JSON failed validation. 
error=" + err + "\n expected =" + test + "\n got = " + input); } } } - - /** - * Use SimpleOrderedMap rather than Map to match responses from shards - */ + /** Use SimpleOrderedMap rather than Map to match responses from shards */ public static Object fromJSON(String json) throws IOException { JSONParser parser = new JSONParser(json); - ObjectBuilder ob = new ObjectBuilder(parser) { - @Override - public Object newObject() throws IOException { - return new SimpleOrderedMap<>(); - } - - @Override - @SuppressWarnings("unchecked") - public void addKeyVal(Object map, Object key, Object val) throws IOException { - ((SimpleOrderedMap) map).add(key.toString(), val); - } - }; + ObjectBuilder ob = + new ObjectBuilder(parser) { + @Override + public Object newObject() throws IOException { + return new SimpleOrderedMap<>(); + } + + @Override + @SuppressWarnings("unchecked") + public void addKeyVal(Object map, Object key, Object val) throws IOException { + ((SimpleOrderedMap) map).add(key.toString(), val); + } + }; return ob.getObject(); } @@ -162,339 +153,345 @@ void doTestRefine(String facet, String... responsesAndTests) throws Exception { } finally { req.close(); } - } @Test public void testMerge() throws Exception { - - doTestRefine("{x : {type:terms, field:X, limit:2, refine:true} }", // the facet request - "{x: {buckets:[{val:x1, count:5}, {val:x2, count:3}], more:true } }", // shard0 response - "{x: {buckets:[{val:x2, count:4}, {val:x3, count:2}], more:true } }", // shard1 response - null, // shard0 expected refinement info - "=={x:{_l:[x1]}}" // shard1 expected refinement info - ); - - // same test as above, but shard1 indicates it doesn't have any more results, so there shouldn't be any refinement - doTestRefine("{x : {type:terms, field:X, limit:2, refine:true} }", // the facet request - "{x: {buckets:[{val:x1, count:5}, {val:x2, count:3}],more:true } }", // shard0 response - "{x: {buckets:[{val:x2, count:4}, {val:x3, count:2}] } }", // shard1 response - null, // shard0 expected refinement info - null // shard1 expected refinement info // without more:true, we should not attempt to get extra bucket - ); + + doTestRefine( + "{x : {type:terms, field:X, limit:2, refine:true} }", // the facet request + "{x: {buckets:[{val:x1, count:5}, {val:x2, count:3}], more:true } }", // shard0 response + "{x: {buckets:[{val:x2, count:4}, {val:x3, count:2}], more:true } }", // shard1 response + null, // shard0 expected refinement info + "=={x:{_l:[x1]}}" // shard1 expected refinement info + ); + + // same test as above, but shard1 indicates it doesn't have any more results, so there shouldn't + // be any refinement + doTestRefine( + "{x : {type:terms, field:X, limit:2, refine:true} }", // the facet request + "{x: {buckets:[{val:x1, count:5}, {val:x2, count:3}],more:true } }", // shard0 response + "{x: {buckets:[{val:x2, count:4}, {val:x3, count:2}] } }", // shard1 response + null, // shard0 expected refinement info + null // shard1 expected refinement info // without more:true, we should not attempt to get + // extra bucket + ); // same but with processEmpty:true we should check for refinement even if there isn't "more" - doTestRefine("{x : {type:terms, field:X, limit:2, refine:true, facet: { processEmpty:true } } }", - "{x: {buckets:[{val:x1, count:5}, {val:x2, count:3}],more:true } }", // shard0 response - "{x: {buckets:[{val:x2, count:4}] } }", // shard1 response -- NO "more" - null, // shard0 expected refinement info - "=={x:{_l:[x1]}}" // shard1 expected refinement info - ); + doTestRefine( + "{x : 
{type:terms, field:X, limit:2, refine:true, facet: { processEmpty:true } } }", + "{x: {buckets:[{val:x1, count:5}, {val:x2, count:3}],more:true } }", // shard0 response + "{x: {buckets:[{val:x2, count:4}] } }", // shard1 response -- NO "more" + null, // shard0 expected refinement info + "=={x:{_l:[x1]}}" // shard1 expected refinement info + ); // same test w/o refinement turned on (even though shards say they have more) - doTestRefine("{x : {type:terms, field:X, limit:2} }", // the facet request - "{x: {buckets:[{val:x1, count:5}, {val:x2, count:3}], more:true } }", // shard0 response - "{x: {buckets:[{val:x2, count:4}, {val:x3, count:2}], more:true } }", // shard1 response + doTestRefine( + "{x : {type:terms, field:X, limit:2} }", // the facet request + "{x: {buckets:[{val:x1, count:5}, {val:x2, count:3}], more:true } }", // shard0 response + "{x: {buckets:[{val:x2, count:4}, {val:x3, count:2}], more:true } }", // shard1 response null, // shard0 expected refinement info - null // shard1 expected refinement info - ); + null // shard1 expected refinement info + ); // same test, but nested in query facet - doTestRefine("{top:{type:query, q:'foo_s:myquery', facet:{x : {type:terms, field:X, limit:2, refine:true} } } }", // the facet request - "{top: {x: {buckets:[{val:x1, count:5}, {val:x2, count:3}], more:true } } }", // shard0 response - "{top: {x: {buckets:[{val:x2, count:4}, {val:x3, count:2}], more:true } } }", // shard1 response - null, // shard0 expected refinement info - "=={top:{x:{_l:[x1]}}}" // shard1 expected refinement info - ); + doTestRefine( + "{top:{type:query, q:'foo_s:myquery', facet:{x : {type:terms, field:X, limit:2, refine:true} } } }", // the facet request + "{top: {x: {buckets:[{val:x1, count:5}, {val:x2, count:3}], more:true } } }", // shard0 + // response + "{top: {x: {buckets:[{val:x2, count:4}, {val:x3, count:2}], more:true } } }", // shard1 + // response + null, // shard0 expected refinement info + "=={top:{x:{_l:[x1]}}}" // shard1 expected refinement info + ); // same test w/o refinement turned on - doTestRefine("{top:{type:query, q:'foo_s:myquery', facet:{x : {type:terms, field:X, limit:2, refine:false} } } }", - "{top: {x: {buckets:[{val:x1, count:5}, {val:x2, count:3}] } } }", // shard0 response - "{top: {x: {buckets:[{val:x2, count:4}, {val:x3, count:2}] } } }", // shard1 response + doTestRefine( + "{top:{type:query, q:'foo_s:myquery', facet:{x : {type:terms, field:X, limit:2, refine:false} } } }", + "{top: {x: {buckets:[{val:x1, count:5}, {val:x2, count:3}] } } }", // shard0 response + "{top: {x: {buckets:[{val:x2, count:4}, {val:x3, count:2}] } } }", // shard1 response null, - null - ); + null); // same test, but nested in a terms facet - doTestRefine("{top:{type:terms, field:Afield, facet:{x : {type:terms, field:X, limit:2, refine:true} } } }", + doTestRefine( + "{top:{type:terms, field:Afield, facet:{x : {type:terms, field:X, limit:2, refine:true} } } }", "{top: {buckets:[{val:'A', count:2, x:{buckets:[{val:x1, count:5},{val:x2, count:3}], more:true} } ] } }", "{top: {buckets:[{val:'A', count:1, x:{buckets:[{val:x2, count:4},{val:x3, count:2}], more:true} } ] } }", null, - "=={top: {" + - "_s:[ ['A' , {x:{_l:[x1]}} ] ]" + - " } " + - "}" - ); + "=={top: {" + "_s:[ ['A' , {x:{_l:[x1]}} ] ]" + " } " + "}"); // same test, but nested in range facet - doTestRefine("{top:{type:range, field:R, start:0, end:1, gap:1, facet:{x : {type:terms, field:X, limit:2, refine:true} } } }", + doTestRefine( + "{top:{type:range, field:R, start:0, end:1, gap:1, facet:{x : 
{type:terms, field:X, limit:2, refine:true} } } }", "{top: {buckets:[{val:0, count:2, x:{buckets:[{val:x1, count:5},{val:x2, count:3}],more:true} } ] } }", "{top: {buckets:[{val:0, count:1, x:{buckets:[{val:x2, count:4},{val:x3, count:2}],more:true} } ] } }", null, - "=={top: {" + - "_s:[ [0 , {x:{_l:[x1]}} ] ]" + - " } " + - "}" - ); + "=={top: {" + "_s:[ [0 , {x:{_l:[x1]}} ] ]" + " } " + "}"); // same test, but now the range facet includes "other" buckets // (so we also verify that the "_actual_end" is echoed back) - doTestRefine("{top:{type:range, other:all, field:R, start:0, end:1, gap:1, " + - " facet:{x : {type:terms, field:X, limit:2, refine:true} } } }", - // phase #1 - "{top: {buckets:[{val:0, count:2, x:{more:true,buckets:[{val:x1, count:5},{val:x2, count:3}]} } ]," + - " before:{count:0},after:{count:0}," + - " between:{count:2,x:{more:true,buckets:[{val:x1, count:5},{val:x2, count:3}]} }," + - " '_actual_end':'does_not_matter_must_be_echoed_back' } }", - "{top: {buckets:[{val:0, count:1, x:{more:true,buckets:[{val:x2, count:4},{val:x3, count:2}]} } ]," + - " before:{count:0},after:{count:0}," + - " between:{count:1,x:{more:true,buckets:[{val:x2, count:4},{val:x3, count:2}]} }," + - " '_actual_end':'does_not_matter_must_be_echoed_back' } }", - // refinement... - null, - "=={top: {" + - " _s:[ [0 , {x:{_l:[x1]}} ] ]," + - " between:{ x:{_l : [x1]} }," + - " '_actual_end':'does_not_matter_must_be_echoed_back'" + - "} } "); + doTestRefine( + "{top:{type:range, other:all, field:R, start:0, end:1, gap:1, " + + " facet:{x : {type:terms, field:X, limit:2, refine:true} } } }", + // phase #1 + "{top: {buckets:[{val:0, count:2, x:{more:true,buckets:[{val:x1, count:5},{val:x2, count:3}]} } ]," + + " before:{count:0},after:{count:0}," + + " between:{count:2,x:{more:true,buckets:[{val:x1, count:5},{val:x2, count:3}]} }," + + " '_actual_end':'does_not_matter_must_be_echoed_back' } }", + "{top: {buckets:[{val:0, count:1, x:{more:true,buckets:[{val:x2, count:4},{val:x3, count:2}]} } ]," + + " before:{count:0},after:{count:0}," + + " between:{count:1,x:{more:true,buckets:[{val:x2, count:4},{val:x3, count:2}]} }," + + " '_actual_end':'does_not_matter_must_be_echoed_back' } }", + // refinement... + null, + "=={top: {" + + " _s:[ [0 , {x:{_l:[x1]}} ] ]," + + " between:{ x:{_l : [x1]} }," + + " '_actual_end':'does_not_matter_must_be_echoed_back'" + + "} } "); // imagine that all the nodes we query in phase#1 are running "old" versions of solr that - // don't know they are suppose to compute _actual_end ... our merger should not fail or freak out + // don't know they are supposed to compute _actual_end. 
our merger should not fail or freak out // trust that in the phase#2 refinement request either: // - the processor will re-compute it (if refine request goes to "new" version of solr) // - the processor wouldn't know what to do with an _actual_end sent by the merger anyway - doTestRefine("{top:{type:range, other:all, field:R, start:0, end:1, gap:1, " + - " facet:{x : {type:terms, field:X, limit:2, refine:true} } } }", - // phase #1 - "{top: {buckets:[{val:0, count:2, x:{more:true,buckets:[{val:x1, count:5},{val:x2, count:3}]} } ]," + - " before:{count:0},after:{count:0}," + - " between:{count:2,x:{more:true,buckets:[{val:x1, count:5},{val:x2, count:3}]} }," + - " } }", // no actual_end - "{top: {buckets:[{val:0, count:1, x:{more:true,buckets:[{val:x2, count:4},{val:x3, count:2}]} } ]," + - " before:{count:0},after:{count:0}," + - " between:{count:1,x:{more:true,buckets:[{val:x2, count:4},{val:x3, count:2}]} }," + - " } }", // no actual_end - // refinement... - null, - "=={top: {" + - " _s:[ [0 , {x:{_l:[x1]}} ] ]," + - " between:{ x:{_l : [x1]} }" + - "} } "); + doTestRefine( + "{top:{type:range, other:all, field:R, start:0, end:1, gap:1, " + + " facet:{x : {type:terms, field:X, limit:2, refine:true} } } }", + // phase #1 + "{top: {buckets:[{val:0, count:2, x:{more:true,buckets:[{val:x1, count:5},{val:x2, count:3}]} } ]," + + " before:{count:0},after:{count:0}," + + " between:{count:2,x:{more:true,buckets:[{val:x1, count:5},{val:x2, count:3}]} }," + + " } }", // no actual_end + "{top: {buckets:[{val:0, count:1, x:{more:true,buckets:[{val:x2, count:4},{val:x3, count:2}]} } ]," + + " before:{count:0},after:{count:0}," + + " between:{count:1,x:{more:true,buckets:[{val:x2, count:4},{val:x3, count:2}]} }," + + " } }", // no actual_end + // refinement... + null, + "=={top: {" + + " _s:[ [0 , {x:{_l:[x1]}} ] ]," + + " between:{ x:{_l : [x1]} }" + + "} } "); // a range facet w/o any sub facets shouldn't require any refinement - doTestRefine("{top:{type:range, other:all, field:R, start:0, end:3, gap:2 } }" , - // phase #1 - "{top: {buckets:[{val:0, count:2}, {val:2, count:2}]," + - " before:{count:3},after:{count:47}," + - " between:{count:5}," + - " } }", - "{top: {buckets:[{val:0, count:2}, {val:2, count:19}]," + - " before:{count:22},after:{count:0}," + - " between:{count:21}," + - " } }", - // refinement... - null, - null); + doTestRefine( + "{top:{type:range, other:all, field:R, start:0, end:3, gap:2 } }", + // phase #1 + "{top: {buckets:[{val:0, count:2}, {val:2, count:2}]," + + " before:{count:3},after:{count:47}," + + " between:{count:5}," + + " } }", + "{top: {buckets:[{val:0, count:2}, {val:2, count:19}]," + + " before:{count:22},after:{count:0}," + + " between:{count:21}," + + " } }", + // refinement...
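+ // (descriptive note added for clarity, not part of the original patch: both expected refinement infos below are null because a range facet's buckets are fully determined by start/end/gap on every shard, so the merger can simply sum the per-shard counts and has nothing to backfill)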
+ null, + null); // same test, but nested in range facet with ranges - doTestRefine("{top:{type:range, field:R, ranges:[{from:0, to:1}], facet:{x : {type:terms, field:X, limit:2, refine:true} } } }", + doTestRefine( + "{top:{type:range, field:R, ranges:[{from:0, to:1}], facet:{x : {type:terms, field:X, limit:2, refine:true} } } }", "{top: {buckets:[{val:\"[0,1)\", count:2, x:{buckets:[{val:x1, count:5},{val:x2, count:3}],more:true} } ] } }", "{top: {buckets:[{val:\"[0,1)\", count:1, x:{buckets:[{val:x2, count:4},{val:x3, count:2}],more:true} } ] } }", null, - "=={top: {" + - "_s:[ [\"[0,1)\" , {x:{_l:[x1]}} ] ]" + - " } " + - "}" - ); + "=={top: {" + "_s:[ [\"[0,1)\" , {x:{_l:[x1]}} ] ]" + " } " + "}"); - doTestRefine("{top:{type:range, field:R, ranges:[{from:\"*\", to:1}], facet:{x : {type:terms, field:X, limit:2, refine:true} } } }", + doTestRefine( + "{top:{type:range, field:R, ranges:[{from:\"*\", to:1}], facet:{x : {type:terms, field:X, limit:2, refine:true} } } }", "{top: {buckets:[{val:\"[*,1)\", count:2, x:{buckets:[{val:x1, count:5},{val:x2, count:3}],more:true} } ] } }", "{top: {buckets:[{val:\"[*,1)\", count:1, x:{buckets:[{val:x2, count:4},{val:x3, count:2}],more:true} } ] } }", null, - "=={top: {" + - "_s:[ [\"[*,1)\" , {x:{_l:[x1]}} ] ]" + - " } " + - "}" - ); + "=={top: {" + "_s:[ [\"[*,1)\" , {x:{_l:[x1]}} ] ]" + " } " + "}"); // a range facet w/o any sub facets shouldn't require any refinement // other and include ignored for ranges - doTestRefine("{top:{type:range, other:all, field:R, ranges:[{from:0, to:2},{from:2, to:3}] } }", - // phase #1 - "{top: {buckets:[{val:\"[0,2)\", count:2}, {val:\"[2,3)\", count:2}]," + - " } }", - "{top: {buckets:[{val:\"[0,2)\", count:2}, {val:\"[2,3)\", count:19}]," + - " } }", + doTestRefine( + "{top:{type:range, other:all, field:R, ranges:[{from:0, to:2},{from:2, to:3}] } }", + // phase #1 + "{top: {buckets:[{val:\"[0,2)\", count:2}, {val:\"[2,3)\", count:2}]," + " } }", + "{top: {buckets:[{val:\"[0,2)\", count:2}, {val:\"[2,3)\", count:19}]," + " } }", // refinement... 
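+ // (descriptive note added for clarity, not part of the original patch: as in the gap-based case above, the explicitly enumerated ranges yield the same buckets on every shard, so no refinement info is expected from either shard and both expectations below are null)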
null, null); // for testing partial _p, we need a partial facet within a partial facet - doTestRefine("{top:{type:terms, field:Afield, refine:true, limit:1, facet:{x : {type:terms, field:X, limit:1, refine:true} } } }", + doTestRefine( + "{top:{type:terms, field:Afield, refine:true, limit:1, facet:{x : {type:terms, field:X, limit:1, refine:true} } } }", "{top: {buckets:[{val:'A', count:2, x:{buckets:[{val:x1, count:5},{val:x2, count:3}],more:true} } ],more:true } }", "{top: {buckets:[{val:'B', count:1, x:{buckets:[{val:x2, count:4},{val:x3, count:2}],more:true} } ],more:true } }", null, - "=={top: {" + - "_p:[ ['A' , {x:{_l:[x1]}} ] ]" + - " } " + - "}" - ); + "=={top: {" + "_p:[ ['A' , {x:{_l:[x1]}} ] ]" + " } " + "}"); // test partial _p under a missing bucket - doTestRefine("{top:{type:terms, field:Afield, refine:true, limit:1, missing:true, facet:{x : {type:terms, field:X, limit:1, refine:true} } } }", + doTestRefine( + "{top:{type:terms, field:Afield, refine:true, limit:1, missing:true, facet:{x : {type:terms, field:X, limit:1, refine:true} } } }", "{top: {buckets:[], missing:{count:12, x:{buckets:[{val:x2, count:4},{val:x3, count:2}],more:true} } } }", "{top: {buckets:[], missing:{count:10, x:{buckets:[{val:x1, count:5},{val:x4, count:3}],more:true} } } }", - "=={top: {" + - "missing:{x:{_l:[x1]}}" + - " } " + - "}" - , null - ); - + "=={top: {" + "missing:{x:{_l:[x1]}}" + " } " + "}", + null); } @Test public void testMergeWithOverrefine() throws Exception { // overrefine heuristic should use explicit overrequest as default - doTestRefine("{x : {type:terms, field:X, limit:1, overrequest:1, sort:'count asc', refine:true} }", - // - "{x: {buckets:[{val:x1, count:3}, {val:x2, count:5}, {val:x9, count:42}, {val:x0, count:42}], more:true } }", - "{x: {buckets:[{val:x2, count:2}, {val:x3, count:4}, {val:x7, count:66}, {val:x8, count:66}], more:true } }", - // - "=={x:{_l:[x3]}}", - "=={x:{_l:[x1]}}"); - doTestRefine("{x : {type:terms, field:X, limit:1, overrequest:0, sort:'count asc', refine:true} }", - // - "{x: {buckets:[{val:x1, count:3}, {val:x2, count:5}, {val:x9, count:42}, {val:x0, count:42}], more:true } }", - "{x: {buckets:[{val:x2, count:2}, {val:x3, count:4}, {val:x7, count:66}, {val:x8, count:66}], more:true } }", - // - null, - "=={x:{_l:[x1]}}"); - + doTestRefine( + "{x : {type:terms, field:X, limit:1, overrequest:1, sort:'count asc', refine:true} }", + // + "{x: {buckets:[{val:x1, count:3}, {val:x2, count:5}, {val:x9, count:42}, {val:x0, count:42}], more:true } }", + "{x: {buckets:[{val:x2, count:2}, {val:x3, count:4}, {val:x7, count:66}, {val:x8, count:66}], more:true } }", + // + "=={x:{_l:[x3]}}", + "=={x:{_l:[x1]}}"); + doTestRefine( + "{x : {type:terms, field:X, limit:1, overrequest:0, sort:'count asc', refine:true} }", + // + "{x: {buckets:[{val:x1, count:3}, {val:x2, count:5}, {val:x9, count:42}, {val:x0, count:42}], more:true } }", + "{x: {buckets:[{val:x2, count:2}, {val:x3, count:4}, {val:x7, count:66}, {val:x8, count:66}], more:true } }", + // + null, + "=={x:{_l:[x1]}}"); + // completely implicit heuristic when no explicit overrequest - // limit=1 + 10% + 4 =~ 5 total, but x2 is fully populated so only the other 4 "lowest" should be refined - doTestRefine("{x : {type:terms, field:X, limit:1, sort:'count asc', refine:true} }", - // - "{x: {buckets:[{val:x1, count:3}, {val:x2, count:5}, {val:x9, count:42}, {val:x0, count:42}], more:true } }", - "{x: {buckets:[{val:x2, count:2}, {val:x3, count:4}, {val:x7, count:66}, {val:x8, count:66}], more:true } }", - // - 
"=={x:{_l:[x3]}}", - "=={x:{_l:[x1,x0,x9]}}"); - - // when using (default) mincount (or mincount=0) sort="count desc" should eliminate need for overrefine - // (regardless of whether any explicit overrequest is specified) - for (String extra : Arrays.asList("", ", mincount:0", ", mincount:1", - ", overrequest:3", ", overrequest:3, mincount:0")) { - // w/o any overrefinement, we should only need to backfill x1 & x3 (x2 already fully populated) - doTestRefine("{x : {type:terms, field:X, limit:3, sort:'count desc', refine:true"+extra+" } }", - // - "{x: {buckets:[{val:x1, count:29}, {val:x2, count:15}, {val:x9, count:7}, {val:x0, count:7}], more:true } }", - "{x: {buckets:[{val:x2, count:20}, {val:x3, count:12}, {val:x7, count:7}, {val:x8, count:7}], more:true } }", - // - "=={x:{_l:[x3]}}", - "=={x:{_l:[x1]}}"); + // limit=1 + 10% + 4 =~ 5 total, but x2 is fully populated so only the other 4 "lowest" should + // be refined + doTestRefine( + "{x : {type:terms, field:X, limit:1, sort:'count asc', refine:true} }", + // + "{x: {buckets:[{val:x1, count:3}, {val:x2, count:5}, {val:x9, count:42}, {val:x0, count:42}], more:true } }", + "{x: {buckets:[{val:x2, count:2}, {val:x3, count:4}, {val:x7, count:66}, {val:x8, count:66}], more:true } }", + // + "=={x:{_l:[x3]}}", + "=={x:{_l:[x1,x0,x9]}}"); + + // when using (default) mincount (or mincount=0) sort="count desc" should eliminate need for + // overrefine (regardless of whether any explicit overrequest is specified) + for (String extra : + Arrays.asList( + "", ", mincount:0", ", mincount:1", ", overrequest:3", ", overrequest:3, mincount:0")) { + // w/o any overrefinement, we should only need to backfill x1 & x3 (x2 already fully + // populated) + doTestRefine( + "{x : {type:terms, field:X, limit:3, sort:'count desc', refine:true" + extra + " } }", + // + "{x: {buckets:[{val:x1, count:29}, {val:x2, count:15}, {val:x9, count:7}, {val:x0, count:7}], more:true } }", + "{x: {buckets:[{val:x2, count:20}, {val:x3, count:12}, {val:x7, count:7}, {val:x8, count:7}], more:true } }", + // + "=={x:{_l:[x3]}}", + "=={x:{_l:[x1]}}"); } // with 1 x2 is fully populated and child buckets are consistent - no refinement needed at all // -> x4 has counts from both shards, but child buckets don't align perfectly // - // For (test) simplicity, only x3 and x4 have enough (total) y buckets to prove that the sub-facet + // For (test) simplicity, only x3 and x4 have enough (total) y buckets to prove that the + // sub-facet // overrefine hueristic is finite... 
// -> x3 has 6 total sub-facet buckets, only "lowest 5" should be refined on missing shard - // -> x4 also has 6 total sub-facet buckets, but only 3 need refined since 2 already fully populated - doTestRefine("{x:{type:terms, field:X, limit:1, sort:'count asc', refine:true, " + - " facet:{y : {type:terms, field:X, limit:1, sort:'count asc', refine:true} } } }", - // - "{x: {buckets:[" + - " {val:'x1', count:1, y:{buckets:[{val:y11, count:1},{val:y12, count:3}], more:true} }, "+ - " {val:'x2', count:2, y:{buckets:[{val:y21, count:1},{val:y22, count:3}], more:true} }, "+ - " {val:'x4', count:3, y:{buckets:[{val:y41, count:1},{val:y4a, count:3}, "+ - " {val:y42, count:4},{val:y4d, count:5}], more:true} }, "+ - " {val:'x5', count:4, y:{buckets:[{val:y51, count:1},{val:y52, count:3}], more:true} }, "+ - " ], more:true } }", - "{x: {buckets:[" + - " {val:'x3', count:1, y:{buckets:[{val:y31, count:1},{val:y32, count:2}, "+ - " {val:y33, count:3},{val:y34, count:4}, "+ - " {val:y35, count:5},{val:y36, count:6}], more:true} }, "+ - " {val:'x2', count:2, y:{buckets:[{val:y21, count:1},{val:y22, count:3}], more:true} }, "+ - " {val:'x4', count:3, y:{buckets:[{val:y41, count:1},{val:y4b, count:3}, "+ - " {val:y42, count:4},{val:y4c, count:9}], more:true} }, "+ - " {val:'x9', count:9, y:{buckets:[{val:y91, count:1},{val:y92, count:3}], more:true} }, "+ - " ], more:true } }", - // - "=={x: {" + - " _p:[ ['x3' , {y:{_l:[y31,y32,y33,y34,y35]}} ] ]," + - " _s:[ ['x4' , {y:{_l:[y4b]}} ] ]," + - " } }", - "=={x: {" + - " _p:[ ['x1' , {y:{_l:[y11,y12]}} ], " + - " ['x5' , {y:{_l:[y51,y52]}} ] ]," + - " _s:[ ['x4' , {y:{_l:[y4a,y4d]}} ] ]," + - " } }"); - - + // -> x4 also has 6 total sub-facet buckets, but only 3 need refined since 2 already fully + // populated + doTestRefine( + "{x:{type:terms, field:X, limit:1, sort:'count asc', refine:true, " + + " facet:{y : {type:terms, field:X, limit:1, sort:'count asc', refine:true} } } }", + // + "{x: {buckets:[" + + " {val:'x1', count:1, y:{buckets:[{val:y11, count:1},{val:y12, count:3}], more:true} }, " + + " {val:'x2', count:2, y:{buckets:[{val:y21, count:1},{val:y22, count:3}], more:true} }, " + + " {val:'x4', count:3, y:{buckets:[{val:y41, count:1},{val:y4a, count:3}, " + + " {val:y42, count:4},{val:y4d, count:5}], more:true} }, " + + " {val:'x5', count:4, y:{buckets:[{val:y51, count:1},{val:y52, count:3}], more:true} }, " + + " ], more:true } }", + "{x: {buckets:[" + + " {val:'x3', count:1, y:{buckets:[{val:y31, count:1},{val:y32, count:2}, " + + " {val:y33, count:3},{val:y34, count:4}, " + + " {val:y35, count:5},{val:y36, count:6}], more:true} }, " + + " {val:'x2', count:2, y:{buckets:[{val:y21, count:1},{val:y22, count:3}], more:true} }, " + + " {val:'x4', count:3, y:{buckets:[{val:y41, count:1},{val:y4b, count:3}, " + + " {val:y42, count:4},{val:y4c, count:9}], more:true} }, " + + " {val:'x9', count:9, y:{buckets:[{val:y91, count:1},{val:y92, count:3}], more:true} }, " + + " ], more:true } }", + // + "=={x: {" + + " _p:[ ['x3' , {y:{_l:[y31,y32,y33,y34,y35]}} ] ]," + + " _s:[ ['x4' , {y:{_l:[y4b]}} ] ]," + + " } }", + "=={x: {" + + " _p:[ ['x1' , {y:{_l:[y11,y12]}} ], " + + " ['x5' , {y:{_l:[y51,y52]}} ] ]," + + " _s:[ ['x4' , {y:{_l:[y4a,y4d]}} ] ]," + + " } }"); } - /** - * When prelim_sort is used, all 'top bucket' choices for refinement should still be based on - * it, not the sort param, so this test is just some sanity checks that the presence of the - * these params doesn't break anything in the refine / logic. 
+ /** + * When prelim_sort is used, all 'top bucket' choices for refinement should still be + * based on it, not the sort param, so this test is just some sanity checks that the + * presence of these params doesn't break anything in the refine / logic. */ @Test public void testRefinementMergingWithPrelimSort() throws Exception { - doTestRefine("{x : { type:terms, field:X, limit:2, refine:true, prelim_sort:'count desc', sort:'y asc'," + - " facet:{ y:'sum(y_i)' } } }", - // shard0 response - "{x: {buckets:[{val:x1, count:5, y:73}, {val:x2, count:3, y:13}], more:true } }", - // shard1 response - "{x: {buckets:[{val:x2, count:4, y:4}, {val:x3, count:2, y:22}], more:true } }", - // shard0 expected refinement info - null, - // shard1 expected refinement info - "=={x:{_l:[x1]}}"); + doTestRefine( + "{x : { type:terms, field:X, limit:2, refine:true, prelim_sort:'count desc', sort:'y asc'," + + " facet:{ y:'sum(y_i)' } } }", + // shard0 response + "{x: {buckets:[{val:x1, count:5, y:73}, {val:x2, count:3, y:13}], more:true } }", + // shard1 response + "{x: {buckets:[{val:x2, count:4, y:4}, {val:x3, count:2, y:22}], more:true } }", + // shard0 expected refinement info + null, + // shard1 expected refinement info + "=={x:{_l:[x1]}}"); // same test as above, but shard1 indicates it doesn't have any more results, // so there shouldn't be any refinement - doTestRefine("{x : { type:terms, field:X, limit:2, refine:true, prelim_sort:'count desc', sort:'y asc'," + - " facet:{ y:'sum(y_i)' } } }", - // shard0 response - "{x: {buckets:[{val:x1, count:5, y:73}, {val:x2, count:3, y:13}], more:true } }", - // shard1 response - "{x: {buckets:[{val:x2, count:4, y:4}, {val:x3, count:2, y:22}] } }", - // shard0 expected refinement info - null, - // shard1 expected refinement info - null); + doTestRefine( + "{x : { type:terms, field:X, limit:2, refine:true, prelim_sort:'count desc', sort:'y asc'," + + " facet:{ y:'sum(y_i)' } } }", + // shard0 response + "{x: {buckets:[{val:x1, count:5, y:73}, {val:x2, count:3, y:13}], more:true } }", + // shard1 response + "{x: {buckets:[{val:x2, count:4, y:4}, {val:x3, count:2, y:22}] } }", + // shard0 expected refinement info + null, + // shard1 expected refinement info + null); } @Test @@ -502,10 +499,13 @@ public void testPrelimSortingWithRefinement() throws Exception { // NOTE: distributed prelim_sort testing in TestJsonFacets uses identical shards, so never needs // refinement, so here we focus on the (re)sorting of different topN refined buckets // after the prelim_sorting from diff shards - + initServers(); final Client client = servers.getClient(random().nextInt()); - client.queryDefaults().set("shards", servers.getShards()).set("debugQuery", Boolean.toString(random().nextBoolean())); + client + .queryDefaults() + .set("shards", servers.getShards()) + .set("debugQuery", Boolean.toString(random().nextBoolean())); List clients = client.getClientProvider().all(); assertTrue(clients.size() >= 3); // we only use 2, but assert 3 to also test empty shard @@ -516,86 +516,104 @@ public void testPrelimSortingWithRefinement() throws Exception { int id = 0; // client 0 // shard1: A=1,B=1,C=2 ... 
- c0.add(sdoc("id", id++, "cat_s","A", "price_i","1")); - c0.add(sdoc("id", id++, "cat_s","B", "price_i","1")); - c0.add(sdoc("id", id++, "cat_s","C", "price_i","1")); - c0.add(sdoc("id", id++, "cat_s","C", "price_i","1")); + c0.add(sdoc("id", id++, "cat_s", "A", "price_i", "1")); + c0.add(sdoc("id", id++, "cat_s", "B", "price_i", "1")); + c0.add(sdoc("id", id++, "cat_s", "C", "price_i", "1")); + c0.add(sdoc("id", id++, "cat_s", "C", "price_i", "1")); // ... X=3,Y=3 - c0.add(sdoc("id", id++, "cat_s","X", "price_i","1")); - c0.add(sdoc("id", id++, "cat_s","X", "price_i","1")); - c0.add(sdoc("id", id++, "cat_s","X", "price_i","1")); - c0.add(sdoc("id", id++, "cat_s","Y", "price_i","1")); - c0.add(sdoc("id", id++, "cat_s","Y", "price_i","1")); - c0.add(sdoc("id", id++, "cat_s","Y", "price_i","1")); - + c0.add(sdoc("id", id++, "cat_s", "X", "price_i", "1")); + c0.add(sdoc("id", id++, "cat_s", "X", "price_i", "1")); + c0.add(sdoc("id", id++, "cat_s", "X", "price_i", "1")); + c0.add(sdoc("id", id++, "cat_s", "Y", "price_i", "1")); + c0.add(sdoc("id", id++, "cat_s", "Y", "price_i", "1")); + c0.add(sdoc("id", id++, "cat_s", "Y", "price_i", "1")); + // client 1 // shard2: X=1,Y=2,Z=2 ... - c1.add(sdoc("id", id++, "cat_s","X", "price_i","1")); - c1.add(sdoc("id", id++, "cat_s","Y", "price_i","1")); - c1.add(sdoc("id", id++, "cat_s","Y", "price_i","1")); - c1.add(sdoc("id", id++, "cat_s","Z", "price_i","1")); - c1.add(sdoc("id", id++, "cat_s","Z", "price_i","1")); + c1.add(sdoc("id", id++, "cat_s", "X", "price_i", "1")); + c1.add(sdoc("id", id++, "cat_s", "Y", "price_i", "1")); + c1.add(sdoc("id", id++, "cat_s", "Y", "price_i", "1")); + c1.add(sdoc("id", id++, "cat_s", "Z", "price_i", "1")); + c1.add(sdoc("id", id++, "cat_s", "Z", "price_i", "1")); // ... C=4 - c1.add(sdoc("id", id++, "cat_s","C", "price_i","1")); - c1.add(sdoc("id", id++, "cat_s","C", "price_i","1")); - c1.add(sdoc("id", id++, "cat_s","C", "price_i","1")); - c1.add(sdoc("id", id++, "cat_s","C", "price_i","1")); - + c1.add(sdoc("id", id++, "cat_s", "C", "price_i", "1")); + c1.add(sdoc("id", id++, "cat_s", "C", "price_i", "1")); + c1.add(sdoc("id", id++, "cat_s", "C", "price_i", "1")); + c1.add(sdoc("id", id++, "cat_s", "C", "price_i", "1")); + // Whole Collection: A=1,B=1,Z=2,X=4,Y=5,C=6 client.commit(); - - // in both cases, neither C nor Z make the cut for the top3 buckets in phase#1 (due to tie breaker), - // so they aren't refined -- after refinement the re-sorting re-orders the buckets - client.testJQ(params("q", "*:*", "rows", "0", "json.facet", "{" - + " cat_1 : { type:terms, field:cat_s, limit:3, overrequest:0" - + " , refine:true, prelim_sort:'count asc', sort:'index desc' }, " - + " cat_2 : { type:terms, field:cat_s, limit:3, overrequest:0" - + " , refine:true, prelim_sort:'sum_p asc', sort:'count desc' " - + " , facet: { sum_p: 'sum(price_i)' } }" - + "}") - , "facets=={ count: "+id+"," - + " cat_1:{ buckets:[ " - + " {val:X,count:4}," // index desc - + " {val:B,count:1}," - + " {val:A,count:1}," - + " ] }," - + " cat_2:{ buckets:[ " - + " {val:X,count:4,sum_p:4.0}," // count desc - + " {val:A,count:1,sum_p:1.0}," // index order tie break - + " {val:B,count:1,sum_p:1.0}," - + " ] }" - + "}" - ); + + // in both cases, neither C nor Z make the cut for the top3 buckets in phase#1 (due to tie + // breaker), so they aren't refined -- after refinement the re-sorting re-orders the buckets + client.testJQ( + params( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{" + + " cat_1 : { type:terms, field:cat_s, limit:3, 
overrequest:0" + + " , refine:true, prelim_sort:'count asc', sort:'index desc' }, " + + " cat_2 : { type:terms, field:cat_s, limit:3, overrequest:0" + + " , refine:true, prelim_sort:'sum_p asc', sort:'count desc' " + + " , facet: { sum_p: 'sum(price_i)' } }" + + "}"), + "facets=={ count: " + + id + + "," + + " cat_1:{ buckets:[ " + + " {val:X,count:4}," // index desc + + " {val:B,count:1}," + + " {val:A,count:1}," + + " ] }," + + " cat_2:{ buckets:[ " + + " {val:X,count:4,sum_p:4.0}," // count desc + + " {val:A,count:1,sum_p:1.0}," // index order tie break + + " {val:B,count:1,sum_p:1.0}," + + " ] }" + + "}"); // with some explicit overrefinement=2, we also refine C and Y, giving us those additional // (fully populated) buckets to consider during re-sorting... - client.testJQ(params("q", "*:*", "rows", "0", "json.facet", "{" - + " cat_1 : { type:terms, field:cat_s, limit:3, overrequest:0, overrefine:2" - + " , refine:true, prelim_sort:'count asc', sort:'index desc' }, " - + " cat_2 : { type:terms, field:cat_s, limit:3, overrequest:0, overrefine:2" - + " , refine:true, prelim_sort:'sum_p asc', sort:'count desc' " - + " , facet: { sum_p: 'sum(price_i)' } }" - + "}") - , "facets=={ count: "+id+"," - + " cat_1:{ buckets:[ " - + " {val:Y,count:5}," // index desc - + " {val:X,count:4}," - + " {val:C,count:6}," - + " ] }," - + " cat_2:{ buckets:[ " - + " {val:C,count:6,sum_p:6.0}," // count desc - + " {val:Y,count:5,sum_p:5.0}," - + " {val:X,count:4,sum_p:4.0}," - + " ] }" - + "}" - ); + client.testJQ( + params( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{" + + " cat_1 : { type:terms, field:cat_s, limit:3, overrequest:0, overrefine:2" + + " , refine:true, prelim_sort:'count asc', sort:'index desc' }, " + + " cat_2 : { type:terms, field:cat_s, limit:3, overrequest:0, overrefine:2" + + " , refine:true, prelim_sort:'sum_p asc', sort:'count desc' " + + " , facet: { sum_p: 'sum(price_i)' } }" + + "}"), + "facets=={ count: " + + id + + "," + + " cat_1:{ buckets:[ " + + " {val:Y,count:5}," // index desc + + " {val:X,count:4}," + + " {val:C,count:6}," + + " ] }," + + " cat_2:{ buckets:[ " + + " {val:C,count:6,sum_p:6.0}," // count desc + + " {val:Y,count:5,sum_p:5.0}," + + " {val:X,count:4,sum_p:4.0}," + + " ] }" + + "}"); } - @Test public void testSortedFacetRefinementPushingNonRefinedBucketBackIntoTopN() throws Exception { initServers(); final Client client = servers.getClient(random().nextInt()); - client.queryDefaults().set("shards", servers.getShards()).set("debugQuery", Boolean.toString(random().nextBoolean())); + client + .queryDefaults() + .set("shards", servers.getShards()) + .set("debugQuery", Boolean.toString(random().nextBoolean())); List clients = client.getClientProvider().all(); assertTrue(clients.size() >= 3); // we only use 2, but assert 3 to also test empty shard @@ -609,269 +627,380 @@ public void testSortedFacetRefinementPushingNonRefinedBucketBackIntoTopN() throw // every doc will be in all_ss:z_all, (most c1 docs will be in all_ss:some // (with index order tie breaker, c1 should return "some" when limit:1 // but "z_all" should have a higher count from c0) - + // client 0 // shard1: A=1,B=1,C=2 ... 
- c0.add(sdoc("id", id++, "cat_s","A", "price_i","1", "all_ss","z_all")); - c0.add(sdoc("id", id++, "cat_s","B", "price_i","1", "all_ss","z_all")); - c0.add(sdoc("id", id++, "cat_s","C", "price_i","1", "all_ss","z_all")); - c0.add(sdoc("id", id++, "cat_s","C", "price_i","1", "all_ss","z_all")); + c0.add(sdoc("id", id++, "cat_s", "A", "price_i", "1", "all_ss", "z_all")); + c0.add(sdoc("id", id++, "cat_s", "B", "price_i", "1", "all_ss", "z_all")); + c0.add(sdoc("id", id++, "cat_s", "C", "price_i", "1", "all_ss", "z_all")); + c0.add(sdoc("id", id++, "cat_s", "C", "price_i", "1", "all_ss", "z_all")); // ... X=3,Y=3 - c0.add(sdoc("id", id++, "cat_s","X", "price_i","1", "all_ss","z_all")); - c0.add(sdoc("id", id++, "cat_s","X", "price_i","1", "all_ss","z_all")); - c0.add(sdoc("id", id++, "cat_s","X", "price_i","1", "all_ss","z_all")); - c0.add(sdoc("id", id++, "cat_s","Y", "price_i","1", "all_ss","z_all")); - c0.add(sdoc("id", id++, "cat_s","Y", "price_i","1", "all_ss","z_all")); - c0.add(sdoc("id", id++, "cat_s","Y", "price_i","1", "all_ss","z_all")); - + c0.add(sdoc("id", id++, "cat_s", "X", "price_i", "1", "all_ss", "z_all")); + c0.add(sdoc("id", id++, "cat_s", "X", "price_i", "1", "all_ss", "z_all")); + c0.add(sdoc("id", id++, "cat_s", "X", "price_i", "1", "all_ss", "z_all")); + c0.add(sdoc("id", id++, "cat_s", "Y", "price_i", "1", "all_ss", "z_all")); + c0.add(sdoc("id", id++, "cat_s", "Y", "price_i", "1", "all_ss", "z_all")); + c0.add(sdoc("id", id++, "cat_s", "Y", "price_i", "1", "all_ss", "z_all")); + // client 1 // shard2: X=1,Y=2,Z=2 ... - c1.add(sdoc("id", id++, "cat_s","X", "price_i","1", "all_ss","z_all","all_ss","some")); - c1.add(sdoc("id", id++, "cat_s","Y", "price_i","1", "all_ss","z_all","all_ss","some")); - c1.add(sdoc("id", id++, "cat_s","Y", "price_i","1", "all_ss","z_all","all_ss","some")); - c1.add(sdoc("id", id++, "cat_s","Z", "price_i","1", "all_ss","z_all","all_ss","some")); - c1.add(sdoc("id", id++, "cat_s","Z", "price_i","1", "all_ss","z_all","all_ss","some")); + c1.add(sdoc("id", id++, "cat_s", "X", "price_i", "1", "all_ss", "z_all", "all_ss", "some")); + c1.add(sdoc("id", id++, "cat_s", "Y", "price_i", "1", "all_ss", "z_all", "all_ss", "some")); + c1.add(sdoc("id", id++, "cat_s", "Y", "price_i", "1", "all_ss", "z_all", "all_ss", "some")); + c1.add(sdoc("id", id++, "cat_s", "Z", "price_i", "1", "all_ss", "z_all", "all_ss", "some")); + c1.add(sdoc("id", id++, "cat_s", "Z", "price_i", "1", "all_ss", "z_all", "all_ss", "some")); // ... C=4 - c1.add(sdoc("id", id++, "cat_s","C", "price_i","1", "all_ss","z_all","all_ss","some")); - c1.add(sdoc("id", id++, "cat_s","C", "price_i","1", "all_ss","z_all","all_ss","some")); - c1.add(sdoc("id", id++, "cat_s","C", "price_i","1", "all_ss","z_all","all_ss","some")); - c1.add(sdoc("id", id++, "cat_s","C", "price_i","1", "all_ss","z_all","all_ss","some")); + c1.add(sdoc("id", id++, "cat_s", "C", "price_i", "1", "all_ss", "z_all", "all_ss", "some")); + c1.add(sdoc("id", id++, "cat_s", "C", "price_i", "1", "all_ss", "z_all", "all_ss", "some")); + c1.add(sdoc("id", id++, "cat_s", "C", "price_i", "1", "all_ss", "z_all", "all_ss", "some")); + c1.add(sdoc("id", id++, "cat_s", "C", "price_i", "1", "all_ss", "z_all", "all_ss", "some")); // the amount of overrequest shouldn't matter for demonstrating the issue... 
// it only changes how many C_fillerN & Z_fillerN terms are needed on each shard - final int overreq = TestUtil.nextInt(random(),0,20); - + final int overreq = TestUtil.nextInt(random(), 0, 20); + // for overreq=n: C_n:(x2 on client0 + x4 on client1); Z_n:(x2 on client1) for (int i = 0; i < overreq; i++) { for (int t = 0; t < 2; t++) { - c0.add(sdoc("id", id++, "cat_s","C_filler"+i, "price_i","1", "all_ss","z_all")); - c1.add(sdoc("id", id++, "cat_s","Z_filler"+i, "price_i","1", "all_ss","z_all","all_ss","some")); + c0.add(sdoc("id", id++, "cat_s", "C_filler" + i, "price_i", "1", "all_ss", "z_all")); + c1.add( + sdoc( + "id", + id++, + "cat_s", + "Z_filler" + i, + "price_i", + "1", + "all_ss", + "z_all", + "all_ss", + "some")); } for (int t = 0; t < 4; t++) { - c1.add(sdoc("id", id++, "cat_s","C_filler"+i, "price_i","1", "all_ss","z_all","all_ss","some")); + c1.add( + sdoc( + "id", + id++, + "cat_s", + "C_filler" + i, + "price_i", + "1", + "all_ss", + "z_all", + "all_ss", + "some")); // extra c0 docs that don't contribute to the cat_s facet,... // just so "z_all" will win overall on parent facet - c0.add(sdoc("id", id++, "all_ss","z_all")); + c0.add(sdoc("id", id++, "all_ss", "z_all")); } } - // Whole Collection: A=1,B=1,Z=2,X=4,Y=5,C=6 client.commit(); - + // In an ideal world, 'Z:2' would be returned as the 3rd value, // but neither C nor Z make the topN cut in phase#1, so only A,B,X get refined. // After refinement, X's increased values should *NOT* push it out of the (original) topN // to let "C" bubble back up into the topN, with incomplete/inaccurate count/stats - // (NOTE: hueristic for num buckets refined is based on 'overrequest' unless explicit 'overrefine') - client.testJQ(params("q", "*:*", "rows", "0", "json.facet", "{" - + " cat_count:{ type:terms, field:cat_s, limit:3, overrequest:"+overreq - + " , refine:true, sort:'count asc' }," - + " cat_price:{ type:terms, field:cat_s, limit:3, overrequest:"+overreq - + " , refine:true, sort:'sum_p asc' " - + " , facet: { sum_p: 'sum(price_i)' } }" - + "}") - , "facets=={ count: "+id+"," - + " cat_count:{ buckets:[ " - + " {val:A,count:1}," - + " {val:B,count:1}," - + " {val:X,count:4}," - + " ] }," - + " cat_price:{ buckets:[ " - + " {val:A,count:1,sum_p:1.0}," - + " {val:B,count:1,sum_p:1.0}," - + " {val:X,count:4,sum_p:4.0}," - + " ] }" - + "}" - ); - + // (NOTE: heuristic for num buckets refined is based on 'overrequest' unless explicit + // 'overrefine') + client.testJQ( + params( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{" + + " cat_count:{ type:terms, field:cat_s, limit:3, overrequest:" + + overreq + + " , refine:true, sort:'count asc' }," + + " cat_price:{ type:terms, field:cat_s, limit:3, overrequest:" + + overreq + + " , refine:true, sort:'sum_p asc' " + + " , facet: { sum_p: 'sum(price_i)' } }" + + "}"), + "facets=={ count: " + + id + + "," + + " cat_count:{ buckets:[ " + + " {val:A,count:1}," + + " {val:B,count:1}," + + " {val:X,count:4}," + + " ] }," + + " cat_price:{ buckets:[ " + + " {val:A,count:1,sum_p:1.0}," + + " {val:B,count:1,sum_p:1.0}," + + " {val:X,count:4,sum_p:4.0}," + + " ] }" + + "}"); + + // if we do the same query but explicitly request enough overrefinement to get past the filler // terms, we should get accurate counts for (C and) Z which should push X out - client.testJQ(params("q", "*:*", "rows", "0", "json.facet", "{" - + " cat_count:{ type:terms, field:cat_s, limit:3, overrequest:"+overreq - + " , overrefine:"+((1+overreq)*3)+", refine:true, sort:'count asc' }," - + " cat_price:{ type:terms, 
field:cat_s, limit:3, overrequest:"+overreq - + " , overrefine:"+((1+overreq)*3)+", refine:true, sort:'sum_p asc' " - + " , facet: { sum_p: 'sum(price_i)' } }" - + "}") - , "facets=={ count: "+id+"," - + " cat_count:{ buckets:[ " - + " {val:A,count:1}," - + " {val:B,count:1}," - + " {val:Z,count:2}," - + " ] }," - + " cat_price:{ buckets:[ " - + " {val:A,count:1,sum_p:1.0}," - + " {val:B,count:1,sum_p:1.0}," - + " {val:Z,count:2,sum_p:2.0}," - + " ] }" - + "}" - ); - - // if we use mincount=2, such that A & B get filtered out, then we should have buckets.size() < limit - // rather then buckets w/inaccurate counts/stats. - // (explicitly disabling overrefine & overrequest to prevent filler terms) - client.testJQ(params("q", "*:*", "rows", "0", "json.facet", "{" - + " cat_count:{ type:terms, field:cat_s, limit:3, overrequest: 0, overrefine: 0" - + " , mincount: 2, refine:true, sort:'count asc' }," - + " cat_price:{ type:terms, field:cat_s, limit:3, overrequest: 0, overrefine: 0" - + " , mincount: 2, refine:true, sort:'sum_p asc' " - + " , facet: { sum_p: 'sum(price_i)' } }" - + "}") - , "facets=={ count: "+id+"," - + " cat_count:{ buckets:[ " - + " {val:X,count:4}," - + " ] }," - + " cat_price:{ buckets:[ " - + " {val:X,count:4,sum_p:4.0}," - + " ] }" - + "}" - ); + client.testJQ( + params( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{" + + " cat_count:{ type:terms, field:cat_s, limit:3, overrequest:" + + overreq + + " , overrefine:" + + ((1 + overreq) * 3) + + ", refine:true, sort:'count asc' }," + + " cat_price:{ type:terms, field:cat_s, limit:3, overrequest:" + + overreq + + " , overrefine:" + + ((1 + overreq) * 3) + + ", refine:true, sort:'sum_p asc' " + + " , facet: { sum_p: 'sum(price_i)' } }" + + "}"), + "facets=={ count: " + + id + + "," + + " cat_count:{ buckets:[ " + + " {val:A,count:1}," + + " {val:B,count:1}," + + " {val:Z,count:2}," + + " ] }," + + " cat_price:{ buckets:[ " + + " {val:A,count:1,sum_p:1.0}," + + " {val:B,count:1,sum_p:1.0}," + + " {val:Z,count:2,sum_p:2.0}," + + " ] }" + + "}"); + + // if we use mincount=2, such that A & B get filtered out, then we should have buckets.size() < + // limit rather than buckets w/inaccurate counts/stats. 
(explicitly disabling overrefine & + // overrequest to prevent filler terms) + client.testJQ( + params( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{" + + " cat_count:{ type:terms, field:cat_s, limit:3, overrequest: 0, overrefine: 0" + + " , mincount: 2, refine:true, sort:'count asc' }," + + " cat_price:{ type:terms, field:cat_s, limit:3, overrequest: 0, overrefine: 0" + + " , mincount: 2, refine:true, sort:'sum_p asc' " + + " , facet: { sum_p: 'sum(price_i)' } }" + + "}"), + "facets=={ count: " + + id + + "," + + " cat_count:{ buckets:[ " + + " {val:X,count:4}," + + " ] }," + + " cat_price:{ buckets:[ " + + " {val:X,count:4,sum_p:4.0}," + + " ] }" + + "}"); // When our 'cat_s' facets are nested under an 'all_ss' facet, we should likewise not get // any (sub) buckets with incomplete/inaccurate counts // - // NOTE: parent facet limit is 1, testing with various top level overrequest/refine params to see - // how different refinement code paths of parent effect the child refinement + // NOTE: parent facet limit is 1, testing with various top level overrequest/refine params to + // see how different refinement code paths of parent affect the child refinement for (String top_refine : Arrays.asList("true", "false")) { // if our top level facet does *NO* overrequesting, then our shard1 will return "some" as its // (only) top term, which will lose to "z_all" from shard0, and the (single pass) refinement // logic will have no choice but to choose & refine the child facet terms from shard0: A,B,C - client.testJQ(params("q", "*:*", "rows", "0", "json.facet", "{" - + " all:{ type:terms, field:all_ss, limit:1, refine:"+top_refine - + ", overrequest:0" - + " , facet:{" - + " cat_count:{ type:terms, field:cat_s, limit:3, overrequest:"+overreq - + " , refine:true, sort:'count asc' }," - + " cat_price:{ type:terms, field:cat_s, limit:3, overrequest:"+overreq - + " , refine:true, sort:'sum_p asc' " - + " , facet: { sum_p: 'sum(price_i)' } }" - + "} } }") - , "facets=={ count: "+id+"," - + "all:{ buckets:[ " - + " { val:z_all, count: "+id+"," - + " cat_count:{ buckets:[ " - + " {val:A,count:1}," - + " {val:B,count:1}," - + " {val:C,count:6}," - + " ] }," - + " cat_price:{ buckets:[ " - + " {val:A,count:1,sum_p:1.0}," - + " {val:B,count:1,sum_p:1.0}," - + " {val:C,count:6,sum_p:6.0}," - + " ] }" - + "} ] } }" - ); + client.testJQ( + params( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{" + + " all:{ type:terms, field:all_ss, limit:1, refine:" + + top_refine + + ", overrequest:0" + + " , facet:{" + + " cat_count:{ type:terms, field:cat_s, limit:3, overrequest:" + + overreq + + " , refine:true, sort:'count asc' }," + + " cat_price:{ type:terms, field:cat_s, limit:3, overrequest:" + + overreq + + " , refine:true, sort:'sum_p asc' " + + " , facet: { sum_p: 'sum(price_i)' } }" + + "} } }"), + "facets=={ count: " + + id + + "," + + "all:{ buckets:[ " + + " { val:z_all, count: " + + id + + "," + + " cat_count:{ buckets:[ " + + " {val:A,count:1}," + + " {val:B,count:1}," + + " {val:C,count:6}," + + " ] }," + + " cat_price:{ buckets:[ " + + " {val:A,count:1,sum_p:1.0}," + + " {val:B,count:1,sum_p:1.0}," + + " {val:C,count:6,sum_p:6.0}," + + " ] }" + + "} ] } }"); // With any overrequest param > 0 on the parent facet, both shards will return "z_all" as a // viable candidate and the merge logic should recognize that X is a better choice, // even though the (single shard) stats for "C" will be lower final int top_over = TestUtil.nextInt(random(), 1, 999); - client.testJQ(params("q", "*:*", "rows", "0", 
"json.facet", "{" - + " all:{ type:terms, field:all_ss, limit:1, refine:"+top_refine - + ", overrequest:" + top_over - + " , facet:{" - + " cat_count:{ type:terms, field:cat_s, limit:3, overrequest:"+overreq - + " , refine:true, sort:'count asc' }," - + " cat_price:{ type:terms, field:cat_s, limit:3, overrequest:"+overreq - + " , refine:true, sort:'sum_p asc' " - + " , facet: { sum_p: 'sum(price_i)' } }" - + "} } }") - , "facets=={ count: "+id+"," - + "all:{ buckets:[ " - + " { val:z_all, count: "+id+"," - + " cat_count:{ buckets:[ " - + " {val:A,count:1}," - + " {val:B,count:1}," - + " {val:X,count:4}," - + " ] }," - + " cat_price:{ buckets:[ " - + " {val:A,count:1,sum_p:1.0}," - + " {val:B,count:1,sum_p:1.0}," - + " {val:X,count:4,sum_p:4.0}," - + " ] }" - + "} ] } }" - ); + client.testJQ( + params( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{" + + " all:{ type:terms, field:all_ss, limit:1, refine:" + + top_refine + + ", overrequest:" + + top_over + + " , facet:{" + + " cat_count:{ type:terms, field:cat_s, limit:3, overrequest:" + + overreq + + " , refine:true, sort:'count asc' }," + + " cat_price:{ type:terms, field:cat_s, limit:3, overrequest:" + + overreq + + " , refine:true, sort:'sum_p asc' " + + " , facet: { sum_p: 'sum(price_i)' } }" + + "} } }"), + "facets=={ count: " + + id + + "," + + "all:{ buckets:[ " + + " { val:z_all, count: " + + id + + "," + + " cat_count:{ buckets:[ " + + " {val:A,count:1}," + + " {val:B,count:1}," + + " {val:X,count:4}," + + " ] }," + + " cat_price:{ buckets:[ " + + " {val:A,count:1,sum_p:1.0}," + + " {val:B,count:1,sum_p:1.0}," + + " {val:X,count:4,sum_p:4.0}," + + " ] }" + + "} ] } }"); // if we do the same query but explicitly request enough overrefinement on the child facet - // to get past the filler terms, we should get accurate counts for (C and) Z which should push X out - client.testJQ(params("q", "*:*", "rows", "0", "json.facet", "{" - + " all:{ type:terms, field:all_ss, limit:1, refine:"+top_refine - + ", overrequest:" + top_over - + " , facet:{" - + " cat_count:{ type:terms, field:cat_s, limit:3, overrequest:"+((1+overreq)*3) - + " , refine:true, sort:'count asc' }," - + " cat_price:{ type:terms, field:cat_s, limit:3, overrequest:"+((1+overreq)*3) - + " , refine:true, sort:'sum_p asc' " - + " , facet: { sum_p: 'sum(price_i)' } }" - + "} } }") - , "facets=={ count: "+id+"," - + "all:{ buckets:[ " - + " { val:z_all, count: "+id+"," - + " cat_count:{ buckets:[ " - + " {val:A,count:1}," - + " {val:B,count:1}," - + " {val:Z,count:2}," - + " ] }," - + " cat_price:{ buckets:[ " - + " {val:A,count:1,sum_p:1.0}," - + " {val:B,count:1,sum_p:1.0}," - + " {val:Z,count:2,sum_p:2.0}," - + " ] }" - + "} ] } }" - ); - + // to get past the filler terms, we should get accurate counts for (C and) Z which should push + // X out + client.testJQ( + params( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{" + + " all:{ type:terms, field:all_ss, limit:1, refine:" + + top_refine + + ", overrequest:" + + top_over + + " , facet:{" + + " cat_count:{ type:terms, field:cat_s, limit:3, overrequest:" + + ((1 + overreq) * 3) + + " , refine:true, sort:'count asc' }," + + " cat_price:{ type:terms, field:cat_s, limit:3, overrequest:" + + ((1 + overreq) * 3) + + " , refine:true, sort:'sum_p asc' " + + " , facet: { sum_p: 'sum(price_i)' } }" + + "} } }"), + "facets=={ count: " + + id + + "," + + "all:{ buckets:[ " + + " { val:z_all, count: " + + id + + "," + + " cat_count:{ buckets:[ " + + " {val:A,count:1}," + + " {val:B,count:1}," + + " {val:Z,count:2}," + + " ] 
}," + + " cat_price:{ buckets:[ " + + " {val:A,count:1,sum_p:1.0}," + + " {val:B,count:1,sum_p:1.0}," + + " {val:Z,count:2,sum_p:2.0}," + + " ] }" + + "} ] } }"); } } - @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-12556") + @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-12556") @Test public void testProcessEmptyRefinement() throws Exception { initServers(); final Client client = servers.getClient(random().nextInt()); - client.queryDefaults().set("shards", servers.getShards()).set("debugQuery", Boolean.toString(random().nextBoolean())); + client + .queryDefaults() + .set("shards", servers.getShards()) + .set("debugQuery", Boolean.toString(random().nextBoolean())); List clients = client.getClientProvider().all(); - assertTrue(clients.size() >= 3); // we only use 2, but assert at least 3 to also test empty shard + assertTrue( + clients.size() >= 3); // we only use 2, but assert at least 3 to also test empty shard final SolrClient c0 = clients.get(0); final SolrClient c1 = clients.get(1); client.deleteByQuery("*:*", null); int id = 0; - + c0.add(sdoc("id", id++, "cat_s", "Ax")); c0.add(sdoc("id", id++, "cat_s", "Bx")); c0.add(sdoc("id", id++, "cat_s", "Cx")); - + c1.add(sdoc("id", id++, "cat_s", "Ay")); c1.add(sdoc("id", id++, "cat_s", "By")); c1.add(sdoc("id", id++, "cat_s", "Cy")); c1.add(sdoc("id", id++, "cat_s", "Dy")); - + client.commit(); - // regardless of how much overrequest there is, in phase#1 + // regardless of how much overrequest there is, in phase#1 // all terms will tie on the sort criteria, and "Ax" should win the tiebreaker. // - // When Ax is refined against c1, it's 'debug' sort value will increase, but regardless - // of the value of processEmpty, no other term should be returned in it's place - // (because if they are also correctly refined, then their 'debug' sort values will also increase - // and Ax will stll win the tie breaker -- and if they are not refined they shouldn't be returned) + // When Ax is refined against c1, it's 'debug' sort value will increase, but regardless of the + // value of processEmpty, no other term should be returned in it's place (because if they are + // also correctly refined, then their 'debug' sort values will also increase and Ax will stll + // win the tie breaker -- and if they are not refined they shouldn't be returned) for (int overrequest = 0; overrequest < 5; overrequest++) { for (boolean pe : Arrays.asList(false, true)) { - ModifiableSolrParams p - = params("q", "*:*", "rows", "0", "json.facet" - , "{" - + " top:{ type:terms, field:cat_s, limit:1, overrequest:"+overrequest+", " - + " refine:true, sort: 'debug asc', " - + " facet:{ debug:'debug(numShards)', processEmpty:"+pe+" } } }"); + ModifiableSolrParams p = + params( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{" + + " top:{ type:terms, field:cat_s, limit:1, overrequest:" + + overrequest + + ", " + + " refine:true, sort: 'debug asc', " + + " facet:{ debug:'debug(numShards)', processEmpty:" + + pe + + " } } }"); try { - client.testJQ(p - , "facets=={ count: "+id+"," - + " top:{ buckets:[ " - + " { val:Ax, count: 1, " - + " debug:"+(pe ? 2 : 1) - + " }" - + " ] } }" - ); + client.testJQ( + p, + "facets=={ count: " + + id + + "," + + " top:{ buckets:[ " + + " { val:Ax, count: 1, " + + " debug:" + + (pe ? 
2 : 1) + + " }" + + " ] } }"); } catch (AssertionError | RuntimeException e) { throw new AssertionError(p + " --> " + e.getMessage(), e); } @@ -883,7 +1012,10 @@ public void testProcessEmptyRefinement() throws Exception { private int initSomeDocsWhere1ShardHasOnlyParentFacetField() throws Exception { initServers(); final Client client = servers.getClient(random().nextInt()); - client.queryDefaults().set("shards", servers.getShards()).set("debugQuery", Boolean.toString(random().nextBoolean())); + client + .queryDefaults() + .set("shards", servers.getShards()) + .set("debugQuery", Boolean.toString(random().nextBoolean())); final List clients = client.getClientProvider().all(); assertTrue(clients.size() >= 2); @@ -892,12 +1024,12 @@ private int initSomeDocsWhere1ShardHasOnlyParentFacetField() throws Exception { client.deleteByQuery("*:*", null); int id = 0; - + // client 0 // shard1 // only terms pX & pY (with high counts) from the parent_s facet, no child_s values for (int i = 0; i < 10; i++) { c0.add(sdoc("id", id++, "parent_s", "pX")); - for (int j =0; j < 2; j++) { + for (int j = 0; j < 2; j++) { c0.add(sdoc("id", id++, "parent_s", "pY")); } } @@ -907,12 +1039,12 @@ private int initSomeDocsWhere1ShardHasOnlyParentFacetField() throws Exception { // (but not as high as pX/py on shard1) // all docs on this shard also have values in child_s for (int i = 0; i < 2; i++) { - for (int j = 0; j < 3 ; j++) { - c1.add(sdoc("id", id++, "parent_s", "pA", "child_s", "c"+i)); - c1.add(sdoc("id", id++, "parent_s", "pB", "child_s", "c"+i)); + for (int j = 0; j < 3; j++) { + c1.add(sdoc("id", id++, "parent_s", "pA", "child_s", "c" + i)); + c1.add(sdoc("id", id++, "parent_s", "pB", "child_s", "c" + i)); } - c1.add(sdoc("id", id++, "parent_s", "pX", "child_s", "c"+i)); - c1.add(sdoc("id", id++, "parent_s", "pY", "child_s", "c"+i)); + c1.add(sdoc("id", id++, "parent_s", "pX", "child_s", "c" + i)); + c1.add(sdoc("id", id++, "parent_s", "pY", "child_s", "c" + i)); } c1.add(sdoc("id", id++, "parent_s", "pX", "child_s", "c0")); c1.add(sdoc("id", id++, "parent_s", "pY", "child_s", "c1")); @@ -922,17 +1054,20 @@ private int initSomeDocsWhere1ShardHasOnlyParentFacetField() throws Exception { return id; } - /** @see #testSortedSubFacetRefinementWhenParentOnlyReturnedByOneShardProcessEmpty */ + /** + * @see #testSortedSubFacetRefinementWhenParentOnlyReturnedByOneShardProcessEmpty + */ @Test public void testSortedSubFacetRefinementWhenParentOnlyReturnedByOneShard() throws Exception { final int numDocs = initSomeDocsWhere1ShardHasOnlyParentFacetField(); final Client client = servers.getClient(random().nextInt()); final List clients = client.getClientProvider().all(); - - assertTrue(clients.size() >= 3); // we only use 2, but assert at least 3 to also test empty shard + + assertTrue( + clients.size() >= 3); // we only use 2, but assert at least 3 to also test empty shard final SolrClient c0 = clients.get(0); final SolrClient c1 = clients.get(1); - + // during the initial request... 
// - shard1 should return "high" count pX & pY w/o any child buckets (no "more" child) // - shard2 should return "lower" count pA & pB w/some child buckets @@ -942,131 +1077,342 @@ public void testSortedSubFacetRefinementWhenParentOnlyReturnedByOneShard() throw // - these children from shard2 will be the only (possibly) contributors to the child buckets // // - the numShards for all parent buckets should be 2, but for the child buckets it should be 1 - client.testJQ(params("q", "*:*", "rows", "0", "json.facet", "{" - + "parent:{ type:terms, field:parent_s, limit:2, overrequest:0, refine:true, facet:{" - + " debug:'debug(numShards)'," - + " child:{ type:terms, field:child_s, limit:2, overrequest:0, refine: true," - + " facet:{ debug:'debug(numShards)' } }" - + "} } }") - , "facets=={ count: "+numDocs+"," - + " parent:{ buckets:[ " - + " { val:pY, count: 24," - + " debug:2, " - + " child:{ buckets:[ " - + " {val:c1,count:3, debug:1}," - + " {val:c0,count:1, debug:1}," - + " ] } }," - + " { val:pX, count: 13," - + " debug:2, " - + " child:{ buckets:[ " - + " {val:c0,count:2, debug:1}," - + " {val:c1,count:1, debug:1}," - + " ] } }," - + " ] } }" - ); + client.testJQ( + params( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{" + + "parent:{ type:terms, field:parent_s, limit:2, overrequest:0, refine:true, facet:{" + + " debug:'debug(numShards)'," + + " child:{ type:terms, field:child_s, limit:2, overrequest:0, refine: true," + + " facet:{ debug:'debug(numShards)' } }" + + "} } }"), + "facets=={ count: " + + numDocs + + "," + + " parent:{ buckets:[ " + + " { val:pY, count: 24," + + " debug:2, " + + " child:{ buckets:[ " + + " {val:c1,count:3, debug:1}," + + " {val:c0,count:1, debug:1}," + + " ] } }," + + " { val:pX, count: 13," + + " debug:2, " + + " child:{ buckets:[ " + + " {val:c0,count:2, debug:1}," + + " {val:c1,count:1, debug:1}," + + " ] } }," + + " ] } }"); } - - /** @see #testSortedSubFacetRefinementWhenParentOnlyReturnedByOneShard */ - @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-12556") + + /** + * @see #testSortedSubFacetRefinementWhenParentOnlyReturnedByOneShard + */ + @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-12556") @Test - public void testSortedSubFacetRefinementWhenParentOnlyReturnedByOneShardProcessEmpty() throws Exception { + public void testSortedSubFacetRefinementWhenParentOnlyReturnedByOneShardProcessEmpty() + throws Exception { final int numDocs = initSomeDocsWhere1ShardHasOnlyParentFacetField(); final Client client = servers.getClient(random().nextInt()); final List clients = client.getClientProvider().all(); final int numClients = clients.size(); - + assertTrue(numClients >= 3); // we only use 2, but assert at least 3 to also test empty shard final SolrClient c0 = clients.get(0); final SolrClient c1 = clients.get(1); - + // if we do the same request as testSortedSubFacetRefinementWhenParentOnlyReturnedByOneShard, // but with processEmpty:true, then ideally we should get the same buckets & counts as before, - // but the debug info should indicate that every shard contributed to every bucket (either initially, - // or during refinement) + // but the debug info should indicate that every shard contributed to every bucket (either + // initially, or during refinement) // // The problem comes in with how "empty" bucket lists are dealt with... 
- // - child debug counts never get higher then '2' because even with the forced "_l" refinement of - // the parent buckets against the "empty" shards we don't explicitly ask those shards to - // evaluate the child buckets + // - child debug counts never get higher than '2' because even with the forced "_l" refinement + // of the parent buckets against the "empty" shards we don't explicitly ask those shards to + // evaluate the child buckets // - perhaps we should reconsider the value of "_l" ? - // - why aren't we just specifying all the buckets (and child buckets) chosen in phase#1 using "_p" ? - // - or at the very least, if the purpose of "_l" is to give other buckets a chance to "bubble up" - // in phase#2, then shouldn't a "_l" refinement requests still include the buckets choosen in - // phase#1, and request that the shard fill them in in addition to returning its own top buckets? - client.testJQ(params("q", "*:*", "rows", "0", "json.facet", "{" - + "processEmpty:true," - + "parent:{ type:terms, field:parent_s, limit:2, overrequest:0, refine:true, facet:{" - + " processEmpty:true," - + " debug:'debug(numShards)'," - + " child:{ type:terms, field:child_s, limit:2, overrequest:0, refine: true," - + " facet:{ processEmpty:true, debug:'debug(numShards)' } }" - + "} } }") - , "facets=={ count: "+numDocs+"," - + " parent:{ buckets:[ " - + " { val:pY, count: 24," - + " debug:"+numClients+", " - + " child:{ buckets:[ " - + " {val:c1,count:3, debug:"+numClients+"}," - + " {val:c0,count:1, debug:"+numClients+"}," - + " ] } }," - + " { val:pX, count: 13," - + " debug:"+numClients+", " - + " child:{ buckets:[ " - + " {val:c0,count:2, debug:"+numClients+"}," - + " {val:c1,count:1, debug:"+numClients+"}," - + " ] } }," - + " ] } }" - ); + // - why aren't we just specifying all the buckets (and child buckets) chosen in phase#1 using + // "_p" ? + // - or at the very least, if the purpose of "_l" is to give other buckets a chance to "bubble + // up" in phase#2, then shouldn't a "_l" refinement request still include the buckets chosen + // in phase#1, and request that the shard fill them in in addition to returning its own top + // buckets? 
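+ // (illustrative note added for clarity, not part of the original patch: the two refinement-info shapes discussed above appear in the doTestRefine() expectations earlier in this file -- a "_l" entry merely lists bucket values a shard must backfill, e.g. {parent:{_l:[pX, pY]}}, while a "_p" entry carries the partially populated bucket together with refinement info for its sub-facets, e.g. {parent:{_p:[ ['pX', {child:{_l:[c0]}} ] ]}} -- which is why an "_l"-only request gives the "empty" shards nothing to evaluate for child buckets)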
+ client.testJQ( + params( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{" + + "processEmpty:true," + + "parent:{ type:terms, field:parent_s, limit:2, overrequest:0, refine:true, facet:{" + + " processEmpty:true," + + " debug:'debug(numShards)'," + + " child:{ type:terms, field:child_s, limit:2, overrequest:0, refine: true," + + " facet:{ processEmpty:true, debug:'debug(numShards)' } }" + + "} } }"), + "facets=={ count: " + + numDocs + + "," + + " parent:{ buckets:[ " + + " { val:pY, count: 24," + + " debug:" + + numClients + + ", " + + " child:{ buckets:[ " + + " {val:c1,count:3, debug:" + + numClients + + "}," + + " {val:c0,count:1, debug:" + + numClients + + "}," + + " ] } }," + + " { val:pX, count: 13," + + " debug:" + + numClients + + ", " + + " child:{ buckets:[ " + + " {val:c0,count:2, debug:" + + numClients + + "}," + + " {val:c1,count:1, debug:" + + numClients + + "}," + + " ] } }," + + " ] } }"); } - @Test public void testBasicRefinement() throws Exception { ModifiableSolrParams p; - p = params("cat_s", "cat_s", "cat_i", "cat_i", "date","cat_dt", "xy_s", "xy_s", "num_d", "num_d", "qw_s", "qw_s", "er_s", "er_s"); + p = + params( + "cat_s", "cat_s", "cat_i", "cat_i", "date", "cat_dt", "xy_s", "xy_s", "num_d", "num_d", + "qw_s", "qw_s", "er_s", "er_s"); doBasicRefinement(p); // multi-valued (except num_d) - p = params("cat_s", "cat_ss", "cat_i", "cat_is", "date","cat_dts", "xy_s", "xy_ss", "num_d", "num_d", "qw_s", "qw_ss", "er_s", "er_ss"); + p = + params( + "cat_s", "cat_ss", "cat_i", "cat_is", "date", "cat_dts", "xy_s", "xy_ss", "num_d", + "num_d", "qw_s", "qw_ss", "er_s", "er_ss"); doBasicRefinement(p); // single valued docvalues - p = params("cat_s", "cat_sd", "cat_i", "cat_id", "date","cat_dtd", "xy_s", "xy_sd", "num_d", "num_dd", "qw_s", "qw_sd", "er_s", "er_sd"); + p = + params( + "cat_s", "cat_sd", "cat_i", "cat_id", "date", "cat_dtd", "xy_s", "xy_sd", "num_d", + "num_dd", "qw_s", "qw_sd", "er_s", "er_sd"); doBasicRefinement(p); // multi valued docvalues (except num_d) - p = params("cat_s", "cat_sds", "cat_i", "cat_ids", "date","cat_dtds", "xy_s", "xy_sds", "num_d", "num_dd", "qw_s", "qw_sds", "er_s", "er_sds"); + p = + params( + "cat_s", + "cat_sds", + "cat_i", + "cat_ids", + "date", + "cat_dtds", + "xy_s", + "xy_sds", + "num_d", + "num_dd", + "qw_s", + "qw_sds", + "er_s", + "er_sds"); doBasicRefinement(p); } public void doBasicRefinement(ModifiableSolrParams p) throws Exception { initServers(); Client client = servers.getClient(random().nextInt()); - client.queryDefaults().set("shards", servers.getShards()).set("debugQuery", Boolean.toString(random().nextBoolean())); - + client + .queryDefaults() + .set("shards", servers.getShards()) + .set("debugQuery", Boolean.toString(random().nextBoolean())); + List<SolrClient> clients = client.getClientProvider().all(); assertTrue(clients.size() >= 3); - + client.deleteByQuery("*:*", null); String cat_s = p.get("cat_s"); String cat_i = p.get("cat_i"); // just like cat_s, but a number String xy_s = p.get("xy_s"); String qw_s = p.get("qw_s"); - String er_s = p.get("er_s"); // this field is designed to test numBuckets refinement... the first phase will only have a single bucket returned for the top count bucket of cat_s + // this field is designed to test numBuckets refinement... 
the first phase will only have a + // single bucket returned for the top count bucket of cat_s + String er_s = p.get("er_s"); String num_d = p.get("num_d"); String date = p.get("date"); - clients.get(0).add(sdoc("id", "01", "all_s", "all", cat_s, "A", cat_i, 1, date, "2001-01-01T01:01:01Z", xy_s, "X", num_d, -1, qw_s, "Q", er_s, "E")); // A wins count tie - clients.get(0).add(sdoc("id", "02", "all_s", "all", cat_s, "B", cat_i, 2, date, "2002-02-02T02:02:02Z", xy_s, "Y", num_d, 3)); - - clients.get(1).add(sdoc("id", "11", "all_s", "all", cat_s, "B", cat_i, 2, date, "2002-02-02T02:02:02Z", xy_s, "X", num_d, -5, er_s, "E")); // B highest count - clients.get(1).add(sdoc("id", "12", "all_s", "all", cat_s, "B", cat_i, 2, date, "2002-02-02T02:02:02Z", xy_s, "Y", num_d, -11, qw_s, "W")); - clients.get(1).add(sdoc("id", "13", "all_s", "all", cat_s, "A", cat_i, 1, date, "2001-01-01T01:01:01Z", xy_s, "X", num_d, 7, er_s, "R")); // "R" will only be picked up via refinement when parent facet is cat_s - - clients.get(2).add(sdoc("id", "21", "all_s", "all", cat_s, "A", cat_i, 1, date, "2001-01-01T01:01:01Z", xy_s, "X", num_d, 17, qw_s, "W", er_s, "E")); // A highest count - clients.get(2).add(sdoc("id", "22", "all_s", "all", cat_s, "A", cat_i, 1, date, "2001-01-01T01:01:01Z", xy_s, "Y", num_d, -19)); - clients.get(2).add(sdoc("id", "23", "all_s", "all", cat_s, "B", cat_i, 2, date, "2002-02-02T02:02:02Z", xy_s, "X", num_d, 11)); + clients + .get(0) + .add( + sdoc( + "id", + "01", + "all_s", + "all", + cat_s, + "A", + cat_i, + 1, + date, + "2001-01-01T01:01:01Z", + xy_s, + "X", + num_d, + -1, + qw_s, + "Q", + er_s, + "E")); // A wins count tie + clients + .get(0) + .add( + sdoc( + "id", + "02", + "all_s", + "all", + cat_s, + "B", + cat_i, + 2, + date, + "2002-02-02T02:02:02Z", + xy_s, + "Y", + num_d, + 3)); + + clients + .get(1) + .add( + sdoc( + "id", + "11", + "all_s", + "all", + cat_s, + "B", + cat_i, + 2, + date, + "2002-02-02T02:02:02Z", + xy_s, + "X", + num_d, + -5, + er_s, + "E")); // B highest count + clients + .get(1) + .add( + sdoc( + "id", + "12", + "all_s", + "all", + cat_s, + "B", + cat_i, + 2, + date, + "2002-02-02T02:02:02Z", + xy_s, + "Y", + num_d, + -11, + qw_s, + "W")); + clients + .get(1) + .add( + sdoc( + "id", + "13", + "all_s", + "all", + cat_s, + "A", + cat_i, + 1, + date, + "2001-01-01T01:01:01Z", + xy_s, + "X", + num_d, + 7, + er_s, + "R")); // "R" will only be picked up via refinement when parent facet is cat_s + + clients + .get(2) + .add( + sdoc( + "id", + "21", + "all_s", + "all", + cat_s, + "A", + cat_i, + 1, + date, + "2001-01-01T01:01:01Z", + xy_s, + "X", + num_d, + 17, + qw_s, + "W", + er_s, + "E")); // A highest count + clients + .get(2) + .add( + sdoc( + "id", + "22", + "all_s", + "all", + cat_s, + "A", + cat_i, + 1, + date, + "2001-01-01T01:01:01Z", + xy_s, + "Y", + num_d, + -19)); + clients + .get(2) + .add( + sdoc( + "id", + "23", + "all_s", + "all", + cat_s, + "B", + cat_i, + 2, + date, + "2002-02-02T02:02:02Z", + xy_s, + "X", + num_d, + 11)); client.commit(); @@ -1074,407 +1420,581 @@ public void doBasicRefinement(ModifiableSolrParams p) throws Exception { // One shard will have _facet_={"refine":{"cat0":{"_l":["A"]}}} on the second phase /**** - // fake a refinement request... 
good for development/debugging - assertJQ(clients.get(1), - params(p, "q", "*:*", "_facet_","{refine:{cat0:{_l:[A]}}}", "isShard","true", "distrib","false", "shards.purpose","2097216", "ids","11,12,13", - "json.facet", "{" + - "cat0:{type:terms, field:cat_s, sort:'count desc', limit:1, overrequest:0, refine:true}" + - "}" - ) - , "facets=={foo:555}" - ); + * // fake a refinement request... good for development/debugging + * assertJQ(clients.get(1), + * params(p, "q", "*:*", "_facet_","{refine:{cat0:{_l:[A]}}}", "isShard","true", "distrib","false", "shards.purpose","2097216", "ids","11,12,13", + * "json.facet", "{" + + * "cat0:{type:terms, field:cat_s, sort:'count desc', limit:1, overrequest:0, refine:true}" + + * "}" + * ) + * , "facets=={foo:555}" + * ); ****/ - for (String method : new String[]{"","dv", "dvhash","stream","uif","enum","stream","smart"}) { + for (String method : + new String[] {"", "dv", "dvhash", "stream", "uif", "enum", "stream", "smart"}) { if (method.equals("")) { p.remove("terms"); } else { - p.set("terms", "method:" + method+", "); + p.set("terms", "method:" + method + ", "); } - - - client.testJQ(params(p, "q", "*:*", - "json.facet", "{" + - "cat0:{${terms} type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0, refine:false}" + - "}" - ) - , "facets=={ count:8" + - ", cat0:{ buckets:[ {val:A,count:3} ] }" + // w/o overrequest and refinement, count is lower than it should be (we don't see the A from the middle shard) - "}" - ); - - client.testJQ(params(p, "q", "*:*", - "json.facet", "{" + - "cat0:{${terms} type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0, refine:true}" + - "}" - ) - , "facets=={ count:8" + - ", cat0:{ buckets:[ {val:A,count:4} ] }" + // w/o overrequest, we need refining to get the correct count. - "}" - ); + + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + "cat0:{${terms} type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0, refine:false}" + + "}"), + "facets=={ count:8" + + ", cat0:{ buckets:[ {val:A,count:3} ] }" + + // w/o overrequest and refinement, count is lower than it should be (we don't see + // the A from the middle shard) + "}"); + + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + "cat0:{${terms} type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0, refine:true}" + + "}"), + "facets=={ count:8" + + ", cat0:{ buckets:[ {val:A,count:4} ] }" + + // w/o overrequest, we need refining to get the correct count. + "}"); // same as above, but with an integer field instead of a string - client.testJQ(params(p, "q", "*:*", - "json.facet", "{" + - "cat0:{${terms} type:terms, field:${cat_i}, sort:'count desc', limit:1, overrequest:0, refine:true}" + - "}" - ) - , "facets=={ count:8" + - ", cat0:{ buckets:[ {val:1,count:4} ] }" + // w/o overrequest, we need refining to get the correct count. - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + "cat0:{${terms} type:terms, field:${cat_i}, sort:'count desc', limit:1, overrequest:0, refine:true}" + + "}"), + "facets=={ count:8" + + ", cat0:{ buckets:[ {val:1,count:4} ] }" + + // w/o overrequest, we need refining to get the correct count. 
+ "}"); // same as above, but with a date field - client.testJQ(params(p, "q", "*:*", - "json.facet", "{" + - "cat0:{${terms} type:terms, field:${date}, sort:'count desc', limit:1, overrequest:0, refine:true}" + - "}" - ) - , "facets=={ count:8" + - ", cat0:{ buckets:[ {val:'2001-01-01T01:01:01Z',count:4} ] }" + // w/o overrequest, we need refining to get the correct count. - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + "cat0:{${terms} type:terms, field:${date}, sort:'count desc', limit:1, overrequest:0, refine:true}" + + "}"), + "facets=={ count:8" + + ", cat0:{ buckets:[ {val:'2001-01-01T01:01:01Z',count:4} ] }" + + // w/o overrequest, we need refining to get the correct count. + "}"); // basic refining test through/under a query facet - client.testJQ(params(p, "q", "*:*", - "json.facet", "{" + - "q1 : { type:query, q:'*:*', facet:{" + - "cat0:{${terms} type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0, refine:true}" + - "}}" + - "}" - ) - , "facets=={ count:8" + - ", q1:{ count:8, cat0:{ buckets:[ {val:A,count:4} ] } }" + - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + "q1 : { type:query, q:'*:*', facet:{" + + "cat0:{${terms} type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0, refine:true}" + + "}}" + + "}"), + "facets=={ count:8" + ", q1:{ count:8, cat0:{ buckets:[ {val:A,count:4} ] } }" + "}"); // basic refining test through/under a range facet - for (String end : Arrays.asList(// all of these end+hardened options should produce the same buckets - "end:20, hardend:true", // evenly divisible so shouldn't matter - "end:20, hardend:false", "end:20", // defaults to hardened:false - "end:5, hardend:false", "end:5")) { - // since the gap divides the start/end divide eveningly, + for (String end : + Arrays.asList( // all of these end+hardened options should produce the same buckets + "end:20, hardend:true", // evenly divisible so shouldn't matter + "end:20, hardend:false", + "end:20", // defaults to hardened:false + "end:5, hardend:false", + "end:5")) { + // since the gap divides the start/end divide eveningly, // all of these hardend params should we should produce identical results - String sub = "cat0:{${terms} type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0, refine:true}"; + String sub = + "cat0:{${terms} type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0, refine:true}"; // single bucket, all 'other' buckets - client.testJQ(params(p, "q", "*:*", "json.facet" - , "{ r1 : { type:range, field:${num_d} other:all, start:-20, gap:40, " + end - + " , facet:{" + sub + "}}}") - , "facets=={ count:8" - + ", r1:{ buckets:[{val:-20.0,count:8, cat0:{buckets:[{val:A,count:4}]} }]," - + " before:{count:0}, after:{count:0}" - + " between:{count:8, cat0:{buckets:[{val:A,count:4}]}}" - + "}}"); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{ r1 : { type:range, field:${num_d} other:all, start:-20, gap:40, " + + end + + " , facet:{" + + sub + + "}}}"), + "facets=={ count:8" + + ", r1:{ buckets:[{val:-20.0,count:8, cat0:{buckets:[{val:A,count:4}]} }]," + + " before:{count:0}, after:{count:0}" + + " between:{count:8, cat0:{buckets:[{val:A,count:4}]}}" + + "}}"); // multiple buckets, only one 'other' buckets - client.testJQ(params(p, "q", "*:*", "json.facet" - , "{ r1 : { type:range, field:${num_d} other:between, start:-20, gap:20, " + end - + " , facet:{" + sub + "}}}") - , "facets=={ count:8" - // NOTE: in both buckets A & B are tied, 
but index order should break tie - + ", r1:{ buckets:[{val:-20.0, count:4, cat0:{buckets:[{val:A,count:2}]} }," - + " {val: 0.0, count:4, cat0:{buckets:[{val:A,count:2}]} } ]," - + " between:{count:8, cat0:{buckets:[{val:A,count:4}]}}" - + "}}"); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{ r1 : { type:range, field:${num_d} other:between, start:-20, gap:20, " + + end + + " , facet:{" + + sub + + "}}}"), + "facets=={ count:8" + // NOTE: in both buckets A & B are tied, but index order should break tie + + ", r1:{ buckets:[{val:-20.0, count:4, cat0:{buckets:[{val:A,count:2}]} }," + + " {val: 0.0, count:4, cat0:{buckets:[{val:A,count:2}]} } ]," + + " between:{count:8, cat0:{buckets:[{val:A,count:4}]}}" + + "}}"); } // test that basic stats work for refinement - client.testJQ(params(p, "q", "*:*", - "json.facet", "{" + - "cat0:{${terms} type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0, refine:true, facet:{ stat1:'sum(${num_d})'} }" + - "}" - ) - , "facets=={ count:8" + - ", cat0:{ buckets:[ {val:A,count:4, stat1:4.0} ] }" + - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + "cat0:{${terms} type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0, refine:true, facet:{ stat1:'sum(${num_d})'} }" + + "}"), + "facets=={ count:8" + ", cat0:{ buckets:[ {val:A,count:4, stat1:4.0} ] }" + "}"); // test sorting buckets by a different stat - client.testJQ(params(p, "q", "*:*", - "json.facet", "{" + - " cat0:{${terms} type:terms, field:${cat_s}, sort:'min1 asc', limit:1, overrequest:0, refine:false, facet:{ min1:'min(${num_d})'} }" + - ",cat1:{${terms} type:terms, field:${cat_s}, sort:'min1 asc', limit:1, overrequest:0, refine:true, facet:{ min1:'min(${num_d})'} }" + - ",qfacet:{type:query, q:'*:*', facet:{ cat2:{${terms} type:terms, field:${cat_s}, sort:'min1 asc', limit:1, overrequest:0, refine:true, facet:{ min1:'min(${num_d})'} } }}" + // refinement needed through a query facet - ",allf:{${terms} type:terms, field:all_s, facet:{ cat3:{${terms} type:terms, field:${cat_s}, sort:'min1 asc', limit:1, overrequest:0, refine:true, facet:{ min1:'min(${num_d})'} } }}" + // refinement needed through field facet - ",sum1:'sum(${num_d})'" + // make sure that root bucket stats aren't affected by refinement - "}" - ) - , "facets=={ count:8" + - ", cat0:{ buckets:[ {val:A,count:3, min1:-19.0} ] }" + // B wins in shard2, so we're missing the "A" count for that shard w/o refinement. 
- ", cat1:{ buckets:[ {val:A,count:4, min1:-19.0} ] }" + // with refinement, we get the right count - ", qfacet:{ count:8, cat2:{ buckets:[ {val:A,count:4, min1:-19.0} ] } }" + // just like the previous response, just nested under a query facet - ", allf:{ buckets:[ {cat3:{ buckets:[ {val:A,count:4, min1:-19.0} ] } ,count:8,val:all }] }" + // just like the previous response, just nested under a field facet - ", sum1:2.0" + - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + " cat0:{${terms} type:terms, field:${cat_s}, sort:'min1 asc', limit:1, overrequest:0, refine:false, facet:{ min1:'min(${num_d})'} }" + + ",cat1:{${terms} type:terms, field:${cat_s}, sort:'min1 asc', limit:1, overrequest:0, refine:true, facet:{ min1:'min(${num_d})'} }" + + ",qfacet:{type:query, q:'*:*', facet:{ cat2:{${terms} type:terms, field:${cat_s}, sort:'min1 asc', limit:1, overrequest:0, refine:true, facet:{ min1:'min(${num_d})'} } }}" + + // refinement needed through a query facet + ",allf:{${terms} type:terms, field:all_s, facet:{ cat3:{${terms} type:terms, field:${cat_s}, sort:'min1 asc', limit:1, overrequest:0, refine:true, facet:{ min1:'min(${num_d})'} } }}" + + // refinement needed through field facet + ",sum1:'sum(${num_d})'" + + // make sure that root bucket stats aren't affected by refinement + "}"), + "facets=={ count:8" + + ", cat0:{ buckets:[ {val:A,count:3, min1:-19.0} ] }" + + // B wins in shard2, so we're missing the "A" count for that shard w/o refinement. + ", cat1:{ buckets:[ {val:A,count:4, min1:-19.0} ] }" + + // with refinement, we get the right count + ", qfacet:{ count:8, cat2:{ buckets:[ {val:A,count:4, min1:-19.0} ] } }" + + // just like the previous response, just nested under a query facet + ", allf:{ buckets:[ {cat3:{ buckets:[ {val:A,count:4, min1:-19.0} ] } ,count:8,val:all }] }" + + // just like the previous response, just nested under a field facet + ", sum1:2.0" + + "}"); // test that SKG stat reflects merged refinement // results shouldn't care if we use the short or long syntax, or if we have a low min_pop - for (String s : Arrays.asList("'relatedness($fore,$back)'", - "{ type:func, func:'relatedness($fore,$back)' }", - "{ type:func, func:'relatedness($fore,$back)', min_popularity:0.2 }")) { - client.testJQ(params(p, "rows", "0", "q", "*:*", "fore", "${xy_s}:X", "back", "${num_d}:[0 TO 100]", - "json.facet", "{" - + " cat0:{ ${terms} type:terms, field: ${cat_s}, allBuckets:true, " - + " sort:'count desc', limit:1, overrequest:0, refine:true, " - + " facet:{ s:"+s+"} } }") - , "facets=={ count:8, cat0:{ " - // 's' key must not exist in the allBuckets bucket - + " allBuckets: { count:8 }" - + " buckets:[ " - + " { val:A, count:4, " - + " s : { relatedness: 0.00496, " - //+ " foreground_count: 3, " - //+ " foreground_size: 5, " - //+ " background_count: 2, " - //+ " background_size: 4, " - + " foreground_popularity: 0.75, " - + " background_popularity: 0.5, " - + " } } ] }" + - "}" - ); + for (String s : + Arrays.asList( + "'relatedness($fore,$back)'", + "{ type:func, func:'relatedness($fore,$back)' }", + "{ type:func, func:'relatedness($fore,$back)', min_popularity:0.2 }")) { + client.testJQ( + params( + p, + "rows", + "0", + "q", + "*:*", + "fore", + "${xy_s}:X", + "back", + "${num_d}:[0 TO 100]", + "json.facet", + "{" + + " cat0:{ ${terms} type:terms, field: ${cat_s}, allBuckets:true, " + + " sort:'count desc', limit:1, overrequest:0, refine:true, " + + " facet:{ s:" + + s + + "} } }"), + "facets=={ count:8, cat0:{ " + // 's' key must not exist in 
the allBuckets bucket + + " allBuckets: { count:8 }" + + " buckets:[ " + + " { val:A, count:4, " + + " s : { relatedness: 0.00496, " + // + " foreground_count: 3, " + // + " foreground_size: 5, " + // + " background_count: 2, " + // + " background_size: 4, " + + " foreground_popularity: 0.75, " + + " background_popularity: 0.5, " + + " } } ] }" + + "}"); } // same query with a high min_pop should result in a -Infinity relatedness score - client.testJQ(params(p, "rows", "0", "q", "*:*", "fore", "${xy_s}:X", "back", "${num_d}:[0 TO 100]", - "json.facet", "{" - + " cat0:{ ${terms} type:terms, field: ${cat_s}, allBuckets:true," - + " sort:'count desc', limit:1, overrequest:0, refine:true, " - + " facet:{ s:{ type:func, func:'relatedness($fore,$back)', " - + " min_popularity:0.6 } } } }") - , "facets=={ count:8, cat0:{ " - // 's' key must not exist in the allBuckets bucket - + " allBuckets: { count:8 }" - + " buckets:[ " - + " { val:A, count:4, " - + " s : { relatedness: '-Infinity', " - //+ " foreground_count: 3, " - //+ " foreground_size: 5, " - //+ " background_count: 2, " - //+ " background_size: 4, " - + " foreground_popularity: 0.75, " - + " background_popularity: 0.5, " - + " } } ] }" + - "}" - ); + client.testJQ( + params( + p, + "rows", + "0", + "q", + "*:*", + "fore", + "${xy_s}:X", + "back", + "${num_d}:[0 TO 100]", + "json.facet", + "{" + + " cat0:{ ${terms} type:terms, field: ${cat_s}, allBuckets:true," + + " sort:'count desc', limit:1, overrequest:0, refine:true, " + + " facet:{ s:{ type:func, func:'relatedness($fore,$back)', " + + " min_popularity:0.6 } } } }"), + "facets=={ count:8, cat0:{ " + // 's' key must not exist in the allBuckets bucket + + " allBuckets: { count:8 }" + + " buckets:[ " + + " { val:A, count:4, " + + " s : { relatedness: '-Infinity', " + // + " foreground_count: 3, " + // + " foreground_size: 5, " + // + " background_count: 2, " + // + " background_size: 4, " + + " foreground_popularity: 0.75, " + + " background_popularity: 0.5, " + + " } } ] }" + + "}"); // really special case: allBuckets when there are no regular buckets... 
for (String refine : Arrays.asList("", "refine: true,", "refine:false,")) { - client.testJQ(params(p, "rows", "0", "q", "*:*", "fore", "${xy_s}:X", "back", "${num_d}:[0 TO 100]", - "json.facet", "{" - + " cat0:{ ${terms} type:terms, field: bogus_field_s, allBuckets:true, " - + refine - + " facet:{ s:{ type:func, func:'relatedness($fore,$back)' } } } }") - , "facets=={ count:8, cat0:{ " - // 's' key must not exist in the allBuckets bucket - + " allBuckets: { count:0 }" - + " buckets:[ ]" - + "} }" - ); + client.testJQ( + params( + p, + "rows", + "0", + "q", + "*:*", + "fore", + "${xy_s}:X", + "back", + "${num_d}:[0 TO 100]", + "json.facet", + "{" + + " cat0:{ ${terms} type:terms, field: bogus_field_s, allBuckets:true, " + + refine + + " facet:{ s:{ type:func, func:'relatedness($fore,$back)' } } } }"), + "facets=={ count:8, cat0:{ " + // 's' key must not exist in the allBuckets bucket + + " allBuckets: { count:0 }" + + " buckets:[ ]" + + "} }"); } - // SKG under nested facet where some terms only exist on one shard - { + { // sub-bucket order should change as sort direction changes - final String jsonFacet = "" - + "{ processEmpty:true, " - + " cat0:{ ${terms} type:terms, field: ${cat_s}, " - + " sort:'count desc', limit:1, overrequest:0, refine:true, " - + " facet:{ processEmpty:true, " - + " qw1: { ${terms} type:terms, field: ${qw_s}, mincount:0, " - + " sort:'${skg_sort}', limit:100, overrequest:0, refine:true, " - + " facet:{ processEmpty:true, skg:'relatedness($fore,$back)' } } } } }"; - final String bucketQ = "" - + " { val:Q, count:1, " - + " skg : { relatedness: 1.0, " - + " foreground_popularity: 0.25, " - + " background_popularity: 0.0, " - // + " foreground_count: 1, " - // + " foreground_size: 3, " - // + " background_count: 0, " - // + " background_size: 4, " - + " } },"; - final String bucketW = "" - + " { val:W, count:1, " - + " skg : { relatedness: 0.0037, " - + " foreground_popularity: 0.25, " - + " background_popularity: 0.25, " - // + " foreground_count: 1, " - // + " foreground_size: 3, " - // + " background_count: 1, " - // + " background_size: 4, " - + " } },"; - - client.testJQ(params(p, "rows", "0", "q", "*:*", "fore", "${xy_s}:X", "back", "${num_d}:[0 TO 100]", - "skg_sort", "skg desc", "json.facet", jsonFacet) - , "facets=={ count:8, cat0:{ buckets:[ " - + " { val:A, count:4, " - + " qw1 : { buckets:[" - + bucketQ - + bucketW - + " ] } } ] } }"); - client.testJQ(params(p, "rows", "0", "q", "*:*", "fore", "${xy_s}:X", "back", "${num_d}:[0 TO 100]", - "skg_sort", "skg asc", "json.facet", jsonFacet) - , "facets=={ count:8, cat0:{ buckets:[ " - + " { val:A, count:4, " - + " qw1 : { buckets:[" - + bucketW - + bucketQ - + " ] } } ] } }"); + final String jsonFacet = + "" + + "{ processEmpty:true, " + + " cat0:{ ${terms} type:terms, field: ${cat_s}, " + + " sort:'count desc', limit:1, overrequest:0, refine:true, " + + " facet:{ processEmpty:true, " + + " qw1: { ${terms} type:terms, field: ${qw_s}, mincount:0, " + + " sort:'${skg_sort}', limit:100, overrequest:0, refine:true, " + + " facet:{ processEmpty:true, skg:'relatedness($fore,$back)' } } } } }"; + final String bucketQ = + "" + + " { val:Q, count:1, " + + " skg : { relatedness: 1.0, " + + " foreground_popularity: 0.25, " + + " background_popularity: 0.0, " + // + " foreground_count: 1, " + // + " foreground_size: 3, " + // + " background_count: 0, " + // + " background_size: 4, " + + " } },"; + final String bucketW = + "" + + " { val:W, count:1, " + + " skg : { relatedness: 0.0037, " + + " foreground_popularity: 0.25, 
" + + " background_popularity: 0.25, " + // + " foreground_count: 1, " + // + " foreground_size: 3, " + // + " background_count: 1, " + // + " background_size: 4, " + + " } },"; + + client.testJQ( + params( + p, + "rows", + "0", + "q", + "*:*", + "fore", + "${xy_s}:X", + "back", + "${num_d}:[0 TO 100]", + "skg_sort", + "skg desc", + "json.facet", + jsonFacet), + "facets=={ count:8, cat0:{ buckets:[ " + + " { val:A, count:4, " + + " qw1 : { buckets:[" + + bucketQ + + bucketW + + " ] } } ] } }"); + client.testJQ( + params( + p, + "rows", + "0", + "q", + "*:*", + "fore", + "${xy_s}:X", + "back", + "${num_d}:[0 TO 100]", + "skg_sort", + "skg asc", + "json.facet", + jsonFacet), + "facets=={ count:8, cat0:{ buckets:[ " + + " { val:A, count:4, " + + " qw1 : { buckets:[" + + bucketW + + bucketQ + + " ] } } ] } }"); } - + // test partial buckets (field facet within field facet) - client.testJQ(params(p, "q", "*:*", - "json.facet", "{" + - " ab:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, refine:true } }}" + - ",cd:{${terms} type:terms, field:${cat_i}, limit:1, overrequest:0, refine:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, refine:true } }}" + - ",ef:{${terms} type:terms, field:${date}, limit:1, overrequest:0, refine:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, refine:true } }}" + - "}" - ) - , "facets=={ count:8" + - ", ab:{ buckets:[ {val:A, count:4, xy:{buckets:[ {val:X,count:3}]} }] }" + // just like the previous response, just nested under a field facet - ", cd:{ buckets:[ {val:1, count:4, xy:{buckets:[ {val:X,count:3}]} }] }" + // just like the previous response, just nested under a field facet (int type) - ", ef:{ buckets:[ {val:'2001-01-01T01:01:01Z', count:4, xy:{buckets:[ {val:X,count:3}]} }] }" + // just like the previous response, just nested under a field facet (date type) - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + " ab:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, refine:true } }}" + + ",cd:{${terms} type:terms, field:${cat_i}, limit:1, overrequest:0, refine:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, refine:true } }}" + + ",ef:{${terms} type:terms, field:${date}, limit:1, overrequest:0, refine:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, refine:true } }}" + + "}"), + "facets=={ count:8" + + ", ab:{ buckets:[ {val:A, count:4, xy:{buckets:[ {val:X,count:3}]} }] }" + + // just like the previous response, just nested under a field facet + ", cd:{ buckets:[ {val:1, count:4, xy:{buckets:[ {val:X,count:3}]} }] }" + + // just like the previous response, just nested under a field facet (int type) + ", ef:{ buckets:[ {val:'2001-01-01T01:01:01Z', count:4, xy:{buckets:[ {val:X,count:3}]} }] }" + + // just like the previous response, just nested under a field facet (date type) + "}"); // test that sibling facets and stats are included for _p buckets, but skipped for _s buckets - client.testJQ(params(p, "q", "*:*", - "json.facet", "{" + - " ab :{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, refine:true}, qq:{query:'*:*'},ww:'sum(${num_d})' }}" + - ",ab2:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, 
refine:false, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, refine:true}, qq:{query:'*:*'},ww:'sum(${num_d})' }}" + // top level refine=false shouldn't matter - ",allf :{${terms} type:terms, field:all_s, limit:1, overrequest:0, refine:true, facet:{cat:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true}, qq:{query:'*:*'},ww:'sum(${num_d})' }}" + - ",allf2:{${terms} type:terms, field:all_s, limit:1, overrequest:0, refine:false, facet:{cat:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true}, qq:{query:'*:*'},ww:'sum(${num_d})' }}" + // top level refine=false shouldn't matter - "}" - ) - , "facets=={ count:8" + - ", ab:{ buckets:[ {val:A, count:4, xy:{buckets:[ {val:X,count:3}]} ,qq:{count:4}, ww:4.0 }] }" + // make sure qq and ww are included for _p buckets - ", allf:{ buckets:[ {count:8, val:all, cat:{buckets:[{val:A,count:4}]} ,qq:{count:8}, ww:2.0 }] }" + // make sure qq and ww are excluded (not calculated again in another phase) for _s buckets - ", ab2:{ buckets:[ {val:A, count:4, xy:{buckets:[ {val:X,count:3}]} ,qq:{count:4}, ww:4.0 }] }" + // make sure qq and ww are included for _p buckets - ", allf2:{ buckets:[ {count:8, val:all, cat:{buckets:[{val:A,count:4}]} ,qq:{count:8}, ww:2.0 }] }" + // make sure qq and ww are excluded (not calculated again in another phase) for _s buckets - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + " ab :{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, refine:true}, qq:{query:'*:*'},ww:'sum(${num_d})' }}" + + ",ab2:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:false, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, refine:true}, qq:{query:'*:*'},ww:'sum(${num_d})' }}" + + // top level refine=false shouldn't matter + ",allf :{${terms} type:terms, field:all_s, limit:1, overrequest:0, refine:true, facet:{cat:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true}, qq:{query:'*:*'},ww:'sum(${num_d})' }}" + + ",allf2:{${terms} type:terms, field:all_s, limit:1, overrequest:0, refine:false, facet:{cat:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true}, qq:{query:'*:*'},ww:'sum(${num_d})' }}" + + // top level refine=false shouldn't matter + "}"), + "facets=={ count:8" + + ", ab:{ buckets:[ {val:A, count:4, xy:{buckets:[ {val:X,count:3}]} ,qq:{count:4}, ww:4.0 }] }" + + // make sure qq and ww are included for _p buckets + ", allf:{ buckets:[ {count:8, val:all, cat:{buckets:[{val:A,count:4}]} ,qq:{count:8}, ww:2.0 }] }" + + // make sure qq and ww are excluded (not calculated again in another phase) for _s + // buckets + ", ab2:{ buckets:[ {val:A, count:4, xy:{buckets:[ {val:X,count:3}]} ,qq:{count:4}, ww:4.0 }] }" + + // make sure qq and ww are included for _p buckets + ", allf2:{ buckets:[ {count:8, val:all, cat:{buckets:[{val:A,count:4}]} ,qq:{count:8}, ww:2.0 }] }" + + // make sure qq and ww are excluded (not calculated again in another phase) for _s + // buckets + "}"); // test refining under the special "missing" bucket of a field facet - client.testJQ(params(p, "q", "*:*", - "json.facet", "{" + - "f:{${terms} type:terms, field:missing_s, limit:1, overrequest:0, missing:true, refine:true, facet:{ cat:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true } }}" + - "}" - ) - , "facets=={ count:8" + - ", f:{ buckets:[], missing:{count:8, 
cat:{buckets:[{val:A,count:4}]} } }" + // just like the previous response, just nested under a field facet - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + "f:{${terms} type:terms, field:missing_s, limit:1, overrequest:0, missing:true, refine:true, facet:{ cat:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true } }}" + + "}"), + "facets=={ count:8" + + ", f:{ buckets:[], missing:{count:8, cat:{buckets:[{val:A,count:4}]} } }" + + // just like the previous response, just nested under a field facet + "}"); // test filling in "missing" bucket for partially refined facets - client.testJQ(params(p, "q", "*:*", - "json.facet", "{" + - // test all values missing in sub-facet - " ab :{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:false, facet:{ zz:{${terms} type:terms, field:missing_s, limit:1, overrequest:0, refine:false, missing:true} }}" + - ",ab2:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , facet:{ zz:{${terms} type:terms, field:missing_s, limit:1, overrequest:0, refine:true , missing:true} }}" + - // test some values missing in sub-facet (and test that this works with normal partial bucket refinement) - ", cd :{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:false, facet:{ qw:{${terms} type:terms, field:${qw_s}, limit:1, overrequest:0, refine:false, missing:true, facet:{qq:{query:'*:*'}} } }}" + - ", cd2:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , facet:{ qw:{${terms} type:terms, field:${qw_s}, limit:1, overrequest:0, refine:true , missing:true, facet:{qq:{query:'*:*'}} } }}" + - - "}" - ) - , "facets=={ count:8" + - ", ab:{ buckets:[ {val:A, count:3, zz:{buckets:[], missing:{count:3}}}] }" + - ",ab2:{ buckets:[ {val:A, count:4, zz:{buckets:[], missing:{count:4}}}] }" + - ", cd:{ buckets:[ {val:A, count:3, qw:{buckets:[{val:Q, count:1, qq:{count:1}}], missing:{count:1,qq:{count:1}}}}] }" + - ",cd2:{ buckets:[ {val:A, count:4, qw:{buckets:[{val:Q, count:1, qq:{count:1}}], missing:{count:2,qq:{count:2}}}}] }" + - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + + // test all values missing in sub-facet + " ab :{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:false, facet:{ zz:{${terms} type:terms, field:missing_s, limit:1, overrequest:0, refine:false, missing:true} }}" + + ",ab2:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , facet:{ zz:{${terms} type:terms, field:missing_s, limit:1, overrequest:0, refine:true , missing:true} }}" + + + // test some values missing in sub-facet (and test that this works with normal + // partial bucket refinement) + ", cd :{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:false, facet:{ qw:{${terms} type:terms, field:${qw_s}, limit:1, overrequest:0, refine:false, missing:true, facet:{qq:{query:'*:*'}} } }}" + + ", cd2:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , facet:{ qw:{${terms} type:terms, field:${qw_s}, limit:1, overrequest:0, refine:true , missing:true, facet:{qq:{query:'*:*'}} } }}" + + "}"), + "facets=={ count:8" + + ", ab:{ buckets:[ {val:A, count:3, zz:{buckets:[], missing:{count:3}}}] }" + + ",ab2:{ buckets:[ {val:A, count:4, zz:{buckets:[], missing:{count:4}}}] }" + + ", cd:{ buckets:[ {val:A, count:3, qw:{buckets:[{val:Q, count:1, qq:{count:1}}], missing:{count:1,qq:{count:1}}}}] }" + + ",cd2:{ buckets:[ {val:A, count:4, qw:{buckets:[{val:Q, count:1, 
qq:{count:1}}], missing:{count:2,qq:{count:2}}}}] }" + + "}"); // test filling in missing "allBuckets" - client.testJQ(params(p, "q", "*:*", - "json.facet", "{" + - " cat0:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:false, allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:false} } }" + - ", cat1:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true, allBuckets:true, sort:'min asc', facet:{ min:'min(${num_d})' } }" + - ", cat2:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true } } }" + - ", cat3:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true , facet:{sum:'sum(${num_d})'} } } }" + - ", cat4:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true , sort:'sum asc', facet:{sum:'sum(${num_d})'} } } }" + - // using overrefine only so we aren't fooled by 'local maximum' and ask all shards for 'B' - ", cat5:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true, overrefine:2, allBuckets:true, sort:'min desc' facet:{ min:'min(${num_d})', xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true, facet:{sum:'sum(${num_d})'} } } }" + - "}" - ) - , "facets=={ count:8" + - ",cat0:{ allBuckets:{count:8}, buckets:[ {val:A, count:3, xy:{buckets:[{count:2, val:X}], allBuckets:{count:3}}}] }" + - ",cat1:{ allBuckets:{count:8, min:-19.0 }, buckets:[ {val:A, count:4, min:-19.0 }] }" + - ",cat2:{ allBuckets:{count:8}, buckets:[ {val:A, count:4, xy:{buckets:[{count:3, val:X}], allBuckets:{count:4}}}] }" + - ",cat3:{ allBuckets:{count:8}, buckets:[ {val:A, count:4, xy:{buckets:[{count:3, val:X, sum:23.0}], allBuckets:{count:4, sum:4.0}}}] }" + - ",cat4:{ allBuckets:{count:8}, buckets:[ {val:A, count:4, xy:{buckets:[{count:1, val:Y, sum:-19.0}], allBuckets:{count:4, sum:4.0}}}] }" + - ",cat5:{ allBuckets:{count:8, min:-19.0 }, buckets:[ {val:B, count:4, min:-11.0, xy:{buckets:[{count:2, val:X, sum:6.0}], allBuckets:{count:4, sum:-2.0}}}] }" + - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + " cat0:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:false, allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:false} } }" + + ", cat1:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true, allBuckets:true, sort:'min asc', facet:{ min:'min(${num_d})' } }" + + ", cat2:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true } } }" + + ", cat3:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true , facet:{sum:'sum(${num_d})'} } } }" + + ", cat4:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , allBuckets:true, facet:{ xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true , 
sort:'sum asc', facet:{sum:'sum(${num_d})'} } } }" + + + // using overrefine only so we aren't fooled by 'local maximum' and ask all shards + // for 'B' + ", cat5:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true, overrefine:2, allBuckets:true, sort:'min desc' facet:{ min:'min(${num_d})', xy:{${terms} type:terms, field:${xy_s}, limit:1, overrequest:0, allBuckets:true, refine:true, facet:{sum:'sum(${num_d})'} } } }" + + "}"), + "facets=={ count:8" + + ",cat0:{ allBuckets:{count:8}, buckets:[ {val:A, count:3, xy:{buckets:[{count:2, val:X}], allBuckets:{count:3}}}] }" + + ",cat1:{ allBuckets:{count:8, min:-19.0 }, buckets:[ {val:A, count:4, min:-19.0 }] }" + + ",cat2:{ allBuckets:{count:8}, buckets:[ {val:A, count:4, xy:{buckets:[{count:3, val:X}], allBuckets:{count:4}}}] }" + + ",cat3:{ allBuckets:{count:8}, buckets:[ {val:A, count:4, xy:{buckets:[{count:3, val:X, sum:23.0}], allBuckets:{count:4, sum:4.0}}}] }" + + ",cat4:{ allBuckets:{count:8}, buckets:[ {val:A, count:4, xy:{buckets:[{count:1, val:Y, sum:-19.0}], allBuckets:{count:4, sum:4.0}}}] }" + + ",cat5:{ allBuckets:{count:8, min:-19.0 }, buckets:[ {val:B, count:4, min:-11.0, xy:{buckets:[{count:2, val:X, sum:6.0}], allBuckets:{count:4, sum:-2.0}}}] }" + + "}"); // test filling in missing numBuckets - client.testJQ(params(p, "q", "*:*", - "json.facet", "{" + - " cat :{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:false, numBuckets:true, facet:{ er:{${terms} type:terms, field:${er_s}, limit:1, overrequest:0, numBuckets:true, refine:false} } }" + - ", cat2:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , numBuckets:true, facet:{ er:{${terms} type:terms, field:${er_s}, limit:1, overrequest:0, numBuckets:true, refine:true } } }" + - "}" - ) - , "facets=={ count:8" + - ", cat:{ numBuckets:2, buckets:[ {val:A, count:3, er:{numBuckets:1,buckets:[{count:2, val:E}] }}] }" + // the "R" bucket will not be seen w/o refinement - ",cat2:{ numBuckets:2, buckets:[ {val:A, count:4, er:{numBuckets:2,buckets:[{count:2, val:E}] }}] }" + - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + " cat :{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:false, numBuckets:true, facet:{ er:{${terms} type:terms, field:${er_s}, limit:1, overrequest:0, numBuckets:true, refine:false} } }" + + ", cat2:{${terms} type:terms, field:${cat_s}, limit:1, overrequest:0, refine:true , numBuckets:true, facet:{ er:{${terms} type:terms, field:${er_s}, limit:1, overrequest:0, numBuckets:true, refine:true } } }" + + "}"), + "facets=={ count:8" + + ", cat:{ numBuckets:2, buckets:[ {val:A, count:3, er:{numBuckets:1,buckets:[{count:2, val:E}] }}] }" + + // the "R" bucket will not be seen w/o refinement + ",cat2:{ numBuckets:2, buckets:[ {val:A, count:4, er:{numBuckets:2,buckets:[{count:2, val:E}] }}] }" + + "}"); final String sort_limit_over = "sort:'count desc', limit:1, overrequest:0, "; // simplistic join domain testing: no refinement == low count - client.testJQ(params(p, "q", "${xy_s}:Y", // query only matches one doc per shard - "json.facet", "{" + - " cat0:{${terms} type:terms, field:${cat_s}, " + sort_limit_over + " refine:false," + - // self join on all_s ensures every doc on every shard included in facets - " domain: { join: { from:all_s, to:all_s } } }" + - "}" - ) - , + client.testJQ( + params( + p, + "q", + "${xy_s}:Y", // query only matches one doc per shard + "json.facet", + "{" + + " cat0:{${terms} type:terms, field:${cat_s}, " + + sort_limit_over + 
+ " refine:false," + + + // self join on all_s ensures every doc on every shard included in facets + " domain: { join: { from:all_s, to:all_s } } }" + + "}"), "/response/numFound==3", - "facets=={ count:3, " + + "facets=={ count:3, " + + // w/o overrequest and refinement, count for 'A' is lower than it should be // (we don't see the A from the middle shard) " cat0:{ buckets:[ {val:A,count:3} ] } }"); // simplistic join domain testing: refinement == correct count - client.testJQ(params(p, "q", "${xy_s}:Y", // query only matches one doc per shard - "json.facet", "{" + - " cat0:{${terms} type:terms, field:${cat_s}, " + sort_limit_over + " refine:true," + - // self join on all_s ensures every doc on every shard included in facets - " domain: { join: { from:all_s, to:all_s } } }" + - "}" - ) - , + client.testJQ( + params( + p, + "q", + "${xy_s}:Y", // query only matches one doc per shard + "json.facet", + "{" + + " cat0:{${terms} type:terms, field:${cat_s}, " + + sort_limit_over + + " refine:true," + + + // self join on all_s ensures every doc on every shard included in facets + " domain: { join: { from:all_s, to:all_s } } }" + + "}"), "/response/numFound==3", - "facets=={ count:3," + + "facets=={ count:3," + + // w/o overrequest, we need refining to get the correct count for 'A'. " cat0:{ buckets:[ {val:A,count:4} ] } }"); // contrived join domain + refinement (at second level) + testing - client.testJQ(params(p, "q", "${xy_s}:Y", // query only matches one doc per shard - "json.facet", "{" + - // top level facet has a single term - " all:{${terms} type:terms, field:all_s, " + sort_limit_over + " refine:true, " + - " facet:{ " + - // subfacet will facet on cat after joining on all (so all docs should be included in subfacet) - " cat0:{${terms} type:terms, field:${cat_s}, " + sort_limit_over + " refine:true," + - " domain: { join: { from:all_s, to:all_s } } } } }" + - "}" - ) - , + client.testJQ( + params( + p, + "q", + "${xy_s}:Y", // query only matches one doc per shard + "json.facet", + "{" + + + // top level facet has a single term + " all:{${terms} type:terms, field:all_s, " + + sort_limit_over + + " refine:true, " + + " facet:{ " + + + // subfacet will facet on cat after joining on all (so all docs should be included + // in subfacet) + " cat0:{${terms} type:terms, field:${cat_s}, " + + sort_limit_over + + " refine:true," + + " domain: { join: { from:all_s, to:all_s } } } } }" + + "}"), "/response/numFound==3", - "facets=={ count:3," + + "facets=={ count:3," + + // all 3 docs matching base query have same 'all' value in top facet - " all:{ buckets:[ { val:all, count:3, " + + " all:{ buckets:[ { val:all, count:3, " + + // sub facet has refinement, so count for 'A' should be correct " cat0:{ buckets: [{val:A,count:4}] } } ] } }"); - } // end method loop } public void testIndexAscRefineConsistency() throws Exception { initServers(); final Client client = servers.getClient(random().nextInt()); - client.queryDefaults().set("shards", servers.getShards()).set("debugQuery", Boolean.toString(random().nextBoolean())); + client + .queryDefaults() + .set("shards", servers.getShards()) + .set("debugQuery", Boolean.toString(random().nextBoolean())); List clients = client.getClientProvider().all(); assertTrue(clients.size() >= 3); @@ -1484,36 +2004,48 @@ public void testIndexAscRefineConsistency() throws Exception { client.deleteByQuery("*:*", null); int id = 0; - + c0.add(sdoc("id", id++, "cat_s", "Z", "price_i", 10)); - + c1.add(sdoc("id", id++, "cat_s", "Z", "price_i", -5000)); - c1.add(sdoc("id", 
id++, "cat_s", "X", "price_i", 2, "child_s", "A" )); - - c2.add(sdoc("id", id++, "cat_s", "X", "price_i", 2, "child_s", "B" )); - c2.add(sdoc("id", id++, "cat_s", "X", "price_i", 2, "child_s", "C" )); - + c1.add(sdoc("id", id++, "cat_s", "X", "price_i", 2, "child_s", "A")); + + c2.add(sdoc("id", id++, "cat_s", "X", "price_i", 2, "child_s", "B")); + c2.add(sdoc("id", id++, "cat_s", "X", "price_i", 2, "child_s", "C")); + client.commit(); for (FacetField.FacetMethod m : FacetField.FacetMethod.values()) { - client.testJQ(params("q", "*:*", "rows", "0", "json.facet", "{" - + " cat : { type:terms, field:cat_s, limit:1, refine:true," - + " overrequest:0, " // to trigger parent refinement given small data set - + " sort:'sum desc', " - + " facet: { sum : 'sum(price_i)', " - + " child_"+m+" : { " - + " type:terms, field:child_s, limit:1, refine:true," - + " sort:'index asc', method:" + m.toString().toLowerCase(Locale.ROOT) + " } " - + " }} }" - ) - , "facets=={ count:5" - + ", cat:{buckets:[ { val:X, count:3, sum:6.0, " - + " child_"+m+":{buckets:[{val:B, count:1}]}}]}}" // * (see below) - ); + client.testJQ( + params( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{" + + " cat : { type:terms, field:cat_s, limit:1, refine:true," + + " overrequest:0, " // to trigger parent refinement given small data set + + " sort:'sum desc', " + + " facet: { sum : 'sum(price_i)', " + + " child_" + + m + + " : { " + + " type:terms, field:child_s, limit:1, refine:true," + + " sort:'index asc', method:" + + m.toString().toLowerCase(Locale.ROOT) + + " } " + + " }} }"), + "facets=={ count:5" + + ", cat:{buckets:[ { val:X, count:3, sum:6.0, " + + " child_" + + m + + ":{buckets:[{val:B, count:1}]}}]}}" // * (see below) + ); } // * NOTE: the intuitive value to return here would be "A"; but we're testing for _consistency_ // here, and artificially setting `overrequest:0`. With default overrequest, the intuitive - // "correct" behavior would indeed be achieved -- but we then wouldn't be triggering the behavior - // that this test is designed to evaluate. + // "correct" behavior would indeed be achieved -- but we then wouldn't be triggering the + // behavior that this test is designed to evaluate. 
} } diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java index 516b99b2430..cb11a707cdd 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java +++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java @@ -16,6 +16,8 @@ */ package org.apache.solr.search.facet; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import com.tdunning.math.stats.AVLTreeDigest; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; @@ -26,9 +28,6 @@ import java.util.Map; import java.util.Random; import java.util.concurrent.atomic.AtomicLong; - -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import com.tdunning.math.stats.AVLTreeDigest; import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.JSONTestUtil; import org.apache.solr.SolrTestCaseHS; @@ -50,10 +49,17 @@ // TestJsonFacetErrors for error case tests // TestJsonRangeFacets for range facet tests -@LuceneTestCase.SuppressCodecs({"Lucene3x","Lucene40","Lucene41","Lucene42","Lucene45","Appending"}) +@LuceneTestCase.SuppressCodecs({ + "Lucene3x", + "Lucene40", + "Lucene41", + "Lucene42", + "Lucene45", + "Appending" +}) public class TestJsonFacets extends SolrTestCaseHS { - - private static SolrInstances servers; // for distributed testing + + private static SolrInstances servers; // for distributed testing private static int origTableSize; private static FacetField.FacetMethod origDefaultFacetMethod; @@ -64,21 +70,20 @@ public static void beforeTests() throws Exception { JSONTestUtil.failRepeatedKeys = true; origTableSize = FacetFieldProcessorByHashDV.MAXIMUM_STARTING_TABLE_SIZE; - FacetFieldProcessorByHashDV.MAXIMUM_STARTING_TABLE_SIZE=2; // stress test resizing + FacetFieldProcessorByHashDV.MAXIMUM_STARTING_TABLE_SIZE = 2; // stress test resizing origDefaultFacetMethod = FacetField.FacetMethod.DEFAULT_METHOD; // instead of the following, see the constructor - //FacetField.FacetMethod.DEFAULT_METHOD = rand(FacetField.FacetMethod.values()); + // FacetField.FacetMethod.DEFAULT_METHOD = rand(FacetField.FacetMethod.values()); // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); - - initCore("solrconfig-tlog.xml","schema_latest.xml"); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); + + initCore("solrconfig-tlog.xml", "schema_latest.xml"); } - /** - * Start all servers for cluster if they don't already exist - */ + /** Start all servers for cluster if they don't already exist */ public static void initServers() throws Exception { if (servers == null) { servers = new SolrInstances(3, "solrconfig-tlog.xml", "schema_latest.xml"); @@ -90,7 +95,7 @@ public static void initServers() throws Exception { public static void afterTests() throws Exception { systemClearPropertySolrDisableUrlAllowList(); JSONTestUtil.failRepeatedKeys = false; - FacetFieldProcessorByHashDV.MAXIMUM_STARTING_TABLE_SIZE=origTableSize; + FacetFieldProcessorByHashDV.MAXIMUM_STARTING_TABLE_SIZE = origTableSize; FacetField.FacetMethod.DEFAULT_METHOD = origDefaultFacetMethod; if (servers != null) { servers.stop(); @@ -100,22 +105,23 @@ public static void afterTests() throws Exception { // tip: when debugging failures, change this variable to DEFAULT_METHOD // (or if only one method is problematic, set to that 
explicitly) - private static final FacetField.FacetMethod TEST_ONLY_ONE_FACET_METHOD - = null; // FacetField.FacetMethod.DEFAULT_METHOD; + private static final FacetField.FacetMethod TEST_ONLY_ONE_FACET_METHOD = + null; // FacetField.FacetMethod.DEFAULT_METHOD; @ParametersFactory public static Iterable<Object[]> parameters() { if (null != TEST_ONLY_ONE_FACET_METHOD) { - return Arrays.asList(new Object[] { TEST_ONLY_ONE_FACET_METHOD }); + return Arrays.asList(new Object[] {TEST_ONLY_ONE_FACET_METHOD}); } - + // wrap each enum val in an Object[] and return as Iterable - return () -> Arrays.stream(FacetField.FacetMethod.values()) - .map(it -> new Object[]{it}).iterator(); + return () -> + Arrays.stream(FacetField.FacetMethod.values()).map(it -> new Object[] {it}).iterator(); } public TestJsonFacets(FacetField.FacetMethod defMethod) { - FacetField.FacetMethod.DEFAULT_METHOD = defMethod; // note: the real default is restored in afterTests + FacetField.FacetMethod.DEFAULT_METHOD = + defMethod; // note: the real default is restored in afterTests } // attempt to reproduce https://github.com/Heliosearch/heliosearch/issues/33 @@ -128,34 +134,43 @@ public void testComplex() throws Exception { double price_low = 11000; double price_high = 100000; - ModifiableSolrParams p = params("make_s","make_s", "model_s","model_s", "price_low",Double.toString(price_low), "price_high",Double.toString(price_high)); + ModifiableSolrParams p = + params( + "make_s", + "make_s", + "model_s", + "model_s", + "price_low", + Double.toString(price_low), + "price_high", + Double.toString(price_high)); - - MacroExpander m = new MacroExpander( p.getMap() ); + MacroExpander m = new MacroExpander(p.getMap()); String make_s = m.expand("${make_s}"); String model_s = m.expand("${model_s}"); client.deleteByQuery("*:*", null); - int nDocs = 99; String[] makes = {"honda", "toyota", "ford", null}; Double[] prices = {10000.0, 30000.0, 50000.0, 0.0, null}; - String[] honda_models = {"accord", "civic", "fit", "pilot", null}; // make sure this is alphabetized to match tiebreaks in index + String[] honda_models = { + "accord", "civic", "fit", "pilot", null + }; // make sure this is alphabetized to match tiebreaks in index String[] other_models = {"z1", "z2", "z3", "z4", "z5", "z6", null}; int nHonda = 0; final int[] honda_model_counts = new int[honda_models.length]; - for (int i=0; i<nDocs; i++) { + for (int i = 0; i < nDocs; i++) { - boolean matches_price = price!=null && price >= price_low && price <= price_high; + boolean matches_price = price != null && price >= price_low && price <= price_high; String make = rand(makes); if (make != null) { @@ -173,18 +188,18 @@ public void testComplex() throws Exception { honda_model_counts[modelNum]++; } } else if (make == null) { - doc.addField(model_s, rand(honda_models)); // add some docs w/ model but w/o make + doc.addField(model_s, rand(honda_models)); // add some docs w/ model but w/o make } else { // other makes - doc.addField(model_s, rand(other_models)); // add some docs w/ model but w/o make + doc.addField(model_s, rand(other_models)); // add some docs w/ model but w/o make } client.add(doc, null); if (r.nextInt(10) == 0) { - client.add(doc, null); // dup, causing a delete + client.add(doc, null); // dup, causing a delete } if (r.nextInt(20) == 0) { - client.commit(); // force new seg + client.commit(); // force new seg } } @@ -192,50 +207,112 @@ public void testComplex() throws Exception { // now figure out top counts List<Integer> idx = new ArrayList<>(); - for (int i=0; i<honda_models.length; i++) { + for (int i = 0; i < honda_models.length; i++) { idx.add(i); } - Collections.sort(idx, (o1, o2) -> { - int cmp = honda_model_counts[o2] - honda_model_counts[o1]; - return cmp == 0 ? 
o1 - o2 : cmp; - }); - - + Collections.sort( + idx, + (o1, o2) -> { + int cmp = honda_model_counts[o2] - honda_model_counts[o1]; + return cmp == 0 ? o1 - o2 : cmp; + }); // straight query facets - client.testJQ(params(p, "q", "*:*", "rows","0", "fq","+${make_s}:honda +cost_f:[${price_low} TO ${price_high}]" - , "json.facet", "{makes:{terms:{field:${make_s}, facet:{models:{terms:{field:${model_s}, limit:2, mincount:0}}}}" - + "}}" - , "facet","true", "facet.pivot","make_s,model_s", "facet.limit", "2" - ) - , "facets=={count:" + nHonda + ", makes:{buckets:[{val:honda, count:" + nHonda + ", models:{buckets:[" - + "{val:" + honda_models[idx.get(0)] + ", count:" + honda_model_counts[idx.get(0)] + "}," - + "{val:" + honda_models[idx.get(1)] + ", count:" + honda_model_counts[idx.get(1)] + "}]}" - + "}]}}" - ); - - + client.testJQ( + params( + p, + "q", + "*:*", + "rows", + "0", + "fq", + "+${make_s}:honda +cost_f:[${price_low} TO ${price_high}]", + "json.facet", + "{makes:{terms:{field:${make_s}, facet:{models:{terms:{field:${model_s}, limit:2, mincount:0}}}}" + + "}}", + "facet", + "true", + "facet.pivot", + "make_s,model_s", + "facet.limit", + "2"), + "facets=={count:" + + nHonda + + ", makes:{buckets:[{val:honda, count:" + + nHonda + + ", models:{buckets:[" + + "{val:" + + honda_models[idx.get(0)] + + ", count:" + + honda_model_counts[idx.get(0)] + + "}," + + "{val:" + + honda_models[idx.get(1)] + + ", count:" + + honda_model_counts[idx.get(1)] + + "}]}" + + "}]}}"); } - public void indexSimple(Client client) throws Exception { client.deleteByQuery("*:*", null); - client.add(sdoc("id", "1", "cat_s", "A", "where_s", "NY", "num_d", "4", "num_i", "2", - "num_is", "4", "num_is", "2", - "val_b", "true", "sparse_s", "one"), null); - client.add(sdoc("id", "2", "cat_s", "B", "where_s", "NJ", "num_d", "-9", "num_i", "-5", - "num_is", "-9", "num_is", "-5", - "val_b", "false"), null); + client.add( + sdoc( + "id", + "1", + "cat_s", + "A", + "where_s", + "NY", + "num_d", + "4", + "num_i", + "2", + "num_is", + "4", + "num_is", + "2", + "val_b", + "true", + "sparse_s", + "one"), + null); + client.add( + sdoc( + "id", "2", "cat_s", "B", "where_s", "NJ", "num_d", "-9", "num_i", "-5", "num_is", "-9", + "num_is", "-5", "val_b", "false"), + null); client.add(sdoc("id", "3"), null); client.commit(); - client.add(sdoc("id", "4", "cat_s", "A", "where_s", "NJ", "num_d", "2", "num_i", "3", - "num_is", "2", "num_is", "3"), null); - client.add(sdoc("id", "5", "cat_s", "B", "where_s", "NJ", "num_d", "11", "num_i", "7", - "num_is", "11", "num_is", "7", - "sparse_s", "two"),null); + client.add( + sdoc( + "id", "4", "cat_s", "A", "where_s", "NJ", "num_d", "2", "num_i", "3", "num_is", "2", + "num_is", "3"), + null); + client.add( + sdoc( + "id", + "5", + "cat_s", + "B", + "where_s", + "NJ", + "num_d", + "11", + "num_i", + "7", + "num_is", + "11", + "num_is", + "7", + "sparse_s", + "two"), + null); client.commit(); - client.add(sdoc("id", "6", "cat_s", "B", "where_s", "NY", "num_d", "-5", "num_i", "-5", - "num_is", "-5"),null); + client.add( + sdoc( + "id", "6", "cat_s", "B", "where_s", "NY", "num_d", "-5", "num_i", "-5", "num_is", "-5"), + null); client.commit(); } @@ -244,32 +321,38 @@ public void testMultiValuedBucketReHashing() throws Exception { client.deleteByQuery("*:*", null); // we want a domain with a small number of documents, and more facet (point) values then docs so // that we force dvhash to increase the number of slots via resize... 
- // (NOTE: normal resizing won't happen w/o at least 1024 slots, but test static overrides this to '2') - client.add(sdoc("id", "1", - "f_sd", "qqq", - "f_ids", "4", "f_ids", "2", "f_ids", "999", - "x_ids", "3", "x_ids", "5", "x_ids", "7", - "z_ids", "42"), null); - client.add(sdoc("id", "2", - "f_sd", "nnn", - "f_ids", "44", "f_ids", "22", "f_ids", "999", - "x_ids", "33", "x_ids", "55", "x_ids", "77", - "z_ids", "666"), null); - client.add(sdoc("id", "3", - "f_sd", "ggg", - "f_ids", "444", "f_ids", "222", "f_ids", "999", - "x_ids", "333", "x_ids", "555", "x_ids", "777", - "z_ids", "1010101"), null); + // (NOTE: normal resizing won't happen w/o at least 1024 slots, but test static overrides this + // to '2') + client.add( + sdoc( + "id", "1", "f_sd", "qqq", "f_ids", "4", "f_ids", "2", "f_ids", "999", "x_ids", "3", + "x_ids", "5", "x_ids", "7", "z_ids", "42"), + null); + client.add( + sdoc( + "id", "2", "f_sd", "nnn", "f_ids", "44", "f_ids", "22", "f_ids", "999", "x_ids", "33", + "x_ids", "55", "x_ids", "77", "z_ids", "666"), + null); + client.add( + sdoc( + "id", "3", "f_sd", "ggg", "f_ids", "444", "f_ids", "222", "f_ids", "999", "x_ids", + "333", "x_ids", "555", "x_ids", "777", "z_ids", "1010101"), + null); client.commit(); // faceting on a multivalued point field sorting on a stat... - assertJQ(req("rows", "0", "q", "id:[1 TO 2]", "json.facet" - , "{ f : { type: terms, field: f_ids, limit: 1, sort: 'x desc', " - + " facet: { x : 'sum(x_ids)', z : 'min(z_ids)' } } }") - , "response/numFound==2" - , "facets/count==2" - , "facets/f=={buckets:[{ val:999, count:2, x:180.0, z:42 }]}" - ); + assertJQ( + req( + "rows", + "0", + "q", + "id:[1 TO 2]", + "json.facet", + "{ f : { type: terms, field: f_ids, limit: 1, sort: 'x desc', " + + " facet: { x : 'sum(x_ids)', z : 'min(z_ids)' } } }"), + "response/numFound==2", + "facets/count==2", + "facets/f=={buckets:[{ val:999, count:2, x:180.0, z:42 }]}"); } public void testBehaviorEquivilenceOfUninvertibleFalse() throws Exception { @@ -281,11 +364,19 @@ public void testBehaviorEquivilenceOfUninvertibleFalse() throws Exception { // // it should behave the same as any attempt (using any method) at faceting on // and "indexed=false docValues=false" field... - for (String f : Arrays.asList("where_s_not_indexed_sS", - "where_s_multi_not_uninvert", - "where_s_single_not_uninvert")) { - SolrQueryRequest request = req("rows", "0", "q", "num_i:[* TO 2]", "json.facet", - "{x: {type:terms, field:'"+f+"'}}"); + for (String f : + Arrays.asList( + "where_s_not_indexed_sS", + "where_s_multi_not_uninvert", + "where_s_single_not_uninvert")) { + SolrQueryRequest request = + req( + "rows", + "0", + "q", + "num_i:[* TO 2]", + "json.facet", + "{x: {type:terms, field:'" + f + "'}}"); if (FacetField.FacetMethod.DEFAULT_METHOD == FacetField.FacetMethod.DVHASH && !f.contains("multi")) { // DVHASH is (currently) weird... @@ -293,20 +384,17 @@ public void testBehaviorEquivilenceOfUninvertibleFalse() throws Exception { // it's ignored for multi valued fields -- but for single valued fields, it explicitly // checks the *FieldInfos* on the reader to see if the DocVals type is ok. // - // Which means that unlike most other facet method:xxx options, it fails hard if you try to use it - // on a field where no docs have been indexed (yet). - expectThrows(SolrException.class, () ->{ - assertJQ(request); - }); - + // Which means that unlike most other facet method:xxx options, it fails hard if you try to + // use it on a field where no docs have been indexed (yet). 
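For readers unfamiliar with the idiom in the code that follows: expectThrows comes from LuceneTestCase and returns the caught exception for further inspection. A hedged sketch of the pattern, using a hypothetical field name:

// Sketch of the expectThrows pattern: the lambda must throw, otherwise the test fails.
SolrException e =
    expectThrows(
        SolrException.class,
        () -> {
          assertJQ(
              req(
                  "q", "*:*",
                  "json.facet",
                  "{x: {type: terms, field: 'hypothetical_empty_field', method: dvhash}}"));
        });
// The returned exception can then be inspected (e.g. via e.code()); the exact
// message of the DVHASH failure mode is deliberately not asserted here.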
+ expectThrows( + SolrException.class, + () -> { + assertJQ(request); + }); + } else { // In most cases, we should just get no buckets back... - assertJQ(request - , "response/numFound==3" - , "facets/count==3" - , "facets/x=={buckets:[]}" - - ); + assertJQ(request, "response/numFound==3", "facets/count==3", "facets/x=={buckets:[]}"); } } @@ -314,99 +402,146 @@ public void testBehaviorEquivilenceOfUninvertibleFalse() throws Exception { // faceting on an "uninvertible=false docValues=true" field should work, // // it should behave equivilently to it's copyField source... - for (String f : Arrays.asList("where_s", - "where_s_multi_not_uninvert_dv", - "where_s_single_not_uninvert_dv")) { - assertJQ(req("rows", "0", "q", "num_i:[* TO 2]", "json.facet", - "{x: {type:terms, field:'"+f+"'}}") - , "response/numFound==3" - , "facets/count==3" - , "facets/x=={buckets:[ {val:NY, count:2} , {val:NJ, count:1} ]}" - ); + for (String f : + Arrays.asList( + "where_s", "where_s_multi_not_uninvert_dv", "where_s_single_not_uninvert_dv")) { + assertJQ( + req( + "rows", + "0", + "q", + "num_i:[* TO 2]", + "json.facet", + "{x: {type:terms, field:'" + f + "'}}"), + "response/numFound==3", + "facets/count==3", + "facets/x=={buckets:[ {val:NY, count:2} , {val:NJ, count:1} ]}"); } - + // faceting on an "uninvertible=false docValues=false" field should be possible // when using method:enum w/sort:index // // it should behave equivilent to it's copyField source... - for (String f : Arrays.asList("where_s", - "where_s_multi_not_uninvert", - "where_s_single_not_uninvert")) { - assertJQ(req("rows", "0", "q", "num_i:[* TO 2]", "json.facet", - "{x: {type:terms, sort:'index asc', method:enum, field:'"+f+"'}}") - , "response/numFound==3" - , "facets/count==3" - , "facets/x=={buckets:[ {val:NJ, count:1} , {val:NY, count:2} ]}" - ); + for (String f : + Arrays.asList("where_s", "where_s_multi_not_uninvert", "where_s_single_not_uninvert")) { + assertJQ( + req( + "rows", + "0", + "q", + "num_i:[* TO 2]", + "json.facet", + "{x: {type:terms, sort:'index asc', method:enum, field:'" + f + "'}}"), + "response/numFound==3", + "facets/count==3", + "facets/x=={buckets:[ {val:NJ, count:1} , {val:NY, count:2} ]}"); } } - - @Test public void testExplicitQueryDomain() throws Exception { Client client = Client.localClient(); indexSimple(client); { // simple 'query' domain - + // the facet buckets for all of the requests below should be identical // only the numFound & top level facet count should differ - final String expectedFacets - = "facets/w=={ buckets:[" - + " { val:'NJ', count:2}, " - + " { val:'NY', count:1} ] }"; - - assertJQ(req("rows", "0", "q", "cat_s:B", "json.facet", - "{w: {type:terms, field:'where_s'}}"), - "response/numFound==3", - "facets/count==3", - expectedFacets); - assertJQ(req("rows", "0", "q", "id:3", "json.facet", - "{w: {type:terms, field:'where_s', domain: { query:'cat_s:B' }}}"), - "response/numFound==1", - "facets/count==1", - expectedFacets); - assertJQ(req("rows", "0", "q", "*:*", "fq", "-*:*", "json.facet", - "{w: {type:terms, field:'where_s', domain: { query:'cat_s:B' }}}"), - "response/numFound==0", - "facets/count==0", - expectedFacets); - assertJQ(req("rows", "0", "q", "*:*", "fq", "-*:*", "domain_q", "cat_s:B", "json.facet", - "{w: {type:terms, field:'where_s', domain: { query:{param:domain_q} }}}"), - "response/numFound==0", - "facets/count==0", - expectedFacets); + final String expectedFacets = + "facets/w=={ buckets:[" + " { val:'NJ', count:2}, " + " { val:'NY', count:1} ] }"; + + assertJQ( + 
req("rows", "0", "q", "cat_s:B", "json.facet", "{w: {type:terms, field:'where_s'}}"), + "response/numFound==3", + "facets/count==3", + expectedFacets); + assertJQ( + req( + "rows", + "0", + "q", + "id:3", + "json.facet", + "{w: {type:terms, field:'where_s', domain: { query:'cat_s:B' }}}"), + "response/numFound==1", + "facets/count==1", + expectedFacets); + assertJQ( + req( + "rows", + "0", + "q", + "*:*", + "fq", + "-*:*", + "json.facet", + "{w: {type:terms, field:'where_s', domain: { query:'cat_s:B' }}}"), + "response/numFound==0", + "facets/count==0", + expectedFacets); + assertJQ( + req( + "rows", + "0", + "q", + "*:*", + "fq", + "-*:*", + "domain_q", + "cat_s:B", + "json.facet", + "{w: {type:terms, field:'where_s', domain: { query:{param:domain_q} }}}"), + "response/numFound==0", + "facets/count==0", + expectedFacets); } - + { // a nested explicit query domain // for all of the "top" buckets, the subfacet should have identical sub-buckets final String expectedSubBuckets = "{ buckets:[ { val:'B', count:3}, { val:'A', count:2} ] }"; - assertJQ(req("rows", "0", "q", "num_i:[0 TO *]", "json.facet", - "{w: {type:terms, field:'where_s', " + - " facet: { c: { type:terms, field:'cat_s', domain: { query:'*:*' }}}}}") - , "facets/w=={ buckets:[" - + " { val:'NJ', count:2, c: " + expectedSubBuckets + "}, " - + " { val:'NY', count:1, c: " + expectedSubBuckets + "} " - + "] }" - ); + assertJQ( + req( + "rows", + "0", + "q", + "num_i:[0 TO *]", + "json.facet", + "{w: {type:terms, field:'where_s', " + + " facet: { c: { type:terms, field:'cat_s', domain: { query:'*:*' }}}}}"), + "facets/w=={ buckets:[" + + " { val:'NJ', count:2, c: " + + expectedSubBuckets + + "}, " + + " { val:'NY', count:1, c: " + + expectedSubBuckets + + "} " + + "] }"); } { // an (effectively) empty query should produce an error ignoreException("'query' domain can not be null"); ignoreException("'query' domain must not evaluate to an empty list"); for (String raw : Arrays.asList("null", "[ ]", "{param:bogus}")) { - expectThrows(SolrException.class, () -> { - assertJQ(req("rows", "0", "q", "num_i:[0 TO *]", "json.facet", - "{w: {type:terms, field:'where_s', " + - " facet: { c: { type:terms, field:'cat_s', domain: { query: "+raw+" }}}}}")); - }); + expectThrows( + SolrException.class, + () -> { + assertJQ( + req( + "rows", + "0", + "q", + "num_i:[0 TO *]", + "json.facet", + "{w: {type:terms, field:'where_s', " + + " facet: { c: { type:terms, field:'cat_s', domain: { query: " + + raw + + " }}}}}")); + }); } } } - @Test public void testSimpleSKG() throws Exception { Client client = Client.localClient(); @@ -414,140 +549,197 @@ public void testSimpleSKG() throws Exception { // using relatedness() as a top level stat, not nested under any facet // (not particularly useful, but shouldn't error either) - assertJQ(req("q", "cat_s:[* TO *]", "rows", "0", - "fore", "where_s:NY", "back", "*:*", - "json.facet", " { skg: 'relatedness($fore,$back)' }") - , "facets=={" - + " count:5, " - + " skg : { relatedness: 0.00699," - + " foreground_popularity: 0.33333," - + " background_popularity: 0.83333," - + " } }" - ); - + assertJQ( + req( + "q", + "cat_s:[* TO *]", + "rows", + "0", + "fore", + "where_s:NY", + "back", + "*:*", + "json.facet", + " { skg: 'relatedness($fore,$back)' }"), + "facets=={" + + " count:5, " + + " skg : { relatedness: 0.00699," + + " foreground_popularity: 0.33333," + + " background_popularity: 0.83333," + + " } }"); + // simple single level facet w/skg stat & (re)sorting - for (String sort : Arrays.asList("sort:'index asc'", - 
"sort:'y desc'", - "sort:'z desc'", - "sort:'skg desc'", - "prelim_sort:'count desc', sort:'index asc'", - "prelim_sort:'count desc', sort:'y desc'", - "prelim_sort:'count desc', sort:'z desc'", - "prelim_sort:'count desc', sort:'skg desc'")) { - // the relatedness score of each of our cat_s values is (conviniently) also alphabetical order, - // (and the same order as 'sum(num_i) desc' & 'min(num_i) desc') + for (String sort : + Arrays.asList( + "sort:'index asc'", + "sort:'y desc'", + "sort:'z desc'", + "sort:'skg desc'", + "prelim_sort:'count desc', sort:'index asc'", + "prelim_sort:'count desc', sort:'y desc'", + "prelim_sort:'count desc', sort:'z desc'", + "prelim_sort:'count desc', sort:'skg desc'")) { + // the relatedness score of each of our cat_s values is (conviniently) also alphabetical + // order, (and the same order as 'sum(num_i) desc' & 'min(num_i) desc') // - // So all of these re/sort options should produce identical output (since the num buckets is < limit) - // - Testing "index" sort allows the randomized use of "stream" processor as default to be tested. - // - Testing (re)sorts on other stats sanity checks code paths where relatedness() is a "defered" Agg + // So all of these re/sort options should produce identical output (since the num buckets is < + // limit) + // - Testing "index" sort allows the randomized use of "stream" processor as default to be + // tested. + // - Testing (re)sorts on other stats sanity checks code paths where relatedness() is a + // "defered" Agg for (String limit : Arrays.asList(", ", ", limit:5, ", ", limit:-1, ")) { // results shouldn't change regardless of our limit param" - assertJQ(req("q", "cat_s:[* TO *]", "rows", "0", - "fore", "where_s:NY", "back", "*:*", - "json.facet", "" - + "{x: { type: terms, field: 'cat_s', "+sort + limit - + " facet: { skg: 'relatedness($fore,$back)', y:'sum(num_i)', z:'min(num_i)' } } }") - , "facets=={count:5, x:{ buckets:[" - + " { val:'A', count:2, y:5.0, z:2, " - + " skg : { relatedness: 0.00554, " - //+ " foreground_count: 1, " - //+ " foreground_size: 2, " - //+ " background_count: 2, " - //+ " background_size: 6," - + " foreground_popularity: 0.16667," - + " background_popularity: 0.33333, }," - + " }, " - + " { val:'B', count:3, y:-3.0, z:-5, " - + " skg : { relatedness: 0.0, " // perfectly average and uncorrolated - //+ " foreground_count: 1, " - //+ " foreground_size: 2, " - //+ " background_count: 3, " - //+ " background_size: 6," - + " foreground_popularity: 0.16667," - + " background_popularity: 0.5 }," - + " } ] } } " - ); - // same query with a prefix of 'B' should produce only a single bucket with exact same results - assertJQ(req("q", "cat_s:[* TO *]", "rows", "0", - "fore", "where_s:NY", "back", "*:*", - "json.facet", "" - + "{x: { type: terms, field: 'cat_s', prefix:'B', "+sort + limit - + " facet: { skg: 'relatedness($fore,$back)', y:'sum(num_i)', z:'min(num_i)' } } }") - , "facets=={count:5, x:{ buckets:[" - + " { val:'B', count:3, y:-3.0, z:-5, " - + " skg : { relatedness: 0.0, " // perfectly average and uncorrolated - //+ " foreground_count: 1, " - //+ " foreground_size: 2, " - //+ " background_count: 3, " - //+ " background_size: 6," - + " foreground_popularity: 0.16667," - + " background_popularity: 0.5 }," - + " } ] } } " - ); + assertJQ( + req( + "q", + "cat_s:[* TO *]", + "rows", + "0", + "fore", + "where_s:NY", + "back", + "*:*", + "json.facet", + "" + + "{x: { type: terms, field: 'cat_s', " + + sort + + limit + + " facet: { skg: 'relatedness($fore,$back)', y:'sum(num_i)', 
z:'min(num_i)' } } }"), + "facets=={count:5, x:{ buckets:[" + + " { val:'A', count:2, y:5.0, z:2, " + + " skg : { relatedness: 0.00554, " + // + " foreground_count: 1, " + // + " foreground_size: 2, " + // + " background_count: 2, " + // + " background_size: 6," + + " foreground_popularity: 0.16667," + + " background_popularity: 0.33333, }," + + " }, " + + " { val:'B', count:3, y:-3.0, z:-5, " + + " skg : { relatedness: 0.0, " // perfectly average and uncorrolated + // + " foreground_count: 1, " + // + " foreground_size: 2, " + // + " background_count: 3, " + // + " background_size: 6," + + " foreground_popularity: 0.16667," + + " background_popularity: 0.5 }," + + " } ] } } "); + // same query with a prefix of 'B' should produce only a single bucket with exact same + // results + assertJQ( + req( + "q", + "cat_s:[* TO *]", + "rows", + "0", + "fore", + "where_s:NY", + "back", + "*:*", + "json.facet", + "" + + "{x: { type: terms, field: 'cat_s', prefix:'B', " + + sort + + limit + + " facet: { skg: 'relatedness($fore,$back)', y:'sum(num_i)', z:'min(num_i)' } } }"), + "facets=={count:5, x:{ buckets:[" + + " { val:'B', count:3, y:-3.0, z:-5, " + + " skg : { relatedness: 0.0, " // perfectly average and uncorrolated + // + " foreground_count: 1, " + // + " foreground_size: 2, " + // + " background_count: 3, " + // + " background_size: 6," + + " foreground_popularity: 0.16667," + + " background_popularity: 0.5 }," + + " } ] } } "); } } // relatedness shouldn't be computed for allBuckets, but it also shouldn't cause any problems - for (String sort : Arrays.asList("sort:'y desc'", - "sort:'z desc'", - "sort:'skg desc'", - "sort:'index asc'", - "prelim_sort:'count desc', sort:'skg desc'")) { - // the relatedness score of each of our cat_s values is (conveniently) also alphabetical order, - // (and the same order as 'sum(num_i) desc' & 'min(num_i) desc') + for (String sort : + Arrays.asList( + "sort:'y desc'", + "sort:'z desc'", + "sort:'skg desc'", + "sort:'index asc'", + "prelim_sort:'count desc', sort:'skg desc'")) { + // the relatedness score of each of our cat_s values is (conveniently) also alphabetical + // order, (and the same order as 'sum(num_i) desc' & 'min(num_i) desc') // - // So all of these re/sort options should produce identical output (since the num buckets is < limit) - // - Testing "index" sort allows the randomized use of "stream" processor as default to be tested. - // - Testing (re)sorts on other stats sanity checks code paths where relatedness() is a "deferred" Agg + // So all of these re/sort options should produce identical output (since the num buckets is < + // limit) + // - Testing "index" sort allows the randomized use of "stream" processor as default to be + // tested. 
+ // - Testing (re)sorts on other stats sanity checks code paths where relatedness() is a + // "deferred" Agg for (String limit : Arrays.asList(", ", ", limit:5, ", ", limit:-1, ")) { // results shouldn't change regardless of our limit param" - assertJQ(req("q", "cat_s:[* TO *]", "rows", "0", - "fore", "where_s:NY", "back", "*:*", - "json.facet", "" - + "{x: { type: terms, field: 'cat_s', allBuckets:true, "+sort + limit - + " facet: { skg: 'relatedness($fore,$back)', y:'sum(num_i)', z:'min(num_i)' } } }") - , "facets=={count:5, x:{ " - // 'skg' key must not exist in th allBuckets bucket - + " allBuckets: { count:5, y:2.0, z:-5 }," - + "buckets:[" - + " { val:'A', count:2, y:5.0, z:2, " - + " skg : { relatedness: 0.00554, " - //+ " foreground_count: 1, " - //+ " foreground_size: 2, " - //+ " background_count: 2, " - //+ " background_size: 6," - + " foreground_popularity: 0.16667," - + " background_popularity: 0.33333, }," - + " }, " - + " { val:'B', count:3, y:-3.0, z:-5, " - + " skg : { relatedness: 0.0, " // perfectly average and uncorrelated - //+ " foreground_count: 1, " - //+ " foreground_size: 2, " - //+ " background_count: 3, " - //+ " background_size: 6," - + " foreground_popularity: 0.16667," - + " background_popularity: 0.5 }," - + " } ] } } " - ); - + assertJQ( + req( + "q", + "cat_s:[* TO *]", + "rows", + "0", + "fore", + "where_s:NY", + "back", + "*:*", + "json.facet", + "" + + "{x: { type: terms, field: 'cat_s', allBuckets:true, " + + sort + + limit + + " facet: { skg: 'relatedness($fore,$back)', y:'sum(num_i)', z:'min(num_i)' } } }"), + "facets=={count:5, x:{ " + // 'skg' key must not exist in th allBuckets bucket + + " allBuckets: { count:5, y:2.0, z:-5 }," + + "buckets:[" + + " { val:'A', count:2, y:5.0, z:2, " + + " skg : { relatedness: 0.00554, " + // + " foreground_count: 1, " + // + " foreground_size: 2, " + // + " background_count: 2, " + // + " background_size: 6," + + " foreground_popularity: 0.16667," + + " background_popularity: 0.33333, }," + + " }, " + + " { val:'B', count:3, y:-3.0, z:-5, " + + " skg : { relatedness: 0.0, " // perfectly average and uncorrelated + // + " foreground_count: 1, " + // + " foreground_size: 2, " + // + " background_count: 3, " + // + " background_size: 6," + + " foreground_popularity: 0.16667," + + " background_popularity: 0.5 }," + + " } ] } } "); + // really special case: allBuckets when there are no regular buckets... - assertJQ(req("q", "cat_s:[* TO *]", "rows", "0", - "fore", "where_s:NY", "back", "*:*", - "json.facet", "" - + "{x: { type: terms, field: 'bogus_field_s', allBuckets:true, "+sort + limit - + " facet: { skg: 'relatedness($fore,$back)', y:'sum(num_i)', z:'min(num_i)' } } }") - , "facets=={count:5, x:{ " - // 'skg' key (as well as 'z' since it's a min) must not exist in the allBuckets bucket - + " allBuckets: { count:0, y:0.0 }," - + "buckets:[ ]" - + " } } " - ); - - + assertJQ( + req( + "q", + "cat_s:[* TO *]", + "rows", + "0", + "fore", + "where_s:NY", + "back", + "*:*", + "json.facet", + "" + + "{x: { type: terms, field: 'bogus_field_s', allBuckets:true, " + + sort + + limit + + " facet: { skg: 'relatedness($fore,$back)', y:'sum(num_i)', z:'min(num_i)' } } }"), + "facets=={count:5, x:{ " + // 'skg' key (as well as 'z' since it's a min) must not exist in the allBuckets + // bucket + + " allBuckets: { count:0, y:0.0 }," + + "buckets:[ ]" + + " } } "); } } - // trivial sanity check that we can (re)sort on SKG after pre-sorting on count... 
// ...and it's only computed for the top N buckets (based on our pre-sort) for (int overrequest : Arrays.asList(0, 1, 42)) { @@ -555,278 +747,368 @@ public void testSimpleSKG() throws Exception { // overrequest values ... only DebugAgg stats should change... DebugAgg.Acc.collectDocs.set(0); DebugAgg.Acc.collectDocSets.set(0); - - assertJQ(req("q", "cat_s:[* TO *]", "rows", "0", - "fore", "where_s:NJ", "back", "*:*", - "json.facet", "" - + "{x: { type: terms, field: 'cat_s', prelim_sort: 'count desc', sort:'skg desc', " - + " limit: 1, overrequest: " + overrequest + ", " - + " facet: { skg: 'debug(wrap,relatedness($fore,$back))' } } }") - , "facets=={count:5, x:{ buckets:[" - + " { val:'B', count:3, " - + " skg : { relatedness: 0.00638, " - //+ " foreground_count: 2, " - //+ " foreground_size: 3, " - //+ " background_count: 3, " - //+ " background_size: 6," - + " foreground_popularity: 0.33333," - + " background_popularity: 0.5 }," - + " }, " - + " ] } } " - ); + + assertJQ( + req( + "q", + "cat_s:[* TO *]", + "rows", + "0", + "fore", + "where_s:NJ", + "back", + "*:*", + "json.facet", + "" + + "{x: { type: terms, field: 'cat_s', prelim_sort: 'count desc', sort:'skg desc', " + + " limit: 1, overrequest: " + + overrequest + + ", " + + " facet: { skg: 'debug(wrap,relatedness($fore,$back))' } } }"), + "facets=={count:5, x:{ buckets:[" + + " { val:'B', count:3, " + + " skg : { relatedness: 0.00638, " + // + " foreground_count: 2, " + // + " foreground_size: 3, " + // + " background_count: 3, " + // + " background_size: 6," + + " foreground_popularity: 0.33333," + + " background_popularity: 0.5 }," + + " }, " + + " ] } } "); // at most 2 buckets, regardless of overrequest... assertEqualsAndReset(0 < overrequest ? 2 : 1, DebugAgg.Acc.collectDocSets); assertEqualsAndReset(0, DebugAgg.Acc.collectDocs); } - + // SKG used in multiple nested facets // // we'll re-use these params in 2 requests, one will simulate a shard request - final SolrParams nestedSKG = params - ("q", "cat_s:[* TO *]", "rows", "0", "fore", "num_i:[-1000 TO 0]", "back", "*:*", "json.facet" - , "{x: { type: terms, field: 'cat_s', sort: 'skg desc', " - + " facet: { skg: 'relatedness($fore,$back)', " - + " y: { type: terms, field: 'where_s', sort: 'skg desc', " - + " facet: { skg: 'relatedness($fore,$back)' } } } } }"); - + final SolrParams nestedSKG = + params( + "q", + "cat_s:[* TO *]", + "rows", + "0", + "fore", + "num_i:[-1000 TO 0]", + "back", + "*:*", + "json.facet", + "{x: { type: terms, field: 'cat_s', sort: 'skg desc', " + + " facet: { skg: 'relatedness($fore,$back)', " + + " y: { type: terms, field: 'where_s', sort: 'skg desc', " + + " facet: { skg: 'relatedness($fore,$back)' } } } } }"); + // plain old request - assertJQ(req(nestedSKG) - , "facets=={count:5, x:{ buckets:[" - + " { val:'B', count:3, " - + " skg : { relatedness: 0.01539, " - //+ " foreground_count: 2, " - //+ " foreground_size: 2, " - //+ " background_count: 3, " - //+ " background_size: 6, " - + " foreground_popularity: 0.33333," - + " background_popularity: 0.5 }," - + " y : { buckets:[" - + " { val:'NY', count: 1, " - + " skg : { relatedness: 0.00554, " - //+ " foreground_count: 1, " - //+ " foreground_size: 2, " - //+ " background_count: 2, " - //+ " background_size: 6, " - + " foreground_popularity: 0.16667, " - + " background_popularity: 0.33333, " - + " } }, " - + " { val:'NJ', count: 2, " - + " skg : { relatedness: 0.0, " // perfectly average and uncorrolated - //+ " foreground_count: 1, " - //+ " foreground_size: 2, " - //+ " background_count: 3, " - 
//+ " background_size: 6, " - + " foreground_popularity: 0.16667, " - + " background_popularity: 0.5, " - + " } }, " - + " ] } " - + " }, " - + " { val:'A', count:2, " - + " skg : { relatedness:-0.01097, " - //+ " foreground_count: 0, " - //+ " foreground_size: 2, " - //+ " background_count: 2, " - //+ " background_size: 6," - + " foreground_popularity: 0.0," - + " background_popularity: 0.33333 }," - + " y : { buckets:[" - + " { val:'NJ', count: 1, " - + " skg : { relatedness: 0.0, " // perfectly average and uncorrolated - //+ " foreground_count: 0, " - //+ " foreground_size: 0, " - //+ " background_count: 3, " - //+ " background_size: 6, " - + " foreground_popularity: 0.0, " - + " background_popularity: 0.5, " - + " } }, " - + " { val:'NY', count: 1, " - + " skg : { relatedness: 0.0, " // perfectly average and uncorrolated - //+ " foreground_count: 0, " - //+ " foreground_size: 0, " - //+ " background_count: 2, " - //+ " background_size: 6, " - + " foreground_popularity: 0.0, " - + " background_popularity: 0.33333, " - + " } }, " - + " ] } } ] } } "); + assertJQ( + req(nestedSKG), + "facets=={count:5, x:{ buckets:[" + + " { val:'B', count:3, " + + " skg : { relatedness: 0.01539, " + // + " foreground_count: 2, " + // + " foreground_size: 2, " + // + " background_count: 3, " + // + " background_size: 6, " + + " foreground_popularity: 0.33333," + + " background_popularity: 0.5 }," + + " y : { buckets:[" + + " { val:'NY', count: 1, " + + " skg : { relatedness: 0.00554, " + // + " foreground_count: 1, " + // + " foreground_size: 2, " + // + " background_count: 2, " + // + " background_size: 6, " + + " foreground_popularity: 0.16667, " + + " background_popularity: 0.33333, " + + " } }, " + + " { val:'NJ', count: 2, " + + " skg : { relatedness: 0.0, " // perfectly average and uncorrolated + // + " foreground_count: 1, " + // + " foreground_size: 2, " + // + " background_count: 3, " + // + " background_size: 6, " + + " foreground_popularity: 0.16667, " + + " background_popularity: 0.5, " + + " } }, " + + " ] } " + + " }, " + + " { val:'A', count:2, " + + " skg : { relatedness:-0.01097, " + // + " foreground_count: 0, " + // + " foreground_size: 2, " + // + " background_count: 2, " + // + " background_size: 6," + + " foreground_popularity: 0.0," + + " background_popularity: 0.33333 }," + + " y : { buckets:[" + + " { val:'NJ', count: 1, " + + " skg : { relatedness: 0.0, " // perfectly average and uncorrolated + // + " foreground_count: 0, " + // + " foreground_size: 0, " + // + " background_count: 3, " + // + " background_size: 6, " + + " foreground_popularity: 0.0, " + + " background_popularity: 0.5, " + + " } }, " + + " { val:'NY', count: 1, " + + " skg : { relatedness: 0.0, " // perfectly average and uncorrolated + // + " foreground_count: 0, " + // + " foreground_size: 0, " + // + " background_count: 2, " + // + " background_size: 6, " + + " foreground_popularity: 0.0, " + + " background_popularity: 0.33333, " + + " } }, " + + " ] } } ] } } "); // same request, but with whitebox params testing isShard // to verify the raw counts/sizes - assertJQ(req(nestedSKG, - // fake an initial shard request - "distrib", "false", "isShard", "true", "_facet_", "{}", - "shards.purpose", ""+FacetModule.PURPOSE_GET_JSON_FACETS) - , "facets=={count:5, x:{ buckets:[" - + " { val:'B', count:3, " - + " skg : { " - + " foreground_count: 2, " - + " foreground_size: 2, " - + " background_count: 3, " - + " background_size: 6 }, " - + " y : { buckets:[" - + " { val:'NY', count: 1, " - + " skg : { " - + " 
foreground_count: 1, " - + " foreground_size: 2, " - + " background_count: 2, " - + " background_size: 6, " - + " } }, " - + " { val:'NJ', count: 2, " - + " skg : { " - + " foreground_count: 1, " - + " foreground_size: 2, " - + " background_count: 3, " - + " background_size: 6, " - + " } }, " - + " ] } " - + " }, " - + " { val:'A', count:2, " - + " skg : { " - + " foreground_count: 0, " - + " foreground_size: 2, " - + " background_count: 2, " - + " background_size: 6 }," - + " y : { buckets:[" - + " { val:'NJ', count: 1, " - + " skg : { " - + " foreground_count: 0, " - + " foreground_size: 0, " - + " background_count: 3, " - + " background_size: 6, " - + " } }, " - + " { val:'NY', count: 1, " - + " skg : { " - + " foreground_count: 0, " - + " foreground_size: 0, " - + " background_count: 2, " - + " background_size: 6, " - + " } }, " - + " ] } } ] } } "); - - - // SKG w/min_pop (NOTE: incredibly contrived and not-useful fore/back for testing min_pop w/shard sorting) + assertJQ( + req( + nestedSKG, + // fake an initial shard request + "distrib", + "false", + "isShard", + "true", + "_facet_", + "{}", + "shards.purpose", + "" + FacetModule.PURPOSE_GET_JSON_FACETS), + "facets=={count:5, x:{ buckets:[" + + " { val:'B', count:3, " + + " skg : { " + + " foreground_count: 2, " + + " foreground_size: 2, " + + " background_count: 3, " + + " background_size: 6 }, " + + " y : { buckets:[" + + " { val:'NY', count: 1, " + + " skg : { " + + " foreground_count: 1, " + + " foreground_size: 2, " + + " background_count: 2, " + + " background_size: 6, " + + " } }, " + + " { val:'NJ', count: 2, " + + " skg : { " + + " foreground_count: 1, " + + " foreground_size: 2, " + + " background_count: 3, " + + " background_size: 6, " + + " } }, " + + " ] } " + + " }, " + + " { val:'A', count:2, " + + " skg : { " + + " foreground_count: 0, " + + " foreground_size: 2, " + + " background_count: 2, " + + " background_size: 6 }," + + " y : { buckets:[" + + " { val:'NJ', count: 1, " + + " skg : { " + + " foreground_count: 0, " + + " foreground_size: 0, " + + " background_count: 3, " + + " background_size: 6, " + + " } }, " + + " { val:'NY', count: 1, " + + " skg : { " + + " foreground_count: 0, " + + " foreground_size: 0, " + + " background_count: 2, " + + " background_size: 6, " + + " } }, " + + " ] } } ] } } "); + + // SKG w/min_pop (NOTE: incredibly contrived and not-useful fore/back for testing min_pop + // w/shard sorting) // // we'll re-use these params in 2 requests, one will simulate a shard request - final SolrParams minPopSKG = params - ("q", "cat_s:[* TO *]", "rows", "0", "fore", "num_i:[0 TO 1000]", "back", "cat_s:B", "json.facet" - , "{x: { type: terms, field: 'cat_s', sort: 'skg desc', " - + " facet: { skg: { type:func, func:'relatedness($fore,$back)', " - + " min_popularity: 0.001 }" - + " } } }"); + final SolrParams minPopSKG = + params( + "q", + "cat_s:[* TO *]", + "rows", + "0", + "fore", + "num_i:[0 TO 1000]", + "back", + "cat_s:B", + "json.facet", + "{x: { type: terms, field: 'cat_s', sort: 'skg desc', " + + " facet: { skg: { type:func, func:'relatedness($fore,$back)', " + + " min_popularity: 0.001 }" + + " } } }"); // plain old request - assertJQ(req(minPopSKG) - , "facets=={count:5, x:{ buckets:[" - + " { val:'B', count:3, " - + " skg : { relatedness: -1.0, " - //+ " foreground_count: 1, " - //+ " foreground_size: 3, " - //+ " background_count: 3, " - //+ " background_size: 3, " - + " foreground_popularity: 0.33333," - + " background_popularity: 1.0," - + " } }, " - + " { val:'A', count:2, " - + " skg : { 
relatedness:'-Infinity', " // bg_pop is below min_pop (otherwise 1.0) - //+ " foreground_count: 2, " - //+ " foreground_size: 3, " - //+ " background_count: 0, " - //+ " background_size: 3, " - + " foreground_popularity: 0.66667," - + " background_popularity: 0.0," - + " } } ] } } "); + assertJQ( + req(minPopSKG), + "facets=={count:5, x:{ buckets:[" + + " { val:'B', count:3, " + + " skg : { relatedness: -1.0, " + // + " foreground_count: 1, " + // + " foreground_size: 3, " + // + " background_count: 3, " + // + " background_size: 3, " + + " foreground_popularity: 0.33333," + + " background_popularity: 1.0," + + " } }, " + + " { val:'A', count:2, " + + " skg : { relatedness:'-Infinity', " // bg_pop is below min_pop (otherwise 1.0) + // + " foreground_count: 2, " + // + " foreground_size: 3, " + // + " background_count: 0, " + // + " background_size: 3, " + + " foreground_popularity: 0.66667," + + " background_popularity: 0.0," + + " } } ] } } "); // same request, but with whitebox params testing isShard - // to verify the raw counts/sizes and that per-shard sorting doesn't pre-emptively sort "A" to the bottom - assertJQ(req(minPopSKG, - // fake an initial shard request - "distrib", "false", "isShard", "true", "_facet_", "{}", - "shards.purpose", ""+FacetModule.PURPOSE_GET_JSON_FACETS) - , "facets=={count:5, x:{ buckets:[" - + " { val:'A', count:2, " - + " skg : { " - + " foreground_count: 2, " - + " foreground_size: 3, " - + " background_count: 0, " - + " background_size: 3, " - + " } }, " - + " { val:'B', count:3, " - + " skg : { " - + " foreground_count: 1, " - + " foreground_size: 3, " - + " background_count: 3, " - + " background_size: 3, " - + " } } ] } }"); + // to verify the raw counts/sizes and that per-shard sorting doesn't pre-emptively sort "A" to + // the bottom + assertJQ( + req( + minPopSKG, + // fake an initial shard request + "distrib", + "false", + "isShard", + "true", + "_facet_", + "{}", + "shards.purpose", + "" + FacetModule.PURPOSE_GET_JSON_FACETS), + "facets=={count:5, x:{ buckets:[" + + " { val:'A', count:2, " + + " skg : { " + + " foreground_count: 2, " + + " foreground_size: 3, " + + " background_count: 0, " + + " background_size: 3, " + + " } }, " + + " { val:'B', count:3, " + + " skg : { " + + " foreground_count: 1, " + + " foreground_size: 3, " + + " background_count: 3, " + + " background_size: 3, " + + " } } ] } }"); } @Test public void testSKGSweepMultiAcc() throws Exception { Client client = Client.localClient(); indexSimple(client); - + // simple single level facet w/skg & trivial non-sweeping stat using various sorts & (re)sorting - for (String sort : Arrays.asList("sort:'index asc'", - "sort:'y desc'", - "sort:'z desc'", - "sort:'skg desc'", - "prelim_sort:'count desc', sort:'index asc'", - "prelim_sort:'count desc', sort:'y desc'", - "prelim_sort:'count desc', sort:'z desc'", - "prelim_sort:'count desc', sort:'skg desc'")) { - // the relatedness score of each of our cat_s values is (conviniently) also alphabetical order, - // (and the same order as 'sum(num_i) desc' & 'min(num_i) desc') + for (String sort : + Arrays.asList( + "sort:'index asc'", + "sort:'y desc'", + "sort:'z desc'", + "sort:'skg desc'", + "prelim_sort:'count desc', sort:'index asc'", + "prelim_sort:'count desc', sort:'y desc'", + "prelim_sort:'count desc', sort:'z desc'", + "prelim_sort:'count desc', sort:'skg desc'")) { + // the relatedness score of each of our cat_s values is (conviniently) also alphabetical + // order, (and the same order as 'sum(num_i) desc' & 'min(num_i) desc') // // 
So all of these re/sort options should produce identical output - // - Testing "index" sort allows the randomized use of "stream" processor as default to be tested. - // - Testing (re)sorts on other stats sanity checks code paths where relatedness() is a "defered" Agg + // - Testing "index" sort allows the randomized use of "stream" processor as default to be + // tested. + // - Testing (re)sorts on other stats sanity checks code paths where relatedness() is a + // "deferred" Agg for (String sweep : Arrays.asList("true", "false")) { // results should be the same even if we disable sweeping... - assertJQ(req("q", "cat_s:[* TO *]", "rows", "0", - "fore", "where_s:NY", "back", "*:*", - "json.facet", "" - + "{x: { type: terms, field: 'cat_s', "+sort+", limit:-1, " - + " facet: { skg: { type: 'func', func:'relatedness($fore,$back)', " - +" "+RelatednessAgg.SWEEP_COLLECTION+": "+sweep+" }," - + " y:'sum(num_i)', " - +" z:'min(num_i)' } } }") - , "facets=={count:5, x:{ buckets:[" - + " { val:'A', count:2, y:5.0, z:2, " - + " skg : { relatedness: 0.00554, " - + " foreground_popularity: 0.16667," - + " background_popularity: 0.33333, }," - + " }, " - + " { val:'B', count:3, y:-3.0, z:-5, " - + " skg : { relatedness: 0.0, " // perfectly average and uncorrolated - + " foreground_popularity: 0.16667," - + " background_popularity: 0.5 }," - + " } ] } } " - ); + assertJQ( + req( + "q", + "cat_s:[* TO *]", + "rows", + "0", + "fore", + "where_s:NY", + "back", + "*:*", + "json.facet", + "" + + "{x: { type: terms, field: 'cat_s', " + + sort + + ", limit:-1, " + + " facet: { skg: { type: 'func', func:'relatedness($fore,$back)', " + + " " + + RelatednessAgg.SWEEP_COLLECTION + + ": " + + sweep + + " }," + + " y:'sum(num_i)', " + + " z:'min(num_i)' } } }"), + "facets=={count:5, x:{ buckets:[" + + " { val:'A', count:2, y:5.0, z:2, " + + " skg : { relatedness: 0.00554, " + + " foreground_popularity: 0.16667," + + " background_popularity: 0.33333, }," + + " }, " + + " { val:'B', count:3, y:-3.0, z:-5, " + + " skg : { relatedness: 0.0, " // perfectly average and uncorrelated + + " foreground_popularity: 0.16667," + + " background_popularity: 0.5 }," + + " } ] } } "); } } } - @Test public void testRepeatedNumerics() throws Exception { Client client = Client.localClient(); - String field = "num_is"; // docValues of multi-valued points field can contain duplicate values... make sure they don't mess up our counts. - client.add(sdoc("id", "1", "cat_s", "A", "where_s", "NY", "num_d", "4", "num_i", "2", "val_b", "true", "sparse_s", "one", field,"0", field,"0"), null); + // docValues of multi-valued points field can contain duplicate values... make sure they don't + // mess up our counts.
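As a sketch of the duplicate-value concern just described, reusing the test's num_is multiValued point field and its Client/testJQ helpers (the doc id is hypothetical and this block is illustrative, not part of the patch):

// A doc that repeats the same point value: SortedNumericDocValues stores the
// value twice, yet a terms facet should count the document only once.
client.add(sdoc("id", "99", "num_is", "7", "num_is", "7"), null);
client.commit();
client.testJQ(
    params("q", "id:99", "json.facet", "{f:{terms:num_is}, h:'hll(num_is)'}"),
    "facets=={count:1, f:{buckets:[{val:7, count:1}]}, h:1}");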
+ String field = "num_is"; + client.add( + sdoc( + "id", + "1", + "cat_s", + "A", + "where_s", + "NY", + "num_d", + "4", + "num_i", + "2", + "val_b", + "true", + "sparse_s", + "one", + field, + "0", + field, + "0"), + null); client.commit(); - client.testJQ(params("q", "id:1", "field", field - , "json.facet", "{" + - "f1:{terms:${field}}" + - ",f2:'hll(${field})'" + - ",f3:{type:range, field:${field}, start:0, end:1, gap:1}" + - "}" - ) - , "facets=={count:1, " + - "f1:{buckets:[{val:0, count:1}]}" + - ",f2:1" + - ",f3:{buckets:[{val:0, count:1}]}" + - "}" - ); + client.testJQ( + params( + "q", + "id:1", + "field", + field, + "json.facet", + "{" + + "f1:{terms:${field}}" + + ",f2:'hll(${field})'" + + ",f3:{type:range, field:${field}, start:0, end:1, gap:1}" + + "}"), + "facets=={count:1, " + + "f1:{buckets:[{val:0, count:1}]}" + + ",f2:1" + + ",f3:{buckets:[{val:0, count:1}]}" + + "}"); } public void testDomainJoinSelf() throws Exception { @@ -834,36 +1116,47 @@ public void testDomainJoinSelf() throws Exception { indexSimple(client); // self join domain switch at the second level of faceting - assertJQ(req("q", "*:*", "rows", "0", - "json.facet", "" - + "{x: { type: terms, field: 'num_i', " - + " facet: { y: { domain: { join: { from: 'cat_s', to: 'cat_s' } }, " - + " type: terms, field: 'where_s' " - + " } } } }") - , "facets=={count:6, x:{ buckets:[" - + " { val:-5, count:2, " - + " y : { buckets:[{ val:'NJ', count:2 }, { val:'NY', count:1 } ] } }, " - + " { val:2, count:1, " - + " y : { buckets:[{ val:'NJ', count:1 }, { val:'NY', count:1 } ] } }, " - + " { val:3, count:1, " - + " y : { buckets:[{ val:'NJ', count:1 }, { val:'NY', count:1 } ] } }, " - + " { val:7, count:1, " - + " y : { buckets:[{ val:'NJ', count:2 }, { val:'NY', count:1 } ] } } ] } }" - ); + assertJQ( + req( + "q", + "*:*", + "rows", + "0", + "json.facet", + "" + + "{x: { type: terms, field: 'num_i', " + + " facet: { y: { domain: { join: { from: 'cat_s', to: 'cat_s' } }, " + + " type: terms, field: 'where_s' " + + " } } } }"), + "facets=={count:6, x:{ buckets:[" + + " { val:-5, count:2, " + + " y : { buckets:[{ val:'NJ', count:2 }, { val:'NY', count:1 } ] } }, " + + " { val:2, count:1, " + + " y : { buckets:[{ val:'NJ', count:1 }, { val:'NY', count:1 } ] } }, " + + " { val:3, count:1, " + + " y : { buckets:[{ val:'NJ', count:1 }, { val:'NY', count:1 } ] } }, " + + " { val:7, count:1, " + + " y : { buckets:[{ val:'NJ', count:2 }, { val:'NY', count:1 } ] } } ] } }"); } - + public void testDomainGraph() throws Exception { Client client = Client.localClient(); indexSimple(client); // should be the same as join self - assertJQ(req("q", "*:*", "rows", "0", - "json.facet", "" - + "{x: { type: terms, field: 'num_i', " - + " facet: { y: { domain: { graph: { from: 'cat_s', to: 'cat_s' } }, " - + " type: terms, field: 'where_s' " - + " } } } }") - , "facets=={count:6, x:{ buckets:[" + assertJQ( + req( + "q", + "*:*", + "rows", + "0", + "json.facet", + "" + + "{x: { type: terms, field: 'num_i', " + + " facet: { y: { domain: { graph: { from: 'cat_s', to: 'cat_s' } }, " + + " type: terms, field: 'where_s' " + + " } } } }"), + "facets=={count:6, x:{ buckets:[" + " { val:-5, count:2, " + " y : { buckets:[{ val:'NJ', count:2 }, { val:'NY', count:1 } ] } }, " + " { val:2, count:1, " @@ -871,18 +1164,23 @@ public void testDomainGraph() throws Exception { + " { val:3, count:1, " + " y : { buckets:[{ val:'NJ', count:1 }, { val:'NY', count:1 } ] } }, " + " { val:7, count:1, " - + " y : { buckets:[{ val:'NJ', count:2 }, { val:'NY', count:1 } ] 
} } ] } }" - ); + + " y : { buckets:[{ val:'NJ', count:2 }, { val:'NY', count:1 } ] } } ] } }"); // This time, test with a traversalFilter // should be the same as join self - assertJQ(req("q", "*:*", "rows", "0", - "json.facet", "" - + "{x: { type: terms, field: 'num_i', " - + " facet: { y: { domain: { graph: { from: 'cat_s', to: 'cat_s', traversalFilter: 'where_s:NY' } }, " - + " type: terms, field: 'where_s' " - + " } } } }") - , "facets=={count:6, x:{ buckets:[" + assertJQ( + req( + "q", + "*:*", + "rows", + "0", + "json.facet", + "" + + "{x: { type: terms, field: 'num_i', " + + " facet: { y: { domain: { graph: { from: 'cat_s', to: 'cat_s', traversalFilter: 'where_s:NY' } }, " + + " type: terms, field: 'where_s' " + + " } } } }"), + "facets=={count:6, x:{ buckets:[" + " { val:-5, count:2, " + " y : { buckets:[{ val:'NJ', count:1 }, { val:'NY', count:1 } ] } }, " + " { val:2, count:1, " @@ -890,208 +1188,264 @@ public void testDomainGraph() throws Exception { + " { val:3, count:1, " + " y : { buckets:[{ val:'NJ', count:1 }, { val:'NY', count:1 } ] } }, " + " { val:7, count:1, " - + " y : { buckets:[{ val:'NJ', count:1 }, { val:'NY', count:1 } ] } } ] } }" - ); + + " y : { buckets:[{ val:'NJ', count:1 }, { val:'NY', count:1 } ] } } ] } }"); } - public void testNestedJoinDomain() throws Exception { Client client = Client.localClient(); client.deleteByQuery("*:*", null); - client.add(sdoc("id", "1", "1_s", "A", "2_s", "A", "3_s", "C", "y_s", "B", "x_t", "x z", "z_t", " 2 3"), null); - client.add(sdoc("id", "2", "1_s", "B", "2_s", "A", "3_s", "B", "y_s", "B", "x_t", "x y ", "z_t", "1 3"), null); - client.add(sdoc("id", "3", "1_s", "C", "2_s", "A", "3_s", "#", "y_s", "A", "x_t", " y z", "z_t", "1 2 "), null); - client.add(sdoc("id", "4", "1_s", "A", "2_s", "B", "3_s", "C", "y_s", "A", "x_t", " z", "z_t", " 3"), null); - client.add(sdoc("id", "5", "1_s", "B", "2_s", "_", "3_s", "B", "y_s", "C", "x_t", "x ", "z_t", "1 3"), null); - client.add(sdoc("id", "6", "1_s", "C", "2_s", "B", "3_s", "A", "y_s", "C", "x_t", "x y z", "z_t", "1 "), null); + client.add( + sdoc( + "id", "1", "1_s", "A", "2_s", "A", "3_s", "C", "y_s", "B", "x_t", "x z", "z_t", + " 2 3"), + null); + client.add( + sdoc( + "id", "2", "1_s", "B", "2_s", "A", "3_s", "B", "y_s", "B", "x_t", "x y ", "z_t", + "1 3"), + null); + client.add( + sdoc( + "id", "3", "1_s", "C", "2_s", "A", "3_s", "#", "y_s", "A", "x_t", " y z", "z_t", + "1 2 "), + null); + client.add( + sdoc( + "id", "4", "1_s", "A", "2_s", "B", "3_s", "C", "y_s", "A", "x_t", " z", "z_t", + " 3"), + null); + client.add( + sdoc( + "id", "5", "1_s", "B", "2_s", "_", "3_s", "B", "y_s", "C", "x_t", "x ", "z_t", + "1 3"), + null); + client.add( + sdoc( + "id", "6", "1_s", "C", "2_s", "B", "3_s", "A", "y_s", "C", "x_t", "x y z", "z_t", + "1 "), + null); client.commit(); - assertJQ(req("q", "x_t:x", "rows", "0", // NOTE q - only x=x in base set (1,2,5,6) - "json.facet", "" - + "{x: { type: terms, field: 'x_t', " - + " domain: { join: { from:'1_s', to:'2_s' } }," - // y1 & y2 are the same facet, with *similar* child facet z1/z2 ... - + " facet: { y1: { type: terms, field: 'y_s', " - // z1 & z2 are same field, diff join... - + " facet: { z1: { type: terms, field: 'z_t', " - + " domain: { join: { from:'2_s', to:'3_s' } } } } }," - + " y2: { type: terms, field: 'y_s', " - // z1 & z2 are same field, diff join... 
- + " facet: { z2: { type: terms, field: 'z_t', " - + " domain: { join: { from:'3_s', to:'1_s' } } } } } } } }") - , "facets=={count:4, " - + "x:{ buckets:[" // joined 1->2: doc5 drops out, counts: z=4, x=3, y=3 - + " { val:z, count:4, " // x=z (docs 1,3,4,6) y terms: A=2, B=1, C=1 - + " y1 : { buckets:[ " // z1 joins 2->3... - + " { val:A, count:2, " // A in docs(3,4), joins (A,B) -> docs(2,5,6) - + " z1: { buckets:[{ val:'1', count:3 }, { val:'3', count:2 }] } }, " - + " { val:B, count:1, " // B in doc1, joins A -> doc6 - + " z1: { buckets:[{ val:'1', count:1 }] } }, " - + " { val:C, count:1, " // C in doc6, joins B -> docs(2,5) - + " z1: { buckets:[{ val:'1', count:2 }, { val:'3', count:2 }] } } " - + " ] }, " - + " y2 : { buckets:[ " // z2 joins 3->1... - + " { val:A, count:2, " // A in docs(3,4), joins C -> docs(3,6) - + " z2: { buckets:[{ val:'1', count:2 }, { val:'2', count:1 }] } }, " - + " { val:B, count:1, " // B in doc1, joins C -> docs(3,6) - + " z2: { buckets:[{ val:'1', count:2 }, { val:'2', count:1 }] } }, " - + " { val:C, count:1, " // C in doc6, joins A -> docs(1,4) - + " z2: { buckets:[{ val:'3', count:2 }, { val:'2', count:1 }] } } " - + " ] } }, " - + " { val:x, count:3, " // x=x (docs 1,2,!5,6) y terms: B=2, C=1 - + " y1 : { buckets:[ " // z1 joins 2->3... - + " { val:B, count:2, " // B in docs(1,2), joins A -> doc6 - + " z1: { buckets:[{ val:'1', count:1 }] } }, " - + " { val:C, count:1, " // C in doc6, joins B -> docs(2,5) - + " z1: { buckets:[{ val:'1', count:2 }, { val:'3', count:2 }] } } " - + " ] }, " - + " y2 : { buckets:[ " // z2 joins 3->1... - + " { val:B, count:2, " // B in docs(1,2), joins C,B -> docs(2,3,5,6) - + " z2: { buckets:[{ val:'1', count:4 }, { val:'3', count:2 }, { val:'2', count:1 }] } }, " - + " { val:C, count:1, " // C in doc6, joins A -> docs(1,4) - + " z2: { buckets:[{ val:'3', count:2 }, { val:'2', count:1 }] } } " - + " ] } }, " - + " { val:y, count:3, " // x=y (docs 2,3,6) y terms: A=1, B=1, C=1 - + " y1 : { buckets:[ " // z1 joins 2->3... - + " { val:A, count:1, " // A in doc3, joins A -> doc6 - + " z1: { buckets:[{ val:'1', count:1 }] } }, " - + " { val:B, count:1, " // B in doc2, joins A -> doc6 - + " z1: { buckets:[{ val:'1', count:1 }] } }, " - + " { val:C, count:1, " // C in doc6, joins B -> docs(2,5) - + " z1: { buckets:[{ val:'1', count:2 }, { val:'3', count:2 }] } } " - + " ] }, " - + " y2 : { buckets:[ " // z2 joins 3->1... - + " { val:A, count:1, " // A in doc3, joins # -> empty set - + " z2: { buckets:[ ] } }, " - + " { val:B, count:1, " // B in doc2, joins B -> docs(2,5) - + " z2: { buckets:[{ val:'1', count:2 }, { val:'3', count:2 }] } }, " - + " { val:C, count:1, " // C in doc6, joins A -> docs(1,4) - + " z2: { buckets:[{ val:'3', count:2 }, { val:'2', count:1 }] } } " - + " ]} }" - + " ]}}" - ); + assertJQ( + req( + "q", + "x_t:x", + "rows", + "0", // NOTE q - only x=x in base set (1,2,5,6) + "json.facet", + "" + + "{x: { type: terms, field: 'x_t', " + + " domain: { join: { from:'1_s', to:'2_s' } }," + // y1 & y2 are the same facet, with *similar* child facet z1/z2 ... + + " facet: { y1: { type: terms, field: 'y_s', " + // z1 & z2 are same field, diff join... + + " facet: { z1: { type: terms, field: 'z_t', " + + " domain: { join: { from:'2_s', to:'3_s' } } } } }," + + " y2: { type: terms, field: 'y_s', " + // z1 & z2 are same field, diff join... 
+ + " facet: { z2: { type: terms, field: 'z_t', " + + " domain: { join: { from:'3_s', to:'1_s' } } } } } } } }"), + "facets=={count:4, " + + "x:{ buckets:[" // joined 1->2: doc5 drops out, counts: z=4, x=3, y=3 + + " { val:z, count:4, " // x=z (docs 1,3,4,6) y terms: A=2, B=1, C=1 + + " y1 : { buckets:[ " // z1 joins 2->3... + + " { val:A, count:2, " // A in docs(3,4), joins (A,B) -> docs(2,5,6) + + " z1: { buckets:[{ val:'1', count:3 }, { val:'3', count:2 }] } }, " + + " { val:B, count:1, " // B in doc1, joins A -> doc6 + + " z1: { buckets:[{ val:'1', count:1 }] } }, " + + " { val:C, count:1, " // C in doc6, joins B -> docs(2,5) + + " z1: { buckets:[{ val:'1', count:2 }, { val:'3', count:2 }] } } " + + " ] }, " + + " y2 : { buckets:[ " // z2 joins 3->1... + + " { val:A, count:2, " // A in docs(3,4), joins C -> docs(3,6) + + " z2: { buckets:[{ val:'1', count:2 }, { val:'2', count:1 }] } }, " + + " { val:B, count:1, " // B in doc1, joins C -> docs(3,6) + + " z2: { buckets:[{ val:'1', count:2 }, { val:'2', count:1 }] } }, " + + " { val:C, count:1, " // C in doc6, joins A -> docs(1,4) + + " z2: { buckets:[{ val:'3', count:2 }, { val:'2', count:1 }] } } " + + " ] } }, " + + " { val:x, count:3, " // x=x (docs 1,2,!5,6) y terms: B=2, C=1 + + " y1 : { buckets:[ " // z1 joins 2->3... + + " { val:B, count:2, " // B in docs(1,2), joins A -> doc6 + + " z1: { buckets:[{ val:'1', count:1 }] } }, " + + " { val:C, count:1, " // C in doc6, joins B -> docs(2,5) + + " z1: { buckets:[{ val:'1', count:2 }, { val:'3', count:2 }] } } " + + " ] }, " + + " y2 : { buckets:[ " // z2 joins 3->1... + + " { val:B, count:2, " // B in docs(1,2), joins C,B -> docs(2,3,5,6) + + " z2: { buckets:[{ val:'1', count:4 }, { val:'3', count:2 }, { val:'2', count:1 }] } }, " + + " { val:C, count:1, " // C in doc6, joins A -> docs(1,4) + + " z2: { buckets:[{ val:'3', count:2 }, { val:'2', count:1 }] } } " + + " ] } }, " + + " { val:y, count:3, " // x=y (docs 2,3,6) y terms: A=1, B=1, C=1 + + " y1 : { buckets:[ " // z1 joins 2->3... + + " { val:A, count:1, " // A in doc3, joins A -> doc6 + + " z1: { buckets:[{ val:'1', count:1 }] } }, " + + " { val:B, count:1, " // B in doc2, joins A -> doc6 + + " z1: { buckets:[{ val:'1', count:1 }] } }, " + + " { val:C, count:1, " // C in doc6, joins B -> docs(2,5) + + " z1: { buckets:[{ val:'1', count:2 }, { val:'3', count:2 }] } } " + + " ] }, " + + " y2 : { buckets:[ " // z2 joins 3->1... 
+ + " { val:A, count:1, " // A in doc3, joins # -> empty set + + " z2: { buckets:[ ] } }, " + + " { val:B, count:1, " // B in doc2, joins B -> docs(2,5) + + " z2: { buckets:[{ val:'1', count:2 }, { val:'3', count:2 }] } }, " + + " { val:C, count:1, " // C in doc6, joins A -> docs(1,4) + + " z2: { buckets:[{ val:'3', count:2 }, { val:'2', count:1 }] } } " + + " ]} }" + + " ]}}"); } - @Test public void testMethodStream() throws Exception { Client client = Client.localClient(); indexSimple(client); - assertJQ(req("q", "*:*", "rows", "0", "json.facet", "{x:'sum(num_is)'}") - , "facets=={count:6 , x:,10.0}" - ); - assertJQ(req("q", "*:*", "rows", "0", "json.facet", "{x:'min(num_is)'}") - , "facets=={count:6 , x:,-9}" - ); + assertJQ( + req("q", "*:*", "rows", "0", "json.facet", "{x:'sum(num_is)'}"), + "facets=={count:6 , x:,10.0}"); + assertJQ( + req("q", "*:*", "rows", "0", "json.facet", "{x:'min(num_is)'}"), + "facets=={count:6 , x:,-9}"); // test multiple json.facet commands - assertJQ(req("q", "*:*", "rows", "0" - , "json.facet", "{x:'sum(num_d)'}" - , "json.facet", "{y:'min(num_d)'}" - , "json.facet", "{z:'min(num_is)'}" - ) - , "facets=={count:6 , x:3.0, y:-9.0, z:-9 }" - ); - + assertJQ( + req( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{x:'sum(num_d)'}", + "json.facet", + "{y:'min(num_d)'}", + "json.facet", + "{z:'min(num_is)'}"), + "facets=={count:6 , x:3.0, y:-9.0, z:-9 }"); // test streaming - assertJQ(req("q", "*:*", "rows", "0" - , "json.facet", "{ cat:{terms:{field:'cat_s', method:stream }}" + // won't stream; need sort:index asc - ", cat2:{terms:{field:'cat_s', method:stream, sort:'index asc' }}" + - ", cat3:{terms:{field:'cat_s', method:stream, sort:'index asc', mincount:3 }}" + // mincount - ", cat4:{terms:{field:'cat_s', method:stream, sort:'index asc', prefix:B }}" + // prefix - ", cat5:{terms:{field:'cat_s', method:stream, sort:'index asc', offset:1 }}" + // offset - ", cat6:{terms:{field:'cat_s', method:stream, sort:'index asc', missing:true }}" + // missing - ", cat7:{terms:{field:'cat_s', method:stream, sort:'index asc', numBuckets:true }}" + // numBuckets - ", cat8:{terms:{field:'cat_s', method:stream, sort:'index asc', allBuckets:true }}" + // allBuckets - " }" - ) - , "facets=={count:6 " + - ", cat :{buckets:[{val:B, count:3},{val:A, count:2}]}" + - ", cat2:{buckets:[{val:A, count:2},{val:B, count:3}]}" + - ", cat3:{buckets:[{val:B, count:3}]}" + - ", cat4:{buckets:[{val:B, count:3}]}" + - ", cat5:{buckets:[{val:B, count:3}]}" + - ", cat6:{missing:{count:1}, buckets:[{val:A, count:2},{val:B, count:3}]}" + - ", cat7:{numBuckets:2, buckets:[{val:A, count:2},{val:B, count:3}]}" + - ", cat8:{allBuckets:{count:5}, buckets:[{val:A, count:2},{val:B, count:3}]}" + - " }" - ); - + assertJQ( + req( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{ cat:{terms:{field:'cat_s', method:stream }}" + + // won't stream; need sort:index asc + ", cat2:{terms:{field:'cat_s', method:stream, sort:'index asc' }}" + + ", cat3:{terms:{field:'cat_s', method:stream, sort:'index asc', mincount:3 }}" + + // mincount + ", cat4:{terms:{field:'cat_s', method:stream, sort:'index asc', prefix:B }}" + + // prefix + ", cat5:{terms:{field:'cat_s', method:stream, sort:'index asc', offset:1 }}" + + // offset + ", cat6:{terms:{field:'cat_s', method:stream, sort:'index asc', missing:true }}" + + // missing + ", cat7:{terms:{field:'cat_s', method:stream, sort:'index asc', numBuckets:true }}" + + // numBuckets + ", cat8:{terms:{field:'cat_s', method:stream, sort:'index asc', allBuckets:true }}" + + 
// allBuckets + " }"), + "facets=={count:6 " + + ", cat :{buckets:[{val:B, count:3},{val:A, count:2}]}" + + ", cat2:{buckets:[{val:A, count:2},{val:B, count:3}]}" + + ", cat3:{buckets:[{val:B, count:3}]}" + + ", cat4:{buckets:[{val:B, count:3}]}" + + ", cat5:{buckets:[{val:B, count:3}]}" + + ", cat6:{missing:{count:1}, buckets:[{val:A, count:2},{val:B, count:3}]}" + + ", cat7:{numBuckets:2, buckets:[{val:A, count:2},{val:B, count:3}]}" + + ", cat8:{allBuckets:{count:5}, buckets:[{val:A, count:2},{val:B, count:3}]}" + + " }"); // test nested streaming under non-streaming - assertJQ(req("q", "*:*", "rows", "0" - , "json.facet", "{ cat:{terms:{field:'cat_s', sort:'index asc', facet:{where:{terms:{field:where_s,method:stream,sort:'index asc'}}} }}}" - ) - , "facets=={count:6 " + - ", cat :{buckets:[{val:A, count:2, where:{buckets:[{val:NJ,count:1},{val:NY,count:1}]} },{val:B, count:3, where:{buckets:[{val:NJ,count:2},{val:NY,count:1}]} }]}" - + "}" - ); + assertJQ( + req( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{ cat:{terms:{field:'cat_s', sort:'index asc', facet:{where:{terms:{field:where_s,method:stream,sort:'index asc'}}} }}}"), + "facets=={count:6 " + + ", cat :{buckets:[{val:A, count:2, where:{buckets:[{val:NJ,count:1},{val:NY,count:1}]} },{val:B, count:3, where:{buckets:[{val:NJ,count:2},{val:NY,count:1}]} }]}" + + "}"); // test nested streaming under streaming - assertJQ(req("q", "*:*", "rows", "0" - , "json.facet", "{ cat:{terms:{field:'cat_s', method:stream,sort:'index asc', facet:{where:{terms:{field:where_s,method:stream,sort:'index asc'}}} }}}" - ) - , "facets=={count:6 " + - ", cat :{buckets:[{val:A, count:2, where:{buckets:[{val:NJ,count:1},{val:NY,count:1}]} },{val:B, count:3, where:{buckets:[{val:NJ,count:2},{val:NY,count:1}]} }]}" - + "}" - ); + assertJQ( + req( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{ cat:{terms:{field:'cat_s', method:stream,sort:'index asc', facet:{where:{terms:{field:where_s,method:stream,sort:'index asc'}}} }}}"), + "facets=={count:6 " + + ", cat :{buckets:[{val:A, count:2, where:{buckets:[{val:NJ,count:1},{val:NY,count:1}]} },{val:B, count:3, where:{buckets:[{val:NJ,count:2},{val:NY,count:1}]} }]}" + + "}"); // test nested streaming with stats under streaming - assertJQ(req("q", "*:*", "rows", "0" - , "json.facet", "{ cat:{terms:{field:'cat_s', method:stream,sort:'index asc', facet:{ where:{terms:{field:where_s,method:stream,sort:'index asc',sort:'index asc', facet:{x:'max(num_d)', y:'sum(num_is)'} }}} }}}" - ) - , "facets=={count:6 " + - ", cat :{buckets:[{val:A, count:2, where:{buckets:[{val:NJ,count:1,x:2.0,y:5.0},{val:NY,count:1,x:4.0,y:6.0}]} }," + - "{val:B, count:3, where:{buckets:[{val:NJ,count:2,x:11.0,y:4.0},{val:NY,count:1,x:-5.0,y:-5.0}]} }]}" - + "}" - ); + assertJQ( + req( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{ cat:{terms:{field:'cat_s', method:stream,sort:'index asc', facet:{ where:{terms:{field:where_s,method:stream,sort:'index asc',sort:'index asc', facet:{x:'max(num_d)', y:'sum(num_is)'} }}} }}}"), + "facets=={count:6 " + + ", cat :{buckets:[{val:A, count:2, where:{buckets:[{val:NJ,count:1,x:2.0,y:5.0},{val:NY,count:1,x:4.0,y:6.0}]} }," + + "{val:B, count:3, where:{buckets:[{val:NJ,count:2,x:11.0,y:4.0},{val:NY,count:1,x:-5.0,y:-5.0}]} }]}" + + "}"); // test nested streaming with stats under streaming with stats - assertJQ(req("q", "*:*", "rows", "0", - "facet","true" - , "json.facet", "{ cat:{terms:{field:'cat_s', method:stream,sort:'index asc', facet:{ y:'min(num_d)', 
where:{terms:{field:where_s,method:stream,sort:'index asc', facet:{x:'max(num_d)'} }}} }}}"
- )
- , "facets=={count:6 " +
- ", cat :{buckets:[{val:A, count:2, y:2.0, where:{buckets:[{val:NJ,count:1,x:2.0},{val:NY,count:1,x:4.0}]} },{val:B, count:3, y:-9.0, where:{buckets:[{val:NJ,count:2,x:11.0},{val:NY,count:1,x:-5.0}]} }]}"
- + "}"
- );
-
-
- assertJQ(req("q", "*:*", "fq","cat_s:A")
- , "response/numFound==2"
- );
+ assertJQ(
+ req(
+ "q",
+ "*:*",
+ "rows",
+ "0",
+ "facet",
+ "true",
+ "json.facet",
+ "{ cat:{terms:{field:'cat_s', method:stream,sort:'index asc', facet:{ y:'min(num_d)', where:{terms:{field:where_s,method:stream,sort:'index asc', facet:{x:'max(num_d)'} }}} }}}"),
+ "facets=={count:6 "
+ + ", cat :{buckets:[{val:A, count:2, y:2.0, where:{buckets:[{val:NJ,count:1,x:2.0},{val:NY,count:1,x:4.0}]} },{val:B, count:3, y:-9.0, where:{buckets:[{val:NJ,count:2,x:11.0},{val:NY,count:1,x:-5.0}]} }]}"
+ + "}");
+
+ assertJQ(req("q", "*:*", "fq", "cat_s:A"), "response/numFound==2");
}
- Map<String,String[]> suffixMap = new HashMap<>();
+ Map<String, String[]> suffixMap = new HashMap<>();
+ {
- suffixMap.put("_s", new String[]{"_s","_ss","_sd","_sds"} );
- suffixMap.put("_ss", new String[]{"_ss","_sds"} );
- suffixMap.put("_l", new String[]{"_l","_ls","_ld","_lds"} );
- suffixMap.put("_ls", new String[]{"_ls","_lds"} );
- suffixMap.put("_i", new String[]{"_i","_is","_id","_ids", "_l","_ls","_ld","_lds"} );
- suffixMap.put("_is", new String[]{"_is","_ids", "_ls","_lds"} );
- suffixMap.put("_d", new String[]{"_d","_ds","_dd","_dds"} );
- suffixMap.put("_ds", new String[]{"_ds","_dds"} );
- suffixMap.put("_f", new String[]{"_f","_fs","_fd","_fds", "_d","_ds","_dd","_dds"} );
- suffixMap.put("_fs", new String[]{"_fs","_fds","_ds","_dds"} );
- suffixMap.put("_dt", new String[]{"_dt","_dts","_dtd","_dtds"} );
- suffixMap.put("_dts", new String[]{"_dts","_dtds"} );
- suffixMap.put("_b", new String[]{"_b"} );
+ suffixMap.put("_s", new String[] {"_s", "_ss", "_sd", "_sds"});
+ suffixMap.put("_ss", new String[] {"_ss", "_sds"});
+ suffixMap.put("_l", new String[] {"_l", "_ls", "_ld", "_lds"});
+ suffixMap.put("_ls", new String[] {"_ls", "_lds"});
+ suffixMap.put("_i", new String[] {"_i", "_is", "_id", "_ids", "_l", "_ls", "_ld", "_lds"});
+ suffixMap.put("_is", new String[] {"_is", "_ids", "_ls", "_lds"});
+ suffixMap.put("_d", new String[] {"_d", "_ds", "_dd", "_dds"});
+ suffixMap.put("_ds", new String[] {"_ds", "_dds"});
+ suffixMap.put("_f", new String[] {"_f", "_fs", "_fd", "_fds", "_d", "_ds", "_dd", "_dds"});
+ suffixMap.put("_fs", new String[] {"_fs", "_fds", "_ds", "_dds"});
+ suffixMap.put("_dt", new String[] {"_dt", "_dts", "_dtd", "_dtds"});
+ suffixMap.put("_dts", new String[] {"_dts", "_dtds"});
+ suffixMap.put("_b", new String[] {"_b"});
}
List<String> getAlternatives(String field) {
int idx = field.lastIndexOf("_");
- if (idx<=0 || idx>=field.length()) return Collections.singletonList(field);
+ if (idx <= 0 || idx >= field.length()) return Collections.singletonList(field);
String suffix = field.substring(idx);
String[] alternativeSuffixes = suffixMap.get(suffix);
if (alternativeSuffixes == null) return Collections.singletonList(field);
String base = field.substring(0, idx);
List<String> out = new ArrayList<>(alternativeSuffixes.length);
for (String altS : alternativeSuffixes) {
- out.add( base + altS );
+ out.add(base + altS);
}
Collections.shuffle(out, random());
return out;
@@ -1099,15 +1453,18 @@ List<String> getAlternatives(String field) {
@Test
public void testStats() throws Exception {
- doStats(Client.localClient, params("debugQuery", Boolean.toString(random().nextBoolean()) ));
+ doStats(Client.localClient, params("debugQuery", Boolean.toString(random().nextBoolean())));
}
@Test
public void testStatsDistrib() throws Exception {
initServers();
Client client = servers.getClient(random().nextInt());
- client.queryDefaults().set( "shards", servers.getShards()).set("debugQuery", Boolean.toString(random().nextBoolean()) );
- doStats( client, params() );
+ client
+ .queryDefaults()
+ .set("shards", servers.getShards())
+ .set("debugQuery", Boolean.toString(random().nextBoolean()));
+ doStats(client, params());
}
public void doStats(Client client, ModifiableSolrParams p) throws Exception {
@@ -1115,7 +1472,9 @@ public void doStats(Client client, ModifiableSolrParams p) throws Exception {
fieldLists.put("noexist", getAlternatives("noexist_s"));
fieldLists.put("cat_s", getAlternatives("cat_s"));
fieldLists.put("where_s", getAlternatives("where_s"));
- fieldLists.put("num_d", getAlternatives("num_f")); // num_d name is historical, which is why we map it to num_f alternatives so we can include floats as well
+ // num_d name is historical, which is why we map it to num_f alternatives so
+ fieldLists.put("num_d", getAlternatives("num_f"));
+ // we can include floats as well
fieldLists.put("num_i", getAlternatives("num_i"));
fieldLists.put("super_s", getAlternatives("super_s"));
fieldLists.put("val_b", getAlternatives("val_b"));
@@ -1128,63 +1487,250 @@ public void doStats(Client client, ModifiableSolrParams p) throws Exception {
maxAlt = Math.max(fieldList.size(), maxAlt);
}
- // take the field with the maximum number of alternative types and loop through our variants that many times
- for (int i=0; i<maxAlt; i++) {
+ // take the field with the maximum number of alternative types and loop through our variants
+ // that many times
+ for (int i = 0; i < maxAlt; i++) {
ModifiableSolrParams args = params(p);
for (String field : fieldLists.keySet()) {
List<String> alts = fieldLists.get(field);
- String alt = alts.get( i % alts.size() );
+ String alt = alts.get(i % alts.size());
args.add(field, alt);
}
- args.set("rows","0");
+ args.set("rows", "0");
// doStatsTemplated(client, args);
}
- // single valued strings
- doStatsTemplated(client, params(p, "rows","0", "noexist","noexist_s", "cat_s","cat_s", "where_s","where_s", "num_d","num_d", "num_i","num_i", "num_l","long_l", "super_s","super_s", "val_b","val_b", "date","date_dt", "sparse_s","sparse_s" ,"multi_ss","multi_ss") );
+ doStatsTemplated(
+ client,
+ params(
+ p,
+ "rows",
+ "0",
+ "noexist",
+ "noexist_s",
+ "cat_s",
+ "cat_s",
+ "where_s",
+ "where_s",
+ "num_d",
+ "num_d",
+ "num_i",
+ "num_i",
+ "num_l",
+ "long_l",
+ "super_s",
+ "super_s",
+ "val_b",
+ "val_b",
+ "date",
+ "date_dt",
+ "sparse_s",
+ "sparse_s",
+ "multi_ss",
+ "multi_ss"));
// multi-valued strings, long/float substitute for int/double
- doStatsTemplated(client, params(p, "facet","true", "rows","0", "noexist","noexist_ss", "cat_s","cat_ss", "where_s","where_ss", "num_d","num_f", "num_i","num_l", "num_l","long_l", "num_is","num_ls", "num_fs", "num_ds", "super_s","super_ss", "val_b","val_b", "date","date_dt", "sparse_s","sparse_ss", "multi_ss","multi_ss") );
+ doStatsTemplated(
+ client,
+ params(
+ p,
+ "facet",
+ "true",
+ "rows",
+ "0",
+ "noexist",
+ "noexist_ss",
+ "cat_s",
+ "cat_ss",
+ "where_s",
+
"where_ss", "num_d", "num_f", "num_i", "num_l", "num_l","long_l","super_s", "super_ss", "val_b", "val_b", "date", "date_dt", "sparse_s", "sparse_ss", "multi_ss", "multi_ss")); + doStatsTemplated( + client, + params( + p, + "terms_method", + "method:dv,", + "rows", + "0", + "noexist", + "noexist_ss", + "cat_s", + "cat_ss", + "where_s", + "where_ss", + "num_d", + "num_f", + "num_i", + "num_l", + "num_l", + "long_l", + "super_s", + "super_ss", + "val_b", + "val_b", + "date", + "date_dt", + "sparse_s", + "sparse_ss", + "multi_ss", + "multi_ss")); // single valued docvalues for strings, and single valued numeric doc values for numeric fields - doStatsTemplated(client, params(p, "rows","0", "noexist","noexist_sd", "cat_s","cat_sd", "where_s","where_sd", "num_d","num_dd", "num_i","num_id", "num_is","num_lds", "num_l","long_ld", "num_fs","num_dds", "super_s","super_sd", "val_b","val_b", "date","date_dtd", "sparse_s","sparse_sd" ,"multi_ss","multi_sds") ); + doStatsTemplated( + client, + params( + p, + "rows", + "0", + "noexist", + "noexist_sd", + "cat_s", + "cat_sd", + "where_s", + "where_sd", + "num_d", + "num_dd", + "num_i", + "num_id", + "num_is", + "num_lds", + "num_l", + "long_ld", + "num_fs", + "num_dds", + "super_s", + "super_sd", + "val_b", + "val_b", + "date", + "date_dtd", + "sparse_s", + "sparse_sd", + "multi_ss", + "multi_sds")); // multi-valued docvalues - FacetFieldProcessorByArrayDV.unwrap_singleValued_multiDv = false; // better multi-valued coverage - doStatsTemplated(client, params(p, "rows","0", "noexist","noexist_sds", "cat_s","cat_sds", "where_s","where_sds", "num_d","num_d", "num_i","num_i", "num_is","num_ids", "num_l","long_ld", "num_fs","num_fds", "super_s","super_sds", "val_b","val_b", "date","date_dtds", "sparse_s","sparse_sds" ,"multi_ss","multi_sds") ); + FacetFieldProcessorByArrayDV.unwrap_singleValued_multiDv = + false; // better multi-valued coverage + doStatsTemplated( + client, + params( + p, + "rows", + "0", + "noexist", + "noexist_sds", + "cat_s", + "cat_sds", + "where_s", + "where_sds", + "num_d", + "num_d", + "num_i", + "num_i", + "num_is", + "num_ids", + "num_l", + "long_ld", + "num_fs", + "num_fds", + "super_s", + "super_sds", + "val_b", + "val_b", + "date", + "date_dtds", + "sparse_s", + "sparse_sds", + "multi_ss", + "multi_sds")); // multi-valued docvalues FacetFieldProcessorByArrayDV.unwrap_singleValued_multiDv = true; - doStatsTemplated(client, params(p, "rows","0", "noexist","noexist_sds", "cat_s","cat_sds", "where_s","where_sds", "num_d","num_d", "num_i","num_i", "num_is","num_ids", "num_l","long_ld", "num_fs","num_fds", "super_s","super_sds", "val_b","val_b", "date","date_dtds", "sparse_s","sparse_sds" ,"multi_ss","multi_sds") ); + doStatsTemplated( + client, + params( + p, + "rows", + "0", + "noexist", + "noexist_sds", + "cat_s", + "cat_sds", + "where_s", + "where_sds", + "num_d", + "num_d", + "num_i", + "num_i", + "num_is", + "num_ids", + "num_l", + "long_ld", + "num_fs", + "num_fds", + "super_s", + "super_sds", + "val_b", + "val_b", + "date", + "date_dtds", + "sparse_s", + "sparse_sds", + "multi_ss", + "multi_sds")); } public static void doStatsTemplated(Client client, ModifiableSolrParams p) throws Exception { - p.set("Z_num_i", "Z_" + p.get("num_i") ); - p.set("Z_num_l", "Z_" + p.get("num_l") ); - p.set("sparse_num_d", "sparse_" + p.get("num_d") ); - if (p.get("num_is") == null) p.add("num_is","num_is"); - if (p.get("num_fs") == null) p.add("num_fs","num_fs"); + p.set("Z_num_i", "Z_" + p.get("num_i")); + p.set("Z_num_l", "Z_" + p.get("num_l")); + 
p.set("sparse_num_d", "sparse_" + p.get("num_d")); + if (p.get("num_is") == null) p.add("num_is", "num_is"); + if (p.get("num_fs") == null) p.add("num_fs", "num_fs"); String terms = p.get("terms"); - if (terms == null) terms=""; - int limit=0; + if (terms == null) terms = ""; + int limit = 0; switch (random().nextInt(4)) { - case 0: limit=-1; break; - case 1: limit=1000000; break; + case 0: + limit = -1; + break; + case 1: + limit = 1000000; + break; case 2: // fallthrough case 3: // fallthrough } if (limit != 0) { - terms=terms+"limit:"+limit+","; + terms = terms + "limit:" + limit + ","; } String terms_method = p.get("terms_method"); if (terms_method != null) { - terms=terms+terms_method; + terms = terms + terms_method; } String refine_method = p.get("refine_method"); if (refine_method == null && random().nextBoolean()) { @@ -1194,9 +1740,10 @@ public static void doStatsTemplated(Client client, ModifiableSolrParams p) throw p.set("terms", terms); // "${terms}" should be put at the beginning of generic terms facets. - // It may specify "method=..." or "limit:-1", so should not be used if the facet explicitly specifies. + // It may specify "method=..." or "limit:-1", so should not be used if the facet explicitly + // specifies. - MacroExpander m = new MacroExpander( p.getMap() ); + MacroExpander m = new MacroExpander(p.getMap()); String cat_s = m.expand("${cat_s}"); String where_s = m.expand("${where_s}"); @@ -1213,1361 +1760,1937 @@ public static void doStatsTemplated(Client client, ModifiableSolrParams p) throw String multi_ss = m.expand("${multi_ss}"); String sparse_num_d = m.expand("${sparse_num_d}"); - client.deleteByQuery("*:*", null); Client iclient = client; /*** This code was not needed yet, but may be needed if we want to force empty shard results more often. 
- // create a new indexing client that doesn't use one shard to better test for empty or non-existent results - if (!client.local()) { - List shards = client.getClientProvider().all(); - iclient = new Client(shards.subList(0, shards.size()-1), client.getClientProvider().getSeed()); - } + * // create a new indexing client that doesn't use one shard to better test for empty or non-existent results + * if (!client.local()) { + * List shards = client.getClientProvider().all(); + * iclient = new Client(shards.subList(0, shards.size()-1), client.getClientProvider().getSeed()); + * } ***/ SolrInputDocument doc = - sdoc("id", "1", cat_s, "A", where_s, "NY", num_d, "4", sparse_num_d, "6", num_i, "2", num_is,"2",num_is,"-5", num_fs,"2",num_fs,"-5", super_s, "zodiac", date, "2001-01-01T01:01:01Z", val_b, "true", sparse_s, "one"); + sdoc( + "id", + "1", + cat_s, + "A", + where_s, + "NY", + num_d, + "4", + sparse_num_d, + "6", + num_i, + "2", + num_is, + "2", + num_is, + "-5", + num_fs, + "2", + num_fs, + "-5", + super_s, + "zodiac", + date, + "2001-01-01T01:01:01Z", + val_b, + "true", + sparse_s, + "one"); iclient.add(doc, null); iclient.add(doc, null); - iclient.add(doc, null); // a couple of deleted docs - iclient.add(sdoc("id", "2", cat_s, "B", where_s, "NJ", num_d, "-9", num_i, "-5", num_is,"3",num_is,"-1", num_fs,"3",num_fs,"-1.5", super_s,"superman", date,"2002-02-02T02:02:02Z", val_b, "false" , multi_ss,"a", multi_ss,"b" , Z_num_i, "0", Z_num_l,"0"), null); + iclient.add(doc, null); // a couple of deleted docs + iclient.add( + sdoc( + "id", + "2", + cat_s, + "B", + where_s, + "NJ", + num_d, + "-9", + num_i, + "-5", + num_is, + "3", + num_is, + "-1", + num_fs, + "3", + num_fs, + "-1.5", + super_s, + "superman", + date, + "2002-02-02T02:02:02Z", + val_b, + "false", + multi_ss, + "a", + multi_ss, + "b", + Z_num_i, + "0", + Z_num_l, + "0"), + null); iclient.add(sdoc("id", "3"), null); iclient.commit(); - iclient.add(sdoc("id", "4", cat_s, "A", where_s, "NJ", num_d, "2", sparse_num_d,"-4",num_i, "3", num_is,"0",num_is,"3", num_fs,"0", num_fs,"3", super_s,"spiderman", date,"2003-03-03T03:03:03Z" , multi_ss, "b", Z_num_i, ""+Integer.MIN_VALUE, Z_num_l,Long.MIN_VALUE), null); - iclient.add(sdoc("id", "5", cat_s, "B", where_s, "NJ", num_d, "11", num_i, "7", num_is,"0", num_fs,"0", super_s,"batman" , date,"2001-02-03T01:02:03Z" ,sparse_s,"two", multi_ss, "a"), null); + iclient.add( + sdoc( + "id", + "4", + cat_s, + "A", + where_s, + "NJ", + num_d, + "2", + sparse_num_d, + "-4", + num_i, + "3", + num_is, + "0", + num_is, + "3", + num_fs, + "0", + num_fs, + "3", + super_s, + "spiderman", + date, + "2003-03-03T03:03:03Z", + multi_ss, + "b", + Z_num_i, + "" + Integer.MIN_VALUE, + Z_num_l, + Long.MIN_VALUE), + null); + iclient.add( + sdoc( + "id", + "5", + cat_s, + "B", + where_s, + "NJ", + num_d, + "11", + num_i, + "7", + num_is, + "0", + num_fs, + "0", + super_s, + "batman", + date, + "2001-02-03T01:02:03Z", + sparse_s, + "two", + multi_ss, + "a"), + null); iclient.commit(); - iclient.add(sdoc("id", "6", cat_s, "B", where_s, "NY", num_d, "-5", num_i, "-5", num_is,"-1", num_fs,"-1.5", super_s,"hulk" , date,"2002-03-01T03:02:01Z" , multi_ss, "b", multi_ss, "a", Z_num_i, ""+Integer.MAX_VALUE, Z_num_l,Long.MAX_VALUE), null); + iclient.add( + sdoc( + "id", + "6", + cat_s, + "B", + where_s, + "NY", + num_d, + "-5", + num_i, + "-5", + num_is, + "-1", + num_fs, + "-1.5", + super_s, + "hulk", + date, + "2002-03-01T03:02:01Z", + multi_ss, + "b", + multi_ss, + "a", + Z_num_i, + "" + Integer.MAX_VALUE, + Z_num_l, + 
Long.MAX_VALUE), + null); iclient.commit(); client.commit(); - // test for presence of debugging info ModifiableSolrParams debugP = params(p); - debugP.set("debugQuery","true"); - client.testJQ(params(debugP, "q", "*:*" - , "json.facet", "{catA:{query:{q:'${cat_s}:A'}}, catA2:{query:{query:'${cat_s}:A'}}, catA3:{query:'${cat_s}:A'} }" - ) - , "facets=={ 'count':6, 'catA':{ 'count':2}, 'catA2':{ 'count':2}, 'catA3':{ 'count':2}}" - , "debug/facet-trace==" // just test for presence, not exact structure / values - ); - + debugP.set("debugQuery", "true"); + client.testJQ( + params( + debugP, + "q", + "*:*", + "json.facet", + "{catA:{query:{q:'${cat_s}:A'}}, catA2:{query:{query:'${cat_s}:A'}}, catA3:{query:'${cat_s}:A'} }"), + "facets=={ 'count':6, 'catA':{ 'count':2}, 'catA2':{ 'count':2}, 'catA3':{ 'count':2}}", + "debug/facet-trace==" // just test for presence, not exact structure / values + ); // straight query facets - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{catA:{query:{q:'${cat_s}:A'}}, catA2:{query:{query:'${cat_s}:A'}}, catA3:{query:'${cat_s}:A'} }" - ) - , "facets=={ 'count':6, 'catA':{ 'count':2}, 'catA2':{ 'count':2}, 'catA3':{ 'count':2}}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{catA:{query:{q:'${cat_s}:A'}}, catA2:{query:{query:'${cat_s}:A'}}, catA3:{query:'${cat_s}:A'} }"), + "facets=={ 'count':6, 'catA':{ 'count':2}, 'catA2':{ 'count':2}, 'catA3':{ 'count':2}}"); // nested query facets - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{ catB:{type:query, q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} }}" - ) - , "facets=={ 'count':6, 'catB':{'count':3, 'nj':{'count':2}, 'ny':{'count':1}}}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{ catB:{type:query, q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} }}"), + "facets=={ 'count':6, 'catB':{'count':3, 'nj':{'count':2}, 'ny':{'count':1}}}"); // nested query facets on subset - client.testJQ(params(p, "q", "id:(2 3)" - , "json.facet", "{ catB:{query:{q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} }}}" - ) - , "facets=={ 'count':2, 'catB':{'count':1, 'nj':{'count':1}, 'ny':{'count':0}}}" - ); + client.testJQ( + params( + p, + "q", + "id:(2 3)", + "json.facet", + "{ catB:{query:{q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} }}}"), + "facets=={ 'count':2, 'catB':{'count':1, 'nj':{'count':1}, 'ny':{'count':0}}}"); // nested query facets with stats - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{ catB:{query:{q:'${cat_s}:B', facet:{nj:{query:{q:'${where_s}:NJ'}}, ny:{query:'${where_s}:NY'}} }}}" - ) - , "facets=={ 'count':6, 'catB':{'count':3, 'nj':{'count':2}, 'ny':{'count':1}}}" - ); - + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{ catB:{query:{q:'${cat_s}:B', facet:{nj:{query:{q:'${where_s}:NJ'}}, ny:{query:'${where_s}:NY'}} }}}"), + "facets=={ 'count':6, 'catB':{'count':3, 'nj':{'count':2}, 'ny':{'count':1}}}"); // field/terms facet - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{c1:{field:'${cat_s}'}, c2:{field:{field:'${cat_s}'}}, c3:{${terms} type:terms, field:'${cat_s}'} }" - ) - , "facets=={ 'count':6, " + - "'c1':{ 'buckets':[{ 'val':'B', 'count':3}, { 'val':'A', 'count':2}]}, " + - "'c2':{ 'buckets':[{ 'val':'B', 'count':3}, { 'val':'A', 'count':2}]}, " + - "'c3':{ 'buckets':[{ 'val':'B', 'count':3}, { 'val':'A', 'count':2}]}} " - ); + client.testJQ( + params( + p, + "q", + "*:*", + 
"json.facet", + "{c1:{field:'${cat_s}'}, c2:{field:{field:'${cat_s}'}}, c3:{${terms} type:terms, field:'${cat_s}'} }"), + "facets=={ 'count':6, " + + "'c1':{ 'buckets':[{ 'val':'B', 'count':3}, { 'val':'A', 'count':2}]}, " + + "'c2':{ 'buckets':[{ 'val':'B', 'count':3}, { 'val':'A', 'count':2}]}, " + + "'c3':{ 'buckets':[{ 'val':'B', 'count':3}, { 'val':'A', 'count':2}]}} "); // test mincount - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', mincount:3}}}" - ) - , "facets=={ 'count':6, " + - "'f1':{ 'buckets':[{ 'val':'B', 'count':3}]} } " - ); + client.testJQ( + params(p, "q", "*:*", "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', mincount:3}}}"), + "facets=={ 'count':6, " + "'f1':{ 'buckets':[{ 'val':'B', 'count':3}]} } "); // test default mincount of 1 - client.testJQ(params(p, "q", "id:1" - , "json.facet", "{f1:{terms:'${cat_s}'}}" - ) - , "facets=={ 'count':1, " + - "'f1':{ 'buckets':[{ 'val':'A', 'count':1}]} } " - ); + client.testJQ( + params(p, "q", "id:1", "json.facet", "{f1:{terms:'${cat_s}'}}"), + "facets=={ 'count':1, " + "'f1':{ 'buckets':[{ 'val':'A', 'count':1}]} } "); // test mincount of 0 - need processEmpty for distrib to match up - client.testJQ(params(p, "q", "id:1" - , "json.facet", "{processEmpty:true, f1:{terms:{${terms} field:'${cat_s}', mincount:0}}}" - ) - , "facets=={ 'count':1, " + - "'f1':{ 'buckets':[{ 'val':'A', 'count':1}, { 'val':'B', 'count':0}]} } " - ); + client.testJQ( + params( + p, + "q", + "id:1", + "json.facet", + "{processEmpty:true, f1:{terms:{${terms} field:'${cat_s}', mincount:0}}}"), + "facets=={ 'count':1, " + + "'f1':{ 'buckets':[{ 'val':'A', 'count':1}, { 'val':'B', 'count':0}]} } "); // test mincount of 0 with stats, need processEmpty for distrib to match up - client.testJQ(params(p, "q", "id:1" - , "json.facet", "{processEmpty:true, f1:{terms:{${terms} field:'${cat_s}', mincount:0, allBuckets:true, facet:{n1:'sum(${num_d})'} }}}" - ) - , "facets=={ 'count':1, " + - "'f1':{ allBuckets:{ 'count':1, n1:4.0}, 'buckets':[{ 'val':'A', 'count':1, n1:4.0}, { 'val':'B', 'count':0 /*, n1:0.0 */ }]} } " - ); + client.testJQ( + params( + p, + "q", + "id:1", + "json.facet", + "{processEmpty:true, f1:{terms:{${terms} field:'${cat_s}', mincount:0, allBuckets:true, facet:{n1:'sum(${num_d})'} }}}"), + "facets=={ 'count':1, " + + "'f1':{ allBuckets:{ 'count':1, n1:4.0}, 'buckets':[{ 'val':'A', 'count':1, n1:4.0}, { 'val':'B', 'count':0 /*, n1:0.0 */ }]} } "); // test sorting by other stats - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'sum(${num_d})'} }}" + - " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 asc', facet:{n1:'sum(${num_d})'} }} }" - ) - , "facets=={ 'count':6, " + - " f1:{ 'buckets':[{ val:'A', count:2, n1:6.0 }, { val:'B', count:3, n1:-3.0}]}" + - ", f2:{ 'buckets':[{ val:'B', count:3, n1:-3.0}, { val:'A', count:2, n1:6.0 }]} }" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'sum(${num_d})'} }}" + + " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 asc', facet:{n1:'sum(${num_d})'} }} }"), + "facets=={ 'count':6, " + + " f1:{ 'buckets':[{ val:'A', count:2, n1:6.0 }, { val:'B', count:3, n1:-3.0}]}" + + ", f2:{ 'buckets':[{ val:'B', count:3, n1:-3.0}, { val:'A', count:2, n1:6.0 }]} }"); // test trivial re-sorting by stats - // (there are other more indepth tests of this in doTestPrelimSorting, but this let's us sanity check + // 
(there are other more indepth tests of this in doTestPrelimSorting, but this let's us sanity + // check // small responses with multiple templatized params of diff real types) - client.testJQ(params(p, "q", "*:*", "json.facet" // num_d - , "{f1:{terms:{${terms} field:'${cat_s}', " - + " prelim_sort:'count desc', sort:'n1 desc', facet:{n1:'sum(${num_d})'} }}," - + " f2:{terms:{${terms} field:'${cat_s}', " - + " prelim_sort:'count asc', sort:'n1 asc', facet:{n1:'sum(${num_d})'} }} }" - ) - , "facets=={ 'count':6 " - + ", f1:{ 'buckets':[{ val:'A', count:2, n1:6.0 }, { val:'B', count:3, n1:-3.0}]}" - + ", f2:{ 'buckets':[{ val:'B', count:3, n1:-3.0}, { val:'A', count:2, n1:6.0 }]} }" - ); - client.testJQ(params(p, "q", "*:*", "json.facet" // num_i - , "{f1:{terms:{${terms} field:'${cat_s}', " - + " prelim_sort:'count desc', sort:'n1 desc', facet:{n1:'sum(${num_i})'} }}," - + " f2:{terms:{${terms} field:'${cat_s}', " - + " prelim_sort:'count asc', sort:'n1 asc', facet:{n1:'sum(${num_i})'} }} }" - ) - , "facets=={ 'count':6 " - + ", f1:{ 'buckets':[{ val:'A', count:2, n1:5.0 }, { val:'B', count:3, n1:-3.0}]}" - + ", f2:{ 'buckets':[{ val:'B', count:3, n1:-3.0}, { val:'A', count:2, n1:5.0 }]} }" - ); - - + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet" // num_d + , + "{f1:{terms:{${terms} field:'${cat_s}', " + + " prelim_sort:'count desc', sort:'n1 desc', facet:{n1:'sum(${num_d})'} }}," + + " f2:{terms:{${terms} field:'${cat_s}', " + + " prelim_sort:'count asc', sort:'n1 asc', facet:{n1:'sum(${num_d})'} }} }"), + "facets=={ 'count':6 " + + ", f1:{ 'buckets':[{ val:'A', count:2, n1:6.0 }, { val:'B', count:3, n1:-3.0}]}" + + ", f2:{ 'buckets':[{ val:'B', count:3, n1:-3.0}, { val:'A', count:2, n1:6.0 }]} }"); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet" // num_i + , + "{f1:{terms:{${terms} field:'${cat_s}', " + + " prelim_sort:'count desc', sort:'n1 desc', facet:{n1:'sum(${num_i})'} }}," + + " f2:{terms:{${terms} field:'${cat_s}', " + + " prelim_sort:'count asc', sort:'n1 asc', facet:{n1:'sum(${num_i})'} }} }"), + "facets=={ 'count':6 " + + ", f1:{ 'buckets':[{ val:'A', count:2, n1:5.0 }, { val:'B', count:3, n1:-3.0}]}" + + ", f2:{ 'buckets':[{ val:'B', count:3, n1:-3.0}, { val:'A', count:2, n1:5.0 }]} }"); + // test sorting by other stats and more than one facet - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'sum(${num_d})', n2:'avg(${num_d})'} }}" + - " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 asc' , facet:{n1:'sum(${num_d})', n2:'avg(${num_d})'} }} }" - ) - , "facets=={ 'count':6, " + - " f1:{ 'buckets':[{ val:'A', count:2, n1:6.0 , n2:3.0 }, { val:'B', count:3, n1:-3.0, n2:-1.0}]}" + - ", f2:{ 'buckets':[{ val:'B', count:3, n1:-3.0, n2:-1.0}, { val:'A', count:2, n1:6.0 , n2:3.0 }]} }" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'sum(${num_d})', n2:'avg(${num_d})'} }}" + + " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 asc' , facet:{n1:'sum(${num_d})', n2:'avg(${num_d})'} }} }"), + "facets=={ 'count':6, " + + " f1:{ 'buckets':[{ val:'A', count:2, n1:6.0 , n2:3.0 }, { val:'B', count:3, n1:-3.0, n2:-1.0}]}" + + ", f2:{ 'buckets':[{ val:'B', count:3, n1:-3.0, n2:-1.0}, { val:'A', count:2, n1:6.0 , n2:3.0 }]} }"); // test sorting by other stats - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'min(${num_d})'} }" + - " 
, f2:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'max(${num_d})'} } " + - " , f3:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'unique(${where_s})'} } " + - " , f4:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'hll(${where_s})'} } " + - " , f5:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'variance(${num_d})'} } " + - " , f6:{type:terms, field:${num_d}, limit:1, sort:'x desc', facet:{x:'hll(${num_i})'} } " + // facet on a field that will cause hashing and exercise hll.resize on numeric field - " , f7:{type:terms, field:${cat_s}, limit:2, sort:'x desc', facet:{x:'missing(${sparse_num_d})'} } " + - " , f8:{type:terms, field:${cat_s}, limit:2, sort:'x desc', facet:{x:'countvals(${sparse_num_d})'} } " + - "}" - ) - , "facets=={ 'count':6, " + - " f1:{ 'buckets':[{ val:'A', count:2, x:2.0 }, { val:'B', count:3, x:-9.0}]}" + - ", f2:{ 'buckets':[{ val:'B', count:3, x:11.0 }, { val:'A', count:2, x:4.0 }]} " + - ", f3:{ 'buckets':[{ val:'A', count:2, x:2 }, { val:'B', count:3, x:2 }]} " + - ", f4:{ 'buckets':[{ val:'A', count:2, x:2 }, { val:'B', count:3, x:2 }]} " + - ", f5:{ 'buckets':[{ val:'B', count:3, x:112.0 }, { val:'A', count:2, x:2.0 }]} " + - ", f6:{ buckets:[{ val:-9.0, count:1, x:1 }]} " + - ", f7:{ buckets:[{ val:B, count:3, x:3 },{ val:A, count:2, x:0 }]} " + - ", f8:{ buckets:[{ val:A, count:2, x:2 },{ val:B, count:3, x:0 }]} " + - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'min(${num_d})'} }" + + " , f2:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'max(${num_d})'} } " + + " , f3:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'unique(${where_s})'} } " + + " , f4:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'hll(${where_s})'} } " + + " , f5:{${terms} type:terms, field:'${cat_s}', sort:'x desc', facet:{x:'variance(${num_d})'} } " + + " , f6:{type:terms, field:${num_d}, limit:1, sort:'x desc', facet:{x:'hll(${num_i})'} } " + + // facet on a field that will cause hashing and exercise hll.resize on numeric + // field + " , f7:{type:terms, field:${cat_s}, limit:2, sort:'x desc', facet:{x:'missing(${sparse_num_d})'} } " + + " , f8:{type:terms, field:${cat_s}, limit:2, sort:'x desc', facet:{x:'countvals(${sparse_num_d})'} } " + + "}"), + "facets=={ 'count':6, " + + " f1:{ 'buckets':[{ val:'A', count:2, x:2.0 }, { val:'B', count:3, x:-9.0}]}" + + ", f2:{ 'buckets':[{ val:'B', count:3, x:11.0 }, { val:'A', count:2, x:4.0 }]} " + + ", f3:{ 'buckets':[{ val:'A', count:2, x:2 }, { val:'B', count:3, x:2 }]} " + + ", f4:{ 'buckets':[{ val:'A', count:2, x:2 }, { val:'B', count:3, x:2 }]} " + + ", f5:{ 'buckets':[{ val:'B', count:3, x:112.0 }, { val:'A', count:2, x:2.0 }]} " + + ", f6:{ buckets:[{ val:-9.0, count:1, x:1 }]} " + + ", f7:{ buckets:[{ val:B, count:3, x:3 },{ val:A, count:2, x:0 }]} " + + ", f8:{ buckets:[{ val:A, count:2, x:2 },{ val:B, count:3, x:0 }]} " + + "}"); // test for stdDev and variance of size 1 and 0 - client.testJQ(params(p, "q", "id:1", "json.facet", "{n1:'stddev(${num_d})', n2: 'variance(${num_d})'}") - , "facets=={ 'count':1, " + - " n1:0.0, n2:0.0 }" - ); - client.testJQ(params(p, "q", "id:3", "json.facet", "{n1:'stddev(${num_d})', n2: 'variance(${num_d})'}") - , "facets=={ 'count':1, " + - " n1:0.0, n2:0.0 }" - ); + client.testJQ( + params(p, "q", "id:1", "json.facet", "{n1:'stddev(${num_d})', n2: 'variance(${num_d})'}"), + "facets=={ 
'count':1, " + " n1:0.0, n2:0.0 }"); + client.testJQ( + params(p, "q", "id:3", "json.facet", "{n1:'stddev(${num_d})', n2: 'variance(${num_d})'}"), + "facets=={ 'count':1, " + " n1:0.0, n2:0.0 }"); // test sorting by stat with function - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'avg(add(${num_d},${num_d}))'} }}" + - " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 asc', facet:{n1:'avg(add(${num_d},${num_d}))'} }} }" - ) - , "facets=={ 'count':6, " + - " f1:{ 'buckets':[{ val:'A', count:2, n1:6.0 }, { val:'B', count:3, n1:-2.0}]}" + - ", f2:{ 'buckets':[{ val:'B', count:3, n1:-2.0}, { val:'A', count:2, n1:6.0 }]} }" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'avg(add(${num_d},${num_d}))'} }}" + + " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 asc', facet:{n1:'avg(add(${num_d},${num_d}))'} }} }"), + "facets=={ 'count':6, " + + " f1:{ 'buckets':[{ val:'A', count:2, n1:6.0 }, { val:'B', count:3, n1:-2.0}]}" + + ", f2:{ 'buckets':[{ val:'B', count:3, n1:-2.0}, { val:'A', count:2, n1:6.0 }]} }"); // test sorting by missing stat with function - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'missing(field(${sparse_num_d}))'} }}" + - " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 asc', facet:{n1:'missing(field(${sparse_num_d}))'} }} }" - ) - , "facets=={ 'count':6, " + - " f1:{ 'buckets':[{ val:'B', count:3, n1:3 }, { val:'A', count:2, n1:0}]}" + - ", f2:{ 'buckets':[{ val:'A', count:2, n1:0}, { val:'B', count:3, n1:3 }]} }" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'missing(field(${sparse_num_d}))'} }}" + + " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 asc', facet:{n1:'missing(field(${sparse_num_d}))'} }} }"), + "facets=={ 'count':6, " + + " f1:{ 'buckets':[{ val:'B', count:3, n1:3 }, { val:'A', count:2, n1:0}]}" + + ", f2:{ 'buckets':[{ val:'A', count:2, n1:0}, { val:'B', count:3, n1:3 }]} }"); // test sorting by missing stat with domain query - client.testJQ(params(p, "q", "-id:*" - , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', domain:{query:'*:*'}, sort:'n1 desc', facet:{n1:'missing(field(${sparse_num_d}))'} }}" + - " , f2:{terms:{${terms} field:'${cat_s}', domain:{query:'*:*'}, sort:'n1 asc', facet:{n1:'missing(field(${sparse_num_d}))'} }} }" - ) - , "facets=={ 'count':0, " + - " f1:{ 'buckets':[{ val:'B', count:3, n1:3 }, { val:'A', count:2, n1:0}]}" + - ", f2:{ 'buckets':[{ val:'A', count:2, n1:0}, { val:'B', count:3, n1:3 }]} }" - ); + client.testJQ( + params( + p, + "q", + "-id:*", + "json.facet", + "{f1:{terms:{${terms} field:'${cat_s}', domain:{query:'*:*'}, sort:'n1 desc', facet:{n1:'missing(field(${sparse_num_d}))'} }}" + + " , f2:{terms:{${terms} field:'${cat_s}', domain:{query:'*:*'}, sort:'n1 asc', facet:{n1:'missing(field(${sparse_num_d}))'} }} }"), + "facets=={ 'count':0, " + + " f1:{ 'buckets':[{ val:'B', count:3, n1:3 }, { val:'A', count:2, n1:0}]}" + + ", f2:{ 'buckets':[{ val:'A', count:2, n1:0}, { val:'B', count:3, n1:3 }]} }"); // test with sub-facet aggregation with stat on field - client.testJQ(params(p, "q", "*:*" - , "json.facet", " {f1:{terms:{${terms}, field:'${cat_s}', " + - "facet:{f2:{terms:{${terms}, field:${where_s}, sort:'index asc', " + - "facet:{n1:'missing(${sparse_num_d})'}}}}}}}" - ) - , "facets=={ 
'count':6, " + - " f1:{ 'buckets':[{ val:'B', count:3, f2:{'buckets':[{val:'NJ', count:2, n1:2},{val:'NY', count:1, n1:1}]} }," + - " { val:'A', count:2, f2:{'buckets':[{val:'NJ', count:1, n1:0},{val:'NY', count:1, n1:0}]}}]}" + - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + " {f1:{terms:{${terms}, field:'${cat_s}', " + + "facet:{f2:{terms:{${terms}, field:${where_s}, sort:'index asc', " + + "facet:{n1:'missing(${sparse_num_d})'}}}}}}}"), + "facets=={ 'count':6, " + + " f1:{ 'buckets':[{ val:'B', count:3, f2:{'buckets':[{val:'NJ', count:2, n1:2},{val:'NY', count:1, n1:1}]} }," + + " { val:'A', count:2, f2:{'buckets':[{val:'NJ', count:1, n1:0},{val:'NY', count:1, n1:0}]}}]}" + + "}"); // test with sub-facet aggregation with stat on func - client.testJQ(params(p, "q", "*:*" - , "json.facet", " {f1:{terms:{${terms}, field:'${cat_s}', " + - "facet:{f2:{terms:{${terms}, field:${where_s}, sort:'index asc', " + - "facet:{n1:'missing(field(${sparse_num_d}))'}}}}}}}" - ) - , "facets=={ 'count':6, " + - " f1:{ 'buckets':[{ val:'B', count:3, f2:{'buckets':[{val:'NJ', count:2, n1:2},{val:'NY', count:1, n1:1}]} }," + - " { val:'A', count:2, f2:{'buckets':[{val:'NJ', count:1, n1:0},{val:'NY', count:1, n1:0}]}}]}" + - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + " {f1:{terms:{${terms}, field:'${cat_s}', " + + "facet:{f2:{terms:{${terms}, field:${where_s}, sort:'index asc', " + + "facet:{n1:'missing(field(${sparse_num_d}))'}}}}}}}"), + "facets=={ 'count':6, " + + " f1:{ 'buckets':[{ val:'B', count:3, f2:{'buckets':[{val:'NJ', count:2, n1:2},{val:'NY', count:1, n1:1}]} }," + + " { val:'A', count:2, f2:{'buckets':[{val:'NJ', count:1, n1:0},{val:'NY', count:1, n1:0}]}}]}" + + "}"); // test sorting by countvals stat with function - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 asc', facet:{n1:'countvals(field(${sparse_num_d}))'} }}" + - " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'countvals(field(${sparse_num_d}))'} }} }" - ) - , "facets=={ 'count':6, " + - " f1:{ 'buckets':[{ val:'B', count:3, n1:0 }, { val:'A', count:2, n1:2}]}" + - ", f2:{ 'buckets':[{ val:'A', count:2, n1:2}, { val:'B', count:3, n1:0 }]} }" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 asc', facet:{n1:'countvals(field(${sparse_num_d}))'} }}" + + " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'countvals(field(${sparse_num_d}))'} }} }"), + "facets=={ 'count':6, " + + " f1:{ 'buckets':[{ val:'B', count:3, n1:0 }, { val:'A', count:2, n1:2}]}" + + ", f2:{ 'buckets':[{ val:'A', count:2, n1:2}, { val:'B', count:3, n1:0 }]} }"); // test sorting by countvals stat with domain query - client.testJQ(params(p, "q", "-id:*" - , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', domain:{query:'*:*'}, sort:'n1 asc', facet:{n1:'countvals(field(${sparse_num_d}))'} }}" + - " , f2:{terms:{${terms} field:'${cat_s}', domain:{query:'*:*'}, sort:'n1 desc', facet:{n1:'countvals(field(${sparse_num_d}))'} }} }" - ) - , "facets=={ 'count':0, " + - " f1:{ 'buckets':[{ val:'B', count:3, n1:0 }, { val:'A', count:2, n1:2}]}" + - ", f2:{ 'buckets':[{ val:'A', count:2, n1:2}, { val:'B', count:3, n1:0 }]} }" - ); + client.testJQ( + params( + p, + "q", + "-id:*", + "json.facet", + "{f1:{terms:{${terms} field:'${cat_s}', domain:{query:'*:*'}, sort:'n1 asc', facet:{n1:'countvals(field(${sparse_num_d}))'} }}" + + " , f2:{terms:{${terms} 
field:'${cat_s}', domain:{query:'*:*'}, sort:'n1 desc', facet:{n1:'countvals(field(${sparse_num_d}))'} }} }"), + "facets=={ 'count':0, " + + " f1:{ 'buckets':[{ val:'B', count:3, n1:0 }, { val:'A', count:2, n1:2}]}" + + ", f2:{ 'buckets':[{ val:'A', count:2, n1:2}, { val:'B', count:3, n1:0 }]} }"); // test with sub-facet aggregation with stat on field - client.testJQ(params(p, "q", "*:*" - , "json.facet", " {f1:{terms:{${terms}, field:'${cat_s}', " + - "facet:{f2:{terms:{${terms}, field:${where_s}, sort:'index asc', " + - "facet:{n1:'countvals(${sparse_num_d})'}}}}}}}" - ) - , "facets=={ 'count':6, " + - " f1:{ 'buckets':[{ val:'B', count:3, f2:{'buckets':[{val:'NJ', count:2, n1:0},{val:'NY', count:1, n1:0}]} }," + - " { val:'A', count:2, f2:{'buckets':[{val:'NJ', count:1, n1:1},{val:'NY', count:1, n1:1}]}}]}" + - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + " {f1:{terms:{${terms}, field:'${cat_s}', " + + "facet:{f2:{terms:{${terms}, field:${where_s}, sort:'index asc', " + + "facet:{n1:'countvals(${sparse_num_d})'}}}}}}}"), + "facets=={ 'count':6, " + + " f1:{ 'buckets':[{ val:'B', count:3, f2:{'buckets':[{val:'NJ', count:2, n1:0},{val:'NY', count:1, n1:0}]} }," + + " { val:'A', count:2, f2:{'buckets':[{val:'NJ', count:1, n1:1},{val:'NY', count:1, n1:1}]}}]}" + + "}"); // test with sub-facet aggregation with stat on func - client.testJQ(params(p, "q", "*:*" - , "json.facet", " {f1:{terms:{${terms}, field:'${cat_s}', " + - "facet:{f2:{terms:{${terms}, field:${where_s}, sort:'index asc', " + - "facet:{n1:'countvals(field(${sparse_num_d}))'}}}}}}}" - ) - , "facets=={ 'count':6, " + - " f1:{ 'buckets':[{ val:'B', count:3, f2:{'buckets':[{val:'NJ', count:2, n1:0},{val:'NY', count:1, n1:0}]} }," + - " { val:'A', count:2, f2:{'buckets':[{val:'NJ', count:1, n1:1},{val:'NY', count:1, n1:1}]}}]}" + - "}" - ); - - // facet on numbers to test resize from hashing (may need to be sorting by the metric to test that) - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{" + - " f1:{${terms} type:field, field:${num_is}, facet:{a:'min(${num_i})'}, sort:'a asc' }" + - ",f2:{${terms} type:field, field:${num_is}, facet:{a:'max(${num_i})'}, sort:'a desc' }" + - "}" - ) - , "facets=={count:6 " + - ",f1:{ buckets:[{val:-1,count:2,a:-5},{val:3,count:2,a:-5},{val:-5,count:1,a:2},{val:2,count:1,a:2},{val:0,count:2,a:3} ] } " + - ",f2:{ buckets:[{val:0,count:2,a:7},{val:3,count:2,a:3},{val:-5,count:1,a:2},{val:2,count:1,a:2},{val:-1,count:2,a:-5} ] } " + - "}" - ); - + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + " {f1:{terms:{${terms}, field:'${cat_s}', " + + "facet:{f2:{terms:{${terms}, field:${where_s}, sort:'index asc', " + + "facet:{n1:'countvals(field(${sparse_num_d}))'}}}}}}}"), + "facets=={ 'count':6, " + + " f1:{ 'buckets':[{ val:'B', count:3, f2:{'buckets':[{val:'NJ', count:2, n1:0},{val:'NY', count:1, n1:0}]} }," + + " { val:'A', count:2, f2:{'buckets':[{val:'NJ', count:1, n1:1},{val:'NY', count:1, n1:1}]}}]}" + + "}"); + + // facet on numbers to test resize from hashing (may need to be sorting by the metric to test + // that) + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + " f1:{${terms} type:field, field:${num_is}, facet:{a:'min(${num_i})'}, sort:'a asc' }" + + ",f2:{${terms} type:field, field:${num_is}, facet:{a:'max(${num_i})'}, sort:'a desc' }" + + "}"), + "facets=={count:6 " + + ",f1:{ buckets:[{val:-1,count:2,a:-5},{val:3,count:2,a:-5},{val:-5,count:1,a:2},{val:2,count:1,a:2},{val:0,count:2,a:3} ] } " + + ",f2:{ 
buckets:[{val:0,count:2,a:7},{val:3,count:2,a:3},{val:-5,count:1,a:2},{val:2,count:1,a:2},{val:-1,count:2,a:-5} ] } " + + "}"); // Same thing for dates // test min/max of string field - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{" + - " f3:{${terms} type:field, field:${num_is}, facet:{a:'min(${date})'}, sort:'a desc' }" + - ",f4:{${terms} type:field, field:${num_is}, facet:{a:'max(${date})'}, sort:'a asc' }" + - "}" - ) - , "facets=={count:6 " + - ",f3:{ buckets:[{val:-1,count:2,a:'2002-02-02T02:02:02Z'},{val:3,count:2,a:'2002-02-02T02:02:02Z'},{val:0,count:2,a:'2001-02-03T01:02:03Z'},{val:-5,count:1,a:'2001-01-01T01:01:01Z'},{val:2,count:1,a:'2001-01-01T01:01:01Z'} ] } " + - ",f4:{ buckets:[{val:-5,count:1,a:'2001-01-01T01:01:01Z'},{val:2,count:1,a:'2001-01-01T01:01:01Z'},{val:-1,count:2,a:'2002-03-01T03:02:01Z'},{val:0,count:2,a:'2003-03-03T03:03:03Z'},{val:3,count:2,a:'2003-03-03T03:03:03Z'} ] } " + - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + " f3:{${terms} type:field, field:${num_is}, facet:{a:'min(${date})'}, sort:'a desc' }" + + ",f4:{${terms} type:field, field:${num_is}, facet:{a:'max(${date})'}, sort:'a asc' }" + + "}"), + "facets=={count:6 " + + ",f3:{ buckets:[{val:-1,count:2,a:'2002-02-02T02:02:02Z'},{val:3,count:2,a:'2002-02-02T02:02:02Z'},{val:0,count:2,a:'2001-02-03T01:02:03Z'},{val:-5,count:1,a:'2001-01-01T01:01:01Z'},{val:2,count:1,a:'2001-01-01T01:01:01Z'} ] } " + + ",f4:{ buckets:[{val:-5,count:1,a:'2001-01-01T01:01:01Z'},{val:2,count:1,a:'2001-01-01T01:01:01Z'},{val:-1,count:2,a:'2002-03-01T03:02:01Z'},{val:0,count:2,a:'2003-03-03T03:03:03Z'},{val:3,count:2,a:'2003-03-03T03:03:03Z'} ] } " + + "}"); // test field faceting on date field - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{" + - " f1:{${terms} type:field, field:${date}}" + - ",f2:{${terms} type:field, field:${date} sort:'index asc'}" + - ",f3:{${terms} type:field, field:${date} sort:'index desc'}" + - // ",f4:{${terms} type:field, field:${date}, facet:{x:{type:field,field:${num_is},limit:1}} }" + - "}" - ) - , "facets=={count:6 " + - ",f1:{ buckets:[ {val:'2001-01-01T01:01:01Z', count:1},{val:'2001-02-03T01:02:03Z', count:1},{val:'2002-02-02T02:02:02Z', count:1},{val:'2002-03-01T03:02:01Z', count:1},{val:'2003-03-03T03:03:03Z', count:1} ] }" + - ",f2:{ buckets:[ {val:'2001-01-01T01:01:01Z', count:1},{val:'2001-02-03T01:02:03Z', count:1},{val:'2002-02-02T02:02:02Z', count:1},{val:'2002-03-01T03:02:01Z', count:1},{val:'2003-03-03T03:03:03Z', count:1} ] }" + - ",f3:{ buckets:[ {val:'2003-03-03T03:03:03Z', count:1},{val:'2002-03-01T03:02:01Z', count:1},{val:'2002-02-02T02:02:02Z', count:1},{val:'2001-02-03T01:02:03Z', count:1},{val:'2001-01-01T01:01:01Z', count:1} ] }" + - "}" - ); - + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + " f1:{${terms} type:field, field:${date}}" + + ",f2:{${terms} type:field, field:${date} sort:'index asc'}" + + ",f3:{${terms} type:field, field:${date} sort:'index desc'}" + + + // ",f4:{${terms} type:field, field:${date}, + // facet:{x:{type:field,field:${num_is},limit:1}} }" + + "}"), + "facets=={count:6 " + + ",f1:{ buckets:[ {val:'2001-01-01T01:01:01Z', count:1},{val:'2001-02-03T01:02:03Z', count:1},{val:'2002-02-02T02:02:02Z', count:1},{val:'2002-03-01T03:02:01Z', count:1},{val:'2003-03-03T03:03:03Z', count:1} ] }" + + ",f2:{ buckets:[ {val:'2001-01-01T01:01:01Z', count:1},{val:'2001-02-03T01:02:03Z', count:1},{val:'2002-02-02T02:02:02Z', count:1},{val:'2002-03-01T03:02:01Z', 
count:1},{val:'2003-03-03T03:03:03Z', count:1} ] }" + + ",f3:{ buckets:[ {val:'2003-03-03T03:03:03Z', count:1},{val:'2002-03-01T03:02:01Z', count:1},{val:'2002-02-02T02:02:02Z', count:1},{val:'2001-02-03T01:02:03Z', count:1},{val:'2001-01-01T01:01:01Z', count:1} ] }" + + "}"); // percentiles 0,10,50,90,100 // catA: 2.0 2.2 3.0 3.8 4.0 // catB: -9.0 -8.2 -5.0 7.800000000000001 11.0 // all: -9.0 -7.3999999999999995 2.0 8.200000000000001 11.0 // test sorting by single percentile - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'percentile(${num_d},50)'} }}" + - " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 asc', facet:{n1:'percentile(${num_d},50)'} }} " + - " , f3:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'percentile(${sparse_num_d},50)'} }} " + - "}" - ) - , "facets=={ 'count':6, " + - " f1:{ 'buckets':[{ val:'A', count:2, n1:3.0 }, { val:'B', count:3, n1:-5.0}]}" + - ", f2:{ 'buckets':[{ val:'B', count:3, n1:-5.0}, { val:'A', count:2, n1:3.0 }]}" + - ", f3:{ 'buckets':[{ val:'A', count:2, n1:1.0}, { val:'B', count:3}]}" + - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'percentile(${num_d},50)'} }}" + + " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 asc', facet:{n1:'percentile(${num_d},50)'} }} " + + " , f3:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'percentile(${sparse_num_d},50)'} }} " + + "}"), + "facets=={ 'count':6, " + + " f1:{ 'buckets':[{ val:'A', count:2, n1:3.0 }, { val:'B', count:3, n1:-5.0}]}" + + ", f2:{ 'buckets':[{ val:'B', count:3, n1:-5.0}, { val:'A', count:2, n1:3.0 }]}" + + ", f3:{ 'buckets':[{ val:'A', count:2, n1:1.0}, { val:'B', count:3}]}" + + "}"); // test sorting by multiple percentiles (sort is by first) - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:${cat_s}, sort:'n1 desc', facet:{n1:'percentile(${num_d},50,0,100)'} }}" + - " , f2:{terms:{${terms} field:${cat_s}, sort:'n1 asc', facet:{n1:'percentile(${num_d},50,0,100)'} }} }" - ) - , "facets=={ 'count':6, " + - " f1:{ 'buckets':[{ val:'A', count:2, n1:[3.0,2.0,4.0] }, { val:'B', count:3, n1:[-5.0,-9.0,11.0] }]}" + - ", f2:{ 'buckets':[{ val:'B', count:3, n1:[-5.0,-9.0,11.0]}, { val:'A', count:2, n1:[3.0,2.0,4.0] }]} }" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms} field:${cat_s}, sort:'n1 desc', facet:{n1:'percentile(${num_d},50,0,100)'} }}" + + " , f2:{terms:{${terms} field:${cat_s}, sort:'n1 asc', facet:{n1:'percentile(${num_d},50,0,100)'} }} }"), + "facets=={ 'count':6, " + + " f1:{ 'buckets':[{ val:'A', count:2, n1:[3.0,2.0,4.0] }, { val:'B', count:3, n1:[-5.0,-9.0,11.0] }]}" + + ", f2:{ 'buckets':[{ val:'B', count:3, n1:[-5.0,-9.0,11.0]}, { val:'A', count:2, n1:[3.0,2.0,4.0] }]} }"); // test sorting by count/index order - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'count desc' } }" + - " , f2:{terms:{${terms} field:'${cat_s}', sort:'count asc' } }" + - " , f3:{terms:{${terms} field:'${cat_s}', sort:'index asc' } }" + - " , f4:{terms:{${terms} field:'${cat_s}', sort:'index desc' } }" + - "}" - ) - , "facets=={ count:6 " + - " ,f1:{buckets:[ {val:B,count:3}, {val:A,count:2} ] }" + - " ,f2:{buckets:[ {val:A,count:2}, {val:B,count:3} ] }" + - " ,f3:{buckets:[ {val:A,count:2}, {val:B,count:3} ] }" + - " ,f4:{buckets:[ {val:B,count:3}, {val:A,count:2} ] }" + - "}" - ); 
+ client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms} field:'${cat_s}', sort:'count desc' } }" + + " , f2:{terms:{${terms} field:'${cat_s}', sort:'count asc' } }" + + " , f3:{terms:{${terms} field:'${cat_s}', sort:'index asc' } }" + + " , f4:{terms:{${terms} field:'${cat_s}', sort:'index desc' } }" + + "}"), + "facets=={ count:6 " + + " ,f1:{buckets:[ {val:B,count:3}, {val:A,count:2} ] }" + + " ,f2:{buckets:[ {val:A,count:2}, {val:B,count:3} ] }" + + " ,f3:{buckets:[ {val:A,count:2}, {val:B,count:3} ] }" + + " ,f4:{buckets:[ {val:B,count:3}, {val:A,count:2} ] }" + + "}"); // test sorting by default count/index order - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'count' } }" + - " , f2:{terms:{${terms} field:'${cat_s}', sort:'count asc' } }" + - " , f3:{terms:{${terms} field:'${cat_s}', sort:'index' } }" + - " , f4:{terms:{${terms} field:'${cat_s}', sort:'index desc' } }" + - "}" - ) - , "facets=={ count:6 " + - " ,f1:{buckets:[ {val:B,count:3}, {val:A,count:2} ] }" + - " ,f2:{buckets:[ {val:A,count:2}, {val:B,count:3} ] }" + - " ,f3:{buckets:[ {val:A,count:2}, {val:B,count:3} ] }" + - " ,f4:{buckets:[ {val:B,count:3}, {val:A,count:2} ] }" + - "}" - ); - + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms} field:'${cat_s}', sort:'count' } }" + + " , f2:{terms:{${terms} field:'${cat_s}', sort:'count asc' } }" + + " , f3:{terms:{${terms} field:'${cat_s}', sort:'index' } }" + + " , f4:{terms:{${terms} field:'${cat_s}', sort:'index desc' } }" + + "}"), + "facets=={ count:6 " + + " ,f1:{buckets:[ {val:B,count:3}, {val:A,count:2} ] }" + + " ,f2:{buckets:[ {val:A,count:2}, {val:B,count:3} ] }" + + " ,f3:{buckets:[ {val:A,count:2}, {val:B,count:3} ] }" + + " ,f4:{buckets:[ {val:B,count:3}, {val:A,count:2} ] }" + + "}"); // test tiebreaks when sorting by count - client.testJQ(params(p, "q", "id:1 id:6" - , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'count desc' } }" + - " , f2:{terms:{${terms} field:'${cat_s}', sort:'count asc' } }" + - "}" - ) - , "facets=={ count:2 " + - " ,f1:{buckets:[ {val:A,count:1}, {val:B,count:1} ] }" + - " ,f2:{buckets:[ {val:A,count:1}, {val:B,count:1} ] }" + - "}" - ); + client.testJQ( + params( + p, + "q", + "id:1 id:6", + "json.facet", + "{f1:{terms:{${terms} field:'${cat_s}', sort:'count desc' } }" + + " , f2:{terms:{${terms} field:'${cat_s}', sort:'count asc' } }" + + "}"), + "facets=={ count:2 " + + " ,f1:{buckets:[ {val:A,count:1}, {val:B,count:1} ] }" + + " ,f2:{buckets:[ {val:A,count:1}, {val:B,count:1} ] }" + + "}"); // terms facet with nested query facet - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{cat:{terms:{${terms} field:'${cat_s}', facet:{nj:{query:'${where_s}:NJ'}} } }}" - ) - , "facets=={ 'count':6, " + - "'cat':{ 'buckets':[{ 'val':'B', 'count':3, 'nj':{ 'count':2}}, { 'val':'A', 'count':2, 'nj':{ 'count':1}}]} }" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{cat:{terms:{${terms} field:'${cat_s}', facet:{nj:{query:'${where_s}:NJ'}} } }}"), + "facets=={ 'count':6, " + + "'cat':{ 'buckets':[{ 'val':'B', 'count':3, 'nj':{ 'count':2}}, { 'val':'A', 'count':2, 'nj':{ 'count':1}}]} }"); // terms facet with nested query facet on subset - client.testJQ(params(p, "q", "id:(2 5 4)" - , "json.facet", "{cat:{terms:{${terms} field:'${cat_s}', facet:{nj:{query:'${where_s}:NJ'}} } }}" - ) - , "facets=={ 'count':3, " + - "'cat':{ 'buckets':[{ 'val':'B', 'count':2, 'nj':{ 'count':2}}, { 
'val':'A', 'count':1, 'nj':{ 'count':1}}]} }" - ); + client.testJQ( + params( + p, + "q", + "id:(2 5 4)", + "json.facet", + "{cat:{terms:{${terms} field:'${cat_s}', facet:{nj:{query:'${where_s}:NJ'}} } }}"), + "facets=={ 'count':3, " + + "'cat':{ 'buckets':[{ 'val':'B', 'count':2, 'nj':{ 'count':2}}, { 'val':'A', 'count':1, 'nj':{ 'count':1}}]} }"); // test prefix - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:${super_s}, prefix:s, mincount:0 }}}" // even with mincount=0, we should only see buckets with the prefix - ) - , "facets=={ 'count':6, " + - "'f1':{ 'buckets':[{val:spiderman, count:1}, {val:superman, count:1}]} } " - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms} field:${super_s}, prefix:s, mincount:0 }}}" // even with + // mincount=0, we + // should only see + // buckets with the + // prefix + ), + "facets=={ 'count':6, " + + "'f1':{ 'buckets':[{val:spiderman, count:1}, {val:superman, count:1}]} } "); // test prefix that doesn't exist - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:${super_s}, prefix:ttt, mincount:0 }}}" - ) - , "facets=={ 'count':6, " + - "'f1':{ 'buckets':[]} } " - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms} field:${super_s}, prefix:ttt, mincount:0 }}}"), + "facets=={ 'count':6, " + "'f1':{ 'buckets':[]} } "); // test prefix that doesn't exist at start - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:${super_s}, prefix:aaaaaa, mincount:0 }}}" - ) - , "facets=={ 'count':6, " + - "'f1':{ 'buckets':[]} } " - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms} field:${super_s}, prefix:aaaaaa, mincount:0 }}}"), + "facets=={ 'count':6, " + "'f1':{ 'buckets':[]} } "); // test prefix that doesn't exist at end - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:${super_s}, prefix:zzzzzz, mincount:0 }}}" - ) - , "facets=={ 'count':6, " + - "'f1':{ 'buckets':[]} } " - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms} field:${super_s}, prefix:zzzzzz, mincount:0 }}}"), + "facets=={ 'count':6, " + "'f1':{ 'buckets':[]} } "); // test prefix on where field - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{" + - " f1:{${terms} type:terms, field:${where_s}, prefix:N }" + - ",f2:{${terms} type:terms, field:${where_s}, prefix:NY }" + - ",f3:{${terms} type:terms, field:${where_s}, prefix:NJ }" + - "}" - ) - , "facets=={ 'count':6 " + - ",f1:{ 'buckets':[ {val:NJ,count:3}, {val:NY,count:2} ]}" + - ",f2:{ 'buckets':[ {val:NY,count:2} ]}" + - ",f3:{ 'buckets':[ {val:NJ,count:3} ]}" + - " } " - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + " f1:{${terms} type:terms, field:${where_s}, prefix:N }" + + ",f2:{${terms} type:terms, field:${where_s}, prefix:NY }" + + ",f3:{${terms} type:terms, field:${where_s}, prefix:NJ }" + + "}"), + "facets=={ 'count':6 " + + ",f1:{ 'buckets':[ {val:NJ,count:3}, {val:NY,count:2} ]}" + + ",f2:{ 'buckets':[ {val:NY,count:2} ]}" + + ",f3:{ 'buckets':[ {val:NJ,count:3} ]}" + + " } "); // test prefix on real multi-valued field - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{" + - " f1:{${terms} type:terms, field:${multi_ss}, prefix:A }" + - ",f2:{${terms} type:terms, field:${multi_ss}, prefix:z }" + - ",f3:{${terms} type:terms, field:${multi_ss}, prefix:aa }" + - ",f4:{${terms} type:terms, field:${multi_ss}, 
prefix:bb }" + - ",f5:{${terms} type:terms, field:${multi_ss}, prefix:a }" + - ",f6:{${terms} type:terms, field:${multi_ss}, prefix:b }" + - "}" - ) - , "facets=={ 'count':6 " + - ",f1:{buckets:[]}" + - ",f2:{buckets:[]}" + - ",f3:{buckets:[]}" + - ",f4:{buckets:[]}" + - ",f5:{buckets:[ {val:a,count:3} ]}" + - ",f6:{buckets:[ {val:b,count:3} ]}" + - " } " - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + " f1:{${terms} type:terms, field:${multi_ss}, prefix:A }" + + ",f2:{${terms} type:terms, field:${multi_ss}, prefix:z }" + + ",f3:{${terms} type:terms, field:${multi_ss}, prefix:aa }" + + ",f4:{${terms} type:terms, field:${multi_ss}, prefix:bb }" + + ",f5:{${terms} type:terms, field:${multi_ss}, prefix:a }" + + ",f6:{${terms} type:terms, field:${multi_ss}, prefix:b }" + + "}"), + "facets=={ 'count':6 " + + ",f1:{buckets:[]}" + + ",f2:{buckets:[]}" + + ",f3:{buckets:[]}" + + ",f4:{buckets:[]}" + + ",f5:{buckets:[ {val:a,count:3} ]}" + + ",f6:{buckets:[ {val:b,count:3} ]}" + + " } "); // // missing // // test missing w/ non-existent field - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:${noexist}, missing:true}}}" - ) - , "facets=={ 'count':6, " + - "'f1':{ 'buckets':[], missing:{count:6} } } " - ); + client.testJQ( + params( + p, "q", "*:*", "json.facet", "{f1:{terms:{${terms} field:${noexist}, missing:true}}}"), + "facets=={ 'count':6, " + "'f1':{ 'buckets':[], missing:{count:6} } } "); // test missing - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:${sparse_s}, missing:true }}}" - ) - , "facets=={ 'count':6, " + - "'f1':{ 'buckets':[{val:one, count:1}, {val:two, count:1}], missing:{count:4} } } " - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms} field:${sparse_s}, missing:true }}}"), + "facets=={ 'count':6, " + + "'f1':{ 'buckets':[{val:one, count:1}, {val:two, count:1}], missing:{count:4} } } "); // test missing with stats - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:${sparse_s}, missing:true, facet:{x:'sum(${num_d})'} }}}" - ) - , "facets=={ 'count':6, " + - "'f1':{ 'buckets':[{val:one, count:1, x:4.0}, {val:two, count:1, x:11.0}], missing:{count:4, x:-12.0} } } " - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms} field:${sparse_s}, missing:true, facet:{x:'sum(${num_d})'} }}}"), + "facets=={ 'count':6, " + + "'f1':{ 'buckets':[{val:one, count:1, x:4.0}, {val:two, count:1, x:11.0}], missing:{count:4, x:-12.0} } } "); // test that the missing bucket is not affected by any prefix - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:${sparse_s}, missing:true, prefix:on, facet:{x:'sum(${num_d})'} }}}" - ) - , "facets=={ 'count':6, " + - "'f1':{ 'buckets':[{val:one, count:1, x:4.0}], missing:{count:4, x:-12.0} } } " - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms} field:${sparse_s}, missing:true, prefix:on, facet:{x:'sum(${num_d})'} }}}"), + "facets=={ 'count':6, " + + "'f1':{ 'buckets':[{val:one, count:1, x:4.0}], missing:{count:4, x:-12.0} } } "); // test missing with prefix that doesn't exist - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:${sparse_s}, missing:true, prefix:ppp, facet:{x:'sum(${num_d})'} }}}" - ) - , "facets=={ 'count':6, " + - "'f1':{ 'buckets':[], missing:{count:4, x:-12.0} } } " - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", 
+ "{f1:{terms:{${terms} field:${sparse_s}, missing:true, prefix:ppp, facet:{x:'sum(${num_d})'} }}}"), + "facets=={ 'count':6, " + "'f1':{ 'buckets':[], missing:{count:4, x:-12.0} } } "); // test numBuckets - client.testJQ(params(p, "q", "*:*", "rows", "0", "facet", "true" - , "json.facet", "{f1:{terms:{${terms_method} field:${cat_s}, numBuckets:true, limit:1}}}" // TODO: limit:0 produced an error - ) - , "facets=={ 'count':6, " + - "'f1':{ numBuckets:2, buckets:[{val:B, count:3}]} } " - ); + client.testJQ( + params( + p, + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "json.facet", + "{f1:{terms:{${terms_method} field:${cat_s}, numBuckets:true, limit:1}}}" // TODO: + // limit:0 + // produced an + // error + ), + "facets=={ 'count':6, " + "'f1':{ numBuckets:2, buckets:[{val:B, count:3}]} } "); // prefix should lower numBuckets - client.testJQ(params(p, "q", "*:*", "rows", "0", "facet", "true" - , "json.facet", "{f1:{terms:{${terms} field:${cat_s}, numBuckets:true, prefix:B}}}" - ) - , "facets=={ 'count':6, " + - "'f1':{ numBuckets:1, buckets:[{val:B, count:3}]} } " - ); + client.testJQ( + params( + p, + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "json.facet", + "{f1:{terms:{${terms} field:${cat_s}, numBuckets:true, prefix:B}}}"), + "facets=={ 'count':6, " + "'f1':{ numBuckets:1, buckets:[{val:B, count:3}]} } "); // mincount should not lower numBuckets (since SOLR-10552) - client.testJQ(params(p, "q", "*:*", "rows", "0", "facet", "true" - , "json.facet", "{f1:{terms:{${terms} field:${cat_s}, numBuckets:true, mincount:3}}}" - ) - , "facets=={ 'count':6, " + - "'f1':{ numBuckets:2, buckets:[{val:B, count:3}]} } " - ); + client.testJQ( + params( + p, + "q", + "*:*", + "rows", + "0", + "facet", + "true", + "json.facet", + "{f1:{terms:{${terms} field:${cat_s}, numBuckets:true, mincount:3}}}"), + "facets=={ 'count':6, " + "'f1':{ numBuckets:2, buckets:[{val:B, count:3}]} } "); // basic range facet - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f:{type:range, field:${num_d}, start:-5, end:10, gap:5}}" - ) - , "facets=={count:6, f:{buckets:[ {val:-5.0,count:1}, {val:0.0,count:2}, {val:5.0,count:0} ] } }" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f:{type:range, field:${num_d}, start:-5, end:10, gap:5}}"), + "facets=={count:6, f:{buckets:[ {val:-5.0,count:1}, {val:0.0,count:2}, {val:5.0,count:0} ] } }"); // basic range facet on dates - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f:{type:range, field:${date}, start:'2001-01-01T00:00:00Z', end:'2003-01-01T00:00:00Z', gap:'+1YEAR'}}" - ) - , "facets=={count:6, f:{buckets:[ {val:'2001-01-01T00:00:00Z',count:2}, {val:'2002-01-01T00:00:00Z',count:2}] } }" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f:{type:range, field:${date}, start:'2001-01-01T00:00:00Z', end:'2003-01-01T00:00:00Z', gap:'+1YEAR'}}"), + "facets=={count:6, f:{buckets:[ {val:'2001-01-01T00:00:00Z',count:2}, {val:'2002-01-01T00:00:00Z',count:2}] } }"); // range facet on dates w/ stats - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f:{type:range, field:${date}, start:'2002-01-01T00:00:00Z', end:'2005-01-01T00:00:00Z', gap:'+1YEAR', other:all, facet:{ x:'avg(${num_d})' } } }" - ) - , "facets=={count:6, f:{buckets:[ {val:'2002-01-01T00:00:00Z',count:2,x:-7.0}, {val:'2003-01-01T00:00:00Z',count:1,x:2.0}, {val:'2004-01-01T00:00:00Z',count:0}], before:{count:2,x:7.5}, after:{count:0}, between:{count:3,x:-4.0} } }" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + 
"{f:{type:range, field:${date}, start:'2002-01-01T00:00:00Z', end:'2005-01-01T00:00:00Z', gap:'+1YEAR', other:all, facet:{ x:'avg(${num_d})' } } }"), + "facets=={count:6, f:{buckets:[ {val:'2002-01-01T00:00:00Z',count:2,x:-7.0}, {val:'2003-01-01T00:00:00Z',count:1,x:2.0}, {val:'2004-01-01T00:00:00Z',count:0}], before:{count:2,x:7.5}, after:{count:0}, between:{count:3,x:-4.0} } }"); // basic range facet with "include" params - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f:{range:{field:${num_d}, start:-5, end:10, gap:5, include:upper}}}" - ) - , "facets=={count:6, f:{buckets:[ {val:-5.0,count:0}, {val:0.0,count:2}, {val:5.0,count:0} ] } }" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f:{range:{field:${num_d}, start:-5, end:10, gap:5, include:upper}}}"), + "facets=={count:6, f:{buckets:[ {val:-5.0,count:0}, {val:0.0,count:2}, {val:5.0,count:0} ] } }"); // range facet with sub facets and stats - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f:{range:{field:${num_d}, start:-5, end:10, gap:5, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}" - ) - , "facets=={count:6, f:{buckets:[ {val:-5.0,count:1,x:-5.0,ny:{count:1}}, {val:0.0,count:2,x:5.0,ny:{count:1}}, {val:5.0,count:0 /* ,x:0.0,ny:{count:0} */ } ] } }" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f:{range:{field:${num_d}, start:-5, end:10, gap:5, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}"), + "facets=={count:6, f:{buckets:[ {val:-5.0,count:1,x:-5.0,ny:{count:1}}, {val:0.0,count:2,x:5.0,ny:{count:1}}, {val:5.0,count:0 /* ,x:0.0,ny:{count:0} */ } ] } }"); // range facet with sub facets and stats, with "other:all" - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f:{range:{field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}" - ) - , "facets=={count:6, f:{buckets:[ {val:-5.0,count:1,x:-5.0,ny:{count:1}}, {val:0.0,count:2,x:5.0,ny:{count:1}}, {val:5.0,count:0 /* ,x:0.0,ny:{count:0} */} ]" + - ",before: {count:1,x:-5.0,ny:{count:0}}" + - ",after: {count:1,x:7.0, ny:{count:0}}" + - ",between:{count:3,x:0.0, ny:{count:2}}" + - " } }" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f:{range:{field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}"), + "facets=={count:6, f:{buckets:[ {val:-5.0,count:1,x:-5.0,ny:{count:1}}, {val:0.0,count:2,x:5.0,ny:{count:1}}, {val:5.0,count:0 /* ,x:0.0,ny:{count:0} */} ]" + + ",before: {count:1,x:-5.0,ny:{count:0}}" + + ",after: {count:1,x:7.0, ny:{count:0}}" + + ",between:{count:3,x:0.0, ny:{count:2}}" + + " } }"); // range facet with mincount - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f:{type:range, field:${num_d}, start:-5, end:10, gap:5, other:all, mincount:2, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}" - ) - , "facets=={count:6, f:{buckets:[ {val:0.0,count:2,x:5.0,ny:{count:1}} ]" + - ",before: {count:1,x:-5.0,ny:{count:0}}" + - ",after: {count:1,x:7.0, ny:{count:0}}" + - ",between:{count:3,x:0.0, ny:{count:2}}" + - " } }" - ); - + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f:{type:range, field:${num_d}, start:-5, end:10, gap:5, other:all, mincount:2, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}"), + "facets=={count:6, f:{buckets:[ {val:0.0,count:2,x:5.0,ny:{count:1}} ]" + + ",before: {count:1,x:-5.0,ny:{count:0}}" + + ",after: {count:1,x:7.0, ny:{count:0}}" + + ",between:{count:3,x:0.0, 
ny:{count:2}}" + + " } }"); + // sparse range facet (with sub facets and stats), with "other:all" - client.testJQ(params(p, "q", "*:*", "json.facet", - "{f:{range:{field:${num_d}, start:-5, end:10, gap:1, other:all, "+ - " facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}" - ) - , "facets=={count:6, f:{buckets:[ {val:-5.0,count:1, x:-5.0,ny:{count:1}}, "+ - " {val:-4.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ - " {val:-3.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ - " {val:-2.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ - " {val:-1.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ - " {val: 0.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ - " {val: 1.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ - " {val: 2.0,count:1, x:3.0,ny:{count:0}} , "+ - " {val: 3.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ - " {val: 4.0,count:1, x:2.0,ny:{count:1}} , "+ - " {val: 5.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ - " {val: 6.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ - " {val: 7.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ - " {val: 8.0,count:0 /* ,x:0.0,ny:{count:0} */} ,"+ - " {val: 9.0,count:0 /* ,x:0.0,ny:{count:0} */}"+ - " ]" + - " ,before: {count:1,x:-5.0,ny:{count:0}}" + - " ,after: {count:1,x:7.0, ny:{count:0}}" + - " ,between:{count:3,x:0.0, ny:{count:2}}" + - " } }" - ); - + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f:{range:{field:${num_d}, start:-5, end:10, gap:1, other:all, " + + " facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}"), + "facets=={count:6, f:{buckets:[ {val:-5.0,count:1, x:-5.0,ny:{count:1}}, " + + " {val:-4.0,count:0 /* ,x:0.0,ny:{count:0} */} ," + + " {val:-3.0,count:0 /* ,x:0.0,ny:{count:0} */} ," + + " {val:-2.0,count:0 /* ,x:0.0,ny:{count:0} */} ," + + " {val:-1.0,count:0 /* ,x:0.0,ny:{count:0} */} ," + + " {val: 0.0,count:0 /* ,x:0.0,ny:{count:0} */} ," + + " {val: 1.0,count:0 /* ,x:0.0,ny:{count:0} */} ," + + " {val: 2.0,count:1, x:3.0,ny:{count:0}} , " + + " {val: 3.0,count:0 /* ,x:0.0,ny:{count:0} */} ," + + " {val: 4.0,count:1, x:2.0,ny:{count:1}} , " + + " {val: 5.0,count:0 /* ,x:0.0,ny:{count:0} */} ," + + " {val: 6.0,count:0 /* ,x:0.0,ny:{count:0} */} ," + + " {val: 7.0,count:0 /* ,x:0.0,ny:{count:0} */} ," + + " {val: 8.0,count:0 /* ,x:0.0,ny:{count:0} */} ," + + " {val: 9.0,count:0 /* ,x:0.0,ny:{count:0} */}" + + " ]" + + " ,before: {count:1,x:-5.0,ny:{count:0}}" + + " ,after: {count:1,x:7.0, ny:{count:0}}" + + " ,between:{count:3,x:0.0, ny:{count:2}}" + + " } }"); + // sparse range facet (with sub facets and stats), with "other:all" & mincount==1 - client.testJQ(params(p, "q", "*:*", "json.facet", - "{f:{range:{field:${num_d}, start:-5, end:10, gap:1, other:all, mincount:1, "+ - " facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}" - ) - , "facets=={count:6, f:{buckets:[ {val:-5.0,count:1, x:-5.0,ny:{count:1}}, "+ - " {val: 2.0,count:1, x:3.0,ny:{count:0}} , "+ - " {val: 4.0,count:1, x:2.0,ny:{count:1}} "+ - " ]" + - " ,before: {count:1,x:-5.0,ny:{count:0}}" + - " ,after: {count:1,x:7.0, ny:{count:0}}" + - " ,between:{count:3,x:0.0, ny:{count:2}}" + - " } }" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f:{range:{field:${num_d}, start:-5, end:10, gap:1, other:all, mincount:1, " + + " facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}"), + "facets=={count:6, f:{buckets:[ {val:-5.0,count:1, x:-5.0,ny:{count:1}}, " + + " {val: 2.0,count:1, x:3.0,ny:{count:0}} , " + + " {val: 4.0,count:1, x:2.0,ny:{count:1}} " + + " ]" + + " ,before: {count:1,x:-5.0,ny:{count:0}}" + + " ,after: {count:1,x:7.0, ny:{count:0}}" + + " 
,between:{count:3,x:0.0, ny:{count:2}}" + + " } }"); // range facet with sub facets and stats, with "other:all", on subset - client.testJQ(params(p, "q", "id:(3 4 6)" - , "json.facet", "{f:{range:{field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}" - ) - , "facets=={count:3, f:{buckets:[ {val:-5.0,count:1,x:-5.0,ny:{count:1}}, {val:0.0,count:1,x:3.0,ny:{count:0}}, {val:5.0,count:0 /* ,x:0.0,ny:{count:0} */} ]" + - ",before: {count:0 /* ,x:0.0,ny:{count:0} */ }" + - ",after: {count:0 /* ,x:0.0,ny:{count:0} */}" + - ",between:{count:2,x:-2.0, ny:{count:1}}" + - " } }" - ); + client.testJQ( + params( + p, + "q", + "id:(3 4 6)", + "json.facet", + "{f:{range:{field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}"), + "facets=={count:3, f:{buckets:[ {val:-5.0,count:1,x:-5.0,ny:{count:1}}, {val:0.0,count:1,x:3.0,ny:{count:0}}, {val:5.0,count:0 /* ,x:0.0,ny:{count:0} */} ]" + + ",before: {count:0 /* ,x:0.0,ny:{count:0} */ }" + + ",after: {count:0 /* ,x:0.0,ny:{count:0} */}" + + ",between:{count:2,x:-2.0, ny:{count:1}}" + + " } }"); // range facet with stats on string fields - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f:{type:range, field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ wn:'unique(${where_s})',wh:'hll(${where_s})' } }}" - ) - , "facets=={count:6, f:{buckets:[ {val:-5.0,count:1,wn:1,wh:1}, {val:0.0,count:2,wn:2,wh:2}, {val:5.0,count:0}]" + - " ,before:{count:1,wn:1,wh:1}" + - " ,after:{count:1,wn:1,wh:1} " + - " ,between:{count:3,wn:2,wh:2} " + - " } }" - ); - - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f:{type:range, field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ wmin:'min(${where_s})', wmax:'max(${where_s})' } }}" - ) - , "facets=={count:6, f:{buckets:[ {val:-5.0,count:1,wmin:NY,wmax:NY}, {val:0.0,count:2,wmin:NJ,wmax:NY}, {val:5.0,count:0}]" + - " ,before:{count:1,wmin:NJ,wmax:NJ}" + - " ,after:{count:1,wmin:NJ,wmax:NJ} " + - " ,between:{count:3,wmin:NJ,wmax:NY} " + - " } }" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f:{type:range, field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ wn:'unique(${where_s})',wh:'hll(${where_s})' } }}"), + "facets=={count:6, f:{buckets:[ {val:-5.0,count:1,wn:1,wh:1}, {val:0.0,count:2,wn:2,wh:2}, {val:5.0,count:0}]" + + " ,before:{count:1,wn:1,wh:1}" + + " ,after:{count:1,wn:1,wh:1} " + + " ,between:{count:3,wn:2,wh:2} " + + " } }"); + + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f:{type:range, field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ wmin:'min(${where_s})', wmax:'max(${where_s})' } }}"), + "facets=={count:6, f:{buckets:[ {val:-5.0,count:1,wmin:NY,wmax:NY}, {val:0.0,count:2,wmin:NJ,wmax:NY}, {val:5.0,count:0}]" + + " ,before:{count:1,wmin:NJ,wmax:NJ}" + + " ,after:{count:1,wmin:NJ,wmax:NJ} " + + " ,between:{count:3,wmin:NJ,wmax:NY} " + + " } }"); // stats at top level - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{ sum1:'sum(${num_d})', sumsq1:'sumsq(${num_d})', avg1:'avg(${num_d})', avg2:'avg(def(${num_d},0))', mind:'min(${num_d})', maxd:'max(${num_d})'" + - ", numwhere:'unique(${where_s})', unique_num_i:'unique(${num_i})', unique_num_d:'unique(${num_d})', unique_date:'unique(${date})'" + - ", where_hll:'hll(${where_s})', hll_num_i:'hll(${num_i})', hll_num_d:'hll(${num_d})', hll_date:'hll(${date})'" + - ", med:'percentile(${num_d},50)', perc:'percentile(${num_d},0,50.0,100)', 
variance:'variance(${num_d})', stddev:'stddev(${num_d})'" + - ", mini:'min(${num_i})', maxi:'max(${num_i})', missing:'missing(${sparse_num_d})', vals:'countvals(${sparse_num_d})'" + - " }" - ) - , "facets=={ 'count':6, " + - "sum1:3.0, sumsq1:247.0, avg1:0.6, avg2:0.5, mind:-9.0, maxd:11.0" + - ", numwhere:2, unique_num_i:4, unique_num_d:5, unique_date:5" + - ", where_hll:2, hll_num_i:4, hll_num_d:5, hll_date:5" + - ", med:2.0, perc:[-9.0,2.0,11.0], variance:61.3, stddev:7.829431652425353" + - ", mini:-5, maxi:7, missing:4, vals:2" + - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{ sum1:'sum(${num_d})', sumsq1:'sumsq(${num_d})', avg1:'avg(${num_d})', avg2:'avg(def(${num_d},0))', mind:'min(${num_d})', maxd:'max(${num_d})'" + + ", numwhere:'unique(${where_s})', unique_num_i:'unique(${num_i})', unique_num_d:'unique(${num_d})', unique_date:'unique(${date})'" + + ", where_hll:'hll(${where_s})', hll_num_i:'hll(${num_i})', hll_num_d:'hll(${num_d})', hll_date:'hll(${date})'" + + ", med:'percentile(${num_d},50)', perc:'percentile(${num_d},0,50.0,100)', variance:'variance(${num_d})', stddev:'stddev(${num_d})'" + + ", mini:'min(${num_i})', maxi:'max(${num_i})', missing:'missing(${sparse_num_d})', vals:'countvals(${sparse_num_d})'" + + " }"), + "facets=={ 'count':6, " + + "sum1:3.0, sumsq1:247.0, avg1:0.6, avg2:0.5, mind:-9.0, maxd:11.0" + + ", numwhere:2, unique_num_i:4, unique_num_d:5, unique_date:5" + + ", where_hll:2, hll_num_i:4, hll_num_d:5, hll_date:5" + + ", med:2.0, perc:[-9.0,2.0,11.0], variance:61.3, stddev:7.829431652425353" + + ", mini:-5, maxi:7, missing:4, vals:2" + + "}"); // stats at top level on multi-valued fields - client.testJQ(params(p, "q", "*:*", "myfield", "${multi_ss}" - , "json.facet", "{ sum1:'sum(${num_fs})', sumsq1:'sumsq(${num_fs})', avg1:'avg(${num_fs})', mind:'min(${num_fs})', maxd:'max(${num_fs})'" + - ", mini:'min(${num_is})', maxi:'max(${num_is})', mins:'min(${multi_ss})', maxs:'max(${multi_ss})'" + - ", stddev:'stddev(${num_fs})', variance:'variance(${num_fs})', median:'percentile(${num_fs}, 50)'" + - ", perc:'percentile(${num_fs}, 0,75,100)', maxss:'max($multi_ss)'" + - " }" - ) - , "facets=={ 'count':6, " + - "sum1:0.0, sumsq1:51.5, avg1:0.0, mind:-5.0, maxd:3.0" + - ", mini:-5, maxi:3, mins:'a', maxs:'b'" + - ", stddev:2.712405363721075, variance:7.3571428571, median:0.0, perc:[-5.0,2.25,3.0], maxss:'b'" + - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "myfield", + "${multi_ss}", + "json.facet", + "{ sum1:'sum(${num_fs})', sumsq1:'sumsq(${num_fs})', avg1:'avg(${num_fs})', mind:'min(${num_fs})', maxd:'max(${num_fs})'" + + ", mini:'min(${num_is})', maxi:'max(${num_is})', mins:'min(${multi_ss})', maxs:'max(${multi_ss})'" + + ", stddev:'stddev(${num_fs})', variance:'variance(${num_fs})', median:'percentile(${num_fs}, 50)'" + + ", perc:'percentile(${num_fs}, 0,75,100)', maxss:'max($multi_ss)'" + + " }"), + "facets=={ 'count':6, " + + "sum1:0.0, sumsq1:51.5, avg1:0.0, mind:-5.0, maxd:3.0" + + ", mini:-5, maxi:3, mins:'a', maxs:'b'" + + ", stddev:2.712405363721075, variance:7.3571428571, median:0.0, perc:[-5.0,2.25,3.0], maxss:'b'" + + "}"); // test sorting by multi-valued - client.testJQ(params(p, "q", "*:*", "my_field", "${num_is}" - , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'avg($my_field)'} }}" + - " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 asc', facet:{n1:'avg($my_field)'} }} }" - ) - , "facets=={ 'count':6, " + - " f1:{ 'buckets':[{ val:'B', count:3, n1: 0.25}, { val:'A', 
count:2, n1:0.0}]}" + - ", f2:{ 'buckets':[{ val:'A', count:2, n1:0.0}, { val:'B', count:3, n1:0.25 }]} }" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "my_field", + "${num_is}", + "json.facet", + "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'avg($my_field)'} }}" + + " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 asc', facet:{n1:'avg($my_field)'} }} }"), + "facets=={ 'count':6, " + + " f1:{ 'buckets':[{ val:'B', count:3, n1: 0.25}, { val:'A', count:2, n1:0.0}]}" + + ", f2:{ 'buckets':[{ val:'A', count:2, n1:0.0}, { val:'B', count:3, n1:0.25 }]} }"); // test sorting by percentile - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 asc', facet:{n1:'percentile(${num_is}, 50)'} }}" + - " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'percentile(${num_is}, 50)'} }} }" - ) - , "facets=={ 'count':6, " + - " f1:{ 'buckets':[{ val:'B', count:3, n1: -0.50}, { val:'A', count:2, n1:1.0}]}" + - ", f2:{ 'buckets':[{ val:'A', count:2, n1:1.0}, { val:'B', count:3, n1:-0.50 }]} }" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms} field:'${cat_s}', sort:'n1 asc', facet:{n1:'percentile(${num_is}, 50)'} }}" + + " , f2:{terms:{${terms} field:'${cat_s}', sort:'n1 desc', facet:{n1:'percentile(${num_is}, 50)'} }} }"), + "facets=={ 'count':6, " + + " f1:{ 'buckets':[{ val:'B', count:3, n1: -0.50}, { val:'A', count:2, n1:1.0}]}" + + ", f2:{ 'buckets':[{ val:'A', count:2, n1:1.0}, { val:'B', count:3, n1:-0.50 }]} }"); // test sorting by multi-valued field with domain query - client.testJQ(params(p, "q", "-id:*" - , "json.facet", "{f1:{terms:{${terms} field:'${cat_s}', domain:{query:'*:*'}, sort:'n1 desc', facet:{n1:'sum(${num_is})'} }}" + - " , f2:{terms:{${terms} field:'${cat_s}', domain:{query:'*:*'}, sort:'n1 asc', facet:{n1:'sum(${num_is})'} }} }" - ) - , "facets=={ 'count':0, " + - " f1:{ 'buckets':[{ val:'B', count:3, n1:1.0 }, { val:'A', count:2, n1:0.0}]}" + - ", f2:{ 'buckets':[{ val:'A', count:2, n1:0.0}, { val:'B', count:3, n1:1.0 }]} }" - ); - - client.testJQ(params(p, "q", "*:*" - , "json.facet", " {f1:{terms:{${terms}, field:'${cat_s}', " + - "facet:{f2:{terms:{${terms}, field:${where_s}, sort:'index asc', " + - "facet:{n1:'min(${multi_ss})'}}}}}}}" - ) - , "facets=={ 'count':6, " + - " f1:{ 'buckets':[{ val:'B', count:3, f2:{'buckets':[{val:'NJ', count:2, n1:'a'},{val:'NY', count:1, n1:'a'}]} }," + - " { val:'A', count:2, f2:{'buckets':[{val:'NJ', count:1, n1:'b'},{val:'NY', count:1}]}}]}" + - "}" - ); - - client.testJQ(params(p, "q", "*:*" - , "json.facet", " {f1:{terms:{${terms}, field:'${cat_s}', " + - "facet:{f2:{terms:{${terms}, field:${where_s}, sort:'index asc', " + - "facet:{n1:'max(${multi_ss})'}}}}}}}" - ) - , "facets=={ 'count':6, " + - " f1:{ 'buckets':[{ val:'B', count:3, f2:{'buckets':[{val:'NJ', count:2, n1:'b'},{val:'NY', count:1, n1:'b'}]} }," + - " { val:'A', count:2, f2:{'buckets':[{val:'NJ', count:1, n1:'b'},{val:'NY', count:1}]}}]}" + - "}" - ); + client.testJQ( + params( + p, + "q", + "-id:*", + "json.facet", + "{f1:{terms:{${terms} field:'${cat_s}', domain:{query:'*:*'}, sort:'n1 desc', facet:{n1:'sum(${num_is})'} }}" + + " , f2:{terms:{${terms} field:'${cat_s}', domain:{query:'*:*'}, sort:'n1 asc', facet:{n1:'sum(${num_is})'} }} }"), + "facets=={ 'count':0, " + + " f1:{ 'buckets':[{ val:'B', count:3, n1:1.0 }, { val:'A', count:2, n1:0.0}]}" + + ", f2:{ 'buckets':[{ val:'A', count:2, n1:0.0}, { val:'B', count:3, n1:1.0 }]} }"); + + 
client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + " {f1:{terms:{${terms}, field:'${cat_s}', " + + "facet:{f2:{terms:{${terms}, field:${where_s}, sort:'index asc', " + + "facet:{n1:'min(${multi_ss})'}}}}}}}"), + "facets=={ 'count':6, " + + " f1:{ 'buckets':[{ val:'B', count:3, f2:{'buckets':[{val:'NJ', count:2, n1:'a'},{val:'NY', count:1, n1:'a'}]} }," + + " { val:'A', count:2, f2:{'buckets':[{val:'NJ', count:1, n1:'b'},{val:'NY', count:1}]}}]}" + + "}"); + + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + " {f1:{terms:{${terms}, field:'${cat_s}', " + + "facet:{f2:{terms:{${terms}, field:${where_s}, sort:'index asc', " + + "facet:{n1:'max(${multi_ss})'}}}}}}}"), + "facets=={ 'count':6, " + + " f1:{ 'buckets':[{ val:'B', count:3, f2:{'buckets':[{val:'NJ', count:2, n1:'b'},{val:'NY', count:1, n1:'b'}]} }," + + " { val:'A', count:2, f2:{'buckets':[{val:'NJ', count:1, n1:'b'},{val:'NY', count:1}]}}]}" + + "}"); // stats at top level, no matches - client.testJQ(params(p, "q", "id:DOESNOTEXIST" - , "json.facet", "{ sum1:'sum(${num_d})', sumsq1:'sumsq(${num_d})', avg1:'avg(${num_d})', min1:'min(${num_d})', max1:'max(${num_d})'" + - ", numwhere:'unique(${where_s})', unique_num_i:'unique(${num_i})', unique_num_d:'unique(${num_d})', unique_date:'unique(${date})'" + - ", where_hll:'hll(${where_s})', hll_num_i:'hll(${num_i})', hll_num_d:'hll(${num_d})', hll_date:'hll(${date})'" + - ", med:'percentile(${num_d},50)', perc:'percentile(${num_d},0,50.0,100)', variance:'variance(${num_d})', stddev:'stddev(${num_d})' }" - ) - , "facets=={count:0 " + - "\n// ,sum1:0.0, sumsq1:0.0, avg1:0.0, min1:'NaN', max1:'NaN', numwhere:0 \n" + - " }" - ); + client.testJQ( + params( + p, + "q", + "id:DOESNOTEXIST", + "json.facet", + "{ sum1:'sum(${num_d})', sumsq1:'sumsq(${num_d})', avg1:'avg(${num_d})', min1:'min(${num_d})', max1:'max(${num_d})'" + + ", numwhere:'unique(${where_s})', unique_num_i:'unique(${num_i})', unique_num_d:'unique(${num_d})', unique_date:'unique(${date})'" + + ", where_hll:'hll(${where_s})', hll_num_i:'hll(${num_i})', hll_num_d:'hll(${num_d})', hll_date:'hll(${date})'" + + ", med:'percentile(${num_d},50)', perc:'percentile(${num_d},0,50.0,100)', variance:'variance(${num_d})', stddev:'stddev(${num_d})' }"), + "facets=={count:0 " + + "\n// ,sum1:0.0, sumsq1:0.0, avg1:0.0, min1:'NaN', max1:'NaN', numwhere:0 \n" + + " }"); // stats at top level, matching documents, but no values in the field // NOTE: this represents the current state of what is returned, not the ultimate desired state. - client.testJQ(params(p, "q", "id:3" - , "json.facet", "{ sum1:'sum(${num_d})', sumsq1:'sumsq(${num_d})', avg1:'avg(${num_d})', min1:'min(${num_d})', max1:'max(${num_d})'" + - ", numwhere:'unique(${where_s})', unique_num_i:'unique(${num_i})', unique_num_d:'unique(${num_d})', unique_date:'unique(${date})'" + - ", where_hll:'hll(${where_s})', hll_num_i:'hll(${num_i})', hll_num_d:'hll(${num_d})', hll_date:'hll(${date})'" + - ", med:'percentile(${num_d},50)', perc:'percentile(${num_d},0,50.0,100)', variance:'variance(${num_d})', stddev:'stddev(${num_d})' }" - ) - , "facets=={count:1 " + - ",sum1:0.0," + - " sumsq1:0.0," + - " avg1:0.0," + // TODO: undesirable. omit? 
+ client.testJQ( + params( + p, + "q", + "id:3", + "json.facet", + "{ sum1:'sum(${num_d})', sumsq1:'sumsq(${num_d})', avg1:'avg(${num_d})', min1:'min(${num_d})', max1:'max(${num_d})'" + + ", numwhere:'unique(${where_s})', unique_num_i:'unique(${num_i})', unique_num_d:'unique(${num_d})', unique_date:'unique(${date})'" + + ", where_hll:'hll(${where_s})', hll_num_i:'hll(${num_i})', hll_num_d:'hll(${num_d})', hll_date:'hll(${date})'" + + ", med:'percentile(${num_d},50)', perc:'percentile(${num_d},0,50.0,100)', variance:'variance(${num_d})', stddev:'stddev(${num_d})' }"), + "facets=={count:1 " + + ",sum1:0.0," + + " sumsq1:0.0," + + " avg1:0.0," + + // TODO: undesirable. omit? // " min1:'NaN'," + // " max1:'NaN'," + - " numwhere:0," + - " unique_num_i:0," + - " unique_num_d:0," + - " unique_date:0," + - " where_hll:0," + - " hll_num_i:0," + - " hll_num_d:0," + - " hll_date:0," + - " variance:0.0," + - " stddev:0.0" + - " }" - ); + " numwhere:0," + + " unique_num_i:0," + + " unique_num_d:0," + + " unique_date:0," + + " where_hll:0," + + " hll_num_i:0," + + " hll_num_d:0," + + " hll_date:0," + + " variance:0.0," + + " stddev:0.0" + + " }"); // // tests on a multi-valued field with actual multiple values, just to ensure that we are // using a multi-valued method for the rest of the tests when appropriate. // - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{cat:{terms:{${terms} field:'${multi_ss}', facet:{nj:{query:'${where_s}:NJ'}} } }}" - ) - , "facets=={ 'count':6, " + - "'cat':{ 'buckets':[{ 'val':'a', 'count':3, 'nj':{ 'count':2}}, { 'val':'b', 'count':3, 'nj':{ 'count':2}}]} }" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{cat:{terms:{${terms} field:'${multi_ss}', facet:{nj:{query:'${where_s}:NJ'}} } }}"), + "facets=={ 'count':6, " + + "'cat':{ 'buckets':[{ 'val':'a', 'count':3, 'nj':{ 'count':2}}, { 'val':'b', 'count':3, 'nj':{ 'count':2}}]} }"); // test unique on multi-valued field - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{" + - "x:'unique(${multi_ss})'" + - ",z:'missing(${multi_ss})'" + - ",z1:'missing(${num_is})'" + - ",v:'countvals(${multi_ss})'" + - ",v1:'countvals(${num_is})'" + - ",y:{query:{q:'id:2', facet:{x:'unique(${multi_ss})'} }} " + - ",x2:'hll(${multi_ss})'" + - ",y2:{query:{q:'id:2', facet:{x:'hll(${multi_ss})'} }} " + - " }" - ) - , "facets=={count:6 " + - ",x:2" + - ",z:2" + - ",z1:1" + - ",v:6" + - ",v1:8" + - ",y:{count:1, x:2}" + // single document should yield 2 unique values - ",x2:2" + - ",y2:{count:1, x:2}" + // single document should yield 2 unique values - " }" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + "x:'unique(${multi_ss})'" + + ",z:'missing(${multi_ss})'" + + ",z1:'missing(${num_is})'" + + ",v:'countvals(${multi_ss})'" + + ",v1:'countvals(${num_is})'" + + ",y:{query:{q:'id:2', facet:{x:'unique(${multi_ss})'} }} " + + ",x2:'hll(${multi_ss})'" + + ",y2:{query:{q:'id:2', facet:{x:'hll(${multi_ss})'} }} " + + " }"), + "facets=={count:6 " + + ",x:2" + + ",z:2" + + ",z1:1" + + ",v:6" + + ",v1:8" + + ",y:{count:1, x:2}" + + // single document should yield 2 unique values + ",x2:2" + + ",y2:{count:1, x:2}" + + // single document should yield 2 unique values + " }"); // test allBucket multi-valued - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{x:{terms:{${terms} field:'${multi_ss}',allBuckets:true}}}" - ) - , "facets=={ count:6, " + - "x:{ buckets:[{val:a, count:3}, {val:b, count:3}] , allBuckets:{count:6} } }" - ); - - // allBuckets for multi-valued field with stats. 
This can sometimes take a different path of adding complete DocSets to the Acc + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{x:{terms:{${terms} field:'${multi_ss}',allBuckets:true}}}"), + "facets=={ count:6, " + + "x:{ buckets:[{val:a, count:3}, {val:b, count:3}] , allBuckets:{count:6} } }"); + + // allBuckets for multi-valued field with stats. This can sometimes take a different path of + // adding complete DocSets to the Acc // also test limit:0 - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{" + - " f0:{${terms_method} type:terms, field:${multi_ss}, allBuckets:true, limit:0} " + - ",f1:{${terms_method} type:terms, field:${multi_ss}, allBuckets:true, limit:0, offset:1} " + // offset with 0 limit - ",f2:{${terms_method} type:terms, field:${multi_ss}, allBuckets:true, limit:0, facet:{x:'sum(${num_d})'}, sort:'x desc' } " + - ",f3:{${terms_method} type:terms, field:${multi_ss}, allBuckets:true, limit:0, missing:true, facet:{x:'sum(${num_d})', y:'avg(${num_d})'}, sort:'x desc' } " + - "}" - ) - , "facets=={ 'count':6, " + - " f0:{allBuckets:{count:6}, buckets:[]}" + - ",f1:{allBuckets:{count:6}, buckets:[]}" + - ",f2:{allBuckets:{count:6, x:-15.0}, buckets:[]} " + - ",f3:{allBuckets:{count:6, x:-15.0, y:-2.5}, buckets:[], missing:{count:2, x:4.0, y:4.0} }} " + - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + " f0:{${terms_method} type:terms, field:${multi_ss}, allBuckets:true, limit:0} " + + ",f1:{${terms_method} type:terms, field:${multi_ss}, allBuckets:true, limit:0, offset:1} " + + // offset with 0 limit + ",f2:{${terms_method} type:terms, field:${multi_ss}, allBuckets:true, limit:0, facet:{x:'sum(${num_d})'}, sort:'x desc' } " + + ",f3:{${terms_method} type:terms, field:${multi_ss}, allBuckets:true, limit:0, missing:true, facet:{x:'sum(${num_d})', y:'avg(${num_d})'}, sort:'x desc' } " + + "}"), + "facets=={ 'count':6, " + + " f0:{allBuckets:{count:6}, buckets:[]}" + + ",f1:{allBuckets:{count:6}, buckets:[]}" + + ",f2:{allBuckets:{count:6, x:-15.0}, buckets:[]} " + + ",f3:{allBuckets:{count:6, x:-15.0, y:-2.5}, buckets:[], missing:{count:2, x:4.0, y:4.0} }} " + + "}"); // allBuckets with numeric field with stats. 
// also test limit:0 - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{" + - " f0:{${terms_method} type:terms, field:${num_i}, allBuckets:true, limit:0} " + - ",f1:{${terms_method} type:terms, field:${num_i}, allBuckets:true, limit:0, offset:1} " + // offset with 0 limit - ",f2:{${terms_method} type:terms, field:${num_i}, allBuckets:true, limit:0, facet:{x:'sum(${num_d})'}, sort:'x desc' } " + - "}" - ) - , "facets=={ 'count':6, " + - " f0:{allBuckets:{count:5}, buckets:[]}" + - ",f1:{allBuckets:{count:5}, buckets:[]}" + - ",f2:{allBuckets:{count:5, x:3.0}, buckets:[]} " + - "}" - ); - + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + " f0:{${terms_method} type:terms, field:${num_i}, allBuckets:true, limit:0} " + + ",f1:{${terms_method} type:terms, field:${num_i}, allBuckets:true, limit:0, offset:1} " + + // offset with 0 limit + ",f2:{${terms_method} type:terms, field:${num_i}, allBuckets:true, limit:0, facet:{x:'sum(${num_d})'}, sort:'x desc' } " + + "}"), + "facets=={ 'count':6, " + + " f0:{allBuckets:{count:5}, buckets:[]}" + + ",f1:{allBuckets:{count:5}, buckets:[]}" + + ",f2:{allBuckets:{count:5, x:3.0}, buckets:[]} " + + "}"); ////////////////////////////////////////////////////////////////////////////////////////////////////////// // test converting legacy facets // test mincount - client.testJQ(params(p, "q", "*:*" + client.testJQ( + params( + p, + "q", + "*:*" // , "json.facet", "{f1:{terms:{field:'${cat_s}', mincount:3}}}" - , "facet","true", "facet.version", "2", "facet.field","{!key=f1}${cat_s}", "facet.mincount","3" - ) - , "facets=={ 'count':6, " + - "'f1':{ 'buckets':[{ 'val':'B', 'count':3}]} } " - ); + , + "facet", + "true", + "facet.version", + "2", + "facet.field", + "{!key=f1}${cat_s}", + "facet.mincount", + "3"), + "facets=={ 'count':6, " + "'f1':{ 'buckets':[{ 'val':'B', 'count':3}]} } "); // test prefix - client.testJQ(params(p, "q", "*:*" - // , "json.facet", "{f1:{terms:{field:${super_s}, prefix:s, mincount:0 }}}" // even with mincount=0, we should only see buckets with the prefix - , "facet","true", "facet.version", "2", "facet.field","{!key=f1}${super_s}", "facet.prefix","s", "facet.mincount","0" - ) - , "facets=={ 'count':6, " + - "'f1':{ 'buckets':[{val:spiderman, count:1}, {val:superman, count:1}]} } " - ); + client.testJQ( + params( + p, + "q", + "*:*" + // , "json.facet", "{f1:{terms:{field:${super_s}, prefix:s, mincount:0 }}}" // even + // with mincount=0, we should only see buckets with the prefix + , + "facet", + "true", + "facet.version", + "2", + "facet.field", + "{!key=f1}${super_s}", + "facet.prefix", + "s", + "facet.mincount", + "0"), + "facets=={ 'count':6, " + + "'f1':{ 'buckets':[{val:spiderman, count:1}, {val:superman, count:1}]} } "); // range facet with sub facets and stats - client.testJQ(params(p, "q", "*:*" - // , "json.facet", "{f:{range:{field:${num_d}, start:-5, end:10, gap:5, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}" - , "facet","true", "facet.version", "2", "facet.range","{!key=f}${num_d}", "facet.range.start","-5", "facet.range.end","10", "facet.range.gap","5" - , "f.f.facet.stat","x:sum(${num_i})", "subfacet.f.query","{!key=ny}${where_s}:NY" - - ) - , "facets=={count:6, f:{buckets:[ {val:-5.0,count:1,x:-5.0,ny:{count:1}}, {val:0.0,count:2,x:5.0,ny:{count:1}}, {val:5.0,count:0 /* ,x:0.0,ny:{count:0} */ } ] } }" - ); + client.testJQ( + params( + p, + "q", + "*:*" + // , "json.facet", "{f:{range:{field:${num_d}, start:-5, end:10, gap:5, facet:{ + // x:'sum(${num_i})', 
ny:{query:'${where_s}:NY'}} }}}" + , + "facet", + "true", + "facet.version", + "2", + "facet.range", + "{!key=f}${num_d}", + "facet.range.start", + "-5", + "facet.range.end", + "10", + "facet.range.gap", + "5", + "f.f.facet.stat", + "x:sum(${num_i})", + "subfacet.f.query", + "{!key=ny}${where_s}:NY"), + "facets=={count:6, f:{buckets:[ {val:-5.0,count:1,x:-5.0,ny:{count:1}}, {val:0.0,count:2,x:5.0,ny:{count:1}}, {val:5.0,count:0 /* ,x:0.0,ny:{count:0} */ } ] } }"); // test sorting by stat - client.testJQ(params(p, "q", "*:*" - // , "json.facet", "{f1:{terms:{field:'${cat_s}', sort:'n1 desc', facet:{n1:'sum(${num_d})'} }}" + + client.testJQ( + params( + p, + "q", + "*:*" + // , "json.facet", "{f1:{terms:{field:'${cat_s}', sort:'n1 desc', + // facet:{n1:'sum(${num_d})'} }}" + // " , f2:{terms:{field:'${cat_s}', sort:'n1 asc', facet:{n1:'sum(${num_d})'} }} }" - , "facet","true", "facet.version", "2", "facet.field","{!key=f1}${cat_s}", "f.f1.facet.sort","n1 desc", "facet.stat","n1:sum(${num_d})" - , "facet.field","{!key=f2}${cat_s}", "f.f1.facet.sort","n1 asc" - ) - , "facets=={ 'count':6, " + - " f1:{ 'buckets':[{ val:'A', count:2, n1:6.0 }, { val:'B', count:3, n1:-3.0}]}" + - ", f2:{ 'buckets':[{ val:'B', count:3, n1:-3.0}, { val:'A', count:2, n1:6.0 }]} }" - ); + , + "facet", + "true", + "facet.version", + "2", + "facet.field", + "{!key=f1}${cat_s}", + "f.f1.facet.sort", + "n1 desc", + "facet.stat", + "n1:sum(${num_d})", + "facet.field", + "{!key=f2}${cat_s}", + "f.f1.facet.sort", + "n1 asc"), + "facets=={ 'count':6, " + + " f1:{ 'buckets':[{ val:'A', count:2, n1:6.0 }, { val:'B', count:3, n1:-3.0}]}" + + ", f2:{ 'buckets':[{ val:'B', count:3, n1:-3.0}, { val:'A', count:2, n1:6.0 }]} }"); // range facet with sub facets and stats, with "other:all", on subset - client.testJQ(params(p, "q", "id:(3 4 6)" - //, "json.facet", "{f:{range:{field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}" - , "facet","true", "facet.version", "2", "facet.range","{!key=f}${num_d}", "facet.range.start","-5", "facet.range.end","10", "facet.range.gap","5" - , "f.f.facet.stat","x:sum(${num_i})", "subfacet.f.query","{!key=ny}${where_s}:NY", "facet.range.other","all" - ) - , "facets=={count:3, f:{buckets:[ {val:-5.0,count:1,x:-5.0,ny:{count:1}}, {val:0.0,count:1,x:3.0,ny:{count:0}}, {val:5.0,count:0 /* ,x:0.0,ny:{count:0} */} ]" + - ",before: {count:0 /* ,x:0.0,ny:{count:0} */ }" + - ",after: {count:0 /* ,x:0.0,ny:{count:0} */}" + - ",between:{count:2,x:-2.0, ny:{count:1}}" + - " } }" - ); - + client.testJQ( + params( + p, + "q", + "id:(3 4 6)" + // , "json.facet", "{f:{range:{field:${num_d}, start:-5, end:10, gap:5, other:all, + // facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }}}" + , + "facet", + "true", + "facet.version", + "2", + "facet.range", + "{!key=f}${num_d}", + "facet.range.start", + "-5", + "facet.range.end", + "10", + "facet.range.gap", + "5", + "f.f.facet.stat", + "x:sum(${num_i})", + "subfacet.f.query", + "{!key=ny}${where_s}:NY", + "facet.range.other", + "all"), + "facets=={count:3, f:{buckets:[ {val:-5.0,count:1,x:-5.0,ny:{count:1}}, {val:0.0,count:1,x:3.0,ny:{count:0}}, {val:5.0,count:0 /* ,x:0.0,ny:{count:0} */} ]" + + ",before: {count:0 /* ,x:0.0,ny:{count:0} */ }" + + ",after: {count:0 /* ,x:0.0,ny:{count:0} */}" + + ",between:{count:2,x:-2.0, ny:{count:1}}" + + " } }"); //////////////////////////////////////////////////////////////////////////////////////////// // multi-select / exclude tagged filters via excludeTags 
//////////////////////////////////////////////////////////////////////////////////////////// // test uncached multi-select (see SOLR-8496) - client.testJQ(params(p, "q", "{!cache=false}*:*", "fq","{!tag=doc3,allfilt}-id:3" - - , "json.facet", "{" + - "f1:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc3} } " + - "}" - ) - , "facets=={ count:5, " + - " f1:{ buckets:[ {val:B, count:3}, {val:A, count:2} ] }" + - "}" - ); - - // test sub-facets of empty buckets with domain filter exclusions (canProduceFromEmpty) (see SOLR-9519) - client.testJQ(params(p, "q", "*:*", "fq","{!tag=doc3}id:non-exist", "fq","{!tag=CATA}${cat_s}:A" - - , "json.facet", "{" + - "f1:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc3} } " + - ",q1 :{type:query, q:'*:*', facet:{ f1:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc3} } } } " + // nested under query - ",q1a:{type:query, q:'id:4', facet:{ f1:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc3} } } } " + // nested under query, make sure id:4 filter still applies - ",r1 :{type:range, field:${num_d}, start:0, gap:3, end:5, facet:{ f1:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc3} } } } " + // nested under range, make sure range constraints still apply - ",f2:{${terms} type:terms, field:${cat_s}, domain:{filter:'*:*'} } " + // domain filter doesn't widen, so f2 should not appear. - "}" - ) - , "facets=={ count:0, " + - " f1:{ buckets:[ {val:A, count:2} ] }" + - ",q1:{ count:0, f1:{buckets:[{val:A, count:2}]} }" + - ",q1a:{ count:0, f1:{buckets:[{val:A, count:1}]} }" + - ",r1:{ buckets:[ {val:0.0,count:0,f1:{buckets:[{val:A, count:1}]}}, {val:3.0,count:0,f1:{buckets:[{val:A, count:1}]}} ] }" + - "}" - ); + client.testJQ( + params( + p, + "q", + "{!cache=false}*:*", + "fq", + "{!tag=doc3,allfilt}-id:3", + "json.facet", + "{" + "f1:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc3} } " + "}"), + "facets=={ count:5, " + " f1:{ buckets:[ {val:B, count:3}, {val:A, count:2} ] }" + "}"); + + // test sub-facets of empty buckets with domain filter exclusions (canProduceFromEmpty) (see + // SOLR-9519) + client.testJQ( + params( + p, + "q", + "*:*", + "fq", + "{!tag=doc3}id:non-exist", + "fq", + "{!tag=CATA}${cat_s}:A", + "json.facet", + "{" + + "f1:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc3} } " + + ",q1 :{type:query, q:'*:*', facet:{ f1:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc3} } } } " + + // nested under query + ",q1a:{type:query, q:'id:4', facet:{ f1:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc3} } } } " + + // nested under query, make sure id:4 filter still applies + ",r1 :{type:range, field:${num_d}, start:0, gap:3, end:5, facet:{ f1:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc3} } } } " + + // nested under range, make sure range constraints still apply + ",f2:{${terms} type:terms, field:${cat_s}, domain:{filter:'*:*'} } " + + // domain filter doesn't widen, so f2 should not appear. 
+ "}"), + "facets=={ count:0, " + + " f1:{ buckets:[ {val:A, count:2} ] }" + + ",q1:{ count:0, f1:{buckets:[{val:A, count:2}]} }" + + ",q1a:{ count:0, f1:{buckets:[{val:A, count:1}]} }" + + ",r1:{ buckets:[ {val:0.0,count:0,f1:{buckets:[{val:A, count:1}]}}, {val:3.0,count:0,f1:{buckets:[{val:A, count:1}]}} ] }" + + "}"); // nested query facets on subset (with excludeTags) - client.testJQ(params(p, "q", "*:*", "fq","{!tag=abc}id:(2 3)" - , "json.facet", "{ processEmpty:true," + - " f1:{query:{q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} , excludeTags:[xyz,qaz]}}" + - ",f2:{query:{q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} , excludeTags:abc }}" + - ",f3:{query:{q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} , excludeTags:'xyz ,abc ,qaz' }}" + - ",f4:{query:{q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} , excludeTags:[xyz , abc , qaz] }}" + - ",f5:{query:{q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} , excludeTags:[xyz,qaz]}}" + // this is repeated, but it did fail when a single context was shared among sub-facets - ",f6:{query:{q:'${cat_s}:B', facet:{processEmpty:true, nj:{query:'${where_s}:NJ'}, ny:{ type:query, q:'${where_s}:NY', excludeTags:abc}} }}" + // exclude in a sub-facet - ",f7:{query:{q:'${cat_s}:B', facet:{processEmpty:true, nj:{query:'${where_s}:NJ'}, ny:{ type:query, q:'${where_s}:NY', excludeTags:xyz}} }}" + // exclude in a sub-facet that doesn't match - "}" - ) - , "facets=={ 'count':2, " + - " 'f1':{'count':1, 'nj':{'count':1}, 'ny':{'count':0}}" + - ",'f2':{'count':3, 'nj':{'count':2}, 'ny':{'count':1}}" + - ",'f3':{'count':3, 'nj':{'count':2}, 'ny':{'count':1}}" + - ",'f4':{'count':3, 'nj':{'count':2}, 'ny':{'count':1}}" + - ",'f5':{'count':1, 'nj':{'count':1}, 'ny':{'count':0}}" + - ",'f6':{'count':1, 'nj':{'count':1}, 'ny':{'count':1}}" + - ",'f7':{'count':1, 'nj':{'count':1}, 'ny':{'count':0}}" + - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "fq", + "{!tag=abc}id:(2 3)", + "json.facet", + "{ processEmpty:true," + + " f1:{query:{q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} , excludeTags:[xyz,qaz]}}" + + ",f2:{query:{q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} , excludeTags:abc }}" + + ",f3:{query:{q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} , excludeTags:'xyz ,abc ,qaz' }}" + + ",f4:{query:{q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} , excludeTags:[xyz , abc , qaz] }}" + + ",f5:{query:{q:'${cat_s}:B', facet:{nj:{query:'${where_s}:NJ'}, ny:{query:'${where_s}:NY'}} , excludeTags:[xyz,qaz]}}" + + // this is repeated, but it did fail when a single context was shared among + // sub-facets + ",f6:{query:{q:'${cat_s}:B', facet:{processEmpty:true, nj:{query:'${where_s}:NJ'}, ny:{ type:query, q:'${where_s}:NY', excludeTags:abc}} }}" + + // exclude in a sub-facet + ",f7:{query:{q:'${cat_s}:B', facet:{processEmpty:true, nj:{query:'${where_s}:NJ'}, ny:{ type:query, q:'${where_s}:NY', excludeTags:xyz}} }}" + + // exclude in a sub-facet that doesn't match + "}"), + "facets=={ 'count':2, " + + " 'f1':{'count':1, 'nj':{'count':1}, 'ny':{'count':0}}" + + ",'f2':{'count':3, 'nj':{'count':2}, 'ny':{'count':1}}" + + ",'f3':{'count':3, 'nj':{'count':2}, 'ny':{'count':1}}" + + ",'f4':{'count':3, 'nj':{'count':2}, 'ny':{'count':1}}" + + ",'f5':{'count':1, 'nj':{'count':1}, 
'ny':{'count':0}}" + + ",'f6':{'count':1, 'nj':{'count':1}, 'ny':{'count':1}}" + + ",'f7':{'count':1, 'nj':{'count':1}, 'ny':{'count':0}}" + + "}"); // terms facet with nested query facet (with excludeTags, using new format inside domain:{}) - client.testJQ(params(p, "q", "{!cache=false}*:*", "fq", "{!tag=doc6,allfilt}-id:6", "fq","{!tag=doc3,allfilt}-id:3" - - , "json.facet", "{processEmpty:true, " + - " f0:{${terms} type:terms, field:${cat_s}, facet:{nj:{query:'${where_s}:NJ'}} } " + - ",f1:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc3}, missing:true, facet:{nj:{query:'${where_s}:NJ'}} } " + - ",f2:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:allfilt},missing:true, facet:{nj:{query:'${where_s}:NJ'}} } " + - ",f3:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc6}, missing:true, facet:{nj:{query:'${where_s}:NJ'}} } " + - "}" - ) - , "facets=={ count:4, " + - " f0:{ buckets:[ {val:A, count:2, nj:{ count:1}}, {val:B, count:2, nj:{count:2}} ] }" + - ",f1:{ buckets:[ {val:A, count:2, nj:{ count:1}}, {val:B, count:2, nj:{count:2}} ] , missing:{count:1,nj:{count:0}} }" + - ",f2:{ buckets:[ {val:B, count:3, nj:{ count:2}}, {val:A, count:2, nj:{count:1}} ] , missing:{count:1,nj:{count:0}} }" + - ",f3:{ buckets:[ {val:B, count:3, nj:{ count:2}}, {val:A, count:2, nj:{count:1}} ] , missing:{count:0} }" + - "}" - ); + client.testJQ( + params( + p, + "q", + "{!cache=false}*:*", + "fq", + "{!tag=doc6,allfilt}-id:6", + "fq", + "{!tag=doc3,allfilt}-id:3", + "json.facet", + "{processEmpty:true, " + + " f0:{${terms} type:terms, field:${cat_s}, facet:{nj:{query:'${where_s}:NJ'}} } " + + ",f1:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc3}, missing:true, facet:{nj:{query:'${where_s}:NJ'}} } " + + ",f2:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:allfilt},missing:true, facet:{nj:{query:'${where_s}:NJ'}} } " + + ",f3:{${terms} type:terms, field:${cat_s}, domain:{excludeTags:doc6}, missing:true, facet:{nj:{query:'${where_s}:NJ'}} } " + + "}"), + "facets=={ count:4, " + + " f0:{ buckets:[ {val:A, count:2, nj:{ count:1}}, {val:B, count:2, nj:{count:2}} ] }" + + ",f1:{ buckets:[ {val:A, count:2, nj:{ count:1}}, {val:B, count:2, nj:{count:2}} ] , missing:{count:1,nj:{count:0}} }" + + ",f2:{ buckets:[ {val:B, count:3, nj:{ count:2}}, {val:A, count:2, nj:{count:1}} ] , missing:{count:1,nj:{count:0}} }" + + ",f3:{ buckets:[ {val:B, count:3, nj:{ count:2}}, {val:A, count:2, nj:{count:1}} ] , missing:{count:0} }" + + "}"); // range facet with sub facets and stats, with "other:all" (with excludeTags) - client.testJQ(params(p, "q", "*:*", "fq", "{!tag=doc6,allfilt}-id:6", "fq","{!tag=doc3,allfilt}-id:3" - , "json.facet", "{processEmpty:true " + - ", f1:{type:range, field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} , domain:{excludeTags:allfilt} }" + - ", f2:{type:range, field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }" + - "}" - ) - , "facets=={count:4" + - ",f1:{buckets:[ {val:-5.0,count:1,x:-5.0,ny:{count:1}}, {val:0.0,count:2,x:5.0,ny:{count:1}}, {val:5.0,count:0} ]" + - ",before: {count:1,x:-5.0,ny:{count:0}}" + - ",after: {count:1,x:7.0, ny:{count:0}}" + - ",between:{count:3,x:0.0, ny:{count:2}} }" + - ",f2:{buckets:[ {val:-5.0,count:0}, {val:0.0,count:2,x:5.0,ny:{count:1}}, {val:5.0,count:0} ]" + - ",before: {count:1,x:-5.0,ny:{count:0}}" + - ",after: {count:1,x:7.0, ny:{count:0}}" + - ",between:{count:2,x:5.0, ny:{count:1}} }" + - 
"}" - ); - + client.testJQ( + params( + p, + "q", + "*:*", + "fq", + "{!tag=doc6,allfilt}-id:6", + "fq", + "{!tag=doc3,allfilt}-id:3", + "json.facet", + "{processEmpty:true " + + ", f1:{type:range, field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} , domain:{excludeTags:allfilt} }" + + ", f2:{type:range, field:${num_d}, start:-5, end:10, gap:5, other:all, facet:{ x:'sum(${num_i})', ny:{query:'${where_s}:NY'}} }" + + "}"), + "facets=={count:4" + + ",f1:{buckets:[ {val:-5.0,count:1,x:-5.0,ny:{count:1}}, {val:0.0,count:2,x:5.0,ny:{count:1}}, {val:5.0,count:0} ]" + + ",before: {count:1,x:-5.0,ny:{count:0}}" + + ",after: {count:1,x:7.0, ny:{count:0}}" + + ",between:{count:3,x:0.0, ny:{count:2}} }" + + ",f2:{buckets:[ {val:-5.0,count:0}, {val:0.0,count:2,x:5.0,ny:{count:1}}, {val:5.0,count:0} ]" + + ",before: {count:1,x:-5.0,ny:{count:0}}" + + ",after: {count:1,x:7.0, ny:{count:0}}" + + ",between:{count:2,x:5.0, ny:{count:1}} }" + + "}"); // // facet on numbers // - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{" + - " f1:{${terms} type:field, field:${num_i} }" + - ",f2:{${terms} type:field, field:${num_i}, sort:'count asc' }" + - ",f3:{${terms} type:field, field:${num_i}, sort:'index asc' }" + - ",f4:{${terms} type:field, field:${num_i}, sort:'index desc' }" + - ",f5:{${terms} type:field, field:${num_i}, sort:'index desc', limit:1, missing:true, allBuckets:true, numBuckets:true }" + - ",f6:{${terms} type:field, field:${num_i}, sort:'index desc', mincount:2, numBuckets:true }" + // mincount should not lower numbuckets (since SOLR-10552) - ",f7:{${terms} type:field, field:${num_i}, sort:'index desc', offset:2, numBuckets:true }" + // test offset - ",f8:{${terms} type:field, field:${num_i}, sort:'index desc', offset:100, numBuckets:true }" + // test high offset - ",f9:{${terms} type:field, field:${num_i}, sort:'x desc', facet:{x:'avg(${num_d})'}, missing:true, allBuckets:true, numBuckets:true }" + // test stats - ",f10:{${terms} type:field, field:${num_i}, facet:{a:{query:'${cat_s}:A'}}, missing:true, allBuckets:true, numBuckets:true }" + // test subfacets - ",f11:{${terms} type:field, field:${num_i}, facet:{a:'unique(${num_d})'} ,missing:true, allBuckets:true, sort:'a desc' }" + // test subfacet using unique on numeric field (this previously triggered a resizing bug) - "}" - ) - , "facets=={count:6 " + - ",f1:{ buckets:[{val:-5,count:2},{val:2,count:1},{val:3,count:1},{val:7,count:1} ] } " + - ",f2:{ buckets:[{val:2,count:1},{val:3,count:1},{val:7,count:1},{val:-5,count:2} ] } " + - ",f3:{ buckets:[{val:-5,count:2},{val:2,count:1},{val:3,count:1},{val:7,count:1} ] } " + - ",f4:{ buckets:[{val:7,count:1},{val:3,count:1},{val:2,count:1},{val:-5,count:2} ] } " + - ",f5:{ buckets:[{val:7,count:1}] , numBuckets:4, allBuckets:{count:5}, missing:{count:1} } " + - ",f6:{ buckets:[{val:-5,count:2}] , numBuckets:4 } " + - ",f7:{ buckets:[{val:2,count:1},{val:-5,count:2}] , numBuckets:4 } " + - ",f8:{ buckets:[] , numBuckets:4 } " + - ",f9:{ buckets:[{val:7,count:1,x:11.0},{val:2,count:1,x:4.0},{val:3,count:1,x:2.0},{val:-5,count:2,x:-7.0} ], numBuckets:4, allBuckets:{count:5,x:0.6},missing:{count:1,x:0.0} } " + // TODO: should missing exclude "x" because no values were collected? 
- ",f10:{ buckets:[{val:-5,count:2,a:{count:0}},{val:2,count:1,a:{count:1}},{val:3,count:1,a:{count:1}},{val:7,count:1,a:{count:0}} ], numBuckets:4, allBuckets:{count:5},missing:{count:1,a:{count:0}} } " + - ",f11:{ buckets:[{val:-5,count:2,a:2},{val:2,count:1,a:1},{val:3,count:1,a:1},{val:7,count:1,a:1} ] , missing:{count:1,a:0} , allBuckets:{count:5,a:5} } " + - "}" - ); - - - // facet on a float field - shares same code with integers/longs currently, so we only need to test labels/sorting - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{" + - " f1:{${terms} type:field, field:${num_d} }" + - ",f2:{${terms} type:field, field:${num_d}, sort:'index desc' }" + - "}" - ) - , "facets=={count:6 " + - ",f1:{ buckets:[{val:-9.0,count:1},{val:-5.0,count:1},{val:2.0,count:1},{val:4.0,count:1},{val:11.0,count:1} ] } " + - ",f2:{ buckets:[{val:11.0,count:1},{val:4.0,count:1},{val:2.0,count:1},{val:-5.0,count:1},{val:-9.0,count:1} ] } " + - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + " f1:{${terms} type:field, field:${num_i} }" + + ",f2:{${terms} type:field, field:${num_i}, sort:'count asc' }" + + ",f3:{${terms} type:field, field:${num_i}, sort:'index asc' }" + + ",f4:{${terms} type:field, field:${num_i}, sort:'index desc' }" + + ",f5:{${terms} type:field, field:${num_i}, sort:'index desc', limit:1, missing:true, allBuckets:true, numBuckets:true }" + + ",f6:{${terms} type:field, field:${num_i}, sort:'index desc', mincount:2, numBuckets:true }" + + // mincount should not lower numbuckets (since SOLR-10552) + ",f7:{${terms} type:field, field:${num_i}, sort:'index desc', offset:2, numBuckets:true }" + + // test offset + ",f8:{${terms} type:field, field:${num_i}, sort:'index desc', offset:100, numBuckets:true }" + + // test high offset + ",f9:{${terms} type:field, field:${num_i}, sort:'x desc', facet:{x:'avg(${num_d})'}, missing:true, allBuckets:true, numBuckets:true }" + + // test stats + ",f10:{${terms} type:field, field:${num_i}, facet:{a:{query:'${cat_s}:A'}}, missing:true, allBuckets:true, numBuckets:true }" + + // test subfacets + ",f11:{${terms} type:field, field:${num_i}, facet:{a:'unique(${num_d})'} ,missing:true, allBuckets:true, sort:'a desc' }" + + // test subfacet using unique on numeric field (this previously triggered a + // resizing bug) + "}"), + "facets=={count:6 " + + ",f1:{ buckets:[{val:-5,count:2},{val:2,count:1},{val:3,count:1},{val:7,count:1} ] } " + + ",f2:{ buckets:[{val:2,count:1},{val:3,count:1},{val:7,count:1},{val:-5,count:2} ] } " + + ",f3:{ buckets:[{val:-5,count:2},{val:2,count:1},{val:3,count:1},{val:7,count:1} ] } " + + ",f4:{ buckets:[{val:7,count:1},{val:3,count:1},{val:2,count:1},{val:-5,count:2} ] } " + + ",f5:{ buckets:[{val:7,count:1}] , numBuckets:4, allBuckets:{count:5}, missing:{count:1} } " + + ",f6:{ buckets:[{val:-5,count:2}] , numBuckets:4 } " + + ",f7:{ buckets:[{val:2,count:1},{val:-5,count:2}] , numBuckets:4 } " + + ",f8:{ buckets:[] , numBuckets:4 } " + + ",f9:{ buckets:[{val:7,count:1,x:11.0},{val:2,count:1,x:4.0},{val:3,count:1,x:2.0},{val:-5,count:2,x:-7.0} ], numBuckets:4, allBuckets:{count:5,x:0.6},missing:{count:1,x:0.0} } " + + // TODO: should missing exclude "x" because no values were collected? 
+ ",f10:{ buckets:[{val:-5,count:2,a:{count:0}},{val:2,count:1,a:{count:1}},{val:3,count:1,a:{count:1}},{val:7,count:1,a:{count:0}} ], numBuckets:4, allBuckets:{count:5},missing:{count:1,a:{count:0}} } " + + ",f11:{ buckets:[{val:-5,count:2,a:2},{val:2,count:1,a:1},{val:3,count:1,a:1},{val:7,count:1,a:1} ] , missing:{count:1,a:0} , allBuckets:{count:5,a:5} } " + + "}"); + + // facet on a float field - shares same code with integers/longs currently, so we only need to + // test labels/sorting + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + " f1:{${terms} type:field, field:${num_d} }" + + ",f2:{${terms} type:field, field:${num_d}, sort:'index desc' }" + + "}"), + "facets=={count:6 " + + ",f1:{ buckets:[{val:-9.0,count:1},{val:-5.0,count:1},{val:2.0,count:1},{val:4.0,count:1},{val:11.0,count:1} ] } " + + ",f2:{ buckets:[{val:11.0,count:1},{val:4.0,count:1},{val:2.0,count:1},{val:-5.0,count:1},{val:-9.0,count:1} ] } " + + "}"); // test 0, min/max int/long - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{" + - " u : 'unique(${Z_num_i})'" + - ", u2 : 'unique(${Z_num_l})'" + - ", min1 : 'min(${Z_num_i})', max1 : 'max(${Z_num_i})'" + - ", min2 : 'min(${Z_num_l})', max2 : 'max(${Z_num_l})'" + - ", f1:{${terms} type:field, field:${Z_num_i} }" + - ", f2:{${terms} type:field, field:${Z_num_l} }" + - "}" - ) - , "facets=={count:6 " + - ",u:3" + - ",u2:3" + - ",min1:" + Integer.MIN_VALUE + - ",max1:" + Integer.MAX_VALUE + - ",min2:" + Long.MIN_VALUE + - ",max2:" + Long.MAX_VALUE + - ",f1:{ buckets:[{val:" + Integer.MIN_VALUE + ",count:1},{val:0,count:1},{val:" + Integer.MAX_VALUE+",count:1}]} " + - ",f2:{ buckets:[{val:" + Long.MIN_VALUE + ",count:1},{val:0,count:1},{val:" + Long.MAX_VALUE+",count:1}]} " + - "}" - ); - - + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + " u : 'unique(${Z_num_i})'" + + ", u2 : 'unique(${Z_num_l})'" + + ", min1 : 'min(${Z_num_i})', max1 : 'max(${Z_num_i})'" + + ", min2 : 'min(${Z_num_l})', max2 : 'max(${Z_num_l})'" + + ", f1:{${terms} type:field, field:${Z_num_i} }" + + ", f2:{${terms} type:field, field:${Z_num_l} }" + + "}"), + "facets=={count:6 " + + ",u:3" + + ",u2:3" + + ",min1:" + + Integer.MIN_VALUE + + ",max1:" + + Integer.MAX_VALUE + + ",min2:" + + Long.MIN_VALUE + + ",max2:" + + Long.MAX_VALUE + + ",f1:{ buckets:[{val:" + + Integer.MIN_VALUE + + ",count:1},{val:0,count:1},{val:" + + Integer.MAX_VALUE + + ",count:1}]} " + + ",f2:{ buckets:[{val:" + + Long.MIN_VALUE + + ",count:1},{val:0,count:1},{val:" + + Long.MAX_VALUE + + ",count:1}]} " + + "}"); // multi-valued integer - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{ " + - " c1:'unique(${num_is})', c2:'hll(${num_is})', c3:'missing(${num_is})'" + - ", c4:'countvals(${num_is})', c5:'agg(countvals(${num_is}))'" + - ",f1:{${terms} type:terms, field:${num_is} } " + - "}" - ) - , "facets=={ count:6 " + - ", c1:5, c2:5, c3:1, c4:8, c5:8" + - ", f1:{ buckets:[ {val:-1,count:2},{val:0,count:2},{val:3,count:2},{val:-5,count:1},{val:2,count:1} ] } " + - "} " - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{ " + + " c1:'unique(${num_is})', c2:'hll(${num_is})', c3:'missing(${num_is})'" + + ", c4:'countvals(${num_is})', c5:'agg(countvals(${num_is}))'" + + ",f1:{${terms} type:terms, field:${num_is} } " + + "}"), + "facets=={ count:6 " + + ", c1:5, c2:5, c3:1, c4:8, c5:8" + + ", f1:{ buckets:[ {val:-1,count:2},{val:0,count:2},{val:3,count:2},{val:-5,count:1},{val:2,count:1} ] } " + + "} "); // multi-valued float - 
client.testJQ(params(p, "q", "*:*" - , "json.facet", "{ " + - " c1:'unique(${num_fs})', c2:'hll(${num_fs})', c3:'missing(${num_fs})', c4:'agg(missing(${num_fs}))', c5:'countvals(${num_fs})'" + - ",f1:{${terms} type:terms, field:${num_fs} } " + - "}" - ) - , "facets=={ count:6 " + - ", c1:5, c2:5, c3:1, c4:1, c5:8" + - ", f1:{ buckets:[ {val:-1.5,count:2},{val:0.0,count:2},{val:3.0,count:2},{val:-5.0,count:1},{val:2.0,count:1} ] } " + - "} " - ); - - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{" + - // "cat0:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0}" + // overrequest=0 test needs predictable layout - "cat1:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:1}" + - ",catDef:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:-1}" + // -1 is default overrequest - ",catBig:{type:terms, field:${cat_s}, sort:'count desc', offset:1, limit:2147483647, overrequest:2147483647}" + // make sure overflows don't mess us up - "}" - ) - , "facets=={ count:6" + + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{ " + + " c1:'unique(${num_fs})', c2:'hll(${num_fs})', c3:'missing(${num_fs})', c4:'agg(missing(${num_fs}))', c5:'countvals(${num_fs})'" + + ",f1:{${terms} type:terms, field:${num_fs} } " + + "}"), + "facets=={ count:6 " + + ", c1:5, c2:5, c3:1, c4:1, c5:8" + + ", f1:{ buckets:[ {val:-1.5,count:2},{val:0.0,count:2},{val:3.0,count:2},{val:-5.0,count:1},{val:2.0,count:1} ] } " + + "} "); + + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{" + + + // "cat0:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0}" + + // // overrequest=0 test needs predictable layout + "cat1:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:1}" + + ",catDef:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:-1}" + + // -1 is default overrequest + ",catBig:{type:terms, field:${cat_s}, sort:'count desc', offset:1, limit:2147483647, overrequest:2147483647}" + + // make sure overflows don't mess us up + "}"), + "facets=={ count:6" + + // ", cat0:{ buckets:[ {val:B,count:3} ] }" - ", cat1:{ buckets:[ {val:B,count:3} ] }" + - ", catDef:{ buckets:[ {val:B,count:3} ] }" + - ", catBig:{ buckets:[ {val:A,count:2} ] }" + - "}" - ); - + ", cat1:{ buckets:[ {val:B,count:3} ] }" + + ", catDef:{ buckets:[ {val:B,count:3} ] }" + + ", catBig:{ buckets:[ {val:A,count:2} ] }" + + "}"); // test filter - client.testJQ(params(p, "q", "*:*", "myfilt","${cat_s}:A", "ff","-id:1", "ff","-id:2" - , "json.facet", "{" + - "t:{${terms} type:terms, field:${cat_s}, domain:{filter:[]} }" + // empty filter list - ",t_filt:{${terms} type:terms, field:${cat_s}, domain:{filter:'${cat_s}:B'} }" + - ",t_filt2 :{${terms} type:terms, field:${cat_s}, domain:{filter:'{!query v=$myfilt}'} }" + // test access to qparser and other query parameters - ",t_filt2a:{${terms} type:terms, field:${cat_s}, domain:{filter:{param:myfilt} } }" + // test filter via "param" type - ",t_filt3: {${terms} type:terms, field:${cat_s}, domain:{filter:['-id:1','-id:2']} }" + - ",t_filt3a:{${terms} type:terms, field:${cat_s}, domain:{filter:{param:ff}} }" + // test multi-valued query parameter - ",q:{type:query, q:'${cat_s}:B', domain:{filter:['-id:5']} }" + // also tests a top-level negative filter - ",r:{type:range, field:${num_d}, start:-5, end:10, gap:5, domain:{filter:'-id:4'} }" + - "}" - ) - , "facets=={ count:6, " + - "t :{ buckets:[ {val:B, count:3}, {val:A, count:2} ] }" + - ",t_filt :{ buckets:[ {val:B, count:3}] } " 
+ - ",t_filt2 :{ buckets:[ {val:A, count:2}] } " + - ",t_filt2a:{ buckets:[ {val:A, count:2}] } " + - ",t_filt3 :{ buckets:[ {val:B, count:2}, {val:A, count:1}] } " + - ",t_filt3a:{ buckets:[ {val:B, count:2}, {val:A, count:1}] } " + - ",q:{count:2}" + - ",r:{buckets:[ {val:-5.0,count:1}, {val:0.0,count:1}, {val:5.0,count:0} ] }" + - "}" - ); - - //test filter using queries from json.queries - client.testJQ(params(p, "q", "*:*" - , "json.queries", "{catS:{'#cat_sA': '${cat_s}:A'}, ff:[{'#id_1':'-id:1'},{'#id_2':'-id:2'}]}" - , "json.facet", "{" + - ",t_filt1:{${terms} type:terms, field:${cat_s}, domain:{filter:{param:catS} } }" + // test filter via "param" type from .queries - ",t_filt2:{${terms} type:terms, field:${cat_s}, domain:{filter:{param:ff}} }" + // test multi-valued query parameter from .queries - "}" - ) - , "facets=={ count:6, " + - ",t_filt1:{ buckets:[ {val:A, count:2}] } " + - ",t_filt2:{ buckets:[ {val:B, count:2}, {val:A, count:1}] } " + - "}" - ); - - // test acc reuse (i.e. reset() method). This is normally used for stats that are not calculated in the first phase, - // currently non-sorting stats. - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{type:terms, field:'${cat_s}', facet:{h:'hll(${where_s})' , u:'unique(${where_s})', mind:'min(${num_d})', maxd:'max(${num_d})', mini:'min(${num_i})', maxi:'max(${num_i})'" + - ", sumd:'sum(${num_d})', avgd:'avg(${num_d})', variance:'variance(${num_d})', stddev:'stddev(${num_d})', missing:'missing(${multi_ss})', vals:'countvals(${multi_ss})'} }}" - ) - , "facets=={ 'count':6, " + - "'f1':{ buckets:[{val:B, count:3, h:2, u:2, mind:-9.0, maxd:11.0, mini:-5, maxi:7, sumd:-3.0, avgd:-1.0, variance:112.0, stddev:10.583005244258363, missing:0, vals:5}," + - " {val:A, count:2, h:2, u:2, mind:2.0, maxd:4.0, mini:2, maxi:3, sumd:6.0, avgd:3.0, variance:2.0, stddev:1.4142135623730951, missing:1, vals:1}] } } " - - ); - + client.testJQ( + params( + p, + "q", + "*:*", + "myfilt", + "${cat_s}:A", + "ff", + "-id:1", + "ff", + "-id:2", + "json.facet", + "{" + + "t:{${terms} type:terms, field:${cat_s}, domain:{filter:[]} }" + + // empty filter list + ",t_filt:{${terms} type:terms, field:${cat_s}, domain:{filter:'${cat_s}:B'} }" + + ",t_filt2 :{${terms} type:terms, field:${cat_s}, domain:{filter:'{!query v=$myfilt}'} }" + + // test access to qparser and other query parameters + ",t_filt2a:{${terms} type:terms, field:${cat_s}, domain:{filter:{param:myfilt} } }" + + // test filter via "param" type + ",t_filt3: {${terms} type:terms, field:${cat_s}, domain:{filter:['-id:1','-id:2']} }" + + ",t_filt3a:{${terms} type:terms, field:${cat_s}, domain:{filter:{param:ff}} }" + + // test multi-valued query parameter + ",q:{type:query, q:'${cat_s}:B', domain:{filter:['-id:5']} }" + + // also tests a top-level negative filter + ",r:{type:range, field:${num_d}, start:-5, end:10, gap:5, domain:{filter:'-id:4'} }" + + "}"), + "facets=={ count:6, " + + "t :{ buckets:[ {val:B, count:3}, {val:A, count:2} ] }" + + ",t_filt :{ buckets:[ {val:B, count:3}] } " + + ",t_filt2 :{ buckets:[ {val:A, count:2}] } " + + ",t_filt2a:{ buckets:[ {val:A, count:2}] } " + + ",t_filt3 :{ buckets:[ {val:B, count:2}, {val:A, count:1}] } " + + ",t_filt3a:{ buckets:[ {val:B, count:2}, {val:A, count:1}] } " + + ",q:{count:2}" + + ",r:{buckets:[ {val:-5.0,count:1}, {val:0.0,count:1}, {val:5.0,count:0} ] }" + + "}"); + + // test filter using queries from json.queries + client.testJQ( + params( + p, + "q", + "*:*", + "json.queries", + "{catS:{'#cat_sA': '${cat_s}:A'}, 
ff:[{'#id_1':'-id:1'},{'#id_2':'-id:2'}]}", + "json.facet", + "{" + + ",t_filt1:{${terms} type:terms, field:${cat_s}, domain:{filter:{param:catS} } }" + + // test filter via "param" type from .queries + ",t_filt2:{${terms} type:terms, field:${cat_s}, domain:{filter:{param:ff}} }" + + // test multi-valued query parameter from .queries + "}"), + "facets=={ count:6, " + + ",t_filt1:{ buckets:[ {val:A, count:2}] } " + + ",t_filt2:{ buckets:[ {val:B, count:2}, {val:A, count:1}] } " + + "}"); + + // test acc reuse (i.e. reset() method). This is normally used for stats that are not + // calculated in the first phase, currently non-sorting stats. + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{type:terms, field:'${cat_s}', facet:{h:'hll(${where_s})' , u:'unique(${where_s})', mind:'min(${num_d})', maxd:'max(${num_d})', mini:'min(${num_i})', maxi:'max(${num_i})'" + + ", sumd:'sum(${num_d})', avgd:'avg(${num_d})', variance:'variance(${num_d})', stddev:'stddev(${num_d})', missing:'missing(${multi_ss})', vals:'countvals(${multi_ss})'} }}"), + "facets=={ 'count':6, " + + "'f1':{ buckets:[{val:B, count:3, h:2, u:2, mind:-9.0, maxd:11.0, mini:-5, maxi:7, sumd:-3.0, avgd:-1.0, variance:112.0, stddev:10.583005244258363, missing:0, vals:5}," + + " {val:A, count:2, h:2, u:2, mind:2.0, maxd:4.0, mini:2, maxi:3, sumd:6.0, avgd:3.0, variance:2.0, stddev:1.4142135623730951, missing:1, vals:1}] } } "); // test min/max of string field - if (where_s.equals("where_s") || where_s.equals("where_sd")) { // supports only single valued currently... - client.testJQ(params(p, "q", "*:* -(+${cat_s}:A +${where_s}:NJ)" // make NY the only value in bucket A - , "json.facet", "{" + - " f1:{type:terms, field:'${cat_s}', facet:{min:'min(${where_s})', max:'max(${where_s})'} }" + - ", f2:{type:terms, field:'${cat_s}', facet:{min:'min(${where_s})', max:'max(${where_s})'} , sort:'min desc'}" + - ", f3:{type:terms, field:'${cat_s}', facet:{min:'min(${where_s})', max:'max(${where_s})'} , sort:'min asc'}" + - ", f4:{type:terms, field:'${cat_s}', facet:{min:'min(${super_s})', max:'max(${super_s})'} , sort:'max asc'}" + - ", f5:{type:terms, field:'${cat_s}', facet:{min:'min(${super_s})', max:'max(${super_s})'} , sort:'max desc'}" + - "}" - ) - , "facets=={ count:5, " + - " f1:{ buckets:[{val:B, count:3, min:NJ, max:NY}, {val:A, count:1, min:NY, max:NY}]}" + - ",f2:{ buckets:[{val:A, count:1, min:NY, max:NY}, {val:B, count:3, min:NJ, max:NY}]}" + - ",f3:{ buckets:[{val:B, count:3, min:NJ, max:NY}, {val:A, count:1, min:NY, max:NY}]}" + - ",f4:{ buckets:[{val:B, count:3, min:batman, max:superman}, {val:A, count:1, min:zodiac, max:zodiac}]}" + - ",f5:{ buckets:[{val:A, count:1, min:zodiac, max:zodiac}, {val:B, count:3, min:batman, max:superman}]}" + - " } " - ); - - + if (where_s.equals("where_s") + || where_s.equals("where_sd")) { // supports only single valued currently... 
+ client.testJQ( + params( + p, + "q", + "*:* -(+${cat_s}:A +${where_s}:NJ)" // make NY the only value in bucket A + , + "json.facet", + "{" + + " f1:{type:terms, field:'${cat_s}', facet:{min:'min(${where_s})', max:'max(${where_s})'} }" + + ", f2:{type:terms, field:'${cat_s}', facet:{min:'min(${where_s})', max:'max(${where_s})'} , sort:'min desc'}" + + ", f3:{type:terms, field:'${cat_s}', facet:{min:'min(${where_s})', max:'max(${where_s})'} , sort:'min asc'}" + + ", f4:{type:terms, field:'${cat_s}', facet:{min:'min(${super_s})', max:'max(${super_s})'} , sort:'max asc'}" + + ", f5:{type:terms, field:'${cat_s}', facet:{min:'min(${super_s})', max:'max(${super_s})'} , sort:'max desc'}" + + "}"), + "facets=={ count:5, " + + " f1:{ buckets:[{val:B, count:3, min:NJ, max:NY}, {val:A, count:1, min:NY, max:NY}]}" + + ",f2:{ buckets:[{val:A, count:1, min:NY, max:NY}, {val:B, count:3, min:NJ, max:NY}]}" + + ",f3:{ buckets:[{val:B, count:3, min:NJ, max:NY}, {val:A, count:1, min:NY, max:NY}]}" + + ",f4:{ buckets:[{val:B, count:3, min:batman, max:superman}, {val:A, count:1, min:zodiac, max:zodiac}]}" + + ",f5:{ buckets:[{val:A, count:1, min:zodiac, max:zodiac}, {val:B, count:3, min:batman, max:superman}]}" + + " } "); } - //////////////////////////////////////////////////////////////// // test which phase stats are calculated in //////////////////////////////////////////////////////////////// if (client.local()) { long creates, resets; - // NOTE: these test the current implementation and may need to be adjusted to match future optimizations (such as calculating N buckets in parallel in the second phase) + // NOTE: these test the current implementation and may need to be adjusted to match future + // optimizations (such as calculating N buckets in parallel in the second phase) creates = DebugAgg.Acc.creates.get(); resets = DebugAgg.Acc.resets.get(); - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms_method} field:${super_s}, limit:1, facet:{x:'debug()'} }}}" // x should be deferred to 2nd phase - ) - , "facets=={ 'count':6, " + - "f1:{ buckets:[{ val:batman, count:1, x:1}]} } " - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms_method} field:${super_s}, limit:1, facet:{x:'debug()'} }}}" // x should be deferred to 2nd phase + ), + "facets=={ 'count':6, " + "f1:{ buckets:[{ val:batman, count:1, x:1}]} } "); assertEquals(1, DebugAgg.Acc.creates.get() - creates); - assertTrue( DebugAgg.Acc.resets.get() - resets <= 1); - assertTrue( DebugAgg.Acc.last.numSlots <= 2 ); // probably "1", but may be special slot for something. As long as it's not cardinality of the field - + assertTrue(DebugAgg.Acc.resets.get() - resets <= 1); + // probably "1", but may be special slot for something. As long as it's not cardinality of + // the field + assertTrue(DebugAgg.Acc.last.numSlots <= 2); creates = DebugAgg.Acc.creates.get(); resets = DebugAgg.Acc.resets.get(); - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms_method} field:${super_s}, limit:1, facet:{ x:'debug()'} , sort:'x asc' }}}" // sorting by x... must be done all at once in first phase - ) - , "facets=={ 'count':6, " + - "f1:{ buckets:[{ val:batman, count:1, x:1}]}" + - " } " - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms_method} field:${super_s}, limit:1, facet:{ x:'debug()'} , sort:'x asc' }}}" // sorting by x... 
must be done all at once in first phase + ), + "facets=={ 'count':6, " + "f1:{ buckets:[{ val:batman, count:1, x:1}]}" + " } "); assertEquals(1, DebugAgg.Acc.creates.get() - creates); - assertTrue( DebugAgg.Acc.resets.get() - resets == 0); - assertTrue( DebugAgg.Acc.last.numSlots >= 5 ); // all slots should be done in a single shot. there may be more than 5 due to special slots or hashing. - + assertTrue(DebugAgg.Acc.resets.get() - resets == 0); + // all slots should be done in a single shot. there may be more than 5 due to special slots or + // hashing. + assertTrue(DebugAgg.Acc.last.numSlots >= 5); // When limit:-1, we should do most stats in first phase (SOLR-10634) creates = DebugAgg.Acc.creates.get(); resets = DebugAgg.Acc.resets.get(); - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms_method} field:${super_s}, limit:-1, facet:{x:'debug()'} }}}" - ) - , "facets==" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms_method} field:${super_s}, limit:-1, facet:{x:'debug()'} }}}"), + "facets=="); assertEquals(1, DebugAgg.Acc.creates.get() - creates); - assertTrue( DebugAgg.Acc.resets.get() - resets == 0); - assertTrue( DebugAgg.Acc.last.numSlots >= 5 ); // all slots should be done in a single shot. there may be more than 5 due to special slots or hashing. + assertTrue(DebugAgg.Acc.resets.get() - resets == 0); + // all slots should be done in a single shot. there may be more than 5 due to special slots or + // hashing. + assertTrue(DebugAgg.Acc.last.numSlots >= 5); // Now for a numeric field // When limit:-1, we should do most stats in first phase (SOLR-10634) creates = DebugAgg.Acc.creates.get(); resets = DebugAgg.Acc.resets.get(); - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms_method} field:${num_d}, limit:-1, facet:{x:'debug()'} }}}" - ) - , "facets==" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms_method} field:${num_d}, limit:-1, facet:{x:'debug()'} }}}"), + "facets=="); assertEquals(1, DebugAgg.Acc.creates.get() - creates); - assertTrue( DebugAgg.Acc.resets.get() - resets == 0); - assertTrue( DebugAgg.Acc.last.numSlots >= 5 ); // all slots should be done in a single shot. there may be more than 5 due to special slots or hashing. + assertTrue(DebugAgg.Acc.resets.get() - resets == 0); + // all slots should be done in a single shot. there may be more than 5 due to special slots or + // hashing. 
+ assertTrue(DebugAgg.Acc.last.numSlots >= 5); - - // But if we need to calculate domains anyway, it probably makes sense to calculate most stats in the 2nd phase (along with sub-facets) + // But if we need to calculate domains anyway, it probably makes sense to calculate most stats + // in the 2nd phase (along with sub-facets) creates = DebugAgg.Acc.creates.get(); resets = DebugAgg.Acc.resets.get(); - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms_method} field:${super_s}, limit:-1, facet:{ x:'debug()' , y:{terms:${where_s}} } }}}" - ) - , "facets==" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms_method} field:${super_s}, limit:-1, facet:{ x:'debug()' , y:{terms:${where_s}} } }}}"), + "facets=="); assertEquals(1, DebugAgg.Acc.creates.get() - creates); - assertTrue( DebugAgg.Acc.resets.get() - resets >=4); - assertTrue( DebugAgg.Acc.last.numSlots <= 2 ); // probably 1, but could be higher + assertTrue(DebugAgg.Acc.resets.get() - resets >= 4); + assertTrue(DebugAgg.Acc.last.numSlots <= 2); // probably 1, but could be higher // Now with a numeric field - // But if we need to calculate domains anyway, it probably makes sense to calculate most stats in the 2nd phase (along with sub-facets) + // But if we need to calculate domains anyway, it probably makes sense to calculate most stats + // in the 2nd phase (along with sub-facets) creates = DebugAgg.Acc.creates.get(); resets = DebugAgg.Acc.resets.get(); - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{f1:{terms:{${terms_method} field:${num_d}, limit:-1, facet:{ x:'debug()' , y:{terms:${where_s}} } }}}" - ) - , "facets==" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{f1:{terms:{${terms_method} field:${num_d}, limit:-1, facet:{ x:'debug()' , y:{terms:${where_s}} } }}}"), + "facets=="); assertEquals(1, DebugAgg.Acc.creates.get() - creates); - assertTrue( DebugAgg.Acc.resets.get() - resets >=4); - assertTrue( DebugAgg.Acc.last.numSlots <= 2 ); // probably 1, but could be higher + assertTrue(DebugAgg.Acc.resets.get() - resets >= 4); + assertTrue(DebugAgg.Acc.last.numSlots <= 2); // probably 1, but could be higher } //////////////////////////////////////////////////////////////// end phase testing // - // Refinement should not be needed to get exact results here, so this tests that - // extra refinement requests are not sent out. This currently relies on counting the number of times - // debug() aggregation is parsed... which is somewhat fragile. Please replace this with something - // better in the future - perhaps debug level info about number of refinements or additional facet phases. + // Refinement should not be needed to get exact results here, so this tests that extra + // refinement requests are not sent out. This currently relies on counting the number of times + // debug() aggregation is parsed... which is somewhat fragile. Please replace this with + // something better in the future - perhaps debug level info about number of refinements or + // additional facet phases. 
// - for (String facet_field : new String[]{cat_s,where_s,num_d,num_i,num_is,num_fs,super_s,date,val_b,multi_ss}) { - ModifiableSolrParams test = params(p, "q", "id:(1 2)", "facet_field",facet_field, "debug", "true" - , "json.facet", "{ " + - " f1:{type:terms, field:'${facet_field}', refine:${refine}, facet:{x:'debug()'} }" + - ",f2:{type:terms, method:dvhash, field:'${facet_field}', refine:${refine}, facet:{x:'debug()'} }" + - ",f3:{type:terms, field:'${facet_field}', refine:${refine}, facet:{x:'debug()', y:{type:terms,field:'${facet_field}',refine:${refine}}} }" + // facet within facet - " }" - ); + for (String facet_field : + new String[] { + cat_s, where_s, num_d, num_i, num_is, num_fs, super_s, date, val_b, multi_ss + }) { + ModifiableSolrParams test = + params( + p, + "q", + "id:(1 2)", + "facet_field", + facet_field, + "debug", + "true", + "json.facet", + "{ " + + " f1:{type:terms, field:'${facet_field}', refine:${refine}, facet:{x:'debug()'} }" + + ",f2:{type:terms, method:dvhash, field:'${facet_field}', refine:${refine}, facet:{x:'debug()'} }" + + ",f3:{type:terms, field:'${facet_field}', refine:${refine}, facet:{x:'debug()', y:{type:terms,field:'${facet_field}',refine:${refine}}} }" + + // facet within facet + " }"); long startParses = DebugAgg.parses.get(); - client.testJQ(params(test, "refine", "false") - , "facets==" + "" - ); + client.testJQ(params(test, "refine", "false"), "facets==" + ""); long noRefineParses = DebugAgg.parses.get() - startParses; startParses = DebugAgg.parses.get(); - client.testJQ(params(test, "refine", "true") - , "facets==" + "" - ); + client.testJQ(params(test, "refine", "true"), "facets==" + ""); long refineParses = DebugAgg.parses.get() - startParses; assertEquals(noRefineParses, refineParses); } @@ -2576,104 +3699,118 @@ public static void doStatsTemplated(Client client, ModifiableSolrParams p) throw public void testPrelimSortingSingleNode() throws Exception { doTestPrelimSortingSingleNode(false, false); } - + public void testPrelimSortingSingleNodeExtraStat() throws Exception { doTestPrelimSortingSingleNode(true, false); } - + public void testPrelimSortingSingleNodeExtraFacet() throws Exception { doTestPrelimSortingSingleNode(false, true); } - + public void testPrelimSortingSingleNodeExtraStatAndFacet() throws Exception { doTestPrelimSortingSingleNode(true, true); } - - /** @see #doTestPrelimSorting */ - public void doTestPrelimSortingSingleNode(final boolean extraAgg, final boolean extraSubFacet) throws Exception { + + /** + * @see #doTestPrelimSorting + */ + public void doTestPrelimSortingSingleNode(final boolean extraAgg, final boolean extraSubFacet) + throws Exception { // we're not using Client.localClient because it doesn't provide a SolrClient to - // use in doTestPrelimSorting -- so instead we make a single node, and don't use any shards param... + // use in doTestPrelimSorting -- so instead we make a single node, and don't use any shards + // param... 
    final SolrInstances nodes = new SolrInstances(1, "solrconfig-tlog.xml", "schema_latest.xml");
     try {
       final Client client = nodes.getClient(random().nextInt());
-      client.queryDefaults().set("debugQuery", Boolean.toString(random().nextBoolean()) );
+      client.queryDefaults().set("debugQuery", Boolean.toString(random().nextBoolean()));
       doTestPrelimSorting(client, extraAgg, extraSubFacet);
     } finally {
       nodes.stop();
     }
   }
-
+
   public void testPrelimSortingDistrib() throws Exception {
     doTestPrelimSortingDistrib(false, false);
   }
-
+
   public void testPrelimSortingDistribExtraStat() throws Exception {
     doTestPrelimSortingDistrib(true, false);
   }
-
+
   public void testPrelimSortingDistribExtraFacet() throws Exception {
     doTestPrelimSortingDistrib(false, true);
   }
-
+
   public void testPrelimSortingDistribExtraStatAndFacet() throws Exception {
     doTestPrelimSortingDistrib(true, true);
   }

-  /** @see #doTestPrelimSorting */
-  public void doTestPrelimSortingDistrib(final boolean extraAgg, final boolean extraSubFacet) throws Exception {
-    // we only use 2 shards, but we also want to to sanity check code paths if one (additional) shard is empty
+  /**
+   * @see #doTestPrelimSorting
+   */
+  public void doTestPrelimSortingDistrib(final boolean extraAgg, final boolean extraSubFacet)
+      throws Exception {
+    // we only use 2 shards, but we also want to sanity check code paths if one (additional)
+    // shard is empty
     final int totalShards = random().nextBoolean() ? 2 : 3;
-
-    final SolrInstances nodes = new SolrInstances(totalShards, "solrconfig-tlog.xml", "schema_latest.xml");
+
+    final SolrInstances nodes =
+        new SolrInstances(totalShards, "solrconfig-tlog.xml", "schema_latest.xml");
     try {
       final Client client = nodes.getClient(random().nextInt());
-      client.queryDefaults().set( "shards", nodes.getShards(),
-          "debugQuery", Boolean.toString(random().nextBoolean()) );
+      client
+          .queryDefaults()
+          .set(
+              "shards", nodes.getShards(),
+              "debugQuery", Boolean.toString(random().nextBoolean()));
       doTestPrelimSorting(client, extraAgg, extraSubFacet);
     } finally {
       nodes.stop();
     }
   }
-
+
   /**
-   * Helper method that indexes a fixed set of docs to exactly two of the SolrClients
-   * involved in the current Client such that each shard is identical for the purposes of simplified
-   * doc/facet counting/assertions -- if there is only one SolrClient (Client.local) then it sends that
-   * single shard twice as many docs so the counts/assertions will be consistent.
+   * Helper method that indexes a fixed set of docs to exactly two of the SolrClients
+   * involved in the current Client such that each shard is identical for the purposes of simplified
+   * doc/facet counting/assertions -- if there is only one SolrClient (Client.local) then it sends
+   * that single shard twice as many docs so the counts/assertions will be consistent.
    *
-   * Note: this test doesn't demonstrate practical uses of prelim_sort.
-   * The scenerios it tests are actualy fairly absurd, but help to ensure that edge cases are covered.
+   * <p>Note: this test doesn't demonstrate practical uses of prelim_sort. The scenarios it tests
+   * are actually fairly absurd, but help to ensure that edge cases are covered.
    *
    * @param client client to use -- may be local or multishard
-   * @param extraAgg if an extra aggregation function should be included, this hits slightly diff code paths
-   * @param extraSubFacet if an extra sub facet should be included, this hits slightly diff code paths
+   * @param extraAgg if an extra aggregation function should be included, this hits slightly diff
+   *     code paths
+   * @param extraSubFacet if an extra sub facet should be included, this hits slightly diff code
+   *     paths
    */
-  public void doTestPrelimSorting(final Client client,
-                                  final boolean extraAgg,
-                                  final boolean extraSubFacet) throws Exception {
-
+  public void doTestPrelimSorting(
+      final Client client, final boolean extraAgg, final boolean extraSubFacet) throws Exception {
+
     client.deleteByQuery("*:*", null);
-
+
     List<SolrClient> clients = client.getClientProvider().all();
-
-    // carefully craft two balanced shards (assuming we have at least two) and leave any other shards
+
+    // carefully craft two balanced shards (assuming we have at least two) and leave any other
+    // shards
     // empty to help check the code paths of some shards returning no buckets.
     //
     // if we are in a single node sitaution, these clients will be the same, and we'll have the same
     // total docs in our collection, but the numShardsWithData will be diff
     // (which will affect some assertions)
     final SolrClient shardA = clients.get(0);
-    final SolrClient shardB = clients.get(clients.size()-1);
+    final SolrClient shardB = clients.get(clients.size() - 1);
     final int numShardsWithData = (shardA == shardB) ? 1 : 2;

-    // for simplicity, each foo_s "term" exists on each shard in the same number of docs as it's numeric
-    // value (so count should be double the term) and bar_i is always 1 per doc (so sum(bar_i)
-    // should always be the same as count)
+    // for simplicity, each foo_s "term" exists on each shard in the same number of docs as its
+    // numeric value (so count should be double the term) and bar_i is always 1 per doc (so
+    // sum(bar_i) should always be the same as count)
     int id = 0;
     for (int i = 1; i <= 20; i++) {
       for (int j = 1; j <= i; j++) {
-        shardA.add(new SolrInputDocument("id", ""+(++id), "foo_s", "foo_" + i, "bar_i", "1"));
-        shardB.add(new SolrInputDocument("id", ""+(++id), "foo_s", "foo_" + i, "bar_i", "1"));
+        shardA.add(new SolrInputDocument("id", "" + (++id), "foo_s", "foo_" + i, "bar_i", "1"));
+        shardB.add(new SolrInputDocument("id", "" + (++id), "foo_s", "foo_" + i, "bar_i", "1"));
       }
     }
     assertEquals(420, id); // sanity check
@@ -2685,41 +3822,71 @@ public void doTestPrelimSorting(final Client client,
     // that cause our stat to be collected differently, so we have to account for that when
     // looking at DebugAdd collect stats if/when the test framework picks those
     // ...BUT... this only affects cloud, for single node prelim_sort overrides streaming
-    final boolean indexSortDebugAggFudge = ( 1 < numShardsWithData ) &&
-        (FacetField.FacetMethod.DEFAULT_METHOD.equals(FacetField.FacetMethod.STREAM) ||
-         FacetField.FacetMethod.DEFAULT_METHOD.equals(FacetField.FacetMethod.ENUM));
-
-
-    final String common = "refine:true, type:field, field:'foo_s', facet: { "
-        + "x: 'debug(wrap,sum(bar_i))' "
-        + (extraAgg ? ", y:'min(bar_i)'" : "")
", z:{type:query, q:'bar_i:0'}" : "") - + "}"; + final boolean indexSortDebugAggFudge = + (1 < numShardsWithData) + && (FacetField.FacetMethod.DEFAULT_METHOD.equals(FacetField.FacetMethod.STREAM) + || FacetField.FacetMethod.DEFAULT_METHOD.equals(FacetField.FacetMethod.ENUM)); + + final String common = + "refine:true, type:field, field:'foo_s', facet: { " + + "x: 'debug(wrap,sum(bar_i))' " + + (extraAgg ? ", y:'min(bar_i)'" : "") + + (extraSubFacet ? ", z:{type:query, q:'bar_i:0'}" : "") + + "}"; final String yz = (extraAgg ? "y:1, " : "") + (extraSubFacet ? "z:{count:0}, " : ""); - + // really basic: top 5 by (prelim_sort) count, (re)sorted by a stat - client.testJQ(params("q", "*:*", "rows", "0", "json.facet" - , "{ foo_a:{ "+ common+", limit:5, overrequest:0, " - + " prelim_sort:'count desc', sort:'x asc' }" - + " foo_b:{ "+ common+", limit:5, overrequest:0, " - + " prelim_sort:'count asc', sort:'x desc' } }") - , "facets=={ 'count':420, " - + " 'foo_a':{ 'buckets':[" - + " { val:foo_16, count:32, " + yz + "x:32.0}," - + " { val:foo_17, count:34, " + yz + "x:34.0}," - + " { val:foo_18, count:36, " + yz + "x:36.0}," - + " { val:foo_19, count:38, " + yz + "x:38.0}," - + " { val:foo_20, count:40, " + yz + "x:40.0}," - + "] }," - + " 'foo_b':{ 'buckets':[" - + " { val:foo_5, count:10, " + yz + "x:10.0}," - + " { val:foo_4, count:8, " + yz + "x:8.0}," - + " { val:foo_3, count:6, " + yz + "x:6.0}," - + " { val:foo_2, count:4, " + yz + "x:4.0}," - + " { val:foo_1, count:2, " + yz + "x:2.0}," - + "] }," - + "}" - ); + client.testJQ( + params( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{ foo_a:{ " + + common + + ", limit:5, overrequest:0, " + + " prelim_sort:'count desc', sort:'x asc' }" + + " foo_b:{ " + + common + + ", limit:5, overrequest:0, " + + " prelim_sort:'count asc', sort:'x desc' } }"), + "facets=={ 'count':420, " + + " 'foo_a':{ 'buckets':[" + + " { val:foo_16, count:32, " + + yz + + "x:32.0}," + + " { val:foo_17, count:34, " + + yz + + "x:34.0}," + + " { val:foo_18, count:36, " + + yz + + "x:36.0}," + + " { val:foo_19, count:38, " + + yz + + "x:38.0}," + + " { val:foo_20, count:40, " + + yz + + "x:40.0}," + + "] }," + + " 'foo_b':{ 'buckets':[" + + " { val:foo_5, count:10, " + + yz + + "x:10.0}," + + " { val:foo_4, count:8, " + + yz + + "x:8.0}," + + " { val:foo_3, count:6, " + + yz + + "x:6.0}," + + " { val:foo_2, count:4, " + + yz + + "x:4.0}," + + " { val:foo_1, count:2, " + + yz + + "x:2.0}," + + "] }," + + "}"); // (re)sorting should prevent 'sum(bar_i)' from being computed for every doc // only the choosen buckets should be collected (as a set) once per node... assertEqualsAndReset(0, DebugAgg.Acc.collectDocs); @@ -2729,108 +3896,202 @@ public void doTestPrelimSorting(final Client client, { // same really basic top 5 by (prelim_sort) count, (re)sorted by a stat -- w/allBuckets:true // check code paths with and w/o allBuckets // NOTE: allBuckets includes stats, but not other sub-facets... - final String aout = "allBuckets:{ count:420, "+ (extraAgg ? 
"y:1, " : "") + "x:420.0 }"; - client.testJQ(params("q", "*:*", "rows", "0", "json.facet" - , "{ foo_a:{ " + common+", allBuckets:true, limit:5, overrequest:0, " - + " prelim_sort:'count desc', sort:'x asc' }" - + " foo_b:{ " + common+", allBuckets:true, limit:5, overrequest:0, " - + " prelim_sort:'count asc', sort:'x desc' } }") - , "facets=={ 'count':420, " - + " 'foo_a':{ " + aout + " 'buckets':[" - + " { val:foo_16, count:32, " + yz + "x:32.0}," - + " { val:foo_17, count:34, " + yz + "x:34.0}," - + " { val:foo_18, count:36, " + yz + "x:36.0}," - + " { val:foo_19, count:38, " + yz + "x:38.0}," - + " { val:foo_20, count:40, " + yz + "x:40.0}," - + "] }," - + " 'foo_b':{ " + aout + " 'buckets':[" - + " { val:foo_5, count:10, " + yz + "x:10.0}," - + " { val:foo_4, count:8, " + yz + "x:8.0}," - + " { val:foo_3, count:6, " + yz + "x:6.0}," - + " { val:foo_2, count:4, " + yz + "x:4.0}," - + " { val:foo_1, count:2, " + yz + "x:2.0}," - + "] }," - + "}" - ); - // because of allBuckets, we collect every doc on everyshard (x2 facets) in a single "all" slot... + final String aout = "allBuckets:{ count:420, " + (extraAgg ? "y:1, " : "") + "x:420.0 }"; + client.testJQ( + params( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{ foo_a:{ " + + common + + ", allBuckets:true, limit:5, overrequest:0, " + + " prelim_sort:'count desc', sort:'x asc' }" + + " foo_b:{ " + + common + + ", allBuckets:true, limit:5, overrequest:0, " + + " prelim_sort:'count asc', sort:'x desc' } }"), + "facets=={ 'count':420, " + + " 'foo_a':{ " + + aout + + " 'buckets':[" + + " { val:foo_16, count:32, " + + yz + + "x:32.0}," + + " { val:foo_17, count:34, " + + yz + + "x:34.0}," + + " { val:foo_18, count:36, " + + yz + + "x:36.0}," + + " { val:foo_19, count:38, " + + yz + + "x:38.0}," + + " { val:foo_20, count:40, " + + yz + + "x:40.0}," + + "] }," + + " 'foo_b':{ " + + aout + + " 'buckets':[" + + " { val:foo_5, count:10, " + + yz + + "x:10.0}," + + " { val:foo_4, count:8, " + + yz + + "x:8.0}," + + " { val:foo_3, count:6, " + + yz + + "x:6.0}," + + " { val:foo_2, count:4, " + + yz + + "x:4.0}," + + " { val:foo_1, count:2, " + + yz + + "x:2.0}," + + "] }," + + "}"); + // because of allBuckets, we collect every doc on everyshard (x2 facets) in a single "all" + // slot... assertEqualsAndReset(2 * 420, DebugAgg.Acc.collectDocs); // ... in addition to collecting each of the choosen buckets (as sets) once per node... 
      // 2 facets, 5 bucket, on each shard
       assertEqualsAndReset(numShardsWithData * 2 * 5, DebugAgg.Acc.collectDocSets);
     }
-
+
     // pagination (with offset) should happen against the re-sorted list (up to the effective limit)
-    client.testJQ(params("q", "*:*", "rows", "0", "json.facet"
-        , "{ foo_a:{ "+common+", offset:2, limit:3, overrequest:0, "
-        + " prelim_sort:'count desc', sort:'x asc' }"
-        + " foo_b:{ "+common+", offset:2, limit:3, overrequest:0, "
-        + " prelim_sort:'count asc', sort:'x desc' } }")
-        , "facets=={ 'count':420, "
-        + " 'foo_a':{ 'buckets':["
-        + " { val:foo_18, count:36, " + yz + "x:36.0},"
-        + " { val:foo_19, count:38, " + yz + "x:38.0},"
-        + " { val:foo_20, count:40, " + yz + "x:40.0},"
-        + "] },"
-        + " 'foo_b':{ 'buckets':["
-        + " { val:foo_3, count:6, " + yz + "x:6.0},"
-        + " { val:foo_2, count:4, " + yz + "x:4.0},"
-        + " { val:foo_1, count:2, " + yz + "x:2.0},"
-        + "] },"
-        + "}"
-    );
+    client.testJQ(
+        params(
+            "q",
+            "*:*",
+            "rows",
+            "0",
+            "json.facet",
+            "{ foo_a:{ "
+                + common
+                + ", offset:2, limit:3, overrequest:0, "
+                + " prelim_sort:'count desc', sort:'x asc' }"
+                + " foo_b:{ "
+                + common
+                + ", offset:2, limit:3, overrequest:0, "
+                + " prelim_sort:'count asc', sort:'x desc' } }"),
+        "facets=={ 'count':420, "
+            + " 'foo_a':{ 'buckets':["
+            + " { val:foo_18, count:36, "
+            + yz
+            + "x:36.0},"
+            + " { val:foo_19, count:38, "
+            + yz
+            + "x:38.0},"
+            + " { val:foo_20, count:40, "
+            + yz
+            + "x:40.0},"
+            + "] },"
+            + " 'foo_b':{ 'buckets':["
+            + " { val:foo_3, count:6, "
+            + yz
+            + "x:6.0},"
+            + " { val:foo_2, count:4, "
+            + yz
+            + "x:4.0},"
+            + " { val:foo_1, count:2, "
+            + yz
+            + "x:2.0},"
+            + "] },"
+            + "}");
     assertEqualsAndReset(0, DebugAgg.Acc.collectDocs);
     // 2 facets, 5 buckets (including offset), on each shard
     assertEqualsAndReset(numShardsWithData * 2 * 5, DebugAgg.Acc.collectDocSets);
-
+
     // when overrequesting is used, the full list of candidate buckets should be considered
-    client.testJQ(params("q", "*:*", "rows", "0", "json.facet"
-        , "{ foo_a:{ "+common+", limit:5, overrequest:5, "
-        + " prelim_sort:'count desc', sort:'x asc' }"
-        + " foo_b:{ "+common+", limit:5, overrequest:5, "
-        + " prelim_sort:'count asc', sort:'x desc' } }")
-        , "facets=={ 'count':420, "
-        + " 'foo_a':{ 'buckets':["
-        + " { val:foo_11, count:22, " + yz + "x:22.0},"
-        + " { val:foo_12, count:24, " + yz + "x:24.0},"
-        + " { val:foo_13, count:26, " + yz + "x:26.0},"
-        + " { val:foo_14, count:28, " + yz + "x:28.0},"
-        + " { val:foo_15, count:30, " + yz + "x:30.0},"
-        + "] },"
-        + " 'foo_b':{ 'buckets':["
-        + " { val:foo_10, count:20, " + yz + "x:20.0},"
-        + " { val:foo_9, count:18, " + yz + "x:18.0},"
-        + " { val:foo_8, count:16, " + yz + "x:16.0},"
-        + " { val:foo_7, count:14, " + yz + "x:14.0},"
-        + " { val:foo_6, count:12, " + yz + "x:12.0},"
-        + "] },"
-        + "}"
-    );
+    client.testJQ(
+        params(
+            "q",
+            "*:*",
+            "rows",
+            "0",
+            "json.facet",
+            "{ foo_a:{ "
+                + common
+                + ", limit:5, overrequest:5, "
+                + " prelim_sort:'count desc', sort:'x asc' }"
+                + " foo_b:{ "
+                + common
+                + ", limit:5, overrequest:5, "
+                + " prelim_sort:'count asc', sort:'x desc' } }"),
+        "facets=={ 'count':420, "
+            + " 'foo_a':{ 'buckets':["
+            + " { val:foo_11, count:22, "
+            + yz
+            + "x:22.0},"
+            + " { val:foo_12, count:24, "
+            + yz
+            + "x:24.0},"
+            + " { val:foo_13, count:26, "
+            + yz
+            + "x:26.0},"
+            + " { val:foo_14, count:28, "
+            + yz
+            + "x:28.0},"
+            + " { val:foo_15, count:30, "
+            + yz
+            + "x:30.0},"
+            + "] },"
+            + " 'foo_b':{ 'buckets':["
+            + " { val:foo_10, count:20, "
+            + yz
+            + "x:20.0},"
+            + " { val:foo_9, count:18, "
+            + yz
+            + "x:18.0},"
+            + " { val:foo_8, count:16, "
+            + yz
+            + "x:16.0},"
+            + " { val:foo_7, count:14, "
+            + yz
+            + "x:14.0},"
+            + " { val:foo_6, count:12, "
+            + yz
+            + "x:12.0},"
+            + "] },"
+            + "}");
     assertEqualsAndReset(0, DebugAgg.Acc.collectDocs);
     // 2 facets, 10 buckets (including overrequest), on each shard
     assertEqualsAndReset(numShardsWithData * 2 * 10, DebugAgg.Acc.collectDocSets);

     { // for an (effectively) unlimited facet, then from the black box perspective of the client,
       // preliminary sorting should be completely ignored...
-      final StringBuilder expected = new StringBuilder("facets=={ 'count':420, 'foo_a':{ 'buckets':[\n");
+      final StringBuilder expected =
+          new StringBuilder("facets=={ 'count':420, 'foo_a':{ 'buckets':[\n");
       for (int i = 20; 0 < i; i--) {
         final int x = i * 2;
-        expected.append("{ val:foo_"+i+", count:"+x+", " + yz + "x:"+x+".0},\n");
+        expected.append("{ val:foo_" + i + ", count:" + x + ", " + yz + "x:" + x + ".0},\n");
       }
       expected.append("] } }");
       for (int limit : Arrays.asList(-1, 100000)) {
-        for (String sortOpts : Arrays.asList("sort:'x desc'",
-            "prelim_sort:'count asc', sort:'x desc'",
-            "prelim_sort:'index asc', sort:'x desc'")) {
+        for (String sortOpts :
+            Arrays.asList(
+                "sort:'x desc'",
+                "prelim_sort:'count asc', sort:'x desc'",
+                "prelim_sort:'index asc', sort:'x desc'")) {
          final String snippet = "limit: " + limit + ", " + sortOpts;
-          client.testJQ(params("q", "*:*", "rows", "0", "json.facet"
-              , "{ foo_a:{ "+common+", " + snippet + "}}")
-              , expected.toString());
-
-          // the only difference from a white box perspective, is when/if we are
-          // optimized to use the sort SlotAcc during collection instead of the prelim_sort SlotAcc..
+          client.testJQ(
+              params(
+                  "q",
+                  "*:*",
+                  "rows",
+                  "0",
+                  "json.facet",
+                  "{ foo_a:{ " + common + ", " + snippet + "}}"),
+              expected.toString());
+
+          // the only difference from a white box perspective, is when/if we are
+          // optimized to use the sort SlotAcc during collection instead of the prelim_sort
+          // SlotAcc..
           // (ie: sub facet preventing single pass (re)sort in single node mode)
-          if (((0 < limit || extraSubFacet) && snippet.contains("prelim_sort")) &&
-              ! (indexSortDebugAggFudge && snippet.contains("index asc"))) {
+          if (((0 < limit || extraSubFacet) && snippet.contains("prelim_sort"))
+              && !(indexSortDebugAggFudge && snippet.contains("index asc"))) {
            // by-pass single pass collection, do everything as sets...
            assertEqualsAndReset(snippet, numShardsWithData * 20, DebugAgg.Acc.collectDocSets);
            assertEqualsAndReset(snippet, 0, DebugAgg.Acc.collectDocs);
@@ -2847,43 +4108,96 @@ public void doTestPrelimSorting(final Client client,
     // test all permutations of (prelim_sort | sort) on (index | count | stat) since there are
     // custom sort codepaths for index & count that work differnetly then general stats
     //
-    // NOTE: there's very little value in re-sort by count/index after prelim_sort on something more complex,
-    // typically better to just ignore the prelim_sort, but we're testing it for completeness
-    // (and because you *might* want to prelim_sort by some function, for the purpose of "sampling" the
-    // top results and then (re)sorting by count/index)
+    // NOTE: there's very little value in re-sort by count/index after prelim_sort on something more
+    // complex, typically better to just ignore the prelim_sort, but we're testing it for
+    // completeness (and because you *might* want to prelim_sort by some function, for the purpose
+    // of "sampling" the top results and then (re)sorting by count/index)
     for (String numSort : Arrays.asList("count", "x")) { // equivilent ordering
-      client.testJQ(params("q", "*:*", "rows", "0", "json.facet"
-          , "{ foo_a:{ "+common+", limit:10, overrequest:0, "
-          + " prelim_sort:'"+numSort+" asc', sort:'index desc' }"
-          + " foo_b:{ "+common+", limit:10, overrequest:0, "
-          + " prelim_sort:'index asc', sort:'"+numSort+" desc' } }")
-          , "facets=={ 'count':420, "
-          + " 'foo_a':{ 'buckets':["
-          + " { val:foo_9, count:18, " + yz + "x:18.0},"
-          + " { val:foo_8, count:16, " + yz + "x:16.0},"
-          + " { val:foo_7, count:14, " + yz + "x:14.0},"
-          + " { val:foo_6, count:12, " + yz + "x:12.0},"
-          + " { val:foo_5, count:10, " + yz + "x:10.0},"
-          + " { val:foo_4, count:8, " + yz + "x:8.0},"
-          + " { val:foo_3, count:6, " + yz + "x:6.0},"
-          + " { val:foo_2, count:4, " + yz + "x:4.0},"
-          + " { val:foo_10, count:20, " + yz + "x:20.0},"
-          + " { val:foo_1, count:2, " + yz + "x:2.0},"
-          + "] },"
-          + " 'foo_b':{ 'buckets':["
-          + " { val:foo_18, count:36, " + yz + "x:36.0},"
-          + " { val:foo_17, count:34, " + yz + "x:34.0},"
-          + " { val:foo_16, count:32, " + yz + "x:32.0},"
-          + " { val:foo_15, count:30, " + yz + "x:30.0},"
-          + " { val:foo_14, count:28, " + yz + "x:28.0},"
-          + " { val:foo_13, count:26, " + yz + "x:26.0},"
-          + " { val:foo_12, count:24, " + yz + "x:24.0},"
-          + " { val:foo_11, count:22, " + yz + "x:22.0},"
-          + " { val:foo_10, count:20, " + yz + "x:20.0},"
-          + " { val:foo_1, count:2, " + yz + "x:2.0},"
-          + "] },"
-          + "}"
-      );
+      client.testJQ(
+          params(
+              "q",
+              "*:*",
+              "rows",
+              "0",
+              "json.facet",
+              "{ foo_a:{ "
+                  + common
+                  + ", limit:10, overrequest:0, "
+                  + " prelim_sort:'"
+                  + numSort
+                  + " asc', sort:'index desc' }"
+                  + " foo_b:{ "
+                  + common
+                  + ", limit:10, overrequest:0, "
+                  + " prelim_sort:'index asc', sort:'"
+                  + numSort
+                  + " desc' } }"),
+          "facets=={ 'count':420, "
+              + " 'foo_a':{ 'buckets':["
+              + " { val:foo_9, count:18, "
+              + yz
+              + "x:18.0},"
+              + " { val:foo_8, count:16, "
+              + yz
+              + "x:16.0},"
+              + " { val:foo_7, count:14, "
+              + yz
+              + "x:14.0},"
+              + " { val:foo_6, count:12, "
+              + yz
+              + "x:12.0},"
+              + " { val:foo_5, count:10, "
+              + yz
+              + "x:10.0},"
+              + " { val:foo_4, count:8, "
+              + yz
+              + "x:8.0},"
+              + " { val:foo_3, count:6, "
+              + yz
+              + "x:6.0},"
+              + " { val:foo_2, count:4, "
+              + yz
+              + "x:4.0},"
+              + " { val:foo_10, count:20, "
+              + yz
+              + "x:20.0},"
+              + " { val:foo_1, count:2, "
+              + yz
+              + "x:2.0},"
+              + "] },"
+              + " 'foo_b':{ 'buckets':["
+              + " { val:foo_18, count:36, "
+              + yz
+              + "x:36.0},"
+              + " { val:foo_17, count:34, "
+              + yz
+              + "x:34.0},"
+              + " { val:foo_16, count:32, "
+              + yz
+              + "x:32.0},"
+              + " { val:foo_15, count:30, "
+              + yz
+              + "x:30.0},"
+              + " { val:foo_14, count:28, "
+              + yz
+              + "x:28.0},"
+              + " { val:foo_13, count:26, "
+              + yz
+              + "x:26.0},"
+              + " { val:foo_12, count:24, "
+              + yz
+              + "x:24.0},"
+              + " { val:foo_11, count:22, "
+              + yz
+              + "x:22.0},"
+              + " { val:foo_10, count:20, "
+              + yz
+              + "x:20.0},"
+              + " { val:foo_1, count:2, "
+              + yz
+              + "x:2.0},"
+              + "] },"
+              + "}");
       // since these behave differently, defer DebugAgg counter checks until all are done...
     }
     // These 3 permutations defer the compuation of x as docsets,
@@ -2892,33 +4206,55 @@ public void doTestPrelimSorting(final Client client,
     // prelim_sort:index, sort:x
     // prelim_sort:index, sort:count
     // ...except when streaming, prelim_sort:index does no docsets.
-    assertEqualsAndReset((indexSortDebugAggFudge ? 1 : 3) * numShardsWithData * 10,
-        DebugAgg.Acc.collectDocSets);
-    // This is the only situation that should (always) result in every doc being collected (but 0 docsets)...
+    assertEqualsAndReset(
+        (indexSortDebugAggFudge ? 1 : 3) * numShardsWithData * 10, DebugAgg.Acc.collectDocSets);
+    // This is the only situation that should (always) result in every doc being collected (but 0
+    // docsets)...
     // prelim_sort:x, sort:index
-    // ...but the (2) prelim_sort:index streaming situations above will also cause all the docs in the first
+    // ...but the (2) prelim_sort:index streaming situations above will also cause all the docs in
+    // the first
     // 10+1 buckets to be collected (enum checks limit+1 to know if there are "more"...
-    assertEqualsAndReset(420 + (indexSortDebugAggFudge ?
-        2 * numShardsWithData * (1+10+11+12+13+14+15+16+17+18+19) : 0),
-        DebugAgg.Acc.collectDocs);
+    assertEqualsAndReset(
+        420
+            + (indexSortDebugAggFudge
+                ? 2 * numShardsWithData * (1 + 10 + 11 + 12 + 13 + 14 + 15 + 16 + 17 + 18 + 19)
+                : 0),
+        DebugAgg.Acc.collectDocs);

     // sanity check of prelim_sorting in a sub facet
-    client.testJQ(params("q", "*:*", "rows", "0", "json.facet"
-        , "{ bar:{ type:query, query:'foo_s:[foo_10 TO foo_19]', facet: {"
-        + " foo:{ "+ common+", limit:5, overrequest:0, "
-        + " prelim_sort:'count desc', sort:'x asc' } } } }")
-        , "facets=={ 'count':420, "
-        + " 'bar':{ 'count':290, "
-        + " 'foo':{ 'buckets':["
-        + " { val:foo_15, count:30, " + yz + "x:30.0},"
-        + " { val:foo_16, count:32, " + yz + "x:32.0},"
-        + " { val:foo_17, count:34, " + yz + "x:34.0},"
-        + " { val:foo_18, count:36, " + yz + "x:36.0},"
-        + " { val:foo_19, count:38, " + yz + "x:38.0},"
-        + " ] },"
-        + " },"
-        + "}"
-    );
+    client.testJQ(
+        params(
+            "q",
+            "*:*",
+            "rows",
+            "0",
+            "json.facet",
+            "{ bar:{ type:query, query:'foo_s:[foo_10 TO foo_19]', facet: {"
+                + " foo:{ "
+                + common
+                + ", limit:5, overrequest:0, "
+                + " prelim_sort:'count desc', sort:'x asc' } } } }"),
+        "facets=={ 'count':420, "
+            + " 'bar':{ 'count':290, "
+            + " 'foo':{ 'buckets':["
+            + " { val:foo_15, count:30, "
+            + yz
+            + "x:30.0},"
+            + " { val:foo_16, count:32, "
+            + yz
+            + "x:32.0},"
+            + " { val:foo_17, count:34, "
+            + yz
+            + "x:34.0},"
+            + " { val:foo_18, count:36, "
+            + yz
+            + "x:36.0},"
+            + " { val:foo_19, count:38, "
+            + yz
+            + "x:38.0},"
+            + " ] },"
+            + " },"
+            + "}");
     // the prelim_sort should prevent 'sum(bar_i)' from being computed for every doc
     // only the choosen buckets should be collected (as a set) once per node...
    assertEqualsAndReset(0, DebugAgg.Acc.collectDocs);
@@ -2926,26 +4262,46 @@ public void doTestPrelimSorting(final Client client,
     assertEqualsAndReset(numShardsWithData * 5, DebugAgg.Acc.collectDocSets);

     { // sanity check how defered stats are handled
-
-      // here we'll prelim_sort & sort on things that are both "not x" and using the debug() counters
-      // (wrapping x) to assert that 'x' is correctly defered and only collected for the final top buckets
+
+      // here we'll prelim_sort & sort on things that are both "not x" and using the debug()
+      // counters (wrapping x) to assert that 'x' is correctly deferred and only collected for the
+      // final top buckets
       final List<String> sorts = new ArrayList<>(Arrays.asList("index asc", "count asc"));
       if (extraAgg) {
         sorts.add("y asc"); // same for every bucket, but index order tie breaker should kick in
       }
       for (String s : sorts) {
-        client.testJQ(params("q", "*:*", "rows", "0", "json.facet"
-            , "{ foo:{ "+ common+", limit:5, overrequest:0, "
-            + " prelim_sort:'count desc', sort:'"+s+"' } }")
-            , "facets=={ 'count':420, "
-            + " 'foo':{ 'buckets':["
-            + " { val:foo_16, count:32, " + yz + "x:32.0},"
-            + " { val:foo_17, count:34, " + yz + "x:34.0},"
-            + " { val:foo_18, count:36, " + yz + "x:36.0},"
-            + " { val:foo_19, count:38, " + yz + "x:38.0},"
-            + " { val:foo_20, count:40, " + yz + "x:40.0},"
-            + "] } }"
-        );
+        client.testJQ(
+            params(
+                "q",
+                "*:*",
+                "rows",
+                "0",
+                "json.facet",
+                "{ foo:{ "
+                    + common
+                    + ", limit:5, overrequest:0, "
+                    + " prelim_sort:'count desc', sort:'"
+                    + s
+                    + "' } }"),
+            "facets=={ 'count':420, "
+                + " 'foo':{ 'buckets':["
+                + " { val:foo_16, count:32, "
+                + yz
+                + "x:32.0},"
+                + " { val:foo_17, count:34, "
+                + yz
+                + "x:34.0},"
+                + " { val:foo_18, count:36, "
+                + yz
+                + "x:36.0},"
+                + " { val:foo_19, count:38, "
+                + yz
+                + "x:38.0},"
+                + " { val:foo_20, count:40, "
+                + yz
+                + "x:40.0},"
+                + "] } }");
        // Neither prelim_sort nor sort should need 'sum(bar_i)' to be computed for every doc
        // only the choosen buckets should be collected (as a set) once per node...
        assertEqualsAndReset(0, DebugAgg.Acc.collectDocs);
@@ -2955,12 +4311,14 @@ public void doTestPrelimSorting(final Client client,
     }
   }
-
   @Test
   public void testOverrequest() throws Exception {
     initServers();
     Client client = servers.getClient(random().nextInt());
-    client.queryDefaults().set( "shards", servers.getShards()).set("debugQuery", Boolean.toString(random().nextBoolean()) );
+    client
+        .queryDefaults()
+        .set("shards", servers.getShards())
+        .set("debugQuery", Boolean.toString(random().nextBoolean()));

     List<SolrClient> clients = client.getClientProvider().all();
     assertTrue(clients.size() >= 3);
@@ -2970,37 +4328,42 @@ public void testOverrequest() throws Exception {
     ModifiableSolrParams p = params("cat_s", "cat_s");
     String cat_s = p.get("cat_s");

-    clients.get(0).add( sdoc("id", "1", cat_s, "A") ); // A will win tiebreak
-    clients.get(0).add( sdoc("id", "2", cat_s, "B") );
+    clients.get(0).add(sdoc("id", "1", cat_s, "A")); // A will win tiebreak
+    clients.get(0).add(sdoc("id", "2", cat_s, "B"));

-    clients.get(1).add( sdoc("id", "3", cat_s, "B") );
-    clients.get(1).add( sdoc("id", "4", cat_s, "A") ); // A will win tiebreak
+    clients.get(1).add(sdoc("id", "3", cat_s, "B"));
+    clients.get(1).add(sdoc("id", "4", cat_s, "A")); // A will win tiebreak

-    clients.get(2).add( sdoc("id", "5", cat_s, "B") );
-    clients.get(2).add( sdoc("id", "6", cat_s, "B") );
+    clients.get(2).add(sdoc("id", "5", cat_s, "B"));
+    clients.get(2).add(sdoc("id", "6", cat_s, "B"));
     client.commit();

     // Shard responses should be A=1, A=1, B=2, merged should be "A=2, B=2" hence A wins tiebreak
-    client.testJQ(params(p, "q", "*:*",
-        "json.facet", "{" +
-            "cat0:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0}" +
-            ",cat1:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:1}" +
-            ",catDef:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:-1}" + // -1 is default overrequest
-            ",catBig:{type:terms, field:${cat_s}, sort:'count desc', offset:1, limit:2147483647, overrequest:2147483647}" + // make sure overflows don't mess us up
-            "}"
-    )
-        , "facets=={ count:6" +
-            ", cat0:{ buckets:[ {val:A,count:2} ] }" + // with no overrequest, we incorrectly conclude that A is the top bucket
-            ", cat1:{ buckets:[ {val:B,count:4} ] }" +
-            ", catDef:{ buckets:[ {val:B,count:4} ] }" +
-            ", catBig:{ buckets:[ {val:A,count:2} ] }" +
-            "}"
-    );
+    client.testJQ(
+        params(
+            p,
+            "q",
+            "*:*",
+            "json.facet",
+            "{"
+                + "cat0:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:0}"
+                + ",cat1:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:1}"
+                + ",catDef:{type:terms, field:${cat_s}, sort:'count desc', limit:1, overrequest:-1}"
+                + // -1 is default overrequest
+                ",catBig:{type:terms, field:${cat_s}, sort:'count desc', offset:1, limit:2147483647, overrequest:2147483647}"
+                + // make sure overflows don't mess us up
+                "}"),
+        "facets=={ count:6"
+            + ", cat0:{ buckets:[ {val:A,count:2} ] }"
+            + // with no overrequest, we incorrectly conclude that A is the top bucket
+            ", cat1:{ buckets:[ {val:B,count:4} ] }"
+            + ", catDef:{ buckets:[ {val:B,count:4} ] }"
+            + ", catBig:{ buckets:[ {val:A,count:2} ] }"
+            + "}");
   }
-
   @Test
   public void testBigger() throws Exception {
     ModifiableSolrParams p = params("rows", "0", "cat_s", "cat_ss", "where_s", "where_ss");
@@ -3008,8 +4371,8 @@ public void testBigger() throws Exception {
     initServers();
     Client client = servers.getClient(random().nextInt());
-    client.queryDefaults().set( "shards", servers.getShards() );
-    doBigger( client, p );
+    client.queryDefaults().set("shards", servers.getShards());
+    doBigger(client, p);
   }

   private String getId(int id) {
@@ -3024,18 +4387,18 @@ public void doBigger(Client client, ModifiableSolrParams p) throws Exception {
     client.deleteByQuery("*:*", null);

-    Random r = new Random(0);  // make deterministic
+    Random r = new Random(0); // make deterministic
     int numCat = 1;
     int numWhere = 2000000000;
     int commitPercent = 10;
-    int ndocs=1000;
+    int ndocs = 1000;

-    Map<Integer,Map<Integer,List<Integer>>> model = new HashMap<>(); // cat->where->list
-    for (int i=0; i<ndocs; i++) {
+    Map<Integer, Map<Integer, List<Integer>>> model = new HashMap<>(); // cat->where->list
+    for (int i = 0; i < ndocs; i++) {
       Integer cat = r.nextInt(numCat);
       Integer where = r.nextInt(numWhere);
-      client.add( sdoc("id", getId(i), cat_s,cat, where_s, where) , null );
-      Map<Integer,List<Integer>> sub = model.get(cat);
+      client.add(sdoc("id", getId(i), cat_s, cat, where_s, where), null);
+      Map<Integer, List<Integer>> sub = model.get(cat);
       if (sub == null) {
         sub = new HashMap<>();
         model.put(cat, sub);
@@ -3056,68 +4419,101 @@ public void doBigger(Client client, ModifiableSolrParams p) throws Exception {
     int sz = model.get(0).size();

-    client.testJQ(params(p, "q", "*:*"
-        , "json.facet", "{f1:{type:terms, field:${cat_s}, limit:2, facet:{x:'unique($where_s)'} }}"
-    )
-        , "facets=={ 'count':" + ndocs + "," +
-            "'f1':{ 'buckets':[{ 'val':'0', 'count':" + ndocs + ", x:" + sz + " }]} } "
-    );
+    client.testJQ(
+        params(
+            p,
+            "q",
+            "*:*",
+            "json.facet",
+            "{f1:{type:terms, field:${cat_s}, limit:2, facet:{x:'unique($where_s)'} }}"),
+        "facets=={ 'count':"
+            + ndocs
+            + ","
+            + "'f1':{ 'buckets':[{ 'val':'0', 'count':"
+            + ndocs
+            + ", x:"
+            + sz
+            + " }]} } ");

     if (client.local()) { // distrib estimation prob won't match
-      client.testJQ(params(p, "q", "*:*"
-          , "json.facet", "{f1:{type:terms, field:${cat_s}, limit:2, facet:{x:'hll($where_s)'} }}"
-      )
-          , "facets=={ 'count':" + ndocs + "," +
-              "'f1':{ 'buckets':[{ 'val':'0', 'count':" + ndocs + ", x:" + sz + " }]} } "
-      );
+      client.testJQ(
+          params(
+              p,
+              "q",
+              "*:*",
+              "json.facet",
+              "{f1:{type:terms, field:${cat_s}, limit:2, facet:{x:'hll($where_s)'} }}"),
+          "facets=={ 'count':"
+              + ndocs
+              + ","
+              + "'f1':{ 'buckets':[{ 'val':'0', 'count':"
+              + ndocs
+              + ", x:"
+              + sz
+              + " }]} } ");
    }

-    client.testJQ(params(p, "q", "*:*"
-        , "json.facet", "{f1:{type:terms, field:id, limit:1, offset:990}}"
-    )
-        , "facets=={ 'count':" + ndocs + "," +
-            "'f1':{buckets:[{val:'00990',count:1}]}} "
-    );
+    client.testJQ(
+        params(p, "q", "*:*", "json.facet", "{f1:{type:terms, field:id, limit:1, offset:990}}"),
+        "facets=={ 'count':" + ndocs + "," + "'f1':{buckets:[{val:'00990',count:1}]}} ");
-
-    for (int i=0; i<20; i++) {
+    for (int i = 0; i < 20; i++) {
       int off = random().nextInt(ndocs);
-      client.testJQ(params(p, "q", "*:*", "off",Integer.toString(off)
-          , "json.facet", "{f1:{type:terms, field:id, limit:1, offset:${off}}}"
-      )
-          , "facets=={ 'count':" + ndocs + "," +
-              "'f1':{buckets:[{val:'" + getId(off) + "',count:1}]}} "
-      );
+      client.testJQ(
+          params(
+              p,
+              "q",
+              "*:*",
+              "off",
+              Integer.toString(off),
+              "json.facet",
+              "{f1:{type:terms, field:id, limit:1, offset:${off}}}"),
+          "facets=={ 'count':"
+              + ndocs
+              + ","
+              + "'f1':{buckets:[{val:'"
+              + getId(off)
+              + "',count:1}]}} ");
    }
  }

   public void testTolerant() throws Exception {
     initServers();
     Client client = servers.getClient(random().nextInt());
-    client.queryDefaults().set("shards", servers.getShards() + "," + DEAD_HOST_1 + "/ignore_exception");
+    client
+        .queryDefaults()
+        .set("shards", servers.getShards() + "," + DEAD_HOST_1 + "/ignore_exception");
     indexSimple(client);

     try {
-      client.testJQ(params("ignore_exception", "true", "shards.tolerant", "false", "q", "*:*"
-          , "json.facet", "{f:{type:terms, field:cat_s}}"
-      )
-          , "facets=={ count:6," +
-              "f:{ buckets:[{val:B,count:3},{val:A,count:2}] }" +
-              "}"
-      );
+      client.testJQ(
+          params(
+              "ignore_exception",
+              "true",
+              "shards.tolerant",
+              "false",
+              "q",
+              "*:*",
+              "json.facet",
+              "{f:{type:terms, field:cat_s}}"),
+          "facets=={ count:6," + "f:{ buckets:[{val:B,count:3},{val:A,count:2}] }" + "}");
       fail("we should have failed");
     } catch (Exception e) {
       // ok
     }

-    client.testJQ(params("ignore_exception", "true", "shards.tolerant", "true", "q", "*:*"
-        , "json.facet", "{f:{type:terms, field:cat_s}}"
-    )
-        , "facets=={ count:6," +
-            "f:{ buckets:[{val:B,count:3},{val:A,count:2}] }" +
-            "}"
-    );
+    client.testJQ(
+        params(
+            "ignore_exception",
+            "true",
+            "shards.tolerant",
+            "true",
+            "q",
+            "*:*",
+            "json.facet",
+            "{f:{type:terms, field:cat_s}}"),
+        "facets=={ count:6," + "f:{ buckets:[{val:B,count:3},{val:A,count:2}] }" + "}");
   }

   @Test
@@ -3126,226 +4522,244 @@ public void testBlockJoin() throws Exception {
   }

   public void doBlockJoin(Client client) throws Exception {
-    ModifiableSolrParams p = params("rows","0");
+    ModifiableSolrParams p = params("rows", "0");
     client.deleteByQuery("*:*", null);

     SolrInputDocument parent;
-    parent = sdoc("id", "1", "type_s","book", "book_s","A", "v_t","q");
+    parent = sdoc("id", "1", "type_s", "book", "book_s", "A", "v_t", "q");
     client.add(parent, null);

-    parent = sdoc("id", "2", "type_s","book", "book_s","B", "v_t","q w");
-    parent.addChildDocument( sdoc("id","2.1", "type_s","page", "page_s","a", "v_t","x y z") );
-    parent.addChildDocument( sdoc("id","2.2", "type_s","page", "page_s","b", "v_t","x y ") );
-    parent.addChildDocument( sdoc("id","2.3", "type_s","page", "page_s","c", "v_t"," y z" ) );
+    parent = sdoc("id", "2", "type_s", "book", "book_s", "B", "v_t", "q w");
+    parent.addChildDocument(sdoc("id", "2.1", "type_s", "page", "page_s", "a", "v_t", "x y z"));
+    parent.addChildDocument(sdoc("id", "2.2", "type_s", "page", "page_s", "b", "v_t", "x y "));
+    parent.addChildDocument(sdoc("id", "2.3", "type_s", "page", "page_s", "c", "v_t", " y z"));
     client.add(parent, null);

-    parent = sdoc("id", "3", "type_s","book", "book_s","C", "v_t","q w e");
-    parent.addChildDocument( sdoc("id","3.1", "type_s","page", "page_s","d", "v_t","x ") );
-    parent.addChildDocument( sdoc("id","3.2", "type_s","page", "page_s","e", "v_t"," y ") );
-    parent.addChildDocument( sdoc("id","3.3", "type_s","page", "page_s","f", "v_t"," z") );
+    parent = sdoc("id", "3", "type_s", "book", "book_s", "C", "v_t", "q w e");
+    parent.addChildDocument(sdoc("id", "3.1", "type_s", "page", "page_s", "d", "v_t", "x "));
+    parent.addChildDocument(sdoc("id", "3.2", "type_s", "page", "page_s", "e", "v_t", " y "));
+    parent.addChildDocument(sdoc("id", "3.3", "type_s", "page", "page_s", "f", "v_t", " z"));
     client.add(parent, null);

-    parent = sdoc("id", "4", "type_s","book", "book_s","D", "v_t","e");
+    parent = sdoc("id", "4", "type_s", "book", "book_s", "D", "v_t", "e");
     client.add(parent, null);

     client.commit();

-    client.testJQ(params(p, "q", "*:*"
-        , "json.facet", "{ " +
-            "pages:{ type:query, domain:{blockChildren:'type_s:book'} , facet:{ x:{field:v_t} } }" +
-            ",pages2:{type:terms, field:v_t, domain:{blockChildren:'type_s:book'} }" +
-            ",books:{ type:query, domain:{blockParent:'type_s:book'} , facet:{ x:{field:v_t} } }" +
-            ",books2:{type:terms, field:v_t, domain:{blockParent:'type_s:book'} }" +
-            ",pageof3:{ type:query, q:'id:3', facet : { x : { type:terms, field:page_s, domain:{blockChildren:'type_s:book'}}} }" +
-            ",bookof22:{ type:query, q:'id:2.2', facet : { x : { type:terms, field:book_s, domain:{blockParent:'type_s:book'}}} }" +
-            ",missing_blockParent:{ type:query, domain:{blockParent:'type_s:does_not_exist'} }" +
-            ",missing_blockChildren:{ type:query, domain:{blockChildren:'type_s:does_not_exist'} }" +
-            "}"
-    )
-        , "facets=={ count:10" +
-            ", pages:{count:6 , x:{buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ]} }" +
-            ", pages2:{ buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ] }" +
-            ", books:{count:4 , x:{buckets:[ {val:q,count:3},{val:e,count:2},{val:w,count:2} ]} }" +
-            ", books2:{ buckets:[ {val:q,count:3},{val:e,count:2},{val:w,count:2} ] }" +
-            ", pageof3:{count:1 , x:{buckets:[ {val:d,count:1},{val:e,count:1},{val:f,count:1} ]} }" +
-            ", bookof22:{count:1 , x:{buckets:[ {val:B,count:1} ]} }" +
-            ", missing_blockParent:{count:0}" +
-            ", missing_blockChildren:{count:0}" +
-            "}"
-    );
+    client.testJQ(
+        params(
+            p,
+            "q",
+            "*:*",
+            "json.facet",
+            "{ "
+                + "pages:{ type:query, domain:{blockChildren:'type_s:book'} , facet:{ x:{field:v_t} } }"
+                + ",pages2:{type:terms, field:v_t, domain:{blockChildren:'type_s:book'} }"
+                + ",books:{ type:query, domain:{blockParent:'type_s:book'} , facet:{ x:{field:v_t} } }"
+                + ",books2:{type:terms, field:v_t, domain:{blockParent:'type_s:book'} }"
+                + ",pageof3:{ type:query, q:'id:3', facet : { x : { type:terms, field:page_s, domain:{blockChildren:'type_s:book'}}} }"
+                + ",bookof22:{ type:query, q:'id:2.2', facet : { x : { type:terms, field:book_s, domain:{blockParent:'type_s:book'}}} }"
+                + ",missing_blockParent:{ type:query, domain:{blockParent:'type_s:does_not_exist'} }"
+                + ",missing_blockChildren:{ type:query, domain:{blockChildren:'type_s:does_not_exist'} }"
+                + "}"),
+        "facets=={ count:10"
+            + ", pages:{count:6 , x:{buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ]} }"
+            + ", pages2:{ buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ] }"
+            + ", books:{count:4 , x:{buckets:[ {val:q,count:3},{val:e,count:2},{val:w,count:2} ]} }"
+            + ", books2:{ buckets:[ {val:q,count:3},{val:e,count:2},{val:w,count:2} ] }"
+            + ", pageof3:{count:1 , x:{buckets:[ {val:d,count:1},{val:e,count:1},{val:f,count:1} ]} }"
+            + ", bookof22:{count:1 , x:{buckets:[ {val:B,count:1} ]} }"
+            + ", missing_blockParent:{count:0}"
+            + ", missing_blockChildren:{count:0}"
+            + "}");

     // no matches in base query
-    client.testJQ(params("q", "no_match_s:NO_MATCHES"
-        , "json.facet", "{ processEmpty:true," +
-            "pages:{ type:query, domain:{blockChildren:'type_s:book'} }" +
-            ",books:{ type:query, domain:{blockParent:'type_s:book'} }" +
-            "}"
-    )
-        , "facets=={ count:0" +
-            ", pages:{count:0}" +
-            ", books:{count:0}" +
-            "}"
-    );
-
+    client.testJQ(
+        params(
+            "q",
+            "no_match_s:NO_MATCHES",
+            "json.facet",
+            "{ processEmpty:true,"
+                + "pages:{ type:query, domain:{blockChildren:'type_s:book'} }"
+                + ",books:{ type:query, domain:{blockParent:'type_s:book'} }"
+                + "}"),
+        "facets=={ count:0" + ", pages:{count:0}" + ", books:{count:0}" + "}");

     // test facet on children nested under terms facet on parents
-    client.testJQ(params("q", "*:*"
-        , "json.facet", "{" +
-            "books:{ type:terms, field:book_s, facet:{ pages:{type:terms, field:v_t, domain:{blockChildren:'type_s:book'}} } }" +
-            "}"
-    )
-        , "facets=={ count:10" +
-            ", books:{buckets:[{val:A,count:1,pages:{buckets:[]}}" +
-            " ,{val:B,count:1,pages:{buckets:[{val:y,count:3},{val:x,count:2},{val:z,count:2}]}}" +
-            " ,{val:C,count:1,pages:{buckets:[{val:x,count:1},{val:y,count:1},{val:z,count:1}]}}" +
-            " ,{val:D,count:1,pages:{buckets:[]}}"+
-            "] }" +
-            "}"
-    );
+    client.testJQ(
+        params(
+            "q",
+            "*:*",
+            "json.facet",
+            "{"
+                + "books:{ type:terms, field:book_s, facet:{ pages:{type:terms, field:v_t, domain:{blockChildren:'type_s:book'}} } }"
+                + "}"),
+        "facets=={ count:10"
+            + ", books:{buckets:[{val:A,count:1,pages:{buckets:[]}}"
+            + " ,{val:B,count:1,pages:{buckets:[{val:y,count:3},{val:x,count:2},{val:z,count:2}]}}"
+            + " ,{val:C,count:1,pages:{buckets:[{val:x,count:1},{val:y,count:1},{val:z,count:1}]}}"
+            + " ,{val:D,count:1,pages:{buckets:[]}}"
+            + "] }"
+            + "}");

     // test filter after block join
-    client.testJQ(params(p, "q", "*:*"
-        , "json.facet", "{ " +
-            "pages1:{type:terms, field:v_t, domain:{blockChildren:'type_s:book', filter:'*:*'} }" +
-            ",pages2:{type:terms, field:v_t, domain:{blockChildren:'type_s:book', filter:'-id:3.1'} }" +
-            ",books:{type:terms, field:v_t, domain:{blockParent:'type_s:book', filter:'*:*'} }" +
-            ",books2:{type:terms, field:v_t, domain:{blockParent:'type_s:book', filter:'id:1'} }" +
-            "}"
-    )
-        , "facets=={ count:10" +
-            ", pages1:{ buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ] }" +
-            ", pages2:{ buckets:[ {val:y,count:4},{val:z,count:3},{val:x,count:2} ] }" +
-            ", books:{ buckets:[ {val:q,count:3},{val:e,count:2},{val:w,count:2} ] }" +
-            ", books2:{ buckets:[ {val:q,count:1} ] }" +
-            "}"
-    );
-
+    client.testJQ(
+        params(
+            p,
+            "q",
+            "*:*",
+            "json.facet",
+            "{ "
+                + "pages1:{type:terms, field:v_t, domain:{blockChildren:'type_s:book', filter:'*:*'} }"
+                + ",pages2:{type:terms, field:v_t, domain:{blockChildren:'type_s:book', filter:'-id:3.1'} }"
+                + ",books:{type:terms, field:v_t, domain:{blockParent:'type_s:book', filter:'*:*'} }"
+                + ",books2:{type:terms, field:v_t, domain:{blockParent:'type_s:book', filter:'id:1'} }"
+                + "}"),
+        "facets=={ count:10"
+            + ", pages1:{ buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ] }"
+            + ", pages2:{ buckets:[ {val:y,count:4},{val:z,count:3},{val:x,count:2} ] }"
+            + ", books:{ buckets:[ {val:q,count:3},{val:e,count:2},{val:w,count:2} ] }"
+            + ", books2:{ buckets:[ {val:q,count:1} ] }"
+            + "}");

     // test other various ways to get filters
-    client.testJQ(params(p, "q", "*:*", "f1","-id:3.1", "f2","id:1"
-        , "json.facet", "{ " +
-            "pages1:{type:terms, field:v_t, domain:{blockChildren:'type_s:book', filter:[]} }" +
-            ",pages2:{type:terms, field:v_t, domain:{blockChildren:'type_s:book', filter:{param:f1} } }" +
-            ",books:{type:terms, field:v_t, domain:{blockParent:'type_s:book', filter:[{param:q},{param:missing_param}]} }" +
-            ",books2:{type:terms, field:v_t, domain:{blockParent:'type_s:book', filter:[{param:f2}] } }" +
-            "}"
-    )
-        , "facets=={ count:10" +
-            ", pages1:{ buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ] }" +
-            ", pages2:{ buckets:[ {val:y,count:4},{val:z,count:3},{val:x,count:2} ] }" +
-            ", books:{ buckets:[ {val:q,count:3},{val:e,count:2},{val:w,count:2} ] }" +
-            ", books2:{ buckets:[ {val:q,count:1} ] }" +
-            "}"
-    );
+    client.testJQ(
+        params(
+            p,
+            "q",
+            "*:*",
+            "f1",
+            "-id:3.1",
+            "f2",
+            "id:1",
+            "json.facet",
+            "{ "
+                + "pages1:{type:terms, field:v_t, domain:{blockChildren:'type_s:book', filter:[]} }"
+                + ",pages2:{type:terms, field:v_t, domain:{blockChildren:'type_s:book', filter:{param:f1} } }"
+                + ",books:{type:terms, field:v_t, domain:{blockParent:'type_s:book', 
filter:[{param:q},{param:missing_param}]} }" + + ",books2:{type:terms, field:v_t, domain:{blockParent:'type_s:book', filter:[{param:f2}] } }" + + "}"), + "facets=={ count:10" + + ", pages1:{ buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ] }" + + ", pages2:{ buckets:[ {val:y,count:4},{val:z,count:3},{val:x,count:2} ] }" + + ", books:{ buckets:[ {val:q,count:3},{val:e,count:2},{val:w,count:2} ] }" + + ", books2:{ buckets:[ {val:q,count:1} ] }" + + "}"); } - /** - * An explicit test for unique*(_root_) across all methods - */ + /** An explicit test for unique*(_root_) across all methods */ public void testUniquesForMethod() throws Exception { final Client client = Client.localClient(); - final SolrParams p = params("rows","0"); + final SolrParams p = params("rows", "0"); client.deleteByQuery("*:*", null); SolrInputDocument parent; - parent = sdoc("id", "1", "type_s","book", "book_s","A", "v_t","q"); + parent = sdoc("id", "1", "type_s", "book", "book_s", "A", "v_t", "q"); client.add(parent, null); - parent = sdoc("id", "2", "type_s","book", "book_s","B", "v_t","q w"); - parent.addChildDocument( sdoc("id","2.1", "type_s","page", "page_s","a", "v_t","x y z") ); - parent.addChildDocument( sdoc("id","2.2", "type_s","page", "page_s","a", "v_t","x1 z") ); - parent.addChildDocument( sdoc("id","2.3", "type_s","page", "page_s","a", "v_t","x2 z") ); - parent.addChildDocument( sdoc("id","2.4", "type_s","page", "page_s","b", "v_t","x y ") ); - parent.addChildDocument( sdoc("id","2.5", "type_s","page", "page_s","c", "v_t"," y z" ) ); - parent.addChildDocument( sdoc("id","2.6", "type_s","page", "page_s","c", "v_t"," z" ) ); + parent = sdoc("id", "2", "type_s", "book", "book_s", "B", "v_t", "q w"); + parent.addChildDocument(sdoc("id", "2.1", "type_s", "page", "page_s", "a", "v_t", "x y z")); + parent.addChildDocument(sdoc("id", "2.2", "type_s", "page", "page_s", "a", "v_t", "x1 z")); + parent.addChildDocument(sdoc("id", "2.3", "type_s", "page", "page_s", "a", "v_t", "x2 z")); + parent.addChildDocument(sdoc("id", "2.4", "type_s", "page", "page_s", "b", "v_t", "x y ")); + parent.addChildDocument(sdoc("id", "2.5", "type_s", "page", "page_s", "c", "v_t", " y z")); + parent.addChildDocument(sdoc("id", "2.6", "type_s", "page", "page_s", "c", "v_t", " z")); client.add(parent, null); - parent = sdoc("id", "3", "type_s","book", "book_s","C", "v_t","q w e"); - parent.addChildDocument( sdoc("id","3.1", "type_s","page", "page_s","b", "v_t","x y ") ); - parent.addChildDocument( sdoc("id","3.2", "type_s","page", "page_s","d", "v_t","x ") ); - parent.addChildDocument( sdoc("id","3.3", "type_s","page", "page_s","e", "v_t"," y ") ); - parent.addChildDocument( sdoc("id","3.4", "type_s","page", "page_s","f", "v_t"," z") ); + parent = sdoc("id", "3", "type_s", "book", "book_s", "C", "v_t", "q w e"); + parent.addChildDocument(sdoc("id", "3.1", "type_s", "page", "page_s", "b", "v_t", "x y ")); + parent.addChildDocument(sdoc("id", "3.2", "type_s", "page", "page_s", "d", "v_t", "x ")); + parent.addChildDocument(sdoc("id", "3.3", "type_s", "page", "page_s", "e", "v_t", " y ")); + parent.addChildDocument(sdoc("id", "3.4", "type_s", "page", "page_s", "f", "v_t", " z")); client.add(parent, null); - parent = sdoc("id", "4", "type_s","book", "book_s","D", "v_t","e"); + parent = sdoc("id", "4", "type_s", "book", "book_s", "D", "v_t", "e"); client.add(parent, null); client.commit(); - client.testJQ(params(p, "q", "type_s:page" - , "json.facet", "{" + - " types: {" + - " type:terms," + - " field:type_s," + - " limit:-1," + - " 
facet: {" + - " in_books: \"unique(_root_)\"," + - " via_field:\"uniqueBlock(_root_)\","+ - " via_query:\"uniqueBlock({!v=type_s:book})\" }"+ - " }," + - " pages: {" + - " type:terms," + - " field:page_s," + - " limit:-1," + - " facet: {" + - " in_books: \"unique(_root_)\"," + - " via_field:\"uniqueBlock(_root_)\","+ - " via_query:\"uniqueBlock({!v=type_s:book})\" }"+ - " }" + - "}" ) - - , "response=={numFound:10,start:0,numFoundExact:true,docs:[]}" - , "facets=={ count:10," + - "types:{" + - " buckets:[ {val:page, count:10, in_books:2, via_field:2, via_query:2 } ]}" + - "pages:{" + - " buckets:[ " + - " {val:a, count:3, in_books:1, via_field:1, via_query:1}," + - " {val:b, count:2, in_books:2, via_field:2, via_query:2}," + - " {val:c, count:2, in_books:1, via_field:1, via_query:1}," + - " {val:d, count:1, in_books:1, via_field:1, via_query:1}," + - " {val:e, count:1, in_books:1, via_field:1, via_query:1}," + - " {val:f, count:1, in_books:1, via_field:1, via_query:1}" + - " ]}" + - "}" - ); + client.testJQ( + params( + p, + "q", + "type_s:page", + "json.facet", + "{" + + " types: {" + + " type:terms," + + " field:type_s," + + " limit:-1," + + " facet: {" + + " in_books: \"unique(_root_)\"," + + " via_field:\"uniqueBlock(_root_)\"," + + " via_query:\"uniqueBlock({!v=type_s:book})\" }" + + " }," + + " pages: {" + + " type:terms," + + " field:page_s," + + " limit:-1," + + " facet: {" + + " in_books: \"unique(_root_)\"," + + " via_field:\"uniqueBlock(_root_)\"," + + " via_query:\"uniqueBlock({!v=type_s:book})\" }" + + " }" + + "}"), + "response=={numFound:10,start:0,numFoundExact:true,docs:[]}", + "facets=={ count:10," + + "types:{" + + " buckets:[ {val:page, count:10, in_books:2, via_field:2, via_query:2 } ]}" + + "pages:{" + + " buckets:[ " + + " {val:a, count:3, in_books:1, via_field:1, via_query:1}," + + " {val:b, count:2, in_books:2, via_field:2, via_query:2}," + + " {val:c, count:2, in_books:1, via_field:1, via_query:1}," + + " {val:d, count:1, in_books:1, via_field:1, via_query:1}," + + " {val:e, count:1, in_books:1, via_field:1, via_query:1}," + + " {val:f, count:1, in_books:1, via_field:1, via_query:1}" + + " ]}" + + "}"); } /** * Similar to {@link #testBlockJoin} but uses query time joining. - *
<p> - * (asserts are slightly diff because if a query matches multiple types of documents, blockJoin domain switches - * to parent/child domains preserve any existing parent/children from the original domain - eg: when q=*:*) - * </p> + * + * <p>
(asserts are slightly diff because if a query matches multiple types of documents, blockJoin + * domain switches to parent/child domains preserve any existing parent/children from the original + * domain - eg: when q=*:*) */ public void testQueryJoinBooksAndPages() throws Exception { final Client client = Client.localClient(); - final SolrParams p = params("rows","0"); + final SolrParams p = params("rows", "0"); client.deleteByQuery("*:*", null); - // build up a list of the docs we want to test with List docsToAdd = new ArrayList<>(10); - docsToAdd.add(sdoc("id", "1", "type_s","book", "book_s","A", "v_t","q")); - - docsToAdd.add( sdoc("id", "2", "type_s","book", "book_s","B", "v_t","q w") ); - docsToAdd.add( sdoc("book_id_s", "2", "id", "2.1", "type_s","page", "page_s","a", "v_t","x y z") ); - docsToAdd.add( sdoc("book_id_s", "2", "id", "2.2", "type_s","page", "page_s","b", "v_t","x y ") ); - docsToAdd.add( sdoc("book_id_s", "2", "id","2.3", "type_s","page", "page_s","c", "v_t"," y z" ) ); - - docsToAdd.add( sdoc("id", "3", "type_s","book", "book_s","C", "v_t","q w e") ); - docsToAdd.add( sdoc("book_id_s", "3", "id","3.1", "type_s","page", "page_s","d", "v_t","x ") ); - docsToAdd.add( sdoc("book_id_s", "3", "id","3.2", "type_s","page", "page_s","e", "v_t"," y ") ); - docsToAdd.add( sdoc("book_id_s", "3", "id","3.3", "type_s","page", "page_s","f", "v_t"," z") ); - - docsToAdd.add( sdoc("id", "4", "type_s","book", "book_s","D", "v_t","e") ); - + docsToAdd.add(sdoc("id", "1", "type_s", "book", "book_s", "A", "v_t", "q")); + + docsToAdd.add(sdoc("id", "2", "type_s", "book", "book_s", "B", "v_t", "q w")); + docsToAdd.add( + sdoc("book_id_s", "2", "id", "2.1", "type_s", "page", "page_s", "a", "v_t", "x y z")); + docsToAdd.add( + sdoc("book_id_s", "2", "id", "2.2", "type_s", "page", "page_s", "b", "v_t", "x y ")); + docsToAdd.add( + sdoc("book_id_s", "2", "id", "2.3", "type_s", "page", "page_s", "c", "v_t", " y z")); + + docsToAdd.add(sdoc("id", "3", "type_s", "book", "book_s", "C", "v_t", "q w e")); + docsToAdd.add( + sdoc("book_id_s", "3", "id", "3.1", "type_s", "page", "page_s", "d", "v_t", "x ")); + docsToAdd.add( + sdoc("book_id_s", "3", "id", "3.2", "type_s", "page", "page_s", "e", "v_t", " y ")); + docsToAdd.add( + sdoc("book_id_s", "3", "id", "3.3", "type_s", "page", "page_s", "f", "v_t", " z")); + + docsToAdd.add(sdoc("id", "4", "type_s", "book", "book_s", "D", "v_t", "e")); + // shuffle the docs since order shouldn't matter Collections.shuffle(docsToAdd, random()); for (SolrInputDocument doc : docsToAdd) { @@ -3359,94 +4773,143 @@ public void testQueryJoinBooksAndPages() throws Exception { final String toBogusChildren = "join: { from:'id', to:'does_not_exist_s' }"; final String toBogusParents = "join: { from:'book_id_s', to:'does_not_exist_s' }"; - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{ " + - "pages:{ type:query, domain:{"+toChildren+"} , facet:{ x:{field:v_t} } }" + - ",pages2:{type:terms, field:v_t, domain:{"+toChildren+"} }" + - ",books:{ type:query, domain:{"+toParents+"} , facet:{ x:{field:v_t} } }" + - ",books2:{type:terms, field:v_t, domain:{"+toParents+"} }" + - ",pageof3:{ type:query, q:'id:3', facet : { x : { type:terms, field:page_s, domain:{"+toChildren+"}}} }" + - ",bookof22:{ type:query, q:'id:2.2', facet : { x : { type:terms, field:book_s, domain:{"+toParents+"}}} }" + - ",missing_Parents:{ type:query, domain:{"+toBogusParents+"} }" + - ",missing_Children:{ type:query, domain:{"+toBogusChildren+"} }" + - "}" - ) - , "facets=={ count:10" + - ", 
pages:{count:6 , x:{buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ]} }" + - ", pages2:{ buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ] }" + - ", books:{count:2 , x:{buckets:[ {val:q,count:2},{val:w,count:2},{val:e,count:1} ]} }" + - ", books2:{ buckets:[ {val:q,count:2},{val:w,count:2},{val:e,count:1} ] }" + - ", pageof3:{count:1 , x:{buckets:[ {val:d,count:1},{val:e,count:1},{val:f,count:1} ]} }" + - ", bookof22:{count:1 , x:{buckets:[ {val:B,count:1} ]} }" + - ", missing_Parents:{count:0}" + - ", missing_Children:{count:0}" + - "}" - ); + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{ " + + "pages:{ type:query, domain:{" + + toChildren + + "} , facet:{ x:{field:v_t} } }" + + ",pages2:{type:terms, field:v_t, domain:{" + + toChildren + + "} }" + + ",books:{ type:query, domain:{" + + toParents + + "} , facet:{ x:{field:v_t} } }" + + ",books2:{type:terms, field:v_t, domain:{" + + toParents + + "} }" + + ",pageof3:{ type:query, q:'id:3', facet : { x : { type:terms, field:page_s, domain:{" + + toChildren + + "}}} }" + + ",bookof22:{ type:query, q:'id:2.2', facet : { x : { type:terms, field:book_s, domain:{" + + toParents + + "}}} }" + + ",missing_Parents:{ type:query, domain:{" + + toBogusParents + + "} }" + + ",missing_Children:{ type:query, domain:{" + + toBogusChildren + + "} }" + + "}"), + "facets=={ count:10" + + ", pages:{count:6 , x:{buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ]} }" + + ", pages2:{ buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ] }" + + ", books:{count:2 , x:{buckets:[ {val:q,count:2},{val:w,count:2},{val:e,count:1} ]} }" + + ", books2:{ buckets:[ {val:q,count:2},{val:w,count:2},{val:e,count:1} ] }" + + ", pageof3:{count:1 , x:{buckets:[ {val:d,count:1},{val:e,count:1},{val:f,count:1} ]} }" + + ", bookof22:{count:1 , x:{buckets:[ {val:B,count:1} ]} }" + + ", missing_Parents:{count:0}" + + ", missing_Children:{count:0}" + + "}"); // no matches in base query - client.testJQ(params("q", "no_match_s:NO_MATCHES" - , "json.facet", "{ processEmpty:true," + - "pages:{ type:query, domain:{"+toChildren+"} }" + - ",books:{ type:query, domain:{"+toParents+"} }" + - "}" - ) - , "facets=={ count:0" + - ", pages:{count:0}" + - ", books:{count:0}" + - "}" - ); - + client.testJQ( + params( + "q", + "no_match_s:NO_MATCHES", + "json.facet", + "{ processEmpty:true," + + "pages:{ type:query, domain:{" + + toChildren + + "} }" + + ",books:{ type:query, domain:{" + + toParents + + "} }" + + "}"), + "facets=={ count:0" + ", pages:{count:0}" + ", books:{count:0}" + "}"); // test facet on children nested under terms facet on parents - client.testJQ(params("q", "*:*" - , "json.facet", "{" + - "books:{ type:terms, field:book_s, facet:{ pages:{type:terms, field:v_t, domain:{"+toChildren+"}} } }" + - "}" - ) - , "facets=={ count:10" + - ", books:{buckets:[{val:A,count:1,pages:{buckets:[]}}" + - " ,{val:B,count:1,pages:{buckets:[{val:y,count:3},{val:x,count:2},{val:z,count:2}]}}" + - " ,{val:C,count:1,pages:{buckets:[{val:x,count:1},{val:y,count:1},{val:z,count:1}]}}" + - " ,{val:D,count:1,pages:{buckets:[]}}"+ - "] }" + - "}" - ); + client.testJQ( + params( + "q", + "*:*", + "json.facet", + "{" + + "books:{ type:terms, field:book_s, facet:{ pages:{type:terms, field:v_t, domain:{" + + toChildren + + "}} } }" + + "}"), + "facets=={ count:10" + + ", books:{buckets:[{val:A,count:1,pages:{buckets:[]}}" + + " ,{val:B,count:1,pages:{buckets:[{val:y,count:3},{val:x,count:2},{val:z,count:2}]}}" + + " 
,{val:C,count:1,pages:{buckets:[{val:x,count:1},{val:y,count:1},{val:z,count:1}]}}" + + " ,{val:D,count:1,pages:{buckets:[]}}" + + "] }" + + "}"); // test filter after join - client.testJQ(params(p, "q", "*:*" - , "json.facet", "{ " + - "pages1:{type:terms, field:v_t, domain:{"+toChildren+", filter:'*:*'} }" + - ",pages2:{type:terms, field:v_t, domain:{"+toChildren+", filter:'-id:3.1'} }" + - ",books:{type:terms, field:v_t, domain:{"+toParents+", filter:'*:*'} }" + - ",books2:{type:terms, field:v_t, domain:{"+toParents+", filter:'id:2'} }" + - "}" - ) - , "facets=={ count:10" + - ", pages1:{ buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ] }" + - ", pages2:{ buckets:[ {val:y,count:4},{val:z,count:3},{val:x,count:2} ] }" + - ", books:{ buckets:[ {val:q,count:2},{val:w,count:2},{val:e,count:1} ] }" + - ", books2:{ buckets:[ {val:q,count:1}, {val:w,count:1} ] }" + - "}" - ); - + client.testJQ( + params( + p, + "q", + "*:*", + "json.facet", + "{ " + + "pages1:{type:terms, field:v_t, domain:{" + + toChildren + + ", filter:'*:*'} }" + + ",pages2:{type:terms, field:v_t, domain:{" + + toChildren + + ", filter:'-id:3.1'} }" + + ",books:{type:terms, field:v_t, domain:{" + + toParents + + ", filter:'*:*'} }" + + ",books2:{type:terms, field:v_t, domain:{" + + toParents + + ", filter:'id:2'} }" + + "}"), + "facets=={ count:10" + + ", pages1:{ buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ] }" + + ", pages2:{ buckets:[ {val:y,count:4},{val:z,count:3},{val:x,count:2} ] }" + + ", books:{ buckets:[ {val:q,count:2},{val:w,count:2},{val:e,count:1} ] }" + + ", books2:{ buckets:[ {val:q,count:1}, {val:w,count:1} ] }" + + "}"); // test other various ways to get filters - client.testJQ(params(p, "q", "*:*", "f1","-id:3.1", "f2","id:2" - , "json.facet", "{ " + - "pages1:{type:terms, field:v_t, domain:{"+toChildren+", filter:[]} }" + - ",pages2:{type:terms, field:v_t, domain:{"+toChildren+", filter:{param:f1} } }" + - ",books:{type:terms, field:v_t, domain:{"+toParents+", filter:[{param:q},{param:missing_param}]} }" + - ",books2:{type:terms, field:v_t, domain:{"+toParents+", filter:[{param:f2}] } }" + - "}" - ) - , "facets=={ count:10" + - ", pages1:{ buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ] }" + - ", pages2:{ buckets:[ {val:y,count:4},{val:z,count:3},{val:x,count:2} ] }" + - ", books:{ buckets:[ {val:q,count:2},{val:w,count:2},{val:e,count:1} ] }" + - ", books2:{ buckets:[ {val:q,count:1}, {val:w,count:1} ] }" + - "}" - ); - + client.testJQ( + params( + p, + "q", + "*:*", + "f1", + "-id:3.1", + "f2", + "id:2", + "json.facet", + "{ " + + "pages1:{type:terms, field:v_t, domain:{" + + toChildren + + ", filter:[]} }" + + ",pages2:{type:terms, field:v_t, domain:{" + + toChildren + + ", filter:{param:f1} } }" + + ",books:{type:terms, field:v_t, domain:{" + + toParents + + ", filter:[{param:q},{param:missing_param}]} }" + + ",books2:{type:terms, field:v_t, domain:{" + + toParents + + ", filter:[{param:f2}] } }" + + "}"), + "facets=={ count:10" + + ", pages1:{ buckets:[ {val:y,count:4},{val:x,count:3},{val:z,count:3} ] }" + + ", pages2:{ buckets:[ {val:y,count:4},{val:z,count:3},{val:x,count:2} ] }" + + ", books:{ buckets:[ {val:q,count:2},{val:w,count:2},{val:e,count:1} ] }" + + ", books2:{ buckets:[ {val:q,count:1}, {val:w,count:1} ] }" + + "}"); } @Test @@ -3458,53 +4921,82 @@ public void testFacetValueTypes() throws Exception { public void testFacetValueTypesDistrib() throws Exception { initServers(); Client client = servers.getClient(random().nextInt()); - 
client.queryDefaults().set( "shards", servers.getShards()).set("debugQuery", Boolean.toString(random().nextBoolean()) ); + client + .queryDefaults() + .set("shards", servers.getShards()) + .set("debugQuery", Boolean.toString(random().nextBoolean())); doFacetValueTypeValidation(client); } private void doFacetValueTypeValidation(Client client) throws Exception { indexSimple(client); - client.testXQ(params("q", "*:*", "rows", "0", - "json.facet", "{cat_s:{type:terms,field:cat_s,mincount:0,missing:true,allBuckets:true,numBuckets:true,limit:1}}"), + client.testXQ( + params( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{cat_s:{type:terms,field:cat_s,mincount:0,missing:true,allBuckets:true,numBuckets:true,limit:1}}"), "/response/lst[@name='facets']/long[@name='count'][.=6]", // count - "/response/lst[@name='facets']/lst[@name='cat_s']/long[@name='numBuckets'][.=2]", // total no of buckets + "/response/lst[@name='facets']/lst[@name='cat_s']/long[@name='numBuckets'][.=2]", // total + // no of + // buckets "*[count(/response/lst[@name='facets']/lst[@name='cat_s']/arr[@name='buckets']/lst)=1]", // no of entries "/response/lst[@name='facets']/lst[@name='cat_s']/lst[@name='allBuckets']/long[@name='count'][.=5]", // allBuckets "/response/lst[@name='facets']/lst[@name='cat_s']/lst[@name='missing']/long[@name='count'][.=1]", // missing "/response/lst[@name='facets']/lst[@name='cat_s']/arr[@name='buckets']/lst[1]/str[@name='val'][.='B']", // facet value "/response/lst[@name='facets']/lst[@name='cat_s']/arr[@name='buckets']/lst[1]/long[@name='count'][.='3']" // facet count - ); + ); // aggregations types for string - client.testXQ(params("q", "*:*", "rows", "0", - "json.facet", "{unique:'unique(cat_s)',hll:'hll(cat_s)',vals:'countvals(cat_s)',missing:'missing(cat_s)'}"), + client.testXQ( + params( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{unique:'unique(cat_s)',hll:'hll(cat_s)',vals:'countvals(cat_s)',missing:'missing(cat_s)'}"), "/response/lst[@name='facets']/long[@name='count'][.=6]", // count "/response/lst[@name='facets']/long[@name='unique'][.=2]", // unique "/response/lst[@name='facets']/long[@name='hll'][.=2]", // hll "/response/lst[@name='facets']/long[@name='vals'][.=5]", // values "/response/lst[@name='facets']/long[@name='missing'][.=1]" // missing - ); + ); // aggregations types for number - client.testXQ(params("q", "*:*", "rows", "0", - "json.facet", "{unique:'unique(num_i)',hll:'hll(num_i)',vals:'countvals(num_i)',missing:'missing(num_i)'}"), + client.testXQ( + params( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{unique:'unique(num_i)',hll:'hll(num_i)',vals:'countvals(num_i)',missing:'missing(num_i)'}"), "/response/lst[@name='facets']/long[@name='count'][.=6]", // count "/response/lst[@name='facets']/long[@name='unique'][.=4]", // unique "/response/lst[@name='facets']/long[@name='hll'][.=4]", // hll "/response/lst[@name='facets']/long[@name='vals'][.=5]", // values "/response/lst[@name='facets']/long[@name='missing'][.=1]" // missing - ); + ); // aggregations types for multi-valued number - client.testXQ(params("q", "*:*", "rows", "0", - "json.facet", "{unique:'unique(num_is)',hll:'hll(num_is)',vals:'countvals(num_is)',missing:'missing(num_is)'}"), + client.testXQ( + params( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{unique:'unique(num_is)',hll:'hll(num_is)',vals:'countvals(num_is)',missing:'missing(num_is)'}"), "/response/lst[@name='facets']/long[@name='count'][.=6]", // count "/response/lst[@name='facets']/long[@name='unique'][.=7]", // unique 
"/response/lst[@name='facets']/long[@name='hll'][.=7]", // hll "/response/lst[@name='facets']/long[@name='vals'][.=9]", // values "/response/lst[@name='facets']/long[@name='missing'][.=1]" // missing - ); + ); } public void XtestPercentiles() { @@ -3532,7 +5024,7 @@ public void XtestPercentiles() { private static String str(AVLTreeDigest digest) { StringBuilder sb = new StringBuilder(); - for (double d : new double[] {0,.1,.5,.9,1}) { + for (double d : new double[] {0, .1, .5, .9, 1}) { sb.append(" ").append(digest.quantile(d)); } return sb.toString(); @@ -3595,16 +5087,14 @@ public void XtestHLL() { hll.addRaw(987654321); } - - /** atomicly resets the acctual AtomicLong value matches the expected and resets it to 0 */ + /** atomicly resets the acctual AtomicLong value matches the expected and resets it to 0 */ private static final void assertEqualsAndReset(String msg, long expected, AtomicLong actual) { final long current = actual.getAndSet(0); assertEquals(msg, expected, current); } - /** atomicly resets the acctual AtomicLong value matches the expected and resets it to 0 */ + /** atomicly resets the acctual AtomicLong value matches the expected and resets it to 0 */ private static final void assertEqualsAndReset(long expected, AtomicLong actual) { final long current = actual.getAndSet(0); assertEquals(expected, current); } - } diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetsStatsParsing.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetsStatsParsing.java index ed75cf59853..0ef08dd749b 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetsStatsParsing.java +++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetsStatsParsing.java @@ -16,9 +16,11 @@ */ package org.apache.solr.search.facet; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.core.IsInstanceOf.instanceOf; + import java.io.IOException; import java.util.Map; - import org.apache.lucene.queries.function.valuesource.IntFieldSource; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.util.Utils; @@ -28,75 +30,87 @@ import org.junit.BeforeClass; import org.noggit.ObjectBuilder; -import static org.hamcrest.CoreMatchers.not; -import static org.hamcrest.core.IsInstanceOf.instanceOf; - /** Whitebox test of the various syntaxes for specifying stats in JSON Facets */ public class TestJsonFacetsStatsParsing extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-tlog.xml","schema15.xml"); + initCore("solrconfig-tlog.xml", "schema15.xml"); } public void testSortEquality() throws Exception { - assertEquals(new FacetRequest.FacetSort("count", FacetRequest.SortDirection.desc), - FacetRequest.FacetSort.COUNT_DESC); - assertEquals(new FacetRequest.FacetSort("index", FacetRequest.SortDirection.asc), - FacetRequest.FacetSort.INDEX_ASC); - assertEquals(new FacetRequest.FacetSort("foo", FacetRequest.SortDirection.asc), - new FacetRequest.FacetSort("foo", FacetRequest.SortDirection.asc)); + assertEquals( + new FacetRequest.FacetSort("count", FacetRequest.SortDirection.desc), + FacetRequest.FacetSort.COUNT_DESC); + assertEquals( + new FacetRequest.FacetSort("index", FacetRequest.SortDirection.asc), + FacetRequest.FacetSort.INDEX_ASC); + assertEquals( + new FacetRequest.FacetSort("foo", FacetRequest.SortDirection.asc), + new FacetRequest.FacetSort("foo", FacetRequest.SortDirection.asc)); // negative assertions... 
- assertThat(new FacetRequest.FacetSort("foo", FacetRequest.SortDirection.desc), - not(new FacetRequest.FacetSort("foo", FacetRequest.SortDirection.asc))); - assertThat(new FacetRequest.FacetSort("bar", FacetRequest.SortDirection.desc), - not(new FacetRequest.FacetSort("foo", FacetRequest.SortDirection.desc))); + assertThat( + new FacetRequest.FacetSort("foo", FacetRequest.SortDirection.desc), + not(new FacetRequest.FacetSort("foo", FacetRequest.SortDirection.asc))); + assertThat( + new FacetRequest.FacetSort("bar", FacetRequest.SortDirection.desc), + not(new FacetRequest.FacetSort("foo", FacetRequest.SortDirection.desc))); } - + public void testEquality() throws IOException { - try (SolrQueryRequest req = req("custom_req_param","foo_i", - "overridden_param","xxxxx_i")) { - + try (SolrQueryRequest req = + req( + "custom_req_param", "foo_i", + "overridden_param", "xxxxx_i")) { + @SuppressWarnings({"unchecked"}) - final FacetRequest fr = FacetRequest.parse - (req, (Map<String,Object>) Utils.fromJSONString - ("{ " + - // with valuesource - " f1:'min(field(\"foo_i\"))', " + - " f2:'min(field($custom_req_param))', " + - // with fieldName and query de-reference - " s1:'min(foo_i)', " + - " s2:'min($custom_req_param)', " + - " s3:{ func:'min($custom_req_param)' }, " + - " s4:{ type:func, func:'min($custom_req_param)' }, " + - " s5:{ type:func, func:'min($custom_local_param)', custom_local_param:foo_i }, " + - " s6:{ type:func, func:'min($overridden_param)', overridden_param:foo_i }, " + - // test the test... - " diff:'min(field(\"bar_i\"))'," + - "}")); - + final FacetRequest fr = + FacetRequest.parse( + req, + (Map<String, Object>) + Utils.fromJSONString( + "{ " + + + // with valuesource + " f1:'min(field(\"foo_i\"))', " + + " f2:'min(field($custom_req_param))', " + + + // with fieldName and query de-reference + " s1:'min(foo_i)', " + + " s2:'min($custom_req_param)', " + + " s3:{ func:'min($custom_req_param)' }, " + + " s4:{ type:func, func:'min($custom_req_param)' }, " + + " s5:{ type:func, func:'min($custom_local_param)', custom_local_param:foo_i }, " + + " s6:{ type:func, func:'min($overridden_param)', overridden_param:foo_i }, " + + + // test the test... 
+ " diff:'min(field(\"bar_i\"))'," + + "}")); + final Map stats = fr.getFacetStats(); assertEquals(9, stats.size()); - - for (Map.Entry entry : stats.entrySet()) { + + for (Map.Entry entry : stats.entrySet()) { final String key = entry.getKey(); final AggValueSource agg = entry.getValue(); - + assertEquals("name of " + key, "min", agg.name()); MatcherAssert.assertThat("type of " + key, agg, instanceOf(SimpleAggValueSource.class)); SimpleAggValueSource sagg = (SimpleAggValueSource) agg; if (key.startsWith("f")) { // value source as arg to min MatcherAssert.assertThat("vs of " + key, sagg.getArg(), instanceOf(IntFieldSource.class)); - assertEquals("field of " + key, "foo_i", ((IntFieldSource)sagg.getArg()).getField()); + assertEquals("field of " + key, "foo_i", ((IntFieldSource) sagg.getArg()).getField()); assertEquals(key + ".equals(f1)", agg, stats.get("f1")); } else if (key.startsWith("s")) { // field as arg to min - MatcherAssert.assertThat("vs of " + key, sagg.getArg(), instanceOf(FieldNameValueSource.class)); - assertEquals("field of " + key, "foo_i", ((FieldNameValueSource)sagg.getArg()).getFieldName()); + MatcherAssert.assertThat( + "vs of " + key, sagg.getArg(), instanceOf(FieldNameValueSource.class)); + assertEquals( + "field of " + key, "foo_i", ((FieldNameValueSource) sagg.getArg()).getFieldName()); assertEquals(key + ".equals(s1)", agg, stats.get("s1")); - assertEquals("s1.equals("+key+")", stats.get("s1"), agg); + assertEquals("s1.equals(" + key + ")", stats.get("s1"), agg); } else if ("diff".equals(key)) { - assertEquals("field of " + key, "bar_i", ((IntFieldSource)sagg.getArg()).getField()); + assertEquals("field of " + key, "bar_i", ((IntFieldSource) sagg.getArg()).getField()); assertNotEquals("diff.equals(s1) ?!?!", agg, stats.get("f1")); } } @@ -108,20 +122,23 @@ public void testVerboseSyntaxWithLocalParams() throws IOException { // some parsers may choose to use "global" req params as defaults/shadows for // local params, but DebugAgg does not -- so use these to test that the // JSON Parsing doesn't pollute the local params the ValueSourceParser gets... 
- try (SolrQueryRequest req = req("foo", "zzzz", "yaz", "zzzzz")) { - final FacetRequest fr = FacetRequest.parse - (req, (Map<String,Object>) ObjectBuilder.fromJSON - ("{ x:{type:func, func:'debug()', foo:['abc','xyz'], bar:4.2 } }")); + try (SolrQueryRequest req = req("foo", "zzzz", "yaz", "zzzzz")) { + final FacetRequest fr = + FacetRequest.parse( + req, + (Map<String, Object>) + ObjectBuilder.fromJSON( + "{ x:{type:func, func:'debug()', foo:['abc','xyz'], bar:4.2 } }")); final Map<String, AggValueSource> stats = fr.getFacetStats(); assertEquals(1, stats.size()); AggValueSource agg = stats.get("x"); assertNotNull(agg); MatcherAssert.assertThat(agg, instanceOf(DebugAgg.class)); - - DebugAgg x = (DebugAgg)agg; + + DebugAgg x = (DebugAgg) agg; assertEquals(new String[] {"abc", "xyz"}, x.localParams.getParams("foo")); - assertEquals((Float)4.2F, x.localParams.getFloat("bar")); + assertEquals((Float) 4.2F, x.localParams.getFloat("bar")); assertNull(x.localParams.get("yaz")); } } diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetsWithNestedObjects.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetsWithNestedObjects.java index 3f297d1100f..802b1fa6b1b 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetsWithNestedObjects.java +++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetsWithNestedObjects.java @@ -17,7 +17,6 @@ package org.apache.solr.search.facet; import java.io.IOException; - import org.apache.solr.SolrTestCaseHS; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.common.SolrInputDocument; @@ -25,11 +24,11 @@ import org.junit.BeforeClass; import org.junit.Test; -public class TestJsonFacetsWithNestedObjects extends SolrTestCaseHS{ +public class TestJsonFacetsWithNestedObjects extends SolrTestCaseHS { @BeforeClass public static void beforeTests() throws Exception { - initCore("solrconfig-tlog.xml","schema_latest.xml"); + initCore("solrconfig-tlog.xml", "schema_latest.xml"); indexBooksAndReviews(); } @@ -38,17 +37,19 @@ private static void indexBooksAndReviews() throws Exception { indexDocs(client); } - private static void indexDocs(final Client client) throws IOException, SolrServerException, Exception { + private static void indexDocs(final Client client) + throws IOException, SolrServerException, Exception { client.deleteByQuery("*:*", null); - SolrInputDocument book1 = sdoc( - "id", "book1", - "type_s", "book", - "title_t", "The Way of Kings", - "author_s", "Brandon Sanderson", - "cat_s", "fantasy", - "pubyear_i", "2010", - "publisher_s","Tor"); + SolrInputDocument book1 = + sdoc( + "id", "book1", + "type_s", "book", + "title_t", "The Way of Kings", + "author_s", "Brandon Sanderson", + "cat_s", "fantasy", + "pubyear_i", "2010", + "publisher_s", "Tor"); book1.addChildDocument( sdoc( @@ -71,14 +72,15 @@ private static void indexDocs(final Client client) throws IOException, SolrServe if (rarely()) { client.commit(); } - SolrInputDocument book2 = sdoc( - "id", "book2", - "type_s", "book", - "title_t", "Snow Crash", - "author_s", "Neal Stephenson", - "cat_s", "sci-fi", - "pubyear_i", "1992", - "publisher_s","Bantam"); + SolrInputDocument book2 = + sdoc( + "id", "book2", + "type_s", "book", + "title_t", "Snow Crash", + "author_s", "Neal Stephenson", + "cat_s", "sci-fi", + "pubyear_i", "1992", + "publisher_s", "Bantam"); book2.addChildDocument( sdoc( @@ -112,323 +114,380 @@ private static void indexDocs(final Client client) throws IOException, SolrServe } /** - * Example from http://yonik.com/solr-nested-objects/ - * The main query gives 
us a document list of reviews by author_s:yonik - * If we want to facet on the book genre (cat_s field) then we need to + * Example from http://yonik.com/solr-nested-objects/ The main query gives us a document list of + * reviews by author_s:yonik If we want to facet on the book genre (cat_s field) then we need to * switch the domain from the children (type_s:reviews) to the parents (type_s:books). * - * And we get a facet over the books which yonik reviewed + *
<p>
And we get a facet over the books which yonik reviewed * - * Note that regardless of which direction we are mapping - * (parents to children or children to parents), - * we provide a query that defines the complete set of parents in the index. - * In these examples, the parent filter is “type_s:book”. + *
<p>
Note that regardless of which direction we are mapping (parents to children or children to + * parents), we provide a query that defines the complete set of parents in the index. In these + * examples, the parent filter is “type_s:book”. */ @Test public void testFacetingOnParents() throws Exception { final Client client = Client.localClient(); - ModifiableSolrParams p = params("rows","10"); - client.testJQ(params(p, "q", "author_s:yonik", "fl", "id", "fl" , "comment_t" - , "json.facet", "{" + - " genres: {" + - " type:terms," + - " field:cat_s," + - " domain: { blockParent : \"type_s:book\" }" + - " }" + - "}" - ) - , "response=={numFound:2,start:0,'numFoundExact':true,docs:[" + - " {id:book1_c1," + - " comment_t:\"A great start to what looks like an epic series!\"}," + - " {id:book2_c1," + - " comment_t:\"Ahead of its time... I wonder if it helped inspire The Matrix?\"}]}" - , "facets=={ count:2," + - "genres:{buckets:[ {val:fantasy, count:1}," + - " {val:sci-fi, count:1}]}}" - ); + ModifiableSolrParams p = params("rows", "10"); + client.testJQ( + params( + p, + "q", + "author_s:yonik", + "fl", + "id", + "fl", + "comment_t", + "json.facet", + "{" + + " genres: {" + + " type:terms," + + " field:cat_s," + + " domain: { blockParent : \"type_s:book\" }" + + " }" + + "}"), + "response=={numFound:2,start:0,'numFoundExact':true,docs:[" + + " {id:book1_c1," + + " comment_t:\"A great start to what looks like an epic series!\"}," + + " {id:book2_c1," + + " comment_t:\"Ahead of its time... I wonder if it helped inspire The Matrix?\"}]}", + "facets=={ count:2," + + "genres:{buckets:[ {val:fantasy, count:1}," + + " {val:sci-fi, count:1}]}}"); } /** - * Example from http://yonik.com/solr-nested-objects/ - * Now lets say we’re displaying the top sci-fi and fantasy books, - * and we want to find out who reviews the most books out of our selection. - * Since our root implicit facet bucket (formed by the query and filters) - * consists of parent documents (books), - * we need to switch the facet domain to the children for the author facet. + * Example from http://yonik.com/solr-nested-objects/ Now lets say we’re displaying the top sci-fi + * and fantasy books, and we want to find out who reviews the most books out of our selection. + * Since our root implicit facet bucket (formed by the query and filters) consists of parent + * documents (books), we need to switch the facet domain to the children for the author facet. * - * Note that regardless of which direction we are mapping - * (parents to children or children to parents), - * we provide a query that defines the complete set of parents in the index. - * In these examples, the parent filter is “type_s:book”. + *
<p>
Note that regardless of which direction we are mapping (parents to children or children to + * parents), we provide a query that defines the complete set of parents in the index. In these + * examples, the parent filter is “type_s:book”. */ @Test public void testFacetingOnChildren() throws Exception { final Client client = Client.localClient(); - ModifiableSolrParams p = params("rows","10"); - client.testJQ(params(p, "q", "cat_s:(sci-fi OR fantasy)", "fl", "id", "fl" , "title_t" - , "json.facet", "{" + - " top_reviewers: {" + - " type:terms," + - " field:author_s," + - " domain: { blockChildren : \"type_s:book\" }" + - " }" + - "}" - ) - , "response=={numFound:2,start:0,'numFoundExact':true,docs:[" + - " {id:book1," + - " title_t:\"The Way of Kings\"}," + - " {id:book2," + - " title_t:\"Snow Crash\"}]}" - , "facets=={ count:2," + - "top_reviewers:{buckets:[ {val:dan, count:2}," + - " {val:yonik, count:2}," + - " {val:mary, count:1} ]}}" - ); + ModifiableSolrParams p = params("rows", "10"); + client.testJQ( + params( + p, + "q", + "cat_s:(sci-fi OR fantasy)", + "fl", + "id", + "fl", + "title_t", + "json.facet", + "{" + + " top_reviewers: {" + + " type:terms," + + " field:author_s," + + " domain: { blockChildren : \"type_s:book\" }" + + " }" + + "}"), + "response=={numFound:2,start:0,'numFoundExact':true,docs:[" + + " {id:book1," + + " title_t:\"The Way of Kings\"}," + + " {id:book2," + + " title_t:\"Snow Crash\"}]}", + "facets=={ count:2," + + "top_reviewers:{buckets:[ {val:dan, count:2}," + + " {val:yonik, count:2}," + + " {val:mary, count:1} ]}}"); } - - /** - * Explicit filter exclusions for rolled up child facets - */ + /** Explicit filter exclusions for rolled up child facets */ @Test public void testExplicitFilterExclusions() throws Exception { final Client client = Client.localClient(); - ModifiableSolrParams p = params("rows","10"); - client.testJQ(params(p, "q", "{!parent which=type_s:book}comment_t:* %2Bauthor_s:yonik %2Bstars_i:(5 3)" - , "fl", "id", "fl" , "title_t" - , "json.facet", "{" + - " comments_for_author: {" + - " type:query," + - //note: author filter is excluded - " q:\"comment_t:* +stars_i:(5 3)\"," + - " domain: { blockChildren : \"type_s:book\" }," + - " facet:{" + - " authors:{" + - " type:terms," + - " field:author_s," + - " facet: {" + - " in_books: \"unique(_root_)\" }}}}," + - " comments_for_stars: {" + - " type:query," + - //note: stars_i filter is excluded - " q:\"comment_t:* +author_s:yonik\"," + - " domain: { blockChildren : \"type_s:book\" }," + - " facet:{" + - " stars:{" + - " type:terms," + - " field:stars_i," + - " facet: {" + - " in_books: \"unique(_root_)\" }}}}}" ) - - , "response=={numFound:2,start:0,'numFoundExact':true,docs:[" + - " {id:book1," + - " title_t:\"The Way of Kings\"}," + - " {id:book2," + - " title_t:\"Snow Crash\"}]}" - , "facets=={ count:2," + - "comments_for_author:{" + - " count:3," + - " authors:{" + - " buckets:[ {val:yonik, count:2, in_books:2}," + - " {val:dan, count:1, in_books:1} ]}}," + - "comments_for_stars:{" + - " count:2," + - " stars:{" + - " buckets:[ {val:5, count:2, in_books:2} ]}}}" - ); + ModifiableSolrParams p = params("rows", "10"); + client.testJQ( + params( + p, + "q", + "{!parent which=type_s:book}comment_t:* %2Bauthor_s:yonik %2Bstars_i:(5 3)", + "fl", + "id", + "fl", + "title_t", + "json.facet", + "{" + + " comments_for_author: {" + + " type:query," + + + // note: author filter is excluded + " q:\"comment_t:* +stars_i:(5 3)\"," + + " domain: { blockChildren : \"type_s:book\" }," + + " facet:{" + + " 
authors:{" + + " type:terms," + + " field:author_s," + + " facet: {" + + " in_books: \"unique(_root_)\" }}}}," + + " comments_for_stars: {" + + " type:query," + + + // note: stars_i filter is excluded + " q:\"comment_t:* +author_s:yonik\"," + + " domain: { blockChildren : \"type_s:book\" }," + + " facet:{" + + " stars:{" + + " type:terms," + + " field:stars_i," + + " facet: {" + + " in_books: \"unique(_root_)\" }}}}}"), + "response=={numFound:2,start:0,'numFoundExact':true,docs:[" + + " {id:book1," + + " title_t:\"The Way of Kings\"}," + + " {id:book2," + + " title_t:\"Snow Crash\"}]}", + "facets=={ count:2," + + "comments_for_author:{" + + " count:3," + + " authors:{" + + " buckets:[ {val:yonik, count:2, in_books:2}," + + " {val:dan, count:1, in_books:1} ]}}," + + "comments_for_stars:{" + + " count:2," + + " stars:{" + + " buckets:[ {val:5, count:2, in_books:2} ]}}}"); } - /** - * Child level facet exclusions - */ + /** Child level facet exclusions */ @Test public void testChildLevelFilterExclusions() throws Exception { final Client client = Client.localClient(); - ModifiableSolrParams p = params("rows","10"); - client.testJQ(params(p, "q", "{!parent filters=$child.fq which=type_s:book v=$childquery}" - , "childquery", "comment_t:*" - , "child.fq", "{!tag=author}author_s:yonik" - , "child.fq", "{!tag=stars}stars_i:(5 3)" - , "fl", "id", "fl" , "title_t" - , "json.facet", "{" + - " comments_for_author: {" + - " type:query," + - //note: author filter is excluded - " q:\"{!filters param=$child.fq excludeTags=author v=$childquery}\"," + - " domain: { blockChildren : \"type_s:book\", excludeTags:author }," + - " facet:{" + - " authors:{" + - " type:terms," + - " field:author_s," + - " facet: {" + - " in_books: \"unique(_root_)\" }}}}," + - " comments_for_stars: {" + - " type:query," + - //note: stars_i filter is excluded - " q:\"{!filters param=$child.fq excludeTags=stars v=$childquery}\"," + - " domain: { blockChildren : \"type_s:book\" }," + - " facet:{" + - " stars:{" + - " type:terms," + - " field:stars_i," + - " facet: {" + - " in_books: \"unique(_root_)\" }}}}}" ) - - , "response=={numFound:2,start:0,'numFoundExact':true,docs:[" + - " {id:book1," + - " title_t:\"The Way of Kings\"}," + - " {id:book2," + - " title_t:\"Snow Crash\"}]}" - , "facets=={ count:2," + - "comments_for_author:{" + - " count:3," + - " authors:{" + - " buckets:[ {val:yonik, count:2, in_books:2}," + - " {val:dan, count:1, in_books:1} ]}}," + - "comments_for_stars:{" + - " count:2," + - " stars:{" + - " buckets:[ {val:5, count:2, in_books:2} ]}}}" - ); + ModifiableSolrParams p = params("rows", "10"); + client.testJQ( + params( + p, + "q", + "{!parent filters=$child.fq which=type_s:book v=$childquery}", + "childquery", + "comment_t:*", + "child.fq", + "{!tag=author}author_s:yonik", + "child.fq", + "{!tag=stars}stars_i:(5 3)", + "fl", + "id", + "fl", + "title_t", + "json.facet", + "{" + + " comments_for_author: {" + + " type:query," + + + // note: author filter is excluded + " q:\"{!filters param=$child.fq excludeTags=author v=$childquery}\"," + + " domain: { blockChildren : \"type_s:book\", excludeTags:author }," + + " facet:{" + + " authors:{" + + " type:terms," + + " field:author_s," + + " facet: {" + + " in_books: \"unique(_root_)\" }}}}," + + " comments_for_stars: {" + + " type:query," + + + // note: stars_i filter is excluded + " q:\"{!filters param=$child.fq excludeTags=stars v=$childquery}\"," + + " domain: { blockChildren : \"type_s:book\" }," + + " facet:{" + + " stars:{" + + " type:terms," + + " 
field:stars_i," + + " facet: {" + + " in_books: \"unique(_root_)\" }}}}}"), + "response=={numFound:2,start:0,'numFoundExact':true,docs:[" + + " {id:book1," + + " title_t:\"The Way of Kings\"}," + + " {id:book2," + + " title_t:\"Snow Crash\"}]}", + "facets=={ count:2," + + "comments_for_author:{" + + " count:3," + + " authors:{" + + " buckets:[ {val:yonik, count:2, in_books:2}," + + " {val:dan, count:1, in_books:1} ]}}," + + "comments_for_stars:{" + + " count:2," + + " stars:{" + + " buckets:[ {val:5, count:2, in_books:2} ]}}}"); } public void testDomainFilterExclusionsInFilters() throws Exception { final Client client = Client.localClient(); - ModifiableSolrParams p = params("rows","10"); - client.testJQ(params(p, "q", "{!parent tag=top filters=$child.fq which=type_s:book v=$childquery}" - , "childquery", "comment_t:*" - , "child.fq", "{!tag=author}author_s:dan" - , "child.fq", "{!tag=stars}stars_i:4" - , "fq", "{!tag=top}title_t:Snow\\ Crash" - , "fl", "id", "fl" , "title_t" - , "json.facet", "{" + - " comments_for_author: {" + - " domain: { excludeTags:\"top\"," + // 1. throwing current parent docset, - " filter:[\"{!filters param=$child.fq " + // compute children docset from scratch - " excludeTags=author v=$childquery}\"]"// 3. filter children with exclusions - + " }," + - " type:terms," + - " field:author_s," + - " facet: {" + - " in_books: \"unique(_root_)\" }"+//}}," + - " }" + - //note: stars_i filter is excluded - " ,comments_for_stars: {" + - " domain: { excludeTags:top, " + - " filter:\"{!filters param=$child.fq excludeTags=stars v=$childquery}\" }," + - " type:terms," + - " field:stars_i," + - " facet: {" + - " in_books: \"unique(_root_)\" }}"+ - - " ,comments_for_stars_parent_filter: {" + - " domain: { excludeTags:top, " + - " filter:[\"{!filters param=$child.fq excludeTags=stars v=$childquery}\"," - + " \"{!child of=type_s:book filters=$fq}type_s:book\"] }," + - " type:terms," + - " field:stars_i," + - " facet: {" + - " in_books: \"unique(_root_)\" }}"+ - "}" ) - - , "response=={numFound:0,start:0,'numFoundExact':true,docs:[]}" - , "facets=={ count:0," + - "comments_for_author:{" + - " buckets:[ {val:mary, count:1, in_books:1} ]}," + - "comments_for_stars:{" + - " buckets:[ {val:2, count:1, in_books:1}," + - " {val:3, count:1, in_books:1} ]}," + - "comments_for_stars_parent_filter:{" + - " buckets:[ {val:2, count:1, in_books:1}" + - " ]}}" - ); + ModifiableSolrParams p = params("rows", "10"); + client.testJQ( + params( + p, + "q", + "{!parent tag=top filters=$child.fq which=type_s:book v=$childquery}", + "childquery", + "comment_t:*", + "child.fq", + "{!tag=author}author_s:dan", + "child.fq", + "{!tag=stars}stars_i:4", + "fq", + "{!tag=top}title_t:Snow\\ Crash", + "fl", + "id", + "fl", + "title_t", + "json.facet", + "{" + + " comments_for_author: {" + + " domain: { excludeTags:\"top\"," + + // 1. throwing current parent docset, + " filter:[\"{!filters param=$child.fq " + + // compute children docset from scratch + " excludeTags=author v=$childquery}\"]" // 3. 
filter children + // with exclusions + + " }," + + " type:terms," + + " field:author_s," + + " facet: {" + + " in_books: \"unique(_root_)\" }" + + // }}," + + " }" + + + // note: stars_i filter is excluded + " ,comments_for_stars: {" + + " domain: { excludeTags:top, " + + " filter:\"{!filters param=$child.fq excludeTags=stars v=$childquery}\" }," + + " type:terms," + + " field:stars_i," + + " facet: {" + + " in_books: \"unique(_root_)\" }}" + + " ,comments_for_stars_parent_filter: {" + + " domain: { excludeTags:top, " + + " filter:[\"{!filters param=$child.fq excludeTags=stars v=$childquery}\"," + + " \"{!child of=type_s:book filters=$fq}type_s:book\"] }," + + " type:terms," + + " field:stars_i," + + " facet: {" + + " in_books: \"unique(_root_)\" }}" + + "}"), + "response=={numFound:0,start:0,'numFoundExact':true,docs:[]}", + "facets=={ count:0," + + "comments_for_author:{" + + " buckets:[ {val:mary, count:1, in_books:1} ]}," + + "comments_for_stars:{" + + " buckets:[ {val:2, count:1, in_books:1}," + + " {val:3, count:1, in_books:1} ]}," + + "comments_for_stars_parent_filter:{" + + " buckets:[ {val:2, count:1, in_books:1}" + + " ]}}"); } public void testBoolExclusion() throws Exception { final Client client = Client.localClient(); ModifiableSolrParams p = params("rows", "0"); - client.testJQ(params(p, - "json.queries", "{'childquery':'comment_t:*'," + - "'parentquery':'type_s:book'," + - "'child.fq':[ {'#author':{'field':{'f':'author_s','query':'dan'}}}," + - "{'#stars':{'field':{'f':'stars_i','query':'4'}}}]," + - "'snowcrash':{'field':{'f':'title_t','query':'Snow Crash'}}" + - "}", - "json.query", "{'#top':{'parent':{'which':{'param':'parentquery'}," + - "'query':{'bool':{'filter':{'param':'child.fq'}," + - "'must':{'param':'childquery'}" + - "}}}}}", - "json.facet", "{" + - "'comments_for_author':{'domain':{" + - "'excludeTags':'top', " + - "'blockChildren':'{!v=$parentquery}'," + - "'filter':'{!bool filter=$child.fq filter=$childquery excludeTags=author}'" + - "}," + - "'type':'terms', 'field':'author_s'" + - "}" + - " ,comments_for_stars: {" + - " domain: { excludeTags:top, " + - "'blockChildren':'{!v=$parentquery}'," + - " filter:\"{!bool filter=$child.fq excludeTags=stars filter=$childquery}\" }," + - "type:terms, field:stars_i" + - "}" + - ",comments_for_stars_parent_filter: {" + - "domain: { " + - "excludeTags:top, " + - "'blockChildren':'{!v=$parentquery}'," + - "filter:[\"{!bool filter=$child.fq excludeTags=stars filter=$childquery}\"," - + "\"{!child of=$parentquery}{!bool filter=$snowcrash}}\"] }," + - "type:terms, field:stars_i"+ - " }" + - "}", - "json.fields", "'id,title_t'") - , "response=={numFound:0,start:0,'numFoundExact':true,docs:[]}" - , "facets=={ count:0," + - "comments_for_author:{" + - " buckets:[ {val:mary, count:1} ]}" + - "," + - "comments_for_stars:{" + - " buckets:[ {val:2, count:1}," + - " {val:3, count:1} ]}," + - "comments_for_stars_parent_filter:{" + - " buckets:[ {val:2, count:1} ]}" + - "}"); + client.testJQ( + params( + p, + "json.queries", + "{'childquery':'comment_t:*'," + + "'parentquery':'type_s:book'," + + "'child.fq':[ {'#author':{'field':{'f':'author_s','query':'dan'}}}," + + "{'#stars':{'field':{'f':'stars_i','query':'4'}}}]," + + "'snowcrash':{'field':{'f':'title_t','query':'Snow Crash'}}" + + "}", + "json.query", + "{'#top':{'parent':{'which':{'param':'parentquery'}," + + "'query':{'bool':{'filter':{'param':'child.fq'}," + + "'must':{'param':'childquery'}" + + "}}}}}", + "json.facet", + "{" + + "'comments_for_author':{'domain':{" + + 
"'excludeTags':'top', " + + "'blockChildren':'{!v=$parentquery}'," + + "'filter':'{!bool filter=$child.fq filter=$childquery excludeTags=author}'" + + "}," + + "'type':'terms', 'field':'author_s'" + + "}" + + " ,comments_for_stars: {" + + " domain: { excludeTags:top, " + + "'blockChildren':'{!v=$parentquery}'," + + " filter:\"{!bool filter=$child.fq excludeTags=stars filter=$childquery}\" }," + + "type:terms, field:stars_i" + + "}" + + ",comments_for_stars_parent_filter: {" + + "domain: { " + + "excludeTags:top, " + + "'blockChildren':'{!v=$parentquery}'," + + "filter:[\"{!bool filter=$child.fq excludeTags=stars filter=$childquery}\"," + + "\"{!child of=$parentquery}{!bool filter=$snowcrash}}\"] }," + + "type:terms, field:stars_i" + + " }" + + "}", + "json.fields", + "'id,title_t'"), + "response=={numFound:0,start:0,'numFoundExact':true,docs:[]}", + "facets=={ count:0," + + "comments_for_author:{" + + " buckets:[ {val:mary, count:1} ]}" + + "," + + "comments_for_stars:{" + + " buckets:[ {val:2, count:1}," + + " {val:3, count:1} ]}," + + "comments_for_stars_parent_filter:{" + + " buckets:[ {val:2, count:1} ]}" + + "}"); } public void testUniqueBlock() throws Exception { final Client client = Client.localClient(); - ModifiableSolrParams p = params("rows","0"); + ModifiableSolrParams p = params("rows", "0"); // unique block using field and query logic - client.testJQ(params(p, "q", "{!parent tag=top which=type_s:book v=$childquery}" - , "childquery", "comment_t:*" - , "fl", "id", "fl" , "title_t" - , "root", "_root_" - , "parentQuery", "type_s:book" - , "json.facet", "{" + - " types: {" + - " domain: { blockChildren:\"type_s:book\"" + - " }," + - " type:terms," + - " field:type_s," + - " limit:-1," + - " facet: {" + - " in_books1: \"uniqueBlock(_root_)\"," + // field logic - " in_books2: \"uniqueBlock($root)\"," + // field reference logic - " via_query1:\"uniqueBlock({!v=type_s:book})\", " + // query logic - " via_query2:\"uniqueBlock({!v=$parentQuery})\" ," + // query reference logic - " partial_query:\"uniqueBlock({!v=cat_s:fantasy})\" ," + // first doc hit only, never count afterwards - " query_no_match:\"uniqueBlock({!v=cat_s:horor})\" }" + - " }" + - "}" ) - - , "response=={numFound:2,start:0,'numFoundExact':true,docs:[]}" - , "facets=={ count:2," + - "types:{" + - " buckets:[ {val:review, count:5, in_books1:2, in_books2:2, " + client.testJQ( + params( + p, + "q", + "{!parent tag=top which=type_s:book v=$childquery}", + "childquery", + "comment_t:*", + "fl", + "id", + "fl", + "title_t", + "root", + "_root_", + "parentQuery", + "type_s:book", + "json.facet", + "{" + + " types: {" + + " domain: { blockChildren:\"type_s:book\"" + + " }," + + " type:terms," + + " field:type_s," + + " limit:-1," + + " facet: {" + + " in_books1: \"uniqueBlock(_root_)\"," + + // field logic + " in_books2: \"uniqueBlock($root)\"," + + // field reference logic + " via_query1:\"uniqueBlock({!v=type_s:book})\", " + + // query logic + " via_query2:\"uniqueBlock({!v=$parentQuery})\" ," + + // query reference logic + " partial_query:\"uniqueBlock({!v=cat_s:fantasy})\" ," + + // first doc hit only, never count afterwards + " query_no_match:\"uniqueBlock({!v=cat_s:horor})\" }" + + " }" + + "}"), + "response=={numFound:2,start:0,'numFoundExact':true,docs:[]}", + "facets=={ count:2," + + "types:{" + + " buckets:[ {val:review, count:5, in_books1:2, in_books2:2, " + " via_query1:2, via_query2:2, " - + " partial_query:1, query_no_match:0} ]}" + - "}" - ); + + " partial_query:1, query_no_match:0} ]}" + + "}"); } } diff --git 
a/solr/core/src/test/org/apache/solr/search/facet/TestJsonRangeFacets.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonRangeFacets.java index a3db86fa9b7..4cd5a21a683 100644 --- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonRangeFacets.java +++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonRangeFacets.java @@ -18,7 +18,6 @@ package org.apache.solr.search.facet; import java.util.Arrays; - import org.apache.solr.JSONTestUtil; import org.apache.solr.SolrTestCaseHS; import org.apache.solr.common.params.SolrParams; @@ -26,10 +25,9 @@ import org.junit.BeforeClass; import org.junit.Test; - public class TestJsonRangeFacets extends SolrTestCaseHS { - private static SolrInstances servers; // for distributed testing + private static SolrInstances servers; // for distributed testing private static String cache; @SuppressWarnings("deprecation") @@ -39,15 +37,14 @@ public static void beforeTests() throws Exception { JSONTestUtil.failRepeatedKeys = true; // we need DVs on point fields to compute stats & facets - if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true"); + if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) + System.setProperty(NUMERIC_DOCVALUES_SYSPROP, "true"); - initCore("solrconfig-tlog.xml","schema_latest.xml"); + initCore("solrconfig-tlog.xml", "schema_latest.xml"); cache = Boolean.toString(random().nextBoolean()); } - /** - * Start all servers for cluster if they don't already exist - */ + /** Start all servers for cluster if they don't already exist */ public static void initServers() throws Exception { if (servers == null) { servers = new SolrInstances(3, "solrconfig-tlog.xml", "schema_latest.xml"); @@ -67,29 +64,73 @@ public static void afterTests() throws Exception { public void indexSimple(Client client) throws Exception { client.deleteByQuery("*:*", null); - client.add(sdoc("id", "1", "cat_s", "A", "where_s", "NY", "num_d", "4", "num_i", "2", - "num_is", "4", "num_is", "2", - "val_b", "true", "sparse_s", "one"), null); - client.add(sdoc("id", "2", "cat_s", "B", "where_s", "NJ", "num_d", "-9", "num_i", "-5", - "num_is", "-9", "num_is", "-5", - "val_b", "false"), null); + client.add( + sdoc( + "id", + "1", + "cat_s", + "A", + "where_s", + "NY", + "num_d", + "4", + "num_i", + "2", + "num_is", + "4", + "num_is", + "2", + "val_b", + "true", + "sparse_s", + "one"), + null); + client.add( + sdoc( + "id", "2", "cat_s", "B", "where_s", "NJ", "num_d", "-9", "num_i", "-5", "num_is", "-9", + "num_is", "-5", "val_b", "false"), + null); client.add(sdoc("id", "3"), null); client.commit(); - client.add(sdoc("id", "4", "cat_s", "A", "where_s", "NJ", "num_d", "2", "num_i", "3", - "num_is", "2", "num_is", "3"), null); - client.add(sdoc("id", "5", "cat_s", "B", "where_s", "NJ", "num_d", "11", "num_i", "7", - "num_is", "11", "num_is", "7", - "sparse_s", "two"),null); + client.add( + sdoc( + "id", "4", "cat_s", "A", "where_s", "NJ", "num_d", "2", "num_i", "3", "num_is", "2", + "num_is", "3"), + null); + client.add( + sdoc( + "id", + "5", + "cat_s", + "B", + "where_s", + "NJ", + "num_d", + "11", + "num_i", + "7", + "num_is", + "11", + "num_is", + "7", + "sparse_s", + "two"), + null); client.commit(); - client.add(sdoc("id", "6", "cat_s", "B", "where_s", "NY", "num_d", "-5", "num_i", "-5", - "num_is", "-5"),null); + client.add( + sdoc( + "id", "6", "cat_s", "B", "where_s", "NY", "num_d", "-5", "num_i", "-5", "num_is", "-5"), + null); client.commit(); } public void testRangeOtherWhiteboxDistrib() throws Exception { 
initServers(); Client client = servers.getClient(random().nextInt()); - client.queryDefaults().set( "shards", servers.getShards()).set("debugQuery", Boolean.toString(random().nextBoolean()) ); + client + .queryDefaults() + .set("shards", servers.getShards()) + .set("debugQuery", Boolean.toString(random().nextBoolean())); } public void testRangeOtherWhitebox() throws Exception { @@ -97,8 +138,8 @@ public void testRangeOtherWhitebox() throws Exception { } /** - * whitebox sanity checks that a shard request range facet that returns "between" or "after" - * will cause the correct "actual_end" to be returned + * whitebox sanity checks that a shard request range facet that returns "between" or "after" will + * cause the correct "actual_end" to be returned */ private void doRangeOtherWhitebox(Client client) throws Exception { client.queryDefaults().set("cache", cache); @@ -108,60 +149,84 @@ private void doRangeOtherWhitebox(Client client) throws Exception { final String nohardend = random().nextBoolean() ? "" : " hardend:false, "; { // first check some "phase #1" requests - - final SolrParams p = params("q", "*:*", "rows", "0", "isShard", "true", "distrib", "false", - "_facet_", "{}", "shards.purpose", ""+FacetModule.PURPOSE_GET_JSON_FACETS); + final SolrParams p = + params( + "q", + "*:*", + "rows", + "0", + "isShard", + "true", + "distrib", + "false", + "_facet_", + "{}", + "shards.purpose", + "" + FacetModule.PURPOSE_GET_JSON_FACETS); final String basic_opts = "type:range, field:num_d, start:-5, end:10, gap:7, "; - final String buckets = "buckets:[ {val:-5.0,count:1}, {val:2.0,count:2}, {val:9.0,count:1} ], "; + final String buckets = + "buckets:[ {val:-5.0,count:1}, {val:2.0,count:2}, {val:9.0,count:1} ], "; - client.testJQ(params(p, "json.facet", "{f:{ " + basic_opts + nohardend + " other:before}}") - , "facets=={count:6, f:{" + buckets + client.testJQ( + params(p, "json.facet", "{f:{ " + basic_opts + nohardend + " other:before}}"), + "facets=={count:6, f:{" + + buckets // before doesn't need actual_end + " before:{count:1}" - + "} }" - ); - client.testJQ(params(p, "json.facet", "{f:{" + basic_opts + nohardend + "other:after}}") - , "facets=={count:6, f:{" + buckets - + " after:{count:0}, _actual_end:'16.0'" - + "} }" - ); - client.testJQ(params(p, "json.facet", "{f:{ " + basic_opts + nohardend + "other:between}}") - , "facets=={count:6, f:{" + buckets - + " between:{count:4}, _actual_end:'16.0'" - + "} }" - ); - client.testJQ(params(p, "json.facet", "{f:{ " + basic_opts + nohardend + "other:all}}") - , "facets=={count:6, f:{" + buckets + + "} }"); + client.testJQ( + params(p, "json.facet", "{f:{" + basic_opts + nohardend + "other:after}}"), + "facets=={count:6, f:{" + buckets + " after:{count:0}, _actual_end:'16.0'" + "} }"); + client.testJQ( + params(p, "json.facet", "{f:{ " + basic_opts + nohardend + "other:between}}"), + "facets=={count:6, f:{" + buckets + " between:{count:4}, _actual_end:'16.0'" + "} }"); + client.testJQ( + params(p, "json.facet", "{f:{ " + basic_opts + nohardend + "other:all}}"), + "facets=={count:6, f:{" + + buckets + " before:{count:1}," + " after:{count:0}," + " between:{count:4}," + " _actual_end:'16.0'" - + "} }" - ); - // with hardend:true, not only do the buckets change, but actual_end should not need to be returned - client.testJQ(params(p, "json.facet", "{f:{ " + basic_opts + " hardend:true, other:after}}") - , "facets=={count:6, f:{" + + "} }"); + // with hardend:true, not only do the buckets change, but actual_end should not need to be + // returned + 
client.testJQ( + params(p, "json.facet", "{f:{ " + basic_opts + " hardend:true, other:after}}"), + "facets=={count:6, f:{" + " buckets:[ {val:-5.0,count:1}, {val:2.0,count:2}, {val:9.0,count:0} ], " + " after:{count:1}" - + "} }" - ); + + "} }"); } { // now check some "phase #2" requests with refinement buckets already specified - - final String facet - = "{ top:{ type:range, field:num_i, start:-5, end:5, gap:7," + nohardend - + " other:all, facet:{ x:{ type:terms, field:cat_s, limit:1, refine:true } } } }"; + final String facet = + "{ top:{ type:range, field:num_i, start:-5, end:5, gap:7," + + nohardend + + " other:all, facet:{ x:{ type:terms, field:cat_s, limit:1, refine:true } } } }"; // the behavior should be the same, regardless of wether we pass actual_end to the shards - // because in a "mixed mode" rolling update, the shards should be smart enough to re-compute if + // because in a "mixed mode" rolling update, the shards should be smart enough to re-compute + // if // the merging node is running an older version that doesn't send it for (String actual_end : Arrays.asList(", _actual_end:'9'", "")) { - client.testJQ(params("q", "*:*", "rows", "0", "isShard", "true", "distrib", "false", - "shards.purpose", ""+FacetModule.PURPOSE_REFINE_JSON_FACETS, - "json.facet", facet, - "_facet_", "{ refine: { top: { between:{ x:{ _l:[B] } }" + actual_end + "} } }") - , "facets=={top:{ buckets:[], between:{x:{buckets:[{val:B,count:3}] }} } }"); + client.testJQ( + params( + "q", + "*:*", + "rows", + "0", + "isShard", + "true", + "distrib", + "false", + "shards.purpose", + "" + FacetModule.PURPOSE_REFINE_JSON_FACETS, + "json.facet", + facet, + "_facet_", + "{ refine: { top: { between:{ x:{ _l:[B] } }" + actual_end + "} } }"), + "facets=={top:{ buckets:[], between:{x:{buckets:[{val:B,count:3}] }} } }"); } } } @@ -170,7 +235,10 @@ private void doRangeOtherWhitebox(Client client) throws Exception { public void testDateFacetsDistrib() throws Exception { initServers(); Client client = servers.getClient(random().nextInt()); - client.queryDefaults().set( "shards", servers.getShards()).set("debugQuery", Boolean.toString(random().nextBoolean()) ); + client + .queryDefaults() + .set("shards", servers.getShards()) + .set("debugQuery", Boolean.toString(random().nextBoolean())); doDateFacets(client); } @@ -183,62 +251,122 @@ private void doDateFacets(Client client) throws Exception { client.queryDefaults().set("cache", cache); client.deleteByQuery("*:*", null); boolean multiValue = random().nextBoolean(); - String dateField = multiValue? "b_dts": "b_dt"; - String dateRange = multiValue? "b_drfs": "b_drf"; - - client.add(sdoc("id", "1", "cat_s", "A", dateField, "2014-03-15T12:00:00Z", - dateRange, "2014-03-15T12:00:00Z"), null); - client.add(sdoc("id", "2", "cat_s", "B", dateField, "2015-01-03T00:00:00Z", - dateRange, "2015-01-03T00:00:00Z"), null); + String dateField = multiValue ? "b_dts" : "b_dt"; + String dateRange = multiValue ? 
"b_drfs" : "b_drf"; + + client.add( + sdoc( + "id", + "1", + "cat_s", + "A", + dateField, + "2014-03-15T12:00:00Z", + dateRange, + "2014-03-15T12:00:00Z"), + null); + client.add( + sdoc( + "id", + "2", + "cat_s", + "B", + dateField, + "2015-01-03T00:00:00Z", + dateRange, + "2015-01-03T00:00:00Z"), + null); client.add(sdoc("id", "3"), null); client.commit(); - client.add(sdoc("id", "4", "cat_s", "A", dateField, "2014-03-15T12:00:00Z", - dateRange, "2014-03-15T12:00:00Z"), null); - client.add(sdoc("id", "5", "cat_s", "B", dateField, "2015-01-03T00:00:00Z", - dateRange, "2015-01-03T00:00:00Z"),null); + client.add( + sdoc( + "id", + "4", + "cat_s", + "A", + dateField, + "2014-03-15T12:00:00Z", + dateRange, + "2014-03-15T12:00:00Z"), + null); + client.add( + sdoc( + "id", + "5", + "cat_s", + "B", + dateField, + "2015-01-03T00:00:00Z", + dateRange, + "2015-01-03T00:00:00Z"), + null); client.commit(); - client.add(sdoc("id", "6", "cat_s", "B", dateField, "2014-03-15T12:00:00Z", - dateRange, "2014-03-15T12:00:00Z"),null); + client.add( + sdoc( + "id", + "6", + "cat_s", + "B", + dateField, + "2014-03-15T12:00:00Z", + dateRange, + "2014-03-15T12:00:00Z"), + null); client.commit(); SolrParams p = params("q", "*:*", "rows", "0"); - for (String s : new String[]{dateField, dateRange}) { - client.testJQ(params(p, "json.facet" - , "{date:{type : range, mincount:1, field :" + s + - ",start:'2013-11-01T00:00:00Z',end:NOW,gap:'+90DAY'}}"), - "facets=={count:6, date:{buckets:" + - "[{val:\"2014-01-30T00:00:00Z\",count:3}, {val:\"2014-10-27T00:00:00Z\",count:2}]" + - "}}"); + for (String s : new String[] {dateField, dateRange}) { + client.testJQ( + params( + p, + "json.facet", + "{date:{type : range, mincount:1, field :" + + s + + ",start:'2013-11-01T00:00:00Z',end:NOW,gap:'+90DAY'}}"), + "facets=={count:6, date:{buckets:" + + "[{val:\"2014-01-30T00:00:00Z\",count:3}, {val:\"2014-10-27T00:00:00Z\",count:2}]" + + "}}"); // with ranges - client.testJQ(params(p, "json.facet" - , "{date:{type : range, mincount:1, field :" + s + - ",ranges:[{from:'2013-11-01T00:00:00Z', to:'2014-04-30T00:00:00Z'}," + - "{from:'2015-01-01T00:00:00Z', to:'2020-01-30T00:00:00Z'}]}}"), - "facets=={count:6, date:{buckets:" + - "[{val:\"[2013-11-01T00:00:00Z,2014-04-30T00:00:00Z)\",count:3}," + - " {val:\"[2015-01-01T00:00:00Z,2020-01-30T00:00:00Z)\",count:2}]" + - "}}"); + client.testJQ( + params( + p, + "json.facet", + "{date:{type : range, mincount:1, field :" + + s + + ",ranges:[{from:'2013-11-01T00:00:00Z', to:'2014-04-30T00:00:00Z'}," + + "{from:'2015-01-01T00:00:00Z', to:'2020-01-30T00:00:00Z'}]}}"), + "facets=={count:6, date:{buckets:" + + "[{val:\"[2013-11-01T00:00:00Z,2014-04-30T00:00:00Z)\",count:3}," + + " {val:\"[2015-01-01T00:00:00Z,2020-01-30T00:00:00Z)\",count:2}]" + + "}}"); } - client.add(sdoc("id", "7", "cat_s", "B", dateRange, "[2010 TO 2014-05-21]"),null); + client.add(sdoc("id", "7", "cat_s", "B", dateRange, "[2010 TO 2014-05-21]"), null); client.commit(); - client.testJQ(params(p, "json.facet" - , "{date:{type : range, other:'before', field :" + dateRange + - ",start:'2011-11-01T00:00:00Z',end:'2016-01-30T00:00:00Z',gap:'+1YEAR'}}"), - "facets=={count:7, date:{buckets:[" + - "{val:\"2011-11-01T00:00:00Z\",count:1}, {val:\"2012-11-01T00:00:00Z\",count:1}," + - "{val:\"2013-11-01T00:00:00Z\",count:4}, {val:\"2014-11-01T00:00:00Z\",count:2}," + - "{val:\"2015-11-01T00:00:00Z\",count:0}" + - "],before:{count:1}" + - "}}"); + client.testJQ( + params( + p, + "json.facet", + "{date:{type : range, other:'before', field :" 
+ + dateRange + + ",start:'2011-11-01T00:00:00Z',end:'2016-01-30T00:00:00Z',gap:'+1YEAR'}}"), + "facets=={count:7, date:{buckets:[" + + "{val:\"2011-11-01T00:00:00Z\",count:1}, {val:\"2012-11-01T00:00:00Z\",count:1}," + + "{val:\"2013-11-01T00:00:00Z\",count:4}, {val:\"2014-11-01T00:00:00Z\",count:2}," + + "{val:\"2015-11-01T00:00:00Z\",count:0}" + + "],before:{count:1}" + + "}}"); } @Test public void testRangeFacetWithRangesDistrib() throws Exception { initServers(); Client client = servers.getClient(random().nextInt()); - client.queryDefaults().set( "shards", servers.getShards()).set("debugQuery", Boolean.toString(random().nextBoolean()) ); + client + .queryDefaults() + .set("shards", servers.getShards()) + .set("debugQuery", Boolean.toString(random().nextBoolean())); doRangeFacetWithRanges(client); } @@ -254,54 +382,64 @@ private void doRangeFacetWithRanges(Client client) throws Exception { final SolrParams p = params("q", "*:*", "rows", "0"); // with lower and upper include - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i, ranges:[{range:\" [-5,7] \"}]}}"), + client.testJQ( + params( + p, + "json.facet", + "{price:{type : range,field : num_i, ranges:[{range:\" [-5,7] \"}]}}"), "facets=={count:6, price:{buckets:[{val:\"[-5,7]\",count:5}]}}"); // with lower include and upper exclude - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{range:\"[-5,7)\"}]}}"), + client.testJQ( + params(p, "json.facet", "{price:{type : range,field : num_i,ranges:[{range:\"[-5,7)\"}]}}"), "facets=={count:6, price:{buckets:[{val:\"[-5,7)\",count:4}]}}"); // with lower exclude and upper include - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{range:\"(-5,7]\"}]}}"), + client.testJQ( + params(p, "json.facet", "{price:{type : range,field : num_i,ranges:[{range:\"(-5,7]\"}]}}"), "facets=={count:6, price:{buckets:[{val:\"(-5,7]\",count:3}]}}"); // with lower and upper exclude - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{range:\"(-5,7)\"}]}}"), + client.testJQ( + params(p, "json.facet", "{price:{type : range,field : num_i,ranges:[{range:\"(-5,7)\"}]}}"), "facets=={count:6, price:{buckets:[{val:\"(-5,7)\",count:2}]}}"); // with other and include, they are not supported // but wouldn't throw any error as they are not consumed - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{range:\"(-5,7)\"}],include:\"lower\",other:[\"after\"]}}"), + client.testJQ( + params( + p, + "json.facet", + "{price:{type : range,field : num_i,ranges:[{range:\"(-5,7)\"}],include:\"lower\",other:[\"after\"]}}"), "facets=={count:6, price:{buckets:[{val:\"(-5,7)\",count:2}]}}"); // with mincount>0 client.testJQ( - params(p, "json.facet", "{price:{type : range,field : num_i,mincount:3," + - "ranges:[{range:\"(-5,7)\"},{range:\"(-5,7]\"}]}}" - ), + params( + p, + "json.facet", + "{price:{type : range,field : num_i,mincount:3," + + "ranges:[{range:\"(-5,7)\"},{range:\"(-5,7]\"}]}}"), "facets=={count:6, price:{buckets:[{val:\"(-5,7]\",count:3}]}}"); // with multiple ranges client.testJQ( - params(p, "json.facet", "{price:{type : range,field : num_i," + - "ranges:[{range:\"(-5,7)\"},{range:\"(-5,7]\"}]}}" - ), + params( + p, + "json.facet", + "{price:{type : range,field : num_i," + + "ranges:[{range:\"(-5,7)\"},{range:\"(-5,7]\"}]}}"), "facets=={count:6, price:{buckets:[{val:\"(-5,7)\",count:2},{val:\"(-5,7]\",count:3}]}}"); // with * as one of the values - 
client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{range:\"(*,10]\"}]}}"), + client.testJQ( + params(p, "json.facet", "{price:{type : range,field : num_i,ranges:[{range:\"(*,10]\"}]}}"), "facets=={count:6, price:{buckets:[{val:\"(*,10]\",count:5}]}}"); - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{range:\"[-5,*)\"}]}}"), + client.testJQ( + params(p, "json.facet", "{price:{type : range,field : num_i,ranges:[{range:\"[-5,*)\"}]}}"), "facets=={count:6, price:{buckets:[{val:\"[-5,*)\",count:5}]}}"); - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{range:\"[*,*]\"}]}}"), + client.testJQ( + params(p, "json.facet", "{price:{type : range,field : num_i,ranges:[{range:\"[*,*]\"}]}}"), "facets=={count:6, price:{buckets:[{val:\"[*,*]\",count:5}]}}"); } @@ -309,7 +447,10 @@ private void doRangeFacetWithRanges(Client client) throws Exception { public void testRangeFacetWithRangesInNewFormatDistrib() throws Exception { initServers(); Client client = servers.getClient(random().nextInt()); - client.queryDefaults().set( "shards", servers.getShards()).set("debugQuery", Boolean.toString(random().nextBoolean()) ); + client + .queryDefaults() + .set("shards", servers.getShards()) + .set("debugQuery", Boolean.toString(random().nextBoolean())); doRangeFacetWithRangesInNewFormat(client); } @@ -325,78 +466,126 @@ private void doRangeFacetWithRangesInNewFormat(Client client) throws Exception { indexSimple(client); SolrParams p = params("q", "*:*", "rows", "0"); - //case without inclusive params - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{from:-5, to:7}]}}"), + // case without inclusive params + client.testJQ( + params(p, "json.facet", "{price:{type : range,field : num_i,ranges:[{from:-5, to:7}]}}"), "facets=={count:6, price:{buckets:[{val:\"[-5,7)\",count:4}]}}"); - //case without key param and to included - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{from:-5, to:7,inclusive_from:true ,inclusive_to:true}]}}"), + // case without key param and to included + client.testJQ( + params( + p, + "json.facet", + "{price:{type : range,field : num_i,ranges:[{from:-5, to:7,inclusive_from:true ,inclusive_to:true}]}}"), "facets=={count:6, price:{buckets:[{val:\"[-5,7]\",count:5}]}}"); - //case with all params - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{from:-5, to:7,inclusive_from:true ,inclusive_to:true}]}}"), + // case with all params + client.testJQ( + params( + p, + "json.facet", + "{price:{type : range,field : num_i,ranges:[{from:-5, to:7,inclusive_from:true ,inclusive_to:true}]}}"), "facets=={count:6, price:{buckets:[{val:\"[-5,7]\",count:5}]}}"); // from and to excluded - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:false}]}}"), + client.testJQ( + params( + p, + "json.facet", + "{price:{type : range,field : num_i,ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:false}]}}"), "facets=={count:6, price:{buckets:[{val:\"(-5,7)\",count:2}]}}"); // from excluded and to included - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:true}]}}"), + client.testJQ( + params( + p, + "json.facet", + "{price:{type : range,field : num_i,ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:true}]}}"), 
"facets=={count:6, price:{buckets:[{val:\"(-5,7]\",count:3}]}}"); // multiple ranges - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,include:[\"lower\"], outer:\"before\"," + - "ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:true},{from:-5, to:7,inclusive_from:false ,inclusive_to:false}]}}"), + client.testJQ( + params( + p, + "json.facet", + "{price:{type : range,field : num_i,include:[\"lower\"], outer:\"before\"," + + "ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:true},{from:-5, to:7,inclusive_from:false ,inclusive_to:false}]}}"), "facets=={count:6, price:{buckets:[{val:\"(-5,7]\",count:3},{val:\"(-5,7)\",count:2}]}}"); // with mincount>0 - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,mincount:3" + - "ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:true},{from:-5, to:7,inclusive_from:false ,inclusive_to:false}]}}"), + client.testJQ( + params( + p, + "json.facet", + "{price:{type : range,field : num_i,mincount:3" + + "ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:true},{from:-5, to:7,inclusive_from:false ,inclusive_to:false}]}}"), "facets=={count:6, price:{buckets:[{val:\"(-5,7]\",count:3}]}}"); // mix of old and new formats - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i," + - "ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:true},{range:\"(-5,7)\"}]}}"), + client.testJQ( + params( + p, + "json.facet", + "{price:{type : range,field : num_i," + + "ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:true},{range:\"(-5,7)\"}]}}"), "facets=={count:6, price:{buckets:[{val:\"(-5,7]\",count:3},{val:\"(-5,7)\",count:2}]}}"); // from==to - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{from:-5, to:-5,inclusive_from:false ,inclusive_to:true}]}}"), + client.testJQ( + params( + p, + "json.facet", + "{price:{type : range,field : num_i,ranges:[{from:-5, to:-5,inclusive_from:false ,inclusive_to:true}]}}"), "facets=={count:6, price:{buckets:[{val:\"(-5,-5]\",count:0}]}}"); - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{from:-5, to:-5,inclusive_from:false ,inclusive_to:false}]}}"), + client.testJQ( + params( + p, + "json.facet", + "{price:{type : range,field : num_i,ranges:[{from:-5, to:-5,inclusive_from:false ,inclusive_to:false}]}}"), "facets=={count:6, price:{buckets:[{val:\"(-5,-5)\",count:0}]}}"); - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{from:-5, to:-5,inclusive_from:true ,inclusive_to:false}]}}"), + client.testJQ( + params( + p, + "json.facet", + "{price:{type : range,field : num_i,ranges:[{from:-5, to:-5,inclusive_from:true ,inclusive_to:false}]}}"), "facets=={count:6, price:{buckets:[{val:\"[-5,-5)\",count:0}]}}"); - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{from:-5, to:-5,inclusive_from:true ,inclusive_to:true}]}}"), + client.testJQ( + params( + p, + "json.facet", + "{price:{type : range,field : num_i,ranges:[{from:-5, to:-5,inclusive_from:true ,inclusive_to:true}]}}"), "facets=={count:6, price:{buckets:[{val:\"[-5,-5]\",count:2}]}}"); // with * as one of the values - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{from:\"*\", to:10,inclusive_from:false ,inclusive_to:true}]}}"), + client.testJQ( + params( + p, + "json.facet", + "{price:{type : range,field : num_i,ranges:[{from:\"*\", to:10,inclusive_from:false ,inclusive_to:true}]}}"), 
"facets=={count:6, price:{buckets:[{val:\"(*,10]\",count:5}]}}"); - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{from:-5, to:\"*\",inclusive_from:true ,inclusive_to:false}]}}"), + client.testJQ( + params( + p, + "json.facet", + "{price:{type : range,field : num_i,ranges:[{from:-5, to:\"*\",inclusive_from:true ,inclusive_to:false}]}}"), "facets=={count:6, price:{buckets:[{val:\"[-5,*)\",count:5}]}}"); - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{from:-5,inclusive_from:true ,inclusive_to:false}]}}"), + client.testJQ( + params( + p, + "json.facet", + "{price:{type : range,field : num_i,ranges:[{from:-5,inclusive_from:true ,inclusive_to:false}]}}"), "facets=={count:6, price:{buckets:[{val:\"[-5,*)\",count:5}]}}"); - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{from:\"*\", to:\"*\",inclusive_from:true ,inclusive_to:false}]}}"), + client.testJQ( + params( + p, + "json.facet", + "{price:{type : range,field : num_i,ranges:[{from:\"*\", to:\"*\",inclusive_from:true ,inclusive_to:false}]}}"), "facets=={count:6, price:{buckets:[{val:\"[*,*)\",count:5}]}}"); - client.testJQ(params(p, "json.facet" - , "{price:{type : range,field : num_i,ranges:[{inclusive_from:true ,inclusive_to:false}]}}"), + client.testJQ( + params( + p, + "json.facet", + "{price:{type : range,field : num_i,ranges:[{inclusive_from:true ,inclusive_to:false}]}}"), "facets=={count:6, price:{buckets:[{val:\"[*,*)\",count:5}]}}"); } @@ -409,7 +598,10 @@ public void testFacetValueTypes() throws Exception { public void testFacetValueTypeDistrib() throws Exception { initServers(); Client client = servers.getClient(random().nextInt()); - client.queryDefaults().set( "shards", servers.getShards()).set("debugQuery", Boolean.toString(random().nextBoolean()) ); + client + .queryDefaults() + .set("shards", servers.getShards()) + .set("debugQuery", Boolean.toString(random().nextBoolean())); doFacetValueTypeValidation(client); } @@ -417,25 +609,40 @@ private void doFacetValueTypeValidation(Client client) throws Exception { indexSimple(client); // range faceting with start, end, and gap - client.testXQ(params("q", "*:*", "rows", "0", - "json.facet", "{num:{type: range, field:num_i,start:0,gap:2,end:10,mincount:1,other:all}}"), + client.testXQ( + params( + "q", + "*:*", + "rows", + "0", + "json.facet", + "{num:{type: range, field:num_i,start:0,gap:2,end:10,mincount:1,other:all}}"), "/response/lst[@name='facets']/long[@name='count'][.=6]", // count "/response/lst[@name='facets']/lst[@name='num']/arr[@name='buckets']/lst[1]/int[@name='val'][.=2]", // value "/response/lst[@name='facets']/lst[@name='num']/arr[@name='buckets']/lst[1]/long[@name='count'][.=2]", // count - "*[count(/response/lst[@name='facets']/lst[@name='num']/arr[@name='buckets']/lst)=2]", // no of entries + "*[count(/response/lst[@name='facets']/lst[@name='num']/arr[@name='buckets']/lst)=2]", // no + // of + // entries "/response/lst[@name='facets']/lst[@name='num']/lst[@name='before']/long[@name='count'][.=2]", // before "/response/lst[@name='facets']/lst[@name='num']/lst[@name='after']/long[@name='count'][.=0]", // after "/response/lst[@name='facets']/lst[@name='num']/lst[@name='between']/long[@name='count'][.=3]" // between - ); + ); // range faceting with ranges specified - client.testXQ(params("q", "*:*", "rows", "0", - "json.facet", "{num:{type: range, field:num_i,ranges:[{from:0, to:4},{from:-4,to:2}],mincount:1}}"), + client.testXQ( + params( + "q", + "*:*", 
+ "rows", + "0", + "json.facet", + "{num:{type: range, field:num_i,ranges:[{from:0, to:4},{from:-4,to:2}],mincount:1}}"), "/response/lst[@name='facets']/long[@name='count'][.=6]", // count "/response/lst[@name='facets']/lst[@name='num']/arr[@name='buckets']/lst[1]/str[@name='val'][.='[0,4)']", // value "/response/lst[@name='facets']/lst[@name='num']/arr[@name='buckets']/lst[1]/long[@name='count'][.=2]", // count - "*[count(/response/lst[@name='facets']/lst[@name='num']/arr[@name='buckets']/lst)=1]" // no of entries - ); + "*[count(/response/lst[@name='facets']/lst[@name='num']/arr[@name='buckets']/lst)=1]" // no + // of + // entries + ); } - } diff --git a/solr/core/src/test/org/apache/solr/search/function/AggValueSourceTest.java b/solr/core/src/test/org/apache/solr/search/function/AggValueSourceTest.java index eca48e1a60e..5946d4dc955 100644 --- a/solr/core/src/test/org/apache/solr/search/function/AggValueSourceTest.java +++ b/solr/core/src/test/org/apache/solr/search/function/AggValueSourceTest.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.util.function.IntFunction; - import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.queries.function.valuesource.ConstValueSource; import org.apache.solr.SolrTestCase; @@ -33,7 +32,8 @@ public class AggValueSourceTest extends SolrTestCase { @Test public void testCustomAgg() { - // All we're really interested in testing here is that the custom agg compiles and can be created + // All we're really interested in testing here is that the custom agg compiles and can be + // created final CustomAggregate customAggregate = new CustomAggregate(new ConstValueSource(123.0f)); final FacetMerger facetMerger = customAggregate.createFacetMerger(0.0D); } @@ -60,7 +60,8 @@ static class CustomSlotAcc extends SlotAcc.DoubleFuncSlotAcc { } @Override - public void collect(int doc, int slot, IntFunction slotContext) throws IOException { + public void collect(int doc, int slot, IntFunction slotContext) + throws IOException { result[slot] += values.doubleVal(doc); } @@ -80,11 +81,11 @@ public FacetMerger createFacetMerger(Object prototype) { @Override public void merge(Object facetResult, Context mcontext) { - total += (Double)facetResult; + total += (Double) facetResult; } @Override - public void finish(Context mcontext) { } + public void finish(Context mcontext) {} @Override public Object getMergedResult() { diff --git a/solr/core/src/test/org/apache/solr/search/function/SortByFunctionTest.java b/solr/core/src/test/org/apache/solr/search/function/SortByFunctionTest.java index 1ddd76f8c90..8d27810900a 100644 --- a/solr/core/src/test/org/apache/solr/search/function/SortByFunctionTest.java +++ b/solr/core/src/test/org/apache/solr/search/function/SortByFunctionTest.java @@ -15,23 +15,20 @@ * limitations under the License. 
*/ package org.apache.solr.search.function; + import org.apache.solr.SolrTestCaseJ4; import org.junit.BeforeClass; - /** - * * @see TestSortByMinMaxFunction - **/ + */ public class SortByFunctionTest extends SolrTestCaseJ4 { - @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema.xml"); } - @Override public void setUp() throws Exception { super.setUp(); @@ -40,109 +37,157 @@ public void setUp() throws Exception { } public void test() throws Exception { - assertU(adoc("id", "1", "x_td1", "0", "y_td1", "2", "w_td1", "25", "z_td1", "5", "f_t", "ipod")); - assertU(adoc("id", "2", "x_td1", "2", "y_td1", "2", "w_td1", "15", "z_td1", "5", "f_t", "ipod ipod ipod ipod ipod")); - assertU(adoc("id", "3", "x_td1", "3", "y_td1", "2", "w_td1", "55", "z_td1", "5", "f_t", "ipod ipod ipod ipod ipod ipod ipod ipod ipod")); - assertU(adoc("id", "4", "x_td1", "4", "y_td1", "2", "w_td1", "45", "z_td1", "5", "f_t", "ipod ipod ipod ipod ipod ipod ipod")); + assertU( + adoc("id", "1", "x_td1", "0", "y_td1", "2", "w_td1", "25", "z_td1", "5", "f_t", "ipod")); + assertU( + adoc( + "id", + "2", + "x_td1", + "2", + "y_td1", + "2", + "w_td1", + "15", + "z_td1", + "5", + "f_t", + "ipod ipod ipod ipod ipod")); + assertU( + adoc( + "id", + "3", + "x_td1", + "3", + "y_td1", + "2", + "w_td1", + "55", + "z_td1", + "5", + "f_t", + "ipod ipod ipod ipod ipod ipod ipod ipod ipod")); + assertU( + adoc( + "id", + "4", + "x_td1", + "4", + "y_td1", + "2", + "w_td1", + "45", + "z_td1", + "5", + "f_t", + "ipod ipod ipod ipod ipod ipod ipod")); assertU(commit()); - assertQ(req("fl", "*,score", "q", "*:*"), - "//*[@numFound='4']", - "//float[@name='score']='1.0'", - "//result/doc[1]/str[@name='id'][.='1']", - "//result/doc[2]/str[@name='id'][.='2']", - "//result/doc[3]/str[@name='id'][.='3']", - "//result/doc[4]/str[@name='id'][.='4']" - ); - assertQ(req("fl", "*,score", "q", "*:*", "sort", "score desc"), - "//*[@numFound='4']", - "//float[@name='score']='1.0'", - "//result/doc[1]/str[@name='id'][.='1']", - "//result/doc[2]/str[@name='id'][.='2']", - "//result/doc[3]/str[@name='id'][.='3']", - "//result/doc[4]/str[@name='id'][.='4']" - ); - assertQ(req("fl", "id,score", "q", "f_t:ipod", "sort", "score desc"), - "//*[@numFound='4']", - "//result/doc[1]/str[@name='id'][.='1']", - "//result/doc[2]/str[@name='id'][.='2']", - "//result/doc[3]/str[@name='id'][.='3']", - "//result/doc[4]/str[@name='id'][.='4']" - ); - - - assertQ(req("fl", "*,score", "q", "*:*", "sort", "sum(x_td1, y_td1) desc"), - "//*[@numFound='4']", - "//float[@name='score']='1.0'", - "//result/doc[1]/str[@name='id'][.='4']", - "//result/doc[2]/str[@name='id'][.='3']", - "//result/doc[3]/str[@name='id'][.='2']", - "//result/doc[4]/str[@name='id'][.='1']" - ); - assertQ(req("fl", "*,score", "q", "*:*", "sort", "sum(x_td1, y_td1) asc"), - "//*[@numFound='4']", - "//float[@name='score']='1.0'", - "//result/doc[1]/str[@name='id'][.='1']", - "//result/doc[2]/str[@name='id'][.='2']", - "//result/doc[3]/str[@name='id'][.='3']", - "//result/doc[4]/str[@name='id'][.='4']" - ); - //the function is equal, w_td1 separates - assertQ(req("q", "*:*", "fl", "id", "sort", "sum(z_td1, y_td1) asc, w_td1 asc"), - "//*[@numFound='4']", - "//result/doc[1]/str[@name='id'][.='2']", - "//result/doc[2]/str[@name='id'][.='1']", - "//result/doc[3]/str[@name='id'][.='4']", - "//result/doc[4]/str[@name='id'][.='3']" - ); + assertQ( + req("fl", "*,score", "q", "*:*"), + "//*[@numFound='4']", + "//float[@name='score']='1.0'", + 
"//result/doc[1]/str[@name='id'][.='1']", + "//result/doc[2]/str[@name='id'][.='2']", + "//result/doc[3]/str[@name='id'][.='3']", + "//result/doc[4]/str[@name='id'][.='4']"); + assertQ( + req("fl", "*,score", "q", "*:*", "sort", "score desc"), + "//*[@numFound='4']", + "//float[@name='score']='1.0'", + "//result/doc[1]/str[@name='id'][.='1']", + "//result/doc[2]/str[@name='id'][.='2']", + "//result/doc[3]/str[@name='id'][.='3']", + "//result/doc[4]/str[@name='id'][.='4']"); + assertQ( + req("fl", "id,score", "q", "f_t:ipod", "sort", "score desc"), + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.='1']", + "//result/doc[2]/str[@name='id'][.='2']", + "//result/doc[3]/str[@name='id'][.='3']", + "//result/doc[4]/str[@name='id'][.='4']"); + + assertQ( + req("fl", "*,score", "q", "*:*", "sort", "sum(x_td1, y_td1) desc"), + "//*[@numFound='4']", + "//float[@name='score']='1.0'", + "//result/doc[1]/str[@name='id'][.='4']", + "//result/doc[2]/str[@name='id'][.='3']", + "//result/doc[3]/str[@name='id'][.='2']", + "//result/doc[4]/str[@name='id'][.='1']"); + assertQ( + req("fl", "*,score", "q", "*:*", "sort", "sum(x_td1, y_td1) asc"), + "//*[@numFound='4']", + "//float[@name='score']='1.0'", + "//result/doc[1]/str[@name='id'][.='1']", + "//result/doc[2]/str[@name='id'][.='2']", + "//result/doc[3]/str[@name='id'][.='3']", + "//result/doc[4]/str[@name='id'][.='4']"); + // the function is equal, w_td1 separates + assertQ( + req("q", "*:*", "fl", "id", "sort", "sum(z_td1, y_td1) asc, w_td1 asc"), + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.='2']", + "//result/doc[2]/str[@name='id'][.='1']", + "//result/doc[3]/str[@name='id'][.='4']", + "//result/doc[4]/str[@name='id'][.='3']"); } - - public void testSortJoinDocFreq() throws Exception - { - assertU(adoc("id", "4", "id_s1", "D", "links_mfacet", "A", "links_mfacet", "B", "links_mfacet", "C" ) ); - assertU(adoc("id", "3", "id_s1", "C", "links_mfacet", "A", "links_mfacet", "B" ) ); + + public void testSortJoinDocFreq() throws Exception { + assertU( + adoc( + "id", + "4", + "id_s1", + "D", + "links_mfacet", + "A", + "links_mfacet", + "B", + "links_mfacet", + "C")); + assertU(adoc("id", "3", "id_s1", "C", "links_mfacet", "A", "links_mfacet", "B")); assertU(commit()); // Make sure it uses two readers - assertU(adoc("id", "2", "id_s1", "B", "links_mfacet", "A" ) ); - assertU(adoc("id", "1", "id_s1", "A" ) ); + assertU(adoc("id", "2", "id_s1", "B", "links_mfacet", "A")); + assertU(adoc("id", "1", "id_s1", "A")); assertU(commit()); - assertQ(req("q", "links_mfacet:B", "fl", "id", "sort", "id asc"), - "//*[@numFound='2']", - "//result/doc[1]/str[@name='id'][.='3']", - "//result/doc[2]/str[@name='id'][.='4']" - ); - - assertQ(req("q", "*:*", "fl", "id", "sort", "joindf(id_s1, links_mfacet) desc"), - "//*[@numFound='4']", - "//result/doc[1]/str[@name='id'][.='1']", - "//result/doc[2]/str[@name='id'][.='2']", - "//result/doc[3]/str[@name='id'][.='3']", - "//result/doc[4]/str[@name='id'][.='4']" - ); - - assertQ(req("q", "*:*", "fl", "id", "sort", "joindf(id_s1, links_mfacet) asc"), - "//*[@numFound='4']", - "//result/doc[1]/str[@name='id'][.='4']", - "//result/doc[2]/str[@name='id'][.='3']", - "//result/doc[3]/str[@name='id'][.='2']", - "//result/doc[4]/str[@name='id'][.='1']" - ); + assertQ( + req("q", "links_mfacet:B", "fl", "id", "sort", "id asc"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.='3']", + "//result/doc[2]/str[@name='id'][.='4']"); + + assertQ( + req("q", "*:*", "fl", "id", "sort", "joindf(id_s1, links_mfacet) 
desc"), + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.='1']", + "//result/doc[2]/str[@name='id'][.='2']", + "//result/doc[3]/str[@name='id'][.='3']", + "//result/doc[4]/str[@name='id'][.='4']"); + + assertQ( + req("q", "*:*", "fl", "id", "sort", "joindf(id_s1, links_mfacet) asc"), + "//*[@numFound='4']", + "//result/doc[1]/str[@name='id'][.='4']", + "//result/doc[2]/str[@name='id'][.='3']", + "//result/doc[3]/str[@name='id'][.='2']", + "//result/doc[4]/str[@name='id'][.='1']"); } - + /** * The sort clauses to test in testFieldSortSpecifiedAsFunction. * * @see #testFieldSortSpecifiedAsFunction */ protected String[] getFieldFunctionClausesToTest() { - return new String[] { "primary_tl1", "field(primary_tl1)" }; + return new String[] {"primary_tl1", "field(primary_tl1)"}; } - /** - * Sort by function normally compares the double value, but if a function is specified that identifies - * a single field, we should use the underlying field's SortField to save of a lot of type converstion - * (and RAM), and keep the sort precision as high as possible + * Sort by function normally compares the double value, but if a function is specified that + * identifies a single field, we should use the underlying field's SortField to save of a lot of + * type converstion (and RAM), and keep the sort precision as high as possible * * @see #getFieldFunctionClausesToTest */ @@ -150,33 +195,64 @@ public void testFieldSortSpecifiedAsFunction() throws Exception { final long A = Long.MIN_VALUE; final long B = A + 1L; final long C = B + 1L; - + final long Z = Long.MAX_VALUE; final long Y = Z - 1L; final long X = Y - 1L; - + // test is predicated on the idea that if long -> double converstion is happening under the hood - // then we lose precision in sorting; so lets sanity check that our JVM isn't doing something wacky - // in converstion that violates the principle of the test - - assertEquals("WTF? small longs cast to double aren't equivalent?", - (double)A, (double)B, 0.0D); - assertEquals("WTF? small longs cast to double aren't equivalent?", - (double)A, (double)C, 0.0D); - - assertEquals("WTF? big longs cast to double aren't equivalent?", - (double)Z, (double)Y, 0.0D); - assertEquals("WTF? big longs cast to double aren't equivalent?", - (double)Z, (double)X, 0.0D); - + // then we lose precision in sorting; so lets sanity check that our JVM isn't doing something + // wacky in converstion that violates the principle of the test + + assertEquals( + "WTF? small longs cast to double aren't equivalent?", (double) A, (double) B, 0.0D); + assertEquals( + "WTF? small longs cast to double aren't equivalent?", (double) A, (double) C, 0.0D); + + assertEquals("WTF? big longs cast to double aren't equivalent?", (double) Z, (double) Y, 0.0D); + assertEquals("WTF? 
big longs cast to double aren't equivalent?", (double) Z, (double) X, 0.0D); + int docId = 0; for (int i = 0; i < 3; i++) { - assertU(adoc(sdoc("id", ++docId, "primary_tl1", X, "secondary_tl1", i, - "multi_l_dv", X, "multi_l_dv", A))); - assertU(adoc(sdoc("id", ++docId, "primary_tl1", Y, "secondary_tl1", i, - "multi_l_dv", Y, "multi_l_dv", B))); - assertU(adoc(sdoc("id", ++docId, "primary_tl1", Z, "secondary_tl1", i, - "multi_l_dv", Z, "multi_l_dv", C))); + assertU( + adoc( + sdoc( + "id", + ++docId, + "primary_tl1", + X, + "secondary_tl1", + i, + "multi_l_dv", + X, + "multi_l_dv", + A))); + assertU( + adoc( + sdoc( + "id", + ++docId, + "primary_tl1", + Y, + "secondary_tl1", + i, + "multi_l_dv", + Y, + "multi_l_dv", + B))); + assertU( + adoc( + sdoc( + "id", + ++docId, + "primary_tl1", + Z, + "secondary_tl1", + i, + "multi_l_dv", + Z, + "multi_l_dv", + C))); } assertU(commit()); @@ -184,32 +260,33 @@ public void testFieldSortSpecifiedAsFunction() throws Exception { // min/max of a field is tested in TestSortByMinMaxFunction for (String primarySort : getFieldFunctionClausesToTest()) { - assertQ(req("q", "*:*", - "sort", primarySort + " asc, secondary_tl1 asc") - , "//*[@numFound='9']" - // - , "//result/doc[1]/long[@name='primary_tl1'][.='"+X+"']" - , "//result/doc[1]/long[@name='secondary_tl1'][.='0']" - , "//result/doc[2]/long[@name='primary_tl1'][.='"+X+"']" - , "//result/doc[2]/long[@name='secondary_tl1'][.='1']" - , "//result/doc[3]/long[@name='primary_tl1'][.='"+X+"']" - , "//result/doc[3]/long[@name='secondary_tl1'][.='2']" - // - , "//result/doc[4]/long[@name='primary_tl1'][.='"+Y+"']" - , "//result/doc[4]/long[@name='secondary_tl1'][.='0']" - , "//result/doc[5]/long[@name='primary_tl1'][.='"+Y+"']" - , "//result/doc[5]/long[@name='secondary_tl1'][.='1']" - , "//result/doc[6]/long[@name='primary_tl1'][.='"+Y+"']" - , "//result/doc[6]/long[@name='secondary_tl1'][.='2']" - // - , "//result/doc[7]/long[@name='primary_tl1'][.='"+Z+"']" - , "//result/doc[7]/long[@name='secondary_tl1'][.='0']" - , "//result/doc[8]/long[@name='primary_tl1'][.='"+Z+"']" - , "//result/doc[8]/long[@name='secondary_tl1'][.='1']" - , "//result/doc[9]/long[@name='primary_tl1'][.='"+Z+"']" - , "//result/doc[9]/long[@name='secondary_tl1'][.='2']" - ); + assertQ( + req("q", "*:*", "sort", primarySort + " asc, secondary_tl1 asc"), + "//*[@numFound='9']" + // + , + "//result/doc[1]/long[@name='primary_tl1'][.='" + X + "']", + "//result/doc[1]/long[@name='secondary_tl1'][.='0']", + "//result/doc[2]/long[@name='primary_tl1'][.='" + X + "']", + "//result/doc[2]/long[@name='secondary_tl1'][.='1']", + "//result/doc[3]/long[@name='primary_tl1'][.='" + X + "']", + "//result/doc[3]/long[@name='secondary_tl1'][.='2']" + // + , + "//result/doc[4]/long[@name='primary_tl1'][.='" + Y + "']", + "//result/doc[4]/long[@name='secondary_tl1'][.='0']", + "//result/doc[5]/long[@name='primary_tl1'][.='" + Y + "']", + "//result/doc[5]/long[@name='secondary_tl1'][.='1']", + "//result/doc[6]/long[@name='primary_tl1'][.='" + Y + "']", + "//result/doc[6]/long[@name='secondary_tl1'][.='2']" + // + , + "//result/doc[7]/long[@name='primary_tl1'][.='" + Z + "']", + "//result/doc[7]/long[@name='secondary_tl1'][.='0']", + "//result/doc[8]/long[@name='primary_tl1'][.='" + Z + "']", + "//result/doc[8]/long[@name='secondary_tl1'][.='1']", + "//result/doc[9]/long[@name='primary_tl1'][.='" + Z + "']", + "//result/doc[9]/long[@name='secondary_tl1'][.='2']"); } } - } diff --git a/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java 
b/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java index b38de13f216..24950f319b5 100644 --- a/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java +++ b/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java @@ -24,7 +24,6 @@ import java.util.Arrays; import java.util.List; import java.util.Random; - import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.search.similarities.TFIDFSimilarity; import org.apache.solr.SolrTestCaseJ4; @@ -34,16 +33,15 @@ import org.junit.Test; /** - * Tests some basic functionality of Solr while demonstrating good - * Best Practices for using SolrTestCaseJ4 + * Tests some basic functionality of Solr while demonstrating good Best Practices for using + * SolrTestCaseJ4 */ public class TestFunctionQuery extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-functionquery.xml","schema11.xml"); + initCore("solrconfig-functionquery.xml", "schema11.xml"); } - String base = "external_foo_extf"; static long start = System.nanoTime(); @@ -52,33 +50,31 @@ void makeExternalFile(String field, String contents) { String dir = h.getCore().getDataDir(); String filename = dir + "/external_" + field + "." + (start++); - try (Writer out = new OutputStreamWriter(new FileOutputStream(filename), StandardCharsets.UTF_8)) { + try (Writer out = + new OutputStreamWriter(new FileOutputStream(filename), StandardCharsets.UTF_8)) { out.write(contents); } catch (Exception e) { throw new RuntimeException(e); } } - void createIndex(String field, int... values) { // lrf.args.put("version","2.0"); for (int val : values) { String s = Integer.toString(val); - if (field!=null) assertU(adoc("id", s, field, s)); + if (field != null) assertU(adoc("id", s, field, s)); else assertU(adoc("id", s)); if (random().nextInt(100) < 20) { - if (field!=null) assertU(adoc("id", s, field, s)); + if (field != null) assertU(adoc("id", s, field, s)); else assertU(adoc("id", s)); } if (random().nextInt(100) < 20) { assertU(commit()); - } - // System.out.println("added doc for " + val); } // assertU(optimize()); // squeeze out any possible deleted docs @@ -101,38 +97,41 @@ public String func(String field, String template) { return sb.toString(); } - protected void singleTest(String field, String funcTemplate, List<String> args, float... results) { + protected void singleTest( + String field, String funcTemplate, List<String> args, float... results) { // NOTE: we're abusing the "results" float[] here ...
// - even elements are ids which must be valid 'ints' // - odd elements are the expected score values String parseableQuery = func(field, funcTemplate); - List<String> nargs = new ArrayList<>(Arrays.asList("q", parseableQuery - ,"fl", "*,score" - ,"indent","on" - ,"rows","100")); + List<String> nargs = + new ArrayList<>( + Arrays.asList("q", parseableQuery, "fl", "*,score", "indent", "on", "rows", "100")); if (args != null) { for (String arg : args) { - nargs.add(arg.replace("\0",field)); + nargs.add(arg.replace("\0", field)); } } List<String> tests = new ArrayList<>(); - for (int i=0; i h.query(req("q", "{!func}ord(" + field + ")", "fq", "id:1"))); - assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ((SolrException)e).code()); + Exception e = + expectThrows( + SolrException.class, + () -> h.query(req("q", "{!func}ord(" + field + ")", "fq", "id:1"))); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ((SolrException) e).code()); assertTrue(e.getMessage().contains("ord() is not supported over Points based field " + field)); - e = expectThrows(SolrException.class, () -> h.query(req("q", "{!func}rord(" + field + ")", "fq", "id:1"))); - assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ((SolrException)e).code()); + e = + expectThrows( + SolrException.class, + () -> h.query(req("q", "{!func}rord(" + field + ")", "fq", "id:1"))); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ((SolrException) e).code()); assertTrue(e.getMessage().contains("rord() is not supported over Points based field " + field)); } @@ -337,145 +341,270 @@ public void testOrdAndRordOverPointsField() throws Exception { public void testGeneral() throws Exception { clearIndex(); - assertU(adoc("id","1", "a_tdt","2009-08-31T12:10:10.123Z", "b_tdt","2009-08-31T12:10:10.124Z")); - assertU(adoc("id","2", "a_t","how now brown cow")); + assertU( + adoc("id", "1", "a_tdt", "2009-08-31T12:10:10.123Z", "b_tdt", "2009-08-31T12:10:10.124Z")); + assertU(adoc("id", "2", "a_t", "how now brown cow")); assertU(commit()); // create more than one segment - assertU(adoc("id","3", "a_t","brown cow")); - assertU(adoc("id","4")); + assertU(adoc("id", "3", "a_t", "brown cow")); + assertU(adoc("id", "4")); assertU(commit()); // create more than one segment - assertU(adoc("id","5")); - assertU(adoc("id","6", "a_t","cow cow cow cow cow")); + assertU(adoc("id", "5")); + assertU(adoc("id", "6", "a_t", "cow cow cow cow cow")); assertU(commit()); // test relevancy functions - assertQ(req("fl","*,score","q", "{!func}numdocs()", "fq","id:6"), "//float[@name='score']='6.0'"); - assertQ(req("fl","*,score","q", "{!func}maxdoc()", "fq","id:6"), "//float[@name='score']='6.0'"); - assertQ(req("fl","*,score","q", "{!func}docfreq(a_t,cow)", "fq","id:6"), "//float[@name='score']='3.0'"); - assertQ(req("fl","*,score","q", "{!func}docfreq('a_t','cow')", "fq","id:6"), "//float[@name='score']='3.0'"); - assertQ(req("fl","*,score","q", "{!func}docfreq($field,$value)", "fq","id:6", "field","a_t", "value","cow"), "//float[@name='score']='3.0'"); - assertQ(req("fl","*,score","q", "{!func}termfreq(a_t,cow)", "fq","id:6"), "//float[@name='score']='5.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}numdocs()", "fq", "id:6"), + "//float[@name='score']='6.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}maxdoc()", "fq", "id:6"), "//float[@name='score']='6.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}docfreq(a_t,cow)", "fq", "id:6"), + "//float[@name='score']='3.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}docfreq('a_t','cow')", "fq", "id:6"), +
"//float[@name='score']='3.0'"); + assertQ( + req( + "fl", + "*,score", + "q", + "{!func}docfreq($field,$value)", + "fq", + "id:6", + "field", + "a_t", + "value", + "cow"), + "//float[@name='score']='3.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}termfreq(a_t,cow)", "fq", "id:6"), + "//float[@name='score']='5.0'"); // make sure it doesn't get a NPE if no terms are present in a field. - assertQ(req("fl","*,score","q", "{!func}termfreq(nofield_t,cow)", "fq","id:6"), "//float[@name='score']='0.0'"); - assertQ(req("fl","*,score","q", "{!func}docfreq(nofield_t,cow)", "fq","id:6"), "//float[@name='score']='0.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}termfreq(nofield_t,cow)", "fq", "id:6"), + "//float[@name='score']='0.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}docfreq(nofield_t,cow)", "fq", "id:6"), + "//float[@name='score']='0.0'"); // test that ord and rord are working on a global index basis, not just // at the segment level (since Lucene 2.9 has switched to per-segment searching) - assertQ(req("fl","*,score","q", "{!func}ord(id)", "fq","id:6"), "//float[@name='score']='5.0'"); - assertQ(req("fl","*,score","q", "{!func}top(ord(id))", "fq","id:6"), "//float[@name='score']='5.0'"); - assertQ(req("fl","*,score","q", "{!func}rord(id)", "fq","id:1"),"//float[@name='score']='5.0'"); - assertQ(req("fl","*,score","q", "{!func}top(rord(id))", "fq","id:1"),"//float[@name='score']='5.0'"); - + assertQ( + req("fl", "*,score", "q", "{!func}ord(id)", "fq", "id:6"), "//float[@name='score']='5.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}top(ord(id))", "fq", "id:6"), + "//float[@name='score']='5.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}rord(id)", "fq", "id:1"), "//float[@name='score']='5.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}top(rord(id))", "fq", "id:1"), + "//float[@name='score']='5.0'"); // test that we can subtract dates to millisecond precision - assertQ(req("fl","*,score","q", "{!func}ms(a_tdt,b_tdt)", "fq","id:1"), "//float[@name='score']='0.0'"); - assertQ(req("fl","*,score","q", "{!func}ms(b_tdt,a_tdt)", "fq","id:1"), "//float[@name='score']='1.0'"); - assertQ(req("fl","*,score","q", "{!func}ms(2009-08-31T12:10:10.125Z,2009-08-31T12:10:10.124Z)", "fq","id:1"), "//float[@name='score']='1.0'"); - assertQ(req("fl","*,score","q", "{!func}ms(2009-08-31T12:10:10.124Z,a_tdt)", "fq","id:1"), "//float[@name='score']='1.0'"); - assertQ(req("fl","*,score","q", "{!func}ms(2009-08-31T12:10:10.125Z,b_tdt)", "fq","id:1"), "//float[@name='score']='1.0'"); - - assertQ(req("fl","*,score","q", "{!func}ms(2009-08-31T12:10:10.125Z/SECOND,2009-08-31T12:10:10.124Z/SECOND)", "fq","id:1"), "//float[@name='score']='0.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}ms(a_tdt,b_tdt)", "fq", "id:1"), + "//float[@name='score']='0.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}ms(b_tdt,a_tdt)", "fq", "id:1"), + "//float[@name='score']='1.0'"); + assertQ( + req( + "fl", + "*,score", + "q", + "{!func}ms(2009-08-31T12:10:10.125Z,2009-08-31T12:10:10.124Z)", + "fq", + "id:1"), + "//float[@name='score']='1.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}ms(2009-08-31T12:10:10.124Z,a_tdt)", "fq", "id:1"), + "//float[@name='score']='1.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}ms(2009-08-31T12:10:10.125Z,b_tdt)", "fq", "id:1"), + "//float[@name='score']='1.0'"); + + assertQ( + req( + "fl", + "*,score", + "q", + "{!func}ms(2009-08-31T12:10:10.125Z/SECOND,2009-08-31T12:10:10.124Z/SECOND)", + "fq", + "id:1"), + "//float[@name='score']='0.0'"); // test 
that we can specify "NOW" - assertQ(req("fl","*,score","q", "{!func}ms(NOW)", "NOW","1000"), "//float[@name='score']='1000.0'"); - + assertQ( + req("fl", "*,score", "q", "{!func}ms(NOW)", "NOW", "1000"), + "//float[@name='score']='1000.0'"); - for (int i=100; i<112; i++) { - assertU(adoc("id",""+i, "text","batman")); + for (int i = 100; i < 112; i++) { + assertU(adoc("id", "" + i, "text", "batman")); } assertU(commit()); - assertU(adoc("id","120", "text","batman superman")); // in a smaller segment - assertU(adoc("id","121", "text","superman junkterm")); + assertU(adoc("id", "120", "text", "batman superman")); // in a smaller segment + assertU(adoc("id", "121", "text", "superman junkterm")); assertU(commit()); // superman has a higher df (thus lower idf) in one segment, but reversed in the complete index - String q ="{!func}query($qq)"; - String fq="id:120"; - assertQ(req("fl","*,score","q", q, "qq","text:batman", "fq",fq), "//float[@name='score']<'0.6'"); - assertQ(req("fl","*,score","q", q, "qq","text:superman", "fq",fq), "//float[@name='score']>'0.6'"); + String q = "{!func}query($qq)"; + String fq = "id:120"; + assertQ( + req("fl", "*,score", "q", q, "qq", "text:batman", "fq", fq), + "//float[@name='score']<'0.6'"); + assertQ( + req("fl", "*,score", "q", q, "qq", "text:superman", "fq", fq), + "//float[@name='score']>'0.6'"); // test weighting through a function range query - assertQ(req("fl","*,score", "fq",fq, "q", "{!frange l=0.6 u=10}query($qq)", "qq","text:superman"), "//*[@numFound='1']"); + assertQ( + req( + "fl", + "*,score", + "fq", + fq, + "q", + "{!frange l=0.6 u=10}query($qq)", + "qq", + "text:superman"), + "//*[@numFound='1']"); // test weighting through a complex function - q ="{!func}sub(div(sum(0.0,product(1,query($qq))),1),0)"; - assertQ(req("fl","*,score","q", q, "qq","text:batman", "fq",fq), "//float[@name='score']<'0.6'"); - assertQ(req("fl","*,score","q", q, "qq","text:superman", "fq",fq), "//float[@name='score']>'0.6'"); - + q = "{!func}sub(div(sum(0.0,product(1,query($qq))),1),0)"; + assertQ( + req("fl", "*,score", "q", q, "qq", "text:batman", "fq", fq), + "//float[@name='score']<'0.6'"); + assertQ( + req("fl", "*,score", "q", q, "qq", "text:superman", "fq", fq), + "//float[@name='score']>'0.6'"); // test full param dereferencing - assertQ(req("fl","*,score","q", "{!func}add($v1,$v2)", "v1","add($v3,$v4)", "v2","1", "v3","2", "v4","5" - , "fq","id:1"), "//float[@name='score']='8.0'"); + assertQ( + req( + "fl", + "*,score", + "q", + "{!func}add($v1,$v2)", + "v1", + "add($v3,$v4)", + "v2", + "1", + "v3", + "2", + "v4", + "5", + "fq", + "id:1"), + "//float[@name='score']='8.0'"); // test ability to parse multiple values - assertQ(req("fl","*,score","q", "{!func}dist(2,vector(1,1),$pt)", "pt","3,1" - , "fq","id:1"), "//float[@name='score']='2.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}dist(2,vector(1,1),$pt)", "pt", "3,1", "fq", "id:1"), + "//float[@name='score']='2.0'"); // test that extra stuff after a function causes an error try { - assertQ(req("fl","*,score","q", "{!func}10 wow dude ignore_exception")); + assertQ(req("fl", "*,score", "q", "{!func}10 wow dude ignore_exception")); fail(); } catch (Exception e) { // OK } - // test that sorting by function query weights correctly. 
superman should sort higher than batman due to idf of the whole index - - assertQ(req("q", "*:*", "fq","id:120 OR id:121", "sort","{!func v=$sortfunc} desc", "sortfunc","query($qq)", "qq","text:(batman OR superman)") - ,"*//doc[1]/str[.='120']" - ,"*//doc[2]/str[.='121']" - ); + // test that sorting by function query weights correctly. superman should sort higher than + // batman due to idf of the whole index + + assertQ( + req( + "q", + "*:*", + "fq", + "id:120 OR id:121", + "sort", + "{!func v=$sortfunc} desc", + "sortfunc", + "query($qq)", + "qq", + "text:(batman OR superman)"), + "*//doc[1]/str[.='120']", + "*//doc[2]/str[.='121']"); // test a query that doesn't specify nested query val - assertQEx("Should fail because of missing qq", + assertQEx( + "Should fail because of missing qq", "Missing param qq while parsing function 'query($qq)'", - req("q", "*:*", "fq","id:120 OR id:121", "defType","edismax", "boost","query($qq)"), - SolrException.ErrorCode.BAD_REQUEST - ); - assertQEx("Should fail because of missing sortfunc in sort", + req("q", "*:*", "fq", "id:120 OR id:121", "defType", "edismax", "boost", "query($qq)"), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Should fail because of missing sortfunc in sort", "Can't determine a Sort Order (asc or desc) in sort spec '{!func v=$sortfunc} desc'", - req("q", "*:*", "fq","id:120 OR id:121", "sort","{!func v=$sortfunc} desc", "sortfunc","query($qq)"), - SolrException.ErrorCode.BAD_REQUEST - ); - assertQEx("Should fail because of missing qq in boost", + req( + "q", + "*:*", + "fq", + "id:120 OR id:121", + "sort", + "{!func v=$sortfunc} desc", + "sortfunc", + "query($qq)"), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Should fail because of missing qq in boost", "Nested local params must have value in v parameter. 
got 'query({!dismax v=$qq})", - req("q", "*:*", "fq","id:120 OR id:121", "defType","edismax", "boost","query({!dismax v=$qq})"), - SolrException.ErrorCode.BAD_REQUEST - ); - assertQEx("Should fail as empty value is specified for v", + req( + "q", + "*:*", + "fq", + "id:120 OR id:121", + "defType", + "edismax", + "boost", + "query({!dismax v=$qq})"), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Should fail as empty value is specified for v", "Nested function query returned null for 'query({!v=})'", - req("q", "*:*", "defType","edismax", "boost","query({!v=})"), SolrException.ErrorCode.BAD_REQUEST - ); - assertQEx("Should fail as v's value contains only spaces", + req("q", "*:*", "defType", "edismax", "boost", "query({!v=})"), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Should fail as v's value contains only spaces", "Nested function query returned null for 'query({!v= })'", - req("q", "*:*", "defType","edismax", "boost","query({!v= })"), SolrException.ErrorCode.BAD_REQUEST - ); + req("q", "*:*", "defType", "edismax", "boost", "query({!v= })"), + SolrException.ErrorCode.BAD_REQUEST); // no field specified in ord() - assertQEx("Should fail as no field is specified in ord func", + assertQEx( + "Should fail as no field is specified in ord func", "Expected identifier instead of 'null' for function 'ord()'", - req("q", "*:*", "defType","edismax","boost","ord()"), SolrException.ErrorCode.BAD_REQUEST - ); - assertQEx("Should fail as no field is specified in rord func", + req("q", "*:*", "defType", "edismax", "boost", "ord()"), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Should fail as no field is specified in rord func", "Expected identifier instead of 'null' for function 'rord()'", - req("q", "*:*", "defType","edismax","boost","rord()"), SolrException.ErrorCode.BAD_REQUEST - ); + req("q", "*:*", "defType", "edismax", "boost", "rord()"), + SolrException.ErrorCode.BAD_REQUEST); // test parseFloat - assertQEx("Should fail as less args are specified for recip func", + assertQEx( + "Should fail as less args are specified for recip func", "Expected float instead of 'null' for function 'recip(1,2)'", - req("q", "*:*","defType","edismax", "boost","recip(1,2)"), SolrException.ErrorCode.BAD_REQUEST - ); - assertQEx("Should fail as invalid value is specified for recip func", + req("q", "*:*", "defType", "edismax", "boost", "recip(1,2)"), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Should fail as invalid value is specified for recip func", "Expected float instead of 'f' for function 'recip(1,2,3,f)'", - req("q", "*:*","defType","edismax", "boost","recip(1,2,3,f)"), SolrException.ErrorCode.BAD_REQUEST - ); + req("q", "*:*", "defType", "edismax", "boost", "recip(1,2,3,f)"), + SolrException.ErrorCode.BAD_REQUEST); // this should pass - assertQ(req("q", "*:*","defType","edismax", "boost","recip(1, 2, 3, 4)")); + assertQ(req("q", "*:*", "defType", "edismax", "boost", "recip(1, 2, 3, 4)")); // for undefined field NPE shouldn't be thrown - assertQEx("Should Fail as the field is undefined", "undefined field a", - req("q", "*:*", "fl", "x:payload(a,b)"), SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "Should Fail as the field is undefined", + "undefined field a", + req("q", "*:*", "fl", "x:payload(a,b)"), + SolrException.ErrorCode.BAD_REQUEST); } @Test @@ -486,90 +615,137 @@ public void testTFIDFFunctions() { { Similarity sim = h.getCore().getLatestSchema().getFieldType("a_tfidf").getSimilarity(); assertNotNull("Test needs *_tfidf to use a TFIDFSimilarity ... 
who broke the config?", sim); - assertTrue("Test needs *_tfidf to use a TFIDFSimilarity ... who broke the config: " + sim.getClass(), - sim instanceof TFIDFSimilarity); + assertTrue( + "Test needs *_tfidf to use a TFIDFSimilarity ... who broke the config: " + sim.getClass(), + sim instanceof TFIDFSimilarity); similarity = (TFIDFSimilarity) sim; } - assertU(adoc("id","1", "a_tdt","2009-08-31T12:10:10.123Z", "b_tdt","2009-08-31T12:10:10.124Z")); - assertU(adoc("id","2", "a_tfidf","how now brown cow")); + assertU( + adoc("id", "1", "a_tdt", "2009-08-31T12:10:10.123Z", "b_tdt", "2009-08-31T12:10:10.124Z")); + assertU(adoc("id", "2", "a_tfidf", "how now brown cow")); assertU(commit()); // create more than one segment - assertU(adoc("id","3", "a_tfidf","brown cow")); - assertU(adoc("id","4")); + assertU(adoc("id", "3", "a_tfidf", "brown cow")); + assertU(adoc("id", "4")); assertU(commit()); // create more than one segment - assertU(adoc("id","5")); - assertU(adoc("id","6", "a_tfidf","cow cow cow cow cow")); + assertU(adoc("id", "5")); + assertU(adoc("id", "6", "a_tfidf", "cow cow cow cow cow")); assertU(commit()); // make sure it doesn't get a NPE if no terms are present in a field. - assertQ(req("fl","*,score","q", "{!func}idf(nofield_tfidf,cow)", "fq","id:6"), - "//float[@name='score']='" + similarity.idf(0,6) + "'"); - assertQ(req("fl","*,score","q", "{!func}tf(nofield_tfidf,cow)", "fq","id:6"), - "//float[@name='score']='" + similarity.tf(0) + "'"); + assertQ( + req("fl", "*,score", "q", "{!func}idf(nofield_tfidf,cow)", "fq", "id:6"), + "//float[@name='score']='" + similarity.idf(0, 6) + "'"); + assertQ( + req("fl", "*,score", "q", "{!func}tf(nofield_tfidf,cow)", "fq", "id:6"), + "//float[@name='score']='" + similarity.tf(0) + "'"); // fields with real values - assertQ(req("fl","*,score","q", "{!func}idf(a_tfidf,cow)", "fq","id:6"), - "//float[@name='score']='" + similarity.idf(3,6) + "'"); - assertQ(req("fl","*,score","q", "{!func}tf(a_tfidf,cow)", "fq","id:6"), - "//float[@name='score']='" + similarity.tf(5) + "'"); - - assertQ(req("fl","*,score","q", "{!func}norm(a_tfidf)", "fq","id:2"), - "//float[@name='score']='0.5'"); // 1/sqrt(4)==1/2==0.5 - + assertQ( + req("fl", "*,score", "q", "{!func}idf(a_tfidf,cow)", "fq", "id:6"), + "//float[@name='score']='" + similarity.idf(3, 6) + "'"); + assertQ( + req("fl", "*,score", "q", "{!func}tf(a_tfidf,cow)", "fq", "id:6"), + "//float[@name='score']='" + similarity.tf(5) + "'"); + + assertQ( + req("fl", "*,score", "q", "{!func}norm(a_tfidf)", "fq", "id:2"), + "//float[@name='score']='0.5'"); // 1/sqrt(4)==1/2==0.5 } - /** - * test collection-level term stats (new in 4.x indexes) - */ + /** test collection-level term stats (new in 4.x indexes) */ @Test public void testTotalTermFreq() throws Exception { clearIndex(); - assertU(adoc("id","1", "a_tdt","2009-08-31T12:10:10.123Z", "b_tdt","2009-08-31T12:10:10.124Z")); - assertU(adoc("id","2", "a_t","how now brown cow")); + assertU( + adoc("id", "1", "a_tdt", "2009-08-31T12:10:10.123Z", "b_tdt", "2009-08-31T12:10:10.124Z")); + assertU(adoc("id", "2", "a_t", "how now brown cow")); assertU(commit()); // create more than one segment - assertU(adoc("id","3", "a_t","brown cow")); - assertU(adoc("id","4")); + assertU(adoc("id", "3", "a_t", "brown cow")); + assertU(adoc("id", "4")); assertU(commit()); // create more than one segment - assertU(adoc("id","5")); - assertU(adoc("id","6", "a_t","cow cow cow cow cow")); + assertU(adoc("id", "5")); + assertU(adoc("id", "6", "a_t", "cow cow cow cow cow")); 
assertU(commit()); - assertQ(req("fl","*,score","q", "{!func}totaltermfreq('a_t','cow')", "fq","id:6"), "//float[@name='score']='7.0'"); - assertQ(req("fl","*,score","q", "{!func}ttf(a_t,'cow')", "fq","id:6"), "//float[@name='score']='7.0'"); - assertQ(req("fl","*,score","q", "{!func}sumtotaltermfreq('a_t')", "fq","id:6"), "//float[@name='score']='11.0'"); - assertQ(req("fl","*,score","q", "{!func}sttf(a_t)", "fq","id:6"), "//float[@name='score']='11.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}totaltermfreq('a_t','cow')", "fq", "id:6"), + "//float[@name='score']='7.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}ttf(a_t,'cow')", "fq", "id:6"), + "//float[@name='score']='7.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}sumtotaltermfreq('a_t')", "fq", "id:6"), + "//float[@name='score']='11.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}sttf(a_t)", "fq", "id:6"), + "//float[@name='score']='11.0'"); } @Test public void testPayloadFunction() { clearIndex(); - assertU(adoc("id","1", "vals_dpf","A|1.0 B|2.0 C|3.0 mult|50 mult|100 x|22 x|37 x|19", "default_f", "42.0")); + assertU( + adoc( + "id", + "1", + "vals_dpf", + "A|1.0 B|2.0 C|3.0 mult|50 mult|100 x|22 x|37 x|19", + "default_f", + "42.0")); assertU(commit()); - assertQ(req("fl","*,score","q", "{!func}payload(vals_dpf,A)"), "//float[@name='score']='1.0'"); - assertQ(req("fl","*,score","q", "{!func}payload(vals_dpf,B)"), "//float[@name='score']='2.0'"); - assertQ(req("fl","*,score","q", "{!func}payload(vals_dpf,C,0)"), "//float[@name='score']='3.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}payload(vals_dpf,A)"), "//float[@name='score']='1.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}payload(vals_dpf,B)"), "//float[@name='score']='2.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}payload(vals_dpf,C,0)"), "//float[@name='score']='3.0'"); // Test defaults, constant, field, and function value sources - assertQ(req("fl","*,score","q", "{!func}payload(vals_dpf,D,37.0)"), "//float[@name='score']='37.0'"); - assertQ(req("fl","*,score","q", "{!func}payload(vals_dpf,E,default_f)"), "//float[@name='score']='42.0'"); - assertQ(req("fl","*,score","q", "{!func}payload(vals_dpf,E,mul(2,default_f))"), "//float[@name='score']='84.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}payload(vals_dpf,D,37.0)"), + "//float[@name='score']='37.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}payload(vals_dpf,E,default_f)"), + "//float[@name='score']='42.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}payload(vals_dpf,E,mul(2,default_f))"), + "//float[@name='score']='84.0'"); // Test PayloadFunction's for multiple terms, average being the default - assertQ(req("fl","*,score","q", "{!func}payload(vals_dpf,mult,0.0,min)"), "//float[@name='score']='50.0'"); - assertQ(req("fl","*,score","q", "{!func}payload(vals_dpf,mult,0.0,max)"), "//float[@name='score']='100.0'"); - assertQ(req("fl","*,score","q", "{!func}payload(vals_dpf,mult,0.0,average)"), "//float[@name='score']='75.0'"); - assertQ(req("fl","*,score","q", "{!func}payload(vals_dpf,mult)"), "//float[@name='score']='75.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}payload(vals_dpf,mult,0.0,min)"), + "//float[@name='score']='50.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}payload(vals_dpf,mult,0.0,max)"), + "//float[@name='score']='100.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}payload(vals_dpf,mult,0.0,average)"), + "//float[@name='score']='75.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}payload(vals_dpf,mult)"), + 
"//float[@name='score']='75.0'"); // Test special "first" function, by checking the other functions with same term too - assertQ(req("fl","*,score","q", "{!func}payload(vals_dpf,x,0.0,min)"), "//float[@name='score']='19.0'"); - assertQ(req("fl","*,score","q", "{!func}payload(vals_dpf,x,0.0,max)"), "//float[@name='score']='37.0'"); - assertQ(req("fl","*,score","q", "{!func}payload(vals_dpf,x,0.0,average)"), "//float[@name='score']='26.0'"); - assertQ(req("fl","*,score","q", "{!func}payload(vals_dpf,x,0.0,first)"), "//float[@name='score']='22.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}payload(vals_dpf,x,0.0,min)"), + "//float[@name='score']='19.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}payload(vals_dpf,x,0.0,max)"), + "//float[@name='score']='37.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}payload(vals_dpf,x,0.0,average)"), + "//float[@name='score']='26.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}payload(vals_dpf,x,0.0,first)"), + "//float[@name='score']='22.0'"); // Test with debug - assertQ(req("fl","*,score","q", "{!func}payload(vals_dpf,A)", CommonParams.DEBUG, "true"), "//float[@name='score']='1.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}payload(vals_dpf,A)", CommonParams.DEBUG, "true"), + "//float[@name='score']='1.0'"); } @Test @@ -578,23 +754,33 @@ public void testRetrievePayloads() throws Exception { int numDocs = 100 + random().nextInt(100); int numLocations = 1000 + random().nextInt(2000); - for (int docNum = 0 ; docNum < numDocs ; ++docNum) { + for (int docNum = 0; docNum < numDocs; ++docNum) { StringBuilder amountsBuilder = new StringBuilder(); - for (int location = 1 ; location <= numLocations ; ++location) { + for (int location = 1; location <= numLocations; ++location) { String amount = "" + location + '.' 
+ random().nextInt(100); amountsBuilder.append(location).append('|').append(amount).append(' '); } - assertU(adoc("id","" + docNum, - "default_amount_f", "" + (10000 + random().nextInt(10000)) + ".0", - "amounts_dpf", amountsBuilder.toString())); + assertU( + adoc( + "id", "" + docNum, + "default_amount_f", "" + (10000 + random().nextInt(10000)) + ".0", + "amounts_dpf", amountsBuilder.toString())); } assertU(commit()); - assertJQ(req("q","*:*", - "fl","id,location:$locationId,amount:$amount", - "sort","$amount asc", - "amount","payload(amounts_dpf,$locationId,default_amount_f)", - "locationId",""+(1+random().nextInt(numLocations)), - "wt","json"), + assertJQ( + req( + "q", + "*:*", + "fl", + "id,location:$locationId,amount:$amount", + "sort", + "$amount asc", + "amount", + "payload(amounts_dpf,$locationId,default_amount_f)", + "locationId", + "" + (1 + random().nextInt(numLocations)), + "wt", + "json"), "/response/numFound==" + numDocs); } @@ -602,92 +788,111 @@ public void testRetrievePayloads() throws Exception { public void testSortByFunc() throws Exception { clearIndex(); - assertU(adoc("id", "1", "const_s", "xx", - "x_i", "100", "1_s", "a", - "x:x_i", "100", "1-1_s", "a")); - assertU(adoc("id", "2", "const_s", "xx", - "x_i", "300", "1_s", "c", - "x:x_i", "300", "1-1_s", "c")); - assertU(adoc("id", "3", "const_s", "xx", - "x_i", "200", "1_s", "b", - "x:x_i", "200", "1-1_s", "b")); + assertU( + adoc("id", "1", "const_s", "xx", "x_i", "100", "1_s", "a", "x:x_i", "100", "1-1_s", "a")); + assertU( + adoc("id", "2", "const_s", "xx", "x_i", "300", "1_s", "c", "x:x_i", "300", "1-1_s", "c")); + assertU( + adoc("id", "3", "const_s", "xx", "x_i", "200", "1_s", "b", "x:x_i", "200", "1-1_s", "b")); assertU(commit()); String desc = "/response/docs==[{'x_i':300},{'x_i':200},{'x_i':100}]"; - String asc = "/response/docs==[{'x_i':100},{'x_i':200},{'x_i':300}]"; + String asc = "/response/docs==[{'x_i':100},{'x_i':200},{'x_i':300}]"; - String threeonetwo = "/response/docs==[{'x_i':200},{'x_i':100},{'x_i':300}]"; + String threeonetwo = "/response/docs==[{'x_i':200},{'x_i':100},{'x_i':300}]"; String q = "id_i:[1 TO 3]"; - assertJQ(req("q",q, "fl","x_i", "sort","add(x_i,x_i) desc") - ,desc - ); + assertJQ(req("q", q, "fl", "x_i", "sort", "add(x_i,x_i) desc"), desc); // param sub of entire function - assertJQ(req("q",q, "fl","x_i", "sort", "const_s asc, $x asc", "x","add(x_i,x_i)") - ,asc - ); + assertJQ(req("q", q, "fl", "x_i", "sort", "const_s asc, $x asc", "x", "add(x_i,x_i)"), asc); // multiple functions - assertJQ(req("q",q, "fl","x_i", "sort", "$x asc, const_s asc, $y desc", "x", "5", "y","add(x_i,x_i)") - ,desc - ); + assertJQ( + req( + "q", + q, + "fl", + "x_i", + "sort", + "$x asc, const_s asc, $y desc", + "x", + "5", + "y", + "add(x_i,x_i)"), + desc); // multiple functions inline - assertJQ(req("q",q, "fl","x_i", "sort", "add( 10 , 10 ) asc, const_s asc, add(x_i , $const) desc", "const","50") - ,desc - ); + assertJQ( + req( + "q", + q, + "fl", + "x_i", + "sort", + "add( 10 , 10 ) asc, const_s asc, add(x_i , $const) desc", + "const", + "50"), + desc); // test function w/ local params + func inline - assertJQ(req("q",q, "fl","x_i", - "sort", "const_s asc, {!key=foo}add(x_i,x_i) desc") - ,desc - ); - assertJQ(req("q",q, "fl","x_i", - "sort", "{!key=foo}add(x_i,x_i) desc, const_s asc") - ,desc - ); + assertJQ(req("q", q, "fl", "x_i", "sort", "const_s asc, {!key=foo}add(x_i,x_i) desc"), desc); + assertJQ(req("q", q, "fl", "x_i", "sort", "{!key=foo}add(x_i,x_i) desc, const_s asc"), desc); // test 
multiple functions w/ local params + func inline - assertJQ(req("q",q, "fl","x_i", "sort", "{!key=bar}add(10,20) asc, const_s asc, {!key=foo}add(x_i,x_i) desc") - ,desc - ); + assertJQ( + req( + "q", + q, + "fl", + "x_i", + "sort", + "{!key=bar}add(10,20) asc, const_s asc, {!key=foo}add(x_i,x_i) desc"), + desc); // test multiple functions w/ local param value not inlined - assertJQ(req("q",q, "fl","x_i", "sort", "{!key=bar v=$s1} asc, {!key=foo v=$s2} desc", "s1","add(3,4)", "s2","add(x_i,5)") - ,desc - ); + assertJQ( + req( + "q", + q, + "fl", + "x_i", + "sort", + "{!key=bar v=$s1} asc, {!key=foo v=$s2} desc", + "s1", + "add(3,4)", + "s2", + "add(x_i,5)"), + desc); // no space between inlined localparams and sort order - assertJQ(req("q",q, "fl","x_i", "sort", "{!key=bar v=$s1}asc,const_s asc,{!key=foo v=$s2}desc", "s1","add(3,4)", "s2","add(x_i,5)") - ,desc - ); + assertJQ( + req( + "q", + q, + "fl", + "x_i", + "sort", + "{!key=bar v=$s1}asc,const_s asc,{!key=foo v=$s2}desc", + "s1", + "add(3,4)", + "s2", + "add(x_i,5)"), + desc); // field name that isn't a legal java Identifier // and starts with a number to trick function parser - assertJQ(req("q",q, "fl","x_i", "sort", "1_s asc") - ,asc - ); - assertJQ(req("q",q, "fl","x_i", "sort", "x:x_i desc") - ,desc - ); - assertJQ(req("q",q, "fl","x_i", "sort", "1-1_s asc") - ,asc - ); + assertJQ(req("q", q, "fl", "x_i", "sort", "1_s asc"), asc); + assertJQ(req("q", q, "fl", "x_i", "sort", "x:x_i desc"), desc); + assertJQ(req("q", q, "fl", "x_i", "sort", "1-1_s asc"), asc); // really ugly field name that isn't a java Id, and can't be // parsed as a func, but sorted fine in Solr 1.4 - assertJQ(req("q",q, "fl","x_i", - "sort", "[]_s asc, {!key=foo}add(x_i,x_i) desc") - ,desc - ); + assertJQ(req("q", q, "fl", "x_i", "sort", "[]_s asc, {!key=foo}add(x_i,x_i) desc"), desc); // use localparams to sort by a lucene query, then a function - assertJQ(req("q",q, "fl","x_i", - "sort", "{!lucene v='id:3'}desc, {!key=foo}add(x_i,x_i) asc") - ,threeonetwo - ); - - + assertJQ( + req("q", q, "fl", "x_i", "sort", "{!lucene v='id:3'}desc, {!key=foo}add(x_i,x_i) asc"), + threeonetwo); } @Test @@ -698,15 +903,26 @@ public void testDegreeRads() throws Exception { assertU(adoc("id", "2", "x_td", "90", "y_td", String.valueOf(Math.PI / 2))); assertU(adoc("id", "3", "x_td", "45", "y_td", String.valueOf(Math.PI / 4))); - assertU(commit()); - assertQ(req("fl", "*,score", "q", "{!func}rad(x_td)", "fq", "id:1"), "//float[@name='score']='0.0'"); - assertQ(req("fl", "*,score", "q", "{!func}rad(x_td)", "fq", "id:2"), "//float[@name='score']='" + (float) (Math.PI / 2) + "'"); - assertQ(req("fl", "*,score", "q", "{!func}rad(x_td)", "fq", "id:3"), "//float[@name='score']='" + (float) (Math.PI / 4) + "'"); - - assertQ(req("fl", "*,score", "q", "{!func}deg(y_td)", "fq", "id:1"), "//float[@name='score']='0.0'"); - assertQ(req("fl", "*,score", "q", "{!func}deg(y_td)", "fq", "id:2"), "//float[@name='score']='90.0'"); - assertQ(req("fl", "*,score", "q", "{!func}deg(y_td)", "fq", "id:3"), "//float[@name='score']='45.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}rad(x_td)", "fq", "id:1"), + "//float[@name='score']='0.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}rad(x_td)", "fq", "id:2"), + "//float[@name='score']='" + (float) (Math.PI / 2) + "'"); + assertQ( + req("fl", "*,score", "q", "{!func}rad(x_td)", "fq", "id:3"), + "//float[@name='score']='" + (float) (Math.PI / 4) + "'"); + + assertQ( + req("fl", "*,score", "q", "{!func}deg(y_td)", "fq", "id:1"), + 
"//float[@name='score']='0.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}deg(y_td)", "fq", "id:2"), + "//float[@name='score']='90.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}deg(y_td)", "fq", "id:3"), + "//float[@name='score']='45.0'"); } @Test @@ -715,40 +931,51 @@ public void testStrDistance() throws Exception { assertU(adoc("id", "1", "x_s", "foil")); assertU(commit()); - assertQ(req("fl", "*,score", "q", "{!func}strdist(x_s, 'foit', edit)", "fq", "id:1"), "//float[@name='score']='0.75'"); - assertQ(req("fl", "*,score", "q", "{!func}strdist(x_s, 'foit', jw)", "fq", "id:1"), "//float[@name='score']='0.8833333'"); - assertQ(req("fl", "*,score", "q", "{!func}strdist(x_s, 'foit', ngram, 2)", "fq", "id:1"), "//float[@name='score']='0.875'"); + assertQ( + req("fl", "*,score", "q", "{!func}strdist(x_s, 'foit', edit)", "fq", "id:1"), + "//float[@name='score']='0.75'"); + assertQ( + req("fl", "*,score", "q", "{!func}strdist(x_s, 'foit', jw)", "fq", "id:1"), + "//float[@name='score']='0.8833333'"); + assertQ( + req("fl", "*,score", "q", "{!func}strdist(x_s, 'foit', ngram, 2)", "fq", "id:1"), + "//float[@name='score']='0.875'"); // strdist on a missing valuesource should itself by missing, so the ValueSourceAugmenter // should supress it... - assertQ(req("q", "id:1", - "fl", "good:strdist(x_s, 'toil', edit)", - "fl", "bad1:strdist(missing1_s, missing2_s, edit)", - "fl", "bad2:strdist(missing1_s, 'something', edit)", - "fl", "bad3:strdist(missing1_s, x_s, edit)") - , "//float[@name='good']='0.75'" - , "count(//float[starts-with(@name,'bad')])=0" - ); + assertQ( + req( + "q", "id:1", + "fl", "good:strdist(x_s, 'toil', edit)", + "fl", "bad1:strdist(missing1_s, missing2_s, edit)", + "fl", "bad2:strdist(missing1_s, 'something', edit)", + "fl", "bad3:strdist(missing1_s, x_s, edit)"), + "//float[@name='good']='0.75'", + "count(//float[starts-with(@name,'bad')])=0"); // in a query context, there is always a number... 
// // if a ValueSource is missing, it is maximally distant from every other // value source *except* for another missing value source // ie: strdist(null,null)==1 but strdist(null,anything)==0 - assertQ(req("fl","score","fq", "id:1", "q", "{!func}strdist(missing1_s, missing2_s, edit)"), - "//float[@name='score']='1.0'"); - assertQ(req("fl","score","fq", "id:1", "q", "{!func}strdist(missing1_s, x_s, edit)"), - "//float[@name='score']='0.0'"); - assertQ(req("fl","score","fq", "id:1", "q", "{!func}strdist(missing1_s, 'const', edit)"), - "//float[@name='score']='0.0'"); + assertQ( + req("fl", "score", "fq", "id:1", "q", "{!func}strdist(missing1_s, missing2_s, edit)"), + "//float[@name='score']='1.0'"); + assertQ( + req("fl", "score", "fq", "id:1", "q", "{!func}strdist(missing1_s, x_s, edit)"), + "//float[@name='score']='0.0'"); + assertQ( + req("fl", "score", "fq", "id:1", "q", "{!func}strdist(missing1_s, 'const', edit)"), + "//float[@name='score']='0.0'"); } public void dofunc(String func, double val) throws Exception { // String sval = Double.toString(val); - String sval = Float.toString((float)val); + String sval = Float.toString((float) val); - assertQ(req("fl", "*,score", "defType","func", "fq","id:1", "q",func), - "//float[@name='score']='" + sval + "'"); + assertQ( + req("fl", "*,score", "defType", "func", "fq", "id:1", "q", func), + "//float[@name='score']='" + sval + "'"); } @Test @@ -761,16 +988,18 @@ public void testFuncs() throws Exception { dofunc("1.0", 1.0); dofunc("e()", Math.E); dofunc("pi()", Math.PI); - dofunc("add(2,3)", 2+3); - dofunc("mul(2,3)", 2*3); + dofunc("add(2,3)", 2 + 3); + dofunc("mul(2,3)", 2 * 3); dofunc("rad(45)", Math.toRadians(45)); dofunc("deg(.5)", Math.toDegrees(.5)); dofunc("sqrt(9)", Math.sqrt(9)); dofunc("cbrt(8)", Math.cbrt(8)); - dofunc("max(0,1)", Math.max(0,1)); - dofunc("max(10,3,8,7,5,4)", Math.max(Math.max(Math.max(Math.max(Math.max(10,3),8),7),5),4)); - dofunc("min(0,1)", Math.min(0,1)); - dofunc("min(10,3,8,7,5,4)", Math.min(Math.min(Math.min(Math.min(Math.min(10,3),8),7),5),4)); + dofunc("max(0,1)", Math.max(0, 1)); + dofunc( + "max(10,3,8,7,5,4)", Math.max(Math.max(Math.max(Math.max(Math.max(10, 3), 8), 7), 5), 4)); + dofunc("min(0,1)", Math.min(0, 1)); + dofunc( + "min(10,3,8,7,5,4)", Math.min(Math.min(Math.min(Math.min(Math.min(10, 3), 8), 7), 5), 4)); dofunc("log(100)", Math.log10(100)); dofunc("ln(3)", Math.log(3)); dofunc("exp(1)", Math.exp(1)); @@ -786,14 +1015,14 @@ public void testFuncs() throws Exception { dofunc("ceil(2.3)", Math.ceil(2.3)); dofunc("floor(2.3)", Math.floor(2.3)); dofunc("rint(2.3)", Math.rint(2.3)); - dofunc("pow(2,0.5)", Math.pow(2,0.5)); - dofunc("hypot(3,4)", Math.hypot(3,4)); - dofunc("atan2(.25,.5)", Math.atan2(.25,.5)); + dofunc("pow(2,0.5)", Math.pow(2, 0.5)); + dofunc("hypot(3,4)", Math.hypot(3, 4)); + dofunc("atan2(.25,.5)", Math.atan2(.25, .5)); } /** - * verify that both the field("...") value source parser as well as - * ExternalFileField work with esoteric field names + * verify that both the field("...") value source parser as well as ExternalFileField work with + * esoteric field names */ @Test public void testExternalFieldValueSourceParser() { @@ -802,14 +1031,16 @@ public void testExternalFieldValueSourceParser() { String field = "CoMpleX fieldName _extf"; String fieldAsFunc = "field(\"CoMpleX fieldName _extf\")"; - int[] ids = {100,-4,0,10,25,5,77,23,55,-78,-45,-24,63,78,94,22,34,54321,261,-627}; + int[] ids = { + 100, -4, 0, 10, 25, 5, 77, 23, 55, -78, -45, -24, 63, 78, 94, 22, 34, 54321, 261, 
-627 + }; - createIndex(null,ids); + createIndex(null, ids); // Unsorted field, largest first makeExternalFile(field, "54321=543210\n0=-999\n25=250"); // test identity (straight field value) - singleTest(fieldAsFunc, "\0", 54321, 543210, 0,0, 25,250, 100, 1); + singleTest(fieldAsFunc, "\0", 54321, 543210, 0, 0, 25, 250, 100, 1); Object orig = FileFloatSource.onlyForTesting; singleTest(fieldAsFunc, "log(\0)"); // make sure the values were cached @@ -825,9 +1056,10 @@ public void testExternalFieldValueSourceParser() { } /** - * some platforms don't allow quote characters in filenames, so - * in addition to testExternalFieldValueSourceParser above, test a field - * name with quotes in it that does NOT use ExternalFileField + * some platforms don't allow quote characters in filenames, so in addition to + * testExternalFieldValueSourceParser above, test a field name with quotes in it that does NOT use + * ExternalFileField + * * @see #testExternalFieldValueSourceParser */ @Test @@ -837,73 +1069,133 @@ public void testFieldValueSourceParser() { String field = "CoMpleX \" fieldName _f"; String fieldAsFunc = "field(\"CoMpleX \\\" fieldName _f\")"; - int[] ids = {100,-4,0,10,25,5,77,1}; + int[] ids = {100, -4, 0, 10, 25, 5, 77, 1}; createIndex(field, ids); // test identity (straight field value) - singleTest(fieldAsFunc, "\0", - 100,100, -4,0, 0,0, 10,10, 25,25, 5,5, 77,77, 1,1); - singleTest(fieldAsFunc, "sqrt(\0)", - 100,10, 25,5, 0,0, 1,1); - singleTest(fieldAsFunc, "log(\0)", 1,0); + singleTest(fieldAsFunc, "\0", 100, 100, -4, 0, 0, 0, 10, 10, 25, 25, 5, 5, 77, 77, 1, 1); + singleTest(fieldAsFunc, "sqrt(\0)", 100, 10, 25, 5, 0, 0, 1, 1); + singleTest(fieldAsFunc, "log(\0)", 1, 0); } @Test public void testBooleanFunctions() throws Exception { clearIndex(); - assertU(adoc("id", "1", "text", "hello", "foo_s","A", "foo_ti", "0", "foo_tl","0", "foo_tf", "0.00001")); - assertU(adoc("id", "2" , "foo_ti","10", "foo_tl","11")); + assertU( + adoc( + "id", "1", "text", "hello", "foo_s", "A", "foo_ti", "0", "foo_tl", "0", "foo_tf", + "0.00001")); + assertU(adoc("id", "2", "foo_ti", "10", "foo_tl", "11")); assertU(commit()); // test weighting of functions - assertJQ(req("q", "id:1", "fl", "a:testfunc(1)") - , "/response/docs/[0]=={'a':1}"); + assertJQ(req("q", "id:1", "fl", "a:testfunc(1)"), "/response/docs/[0]=={'a':1}"); // true and false functions and constants - assertJQ(req("q", "id:1", "fl", "t:true(),f:false(),tt:{!func}true,ff:{!func}false") - , "/response/docs/[0]=={'t':true,'f':false,'tt':true,'ff':false}"); + assertJQ( + req("q", "id:1", "fl", "t:true(),f:false(),tt:{!func}true,ff:{!func}false"), + "/response/docs/[0]=={'t':true,'f':false,'tt':true,'ff':false}"); // test that exists(query) depends on the query matching the document - assertJQ(req("q", "id:1", "fl", "t:exists(query($q1)),f:exists(query($q2))", "q1","text:hello", "q2","text:there") - , "/response/docs/[0]=={'t':true,'f':false}"); + assertJQ( + req( + "q", + "id:1", + "fl", + "t:exists(query($q1)),f:exists(query($q2))", + "q1", + "text:hello", + "q2", + "text:there"), + "/response/docs/[0]=={'t':true,'f':false}"); // test if() - assertJQ(req("q", "id:1", "fl", "a1:if(true,'A','B')", "fl","b1:if(false,'A',testfunc('B'))") - , "/response/docs/[0]=={'a1':'A', 'b1':'B'}"); + assertJQ( + req("q", "id:1", "fl", "a1:if(true,'A','B')", "fl", "b1:if(false,'A',testfunc('B'))"), + "/response/docs/[0]=={'a1':'A', 'b1':'B'}"); // queries with positive scores < 1 should still evaluate to 'true' in boolean context - assertJQ(req("q", "id:1", 
"nested", "*:*^=0.00001", - "fl", "a1:if(query($nested),'A','B')", "fl","b1:if(not(query($nested)),'A','B')") - , "/response/docs/[0]=={'a1':'A', 'b1':'B'}"); + assertJQ( + req( + "q", + "id:1", + "nested", + "*:*^=0.00001", + "fl", + "a1:if(query($nested),'A','B')", + "fl", + "b1:if(not(query($nested)),'A','B')"), + "/response/docs/[0]=={'a1':'A', 'b1':'B'}"); // test boolean operators - assertJQ(req("q", "id:1", "fl", "t1:and(testfunc(true),true)", "fl","f1:and(true,false)", "fl","f2:and(false,true)", "fl","f3:and(false,false)") - , "/response/docs/[0]=={'t1':true, 'f1':false, 'f2':false, 'f3':false}"); - assertJQ(req("q", "id:1", "fl", "t1:or(testfunc(true),true)", "fl","t2:or(true,false)", "fl","t3:or(false,true)", "fl","f1:or(false,false)") - , "/response/docs/[0]=={'t1':true, 't2':true, 't3':true, 'f1':false}"); - assertJQ(req("q", "id:1", "fl", "f1:xor(testfunc(true),true)", "fl","t1:xor(true,false)", "fl","t2:xor(false,true)", "fl","f2:xor(false,false)") - , "/response/docs/[0]=={'t1':true, 't2':true, 'f1':false, 'f2':false}"); - assertJQ(req("q", "id:1", "fl", "t:not(testfunc(false)),f:not(true)") - , "/response/docs/[0]=={'t':true, 'f':false}"); + assertJQ( + req( + "q", + "id:1", + "fl", + "t1:and(testfunc(true),true)", + "fl", + "f1:and(true,false)", + "fl", + "f2:and(false,true)", + "fl", + "f3:and(false,false)"), + "/response/docs/[0]=={'t1':true, 'f1':false, 'f2':false, 'f3':false}"); + assertJQ( + req( + "q", + "id:1", + "fl", + "t1:or(testfunc(true),true)", + "fl", + "t2:or(true,false)", + "fl", + "t3:or(false,true)", + "fl", + "f1:or(false,false)"), + "/response/docs/[0]=={'t1':true, 't2':true, 't3':true, 'f1':false}"); + assertJQ( + req( + "q", + "id:1", + "fl", + "f1:xor(testfunc(true),true)", + "fl", + "t1:xor(true,false)", + "fl", + "t2:xor(false,true)", + "fl", + "f2:xor(false,false)"), + "/response/docs/[0]=={'t1':true, 't2':true, 'f1':false, 'f2':false}"); + assertJQ( + req("q", "id:1", "fl", "t:not(testfunc(false)),f:not(true)"), + "/response/docs/[0]=={'t':true, 'f':false}"); // test fields evaluated as booleans in wrapping functions - assertJQ(req("q", "id:1", "fl", "a:not(foo_ti), b:if(foo_tf,'TT','FF'), c:and(true,foo_tf)") - , "/response/docs/[0]=={'a':true, 'b':'TT', 'c':true}"); - assertJQ(req("q", "id:2", "fl", "a:not(foo_ti), b:if(foo_tf,'TT','FF'), c:and(true,foo_tf)") - , "/response/docs/[0]=={'a':false, 'b':'FF', 'c':false}"); - + assertJQ( + req("q", "id:1", "fl", "a:not(foo_ti), b:if(foo_tf,'TT','FF'), c:and(true,foo_tf)"), + "/response/docs/[0]=={'a':true, 'b':'TT', 'c':true}"); + assertJQ( + req("q", "id:2", "fl", "a:not(foo_ti), b:if(foo_tf,'TT','FF'), c:and(true,foo_tf)"), + "/response/docs/[0]=={'a':false, 'b':'FF', 'c':false}"); // def(), the default function that returns the first value that exists - assertJQ(req("q", "id:1", "fl", "x:def(id,testfunc(123)), y:def(foo_f,234.0)") - , "/response/docs/[0]=={'x':'1', 'y':234.0}"); - assertJQ(req("q", "id:1", "fl", "x:def(foo_s,'Q'), y:def(missing_s,'W')") - , "/response/docs/[0]=={'x':'A', 'y':'W'}"); + assertJQ( + req("q", "id:1", "fl", "x:def(id,testfunc(123)), y:def(foo_f,234.0)"), + "/response/docs/[0]=={'x':'1', 'y':234.0}"); + assertJQ( + req("q", "id:1", "fl", "x:def(foo_s,'Q'), y:def(missing_s,'W')"), + "/response/docs/[0]=={'x':'A', 'y':'W'}"); // test constant conversion to boolean - assertJQ(req("q", "id:1", "fl", "a:not(0), b:not(1), c:not(0.0), d:not(1.1), e:not('A'), f:not(0.001)") - , "/response/docs/[0]=={'a':true, 'b':false, 'c':true, 'd':false, 'e':false, 'f':false}"); - + 
assertJQ( + req( + "q", + "id:1", + "fl", + "a:not(0), b:not(1), c:not(0.0), d:not(1.1), e:not('A'), f:not(0.001)"), + "/response/docs/[0]=={'a':true, 'b':false, 'c':true, 'd':false, 'e':false, 'f':false}"); } @Test @@ -911,37 +1203,40 @@ public void testConcatFunction() { clearIndex(); assertU(adoc("id", "1", "field1_t", "buzz", "field2_t", "word")); - assertU(adoc("id", "2", "field1_t", "1", "field2_t", "2","field4_t", "4")); + assertU(adoc("id", "2", "field1_t", "1", "field2_t", "2", "field4_t", "4")); assertU(commit()); - assertQ(req("q","id:1", - "fl","field:concat(field1_t,field2_t)"), + assertQ( + req("q", "id:1", "fl", "field:concat(field1_t,field2_t)"), " //str[@name='field']='buzzword'"); - assertQ(req("q","id:2", - "fl","field:concat(field1_t,field2_t,field4_t)"), + assertQ( + req("q", "id:2", "fl", "field:concat(field1_t,field2_t,field4_t)"), " //str[@name='field']='124'"); - assertQ(req("q","id:1", - "fl","field:def(concat(field3_t, field4_t), 'defValue')"), + assertQ( + req("q", "id:1", "fl", "field:def(concat(field3_t, field4_t), 'defValue')"), " //str[@name='field']='defValue'"); - } + } @Test public void testPseudoFieldFunctions() throws Exception { clearIndex(); - assertU(adoc("id", "1", "text", "hello", "foo_s","A", "yak_i", "32")); + assertU(adoc("id", "1", "text", "hello", "foo_s", "A", "yak_i", "32")); assertU(adoc("id", "2")); assertU(commit()); // if exists() is false, no pseudo-field should be added - assertJQ(req("q", "id:1", "fl", "a:1,b:2.0,c:'X',d:{!func}foo_s,e:{!func}bar_s") - , "/response/docs/[0]=={'a':1, 'b':2.0,'c':'X','d':'A'}"); - assertJQ(req("q", "id:1", "fl", "a:sum(yak_i,bog_i),b:mul(yak_i,bog_i),c:min(yak_i,bog_i)") - , "/response/docs/[0]=={ 'c':32.0 }"); - assertJQ(req("q", "id:1", "fl", "a:sum(yak_i,def(bog_i,42)), b:max(yak_i,bog_i)") - , "/response/docs/[0]=={ 'a': 74.0, 'b':32.0 }"); + assertJQ( + req("q", "id:1", "fl", "a:1,b:2.0,c:'X',d:{!func}foo_s,e:{!func}bar_s"), + "/response/docs/[0]=={'a':1, 'b':2.0,'c':'X','d':'A'}"); + assertJQ( + req("q", "id:1", "fl", "a:sum(yak_i,bog_i),b:mul(yak_i,bog_i),c:min(yak_i,bog_i)"), + "/response/docs/[0]=={ 'c':32.0 }"); + assertJQ( + req("q", "id:1", "fl", "a:sum(yak_i,def(bog_i,42)), b:max(yak_i,bog_i)"), + "/response/docs/[0]=={ 'a': 74.0, 'b':32.0 }"); } @Test @@ -956,19 +1251,24 @@ public void testMissingFieldFunctionBehavior() throws Exception { // to the index might have ever had a value for, so that the segment // term metadata doesn't exist - for (String suffix : new String[] {"s", "b", "dt", "tdt", - "i", "l", "f", "d", - "ti", "tl", "tf", "td" }) { + for (String suffix : + new String[] { + "s", "b", "dt", "tdt", + "i", "l", "f", "d", + "ti", "tl", "tf", "td" + }) { final String field = "no__vals____" + suffix; - assertQ(req("q","id:1", - "fl","noval_if:if("+field+",42,-99)", - "fl","noval_def:def("+field+",-99)", - "fl","noval_not:not("+field+")", - "fl","noval_exists:exists("+field+")"), - "//long[@name='noval_if']='-99'", - "//long[@name='noval_def']='-99'", - "//bool[@name='noval_not']='true'", - "//bool[@name='noval_exists']='false'"); + assertQ( + req( + "q", "id:1", + "fl", "noval_if:if(" + field + ",42,-99)", + "fl", "noval_def:def(" + field + ",-99)", + "fl", "noval_not:not(" + field + ")", + "fl", "noval_exists:exists(" + field + ")"), + "//long[@name='noval_if']='-99'", + "//long[@name='noval_def']='-99'", + "//bool[@name='noval_not']='true'", + "//bool[@name='noval_exists']='false'"); } } @@ -981,48 +1281,29 @@ public void testNumericComparisons() throws Exception { 
assertU(commit()); // test weighting of functions - assertJQ(req("q", "id:1", "fl", "a:gt(age_i,30),b:lt(age_i,30)") - , "/response/docs/[0]=={'a':true,'b':false}"); - - assertJQ(req("q", "id:1", "fl", "a:exists(gt(foo_i,30))") - , "/response/docs/[0]=={'a':false}"); + assertJQ( + req("q", "id:1", "fl", "a:gt(age_i,30),b:lt(age_i,30)"), + "/response/docs/[0]=={'a':true,'b':false}"); - singleTest("age_i", "if(gt(age_i,30),5,2)", - /*id*/1, /*score*/5, - /*id*/2, /*score*/2); + assertJQ(req("q", "id:1", "fl", "a:exists(gt(foo_i,30))"), "/response/docs/[0]=={'a':false}"); - singleTest("age_i", "if(lt(age_i,30),5,2)", - /*id*/1, /*score*/2, - /*id*/2, /*score*/5); + singleTest("age_i", "if(gt(age_i,30),5,2)", /*id*/ 1, /*score*/ 5, /*id*/ 2, /*score*/ 2); - singleTest("age_i", "if(lt(age_i,34.5),5,2)", - /*id*/1, /*score*/2, - /*id*/2, /*score*/5); + singleTest("age_i", "if(lt(age_i,30),5,2)", /*id*/ 1, /*score*/ 2, /*id*/ 2, /*score*/ 5); - singleTest("age_i", "if(lte(age_i,35),5,2)", - /*id*/1, /*score*/5, - /*id*/2, /*score*/5); + singleTest("age_i", "if(lt(age_i,34.5),5,2)", /*id*/ 1, /*score*/ 2, /*id*/ 2, /*score*/ 5); - singleTest("age_i", "if(gte(age_i,25),5,2)", - /*id*/1, /*score*/5, - /*id*/2, /*score*/5); + singleTest("age_i", "if(lte(age_i,35),5,2)", /*id*/ 1, /*score*/ 5, /*id*/ 2, /*score*/ 5); - singleTest("age_i", "if(lte(age_i,25),5,2)", - /*id*/1, /*score*/2, - /*id*/2, /*score*/5); + singleTest("age_i", "if(gte(age_i,25),5,2)", /*id*/ 1, /*score*/ 5, /*id*/ 2, /*score*/ 5); - singleTest("age_i", "if(gte(age_i,35),5,2)", - /*id*/1, /*score*/5, - /*id*/2, /*score*/2); + singleTest("age_i", "if(lte(age_i,25),5,2)", /*id*/ 1, /*score*/ 2, /*id*/ 2, /*score*/ 5); + singleTest("age_i", "if(gte(age_i,35),5,2)", /*id*/ 1, /*score*/ 5, /*id*/ 2, /*score*/ 2); - singleTest("age_i", "if(eq(age_i,30),5,2)", - /*id*/1, /*score*/2, - /*id*/2, /*score*/2); + singleTest("age_i", "if(eq(age_i,30),5,2)", /*id*/ 1, /*score*/ 2, /*id*/ 2, /*score*/ 2); - singleTest("age_i", "if(eq(age_i,35),5,2)", - /*id*/1, /*score*/5, - /*id*/2, /*score*/2); + singleTest("age_i", "if(eq(age_i,35),5,2)", /*id*/ 1, /*score*/ 5, /*id*/ 2, /*score*/ 2); } @Test @@ -1033,13 +1314,21 @@ public void testLongComparisons() { assertU(adoc("id", "2", "number_of_atoms_in_universe_l", Long.toString(Long.MAX_VALUE - 1))); assertU(commit()); - singleTest("number_of_atoms_in_universe_l", "if(gt(number_of_atoms_in_universe_l," + Long.toString(Long.MAX_VALUE - 1) + "),5,2)", - /*id*/1, /*score*/5, - /*id*/2, /*score*/2); - - singleTest("number_of_atoms_in_universe_l", "if(lt(number_of_atoms_in_universe_l," + Long.toString(Long.MAX_VALUE) + "),5,2)", - /*id*/2, /*score*/5, - /*id*/1, /*score*/2); + singleTest( + "number_of_atoms_in_universe_l", + "if(gt(number_of_atoms_in_universe_l," + Long.toString(Long.MAX_VALUE - 1) + "),5,2)", + /*id*/ 1, /*score*/ + 5, + /*id*/ 2, /*score*/ + 2); + + singleTest( + "number_of_atoms_in_universe_l", + "if(lt(number_of_atoms_in_universe_l," + Long.toString(Long.MAX_VALUE) + "),5,2)", + /*id*/ 2, /*score*/ + 5, + /*id*/ 1, /*score*/ + 2); } @Test @@ -1052,7 +1341,8 @@ public void testQueryAsFlParam() { // some docs match the query func but some don't // if doc doesn't match, it should use default value - assertQ(req("q", "*:*", "fl", "*,score,bleh:query($qq,5.0)", "qq", "id:2"), + assertQ( + req("q", "*:*", "fl", "*,score,bleh:query($qq,5.0)", "qq", "id:2"), "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.='1']", "//result/doc[2]/str[@name='id'][.='2']", @@ -1063,73 +1353,112 @@ 
public void testQueryAsFlParam() { // when the doc matches the query func condition, default value shouldn't be used // when no def val is passed in query func, 0.0 would be used - assertQ(req("q", "*:*", "fl", "*,score,bleh:query($qq)", "qq", "id:*"), + assertQ( + req("q", "*:*", "fl", "*,score,bleh:query($qq)", "qq", "id:*"), "//*[@numFound='2']", "//result/doc[1]/str[@name='id'][.='1']", "//result/doc[2]/str[@name='id'][.='2']", "count(//result/doc[1]/float[@name='bleh'][.='0.0'])=0", "count(//result/doc[2]/float[@name='bleh'][.='0.0'])=0", "//result/doc[1]/float[@name='bleh']", - "//result/doc[2]/float[@name='bleh']" - ); + "//result/doc[2]/float[@name='bleh']"); } @Test public void testEqualFunction() { clearIndex(); - assertU(adoc("id", "1", "field1_s", "value1", "field2_s", "value1", - "field1_s_dv", "value1", "field2_s_dv", "value2", "field_b", "true")); - assertU(adoc("id", "2", "field1_s", "value1", "field2_s", "value2", - "field1_s_dv", "value1", "field2_s_dv", "value1", "field_b", "false")); + assertU( + adoc( + "id", + "1", + "field1_s", + "value1", + "field2_s", + "value1", + "field1_s_dv", + "value1", + "field2_s_dv", + "value2", + "field_b", + "true")); + assertU( + adoc( + "id", + "2", + "field1_s", + "value1", + "field2_s", + "value2", + "field1_s_dv", + "value1", + "field2_s_dv", + "value1", + "field_b", + "false")); assertU(commit()); - singleTest("field1_s", "if(eq(field1_s,field2_s),5,2)", - /*id*/1, /*score*/5, - /*id*/2, /*score*/2); - singleTest("field1_s_dv", "if(eq(field1_s_dv,field2_s_dv),5,2)", - /*id*/2, /*score*/5, - /*id*/1, /*score*/2); - singleTest("field1_s", "if(eq(field1_s,field1_s_dv),5,2)", - /*id*/1, /*score*/5, - /*id*/2, /*score*/5); - singleTest("field2_s", "if(eq(field2_s,field2_s_dv),5,2)", - /*id*/1, /*score*/2, - /*id*/2, /*score*/2); - singleTest("field2_s", "if(eq(field2_s,'value1'),5,2)", - /*id*/1, /*score*/5, - /*id*/2, /*score*/2); - singleTest("field1_s", "if(eq('value1','value1'),5,2)", - /*id*/1, /*score*/5, - /*id*/2, /*score*/5); - singleTest("field_b", "if(eq(if(field_b,'value1','value2'),'value1'),5,2)", - /*id*/1, /*score*/5, - /*id*/2, /*score*/2); + singleTest( + "field1_s", "if(eq(field1_s,field2_s),5,2)", /*id*/ 1, /*score*/ 5, /*id*/ 2, /*score*/ 2); + singleTest( + "field1_s_dv", + "if(eq(field1_s_dv,field2_s_dv),5,2)", + /*id*/ 2, /*score*/ + 5, + /*id*/ 1, /*score*/ + 2); + singleTest( + "field1_s", + "if(eq(field1_s,field1_s_dv),5,2)", + /*id*/ 1, /*score*/ + 5, + /*id*/ 2, /*score*/ + 5); + singleTest( + "field2_s", + "if(eq(field2_s,field2_s_dv),5,2)", + /*id*/ 1, /*score*/ + 2, + /*id*/ 2, /*score*/ + 2); + singleTest( + "field2_s", "if(eq(field2_s,'value1'),5,2)", /*id*/ 1, /*score*/ 5, /*id*/ 2, /*score*/ 2); + singleTest( + "field1_s", "if(eq('value1','value1'),5,2)", /*id*/ 1, /*score*/ 5, /*id*/ 2, /*score*/ 5); + singleTest( + "field_b", + "if(eq(if(field_b,'value1','value2'),'value1'),5,2)", + /*id*/ 1, /*score*/ + 5, + /*id*/ 2, /*score*/ + 2); } @Test public void testEqualNumericComparisons() { clearIndex(); assertU(adoc("id", "1", "field_d", "5.0", "field_i", "5")); - assertU(adoc("id", "2", "field_d", "3.0", "field_i", "3")); + assertU(adoc("id", "2", "field_d", "3.0", "field_i", "3")); assertU(commit()); - singleTest("field_d", "if(eq(field_d,5),5,2)", - /*id*/1, /*score*/5, - /*id*/2, /*score*/2); - singleTest("field_d", "if(eq(field_d,5.0),5,2)", - /*id*/1, /*score*/5, - /*id*/2, /*score*/2); - singleTest("field_d", "if(eq(5,def(field_d,5)),5,2)", - /*id*/1, /*score*/5, - /*id*/2, /*score*/2); - 
singleTest("field_i", "if(eq(5.0,def(field_i,5)),5,2)", - /*id*/1, /*score*/5, - /*id*/2, /*score*/2); - singleTest("field_not_existed_i", "if(def(field_not_existed_i,5.0),5,2)", - /*id*/1, /*score*/5, - /*id*/2, /*score*/5); - singleTest("field_not_existed_i", "if(def(field_not_existed_i,5),5,2)", - /*id*/1, /*score*/5, - /*id*/2, /*score*/5); + singleTest("field_d", "if(eq(field_d,5),5,2)", /*id*/ 1, /*score*/ 5, /*id*/ 2, /*score*/ 2); + singleTest("field_d", "if(eq(field_d,5.0),5,2)", /*id*/ 1, /*score*/ 5, /*id*/ 2, /*score*/ 2); + singleTest( + "field_d", "if(eq(5,def(field_d,5)),5,2)", /*id*/ 1, /*score*/ 5, /*id*/ 2, /*score*/ 2); + singleTest( + "field_i", "if(eq(5.0,def(field_i,5)),5,2)", /*id*/ 1, /*score*/ 5, /*id*/ 2, /*score*/ 2); + singleTest( + "field_not_existed_i", + "if(def(field_not_existed_i,5.0),5,2)", + /*id*/ 1, /*score*/ + 5, + /*id*/ 2, /*score*/ + 5); + singleTest( + "field_not_existed_i", + "if(def(field_not_existed_i,5),5,2)", + /*id*/ 1, /*score*/ + 5, + /*id*/ 2, /*score*/ + 5); } @Test @@ -1138,26 +1467,21 @@ public void testDifferentTypesComparisons() { assertU(adoc("id", "1", "field_s", "value")); assertU(adoc("id", "2")); assertU(commit()); - singleTest("field_s", "if(eq(field_s,'value'),5,2)", - /*id*/1, /*score*/5, - /*id*/2, /*score*/2); - singleTest("field_s", "if(eq(def(field_s,5),5),5,2)", - /*id*/2, /*score*/5, - /*id*/1, /*score*/2); - singleTest("field_s", "if(eq(def(field_s,5),5.0),5,2)", - /*id*/2, /*score*/5, - /*id*/1, /*score*/2); - singleTest("field_s", "if(eq(def(field_s,'5'),5),5,2)", - /*id*/1, /*score*/2, - /*id*/2, /*score*/2); + singleTest( + "field_s", "if(eq(field_s,'value'),5,2)", /*id*/ 1, /*score*/ 5, /*id*/ 2, /*score*/ 2); + singleTest( + "field_s", "if(eq(def(field_s,5),5),5,2)", /*id*/ 2, /*score*/ 5, /*id*/ 1, /*score*/ 2); + singleTest( + "field_s", "if(eq(def(field_s,5),5.0),5,2)", /*id*/ 2, /*score*/ 5, /*id*/ 1, /*score*/ 2); + singleTest( + "field_s", "if(eq(def(field_s,'5'),5),5,2)", /*id*/ 1, /*score*/ 2, /*id*/ 2, /*score*/ 2); } /** - * Tests a specific (edge) case where a subQuery is null, because no terms are - * found in the query. Below such subQuery is created from a field query on a - * query text containing only stopwords. Feeding the resulting null-subQuery - * into a QueryValueSource (and then using it in for example an if function) - * may not produce NullPointerExceptions. + * Tests a specific (edge) case where a subQuery is null, because no terms are found in the query. + * Below such subQuery is created from a field query on a query text containing only stopwords. + * Feeding the resulting null-subQuery into a QueryValueSource (and then using it in for example + * an if function) may not produce NullPointerExceptions. 
*/ @Test public void testNullSubQuery() throws Exception { diff --git a/solr/core/src/test/org/apache/solr/search/function/TestMinMaxOnMultiValuedField.java b/solr/core/src/test/org/apache/solr/search/function/TestMinMaxOnMultiValuedField.java index 5a84393889c..56f9e43a028 100644 --- a/solr/core/src/test/org/apache/solr/search/function/TestMinMaxOnMultiValuedField.java +++ b/solr/core/src/test/org/apache/solr/search/function/TestMinMaxOnMultiValuedField.java @@ -16,11 +16,10 @@ */ package org.apache.solr.search.function; -import java.util.Arrays; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; - import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; @@ -37,8 +36,9 @@ import org.junit.BeforeClass; /** - * Tests the behavior of field(foo,min|max) on numerious types of multivalued 'foo' fields, - * as well as the beahvior of sorting on foo asc|desc to implicitly choose the min|max. + * Tests the behavior of field(foo,min|max) on numerous types of multivalued 'foo' + * fields, as well as the behavior of sorting on foo asc|desc to implicitly choose the + * min|max. */ @SuppressCodecs({"SimpleText"}) // see TestSortedSetSelector public class TestMinMaxOnMultiValuedField extends SolrTestCaseJ4 { @@ -47,26 +47,43 @@ public class TestMinMaxOnMultiValuedField extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-functionquery.xml","schema11.xml"); - checkFields(new String[] {"i", "l", "f", "d"}, new String [] {"_p", "_ni_p"}); - checkFields(new String[] {"ti", "tl", "tf", "td"}, new String [] {"", "_dv", "_ni_dv"}); - checkFields(new String[] {"str", // no expectation on missing first/last - "str_missf_", "str_missl_", - "int_missf_", "int_missl_", - "long_missf_", "long_missl_", - "float_missf_", "float_missl_", - "double_missf_", "double_missl_", - "date_missf_", "date_missl_", - "enum_missf_", "enum_missl_", - "bool_missf_", "bool_missl_" }, new String [] {"_dv"}); - checkFields(new String[] {"stxt_", // no expectation on missing first/last - "stxt_missf_", "stxt_missl_" }, new String [] { "_dv"}); - checkFields(new String [] { "stxt_" }, // no expectation on missing first/last - new String [] { "_nodv", "_dv" }); - checkFields(new String [] { "stxt_missf_", "stxt_missl_" }, new String [] { "_dv"}); - - } - + initCore("solrconfig-functionquery.xml", "schema11.xml"); + checkFields(new String[] {"i", "l", "f", "d"}, new String[] {"_p", "_ni_p"}); + checkFields(new String[] {"ti", "tl", "tf", "td"}, new String[] {"", "_dv", "_ni_dv"}); + checkFields( + new String[] { + "str", // no expectation on missing first/last + "str_missf_", + "str_missl_", + "int_missf_", + "int_missl_", + "long_missf_", + "long_missl_", + "float_missf_", + "float_missl_", + "double_missf_", + "double_missl_", + "date_missf_", + "date_missl_", + "enum_missf_", + "enum_missl_", + "bool_missf_", + "bool_missl_" + }, + new String[] {"_dv"}); + checkFields( + new String[] { + "stxt_", // no expectation on missing first/last + "stxt_missf_", + "stxt_missl_" + }, + new String[] {"_dv"}); + checkFields( + new String[] {"stxt_"}, // no expectation on missing first/last + new String[] {"_nodv", "_dv"}); + checkFields(new String[] {"stxt_missf_", "stxt_missl_"}, new String[] {"_dv"}); + } + private static void checkFields(String[] types, String[] suffixes) { // sanity check the expected properties of our fields (ie: who broke the 
schema?) IndexSchema schema = h.getCore().getLatestSchema(); @@ -75,33 +92,32 @@ private static void checkFields(String[] types, String[] suffixes) { String f = "val_" + type + "s" + suffix; SchemaField sf = schema.getField(f); assertTrue(f + " is not multivalued", sf.multiValued()); - assertEquals(f + " doesn't have expected docValues status", - ((f.contains("dv") || f.endsWith("_p") || Boolean.getBoolean(NUMERIC_DOCVALUES_SYSPROP)) - && !f.contains("nodv")), - sf.hasDocValues()); - assertEquals(f + " doesn't have expected index status", - ! f.contains("ni"), sf.indexed()); + assertEquals( + f + " doesn't have expected docValues status", + ((f.contains("dv") || f.endsWith("_p") || Boolean.getBoolean(NUMERIC_DOCVALUES_SYSPROP)) + && !f.contains("nodv")), + sf.hasDocValues()); + assertEquals(f + " doesn't have expected index status", !f.contains("ni"), sf.indexed()); if (f.contains("miss")) { // if name contains "miss" assert that the missing first/last props match // but don't make any asserts about fields w/o that in name // (schema11.xml's strings have some preexisting silliness that doesn't affect us) - assertEquals(f + " sortMissingFirst is wrong", - f.contains("missf"), sf.sortMissingFirst()); - assertEquals(f + " sortMissingLast is wrong", - f.contains("missl"), sf.sortMissingLast()); + assertEquals( + f + " sortMissingFirst is wrong", f.contains("missf"), sf.sortMissingFirst()); + assertEquals(f + " sortMissingLast is wrong", f.contains("missl"), sf.sortMissingLast()); } } } } - + /** Deletes all docs (which may be left over from a previous test) */ @Before public void before() throws Exception { clearIndex(); assertU(commit()); } - + public void testBasics() throws Exception { testBasics("val_tis_dv", "val_tls_dv", "val_tfs_dv", "val_tds_dv"); testBasics("val_tis_ni_dv", "val_tls_ni_dv", "val_tfs_ni_dv", "val_tds_ni_dv"); @@ -109,133 +125,194 @@ public void testBasics() throws Exception { testBasics("val_is_ni_p", "val_ls_ni_p", "val_fs_ni_p", "val_ds_ni_p"); } - private void testBasics(String intField, String longField, String floatField, String doubleField) throws Exception { - assertTrue("Unexpected int field", h.getCore().getLatestSchema().getField(intField).getType() instanceof IntValueFieldType); - assertTrue("Unexpected long field", h.getCore().getLatestSchema().getField(longField).getType() instanceof LongValueFieldType); - assertTrue("Unexpected float field", h.getCore().getLatestSchema().getField(floatField).getType() instanceof FloatValueFieldType); - assertTrue("Unexpected double field", h.getCore().getLatestSchema().getField(doubleField).getType() instanceof DoubleValueFieldType); + private void testBasics(String intField, String longField, String floatField, String doubleField) + throws Exception { + assertTrue( + "Unexpected int field", + h.getCore().getLatestSchema().getField(intField).getType() instanceof IntValueFieldType); + assertTrue( + "Unexpected long field", + h.getCore().getLatestSchema().getField(longField).getType() instanceof LongValueFieldType); + assertTrue( + "Unexpected float field", + h.getCore().getLatestSchema().getField(floatField).getType() + instanceof FloatValueFieldType); + assertTrue( + "Unexpected double field", + h.getCore().getLatestSchema().getField(doubleField).getType() + instanceof DoubleValueFieldType); clearIndex(); - assertU(adoc(sdoc("id", "1" - // int - ,intField, "42" - ,intField, "9" - ,intField, "-54" - // long - ,longField, "420" - ,longField, "90" - ,longField, "-540" - // float - ,floatField, "-42.5" - ,floatField, "-4.5" - 
,floatField, "-13.5" - // double - ,doubleField, "-420.5" - ,doubleField, "-40.5" - ,doubleField, "-130.5" - ))); - assertU(commit()); - - assertQ(req("q","id:1" + assertU( + adoc( + sdoc( + "id", + "1" // int - ,"fl","exists_min_i:exists(field(" + intField + ",min))" - ,"fl","exists_max_i:exists(field(" + intField + ",max))" - ,"fl","min_i:field(" + intField + ",min)" - ,"fl","max_i:field(" + intField + ",max)" + , + intField, + "42", + intField, + "9", + intField, + "-54" // long - ,"fl","exists_min_l:exists(field(" + longField + ",min))" - ,"fl","exists_max_l:exists(field(" + longField + ",max))" - ,"fl","min_l:field(" + longField + ",min)" - ,"fl","max_l:field(" + longField + ",max)" + , + longField, + "420", + longField, + "90", + longField, + "-540" // float - ,"fl","exists_min_f:exists(field(" + floatField + ",min))" - ,"fl","exists_max_f:exists(field(" + floatField + ",max))" - ,"fl","min_f:field(" + floatField + ",min)" - ,"fl","max_f:field(" + floatField + ",max)" + , + floatField, + "-42.5", + floatField, + "-4.5", + floatField, + "-13.5" // double - ,"fl","exists_min_d:exists(field(" + doubleField + ",min))" - ,"fl","exists_max_d:exists(field(" + doubleField + ",max))" - ,"fl","min_d:field(" + doubleField + ",min)" - ,"fl","max_d:field(" + doubleField + ",max)" - - ) - ,"//*[@numFound='1']" + , + doubleField, + "-420.5", + doubleField, + "-40.5", + doubleField, + "-130.5"))); + assertU(commit()); + + assertQ( + req( + "q", + "id:1" // int - ,"//bool[@name='exists_min_i']='true'" - ,"//bool[@name='exists_max_i']='true'" - ,"//int[@name='min_i']='-54'" - ,"//int[@name='max_i']='42'" + , + "fl", + "exists_min_i:exists(field(" + intField + ",min))", + "fl", + "exists_max_i:exists(field(" + intField + ",max))", + "fl", + "min_i:field(" + intField + ",min)", + "fl", + "max_i:field(" + intField + ",max)" // long - ,"//bool[@name='exists_min_l']='true'" - ,"//bool[@name='exists_max_l']='true'" - ,"//long[@name='min_l']='-540'" - ,"//long[@name='max_l']='420'" + , + "fl", + "exists_min_l:exists(field(" + longField + ",min))", + "fl", + "exists_max_l:exists(field(" + longField + ",max))", + "fl", + "min_l:field(" + longField + ",min)", + "fl", + "max_l:field(" + longField + ",max)" // float - ,"//bool[@name='exists_min_f']='true'" - ,"//bool[@name='exists_max_f']='true'" - ,"//float[@name='min_f']='-42.5'" - ,"//float[@name='max_f']='-4.5'" + , + "fl", + "exists_min_f:exists(field(" + floatField + ",min))", + "fl", + "exists_max_f:exists(field(" + floatField + ",max))", + "fl", + "min_f:field(" + floatField + ",min)", + "fl", + "max_f:field(" + floatField + ",max)" // double - ,"//bool[@name='exists_min_d']='true'" - ,"//bool[@name='exists_max_d']='true'" - ,"//double[@name='min_d']='-420.5'" - ,"//double[@name='max_d']='-40.5'" - ); + , + "fl", + "exists_min_d:exists(field(" + doubleField + ",min))", + "fl", + "exists_max_d:exists(field(" + doubleField + ",max))", + "fl", + "min_d:field(" + doubleField + ",min)", + "fl", + "max_d:field(" + doubleField + ",max)"), + "//*[@numFound='1']" + // int + , + "//bool[@name='exists_min_i']='true'", + "//bool[@name='exists_max_i']='true'", + "//int[@name='min_i']='-54'", + "//int[@name='max_i']='42'" + // long + , + "//bool[@name='exists_min_l']='true'", + "//bool[@name='exists_max_l']='true'", + "//long[@name='min_l']='-540'", + "//long[@name='max_l']='420'" + // float + , + "//bool[@name='exists_min_f']='true'", + "//bool[@name='exists_max_f']='true'", + "//float[@name='min_f']='-42.5'", + "//float[@name='max_f']='-4.5'" + // double + , + 
"//bool[@name='exists_min_d']='true'", + "//bool[@name='exists_max_d']='true'", + "//double[@name='min_d']='-420.5'", + "//double[@name='max_d']='-40.5'"); } public void testBasicStrings() { checkBasicStrings("val_strs_dv"); } + public void testBasicSortableText() { checkBasicStrings("val_stxt_s_dv"); checkBasicStrings("val_stxt_missf_s_dv"); checkBasicStrings("val_stxt_missl_s_dv"); } + private void checkBasicStrings(final String field) { - assertU(adoc(sdoc("id", "1", - field, "dog", - field, "xyz", - field, "cat"))); + assertU(adoc(sdoc("id", "1", field, "dog", field, "xyz", field, "cat"))); assertU(adoc(sdoc("id", "2"))); // 2 has no values in tested field assertU(commit()); // id=1: has values - assertQ(req("q","id:1" - ,"fl","exists_min_str:exists(field("+field+",min))" - ,"fl","exists_max_str:exists(field("+field+",max))" - ,"fl","min_str:field("+field+",min)" - ,"fl","max_str:field("+field+",max)" - - ) - ,"//*[@numFound='1']" - ,"//bool[@name='exists_min_str']='true'" - ,"//bool[@name='exists_max_str']='true'" - ,"//str[@name='min_str']='cat'" - ,"//str[@name='max_str']='xyz'" - ); + assertQ( + req( + "q", + "id:1", + "fl", + "exists_min_str:exists(field(" + field + ",min))", + "fl", + "exists_max_str:exists(field(" + field + ",max))", + "fl", + "min_str:field(" + field + ",min)", + "fl", + "max_str:field(" + field + ",max)"), + "//*[@numFound='1']", + "//bool[@name='exists_min_str']='true'", + "//bool[@name='exists_max_str']='true'", + "//str[@name='min_str']='cat'", + "//str[@name='max_str']='xyz'"); // id=2: no values - assertQ(req("q","id:2" - ,"fl","exists_min_str:exists(field("+field+",min))" - ,"fl","exists_max_str:exists(field("+field+",max))" - ,"fl","min_str:field("+field+",min)" - ,"fl","max_str:field("+field+",max)" - - ) - ,"//*[@numFound='1']" - ,"//bool[@name='exists_min_str']='false'" - ,"//bool[@name='exists_max_str']='false'" - ,"count(//*[@name='min_str'])=0" - ,"count(//*[@name='max_str'])=0" - ); + assertQ( + req( + "q", + "id:2", + "fl", + "exists_min_str:exists(field(" + field + ",min))", + "fl", + "exists_max_str:exists(field(" + field + ",max))", + "fl", + "min_str:field(" + field + ",min)", + "fl", + "max_str:field(" + field + ",max)"), + "//*[@numFound='1']", + "//bool[@name='exists_min_str']='false'", + "//bool[@name='exists_max_str']='false'", + "count(//*[@name='min_str'])=0", + "count(//*[@name='max_str'])=0"); } public void testExpectedSortOrderingStrings() { - testExpectedSortOrdering("val_strs_dv", false, - null, "a", "cat", "dog", "wako", "xyz", "zzzzz"); + testExpectedSortOrdering("val_strs_dv", false, null, "a", "cat", "dog", "wako", "xyz", "zzzzz"); } + public void testExpectedSortOrderingSortableText() { - testExpectedSortOrdering("val_stxt_s_dv", false, - null, "a", "cat", "dog", "wako", "xyz", "zzzzz"); + testExpectedSortOrdering( + "val_stxt_s_dv", false, null, "a", "cat", "dog", "wako", "xyz", "zzzzz"); } public void testExpectedSortMissingOrderings() { @@ -244,45 +321,56 @@ public void testExpectedSortMissingOrderings() { // (in this simple test) we aren't using a secondary sort, so there is no way to disambiguate // docs that have those values from docs that have those *effective* sort values - testSortMissingMinMax("val_str", "a", "aaaaaa", "xxxxx", "zzzzzzzzzzzzzzzzzzz"); + testSortMissingMinMax("val_str", "a", "aaaaaa", "xxxxx", "zzzzzzzzzzzzzzzzzzz"); testSortMissingMinMax("val_stxt", "a", "aaaaaa", "xxxxx", "zzzzzzzzzzzzzzzzzzz"); - - testSortMissingMinMax("val_int", - Integer.MIN_VALUE+1L, -9999, 0, 99999, Integer.MAX_VALUE-1L); - 
testSortMissingMinMax("val_long", - Long.MIN_VALUE+1L, -99999999L, 0, 9999999999L, Long.MAX_VALUE-1L); - testSortMissingMinMax("val_float", - Math.nextAfter(Float.NEGATIVE_INFINITY, 0F), -99.99F, - 0F, 99.99F, Math.nextAfter(Float.POSITIVE_INFINITY, 0F)); - testSortMissingMinMax("val_double", - Math.nextAfter(Double.NEGATIVE_INFINITY, 0D), -99.99D, - 0D, 99.99D, Math.nextAfter(Double.POSITIVE_INFINITY, 0F)); - testSortMissingMinMax("val_date", - "-1000000-01-01T00:00:00Z", "NOW-1YEAR", "NOW", "NOW+1YEAR", "+1000000-01-01T00:00:00Z"); + + testSortMissingMinMax( + "val_int", Integer.MIN_VALUE + 1L, -9999, 0, 99999, Integer.MAX_VALUE - 1L); + testSortMissingMinMax( + "val_long", Long.MIN_VALUE + 1L, -99999999L, 0, 9999999999L, Long.MAX_VALUE - 1L); + testSortMissingMinMax( + "val_float", + Math.nextAfter(Float.NEGATIVE_INFINITY, 0F), + -99.99F, + 0F, + 99.99F, + Math.nextAfter(Float.POSITIVE_INFINITY, 0F)); + testSortMissingMinMax( + "val_double", + Math.nextAfter(Double.NEGATIVE_INFINITY, 0D), + -99.99D, + 0D, + 99.99D, + Math.nextAfter(Double.POSITIVE_INFINITY, 0F)); + testSortMissingMinMax( + "val_date", + "-1000000-01-01T00:00:00Z", + "NOW-1YEAR", + "NOW", + "NOW+1YEAR", + "+1000000-01-01T00:00:00Z"); testSortMissingMinMax("val_bool", false, true); testSortMissingMinMax("val_enum", "Not Available", "Low", "High", "Critical"); - } - @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-6709") public void testIntFieldCache() { testSimpleInt("val_tis"); testExpectedSortOrderingInt("val_tis", true); } - + public void testPointInt() { testSimpleInt("val_is_p"); testSimpleInt("val_is_ni_p"); - + testExpectedSortOrderingInt("val_is_p", false); testExpectedSortOrderingInt("val_is_ni_p", false); } - + public void testIntDocValues() { testSimpleInt("val_tis_dv"); testSimpleInt("val_tis_ni_dv"); - + testExpectedSortOrderingInt("val_tis_dv", true); testExpectedSortOrderingInt("val_tis_ni_dv", true); } @@ -292,57 +380,56 @@ public void testLongFieldCache() { testSimpleLong("val_tls"); testExpectedSortOrderingLong("val_tls", true); } - + public void testLongDocValues() { testSimpleLong("val_tls_dv"); testSimpleLong("val_tls_ni_dv"); - + testExpectedSortOrderingLong("val_tls_dv", true); testExpectedSortOrderingLong("val_tls_ni_dv", true); } - + public void testPointLong() { testSimpleLong("val_ls_p"); testSimpleLong("val_ls_ni_p"); - + testExpectedSortOrderingLong("val_ls_p", false); testExpectedSortOrderingLong("val_ls_ni_p", false); } - @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-6709") public void testFloatFieldCache() { testSimpleFloat("val_tfs"); testExpectedSortOrderingFloat("val_tfs", true); } - + public void testFloatDocValues() { testSimpleFloat("val_tfs_dv"); testSimpleFloat("val_tfs_ni_dv"); - + testExpectedSortOrderingFloat("val_tfs_dv", true); testExpectedSortOrderingFloat("val_tfs_ni_dv", true); } - + public void testPointFloat() { testSimpleFloat("val_fs_p"); testSimpleFloat("val_fs_ni_p"); - + testExpectedSortOrderingFloat("val_fs_p", false); testExpectedSortOrderingFloat("val_fs_ni_p", false); } - + @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-6709") public void testDoubleFieldCache() { testSimpleDouble("val_tds"); - + testExpectedSortOrderingDouble("val_tds", true); } - + public void testDoubleDocValues() { testSimpleDouble("val_tds_dv"); testSimpleDouble("val_tds_ni_dv"); - + testExpectedSortOrderingDouble("val_tds_dv", true); testExpectedSortOrderingDouble("val_tds_ni_dv", true); } @@ -350,7 +437,7 @@ public void 
testDoubleDocValues() { public void testPointDouble() { testSimpleDouble("val_ds_p"); testSimpleDouble("val_ds_ni_p"); - + testExpectedSortOrderingDouble("val_ds_p", false); testExpectedSortOrderingDouble("val_ds_ni_p", false); } @@ -358,60 +445,69 @@ public void testPointDouble() { public void testBadRequests() { // useful error msg when bogus selector is requested (ie: not min or max) - assertQEx("no error asking for bogus selector", - "hoss", - req("q","*:*", "fl", "field(val_tds_dv,'hoss')"), - SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "no error asking for bogus selector", + "hoss", + req("q", "*:*", "fl", "field(val_tds_dv,'hoss')"), + SolrException.ErrorCode.BAD_REQUEST); - assertQEx("no error asking for bogus selector", + assertQEx( + "no error asking for bogus selector", "hoss", - req("q","*:*", "fl", "field(val_ds_p,'hoss')"), + req("q", "*:*", "fl", "field(val_ds_p,'hoss')"), SolrException.ErrorCode.BAD_REQUEST); - + // useful error until/unless LUCENE-6709 assertFalse(h.getCore().getLatestSchema().getField("val_is_ndv_p").hasDocValues()); assertTrue(h.getCore().getLatestSchema().getField("val_is_ndv_p").multiValued()); - assertQEx("no error asking for max on a non docVals field", - "val_is_ndv_p", - req("q","*:*", "fl", "field(val_is_ndv_p,'max')"), - SolrException.ErrorCode.BAD_REQUEST); - assertQEx("no error asking for max on a non docVals field", - "max", - req("q","*:*", "fl", "field(val_is_ndv_p,'max')"), - SolrException.ErrorCode.BAD_REQUEST); - assertQEx("no error asking for max on a non docVals field", - "docValues", - req("q","*:*", "fl", "field(val_is_ndv_p,'max')"), - SolrException.ErrorCode.BAD_REQUEST); - + assertQEx( + "no error asking for max on a non docVals field", + "val_is_ndv_p", + req("q", "*:*", "fl", "field(val_is_ndv_p,'max')"), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "no error asking for max on a non docVals field", + "max", + req("q", "*:*", "fl", "field(val_is_ndv_p,'max')"), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "no error asking for max on a non docVals field", + "docValues", + req("q", "*:*", "fl", "field(val_is_ndv_p,'max')"), + SolrException.ErrorCode.BAD_REQUEST); + // useful error if min/max is unsupported for fieldtype - assertQEx("no error mentioning field name when asking for max on type that doesn't support it", - "cat_length", - req("q","*:*", "fl", "field(cat_length,'max')"), - SolrException.ErrorCode.BAD_REQUEST); - assertQEx("no error mentioning type when asking for max on type that doesn't support it", - "text_length", - req("q","*:*", "fl", "field(cat_length,'max')"), - SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "no error mentioning field name when asking for max on type that doesn't support it", + "cat_length", + req("q", "*:*", "fl", "field(cat_length,'max')"), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "no error mentioning type when asking for max on type that doesn't support it", + "text_length", + req("q", "*:*", "fl", "field(cat_length,'max')"), + SolrException.ErrorCode.BAD_REQUEST); // type supports, but field doesn't have docValues - assertQEx("no error mentioning field name when asking for max on a non-dv str field", - "cat", - req("q","*:*", "fl", "field(cat,'max')"), - SolrException.ErrorCode.BAD_REQUEST); - assertQEx("no error mentioning 'docValues' when asking for max on a non-dv str field", - "docValues", - req("q","*:*", "fl", "field(cat,'max')"), - SolrException.ErrorCode.BAD_REQUEST); - assertQEx("no error mentioning field name when asking for max on a 
non-dv sortable text field", - "val_stxt_s_nodv", - req("q","*:*", "fl", "field(val_stxt_s_nodv,'max')"), - SolrException.ErrorCode.BAD_REQUEST); - assertQEx("no error mentioning 'docValues' when asking for max on a non-dv sortable field", - "docValues", - req("q","*:*", "fl", "field(val_stxt_s_nodv,'max')"), - SolrException.ErrorCode.BAD_REQUEST); - - + assertQEx( + "no error mentioning field name when asking for max on a non-dv str field", + "cat", + req("q", "*:*", "fl", "field(cat,'max')"), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "no error mentioning 'docValues' when asking for max on a non-dv str field", + "docValues", + req("q", "*:*", "fl", "field(cat,'max')"), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "no error mentioning field name when asking for max on a non-dv sortable text field", + "val_stxt_s_nodv", + req("q", "*:*", "fl", "field(val_stxt_s_nodv,'max')"), + SolrException.ErrorCode.BAD_REQUEST); + assertQEx( + "no error mentioning 'docValues' when asking for max on a non-dv sortable field", + "docValues", + req("q", "*:*", "fl", "field(val_stxt_s_nodv,'max')"), + SolrException.ErrorCode.BAD_REQUEST); } public void testRandom() throws Exception { @@ -436,7 +532,7 @@ public void testRandom() throws Exception { testSimpleValues("val_ls_p", long.class, vals); testSimpleValues("val_tls_ni_dv", long.class, vals); testSimpleValues("val_ls_ni_p", long.class, vals); - + // random floats for (int i = 0; i < vals.length; i++) { // Random.nextFloat is lame @@ -450,7 +546,7 @@ public void testRandom() throws Exception { testSimpleValues("val_fs_p", float.class, vals); testSimpleValues("val_tfs_ni_dv", float.class, vals); testSimpleValues("val_fs_ni_p", float.class, vals); - + // random doubles for (int i = 0; i < vals.length; i++) { // Random.nextDouble is lame @@ -464,15 +560,15 @@ public void testRandom() throws Exception { testSimpleValues("val_ds_p", double.class, vals); testSimpleValues("val_tds_ni_dv", double.class, vals); testSimpleValues("val_ds_ni_p", double.class, vals); - } - - /** @see #testSimpleValues */ + /** + * @see #testSimpleValues + */ protected void testSimpleInt(final String fieldname) { // most basic case testSimpleValues(fieldname, int.class, 0); - + // order of values shouldn't matter testSimpleValues(fieldname, int.class, -42, 51, 3); testSimpleValues(fieldname, int.class, 51, 3, -42); @@ -483,12 +579,14 @@ protected void testSimpleInt(final String fieldname) { testSimpleSort(fieldname, -42, 666); } - - /** @see #testSimpleValues */ + + /** + * @see #testSimpleValues + */ protected void testSimpleLong(final String fieldname) { // most basic case testSimpleValues(fieldname, long.class, 0); - + // order of values shouldn't matter testSimpleValues(fieldname, long.class, -42L, 51L, 3L); testSimpleValues(fieldname, long.class, 51L, 3L, -42L); @@ -496,57 +594,67 @@ protected void testSimpleLong(final String fieldname) { // extreme's of the data type testSimpleValues(fieldname, long.class, Long.MIN_VALUE, 42L, -550L); testSimpleValues(fieldname, long.class, Long.MAX_VALUE, 0L, Long.MIN_VALUE); - + testSimpleSort(fieldname, -42, 666); } - - /** @see #testSimpleValues */ + + /** + * @see #testSimpleValues + */ protected void testSimpleFloat(final String fieldname) { // most basic case testSimpleValues(fieldname, float.class, 0.0F); - + // order of values shouldn't matter testSimpleValues(fieldname, float.class, -42.5F, 51.3F, 3.1415F); testSimpleValues(fieldname, float.class, 51.3F, 3.1415F, -42.5F); // extreme's of the data type 
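The BAD_REQUEST assertions above boil down to one rule: min/max selection needs docValues. Under the hood this corresponds to Lucene's sorted docValues selectors; the following is a hedged sketch of that mechanism, illustrative only and not the exact Solr code path:

import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.SortedNumericSelector;
import org.apache.lucene.search.SortedNumericSortField;

class SelectorSketch {
  /** Sort by the largest value in a multivalued SORTED_NUMERIC docValues field. */
  static Sort byMaxDescending(String field) {
    // SortedNumericSelector picks one value per document (MIN or MAX);
    // a field without docValues has nothing to pick from, hence the 400s above.
    return new Sort(
        new SortedNumericSortField(
            field, SortField.Type.LONG, true, SortedNumericSelector.Type.MAX));
  }
}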
testSimpleValues(fieldname, float.class, Float.NEGATIVE_INFINITY, 42.5F, -550.4F); - testSimpleValues(fieldname, float.class, Float.POSITIVE_INFINITY, 0.0F, Float.NEGATIVE_INFINITY); - + testSimpleValues( + fieldname, float.class, Float.POSITIVE_INFINITY, 0.0F, Float.NEGATIVE_INFINITY); + testSimpleSort(fieldname, -4.2, 6.66); } - - /** @see #testSimpleValues */ + + /** + * @see #testSimpleValues + */ protected void testSimpleDouble(final String fieldname) { // most basic case testSimpleValues(fieldname, double.class, 0.0D); - + // order of values shouldn't matter testSimpleValues(fieldname, double.class, -42.5D, 51.3D, 3.1415D); testSimpleValues(fieldname, double.class, 51.3D, 3.1415D, -42.5D); // extreme's of the data type testSimpleValues(fieldname, double.class, Double.NEGATIVE_INFINITY, 42.5D, -550.4D); - testSimpleValues(fieldname, double.class, Double.POSITIVE_INFINITY, 0.0D, Double.NEGATIVE_INFINITY); - + testSimpleValues( + fieldname, double.class, Double.POSITIVE_INFINITY, 0.0D, Double.NEGATIVE_INFINITY); + testSimpleSort(fieldname, -4.2, 6.66); } - - /** Tests a single doc with a few explicit values, as well as testing exists with and w/o values */ + + /** + * Tests a single doc with a few explicit values, as well as testing exists with and w/o values + */ @SuppressWarnings({"unchecked"}) - protected void testSimpleValues(final String fieldname, final Class clazz, - @SuppressWarnings({"rawtypes"})final Comparable... vals) { + protected void testSimpleValues( + final String fieldname, + final Class clazz, + @SuppressWarnings({"rawtypes"}) final Comparable... vals) { clearIndex(); - + assert 0 < vals.length; @SuppressWarnings({"rawtypes"}) Comparable min = vals[0]; @SuppressWarnings({"rawtypes"}) Comparable max = vals[0]; - + final String type = clazz.getName(); final SolrInputDocument doc1 = sdoc("id", "1"); - for (@SuppressWarnings({"rawtypes"})Comparable v : vals) { + for (@SuppressWarnings({"rawtypes"}) Comparable v : vals) { doc1.addField(fieldname, v); if (0 < min.compareTo(v)) { min = v; @@ -560,246 +668,314 @@ protected void testSimpleValues(final String fieldname, final Class clazz, assertU(commit()); // doc with values - assertQ(fieldname, - req("q","id:1", - "fl","exists_val_min:exists(field("+fieldname+",min))", - "fl","exists_val_max:exists(field("+fieldname+",max))", - "fl","val_min:field("+fieldname+",min)", - "fl","val_max:field("+fieldname+",max)") - ,"//*[@numFound='1']" - ,"//bool[@name='exists_val_min']='true'" - ,"//bool[@name='exists_val_max']='true'" - ,"//"+type+"[@name='val_min']='"+min+"'" - ,"//"+type+"[@name='val_max']='"+max+"'" - ); + assertQ( + fieldname, + req( + "q", "id:1", + "fl", "exists_val_min:exists(field(" + fieldname + ",min))", + "fl", "exists_val_max:exists(field(" + fieldname + ",max))", + "fl", "val_min:field(" + fieldname + ",min)", + "fl", "val_max:field(" + fieldname + ",max)"), + "//*[@numFound='1']", + "//bool[@name='exists_val_min']='true'", + "//bool[@name='exists_val_max']='true'", + "//" + type + "[@name='val_min']='" + min + "'", + "//" + type + "[@name='val_max']='" + max + "'"); // doc w/o values - assertQ(fieldname, - req("q","id:2", - "fl","exists_val_min:exists(field("+fieldname+",min))", - "fl","exists_val_max:exists(field("+fieldname+",max))", - "fl","val_min:field("+fieldname+",min)", - "fl","val_max:field("+fieldname+",max)") - ,"//*[@numFound='1']" - ,"//bool[@name='exists_val_min']='false'" - ,"//bool[@name='exists_val_max']='false'" - ,"count(//"+type+"[@name='val_min'])=0" - ,"count(//"+type+"[@name='val_max'])=0" 
- ); + assertQ( + fieldname, + req( + "q", "id:2", + "fl", "exists_val_min:exists(field(" + fieldname + ",min))", + "fl", "exists_val_max:exists(field(" + fieldname + ",max))", + "fl", "val_min:field(" + fieldname + ",min)", + "fl", "val_max:field(" + fieldname + ",max)"), + "//*[@numFound='1']", + "//bool[@name='exists_val_min']='false'", + "//bool[@name='exists_val_max']='false'", + "count(//" + type + "[@name='val_min'])=0", + "count(//" + type + "[@name='val_max'])=0"); // sanity check no sort error when there are missing values - for (String dir : new String[] { "asc", "desc" }) { - for (String mm : new String[] { "min", "max" }) { - for (String func : new String[] { "field("+fieldname+","+mm+")", - "def(field("+fieldname+","+mm+"),42)", - "sum(32,field("+fieldname+","+mm+"))" }) { - assertQ(fieldname, - req("q","*:*", - "fl", "id", - "sort", func + " " + dir) - ,"//*[@numFound='2']" - // no assumptions about order for now, see bug: SOLR-8005 - ,"//str[@name='id']='1'" - ,"//str[@name='id']='2'" - ); + for (String dir : new String[] {"asc", "desc"}) { + for (String mm : new String[] {"min", "max"}) { + for (String func : + new String[] { + "field(" + fieldname + "," + mm + ")", + "def(field(" + fieldname + "," + mm + "),42)", + "sum(32,field(" + fieldname + "," + mm + "))" + }) { + assertQ( + fieldname, + req( + "q", "*:*", + "fl", "id", + "sort", func + " " + dir), + "//*[@numFound='2']" + // no assumptions about order for now, see bug: SOLR-8005 + , + "//str[@name='id']='1'", + "//str[@name='id']='2'"); } } } } - /** + /** * Tests sort order of min/max realtive to other docs w/o any values. + * * @param fieldname The field to test - * @param negative a "negative" value for this field (ie: in a function context, is less then the "0") - * @param positive a "positive" value for this field (ie: in a function context, is more then the "0") + * @param negative a "negative" value for this field (ie: in a function context, is less then the + * "0") + * @param positive a "positive" value for this field (ie: in a function context, is more then the + * "0") */ - protected void testSimpleSort(final String fieldname, - @SuppressWarnings({"rawtypes"})final Comparable negative, - @SuppressWarnings({"rawtypes"})final Comparable positive) { + protected void testSimpleSort( + final String fieldname, + @SuppressWarnings({"rawtypes"}) final Comparable negative, + @SuppressWarnings({"rawtypes"}) final Comparable positive) { clearIndex(); int numDocsExpected = 1; for (int i = 1; i < 4; i++) { // pos docids if (random().nextBoolean()) { - assertU(adoc(sdoc("id",i))); // fieldname doesn't exist + assertU(adoc(sdoc("id", i))); // fieldname doesn't exist numDocsExpected++; } } - - assertU(adoc(sdoc("id", "0", - fieldname, negative, - fieldname, positive))); - + + assertU(adoc(sdoc("id", "0", fieldname, negative, fieldname, positive))); + for (int i = 1; i < 4; i++) { // neg docids if (random().nextBoolean()) { - assertU(adoc(sdoc("id",-i))); // fieldname doesn't exist + assertU(adoc(sdoc("id", -i))); // fieldname doesn't exist numDocsExpected++; } } assertU(commit()); // need to wrap with "def" until SOLR-8005 is resolved - assertDocWithValsIsFirst(numDocsExpected, "def(field("+fieldname+",min),0) asc"); - assertDocWithValsIsLast(numDocsExpected, "def(field("+fieldname+",min),0) desc"); - - assertDocWithValsIsFirst(numDocsExpected, "def(field("+fieldname+",max),0) desc"); - assertDocWithValsIsLast(numDocsExpected, "def(field("+fieldname+",max),0) asc"); + assertDocWithValsIsFirst(numDocsExpected, 
"def(field(" + fieldname + ",min),0) asc"); + assertDocWithValsIsLast(numDocsExpected, "def(field(" + fieldname + ",min),0) desc"); + + assertDocWithValsIsFirst(numDocsExpected, "def(field(" + fieldname + ",max),0) desc"); + assertDocWithValsIsLast(numDocsExpected, "def(field(" + fieldname + ",max),0) asc"); // def wrapper shouldn't be needed since it's already part of another function - assertDocWithValsIsFirst(numDocsExpected, "sum(32,field("+fieldname+",max)) desc"); - assertDocWithValsIsLast(numDocsExpected, "sum(32,field("+fieldname+",max)) asc"); - - assertDocWithValsIsFirst(numDocsExpected, "sum(32,field("+fieldname+",min)) asc"); - assertDocWithValsIsLast(numDocsExpected, "sum(32,field("+fieldname+",min)) desc"); + assertDocWithValsIsFirst(numDocsExpected, "sum(32,field(" + fieldname + ",max)) desc"); + assertDocWithValsIsLast(numDocsExpected, "sum(32,field(" + fieldname + ",max)) asc"); + + assertDocWithValsIsFirst(numDocsExpected, "sum(32,field(" + fieldname + ",min)) asc"); + assertDocWithValsIsLast(numDocsExpected, "sum(32,field(" + fieldname + ",min)) desc"); } /** helper for testSimpleSort */ private static void assertDocWithValsIsFirst(final int numDocs, final String sort) { - assertQ(sort, - req("q","*:*", "rows", ""+numDocs, "sort", sort) - ,"//result[@numFound='"+numDocs+"']" - ,"//result/doc[1]/str[@name='id']='0'" - ); + assertQ( + sort, + req("q", "*:*", "rows", "" + numDocs, "sort", sort), + "//result[@numFound='" + numDocs + "']", + "//result/doc[1]/str[@name='id']='0'"); } /** helper for testSimpleSort */ private static void assertDocWithValsIsLast(final int numDocs, final String sort) { - assertQ(sort, - req("q","*:*", "rows", ""+numDocs, "sort", sort) - ,"//result[@numFound='"+numDocs+"']" - ,"//result/doc["+numDocs+"]/str[@name='id']='0'" - ); + assertQ( + sort, + req("q", "*:*", "rows", "" + numDocs, "sort", sort), + "//result[@numFound='" + numDocs + "']", + "//result/doc[" + numDocs + "]/str[@name='id']='0'"); } - /** @see #testExpectedSortOrdering */ + /** + * @see #testExpectedSortOrdering + */ private void testExpectedSortOrderingInt(final String f, final boolean trieFieldHack) { // first a quick test where every doc has a value - testExpectedSortOrdering(f, trieFieldHack, - Integer.MIN_VALUE, -9999, 0, 1000, Integer.MAX_VALUE); + testExpectedSortOrdering( + f, trieFieldHack, Integer.MIN_VALUE, -9999, 0, 1000, Integer.MAX_VALUE); // now where one doc has no values - testExpectedSortOrdering(f, trieFieldHack, - Integer.MIN_VALUE, -9999, -42, -15, -3, - null, 7, 53, 1000, 121212112, Integer.MAX_VALUE); + testExpectedSortOrdering( + f, + trieFieldHack, + Integer.MIN_VALUE, + -9999, + -42, + -15, + -3, + null, + 7, + 53, + 1000, + 121212112, + Integer.MAX_VALUE); } - - /** @see #testExpectedSortOrdering */ + + /** + * @see #testExpectedSortOrdering + */ private void testExpectedSortOrderingLong(final String f, final boolean trieFieldHack) { // first a quick test where every doc has a value - testExpectedSortOrdering(f, trieFieldHack, - Long.MIN_VALUE, -4200L, 0, 121212112, Long.MAX_VALUE); + testExpectedSortOrdering( + f, trieFieldHack, Long.MIN_VALUE, -4200L, 0, 121212112, Long.MAX_VALUE); // now where one doc has no values - testExpectedSortOrdering(f, trieFieldHack, - Long.MIN_VALUE, ((long)Integer.MIN_VALUE)-1L, -4200L, - -150L, -3L, null, 70L, 530L, 121212112, - 1L+(long)Integer.MAX_VALUE, Long.MAX_VALUE); - - } - - /** @see #testExpectedSortOrdering */ + testExpectedSortOrdering( + f, + trieFieldHack, + Long.MIN_VALUE, + ((long) Integer.MIN_VALUE) - 1L, + 
-4200L, + -150L, + -3L, + null, + 70L, + 530L, + 121212112, + 1L + (long) Integer.MAX_VALUE, + Long.MAX_VALUE); + } + + /** + * @see #testExpectedSortOrdering + */ private void testExpectedSortOrderingFloat(final String f, final boolean trieFieldHack) { // first a quick test where every doc has a value - testExpectedSortOrdering(f, trieFieldHack, - Float.NEGATIVE_INFINITY, -15.0, 0F, 121212.112, Float.POSITIVE_INFINITY); + testExpectedSortOrdering( + f, trieFieldHack, Float.NEGATIVE_INFINITY, -15.0, 0F, 121212.112, Float.POSITIVE_INFINITY); // now where one doc has no values - testExpectedSortOrdering(f, trieFieldHack, - Float.NEGATIVE_INFINITY, -9999.999, -42.3, -15.0, -0.3, - null, 0.7, 5.3, 1000, 121212.112, Float.POSITIVE_INFINITY); - + testExpectedSortOrdering( + f, + trieFieldHack, + Float.NEGATIVE_INFINITY, + -9999.999, + -42.3, + -15.0, + -0.3, + null, + 0.7, + 5.3, + 1000, + 121212.112, + Float.POSITIVE_INFINITY); } - - /** @see #testExpectedSortOrdering */ + + /** + * @see #testExpectedSortOrdering + */ private void testExpectedSortOrderingDouble(final String f, final boolean trieFieldHack) { // first a quick test where every doc has a value - testExpectedSortOrdering(f, trieFieldHack, - Double.NEGATIVE_INFINITY, -9999.999D, - 0D, 121212.112D, Double.POSITIVE_INFINITY); + testExpectedSortOrdering( + f, + trieFieldHack, + Double.NEGATIVE_INFINITY, + -9999.999D, + 0D, + 121212.112D, + Double.POSITIVE_INFINITY); // now where one doc has no values - testExpectedSortOrdering(f, trieFieldHack, - Double.NEGATIVE_INFINITY, -9999.999D, -42.3D, -15.0D, -0.3D, - null, 0.7D, 5.3D, 1000, 121212.112D, Double.POSITIVE_INFINITY); + testExpectedSortOrdering( + f, + trieFieldHack, + Double.NEGATIVE_INFINITY, + -9999.999D, + -42.3D, + -15.0D, + -0.3D, + null, + 0.7D, + 5.3D, + 1000, + 121212.112D, + Double.POSITIVE_INFINITY); } /** - * Given a fieldPrefix and a list of sorted values which may not contain null, this method tests that sortMissingLast and sortMissingFirst fields using those prefixes sort correctly when {@link #buildMultiValueSortedDocuments} is used to generate documents containing these values and an additional document with no values in the field. + * Given a fieldPrefix and a list of sorted values which may not contain + * null, this method tests that sortMissingLast and sortMissingFirst fields using those prefixes + * sort correctly when {@link #buildMultiValueSortedDocuments} is used to generate documents + * containing these values and an additional document with no values in the field. + * + *

<p>Permutations tested:
    *
-   * <p>
-   * Permutations tested:
-   * <ul>
-   *   <li>fieldPrefix + "_missf_s_dv" asc</li>
-   *   <li>fieldPrefix + "_missf_s_dv" desc</li>
-   *   <li>fieldPrefix + "_missl_s_dv" asc</li>
-   *   <li>fieldPrefix + "_missl_s_dv" desc</li>
+   * <ul>
+   *   <li>fieldPrefix + "_missf_s_dv" asc
+   *   <li>fieldPrefix + "_missf_s_dv" desc
+   *   <li>fieldPrefix + "_missl_s_dv" asc
+   *   <li>fieldPrefix + "_missl_s_dv" desc
    * </ul>
* * @see #buildMultiValueSortedDocuments * @see #testExpectedSortOrdering(String,List) */ - private void testSortMissingMinMax(final String fieldPrefix, - Object... sortedValues) { + private void testSortMissingMinMax(final String fieldPrefix, Object... sortedValues) { for (Object obj : sortedValues) { // sanity check assertNotNull("this helper method can't be used with 'null' values", obj); } - + for (String suffix : Arrays.asList("_missf_s_dv", "_missl_s_dv")) { final String f = fieldPrefix + suffix; final boolean first = f.contains("missf"); - + final List<Object> asc_vals = new ArrayList<>(sortedValues.length + 1); Collections.addAll(asc_vals, sortedValues); final List<Object> desc_vals = new ArrayList<>(sortedValues.length + 1); Collections.addAll(desc_vals, sortedValues); Collections.reverse(desc_vals); - + asc_vals.add(first ? 0 : sortedValues.length, null); desc_vals.add(first ? 0 : sortedValues.length, null); - + testExpectedSortOrdering(f + " asc", buildMultiValueSortedDocuments(f, asc_vals)); testExpectedSortOrdering(f + " desc", buildMultiValueSortedDocuments(f, desc_vals)); } } /** - * Given a (multivalued) field name and an (ascending) sorted list of values, this method uses {@link #buildMultiValueSortedDocuments} to generate and test multiple function & sort permutations ... + * Given a (multivalued) field name and an (ascending) sorted list of values, this method uses + * {@link #buildMultiValueSortedDocuments} to generate and test multiple function & sort + * permutations ... + *
    - *
* <ul>
-   *   <li>f asc (implicitly min)</li>
-   *   <li>field(f,min) asc</li>
-   *   <li>field(f,min) desc</li>
-   *   <li>f desc (implicitly max)</li>
-   *   <li>field(f,max) desc</li>
-   *   <li>field(f,max) asc</li>
+   *   <li>f asc (implicitly min)
+   *   <li>field(f,min) asc
+   *   <li>field(f,min) desc
+   *   <li>f desc (implicitly max)
+   *   <li>field(f,max) desc
+   *   <li>field(f,max) asc
    * </ul>
    *
-   * <p>
-   * NOTE: if the sortedValues includes "null" then the field must NOT use sortMissingFirst or sortMissingLast
-   *
+   *
+   * <p>
NOTE: if the sortedValues includes "null" then the field must NOT use + * sortMissingFirst or sortMissingLast * * @param f the field to test - * @param trieFieldHack if this param and {@link #NUMERIC_POINTS_SYSPROP} are both true, then the field(f,min|max) functions will be wrapped in def(...,0) and the implicit f asc|desc syntax will not be tested -- see SOLR-8005 for the reason. + * @param trieFieldHack if this param and {@link #NUMERIC_POINTS_SYSPROP} are both true, then the + * field(f,min|max) functions will be wrapped in def(...,0) and the + * implicit f asc|desc syntax will not be tested -- see SOLR-8005 for the reason. * @param sortedValues the values to use when building the docs and validating the sort - * * @see #buildMultiValueSortedDocuments * @see #testExpectedSortOrdering(String,List) * @see #clearIndex */ - private void testExpectedSortOrdering(final String f, boolean trieFieldHack, - Object... sortedValues) { + private void testExpectedSortOrdering( + final String f, boolean trieFieldHack, Object... sortedValues) { SchemaField sf = h.getCore().getLatestSchema().getField(f); - assertFalse("this utility method does not work with fields that are sortMissingFirst|Last: " + f, - sf.sortMissingFirst() || sf.sortMissingLast()); - + assertFalse( + "this utility method does not work with fields that are sortMissingFirst|Last: " + f, + sf.sortMissingFirst() || sf.sortMissingLast()); + // make a copy we can re-order later final List vals = new ArrayList(sortedValues.length); Collections.addAll(vals, sortedValues); - - String minFunc = "field("+f+",min)"; - String maxFunc = "field("+f+",max)"; + + String minFunc = "field(" + f + ",min)"; + String maxFunc = "field(" + f + ",max)"; if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) { // we don't need to mess with this hack at all if we're using all point numerics @@ -810,7 +986,7 @@ private void testExpectedSortOrdering(final String f, boolean trieFieldHack, // if this line of code stops compiling, then trie fields have been removed from solr // and the entire trieFieldHack param should be removed from this method (and callers) && null != TrieField.class) { - + // the SOLR-8005 hack is only needed if/when a doc has no value... trieFieldHack = false; // assume we're safe for (Object val : vals) { @@ -827,49 +1003,50 @@ private void testExpectedSortOrdering(final String f, boolean trieFieldHack, maxFunc = "def(" + maxFunc + ",0)"; // and we can't test implicit min/max default behavior... 
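To make the trieFieldHack concrete: wrapping the function in def(...,0) gives value-less documents an explicit stand-in, which is the SOLR-8005 workaround the comments above describe. A sketch with a hypothetical field name, not part of this patch:

import org.apache.solr.client.solrj.SolrQuery;

class DefSortSketch {
  /** Sort on the per-doc minimum, treating docs with no values as 0. */
  static SolrQuery sortByMinWithDefault(String field) {
    SolrQuery q = new SolrQuery("*:*");
    // def(x, 0) substitutes 0 whenever x does not exist for a document,
    // which is exactly the hack described for trie fields above.
    q.set("sort", "def(field(" + field + ",min),0) asc");
    return q;
  }
}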
} - + // // // // min - + final List<SolrInputDocument> min_asc = buildMultiValueSortedDocuments(f, vals); - + // explicit min + asc testExpectedSortOrdering(minFunc + " asc", min_asc); // implicit: asc -> min if (!trieFieldHack) testExpectedSortOrdering(f + " asc", min_asc); - + final List<SolrInputDocument> min_desc = new ArrayList<>(min_asc); Collections.reverse(min_desc); - + // explicit min + desc testExpectedSortOrdering(minFunc + " desc", min_desc); // // // // max Collections.reverse(vals); - + final List<SolrInputDocument> max_desc = buildMultiValueSortedDocuments(f, vals); // explicit: max + desc - testExpectedSortOrdering(maxFunc +" desc", max_desc); + testExpectedSortOrdering(maxFunc + " desc", max_desc); // implicit: desc -> max - if (!trieFieldHack) testExpectedSortOrdering(f + " desc", max_desc); + if (!trieFieldHack) testExpectedSortOrdering(f + " desc", max_desc); - + final List<SolrInputDocument> max_asc = new ArrayList<>(max_desc); Collections.reverse(max_asc); - + // explicit max + asc testExpectedSortOrdering(maxFunc + " asc", max_asc); } - + /** - * Given a sort clause, and a list of documents in sorted order, this method will clear the index - * and then add the documents in a random order (to ensure the index insertion order is not a factor) - * and then validate that a *:* query returns the documents in the original order. + * Given a sort clause, and a list of documents in sorted order, this method will clear the index + * and then add the documents in a random order (to ensure the index insertion order is not a + * factor) and then validate that a <code>*:*</code> query returns the documents in the original + * order. * * @see #buildMultiValueSortedDocuments * @see #clearIndex - */ - private void testExpectedSortOrdering(final String sort, - final List<SolrInputDocument> sortedDocs) { + */ + private void testExpectedSortOrdering( + final String sort, final List<SolrInputDocument> sortedDocs) { clearIndex(); // shuffle a copy of the doc list (to ensure index order isn't linked to uniqueKey order) @@ -883,33 +1060,41 @@ private void testExpectedSortOrdering(final String sort, // now use the original sorted docs to build up the expected sort order as a list of xpath List<String> xpaths = new ArrayList<>(sortedDocs.size() + 1); - xpaths.add("//result[@numFound='"+sortedDocs.size()+"']"); + xpaths.add("//result[@numFound='" + sortedDocs.size() + "']"); int seq = 0; for (SolrInputDocument doc : sortedDocs) { - xpaths.add("//result/doc["+(++seq)+"]/str[@name='id']='"+doc.getFieldValue("id")+"'"); + xpaths.add("//result/doc[" + (++seq) + "]/str[@name='id']='" + doc.getFieldValue("id") + "'"); } - assertQ(req("q", "*:*", "rows", "" + sortedDocs.size(), "sort", sort), - xpaths.toArray(new String[xpaths.size()])); + assertQ( + req("q", "*:*", "rows", "" + sortedDocs.size(), "sort", sort), + xpaths.toArray(new String[xpaths.size()])); } /** - * Given a (multivalued) field name and an (ascending) sorted list of values, this method will generate a List of Solr Documents of the same size such that: + * Given a (multivalued) field name and an (ascending) sorted list of values, this method will + * generate a List of Solr Documents of the same size such that: + *
    - *
* <ul>
-   *   <li>For each non-null value in the original list, the corresponding document in the result will have that value in the specified field.</li>
-   *   <li>For each null value in the original list, the corresponding document in teh result will have NO values in the specified field.</li>
-   *   <li>If a document has a value in the field, then some random number of values that come after that value in the original list may also be included in the specified field.</li>
-   *   <li>Every document in the result will have a randomly asssigned 'id', unique realitive to all other documents in the result.</li>
+   *   <li>For each non-null value in the original list, the corresponding document in the result
+   *       will have that value in the specified field.
+   *   <li>For each null value in the original list, the corresponding document in the result will
+   *       have NO values in the specified field.
+   *   <li>If a document has a value in the field, then some random number of values that come
+   *       after that value in the original list may also be included in the specified field.
+   *   <li>Every document in the result will have a randomly assigned 'id', unique relative to all
+   *       other documents in the result.
    * </ul>
*/ - private static final List<SolrInputDocument> buildMultiValueSortedDocuments(final String f, - final List<Object> vals) { + private static final List<SolrInputDocument> buildMultiValueSortedDocuments( + final String f, final List<Object> vals) { // build a list of docIds that we can shuffle (so the id order doesn't match the value order) List<Integer> ids = new ArrayList<>(vals.size()); for (int i = 0; i < vals.size(); i++) { - ids.add(i+1); + ids.add(i + 1); } Collections.shuffle(ids, random()); - + final List<SolrInputDocument> docs = new ArrayList<>(vals.size()); for (int i = 0; i < vals.size(); i++) { SolrInputDocument doc = new SolrInputDocument(); @@ -919,7 +1104,7 @@ private static final List<SolrInputDocument> buildMultiValueSortedDocuments(fina doc.addField(f, primaryValue); final int extraValCount = random().nextInt(vals.size() - i); for (int j = 0; j < extraValCount; j++) { - Object extraVal = vals.get(TestUtil.nextInt(random(), i+1, vals.size() - 1)); + Object extraVal = vals.get(TestUtil.nextInt(random(), i + 1, vals.size() - 1)); if (null != extraVal) { doc.addField(f, extraVal); } diff --git a/solr/core/src/test/org/apache/solr/search/function/TestOrdValues.java b/solr/core/src/test/org/apache/solr/search/function/TestOrdValues.java index ac94eb761da..72c4f507624 100644 --- a/solr/core/src/test/org/apache/solr/search/function/TestOrdValues.java +++ b/solr/core/src/test/org/apache/solr/search/function/TestOrdValues.java @@ -20,18 +20,15 @@ import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; +import org.apache.lucene.document.Field.Store; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.document.TextField; -import org.apache.lucene.document.Field.Store; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.RandomIndexWriter; -import org.apache.solr.SolrTestCase; -import org.apache.solr.legacy.LegacyFloatField; -import org.apache.solr.legacy.LegacyIntField; import org.apache.lucene.queries.function.FunctionQuery; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.queries.function.valuesource.FloatFieldSource; @@ -44,20 +41,22 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.TestUtil; +import org.apache.solr.SolrTestCase; +import org.apache.solr.legacy.LegacyFloatField; +import org.apache.solr.legacy.LegacyIntField; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; /** * Test search based on OrdFieldSource and ReverseOrdFieldSource. - *

<p>
- * Tests here create an index with a few documents, each having
- * an indexed "id" field.
- * The ord values of this field are later used for scoring.
- * <p>
- * The order tests use Hits to verify that docs are ordered as expected.
- * <p>
- * The exact score tests use TopDocs top to verify the exact score.
+ *
+ * <p>Tests here create an index with a few documents, each having an indexed "id" field. The ord
+ * values of this field are later used for scoring.
+ *
+ * <p>The order tests use Hits to verify that docs are ordered as expected.
+ *
+ * <p>
The exact score tests use TopDocs top to verify the exact score. */ public class TestOrdValues extends SolrTestCase { @@ -66,17 +65,13 @@ public static void beforeClass() throws Exception { createIndex(false); } - /** - * Test OrdFieldSource - */ + /** Test OrdFieldSource */ @Test public void testOrdFieldRank() throws Exception { doTestRank(ID_FIELD, true); } - /** - * Test ReverseOrdFieldSource - */ + /** Test ReverseOrdFieldSource */ @Test public void testReverseOrdFieldRank() throws Exception { doTestRank(ID_FIELD, false); @@ -98,41 +93,39 @@ private void doTestRank(String field, boolean inOrder) throws Exception { QueryUtils.check(random(), q, s); ScoreDoc[] h = s.search(q, 1000).scoreDocs; assertEquals("All docs should be matched!", N_DOCS, h.length); - String prevID = inOrder - ? "IE" // greater than all ids of docs in this test ("ID0001", etc.) - : "IC"; // smaller than all ids of docs in this test ("ID0001", etc.) + String prevID = + inOrder + ? "IE" // greater than all ids of docs in this test ("ID0001", etc.) + : "IC"; // smaller than all ids of docs in this test ("ID0001", etc.) for (int i = 0; i < h.length; i++) { String resID = s.doc(h[i].doc).get(ID_FIELD); log(i + ". score=" + h[i].score + " - " + resID); log(s.explain(q, h[i].doc)); if (inOrder) { - assertTrue("res id " + resID + " should be < prev res id " + prevID, resID.compareTo(prevID) < 0); + assertTrue( + "res id " + resID + " should be < prev res id " + prevID, resID.compareTo(prevID) < 0); } else { - assertTrue("res id " + resID + " should be > prev res id " + prevID, resID.compareTo(prevID) > 0); + assertTrue( + "res id " + resID + " should be > prev res id " + prevID, resID.compareTo(prevID) > 0); } prevID = resID; } r.close(); } - /** - * Test exact score for OrdFieldSource - */ + /** Test exact score for OrdFieldSource */ @Test public void testOrdFieldExactScore() throws Exception { doTestExactScore(ID_FIELD, true); } - /** - * Test exact score for ReverseOrdFieldSource - */ + /** Test exact score for ReverseOrdFieldSource */ @Test public void testReverseOrdFieldExactScore() throws Exception { doTestExactScore(ID_FIELD, false); } - // Test that queries based on reverse/ordFieldScore returns docs with expected score. private void doTestExactScore(String field, boolean inOrder) throws Exception { IndexReader r = DirectoryReader.open(dir); @@ -153,27 +146,33 @@ private void doTestExactScore(String field, boolean inOrder) throws Exception { log("-------- " + i + ". Explain doc " + id); log(s.explain(q, sd[i].doc)); float expectedScore = N_DOCS - i - 1; - assertEquals("score of result " + i + " should be " + expectedScore + " != " + score, expectedScore, score, TEST_SCORE_TOLERANCE_DELTA); - String expectedId = inOrder + assertEquals( + "score of result " + i + " should be " + expectedScore + " != " + score, + expectedScore, + score, + TEST_SCORE_TOLERANCE_DELTA); + String expectedId = + inOrder ? 
id2String(N_DOCS - i) // in-order ==> larger values first - : id2String(i + 1); // reverse ==> smaller values first - assertTrue("id of result " + i + " should be " + expectedId + " != " + score, expectedId.equals(id)); + : id2String(i + 1); // reverse ==> smaller values first + assertTrue( + "id of result " + i + " should be " + expectedId + " != " + score, expectedId.equals(id)); } r.close(); } - + // LUCENE-1250 public void testEqualsNull() throws Exception { OrdFieldSource ofs = new OrdFieldSource("f"); assertFalse(ofs.equals(null)); - + ReverseOrdFieldSource rofs = new ReverseOrdFieldSource("f"); assertFalse(rofs.equals(null)); } - + /** - * Actual score computation order is slightly different than assumptios - * this allows for a small amount of variation + * Actual score computation order is slightly different than assumptios this allows for a small + * amount of variation */ protected static float TEST_SCORE_TOLERANCE_DELTA = 0.001f; @@ -188,20 +187,20 @@ public void testEqualsNull() throws Exception { protected ValueSource FLOAT_VALUESOURCE = new FloatFieldSource(FLOAT_FIELD); private static final String DOC_TEXT_LINES[] = { - "Well, this is just some plain text we use for creating the ", - "test documents. It used to be a text from an online collection ", - "devoted to first aid, but if there was there an (online) lawyers ", - "first aid collection with legal advices, \"it\" might have quite ", - "probably advised one not to include \"it\"'s text or the text of ", - "any other online collection in one's code, unless one has money ", - "that one don't need and one is happy to donate for lawyers ", - "charity. Anyhow at some point, rechecking the usage of this text, ", - "it became uncertain that this text is free to use, because ", - "the web site in the disclaimer of he eBook containing that text ", - "was not responding anymore, and at the same time, in projGut, ", - "searching for first aid no longer found that eBook as well. ", - "So here we are, with a perhaps much less interesting ", - "text for the test, but oh much much safer. ", + "Well, this is just some plain text we use for creating the ", + "test documents. It used to be a text from an online collection ", + "devoted to first aid, but if there was there an (online) lawyers ", + "first aid collection with legal advices, \"it\" might have quite ", + "probably advised one not to include \"it\"'s text or the text of ", + "any other online collection in one's code, unless one has money ", + "that one don't need and one is happy to donate for lawyers ", + "charity. Anyhow at some point, rechecking the usage of this text, ", + "it became uncertain that this text is free to use, because ", + "the web site in the disclaimer of he eBook containing that text ", + "was not responding anymore, and at the same time, in projGut, ", + "searching for first aid no longer found that eBook as well. ", + "So here we are, with a perhaps much less interesting ", + "text for the test, but oh much much safer. 
", }; protected static Directory dir; @@ -234,12 +233,13 @@ protected static void createIndex(boolean doMultiSegment) throws Exception { int i = 0; while (remaining > 0) { if (done[i]) { - throw new Exception("to set this test correctly N_DOCS=" + N_DOCS + " must be primary and greater than 2!"); + throw new Exception( + "to set this test correctly N_DOCS=" + N_DOCS + " must be primary and greater than 2!"); } addDoc(iw, i); done[i] = true; i = (i + 4) % N_DOCS; - remaining --; + remaining--; } if (!doMultiSegment) { if (VERBOSE) { @@ -261,14 +261,18 @@ private static void addDoc(RandomIndexWriter iw, int i) throws Exception { FieldType customType = new FieldType(TextField.TYPE_STORED); customType.setTokenized(false); customType.setOmitNorms(true); - + f = newField(ID_FIELD, id2String(scoreAndID), customType); // for debug purposes d.add(f); d.add(new SortedDocValuesField(ID_FIELD, new BytesRef(id2String(scoreAndID)))); FieldType customType2 = new FieldType(TextField.TYPE_NOT_STORED); customType2.setOmitNorms(true); - f = newField(TEXT_FIELD, "text of doc" + scoreAndID + textLine(i), customType2); // for regular search + f = + newField( + TEXT_FIELD, + "text of doc" + scoreAndID + textLine(i), + customType2); // for regular search d.add(f); f = new LegacyIntField(INT_FIELD, scoreAndID, Store.YES); // for function scoring @@ -307,5 +311,4 @@ protected static void log(Object o) { System.out.println(o.toString()); } } - } diff --git a/solr/core/src/test/org/apache/solr/search/function/TestSortByMinMaxFunction.java b/solr/core/src/test/org/apache/solr/search/function/TestSortByMinMaxFunction.java index bf898f2f8c9..d1034cc907c 100644 --- a/solr/core/src/test/org/apache/solr/search/function/TestSortByMinMaxFunction.java +++ b/solr/core/src/test/org/apache/solr/search/function/TestSortByMinMaxFunction.java @@ -15,21 +15,22 @@ * limitations under the License. 
*/ package org.apache.solr.search.function; -import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; - +import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; /** * Split out from SortByFunctionTest due to codec support limitations for SortedSetSelector * * @see SortByFunctionTest - **/ + */ @SuppressCodecs({"SimpleText"}) // see TestSortedSetSelector public class TestSortByMinMaxFunction extends SortByFunctionTest { @Override public String[] getFieldFunctionClausesToTest() { - return new String[] { "primary_tl1", "field(primary_tl1)", - "field(multi_l_dv,max)", "field(multi_l_dv,min)" }; + return new String[] { + "primary_tl1", "field(primary_tl1)", + "field(multi_l_dv,max)", "field(multi_l_dv,min)" + }; } } diff --git a/solr/core/src/test/org/apache/solr/search/function/distance/DistanceFunctionTest.java b/solr/core/src/test/org/apache/solr/search/function/distance/DistanceFunctionTest.java index 3deffc77a74..2d0a655b2d8 100644 --- a/solr/core/src/test/org/apache/solr/search/function/distance/DistanceFunctionTest.java +++ b/solr/core/src/test/org/apache/solr/search/function/distance/DistanceFunctionTest.java @@ -21,10 +21,7 @@ import org.junit.BeforeClass; import org.junit.Test; -/** - * - * - **/ +/** */ public class DistanceFunctionTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { @@ -36,124 +33,164 @@ public void testHaversine() throws Exception { clearIndex(); assertU(adoc("id", "1", "x_td", "0", "y_td", "0")); assertU(adoc("id", "2", "x_td", "0", "y_td", String.valueOf(Math.PI / 2))); - assertU(adoc("id", "3", "x_td", String.valueOf(Math.PI / 2), "y_td", String.valueOf(Math.PI / 2))); - assertU(adoc("id", "4", "x_td", String.valueOf(Math.PI / 4), "y_td", String.valueOf(Math.PI / 4))); + assertU( + adoc("id", "3", "x_td", String.valueOf(Math.PI / 2), "y_td", String.valueOf(Math.PI / 2))); + assertU( + adoc("id", "4", "x_td", String.valueOf(Math.PI / 4), "y_td", String.valueOf(Math.PI / 4))); assertU(adoc("id", "5", "x_td", "45.0", "y_td", "45.0")); assertU(adoc("id", "6", "point", "32.5, -79.0")); assertU(adoc("id", "7", "point", "32.6, -78.0")); assertU(commit()); - //Get the haversine distance between the point 0,0 and the docs above assuming a radius of 1 - assertQ(req("fl", "*,score", "q", "{!func}hsin(1, false, x_td, y_td, 0, 0)", "fq", "id:1"), "//float[@name='score']='0.0'"); - assertQ(req("fl", "*,score", "q", "{!func}hsin(1, false, x_td, y_td, 0, 0)", "fq", "id:2"), "//float[@name='score']='" + (float) (Math.PI / 2) + "'"); - assertQ(req("fl", "*,score", "q", "{!func}hsin(1, false, x_td, y_td, 0, 0)", "fq", "id:3"), "//float[@name='score']='" + (float) (Math.PI / 2) + "'"); - assertQ(req("fl", "*,score", "q", "{!func}hsin(1, false, x_td, y_td, 0, 0)", "fq", "id:4"), "//float[@name='score']='1.0471976'"); - assertQ(req("fl", "*,score", "q", "{!func}hsin(1, true, x_td, y_td, 0, 0)", "fq", "id:5"), "//float[@name='score']='1.0471976'"); - //SOLR-2114 - assertQ(req("fl", "*,score", "q", "{!func}hsin(6371.009, true, point, vector(0, 0))", "fq", "id:6"), "//float[@name='score']='8977.814'"); + // Get the haversine distance between the point 0,0 and the docs above assuming a radius of 1 + assertQ( + req("fl", "*,score", "q", "{!func}hsin(1, false, x_td, y_td, 0, 0)", "fq", "id:1"), + "//float[@name='score']='0.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}hsin(1, false, x_td, y_td, 0, 0)", "fq", "id:2"), + "//float[@name='score']='" + (float) (Math.PI / 2) + "'"); + assertQ( + req("fl", "*,score", "q", "{!func}hsin(1, 
false, x_td, y_td, 0, 0)", "fq", "id:3"), + "//float[@name='score']='" + (float) (Math.PI / 2) + "'"); + assertQ( + req("fl", "*,score", "q", "{!func}hsin(1, false, x_td, y_td, 0, 0)", "fq", "id:4"), + "//float[@name='score']='1.0471976'"); + assertQ( + req("fl", "*,score", "q", "{!func}hsin(1, true, x_td, y_td, 0, 0)", "fq", "id:5"), + "//float[@name='score']='1.0471976'"); + // SOLR-2114 + assertQ( + req("fl", "*,score", "q", "{!func}hsin(6371.009, true, point, vector(0, 0))", "fq", "id:6"), + "//float[@name='score']='8977.814'"); } - @Test public void testLatLon() throws Exception { String sfield = "llp_km"; // primary spatial field double delta = 1e-4; // "lat,lon" order - assertU(adoc("id", "100", sfield, "1,2", "srpt_geohash_km", "1,2")); // secondary spatial is equal + assertU( + adoc("id", "100", sfield, "1,2", "srpt_geohash_km", "1,2")); // secondary spatial is equal assertU(commit()); - - assertJQ(req("defType","func", - "q","geodist(1,2,3,4)", - "fq","id:100", - "fl","id,score") - , delta - , "/response/docs/[0]/score==314.40338" - ); + + assertJQ( + req( + "defType", "func", + "q", "geodist(1,2,3,4)", + "fq", "id:100", + "fl", "id,score"), + delta, + "/response/docs/[0]/score==314.40338"); // throw in some decimal points - assertJQ(req("defType","func", - "q","geodist(1.0,2,3,4.0)", - "fq","id:100", - "fl","id,score") - , delta - , "/response/docs/[0]/score==314.40338" - ); + assertJQ( + req( + "defType", "func", + "q", "geodist(1.0,2,3,4.0)", + "fq", "id:100", + "fl", "id,score"), + delta, + "/response/docs/[0]/score==314.40338"); // default to reading pt - assertJQ(req("defType","func", - "q","geodist(1,2)", - "pt","3,4", - "fq","id:100", - "fl","id,score") - , delta - , "/response/docs/[0]/score==314.40338" - ); + assertJQ( + req( + "defType", "func", + "q", "geodist(1,2)", + "pt", "3,4", + "fq", "id:100", + "fl", "id,score"), + delta, + "/response/docs/[0]/score==314.40338"); // default to reading pt first - assertJQ(req("defType","func", - "q","geodist(1,2)", - "pt","3,4", - "sfield", sfield, - "fq","id:100", - "fl","id,score") - , delta - , "/response/docs/[0]/score==314.40338" - ); + assertJQ( + req( + "defType", "func", + "q", "geodist(1,2)", + "pt", "3,4", + "sfield", sfield, + "fq", "id:100", + "fl", "id,score"), + delta, + "/response/docs/[0]/score==314.40338"); // if pt missing, use sfield - assertJQ(req("defType","func", - "q","geodist(3,4)", - "sfield", sfield, - "fq","id:100", - "fl","id,score") - , delta - ,"/response/docs/[0]/score==314.40338" - ); + assertJQ( + req( + "defType", "func", + "q", "geodist(3,4)", + "sfield", sfield, + "fq", "id:100", + "fl", "id,score"), + delta, + "/response/docs/[0]/score==314.40338"); // if pt missing, use sfield (RPT) - assertJQ(req("defType","func", - "q","geodist(3,4)", - "sfield","srpt_geohash_km", - "fq","id:100", - "fl","id,score") - , delta - ,"/response/docs/[0]/score==314.40338" - ); - + assertJQ( + req( + "defType", + "func", + "q", + "geodist(3,4)", + "sfield", + "srpt_geohash_km", + "fq", + "id:100", + "fl", + "id,score"), + delta, + "/response/docs/[0]/score==314.40338"); + // read both pt and sfield - assertJQ(req("defType","func", - "q","geodist()","pt","3,4", - "sfield", sfield, - "fq","id:100", - "fl","id,score") - , delta - ,"/response/docs/[0]/score==314.40338" - ); + assertJQ( + req( + "defType", + "func", + "q", + "geodist()", + "pt", + "3,4", + "sfield", + sfield, + "fq", + "id:100", + "fl", + "id,score"), + delta, + "/response/docs/[0]/score==314.40338"); // read both pt and sfield (RPT) - 
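The zero-argument geodist() calls above resolve their point and field from the pt and sfield request parameters, falling back exactly as the assertions document. The same request assembled in SolrJ, as a sketch that is not part of this patch (llp_km comes from the test schema):

import org.apache.solr.client.solrj.SolrQuery;

class GeodistSketch {
  /** Distance (km) from pt to each doc's llp_km point, exposed as the score. */
  static SolrQuery distanceFrom(String latLon) {
    SolrQuery q = new SolrQuery();
    q.set("defType", "func");
    q.set("q", "geodist()");   // zero-arg form: reads pt, then sfield
    q.set("pt", latLon);       // e.g. "3,4" in "lat,lon" order
    q.set("sfield", "llp_km"); // spatial field storing each doc's location
    q.set("fl", "id,score");
    return q;
  }
}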
assertJQ(req("defType","func", - "q","geodist()","pt","3,4", - "sfield","srpt_geohash_km", - "fq","id:100", - "fl","id,score") - , delta - ,"/response/docs/[0]/score==314.40338" - ); + assertJQ( + req( + "defType", + "func", + "q", + "geodist()", + "pt", + "3,4", + "sfield", + "srpt_geohash_km", + "fq", + "id:100", + "fl", + "id,score"), + delta, + "/response/docs/[0]/score==314.40338"); // param substitution - assertJQ(req("defType","func", - "q","geodist($a)", - "a","3,4", - "sfield", sfield, - "fq","id:100", - "fl","id,score") - , delta - ,"/response/docs/[0]/score==314.40338" - ); - + assertJQ( + req( + "defType", "func", + "q", "geodist($a)", + "a", "3,4", + "sfield", sfield, + "fq", "id:100", + "fl", "id,score"), + delta, + "/response/docs/[0]/score==314.40338"); } - @Test public void testVector() throws Exception { clearIndex(); @@ -165,30 +202,98 @@ public void testVector() throws Exception { assertU(adoc("id", "6", "point", "1.0,0.0")); assertU(adoc("id", "7", "point", "5.5,10.9")); assertU(commit()); - //two dimensions, notice how we only pass in 4 value sources - assertQ(req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, 0, 0)", "fq", "id:1"), "//float[@name='score']='0.0'"); - assertQ(req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, 0, 0)", "fq", "id:2"), "//float[@name='score']='1.0'"); - assertQ(req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, 0, 0)", "fq", "id:3"), "//float[@name='score']='" + 2.0f + "'"); - assertQ(req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, 0, 0)", "fq", "id:4"), "//float[@name='score']='1.0'"); - assertQ(req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, 0, 0)", "fq", "id:5"), "//float[@name='score']='" + (float) (2.3 * 2.3 + 5.5 * 5.5) + "'"); - - //three dimensions, notice how we pass in 6 value sources - assertQ(req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, z_td, 0, 0, 0)", "fq", "id:1"), "//float[@name='score']='0.0'"); - assertQ(req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, z_td, 0, 0, 0)", "fq", "id:2"), "//float[@name='score']='1.0'"); - assertQ(req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, z_td, 0, 0, 0)", "fq", "id:3"), "//float[@name='score']='" + 3.0f + "'"); - assertQ(req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, z_td, 0, 0, 0)", "fq", "id:4"), "//float[@name='score']='1.0'"); - assertQ(req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, z_td, 0, 0, 0)", "fq", "id:5"), "//float[@name='score']='" + (float) (2.3 * 2.3 + 5.5 * 5.5 + 7.9 * 7.9) + "'"); - - //four dimensions, notice how we pass in 8 value sources - assertQ(req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, z_td, w_td, 0, 0, 0, 0)", "fq", "id:1"), "//float[@name='score']='0.0'"); - assertQ(req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, z_td, w_td, 0, 0, 0, 0)", "fq", "id:2"), "//float[@name='score']='1.0'"); - assertQ(req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, z_td, w_td, 0, 0, 0, 0)", "fq", "id:3"), "//float[@name='score']='" + 4.0f + "'"); - assertQ(req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, z_td, w_td, 0, 0, 0, 0)", "fq", "id:4"), "//float[@name='score']='1.0'"); - assertQ(req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, z_td, w_td, 0, 0, 0, 0)", "fq", "id:5"), "//float[@name='score']='" + (float) (2.3 * 2.3 + 5.5 * 5.5 + 7.9 * 7.9 + 2.4 * 2.4) + "'"); - //Pass in imbalanced list, throw exception + // two dimensions, notice how we only pass in 4 value sources + assertQ( + req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, 0, 0)", "fq", "id:1"), + "//float[@name='score']='0.0'"); + 
assertQ( + req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, 0, 0)", "fq", "id:2"), + "//float[@name='score']='1.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, 0, 0)", "fq", "id:3"), + "//float[@name='score']='" + 2.0f + "'"); + assertQ( + req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, 0, 0)", "fq", "id:4"), + "//float[@name='score']='1.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, 0, 0)", "fq", "id:5"), + "//float[@name='score']='" + (float) (2.3 * 2.3 + 5.5 * 5.5) + "'"); + + // three dimensions, notice how we pass in 6 value sources + assertQ( + req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, z_td, 0, 0, 0)", "fq", "id:1"), + "//float[@name='score']='0.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, z_td, 0, 0, 0)", "fq", "id:2"), + "//float[@name='score']='1.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, z_td, 0, 0, 0)", "fq", "id:3"), + "//float[@name='score']='" + 3.0f + "'"); + assertQ( + req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, z_td, 0, 0, 0)", "fq", "id:4"), + "//float[@name='score']='1.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, z_td, 0, 0, 0)", "fq", "id:5"), + "//float[@name='score']='" + (float) (2.3 * 2.3 + 5.5 * 5.5 + 7.9 * 7.9) + "'"); + + // four dimensions, notice how we pass in 8 value sources + assertQ( + req( + "fl", + "*,score", + "q", + "{!func}sqedist(x_td, y_td, z_td, w_td, 0, 0, 0, 0)", + "fq", + "id:1"), + "//float[@name='score']='0.0'"); + assertQ( + req( + "fl", + "*,score", + "q", + "{!func}sqedist(x_td, y_td, z_td, w_td, 0, 0, 0, 0)", + "fq", + "id:2"), + "//float[@name='score']='1.0'"); + assertQ( + req( + "fl", + "*,score", + "q", + "{!func}sqedist(x_td, y_td, z_td, w_td, 0, 0, 0, 0)", + "fq", + "id:3"), + "//float[@name='score']='" + 4.0f + "'"); + assertQ( + req( + "fl", + "*,score", + "q", + "{!func}sqedist(x_td, y_td, z_td, w_td, 0, 0, 0, 0)", + "fq", + "id:4"), + "//float[@name='score']='1.0'"); + assertQ( + req( + "fl", + "*,score", + "q", + "{!func}sqedist(x_td, y_td, z_td, w_td, 0, 0, 0, 0)", + "fq", + "id:5"), + "//float[@name='score']='" + (float) (2.3 * 2.3 + 5.5 * 5.5 + 7.9 * 7.9 + 2.4 * 2.4) + "'"); + // Pass in imbalanced list, throw exception try { ignoreException("Illegal number of sources"); - assertQ(req("fl", "*,score", "q", "{!func}sqedist(x_td, y_td, z_td, w_td, 0, 0, 0)", "fq", "id:1"), "//float[@name='score']='0.0'"); + assertQ( + req( + "fl", + "*,score", + "q", + "{!func}sqedist(x_td, y_td, z_td, w_td, 0, 0, 0)", + "fq", + "id:1"), + "//float[@name='score']='0.0'"); assertTrue("should throw an exception", false); } catch (Exception e) { Throwable cause = e.getCause(); @@ -197,30 +302,49 @@ public void testVector() throws Exception { } resetExceptionIgnores(); - //do one test of Euclidean - //two dimensions, notice how we only pass in 4 value sources - assertQ(req("fl", "*,score", "q", "{!func}dist(2, x_td, y_td, 0, 0)", "fq", "id:1"), "//float[@name='score']='0.0'"); - assertQ(req("fl", "*,score", "q", "{!func}dist(2, x_td, y_td, 0, 0)", "fq", "id:2"), "//float[@name='score']='1.0'"); - assertQ(req("fl", "*,score", "q", "{!func}dist(2, x_td, y_td, 0, 0)", "fq", "id:3"), "//float[@name='score']='" + (float) Math.sqrt(2.0) + "'"); - assertQ(req("fl", "*,score", "q", "{!func}dist(2, x_td, y_td, 0, 0)", "fq", "id:4"), "//float[@name='score']='1.0'"); - assertQ(req("fl", "*,score", "q", "{!func}dist(2, x_td, y_td, 0, 0)", "fq", "id:5"), "//float[@name='score']='" + (float) 
Math.sqrt((2.3 * 2.3 + 5.5 * 5.5)) + "'"); - - //do one test of Manhattan - //two dimensions, notice how we only pass in 4 value sources - assertQ(req("fl", "*,score", "q", "{!func}dist(1, x_td, y_td, 0, 0)", "fq", "id:1"), "//float[@name='score']='0.0'"); - assertQ(req("fl", "*,score", "q", "{!func}dist(1, x_td, y_td, 0, 0)", "fq", "id:2"), "//float[@name='score']='1.0'"); - assertQ(req("fl", "*,score", "q", "{!func}dist(1, x_td, y_td, 0, 0)", "fq", "id:3"), "//float[@name='score']='" + (float) 2.0 + "'"); - assertQ(req("fl", "*,score", "q", "{!func}dist(1, x_td, y_td, 0, 0)", "fq", "id:4"), "//float[@name='score']='1.0'"); - assertQ(req("fl", "*,score", "q", "{!func}dist(1, x_td, y_td, 0, 0)", "fq", "id:5"), "//float[@name='score']='" + (float) (2.3 + 5.5) + "'"); + // do one test of Euclidean + // two dimensions, notice how we only pass in 4 value sources + assertQ( + req("fl", "*,score", "q", "{!func}dist(2, x_td, y_td, 0, 0)", "fq", "id:1"), + "//float[@name='score']='0.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}dist(2, x_td, y_td, 0, 0)", "fq", "id:2"), + "//float[@name='score']='1.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}dist(2, x_td, y_td, 0, 0)", "fq", "id:3"), + "//float[@name='score']='" + (float) Math.sqrt(2.0) + "'"); + assertQ( + req("fl", "*,score", "q", "{!func}dist(2, x_td, y_td, 0, 0)", "fq", "id:4"), + "//float[@name='score']='1.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}dist(2, x_td, y_td, 0, 0)", "fq", "id:5"), + "//float[@name='score']='" + (float) Math.sqrt((2.3 * 2.3 + 5.5 * 5.5)) + "'"); + // do one test of Manhattan + // two dimensions, notice how we only pass in 4 value sources + assertQ( + req("fl", "*,score", "q", "{!func}dist(1, x_td, y_td, 0, 0)", "fq", "id:1"), + "//float[@name='score']='0.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}dist(1, x_td, y_td, 0, 0)", "fq", "id:2"), + "//float[@name='score']='1.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}dist(1, x_td, y_td, 0, 0)", "fq", "id:3"), + "//float[@name='score']='" + (float) 2.0 + "'"); + assertQ( + req("fl", "*,score", "q", "{!func}dist(1, x_td, y_td, 0, 0)", "fq", "id:4"), + "//float[@name='score']='1.0'"); + assertQ( + req("fl", "*,score", "q", "{!func}dist(1, x_td, y_td, 0, 0)", "fq", "id:5"), + "//float[@name='score']='" + (float) (2.3 + 5.5) + "'"); - //Do point tests: - assertQ(req("fl", "*,score", "q", "{!func}dist(1, vector(x_td, y_td), vector(0, 0))", "fq", "id:5"), - "//float[@name='score']='" + (float) (2.3 + 5.5) + "'"); - - assertQ(req("fl", "*,score", "q", "{!func}dist(1, point, vector(0, 0))", "fq", "id:6"), - "//float[@name='score']='" + 1.0f + "'"); + // Do point tests: + assertQ( + req("fl", "*,score", "q", "{!func}dist(1, vector(x_td, y_td), vector(0, 0))", "fq", "id:5"), + "//float[@name='score']='" + (float) (2.3 + 5.5) + "'"); + assertQ( + req("fl", "*,score", "q", "{!func}dist(1, point, vector(0, 0))", "fq", "id:6"), + "//float[@name='score']='" + 1.0f + "'"); } - } diff --git a/solr/core/src/test/org/apache/solr/search/join/BJQParserTest.java b/solr/core/src/test/org/apache/solr/search/join/BJQParserTest.java index 6e9f35d4685..46077112eec 100644 --- a/solr/core/src/test/org/apache/solr/search/join/BJQParserTest.java +++ b/solr/core/src/test/org/apache/solr/search/join/BJQParserTest.java @@ -24,9 +24,7 @@ import java.util.ListIterator; import java.util.Locale; import java.util.Map; - import javax.xml.xpath.XPathConstants; - import org.apache.lucene.search.Query; import org.apache.lucene.search.TopDocs; import 
org.apache.lucene.search.join.ScoreMode; @@ -44,39 +42,43 @@ import org.junit.Test; public class BJQParserTest extends SolrTestCaseJ4 { - + private static final String[] klm = new String[] {"k", "l", "m"}; private static final List xyz = Arrays.asList("x", "y", "z"); private static final String[] abcdef = new String[] {"a", "b", "c", "d", "e", "f"}; - + @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema15.xml"); createIndex(); } - + public static void createIndex() throws IOException, Exception { int i = 0; List> blocks = createBlocks(); for (List block : blocks) { List updBlock = new ArrayList<>(); - + for (String[] doc : block) { - String[] idDoc = Arrays.copyOf(doc,doc.length+2); - idDoc[doc.length]="id"; - idDoc[doc.length+1]=Integer.toString(i); + String[] idDoc = Arrays.copyOf(doc, doc.length + 2); + idDoc[doc.length] = "id"; + idDoc[doc.length + 1] = Integer.toString(i); updBlock.add(doc(idDoc)); i++; } - //got xmls for every doc. now nest all into the last one - XmlDoc parentDoc = updBlock.get(updBlock.size()-1); - parentDoc.xml = parentDoc.xml.replace("", - updBlock.subList(0, updBlock.size()-1).toString().replaceAll("[\\[\\]]","")+""); + // got xmls for every doc. now nest all into the last one + XmlDoc parentDoc = updBlock.get(updBlock.size() - 1); + parentDoc.xml = + parentDoc.xml.replace( + "", + updBlock.subList(0, updBlock.size() - 1).toString().replaceAll("[\\[\\]]", "") + + ""); assertU(add(parentDoc)); - + if (random().nextBoolean()) { assertU(commit()); - // force empty segment (actually, this will no longer create an empty segment, only a new segments_n) + // force empty segment (actually, this will no longer create an empty segment, only a new + // segments_n) if (random().nextBoolean()) { assertU(commit()); } @@ -100,189 +102,287 @@ private static List> createBlocks() { private static List createChildrenBlock(String parent) { List block = new ArrayList<>(); for (String child : klm) { - block - .add(new String[] {"child_s", child, "parentchild_s", parent + child, "childparent_s", parent}); + block.add( + new String[] { + "child_s", child, "parentchild_s", parent + child, "childparent_s", parent + }); } Collections.shuffle(block, random()); addGrandChildren(block); return block; } - + private static void addGrandChildren(List block) { List grandChildren = new ArrayList<>(xyz); // add grandchildren after children - for (ListIterator iter = block.listIterator(); iter.hasNext();) { + for (ListIterator iter = block.listIterator(); iter.hasNext(); ) { String[] child = iter.next(); - assert child[0]=="child_s" && child[2]=="parentchild_s": Arrays.toString(child); + assert child[0] == "child_s" && child[2] == "parentchild_s" : Arrays.toString(child); String child_s = child[1]; String parentchild_s = child[3]; int grandChildPos = 0; - boolean lastLoopButStillHasGrCh = !iter.hasNext() - && !grandChildren.isEmpty(); + boolean lastLoopButStillHasGrCh = !iter.hasNext() && !grandChildren.isEmpty(); while (!grandChildren.isEmpty() - && ((grandChildPos = random().nextInt(grandChildren.size() * 2)) < grandChildren - .size() || lastLoopButStillHasGrCh)) { - grandChildPos = grandChildPos >= grandChildren.size() ? 
0 - : grandChildPos; - iter.add(new String[] {"grand_s", grandChildren.remove(grandChildPos), - "grand_child_s", child_s, "grand_parentchild_s", parentchild_s}); + && ((grandChildPos = random().nextInt(grandChildren.size() * 2)) < grandChildren.size() + || lastLoopButStillHasGrCh)) { + grandChildPos = grandChildPos >= grandChildren.size() ? 0 : grandChildPos; + iter.add( + new String[] { + "grand_s", + grandChildren.remove(grandChildPos), + "grand_child_s", + child_s, + "grand_parentchild_s", + parentchild_s + }); } } // and reverse after that Collections.reverse(block); } - + @Test public void testFull() throws IOException, Exception { String childb = "{!parent which=\"parent_s:[* TO *]\"}child_s:l"; assertQ(req("q", childb), sixParents); } - - private static final String sixParents[] = new String[] { - "//*[@numFound='6']", "//doc/arr[@name=\"parent_s\"]/str='a'", - "//doc/arr[@name=\"parent_s\"]/str='b'", - "//doc/arr[@name=\"parent_s\"]/str='c'", - "//doc/arr[@name=\"parent_s\"]/str='d'", - "//doc/arr[@name=\"parent_s\"]/str='e'", - "//doc/arr[@name=\"parent_s\"]/str='f'"}; - + + private static final String sixParents[] = + new String[] { + "//*[@numFound='6']", + "//doc/arr[@name=\"parent_s\"]/str='a'", + "//doc/arr[@name=\"parent_s\"]/str='b'", + "//doc/arr[@name=\"parent_s\"]/str='c'", + "//doc/arr[@name=\"parent_s\"]/str='d'", + "//doc/arr[@name=\"parent_s\"]/str='e'", + "//doc/arr[@name=\"parent_s\"]/str='f'" + }; + @Test public void testJustParentsFilter() throws IOException { assertQ(req("q", "{!parent which=\"parent_s:[* TO *]\"}"), sixParents); } - + @Test public void testJustParentsFilterInChild() throws IOException { - assertQ(req("q", "{!child of=\"parent_s:[* TO *]\"}", - "fq", "childparent_s:"+abcdef[random().nextInt(abcdef.length)], - "indent","on"), - "//*[@numFound='"+klm.length+"']", //for any parent we have all three children - "//doc/arr[@name='child_s']/str='"+klm[0]+"'", - "//doc/arr[@name='child_s']/str='"+klm[1]+"'", - "//doc/arr[@name='child_s']/str='"+klm[2]+"'" - ); - assert klm.length==3 : "change asserts pls "+klm; + assertQ( + req( + "q", + "{!child of=\"parent_s:[* TO *]\"}", + "fq", + "childparent_s:" + abcdef[random().nextInt(abcdef.length)], + "indent", + "on"), + "//*[@numFound='" + klm.length + "']", // for any parent we have all three children + "//doc/arr[@name='child_s']/str='" + klm[0] + "'", + "//doc/arr[@name='child_s']/str='" + klm[1] + "'", + "//doc/arr[@name='child_s']/str='" + klm[2] + "'"); + assert klm.length == 3 : "change asserts pls " + klm; } - - private final static String beParents[] = new String[] {"//*[@numFound='2']", - "//doc/arr[@name=\"parent_s\"]/str='b'", - "//doc/arr[@name=\"parent_s\"]/str='e'"}; - + + private static final String beParents[] = + new String[] { + "//*[@numFound='2']", + "//doc/arr[@name=\"parent_s\"]/str='b'", + "//doc/arr[@name=\"parent_s\"]/str='e'" + }; + @Test public void testIntersectBqBjq() { - + assertQ( - req("q", "+parent_s:(e b) +_query_:\"{!parent which=$pq v=$chq}\"", - "chq", "child_s:l", "pq", "parent_s:[* TO *]"), beParents); + req( + "q", + "+parent_s:(e b) +_query_:\"{!parent which=$pq v=$chq}\"", + "chq", + "child_s:l", + "pq", + "parent_s:[* TO *]"), + beParents); assertQ( - req("fq", "{!parent which=$pq v=$chq}\"", "q", "parent_s:(e b)", "chq", - "child_s:l", "pq", "parent_s:[* TO *]"), beParents); - + req( + "fq", + "{!parent which=$pq v=$chq}\"", + "q", + "parent_s:(e b)", + "chq", + "child_s:l", + "pq", + "parent_s:[* TO *]"), + beParents); + assertQ( - req("q", "*:*", "fq", "{!parent 
which=$pq v=$chq}\"", "fq", - "parent_s:(e b)", "chq", "child_s:l", "pq", "parent_s:[* TO *]"), + req( + "q", + "*:*", + "fq", + "{!parent which=$pq v=$chq}\"", + "fq", + "parent_s:(e b)", + "chq", + "child_s:l", + "pq", + "parent_s:[* TO *]"), beParents); } public void testScoreNoneScoringForParent() throws Exception { - assertQ("score=none yields 0.0 score", - req("q", "{!parent which=\"parent_s:[* TO *]\" "+( - rarely()? "":(rarely()? "score=None":"score=none") - )+"}child_s:l","fl","score"), + assertQ( + "score=none yields 0.0 score", + req( + "q", + "{!parent which=\"parent_s:[* TO *]\" " + + (rarely() ? "" : (rarely() ? "score=None" : "score=none")) + + "}child_s:l", + "fl", + "score"), "//*[@numFound='6']", - "(//float[@name='score'])["+(random().nextInt(6)+1)+"]=0.0"); + "(//float[@name='score'])[" + (random().nextInt(6) + 1) + "]=0.0"); } public void testWrongScoreExceptionForParent() throws Exception { final String aMode = ScoreMode.values()[random().nextInt(ScoreMode.values().length)].name(); - final String wrongMode = rarely()? "":(rarely()? " ": - rarely()? aMode.substring(1):aMode.toUpperCase(Locale.ROOT)); - assertQEx("wrong score mode", - req("q", "{!parent which=\"parent_s:[* TO *]\" score="+wrongMode+"}child_s:l","fl","score") - , SolrException.ErrorCode.BAD_REQUEST.code); + final String wrongMode = + rarely() + ? "" + : (rarely() ? " " : rarely() ? aMode.substring(1) : aMode.toUpperCase(Locale.ROOT)); + assertQEx( + "wrong score mode", + req( + "q", + "{!parent which=\"parent_s:[* TO *]\" score=" + wrongMode + "}child_s:l", + "fl", + "score"), + SolrException.ErrorCode.BAD_REQUEST.code); } - public void testScoresForParent() throws Exception{ + public void testScoresForParent() throws Exception { final ArrayList noNone = new ArrayList<>(Arrays.asList(ScoreMode.values())); noNone.remove(ScoreMode.None); final String notNoneMode = (noNone.get(random().nextInt(noNone.size()))).name(); - + String leastScore = getLeastScore("child_s:l"); - assertTrue(leastScore+" > 0.0", Float.parseFloat(leastScore)>0.0); - final String notNoneLower = usually() ? notNoneMode: notNoneMode.toLowerCase(Locale.ROOT); - - assertQ(req("q", "{!parent which=\"parent_s:[* TO *]\" score="+notNoneLower+"}child_s:l","fl","score"), - "//*[@numFound='6']","(//float[@name='score'])["+(random().nextInt(6)+1)+"]>='"+leastScore+"'"); + assertTrue(leastScore + " > 0.0", Float.parseFloat(leastScore) > 0.0); + final String notNoneLower = usually() ? 
notNoneMode : notNoneMode.toLowerCase(Locale.ROOT); + + assertQ( + req( + "q", + "{!parent which=\"parent_s:[* TO *]\" score=" + notNoneLower + "}child_s:l", + "fl", + "score"), + "//*[@numFound='6']", + "(//float[@name='score'])[" + (random().nextInt(6) + 1) + "]>='" + leastScore + "'"); } - - public void testScoresForChild() throws Exception{ + + public void testScoresForChild() throws Exception { String leastScore = getLeastScore("parent_s:a"); - assertTrue(leastScore+" > 0.0", Float.parseFloat(leastScore)>0.0); - assertQ( - req("q", "{!child of=\"parent_s:[* TO *]\"}parent_s:a","fl","score"), - "//*[@numFound='6']","(//float[@name='score'])["+(random().nextInt(6)+1)+"]>='"+leastScore+"'"); + assertTrue(leastScore + " > 0.0", Float.parseFloat(leastScore) > 0.0); + assertQ( + req("q", "{!child of=\"parent_s:[* TO *]\"}parent_s:a", "fl", "score"), + "//*[@numFound='6']", + "(//float[@name='score'])[" + (random().nextInt(6) + 1) + "]>='" + leastScore + "'"); } - + private String getLeastScore(String query) throws Exception { - final String resp = h.query(req("q",query, "sort","score asc", "fl","score")); - return (String) BaseTestHarness. - evaluateXPath(resp,"(//float[@name='score'])[1]/text()", - XPathConstants.STRING); + final String resp = h.query(req("q", query, "sort", "score asc", "fl", "score")); + return (String) + BaseTestHarness.evaluateXPath( + resp, "(//float[@name='score'])[1]/text()", XPathConstants.STRING); } @Test public void testFq() { assertQ( - req("q", "{!parent which=$pq v=$chq}", "fq", "parent_s:(e b)", "chq", - "child_s:l", "pq", "parent_s:[* TO *]"// ,"debugQuery","on" - ), beParents); - + req( + "q", + "{!parent which=$pq v=$chq}", + "fq", + "parent_s:(e b)", + "chq", + "child_s:l", + "pq", + "parent_s:[* TO *]" // ,"debugQuery","on" + ), + beParents); + boolean qfq = random().nextBoolean(); assertQ( - req(qfq ? "q" : "fq", "parent_s:(a e b)", (!qfq) ? "q" : "fq", - "{!parent which=$pq v=$chq}", "chq", "parentchild_s:(bm ek cl)", - "pq", "parent_s:[* TO *]"), beParents); - + req( + qfq ? "q" : "fq", + "parent_s:(a e b)", + (!qfq) ? 
"q" : "fq", + "{!parent which=$pq v=$chq}", + "chq", + "parentchild_s:(bm ek cl)", + "pq", + "parent_s:[* TO *]"), + beParents); } - + @Test public void testIntersectParentBqChildBq() throws IOException { - + assertQ( - req("q", "+parent_s:(a e b) +_query_:\"{!parent which=$pq v=$chq}\"", - "chq", "parentchild_s:(bm ek cl)", "pq", "parent_s:[* TO *]"), + req( + "q", + "+parent_s:(a e b) +_query_:\"{!parent which=$pq v=$chq}\"", + "chq", + "parentchild_s:(bm ek cl)", + "pq", + "parent_s:[* TO *]"), beParents); } - + @Test public void testGrandChildren() throws IOException { assertQ( - req("q", "{!parent which=$parentfilter v=$children}", "children", + req( + "q", + "{!parent which=$parentfilter v=$children}", + "children", "{!parent which=$childrenfilter v=$grandchildren}", - "grandchildren", "grand_s:" + "x", "parentfilter", - "parent_s:[* TO *]", "childrenfilter", "child_s:[* TO *]"), + "grandchildren", + "grand_s:" + "x", + "parentfilter", + "parent_s:[* TO *]", + "childrenfilter", + "child_s:[* TO *]"), sixParents); // int loops = atLeast(1); String grandChildren = xyz.get(random().nextInt(xyz.size())); assertQ( - req("q", "+parent_s:(a e b) +_query_:\"{!parent which=$pq v=$chq}\"", - "chq", "{!parent which=$childfilter v=$grandchq}", "grandchq", + req( + "q", + "+parent_s:(a e b) +_query_:\"{!parent which=$pq v=$chq}\"", + "chq", + "{!parent which=$childfilter v=$grandchq}", + "grandchq", "+grand_s:" + grandChildren + " +grand_parentchild_s:(b* e* c*)", - "pq", "parent_s:[* TO *]", "childfilter", "child_s:[* TO *]"), + "pq", + "parent_s:[* TO *]", + "childfilter", + "child_s:[* TO *]"), beParents); } - + @Test public void testChildrenParser() { assertQ( - req("q", "{!child of=\"parent_s:[* TO *]\"}parent_s:a", "fq", - "NOT grand_s:[* TO *]"), "//*[@numFound='3']", + req("q", "{!child of=\"parent_s:[* TO *]\"}parent_s:a", "fq", "NOT grand_s:[* TO *]"), + "//*[@numFound='3']", "//doc/arr[@name=\"child_s\"]/str='k'", "//doc/arr[@name=\"child_s\"]/str='l'", "//doc/arr[@name=\"child_s\"]/str='m'"); assertQ( - req("q", "{!child of=\"parent_s:[* TO *]\"}parent_s:b", "fq", - "-parentchild_s:bm", "fq", "-grand_s:*"), "//*[@numFound='2']", + req( + "q", + "{!child of=\"parent_s:[* TO *]\"}parent_s:b", + "fq", + "-parentchild_s:bm", + "fq", + "-grand_s:*"), + "//*[@numFound='2']", "//doc/arr[@name=\"child_s\"]/str='k'", "//doc/arr[@name=\"child_s\"]/str='l'"); } @@ -290,84 +390,115 @@ public void testChildrenParser() { @Test public void testCacheHit() throws IOException { - MetricsMap parentFilterCache = (MetricsMap)((SolrMetricManager.GaugeWrapper)h.getCore().getCoreMetricManager().getRegistry() - .getMetrics().get("CACHE.searcher.perSegFilter")).getGauge(); - MetricsMap filterCache = (MetricsMap)((SolrMetricManager.GaugeWrapper)h.getCore().getCoreMetricManager().getRegistry() - .getMetrics().get("CACHE.searcher.filterCache")).getGauge(); - - Map parentsBefore = parentFilterCache.getValue(); - - Map filtersBefore = filterCache.getValue(); + MetricsMap parentFilterCache = + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) + h.getCore() + .getCoreMetricManager() + .getRegistry() + .getMetrics() + .get("CACHE.searcher.perSegFilter")) + .getGauge(); + MetricsMap filterCache = + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) + h.getCore() + .getCoreMetricManager() + .getRegistry() + .getMetrics() + .get("CACHE.searcher.filterCache")) + .getGauge(); + + Map parentsBefore = parentFilterCache.getValue(); + + Map filtersBefore = filterCache.getValue(); // it should be weird enough to be uniq String 
parentFilter = "parent_s:([a TO c] [d TO f])"; - assertQ("search by parent filter", + assertQ( + "search by parent filter", req("q", "{!parent which=\"" + parentFilter + "\"}"), "//*[@numFound='6']"); - assertQ("filter by parent filter", + assertQ( + "filter by parent filter", req("q", "*:*", "fq", "{!parent which=\"" + parentFilter + "\"}"), "//*[@numFound='6']"); - assertEquals("didn't hit fqCache yet ", 0L, - delta("hits", filterCache.getValue(), filtersBefore)); + assertEquals( + "didn't hit fqCache yet ", 0L, delta("hits", filterCache.getValue(), filtersBefore)); assertQ( "filter by join", - req("q", "*:*", "fq", "{!parent which=\"" + parentFilter - + "\"}child_s:l"), "//*[@numFound='6']"); + req("q", "*:*", "fq", "{!parent which=\"" + parentFilter + "\"}child_s:l"), + "//*[@numFound='6']"); - assertEquals("in cache mode every request lookups", 3, + assertEquals( + "in cache mode every request lookups", + 3, delta("lookups", parentFilterCache.getValue(), parentsBefore)); - assertEquals("last two lookups causes hits", 2, + assertEquals( + "last two lookups causes hits", + 2, delta("hits", parentFilterCache.getValue(), parentsBefore)); - assertEquals("the first lookup gets insert", 1, + assertEquals( + "the first lookup gets insert", + 1, delta("inserts", parentFilterCache.getValue(), parentsBefore)); - assertEquals("true join query was not in fqCache", 0L, + assertEquals( + "true join query was not in fqCache", + 0L, delta("hits", filterCache.getValue(), filtersBefore)); - assertEquals("true join query is cached in fqCache", 1L, + assertEquals( + "true join query is cached in fqCache", + 1L, delta("inserts", filterCache.getValue(), filtersBefore)); } - - private long delta(String key, Map a, Map b) { + + private long delta(String key, Map a, Map b) { return (Long) a.get(key) - (Long) b.get(key); } - @Test public void nullInit() throws Exception { - final BlockJoinParentQParserPlugin blockJoinParentQParserPlugin = new BlockJoinParentQParserPlugin(); + final BlockJoinParentQParserPlugin blockJoinParentQParserPlugin = + new BlockJoinParentQParserPlugin(); blockJoinParentQParserPlugin.init(null); blockJoinParentQParserPlugin.close(); } - private final static String eParent[] = new String[]{"//*[@numFound='1']", - "//doc/arr[@name=\"parent_s\"]/str='e'"}; + private static final String eParent[] = + new String[] {"//*[@numFound='1']", "//doc/arr[@name=\"parent_s\"]/str='e'"}; @Test public void testToParentFilters() { assertQ( - req("fq", "{!parent filters=$child.fq which=$pq v=$chq}\"", + req( + "fq", "{!parent filters=$child.fq which=$pq v=$chq}\"", "q", "parent_s:(e b)", "child.fq", "+childparent_s:e +child_s:l", "chq", "child_s:[* TO *]", - "pq", "parent_s:[* TO *]"), eParent); + "pq", "parent_s:[* TO *]"), + eParent); assertQ( - req("fq", "{!parent filters=$child.fq which=$pq v=$chq}\"", + req( + "fq", "{!parent filters=$child.fq which=$pq v=$chq}\"", "q", "parent_s:(e b)", "child.fq", "childparent_s:e", "child.fq", "child_s:l", "chq", "child_s:[* TO *]", - "pq", "parent_s:[* TO *]"), eParent); + "pq", "parent_s:[* TO *]"), + eParent); } @Test public void testToChildFilters() { assertQ( - req("fq", "{!child of=$pq filters=$parent.fq v=$pq}\"", + req( + "fq", "{!child of=$pq filters=$parent.fq v=$pq}\"", "q", "child_s:(l m)", "parent.fq", "+parent_s:(d c)", "pq", "parent_s:[* TO *]"), @@ -375,112 +506,148 @@ public void testToChildFilters() { "//doc/arr[@name=\"parentchild_s\"]/str='dl'", "//doc/arr[@name=\"parentchild_s\"]/str='dm'", "//doc/arr[@name=\"parentchild_s\"]/str='cl'", - 
"//doc/arr[@name=\"parentchild_s\"]/str='cm'" - ); + "//doc/arr[@name=\"parentchild_s\"]/str='cm'"); assertQ( - req("fq", "{!child of=$pq filters=$parent.fq v=$pq}\"", + req( + "fq", "{!child of=$pq filters=$parent.fq v=$pq}\"", "q", "child_s:(l m)", "parent.fq", "+parent_s:(d c)", "parent.fq", "+parent_s:(c a)", "pq", "parent_s:[* TO *]"), "//*[@numFound='2']", "//doc/arr[@name=\"parentchild_s\"]/str='cl'", - "//doc/arr[@name=\"parentchild_s\"]/str='cm'" - ); + "//doc/arr[@name=\"parentchild_s\"]/str='cm'"); } - private final static String elChild[] = new String[]{"//*[@numFound='1']", - "//doc[" + - "arr[@name=\"child_s\"]/str='l' and arr[@name=\"childparent_s\"]/str='e']"}; - + private static final String elChild[] = + new String[] { + "//*[@numFound='1']", + "//doc[" + "arr[@name=\"child_s\"]/str='l' and arr[@name=\"childparent_s\"]/str='e']" + }; @Test public void testFilters() { assertQ( - req("q", "{!filters param=$child.fq v=$gchq}", + req( + "q", "{!filters param=$child.fq v=$gchq}", "child.fq", "childparent_s:e", "child.fq", "child_s:l", - "gchq", "child_s:[* TO *]"), elChild); + "gchq", "child_s:[* TO *]"), + elChild); assertQ( - req("q", "{!filters param=$child.fq excludeTags=firstTag v=$gchq}", + req( + "q", "{!filters param=$child.fq excludeTags=firstTag v=$gchq}", "child.fq", "{!tag=zeroTag,firstTag}childparent_s:e", "child.fq", "{!tag=secondTag}child_s:l", - "gchq", "child_s:[* TO *]"), "//*[@numFound='6']"); + "gchq", "child_s:[* TO *]"), + "//*[@numFound='6']"); assertQ( - req("q", "{!filters param=$child.fq excludeTags=secondTag v=$gchq}", + req( + "q", "{!filters param=$child.fq excludeTags=secondTag v=$gchq}", "child.fq", "{!tag=firstTag}childparent_s:e", "child.fq", "{!tag=secondTag}child_s:l", - "gchq", "child_s:[* TO *]"), "//*[@numFound='3']"); + "gchq", "child_s:[* TO *]"), + "//*[@numFound='3']"); + + assertQ( + req( + "q", + random().nextBoolean() + ? "{!filters param=$child.fq excludeTags=firstTag,secondTag v=$gchq}" + : random().nextBoolean() + ? "{!filters param=$thereAreNoLikeThese v=$gchq}" + : "{!filters v=$gchq}", + "child.fq", + "{!tag=firstTag}childparent_s:e", + "child.fq", + "{!tag=secondTag}child_s:l", + "gchq", + "child_s:[* TO *]"), + "//*[@numFound='18']"); + + assertQEx( + "expecting exception on weird param", + req( + "q", "{!filters v=$gchq param=}\"", + "gchq", "child_s:[* TO *]"), + ErrorCode.BAD_REQUEST); - assertQ(req("q", - random().nextBoolean() ? "{!filters param=$child.fq excludeTags=firstTag,secondTag v=$gchq}" : - random().nextBoolean() ? 
"{!filters param=$thereAreNoLikeThese v=$gchq}" : - "{!filters v=$gchq}" , - "child.fq", "{!tag=firstTag}childparent_s:e", - "child.fq", "{!tag=secondTag}child_s:l", - "gchq", "child_s:[* TO *]"), "//*[@numFound='18']"); - - assertQEx("expecting exception on weird param", - req("q", "{!filters v=$gchq param=}\"" , - "gchq", "child_s:[* TO *]" - ),ErrorCode.BAD_REQUEST); - assertQ( // omit main query - req("q", "{!filters param=$child.fq}", + req( + "q", "{!filters param=$child.fq}", "child.fq", "{!tag=firstTag}childparent_s:(e f)", - "child.fq", "{!tag=secondTag}child_s:l"), "//*[@numFound='2']"); - - assertQ( // all excluded, matching all - req("q", "{!filters param=$child.fq excludeTags=firstTag,secondTag}", + "child.fq", "{!tag=secondTag}child_s:l"), + "//*[@numFound='2']"); + + assertQ( // all excluded, matching all + req( + "q", "{!filters param=$child.fq excludeTags=firstTag,secondTag}", "child.fq", "{!tag=firstTag}childparent_s:(e f)", - "child.fq", "{!tag=secondTag}child_s:l"), "//*[@numFound='42']"); - - assertQ(req("q", // excluding top level - "{!filters param=$child.fq excludeTags=bot,top v=$gchq}" , - "child.fq", "{!tag=secondTag}child_s:l", // 6 ls remains - "gchq", "{!tag=top}childparent_s:e"), "//*[@numFound='6']"); - - assertQ(req("q", // top and filter are excluded, got all results - "{!filters excludeTags=bot,secondTag,top v=$gchq}" , - "child.fq", "{!tag=secondTag}child_s:l", - "gchq", "{!tag=top}childparent_s:e"), "//*[@numFound='42']"); + "child.fq", "{!tag=secondTag}child_s:l"), + "//*[@numFound='42']"); + + assertQ( + req( + "q", // excluding top level + "{!filters param=$child.fq excludeTags=bot,top v=$gchq}", + "child.fq", + "{!tag=secondTag}child_s:l", // 6 ls remains + "gchq", + "{!tag=top}childparent_s:e"), + "//*[@numFound='6']"); + + assertQ( + req( + "q", // top and filter are excluded, got all results + "{!filters excludeTags=bot,secondTag,top v=$gchq}", + "child.fq", + "{!tag=secondTag}child_s:l", + "gchq", + "{!tag=top}childparent_s:e"), + "//*[@numFound='42']"); } - + @Test public void testFiltersCache() throws SyntaxError, IOException { - final String [] elFilterQuery = new String[] {"q", "{!filters param=$child.fq v=$gchq}", - "child.fq", "childparent_s:e", - "child.fq", "child_s:l", - "gchq", "child_s:[* TO *]"}; - assertQ("precondition: single doc match", - req(elFilterQuery), elChild); + final String[] elFilterQuery = + new String[] { + "q", + "{!filters param=$child.fq v=$gchq}", + "child.fq", + "childparent_s:e", + "child.fq", + "child_s:l", + "gchq", + "child_s:[* TO *]" + }; + assertQ("precondition: single doc match", req(elFilterQuery), elChild); final Query query; - try(final SolrQueryRequest req = req(elFilterQuery)) { + try (final SolrQueryRequest req = req(elFilterQuery)) { QParser parser = QParser.getParser(req.getParams().get("q"), null, req); query = parser.getQuery(); final TopDocs topDocs = req.getSearcher().search(query, 10); assertEquals(1, topDocs.totalHits.value); } - assertU(adoc("id", "12275", - "child_s", "l", "childparent_s", "e")); + assertU(adoc("id", "12275", "child_s", "l", "childparent_s", "e")); assertU(commit()); - assertQ("here we rely on autowarming for cathing cache leak", //cache=false - req(elFilterQuery), "//*[@numFound='2']"); + assertQ( + "here we rely on autowarming for cathing cache leak", // cache=false + req(elFilterQuery), + "//*[@numFound='2']"); - try(final SolrQueryRequest req = req()) { - final int count = req.getSearcher().count(query); - assertEquals("expecting new doc is visible to old query", 2, 
count); + try (final SolrQueryRequest req = req()) { + final int count = req.getSearcher().count(query); + assertEquals("expecting new doc is visible to old query", 2, count); } } @After - public void cleanAfterTestFiltersCache(){ + public void cleanAfterTestFiltersCache() { assertU("should be noop", delI("12275")); assertU("most of the time", commit()); } } - diff --git a/solr/core/src/test/org/apache/solr/search/join/CrossCollectionJoinQueryTest.java b/solr/core/src/test/org/apache/solr/search/join/CrossCollectionJoinQueryTest.java index a976c9993e5..cc408e44bfe 100644 --- a/solr/core/src/test/org/apache/solr/search/join/CrossCollectionJoinQueryTest.java +++ b/solr/core/src/test/org/apache/solr/search/join/CrossCollectionJoinQueryTest.java @@ -23,7 +23,6 @@ import java.util.Collections; import java.util.List; import java.util.Locale; - import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -43,7 +42,7 @@ public class CrossCollectionJoinQueryTest extends SolrCloudTestCase { private static final int NUM_REPLICAS = 1; private static final int NUM_PRODUCTS = 200; - private static final String[] SIZES = new String[]{"S", "M", "L", "XL"}; + private static final String[] SIZES = new String[] {"S", "M", "L", "XL"}; @BeforeClass public static void setupCluster() throws Exception { @@ -52,7 +51,6 @@ public static void setupCluster() throws Exception { .withSolrXml(TEST_PATH().resolve("solr.xml")) .configure(); - CollectionAdminRequest.createCollection("products", "ccjoin", NUM_SHARDS, NUM_REPLICAS) .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) .process(cluster.getSolrClient()); @@ -60,7 +58,6 @@ public static void setupCluster() throws Exception { CollectionAdminRequest.createCollection("parts", "ccjoin", NUM_SHARDS, NUM_REPLICAS) .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) .process(cluster.getSolrClient()); - } public static void setupIndexes(boolean routeByKey) throws IOException, SolrServerException { @@ -68,7 +65,6 @@ public static void setupIndexes(boolean routeByKey) throws IOException, SolrServ clearCollection("parts"); buildIndexes(routeByKey); - } private static void clearCollection(String collection) throws IOException, SolrServerException { @@ -85,27 +81,37 @@ private static void buildIndexes(boolean routeByKey) throws IOException, SolrSer int sizeNum = productId % SIZES.length; String size = SIZES[sizeNum]; - productDocs.add(new SolrInputDocument( - "id", buildId(productId, String.valueOf(productId), routeByKey), - "product_id_i", String.valueOf(productId), - "product_id_l", String.valueOf(productId), - "product_id_s", String.valueOf(productId), - "size_s", size)); + productDocs.add( + new SolrInputDocument( + "id", buildId(productId, String.valueOf(productId), routeByKey), + "product_id_i", String.valueOf(productId), + "product_id_l", String.valueOf(productId), + "product_id_s", String.valueOf(productId), + "size_s", size)); // Index 1 parts document for each small product, 2 for each medium, 3 for each large, etc. 
for (int partNum = 0; partNum <= sizeNum; partNum++) { String partId = String.format(Locale.ROOT, "%d_%d", productId, partNum); - partDocs.add(new SolrInputDocument( - "id", buildId(productId, partId, routeByKey), - "product_id_i", String.valueOf(productId), - "product_id_l", String.valueOf(productId), - "product_id_s", String.valueOf(productId))); + partDocs.add( + new SolrInputDocument( + "id", buildId(productId, partId, routeByKey), + "product_id_i", String.valueOf(productId), + "product_id_l", String.valueOf(productId), + "product_id_s", String.valueOf(productId))); } } - // some extra docs in each collection (not counded in NUM_PRODUCTS) that should drop out of the joins because they don't have the join key - productDocs.add(new SolrInputDocument("id", buildId(NUM_PRODUCTS+10, String.valueOf(NUM_PRODUCTS+10), routeByKey), "size_s", "M")); - partDocs.add(new SolrInputDocument("id", buildId(NUM_PRODUCTS+10, String.valueOf(NUM_PRODUCTS+10), routeByKey))); + // some extra docs in each collection (not counted in NUM_PRODUCTS) that should drop out of the + // joins because they don't have the join key + productDocs.add( + new SolrInputDocument( + "id", + buildId(NUM_PRODUCTS + 10, String.valueOf(NUM_PRODUCTS + 10), routeByKey), + "size_s", + "M")); + partDocs.add( + new SolrInputDocument( + "id", buildId(NUM_PRODUCTS + 10, String.valueOf(NUM_PRODUCTS + 10), routeByKey))); Collections.shuffle(productDocs, random()); Collections.shuffle(partDocs, random()); @@ -123,7 +129,8 @@ private static String buildId(int productId, String id, boolean routeByKey) { return routeByKey ? productId + "!" + id : id; } - private static void indexDocs(String collection, Collection docs) throws IOException, SolrServerException { + private static void indexDocs(String collection, Collection docs) + throws IOException, SolrServerException { UpdateRequest update = new UpdateRequest(); update.add(docs); update.process(cluster.getSolrClient(), collection); @@ -138,7 +145,9 @@ private String getSolrUrl() { @Test public void testCcJoinRoutedCollection() throws Exception { setupIndexes(true); - testCcJoinQuery("{!join method=crossCollection fromIndex=products from=product_id_i to=product_id_i}size_s:M", true); + testCcJoinQuery( + "{!join method=crossCollection fromIndex=products from=product_id_i to=product_id_i}size_s:M", + true); int i = 0; for (JettySolrRunner runner : cluster.getJettySolrRunners()) { i++; @@ -154,38 +163,58 @@ public void testCcJoinRoutedCollection() throws Exception { CollectionAdminRequest.Reload.reloadCollection("parts").process(client); Thread.sleep(10000); - testCcJoinQuery("{!join method=crossCollection fromIndex=products from=product_id_i to=product_id_i}size_s:M", true); + testCcJoinQuery( + "{!join method=crossCollection fromIndex=products from=product_id_i to=product_id_i}size_s:M", + true); - testCcJoinQuery(String.format(Locale.ROOT, - "{!join method=crossCollection solrUrl=\"%s\" fromIndex=products from=product_id_i to=product_id_i}size_s:M", getSolrUrl()), + testCcJoinQuery( + String.format( + Locale.ROOT, + "{!join method=crossCollection solrUrl=\"%s\" fromIndex=products from=product_id_i to=product_id_i}size_s:M", + getSolrUrl()), true); - testCcJoinQuery("{!join method=crossCollection fromIndex=products from=product_id_l to=product_id_l}size_s:M", + testCcJoinQuery( + "{!join method=crossCollection fromIndex=products from=product_id_l to=product_id_l}size_s:M", + true); - testCcJoinQuery(String.format(Locale.ROOT, - "{!join method=crossCollection solrUrl=\"%s\" fromIndex=products 
from=product_id_l to=product_id_l}size_s:M", - getSolrUrl()), + testCcJoinQuery( + String.format( + Locale.ROOT, + "{!join method=crossCollection solrUrl=\"%s\" fromIndex=products from=product_id_l to=product_id_l}size_s:M", + getSolrUrl()), true); - testCcJoinQuery("{!join method=crossCollection fromIndex=products from=product_id_s to=product_id_s}size_s:M", + testCcJoinQuery( + "{!join method=crossCollection fromIndex=products from=product_id_s to=product_id_s}size_s:M", true); - testCcJoinQuery(String.format(Locale.ROOT, - "{!join method=crossCollection solrUrl=\"%s\" fromIndex=products from=product_id_s to=product_id_s}size_s:M", - getSolrUrl()), + testCcJoinQuery( + String.format( + Locale.ROOT, + "{!join method=crossCollection solrUrl=\"%s\" fromIndex=products from=product_id_s to=product_id_s}size_s:M", + getSolrUrl()), true); - testCcJoinQuery(String.format(Locale.ROOT, - "{!join method=crossCollection zkHost=\"%s\" fromIndex=products from=product_id_s to=product_id_s}size_s:M", - cluster.getSolrClient().getZkHost()), + testCcJoinQuery( + String.format( + Locale.ROOT, + "{!join method=crossCollection zkHost=\"%s\" fromIndex=products from=product_id_s to=product_id_s}size_s:M", + cluster.getSolrClient().getZkHost()), true); - // Test the ability to set other parameters on crossCollection join and have them passed through - assertResultCount("parts", + // Test the ability to set other parameters on crossCollection join and have them passed + // through + assertResultCount( + "parts", "{!join method=crossCollection fromIndex=products from=product_id_s to=product_id_s fq=product_id_s:1}size_s:M", - 2, true); - assertResultCount("parts", - String.format(Locale.ROOT, + 2, + true); + assertResultCount( + "parts", + String.format( + Locale.ROOT, "{!join method=crossCollection solrUrl=\"%s\" fromIndex=products from=product_id_s to=product_id_s fq=product_id_s:1}size_s:M", - getSolrUrl()), 2, true); + getSolrUrl()), + 2, + true); } finally { for (JettySolrRunner runner : cluster.getJettySolrRunners()) { i++; @@ -200,18 +229,22 @@ public void testCcJoinNonroutedCollection() throws Exception { // This query will expect the collection to have been routed on product_id, so it should return // incomplete results. - testCcJoinQuery("{!join method=crossCollection fromIndex=products from=product_id_s to=product_id_s}size_s:M", + testCcJoinQuery( + "{!join method=crossCollection fromIndex=products from=product_id_s to=product_id_s}size_s:M", false); // Now if we set routed=false we should get a complete set of results. - testCcJoinQuery("{!join method=crossCollection fromIndex=products from=product_id_s to=product_id_s routed=false}size_s:M", + testCcJoinQuery( + "{!join method=crossCollection fromIndex=products from=product_id_s to=product_id_s routed=false}size_s:M", true); // The join_nonrouted query parser doesn't assume that the collection was routed on product_id, // so we should get the full set of results. - testCcJoinQuery("{!join_nonrouted method=crossCollection fromIndex=products from=product_id_s to=product_id_s}size_s:M", + testCcJoinQuery( + "{!join_nonrouted method=crossCollection fromIndex=products from=product_id_s to=product_id_s}size_s:M", true); - // But if we set routed=true, we are now assuming again that the collection was routed on product_id, - // so we should get incomplete results. 
- testCcJoinQuery("{!join_nonrouted method=crossCollection fromIndex=products from=product_id_s to=product_id_s routed=true}size_s:M", + // But if we set routed=true, we are now assuming again that the collection was routed on + // product_id, so we should get incomplete results. + testCcJoinQuery( + "{!join_nonrouted method=crossCollection fromIndex=products from=product_id_s to=product_id_s routed=true}size_s:M", false); } @@ -219,7 +252,8 @@ public void testCcJoinNonroutedCollection() throws Exception { public void testAllowSolrUrlsList() throws Exception { setupIndexes(false); - // programmatically add the current jetty solr url to the allowSolrUrls property in the solrconfig.xml + // programmatically add the current jetty solr url to the allowSolrUrls property in the + // solrconfig.xml int i = 0; for (JettySolrRunner runner : cluster.getJettySolrRunners()) { i++; @@ -242,21 +276,27 @@ public void testAllowSolrUrlsList() throws Exception { try { // This should throw an exception. // verify the join plugin definition has the current valid urls and works. - testCcJoinQuery(String.format(Locale.ROOT, - "{!join method=crossCollection solrUrl=\"%s\" fromIndex=products from=product_id_i to=product_id_i}size_s:M", - "http://bogus.example.com:8983/solr"), + testCcJoinQuery( + String.format( + Locale.ROOT, + "{!join method=crossCollection solrUrl=\"%s\" fromIndex=products from=product_id_i to=product_id_i}size_s:M", + "http://bogus.example.com:8983/solr"), true); fail("The query invovling bogus.example.com should not succeed"); } catch (Exception e) { // should get here. String message = e.getMessage(); - assertTrue("message was " + message, message.contains("SyntaxError: Solr URL was not in allowSolrUrls list")); + assertTrue( + "message was " + message, + message.contains("SyntaxError: Solr URL was not in allowSolrUrls list")); } // verify the join plugin definition has the current valid urls and works. 
- testCcJoinQuery(String.format(Locale.ROOT, - "{!join method=crossCollection solrUrl=\"%s\" fromIndex=products from=product_id_i to=product_id_i}size_s:M", - getSolrUrl()), + testCcJoinQuery( + String.format( + Locale.ROOT, + "{!join method=crossCollection solrUrl=\"%s\" fromIndex=products from=product_id_i to=product_id_i}size_s:M", + getSolrUrl()), true); } finally { @@ -271,7 +311,8 @@ public void testCcJoinQuery(String query, boolean expectFullResults) throws Exce assertResultCount("parts", query, NUM_PRODUCTS / 2, expectFullResults); } - private static void assertResultCount(String collection, String query, long expectedCount, boolean expectFullResults) + private static void assertResultCount( + String collection, String query, long expectedCount, boolean expectFullResults) throws IOException, SolrServerException { final ModifiableSolrParams params = new ModifiableSolrParams(); diff --git a/solr/core/src/test/org/apache/solr/search/join/GraphQueryTest.java b/solr/core/src/test/org/apache/solr/search/join/GraphQueryTest.java index 4b550e4a844..62b9c116e91 100644 --- a/solr/core/src/test/org/apache/solr/search/join/GraphQueryTest.java +++ b/solr/core/src/test/org/apache/solr/search/join/GraphQueryTest.java @@ -23,49 +23,50 @@ import org.junit.Test; public class GraphQueryTest extends SolrTestCaseJ4 { - + @BeforeClass public static void beforeTests() throws Exception { - initCore("solrconfig.xml","schema_latest.xml"); + initCore("solrconfig.xml", "schema_latest.xml"); } @Test public void testGraph() throws Exception { // normal strings - doGraph( params("node_id","node_s", "edge_id","edge_ss") ); - doGraph( params("node_id","node_ss", "edge_id","edge_ss") ); + doGraph(params("node_id", "node_s", "edge_id", "edge_ss")); + doGraph(params("node_id", "node_ss", "edge_id", "edge_ss")); // point based fields with docvalues (single and multi-valued for the node field) - doGraph( params("node_id","node_ip", "edge_id","edge_ips") ); - doGraph( params("node_id","node_ips", "edge_id","edge_ips") ); - doGraph( params("node_id","node_lp", "edge_id","edge_lps") ); - doGraph( params("node_id","node_lps", "edge_id","edge_lps") ); - doGraph( params("node_id","node_fp", "edge_id","edge_fps") ); - doGraph( params("node_id","node_fps", "edge_id","edge_fps") ); - doGraph( params("node_id","node_dp", "edge_id","edge_dps") ); - doGraph( params("node_id","node_dps", "edge_id","edge_dps") ); + doGraph(params("node_id", "node_ip", "edge_id", "edge_ips")); + doGraph(params("node_id", "node_ips", "edge_id", "edge_ips")); + doGraph(params("node_id", "node_lp", "edge_id", "edge_lps")); + doGraph(params("node_id", "node_lps", "edge_id", "edge_lps")); + doGraph(params("node_id", "node_fp", "edge_id", "edge_fps")); + doGraph(params("node_id", "node_fps", "edge_id", "edge_fps")); + doGraph(params("node_id", "node_dp", "edge_id", "edge_dps")); + doGraph(params("node_id", "node_dps", "edge_id", "edge_dps")); // string with indexed=false and docValues=true - doGraph( params("node_id","node_sdN", "edge_id","edge_sdsN") ); + doGraph(params("node_id", "node_sdN", "edge_id", "edge_sdsN")); } public void doGraph(SolrParams p) throws Exception { String node_id = p.get("node_id"); String edge_id = p.get("edge_id"); - // NOTE: from/to fields are reversed from {!join}... values are looked up in the "toField" and then matched on the "fromField" + // NOTE: from/to fields are reversed from {!join}... values are looked up in the "toField" and + // then matched on the "fromField" // 1->-2->(3,9)->(4,5)->7 // 8->(1,-2)->... 
- assertU(adoc("id", "doc_1", node_id, "1", edge_id, "-2", "text", "foo", "title", "foo10" )); - assertU(adoc("id", "doc_2", node_id, "-2", edge_id, "3", "text", "foo" )); + assertU(adoc("id", "doc_1", node_id, "1", edge_id, "-2", "text", "foo", "title", "foo10")); + assertU(adoc("id", "doc_2", node_id, "-2", edge_id, "3", "text", "foo")); assertU(commit()); assertU(adoc("id", "doc_3", node_id, "3", edge_id, "4", edge_id, "5")); - assertU(adoc("id", "doc_4", node_id, "4" )); + assertU(adoc("id", "doc_4", node_id, "4")); assertU(commit()); - assertU(adoc("id", "doc_5", node_id, "5", edge_id, "7" )); - assertU(adoc("id", "doc_6", node_id, "6", edge_id, "3" )); - assertU(adoc("id", "doc_7", node_id, "7", edge_id, "1" )); - assertU(adoc("id", "doc_8", node_id, "8", edge_id, "1", edge_id, "-2" )); + assertU(adoc("id", "doc_5", node_id, "5", edge_id, "7")); + assertU(adoc("id", "doc_6", node_id, "6", edge_id, "3")); + assertU(adoc("id", "doc_7", node_id, "7", edge_id, "1")); + assertU(adoc("id", "doc_8", node_id, "8", edge_id, "1", edge_id, "-2")); assertU(adoc("id", "doc_9", node_id, "9")); assertU(commit()); // update docs so they're in a new segment. @@ -76,68 +77,105 @@ public void doGraph(SolrParams p) throws Exception { assertU(adoc("id", "doc_10", node_id, "10", edge_id, "11", "title", "foo")); assertU(adoc("id", "doc_11", node_id, "11", edge_id, "12", edge_id, "13", "text", "foo11")); assertU(adoc("id", "doc_12", node_id, "12", "text", "foo10")); - assertU(adoc("id", "doc_13", node_id, "13", edge_id, "12", "text", "foo10")); + assertU(adoc("id", "doc_13", node_id, "13", edge_id, "12", "text", "foo10")); assertU(commit()); // Now we have created a simple graph // start traversal from node id to edge id // TODO: assert which documents actually come back - assertJQ(req(p, "q","{!graph from=${node_id} to=${edge_id}}id:doc_1") - , "/response/numFound==7" - ); + assertJQ( + req(p, "q", "{!graph from=${node_id} to=${edge_id}}id:doc_1"), "/response/numFound==7"); // reverse the order to test single/multi-valued on the opposite fields // start with doc1, look up node_id (1) and match to edge_id (docs 7 and 8) - assertJQ(req(p, "q","{!graph from=${edge_id} to=${node_id} maxDepth=1}id:doc_1") - , "/response/numFound==3" - ); + assertJQ( + req(p, "q", "{!graph from=${edge_id} to=${node_id} maxDepth=1}id:doc_1"), + "/response/numFound==3"); - assertJQ(req(p, "q","{!graph from=${node_id} to=${edge_id} returnRoot=true returnOnlyLeaf=false}id:doc_8") - , "/response/numFound==8" - ); - assertJQ(req(p, "q","{!graph from=${node_id} to=${edge_id} returnRoot=false returnOnlyLeaf=false}id:doc_8") - , "/response/numFound==7" - ); - assertJQ(req(p, "q","{!graph from=${node_id} to=${edge_id} returnRoot=true returnOnlyLeaf=false traversalFilter='text:foo11'}id:doc_8") - , "/response/numFound==2" - ); - assertJQ(req(p, "q","{!graph from=${node_id} to=${edge_id} returnRoot=true returnOnlyLeaf=false maxDepth=0}id:doc_8") - , "/response/numFound==1" - ); - assertJQ(req(p, "q","{!graph from=${node_id} to=${edge_id} returnRoot=true returnOnlyLeaf=false maxDepth=1}id:doc_8") - , "/response/numFound==3" - ); - assertJQ(req(p, "q","{!graph from=${node_id} to=${edge_id} returnRoot=false returnOnlyLeaf=false maxDepth=1}id:doc_8") - , "/response/numFound==2" - ); - assertJQ(req(p, "q","{!graph from=${node_id} to=${edge_id} returnRoot=false returnOnlyLeaf=true maxDepth=2}id:doc_8") - , "/response/numFound==1" - ); - assertJQ(req(p, "q","{!graph from=${node_id} to=${edge_id} maxDepth=1}id:doc_1") - , "/response/numFound==2" - 
); - assertJQ(req(p, "q","{!graph from=${node_id} to=${edge_id} returnRoot=false maxDepth=1}id:doc_1") - , "/response/numFound==1" - ); + assertJQ( + req( + p, + "q", + "{!graph from=${node_id} to=${edge_id} returnRoot=true returnOnlyLeaf=false}id:doc_8"), + "/response/numFound==8"); + assertJQ( + req( + p, + "q", + "{!graph from=${node_id} to=${edge_id} returnRoot=false returnOnlyLeaf=false}id:doc_8"), + "/response/numFound==7"); + assertJQ( + req( + p, + "q", + "{!graph from=${node_id} to=${edge_id} returnRoot=true returnOnlyLeaf=false traversalFilter='text:foo11'}id:doc_8"), + "/response/numFound==2"); + assertJQ( + req( + p, + "q", + "{!graph from=${node_id} to=${edge_id} returnRoot=true returnOnlyLeaf=false maxDepth=0}id:doc_8"), + "/response/numFound==1"); + assertJQ( + req( + p, + "q", + "{!graph from=${node_id} to=${edge_id} returnRoot=true returnOnlyLeaf=false maxDepth=1}id:doc_8"), + "/response/numFound==3"); + assertJQ( + req( + p, + "q", + "{!graph from=${node_id} to=${edge_id} returnRoot=false returnOnlyLeaf=false maxDepth=1}id:doc_8"), + "/response/numFound==2"); + assertJQ( + req( + p, + "q", + "{!graph from=${node_id} to=${edge_id} returnRoot=false returnOnlyLeaf=true maxDepth=2}id:doc_8"), + "/response/numFound==1"); + assertJQ( + req(p, "q", "{!graph from=${node_id} to=${edge_id} maxDepth=1}id:doc_1"), + "/response/numFound==2"); + assertJQ( + req(p, "q", "{!graph from=${node_id} to=${edge_id} returnRoot=false maxDepth=1}id:doc_1"), + "/response/numFound==1"); } - + @Test public void testGraphQueryParserValidation() throws Exception { // from schema field existence - doGraphQuery( params("node_id","node_nothere", "edge_id","edge_ss", - "message", "field node_nothere not defined in schema", "errorCode", String.valueOf(SolrException.ErrorCode.BAD_REQUEST.code)) ); + doGraphQuery( + params( + "node_id", + "node_nothere", + "edge_id", + "edge_ss", + "message", + "field node_nothere not defined in schema", + "errorCode", + String.valueOf(SolrException.ErrorCode.BAD_REQUEST.code))); // to schema field existence - doGraphQuery( params("node_id","node_s", "edge_id","edge_notthere", - "message", "field node_nothere not defined in schema", "errorCode", String.valueOf(SolrException.ErrorCode.BAD_REQUEST.code)) ); + doGraphQuery( + params( + "node_id", + "node_s", + "edge_id", + "edge_notthere", + "message", + "field node_nothere not defined in schema", + "errorCode", + String.valueOf(SolrException.ErrorCode.BAD_REQUEST.code))); } - + public void doGraphQuery(SolrParams p) throws Exception { String message = p.get("message"); int errorCode = p.getInt("errorCode", SolrException.ErrorCode.UNKNOWN.code); - - assertQEx(message , req(p, "q","{!graph from=${node_id} to=${edge_id} returnRoot=false maxDepth=1}id:doc_1") - , errorCode - ); + + assertQEx( + message, + req(p, "q", "{!graph from=${node_id} to=${edge_id} returnRoot=false maxDepth=1}id:doc_1"), + errorCode); } } diff --git a/solr/core/src/test/org/apache/solr/search/join/InvalidConfigJoinQueryTest.java b/solr/core/src/test/org/apache/solr/search/join/InvalidConfigJoinQueryTest.java index d8021f7404d..38974b33952 100644 --- a/solr/core/src/test/org/apache/solr/search/join/InvalidConfigJoinQueryTest.java +++ b/solr/core/src/test/org/apache/solr/search/join/InvalidConfigJoinQueryTest.java @@ -42,6 +42,18 @@ public void testInvalidFilterConfig() throws Exception { SolrClient client = new EmbeddedSolrServer(h.getCore()); req.commit(client, null); - assertThrows(SolrException.class, () -> assertJQ(req("q", "{!join from=id to=locid_s 
v=$q1}", "q1", "type_s:loc", "fl", "id", "sort", "id asc"))); + assertThrows( + SolrException.class, + () -> + assertJQ( + req( + "q", + "{!join from=id to=locid_s v=$q1}", + "q1", + "type_s:loc", + "fl", + "id", + "sort", + "id asc"))); } } diff --git a/solr/core/src/test/org/apache/solr/search/join/TestCloudNestedDocsSort.java b/solr/core/src/test/org/apache/solr/search/join/TestCloudNestedDocsSort.java index f01e73d599e..d26bd762c82 100644 --- a/solr/core/src/test/org/apache/solr/search/join/TestCloudNestedDocsSort.java +++ b/solr/core/src/test/org/apache/solr/search/join/TestCloudNestedDocsSort.java @@ -24,7 +24,6 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Set; - import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -50,20 +49,18 @@ public class TestCloudNestedDocsSort extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { final int numVals = atLeast(10); - for (int i=0; i < numVals; i++) { - vals.add(""+Integer.toString(random().nextInt(1000000), Character.MAX_RADIX)); + for (int i = 0; i < numVals; i++) { + vals.add("" + Integer.toString(random().nextInt(1000000), Character.MAX_RADIX)); } - + final Path configDir = TEST_COLL1_CONF(); String configName = "solrCloudCollectionConfig"; int nodeCount = 5; - configureCluster(nodeCount) - .addConfig(configName, configDir) - .configure(); - + configureCluster(nodeCount).addConfig(configName, configDir).configure(); + int shards = 2; - int replicas = 2 ; + int replicas = 2; CollectionAdminRequest.createCollection("collection1", configName, shards, replicas) .withProperty("config", "solrconfig-minimal.xml") .withProperty("schema", "schema.xml") @@ -71,46 +68,59 @@ public static void setupCluster() throws Exception { client = cluster.getSolrClient(); client.setDefaultCollection("collection1"); - + ZkStateReader zkStateReader = client.getZkStateReader(); - AbstractDistribZkTestBase.waitForRecoveriesToFinish("collection1", zkStateReader, true, true, 30); - + AbstractDistribZkTestBase.waitForRecoveriesToFinish( + "collection1", zkStateReader, true, true, 30); + { int id = 42; final List docs = new ArrayList<>(); final int parentsNum = atLeast(20); - ; - for (int i=0; i parentFilter = addValsField(parent, "parentFilter_s"); final int kids = usually() ? 
atLeast(20) : 0; - for(int c = 0; c< kids; c++){ - SolrInputDocument child = new SolrInputDocument("id", ""+(id++), - "type_s", "child", - "parentTie_s1", parentTieVal, - "parent_id_s1", parentId); + for (int c = 0; c < kids; c++) { + SolrInputDocument child = + new SolrInputDocument( + "id", + "" + (id++), + "type_s", + "child", + "parentTie_s1", + parentTieVal, + "parent_id_s1", + parentId); child.addField("parentFilter_s", parentFilter); if (usually()) { - child.addField( "val_s1", Integer.toString(random().nextInt(1000), Character.MAX_RADIX)+"" ); + child.addField( + "val_s1", Integer.toString(random().nextInt(1000), Character.MAX_RADIX) + ""); } final List chVals = addValsField(child, "childFilter_s"); - parent.addChildDocument(child ); + parent.addChildDocument(child); // let's pickup at least matching child final boolean canPickMatchingChild = !chVals.isEmpty() && !parentFilter.isEmpty(); - final boolean haveNtPickedMatchingChild = matchingParent==null ||matchingChild==null; + final boolean haveNtPickedMatchingChild = matchingParent == null || matchingChild == null; if (canPickMatchingChild && haveNtPickedMatchingChild && usually()) { matchingParent = parentFilter.iterator().next(); matchingChild = chVals.iterator().next(); } } - maxDocs += parent.getChildDocumentCount()+1; + maxDocs += parent.getChildDocumentCount() + 1; docs.add(parent); } // don't add parents in increasing uniqueKey order @@ -125,65 +135,87 @@ public static void cleanUpAfterClass() throws Exception { client = null; } - @Test + @Test public void test() throws SolrServerException, IOException { final boolean asc = random().nextBoolean(); - final String dir = asc ? "asc": "desc"; - final String parentFilter = "+parentFilter_s:("+matchingParent+" "+anyValsSpaceDelim(2)+")^=0"; - String childFilter = "+childFilter_s:("+matchingChild+" "+anyValsSpaceDelim(4)+")^=0"; - final String fl = "id,type_s,parent_id_s1,val_s1,score,parentFilter_s,childFilter_s,parentTie_s1"; - String sortClause = "val_s1 "+dir+", "+"parent_id_s1 "+ascDesc(); - if(rarely()) { - sortClause ="parentTie_s1 "+ascDesc()+","+sortClause; + final String dir = asc ? "asc" : "desc"; + final String parentFilter = + "+parentFilter_s:(" + matchingParent + " " + anyValsSpaceDelim(2) + ")^=0"; + String childFilter = "+childFilter_s:(" + matchingChild + " " + anyValsSpaceDelim(4) + ")^=0"; + final String fl = + "id,type_s,parent_id_s1,val_s1,score,parentFilter_s,childFilter_s,parentTie_s1"; + String sortClause = "val_s1 " + dir + ", " + "parent_id_s1 " + ascDesc(); + if (rarely()) { + sortClause = "parentTie_s1 " + ascDesc() + "," + sortClause; } - final SolrQuery q = new SolrQuery("q", "+type_s:child^=0 "+parentFilter+" "+ - childFilter , - "sort", sortClause, - "rows", ""+maxDocs, - "fl",fl); + final SolrQuery q = + new SolrQuery( + "q", + "+type_s:child^=0 " + parentFilter + " " + childFilter, + "sort", + sortClause, + "rows", + "" + maxDocs, + "fl", + fl); final QueryResponse children = client.query(q); - - final SolrQuery bjq = random().nextBoolean() ? 
- new SolrQuery(// top level bjq - "q", "{!parent which=type_s:parent}(+type_s:child^=0 "+parentFilter+" "+ childFilter+")", - "sort", sortClause.replace("val_s1", "childfield(val_s1)"), - "rows", ""+maxDocs, "fl", fl) - : - new SolrQuery(// same bjq as a subordinate clause - "q", "+type_s:parent "+parentFilter+" +{!v=$parentcaluse}", - "parentcaluse","{!parent which=type_s:parent v='"+(childFilter).replace("+", "")+"'}", - "sort", sortClause.replace("val_s1", "childfield(val_s1,$parentcaluse)"), - "rows", ""+maxDocs, "fl", fl); + + final SolrQuery bjq = + random().nextBoolean() + ? new SolrQuery( // top level bjq + "q", + "{!parent which=type_s:parent}(+type_s:child^=0 " + + parentFilter + + " " + + childFilter + + ")", + "sort", + sortClause.replace("val_s1", "childfield(val_s1)"), + "rows", + "" + maxDocs, + "fl", + fl) + : new SolrQuery( // same bjq as a subordinate clause + "q", + "+type_s:parent " + parentFilter + " +{!v=$parentcaluse}", + "parentcaluse", + "{!parent which=type_s:parent v='" + (childFilter).replace("+", "") + "'}", + "sort", + sortClause.replace("val_s1", "childfield(val_s1,$parentcaluse)"), + "rows", + "" + maxDocs, + "fl", + fl); final QueryResponse parents = client.query(bjq); - + Set parentIds = new LinkedHashSet<>(); - assertTrue("it can never be empty for sure", parents.getResults().size()>0); - for(Iterator parentIter = parents.getResults().iterator(); parentIter.hasNext();) { + assertTrue("it can never be empty for sure", parents.getResults().size() > 0); + for (Iterator parentIter = parents.getResults().iterator(); + parentIter.hasNext(); ) { for (SolrDocument child : children.getResults()) { assertEquals("child", child.getFirstValue("type_s")); final String parentId = (String) child.getFirstValue("parent_id_s1"); - if( parentIds.add(parentId) ) { // in children the next parent appears, it should be next at parents + if (parentIds.add( + parentId)) { // in children the next parent appears, it should be next at parents final SolrDocument parent = parentIter.next(); assertEquals("parent", parent.getFirstValue("type_s")); - final String actParentId = ""+ parent.get("id"); + final String actParentId = "" + parent.get("id"); if (!actParentId.equals(parentId)) { - final String chDump = children.toString().replace("SolrDocument","\nSolrDocument"); - System.out.println("\n\n"+chDump+"\n\n"); - System.out.println("\n\n"+parents.toString().replace("SolrDocument","\nSolrDocument") - +"\n\n"); + final String chDump = children.toString().replace("SolrDocument", "\nSolrDocument"); + System.out.println("\n\n" + chDump + "\n\n"); + System.out.println( + "\n\n" + parents.toString().replace("SolrDocument", "\nSolrDocument") + "\n\n"); } - assertEquals(""+child+"\n"+parent,actParentId, parentId); + assertEquals("" + child + "\n" + parent, actParentId, parentId); } } } - - } private String ascDesc() { - return random().nextBoolean() ? "asc": "desc"; + return random().nextBoolean() ? 
"asc" : "desc"; } protected String anyValsSpaceDelim(int howMany) { @@ -193,10 +225,10 @@ protected String anyValsSpaceDelim(int howMany) { protected static List addValsField(final SolrInputDocument parent, final String field) { Collections.shuffle(vals, random()); - final ArrayList values = new ArrayList<>(vals.subList(0, 1+random().nextInt(vals.size()-1))); + final ArrayList values = + new ArrayList<>(vals.subList(0, 1 + random().nextInt(vals.size() - 1))); assertFalse(values.isEmpty()); parent.addField(field, values); return values; } - } diff --git a/solr/core/src/test/org/apache/solr/search/join/TestNestedDocsSort.java b/solr/core/src/test/org/apache/solr/search/join/TestNestedDocsSort.java index c224fde5586..448f8943bad 100644 --- a/solr/core/src/test/org/apache/solr/search/join/TestNestedDocsSort.java +++ b/solr/core/src/test/org/apache/solr/search/join/TestNestedDocsSort.java @@ -17,7 +17,6 @@ package org.apache.solr.search.join; import java.util.Map; - import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.solr.SolrTestCaseJ4; @@ -30,122 +29,126 @@ import org.junit.Test; public class TestNestedDocsSort extends SolrTestCaseJ4 { - - @BeforeClass - public static void beforeClass() throws Exception { - initCore("solrconfig.xml", "schema.xml"); - } - - public void testEquality(){ - parseAssertEq("childfield(name_s1,$q) asc", "childfield(name_s1,$q) asc"); - parseAssertEq("childfield(name_s1,$q) asc", "childfield(name_s1) asc"); - parseAssertEq("childfield(name_s1,$q) asc", "childfield(name_s1,) asc"); - - parseAssertNe("childfield(name_s1,$q) asc", "childfield(name_s1,$q) desc"); - parseAssertNe("childfield(name_s1,$q) asc", "childfield(surname_s1,$q) asc"); - parseAssertNe("childfield(name_s1,$q) asc", "childfield(surname_s1,$q2) desc"); - } - - public void testEqualityUpToBlockJoin(){ - parseAssertNe("childfield(name_s1,$q) asc","childfield(name_s1,$q2) asc"); - } - - @Test(expected=SolrException.class) - public void testNotBjqReference(){ - parse("childfield(name_s1,$notbjq) asc"); - } - - // root cause is swallowed, but it's logged there. 
- @Test(expected=SolrException.class) - public void testOmitFieldWithComma(){ - parse("childfield(,$q) asc"); - } - @Test(expected=SolrException.class) - public void testOmitField(){ - parse("childfield($q) asc"); - } - @Test(expected=SolrException.class) - public void testForgetEverything(){ - parse("childfield() asc"); - } - - @Test(expected=SolrException.class) - public void testEvenBraces(){ - parse("childfield asc"); - } - - @Test(expected=SolrException.class) - public void testAbsentField(){ - parse("childfield(NEVER_SEEN_IT,$q) asc"); - } - - @Test(expected=SolrException.class) - public void testOmitOrder(){ - parse("childfield(name_s1,$q)"); - } - - @Test - public void testOmitSpaceinFrontOfOrd(){ - parseAssertEq("childfield(name_s1,$q)asc", "childfield(name_s1,$q) asc"); - } - - private void parseAssertEq(String sortField, String sortField2) { - assertEq(parse(sortField), parse(sortField2)); - } - - private void assertEq(SortField sortField, SortField sortField2) { - assertEquals(sortField, sortField2); - assertEquals(sortField.hashCode(), sortField2.hashCode()); - } - - private void parseAssertNe(String sortField, String sortField2) { - assertFalse(parse(sortField).equals(parse(sortField2))); - } + @BeforeClass + public static void beforeClass() throws Exception { + initCore("solrconfig.xml", "schema.xml"); + } - private SortField parse(String a) { - final SolrQueryRequest req = req("q", "{!parent which=type_s1:parent}whatever_s1:foo", - "q2", "{!parent which=type_s1:parent}nomater_s1:what", - "notbjq", "foo_s1:bar"); - try { - final SortSpec spec = SortSpecParsing.parseSortSpec(a, - req); - assertNull(spec.getSchemaFields().get(0)); - final Sort sort = spec.getSort(); - final SortField field = sort.getSort()[0]; - assertNotNull(field); - return field; - } finally { - req.close(); - } - } + public void testEquality() { + parseAssertEq("childfield(name_s1,$q) asc", "childfield(name_s1,$q) asc"); + parseAssertEq("childfield(name_s1,$q) asc", "childfield(name_s1) asc"); + parseAssertEq("childfield(name_s1,$q) asc", "childfield(name_s1,) asc"); - public void testCachehits(){ - final SolrQueryRequest req = req(); - try { - @SuppressWarnings({"rawtypes"}) - final SolrCache cache = req.getSearcher().getCache("perSegFilter"); - assertNotNull(cache); - final Map state = cache.getSolrMetricsContext().getMetricsSnapshot(); - String lookupsKey = null; - for(String key : state.keySet()){ - if(key.endsWith(".lookups")) { - lookupsKey = key; - break; - } + parseAssertNe("childfield(name_s1,$q) asc", "childfield(name_s1,$q) desc"); + parseAssertNe("childfield(name_s1,$q) asc", "childfield(surname_s1,$q) asc"); + parseAssertNe("childfield(name_s1,$q) asc", "childfield(surname_s1,$q2) desc"); + } + + public void testEqualityUpToBlockJoin() { + parseAssertNe("childfield(name_s1,$q) asc", "childfield(name_s1,$q2) asc"); + } + + @Test(expected = SolrException.class) + public void testNotBjqReference() { + parse("childfield(name_s1,$notbjq) asc"); + } + + // root cause is swallowed, but it's logged there. 
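// Aside (not part of the patch): a minimal sketch of the same negative test written
// with JUnit 4.13's assertThrows, which this commit already uses elsewhere. Unlike
// @Test(expected = ...), it hands back the SolrException for inspection. The method
// name below is hypothetical, added only for illustration.
//
import static org.junit.Assert.assertThrows;

@Test
public void testOmitFieldWithCommaAssertThrows() {
  SolrException e = assertThrows(SolrException.class, () -> parse("childfield(,$q) asc"));
  // the parser swallows the root cause, so only the SolrException surfaces here
  assertTrue(e.code() >= 400);
}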
+ @Test(expected = SolrException.class) + public void testOmitFieldWithComma() { + parse("childfield(,$q) asc"); + } + + @Test(expected = SolrException.class) + public void testOmitField() { + parse("childfield($q) asc"); + } + + @Test(expected = SolrException.class) + public void testForgetEverything() { + parse("childfield() asc"); + } + + @Test(expected = SolrException.class) + public void testEvenBraces() { + parse("childfield asc"); + } + + @Test(expected = SolrException.class) + public void testAbsentField() { + parse("childfield(NEVER_SEEN_IT,$q) asc"); + } + + @Test(expected = SolrException.class) + public void testOmitOrder() { + parse("childfield(name_s1,$q)"); + } + + @Test + public void testOmitSpaceinFrontOfOrd() { + parseAssertEq("childfield(name_s1,$q)asc", "childfield(name_s1,$q) asc"); + } + + private void parseAssertEq(String sortField, String sortField2) { + assertEq(parse(sortField), parse(sortField2)); + } + + private void assertEq(SortField sortField, SortField sortField2) { + assertEquals(sortField, sortField2); + assertEquals(sortField.hashCode(), sortField2.hashCode()); + } + + private void parseAssertNe(String sortField, String sortField2) { + assertFalse(parse(sortField).equals(parse(sortField2))); + } + + private SortField parse(String a) { + final SolrQueryRequest req = + req( + "q", + "{!parent which=type_s1:parent}whatever_s1:foo", + "q2", + "{!parent which=type_s1:parent}nomater_s1:what", + "notbjq", + "foo_s1:bar"); + try { + final SortSpec spec = SortSpecParsing.parseSortSpec(a, req); + assertNull(spec.getSchemaFields().get(0)); + final Sort sort = spec.getSort(); + final SortField field = sort.getSort()[0]; + assertNotNull(field); + return field; + } finally { + req.close(); + } + } + + public void testCachehits() { + final SolrQueryRequest req = req(); + try { + @SuppressWarnings({"rawtypes"}) + final SolrCache cache = req.getSearcher().getCache("perSegFilter"); + assertNotNull(cache); + final Map state = cache.getSolrMetricsContext().getMetricsSnapshot(); + String lookupsKey = null; + for (String key : state.keySet()) { + if (key.endsWith(".lookups")) { + lookupsKey = key; + break; } - Number before = (Number) state.get(lookupsKey); - parse("childfield(name_s1,$q) asc"); - Number after = (Number) cache.getSolrMetricsContext().getMetricsSnapshot().get(lookupsKey); - assertEquals("parsing bjq lookups parent filter," - + "parsing sort spec lookups parent and child filters, " - + "hopefully for the purpose",3, after.intValue()-before.intValue()); - } finally { - req.close(); } - } - - - - + Number before = (Number) state.get(lookupsKey); + parse("childfield(name_s1,$q) asc"); + Number after = (Number) cache.getSolrMetricsContext().getMetricsSnapshot().get(lookupsKey); + assertEquals( + "parsing bjq lookups parent filter," + + "parsing sort spec lookups parent and child filters, " + + "hopefully for the purpose", + 3, + after.intValue() - before.intValue()); + } finally { + req.close(); + } + } } diff --git a/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPNoScore.java b/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPNoScore.java index 7685391fc53..7bfce58210e 100644 --- a/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPNoScore.java +++ b/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPNoScore.java @@ -16,6 +16,8 @@ */ package org.apache.solr.search.join; +import static org.apache.solr.common.util.Utils.toJSONString; + import java.io.IOException; import java.lang.invoke.MethodHandles; import 
java.util.ArrayList; @@ -27,7 +29,6 @@ import java.util.List; import java.util.Map; import java.util.Set; - import org.apache.lucene.search.Query; import org.apache.lucene.search.join.ScoreMode; import org.apache.solr.JSONTestUtil; @@ -45,8 +46,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.common.util.Utils.toJSONString; - public class TestScoreJoinQPNoScore extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -55,147 +54,191 @@ public class TestScoreJoinQPNoScore extends SolrTestCaseJ4 { public static void beforeTests() throws Exception { System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_ System.setProperty("solr.filterCache.async", "true"); - initCore("solrconfig-basic.xml","schema-docValuesJoin.xml"); + initCore("solrconfig-basic.xml", "schema-docValuesJoin.xml"); } @Test public void testJoin() throws Exception { - assertU(add(doc("id", "1","name_s", "john", "title_s", "Director", "dept_ss","Engineering"))); - assertU(add(doc("id", "2","name_s", "mark", "title_s", "VP", "dept_ss","Marketing"))); - assertU(add(doc("id", "3","name_s", "nancy", "title_s", "MTS", "dept_ss","Sales"))); - assertU(add(doc("id", "4","name_s", "dave", "title_s", "MTS", "dept_ss","Support", "dept_ss","Engineering"))); - assertU(add(doc("id", "5","name_s", "tina", "title_s", "VP", "dept_ss","Engineering"))); - - assertU(add(doc("id","10", "dept_id_s", "Engineering", "text_t","These guys develop stuff"))); - assertU(add(doc("id","11", "dept_id_s", "Marketing", "text_t","These guys make you look good"))); - assertU(add(doc("id","12", "dept_id_s", "Sales", "text_t","These guys sell stuff"))); - assertU(add(doc("id","13", "dept_id_s", "Support", "text_t","These guys help customers"))); + assertU(add(doc("id", "1", "name_s", "john", "title_s", "Director", "dept_ss", "Engineering"))); + assertU(add(doc("id", "2", "name_s", "mark", "title_s", "VP", "dept_ss", "Marketing"))); + assertU(add(doc("id", "3", "name_s", "nancy", "title_s", "MTS", "dept_ss", "Sales"))); + assertU( + add( + doc( + "id", + "4", + "name_s", + "dave", + "title_s", + "MTS", + "dept_ss", + "Support", + "dept_ss", + "Engineering"))); + assertU(add(doc("id", "5", "name_s", "tina", "title_s", "VP", "dept_ss", "Engineering"))); + + assertU(add(doc("id", "10", "dept_id_s", "Engineering", "text_t", "These guys develop stuff"))); + assertU( + add(doc("id", "11", "dept_id_s", "Marketing", "text_t", "These guys make you look good"))); + assertU(add(doc("id", "12", "dept_id_s", "Sales", "text_t", "These guys sell stuff"))); + assertU(add(doc("id", "13", "dept_id_s", "Support", "text_t", "These guys help customers"))); assertU(commit()); // test debugging TODO no debug in JoinUtil - // assertJQ(req("q","{!join from=dept_ss to=dept_id_s"+whateverScore()+"}title_s:MTS", "fl","id", "debugQuery","true") - // ,"/debug/join/{!join from=dept_ss to=dept_id_s"+whateverScore()+"}title_s:MTS=={'_MATCH_':'fromSetSize,toSetSize', 'fromSetSize':2, 'toSetSize':3}" - // ); + // assertJQ(req("q","{!join from=dept_ss to=dept_id_s"+whateverScore()+"}title_s:MTS", + // "fl","id", "debugQuery","true") + // ,"/debug/join/{!join from=dept_ss + // to=dept_id_s"+whateverScore()+"}title_s:MTS=={'_MATCH_':'fromSetSize,toSetSize', + // 'fromSetSize':2, 'toSetSize':3}" + // ); - assertJQ(req("q","{!join from=dept_ss to=dept_id_s"+whateverScore()+"}title_s:MTS", "fl","id") - 
,"/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}" - ); + assertJQ( + req("q", "{!join from=dept_ss to=dept_id_s" + whateverScore() + "}title_s:MTS", "fl", "id"), + "/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}"); // empty from - assertJQ(req("q","{!join from=noexist_s to=dept_id_s"+whateverScore()+"}*:*", "fl","id") - ,"/response=={'numFound':0,'start':0,'numFoundExact':true,'docs':[]}" - ); + assertJQ( + req("q", "{!join from=noexist_s to=dept_id_s" + whateverScore() + "}*:*", "fl", "id"), + "/response=={'numFound':0,'start':0,'numFoundExact':true,'docs':[]}"); // empty to - assertJQ(req("q","{!join from=dept_ss to=noexist_s"+whateverScore()+"}*:*", "fl","id") - ,"/response=={'numFound':0,'start':0,'numFoundExact':true,'docs':[]}" - ); + assertJQ( + req("q", "{!join from=dept_ss to=noexist_s" + whateverScore() + "}*:*", "fl", "id"), + "/response=={'numFound':0,'start':0,'numFoundExact':true,'docs':[]}"); // self join... return everyone with she same title as Dave - assertJQ(req("q","{!join from=title_s to=title_s"+whateverScore()+"}name_s:dave", "fl","id") - ,"/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'3'},{'id':'4'}]}" - ); + assertJQ( + req("q", "{!join from=title_s to=title_s" + whateverScore() + "}name_s:dave", "fl", "id"), + "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'3'},{'id':'4'}]}"); // find people that develop stuff - assertJQ(req("q","{!join from=dept_id_s to=dept_ss"+whateverScore()+"}text_t:develop", "fl","id") - ,"/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}" - ); + assertJQ( + req( + "q", + "{!join from=dept_id_s to=dept_ss" + whateverScore() + "}text_t:develop", + "fl", + "id"), + "/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}"); // self join on multivalued text_t field - assertJQ(req("q","{!join from=title_s to=title_s"+whateverScore()+"}name_s:dave", "fl","id") - ,"/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'3'},{'id':'4'}]}" - ); - - assertJQ(req("q","{!join from=dept_ss to=dept_id_s"+whateverScore()+"}title_s:MTS", "fl","id", "debugQuery","true") - ,"/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}" - ); - + assertJQ( + req("q", "{!join from=title_s to=title_s" + whateverScore() + "}name_s:dave", "fl", "id"), + "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'3'},{'id':'4'}]}"); + + assertJQ( + req( + "q", + "{!join from=dept_ss to=dept_id_s" + whateverScore() + "}title_s:MTS", + "fl", + "id", + "debugQuery", + "true"), + "/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}"); + // expected outcome for a sub query matching dave joined against departments - final String davesDepartments = - "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'13'}]}"; + final String davesDepartments = + "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'13'}]}"; // straight forward query - assertJQ(req("q","{!join from=dept_ss to=dept_id_s"+whateverScore()+"}name_s:dave", - "fl","id"), - davesDepartments); + assertJQ( + req("q", "{!join from=dept_ss to=dept_id_s" + whateverScore() + "}name_s:dave", "fl", "id"), + davesDepartments); // variable deref for sub-query parsing - assertJQ(req("q","{!join 
from=dept_ss to=dept_id_s v=$qq"+whateverScore()+"}", - "qq","{!dismax}dave", - "qf","name_s", - "fl","id", - "debugQuery","true"), - davesDepartments); + assertJQ( + req( + "q", "{!join from=dept_ss to=dept_id_s v=$qq" + whateverScore() + "}", + "qq", "{!dismax}dave", + "qf", "name_s", + "fl", "id", + "debugQuery", "true"), + davesDepartments); // variable deref for sub-query parsing w/localparams - assertJQ(req("q","{!join from=dept_ss to=dept_id_s v=$qq"+whateverScore()+"}", - "qq","{!dismax qf=name_s}dave", - "fl","id", - "debugQuery","true"), - davesDepartments); + assertJQ( + req( + "q", "{!join from=dept_ss to=dept_id_s v=$qq" + whateverScore() + "}", + "qq", "{!dismax qf=name_s}dave", + "fl", "id", + "debugQuery", "true"), + davesDepartments); // defType local param to control sub-query parsing - assertJQ(req("q","{!join from=dept_ss to=dept_id_s defType=dismax"+whateverScore()+"}dave", - "qf","name_s", - "fl","id", - "debugQuery","true"), - davesDepartments); + assertJQ( + req( + "q", "{!join from=dept_ss to=dept_id_s defType=dismax" + whateverScore() + "}dave", + "qf", "name_s", + "fl", "id", + "debugQuery", "true"), + davesDepartments); // find people that develop stuff - but limit via filter query to a name of "john" // this tests filters being pushed down to queries (SOLR-3062) - assertJQ(req("q","{!join from=dept_id_s to=dept_ss"+whateverScore()+"}text_t:develop", "fl","id", "fq", "name_s:john") - ,"/response=={'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'1'}]}" - ); - - - assertJQ(req("q","{!join from=dept_ss to=dept_id_s"+whateverScore()+"}title_s:MTS", "fl","id" - ) - ,"/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}"); - - // find people that develop stuff, even if it's requested as single value - assertJQ(req("q","{!join from=dept_id_s to=dept_ss"+whateverScore()+"}text_t:develop", "fl","id") - ,"/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}" - ); - + assertJQ( + req( + "q", + "{!join from=dept_id_s to=dept_ss" + whateverScore() + "}text_t:develop", + "fl", + "id", + "fq", + "name_s:john"), + "/response=={'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'1'}]}"); + + assertJQ( + req("q", "{!join from=dept_ss to=dept_id_s" + whateverScore() + "}title_s:MTS", "fl", "id"), + "/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}"); + + // find people that develop stuff, even if it's requested as single value + assertJQ( + req( + "q", + "{!join from=dept_id_s to=dept_ss" + whateverScore() + "}text_t:develop", + "fl", + "id"), + "/response=={'numFound':3,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}"); } - public void testNotEquals() throws SyntaxError, IOException{ + public void testNotEquals() throws SyntaxError, IOException { try (SolrQueryRequest req = req("*:*")) { - Query x = QParser.getParser("{!join from=dept_id_s to=dept_ss score=none}text_t:develop", req).getQuery(); - Query y = QParser.getParser("{!join from=dept_ss to=dept_ss score=none}text_t:develop", req).getQuery(); - assertFalse("diff from fields produce equal queries", - x.equals(y)); + Query x = + QParser.getParser("{!join from=dept_id_s to=dept_ss score=none}text_t:develop", req) + .getQuery(); + Query y = + QParser.getParser("{!join from=dept_ss to=dept_ss score=none}text_t:develop", req) + .getQuery(); + assertFalse("diff from fields produce equal queries", x.equals(y)); } } - - public void 
testJoinQueryType() throws SyntaxError, IOException{ + + public void testJoinQueryType() throws SyntaxError, IOException { SolrQueryRequest req = null; - try{ + try { final String score = whateverScore(); - - req = req("{!join from=dept_id_s to=dept_ss"+score+"}text_t:develop"); + + req = req("{!join from=dept_id_s to=dept_ss" + score + "}text_t:develop"); SolrQueryResponse rsp = new SolrQueryResponse(); SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp)); - + { final Query query = QParser.getParser(req.getParams().get("q"), req).getQuery(); final Query rewrittenQuery = query.rewrite(req.getSearcher().getIndexReader()); - assertEquals(rewrittenQuery+" is expected to be from Solr", - ScoreJoinQParserPlugin.class.getPackage().getName(), + assertEquals( + rewrittenQuery + " is expected to be from Solr", + ScoreJoinQParserPlugin.class.getPackage().getName(), rewrittenQuery.getClass().getPackage().getName()); } { - final Query query = QParser.getParser( - "{!join from=dept_id_s to=dept_ss}text_t:develop" - , req).getQuery(); + final Query query = + QParser.getParser("{!join from=dept_id_s to=dept_ss}text_t:develop", req).getQuery(); final Query rewrittenQuery = query.rewrite(req.getSearcher().getIndexReader()); - assertEquals(rewrittenQuery+" is expected to be from Solr", - JoinQParserPlugin.class.getPackage().getName(), - rewrittenQuery.getClass().getPackage().getName()); + assertEquals( + rewrittenQuery + " is expected to be from Solr", + JoinQParserPlugin.class.getPackage().getName(), + rewrittenQuery.getClass().getPackage().getName()); } - }finally{ - if(req!=null){ + } finally { + if (req != null) { req.close(); } SolrRequestInfo.clearRequestInfo(); @@ -203,55 +246,64 @@ public void testJoinQueryType() throws SyntaxError, IOException{ } public static String whateverScore() { - final ScoreMode[] vals = ScoreMode.values(); - return " score="+vals[random().nextInt(vals.length)]+" "; + final ScoreMode[] vals = ScoreMode.values(); + return " score=" + vals[random().nextInt(vals.length)] + " "; } @SuppressWarnings({"rawtypes", "unchecked"}) @Test public void testRandomJoin() throws Exception { - int indexIter=50 * RANDOM_MULTIPLIER; - int queryIter=50 * RANDOM_MULTIPLIER; + int indexIter = 50 * RANDOM_MULTIPLIER; + int queryIter = 50 * RANDOM_MULTIPLIER; // groups of fields that have any chance of matching... used to // increase test effectiveness by avoiding 0 resultsets much of the time. 
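// Aside (not part of the patch): the "compat" groups declared just below pair fields
// that can plausibly hold the same values, so a randomly drawn from/to combination
// rarely produces an empty join result. A self-contained sketch of that selection
// idea, with java.util.Random standing in for the test framework's random(); the
// class and method names here are hypothetical.
import java.util.Random;

class CompatFieldPicker {
  private static final String[][] COMPAT = {
    {"small_s_dv", "small2_s_dv", "small2_ss_dv", "small3_ss_dv"},
    {"small_i_dv", "small2_i_dv", "small2_is_dv", "small3_is_dv"}
  };

  /** Returns a {from, to} field pair drawn from the same compatibility group. */
  static String[] pickJoinFields(Random random) {
    String[] group = COMPAT[random.nextInt(COMPAT.length)];
    String from = group[random.nextInt(group.length)];
    String to = group[random.nextInt(group.length)]; // from == to is fine: a self-join
    return new String[] {from, to};
  }
}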
- String[][] compat = new String[][] { - {"small_s_dv","small2_s_dv","small2_ss_dv","small3_ss_dv"}, - {"small_i_dv","small2_i_dv","small2_is_dv","small3_is_dv"} - }; - + String[][] compat = + new String[][] { + {"small_s_dv", "small2_s_dv", "small2_ss_dv", "small3_ss_dv"}, + {"small_i_dv", "small2_i_dv", "small2_is_dv", "small3_is_dv"} + }; while (--indexIter >= 0) { int indexSize = random().nextInt(20 * RANDOM_MULTIPLIER); List types = new ArrayList(); - types.add(new FldType("id",ONE_ONE, new SVal('A','Z',4,4))); - /** no numeric fields so far LUCENE-5868 - types.add(new FldType("score_f_dv",ONE_ONE, new FVal(1,100))); // field used to score - **/ - types.add(new FldType("small_s_dv",ZERO_ONE, new SVal('a',(char)('c'+indexSize/3),1,1))); - types.add(new FldType("small2_s_dv",ZERO_ONE, new SVal('a',(char)('c'+indexSize/3),1,1))); - types.add(new FldType("small2_ss_dv",ZERO_TWO, new SVal('a',(char)('c'+indexSize/3),1,1))); - types.add(new FldType("small3_ss_dv",new IRange(0,25), new SVal('A','z',1,1))); - /** no numeric fields so far LUCENE-5868 - types.add(new FldType("small_i_dv",ZERO_ONE, new IRange(0,5+indexSize/3))); - types.add(new FldType("small2_i_dv",ZERO_ONE, new IRange(0,5+indexSize/3))); - types.add(new FldType("small2_is_dv",ZERO_TWO, new IRange(0,5+indexSize/3))); - types.add(new FldType("small3_is_dv",new IRange(0,25), new IRange(0,100))); - **/ - + types.add(new FldType("id", ONE_ONE, new SVal('A', 'Z', 4, 4))); + /** + * no numeric fields so far LUCENE-5868 types.add(new FldType("score_f_dv",ONE_ONE, new + * FVal(1,100))); // field used to score + */ + types.add( + new FldType("small_s_dv", ZERO_ONE, new SVal('a', (char) ('c' + indexSize / 3), 1, 1))); + types.add( + new FldType("small2_s_dv", ZERO_ONE, new SVal('a', (char) ('c' + indexSize / 3), 1, 1))); + types.add( + new FldType("small2_ss_dv", ZERO_TWO, new SVal('a', (char) ('c' + indexSize / 3), 1, 1))); + types.add(new FldType("small3_ss_dv", new IRange(0, 25), new SVal('A', 'z', 1, 1))); + /** + * no numeric fields so far LUCENE-5868 types.add(new FldType("small_i_dv",ZERO_ONE, new + * IRange(0,5+indexSize/3))); types.add(new FldType("small2_i_dv",ZERO_ONE, new + * IRange(0,5+indexSize/3))); types.add(new FldType("small2_is_dv",ZERO_TWO, new + * IRange(0,5+indexSize/3))); types.add(new FldType("small3_is_dv",new IRange(0,25), new + * IRange(0,100))); + */ clearIndex(); Map model = indexDocs(types, null, indexSize); - Map>> pivots = new HashMap>>(); + Map>> pivots = + new HashMap>>(); - for (int qiter=0; qiter> pivot = pivots.get(fromField+"/"+toField); + Map> pivot = pivots.get(fromField + "/" + toField); if (pivot == null) { pivot = createJoinMap(model, fromField, toField); - pivots.put(fromField+"/"+toField, pivot); + pivots.put(fromField + "/" + toField, pivot); } Collection fromDocs = model.values(); Set docs = join(fromDocs, pivot); List docList = new ArrayList(docs.size()); for (Comparable id : docs) docList.add(model.get(id)); - Collections.sort(docList, createComparator("_docid_",true,false,false,false)); + Collections.sort(docList, createComparator("_docid_", true, false, false, false)); List sortedDocs = new ArrayList(); for (Doc doc : docList) { if (sortedDocs.size() >= 10) break; sortedDocs.add(doc.toObject(h.getCore().getLatestSchema())); } - Map resultSet = new LinkedHashMap(); + Map resultSet = new LinkedHashMap(); resultSet.put("numFound", docList.size()); resultSet.put("start", 0); resultSet.put("numFoundExact", true); @@ -284,58 +336,88 @@ public void testRandomJoin() throws Exception { // todo: 
use different join queries for better coverage - SolrQueryRequest req = req("wt","json","indent","true", "echoParams","all", - "q","{!join from="+fromField+" to="+toField - +" "+ (random().nextBoolean() ? "fromIndex=collection1" : "") - +" "+ (random().nextBoolean() ? "TESTenforceSameCoreAsAnotherOne=true" : "") - +" "+whateverScore()+"}*:*" - , "sort", "_docid_ asc" - ); + SolrQueryRequest req = + req( + "wt", + "json", + "indent", + "true", + "echoParams", + "all", + "q", + "{!join from=" + + fromField + + " to=" + + toField + + " " + + (random().nextBoolean() ? "fromIndex=collection1" : "") + + " " + + (random().nextBoolean() ? "TESTenforceSameCoreAsAnotherOne=true" : "") + + " " + + whateverScore() + + "}*:*", + "sort", + "_docid_ asc"); String strResponse = h.query(req); Object realResponse = Utils.fromJSONString(strResponse); String err = JSONTestUtil.matchObj("/response", realResponse, resultSet); if (err != null) { - final String m = "JOIN MISMATCH: " + err - + "\n\trequest="+req - + "\n\tresult="+strResponse - + "\n\texpected="+ toJSONString(resultSet) - ;// + "\n\tmodel="+ JSONUtil.toJSON(model); + final String m = + "JOIN MISMATCH: " + + err + + "\n\trequest=" + + req + + "\n\tresult=" + + strResponse + + "\n\texpected=" + + toJSONString(resultSet); // + "\n\tmodel="+ JSONUtil.toJSON(model); log.error(m); { - SolrQueryRequest f = req("wt","json","indent","true", "echoParams","all", - "q","*:*", "facet","true", - "facet.field", fromField - , "sort", "_docid_ asc" - ,"rows","0" - ); + SolrQueryRequest f = + req( + "wt", + "json", + "indent", + "true", + "echoParams", + "all", + "q", + "*:*", + "facet", + "true", + "facet.field", + fromField, + "sort", + "_docid_ asc", + "rows", + "0"); log.error("faceting on from field: {}", h.query(f)); } { - final Map ps = ((MapSolrParams)req.getParams()).getMap(); + final Map ps = ((MapSolrParams) req.getParams()).getMap(); final String q = ps.get("q"); ps.put("q", q.replaceAll("join score=none", "join")); log.error("plain join: {}", h.query(req)); ps.put("q", q); - } { - // re-execute the request... good for putting a breakpoint here for debugging - final Map ps = ((MapSolrParams)req.getParams()).getMap(); - final String q = ps.get("q"); - ps.put("q", q.replaceAll("\\}", " cache=false\\}")); - h.query(req); + // re-execute the request... 
good for putting a breakpoint here for debugging + final Map ps = ((MapSolrParams) req.getParams()).getMap(); + final String q = ps.get("q"); + ps.put("q", q.replaceAll("\\}", " cache=false\\}")); + h.query(req); } fail(err); } - } } } @SuppressWarnings("rawtypes") - Map> createJoinMap(Map model, String fromField, String toField) { + Map> createJoinMap( + Map model, String fromField, String toField) { Map> id_to_id = new HashMap>(); Map> value_to_id = invertField(model, toField); @@ -352,15 +434,13 @@ Map> createJoinMap(Map model, Strin ids = new HashSet(); id_to_id.put(fromId, ids); } - for (Comparable toId : toIds) - ids.add(toId); + for (Comparable toId : toIds) ids.add(toId); } } return id_to_id; } - @SuppressWarnings("rawtypes") Set join(Collection input, Map> joinMap) { Set ids = new HashSet(); @@ -371,5 +451,4 @@ Set join(Collection input, Map> joi } return ids; } - } diff --git a/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPScore.java b/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPScore.java index 865888e21d3..9ef3b75635a 100644 --- a/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPScore.java +++ b/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPScore.java @@ -16,6 +16,7 @@ */ package org.apache.solr.search.join; +import com.codahale.metrics.Metric; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; @@ -23,8 +24,6 @@ import java.util.Locale; import java.util.Map; import java.util.Random; - -import com.codahale.metrics.Metric; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.join.ScoreMode; @@ -59,38 +58,31 @@ public void testSimple() throws Exception { clearIndex(); // 0 - assertU(add(doc("t_description", "random text", - "name", "name1", - idField, "1"))); - -// 1 - - assertU(add(doc("price_s", "10.0", - idField, "2", - toField, "1"))); -// 2 - assertU(add(doc("price_s", "20.0", - idField, "3", - toField, "1"))); -// 3 - assertU(add(doc("t_description", "more random text", - "name", "name2", - idField, "4"))); -// 4 - assertU(add(doc("price_s", "10.0", - idField, "5", - toField, "4"))); -// 5 - assertU(add(doc("price_s", "20.0", - idField, "6", - toField, "4"))); + assertU(add(doc("t_description", "random text", "name", "name1", idField, "1"))); + + // 1 + + assertU(add(doc("price_s", "10.0", idField, "2", toField, "1"))); + // 2 + assertU(add(doc("price_s", "20.0", idField, "3", toField, "1"))); + // 3 + assertU(add(doc("t_description", "more random text", "name", "name2", idField, "4"))); + // 4 + assertU(add(doc("price_s", "10.0", idField, "5", toField, "4"))); + // 5 + assertU(add(doc("price_s", "20.0", idField, "6", toField, "4"))); assertU(commit()); // Search for product - assertJQ(req("q", "{!join from=" + idField + " to=" + toField + " score=None}name:name2", "fl", "id") - , "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'5'},{'id':'6'}]}"); - + assertJQ( + req( + "q", + "{!join from=" + idField + " to=" + toField + " score=None}name:name2", + "fl", + "id"), + "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'5'},{'id':'6'}]}"); + /*Query joinQuery = JoinUtil.createJoinQuery(idField, false, toField, new TermQuery(new Term("name", "name2")), indexSearcher, ScoreMode.None); @@ -99,8 +91,13 @@ public void testSimple() throws Exception { assertEquals(4, result.scoreDocs[0].doc); assertEquals(5, result.scoreDocs[1].doc); */ - assertJQ(req("q", "{!join from=" + idField + " 
to=" + toField + " score=None}name:name1", "fl", "id") - , "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'2'},{'id':'3'}]}"); + assertJQ( + req( + "q", + "{!join from=" + idField + " to=" + toField + " score=None}name:name1", + "fl", + "id"), + "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'2'},{'id':'3'}]}"); /*joinQuery = JoinUtil.createJoinQuery(idField, false, toField, new TermQuery(new Term("name", "name1")), indexSearcher, ScoreMode.None); result = indexSearcher.search(joinQuery, 10); @@ -109,8 +106,9 @@ public void testSimple() throws Exception { assertEquals(2, result.scoreDocs[1].doc);*/ // Search for offer - assertJQ(req("q", "{!join from=" + toField + " to=" + idField + " score=None}id:5", "fl", "id") - , "/response=={'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'4'}]}"); + assertJQ( + req("q", "{!join from=" + toField + " to=" + idField + " score=None}id:5", "fl", "id"), + "/response=={'numFound':1,'start':0,'numFoundExact':true,'docs':[{'id':'4'}]}"); /*joinQuery = JoinUtil.createJoinQuery(toField, false, idField, new TermQuery(new Term("id", "5")), indexSearcher, ScoreMode.None); result = indexSearcher.search(joinQuery, 10); assertEquals(1, result.totalHits); @@ -123,19 +121,29 @@ public void testSimple() throws Exception { public void testDeleteByScoreJoinQuery() throws Exception { indexDataForScorring(); String joinQuery = "{!join from=" + toField + " to=" + idField + " score=Max}title:random"; - assertJQ(req("q", joinQuery, "fl", "id"), "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'}]}"); + assertJQ( + req("q", joinQuery, "fl", "id"), + "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'}]}"); assertU(delQ(joinQuery)); assertU(commit()); - assertJQ(req("q", joinQuery, "fl", "id"), "/response=={'numFound':0,'start':0,'numFoundExact':true,'docs':[]}"); + assertJQ( + req("q", joinQuery, "fl", "id"), + "/response=={'numFound':0,'start':0,'numFoundExact':true,'docs':[]}"); } public void testSimpleWithScoring() throws Exception { indexDataForScorring(); // Search for movie via subtitle - assertJQ(req("q", "{!join from=" + toField + " to=" + idField + " score=Max}title:random", "fl", "id") - , "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'}]}"); - //dump(req("q","{!scorejoin from="+toField+" to="+idField+" score=Max}title:random", "fl","id,score", "debug", "true")); + assertJQ( + req( + "q", + "{!join from=" + toField + " to=" + idField + " score=Max}title:random", + "fl", + "id"), + "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'}]}"); + // dump(req("q","{!scorejoin from="+toField+" to="+idField+" score=Max}title:random", + // "fl","id,score", "debug", "true")); /* Query joinQuery = JoinUtil.createJoinQuery(toField, false, idField, new TermQuery(new Term("title", "random")), indexSearcher, ScoreMode.Max); @@ -144,14 +152,19 @@ public void testSimpleWithScoring() throws Exception { assertEquals(0, result.scoreDocs[0].doc); assertEquals(3, result.scoreDocs[1].doc);*/ - // Score mode max. 
- //dump(req("q","{!scorejoin from="+toField+" to="+idField+" score=Max}title:movie", "fl","id,score", "debug", "true")); + // dump(req("q","{!scorejoin from="+toField+" to="+idField+" score=Max}title:movie", + // "fl","id,score", "debug", "true")); // dump(req("q","title:movie", "fl","id,score", "debug", "true")); - assertJQ(req("q", "{!join from=" + toField + " to=" + idField + " score=Max}title:movie", "fl", "id") - , "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'4'},{'id':'1'}]}"); - + assertJQ( + req( + "q", + "{!join from=" + toField + " to=" + idField + " score=Max}title:movie", + "fl", + "id"), + "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'4'},{'id':'1'}]}"); + /*joinQuery = JoinUtil.createJoinQuery(toField, false, idField, new TermQuery(new Term("title", "movie")), indexSearcher, ScoreMode.Max); result = indexSearcher.search(joinQuery, 10); assertEquals(2, result.totalHits); @@ -159,19 +172,29 @@ public void testSimpleWithScoring() throws Exception { assertEquals(0, result.scoreDocs[1].doc);*/ // Score mode total - assertJQ(req("q", "{!join from=" + toField + " to=" + idField + " score=Total}title:movie", "fl", "id") - , "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'}]}"); - /* joinQuery = JoinUtil.createJoinQuery(toField, false, idField, new TermQuery(new Term("title", "movie")), indexSearcher, ScoreMode.Total); - result = indexSearcher.search(joinQuery, 10); - assertEquals(2, result.totalHits); - assertEquals(0, result.scoreDocs[0].doc); - assertEquals(3, result.scoreDocs[1].doc); -*/ - //Score mode avg - assertJQ(req("q", "{!join from=" + toField + " to=" + idField + " score=Avg}title:movie", "fl", "id") - , "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'4'},{'id':'1'}]}"); - - /* joinQuery = JoinUtil.createJoinQuery(toField, false, idField, new TermQuery(new Term("title", "movie")), indexSearcher, ScoreMode.Avg); + assertJQ( + req( + "q", + "{!join from=" + toField + " to=" + idField + " score=Total}title:movie", + "fl", + "id"), + "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'1'},{'id':'4'}]}"); + /* joinQuery = JoinUtil.createJoinQuery(toField, false, idField, new TermQuery(new Term("title", "movie")), indexSearcher, ScoreMode.Total); + result = indexSearcher.search(joinQuery, 10); + assertEquals(2, result.totalHits); + assertEquals(0, result.scoreDocs[0].doc); + assertEquals(3, result.scoreDocs[1].doc); + */ + // Score mode avg + assertJQ( + req( + "q", + "{!join from=" + toField + " to=" + idField + " score=Avg}title:movie", + "fl", + "id"), + "/response=={'numFound':2,'start':0,'numFoundExact':true,'docs':[{'id':'4'},{'id':'1'}]}"); + + /* joinQuery = JoinUtil.createJoinQuery(toField, false, idField, new TermQuery(new Term("title", "movie")), indexSearcher, ScoreMode.Avg); result = indexSearcher.search(joinQuery, 10); assertEquals(2, result.totalHits); assertEquals(3, result.scoreDocs[0].doc); @@ -179,19 +202,30 @@ public void testSimpleWithScoring() throws Exception { } - final static Comparator lessFloat = (o1, o2) -> { - assertTrue(Float.parseFloat(o1) < Float.parseFloat(o2)); - return 0; - }; + static final Comparator lessFloat = + (o1, o2) -> { + assertTrue(Float.parseFloat(o1) < Float.parseFloat(o2)); + return 0; + }; @Ignore("SOLR-7814, also don't forget cover boost at testCacheHit()") public void testBoost() throws Exception { indexDataForScorring(); ScoreMode score = 
ScoreMode.values()[random().nextInt(ScoreMode.values().length)]; - final SolrQueryRequest req = req("q", "{!join from=movieId_s to=id score=" + score + " b=200}title:movie", "fl", "id,score", "omitHeader", "true"); + final SolrQueryRequest req = + req( + "q", + "{!join from=movieId_s to=id score=" + score + " b=200}title:movie", + "fl", + "id,score", + "omitHeader", + "true"); SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, new SolrQueryResponse())); - final Query luceneQ = QParser.getParser(req.getParams().get("q"), req).getQuery().rewrite(req.getSearcher().getSlowAtomicReader()); + final Query luceneQ = + QParser.getParser(req.getParams().get("q"), req) + .getQuery() + .rewrite(req.getSearcher().getSlowAtomicReader()); assertTrue(luceneQ instanceof BoostQuery); float boost = ((BoostQuery) luceneQ).getBoost(); assertEquals("" + luceneQ, Float.floatToIntBits(200), Float.floatToIntBits(boost)); @@ -202,24 +236,45 @@ public void testBoost() throws Exception { public void testCacheHit() throws Exception { indexDataForScorring(); - Map metrics = h.getCoreContainer().getMetricManager().registry(h.getCore().getCoreMetricManager().getRegistryName()).getMetrics(); + Map metrics = + h.getCoreContainer() + .getMetricManager() + .registry(h.getCore().getCoreMetricManager().getRegistryName()) + .getMetrics(); @SuppressWarnings("rawtypes") - MetricsMap mm = (MetricsMap)((SolrMetricManager.GaugeWrapper)metrics.get("CACHE.searcher.queryResultCache")).getGauge(); + MetricsMap mm = + (MetricsMap) + ((SolrMetricManager.GaugeWrapper) metrics.get("CACHE.searcher.queryResultCache")) + .getGauge(); { - Map statPre = mm.getValue(); - h.query(req("q", "{!join from=movieId_s to=id score=Avg}title:first", "fl", "id", "omitHeader", "true")); + Map statPre = mm.getValue(); + h.query( + req( + "q", + "{!join from=movieId_s to=id score=Avg}title:first", + "fl", + "id", + "omitHeader", + "true")); assertHitOrInsert(mm.getValue(), statPre); } { - Map statPre = mm.getValue(); - h.query(req("q", "{!join from=movieId_s to=id score=Avg}title:first", "fl", "id", "omitHeader", "true")); + Map statPre = mm.getValue(); + h.query( + req( + "q", + "{!join from=movieId_s to=id score=Avg}title:first", + "fl", + "id", + "omitHeader", + "true")); assertHit(mm.getValue(), statPre); } { - Map statPre = mm.getValue(); + Map statPre = mm.getValue(); Random r = random(); boolean changed = false; @@ -235,33 +290,67 @@ public void testCacheHit() throws Exception { changed |= x; */ String q = (!changed) ? (r.nextBoolean() ? 
"title:first^67" : "title:night") : "title:first"; - final String resp = h.query(req("q", "{!join from=" + from + " to=" + to + - " score=" + score + - //" b=" + boost + - "}" + q, "fl", "id", "omitHeader", "true") - ); + final String resp = + h.query( + req( + "q", + "{!join from=" + + from + + " to=" + + to + + " score=" + + score + + + // " b=" + boost + + "}" + + q, + "fl", + "id", + "omitHeader", + "true")); assertInsert(mm.getValue(), statPre); statPre = mm.getValue(); - final String repeat = h.query(req("q", "{!join from=" + from + " to=" + to + " score=" + score.toLowerCase(Locale.ROOT) + - //" b=" + boost - "}" + q, "fl", "id", "omitHeader", "true") - ); + final String repeat = + h.query( + req( + "q", + "{!join from=" + + from + + " to=" + + to + + " score=" + + score.toLowerCase(Locale.ROOT) + + + // " b=" + boost + "}" + + q, + "fl", + "id", + "omitHeader", + "true")); assertHit(mm.getValue(), statPre); assertEquals("lowercase shouldn't change anything", resp, repeat); - final String aMod = score.substring(0, score.length() - 1); - assertQEx("exception on "+aMod, "ScoreMode", - req("q", "{!join from=" + from + " to=" + to + " score=" + aMod + - "}" + q, "fl", "id", "omitHeader", "true"), - SolrException.ErrorCode.BAD_REQUEST); + final String aMod = score.substring(0, score.length() - 1); + assertQEx( + "exception on " + aMod, + "ScoreMode", + req( + "q", + "{!join from=" + from + " to=" + to + " score=" + aMod + "}" + q, + "fl", + "id", + "omitHeader", + "true"), + SolrException.ErrorCode.BAD_REQUEST); } - // this queries are not overlap, with other in this test case. + // this queries are not overlap, with other in this test case. // however it might be better to extract this method into the separate suite // for a while let's nuke a cache content, in case of repetitions @SuppressWarnings("rawtypes") - SolrCache cache = (SolrCache)h.getCore().getInfoRegistry().get("queryResultCache"); + SolrCache cache = (SolrCache) h.getCore().getInfoRegistry().get("queryResultCache"); cache.clear(); } @@ -272,72 +361,60 @@ private ScoreMode not(ScoreMode s) { return l.get(r.nextInt(l.size())); } - private void assertInsert(Map current, final Map statPre) { - assertEquals("it lookups", 1, - delta("lookups", current, statPre)); + private void assertInsert(Map current, final Map statPre) { + assertEquals("it lookups", 1, delta("lookups", current, statPre)); assertEquals("it doesn't hit", 0, delta("hits", current, statPre)); - assertEquals("it inserts", 1, - delta("inserts", current, statPre)); + assertEquals("it inserts", 1, delta("inserts", current, statPre)); } - private void assertHit(Map current, final Map statPre) { - assertEquals("it lookups", 1, - delta("lookups", current, statPre)); + private void assertHit(Map current, final Map statPre) { + assertEquals("it lookups", 1, delta("lookups", current, statPre)); assertEquals("it hits", 1, delta("hits", current, statPre)); - assertEquals("it doesn't insert", 0, - delta("inserts", current, statPre)); + assertEquals("it doesn't insert", 0, delta("inserts", current, statPre)); } - private void assertHitOrInsert(Map current, final Map statPre) { - assertEquals("it lookups", 1, - delta("lookups", current, statPre)); + private void assertHitOrInsert(Map current, final Map statPre) { + assertEquals("it lookups", 1, delta("lookups", current, statPre)); final long mayHit = delta("hits", current, statPre); assertTrue("it may hit", 0 == mayHit || 1 == mayHit); - assertEquals("or insert on cold", 1, - delta("inserts", current, statPre) + mayHit); + 
assertEquals("or insert on cold", 1, delta("inserts", current, statPre) + mayHit); } - private long delta(String key, Map a, Map b) { + private long delta(String key, Map a, Map b) { return (Long) a.get(key) - (Long) b.get(key); } private void indexDataForScorring() { clearIndex(); -// 0 - assertU(add(doc("t_description", "A random movie", - "name", "Movie 1", - idField, "1"))); -// 1 - - assertU(add(doc("title", "The first subtitle of this movie", - idField, "2", - toField, "1"))); - - -// 2 + // 0 + assertU(add(doc("t_description", "A random movie", "name", "Movie 1", idField, "1"))); + // 1 - assertU(add(doc("title", "random subtitle; random event movie", - idField, "3", - toField, "1"))); + assertU(add(doc("title", "The first subtitle of this movie", idField, "2", toField, "1"))); -// 3 + // 2 - assertU(add(doc("t_description", "A second random movie", - "name", "Movie 2", - idField, "4"))); -// 4 + assertU(add(doc("title", "random subtitle; random event movie", idField, "3", toField, "1"))); - assertU(add(doc("title", "a very random event happened during christmas night", - idField, "5", - toField, "4"))); + // 3 + assertU(add(doc("t_description", "A second random movie", "name", "Movie 2", idField, "4"))); + // 4 -// 5 + assertU( + add( + doc( + "title", + "a very random event happened during christmas night", + idField, + "5", + toField, + "4"))); - assertU(add(doc("title", "movie end movie test 123 test 123 random", - idField, "6", - toField, "4"))); + // 5 + assertU( + add(doc("title", "movie end movie test 123 test 123 random", idField, "6", toField, "4"))); assertU(commit()); } diff --git a/solr/core/src/test/org/apache/solr/search/join/another/BJQFilterAccessibleTest.java b/solr/core/src/test/org/apache/solr/search/join/another/BJQFilterAccessibleTest.java index d5044e6c548..e1f029daff6 100644 --- a/solr/core/src/test/org/apache/solr/search/join/another/BJQFilterAccessibleTest.java +++ b/solr/core/src/test/org/apache/solr/search/join/another/BJQFilterAccessibleTest.java @@ -17,8 +17,9 @@ package org.apache.solr.search.join.another; -import java.io.IOException; +import static org.apache.solr.search.join.BJQParserTest.createIndex; +import java.io.IOException; import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -31,9 +32,7 @@ import org.junit.Assert; import org.junit.BeforeClass; -import static org.apache.solr.search.join.BJQParserTest.createIndex; - -public class BJQFilterAccessibleTest extends SolrTestCaseJ4 { +public class BJQFilterAccessibleTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { @@ -45,9 +44,12 @@ public void testAbilityToCreateBJQfromAnotherPackage() throws IOException { try (SolrQueryRequest req = lrf.makeRequest()) { TermQuery childQuery = new TermQuery(new Term("child_s", "l")); Query parentQuery = new WildcardQuery(new Term("parent_s", "*")); - ToParentBlockJoinQuery tpbjq = new ToParentBlockJoinQuery(childQuery, - BlockJoinParentQParser.getCachedBitSetProducer(req,parentQuery), ScoreMode.Max); - Assert.assertEquals(6, req.getSearcher().search(tpbjq,10).totalHits.value); + ToParentBlockJoinQuery tpbjq = + new ToParentBlockJoinQuery( + childQuery, + BlockJoinParentQParser.getCachedBitSetProducer(req, parentQuery), + ScoreMode.Max); + Assert.assertEquals(6, req.getSearcher().search(tpbjq, 10).totalHits.value); } } } diff --git a/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java 
b/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java index c0ee7fb981f..7f307b520fb 100644 --- a/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java +++ b/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java @@ -16,6 +16,8 @@ */ package org.apache.solr.search.json; +import static org.hamcrest.core.StringContains.containsString; + import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -32,25 +34,29 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.hamcrest.core.StringContains.containsString; - - -@LuceneTestCase.SuppressCodecs({"Lucene3x","Lucene40","Lucene41","Lucene42","Lucene45","Appending"}) +@LuceneTestCase.SuppressCodecs({ + "Lucene3x", + "Lucene40", + "Lucene41", + "Lucene42", + "Lucene45", + "Appending" +}) public class TestJsonRequest extends SolrTestCaseHS { - private static SolrInstances servers; // for distributed testing + private static SolrInstances servers; // for distributed testing @SuppressWarnings("deprecation") @BeforeClass public static void beforeTests() throws Exception { systemSetPropertySolrDisableUrlAllowList("true"); JSONTestUtil.failRepeatedKeys = true; - initCore("solrconfig-tlog.xml","schema_latest.xml"); + initCore("solrconfig-tlog.xml", "schema_latest.xml"); } public static void initServers() throws Exception { if (servers == null) { - servers = new SolrInstances(3, "solrconfig-tlog.xml","schema_latest.xml"); + servers = new SolrInstances(3, "solrconfig-tlog.xml", "schema_latest.xml"); } } @@ -78,8 +84,8 @@ public void testLocalJsonRequestWithTags() throws Exception { @Test public void testDistribJsonRequest() throws Exception { initServers(); - Client client = servers.getClient( random().nextInt() ); - client.queryDefaults().set( "shards", servers.getShards() ); + Client client = servers.getClient(random().nextInt()); + client.queryDefaults().set("shards", servers.getShards()); doJsonRequest(client, true); } @@ -89,321 +95,482 @@ public static void doJsonRequest(Client client, boolean isDistrib) throws Except ignoreException("Expected JSON"); // test json param - client.testJQ( params("json","{query:'cat_s:A'}") - , "response/numFound==2" - ); + client.testJQ(params("json", "{query:'cat_s:A'}"), "response/numFound==2"); // invalid value - SolrException ex = expectThrows(SolrException.class, () -> client.testJQ(params("q", "*:*", "json", "5"))); + SolrException ex = + expectThrows(SolrException.class, () -> client.testJQ(params("q", "*:*", "json", "5"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code()); assertThat(ex.getMessage(), containsString("Expected JSON Object but got Long=5")); // this is to verify other json params are not affected - client.testJQ( params("q", "cat_s:A", "json.limit", "1"), - "response/numFound==2" - ); + client.testJQ(params("q", "cat_s:A", "json.limit", "1"), "response/numFound==2"); // test multiple json params - client.testJQ( params("json","{query:'cat_s:A'}", "json","{filter:'where_s:NY'}") - , "response/numFound==1" - ); + client.testJQ( + params("json", "{query:'cat_s:A'}", "json", "{filter:'where_s:NY'}"), + "response/numFound==1"); // test multiple json params with one being zero length - client.testJQ( params("json","{query:'cat_s:A'}", "json","{filter:'where_s:NY'}", "json","") - , "response/numFound==1" - ); + client.testJQ( + params("json", "{query:'cat_s:A'}", "json", "{filter:'where_s:NY'}", "json", ""), + "response/numFound==1"); // test multiple json params 
with one being a comment - client.testJQ( params("json","{query:'cat_s:A'}", "json","{filter:'where_s:NY'}", "json","/* */") - , "response/numFound==1" - ); + client.testJQ( + params("json", "{query:'cat_s:A'}", "json", "{filter:'where_s:NY'}", "json", "/* */"), + "response/numFound==1"); // test merging multi-valued params into list - client.testJQ( params("json","{query:'*:*'}", "json","{filter:'where_s:NY'}", "json","{filter:'cat_s:A'}") - , "response/numFound==1" - ); + client.testJQ( + params( + "json", "{query:'*:*'}", "json", "{filter:'where_s:NY'}", "json", "{filter:'cat_s:A'}"), + "response/numFound==1"); // test merging multi-valued params into list, second value is already list - client.testJQ( params("json","{query:'*:*'}", "json","{filter:'where_s:NY'}", "json","{filter:['cat_s:A']}") - , "response/numFound==1" - ); + client.testJQ( + params( + "json", + "{query:'*:*'}", + "json", + "{filter:'where_s:NY'}", + "json", + "{filter:['cat_s:A']}"), + "response/numFound==1"); // test merging multi-valued params into list, first value is already list - client.testJQ( params("json","{query:'*:*'}", "json","{filter:['where_s:NY']}", "json","{filter:'cat_s:A'}") - , "response/numFound==1" - ); + client.testJQ( + params( + "json", + "{query:'*:*'}", + "json", + "{filter:['where_s:NY']}", + "json", + "{filter:'cat_s:A'}"), + "response/numFound==1"); // test merging multi-valued params into list, both values are already list - client.testJQ( params("json","{query:'*:*'}", "json","{filter:['where_s:NY']}", "json","{filter:['cat_s:A']}") - , "response/numFound==1" - ); + client.testJQ( + params( + "json", + "{query:'*:*'}", + "json", + "{filter:['where_s:NY']}", + "json", + "{filter:['cat_s:A']}"), + "response/numFound==1"); // test inserting and merging with paths - client.testJQ( params("json.query","'*:*'", "json.filter","'where_s:NY'", "json.filter","'cat_s:A'") - , "response/numFound==1" - ); + client.testJQ( + params("json.query", "'*:*'", "json.filter", "'where_s:NY'", "json.filter", "'cat_s:A'"), + "response/numFound==1"); // test inserting and merging with paths with an empty string and a comment - client.testJQ( params("json.query","'*:*'", "json.filter","'where_s:NY'", "json.filter","'cat_s:A'", "json.filter","", "json.filter","/* */") - , "response/numFound==1" - ); + client.testJQ( + params( + "json.query", + "'*:*'", + "json.filter", + "'where_s:NY'", + "json.filter", + "'cat_s:A'", + "json.filter", + "", + "json.filter", + "/* */"), + "response/numFound==1"); // test overwriting of non-multivalued params - client.testJQ( params("json.query","'foo_s:NONE'", "json.filter","'where_s:NY'", "json.filter","'cat_s:A'", "json.query","'*:*'") - , "response/numFound==1" - ); + client.testJQ( + params( + "json.query", + "'foo_s:NONE'", + "json.filter", + "'where_s:NY'", + "json.filter", + "'cat_s:A'", + "json.query", + "'*:*'"), + "response/numFound==1"); // normal parameter specified in the params block, including numeric params cast back to string - client.testJQ( params("json","{params:{q:'*:*', fq:['cat_s:A','where_s:NY'], start:0, rows:5, fl:id}}") - , "response/docs==[{id:'1'}]" - ); - client.testJQ( params("json","{params:{q:'*:*', fq:['cat_s:A','where_s:(NY OR NJ)'], start:0, rows:1, fl:id, sort:'where_s asc'}}") - , "response/numFound==2" - , "response/docs==[{id:'4'}]" - ); - client.testJQ( params("json","{params:{q:'*:*', fq:['cat_s:A','where_s:(NY OR NJ)'], start:0, rows:1, fl:[id,'x:5.5'], sort:'where_s asc'}}") - , "response/numFound==2" - , "response/docs==[{id:'4', 
x:5.5}]" - ); + client.testJQ( + params("json", "{params:{q:'*:*', fq:['cat_s:A','where_s:NY'], start:0, rows:5, fl:id}}"), + "response/docs==[{id:'1'}]"); + client.testJQ( + params( + "json", + "{params:{q:'*:*', fq:['cat_s:A','where_s:(NY OR NJ)'], start:0, rows:1, fl:id, sort:'where_s asc'}}"), + "response/numFound==2", + "response/docs==[{id:'4'}]"); + client.testJQ( + params( + "json", + "{params:{q:'*:*', fq:['cat_s:A','where_s:(NY OR NJ)'], start:0, rows:1, fl:[id,'x:5.5'], sort:'where_s asc'}}"), + "response/numFound==2", + "response/docs==[{id:'4', x:5.5}]"); // test merge params - client.testJQ( params("json","{params:{q:'*:*'}}", "json","{params:{fq:['cat_s:A','where_s:(NY OR NJ)'], start:0, rows:1, fl:[id,'x:5.5']}}", "json","{params:{sort:'where_s asc'}}") - , "response/numFound==2" - , "response/docs==[{id:'4', x:5.5}]" - ); - + client.testJQ( + params( + "json", + "{params:{q:'*:*'}}", + "json", + "{params:{fq:['cat_s:A','where_s:(NY OR NJ)'], start:0, rows:1, fl:[id,'x:5.5']}}", + "json", + "{params:{sort:'where_s asc'}}"), + "response/numFound==2", + "response/docs==[{id:'4', x:5.5}]"); // test offset/limit/sort/fields - client.testJQ( params("json.query","'*:*'", "json.offset","1", "json.limit","2", "json.sort","'id desc'", "json.fields","'id'") - , "response/docs==[{id:'5'},{id:'4'}]" - ); + client.testJQ( + params( + "json.query", + "'*:*'", + "json.offset", + "1", + "json.limit", + "2", + "json.sort", + "'id desc'", + "json.fields", + "'id'"), + "response/docs==[{id:'5'},{id:'4'}]"); // test offset/limit/sort/fields, multi-valued json.fields - client.testJQ( params("json.query","'*:*'", "json.offset","1", "json.limit","2", "json.sort","'id desc'", "json.fields","'id'", "json.fields","'x:5.5'") - , "response/docs==[{id:'5', x:5.5},{id:'4', x:5.5}]" - ); + client.testJQ( + params( + "json.query", + "'*:*'", + "json.offset", + "1", + "json.limit", + "2", + "json.sort", + "'id desc'", + "json.fields", + "'id'", + "json.fields", + "'x:5.5'"), + "response/docs==[{id:'5', x:5.5},{id:'4', x:5.5}]"); // test offset/limit/sort/fields, overwriting non-multivalued params - client.testJQ( params("json.query","'*:*'", "json.offset","17", "json.offset","1", "json.limit","42", "json.limit","2", "json.sort","'id asc'", "json.sort","'id desc'", "json.fields","'id'", "json.fields","'x:5.5'") - , "response/docs==[{id:'5', x:5.5},{id:'4', x:5.5}]" - ); + client.testJQ( + params( + "json.query", + "'*:*'", + "json.offset", + "17", + "json.offset", + "1", + "json.limit", + "42", + "json.limit", + "2", + "json.sort", + "'id asc'", + "json.sort", + "'id desc'", + "json.fields", + "'id'", + "json.fields", + "'x:5.5'"), + "response/docs==[{id:'5', x:5.5},{id:'4', x:5.5}]"); doParamRefDslTest(client); // test templating before parsing JSON - client.testJQ( params("json","${OPENBRACE} query:'cat_s:A' ${CLOSEBRACE}", "json","${OPENBRACE} filter:'where_s:NY'${CLOSEBRACE}", "OPENBRACE","{", "CLOSEBRACE","}") - , "response/numFound==1" - ); - - // test templating with params defined in the JSON itself! Do we want to keep this functionality? - client.testJQ( params("json","{params:{V1:A,V2:NY}, query:'cat_s:${V1}'}", "json","{filter:'where_s:${V2}'}") - , "response/numFound==1" - ); - + client.testJQ( + params( + "json", + "${OPENBRACE} query:'cat_s:A' ${CLOSEBRACE}", + "json", + "${OPENBRACE} filter:'where_s:NY'${CLOSEBRACE}", + "OPENBRACE", + "{", + "CLOSEBRACE", + "}"), + "response/numFound==1"); + + // test templating with params defined in the JSON itself! 
Do we want to keep this + // functionality? + client.testJQ( + params( + "json", + "{params:{V1:A,V2:NY}, query:'cat_s:${V1}'}", + "json", + "{filter:'where_s:${V2}'}"), + "response/numFound==1"); // // with body // - client.testJQ(params(CommonParams.STREAM_BODY, "{query:'cat_s:A'}", "stream.contentType", "application/json") - , "response/numFound==2" - ); + client.testJQ( + params( + CommonParams.STREAM_BODY, + "{query:'cat_s:A'}", + "stream.contentType", + "application/json"), + "response/numFound==2"); // test body in conjunction with query params - client.testJQ(params(CommonParams.STREAM_BODY, "{query:'cat_s:A'}", "stream.contentType", "application/json", "json.filter", "'where_s:NY'") - , "response/numFound==1" - ); + client.testJQ( + params( + CommonParams.STREAM_BODY, + "{query:'cat_s:A'}", + "stream.contentType", + "application/json", + "json.filter", + "'where_s:NY'"), + "response/numFound==1"); // test that json body in params come "after" (will overwrite) - client.testJQ(params(CommonParams.STREAM_BODY, "{query:'*:*', filter:'where_s:NY'}", "stream.contentType", "application/json", "json","{query:'cat_s:A'}") - , "response/numFound==1" - ); + client.testJQ( + params( + CommonParams.STREAM_BODY, + "{query:'*:*', filter:'where_s:NY'}", + "stream.contentType", + "application/json", + "json", + "{query:'cat_s:A'}"), + "response/numFound==1"); // test that json.x params come after body - client.testJQ(params(CommonParams.STREAM_BODY, "{query:'*:*', filter:'where_s:NY'}", "stream.contentType", "application/json", "json.query","'cat_s:A'") - , "response/numFound==1" - ); - + client.testJQ( + params( + CommonParams.STREAM_BODY, + "{query:'*:*', filter:'where_s:NY'}", + "stream.contentType", + "application/json", + "json.query", + "'cat_s:A'"), + "response/numFound==1"); // test facet with json body - client.testJQ(params(CommonParams.STREAM_BODY, "{query:'*:*', facet:{x:'unique(where_s)'}}", "stream.contentType", "application/json") - , "facets=={count:6,x:2}" - ); + client.testJQ( + params( + CommonParams.STREAM_BODY, + "{query:'*:*', facet:{x:'unique(where_s)'}}", + "stream.contentType", + "application/json"), + "facets=={count:6,x:2}"); // test facet with json body, insert additional facets via query parameter - client.testJQ(params(CommonParams.STREAM_BODY, "{query:'*:*', facet:{x:'unique(where_s)'}}", "stream.contentType", "application/json", "json.facet.y","{terms:{field:where_s}}", "json.facet.z","'unique(where_s)'") - , "facets=={count:6,x:2, y:{buckets:[{val:NJ,count:3},{val:NY,count:2}]}, z:2}" - ); + client.testJQ( + params( + CommonParams.STREAM_BODY, + "{query:'*:*', facet:{x:'unique(where_s)'}}", + "stream.contentType", + "application/json", + "json.facet.y", + "{terms:{field:where_s}}", + "json.facet.z", + "'unique(where_s)'"), + "facets=={count:6,x:2, y:{buckets:[{val:NJ,count:3},{val:NY,count:2}]}, z:2}"); // test debug - client.testJQ( params("json","{query:'cat_s:A'}", "json.filter","'where_s:NY'", "debug","true") - , "debug/json=={query:'cat_s:A', filter:'where_s:NY'}" - ); + client.testJQ( + params("json", "{query:'cat_s:A'}", "json.filter", "'where_s:NY'", "debug", "true"), + "debug/json=={query:'cat_s:A', filter:'where_s:NY'}"); // test query dsl - client.testJQ( params("json", "{'query':'{!lucene}id:1'}") - , "response/numFound==1" - ); - - client.testJQ( params("json", "{" + - " 'query': {" + - " 'bool' : {" + - " 'should' : [" + - " {'lucene' : {'query' : 'id:1'}}," + - " 'id:2'" + - " ]" + - " }" + - " }" + - "}") - , "response/numFound==2" - ); - - 
client.testJQ( params("json", "{" + - " 'query': {" + - " 'bool' : {" + - " 'should' : [" + - " {'#MYTAG' : 'id:1'}," + // tagged query (the tag should do nothing here) - " 'id:2'" + - " ]" + - " }" + - " }" + - "}") - , "response/numFound==2" - ); - - client.testJQ( params("json", "{ " + - " query : {" + - " boost : {" + - " query : {" + - " lucene : { " + - " df : cat_s, " + - " query : A " + - " }" + - " }, " + - " b : 1.5 " + - " } " + - " } " + - "}") - , "response/numFound==2" - ); - - client.testJQ( params("json","{ " + - " query : {" + - " bool : {" + - " must : {" + - " lucene : {" + - " q.op : AND," + - " df : cat_s," + - " query : A" + - " }" + - " }" + - " must_not : {'#NOT':{lucene : {query:'id: 1'}}}" + // testing tagging syntax at the same time (the tag should do nothing here) - " }" + - " }" + - "}") - , "response/numFound==1" - ); - - client.testJQ( params("json","{ " + - " query : {" + - " bool : {" + - " must : {" + - " lucene : {" + - " q.op : AND," + - " df : cat_s," + - " query : A" + - " }" + - " }" + - " must_not : [{lucene : {query:'id: 1'}}]" + - " }" + - " }" + - "}") - , "response/numFound==1" - ); + client.testJQ(params("json", "{'query':'{!lucene}id:1'}"), "response/numFound==1"); + + client.testJQ( + params( + "json", + "{" + + " 'query': {" + + " 'bool' : {" + + " 'should' : [" + + " {'lucene' : {'query' : 'id:1'}}," + + " 'id:2'" + + " ]" + + " }" + + " }" + + "}"), + "response/numFound==2"); + + client.testJQ( + params( + "json", + "{" + + " 'query': {" + + " 'bool' : {" + + " 'should' : [" + + " {'#MYTAG' : 'id:1'}," + + // tagged query (the tag should do nothing here) + " 'id:2'" + + " ]" + + " }" + + " }" + + "}"), + "response/numFound==2"); + + client.testJQ( + params( + "json", + "{ " + + " query : {" + + " boost : {" + + " query : {" + + " lucene : { " + + " df : cat_s, " + + " query : A " + + " }" + + " }, " + + " b : 1.5 " + + " } " + + " } " + + "}"), + "response/numFound==2"); + + client.testJQ( + params( + "json", + "{ " + + " query : {" + + " bool : {" + + " must : {" + + " lucene : {" + + " q.op : AND," + + " df : cat_s," + + " query : A" + + " }" + + " }" + + " must_not : {'#NOT':{lucene : {query:'id: 1'}}}" + + // testing tagging syntax at the same time (the tag should do nothing here) + " }" + + " }" + + "}"), + "response/numFound==1"); + + client.testJQ( + params( + "json", + "{ " + + " query : {" + + " bool : {" + + " must : {" + + " lucene : {" + + " q.op : AND," + + " df : cat_s," + + " query : A" + + " }" + + " }" + + " must_not : [{lucene : {query:'id: 1'}}]" + + " }" + + " }" + + "}"), + "response/numFound==1"); assertCatANot1(client, "must"); - + testFilterCachingLocally(client); - client.testJQ( params("json","{" + - " query : '*:*'," + - " filter : {" + - " collapse : {" + - " field : cat_s" + - " } " + - " } " + - "}") - , isDistrib ? "" : "response/numFound==2" - ); - - client.testJQ( params("json","{" + - " query : {" + - " edismax : {" + - " query : 'A'," + - " qf : 'cat_s'," + - " bq : {" + - " edismax : {" + - " query : 'NJ'" + - " qf : 'where_s'" + - " }" + - " }" + - " }" + - " }, " + - " fields : id" + - "}") - , "response/numFound==2", isDistrib? "" : "response/docs==[{id:'4'},{id:'1'}]" - ); - - client.testJQ( params("json","{" + - " query : {" + - " edismax : {" + - " query : 'A'," + - " qf : 'cat_s'," + - " bq : {" + - " edismax : {" + - " query : 'NY'" + - " qf : 'where_s'" + - " }" + - " }" + - " }" + - " }, " + - " fields : id" + - "}") - , "response/numFound==2", isDistrib? 
"" : "response/docs==[{id:'1'},{id:'4'}]" - ); - - client.testJQ( params("json","{" + - " query : {" + - " dismax : {" + - " query : 'A NJ'" + - " qf : 'cat_s^0.1 where_s^100'" + - " } " + - " }, " + - " filter : '-id:2'," + - " fields : id" + - "}") - , "response/numFound==3", isDistrib? "" : "response/docs==[{id:'4'},{id:'5'},{id:'1'}]" - ); - - client.testJQ( params("json","{" + - " query : {" + - " dismax : {" + - " query : 'A NJ'" + - " qf : ['cat_s^100', 'where_s^0.1']" + - " } " + - " }, " + - " filter : '-id:2'," + - " fields : id" + - "}") - , "response/numFound==3", isDistrib? "" : "response/docs==[{id:'4'},{id:'1'},{id:'5'}]" - ); - - // TODO: this seems like a reasonable capability that we would want to support in the future. It should be OK to make this pass. - Exception e = expectThrows(Exception.class, () -> { - client.testJQ(params("json", "{query:{'lucene':'foo_s:ignore_exception'}}")); - }); + client.testJQ( + params( + "json", + "{" + + " query : '*:*'," + + " filter : {" + + " collapse : {" + + " field : cat_s" + + " } " + + " } " + + "}"), + isDistrib ? "" : "response/numFound==2"); + + client.testJQ( + params( + "json", + "{" + + " query : {" + + " edismax : {" + + " query : 'A'," + + " qf : 'cat_s'," + + " bq : {" + + " edismax : {" + + " query : 'NJ'" + + " qf : 'where_s'" + + " }" + + " }" + + " }" + + " }, " + + " fields : id" + + "}"), + "response/numFound==2", + isDistrib ? "" : "response/docs==[{id:'4'},{id:'1'}]"); + + client.testJQ( + params( + "json", + "{" + + " query : {" + + " edismax : {" + + " query : 'A'," + + " qf : 'cat_s'," + + " bq : {" + + " edismax : {" + + " query : 'NY'" + + " qf : 'where_s'" + + " }" + + " }" + + " }" + + " }, " + + " fields : id" + + "}"), + "response/numFound==2", + isDistrib ? "" : "response/docs==[{id:'1'},{id:'4'}]"); + + client.testJQ( + params( + "json", + "{" + + " query : {" + + " dismax : {" + + " query : 'A NJ'" + + " qf : 'cat_s^0.1 where_s^100'" + + " } " + + " }, " + + " filter : '-id:2'," + + " fields : id" + + "}"), + "response/numFound==3", + isDistrib ? "" : "response/docs==[{id:'4'},{id:'5'},{id:'1'}]"); + + client.testJQ( + params( + "json", + "{" + + " query : {" + + " dismax : {" + + " query : 'A NJ'" + + " qf : ['cat_s^100', 'where_s^0.1']" + + " } " + + " }, " + + " filter : '-id:2'," + + " fields : id" + + "}"), + "response/numFound==3", + isDistrib ? "" : "response/docs==[{id:'4'},{id:'1'},{id:'5'}]"); + + // TODO: this seems like a reasonable capability that we would want to support in the future. + // It should be OK to make this pass. 
+ Exception e = + expectThrows( + Exception.class, + () -> { + client.testJQ(params("json", "{query:{'lucene':'foo_s:ignore_exception'}}")); + }); assertThat(e.getMessage(), containsString("foo_s")); // test failure on unknown parameter - e = expectThrows(Exception.class, () -> { - client.testJQ(params("json", "{query:'cat_s:A', foobar_ignore_exception:5}"), "response/numFound==2"); - }); + e = + expectThrows( + Exception.class, + () -> { + client.testJQ( + params("json", "{query:'cat_s:A', foobar_ignore_exception:5}"), + "response/numFound==2"); + }); assertThat(e.getMessage(), containsString("foobar")); resetExceptionIgnores(); @@ -411,63 +578,84 @@ public static void doJsonRequest(Client client, boolean isDistrib) throws Except private static void doParamRefDslTest(Client client) throws Exception { // referencing in dsl //nestedqp - client.testJQ( params("json","{query: {query: {param:'ref1'}}}", "ref1","{!field f=cat_s}A") - , "response/numFound==2" - ); + client.testJQ( + params("json", "{query: {query: {param:'ref1'}}}", "ref1", "{!field f=cat_s}A"), + "response/numFound==2"); // referencing json string param - client.testJQ( params("json", random().nextBoolean() ? - "{query:{query:{param:'ref1'}}}" // nestedqp - : "{query: {query: {query:{param:'ref1'}}}}", // nestedqp, v local param - "json",random().nextBoolean() - ? "{params:{ref1:'{!field f=cat_s}A'}}" // string param - : "{queries:{ref1:{field:{f:cat_s,query:A}}}}" ) // qdsl - , "response/numFound==2" - ); - { // shortest top level ref - final ModifiableSolrParams params = params("json","{query:{param:'ref1'}}"); + client.testJQ( + params( + "json", + random().nextBoolean() + ? "{query:{query:{param:'ref1'}}}" // nestedqp + : "{query: {query: {query:{param:'ref1'}}}}", // nestedqp, v local param + "json", + random().nextBoolean() + ? "{params:{ref1:'{!field f=cat_s}A'}}" // string param + : "{queries:{ref1:{field:{f:cat_s,query:A}}}}") // qdsl + , + "response/numFound==2"); + { // shortest top level ref + final ModifiableSolrParams params = params("json", "{query:{param:'ref1'}}"); if (random().nextBoolean()) { - params.add("ref1","cat_s:A"); // either to plain string + params.add("ref1", "cat_s:A"); // either to plain string } else { - params.add("json","{queries:{ref1:{field:{f:cat_s,query:A}}}}");// or to qdsl + params.add("json", "{queries:{ref1:{field:{f:cat_s,query:A}}}}"); // or to qdsl } - client.testJQ( params, "response/numFound==2"); - } // ref in bool must - client.testJQ( params("json","{query:{bool: {must:[{param:fq1},{param:fq2}]}}}", - "json","{params:{fq1:'cat_s:A', fq2:'where_s:NY'}}", "json.fields", "id") - , "response/docs==[{id:'1'}]" - );// referencing dsl&strings from filters objs&array - client.testJQ( params("json.filter","{param:fq1}","json.filter","{param:fq2}", - "json", random().nextBoolean() ? - "{queries:{fq1:{lucene:{query:'cat_s:A'}}, fq2:{lucene:{query:'where_s:NY'}}}}" : - "{params:{fq1:'cat_s:A', fq2:'where_s:NY'}}", - "json.fields", "id", "q", "*:*") - , "response/docs==[{id:'1'}]" - ); + client.testJQ(params, "response/numFound==2"); + } // ref in bool must + client.testJQ( + params( + "json", + "{query:{bool: {must:[{param:fq1},{param:fq2}]}}}", + "json", + "{params:{fq1:'cat_s:A', fq2:'where_s:NY'}}", + "json.fields", + "id"), + "response/docs==[{id:'1'}]"); // referencing dsl&strings from filters objs&array + client.testJQ( + params( + "json.filter", + "{param:fq1}", + "json.filter", + "{param:fq2}", + "json", + random().nextBoolean() + ? 
"{queries:{fq1:{lucene:{query:'cat_s:A'}}, fq2:{lucene:{query:'where_s:NY'}}}}" + : "{params:{fq1:'cat_s:A', fq2:'where_s:NY'}}", + "json.fields", + "id", + "q", + "*:*"), + "response/docs==[{id:'1'}]"); } private static void testFilterCachingLocally(Client client) throws Exception { - if(client.getClientProvider()==null) { + if (client.getClientProvider() == null) { final SolrQueryRequest request = req(); try { - final CaffeineCache filterCache = (CaffeineCache) request.getSearcher().getFilterCache(); + final CaffeineCache filterCache = + (CaffeineCache) request.getSearcher().getFilterCache(); filterCache.clear(); final TermQuery catA = new TermQuery(new Term("cat_s", "A")); - assertNull("cache is empty",filterCache.get(catA)); + assertNull("cache is empty", filterCache.get(catA)); - if(random().nextBoolean()) { - if(random().nextBoolean()) { - if(random().nextBoolean()) { + if (random().nextBoolean()) { + if (random().nextBoolean()) { + if (random().nextBoolean()) { assertCatANot1(client, "must"); - }else { + } else { assertCatANot1(client, "must", "cat_s:A"); } } else { - assertCatANot1(client, "must","{!lucene q.op=AND df=cat_s "+"cache="+random().nextBoolean()+"}A" ); - } + assertCatANot1( + client, + "must", + "{!lucene q.op=AND df=cat_s " + "cache=" + random().nextBoolean() + "}A"); + } } else { assertCatANot1(client, "filter", "{!lucene q.op=AND df=cat_s cache=false}A"); } - assertNull("no cache still",filterCache.get(catA)); + assertNull("no cache still", filterCache.get(catA)); if (random().nextBoolean()) { if (random().nextBoolean()) { @@ -476,9 +664,9 @@ private static void testFilterCachingLocally(Client client) throws Exception { assertCatANot1(client, "filter"); } } else { - assertCatANot1(client, "filter","{!lucene q.op=AND df=cat_s cache=true}A"); + assertCatANot1(client, "filter", "{!lucene q.op=AND df=cat_s cache=true}A"); } - assertNotNull("got cached ",filterCache.get(catA)); + assertNotNull("got cached ", filterCache.get(catA)); } finally { request.close(); @@ -487,148 +675,176 @@ private static void testFilterCachingLocally(Client client) throws Exception { } private static void assertCatANot1(Client client, final String occur) throws Exception { - assertCatANot1(client, occur, "{!lucene q.op=AND df=cat_s}A"); + assertCatANot1(client, occur, "{!lucene q.op=AND df=cat_s}A"); } - private static void assertCatANot1(Client client, final String occur, String catAclause) throws Exception { - client.testJQ( params("json","{ " + - " query : {" + - " bool : {" + - " " + occur + " : '"+ catAclause+ "'" + - " must_not : '{!lucene v=\\'id:1\\'}'" + - " }" + - " }" + - "}") - , "response/numFound==1" - ); + private static void assertCatANot1(Client client, final String occur, String catAclause) + throws Exception { + client.testJQ( + params( + "json", + "{ " + + " query : {" + + " bool : {" + + " " + + occur + + " : '" + + catAclause + + "'" + + " must_not : '{!lucene v=\\'id:1\\'}'" + + " }" + + " }" + + "}"), + "response/numFound==1"); } public static void doJsonRequestWithTag(Client client) throws Exception { addDocs(client); try { - client.testJQ( params("json","{" + - " query : '*:*'," + - " filter : { \"RCAT\" : \"cat_s:A OR ignore_exception\" }" + // without the pound, the tag would be interpreted as a query type - "}", "json.facet", "{" + - "categories:{ type:terms, field:cat_s, domain:{excludeTags:\"RCAT\"} } " + - "}"), "facets=={ count:2, " + - " categories:{ buckets:[ {val:B, count:3}, {val:A, count:2} ] }" + - "}" - ); + client.testJQ( + params( + "json", + "{" + + " 
query : '*:*'," + + " filter : { \"RCAT\" : \"cat_s:A OR ignore_exception\" }" + + // without the pound, the tag would be interpreted as a query type + "}", + "json.facet", + "{" + + "categories:{ type:terms, field:cat_s, domain:{excludeTags:\"RCAT\"} } " + + "}"), + "facets=={ count:2, " + + " categories:{ buckets:[ {val:B, count:3}, {val:A, count:2} ] }" + + "}"); fail("no # no tag"); } catch (Exception e) { - // This is just the current mode of failure. It's fine if it fails a different way (with a 400 error) in the future. + // This is just the current mode of failure. It's fine if it fails a different way (with a + // 400 error) in the future. assertTrue(e.getMessage().contains("expect a json object")); } - final String taggedQ = "{" + - " \"#RCAT\" : " + (random().nextBoolean() ? - "{" + - " term : {" + - " f : cat_s," + - " v : A" + - " } " + - " } " - : "\"cat_s:A\"")+ - " } "; - boolean queryAndFilter = random().nextBoolean() ; - client.testJQ(params("json", "{" + - " query :" + ( queryAndFilter ? " '*:*', filter : " : "") - + (!queryAndFilter || random().nextBoolean() ? taggedQ : "["+taggedQ+"]" )+ - "}", "json.facet", "{" + - "categories:{ type:terms, field:cat_s, domain:{excludeTags:\"RCAT\"} } " + - "}"), "facets=={ count:2, " + - " categories:{ buckets:[ {val:B, count:3}, {val:A, count:2} ] }" + - "}" - ); - - client.testJQ( params("json","{" + - " query : '*:*'," + - " filter : {" + - " term : {" + - " f : cat_s," + - " v : A" + - " } " + - " } " + - "}", "json.facet", "{" + - "categories:{ type:terms, field:cat_s" - +( random().nextBoolean() ? ", domain:{excludeTags:\"RCAT\"} ": " ") - + "} " + - "}"), "facets=={ count:2, " + - " categories:{ buckets:[ {val:A, count:2} ] }" + - "}" - ); - - client.testJQ( params("json","{" + - " query : '*:*'," + - " filter : {" + - " \"#RCAT\" : {" + - " term : {" + - " f : cat_s," + - " v : A" + - " } " + - " } " + - " } " + - "}", "json.facet", "{" + - "categories:{ type:terms, field:cat_s } " + - "}"), "facets=={ count:2, " + - " categories:{ buckets:[ {val:A, count:2} ] }" + - "}" - ); + final String taggedQ = + "{" + + " \"#RCAT\" : " + + (random().nextBoolean() + ? "{" + " term : {" + " f : cat_s," + " v : A" + " } " + " } " + : "\"cat_s:A\"") + + " } "; + boolean queryAndFilter = random().nextBoolean(); + client.testJQ( + params( + "json", + "{" + + " query :" + + (queryAndFilter ? " '*:*', filter : " : "") + + (!queryAndFilter || random().nextBoolean() ? taggedQ : "[" + taggedQ + "]") + + "}", + "json.facet", + "{" + "categories:{ type:terms, field:cat_s, domain:{excludeTags:\"RCAT\"} } " + "}"), + "facets=={ count:2, " + + " categories:{ buckets:[ {val:B, count:3}, {val:A, count:2} ] }" + + "}"); + + client.testJQ( + params( + "json", + "{" + + " query : '*:*'," + + " filter : {" + + " term : {" + + " f : cat_s," + + " v : A" + + " } " + + " } " + + "}", + "json.facet", + "{" + + "categories:{ type:terms, field:cat_s" + + (random().nextBoolean() ? 
", domain:{excludeTags:\"RCAT\"} " : " ") + + "} " + + "}"), + "facets=={ count:2, " + " categories:{ buckets:[ {val:A, count:2} ] }" + "}"); + + client.testJQ( + params( + "json", + "{" + + " query : '*:*'," + + " filter : {" + + " \"#RCAT\" : {" + + " term : {" + + " f : cat_s," + + " v : A" + + " } " + + " } " + + " } " + + "}", + "json.facet", + "{" + "categories:{ type:terms, field:cat_s } " + "}"), + "facets=={ count:2, " + " categories:{ buckets:[ {val:A, count:2} ] }" + "}"); boolean multiTag = random().nextBoolean(); - client.testJQ(params("json", "{" + - " query : '*:*'," + - " filter : [" + - "{ \"#RCAT"+(multiTag ? ",RCATSECONDTAG":"") + "\" : \"cat_s:A\" }," + - "{ \"#RWHERE\" : {" + - " term : {" + - " f : where_s," + - " v : NY" + - " } " + - " }" + - "}]}" - , "json.facet", "{" + - "categories:{ type:terms, field:cat_s, domain:{excludeTags:\"RCAT\"} } " + - "countries:{ type:terms, field:where_s, domain:{excludeTags:\"RWHERE\"} } " + - "ids:{ type:terms, field:id, domain:{excludeTags:[\""+ (multiTag ? "RCATSECONDTAG":"RCAT")+ "\", \"RWHERE\"]} } " + - "}"), "facets==" + "{\n" + - " \"count\":1,\n" + - " \"categories\":{\n" + - " \"buckets\":[{\n" + - " \"val\":\"A\",\n" + - " \"count\":1},\n" + - " {\n" + - " \"val\":\"B\",\n" + - " \"count\":1}]},\n" + - " \"countries\":{\n" + - " \"buckets\":[{\n" + - " \"val\":\"NJ\",\n" + - " \"count\":1},\n" + - " {\n" + - " \"val\":\"NY\",\n" + - " \"count\":1}]},\n" + - " \"ids\":{\n" + - " \"buckets\":[{\n" + - " \"val\":\"1\",\n" + - " \"count\":1},\n" + - " {\n" + - " \"val\":\"2\",\n" + - " \"count\":1},\n" + - " {\n" + - " \"val\":\"3\",\n" + - " \"count\":1},\n" + - " {\n" + - " \"val\":\"4\",\n" + - " \"count\":1},\n" + - " {\n" + - " \"val\":\"5\",\n" + - " \"count\":1},\n" + - " {\n" + - " \"val\":\"6\",\n" + - " \"count\":1}]}}}" - ); + client.testJQ( + params( + "json", + "{" + + " query : '*:*'," + + " filter : [" + + "{ \"#RCAT" + + (multiTag ? ",RCATSECONDTAG" : "") + + "\" : \"cat_s:A\" }," + + "{ \"#RWHERE\" : {" + + " term : {" + + " f : where_s," + + " v : NY" + + " } " + + " }" + + "}]}", + "json.facet", + "{" + + "categories:{ type:terms, field:cat_s, domain:{excludeTags:\"RCAT\"} } " + + "countries:{ type:terms, field:where_s, domain:{excludeTags:\"RWHERE\"} } " + + "ids:{ type:terms, field:id, domain:{excludeTags:[\"" + + (multiTag ? 
"RCATSECONDTAG" : "RCAT") + + "\", \"RWHERE\"]} } " + + "}"), + "facets==" + + "{\n" + + " \"count\":1,\n" + + " \"categories\":{\n" + + " \"buckets\":[{\n" + + " \"val\":\"A\",\n" + + " \"count\":1},\n" + + " {\n" + + " \"val\":\"B\",\n" + + " \"count\":1}]},\n" + + " \"countries\":{\n" + + " \"buckets\":[{\n" + + " \"val\":\"NJ\",\n" + + " \"count\":1},\n" + + " {\n" + + " \"val\":\"NY\",\n" + + " \"count\":1}]},\n" + + " \"ids\":{\n" + + " \"buckets\":[{\n" + + " \"val\":\"1\",\n" + + " \"count\":1},\n" + + " {\n" + + " \"val\":\"2\",\n" + + " \"count\":1},\n" + + " {\n" + + " \"val\":\"3\",\n" + + " \"count\":1},\n" + + " {\n" + + " \"val\":\"4\",\n" + + " \"count\":1},\n" + + " {\n" + + " \"val\":\"5\",\n" + + " \"count\":1},\n" + + " {\n" + + " \"val\":\"6\",\n" + + " \"count\":1}]}}}"); } private static void addDocs(Client client) throws Exception { @@ -643,5 +859,4 @@ private static void addDocs(Client client) throws Exception { client.add(sdoc("id", "6", "cat_s", "B", "where_s", "NY"), null); client.commit(); } - } diff --git a/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java b/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java index 3de80490779..3e55908d627 100644 --- a/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java +++ b/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; - import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -34,13 +33,11 @@ import org.junit.Test; public class CloudMLTQParserTest extends SolrCloudTestCase { - + @Before public void setupCluster() throws Exception { - configureCluster(2) - .addConfig("conf", configset("cloud-dynamic")) - .configure(); - + configureCluster(2).addConfig("conf", configset("cloud-dynamic")).configure(); + final CloudSolrClient client = cluster.getSolrClient(); CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 1) @@ -49,8 +46,8 @@ public void setupCluster() throws Exception { cluster.waitForActiveCollection(COLLECTION, 2, 2); String id = "id"; - String FIELD1 = "lowerfilt_u" ; - String FIELD2 = "lowerfilt1_u" ; + String FIELD1 = "lowerfilt_u"; + String FIELD2 = "lowerfilt1_u"; new UpdateRequest() .add(sdoc(id, "1", FIELD1, "toyota")) @@ -59,17 +56,31 @@ public void setupCluster() throws Exception { .add(sdoc(id, "4", FIELD1, "ford")) .add(sdoc(id, "5", FIELD1, "ferrari")) .add(sdoc(id, "6", FIELD1, "jaguar")) - .add(sdoc(id, "7", FIELD1, "mclaren moon or the moon and moon moon shine and the moon but moon was good foxes too")) + .add( + sdoc( + id, + "7", + FIELD1, + "mclaren moon or the moon and moon moon shine and the moon but moon was good foxes too")) .add(sdoc(id, "8", FIELD1, "sonata")) - .add(sdoc(id, "9", FIELD1, "The quick red fox jumped over the lazy big and large brown dogs.")) + .add( + sdoc( + id, + "9", + FIELD1, + "The quick red fox jumped over the lazy big and large brown dogs.")) .add(sdoc(id, "10", FIELD1, "blue")) .add(sdoc(id, "12", FIELD1, "glue")) .add(sdoc(id, "13", FIELD1, "The quote red fox jumped over the lazy brown dogs.")) .add(sdoc(id, "14", FIELD1, "The quote red fox jumped over the lazy brown dogs.")) .add(sdoc(id, "15", FIELD1, "The fat red fox jumped over the lazy brown dogs.")) .add(sdoc(id, "16", FIELD1, "The slim red fox jumped over the lazy brown dogs.")) - .add(sdoc(id, "17", FIELD1, 
- "The quote red fox jumped moon over the lazy brown dogs moon. Of course moon. Foxes and moon come back to the foxes and moon")) + .add( + sdoc( + id, + "17", + FIELD1, + "The quote red fox jumped moon over the lazy brown dogs moon. Of course moon. Foxes and moon come back to the foxes and moon")) .add(sdoc(id, "18", FIELD1, "The quote red fox jumped over the lazy brown dogs.")) .add(sdoc(id, "19", FIELD1, "The hose red fox jumped over the lazy brown dogs.")) .add(sdoc(id, "20", FIELD1, "The quote red fox jumped over the lazy brown dogs.")) @@ -82,12 +93,33 @@ public void setupCluster() throws Exception { .add(sdoc(id, "27", FIELD1, "bmw usa 535i")) .add(sdoc(id, "28", FIELD1, "bmw 750Li")) .add(sdoc(id, "29", FIELD1, "bmw usa", FIELD2, "red green blue")) - .add(sdoc(id, "30", FIELD1, "The quote red fox jumped over the lazy brown dogs.", FIELD2, "red green yellow")) - .add(sdoc(id, "31", FIELD1, "The fat red fox jumped over the lazy brown dogs.", FIELD2, "green blue yellow")) - .add(sdoc(id, "32", FIELD1, "The slim red fox jumped over the lazy brown dogs.", FIELD2, "yellow white black")) + .add( + sdoc( + id, + "30", + FIELD1, + "The quote red fox jumped over the lazy brown dogs.", + FIELD2, + "red green yellow")) + .add( + sdoc( + id, + "31", + FIELD1, + "The fat red fox jumped over the lazy brown dogs.", + FIELD2, + "green blue yellow")) + .add( + sdoc( + id, + "32", + FIELD1, + "The slim red fox jumped over the lazy brown dogs.", + FIELD2, + "yellow white black")) .commit(client, COLLECTION); } - + @After public void cleanCluster() throws Exception { if (null != cluster) { @@ -100,96 +132,122 @@ public void cleanCluster() throws Exception { @Test public void testMLTQParser() throws Exception { - QueryResponse queryResponse = cluster.getSolrClient() - .query(COLLECTION, new SolrQuery("{!mlt qf=lowerfilt_u}17").setShowDebugInfo(true)); + QueryResponse queryResponse = + cluster + .getSolrClient() + .query(COLLECTION, new SolrQuery("{!mlt qf=lowerfilt_u}17").setShowDebugInfo(true)); SolrDocumentList solrDocuments = queryResponse.getResults(); - int[] expectedIds = new int[]{7, 9, 13, 14, 15, 16, 20, 22, 24, 32}; + int[] expectedIds = new int[] {7, 9, 13, 14, 15, 16, 20, 22, 24, 32}; int[] actualIds = new int[10]; int i = 0; for (SolrDocument solrDocument : solrDocuments) { actualIds[i++] = Integer.parseInt(String.valueOf(solrDocument.getFieldValue("id"))); } - + Arrays.sort(actualIds); Arrays.sort(expectedIds); assertArrayEquals(expectedIds, actualIds); - } @Test public void testBoost() throws Exception { - QueryResponse queryResponse = cluster.getSolrClient().query(COLLECTION, new SolrQuery("{!mlt qf=lowerfilt_u boost=true}17")); + QueryResponse queryResponse = + cluster + .getSolrClient() + .query(COLLECTION, new SolrQuery("{!mlt qf=lowerfilt_u boost=true}17")); SolrDocumentList solrDocuments = queryResponse.getResults(); - int[] expectedIds = new int[]{7, 9, 13, 14, 15, 16, 20, 22, 24, 32}; + int[] expectedIds = new int[] {7, 9, 13, 14, 15, 16, 20, 22, 24, 32}; int[] actualIds = new int[solrDocuments.size()]; int i = 0; for (SolrDocument solrDocument : solrDocuments) { actualIds[i++] = Integer.parseInt(String.valueOf(solrDocument.getFieldValue("id"))); } - + Arrays.sort(actualIds); Arrays.sort(expectedIds); assertArrayEquals(expectedIds, actualIds); - queryResponse = cluster.getSolrClient().query(COLLECTION, new SolrQuery("{!mlt qf=lowerfilt_u^10,lowerfilt1_u^1000 boost=false mintf=0 mindf=0}30")); + queryResponse = + cluster + .getSolrClient() + .query( + COLLECTION, + new SolrQuery( + 
"{!mlt qf=lowerfilt_u^10,lowerfilt1_u^1000 boost=false mintf=0 mindf=0}30")); solrDocuments = queryResponse.getResults(); - expectedIds = new int[]{31, 18, 23, 13, 14, 20, 22, 32, 19, 21}; + expectedIds = new int[] {31, 18, 23, 13, 14, 20, 22, 32, 19, 21}; actualIds = new int[solrDocuments.size()]; i = 0; for (SolrDocument solrDocument : solrDocuments) { actualIds[i++] = Integer.parseInt(String.valueOf(solrDocument.getFieldValue("id"))); } - + Arrays.sort(actualIds); Arrays.sort(expectedIds); System.out.println("DEBUG ACTUAL IDS 1: " + Arrays.toString(actualIds)); assertArrayEquals(expectedIds, actualIds); - queryResponse = cluster.getSolrClient().query(COLLECTION, new SolrQuery("{!mlt qf=lowerfilt_u^10,lowerfilt1_u^1000 boost=true mintf=0 mindf=0}30")); + queryResponse = + cluster + .getSolrClient() + .query( + COLLECTION, + new SolrQuery( + "{!mlt qf=lowerfilt_u^10,lowerfilt1_u^1000 boost=true mintf=0 mindf=0}30")); solrDocuments = queryResponse.getResults(); - expectedIds = new int[]{29, 31, 32, 18, 23, 13, 14, 20, 22, 19}; + expectedIds = new int[] {29, 31, 32, 18, 23, 13, 14, 20, 22, 19}; actualIds = new int[solrDocuments.size()]; i = 0; for (SolrDocument solrDocument : solrDocuments) { actualIds[i++] = Integer.parseInt(String.valueOf(solrDocument.getFieldValue("id"))); } - + Arrays.sort(actualIds); Arrays.sort(expectedIds); System.out.println("DEBUG ACTUAL IDS 2: " + Arrays.toString(actualIds)); - assertArrayEquals(Arrays.toString(expectedIds) + " " + Arrays.toString(actualIds), expectedIds, actualIds); + assertArrayEquals( + Arrays.toString(expectedIds) + " " + Arrays.toString(actualIds), expectedIds, actualIds); } @Test @SuppressWarnings({"unchecked"}) public void testMinDF() throws Exception { - QueryResponse queryResponse = cluster.getSolrClient().query(COLLECTION, - new SolrQuery("{!mlt qf=lowerfilt_u mindf=0 mintf=1}3").setShowDebugInfo(true)); + QueryResponse queryResponse = + cluster + .getSolrClient() + .query( + COLLECTION, + new SolrQuery("{!mlt qf=lowerfilt_u mindf=0 mintf=1}3").setShowDebugInfo(true)); SolrDocumentList solrDocuments = queryResponse.getResults(); - int[] expectedIds = new int[]{29, 27, 26, 28}; + int[] expectedIds = new int[] {29, 27, 26, 28}; int[] actualIds = new int[solrDocuments.size()]; int i = 0; for (SolrDocument solrDocument : solrDocuments) { actualIds[i++] = Integer.parseInt(String.valueOf(solrDocument.getFieldValue("id"))); } - + Arrays.sort(actualIds); Arrays.sort(expectedIds); - assertArrayEquals(Arrays.toString(expectedIds) + " " + Arrays.toString(actualIds), expectedIds, actualIds); + assertArrayEquals( + Arrays.toString(expectedIds) + " " + Arrays.toString(actualIds), expectedIds, actualIds); - String[] expectedQueryStrings = new String[]{ - "+(lowerfilt_u:bmw lowerfilt_u:usa) -id:3", - "+(lowerfilt_u:usa lowerfilt_u:bmw) -id:3"}; + String[] expectedQueryStrings = + new String[] { + "+(lowerfilt_u:bmw lowerfilt_u:usa) -id:3", "+(lowerfilt_u:usa lowerfilt_u:bmw) -id:3" + }; String[] actualParsedQueries; if (queryResponse.getDebugMap().get("parsedquery") instanceof String) { String parsedQueryString = (String) queryResponse.getDebugMap().get("parsedquery"); - assertTrue(parsedQueryString.equals(expectedQueryStrings[0]) || parsedQueryString.equals(expectedQueryStrings[1])); + assertTrue( + parsedQueryString.equals(expectedQueryStrings[0]) + || parsedQueryString.equals(expectedQueryStrings[1])); } else { - actualParsedQueries = ((ArrayList) queryResponse - .getDebugMap().get("parsedquery")).toArray(new String[0]); + actualParsedQueries = + 
((ArrayList) queryResponse.getDebugMap().get("parsedquery")) + .toArray(new String[0]); Arrays.sort(actualParsedQueries); assertArrayEquals(expectedQueryStrings, actualParsedQueries); } @@ -198,10 +256,13 @@ public void testMinDF() throws Exception { @Test public void testMultipleFields() throws Exception { - QueryResponse queryResponse = cluster.getSolrClient().query(COLLECTION, - new SolrQuery("{!mlt qf=lowerfilt_u,lowerfilt1_u mindf=0 mintf=1}26")); + QueryResponse queryResponse = + cluster + .getSolrClient() + .query( + COLLECTION, new SolrQuery("{!mlt qf=lowerfilt_u,lowerfilt1_u mindf=0 mintf=1}26")); SolrDocumentList solrDocuments = queryResponse.getResults(); - int[] expectedIds = new int[]{3, 29, 27, 28}; + int[] expectedIds = new int[] {3, 29, 27, 28}; int[] actualIds = new int[solrDocuments.size()]; int i = 0; for (SolrDocument solrDocument : solrDocuments) { @@ -210,67 +271,79 @@ public void testMultipleFields() throws Exception { Arrays.sort(actualIds); Arrays.sort(expectedIds); - assertArrayEquals(Arrays.toString(expectedIds) + " " + Arrays.toString(actualIds), expectedIds, actualIds); - + assertArrayEquals( + Arrays.toString(expectedIds) + " " + Arrays.toString(actualIds), expectedIds, actualIds); } @Test public void testHighDFValue() throws Exception { // Test out a high value of df and make sure nothing matches. - QueryResponse queryResponse = cluster.getSolrClient().query(COLLECTION, - new SolrQuery("{!mlt qf=lowerfilt_u mindf=20 mintf=1}3")); + QueryResponse queryResponse = + cluster + .getSolrClient() + .query(COLLECTION, new SolrQuery("{!mlt qf=lowerfilt_u mindf=20 mintf=1}3")); SolrDocumentList solrDocuments = queryResponse.getResults(); - assertEquals("Expected to match 0 documents with a mindf of 20 but found more", solrDocuments.size(), 0); - + assertEquals( + "Expected to match 0 documents with a mindf of 20 but found more", solrDocuments.size(), 0); } @Test public void testHighWLValue() throws Exception { // Test out a high value of wl and make sure nothing matches. - QueryResponse queryResponse = cluster.getSolrClient().query(COLLECTION, - new SolrQuery("{!mlt qf=lowerfilt_u minwl=4 mintf=1}3")); + QueryResponse queryResponse = + cluster + .getSolrClient() + .query(COLLECTION, new SolrQuery("{!mlt qf=lowerfilt_u minwl=4 mintf=1}3")); SolrDocumentList solrDocuments = queryResponse.getResults(); - assertEquals("Expected to match 0 documents with a minwl of 4 but found more", solrDocuments.size(), 0); - + assertEquals( + "Expected to match 0 documents with a minwl of 4 but found more", solrDocuments.size(), 0); } @Test public void testLowMinWLValue() throws Exception { // Test out a low enough value of minwl and make sure we get the expected matches. - QueryResponse queryResponse = cluster.getSolrClient().query(COLLECTION, - new SolrQuery("{!mlt qf=lowerfilt_u minwl=3 mintf=1}3")); + QueryResponse queryResponse = + cluster + .getSolrClient() + .query(COLLECTION, new SolrQuery("{!mlt qf=lowerfilt_u minwl=3 mintf=1}3")); SolrDocumentList solrDocuments = queryResponse.getResults(); - assertEquals("Expected to match 4 documents with a minwl of 3 but found more", 4, solrDocuments.size()); - + assertEquals( + "Expected to match 4 documents with a minwl of 3 but found more", 4, solrDocuments.size()); } @Test public void testUnstoredAndUnanalyzedFieldsAreIgnored() throws Exception { - // Assert that {!mlt}id does not throw an exception i.e. implicitly, only fields that are stored + have explicit - // analyzer are used for MLT Query construction. 
- QueryResponse queryResponse = cluster.getSolrClient().query(COLLECTION, new SolrQuery("{!mlt}20")); + // Assert that {!mlt}id does not throw an exception i.e. implicitly, only fields that are stored + // + have explicit analyzer are used for MLT Query construction. + QueryResponse queryResponse = + cluster.getSolrClient().query(COLLECTION, new SolrQuery("{!mlt}20")); SolrDocumentList solrDocuments = queryResponse.getResults(); int[] actualIds = new int[solrDocuments.size()]; - int[] expectedIds = new int[]{13, 14, 15, 16, 22, 24, 32, 18, 19, 21}; + int[] expectedIds = new int[] {13, 14, 15, 16, 22, 24, 32, 18, 19, 21}; int i = 0; StringBuilder sb = new StringBuilder(); for (SolrDocument solrDocument : solrDocuments) { - actualIds[i++] = Integer.parseInt(String.valueOf(solrDocument.getFieldValue("id"))); - sb.append(actualIds[i-1]).append(", "); + actualIds[i++] = Integer.parseInt(String.valueOf(solrDocument.getFieldValue("id"))); + sb.append(actualIds[i - 1]).append(", "); } - + Arrays.sort(actualIds); Arrays.sort(expectedIds); assertArrayEquals(expectedIds, actualIds); } public void testInvalidSourceDocument() throws IOException { - SolrException e = expectThrows(SolrException.class, () -> { - cluster.getSolrClient().query(COLLECTION, new SolrQuery("{!mlt qf=lowerfilt_u}999999")); - }); + SolrException e = + expectThrows( + SolrException.class, + () -> { + cluster + .getSolrClient() + .query(COLLECTION, new SolrQuery("{!mlt qf=lowerfilt_u}999999")); + }); } } diff --git a/solr/core/src/test/org/apache/solr/search/mlt/SimpleMLTQParserTest.java b/solr/core/src/test/org/apache/solr/search/mlt/SimpleMLTQParserTest.java index 951f293ba18..b88ee98cf84 100644 --- a/solr/core/src/test/org/apache/solr/search/mlt/SimpleMLTQParserTest.java +++ b/solr/core/src/test/org/apache/solr/search/mlt/SimpleMLTQParserTest.java @@ -33,8 +33,8 @@ public static void moreLikeThisBeforeClass() throws Exception { @Test public void doTest() throws Exception { String id = "id"; - String FIELD1 = "lowerfilt" ; - String FIELD2 = "lowerfilt1" ; + String FIELD1 = "lowerfilt"; + String FIELD2 = "lowerfilt1"; delQ("*:*"); assertU(adoc(id, "1", FIELD1, "toyota")); assertU(adoc(id, "2", FIELD1, "chevrolet")); @@ -42,19 +42,33 @@ public void doTest() throws Exception { assertU(adoc(id, "4", FIELD1, "ford")); assertU(adoc(id, "5", FIELD1, "ferrari")); assertU(adoc(id, "6", FIELD1, "jaguar")); - assertU(adoc(id, "7", FIELD1, "mclaren moon or the moon and moon moon shine " + - "and the moon but moon was good foxes too")); + assertU( + adoc( + id, + "7", + FIELD1, + "mclaren moon or the moon and moon moon shine " + + "and the moon but moon was good foxes too")); assertU(adoc(id, "8", FIELD1, "sonata")); - assertU(adoc(id, "9", FIELD1, "The quick red fox jumped over the lazy big " + - "and large brown dogs.")); + assertU( + adoc( + id, + "9", + FIELD1, + "The quick red fox jumped over the lazy big " + "and large brown dogs.")); assertU(adoc(id, "10", FIELD1, "blue")); assertU(adoc(id, "12", FIELD1, "glue")); assertU(adoc(id, "13", FIELD1, "The quote red fox jumped over the lazy brown dogs.")); assertU(adoc(id, "14", FIELD1, "The quote red fox jumped over the lazy brown dogs.")); assertU(adoc(id, "15", FIELD1, "The fat red fox jumped over the lazy brown dogs.")); assertU(adoc(id, "16", FIELD1, "The slim red fox jumped over the lazy brown dogs.")); - assertU(adoc(id, "17", FIELD1, "The quote red fox jumped moon over the lazy " + - "brown dogs moon. Of course moon. 
Foxes and moon come back to the foxes and moon")); + assertU( + adoc( + id, + "17", + FIELD1, + "The quote red fox jumped moon over the lazy " + + "brown dogs moon. Of course moon. Foxes and moon come back to the foxes and moon")); assertU(adoc(id, "18", FIELD1, "The quote red fox jumped over the lazy brown dogs.")); assertU(adoc(id, "19", FIELD1, "The hose red fox jumped over the lazy brown dogs.")); assertU(adoc(id, "20", FIELD1, "The quote red fox jumped over the lazy brown dogs.")); @@ -66,21 +80,39 @@ public void doTest() throws Exception { assertU(adoc(id, "26", FIELD1, "bmw usa 328i")); assertU(adoc(id, "27", FIELD1, "bmw usa 535i")); assertU(adoc(id, "28", FIELD1, "bmw 750Li")); - assertU(adoc(id, "29", FIELD1, "bmw usa", - FIELD2, "red green blue")); - assertU(adoc(id, "30", FIELD1, "The quote red fox jumped over the lazy brown dogs.", - FIELD2, "red green yellow")); - assertU(adoc(id, "31", FIELD1, "The fat red fox jumped over the lazy brown dogs.", - FIELD2, "green blue yellow")); - assertU(adoc(id, "32", FIELD1, "The slim red fox jumped over the lazy brown dogs.", - FIELD2, "yellow white black")); + assertU(adoc(id, "29", FIELD1, "bmw usa", FIELD2, "red green blue")); + assertU( + adoc( + id, + "30", + FIELD1, + "The quote red fox jumped over the lazy brown dogs.", + FIELD2, + "red green yellow")); + assertU( + adoc( + id, + "31", + FIELD1, + "The fat red fox jumped over the lazy brown dogs.", + FIELD2, + "green blue yellow")); + assertU( + adoc( + id, + "32", + FIELD1, + "The slim red fox jumped over the lazy brown dogs.", + FIELD2, + "yellow white black")); assertU(commit()); // for score tiebreaker, use doc ID order final SolrParams sortParams = params("sort", "score desc, id asc"); - - assertQ(req(sortParams, CommonParams.Q, "{!mlt qf=lowerfilt}17"), + + assertQ( + req(sortParams, CommonParams.Q, "{!mlt qf=lowerfilt}17"), "//result/doc[1]/str[@name='id'][.='13']", "//result/doc[2]/str[@name='id'][.='14']", "//result/doc[3]/str[@name='id'][.='15']", @@ -90,10 +122,10 @@ public void doTest() throws Exception { "//result/doc[7]/str[@name='id'][.='20']", "//result/doc[8]/str[@name='id'][.='21']", "//result/doc[9]/str[@name='id'][.='22']", - "//result/doc[10]/str[@name='id'][.='23']" - ); + "//result/doc[10]/str[@name='id'][.='23']"); - assertQ(req(sortParams, CommonParams.Q, "{!mlt qf=lowerfilt boost=true}17"), + assertQ( + req(sortParams, CommonParams.Q, "{!mlt qf=lowerfilt boost=true}17"), "//result/doc[1]/str[@name='id'][.='13']", "//result/doc[2]/str[@name='id'][.='14']", "//result/doc[3]/str[@name='id'][.='15']", @@ -103,10 +135,13 @@ public void doTest() throws Exception { "//result/doc[7]/str[@name='id'][.='20']", "//result/doc[8]/str[@name='id'][.='21']", "//result/doc[9]/str[@name='id'][.='22']", - "//result/doc[10]/str[@name='id'][.='23']" - ); + "//result/doc[10]/str[@name='id'][.='23']"); - assertQ(req(sortParams, CommonParams.Q, "{!mlt qf=lowerfilt,lowerfilt1^1000 boost=false mintf=0 mindf=0}30"), + assertQ( + req( + sortParams, + CommonParams.Q, + "{!mlt qf=lowerfilt,lowerfilt1^1000 boost=false mintf=0 mindf=0}30"), "//result/doc[1]/str[@name='id'][.='31']", "//result/doc[2]/str[@name='id'][.='13']", "//result/doc[3]/str[@name='id'][.='14']", @@ -116,10 +151,13 @@ public void doTest() throws Exception { "//result/doc[7]/str[@name='id'][.='23']", "//result/doc[8]/str[@name='id'][.='32']", "//result/doc[9]/str[@name='id'][.='15']", - "//result/doc[10]/str[@name='id'][.='16']" - ); + "//result/doc[10]/str[@name='id'][.='16']"); - assertQ(req(sortParams, 
CommonParams.Q, "{!mlt qf=lowerfilt,lowerfilt1^1000 boost=true mintf=0 mindf=0}30"), + assertQ( + req( + sortParams, + CommonParams.Q, + "{!mlt qf=lowerfilt,lowerfilt1^1000 boost=true mintf=0 mindf=0}30"), "//result/doc[1]/str[@name='id'][.='29']", "//result/doc[2]/str[@name='id'][.='31']", "//result/doc[3]/str[@name='id'][.='32']", @@ -129,27 +167,25 @@ public void doTest() throws Exception { "//result/doc[7]/str[@name='id'][.='20']", "//result/doc[8]/str[@name='id'][.='22']", "//result/doc[9]/str[@name='id'][.='23']", - "//result/doc[10]/str[@name='id'][.='15']" - ); + "//result/doc[10]/str[@name='id'][.='15']"); - assertQ(req(sortParams, CommonParams.Q, "{!mlt qf=lowerfilt mindf=0 mintf=1}26"), + assertQ( + req(sortParams, CommonParams.Q, "{!mlt qf=lowerfilt mindf=0 mintf=1}26"), "//result/doc[1]/str[@name='id'][.='29']", "//result/doc[2]/str[@name='id'][.='27']", - "//result/doc[3]/str[@name='id'][.='28']" - ); + "//result/doc[3]/str[@name='id'][.='28']"); - assertQ(req(CommonParams.Q, "{!mlt qf=lowerfilt mindf=10 mintf=1}26"), - "//result[@numFound='0']" - ); + assertQ( + req(CommonParams.Q, "{!mlt qf=lowerfilt mindf=10 mintf=1}26"), "//result[@numFound='0']"); - assertQ(req(CommonParams.Q, "{!mlt qf=lowerfilt minwl=3 mintf=1 mindf=1}26"), - "//result[@numFound='3']" - ); + assertQ( + req(CommonParams.Q, "{!mlt qf=lowerfilt minwl=3 mintf=1 mindf=1}26"), + "//result[@numFound='3']"); - assertQ(req(CommonParams.Q, "{!mlt qf=lowerfilt minwl=4 mintf=1 mindf=1}26", - CommonParams.DEBUG, "true"), - "//result[@numFound='0']" - ); + assertQ( + req( + CommonParams.Q, "{!mlt qf=lowerfilt minwl=4 mintf=1 mindf=1}26", + CommonParams.DEBUG, "true"), + "//result[@numFound='0']"); } - } diff --git a/solr/core/src/test/org/apache/solr/search/neural/KnnQParserTest.java b/solr/core/src/test/org/apache/solr/search/neural/KnnQParserTest.java index 8d1f4d9ca07..40c052d9f35 100644 --- a/solr/core/src/test/org/apache/solr/search/neural/KnnQParserTest.java +++ b/solr/core/src/test/org/apache/solr/search/neural/KnnQParserTest.java @@ -16,6 +16,11 @@ */ package org.apache.solr.search.neural; +import static org.apache.solr.search.neural.KnnQParser.DEFAULT_TOP_K; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; @@ -24,281 +29,335 @@ import org.junit.Before; import org.junit.Test; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import static org.apache.solr.search.neural.KnnQParser.DEFAULT_TOP_K; - public class KnnQParserTest extends SolrTestCaseJ4 { - String IDField = "id"; - String vectorField = "vector"; - String vectorField2 = "vector2"; - - @Before - public void prepareIndex() throws Exception { - /* vectorDimension="4" similarityFunction="cosine" */ - initCore("solrconfig-basic.xml", "schema-densevector.xml"); - - List docsToIndex = this.prepareDocs(); - for(SolrInputDocument doc:docsToIndex){ - assertU(adoc(doc)); - } - - assertU(commit()); + String IDField = "id"; + String vectorField = "vector"; + String vectorField2 = "vector2"; + + @Before + public void prepareIndex() throws Exception { + /* vectorDimension="4" similarityFunction="cosine" */ + initCore("solrconfig-basic.xml", "schema-densevector.xml"); + + List docsToIndex = this.prepareDocs(); + for (SolrInputDocument doc : docsToIndex) { + assertU(adoc(doc)); } - private List prepareDocs() { - int docsCount = 13; - List docs = new ArrayList<>(docsCount); 
- for (int i = 1; i < docsCount + 1; i++) { - SolrInputDocument doc = new SolrInputDocument(); - doc.addField(IDField, i); - docs.add(doc); - } - - docs.get(0).addField(vectorField, Arrays.asList(1f, 2f, 3f, 4f)); // cosine distance vector1= 1.0 - docs.get(1).addField(vectorField, Arrays.asList(1.5f, 2.5f, 3.5f, 4.5f)); // cosine distance vector1= 0.998 - docs.get(2).addField(vectorField, Arrays.asList(7.5f, 15.5f, 17.5f, 22.5f)); // cosine distance vector1= 0.992 - docs.get(3).addField(vectorField, Arrays.asList(1.4f, 2.4f, 3.4f, 4.4f)); // cosine distance vector1= 0.999 - docs.get(4).addField(vectorField, Arrays.asList(30f, 22f, 35f, 20f)); // cosine distance vector1= 0.862 - docs.get(5).addField(vectorField, Arrays.asList(40f, 1f, 1f, 200f)); // cosine distance vector1= 0.756 - docs.get(6).addField(vectorField, Arrays.asList(5f, 10f, 20f, 40f)); // cosine distance vector1= 0.970 - docs.get(7).addField(vectorField, Arrays.asList(120f, 60f, 30f, 15f)); // cosine distance vector1= 0.515 - docs.get(8).addField(vectorField, Arrays.asList(200f, 50f, 100f, 25f)); // cosine distance vector1= 0.554 - docs.get(9).addField(vectorField, Arrays.asList(1.8f, 2.5f, 3.7f, 4.9f)); // cosine distance vector1= 0.997 - - docs.get(10).addField(vectorField2, Arrays.asList(1f, 2f, 3f, 4f)); // cosine distance vector2= 1 - docs.get(11).addField(vectorField2, Arrays.asList(7.5f, 15.5f, 17.5f, 22.5f)); // cosine distance vector2= 0.992 - docs.get(12).addField(vectorField2, Arrays.asList(1.5f, 2.5f, 3.5f, 4.5f)); // cosine distance vector2= 0.998 - - return docs; - } - - @After - public void cleanUp() throws Exception { - clearIndex(); - deleteCore(); - } + assertU(commit()); + } - @Test - public void incorrectTopK_shouldThrowException() { - String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; - - assertQEx("String topK should throw Exception", - "For input string: \"string\"", - req(CommonParams.Q, "{!knn f=vector topK=string}" + vectorToSearch, "fl", "id"), - SolrException.ErrorCode.BAD_REQUEST); - - assertQEx("Double topK should throw Exception", - "For input string: \"4.5\"", - req(CommonParams.Q, "{!knn f=vector topK=4.5}" + vectorToSearch, "fl", "id"), - SolrException.ErrorCode.BAD_REQUEST); + private List prepareDocs() { + int docsCount = 13; + List docs = new ArrayList<>(docsCount); + for (int i = 1; i < docsCount + 1; i++) { + SolrInputDocument doc = new SolrInputDocument(); + doc.addField(IDField, i); + docs.add(doc); } - @Test - public void topKMissing_shouldReturnDefaultTopK() { - String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; - - assertQ(req(CommonParams.Q, "{!knn f=vector}" + vectorToSearch, "fl", "id"), - "//result[@numFound='" + DEFAULT_TOP_K + "']", - "//result/doc[1]/str[@name='id'][.='1']", - "//result/doc[2]/str[@name='id'][.='4']", - "//result/doc[3]/str[@name='id'][.='2']", - "//result/doc[4]/str[@name='id'][.='10']", - "//result/doc[5]/str[@name='id'][.='3']", - "//result/doc[6]/str[@name='id'][.='7']", - "//result/doc[7]/str[@name='id'][.='5']", - "//result/doc[8]/str[@name='id'][.='6']", - "//result/doc[9]/str[@name='id'][.='9']", - "//result/doc[10]/str[@name='id'][.='8']" - ); - } - - @Test - public void topK_shouldReturnOnlyTopKResults() { - String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; - - assertQ(req(CommonParams.Q, "{!knn f=vector topK=5}" + vectorToSearch, "fl", "id"), - "//result[@numFound='5']", - "//result/doc[1]/str[@name='id'][.='1']", - "//result/doc[2]/str[@name='id'][.='4']", - "//result/doc[3]/str[@name='id'][.='2']", - "//result/doc[4]/str[@name='id'][.='10']", - 
"//result/doc[5]/str[@name='id'][.='3']" - ); - - assertQ(req(CommonParams.Q, "{!knn f=vector topK=3}" + vectorToSearch, "fl", "id"), - "//result[@numFound='3']", - "//result/doc[1]/str[@name='id'][.='1']", - "//result/doc[2]/str[@name='id'][.='4']", - "//result/doc[3]/str[@name='id'][.='2']" - ); - } - - @Test - public void incorrectVectorFieldType_shouldThrowException() { - String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; - - assertQEx("Incorrect vector field type should throw Exception", - "only DenseVectorField is compatible with Knn Query Parser", - req(CommonParams.Q, "{!knn f=id topK=10}" + vectorToSearch, "fl", "id"), - SolrException.ErrorCode.BAD_REQUEST); - } - - @Test - public void undefinedVectorField_shouldThrowException() { - String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; - - assertQEx("Undefined vector field should throw Exception", - "undefined field: \"notExistent\"", - req(CommonParams.Q, "{!knn f=notExistent topK=10}" + vectorToSearch, "fl", "id"), - SolrException.ErrorCode.BAD_REQUEST); - } - - @Test - public void missingVectorField_shouldThrowException() { - String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; - - assertQEx("missing vector field should throw Exception", - "the Dense Vector field 'f' is missing", - req(CommonParams.Q, "{!knn topK=10}" + vectorToSearch, "fl", "id"), - SolrException.ErrorCode.BAD_REQUEST); - } - - @Test - public void correctVectorField_shouldSearchOnThatField() { - String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; - - assertQ(req(CommonParams.Q, "{!knn f=vector2 topK=5}" + vectorToSearch, "fl", "id"), - "//result[@numFound='3']", - "//result/doc[1]/str[@name='id'][.='11']", - "//result/doc[2]/str[@name='id'][.='13']", - "//result/doc[3]/str[@name='id'][.='12']" - ); - } - - @Test - public void missingVectorToSearch_shouldThrowException() { - assertQEx("missing vector to search should throw Exception", - "the Dense Vector value 'v' to search is missing", - req(CommonParams.Q, "{!knn f=vector topK=10}", "fl", "id"), - SolrException.ErrorCode.BAD_REQUEST); - } - - @Test - public void incorrectVectorToSearchDimension_shouldThrowException() { - String vectorToSearch = "[2.0, 4.4, 3.]"; - assertQEx("missing vector to search should throw Exception", - "incorrect vector dimension. The vector value has size 3 while it is expected a vector with size 4", - req(CommonParams.Q, "{!knn f=vector topK=10}" + vectorToSearch, "fl", "id"), - SolrException.ErrorCode.BAD_REQUEST); - - vectorToSearch = "[2.0, 4.4,,]"; - assertQEx("incorrect vector to search should throw Exception", - "incorrect vector dimension. The vector value has size 2 while it is expected a vector with size 4", - req(CommonParams.Q, "{!knn f=vector topK=10}" + vectorToSearch, "fl", "id"), - SolrException.ErrorCode.BAD_REQUEST); - } - - @Test - public void incorrectVectorToSearch_shouldThrowException() { - String vectorToSearch = "2.0, 4.4, 3.5, 6.4"; - assertQEx("incorrect vector to search should throw Exception", - "incorrect vector format. The expected format is:'[f1,f2..f3]' where each element f is a float", - req(CommonParams.Q, "{!knn f=vector topK=10}" + vectorToSearch, "fl", "id"), - SolrException.ErrorCode.BAD_REQUEST); - - vectorToSearch = "[2.0, 4.4, 3.5, 6.4"; - assertQEx("incorrect vector to search should throw Exception", - "incorrect vector format. 
The expected format is:'[f1,f2..f3]' where each element f is a float", - req(CommonParams.Q, "{!knn f=vector topK=10}" + vectorToSearch, "fl", "id"), - SolrException.ErrorCode.BAD_REQUEST); - - vectorToSearch = "2.0, 4.4, 3.5, 6.4]"; - assertQEx("incorrect vector to search should throw Exception", - "incorrect vector format. The expected format is:'[f1,f2..f3]' where each element f is a float", - req(CommonParams.Q, "{!knn f=vector topK=10}" + vectorToSearch, "fl", "id"), - SolrException.ErrorCode.BAD_REQUEST); - - vectorToSearch = "[2.0, 4.4, 3.5, stringElement]"; - assertQEx("incorrect vector to search should throw Exception", - "incorrect vector element: ' stringElement'. The expected format is:'[f1,f2..f3]' where each element f is a float", - req(CommonParams.Q, "{!knn f=vector topK=10}" + vectorToSearch, "fl", "id"), - SolrException.ErrorCode.BAD_REQUEST); - - vectorToSearch = "[2.0, 4.4, , ]"; - assertQEx("incorrect vector to search should throw Exception", - "incorrect vector element: ' '. The expected format is:'[f1,f2..f3]' where each element f is a float", - req(CommonParams.Q, "{!knn f=vector topK=10}" + vectorToSearch, "fl", "id"), - SolrException.ErrorCode.BAD_REQUEST); - } - - @Test - public void correctQuery_shouldRankBySimilarityFunction() { - String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; - - assertQ(req(CommonParams.Q, "{!knn f=vector topK=10}" + vectorToSearch, "fl", "id"), - "//result[@numFound='10']", - "//result/doc[1]/str[@name='id'][.='1']", - "//result/doc[2]/str[@name='id'][.='4']", - "//result/doc[3]/str[@name='id'][.='2']", - "//result/doc[4]/str[@name='id'][.='10']", - "//result/doc[5]/str[@name='id'][.='3']", - "//result/doc[6]/str[@name='id'][.='7']", - "//result/doc[7]/str[@name='id'][.='5']", - "//result/doc[8]/str[@name='id'][.='6']", - "//result/doc[9]/str[@name='id'][.='9']", - "//result/doc[10]/str[@name='id'][.='8']" - ); - } - - @Test - public void knnQueryWithFilterQuery_shouldIntersectResults() { - String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; - - assertQ(req(CommonParams.Q, "{!knn f=vector topK=10}" + vectorToSearch, "fq", "id:(1 2 7 20)", "fl", "id"), - "//result[@numFound='3']", - "//result/doc[1]/str[@name='id'][.='1']", - "//result/doc[2]/str[@name='id'][.='2']", - "//result/doc[3]/str[@name='id'][.='7']" - ); - /* - * This behavior is counter-intuitive. - * You would expect the query to apply to only the results filtered by the filter query. - * So you ideally would like to see the following ranked list as a result: [4,2,3,9]. - * To get this please use the knn query parser as a reranker, example in a following test. 
- * This is how filter queries work(it's just intersection of the bitsets coming from each query and filter query): - * Ranked List from q=[1,4,2,10] Set from fq={3,4,9,2} = [4,2] - * */ - assertQ(req(CommonParams.Q, "{!knn f=vector topK=4}" + vectorToSearch, "fq", "id:(3 4 9 2)", "fl", "id"), - "//result[@numFound='2']", - "//result/doc[1]/str[@name='id'][.='4']", - "//result/doc[2]/str[@name='id'][.='2']" - ); - /* The ranking is now different as default solr score is used for the main query */ - assertQ(req(CommonParams.Q, "id:(3 4 9 2)", "fq", "{!knn f=vector topK=4}" + vectorToSearch, "fl", "id"), - "//result[@numFound='2']", - "//result/doc[1]/str[@name='id'][.='2']", - "//result/doc[2]/str[@name='id'][.='4']" - ); - } - - + docs.get(0) + .addField(vectorField, Arrays.asList(1f, 2f, 3f, 4f)); // cosine distance vector1= 1.0 + docs.get(1) + .addField( + vectorField, Arrays.asList(1.5f, 2.5f, 3.5f, 4.5f)); // cosine distance vector1= 0.998 + docs.get(2) + .addField( + vectorField, + Arrays.asList(7.5f, 15.5f, 17.5f, 22.5f)); // cosine distance vector1= 0.992 + docs.get(3) + .addField( + vectorField, Arrays.asList(1.4f, 2.4f, 3.4f, 4.4f)); // cosine distance vector1= 0.999 + docs.get(4) + .addField(vectorField, Arrays.asList(30f, 22f, 35f, 20f)); // cosine distance vector1= 0.862 + docs.get(5) + .addField(vectorField, Arrays.asList(40f, 1f, 1f, 200f)); // cosine distance vector1= 0.756 + docs.get(6) + .addField(vectorField, Arrays.asList(5f, 10f, 20f, 40f)); // cosine distance vector1= 0.970 + docs.get(7) + .addField( + vectorField, Arrays.asList(120f, 60f, 30f, 15f)); // cosine distance vector1= 0.515 + docs.get(8) + .addField( + vectorField, Arrays.asList(200f, 50f, 100f, 25f)); // cosine distance vector1= 0.554 + docs.get(9) + .addField( + vectorField, Arrays.asList(1.8f, 2.5f, 3.7f, 4.9f)); // cosine distance vector1= 0.997 + + docs.get(10) + .addField(vectorField2, Arrays.asList(1f, 2f, 3f, 4f)); // cosine distance vector2= 1 + docs.get(11) + .addField( + vectorField2, + Arrays.asList(7.5f, 15.5f, 17.5f, 22.5f)); // cosine distance vector2= 0.992 + docs.get(12) + .addField( + vectorField2, Arrays.asList(1.5f, 2.5f, 3.5f, 4.5f)); // cosine distance vector2= 0.998 + + return docs; + } + + @After + public void cleanUp() throws Exception { + clearIndex(); + deleteCore(); + } + + @Test + public void incorrectTopK_shouldThrowException() { + String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; + + assertQEx( + "String topK should throw Exception", + "For input string: \"string\"", + req(CommonParams.Q, "{!knn f=vector topK=string}" + vectorToSearch, "fl", "id"), + SolrException.ErrorCode.BAD_REQUEST); + + assertQEx( + "Double topK should throw Exception", + "For input string: \"4.5\"", + req(CommonParams.Q, "{!knn f=vector topK=4.5}" + vectorToSearch, "fl", "id"), + SolrException.ErrorCode.BAD_REQUEST); + } + + @Test + public void topKMissing_shouldReturnDefaultTopK() { + String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; + + assertQ( + req(CommonParams.Q, "{!knn f=vector}" + vectorToSearch, "fl", "id"), + "//result[@numFound='" + DEFAULT_TOP_K + "']", + "//result/doc[1]/str[@name='id'][.='1']", + "//result/doc[2]/str[@name='id'][.='4']", + "//result/doc[3]/str[@name='id'][.='2']", + "//result/doc[4]/str[@name='id'][.='10']", + "//result/doc[5]/str[@name='id'][.='3']", + "//result/doc[6]/str[@name='id'][.='7']", + "//result/doc[7]/str[@name='id'][.='5']", + "//result/doc[8]/str[@name='id'][.='6']", + "//result/doc[9]/str[@name='id'][.='9']", + "//result/doc[10]/str[@name='id'][.='8']"); + } + + 
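All of these tests drive Solr's knn query parser through its local-params syntax: {!knn f=<field> topK=<k>} followed by a bracketed float-vector literal. As a reading aid, here is a minimal SolrJ sketch of the same kind of request issued against a running server rather than the embedded test harness; the base URL and collection name are illustrative assumptions, not part of this patch:

  // Sketch only: assumes a running Solr with a DenseVectorField named "vector"
  // (4 dimensions, cosine similarity), mirroring the schema these tests load.
  import org.apache.solr.client.solrj.SolrClient;
  import org.apache.solr.client.solrj.SolrQuery;
  import org.apache.solr.client.solrj.impl.Http2SolrClient;
  import org.apache.solr.client.solrj.response.QueryResponse;

  public class KnnQuerySketch {
    public static void main(String[] args) throws Exception {
      try (SolrClient client = new Http2SolrClient.Builder("http://localhost:8983/solr").build()) {
        // topK caps how many nearest neighbours the parser returns; the vector
        // literal must match the field's declared dimension, as the
        // incorrectVectorToSearchDimension test below demonstrates.
        SolrQuery query = new SolrQuery("{!knn f=vector topK=5}[1.0, 2.0, 3.0, 4.0]");
        query.setFields("id", "score");
        QueryResponse rsp = client.query("collection1", query);
        rsp.getResults().forEach(doc -> System.out.println(doc.get("id") + " " + doc.get("score")));
      }
    }
  }
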
@Test + public void topK_shouldReturnOnlyTopKResults() { + String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; + + assertQ( + req(CommonParams.Q, "{!knn f=vector topK=5}" + vectorToSearch, "fl", "id"), + "//result[@numFound='5']", + "//result/doc[1]/str[@name='id'][.='1']", + "//result/doc[2]/str[@name='id'][.='4']", + "//result/doc[3]/str[@name='id'][.='2']", + "//result/doc[4]/str[@name='id'][.='10']", + "//result/doc[5]/str[@name='id'][.='3']"); + + assertQ( + req(CommonParams.Q, "{!knn f=vector topK=3}" + vectorToSearch, "fl", "id"), + "//result[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='1']", + "//result/doc[2]/str[@name='id'][.='4']", + "//result/doc[3]/str[@name='id'][.='2']"); + } + + @Test + public void incorrectVectorFieldType_shouldThrowException() { + String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; + + assertQEx( + "Incorrect vector field type should throw Exception", + "only DenseVectorField is compatible with Knn Query Parser", + req(CommonParams.Q, "{!knn f=id topK=10}" + vectorToSearch, "fl", "id"), + SolrException.ErrorCode.BAD_REQUEST); + } + + @Test + public void undefinedVectorField_shouldThrowException() { + String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; + + assertQEx( + "Undefined vector field should throw Exception", + "undefined field: \"notExistent\"", + req(CommonParams.Q, "{!knn f=notExistent topK=10}" + vectorToSearch, "fl", "id"), + SolrException.ErrorCode.BAD_REQUEST); + } + + @Test + public void missingVectorField_shouldThrowException() { + String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; + + assertQEx( + "missing vector field should throw Exception", + "the Dense Vector field 'f' is missing", + req(CommonParams.Q, "{!knn topK=10}" + vectorToSearch, "fl", "id"), + SolrException.ErrorCode.BAD_REQUEST); + } + + @Test + public void correctVectorField_shouldSearchOnThatField() { + String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; + + assertQ( + req(CommonParams.Q, "{!knn f=vector2 topK=5}" + vectorToSearch, "fl", "id"), + "//result[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='11']", + "//result/doc[2]/str[@name='id'][.='13']", + "//result/doc[3]/str[@name='id'][.='12']"); + } + + @Test + public void missingVectorToSearch_shouldThrowException() { + assertQEx( + "missing vector to search should throw Exception", + "the Dense Vector value 'v' to search is missing", + req(CommonParams.Q, "{!knn f=vector topK=10}", "fl", "id"), + SolrException.ErrorCode.BAD_REQUEST); + } + + @Test + public void incorrectVectorToSearchDimension_shouldThrowException() { + String vectorToSearch = "[2.0, 4.4, 3.]"; + assertQEx( + "missing vector to search should throw Exception", + "incorrect vector dimension. The vector value has size 3 while it is expected a vector with size 4", + req(CommonParams.Q, "{!knn f=vector topK=10}" + vectorToSearch, "fl", "id"), + SolrException.ErrorCode.BAD_REQUEST); + + vectorToSearch = "[2.0, 4.4,,]"; + assertQEx( + "incorrect vector to search should throw Exception", + "incorrect vector dimension. The vector value has size 2 while it is expected a vector with size 4", + req(CommonParams.Q, "{!knn f=vector topK=10}" + vectorToSearch, "fl", "id"), + SolrException.ErrorCode.BAD_REQUEST); + } + + @Test + public void incorrectVectorToSearch_shouldThrowException() { + String vectorToSearch = "2.0, 4.4, 3.5, 6.4"; + assertQEx( + "incorrect vector to search should throw Exception", + "incorrect vector format. 
The expected format is:'[f1,f2..f3]' where each element f is a float", + req(CommonParams.Q, "{!knn f=vector topK=10}" + vectorToSearch, "fl", "id"), + SolrException.ErrorCode.BAD_REQUEST); + + vectorToSearch = "[2.0, 4.4, 3.5, 6.4"; + assertQEx( + "incorrect vector to search should throw Exception", + "incorrect vector format. The expected format is:'[f1,f2..f3]' where each element f is a float", + req(CommonParams.Q, "{!knn f=vector topK=10}" + vectorToSearch, "fl", "id"), + SolrException.ErrorCode.BAD_REQUEST); + + vectorToSearch = "2.0, 4.4, 3.5, 6.4]"; + assertQEx( + "incorrect vector to search should throw Exception", + "incorrect vector format. The expected format is:'[f1,f2..f3]' where each element f is a float", + req(CommonParams.Q, "{!knn f=vector topK=10}" + vectorToSearch, "fl", "id"), + SolrException.ErrorCode.BAD_REQUEST); + + vectorToSearch = "[2.0, 4.4, 3.5, stringElement]"; + assertQEx( + "incorrect vector to search should throw Exception", + "incorrect vector element: ' stringElement'. The expected format is:'[f1,f2..f3]' where each element f is a float", + req(CommonParams.Q, "{!knn f=vector topK=10}" + vectorToSearch, "fl", "id"), + SolrException.ErrorCode.BAD_REQUEST); + + vectorToSearch = "[2.0, 4.4, , ]"; + assertQEx( + "incorrect vector to search should throw Exception", + "incorrect vector element: ' '. The expected format is:'[f1,f2..f3]' where each element f is a float", + req(CommonParams.Q, "{!knn f=vector topK=10}" + vectorToSearch, "fl", "id"), + SolrException.ErrorCode.BAD_REQUEST); + } + + @Test + public void correctQuery_shouldRankBySimilarityFunction() { + String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; + + assertQ( + req(CommonParams.Q, "{!knn f=vector topK=10}" + vectorToSearch, "fl", "id"), + "//result[@numFound='10']", + "//result/doc[1]/str[@name='id'][.='1']", + "//result/doc[2]/str[@name='id'][.='4']", + "//result/doc[3]/str[@name='id'][.='2']", + "//result/doc[4]/str[@name='id'][.='10']", + "//result/doc[5]/str[@name='id'][.='3']", + "//result/doc[6]/str[@name='id'][.='7']", + "//result/doc[7]/str[@name='id'][.='5']", + "//result/doc[8]/str[@name='id'][.='6']", + "//result/doc[9]/str[@name='id'][.='9']", + "//result/doc[10]/str[@name='id'][.='8']"); + } + + @Test + public void knnQueryWithFilterQuery_shouldIntersectResults() { + String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; + + assertQ( + req( + CommonParams.Q, + "{!knn f=vector topK=10}" + vectorToSearch, + "fq", + "id:(1 2 7 20)", + "fl", + "id"), + "//result[@numFound='3']", + "//result/doc[1]/str[@name='id'][.='1']", + "//result/doc[2]/str[@name='id'][.='2']", + "//result/doc[3]/str[@name='id'][.='7']"); /* - * See {@link org.apache.solr.search.ReRankQParserPlugin.ReRankQueryRescorer.combine} for more details. + * This behavior is counter-intuitive. + * You would expect the query to apply to only the results filtered by the filter query. + * So you ideally would like to see the following ranked list as a result: [4,2,3,9]. + * To get this please use the knn query parser as a reranker, example in a following test. 
+ * This is how filter queries work(it's just intersection of the bitsets coming from each query and filter query): + * Ranked List from q=[1,4,2,10] Set from fq={3,4,9,2} = [4,2] * */ - @Test - public void knnQueryAsRerank_shouldAddSimilarityFunctionScore() { - String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; - - assertQ(req(CommonParams.Q, "id:(3 4 9 2)", "rq", "{!rerank reRankQuery=$rqq reRankDocs=4 reRankWeight=1}", - "rqq", "{!knn f=vector topK=4}" + vectorToSearch, "fl", "id"), - "//result[@numFound='4']", - "//result/doc[1]/str[@name='id'][.='4']", - "//result/doc[2]/str[@name='id'][.='2']", - "//result/doc[3]/str[@name='id'][.='3']", - "//result/doc[4]/str[@name='id'][.='9']" - ); - } + assertQ( + req( + CommonParams.Q, + "{!knn f=vector topK=4}" + vectorToSearch, + "fq", + "id:(3 4 9 2)", + "fl", + "id"), + "//result[@numFound='2']", + "//result/doc[1]/str[@name='id'][.='4']", + "//result/doc[2]/str[@name='id'][.='2']"); + /* The ranking is now different as default solr score is used for the main query */ + assertQ( + req( + CommonParams.Q, + "id:(3 4 9 2)", + "fq", + "{!knn f=vector topK=4}" + vectorToSearch, + "fl", + "id"), + "//result[@numFound='2']", + "//result/doc[1]/str[@name='id'][.='2']", + "//result/doc[2]/str[@name='id'][.='4']"); + } + + /* + * See {@link org.apache.solr.search.ReRankQParserPlugin.ReRankQueryRescorer.combine} for more details. + * */ + @Test + public void knnQueryAsRerank_shouldAddSimilarityFunctionScore() { + String vectorToSearch = "[1.0, 2.0, 3.0, 4.0]"; + + assertQ( + req( + CommonParams.Q, + "id:(3 4 9 2)", + "rq", + "{!rerank reRankQuery=$rqq reRankDocs=4 reRankWeight=1}", + "rqq", + "{!knn f=vector topK=4}" + vectorToSearch, + "fl", + "id"), + "//result[@numFound='4']", + "//result/doc[1]/str[@name='id'][.='4']", + "//result/doc[2]/str[@name='id'][.='2']", + "//result/doc[3]/str[@name='id'][.='3']", + "//result/doc[4]/str[@name='id'][.='9']"); + } } diff --git a/solr/core/src/test/org/apache/solr/search/similarities/BaseSimilarityTestCase.java b/solr/core/src/test/org/apache/solr/search/similarities/BaseSimilarityTestCase.java index 850bc2df976..af7c1f0e98e 100644 --- a/solr/core/src/test/org/apache/solr/search/similarities/BaseSimilarityTestCase.java +++ b/solr/core/src/test/org/apache/solr/search/similarities/BaseSimilarityTestCase.java @@ -17,7 +17,6 @@ package org.apache.solr.search.similarities; import java.io.IOException; - import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper; import org.apache.lucene.search.similarities.Similarity; @@ -34,21 +33,20 @@ protected Similarity getSimilarity(String field) { throw new RuntimeException(e); } while (sim instanceof PerFieldSimilarityWrapper) { - sim = ((PerFieldSimilarityWrapper)sim).get(field); + sim = ((PerFieldSimilarityWrapper) sim).get(field); } return sim; } - /** - * Returns the similarity in use for the field, - * after asserting that it implements the specified class + /** + * Returns the similarity in use for the field, after asserting that it implements the specified + * class */ - protected <T extends Similarity> T getSimilarity(String field, - Class<T> clazz) { + protected <T extends Similarity> T getSimilarity(String field, Class<T> clazz) { Similarity sim = getSimilarity(field); - assertTrue("Similarity for Field " + field + - " does not match expected class: " + clazz.getName(), - clazz.isInstance(sim)); + assertTrue( + "Similarity for Field " + field + " does not match expected class: " + clazz.getName(), + clazz.isInstance(sim)); return clazz.cast(sim); } } diff --git 
a/solr/core/src/test/org/apache/solr/search/similarities/TestBM25SimilarityFactory.java b/solr/core/src/test/org/apache/solr/search/similarities/TestBM25SimilarityFactory.java index 3f6deacec42..ca1ec988b22 100644 --- a/solr/core/src/test/org/apache/solr/search/similarities/TestBM25SimilarityFactory.java +++ b/solr/core/src/test/org/apache/solr/search/similarities/TestBM25SimilarityFactory.java @@ -20,20 +20,18 @@ import org.apache.lucene.search.similarities.Similarity; import org.junit.BeforeClass; -/** - * Tests {@link BM25SimilarityFactory} - */ +/** Tests {@link BM25SimilarityFactory} */ public class TestBM25SimilarityFactory extends BaseSimilarityTestCase { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-basic.xml","schema-bm25.xml"); + initCore("solrconfig-basic.xml", "schema-bm25.xml"); } - + /** bm25 with default parameters */ public void test() throws Exception { assertEquals(BM25Similarity.class, getSimilarity("text").getClass()); } - + /** bm25 with parameters */ public void testParameters() throws Exception { Similarity sim = getSimilarity("text_params"); diff --git a/solr/core/src/test/org/apache/solr/search/similarities/TestBooleanSimilarityFactory.java b/solr/core/src/test/org/apache/solr/search/similarities/TestBooleanSimilarityFactory.java index 23e7d11ea56..ebd3e114ead 100644 --- a/solr/core/src/test/org/apache/solr/search/similarities/TestBooleanSimilarityFactory.java +++ b/solr/core/src/test/org/apache/solr/search/similarities/TestBooleanSimilarityFactory.java @@ -21,19 +21,19 @@ /** * Tests {@link BooleanSimilarityFactory} when specified on a per-fieldtype basis. + * * @see SchemaSimilarityFactory */ public class TestBooleanSimilarityFactory extends BaseSimilarityTestCase { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-basic.xml","schema-booleansimilarity.xml"); + initCore("solrconfig-basic.xml", "schema-booleansimilarity.xml"); } - + /** Boolean w/ default parameters */ public void testDefaults() throws Exception { BooleanSimilarity sim = getSimilarity("text", BooleanSimilarity.class); assertEquals(BooleanSimilarity.class, sim.getClass()); } - } diff --git a/solr/core/src/test/org/apache/solr/search/similarities/TestClassicSimilarityFactory.java b/solr/core/src/test/org/apache/solr/search/similarities/TestClassicSimilarityFactory.java index 274bf068fdb..93653682068 100644 --- a/solr/core/src/test/org/apache/solr/search/similarities/TestClassicSimilarityFactory.java +++ b/solr/core/src/test/org/apache/solr/search/similarities/TestClassicSimilarityFactory.java @@ -20,15 +20,17 @@ import org.junit.BeforeClass; /** - * Tests {@link ClassicSimilarityFactory} when specified on a per-fieldtype basis with various init options. + * Tests {@link ClassicSimilarityFactory} when specified on a per-fieldtype basis with various init + * options. 
+ * * @see SchemaSimilarityFactory */ public class TestClassicSimilarityFactory extends BaseSimilarityTestCase { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-basic.xml","schema-tfidf.xml"); + initCore("solrconfig-basic.xml", "schema-tfidf.xml"); } - + /** Classic w/ default parameters */ public void testDefaults() throws Exception { ClassicSimilarity sim = getSimilarity("text", ClassicSimilarity.class); @@ -39,5 +41,4 @@ public void testParams() throws Exception { ClassicSimilarity sim = getSimilarity("text_overlap", ClassicSimilarity.class); assertEquals(false, sim.getDiscountOverlaps()); } - } diff --git a/solr/core/src/test/org/apache/solr/search/similarities/TestDFISimilarityFactory.java b/solr/core/src/test/org/apache/solr/search/similarities/TestDFISimilarityFactory.java index c53ec37125d..cea9c6da6b2 100644 --- a/solr/core/src/test/org/apache/solr/search/similarities/TestDFISimilarityFactory.java +++ b/solr/core/src/test/org/apache/solr/search/similarities/TestDFISimilarityFactory.java @@ -21,9 +21,7 @@ import org.apache.lucene.search.similarities.Similarity; import org.junit.BeforeClass; -/** - * Tests {@link DFISimilarityFactory} - */ +/** Tests {@link DFISimilarityFactory} */ public class TestDFISimilarityFactory extends BaseSimilarityTestCase { @BeforeClass @@ -31,9 +29,7 @@ public static void beforeClass() throws Exception { initCore("solrconfig-basic.xml", "schema-dfi.xml"); } - /** - * dfi with no parameters - */ + /** dfi with no parameters */ public void test() throws Exception { Similarity sim = getSimilarity("text"); assertEquals(DFISimilarity.class, sim.getClass()); @@ -42,9 +38,7 @@ public void test() throws Exception { assertTrue(dfi.getIndependence() instanceof IndependenceChiSquared); } - /** - * dfi with discountOverlaps parameter set to false - */ + /** dfi with discountOverlaps parameter set to false */ public void testParameters() throws Exception { Similarity sim = getSimilarity("text_params"); assertEquals(DFISimilarity.class, sim.getClass()); @@ -52,4 +46,3 @@ public void testParameters() throws Exception { assertFalse(dfr.getDiscountOverlaps()); } } - diff --git a/solr/core/src/test/org/apache/solr/search/similarities/TestDFRSimilarityFactory.java b/solr/core/src/test/org/apache/solr/search/similarities/TestDFRSimilarityFactory.java index f3b05b3c39d..eccd721e936 100644 --- a/solr/core/src/test/org/apache/solr/search/similarities/TestDFRSimilarityFactory.java +++ b/solr/core/src/test/org/apache/solr/search/similarities/TestDFRSimilarityFactory.java @@ -26,15 +26,13 @@ import org.apache.lucene.search.similarities.Similarity; import org.junit.BeforeClass; -/** - * Tests {@link DFRSimilarityFactory} - */ +/** Tests {@link DFRSimilarityFactory} */ public class TestDFRSimilarityFactory extends BaseSimilarityTestCase { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-basic.xml","schema-dfr.xml"); + initCore("solrconfig-basic.xml", "schema-dfr.xml"); } - + /** dfr with default parameters */ public void test() throws Exception { Similarity sim = getSimilarity("text"); @@ -44,7 +42,7 @@ public void test() throws Exception { assertEquals(AfterEffectB.class, dfr.getAfterEffect().getClass()); assertEquals(NormalizationH2.class, dfr.getNormalization().getClass()); } - + /** dfr with parametrized normalization */ public void testParameters() throws Exception { Similarity sim = getSimilarity("text_params"); @@ -56,7 +54,7 @@ public void testParameters() throws Exception { NormalizationH3 norm 
= (NormalizationH3) dfr.getNormalization(); assertEquals(900f, norm.getMu(), 0.01f); } - + /** LUCENE-3566 */ public void testParameterC() throws Exception { Similarity sim = getSimilarity("text_paramc"); diff --git a/solr/core/src/test/org/apache/solr/search/similarities/TestIBSimilarityFactory.java b/solr/core/src/test/org/apache/solr/search/similarities/TestIBSimilarityFactory.java index 06e143d2fc0..b494b135461 100644 --- a/solr/core/src/test/org/apache/solr/search/similarities/TestIBSimilarityFactory.java +++ b/solr/core/src/test/org/apache/solr/search/similarities/TestIBSimilarityFactory.java @@ -26,15 +26,13 @@ import org.apache.lucene.search.similarities.Similarity; import org.junit.BeforeClass; -/** - * Tests {@link IBSimilarityFactory} - */ +/** Tests {@link IBSimilarityFactory} */ public class TestIBSimilarityFactory extends BaseSimilarityTestCase { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-basic.xml","schema-ib.xml"); + initCore("solrconfig-basic.xml", "schema-ib.xml"); } - + /** spl/df/h2 with default parameters */ public void test() throws Exception { Similarity sim = getSimilarity("text"); @@ -44,7 +42,7 @@ public void test() throws Exception { assertEquals(LambdaDF.class, ib.getLambda().getClass()); assertEquals(NormalizationH2.class, ib.getNormalization().getClass()); } - + /** ll/ttf/h3 with parametrized normalization */ public void testParameters() throws Exception { Similarity sim = getSimilarity("text_params"); diff --git a/solr/core/src/test/org/apache/solr/search/similarities/TestLMDirichletSimilarityFactory.java b/solr/core/src/test/org/apache/solr/search/similarities/TestLMDirichletSimilarityFactory.java index 3f75764b4ab..f8442bb5fee 100644 --- a/solr/core/src/test/org/apache/solr/search/similarities/TestLMDirichletSimilarityFactory.java +++ b/solr/core/src/test/org/apache/solr/search/similarities/TestLMDirichletSimilarityFactory.java @@ -20,20 +20,18 @@ import org.apache.lucene.search.similarities.Similarity; import org.junit.BeforeClass; -/** - * Tests {@link LMDirichletSimilarityFactory} - */ +/** Tests {@link LMDirichletSimilarityFactory} */ public class TestLMDirichletSimilarityFactory extends BaseSimilarityTestCase { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-basic.xml","schema-lmdirichlet.xml"); + initCore("solrconfig-basic.xml", "schema-lmdirichlet.xml"); } - + /** dirichlet with default parameters */ public void test() throws Exception { assertEquals(LMDirichletSimilarity.class, getSimilarity("text").getClass()); } - + /** dirichlet with parameters */ public void testParameters() throws Exception { Similarity sim = getSimilarity("text_params"); diff --git a/solr/core/src/test/org/apache/solr/search/similarities/TestLMJelinekMercerSimilarityFactory.java b/solr/core/src/test/org/apache/solr/search/similarities/TestLMJelinekMercerSimilarityFactory.java index 9e7eeaf3acb..a5e3d5204fb 100644 --- a/solr/core/src/test/org/apache/solr/search/similarities/TestLMJelinekMercerSimilarityFactory.java +++ b/solr/core/src/test/org/apache/solr/search/similarities/TestLMJelinekMercerSimilarityFactory.java @@ -20,20 +20,18 @@ import org.apache.lucene.search.similarities.Similarity; import org.junit.BeforeClass; -/** - * Tests {@link LMJelinekMercerSimilarityFactory} - */ +/** Tests {@link LMJelinekMercerSimilarityFactory} */ public class TestLMJelinekMercerSimilarityFactory extends BaseSimilarityTestCase { @BeforeClass public static void beforeClass() throws Exception { - 
initCore("solrconfig-basic.xml","schema-lmjelinekmercer.xml"); + initCore("solrconfig-basic.xml", "schema-lmjelinekmercer.xml"); } - + /** jelinek-mercer with default parameters */ public void test() throws Exception { assertEquals(LMJelinekMercerSimilarity.class, getSimilarity("text").getClass()); } - + /** jelinek-mercer with parameters */ public void testParameters() throws Exception { Similarity sim = getSimilarity("text_params"); diff --git a/solr/core/src/test/org/apache/solr/search/similarities/TestNonDefinedSimilarityFactory.java b/solr/core/src/test/org/apache/solr/search/similarities/TestNonDefinedSimilarityFactory.java index b5b9d2e60a9..c51da1c6ad2 100644 --- a/solr/core/src/test/org/apache/solr/search/similarities/TestNonDefinedSimilarityFactory.java +++ b/solr/core/src/test/org/apache/solr/search/similarities/TestNonDefinedSimilarityFactory.java @@ -20,9 +20,9 @@ import org.junit.After; /** - * Verifies that the default behavior of the implicit {@link BM25Similarity} - * (ie: no similarity configured in schema.xml at all) is consistent with - * expectations based on the luceneMatchVersion + * Verifies that the default behavior of the implicit {@link BM25Similarity} (ie: no similarity + * configured in schema.xml at all) is consistent with expectations based on the luceneMatchVersion + * * @see SOLR-5561 * @see SOLR-8057 * @see SOLR-13025 @@ -37,9 +37,8 @@ public void cleanup() throws Exception { public void testCurrentBM25FromV8() throws Exception { // no sys prop set, rely on LATEST - initCore("solrconfig-basic.xml","schema-tiny.xml"); + initCore("solrconfig-basic.xml", "schema-tiny.xml"); BM25Similarity sim = getSimilarity("text", BM25Similarity.class); assertEquals(0.75F, sim.getB(), 0.0F); } - } diff --git a/solr/core/src/test/org/apache/solr/search/similarities/TestPerFieldSimilarity.java b/solr/core/src/test/org/apache/solr/search/similarities/TestPerFieldSimilarity.java index 58fe6eff2f0..32db6032970 100644 --- a/solr/core/src/test/org/apache/solr/search/similarities/TestPerFieldSimilarity.java +++ b/solr/core/src/test/org/apache/solr/search/similarities/TestPerFieldSimilarity.java @@ -21,52 +21,51 @@ import org.apache.lucene.search.similarities.Similarity; import org.junit.BeforeClass; -/** - * Tests per-field similarity support in the schema - */ +/** Tests per-field similarity support in the schema */ public class TestPerFieldSimilarity extends BaseSimilarityTestCase { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-basic.xml","schema-sim.xml"); + initCore("solrconfig-basic.xml", "schema-sim.xml"); } - + /** test a field where the sim is specified directly */ public void testDirect() throws Exception { assertEquals(SweetSpotSimilarity.class, getSimilarity("sim1text").getClass()); } - + /** ... and for a dynamic field */ public void testDirectDynamic() throws Exception { assertEquals(SweetSpotSimilarity.class, getSimilarity("text_sim1").getClass()); } - + /** test a field where a configurable sim factory is defined */ public void testFactory() throws Exception { Similarity sim = getSimilarity("sim2text"); assertEquals(MockConfigurableSimilarity.class, sim.getClass()); - assertEquals("is there an echo?", ((MockConfigurableSimilarity)sim).getPassthrough()); + assertEquals("is there an echo?", ((MockConfigurableSimilarity) sim).getPassthrough()); } - + /** ... 
and for a dynamic field */ public void testFactoryDynamic() throws Exception { Similarity sim = getSimilarity("text_sim2"); assertEquals(MockConfigurableSimilarity.class, sim.getClass()); - assertEquals("is there an echo?", ((MockConfigurableSimilarity)sim).getPassthrough()); + assertEquals("is there an echo?", ((MockConfigurableSimilarity) sim).getPassthrough()); } - + /** test a field where no similarity is specified */ public void testDefaults() throws Exception { Similarity sim = getSimilarity("sim3text"); - assertEquals(BM25Similarity.class, sim.getClass());; + assertEquals(BM25Similarity.class, sim.getClass()); } - + /** ... and for a dynamic field */ public void testDefaultsDynamic() throws Exception { Similarity sim = getSimilarity("text_sim3"); assertEquals(BM25Similarity.class, sim.getClass()); } - + /** test a field that does not exist */ public void testNonexistent() throws Exception { Similarity sim = getSimilarity("sdfdsfdsfdswr5fsdfdsfdsfs"); diff --git a/solr/core/src/test/org/apache/solr/search/similarities/TestPerFieldSimilarityWithDefaultOverride.java b/solr/core/src/test/org/apache/solr/search/similarities/TestPerFieldSimilarityWithDefaultOverride.java index ad066638c83..2e65cf1375e 100644 --- a/solr/core/src/test/org/apache/solr/search/similarities/TestPerFieldSimilarityWithDefaultOverride.java +++ b/solr/core/src/test/org/apache/solr/search/similarities/TestPerFieldSimilarityWithDefaultOverride.java @@ -22,49 +22,50 @@ /** * Tests per-field similarity support in the schema when SchemaSimilarityFactory is explicitly * configured to use a custom default sim for field types that do not override it. + * * @see TestPerFieldSimilarity */ public class TestPerFieldSimilarityWithDefaultOverride extends BaseSimilarityTestCase { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-basic.xml","schema-sim-default-override.xml"); + initCore("solrconfig-basic.xml", "schema-sim-default-override.xml"); } - + /** test a field where the sim is specified directly */ public void testDirect() throws Exception { assertNotNull(getSimilarity("sim1text", SweetSpotSimilarity.class)); } - + /** ... and for a dynamic field */ public void testDirectDynamic() throws Exception { assertNotNull(getSimilarity("text_sim1", SweetSpotSimilarity.class)); } - + /** test a field where a configurable sim factory is explicitly defined */ public void testDirectFactory() throws Exception { MockConfigurableSimilarity sim = getSimilarity("sim2text", MockConfigurableSimilarity.class); assertEquals("is there an echo?", sim.getPassthrough()); } - + /** ... and for a dynamic field */ public void testDirectFactoryDynamic() throws Exception { MockConfigurableSimilarity sim = getSimilarity("text_sim2", MockConfigurableSimilarity.class); assertEquals("is there an echo?", sim.getPassthrough()); } - + /** test a field where no similarity is specified */ public void testDefaults() throws Exception { MockConfigurableSimilarity sim = getSimilarity("sim3text", MockConfigurableSimilarity.class); assertEquals("is there an echo?", sim.getPassthrough()); } - + /** ... 
and for a dynamic field */ public void testDefaultsDynamic() throws Exception { MockConfigurableSimilarity sim = getSimilarity("text_sim3", MockConfigurableSimilarity.class); assertEquals("is there an echo?", sim.getPassthrough()); } - + /** test a field that does not exist */ public void testNonexistent() throws Exception { MockConfigurableSimilarity sim = getSimilarity("text_sim3", MockConfigurableSimilarity.class); diff --git a/solr/core/src/test/org/apache/solr/search/similarities/TestSweetSpotSimilarityFactory.java b/solr/core/src/test/org/apache/solr/search/similarities/TestSweetSpotSimilarityFactory.java index c6f4ac3048a..81228d1d369 100644 --- a/solr/core/src/test/org/apache/solr/search/similarities/TestSweetSpotSimilarityFactory.java +++ b/solr/core/src/test/org/apache/solr/search/similarities/TestSweetSpotSimilarityFactory.java @@ -20,7 +20,6 @@ import java.util.Collections; import java.util.stream.Collectors; import java.util.stream.IntStream; - import org.apache.lucene.document.Field.Store; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -34,13 +33,11 @@ import org.apache.lucene.store.Directory; import org.junit.BeforeClass; -/** - * Tests {@link SweetSpotSimilarityFactory} - */ +/** Tests {@link SweetSpotSimilarityFactory} */ public class TestSweetSpotSimilarityFactory extends BaseSimilarityTestCase { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-basic.xml","schema-sweetspot.xml"); + initCore("solrconfig-basic.xml", "schema-sweetspot.xml"); } private static float computeNorm(Similarity sim, int length) throws IOException { @@ -80,73 +77,60 @@ public void testDefaults() throws Exception { // SSS tf w/defaults should behave just like DS ClassicSimilarity d = new ClassicSimilarity(); - for (int i = 0; i <=1000; i++) { - assertEquals("tf: i="+i, d.tf(i), sim.tf(i), 0.0F); + for (int i = 0; i <= 1000; i++) { + assertEquals("tf: i=" + i, d.tf(i), sim.tf(i), 0.0F); } // default norm sanity check - assertEquals("norm 1", 1.00F, computeNorm(sim, 1), 0.0F); - assertEquals("norm 4", 0.50F, computeNorm(sim, 4), 0.0F); + assertEquals("norm 1", 1.00F, computeNorm(sim, 1), 0.0F); + assertEquals("norm 4", 0.50F, computeNorm(sim, 4), 0.0F); assertEquals("norm 16", 0.25F, computeNorm(sim, 16), 0.0F); } - + /** baseline with parameters */ public void testBaselineParameters() throws Exception { - SweetSpotSimilarity sim = getSimilarity("text_baseline", - SweetSpotSimilarity.class); - + SweetSpotSimilarity sim = getSimilarity("text_baseline", SweetSpotSimilarity.class); + ClassicSimilarity d = new ClassicSimilarity(); // constant up to 6 - for (int i = 1; i <=6; i++) { - assertEquals("tf i="+i, 1.5F, sim.tf(i), 0.0F); + for (int i = 1; i <= 6; i++) { + assertEquals("tf i=" + i, 1.5F, sim.tf(i), 0.0F); } // less then default sim above 6 - for (int i = 6; i <=1000; i++) { - assertTrue("tf: i="+i+" : s="+sim.tf(i)+ - " < d="+d.tf(i), - sim.tf(i) < d.tf(i)); + for (int i = 6; i <= 1000; i++) { + assertTrue("tf: i=" + i + " : s=" + sim.tf(i) + " < d=" + d.tf(i), sim.tf(i) < d.tf(i)); } // norms: plateau from 3-5 - assertEquals("norm 1 == 7", - computeNorm(sim, 1), computeNorm(sim, 7), 0.0F); - assertEquals("norm 2 == 6", - computeNorm(sim, 1), computeNorm(sim, 7), 0.0F); - assertEquals("norm 3", 1.00F, computeNorm(sim, 3), 0.0F); - assertEquals("norm 4", 1.00F, computeNorm(sim, 4), 0.0F); - assertEquals("norm 5", 1.00F, computeNorm(sim, 5), 0.0F); - assertTrue("norm 6 too high: " + computeNorm(sim, 6), - 
computeNorm(sim, 6) < 1.0F); - assertTrue("norm 7 higher then norm 6", - computeNorm(sim, 7) < computeNorm(sim, 6)); + assertEquals("norm 1 == 7", computeNorm(sim, 1), computeNorm(sim, 7), 0.0F); + assertEquals("norm 2 == 6", computeNorm(sim, 1), computeNorm(sim, 7), 0.0F); + assertEquals("norm 3", 1.00F, computeNorm(sim, 3), 0.0F); + assertEquals("norm 4", 1.00F, computeNorm(sim, 4), 0.0F); + assertEquals("norm 5", 1.00F, computeNorm(sim, 5), 0.0F); + assertTrue("norm 6 too high: " + computeNorm(sim, 6), computeNorm(sim, 6) < 1.0F); + assertTrue("norm 7 higher then norm 6", computeNorm(sim, 7) < computeNorm(sim, 6)); assertEquals("norm 20", 0.25F, computeNorm(sim, 20), 0.0F); } /** hyperbolic with parameters */ public void testHyperbolicParameters() throws Exception { - SweetSpotSimilarity sim = getSimilarity("text_hyperbolic", - SweetSpotSimilarity.class); + SweetSpotSimilarity sim = getSimilarity("text_hyperbolic", SweetSpotSimilarity.class); - for (int i = 1; i <=1000; i++) { - assertTrue("MIN tf: i="+i+" : s="+sim.tf(i), - 3.3F <= sim.tf(i)); - assertTrue("MAX tf: i="+i+" : s="+sim.tf(i), - sim.tf(i) <= 7.7F); + for (int i = 1; i <= 1000; i++) { + assertTrue("MIN tf: i=" + i + " : s=" + sim.tf(i), 3.3F <= sim.tf(i)); + assertTrue("MAX tf: i=" + i + " : s=" + sim.tf(i), sim.tf(i) <= 7.7F); } - assertEquals("MID tf", 3.3F+(7.7F - 3.3F)/2.0F, sim.tf(5), 0.00001F); + assertEquals("MID tf", 3.3F + (7.7F - 3.3F) / 2.0F, sim.tf(5), 0.00001F); // norms: plateau from 1-5, shallow slope - assertEquals("norm 1", 1.00F, computeNorm(sim, 1), 0.0F); - assertEquals("norm 2", 1.00F, computeNorm(sim, 2), 0.0F); - assertEquals("norm 3", 1.00F, computeNorm(sim, 3), 0.0F); - assertEquals("norm 4", 1.00F, computeNorm(sim, 4), 0.0F); - assertEquals("norm 5", 1.00F, computeNorm(sim, 5), 0.0F); - assertTrue("norm 6 too high: " + computeNorm(sim, 6), - computeNorm(sim, 6) < 1.0F); - assertTrue("norm 7 higher then norm 6", - computeNorm(sim, 7) < computeNorm(sim, 6)); - assertTrue("norm 20 not high enough: " + computeNorm(sim, 20), - 0.25F < computeNorm(sim, 20)); + assertEquals("norm 1", 1.00F, computeNorm(sim, 1), 0.0F); + assertEquals("norm 2", 1.00F, computeNorm(sim, 2), 0.0F); + assertEquals("norm 3", 1.00F, computeNorm(sim, 3), 0.0F); + assertEquals("norm 4", 1.00F, computeNorm(sim, 4), 0.0F); + assertEquals("norm 5", 1.00F, computeNorm(sim, 5), 0.0F); + assertTrue("norm 6 too high: " + computeNorm(sim, 6), computeNorm(sim, 6) < 1.0F); + assertTrue("norm 7 higher then norm 6", computeNorm(sim, 7) < computeNorm(sim, 6)); + assertTrue("norm 20 not high enough: " + computeNorm(sim, 20), 0.25F < computeNorm(sim, 20)); } } diff --git a/solr/core/src/test/org/apache/solr/search/stats/TestBaseStatsCache.java b/solr/core/src/test/org/apache/solr/search/stats/TestBaseStatsCache.java index 33ceb5b0342..b3bfc30f0e7 100644 --- a/solr/core/src/test/org/apache/solr/search/stats/TestBaseStatsCache.java +++ b/solr/core/src/test/org/apache/solr/search/stats/TestBaseStatsCache.java @@ -16,13 +16,12 @@ */ package org.apache.solr.search.stats; +import java.util.Iterator; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; import org.junit.Ignore; -import java.util.Iterator; - @Ignore("Abstract calls should not executed as test") public abstract class TestBaseStatsCache extends TestDefaultStatsCache { @@ -38,7 +37,7 @@ public void distribTearDown() throws Exception { super.distribTearDown(); 
System.clearProperty("solr.statsCache"); } - + // in this case, as the number of shards increases, per-shard scores should // remain identical @Override @@ -52,9 +51,9 @@ protected void checkResponse(QueryResponse controlRsp, QueryResponse shardRsp) { System.out.println(shardRsp); SolrDocumentList shardList = shardRsp.getResults(); SolrDocumentList controlList = controlRsp.getResults(); - + assertEquals(controlList.size(), shardList.size()); - + assertEquals(controlList.getNumFound(), shardList.getNumFound()); Iterator it = controlList.iterator(); Iterator it2 = shardList.iterator(); @@ -64,5 +63,4 @@ protected void checkResponse(QueryResponse controlRsp, QueryResponse shardRsp) { assertEquals(controlDoc.getFieldValue("score"), shardDoc.getFieldValue("score")); } } - } diff --git a/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java b/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java index 0aefb9b775d..50db3e0e44c 100644 --- a/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java +++ b/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java @@ -23,64 +23,63 @@ import org.apache.solr.common.params.ModifiableSolrParams; import org.junit.Test; -// See: https://issues.apache.org/jira/browse/SOLR-12028 Tests cannot remove files on Windows machines occasionally +// See: https://issues.apache.org/jira/browse/SOLR-12028 Tests cannot remove files on Windows +// machines occasionally public class TestDefaultStatsCache extends BaseDistributedSearchTestCase { private int docId = 0; - + @Override public void distribSetUp() throws Exception { System.setProperty("metricsEnabled", "true"); super.distribSetUp(); System.setProperty("solr.statsCache", LocalStatsCache.class.getName()); } - + public void distribTearDown() throws Exception { super.distribTearDown(); System.clearProperty("solr.statsCache"); } - @Test + @Test public void test() throws Exception { del("*:*"); commit(); - String aDocId=null; + String aDocId = null; for (int i = 0; i < clients.size(); i++) { int shard = i + 1; for (int j = 0; j <= i; j++) { int currentId = docId++; - index_specific(i, id,currentId , "a_t", "one two three", - "shard_i", shard); - aDocId = rarely() ? currentId+"":aDocId; + index_specific(i, id, currentId, "a_t", "one two three", "shard_i", shard); + aDocId = rarely() ? currentId + "" : aDocId; } } commit(); handle.clear(); - handle.put("QTime", SKIPVAL); + handle.put("QTime", SKIPVAL); handle.put("timestamp", SKIPVAL); - + if (aDocId != null) { - dfQuery("q", "id:"+aDocId, "debugQuery", "true", "fl", "*,score"); + dfQuery("q", "id:" + aDocId, "debugQuery", "true", "fl", "*,score"); } dfQuery("q", "a_t:one", "debugQuery", "true", "fl", "*,score"); - + // add another document for (int i = 0; i < clients.size(); i++) { int shard = i + 1; for (int j = 0; j <= i; j++) { int currentId = docId++; - index_specific(i, id, currentId, "a_t", "one two three four five", - "shard_i", shard); - aDocId = rarely() ? currentId+"":aDocId; + index_specific(i, id, currentId, "a_t", "one two three four five", "shard_i", shard); + aDocId = rarely() ? 
currentId + "" : aDocId; } } commit(); if (aDocId != null) { - dfQuery("q", "{!cache=false}id:"+aDocId,"debugQuery", "true", "fl", "*,score"); + dfQuery("q", "{!cache=false}id:" + aDocId, "debugQuery", "true", "fl", "*,score"); } dfQuery("q", "a_t:one a_t:four", "debugQuery", "true", "fl", "*,score"); } - + // in this case, as the number of shards increases, per-shard scores begin to // diverge due to the different docFreq-s per shard. protected void checkResponse(QueryResponse controlRsp, QueryResponse shardRsp) { @@ -94,16 +93,16 @@ protected void checkResponse(QueryResponse controlRsp, QueryResponse shardRsp) { assertEquals(controlScore, shardScore); } } - + protected void dfQuery(Object... q) throws Exception { final ModifiableSolrParams params = new ModifiableSolrParams(); - + for (int i = 0; i < q.length; i += 2) { params.add(q[i].toString(), q[i + 1].toString()); } - + final QueryResponse controlRsp = controlClient.query(params); - + // query a random server params.set("shards", shards); int which = r.nextInt(clients.size()); diff --git a/solr/core/src/test/org/apache/solr/search/stats/TestDistribIDF.java b/solr/core/src/test/org/apache/solr/search/stats/TestDistribIDF.java index cf0ac7e9ae1..e5483d37073 100644 --- a/solr/core/src/test/org/apache/solr/search/stats/TestDistribIDF.java +++ b/solr/core/src/test/org/apache/solr/search/stats/TestDistribIDF.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.lang.invoke.MethodHandles; - import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrClient; @@ -72,7 +71,7 @@ public void tearDown() throws Exception { @Test public void testSimpleQuery() throws Exception { - //3 shards. 3rd shard won't have any data. + // 3 shards. 3rd shard won't have any data. createCollection("onecollection", "conf1", ImplicitDocRouter.NAME); createCollection("onecollection_local", "conf2", ImplicitDocRouter.NAME); @@ -91,13 +90,14 @@ public void testSimpleQuery() throws Exception { solrCluster.getSolrClient().add("onecollection_local", doc); int nDocs = TestUtil.nextInt(random(), 10, 100); - for (int i=0; i registries = getMetricsReigstries(testHarness.get().cluster); - Timer timer = ((Timer) registries.get(0).getMetrics().get("SECURITY./auditlogging.CallbackAuditLoggerPlugin.queuedTime")); + Timer timer = + ((Timer) + registries + .get(0) + .getMetrics() + .get("SECURITY./auditlogging.CallbackAuditLoggerPlugin.queuedTime")); double meanTimeOnQueue = timer.getSnapshot().getMean(); double meanTimeExpected = (start - end) / 3.0D; - assertTrue("Expecting mean time on queue > "+meanTimeExpected+", got " + meanTimeOnQueue, - meanTimeOnQueue > meanTimeExpected); + assertTrue( + "Expecting mean time on queue > " + meanTimeExpected + ", got " + meanTimeOnQueue, + meanTimeOnQueue > meanTimeExpected); } @Test @@ -162,21 +168,24 @@ public void testAsyncQueueDrain() throws Exception { gate.release(preShutdownEventsAllowed); runThreeTestAdminCommands(); - final List events = new ArrayList<> - (harness.receiver.waitForAuditEvents(preShutdownEventsAllowed)); + final List events = + new ArrayList<>(harness.receiver.waitForAuditEvents(preShutdownEventsAllowed)); assertEquals(preShutdownEventsAllowed, events.size()); // Now shutdown cluster while 1 event still in process // Do this in a background thread because it blocks... 
- final Thread shutdownThread = new SolrNamedThreadFactory("shutdown") - .newThread(() -> { try { - log.info("START Shutting down Cluster."); - harness.shutdownCluster(); - log.info("END Shutting down Cluster."); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); + final Thread shutdownThread = + new SolrNamedThreadFactory("shutdown") + .newThread( + () -> { + try { + log.info("START Shutting down Cluster."); + harness.shutdownCluster(); + log.info("END Shutting down Cluster."); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); try { shutdownThread.start(); // release the ticket so the event can be processed @@ -190,42 +199,70 @@ public void testAsyncQueueDrain() throws Exception { shutdownThread.join(); } } - + @Test public void testMuteAdminListCollections() throws Exception { - setupCluster(false, null, false, "\"type:UNKNOWN\"", "[ \"path:/admin\", \"param:action=LIST\" ]"); + setupCluster( + false, null, false, "\"type:UNKNOWN\"", "[ \"path:/admin\", \"param:action=LIST\" ]"); runThreeTestAdminCommands(); testHarness.get().shutdownCluster(); final List events = testHarness.get().receiver.waitForAuditEvents(2); assertEquals(2, events.size()); // sanity check - assertAuditEvent(events.get(0), COMPLETED, "/admin/collections", ADMIN, null, 200, - "action", "CLUSTERSTATUS"); - - assertAuditEvent(events.get(1), COMPLETED, "/admin/collections", ADMIN, null, 200, - "action", "OVERSEERSTATUS"); + assertAuditEvent( + events.get(0), + COMPLETED, + "/admin/collections", + ADMIN, + null, + 200, + "action", + "CLUSTERSTATUS"); + + assertAuditEvent( + events.get(1), + COMPLETED, + "/admin/collections", + ADMIN, + null, + 200, + "action", + "OVERSEERSTATUS"); } @Test public void searchWithException() throws Exception { setupCluster(false, null, false); - testHarness.get().cluster.getSolrClient().request(CollectionAdminRequest.createCollection("test", 1, 1)); - expectThrows(SolrException.class, () -> { - testHarness.get().cluster.getSolrClient().query("test", new MapSolrParams(Collections.singletonMap("q", "a(bc"))); - }); + testHarness + .get() + .cluster + .getSolrClient() + .request(CollectionAdminRequest.createCollection("test", 1, 1)); + expectThrows( + SolrException.class, + () -> { + testHarness + .get() + .cluster + .getSolrClient() + .query("test", new MapSolrParams(Collections.singletonMap("q", "a(bc"))); + }); final List events = testHarness.get().receiver.waitForAuditEvents(3); assertAuditEvent(events.get(0), COMPLETED, "/admin/cores"); assertAuditEvent(events.get(1), COMPLETED, "/admin/collections"); - assertAuditEvent(events.get(2), ERROR,"/select", SEARCH, null, 400); + assertAuditEvent(events.get(2), ERROR, "/select", SEARCH, null, 400); } @Test public void illegalAdminPathError() throws Exception { setupCluster(false, null, false); String baseUrl = testHarness.get().cluster.getJettySolrRunner(0).getBaseUrl().toString(); - expectThrows(FileNotFoundException.class, () -> { - IOUtils.toString(new URL(baseUrl.replace("/solr", "") + "/api/node/foo"), StandardCharsets.UTF_8); - }); + expectThrows( + FileNotFoundException.class, + () -> { + IOUtils.toString( + new URL(baseUrl.replace("/solr", "") + "/api/node/foo"), StandardCharsets.UTF_8); + }); final List events = testHarness.get().receiver.waitForAuditEvents(1); assertAuditEvent(events.get(0), ERROR, "/api/node/foo", ADMIN, null, 404); } @@ -247,8 +284,10 @@ public void authValid() throws Exception { client.request(req); final List events = receiver.waitForAuditEvents(2); - 
assertAuditEvent(events.get(0), COMPLETED, "/admin/collections", ADMIN, null, 200, "action", "LIST"); - assertAuditEvent(events.get(1), COMPLETED, "/admin/collections", ADMIN, "solr", 200, "action", "LIST"); + assertAuditEvent( + events.get(0), COMPLETED, "/admin/collections", ADMIN, null, 200, "action", "LIST"); + assertAuditEvent( + events.get(1), COMPLETED, "/admin/collections", ADMIN, "solr", 200, "action", "LIST"); } { // valid CREATE request: Authenticated admin user should be allowed to CREATE collection @@ -258,8 +297,10 @@ public void authValid() throws Exception { // collection createion leads to AuditEvent's for the core as well... final List events = receiver.waitForAuditEvents(2); - assertAuditEvent(events.get(0), COMPLETED, "/admin/cores", ADMIN, null, 200, "action", "CREATE"); - assertAuditEvent(events.get(1), COMPLETED, "/admin/collections", ADMIN, null, 200, "action", "CREATE"); + assertAuditEvent( + events.get(0), COMPLETED, "/admin/cores", ADMIN, null, 200, "action", "CREATE"); + assertAuditEvent( + events.get(1), COMPLETED, "/admin/collections", ADMIN, null, 200, "action", "CREATE"); } } @@ -270,34 +311,54 @@ public void authFailures() throws Exception { final CallbackReceiver receiver = testHarness.get().receiver; { // invalid request: Authenticated user not allowed to CREATE w/o Authorization - final SolrException e = expectThrows(SolrException.class, () -> { - final Create createRequest = CollectionAdminRequest.createCollection("test_jimbo", 1, 1); - createRequest.setBasicAuthCredentials("jimbo", JIMBO_PASS); - client.request(createRequest); - }); + final SolrException e = + expectThrows( + SolrException.class, + () -> { + final Create createRequest = + CollectionAdminRequest.createCollection("test_jimbo", 1, 1); + createRequest.setBasicAuthCredentials("jimbo", JIMBO_PASS); + client.request(createRequest); + }); assertEquals(403, e.code()); final List events = receiver.waitForAuditEvents(1); - assertAuditEvent(events.get(0), UNAUTHORIZED, "/admin/collections", ADMIN, "jimbo", 403, "name", "test_jimbo"); + assertAuditEvent( + events.get(0), + UNAUTHORIZED, + "/admin/collections", + ADMIN, + "jimbo", + 403, + "name", + "test_jimbo"); } { // invalid request: Anon user not allowed to CREATE w/o authentication + authorization - final SolrException e = expectThrows(SolrException.class, () -> { - Create createRequest = CollectionAdminRequest.createCollection("test_anon", 1, 1); - client.request(createRequest); - }); + final SolrException e = + expectThrows( + SolrException.class, + () -> { + Create createRequest = CollectionAdminRequest.createCollection("test_anon", 1, 1); + client.request(createRequest); + }); assertEquals(401, e.code()); final List events = receiver.waitForAuditEvents(1); - assertAuditEvent(events.get(0), REJECTED, "/admin/collections", ADMIN, null, 401, "name", "test_anon"); + assertAuditEvent( + events.get(0), REJECTED, "/admin/collections", ADMIN, null, 401, "name", "test_anon"); } { // invalid request: Admin user not Authenticated due to incorrect password - final SolrException e = expectThrows(SolrException.class, () -> { - Create createRequest = CollectionAdminRequest.createCollection("test_wrongpass", 1, 1); - createRequest.setBasicAuthCredentials("solr", "wrong_" + SOLR_PASS); - client.request(createRequest); - }); + final SolrException e = + expectThrows( + SolrException.class, + () -> { + Create createRequest = + CollectionAdminRequest.createCollection("test_wrongpass", 1, 1); + createRequest.setBasicAuthCredentials("solr", "wrong_" + 
SOLR_PASS); + client.request(createRequest); + }); assertEquals(401, e.code()); final List<AuditEvent> events = receiver.waitForAuditEvents(1); @@ -306,11 +367,19 @@ public void authFailures() throws Exception { } } - private static void assertAuditEvent(AuditEvent e, EventType type, String path, String... params) { - assertAuditEvent(e, type, path, null, null,null, params); + private static void assertAuditEvent( + AuditEvent e, EventType type, String path, String... params) { + assertAuditEvent(e, type, path, null, null, null, params); } - private static void assertAuditEvent(AuditEvent e, EventType type, String path, RequestType requestType, String username, Integer status, String... params) { + private static void assertAuditEvent( + AuditEvent e, + EventType type, + String path, + RequestType requestType, + String username, + Integer status, + String... params) { try { assertEquals(type, e.getEventType()); assertEquals(path, e.getResource()); @@ -339,15 +408,21 @@ private static void assertAuditEvent(AuditEvent e, EventType type, String path, private ArrayList<MetricRegistry> getMetricsReigstries(MiniSolrCloudCluster cluster) { ArrayList<MetricRegistry> registries = new ArrayList<>(); - cluster.getJettySolrRunners().forEach(r -> { - MetricRegistry registry = r.getCoreContainer().getMetricManager().registry("solr.node"); - assertNotNull(registry); - registries.add(registry); - }); + cluster + .getJettySolrRunners() + .forEach( + r -> { + MetricRegistry registry = + r.getCoreContainer().getMetricManager().registry("solr.node"); + assertNotNull(registry); + registries.add(registry); + }); return registries; } - /** @see #assertThreeTestAdminEvents */ + /** + * @see #assertThreeTestAdminEvents + */ private void runThreeTestAdminCommands() throws IOException, SolrServerException { SolrClient client = testHarness.get().cluster.getSolrClient(); CollectionAdminRequest.listCollections(client); @@ -355,57 +430,95 @@ private void runThreeTestAdminCommands() throws IOException, SolrServerException client.request(getOverseerStatus()); } - /** @see #runThreeTestAdminCommands */ + /** + * @see #runThreeTestAdminCommands + */ private void assertThreeTestAdminEvents() throws Exception { final CallbackReceiver receiver = testHarness.get().receiver; final List<AuditEvent> events = receiver.waitForAuditEvents(3); assertThreeTestAdminEvents(events); } - /** @see #runThreeTestAdminCommands */ + /** + * @see #runThreeTestAdminCommands + */ private static void assertThreeTestAdminEvents(final List<AuditEvent> events) throws Exception { assertEquals(3, events.size()); // sanity check - assertAuditEvent(events.get(0), COMPLETED, "/admin/collections", ADMIN, null, 200, - "action", "LIST", "wt", "javabin"); - - assertAuditEvent(events.get(1), COMPLETED, "/admin/collections", ADMIN, null, 200, - "action", "CLUSTERSTATUS"); - - assertAuditEvent(events.get(2), COMPLETED, "/admin/collections", ADMIN, null, 200, - "action", "OVERSEERSTATUS"); - + assertAuditEvent( + events.get(0), + COMPLETED, + "/admin/collections", + ADMIN, + null, + 200, + "action", + "LIST", + "wt", + "javabin"); + + assertAuditEvent( + events.get(1), + COMPLETED, + "/admin/collections", + ADMIN, + null, + 200, + "action", + "CLUSTERSTATUS"); + + assertAuditEvent( + events.get(2), + COMPLETED, + "/admin/collections", + ADMIN, + null, + 200, + "action", + "OVERSEERSTATUS"); } private static String SOLR_PASS = "SolrRocks"; private static String JIMBO_PASS = "JimIsCool"; - private static String AUTH_SECTION = ",\n" + - " \"authentication\":{\n" + - " 
\"class\":\"solr.BasicAuthPlugin\",\n" + - " \"credentials\":{\"solr\":\"" + getSaltedHashedValue(SOLR_PASS) + "\"," + - " \"jimbo\":\"" + getSaltedHashedValue(JIMBO_PASS) + "\"}},\n" + - " \"authorization\":{\n" + - " \"class\":\"solr.RuleBasedAuthorizationPlugin\",\n" + - " \"user-role\":{\"solr\":\"admin\"},\n" + - " \"permissions\":[{\"name\":\"collection-admin-edit\",\"role\":\"admin\"}]\n" + - " }\n"; + private static String AUTH_SECTION = + ",\n" + + " \"authentication\":{\n" + + " \"blockUnknown\":\"false\",\n" + + " \"class\":\"solr.BasicAuthPlugin\",\n" + + " \"credentials\":{\"solr\":\"" + + getSaltedHashedValue(SOLR_PASS) + + "\"," + + " \"jimbo\":\"" + + getSaltedHashedValue(JIMBO_PASS) + + "\"}},\n" + + " \"authorization\":{\n" + + " \"class\":\"solr.RuleBasedAuthorizationPlugin\",\n" + + " \"user-role\":{\"solr\":\"admin\"},\n" + + " \"permissions\":[{\"name\":\"collection-admin-edit\",\"role\":\"admin\"}]\n" + + " }\n"; /** - * Starts the cluster with a security.json built from template, using CallbackAuditLoggerPlugin. The params - * to this method will fill the template. + * Starts the cluster with a security.json built from template, using CallbackAuditLoggerPlugin. + * The params to this method will fill the template. + * * @param async enable async audit logging * @param semaphoreName name of semaphore for controlling how to delay logging * @param enableAuth should authentication be enabled in this cluster? * @param muteRulesJson mute rules to trim down what events we care about in our tests * @throws Exception if anything goes wrong */ - private void setupCluster(boolean async, String semaphoreName, boolean enableAuth, String... muteRulesJson) throws Exception { - String securityJson = FileUtils.readFileToString(TEST_PATH().resolve("security").resolve("auditlog_plugin_security.json").toFile(), StandardCharsets.UTF_8); + private void setupCluster( + boolean async, String semaphoreName, boolean enableAuth, String... muteRulesJson) + throws Exception { + String securityJson = + FileUtils.readFileToString( + TEST_PATH().resolve("security").resolve("auditlog_plugin_security.json").toFile(), + StandardCharsets.UTF_8); securityJson = securityJson.replace("_PORT_", Integer.toString(testHarness.get().callbackPort)); securityJson = securityJson.replace("_ASYNC_", Boolean.toString(async)); - securityJson = securityJson.replace("_SEMAPHORE_", - null == semaphoreName ? "null" : "\""+semaphoreName+"\""); + securityJson = + securityJson.replace( + "_SEMAPHORE_", null == semaphoreName ? "null" : "\"" + semaphoreName + "\""); securityJson = securityJson.replace("_AUTH_", enableAuth ? AUTH_SECTION : ""); // start with any test specific mute rules... 
@@ -420,24 +533,26 @@ private void setupCluster(boolean async, String semaphoreName, boolean enableAut muteRules.add("\"path:/admin/info/key\""); } - securityJson = securityJson.replace("_MUTERULES_", "[" + StringUtils.join(muteRules, ",") + "]"); + securityJson = + securityJson.replace("_MUTERULES_", "[" + StringUtils.join(muteRules, ",") + "]"); + + MiniSolrCloudCluster myCluster = + new MiniSolrCloudCluster.Builder(NUM_SERVERS, createTempDir()) + .withSecurityJson(securityJson) + .addConfig( + "conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .build(); - MiniSolrCloudCluster myCluster = new MiniSolrCloudCluster.Builder(NUM_SERVERS, createTempDir()) - .withSecurityJson(securityJson) - .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) - .build(); - myCluster.waitForAllNodes(10); testHarness.get().setCluster(myCluster); } - /** * Listening for socket callbacks in background thread from the custom CallbackAuditLoggerPlugin */ - // we don't really care about the InterruptedException that could be thrown from close in test code - // This all goes back to MiniSolrCloudCluster.close, which really _can_ throw - // an InterruptedException + // we don't really care about the InterruptedException that could be thrown from close in test + // code. This all goes back to MiniSolrCloudCluster.close, which really _can_ throw an + // InterruptedException @SuppressWarnings({"try"}) private class CallbackReceiver implements Runnable, AutoCloseable { private final ServerSocket serverSocket; @@ -458,7 +573,9 @@ public void run() { log.info("Listening for audit callbacks on on port {}", serverSocket.getLocalPort()); } Socket socket = serverSocket.accept(); - BufferedReader reader = new BufferedReader(new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8)); + BufferedReader reader = + new BufferedReader( + new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8)); while (!Thread.currentThread().isInterrupted()) { if (!reader.ready()) continue; ObjectMapper om = new ObjectMapper(); @@ -468,7 +585,7 @@ public void run() { log.info("Received {}: {}", event, msg); queue.add(event); } - } catch (IOException e) { + } catch (IOException e) { log.info("Socket closed", e); } } @@ -476,8 +593,10 @@ public void run() { @Override public void close() throws Exception { serverSocket.close(); - assertEquals("Unexpected AuditEvents still in the queue", - Collections.emptyList(), new LinkedList<>(queue)); + assertEquals( + "Unexpected AuditEvents still in the queue", + Collections.emptyList(), + new LinkedList<>(queue)); } public List waitForAuditEvents(final int expected) throws InterruptedException { @@ -485,8 +604,12 @@ public List waitForAuditEvents(final int expected) throws Interrupte for (int i = 1; i <= expected; i++) { // NOTE: counting from 1 for error message readabiity... 
final AuditEvent e = queue.poll(120, TimeUnit.SECONDS); if (null == e) { - fail("did not recieved expected event #" + i + "/" + expected - + " even after waiting an excessive amount of time"); + fail( + "did not receive expected event #" + + i + + "/" + + expected + + " even after waiting an excessive amount of time"); } log.info("Waited for and recieved event: {}", e); results.add(e); @@ -495,9 +618,9 @@ public List waitForAuditEvents(final int expected) throws Interrupte } } - // we don't really care about the InterruptedException that could be thrown from close in test code - // This all goes back to MiniSolrCloudCluster.close, which really _can_ throw - // an InterruptedException + // we don't really care about the InterruptedException that could be thrown from close in test + // code. This all goes back to MiniSolrCloudCluster.close, which really _can_ throw an + // InterruptedException @SuppressWarnings({"try"}) private class AuditTestHarness implements AutoCloseable { CallbackReceiver receiver; diff --git a/solr/core/src/test/org/apache/solr/security/AuditLoggerPluginTest.java b/solr/core/src/test/org/apache/solr/security/AuditLoggerPluginTest.java index 62eb27d06a9..b9499caffb0 100644 --- a/solr/core/src/test/org/apache/solr/security/AuditLoggerPluginTest.java +++ b/solr/core/src/test/org/apache/solr/security/AuditLoggerPluginTest.java @@ -24,7 +24,6 @@ import java.util.HashMap; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.junit.After; @@ -33,87 +32,99 @@ public class AuditLoggerPluginTest extends SolrTestCaseJ4 { protected static final Date SAMPLE_DATE = new Date(1234567890); - protected static final AuditEvent EVENT_ANONYMOUS = new AuditEvent(AuditEvent.EventType.ANONYMOUS) - .setHttpMethod("GET") - .setMessage("Anonymous") - .setResource("/collection1") - .setDate(SAMPLE_DATE); - protected static final AuditEvent EVENT_WITH_URL = new AuditEvent(AuditEvent.EventType.ANONYMOUS) - .setHttpMethod("GET") - .setMessage("Anonymous") - .setResource("/collection1") - .setBaseUrl("http://myserver/mypath") - .setHttpQueryString("a=b&c=d") - .setDate(SAMPLE_DATE); - protected static final AuditEvent EVENT_ANONYMOUS_REJECTED = new AuditEvent(AuditEvent.EventType.ANONYMOUS_REJECTED) - .setHttpMethod("GET") - .setMessage("Anonymous rejected") - .setResource("/collection1"); - protected static final AuditEvent EVENT_AUTHENTICATED = new AuditEvent(AuditEvent.EventType.AUTHENTICATED) - .setUsername("Jan") - .setHttpMethod("GET") - .setMessage("Authenticated") - .setDate(SAMPLE_DATE) - .setResource("/collection1"); - protected static final AuditEvent EVENT_REJECTED = new AuditEvent(AuditEvent.EventType.REJECTED) - .setUsername("Jan") - .setHttpMethod("POST") - .setMessage("Wrong password") - .setDate(SAMPLE_DATE) - .setResource("/collection1"); - protected static final AuditEvent EVENT_AUTHORIZED = new AuditEvent(AuditEvent.EventType.AUTHORIZED) - .setUsername("Per") - .setClientIp("192.168.0.10") - .setHttpMethod("GET") - .setMessage("Async") - .setDate(SAMPLE_DATE) - .setResource("/collection1"); - protected static final AuditEvent EVENT_UNAUTHORIZED = new AuditEvent(AuditEvent.EventType.UNAUTHORIZED) - .setUsername("Jan") - .setHttpMethod("POST") - .setMessage("No access to collection1") - .setDate(SAMPLE_DATE) - .setResource("/collection1"); - protected static final AuditEvent EVENT_ERROR = new AuditEvent(AuditEvent.EventType.ERROR) - .setUsername("Jan") -
.setHttpMethod("POST") - .setMessage("Error occurred") - .setDate(SAMPLE_DATE) - .setSolrParams(Collections.singletonMap("action", Collections.singletonList("DELETE"))) - .setResource("/admin/collections"); - protected static final AuditEvent EVENT_UPDATE = new AuditEvent(AuditEvent.EventType.COMPLETED) - .setUsername("updateuser") - .setHttpMethod("POST") - .setRequestType(AuditEvent.RequestType.UPDATE) - .setMessage("Success") - .setDate(SAMPLE_DATE) - .setCollections(Collections.singletonList("updatecoll")) - .setRequestType(AuditEvent.RequestType.UPDATE) - .setResource("/update"); - protected static final AuditEvent EVENT_STREAMING = new AuditEvent(AuditEvent.EventType.COMPLETED) - .setUsername("streaminguser") - .setHttpMethod("POST") - .setRequestType(AuditEvent.RequestType.STREAMING) - .setMessage("Success") - .setDate(SAMPLE_DATE) - .setCollections(Collections.singletonList("streamcoll")) - .setResource("/stream"); - protected static final AuditEvent EVENT_HEALTH_API = new AuditEvent(AuditEvent.EventType.COMPLETED) - .setUsername("Jan") - .setHttpMethod("GET") - .setMessage("Healthy") - .setDate(SAMPLE_DATE) - .setResource("/api/node/health"); - protected static final AuditEvent EVENT_HEALTH_V2 = new AuditEvent(AuditEvent.EventType.COMPLETED) - .setUsername("Jan") - .setHttpMethod("GET") - .setMessage("Healthy") - .setDate(SAMPLE_DATE) - .setResource("/____v2/node/health"); + protected static final AuditEvent EVENT_ANONYMOUS = + new AuditEvent(AuditEvent.EventType.ANONYMOUS) + .setHttpMethod("GET") + .setMessage("Anonymous") + .setResource("/collection1") + .setDate(SAMPLE_DATE); + protected static final AuditEvent EVENT_WITH_URL = + new AuditEvent(AuditEvent.EventType.ANONYMOUS) + .setHttpMethod("GET") + .setMessage("Anonymous") + .setResource("/collection1") + .setBaseUrl("http://myserver/mypath") + .setHttpQueryString("a=b&c=d") + .setDate(SAMPLE_DATE); + protected static final AuditEvent EVENT_ANONYMOUS_REJECTED = + new AuditEvent(AuditEvent.EventType.ANONYMOUS_REJECTED) + .setHttpMethod("GET") + .setMessage("Anonymous rejected") + .setResource("/collection1"); + protected static final AuditEvent EVENT_AUTHENTICATED = + new AuditEvent(AuditEvent.EventType.AUTHENTICATED) + .setUsername("Jan") + .setHttpMethod("GET") + .setMessage("Authenticated") + .setDate(SAMPLE_DATE) + .setResource("/collection1"); + protected static final AuditEvent EVENT_REJECTED = + new AuditEvent(AuditEvent.EventType.REJECTED) + .setUsername("Jan") + .setHttpMethod("POST") + .setMessage("Wrong password") + .setDate(SAMPLE_DATE) + .setResource("/collection1"); + protected static final AuditEvent EVENT_AUTHORIZED = + new AuditEvent(AuditEvent.EventType.AUTHORIZED) + .setUsername("Per") + .setClientIp("192.168.0.10") + .setHttpMethod("GET") + .setMessage("Async") + .setDate(SAMPLE_DATE) + .setResource("/collection1"); + protected static final AuditEvent EVENT_UNAUTHORIZED = + new AuditEvent(AuditEvent.EventType.UNAUTHORIZED) + .setUsername("Jan") + .setHttpMethod("POST") + .setMessage("No access to collection1") + .setDate(SAMPLE_DATE) + .setResource("/collection1"); + protected static final AuditEvent EVENT_ERROR = + new AuditEvent(AuditEvent.EventType.ERROR) + .setUsername("Jan") + .setHttpMethod("POST") + .setMessage("Error occurred") + .setDate(SAMPLE_DATE) + .setSolrParams(Collections.singletonMap("action", Collections.singletonList("DELETE"))) + .setResource("/admin/collections"); + protected static final AuditEvent EVENT_UPDATE = + new AuditEvent(AuditEvent.EventType.COMPLETED) + 
.setUsername("updateuser") + .setHttpMethod("POST") + .setRequestType(AuditEvent.RequestType.UPDATE) + .setMessage("Success") + .setDate(SAMPLE_DATE) + .setCollections(Collections.singletonList("updatecoll")) + .setRequestType(AuditEvent.RequestType.UPDATE) + .setResource("/update"); + protected static final AuditEvent EVENT_STREAMING = + new AuditEvent(AuditEvent.EventType.COMPLETED) + .setUsername("streaminguser") + .setHttpMethod("POST") + .setRequestType(AuditEvent.RequestType.STREAMING) + .setMessage("Success") + .setDate(SAMPLE_DATE) + .setCollections(Collections.singletonList("streamcoll")) + .setResource("/stream"); + protected static final AuditEvent EVENT_HEALTH_API = + new AuditEvent(AuditEvent.EventType.COMPLETED) + .setUsername("Jan") + .setHttpMethod("GET") + .setMessage("Healthy") + .setDate(SAMPLE_DATE) + .setResource("/api/node/health"); + protected static final AuditEvent EVENT_HEALTH_V2 = + new AuditEvent(AuditEvent.EventType.COMPLETED) + .setUsername("Jan") + .setHttpMethod("GET") + .setMessage("Healthy") + .setDate(SAMPLE_DATE) + .setResource("/____v2/node/health"); private MockAuditLoggerPlugin plugin; private HashMap config; - + @Before public void setUp() throws Exception { super.setUp(); @@ -150,8 +161,8 @@ public void shouldLog() { assertTrue(plugin.shouldLog(EVENT_REJECTED.getEventType())); assertTrue(plugin.shouldLog(EVENT_UNAUTHORIZED.getEventType())); assertTrue(plugin.shouldLog(EVENT_ERROR.getEventType())); - assertFalse(plugin.shouldLog(EVENT_ANONYMOUS.getEventType())); - assertFalse(plugin.shouldLog(EVENT_AUTHENTICATED.getEventType())); + assertFalse(plugin.shouldLog(EVENT_ANONYMOUS.getEventType())); + assertFalse(plugin.shouldLog(EVENT_AUTHENTICATED.getEventType())); assertFalse(plugin.shouldLog(EVENT_AUTHORIZED.getEventType())); assertFalse(plugin.shouldLog(EVENT_AUTHORIZED.getEventType())); } @@ -161,7 +172,7 @@ public void invalidMuteRule() { config.put("muteRules", Collections.singletonList("foo:bar")); plugin.init(config); } - + @Test public void shouldMute() { List rules = new ArrayList<>(); @@ -169,13 +180,13 @@ public void shouldMute() { rules.add(Arrays.asList("user:updateuser", "collection:updatecoll")); rules.add(Arrays.asList("path:/admin/collection", "param:action=DELETE")); rules.add("ip:192.168.0.10"); - config.put("muteRules",rules); + config.put("muteRules", rules); plugin.init(config); assertFalse(plugin.shouldMute(EVENT_ANONYMOUS)); assertFalse(plugin.shouldMute(EVENT_AUTHENTICATED)); - assertTrue(plugin.shouldMute(EVENT_STREAMING)); // type:STREAMING - assertTrue(plugin.shouldMute(EVENT_UPDATE)); // updateuser, updatecoll - assertTrue(plugin.shouldMute(EVENT_ERROR)); // admin/collection action=DELETE + assertTrue(plugin.shouldMute(EVENT_STREAMING)); // type:STREAMING + assertTrue(plugin.shouldMute(EVENT_UPDATE)); // updateuser, updatecoll + assertTrue(plugin.shouldMute(EVENT_ERROR)); // admin/collection action=DELETE assertTrue(plugin.shouldMute(EVENT_AUTHORIZED)); // ip } @@ -183,7 +194,8 @@ public void shouldMute() { public void audit() { plugin.doAudit(EVENT_ANONYMOUS_REJECTED); plugin.doAudit(EVENT_REJECTED); - assertEquals(1, plugin.typeCounts.getOrDefault("ANONYMOUS_REJECTED", new AtomicInteger()).get()); + assertEquals( + 1, plugin.typeCounts.getOrDefault("ANONYMOUS_REJECTED", new AtomicInteger()).get()); assertEquals(1, plugin.typeCounts.getOrDefault("REJECTED", new AtomicInteger()).get()); assertEquals(2, plugin.events.size()); } @@ -197,9 +209,15 @@ public void v2ApiPath() { @Test public void jsonEventFormatter() { - 
assertEquals("{\"message\":\"Anonymous\",\"level\":\"INFO\",\"date\":" + SAMPLE_DATE.getTime() + ",\"solrParams\":{},\"solrPort\":0,\"resource\":\"/collection1\",\"httpMethod\":\"GET\",\"eventType\":\"ANONYMOUS\",\"status\":-1,\"qtime\":-1.0}", + assertEquals( + "{\"message\":\"Anonymous\",\"level\":\"INFO\",\"date\":" + + SAMPLE_DATE.getTime() + + ",\"solrParams\":{},\"solrPort\":0,\"resource\":\"/collection1\",\"httpMethod\":\"GET\",\"eventType\":\"ANONYMOUS\",\"status\":-1,\"qtime\":-1.0}", plugin.formatter.formatEvent(EVENT_ANONYMOUS)); - assertEquals("{\"message\":\"Authenticated\",\"level\":\"INFO\",\"date\":" + SAMPLE_DATE.getTime() + ",\"username\":\"Jan\",\"solrParams\":{},\"solrPort\":0,\"resource\":\"/collection1\",\"httpMethod\":\"GET\",\"eventType\":\"AUTHENTICATED\",\"status\":-1,\"qtime\":-1.0}", + assertEquals( + "{\"message\":\"Authenticated\",\"level\":\"INFO\",\"date\":" + + SAMPLE_DATE.getTime() + + ",\"username\":\"Jan\",\"solrParams\":{},\"solrPort\":0,\"resource\":\"/collection1\",\"httpMethod\":\"GET\",\"eventType\":\"AUTHENTICATED\",\"status\":-1,\"qtime\":-1.0}", plugin.formatter.formatEvent(EVENT_AUTHENTICATED)); } @@ -212,7 +230,6 @@ public void getBaseUrl() { @Test public void getUrl() { - assertEquals("http://myserver/mypath?a=b&c=d", - EVENT_WITH_URL.getUrl()); + assertEquals("http://myserver/mypath?a=b&c=d", EVENT_WITH_URL.getUrl()); } } diff --git a/solr/core/src/test/org/apache/solr/security/AuthWithShardHandlerFactoryOverrideTest.java b/solr/core/src/test/org/apache/solr/security/AuthWithShardHandlerFactoryOverrideTest.java index 6f7eba60e92..5c3a67f1dad 100644 --- a/solr/core/src/test/org/apache/solr/security/AuthWithShardHandlerFactoryOverrideTest.java +++ b/solr/core/src/test/org/apache/solr/security/AuthWithShardHandlerFactoryOverrideTest.java @@ -26,28 +26,27 @@ import org.junit.Before; import org.junit.Test; -/** - * Similar to BasicAuthOnSingleNodeTest, but using a different configset, to test SOLR-14569 - */ +/** Similar to BasicAuthOnSingleNodeTest, but using a different configset, to test SOLR-14569 */ public class AuthWithShardHandlerFactoryOverrideTest extends SolrCloudAuthTestCase { private static final String COLLECTION = "authCollection"; private static final String ALIAS = "alias"; - - private static final String SECURITY_CONF = "{\n" + - " \"authentication\":{\n" + - " \"blockUnknown\": true,\n" + - " \"class\":\"solr.BasicAuthPlugin\",\n" + - " \"credentials\":{\"solr\":\"EEKn7ywYk5jY8vG9TyqlG2jvYuvh1Q7kCCor6Hqm320= 6zkmjMjkMKyJX6/f0VarEWQujju5BzxZXub6WOrEKCw=\"}\n" + - " },\n" + - " \"authorization\":{\n" + - " \"class\":\"solr.RuleBasedAuthorizationPlugin\",\n" + - " \"permissions\":[\n" + - " {\"name\":\"all\", \"role\":\"admin\"}\n" + - " ],\n" + - " \"user-role\":{\"solr\":\"admin\"}\n" + - " }\n" + - "}"; + + private static final String SECURITY_CONF = + "{\n" + + " \"authentication\":{\n" + + " \"blockUnknown\": true,\n" + + " \"class\":\"solr.BasicAuthPlugin\",\n" + + " \"credentials\":{\"solr\":\"EEKn7ywYk5jY8vG9TyqlG2jvYuvh1Q7kCCor6Hqm320= 6zkmjMjkMKyJX6/f0VarEWQujju5BzxZXub6WOrEKCw=\"}\n" + + " },\n" + + " \"authorization\":{\n" + + " \"class\":\"solr.RuleBasedAuthorizationPlugin\",\n" + + " \"permissions\":[\n" + + " {\"name\":\"all\", \"role\":\"admin\"}\n" + + " ],\n" + + " \"user-role\":{\"solr\":\"admin\"}\n" + + " }\n" + + "}"; @Before public void setupCluster() throws Exception { @@ -58,11 +57,11 @@ public void setupCluster() throws Exception { CollectionAdminRequest.createCollection(COLLECTION, "conf", 4, 1) 
.setBasicAuthCredentials("solr", "solr") .process(cluster.getSolrClient()); - + CollectionAdminRequest.createAlias(ALIAS, COLLECTION) .setBasicAuthCredentials("solr", "solr") .process(cluster.getSolrClient()); - + cluster.waitForActiveCollection(COLLECTION, 4, 4); JettySolrRunner jetty = cluster.getJettySolrRunner(0); @@ -82,12 +81,15 @@ public void tearDown() throws Exception { @Test public void collectionTest() throws Exception { - try (Http2SolrClient client = new Http2SolrClient.Builder(cluster.getJettySolrRunner(0).getBaseUrl().toString()) - .build()){ + try (Http2SolrClient client = + new Http2SolrClient.Builder(cluster.getJettySolrRunner(0).getBaseUrl().toString()) + .build()) { for (int i = 0; i < 30; i++) { - SolrResponse response = new QueryRequest(params("q", "*:*")) - .setBasicAuthCredentials("solr", "solr").process(client, COLLECTION); + SolrResponse response = + new QueryRequest(params("q", "*:*")) + .setBasicAuthCredentials("solr", "solr") + .process(client, COLLECTION); // likely to be non-null, even if an error occurred assertNotNull(response); assertNotNull(response.getResponse()); @@ -96,15 +98,18 @@ public void collectionTest() throws Exception { } } } - + @Test public void aliasTest() throws Exception { - try (Http2SolrClient client = new Http2SolrClient.Builder(cluster.getJettySolrRunner(0).getBaseUrl().toString()) - .build()){ + try (Http2SolrClient client = + new Http2SolrClient.Builder(cluster.getJettySolrRunner(0).getBaseUrl().toString()) + .build()) { for (int i = 0; i < 30; i++) { - SolrResponse response = new QueryRequest(params("q", "*:*")) - .setBasicAuthCredentials("solr", "solr").process(client, ALIAS); + SolrResponse response = + new QueryRequest(params("q", "*:*")) + .setBasicAuthCredentials("solr", "solr") + .process(client, ALIAS); // likely to be non-null, even if an error occurred assertNotNull(response); assertNotNull(response.getResponse()); diff --git a/solr/core/src/test/org/apache/solr/security/BaseTestRuleBasedAuthorizationPlugin.java b/solr/core/src/test/org/apache/solr/security/BaseTestRuleBasedAuthorizationPlugin.java index f1598ac1552..e12bd811822 100644 --- a/solr/core/src/test/org/apache/solr/security/BaseTestRuleBasedAuthorizationPlugin.java +++ b/solr/core/src/test/org/apache/solr/security/BaseTestRuleBasedAuthorizationPlugin.java @@ -16,6 +16,13 @@ */ package org.apache.solr.security; +import static java.util.Collections.*; +import static org.apache.solr.common.util.CommandOperation.captureErrors; +import static org.apache.solr.common.util.Utils.getObjectByPath; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assume.assumeThat; + import java.io.IOException; import java.io.StringReader; import java.security.Principal; @@ -26,7 +33,6 @@ import java.util.List; import java.util.Map; import java.util.Set; - import org.apache.http.auth.BasicUserPrincipal; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.MapSolrParams; @@ -49,16 +55,9 @@ import org.junit.Before; import org.junit.Test; -import static java.util.Collections.*; -import static org.apache.solr.common.util.CommandOperation.captureErrors; -import static org.apache.solr.common.util.Utils.getObjectByPath; -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assume.assumeThat; - /** - * Base class for testing RBAC. 
This will test the {@link RuleBasedAuthorizationPlugin} implementation - * but also serves as a base class for testing other sub classes + * Base class for testing RBAC. This will test the {@link RuleBasedAuthorizationPlugin} + * implementation but also serves as a base class for testing other sub classes */ @SuppressWarnings("unchecked") @LogLevel("org.apache.solr.security=TRACE") @@ -81,151 +80,269 @@ public void setupPermissionsAndRoles() { } protected void resetPermissionsAndRoles() { - String permissions = "{" + - " user-role : {" + - " steve: [dev,user]," + - " tim: [dev,admin]," + - " joe: [user]," + - " noble:[dev,user]" + - " }," + - " permissions : [" + - " {name:'schema-edit'," + - " role:admin}," + - " {name:'collection-admin-read'," + - " role:null}," + - " {name:collection-admin-edit ," + - " role:admin}," + - " {name:mycoll_update," + - " collection:mycoll," + - " path:'/update/*'," + - " role:[dev,admin]" + - " }," + - "{name:read, role:dev }," + - "{name:freeforall, path:'/foo', role:'*'}]}"; - rules = (Map) Utils.fromJSONString(permissions); + String permissions = + "{" + + " user-role : {" + + " steve: [dev,user]," + + " tim: [dev,admin]," + + " joe: [user]," + + " noble:[dev,user]" + + " }," + + " permissions : [" + + " {name:'schema-edit'," + + " role:admin}," + + " {name:'collection-admin-read'," + + " role:null}," + + " {name:collection-admin-edit ," + + " role:admin}," + + " {name:mycoll_update," + + " collection:mycoll," + + " path:'/update/*'," + + " role:[dev,admin]" + + " }," + + "{name:read, role:dev }," + + "{name:freeforall, path:'/foo', role:'*'}]}"; + rules = (Map) Utils.fromJSONString(permissions); } @Test public void testBasicPermissions() { - checkRules(Map.of("resource", "/update/json/docs", - "httpMethod", "POST", - "userPrincipal", "unknownuser", - "collectionRequests", "freeforall", - "handler", new UpdateRequestHandler()) - , STATUS_OK); - - checkRules(Map.of("resource", "/update/json/docs", - "httpMethod", "POST", - "userPrincipal", "tim", - "collectionRequests", "mycoll", - "handler", new UpdateRequestHandler()) - , STATUS_OK); - - checkRules(Map.of("resource", "/update/json/docs", - "httpMethod", "POST", - "collectionRequests", "mycoll", - "handler", new UpdateRequestHandler()) - , PROMPT_FOR_CREDENTIALS); - - checkRules(Map.of("resource", "/schema", - "userPrincipal", "somebody", - "collectionRequests", "mycoll", - "httpMethod", "POST", - "handler", new SchemaHandler()) - , FORBIDDEN); - - checkRules(Map.of("resource", "/schema", - "userPrincipal", "somebody", - "collectionRequests", "mycoll", - "httpMethod", "GET", - "handler", new SchemaHandler()) - , STATUS_OK); - - checkRules(Map.of("resource", "/schema/fields", - "userPrincipal", "somebody", - "collectionRequests", "mycoll", - "httpMethod", "GET", - "handler", new SchemaHandler()) - , STATUS_OK); - - checkRules(Map.of("resource", "/schema", - "userPrincipal", "somebody", - "collectionRequests", "mycoll", - "httpMethod", "POST", - "handler", new SchemaHandler()) - , FORBIDDEN); - - checkRules(Map.of("resource", "/admin/collections", - "userPrincipal", "tim", - "requestType", RequestType.ADMIN, - "httpMethod", "GET", - "handler", new CollectionsHandler(), - "params", new MapSolrParams(singletonMap("action", "LIST"))) - , STATUS_OK); - - checkRules(Map.of("resource", "/admin/collections", - "requestType", RequestType.ADMIN, - "httpMethod", "GET", - "handler", new CollectionsHandler(), - "params", new MapSolrParams(singletonMap("action", "LIST"))) - , STATUS_OK); - - 
checkRules(Map.of("resource", "/admin/collections", - "requestType", RequestType.ADMIN, - "handler", new CollectionsHandler(), - "params", new MapSolrParams(singletonMap("action", "CREATE"))) - , PROMPT_FOR_CREDENTIALS); - - checkRules(Map.of("resource", "/admin/collections", - "requestType", RequestType.ADMIN, - "handler", new CollectionsHandler(), - "params", new MapSolrParams(singletonMap("action", "RELOAD"))) - , PROMPT_FOR_CREDENTIALS); - - checkRules(Map.of("resource", "/admin/collections", - "userPrincipal", "somebody", - "requestType", RequestType.ADMIN, - "handler", new CollectionsHandler(), - "params", new MapSolrParams(singletonMap("action", "CREATE"))) - , FORBIDDEN); - - checkRules(Map.of("resource", "/admin/collections", - "userPrincipal", "tim", - "requestType", RequestType.ADMIN, - "handler", new CollectionsHandler(), - "params", new MapSolrParams(singletonMap("action", "CREATE"))) - , STATUS_OK); - - checkRules(Map.of("resource", "/select", - "httpMethod", "GET", - "handler", new SearchHandler(), - "collectionRequests", singletonList(new CollectionRequest("mycoll")), - "userPrincipal", "joe") - , FORBIDDEN); + checkRules( + Map.of( + "resource", + "/update/json/docs", + "httpMethod", + "POST", + "userPrincipal", + "unknownuser", + "collectionRequests", + "freeforall", + "handler", + new UpdateRequestHandler()), + STATUS_OK); + + checkRules( + Map.of( + "resource", + "/update/json/docs", + "httpMethod", + "POST", + "userPrincipal", + "tim", + "collectionRequests", + "mycoll", + "handler", + new UpdateRequestHandler()), + STATUS_OK); + + checkRules( + Map.of( + "resource", + "/update/json/docs", + "httpMethod", + "POST", + "collectionRequests", + "mycoll", + "handler", + new UpdateRequestHandler()), + PROMPT_FOR_CREDENTIALS); + + checkRules( + Map.of( + "resource", + "/schema", + "userPrincipal", + "somebody", + "collectionRequests", + "mycoll", + "httpMethod", + "POST", + "handler", + new SchemaHandler()), + FORBIDDEN); + + checkRules( + Map.of( + "resource", + "/schema", + "userPrincipal", + "somebody", + "collectionRequests", + "mycoll", + "httpMethod", + "GET", + "handler", + new SchemaHandler()), + STATUS_OK); + + checkRules( + Map.of( + "resource", + "/schema/fields", + "userPrincipal", + "somebody", + "collectionRequests", + "mycoll", + "httpMethod", + "GET", + "handler", + new SchemaHandler()), + STATUS_OK); + + checkRules( + Map.of( + "resource", + "/schema", + "userPrincipal", + "somebody", + "collectionRequests", + "mycoll", + "httpMethod", + "POST", + "handler", + new SchemaHandler()), + FORBIDDEN); + + checkRules( + Map.of( + "resource", + "/admin/collections", + "userPrincipal", + "tim", + "requestType", + RequestType.ADMIN, + "httpMethod", + "GET", + "handler", + new CollectionsHandler(), + "params", + new MapSolrParams(singletonMap("action", "LIST"))), + STATUS_OK); + + checkRules( + Map.of( + "resource", + "/admin/collections", + "requestType", + RequestType.ADMIN, + "httpMethod", + "GET", + "handler", + new CollectionsHandler(), + "params", + new MapSolrParams(singletonMap("action", "LIST"))), + STATUS_OK); + + checkRules( + Map.of( + "resource", + "/admin/collections", + "requestType", + RequestType.ADMIN, + "handler", + new CollectionsHandler(), + "params", + new MapSolrParams(singletonMap("action", "CREATE"))), + PROMPT_FOR_CREDENTIALS); + + checkRules( + Map.of( + "resource", + "/admin/collections", + "requestType", + RequestType.ADMIN, + "handler", + new CollectionsHandler(), + "params", + new MapSolrParams(singletonMap("action", "RELOAD"))), + 
PROMPT_FOR_CREDENTIALS); + + checkRules( + Map.of( + "resource", + "/admin/collections", + "userPrincipal", + "somebody", + "requestType", + RequestType.ADMIN, + "handler", + new CollectionsHandler(), + "params", + new MapSolrParams(singletonMap("action", "CREATE"))), + FORBIDDEN); + + checkRules( + Map.of( + "resource", + "/admin/collections", + "userPrincipal", + "tim", + "requestType", + RequestType.ADMIN, + "handler", + new CollectionsHandler(), + "params", + new MapSolrParams(singletonMap("action", "CREATE"))), + STATUS_OK); + + checkRules( + Map.of( + "resource", + "/select", + "httpMethod", + "GET", + "handler", + new SearchHandler(), + "collectionRequests", + singletonList(new CollectionRequest("mycoll")), + "userPrincipal", + "joe"), + FORBIDDEN); setUserRole("cio", "su"); addPermission("all", "su"); - checkRules(Map.of("resource", ReplicationHandler.PATH, - "httpMethod", "POST", - "userPrincipal", "tim", - "handler", new ReplicationHandler(), - "collectionRequests", singletonList(new CollectionRequest("mycoll")) ) - , STATUS_OK); // Replication requires "READ" permission, which Tim has - - checkRules(Map.of("resource", ReplicationHandler.PATH, - "httpMethod", "POST", - "userPrincipal", "cio", - "handler", new ReplicationHandler(), - "collectionRequests", singletonList(new CollectionRequest("mycoll")) ) - , FORBIDDEN); // User cio has role 'su' which does not have 'read' permission - - checkRules(Map.of("resource", "/admin/collections", - "userPrincipal", "tim", - "requestType", AuthorizationContext.RequestType.ADMIN, - "handler", new CollectionsHandler(), - "params", new MapSolrParams(singletonMap("action", "CREATE"))) - , STATUS_OK); + checkRules( + Map.of( + "resource", + ReplicationHandler.PATH, + "httpMethod", + "POST", + "userPrincipal", + "tim", + "handler", + new ReplicationHandler(), + "collectionRequests", + singletonList(new CollectionRequest("mycoll"))), + STATUS_OK); // Replication requires "READ" permission, which Tim has + + checkRules( + Map.of( + "resource", + ReplicationHandler.PATH, + "httpMethod", + "POST", + "userPrincipal", + "cio", + "handler", + new ReplicationHandler(), + "collectionRequests", + singletonList(new CollectionRequest("mycoll"))), + FORBIDDEN); // User cio has role 'su' which does not have 'read' permission + + checkRules( + Map.of( + "resource", + "/admin/collections", + "userPrincipal", + "tim", + "requestType", + AuthorizationContext.RequestType.ADMIN, + "handler", + new CollectionsHandler(), + "params", + new MapSolrParams(singletonMap("action", "CREATE"))), + STATUS_OK); } @Test @@ -235,99 +352,183 @@ public void testCoreAdminPermissions() { setUserRole("cio", "su"); addPermission("all", "su"); - checkRules(Map.of("resource", "/admin/cores", - "requestType", RequestType.ADMIN, - "handler", new CoreAdminHandler(null), - "params", new MapSolrParams(singletonMap("action", "CREATE"))) - , PROMPT_FOR_CREDENTIALS); - - checkRules(Map.of("resource", "/admin/cores", - "userPrincipal", "joe", - "requestType", RequestType.ADMIN, - "handler", new CoreAdminHandler(null), - "params", new MapSolrParams(singletonMap("action", "CREATE"))) - , FORBIDDEN); - - checkRules(Map.of("resource", "/admin/cores", - "userPrincipal", "joe", - "requestType", RequestType.ADMIN, - "handler", new CoreAdminHandler(null), - "params", new MapSolrParams(singletonMap("action", "STATUS"))) - , STATUS_OK); - - checkRules(Map.of("resource", "/admin/cores", - "userPrincipal", "cio", - "requestType", RequestType.ADMIN, - "handler", new CoreAdminHandler(null), - "params", new 
MapSolrParams(singletonMap("action", "CREATE"))) - ,STATUS_OK); + checkRules( + Map.of( + "resource", + "/admin/cores", + "requestType", + RequestType.ADMIN, + "handler", + new CoreAdminHandler(null), + "params", + new MapSolrParams(singletonMap("action", "CREATE"))), + PROMPT_FOR_CREDENTIALS); + + checkRules( + Map.of( + "resource", + "/admin/cores", + "userPrincipal", + "joe", + "requestType", + RequestType.ADMIN, + "handler", + new CoreAdminHandler(null), + "params", + new MapSolrParams(singletonMap("action", "CREATE"))), + FORBIDDEN); + + checkRules( + Map.of( + "resource", + "/admin/cores", + "userPrincipal", + "joe", + "requestType", + RequestType.ADMIN, + "handler", + new CoreAdminHandler(null), + "params", + new MapSolrParams(singletonMap("action", "STATUS"))), + STATUS_OK); + + checkRules( + Map.of( + "resource", + "/admin/cores", + "userPrincipal", + "cio", + "requestType", + RequestType.ADMIN, + "handler", + new CoreAdminHandler(null), + "params", + new MapSolrParams(singletonMap("action", "CREATE"))), + STATUS_OK); } @Test public void testParamsPermissions() { - addPermission("test-params", "admin", "/x", Map.of("key", Arrays.asList("REGEX:(?i)val1", "VAL2"))); - - checkRules(Map.of("resource", "/x", - "requestType", RequestType.UNKNOWN, - "collectionRequests", "go", - "handler", new DumpRequestHandler(), - "params", new MapSolrParams(singletonMap("key", "VAL1"))) - , PROMPT_FOR_CREDENTIALS); - - checkRules(Map.of("resource", "/x", - "requestType", RequestType.UNKNOWN, - "collectionRequests", "go", - "handler", new DumpRequestHandler(), - "params", new MapSolrParams(singletonMap("key", "Val1"))) - , PROMPT_FOR_CREDENTIALS); - - checkRules(Map.of("resource", "/x", - "requestType", RequestType.UNKNOWN, - "collectionRequests", "go", - "handler", new DumpRequestHandler(), - "params", new MapSolrParams(singletonMap("key", "Val1"))) - , PROMPT_FOR_CREDENTIALS); - - checkRules(Map.of("resource", "/x", - "userPrincipal", "joe", - "requestType", RequestType.UNKNOWN, - "collectionRequests", "go", - "handler", new DumpRequestHandler(), - "params", new MapSolrParams(singletonMap("key", "Val1"))) - , FORBIDDEN); - - checkRules(Map.of("resource", "/x", - "userPrincipal", "joe", - "requestType", RequestType.UNKNOWN, - "collectionRequests", "go", - "handler", new DumpRequestHandler(), - "params", new MapSolrParams(singletonMap("key", "Val2"))) - , STATUS_OK); - - checkRules(Map.of("resource", "/x", - "userPrincipal", "joe", - "requestType", RequestType.UNKNOWN, - "collectionRequests", "go", - "handler", new DumpRequestHandler(), - "params", new MapSolrParams(singletonMap("key", "VAL2"))) - , FORBIDDEN); + addPermission( + "test-params", "admin", "/x", Map.of("key", Arrays.asList("REGEX:(?i)val1", "VAL2"))); + + checkRules( + Map.of( + "resource", + "/x", + "requestType", + RequestType.UNKNOWN, + "collectionRequests", + "go", + "handler", + new DumpRequestHandler(), + "params", + new MapSolrParams(singletonMap("key", "VAL1"))), + PROMPT_FOR_CREDENTIALS); + + checkRules( + Map.of( + "resource", + "/x", + "requestType", + RequestType.UNKNOWN, + "collectionRequests", + "go", + "handler", + new DumpRequestHandler(), + "params", + new MapSolrParams(singletonMap("key", "Val1"))), + PROMPT_FOR_CREDENTIALS); + + checkRules( + Map.of( + "resource", + "/x", + "requestType", + RequestType.UNKNOWN, + "collectionRequests", + "go", + "handler", + new DumpRequestHandler(), + "params", + new MapSolrParams(singletonMap("key", "Val1"))), + PROMPT_FOR_CREDENTIALS); + + checkRules( + Map.of( + "resource", + "/x", 
+ "userPrincipal", + "joe", + "requestType", + RequestType.UNKNOWN, + "collectionRequests", + "go", + "handler", + new DumpRequestHandler(), + "params", + new MapSolrParams(singletonMap("key", "Val1"))), + FORBIDDEN); + + checkRules( + Map.of( + "resource", + "/x", + "userPrincipal", + "joe", + "requestType", + RequestType.UNKNOWN, + "collectionRequests", + "go", + "handler", + new DumpRequestHandler(), + "params", + new MapSolrParams(singletonMap("key", "Val2"))), + STATUS_OK); + + checkRules( + Map.of( + "resource", + "/x", + "userPrincipal", + "joe", + "requestType", + RequestType.UNKNOWN, + "collectionRequests", + "go", + "handler", + new DumpRequestHandler(), + "params", + new MapSolrParams(singletonMap("key", "VAL2"))), + FORBIDDEN); } @Test public void testCustomRules() { - Map customRules = (Map) Utils.fromJSONString( - "{permissions:[" + - " {name:update, role:[admin_role,update_role]}," + - " {name:read, role:[admin_role,update_role,read_role]}" + - "]}"); - - checkRules(Map.of("resource", "/update", - "userPrincipal", "solr", - "requestType", RequestType.UNKNOWN, - "collectionRequests", "go", - "handler", new UpdateRequestHandler(), - "params", new MapSolrParams(singletonMap("key", "VAL2"))) - , FORBIDDEN, customRules); + Map customRules = + (Map) + Utils.fromJSONString( + "{permissions:[" + + " {name:update, role:[admin_role,update_role]}," + + " {name:read, role:[admin_role,update_role,read_role]}" + + "]}"); + + checkRules( + Map.of( + "resource", + "/update", + "userPrincipal", + "solr", + "requestType", + RequestType.UNKNOWN, + "collectionRequests", + "go", + "handler", + new UpdateRequestHandler(), + "params", + new MapSolrParams(singletonMap("key", "VAL2"))), + FORBIDDEN, + customRules); } /* @@ -342,26 +543,41 @@ public void testAllPermissionAllowsActionsWhenUserHasCorrectRole() { setUserRole("dev", "dev"); setUserRole("admin", "admin"); addPermission("all", "dev", "admin"); - checkRules(Map.of("resource", "/update", - "userPrincipal", "dev", - "requestType", RequestType.UNKNOWN, - "collectionRequests", "go", - "handler", handler, - "params", new MapSolrParams(singletonMap("key", "VAL2"))) - , STATUS_OK); + checkRules( + Map.of( + "resource", + "/update", + "userPrincipal", + "dev", + "requestType", + RequestType.UNKNOWN, + "collectionRequests", + "go", + "handler", + handler, + "params", + new MapSolrParams(singletonMap("key", "VAL2"))), + STATUS_OK); handler = new PropertiesRequestHandler(); assertThat(handler, new IsInstanceOf(PermissionNameProvider.class)); - checkRules(Map.of("resource", "/admin/info/properties", - "userPrincipal", "dev", - "requestType", RequestType.UNKNOWN, - "collectionRequests", "go", - "handler", handler, - "params", new MapSolrParams(emptyMap())) - , STATUS_OK); + checkRules( + Map.of( + "resource", + "/admin/info/properties", + "userPrincipal", + "dev", + "requestType", + RequestType.UNKNOWN, + "collectionRequests", + "go", + "handler", + handler, + "params", + new MapSolrParams(emptyMap())), + STATUS_OK); } - /* * RuleBasedAuthorizationPlugin handles requests differently based on whether the underlying handler implements * PermissionNameProvider or not. 
If this test fails because UpdateRequestHandler stops implementing @@ -374,23 +590,39 @@ public void testAllPermissionAllowsActionsWhenAssociatedRoleIsWildcard() { setUserRole("dev", "dev"); setUserRole("admin", "admin"); addPermission("all", "*"); - checkRules(Map.of("resource", "/update", - "userPrincipal", "dev", - "requestType", RequestType.UNKNOWN, - "collectionRequests", "go", - "handler", new UpdateRequestHandler(), - "params", new MapSolrParams(singletonMap("key", "VAL2"))) - , STATUS_OK); + checkRules( + Map.of( + "resource", + "/update", + "userPrincipal", + "dev", + "requestType", + RequestType.UNKNOWN, + "collectionRequests", + "go", + "handler", + new UpdateRequestHandler(), + "params", + new MapSolrParams(singletonMap("key", "VAL2"))), + STATUS_OK); handler = new PropertiesRequestHandler(); assertThat(handler, new IsInstanceOf(PermissionNameProvider.class)); - checkRules(Map.of("resource", "/admin/info/properties", - "userPrincipal", "dev", - "requestType", RequestType.UNKNOWN, - "collectionRequests", "go", - "handler", handler, - "params", new MapSolrParams(emptyMap())) - , STATUS_OK); + checkRules( + Map.of( + "resource", + "/admin/info/properties", + "userPrincipal", + "dev", + "requestType", + RequestType.UNKNOWN, + "collectionRequests", + "go", + "handler", + handler, + "params", + new MapSolrParams(emptyMap())), + STATUS_OK); } /* @@ -405,39 +637,58 @@ public void testAllPermissionDeniesActionsWhenUserIsNotCorrectRole() { setUserRole("dev", "dev"); setUserRole("admin", "admin"); addPermission("all", "admin"); - checkRules(Map.of("resource", "/update", - "userPrincipal", "dev", - "requestType", RequestType.UNKNOWN, - "collectionRequests", "go", - "handler", new UpdateRequestHandler(), - "params", new MapSolrParams(singletonMap("key", "VAL2"))) - , FORBIDDEN); + checkRules( + Map.of( + "resource", + "/update", + "userPrincipal", + "dev", + "requestType", + RequestType.UNKNOWN, + "collectionRequests", + "go", + "handler", + new UpdateRequestHandler(), + "params", + new MapSolrParams(singletonMap("key", "VAL2"))), + FORBIDDEN); handler = new PropertiesRequestHandler(); assertThat(handler, new IsInstanceOf(PermissionNameProvider.class)); - checkRules(Map.of("resource", "/admin/info/properties", - "userPrincipal", "dev", - "requestType", RequestType.UNKNOWN, - "collectionRequests", "go", - "handler", handler, - "params", new MapSolrParams(emptyMap())) - , FORBIDDEN); + checkRules( + Map.of( + "resource", + "/admin/info/properties", + "userPrincipal", + "dev", + "requestType", + RequestType.UNKNOWN, + "collectionRequests", + "go", + "handler", + handler, + "params", + new MapSolrParams(emptyMap())), + FORBIDDEN); } @Test public void testShortNameResolvesPermissions() { - assumeThat("ExternalRBAPlugin doesn't use short name", - createPlugin(), is(instanceOf(RuleBasedAuthorizationPlugin.class))); + assumeThat( + "ExternalRBAPlugin doesn't use short name", + createPlugin(), + is(instanceOf(RuleBasedAuthorizationPlugin.class))); setUserRole("admin", "admin"); addPermission("all", "admin"); - Map values = Map.of( - "userPrincipal", "admin@EXAMPLE", - "userName", "admin", - "resource", "/admin/info/properties", - "requestType", RequestType.ADMIN, - "handler", new PropertiesRequestHandler()); + Map values = + Map.of( + "userPrincipal", "admin@EXAMPLE", + "userName", "admin", + "resource", "/admin/info/properties", + "requestType", RequestType.ADMIN, + "handler", new PropertiesRequestHandler()); // Short names disabled, admin should fail, admin@EXAMPLE should succeed 
rules.put("useShortName", "false"); @@ -450,35 +701,43 @@ public void testShortNameResolvesPermissions() { @Test public void testGetPermissionNamesForRoles() { - // Tests the method that maps role(s) to permissions, used by SystemInfoHandler to provide UI with logged in user's permissions + // Tests the method that maps role(s) to permissions, used by SystemInfoHandler to provide UI + // with logged in user's permissions try (RuleBasedAuthorizationPluginBase plugin = createPlugin()) { plugin.init(rules); - assertEquals(Set.of("mycoll_update", "read"), plugin.getPermissionNamesForRoles(Set.of("dev"))); + assertEquals( + Set.of("mycoll_update", "read"), plugin.getPermissionNamesForRoles(Set.of("dev"))); assertEquals(emptySet(), plugin.getPermissionNamesForRoles(Set.of("user"))); - assertEquals(Set.of("schema-edit", "collection-admin-edit", "mycoll_update"), plugin.getPermissionNamesForRoles(Set.of("admin"))); - assertEquals(Set.of("schema-edit", "collection-admin-edit", "mycoll_update", "read"), plugin.getPermissionNamesForRoles(Set.of("admin", "dev"))); + assertEquals( + Set.of("schema-edit", "collection-admin-edit", "mycoll_update"), + plugin.getPermissionNamesForRoles(Set.of("admin"))); + assertEquals( + Set.of("schema-edit", "collection-admin-edit", "mycoll_update", "read"), + plugin.getPermissionNamesForRoles(Set.of("admin", "dev"))); } catch (IOException e) { ; // swallow error, otherwise a you have to add a _lot_ of exceptions to methods. } } void addPermission(String permissionName, String role, String path, Map params) { - ((List)rules.get("permissions")).add(Map.of("name", permissionName, "role", role, "path", path, "params", params)); + ((List) rules.get("permissions")) + .add(Map.of("name", permissionName, "role", role, "path", path, "params", params)); } protected void addPermission(String permissionName, String... 
roles) { - ((List)rules.get("permissions")).add(Map.of("name", permissionName, "role", Arrays.asList(roles))); + ((List) rules.get("permissions")) + .add(Map.of("name", permissionName, "role", Arrays.asList(roles))); } protected void setUserRole(String user, String role) { - ((Map)rules.get("user-role")).put(user, role); + ((Map) rules.get("user-role")).put(user, role); } public void testEditRules() throws IOException { - Perms perms = new Perms(); + Perms perms = new Perms(); perms.runCmd("{set-permission : {name: config-edit, role: admin } }", true); - assertEquals("config-edit", getObjectByPath(perms.conf, false, "permissions[0]/name")); - assertEquals(1 , perms.getVal("permissions[0]/index")); + assertEquals("config-edit", getObjectByPath(perms.conf, false, "permissions[0]/name")); + assertEquals(1, perms.getVal("permissions[0]/index")); assertEquals("admin", perms.getVal("permissions[0]/role")); perms.runCmd("{set-permission : {name: config-edit, role: [admin, dev], index:2 } }", false); perms.runCmd("{set-permission : {name: config-edit, role: [admin, dev], index:1}}", true); @@ -486,7 +745,9 @@ public void testEditRules() throws IOException { assertEquals(2, roles.size()); assertTrue(roles.contains("admin")); assertTrue(roles.contains("dev")); - perms.runCmd("{set-permission : {role: [admin, dev], collection: x , path: '/a/b' , method :[GET, POST] }}", true); + perms.runCmd( + "{set-permission : {role: [admin, dev], collection: x , path: '/a/b' , method :[GET, POST] }}", + true); assertNotNull(perms.getVal("permissions[1]")); assertEquals("x", perms.getVal("permissions[1]/collection")); assertEquals("/a/b", perms.getVal("permissions[1]/path")); @@ -494,7 +755,8 @@ public void testEditRules() throws IOException { assertEquals("POST", perms.getVal("permissions[1]/method")); assertEquals("/a/b", perms.getVal("permissions[1]/path")); - perms.runCmd("{set-permission : {name : read, collection : y, role:[guest, dev] , before :2}}", true); + perms.runCmd( + "{set-permission : {name : read, collection : y, role:[guest, dev] , before :2}}", true); assertNotNull(perms.getVal("permissions[2]")); assertEquals("y", perms.getVal("permissions[1]/collection")); assertEquals("read", perms.getVal("permissions[1]/name")); @@ -505,7 +767,7 @@ public void testEditRules() throws IOException { assertTrue(captureErrors(perms.parsedCommands).isEmpty()); assertEquals("y", perms.getVal("permissions[1]/collection")); - List> permList = (List>)perms.getVal("permissions"); + List> permList = (List>) perms.getVal("permissions"); assertEquals(2, permList.size()); assertEquals("config-edit", perms.getVal("permissions[0]/name")); assertEquals(1, perms.getVal("permissions[0]/index")); @@ -520,7 +782,7 @@ public void testEditRules() throws IOException { perms.runCmd("{delete-permission : 1}", true); assertTrue(captureErrors(perms.parsedCommands).isEmpty()); - permList = (List>)perms.getVal("permissions"); + permList = (List>) perms.getVal("permissions"); assertEquals(1, permList.size()); // indexes should have been re-ordered after the delete, so now "read" has index==1 assertEquals("read", perms.getVal("permissions[0]/name")); @@ -528,28 +790,29 @@ public void testEditRules() throws IOException { // delete last remaining perms.runCmd("{delete-permission : 1}", true); assertTrue(captureErrors(perms.parsedCommands).isEmpty()); - permList = (List>)perms.getVal("permissions"); + permList = (List>) perms.getVal("permissions"); assertEquals(0, permList.size()); } static class Perms { - Map conf = new HashMap<>(); + Map 
conf = new HashMap<>(); ConfigEditablePlugin plugin = new RuleBasedAuthorizationPlugin(); List parsedCommands; public void runCmd(String cmds, boolean failOnError) throws IOException { parsedCommands = CommandOperation.parse(new StringReader(cmds)); Map edited = plugin.edit(conf, parsedCommands); - if(edited!= null) conf = edited; - List> maps = captureErrors(parsedCommands); - if(failOnError){ - assertTrue("unexpected error ,"+maps , maps.isEmpty()); + if (edited != null) conf = edited; + List> maps = captureErrors(parsedCommands); + if (failOnError) { + assertTrue("unexpected error ," + maps, maps.isEmpty()); } else { assertFalse("expected error", maps.isEmpty()); } } - public Object getVal(String path){ - return getObjectByPath(conf,false, path); + + public Object getVal(String path) { + return getObjectByPath(conf, false, path); } } @@ -588,7 +851,7 @@ public String getUserName() { } protected abstract static class MockAuthorizationContext extends AuthorizationContext { - private final Map values; + private final Map values; public MockAuthorizationContext(Map values) { this.values = values; @@ -597,7 +860,7 @@ public MockAuthorizationContext(Map values) { @Override public SolrParams getParams() { SolrParams params = (SolrParams) values.get("params"); - return params == null ? new MapSolrParams(new HashMap<>()) : params; + return params == null ? new MapSolrParams(new HashMap<>()) : params; } @Override @@ -630,7 +893,7 @@ public String getRemoteHost() { public List getCollectionRequests() { Object collectionRequests = values.get("collectionRequests"); if (collectionRequests instanceof String) { - return singletonList(new CollectionRequest((String)collectionRequests)); + return singletonList(new CollectionRequest((String) collectionRequests)); } return (List) collectionRequests; } @@ -648,7 +911,9 @@ public String getHttpMethod() { @Override public Object getHandler() { Object handler = values.get("handler"); - return handler instanceof String ? (PermissionNameProvider) request -> PermissionNameProvider.Name.get((String) handler) : handler; + return handler instanceof String + ? 
(PermissionNameProvider) request -> PermissionNameProvider.Name.get((String) handler) + : handler; } @Override diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java index 0168a1c6026..d8214d690d8 100644 --- a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java @@ -16,6 +16,10 @@ */ package org.apache.solr.security; +import static java.nio.charset.StandardCharsets.UTF_8; +import static java.util.Collections.singletonMap; + +import com.codahale.metrics.MetricRegistry; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -28,8 +32,6 @@ import java.util.Random; import java.util.Set; import java.util.concurrent.TimeUnit; - -import com.codahale.metrics.MetricRegistry; import org.apache.commons.io.IOUtils; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; @@ -70,9 +72,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static java.nio.charset.StandardCharsets.UTF_8; -import static java.util.Collections.singletonMap; - public class BasicAuthIntegrationTest extends SolrCloudAuthTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -81,29 +80,26 @@ public class BasicAuthIntegrationTest extends SolrCloudAuthTestCase { @Before public void setupCluster() throws Exception { - configureCluster(3) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(3).addConfig("conf", configset("cloud-minimal")).configure(); + + CollectionAdminRequest.createCollection(COLLECTION, "conf", 3, 1) + .process(cluster.getSolrClient()); - CollectionAdminRequest.createCollection(COLLECTION, "conf", 3, 1).process(cluster.getSolrClient()); - cluster.waitForActiveCollection(COLLECTION, 3, 3); } - + @After public void tearDownCluster() throws Exception { shutdownCluster(); } @Test - //commented 9-Aug-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 21-May-2018 - // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // annotated on: 24-Dec-2018 @LogLevel("org.apache.solr.security=DEBUG") public void testBasicAuth() throws Exception { boolean isUseV2Api = random().nextBoolean(); String authcPrefix = "/admin/authentication"; String authzPrefix = "/admin/authorization"; - if(isUseV2Api){ + if (isUseV2Api) { authcPrefix = "/____v2/cluster/security/authentication"; authzPrefix = "/____v2/cluster/security/authorization"; } @@ -117,51 +113,62 @@ public void testBasicAuth() throws Exception { String baseUrl = randomJetty.getBaseUrl().toString(); verifySecurityStatus(cl, baseUrl + authcPrefix, "/errorMessages", null, 20); zkClient().setData("/security.json", STD_CONF.replaceAll("'", "\"").getBytes(UTF_8), true); - verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/class", "solr.BasicAuthPlugin", 20); + verifySecurityStatus( + cl, baseUrl + authcPrefix, "authentication/class", "solr.BasicAuthPlugin", 20); randomJetty.stop(); - + cluster.waitForJettyToStop(randomJetty); - + randomJetty.start(); - + cluster.waitForAllNodes(30); - + cluster.waitForActiveCollection(COLLECTION, 3, 3); - + baseUrl = randomJetty.getBaseUrl().toString(); - verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/class", "solr.BasicAuthPlugin", 20); + verifySecurityStatus( + cl, 
baseUrl + authcPrefix, "authentication/class", "solr.BasicAuthPlugin", 20); assertNumberOfMetrics(16); // Basic auth metrics available assertAuthMetricsMinimums(1, 0, 1, 0, 0, 0); assertPkiAuthMetricsMinimums(0, 0, 0, 0, 0, 0); - - String command = "{\n" + - "'set-user': {'harry':'HarryIsCool'}\n" + - "}"; + + String command = "{\n" + "'set-user': {'harry':'HarryIsCool'}\n" + "}"; final SolrRequest genericReq; if (isUseV2Api) { - genericReq = new V2Request.Builder("/cluster/security/authentication").withMethod(SolrRequest.METHOD.POST).build(); + genericReq = + new V2Request.Builder("/cluster/security/authentication") + .withMethod(SolrRequest.METHOD.POST) + .build(); } else { - GenericSolrRequest genericSolrRequest = new GenericSolrRequest(SolrRequest.METHOD.POST, authcPrefix, new ModifiableSolrParams()); - genericSolrRequest.setContentWriter(new StringPayloadContentWriter(command, CommonParams.JSON_MIME)); + GenericSolrRequest genericSolrRequest = + new GenericSolrRequest( + SolrRequest.METHOD.POST, authcPrefix, new ModifiableSolrParams()); + genericSolrRequest.setContentWriter( + new StringPayloadContentWriter(command, CommonParams.JSON_MIME)); genericReq = genericSolrRequest; } // avoid bad connection races due to shutdown cluster.getSolrClient().getHttpClient().getConnectionManager().closeExpiredConnections(); - cluster.getSolrClient().getHttpClient().getConnectionManager().closeIdleConnections(1, TimeUnit.MILLISECONDS); - - BaseHttpSolrClient.RemoteSolrException exp = expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> { - cluster.getSolrClient().request(genericReq); - }); + cluster + .getSolrClient() + .getHttpClient() + .getConnectionManager() + .closeIdleConnections(1, TimeUnit.MILLISECONDS); + + BaseHttpSolrClient.RemoteSolrException exp = + expectThrows( + BaseHttpSolrClient.RemoteSolrException.class, + () -> { + cluster.getSolrClient().request(genericReq); + }); assertEquals(401, exp.code()); assertAuthMetricsMinimums(2, 0, 2, 0, 0, 0); assertPkiAuthMetricsMinimums(0, 0, 0, 0, 0, 0); - - command = "{\n" + - "'set-user': {'harry':'HarryIsUberCool'}\n" + - "}"; + + command = "{\n" + "'set-user': {'harry':'HarryIsUberCool'}\n" + "}"; HttpPost httpPost = new HttpPost(baseUrl + authcPrefix); setAuthorizationHeader(httpPost, makeBasicAuthHeader("solr", "SolrRocks")); @@ -177,101 +184,153 @@ public void testBasicAuth() throws Exception { baseUrl = cluster.getRandomJetty(random()).getBaseUrl().toString(); - verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/credentials/harry", NOT_NULL_PREDICATE, 20); - command = "{\n" + - "'set-user-role': {'harry':'admin'}\n" + - "}"; + verifySecurityStatus( + cl, baseUrl + authcPrefix, "authentication/credentials/harry", NOT_NULL_PREDICATE, 20); + command = "{\n" + "'set-user-role': {'harry':'admin'}\n" + "}"; - executeCommand(baseUrl + authzPrefix, cl,command, "solr", "SolrRocks"); + executeCommand(baseUrl + authzPrefix, cl, command, "solr", "SolrRocks"); assertAuthMetricsMinimums(5, 2, 3, 0, 0, 0); baseUrl = cluster.getRandomJetty(random()).getBaseUrl().toString(); - verifySecurityStatus(cl, baseUrl + authzPrefix, "authorization/user-role/harry", NOT_NULL_PREDICATE, 20); - - executeCommand(baseUrl + authzPrefix, cl, Utils.toJSONString(singletonMap("set-permission", Map.of - ("collection", "x", - "path", "/update/*", - "role", "dev"))), "harry", "HarryIsUberCool" ); - - verifySecurityStatus(cl, baseUrl + authzPrefix, "authorization/permissions[1]/collection", "x", 20); + verifySecurityStatus( + cl, baseUrl + authzPrefix, 
"authorization/user-role/harry", NOT_NULL_PREDICATE, 20); + + executeCommand( + baseUrl + authzPrefix, + cl, + Utils.toJSONString( + singletonMap( + "set-permission", Map.of("collection", "x", "path", "/update/*", "role", "dev"))), + "harry", + "HarryIsUberCool"); + + verifySecurityStatus( + cl, baseUrl + authzPrefix, "authorization/permissions[1]/collection", "x", 20); assertAuthMetricsMinimums(8, 3, 5, 0, 0, 0); - executeCommand(baseUrl + authzPrefix, cl,Utils.toJSONString(singletonMap("set-permission", Map.of - ("name", "collection-admin-edit", "role", "admin"))), "harry", "HarryIsUberCool" ); - verifySecurityStatus(cl, baseUrl + authzPrefix, "authorization/permissions[2]/name", "collection-admin-edit", 20); + executeCommand( + baseUrl + authzPrefix, + cl, + Utils.toJSONString( + singletonMap( + "set-permission", Map.of("name", "collection-admin-edit", "role", "admin"))), + "harry", + "HarryIsUberCool"); + verifySecurityStatus( + cl, + baseUrl + authzPrefix, + "authorization/permissions[2]/name", + "collection-admin-edit", + 20); assertAuthMetricsMinimums(10, 4, 6, 0, 0, 0); CollectionAdminRequest.Reload reload = CollectionAdminRequest.reloadCollection(COLLECTION); try (HttpSolrClient solrClient = getHttpSolrClient(baseUrl)) { - expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> solrClient.request(reload)); + expectThrows( + BaseHttpSolrClient.RemoteSolrException.class, () -> solrClient.request(reload)); reload.setMethod(SolrRequest.METHOD.POST); - expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> solrClient.request(reload)); + expectThrows( + BaseHttpSolrClient.RemoteSolrException.class, () -> solrClient.request(reload)); } - cluster.getSolrClient().request(CollectionAdminRequest.reloadCollection(COLLECTION) - .setBasicAuthCredentials("harry", "HarryIsUberCool")); - - expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> { - cluster.getSolrClient().request(CollectionAdminRequest.reloadCollection(COLLECTION) - .setBasicAuthCredentials("harry", "Cool12345")); - }); + cluster + .getSolrClient() + .request( + CollectionAdminRequest.reloadCollection(COLLECTION) + .setBasicAuthCredentials("harry", "HarryIsUberCool")); + + expectThrows( + BaseHttpSolrClient.RemoteSolrException.class, + () -> { + cluster + .getSolrClient() + .request( + CollectionAdminRequest.reloadCollection(COLLECTION) + .setBasicAuthCredentials("harry", "Cool12345")); + }); assertAuthMetricsMinimums(14, 5, 8, 1, 0, 0); - executeCommand(baseUrl + authzPrefix, cl,"{set-permission : { name : update , role : admin}}", "harry", "HarryIsUberCool"); + executeCommand( + baseUrl + authzPrefix, + cl, + "{set-permission : { name : update , role : admin}}", + "harry", + "HarryIsUberCool"); UpdateRequest del = new UpdateRequest().deleteByQuery("*:*"); - del.setBasicAuthCredentials("harry","HarryIsUberCool"); + del.setBasicAuthCredentials("harry", "HarryIsUberCool"); del.setCommitWithin(10); del.process(cluster.getSolrClient(), COLLECTION); - //Test for SOLR-12514. Create a new jetty . This jetty does not have the collection. - //Make a request to that jetty and it should fail + // Test for SOLR-12514. Create a new jetty . This jetty does not have the collection. 
+ // Make a request to that jetty and it should fail JettySolrRunner aNewJetty = cluster.startJettySolrRunner(); SolrClient aNewClient = aNewJetty.newClient(); UpdateRequest delQuery = null; delQuery = new UpdateRequest().deleteByQuery("*:*"); - delQuery.setBasicAuthCredentials("harry","HarryIsUberCool"); - delQuery.process(aNewClient, COLLECTION);//this should succeed + delQuery.setBasicAuthCredentials("harry", "HarryIsUberCool"); + delQuery.process(aNewClient, COLLECTION); // this should succeed try { - BaseHttpSolrClient.RemoteSolrException e = expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> { - new UpdateRequest().deleteByQuery("*:*").process(aNewClient, COLLECTION); - }); + BaseHttpSolrClient.RemoteSolrException e = + expectThrows( + BaseHttpSolrClient.RemoteSolrException.class, + () -> { + new UpdateRequest().deleteByQuery("*:*").process(aNewClient, COLLECTION); + }); assertTrue(e.getMessage(), e.getMessage().contains("Authentication failed")); } finally { aNewClient.close(); cluster.stopJettySolrRunner(aNewJetty); } - addDocument("harry","HarryIsUberCool","id", "4"); - - executeCommand(baseUrl + authcPrefix, cl, "{set-property : { blockUnknown: true}}", "harry", "HarryIsUberCool"); - verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/blockUnknown", "true", 20, "harry", "HarryIsUberCool"); + addDocument("harry", "HarryIsUberCool", "id", "4"); + + executeCommand( + baseUrl + authcPrefix, + cl, + "{set-property : { blockUnknown: true}}", + "harry", + "HarryIsUberCool"); + verifySecurityStatus( + cl, + baseUrl + authcPrefix, + "authentication/blockUnknown", + "true", + 20, + "harry", + "HarryIsUberCool"); verifySecurityStatus(cl, baseUrl + "/admin/info/key", "key", NOT_NULL_PREDICATE, 20); assertAuthMetricsMinimums(17, 8, 8, 1, 0, 0); - String[] toolArgs = new String[]{ - "status", "-solr", baseUrl}; + String[] toolArgs = new String[] {"status", "-solr", baseUrl}; ByteArrayOutputStream baos = new ByteArrayOutputStream(); PrintStream stdoutSim = new PrintStream(baos, true, StandardCharsets.UTF_8.name()); SolrCLI.StatusTool tool = new SolrCLI.StatusTool(stdoutSim); try { System.setProperty("basicauth", "harry:HarryIsUberCool"); - tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), toolArgs)); + tool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(tool.getOptions()), toolArgs)); Map obj = (Map) Utils.fromJSON(new ByteArrayInputStream(baos.toByteArray())); assertTrue(obj.containsKey("version")); assertTrue(obj.containsKey("startTime")); assertTrue(obj.containsKey("uptime")); assertTrue(obj.containsKey("memory")); } catch (Exception e) { - log.error("RunExampleTool failed due to: {}; stdout from tool prior to failure: {}" - , e, baos.toString(StandardCharsets.UTF_8.name())); // nowarn + log.error( + "RunExampleTool failed due to: {}; stdout from tool prior to failure: {}", + e, + baos.toString(StandardCharsets.UTF_8.name())); // nowarn } SolrParams params = new MapSolrParams(Collections.singletonMap("q", "*:*")); // Query that fails due to missing credentials - exp = expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> { - cluster.getSolrClient().query(COLLECTION, params); - }); + exp = + expectThrows( + BaseHttpSolrClient.RemoteSolrException.class, + () -> { + cluster.getSolrClient().query(COLLECTION, params); + }); assertEquals(401, exp.code()); assertAuthMetricsMinimums(19, 8, 8, 1, 2, 0); assertPkiAuthMetricsMinimums(3, 3, 0, 0, 0, 0); @@ -280,32 +339,55 @@ public void 
testBasicAuth() throws Exception { GenericSolrRequest req = new GenericSolrRequest(SolrRequest.METHOD.GET, "/select", params); req.setBasicAuthCredentials("harry", "HarryIsUberCool"); cluster.getSolrClient().request(req, COLLECTION); - + assertAuthMetricsMinimums(20, 8, 8, 1, 2, 0); assertPkiAuthMetricsMinimums(10, 10, 0, 0, 0, 0); - addDocument("harry","HarryIsUberCool","id", "5"); + addDocument("harry", "HarryIsUberCool", "id", "5"); assertAuthMetricsMinimums(23, 11, 9, 1, 2, 0); assertPkiAuthMetricsMinimums(14, 14, 0, 0, 0, 0); - // Reindex collection depends on streaming request that needs to authenticate against new collection - CollectionAdminRequest.ReindexCollection reindexReq = CollectionAdminRequest.reindexCollection(COLLECTION); + // Reindex collection depends on streaming request that needs to authenticate against new + // collection + CollectionAdminRequest.ReindexCollection reindexReq = + CollectionAdminRequest.reindexCollection(COLLECTION); reindexReq.setBasicAuthCredentials("harry", "HarryIsUberCool"); cluster.getSolrClient().request(reindexReq, COLLECTION); assertAuthMetricsMinimums(24, 12, 9, 1, 2, 0); assertPkiAuthMetricsMinimums(15, 15, 0, 0, 0, 0); // Validate forwardCredentials - assertEquals(1, executeQuery(params("q", "id:5"), "harry", "HarryIsUberCool").getResults().getNumFound()); + assertEquals( + 1, + executeQuery(params("q", "id:5"), "harry", "HarryIsUberCool").getResults().getNumFound()); assertAuthMetricsMinimums(25, 13, 9, 1, 2, 0); assertPkiAuthMetricsMinimums(19, 19, 0, 0, 0, 0); - executeCommand(baseUrl + authcPrefix, cl, "{set-property : { forwardCredentials: true}}", "harry", "HarryIsUberCool"); - verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/forwardCredentials", "true", 20, "harry", "HarryIsUberCool"); - assertEquals(1, executeQuery(params("q", "id:5"), "harry", "HarryIsUberCool").getResults().getNumFound()); + executeCommand( + baseUrl + authcPrefix, + cl, + "{set-property : { forwardCredentials: true}}", + "harry", + "HarryIsUberCool"); + verifySecurityStatus( + cl, + baseUrl + authcPrefix, + "authentication/forwardCredentials", + "true", + 20, + "harry", + "HarryIsUberCool"); + assertEquals( + 1, + executeQuery(params("q", "id:5"), "harry", "HarryIsUberCool").getResults().getNumFound()); assertAuthMetricsMinimums(32, 20, 9, 1, 2, 0); assertPkiAuthMetricsMinimums(19, 19, 0, 0, 0, 0); - - executeCommand(baseUrl + authcPrefix, cl, "{set-property : { blockUnknown: false}}", "harry", "HarryIsUberCool"); + + executeCommand( + baseUrl + authcPrefix, + cl, + "{set-property : { blockUnknown: false}}", + "harry", + "HarryIsUberCool"); } finally { if (cl != null) { HttpClientUtil.close(cl); @@ -314,13 +396,19 @@ public void testBasicAuth() throws Exception { } private void assertNumberOfMetrics(int num) { - MetricRegistry registry0 = cluster.getJettySolrRunner(0).getCoreContainer().getMetricManager().registry("solr.node"); + MetricRegistry registry0 = + cluster.getJettySolrRunner(0).getCoreContainer().getMetricManager().registry("solr.node"); assertNotNull(registry0); - assertEquals(num, registry0.getMetrics().entrySet().stream().filter(e -> e.getKey().startsWith("SECURITY")).count()); + assertEquals( + num, + registry0.getMetrics().entrySet().stream() + .filter(e -> e.getKey().startsWith("SECURITY")) + .count()); } - private QueryResponse executeQuery(ModifiableSolrParams params, String user, String pass) throws IOException, SolrServerException { + private QueryResponse executeQuery(ModifiableSolrParams params, String user, String pass) + 
throws IOException, SolrServerException { QueryRequest req = new QueryRequest(params); req.setBasicAuthCredentials(user, pass); QueryResponse resp = req.process(cluster.getSolrClient(), COLLECTION); @@ -329,7 +417,8 @@ private QueryResponse executeQuery(ModifiableSolrParams params, String user, Str return resp; } - private void addDocument(String user, String pass, String... fields) throws IOException, SolrServerException { + private void addDocument(String user, String pass, String... fields) + throws IOException, SolrServerException { SolrInputDocument doc = new SolrInputDocument(); boolean isKey = true; String key = null; @@ -348,16 +437,16 @@ private void addDocument(String user, String pass, String... fields) throws IOEx update.commit(cluster.getSolrClient(), COLLECTION); } - public static void executeCommand(String url, HttpClient cl, String payload, - String user, String pwd) throws Exception { + public static void executeCommand( + String url, HttpClient cl, String payload, String user, String pwd) throws Exception { // HACK: work around for SOLR-13464... // // note the authz/authn objects in use on each node before executing the command, // then wait until we see new objects on every node *after* executing the command // before returning... - final Set<Map.Entry<String, Object>> initialPlugins - = getAuthPluginsInUseForCluster(url).entrySet(); + final Set<Map.Entry<String, Object>> initialPlugins = + getAuthPluginsInUseForCluster(url).entrySet(); HttpPost httpPost; HttpResponse r; @@ -367,19 +456,21 @@ public static void executeCommand(String url, HttpClient cl, String payload, httpPost.addHeader("Content-Type", "application/json; charset=UTF-8"); r = cl.execute(httpPost); String response = IOUtils.toString(r.getEntity().getContent(), StandardCharsets.UTF_8); - assertEquals("Non-200 response code. Response was " + response, 200, r.getStatusLine().getStatusCode()); + assertEquals( + "Non-200 response code. Response was " + response, 200, r.getStatusLine().getStatusCode()); assertFalse("Response contained errors: " + response, response.contains("errorMessages")); Utils.consumeFully(r.getEntity()); // HACK (continued)... final TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); - timeout.waitFor("core containers never fully updated their auth plugins", - () -> { - final Set<Map.Entry<String, Object>> tmpSet - = getAuthPluginsInUseForCluster(url).entrySet(); - tmpSet.retainAll(initialPlugins); - return tmpSet.isEmpty(); - }); + timeout.waitFor( + "core containers never fully updated their auth plugins", + () -> { + final Set<Map.Entry<String, Object>> tmpSet = + getAuthPluginsInUseForCluster(url).entrySet(); + tmpSet.retainAll(initialPlugins); + return tmpSet.isEmpty(); + }); } public static Replica getRandomReplica(DocCollection coll, Random random) { @@ -392,15 +483,16 @@ public static Replica getRandomReplica(DocCollection coll, Random random) { return l.isEmpty() ? null : l.get(0); } - //the password is 'SolrRocks' - //this could be generated everytime. But , then we will not know if there is any regression - protected static final String STD_CONF = "{\n" + - " 'authentication':{\n" + - " 'blockUnknown':'false',\n" + - " 'class':'solr.BasicAuthPlugin',\n" + - " 'credentials':{'solr':'orwp2Ghgj39lmnrZOTm7Qtre1VqHFDfwAEzr0ApbN3Y= Ju5osoAqOX8iafhWpPP01E5P+sg8tK8tHON7rCYZRRw='}},\n" + - " 'authorization':{\n" + - " 'class':'solr.RuleBasedAuthorizationPlugin',\n" + - " 'user-role':{'solr':'admin'},\n" + - " 'permissions':[{'name':'security-edit','role':'admin'}]}}"; + // the password is 'SolrRocks' + // this could be generated every time.
But, then we will not know if there is any regression + protected static final String STD_CONF = + "{\n" + + " 'authentication':{\n" + + " 'blockUnknown':'false',\n" + + " 'class':'solr.BasicAuthPlugin',\n" + + " 'credentials':{'solr':'orwp2Ghgj39lmnrZOTm7Qtre1VqHFDfwAEzr0ApbN3Y= Ju5osoAqOX8iafhWpPP01E5P+sg8tK8tHON7rCYZRRw='}},\n" + + " 'authorization':{\n" + + " 'class':'solr.RuleBasedAuthorizationPlugin',\n" + + " 'user-role':{'solr':'admin'},\n" + + " 'permissions':[{'name':'security-edit','role':'admin'}]}}"; } diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthOnSingleNodeTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthOnSingleNodeTest.java index dfff0581513..fde29f411cb 100644 --- a/solr/core/src/test/org/apache/solr/security/BasicAuthOnSingleNodeTest.java +++ b/solr/core/src/test/org/apache/solr/security/BasicAuthOnSingleNodeTest.java @@ -18,7 +18,6 @@ package org.apache.solr.security; import java.lang.invoke.MethodHandles; - import org.apache.solr.client.solrj.impl.Http2SolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.QueryRequest; @@ -55,33 +54,41 @@ public void tearDown() throws Exception { @Test public void basicTest() throws Exception { - try (Http2SolrClient client = new Http2SolrClient.Builder(cluster.getJettySolrRunner(0).getBaseUrl().toString()) - .build()){ + try (Http2SolrClient client = + new Http2SolrClient.Builder(cluster.getJettySolrRunner(0).getBaseUrl().toString()) + .build()) { - // SOLR-13510, this will be failed if the listener (handling inject credential in header) is called in another - // thread since SolrRequestInfo will return null in that case. + // SOLR-13510, this will fail if the listener (which injects credentials into the header) is + // called in another thread, since SolrRequestInfo will return null in that case. for (int i = 0; i < 30; i++) { - assertNotNull(new QueryRequest(params("q", "*:*")) - .setBasicAuthCredentials("solr", "solr").process(client, COLLECTION)); + assertNotNull( + new QueryRequest(params("q", "*:*")) + .setBasicAuthCredentials("solr", "solr") + .process(client, COLLECTION)); } } } @Test public void testDeleteSecurityJsonZnode() throws Exception { - try (Http2SolrClient client = new Http2SolrClient.Builder(cluster.getJettySolrRunner(0).getBaseUrl().toString()) - .build()){ + try (Http2SolrClient client = + new Http2SolrClient.Builder(cluster.getJettySolrRunner(0).getBaseUrl().toString()) + .build()) { try { new QueryRequest(params("q", "*:*")).process(client, COLLECTION); fail("Should throw exception due to authentication needed"); - } catch (Exception e) { /* Ignore */ } + } catch (Exception e) { + /* Ignore */ + } - // Deleting security.json will disable security - before SOLR-9679 it would instead cause an exception + // Deleting security.json will disable security - before SOLR-9679 it would instead cause an + // exception cluster.getZkClient().delete("/security.json", -1, false); int count = 0; boolean done = false; - // Assert that security is turned off. This is async, so we retry up to 5s before failing the test + // Assert that security is turned off.
This is async, so we retry up to 5s before failing the + // test while (!done) { try { Thread.sleep(500); @@ -97,22 +104,21 @@ public void testDeleteSecurityJsonZnode() throws Exception { } } - protected static final String STD_CONF = "{\n" + - " \"authentication\":{\n" + - " \"blockUnknown\": true,\n" + - " \"class\":\"solr.BasicAuthPlugin\",\n" + - " \"credentials\":{\"solr\":\"EEKn7ywYk5jY8vG9TyqlG2jvYuvh1Q7kCCor6Hqm320= 6zkmjMjkMKyJX6/f0VarEWQujju5BzxZXub6WOrEKCw=\"}\n" + - " },\n" + - " \"authorization\":{\n" + - " \"class\":\"solr.RuleBasedAuthorizationPlugin\",\n" + - " \"permissions\":[\n" + - " {\"name\":\"security-edit\", \"role\":\"admin\"},\n" + - " {\"name\":\"collection-admin-edit\", \"role\":\"admin\"},\n" + - " {\"name\":\"core-admin-edit\", \"role\":\"admin\"}\n" + - " ],\n" + - " \"user-role\":{\"solr\":\"admin\"}\n" + - " }\n" + - "}"; + protected static final String STD_CONF = + "{\n" + + " \"authentication\":{\n" + + " \"blockUnknown\": true,\n" + + " \"class\":\"solr.BasicAuthPlugin\",\n" + + " \"credentials\":{\"solr\":\"EEKn7ywYk5jY8vG9TyqlG2jvYuvh1Q7kCCor6Hqm320= 6zkmjMjkMKyJX6/f0VarEWQujju5BzxZXub6WOrEKCw=\"}\n" + + " },\n" + + " \"authorization\":{\n" + + " \"class\":\"solr.RuleBasedAuthorizationPlugin\",\n" + + " \"permissions\":[\n" + + " {\"name\":\"security-edit\", \"role\":\"admin\"},\n" + + " {\"name\":\"collection-admin-edit\", \"role\":\"admin\"},\n" + + " {\"name\":\"core-admin-edit\", \"role\":\"admin\"}\n" + + " ],\n" + + " \"user-role\":{\"solr\":\"admin\"}\n" + + " }\n" + + "}"; } - - diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java index d70725b4d47..f5aea418af7 100644 --- a/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java +++ b/solr/core/src/test/org/apache/solr/security/BasicAuthStandaloneTest.java @@ -16,6 +16,11 @@ */ package org.apache.solr.security; +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.apache.solr.cloud.SolrCloudAuthTestCase.NOT_NULL_PREDICATE; +import static org.apache.solr.security.BasicAuthIntegrationTest.STD_CONF; +import static org.apache.solr.security.BasicAuthIntegrationTest.verifySecurityStatus; + import java.io.IOException; import java.lang.invoke.MethodHandles; import java.nio.charset.Charset; @@ -25,7 +30,6 @@ import java.util.Base64; import java.util.Collections; import java.util.Properties; - import org.apache.http.Header; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; @@ -47,25 +51,20 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static java.nio.charset.StandardCharsets.UTF_8; -import static org.apache.solr.cloud.SolrCloudAuthTestCase.NOT_NULL_PREDICATE; -import static org.apache.solr.security.BasicAuthIntegrationTest.STD_CONF; -import static org.apache.solr.security.BasicAuthIntegrationTest.verifySecurityStatus; - public class BasicAuthStandaloneTest extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final Path ROOT_DIR = Paths.get(TEST_HOME()); - private static final Path CONF_DIR = ROOT_DIR.resolve("configsets").resolve("configset-2").resolve("conf"); + private static final Path CONF_DIR = + ROOT_DIR.resolve("configsets").resolve("configset-2").resolve("conf"); SecurityConfHandlerLocalForTesting securityConfHandler; SolrInstance instance = null; JettySolrRunner jetty; - + @Before @Override - public void setUp() throws 
Exception - { + public void setUp() throws Exception { super.setUp(); instance = new SolrInstance("inst", null); instance.setUp(); @@ -94,49 +93,56 @@ public void testBasicAuth() throws Exception { HttpSolrClient httpSolrClient = null; try { cl = HttpClientUtil.createClient(null); - String baseUrl = buildUrl(jetty.getLocalPort(), "/solr"); + String baseUrl = buildUrl(jetty.getLocalPort(), "/solr"); httpSolrClient = getHttpSolrClient(baseUrl); - + verifySecurityStatus(cl, baseUrl + authcPrefix, "/errorMessages", null, 20); // Write security.json locally. Should cause security to be initialized - securityConfHandler.persistConf(new SecurityConfHandler.SecurityConfig() - .setData(Utils.fromJSONString(STD_CONF.replaceAll("'", "\"")))); + securityConfHandler.persistConf( + new SecurityConfHandler.SecurityConfig() + .setData(Utils.fromJSONString(STD_CONF.replaceAll("'", "\"")))); securityConfHandler.securityConfEdited(); - verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/class", "solr.BasicAuthPlugin", 20); + verifySecurityStatus( + cl, baseUrl + authcPrefix, "authentication/class", "solr.BasicAuthPlugin", 20); - String command = "{\n" + - "'set-user': {'harry':'HarryIsCool'}\n" + - "}"; + String command = "{\n" + "'set-user': {'harry':'HarryIsCool'}\n" + "}"; doHttpPost(cl, baseUrl + authcPrefix, command, null, null, 401); verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication.enabled", "true", 20); - command = "{\n" + - "'set-user': {'harry':'HarryIsUberCool'}\n" + - "}"; - + command = "{\n" + "'set-user': {'harry':'HarryIsUberCool'}\n" + "}"; doHttpPost(cl, baseUrl + authcPrefix, command, "solr", "SolrRocks"); - verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/credentials/harry", NOT_NULL_PREDICATE, 20); + verifySecurityStatus( + cl, baseUrl + authcPrefix, "authentication/credentials/harry", NOT_NULL_PREDICATE, 20); // Read file from SOLR_HOME and verify that it contains our new user - assertTrue(new String(Utils.toJSON(securityConfHandler.getSecurityConfig(false).getData()), - Charset.forName("UTF-8")).contains("harry")); + assertTrue( + new String( + Utils.toJSON(securityConfHandler.getSecurityConfig(false).getData()), + Charset.forName("UTF-8")) + .contains("harry")); // Edit authorization - verifySecurityStatus(cl, baseUrl + authzPrefix, "authorization/permissions[1]/role", null, 20); - doHttpPost(cl, baseUrl + authzPrefix, "{'set-permission': {'name': 'update', 'role':'updaterole'}}", "solr", "SolrRocks"); - command = "{\n" + - "'set-permission': {'name': 'read', 'role':'solr'}\n" + - "}"; + verifySecurityStatus( + cl, baseUrl + authzPrefix, "authorization/permissions[1]/role", null, 20); + doHttpPost( + cl, + baseUrl + authzPrefix, + "{'set-permission': {'name': 'update', 'role':'updaterole'}}", + "solr", + "SolrRocks"); + command = "{\n" + "'set-permission': {'name': 'read', 'role':'solr'}\n" + "}"; doHttpPost(cl, baseUrl + authzPrefix, command, "solr", "SolrRocks"); try { - httpSolrClient.query("collection1", new MapSolrParams(Collections.singletonMap("q", "foo"))); + httpSolrClient.query( + "collection1", new MapSolrParams(Collections.singletonMap("q", "foo"))); fail("Should return a 401 response"); } catch (Exception e) { // Test that the second doPost request to /security/authorization went through - verifySecurityStatus(cl, baseUrl + authzPrefix, "authorization/permissions[2]/role", "solr", 20); + verifySecurityStatus( + cl, baseUrl + authzPrefix, "authorization/permissions[2]/role", "solr", 20); } } finally { if (cl != null) { @@ -146,15 
+152,27 @@ public void testBasicAuth() throws Exception { } } - static void doHttpPost(HttpClient cl, String url, String jsonCommand, String basicUser, String basicPass) throws IOException { + static void doHttpPost( + HttpClient cl, String url, String jsonCommand, String basicUser, String basicPass) + throws IOException { doHttpPost(cl, url, jsonCommand, basicUser, basicPass, 200); } - static void doHttpPost(HttpClient cl, String url, String jsonCommand, String basicUser, String basicPass, int expectStatusCode) throws IOException { - doHttpPostWithHeader(cl, url, jsonCommand, getBasicAuthHeader(basicUser, basicPass), expectStatusCode); + static void doHttpPost( + HttpClient cl, + String url, + String jsonCommand, + String basicUser, + String basicPass, + int expectStatusCode) + throws IOException { + doHttpPostWithHeader( + cl, url, jsonCommand, getBasicAuthHeader(basicUser, basicPass), expectStatusCode); } - static void doHttpPostWithHeader(HttpClient cl, String url, String jsonCommand, Header header, int expectStatusCode) throws IOException { + static void doHttpPostWithHeader( + HttpClient cl, String url, String jsonCommand, Header header, int expectStatusCode) + throws IOException { HttpPost httpPost = new HttpPost(url); httpPost.setHeader(header); httpPost.setEntity(new ByteArrayEntity(jsonCommand.replaceAll("'", "\"").getBytes(UTF_8))); @@ -182,22 +200,23 @@ public static void setBasicAuthHeader(AbstractHttpMessage httpMsg, String user, static JettySolrRunner createAndStartJetty(SolrInstance instance) throws Exception { Properties nodeProperties = new Properties(); nodeProperties.setProperty("solr.data.dir", instance.getDataDir().toString()); - JettySolrRunner jetty = new JettySolrRunner(instance.getHomeDir().toString(), nodeProperties, buildJettyConfig("/solr")); + JettySolrRunner jetty = + new JettySolrRunner( + instance.getHomeDir().toString(), nodeProperties, buildJettyConfig("/solr")); jetty.start(); return jetty; } - - + static class SolrInstance { String name; Integer port; Path homeDir; Path confDir; Path dataDir; - + /** - * if leaderPort is null, this instance is a leader -- otherwise this instance is a follower, and assumes the leader is - * on localhost at the specified port. + * if leaderPort is null, this instance is a leader -- otherwise this instance is a follower, + * and assumes the leader is on localhost at the specified port. 
*/ public SolrInstance(String name, Integer port) { this.name = name; @@ -228,7 +247,6 @@ public Path getSolrXmlFile() { return ROOT_DIR.resolve("solr.xml"); } - public void setUp() throws Exception { homeDir = createTempDir(name).toAbsolutePath(); dataDir = homeDir.resolve("collection1").resolve("data"); @@ -244,6 +262,5 @@ public void setUp() throws Exception { Files.createFile(homeDir.resolve("collection1").resolve("core.properties")); } - } } diff --git a/solr/core/src/test/org/apache/solr/security/CallbackAuditLoggerPlugin.java b/solr/core/src/test/org/apache/solr/security/CallbackAuditLoggerPlugin.java index cc6ad98c0e6..09c881c4490 100644 --- a/solr/core/src/test/org/apache/solr/security/CallbackAuditLoggerPlugin.java +++ b/solr/core/src/test/org/apache/solr/security/CallbackAuditLoggerPlugin.java @@ -22,27 +22,28 @@ import java.lang.invoke.MethodHandles; import java.net.Socket; import java.nio.charset.StandardCharsets; -import java.util.concurrent.Semaphore; import java.util.HashMap; import java.util.Map; - +import java.util.concurrent.Semaphore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** - * Special test-only audit logger which will send the path (e.g. /select) as a callback to the running test + * Special test-only audit logger which will send the path (e.g. /select) as a callback to the + * running test */ public class CallbackAuditLoggerPlugin extends AuditLoggerPlugin { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - public static final Map<String, Semaphore> BLOCKING_SEMAPHORES = new HashMap<>(); - + public static final Map<String, Semaphore> BLOCKING_SEMAPHORES = new HashMap<>(); + private int callbackPort; private Socket socket; private PrintWriter out; private Semaphore semaphore = null; - + /** * Opens a socket to send a callback, e.g. to a running test client + * * @param event the audit event */ @Override @@ -52,16 +53,18 @@ public void audit(AuditEvent event) { try { semaphore.acquire(); } catch (InterruptedException e) { - log.warn("audit() interrupted while waiting for ticket, probably due to shutdown, aborting"); + log.warn( + "audit() interrupted while waiting for ticket, probably due to shutdown, aborting"); return; } } out.write(formatter.formatEvent(event) + "\n"); - if (! 
out.checkError()) { + if (out.checkError()) { log.error("Output stream has an ERROR!"); } if (log.isInfoEnabled()) { - log.info("Sent audit callback {} to localhost:{}", formatter.formatEvent(event), callbackPort); + log.info( + "Sent audit callback {} to localhost:{}", formatter.formatEvent(event), callbackPort); } } @@ -73,19 +76,22 @@ public void init(Map<String, Object> pluginConfig) { if (null != semaphoreName) { semaphore = BLOCKING_SEMAPHORES.get(semaphoreName); if (null == semaphore) { - throw new RuntimeException("Test did not setup semaphore of specified name: " + semaphoreName); + throw new RuntimeException( + "Test did not setup semaphore of specified name: " + semaphoreName); } } try { socket = new Socket("localhost", callbackPort); - out = new PrintWriter(new OutputStreamWriter(socket.getOutputStream(), StandardCharsets.UTF_8), true); + out = + new PrintWriter( + new OutputStreamWriter(socket.getOutputStream(), StandardCharsets.UTF_8), true); } catch (IOException e) { throw new RuntimeException(e); } } @Override - public void close() throws IOException { + public void close() throws IOException { super.close(); if (socket != null) socket.close(); } diff --git a/solr/core/src/test/org/apache/solr/security/CertAuthPluginTest.java b/solr/core/src/test/org/apache/solr/security/CertAuthPluginTest.java index fb32a217cea..a7c679265cd 100644 --- a/solr/core/src/test/org/apache/solr/security/CertAuthPluginTest.java +++ b/solr/core/src/test/org/apache/solr/security/CertAuthPluginTest.java @@ -16,18 +16,6 @@ */ package org.apache.solr.security; -import org.apache.solr.SolrTestCaseJ4; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; - -import javax.security.auth.x500.X500Principal; -import javax.servlet.FilterChain; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import java.security.cert.X509Certificate; - import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; @@ -35,45 +23,56 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import java.security.cert.X509Certificate; +import javax.security.auth.x500.X500Principal; +import javax.servlet.FilterChain; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import org.apache.solr.SolrTestCaseJ4; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + public class CertAuthPluginTest extends SolrTestCaseJ4 { - private CertAuthPlugin plugin; + private CertAuthPlugin plugin; - @BeforeClass - public static void setupMockito() { - SolrTestCaseJ4.assumeWorkingMockito(); - } + @BeforeClass + public static void setupMockito() { + SolrTestCaseJ4.assumeWorkingMockito(); + } - @Before - public void setUp() throws Exception { - super.setUp(); - plugin = new CertAuthPlugin(); - } + @Before + public void setUp() throws Exception { + super.setUp(); + plugin = new CertAuthPlugin(); + } - @Test - public void testAuthenticateOk() throws Exception { - X500Principal principal = new X500Principal("CN=NAME"); - X509Certificate certificate = mock(X509Certificate.class); - HttpServletRequest request = mock(HttpServletRequest.class); + @Test + public void testAuthenticateOk() throws Exception { + X500Principal principal = new X500Principal("CN=NAME"); + X509Certificate certificate = mock(X509Certificate.class); + HttpServletRequest request = mock(HttpServletRequest.class); - 
when(certificate.getSubjectX500Principal()).thenReturn(principal); - when(request.getAttribute(any())).thenReturn(new X509Certificate[] { certificate }); + when(certificate.getSubjectX500Principal()).thenReturn(principal); + when(request.getAttribute(any())).thenReturn(new X509Certificate[] {certificate}); - FilterChain chain = (req, rsp) -> assertEquals(principal, ((HttpServletRequest) req).getUserPrincipal()); - assertTrue(plugin.doAuthenticate(request, null, chain)); + FilterChain chain = + (req, rsp) -> assertEquals(principal, ((HttpServletRequest) req).getUserPrincipal()); + assertTrue(plugin.doAuthenticate(request, null, chain)); - assertEquals(1, plugin.numAuthenticated.getCount()); - } + assertEquals(1, plugin.numAuthenticated.getCount()); + } - @Test - public void testAuthenticateMissing() throws Exception { - HttpServletRequest request = mock(HttpServletRequest.class); - when(request.getAttribute(any())).thenReturn(null); + @Test + public void testAuthenticateMissing() throws Exception { + HttpServletRequest request = mock(HttpServletRequest.class); + when(request.getAttribute(any())).thenReturn(null); - HttpServletResponse response = mock(HttpServletResponse.class); + HttpServletResponse response = mock(HttpServletResponse.class); - assertFalse(plugin.doAuthenticate(request, response, null)); - verify(response).sendError(eq(401), anyString()); + assertFalse(plugin.doAuthenticate(request, response, null)); + verify(response).sendError(eq(401), anyString()); - assertEquals(1, plugin.numMissingCredentials.getCount()); - } + assertEquals(1, plugin.numMissingCredentials.getCount()); + } } diff --git a/solr/core/src/test/org/apache/solr/security/MockAuditLoggerPlugin.java b/solr/core/src/test/org/apache/solr/security/MockAuditLoggerPlugin.java index db995a9d733..51d6d7291eb 100644 --- a/solr/core/src/test/org/apache/solr/security/MockAuditLoggerPlugin.java +++ b/solr/core/src/test/org/apache/solr/security/MockAuditLoggerPlugin.java @@ -22,17 +22,17 @@ import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class MockAuditLoggerPlugin extends AuditLoggerPlugin { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); public List<AuditEvent> events = new ArrayList<>(); - public Map<String, AtomicInteger> typeCounts = new HashMap<>(); + public Map<String, AtomicInteger> typeCounts = new HashMap<>(); /** * Audits an event to an internal list that can be inspected later by the test code + * * @param event the audit event */ @Override @@ -45,8 +45,7 @@ public void audit(AuditEvent event) { } private void incrementType(String type) { - if (!typeCounts.containsKey(type)) - typeCounts.put(type, new AtomicInteger(0)); + if (!typeCounts.containsKey(type)) typeCounts.put(type, new AtomicInteger(0)); typeCounts.get(type).incrementAndGet(); } diff --git a/solr/core/src/test/org/apache/solr/security/MockAuthenticationPlugin.java b/solr/core/src/test/org/apache/solr/security/MockAuthenticationPlugin.java index ce042cada11..03cc6e5315c 100644 --- a/solr/core/src/test/org/apache/solr/security/MockAuthenticationPlugin.java +++ b/solr/core/src/test/org/apache/solr/security/MockAuthenticationPlugin.java @@ -16,29 +16,29 @@ */ package org.apache.solr.security; +import java.io.IOException; +import java.security.Principal; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Predicate; import javax.servlet.FilterChain; import javax.servlet.ServletException; import 
javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import java.io.IOException; -import java.security.Principal; -import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Predicate; - import org.apache.http.auth.BasicUserPrincipal; public class MockAuthenticationPlugin extends AuthenticationPlugin { static Predicate<ServletRequest> predicate; @Override - public void init(Map<String, Object> pluginConfig) { - } + public void init(Map<String, Object> pluginConfig) {} @Override - public boolean doAuthenticate(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) throws IOException, ServletException { + public boolean doAuthenticate( + HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) + throws IOException, ServletException { String user = null; if (predicate != null) { if (predicate.test(request)) { @@ -48,16 +48,21 @@ public boolean doAuthenticate(HttpServletRequest request, HttpServletResponse re } final AtomicBoolean requestContinues = new AtomicBoolean(false); - forward(user, request, response, (req, res) -> { - filterChain.doFilter(req, res); - requestContinues.set(true); - }); + forward( + user, + request, + response, + (req, res) -> { + filterChain.doFilter(req, res); + requestContinues.set(true); + }); return requestContinues.get(); } - protected void forward(String user, HttpServletRequest req, ServletResponse rsp, - FilterChain chain) throws IOException, ServletException { - if(user != null) { + protected void forward( + String user, HttpServletRequest req, ServletResponse rsp, FilterChain chain) + throws IOException, ServletException { + if (user != null) { final Principal p = new BasicUserPrincipal(user); req = wrapWithPrincipal(req, p); } @@ -65,7 +70,5 @@ protected void forward(String user, HttpServletRequest req, ServletResponse rsp, } @Override - public void close() throws IOException { - - } + public void close() throws IOException {} } diff --git a/solr/core/src/test/org/apache/solr/security/MockAuthorizationPlugin.java b/solr/core/src/test/org/apache/solr/security/MockAuthorizationPlugin.java index d58a4993f29..3fc155a6c64 100644 --- a/solr/core/src/test/org/apache/solr/security/MockAuthorizationPlugin.java +++ b/solr/core/src/test/org/apache/solr/security/MockAuthorizationPlugin.java @@ -21,7 +21,6 @@ import java.util.HashSet; import java.util.Map; import java.util.function.Consumer; - import org.apache.solr.common.SolrException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -49,19 +48,14 @@ public AuthorizationResponse authorize(AuthorizationContext context) { } if (uname == null) uname = context.getParams().get("uname"); log.info("User request: {}", uname); - if (uname == null || denyUsers.contains(uname)) - return new AuthorizationResponse(403); - else - return new AuthorizationResponse(200); + if (uname == null || denyUsers.contains(uname)) return new AuthorizationResponse(403); + else return new AuthorizationResponse(200); } } @Override - public void init(Map<String, Object> initInfo) { - } + public void init(Map<String, Object> initInfo) {} @Override - public void close() throws IOException { - - } + public void close() throws IOException {} } diff --git a/solr/core/src/test/org/apache/solr/security/MultiAuthPluginTest.java b/solr/core/src/test/org/apache/solr/security/MultiAuthPluginTest.java index 0599113d81b..7d897ea621c 100644 --- a/solr/core/src/test/org/apache/solr/security/MultiAuthPluginTest.java +++ 
b/solr/core/src/test/org/apache/solr/security/MultiAuthPluginTest.java @@ -16,9 +16,13 @@ */ package org.apache.solr.security; -import javax.servlet.FilterChain; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; +import static org.apache.solr.cloud.SolrCloudAuthTestCase.NOT_NULL_PREDICATE; +import static org.apache.solr.security.BasicAuthIntegrationTest.verifySecurityStatus; +import static org.apache.solr.security.BasicAuthStandaloneTest.SolrInstance; +import static org.apache.solr.security.BasicAuthStandaloneTest.createAndStartJetty; +import static org.apache.solr.security.BasicAuthStandaloneTest.doHttpPost; +import static org.apache.solr.security.BasicAuthStandaloneTest.doHttpPostWithHeader; + import java.io.IOException; import java.io.Serializable; import java.nio.charset.StandardCharsets; @@ -27,7 +31,9 @@ import java.util.Map; import java.util.Objects; import java.util.function.Predicate; - +import javax.servlet.FilterChain; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; import org.apache.commons.io.FileUtils; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; @@ -46,13 +52,6 @@ import org.junit.Before; import org.junit.Test; -import static org.apache.solr.cloud.SolrCloudAuthTestCase.NOT_NULL_PREDICATE; -import static org.apache.solr.security.BasicAuthIntegrationTest.verifySecurityStatus; -import static org.apache.solr.security.BasicAuthStandaloneTest.SolrInstance; -import static org.apache.solr.security.BasicAuthStandaloneTest.createAndStartJetty; -import static org.apache.solr.security.BasicAuthStandaloneTest.doHttpPost; -import static org.apache.solr.security.BasicAuthStandaloneTest.doHttpPostWithHeader; - public class MultiAuthPluginTest extends SolrTestCaseJ4 { private static final String authcPrefix = "/admin/authentication"; @@ -99,95 +98,137 @@ public void testMultiAuthEditAPI() throws Exception { // Initialize security.json with multiple auth plugins configured String multiAuthPluginSecurityJson = - FileUtils.readFileToString(TEST_PATH().resolve("security").resolve("multi_auth_plugin_security.json").toFile(), StandardCharsets.UTF_8); - securityConfHandler.persistConf(new SecurityConfHandler.SecurityConfig().setData(Utils.fromJSONString(multiAuthPluginSecurityJson))); + FileUtils.readFileToString( + TEST_PATH().resolve("security").resolve("multi_auth_plugin_security.json").toFile(), + StandardCharsets.UTF_8); + securityConfHandler.persistConf( + new SecurityConfHandler.SecurityConfig() + .setData(Utils.fromJSONString(multiAuthPluginSecurityJson))); securityConfHandler.securityConfEdited(); - verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/class", "solr.MultiAuthPlugin", 5, user, pass); - verifySecurityStatus(cl, baseUrl + authzPrefix, "authorization/class", "solr.MultiAuthRuleBasedAuthorizationPlugin", 5, user, pass); + verifySecurityStatus( + cl, baseUrl + authcPrefix, "authentication/class", "solr.MultiAuthPlugin", 5, user, pass); + verifySecurityStatus( + cl, + baseUrl + authzPrefix, + "authorization/class", + "solr.MultiAuthRuleBasedAuthorizationPlugin", + 5, + user, + pass); // anonymous requests are blocked by all plugins int statusCode = doHttpGetAnonymous(cl, baseUrl + "/admin/info/system"); assertEquals("anonymous get succeeded but should not have", 401, statusCode); // update blockUnknown to allow anonymous for the basic plugin - String command = "{\n" + - "'set-property': { 'basic': {'blockUnknown':false} }\n" + - "}"; + String 
command = "{\n" + "'set-property': { 'basic': {'blockUnknown':false} }\n" + "}"; doHttpPost(cl, baseUrl + authcPrefix, command, user, pass, 200); statusCode = doHttpGetAnonymous(cl, baseUrl + "/admin/info/system"); assertEquals("anonymous get failed but should have succeeded", 200, statusCode); - // For the multi-auth plugin, every command is wrapped with an object that identifies the "scheme" - command = "{\n" + - "'set-user': {'harry':'HarryIsCool'}\n" + - "}"; + // For the multi-auth plugin, every command is wrapped with an object that identifies the + // "scheme" + command = "{\n" + "'set-user': {'harry':'HarryIsCool'}\n" + "}"; // no scheme identified! doHttpPost(cl, baseUrl + authcPrefix, command, user, pass, 400); - command = "{\n" + - "'set-user': { 'foo': {'harry':'HarryIsCool'} }\n" + - "}"; + command = "{\n" + "'set-user': { 'foo': {'harry':'HarryIsCool'} }\n" + "}"; // no "foo" scheme configured doHttpPost(cl, baseUrl + authcPrefix, command, user, pass, 400); - command = "{\n" + - "'set-user': { 'basic': {'harry':'HarryIsCool'} }\n" + - "}"; + command = "{\n" + "'set-user': { 'basic': {'harry':'HarryIsCool'} }\n" + "}"; // no creds, should fail ... doHttpPost(cl, baseUrl + authcPrefix, command, null, null, 401); // with basic creds, should pass ... doHttpPost(cl, baseUrl + authcPrefix, command, user, pass, 200); - verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/schemes[0]/credentials/harry", NOT_NULL_PREDICATE, 5, user, pass); + verifySecurityStatus( + cl, + baseUrl + authcPrefix, + "authentication/schemes[0]/credentials/harry", + NOT_NULL_PREDICATE, + 5, + user, + pass); // authz command but missing the "scheme" wrapper - command = "{\n" + - "'set-user-role': {'harry':['users']}\n" + - "}"; + command = "{\n" + "'set-user-role': {'harry':['users']}\n" + "}"; doHttpPost(cl, baseUrl + authzPrefix, command, user, pass, 400); // add "harry" to the "users" role ... 
- command = "{\n" + - "'set-user-role': { 'basic': {'harry':['users']} }\n" + - "}"; + command = "{\n" + "'set-user-role': { 'basic': {'harry':['users']} }\n" + "}"; doHttpPost(cl, baseUrl + authzPrefix, command, user, pass, 200); - verifySecurityStatus(cl, baseUrl + authzPrefix, "authorization/schemes[0]/user-role/harry", NOT_NULL_PREDICATE, 5, user, pass); + verifySecurityStatus( + cl, + baseUrl + authzPrefix, + "authorization/schemes[0]/user-role/harry", + NOT_NULL_PREDICATE, + 5, + user, + pass); // give the users role a custom permission - verifySecurityStatus(cl, baseUrl + authzPrefix, "authorization/permissions[6]", NULL_PREDICATE, 5, user, pass); - command = "{\n" + - "'set-permission': { 'name':'k8s-zk', 'role':'users', 'collection':null, 'path':'/admin/zookeeper/status' }\n" + - "}"; + verifySecurityStatus( + cl, baseUrl + authzPrefix, "authorization/permissions[6]", NULL_PREDICATE, 5, user, pass); + command = + "{\n" + + "'set-permission': { 'name':'k8s-zk', 'role':'users', 'collection':null, 'path':'/admin/zookeeper/status' }\n" + + "}"; doHttpPost(cl, baseUrl + authzPrefix, command, user, pass, 200); - verifySecurityStatus(cl, baseUrl + authzPrefix, "authorization/permissions[6]/path", new ExpectedValuePredicate("/admin/zookeeper/status"), 5, user, pass); - - command = "{\n" + - "'update-permission': { 'index':'7', 'name':'k8s-zk', 'role':'users', 'collection':null, 'path':'/admin/zookeeper/status2' }\n" + - "}"; + verifySecurityStatus( + cl, + baseUrl + authzPrefix, + "authorization/permissions[6]/path", + new ExpectedValuePredicate("/admin/zookeeper/status"), + 5, + user, + pass); + + command = + "{\n" + + "'update-permission': { 'index':'7', 'name':'k8s-zk', 'role':'users', 'collection':null, 'path':'/admin/zookeeper/status2' }\n" + + "}"; doHttpPost(cl, baseUrl + authzPrefix, command, user, pass, 200); - verifySecurityStatus(cl, baseUrl + authzPrefix, "authorization/permissions[6]/path", new ExpectedValuePredicate("/admin/zookeeper/status2"), 5, user, pass); + verifySecurityStatus( + cl, + baseUrl + authzPrefix, + "authorization/permissions[6]/path", + new ExpectedValuePredicate("/admin/zookeeper/status2"), + 5, + user, + pass); // delete the permission - command = "{\n" + - "'delete-permission': 7\n" + - "}"; + command = "{\n" + "'delete-permission': 7\n" + "}"; doHttpPost(cl, baseUrl + authzPrefix, command, user, pass, 200); - verifySecurityStatus(cl, baseUrl + authzPrefix, "authorization/permissions[6]", NULL_PREDICATE, 5, user, pass); + verifySecurityStatus( + cl, baseUrl + authzPrefix, "authorization/permissions[6]", NULL_PREDICATE, 5, user, pass); // delete the user - command = "{\n" + - "'delete-user': { 'basic': 'harry' }\n" + - "}"; + command = "{\n" + "'delete-user': { 'basic': 'harry' }\n" + "}"; doHttpPost(cl, baseUrl + authcPrefix, command, user, pass, 200); - verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/schemes[0]/credentials/harry", NULL_PREDICATE, 5, user, pass); + verifySecurityStatus( + cl, + baseUrl + authcPrefix, + "authentication/schemes[0]/credentials/harry", + NULL_PREDICATE, + 5, + user, + pass); // update the property on the mock (just to test routing to the mock plugin) - command = "{\n" + - "'set-property': { 'mock': { 'blockUnknown':false } }\n" + - "}"; - - doHttpPostWithHeader(cl, baseUrl + authcPrefix, command, new BasicHeader("Authorization", "mock foo"), 200); - verifySecurityStatus(cl, baseUrl + authcPrefix, "authentication/schemes[1]/blockUnknown", new ExpectedValuePredicate(Boolean.FALSE), 5, user, pass); + command = 
"{\n" + "'set-property': { 'mock': { 'blockUnknown':false } }\n" + "}"; + + doHttpPostWithHeader( + cl, baseUrl + authcPrefix, command, new BasicHeader("Authorization", "mock foo"), 200); + verifySecurityStatus( + cl, + baseUrl + authcPrefix, + "authentication/schemes[1]/blockUnknown", + new ExpectedValuePredicate(Boolean.FALSE), + 5, + user, + pass); } finally { if (cl != null) { HttpClientUtil.close(cl); @@ -213,15 +254,16 @@ public String getName() { } } - public static final class MockAuthPluginForTesting extends AuthenticationPlugin implements ConfigEditablePlugin { + public static final class MockAuthPluginForTesting extends AuthenticationPlugin + implements ConfigEditablePlugin { @Override - public void init(Map pluginConfig) { - - } + public void init(Map pluginConfig) {} @Override - public boolean doAuthenticate(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) throws Exception { + public boolean doAuthenticate( + HttpServletRequest request, HttpServletResponse response, FilterChain filterChain) + throws Exception { Principal principal = new MockPrincipal(); request = wrapWithPrincipal(request, principal, "mock"); filterChain.doFilter(request, response); @@ -229,7 +271,8 @@ public boolean doAuthenticate(HttpServletRequest request, HttpServletResponse re } @Override - public Map edit(Map latestConf, List commands) { + public Map edit( + Map latestConf, List commands) { for (CommandOperation op : commands) { if ("set-property".equals(op.name)) { for (Map.Entry e : op.getDataMap().entrySet()) { @@ -241,7 +284,8 @@ public Map edit(Map latestConf, List config = new HashMap<>(); + Map config = new HashMap<>(); config.put("class", "solr.MultiDestinationAuditLogger"); config.put("async", false); config.put("eventTypes", Arrays.asList(AuditEvent.EventType.COMPLETED.name())); ArrayList> plugins = new ArrayList>(); - Map conf1 = new HashMap<>(); + Map conf1 = new HashMap<>(); conf1.put("class", "solr.SolrLogAuditLoggerPlugin"); conf1.put("async", false); conf1.put("eventTypes", Arrays.asList(AuditEvent.EventType.ANONYMOUS.name())); plugins.add(conf1); - Map conf2 = new HashMap<>(); + Map conf2 = new HashMap<>(); conf2.put("class", "solr.MockAuditLoggerPlugin"); conf2.put("async", false); conf2.put("eventTypes", Arrays.asList(AuditEvent.EventType.AUTHENTICATED.name())); @@ -54,10 +53,14 @@ public void init() throws IOException { al.init(config); al.doAudit(new AuditEvent(AuditEvent.EventType.ANONYMOUS).setUsername("me")); - assertEquals(0, ((MockAuditLoggerPlugin)al.plugins.get(1)).events.size()); // not configured for ANONYMOUS + assertEquals( + 0, + ((MockAuditLoggerPlugin) al.plugins.get(1)).events.size()); // not configured for ANONYMOUS al.doAudit(new AuditEvent(AuditEvent.EventType.AUTHENTICATED).setUsername("me")); - assertEquals(1, ((MockAuditLoggerPlugin)al.plugins.get(1)).events.size()); // configured for authenticated - + assertEquals( + 1, + ((MockAuditLoggerPlugin) al.plugins.get(1)).events.size()); // configured for authenticated + assertFalse(al.shouldLog(AuditEvent.EventType.ERROR)); assertFalse(al.shouldLog(AuditEvent.EventType.UNAUTHORIZED)); assertTrue(al.shouldLog(AuditEvent.EventType.COMPLETED)); @@ -65,7 +68,7 @@ public void init() throws IOException { assertTrue(al.shouldLog(AuditEvent.EventType.AUTHENTICATED)); assertEquals(0, config.size()); - + al.close(); loader.close(); } @@ -73,7 +76,7 @@ public void init() throws IOException { @Test public void wrongConfigParam() throws IOException { MultiDestinationAuditLogger al = new 
MultiDestinationAuditLogger(); - Map config = new HashMap<>(); + Map config = new HashMap<>(); config.put("class", "solr.MultiDestinationAuditLogger"); config.put("foo", "Should complain"); al.init(config); diff --git a/solr/core/src/test/org/apache/solr/security/PKIAuthenticationIntegrationTest.java b/solr/core/src/test/org/apache/solr/security/PKIAuthenticationIntegrationTest.java index 787da2632a4..43f0c45f438 100644 --- a/solr/core/src/test/org/apache/solr/security/PKIAuthenticationIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/security/PKIAuthenticationIntegrationTest.java @@ -16,12 +16,13 @@ */ package org.apache.solr.security; -import javax.servlet.http.HttpServletRequest; +import static java.util.Collections.singletonMap; + import java.lang.invoke.MethodHandles; import java.security.Principal; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; - +import javax.servlet.http.HttpServletRequest; import org.apache.http.client.HttpClient; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -35,25 +36,28 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static java.util.Collections.singletonMap; - public class PKIAuthenticationIntegrationTest extends SolrCloudAuthTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final String COLLECTION = "pkiCollection"; - + @BeforeClass public static void setupCluster() throws Exception { - final String SECURITY_CONF = Utils.toJSONString - (Map.of("authorization", singletonMap("class", MockAuthorizationPlugin.class.getName()), - "authentication", singletonMap("class", MockAuthenticationPlugin.class.getName()))); - + final String SECURITY_CONF = + Utils.toJSONString( + Map.of( + "authorization", + singletonMap("class", MockAuthorizationPlugin.class.getName()), + "authentication", + singletonMap("class", MockAuthenticationPlugin.class.getName()))); + configureCluster(2) - .addConfig("conf", configset("cloud-minimal")) - .withSecurityJson(SECURITY_CONF) - .configure(); + .addConfig("conf", configset("cloud-minimal")) + .withSecurityJson(SECURITY_CONF) + .configure(); - CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 1).process(cluster.getSolrClient()); + CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 1) + .process(cluster.getSolrClient()); cluster.waitForActiveCollection(COLLECTION, 2, 2); } @@ -63,8 +67,14 @@ public void testPkiAuth() throws Exception { HttpClient httpClient = cluster.getSolrClient().getHttpClient(); for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { String baseUrl = jetty.getBaseUrl().toString(); - verifySecurityStatus(httpClient, baseUrl + "/admin/authorization", "authorization/class", MockAuthorizationPlugin.class.getName(), 20); - verifySecurityStatus(httpClient, baseUrl + "/admin/authentication", "authentication.enabled", "true", 20); + verifySecurityStatus( + httpClient, + baseUrl + "/admin/authorization", + "authorization/class", + MockAuthorizationPlugin.class.getName(), + 20); + verifySecurityStatus( + httpClient, baseUrl + "/admin/authentication", "authentication.enabled", "true", 20); } log.info("Starting test"); ModifiableSolrParams params = new ModifiableSolrParams(); @@ -74,27 +84,30 @@ public void testPkiAuth() throws Exception { // This should work fine. 
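// In short, per the wiring below: MockAuthenticationPlugin marks a request as user "solr" when its __user/__pwd query parameters match, PKIAuthenticationPlugin forwards that principal on the node-to-node sub-requests of the query, and the MockAuthorizationPlugin predicate counts every node that sees the "solr" principal on /select, which is why the test can assert more than two hits across the two-shard collection.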
final AtomicInteger count = new AtomicInteger(); - - MockAuthorizationPlugin.predicate = context -> { - if ("/select".equals(context.getResource())) { - Principal principal = context.getUserPrincipal(); - log.info("principalIs : {}", principal); - if (principal != null && principal.getName().equals("solr")) { - count.incrementAndGet(); + MockAuthorizationPlugin.predicate = + context -> { + if ("/select".equals(context.getResource())) { + Principal principal = context.getUserPrincipal(); + log.info("principalIs : {}", principal); + if (principal != null && principal.getName().equals("solr")) { + count.incrementAndGet(); + } } - } - }; + }; - MockAuthenticationPlugin.predicate = servletRequest -> { - String s = ((HttpServletRequest) servletRequest).getQueryString(); - if (s != null && s.contains("__user=solr") && s.contains("__pwd=SolrRocks")) { - servletRequest.setAttribute(Principal.class.getName(), "solr"); - } - return true; - }; + MockAuthenticationPlugin.predicate = + servletRequest -> { + String s = ((HttpServletRequest) servletRequest).getQueryString(); + if (s != null && s.contains("__user=solr") && s.contains("__pwd=SolrRocks")) { + servletRequest.setAttribute(Principal.class.getName(), "solr"); + } + return true; + }; QueryRequest query = new QueryRequest(params); query.process(cluster.getSolrClient(), COLLECTION); - assertTrue("all nodes must get the user solr , no:of nodes got solr : " + count.get(), count.get() > 2); + assertTrue( + "all nodes must get the user solr; no. of nodes that got solr: " + count.get(), + count.get() > 2); assertPkiAuthMetricsMinimums(2, 2, 0, 0, 0, 0); } @@ -103,5 +116,4 @@ public void distribTearDown() throws Exception { MockAuthenticationPlugin.predicate = null; MockAuthorizationPlugin.predicate = null; } - } diff --git a/solr/core/src/test/org/apache/solr/security/PrincipalWithUserRoles.java b/solr/core/src/test/org/apache/solr/security/PrincipalWithUserRoles.java index 8d27d0b00d5..c75b614b48c 100644 --- a/solr/core/src/test/org/apache/solr/security/PrincipalWithUserRoles.java +++ b/solr/core/src/test/org/apache/solr/security/PrincipalWithUserRoles.java @@ -21,11 +21,11 @@ import java.util.Set; /** - * Type of Principal object that can contain also a list of roles the user has. - * One use case can be to keep track of user-role mappings in an Identity Server - * external to Solr and pass the information to Solr in a signed JWT token or in - * another secure manner. The role information can then be used to authorize - * requests without the need to maintain or lookup what roles each user belongs to. + * Type of Principal object that can also contain a list of roles the user has. One use case can be + * to keep track of user-role mappings in an Identity Server external to Solr and pass the + * information to Solr in a signed JWT token or in another secure manner. The role information can + * then be used to authorize requests without the need to maintain or lookup what roles each user + * belongs to. 
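+ * + * <p>For example, an authentication plugin might hand Solr a hypothetical {@code new + * PrincipalWithUserRoles("jan", Set.of("users"))}; authorization code can then read the roles + * back via {@code getVerifiedRoles()} instead of looking up a user-role mapping.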
*/ public class PrincipalWithUserRoles implements Principal, VerifiedUserRoles { private final String username; @@ -34,6 +34,7 @@ public class PrincipalWithUserRoles implements Principal, VerifiedUserRoles { /** * User principal with user name as well as one or more roles that he/she belongs to + * * @param username string with user name for user * @param roles a set of roles that we know this user belongs to, or empty list for no roles */ @@ -55,9 +56,7 @@ public String getName() { return this.username; } - /** - * Gets the list of roles - */ + /** Gets the list of roles */ @Override public Set<String> getVerifiedRoles() { return roles; @@ -83,9 +82,6 @@ public int hashCode() { @Override public String toString() { - return "PrincipalWithUserRoles{" + - "username='" + username + '\'' + - ", roles=" + roles + - '}'; + return "PrincipalWithUserRoles{" + "username='" + username + '\'' + ", roles=" + roles + '}'; } } diff --git a/solr/core/src/test/org/apache/solr/security/SolrLogAuditLoggerPluginTest.java b/solr/core/src/test/org/apache/solr/security/SolrLogAuditLoggerPluginTest.java index a98a9ebb678..d0e9c155407 100644 --- a/solr/core/src/test/org/apache/solr/security/SolrLogAuditLoggerPluginTest.java +++ b/solr/core/src/test/org/apache/solr/security/SolrLogAuditLoggerPluginTest.java @@ -17,18 +17,17 @@ package org.apache.solr.security; +import static org.apache.solr.security.AuditLoggerPluginTest.EVENT_ANONYMOUS; +import static org.apache.solr.security.AuditLoggerPluginTest.EVENT_AUTHENTICATED; + import java.io.IOException; import java.util.HashMap; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.junit.After; import org.junit.Before; import org.junit.Test; -import static org.apache.solr.security.AuditLoggerPluginTest.EVENT_ANONYMOUS; -import static org.apache.solr.security.AuditLoggerPluginTest.EVENT_AUTHENTICATED; - public class SolrLogAuditLoggerPluginTest extends SolrTestCaseJ4 { private SolrLogAuditLoggerPlugin plugin; private HashMap<String, Object> config; @@ -47,7 +46,7 @@ public void badConfig() throws IOException { config.put("invalid", "parameter"); plugin.init(config); } - + @Test public void audit() { plugin.init(config); @@ -57,9 +56,11 @@ public void audit() { @Test public void eventFormatter() { plugin.init(config); - assertEquals("type=\"ANONYMOUS\" message=\"Anonymous\" method=\"GET\" status=\"-1\" requestType=\"null\" username=\"null\" resource=\"/collection1\" queryString=\"null\" collections=null", + assertEquals( + "type=\"ANONYMOUS\" message=\"Anonymous\" method=\"GET\" status=\"-1\" requestType=\"null\" username=\"null\" resource=\"/collection1\" queryString=\"null\" collections=null", plugin.formatter.formatEvent(EVENT_ANONYMOUS)); - assertEquals("type=\"AUTHENTICATED\" message=\"Authenticated\" method=\"GET\" status=\"-1\" requestType=\"null\" username=\"Jan\" resource=\"/collection1\" queryString=\"null\" collections=null", + assertEquals( + "type=\"AUTHENTICATED\" message=\"Authenticated\" method=\"GET\" status=\"-1\" requestType=\"null\" username=\"Jan\" resource=\"/collection1\" queryString=\"null\" collections=null", plugin.formatter.formatEvent(EVENT_AUTHENTICATED)); } diff --git a/solr/core/src/test/org/apache/solr/security/TestAuthorizationFramework.java b/solr/core/src/test/org/apache/solr/security/TestAuthorizationFramework.java index dd5548c60b6..74d0cbdda41 100644 --- a/solr/core/src/test/org/apache/solr/security/TestAuthorizationFramework.java +++ b/solr/core/src/test/org/apache/solr/security/TestAuthorizationFramework.java @@ 
-23,7 +23,6 @@ import java.util.Objects; import java.util.concurrent.TimeUnit; import java.util.function.Predicate; - import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpGet; import org.apache.http.util.EntityUtils; @@ -44,17 +43,22 @@ public class TestAuthorizationFramework extends AbstractFullDistribZkTestBase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); static final int TIMEOUT = 10000; + public void distribSetUp() throws Exception { super.distribSetUp(); - try (ZkStateReader zkStateReader = new ZkStateReader(zkServer.getZkAddress(), - TIMEOUT, TIMEOUT)) { - zkStateReader.getZkClient().create(ZkStateReader.SOLR_SECURITY_CONF_PATH, - "{\"authorization\":{\"class\":\"org.apache.solr.security.MockAuthorizationPlugin\"}}".getBytes(StandardCharsets.UTF_8), - CreateMode.PERSISTENT, true); + try (ZkStateReader zkStateReader = + new ZkStateReader(zkServer.getZkAddress(), TIMEOUT, TIMEOUT)) { + zkStateReader + .getZkClient() + .create( + ZkStateReader.SOLR_SECURITY_CONF_PATH, + "{\"authorization\":{\"class\":\"org.apache.solr.security.MockAuthorizationPlugin\"}}" + .getBytes(StandardCharsets.UTF_8), + CreateMode.PERSISTENT, + true); } } - @Test public void authorizationFrameworkTest() throws Exception { MockAuthorizationPlugin.denyUsers.add("user1"); @@ -63,7 +67,12 @@ public void authorizationFrameworkTest() throws Exception { try { waitForThingsToLevelOut(10, TimeUnit.SECONDS); String baseUrl = jettys.get(0).getBaseUrl().toString(); - verifySecurityStatus(cloudClient.getLbClient().getHttpClient(), baseUrl + "/admin/authorization", "authorization/class", MockAuthorizationPlugin.class.getName(), 20); + verifySecurityStatus( + cloudClient.getLbClient().getHttpClient(), + baseUrl + "/admin/authorization", + "authorization/class", + MockAuthorizationPlugin.class.getName(), + 20); log.info("Starting test"); ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*"); @@ -78,7 +87,6 @@ public void authorizationFrameworkTest() throws Exception { } finally { MockAuthorizationPlugin.denyUsers.clear(); MockAuthorizationPlugin.protectedResources.clear(); - } } @@ -86,16 +94,18 @@ public void authorizationFrameworkTest() throws Exception { public void distribTearDown() throws Exception { super.distribTearDown(); MockAuthorizationPlugin.denyUsers.clear(); - } - public static void verifySecurityStatus(HttpClient cl, String url, String objPath, Object expected, int count) throws Exception { + public static void verifySecurityStatus( + HttpClient cl, String url, String objPath, Object expected, int count) throws Exception { boolean success = false; String s = null; List<String> hierarchy = StrUtils.splitSmart(objPath, '/'); for (int i = 0; i < count; i++) { HttpGet get = new HttpGet(url); - s = EntityUtils.toString(cl.execute(get, HttpClientUtil.createNewHttpClientRequestContext()).getEntity()); + s = + EntityUtils.toString( + cl.execute(get, HttpClientUtil.createNewHttpClientRequestContext()).getEntity()); Map m = (Map) Utils.fromJSONString(s); Object actual = Utils.getObjectByPath(m, true, hierarchy); @@ -113,6 +123,5 @@ public static void verifySecurityStatus(HttpClient cl, String url, String objPat Thread.sleep(50); } assertTrue("No match for " + objPath + " = " + expected + ", full response = " + s, success); - } } diff --git a/solr/core/src/test/org/apache/solr/security/TestExternalRoleRuleBasedAuthorizationPlugin.java 
b/solr/core/src/test/org/apache/solr/security/TestExternalRoleRuleBasedAuthorizationPlugin.java index c36cc255307..8102a6a6fa6 100644 --- a/solr/core/src/test/org/apache/solr/security/TestExternalRoleRuleBasedAuthorizationPlugin.java +++ b/solr/core/src/test/org/apache/solr/security/TestExternalRoleRuleBasedAuthorizationPlugin.java @@ -23,13 +23,14 @@ import java.util.HashMap; import java.util.HashSet; import java.util.Map; - import org.apache.http.auth.BasicUserPrincipal; /** - * Tests {@link ExternalRoleRuleBasedAuthorizationPlugin} through simulating principals with roles attached + * Tests {@link ExternalRoleRuleBasedAuthorizationPlugin} through simulating principals with roles + * attached */ -public class TestExternalRoleRuleBasedAuthorizationPlugin extends BaseTestRuleBasedAuthorizationPlugin { +public class TestExternalRoleRuleBasedAuthorizationPlugin + extends BaseTestRuleBasedAuthorizationPlugin { private HashMap<String, Principal> principals; @Override @@ -58,9 +59,11 @@ AuthorizationContext getMockContext(Map<String, Object> values) { @Override public Principal getUserPrincipal() { String userPrincipal = (String) values.get("userPrincipal"); - return userPrincipal == null ? null : - principals.get(userPrincipal) != null ? principals.get(userPrincipal) : - new BasicUserPrincipal(userPrincipal); + return userPrincipal == null + ? null + : principals.get(userPrincipal) != null + ? principals.get(userPrincipal) + : new BasicUserPrincipal(userPrincipal); } }; } diff --git a/solr/core/src/test/org/apache/solr/security/TestPKIAuthenticationPlugin.java b/solr/core/src/test/org/apache/solr/security/TestPKIAuthenticationPlugin.java index 8044b8fb590..f4be6e1eed7 100644 --- a/solr/core/src/test/org/apache/solr/security/TestPKIAuthenticationPlugin.java +++ b/solr/core/src/test/org/apache/solr/security/TestPKIAuthenticationPlugin.java @@ -16,17 +16,23 @@ */ package org.apache.solr.security; -import javax.servlet.FilterChain; -import javax.servlet.ServletRequest; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + import java.nio.ByteBuffer; import java.security.Principal; import java.security.PublicKey; import java.time.Instant; import java.util.Base64; import java.util.concurrent.atomic.AtomicReference; - +import javax.servlet.FilterChain; +import javax.servlet.ServletRequest; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; import org.apache.http.Header; import org.apache.http.HttpHeaders; import org.apache.http.auth.BasicUserPrincipal; @@ -40,13 +46,6 @@ import org.junit.Test; import org.mockito.ArgumentMatchers; -import static java.nio.charset.StandardCharsets.UTF_8; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - public class TestPKIAuthenticationPlugin extends SolrTestCaseJ4 { private static class MockPKIAuthenticationPlugin extends PKIAuthenticationPlugin { @@ -79,17 +78,19 @@ boolean isSolrThread() { final AtomicReference
header = new AtomicReference<>(); final AtomicReference wrappedRequestByFilter = new AtomicReference<>(); - final FilterChain filterChain = (servletRequest, servletResponse) -> wrappedRequestByFilter.set(servletRequest); + final FilterChain filterChain = + (servletRequest, servletResponse) -> wrappedRequestByFilter.set(servletRequest); final String nodeName = "node_x_233"; final CryptoKeys.RSAKeyPair aKeyPair = new CryptoKeys.RSAKeyPair(); - final LocalSolrQueryRequest localSolrQueryRequest = new LocalSolrQueryRequest(null, new ModifiableSolrParams()) { - @Override - public Principal getUserPrincipal() { - return principal.get(); - } - }; + final LocalSolrQueryRequest localSolrQueryRequest = + new LocalSolrQueryRequest(null, new ModifiableSolrParams()) { + @Override + public Principal getUserPrincipal() { + return principal.get(); + } + }; String headerKey; HttpServletRequest mockReq; @@ -140,24 +141,26 @@ public void testBasicRequest() throws Exception { assertNotNull(wrappedRequestByFilter.get()); assertNotNull(((HttpServletRequest) wrappedRequestByFilter.get()).getUserPrincipal()); - assertEquals(username, ((HttpServletRequest) wrappedRequestByFilter.get()).getUserPrincipal().getName()); + assertEquals( + username, ((HttpServletRequest) wrappedRequestByFilter.get()).getUserPrincipal().getName()); } public void testSuperUser() throws Exception { // Simulate the restart of a node, this will return a different key on subsequent invocations. // Create it in advance because it can take some time and should be done before header is set - MockPKIAuthenticationPlugin mock1 = new MockPKIAuthenticationPlugin(nodeName) { - boolean firstCall = true; - - @Override - PublicKey getRemotePublicKey(String ignored) { - try { - return firstCall ? myKey : mock.myKey; - } finally { - firstCall = false; - } - } - }; + MockPKIAuthenticationPlugin mock1 = + new MockPKIAuthenticationPlugin(nodeName) { + boolean firstCall = true; + + @Override + PublicKey getRemotePublicKey(String ignored) { + try { + return firstCall ? 
myKey : mock.myKey; + } finally { + firstCall = false; + } + } + }; // Setup regular superuser request mock.solrRequestInfo = null; @@ -168,12 +171,14 @@ PublicKey getRemotePublicKey(String ignored) { assertTrue(mock.authenticate(mockReq, null, filterChain)); assertNotNull(wrappedRequestByFilter.get()); - assertEquals("$", ((HttpServletRequest) wrappedRequestByFilter.get()).getUserPrincipal().getName()); + assertEquals( + "$", ((HttpServletRequest) wrappedRequestByFilter.get()).getUserPrincipal().getName()); // With the simulated restart assertTrue(mock1.authenticate(mockReq, null, filterChain)); assertNotNull(wrappedRequestByFilter.get()); - assertEquals("$", ((HttpServletRequest) wrappedRequestByFilter.get()).getUserPrincipal().getName()); + assertEquals( + "$", ((HttpServletRequest) wrappedRequestByFilter.get()).getUserPrincipal().getName()); mock1.close(); } @@ -189,23 +194,29 @@ public void testProtocolMismatch() throws Exception { HttpServletResponse response = mock(HttpServletResponse.class); // This will fail in the same way that a missing header would fail - assertFalse("Should have failed authentication", mock.authenticate(mockReq, response, filterChain)); + assertFalse( + "Should have failed authentication", mock.authenticate(mockReq, response, filterChain)); verify(response).setHeader(HttpHeaders.WWW_AUTHENTICATE, PKIAuthenticationPlugin.HEADER_V2); verify(response).sendError(ArgumentMatchers.eq(401), anyString()); - assertNull("Should not have proceeded after authentication failure", wrappedRequestByFilter.get()); + assertNull( + "Should not have proceeded after authentication failure", wrappedRequestByFilter.get()); } public void testParseCipher() { - for (String validUser: new String[]{"user1", "$", "some user","some 123"}) { - for (long validTimestamp: new long[]{Instant.now().toEpochMilli(), 99999999999L, 9999999999999L}) { + for (String validUser : new String[] {"user1", "$", "some user", "some 123"}) { + for (long validTimestamp : + new long[] {Instant.now().toEpochMilli(), 99999999999L, 9999999999999L}) { String s = validUser + " " + validTimestamp; byte[] payload = s.getBytes(UTF_8); byte[] payloadCipher = aKeyPair.encrypt(ByteBuffer.wrap(payload)); String base64Cipher = Base64.getEncoder().encodeToString(payloadCipher); - PKIAuthenticationPlugin.PKIHeaderData header = PKIAuthenticationPlugin.parseCipher(base64Cipher, aKeyPair.getPublicKey()); - assertNotNull("Expecting valid header for user " + validUser + " and timestamp " + validTimestamp, header); + PKIAuthenticationPlugin.PKIHeaderData header = + PKIAuthenticationPlugin.parseCipher(base64Cipher, aKeyPair.getPublicKey()); + assertNotNull( + "Expecting valid header for user " + validUser + " and timestamp " + validTimestamp, + header); assertEquals(validUser, header.userName); assertEquals(validTimestamp, header.timestamp); } @@ -237,7 +248,9 @@ public void testParseCipherInvalidKey() { byte[] payload = s.getBytes(UTF_8); byte[] payloadCipher = aKeyPair.encrypt(ByteBuffer.wrap(payload)); String base64Cipher = Base64.getEncoder().encodeToString(payloadCipher); - assertNull(PKIAuthenticationPlugin.parseCipher(base64Cipher, new CryptoKeys.RSAKeyPair().getPublicKey())); + assertNull( + PKIAuthenticationPlugin.parseCipher( + base64Cipher, new CryptoKeys.RSAKeyPair().getPublicKey())); } public void testParseCipherNoSpace() { @@ -262,19 +275,25 @@ public void testParseCipherInvalidKeyExample() { /* This test shows a case with an invalid public key for which the decrypt will return an output that triggers SOLR-15961. 
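   A hedged restatement (same names as the assertion below): decrypting base64Cipher with the
   mismatched key does not fail outright but yields bytes that are not a valid
   "<user> <timestampMillis>" payload (e.g. "solr 1626280120000"), and parseCipher is expected
   to absorb that garbage and return null rather than throw.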
*/ - String base64Cipher = "A8tEkMfmA5m5+wVG9xSI46Lhg8MqDFkjPVqXc6Tf6LT/EVIpW3DUrkIygIjk9tSCCAxhHwSvKfVJeujaBtxr19ajmpWjtZKgZOXkynF5aPbDuI+mnvCiTmhLuZYExvnmeYxag6A4Fu2TpA/Wo97S4cIkRgfyag/ZOYM0pZwVAtNoJgTpmODDGrH4W16BXSZ6xm+EV4vrfUqpuuO7U7YiU5fd1tv22Au0ZaY6lPbxAHjeFyD8WrkPPIkEoM14K0G5vAg4wUxpRF/eVlnzhULoPgKFErz7cKVxuvxSsYpVw5oko+ldzyfsnMrC1brqUKA7NxhpdpJzp7bmd8W8/mvZEw=="; - String publicKey = "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsJu1O+A/gGikFSeLGYdgNPrz3ef/tqJP1sRqzkVjnBcdyI2oXMmAWF+yDe0Zmya+HevyOI8YN2Yaq6aCLjbHnT364Rno/urhKvR5PmaH/PqXrh3Dl+vn08B74iLVZxZro/v34FGjX8fkiasZggC4AnyLjFkU7POsHhJKSXGslsWe0dq7yaaA2AES/bFwJ3r3FNxUsE+kWEtZG1RKMq8P8wlx/HLDzjYKaGnyApAltBHVx60XHiOC9Oatu5HZb/eKU3jf7sKibrzrRsqwb+iE4ZxxtXkgATuLOl/2ks5Mnkk4u7bPEAgEpEuzQBB4AahMC7r+R5AzRnB4+xx69FP1IwIDAQAB"; - assertNull(PKIAuthenticationPlugin.parseCipher(base64Cipher, CryptoKeys.deserializeX509PublicKey(publicKey))); + String base64Cipher = + "A8tEkMfmA5m5+wVG9xSI46Lhg8MqDFkjPVqXc6Tf6LT/EVIpW3DUrkIygIjk9tSCCAxhHwSvKfVJeujaBtxr19ajmpWjtZKgZOXkynF5aPbDuI+mnvCiTmhLuZYExvnmeYxag6A4Fu2TpA/Wo97S4cIkRgfyag/ZOYM0pZwVAtNoJgTpmODDGrH4W16BXSZ6xm+EV4vrfUqpuuO7U7YiU5fd1tv22Au0ZaY6lPbxAHjeFyD8WrkPPIkEoM14K0G5vAg4wUxpRF/eVlnzhULoPgKFErz7cKVxuvxSsYpVw5oko+ldzyfsnMrC1brqUKA7NxhpdpJzp7bmd8W8/mvZEw=="; + String publicKey = + "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsJu1O+A/gGikFSeLGYdgNPrz3ef/tqJP1sRqzkVjnBcdyI2oXMmAWF+yDe0Zmya+HevyOI8YN2Yaq6aCLjbHnT364Rno/urhKvR5PmaH/PqXrh3Dl+vn08B74iLVZxZro/v34FGjX8fkiasZggC4AnyLjFkU7POsHhJKSXGslsWe0dq7yaaA2AES/bFwJ3r3FNxUsE+kWEtZG1RKMq8P8wlx/HLDzjYKaGnyApAltBHVx60XHiOC9Oatu5HZb/eKU3jf7sKibrzrRsqwb+iE4ZxxtXkgATuLOl/2ks5Mnkk4u7bPEAgEpEuzQBB4AahMC7r+R5AzRnB4+xx69FP1IwIDAQAB"; + assertNull( + PKIAuthenticationPlugin.parseCipher( + base64Cipher, CryptoKeys.deserializeX509PublicKey(publicKey))); } private HttpServletRequest createMockRequest(final AtomicReference
header) { HttpServletRequest mockReq = mock(HttpServletRequest.class); - when(mockReq.getHeader(any(String.class))).then(invocation -> { - if (headerKey.equals(invocation.getArgument(0))) { - if (header.get() == null) return null; - return header.get().getValue(); - } else return null; - }); + when(mockReq.getHeader(any(String.class))) + .then( + invocation -> { + if (headerKey.equals(invocation.getArgument(0))) { + if (header.get() == null) return null; + return header.get().getValue(); + } else return null; + }); when(mockReq.getRequestURI()).thenReturn("/collection1/select"); return mockReq; } diff --git a/solr/core/src/test/org/apache/solr/security/TestSha256AuthenticationProvider.java b/solr/core/src/test/org/apache/solr/security/TestSha256AuthenticationProvider.java index 5acd6db93f6..1646f2a772e 100644 --- a/solr/core/src/test/org/apache/solr/security/TestSha256AuthenticationProvider.java +++ b/solr/core/src/test/org/apache/solr/security/TestSha256AuthenticationProvider.java @@ -16,20 +16,19 @@ */ package org.apache.solr.security; +import static java.util.Collections.singletonMap; + import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.util.CommandOperation; import org.junit.Test; -import static java.util.Collections.singletonMap; - public class TestSha256AuthenticationProvider extends SolrTestCaseJ4 { - public void testAuthenticate(){ + public void testAuthenticate() { Sha256AuthenticationProvider zkAuthenticationProvider = new Sha256AuthenticationProvider(); zkAuthenticationProvider.init(createConfigMap("ignore", "me")); @@ -37,15 +36,15 @@ public void testAuthenticate(){ String user = "marcus"; Map latestConf = createConfigMap(user, pwd); Map params = singletonMap(user, pwd); - Map result = zkAuthenticationProvider.edit(latestConf, - Collections.singletonList(new CommandOperation("set-user",params ))); + Map result = + zkAuthenticationProvider.edit( + latestConf, Collections.singletonList(new CommandOperation("set-user", params))); zkAuthenticationProvider = new Sha256AuthenticationProvider(); zkAuthenticationProvider.init(result); assertTrue(zkAuthenticationProvider.authenticate(user, pwd)); assertFalse(zkAuthenticationProvider.authenticate(user, "WrongPassword")); assertFalse(zkAuthenticationProvider.authenticate("unknownuser", "WrongPassword")); - } public void testBasicAuthCommands() throws IOException { @@ -54,7 +53,8 @@ public void testBasicAuthCommands() throws IOException { Map latestConf = createConfigMap("solr", "SolrRocks"); - CommandOperation blockUnknown = new CommandOperation("set-property", singletonMap("blockUnknown", true)); + CommandOperation blockUnknown = + new CommandOperation("set-property", singletonMap("blockUnknown", true)); basicAuthPlugin.edit(latestConf, Collections.singletonList(blockUnknown)); assertEquals(Boolean.TRUE, latestConf.get("blockUnknown")); basicAuthPlugin.init(latestConf); @@ -69,7 +69,10 @@ public void testBasicAuthCommands() throws IOException { public void testBasicAuthWithCredentials() throws IOException { try (BasicAuthPlugin basicAuthPlugin = new BasicAuthPlugin()) { - Map config = createConfigMap("solr", "IV0EHq1OnNrj6gvRCwvFwTrZ1+z1oBbnQdiVC3otuq0= Ndd7LKvVBAaZIF0QAVi1ekCfAJXr1GGfLtRUXhgrF8c="); + Map config = + createConfigMap( + "solr", + "IV0EHq1OnNrj6gvRCwvFwTrZ1+z1oBbnQdiVC3otuq0= Ndd7LKvVBAaZIF0QAVi1ekCfAJXr1GGfLtRUXhgrF8c="); basicAuthPlugin.init(config); 
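      // A hedged sketch of what the credential string above encodes, assuming the
      // "base64(sha256(sha256(salt || password))) base64(salt)" layout of
      // Sha256AuthenticationProvider (storedSaltToken below is a hypothetical name for the
      // second whitespace-separated token):
      //
      //   byte[] salt = Base64.getDecoder().decode(storedSaltToken);
      //   MessageDigest md = MessageDigest.getInstance("SHA-256");
      //   md.update(salt);
      //   byte[] hash = md.digest("SolrRocks".getBytes(StandardCharsets.UTF_8));
      //   hash = MessageDigest.getInstance("SHA-256").digest(hash);
      //
      // Base64-encoding the final hash should reproduce the first token, which is why
      // authenticate("solr", "SolrRocks") succeeds on the next line.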
assertTrue(basicAuthPlugin.authenticate("solr", "SolrRocks")); } @@ -85,7 +88,10 @@ public void testBasicAuthUserNotFound() throws IOException { public void testBasicAuthDeleteFinalUser() throws IOException { try (BasicAuthPlugin basicAuthPlugin = new BasicAuthPlugin()) { - Map config = createConfigMap("solr", "IV0EHq1OnNrj6gvRCwvFwTrZ1+z1oBbnQdiVC3otuq0= Ndd7LKvVBAaZIF0QAVi1ekCfAJXr1GGfLtRUXhgrF8c="); + Map config = + createConfigMap( + "solr", + "IV0EHq1OnNrj6gvRCwvFwTrZ1+z1oBbnQdiVC3otuq0= Ndd7LKvVBAaZIF0QAVi1ekCfAJXr1GGfLtRUXhgrF8c="); basicAuthPlugin.init(config); assertTrue(basicAuthPlugin.authenticate("solr", "SolrRocks")); @@ -93,7 +99,10 @@ public void testBasicAuthDeleteFinalUser() throws IOException { assertFalse(deleteUser.hasError()); basicAuthPlugin.edit(config, Arrays.asList(deleteUser)); assertTrue(deleteUser.hasError()); - assertTrue(deleteUser.getErrors().contains(Sha256AuthenticationProvider.CANNOT_DELETE_LAST_USER_ERROR)); + assertTrue( + deleteUser + .getErrors() + .contains(Sha256AuthenticationProvider.CANNOT_DELETE_LAST_USER_ERROR)); } } @@ -106,6 +115,4 @@ private Map createConfigMap(String user, String pw) { config.put("credentials", credentials); return config; } - } - diff --git a/solr/core/src/test/org/apache/solr/servlet/CacheHeaderTest.java b/solr/core/src/test/org/apache/solr/servlet/CacheHeaderTest.java index f9c07e51752..fc8bfd77b92 100644 --- a/solr/core/src/test/org/apache/solr/servlet/CacheHeaderTest.java +++ b/solr/core/src/test/org/apache/solr/servlet/CacheHeaderTest.java @@ -16,8 +16,6 @@ */ package org.apache.solr.servlet; - - import java.io.File; import java.io.FileOutputStream; import java.io.OutputStreamWriter; @@ -26,7 +24,6 @@ import java.nio.file.Files; import java.util.Arrays; import java.util.Date; - import org.apache.http.Header; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpRequestBase; @@ -37,12 +34,10 @@ import org.junit.BeforeClass; import org.junit.Test; -/** - * A test case for the several HTTP cache headers emitted by Solr - */ +/** A test case for the several HTTP cache headers emitted by Solr */ public class CacheHeaderTest extends CacheHeaderTestBase { private static File solrHomeDirectory; - + @BeforeClass public static void beforeTest() throws Exception { solrHomeDirectory = createTempDir().toFile(); @@ -51,24 +46,26 @@ public static void beforeTest() throws Exception { } @AfterClass - public static void afterTest() throws Exception { - - } + public static void afterTest() throws Exception {} protected static final String CONTENTS = "id\n100\n101\n102"; @Test public void testCacheVetoHandler() throws Exception { - File f=makeFile(CONTENTS); - HttpRequestBase m=getUpdateMethod("GET", - CommonParams.STREAM_FILE, f.getCanonicalPath(), - CommonParams.STREAM_CONTENTTYPE, "text/csv" ); + File f = makeFile(CONTENTS); + HttpRequestBase m = + getUpdateMethod( + "GET", + CommonParams.STREAM_FILE, + f.getCanonicalPath(), + CommonParams.STREAM_CONTENTTYPE, + "text/csv"); HttpResponse response = getClient().execute(m); assertEquals(200, response.getStatusLine().getStatusCode()); checkVetoHeaders(response, true); Files.delete(f.toPath()); } - + @Test public void testCacheVetoException() throws Exception { HttpRequestBase m = getSelectMethod("GET", "q", "xyz_ignore_exception:solr", "qt", "standard"); @@ -82,8 +79,12 @@ public void testCacheVetoException() throws Exception { protected void checkVetoHeaders(HttpResponse response, boolean checkExpires) throws Exception { Header head = 
response.getFirstHeader("Cache-Control"); assertNotNull("We got no Cache-Control header", head); - assertTrue("We got no no-cache in the Cache-Control header ["+head+"]", head.getValue().contains("no-cache")); - assertTrue("We got no no-store in the Cache-Control header ["+head+"]", head.getValue().contains("no-store")); + assertTrue( + "We got no no-cache in the Cache-Control header [" + head + "]", + head.getValue().contains("no-cache")); + assertTrue( + "We got no no-store in the Cache-Control header [" + head + "]", + head.getValue().contains("no-store")); head = response.getFirstHeader("Pragma"); assertNotNull("We got no Pragma header", head); @@ -93,9 +94,9 @@ protected void checkVetoHeaders(HttpResponse response, boolean checkExpires) thr head = response.getFirstHeader("Expires"); assertNotNull("We got no Expires header:" + Arrays.asList(response.getAllHeaders()), head); Date d = DateUtils.parseDate(head.getValue()); - assertTrue("We got no Expires header far in the past", System - .currentTimeMillis() - - d.getTime() > 100000); + assertTrue( + "We got no Expires header far in the past", + System.currentTimeMillis() - d.getTime() > 100000); } } @@ -107,8 +108,10 @@ protected void doLastModified(String method) throws Exception { HttpResponse response = getClient().execute(get); checkResponseBody(method, response); - assertEquals("Got no response code 200 in initial request", 200, response. - getStatusLine().getStatusCode()); + assertEquals( + "Got no response code 200 in initial request", + 200, + response.getStatusLine().getStatusCode()); Header head = response.getFirstHeader("Last-Modified"); assertNotNull("We got no Last-Modified header", head); @@ -121,36 +124,41 @@ protected void doLastModified(String method) throws Exception { response = getClient().execute(get); checkResponseBody(method, response); - assertEquals("Expected 304 NotModified response with current date", 304, + assertEquals( + "Expected 304 NotModified response with current date", + 304, response.getStatusLine().getStatusCode()); get = getSelectMethod(method); - get.addHeader("If-Modified-Since", DateUtils.formatDate(new Date( - lastModified.getTime() - 10000))); + get.addHeader( + "If-Modified-Since", DateUtils.formatDate(new Date(lastModified.getTime() - 10000))); response = getClient().execute(get); checkResponseBody(method, response); - assertEquals("Expected 200 OK response with If-Modified-Since in the past", - 200, response.getStatusLine().getStatusCode()); + assertEquals( + "Expected 200 OK response with If-Modified-Since in the past", + 200, + response.getStatusLine().getStatusCode()); // If-Unmodified-Since tests get = getSelectMethod(method); - get.addHeader("If-Unmodified-Since", DateUtils.formatDate(new Date( - lastModified.getTime() - 10000))); + get.addHeader( + "If-Unmodified-Since", DateUtils.formatDate(new Date(lastModified.getTime() - 10000))); response = getClient().execute(get); checkResponseBody(method, response); assertEquals( "Expected 412 Precondition failed with If-Unmodified-Since in the past", - 412, response.getStatusLine().getStatusCode()); + 412, + response.getStatusLine().getStatusCode()); get = getSelectMethod(method); - get.addHeader("If-Unmodified-Since", DateUtils - .formatDate(new Date())); + get.addHeader("If-Unmodified-Since", DateUtils.formatDate(new Date())); response = getClient().execute(get); checkResponseBody(method, response); assertEquals( "Expected 200 OK response with If-Unmodified-Since and current date", - 200, response.getStatusLine().getStatusCode()); + 200, + 
response.getStatusLine().getStatusCode()); } // test ETag @@ -160,13 +168,15 @@ protected void doETag(String method) throws Exception { HttpResponse response = getClient().execute(get); checkResponseBody(method, response); - assertEquals("Got no response code 200 in initial request", 200, response - .getStatusLine().getStatusCode()); + assertEquals( + "Got no response code 200 in initial request", + 200, + response.getStatusLine().getStatusCode()); Header head = response.getFirstHeader("ETag"); assertNotNull("We got no ETag in the response", head); - assertTrue("Not a valid ETag", head.getValue().startsWith("\"") - && head.getValue().endsWith("\"")); + assertTrue( + "Not a valid ETag", head.getValue().startsWith("\"") && head.getValue().endsWith("\"")); String etag = head.getValue(); @@ -178,16 +188,18 @@ protected void doETag(String method) throws Exception { checkResponseBody(method, response); assertEquals( "If-None-Match: Got no response code 200 in response to non matching ETag", - 200, response.getStatusLine().getStatusCode()); + 200, + response.getStatusLine().getStatusCode()); // now we set matching ETags get = getSelectMethod(method); get.addHeader("If-None-Match", "\"xyz1223\""); - get.addHeader("If-None-Match", "\"1231323423\", \"1211211\", " - + etag); + get.addHeader("If-None-Match", "\"1231323423\", \"1211211\", " + etag); response = getClient().execute(get); checkResponseBody(method, response); - assertEquals("If-None-Match: Got no response 304 to matching ETag", 304, + assertEquals( + "If-None-Match: Got no response 304 to matching ETag", + 304, response.getStatusLine().getStatusCode()); // we now set the special star ETag @@ -195,7 +207,9 @@ protected void doETag(String method) throws Exception { get.addHeader("If-None-Match", "*"); response = getClient().execute(get); checkResponseBody(method, response); - assertEquals("If-None-Match: Got no response 304 for star ETag", 304, + assertEquals( + "If-None-Match: Got no response 304 for star ETag", + 304, response.getStatusLine().getStatusCode()); // If-Match tests @@ -206,7 +220,8 @@ protected void doETag(String method) throws Exception { checkResponseBody(method, response); assertEquals( "If-Match: Got no response code 412 in response to non matching ETag", - 412, response.getStatusLine().getStatusCode()); + 412, + response.getStatusLine().getStatusCode()); // now we set matching ETags get = getSelectMethod(method); @@ -214,7 +229,9 @@ protected void doETag(String method) throws Exception { get.addHeader("If-Match", "\"1231323423\", \"1211211\", " + etag); response = getClient().execute(get); checkResponseBody(method, response); - assertEquals("If-Match: Got no response 200 to matching ETag", 200, + assertEquals( + "If-Match: Got no response 200 to matching ETag", + 200, response.getStatusLine().getStatusCode()); // now we set the special star ETag @@ -222,8 +239,10 @@ protected void doETag(String method) throws Exception { get.addHeader("If-Match", "*"); response = getClient().execute(get); checkResponseBody(method, response); - assertEquals("If-Match: Got no response 200 to star ETag", 200, response - .getStatusLine().getStatusCode()); + assertEquals( + "If-Match: Got no response 200 to star ETag", + 200, + response.getStatusLine().getStatusCode()); } @Override @@ -257,7 +276,7 @@ protected File makeFile(String contents) { protected File makeFile(String contents, String charset) { try { - File f = createTempFile("cachetest","csv").toFile(); + File f = createTempFile("cachetest", "csv").toFile(); try (Writer out = new 
OutputStreamWriter(new FileOutputStream(f), charset)) { out.write(contents); } diff --git a/solr/core/src/test/org/apache/solr/servlet/CacheHeaderTestBase.java b/solr/core/src/test/org/apache/solr/servlet/CacheHeaderTestBase.java index 09b80209013..b63a6dccf13 100644 --- a/solr/core/src/test/org/apache/solr/servlet/CacheHeaderTestBase.java +++ b/solr/core/src/test/org/apache/solr/servlet/CacheHeaderTestBase.java @@ -16,6 +16,10 @@ */ package org.apache.solr.servlet; +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpGet; @@ -29,19 +33,15 @@ import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.junit.Test; -import java.net.URI; -import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; - public abstract class CacheHeaderTestBase extends SolrJettyTestBase { - protected HttpRequestBase getSelectMethod(String method, String... params) throws URISyntaxException { + protected HttpRequestBase getSelectMethod(String method, String... params) + throws URISyntaxException { HttpSolrClient client = (HttpSolrClient) getSolrClient(); HttpRequestBase m = null; - + ArrayList qparams = new ArrayList<>(); - if(params.length==0) { + if (params.length == 0) { qparams.add(new BasicNameValuePair("q", "solr")); qparams.add(new BasicNameValuePair("qt", "standard")); } @@ -49,9 +49,12 @@ protected HttpRequestBase getSelectMethod(String method, String... params) throw qparams.add(new BasicNameValuePair(params[i * 2], params[i * 2 + 1])); } - URI uri = URI.create(client.getBaseURL() + "/select?" + - URLEncodedUtils.format(qparams, StandardCharsets.UTF_8)); - + URI uri = + URI.create( + client.getBaseURL() + + "/select?" + + URLEncodedUtils.format(qparams, StandardCharsets.UTF_8)); + if ("GET".equals(method)) { m = new HttpGet(uri); } else if ("HEAD".equals(method)) { @@ -59,42 +62,45 @@ protected HttpRequestBase getSelectMethod(String method, String... params) throw } else if ("POST".equals(method)) { m = new HttpPost(uri); } - + return m; } - protected HttpRequestBase getUpdateMethod(String method, String... params) throws URISyntaxException { + protected HttpRequestBase getUpdateMethod(String method, String... 
params) + throws URISyntaxException { HttpSolrClient client = (HttpSolrClient) getSolrClient(); HttpRequestBase m = null; - + ArrayList qparams = new ArrayList<>(); - for(int i=0;i 0); break; case 304: - assertTrue("Response body was not empty for method " + method, + assertTrue( + "Response body was not empty for method " + method, responseBody == null || responseBody.length() == 0); break; case 412: - assertTrue("Response body was not empty for method " + method, + assertTrue( + "Response body was not empty for method " + method, responseBody == null || responseBody.length() == 0); break; default: @@ -119,7 +128,8 @@ protected void checkResponseBody(String method, HttpResponse resp) } } if ("HEAD".equals(method)) { - assertTrue("Response body was not empty for method " + method, + assertTrue( + "Response body was not empty for method " + method, responseBody == null || responseBody.length() == 0); } } @@ -145,7 +155,8 @@ public void testCacheControl() throws Exception { } protected abstract void doCacheControl(String method) throws Exception; + protected abstract void doETag(String method) throws Exception; + protected abstract void doLastModified(String method) throws Exception; - } diff --git a/solr/core/src/test/org/apache/solr/servlet/DirectSolrConnectionTest.java b/solr/core/src/test/org/apache/solr/servlet/DirectSolrConnectionTest.java index 5252d4befc5..6228c9fa9f2 100644 --- a/solr/core/src/test/org/apache/solr/servlet/DirectSolrConnectionTest.java +++ b/solr/core/src/test/org/apache/solr/servlet/DirectSolrConnectionTest.java @@ -17,67 +17,63 @@ package org.apache.solr.servlet; import java.net.URLEncoder; - -import org.apache.solr.common.params.CommonParams; import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.common.params.CommonParams; import org.junit.BeforeClass; - public class DirectSolrConnectionTest extends SolrTestCaseJ4 { - @BeforeClass public static void beforeClass() throws Exception { initCore("solr/crazy-path-to-config.xml", "solr/crazy-path-to-schema.xml"); } - DirectSolrConnection direct; - + @Override - public void setUp() throws Exception - { + public void setUp() throws Exception { super.setUp(); direct = new DirectSolrConnection(h.getCore()); } // Check that a request gets back the echoParams call - public void testSimpleRequest() throws Exception - { + public void testSimpleRequest() throws Exception { String pathAndParams = "/select?wt=xml&version=2.2&echoParams=explicit&q=*:*"; - - String got = direct.request( pathAndParams, null ); - - assertTrue( got.indexOf( "explicit" ) > 5 ); - + + String got = direct.request(pathAndParams, null); + + assertTrue(got.indexOf("explicit") > 5); + // It should throw an exception for unknown handler - expectThrows(Exception.class, () -> direct.request( "/path to nonexistang thingy!!", null )); + expectThrows(Exception.class, () -> direct.request("/path to nonexistang thingy!!", null)); } - // Check that a request gets back the echoParams call - public void testInsertThenSelect() throws Exception - { + public void testInsertThenSelect() throws Exception { String value = "Kittens!!! \u20AC"; - String[] cmds = new String[] { - "42", - "42"+value+"", - "" - }; + String[] cmds = + new String[] { + "42", + "42" + + value + + "", + "" + }; String getIt = "/select?wt=xml&q=id:42"; - + // Test using the Stream body parameter - for( String cmd : cmds ) { - direct.request( "/update?"+CommonParams.STREAM_BODY+"="+URLEncoder.encode(cmd, "UTF-8"), null ); + for (String cmd : cmds) { + direct.request( + "/update?" 
+ CommonParams.STREAM_BODY + "=" + URLEncoder.encode(cmd, "UTF-8"), null); } - String got = direct.request( getIt, null ); - assertTrue( got.indexOf( value ) > 0 ); - + String got = direct.request(getIt, null); + assertTrue(got.indexOf(value) > 0); + // Same thing using the posted body - for( String cmd : cmds ) { - direct.request( "/update", cmd ); + for (String cmd : cmds) { + direct.request("/update", cmd); } - got = direct.request( getIt, null ); - assertTrue( got.indexOf( value ) > 0 ); + got = direct.request(getIt, null); + assertTrue(got.indexOf(value) > 0); } } diff --git a/solr/core/src/test/org/apache/solr/servlet/HttpSolrCallCloudTest.java b/solr/core/src/test/org/apache/solr/servlet/HttpSolrCallCloudTest.java index 85dd630112b..f34fc68b376 100644 --- a/solr/core/src/test/org/apache/solr/servlet/HttpSolrCallCloudTest.java +++ b/solr/core/src/test/org/apache/solr/servlet/HttpSolrCallCloudTest.java @@ -17,17 +17,16 @@ package org.apache.solr.servlet; -import javax.servlet.ReadListener; -import javax.servlet.ServletInputStream; -import javax.servlet.ServletOutputStream; -import javax.servlet.UnavailableException; -import javax.servlet.WriteListener; import java.io.IOException; import java.net.HttpURLConnection; import java.net.URL; import java.util.HashSet; import java.util.Set; - +import javax.servlet.ReadListener; +import javax.servlet.ServletInputStream; +import javax.servlet.ServletOutputStream; +import javax.servlet.UnavailableException; +import javax.servlet.WriteListener; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -47,14 +46,14 @@ public class HttpSolrCallCloudTest extends SolrCloudTestCase { @BeforeClass public static void setupCluster() throws Exception { configureCluster(1) - .addConfig("config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .configure(); - CollectionAdminRequest - .createCollection(COLLECTION, "config", NUM_SHARD, REPLICA_FACTOR) + CollectionAdminRequest.createCollection(COLLECTION, "config", NUM_SHARD, REPLICA_FACTOR) .process(cluster.getSolrClient()); - AbstractDistribZkTestBase.waitForRecoveriesToFinish(COLLECTION, cluster.getSolrClient().getZkStateReader(), - false, true, 30); + AbstractDistribZkTestBase.waitForRecoveriesToFinish( + COLLECTION, cluster.getSolrClient().getZkStateReader(), false, true, 30); } @Test @@ -68,7 +67,8 @@ public void testCoreChosen() throws Exception { @Test public void testWrongUtf8InQ() throws Exception { var baseUrl = cluster.getJettySolrRunner(0).getBaseUrl(); - var request = new URL(baseUrl.toString() + "/" + COLLECTION + "/select?q=%C0"); // Illegal UTF-8 string + var request = + new URL(baseUrl.toString() + "/" + COLLECTION + "/select?q=%C0"); // Illegal UTF-8 string var connection = (HttpURLConnection) request.openConnection(); assertEquals(400, connection.getResponseCode()); } @@ -79,7 +79,9 @@ private void assertCoreChosen(int numCores, TestRequest testRequest) throws Unav SolrDispatchFilter dispatchFilter = jettySolrRunner.getSolrDispatchFilter(); for (int i = 0; i < NUM_SHARD * REPLICA_FACTOR * 20; i++) { if (coreNames.size() == numCores) return; - HttpSolrCall httpSolrCall = new HttpSolrCall(dispatchFilter, dispatchFilter.getCores(), testRequest, new TestResponse(), false); + HttpSolrCall httpSolrCall = + new HttpSolrCall( + dispatchFilter, 
dispatchFilter.getCores(), testRequest, new TestResponse(), false); try { httpSolrCall.init(); } catch (Exception e) { @@ -106,14 +108,10 @@ public boolean isReady() { } @Override - public void setWriteListener(WriteListener writeListener) { - - } + public void setWriteListener(WriteListener writeListener) {} @Override - public void write(int b) throws IOException { - - } + public void write(int b) throws IOException {} }; } @@ -165,9 +163,7 @@ public boolean isReady() { } @Override - public void setReadListener(ReadListener readListener) { - - } + public void setReadListener(ReadListener readListener) {} @Override public int read() throws IOException { @@ -176,5 +172,4 @@ public int read() throws IOException { }; } } - } diff --git a/solr/core/src/test/org/apache/solr/servlet/NoCacheHeaderTest.java b/solr/core/src/test/org/apache/solr/servlet/NoCacheHeaderTest.java index 12445182f14..c7b05783f4e 100644 --- a/solr/core/src/test/org/apache/solr/servlet/NoCacheHeaderTest.java +++ b/solr/core/src/test/org/apache/solr/servlet/NoCacheHeaderTest.java @@ -17,7 +17,6 @@ package org.apache.solr.servlet; import java.util.Date; - import org.apache.http.Header; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpRequestBase; @@ -26,9 +25,7 @@ import org.junit.BeforeClass; import org.junit.Test; -/** - * A test case for the several HTTP cache headers emitted by Solr - */ +/** A test case for the several HTTP cache headers emitted by Solr */ public class NoCacheHeaderTest extends CacheHeaderTestBase { // TODO: fix this test not to directly use the test-files copied to build/ // as its home. it could interfere with other tests! @@ -69,8 +66,10 @@ protected void doLastModified(String method) throws Exception { HttpResponse response = getClient().execute(get); checkResponseBody(method, response); - assertEquals("Got no response code 200 in initial request", 200, response - .getStatusLine().getStatusCode()); + assertEquals( + "Got no response code 200 in initial request", + 200, + response.getStatusLine().getStatusCode()); Header head = response.getFirstHeader("Last-Modified"); assertNull("We got a Last-Modified header", head); @@ -81,25 +80,32 @@ protected void doLastModified(String method) throws Exception { response = getClient().execute(get); checkResponseBody(method, response); - assertEquals("Expected 200 with If-Modified-Since header. We should never get a 304 here", 200, + assertEquals( + "Expected 200 with If-Modified-Since header. We should never get a 304 here", + 200, response.getStatusLine().getStatusCode()); get = getSelectMethod(method); - get.addHeader("If-Modified-Since", DateUtils.formatDate(new Date(System.currentTimeMillis()-10000))); + get.addHeader( + "If-Modified-Since", DateUtils.formatDate(new Date(System.currentTimeMillis() - 10000))); response = getClient().execute(get); checkResponseBody(method, response); - assertEquals("Expected 200 with If-Modified-Since header. We should never get a 304 here", - 200, response.getStatusLine().getStatusCode()); + assertEquals( + "Expected 200 with If-Modified-Since header. 
We should never get a 304 here", + 200, + response.getStatusLine().getStatusCode()); // If-Unmodified-Since tests get = getSelectMethod(method); - get.addHeader("If-Unmodified-Since", DateUtils.formatDate(new Date(System.currentTimeMillis()-10000))); + get.addHeader( + "If-Unmodified-Since", DateUtils.formatDate(new Date(System.currentTimeMillis() - 10000))); response = getClient().execute(get); checkResponseBody(method, response); assertEquals( "Expected 200 with If-Unmodified-Since header. We should never get a 304 here", - 200, response.getStatusLine().getStatusCode()); + 200, + response.getStatusLine().getStatusCode()); get = getSelectMethod(method); get.addHeader("If-Unmodified-Since", DateUtils.formatDate(new Date())); @@ -107,7 +113,8 @@ protected void doLastModified(String method) throws Exception { checkResponseBody(method, response); assertEquals( "Expected 200 with If-Unmodified-Since header. We should never get a 304 here", - 200, response.getStatusLine().getStatusCode()); + 200, + response.getStatusLine().getStatusCode()); } // test ETag @@ -117,8 +124,10 @@ protected void doETag(String method) throws Exception { HttpResponse response = getClient().execute(get); checkResponseBody(method, response); - assertEquals("Got no response code 200 in initial request", 200, response - .getStatusLine().getStatusCode()); + assertEquals( + "Got no response code 200 in initial request", + 200, + response.getStatusLine().getStatusCode()); Header head = response.getFirstHeader("ETag"); assertNull("We got an ETag in the response", head); @@ -131,14 +140,17 @@ protected void doETag(String method) throws Exception { checkResponseBody(method, response); assertEquals( "If-None-Match: Got no response code 200 in response to non matching ETag", - 200, response.getStatusLine().getStatusCode()); + 200, + response.getStatusLine().getStatusCode()); // we now set the special star ETag get = getSelectMethod(method); get.addHeader("If-None-Match", "*"); response = getClient().execute(get); checkResponseBody(method, response); - assertEquals("If-None-Match: Got no response 200 for star ETag", 200, + assertEquals( + "If-None-Match: Got no response 200 for star ETag", + 200, response.getStatusLine().getStatusCode()); // If-Match tests @@ -149,27 +161,30 @@ protected void doETag(String method) throws Exception { checkResponseBody(method, response); assertEquals( "If-Match: Got no response code 200 in response to non matching ETag", - 200, response.getStatusLine().getStatusCode()); + 200, + response.getStatusLine().getStatusCode()); // now we set the special star ETag get = getSelectMethod(method); get.addHeader("If-Match", "*"); response = getClient().execute(get); checkResponseBody(method, response); - assertEquals("If-Match: Got no response 200 to star ETag", 200, response - .getStatusLine().getStatusCode()); + assertEquals( + "If-Match: Got no response 200 to star ETag", + 200, + response.getStatusLine().getStatusCode()); } @Override protected void doCacheControl(String method) throws Exception { - HttpRequestBase m = getSelectMethod(method); - HttpResponse response = getClient().execute(m); - checkResponseBody(method, response); - - Header head = response.getFirstHeader("Cache-Control"); - assertNull("We got a cache-control header in response", head); - - head = response.getFirstHeader("Expires"); - assertNull("We got an Expires header in response", head); + HttpRequestBase m = getSelectMethod(method); + HttpResponse response = getClient().execute(m); + checkResponseBody(method, response); + + Header head 
= response.getFirstHeader("Cache-Control"); + assertNull("We got a cache-control header in response", head); + + head = response.getFirstHeader("Expires"); + assertNull("We got an Expires header in response", head); } } diff --git a/solr/core/src/test/org/apache/solr/servlet/ResponseHeaderTest.java b/solr/core/src/test/org/apache/solr/servlet/ResponseHeaderTest.java index 639a7744b02..dd8c714b870 100644 --- a/solr/core/src/test/org/apache/solr/servlet/ResponseHeaderTest.java +++ b/solr/core/src/test/org/apache/solr/servlet/ResponseHeaderTest.java @@ -16,6 +16,9 @@ */ package org.apache.solr.servlet; +import java.io.File; +import java.io.IOException; +import java.net.URI; import org.apache.commons.io.FileUtils; import org.apache.http.Header; import org.apache.http.HttpResponse; @@ -31,31 +34,28 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.io.File; -import java.io.IOException; -import java.net.URI; - - public class ResponseHeaderTest extends SolrJettyTestBase { - + private static File solrHomeDirectory; - + @BeforeClass public static void beforeTest() throws Exception { solrHomeDirectory = createTempDir().toFile(); setupJettyTestHome(solrHomeDirectory, "collection1"); String top = SolrTestCaseJ4.TEST_HOME() + "/collection1/conf"; - FileUtils.copyFile(new File(top, "solrconfig-headers.xml"), new File(solrHomeDirectory + "/collection1/conf", "solrconfig.xml")); + FileUtils.copyFile( + new File(top, "solrconfig-headers.xml"), + new File(solrHomeDirectory + "/collection1/conf", "solrconfig.xml")); createAndStartJetty(solrHomeDirectory.getAbsolutePath()); } - + @AfterClass public static void afterTest() throws Exception { if (null != solrHomeDirectory) { cleanUpJettyHome(solrHomeDirectory); } } - + @Test public void testHttpResponse() throws SolrServerException, IOException { HttpSolrClient client = (HttpSolrClient) getSolrClient(); @@ -65,7 +65,7 @@ public void testHttpResponse() throws SolrServerException, IOException { HttpResponse response = httpClient.execute(httpGet); Header[] headers = response.getAllHeaders(); boolean containsWarningHeader = false; - for (Header header:headers) { + for (Header header : headers) { if ("Warning".equals(header.getName())) { containsWarningHeader = true; assertEquals("This is a test warning", header.getValue()); @@ -74,21 +74,20 @@ public void testHttpResponse() throws SolrServerException, IOException { } assertTrue("Expected header not found", containsWarningHeader); } - + public static class ComponentThatAddsHeader extends SearchComponent { - + @Override public void prepare(ResponseBuilder rb) throws IOException { rb.rsp.addHttpHeader("Warning", "This is a test warning"); } - + @Override public void process(ResponseBuilder rb) throws IOException {} - + @Override public String getDescription() { return null; } } - } diff --git a/solr/core/src/test/org/apache/solr/servlet/SecurityHeadersTest.java b/solr/core/src/test/org/apache/solr/servlet/SecurityHeadersTest.java index 115f8f6b98e..e4fcf6a48fd 100644 --- a/solr/core/src/test/org/apache/solr/servlet/SecurityHeadersTest.java +++ b/solr/core/src/test/org/apache/solr/servlet/SecurityHeadersTest.java @@ -19,39 +19,39 @@ import java.net.URI; import java.util.Arrays; import java.util.Map; - -import org.apache.solr.client.solrj.embedded.JettySolrRunner; +import org.apache.http.HttpResponse; +import org.apache.http.client.HttpClient; +import org.apache.http.client.methods.HttpGet; import org.apache.solr.client.solrj.SolrClient; +import org.apache.solr.client.solrj.embedded.JettySolrRunner; 
import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.cloud.SolrCloudTestCase; import org.apache.solr.common.params.SolrParams; - -import org.apache.http.HttpResponse; -import org.apache.http.client.HttpClient; -import org.apache.http.client.methods.HttpGet; - import org.junit.BeforeClass; import org.junit.Test; /** - * Confirm that the expected security headers are returned when making requests to solr, - * regardless of wether the request is interanlly forwared to another node. + * Confirm that the expected security headers are returned when making requests to solr, regardless + * of whether the request is internally forwarded to another node. */ -@org.apache.lucene.util.LuceneTestCase.AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-14903") +@org.apache.lucene.util.LuceneTestCase.AwaitsFix( + bugUrl = "https://issues.apache.org/jira/browse/SOLR-14903") public class SecurityHeadersTest extends SolrCloudTestCase { - private static final String COLLECTION = "xxx" ; + private static final String COLLECTION = "xxx"; private static final int NODE_COUNT = 2; /* A quick and dirty mapping of the headers/values we expect to find */ - private static final SolrParams EXPECTED_HEADERS - = params("Content-Security-Policy", "default-src 'none'; base-uri 'none'; connect-src 'self'; form-action 'self'; font-src 'self'; frame-ancestors 'none'; img-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; script-src 'self'; worker-src 'self';", - "X-Content-Type-Options", "nosniff", - "X-Frame-Options", "SAMEORIGIN", - "X-XSS-Protection", "1; mode=block"); - + private static final SolrParams EXPECTED_HEADERS = + params( + "Content-Security-Policy", + "default-src 'none'; base-uri 'none'; connect-src 'self'; form-action 'self'; font-src 'self'; frame-ancestors 'none'; img-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; script-src 'self'; worker-src 'self';", + "X-Content-Type-Options", "nosniff", + "X-Frame-Options", "SAMEORIGIN", + "X-XSS-Protection", "1; mode=block"); + @BeforeClass public static void setupCluster() throws Exception { @@ -60,34 +60,29 @@ public static void setupCluster() throws Exception { // create a 1 shard x 1 node collection CollectionAdminRequest.createCollection(COLLECTION, null, 1, 1) .process(cluster.getSolrClient()); - } @Test public void testHeaders() throws Exception { - // it shouldn't matter what node our lone replica/core wound up on, headers should be the same... + // it shouldn't matter what node our lone replica/core wound up on, headers should be the + // same... for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { try (SolrClient solrClient = jetty.newClient()) { final HttpClient client = ((HttpSolrClient) solrClient).getHttpClient(); // path shouldn't matter -- even if bogus / 404 for (String path : Arrays.asList("/select", "/bogus")) { - final HttpResponse resp = client.execute - (new HttpGet(URI.create(jetty.getBaseUrl().toString() + "/" + COLLECTION + path))); + final HttpResponse resp = + client.execute( + new HttpGet(URI.create(jetty.getBaseUrl().toString() + "/" + COLLECTION + path))); - for (Map.Entry entry : EXPECTED_HEADERS) { + for (Map.Entry entry : EXPECTED_HEADERS) { // these exact arrays (of 1 element each) should be *ALL* of the header instances... // no more, no less. 
- assertEquals(entry.getValue(), - resp.getHeaders(entry.getKey())); - + assertEquals(entry.getValue(), resp.getHeaders(entry.getKey())); } } } } - } - - } - diff --git a/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java b/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java index 3a76690eaa7..d2a57feb6f9 100644 --- a/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java +++ b/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java @@ -16,9 +16,10 @@ */ package org.apache.solr.servlet; -import javax.servlet.ReadListener; -import javax.servlet.ServletInputStream; -import javax.servlet.http.HttpServletRequest; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; import java.io.File; @@ -33,7 +34,9 @@ import java.util.List; import java.util.Map; import java.util.Vector; - +import javax.servlet.ReadListener; +import javax.servlet.ServletInputStream; +import javax.servlet.http.HttpServletRequest; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.solr.SolrTestCaseJ4; @@ -54,10 +57,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - public class SolrRequestParserTest extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -66,192 +65,192 @@ public class SolrRequestParserTest extends SolrTestCaseJ4 { public static void beforeClass() throws Exception { assumeWorkingMockito(); initCore("solrconfig.xml", "schema.xml"); - parser = new SolrRequestParsers( h.getCore().getSolrConfig() ); + parser = new SolrRequestParsers(h.getCore().getSolrConfig()); } - + static SolrRequestParsers parser; @AfterClass public static void afterClass() { parser = null; } - + @Test - public void testStreamBody() throws Exception - { + public void testStreamBody() throws Exception { String body1 = "AMANAPLANPANAMA"; String body2 = "qwertasdfgzxcvb"; String body3 = "1234567890"; - + SolrCore core = h.getCore(); - - Map args = new HashMap<>(); - args.put( CommonParams.STREAM_BODY, new String[] {body1} ); - + + Map args = new HashMap<>(); + args.put(CommonParams.STREAM_BODY, new String[] {body1}); + // Make sure it got a single stream in and out ok List streams = new ArrayList<>(); - SolrQueryRequest req = parser.buildRequestFrom( core, new MultiMapSolrParams( args ), streams ); - assertEquals( 1, streams.size() ); - assertEquals( body1, IOUtils.toString( streams.get(0).getReader() ) ); + SolrQueryRequest req = parser.buildRequestFrom(core, new MultiMapSolrParams(args), streams); + assertEquals(1, streams.size()); + assertEquals(body1, IOUtils.toString(streams.get(0).getReader())); req.close(); // Now add three and make sure they come out ok streams = new ArrayList<>(); - args.put( CommonParams.STREAM_BODY, new String[] {body1,body2,body3} ); - req = parser.buildRequestFrom( core, new MultiMapSolrParams( args ), streams ); - assertEquals( 3, streams.size() ); - ArrayList input = new ArrayList<>(); + args.put(CommonParams.STREAM_BODY, new String[] {body1, body2, body3}); + req = parser.buildRequestFrom(core, new MultiMapSolrParams(args), streams); + assertEquals(3, streams.size()); + ArrayList input = new ArrayList<>(); ArrayList output = new ArrayList<>(); - input.add( 
body1 );
-    input.add( body2 );
-    input.add( body3 );
-    output.add( IOUtils.toString( streams.get(0).getReader() ) );
-    output.add( IOUtils.toString( streams.get(1).getReader() ) );
-    output.add( IOUtils.toString( streams.get(2).getReader() ) );
+    input.add(body1);
+    input.add(body2);
+    input.add(body3);
+    output.add(IOUtils.toString(streams.get(0).getReader()));
+    output.add(IOUtils.toString(streams.get(1).getReader()));
+    output.add(IOUtils.toString(streams.get(2).getReader()));
     // sort them so the output is consistent
-    Collections.sort( input );
-    Collections.sort( output );
-    assertEquals( input.toString(), output.toString() );
+    Collections.sort(input);
+    Collections.sort(output);
+    assertEquals(input.toString(), output.toString());
     req.close();

     // set the contentType and make sure that gets set
     String ctype = "text/xxx";
     streams = new ArrayList<>();
-    args.put( CommonParams.STREAM_CONTENTTYPE, new String[] {ctype} );
-    req = parser.buildRequestFrom( core, new MultiMapSolrParams( args ), streams );
-    for( ContentStream s : streams ) {
-      assertEquals( ctype, s.getContentType() );
+    args.put(CommonParams.STREAM_CONTENTTYPE, new String[] {ctype});
+    req = parser.buildRequestFrom(core, new MultiMapSolrParams(args), streams);
+    for (ContentStream s : streams) {
+      assertEquals(ctype, s.getContentType());
     }
     req.close();
   }
-
+
   @Test
   @SuppressWarnings({"try"})
-  public void testStreamURL() throws Exception
-  {
+  public void testStreamURL() throws Exception {
     URL url = getClass().getResource("/README");
     assertNotNull("Missing file 'README' in test-resources root folder.", url);
-
+
     byte[] bytes = IOUtils.toByteArray(url);

     SolrCore core = h.getCore();
-
-    Map<String,String[]> args = new HashMap<>();
-    args.put( CommonParams.STREAM_URL, new String[] { url.toExternalForm() } );
-
+
+    Map<String, String[]> args = new HashMap<>();
+    args.put(CommonParams.STREAM_URL, new String[] {url.toExternalForm()});
+
     // Make sure it got a single stream in and out ok
     List<ContentStream> streams = new ArrayList<>();
-    try (SolrQueryRequest req = parser.buildRequestFrom( core, new MultiMapSolrParams( args ), streams )) {
-      assertEquals( 1, streams.size() );
+    try (SolrQueryRequest req =
+        parser.buildRequestFrom(core, new MultiMapSolrParams(args), streams)) {
+      assertEquals(1, streams.size());
       try (InputStream in = streams.get(0).getStream()) {
-        assertArrayEquals( bytes, IOUtils.toByteArray( in ) );
+        assertArrayEquals(bytes, IOUtils.toByteArray(in));
       }
     }
   }
-
+
   @Test
   @SuppressWarnings({"try"})
-  public void testStreamFile() throws Exception
-  {
+  public void testStreamFile() throws Exception {
     File file = getFile("README");
-
+
     byte[] bytes = FileUtils.readFileToByteArray(file);

     SolrCore core = h.getCore();
-
-    Map<String,String[]> args = new HashMap<>();
-    args.put( CommonParams.STREAM_FILE, new String[] { file.getAbsolutePath() } );
-
+
+    Map<String, String[]> args = new HashMap<>();
+    args.put(CommonParams.STREAM_FILE, new String[] {file.getAbsolutePath()});
+
     // Make sure it got a single stream in and out ok
     List<ContentStream> streams = new ArrayList<>();
-    try (SolrQueryRequest req = parser.buildRequestFrom( core, new MultiMapSolrParams( args ), streams )) {
-      assertEquals( 1, streams.size() );
+    try (SolrQueryRequest req =
+        parser.buildRequestFrom(core, new MultiMapSolrParams(args), streams)) {
+      assertEquals(1, streams.size());
       try (InputStream in = streams.get(0).getStream()) {
-        assertArrayEquals( bytes, IOUtils.toByteArray( in ) );
+        assertArrayEquals(bytes, IOUtils.toByteArray(in));
       }
     }
   }
-
+
   @Test
-  public void testUrlParamParsing() throws Exception
-  {
-    final String[][] teststr = new String[][] {
-        { "this is simple", "this%20is%20simple" },
-        { "this is simple", "this+is+simple" },
-        { "\u00FC", "%C3%BC" }, // lower-case "u" with diaeresis/umlaut
-        { "\u0026", "%26" }, // &
-        { "", "" }, // empty
-        { "\u20AC", "%E2%82%ac" } // euro, also with lowercase escapes
-    };
-
-    for( String[] tst : teststr ) {
-      SolrParams params = SolrRequestParsers.parseQueryString( "val="+tst[1] );
-      assertEquals( tst[0], params.get( "val" ) );
-      params = SolrRequestParsers.parseQueryString( "val="+tst[1]+"&" );
-      assertEquals( tst[0], params.get( "val" ) );
-      params = SolrRequestParsers.parseQueryString( "&&val="+tst[1]+"&" );
-      assertEquals( tst[0], params.get( "val" ) );
-      params = SolrRequestParsers.parseQueryString( "&&val="+tst[1]+"&&&val="+tst[1]+"&" );
-      assertArrayEquals(new String[]{tst[0],tst[0]}, params.getParams("val") );
-    }
-
+  public void testUrlParamParsing() throws Exception {
+    final String[][] teststr =
+        new String[][] {
+          {"this is simple", "this%20is%20simple"},
+          {"this is simple", "this+is+simple"},
+          {"\u00FC", "%C3%BC"}, // lower-case "u" with diaeresis/umlaut
+          {"\u0026", "%26"}, // &
+          {"", ""}, // empty
+          {"\u20AC", "%E2%82%ac"} // euro, also with lowercase escapes
+        };
+
+    for (String[] tst : teststr) {
+      SolrParams params = SolrRequestParsers.parseQueryString("val=" + tst[1]);
+      assertEquals(tst[0], params.get("val"));
+      params = SolrRequestParsers.parseQueryString("val=" + tst[1] + "&");
+      assertEquals(tst[0], params.get("val"));
+      params = SolrRequestParsers.parseQueryString("&&val=" + tst[1] + "&");
+      assertEquals(tst[0], params.get("val"));
+      params = SolrRequestParsers.parseQueryString("&&val=" + tst[1] + "&&&val=" + tst[1] + "&");
+      assertArrayEquals(new String[] {tst[0], tst[0]}, params.getParams("val"));
+    }
+
     SolrParams params = SolrRequestParsers.parseQueryString("val");
     assertEquals("", params.get("val"));
-
+
     params = SolrRequestParsers.parseQueryString("val&foo=bar=bar&muh&");
     assertEquals("", params.get("val"));
     assertEquals("bar=bar", params.get("foo"));
     assertEquals("", params.get("muh"));
-
+
     final String[] invalid = {
-      "q=h%FCllo",     // non-UTF-8
-      "q=h\u00FCllo",  // encoded string is not pure US-ASCII
-      "q=hallo%",      // incomplete escape
-      "q=hallo%1",     // incomplete escape
+      "q=h%FCllo", // non-UTF-8
+      "q=h\u00FCllo", // encoded string is not pure US-ASCII
+      "q=hallo%", // incomplete escape
+      "q=hallo%1", // incomplete escape
       "q=hallo%XX123", // invalid digit 'X' in escape
-      "=hallo"         // missing key
+      "=hallo" // missing key
     };
     for (String s : invalid) {
       expectThrows(SolrException.class, () -> SolrRequestParsers.parseQueryString(s));
     }
   }
-
+
   @Test
-  public void testStandardParseParamsAndFillStreams() throws Exception
-  {
+  public void testStandardParseParamsAndFillStreams() throws Exception {
     final String getParams = "qt=%C3%BC&dup=foo", postParams = "q=hello&d%75p=bar";
     final byte[] postBytes = postParams.getBytes(StandardCharsets.US_ASCII);
-
+
     // Set up the expected behavior
-    final String[] ct = new String[] {
-        "application/x-www-form-urlencoded",
-        "Application/x-www-form-urlencoded",
-        "application/x-www-form-urlencoded; charset=utf-8",
-        "application/x-www-form-urlencoded;"
-    };
-
-    for( String contentType : ct ) {
+    final String[] ct =
+        new String[] {
+          "application/x-www-form-urlencoded",
+          "Application/x-www-form-urlencoded",
+          "application/x-www-form-urlencoded; charset=utf-8",
+          "application/x-www-form-urlencoded;"
+        };
+
+    for (String contentType : ct) {
       HttpServletRequest request = getMock("/solr/select", contentType, postBytes.length);
when(request.getMethod()).thenReturn("POST"); when(request.getQueryString()).thenReturn(getParams); when(request.getInputStream()).thenReturn(new ByteServletInputStream(postBytes)); - MultipartRequestParser multipart = new MultipartRequestParser( 2048 ); + MultipartRequestParser multipart = new MultipartRequestParser(2048); RawRequestParser raw = new RawRequestParser(); - FormDataRequestParser formdata = new FormDataRequestParser( 2048 ); - StandardRequestParser standard = new StandardRequestParser( multipart, raw, formdata ); - + FormDataRequestParser formdata = new FormDataRequestParser(2048); + StandardRequestParser standard = new StandardRequestParser(multipart, raw, formdata); + SolrParams p = standard.parseParamsAndFillStreams(request, new ArrayList()); - - assertEquals( "contentType: "+contentType, "hello", p.get("q") ); - assertEquals( "contentType: "+contentType, "\u00FC", p.get("qt") ); - assertArrayEquals( "contentType: "+contentType, new String[]{"foo","bar"}, p.getParams("dup") ); + + assertEquals("contentType: " + contentType, "hello", p.get("q")); + assertEquals("contentType: " + contentType, "\u00FC", p.get("qt")); + assertArrayEquals( + "contentType: " + contentType, new String[] {"foo", "bar"}, p.getParams("dup")); verify(request).getInputStream(); } } - static class ByteServletInputStream extends ServletInputStream { + static class ByteServletInputStream extends ServletInputStream { final BufferedInputStream in; final int len; int readCount = 0; @@ -283,38 +282,38 @@ public int read() throws IOException { return read; } } - + @Test - public void testStandardParseParamsAndFillStreamsISO88591() throws Exception - { - final String getParams = "qt=%FC&dup=foo&ie=iso-8859-1&dup=%FC", postParams = "qt2=%FC&q=hello&d%75p=bar"; + public void testStandardParseParamsAndFillStreamsISO88591() throws Exception { + final String getParams = "qt=%FC&dup=foo&ie=iso-8859-1&dup=%FC", + postParams = "qt2=%FC&q=hello&d%75p=bar"; final byte[] postBytes = postParams.getBytes(StandardCharsets.US_ASCII); final String contentType = "application/x-www-form-urlencoded; charset=iso-8859-1"; - + // Set up the expected behavior HttpServletRequest request = getMock("/solr/select", contentType, postBytes.length); when(request.getMethod()).thenReturn("POST"); when(request.getQueryString()).thenReturn(getParams); when(request.getInputStream()).thenReturn(new ByteServletInputStream(postBytes)); - - MultipartRequestParser multipart = new MultipartRequestParser( 2048 ); + + MultipartRequestParser multipart = new MultipartRequestParser(2048); RawRequestParser raw = new RawRequestParser(); - FormDataRequestParser formdata = new FormDataRequestParser( 2048 ); - StandardRequestParser standard = new StandardRequestParser( multipart, raw, formdata ); - + FormDataRequestParser formdata = new FormDataRequestParser(2048); + StandardRequestParser standard = new StandardRequestParser(multipart, raw, formdata); + SolrParams p = standard.parseParamsAndFillStreams(request, new ArrayList()); - - assertEquals( "contentType: "+contentType, "hello", p.get("q") ); - assertEquals( "contentType: "+contentType, "\u00FC", p.get("qt") ); - assertEquals( "contentType: "+contentType, "\u00FC", p.get("qt2") ); - assertArrayEquals( "contentType: "+contentType, new String[]{"foo","\u00FC","bar"}, p.getParams("dup") ); + + assertEquals("contentType: " + contentType, "hello", p.get("q")); + assertEquals("contentType: " + contentType, "\u00FC", p.get("qt")); + assertEquals("contentType: " + contentType, "\u00FC", p.get("qt2")); + 
assertArrayEquals( + "contentType: " + contentType, new String[] {"foo", "\u00FC", "bar"}, p.getParams("dup")); verify(request).getInputStream(); } - + @Test - public void testStandardFormdataUploadLimit() throws Exception - { + public void testStandardFormdataUploadLimit() throws Exception { final int limitKBytes = 128; final StringBuilder large = new StringBuilder("q=hello"); @@ -324,67 +323,79 @@ public void testStandardFormdataUploadLimit() throws Exception } HttpServletRequest request = getMock("/solr/select", "application/x-www-form-urlencoded", -1); when(request.getMethod()).thenReturn("POST"); - when(request.getInputStream()).thenReturn(new ByteServletInputStream(large.toString().getBytes(StandardCharsets.US_ASCII))); - - FormDataRequestParser formdata = new FormDataRequestParser( limitKBytes ); - SolrException e = expectThrows(SolrException.class, () -> { - formdata.parseParamsAndFillStreams(request, new ArrayList<>()); - }); + when(request.getInputStream()) + .thenReturn( + new ByteServletInputStream(large.toString().getBytes(StandardCharsets.US_ASCII))); + + FormDataRequestParser formdata = new FormDataRequestParser(limitKBytes); + SolrException e = + expectThrows( + SolrException.class, + () -> { + formdata.parseParamsAndFillStreams(request, new ArrayList<>()); + }); assertTrue(e.getMessage().contains("upload limit")); assertEquals(400, e.code()); verify(request).getInputStream(); } - + @Test - public void testParameterIncompatibilityException1() throws Exception - { + public void testParameterIncompatibilityException1() throws Exception { HttpServletRequest request = getMock("/solr/select", "application/x-www-form-urlencoded", 100); // we emulate Jetty that returns empty stream when parameters were parsed before: - when(request.getInputStream()).thenReturn(new ServletInputStream() { - @Override public int read() { return -1; } - - @Override - public boolean isFinished() { - return true; - } - - @Override - public boolean isReady() { - return true; - } - - @Override - public void setReadListener(ReadListener readListener) { - - } - }); - - FormDataRequestParser formdata = new FormDataRequestParser( 2048 ); - SolrException e = expectThrows(SolrException.class, () -> { - formdata.parseParamsAndFillStreams(request, new ArrayList<>()); - }); + when(request.getInputStream()) + .thenReturn( + new ServletInputStream() { + @Override + public int read() { + return -1; + } + + @Override + public boolean isFinished() { + return true; + } + + @Override + public boolean isReady() { + return true; + } + + @Override + public void setReadListener(ReadListener readListener) {} + }); + + FormDataRequestParser formdata = new FormDataRequestParser(2048); + SolrException e = + expectThrows( + SolrException.class, + () -> { + formdata.parseParamsAndFillStreams(request, new ArrayList<>()); + }); assertTrue(e.getMessage().startsWith("Solr requires that request parameters")); assertEquals(500, e.code()); verify(request).getInputStream(); } - + @Test - public void testParameterIncompatibilityException2() throws Exception - { + public void testParameterIncompatibilityException2() throws Exception { HttpServletRequest request = getMock("/solr/select", "application/x-www-form-urlencoded", 100); when(request.getMethod()).thenReturn("POST"); // we emulate Tomcat that throws IllegalStateException when parameters were parsed before: when(request.getInputStream()).thenThrow(new IllegalStateException()); - FormDataRequestParser formdata = new FormDataRequestParser( 2048 ); - SolrException e = 
expectThrows(SolrException.class, () -> { - formdata.parseParamsAndFillStreams(request, new ArrayList<>()); - }); + FormDataRequestParser formdata = new FormDataRequestParser(2048); + SolrException e = + expectThrows( + SolrException.class, + () -> { + formdata.parseParamsAndFillStreams(request, new ArrayList<>()); + }); assertTrue(e.getMessage().startsWith("Solr requires that request parameters")); assertEquals(500, e.code()); verify(request).getInputStream(); } - + @Test public void testAddHttpRequestToContext() throws Exception { HttpServletRequest request = getMock("/solr/select", null, -1); @@ -393,7 +404,7 @@ public void testAddHttpRequestToContext() throws Exception { Map headers = new HashMap<>(); headers.put("X-Forwarded-For", "10.0.0.1"); when(request.getHeaderNames()).thenReturn(new Vector<>(headers.keySet()).elements()); - for(Map.Entry entry:headers.entrySet()) { + for (Map.Entry entry : headers.entrySet()) { Vector v = new Vector<>(); v.add(entry.getValue()); when(request.getHeaders(entry.getKey())).thenReturn(v.elements()); @@ -403,12 +414,15 @@ public void testAddHttpRequestToContext() throws Exception { assertFalse(parsers.isAddRequestHeadersToContext()); SolrQueryRequest solrReq = parsers.parse(h.getCore(), "/select", request); assertFalse(solrReq.getContext().containsKey("httpRequest")); - + parsers.setAddRequestHeadersToContext(true); solrReq = parsers.parse(h.getCore(), "/select", request); assertEquals(request, solrReq.getContext().get("httpRequest")); - assertEquals("10.0.0.1", ((HttpServletRequest)solrReq.getContext().get("httpRequest")).getHeaders("X-Forwarded-For").nextElement()); - + assertEquals( + "10.0.0.1", + ((HttpServletRequest) solrReq.getContext().get("httpRequest")) + .getHeaders("X-Forwarded-For") + .nextElement()); } public void testPostMissingContentType() throws Exception { @@ -427,25 +441,69 @@ public void testPostMissingContentType() throws Exception { @Test public void testAutoDetect() throws Exception { String curl = "curl/7.30.0"; - for (String method : new String[]{"GET","POST"}) { - doAutoDetect(null, method, "{}=a", null, "{}", "a"); // unknown agent should not auto-detect - doAutoDetect(curl, method, "{}", "application/json", null, null); // curl should auto-detect - doAutoDetect(curl, method, " \t\n\r {} ", "application/json", null, null); // starting with whitespace - doAutoDetect(curl, method, " \t\n\r // how now brown cow\n {} ", "application/json", null, null); // supporting comments - doAutoDetect(curl, method, " \t\n\r #different style comment\n {} ", "application/json", null, null); - doAutoDetect(curl, method, " \t\n\r /* C style comment */\n {} ", "application/json", null, null); + for (String method : new String[] {"GET", "POST"}) { + doAutoDetect(null, method, "{}=a", null, "{}", "a"); // unknown agent should not auto-detect + doAutoDetect(curl, method, "{}", "application/json", null, null); // curl should auto-detect + doAutoDetect( + curl, + method, + " \t\n\r {} ", + "application/json", + null, + null); // starting with whitespace + doAutoDetect( + curl, + method, + " \t\n\r // how now brown cow\n {} ", + "application/json", + null, + null); // supporting comments + doAutoDetect( + curl, + method, + " \t\n\r #different style comment\n {} ", + "application/json", + null, + null); + doAutoDetect( + curl, method, " \t\n\r /* C style comment */\n {} ", "application/json", null, null); doAutoDetect(curl, method, " \t\n\r hi ", "text/xml", null, null); - doAutoDetect(curl, method, " \t\r\n aaa=1&bbb=2&ccc=3", null, "bbb", "2"); // 
params with whitespace first - doAutoDetect(curl, method, "/x=foo&aaa=1&bbb=2&ccc=3", null, "/x", "foo"); // param name that looks like a path - doAutoDetect(curl, method, " \t\r\n /x=foo&aaa=1&bbb=2&ccc=3", null, "bbb", "2"); // param name that looks like a path + doAutoDetect( + curl, + method, + " \t\r\n aaa=1&bbb=2&ccc=3", + null, + "bbb", + "2"); // params with whitespace first + doAutoDetect( + curl, + method, + "/x=foo&aaa=1&bbb=2&ccc=3", + null, + "/x", + "foo"); // param name that looks like a path + doAutoDetect( + curl, + method, + " \t\r\n /x=foo&aaa=1&bbb=2&ccc=3", + null, + "bbb", + "2"); // param name that looks like a path } } - public void doAutoDetect(String userAgent, String method, final String body, String expectedContentType, String expectedKey, String expectedValue) throws Exception { + public void doAutoDetect( + String userAgent, + String method, + final String body, + String expectedContentType, + String expectedKey, + String expectedValue) + throws Exception { String uri = "/solr/select"; String contentType = "application/x-www-form-urlencoded"; - int contentLength = -1; // does this mean auto-detect? + int contentLength = -1; // does this mean auto-detect? HttpServletRequest request = mock(HttpServletRequest.class); when(request.getHeader("User-Agent")).thenReturn(userAgent); @@ -456,11 +514,12 @@ public void doAutoDetect(String userAgent, String method, final String body, Str when(request.getMethod()).thenReturn(method); // we dont pass a content-length to let the security mechanism limit it: when(request.getQueryString()).thenReturn("foo=1&bar=2"); - when(request.getInputStream()).thenReturn(new ByteServletInputStream(body.getBytes(StandardCharsets.US_ASCII))); + when(request.getInputStream()) + .thenReturn(new ByteServletInputStream(body.getBytes(StandardCharsets.US_ASCII))); SolrRequestParsers parsers = new SolrRequestParsers(h.getCore().getSolrConfig()); SolrQueryRequest req = parsers.parse(h.getCore(), "/select", request); - int num=0; + int num = 0; if (expectedContentType != null) { for (ContentStream cs : req.getContentStreams()) { num++; @@ -482,7 +541,6 @@ public void doAutoDetect(String userAgent, String method, final String body, Str verify(request).getInputStream(); } - public HttpServletRequest getMock() { return getMock("/solr/select", null, -1); } @@ -494,5 +552,4 @@ public HttpServletRequest getMock(String uri, String contentType, int contentLen when(request.getContentLength()).thenReturn(contentLength); return request; } - } diff --git a/solr/core/src/test/org/apache/solr/servlet/TestRequestRateLimiter.java b/solr/core/src/test/org/apache/solr/servlet/TestRequestRateLimiter.java index 8a1587c55c6..3784d861cbb 100644 --- a/solr/core/src/test/org/apache/solr/servlet/TestRequestRateLimiter.java +++ b/solr/core/src/test/org/apache/solr/servlet/TestRequestRateLimiter.java @@ -17,13 +17,15 @@ package org.apache.solr.servlet; +import static org.apache.solr.servlet.RateLimitManager.DEFAULT_SLOT_ACQUISITION_TIMEOUT_MS; +import static org.hamcrest.CoreMatchers.containsString; + import java.util.ArrayList; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicInteger; - import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -37,12 +39,9 @@ import org.junit.BeforeClass; import org.junit.Test; -import static 
org.apache.solr.servlet.RateLimitManager.DEFAULT_SLOT_ACQUISITION_TIMEOUT_MS; -import static org.hamcrest.CoreMatchers.containsString; - public class TestRequestRateLimiter extends SolrCloudTestCase { - private final static String FIRST_COLLECTION = "c1"; - private final static String SECOND_COLLECTION = "c2"; + private static final String FIRST_COLLECTION = "c1"; + private static final String SECOND_COLLECTION = "c2"; @BeforeClass public static void setupCluster() throws Exception { @@ -59,10 +58,19 @@ public void testConcurrentQueries() throws Exception { SolrDispatchFilter solrDispatchFilter = cluster.getJettySolrRunner(0).getSolrDispatchFilter(); - RateLimiterConfig rateLimiterConfig = new RateLimiterConfig(SolrRequest.SolrRequestType.QUERY, - true, 1, DEFAULT_SLOT_ACQUISITION_TIMEOUT_MS, 5 /* allowedRequests */, true /* isSlotBorrowing */); - // We are fine with a null FilterConfig here since we ensure that MockBuilder never invokes its parent here - RateLimitManager.Builder builder = new MockBuilder(null /* dummy SolrZkClient */, new MockRequestRateLimiter(rateLimiterConfig, 5)); + RateLimiterConfig rateLimiterConfig = + new RateLimiterConfig( + SolrRequest.SolrRequestType.QUERY, + true, + 1, + DEFAULT_SLOT_ACQUISITION_TIMEOUT_MS, + 5 /* allowedRequests */, + true /* isSlotBorrowing */); + // We are fine with a null FilterConfig here since we ensure that MockBuilder never invokes its + // parent here + RateLimitManager.Builder builder = + new MockBuilder( + null /* dummy SolrZkClient */, new MockRequestRateLimiter(rateLimiterConfig, 5)); RateLimitManager rateLimitManager = builder.build(); solrDispatchFilter.replaceRateLimitManager(rateLimitManager); @@ -71,15 +79,21 @@ public void testConcurrentQueries() throws Exception { processTest(client, numDocs, 350 /* number of queries */); - MockRequestRateLimiter mockQueryRateLimiter = (MockRequestRateLimiter) rateLimitManager.getRequestRateLimiter(SolrRequest.SolrRequestType.QUERY); + MockRequestRateLimiter mockQueryRateLimiter = + (MockRequestRateLimiter) + rateLimitManager.getRequestRateLimiter(SolrRequest.SolrRequestType.QUERY); assertEquals(350, mockQueryRateLimiter.incomingRequestCount.get()); assertTrue(mockQueryRateLimiter.acceptedNewRequestCount.get() > 0); - assertTrue((mockQueryRateLimiter.acceptedNewRequestCount.get() == mockQueryRateLimiter.incomingRequestCount.get() - || mockQueryRateLimiter.rejectedRequestCount.get() > 0)); - assertEquals(mockQueryRateLimiter.incomingRequestCount.get(), - mockQueryRateLimiter.acceptedNewRequestCount.get() + mockQueryRateLimiter.rejectedRequestCount.get()); + assertTrue( + (mockQueryRateLimiter.acceptedNewRequestCount.get() + == mockQueryRateLimiter.incomingRequestCount.get() + || mockQueryRateLimiter.rejectedRequestCount.get() > 0)); + assertEquals( + mockQueryRateLimiter.incomingRequestCount.get(), + mockQueryRateLimiter.acceptedNewRequestCount.get() + + mockQueryRateLimiter.rejectedRequestCount.get()); } @Nightly @@ -92,12 +106,29 @@ public void testSlotBorrowing() throws Exception { SolrDispatchFilter solrDispatchFilter = cluster.getJettySolrRunner(0).getSolrDispatchFilter(); - RateLimiterConfig queryRateLimiterConfig = new RateLimiterConfig(SolrRequest.SolrRequestType.QUERY, - true, 1, DEFAULT_SLOT_ACQUISITION_TIMEOUT_MS, 5 /* allowedRequests */, true /* isSlotBorrowing */); - RateLimiterConfig indexRateLimiterConfig = new RateLimiterConfig(SolrRequest.SolrRequestType.UPDATE, - true, 1, DEFAULT_SLOT_ACQUISITION_TIMEOUT_MS, 5 /* allowedRequests */, true /* isSlotBorrowing */); - // We are 
fine with a null FilterConfig here since we ensure that MockBuilder never invokes its parent - RateLimitManager.Builder builder = new MockBuilder(null /*dummy SolrZkClient */, new MockRequestRateLimiter(queryRateLimiterConfig, 5), new MockRequestRateLimiter(indexRateLimiterConfig, 5)); + RateLimiterConfig queryRateLimiterConfig = + new RateLimiterConfig( + SolrRequest.SolrRequestType.QUERY, + true, + 1, + DEFAULT_SLOT_ACQUISITION_TIMEOUT_MS, + 5 /* allowedRequests */, + true /* isSlotBorrowing */); + RateLimiterConfig indexRateLimiterConfig = + new RateLimiterConfig( + SolrRequest.SolrRequestType.UPDATE, + true, + 1, + DEFAULT_SLOT_ACQUISITION_TIMEOUT_MS, + 5 /* allowedRequests */, + true /* isSlotBorrowing */); + // We are fine with a null FilterConfig here since we ensure that MockBuilder never invokes its + // parent + RateLimitManager.Builder builder = + new MockBuilder( + null /*dummy SolrZkClient */, + new MockRequestRateLimiter(queryRateLimiterConfig, 5), + new MockRequestRateLimiter(indexRateLimiterConfig, 5)); RateLimitManager rateLimitManager = builder.build(); solrDispatchFilter.replaceRateLimitManager(rateLimitManager); @@ -106,13 +137,18 @@ public void testSlotBorrowing() throws Exception { processTest(client, numDocs, 400 /* Number of queries */); - MockRequestRateLimiter mockIndexRateLimiter = (MockRequestRateLimiter) rateLimitManager.getRequestRateLimiter(SolrRequest.SolrRequestType.UPDATE); + MockRequestRateLimiter mockIndexRateLimiter = + (MockRequestRateLimiter) + rateLimitManager.getRequestRateLimiter(SolrRequest.SolrRequestType.UPDATE); - assertTrue("Incoming slots borrowed count did not match. Expected > 0 incoming " + mockIndexRateLimiter.borrowedSlotCount.get(), + assertTrue( + "Incoming slots borrowed count did not match. Expected > 0 incoming " + + mockIndexRateLimiter.borrowedSlotCount.get(), mockIndexRateLimiter.borrowedSlotCount.get() > 0); } - private void processTest(CloudSolrClient client, int numDocuments, int numQueries) throws Exception { + private void processTest(CloudSolrClient client, int numDocuments, int numQueries) + throws Exception { for (int i = 0; i < numDocuments; i++) { SolrInputDocument doc = new SolrInputDocument(); @@ -130,17 +166,18 @@ private void processTest(CloudSolrClient client, int numDocuments, int numQuerie try { for (int i = 0; i < numQueries; i++) { - callableList.add(() -> { - try { - QueryResponse response = client.query(new SolrQuery("*:*")); - - assertEquals(numDocuments, response.getResults().getNumFound()); - } catch (Exception e) { - throw new RuntimeException(e.getMessage()); - } - - return true; - }); + callableList.add( + () -> { + try { + QueryResponse response = client.query(new SolrQuery("*:*")); + + assertEquals(numDocuments, response.getResults().getNumFound()); + } catch (Exception e) { + throw new RuntimeException(e.getMessage()); + } + + return true; + }); } futures = executor.invokeAll(callableList); @@ -149,7 +186,8 @@ private void processTest(CloudSolrClient client, int numDocuments, int numQuerie try { assertTrue(future.get() != null); } catch (Exception e) { - assertThat(e.getMessage(), containsString("non ok status: 429, message:Too Many Requests")); + assertThat( + e.getMessage(), containsString("non ok status: 429, message:Too Many Requests")); } } } finally { @@ -213,7 +251,10 @@ public MockBuilder(SolrZkClient zkClient, RequestRateLimiter queryRequestRateLim this.indexRequestRateLimiter = null; } - public MockBuilder(SolrZkClient zkClient, RequestRateLimiter queryRequestRateLimiter, RequestRateLimiter 
indexRequestRateLimiter) {
+    public MockBuilder(
+        SolrZkClient zkClient,
+        RequestRateLimiter queryRequestRateLimiter,
+        RequestRateLimiter indexRequestRateLimiter) {
       super(zkClient);

       this.queryRequestRateLimiter = queryRequestRateLimiter;
@@ -224,10 +265,12 @@ public MockBuilder(SolrZkClient zkClient, RequestRateLimiter queryRequestRateLim
     public RateLimitManager build() {
       RateLimitManager rateLimitManager = new RateLimitManager();

-      rateLimitManager.registerRequestRateLimiter(queryRequestRateLimiter, SolrRequest.SolrRequestType.QUERY);
+      rateLimitManager.registerRequestRateLimiter(
+          queryRequestRateLimiter, SolrRequest.SolrRequestType.QUERY);

       if (indexRequestRateLimiter != null) {
-        rateLimitManager.registerRequestRateLimiter(indexRequestRateLimiter, SolrRequest.SolrRequestType.UPDATE);
+        rateLimitManager.registerRequestRateLimiter(
+            indexRequestRateLimiter, SolrRequest.SolrRequestType.UPDATE);
       }

       return rateLimitManager;
diff --git a/solr/core/src/test/org/apache/solr/spelling/ConjunctionSolrSpellCheckerTest.java b/solr/core/src/test/org/apache/solr/spelling/ConjunctionSolrSpellCheckerTest.java
index 7a3bf489c4a..ec5f7b5249f 100644
--- a/solr/core/src/test/org/apache/solr/spelling/ConjunctionSolrSpellCheckerTest.java
+++ b/solr/core/src/test/org/apache/solr/spelling/ConjunctionSolrSpellCheckerTest.java
@@ -17,7 +17,6 @@
 package org.apache.solr.spelling;

 import java.io.IOException;
-
 import org.apache.lucene.search.spell.JaroWinklerDistance;
 import org.apache.lucene.search.spell.LevenshteinDistance;
 import org.apache.lucene.search.spell.LuceneLevenshteinDistance;
@@ -30,62 +29,69 @@
 import org.junit.Test;

 public class ConjunctionSolrSpellCheckerTest extends SolrTestCase {
-
-  public static final Class[] AVAILABLE_DISTANCES = {LevenshteinDistance.class, LuceneLevenshteinDistance.class,
-      JaroWinklerDistance.class, NGramDistance.class};
+
+  public static final Class[] AVAILABLE_DISTANCES = {
+    LevenshteinDistance.class,
+    LuceneLevenshteinDistance.class,
+    JaroWinklerDistance.class,
+    NGramDistance.class
+  };

   @Test
   public void test() throws Exception {
     ConjunctionSolrSpellChecker cssc = new ConjunctionSolrSpellChecker();
     @SuppressWarnings("unchecked")
-    Class<StringDistance> sameDistance = (Class<StringDistance>) AVAILABLE_DISTANCES[random().nextInt(AVAILABLE_DISTANCES.length)];
-
+    Class<StringDistance> sameDistance =
+        (Class<StringDistance>) AVAILABLE_DISTANCES[random().nextInt(AVAILABLE_DISTANCES.length)];
+
     StringDistance sameDistance1 = sameDistance.getConstructor().newInstance();
     StringDistance sameDistance2 = sameDistance.getConstructor().newInstance();
-
-    //NGramDistance defaults to 2, so we'll try 3 or 4 to ensure we have one that is not-equal.
+
+    // NGramDistance defaults to 2, so we'll try 3 or 4 to ensure we have one that is not-equal.
     StringDistance differentDistance = new NGramDistance(3);
-    if(sameDistance1.equals(differentDistance)) {
-      differentDistance = new NGramDistance(4);
-      if(sameDistance1.equals(differentDistance)) {
-        fail("Cannot set up test. 2 NGramDistances with different gram sizes should not be equal.");
+    if (sameDistance1.equals(differentDistance)) {
+      differentDistance = new NGramDistance(4);
+      if (sameDistance1.equals(differentDistance)) {
+        fail(
+            "Cannot set up test. 2 NGramDistances with different gram sizes should not be equal.");
       }
     }
-    Assert.assertEquals("The distance " + sameDistance + " does not properly implement equals.", sameDistance1, sameDistance2);
-
-
+    Assert.assertEquals(
+        "The distance " + sameDistance + " does not properly implement equals.",
+        sameDistance1,
+        sameDistance2);
+
     MockSolrSpellChecker checker1 = new MockSolrSpellChecker(sameDistance1);
     MockSolrSpellChecker checker2 = new MockSolrSpellChecker(sameDistance2);
     MockSolrSpellChecker checker3 = new MockSolrSpellChecker(differentDistance);
-
+
     cssc.addChecker(checker1);
     cssc.addChecker(checker2);
     expectThrows(IllegalArgumentException.class, () -> cssc.addChecker(checker3));
   }

   static class MockSolrSpellChecker extends SolrSpellChecker {
-
+
     final StringDistance sd;
-
+
     MockSolrSpellChecker(StringDistance sd) {
       this.sd = sd;
     }
-
+
     @Override
     protected StringDistance getStringDistance() {
       return sd;
     }
-
+
     @Override
     public void reload(SolrCore core, SolrIndexSearcher searcher) throws IOException {}
-
+
     @Override
     public void build(SolrCore core, SolrIndexSearcher searcher) throws IOException {}
-
+
     @Override
     public SpellingResult getSuggestions(SpellingOptions options) throws IOException {
       return null;
     }
-
   }
 }
diff --git a/solr/core/src/test/org/apache/solr/spelling/DirectSolrSpellCheckerTest.java b/solr/core/src/test/org/apache/solr/spelling/DirectSolrSpellCheckerTest.java
index 99d424d55b7..9e1e192f566 100644
--- a/solr/core/src/test/org/apache/solr/spelling/DirectSolrSpellCheckerTest.java
+++ b/solr/core/src/test/org/apache/solr/spelling/DirectSolrSpellCheckerTest.java
@@ -18,7 +18,6 @@
 import java.util.Collection;
 import java.util.Map;
-
 import org.apache.lucene.util.LuceneTestCase.SuppressTempFileChecks;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.params.SpellingParams;
@@ -28,20 +27,21 @@
 import org.junit.BeforeClass;
 import org.junit.Test;

-/**
- * Simple tests for {@link DirectSolrSpellChecker}
- */
-@SuppressTempFileChecks(bugUrl = "https://issues.apache.org/jira/browse/SOLR-1877 Spellcheck IndexReader leak bug?")
+/** Simple tests for {@link DirectSolrSpellChecker} */
+@SuppressTempFileChecks(
+    bugUrl = "https://issues.apache.org/jira/browse/SOLR-1877 Spellcheck IndexReader leak bug?")
 public class DirectSolrSpellCheckerTest extends SolrTestCaseJ4 {

   private static SpellingQueryConverter queryConverter;

   @BeforeClass
   public static void beforeClass() throws Exception {
-    initCore("solrconfig-spellcheckcomponent.xml","schema.xml");
-    //Index something with a title
+    initCore("solrconfig-spellcheckcomponent.xml", "schema.xml");
+    // Index something with a title
     assertNull(h.validateUpdate(adoc("id", "0", "teststop", "This is a title")));
-    assertNull(h.validateUpdate(adoc("id", "1", "teststop", "The quick reb fox jumped over the lazy brown dogs.")));
+    assertNull(
+        h.validateUpdate(
+            adoc("id", "1", "teststop", "The quick reb fox jumped over the lazy brown dogs.")));
     assertNull(h.validateUpdate(adoc("id", "2", "teststop", "This is a Solr")));
     assertNull(h.validateUpdate(adoc("id", "3", "teststop", "solr foo")));
     assertNull(h.validateUpdate(adoc("id", "4", "teststop", "another foo")));
@@ -49,7 +49,7 @@ public static void beforeClass() throws Exception {
     queryConverter = new SimpleQueryConverter();
     queryConverter.init(new NamedList<>());
   }
-
+
   @Test
   public void test() throws Exception {
     DirectSolrSpellChecker checker = new DirectSolrSpellChecker();
@@ -61,38 +61,54 @@ public void test() throws Exception {
     SolrCore core = h.getCore();
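Every withSearcher block from here through the file-based and index-based checker tests repeats one flow: convert the raw query into tokens, build SpellingOptions against the live IndexReader, and inspect the returned SpellingResult. A minimal sketch of that flow as a helper inside this test class (the name suggestFor and the restored generic types are assumptions; h, queryConverter, and the "fob" to "foo" test data come from the surrounding tests, and the imports already present in this file are assumed):

// Condensed restatement of the suggestion pattern repeated in the hunks below.
private Map<String, Integer> suggestFor(SolrSpellChecker checker, String rawQuery)
    throws IOException {
  Collection<Token> tokens = queryConverter.convert(rawQuery);
  return h.getCore()
      .withSearcher(
          searcher -> {
            SpellingOptions spellOpts = new SpellingOptions(tokens, searcher.getIndexReader());
            SpellingResult result = checker.getSuggestions(spellOpts);
            // Per the assertions below, a correctly spelled word yields no
            // entry, while a misspelling maps each suggestion to a frequency.
            return result.get(tokens.iterator().next());
          });
}

With the documents indexed in beforeClass, suggestFor(checker, "fob") would contain "foo", while suggestFor(checker, "super") would come back empty, mirroring the assertions in the hunks that follow.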
checker.init(spellchecker, core); - h.getCore().withSearcher(searcher -> { - - // check that 'fob' is corrected to 'foo' - Collection tokens = queryConverter.convert("fob"); - SpellingOptions spellOpts = new SpellingOptions(tokens, searcher.getIndexReader()); - SpellingResult result = checker.getSuggestions(spellOpts); - assertNotNull("result shouldn't be null", result); - Map suggestions = result.get(tokens.iterator().next()); - assertFalse("suggestions shouldn't be empty", suggestions.isEmpty()); - Map.Entry entry = suggestions.entrySet().iterator().next(); - assertEquals("foo", entry.getKey()); - assertFalse(entry.getValue() + " equals: " + SpellingResult.NO_FREQUENCY_INFO, entry.getValue() == SpellingResult.NO_FREQUENCY_INFO); - - // check that 'super' is *not* corrected - spellOpts.tokens = queryConverter.convert("super"); - result = checker.getSuggestions(spellOpts); - assertNotNull("result shouldn't be null", result); - suggestions = result.get(spellOpts.tokens.iterator().next()); - assertNotNull("suggestions shouldn't be null", suggestions); - assertTrue("suggestions should be empty", suggestions.isEmpty()); - return null; - }); + h.getCore() + .withSearcher( + searcher -> { + + // check that 'fob' is corrected to 'foo' + Collection tokens = queryConverter.convert("fob"); + SpellingOptions spellOpts = new SpellingOptions(tokens, searcher.getIndexReader()); + SpellingResult result = checker.getSuggestions(spellOpts); + assertNotNull("result shouldn't be null", result); + Map suggestions = result.get(tokens.iterator().next()); + assertFalse("suggestions shouldn't be empty", suggestions.isEmpty()); + Map.Entry entry = suggestions.entrySet().iterator().next(); + assertEquals("foo", entry.getKey()); + assertFalse( + entry.getValue() + " equals: " + SpellingResult.NO_FREQUENCY_INFO, + entry.getValue() == SpellingResult.NO_FREQUENCY_INFO); + + // check that 'super' is *not* corrected + spellOpts.tokens = queryConverter.convert("super"); + result = checker.getSuggestions(spellOpts); + assertNotNull("result shouldn't be null", result); + suggestions = result.get(spellOpts.tokens.iterator().next()); + assertNotNull("suggestions shouldn't be null", suggestions); + assertTrue("suggestions should be empty", suggestions.isEmpty()); + return null; + }); } - + @Test public void testOnlyMorePopularWithExtendedResults() throws Exception { - assertQ(req("q", "teststop:fox", "qt", "/spellCheckCompRH", SpellCheckComponent.COMPONENT_NAME, "true", SpellingParams.SPELLCHECK_DICT, "direct", SpellingParams.SPELLCHECK_EXTENDED_RESULTS, "true", SpellingParams.SPELLCHECK_ONLY_MORE_POPULAR, "true"), + assertQ( + req( + "q", + "teststop:fox", + "qt", + "/spellCheckCompRH", + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellingParams.SPELLCHECK_DICT, + "direct", + SpellingParams.SPELLCHECK_EXTENDED_RESULTS, + "true", + SpellingParams.SPELLCHECK_ONLY_MORE_POPULAR, + "true"), "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='fox']/int[@name='origFreq']=1", "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='fox']/arr[@name='suggestion']/lst/str[@name='word']='foo'", "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='fox']/arr[@name='suggestion']/lst/int[@name='freq']=2", - "//lst[@name='spellcheck']/bool[@name='correctlySpelled']='true'" - ); + "//lst[@name='spellcheck']/bool[@name='correctlySpelled']='true'"); } @Test @@ -115,24 +131,25 @@ private void testMaxQueryLength(Boolean limitQueryLength) throws Exception { SolrCore core = h.getCore(); checker.init(spellchecker, 
core); - h.getCore().withSearcher(searcher -> { - Collection tokens = queryConverter.convert("anothar"); - SpellingOptions spellOpts = new SpellingOptions(tokens, searcher.getIndexReader()); - SpellingResult result = checker.getSuggestions(spellOpts); - assertNotNull("result shouldn't be null", result); - Map suggestions = result.get(tokens.iterator().next()); - assertNotNull("suggestions shouldn't be null", suggestions); - - if (limitQueryLength) { - assertTrue("suggestions should be empty", suggestions.isEmpty()); - } else { - assertFalse("suggestions shouldn't be empty", suggestions.isEmpty()); - Map.Entry entry = suggestions.entrySet().iterator().next(); - assertEquals("another", entry.getKey()); - } - - return null; - }); + h.getCore() + .withSearcher( + searcher -> { + Collection tokens = queryConverter.convert("anothar"); + SpellingOptions spellOpts = new SpellingOptions(tokens, searcher.getIndexReader()); + SpellingResult result = checker.getSuggestions(spellOpts); + assertNotNull("result shouldn't be null", result); + Map suggestions = result.get(tokens.iterator().next()); + assertNotNull("suggestions shouldn't be null", suggestions); + + if (limitQueryLength) { + assertTrue("suggestions should be empty", suggestions.isEmpty()); + } else { + assertFalse("suggestions shouldn't be empty", suggestions.isEmpty()); + Map.Entry entry = suggestions.entrySet().iterator().next(); + assertEquals("another", entry.getKey()); + } + + return null; + }); } - } diff --git a/solr/core/src/test/org/apache/solr/spelling/FileBasedSpellCheckerTest.java b/solr/core/src/test/org/apache/solr/spelling/FileBasedSpellCheckerTest.java index a79ec84f1d3..a7569d85268 100644 --- a/solr/core/src/test/org/apache/solr/spelling/FileBasedSpellCheckerTest.java +++ b/solr/core/src/test/org/apache/solr/spelling/FileBasedSpellCheckerTest.java @@ -19,7 +19,6 @@ import java.io.File; import java.util.Collection; import java.util.Map; - import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase.SuppressTempFileChecks; import org.apache.solr.SolrTestCaseJ4; @@ -30,27 +29,29 @@ import org.junit.Test; /** - * * @since solr 1.3 - **/ -@SuppressTempFileChecks(bugUrl = "https://issues.apache.org/jira/browse/SOLR-1877 Spellcheck IndexReader leak bug?") + */ +@SuppressTempFileChecks( + bugUrl = "https://issues.apache.org/jira/browse/SOLR-1877 Spellcheck IndexReader leak bug?") public class FileBasedSpellCheckerTest extends SolrTestCaseJ4 { private static SpellingQueryConverter queryConverter; @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema.xml"); - //Index something with a title + initCore("solrconfig.xml", "schema.xml"); + // Index something with a title assertNull(h.validateUpdate(adoc("id", "0", "teststop", "This is a title"))); - assertNull(h.validateUpdate(adoc("id", "1", "teststop", "The quick reb fox jumped over the lazy brown dogs."))); + assertNull( + h.validateUpdate( + adoc("id", "1", "teststop", "The quick reb fox jumped over the lazy brown dogs."))); assertNull(h.validateUpdate(adoc("id", "2", "teststop", "This is a Solr"))); assertNull(h.validateUpdate(adoc("id", "3", "teststop", "solr foo"))); assertNull(h.validateUpdate(commit())); queryConverter = new SimpleQueryConverter(); queryConverter.init(new NamedList<>()); } - + @AfterClass public static void afterClass() { queryConverter = null; @@ -73,24 +74,29 @@ public void test() throws Exception { assertTrue(dictName + " is not equal to " + "external", dictName.equals("external") == 
true); checker.build(core, null); - h.getCore().withSearcher(searcher -> { - Collection tokens = queryConverter.convert("fob"); - SpellingOptions spellOpts = new SpellingOptions(tokens, searcher.getIndexReader()); - SpellingResult result = checker.getSuggestions(spellOpts); - assertTrue("result is null and it shouldn't be", result != null); - Map suggestions = result.get(tokens.iterator().next()); - Map.Entry entry = suggestions.entrySet().iterator().next(); - assertTrue(entry.getKey() + " is not equal to " + "foo", entry.getKey().equals("foo") == true); - assertTrue(entry.getValue() + " does not equal: " + SpellingResult.NO_FREQUENCY_INFO, entry.getValue() == SpellingResult.NO_FREQUENCY_INFO); - - spellOpts.tokens = queryConverter.convert("super"); - result = checker.getSuggestions(spellOpts); - assertTrue("result is null and it shouldn't be", result != null); - suggestions = result.get(tokens.iterator().next()); - assertTrue("suggestions is not null and it should be", suggestions == null); - return null; - }); - + h.getCore() + .withSearcher( + searcher -> { + Collection tokens = queryConverter.convert("fob"); + SpellingOptions spellOpts = new SpellingOptions(tokens, searcher.getIndexReader()); + SpellingResult result = checker.getSuggestions(spellOpts); + assertTrue("result is null and it shouldn't be", result != null); + Map suggestions = result.get(tokens.iterator().next()); + Map.Entry entry = suggestions.entrySet().iterator().next(); + assertTrue( + entry.getKey() + " is not equal to " + "foo", + entry.getKey().equals("foo") == true); + assertTrue( + entry.getValue() + " does not equal: " + SpellingResult.NO_FREQUENCY_INFO, + entry.getValue() == SpellingResult.NO_FREQUENCY_INFO); + + spellOpts.tokens = queryConverter.convert("super"); + result = checker.getSuggestions(spellOpts); + assertTrue("result is null and it shouldn't be", result != null); + suggestions = result.get(tokens.iterator().next()); + assertTrue("suggestions is not null and it should be", suggestions == null); + return null; + }); } @Test @@ -112,30 +118,36 @@ public void testFieldType() throws Exception { checker.build(core, null); Collection tokens = queryConverter.convert("Solar"); - h.getCore().withSearcher(searcher -> { - SpellingOptions spellOpts = new SpellingOptions(tokens, searcher.getIndexReader()); - SpellingResult result = checker.getSuggestions(spellOpts); - assertTrue("result is null and it shouldn't be", result != null); - //should be lowercased, b/c we are using a lowercasing analyzer - Map suggestions = result.get(tokens.iterator().next()); - assertTrue("suggestions Size: " + suggestions.size() + " is not: " + 1, suggestions.size() == 1); - Map.Entry entry = suggestions.entrySet().iterator().next(); - assertTrue(entry.getKey() + " is not equal to " + "solr", entry.getKey().equals("solr") == true); - assertTrue(entry.getValue() + " does not equal: " + SpellingResult.NO_FREQUENCY_INFO, entry.getValue() == SpellingResult.NO_FREQUENCY_INFO); - - //test something not in the spell checker - spellOpts.tokens = queryConverter.convert("super"); - result = checker.getSuggestions(spellOpts); - assertTrue("result is null and it shouldn't be", result != null); - suggestions = result.get(tokens.iterator().next()); - assertTrue("suggestions is not null and it should be", suggestions == null); - return null; - }); + h.getCore() + .withSearcher( + searcher -> { + SpellingOptions spellOpts = new SpellingOptions(tokens, searcher.getIndexReader()); + SpellingResult result = checker.getSuggestions(spellOpts); + 
assertTrue("result is null and it shouldn't be", result != null); + // should be lowercased, b/c we are using a lowercasing analyzer + Map suggestions = result.get(tokens.iterator().next()); + assertTrue( + "suggestions Size: " + suggestions.size() + " is not: " + 1, + suggestions.size() == 1); + Map.Entry entry = suggestions.entrySet().iterator().next(); + assertTrue( + entry.getKey() + " is not equal to " + "solr", + entry.getKey().equals("solr") == true); + assertTrue( + entry.getValue() + " does not equal: " + SpellingResult.NO_FREQUENCY_INFO, + entry.getValue() == SpellingResult.NO_FREQUENCY_INFO); + + // test something not in the spell checker + spellOpts.tokens = queryConverter.convert("super"); + result = checker.getSuggestions(spellOpts); + assertTrue("result is null and it shouldn't be", result != null); + suggestions = result.get(tokens.iterator().next()); + assertTrue("suggestions is not null and it should be", suggestions == null); + return null; + }); } - /** - * No indexDir location set - */ + /** No indexDir location set */ @Test public void testRAMDirectory() throws Exception { FileBasedSpellChecker checker = new FileBasedSpellChecker(); @@ -154,25 +166,32 @@ public void testRAMDirectory() throws Exception { assertTrue(dictName + " is not equal to " + "external", dictName.equals("external") == true); checker.build(core, null); - h.getCore().withSearcher(searcher -> { - Collection tokens = queryConverter.convert("solar"); - SpellingOptions spellOpts = new SpellingOptions(tokens, searcher.getIndexReader()); - SpellingResult result = checker.getSuggestions(spellOpts); - assertTrue("result is null and it shouldn't be", result != null); - //should be lowercased, b/c we are using a lowercasing analyzer - Map suggestions = result.get(tokens.iterator().next()); - assertTrue("suggestions Size: " + suggestions.size() + " is not: " + 1, suggestions.size() == 1); - Map.Entry entry = suggestions.entrySet().iterator().next(); - assertTrue(entry.getKey() + " is not equal to " + "solr", entry.getKey().equals("solr") == true); - assertTrue(entry.getValue() + " does not equal: " + SpellingResult.NO_FREQUENCY_INFO, entry.getValue() == SpellingResult.NO_FREQUENCY_INFO); - - - spellOpts.tokens = queryConverter.convert("super"); - result = checker.getSuggestions(spellOpts); - assertTrue("result is null and it shouldn't be", result != null); - suggestions = result.get(spellOpts.tokens.iterator().next()); - assertTrue("suggestions size should be 0", suggestions.size()==0); - return null; - }); + h.getCore() + .withSearcher( + searcher -> { + Collection tokens = queryConverter.convert("solar"); + SpellingOptions spellOpts = new SpellingOptions(tokens, searcher.getIndexReader()); + SpellingResult result = checker.getSuggestions(spellOpts); + assertTrue("result is null and it shouldn't be", result != null); + // should be lowercased, b/c we are using a lowercasing analyzer + Map suggestions = result.get(tokens.iterator().next()); + assertTrue( + "suggestions Size: " + suggestions.size() + " is not: " + 1, + suggestions.size() == 1); + Map.Entry entry = suggestions.entrySet().iterator().next(); + assertTrue( + entry.getKey() + " is not equal to " + "solr", + entry.getKey().equals("solr") == true); + assertTrue( + entry.getValue() + " does not equal: " + SpellingResult.NO_FREQUENCY_INFO, + entry.getValue() == SpellingResult.NO_FREQUENCY_INFO); + + spellOpts.tokens = queryConverter.convert("super"); + result = checker.getSuggestions(spellOpts); + assertTrue("result is null and it shouldn't be", result 
!= null); + suggestions = result.get(spellOpts.tokens.iterator().next()); + assertTrue("suggestions size should be 0", suggestions.size() == 0); + return null; + }); } } diff --git a/solr/core/src/test/org/apache/solr/spelling/IndexBasedSpellCheckerTest.java b/solr/core/src/test/org/apache/solr/spelling/IndexBasedSpellCheckerTest.java index c3d0042c910..4f82333e557 100644 --- a/solr/core/src/test/org/apache/solr/spelling/IndexBasedSpellCheckerTest.java +++ b/solr/core/src/test/org/apache/solr/spelling/IndexBasedSpellCheckerTest.java @@ -21,7 +21,6 @@ import java.util.Comparator; import java.util.Date; import java.util.Map; - import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; @@ -48,32 +47,33 @@ /** * @since solr 1.3 */ -@SuppressTempFileChecks(bugUrl = "https://issues.apache.org/jira/browse/SOLR-1877 Spellcheck IndexReader leak bug?") +@SuppressTempFileChecks( + bugUrl = "https://issues.apache.org/jira/browse/SOLR-1877 Spellcheck IndexReader leak bug?") public class IndexBasedSpellCheckerTest extends SolrTestCaseJ4 { protected static SpellingQueryConverter queryConverter; - protected static String[] DOCS = new String[]{ - "This is a title", - "The quick reb fox jumped over the lazy brown dogs.", - "This is a document", - "another document", - "red fox", - "green bun", - "green bud" - }; - + protected static String[] DOCS = + new String[] { + "This is a title", + "The quick reb fox jumped over the lazy brown dogs.", + "This is a document", + "another document", + "red fox", + "green bun", + "green bud" + }; @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema.xml"); - //Index something with a title + initCore("solrconfig.xml", "schema.xml"); + // Index something with a title for (int i = 0; i < DOCS.length; i++) { assertNull(h.validateUpdate(adoc("id", String.valueOf(i), "title", DOCS[i]))); } assertNull(h.validateUpdate(commit())); queryConverter = new SimpleQueryConverter(); } - + @AfterClass public static void afterClass() { queryConverter = null; @@ -81,7 +81,8 @@ public static void afterClass() { @Test public void testComparator() throws Exception { - SpellCheckComponent component = (SpellCheckComponent) h.getCore().getSearchComponent("spellcheck"); + SpellCheckComponent component = + (SpellCheckComponent) h.getCore().getSearchComponent("spellcheck"); assertNotNull(component); AbstractLuceneSpellChecker spellChecker; Comparator comp; @@ -96,8 +97,6 @@ public void testComparator() throws Exception { comp = spellChecker.getSpellChecker().getComparator(); assertNotNull(comp); assertTrue(comp instanceof SampleComparator); - - } @Test @@ -115,63 +114,82 @@ public void testSpelling() throws Exception { SolrCore core = h.getCore(); String dictName = checker.init(spellchecker, core); - assertTrue(dictName + " is not equal to " + SolrSpellChecker.DEFAULT_DICTIONARY_NAME, - dictName.equals(SolrSpellChecker.DEFAULT_DICTIONARY_NAME) == true); - h.getCore().withSearcher(searcher -> { - checker.build(core, searcher); - - IndexReader reader = searcher.getIndexReader(); - Collection tokens = queryConverter.convert("documemt"); - SpellingOptions spellOpts = new SpellingOptions(tokens, reader); - SpellingResult result = checker.getSuggestions(spellOpts); - assertTrue("result is null and it shouldn't be", result != null); - //should be lowercased, b/c we are using a lowercasing analyzer - Map suggestions = result.get(spellOpts.tokens.iterator().next()); - 
assertTrue("documemt is null and it shouldn't be", suggestions != null); - assertTrue("documemt Size: " + suggestions.size() + " is not: " + 1, suggestions.size() == 1); - Map.Entry entry = suggestions.entrySet().iterator().next(); - assertTrue(entry.getKey() + " is not equal to " + "document", entry.getKey().equals("document") == true); - assertTrue(entry.getValue() + " does not equal: " + SpellingResult.NO_FREQUENCY_INFO, entry.getValue() == SpellingResult.NO_FREQUENCY_INFO); - - //test something not in the spell checker - spellOpts.tokens = queryConverter.convert("super"); - result = checker.getSuggestions(spellOpts); - assertTrue("result is null and it shouldn't be", result != null); - suggestions = result.get(spellOpts.tokens.iterator().next()); - assertTrue("suggestions size should be 0", suggestions.size()==0); - - //test something that is spelled correctly - spellOpts.tokens = queryConverter.convert("document"); - result = checker.getSuggestions(spellOpts); - assertTrue("result is null and it shouldn't be", result != null); - suggestions = result.get(spellOpts.tokens.iterator().next()); - assertTrue("suggestions is null and it shouldn't be", suggestions == null); - - //Has multiple possibilities, but the exact exists, so that should be returned - spellOpts.tokens = queryConverter.convert("red"); - spellOpts.count = 2; - result = checker.getSuggestions(spellOpts); - assertNotNull(result); - suggestions = result.get(spellOpts.tokens.iterator().next()); - assertTrue("suggestions is not null and it should be", suggestions == null); - - //Try out something which should have multiple suggestions - spellOpts.tokens = queryConverter.convert("bug"); - result = checker.getSuggestions(spellOpts); - assertNotNull(result); - suggestions = result.get(spellOpts.tokens.iterator().next()); - assertNotNull(suggestions); - assertTrue("suggestions Size: " + suggestions.size() + " is not: " + 2, suggestions.size() == 2); - - entry = suggestions.entrySet().iterator().next(); - assertTrue(entry.getKey() + " is equal to " + "bug and it shouldn't be", entry.getKey().equals("bug") == false); - assertTrue(entry.getValue() + " does not equal: " + SpellingResult.NO_FREQUENCY_INFO, entry.getValue() == SpellingResult.NO_FREQUENCY_INFO); - - entry = suggestions.entrySet().iterator().next(); - assertTrue(entry.getKey() + " is equal to " + "bug and it shouldn't be", entry.getKey().equals("bug") == false); - assertTrue(entry.getValue() + " does not equal: " + SpellingResult.NO_FREQUENCY_INFO, entry.getValue() == SpellingResult.NO_FREQUENCY_INFO); - return null; - }); + assertTrue( + dictName + " is not equal to " + SolrSpellChecker.DEFAULT_DICTIONARY_NAME, + dictName.equals(SolrSpellChecker.DEFAULT_DICTIONARY_NAME) == true); + h.getCore() + .withSearcher( + searcher -> { + checker.build(core, searcher); + + IndexReader reader = searcher.getIndexReader(); + Collection tokens = queryConverter.convert("documemt"); + SpellingOptions spellOpts = new SpellingOptions(tokens, reader); + SpellingResult result = checker.getSuggestions(spellOpts); + assertTrue("result is null and it shouldn't be", result != null); + // should be lowercased, b/c we are using a lowercasing analyzer + Map suggestions = result.get(spellOpts.tokens.iterator().next()); + assertTrue("documemt is null and it shouldn't be", suggestions != null); + assertTrue( + "documemt Size: " + suggestions.size() + " is not: " + 1, + suggestions.size() == 1); + Map.Entry entry = suggestions.entrySet().iterator().next(); + assertTrue( + entry.getKey() + " is not 
equal to " + "document", + entry.getKey().equals("document") == true); + assertTrue( + entry.getValue() + " does not equal: " + SpellingResult.NO_FREQUENCY_INFO, + entry.getValue() == SpellingResult.NO_FREQUENCY_INFO); + + // test something not in the spell checker + spellOpts.tokens = queryConverter.convert("super"); + result = checker.getSuggestions(spellOpts); + assertTrue("result is null and it shouldn't be", result != null); + suggestions = result.get(spellOpts.tokens.iterator().next()); + assertTrue("suggestions size should be 0", suggestions.size() == 0); + + // test something that is spelled correctly + spellOpts.tokens = queryConverter.convert("document"); + result = checker.getSuggestions(spellOpts); + assertTrue("result is null and it shouldn't be", result != null); + suggestions = result.get(spellOpts.tokens.iterator().next()); + assertTrue("suggestions is null and it shouldn't be", suggestions == null); + + // Has multiple possibilities, but the exact exists, so that should be returned + spellOpts.tokens = queryConverter.convert("red"); + spellOpts.count = 2; + result = checker.getSuggestions(spellOpts); + assertNotNull(result); + suggestions = result.get(spellOpts.tokens.iterator().next()); + assertTrue("suggestions is not null and it should be", suggestions == null); + + // Try out something which should have multiple suggestions + spellOpts.tokens = queryConverter.convert("bug"); + result = checker.getSuggestions(spellOpts); + assertNotNull(result); + suggestions = result.get(spellOpts.tokens.iterator().next()); + assertNotNull(suggestions); + assertTrue( + "suggestions Size: " + suggestions.size() + " is not: " + 2, + suggestions.size() == 2); + + entry = suggestions.entrySet().iterator().next(); + assertTrue( + entry.getKey() + " is equal to " + "bug and it shouldn't be", + entry.getKey().equals("bug") == false); + assertTrue( + entry.getValue() + " does not equal: " + SpellingResult.NO_FREQUENCY_INFO, + entry.getValue() == SpellingResult.NO_FREQUENCY_INFO); + + entry = suggestions.entrySet().iterator().next(); + assertTrue( + entry.getKey() + " is equal to " + "bug and it shouldn't be", + entry.getKey().equals("bug") == false); + assertTrue( + entry.getValue() + " does not equal: " + SpellingResult.NO_FREQUENCY_INFO, + entry.getValue() == SpellingResult.NO_FREQUENCY_INFO); + return null; + }); } @Test @@ -187,43 +205,52 @@ public void testExtendedResults() throws Exception { spellchecker.add(AbstractLuceneSpellChecker.SPELLCHECKER_ARG_NAME, spellchecker); SolrCore core = h.getCore(); String dictName = checker.init(spellchecker, core); - assertTrue(dictName + " is not equal to " + SolrSpellChecker.DEFAULT_DICTIONARY_NAME, - dictName.equals(SolrSpellChecker.DEFAULT_DICTIONARY_NAME) == true); - h.getCore().withSearcher(searcher -> { - checker.build(core, searcher); - - IndexReader reader = searcher.getIndexReader(); - Collection tokens = queryConverter.convert("documemt"); - SpellingOptions spellOpts = new SpellingOptions(tokens, reader, 1, SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX, true, 0.5f, null); - SpellingResult result = checker.getSuggestions(spellOpts); - assertTrue("result is null and it shouldn't be", result != null); - //should be lowercased, b/c we are using a lowercasing analyzer - Map suggestions = result.get(spellOpts.tokens.iterator().next()); - assertTrue("documemt is null and it shouldn't be", suggestions != null); - assertTrue("documemt Size: " + suggestions.size() + " is not: " + 1, suggestions.size() == 1); - Map.Entry entry = 
suggestions.entrySet().iterator().next(); - assertTrue(entry.getKey() + " is not equal to " + "document", entry.getKey().equals("document") == true); - assertTrue(entry.getValue() + " does not equal: " + 2, entry.getValue() == 2); - - //test something not in the spell checker - spellOpts.tokens = queryConverter.convert("super"); - result = checker.getSuggestions(spellOpts); - assertTrue("result is null and it shouldn't be", result != null); - suggestions = result.get(spellOpts.tokens.iterator().next()); - assertTrue("suggestions size should be 0", suggestions.size()==0); - - spellOpts.tokens = queryConverter.convert("document"); - result = checker.getSuggestions(spellOpts); - assertTrue("result is null and it shouldn't be", result != null); - suggestions = result.get(spellOpts.tokens.iterator().next()); - assertTrue("suggestions is not null and it should be", suggestions == null); - return null; - }); + assertTrue( + dictName + " is not equal to " + SolrSpellChecker.DEFAULT_DICTIONARY_NAME, + dictName.equals(SolrSpellChecker.DEFAULT_DICTIONARY_NAME) == true); + h.getCore() + .withSearcher( + searcher -> { + checker.build(core, searcher); + + IndexReader reader = searcher.getIndexReader(); + Collection tokens = queryConverter.convert("documemt"); + SpellingOptions spellOpts = + new SpellingOptions( + tokens, reader, 1, SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX, true, 0.5f, null); + SpellingResult result = checker.getSuggestions(spellOpts); + assertTrue("result is null and it shouldn't be", result != null); + // should be lowercased, b/c we are using a lowercasing analyzer + Map suggestions = result.get(spellOpts.tokens.iterator().next()); + assertTrue("documemt is null and it shouldn't be", suggestions != null); + assertTrue( + "documemt Size: " + suggestions.size() + " is not: " + 1, + suggestions.size() == 1); + Map.Entry entry = suggestions.entrySet().iterator().next(); + assertTrue( + entry.getKey() + " is not equal to " + "document", + entry.getKey().equals("document") == true); + assertTrue(entry.getValue() + " does not equal: " + 2, entry.getValue() == 2); + + // test something not in the spell checker + spellOpts.tokens = queryConverter.convert("super"); + result = checker.getSuggestions(spellOpts); + assertTrue("result is null and it shouldn't be", result != null); + suggestions = result.get(spellOpts.tokens.iterator().next()); + assertTrue("suggestions size should be 0", suggestions.size() == 0); + + spellOpts.tokens = queryConverter.convert("document"); + result = checker.getSuggestions(spellOpts); + assertTrue("result is null and it shouldn't be", result != null); + suggestions = result.get(spellOpts.tokens.iterator().next()); + assertTrue("suggestions is not null and it should be", suggestions == null); + return null; + }); } - private static class TestSpellChecker extends IndexBasedSpellChecker{ + private static class TestSpellChecker extends IndexBasedSpellChecker { @Override - public SpellChecker getSpellChecker(){ + public SpellChecker getSpellChecker() { return spellChecker; } } @@ -238,48 +265,51 @@ public void testAlternateDistance() throws Exception { spellchecker.add(AbstractLuceneSpellChecker.INDEX_DIR, indexDir.getAbsolutePath()); spellchecker.add(AbstractLuceneSpellChecker.FIELD, "title"); spellchecker.add(AbstractLuceneSpellChecker.SPELLCHECKER_ARG_NAME, spellchecker); - spellchecker.add(AbstractLuceneSpellChecker.STRING_DISTANCE, JaroWinklerDistance.class.getName()); + spellchecker.add( + AbstractLuceneSpellChecker.STRING_DISTANCE, 
JaroWinklerDistance.class.getName()); SolrCore core = h.getCore(); String dictName = checker.init(spellchecker, core); - assertTrue(dictName + " is not equal to " + SolrSpellChecker.DEFAULT_DICTIONARY_NAME, - dictName.equals(SolrSpellChecker.DEFAULT_DICTIONARY_NAME) == true); - h.getCore().withSearcher(searcher -> { - checker.build(core, searcher); - SpellChecker sc = checker.getSpellChecker(); - assertTrue("sc is null and it shouldn't be", sc != null); - StringDistance sd = sc.getStringDistance(); - assertTrue("sd is null and it shouldn't be", sd != null); - assertTrue("sd is not an instance of " + JaroWinklerDistance.class.getName(), sd instanceof JaroWinklerDistance); - return null; - }); - + assertTrue( + dictName + " is not equal to " + SolrSpellChecker.DEFAULT_DICTIONARY_NAME, + dictName.equals(SolrSpellChecker.DEFAULT_DICTIONARY_NAME) == true); + h.getCore() + .withSearcher( + searcher -> { + checker.build(core, searcher); + SpellChecker sc = checker.getSpellChecker(); + assertTrue("sc is null and it shouldn't be", sc != null); + StringDistance sd = sc.getStringDistance(); + assertTrue("sd is null and it shouldn't be", sd != null); + assertTrue( + "sd is not an instance of " + JaroWinklerDistance.class.getName(), + sd instanceof JaroWinklerDistance); + return null; + }); } @Test public void testAlternateLocation() throws Exception { - String[] ALT_DOCS = new String[]{ - "jumpin jack flash", - "Sargent Peppers Lonely Hearts Club Band", - "Born to Run", - "Thunder Road", - "Londons Burning", - "A Horse with No Name", - "Sweet Caroline" - }; + String[] ALT_DOCS = + new String[] { + "jumpin jack flash", + "Sargent Peppers Lonely Hearts Club Band", + "Born to Run", + "Thunder Road", + "Londons Burning", + "A Horse with No Name", + "Sweet Caroline" + }; IndexBasedSpellChecker checker = new IndexBasedSpellChecker(); NamedList spellchecker = new NamedList<>(); spellchecker.add("classname", IndexBasedSpellChecker.class.getName()); - + File tmpDir = createTempDir().toFile(); File indexDir = new File(tmpDir, "spellingIdx"); - //create a standalone index + // create a standalone index File altIndexDir = new File(tmpDir, "alternateIdx" + new Date().getTime()); Directory dir = newFSDirectory(altIndexDir.toPath()); - IndexWriter iw = new IndexWriter( - dir, - new IndexWriterConfig(new WhitespaceAnalyzer()) - ); + IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(new WhitespaceAnalyzer())); for (int i = 0; i < ALT_DOCS.length; i++) { Document doc = new Document(); doc.add(new TextField("title", ALT_DOCS[i], Field.Store.YES)); @@ -295,38 +325,45 @@ public void testAlternateLocation() throws Exception { spellchecker.add(AbstractLuceneSpellChecker.SPELLCHECKER_ARG_NAME, spellchecker); SolrCore core = h.getCore(); String dictName = checker.init(spellchecker, core); - assertTrue(dictName + " is not equal to " + SolrSpellChecker.DEFAULT_DICTIONARY_NAME, - dictName.equals(SolrSpellChecker.DEFAULT_DICTIONARY_NAME) == true); - h.getCore().withSearcher(searcher -> { - checker.build(core, searcher); - - IndexReader reader = searcher.getIndexReader(); - Collection tokens = queryConverter.convert("flesh"); - SpellingOptions spellOpts = new SpellingOptions(tokens, reader, 1, SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX, true, 0.5f, null); - SpellingResult result = checker.getSuggestions(spellOpts); - assertTrue("result is null and it shouldn't be", result != null); - //should be lowercased, b/c we are using a lowercasing analyzer - Map suggestions = result.get(spellOpts.tokens.iterator().next()); - 
assertTrue("flesh is null and it shouldn't be", suggestions != null); - assertTrue("flesh Size: " + suggestions.size() + " is not: " + 1, suggestions.size() == 1); - Map.Entry entry = suggestions.entrySet().iterator().next(); - assertTrue(entry.getKey() + " is not equal to " + "flash", entry.getKey().equals("flash") == true); - assertTrue(entry.getValue() + " does not equal: " + 1, entry.getValue() == 1); - - //test something not in the spell checker - spellOpts.tokens = queryConverter.convert("super"); - result = checker.getSuggestions(spellOpts); - assertTrue("result is null and it shouldn't be", result != null); - suggestions = result.get(spellOpts.tokens.iterator().next()); - assertTrue("suggestions size should be 0", suggestions.size()==0); - - spellOpts.tokens = queryConverter.convert("Caroline"); - result = checker.getSuggestions(spellOpts); - assertTrue("result is null and it shouldn't be", result != null); - suggestions = result.get(spellOpts.tokens.iterator().next()); - assertTrue("suggestions is not null and it should be", suggestions == null); - return null; - }); + assertTrue( + dictName + " is not equal to " + SolrSpellChecker.DEFAULT_DICTIONARY_NAME, + dictName.equals(SolrSpellChecker.DEFAULT_DICTIONARY_NAME) == true); + h.getCore() + .withSearcher( + searcher -> { + checker.build(core, searcher); + + IndexReader reader = searcher.getIndexReader(); + Collection tokens = queryConverter.convert("flesh"); + SpellingOptions spellOpts = + new SpellingOptions( + tokens, reader, 1, SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX, true, 0.5f, null); + SpellingResult result = checker.getSuggestions(spellOpts); + assertTrue("result is null and it shouldn't be", result != null); + // should be lowercased, b/c we are using a lowercasing analyzer + Map suggestions = result.get(spellOpts.tokens.iterator().next()); + assertTrue("flesh is null and it shouldn't be", suggestions != null); + assertTrue( + "flesh Size: " + suggestions.size() + " is not: " + 1, suggestions.size() == 1); + Map.Entry entry = suggestions.entrySet().iterator().next(); + assertTrue( + entry.getKey() + " is not equal to " + "flash", + entry.getKey().equals("flash") == true); + assertTrue(entry.getValue() + " does not equal: " + 1, entry.getValue() == 1); + + // test something not in the spell checker + spellOpts.tokens = queryConverter.convert("super"); + result = checker.getSuggestions(spellOpts); + assertTrue("result is null and it shouldn't be", result != null); + suggestions = result.get(spellOpts.tokens.iterator().next()); + assertTrue("suggestions size should be 0", suggestions.size() == 0); + + spellOpts.tokens = queryConverter.convert("Caroline"); + result = checker.getSuggestions(spellOpts); + assertTrue("result is null and it shouldn't be", result != null); + suggestions = result.get(spellOpts.tokens.iterator().next()); + assertTrue("suggestions is not null and it should be", suggestions == null); + return null; + }); } } - diff --git a/solr/core/src/test/org/apache/solr/spelling/SimpleQueryConverter.java b/solr/core/src/test/org/apache/solr/spelling/SimpleQueryConverter.java index bd647b31f90..9cda54676b1 100644 --- a/solr/core/src/test/org/apache/solr/spelling/SimpleQueryConverter.java +++ b/solr/core/src/test/org/apache/solr/spelling/SimpleQueryConverter.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.util.Collection; import java.util.HashSet; - import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import 
org.apache.lucene.analysis.tokenattributes.CharTermAttribute; @@ -29,18 +28,17 @@ import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.analysis.tokenattributes.TypeAttribute; - /** - * * @since solr 1.3 - **/ + */ class SimpleQueryConverter extends SpellingQueryConverter { @Override public Collection convert(String origQuery) { Collection result = new HashSet<>(); - try (WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer(); TokenStream ts = analyzer.tokenStream("", origQuery)) { + try (WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer(); + TokenStream ts = analyzer.tokenStream("", origQuery)) { // TODO: support custom attributes CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class); OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class); @@ -61,7 +59,7 @@ public Collection convert(String origQuery) { tok.setType(typeAtt.type()); result.add(tok); } - ts.end(); + ts.end(); return result; } catch (IOException e) { throw new RuntimeException(e); diff --git a/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorTest.java b/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorTest.java index 31aac34f121..92a60c36c2d 100644 --- a/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorTest.java +++ b/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorTest.java @@ -15,19 +15,19 @@ * limitations under the License. */ package org.apache.solr.spelling; + import java.util.HashSet; import java.util.List; import java.util.Set; - import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.lucene.util.LuceneTestCase.SuppressTempFileChecks; import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.CursorMarkParams; import org.apache.solr.common.params.GroupParams; import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.params.SpellingParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SimpleOrderedMap; @@ -42,77 +42,112 @@ import org.junit.Test; @Slow -@SuppressTempFileChecks(bugUrl = "https://issues.apache.org/jira/browse/SOLR-1877 Spellcheck IndexReader leak bug?") +@SuppressTempFileChecks( + bugUrl = "https://issues.apache.org/jira/browse/SOLR-1877 Spellcheck IndexReader leak bug?") public class SpellCheckCollatorTest extends SolrTestCaseJ4 { - + // if adding documents to this test, adjust me. 
- private static final int NUM_DOCS_WITH_TERM_EVERYOTHER=8; - private static final int NUM_DOCS=17; + private static final int NUM_DOCS_WITH_TERM_EVERYOTHER = 8; + private static final int NUM_DOCS = 17; + @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig-spellcheckcomponent.xml", "schema.xml"); - assertU(adoc("id", "0", - "lowerfilt", "faith hope and love to", - "teststop", "metanoia")); - assertU(adoc("id", "1", - "lowerfilt", "faith hope and loaves", - "teststop", "everyother")); - assertU(adoc("id", "2", - "lowerfilt", "fat hops and loaves")); - assertU(adoc("id", "3", - "lowerfilt", "faith of homer", - "teststop", "metanoia", - "teststop", "everyother")); - assertU(adoc("id", "4", - "lowerfilt", "fat of homer")); - assertU(adoc("id", "5", - "lowerfilt1", "peace", - "teststop", "everyother")); - assertU(adoc("id", "6", - "lowerfilt", "hyphenated word")); - assertU(adoc("id", "7", - "teststop", "Jane filled out a form at Charles De Gaulle", - "teststop", "everyother")); - assertU(adoc("id", "8", - "teststop", "Dick flew from Heathrow")); - assertU(adoc("id", "9", - "teststop", "Jane is stuck in customs because Spot chewed up the form", - "teststop", "everyother")); - assertU(adoc("id", "10", - "teststop", "Once in Paris Dick built a fire on the hearth")); - assertU(adoc("id", "11", - "teststop", "Dick waited for Jane as he watched the sparks flow upward", - "teststop", "everyother")); - assertU(adoc("id", "12", - "teststop", "This June parisian rendez-vous is ruined because of a customs snafu")); - assertU(adoc("id", "13", - "teststop", "partisan political machine", - "teststop", "metanoia", - "teststop", "everyother")); - assertU(adoc("id", "14", - "teststop", "metanoia")); - assertU(adoc("id", "15", - "teststop", "metanoia", - "teststop", "everyother")); - assertU(adoc("id", "16", - "teststop", "metanoia")); + assertU( + adoc( + "id", "0", + "lowerfilt", "faith hope and love to", + "teststop", "metanoia")); + assertU( + adoc( + "id", "1", + "lowerfilt", "faith hope and loaves", + "teststop", "everyother")); + assertU( + adoc( + "id", "2", + "lowerfilt", "fat hops and loaves")); + assertU( + adoc( + "id", "3", + "lowerfilt", "faith of homer", + "teststop", "metanoia", + "teststop", "everyother")); + assertU( + adoc( + "id", "4", + "lowerfilt", "fat of homer")); + assertU( + adoc( + "id", "5", + "lowerfilt1", "peace", + "teststop", "everyother")); + assertU( + adoc( + "id", "6", + "lowerfilt", "hyphenated word")); + assertU( + adoc( + "id", "7", + "teststop", "Jane filled out a form at Charles De Gaulle", + "teststop", "everyother")); + assertU( + adoc( + "id", "8", + "teststop", "Dick flew from Heathrow")); + assertU( + adoc( + "id", "9", + "teststop", "Jane is stuck in customs because Spot chewed up the form", + "teststop", "everyother")); + assertU( + adoc( + "id", "10", + "teststop", "Once in Paris Dick built a fire on the hearth")); + assertU( + adoc( + "id", "11", + "teststop", "Dick waited for Jane as he watched the sparks flow upward", + "teststop", "everyother")); + assertU( + adoc( + "id", "12", + "teststop", "This June parisian rendez-vous is ruined because of a customs snafu")); + assertU( + adoc( + "id", "13", + "teststop", "partisan political machine", + "teststop", "metanoia", + "teststop", "everyother")); + assertU( + adoc( + "id", "14", + "teststop", "metanoia")); + assertU( + adoc( + "id", "15", + "teststop", "metanoia", + "teststop", "everyother")); + assertU( + adoc( + "id", "16", + "teststop", "metanoia")); assertU(commit()); } - 
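+  // Corpus facts used by the assertions below: "metanoia" appears in 6 of the 17 docs, "everyother" in 8.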
+ @Test @SuppressWarnings({"unchecked", "rawtypes"}) - public void testCollationWithRangeQuery() throws Exception - { + public void testCollationWithRangeQuery() throws Exception { SolrCore core = h.getCore(); SearchComponent speller = core.getSearchComponent("spellcheck"); assertTrue("speller is null and it shouldn't be", speller != null); - - ModifiableSolrParams params = new ModifiableSolrParams(); + + ModifiableSolrParams params = new ModifiableSolrParams(); params.add(SpellCheckComponent.COMPONENT_NAME, "true"); params.add(SpellingParams.SPELLCHECK_BUILD, "true"); - params.add(SpellingParams.SPELLCHECK_COUNT, "10"); - params.add(SpellingParams.SPELLCHECK_COLLATE, "true"); - params.add(SpellingParams.SPELLCHECK_ALTERNATIVE_TERM_COUNT, "10"); + params.add(SpellingParams.SPELLCHECK_COUNT, "10"); + params.add(SpellingParams.SPELLCHECK_COLLATE, "true"); + params.add(SpellingParams.SPELLCHECK_ALTERNATIVE_TERM_COUNT, "10"); params.add(CommonParams.Q, "id:[1 TO 10] AND lowerfilt:lovw"); { SolrRequestHandler handler = core.getRequestHandler("/spellCheckCompRH"); @@ -125,27 +160,27 @@ public void testCollationWithRangeQuery() throws Exception NamedList spellCheck = (NamedList) values.get("spellcheck"); NamedList collationHolder = (NamedList) spellCheck.get("collations"); List collations = collationHolder.getAll("collation"); - assertTrue(collations.size()==1); - String collation = collations.iterator().next(); + assertTrue(collations.size() == 1); + String collation = collations.iterator().next(); System.out.println(collation); - assertTrue("Incorrect collation: " + collation,"id:[1 TO 10] AND lowerfilt:love".equals(collation)); + assertTrue( + "Incorrect collation: " + collation, "id:[1 TO 10] AND lowerfilt:love".equals(collation)); } } @Test @SuppressWarnings({"unchecked", "rawtypes"}) - public void testCollationWithHypens() throws Exception - { + public void testCollationWithHypens() throws Exception { SolrCore core = h.getCore(); SearchComponent speller = core.getSearchComponent("spellcheck"); assertTrue("speller is null and it shouldn't be", speller != null); - - ModifiableSolrParams params = new ModifiableSolrParams(); + + ModifiableSolrParams params = new ModifiableSolrParams(); params.add(SpellCheckComponent.COMPONENT_NAME, "true"); params.add(SpellingParams.SPELLCHECK_BUILD, "true"); - params.add(SpellingParams.SPELLCHECK_COUNT, "10"); + params.add(SpellingParams.SPELLCHECK_COUNT, "10"); params.add(SpellingParams.SPELLCHECK_COLLATE, "true"); - + params.add(CommonParams.Q, "lowerfilt:(hypenated-wotd)"); { SolrRequestHandler handler = core.getRequestHandler("/spellCheckCompRH"); @@ -158,9 +193,10 @@ public void testCollationWithHypens() throws Exception NamedList spellCheck = (NamedList) values.get("spellcheck"); NamedList collationHolder = (NamedList) spellCheck.get("collations"); List collations = collationHolder.getAll("collation"); - assertTrue(collations.size()==1); - String collation = collations.iterator().next(); - assertTrue("Incorrect collation: " + collation,"lowerfilt:(hyphenated-word)".equals(collation)); + assertTrue(collations.size() == 1); + String collation = collations.iterator().next(); + assertTrue( + "Incorrect collation: " + collation, "lowerfilt:(hyphenated-word)".equals(collation)); } params.remove(CommonParams.Q); @@ -178,55 +214,70 @@ public void testCollationWithHypens() throws Exception NamedList spellCheck = (NamedList) values.get("spellcheck"); NamedList collationHolder = (NamedList) spellCheck.get("collations"); List collations = 
collationHolder.getAll("collation"); - assertTrue(collations.size()==1); + assertTrue(collations.size() == 1); String collation = collations.iterator().next(); - assertTrue("Incorrect collation: " + collation,"hyphenated-word".equals(collation)); + assertTrue("Incorrect collation: " + collation, "hyphenated-word".equals(collation)); } - } - public void testCollateWithOverride() throws Exception - { + public void testCollateWithOverride() throws Exception { assertQ( - req( - SpellCheckComponent.COMPONENT_NAME, "true", - SpellCheckComponent.SPELLCHECK_DICT, "direct", - SpellingParams.SPELLCHECK_COUNT, "10", - SpellingParams.SPELLCHECK_COLLATE, "true", - SpellingParams.SPELLCHECK_MAX_COLLATION_TRIES, "10", - SpellingParams.SPELLCHECK_MAX_COLLATIONS, "10", - "qt", "/spellCheckCompRH", - "defType", "edismax", - "qf", "teststop", - "mm", "1", - CommonParams.Q, "partisian politcal mashine" - ), - "//lst[@name='spellcheck']/lst[@name='collations']/str[@name='collation']='parisian political machine'" - ); + req( + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellCheckComponent.SPELLCHECK_DICT, + "direct", + SpellingParams.SPELLCHECK_COUNT, + "10", + SpellingParams.SPELLCHECK_COLLATE, + "true", + SpellingParams.SPELLCHECK_MAX_COLLATION_TRIES, + "10", + SpellingParams.SPELLCHECK_MAX_COLLATIONS, + "10", + "qt", + "/spellCheckCompRH", + "defType", + "edismax", + "qf", + "teststop", + "mm", + "1", + CommonParams.Q, + "partisian politcal mashine"), + "//lst[@name='spellcheck']/lst[@name='collations']/str[@name='collation']='parisian political machine'"); assertQ( req( - SpellCheckComponent.COMPONENT_NAME, "true", - SpellCheckComponent.SPELLCHECK_DICT, "direct", - SpellingParams.SPELLCHECK_COUNT, "10", - SpellingParams.SPELLCHECK_COLLATE, "true", - SpellingParams.SPELLCHECK_MAX_COLLATION_TRIES, "10", - SpellingParams.SPELLCHECK_MAX_COLLATIONS, "10", - "qt", "/spellCheckCompRH", - "defType", "edismax", - "qf", "teststop", - "mm", "1", - SpellingParams.SPELLCHECK_COLLATE_PARAM_OVERRIDE + "mm", "100%", - CommonParams.Q, "partisian politcal mashine" - ), - "//lst[@name='spellcheck']/lst[@name='collations']/str[@name='collation']='partisan political machine'" - ); - + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellCheckComponent.SPELLCHECK_DICT, + "direct", + SpellingParams.SPELLCHECK_COUNT, + "10", + SpellingParams.SPELLCHECK_COLLATE, + "true", + SpellingParams.SPELLCHECK_MAX_COLLATION_TRIES, + "10", + SpellingParams.SPELLCHECK_MAX_COLLATIONS, + "10", + "qt", + "/spellCheckCompRH", + "defType", + "edismax", + "qf", + "teststop", + "mm", + "1", + SpellingParams.SPELLCHECK_COLLATE_PARAM_OVERRIDE + "mm", + "100%", + CommonParams.Q, + "partisian politcal mashine"), + "//lst[@name='spellcheck']/lst[@name='collations']/str[@name='collation']='partisan political machine'"); } @Test @SuppressWarnings({"unchecked", "rawtypes"}) - public void testCollateWithFilter() throws Exception - { + public void testCollateWithFilter() throws Exception { SolrCore core = h.getCore(); SearchComponent speller = core.getSearchComponent("spellcheck"); assertTrue("speller is null and it shouldn't be", speller != null); @@ -241,8 +292,9 @@ public void testCollateWithFilter() throws Exception params.add(CommonParams.Q, "lowerfilt:(+fauth +home +loane)"); params.add(CommonParams.FQ, "NOT(id:1)"); - //Because a FilterQuery is applied which removes doc id#1 from possible hits, we would - //not want the collations to return us "lowerfilt:(+faith +hope +loaves)" as this only matches doc id#1. 
+ // Because a FilterQuery is applied which removes doc id#1 from possible hits, we would + // not want the collations to return us "lowerfilt:(+faith +hope +loaves)" as this only matches + // doc id#1. SolrRequestHandler handler = core.getRequestHandler("/spellCheckCompRH"); SolrQueryResponse rsp = new SolrQueryResponse(); rsp.addResponseHeader(new SimpleOrderedMap()); @@ -254,15 +306,14 @@ public void testCollateWithFilter() throws Exception NamedList collationHolder = (NamedList) spellCheck.get("collations"); List collations = collationHolder.getAll("collation"); assertTrue(collations.size() > 0); - for(String collation : collations) { + for (String collation : collations) { assertTrue(!collation.equals("lowerfilt:(+faith +hope +loaves)")); } } @Test @SuppressWarnings({"unchecked", "rawtypes"}) - public void testCollateWithMultipleRequestHandlers() throws Exception - { + public void testCollateWithMultipleRequestHandlers() throws Exception { SolrCore core = h.getCore(); SearchComponent speller = core.getSearchComponent("spellcheck"); assertTrue("speller is null and it shouldn't be", speller != null); @@ -277,8 +328,8 @@ public void testCollateWithMultipleRequestHandlers() throws Exception params.add(SpellingParams.SPELLCHECK_MAX_COLLATIONS, "1"); params.add(CommonParams.Q, "peac"); - //SpellCheckCompRH has no "qf" defined. It will not find "peace" from "peac" despite it being in the dictionary - //because requrying against this Request Handler results in 0 hits. + // SpellCheckCompRH has no "qf" defined. It will not find "peace" from "peac" despite it being + // in the dictionary because requerying against this Request Handler results in 0 hits. SolrRequestHandler handler = core.getRequestHandler("/spellCheckCompRH"); SolrQueryResponse rsp = new SolrQueryResponse(); rsp.addResponseHeader(new SimpleOrderedMap()); @@ -291,8 +342,8 @@ public void testCollateWithMultipleRequestHandlers() throws Exception String singleCollation = (String) collationHolder.get("collation"); assertNull(singleCollation); - //SpellCheckCompRH1 has "lowerfilt1" defined in the "qf" param. It will find "peace" from "peac" because - //requrying field "lowerfilt1" returns the hit. + // SpellCheckCompRH1 has "lowerfilt1" defined in the "qf" param. It will find "peace" from + // "peac" because requerying field "lowerfilt1" returns the hit. 
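+    // ("peace" is indexed only in doc id#5, and only in the lowerfilt1 field.)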
params.remove(SpellingParams.SPELLCHECK_BUILD); handler = core.getRequestHandler("/spellCheckCompRH1"); rsp = new SolrQueryResponse(); @@ -374,8 +425,9 @@ public void testExtendedCollate() throws Exception { List collations = collationHolder.getAll("collation"); assertTrue(collations.size() == 2); for (String multipleCollation : collations) { - assertTrue(multipleCollation.equals("lowerfilt:(+faith +hope +love)") - || multipleCollation.equals("lowerfilt:(+faith +hope +loaves)")); + assertTrue( + multipleCollation.equals("lowerfilt:(+faith +hope +love)") + || multipleCollation.equals("lowerfilt:(+faith +hope +loaves)")); } // Testing return multiple collations with expanded collation response @@ -395,14 +447,16 @@ public void testExtendedCollate() throws Exception { assertTrue(expandedCollationList.size() == 2); for (NamedList expandedCollation : expandedCollationList) { String multipleCollation = (String) expandedCollation.get("collationQuery"); - assertTrue(multipleCollation.equals("lowerfilt:(+faith +hope +love)") - || multipleCollation.equals("lowerfilt:(+faith +hope +loaves)")); + assertTrue( + multipleCollation.equals("lowerfilt:(+faith +hope +love)") + || multipleCollation.equals("lowerfilt:(+faith +hope +loaves)")); assertTrue(!usedcollations.contains(multipleCollation)); usedcollations.add(multipleCollation); assertEquals(1L, expandedCollation.get("hits")); - NamedList misspellingsAndCorrections = (NamedList) expandedCollation.get("misspellingsAndCorrections"); + NamedList misspellingsAndCorrections = + (NamedList) expandedCollation.get("misspellingsAndCorrections"); assertTrue(misspellingsAndCorrections.size() == 3); String correctionForFauth = (String) misspellingsAndCorrections.get("fauth"); @@ -416,8 +470,7 @@ public void testExtendedCollate() throws Exception { @Test @SuppressWarnings({"unchecked", "rawtypes"}) - public void testCollateWithGrouping() throws Exception - { + public void testCollateWithGrouping() throws Exception { SolrCore core = h.getCore(); SearchComponent speller = core.getSearchComponent("spellcheck"); assertTrue("speller is null and it shouldn't be", speller != null); @@ -433,8 +486,9 @@ public void testCollateWithGrouping() throws Exception params.add(GroupParams.GROUP, "true"); params.add(GroupParams.GROUP_FIELD, "id"); - //Because a FilterQuery is applied which removes doc id#1 from possible hits, we would - //not want the collations to return us "lowerfilt:(+faith +hope +loaves)" as this only matches doc id#1. + // Because a FilterQuery is applied which removes doc id#1 from possible hits, we would + // not want the collations to return us "lowerfilt:(+faith +hope +loaves)" as this only matches + // doc id#1. 
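+    // (Same corpus and expectation as testCollateWithFilter, but with result grouping on "id" enabled.)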
SolrRequestHandler handler = core.getRequestHandler("/spellCheckCompRH"); SolrQueryResponse rsp = new SolrQueryResponse(); rsp.addResponseHeader(new SimpleOrderedMap()); @@ -451,118 +505,167 @@ public void testCollateWithGrouping() throws Exception @Test public void testContextSensitiveCollate() throws Exception { // DirectSolrSpellChecker IndexBasedSpellChecker - String[] dictionary = {"direct", "default_teststop" }; - for(int i=0 ; i<=1 ; i++) { + String[] dictionary = {"direct", "default_teststop"}; + for (int i = 0; i <= 1; i++) { assertQ( - req( - "q", "teststop:(flew AND form AND heathrow)", - "qt", "/spellCheckCompRH", - "indent", "true", - SpellCheckComponent.COMPONENT_NAME, "true", - SpellCheckComponent.SPELLCHECK_BUILD, "true", - SpellCheckComponent.SPELLCHECK_DICT, dictionary[i], - SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, "true", - SpellCheckComponent.SPELLCHECK_COUNT, "10", - SpellCheckComponent.SPELLCHECK_ALTERNATIVE_TERM_COUNT, "5", - SpellCheckComponent.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST, "0", - SpellCheckComponent.SPELLCHECK_COLLATE, "true", - SpellCheckComponent.SPELLCHECK_MAX_COLLATION_TRIES, "10", - SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, "1", - SpellCheckComponent.SPELLCHECK_COLLATE_EXTENDED_RESULTS, "true" - ), - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='flew']/arr[@name='suggestion']/lst/str[@name='word']='flow'", - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='form']/arr[@name='suggestion']/lst/str[@name='word']='from'", -/* DirectSolrSpellChecker won't suggest if the edit distance > 2, so we can't test for this one... - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='heathrow']/arr[@name='suggestion']/lst/str[@name='word']='hearth'", -*/ - "//lst[@name='spellcheck']/bool[@name='correctlySpelled']='false'", - "//lst[@name='spellcheck']/lst[@name='collations']/lst[@name='collation']/str[@name='collationQuery']='teststop:(flew AND from AND heathrow)'", - "//lst[@name='spellcheck']/lst[@name='collations']/lst[@name='collation']/long[@name='hits']=1", - "//lst[@name='spellcheck']/lst[@name='collations']/lst[@name='collation']/lst[@name='misspellingsAndCorrections']/str[@name='form']='from'" - ); + req( + "q", + "teststop:(flew AND form AND heathrow)", + "qt", + "/spellCheckCompRH", + "indent", + "true", + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellCheckComponent.SPELLCHECK_BUILD, + "true", + SpellCheckComponent.SPELLCHECK_DICT, + dictionary[i], + SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, + "true", + SpellCheckComponent.SPELLCHECK_COUNT, + "10", + SpellCheckComponent.SPELLCHECK_ALTERNATIVE_TERM_COUNT, + "5", + SpellCheckComponent.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST, + "0", + SpellCheckComponent.SPELLCHECK_COLLATE, + "true", + SpellCheckComponent.SPELLCHECK_MAX_COLLATION_TRIES, + "10", + SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, + "1", + SpellCheckComponent.SPELLCHECK_COLLATE_EXTENDED_RESULTS, + "true"), + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='flew']/arr[@name='suggestion']/lst/str[@name='word']='flow'", + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='form']/arr[@name='suggestion']/lst/str[@name='word']='from'", + /* DirectSolrSpellChecker won't suggest if the edit distance > 2, so we can't test for this one... 
+ "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='heathrow']/arr[@name='suggestion']/lst/str[@name='word']='hearth'", + */ + "//lst[@name='spellcheck']/bool[@name='correctlySpelled']='false'", + "//lst[@name='spellcheck']/lst[@name='collations']/lst[@name='collation']/str[@name='collationQuery']='teststop:(flew AND from AND heathrow)'", + "//lst[@name='spellcheck']/lst[@name='collations']/lst[@name='collation']/long[@name='hits']=1", + "//lst[@name='spellcheck']/lst[@name='collations']/lst[@name='collation']/lst[@name='misspellingsAndCorrections']/str[@name='form']='from'"); assertQ( - req( - "q", "teststop:(june AND customs)", - "qt", "/spellCheckCompRH", - "indent", "true", - SpellCheckComponent.COMPONENT_NAME, "true", - SpellCheckComponent.SPELLCHECK_DICT, dictionary[i], - SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, "true", - SpellCheckComponent.SPELLCHECK_COUNT, "10", - SpellCheckComponent.SPELLCHECK_ALTERNATIVE_TERM_COUNT, "5", - SpellCheckComponent.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST, "1", - SpellCheckComponent.SPELLCHECK_COLLATE, "true", - SpellCheckComponent.SPELLCHECK_MAX_COLLATION_TRIES, "10", - SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, "1", - SpellCheckComponent.SPELLCHECK_COLLATE_EXTENDED_RESULTS, "true" - ), - "//result[@numFound=1]", - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='june']/arr[@name='suggestion']/lst/str[@name='word']='jane'", - "//lst[@name='spellcheck']/bool[@name='correctlySpelled']='false'", - "//lst[@name='spellcheck']/lst[@name='collations']/lst[@name='collation']/str[@name='collationQuery']='teststop:(jane AND customs)'", - "//lst[@name='spellcheck']/lst[@name='collations']/lst[@name='collation']/long[@name='hits']=1", - "//lst[@name='spellcheck']/lst[@name='collations']/lst[@name='collation']/lst[@name='misspellingsAndCorrections']/str[@name='june']='jane'" - ); - //SOLR-5090, alternativeTermCount==0 was being evaluated, sometimes would throw NPE - assertQ(req("q", "teststop:(june customs)", "mm", "2", - "qt", "/spellCheckCompRH", - "indent", "true", - SpellCheckComponent.COMPONENT_NAME, "true", - SpellCheckComponent.SPELLCHECK_DICT, dictionary[i], - SpellCheckComponent.SPELLCHECK_COUNT, "10", - SpellCheckComponent.SPELLCHECK_ALTERNATIVE_TERM_COUNT, "0", - SpellCheckComponent.SPELLCHECK_COLLATE, "true")); + req( + "q", + "teststop:(june AND customs)", + "qt", + "/spellCheckCompRH", + "indent", + "true", + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellCheckComponent.SPELLCHECK_DICT, + dictionary[i], + SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, + "true", + SpellCheckComponent.SPELLCHECK_COUNT, + "10", + SpellCheckComponent.SPELLCHECK_ALTERNATIVE_TERM_COUNT, + "5", + SpellCheckComponent.SPELLCHECK_MAX_RESULTS_FOR_SUGGEST, + "1", + SpellCheckComponent.SPELLCHECK_COLLATE, + "true", + SpellCheckComponent.SPELLCHECK_MAX_COLLATION_TRIES, + "10", + SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, + "1", + SpellCheckComponent.SPELLCHECK_COLLATE_EXTENDED_RESULTS, + "true"), + "//result[@numFound=1]", + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='june']/arr[@name='suggestion']/lst/str[@name='word']='jane'", + "//lst[@name='spellcheck']/bool[@name='correctlySpelled']='false'", + "//lst[@name='spellcheck']/lst[@name='collations']/lst[@name='collation']/str[@name='collationQuery']='teststop:(jane AND customs)'", + "//lst[@name='spellcheck']/lst[@name='collations']/lst[@name='collation']/long[@name='hits']=1", + 
"//lst[@name='spellcheck']/lst[@name='collations']/lst[@name='collation']/lst[@name='misspellingsAndCorrections']/str[@name='june']='jane'"); + // SOLR-5090, alternativeTermCount==0 was being evaluated, sometimes would throw NPE + assertQ( + req( + "q", + "teststop:(june customs)", + "mm", + "2", + "qt", + "/spellCheckCompRH", + "indent", + "true", + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellCheckComponent.SPELLCHECK_DICT, + dictionary[i], + SpellCheckComponent.SPELLCHECK_COUNT, + "10", + SpellCheckComponent.SPELLCHECK_ALTERNATIVE_TERM_COUNT, + "0", + SpellCheckComponent.SPELLCHECK_COLLATE, + "true")); } } @Test public void testEstimatedHitCounts() throws Exception { - final String xpathPrefix = - "//lst[@name='spellcheck']/lst[@name='collations']/lst[@name='collation']/"; - final SolrParams reusedParams = params - (SpellCheckComponent.COMPONENT_NAME, "true", - SpellCheckComponent.SPELLCHECK_DICT, "direct", - SpellingParams.SPELLCHECK_COUNT, "1", - SpellingParams.SPELLCHECK_COLLATE, "true", - SpellingParams.SPELLCHECK_MAX_COLLATION_TRIES, "1", - SpellingParams.SPELLCHECK_MAX_COLLATIONS, "1", - SpellingParams.SPELLCHECK_COLLATE_EXTENDED_RESULTS, "true", - "qt", "/spellCheckCompRH"); + final String xpathPrefix = + "//lst[@name='spellcheck']/lst[@name='collations']/lst[@name='collation']/"; + final SolrParams reusedParams = + params( + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellCheckComponent.SPELLCHECK_DICT, + "direct", + SpellingParams.SPELLCHECK_COUNT, + "1", + SpellingParams.SPELLCHECK_COLLATE, + "true", + SpellingParams.SPELLCHECK_MAX_COLLATION_TRIES, + "1", + SpellingParams.SPELLCHECK_MAX_COLLATIONS, + "1", + SpellingParams.SPELLCHECK_COLLATE_EXTENDED_RESULTS, + "true", + "qt", + "/spellCheckCompRH"); // default case, no SPELLCHECK_COLLATE_MAX_COLLECT_DOCS should be exact num hits - assertQ(req(reusedParams, - CommonParams.Q, "teststop:metnoia") - , xpathPrefix + "str[@name='collationQuery']='teststop:metanoia'" - , xpathPrefix + "long[@name='hits']=6" - ); + assertQ( + req(reusedParams, CommonParams.Q, "teststop:metnoia"), + xpathPrefix + "str[@name='collationQuery']='teststop:metanoia'", + xpathPrefix + "long[@name='hits']=6"); - // specifying 0 means "exact" same as default, but specifing a value greater + // specifying 0 means "exact" same as default, but specifing a value greater // then the total number of docs in the index should also result in it // "estimating" and getting exact number as well. 
- for (String val : new String[] { "0", "30", "100", "10000" }) { - assertQ(req(reusedParams, - CommonParams.Q, "teststop:metnoia", - SpellingParams.SPELLCHECK_COLLATE_MAX_COLLECT_DOCS, val) - , xpathPrefix + "str[@name='collationQuery']='teststop:metanoia'" - , xpathPrefix + "long[@name='hits']=6" - ); + for (String val : new String[] {"0", "30", "100", "10000"}) { + assertQ( + req( + reusedParams, + CommonParams.Q, + "teststop:metnoia", + SpellingParams.SPELLCHECK_COLLATE_MAX_COLLECT_DOCS, + val), + xpathPrefix + "str[@name='collationQuery']='teststop:metanoia'", + xpathPrefix + "long[@name='hits']=6"); } - // values between 0 and the num docs in the index should not error, and should + // values between 0 and the num docs in the index should not error, and should // produce an estimate no more then the total number of docs final int iters = atLeast(10); for (int iter = 0; iter < iters; iter++) { final int val = TestUtil.nextInt(random(), 1, 17); - assertQ(req(reusedParams, - CommonParams.Q, "teststop:metnoia", - SpellingParams.SPELLCHECK_COLLATE_MAX_COLLECT_DOCS, ""+val) - , xpathPrefix + "str[@name='collationQuery']='teststop:metanoia'" - , xpathPrefix + "long[@name='hits' and . <= 17 and 0 < .]" - ); + assertQ( + req( + reusedParams, + CommonParams.Q, + "teststop:metnoia", + SpellingParams.SPELLCHECK_COLLATE_MAX_COLLECT_DOCS, + "" + val), + xpathPrefix + "str[@name='collationQuery']='teststop:metanoia'", + xpathPrefix + "long[@name='hits' and . <= 17 and 0 < .]"); } - // "everYother" appears in every other doc in the index, so "everother" + // "everYother" appears in every other doc in the index, so "everother" // should produce a "decent" aproximation of the number of hits (8) // for any 5 <= SPELLCHECK_COLLATE_MAX_COLLECT_DOCS // @@ -570,11 +673,12 @@ public void testEstimatedHitCounts() throws Exception { // since we're dealing with a fairly small index here) for (int val = 5; val <= 20; val++) { String hitsXPath = xpathPrefix + "long[@name='hits']"; // we will append to this... - + if (val <= NUM_DOCS_WITH_TERM_EVERYOTHER) { // strongest assertions we can make given an arbirary MergePolicy on such a small index // is based on the idea that the docs may all come *first* or all come *last* - // and then do the math on what estimate should come from that if we collected *exactly* 'val'.. + // and then do the math on what estimate should come from that if we collected *exactly* + // 'val'.. // // if they are all "first" we will overestimate and assume everything is a match... int max = NUM_DOCS; @@ -582,31 +686,33 @@ public void testEstimatedHitCounts() throws Exception { int min = (/* min collected */ val) / (/* max docs possibly scanned */ NUM_DOCS); hitsXPath += "[" + min + " <= . and . 
<= " + max + "]"; } else { - // we've asked for a number greater then what can possibly be found in our tiny index, which should - // force it to scan all docs so our hits should be exact + // we've asked for a number greater then what can possibly be found in our tiny index, which + // should force it to scan all docs so our hits should be exact hitsXPath += "[.=" + NUM_DOCS_WITH_TERM_EVERYOTHER + "]"; } - assertQ(req(reusedParams, - CommonParams.Q, "teststop:everother", - SpellingParams.SPELLCHECK_COLLATE_MAX_COLLECT_DOCS, ""+val) - , xpathPrefix + "str[@name='collationQuery']='teststop:everyother'" - , hitsXPath - ); + assertQ( + req( + reusedParams, + CommonParams.Q, + "teststop:everother", + SpellingParams.SPELLCHECK_COLLATE_MAX_COLLECT_DOCS, + "" + val), + xpathPrefix + "str[@name='collationQuery']='teststop:everyother'", + hitsXPath); } + } - } @Test @SuppressWarnings({"unchecked", "rawtypes"}) - public void testZeroTries() throws Exception - { + public void testZeroTries() throws Exception { SolrCore core = h.getCore(); SearchComponent speller = core.getSearchComponent("spellcheck"); assertTrue("speller is null and it shouldn't be", speller != null); - - ModifiableSolrParams params = new ModifiableSolrParams(); + + ModifiableSolrParams params = new ModifiableSolrParams(); params.add(SpellCheckComponent.COMPONENT_NAME, "true"); params.add(SpellCheckComponent.SPELLCHECK_BUILD, "true"); - params.add(SpellCheckComponent.SPELLCHECK_COUNT, "10"); + params.add(SpellCheckComponent.SPELLCHECK_COUNT, "10"); params.add(SpellCheckComponent.SPELLCHECK_COLLATE, "true"); params.add(SpellCheckComponent.SPELLCHECK_MAX_COLLATION_TRIES, "0"); params.add(SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, "2"); @@ -623,18 +729,18 @@ public void testZeroTries() throws Exception List collations = (List) collationList.getAll("collation"); assertTrue(collations.size() == 2); } + @Test @SuppressWarnings({"unchecked", "rawtypes"}) - public void testWithCursorMark() throws Exception - { + public void testWithCursorMark() throws Exception { SolrCore core = h.getCore(); SearchComponent speller = core.getSearchComponent("spellcheck"); assertTrue("speller is null and it shouldn't be", speller != null); - - ModifiableSolrParams params = new ModifiableSolrParams(); + + ModifiableSolrParams params = new ModifiableSolrParams(); params.add(SpellCheckComponent.COMPONENT_NAME, "true"); params.add(SpellCheckComponent.SPELLCHECK_BUILD, "true"); - params.add(SpellCheckComponent.SPELLCHECK_COUNT, "10"); + params.add(SpellCheckComponent.SPELLCHECK_COUNT, "10"); params.add(SpellCheckComponent.SPELLCHECK_COLLATE, "true"); params.add(SpellCheckComponent.SPELLCHECK_MAX_COLLATION_TRIES, "2"); params.add(SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, "1"); @@ -653,5 +759,4 @@ public void testWithCursorMark() throws Exception List collations = (List) collationList.getAll("collation"); assertTrue(collations.size() == 1); } - } diff --git a/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorWithCollapseTest.java b/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorWithCollapseTest.java index 70fc455fc2b..482c2fce638 100644 --- a/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorWithCollapseTest.java +++ b/solr/core/src/test/org/apache/solr/spelling/SpellCheckCollatorWithCollapseTest.java @@ -25,7 +25,7 @@ import org.junit.BeforeClass; import org.junit.Test; -public class SpellCheckCollatorWithCollapseTest extends SolrTestCaseJ4 { +public class SpellCheckCollatorWithCollapseTest extends SolrTestCaseJ4 { 
@BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig-collapseqparser.xml", "schema11.xml"); @@ -38,39 +38,53 @@ public void setUp() throws Exception { clearIndex(); assertU(commit()); } - + @Test public void test() throws Exception { - for(int i=0 ; i<200 ; i++) { - String[] doc = {"id","" + i, "group_i", "" + (i % 10), "a_s", ((i%2)==0 ? "love" : "peace")}; + for (int i = 0; i < 200; i++) { + String[] doc = { + "id", "" + i, "group_i", "" + (i % 10), "a_s", ((i % 2) == 0 ? "love" : "peace") + }; assertU(adoc(doc)); - if(i%5==0) { + if (i % 5 == 0) { assertU(commit()); } } assertU(commit()); - for (SolrParams params : new SolrParams[]{ - params(CommonParams.FQ, "{!collapse field=group_i}"), - params(CommonParams.FQ, "${bleh}", "bleh", "{!collapse field=group_i}"), // substitution - params(CommonParams.FQ, "{!tag=collapser}{!collapse field=group_i}"), // with tag & collapse in localparams - params(CommonParams.FQ, "{!collapse tag=collapser field=group_i}") - }) { + for (SolrParams params : + new SolrParams[] { + params(CommonParams.FQ, "{!collapse field=group_i}"), + params(CommonParams.FQ, "${bleh}", "bleh", "{!collapse field=group_i}"), // substitution + params( + CommonParams.FQ, + "{!tag=collapser}{!collapse field=group_i}"), // with tag & collapse in localparams + params(CommonParams.FQ, "{!collapse tag=collapser field=group_i}") + }) { assertQ( - req(params, - SpellCheckComponent.COMPONENT_NAME, "true", - SpellCheckComponent.SPELLCHECK_DICT, "direct", - SpellingParams.SPELLCHECK_COUNT, "10", - SpellingParams.SPELLCHECK_COLLATE, "true", - SpellingParams.SPELLCHECK_MAX_COLLATION_TRIES, "5", - SpellingParams.SPELLCHECK_MAX_COLLATIONS, "1", - CommonParams.Q, "a_s:lpve", - CommonParams.QT, "/spellCheckCompRH_Direct", - SpellingParams.SPELLCHECK_COLLATE_MAX_COLLECT_DOCS, "5", - "expand", "true"), - "//lst[@name='spellcheck']/lst[@name='collations']/str[@name='collation']='a_s:love'" - ); + req( + params, + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellCheckComponent.SPELLCHECK_DICT, + "direct", + SpellingParams.SPELLCHECK_COUNT, + "10", + SpellingParams.SPELLCHECK_COLLATE, + "true", + SpellingParams.SPELLCHECK_MAX_COLLATION_TRIES, + "5", + SpellingParams.SPELLCHECK_MAX_COLLATIONS, + "1", + CommonParams.Q, + "a_s:lpve", + CommonParams.QT, + "/spellCheckCompRH_Direct", + SpellingParams.SPELLCHECK_COLLATE_MAX_COLLECT_DOCS, + "5", + "expand", + "true"), + "//lst[@name='spellcheck']/lst[@name='collations']/str[@name='collation']='a_s:love'"); } } - } diff --git a/solr/core/src/test/org/apache/solr/spelling/SpellPossibilityIteratorTest.java b/solr/core/src/test/org/apache/solr/spelling/SpellPossibilityIteratorTest.java index ff53e042aea..e2731146020 100644 --- a/solr/core/src/test/org/apache/solr/spelling/SpellPossibilityIteratorTest.java +++ b/solr/core/src/test/org/apache/solr/spelling/SpellPossibilityIteratorTest.java @@ -15,11 +15,11 @@ * limitations under the License. 
*/ package org.apache.solr.spelling; + import java.util.HashSet; import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; - import org.apache.solr.SolrTestCaseJ4; import org.junit.Before; import org.junit.Test; @@ -72,7 +72,6 @@ public void setUp() throws Exception { AYE_BEE.put("eight-theta", 0); AYE_BEE.put("nine-iota", 0); - CEE = new LinkedHashMap<>(); CEE.put("one", 0); CEE.put("two", 0); @@ -89,33 +88,33 @@ public void setUp() throws Exception { @Test public void testScalability() throws Exception { Map> lotsaSuggestions = new LinkedHashMap<>(); - lotsaSuggestions.put(TOKEN_AYE , AYE); - lotsaSuggestions.put(TOKEN_BEE , BEE); - lotsaSuggestions.put(TOKEN_CEE , CEE); - - lotsaSuggestions.put(new Token("AYE1", 0, 3), AYE); - lotsaSuggestions.put(new Token("BEE1", 4, 7), BEE); + lotsaSuggestions.put(TOKEN_AYE, AYE); + lotsaSuggestions.put(TOKEN_BEE, BEE); + lotsaSuggestions.put(TOKEN_CEE, CEE); + + lotsaSuggestions.put(new Token("AYE1", 0, 3), AYE); + lotsaSuggestions.put(new Token("BEE1", 4, 7), BEE); lotsaSuggestions.put(new Token("CEE1", 8, 11), CEE); - - lotsaSuggestions.put(new Token("AYE2", 0, 3), AYE); - lotsaSuggestions.put(new Token("BEE2", 4, 7), BEE); + + lotsaSuggestions.put(new Token("AYE2", 0, 3), AYE); + lotsaSuggestions.put(new Token("BEE2", 4, 7), BEE); lotsaSuggestions.put(new Token("CEE2", 8, 11), CEE); - - lotsaSuggestions.put(new Token("AYE3", 0, 3), AYE); - lotsaSuggestions.put(new Token("BEE3", 4, 7), BEE); + + lotsaSuggestions.put(new Token("AYE3", 0, 3), AYE); + lotsaSuggestions.put(new Token("BEE3", 4, 7), BEE); lotsaSuggestions.put(new Token("CEE3", 8, 11), CEE); - - lotsaSuggestions.put(new Token("AYE4", 0, 3), AYE); - lotsaSuggestions.put(new Token("BEE4", 4, 7), BEE); + + lotsaSuggestions.put(new Token("AYE4", 0, 3), AYE); + lotsaSuggestions.put(new Token("BEE4", 4, 7), BEE); lotsaSuggestions.put(new Token("CEE4", 8, 11), CEE); - + PossibilityIterator iter = new PossibilityIterator(lotsaSuggestions, 1000, 10000, false); int count = 0; while (iter.hasNext()) { PossibilityIterator.RankedSpellPossibility rsp = iter.next(); count++; } - assertTrue(count==1000); + assertTrue(count == 1000); lotsaSuggestions.put(new Token("AYE_BEE1", 0, 7), AYE_BEE); lotsaSuggestions.put(new Token("AYE_BEE2", 0, 7), AYE_BEE); @@ -123,33 +122,35 @@ public void testScalability() throws Exception { lotsaSuggestions.put(new Token("AYE_BEE4", 0, 7), AYE_BEE); iter = new PossibilityIterator(lotsaSuggestions, 1000, 10000, true); count = 0; - while (iter.hasNext()) { + while (iter.hasNext()) { PossibilityIterator.RankedSpellPossibility rsp = iter.next(); count++; } - assertTrue(count<100); + assertTrue(count < 100); } @Test public void testSpellPossibilityIterator() throws Exception { Map> suggestions = new LinkedHashMap<>(); - suggestions.put(TOKEN_AYE , AYE); - suggestions.put(TOKEN_BEE , BEE); - suggestions.put(TOKEN_CEE , CEE); - + suggestions.put(TOKEN_AYE, AYE); + suggestions.put(TOKEN_BEE, BEE); + suggestions.put(TOKEN_CEE, CEE); + PossibilityIterator iter = new PossibilityIterator(suggestions, 1000, 10000, false); int count = 0; while (iter.hasNext()) { PossibilityIterator.RankedSpellPossibility rsp = iter.next(); - if(count==0) { + if (count == 0) { assertTrue("I".equals(rsp.corrections.get(0).getCorrection())); assertTrue("alpha".equals(rsp.corrections.get(1).getCorrection())); assertTrue("one".equals(rsp.corrections.get(2).getCorrection())); } count++; } - assertTrue(("Three maps (8*9*10) should return 720 iterations but instead returned " + count), count == 
720); + assertTrue( + ("Three maps (8*9*10) should return 720 iterations but instead returned " + count), + count == 720); suggestions.remove(TOKEN_CEE); iter = new PossibilityIterator(suggestions, 100, 10000, false); @@ -158,7 +159,8 @@ public void testSpellPossibilityIterator() throws Exception { iter.next(); count++; } - assertTrue(("Two maps (8*9) should return 72 iterations but instead returned " + count), count == 72); + assertTrue( + ("Two maps (8*9) should return 72 iterations but instead returned " + count), count == 72); suggestions.remove(TOKEN_BEE); iter = new PossibilityIterator(suggestions, 5, 10000, false); @@ -177,7 +179,6 @@ public void testSpellPossibilityIterator() throws Exception { count++; } assertTrue(("No maps should return 0 iterations but instead returned " + count), count == 0); - } @Test @@ -187,8 +188,9 @@ public void testOverlappingTokens() throws Exception { overlappingSuggestions.put(TOKEN_BEE, BEE); overlappingSuggestions.put(TOKEN_AYE_BEE, AYE_BEE); overlappingSuggestions.put(TOKEN_CEE, CEE); - - PossibilityIterator iter = new PossibilityIterator(overlappingSuggestions, Integer.MAX_VALUE, Integer.MAX_VALUE, true); + + PossibilityIterator iter = + new PossibilityIterator(overlappingSuggestions, Integer.MAX_VALUE, Integer.MAX_VALUE, true); int aCount = 0; int abCount = 0; Set dupChecker = new HashSet<>(); @@ -198,28 +200,28 @@ public void testOverlappingTokens() throws Exception { Token b = null; Token ab = null; Token c = null; - for(SpellCheckCorrection scc : rsp.corrections) { - if(scc.getOriginal().equals(TOKEN_AYE)) { + for (SpellCheckCorrection scc : rsp.corrections) { + if (scc.getOriginal().equals(TOKEN_AYE)) { a = scc.getOriginal(); - } else if(scc.getOriginal().equals(TOKEN_BEE)) { + } else if (scc.getOriginal().equals(TOKEN_BEE)) { b = scc.getOriginal(); - } else if(scc.getOriginal().equals(TOKEN_AYE_BEE)) { + } else if (scc.getOriginal().equals(TOKEN_AYE_BEE)) { ab = scc.getOriginal(); - } else if(scc.getOriginal().equals(TOKEN_CEE)) { + } else if (scc.getOriginal().equals(TOKEN_CEE)) { c = scc.getOriginal(); - } - if(ab!=null) { + } + if (ab != null) { abCount++; } else { aCount++; - } + } } assertTrue(c != null); - assertTrue(ab != null || (a!=null && b!=null)); - assertTrue(ab == null || (a==null && b==null)); + assertTrue(ab != null || (a != null && b != null)); + assertTrue(ab == null || (a == null && b == null)); assertTrue(dupChecker.add(rsp)); } - assertTrue(aCount==2160); - assertTrue(abCount==180); + assertTrue(aCount == 2160); + assertTrue(abCount == 180); } } diff --git a/solr/core/src/test/org/apache/solr/spelling/SpellingQueryConverterTest.java b/solr/core/src/test/org/apache/solr/spelling/SpellingQueryConverterTest.java index 2c9ae4cf5e3..fc379faae88 100644 --- a/solr/core/src/test/org/apache/solr/spelling/SpellingQueryConverterTest.java +++ b/solr/core/src/test/org/apache/solr/spelling/SpellingQueryConverterTest.java @@ -19,17 +19,14 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; - import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import org.apache.solr.SolrTestCase; import org.apache.solr.common.util.NamedList; import org.junit.Test; - /** * Test for SpellingQueryConverter * - * * @since solr 1.3 */ public class SpellingQueryConverterTest extends SolrTestCase { @@ -43,23 +40,33 @@ public void test() throws Exception { assertTrue("tokens is null and it shouldn't be", tokens != null); assertTrue("tokens Size: " + tokens.size() + " is not: " + 1, tokens.size() == 1); } - + @Test public 
void testNumeric() throws Exception {
    SpellingQueryConverter converter = new SpellingQueryConverter();
    converter.init(new NamedList<>());
    converter.setAnalyzer(new WhitespaceAnalyzer());
-    String[] queries = {"12345", "foo:12345", "12345 67890", "foo:(12345 67890)", "foo:(life 67890)", "12345 life",
-        "+12345 +life", "-12345 life"};
+    String[] queries = {
+      "12345",
+      "foo:12345",
+      "12345 67890",
+      "foo:(12345 67890)",
+      "foo:(life 67890)",
+      "12345 life",
+      "+12345 +life",
+      "-12345 life"
+    };
     int[] tokensToExpect = {1, 1, 2, 2, 2, 2, 2, 2};
     for (int i = 0; i < queries.length; i++) {
       Collection<Token> tokens = converter.convert(queries[i]);
-      assertTrue("tokens Size: " + tokens.size() + " is not: " + tokensToExpect[i], tokens.size() == tokensToExpect[i]);
+      assertTrue(
+          "tokens Size: " + tokens.size() + " is not: " + tokensToExpect[i],
+          tokens.size() == tokensToExpect[i]);
     }
   }
-  
+
   @Test
-  public void testSpecialChars() {
+  public void testSpecialChars() {
     SpellingQueryConverter converter = new SpellingQueryConverter();
     converter.init(new NamedList<>());
     converter.setAnalyzer(new WhitespaceAnalyzer());
@@ -82,18 +89,18 @@ public void testSpecialChars() {
     assertTrue("Token offsets do not match", isOffsetCorrect(original, tokens));

     // mix 'em up and add some to the value
-//    original = "field_with-123s:value_,.|with-hyphens";
-//    tokens = converter.convert(original);
-//    assertTrue("tokens is null and it shouldn't be", tokens != null);
-//    assertEquals("tokens Size: " + tokens.size() + " is not 1", 1, tokens.size());
-//    assertTrue("Token offsets do not match", isOffsetCorrect(original, tokens));
+    // original = "field_with-123s:value_,.|with-hyphens";
+    // tokens = converter.convert(original);
+    // assertTrue("tokens is null and it shouldn't be", tokens != null);
+    // assertEquals("tokens Size: " + tokens.size() + " is not 1", 1, tokens.size());
+    // assertTrue("Token offsets do not match", isOffsetCorrect(original, tokens));

     original = "foo:bar^5.0";
     tokens = converter.convert(original);
     assertTrue("tokens is null and it shouldn't be", tokens != null);
     assertEquals("tokens Size: " + tokens.size() + " is not 1", 1, tokens.size());
     assertTrue("Token offsets do not match", isOffsetCorrect(original, tokens));
-    
+
     String firstKeyword = "value1";
     String secondKeyword = "value2";
     original = "field-with-parenthesis:(" + firstKeyword + " " + secondKeyword + ")";
@@ -101,15 +108,19 @@ public void testSpecialChars() {
     assertTrue("tokens is null and it shouldn't be", tokens != null);
     assertEquals("tokens Size: " + tokens.size() + " is not 2", 2, tokens.size());
     assertTrue("Token offsets do not match", isOffsetCorrect(original, tokens));
-    assertTrue("first Token is not " + firstKeyword, new ArrayList<>(tokens).get(0).toString().equals(firstKeyword));
-    assertTrue("second Token is not " + secondKeyword, new ArrayList<>(tokens).get(1).toString().equals(secondKeyword));
+    assertTrue(
+        "first Token is not " + firstKeyword,
+        new ArrayList<>(tokens).get(0).toString().equals(firstKeyword));
+    assertTrue(
+        "second Token is not " + secondKeyword,
+        new ArrayList<>(tokens).get(1).toString().equals(secondKeyword));
   }

   private boolean isOffsetCorrect(String s, Collection<Token> tokens) {
     for (Token token : tokens) {
       int start = token.startOffset();
       int end = token.endOffset();
-      if (!s.substring(start, end).equals(token.toString())) return false;
+      if (!s.substring(start, end).equals(token.toString())) return false;
     }
     return true;
   }
@@ -119,7 +130,7 @@ public void testUnicode() {
     SpellingQueryConverter converter = new SpellingQueryConverter();
     converter.init(new NamedList<>());
     converter.setAnalyzer(new WhitespaceAnalyzer());
-    
+
     // chinese text value
     Collection<Token> tokens = converter.convert("text_field:我购买了道具和服装。");
     assertTrue("tokens is null and it shouldn't be", tokens != null);
@@ -150,7 +161,7 @@ public void testMultipleClauses() {
     assertTrue("tokens is null and it shouldn't be", tokens != null);
     assertEquals("tokens Size: " + tokens.size() + " is not 2", 2, tokens.size());
   }
-  
+
   @Test
   public void testRequiredOrProhibitedFlags() {
     SpellingQueryConverter converter = new SpellingQueryConverter();
@@ -159,66 +170,108 @@ public void testRequiredOrProhibitedFlags() {
     {
       List<Token> tokens = new ArrayList<>(converter.convert("aaa bbb ccc"));
-      assertTrue("Should have 3 tokens", tokens != null && tokens.size()==3);
-      assertTrue("token 1 should be optional", !hasRequiredFlag(tokens.get(0)) && !hasProhibitedFlag(tokens.get(0)));
-      assertTrue("token 2 should be optional", !hasRequiredFlag(tokens.get(1)) && !hasProhibitedFlag(tokens.get(1)));
-      assertTrue("token 3 should be optional", !hasRequiredFlag(tokens.get(2)) && !hasProhibitedFlag(tokens.get(2)));
+      assertTrue("Should have 3 tokens", tokens != null && tokens.size() == 3);
+      assertTrue(
+          "token 1 should be optional",
+          !hasRequiredFlag(tokens.get(0)) && !hasProhibitedFlag(tokens.get(0)));
+      assertTrue(
+          "token 2 should be optional",
+          !hasRequiredFlag(tokens.get(1)) && !hasProhibitedFlag(tokens.get(1)));
+      assertTrue(
+          "token 3 should be optional",
+          !hasRequiredFlag(tokens.get(2)) && !hasProhibitedFlag(tokens.get(2)));
     }
     {
       List<Token> tokens = new ArrayList<>(converter.convert("+aaa bbb -ccc"));
-      assertTrue("Should have 3 tokens", tokens != null && tokens.size()==3);
-      assertTrue("token 1 should be required", hasRequiredFlag(tokens.get(0)) && !hasProhibitedFlag(tokens.get(0)));
-      assertTrue("token 2 should be optional", !hasRequiredFlag(tokens.get(1)) && !hasProhibitedFlag(tokens.get(1)));
-      assertTrue("token 3 should be prohibited", !hasRequiredFlag(tokens.get(2)) && hasProhibitedFlag(tokens.get(2)));
+      assertTrue("Should have 3 tokens", tokens != null && tokens.size() == 3);
+      assertTrue(
+          "token 1 should be required",
+          hasRequiredFlag(tokens.get(0)) && !hasProhibitedFlag(tokens.get(0)));
+      assertTrue(
+          "token 2 should be optional",
+          !hasRequiredFlag(tokens.get(1)) && !hasProhibitedFlag(tokens.get(1)));
+      assertTrue(
+          "token 3 should be prohibited",
+          !hasRequiredFlag(tokens.get(2)) && hasProhibitedFlag(tokens.get(2)));
     }
     {
       List<Token> tokens = new ArrayList<>(converter.convert("aaa AND bbb ccc"));
-      assertTrue("Should have 3 tokens", tokens != null && tokens.size()==3);
-      assertTrue("token 1 doesn't precede n.b.o.", !hasNBOFlag(tokens.get(0)) && hasInBooleanFlag(tokens.get(0)));
-      assertTrue("token 2 doesn't precede n.b.o.", !hasNBOFlag(tokens.get(1)) && hasInBooleanFlag(tokens.get(0)));
-      assertTrue("token 3 doesn't precede n.b.o.", !hasNBOFlag(tokens.get(2)) && hasInBooleanFlag(tokens.get(0)));
+      assertTrue("Should have 3 tokens", tokens != null && tokens.size() == 3);
+      assertTrue(
+          "token 1 doesn't precede n.b.o.",
+          !hasNBOFlag(tokens.get(0)) && hasInBooleanFlag(tokens.get(0)));
+      assertTrue(
+          "token 2 doesn't precede n.b.o.",
+          !hasNBOFlag(tokens.get(1)) && hasInBooleanFlag(tokens.get(0)));
+      assertTrue(
+          "token 3 doesn't precede n.b.o.",
+          !hasNBOFlag(tokens.get(2)) && hasInBooleanFlag(tokens.get(0)));
     }
     {
       List<Token> tokens = new ArrayList<>(converter.convert("aaa OR bbb OR ccc"));
-      assertTrue("Should have 3 tokens", tokens != null && tokens.size()==3);
-      assertTrue("token 1 doesn't precede n.b.o.", !hasNBOFlag(tokens.get(0)) && hasInBooleanFlag(tokens.get(0)));
-      assertTrue("token 2 doesn't precede n.b.o.", !hasNBOFlag(tokens.get(1)) && hasInBooleanFlag(tokens.get(0)));
-      assertTrue("token 3 doesn't precede n.b.o.", !hasNBOFlag(tokens.get(2)) && hasInBooleanFlag(tokens.get(0)));
+      assertTrue("Should have 3 tokens", tokens != null && tokens.size() == 3);
+      assertTrue(
+          "token 1 doesn't precede n.b.o.",
+          !hasNBOFlag(tokens.get(0)) && hasInBooleanFlag(tokens.get(0)));
+      assertTrue(
+          "token 2 doesn't precede n.b.o.",
+          !hasNBOFlag(tokens.get(1)) && hasInBooleanFlag(tokens.get(0)));
+      assertTrue(
+          "token 3 doesn't precede n.b.o.",
+          !hasNBOFlag(tokens.get(2)) && hasInBooleanFlag(tokens.get(0)));
     }
     {
       List<Token> tokens = new ArrayList<>(converter.convert("aaa AND bbb NOT ccc"));
-      assertTrue("Should have 3 tokens", tokens != null && tokens.size()==3);
-      assertTrue("token 1 doesn't precede n.b.o.", !hasNBOFlag(tokens.get(0)) && hasInBooleanFlag(tokens.get(0)));
-      assertTrue("token 2 precedes n.b.o.", hasNBOFlag(tokens.get(1)) && hasInBooleanFlag(tokens.get(0)));
-      assertTrue("token 3 doesn't precede n.b.o.", !hasNBOFlag(tokens.get(2)) && hasInBooleanFlag(tokens.get(0)));
+      assertTrue("Should have 3 tokens", tokens != null && tokens.size() == 3);
+      assertTrue(
+          "token 1 doesn't precede n.b.o.",
+          !hasNBOFlag(tokens.get(0)) && hasInBooleanFlag(tokens.get(0)));
+      assertTrue(
+          "token 2 precedes n.b.o.", hasNBOFlag(tokens.get(1)) && hasInBooleanFlag(tokens.get(0)));
+      assertTrue(
+          "token 3 doesn't precede n.b.o.",
+          !hasNBOFlag(tokens.get(2)) && hasInBooleanFlag(tokens.get(0)));
     }
     {
       List<Token> tokens = new ArrayList<>(converter.convert("aaa NOT bbb AND ccc"));
-      assertTrue("Should have 3 tokens", tokens != null && tokens.size()==3);
-      assertTrue("token 1 precedes n.b.o.", hasNBOFlag(tokens.get(0)) && hasInBooleanFlag(tokens.get(0)));
-      assertTrue("token 2 precedes n.b.o.", hasNBOFlag(tokens.get(1)) && hasInBooleanFlag(tokens.get(0)));
-      assertTrue("token 3 doesn't precedes n.b.o.", !hasNBOFlag(tokens.get(2)) && hasInBooleanFlag(tokens.get(0)));
+      assertTrue("Should have 3 tokens", tokens != null && tokens.size() == 3);
+      assertTrue(
+          "token 1 precedes n.b.o.", hasNBOFlag(tokens.get(0)) && hasInBooleanFlag(tokens.get(0)));
+      assertTrue(
+          "token 2 precedes n.b.o.", hasNBOFlag(tokens.get(1)) && hasInBooleanFlag(tokens.get(0)));
+      assertTrue(
+          "token 3 doesn't precedes n.b.o.",
+          !hasNBOFlag(tokens.get(2)) && hasInBooleanFlag(tokens.get(0)));
     }
     {
       List<Token> tokens = new ArrayList<>(converter.convert("aaa AND NOT bbb AND ccc"));
-      assertTrue("Should have 3 tokens", tokens != null && tokens.size()==3);
-      assertTrue("token 1 precedes n.b.o.", hasNBOFlag(tokens.get(0)) && hasInBooleanFlag(tokens.get(0)));
-      assertTrue("token 2 precedes n.b.o.", hasNBOFlag(tokens.get(1)) && hasInBooleanFlag(tokens.get(0)));
-      assertTrue("token 3 doesn't precedes n.b.o.", !hasNBOFlag(tokens.get(2)) && hasInBooleanFlag(tokens.get(0)));
+      assertTrue("Should have 3 tokens", tokens != null && tokens.size() == 3);
+      assertTrue(
+          "token 1 precedes n.b.o.", hasNBOFlag(tokens.get(0)) && hasInBooleanFlag(tokens.get(0)));
+      assertTrue(
+          "token 2 precedes n.b.o.", hasNBOFlag(tokens.get(1)) && hasInBooleanFlag(tokens.get(0)));
+      assertTrue(
+          "token 3 doesn't precedes n.b.o.",
+          !hasNBOFlag(tokens.get(2)) && hasInBooleanFlag(tokens.get(0)));
     }
-  }
-  
+  }
+
   private boolean hasRequiredFlag(Token t) {
     return (t.getFlags() & QueryConverter.REQUIRED_TERM_FLAG) == QueryConverter.REQUIRED_TERM_FLAG;
   }
+
   private boolean hasProhibitedFlag(Token t) {
-    return (t.getFlags() & QueryConverter.PROHIBITED_TERM_FLAG) == QueryConverter.PROHIBITED_TERM_FLAG;
+    return (t.getFlags() & QueryConverter.PROHIBITED_TERM_FLAG)
+        == QueryConverter.PROHIBITED_TERM_FLAG;
   }
+
   private boolean hasNBOFlag(Token t) {
-    return (t.getFlags() & QueryConverter.TERM_PRECEDES_NEW_BOOLEAN_OPERATOR_FLAG) == QueryConverter.TERM_PRECEDES_NEW_BOOLEAN_OPERATOR_FLAG;
+    return (t.getFlags() & QueryConverter.TERM_PRECEDES_NEW_BOOLEAN_OPERATOR_FLAG)
+        == QueryConverter.TERM_PRECEDES_NEW_BOOLEAN_OPERATOR_FLAG;
   }
+
   private boolean hasInBooleanFlag(Token t) {
-    return (t.getFlags() & QueryConverter.TERM_IN_BOOLEAN_QUERY_FLAG) == QueryConverter.TERM_IN_BOOLEAN_QUERY_FLAG;
+    return (t.getFlags() & QueryConverter.TERM_IN_BOOLEAN_QUERY_FLAG)
+        == QueryConverter.TERM_IN_BOOLEAN_QUERY_FLAG;
   }
 }
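Editorial note: the four has*Flag helpers above all apply the same mask-and-compare idiom, (flags & FLAG) == FLAG, which is true exactly when every bit of FLAG is set. A minimal, runnable Java sketch of the idiom; the flag values below are invented for the demo, the real constants are defined on QueryConverter:

public class FlagCheckDemo {
  // Hypothetical flag values for illustration; QueryConverter defines the real ones.
  static final int REQUIRED_TERM_FLAG = 1 << 0;
  static final int PROHIBITED_TERM_FLAG = 1 << 1;

  static boolean hasFlag(int flags, int flag) {
    // True iff all bits of `flag` are present in `flags`.
    return (flags & flag) == flag;
  }

  public static void main(String[] args) {
    int flags = REQUIRED_TERM_FLAG; // e.g. a token parsed from "+term"
    System.out.println(hasFlag(flags, REQUIRED_TERM_FLAG));   // true
    System.out.println(hasFlag(flags, PROHIBITED_TERM_FLAG)); // false
  }
}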
diff --git a/solr/core/src/test/org/apache/solr/spelling/TestSuggestSpellingConverter.java b/solr/core/src/test/org/apache/solr/spelling/TestSuggestSpellingConverter.java
index 0e4a0115efb..649f7a7a43b 100644
--- a/solr/core/src/test/org/apache/solr/spelling/TestSuggestSpellingConverter.java
+++ b/solr/core/src/test/org/apache/solr/spelling/TestSuggestSpellingConverter.java
@@ -19,7 +19,6 @@
 import java.io.IOException;
 import java.util.Collection;
 import java.util.regex.Pattern;
-
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.BaseTokenStreamTestCase;
 import org.apache.lucene.analysis.LowerCaseFilter;
@@ -33,34 +32,40 @@
 public class TestSuggestSpellingConverter extends BaseTokenStreamTestCase {
   SuggestQueryConverter converter = new SuggestQueryConverter();
-  
+
   public void testSimple() throws Exception {
     // lowercases only!
     converter.setAnalyzer(new MockAnalyzer(random(), MockTokenizer.KEYWORD, true));
-    assertConvertsTo("This is a test", new String[] { "this is a test" });
+    assertConvertsTo("This is a test", new String[] {"this is a test"});
   }
-  
+
   public void testComplicated() throws Exception {
     // lowercases, removes field names, other syntax, collapses runs of whitespace, etc.
-    converter.setAnalyzer(new Analyzer() {
-      @Override
-      protected TokenStreamComponents createComponents(String fieldName) {
-        Tokenizer tokenizer = new KeywordTokenizer();
-        TokenStream filter = new PatternReplaceFilter(tokenizer,
-            Pattern.compile("([^\\p{L}\\p{M}\\p{N}\\p{Cs}]*[\\p{L}\\p{M}\\p{N}\\p{Cs}\\_]+:)|([^\\p{L}\\p{M}\\p{N}\\p{Cs}])+"), " ", true);
-        filter = new LowerCaseFilter(filter);
-        filter = new TrimFilter(filter);
-        return new TokenStreamComponents(tokenizer, filter);
-      }
-    });
-    assertConvertsTo("test1 +test2", new String[] { "test1 test2" });
-    assertConvertsTo("test~", new String[] { "test" });
-    assertConvertsTo("field:test", new String[] { "test" });
-    assertConvertsTo("This is a test", new String[] { "this is a test" });
-    assertConvertsTo(" This is a test", new String[] { "this is a test" });
-    assertConvertsTo("Foo (field:bar) text_hi:हिन्दी ", new String[] { "foo bar हिन्दी" });
+    converter.setAnalyzer(
+        new Analyzer() {
+          @Override
+          protected TokenStreamComponents createComponents(String fieldName) {
+            Tokenizer tokenizer = new KeywordTokenizer();
+            TokenStream filter =
+                new PatternReplaceFilter(
+                    tokenizer,
+                    Pattern.compile(
+                        "([^\\p{L}\\p{M}\\p{N}\\p{Cs}]*[\\p{L}\\p{M}\\p{N}\\p{Cs}\\_]+:)|([^\\p{L}\\p{M}\\p{N}\\p{Cs}])+"),
+                    " ",
+                    true);
+            filter = new LowerCaseFilter(filter);
+            filter = new TrimFilter(filter);
+            return new TokenStreamComponents(tokenizer, filter);
+          }
+        });
+    assertConvertsTo("test1 +test2", new String[] {"test1 test2"});
+    assertConvertsTo("test~", new String[] {"test"});
+    assertConvertsTo("field:test", new String[] {"test"});
+    assertConvertsTo("This is a test", new String[] {"this is a test"});
+    assertConvertsTo(" This is a test", new String[] {"this is a test"});
+    assertConvertsTo("Foo (field:bar) text_hi:हिन्दी ", new String[] {"foo bar हिन्दी"});
   }
-  
+
   public void assertConvertsTo(String text, String expected[]) throws IOException {
     Collection<Token> tokens = converter.convert(text);
     assertEquals(tokens.size(), expected.length);
diff --git a/solr/core/src/test/org/apache/solr/spelling/WordBreakSolrSpellCheckerTest.java b/solr/core/src/test/org/apache/solr/spelling/WordBreakSolrSpellCheckerTest.java
index ea40f326c95..7ae48c98021 100644
--- a/solr/core/src/test/org/apache/solr/spelling/WordBreakSolrSpellCheckerTest.java
+++ b/solr/core/src/test/org/apache/solr/spelling/WordBreakSolrSpellCheckerTest.java
@@ -19,7 +19,6 @@
 import java.util.Collection;
 import java.util.LinkedHashMap;
 import java.util.Map;
-
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.util.LuceneTestCase.SuppressTempFileChecks;
 import org.apache.solr.SolrTestCaseJ4;
@@ -31,13 +30,15 @@
 import org.junit.BeforeClass;
 import org.junit.Test;

-@SuppressTempFileChecks(bugUrl = "https://issues.apache.org/jira/browse/SOLR-1877 Spellcheck IndexReader leak bug?")
+@SuppressTempFileChecks(
+    bugUrl = "https://issues.apache.org/jira/browse/SOLR-1877 Spellcheck IndexReader leak bug?")
 public class WordBreakSolrSpellCheckerTest extends SolrTestCaseJ4 {
-  
+
   @BeforeClass
   public static void beforeClass() throws Exception {
-    initCore("solrconfig-spellcheckcomponent.xml","schema.xml");
-    assertNull(h.validateUpdate(adoc("id", "0", "lowerfilt", "pain table paintablepine pi ne in able")));
+    initCore("solrconfig-spellcheckcomponent.xml", "schema.xml");
+    assertNull(
+        h.validateUpdate(adoc("id", "0", "lowerfilt", "pain table paintablepine pi ne in able")));
     assertNull(h.validateUpdate(adoc("id", "1", "lowerfilt", "paint able pineapple goodness in")));
     assertNull(h.validateUpdate(adoc("id", "2", "lowerfilt", "pa in table pineapplegoodness")));
     assertNull(h.validateUpdate(adoc("id", "3", "lowerfilt", "printable line in ample food mess")));
@@ -45,15 +46,15 @@ public static void beforeClass() throws Exception {
     assertNull(h.validateUpdate(adoc("id", "5", "lowerfilt", "printable in puntable paint able ")));
     assertNull(h.validateUpdate(adoc("id", "6", "lowerfilt", "paint able in pintable plantable")));
     assertNull(h.validateUpdate(adoc("id", "7", "lowerfilt", "zxcvqwtp fg hj")));
-    assertNull(h.validateUpdate(commit()));    
-    //docfreq=7: in
-    //docfreq=5: able
-    //docfreq=4: paint
-    //docfreq=3: printable
-    //docfreq=2: table
-    //docfreq=1: {all others}
+    assertNull(h.validateUpdate(commit()));
+    // docfreq=7: in
+    // docfreq=5: able
+    // docfreq=4: paint
+    // docfreq=3: printable
+    // docfreq=2: table
+    // docfreq=1: {all others}
   }
-  
+
   @Test
   public void testStandAlone() throws Exception {
     SolrCore core = h.getCore();
@@ -65,106 +66,115 @@ public void testStandAlone() throws Exception {
     params.add(WordBreakSolrSpellChecker.PARAM_MAX_CHANGES, "10");
     checker.init(params, core);

-    //TODO can we use core.withSearcher ? refcounting here is confusing; not sure if intentional
+    // TODO can we use core.withSearcher ? refcounting here is confusing; not sure if intentional
     RefCounted<SolrIndexSearcher> searcher = core.getSearcher();
     QueryConverter qc = new SpellingQueryConverter();
     qc.setAnalyzer(new MockAnalyzer(random()));
-    
+
     {
-      //Prior to SOLR-8175, the required term would cause an AIOOBE.
+      // Prior to SOLR-8175, the required term would cause an AIOOBE.
       Collection<Token> tokens = qc.convert("+pine apple good ness");
       SpellingOptions spellOpts = new SpellingOptions(tokens, searcher.get().getIndexReader(), 10);
       SpellingResult result = checker.getSuggestions(spellOpts);
-      searcher.decref();    
+      searcher.decref();
       assertTrue(result != null && result.getSuggestions() != null);
-      assertTrue(result.getSuggestions().size()==5);
+      assertTrue(result.getSuggestions().size() == 5);
     }
-    
+
     Collection<Token> tokens = qc.convert("paintable pine apple good ness");
     SpellingOptions spellOpts = new SpellingOptions(tokens, searcher.get().getIndexReader(), 10);
     SpellingResult result = checker.getSuggestions(spellOpts);
     searcher.decref();
-    
+
     assertTrue(result != null && result.getSuggestions() != null);
-    assertTrue(result.getSuggestions().size()==9);
-    
-    for(Map.Entry<Token, LinkedHashMap<String, Integer>> s : result.getSuggestions().entrySet()) {
+    assertTrue(result.getSuggestions().size() == 9);
+
+    for (Map.Entry<Token, LinkedHashMap<String, Integer>> s : result.getSuggestions().entrySet()) {
       Token orig = s.getKey();
       String[] corr = s.getValue().keySet().toArray(new String[0]);
-      if(orig.toString().equals("paintable")) {
-        assertTrue(orig.startOffset()==0);
-        assertTrue(orig.endOffset()==9);
-        assertTrue(orig.length()==9);
-        assertTrue(corr.length==3);
-        assertTrue(corr[0].equals("paint able")); //1 op ; max doc freq=5
-        assertTrue(corr[1].equals("pain table")); //1 op ; max doc freq=2
-        assertTrue(corr[2].equals("pa in table")); //2 ops
-      } else if(orig.toString().equals("pine apple")) {
-        assertTrue(orig.startOffset()==10);
-        assertTrue(orig.endOffset()==20);
-        assertTrue(orig.length()==10);
-        assertTrue(corr.length==1);
+      if (orig.toString().equals("paintable")) {
+        assertTrue(orig.startOffset() == 0);
+        assertTrue(orig.endOffset() == 9);
+        assertTrue(orig.length() == 9);
+        assertTrue(corr.length == 3);
+        assertTrue(corr[0].equals("paint able")); // 1 op ; max doc freq=5
+        assertTrue(corr[1].equals("pain table")); // 1 op ; max doc freq=2
+        assertTrue(corr[2].equals("pa in table")); // 2 ops
+      } else if (orig.toString().equals("pine apple")) {
+        assertTrue(orig.startOffset() == 10);
+        assertTrue(orig.endOffset() == 20);
+        assertTrue(orig.length() == 10);
+        assertTrue(corr.length == 1);
         assertTrue(corr[0].equals("pineapple"));
-      } else if(orig.toString().equals("paintable pine")) {
-        assertTrue(orig.startOffset()==0);
-        assertTrue(orig.endOffset()==14);
-        assertTrue(orig.length()==14);
-        assertTrue(corr.length==1);
+      } else if (orig.toString().equals("paintable pine")) {
+        assertTrue(orig.startOffset() == 0);
+        assertTrue(orig.endOffset() == 14);
+        assertTrue(orig.length() == 14);
+        assertTrue(corr.length == 1);
         assertTrue(corr[0].equals("paintablepine"));
-      } else if(orig.toString().equals("good ness")) {
-        assertTrue(orig.startOffset()==21);
-        assertTrue(orig.endOffset()==30);
-        assertTrue(orig.length()==9);
-        assertTrue(corr.length==1);
+      } else if (orig.toString().equals("good ness")) {
+        assertTrue(orig.startOffset() == 21);
+        assertTrue(orig.endOffset() == 30);
+        assertTrue(orig.length() == 9);
+        assertTrue(corr.length == 1);
         assertTrue(corr[0].equals("goodness"));
-      } else if(orig.toString().equals("pine apple good ness")) {
-        assertTrue(orig.startOffset()==10);
-        assertTrue(orig.endOffset()==30);
-        assertTrue(orig.length()==20);
-        assertTrue(corr.length==1);
+      } else if (orig.toString().equals("pine apple good ness")) {
+        assertTrue(orig.startOffset() == 10);
+        assertTrue(orig.endOffset() == 30);
+        assertTrue(orig.length() == 20);
+        assertTrue(corr.length == 1);
         assertTrue(corr[0].equals("pineapplegoodness"));
-      } else if(orig.toString().equals("pine")) {
-        assertTrue(orig.startOffset()==10);
-        assertTrue(orig.endOffset()==14);
-        assertTrue(orig.length()==4);
-        assertTrue(corr.length==1);
+      } else if (orig.toString().equals("pine")) {
+        assertTrue(orig.startOffset() == 10);
+        assertTrue(orig.endOffset() == 14);
+        assertTrue(orig.length() == 4);
+        assertTrue(corr.length == 1);
         assertTrue(corr[0].equals("pi ne"));
-      } else if(orig.toString().equals("pine")) {
-        assertTrue(orig.startOffset()==10);
-        assertTrue(orig.endOffset()==14);
-        assertTrue(orig.length()==4);
-        assertTrue(corr.length==1);
+      } else if (orig.toString().equals("pine")) {
+        assertTrue(orig.startOffset() == 10);
+        assertTrue(orig.endOffset() == 14);
+        assertTrue(orig.length() == 4);
+        assertTrue(corr.length == 1);
         assertTrue(corr[0].equals("pi ne"));
-      } else if(orig.toString().equals("apple")) {
-        assertTrue(orig.startOffset()==15);
-        assertTrue(orig.endOffset()==20);
-        assertTrue(orig.length()==5);
-        assertTrue(corr.length==0);
-      } else if(orig.toString().equals("good")) {
-        assertTrue(orig.startOffset()==21);
-        assertTrue(orig.endOffset()==25);
-        assertTrue(orig.length()==4);
-        assertTrue(corr.length==0);
-      } else if(orig.toString().equals("ness")) {
-        assertTrue(orig.startOffset()==26);
-        assertTrue(orig.endOffset()==30);
-        assertTrue(orig.length()==4);
-        assertTrue(corr.length==0);
-      }else {
+      } else if (orig.toString().equals("apple")) {
+        assertTrue(orig.startOffset() == 15);
+        assertTrue(orig.endOffset() == 20);
+        assertTrue(orig.length() == 5);
+        assertTrue(corr.length == 0);
+      } else if (orig.toString().equals("good")) {
+        assertTrue(orig.startOffset() == 21);
+        assertTrue(orig.endOffset() == 25);
+        assertTrue(orig.length() == 4);
+        assertTrue(corr.length == 0);
+      } else if (orig.toString().equals("ness")) {
+        assertTrue(orig.startOffset() == 26);
+        assertTrue(orig.endOffset() == 30);
+        assertTrue(orig.length() == 4);
+        assertTrue(corr.length == 0);
+      } else {
         fail("Unexpected original result: " + orig);
-      }
-    }
+      }
+    }
   }
original result: " + orig); - } - } + } + } } + @Test public void testInConjunction() throws Exception { - assertQ(req( - "q", "lowerfilt:(paintable pine apple good ness)", - "qt", "/spellCheckWithWordbreak", - "indent", "true", - SpellCheckComponent.SPELLCHECK_BUILD, "true", - SpellCheckComponent.COMPONENT_NAME, "true", - SpellCheckComponent.SPELLCHECK_ACCURACY, ".75", - SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, "true"), + assertQ( + req( + "q", + "lowerfilt:(paintable pine apple good ness)", + "qt", + "/spellCheckWithWordbreak", + "indent", + "true", + SpellCheckComponent.SPELLCHECK_BUILD, + "true", + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellCheckComponent.SPELLCHECK_ACCURACY, + ".75", + SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, + "true"), "//lst[@name='suggestions']/lst[1]/@name='paintable'", "//lst[@name='suggestions']/lst[2]/@name='pine'", "//lst[@name='suggestions']/lst[3]/@name='apple'", @@ -174,22 +184,22 @@ public void testInConjunction() throws Exception { "//lst[@name='paintable']/int[@name='startOffset']=11", "//lst[@name='paintable']/int[@name='endOffset']=20", "//lst[@name='paintable']/int[@name='origFreq']=0", - "//lst[@name='paintable']/arr[@name='suggestion']/lst[1]/str[@name='word']='printable'", //SolrSpellChecker result interleaved - "//lst[@name='paintable']/arr[@name='suggestion']/lst[1]/int[@name='freq']=3", - "//lst[@name='paintable']/arr[@name='suggestion']/lst[2]/str[@name='word']='paint able'", //1 op - "//lst[@name='paintable']/arr[@name='suggestion']/lst[2]/int[@name='freq']=5", - "//lst[@name='paintable']/arr[@name='suggestion']/lst[3]/str[@name='word']='pintable'", //SolrSpellChecker result interleaved - "//lst[@name='paintable']/arr[@name='suggestion']/lst[3]/int[@name='freq']=1", - "//lst[@name='paintable']/arr[@name='suggestion']/lst[4]/str[@name='word']='pain table'", //1 op - "//lst[@name='paintable']/arr[@name='suggestion']/lst[4]/int[@name='freq']=2", - "//lst[@name='paintable']/arr[@name='suggestion']/lst[5]/str[@name='word']='pointable'", //SolrSpellChecker result interleaved - "//lst[@name='paintable']/arr[@name='suggestion']/lst[5]/int[@name='freq']=1", - "//lst[@name='paintable']/arr[@name='suggestion']/lst[6]/str[@name='word']='pa in table'", //2 ops + "//lst[@name='paintable']/arr[@name='suggestion']/lst[1]/str[@name='word']='printable'", // SolrSpellChecker result interleaved + "//lst[@name='paintable']/arr[@name='suggestion']/lst[1]/int[@name='freq']=3", + "//lst[@name='paintable']/arr[@name='suggestion']/lst[2]/str[@name='word']='paint able'", // 1 op + "//lst[@name='paintable']/arr[@name='suggestion']/lst[2]/int[@name='freq']=5", + "//lst[@name='paintable']/arr[@name='suggestion']/lst[3]/str[@name='word']='pintable'", // SolrSpellChecker result interleaved + "//lst[@name='paintable']/arr[@name='suggestion']/lst[3]/int[@name='freq']=1", + "//lst[@name='paintable']/arr[@name='suggestion']/lst[4]/str[@name='word']='pain table'", // 1 op + "//lst[@name='paintable']/arr[@name='suggestion']/lst[4]/int[@name='freq']=2", + "//lst[@name='paintable']/arr[@name='suggestion']/lst[5]/str[@name='word']='pointable'", // SolrSpellChecker result interleaved + "//lst[@name='paintable']/arr[@name='suggestion']/lst[5]/int[@name='freq']=1", + "//lst[@name='paintable']/arr[@name='suggestion']/lst[6]/str[@name='word']='pa in table'", // 2 ops "//lst[@name='paintable']/arr[@name='suggestion']/lst[6]/int[@name='freq']=7", - "//lst[@name='paintable']/arr[@name='suggestion']/lst[7]/str[@name='word']='plantable'", //SolrSpellChecker result 
interleaved - "//lst[@name='paintable']/arr[@name='suggestion']/lst[7]/int[@name='freq']=1", - "//lst[@name='paintable']/arr[@name='suggestion']/lst[8]/str[@name='word']='puntable'", //SolrSpellChecker result interleaved - "//lst[@name='paintable']/arr[@name='suggestion']/lst[8]/int[@name='freq']=1", + "//lst[@name='paintable']/arr[@name='suggestion']/lst[7]/str[@name='word']='plantable'", // SolrSpellChecker result interleaved + "//lst[@name='paintable']/arr[@name='suggestion']/lst[7]/int[@name='freq']=1", + "//lst[@name='paintable']/arr[@name='suggestion']/lst[8]/str[@name='word']='puntable'", // SolrSpellChecker result interleaved + "//lst[@name='paintable']/arr[@name='suggestion']/lst[8]/int[@name='freq']=1", "//lst[@name='pine']/int[@name='numFound']=2", "//lst[@name='pine']/int[@name='startOffset']=21", "//lst[@name='pine']/int[@name='endOffset']=25", @@ -216,22 +226,33 @@ public void testInConjunction() throws Exception { "//lst[@name='pine apple good ness']/int[@name='numFound']=1", "//lst[@name='pine apple good ness']/int[@name='startOffset']=21", "//lst[@name='pine apple good ness']/int[@name='endOffset']=41", - "//lst[@name='pine apple good ness']/arr[@name='suggestion']/lst[1]/str[@name='word']='pineapplegoodness'" - ); + "//lst[@name='pine apple good ness']/arr[@name='suggestion']/lst[1]/str[@name='word']='pineapplegoodness'"); } + @Test public void testCollate() throws Exception { - assertQ(req( - "q", "lowerfilt:(paintable pine apple godness)", - "qt", "/spellCheckWithWordbreak", - "indent", "true", - SpellCheckComponent.SPELLCHECK_BUILD, "true", - SpellCheckComponent.COMPONENT_NAME, "true", - SpellCheckComponent.SPELLCHECK_ACCURACY, ".75", - SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, "true", - SpellCheckComponent.SPELLCHECK_COLLATE, "true", - SpellCheckComponent.SPELLCHECK_COLLATE_EXTENDED_RESULTS, "true", - SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, "10"), + assertQ( + req( + "q", + "lowerfilt:(paintable pine apple godness)", + "qt", + "/spellCheckWithWordbreak", + "indent", + "true", + SpellCheckComponent.SPELLCHECK_BUILD, + "true", + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellCheckComponent.SPELLCHECK_ACCURACY, + ".75", + SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, + "true", + SpellCheckComponent.SPELLCHECK_COLLATE, + "true", + SpellCheckComponent.SPELLCHECK_COLLATE_EXTENDED_RESULTS, + "true", + SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, + "10"), "//lst[@name='collation'][1 ]/str[@name='collationQuery']='lowerfilt:(printable line ample goodness)'", "//lst[@name='collation'][2 ]/str[@name='collationQuery']='lowerfilt:(paintablepine ample goodness)'", "//lst[@name='collation'][3 ]/str[@name='collationQuery']='lowerfilt:(printable pineapple goodness)'", @@ -245,87 +266,143 @@ public void testCollate() throws Exception { "//lst[@name='collation'][10]/lst[@name='misspellingsAndCorrections']/str[@name='paintable']='pintable'", "//lst[@name='collation'][10]/lst[@name='misspellingsAndCorrections']/str[@name='pine']='pi ne'", "//lst[@name='collation'][10]/lst[@name='misspellingsAndCorrections']/str[@name='apple']='ample'", - "//lst[@name='collation'][10]/lst[@name='misspellingsAndCorrections']/str[@name='godness']='goodness'" - ); - assertQ(req( - "q", "lowerfilt:(pine AND apple)", - "qt", "/spellCheckWithWordbreak", - "indent", "true", - SpellCheckComponent.COMPONENT_NAME, "true", - SpellCheckComponent.SPELLCHECK_ACCURACY, ".75", - SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, "true", - SpellCheckComponent.SPELLCHECK_COLLATE, "true", - 
SpellCheckComponent.SPELLCHECK_COLLATE_EXTENDED_RESULTS, "true", - SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, "10"), + "//lst[@name='collation'][10]/lst[@name='misspellingsAndCorrections']/str[@name='godness']='goodness'"); + assertQ( + req( + "q", + "lowerfilt:(pine AND apple)", + "qt", + "/spellCheckWithWordbreak", + "indent", + "true", + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellCheckComponent.SPELLCHECK_ACCURACY, + ".75", + SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, + "true", + SpellCheckComponent.SPELLCHECK_COLLATE, + "true", + SpellCheckComponent.SPELLCHECK_COLLATE_EXTENDED_RESULTS, + "true", + SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, + "10"), "//lst[@name='collation'][1 ]/str[@name='collationQuery']='lowerfilt:(line AND ample)'", "//lst[@name='collation'][2 ]/str[@name='collationQuery']='lowerfilt:(pineapple)'", - "//lst[@name='collation'][3 ]/str[@name='collationQuery']='lowerfilt:((pi AND ne) AND ample)'" - ); - assertQ(req( - "q", "lowerfilt:pine AND NOT lowerfilt:apple", - "qt", "/spellCheckWithWordbreak", - "indent", "true", - SpellCheckComponent.COMPONENT_NAME, "true", - SpellCheckComponent.SPELLCHECK_ACCURACY, ".75", - SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, "true", - SpellCheckComponent.SPELLCHECK_COLLATE, "true", - SpellCheckComponent.SPELLCHECK_COLLATE_EXTENDED_RESULTS, "true", - SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, "10"), + "//lst[@name='collation'][3 ]/str[@name='collationQuery']='lowerfilt:((pi AND ne) AND ample)'"); + assertQ( + req( + "q", + "lowerfilt:pine AND NOT lowerfilt:apple", + "qt", + "/spellCheckWithWordbreak", + "indent", + "true", + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellCheckComponent.SPELLCHECK_ACCURACY, + ".75", + SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, + "true", + SpellCheckComponent.SPELLCHECK_COLLATE, + "true", + SpellCheckComponent.SPELLCHECK_COLLATE_EXTENDED_RESULTS, + "true", + SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, + "10"), "//lst[@name='collation'][1 ]/str[@name='collationQuery']='lowerfilt:line AND NOT lowerfilt:ample'", - "//lst[@name='collation'][2 ]/str[@name='collationQuery']='lowerfilt:(pi AND ne) AND NOT lowerfilt:ample'" - ); - assertQ(req( - "q", "lowerfilt:pine NOT lowerfilt:apple", - "qt", "/spellCheckWithWordbreak", - "indent", "true", - SpellCheckComponent.COMPONENT_NAME, "true", - SpellCheckComponent.SPELLCHECK_ACCURACY, ".75", - SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, "true", - SpellCheckComponent.SPELLCHECK_COLLATE, "true", - SpellCheckComponent.SPELLCHECK_COLLATE_EXTENDED_RESULTS, "true", - SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, "10"), + "//lst[@name='collation'][2 ]/str[@name='collationQuery']='lowerfilt:(pi AND ne) AND NOT lowerfilt:ample'"); + assertQ( + req( + "q", + "lowerfilt:pine NOT lowerfilt:apple", + "qt", + "/spellCheckWithWordbreak", + "indent", + "true", + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellCheckComponent.SPELLCHECK_ACCURACY, + ".75", + SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, + "true", + SpellCheckComponent.SPELLCHECK_COLLATE, + "true", + SpellCheckComponent.SPELLCHECK_COLLATE_EXTENDED_RESULTS, + "true", + SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, + "10"), "//lst[@name='collation'][1 ]/str[@name='collationQuery']='lowerfilt:line NOT lowerfilt:ample'", - "//lst[@name='collation'][2 ]/str[@name='collationQuery']='lowerfilt:(pi AND ne) NOT lowerfilt:ample'" - ); - assertQ(req( - "q", "lowerfilt:(+pine -apple)", - "qt", "/spellCheckWithWordbreak", - "indent", "true", - 
SpellCheckComponent.COMPONENT_NAME, "true", - SpellCheckComponent.SPELLCHECK_ACCURACY, ".75", - SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, "true", - SpellCheckComponent.SPELLCHECK_COLLATE, "true", - SpellCheckComponent.SPELLCHECK_COLLATE_EXTENDED_RESULTS, "true", - SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, "10"), + "//lst[@name='collation'][2 ]/str[@name='collationQuery']='lowerfilt:(pi AND ne) NOT lowerfilt:ample'"); + assertQ( + req( + "q", + "lowerfilt:(+pine -apple)", + "qt", + "/spellCheckWithWordbreak", + "indent", + "true", + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellCheckComponent.SPELLCHECK_ACCURACY, + ".75", + SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, + "true", + SpellCheckComponent.SPELLCHECK_COLLATE, + "true", + SpellCheckComponent.SPELLCHECK_COLLATE_EXTENDED_RESULTS, + "true", + SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, + "10"), "//lst[@name='collation'][1 ]/str[@name='collationQuery']='lowerfilt:(+line -ample)'", - "//lst[@name='collation'][2 ]/str[@name='collationQuery']='lowerfilt:(+pi +ne -ample)'" - ); - assertQ(req( - "q", "lowerfilt:(+printableinpuntableplantable)", - "qt", "/spellCheckWithWordbreak", - "indent", "true", - SpellCheckComponent.COMPONENT_NAME, "true", - SpellCheckComponent.SPELLCHECK_ACCURACY, "1", - SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, "true", - SpellCheckComponent.SPELLCHECK_COLLATE, "true", - SpellCheckComponent.SPELLCHECK_COLLATE_EXTENDED_RESULTS, "true", - SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, "1"), - "//lst[@name='collation'][1 ]/str[@name='collationQuery']='lowerfilt:(+printable +in +puntable +plantable)'" - ); - assertQ(req( - "q", "zxcv AND qwtp AND fghj", - "qt", "/spellCheckWithWordbreak", - "defType", "edismax", - "qf", "lowerfilt", - "indent", "true", - SpellCheckComponent.SPELLCHECK_BUILD, "true", - SpellCheckComponent.COMPONENT_NAME, "true", - SpellCheckComponent.SPELLCHECK_ACCURACY, ".75", - SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, "true", - SpellCheckComponent.SPELLCHECK_COLLATE, "true", - SpellCheckComponent.SPELLCHECK_COLLATE_EXTENDED_RESULTS, "true", - SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, "10"), - "//lst[@name='collation'][1 ]/str[@name='collationQuery']='zxcvqwtp AND (fg AND hj)'" - ); + "//lst[@name='collation'][2 ]/str[@name='collationQuery']='lowerfilt:(+pi +ne -ample)'"); + assertQ( + req( + "q", + "lowerfilt:(+printableinpuntableplantable)", + "qt", + "/spellCheckWithWordbreak", + "indent", + "true", + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellCheckComponent.SPELLCHECK_ACCURACY, + "1", + SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, + "true", + SpellCheckComponent.SPELLCHECK_COLLATE, + "true", + SpellCheckComponent.SPELLCHECK_COLLATE_EXTENDED_RESULTS, + "true", + SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, + "1"), + "//lst[@name='collation'][1 ]/str[@name='collationQuery']='lowerfilt:(+printable +in +puntable +plantable)'"); + assertQ( + req( + "q", + "zxcv AND qwtp AND fghj", + "qt", + "/spellCheckWithWordbreak", + "defType", + "edismax", + "qf", + "lowerfilt", + "indent", + "true", + SpellCheckComponent.SPELLCHECK_BUILD, + "true", + SpellCheckComponent.COMPONENT_NAME, + "true", + SpellCheckComponent.SPELLCHECK_ACCURACY, + ".75", + SpellCheckComponent.SPELLCHECK_EXTENDED_RESULTS, + "true", + SpellCheckComponent.SPELLCHECK_COLLATE, + "true", + SpellCheckComponent.SPELLCHECK_COLLATE_EXTENDED_RESULTS, + "true", + SpellCheckComponent.SPELLCHECK_MAX_COLLATIONS, + "10"), + "//lst[@name='collation'][1 ]/str[@name='collationQuery']='zxcvqwtp AND 
(fg AND hj)'"); } } diff --git a/solr/core/src/test/org/apache/solr/spelling/suggest/RandomTestDictionaryFactory.java b/solr/core/src/test/org/apache/solr/spelling/suggest/RandomTestDictionaryFactory.java index f692d42b6e2..06195e855ea 100644 --- a/solr/core/src/test/org/apache/solr/spelling/suggest/RandomTestDictionaryFactory.java +++ b/solr/core/src/test/org/apache/solr/spelling/suggest/RandomTestDictionaryFactory.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.lang.invoke.MethodHandles; - import org.apache.lucene.search.spell.Dictionary; import org.apache.lucene.search.suggest.InputIterator; import org.apache.lucene.util.BytesRef; @@ -33,10 +32,10 @@ import org.slf4j.LoggerFactory; /** - * Factory for a dictionary with an iterator over bounded-length random strings (with fixed - * weight of 1 and null payloads) that only operates when RandomDictionary.enabledSysProp - * is set; this will be true from the time a RandomDictionary has been constructed until - * its enabledSysProp has been cleared. + * Factory for a dictionary with an iterator over bounded-length random strings (with fixed weight + * of 1 and null payloads) that only operates when RandomDictionary.enabledSysProp is set; this will + * be true from the time a RandomDictionary has been constructed until its enabledSysProp has been + * cleared. */ public class RandomTestDictionaryFactory extends DictionaryFactory { public static final String RAND_DICT_MAX_ITEMS = "randDictMaxItems"; @@ -46,11 +45,11 @@ public class RandomTestDictionaryFactory extends DictionaryFactory { @Override public RandomTestDictionary create(SolrCore core, SolrIndexSearcher searcher) { - if(params == null) { + if (params == null) { // should not happen; implies setParams was not called throw new IllegalStateException("Value of params not set"); } - String name = (String)params.get(CommonParams.NAME); + String name = (String) params.get(CommonParams.NAME); if (name == null) { // Shouldn't happen since this is the component name throw new IllegalArgumentException(CommonParams.NAME + " is a mandatory parameter"); } @@ -63,7 +62,8 @@ public RandomTestDictionary create(SolrCore core, SolrIndexSearcher searcher) { } public static class RandomTestDictionary implements Dictionary { - private static final String SYS_PROP_PREFIX = RandomTestDictionary.class.getName() + ".enabled."; + private static final String SYS_PROP_PREFIX = + RandomTestDictionary.class.getName() + ".enabled."; private final String enabledSysProp; // Clear this property to stop the input iterator private final long maxItems; private long emittedItems = 0L; diff --git a/solr/core/src/test/org/apache/solr/spelling/suggest/SuggesterTest.java b/solr/core/src/test/org/apache/solr/spelling/suggest/SuggesterTest.java index c1d2ccc998c..64d4ad834d8 100644 --- a/solr/core/src/test/org/apache/solr/spelling/suggest/SuggesterTest.java +++ b/solr/core/src/test/org/apache/solr/spelling/suggest/SuggesterTest.java @@ -24,9 +24,7 @@ import org.junit.Test; public class SuggesterTest extends SolrTestCaseJ4 { - /** - * Expected URI at which the given suggester will live. - */ + /** Expected URI at which the given suggester will live. 
diff --git a/solr/core/src/test/org/apache/solr/spelling/suggest/SuggesterTest.java b/solr/core/src/test/org/apache/solr/spelling/suggest/SuggesterTest.java
index c1d2ccc998c..64d4ad834d8 100644
--- a/solr/core/src/test/org/apache/solr/spelling/suggest/SuggesterTest.java
+++ b/solr/core/src/test/org/apache/solr/spelling/suggest/SuggesterTest.java
@@ -24,9 +24,7 @@
 import org.junit.Test;

 public class SuggesterTest extends SolrTestCaseJ4 {
-  /**
-   * Expected URI at which the given suggester will live.
-   */
+  /** Expected URI at which the given suggester will live. */
   protected String requestUri = "/suggest";

   // TODO: fix this test to not require FSDirectory
@@ -36,9 +34,9 @@
   public static void beforeClass() throws Exception {
     savedFactory = System.getProperty("solr.DirectoryFactory");
     System.setProperty("solr.directoryFactory", "org.apache.solr.core.MockFSDirectoryFactory");
-    initCore("solrconfig-spellchecker.xml","schema-spellchecker.xml");
+    initCore("solrconfig-spellchecker.xml", "schema-spellchecker.xml");
   }
-  
+
   @AfterClass
   public static void afterClass() {
     if (savedFactory == null) {
@@ -49,29 +47,38 @@ public static void afterClass() {
   }

   public static void addDocs() {
-    assertU(adoc("id", "1",
-        "text", "acceptable accidentally accommodate acquire"
-    ));
-    assertU(adoc("id", "2",
-        "text", "believe bellwether accommodate acquire"
-    ));
-    assertU(adoc("id", "3",
-        "text", "cemetery changeable conscientious consensus acquire bellwether"
-    ));
+    assertU(
+        adoc(
+            "id", "1",
+            "text", "acceptable accidentally accommodate acquire"));
+    assertU(
+        adoc(
+            "id", "2",
+            "text", "believe bellwether accommodate acquire"));
+    assertU(
+        adoc("id", "3", "text", "cemetery changeable conscientious consensus acquire bellwether"));
   }
-  
+
   @Test
   public void testSuggestions() throws Exception {
     addDocs();
     assertU(commit()); // configured to do a rebuild on commit
-    assertQ(req("qt", requestUri, "q", "ac", SpellingParams.SPELLCHECK_COUNT, "2", SpellingParams.SPELLCHECK_ONLY_MORE_POPULAR, "true"),
+    assertQ(
+        req(
+            "qt",
+            requestUri,
+            "q",
+            "ac",
+            SpellingParams.SPELLCHECK_COUNT,
+            "2",
+            SpellingParams.SPELLCHECK_ONLY_MORE_POPULAR,
+            "true"),
         "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='ac']/int[@name='numFound'][.='2']",
         "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='ac']/arr[@name='suggestion']/str[1][.='acquire']",
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='ac']/arr[@name='suggestion']/str[2][.='accommodate']"
-    );
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='ac']/arr[@name='suggestion']/str[2][.='accommodate']");
   }
-  
+
   @Test
   public void testReload() throws Exception {
     addDocs();
@@ -80,28 +87,52 @@ public void testReload() throws Exception {
     h.reload();
     // wait until the new searcher is registered
     waitForWarming();
-    
-    assertQ(req("qt", requestUri, "q", "ac", SpellingParams.SPELLCHECK_COUNT, "2", SpellingParams.SPELLCHECK_ONLY_MORE_POPULAR, "true"),
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='ac']/int[@name='numFound'][.='2']",
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='ac']/arr[@name='suggestion']/str[1][.='acquire']",
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='ac']/arr[@name='suggestion']/str[2][.='accommodate']"
-    );
+
+    assertQ(
+        req(
+            "qt",
+            requestUri,
+            "q",
+            "ac",
+            SpellingParams.SPELLCHECK_COUNT,
+            "2",
+            SpellingParams.SPELLCHECK_ONLY_MORE_POPULAR,
+            "true"),
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='ac']/int[@name='numFound'][.='2']",
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='ac']/arr[@name='suggestion']/str[1][.='acquire']",
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='ac']/arr[@name='suggestion']/str[2][.='accommodate']");
   }
-  
+
   @Test
   public void testRebuild() throws Exception {
     addDocs();
     assertU(commit());
-    assertQ(req("qt", requestUri, "q", "ac", SpellingParams.SPELLCHECK_COUNT, "2", SpellingParams.SPELLCHECK_ONLY_MORE_POPULAR, "true"),
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='ac']/int[@name='numFound'][.='2']");
-    assertU(adoc("id", "4",
-        "text", "actually"
-    ));
+    assertQ(
+        req(
+            "qt",
+            requestUri,
+            "q",
+            "ac",
+            SpellingParams.SPELLCHECK_COUNT,
+            "2",
+            SpellingParams.SPELLCHECK_ONLY_MORE_POPULAR,
+            "true"),
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='ac']/int[@name='numFound'][.='2']");
+    assertU(adoc("id", "4", "text", "actually"));
     assertU(commit());
-    assertQ(req("qt", requestUri, "q", "ac", SpellingParams.SPELLCHECK_COUNT, "2", SpellingParams.SPELLCHECK_ONLY_MORE_POPULAR, "true"),
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='ac']/int[@name='numFound'][.='2']");
+    assertQ(
+        req(
+            "qt",
+            requestUri,
+            "q",
+            "ac",
+            SpellingParams.SPELLCHECK_COUNT,
+            "2",
+            SpellingParams.SPELLCHECK_ONLY_MORE_POPULAR,
+            "true"),
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='ac']/int[@name='numFound'][.='2']");
   }
-  
+
   // SOLR-2726
   public void testAnalyzer() throws Exception {
     Suggester suggester = new Suggester();
diff --git a/solr/core/src/test/org/apache/solr/spelling/suggest/TestAnalyzeInfixSuggestions.java b/solr/core/src/test/org/apache/solr/spelling/suggest/TestAnalyzeInfixSuggestions.java
index 321e9ad242f..9f0d3064554 100644
--- a/solr/core/src/test/org/apache/solr/spelling/suggest/TestAnalyzeInfixSuggestions.java
+++ b/solr/core/src/test/org/apache/solr/spelling/suggest/TestAnalyzeInfixSuggestions.java
@@ -20,98 +20,151 @@
 import org.apache.solr.common.params.SpellingParams;
 import org.junit.BeforeClass;

-public class TestAnalyzeInfixSuggestions extends SolrTestCaseJ4 {
+public class TestAnalyzeInfixSuggestions extends SolrTestCaseJ4 {
   static final String URI_DEFAULT = "/infix_suggest_analyzing";
   static final String URI_SUGGEST_DEFAULT = "/analyzing_infix_suggest";

   @BeforeClass
   public static void beforeClass() throws Exception {
-    initCore("solrconfig-phrasesuggest.xml","schema-phrasesuggest.xml");
+    initCore("solrconfig-phrasesuggest.xml", "schema-phrasesuggest.xml");
     assertQ(req("qt", URI_DEFAULT, "q", "", SpellingParams.SPELLCHECK_BUILD, "true"));
-    assertQ(req("qt", URI_SUGGEST_DEFAULT, "q", "", SuggesterParams.SUGGEST_BUILD_ALL, "true"));
+    assertQ(req("qt", URI_SUGGEST_DEFAULT, "q", "", SuggesterParams.SUGGEST_BUILD_ALL, "true"));
   }
-  
+
   public void testSingle() throws Exception {
-    
-    assertQ(req("qt", URI_DEFAULT, "q", "japan", SpellingParams.SPELLCHECK_COUNT, "1"),
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/int[@name='numFound'][.='1']",
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/arr[@name='suggestion']/str[1][.='Japanese Autocomplete and Japanese Highlighter broken']"
-    );
-    
-    assertQ(req("qt", URI_DEFAULT, "q", "high", SpellingParams.SPELLCHECK_COUNT, "1"),
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='high']/int[@name='numFound'][.='1']",
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='high']/arr[@name='suggestion']/str[1][.='Japanese Autocomplete and Japanese Highlighter broken']"
-    );
-    
-    /* equivalent SolrSuggester, SuggestComponent tests */
-    assertQ(req("qt", URI_SUGGEST_DEFAULT, "q", "japan", SuggesterParams.SUGGEST_COUNT, "1", SuggesterParams.SUGGEST_DICT, "analyzing_infix_suggest_default"),
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='japan']/int[@name='numFound'][.='1']",
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='japan']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='Japanese Autocomplete and Japanese Highlighter broken']"
-    );
-    
-    assertQ(req("qt", URI_SUGGEST_DEFAULT, "q", "high", SuggesterParams.SUGGEST_COUNT, "1", SuggesterParams.SUGGEST_DICT, "analyzing_infix_suggest_default"),
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='high']/int[@name='numFound'][.='1']",
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='high']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='Japanese Autocomplete and Japanese Highlighter broken']"
-    );
+
+    assertQ(
+        req("qt", URI_DEFAULT, "q", "japan", SpellingParams.SPELLCHECK_COUNT, "1"),
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/int[@name='numFound'][.='1']",
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/arr[@name='suggestion']/str[1][.='Japanese Autocomplete and Japanese Highlighter broken']");
+
+    assertQ(
+        req("qt", URI_DEFAULT, "q", "high", SpellingParams.SPELLCHECK_COUNT, "1"),
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='high']/int[@name='numFound'][.='1']",
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='high']/arr[@name='suggestion']/str[1][.='Japanese Autocomplete and Japanese Highlighter broken']");
+
+    /* equivalent SolrSuggester, SuggestComponent tests */
+    assertQ(
+        req(
+            "qt",
+            URI_SUGGEST_DEFAULT,
+            "q",
+            "japan",
+            SuggesterParams.SUGGEST_COUNT,
+            "1",
+            SuggesterParams.SUGGEST_DICT,
+            "analyzing_infix_suggest_default"),
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='japan']/int[@name='numFound'][.='1']",
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='japan']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='Japanese Autocomplete and Japanese Highlighter broken']");
+
+    assertQ(
+        req(
+            "qt",
+            URI_SUGGEST_DEFAULT,
+            "q",
+            "high",
+            SuggesterParams.SUGGEST_COUNT,
+            "1",
+            SuggesterParams.SUGGEST_DICT,
+            "analyzing_infix_suggest_default"),
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='high']/int[@name='numFound'][.='1']",
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='high']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='Japanese Autocomplete and Japanese Highlighter broken']");
   }
-  
+
   public void testMultiple() throws Exception {
-    
-    assertQ(req("qt", URI_DEFAULT, "q", "japan", SpellingParams.SPELLCHECK_COUNT, "2"),
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/int[@name='numFound'][.='2']",
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/arr[@name='suggestion']/str[1][.='Japanese Autocomplete and Japanese Highlighter broken']",
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/arr[@name='suggestion']/str[2][.='Add Japanese Kanji number normalization to Kuromoji']"
-    );
-    assertQ(req("qt", URI_DEFAULT, "q", "japan", SpellingParams.SPELLCHECK_COUNT, "3"),
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/int[@name='numFound'][.='3']",
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/arr[@name='suggestion']/str[1][.='Japanese Autocomplete and Japanese Highlighter broken']",
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/arr[@name='suggestion']/str[2][.='Add Japanese Kanji number normalization to Kuromoji']",
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/arr[@name='suggestion']/str[3][.='Add decompose compound Japanese Katakana token capability to Kuromoji']"
-    );
-    assertQ(req("qt", URI_DEFAULT, "q", "japan", SpellingParams.SPELLCHECK_COUNT, "4"),
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/int[@name='numFound'][.='3']",
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/arr[@name='suggestion']/str[1][.='Japanese Autocomplete and Japanese Highlighter broken']",
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/arr[@name='suggestion']/str[2][.='Add Japanese Kanji number normalization to Kuromoji']",
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/arr[@name='suggestion']/str[3][.='Add decompose compound Japanese Katakana token capability to Kuromoji']"
-    );
-    
-    /* SolrSuggester, SuggestComponent tests: allTermsRequire (true), highlight (true) */
-    assertQ(req("qt", URI_SUGGEST_DEFAULT, "q", "japan", SuggesterParams.SUGGEST_COUNT, "2", SuggesterParams.SUGGEST_DICT, "analyzing_infix_suggest_default"),
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='japan']/int[@name='numFound'][.='2']",
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='japan']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='Japanese Autocomplete and Japanese Highlighter broken']",
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='japan']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='Add Japanese Kanji number normalization to Kuromoji']"
-    );
-    
-    assertQ(req("qt", URI_SUGGEST_DEFAULT, "q", "japanese ka", SuggesterParams.SUGGEST_COUNT, "2", SuggesterParams.SUGGEST_DICT, "analyzing_infix_suggest_default"),
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='japanese ka']/int[@name='numFound'][.='2']",
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='japanese ka']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='Add Japanese Kanji number normalization to Kuromoji']",
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='japanese ka']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='Add decompose compound Japanese Katakana token capability to Kuromoji']"
-    );
-    
+
+    assertQ(
+        req("qt", URI_DEFAULT, "q", "japan", SpellingParams.SPELLCHECK_COUNT, "2"),
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/int[@name='numFound'][.='2']",
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/arr[@name='suggestion']/str[1][.='Japanese Autocomplete and Japanese Highlighter broken']",
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/arr[@name='suggestion']/str[2][.='Add Japanese Kanji number normalization to Kuromoji']");
+    assertQ(
+        req("qt", URI_DEFAULT, "q", "japan", SpellingParams.SPELLCHECK_COUNT, "3"),
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/int[@name='numFound'][.='3']",
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/arr[@name='suggestion']/str[1][.='Japanese Autocomplete and Japanese Highlighter broken']",
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/arr[@name='suggestion']/str[2][.='Add Japanese Kanji number normalization to Kuromoji']",
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/arr[@name='suggestion']/str[3][.='Add decompose compound Japanese Katakana token capability to Kuromoji']");
+    assertQ(
+        req("qt", URI_DEFAULT, "q", "japan", SpellingParams.SPELLCHECK_COUNT, "4"),
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/int[@name='numFound'][.='3']",
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/arr[@name='suggestion']/str[1][.='Japanese Autocomplete and Japanese Highlighter broken']",
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/arr[@name='suggestion']/str[2][.='Add Japanese Kanji number normalization to Kuromoji']",
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='japan']/arr[@name='suggestion']/str[3][.='Add decompose compound Japanese Katakana token capability to Kuromoji']");
+
+    /* SolrSuggester, SuggestComponent tests: allTermsRequire (true), highlight (true) */
+    assertQ(
+        req(
+            "qt",
+            URI_SUGGEST_DEFAULT,
+            "q",
+            "japan",
+            SuggesterParams.SUGGEST_COUNT,
+            "2",
+            SuggesterParams.SUGGEST_DICT,
+            "analyzing_infix_suggest_default"),
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='japan']/int[@name='numFound'][.='2']",
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='japan']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='Japanese Autocomplete and Japanese Highlighter broken']",
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='japan']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='Add Japanese Kanji number normalization to Kuromoji']");
+
+    assertQ(
+        req(
+            "qt",
+            URI_SUGGEST_DEFAULT,
+            "q",
+            "japanese ka",
+            SuggesterParams.SUGGEST_COUNT,
+            "2",
+            SuggesterParams.SUGGEST_DICT,
+            "analyzing_infix_suggest_default"),
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='japanese ka']/int[@name='numFound'][.='2']",
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='japanese ka']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='Add Japanese Kanji number normalization to Kuromoji']",
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_default']/lst[@name='japanese ka']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='Add decompose compound Japanese Katakana token capability to Kuromoji']");
   }
-  
+
   public void testWithoutHighlight() throws Exception {
-    assertQ(req("qt", URI_SUGGEST_DEFAULT, "q", "japan", SuggesterParams.SUGGEST_COUNT, "2", SuggesterParams.SUGGEST_DICT, "analyzing_infix_suggest_without_highlight"),
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_without_highlight']/lst[@name='japan']/int[@name='numFound'][.='2']",
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_without_highlight']/lst[@name='japan']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='Japanese Autocomplete and Japanese Highlighter broken']",
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_without_highlight']/lst[@name='japan']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='Add Japanese Kanji number normalization to Kuromoji']"
-    );
+    assertQ(
+        req(
+            "qt",
+            URI_SUGGEST_DEFAULT,
+            "q",
+            "japan",
+            SuggesterParams.SUGGEST_COUNT,
+            "2",
+            SuggesterParams.SUGGEST_DICT,
+            "analyzing_infix_suggest_without_highlight"),
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_without_highlight']/lst[@name='japan']/int[@name='numFound'][.='2']",
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_without_highlight']/lst[@name='japan']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='Japanese Autocomplete and Japanese Highlighter broken']",
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_without_highlight']/lst[@name='japan']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='Add Japanese Kanji number normalization to Kuromoji']");
   }
-  
+
   public void testNotAllTermsRequired() throws Exception {
-    assertQ(req("qt", URI_SUGGEST_DEFAULT, "q", "japanese javanese", SuggesterParams.SUGGEST_COUNT, "5", SuggesterParams.SUGGEST_DICT, "analyzing_infix_suggest_not_all_terms_required"),
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_not_all_terms_required']/lst[@name='japanese javanese']/int[@name='numFound'][.='3']",
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_not_all_terms_required']/lst[@name='japanese javanese']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='Japanese Autocomplete and Japanese Highlighter broken']",
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_not_all_terms_required']/lst[@name='japanese javanese']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='Add Japanese Kanji number normalization to Kuromoji']",
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_not_all_terms_required']/lst[@name='japanese javanese']/arr[@name='suggestions']/lst[3]/str[@name='term'][.='Add decompose compound Japanese Katakana token capability to Kuromoji']"
-    );
-    
-    assertQ(req("qt", URI_SUGGEST_DEFAULT, "q", "just number", SuggesterParams.SUGGEST_COUNT, "5", SuggesterParams.SUGGEST_DICT, "analyzing_infix_suggest_not_all_terms_required"),
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_not_all_terms_required']/lst[@name='just number']/int[@name='numFound'][.='2']",
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_not_all_terms_required']/lst[@name='just number']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='Add Japanese Kanji number normalization to Kuromoji']",
-        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_not_all_terms_required']/lst[@name='just number']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='This is just another entry!']"
-    );
-    
+    assertQ(
+        req(
+            "qt",
+            URI_SUGGEST_DEFAULT,
+            "q",
+            "japanese javanese",
+            SuggesterParams.SUGGEST_COUNT,
+            "5",
+            SuggesterParams.SUGGEST_DICT,
+            "analyzing_infix_suggest_not_all_terms_required"),
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_not_all_terms_required']/lst[@name='japanese javanese']/int[@name='numFound'][.='3']",
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_not_all_terms_required']/lst[@name='japanese javanese']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='Japanese Autocomplete and Japanese Highlighter broken']",
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_not_all_terms_required']/lst[@name='japanese javanese']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='Add Japanese Kanji number normalization to Kuromoji']",
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_not_all_terms_required']/lst[@name='japanese javanese']/arr[@name='suggestions']/lst[3]/str[@name='term'][.='Add decompose compound Japanese Katakana token capability to Kuromoji']");
+
+    assertQ(
+        req(
+            "qt",
+            URI_SUGGEST_DEFAULT,
+            "q",
+            "just number",
+            SuggesterParams.SUGGEST_COUNT,
+            "5",
+            SuggesterParams.SUGGEST_DICT,
+            "analyzing_infix_suggest_not_all_terms_required"),
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_not_all_terms_required']/lst[@name='just number']/int[@name='numFound'][.='2']",
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_not_all_terms_required']/lst[@name='just number']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='Add Japanese Kanji number normalization to Kuromoji']",
+        "//lst[@name='suggest']/lst[@name='analyzing_infix_suggest_not_all_terms_required']/lst[@name='just number']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='This is just another entry!']");
   }
-  
 }
"//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='h']/int[@name='numFound'][.='2']", "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='h']/arr[@name='suggestion']/str[1][.='話した']", - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='h']/arr[@name='suggestion']/str[2][.='北海道']" - ); + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='h']/arr[@name='suggestion']/str[2][.='北海道']"); } } diff --git a/solr/core/src/test/org/apache/solr/spelling/suggest/TestBlendedInfixSuggestions.java b/solr/core/src/test/org/apache/solr/spelling/suggest/TestBlendedInfixSuggestions.java index 4dc038ffacf..2157efd2822 100644 --- a/solr/core/src/test/org/apache/solr/spelling/suggest/TestBlendedInfixSuggestions.java +++ b/solr/core/src/test/org/apache/solr/spelling/suggest/TestBlendedInfixSuggestions.java @@ -21,15 +21,24 @@ public class TestBlendedInfixSuggestions extends SolrTestCaseJ4 { static final String URI = "/blended_infix_suggest"; - + @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-phrasesuggest.xml","schema-phrasesuggest.xml"); + initCore("solrconfig-phrasesuggest.xml", "schema-phrasesuggest.xml"); assertQ(req("qt", URI, "q", "", SuggesterParams.SUGGEST_BUILD_ALL, "true")); } public void testLinearBlenderType() { - assertQ(req("qt", URI, "q", "the", SuggesterParams.SUGGEST_COUNT, "10", SuggesterParams.SUGGEST_DICT, "blended_infix_suggest_linear"), + assertQ( + req( + "qt", + URI, + "q", + "the", + SuggesterParams.SUGGEST_COUNT, + "10", + SuggesterParams.SUGGEST_DICT, + "blended_infix_suggest_linear"), "//lst[@name='suggest']/lst[@name='blended_infix_suggest_linear']/lst[@name='the']/int[@name='numFound'][.='3']", "//lst[@name='suggest']/lst[@name='blended_infix_suggest_linear']/lst[@name='the']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='top of the lake']", "//lst[@name='suggest']/lst[@name='blended_infix_suggest_linear']/lst[@name='the']/arr[@name='suggestions']/lst[1]/long[@name='weight'][.='14']", @@ -39,12 +48,20 @@ public void testLinearBlenderType() { "//lst[@name='suggest']/lst[@name='blended_infix_suggest_linear']/lst[@name='the']/arr[@name='suggestions']/lst[2]/str[@name='payload'][.='ret']", "//lst[@name='suggest']/lst[@name='blended_infix_suggest_linear']/lst[@name='the']/arr[@name='suggestions']/lst[3]/str[@name='term'][.='star wars: episode v - the empire strikes back']", "//lst[@name='suggest']/lst[@name='blended_infix_suggest_linear']/lst[@name='the']/arr[@name='suggestions']/lst[3]/long[@name='weight'][.='7']", - "//lst[@name='suggest']/lst[@name='blended_infix_suggest_linear']/lst[@name='the']/arr[@name='suggestions']/lst[3]/str[@name='payload'][.='star']" - ); + "//lst[@name='suggest']/lst[@name='blended_infix_suggest_linear']/lst[@name='the']/arr[@name='suggestions']/lst[3]/str[@name='payload'][.='star']"); } - + public void testReciprocalBlenderType() { - assertQ(req("qt", URI, "q", "the", SuggesterParams.SUGGEST_COUNT, "10", SuggesterParams.SUGGEST_DICT, "blended_infix_suggest_reciprocal"), + assertQ( + req( + "qt", + URI, + "q", + "the", + SuggesterParams.SUGGEST_COUNT, + "10", + SuggesterParams.SUGGEST_DICT, + "blended_infix_suggest_reciprocal"), "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/int[@name='numFound'][.='3']", "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='the returned']", 
"//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/arr[@name='suggestions']/lst[1]/long[@name='weight'][.='10']", @@ -54,12 +71,22 @@ public void testReciprocalBlenderType() { "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/arr[@name='suggestions']/lst[2]/str[@name='payload'][.='lake']", "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/arr[@name='suggestions']/lst[3]/str[@name='term'][.='star wars: episode v - the empire strikes back']", "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/arr[@name='suggestions']/lst[3]/long[@name='weight'][.='2']", - "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/arr[@name='suggestions']/lst[3]/str[@name='payload'][.='star']" - ); + "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/arr[@name='suggestions']/lst[3]/str[@name='payload'][.='star']"); } - public void testExponentialReciprocalBlenderTypeExponent1() { //exponent=1 will give same output as reciprocal - assertQ(req("qt", URI, "q", "the", SuggesterParams.SUGGEST_COUNT, "10", SuggesterParams.SUGGEST_DICT, "blended_infix_suggest_exponential_reciprocal_1"), + public void + testExponentialReciprocalBlenderTypeExponent1() { // exponent=1 will give same output as + // reciprocal + assertQ( + req( + "qt", + URI, + "q", + "the", + SuggesterParams.SUGGEST_COUNT, + "10", + SuggesterParams.SUGGEST_DICT, + "blended_infix_suggest_exponential_reciprocal_1"), "//lst[@name='suggest']/lst[@name='blended_infix_suggest_exponential_reciprocal_1']/lst[@name='the']/int[@name='numFound'][.='3']", "//lst[@name='suggest']/lst[@name='blended_infix_suggest_exponential_reciprocal_1']/lst[@name='the']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='the returned']", "//lst[@name='suggest']/lst[@name='blended_infix_suggest_exponential_reciprocal_1']/lst[@name='the']/arr[@name='suggestions']/lst[1]/long[@name='weight'][.='10']", @@ -69,12 +96,20 @@ public void testExponentialReciprocalBlenderTypeExponent1() { //exponent=1 will "//lst[@name='suggest']/lst[@name='blended_infix_suggest_exponential_reciprocal_1']/lst[@name='the']/arr[@name='suggestions']/lst[2]/str[@name='payload'][.='lake']", "//lst[@name='suggest']/lst[@name='blended_infix_suggest_exponential_reciprocal_1']/lst[@name='the']/arr[@name='suggestions']/lst[3]/str[@name='term'][.='star wars: episode v - the empire strikes back']", "//lst[@name='suggest']/lst[@name='blended_infix_suggest_exponential_reciprocal_1']/lst[@name='the']/arr[@name='suggestions']/lst[3]/long[@name='weight'][.='2']", - "//lst[@name='suggest']/lst[@name='blended_infix_suggest_exponential_reciprocal_1']/lst[@name='the']/arr[@name='suggestions']/lst[3]/str[@name='payload'][.='star']" - ); + "//lst[@name='suggest']/lst[@name='blended_infix_suggest_exponential_reciprocal_1']/lst[@name='the']/arr[@name='suggestions']/lst[3]/str[@name='payload'][.='star']"); } public void testExponentialReciprocalBlenderType() { // default is exponent=2.0 - assertQ(req("qt", URI, "q", "the", SuggesterParams.SUGGEST_COUNT, "10", SuggesterParams.SUGGEST_DICT, "blended_infix_suggest_exponential_reciprocal"), + assertQ( + req( + "qt", + URI, + "q", + "the", + SuggesterParams.SUGGEST_COUNT, + "10", + SuggesterParams.SUGGEST_DICT, + "blended_infix_suggest_exponential_reciprocal"), 
"//lst[@name='suggest']/lst[@name='blended_infix_suggest_exponential_reciprocal']/lst[@name='the']/int[@name='numFound'][.='3']", "//lst[@name='suggest']/lst[@name='blended_infix_suggest_exponential_reciprocal']/lst[@name='the']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='the returned']", "//lst[@name='suggest']/lst[@name='blended_infix_suggest_exponential_reciprocal']/lst[@name='the']/arr[@name='suggestions']/lst[1]/long[@name='weight'][.='10']", @@ -84,12 +119,22 @@ public void testExponentialReciprocalBlenderType() { // default is exponent=2.0 "//lst[@name='suggest']/lst[@name='blended_infix_suggest_exponential_reciprocal']/lst[@name='the']/arr[@name='suggestions']/lst[2]/str[@name='payload'][.='lake']", "//lst[@name='suggest']/lst[@name='blended_infix_suggest_exponential_reciprocal']/lst[@name='the']/arr[@name='suggestions']/lst[3]/str[@name='term'][.='star wars: episode v - the empire strikes back']", "//lst[@name='suggest']/lst[@name='blended_infix_suggest_exponential_reciprocal']/lst[@name='the']/arr[@name='suggestions']/lst[3]/long[@name='weight'][.='0']", - "//lst[@name='suggest']/lst[@name='blended_infix_suggest_exponential_reciprocal']/lst[@name='the']/arr[@name='suggestions']/lst[3]/str[@name='payload'][.='star']" - ); + "//lst[@name='suggest']/lst[@name='blended_infix_suggest_exponential_reciprocal']/lst[@name='the']/arr[@name='suggestions']/lst[3]/str[@name='payload'][.='star']"); } public void testMultiSuggester() { - assertQ(req("qt", URI, "q", "the", SuggesterParams.SUGGEST_COUNT, "10", SuggesterParams.SUGGEST_DICT, "blended_infix_suggest_linear", SuggesterParams.SUGGEST_DICT, "blended_infix_suggest_reciprocal"), + assertQ( + req( + "qt", + URI, + "q", + "the", + SuggesterParams.SUGGEST_COUNT, + "10", + SuggesterParams.SUGGEST_DICT, + "blended_infix_suggest_linear", + SuggesterParams.SUGGEST_DICT, + "blended_infix_suggest_reciprocal"), "//lst[@name='suggest']/lst[@name='blended_infix_suggest_linear']/lst[@name='the']/int[@name='numFound'][.='3']", "//lst[@name='suggest']/lst[@name='blended_infix_suggest_linear']/lst[@name='the']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='top of the lake']", "//lst[@name='suggest']/lst[@name='blended_infix_suggest_linear']/lst[@name='the']/arr[@name='suggestions']/lst[1]/long[@name='weight'][.='14']", @@ -109,27 +154,57 @@ public void testMultiSuggester() { "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/arr[@name='suggestions']/lst[2]/str[@name='payload'][.='lake']", "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/arr[@name='suggestions']/lst[3]/str[@name='term'][.='star wars: episode v - the empire strikes back']", "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/arr[@name='suggestions']/lst[3]/long[@name='weight'][.='2']", - "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/arr[@name='suggestions']/lst[3]/str[@name='payload'][.='star']" - ); + "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/arr[@name='suggestions']/lst[3]/str[@name='payload'][.='star']"); } public void testSuggestCount() { - assertQ(req("qt", URI, "q", "the", SuggesterParams.SUGGEST_COUNT, "1", SuggesterParams.SUGGEST_DICT, "blended_infix_suggest_reciprocal"), - "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/int[@name='numFound'][.='1']" - ); + assertQ( + req( + "qt", + URI, + "q", + "the", + SuggesterParams.SUGGEST_COUNT, + 
"1", + SuggesterParams.SUGGEST_DICT, + "blended_infix_suggest_reciprocal"), + "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/int[@name='numFound'][.='1']"); - assertQ(req("qt", URI, "q", "the", SuggesterParams.SUGGEST_COUNT, "2", SuggesterParams.SUGGEST_DICT, "blended_infix_suggest_reciprocal"), - "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/int[@name='numFound'][.='2']" - ); + assertQ( + req( + "qt", + URI, + "q", + "the", + SuggesterParams.SUGGEST_COUNT, + "2", + SuggesterParams.SUGGEST_DICT, + "blended_infix_suggest_reciprocal"), + "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/int[@name='numFound'][.='2']"); - assertQ(req("qt", URI, "q", "the", SuggesterParams.SUGGEST_COUNT, "3", SuggesterParams.SUGGEST_DICT, "blended_infix_suggest_reciprocal"), - "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/int[@name='numFound'][.='3']" - ); + assertQ( + req( + "qt", + URI, + "q", + "the", + SuggesterParams.SUGGEST_COUNT, + "3", + SuggesterParams.SUGGEST_DICT, + "blended_infix_suggest_reciprocal"), + "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/int[@name='numFound'][.='3']"); - assertQ(req("qt", URI, "q", "the", SuggesterParams.SUGGEST_COUNT, "20", SuggesterParams.SUGGEST_DICT, "blended_infix_suggest_reciprocal"), - "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/int[@name='numFound'][.='3']" - ); + assertQ( + req( + "qt", + URI, + "q", + "the", + SuggesterParams.SUGGEST_COUNT, + "20", + SuggesterParams.SUGGEST_DICT, + "blended_infix_suggest_reciprocal"), + "//lst[@name='suggest']/lst[@name='blended_infix_suggest_reciprocal']/lst[@name='the']/int[@name='numFound'][.='3']"); } - } diff --git a/solr/core/src/test/org/apache/solr/spelling/suggest/TestFileDictionaryLookup.java b/solr/core/src/test/org/apache/solr/spelling/suggest/TestFileDictionaryLookup.java index 387dec0f9b0..7067ea05152 100644 --- a/solr/core/src/test/org/apache/solr/spelling/suggest/TestFileDictionaryLookup.java +++ b/solr/core/src/test/org/apache/solr/spelling/suggest/TestFileDictionaryLookup.java @@ -19,42 +19,111 @@ import org.apache.solr.SolrTestCaseJ4; import org.junit.BeforeClass; -public class TestFileDictionaryLookup extends SolrTestCaseJ4 { +public class TestFileDictionaryLookup extends SolrTestCaseJ4 { static final String REQUEST_URI = "/fuzzy_suggest_analyzing_with_file_dict"; static final String DICT_NAME = "fuzzy_suggest_analyzing_with_file_dict"; - + @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-phrasesuggest.xml","schema-phrasesuggest.xml"); - assertQ(req("qt", REQUEST_URI, "q", "", SuggesterParams.SUGGEST_DICT, DICT_NAME, SuggesterParams.SUGGEST_BUILD, "true")); + initCore("solrconfig-phrasesuggest.xml", "schema-phrasesuggest.xml"); + assertQ( + req( + "qt", + REQUEST_URI, + "q", + "", + SuggesterParams.SUGGEST_DICT, + DICT_NAME, + SuggesterParams.SUGGEST_BUILD, + "true")); } - + public void testDefault() throws Exception { - + // tests to demonstrate default maxEdit parameter (value: 1), control for testWithMaxEdit2 - assertQ(req("qt", REQUEST_URI, "q", "chagn", SuggesterParams.SUGGEST_DICT, DICT_NAME, SuggesterParams.SUGGEST_COUNT, "3"), - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME +"']/lst[@name='chagn']/int[@name='numFound'][.='2']", - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME 
+"']/lst[@name='chagn']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='chance']", - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME +"']/lst[@name='chagn']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='change']" - ); - - assertQ(req("qt", REQUEST_URI, "q", "chacn", SuggesterParams.SUGGEST_DICT, DICT_NAME, SuggesterParams.SUGGEST_COUNT, "3"), - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME +"']/lst[@name='chacn']/int[@name='numFound'][.='2']", - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME +"']/lst[@name='chacn']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='chance']", - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME +"']/lst[@name='chacn']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='change']" - ); - - assertQ(req("qt", REQUEST_URI, "q", "chagr", SuggesterParams.SUGGEST_DICT, DICT_NAME, SuggesterParams.SUGGEST_COUNT, "3"), - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME +"']/lst[@name='chagr']/int[@name='numFound'][.='1']", - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME +"']/lst[@name='chagr']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='charge']" - ); - - assertQ(req("qt", REQUEST_URI, "q", "chanr", SuggesterParams.SUGGEST_DICT, DICT_NAME, SuggesterParams.SUGGEST_COUNT, "3"), - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME +"']/lst[@name='chanr']/int[@name='numFound'][.='3']" - ); - - assertQ(req("qt", REQUEST_URI, "q", "cyhnce", SuggesterParams.SUGGEST_DICT, DICT_NAME, SuggesterParams.SUGGEST_COUNT, "3"), - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME +"']/lst[@name='cyhnce']/int[@name='numFound'][.='0']" - ); + assertQ( + req( + "qt", + REQUEST_URI, + "q", + "chagn", + SuggesterParams.SUGGEST_DICT, + DICT_NAME, + SuggesterParams.SUGGEST_COUNT, + "3"), + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + "']/lst[@name='chagn']/int[@name='numFound'][.='2']", + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + "']/lst[@name='chagn']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='chance']", + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + "']/lst[@name='chagn']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='change']"); + + assertQ( + req( + "qt", + REQUEST_URI, + "q", + "chacn", + SuggesterParams.SUGGEST_DICT, + DICT_NAME, + SuggesterParams.SUGGEST_COUNT, + "3"), + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + "']/lst[@name='chacn']/int[@name='numFound'][.='2']", + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + "']/lst[@name='chacn']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='chance']", + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + "']/lst[@name='chacn']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='change']"); + + assertQ( + req( + "qt", + REQUEST_URI, + "q", + "chagr", + SuggesterParams.SUGGEST_DICT, + DICT_NAME, + SuggesterParams.SUGGEST_COUNT, + "3"), + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + "']/lst[@name='chagr']/int[@name='numFound'][.='1']", + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + "']/lst[@name='chagr']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='charge']"); + + assertQ( + req( + "qt", + REQUEST_URI, + "q", + "chanr", + SuggesterParams.SUGGEST_DICT, + DICT_NAME, + SuggesterParams.SUGGEST_COUNT, + "3"), + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + "']/lst[@name='chanr']/int[@name='numFound'][.='3']"); + + assertQ( + req( + "qt", + REQUEST_URI, + "q", + "cyhnce", + SuggesterParams.SUGGEST_DICT, + DICT_NAME, + SuggesterParams.SUGGEST_COUNT, + "3"), + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + 
"']/lst[@name='cyhnce']/int[@name='numFound'][.='0']"); } } diff --git a/solr/core/src/test/org/apache/solr/spelling/suggest/TestFreeTextSuggestions.java b/solr/core/src/test/org/apache/solr/spelling/suggest/TestFreeTextSuggestions.java index f304f8e0666..3bfa6130f38 100644 --- a/solr/core/src/test/org/apache/solr/spelling/suggest/TestFreeTextSuggestions.java +++ b/solr/core/src/test/org/apache/solr/spelling/suggest/TestFreeTextSuggestions.java @@ -21,37 +21,68 @@ public class TestFreeTextSuggestions extends SolrTestCaseJ4 { static final String URI = "/free_text_suggest"; - + @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-phrasesuggest.xml","schema-phrasesuggest.xml"); + initCore("solrconfig-phrasesuggest.xml", "schema-phrasesuggest.xml"); assertQ(req("qt", URI, "q", "", SuggesterParams.SUGGEST_BUILD_ALL, "true")); } - + public void test() { - assertQ(req("qt", URI, "q", "foo b", SuggesterParams.SUGGEST_COUNT, "1", SuggesterParams.SUGGEST_DICT, "free_text_suggest"), + assertQ( + req( + "qt", + URI, + "q", + "foo b", + SuggesterParams.SUGGEST_COUNT, + "1", + SuggesterParams.SUGGEST_DICT, + "free_text_suggest"), "//lst[@name='suggest']/lst[@name='free_text_suggest']/lst[@name='foo b']/int[@name='numFound'][.='1']", - "//lst[@name='suggest']/lst[@name='free_text_suggest']/lst[@name='foo b']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='foo bar']" - ); - - assertQ(req("qt", URI, "q", "foo ", SuggesterParams.SUGGEST_COUNT, "2", SuggesterParams.SUGGEST_DICT, "free_text_suggest"), + "//lst[@name='suggest']/lst[@name='free_text_suggest']/lst[@name='foo b']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='foo bar']"); + + assertQ( + req( + "qt", + URI, + "q", + "foo ", + SuggesterParams.SUGGEST_COUNT, + "2", + SuggesterParams.SUGGEST_DICT, + "free_text_suggest"), "//lst[@name='suggest']/lst[@name='free_text_suggest']/lst[@name='foo ']/int[@name='numFound'][.='2']", "//lst[@name='suggest']/lst[@name='free_text_suggest']/lst[@name='foo ']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='foo bar']", - "//lst[@name='suggest']/lst[@name='free_text_suggest']/lst[@name='foo ']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='foo bee']" - ); - - assertQ(req("qt", URI, "q", "foo", SuggesterParams.SUGGEST_COUNT, "2", SuggesterParams.SUGGEST_DICT, "free_text_suggest"), + "//lst[@name='suggest']/lst[@name='free_text_suggest']/lst[@name='foo ']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='foo bee']"); + + assertQ( + req( + "qt", + URI, + "q", + "foo", + SuggesterParams.SUGGEST_COUNT, + "2", + SuggesterParams.SUGGEST_DICT, + "free_text_suggest"), "//lst[@name='suggest']/lst[@name='free_text_suggest']/lst[@name='foo']/int[@name='numFound'][.='1']", - "//lst[@name='suggest']/lst[@name='free_text_suggest']/lst[@name='foo']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='foo']" - ); - assertQ(req("qt", URI, "q", "b", SuggesterParams.SUGGEST_COUNT, "5", SuggesterParams.SUGGEST_DICT, "free_text_suggest"), + "//lst[@name='suggest']/lst[@name='free_text_suggest']/lst[@name='foo']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='foo']"); + assertQ( + req( + "qt", + URI, + "q", + "b", + SuggesterParams.SUGGEST_COUNT, + "5", + SuggesterParams.SUGGEST_DICT, + "free_text_suggest"), "//lst[@name='suggest']/lst[@name='free_text_suggest']/lst[@name='b']/int[@name='numFound'][.='5']", "//lst[@name='suggest']/lst[@name='free_text_suggest']/lst[@name='b']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='bar']", 
"//lst[@name='suggest']/lst[@name='free_text_suggest']/lst[@name='b']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='baz']", "//lst[@name='suggest']/lst[@name='free_text_suggest']/lst[@name='b']/arr[@name='suggestions']/lst[3]/str[@name='term'][.='bee']", "//lst[@name='suggest']/lst[@name='free_text_suggest']/lst[@name='b']/arr[@name='suggestions']/lst[4]/str[@name='term'][.='blah']", - "//lst[@name='suggest']/lst[@name='free_text_suggest']/lst[@name='b']/arr[@name='suggestions']/lst[5]/str[@name='term'][.='boo']" - ); + "//lst[@name='suggest']/lst[@name='free_text_suggest']/lst[@name='b']/arr[@name='suggestions']/lst[5]/str[@name='term'][.='boo']"); } - } diff --git a/solr/core/src/test/org/apache/solr/spelling/suggest/TestFuzzyAnalyzedSuggestions.java b/solr/core/src/test/org/apache/solr/spelling/suggest/TestFuzzyAnalyzedSuggestions.java index 4d33e8bf0f6..a10ca01e0fa 100644 --- a/solr/core/src/test/org/apache/solr/spelling/suggest/TestFuzzyAnalyzedSuggestions.java +++ b/solr/core/src/test/org/apache/solr/spelling/suggest/TestFuzzyAnalyzedSuggestions.java @@ -20,7 +20,7 @@ import org.apache.solr.common.params.SpellingParams; import org.junit.BeforeClass; -public class TestFuzzyAnalyzedSuggestions extends SolrTestCaseJ4 { +public class TestFuzzyAnalyzedSuggestions extends SolrTestCaseJ4 { static final String URI_DEFAULT = "/fuzzy_suggest_analyzing"; static final String URI_MIN_EDIT_2 = "/fuzzy_suggest_analyzing_with_max_edit_2"; static final String URI_NON_PREFIX_LENGTH_4 = "/fuzzy_suggest_analyzing_with_non_fuzzy_prefix_4"; @@ -28,85 +28,87 @@ public class TestFuzzyAnalyzedSuggestions extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-phrasesuggest.xml","schema-phrasesuggest.xml"); + initCore("solrconfig-phrasesuggest.xml", "schema-phrasesuggest.xml"); // Suggestions text include : change, charge, chance assertQ(req("qt", URI_DEFAULT, "q", "", SpellingParams.SPELLCHECK_BUILD, "true")); assertQ(req("qt", URI_MIN_EDIT_2, "q", "", SpellingParams.SPELLCHECK_BUILD, "true")); assertQ(req("qt", URI_NON_PREFIX_LENGTH_4, "q", "", SpellingParams.SPELLCHECK_BUILD, "true")); assertQ(req("qt", URI_MIN_FUZZY_LENGTH, "q", "", SpellingParams.SPELLCHECK_BUILD, "true")); } - + public void testDefault() throws Exception { - + // tests to demonstrate default maxEdit parameter (value: 1), control for testWithMaxEdit2 - assertQ(req("qt", URI_DEFAULT, "q", "chagn", SpellingParams.SPELLCHECK_COUNT, "3"), - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagn']/int[@name='numFound'][.='2']", - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagn']/arr[@name='suggestion']/str[1][.='chance']", - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagn']/arr[@name='suggestion']/str[2][.='change']" - ); - - assertQ(req("qt", URI_DEFAULT, "q", "chacn", SpellingParams.SPELLCHECK_COUNT, "3"), - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chacn']/int[@name='numFound'][.='2']", - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chacn']/arr[@name='suggestion']/str[1][.='chance']", - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chacn']/arr[@name='suggestion']/str[2][.='change']" - ); - - assertQ(req("qt", URI_DEFAULT, "q", "chagr", SpellingParams.SPELLCHECK_COUNT, "3"), - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagr']/int[@name='numFound'][.='1']", - 
"//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagr']/arr[@name='suggestion']/str[1][.='charge']" - ); - - // test to demonstrate default nonFuzzyPrefix parameter (value: 1), control for testWithNonFuzzyPrefix4 - assertQ(req("qt", URI_DEFAULT, "q", "chanr", SpellingParams.SPELLCHECK_COUNT, "3"), - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chanr']/int[@name='numFound'][.='3']" - ); - - // test to demonstrate default minFuzzyPrefix parameter (value: 3), control for testWithMinFuzzyLength2 - assertQ(req("qt", URI_DEFAULT, "q", "cyhnce", SpellingParams.SPELLCHECK_COUNT, "3"), - "//lst[@name='spellcheck']/lst[@name='suggestions'][not(node())]" - ); + assertQ( + req("qt", URI_DEFAULT, "q", "chagn", SpellingParams.SPELLCHECK_COUNT, "3"), + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagn']/int[@name='numFound'][.='2']", + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagn']/arr[@name='suggestion']/str[1][.='chance']", + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagn']/arr[@name='suggestion']/str[2][.='change']"); + + assertQ( + req("qt", URI_DEFAULT, "q", "chacn", SpellingParams.SPELLCHECK_COUNT, "3"), + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chacn']/int[@name='numFound'][.='2']", + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chacn']/arr[@name='suggestion']/str[1][.='chance']", + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chacn']/arr[@name='suggestion']/str[2][.='change']"); + + assertQ( + req("qt", URI_DEFAULT, "q", "chagr", SpellingParams.SPELLCHECK_COUNT, "3"), + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagr']/int[@name='numFound'][.='1']", + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagr']/arr[@name='suggestion']/str[1][.='charge']"); + + // test to demonstrate default nonFuzzyPrefix parameter (value: 1), control for + // testWithNonFuzzyPrefix4 + assertQ( + req("qt", URI_DEFAULT, "q", "chanr", SpellingParams.SPELLCHECK_COUNT, "3"), + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chanr']/int[@name='numFound'][.='3']"); + + // test to demonstrate default minFuzzyPrefix parameter (value: 3), control for + // testWithMinFuzzyLength2 + assertQ( + req("qt", URI_DEFAULT, "q", "cyhnce", SpellingParams.SPELLCHECK_COUNT, "3"), + "//lst[@name='spellcheck']/lst[@name='suggestions'][not(node())]"); } - + public void testWithMaxEdit2() throws Exception { - - assertQ(req("qt", URI_MIN_EDIT_2, "q", "chagn", SpellingParams.SPELLCHECK_COUNT, "3"), - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagn']/int[@name='numFound'][.='3']", - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagn']/arr[@name='suggestion']/str[1][.='chance']", - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagn']/arr[@name='suggestion']/str[2][.='change']", - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagn']/arr[@name='suggestion']/str[3][.='charge']" - ); - - assertQ(req("qt", URI_MIN_EDIT_2, "q", "chagr", SpellingParams.SPELLCHECK_COUNT, "3"), - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagr']/int[@name='numFound'][.='3']", - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagr']/arr[@name='suggestion']/str[1][.='chance']", - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagr']/arr[@name='suggestion']/str[2][.='change']", - 
"//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagr']/arr[@name='suggestion']/str[3][.='charge']" - ); - - assertQ(req("qt", URI_MIN_EDIT_2, "q", "chacn", SpellingParams.SPELLCHECK_COUNT, "3"), - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chacn']/int[@name='numFound'][.='3']", - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chacn']/arr[@name='suggestion']/str[1][.='chance']", - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chacn']/arr[@name='suggestion']/str[2][.='change']", - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chacn']/arr[@name='suggestion']/str[3][.='charge']" - ); + + assertQ( + req("qt", URI_MIN_EDIT_2, "q", "chagn", SpellingParams.SPELLCHECK_COUNT, "3"), + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagn']/int[@name='numFound'][.='3']", + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagn']/arr[@name='suggestion']/str[1][.='chance']", + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagn']/arr[@name='suggestion']/str[2][.='change']", + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagn']/arr[@name='suggestion']/str[3][.='charge']"); + + assertQ( + req("qt", URI_MIN_EDIT_2, "q", "chagr", SpellingParams.SPELLCHECK_COUNT, "3"), + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagr']/int[@name='numFound'][.='3']", + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagr']/arr[@name='suggestion']/str[1][.='chance']", + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagr']/arr[@name='suggestion']/str[2][.='change']", + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chagr']/arr[@name='suggestion']/str[3][.='charge']"); + + assertQ( + req("qt", URI_MIN_EDIT_2, "q", "chacn", SpellingParams.SPELLCHECK_COUNT, "3"), + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chacn']/int[@name='numFound'][.='3']", + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chacn']/arr[@name='suggestion']/str[1][.='chance']", + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chacn']/arr[@name='suggestion']/str[2][.='change']", + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chacn']/arr[@name='suggestion']/str[3][.='charge']"); } - + public void testWithNonFuzzyPrefix4() throws Exception { - + // This test should not match charge, as the nonFuzzyPrefix has been set to 4 - assertQ(req("qt", URI_NON_PREFIX_LENGTH_4, "q", "chanr", SpellingParams.SPELLCHECK_COUNT, "3"), - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chanr']/int[@name='numFound'][.='2']", - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chanr']/arr[@name='suggestion']/str[1][.='chance']", - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chanr']/arr[@name='suggestion']/str[2][.='change']" - ); + assertQ( + req("qt", URI_NON_PREFIX_LENGTH_4, "q", "chanr", SpellingParams.SPELLCHECK_COUNT, "3"), + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chanr']/int[@name='numFound'][.='2']", + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chanr']/arr[@name='suggestion']/str[1][.='chance']", + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chanr']/arr[@name='suggestion']/str[2][.='change']"); } - + public void testWithMinFuzzyLength2() throws Exception { - + // This test should match chance as the minFuzzyLength parameter has been set to 2 - assertQ(req("qt", URI_MIN_FUZZY_LENGTH, 
"q", "chynce", SpellingParams.SPELLCHECK_COUNT, "3"), - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chynce']/int[@name='numFound'][.='1']", - "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chynce']/arr[@name='suggestion']/str[1][.='chance']" - ); + assertQ( + req("qt", URI_MIN_FUZZY_LENGTH, "q", "chynce", SpellingParams.SPELLCHECK_COUNT, "3"), + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chynce']/int[@name='numFound'][.='1']", + "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='chynce']/arr[@name='suggestion']/str[1][.='chance']"); } } diff --git a/solr/core/src/test/org/apache/solr/spelling/suggest/TestHighFrequencyDictionaryFactory.java b/solr/core/src/test/org/apache/solr/spelling/suggest/TestHighFrequencyDictionaryFactory.java index 41dd57285b1..1d1f1ac8676 100644 --- a/solr/core/src/test/org/apache/solr/spelling/suggest/TestHighFrequencyDictionaryFactory.java +++ b/solr/core/src/test/org/apache/solr/spelling/suggest/TestHighFrequencyDictionaryFactory.java @@ -19,62 +19,120 @@ import org.apache.solr.SolrTestCaseJ4; import org.junit.BeforeClass; -public class TestHighFrequencyDictionaryFactory extends SolrTestCaseJ4 { - +public class TestHighFrequencyDictionaryFactory extends SolrTestCaseJ4 { + static final String REQUEST_URI = "/fuzzy_suggest_analyzing_with_high_freq_dict"; static final String DICT_NAME = "fuzzy_suggest_analyzing_with_high_freq_dict"; @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-phrasesuggest.xml","schema-phrasesuggest.xml"); + initCore("solrconfig-phrasesuggest.xml", "schema-phrasesuggest.xml"); // Suggestions text include : change, charge, chance - assertU(adoc("id", "9999990", - "text", "true", - "stext", "foobar")); - assertU(adoc("id", "9999991", - "text", "true", - "stext", "foobar")); - assertU(adoc("id", "9999992", - "text", "true", - "stext", "change")); - assertU(adoc("id", "9999993", - "text", "true", - "stext", "charge")); - assertU(adoc("id", "9999993", - "text", "true", - "stext", "chance")); + assertU(adoc("id", "9999990", "text", "true", "stext", "foobar")); + assertU(adoc("id", "9999991", "text", "true", "stext", "foobar")); + assertU(adoc("id", "9999992", "text", "true", "stext", "change")); + assertU(adoc("id", "9999993", "text", "true", "stext", "charge")); + assertU(adoc("id", "9999993", "text", "true", "stext", "chance")); assertU(commit()); - assertQ(req("qt", REQUEST_URI, "q", "", SuggesterParams.SUGGEST_DICT, DICT_NAME, SuggesterParams.SUGGEST_BUILD, "true")); + assertQ( + req( + "qt", + REQUEST_URI, + "q", + "", + SuggesterParams.SUGGEST_DICT, + DICT_NAME, + SuggesterParams.SUGGEST_BUILD, + "true")); } - - + public void testDefault() throws Exception { - - // tests to demonstrate default maxEdit parameter (value: 1), control for testWithMaxEdit2 - assertQ(req("qt", REQUEST_URI, "q", "chagn", SuggesterParams.SUGGEST_DICT, DICT_NAME, SuggesterParams.SUGGEST_COUNT, "3"), - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME +"']/lst[@name='chagn']/int[@name='numFound'][.='2']", - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME +"']/lst[@name='chagn']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='chance']", - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME +"']/lst[@name='chagn']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='change']" - ); - - assertQ(req("qt", REQUEST_URI, "q", "chacn", SuggesterParams.SUGGEST_DICT, DICT_NAME, SuggesterParams.SUGGEST_COUNT, "3"), - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME 
+"']/lst[@name='chacn']/int[@name='numFound'][.='2']", - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME +"']/lst[@name='chacn']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='chance']", - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME +"']/lst[@name='chacn']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='change']" - ); - - assertQ(req("qt", REQUEST_URI, "q", "chagr", SuggesterParams.SUGGEST_DICT, DICT_NAME, SuggesterParams.SUGGEST_COUNT, "3"), - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME +"']/lst[@name='chagr']/int[@name='numFound'][.='1']", - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME +"']/lst[@name='chagr']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='charge']" - ); - - assertQ(req("qt", REQUEST_URI, "q", "chanr", SuggesterParams.SUGGEST_DICT, DICT_NAME, SuggesterParams.SUGGEST_COUNT, "3"), - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME +"']/lst[@name='chanr']/int[@name='numFound'][.='3']" - ); - - assertQ(req("qt", REQUEST_URI, "q", "cyhnce", SuggesterParams.SUGGEST_DICT, DICT_NAME, SuggesterParams.SUGGEST_COUNT, "3"), - "//lst[@name='suggest']/lst[@name='"+ DICT_NAME +"']/lst[@name='cyhnce']/int[@name='numFound'][.='0']" - ); + + // tests to demonstrate default maxEdit parameter (value: 1), control for testWithMaxEdit2 + assertQ( + req( + "qt", + REQUEST_URI, + "q", + "chagn", + SuggesterParams.SUGGEST_DICT, + DICT_NAME, + SuggesterParams.SUGGEST_COUNT, + "3"), + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + "']/lst[@name='chagn']/int[@name='numFound'][.='2']", + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + "']/lst[@name='chagn']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='chance']", + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + "']/lst[@name='chagn']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='change']"); + + assertQ( + req( + "qt", + REQUEST_URI, + "q", + "chacn", + SuggesterParams.SUGGEST_DICT, + DICT_NAME, + SuggesterParams.SUGGEST_COUNT, + "3"), + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + "']/lst[@name='chacn']/int[@name='numFound'][.='2']", + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + "']/lst[@name='chacn']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='chance']", + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + "']/lst[@name='chacn']/arr[@name='suggestions']/lst[2]/str[@name='term'][.='change']"); + + assertQ( + req( + "qt", + REQUEST_URI, + "q", + "chagr", + SuggesterParams.SUGGEST_DICT, + DICT_NAME, + SuggesterParams.SUGGEST_COUNT, + "3"), + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + "']/lst[@name='chagr']/int[@name='numFound'][.='1']", + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + "']/lst[@name='chagr']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='charge']"); + + assertQ( + req( + "qt", + REQUEST_URI, + "q", + "chanr", + SuggesterParams.SUGGEST_DICT, + DICT_NAME, + SuggesterParams.SUGGEST_COUNT, + "3"), + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + "']/lst[@name='chanr']/int[@name='numFound'][.='3']"); + + assertQ( + req( + "qt", + REQUEST_URI, + "q", + "cyhnce", + SuggesterParams.SUGGEST_DICT, + DICT_NAME, + SuggesterParams.SUGGEST_COUNT, + "3"), + "//lst[@name='suggest']/lst[@name='" + + DICT_NAME + + "']/lst[@name='cyhnce']/int[@name='numFound'][.='0']"); } } diff --git a/solr/core/src/test/org/apache/solr/spelling/suggest/TestPhraseSuggestions.java b/solr/core/src/test/org/apache/solr/spelling/suggest/TestPhraseSuggestions.java index 88cd3247ec3..f73f2f2407e 100644 --- 
+++ b/solr/core/src/test/org/apache/solr/spelling/suggest/TestPhraseSuggestions.java
@@ -22,24 +22,24 @@
 public class TestPhraseSuggestions extends SolrTestCaseJ4 {
   static final String URI = "/suggest_wfst";
-
+
   @BeforeClass
   public static void beforeClass() throws Exception {
-    initCore("solrconfig-phrasesuggest.xml","schema-phrasesuggest.xml");
+    initCore("solrconfig-phrasesuggest.xml", "schema-phrasesuggest.xml");
     assertQ(req("qt", URI, "q", "", SpellingParams.SPELLCHECK_BUILD, "true"));
   }
-
+
   public void test() {
-    assertQ(req("qt", URI, "q", "the f", SpellingParams.SPELLCHECK_COUNT, "4"),
+    assertQ(
+        req("qt", URI, "q", "the f", SpellingParams.SPELLCHECK_COUNT, "4"),
         "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='the f']/int[@name='numFound'][.='3']",
         "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='the f']/arr[@name='suggestion']/str[1][.='the final phrase']",
         "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='the f']/arr[@name='suggestion']/str[2][.='the fifth phrase']",
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='the f']/arr[@name='suggestion']/str[3][.='the first phrase']"
-    );
-
-    assertQ(req("qt", URI, "q", "Testing +12", SpellingParams.SPELLCHECK_COUNT, "4"),
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='the f']/arr[@name='suggestion']/str[3][.='the first phrase']");
+
+    assertQ(
+        req("qt", URI, "q", "Testing +12", SpellingParams.SPELLCHECK_COUNT, "4"),
         "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='testing 12']/int[@name='numFound'][.='1']",
-        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='testing 12']/arr[@name='suggestion']/str[1][.='testing 1234']"
-    );
+        "//lst[@name='spellcheck']/lst[@name='suggestions']/lst[@name='testing 12']/arr[@name='suggestion']/str[1][.='testing 1234']");
   }
 }
diff --git a/solr/core/src/test/org/apache/solr/uninverting/TestDocTermOrds.java b/solr/core/src/test/org/apache/solr/uninverting/TestDocTermOrds.java
index ef51c76b880..4d7e49d8a41 100644
--- a/solr/core/src/test/org/apache/solr/uninverting/TestDocTermOrds.java
+++ b/solr/core/src/test/org/apache/solr/uninverting/TestDocTermOrds.java
@@ -22,7 +22,6 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
-
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.codecs.Codec;
@@ -46,9 +45,9 @@
 import org.apache.lucene.index.TermsEnum.SeekStatus;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
-import org.apache.solr.SolrTestCase;
 import org.apache.lucene.util.StringHelper;
 import org.apache.lucene.util.TestUtil;
+import org.apache.solr.SolrTestCase;
 import org.apache.solr.index.SlowCompositeReaderWrapper;
 import org.apache.solr.legacy.LegacyIntField;
 import org.apache.solr.legacy.LegacyLongField;
@@ -66,13 +65,13 @@ public void testEmptyIndex() throws IOException {
     final Directory dir = newDirectory();
     final IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
     iw.close();
-
+
     final DirectoryReader ir = DirectoryReader.open(dir);
     TestUtil.checkReader(ir);
-
+
     final LeafReader composite = SlowCompositeReaderWrapper.wrap(ir);
     TestUtil.checkReader(composite);
-
+
     // check the leaves
     // (normally there are none for an empty index, so this is really just future
     // proofing in case that changes for some reason)
@@ -83,10 +82,11 @@ public void testEmptyIndex() throws IOException {
       assertEquals("iterator should be empty (leaf)", 0, dto.iterator(r).getValueCount());
     }

-    // check the composite
+    // check the composite
     final DocTermOrds dto = new DocTermOrds(composite, composite.getLiveDocs(), "any_field");
     assertNull("OrdTermsEnum should be null (composite)", dto.getOrdTermsEnum(composite));
-    assertEquals("iterator should be empty (composite)", 0, dto.iterator(composite).getValueCount());
+    assertEquals(
+        "iterator should be empty (composite)", 0, dto.iterator(composite).getValueCount());

     ir.close();
     dir.close();
@@ -94,7 +94,11 @@ public void testEmptyIndex() throws IOException {

   public void testSimple() throws Exception {
     Directory dir = newDirectory();
-    final RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
+    final RandomIndexWriter w =
+        new RandomIndexWriter(
+            random(),
+            dir,
+            newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
     Document doc = new Document();
     Field field = newTextField("field", "", Field.Store.NO);
     doc.add(field);
@@ -106,7 +110,7 @@ public void testSimple() throws Exception {
     field.setStringValue("a f");
     w.addDocument(doc);
-
+
     final IndexReader r = w.getReader();
     w.close();
@@ -114,13 +118,13 @@ public void testSimple() throws Exception {
     TestUtil.checkReader(ar);
     final DocTermOrds dto = new DocTermOrds(ar, ar.getLiveDocs(), "field");
     SortedSetDocValues iter = dto.iterator(ar);
-
+
     assertEquals(0, iter.nextDoc());
     assertEquals(0, iter.nextOrd());
     assertEquals(1, iter.nextOrd());
     assertEquals(2, iter.nextOrd());
     assertEquals(SortedSetDocValues.NO_MORE_ORDS, iter.nextOrd());
-
+
     assertEquals(1, iter.nextDoc());
     assertEquals(3, iter.nextOrd());
     assertEquals(4, iter.nextOrd());
@@ -141,16 +145,16 @@ public void testRandom() throws Exception {
     final int NUM_TERMS = atLeast(20);
     final Set<BytesRef> terms = new HashSet<>();
-    while(terms.size() < NUM_TERMS) {
+    while (terms.size() < NUM_TERMS) {
       final String s = TestUtil.randomRealisticUnicodeString(random());
-      //final String s = _TestUtil.randomSimpleString(random);
+      // final String s = _TestUtil.randomSimpleString(random);
       if (s.length() > 0) {
         terms.add(new BytesRef(s));
       }
     }
     final BytesRef[] termsArray = terms.toArray(new BytesRef[terms.size()]);
     Arrays.sort(termsArray);
-
+
     final int NUM_DOCS = atLeast(100);

     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -161,19 +165,19 @@ public void testRandom() throws Exception {
       Codec codec = TestUtil.alwaysPostingsFormat(TestUtil.getPostingsFormatWithOrds(random()));
       conf.setCodec(codec);
     }
-
+
     final RandomIndexWriter w = new RandomIndexWriter(random(), dir, conf);

     final int[][] idToOrds = new int[NUM_DOCS][];
     final Set<Integer> ordsForDocSet = new HashSet<>();

-    for(int id=0;id<NUM_DOCS;id++) {
     final Set<BytesRef> terms = new HashSet<>();
-    while(terms.size() < NUM_TERMS) {
-      final String s = prefixesArray[random().nextInt(prefixesArray.length)] + TestUtil.randomRealisticUnicodeString(random());
-      //final String s = prefixesArray[random.nextInt(prefixesArray.length)] + _TestUtil.randomSimpleString(random);
+    while (terms.size() < NUM_TERMS) {
+      final String s =
+          prefixesArray[random().nextInt(prefixesArray.length)]
+              + TestUtil.randomRealisticUnicodeString(random());
+      // final String s = prefixesArray[random.nextInt(prefixesArray.length)] +
+      // _TestUtil.randomSimpleString(random);
       if (s.length() > 0) {
         terms.add(new BytesRef(s));
       }
     }
     final BytesRef[] termsArray = terms.toArray(new BytesRef[terms.size()]);
     Arrays.sort(termsArray);
-
+
     final int NUM_DOCS = atLeast(100);

     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -259,19 +266,19 @@ public void testRandomWithPrefix() throws Exception {
       Codec codec = TestUtil.alwaysPostingsFormat(TestUtil.getPostingsFormatWithOrds(random()));
       conf.setCodec(codec);
     }
-
+
     final RandomIndexWriter w = new RandomIndexWriter(random(), dir, conf);

     final int[][] idToOrds = new int[NUM_DOCS][];
     final Set<Integer> ordsForDocSet = new HashSet<>();

-    for(int id=0;id<NUM_DOCS;id++) {
diff --git a/solr/core/src/test/org/apache/solr/uninverting/TestFieldCache.java b/solr/core/src/test/org/apache/solr/uninverting/TestFieldCache.java
       if (i > 0 && random().nextInt(3) == 1) {
         // reuse past string -- try to find one that's not null
-        for(int iter = 0; iter < 10 && s == null;iter++) {
+        for (int iter = 0; iter < 10 && s == null; iter++) {
           s = unicodeStrings[random().nextInt(i)];
         }
         if (s == null) {
@@ -322,7 +345,7 @@ public void testDocsWithField() throws Exception {
     NumericDocValues ints = cache.getNumerics(reader, "sparse", FieldCache.INT_POINT_PARSER);
     assertEquals(4, cache.getCacheEntries().length);
     for (int i = 0; i < reader.maxDoc(); i++) {
-      if (i%2 == 0) {
+      if (i % 2 == 0) {
         assertEquals(i, ints.nextDoc());
         assertEquals(i, ints.longValue());
       }
@@ -330,13 +353,13 @@
     NumericDocValues numInts = cache.getNumerics(reader, "numInt", FieldCache.INT_POINT_PARSER);
     for (int i = 0; i < reader.maxDoc(); i++) {
-      if (i%2 == 0) {
+      if (i % 2 == 0) {
         assertEquals(i, numInts.nextDoc());
         assertEquals(i, numInts.longValue());
       }
     }
   }
-
+
   public void testGetDocsWithFieldThreadSafety() throws Exception {
     final FieldCache cache = FieldCache.DEFAULT;
     cache.purgeAllCaches();
@@ -346,60 +369,65 @@ public void testGetDocsWithFieldThreadSafety() throws Exception {
     final AtomicBoolean failed = new AtomicBoolean();
     final AtomicInteger iters = new AtomicInteger();
     final int NUM_ITER = 200 * RANDOM_MULTIPLIER;
-    final CyclicBarrier restart = new CyclicBarrier(NUM_THREADS,
-        new Runnable() {
-          @Override
-          public void run() {
-            cache.purgeAllCaches();
-            iters.incrementAndGet();
-          }
-        });
-    for(int threadIDX=0;threadIDX<NUM_THREADS;threadIDX++) {
-      threads[threadIDX] = new Thread() {
-          @Override
-          public void run() {
-
-            try {
-              while(!failed.get()) {
-                final int op = random().nextInt(3);
-                if (op == 0) {
-                  // Purge all caches & resume, once all
-                  // threads get here:
-                  restart.await();
-                  if (iters.get() >= NUM_ITER) {
-                    break;
-                  }
-                } else if (op == 1) {
-                  Bits docsWithField = cache.getDocsWithField(reader, "sparse", FieldCache.INT_POINT_PARSER);
-                  for (int i = 0; i < docsWithField.length(); i++) {
-                    assertEquals(i%2 == 0, docsWithField.get(i));
-                  }
-                } else {
-                  NumericDocValues ints = cache.getNumerics(reader, "sparse", FieldCache.INT_POINT_PARSER);
-                  for (int i = 0; i < reader.maxDoc();i++) {
-                    if (i%2 == 0) {
-                      assertEquals(i, ints.nextDoc());
-                      assertEquals(i, ints.longValue());
+    final CyclicBarrier restart =
+        new CyclicBarrier(
+            NUM_THREADS,
+            new Runnable() {
+              @Override
+              public void run() {
+                cache.purgeAllCaches();
+                iters.incrementAndGet();
+              }
+            });
+    for (int threadIDX = 0; threadIDX < NUM_THREADS; threadIDX++) {
+      threads[threadIDX] =
+          new Thread() {
+            @Override
+            public void run() {
+
+              try {
+                while (!failed.get()) {
+                  final int op = random().nextInt(3);
+                  if (op == 0) {
+                    // Purge all caches & resume, once all
+                    // threads get here:
+                    restart.await();
+                    if (iters.get() >= NUM_ITER) {
+                      break;
+                    }
+                  } else if (op == 1) {
+                    Bits docsWithField =
+                        cache.getDocsWithField(reader, "sparse", FieldCache.INT_POINT_PARSER);
+                    for (int i = 0; i < docsWithField.length(); i++) {
+                      assertEquals(i % 2 == 0, docsWithField.get(i));
+                    }
+                  } else {
+                    NumericDocValues ints =
+                        cache.getNumerics(reader, "sparse", FieldCache.INT_POINT_PARSER);
+                    for (int i = 0; i < reader.maxDoc(); i++) {
+                      if (i % 2 == 0) {
+                        assertEquals(i, ints.nextDoc());
+                        assertEquals(i, ints.longValue());
+                      }
                     }
                   }
                 }
+              } catch (Throwable t) {
+                failed.set(true);
+                restart.reset();
+                throw new RuntimeException(t);
+              }
-            } catch (Throwable t) {
-              failed.set(true);
-              restart.reset();
-              throw new RuntimeException(t);
-            }
-          }
-        };
+            }
+          };
       threads[threadIDX].start();
     }
-    for(int threadIDX=0;threadIDX<NUM_THREADS;threadIDX++) {
-    expectThrows(IllegalStateException.class, () -> {
-      FieldCache.DEFAULT.getNumerics(ar, "binary", FieldCache.INT_POINT_PARSER);
-    });
-
+    expectThrows(
+        IllegalStateException.class,
+        () -> {
+          FieldCache.DEFAULT.getNumerics(ar, "binary", FieldCache.INT_POINT_PARSER);
+        });
+
     BinaryDocValues binary = FieldCache.DEFAULT.getTerms(ar, "binary");
     assertEquals(0, binary.nextDoc());
     final BytesRef term = binary.binaryValue();
     assertEquals("binary value", term.utf8ToString());
-
-    expectThrows(IllegalStateException.class, () -> {
-      FieldCache.DEFAULT.getTermsIndex(ar, "binary");
-    });
-
-    expectThrows(IllegalStateException.class, () -> {
-      FieldCache.DEFAULT.getDocTermOrds(ar, "binary", null);
-    });
-
-    expectThrows(IllegalStateException.class, () -> {
-      new DocTermOrds(ar, null, "binary");
-    });
-
+
+    expectThrows(
+        IllegalStateException.class,
+        () -> {
+          FieldCache.DEFAULT.getTermsIndex(ar, "binary");
+        });
+
+    expectThrows(
+        IllegalStateException.class,
+        () -> {
+          FieldCache.DEFAULT.getDocTermOrds(ar, "binary", null);
+        });
+
+    expectThrows(
+        IllegalStateException.class,
+        () -> {
+          new DocTermOrds(ar, null, "binary");
+        });
+
     Bits bits = FieldCache.DEFAULT.getDocsWithField(ar, "binary", null);
     assertTrue(bits.get(0));
-
+
     // Sorted type: can be retrieved via getTerms(), getTermsIndex(), getDocTermOrds()
-    expectThrows(IllegalStateException.class, () -> {
-      FieldCache.DEFAULT.getNumerics(ar, "sorted", FieldCache.INT_POINT_PARSER);
-    });
-
-    expectThrows(IllegalStateException.class, () -> {
-      new DocTermOrds(ar, null, "sorted");
-    });
-
+    expectThrows(
+        IllegalStateException.class,
+        () -> {
+          FieldCache.DEFAULT.getNumerics(ar, "sorted", FieldCache.INT_POINT_PARSER);
+        });
+
+    expectThrows(
+        IllegalStateException.class,
+        () -> {
+          new DocTermOrds(ar, null, "sorted");
+        });
+
     SortedDocValues sorted = FieldCache.DEFAULT.getTermsIndex(ar, "sorted");
     assertEquals(0, sorted.nextDoc());
     assertEquals(0, sorted.ordValue());
     assertEquals(1, sorted.getValueCount());
     BytesRef scratch = sorted.lookupOrd(sorted.ordValue());
     assertEquals("sorted value", scratch.utf8ToString());
-
+
     SortedSetDocValues sortedSet = FieldCache.DEFAULT.getDocTermOrds(ar, "sorted", null);
     assertEquals(0, sortedSet.nextDoc());
     assertEquals(0, sortedSet.nextOrd());
     assertEquals(SortedSetDocValues.NO_MORE_ORDS, sortedSet.nextOrd());
     assertEquals(1, sortedSet.getValueCount());
-
+
     bits = FieldCache.DEFAULT.getDocsWithField(ar, "sorted", null);
     assertTrue(bits.get(0));
-
+
     // Numeric type: can be retrieved via getInts() and so on
-    NumericDocValues numeric = FieldCache.DEFAULT.getNumerics(ar, "numeric", FieldCache.INT_POINT_PARSER);
+    NumericDocValues numeric =
+        FieldCache.DEFAULT.getNumerics(ar, "numeric", FieldCache.INT_POINT_PARSER);
     assertEquals(0, numeric.nextDoc());
     assertEquals(42, numeric.longValue());
-
-    expectThrows(IllegalStateException.class, () -> {
-      FieldCache.DEFAULT.getTerms(ar, "numeric");
-    });
-
-    expectThrows(IllegalStateException.class, () -> {
-      FieldCache.DEFAULT.getTermsIndex(ar, "numeric");
-    });
-
-    expectThrows(IllegalStateException.class, () -> {
-      FieldCache.DEFAULT.getDocTermOrds(ar, "numeric", null);
-    });
-
-    expectThrows(IllegalStateException.class, () -> {
-      new DocTermOrds(ar, null, "numeric");
-    });
-
+
+    expectThrows(
+        IllegalStateException.class,
+        () -> {
+          FieldCache.DEFAULT.getTerms(ar, "numeric");
+        });
+
+    expectThrows(
+        IllegalStateException.class,
+        () -> {
+          FieldCache.DEFAULT.getTermsIndex(ar, "numeric");
+        });
+
+    expectThrows(
+        IllegalStateException.class,
+        () -> {
+          FieldCache.DEFAULT.getDocTermOrds(ar, "numeric", null);
+        });
+
+    expectThrows(
+        IllegalStateException.class,
+        () -> {
+          new DocTermOrds(ar, null, "numeric");
+        });
+
     bits = FieldCache.DEFAULT.getDocsWithField(ar, "numeric", null);
     assertTrue(bits.get(0));
-
-    // SortedSet type: can be retrieved via getDocTermOrds()
-    expectThrows(IllegalStateException.class, () -> {
-      FieldCache.DEFAULT.getNumerics(ar, "sortedset", FieldCache.INT_POINT_PARSER);
-    });
-
-    expectThrows(IllegalStateException.class, () -> {
-      FieldCache.DEFAULT.getTerms(ar, "sortedset");
-    });
-
-    expectThrows(IllegalStateException.class, () -> {
-      FieldCache.DEFAULT.getTermsIndex(ar, "sortedset");
-    });
-
-    expectThrows(IllegalStateException.class, () -> {
-      new DocTermOrds(ar, null, "sortedset");
-    });
-
+
+    // SortedSet type: can be retrieved via getDocTermOrds()
+    expectThrows(
+        IllegalStateException.class,
+        () -> {
+          FieldCache.DEFAULT.getNumerics(ar, "sortedset", FieldCache.INT_POINT_PARSER);
+        });
+
+    expectThrows(
+        IllegalStateException.class,
+        () -> {
+          FieldCache.DEFAULT.getTerms(ar, "sortedset");
+        });
+
+    expectThrows(
+        IllegalStateException.class,
+        () -> {
+          FieldCache.DEFAULT.getTermsIndex(ar, "sortedset");
+        });
+
+    expectThrows(
+        IllegalStateException.class,
+        () -> {
+          new DocTermOrds(ar, null, "sortedset");
+        });
+
     sortedSet = FieldCache.DEFAULT.getDocTermOrds(ar, "sortedset", null);
     assertEquals(0, sortedSet.nextDoc());
     assertEquals(0, sortedSet.nextOrd());
     assertEquals(1, sortedSet.nextOrd());
     assertEquals(SortedSetDocValues.NO_MORE_ORDS, sortedSet.nextOrd());
     assertEquals(2, sortedSet.getValueCount());
-
+
     bits = FieldCache.DEFAULT.getDocsWithField(ar, "sortedset", null);
     assertTrue(bits.get(0));
-
+
     ir.close();
     dir.close();
   }
-
+
   public void testNonexistantFields() throws Exception {
     Directory dir = newDirectory();
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
@@ -527,43 +584,44 @@ public void testNonexistantFields() throws Exception {
     iw.addDocument(doc);
     DirectoryReader ir = iw.getReader();
     iw.close();
-
+
     LeafReader ar = getOnlyLeafReader(ir);
-
+
     final FieldCache cache = FieldCache.DEFAULT;
     cache.purgeAllCaches();
     assertEquals(0, cache.getCacheEntries().length);
-
+
     NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.INT_POINT_PARSER);
     assertEquals(NO_MORE_DOCS, ints.nextDoc());
-
+
     NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.LONG_POINT_PARSER);
     assertEquals(NO_MORE_DOCS, longs.nextDoc());
-
+
     NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.FLOAT_POINT_PARSER);
     assertEquals(NO_MORE_DOCS, floats.nextDoc());
-
-    NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.DOUBLE_POINT_PARSER);
+
+    NumericDocValues doubles =
+        cache.getNumerics(ar, "bogusdoubles", FieldCache.DOUBLE_POINT_PARSER);
     assertEquals(NO_MORE_DOCS, doubles.nextDoc());
-
+
     BinaryDocValues binaries = cache.getTerms(ar, "bogusterms");
     assertEquals(NO_MORE_DOCS, binaries.nextDoc());
-
+
     SortedDocValues sorted = cache.getTermsIndex(ar, "bogustermsindex");
     assertEquals(NO_MORE_DOCS, sorted.nextDoc());
-
+
     SortedSetDocValues sortedSet = cache.getDocTermOrds(ar, "bogusmultivalued", null);
     assertEquals(NO_MORE_DOCS, sortedSet.nextDoc());
-
+
     Bits bits = cache.getDocsWithField(ar, "bogusbits", null);
     assertFalse(bits.get(0));
-
+
     // check that we cached nothing
     assertEquals(0, cache.getCacheEntries().length);
     ir.close();
     dir.close();
   }
-
+
   public void testNonIndexedFields() throws Exception {
     Directory dir = newDirectory();
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
@@ -581,37 +639,38 @@ public void testNonIndexedFields() throws Exception {
     iw.addDocument(doc);
     DirectoryReader ir = iw.getReader();
     iw.close();
-
+
     LeafReader ar = getOnlyLeafReader(ir);
-
+
     final FieldCache cache = FieldCache.DEFAULT;
     cache.purgeAllCaches();
     assertEquals(0, cache.getCacheEntries().length);
-
+
     NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.INT_POINT_PARSER);
     assertEquals(NO_MORE_DOCS, ints.nextDoc());
-
+
     NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.LONG_POINT_PARSER);
     assertEquals(NO_MORE_DOCS, longs.nextDoc());
-
+
     NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.FLOAT_POINT_PARSER);
     assertEquals(NO_MORE_DOCS, floats.nextDoc());
-
-    NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.DOUBLE_POINT_PARSER);
+
+    NumericDocValues doubles =
+        cache.getNumerics(ar, "bogusdoubles", FieldCache.DOUBLE_POINT_PARSER);
    assertEquals(NO_MORE_DOCS, doubles.nextDoc());
-
+
     BinaryDocValues binaries = cache.getTerms(ar, "bogusterms");
     assertEquals(NO_MORE_DOCS, binaries.nextDoc());
-
+
     SortedDocValues sorted = cache.getTermsIndex(ar, "bogustermsindex");
     assertEquals(NO_MORE_DOCS, sorted.nextDoc());
-
+
     SortedSetDocValues sortedSet = cache.getDocTermOrds(ar, "bogusmultivalued", null);
     assertEquals(NO_MORE_DOCS, sortedSet.nextDoc());
-
+
     Bits bits = cache.getDocsWithField(ar, "bogusbits", null);
     assertFalse(bits.get(0));
-
+
     // check that we cached nothing
     assertEquals(0, cache.getCacheEntries().length);
     ir.close();
@@ -660,7 +719,9 @@ public void testLongFieldCache() throws IOException {
     }
     iw.forceMerge(1);
     final DirectoryReader reader = iw.getReader();
-    final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlyLeafReader(reader), "f", FieldCache.LONG_POINT_PARSER);
+    final NumericDocValues longs =
+        FieldCache.DEFAULT.getNumerics(
+            getOnlyLeafReader(reader), "f", FieldCache.LONG_POINT_PARSER);
     for (int i = 0; i < values.length; ++i) {
       if (missing.contains(i) == false) {
         assertEquals(i, longs.nextDoc());
@@ -712,7 +773,8 @@ public void testIntFieldCache() throws IOException {
     }
     iw.forceMerge(1);
     final DirectoryReader reader = iw.getReader();
-    final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlyLeafReader(reader), "f", FieldCache.INT_POINT_PARSER);
+    final NumericDocValues ints =
+        FieldCache.DEFAULT.getNumerics(getOnlyLeafReader(reader), "f", FieldCache.INT_POINT_PARSER);
     for (int i = 0; i < values.length; ++i) {
       if (missing.contains(i) == false) {
         assertEquals(i, ints.nextDoc());
@@ -724,5 +786,4 @@ public void testIntFieldCache() throws IOException {
     iw.close();
     dir.close();
   }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheReopen.java b/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheReopen.java
index 665d68773f6..80f90582c80 100644
--- a/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheReopen.java
+++ b/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheReopen.java
@@ -19,53 +19,54 @@
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.IntPoint;
-import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.store.Directory; import org.apache.solr.SolrTestCase; public class TestFieldCacheReopen extends SolrTestCase { - + // TODO: make a version of this that tests the same thing with UninvertingReader.wrap() - + // LUCENE-1579: Ensure that on a reopened reader, that any // shared segments reuse the doc values arrays in // FieldCache public void testFieldCacheReuseAfterReopen() throws Exception { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter( - dir, - newIndexWriterConfig(new MockAnalyzer(random())). - setMergePolicy(newLogMergePolicy(10)) - ); + IndexWriter writer = + new IndexWriter( + dir, + newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy(10))); Document doc = new Document(); doc.add(new IntPoint("number", 17)); writer.addDocument(doc); writer.commit(); - + // Open reader1 DirectoryReader r = DirectoryReader.open(dir); LeafReader r1 = getOnlyLeafReader(r); - final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(r1, "number", FieldCache.INT_POINT_PARSER); + final NumericDocValues ints = + FieldCache.DEFAULT.getNumerics(r1, "number", FieldCache.INT_POINT_PARSER); assertEquals(0, ints.nextDoc()); assertEquals(17, ints.longValue()); - + // Add new segment writer.addDocument(doc); writer.commit(); - + // Reopen reader1 --> reader2 DirectoryReader r2 = DirectoryReader.openIfChanged(r); assertNotNull(r2); r.close(); LeafReader sub0 = r2.leaves().get(0).reader(); - final NumericDocValues ints2 = FieldCache.DEFAULT.getNumerics(sub0, "number", FieldCache.INT_POINT_PARSER); + final NumericDocValues ints2 = + FieldCache.DEFAULT.getNumerics(sub0, "number", FieldCache.INT_POINT_PARSER); r2.close(); assertEquals(0, ints2.nextDoc()); assertEquals(17, ints2.longValue()); - + writer.close(); dir.close(); } diff --git a/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSort.java b/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSort.java index 774a2ea3471..f2195cb104c 100644 --- a/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSort.java +++ b/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSort.java @@ -20,7 +20,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; - import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.DoublePoint; @@ -38,10 +37,6 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; -import org.apache.solr.legacy.LegacyDoubleField; -import org.apache.solr.legacy.LegacyFloatField; -import org.apache.solr.legacy.LegacyIntField; -import org.apache.solr.legacy.LegacyLongField; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.IndexSearcher; @@ -53,9 +48,13 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.store.Directory; -import org.apache.solr.uninverting.UninvertingReader.Type; -import org.apache.solr.SolrTestCase; import org.apache.lucene.util.TestUtil; +import org.apache.solr.SolrTestCase; +import org.apache.solr.legacy.LegacyDoubleField; +import org.apache.solr.legacy.LegacyFloatField; +import org.apache.solr.legacy.LegacyIntField; +import org.apache.solr.legacy.LegacyLongField; 
+import org.apache.solr.uninverting.UninvertingReader.Type; /* * Tests sorting (but with fieldcache instead of docvalues) @@ -81,10 +80,10 @@ private void testString(SortField.Type sortType) throws IOException { doc.add(newStringField("value", "bar", Field.Store.YES)); writer.addDocument(doc); Type type = sortType == SortField.Type.STRING ? Type.SORTED : Type.BINARY; - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", type)); + IndexReader ir = + UninvertingReader.wrap(writer.getReader(), Collections.singletonMap("value", type)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); Sort sort = new Sort(new SortField("value", sortType)); @@ -98,15 +97,15 @@ private void testString(SortField.Type sortType) throws IOException { ir.close(); dir.close(); } - + public void testStringMissing() throws IOException { testStringMissing(SortField.Type.STRING); } - + public void testStringValMissing() throws IOException { testStringMissing(SortField.Type.STRING_VAL); } - + /** Tests sorting on type string with a missing value */ private void testStringMissing(SortField.Type sortType) throws IOException { Directory dir = newDirectory(); @@ -120,10 +119,10 @@ private void testStringMissing(SortField.Type sortType) throws IOException { doc.add(newStringField("value", "bar", Field.Store.YES)); writer.addDocument(doc); Type type = sortType == SortField.Type.STRING ? Type.SORTED : Type.BINARY; - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", type)); + IndexReader ir = + UninvertingReader.wrap(writer.getReader(), Collections.singletonMap("value", type)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); Sort sort = new Sort(new SortField("value", sortType)); @@ -137,15 +136,15 @@ private void testStringMissing(SortField.Type sortType) throws IOException { ir.close(); dir.close(); } - + public void testStringReverse() throws IOException { testStringReverse(SortField.Type.STRING); } - + public void testStringValReverse() throws IOException { testStringReverse(SortField.Type.STRING_VAL); } - + /** Tests reverse sorting on type string */ private void testStringReverse(SortField.Type sortType) throws IOException { Directory dir = newDirectory(); @@ -157,10 +156,10 @@ private void testStringReverse(SortField.Type sortType) throws IOException { doc.add(newStringField("value", "foo", Field.Store.YES)); writer.addDocument(doc); Type type = sortType == SortField.Type.STRING ? 
Type.SORTED : Type.BINARY; - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", type)); + IndexReader ir = + UninvertingReader.wrap(writer.getReader(), Collections.singletonMap("value", type)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); Sort sort = new Sort(new SortField("value", sortType, true)); @@ -173,17 +172,16 @@ private void testStringReverse(SortField.Type sortType) throws IOException { ir.close(); dir.close(); } - + public void testStringMissingSortedFirst() throws IOException { testStringMissingSortedFirst(SortField.Type.STRING); } - + public void testStringValMissingSortedFirst() throws IOException { testStringMissingSortedFirst(SortField.Type.STRING_VAL); } - /** Tests sorting on type string with a missing - * value sorted first */ + /** Tests sorting on type string with a missing value sorted first */ private void testStringMissingSortedFirst(SortField.Type sortType) throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); @@ -196,10 +194,10 @@ private void testStringMissingSortedFirst(SortField.Type sortType) throws IOExce doc.add(newStringField("value", "bar", Field.Store.YES)); writer.addDocument(doc); Type type = sortType == SortField.Type.STRING ? Type.SORTED : Type.BINARY; - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", type)); + IndexReader ir = + UninvertingReader.wrap(writer.getReader(), Collections.singletonMap("value", type)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); SortField sf = new SortField("value", sortType); Sort sort = new Sort(sf); @@ -218,13 +216,12 @@ private void testStringMissingSortedFirst(SortField.Type sortType) throws IOExce public void testStringMissingSortedFirstReverse() throws IOException { testStringMissingSortedFirstReverse(SortField.Type.STRING); } - + public void testStringValMissingSortedFirstReverse() throws IOException { testStringMissingSortedFirstReverse(SortField.Type.STRING_VAL); } - - /** Tests reverse sorting on type string with a missing - * value sorted first */ + + /** Tests reverse sorting on type string with a missing value sorted first */ private void testStringMissingSortedFirstReverse(SortField.Type sortType) throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); @@ -237,10 +234,10 @@ private void testStringMissingSortedFirstReverse(SortField.Type sortType) throws doc.add(newStringField("value", "bar", Field.Store.YES)); writer.addDocument(doc); Type type = sortType == SortField.Type.STRING ? 
Type.SORTED : Type.BINARY; - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", type)); + IndexReader ir = + UninvertingReader.wrap(writer.getReader(), Collections.singletonMap("value", type)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); SortField sf = new SortField("value", sortType, true); Sort sort = new Sort(sf); @@ -259,13 +256,12 @@ private void testStringMissingSortedFirstReverse(SortField.Type sortType) throws public void testStringMissingSortedLast() throws IOException { testStringMissingSortedLast(SortField.Type.STRING); } - + public void testStringValMissingSortedLast() throws IOException { testStringMissingSortedLast(SortField.Type.STRING_VAL); } - /** Tests sorting on type string with a missing - * value sorted last */ + /** Tests sorting on type string with a missing value sorted last */ private void testStringMissingSortedLast(SortField.Type sortType) throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); @@ -278,10 +274,10 @@ private void testStringMissingSortedLast(SortField.Type sortType) throws IOExcep doc.add(newStringField("value", "bar", Field.Store.YES)); writer.addDocument(doc); Type type = sortType == SortField.Type.STRING ? Type.SORTED : Type.BINARY; - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", type)); + IndexReader ir = + UninvertingReader.wrap(writer.getReader(), Collections.singletonMap("value", type)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); SortField sf = new SortField("value", sortType); sf.setMissingValue(SortField.STRING_LAST); @@ -301,13 +297,12 @@ private void testStringMissingSortedLast(SortField.Type sortType) throws IOExcep public void testStringMissingSortedLastReverse() throws IOException { testStringMissingSortedLastReverse(SortField.Type.STRING); } - + public void testStringValMissingSortedLastReverse() throws IOException { testStringMissingSortedLastReverse(SortField.Type.STRING_VAL); } - /** Tests reverse sorting on type string with a missing - * value sorted last */ + /** Tests reverse sorting on type string with a missing value sorted last */ private void testStringMissingSortedLastReverse(SortField.Type sortType) throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); @@ -320,10 +315,10 @@ private void testStringMissingSortedLastReverse(SortField.Type sortType) throws doc.add(newStringField("value", "bar", Field.Store.YES)); writer.addDocument(doc); Type type = sortType == SortField.Type.STRING ? 
Type.SORTED : Type.BINARY; - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", type)); + IndexReader ir = + UninvertingReader.wrap(writer.getReader(), Collections.singletonMap("value", type)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); SortField sf = new SortField("value", sortType, true); sf.setMissingValue(SortField.STRING_LAST); @@ -339,7 +334,7 @@ private void testStringMissingSortedLastReverse(SortField.Type sortType) throws ir.close(); dir.close(); } - + /** Tests sorting on internal docid order */ public void testFieldDoc() throws Exception { Directory dir = newDirectory(); @@ -352,7 +347,7 @@ public void testFieldDoc() throws Exception { writer.addDocument(doc); IndexReader ir = writer.getReader(); writer.close(); - + IndexSearcher searcher = newSearcher(ir); Sort sort = new Sort(SortField.FIELD_DOC); @@ -365,7 +360,7 @@ public void testFieldDoc() throws Exception { ir.close(); dir.close(); } - + /** Tests sorting on reverse internal docid order */ public void testFieldDocReverse() throws Exception { Directory dir = newDirectory(); @@ -378,7 +373,7 @@ public void testFieldDocReverse() throws Exception { writer.addDocument(doc); IndexReader ir = writer.getReader(); writer.close(); - + IndexSearcher searcher = newSearcher(ir); Sort sort = new Sort(new SortField(null, SortField.Type.DOC, true)); @@ -391,7 +386,7 @@ public void testFieldDocReverse() throws Exception { ir.close(); dir.close(); } - + /** Tests default sort (by score) */ public void testFieldScore() throws Exception { Directory dir = newDirectory(); @@ -404,7 +399,7 @@ public void testFieldScore() throws Exception { writer.addDocument(doc); IndexReader ir = writer.getReader(); writer.close(); - + IndexSearcher searcher = newSearcher(ir); Sort sort = new Sort(); @@ -421,7 +416,7 @@ public void testFieldScore() throws Exception { ir.close(); dir.close(); } - + /** Tests default sort (by score) in reverse */ public void testFieldScoreReverse() throws Exception { Directory dir = newDirectory(); @@ -434,7 +429,7 @@ public void testFieldScoreReverse() throws Exception { writer.addDocument(doc); IndexReader ir = writer.getReader(); writer.close(); - + IndexSearcher searcher = newSearcher(ir); // this test expects the freq to make doc 1 scores greater than doc 0 searcher.setSimilarity(new BM25Similarity()); @@ -469,10 +464,11 @@ public void testInt() throws IOException { doc.add(new IntPoint("value", 4)); doc.add(new StoredField("value", 4)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.INTEGER_POINT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.INTEGER_POINT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir, false); Sort sort = new Sort(new SortField("value", SortField.Type.INT)); @@ -486,7 +482,7 @@ public void testInt() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type int with a missing value */ public void testIntMissing() throws IOException { Directory dir = newDirectory(); @@ -501,10 +497,11 @@ public void testIntMissing() throws IOException { doc.add(new IntPoint("value", 4)); doc.add(new StoredField("value", 4)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.INTEGER_POINT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", 
Type.INTEGER_POINT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir, false); Sort sort = new Sort(new SortField("value", SortField.Type.INT)); @@ -518,8 +515,10 @@ public void testIntMissing() throws IOException { ir.close(); dir.close(); } - - /** Tests sorting on type int, specifying the missing value should be treated as Integer.MAX_VALUE */ + + /** + * Tests sorting on type int, specifying the missing value should be treated as Integer.MAX_VALUE + */ public void testIntMissingLast() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); @@ -533,10 +532,11 @@ public void testIntMissingLast() throws IOException { doc.add(new IntPoint("value", 4)); doc.add(new StoredField("value", 4)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.INTEGER_POINT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.INTEGER_POINT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir, false); SortField sortField = new SortField("value", SortField.Type.INT); sortField.setMissingValue(Integer.MAX_VALUE); @@ -552,7 +552,7 @@ public void testIntMissingLast() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type int in reverse */ public void testIntReverse() throws IOException { Directory dir = newDirectory(); @@ -569,10 +569,11 @@ public void testIntReverse() throws IOException { doc.add(new IntPoint("value", 4)); doc.add(new StoredField("value", 4)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.INTEGER_POINT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.INTEGER_POINT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir, false); Sort sort = new Sort(new SortField("value", SortField.Type.INT, true)); @@ -600,10 +601,11 @@ public void testLegacyInt() throws IOException { doc = new Document(); doc.add(new LegacyIntField("value", 4, Field.Store.YES)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LEGACY_INTEGER)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LEGACY_INTEGER)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); Sort sort = new Sort(new SortField("value", SortField.Type.INT)); @@ -617,7 +619,7 @@ public void testLegacyInt() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type legacy int with a missing value */ public void testLegacyIntMissing() throws IOException { Directory dir = newDirectory(); @@ -630,10 +632,11 @@ public void testLegacyIntMissing() throws IOException { doc = new Document(); doc.add(new LegacyIntField("value", 4, Field.Store.YES)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LEGACY_INTEGER)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LEGACY_INTEGER)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); Sort sort = new Sort(new SortField("value", SortField.Type.INT)); @@ -647,8 +650,11 @@ public void testLegacyIntMissing() throws IOException { ir.close(); dir.close(); } - - /** Tests sorting on type legacy int, specifying the missing value should be treated 
as Integer.MAX_VALUE */ + + /** + * Tests sorting on type legacy int, specifying the missing value should be treated as + * Integer.MAX_VALUE + */ public void testLegacyIntMissingLast() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); @@ -660,10 +666,11 @@ public void testLegacyIntMissingLast() throws IOException { doc = new Document(); doc.add(new LegacyIntField("value", 4, Field.Store.YES)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LEGACY_INTEGER)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LEGACY_INTEGER)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); SortField sortField = new SortField("value", SortField.Type.INT); sortField.setMissingValue(Integer.MAX_VALUE); @@ -679,7 +686,7 @@ public void testLegacyIntMissingLast() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type legacy int in reverse */ public void testLegacyIntReverse() throws IOException { Directory dir = newDirectory(); @@ -693,10 +700,11 @@ public void testLegacyIntReverse() throws IOException { doc = new Document(); doc.add(new LegacyIntField("value", 4, Field.Store.YES)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LEGACY_INTEGER)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LEGACY_INTEGER)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); Sort sort = new Sort(new SortField("value", SortField.Type.INT, true)); @@ -710,7 +718,7 @@ public void testLegacyIntReverse() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type long */ public void testLong() throws IOException { Directory dir = newDirectory(); @@ -727,10 +735,11 @@ public void testLong() throws IOException { doc.add(new LongPoint("value", 4)); doc.add(new StoredField("value", 4)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LONG_POINT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LONG_POINT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir, false); Sort sort = new Sort(new SortField("value", SortField.Type.LONG)); @@ -744,7 +753,7 @@ public void testLong() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type long with a missing value */ public void testLongMissing() throws IOException { Directory dir = newDirectory(); @@ -759,10 +768,11 @@ public void testLongMissing() throws IOException { doc.add(new LongPoint("value", 4)); doc.add(new StoredField("value", 4)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LONG_POINT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LONG_POINT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir, false); Sort sort = new Sort(new SortField("value", SortField.Type.LONG)); @@ -776,8 +786,10 @@ public void testLongMissing() throws IOException { ir.close(); dir.close(); } - - /** Tests sorting on type long, specifying the missing value should be treated as Long.MAX_VALUE */ + + /** + * Tests sorting on type long, specifying the missing value should be treated as 
Long.MAX_VALUE + */ public void testLongMissingLast() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); @@ -791,10 +803,11 @@ public void testLongMissingLast() throws IOException { doc.add(new LongPoint("value", 4)); doc.add(new StoredField("value", 4)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LONG_POINT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LONG_POINT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir, false); SortField sortField = new SortField("value", SortField.Type.LONG); sortField.setMissingValue(Long.MAX_VALUE); @@ -810,7 +823,7 @@ public void testLongMissingLast() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type long in reverse */ public void testLongReverse() throws IOException { Directory dir = newDirectory(); @@ -827,10 +840,11 @@ public void testLongReverse() throws IOException { doc.add(new LongPoint("value", 4)); doc.add(new StoredField("value", 4)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LONG_POINT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LONG_POINT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir, false); Sort sort = new Sort(new SortField("value", SortField.Type.LONG, true)); @@ -844,7 +858,7 @@ public void testLongReverse() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type legacy long */ public void testLegacyLong() throws IOException { Directory dir = newDirectory(); @@ -858,10 +872,11 @@ public void testLegacyLong() throws IOException { doc = new Document(); doc.add(new LegacyLongField("value", 4, Field.Store.YES)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LEGACY_LONG)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LEGACY_LONG)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); Sort sort = new Sort(new SortField("value", SortField.Type.LONG)); @@ -875,7 +890,7 @@ public void testLegacyLong() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type legacy long with a missing value */ public void testLegacyLongMissing() throws IOException { Directory dir = newDirectory(); @@ -888,10 +903,11 @@ public void testLegacyLongMissing() throws IOException { doc = new Document(); doc.add(new LegacyLongField("value", 4, Field.Store.YES)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LEGACY_LONG)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LEGACY_LONG)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); Sort sort = new Sort(new SortField("value", SortField.Type.LONG)); @@ -905,8 +921,11 @@ public void testLegacyLongMissing() throws IOException { ir.close(); dir.close(); } - - /** Tests sorting on type legacy long, specifying the missing value should be treated as Long.MAX_VALUE */ + + /** + * Tests sorting on type legacy long, specifying the missing value should be treated as + * Long.MAX_VALUE + */ public void testLegacyLongMissingLast() throws IOException { Directory dir = newDirectory(); 
RandomIndexWriter writer = new RandomIndexWriter(random(), dir); @@ -918,10 +937,11 @@ public void testLegacyLongMissingLast() throws IOException { doc = new Document(); doc.add(new LegacyLongField("value", 4, Field.Store.YES)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LEGACY_LONG)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LEGACY_LONG)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); SortField sortField = new SortField("value", SortField.Type.LONG); sortField.setMissingValue(Long.MAX_VALUE); @@ -937,7 +957,7 @@ public void testLegacyLongMissingLast() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type legacy long in reverse */ public void testLegacyLongReverse() throws IOException { Directory dir = newDirectory(); @@ -951,10 +971,11 @@ public void testLegacyLongReverse() throws IOException { doc = new Document(); doc.add(new LegacyLongField("value", 4, Field.Store.YES)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LEGACY_LONG)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LEGACY_LONG)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); Sort sort = new Sort(new SortField("value", SortField.Type.LONG, true)); @@ -968,7 +989,7 @@ public void testLegacyLongReverse() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type float */ public void testFloat() throws IOException { Directory dir = newDirectory(); @@ -985,10 +1006,11 @@ public void testFloat() throws IOException { doc.add(new FloatPoint("value", 4.2f)); doc.add(new StoredField("value", 4.2f)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.FLOAT_POINT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.FLOAT_POINT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir, false); Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT)); @@ -1002,7 +1024,7 @@ public void testFloat() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type float with a missing value */ public void testFloatMissing() throws IOException { Directory dir = newDirectory(); @@ -1017,10 +1039,11 @@ public void testFloatMissing() throws IOException { doc.add(new FloatPoint("value", 4.2f)); doc.add(new StoredField("value", 4.2f)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.FLOAT_POINT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.FLOAT_POINT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir, false); Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT)); @@ -1034,8 +1057,10 @@ public void testFloatMissing() throws IOException { ir.close(); dir.close(); } - - /** Tests sorting on type float, specifying the missing value should be treated as Float.MAX_VALUE */ + + /** + * Tests sorting on type float, specifying the missing value should be treated as Float.MAX_VALUE + */ public void testFloatMissingLast() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); @@ -1049,10 +1074,11 @@ public void 
testFloatMissingLast() throws IOException { doc.add(new FloatPoint("value", 4.2f)); doc.add(new StoredField("value", 4.2f)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.FLOAT_POINT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.FLOAT_POINT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir, false); SortField sortField = new SortField("value", SortField.Type.FLOAT); sortField.setMissingValue(Float.MAX_VALUE); @@ -1068,7 +1094,7 @@ public void testFloatMissingLast() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type float in reverse */ public void testFloatReverse() throws IOException { Directory dir = newDirectory(); @@ -1085,10 +1111,11 @@ public void testFloatReverse() throws IOException { doc.add(new FloatPoint("value", 4.2f)); doc.add(new StoredField("value", 4.2f)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.FLOAT_POINT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.FLOAT_POINT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir, false); Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT, true)); @@ -1102,7 +1129,7 @@ public void testFloatReverse() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type legacy float */ public void testLegacyFloat() throws IOException { Directory dir = newDirectory(); @@ -1116,10 +1143,11 @@ public void testLegacyFloat() throws IOException { doc = new Document(); doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LEGACY_FLOAT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LEGACY_FLOAT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT)); @@ -1133,7 +1161,7 @@ public void testLegacyFloat() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type legacy float with a missing value */ public void testLegacyFloatMissing() throws IOException { Directory dir = newDirectory(); @@ -1146,10 +1174,11 @@ public void testLegacyFloatMissing() throws IOException { doc = new Document(); doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LEGACY_FLOAT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LEGACY_FLOAT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT)); @@ -1163,8 +1192,11 @@ public void testLegacyFloatMissing() throws IOException { ir.close(); dir.close(); } - - /** Tests sorting on type legacy float, specifying the missing value should be treated as Float.MAX_VALUE */ + + /** + * Tests sorting on type legacy float, specifying the missing value should be treated as + * Float.MAX_VALUE + */ public void testLegacyFloatMissingLast() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); @@ -1176,10 +1208,11 @@ public void testLegacyFloatMissingLast() throws 
IOException { doc = new Document(); doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LEGACY_FLOAT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LEGACY_FLOAT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); SortField sortField = new SortField("value", SortField.Type.FLOAT); sortField.setMissingValue(Float.MAX_VALUE); @@ -1195,7 +1228,7 @@ public void testLegacyFloatMissingLast() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type legacy float in reverse */ public void testLegacyFloatReverse() throws IOException { Directory dir = newDirectory(); @@ -1209,10 +1242,11 @@ public void testLegacyFloatReverse() throws IOException { doc = new Document(); doc.add(new LegacyFloatField("value", 4.2f, Field.Store.YES)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LEGACY_FLOAT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LEGACY_FLOAT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); Sort sort = new Sort(new SortField("value", SortField.Type.FLOAT, true)); @@ -1226,7 +1260,7 @@ public void testLegacyFloatReverse() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type double */ public void testDouble() throws IOException { Directory dir = newDirectory(); @@ -1247,10 +1281,11 @@ public void testDouble() throws IOException { doc.add(new DoublePoint("value", 4.2333333333332)); doc.add(new StoredField("value", 4.2333333333332)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.DOUBLE_POINT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.DOUBLE_POINT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir, false); Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE)); @@ -1265,7 +1300,7 @@ public void testDouble() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type double with +/- zero */ public void testDoubleSignedZero() throws IOException { Directory dir = newDirectory(); @@ -1279,10 +1314,11 @@ public void testDoubleSignedZero() throws IOException { doc.add(new StoredField("value", -0d)); writer.addDocument(doc); doc = new Document(); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.DOUBLE_POINT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.DOUBLE_POINT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir, false); Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE)); @@ -1300,7 +1336,7 @@ public void testDoubleSignedZero() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type double with a missing value */ public void testDoubleMissing() throws IOException { Directory dir = newDirectory(); @@ -1319,10 +1355,11 @@ public void testDoubleMissing() throws IOException { doc.add(new DoublePoint("value", 4.2333333333332)); doc.add(new StoredField("value", 4.2333333333332)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.DOUBLE_POINT)); + IndexReader ir = + 
UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.DOUBLE_POINT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir, false); Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE)); @@ -1337,8 +1374,11 @@ public void testDoubleMissing() throws IOException { ir.close(); dir.close(); } - - /** Tests sorting on type double, specifying the missing value should be treated as Double.MAX_VALUE */ + + /** + * Tests sorting on type double, specifying the missing value should be treated as + * Double.MAX_VALUE + */ public void testDoubleMissingLast() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); @@ -1356,10 +1396,11 @@ public void testDoubleMissingLast() throws IOException { doc.add(new DoublePoint("value", 4.2333333333332)); doc.add(new StoredField("value", 4.2333333333332)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.DOUBLE_POINT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.DOUBLE_POINT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir, false); SortField sortField = new SortField("value", SortField.Type.DOUBLE); sortField.setMissingValue(Double.MAX_VALUE); @@ -1376,7 +1417,7 @@ public void testDoubleMissingLast() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type double in reverse */ public void testDoubleReverse() throws IOException { Directory dir = newDirectory(); @@ -1397,10 +1438,11 @@ public void testDoubleReverse() throws IOException { doc.add(new DoublePoint("value", 4.2333333333332)); doc.add(new StoredField("value", 4.2333333333332)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.DOUBLE_POINT)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.DOUBLE_POINT)); writer.close(); - + IndexSearcher searcher = newSearcher(ir, false); Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE, true)); @@ -1432,10 +1474,11 @@ public void testLegacyDouble() throws IOException { doc = new Document(); doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LEGACY_DOUBLE)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LEGACY_DOUBLE)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE)); @@ -1450,7 +1493,7 @@ public void testLegacyDouble() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type legacy double with +/- zero */ public void testLegacyDoubleSignedZero() throws IOException { Directory dir = newDirectory(); @@ -1462,10 +1505,11 @@ public void testLegacyDoubleSignedZero() throws IOException { doc.add(new LegacyDoubleField("value", -0d, Field.Store.YES)); writer.addDocument(doc); doc = new Document(); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LEGACY_DOUBLE)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LEGACY_DOUBLE)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); Sort sort = new Sort(new 
SortField("value", SortField.Type.DOUBLE)); @@ -1483,7 +1527,7 @@ public void testLegacyDoubleSignedZero() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type legacy double with a missing value */ public void testLegacyDoubleMissing() throws IOException { Directory dir = newDirectory(); @@ -1499,10 +1543,11 @@ public void testLegacyDoubleMissing() throws IOException { doc = new Document(); doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LEGACY_DOUBLE)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LEGACY_DOUBLE)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE)); @@ -1517,8 +1562,11 @@ public void testLegacyDoubleMissing() throws IOException { ir.close(); dir.close(); } - - /** Tests sorting on type legacy double, specifying the missing value should be treated as Double.MAX_VALUE */ + + /** + * Tests sorting on type legacy double, specifying the missing value should be treated as + * Double.MAX_VALUE + */ public void testLegacyDoubleMissingLast() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); @@ -1533,10 +1581,11 @@ public void testLegacyDoubleMissingLast() throws IOException { doc = new Document(); doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LEGACY_DOUBLE)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LEGACY_DOUBLE)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); SortField sortField = new SortField("value", SortField.Type.DOUBLE); sortField.setMissingValue(Double.MAX_VALUE); @@ -1553,7 +1602,7 @@ public void testLegacyDoubleMissingLast() throws IOException { ir.close(); dir.close(); } - + /** Tests sorting on type legacy double in reverse */ public void testLegacyDoubleReverse() throws IOException { Directory dir = newDirectory(); @@ -1570,10 +1619,11 @@ public void testLegacyDoubleReverse() throws IOException { doc = new Document(); doc.add(new LegacyDoubleField("value", 4.2333333333332, Field.Store.YES)); writer.addDocument(doc); - IndexReader ir = UninvertingReader.wrap(writer.getReader(), - Collections.singletonMap("value", Type.LEGACY_DOUBLE)); + IndexReader ir = + UninvertingReader.wrap( + writer.getReader(), Collections.singletonMap("value", Type.LEGACY_DOUBLE)); writer.close(); - + IndexSearcher searcher = newSearcher(ir); Sort sort = new Sort(new SortField("value", SortField.Type.DOUBLE, true)); @@ -1588,10 +1638,14 @@ public void testLegacyDoubleReverse() throws IOException { ir.close(); dir.close(); } - + public void testEmptyStringVsNullStringSort() throws Exception { Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE)); + IndexWriter w = + new IndexWriter( + dir, + newIndexWriterConfig(new MockAnalyzer(random())) + .setMergePolicy(NoMergePolicy.INSTANCE)); Document doc = new Document(); doc.add(newStringField("f", "", Field.Store.NO)); doc.add(newStringField("t", "1", Field.Store.NO)); @@ -1601,11 +1655,15 @@ public void 
testEmptyStringVsNullStringSort() throws Exception { doc.add(newStringField("t", "1", Field.Store.NO)); w.addDocument(doc); - IndexReader r = UninvertingReader.wrap(DirectoryReader.open(w), - Collections.singletonMap("f", Type.SORTED)); + IndexReader r = + UninvertingReader.wrap(DirectoryReader.open(w), Collections.singletonMap("f", Type.SORTED)); w.close(); IndexSearcher s = newSearcher(r); - TopDocs hits = s.search(new TermQuery(new Term("t", "1")), 10, new Sort(new SortField("f", SortField.Type.STRING))); + TopDocs hits = + s.search( + new TermQuery(new Term("t", "1")), + 10, + new Sort(new SortField("f", SortField.Type.STRING))); assertEquals(2, hits.totalHits.value); // null sorts first assertEquals(1, hits.scoreDocs[0].doc); @@ -1614,43 +1672,48 @@ public void testEmptyStringVsNullStringSort() throws Exception { r.close(); dir.close(); } - - /** test that we throw exception on multi-valued field, creates corrupt reader, use SORTED_SET instead */ + + /** + * test that we throw exception on multi-valued field, creates corrupt reader, use SORTED_SET + * instead + */ public void testMultiValuedField() throws IOException { Directory indexStore = newDirectory(); - IndexWriter writer = new IndexWriter(indexStore, newIndexWriterConfig(new MockAnalyzer(random()))); - for(int i=0;i<5; i++) { - Document doc = new Document(); - doc.add(new StringField("string", "a"+i, Field.Store.NO)); - doc.add(new StringField("string", "b"+i, Field.Store.NO)); - writer.addDocument(doc); + IndexWriter writer = + new IndexWriter(indexStore, newIndexWriterConfig(new MockAnalyzer(random()))); + for (int i = 0; i < 5; i++) { + Document doc = new Document(); + doc.add(new StringField("string", "a" + i, Field.Store.NO)); + doc.add(new StringField("string", "b" + i, Field.Store.NO)); + writer.addDocument(doc); } writer.forceMerge(1); // enforce one segment to have a higher unique term count in all cases writer.close(); - Sort sort = new Sort( - new SortField("string", SortField.Type.STRING), - SortField.FIELD_DOC); - IndexReader reader = UninvertingReader.wrap(DirectoryReader.open(indexStore), - Collections.singletonMap("string", Type.SORTED)); + Sort sort = new Sort(new SortField("string", SortField.Type.STRING), SortField.FIELD_DOC); + IndexReader reader = + UninvertingReader.wrap( + DirectoryReader.open(indexStore), Collections.singletonMap("string", Type.SORTED)); IndexSearcher searcher = new IndexSearcher(reader); - expectThrows(IllegalStateException.class, () -> { - searcher.search(new MatchAllDocsQuery(), 500, sort); - }); + expectThrows( + IllegalStateException.class, + () -> { + searcher.search(new MatchAllDocsQuery(), 500, sort); + }); reader.close(); indexStore.close(); } - + public void testMaxScore() throws Exception { Directory d = newDirectory(); // Not RIW because we need exactly 2 segs: IndexWriter w = new IndexWriter(d, new IndexWriterConfig(new MockAnalyzer(random()))); int id = 0; - for(int seg=0;seg<2;seg++) { - for(int docIDX=0;docIDX<10;docIDX++) { + for (int seg = 0; seg < 2; seg++) { + for (int docIDX = 0; docIDX < 10; docIDX++) { Document doc = new Document(); doc.add(new LegacyIntField("id", docIDX, Field.Store.YES)); StringBuilder sb = new StringBuilder(); - for(int i=0;i mappings = new HashMap<>(); + Map<String, Type> mappings = new HashMap<>(); mappings.put("tievalue", Type.SORTED); mappings.put("value", Type.SORTED); - + IndexReader ir = UninvertingReader.wrap(writer.getReader(), mappings); writer.close(); - + IndexSearcher searcher = newSearcher(ir); // tievalue, then value - Sort sort = new
Sort(new SortField("tievalue", SortField.Type.STRING), - new SortField("value", SortField.Type.STRING)); + Sort sort = + new Sort( + new SortField("tievalue", SortField.Type.STRING), + new SortField("value", SortField.Type.STRING)); TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort); assertEquals(2, td.totalHits.value); @@ -1801,7 +1871,8 @@ public void testScore() throws IOException { bq.add(new MatchAllDocsQuery(), Occur.SHOULD); TopDocs td = searcher.search(bq.build(), 10, sort); assertEquals(2, td.totalHits.value); - if (Float.isNaN(td.scoreDocs[0].score) == false && Float.isNaN(td.scoreDocs[1].score) == false) { + if (Float.isNaN(td.scoreDocs[0].score) == false + && Float.isNaN(td.scoreDocs[1].score) == false) { assertEquals(1, td.scoreDocs[0].doc); assertEquals(0, td.scoreDocs[1].doc); } diff --git a/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSortRandom.java b/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSortRandom.java index 701cd5778fe..6a7f7181984 100644 --- a/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSortRandom.java +++ b/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheSortRandom.java @@ -27,7 +27,6 @@ import java.util.Objects; import java.util.Random; import java.util.Set; - import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; @@ -81,7 +80,8 @@ private void testRandomStringSort(SortField.Type type) throws Exception { final Set<String> seen = new HashSet<>(); final int maxLength = TestUtil.nextInt(random, 5, 100); if (VERBOSE) { - System.out.println("TEST: NUM_DOCS=" + NUM_DOCS + " maxLength=" + maxLength + " allowDups=" + allowDups); + System.out.println( + "TEST: NUM_DOCS=" + NUM_DOCS + " maxLength=" + maxLength + " allowDups=" + allowDups); } int numDocs = 0; @@ -138,7 +138,7 @@ private void testRandomStringSort(SortField.Type type) throws Exception { } } - Map<String,Type> mapping = new HashMap<>(); + Map<String, Type> mapping = new HashMap<>(); mapping.put("stringdv", Type.SORTED); mapping.put("id", Type.INTEGER_POINT); final IndexReader r = UninvertingReader.wrap(writer.getReader(), mapping); @@ -146,10 +146,10 @@ private void testRandomStringSort(SortField.Type type) throws Exception { if (VERBOSE) { System.out.println(" reader=" + r); } - + final IndexSearcher s = newSearcher(r, false); final int ITERS = atLeast(100); - for(int iter=0;iter" : br.utf8ToString())); - if (idx == hitCount-1) { + if (idx == hitCount - 1) { break; } } } - + if (VERBOSE) { System.out.println(" actual:"); - for(int hitIDX=0;hitIDX" : br.utf8ToString()) + " id=" + s.doc(fd.doc).get("id")); + System.out.println( + " " + + hitIDX + + ": " + + (br == null ? "<missing>" : br.utf8ToString()) + + " id=" + + s.doc(fd.doc).get("id")); } } - for(int hitIDX=0;hitIDX docValues; - public final List<BytesRef> matchValues = Collections.synchronizedList(new ArrayList<BytesRef>()); + public final List<BytesRef> matchValues = + Collections.synchronizedList(new ArrayList<BytesRef>()); // density should be 0.0 ...
1.0 public RandomQuery(long seed, float density, List<BytesRef> docValues) { @@ -276,7 +296,8 @@ public RandomQuery(long seed, float density, List<BytesRef> docValues) { } @Override - public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { + public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) + throws IOException { return new ConstantScoreWeight(this, boost) { @Override public Scorer scorer(LeafReaderContext context) throws IOException { @@ -285,16 +306,17 @@ public Scorer scorer(LeafReaderContext context) throws IOException { final NumericDocValues idSource = DocValues.getNumeric(context.reader(), "id"); assertNotNull(idSource); final FixedBitSet bits = new FixedBitSet(maxDoc); - for(int docID=0;docID unordered = new ArrayList<>(values); Collections.shuffle(unordered, random()); @@ -277,14 +276,14 @@ private void doTestSortedSetVsUninvertedField(int minLength, int maxLength) thro writer.commit(); } } - + // delete some docs - int numDeletions = random().nextInt(numDocs/10); + int numDeletions = random().nextInt(numDocs / 10); for (int i = 0; i < numDeletions; i++) { int id = random().nextInt(numDocs); writer.deleteDocuments(new Term("id", Integer.toString(id))); } - + // compare per-segment DirectoryReader ir = writer.getReader(); for (LeafReaderContext context : ir.leaves()) { @@ -294,9 +293,9 @@ private void doTestSortedSetVsUninvertedField(int minLength, int maxLength) thro assertEquals(r.maxDoc(), expected, actual); } ir.close(); - + writer.forceMerge(1); - + // now compare again after the merge ir = writer.getReader(); LeafReader ar = getOnlyLeafReader(ir); @@ -304,11 +303,11 @@ private void doTestSortedSetVsUninvertedField(int minLength, int maxLength) thro SortedSetDocValues actual = ar.getSortedSetDocValues("dv"); assertEquals(ir.maxDoc(), expected, actual); ir.close(); - + writer.close(); dir.close(); } - + private void doTestMissingVsFieldCache(LongProducer longs) throws Exception { Directory dir = newDirectory(); IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); @@ -317,7 +316,6 @@ private void doTestMissingVsFieldCache(LongProducer longs) throws Exception { Field indexedField = newStringField("indexed", "", Field.Store.NO); Field dvField = new NumericDocValuesField("dv", 0); - // index some docs int numDocs = atLeast(300); // numDocs should be always > 256 so that in case of a codec that optimizes @@ -340,9 +338,9 @@ private void doTestMissingVsFieldCache(LongProducer longs) throws Exception { writer.commit(); } } - + // delete some docs - int numDeletions = random().nextInt(numDocs/10); + int numDeletions = random().nextInt(numDocs / 10); for (int i = 0; i < numDeletions; i++) { int id = random().nextInt(numDocs); writer.deleteDocuments(new Term("id", Integer.toString(id))); @@ -353,7 +351,7 @@ private void doTestMissingVsFieldCache(LongProducer longs) throws Exception { writer.forceMerge(numDocs / 256); writer.close(); - + // compare DirectoryReader ir = DirectoryReader.open(dir); for (LeafReaderContext context : ir.leaves()) { @@ -365,17 +363,19 @@ private void doTestMissingVsFieldCache(LongProducer longs) throws Exception { ir.close(); dir.close(); } - - private void doTestMissingVsFieldCache(final long minValue, final long maxValue) throws Exception { - doTestMissingVsFieldCache(new LongProducer() { - @Override - long next() { - return TestUtil.nextLong(random(), minValue, maxValue); - } - }); + + private void doTestMissingVsFieldCache(final long minValue, final long maxValue) + throws
Exception { + doTestMissingVsFieldCache( + new LongProducer() { + @Override + long next() { + return TestUtil.nextLong(random(), minValue, maxValue); + } + }); } - - static abstract class LongProducer { + + abstract static class LongProducer { abstract long next(); } @@ -385,8 +385,9 @@ private void assertEquals(Bits expected, Bits actual) throws Exception { assertEquals(expected.get(i), actual.get(i)); } } - - private void assertEquals(int maxDoc, SortedDocValues expected, SortedDocValues actual) throws Exception { + + private void assertEquals(int maxDoc, SortedDocValues expected, SortedDocValues actual) + throws Exception { // can be null for the segment if no docs actually had any SortedDocValues // in this case FC.getDocTermsOrds returns EMPTY if (actual == null) { @@ -406,19 +407,20 @@ private void assertEquals(int maxDoc, SortedDocValues expected, SortedDocValues assertEquals(expected.ordValue(), actual.ordValue()); assertEquals(expected.lookupOrd(expected.ordValue()), actual.lookupOrd(actual.ordValue())); } - + // compare ord dictionary for (long i = 0; i < expected.getValueCount(); i++) { final BytesRef expectedBytes = BytesRef.deepCopyOf(expected.lookupOrd((int) i)); final BytesRef actualBytes = actual.lookupOrd((int) i); assertEquals(expectedBytes, actualBytes); } - + // compare termsenum assertEquals(expected.getValueCount(), expected.termsEnum(), actual.termsEnum()); } - - private void assertEquals(int maxDoc, SortedSetDocValues expected, SortedSetDocValues actual) throws Exception { + + private void assertEquals(int maxDoc, SortedSetDocValues expected, SortedSetDocValues actual) + throws Exception { // can be null for the segment if no docs actually had any SortedDocValues // in this case FC.getDocTermsOrds returns EMPTY if (actual == null) { @@ -438,21 +440,21 @@ private void assertEquals(int maxDoc, SortedSetDocValues expected, SortedSetDocV } assertEquals(NO_MORE_ORDS, actual.nextOrd()); } - + // compare ord dictionary for (long i = 0; i < expected.getValueCount(); i++) { final BytesRef expectedBytes = BytesRef.deepCopyOf(expected.lookupOrd(i)); final BytesRef actualBytes = actual.lookupOrd(i); assertEquals(expectedBytes, actualBytes); } - + // compare termsenum assertEquals(expected.getValueCount(), expected.termsEnum(), actual.termsEnum()); } - + private void assertEquals(long numOrds, TermsEnum expected, TermsEnum actual) throws Exception { BytesRef ref; - + // sequential next() through all terms while ((ref = expected.next()) != null) { assertEquals(ref, actual.next()); @@ -460,7 +462,7 @@ private void assertEquals(long numOrds, TermsEnum expected, TermsEnum actual) th assertEquals(expected.term(), actual.term()); } assertNull(actual.next()); - + // sequential seekExact(ord) through all terms for (long i = 0; i < numOrds; i++) { expected.seekExact(i); @@ -468,7 +470,7 @@ private void assertEquals(long numOrds, TermsEnum expected, TermsEnum actual) th assertEquals(expected.ord(), actual.ord()); assertEquals(expected.term(), actual.term()); } - + // sequential seekExact(BytesRef) through all terms for (long i = 0; i < numOrds; i++) { expected.seekExact(i); @@ -476,7 +478,7 @@ private void assertEquals(long numOrds, TermsEnum expected, TermsEnum actual) th assertEquals(expected.ord(), actual.ord()); assertEquals(expected.term(), actual.term()); } - + // sequential seekCeil(BytesRef) through all terms for (long i = 0; i < numOrds; i++) { expected.seekExact(i); @@ -484,7 +486,7 @@ private void assertEquals(long numOrds, TermsEnum expected, TermsEnum actual) th 
assertEquals(expected.ord(), actual.ord()); assertEquals(expected.term(), actual.term()); } - + // random seekExact(ord) for (long i = 0; i < numOrds; i++) { long randomOrd = TestUtil.nextLong(random(), 0, numOrds - 1); @@ -493,7 +495,7 @@ private void assertEquals(long numOrds, TermsEnum expected, TermsEnum actual) th assertEquals(expected.ord(), actual.ord()); assertEquals(expected.term(), actual.term()); } - + // random seekExact(BytesRef) for (long i = 0; i < numOrds; i++) { long randomOrd = TestUtil.nextLong(random(), 0, numOrds - 1); @@ -502,7 +504,7 @@ private void assertEquals(long numOrds, TermsEnum expected, TermsEnum actual) th assertEquals(expected.ord(), actual.ord()); assertEquals(expected.term(), actual.term()); } - + // random seekCeil(BytesRef) for (long i = 0; i < numOrds; i++) { BytesRef target = new BytesRef(TestUtil.randomUnicodeString(random())); diff --git a/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheWithThreads.java b/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheWithThreads.java index 523114567d1..ed25bbe6c1c 100644 --- a/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheWithThreads.java +++ b/solr/core/src/test/org/apache/solr/uninverting/TestFieldCacheWithThreads.java @@ -24,7 +24,6 @@ import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; - import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Document; @@ -48,13 +47,16 @@ public class TestFieldCacheWithThreads extends SolrTestCase { public void test() throws Exception { Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + IndexWriter w = + new IndexWriter( + dir, + newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); final List numbers = new ArrayList<>(); final List binary = new ArrayList<>(); final List sorted = new ArrayList<>(); final int numDocs = atLeast(100); - for(int i=0;i threads = new ArrayList<>(); final CountDownLatch startingGun = new CountDownLatch(1); - for(int t=0;t= NUM_ITER) { - break; - } - } else if (op == 1) { - Bits docsWithField = cache.getDocsWithField(reader, "sparse", null); - for (int i = 0; i < docsWithField.length(); i++) { - assertEquals(i%2 == 0, docsWithField.get(i)); - } - } else { - NumericDocValues ints = cache.getNumerics(reader, "sparse", FieldCache.LEGACY_INT_PARSER); - for (int i = 0; i < reader.maxDoc(); i++) { - if (i%2 == 0) { - assertEquals(i, ints.nextDoc()); - assertEquals(i, ints.longValue()); + final CyclicBarrier restart = + new CyclicBarrier( + NUM_THREADS, + new Runnable() { + @Override + public void run() { + cache.purgeAllCaches(); + iters.incrementAndGet(); + } + }); + for (int threadIDX = 0; threadIDX < NUM_THREADS; threadIDX++) { + threads[threadIDX] = + new Thread() { + @Override + public void run() { + + try { + while (!failed.get()) { + final int op = random().nextInt(3); + if (op == 0) { + // Purge all caches & resume, once all + // threads get here: + restart.await(); + if (iters.get() >= NUM_ITER) { + break; + } + } else if (op == 1) { + Bits docsWithField = cache.getDocsWithField(reader, "sparse", null); + for (int i = 0; i < docsWithField.length(); i++) { + assertEquals(i % 2 == 0, docsWithField.get(i)); + } + } else { + NumericDocValues ints = + cache.getNumerics(reader, "sparse", FieldCache.LEGACY_INT_PARSER); + for (int i = 0; i < 
reader.maxDoc(); i++) { + if (i % 2 == 0) { + assertEquals(i, ints.nextDoc()); + assertEquals(i, ints.longValue()); + } } } } + } catch (Throwable t) { + failed.set(true); + restart.reset(); + throw new RuntimeException(t); } - } catch (Throwable t) { - failed.set(true); - restart.reset(); - throw new RuntimeException(t); } - } - }; + }; threads[threadIDX].start(); } - for(int threadIDX=0;threadIDX { - FieldCache.DEFAULT.getNumerics(ar, "binary", FieldCache.LEGACY_INT_PARSER); - }); - + expectThrows( + IllegalStateException.class, + () -> { + FieldCache.DEFAULT.getNumerics(ar, "binary", FieldCache.LEGACY_INT_PARSER); + }); + // Sorted type: can be retrieved via getTerms(), getTermsIndex(), getDocTermOrds() - expectThrows(IllegalStateException.class, () -> { - FieldCache.DEFAULT.getNumerics(ar, "sorted", FieldCache.LEGACY_INT_PARSER); - }); - + expectThrows( + IllegalStateException.class, + () -> { + FieldCache.DEFAULT.getNumerics(ar, "sorted", FieldCache.LEGACY_INT_PARSER); + }); + // Numeric type: can be retrieved via getInts() and so on - NumericDocValues numeric = FieldCache.DEFAULT.getNumerics(ar, "numeric", FieldCache.LEGACY_INT_PARSER); + NumericDocValues numeric = + FieldCache.DEFAULT.getNumerics(ar, "numeric", FieldCache.LEGACY_INT_PARSER); assertEquals(0, numeric.nextDoc()); assertEquals(42, numeric.longValue()); - - // SortedSet type: can be retrieved via getDocTermOrds() - expectThrows(IllegalStateException.class, () -> { - FieldCache.DEFAULT.getNumerics(ar, "sortedset", FieldCache.LEGACY_INT_PARSER); - }); - + + // SortedSet type: can be retrieved via getDocTermOrds() + expectThrows( + IllegalStateException.class, + () -> { + FieldCache.DEFAULT.getNumerics(ar, "sortedset", FieldCache.LEGACY_INT_PARSER); + }); + ir.close(); dir.close(); } - + public void testNonexistantFields() throws Exception { Directory dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir); @@ -306,31 +337,32 @@ public void testNonexistantFields() throws Exception { iw.addDocument(doc); DirectoryReader ir = iw.getReader(); iw.close(); - + LeafReader ar = getOnlyLeafReader(ir); - + final FieldCache cache = FieldCache.DEFAULT; cache.purgeAllCaches(); assertEquals(0, cache.getCacheEntries().length); - + NumericDocValues ints = cache.getNumerics(ar, "bogusints", FieldCache.LEGACY_INT_PARSER); assertEquals(NO_MORE_DOCS, ints.nextDoc()); - + NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.LEGACY_LONG_PARSER); assertEquals(NO_MORE_DOCS, longs.nextDoc()); - + NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.LEGACY_FLOAT_PARSER); assertEquals(NO_MORE_DOCS, floats.nextDoc()); - - NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.LEGACY_DOUBLE_PARSER); + + NumericDocValues doubles = + cache.getNumerics(ar, "bogusdoubles", FieldCache.LEGACY_DOUBLE_PARSER); assertEquals(NO_MORE_DOCS, doubles.nextDoc()); - + // check that we cached nothing assertEquals(0, cache.getCacheEntries().length); ir.close(); dir.close(); } - + public void testNonIndexedFields() throws Exception { Directory dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir); @@ -345,25 +377,26 @@ public void testNonIndexedFields() throws Exception { iw.addDocument(doc); DirectoryReader ir = iw.getReader(); iw.close(); - + LeafReader ar = getOnlyLeafReader(ir); - + final FieldCache cache = FieldCache.DEFAULT; cache.purgeAllCaches(); assertEquals(0, cache.getCacheEntries().length); - + NumericDocValues ints = cache.getNumerics(ar, 
"bogusints", FieldCache.LEGACY_INT_PARSER); assertEquals(NO_MORE_DOCS, ints.nextDoc()); - + NumericDocValues longs = cache.getNumerics(ar, "boguslongs", FieldCache.LEGACY_LONG_PARSER); assertEquals(NO_MORE_DOCS, longs.nextDoc()); - + NumericDocValues floats = cache.getNumerics(ar, "bogusfloats", FieldCache.LEGACY_FLOAT_PARSER); assertEquals(NO_MORE_DOCS, floats.nextDoc()); - - NumericDocValues doubles = cache.getNumerics(ar, "bogusdoubles", FieldCache.LEGACY_DOUBLE_PARSER); + + NumericDocValues doubles = + cache.getNumerics(ar, "bogusdoubles", FieldCache.LEGACY_DOUBLE_PARSER); assertEquals(NO_MORE_DOCS, doubles.nextDoc()); - + // check that we cached nothing assertEquals(0, cache.getCacheEntries().length); ir.close(); @@ -409,7 +442,9 @@ public void testLongFieldCache() throws IOException { } iw.forceMerge(1); final DirectoryReader reader = iw.getReader(); - final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlyLeafReader(reader), "f", FieldCache.LEGACY_LONG_PARSER); + final NumericDocValues longs = + FieldCache.DEFAULT.getNumerics( + getOnlyLeafReader(reader), "f", FieldCache.LEGACY_LONG_PARSER); for (int i = 0; i < values.length; ++i) { if (missing.contains(i) == false) { assertEquals(i, longs.nextDoc()); @@ -461,7 +496,9 @@ public void testIntFieldCache() throws IOException { } iw.forceMerge(1); final DirectoryReader reader = iw.getReader(); - final NumericDocValues ints = FieldCache.DEFAULT.getNumerics(getOnlyLeafReader(reader), "f", FieldCache.LEGACY_INT_PARSER); + final NumericDocValues ints = + FieldCache.DEFAULT.getNumerics( + getOnlyLeafReader(reader), "f", FieldCache.LEGACY_INT_PARSER); for (int i = 0; i < values.length; ++i) { if (missing.contains(i) == false) { assertEquals(i, ints.nextDoc()); @@ -473,5 +510,4 @@ public void testIntFieldCache() throws IOException { iw.close(); dir.close(); } - } diff --git a/solr/core/src/test/org/apache/solr/uninverting/TestNumericTerms32.java b/solr/core/src/test/org/apache/solr/uninverting/TestNumericTerms32.java index 70451c078e6..452b0d2e558 100644 --- a/solr/core/src/test/org/apache/solr/uninverting/TestNumericTerms32.java +++ b/solr/core/src/test/org/apache/solr/uninverting/TestNumericTerms32.java @@ -18,14 +18,10 @@ import java.util.HashMap; import java.util.Map; - import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.RandomIndexWriter; -import org.apache.solr.legacy.LegacyFieldType; -import org.apache.solr.legacy.LegacyIntField; -import org.apache.solr.legacy.LegacyNumericRangeQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; @@ -33,8 +29,11 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.Directory; -import org.apache.solr.SolrTestCase; import org.apache.lucene.util.TestUtil; +import org.apache.solr.SolrTestCase; +import org.apache.solr.legacy.LegacyFieldType; +import org.apache.solr.legacy.LegacyIntField; +import org.apache.solr.legacy.LegacyNumericRangeQuery; import org.apache.solr.uninverting.UninvertingReader.Type; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -44,24 +43,27 @@ public class TestNumericTerms32 extends SolrTestCase { // distance of entries private static int distance; // shift the starting of the values to the left, to also have negative values: - private static final int startOffset = - 1 << 15; + private static final int 
startOffset = -1 << 15; // number of docs to generate for testing private static int noDocs; - + private static Directory directory = null; private static IndexReader reader = null; private static IndexSearcher searcher = null; - + @BeforeClass public static void beforeClass() throws Exception { noDocs = atLeast(4096); distance = (1 << 30) / noDocs; directory = newDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(random(), directory, - newIndexWriterConfig(new MockAnalyzer(random())) - .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000)) - .setMergePolicy(newLogMergePolicy())); - + RandomIndexWriter writer = + new RandomIndexWriter( + random(), + directory, + newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000)) + .setMergePolicy(newLogMergePolicy())); + final LegacyFieldType storedInt = new LegacyFieldType(LegacyIntField.TYPE_NOT_STORED); storedInt.setStored(true); storedInt.freeze(); @@ -75,35 +77,36 @@ public static void beforeClass() throws Exception { final LegacyFieldType storedInt2 = new LegacyFieldType(storedInt); storedInt2.setNumericPrecisionStep(2); - LegacyIntField - field8 = new LegacyIntField("field8", 0, storedInt8), - field4 = new LegacyIntField("field4", 0, storedInt4), - field2 = new LegacyIntField("field2", 0, storedInt2); - + LegacyIntField field8 = new LegacyIntField("field8", 0, storedInt8), + field4 = new LegacyIntField("field4", 0, storedInt4), + field2 = new LegacyIntField("field2", 0, storedInt2); + Document doc = new Document(); // add fields, that have a distance to test general functionality - doc.add(field8); doc.add(field4); doc.add(field2); - + doc.add(field8); + doc.add(field4); + doc.add(field2); + // Add a series of noDocs docs with increasing int values - for (int l=0; l map = new HashMap<>(); + + Map map = new HashMap<>(); map.put("field2", Type.LEGACY_INTEGER); map.put("field4", Type.LEGACY_INTEGER); map.put("field8", Type.LEGACY_INTEGER); reader = UninvertingReader.wrap(writer.getReader(), map); - searcher=newSearcher(reader); + searcher = newSearcher(reader); writer.close(); } - + @AfterClass public static void afterClass() throws Exception { searcher = null; @@ -117,28 +120,32 @@ public static void afterClass() throws Exception { directory = null; } } - + private void testSorting(int precisionStep) throws Exception { - String field="field"+precisionStep; + String field = "field" + precisionStep; // 10 random tests, the index order is ascending, // so using a reverse sort field should retun descending documents int num = TestUtil.nextInt(random(), 10, 20); for (int i = 0; i < num; i++) { - int lower=(int)(random().nextDouble()*noDocs*distance)+startOffset; - int upper=(int)(random().nextDouble()*noDocs*distance)+startOffset; - if (lower>upper) { - int a=lower; lower=upper; upper=a; + int lower = (int) (random().nextDouble() * noDocs * distance) + startOffset; + int upper = (int) (random().nextDouble() * noDocs * distance) + startOffset; + if (lower > upper) { + int a = lower; + lower = upper; + upper = a; } - Query tq= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, true); - TopDocs topDocs = searcher.search(tq, noDocs, new Sort(new SortField(field, SortField.Type.INT, true))); - if (topDocs.totalHits.value==0) continue; + Query tq = + LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, true); + TopDocs topDocs = + searcher.search(tq, noDocs, new Sort(new SortField(field, SortField.Type.INT, true))); + if 
(topDocs.totalHits.value == 0) continue; ScoreDoc[] sd = topDocs.scoreDocs; assertNotNull(sd); int last = searcher.doc(sd[0].doc).getField(field).numericValue().intValue(); - for (int j=1; jact ); - last=act; + assertTrue("Docs should be sorted backwards", last > act); + last = act; } } } @@ -147,14 +154,14 @@ private void testSorting(int precisionStep) throws Exception { public void testSorting_8bit() throws Exception { testSorting(8); } - + @Test public void testSorting_4bit() throws Exception { testSorting(4); } - + @Test public void testSorting_2bit() throws Exception { testSorting(2); - } + } } diff --git a/solr/core/src/test/org/apache/solr/uninverting/TestNumericTerms64.java b/solr/core/src/test/org/apache/solr/uninverting/TestNumericTerms64.java index 228503ed850..bb00e2fa30f 100644 --- a/solr/core/src/test/org/apache/solr/uninverting/TestNumericTerms64.java +++ b/solr/core/src/test/org/apache/solr/uninverting/TestNumericTerms64.java @@ -18,14 +18,10 @@ import java.util.HashMap; import java.util.Map; - import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.RandomIndexWriter; -import org.apache.solr.legacy.LegacyFieldType; -import org.apache.solr.legacy.LegacyLongField; -import org.apache.solr.legacy.LegacyNumericRangeQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; @@ -33,8 +29,11 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.Directory; -import org.apache.solr.SolrTestCase; import org.apache.lucene.util.TestUtil; +import org.apache.solr.SolrTestCase; +import org.apache.solr.legacy.LegacyFieldType; +import org.apache.solr.legacy.LegacyLongField; +import org.apache.solr.legacy.LegacyNumericRangeQuery; import org.apache.solr.uninverting.UninvertingReader.Type; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -44,23 +43,26 @@ public class TestNumericTerms64 extends SolrTestCase { // distance of entries private static long distance; // shift the starting of the values to the left, to also have negative values: - private static final long startOffset = - 1L << 31; + private static final long startOffset = -1L << 31; // number of docs to generate for testing private static int noDocs; - + private static Directory directory = null; private static IndexReader reader = null; private static IndexSearcher searcher = null; - + @BeforeClass public static void beforeClass() throws Exception { noDocs = atLeast(4096); distance = (1L << 60) / noDocs; directory = newDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(random(), directory, - newIndexWriterConfig(new MockAnalyzer(random())) - .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000)) - .setMergePolicy(newLogMergePolicy())); + RandomIndexWriter writer = + new RandomIndexWriter( + random(), + directory, + newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000)) + .setMergePolicy(newLogMergePolicy())); final LegacyFieldType storedLong = new LegacyFieldType(LegacyLongField.TYPE_NOT_STORED); storedLong.setStored(true); @@ -78,37 +80,39 @@ public static void beforeClass() throws Exception { final LegacyFieldType storedLong2 = new LegacyFieldType(storedLong); storedLong2.setNumericPrecisionStep(2); - LegacyLongField - field8 = new LegacyLongField("field8", 0L, storedLong8), - field6 = new 
LegacyLongField("field6", 0L, storedLong6), - field4 = new LegacyLongField("field4", 0L, storedLong4), - field2 = new LegacyLongField("field2", 0L, storedLong2); + LegacyLongField field8 = new LegacyLongField("field8", 0L, storedLong8), + field6 = new LegacyLongField("field6", 0L, storedLong6), + field4 = new LegacyLongField("field4", 0L, storedLong4), + field2 = new LegacyLongField("field2", 0L, storedLong2); Document doc = new Document(); // add fields, that have a distance to test general functionality - doc.add(field8); doc.add(field6); doc.add(field4); doc.add(field2); - + doc.add(field8); + doc.add(field6); + doc.add(field4); + doc.add(field2); + // Add a series of noDocs docs with increasing long values, by updating the fields - for (int l=0; l map = new HashMap<>(); + Map map = new HashMap<>(); map.put("field2", Type.LEGACY_LONG); map.put("field4", Type.LEGACY_LONG); map.put("field6", Type.LEGACY_LONG); map.put("field8", Type.LEGACY_LONG); reader = UninvertingReader.wrap(writer.getReader(), map); - searcher=newSearcher(reader); + searcher = newSearcher(reader); writer.close(); } - + @AfterClass public static void afterClass() throws Exception { searcher = null; @@ -122,28 +126,32 @@ public static void afterClass() throws Exception { directory = null; } } - + private void testSorting(int precisionStep) throws Exception { - String field="field"+precisionStep; + String field = "field" + precisionStep; // 10 random tests, the index order is ascending, // so using a reverse sort field should retun descending documents int num = TestUtil.nextInt(random(), 10, 20); for (int i = 0; i < num; i++) { - long lower=(long)(random().nextDouble()*noDocs*distance)+startOffset; - long upper=(long)(random().nextDouble()*noDocs*distance)+startOffset; - if (lower>upper) { - long a=lower; lower=upper; upper=a; + long lower = (long) (random().nextDouble() * noDocs * distance) + startOffset; + long upper = (long) (random().nextDouble() * noDocs * distance) + startOffset; + if (lower > upper) { + long a = lower; + lower = upper; + upper = a; } - Query tq= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, true); - TopDocs topDocs = searcher.search(tq, noDocs, new Sort(new SortField(field, SortField.Type.LONG, true))); - if (topDocs.totalHits.value==0) continue; + Query tq = + LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, true); + TopDocs topDocs = + searcher.search(tq, noDocs, new Sort(new SortField(field, SortField.Type.LONG, true))); + if (topDocs.totalHits.value == 0) continue; ScoreDoc[] sd = topDocs.scoreDocs; assertNotNull(sd); - long last=searcher.doc(sd[0].doc).getField(field).numericValue().longValue(); - for (int j=1; jact ); - last=act; + long last = searcher.doc(sd[0].doc).getField(field).numericValue().longValue(); + for (int j = 1; j < sd.length; j++) { + long act = searcher.doc(sd[j].doc).getField(field).numericValue().longValue(); + assertTrue("Docs should be sorted backwards", last > act); + last = act; } } } @@ -152,17 +160,17 @@ private void testSorting(int precisionStep) throws Exception { public void testSorting_8bit() throws Exception { testSorting(8); } - + @Test public void testSorting_6bit() throws Exception { testSorting(6); } - + @Test public void testSorting_4bit() throws Exception { testSorting(4); } - + @Test public void testSorting_2bit() throws Exception { testSorting(2); diff --git a/solr/core/src/test/org/apache/solr/uninverting/TestUninvertingReader.java 
b/solr/core/src/test/org/apache/solr/uninverting/TestUninvertingReader.java index 7cce8f21cac..dcdf36cf86f 100644 --- a/solr/core/src/test/org/apache/solr/uninverting/TestUninvertingReader.java +++ b/solr/core/src/test/org/apache/solr/uninverting/TestUninvertingReader.java @@ -24,10 +24,9 @@ import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; - import org.apache.lucene.document.Document; -import org.apache.lucene.document.Field.Store; import org.apache.lucene.document.Field; +import org.apache.lucene.document.Field.Store; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.StoredField; @@ -41,177 +40,181 @@ import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedSetDocValues; -import org.apache.solr.legacy.LegacyFieldType; -import org.apache.solr.legacy.LegacyIntField; -import org.apache.solr.legacy.LegacyLongField; -import org.apache.solr.legacy.LegacyNumericUtils; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.apache.solr.SolrTestCase; import org.apache.lucene.util.TestUtil; +import org.apache.solr.SolrTestCase; import org.apache.solr.index.SlowCompositeReaderWrapper; +import org.apache.solr.legacy.LegacyFieldType; +import org.apache.solr.legacy.LegacyIntField; +import org.apache.solr.legacy.LegacyLongField; +import org.apache.solr.legacy.LegacyNumericUtils; import org.apache.solr.uninverting.UninvertingReader.Type; public class TestUninvertingReader extends SolrTestCase { - + public void testSortedSetInteger() throws IOException { Directory dir = newDirectory(); IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null)); - + Document doc = new Document(); doc.add(new LegacyIntField("foo", 5, Field.Store.NO)); iw.addDocument(doc); - + doc = new Document(); doc.add(new LegacyIntField("foo", 5, Field.Store.NO)); doc.add(new LegacyIntField("foo", -3, Field.Store.NO)); iw.addDocument(doc); - + iw.forceMerge(1); iw.close(); - - DirectoryReader ir = UninvertingReader.wrap(DirectoryReader.open(dir), - Collections.singletonMap("foo", Type.SORTED_SET_INTEGER)); + + DirectoryReader ir = + UninvertingReader.wrap( + DirectoryReader.open(dir), Collections.singletonMap("foo", Type.SORTED_SET_INTEGER)); LeafReader ar = ir.leaves().get(0).reader(); SortedSetDocValues v = ar.getSortedSetDocValues("foo"); assertEquals(2, v.getValueCount()); - + assertEquals(0, v.nextDoc()); assertEquals(1, v.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd()); - + assertEquals(1, v.nextDoc()); assertEquals(0, v.nextOrd()); assertEquals(1, v.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd()); - + BytesRef value = v.lookupOrd(0); assertEquals(-3, LegacyNumericUtils.prefixCodedToInt(value)); - + value = v.lookupOrd(1); assertEquals(5, LegacyNumericUtils.prefixCodedToInt(value)); TestUtil.checkReader(ir); ir.close(); dir.close(); } - + public void testSortedSetFloat() throws IOException { Directory dir = newDirectory(); IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null)); - + Document doc = new Document(); doc.add(new LegacyIntField("foo", Float.floatToRawIntBits(5f), Field.Store.NO)); iw.addDocument(doc); - + doc = new Document(); doc.add(new LegacyIntField("foo", Float.floatToRawIntBits(5f), Field.Store.NO)); doc.add(new LegacyIntField("foo", Float.floatToRawIntBits(-3f), Field.Store.NO)); iw.addDocument(doc); - + iw.forceMerge(1); 
iw.close(); - - DirectoryReader ir = UninvertingReader.wrap(DirectoryReader.open(dir), - Collections.singletonMap("foo", Type.SORTED_SET_FLOAT)); + + DirectoryReader ir = + UninvertingReader.wrap( + DirectoryReader.open(dir), Collections.singletonMap("foo", Type.SORTED_SET_FLOAT)); LeafReader ar = ir.leaves().get(0).reader(); - + SortedSetDocValues v = ar.getSortedSetDocValues("foo"); assertEquals(2, v.getValueCount()); - + assertEquals(0, v.nextDoc()); assertEquals(1, v.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd()); - + assertEquals(1, v.nextDoc()); assertEquals(0, v.nextOrd()); assertEquals(1, v.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd()); - + BytesRef value = v.lookupOrd(0); assertEquals(Float.floatToRawIntBits(-3f), LegacyNumericUtils.prefixCodedToInt(value)); - + value = v.lookupOrd(1); assertEquals(Float.floatToRawIntBits(5f), LegacyNumericUtils.prefixCodedToInt(value)); TestUtil.checkReader(ir); ir.close(); dir.close(); } - + public void testSortedSetLong() throws IOException { Directory dir = newDirectory(); IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null)); - + Document doc = new Document(); doc.add(new LegacyLongField("foo", 5, Field.Store.NO)); iw.addDocument(doc); - + doc = new Document(); doc.add(new LegacyLongField("foo", 5, Field.Store.NO)); doc.add(new LegacyLongField("foo", -3, Field.Store.NO)); iw.addDocument(doc); - + iw.forceMerge(1); iw.close(); - - DirectoryReader ir = UninvertingReader.wrap(DirectoryReader.open(dir), - Collections.singletonMap("foo", Type.SORTED_SET_LONG)); + + DirectoryReader ir = + UninvertingReader.wrap( + DirectoryReader.open(dir), Collections.singletonMap("foo", Type.SORTED_SET_LONG)); LeafReader ar = ir.leaves().get(0).reader(); SortedSetDocValues v = ar.getSortedSetDocValues("foo"); assertEquals(2, v.getValueCount()); - + assertEquals(0, v.nextDoc()); assertEquals(1, v.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd()); - + assertEquals(1, v.nextDoc()); assertEquals(0, v.nextOrd()); assertEquals(1, v.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd()); - + BytesRef value = v.lookupOrd(0); assertEquals(-3, LegacyNumericUtils.prefixCodedToLong(value)); - + value = v.lookupOrd(1); assertEquals(5, LegacyNumericUtils.prefixCodedToLong(value)); TestUtil.checkReader(ir); ir.close(); dir.close(); } - + public void testSortedSetDouble() throws IOException { Directory dir = newDirectory(); IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null)); - + Document doc = new Document(); doc.add(new LegacyLongField("foo", Double.doubleToRawLongBits(5d), Field.Store.NO)); iw.addDocument(doc); - + doc = new Document(); doc.add(new LegacyLongField("foo", Double.doubleToRawLongBits(5d), Field.Store.NO)); doc.add(new LegacyLongField("foo", Double.doubleToRawLongBits(-3d), Field.Store.NO)); iw.addDocument(doc); - + iw.forceMerge(1); iw.close(); - - DirectoryReader ir = UninvertingReader.wrap(DirectoryReader.open(dir), - Collections.singletonMap("foo", Type.SORTED_SET_DOUBLE)); + + DirectoryReader ir = + UninvertingReader.wrap( + DirectoryReader.open(dir), Collections.singletonMap("foo", Type.SORTED_SET_DOUBLE)); LeafReader ar = ir.leaves().get(0).reader(); SortedSetDocValues v = ar.getSortedSetDocValues("foo"); assertEquals(2, v.getValueCount()); - + assertEquals(0, v.nextDoc()); assertEquals(1, v.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd()); - + assertEquals(1, v.nextDoc()); assertEquals(0, v.nextOrd()); assertEquals(1, 
v.nextOrd()); assertEquals(SortedSetDocValues.NO_MORE_ORDS, v.nextOrd()); - + BytesRef value = v.lookupOrd(0); assertEquals(Double.doubleToRawLongBits(-3d), LegacyNumericUtils.prefixCodedToLong(value)); - + value = v.lookupOrd(1); assertEquals(Double.doubleToRawLongBits(5d), LegacyNumericUtils.prefixCodedToLong(value)); TestUtil.checkReader(ir); @@ -219,16 +222,17 @@ public void testSortedSetDouble() throws IOException { dir.close(); } - - /** Tests {@link Type#SORTED_SET_INTEGER} using Integer based fields, with and w/o precision steps */ + /** + * Tests {@link Type#SORTED_SET_INTEGER} using Integer based fields, with and w/o precision steps + */ public void testSortedSetIntegerManyValues() throws IOException { final Directory dir = newDirectory(); final IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null)); - + final LegacyFieldType NO_TRIE_TYPE = new LegacyFieldType(LegacyIntField.TYPE_NOT_STORED); NO_TRIE_TYPE.setNumericPrecisionStep(Integer.MAX_VALUE); - final Map UNINVERT_MAP = new LinkedHashMap(); + final Map UNINVERT_MAP = new LinkedHashMap(); UNINVERT_MAP.put("notrie_single", Type.SORTED_SET_INTEGER); UNINVERT_MAP.put("notrie_multi", Type.SORTED_SET_INTEGER); UNINVERT_MAP.put("trie_single", Type.SORTED_SET_INTEGER); @@ -237,7 +241,6 @@ public void testSortedSetIntegerManyValues() throws IOException { MULTI_VALUES.add("trie_multi"); MULTI_VALUES.add("notrie_multi"); - final int NUM_DOCS = TestUtil.nextInt(random(), 200, 1500); final int MIN = TestUtil.nextInt(random(), 10, 100); final int MAX = MIN + TestUtil.nextInt(random(), 10, 100); @@ -272,26 +275,35 @@ public void testSortedSetIntegerManyValues() throws IOException { } iw.close(); - + final DirectoryReader ir = UninvertingReader.wrap(DirectoryReader.open(dir), UNINVERT_MAP); TestUtil.checkReader(ir); - + final int NUM_LEAVES = ir.leaves().size(); - + // check the leaves: no more then total set size for (LeafReaderContext rc : ir.leaves()) { final LeafReader ar = rc.reader(); for (String f : UNINVERT_MAP.keySet()) { final SortedSetDocValues v = DocValues.getSortedSet(ar, f); final long valSetSize = v.getValueCount(); - assertTrue(f + ": Expected no more then " + EXPECTED_VALSET_SIZE + " values per segment, got " + - valSetSize + " from: " + ar.toString(), - valSetSize <= EXPECTED_VALSET_SIZE); - + assertTrue( + f + + ": Expected no more then " + + EXPECTED_VALSET_SIZE + + " values per segment, got " + + valSetSize + + " from: " + + ar.toString(), + valSetSize <= EXPECTED_VALSET_SIZE); + if (1 == NUM_LEAVES && MULTI_VALUES.contains(f)) { - // tighter check on multi fields in single segment index since we know one doc has all of them - assertEquals(f + ": Single segment LeafReader's value set should have had exactly expected size", - EXPECTED_VALSET_SIZE, valSetSize); + // tighter check on multi fields in single segment index since we know one doc has all of + // them + assertEquals( + f + ": Single segment LeafReader's value set should have had exactly expected size", + EXPECTED_VALSET_SIZE, + valSetSize); } } } @@ -299,48 +311,50 @@ public void testSortedSetIntegerManyValues() throws IOException { // check the composite of all leaves: exact expectation of set size final LeafReader composite = SlowCompositeReaderWrapper.wrap(ir); TestUtil.checkReader(composite); - + for (String f : MULTI_VALUES) { final SortedSetDocValues v = composite.getSortedSetDocValues(f); final long valSetSize = v.getValueCount(); - assertEquals(f + ": Composite reader value set should have had exactly expected size", - EXPECTED_VALSET_SIZE, 
valSetSize); + assertEquals( + f + ": Composite reader value set should have had exactly expected size", + EXPECTED_VALSET_SIZE, + valSetSize); } - + ir.close(); dir.close(); } - + public void testSortedSetEmptyIndex() throws IOException { final Directory dir = newDirectory(); final IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null)); iw.close(); - - final Map UNINVERT_MAP = new LinkedHashMap(); + + final Map UNINVERT_MAP = new LinkedHashMap(); for (Type t : EnumSet.allOf(Type.class)) { UNINVERT_MAP.put(t.name(), t); } final DirectoryReader ir = UninvertingReader.wrap(DirectoryReader.open(dir), UNINVERT_MAP); TestUtil.checkReader(ir); - + final LeafReader composite = SlowCompositeReaderWrapper.wrap(ir); TestUtil.checkReader(composite); - - for (String f : UNINVERT_MAP.keySet()) { + + for (String f : UNINVERT_MAP.keySet()) { // check the leaves // (normally there are none for an empty index, so this is really just future // proofing in case that changes for some reason) for (LeafReaderContext rc : ir.leaves()) { final LeafReader ar = rc.reader(); - assertNull(f + ": Expected no doc values from empty index (leaf)", - ar.getSortedSetDocValues(f)); + assertNull( + f + ": Expected no doc values from empty index (leaf)", ar.getSortedSetDocValues(f)); } - + // check the composite - assertNull(f + ": Expected no doc values from empty index (composite)", - composite.getSortedSetDocValues(f)); - + assertNull( + f + ": Expected no doc values from empty index (composite)", + composite.getSortedSetDocValues(f)); } ir.close(); @@ -371,11 +385,13 @@ public void testFieldInfos() throws IOException { DirectoryReader ir = UninvertingReader.wrap(DirectoryReader.open(dir), uninvertingMap); LeafReader leafReader = ir.leaves().get(0).reader(); FieldInfos fieldInfos = leafReader.getFieldInfos(); - LeafReader originalLeafReader = ((UninvertingReader)leafReader).getDelegate(); + LeafReader originalLeafReader = ((UninvertingReader) leafReader).getDelegate(); assertNotSame(originalLeafReader.getFieldInfos(), fieldInfos); - assertSame("do not rebuild FieldInfo for unaffected fields", - originalLeafReader.getFieldInfos().fieldInfo("id"), fieldInfos.fieldInfo("id")); + assertSame( + "do not rebuild FieldInfo for unaffected fields", + originalLeafReader.getFieldInfos().fieldInfo("id"), + fieldInfos.fieldInfo("id")); FieldInfo intFInfo = fieldInfos.fieldInfo("int"); assertEquals(DocValuesType.NUMERIC, intFInfo.getDocValuesType()); @@ -399,5 +415,4 @@ public void testFieldInfos() throws IOException { ir.close(); dir.close(); } - } diff --git a/solr/core/src/test/org/apache/solr/update/AddBlockUpdateTest.java b/solr/core/src/test/org/apache/solr/update/AddBlockUpdateTest.java index e40cb6f181f..7901cb1484a 100644 --- a/solr/core/src/test/org/apache/solr/update/AddBlockUpdateTest.java +++ b/solr/core/src/test/org/apache/solr/update/AddBlockUpdateTest.java @@ -16,16 +16,6 @@ */ package org.apache.solr.update; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; -import javax.xml.stream.XMLInputFactory; -import javax.xml.stream.XMLStreamException; -import javax.xml.stream.XMLStreamReader; -import javax.xml.transform.Transformer; -import javax.xml.transform.TransformerException; -import javax.xml.transform.TransformerFactory; -import javax.xml.transform.dom.DOMSource; -import javax.xml.transform.stream.StreamResult; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; @@ -45,7 +35,16 @@ import 
java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicInteger; - +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.stream.XMLInputFactory; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.XMLStreamReader; +import javax.xml.transform.Transformer; +import javax.xml.transform.TransformerException; +import javax.xml.transform.TransformerFactory; +import javax.xml.transform.dom.DOMSource; +import javax.xml.transform.stream.StreamResult; import org.apache.commons.io.output.ByteArrayOutputStream; import org.apache.lucene.index.Term; import org.apache.lucene.search.TermQuery; @@ -62,11 +61,11 @@ import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.JavaBinCodec; +import org.apache.solr.common.util.SolrNamedThreadFactory; import org.apache.solr.handler.loader.XMLLoader; import org.apache.solr.request.LocalSolrQueryRequest; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.search.SolrIndexSearcher; -import org.apache.solr.common.util.SolrNamedThreadFactory; import org.apache.solr.util.RefCounted; import org.junit.After; import org.junit.AfterClass; @@ -90,7 +89,7 @@ public class AddBlockUpdateTest extends SolrTestCaseJ4 { private static final String parent = "parent_s"; private static final String type = "type_s"; - private final static AtomicInteger counter = new AtomicInteger(); + private static final AtomicInteger counter = new AtomicInteger(); private static ExecutorService exe; private static boolean cachedMode; @@ -99,23 +98,25 @@ public class AddBlockUpdateTest extends SolrTestCaseJ4 { private RefCounted searcherRef; private SolrIndexSearcher _searcher; - @Rule - public ExpectedException thrown = ExpectedException.none(); + @Rule public ExpectedException thrown = ExpectedException.none(); @BeforeClass public static void beforeClass() throws Exception { - String oldCacheNamePropValue = System - .getProperty("blockJoinParentFilterCache"); - System.setProperty("blockJoinParentFilterCache", (cachedMode = random() - .nextBoolean()) ? "blockJoinParentFilterCache" : "don't cache"); + String oldCacheNamePropValue = System.getProperty("blockJoinParentFilterCache"); + System.setProperty( + "blockJoinParentFilterCache", + (cachedMode = random().nextBoolean()) ? "blockJoinParentFilterCache" : "don't cache"); if (oldCacheNamePropValue != null) { System.setProperty("blockJoinParentFilterCache", oldCacheNamePropValue); } inputFactory = XMLInputFactory.newInstance(); exe = // Executors.newSingleThreadExecutor(); - rarely() ? ExecutorUtil.newMDCAwareFixedThreadPool(atLeast(2), new SolrNamedThreadFactory("AddBlockUpdateTest")) : ExecutorUtil - .newMDCAwareCachedThreadPool(new SolrNamedThreadFactory("AddBlockUpdateTest")); + rarely() + ? 
ExecutorUtil.newMDCAwareFixedThreadPool( + atLeast(2), new SolrNamedThreadFactory("AddBlockUpdateTest")) + : ExecutorUtil.newMDCAwareCachedThreadPool( + new SolrNamedThreadFactory("AddBlockUpdateTest")); counter.set(0); initCore("solrconfig.xml", "schema15.xml"); @@ -161,62 +162,78 @@ public static void afterClass() throws Exception { } @Test - public void testOverwrite() throws IOException{ - assertU(add( - nest(doc("id","X", parent, "X"), - doc(child,"a", "id", "66"), - doc(child,"b", "id", "66")))); - assertU(add( - nest(doc("id","Y", parent, "Y"), - doc(child,"a", "id", "66"), - doc(child,"b", "id", "66")))); - String overwritten = random().nextBoolean() ? "X": "Y"; - String dubbed = overwritten.equals("X") ? "Y":"X"; - - assertU(add( - nest(doc("id",overwritten, parent, overwritten), - doc(child,"c","id", "66"), - doc(child,"d","id", "66")), "overwrite", "true")); - assertU(add( - nest(doc("id",dubbed, parent, dubbed), - doc(child,"c","id", "66"), - doc(child,"d","id", "66")), "overwrite", "false")); + public void testOverwrite() throws IOException { + assertU( + add( + nest( + doc("id", "X", parent, "X"), + doc(child, "a", "id", "66"), + doc(child, "b", "id", "66")))); + assertU( + add( + nest( + doc("id", "Y", parent, "Y"), + doc(child, "a", "id", "66"), + doc(child, "b", "id", "66")))); + String overwritten = random().nextBoolean() ? "X" : "Y"; + String dubbed = overwritten.equals("X") ? "Y" : "X"; + + assertU( + add( + nest( + doc("id", overwritten, parent, overwritten), + doc(child, "c", "id", "66"), + doc(child, "d", "id", "66")), + "overwrite", + "true")); + assertU( + add( + nest( + doc("id", dubbed, parent, dubbed), + doc(child, "c", "id", "66"), + doc(child, "d", "id", "66")), + "overwrite", + "false")); assertU(commit()); - assertQ(req(parent+":"+overwritten, "//*[@numFound='1']")); - assertQ(req(parent+":"+dubbed, "//*[@numFound='2']")); + assertQ(req(parent + ":" + overwritten, "//*[@numFound='1']")); + assertQ(req(parent + ":" + dubbed, "//*[@numFound='2']")); final SolrIndexSearcher searcher = getSearcher(); assertSingleParentOf(searcher, one("ab"), dubbed); final TopDocs docs = searcher.search(join(one("cd")), 10); assertEquals(2, docs.totalHits.value); - final String pAct = searcher.doc(docs.scoreDocs[0].doc).get(parent)+ - searcher.doc(docs.scoreDocs[1].doc).get(parent); - assertTrue(pAct.contains(dubbed) && pAct.contains(overwritten) && pAct.length()==2); + final String pAct = + searcher.doc(docs.scoreDocs[0].doc).get(parent) + + searcher.doc(docs.scoreDocs[1].doc).get(parent); + assertTrue(pAct.contains(dubbed) && pAct.contains(overwritten) && pAct.length() == 2); assertQ(req("id:66", "//*[@numFound='6']")); - assertQ(req(child+":(a b)", "//*[@numFound='2']")); - assertQ(req(child+":(c d)", "//*[@numFound='4']")); + assertQ(req(child + ":(a b)", "//*[@numFound='2']")); + assertQ(req(child + ":(c d)", "//*[@numFound='4']")); } - private static XmlDoc nest(XmlDoc parent, XmlDoc ... children){ + private static XmlDoc nest(XmlDoc parent, XmlDoc... 
children) { XmlDoc xmlDoc = new XmlDoc(); - xmlDoc.xml = parent.xml.replace("", - Arrays.toString(children).replaceAll("[\\[\\]]", "")+""); + xmlDoc.xml = + parent.xml.replace( + "", Arrays.toString(children).replaceAll("[\\[\\]]", "") + ""); return xmlDoc; } @Test public void testBasics() throws Exception { - List blocks = new ArrayList<>(Arrays.asList( - block("abcD"), - block("efgH"), - merge(block("ijkL"), block("mnoP")), - merge(block("qrsT"), block("uvwX")), - block("Y"), - block("Z"))); + List blocks = + new ArrayList<>( + Arrays.asList( + block("abcD"), + block("efgH"), + merge(block("ijkL"), block("mnoP")), + merge(block("qrsT"), block("uvwX")), + block("Y"), + block("Z"))); Collections.shuffle(blocks, random()); @@ -234,8 +251,7 @@ public void testBasics() throws Exception { // log.trace(resp); int parentsNum = "DHLPTXYZ".length(); assertQ(req(parent + ":[* TO *]"), "//*[@numFound='" + parentsNum + "']"); - assertQ(req(child + ":[* TO *]"), "//*[@numFound='" - + (('z' - 'a' + 1) - parentsNum) + "']"); + assertQ(req(child + ":[* TO *]"), "//*[@numFound='" + (('z' - 'a' + 1) - parentsNum) + "']"); assertQ(req("*:*"), "//*[@numFound='" + ('z' - 'a' + 1) + "']"); assertSingleParentOf(searcher, one("abc"), "D"); assertSingleParentOf(searcher, one("efg"), "H"); @@ -244,11 +260,12 @@ public void testBasics() throws Exception { assertSingleParentOf(searcher, one("qrs"), "T"); assertSingleParentOf(searcher, one("uvw"), "X"); - assertQ(req("q",child+":(a b c)", "sort","_docid_ asc"), + assertQ( + req("q", child + ":(a b c)", "sort", "_docid_ asc"), "//*[@numFound='3']", // assert physical order of children - "//doc[1]/arr[@name='child_s']/str[text()='a']", - "//doc[2]/arr[@name='child_s']/str[text()='b']", - "//doc[3]/arr[@name='child_s']/str[text()='c']"); + "//doc[1]/arr[@name='child_s']/str[text()='a']", + "//doc[2]/arr[@name='child_s']/str[text()='b']", + "//doc[3]/arr[@name='child_s']/str[text()='c']"); } @Test @@ -280,7 +297,9 @@ public void testExceptionThrown() throws Exception { assertBlockU(commit()); final SolrIndexSearcher searcher = getSearcher(); - assertQ(req("q","*:*","indent","true", "fl","id,parent_s,child_s"), "//*[@numFound='" + "abcDefgH".length() + "']"); + assertQ( + req("q", "*:*", "indent", "true", "fl", "id,parent_s,child_s"), + "//*[@numFound='" + "abcDefgH".length() + "']"); assertSingleParentOf(searcher, one("abc"), "D"); assertSingleParentOf(searcher, one("efg"), "H"); @@ -291,9 +310,16 @@ public void testExceptionThrown() throws Exception { @Test public void testExceptionThrownChildDocWAnonymousChildren() throws Exception { - SolrInputDocument document1 = sdoc("id", id(), parent, "X", - "child1_s", sdoc("id", id(), "child_s", "y"), - "child2_s", sdoc("id", id(), "child_s", "z")); + SolrInputDocument document1 = + sdoc( + "id", + id(), + parent, + "X", + "child1_s", + sdoc("id", id(), "child_s", "y"), + "child2_s", + sdoc("id", id(), "child_s", "z")); SolrInputDocument exceptionChildDoc = (SolrInputDocument) document1.get("child1_s").getValue(); addChildren("child", exceptionChildDoc, 0, false); @@ -307,31 +333,36 @@ public void testExceptionThrownChildDocWAnonymousChildren() throws Exception { @Test public void testSolrNestedFieldsList() throws Exception { final String id1 = id(); - List children1 = Arrays.asList(sdoc("id", id(), child, "y"), sdoc("id", id(), child, "z")); + List children1 = + Arrays.asList(sdoc("id", id(), child, "y"), sdoc("id", id(), child, "z")); - SolrInputDocument document1 = sdoc("id", id1, parent, "X", - "children", children1); + 
SolrInputDocument document1 = sdoc("id", id1, parent, "X", "children", children1); final String id2 = id(); - List children2 = Arrays.asList(sdoc("id", id(), child, "b"), sdoc("id", id(), child, "c")); + List children2 = + Arrays.asList(sdoc("id", id(), child, "b"), sdoc("id", id(), child, "c")); - SolrInputDocument document2 = sdoc("id", id2, parent, "A", - "children", children2); + SolrInputDocument document2 = sdoc("id", id2, parent, "A", "children", children2); indexSolrInputDocumentsDirectly(document1, document2); final SolrIndexSearcher searcher = getSearcher(); - assertJQ(req("q","*:*", - "fl","*", - "sort","id asc", - "wt","json"), + assertJQ( + req("q", "*:*", "fl", "*", "sort", "id asc", "wt", "json"), "/response/numFound==" + "XyzAbc".length()); - assertJQ(req("q",parent+":" + document2.getFieldValue(parent), - "fl","*", - "sort","id asc", - "wt","json"), + assertJQ( + req( + "q", + parent + ":" + document2.getFieldValue(parent), + "fl", + "*", + "sort", + "id asc", + "wt", + "json"), "/response/docs/[0]/id=='" + document2.getFieldValue("id") + "'"); - assertQ(req("q",child+":(y z b c)", "sort","_docid_ asc"), + assertQ( + req("q", child + ":(y z b c)", "sort", "_docid_ asc"), "//*[@numFound='" + "yzbc".length() + "']", // assert physical order of children "//doc[1]/arr[@name='child_s']/str[text()='y']", "//doc[2]/arr[@name='child_s']/str[text()='z']", @@ -343,28 +374,47 @@ public void testSolrNestedFieldsList() throws Exception { @Test public void testSolrNestedFieldsSingleVal() throws Exception { - SolrInputDocument document1 = sdoc("id", id(), parent, "X", - "child1_s", sdoc("id", id(), "child_s", "y"), - "child2_s", sdoc("id", id(), "child_s", "z")); - - SolrInputDocument document2 = sdoc("id", id(), parent, "A", - "child1_s", sdoc("id", id(), "child_s", "b"), - "child2_s", sdoc("id", id(), "child_s", "c")); + SolrInputDocument document1 = + sdoc( + "id", + id(), + parent, + "X", + "child1_s", + sdoc("id", id(), "child_s", "y"), + "child2_s", + sdoc("id", id(), "child_s", "z")); + + SolrInputDocument document2 = + sdoc( + "id", + id(), + parent, + "A", + "child1_s", + sdoc("id", id(), "child_s", "b"), + "child2_s", + sdoc("id", id(), "child_s", "c")); indexSolrInputDocumentsDirectly(document1, document2); final SolrIndexSearcher searcher = getSearcher(); - assertJQ(req("q","*:*", - "fl","*", - "sort","id asc", - "wt","json"), + assertJQ( + req("q", "*:*", "fl", "*", "sort", "id asc", "wt", "json"), "/response/numFound==" + "XyzAbc".length()); - assertJQ(req("q",parent+":" + document2.getFieldValue(parent), - "fl","*", - "sort","id asc", - "wt","json"), + assertJQ( + req( + "q", + parent + ":" + document2.getFieldValue(parent), + "fl", + "*", + "sort", + "id asc", + "wt", + "json"), "/response/docs/[0]/id=='" + document2.getFieldValue("id") + "'"); - assertQ(req("q",child+":(y z b c)", "sort","_docid_ asc"), + assertQ( + req("q", child + ":(y z b c)", "sort", "_docid_ asc"), "//*[@numFound='" + "yzbc".length() + "']", // assert physical order of children "//doc[1]/arr[@name='child_s']/str[text()='y']", "//doc[2]/arr[@name='child_s']/str[text()='z']", @@ -381,49 +431,56 @@ public void testSolrJXML() throws Exception { List docs = new ArrayList<>(); - SolrInputDocument document1 = new SolrInputDocument() { - { - final String id = id(); - addField("id", id); - addField("parent_s", "X"); - - ArrayList ch1 = new ArrayList<>( - Arrays.asList(new SolrInputDocument() { - { - addField("id", id()); - addField("child_s", "y"); - } - }, new SolrInputDocument() { - { - addField("id", 
id()); - addField("child_s", "z"); - } - })); - - Collections.shuffle(ch1, random()); - addChildDocuments(ch1); - } - }; - - SolrInputDocument document2 = new SolrInputDocument() { - { - final String id = id(); - addField("id", id); - addField("parent_s", "A"); - addChildDocument(new SolrInputDocument() { + SolrInputDocument document1 = + new SolrInputDocument() { { - addField("id", id()); - addField("child_s", "b"); + final String id = id(); + addField("id", id); + addField("parent_s", "X"); + + ArrayList ch1 = + new ArrayList<>( + Arrays.asList( + new SolrInputDocument() { + { + addField("id", id()); + addField("child_s", "y"); + } + }, + new SolrInputDocument() { + { + addField("id", id()); + addField("child_s", "z"); + } + })); + + Collections.shuffle(ch1, random()); + addChildDocuments(ch1); } - }); - addChildDocument(new SolrInputDocument() { + }; + + SolrInputDocument document2 = + new SolrInputDocument() { { - addField("id", id()); - addField("child_s", "c"); + final String id = id(); + addField("id", id); + addField("parent_s", "A"); + addChildDocument( + new SolrInputDocument() { + { + addField("id", id()); + addField("child_s", "b"); + } + }); + addChildDocument( + new SolrInputDocument() { + { + addField("id", id()); + addField("child_s", "c"); + } + }); } - }); - } - }; + }; docs.add(document1); docs.add(document2); @@ -437,16 +494,13 @@ public void testSolrJXML() throws Exception { assertBlockU(os.toString()); assertU(commit()); - assertJQ(req("q","*:*", - "fl","*", - "sort","id asc", - "wt","json"), - "/response/numFound==" + 6); + assertJQ( + req("q", "*:*", "fl", "*", "sort", "id asc", "wt", "json"), "/response/numFound==" + 6); final SolrIndexSearcher searcher = getSearcher(); assertSingleParentOf(searcher, one("yz"), "X"); assertSingleParentOf(searcher, one("bc"), "A"); } - //This is the same as testSolrJXML above but uses the XMLLoader + // This is the same as testSolrJXML above but uses the XMLLoader // to illustrate the structure of the XML documents @Test public void testXML() throws IOException, XMLStreamException { @@ -455,47 +509,44 @@ public void testXML() throws IOException, XMLStreamException { List docs = new ArrayList<>(); String xml_doc1 = - "" + - " 1" + - " X" + - " " + - " 2" + - " y" + - ""+ - " " + - " 3" + - " z" + - ""+ - ""; + "" + + " 1" + + " X" + + " " + + " 2" + + " y" + + "" + + " " + + " 3" + + " z" + + "" + + ""; String xml_doc2 = - "" + - " 4" + - " A" + - " " + - " 5" + - " b" + - ""+ - " " + - " 6" + - " c" + - ""+ - ""; - - - XMLStreamReader parser = - inputFactory.createXMLStreamReader( new StringReader( xml_doc1 ) ); + "" + + " 4" + + " A" + + " " + + " 5" + + " b" + + "" + + " " + + " 6" + + " c" + + "" + + ""; + + XMLStreamReader parser = inputFactory.createXMLStreamReader(new StringReader(xml_doc1)); parser.next(); // read the START document... - //null for the processor is all right here + // null for the processor is all right here XMLLoader loader = new XMLLoader(); - SolrInputDocument document1 = loader.readDoc( parser ); + SolrInputDocument document1 = loader.readDoc(parser); - XMLStreamReader parser2 = - inputFactory.createXMLStreamReader( new StringReader( xml_doc2 ) ); - parser2.next(); // read the START document... - //null for the processor is all right here - //XMLLoader loader = new XMLLoader(); - SolrInputDocument document2 = loader.readDoc( parser2 ); + XMLStreamReader parser2 = inputFactory.createXMLStreamReader(new StringReader(xml_doc2)); + parser2.next(); // read the START document... 
+ // null for the processor is all right here + // XMLLoader loader = new XMLLoader(); + SolrInputDocument document2 = loader.readDoc(parser2); docs.add(document1); docs.add(document2); @@ -517,68 +568,91 @@ public void testXML() throws IOException, XMLStreamException { @Test public void testXMLMultiLevelLabeledChildren() throws XMLStreamException { String xml_doc1 = - "" + - " 1" + - " " + - " X" + - " " + - " " + - " 2" + - " y" + - " " + - " " + - " 3" + - " z" + - " " + - " " + - ""; + "" + + " 1" + + " " + + " X" + + " " + + " " + + " 2" + + " y" + + " " + + " " + + " 3" + + " z" + + " " + + " " + + ""; String xml_doc2 = - "" + - " 4" + - " A" + - " " + - " " + - " 5" + - " b" + - " " + - " " + - " 7" + - " d" + - " " + - " " + - " " + - " " + - " " + - " " + - " 6" + - " c" + - " " + - " " + - ""; - - XMLStreamReader parser = - inputFactory.createXMLStreamReader(new StringReader(xml_doc1)); + "" + + " 4" + + " A" + + " " + + " " + + " 5" + + " b" + + " " + + " " + + " 7" + + " d" + + " " + + " " + + " " + + " " + + " " + + " " + + " 6" + + " c" + + " " + + " " + + ""; + + XMLStreamReader parser = inputFactory.createXMLStreamReader(new StringReader(xml_doc1)); parser.next(); // read the START document... - //null for the processor is all right here + // null for the processor is all right here XMLLoader loader = new XMLLoader(); SolrInputDocument document1 = loader.readDoc(parser); - XMLStreamReader parser2 = - inputFactory.createXMLStreamReader(new StringReader(xml_doc2)); + XMLStreamReader parser2 = inputFactory.createXMLStreamReader(new StringReader(xml_doc2)); parser2.next(); // read the START document... - //null for the processor is all right here - //XMLLoader loader = new XMLLoader(); + // null for the processor is all right here + // XMLLoader loader = new XMLLoader(); SolrInputDocument document2 = loader.readDoc(parser2); assertFalse(document1.hasChildDocuments()); - assertEquals(document1.toString(), sdoc("id", "1", "empty_s", "", "parent_s", "X", "test", - sdocs(sdoc("id", "2", "child_s", "y"), sdoc("id", "3", "child_s", "z"))).toString()); + assertEquals( + document1.toString(), + sdoc( + "id", + "1", + "empty_s", + "", + "parent_s", + "X", + "test", + sdocs(sdoc("id", "2", "child_s", "y"), sdoc("id", "3", "child_s", "z"))) + .toString()); assertFalse(document2.hasChildDocuments()); - assertEquals(document2.toString(), sdoc("id", "4", "parent_s", "A", "test", - sdocs(sdoc("id", "5", "child_s", "b", "grandChild", Collections.singleton(sdoc("id", "7", "child_s", "d"))), - sdoc("id", "6", "child_s", "c"))).toString()); + assertEquals( + document2.toString(), + sdoc( + "id", + "4", + "parent_s", + "A", + "test", + sdocs( + sdoc( + "id", + "5", + "child_s", + "b", + "grandChild", + Collections.singleton(sdoc("id", "7", "child_s", "d"))), + sdoc("id", "6", "child_s", "c"))) + .toString()); } @Test @@ -588,61 +662,77 @@ public void testXMLLabeledChildren() throws IOException, XMLStreamException { List docs = new ArrayList<>(); String xml_doc1 = - "" + - " 1" + - " " + - " X" + - " " + - " " + - " 2" + - " y" + - " "+ - " " + - " 3" + - " z" + - " " + - " " + - ""; + "" + + " 1" + + " " + + " X" + + " " + + " " + + " 2" + + " y" + + " " + + " " + + " 3" + + " z" + + " " + + " " + + ""; String xml_doc2 = - "" + - " 4" + - " A" + - " " + - " " + - " 5" + - " b" + - " "+ - " " + - " " + - " " + - " 6" + - " c" + - " " + - " " + - ""; - - XMLStreamReader parser = - inputFactory.createXMLStreamReader( new StringReader( xml_doc1 ) ); + "" + + " 4" + + " A" + + " " + + " " + + " 5" + + 
" b" + + " " + + " " + + " " + + " " + + " 6" + + " c" + + " " + + " " + + ""; + + XMLStreamReader parser = inputFactory.createXMLStreamReader(new StringReader(xml_doc1)); parser.next(); // read the START document... - //null for the processor is all right here + // null for the processor is all right here XMLLoader loader = new XMLLoader(); - SolrInputDocument document1 = loader.readDoc( parser ); + SolrInputDocument document1 = loader.readDoc(parser); - XMLStreamReader parser2 = - inputFactory.createXMLStreamReader( new StringReader( xml_doc2 ) ); + XMLStreamReader parser2 = inputFactory.createXMLStreamReader(new StringReader(xml_doc2)); parser2.next(); // read the START document... - //null for the processor is all right here - //XMLLoader loader = new XMLLoader(); - SolrInputDocument document2 = loader.readDoc( parser2 ); + // null for the processor is all right here + // XMLLoader loader = new XMLLoader(); + SolrInputDocument document2 = loader.readDoc(parser2); assertFalse(document1.hasChildDocuments()); - assertEquals(document1.toString(), sdoc("id", "1", "empty_s", "", "parent_s", "X", "test", - sdocs(sdoc("id", "2", "child_s", "y"), sdoc("id", "3", "child_s", "z"))).toString()); + assertEquals( + document1.toString(), + sdoc( + "id", + "1", + "empty_s", + "", + "parent_s", + "X", + "test", + sdocs(sdoc("id", "2", "child_s", "y"), sdoc("id", "3", "child_s", "z"))) + .toString()); assertFalse(document2.hasChildDocuments()); - assertEquals(document2.toString(), sdoc("id", "4", "parent_s", "A", "test", - sdocs(sdoc("id", "5", "child_s", "b"), sdoc("id", "6", "child_s", "c"))).toString()); + assertEquals( + document2.toString(), + sdoc( + "id", + "4", + "parent_s", + "A", + "test", + sdocs(sdoc("id", "5", "child_s", "b"), sdoc("id", "6", "child_s", "c"))) + .toString()); docs.add(document1); docs.add(document2); @@ -669,7 +759,7 @@ public void testJavaBinCodecNestedRelation() throws IOException { int childsNum = atLeast(10); Map children = new HashMap<>(childsNum); - for(int i = 0; i < childsNum; ++i) { + for (int i = 0; i < childsNum; ++i) { SolrInputDocument child = new SolrInputDocument(); child.addField("key", (i + 5) * atLeast(4)); String childKey = String.format(Locale.ROOT, "child%d", i); @@ -682,18 +772,19 @@ public void testJavaBinCodecNestedRelation() throws IOException { jbc.marshal(topDocument, os); } byte[] buffer = os.toByteArray(); - //now read the Object back + // now read the Object back SolrInputDocument result; - try (JavaBinCodec jbc = new JavaBinCodec(); InputStream is = new ByteArrayInputStream(buffer)) { + try (JavaBinCodec jbc = new JavaBinCodec(); + InputStream is = new ByteArrayInputStream(buffer)) { result = (SolrInputDocument) jbc.unmarshal(is); } assertTrue(compareSolrInputDocument(topDocument, result)); } - @Test - public void testJavaBinCodec() throws IOException { //actually this test must be in other test class + public void testJavaBinCodec() + throws IOException { // actually this test must be in other test class SolrInputDocument topDocument = new SolrInputDocument(); topDocument.addField("parent_f1", "v1"); topDocument.addField("parent_f2", "v2"); @@ -708,9 +799,10 @@ public void testJavaBinCodec() throws IOException { //actually this test must be jbc.marshal(topDocument, os); } byte[] buffer = os.toByteArray(); - //now read the Object back + // now read the Object back SolrInputDocument result; - try (JavaBinCodec jbc = new JavaBinCodec(); InputStream is = new ByteArrayInputStream(buffer)) { + try (JavaBinCodec jbc = new JavaBinCodec(); + 
InputStream is = new ByteArrayInputStream(buffer)) { result = (SolrInputDocument) jbc.unmarshal(is); } assertEquals(2, result.size()); @@ -724,7 +816,8 @@ public void testJavaBinCodec() throws IOException { //actually this test must be for (int childIndex = 0; childIndex < childsNum; ++childIndex) { SolrInputDocument child = resultChilds.get(childIndex); for (int fieldNum = 0; fieldNum < childIndex; ++fieldNum) { - assertEquals(childIndex + "value" + fieldNum, child.getFieldValue(childIndex + "child" + fieldNum)); + assertEquals( + childIndex + "value" + fieldNum, child.getFieldValue(childIndex + "child" + fieldNum)); } List grandChilds = child.getChildDocuments(); @@ -735,16 +828,19 @@ public void testJavaBinCodec() throws IOException { //actually this test must be SolrInputDocument grandChild = grandChilds.get(grandIndex); assertFalse(grandChild.hasChildDocuments()); for (int fieldNum = 0; fieldNum < grandIndex; ++fieldNum) { - assertEquals(grandIndex + "value" + fieldNum, grandChild.getFieldValue(grandIndex + "grand" + fieldNum)); + assertEquals( + grandIndex + "value" + fieldNum, + grandChild.getFieldValue(grandIndex + "grand" + fieldNum)); } } } } - private void addChildren(String prefix, SolrInputDocument topDocument, int childIndex, boolean lastLevel) { + private void addChildren( + String prefix, SolrInputDocument topDocument, int childIndex, boolean lastLevel) { SolrInputDocument childDocument = new SolrInputDocument(); for (int index = 0; index < childIndex; ++index) { - childDocument.addField(childIndex + prefix + index, childIndex + "value"+ index); + childDocument.addField(childIndex + prefix + index, childIndex + "value" + index); } if (!lastLevel) { @@ -756,11 +852,10 @@ private void addChildren(String prefix, SolrInputDocument topDocument, int child } /** - * on the given abcD it generates one parent doc, taking D from the tail and - * two subdocs relaitons ab and c uniq ids are supplied also + * on the given abcD it generates one parent doc, taking D from the tail and two subdocs relaitons + * ab and c uniq ids are supplied also * - *
-   * {@code
+   * <pre>{@code
    * <add>
    *  <doc>
    *    <field name="parent_s">D</field>
@@ -778,9 +873,8 @@ private void addChildren(String prefix, SolrInputDocument topDocument, int child
    *    </doc>
    *  </doc>
    * </add>
-   * }
-   * </pre>
-   * */
+   * }</pre>
+ */ private Document block(String string) throws ParserConfigurationException { Document document = getDocument(); Element root = document.createElement("add"); @@ -790,15 +884,13 @@ private Document block(String string) throws ParserConfigurationException { if (string.length() > 0) { // last character is a top parent - attachField(document, doc, parent, - String.valueOf(string.charAt(string.length() - 1))); + attachField(document, doc, parent, String.valueOf(string.charAt(string.length() - 1))); attachField(document, doc, "id", id()); // add subdocs int type = 1; for (int i = 0; i < string.length() - 1; i += 2) { - String relation = string.substring(i, - Math.min(i + 2, string.length() - 1)); + String relation = string.substring(i, Math.min(i + 2, string.length() - 1)); attachSubDocs(document, doc, relation, type); type++; } @@ -817,10 +909,10 @@ private void attachSubDocs(Document document, Element parent, String relation, i } } - private void indexSolrInputDocumentsDirectly(SolrInputDocument ... docs) throws IOException { + private void indexSolrInputDocumentsDirectly(SolrInputDocument... docs) throws IOException { SolrQueryRequest coreReq = new LocalSolrQueryRequest(h.getCore(), new ModifiableSolrParams()); AddUpdateCommand updateCmd = new AddUpdateCommand(coreReq); - for (SolrInputDocument doc: docs) { + for (SolrInputDocument doc : docs) { updateCmd.solrDoc = doc; h.getCore().getUpdateHandler().addDoc(updateCmd); updateCmd.clear(); @@ -835,15 +927,13 @@ private void indexSolrInputDocumentsDirectly(SolrInputDocument ... docs) throws * {@code ... + ... = ... + ...} * * - * @param doc1 - * first document - * @param doc2 - * second document + * @param doc1 first document + * @param doc2 second document * @return merged document */ private Document merge(Document doc1, Document doc2) { NodeList doc2ChildNodes = doc2.getDocumentElement().getChildNodes(); - for(int i = 0; i < doc2ChildNodes.getLength(); i++) { + for (int i = 0; i < doc2ChildNodes.getLength(); i++) { Node doc2ChildNode = doc2ChildNodes.item(i); doc1.getDocumentElement().appendChild(doc1.importNode(doc2ChildNode, true)); doc2.getDocumentElement().removeChild(doc2ChildNode); @@ -867,8 +957,9 @@ private String one(String string) { return "" + string.charAt(random().nextInt(string.length())); } - protected void assertSingleParentOf(final SolrIndexSearcher searcher, - final String childTerm, String parentExp) throws IOException { + protected void assertSingleParentOf( + final SolrIndexSearcher searcher, final String childTerm, String parentExp) + throws IOException { final TopDocs docs = searcher.search(join(childTerm), 10); assertEquals(1, docs.totalHits.value); final String pAct = searcher.doc(docs.scoreDocs[0].doc).get(parent); @@ -877,8 +968,9 @@ protected void assertSingleParentOf(final SolrIndexSearcher searcher, protected ToParentBlockJoinQuery join(final String childTerm) { return new ToParentBlockJoinQuery( - new TermQuery(new Term(child, childTerm)), new QueryBitSetProducer( - new TermRangeQuery(parent, null, null, false, false)), ScoreMode.None); + new TermQuery(new Term(child, childTerm)), + new QueryBitSetProducer(new TermRangeQuery(parent, null, null, false, false)), + ScoreMode.None); } private Collection> callables(List blocks) { @@ -886,15 +978,17 @@ private Collection> callables(List blocks) { for (Document block : blocks) { final String msg = getStringFromDocument(block); if (msg.length() > 0) { - rez.add(() -> { - assertBlockU(msg); - return null; - }); + rez.add( + () -> { + assertBlockU(msg); + return null; 
+ }); if (rarely()) { - rez.add(() -> { - assertBlockU(commit()); - return null; - }); + rez.add( + () -> { + assertBlockU(commit()); + return null; + }); } } } @@ -921,7 +1015,7 @@ private void assertBlockU(final String msg, String expected) { } public static String getStringFromDocument(Document doc) { - try (StringWriter writer = new StringWriter()){ + try (StringWriter writer = new StringWriter()) { TransformerFactory tf = TransformerFactory.newInstance(); Transformer transformer = tf.newTransformer(); transformer.transform(new DOMSource(doc), new StreamResult(writer)); diff --git a/solr/core/src/test/org/apache/solr/update/AnalysisErrorHandlingTest.java b/solr/core/src/test/org/apache/solr/update/AnalysisErrorHandlingTest.java index 535386f7eea..83ef6719cab 100644 --- a/solr/core/src/test/org/apache/solr/update/AnalysisErrorHandlingTest.java +++ b/solr/core/src/test/org/apache/solr/update/AnalysisErrorHandlingTest.java @@ -22,24 +22,29 @@ import org.junit.Test; /** - * Test that runtime exceptions thrown during analysis - * result in Solr errors that contain the document ID. + * Test that runtime exceptions thrown during analysis result in Solr errors that contain the + * document ID. */ public class AnalysisErrorHandlingTest extends SolrTestCaseJ4 { - public String getCoreName() { return "basic"; } + public String getCoreName() { + return "basic"; + } @BeforeClass public static void beforeTests() throws Exception { - initCore("solrconfig-basic.xml","solr/analysisconfs/analysis-err-schema.xml"); + initCore("solrconfig-basic.xml", "solr/analysisconfs/analysis-err-schema.xml"); } @Test public void testMultipleUpdatesPerAdd() { clearIndex(); - SolrException se = expectThrows(SolrException.class, - () -> h.update("1Alas Poor Yorik") - ); + SolrException se = + expectThrows( + SolrException.class, + () -> + h.update( + "1Alas Poor Yorik")); assertTrue(se.getMessage().contains("Exception writing document id 1 to the index")); } } diff --git a/solr/core/src/test/org/apache/solr/update/DataDrivenBlockJoinTest.java b/solr/core/src/test/org/apache/solr/update/DataDrivenBlockJoinTest.java index 5d643df8bd5..777a72601ad 100644 --- a/solr/core/src/test/org/apache/solr/update/DataDrivenBlockJoinTest.java +++ b/solr/core/src/test/org/apache/solr/update/DataDrivenBlockJoinTest.java @@ -17,7 +17,6 @@ package org.apache.solr.update; import java.io.File; - import org.apache.commons.io.FileUtils; import org.apache.solr.SolrTestCaseJ4; import org.junit.Before; @@ -30,41 +29,46 @@ public class DataDrivenBlockJoinTest extends SolrTestCaseJ4 { private static final String collection = "collection1"; private static final String confDir = collection + "/conf"; - @Before public void before() throws Exception { tmpSolrHome = createTempDir().toFile(); tmpConfDir = new File(tmpSolrHome, confDir); File testHomeConfDir = new File(TEST_HOME(), confDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "solrconfig-schemaless.xml"), tmpConfDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema-add-schema-fields-update-processor.xml"), tmpConfDir); - FileUtils.copyFileToDirectory(new File(testHomeConfDir, "solrconfig.snippet.randomindexconfig.xml"), tmpConfDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "solrconfig-schemaless.xml"), tmpConfDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "schema-add-schema-fields-update-processor.xml"), tmpConfDir); + FileUtils.copyFileToDirectory( + new File(testHomeConfDir, "solrconfig.snippet.randomindexconfig.xml"), 
tmpConfDir); System.setProperty("managed.schema.mutable", "true"); System.setProperty("enable.update.log", "false"); - initCore("solrconfig-schemaless.xml", "schema-add-schema-fields-update-processor.xml", tmpSolrHome.getPath()); + initCore( + "solrconfig-schemaless.xml", + "schema-add-schema-fields-update-processor.xml", + tmpSolrHome.getPath()); } @Test public void testAddNestedDocuments() throws Exception { - assertU("" - + " " - + " 1" - + " X" - + " 8" - + " " - + " 2" - + " y" - + " 8.138" - + " " - + " 3" - + " z" - + " 8.138.4498" - + " " - + " " - + " " - + ""); + assertU( + "" + + " " + + " 1" + + " X" + + " 8" + + " " + + " 2" + + " y" + + " 8.138" + + " " + + " 3" + + " z" + + " 8.138.4498" + + " " + + " " + + " " + + ""); assertU(""); } } - diff --git a/solr/core/src/test/org/apache/solr/update/DeleteByIdWithRouterFieldTest.java b/solr/core/src/test/org/apache/solr/update/DeleteByIdWithRouterFieldTest.java index af16af4dbf1..be7d4ec90bc 100644 --- a/solr/core/src/test/org/apache/solr/update/DeleteByIdWithRouterFieldTest.java +++ b/solr/core/src/test/org/apache/solr/update/DeleteByIdWithRouterFieldTest.java @@ -19,20 +19,18 @@ import java.util.ArrayList; import java.util.Collections; -import java.util.List; import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; import java.util.stream.Collectors; - import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.TestUtil; - import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.impl.LBSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.UpdateRequest; -import org.apache.solr.cloud.SolrCloudTestCase; import org.apache.solr.cloud.CloudInspectUtil; +import org.apache.solr.cloud.SolrCloudTestCase; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.cloud.ClusterState; @@ -41,7 +39,6 @@ import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.params.ShardParams; import org.apache.solr.common.params.SolrParams; - import org.junit.After; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -51,31 +48,33 @@ public class DeleteByIdWithRouterFieldTest extends SolrCloudTestCase { public static final String COLL = "test"; public static final String ROUTE_FIELD = "field_s"; public static final int NUM_SHARDS = 3; - + private static final List clients = new ArrayList<>(); // not CloudSolrClient - /** - * A randomized prefix to put on every route value. - * This helps ensure that documents wind up on diff shards between diff test runs + /** + * A randomized prefix to put on every route value. This helps ensure that documents wind up on + * diff shards between diff test runs */ private static String RVAL_PRE = null; - + @BeforeClass public static void setupClusterAndCollection() throws Exception { RVAL_PRE = TestUtil.randomRealisticUnicodeString(random()); - + // sometimes use 2 replicas of every shard so we hit more interesting update code paths final int numReplicas = usually() ? 
1 : 2; - - configureCluster(1 + (NUM_SHARDS * numReplicas) ) // we'll have one node that doesn't host any replicas - .addConfig("conf", configset("cloud-minimal")) - .configure(); - - assertTrue(CollectionAdminRequest.createCollection(COLL, "conf", NUM_SHARDS, numReplicas) - .setRouterField(ROUTE_FIELD) - .process(cluster.getSolrClient()) - .isSuccess()); - + + configureCluster( + 1 + (NUM_SHARDS * numReplicas)) // we'll have one node that doesn't host any replicas + .addConfig("conf", configset("cloud-minimal")) + .configure(); + + assertTrue( + CollectionAdminRequest.createCollection(COLL, "conf", NUM_SHARDS, numReplicas) + .setRouterField(ROUTE_FIELD) + .process(cluster.getSolrClient()) + .isSuccess()); + cluster.getSolrClient().setDefaultCollection(COLL); ClusterState clusterState = cluster.getSolrClient().getClusterStateProvider().getClusterState(); @@ -83,7 +82,7 @@ public static void setupClusterAndCollection() throws Exception { clients.add(getHttpSolrClient(replica.getCoreUrl())); } } - + @AfterClass public static void afterClass() throws Exception { IOUtils.close(clients); @@ -94,18 +93,20 @@ public static void afterClass() throws Exception { @After public void checkShardConsistencyAndCleanUp() throws Exception { checkShardsConsistentNumFound(); - assertEquals(0, - new UpdateRequest() - .deleteByQuery("*:*") - .setAction(UpdateRequest.ACTION.COMMIT, true, true) - .process(cluster.getSolrClient()) - .getStatus()); + assertEquals( + 0, + new UpdateRequest() + .deleteByQuery("*:*") + .setAction(UpdateRequest.ACTION.COMMIT, true, true) + .process(cluster.getSolrClient()) + .getStatus()); } - + private void checkShardsConsistentNumFound() throws Exception { final SolrParams params = params("q", "*:*", "distrib", "false"); - final DocCollection collection = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLL); - for (Map.Entry entry : collection.getActiveSlicesMap().entrySet()) { + final DocCollection collection = + cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLL); + for (Map.Entry entry : collection.getActiveSlicesMap().entrySet()) { final String shardName = entry.getKey(); final Slice slice = entry.getValue(); final Replica leader = entry.getValue().getLeader(); @@ -114,45 +115,50 @@ private void checkShardsConsistentNumFound() throws Exception { for (Replica replica : slice) { try (SolrClient replicaClient = getHttpSolrClient(replica.getCoreUrl())) { final SolrDocumentList replicaResults = replicaClient.query(params).getResults(); - assertEquals("inconsistency w/leader: shard=" + shardName + "core=" + replica.getCoreName(), - Collections.emptySet(), - CloudInspectUtil.showDiff(leaderResults, replicaResults, - shardName + " leader: " + leader.getCoreUrl(), - shardName + ": " + replica.getCoreUrl())); + assertEquals( + "inconsistency w/leader: shard=" + shardName + "core=" + replica.getCoreName(), + Collections.emptySet(), + CloudInspectUtil.showDiff( + leaderResults, + replicaResults, + shardName + " leader: " + leader.getCoreUrl(), + shardName + ": " + replica.getCoreUrl())); } } } } } - - private SolrClient getRandomSolrClient() { + + private SolrClient getRandomSolrClient() { final int index = random().nextInt(clients.size() + 1); return index == clients.size() ? 
cluster.getSolrClient() : clients.get(index); } - /** - * 100 docs with 2 digit ids and a route field that matches the last digit + /** + * 100 docs with 2 digit ids and a route field that matches the last digit + * * @see #del100Docs */ private UpdateRequest add100Docs() { final UpdateRequest adds = new UpdateRequest(); for (int x = 0; x <= 9; x++) { for (int y = 0; y <= 9; y++) { - adds.add("id", x+""+y, ROUTE_FIELD, RVAL_PRE+y); + adds.add("id", x + "" + y, ROUTE_FIELD, RVAL_PRE + y); } } return adds; } - - /** - * 100 doc deletions with 2 digit ids and a route field that matches the last digit + + /** + * 100 doc deletions with 2 digit ids and a route field that matches the last digit + * * @see #add100Docs */ private UpdateRequest del100Docs() { final UpdateRequest dels = new UpdateRequest(); for (int x = 0; x <= 9; x++) { for (int y = 0; y <= 9; y++) { - dels.deleteById(x+""+y, RVAL_PRE+y); + dels.deleteById(x + "" + y, RVAL_PRE + y); } } return dels; @@ -160,132 +166,182 @@ private UpdateRequest del100Docs() { public void testBlocksOfDeletes() throws Exception { - assertEquals(0, add100Docs().setAction(UpdateRequest.ACTION.COMMIT, true, true).process(getRandomSolrClient()).getStatus()); + assertEquals( + 0, + add100Docs() + .setAction(UpdateRequest.ACTION.COMMIT, true, true) + .process(getRandomSolrClient()) + .getStatus()); assertEquals(100, getRandomSolrClient().query(params("q", "*:*")).getResults().getNumFound()); // sanity check a "block" of 1 delete - assertEquals(0, - new UpdateRequest() - .deleteById("06", RVAL_PRE+"6") - .setAction(UpdateRequest.ACTION.COMMIT, true, true) - .process(getRandomSolrClient()) - .getStatus()); + assertEquals( + 0, + new UpdateRequest() + .deleteById("06", RVAL_PRE + "6") + .setAction(UpdateRequest.ACTION.COMMIT, true, true) + .process(getRandomSolrClient()) + .getStatus()); assertEquals(99, getRandomSolrClient().query(params("q", "*:*")).getResults().getNumFound()); checkShardsConsistentNumFound(); - + // block of 2 deletes w/diff routes - assertEquals(0, - new UpdateRequest() - .deleteById("17", RVAL_PRE+"7") - .deleteById("18", RVAL_PRE+"8") - .setAction(UpdateRequest.ACTION.COMMIT, true, true) - .process(getRandomSolrClient()) - .getStatus()); + assertEquals( + 0, + new UpdateRequest() + .deleteById("17", RVAL_PRE + "7") + .deleteById("18", RVAL_PRE + "8") + .setAction(UpdateRequest.ACTION.COMMIT, true, true) + .process(getRandomSolrClient()) + .getStatus()); assertEquals(97, getRandomSolrClient().query(params("q", "*:*")).getResults().getNumFound()); - + checkShardsConsistentNumFound(); // block of 2 deletes using single 'withRoute()' for both - assertEquals(0, - new UpdateRequest() - .deleteById("29") - .deleteById("39") - .withRoute(RVAL_PRE+"9") - .setAction(UpdateRequest.ACTION.COMMIT, true, true) - .process(getRandomSolrClient()) - .getStatus()); + assertEquals( + 0, + new UpdateRequest() + .deleteById("29") + .deleteById("39") + .withRoute(RVAL_PRE + "9") + .setAction(UpdateRequest.ACTION.COMMIT, true, true) + .process(getRandomSolrClient()) + .getStatus()); assertEquals(95, getRandomSolrClient().query(params("q", "*:*")).getResults().getNumFound()); - + checkShardsConsistentNumFound(); - + { // block of 2 deletes w/ diff routes that are conditional on optimistic concurrency - final Long v48 = (Long) getRandomSolrClient().query(params("q", "id:48", "fl", "_version_")).getResults().get(0).get("_version_"); - final Long v49 = (Long) getRandomSolrClient().query(params("q", "id:49", "fl", "_version_")).getResults().get(0).get("_version_"); 
- - assertEquals(0, - new UpdateRequest() - .deleteById("48", RVAL_PRE+"8", v48) - .deleteById("49", RVAL_PRE+"9", v49) - .setAction(UpdateRequest.ACTION.COMMIT, true, true) - .process(getRandomSolrClient()) - .getStatus()); + final Long v48 = + (Long) + getRandomSolrClient() + .query(params("q", "id:48", "fl", "_version_")) + .getResults() + .get(0) + .get("_version_"); + final Long v49 = + (Long) + getRandomSolrClient() + .query(params("q", "id:49", "fl", "_version_")) + .getResults() + .get(0) + .get("_version_"); + + assertEquals( + 0, + new UpdateRequest() + .deleteById("48", RVAL_PRE + "8", v48) + .deleteById("49", RVAL_PRE + "9", v49) + .setAction(UpdateRequest.ACTION.COMMIT, true, true) + .process(getRandomSolrClient()) + .getStatus()); } assertEquals(93, getRandomSolrClient().query(params("q", "*:*")).getResults().getNumFound()); - + checkShardsConsistentNumFound(); - - { // block of 2 deletes using single 'withRoute()' for both that are conditional on optimistic concurrency - final Long v58 = (Long) getRandomSolrClient().query(params("q", "id:58", "fl", "_version_")).getResults().get(0).get("_version_"); - final Long v68 = (Long) getRandomSolrClient().query(params("q", "id:68", "fl", "_version_")).getResults().get(0).get("_version_"); - - assertEquals(0, - new UpdateRequest() - .deleteById("58", v58) - .deleteById("68", v68) - .withRoute(RVAL_PRE+"8") - .setAction(UpdateRequest.ACTION.COMMIT, true, true) - .process(getRandomSolrClient()) - .getStatus()); + + { // block of 2 deletes using single 'withRoute()' for both that are conditional on optimistic + // concurrency + final Long v58 = + (Long) + getRandomSolrClient() + .query(params("q", "id:58", "fl", "_version_")) + .getResults() + .get(0) + .get("_version_"); + final Long v68 = + (Long) + getRandomSolrClient() + .query(params("q", "id:68", "fl", "_version_")) + .getResults() + .get(0) + .get("_version_"); + + assertEquals( + 0, + new UpdateRequest() + .deleteById("58", v58) + .deleteById("68", v68) + .withRoute(RVAL_PRE + "8") + .setAction(UpdateRequest.ACTION.COMMIT, true, true) + .process(getRandomSolrClient()) + .getStatus()); } assertEquals(91, getRandomSolrClient().query(params("q", "*:*")).getResults().getNumFound()); - + checkShardsConsistentNumFound(); - + // now delete all docs, including the ones we already deleted (shouldn't cause any problems) - assertEquals(0, del100Docs().setAction(UpdateRequest.ACTION.COMMIT, true, true).process(getRandomSolrClient()).getStatus()); + assertEquals( + 0, + del100Docs() + .setAction(UpdateRequest.ACTION.COMMIT, true, true) + .process(getRandomSolrClient()) + .getStatus()); assertEquals(0, getRandomSolrClient().query(params("q", "*:*")).getResults().getNumFound()); - + checkShardsConsistentNumFound(); } - /** - * Test that {@link UpdateRequest#getRoutesToCollection} correctly populates routes for all deletes + * Test that {@link UpdateRequest#getRoutesToCollection} correctly populates routes for all + * deletes */ public void testGlassBoxUpdateRequestRoutesToShards() throws Exception { - final DocCollection docCol = cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLL); - // we don't need "real" urls for all replicas, just something we can use as lookup keys for verification - // so we'll use the shard names as "leader urls" - final Map> urlMap = docCol.getActiveSlices().stream().collect - (Collectors.toMap(s -> s.getName(), s -> Collections.singletonList(s.getName()))); + final DocCollection docCol = + 
cluster.getSolrClient().getZkStateReader().getClusterState().getCollection(COLL); + // we don't need "real" urls for all replicas, just something we can use as lookup keys for + // verification so we'll use the shard names as "leader urls" + final Map> urlMap = + docCol.getActiveSlices().stream() + .collect( + Collectors.toMap(s -> s.getName(), s -> Collections.singletonList(s.getName()))); - // simplified rote info we'll build up with the shards for each delete (after sanity checking they have routing info at all)... - final Map actualDelRoutes = new LinkedHashMap<>(); - - final Map rawDelRoutes = del100Docs().getRoutesToCollection(docCol.getRouter(), docCol, urlMap, params(), ROUTE_FIELD); + // simplified rote info we'll build up with the shards for each delete (after sanity checking + // they have routing info at all)... + final Map actualDelRoutes = new LinkedHashMap<>(); + + final Map rawDelRoutes = + del100Docs() + .getRoutesToCollection(docCol.getRouter(), docCol, urlMap, params(), ROUTE_FIELD); for (LBSolrClient.Req lbreq : rawDelRoutes.values()) { assertTrue(lbreq.getRequest() instanceof UpdateRequest); final String shard = lbreq.getServers().get(0); final UpdateRequest req = (UpdateRequest) lbreq.getRequest(); - for (Map.Entry> entry : req.getDeleteByIdMap().entrySet()) { + for (Map.Entry> entry : req.getDeleteByIdMap().entrySet()) { final String id = entry.getKey(); // quick sanity checks... assertNotNull(id + " has null values", entry.getValue()); final Object route = entry.getValue().get(ShardParams._ROUTE_); assertNotNull(id + " has null route value", route); - assertEquals("route value is wrong for id: " + id, RVAL_PRE + id.substring(id.length() - 1), route.toString()); + assertEquals( + "route value is wrong for id: " + id, + RVAL_PRE + id.substring(id.length() - 1), + route.toString()); actualDelRoutes.put(id, shard); } } - // look at the routes computed from the "adds" as the expected value for the routes of each "del" + // look at the routes computed from the "adds" as the expected value for the routes of each + // "del" for (SolrInputDocument doc : add100Docs().getDocuments()) { final String id = doc.getFieldValue("id").toString(); final String actualShard = actualDelRoutes.get(id); assertNotNull(id + " delete is missing?", actualShard); - final Slice expectedShard = docCol.getRouter().getTargetSlice(id, doc, doc.getFieldValue(ROUTE_FIELD).toString(), params(), docCol); + final Slice expectedShard = + docCol + .getRouter() + .getTargetSlice(id, doc, doc.getFieldValue(ROUTE_FIELD).toString(), params(), docCol); assertNotNull(id + " add route is null?", expectedShard); - assertEquals("Wrong shard for delete of id: " + id, - expectedShard.getName(), actualShard); + assertEquals("Wrong shard for delete of id: " + id, expectedShard.getName(), actualShard); } // sanity check no one broke our test and made it a waste of time assertEquals(100, add100Docs().getDocuments().size()); assertEquals(100, actualDelRoutes.entrySet().size()); } - } - diff --git a/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java b/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java index 3324d4779df..079141cc92f 100644 --- a/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/update/DirectUpdateHandlerTest.java @@ -16,16 +16,17 @@ */ package org.apache.solr.update; +import static org.apache.solr.common.params.CommonParams.VERSION_FIELD; + +import com.codahale.metrics.Gauge; +import 
com.codahale.metrics.Meter; +import com.codahale.metrics.Metric; import java.lang.invoke.MethodHandles; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; - -import com.codahale.metrics.Gauge; -import com.codahale.metrics.Meter; -import com.codahale.metrics.Metric; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.store.Directory; @@ -46,14 +47,13 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.common.params.CommonParams.VERSION_FIELD; - @LogLevel("org.apache.solr.update=INFO") public class DirectUpdateHandlerTest extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); static String savedFactory; + @BeforeClass public static void beforeClass() throws Exception { savedFactory = System.getProperty("solr.DirectoryFactory"); @@ -62,7 +62,7 @@ public static void beforeClass() throws Exception { systemSetPropertySolrTestsMergePolicyFactory(TieredMergePolicyFactory.class.getName()); initCore("solrconfig.xml", "schema12.xml"); } - + @AfterClass public static void afterClass() { systemClearPropertySolrTestsMergePolicyFactory(); @@ -84,26 +84,27 @@ public void setUp() throws Exception { @Test public void testRequireUniqueKey() throws Exception { // Add a valid document - assertU(adoc("id","1")); + assertU(adoc("id", "1")); // More than one id should fail - assertFailedU(adoc("id","2", "id","ignore_exception", "text","foo")); + assertFailedU(adoc("id", "2", "id", "ignore_exception", "text", "foo")); // No id should fail ignoreException("id"); - assertFailedU(adoc("text","foo")); + assertFailedU(adoc("text", "foo")); resetExceptionIgnores(); } - - @Test @SuppressWarnings({"unchecked"}) public void testBasics() throws Exception { // get initial metrics - Map metrics = h.getCoreContainer().getMetricManager() - .registry(h.getCore().getCoreMetricManager().getRegistryName()).getMetrics(); + Map metrics = + h.getCoreContainer() + .getMetricManager() + .registry(h.getCore().getCoreMetricManager().getRegistryName()) + .getMetrics(); String PREFIX = "UPDATE.updateHandler."; @@ -122,18 +123,18 @@ public void testBasics() throws Exception { long cumulativeDelsI = ((Meter) metrics.get(cumulativeDelsIName)).getCount(); long cumulativeDelsQ = ((Meter) metrics.get(cumulativeDelsQName)).getCount(); + assertNull( + "This test requires a schema that has no version field, " + + "it appears the schema file in use has been edited to violate " + + "this requirement", + h.getCore().getLatestSchema().getFieldOrNull(VERSION_FIELD)); - assertNull("This test requires a schema that has no version field, " + - "it appears the schema file in use has been edited to violate " + - "this requirement", - h.getCore().getLatestSchema().getFieldOrNull(VERSION_FIELD)); - - assertU(adoc("id","5")); - assertU(adoc("id","6")); + assertU(adoc("id", "5")); + assertU(adoc("id", "6")); // search - not committed - docs should not be found. 
- assertQ(req("q","id:5"), "//*[@numFound='0']"); - assertQ(req("q","id:6"), "//*[@numFound='0']"); + assertQ(req("q", "id:5"), "//*[@numFound='0']"); + assertQ(req("q", "id:6"), "//*[@numFound='0']"); long newAdds = ((Gauge) metrics.get(addsName)).getValue().longValue(); long newCumulativeAdds = ((Meter) metrics.get(cumulativeAddsName)).getCount(); @@ -153,8 +154,8 @@ public void testBasics() throws Exception { assertEquals("new cumulative adds after commit", 2, newCumulativeAdds - cumulativeAdds); // now they should be there - assertQ(req("q","id:5"), "//*[@numFound='1']"); - assertQ(req("q","id:6"), "//*[@numFound='1']"); + assertQ(req("q", "id:5"), "//*[@numFound='1']"); + assertQ(req("q", "id:6"), "//*[@numFound='1']"); // now delete one assertU(delI("5")); @@ -165,7 +166,7 @@ public void testBasics() throws Exception { assertEquals("new cumulative delsI", 1, newCumulativeDelsI - cumulativeDelsI); // not committed yet - assertQ(req("q","id:5"), "//*[@numFound='1']"); + assertQ(req("q", "id:5"), "//*[@numFound='1']"); assertU(commit()); // delsI should be reset to 0 after commit @@ -175,8 +176,8 @@ public void testBasics() throws Exception { assertEquals("new cumulative delsI after commit", 1, newCumulativeDelsI - cumulativeDelsI); // 5 should be gone - assertQ(req("q","id:5"), "//*[@numFound='0']"); - assertQ(req("q","id:6"), "//*[@numFound='1']"); + assertQ(req("q", "id:5"), "//*[@numFound='0']"); + assertQ(req("q", "id:6"), "//*[@numFound='1']"); // now delete all assertU(delQ("*:*")); @@ -187,7 +188,7 @@ public void testBasics() throws Exception { assertEquals("new cumulative delsQ", 1, newCumulativeDelsQ - cumulativeDelsQ); // not committed yet - assertQ(req("q","id:6"), "//*[@numFound='1']"); + assertQ(req("q", "id:6"), "//*[@numFound='1']"); assertU(commit()); @@ -197,7 +198,7 @@ public void testBasics() throws Exception { assertEquals("new cumulative delsQ after commit", 1, newCumulativeDelsQ - cumulativeDelsQ); // 6 should be gone - assertQ(req("q","id:6"), "//*[@numFound='0']"); + assertQ(req("q", "id:6"), "//*[@numFound='0']"); // verify final metrics newCommits = ((Meter) metrics.get(commitsName)).getCount(); @@ -210,67 +211,67 @@ public void testBasics() throws Exception { assertEquals("new delsI", 0, newDelsI); newCumulativeDelsI = ((Meter) metrics.get(cumulativeDelsIName)).getCount(); assertEquals("new cumulative delsI", 1, newCumulativeDelsI - cumulativeDelsI); - } - @Test public void testAddRollback() throws Exception { // re-init the core deleteCore(); initCore("solrconfig.xml", "schema12.xml"); - assertU(adoc("id","A")); + assertU(adoc("id", "A")); // commit "A" SolrCore core = h.getCore(); UpdateHandler updater = core.getUpdateHandler(); - assertTrue( updater instanceof DirectUpdateHandler2 ); - DirectUpdateHandler2 duh2 = (DirectUpdateHandler2)updater; + assertTrue(updater instanceof DirectUpdateHandler2); + DirectUpdateHandler2 duh2 = (DirectUpdateHandler2) updater; SolrQueryRequest ureq = req(); CommitUpdateCommand cmtCmd = new CommitUpdateCommand(ureq, false); cmtCmd.waitSearcher = true; - assertEquals( 1, duh2.addCommands.longValue() ); - assertEquals( 1, duh2.addCommandsCumulative.getCount() ); - assertEquals( 0, duh2.commitCommands.getCount() ); + assertEquals(1, duh2.addCommands.longValue()); + assertEquals(1, duh2.addCommandsCumulative.getCount()); + assertEquals(0, duh2.commitCommands.getCount()); updater.commit(cmtCmd); - assertEquals( 0, duh2.addCommands.longValue() ); - assertEquals( 1, duh2.addCommandsCumulative.getCount() ); - assertEquals( 1, 
duh2.commitCommands.getCount() ); + assertEquals(0, duh2.addCommands.longValue()); + assertEquals(1, duh2.addCommandsCumulative.getCount()); + assertEquals(1, duh2.commitCommands.getCount()); ureq.close(); - assertU(adoc("id","B")); + assertU(adoc("id", "B")); // rollback "B" ureq = req(); RollbackUpdateCommand rbkCmd = new RollbackUpdateCommand(ureq); - assertEquals( 1, duh2.addCommands.longValue() ); - assertEquals( 2, duh2.addCommandsCumulative.getCount() ); - assertEquals( 0, duh2.rollbackCommands.getCount() ); + assertEquals(1, duh2.addCommands.longValue()); + assertEquals(2, duh2.addCommandsCumulative.getCount()); + assertEquals(0, duh2.rollbackCommands.getCount()); updater.rollback(rbkCmd); - assertEquals( 0, duh2.addCommands.longValue() ); - assertEquals( 1, duh2.addCommandsCumulative.getCount() ); - assertEquals( 1, duh2.rollbackCommands.getCount() ); + assertEquals(0, duh2.addCommands.longValue()); + assertEquals(1, duh2.addCommandsCumulative.getCount()); + assertEquals(1, duh2.rollbackCommands.getCount()); ureq.close(); - + // search - "B" should not be found. - Map args = new HashMap<>(); - args.put( CommonParams.Q, "id:A OR id:B" ); - args.put( "indent", "true" ); - SolrQueryRequest req = new LocalSolrQueryRequest( core, new MapSolrParams( args) ); - assertQ("\"B\" should not be found.", req - ,"//*[@numFound='1']" - ,"//result/doc[1]/str[@name='id'][.='A']" - ); + Map args = new HashMap<>(); + args.put(CommonParams.Q, "id:A OR id:B"); + args.put("indent", "true"); + SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); + assertQ( + "\"B\" should not be found.", + req, + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.='A']"); // Add a doc after the rollback to make sure we can continue to add/delete documents // after a rollback as normal - assertU(adoc("id","ZZZ")); + assertU(adoc("id", "ZZZ")); assertU(commit()); - assertQ("\"ZZZ\" must be found.", req("q", "id:ZZZ") - ,"//*[@numFound='1']" - ,"//result/doc[1]/str[@name='id'][.='ZZZ']" - ); + assertQ( + "\"ZZZ\" must be found.", + req("q", "id:ZZZ"), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.='ZZZ']"); } @Test @@ -279,107 +280,111 @@ public void testDeleteRollback() throws Exception { deleteCore(); initCore("solrconfig.xml", "schema12.xml"); - assertU(adoc("id","A")); - assertU(adoc("id","B")); + assertU(adoc("id", "A")); + assertU(adoc("id", "B")); // commit "A", "B" SolrCore core = h.getCore(); UpdateHandler updater = core.getUpdateHandler(); - assertTrue( updater instanceof DirectUpdateHandler2 ); - DirectUpdateHandler2 duh2 = (DirectUpdateHandler2)updater; + assertTrue(updater instanceof DirectUpdateHandler2); + DirectUpdateHandler2 duh2 = (DirectUpdateHandler2) updater; SolrQueryRequest ureq = req(); CommitUpdateCommand cmtCmd = new CommitUpdateCommand(ureq, false); cmtCmd.waitSearcher = true; - assertEquals( 2, duh2.addCommands.longValue() ); - assertEquals( 2, duh2.addCommandsCumulative.getCount() ); - assertEquals( 0, duh2.commitCommands.getCount() ); + assertEquals(2, duh2.addCommands.longValue()); + assertEquals(2, duh2.addCommandsCumulative.getCount()); + assertEquals(0, duh2.commitCommands.getCount()); updater.commit(cmtCmd); - assertEquals( 0, duh2.addCommands.longValue() ); - assertEquals( 2, duh2.addCommandsCumulative.getCount() ); - assertEquals( 1, duh2.commitCommands.getCount() ); + assertEquals(0, duh2.addCommands.longValue()); + assertEquals(2, duh2.addCommandsCumulative.getCount()); + assertEquals(1, duh2.commitCommands.getCount()); ureq.close(); // 
search - "A","B" should be found. - Map args = new HashMap<>(); - args.put( CommonParams.Q, "id:A OR id:B" ); - args.put( "indent", "true" ); - SolrQueryRequest req = new LocalSolrQueryRequest( core, new MapSolrParams( args) ); - assertQ("\"A\" and \"B\" should be found.", req - ,"//*[@numFound='2']" - ,"//result/doc[1]/str[@name='id'][.='A']" - ,"//result/doc[2]/str[@name='id'][.='B']" - ); + Map args = new HashMap<>(); + args.put(CommonParams.Q, "id:A OR id:B"); + args.put("indent", "true"); + SolrQueryRequest req = new LocalSolrQueryRequest(core, new MapSolrParams(args)); + assertQ( + "\"A\" and \"B\" should be found.", + req, + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.='A']", + "//result/doc[2]/str[@name='id'][.='B']"); // delete "B" assertU(delI("B")); // search - "A","B" should be found. - assertQ("\"A\" and \"B\" should be found.", req - ,"//*[@numFound='2']" - ,"//result/doc[1]/str[@name='id'][.='A']" - ,"//result/doc[2]/str[@name='id'][.='B']" - ); + assertQ( + "\"A\" and \"B\" should be found.", + req, + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.='A']", + "//result/doc[2]/str[@name='id'][.='B']"); // rollback "B" ureq = req(); RollbackUpdateCommand rbkCmd = new RollbackUpdateCommand(ureq); - assertEquals( 1, duh2.deleteByIdCommands.longValue() ); - assertEquals( 1, duh2.deleteByIdCommandsCumulative.getCount() ); - assertEquals( 0, duh2.rollbackCommands.getCount() ); + assertEquals(1, duh2.deleteByIdCommands.longValue()); + assertEquals(1, duh2.deleteByIdCommandsCumulative.getCount()); + assertEquals(0, duh2.rollbackCommands.getCount()); updater.rollback(rbkCmd); ureq.close(); - assertEquals( 0, duh2.deleteByIdCommands.longValue() ); - assertEquals( 0, duh2.deleteByIdCommandsCumulative.getCount() ); - assertEquals( 1, duh2.rollbackCommands.getCount() ); - + assertEquals(0, duh2.deleteByIdCommands.longValue()); + assertEquals(0, duh2.deleteByIdCommandsCumulative.getCount()); + assertEquals(1, duh2.rollbackCommands.getCount()); + // search - "B" should be found. - assertQ("\"B\" should be found.", req - ,"//*[@numFound='2']" - ,"//result/doc[1]/str[@name='id'][.='A']" - ,"//result/doc[2]/str[@name='id'][.='B']" - ); + assertQ( + "\"B\" should be found.", + req, + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.='A']", + "//result/doc[2]/str[@name='id'][.='B']"); // Add a doc after the rollback to make sure we can continue to add/delete documents // after a rollback as normal - assertU(adoc("id","ZZZ")); + assertU(adoc("id", "ZZZ")); assertU(commit()); - assertQ("\"ZZZ\" must be found.", req("q", "id:ZZZ") - ,"//*[@numFound='1']" - ,"//result/doc[1]/str[@name='id'][.='ZZZ']" - ); + assertQ( + "\"ZZZ\" must be found.", + req("q", "id:ZZZ"), + "//*[@numFound='1']", + "//result/doc[1]/str[@name='id'][.='ZZZ']"); } @Test public void testExpungeDeletes() throws Exception { - assertU(adoc("id","1")); - assertU(adoc("id","2")); + assertU(adoc("id", "1")); + assertU(adoc("id", "2")); assertU(commit()); - assertU(adoc("id","3")); - assertU(adoc("id","2")); // dup, triggers delete - assertU(adoc("id","4")); + assertU(adoc("id", "3")); + assertU(adoc("id", "2")); // dup, triggers delete + assertU(adoc("id", "4")); assertU(commit()); - SolrQueryRequest sr = req("q","foo"); + SolrQueryRequest sr = req("q", "foo"); DirectoryReader r = sr.getSearcher().getIndexReader(); - assertTrue("maxDoc !> numDocs ... expected some deletions", - r.maxDoc() > r.numDocs()); + assertTrue("maxDoc !> numDocs ... 
expected some deletions", r.maxDoc() > r.numDocs()); sr.close(); - assertU(commit("expungeDeletes","true")); + assertU(commit("expungeDeletes", "true")); - sr = req("q","foo"); + sr = req("q", "foo"); r = sr.getSearcher().getIndexReader(); - assertEquals(r.maxDoc(), r.numDocs()); // no deletions - assertEquals(4,r.maxDoc()); // no dups + assertEquals(r.maxDoc(), r.numDocs()); // no deletions + assertEquals(4, r.maxDoc()); // no dups sr.close(); } - + @Test public void testPrepareCommit() throws Exception { assertU(adoc("id", "999")); - assertU(optimize("maxSegments", "1")); // make sure there's just one segment - assertU(commit()); // commit a second time to make sure index files aren't still referenced by the old searcher + assertU(optimize("maxSegments", "1")); // make sure there's just one segment + // commit a second time to make sure index files aren't still referenced by the old searcher + assertU(commit()); SolrQueryRequest sr = req(); DirectoryReader r = sr.getSearcher().getIndexReader(); @@ -390,9 +395,13 @@ public void testPrepareCommit() throws Exception { } assertU(adoc("id", "1")); - assertFalse(Arrays.stream(d.listAll()).anyMatch(s -> s.startsWith(IndexFileNames.PENDING_SEGMENTS))); + assertFalse( + Arrays.stream(d.listAll()).anyMatch(s -> s.startsWith(IndexFileNames.PENDING_SEGMENTS))); String beforeSegmentsFile = - Arrays.stream(d.listAll()).filter(s -> s.startsWith(IndexFileNames.SEGMENTS)).findAny().get(); + Arrays.stream(d.listAll()) + .filter(s -> s.startsWith(IndexFileNames.SEGMENTS)) + .findAny() + .get(); if (log.isInfoEnabled()) { log.info("FILES before prepareCommit={}", Arrays.asList(d.listAll())); @@ -403,33 +412,30 @@ public void testPrepareCommit() throws Exception { if (log.isInfoEnabled()) { log.info("FILES after prepareCommit={}", Arrays.asList(d.listAll())); } - assertTrue(Arrays.stream(d.listAll()).anyMatch(s -> s.startsWith(IndexFileNames.PENDING_SEGMENTS))); - assertEquals(beforeSegmentsFile, - Arrays.stream(d.listAll()).filter(s -> s.startsWith(IndexFileNames.SEGMENTS)).findAny().get()); - - assertJQ(req("q", "id:1") - , "/response/numFound==0" - ); - - updateJ("", params("rollback","true")); + assertTrue( + Arrays.stream(d.listAll()).anyMatch(s -> s.startsWith(IndexFileNames.PENDING_SEGMENTS))); + assertEquals( + beforeSegmentsFile, + Arrays.stream(d.listAll()) + .filter(s -> s.startsWith(IndexFileNames.SEGMENTS)) + .findAny() + .get()); + + assertJQ(req("q", "id:1"), "/response/numFound==0"); + + updateJ("", params("rollback", "true")); assertU(commit()); - assertJQ(req("q", "id:1") - , "/response/numFound==0" - ); + assertJQ(req("q", "id:1"), "/response/numFound==0"); - assertU(adoc("id","1")); - updateJ("", params("prepareCommit","true")); + assertU(adoc("id", "1")); + updateJ("", params("prepareCommit", "true")); - assertJQ(req("q", "id:1") - , "/response/numFound==0" - ); + assertJQ(req("q", "id:1"), "/response/numFound==0"); assertU(commit()); - assertJQ(req("q", "id:1") - , "/response/numFound==1" - ); + assertJQ(req("q", "id:1"), "/response/numFound==1"); sr.close(); } @@ -446,7 +452,9 @@ public void testPostSoftCommitEvents() throws Exception { assertU(commit("softCommit", "true")); assertEquals("newSearcher was called more than once", 1, listener.newSearcherCount.get()); assertFalse("postSoftCommit was not called", listener.postSoftCommitAt.get() == Long.MAX_VALUE); - assertTrue("newSearcher was called after postSoftCommitCallback", listener.postSoftCommitAt.get() >= listener.newSearcherOpenedAt.get()); + assertTrue( + "newSearcher was called 
after postSoftCommitCallback", + listener.postSoftCommitAt.get() >= listener.newSearcherOpenedAt.get()); } static class MySolrEventListener implements SolrEventListener { @@ -455,8 +463,7 @@ static class MySolrEventListener implements SolrEventListener { AtomicLong postSoftCommitAt = new AtomicLong(Long.MAX_VALUE); @Override - public void postCommit() { - } + public void postCommit() {} @Override public void postSoftCommit() { diff --git a/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java b/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java index 534d213537a..0c747c7b6ef 100644 --- a/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java +++ b/solr/core/src/test/org/apache/solr/update/DocumentBuilderTest.java @@ -16,13 +16,14 @@ */ package org.apache.solr.update; +import static org.hamcrest.core.Is.is; + +import com.carrotsearch.randomizedtesting.generators.RandomStrings; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.stream.Collectors; import java.util.stream.StreamSupport; - -import com.carrotsearch.randomizedtesting.generators.RandomStrings; import org.apache.lucene.document.Document; import org.apache.lucene.document.KnnVectorField; import org.apache.lucene.index.IndexableField; @@ -43,12 +44,7 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.hamcrest.core.Is.is; - -/** - * - * - */ +/** */ public class DocumentBuilderTest extends SolrTestCaseJ4 { static final int save_min_len = DocumentBuilder.MIN_LENGTH_TO_MOVE_LAST; @@ -72,44 +68,49 @@ public void testBuildDocument() throws Exception { SolrCore core = h.getCore(); // undefined field SolrInputDocument doc = new SolrInputDocument(); - doc.setField( "unknown field", 12345 ); + doc.setField("unknown field", 12345); - SolrException ex = expectThrows(SolrException.class, () -> DocumentBuilder.toDocument( doc, core.getLatestSchema() )); + SolrException ex = + expectThrows( + SolrException.class, () -> DocumentBuilder.toDocument(doc, core.getLatestSchema())); assertEquals("should be bad request", 400, ex.code()); } @Test public void testNullField() { SolrCore core = h.getCore(); - + // make sure a null value is not indexed SolrInputDocument doc = new SolrInputDocument(); - doc.addField( "name", null ); - Document out = DocumentBuilder.toDocument( doc, core.getLatestSchema() ); - assertNull( out.get( "name" ) ); + doc.addField("name", null); + Document out = DocumentBuilder.toDocument(doc, core.getLatestSchema()); + assertNull(out.get("name")); } @Test public void testExceptions() { SolrCore core = h.getCore(); - + // make sure a null value is not indexed SolrInputDocument doc = new SolrInputDocument(); - doc.addField( "id", "123" ); - doc.addField( "unknown", "something" ); - Exception ex = expectThrows(Exception.class, () -> DocumentBuilder.toDocument( doc, core.getLatestSchema() )); - assertTrue( "should have document ID", ex.getMessage().indexOf( "doc=123" ) > 0 ); - doc.remove( "unknown" ); - - - doc.addField( "weight", "not a number" ); - ex = expectThrows(Exception.class, () -> DocumentBuilder.toDocument( doc, core.getLatestSchema())); - assertTrue( "should have document ID", ex.getMessage().indexOf( "doc=123" ) > 0 ); - assertTrue( "cause is number format", ex.getCause() instanceof NumberFormatException ); + doc.addField("id", "123"); + doc.addField("unknown", "something"); + Exception ex = + expectThrows( + Exception.class, () -> DocumentBuilder.toDocument(doc, core.getLatestSchema())); + assertTrue("should have 
document ID", ex.getMessage().indexOf("doc=123") > 0); + doc.remove("unknown"); + + doc.addField("weight", "not a number"); + ex = + expectThrows( + Exception.class, () -> DocumentBuilder.toDocument(doc, core.getLatestSchema())); + assertTrue("should have document ID", ex.getMessage().indexOf("doc=123") > 0); + assertTrue("cause is number format", ex.getCause() instanceof NumberFormatException); // now make sure it is OK - doc.setField( "weight", "1.34" ); - DocumentBuilder.toDocument( doc, core.getLatestSchema() ); + doc.setField("weight", "1.34"); + DocumentBuilder.toDocument(doc, core.getLatestSchema()); } @Test @@ -118,34 +119,32 @@ public void testMultiField() throws Exception { // make sure a null value is not indexed SolrInputDocument doc = new SolrInputDocument(); - doc.addField( "home", "2.2,3.3" ); - Document out = DocumentBuilder.toDocument( doc, core.getLatestSchema() ); - assertNotNull( out.get( "home" ) );//contains the stored value and term vector, if there is one - assertNotNull( out.getField( "home_0" + FieldType.POLY_FIELD_SEPARATOR + "double") ); - assertNotNull( out.getField( "home_1" + FieldType.POLY_FIELD_SEPARATOR + "double") ); + doc.addField("home", "2.2,3.3"); + Document out = DocumentBuilder.toDocument(doc, core.getLatestSchema()); + assertNotNull(out.get("home")); // contains the stored value and term vector, if there is one + assertNotNull(out.getField("home_0" + FieldType.POLY_FIELD_SEPARATOR + "double")); + assertNotNull(out.getField("home_1" + FieldType.POLY_FIELD_SEPARATOR + "double")); } - - /** - * Even though boosts have been removed, we still support them for bw compat. - */ + + /** Even though boosts have been removed, we still support them for bw compat. */ public void testBoost() throws Exception { XmlDoc xml = new XmlDoc(); - xml.xml = "" - + "0" - + "mytitle" - + ""; + xml.xml = + "" + + "0" + + "mytitle" + + ""; assertNull(h.validateUpdate(add(xml, new String[0]))); } - - /** - * It's ok to supply a document boost even if a field omits norms - */ + + /** It's ok to supply a document boost even if a field omits norms */ public void testDocumentBoostOmitNorms() throws Exception { XmlDoc xml = new XmlDoc(); - xml.xml = "" - + "2" - + "mytitle" - + ""; + xml.xml = + "" + + "2" + + "mytitle" + + ""; assertNull(h.validateUpdate(add(xml, new String[0]))); } @@ -182,7 +181,6 @@ public void testSolrDocumentEquals() { doc2.addField("bar", randomInt); assertFalse(compareSolrDocument(doc1, doc2)); - } public void testSolrInputDocumentEquality() { @@ -196,7 +194,6 @@ public void testSolrInputDocumentEquality() { assertTrue(compareSolrInputDocument(doc1, doc2)); - doc1 = new SolrInputDocument(); doc1.addField("foo", randomString); doc2 = new SolrInputDocument(); @@ -215,7 +212,6 @@ public void testSolrInputDocumentEquality() { childDoc.addField(TestUtil.randomSimpleString(random()), TestUtil.randomSimpleString(random())); doc2.addChildDocument(childDoc1); assertFalse(compareSolrInputDocument(doc1, doc2)); - } public void testSolrInputFieldEquality() { @@ -231,40 +227,52 @@ public void testSolrInputFieldEquality() { sif2.setName("foo"); assertFalse(assertSolrInputFieldEquals(sif1, sif2)); - } public void testMoveLargestLast() { SolrInputDocument inDoc = new SolrInputDocument(); - String TEXT_FLD = "text"; // not stored. It won't be moved. This value is the longest, however. - inDoc.addField(TEXT_FLD, - "NOT STORED|" + RandomStrings.randomAsciiOfLength(random(), 4 * DocumentBuilder.MIN_LENGTH_TO_MOVE_LAST)); + // not stored. It won't be moved. 
This value is the longest, however. + String TEXT_FLD = "text"; + inDoc.addField( + TEXT_FLD, + "NOT STORED|" + + RandomStrings.randomAsciiOfLength( + random(), 4 * DocumentBuilder.MIN_LENGTH_TO_MOVE_LAST)); String CAT_FLD = "cat"; // stored, multiValued - inDoc.addField(CAT_FLD, - "STORED V1|"); + inDoc.addField(CAT_FLD, "STORED V1|"); // pretty long value - inDoc.addField(CAT_FLD, - "STORED V2|" + RandomStrings.randomAsciiOfLength(random(), 2 * DocumentBuilder.MIN_LENGTH_TO_MOVE_LAST)); - inDoc.addField(CAT_FLD, - "STORED V3|" + RandomStrings.randomAsciiOfLength(random(), DocumentBuilder.MIN_LENGTH_TO_MOVE_LAST)); + inDoc.addField( + CAT_FLD, + "STORED V2|" + + RandomStrings.randomAsciiOfLength( + random(), 2 * DocumentBuilder.MIN_LENGTH_TO_MOVE_LAST)); + inDoc.addField( + CAT_FLD, + "STORED V3|" + + RandomStrings.randomAsciiOfLength(random(), DocumentBuilder.MIN_LENGTH_TO_MOVE_LAST)); String SUBJECT_FLD = "subject"; // stored. This value is long, but not long enough. - inDoc.addField(SUBJECT_FLD, - "2ndplace|" + RandomStrings.randomAsciiOfLength(random(), DocumentBuilder.MIN_LENGTH_TO_MOVE_LAST)); + inDoc.addField( + SUBJECT_FLD, + "2ndplace|" + + RandomStrings.randomAsciiOfLength(random(), DocumentBuilder.MIN_LENGTH_TO_MOVE_LAST)); Document outDoc = DocumentBuilder.toDocument(inDoc, h.getCore().getLatestSchema()); // filter outDoc by stored fields; convert to list. - List storedFields = StreamSupport.stream(outDoc.spliterator(), false) - .filter(f -> f.fieldType().stored()).collect(Collectors.toList()); + List storedFields = + StreamSupport.stream(outDoc.spliterator(), false) + .filter(f -> f.fieldType().stored()) + .collect(Collectors.toList()); // clip to last 3. We expect these to be for CAT_FLD storedFields = storedFields.subList(storedFields.size() - 3, storedFields.size()); Iterator fieldIterator = storedFields.iterator(); IndexableField field; - // Test that we retained the particular value ordering, even though though the 2nd of three was longest + // Test that we retained the particular value ordering, even though though the 2nd of three was + // longest assertTrue(fieldIterator.hasNext()); field = fieldIterator.next(); @@ -289,17 +297,17 @@ public void testCopyFieldMaxChars() { String testValue = "this is more than 10 characters"; String truncatedValue = "this is mo"; - //maxChars with a string value + // maxChars with a string value SolrInputDocument doc = new SolrInputDocument(); - doc.addField( "title", testValue); + doc.addField("title", testValue); Document out = DocumentBuilder.toDocument(doc, core.getLatestSchema()); assertEquals(testValue, out.get("title")); assertEquals(truncatedValue, out.get("max_chars")); - //maxChars with a ByteArrayUtf8CharSequence + // maxChars with a ByteArrayUtf8CharSequence doc = new SolrInputDocument(); - doc.addField( "title", new ByteArrayUtf8CharSequence(testValue)); + doc.addField("title", new ByteArrayUtf8CharSequence(testValue)); out = DocumentBuilder.toDocument(doc, core.getLatestSchema()); assertEquals(testValue, out.get("title")); @@ -313,16 +321,22 @@ public void denseVector_shouldReturnOneIndexableFieldAndOneStoredFieldPerVectorE SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", "0"); doc.addField("vector", Arrays.asList(1.1f, 2.1f, 3.1f, 4.1f)); - + Document out = DocumentBuilder.toDocument(doc, core.getLatestSchema()); - - //from /solr/core/src/test-files/solr/collection1/conf/schema.xml - KnnVectorField expectedIndexableField = new KnnVectorField("vector", new float[]{1.1f, 2.1f, 3.1f, 4.1f}, 
VectorSimilarityFunction.COSINE); - - MatcherAssert.assertThat(((KnnVectorField)out.getField("vector")).vectorValue(), is(expectedIndexableField.vectorValue())); - - List storedFields = StreamSupport.stream(out.spliterator(), false) - .filter(f -> (f.fieldType().stored() && f.name().equals("vector"))).collect(Collectors.toList()); + + // from /solr/core/src/test-files/solr/collection1/conf/schema.xml + KnnVectorField expectedIndexableField = + new KnnVectorField( + "vector", new float[] {1.1f, 2.1f, 3.1f, 4.1f}, VectorSimilarityFunction.COSINE); + + MatcherAssert.assertThat( + ((KnnVectorField) out.getField("vector")).vectorValue(), + is(expectedIndexableField.vectorValue())); + + List storedFields = + StreamSupport.stream(out.spliterator(), false) + .filter(f -> (f.fieldType().stored() && f.name().equals("vector"))) + .collect(Collectors.toList()); MatcherAssert.assertThat(storedFields.size(), is(4)); MatcherAssert.assertThat(storedFields.get(0).numericValue(), is(1.1f)); @@ -341,13 +355,19 @@ public void denseVector_shouldWorkWithCopyFields() { Document out = DocumentBuilder.toDocument(doc, core.getLatestSchema()); - //from /solr/core/src/test-files/solr/collection1/conf/schema.xml - KnnVectorField exectedDestination = new KnnVectorField("vector2", new float[]{1.1f, 2.1f, 3.1f, 4.1f}, VectorSimilarityFunction.DOT_PRODUCT); + // from /solr/core/src/test-files/solr/collection1/conf/schema.xml + KnnVectorField exectedDestination = + new KnnVectorField( + "vector2", new float[] {1.1f, 2.1f, 3.1f, 4.1f}, VectorSimilarityFunction.DOT_PRODUCT); - MatcherAssert.assertThat(((KnnVectorField)out.getField("vector2")).vectorValue(), is(exectedDestination.vectorValue())); + MatcherAssert.assertThat( + ((KnnVectorField) out.getField("vector2")).vectorValue(), + is(exectedDestination.vectorValue())); - List storedFields = StreamSupport.stream(out.spliterator(), false) - .filter(f -> (f.fieldType().stored() && f.name().equals("vector2"))).collect(Collectors.toList()); + List storedFields = + StreamSupport.stream(out.spliterator(), false) + .filter(f -> (f.fieldType().stored() && f.name().equals("vector2"))) + .collect(Collectors.toList()); MatcherAssert.assertThat(storedFields.size(), is(4)); MatcherAssert.assertThat(storedFields.get(0).numericValue(), is(1.1f)); @@ -363,11 +383,18 @@ public void denseVector_incorrectCopyFieldDestinationType_shouldThrowException() SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", "0"); doc.addField("vector3", Arrays.asList(1.1f, 2.1f, 3.1f, 4.1f)); - - RuntimeException thrown = Assert.assertThrows("Incorrect destination field type should raise exception", SolrException.class, () -> { - DocumentBuilder.toDocument(doc, core.getLatestSchema()); - }); - MatcherAssert.assertThat(thrown.getMessage(), is("ERROR: [doc=0] Error adding field 'vector3'='[1.1, 2.1, 3.1, 4.1]' msg=The copy field destination must be a DenseVectorField: vector_f_p")); + + RuntimeException thrown = + Assert.assertThrows( + "Incorrect destination field type should raise exception", + SolrException.class, + () -> { + DocumentBuilder.toDocument(doc, core.getLatestSchema()); + }); + MatcherAssert.assertThat( + thrown.getMessage(), + is( + "ERROR: [doc=0] Error adding field 'vector3'='[1.1, 2.1, 3.1, 4.1]' msg=The copy field destination must be a DenseVectorField: vector_f_p")); } @Test @@ -378,11 +405,20 @@ public void denseVector_incorrectCopyFieldDestinationDimension_shouldThrowExcept doc.addField("id", "0"); doc.addField("vector4", Arrays.asList(1.1f, 2.1f, 3.1f, 4.1f)); - 
RuntimeException thrown = Assert.assertThrows("Incorrect destination dimension should raise exception", SolrException.class, () -> { - DocumentBuilder.toDocument(doc, core.getLatestSchema()); - }); - MatcherAssert.assertThat(thrown.getMessage(), is("ERROR: [doc=0] Error adding field 'vector4'='[1.1, 2.1, 3.1, 4.1]' msg=Error while creating field 'vector5{type=knn_vector5,properties=indexed,stored}' from value '[1.1, 2.1, 3.1, 4.1]', expected format:'[f1, f2, f3...fn]' e.g. [1.0, 3.4, 5.6]")); - MatcherAssert.assertThat(thrown.getCause().getCause().getMessage(), is("incorrect vector dimension. The vector value has size 4 while it is expected a vector with size 5")); + RuntimeException thrown = + Assert.assertThrows( + "Incorrect destination dimension should raise exception", + SolrException.class, + () -> { + DocumentBuilder.toDocument(doc, core.getLatestSchema()); + }); + MatcherAssert.assertThat( + thrown.getMessage(), + is( + "ERROR: [doc=0] Error adding field 'vector4'='[1.1, 2.1, 3.1, 4.1]' msg=Error while creating field 'vector5{type=knn_vector5,properties=indexed,stored}' from value '[1.1, 2.1, 3.1, 4.1]', expected format:'[f1, f2, f3...fn]' e.g. [1.0, 3.4, 5.6]")); + MatcherAssert.assertThat( + thrown.getCause().getCause().getMessage(), + is( + "incorrect vector dimension. The vector value has size 4 while it is expected a vector with size 5")); } - } diff --git a/solr/core/src/test/org/apache/solr/update/MaxSizeAutoCommitTest.java b/solr/core/src/test/org/apache/solr/update/MaxSizeAutoCommitTest.java index 2f7dedac8f6..224f756b23e 100644 --- a/solr/core/src/test/org/apache/solr/update/MaxSizeAutoCommitTest.java +++ b/solr/core/src/test/org/apache/solr/update/MaxSizeAutoCommitTest.java @@ -18,7 +18,6 @@ package org.apache.solr.update; import java.lang.invoke.MethodHandles; - import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -26,23 +25,20 @@ import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.function.Function; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.util.ClientUtils; import org.apache.solr.common.params.MapSolrParams; import org.apache.solr.common.util.ContentStream; import org.apache.solr.core.SolrCore; import org.apache.solr.core.SolrEventListener; -import org.apache.solr.search.SolrIndexSearcher; import org.apache.solr.handler.UpdateRequestHandler; import org.apache.solr.request.SolrQueryRequestBase; import org.apache.solr.response.SolrQueryResponse; - +import org.apache.solr.search.SolrIndexSearcher; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -61,7 +57,7 @@ private static String delDoc(int id) { private static final int COMMIT_CHECKING_SLEEP_TIME_MS = 50; // max TLOG file size private static final int MAX_FILE_SIZE = 1000; - + private SolrCore core; private DirectUpdateHandler2 updateHandler; private CommitTracker hardCommitTracker; @@ -83,7 +79,7 @@ public void setup() throws Exception { updateHandler.softCommitTracker.setTimeUpperBound(-1); updateHandler.softCommitTracker.setDocsUpperBound(-1); updateHandler.softCommitTracker.setTLogFileSizeUpperBound(-1); - + hardCommitTracker = updateHandler.commitTracker; // Only testing file-size based auto hard commits - disable other checks hardCommitTracker.setTimeUpperBound(-1); @@ -92,9 +88,9 @@ public void setup() throws Exception { monitor = new MockEventListener(); 
updateHandler.registerCommitCallback(monitor); - + updateRequestHandler = new UpdateRequestHandler(); - updateRequestHandler.init( null ); + updateRequestHandler.init(null); } @After @@ -111,83 +107,111 @@ public void tearDown() throws Exception { @Test public void testAdds() throws Exception { - Assert.assertEquals("There have been no updates yet, so there shouldn't have been any commits", 0, - hardCommitTracker.getCommitCount()); + Assert.assertEquals( + "There have been no updates yet, so there shouldn't have been any commits", + 0, + hardCommitTracker.getCommitCount()); long tlogSizePreUpdates = updateHandler.getUpdateLog().getCurrentLogSizeFromStream(); - Assert.assertEquals("There have been no updates yet, so tlog should be empty", 0, tlogSizePreUpdates); + Assert.assertEquals( + "There have been no updates yet, so tlog should be empty", 0, tlogSizePreUpdates); // Add a large number of docs - should trigger a commit int numDocsToAdd = 500; SolrQueryResponse updateResp = new SolrQueryResponse(); - - monitor.doStuffAndExpectAtLeastOneCommit(hardCommitTracker, updateHandler, () -> { - updateRequestHandler.handleRequest(constructBatchAddDocRequest(0, numDocsToAdd), updateResp); - }); + + monitor.doStuffAndExpectAtLeastOneCommit( + hardCommitTracker, + updateHandler, + () -> { + updateRequestHandler.handleRequest( + constructBatchAddDocRequest(0, numDocsToAdd), updateResp); + }); } @Test public void testRedundantDeletes() throws Exception { - Assert.assertEquals("There have been no updates yet, so there shouldn't have been any commits", 0, - hardCommitTracker.getCommitCount()); + Assert.assertEquals( + "There have been no updates yet, so there shouldn't have been any commits", + 0, + hardCommitTracker.getCommitCount()); long tlogSizePreUpdates = updateHandler.getUpdateLog().getCurrentLogSizeFromStream(); - Assert.assertEquals("There have been no updates yet, so tlog should be empty", 0, tlogSizePreUpdates); - + Assert.assertEquals( + "There have been no updates yet, so tlog should be empty", 0, tlogSizePreUpdates); + // Add docs int numDocsToAdd = 150; SolrQueryResponse updateResp = new SolrQueryResponse(); - monitor.doStuffAndExpectAtLeastOneCommit(hardCommitTracker, updateHandler, () -> { - updateRequestHandler.handleRequest(constructBatchAddDocRequest(0, numDocsToAdd), updateResp); - }); - + monitor.doStuffAndExpectAtLeastOneCommit( + hardCommitTracker, + updateHandler, + () -> { + updateRequestHandler.handleRequest( + constructBatchAddDocRequest(0, numDocsToAdd), updateResp); + }); // Send a bunch of redundant deletes int numDeletesToSend = 500; int docIdToDelete = 100; - SolrQueryRequestBase batchSingleDeleteRequest = new SolrQueryRequestBase(core, new MapSolrParams(new HashMap<>())) {}; + SolrQueryRequestBase batchSingleDeleteRequest = + new SolrQueryRequestBase(core, new MapSolrParams(new HashMap<>())) {}; List docs = new ArrayList<>(); for (int i = 0; i < numDeletesToSend; i++) { docs.add(delI(Integer.toString(docIdToDelete))); } batchSingleDeleteRequest.setContentStreams(toContentStreams(docs)); - - monitor.doStuffAndExpectAtLeastOneCommit(hardCommitTracker, updateHandler, () -> { - updateRequestHandler.handleRequest(batchSingleDeleteRequest, updateResp); - }); - + + monitor.doStuffAndExpectAtLeastOneCommit( + hardCommitTracker, + updateHandler, + () -> { + updateRequestHandler.handleRequest(batchSingleDeleteRequest, updateResp); + }); } @Test public void testDeletes() throws Exception { - Assert.assertEquals("There have been no updates yet, so there shouldn't have been any 
commits", 0, - hardCommitTracker.getCommitCount()); + Assert.assertEquals( + "There have been no updates yet, so there shouldn't have been any commits", + 0, + hardCommitTracker.getCommitCount()); long tlogSizePreUpdates = updateHandler.getUpdateLog().getCurrentLogSizeFromStream(); - Assert.assertEquals("There have been no updates yet, so tlog should be empty", 0, tlogSizePreUpdates); - + Assert.assertEquals( + "There have been no updates yet, so tlog should be empty", 0, tlogSizePreUpdates); + // Add docs int numDocsToAdd = 500; SolrQueryResponse updateResp = new SolrQueryResponse(); - - monitor.doStuffAndExpectAtLeastOneCommit(hardCommitTracker, updateHandler, () -> { - updateRequestHandler.handleRequest(constructBatchAddDocRequest(0, numDocsToAdd), updateResp); - }); - + + monitor.doStuffAndExpectAtLeastOneCommit( + hardCommitTracker, + updateHandler, + () -> { + updateRequestHandler.handleRequest( + constructBatchAddDocRequest(0, numDocsToAdd), updateResp); + }); + // Delete all documents - should trigger a commit - - monitor.doStuffAndExpectAtLeastOneCommit(hardCommitTracker, updateHandler, () -> { - updateRequestHandler.handleRequest(constructBatchDeleteDocRequest(0, numDocsToAdd), updateResp); - }); - + + monitor.doStuffAndExpectAtLeastOneCommit( + hardCommitTracker, + updateHandler, + () -> { + updateRequestHandler.handleRequest( + constructBatchDeleteDocRequest(0, numDocsToAdd), updateResp); + }); } /** - * Construct a batch add document request with a series of very simple Solr docs with increasing IDs. + * Construct a batch add document request with a series of very simple Solr docs with increasing + * IDs. + * * @param startId the document ID to begin with * @param batchSize the number of documents to include in the batch * @return a SolrQueryRequestBase @@ -198,6 +222,7 @@ private SolrQueryRequestBase constructBatchAddDocRequest(int startId, int batchS /** * Construct a batch delete document request, with IDs incrementing from startId + * * @param startId the document ID to begin with * @param batchSize the number of documents to include in the batch * @return a SolrQueryRequestBase @@ -208,13 +233,17 @@ private SolrQueryRequestBase constructBatchDeleteDocRequest(int startId, int bat /** * Helper for constructing a batch update request + * * @param startId the document ID to begin with * @param batchSize the number of documents to include in the batch - * @param requestFn a function that takes an (int) ID and returns an XML string of the request to add to the batch request + * @param requestFn a function that takes an (int) ID and returns an XML string of the request to + * add to the batch request * @return a SolrQueryRequestBase */ - private SolrQueryRequestBase constructBatchRequestHelper(int startId, int batchSize, Function requestFn) { - SolrQueryRequestBase updateReq = new SolrQueryRequestBase(core, new MapSolrParams(new HashMap<>())) {}; + private SolrQueryRequestBase constructBatchRequestHelper( + int startId, int batchSize, Function requestFn) { + SolrQueryRequestBase updateReq = + new SolrQueryRequestBase(core, new MapSolrParams(new HashMap<>())) {}; List docs = new ArrayList<>(); for (int i = startId; i < startId + batchSize; i++) { docs.add(requestFn.apply(i)); @@ -225,6 +254,7 @@ private SolrQueryRequestBase constructBatchRequestHelper(int startId, int batchS /** * Convert the given list of strings into a list of streams, for Solr update requests + * * @param strs strings to convert into streams * @return list of streams */ @@ -238,14 +268,14 @@ private List 
toContentStreams(List strs) { private static final class MockEventListener implements SolrEventListener { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - + public MockEventListener() { /* No-Op */ } - - // use capacity bound Queue just so we're sure we don't OOM + + // use capacity bound Queue just so we're sure we don't OOM public final BlockingQueue hard = new LinkedBlockingQueue<>(1000); - + // if non-empty, then at least one offer failed (queues full) private StringBuffer fail = new StringBuffer(); @@ -253,72 +283,76 @@ public MockEventListener() { public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher currentSearcher) { // No-Op } - + @Override public void postCommit() { Long now = System.nanoTime(); if (!hard.offer(now)) fail.append(", hardCommit @ " + now); } - + @Override public void postSoftCommit() { // No-Op } - + public void clear() { hard.clear(); fail.setLength(0); } - public void doStuffAndExpectAtLeastOneCommit(final CommitTracker commitTracker, - final DirectUpdateHandler2 updateHandler, - final Runnable stuff) throws InterruptedException { + public void doStuffAndExpectAtLeastOneCommit( + final CommitTracker commitTracker, + final DirectUpdateHandler2 updateHandler, + final Runnable stuff) + throws InterruptedException { assertSaneOffers(); - + final int POLL_TIME = 5; final TimeUnit POLL_UNIT = TimeUnit.SECONDS; - + final int preAutoCommitCount = commitTracker.getCommitCount(); log.info("Auto-Commit count prior to doing work: {}", preAutoCommitCount); stuff.run(); log.info("Work Completed"); - + int numIters = 0; Long lastPostCommitTimeStampSeen = null; final long startTimeNanos = System.nanoTime(); final long cutOffTime = startTimeNanos + TimeUnit.SECONDS.toNanos(300); while (System.nanoTime() < cutOffTime) { numIters++; - log.info("Polling at most {} {} for expected (post-)commit#{}", POLL_TIME, POLL_UNIT, numIters); + log.info( + "Polling at most {} {} for expected (post-)commit#{}", POLL_TIME, POLL_UNIT, numIters); lastPostCommitTimeStampSeen = hard.poll(POLL_TIME, POLL_UNIT); - assertNotNull("(post-)commit#" + numIters + " didn't occur in allowed time frame", - lastPostCommitTimeStampSeen); + assertNotNull( + "(post-)commit#" + numIters + " didn't occur in allowed time frame", + lastPostCommitTimeStampSeen); synchronized (commitTracker) { final int currentAutoCommitCount = commitTracker.getCommitCount() - preAutoCommitCount; final long currentFileSize = updateHandler.getUpdateLog().getCurrentLogSizeFromStream(); - if ((currentFileSize < MAX_FILE_SIZE) && - (currentAutoCommitCount == numIters) && - ( ! commitTracker.hasPending() )) { + if ((currentFileSize < MAX_FILE_SIZE) + && (currentAutoCommitCount == numIters) + && (!commitTracker.hasPending())) { // if all of these conditions are met, then we should be completely done assertSaneOffers(); // last minute sanity check return; } // else: log & loop...
- log.info("(Auto-)commits triggered: {}; (post-)commits seen: {}; current tlog file size: {}", - currentAutoCommitCount, numIters, currentFileSize); + log.info( + "(Auto-)commits triggered: {}; (post-)commits seen: {}; current tlog file size: {}", + currentAutoCommitCount, + numIters, + currentFileSize); } } - + // if we didn't return already, then we ran out of time fail("Exhausted cut off time polling for post-commit events (got " + numIters + ")"); } - + public void assertSaneOffers() { - assertEquals("Failure of MockEventListener" + fail.toString(), - 0, fail.length()); + assertEquals("Failure of MockEventListener" + fail.toString(), 0, fail.length()); } - } - + } } - diff --git a/solr/core/src/test/org/apache/solr/update/MockStreamingSolrClients.java b/solr/core/src/test/org/apache/solr/update/MockStreamingSolrClients.java index bfa62218a09..a95e68cbdfe 100644 --- a/solr/core/src/test/org/apache/solr/update/MockStreamingSolrClients.java +++ b/solr/core/src/test/org/apache/solr/update/MockStreamingSolrClients.java @@ -27,21 +27,25 @@ import org.apache.solr.common.util.NamedList; public class MockStreamingSolrClients extends StreamingSolrClients { - - public enum Exp {CONNECT_EXCEPTION, SOCKET_EXCEPTION, BAD_REQUEST}; - + + public enum Exp { + CONNECT_EXCEPTION, + SOCKET_EXCEPTION, + BAD_REQUEST + }; + private volatile Exp exp = null; - + public MockStreamingSolrClients(UpdateShardHandler updateShardHandler) { super(updateShardHandler); } - + @Override public synchronized SolrClient getSolrClient(final SolrCmdDistributor.Req req) { SolrClient client = super.getSolrClient(req); return new MockSolrClient(client); } - + public void setExp(Exp exp) { this.exp = exp; } @@ -67,30 +71,30 @@ class MockSolrClient extends SolrClient { public MockSolrClient(SolrClient solrClient) { this.solrClient = solrClient; } - + @Override - public NamedList request(@SuppressWarnings({"rawtypes"})SolrRequest request, String collection) + public NamedList request( + @SuppressWarnings({"rawtypes"}) SolrRequest request, String collection) throws SolrServerException, IOException { if (exp != null) { Exception e = exception(); if (e instanceof IOException) { if (LuceneTestCase.random().nextBoolean()) { - throw (IOException)e; + throw (IOException) e; } else { throw new SolrServerException(e); } } else if (e instanceof SolrServerException) { - throw (SolrServerException)e; + throw (SolrServerException) e; } else { throw new SolrServerException(e); } } - + return solrClient.request(request); } @Override public void close() {} - } } diff --git a/solr/core/src/test/org/apache/solr/update/MockingHttp2SolrClient.java b/solr/core/src/test/org/apache/solr/update/MockingHttp2SolrClient.java index 200de4bb08c..8a65ef86c9e 100644 --- a/solr/core/src/test/org/apache/solr/update/MockingHttp2SolrClient.java +++ b/solr/core/src/test/org/apache/solr/update/MockingHttp2SolrClient.java @@ -22,23 +22,27 @@ import java.net.SocketException; import java.util.HashSet; import java.util.Set; - import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.Http2SolrClient; import org.apache.solr.client.solrj.request.UpdateRequest; -import org.apache.solr.client.solrj.util.Cancellable; import org.apache.solr.client.solrj.util.AsyncListener; +import org.apache.solr.client.solrj.util.Cancellable; import org.apache.solr.common.SolrException; import org.apache.solr.common.util.NamedList; public class 
MockingHttp2SolrClient extends Http2SolrClient { - public enum Exp {CONNECT_EXCEPTION, SOCKET_EXCEPTION, BAD_REQUEST}; + public enum Exp { + CONNECT_EXCEPTION, + SOCKET_EXCEPTION, + BAD_REQUEST + }; private volatile Exp exp = null; private boolean oneExpPerReq; + @SuppressWarnings({"rawtypes"}) private Set reqGotException; @@ -68,7 +72,6 @@ Builder oneExpPerReq() { } } - public void setExp(Exp exp) { this.exp = exp; } @@ -88,21 +91,18 @@ private Exception exception() { } @Override - public NamedList request(@SuppressWarnings({"rawtypes"})SolrRequest request, - String collection) + public NamedList request( + @SuppressWarnings({"rawtypes"}) SolrRequest request, String collection) throws SolrServerException, IOException { if (request instanceof UpdateRequest) { UpdateRequest ur = (UpdateRequest) request; - if (!ur.getDeleteQuery().isEmpty()) - return super.request(request, collection); + if (!ur.getDeleteQuery().isEmpty()) return super.request(request, collection); } if (exp != null) { if (oneExpPerReq) { - if (reqGotException.contains(request)) - return super.request(request, collection); - else - reqGotException.add(request); + if (reqGotException.contains(request)) return super.request(request, collection); + else reqGotException.add(request); } Exception e = exception(); @@ -123,8 +123,8 @@ public NamedList request(@SuppressWarnings({"rawtypes"})SolrRequest requ } @Override - public Cancellable asyncRequest(SolrRequest request, - String collection, AsyncListener> asyncListener) { + public Cancellable asyncRequest( + SolrRequest request, String collection, AsyncListener> asyncListener) { if (request instanceof UpdateRequest) { UpdateRequest ur = (UpdateRequest) request; // won't throw exception if request is DBQ @@ -137,9 +137,7 @@ public Cancellable asyncRequest(SolrRequest request, if (oneExpPerReq) { if (reqGotException.contains(request)) { return super.asyncRequest(request, collection, asyncListener); - } - else - reqGotException.add(request); + } else reqGotException.add(request); } Exception e = exception(); diff --git a/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java b/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java index 4d9b74447fa..3e5e03faada 100644 --- a/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java +++ b/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java @@ -17,6 +17,7 @@ package org.apache.solr.update; import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM; +import static org.hamcrest.core.StringContains.containsString; import java.io.IOException; import java.util.Arrays; @@ -25,16 +26,15 @@ import java.util.LinkedList; import java.util.List; import java.util.Set; - import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.SolrTestCaseJ4.SuppressSSL; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.params.ModifiableSolrParams; -import org.apache.solr.common.SolrException; import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.StrUtils; import org.apache.solr.schema.IndexSchema; @@ -42,15 +42,14 @@ import org.apache.solr.update.processor.DistributedUpdateProcessor; import org.apache.solr.update.processor.DistributedUpdateProcessor.DistribPhase; 
import org.junit.Test; -import static org.hamcrest.core.StringContains.containsString; @SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776") public class PeerSyncTest extends BaseDistributedSearchTestCase { - protected static int numVersions = 100; // number of versions to use when syncing + protected static int numVersions = 100; // number of versions to use when syncing protected static final String FROM_LEADER = DistribPhase.FROMLEADER.toString(); protected static final ModifiableSolrParams seenLeader = - params(DISTRIB_UPDATE_PARAM, FROM_LEADER); - + params(DISTRIB_UPDATE_PARAM, FROM_LEADER); + public PeerSyncTest() { stress = 0; @@ -65,10 +64,14 @@ public PeerSyncTest() { throw new RuntimeException(e); } IndexSchema schema = h.getCore().getLatestSchema(); - assertTrue(schema.getFieldOrNull("_version_").hasDocValues() && !schema.getFieldOrNull("_version_").indexed() - && !schema.getFieldOrNull("_version_").stored()); - assertTrue(!schema.getFieldOrNull("val_i_dvo").indexed() && !schema.getFieldOrNull("val_i_dvo").stored() && - schema.getFieldOrNull("val_i_dvo").hasDocValues()); + assertTrue( + schema.getFieldOrNull("_version_").hasDocValues() + && !schema.getFieldOrNull("_version_").indexed() + && !schema.getFieldOrNull("_version_").stored()); + assertTrue( + !schema.getFieldOrNull("val_i_dvo").indexed() + && !schema.getFieldOrNull("val_i_dvo").stored() + && schema.getFieldOrNull("val_i_dvo").hasDocValues()); } @Test @@ -85,73 +88,98 @@ public void test() throws Exception { SolrClient client2 = clients.get(2); long v = 0; - add(client0, seenLeader, sdoc("id","1","_version_",++v)); + add(client0, seenLeader, sdoc("id", "1", "_version_", ++v)); - // this fails because client0 has no context (i.e. no updates of its own to judge if applying the updates + // this fails because client0 has no context (i.e. 
no updates of its own to judge if applying + // the updates // from client1 will bring it into sync with client1) assertSync(client1, numVersions, false, shardsArr[0]); // bring client1 back into sync with client0 by adding the doc - add(client1, seenLeader, sdoc("id","1","_version_",v)); + add(client1, seenLeader, sdoc("id", "1", "_version_", v)); // both have the same version list, so sync should now return true assertSync(client1, numVersions, true, shardsArr[0]); // TODO: test that updates weren't necessary - client0.commit(); client1.commit(); queryAndCompare(params("q", "*:*"), client0, client1); + client0.commit(); + client1.commit(); + queryAndCompare(params("q", "*:*"), client0, client1); - add(client0, seenLeader, addRandFields(sdoc("id","2","_version_",++v))); + add(client0, seenLeader, addRandFields(sdoc("id", "2", "_version_", ++v))); // now client1 has the context to sync assertSync(client1, numVersions, true, shardsArr[0]); - client0.commit(); client1.commit(); queryAndCompare(params("q", "*:*"), client0, client1); - - add(client0, seenLeader, addRandFields(sdoc("id","3","_version_",++v))); - add(client0, seenLeader, addRandFields(sdoc("id","4","_version_",++v))); - add(client0, seenLeader, addRandFields(sdoc("id","5","_version_",++v))); - add(client0, seenLeader, addRandFields(sdoc("id","6","_version_",++v))); - add(client0, seenLeader, addRandFields(sdoc("id","7","_version_",++v))); - add(client0, seenLeader, addRandFields(sdoc("id","8","_version_",++v))); - add(client0, seenLeader, addRandFields(sdoc("id","9","_version_",++v))); - add(client0, seenLeader, addRandFields(sdoc("id","10","_version_",++v))); - for (int i=0; i<10; i++) docsAdded.add(i+1); + client0.commit(); + client1.commit(); + queryAndCompare(params("q", "*:*"), client0, client1); + + add(client0, seenLeader, addRandFields(sdoc("id", "3", "_version_", ++v))); + add(client0, seenLeader, addRandFields(sdoc("id", "4", "_version_", ++v))); + add(client0, seenLeader, addRandFields(sdoc("id", "5", "_version_", ++v))); + add(client0, seenLeader, addRandFields(sdoc("id", "6", "_version_", ++v))); + add(client0, seenLeader, addRandFields(sdoc("id", "7", "_version_", ++v))); + add(client0, seenLeader, addRandFields(sdoc("id", "8", "_version_", ++v))); + add(client0, seenLeader, addRandFields(sdoc("id", "9", "_version_", ++v))); + add(client0, seenLeader, addRandFields(sdoc("id", "10", "_version_", ++v))); + for (int i = 0; i < 10; i++) docsAdded.add(i + 1); assertSync(client1, numVersions, true, shardsArr[0]); validateDocs(docsAdded, client0, client1); testOverlap(docsAdded, client0, client1, v); // test delete and deleteByQuery - v=1000; - SolrInputDocument doc = sdoc("id","1000","_version_",++v); + v = 1000; + SolrInputDocument doc = sdoc("id", "1000", "_version_", ++v); add(client0, seenLeader, doc); - add(client0, seenLeader, sdoc("id","1001","_version_",++v)); - delQ(client0, params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_",Long.toString(-++v)), "id:1001 OR id:1002"); - add(client0, seenLeader, sdoc("id","1002","_version_",++v)); - del(client0, params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_",Long.toString(-++v)), "1000"); + add(client0, seenLeader, sdoc("id", "1001", "_version_", ++v)); + delQ( + client0, + params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", Long.toString(- ++v)), + "id:1001 OR id:1002"); + add(client0, seenLeader, sdoc("id", "1002", "_version_", ++v)); + del( + client0, + params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", Long.toString(- ++v)), + "1000"); docsAdded.add(1002); // 
1002 added assertSync(client1, numVersions, true, shardsArr[0]); validateDocs(docsAdded, client0, client1); - // test that delete by query is returned even if not requested, and that it doesn't delete newer stuff than it should - v=2000; + // test that delete by query is returned even if not requested, and that it doesn't delete newer + // stuff than it should + v = 2000; SolrClient client = client0; - add(client, seenLeader, sdoc("id","2000","_version_",++v)); - add(client, seenLeader, sdoc("id","2001","_version_",++v)); - delQ(client, params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_",Long.toString(-++v)), "id:2001 OR id:2002"); - add(client, seenLeader, sdoc("id","2002","_version_",++v)); - del(client, params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_",Long.toString(-++v)), "2000"); + add(client, seenLeader, sdoc("id", "2000", "_version_", ++v)); + add(client, seenLeader, sdoc("id", "2001", "_version_", ++v)); + delQ( + client, + params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", Long.toString(- ++v)), + "id:2001 OR id:2002"); + add(client, seenLeader, sdoc("id", "2002", "_version_", ++v)); + del( + client, + params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", Long.toString(- ++v)), + "2000"); docsAdded.add(2002); // 2002 added - v=2000; + v = 2000; client = client1; - add(client, seenLeader, sdoc("id","2000","_version_",++v)); - ++v; // pretend we missed the add of 2001. peersync should retrieve it, but should also retrieve any deleteByQuery objects after it + add(client, seenLeader, sdoc("id", "2000", "_version_", ++v)); + ++v; // pretend we missed the add of 2001. peersync should retrieve it, but should also + // retrieve any deleteByQuery objects after it // add(client, seenLeader, sdoc("id","2001","_version_",++v)); - delQ(client, params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_",Long.toString(-++v)), "id:2001 OR id:2002"); - add(client, seenLeader, sdoc("id","2002","_version_",++v)); - del(client, params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_",Long.toString(-++v)), "2000"); + delQ( + client, + params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", Long.toString(- ++v)), + "id:2001 OR id:2002"); + add(client, seenLeader, sdoc("id", "2002", "_version_", ++v)); + del( + client, + params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", Long.toString(- ++v)), + "2000"); assertSync(client1, numVersions, true, shardsArr[0]); @@ -163,38 +191,45 @@ public void test() throws Exception { // this should cause us to retrieve the delete (but not the following add) // the reorder in application shouldn't affect anything - add(client0, seenLeader, sdoc("id","3000","_version_",3001)); - add(client1, seenLeader, sdoc("id","3000","_version_",3001)); - del(client0, params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_","3000"), "3000"); + add(client0, seenLeader, sdoc("id", "3000", "_version_", 3001)); + add(client1, seenLeader, sdoc("id", "3000", "_version_", 3001)); + del(client0, params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", "3000"), "3000"); docsAdded.add(3000); // this should cause us to retrieve an add that was previously deleted - add(client0, seenLeader, sdoc("id","3001","_version_",3003)); - del(client0, params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_","3001"), "3004"); - del(client1, params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_","3001"), "3004"); + add(client0, seenLeader, sdoc("id", "3001", "_version_", 3003)); + del(client0, params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", "3001"), "3004"); + del(client1, params(DISTRIB_UPDATE_PARAM,
"_version_", "3001"), "3004"); // this should cause us to retrieve an older add that was overwritten - add(client0, seenLeader, sdoc("id","3002","_version_",3004)); - add(client0, seenLeader, sdoc("id","3002","_version_",3005)); - add(client1, seenLeader, sdoc("id","3002","_version_",3005)); + add(client0, seenLeader, sdoc("id", "3002", "_version_", 3004)); + add(client0, seenLeader, sdoc("id", "3002", "_version_", 3005)); + add(client1, seenLeader, sdoc("id", "3002", "_version_", 3005)); docsAdded.add(3001); // 3001 added docsAdded.add(3002); // 3002 added - + assertSync(client1, numVersions, true, shardsArr[0]); validateDocs(docsAdded, client0, client1); // now lets check fingerprinting causes appropriate fails v = 4000; - add(client0, seenLeader, sdoc("id",Integer.toString((int)v),"_version_",v)); + add(client0, seenLeader, sdoc("id", Integer.toString((int) v), "_version_", v)); docsAdded.add(4000); - int toAdd = numVersions+10; - for (int i=0; i { - inPlaceParams.set(DistributedUpdateProcessor.DISTRIB_INPLACE_PREVVERSION, "6000"); - add(client0, inPlaceParams, sdoc("id", 6000, "val_i_dvo", 6003, "_version_", 5007)); - }); + SolrException ex = + expectThrows( + SolrException.class, + () -> { + inPlaceParams.set(DistributedUpdateProcessor.DISTRIB_INPLACE_PREVVERSION, "6000"); + add(client0, inPlaceParams, sdoc("id", 6000, "val_i_dvo", 6003, "_version_", 5007)); + }); assertEquals(ex.toString(), SolrException.ErrorCode.SERVER_ERROR.code, ex.code()); assertThat(ex.getMessage(), containsString("Can't find document with id=6000")); - // Reordered DBQ with Child-nodes (SOLR-10114) docsAdded.clear(); @@ -296,13 +362,19 @@ public void test() throws Exception { docsAdded.add(7001); docsAdded.add(7001001); docsAdded.add(7001002); - delQ(client0, params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_","7000"), "id:*"); // reordered delete + delQ( + client0, + params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", "7000"), + "id:*"); // reordered delete assertSync(client1, numVersions, true, shardsArr[0]); validateDocs(docsAdded, client0, client1); // Reordered DBQ should not affect update add(client0, seenLeader, sdocWithChildren(8000, "8000", 5)); // add with later version - delQ(client0, params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_","8002"), "id:8500"); // not found, arrives earlier + delQ( + client0, + params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", "8002"), + "id:8500"); // not found, arrives earlier add(client0, seenLeader, sdocWithChildren(8000, "8001", 2)); // update with two childs docsAdded.add(8000); docsAdded.add(8000001); @@ -310,48 +382,62 @@ public void test() throws Exception { assertSync(client1, numVersions, true, shardsArr[0]); validateDocs(docsAdded, client0, client1); - assertNotEquals(PeerSync.SHARD_REQUEST_PURPOSE_GET_UPDATES, PeerSync.SHARD_REQUEST_PURPOSE_GET_VERSIONS); + assertNotEquals( + PeerSync.SHARD_REQUEST_PURPOSE_GET_UPDATES, PeerSync.SHARD_REQUEST_PURPOSE_GET_VERSIONS); handleVersionsWithRangesTests(); } - protected void testOverlap(Set docsAdded, SolrClient client0, SolrClient client1, long v) throws IOException, SolrServerException { - int toAdd = (int)(numVersions *.95); - for (int i=0; i docsAdded, SolrClient client0, SolrClient client1, long v) + throws IOException, SolrServerException { + int toAdd = (int) (numVersions * .95); + for (int i = 0; i < toAdd; i++) { + add(client0, seenLeader, sdoc("id", Integer.toString(i + 11), "_version_", v + i + 1)); + docsAdded.add(i + 11); } // sync should fail since there's not enough overlap to give us confidence 
assertSync(client1, numVersions, false, shardsArr[0]); // add some of the docs that were missing... just enough to give enough overlap - int toAdd2 = (int)(numVersions * .25); - for (int i=0; i docsAdded, SolrClient client0, SolrClient client1) throws SolrServerException, IOException { + protected void validateDocs(Set docsAdded, SolrClient client0, SolrClient client1) + throws SolrServerException, IOException { client0.commit(); client1.commit(); QueryResponse qacResponse; - qacResponse = queryAndCompare(params("q", "*:*", "rows", "10000", "sort","_version_ desc"), client0, client1); + qacResponse = + queryAndCompare( + params("q", "*:*", "rows", "10000", "sort", "_version_ desc"), client0, client1); validateQACResponse(docsAdded, qacResponse); } - void assertSync(SolrClient client, int numVersions, boolean expectedResult, String... syncWith) throws IOException, SolrServerException { - QueryRequest qr = new QueryRequest(params("qt","/get", "getVersions",Integer.toString(numVersions), "sync", StrUtils.join(Arrays.asList(syncWith), ','))); + void assertSync(SolrClient client, int numVersions, boolean expectedResult, String... syncWith) + throws IOException, SolrServerException { + QueryRequest qr = + new QueryRequest( + params( + "qt", + "/get", + "getVersions", + Integer.toString(numVersions), + "sync", + StrUtils.join(Arrays.asList(syncWith), ','))); NamedList rsp = client.request(qr); assertEquals(expectedResult, (Boolean) rsp.get("sync")); } - + void validateQACResponse(Set docsAdded, QueryResponse qacResponse) { Set qacDocs = new LinkedHashSet<>(); - for (int i=0; i otherVersions = Collections.emptyList(); List ourUpdates = Collections.singletonList(42L); assertEquals(1, ourUpdates.size()); long ourLowThreshold = ourUpdates.get(0); - MissedUpdatesRequest mur = PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( - otherVersions, completeList, ourUpdates, ourLowThreshold); + MissedUpdatesRequest mur = + PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( + otherVersions, completeList, ourUpdates, ourLowThreshold); // no updates requested since other has nothing assertEquals(0L, mur.totalRequestedUpdates); assertEquals(null, mur.versionsAndRanges); @@ -385,21 +472,23 @@ private static void testHandleVersionsWithRangesNoOther() throws Exception { } private static void testHandleVersionsWithRangesSameOne() throws Exception { - for (boolean completeList : new boolean[] { false, true }) { + for (boolean completeList : new boolean[] {false, true}) { List otherVersions = Collections.singletonList(42L); List ourUpdates = Collections.singletonList(42L); assertEquals(1, ourUpdates.size()); long ourLowThreshold = ourUpdates.get(0); - MissedUpdatesRequest mur = PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( - otherVersions, completeList, ourUpdates, ourLowThreshold); + MissedUpdatesRequest mur = + PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( + otherVersions, completeList, ourUpdates, ourLowThreshold); // no updates requested since us and other have the same versions assertEquals(0L, mur.totalRequestedUpdates); assertEquals(null, mur.versionsAndRanges); } } - private static void testHandleVersionsWithRangesMissingOneOfTwo(boolean highestMissing) throws Exception { - for (boolean completeList : new boolean[] { false, true }) { + private static void testHandleVersionsWithRangesMissingOneOfTwo(boolean highestMissing) + throws Exception { + for (boolean completeList : new boolean[] {false, true}) { LinkedList otherVersions = new LinkedList<>(List.of(44L, 22L)); 
LinkedList ourUpdates = new LinkedList<>(otherVersions); final Long missing; @@ -410,8 +499,9 @@ private static void testHandleVersionsWithRangesMissingOneOfTwo(boolean highestM } assertEquals(1, ourUpdates.size()); long ourLowThreshold = ourUpdates.get(0); - MissedUpdatesRequest mur = PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( - otherVersions, completeList, ourUpdates, ourLowThreshold); + MissedUpdatesRequest mur = + PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( + otherVersions, completeList, ourUpdates, ourLowThreshold); if (highestMissing || completeList) { /* * request one update for the missing one, because @@ -419,7 +509,7 @@ private static void testHandleVersionsWithRangesMissingOneOfTwo(boolean highestM * it's not the highest/latest but we need a complete list */ assertEquals(1L, mur.totalRequestedUpdates); - assertEquals(missing+"..."+missing, mur.versionsAndRanges); + assertEquals(missing + "..." + missing, mur.versionsAndRanges); } else { /* * request no updates because we already have the highest/latest and @@ -433,27 +523,29 @@ private static void testHandleVersionsWithRangesMissingOneOfTwo(boolean highestM } private static void testHandleVersionsWithRangesMissingMiddleOfThree() throws Exception { - for (boolean completeList : new boolean[] { false, true }) { + for (boolean completeList : new boolean[] {false, true}) { List otherVersions = List.of(55L, 33L, 11L); LinkedList ourUpdates = new LinkedList<>(otherVersions); - Long missing = ourUpdates.remove(ourUpdates.size()/2); + Long missing = ourUpdates.remove(ourUpdates.size() / 2); assertEquals(33L, missing.longValue()); { long ourLowThreshold = ourUpdates.getLast(); // lowest in descending list - MissedUpdatesRequest mur = PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( - otherVersions, completeList, ourUpdates, ourLowThreshold); + MissedUpdatesRequest mur = + PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( + otherVersions, completeList, ourUpdates, ourLowThreshold); // request the one update we are missing assertEquals(1L, mur.totalRequestedUpdates); - assertEquals(missing+"..."+missing, mur.versionsAndRanges); + assertEquals(missing + "..." + missing, mur.versionsAndRanges); } { long ourLowThreshold = ourUpdates.getFirst(); // highest in descending list - MissedUpdatesRequest mur = PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( - otherVersions, completeList, ourUpdates, ourLowThreshold); + MissedUpdatesRequest mur = + PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( + otherVersions, completeList, ourUpdates, ourLowThreshold); if (completeList) { // request the one update we are missing assertEquals(1L, mur.totalRequestedUpdates); - assertEquals(missing+"..."+missing, mur.versionsAndRanges); + assertEquals(missing + "..." + missing, mur.versionsAndRanges); } else { /* * request no updates because we don't need a complete list and @@ -467,25 +559,29 @@ private static void testHandleVersionsWithRangesMissingMiddleOfThree() throws Ex } } - private static void testHandleVersionsWithRangesMissingOneRange(boolean duplicateMiddle) throws Exception { - for (boolean completeList : new boolean[] { false , true }) { - List otherVersions = duplicateMiddle - ? List.of(9L, 8L, 7L, 6L, 5L, 5L, 4L, 3L, 2L, 1L) + private static void testHandleVersionsWithRangesMissingOneRange(boolean duplicateMiddle) + throws Exception { + for (boolean completeList : new boolean[] {false, true}) { + List otherVersions = + duplicateMiddle + ? 
List.of(9L, 8L, 7L, 6L, 5L, 5L, 4L, 3L, 2L, 1L) : List.of(9L, 8L, 7L, 6L, 5L, 4L, 3L, 2L, 1L); LinkedList ourUpdates = new LinkedList<>(List.of(9L, 8L, 7L, 3L, 2L, 1L)); long expectedTotalRequestedUpdates = duplicateMiddle ? 4L : 3L; { long ourLowThreshold = ourUpdates.getLast(); // lowest in descending list - MissedUpdatesRequest mur = PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( - otherVersions, completeList, ourUpdates, ourLowThreshold); + MissedUpdatesRequest mur = + PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( + otherVersions, completeList, ourUpdates, ourLowThreshold); // request all we are missing assertEquals(expectedTotalRequestedUpdates, mur.totalRequestedUpdates); assertEquals("4...6", mur.versionsAndRanges); } { long ourLowThreshold = 3; // min of gap minus 1 - MissedUpdatesRequest mur = PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( - otherVersions, completeList, ourUpdates, ourLowThreshold); + MissedUpdatesRequest mur = + PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( + otherVersions, completeList, ourUpdates, ourLowThreshold); if (completeList) { // request all we are missing since we want a complete list assertEquals(expectedTotalRequestedUpdates, mur.totalRequestedUpdates); @@ -498,8 +594,9 @@ private static void testHandleVersionsWithRangesMissingOneRange(boolean duplicat } { long ourLowThreshold = 7; // max of gap plus 1 - MissedUpdatesRequest mur = PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( - otherVersions, completeList, ourUpdates, ourLowThreshold); + MissedUpdatesRequest mur = + PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( + otherVersions, completeList, ourUpdates, ourLowThreshold); if (completeList) { // request all we are missing since we want a complete list assertEquals(expectedTotalRequestedUpdates, mur.totalRequestedUpdates); @@ -512,8 +609,9 @@ private static void testHandleVersionsWithRangesMissingOneRange(boolean duplicat } { long ourLowThreshold = ourUpdates.getFirst(); // highest in descending list - MissedUpdatesRequest mur = PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( - otherVersions, completeList, ourUpdates, ourLowThreshold); + MissedUpdatesRequest mur = + PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( + otherVersions, completeList, ourUpdates, ourLowThreshold); if (completeList) { // request all we are missing since we want a complete list assertEquals(expectedTotalRequestedUpdates, mur.totalRequestedUpdates); @@ -534,22 +632,29 @@ private static void testHandleVersionsWithRangesMissingOneRange(boolean duplicat } private static void testHandleVersionsWithRangesMissingTwoRanges() throws Exception { - for (boolean completeList : new boolean[] { false , true }) { - LinkedList otherVersions = new LinkedList<>(List.of(9L, 8L, 7L, 6L, 5L, 4L, 3L, 2L, 1L)); - LinkedList ourUpdates = new LinkedList<>(List.of( - otherVersions.getFirst(), otherVersions.get(otherVersions.size()/2), otherVersions.getLast())); + for (boolean completeList : new boolean[] {false, true}) { + LinkedList otherVersions = + new LinkedList<>(List.of(9L, 8L, 7L, 6L, 5L, 4L, 3L, 2L, 1L)); + LinkedList ourUpdates = + new LinkedList<>( + List.of( + otherVersions.getFirst(), + otherVersions.get(otherVersions.size() / 2), + otherVersions.getLast())); { long ourLowThreshold = ourUpdates.getLast(); // lowest in descending list - MissedUpdatesRequest mur = PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( - otherVersions, completeList, ourUpdates, ourLowThreshold); + 
MissedUpdatesRequest mur = + PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( + otherVersions, completeList, ourUpdates, ourLowThreshold); // request all we are missing assertEquals(6L, mur.totalRequestedUpdates); assertEquals("2...4,6...8", mur.versionsAndRanges); } { - long ourLowThreshold = ourUpdates.get(ourUpdates.size()/2); // middle in descending list - MissedUpdatesRequest mur = PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( - otherVersions, completeList, ourUpdates, ourLowThreshold); + long ourLowThreshold = ourUpdates.get(ourUpdates.size() / 2); // middle in descending list + MissedUpdatesRequest mur = + PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( + otherVersions, completeList, ourUpdates, ourLowThreshold); if (completeList) { // request all we are missing assertEquals(6L, mur.totalRequestedUpdates); @@ -562,8 +667,9 @@ private static void testHandleVersionsWithRangesMissingTwoRanges() throws Except } { long ourLowThreshold = ourUpdates.getFirst(); // highest in descending list - MissedUpdatesRequest mur = PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( - otherVersions, completeList, ourUpdates, ourLowThreshold); + MissedUpdatesRequest mur = + PeerSync.MissedUpdatesFinderBase.handleVersionsWithRanges( + otherVersions, completeList, ourUpdates, ourLowThreshold); if (completeList) { // request all we are missing since we want a complete list assertEquals(6L, mur.totalRequestedUpdates); @@ -582,5 +688,4 @@ private static void testHandleVersionsWithRangesMissingTwoRanges() throws Except } } } - } diff --git a/solr/core/src/test/org/apache/solr/update/PeerSyncWithBufferUpdatesTest.java b/solr/core/src/test/org/apache/solr/update/PeerSyncWithBufferUpdatesTest.java index 544685acafd..f93146c922d 100644 --- a/solr/core/src/test/org/apache/solr/update/PeerSyncWithBufferUpdatesTest.java +++ b/solr/core/src/test/org/apache/solr/update/PeerSyncWithBufferUpdatesTest.java @@ -17,6 +17,8 @@ package org.apache.solr.update; +import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM; + import java.io.IOException; import java.lang.invoke.MethodHandles; import java.util.ArrayList; @@ -24,7 +26,6 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Set; - import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrClient; @@ -40,17 +41,14 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM; - @SolrTestCaseJ4.SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776") -public class PeerSyncWithBufferUpdatesTest extends BaseDistributedSearchTestCase { +public class PeerSyncWithBufferUpdatesTest extends BaseDistributedSearchTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private static int numVersions = 100; // number of versions to use when syncing + private static int numVersions = 100; // number of versions to use when syncing private final String FROM_LEADER = DistributedUpdateProcessor.DistribPhase.FROMLEADER.toString(); - private ModifiableSolrParams seenLeader = - params(DISTRIB_UPDATE_PARAM, FROM_LEADER); + private ModifiableSolrParams seenLeader = params(DISTRIB_UPDATE_PARAM, FROM_LEADER); public PeerSyncWithBufferUpdatesTest() { stress = 0; @@ -76,37 +74,37 @@ public void test() throws Exception { long v = 0; // add some 
context for (int i = 1; i <= 10; i++) { - add(client0, seenLeader, sdoc("id",String.valueOf(i),"_version_",++v)); - add(client1, seenLeader, sdoc("id",String.valueOf(i),"_version_",v)); + add(client0, seenLeader, sdoc("id", String.valueOf(i), "_version_", ++v)); + add(client1, seenLeader, sdoc("id", String.valueOf(i), "_version_", v)); } // jetty1 was down for (int i = 11; i <= 15; i++) { - add(client0, seenLeader, sdoc("id",String.valueOf(i),"_version_",++v)); + add(client0, seenLeader, sdoc("id", String.valueOf(i), "_version_", ++v)); } // it restarted and must do PeerSync SolrCore jetty1Core = jettys.get(1).getCoreContainer().getCores().iterator().next(); jetty1Core.getUpdateHandler().getUpdateLog().bufferUpdates(); for (int i = 16; i <= 20; i++) { - add(client0, seenLeader, sdoc("id",String.valueOf(i),"_version_",++v)); - add(client1, seenLeader, sdoc("id",String.valueOf(i),"_version_",v)); + add(client0, seenLeader, sdoc("id", String.valueOf(i), "_version_", ++v)); + add(client1, seenLeader, sdoc("id", String.valueOf(i), "_version_", v)); } // some updates are on-wire - add(client0, seenLeader, sdoc("id","21","_version_",++v)); - add(client0, seenLeader, sdoc("id","22","_version_",++v)); + add(client0, seenLeader, sdoc("id", "21", "_version_", ++v)); + add(client0, seenLeader, sdoc("id", "22", "_version_", ++v)); // this will make a gap in buffer tlog - add(client0, seenLeader, sdoc("id","23","_version_",++v)); - add(client1, seenLeader, sdoc("id","23","_version_",v)); + add(client0, seenLeader, sdoc("id", "23", "_version_", ++v)); + add(client1, seenLeader, sdoc("id", "23", "_version_", v)); // client1 should be able to sync assertSync(client1, numVersions, true, shardsArr[0]); // on-wire updates arrived on jetty1 - add(client1, seenLeader, sdoc("id","21","_version_",v-2)); - add(client1, seenLeader, sdoc("id","22","_version_",v-1)); + add(client1, seenLeader, sdoc("id", "21", "_version_", v - 2)); + add(client1, seenLeader, sdoc("id", "22", "_version_", v - 1)); log.info("Apply buffered updates"); jetty1Core.getUpdateHandler().getUpdateLog().applyBufferedUpdates().get(); @@ -119,8 +117,8 @@ public void test() throws Exception { v = 2000; if (random().nextBoolean()) { for (int i = 24; i <= 30; i++) { - add(client0, seenLeader, sdoc("id",String.valueOf(i),"_version_",++v)); - add(client1, seenLeader, sdoc("id",String.valueOf(i),"_version_",v)); + add(client0, seenLeader, sdoc("id", String.valueOf(i), "_version_", ++v)); + add(client1, seenLeader, sdoc("id", String.valueOf(i), "_version_", v)); } } @@ -132,14 +130,14 @@ public void test() throws Exception { for (int i = 0; i <= 50; i++) { int kindOfUpdate = random().nextInt(100); if (docIds.size() < 10) kindOfUpdate = 0; - //TODO test atomic update + // TODO test atomic update if (kindOfUpdate <= 50) { // add a new document update, may be a duplicate of the current one int val = random().nextInt(1000); int docId = random().nextInt(10000); docIds.add(docId); - SolrInputDocument doc = sdoc("id", docId, "val_i_dvo", val, "_version_",++v); + SolrInputDocument doc = sdoc("id", docId, "val_i_dvo", val, "_version_", ++v); add(client0, seenLeader, doc); if (random().nextBoolean()) add(client1, seenLeader, doc); else onWireUpdates.add(doc); @@ -150,11 +148,11 @@ public void test() throws Exception { int docId1 = ids.get(random().nextInt(ids.size())); int docId2 = ids.get(random().nextInt(ids.size())); - String query = "id:" +docId1+" OR id:"+docId2; - String version = Long.toString(-++v); - delQ(client0,
params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_",version), query); + String query = "id:" + docId1 + " OR id:" + docId2; + String version = Long.toString(- ++v); + delQ(client0, params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", version), query); if (random().nextBoolean()) { - delQ(client1, params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_",version), query); + delQ(client1, params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", version), query); } else { onWireUpdates.add(new DeleteByQuery(query, version)); } @@ -163,11 +161,11 @@ public void test() throws Exception { // delete by id ArrayList ids = new ArrayList<>(docIds); String docId = ids.get(random().nextInt(ids.size())) + ""; - String version = Long.toString(-++v); + String version = Long.toString(- ++v); - del(client0, params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_",version), docId); + del(client0, params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", version), docId); if (random().nextBoolean()) { - del(client1, params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_",version), docId); + del(client1, params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", version), docId); } else { onWireUpdates.add(new DeleteById(docId, version)); } @@ -197,27 +195,38 @@ public DeleteById(String id, String version) { } } - private void validateDocs(Set docsAdded, SolrClient client0, SolrClient client1) throws SolrServerException, IOException { + private void validateDocs(Set docsAdded, SolrClient client0, SolrClient client1) + throws SolrServerException, IOException { client0.commit(); client1.commit(); QueryResponse qacResponse; - qacResponse = queryAndCompare(params("q", "*:*", "rows", "10000", "sort","_version_ desc"), client0, client1); + qacResponse = + queryAndCompare( + params("q", "*:*", "rows", "10000", "sort", "_version_ desc"), client0, client1); validateQACResponse(docsAdded, qacResponse); } void validateQACResponse(Set docsAdded, QueryResponse qacResponse) { Set qacDocs = new LinkedHashSet<>(); - for (int i=0; i rsp = client.request(qr); assertEquals(expectedResult, rsp.get("syncWithLeader")); } - } diff --git a/solr/core/src/test/org/apache/solr/update/PeerSyncWithIndexFingerprintCachingTest.java b/solr/core/src/test/org/apache/solr/update/PeerSyncWithIndexFingerprintCachingTest.java index b4abc4c6741..cf32823994f 100644 --- a/solr/core/src/test/org/apache/solr/update/PeerSyncWithIndexFingerprintCachingTest.java +++ b/solr/core/src/test/org/apache/solr/update/PeerSyncWithIndexFingerprintCachingTest.java @@ -20,7 +20,6 @@ import java.io.IOException; import java.util.Arrays; - import org.apache.solr.BaseDistributedSearchTestCase; import org.apache.solr.SolrTestCaseJ4.SuppressSSL; import org.apache.solr.client.solrj.SolrClient; @@ -33,22 +32,19 @@ import org.junit.Assert; import org.junit.Test; - /** - * This test is deliberately kept in different class as we don't want segment merging to kick in after deleting documents. - * This ensures that first check the cached IndexFingerprint and - * recompute it only if any documents in the segment were deleted since caching the fingerprint first time around - * - * + /** + * This test is deliberately kept in a different class as we don't want segment merging to kick in + * after deleting documents.
This ensures that we first check the cached IndexFingerprint and recompute + * it only if any documents in the segment were deleted since the fingerprint was first + * cached. */ @SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776") public class PeerSyncWithIndexFingerprintCachingTest extends BaseDistributedSearchTestCase { - private static int numVersions = 100; // number of versions to use when syncing + private static int numVersions = 100; // number of versions to use when syncing private final String FROM_LEADER = DistribPhase.FROMLEADER.toString(); - private ModifiableSolrParams seenLeader = - params(DISTRIB_UPDATE_PARAM, FROM_LEADER); - + private ModifiableSolrParams seenLeader = params(DISTRIB_UPDATE_PARAM, FROM_LEADER); + public PeerSyncWithIndexFingerprintCachingTest() { stress = 0; @@ -68,41 +64,52 @@ public void test() throws Exception { SolrClient client0 = clients.get(0); SolrClient client1 = clients.get(1); - long v =1; - for(; v < 8; ++v) { - add(client0, seenLeader, sdoc("id", ""+v,"_version_",v)); - add(client1, seenLeader, sdoc("id",""+v,"_version_",v)); - + long v = 1; + for (; v < 8; ++v) { + add(client0, seenLeader, sdoc("id", "" + v, "_version_", v)); + add(client1, seenLeader, sdoc("id", "" + v, "_version_", v)); } - client0.commit(); client1.commit(); - + client0.commit(); + client1.commit(); + IndexFingerprint before = getFingerprint(client0, Long.MAX_VALUE); - - del(client0, params(DISTRIB_UPDATE_PARAM,FROM_LEADER,"_version_",Long.toString(-++v)), "2"); - client0.commit(); - + + del(client0, params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", Long.toString(- ++v)), "2"); + client0.commit(); + + IndexFingerprint after = getFingerprint(client0, Long.MAX_VALUE); - + // make sure fingerprint before and after deleting are not the same Assert.assertTrue(IndexFingerprint.compare(before, after) != 0); - + // replica which missed the delete should be able to sync assertSync(client1, numVersions, true, shardsArr[0]); - client0.commit(); client1.commit(); + client0.commit(); + client1.commit(); - queryAndCompare(params("q", "*:*", "sort","_version_ desc"), client0, client1); + queryAndCompare(params("q", "*:*", "sort", "_version_ desc"), client0, client1); } - IndexFingerprint getFingerprint(SolrClient client, long maxVersion) throws IOException, SolrServerException { - QueryRequest qr = new QueryRequest(params("qt","/get", "getFingerprint",Long.toString(maxVersion))); + IndexFingerprint getFingerprint(SolrClient client, long maxVersion) + throws IOException, SolrServerException { + QueryRequest qr = + new QueryRequest(params("qt", "/get", "getFingerprint", Long.toString(maxVersion))); NamedList rsp = client.request(qr); return IndexFingerprint.fromObject(rsp.get("fingerprint")); } - void assertSync(SolrClient client, int numVersions, boolean expectedResult, String...
syncWith) + throws IOException, SolrServerException { + QueryRequest qr = + new QueryRequest( + params( + "qt", + "/get", + "getVersions", + Integer.toString(numVersions), + "sync", + StrUtils.join(Arrays.asList(syncWith), ','))); NamedList rsp = client.request(qr); assertEquals(expectedResult, rsp.get("sync")); } - } diff --git a/solr/core/src/test/org/apache/solr/update/PeerSyncWithLeaderAndIndexFingerprintCachingTest.java b/solr/core/src/test/org/apache/solr/update/PeerSyncWithLeaderAndIndexFingerprintCachingTest.java index 3b386f4fd22..73a3a7d54cf 100644 --- a/solr/core/src/test/org/apache/solr/update/PeerSyncWithLeaderAndIndexFingerprintCachingTest.java +++ b/solr/core/src/test/org/apache/solr/update/PeerSyncWithLeaderAndIndexFingerprintCachingTest.java @@ -19,17 +19,26 @@ import java.io.IOException; import java.util.Arrays; - import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.StrUtils; -public class PeerSyncWithLeaderAndIndexFingerprintCachingTest extends PeerSyncWithIndexFingerprintCachingTest { +public class PeerSyncWithLeaderAndIndexFingerprintCachingTest + extends PeerSyncWithIndexFingerprintCachingTest { @Override - void assertSync(SolrClient client, int numVersions, boolean expectedResult, String... syncWith) throws IOException, SolrServerException { - QueryRequest qr = new QueryRequest(params("qt","/get", "getVersions",Integer.toString(numVersions), "syncWithLeader", StrUtils.join(Arrays.asList(syncWith), ','))); + void assertSync(SolrClient client, int numVersions, boolean expectedResult, String... syncWith) + throws IOException, SolrServerException { + QueryRequest qr = + new QueryRequest( + params( + "qt", + "/get", + "getVersions", + Integer.toString(numVersions), + "syncWithLeader", + StrUtils.join(Arrays.asList(syncWith), ','))); @SuppressWarnings({"rawtypes"}) NamedList rsp = client.request(qr); assertEquals(expectedResult, (Boolean) rsp.get("syncWithLeader")); diff --git a/solr/core/src/test/org/apache/solr/update/PeerSyncWithLeaderTest.java b/solr/core/src/test/org/apache/solr/update/PeerSyncWithLeaderTest.java index 1a2748e6879..6e83e278c63 100644 --- a/solr/core/src/test/org/apache/solr/update/PeerSyncWithLeaderTest.java +++ b/solr/core/src/test/org/apache/solr/update/PeerSyncWithLeaderTest.java @@ -20,7 +20,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.Set; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrServerException; @@ -32,25 +31,35 @@ public class PeerSyncWithLeaderTest extends PeerSyncTest { @Override - protected void testOverlap(Set<Integer> docsAdded, SolrClient client0, SolrClient client1, long v) throws IOException, SolrServerException { - for (int i=0; i<numVersions; i++) { - add(client0, seenLeader, sdoc("id", Integer.toString(i+11), "_version_", v+i+1)); - docsAdded.add(i+11); + protected void testOverlap(Set<Integer> docsAdded, SolrClient client0, SolrClient client1, long v) + throws IOException, SolrServerException { + for (int i = 0; i < numVersions; i++) { + add(client0, seenLeader, sdoc("id", Integer.toString(i + 11), "_version_", v + i + 1)); + docsAdded.add(i + 11); } // sync should fail since we are too far with the leader assertSync(client1, numVersions, false, shardsArr[0]); // add a doc that was missing...
just enough to give enough overlap - add(client1, seenLeader, sdoc("id",Integer.toString(11),"_version_",v+1)); + add(client1, seenLeader, sdoc("id", Integer.toString(11), "_version_", v + 1)); assertSync(client1, numVersions, true, shardsArr[0]); validateDocs(docsAdded, client0, client1); } @Override - void assertSync(SolrClient client, int numVersions, boolean expectedResult, String... syncWith) throws IOException, SolrServerException { - QueryRequest qr = new QueryRequest(params("qt","/get", "getVersions",Integer.toString(numVersions), "syncWithLeader", StrUtils.join(Arrays.asList(syncWith), ','))); + void assertSync(SolrClient client, int numVersions, boolean expectedResult, String... syncWith) + throws IOException, SolrServerException { + QueryRequest qr = + new QueryRequest( + params( + "qt", + "/get", + "getVersions", + Integer.toString(numVersions), + "syncWithLeader", + StrUtils.join(Arrays.asList(syncWith), ','))); NamedList rsp = client.request(qr); assertEquals(expectedResult, rsp.get("syncWithLeader")); } diff --git a/solr/core/src/test/org/apache/solr/update/RootFieldTest.java b/solr/core/src/test/org/apache/solr/update/RootFieldTest.java index 8015d19d604..9170eb6cd97 100644 --- a/solr/core/src/test/org/apache/solr/update/RootFieldTest.java +++ b/solr/core/src/test/org/apache/solr/update/RootFieldTest.java @@ -17,6 +17,8 @@ package org.apache.solr.update; +import static org.hamcrest.CoreMatchers.is; + import org.apache.solr.EmbeddedSolrServerTestBase; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; @@ -30,15 +32,13 @@ import org.junit.Test; import org.junit.rules.ExpectedException; -import static org.hamcrest.CoreMatchers.is; - public class RootFieldTest extends EmbeddedSolrServerTestBase { private static boolean useRootSchema; - private static final String MESSAGE = "Update handler should create and process _root_ field " + - "unless there is no such a field in schema"; + private static final String MESSAGE = + "Update handler should create and process _root_ field " + + "unless there is no such a field in schema"; - @Rule - public ExpectedException thrown = ExpectedException.none(); + @Rule public ExpectedException thrown = ExpectedException.none(); private static boolean expectRoot() { return useRootSchema; @@ -53,71 +53,70 @@ public static void beforeTest() throws Exception { } @Test - public void testLegacyBlockProcessing() throws Exception - { + public void testLegacyBlockProcessing() throws Exception { SolrClient client = getSolrClient(); - client.deleteByQuery("*:*");// delete everything! + client.deleteByQuery("*:*"); // delete everything! 
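For readers skimming the whitespace-only changes in the RootFieldTest hunk that continues below, the contract under test is easy to lose in the reflow. The following is a hedged, standalone sketch of that contract, not part of the patch itself; the class and method names are invented for illustration, while the SolrJ calls and the "_root_" semantics mirror the assertions in the hunk.

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrInputDocument;

// Illustrative sketch only: what RootFieldTest asserts for a child-free document.
final class RootFieldContractSketch {
  // Returns the value Solr stored in _root_ for a freshly added child-free doc:
  // expected to equal the doc's own id when the schema defines _root_, null otherwise.
  static Object rootFieldAfterAdd(SolrClient client, String id) throws Exception {
    SolrInputDocument doc = new SolrInputDocument();
    doc.addField("id", id);
    doc.addField("name", "child free doc");
    client.add(doc);
    client.commit();
    SolrQuery q = new SolrQuery("id:" + id);
    q.setFields("id", "name", "_root_"); // request _root_ explicitly, as the test does
    SolrDocument found = client.query(q).getResults().get(0);
    return found.getFieldValue("_root_");
  }
}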
// Add child free doc SolrInputDocument docToUpdate = new SolrInputDocument(); String docId = "11"; - docToUpdate.addField( "id", docId); - docToUpdate.addField( "name", "child free doc" ); + docToUpdate.addField("id", docId); + docToUpdate.addField("name", "child free doc"); client.add(docToUpdate); client.commit(); SolrQuery query = new SolrQuery(); - query.setQuery( "*:*" ); - query.set( CommonParams.FL, "id,name,_root_" ); + query.setQuery("*:*"); + query.set(CommonParams.FL, "id,name,_root_"); SolrDocumentList results = client.query(query).getResults(); assertThat(results.getNumFound(), is(1L)); - SolrDocument foundDoc = results.get( 0 ); + SolrDocument foundDoc = results.get(0); // Check retrieved field values - assertThat(foundDoc.getFieldValue( "id" ), is(docId)); - assertThat(foundDoc.getFieldValue( "name" ), is("child free doc")); + assertThat(foundDoc.getFieldValue("id"), is(docId)); + assertThat(foundDoc.getFieldValue("name"), is("child free doc")); String expectedRootValue = expectRoot() ? docId : null; - assertThat(MESSAGE, foundDoc.getFieldValue( "_root_" ), is(expectedRootValue)); + assertThat(MESSAGE, foundDoc.getFieldValue("_root_"), is(expectedRootValue)); // Update the doc - docToUpdate.setField( "name", "updated doc" ); + docToUpdate.setField("name", "updated doc"); client.add(docToUpdate); client.commit(); results = client.query(query).getResults(); - assertEquals( 1, results.getNumFound() ); - foundDoc = results.get( 0 ); + assertEquals(1, results.getNumFound()); + foundDoc = results.get(0); // Check updated field values - assertThat(foundDoc.getFieldValue( "id" ), is(docId)); - assertThat(foundDoc.getFieldValue( "name" ), is("updated doc")); - assertThat(MESSAGE, foundDoc.getFieldValue( "_root_" ), is(expectedRootValue)); + assertThat(foundDoc.getFieldValue("id"), is(docId)); + assertThat(foundDoc.getFieldValue("name"), is("updated doc")); + assertThat(MESSAGE, foundDoc.getFieldValue("_root_"), is(expectedRootValue)); } @Test public void testUpdateWithChildDocs() throws Exception { SolrClient client = getSolrClient(); - client.deleteByQuery("*:*");// delete everything! + client.deleteByQuery("*:*"); // delete everything! 
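Likewise, testUpdateWithChildDocs (continuing below) exercises the schema guard for nested documents. A minimal, hedged sketch of that guard follows; it is illustrative only, with an invented class name, and the exception message quoted in the comment comes from the hunk itself.

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;

// Illustrative sketch only: adding a parent with a child document succeeds only when
// the schema defines both a uniqueKey field and _root_ with the same fieldType.
final class NestedDocGuardSketch {
  static boolean tryAddParentWithChild(SolrClient client) throws Exception {
    SolrInputDocument parent = new SolrInputDocument();
    parent.addField("id", "11");
    parent.addField("name", "parent doc with a child");
    SolrInputDocument child = new SolrInputDocument();
    child.addField("id", "111");
    child.addField("name", "child doc");
    parent.addChildDocument(child);
    try {
      client.add(parent);
      client.commit();
      return true; // schema supports block join
    } catch (SolrException e) {
      // Expected message per the hunk: "Unable to index docs with children: the schema must
      // include definitions for both a uniqueKey field and the '_root_' field, using the
      // exact same fieldType"
      return false;
    }
  }
}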
// Add child free doc SolrInputDocument docToUpdate = new SolrInputDocument(); String docId = "11"; - docToUpdate.addField( "id", docId); - docToUpdate.addField( "name", "parent doc with a child" ); + docToUpdate.addField("id", docId); + docToUpdate.addField("name", "parent doc with a child"); SolrInputDocument child = new SolrInputDocument(); child.addField("id", "111"); child.addField("name", "child doc"); docToUpdate.addChildDocument(child); if (!useRootSchema) { thrown.expect(SolrException.class); - thrown.expectMessage("Unable to index docs with children:" + - " the schema must include definitions for both a uniqueKey field" + - " and the '_root_' field, using the exact same fieldType"); + thrown.expectMessage( + "Unable to index docs with children:" + + " the schema must include definitions for both a uniqueKey field" + + " and the '_root_' field, using the exact same fieldType"); } client.add(docToUpdate); client.commit(); } - } diff --git a/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java b/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java index 42e56a6de99..0815b8398b6 100644 --- a/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java +++ b/solr/core/src/test/org/apache/solr/update/SoftAutoCommitTest.java @@ -24,13 +24,12 @@ import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; - import org.apache.lucene.util.Constants; import org.apache.lucene.util.LuceneTestCase.Slow; +import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.core.SolrCore; import org.apache.solr.core.SolrEventListener; import org.apache.solr.search.SolrIndexSearcher; -import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.util.TestHarness; import org.junit.Before; import org.junit.BeforeClass; @@ -39,21 +38,16 @@ /** * Test auto commit functionality in a way that doesn't suck. - *

- * AutoCommitTest is an abomination that is way to brittle in how it
- * tries to check that commits happened, and when they happened.
- * The goal of this test class is to (ultimately) completely replace all
- * of the functionality of that test class using:
- * <p>
+ *
+ * <p>AutoCommitTest is an abomination that is way too brittle in how it tries to check that
+ * commits happened, and when they happened. The goal of this test class is to (ultimately)
+ * completely replace all of the functionality of that test class using:
+ *
 * <ul>
- *   <li>A more robust monitor of commit/newSearcher events that records
- *       the times of those events in a queue that can be polled.
- *       Multiple events in rapid succession are not lost.
- *   </li>
- *   <li>Timing checks that are forgiving of slow machines and use
- *       knowledge of how slow A->B was to affect the expectation of
- *       how slow B->C will be
- *   </li>
+ *   <li>A more robust monitor of commit/newSearcher events that records the times of those events
+ *       in a queue that can be polled. Multiple events in rapid succession are not lost.
+ *   <li>Timing checks that are forgiving of slow machines and use knowledge of how slow A->B was
+ *       to affect the expectation of how slow B->C will be
 * </ul>
*/ @Slow @@ -67,12 +61,13 @@ public static void beforeClass() throws Exception { private MockEventListener monitor; private DirectUpdateHandler2 updater; - + @Before public void createMonitor() throws Exception { - assumeFalse("This test is not working on Windows (or maybe machines with only 2 CPUs)", - Constants.WINDOWS); - + assumeFalse( + "This test is not working on Windows (or maybe machines with only 2 CPUs)", + Constants.WINDOWS); + SolrCore core = h.getCore(); updater = (DirectUpdateHandler2) core.getUpdateHandler(); @@ -86,13 +81,12 @@ public void createMonitor() throws Exception { // isolate searcher getting ready from this test monitor.searcher.poll(5000, MILLISECONDS); } - + @Override public void setUp() throws Exception { super.setUp(); - } - + public void testSoftAndHardCommitMaxDocs() throws Exception { // NOTE WHEN READING THIS TEST... @@ -102,16 +96,17 @@ public void testSoftAndHardCommitMaxDocs() throws Exception { final int softCommitMaxDocs = 5; final int hardCommitMaxDocs = 7; - assert softCommitMaxDocs < hardCommitMaxDocs; // remainder of test designed with these assumptions - + // remainder of test designed with these assumptions + assert softCommitMaxDocs < hardCommitMaxDocs; + CommitTracker hardTracker = updater.commitTracker; CommitTracker softTracker = updater.softCommitTracker; - + // wait out any leaked commits monitor.hard.poll(3000, MILLISECONDS); monitor.soft.poll(0, MILLISECONDS); monitor.clear(); - + softTracker.setDocsUpperBound(softCommitMaxDocs); softTracker.setTimeUpperBound(-1); hardTracker.setDocsUpperBound(hardCommitMaxDocs); @@ -123,21 +118,21 @@ public void testSoftAndHardCommitMaxDocs() throws Exception { // add num docs up to the soft commit upper bound for (int i = 0; i < softCommitMaxDocs; i++) { - assertU(adoc("id", ""+(8000 + i), "subject", "testMaxDocs")); + assertU(adoc("id", "" + (8000 + i), "subject", "testMaxDocs")); } // the first soft commit we see must be after this. final long minSoftCommitNanos = System.nanoTime(); - + // now add the doc that will trigger the soft commit, // as well as additional docs up to the hard commit upper bound for (int i = softCommitMaxDocs; i < hardCommitMaxDocs; i++) { - assertU(adoc("id", ""+(8000 + i), "subject", "testMaxDocs")); + assertU(adoc("id", "" + (8000 + i), "subject", "testMaxDocs")); } // the first hard commit we see must be after this. 
final long minHardCommitNanos = System.nanoTime(); // a final doc to trigger the hard commit - assertU(adoc("id", ""+(8000 + hardCommitMaxDocs), "subject", "testMaxDocs")); + assertU(adoc("id", "" + (8000 + hardCommitMaxDocs), "subject", "testMaxDocs")); // now poll our monitors for the timestamps on the first commits final Long firstSoftNanos = monitor.soft.poll(5000, MILLISECONDS); @@ -145,54 +140,65 @@ public void testSoftAndHardCommitMaxDocs() throws Exception { assertNotNull("didn't get a single softCommit after adding the max docs", firstSoftNanos); assertNotNull("didn't get a single hardCommit after adding the max docs", firstHardNanos); - - assertTrue("softCommit @ " + firstSoftNanos + "ns is before the maxDocs should have triggered it @ " + - minSoftCommitNanos + "ns", - minSoftCommitNanos < firstSoftNanos); - assertTrue("hardCommit @ " + firstHardNanos + "ns is before the maxDocs should have triggered it @ " + - minHardCommitNanos + "ns", - minHardCommitNanos < firstHardNanos); - - // wait a bit, w/o other action we shouldn't see any new hard/soft commits - assertNull("Got a hard commit we weren't expecting", - monitor.hard.poll(1000, MILLISECONDS)); - assertNull("Got a soft commit we weren't expecting", - monitor.soft.poll(0, MILLISECONDS)); - + + assertTrue( + "softCommit @ " + + firstSoftNanos + + "ns is before the maxDocs should have triggered it @ " + + minSoftCommitNanos + + "ns", + minSoftCommitNanos < firstSoftNanos); + assertTrue( + "hardCommit @ " + + firstHardNanos + + "ns is before the maxDocs should have triggered it @ " + + minHardCommitNanos + + "ns", + minHardCommitNanos < firstHardNanos); + + // wait a bit, w/o other action we shouldn't see any new hard/soft commits + assertNull("Got a hard commit we weren't expecting", monitor.hard.poll(1000, MILLISECONDS)); + assertNull("Got a soft commit we weren't expecting", monitor.soft.poll(0, MILLISECONDS)); + monitor.assertSaneOffers(); monitor.clear(); } public void testSoftAndHardCommitMaxTimeMixedAdds() throws Exception { - doTestSoftAndHardCommitMaxTimeMixedAdds(CommitWithinType.NONE); + doTestSoftAndHardCommitMaxTimeMixedAdds(CommitWithinType.NONE); } + public void testSoftCommitWithinAndHardCommitMaxTimeMixedAdds() throws Exception { doTestSoftAndHardCommitMaxTimeMixedAdds(CommitWithinType.SOFT); } + public void testHardCommitWithinAndSoftCommitMaxTimeMixedAdds() throws Exception { doTestSoftAndHardCommitMaxTimeMixedAdds(CommitWithinType.HARD); } + private void doTestSoftAndHardCommitMaxTimeMixedAdds(final CommitWithinType commitWithinType) - throws Exception { - + throws Exception { + final int softCommitWaitMillis = 500; final int hardCommitWaitMillis = 1200; final int commitWithin = commitWithinType.useValue(softCommitWaitMillis, hardCommitWaitMillis); - + CommitTracker hardTracker = updater.commitTracker; CommitTracker softTracker = updater.softCommitTracker; updater.setCommitWithinSoftCommit(commitWithinType.equals(CommitWithinType.SOFT)); - + // wait out any leaked commits monitor.soft.poll(softCommitWaitMillis * 2, MILLISECONDS); monitor.hard.poll(hardCommitWaitMillis * 2, MILLISECONDS); - + int startingHardCommits = hardTracker.getCommitCount(); int startingSoftCommits = softTracker.getCommitCount(); - - softTracker.setTimeUpperBound(commitWithinType.equals(CommitWithinType.SOFT) ? -1 : softCommitWaitMillis); + + softTracker.setTimeUpperBound( + commitWithinType.equals(CommitWithinType.SOFT) ? 
-1 : softCommitWaitMillis); softTracker.setDocsUpperBound(-1); - hardTracker.setTimeUpperBound(commitWithinType.equals(CommitWithinType.HARD) ? -1 : hardCommitWaitMillis); + hardTracker.setTimeUpperBound( + commitWithinType.equals(CommitWithinType.HARD) ? -1 : hardCommitWaitMillis); hardTracker.setDocsUpperBound(-1); // simplify whats going on by only having soft auto commits trigger new searchers hardTracker.setOpenSearcher(false); @@ -208,11 +214,10 @@ private void doTestSoftAndHardCommitMaxTimeMixedAdds(final CommitWithinType comm assertNotNull("soft529 wasn't fast enough", soft529); monitor.assertSaneOffers(); - // wait for the hard commit Long hard529 = monitor.hard.poll(hardCommitWaitMillis * 5, MILLISECONDS); assertNotNull("hard529 wasn't fast enough", hard529); - + // check for the searcher, should have happened right after soft commit Long searcher529 = monitor.searcher.poll(5000, MILLISECONDS); assertNotNull("searcher529 wasn't fast enough", searcher529); @@ -221,73 +226,85 @@ private void doTestSoftAndHardCommitMaxTimeMixedAdds(final CommitWithinType comm // toss in another doc, shouldn't affect first hard commit time we poll assertU(adoc(commitWithin, "id", "530", "subject", "just for noise/activity")); - monitor.assertSaneOffers(); final long soft529Ms = TimeUnit.MILLISECONDS.convert(soft529 - add529, TimeUnit.NANOSECONDS); - assertTrue("soft529 occurred too fast, in " + - soft529Ms + "ms, less than soft commit interval " + softCommitWaitMillis, + assertTrue( + "soft529 occurred too fast, in " + + soft529Ms + + "ms, less than soft commit interval " + + softCommitWaitMillis, soft529Ms >= softCommitWaitMillis); final long hard529Ms = TimeUnit.MILLISECONDS.convert(hard529 - add529, TimeUnit.NANOSECONDS); - assertTrue("hard529 occurred too fast, in " + - hard529Ms + "ms, less than hard commit interval " + hardCommitWaitMillis, + assertTrue( + "hard529 occurred too fast, in " + + hard529Ms + + "ms, less than hard commit interval " + + hardCommitWaitMillis, hard529Ms >= hardCommitWaitMillis); // however slow the machine was to do the soft commit compared to expected, - // assume newSearcher had some magnitude of that much overhead as well + // assume newSearcher had some magnitude of that much overhead as well long slowTestFudge = Math.max(300, 12 * (soft529Ms - softCommitWaitMillis)); - final long softCommitToSearcherOpenMs = TimeUnit.MILLISECONDS.convert(searcher529 - soft529, TimeUnit.NANOSECONDS); - assertTrue("searcher529 wasn't soon enough after soft529: Took " + - softCommitToSearcherOpenMs + "ms, >= acceptable " + slowTestFudge + "ms (fudge)", + final long softCommitToSearcherOpenMs = + TimeUnit.MILLISECONDS.convert(searcher529 - soft529, TimeUnit.NANOSECONDS); + assertTrue( + "searcher529 wasn't soon enough after soft529: Took " + + softCommitToSearcherOpenMs + + "ms, >= acceptable " + + slowTestFudge + + "ms (fudge)", softCommitToSearcherOpenMs < slowTestFudge); - assertTrue("hard529 was before searcher529: " + - searcher529 + " !<= " + hard529, - searcher529 <= hard529); + assertTrue( + "hard529 was before searcher529: " + searcher529 + " !<= " + hard529, + searcher529 <= hard529); monitor.assertSaneOffers(); // there may have been (or will be) a second hard commit for 530 Long hard530 = monitor.hard.poll(hardCommitWaitMillis * 5, MILLISECONDS); - assertEquals("Tracker reports too many hard commits", - (null == hard530 ? 1 : 2), - hardTracker.getCommitCount() - startingHardCommits); + assertEquals( + "Tracker reports too many hard commits", + (null == hard530 ? 
1 : 2), + hardTracker.getCommitCount() - startingHardCommits); - // there may have been a second soft commit for 530, + // there may have been a second soft commit for 530, // but if so it must have already happend Long soft530 = monitor.soft.poll(0, MILLISECONDS); if (null != soft530) { - assertEquals("Tracker reports too many soft commits", - 2, softTracker.getCommitCount() - startingSoftCommits); + assertEquals( + "Tracker reports too many soft commits", + 2, + softTracker.getCommitCount() - startingSoftCommits); if (null != hard530) { - assertTrue("soft530 after hard530: " + - soft530 + " !<= " + hard530, - soft530 <= hard530); + assertTrue("soft530 after hard530: " + soft530 + " !<= " + hard530, soft530 <= hard530); } else { - assertTrue("soft530 after hard529 but no hard530: " + - soft530 + " !<= " + hard529, - soft530 <= hard529); + assertTrue( + "soft530 after hard529 but no hard530: " + soft530 + " !<= " + hard529, + soft530 <= hard529); } } else { - assertEquals("Tracker reports too many soft commits", - 1, softTracker.getCommitCount() - startingSoftCommits); + assertEquals( + "Tracker reports too many soft commits", + 1, + softTracker.getCommitCount() - startingSoftCommits); } - + if (null != soft530 || null != hard530) { - assertNotNull("at least one extra commit for 530, but no searcher", - monitor.searcher.poll(0, MILLISECONDS)); + assertNotNull( + "at least one extra commit for 530, but no searcher", + monitor.searcher.poll(0, MILLISECONDS)); } // clear commits monitor.hard.clear(); monitor.soft.clear(); - // wait a bit, w/o other action we shouldn't see any - // new hard/soft commits - assertNull("Got a hard commit we weren't expecting", - monitor.hard.poll(1000, MILLISECONDS)); - assertNull("Got a soft commit we weren't expecting", - monitor.soft.poll(0, MILLISECONDS)); + // wait a bit, w/o other action we shouldn't see any + // new hard/soft commits + assertNull("Got a hard commit we weren't expecting", monitor.hard.poll(1000, MILLISECONDS)); + assertNull("Got a soft commit we weren't expecting", monitor.soft.poll(0, MILLISECONDS)); monitor.assertSaneOffers(); monitor.searcher.clear(); @@ -296,15 +313,18 @@ private void doTestSoftAndHardCommitMaxTimeMixedAdds(final CommitWithinType comm public void testSoftAndHardCommitMaxTimeDelete() throws Exception { doTestSoftAndHardCommitMaxTimeDelete(CommitWithinType.NONE); } + public void testSoftCommitWithinAndHardCommitMaxTimeDelete() throws Exception { doTestSoftAndHardCommitMaxTimeDelete(CommitWithinType.SOFT); } + public void testHardCommitWithinAndSoftCommitMaxTimeDelete() throws Exception { doTestSoftAndHardCommitMaxTimeDelete(CommitWithinType.HARD); } + private void doTestSoftAndHardCommitMaxTimeDelete(final CommitWithinType commitWithinType) - throws Exception { - + throws Exception { + final int softCommitWaitMillis = 500; final int hardCommitWaitMillis = 1200; final int commitWithin = commitWithinType.useValue(softCommitWaitMillis, hardCommitWaitMillis); @@ -312,18 +332,20 @@ private void doTestSoftAndHardCommitMaxTimeDelete(final CommitWithinType commitW CommitTracker hardTracker = updater.commitTracker; CommitTracker softTracker = updater.softCommitTracker; updater.setCommitWithinSoftCommit(commitWithinType.equals(CommitWithinType.SOFT)); - + int startingHardCommits = hardTracker.getCommitCount(); int startingSoftCommits = softTracker.getCommitCount(); - - softTracker.setTimeUpperBound(commitWithinType.equals(CommitWithinType.SOFT) ? 
-1 : softCommitWaitMillis); + + softTracker.setTimeUpperBound( + commitWithinType.equals(CommitWithinType.SOFT) ? -1 : softCommitWaitMillis); softTracker.setDocsUpperBound(-1); - hardTracker.setTimeUpperBound(commitWithinType.equals(CommitWithinType.HARD) ? -1 : hardCommitWaitMillis); + hardTracker.setTimeUpperBound( + commitWithinType.equals(CommitWithinType.HARD) ? -1 : hardCommitWaitMillis); hardTracker.setDocsUpperBound(-1); // we don't want to overlap soft and hard opening searchers - this now blocks commits and we // are looking for prompt timings hardTracker.setOpenSearcher(false); - + // add a doc and force a commit assertU(adoc(commitWithin, "id", "529", "subject", "the doc we care about in this test")); assertU(commit()); @@ -335,7 +357,7 @@ private void doTestSoftAndHardCommitMaxTimeDelete(final CommitWithinType commitW // Delete the document long del529 = System.nanoTime(); - assertU( delI("529", "commitWithin", ""+commitWithin)); + assertU(delI("529", "commitWithin", "" + commitWithin)); monitor.assertSaneOffers(); @@ -343,7 +365,7 @@ private void doTestSoftAndHardCommitMaxTimeDelete(final CommitWithinType commitW soft529 = monitor.soft.poll(softCommitWaitMillis * 5, MILLISECONDS); assertNotNull("soft529 wasn't fast enough", soft529); monitor.assertSaneOffers(); - + // check for the searcher, should have happened right after soft commit Long searcher529 = monitor.searcher.poll(softCommitWaitMillis, MILLISECONDS); assertNotNull("searcher529 wasn't fast enough", searcher529); @@ -358,61 +380,73 @@ private void doTestSoftAndHardCommitMaxTimeDelete(final CommitWithinType commitW monitor.assertSaneOffers(); final long soft529Ms = TimeUnit.MILLISECONDS.convert(soft529 - del529, TimeUnit.NANOSECONDS); - assertTrue("soft529 occurred too fast, in " + soft529Ms + - "ms, less than soft commit interval " + softCommitWaitMillis, + assertTrue( + "soft529 occurred too fast, in " + + soft529Ms + + "ms, less than soft commit interval " + + softCommitWaitMillis, soft529Ms >= softCommitWaitMillis); final long hard529Ms = TimeUnit.MILLISECONDS.convert(hard529 - del529, TimeUnit.NANOSECONDS); - assertTrue("hard529 occurred too fast, in " + - hard529Ms + "ms, less than hard commit interval " + hardCommitWaitMillis, + assertTrue( + "hard529 occurred too fast, in " + + hard529Ms + + "ms, less than hard commit interval " + + hardCommitWaitMillis, hard529Ms >= hardCommitWaitMillis); // however slow the machine was to do the soft commit compared to expected, // assume newSearcher had some magnitude of that much overhead as well long slowTestFudge = Math.max(300, 12 * (soft529Ms - softCommitWaitMillis)); - final long softCommitToSearcherOpenMs = TimeUnit.MILLISECONDS.convert(searcher529 - soft529, TimeUnit.NANOSECONDS); - assertTrue("searcher529 wasn't soon enough after soft529: Took " + - softCommitToSearcherOpenMs + "ms, >= acceptable " + slowTestFudge + "ms (fudge)", + final long softCommitToSearcherOpenMs = + TimeUnit.MILLISECONDS.convert(searcher529 - soft529, TimeUnit.NANOSECONDS); + assertTrue( + "searcher529 wasn't soon enough after soft529: Took " + + softCommitToSearcherOpenMs + + "ms, >= acceptable " + + slowTestFudge + + "ms (fudge)", softCommitToSearcherOpenMs < slowTestFudge); - assertTrue("hard529 was before searcher529: " + - searcher529 + " !<= " + hard529, - searcher529 <= hard529); + assertTrue( + "hard529 was before searcher529: " + searcher529 + " !<= " + hard529, + searcher529 <= hard529); // ensure we wait for the last searcher we triggered with 550 monitor.searcher.poll(5000, 
MILLISECONDS); - + // ensure we wait for the commits on 550 monitor.hard.poll(5000, MILLISECONDS); monitor.soft.poll(5000, MILLISECONDS); - + // clear commits monitor.hard.clear(); monitor.soft.clear(); - - // wait a bit, w/o other action we shouldn't see any - // new hard/soft commits - assertNull("Got a hard commit we weren't expecting", - monitor.hard.poll(1000, MILLISECONDS)); - assertNull("Got a soft commit we weren't expecting", - monitor.soft.poll(0, MILLISECONDS)); + + // wait a bit, w/o other action we shouldn't see any + // new hard/soft commits + assertNull("Got a hard commit we weren't expecting", monitor.hard.poll(1000, MILLISECONDS)); + assertNull("Got a soft commit we weren't expecting", monitor.soft.poll(0, MILLISECONDS)); monitor.assertSaneOffers(); - + monitor.searcher.clear(); } public void testSoftAndHardCommitMaxTimeRapidAdds() throws Exception { doTestSoftAndHardCommitMaxTimeRapidAdds(CommitWithinType.NONE); } + public void testSoftCommitWithinAndHardCommitMaxTimeRapidAdds() throws Exception { doTestSoftAndHardCommitMaxTimeRapidAdds(CommitWithinType.SOFT); } + public void testHardCommitWithinAndSoftCommitMaxTimeRapidAdds() throws Exception { doTestSoftAndHardCommitMaxTimeRapidAdds(CommitWithinType.HARD); } + public void doTestSoftAndHardCommitMaxTimeRapidAdds(final CommitWithinType commitWithinType) - throws Exception { - + throws Exception { + final int softCommitWaitMillis = 500; final int hardCommitWaitMillis = 1200; final int commitWithin = commitWithinType.useValue(softCommitWaitMillis, hardCommitWaitMillis); @@ -420,30 +454,33 @@ public void doTestSoftAndHardCommitMaxTimeRapidAdds(final CommitWithinType commi CommitTracker hardTracker = updater.commitTracker; CommitTracker softTracker = updater.softCommitTracker; updater.setCommitWithinSoftCommit(commitWithinType.equals(CommitWithinType.SOFT)); - - softTracker.setTimeUpperBound(commitWithinType.equals(CommitWithinType.SOFT) ? -1 : softCommitWaitMillis); + + softTracker.setTimeUpperBound( + commitWithinType.equals(CommitWithinType.SOFT) ? -1 : softCommitWaitMillis); softTracker.setDocsUpperBound(-1); - hardTracker.setTimeUpperBound(commitWithinType.equals(CommitWithinType.HARD) ? -1 : hardCommitWaitMillis); + hardTracker.setTimeUpperBound( + commitWithinType.equals(CommitWithinType.HARD) ? 
-1 : hardCommitWaitMillis); hardTracker.setDocsUpperBound(-1); // we don't want to overlap soft and hard opening searchers - this now blocks commits and we // are looking for prompt timings hardTracker.setOpenSearcher(false); - + // try to add 5 docs really fast final long preFirstNanos = System.nanoTime(); - for( int i=0;i<5; i++ ) { - assertU(adoc(commitWithin, "id", ""+500 + i, "subject", "five fast docs")); + for (int i = 0; i < 5; i++) { + assertU(adoc(commitWithin, "id", "" + 500 + i, "subject", "five fast docs")); } final long postLastNanos = System.nanoTime(); - + monitor.assertSaneOffers(); final long maxTimeMillis = MILLISECONDS.convert(postLastNanos - preFirstNanos, NANOSECONDS); log.info("maxTimeMillis: {}ns - {}ns == {}ms", postLastNanos, preFirstNanos, maxTimeMillis); - + // NOTE: explicitly using truncated division of longs to round down - // even if evenly divisible, need +1 to account for possible "last" commit triggered by "last" doc + // even if evenly divisible, need +1 to account for possible "last" commit triggered by "last" + // doc final long maxExpectedSoft = 1L + (maxTimeMillis / softCommitWaitMillis); final long maxExpectedHard = 1L + (maxTimeMillis / hardCommitWaitMillis); @@ -456,34 +493,36 @@ public void doTestSoftAndHardCommitMaxTimeRapidAdds(final CommitWithinType commi // - any commit we do get doesn't happen "too fast" relative the previous commit // (or first doc added for the first commit) monitor.assertSaneOffers(); - assertRapidMultiCommitQueues("softCommit", preFirstNanos, softCommitWaitMillis, - maxExpectedSoft, monitor.soft); + assertRapidMultiCommitQueues( + "softCommit", preFirstNanos, softCommitWaitMillis, maxExpectedSoft, monitor.soft); monitor.assertSaneOffers(); - assertRapidMultiCommitQueues("hardCommit", preFirstNanos, hardCommitWaitMillis, - maxExpectedHard, monitor.hard); + assertRapidMultiCommitQueues( + "hardCommit", preFirstNanos, hardCommitWaitMillis, maxExpectedHard, monitor.hard); // now wait a bit... // w/o other action we shouldn't see any additional hard/soft commits - assertNull("Got a hard commit we weren't expecting", - monitor.hard.poll(1000, MILLISECONDS)); - assertNull("Got a soft commit we weren't expecting", - monitor.soft.poll(0, MILLISECONDS)); + assertNull("Got a hard commit we weren't expecting", monitor.hard.poll(1000, MILLISECONDS)); + assertNull("Got a soft commit we weren't expecting", monitor.soft.poll(0, MILLISECONDS)); monitor.assertSaneOffers(); - } /** * Helper method + * * @see #testSoftAndHardCommitMaxTimeRapidAdds */ - private static void assertRapidMultiCommitQueues - (final String debug, final long startTimestampNanos, final long commitWaitMillis, - final long maxNumCommits, final BlockingQueue queue) throws InterruptedException { + private static void assertRapidMultiCommitQueues( + final String debug, + final long startTimestampNanos, + final long commitWaitMillis, + final long maxNumCommits, + final BlockingQueue queue) + throws InterruptedException { assert 0 < maxNumCommits; - + // do all our math/comparisons in Nanos... 
final long commitWaitNanos = NANOSECONDS.convert(commitWaitMillis, MILLISECONDS); @@ -492,18 +531,32 @@ public void doTestSoftAndHardCommitMaxTimeRapidAdds(final CommitWithinType commi int count = 1; Long commitNanos = queue.poll(commitWaitMillis * 6, MILLISECONDS); assertNotNull(debug + ": did not find a single commit", commitNanos); - + while (null != commitNanos) { if (commitNanos < prevTimestampNanos + commitWaitMillis) { - fail(debug + ": commit#" + count + " has TS too low relative to previous TS & commitWait: " + - "commitNanos=" + commitNanos + ", prevTimestampNanos=" + prevTimestampNanos + - ", commitWaitMillis=" + commitWaitMillis); + fail( + debug + + ": commit#" + + count + + " has TS too low relative to previous TS & commitWait: " + + "commitNanos=" + + commitNanos + + ", prevTimestampNanos=" + + prevTimestampNanos + + ", commitWaitMillis=" + + commitWaitMillis); } if (maxNumCommits < count) { - fail(debug + ": commit#" + count + " w/ commitNanos=" + commitNanos + - ", but maxNumCommits=" +maxNumCommits); + fail( + debug + + ": commit#" + + count + + " w/ commitNanos=" + + commitNanos + + ", but maxNumCommits=" + + maxNumCommits); } - + prevTimestampNanos = commitNanos; count++; commitNanos = queue.poll(commitWaitMillis * 3, MILLISECONDS); @@ -513,20 +566,24 @@ public void doTestSoftAndHardCommitMaxTimeRapidAdds(final CommitWithinType commi /** enum for indicating if a test should use commitWithin, and if so what type: hard or soft */ private static enum CommitWithinType { NONE { - @Override public int useValue(final int softCommitWaitMillis, final int hardCommitWaitMillis) { + @Override + public int useValue(final int softCommitWaitMillis, final int hardCommitWaitMillis) { return -1; } }, SOFT { - @Override public int useValue(final int softCommitWaitMillis, final int hardCommitWaitMillis) { + @Override + public int useValue(final int softCommitWaitMillis, final int hardCommitWaitMillis) { return softCommitWaitMillis; } }, HARD { - @Override public int useValue(final int softCommitWaitMillis, final int hardCommitWaitMillis) { + @Override + public int useValue(final int softCommitWaitMillis, final int hardCommitWaitMillis) { return hardCommitWaitMillis; } }; + public abstract int useValue(final int softCommitWaitMillis, final int hardCommitWaitMillis); } @@ -542,7 +599,7 @@ public String adoc(int commitWithin, String... 
fieldsAndValues) { class MockEventListener implements SolrEventListener { - // use capacity bound Queues just so we're sure we don't OOM + // use capacity bound Queues just so we're sure we don't OOM public final BlockingQueue soft = new LinkedBlockingQueue<>(1000); public final BlockingQueue hard = new LinkedBlockingQueue<>(1000); public final BlockingQueue searcher = new LinkedBlockingQueue<>(1000); @@ -550,37 +607,36 @@ class MockEventListener implements SolrEventListener { // if non enpty, then at least one offer failed (queues full) private StringBuffer fail = new StringBuffer(); - public MockEventListener() { /* NOOP */ } - + public MockEventListener() { + /* NOOP */ + } + @Override - public void newSearcher(SolrIndexSearcher newSearcher, - SolrIndexSearcher currentSearcher) { + public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher currentSearcher) { Long now = System.nanoTime(); if (!searcher.offer(now)) fail.append(", newSearcher @ " + now); } - + @Override public void postCommit() { Long now = System.nanoTime(); if (!hard.offer(now)) fail.append(", hardCommit @ " + now); } - + @Override public void postSoftCommit() { Long now = System.nanoTime(); if (!soft.offer(now)) fail.append(", softCommit @ " + now); } - + public void clear() { soft.clear(); hard.clear(); searcher.clear(); fail.setLength(0); } - + public void assertSaneOffers() { - assertEquals("Failure of MockEventListener" + fail.toString(), - 0, fail.length()); + assertEquals("Failure of MockEventListener" + fail.toString(), 0, fail.length()); } } - diff --git a/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java b/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java index cd8ff226a1a..16d4e9e48c2 100644 --- a/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java +++ b/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java @@ -63,13 +63,17 @@ import org.junit.Test; import org.xml.sax.SAXException; -// See: https://issues.apache.org/jira/browse/SOLR-12028 Tests cannot remove files on Windows machines occasionally +// See: https://issues.apache.org/jira/browse/SOLR-12028 Tests cannot remove files on Windows +// machines occasionally public class SolrCmdDistributorTest extends BaseDistributedSearchTestCase { - - private static enum NodeType {FORWARD, STANDARD}; - + + private static enum NodeType { + FORWARD, + STANDARD + }; + private AtomicInteger id = new AtomicInteger(); - + @BeforeClass public static void beforeClass() throws Exception { // we can't use the Randomized merge policy because the test depends on @@ -85,22 +89,22 @@ public static void afterClass() { } private UpdateShardHandler updateShardHandler; - + public SolrCmdDistributorTest() throws ParserConfigurationException, IOException, SAXException { updateShardHandler = new UpdateShardHandler(UpdateShardHandlerConfig.DEFAULT); - + stress = 0; } public static String getSchemaFile() { return "schema.xml"; } - - public static String getSolrConfigFile() { + + public static String getSolrConfigFile() { // use this because it has /update and is minimal return "solrconfig-tlog.xml"; } - + // TODO: for now we redefine this method so that it pulls from the above // we don't get helpful override behavior due to the method being static @Override @@ -111,8 +115,12 @@ protected void createServers(int numShards) throws Exception { File controlHome = testDir.toPath().resolve("control").toFile(); seedSolrHome(controlHome); - 
writeCoreProperties(controlHome.toPath().resolve("cores").resolve(DEFAULT_TEST_CORENAME), DEFAULT_TEST_CORENAME); - controlJetty = createJetty(controlHome, testDir + "/control/data", null, getSolrConfigFile(), getSchemaFile()); + writeCoreProperties( + controlHome.toPath().resolve("cores").resolve(DEFAULT_TEST_CORENAME), + DEFAULT_TEST_CORENAME); + controlJetty = + createJetty( + controlHome, testDir + "/control/data", null, getSolrConfigFile(), getSchemaFile()); controlJetty.start(); controlClient = createNewSolrClient(controlJetty.getLocalPort()); @@ -125,9 +133,13 @@ protected void createServers(int numShards) throws Exception { seedSolrHome(shardHome.toFile()); Path coresPath = shardHome.resolve("cores"); writeCoreProperties(coresPath.resolve(DEFAULT_TEST_CORENAME), DEFAULT_TEST_CORENAME); - JettySolrRunner j = createJetty(shardHome.toFile(), - testDir + "/shard" + i + "/data", null, getSolrConfigFile(), - getSchemaFile()); + JettySolrRunner j = + createJetty( + shardHome.toFile(), + testDir + "/shard" + i + "/data", + null, + getSolrConfigFile(), + getSchemaFile()); j.start(); jettys.add(j); clients.add(createNewSolrClient(j.getLocalPort())); @@ -156,9 +168,12 @@ public void test() throws Exception { try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler)) { - nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, - ((HttpSolrClient) controlClient).getBaseURL(), - ZkStateReader.CORE_NAME_PROP, ""); + nodeProps = + new ZkNodeProps( + ZkStateReader.BASE_URL_PROP, + ((HttpSolrClient) controlClient).getBaseURL(), + ZkStateReader.CORE_NAME_PROP, + ""); nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps))); // add one doc to controlClient @@ -172,18 +187,17 @@ public void test() throws Exception { cmdDistrib.distribCommit(ccmd, nodes, params); cmdDistrib.finish(); - errors = cmdDistrib.getErrors(); assertEquals(errors.toString(), 0, errors.size()); - numFound = controlClient.query(new SolrQuery("*:*")).getResults() - .getNumFound(); + numFound = controlClient.query(new SolrQuery("*:*")).getResults().getNumFound(); assertEquals(1, numFound); client = (HttpSolrClient) clients.get(0); - nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, - client.getBaseURL(), ZkStateReader.CORE_NAME_PROP, ""); + nodeProps = + new ZkNodeProps( + ZkStateReader.BASE_URL_PROP, client.getBaseURL(), ZkStateReader.CORE_NAME_PROP, ""); nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps))); } int id2; @@ -221,8 +235,7 @@ public void test() throws Exception { numFound = results.getNumFound(); assertEquals(results.toString(), 3, numFound); - numFound = client.query(new SolrQuery("*:*")).getResults() - .getNumFound(); + numFound = client.query(new SolrQuery("*:*")).getResults().getNumFound(); assertEquals(3, numFound); // now delete doc 2 which is on both control and client1 @@ -230,7 +243,6 @@ public void test() throws Exception { DeleteUpdateCommand dcmd = new DeleteUpdateCommand(null); dcmd.id = Integer.toString(id2); - try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler)) { params = new ModifiableSolrParams(); @@ -249,18 +261,16 @@ public void test() throws Exception { assertEquals(errors.toString(), 0, errors.size()); - results = controlClient.query(new SolrQuery("*:*")).getResults(); numFound = results.getNumFound(); assertEquals(results.toString(), 2, numFound); - numFound = client.query(new SolrQuery("*:*")).getResults() - .getNumFound(); + numFound = client.query(new SolrQuery("*:*")).getResults().getNumFound(); assertEquals(results.toString(), 2, 
numFound); for (SolrClient c : clients) { c.optimize(); - //System.out.println(clients.get(0).request(new LukeRequest())); + // System.out.println(clients.get(0).request(new LukeRequest())); } try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler)) { @@ -273,10 +283,13 @@ public void test() throws Exception { continue; } HttpSolrClient httpClient = (HttpSolrClient) c; - nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, - httpClient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, ""); + nodeProps = + new ZkNodeProps( + ZkStateReader.BASE_URL_PROP, + httpClient.getBaseURL(), + ZkStateReader.CORE_NAME_PROP, + ""); nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps))); - } AddUpdateCommand c = new AddUpdateCommand(null); c.solrDoc = sdoc("id", id.incrementAndGet()); @@ -290,8 +303,12 @@ public void test() throws Exception { for (SolrClient c : clients) { HttpSolrClient httpClient = (HttpSolrClient) c; - nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, - httpClient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, ""); + nodeProps = + new ZkNodeProps( + ZkStateReader.BASE_URL_PROP, + httpClient.getBaseURL(), + ZkStateReader.CORE_NAME_PROP, + ""); nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps))); } @@ -300,21 +317,21 @@ public void test() throws Exception { for (JettySolrRunner jetty : jettys) { CoreContainer cores = jetty.getCoreContainer(); try (SolrCore core = cores.getCore("collection1")) { - core.getUpdateHandler().registerCommitCallback(new SolrEventListener() { - @Override - public void postSoftCommit() { - } - - @Override - public void postCommit() { - commits.incrementAndGet(); - } - - @Override - public void newSearcher(SolrIndexSearcher newSearcher, - SolrIndexSearcher currentSearcher) { - } - }); + core.getUpdateHandler() + .registerCommitCallback( + new SolrEventListener() { + @Override + public void postSoftCommit() {} + + @Override + public void postCommit() { + commits.incrementAndGet(); + } + + @Override + public void newSearcher( + SolrIndexSearcher newSearcher, SolrIndexSearcher currentSearcher) {} + }); } } params = new ModifiableSolrParams(); @@ -328,12 +345,13 @@ public void newSearcher(SolrIndexSearcher newSearcher, for (SolrClient c : clients) { NamedList resp = c.request(new LukeRequest()); - assertEquals("SOLR-3428: We only did adds - there should be no deletes", + assertEquals( + "SOLR-3428: We only did adds - there should be no deletes", ((NamedList) resp.get("index")).get("numDocs"), ((NamedList) resp.get("index")).get("maxDoc")); } } - + testMaxRetries(NodeType.FORWARD); testMaxRetries(NodeType.STANDARD); testOneRetry(NodeType.FORWARD); @@ -358,13 +376,13 @@ public void newSearcher(SolrIndexSearcher newSearcher, getRfFromResponseShouldNotCloseTheInputStream(); testStuckUpdates(); } - + private void testDeletes(boolean dbq, boolean withFailures) throws Exception { final HttpSolrClient solrclient = (HttpSolrClient) clients.get(0); solrclient.commit(true, true); - long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults() - .getNumFound(); - final MockStreamingSolrClients streamingClients = new MockStreamingSolrClients(updateShardHandler); + long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound(); + final MockStreamingSolrClients streamingClients = + new MockStreamingSolrClients(updateShardHandler); try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 0)) { if (withFailures) { streamingClients.setExp(Exp.CONNECT_EXCEPTION); @@ -372,20 +390,25 @@ private void 
testDeletes(boolean dbq, boolean withFailures) throws Exception { ArrayList nodes = new ArrayList<>(); final AtomicInteger retries = new AtomicInteger(); - ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, ""); - Node retryNode = new StdNode(new ZkCoreNodeProps(nodeProps), "collection1", "shard1", 5) { - @Override - public boolean checkRetry(Error err) { - streamingClients.setExp(null); - retries.incrementAndGet(); - return super.checkRetry(err); - } - }; - + ZkNodeProps nodeProps = + new ZkNodeProps( + ZkStateReader.BASE_URL_PROP, + solrclient.getBaseURL(), + ZkStateReader.CORE_NAME_PROP, + ""); + Node retryNode = + new StdNode(new ZkCoreNodeProps(nodeProps), "collection1", "shard1", 5) { + @Override + public boolean checkRetry(Error err) { + streamingClients.setExp(null); + retries.incrementAndGet(); + return super.checkRetry(err); + } + }; nodes.add(retryNode); - for (int i = 0 ; i < 5 ; i++) { + for (int i = 0; i < 5; i++) { AddUpdateCommand cmd = new AddUpdateCommand(null); int currentId = id.incrementAndGet(); cmd.solrDoc = sdoc("id", currentId); @@ -399,12 +422,11 @@ public boolean checkRetry(Error err) { } cmdDistrib.distribDelete(dcmd, nodes, params, false, null, null); } - CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false); cmdDistrib.distribCommit(ccmd, nodes, new ModifiableSolrParams()); cmdDistrib.finish(); - + int expectedRetryCount = 0; if (withFailures) { if (dbq) { @@ -415,9 +437,7 @@ public boolean checkRetry(Error err) { } assertEquals(expectedRetryCount, retries.get()); - - long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults() - .getNumFound(); + long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound(); // we will get java.net.ConnectException which we retry on assertEquals(numFoundBefore, numFoundAfter); @@ -427,79 +447,96 @@ public boolean checkRetry(Error err) { private void testMinRfOnRetries(NodeType nodeType) throws Exception { final HttpSolrClient solrclient = (HttpSolrClient) clients.get(0); - final MockStreamingSolrClients streamingClients = new MockStreamingSolrClients(updateShardHandler); + final MockStreamingSolrClients streamingClients = + new MockStreamingSolrClients(updateShardHandler); try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 0)) { streamingClients.setExp(Exp.CONNECT_EXCEPTION); ArrayList nodes = new ArrayList<>(); final AtomicInteger retries = new AtomicInteger(); - ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, ""); + ZkNodeProps nodeProps = + new ZkNodeProps( + ZkStateReader.BASE_URL_PROP, + solrclient.getBaseURL(), + ZkStateReader.CORE_NAME_PROP, + ""); if (nodeType == NodeType.FORWARD) { - nodes.add(new ForwardNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1", 5) { - @Override - public boolean checkRetry(Error err) { - if (retries.incrementAndGet() >= 3) { - streamingClients.setExp(null); - } - return super.checkRetry(err); - } - }); + nodes.add( + new ForwardNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1", 5) { + @Override + public boolean checkRetry(Error err) { + if (retries.incrementAndGet() >= 3) { + streamingClients.setExp(null); + } + return super.checkRetry(err); + } + }); } else { - nodes.add(new StdNode(new ZkCoreNodeProps(nodeProps), "collection1", "shard1", 5) { - @Override - public boolean checkRetry(Error err) { - if (retries.incrementAndGet() >= 3) { - 
streamingClients.setExp(null); - } - return super.checkRetry(err); - } - }); + nodes.add( + new StdNode(new ZkCoreNodeProps(nodeProps), "collection1", "shard1", 5) { + @Override + public boolean checkRetry(Error err) { + if (retries.incrementAndGet() >= 3) { + streamingClients.setExp(null); + } + return super.checkRetry(err); + } + }); } - AddUpdateCommand cmd = new AddUpdateCommand(null); cmd.solrDoc = sdoc("id", id.incrementAndGet()); ModifiableSolrParams params = new ModifiableSolrParams(); RollupRequestReplicationTracker rollupReqTracker = new RollupRequestReplicationTracker(); - LeaderRequestReplicationTracker leaderReqTracker = new LeaderRequestReplicationTracker("shard1"); + LeaderRequestReplicationTracker leaderReqTracker = + new LeaderRequestReplicationTracker("shard1"); cmdDistrib.distribAdd(cmd, nodes, params, false, rollupReqTracker, leaderReqTracker); cmdDistrib.finish(); assertEquals(3, retries.get()); - assertEquals(2, leaderReqTracker.getAchievedRf());// "2" here is because one would be the leader, that creates the instance of LeaderRequestReplicationTracker, the second one is the node + // "2" here is because one would be the leader, that creates the instance of + // LeaderRequestReplicationTracker, the second one is the node + assertEquals(2, leaderReqTracker.getAchievedRf()); assertEquals(0, cmdDistrib.getErrors().size()); } } private void testMaxRetries(NodeType nodeType) throws IOException { - final MockStreamingSolrClients streamingClients = new MockStreamingSolrClients(updateShardHandler); + final MockStreamingSolrClients streamingClients = + new MockStreamingSolrClients(updateShardHandler); try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 0)) { streamingClients.setExp(Exp.CONNECT_EXCEPTION); ArrayList nodes = new ArrayList<>(); final HttpSolrClient solrclient1 = (HttpSolrClient) clients.get(0); final AtomicInteger retries = new AtomicInteger(); - ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient1.getBaseURL(), ZkStateReader.CORE_NAME_PROP, ""); + ZkNodeProps nodeProps = + new ZkNodeProps( + ZkStateReader.BASE_URL_PROP, + solrclient1.getBaseURL(), + ZkStateReader.CORE_NAME_PROP, + ""); Node retryNode; if (nodeType == NodeType.FORWARD) { - retryNode = new ForwardNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1", 6) { - @Override - public boolean checkRetry(Error err) { - retries.incrementAndGet(); - return super.checkRetry(err); - } - }; + retryNode = + new ForwardNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1", 6) { + @Override + public boolean checkRetry(Error err) { + retries.incrementAndGet(); + return super.checkRetry(err); + } + }; } else { - retryNode = new StdNode(new ZkCoreNodeProps(nodeProps), "collection1", "shard1", 6) { - @Override - public boolean checkRetry(Error err) { - retries.incrementAndGet(); - return super.checkRetry(err); - } - }; + retryNode = + new StdNode(new ZkCoreNodeProps(nodeProps), "collection1", "shard1", 6) { + @Override + public boolean checkRetry(Error err) { + retries.incrementAndGet(); + return super.checkRetry(err); + } + }; } - nodes.add(retryNode); @@ -515,99 +552,118 @@ public boolean checkRetry(Error err) { assertEquals(1, cmdDistrib.getErrors().size()); } } - + private void testReqShouldRetryNoRetries() { - Error err = getError(new SocketException()); - SolrCmdDistributor.Req req = new SolrCmdDistributor.Req(null, new StdNode(null, "collection1", "shard1", 0), new UpdateRequest(), true); + Error err = getError(new 
SocketException());
+    SolrCmdDistributor.Req req =
+        new SolrCmdDistributor.Req(
+            null, new StdNode(null, "collection1", "shard1", 0), new UpdateRequest(), true);
     assertFalse(req.shouldRetry(err));
   }
-
+
   private void testReqShouldRetryDBQ() {
-
     Error err = getError(new SocketException());
     UpdateRequest dbqReq = new UpdateRequest();
     dbqReq.deleteByQuery("*:*");
-    SolrCmdDistributor.Req req = new SolrCmdDistributor.Req(null, new StdNode(null, "collection1", "shard1", 1), dbqReq, true);
+    SolrCmdDistributor.Req req =
+        new SolrCmdDistributor.Req(
+            null, new StdNode(null, "collection1", "shard1", 1), dbqReq, true);
     assertFalse(req.shouldRetry(err));
   }

   public void getRfFromResponseShouldNotCloseTheInputStream() {
     UpdateRequest dbqReq = new UpdateRequest();
     dbqReq.deleteByQuery("*:*");
-    SolrCmdDistributor.Req req = new SolrCmdDistributor.Req(null, new StdNode(null, "collection1", "shard1", 1), dbqReq, true);
+    SolrCmdDistributor.Req req =
+        new SolrCmdDistributor.Req(
+            null, new StdNode(null, "collection1", "shard1", 1), dbqReq, true);
     AtomicBoolean isClosed = new AtomicBoolean(false);
-    ByteArrayInputStream is = new ByteArrayInputStream(new byte[100]) {
-      @Override
-      public void close() throws IOException {
-        isClosed.set(true);
-        super.close();
-      }
-    };
+    ByteArrayInputStream is =
+        new ByteArrayInputStream(new byte[100]) {
+          @Override
+          public void close() throws IOException {
+            isClosed.set(true);
+            super.close();
+          }
+        };
     req.trackRequestResult(null, is, true);
     assertFalse("Underlying stream should not be closed!", isClosed.get());
   }
-
+
   private void testReqShouldRetryMaxRetries() {
-    Error err = getError(new SocketException());
-    SolrCmdDistributor.Req req = new SolrCmdDistributor.Req(null, new StdNode(null, "collection1", "shard1", 1), new UpdateRequest(), true);
+    Error err = getError(new SocketException());
+    SolrCmdDistributor.Req req =
+        new SolrCmdDistributor.Req(
+            null, new StdNode(null, "collection1", "shard1", 1), new UpdateRequest(), true);
     assertTrue(req.shouldRetry(err));
     req.retries++;
     assertFalse(req.shouldRetry(err));
   }
-
+
   private void testReqShouldRetryBadRequest() {
-    Error err = getError(new SolrException(SolrException.ErrorCode.BAD_REQUEST, "bad request"));
-    SolrCmdDistributor.Req req = new SolrCmdDistributor.Req(null, new StdNode(null, "collection1", "shard1", 1), new UpdateRequest(), true);
+    Error err = getError(new SolrException(SolrException.ErrorCode.BAD_REQUEST, "bad request"));
+    SolrCmdDistributor.Req req =
+        new SolrCmdDistributor.Req(
+            null, new StdNode(null, "collection1", "shard1", 1), new UpdateRequest(), true);
     assertFalse(req.shouldRetry(err));
   }
-
+
   private void testReqShouldRetryNotFound() {
     Error err = getError(new SolrException(SolrException.ErrorCode.NOT_FOUND, "not found"));
-    SolrCmdDistributor.Req req = new SolrCmdDistributor.Req(null, new StdNode(null, "collection1", "shard1", 1), new UpdateRequest(), true);
+    SolrCmdDistributor.Req req =
+        new SolrCmdDistributor.Req(
+            null, new StdNode(null, "collection1", "shard1", 1), new UpdateRequest(), true);
     assertTrue(req.shouldRetry(err));
   }
-
+
   private Error getError(Exception e) {
     Error err = new Error();
     err.e = e;
     if (e instanceof SolrException) {
-      err.statusCode = ((SolrException)e).code();
+      err.statusCode = ((SolrException) e).code();
     }
     return err;
   }
-
+
   private void testOneRetry(NodeType nodeType) throws Exception {
     final HttpSolrClient solrclient = (HttpSolrClient) clients.get(0);
-    long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults()
-        .getNumFound();
-    final MockStreamingSolrClients streamingClients = new MockStreamingSolrClients(updateShardHandler);
+    long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound();
+    final MockStreamingSolrClients streamingClients =
+        new MockStreamingSolrClients(updateShardHandler);
     try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 0)) {
       streamingClients.setExp(Exp.CONNECT_EXCEPTION);
       ArrayList<Node> nodes = new ArrayList<>();
       final AtomicInteger retries = new AtomicInteger();
-      ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
+      ZkNodeProps nodeProps =
+          new ZkNodeProps(
+              ZkStateReader.BASE_URL_PROP,
+              solrclient.getBaseURL(),
+              ZkStateReader.CORE_NAME_PROP,
+              "");
       Node retryNode;
       if (nodeType == NodeType.FORWARD) {
-        retryNode = new ForwardNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1", 5) {
-          @Override
-          public boolean checkRetry(Error err) {
-            streamingClients.setExp(null);
-            retries.incrementAndGet();
-            return super.checkRetry(err);
-          }
-        };
+        retryNode =
+            new ForwardNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1", 5) {
+              @Override
+              public boolean checkRetry(Error err) {
+                streamingClients.setExp(null);
+                retries.incrementAndGet();
+                return super.checkRetry(err);
+              }
+            };
       } else {
-        retryNode = new StdNode(new ZkCoreNodeProps(nodeProps), "collection1", "shard1", 5) {
-          @Override
-          public boolean checkRetry(Error err) {
-            streamingClients.setExp(null);
-            retries.incrementAndGet();
-            return super.checkRetry(err);
-          }
-        };
+        retryNode =
+            new StdNode(new ZkCoreNodeProps(nodeProps), "collection1", "shard1", 5) {
+              @Override
+              public boolean checkRetry(Error err) {
+                streamingClients.setExp(null);
+                retries.incrementAndGet();
+                return super.checkRetry(err);
+              }
+            };
       }
-
       nodes.add(retryNode);

       AddUpdateCommand cmd = new AddUpdateCommand(null);
@@ -621,9 +677,7 @@ public boolean checkRetry(Error err) {

       assertEquals(1, retries.get());

-
-      long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults()
-          .getNumFound();
+      long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound();

       // we will get java.net.ConnectException which we retry on
       assertEquals(numFoundBefore + 1, numFoundAfter);
@@ -634,33 +688,39 @@ public boolean checkRetry(Error err) {
   private void testNodeWontRetryBadRequest(NodeType nodeType) throws Exception {
     ignoreException("Bad Request");
     final HttpSolrClient solrclient = (HttpSolrClient) clients.get(0);
-    long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults()
-        .getNumFound();
-    final MockStreamingSolrClients streamingClients = new MockStreamingSolrClients(updateShardHandler);
+    long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound();
+    final MockStreamingSolrClients streamingClients =
+        new MockStreamingSolrClients(updateShardHandler);
     try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 0)) {
       streamingClients.setExp(Exp.BAD_REQUEST);
       ArrayList<Node> nodes = new ArrayList<>();
-      ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient.getBaseURL(),
-          ZkStateReader.CORE_NAME_PROP, "");
+      ZkNodeProps nodeProps =
+          new ZkNodeProps(
+              ZkStateReader.BASE_URL_PROP,
+              solrclient.getBaseURL(),
+              ZkStateReader.CORE_NAME_PROP,
+              "");
       final AtomicInteger retries = new AtomicInteger();
       Node retryNode;
       if (nodeType == NodeType.FORWARD) {
-        retryNode = new ForwardNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1", 5) {
-          @Override
-          public boolean checkRetry(Error err) {
-            retries.incrementAndGet();
-            return super.checkRetry(err);
-          }
-        };
+        retryNode =
+            new ForwardNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1", 5) {
+              @Override
+              public boolean checkRetry(Error err) {
+                retries.incrementAndGet();
+                return super.checkRetry(err);
+              }
+            };
       } else {
-        retryNode = new StdNode(new ZkCoreNodeProps(nodeProps), "collection1", "shard1", 5) {
-          @Override
-          public boolean checkRetry(Error err) {
-            retries.incrementAndGet();
-            return super.checkRetry(err);
-          }
-        };
+        retryNode =
+            new StdNode(new ZkCoreNodeProps(nodeProps), "collection1", "shard1", 5) {
+              @Override
+              public boolean checkRetry(Error err) {
+                retries.incrementAndGet();
+                return super.checkRetry(err);
+              }
+            };
       }

       nodes.add(retryNode);

@@ -678,9 +738,7 @@ public boolean checkRetry(Error err) {

       // it will checkRetry, but not actually do it...
       assertEquals(1, retries.get());
-
-      long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults()
-          .getNumFound();
+      long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound();

       // we will get java.net.SocketException: Network is unreachable, which we don't retry on
       assertEquals(numFoundBefore, numFoundAfter);
@@ -688,26 +746,31 @@ public boolean checkRetry(Error err) {
       unIgnoreException("Bad Request");
     }
   }
-
+
   private void testForwardNodeWontRetrySocketError() throws Exception {
     final HttpSolrClient solrclient = (HttpSolrClient) clients.get(0);
-    long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults()
-        .getNumFound();
-    final MockStreamingSolrClients streamingClients = new MockStreamingSolrClients(updateShardHandler);
+    long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound();
+    final MockStreamingSolrClients streamingClients =
+        new MockStreamingSolrClients(updateShardHandler);
     try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 0)) {
       streamingClients.setExp(Exp.SOCKET_EXCEPTION);
       ArrayList<Node> nodes = new ArrayList<>();
       final AtomicInteger retries = new AtomicInteger();
-      ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
-      ForwardNode retryNode = new ForwardNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1", 5) {
-        @Override
-        public boolean checkRetry(Error err) {
-          retries.incrementAndGet();
-          return super.checkRetry(err);
-        }
-      };
-
+      ZkNodeProps nodeProps =
+          new ZkNodeProps(
+              ZkStateReader.BASE_URL_PROP,
+              solrclient.getBaseURL(),
+              ZkStateReader.CORE_NAME_PROP,
+              "");
+      ForwardNode retryNode =
+          new ForwardNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1", 5) {
+            @Override
+            public boolean checkRetry(Error err) {
+              retries.incrementAndGet();
+              return super.checkRetry(err);
+            }
+          };
       nodes.add(retryNode);

@@ -725,33 +788,37 @@ public boolean checkRetry(Error err) {

       // it will checkRetry, but not actually do it...
      assertEquals(1, retries.get());
-
-      long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults()
-          .getNumFound();
+      long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound();

       // we will get java.net.SocketException: Network is unreachable, which we don't retry on
       assertEquals(numFoundBefore, numFoundAfter);
       assertEquals(1, cmdDistrib.getErrors().size());
     }
   }
-
+
   private void testStdNodeRetriesSocketError() throws Exception {
     final HttpSolrClient solrclient = (HttpSolrClient) clients.get(0);
-    final MockStreamingSolrClients streamingClients = new MockStreamingSolrClients(updateShardHandler);
+    final MockStreamingSolrClients streamingClients =
+        new MockStreamingSolrClients(updateShardHandler);
     try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(streamingClients, 0)) {
       streamingClients.setExp(Exp.SOCKET_EXCEPTION);
       ArrayList<Node> nodes = new ArrayList<>();
       final AtomicInteger retries = new AtomicInteger();
-      ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
-      Node retryNode = new StdNode(new ZkCoreNodeProps(nodeProps), "collection1", "shard1", 5) {
-        @Override
-        public boolean checkRetry(Error err) {
-          retries.incrementAndGet();
-          return super.checkRetry(err);
-        }
-      };
-
+      ZkNodeProps nodeProps =
+          new ZkNodeProps(
+              ZkStateReader.BASE_URL_PROP,
+              solrclient.getBaseURL(),
+              ZkStateReader.CORE_NAME_PROP,
+              "");
+      Node retryNode =
+          new StdNode(new ZkCoreNodeProps(nodeProps), "collection1", "shard1", 5) {
+            @Override
+            public boolean checkRetry(Error err) {
+              retries.incrementAndGet();
+              return super.checkRetry(err);
+            }
+          };
       nodes.add(retryNode);

@@ -771,27 +838,31 @@ private void testRetryNodeAgainstBadAddress() throws SolrServerException, IOExce
     // Test RetryNode
     try (SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler)) {
       final HttpSolrClient solrclient = (HttpSolrClient) clients.get(0);
-      long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults()
-          .getNumFound();
+      long numFoundBefore = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound();

       ArrayList<Node> nodes = new ArrayList<>();

-      ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, DEAD_HOST_1 + context, ZkStateReader.CORE_NAME_PROP, "");
-      ForwardNode retryNode = new ForwardNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1", 5) {
-        @Override
-        public boolean checkRetry(Error err) {
-          ZkNodeProps leaderProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP, solrclient.getBaseURL(),
-              ZkStateReader.CORE_NAME_PROP, "");
-          this.nodeProps = new ZkCoreNodeProps(leaderProps);
-
-          return super.checkRetry(err);
-        }
-      };
-
+      ZkNodeProps nodeProps =
+          new ZkNodeProps(
+              ZkStateReader.BASE_URL_PROP, DEAD_HOST_1 + context, ZkStateReader.CORE_NAME_PROP, "");
+      ForwardNode retryNode =
+          new ForwardNode(new ZkCoreNodeProps(nodeProps), null, "collection1", "shard1", 5) {
+            @Override
+            public boolean checkRetry(Error err) {
+              ZkNodeProps leaderProps =
+                  new ZkNodeProps(
+                      ZkStateReader.BASE_URL_PROP,
+                      solrclient.getBaseURL(),
+                      ZkStateReader.CORE_NAME_PROP,
+                      "");
+              this.nodeProps = new ZkCoreNodeProps(leaderProps);
+
+              return super.checkRetry(err);
+            }
+          };
       nodes.add(retryNode);
-
       AddUpdateCommand cmd = new AddUpdateCommand(null);
       cmd.solrDoc = sdoc("id", id.incrementAndGet());
       ModifiableSolrParams params = new ModifiableSolrParams();
@@ -804,8 +875,7 @@ public boolean checkRetry(Error err) {

       cmdDistrib.distribCommit(ccmd, nodes, params);
      cmdDistrib.finish();

-      long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults()
-          .getNumFound();
+      long numFoundAfter = solrclient.query(new SolrQuery("*:*")).getResults().getNumFound();

       // different OS's will throw different exceptions for the bad address above
       if (numFoundBefore != numFoundAfter) {
@@ -819,7 +889,7 @@ public boolean checkRetry(Error err) {
       }
     }
   }
-
+
   @Override
   public void distribTearDown() throws Exception {
     updateShardHandler.close();
@@ -832,12 +902,12 @@ private void testDistribOpenSearcher() {

       CommitUpdateCommand ccmd = new CommitUpdateCommand(null, false);

-      //test default value (should be true)
+      // test default value (should be true)
       cmdDistrib.addCommit(updateRequest, ccmd);
       boolean openSearcher = updateRequest.getParams().getBool(UpdateParams.OPEN_SEARCHER, false);
       assertTrue(openSearcher);

-      //test openSearcher = false
+      // test openSearcher = false
       ccmd.openSearcher = false;

       cmdDistrib.addCommit(updateRequest, ccmd);
@@ -858,8 +928,12 @@ private void testStuckUpdates() throws Exception {
          continue;
         }
         HttpSolrClient httpClient = (HttpSolrClient) c;
-        ZkNodeProps nodeProps = new ZkNodeProps(ZkStateReader.BASE_URL_PROP,
-            httpClient.getBaseURL(), ZkStateReader.CORE_NAME_PROP, "");
+        ZkNodeProps nodeProps =
+            new ZkNodeProps(
+                ZkStateReader.BASE_URL_PROP,
+                httpClient.getBaseURL(),
+                ZkStateReader.CORE_NAME_PROP,
+                "");
         StdNode node = new StdNode(new ZkCoreNodeProps(nodeProps));
         nodes.add(node);
       }
diff --git a/solr/core/src/test/org/apache/solr/update/SolrIndexConfigTest.java b/solr/core/src/test/org/apache/solr/update/SolrIndexConfigTest.java
index 4a40abcd888..41fbfe32731 100644
--- a/solr/core/src/test/org/apache/solr/update/SolrIndexConfigTest.java
+++ b/solr/core/src/test/org/apache/solr/update/SolrIndexConfigTest.java
@@ -19,7 +19,6 @@
 import java.nio.file.Path;
 import java.util.LinkedHashMap;
 import java.util.Map;
-
 import org.apache.lucene.index.ConcurrentMergeScheduler;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.MergePolicy;
@@ -48,35 +47,43 @@ public class SolrIndexConfigTest extends SolrTestCaseJ4 {

   private static final String solrConfigFileName = "solrconfig.xml";
-  private static final String solrConfigFileNameWarmerRandomMergePolicyFactory = "solrconfig-warmer-randommergepolicyfactory.xml";
-  private static final String solrConfigFileNameTieredMergePolicyFactory = "solrconfig-tieredmergepolicyfactory.xml";
-  private static final String solrConfigFileNameConnMSPolicyFactory = "solrconfig-concurrentmergescheduler.xml";
-  private static final String solrConfigFileNameSortingMergePolicyFactory = "solrconfig-sortingmergepolicyfactory.xml";
+  private static final String solrConfigFileNameWarmerRandomMergePolicyFactory =
+      "solrconfig-warmer-randommergepolicyfactory.xml";
+  private static final String solrConfigFileNameTieredMergePolicyFactory =
+      "solrconfig-tieredmergepolicyfactory.xml";
+  private static final String solrConfigFileNameConnMSPolicyFactory =
+      "solrconfig-concurrentmergescheduler.xml";
+  private static final String solrConfigFileNameSortingMergePolicyFactory =
+      "solrconfig-sortingmergepolicyfactory.xml";
   private static final String schemaFileName = "schema.xml";

   @BeforeClass
   public static void beforeClass() throws Exception {
-    initCore(solrConfigFileName,schemaFileName);
+    initCore(solrConfigFileName, schemaFileName);
   }
-
+
   @After
   public void tearDown() throws Exception {
     System.clearProperty("solr.tests.maxCommitMergeWait");
     super.tearDown();
   }
-
+
   private final Path instanceDir = TEST_PATH().resolve("collection1");

   @Test
   public void testFailingSolrIndexConfigCreation() throws Exception {
-    SolrConfig solrConfig = new SolrConfig(instanceDir,"bad-mpf-solrconfig.xml");
+    SolrConfig solrConfig = new SolrConfig(instanceDir, "bad-mpf-solrconfig.xml");
     SolrIndexConfig solrIndexConfig = new SolrIndexConfig(solrConfig, null);
     IndexSchema indexSchema = IndexSchemaFactory.buildIndexSchema(schemaFileName, solrConfig);
     h.getCore().setLatestSchema(indexSchema);

     // this should fail as mergePolicy doesn't have any public constructor
-    SolrException ex = expectThrows(SolrException.class, () -> solrIndexConfig.toIndexWriterConfig(h.getCore()));
-    assertTrue(ex.getMessage().contains("Error instantiating class: 'org.apache.solr.index.DummyMergePolicyFactory'"));
+    SolrException ex =
+        expectThrows(SolrException.class, () -> solrIndexConfig.toIndexWriterConfig(h.getCore()));
+    assertTrue(
+        ex.getMessage()
+            .contains(
+                "Error instantiating class: 'org.apache.solr.index.DummyMergePolicyFactory'"));
   }

   @Test
@@ -85,7 +92,7 @@ public void testTieredMPSolrIndexConfigCreation() throws Exception {
     SolrConfig solrConfig = new SolrConfig(instanceDir, solrConfigFileName);
     SolrIndexConfig solrIndexConfig = new SolrIndexConfig(solrConfig, null);
     IndexSchema indexSchema = IndexSchemaFactory.buildIndexSchema(schemaFileName, solrConfig);
-
+
     h.getCore().setLatestSchema(indexSchema);
     IndexWriterConfig iwc = solrIndexConfig.toIndexWriterConfig(h.getCore());

@@ -93,16 +100,14 @@ public void testTieredMPSolrIndexConfigCreation() throws Exception {
     assertTrue("mp is not TieredMergePolicy", iwc.getMergePolicy() instanceof TieredMergePolicy);
     TieredMergePolicy mp = (TieredMergePolicy) iwc.getMergePolicy();
     assertEquals("mp.maxMergeAtOnce", 7, mp.getMaxMergeAtOnce());
-    assertEquals("mp.segmentsPerTier",9,(int)mp.getSegmentsPerTier());
-
+    assertEquals("mp.segmentsPerTier", 9, (int) mp.getSegmentsPerTier());

     assertNotNull("null ms", iwc.getMergeScheduler());
     assertTrue("ms is not CMS", iwc.getMergeScheduler() instanceof ConcurrentMergeScheduler);
-    ConcurrentMergeScheduler ms = (ConcurrentMergeScheduler) iwc.getMergeScheduler();
+    ConcurrentMergeScheduler ms = (ConcurrentMergeScheduler) iwc.getMergeScheduler();
     assertEquals("ms.maxMergeCount", 987, ms.getMaxMergeCount());
     assertEquals("ms.maxThreadCount", 42, ms.getMaxThreadCount());
     assertEquals("ms.isAutoIOThrottle", true, ms.getAutoIOThrottle());
-
   }

   @Test
@@ -120,11 +125,10 @@ public void testConcurrentMergeSchedularSolrIndexConfigCreation() throws Excepti

     assertNotNull("null ms", iwc.getMergeScheduler());
     assertTrue("ms is not CMS", iwc.getMergeScheduler() instanceof ConcurrentMergeScheduler);
-    ConcurrentMergeScheduler ms = (ConcurrentMergeScheduler) iwc.getMergeScheduler();
+    ConcurrentMergeScheduler ms = (ConcurrentMergeScheduler) iwc.getMergeScheduler();
     assertEquals("ms.maxMergeCount", 987, ms.getMaxMergeCount());
     assertEquals("ms.maxThreadCount", 42, ms.getMaxThreadCount());
     assertEquals("ms.isAutoIOThrottle", false, ms.getAutoIOThrottle());
-
   }

   public void testSortingMPSolrIndexConfigCreation() throws Exception {
@@ -132,7 +136,8 @@ public void testSortingMPSolrIndexConfigCreation() throws Exception {
     final SortField.Type expectedFieldType = SortField.Type.INT;
     final boolean expectedFieldSortDescending = true;

-    SolrConfig solrConfig = new SolrConfig(instanceDir, solrConfigFileNameSortingMergePolicyFactory);
+    SolrConfig solrConfig =
+        new SolrConfig(instanceDir, solrConfigFileNameSortingMergePolicyFactory);
     SolrIndexConfig solrIndexConfig = new SolrIndexConfig(solrConfig, null);
     assertNotNull(solrIndexConfig);
     IndexSchema indexSchema = IndexSchemaFactory.buildIndexSchema(schemaFileName, solrConfig);
@@ -142,19 +147,24 @@ public void testSortingMPSolrIndexConfigCreation() throws Exception {

     final MergePolicy mergePolicy = iwc.getMergePolicy();
     assertNotNull("null mergePolicy", mergePolicy);
-    assertTrue("mergePolicy ("+mergePolicy+") is not a SortingMergePolicy", mergePolicy instanceof SortingMergePolicy);
+    assertTrue(
+        "mergePolicy (" + mergePolicy + ") is not a SortingMergePolicy",
+        mergePolicy instanceof SortingMergePolicy);
     final SortingMergePolicy sortingMergePolicy = (SortingMergePolicy) mergePolicy;
-    final Sort expected = new Sort(new SortField(expectedFieldName, expectedFieldType, expectedFieldSortDescending));
+    final Sort expected =
+        new Sort(new SortField(expectedFieldName, expectedFieldType, expectedFieldSortDescending));
     final Sort actual = sortingMergePolicy.getSort();
     assertEquals("SortingMergePolicy.getSort", expected, actual);
   }

   public void testMergedSegmentWarmerIndexConfigCreation() throws Exception {
-    SolrConfig solrConfig = new SolrConfig(instanceDir, solrConfigFileNameWarmerRandomMergePolicyFactory);
+    SolrConfig solrConfig =
+        new SolrConfig(instanceDir, solrConfigFileNameWarmerRandomMergePolicyFactory);
     SolrIndexConfig solrIndexConfig = new SolrIndexConfig(solrConfig, null);
     assertNotNull(solrIndexConfig);
     assertNotNull(solrIndexConfig.mergedSegmentWarmerInfo);
-    assertEquals(SimpleMergedSegmentWarmer.class.getName(),
+    assertEquals(
+        SimpleMergedSegmentWarmer.class.getName(),
         solrIndexConfig.mergedSegmentWarmerInfo.className);
     IndexSchema indexSchema = IndexSchemaFactory.buildIndexSchema(schemaFileName, solrConfig);
     h.getCore().setLatestSchema(indexSchema);
@@ -165,7 +175,8 @@ public void testMergedSegmentWarmerIndexConfigCreation() throws Exception {
   public void testToMap() throws Exception {
     final String solrConfigFileNameWarmer = solrConfigFileNameWarmerRandomMergePolicyFactory;
     final String solrConfigFileNameTMP = solrConfigFileNameTieredMergePolicyFactory;
-    final String solrConfigFileName = (random().nextBoolean() ? solrConfigFileNameWarmer : solrConfigFileNameTMP);
+    final String solrConfigFileName =
+        (random().nextBoolean() ? solrConfigFileNameWarmer : solrConfigFileNameTMP);
     SolrConfig solrConfig = new SolrConfig(instanceDir, solrConfigFileName);
     SolrIndexConfig solrIndexConfig = new SolrIndexConfig(solrConfig, null);
     assertNotNull(solrIndexConfig);
@@ -180,51 +191,67 @@ public void testToMap() throws Exception {
     Map<String, Object> m = solrIndexConfig.toMap(new LinkedHashMap<>());
     int mSizeExpected = 0;

-    ++mSizeExpected; assertTrue(m.get("useCompoundFile") instanceof Boolean);
+    ++mSizeExpected;
+    assertTrue(m.get("useCompoundFile") instanceof Boolean);
+
+    ++mSizeExpected;
+    assertTrue(m.get("maxBufferedDocs") instanceof Integer);

-    ++mSizeExpected; assertTrue(m.get("maxBufferedDocs") instanceof Integer);
+    ++mSizeExpected;
+    assertTrue(m.get("ramBufferSizeMB") instanceof Double);

-    ++mSizeExpected; assertTrue(m.get("ramBufferSizeMB") instanceof Double);
-
-    ++mSizeExpected; assertTrue(m.get("maxCommitMergeWaitTime") instanceof Integer);
+    ++mSizeExpected;
+    assertTrue(m.get("maxCommitMergeWaitTime") instanceof Integer);

-    ++mSizeExpected; assertTrue(m.get("ramPerThreadHardLimitMB") instanceof Integer);
+    ++mSizeExpected;
+    assertTrue(m.get("ramPerThreadHardLimitMB") instanceof Integer);

-    ++mSizeExpected; assertTrue(m.get("writeLockTimeout") instanceof Integer);
+    ++mSizeExpected;
+    assertTrue(m.get("writeLockTimeout") instanceof Integer);

-    ++mSizeExpected; assertTrue(m.get("lockType") instanceof String);
+    ++mSizeExpected;
+    assertTrue(m.get("lockType") instanceof String);
     {
-      final String lockType = (String)m.get("lockType");
-      assertTrue(DirectoryFactory.LOCK_TYPE_SIMPLE.equals(lockType) ||
-          DirectoryFactory.LOCK_TYPE_NATIVE.equals(lockType) ||
-          DirectoryFactory.LOCK_TYPE_SINGLE.equals(lockType) ||
-          DirectoryFactory.LOCK_TYPE_NONE.equals(lockType));
+      final String lockType = (String) m.get("lockType");
+      assertTrue(
+          DirectoryFactory.LOCK_TYPE_SIMPLE.equals(lockType)
+              || DirectoryFactory.LOCK_TYPE_NATIVE.equals(lockType)
+              || DirectoryFactory.LOCK_TYPE_SINGLE.equals(lockType)
+              || DirectoryFactory.LOCK_TYPE_NONE.equals(lockType));
     }

-    ++mSizeExpected; assertTrue(m.get("infoStreamEnabled") instanceof Boolean);
+    ++mSizeExpected;
+    assertTrue(m.get("infoStreamEnabled") instanceof Boolean);
     {
       assertFalse(Boolean.valueOf(m.get("infoStreamEnabled").toString()).booleanValue());
     }
-
-    ++mSizeExpected; assertTrue(m.get("mergeScheduler") instanceof MapSerializable);
-    ++mSizeExpected; assertTrue(m.get("mergePolicyFactory") instanceof MapSerializable);
+
+    ++mSizeExpected;
+    assertTrue(m.get("mergeScheduler") instanceof MapSerializable);
+    ++mSizeExpected;
+    assertTrue(m.get("mergePolicyFactory") instanceof MapSerializable);

     if (solrConfigFileName.equals(solrConfigFileNameWarmerRandomMergePolicyFactory)) {
-      ++mSizeExpected; assertTrue(m.get("mergedSegmentWarmer") instanceof MapSerializable);
+      ++mSizeExpected;
+      assertTrue(m.get("mergedSegmentWarmer") instanceof MapSerializable);
     } else {
       assertNull(m.get("mergedSegmentWarmer"));
     }

-    ++mSizeExpected; assertNotNull(m.get("metrics"));
+    ++mSizeExpected;
+    assertNotNull(m.get("metrics"));

     assertEquals(mSizeExpected, m.size());
   }
-
+
   public void testMaxCommitMergeWaitTime() throws Exception {
     SolrConfig sc = new SolrConfig(TEST_PATH().resolve("collection1"), "solrconfig-test-misc.xml");
     assertEquals(-1, sc.indexConfig.maxCommitMergeWaitMillis);
-    assertEquals(IndexWriterConfig.DEFAULT_MAX_FULL_FLUSH_MERGE_WAIT_MILLIS, sc.indexConfig.toIndexWriterConfig(h.getCore()).getMaxFullFlushMergeWaitMillis());
+    assertEquals(
+        IndexWriterConfig.DEFAULT_MAX_FULL_FLUSH_MERGE_WAIT_MILLIS,
+        sc.indexConfig.toIndexWriterConfig(h.getCore()).getMaxFullFlushMergeWaitMillis());
     System.setProperty("solr.tests.maxCommitMergeWaitTime", "10");
     sc = new SolrConfig(TEST_PATH().resolve("collection1"), "solrconfig-test-misc.xml");
     assertEquals(10, sc.indexConfig.maxCommitMergeWaitMillis);
-    assertEquals(10, sc.indexConfig.toIndexWriterConfig(h.getCore()).getMaxFullFlushMergeWaitMillis());
+    assertEquals(
+        10, sc.indexConfig.toIndexWriterConfig(h.getCore()).getMaxFullFlushMergeWaitMillis());
   }
 }
diff --git a/solr/core/src/test/org/apache/solr/update/SolrIndexMetricsTest.java b/solr/core/src/test/org/apache/solr/update/SolrIndexMetricsTest.java
index a629be3bb41..73bc9c73028 100644
--- a/solr/core/src/test/org/apache/solr/update/SolrIndexMetricsTest.java
+++ b/solr/core/src/test/org/apache/solr/update/SolrIndexMetricsTest.java
@@ -16,21 +16,18 @@
  */
 package org.apache.solr.update;

-import java.util.Map;
-
 import com.codahale.metrics.Meter;
 import com.codahale.metrics.Metric;
 import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.Timer;
+import java.util.Map;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.request.SolrQueryRequest;
 import org.junit.After;
 import org.junit.Test;

-/**
- * Test proper registration and collection of index and directory metrics.
- */
+/** Test proper registration and collection of index and directory metrics. */
 public class SolrIndexMetricsTest extends SolrTestCaseJ4 {

   @After
@@ -62,21 +59,25 @@ public void testIndexMetricsNoDetails() throws Exception {

     addDocs();

-    MetricRegistry registry = h.getCoreContainer().getMetricManager().registry(h.getCore().getCoreMetricManager().getRegistryName());
+    MetricRegistry registry =
+        h.getCoreContainer()
+            .getMetricManager()
+            .registry(h.getCore().getCoreMetricManager().getRegistryName());
     assertNotNull(registry);

     Map<String, Metric> metrics = registry.getMetrics();

-    assertEquals(13, metrics.entrySet().stream().filter(e -> e.getKey().startsWith("INDEX")).count());
+    assertEquals(
+        13, metrics.entrySet().stream().filter(e -> e.getKey().startsWith("INDEX")).count());

     // check basic index meters
-    Timer timer = (Timer)metrics.get("INDEX.merge.minor");
+    Timer timer = (Timer) metrics.get("INDEX.merge.minor");
     assertTrue("minorMerge: " + timer.getCount(), timer.getCount() >= 3);
-    timer = (Timer)metrics.get("INDEX.merge.major");
+    timer = (Timer) metrics.get("INDEX.merge.major");
     assertEquals("majorMerge: " + timer.getCount(), 0, timer.getCount());

     // check detailed meters
-    assertNull((Meter)metrics.get("INDEX.merge.major.docs"));
-    Meter meter = (Meter)metrics.get("INDEX.flush");
+    assertNull((Meter) metrics.get("INDEX.merge.major.docs"));
+    Meter meter = (Meter) metrics.get("INDEX.flush");
     assertTrue("flush: " + meter.getCount(), meter.getCount() > 10);
   }

@@ -88,12 +89,16 @@ public void testIndexNoMetrics() throws Exception {

     addDocs();

-    MetricRegistry registry = h.getCoreContainer().getMetricManager().registry(h.getCore().getCoreMetricManager().getRegistryName());
+    MetricRegistry registry =
+        h.getCoreContainer()
+            .getMetricManager()
+            .registry(h.getCore().getCoreMetricManager().getRegistryName());
     assertNotNull(registry);

     Map<String, Metric> metrics = registry.getMetrics();
     // INDEX.size, INDEX.sizeInBytes, INDEX.segmentCount
-    assertEquals(3, metrics.entrySet().stream().filter(e -> e.getKey().startsWith("INDEX")).count());
+    assertEquals(
+        3, metrics.entrySet().stream().filter(e -> e.getKey().startsWith("INDEX")).count());
   }

   @Test
@@ -104,23 +109,27 @@ public void testIndexMetricsWithDetails() throws Exception {

     addDocs();

-    MetricRegistry registry = h.getCoreContainer().getMetricManager().registry(h.getCore().getCoreMetricManager().getRegistryName());
+    MetricRegistry registry =
+        h.getCoreContainer()
+            .getMetricManager()
+            .registry(h.getCore().getCoreMetricManager().getRegistryName());
     assertNotNull(registry);

     Map<String, Metric> metrics = registry.getMetrics();

-    assertTrue(metrics.entrySet().stream().filter(e -> e.getKey().startsWith("INDEX")).count() >= 12);
+    assertTrue(
+        metrics.entrySet().stream().filter(e -> e.getKey().startsWith("INDEX")).count() >= 12);

     // check basic index meters
-    Timer timer = (Timer)metrics.get("INDEX.merge.minor");
+    Timer timer = (Timer) metrics.get("INDEX.merge.minor");
     assertTrue("minorMerge: " + timer.getCount(), timer.getCount() >= 3);
-    timer = (Timer)metrics.get("INDEX.merge.major");
+    timer = (Timer) metrics.get("INDEX.merge.major");
     assertEquals("majorMerge: " + timer.getCount(), 0, timer.getCount());

     // check detailed meters
-    Meter meter = (Meter)metrics.get("INDEX.merge.major.docs");
+    Meter meter = (Meter) metrics.get("INDEX.merge.major.docs");
     assertEquals("majorMergeDocs: " + meter.getCount(), 0, meter.getCount());
-    meter = (Meter)metrics.get("INDEX.flush");
+    meter = (Meter) metrics.get("INDEX.flush");
     assertTrue("flush: " + meter.getCount(), meter.getCount() > 10);
   }
 }
diff --git a/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java b/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java
index fb6498505de..694628333cf 100644
--- a/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java
+++ b/solr/core/src/test/org/apache/solr/update/SolrIndexSplitterTest.java
@@ -16,15 +16,14 @@
  */
 package org.apache.solr.update;

+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
 import java.io.File;
 import java.io.UnsupportedEncodingException;
 import java.lang.invoke.MethodHandles;
 import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Set;
-
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.Directory;
@@ -53,7 +52,7 @@ public class SolrIndexSplitterTest extends SolrTestCaseJ4 {

   @BeforeClass
   public static void beforeClass() throws Exception {
-    // System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_
+    // System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_
     System.setProperty("solr.directoryFactory", "solr.NRTCachingDirectoryFactory");
     System.setProperty("solr.tests.lockType", DirectoryFactory.LOCK_TYPE_SIMPLE);
@@ -69,7 +68,9 @@ public void setUp() throws Exception {
     indexDir1 = createTempDir("_testSplit1").toFile();
     indexDir2 = createTempDir("_testSplit2").toFile();
     indexDir3 = createTempDir("_testSplit3").toFile();
-    h.getCoreContainer().getAllowPaths().addAll(Set.of(indexDir1.toPath(), indexDir2.toPath(), indexDir3.toPath()));
+    h.getCoreContainer()
+        .getAllowPaths()
+        .addAll(Set.of(indexDir1.toPath(), indexDir2.toPath(), indexDir3.toPath()));
   }

   @Test
@@ -98,23 +99,54 @@ private void doTestSplitByPaths(SolrIndexSplitter.SplitMethod splitMethod) throw
     request = lrf.makeRequest("q", "dummy");

     SolrQueryResponse rsp = new SolrQueryResponse();
-    SplitIndexCommand command = new SplitIndexCommand(request, rsp,
-        Lists.newArrayList(indexDir1.getAbsolutePath(), indexDir2.getAbsolutePath()), null, ranges, new PlainIdRouter(), null, null, splitMethod);
+    SplitIndexCommand command =
+        new SplitIndexCommand(
+            request,
+            rsp,
+            Lists.newArrayList(indexDir1.getAbsolutePath(), indexDir2.getAbsolutePath()),
+            null,
+            ranges,
+            new PlainIdRouter(),
+            null,
+            null,
+            splitMethod);
     doSplit(command);

-    Directory directory = h.getCore().getDirectoryFactory().get(indexDir1.getAbsolutePath(),
-        DirectoryFactory.DirContext.DEFAULT, h.getCore().getSolrConfig().indexConfig.lockType);
+    Directory directory =
+        h.getCore()
+            .getDirectoryFactory()
+            .get(
+                indexDir1.getAbsolutePath(),
+                DirectoryFactory.DirContext.DEFAULT,
+                h.getCore().getSolrConfig().indexConfig.lockType);
     DirectoryReader reader = DirectoryReader.open(directory);
-    assertEquals("id:dorothy should be present in split index1", 1, reader.docFreq(new Term("id", "dorothy")));
-    assertEquals("id:kansas should not be present in split index1", 0, reader.docFreq(new Term("id", "kansas")));
+    assertEquals(
+        "id:dorothy should be present in split index1",
+        1,
+        reader.docFreq(new Term("id", "dorothy")));
+    assertEquals(
+        "id:kansas should not be present in split index1",
+        0,
+        reader.docFreq(new Term("id", "kansas")));
     assertEquals("split index1 should have only one document", 1, reader.numDocs());
     reader.close();
     h.getCore().getDirectoryFactory().release(directory);
-    directory = h.getCore().getDirectoryFactory().get(indexDir2.getAbsolutePath(),
-        DirectoryFactory.DirContext.DEFAULT, h.getCore().getSolrConfig().indexConfig.lockType);
+    directory =
+        h.getCore()
+            .getDirectoryFactory()
+            .get(
+                indexDir2.getAbsolutePath(),
+                DirectoryFactory.DirContext.DEFAULT,
+                h.getCore().getSolrConfig().indexConfig.lockType);
     reader = DirectoryReader.open(directory);
-    assertEquals("id:dorothy should not be present in split index2", 0, reader.docFreq(new Term("id", "dorothy")));
-    assertEquals("id:kansas should be present in split index2", 1, reader.docFreq(new Term("id", "kansas")));
+    assertEquals(
+        "id:dorothy should not be present in split index2",
+        0,
+        reader.docFreq(new Term("id", "dorothy")));
+    assertEquals(
+        "id:kansas should be present in split index2",
+        1,
+        reader.docFreq(new Term("id", "kansas")));
     assertEquals("split index2 should have only one document", 1, reader.numDocs());
     reader.close();
     h.getCore().getDirectoryFactory().release(directory);
@@ -128,7 +160,7 @@ private void doSplit(SplitIndexCommand command) throws Exception {
     new SolrIndexSplitter(command).split(results);
     command.rsp.addResponse(results);
   }
-
+
   // SOLR-5144
   public void testSplitDeletes() throws Exception {
     doTestSplitDeletes(SolrIndexSplitter.SplitMethod.REWRITE);
@@ -151,27 +183,51 @@ private void doTestSplitDeletes(SolrIndexSplitter.SplitMethod splitMethod) throw
     assertU(delI(id2)); // delete id2
     assertU(commit());

-    // find minHash/maxHash hash ranges
     List<DocRouter.Range> ranges = getRanges(id1, id2);

     request = lrf.makeRequest("q", "dummy");

     SolrQueryResponse rsp = new SolrQueryResponse();
-    SplitIndexCommand command = new SplitIndexCommand(request, rsp,
-        Lists.newArrayList(indexDir1.getAbsolutePath(), indexDir2.getAbsolutePath()), null, ranges, new PlainIdRouter(), null, null, splitMethod);
+    SplitIndexCommand command =
+        new SplitIndexCommand(
+            request,
+            rsp,
+            Lists.newArrayList(indexDir1.getAbsolutePath(), indexDir2.getAbsolutePath()),
+            null,
+            ranges,
+            new PlainIdRouter(),
+            null,
+            null,
+            splitMethod);
     doSplit(command);

-    Directory directory = h.getCore().getDirectoryFactory().get(indexDir1.getAbsolutePath(),
-        DirectoryFactory.DirContext.DEFAULT, h.getCore().getSolrConfig().indexConfig.lockType);
+    Directory directory =
+        h.getCore()
+            .getDirectoryFactory()
+            .get(
+                indexDir1.getAbsolutePath(),
+                DirectoryFactory.DirContext.DEFAULT,
+                h.getCore().getSolrConfig().indexConfig.lockType);
     DirectoryReader reader = DirectoryReader.open(directory);
-    assertEquals("id:dorothy should be present in split index1", 1, reader.docFreq(new Term("id", "dorothy")));
-    assertEquals("id:kansas should not be present in split index1", 0, reader.docFreq(new Term("id", "kansas")));
+    assertEquals(
+        "id:dorothy should be present in split index1",
+        1,
+        reader.docFreq(new Term("id", "dorothy")));
+    assertEquals(
+        "id:kansas should not be present in split index1",
+        0,
+        reader.docFreq(new Term("id", "kansas")));
     assertEquals("split index1 should have only one document", 1, reader.numDocs());
     reader.close();
     h.getCore().getDirectoryFactory().release(directory);
-    directory = h.getCore().getDirectoryFactory().get(indexDir2.getAbsolutePath(),
-        DirectoryFactory.DirContext.DEFAULT, h.getCore().getSolrConfig().indexConfig.lockType);
+    directory =
+        h.getCore()
+            .getDirectoryFactory()
+            .get(
+                indexDir2.getAbsolutePath(),
+                DirectoryFactory.DirContext.DEFAULT,
+                h.getCore().getSolrConfig().indexConfig.lockType);
     reader = DirectoryReader.open(directory);
     assertEquals(0, reader.numDocs()); // should be empty
     reader.close();
@@ -209,17 +265,34 @@ private void doTestSplitByCores(SolrIndexSplitter.SplitMethod splitMethod) throw

     SolrCore core1 = null, core2 = null;
     try {
-      core1 = h.getCoreContainer().create("split1",
-          ImmutableMap.of("dataDir", indexDir1.getAbsolutePath(), "configSet", "cloud-minimal"));
-      core2 = h.getCoreContainer().create("split2",
-          ImmutableMap.of("dataDir", indexDir2.getAbsolutePath(), "configSet", "cloud-minimal"));
+      core1 =
+          h.getCoreContainer()
+              .create(
+                  "split1",
+                  ImmutableMap.of(
+                      "dataDir", indexDir1.getAbsolutePath(), "configSet", "cloud-minimal"));
+      core2 =
+          h.getCoreContainer()
+              .create(
+                  "split2",
+                  ImmutableMap.of(
+                      "dataDir", indexDir2.getAbsolutePath(), "configSet", "cloud-minimal"));

       LocalSolrQueryRequest request = null;
       try {
         request = lrf.makeRequest("q", "dummy");
         SolrQueryResponse rsp = new SolrQueryResponse();
-        SplitIndexCommand command = new SplitIndexCommand(request, rsp, null, Lists.newArrayList(core1, core2), ranges,
-            new PlainIdRouter(), null, null, splitMethod);
+        SplitIndexCommand command =
+            new SplitIndexCommand(
+                request,
+                rsp,
+                null,
+                Lists.newArrayList(core1, core2),
+                ranges,
+                new PlainIdRouter(),
+                null,
+                null,
+                splitMethod);
         doSplit(command);
       } finally {
         if (request != null) request.close();
@@ -230,10 +303,22 @@ private void doTestSplitByCores(SolrIndexSplitter.SplitMethod splitMethod) throw
       final EmbeddedSolrServer server2 = new EmbeddedSolrServer(h.getCoreContainer(), "split2");
       server1.commit(true, true);
       server2.commit(true, true);
-      assertEquals("id:dorothy should be present in split index1", 1, server1.query(new SolrQuery("id:dorothy")).getResults().getNumFound());
-      assertEquals("id:kansas should not be present in split index1", 0, server1.query(new SolrQuery("id:kansas")).getResults().getNumFound());
-      assertEquals("id:dorothy should not be present in split index2", 0, server2.query(new SolrQuery("id:dorothy")).getResults().getNumFound());
-      assertEquals("id:kansas should be present in split index2", 1, server2.query(new SolrQuery("id:kansas")).getResults().getNumFound());
+      assertEquals(
+          "id:dorothy should be present in split index1",
+          1,
+          server1.query(new SolrQuery("id:dorothy")).getResults().getNumFound());
+      assertEquals(
+          "id:kansas should not be present in split index1",
+          0,
+          server1.query(new SolrQuery("id:kansas")).getResults().getNumFound());
+      assertEquals(
+          "id:dorothy should not be present in split index2",
+          0,
+          server2.query(new SolrQuery("id:dorothy")).getResults().getNumFound());
+      assertEquals(
+          "id:kansas should be present in split index2",
+          1,
+          server2.query(new SolrQuery("id:kansas")).getResults().getNumFound());
     } finally {
       h.getCoreContainer().unload("split2");
       h.getCoreContainer().unload("split1");
@@ -264,25 +349,51 @@ private void doTestSplitAlternately(SolrIndexSplitter.SplitMethod splitMethod) t
     request = lrf.makeRequest("q", "dummy");

     SolrQueryResponse rsp = new SolrQueryResponse();
-    SplitIndexCommand command = new SplitIndexCommand(request, rsp,
-        Lists.newArrayList(indexDir1.getAbsolutePath(), indexDir2.getAbsolutePath(), indexDir3.getAbsolutePath()),
-        null, null, new PlainIdRouter(), null, null, splitMethod);
+    SplitIndexCommand command =
+        new SplitIndexCommand(
+            request,
+            rsp,
+            Lists.newArrayList(
+                indexDir1.getAbsolutePath(),
+                indexDir2.getAbsolutePath(),
+                indexDir3.getAbsolutePath()),
+            null,
+            null,
+            new PlainIdRouter(),
+            null,
+            null,
+            splitMethod);
     doSplit(command);

-    directory = h.getCore().getDirectoryFactory().get(indexDir1.getAbsolutePath(),
-        DirectoryFactory.DirContext.DEFAULT, h.getCore().getSolrConfig().indexConfig.lockType);
+    directory =
+        h.getCore()
+            .getDirectoryFactory()
+            .get(
+                indexDir1.getAbsolutePath(),
+                DirectoryFactory.DirContext.DEFAULT,
+                h.getCore().getSolrConfig().indexConfig.lockType);
     DirectoryReader reader = DirectoryReader.open(directory);
     int numDocs1 = reader.numDocs();
     reader.close();
     h.getCore().getDirectoryFactory().release(directory);
-    directory = h.getCore().getDirectoryFactory().get(indexDir2.getAbsolutePath(),
-        DirectoryFactory.DirContext.DEFAULT, h.getCore().getSolrConfig().indexConfig.lockType);
+    directory =
+        h.getCore()
+            .getDirectoryFactory()
+            .get(
+                indexDir2.getAbsolutePath(),
+                DirectoryFactory.DirContext.DEFAULT,
+                h.getCore().getSolrConfig().indexConfig.lockType);
     reader = DirectoryReader.open(directory);
     int numDocs2 = reader.numDocs();
     reader.close();
     h.getCore().getDirectoryFactory().release(directory);
-    directory = h.getCore().getDirectoryFactory().get(indexDir3.getAbsolutePath(),
-        DirectoryFactory.DirContext.DEFAULT, h.getCore().getSolrConfig().indexConfig.lockType);
+    directory =
+        h.getCore()
+            .getDirectoryFactory()
+            .get(
+                indexDir3.getAbsolutePath(),
+                DirectoryFactory.DirContext.DEFAULT,
+                h.getCore().getSolrConfig().indexConfig.lockType);
     reader = DirectoryReader.open(directory);
     int numDocs3 = reader.numDocs();
     reader.close();
@@ -294,7 +405,7 @@ private void doTestSplitAlternately(SolrIndexSplitter.SplitMethod splitMethod) t
       assertEquals("split index3 has wrong number of documents", max / 3, numDocs3);
     } finally {
       if (request != null) request.close(); // decrefs the searcher
-      if (directory != null) {
+      if (directory != null) {
         // perhaps an assert failed, release the directory
         h.getCore().getDirectoryFactory().release(directory);
       }
@@ -307,11 +418,11 @@ public void testSplitByRouteKey() throws Exception {
   }

   @Test
-  public void testSplitByRouteKeyLink() throws Exception {
+  public void testSplitByRouteKeyLink() throws Exception {
     doTestSplitByRouteKey(SolrIndexSplitter.SplitMethod.LINK);
   }

-  private void doTestSplitByRouteKey(SolrIndexSplitter.SplitMethod splitMethod) throws Exception {
+  private void doTestSplitByRouteKey(SolrIndexSplitter.SplitMethod splitMethod) throws Exception {
     File indexDir = createTempDir().toFile();

     CompositeIdRouter r1 = new CompositeIdRouter();
@@ -330,7 +441,7 @@ private void doTestSplitByRouteKey(SolrIndexSplitter.SplitMethod splitMethod) th
        "lift direction" "testimony meeting"
      */
-    for (int i=0; i<10; i++) {
+    for (int i = 0; i < 10; i++) {
       assertU(adoc("id", splitKey + i));
       assertU(adoc("id", key2 + i));
     }
@@ -344,35 +455,49 @@ private void doTestSplitByRouteKey(SolrIndexSplitter.SplitMethod splitMethod) th
     try {
       request = lrf.makeRequest("q", "dummy");
       SolrQueryResponse rsp = new SolrQueryResponse();
-      SplitIndexCommand command = new SplitIndexCommand(request, rsp,
-          Lists.newArrayList(indexDir.getAbsolutePath()), null, Lists.newArrayList(splitKeyRange),
-          new CompositeIdRouter(), null, splitKey, splitMethod);
+      SplitIndexCommand command =
+          new SplitIndexCommand(
+              request,
+              rsp,
+              Lists.newArrayList(indexDir.getAbsolutePath()),
+              null,
+              Lists.newArrayList(splitKeyRange),
+              new CompositeIdRouter(),
+              null,
+              splitKey,
+              splitMethod);
       doSplit(command);
-      directory = h.getCore().getDirectoryFactory().get(indexDir.getAbsolutePath(),
-          DirectoryFactory.DirContext.DEFAULT, h.getCore().getSolrConfig().indexConfig.lockType);
+      directory =
+          h.getCore()
+              .getDirectoryFactory()
+              .get(
+                  indexDir.getAbsolutePath(),
+                  DirectoryFactory.DirContext.DEFAULT,
+                  h.getCore().getSolrConfig().indexConfig.lockType);
       DirectoryReader reader = DirectoryReader.open(directory);
       assertEquals("split index has wrong number of documents", 10, reader.numDocs());
       reader.close();
       h.getCore().getDirectoryFactory().release(directory);
       directory = null;
     } finally {
-      if (request != null) {
+      if (request != null) {
         request.close();
       }
-      if (directory != null) {
+      if (directory != null) {
         h.getCore().getDirectoryFactory().release(directory);
       }
     }
   }

-  private List<DocRouter.Range> getRanges(String id1, String id2) throws UnsupportedEncodingException {
+  private List<DocRouter.Range> getRanges(String id1, String id2)
+      throws UnsupportedEncodingException {
     // find minHash/maxHash hash ranges
     byte[] bytes = id1.getBytes(StandardCharsets.UTF_8);
     int minHash = Hash.murmurhash3_x86_32(bytes, 0, bytes.length, 0);
     bytes = id2.getBytes(StandardCharsets.UTF_8);
     int maxHash = Hash.murmurhash3_x86_32(bytes, 0, bytes.length, 0);

-    if (minHash > maxHash) {
+    if (minHash > maxHash) {
       int temp = maxHash;
       maxHash = minHash;
       minHash = temp;
diff --git a/solr/core/src/test/org/apache/solr/update/TestAtomicUpdateErrorCases.java b/solr/core/src/test/org/apache/solr/update/TestAtomicUpdateErrorCases.java
index 53842b578bc..f24899f5765 100644
--- a/solr/core/src/test/org/apache/solr/update/TestAtomicUpdateErrorCases.java
+++ b/solr/core/src/test/org/apache/solr/update/TestAtomicUpdateErrorCases.java
@@ -24,24 +24,26 @@ public class TestAtomicUpdateErrorCases extends SolrTestCaseJ4 {
   public void testUpdateNoTLog() throws Exception {
     try {
       System.setProperty("enable.update.log", "false");
-      initCore("solrconfig.xml","schema15.xml");
-
+      initCore("solrconfig.xml", "schema15.xml");
+
       UpdateHandler uh = h.getCore().getUpdateHandler();
-      assertTrue("this test requires DirectUpdateHandler2",
-          uh instanceof DirectUpdateHandler2);
+      assertTrue("this test requires DirectUpdateHandler2", uh instanceof DirectUpdateHandler2);
+
+      assertNull(
+          "this test requires that the updateLog not be enabled, it "
+              + "seems that someone modified the configs",
+          ((DirectUpdateHandler2) uh).getUpdateLog());

-      assertNull("this test requires that the updateLog not be enabled, it " +
-          "seems that someone modified the configs",
-          ((DirectUpdateHandler2)uh).getUpdateLog());
-
       // creating docs should work fine
       addAndGetVersion(sdoc("id", "1", "val_i", "42"), null);
       assertU(commit());

       // updating docs should fail
       ignoreException("updateLog");
-      SolrException ex = expectThrows(SolrException.class,
-          () -> addAndGetVersion(sdoc("id", "1", "val_i", map("inc",-666)), null));
+      SolrException ex =
+          expectThrows(
+              SolrException.class,
+              () -> addAndGetVersion(sdoc("id", "1", "val_i", map("inc", -666)), null));
       assertEquals(400, ex.code());
       assertTrue(ex.getMessage().contains("unless <updateLog/> is configured"));
       resetExceptionIgnores();
@@ -53,22 +55,26 @@ public void testUpdateNoDistribProcessor() throws Exception {

   public void testUpdateNoDistribProcessor() throws Exception {
     try {
-      initCore("solrconfig-tlog.xml","schema15.xml");
-
-      assertNotNull("this test requires an update chain named 'nodistrib'",
-          h.getCore().getUpdateProcessingChain("nodistrib"));
+      initCore("solrconfig-tlog.xml", "schema15.xml");
+
+      assertNotNull(
+          "this test requires an update chain named 'nodistrib'",
+          h.getCore().getUpdateProcessingChain("nodistrib"));

       // creating docs should work fine
-      addAndGetVersion(sdoc("id", "1", "val_i", "42"),
-          params("update.chain","nodistrib"));
+      addAndGetVersion(sdoc("id", "1", "val_i", "42"), params("update.chain", "nodistrib"));
       assertU(commit());

       ignoreException("DistributedUpdateProcessorFactory");
       // updating docs should fail
-      SolrException ex = expectThrows(SolrException.class, () -> {
-        addAndGetVersion(sdoc("id", "1", "val_i", map("inc",-666)),
-            params("update.chain","nodistrib"));
-      });
+      SolrException ex =
+          expectThrows(
+              SolrException.class,
+              () -> {
+                addAndGetVersion(
+                    sdoc("id", "1", "val_i", map("inc", -666)),
+                    params("update.chain", "nodistrib"));
+              });
       assertEquals(400, ex.code());
       assertTrue(ex.getMessage().contains("DistributedUpdateProcessorFactory"));
       resetExceptionIgnores();
@@ -76,5 +82,4 @@ public void testUpdateNoDistribProcessor() throws Exception {
       deleteCore();
     }
   }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/update/TestExceedMaxTermLength.java b/solr/core/src/test/org/apache/solr/update/TestExceedMaxTermLength.java
index 2bc3c259864..e6e210ee0b9 100644
--- a/solr/core/src/test/org/apache/solr/update/TestExceedMaxTermLength.java
+++ b/solr/core/src/test/org/apache/solr/update/TestExceedMaxTermLength.java
@@ -16,23 +16,21 @@
  */
 package org.apache.solr.update;

+import java.util.Locale;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.SolrTestCaseJ4;
-
-import java.util.Locale;
-
 import org.junit.After;
 import org.junit.BeforeClass;
 import org.junit.Test;

 public class TestExceedMaxTermLength extends SolrTestCaseJ4 {

-  public final static String TEST_SOLRCONFIG_NAME = "solrconfig.xml";
-  public final static String TEST_SCHEMAXML_NAME = "schema11.xml";
+  public static final String TEST_SOLRCONFIG_NAME = "solrconfig.xml";
+  public static final String TEST_SCHEMAXML_NAME = "schema11.xml";

-  private final static int minTestTermLength = IndexWriter.MAX_TERM_LENGTH + 1;
-  private final static int maxTestTermLength = IndexWriter.MAX_TERM_LENGTH * 2;
+  private static final int minTestTermLength = IndexWriter.MAX_TERM_LENGTH + 1;
+  private static final int maxTestTermLength = IndexWriter.MAX_TERM_LENGTH * 2;

   @BeforeClass
   public static void beforeTests() throws Exception {
@@ -46,37 +44,37 @@ public void cleanup() throws Exception {
   }

   @Test
-  public void testExceededMaxTermLength(){
+  public void testExceededMaxTermLength() {

     // problematic field
     final String longFieldName = "cat";
-    final String longFieldValue = TestUtil.randomSimpleString(random(),
-                                                              minTestTermLength,
-                                                              maxTestTermLength);
+    final String longFieldValue =
+        TestUtil.randomSimpleString(random(), minTestTermLength, maxTestTermLength);

-    final String okayFieldName = TestUtil.randomSimpleString(random(), 1, 50) + "_sS" ; //Dynamic field
-    final String okayFieldValue = TestUtil.randomSimpleString(random(),
-                                                              minTestTermLength,
-                                                              maxTestTermLength);
+    final String okayFieldName =
+        TestUtil.randomSimpleString(random(), 1, 50) + "_sS"; // Dynamic field
+    final String okayFieldValue =
+        TestUtil.randomSimpleString(random(), minTestTermLength, maxTestTermLength);

     boolean includeOkayFields = random().nextBoolean();

-    if(random().nextBoolean()) {
-      //Use XML
+    if (random().nextBoolean()) {
+      // Use XML
       String doc;
-      if(includeOkayFields) {
+      if (includeOkayFields) {
         doc = adoc("id", "1", longFieldName, longFieldValue, okayFieldName, okayFieldValue);
       } else {
         doc = adoc("id", "1", longFieldName, longFieldValue);
       }

       assertFailedU(doc);
     } else {
-      //Use JSON
+      // Use JSON
       final String jsonStr;
-      if(includeOkayFields) {
+      if (includeOkayFields) {
         String format = "[{'id':'1','%s':'%s', '%s': '%s'}]";
-        jsonStr = String.format(Locale.ROOT, format, longFieldName, longFieldValue,
-            okayFieldName, okayFieldValue);
+        jsonStr =
+            String.format(
+                Locale.ROOT, format, longFieldName, longFieldValue, okayFieldName, okayFieldValue);
       } else {
         String format = "[{'id':'1','%s':'%s'}]";
         jsonStr = String.format(Locale.ROOT, format, longFieldName, longFieldValue);
@@ -92,38 +90,43 @@ public void testExceededMaxTermLength(){
   }

   @Test
-  public void testExceededMaxTermLengthWithLimitingFilter(){
+  public void testExceededMaxTermLengthWithLimitingFilter() {

     // problematic field
     final String longFieldName = "cat_length";
-    final String longFieldValue = TestUtil.randomSimpleString(random(),
-                                                              minTestTermLength,
-                                                              maxTestTermLength);
+    final String longFieldValue =
+        TestUtil.randomSimpleString(random(), minTestTermLength, maxTestTermLength);

-    final String okayFieldName = TestUtil.randomSimpleString(random(), 1, 50) + "_sS" ; //Dynamic field
-    final String okayFieldValue = TestUtil.randomSimpleString(random(),
-                                                              minTestTermLength,
-                                                              maxTestTermLength);
+    final String okayFieldName =
+        TestUtil.randomSimpleString(random(), 1, 50) + "_sS"; // Dynamic field
+    final String okayFieldValue =
+        TestUtil.randomSimpleString(random(), minTestTermLength, maxTestTermLength);

     boolean includeOkayFields = random().nextBoolean();

-    if(random().nextBoolean()) {
-      //Use XML
+    if (random().nextBoolean()) {
+      // Use XML
       String doc;
-      if(includeOkayFields) {
+      if (includeOkayFields) {
         doc = adoc("id", "1", longFieldName, longFieldValue, okayFieldName, okayFieldValue);
       } else {
         doc = adoc("id", "1", longFieldName, longFieldValue);
       }

       assertU(doc);
     } else {
-      //Use JSON
+      // Use JSON
       String jsonStr = null;
       try {
-        if(includeOkayFields) {
+        if (includeOkayFields) {
           jsonStr = "[{'id':'1','%s':'%s', '%s': '%s'}]";
-          jsonStr = String.format(Locale.ROOT, jsonStr, longFieldName, longFieldValue,
-              okayFieldName, okayFieldValue);
+          jsonStr =
+              String.format(
+                  Locale.ROOT,
+                  jsonStr,
+                  longFieldName,
+                  longFieldValue,
+                  okayFieldName,
+                  okayFieldValue);
           updateJ(json(jsonStr), null);
         } else {
           jsonStr = "[{'id':'1','%s':'%s'}]";
@@ -132,10 +135,9 @@ public void testExceededMaxTermLengthWithLimitingFilter(){
         }
       } catch (Exception e) {
         fail("Should not have failed adding doc " + jsonStr);
-        String msg= e.getCause().getMessage();
+        String msg = e.getCause().getMessage();
         assertTrue(msg.contains("one immense term in field=\"cat\""));
       }
-
     }

     assertU(commit());
diff --git a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdateWithRouteField.java b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdateWithRouteField.java
index 41e6aa2e7f0..0c7f8ea5956 100644
--- a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdateWithRouteField.java
+++ b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdateWithRouteField.java
@@ -18,6 +18,7 @@
 package org.apache.solr.update;

 import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.core.StringContains.containsString;

 import java.io.IOException;
 import java.nio.file.Path;
@@ -28,7 +29,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;
-
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -46,14 +46,12 @@
 import org.junit.BeforeClass;
 import org.junit.Test;

-import static org.hamcrest.core.StringContains.containsString;
-
 public class TestInPlaceUpdateWithRouteField extends SolrCloudTestCase {

   private static final int NUMBER_OF_DOCS = 100;

   private static final String COLLECTION = "collection1";
-  private static final String[] shards = new String[]{"shard1","shard2","shard3"};
+  private static final String[] shards = new String[] {"shard1", "shard2", "shard3"};

   @BeforeClass
   public static void setupCluster() throws Exception {
@@ -61,23 +59,21 @@ public static void setupCluster() throws Exception {
     String configName = "solrCloudCollectionConfig";
     int nodeCount = TestUtil.nextInt(random(), 1, 3);
-    configureCluster(nodeCount)
-
-        .addConfig(configName, configDir)
-        .configure();
+    configureCluster(nodeCount).addConfig(configName, configDir).configure();

     Map<String, String> collectionProperties = new HashMap<>();
-    collectionProperties.put("config", "solrconfig-tlog.xml" );
+    collectionProperties.put("config", "solrconfig-tlog.xml");
     collectionProperties.put("schema", "schema-inplace-updates.xml");
     int replicas = 2;

     // router field can be defined either for ImplicitDocRouter or CompositeIdRouter
     boolean implicit = random().nextBoolean();
-    String routerName = implicit ? "implicit":"compositeId";
-    Create createCmd = CollectionAdminRequest.createCollection(COLLECTION, configName, shards.length, replicas)
-        .setProperties(collectionProperties)
-        .setRouterName(routerName)
-        .setRouterField("shardName");
+    String routerName = implicit ? "implicit" : "compositeId";
+    Create createCmd =
+        CollectionAdminRequest.createCollection(COLLECTION, configName, shards.length, replicas)
+            .setProperties(collectionProperties)
+            .setRouterName(routerName)
+            .setRouterField("shardName");
     if (implicit) {
       createCmd.setShards(Arrays.stream(shards).collect(Collectors.joining(",")));
     }
@@ -87,10 +83,9 @@ public static void setupCluster() throws Exception {

   @Test
   public void testUpdatingDocValuesWithRouteField() throws Exception {
-    new UpdateRequest()
-        .deleteByQuery("*:*").commit(cluster.getSolrClient(), COLLECTION);
-
-    new UpdateRequest().add(createDocs(NUMBER_OF_DOCS)).commit(cluster.getSolrClient(), COLLECTION);
+    new UpdateRequest().deleteByQuery("*:*").commit(cluster.getSolrClient(), COLLECTION);
+
+    new UpdateRequest().add(createDocs(NUMBER_OF_DOCS)).commit(cluster.getSolrClient(), COLLECTION);

     int id = TestUtil.nextInt(random(), 1, NUMBER_OF_DOCS - 1);
     SolrDocument solrDocument = queryDoc(id);
@@ -100,47 +95,63 @@ public void testUpdatingDocValuesWithRouteField() throws Exception {
     Assert.assertThat(solrDocument.get("inplace_updatable_int"), is(id));

     int newDocValue = TestUtil.nextInt(random(), 1, 2 * NUMBER_OF_DOCS - 1);
-    SolrInputDocument sdoc = sdoc("id", ""+id,
-        // use route field in update command
-        "shardName", shardName,
-        "inplace_updatable_int", map("set", newDocValue));
-
-    UpdateRequest updateRequest = new UpdateRequest()
-        .add(sdoc);
-
-    // since this atomic update will be done in place, it shouldn't matter if we specify this param, or what it's value is
+    SolrInputDocument sdoc =
+        sdoc(
+            "id",
+            "" + id,
+            // use route field in update command
+            "shardName",
+            shardName,
+            "inplace_updatable_int",
+            map("set", newDocValue));
+
+    UpdateRequest updateRequest = new UpdateRequest().add(sdoc);
+
+    // since this atomic update will be done in place, it shouldn't matter if we specify this param,
+    // or what it's value is
     if (random().nextBoolean()) {
-      updateRequest.setParam(UpdateParams.REQUIRE_PARTIAL_DOC_UPDATES_INPLACE, Boolean.toString(random().nextBoolean()));
+      updateRequest.setParam(
+          UpdateParams.REQUIRE_PARTIAL_DOC_UPDATES_INPLACE,
+          Boolean.toString(random().nextBoolean()));
     }
     updateRequest.commit(cluster.getSolrClient(), COLLECTION);

     solrDocument = queryDoc(id);
     Long newVersion = (Long) solrDocument.get("_version_");
-    Assert.assertTrue("Version of updated document must be greater than original one",
+    Assert.assertTrue(
+        "Version of updated document must be greater than original one",
         newVersion > initialVersion);
-    Assert.assertThat( "Doc value must be updated", solrDocument.get("inplace_updatable_int"), is(newDocValue));
-    Assert.assertThat("Lucene doc id should not be changed for In-Place Updates.", solrDocument.get("[docid]"), is(luceneDocId));
+    Assert.assertThat(
+        "Doc value must be updated", solrDocument.get("inplace_updatable_int"), is(newDocValue));
+    Assert.assertThat(
+        "Lucene doc id should not be changed for In-Place Updates.",
+        solrDocument.get("[docid]"),
+        is(luceneDocId));

     sdoc.remove("shardName");
     checkWrongCommandFailure(sdoc);

-    sdoc.addField("shardName",  map("set", "newShardName"));
+    sdoc.addField("shardName", map("set", "newShardName"));
     checkWrongCommandFailure(sdoc);

     sdoc.setField("shardName", shardName);
-
+
     // if we now attempt an atomic update that we know can't be done in-place, this should fail...
    sdoc.addField("title_s", map("set", "this is a string that can't be updated in place"));
-    final SolrException e = expectThrows(SolrException.class, () -> {
-      final UpdateRequest r = new UpdateRequest();
-      r.add(sdoc);
-      r.setParam(UpdateParams.REQUIRE_PARTIAL_DOC_UPDATES_INPLACE, "true");
-      r.process(cluster.getSolrClient(), COLLECTION);
-    });
+    final SolrException e =
+        expectThrows(
+            SolrException.class,
+            () -> {
+              final UpdateRequest r = new UpdateRequest();
+              r.add(sdoc);
+              r.setParam(UpdateParams.REQUIRE_PARTIAL_DOC_UPDATES_INPLACE, "true");
+              r.process(cluster.getSolrClient(), COLLECTION);
+            });
     assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code());
     assertThat(e.getMessage(), containsString("Unable to update doc in-place: " + id));
   }

-  private void checkWrongCommandFailure(SolrInputDocument sdoc) throws SolrServerException, IOException {
+  private void checkWrongCommandFailure(SolrInputDocument sdoc)
+      throws SolrServerException, IOException {
     try {
       new UpdateRequest().add(sdoc).process(cluster.getSolrClient(), COLLECTION);
       fail("expect an exception for wrong update command");
@@ -153,18 +164,21 @@ private Collection<SolrInputDocument> createDocs(int number) {
     List<SolrInputDocument> result = new ArrayList<>();
     for (int i = 0; i < number; i++) {
       String randomShard = shards[random().nextInt(shards.length)];
-      result.add(sdoc("id", String.valueOf(i),
-          "shardName", randomShard,
-          "inplace_updatable_int", i));
+      result.add(
+          sdoc("id", String.valueOf(i), "shardName", randomShard, "inplace_updatable_int", i));
     }
     return result;
   }

   private SolrDocument queryDoc(int id) throws SolrServerException, IOException {
-    SolrQuery query = new SolrQuery(
-        "q", "id:" + id,
-        "fl", "_version_,inplace_updatable_int,[docid],shardName",
-        "targetCollection", COLLECTION);
+    SolrQuery query =
+        new SolrQuery(
+            "q",
+            "id:" + id,
+            "fl",
+            "_version_,inplace_updatable_int,[docid],shardName",
+            "targetCollection",
+            COLLECTION);
     QueryResponse response = cluster.getSolrClient().query(COLLECTION, query);
     SolrDocumentList result = (SolrDocumentList) response.getResponse().get("response");
     Assert.assertThat(result.getNumFound(), is(1L));
diff --git a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
index 74748b7a5b2..ab89d43a16c 100644
--- a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
+++ b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
@@ -17,6 +17,8 @@
 package org.apache.solr.update;

+import static org.hamcrest.core.StringContains.containsString;
+
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
@@ -30,7 +32,6 @@
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
-
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.util.LuceneTestCase.Slow;
@@ -70,11 +71,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import static org.hamcrest.core.StringContains.containsString;
-
-/**
- * Tests the in-place updates (docValues updates) for a one shard, three replica cluster.
- */
+/** Tests the in-place updates (docValues updates) for a one shard, three replica cluster. */
 @Slow
 public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -90,19 +87,20 @@ public static void beforeSuperClass() throws Exception {
     systemSetPropertySolrTestsMergePolicyFactory(NoMergePolicyFactory.class.getName());

     initCore(configString, schemaString);
-
+
     // sanity check that autocommits are disabled
     assertEquals(-1, h.getCore().getSolrConfig().getUpdateHandlerInfo().autoCommmitMaxTime);
     assertEquals(-1, h.getCore().getSolrConfig().getUpdateHandlerInfo().autoSoftCommmitMaxTime);
     assertEquals(-1, h.getCore().getSolrConfig().getUpdateHandlerInfo().autoCommmitMaxDocs);
     assertEquals(-1, h.getCore().getSolrConfig().getUpdateHandlerInfo().autoSoftCommmitMaxDocs);
-
+
     // assert that NoMergePolicy was chosen
     RefCounted<IndexWriter> iw = h.getCore().getSolrCoreState().getIndexWriter(h.getCore());
     try {
       IndexWriter writer = iw.get();
-      assertTrue("Actual merge policy is: " + writer.getConfig().getMergePolicy(),
-          writer.getConfig().getMergePolicy() instanceof NoMergePolicy);
+      assertTrue(
+          "Actual merge policy is: " + writer.getConfig().getMergePolicy(),
+          writer.getConfig().getMergePolicy() instanceof NoMergePolicy);
     } finally {
       iw.decref();
     }
@@ -110,7 +108,9 @@ public static void beforeSuperClass() throws Exception {

   @Override
   protected boolean useTlogReplicas() {
-    return false; // TODO: tlog replicas makes commits take way to long due to what is likely a bug and it's TestInjection use
+    // TODO: tlog replicas makes commits take way to long due to what is likely a bug and it's
+    // TestInjection use
+    return false;
   }

   public TestInPlaceUpdatesDistrib() throws Exception {
@@ -121,44 +121,62 @@ public TestInPlaceUpdatesDistrib() throws Exception {

   private SolrClient LEADER = null;
   private List<SolrClient> NONLEADERS = null;
-
+
   @Test
   @ShardsFixed(num = 3)
-  //28-June-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 21-May-2018
-  // commented 4-Sep-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
-  // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // annotated on: 24-Dec-2018
   public void test() throws Exception {
     waitForRecoveriesToFinish(true);
     resetDelays();
-
+
     mapReplicasToClients();
-
+
     clearIndex();
     commit();
-
+
     // sanity check no one broke the assumptions we make about our schema
-    checkExpectedSchemaField(map("name", "inplace_updatable_int",
-        "type","int",
-        "stored",Boolean.FALSE,
-        "indexed",Boolean.FALSE,
-        "docValues",Boolean.TRUE));
-    checkExpectedSchemaField(map("name", "inplace_updatable_float",
-        "type","float",
-        "stored",Boolean.FALSE,
-        "indexed",Boolean.FALSE,
-        "docValues",Boolean.TRUE));
-    checkExpectedSchemaField(map("name", "_version_",
-        "type","long",
-        "stored",Boolean.FALSE,
-        "indexed",Boolean.FALSE,
-        "docValues",Boolean.TRUE));
+    checkExpectedSchemaField(
+        map(
+            "name",
+            "inplace_updatable_int",
+            "type",
+            "int",
+            "stored",
+            Boolean.FALSE,
+            "indexed",
+            Boolean.FALSE,
+            "docValues",
+            Boolean.TRUE));
+    checkExpectedSchemaField(
+        map(
+            "name",
+            "inplace_updatable_float",
+            "type",
+            "float",
+            "stored",
+            Boolean.FALSE,
+            "indexed",
+            Boolean.FALSE,
+            "docValues",
+            Boolean.TRUE));
+    checkExpectedSchemaField(
+        map(
+            "name",
+            "_version_",
+            "type",
+            "long",
+            "stored",
+            Boolean.FALSE,
+            "indexed",
+            Boolean.FALSE,
+            "docValues",
+            Boolean.TRUE));

     // Do the tests now:
-
+
     // AwaitsFix this test fails easily
     // delayedReorderingFetchesMissingUpdateFromLeaderTest();
-
+
     resetDelays();
     docValuesUpdateTest();
     resetDelays();
@@ -170,7 +188,8 @@ public void test() throws Exception {
     resetDelays();
     updateExistingThenNonExistentDoc();
     resetDelays();
-    // TODO Should we combine all/some of these into a single test, so as to cut down on execution time?
+    // TODO Should we combine all/some of these into a single test, so as to cut down on execution
+    // time?
     reorderedDBQIndividualReplicaTest();
     resetDelays();
     reorderedDeletesTest();
@@ -181,17 +200,17 @@ public void test() throws Exception {
     resetDelays();
     setNullForDVEnabledField();
     resetDelays();
-
+
     // AwaitsFix this test fails easily
     // reorderedDBQsUsingUpdatedValueFromADroppedUpdate();
   }

   private void resetDelays() {
-    for (JettySolrRunner j : jettys ) {
+    for (JettySolrRunner j : jettys) {
       j.getDebugFilter().unsetDelay();
     }
   }
-
+
   private void mapReplicasToClients() throws KeeperException, InterruptedException {
     ZkStateReader zkStateReader = cloudClient.getZkStateReader();
     cloudClient.getZkStateReader().forceUpdateCollection(DEFAULT_COLLECTION);
@@ -201,23 +220,23 @@ private void mapReplicasToClients() throws KeeperException, InterruptedException
     leader = shard1.getLeader();

     String leaderBaseUrl = zkStateReader.getBaseUrlForNodeName(leader.getNodeName());
-    for (int i=0; i();
-    for (Replica rep: shard1.getReplicas()) {
       if (rep.equals(leader)) {
         continue;
       }
       String baseUrl = zkStateReader.getBaseUrlForNodeName(rep.getNodeName());
-      for (int i=0; i updates = new ArrayList<>();
-    updates.add(simulatedUpdateRequest(null, "id", 0, "title_s", "title0_new", "inplace_updatable_float", newinplace_updatable_float, "_version_", version0 + 1)); // full update
-    updates.add(simulatedUpdateRequest(version0 + 1, "id", 0, "inplace_updatable_float", newinplace_updatable_float + 1, "_version_", version0 + 2)); // inplace_updatable_float=101
+    updates.add(
+        simulatedUpdateRequest(
+            null,
+            "id",
+            0,
+            "title_s",
+            "title0_new",
+            "inplace_updatable_float",
+            newinplace_updatable_float,
+            "_version_",
+            version0 + 1)); // full update
+    updates.add(
+        simulatedUpdateRequest(
+            version0 + 1,
+            "id",
+            0,
+            "inplace_updatable_float",
+            newinplace_updatable_float + 1,
+            "_version_",
+            version0 + 2)); // inplace_updatable_float=101
     updates.add(simulatedDeleteRequest(0, version0 + 3));

     // order the updates correctly for NONLEADER 1
@@ -298,26 +335,31 @@ private void reorderedDBQsSimpleTest() throws Exception {
     }

     // Reordering needs to happen using parallel threads
-    ExecutorService threadpool =
-        ExecutorUtil.newMDCAwareFixedThreadPool(updates.size() + 1, new SolrNamedThreadFactory(getTestName()));
+    ExecutorService threadpool =
+        ExecutorUtil.newMDCAwareFixedThreadPool(
+            updates.size() + 1, new SolrNamedThreadFactory(getTestName()));

     // re-order the updates for NONLEADER 0
     List<UpdateRequest> reorderedUpdates = new ArrayList<>(updates);
     Collections.shuffle(reorderedUpdates, random());
     List<Future<UpdateResponse>> updateResponses = new ArrayList<>();
     for (UpdateRequest update : reorderedUpdates) {
-      AsyncUpdateWithRandomCommit task = new AsyncUpdateWithRandomCommit(update, NONLEADERS.get(0), random().nextLong());
+      AsyncUpdateWithRandomCommit task =
+          new AsyncUpdateWithRandomCommit(update, NONLEADERS.get(0), random().nextLong());
       updateResponses.add(threadpool.submit(task));
-      // while we can't guarantee/trust what order the updates are executed in, since multiple threads
-      // are involved, but we're trying to bias the thread scheduling to run them in the order
submitted + // while we can't guarantee/trust what order the updates are executed in, since multiple + // threads are involved, but we're trying to bias the thread scheduling to run them in the + // order submitted Thread.sleep(10); } - + threadpool.shutdown(); - assertTrue("Thread pool didn't terminate within 15 secs", threadpool.awaitTermination(15, TimeUnit.SECONDS)); - + assertTrue( + "Thread pool didn't terminate within 15 secs", + threadpool.awaitTermination(15, TimeUnit.SECONDS)); + // assert all requests were successful - for (Future resp: updateResponses) { + for (Future resp : updateResponses) { assertEquals(0, resp.get().getStatus()); } @@ -342,15 +384,34 @@ private void reorderedDBQIndividualReplicaTest() throws Exception { float newinplace_updatable_float = 100; long version0 = 2000; List updates = new ArrayList<>(); - updates.add(simulatedUpdateRequest(null, "id", 0, "title_s", "title0_new", "inplace_updatable_float", - newinplace_updatable_float, "_version_", version0 + 1)); // full update - updates.add(simulatedUpdateRequest(version0 + 1, "id", 0, "inplace_updatable_float", - newinplace_updatable_float + 1, "_version_", version0 + 2)); // inplace_updatable_float=101 - updates.add(simulatedDeleteRequest("inplace_updatable_float:"+(newinplace_updatable_float + 1), version0 + 3)); + updates.add( + simulatedUpdateRequest( + null, + "id", + 0, + "title_s", + "title0_new", + "inplace_updatable_float", + newinplace_updatable_float, + "_version_", + version0 + 1)); // full update + updates.add( + simulatedUpdateRequest( + version0 + 1, + "id", + 0, + "inplace_updatable_float", + newinplace_updatable_float + 1, + "_version_", + version0 + 2)); // inplace_updatable_float=101 + updates.add( + simulatedDeleteRequest( + "inplace_updatable_float:" + (newinplace_updatable_float + 1), version0 + 3)); // Reordering needs to happen using parallel threads ExecutorService threadpool = - ExecutorUtil.newMDCAwareFixedThreadPool(updates.size() + 1, new SolrNamedThreadFactory(getTestName())); + ExecutorUtil.newMDCAwareFixedThreadPool( + updates.size() + 1, new SolrNamedThreadFactory(getTestName())); // re-order the updates by swapping the last two List reorderedUpdates = new ArrayList<>(updates); @@ -359,18 +420,22 @@ private void reorderedDBQIndividualReplicaTest() throws Exception { List> updateResponses = new ArrayList<>(); for (UpdateRequest update : reorderedUpdates) { - AsyncUpdateWithRandomCommit task = new AsyncUpdateWithRandomCommit(update, NONLEADERS.get(0), random().nextLong()); + AsyncUpdateWithRandomCommit task = + new AsyncUpdateWithRandomCommit(update, NONLEADERS.get(0), random().nextLong()); updateResponses.add(threadpool.submit(task)); - // while we can't guarantee/trust what order the updates are executed in, since multiple threads - // are involved, but we're trying to bias the thread scheduling to run them in the order submitted + // while we can't guarantee/trust what order the updates are executed in, since multiple + // threads are involved, but we're trying to bias the thread scheduling to run them in the + // order submitted Thread.sleep(100); } threadpool.shutdown(); - assertTrue("Thread pool didn't terminate within 15 secs", threadpool.awaitTermination(15, TimeUnit.SECONDS)); + assertTrue( + "Thread pool didn't terminate within 15 secs", + threadpool.awaitTermination(15, TimeUnit.SECONDS)); // assert all requests were successful - for (Future resp: updateResponses) { + for (Future resp : updateResponses) { assertEquals(0, resp.get().getStatus()); } @@ -391,12 +456,21 @@ 
private void docValuesUpdateTest() throws Exception { for (int id = 0; id < numDocs; id++) { ids.add(id); } - + buildRandomIndex(101.0F, ids); - + List luceneDocids = new ArrayList<>(numDocs); List valuesList = new ArrayList<>(numDocs); - SolrParams params = params("q", "id:[0 TO *]", "fl", "*,[docid]", "rows", String.valueOf(numDocs), "sort", "id_i asc"); + SolrParams params = + params( + "q", + "id:[0 TO *]", + "fl", + "*,[docid]", + "rows", + String.valueOf(numDocs), + "sort", + "id_i asc"); SolrDocumentList results = LEADER.query(params).getResults(); assertEquals(numDocs, results.size()); for (SolrDocument doc : results) { @@ -404,9 +478,10 @@ private void docValuesUpdateTest() throws Exception { valuesList.add((Float) doc.get("inplace_updatable_float")); } log.info("Initial results: {}", results); - + // before we do any atomic operations, sanity check our results against all clients - assertDocIdsAndValuesAgainstAllClients("sanitycheck", params, luceneDocids, "inplace_updatable_float", valuesList); + assertDocIdsAndValuesAgainstAllClients( + "sanitycheck", params, luceneDocids, "inplace_updatable_float", valuesList); // now we're going to overwrite the value for all of our testing docs // giving them a value between -5 and +5 @@ -419,7 +494,7 @@ private void docValuesUpdateTest() throws Exception { valuesList.set(id, value); } log.info("inplace_updatable_float: {}", valuesList); - + // update doc w/ set Collections.shuffle(ids, r); // so updates aren't applied in index order for (int id : ids) { @@ -428,13 +503,18 @@ private void docValuesUpdateTest() throws Exception { commit(); - assertDocIdsAndValuesAgainstAllClients - ("set", SolrParams.wrapDefaults(params("q", "inplace_updatable_float:[-5.0 TO 5.0]", - "fq", "id:[0 TO *]"), - // existing sort & fl that we want... - params), - luceneDocids, "inplace_updatable_float", valuesList); - + assertDocIdsAndValuesAgainstAllClients( + "set", + SolrParams.wrapDefaults( + params( + "q", "inplace_updatable_float:[-5.0 TO 5.0]", + "fq", "id:[0 TO *]"), + // existing sort & fl that we want... + params), + luceneDocids, + "inplace_updatable_float", + valuesList); + // update doc, w/increment log.info("Updating the documents..."); Collections.shuffle(ids, r); // so updates aren't applied in the same order as our 'set' @@ -442,43 +522,48 @@ private void docValuesUpdateTest() throws Exception { // all incremements will use some value X such that 20 < abs(X) // thus ensuring that after all incrememnts are done, there should be // 0 test docs matching the query inplace_updatable_float:[-10 TO 10] - final float inc = (r.nextBoolean() ? -1.0F : 1.0F) * (r.nextFloat() + (float)atLeast(20)); + final float inc = (r.nextBoolean() ? -1.0F : 1.0F) * (r.nextFloat() + (float) atLeast(20)); assert 20 < Math.abs(inc); - final float value = (float)valuesList.get(id) + inc; + final float value = (float) valuesList.get(id) + inc; assert value < -10 || 10 < value; - + valuesList.set(id, value); index("id", id, "inplace_updatable_float", map("inc", inc)); } commit(); - - assertDocIdsAndValuesAgainstAllClients - ("inc", SolrParams.wrapDefaults(params("q", "-inplace_updatable_float:[-10.0 TO 10.0]", - "fq", "id:[0 TO *]"), - // existing sort & fl that we want... - params), - luceneDocids, "inplace_updatable_float", valuesList); + + assertDocIdsAndValuesAgainstAllClients( + "inc", + SolrParams.wrapDefaults( + params( + "q", "-inplace_updatable_float:[-10.0 TO 10.0]", + "fq", "id:[0 TO *]"), + // existing sort & fl that we want... 
+ params), + luceneDocids, + "inplace_updatable_float", + valuesList); log.info("Updating the documents with new field..."); Collections.shuffle(ids, r); for (int id : ids) { final int val = random().nextInt(20); valuesList.set(id, val); - index("id", id, "inplace_updatable_int", map((random().nextBoolean()?"inc": "set"), val)); + index("id", id, "inplace_updatable_int", map((random().nextBoolean() ? "inc" : "set"), val)); } commit(); - assertDocIdsAndValuesAgainstAllClients - ("inplace_for_first_field_update", SolrParams.wrapDefaults(params("q", "inplace_updatable_int:[* TO *]", - "fq", "id:[0 TO *]"), - params), - luceneDocids, "inplace_updatable_int", valuesList); + assertDocIdsAndValuesAgainstAllClients( + "inplace_for_first_field_update", + SolrParams.wrapDefaults( + params("q", "inplace_updatable_int:[* TO *]", "fq", "id:[0 TO *]"), params), + luceneDocids, + "inplace_updatable_int", + valuesList); log.info("docValuesUpdateTest: This test passed fine..."); } - /** - * Ingest many documents, keep committing. Then update a document from a very old segment. - */ + /** Ingest many documents, keep committing. Then update a document from a very old segment. */ private void updatingDVsInAVeryOldSegment() throws Exception { clearIndex(); commit(); @@ -487,18 +572,18 @@ private void updatingDVsInAVeryOldSegment() throws Exception { index("id", id, "inplace_updatable_float", "1", "title_s", "newtitle"); // create 10 more segments - for (int i=0; i<10; i++) { + for (int i = 0; i < 10; i++) { buildRandomIndex(101.0F, Collections.emptyList()); } index("id", id, "inplace_updatable_float", map("inc", "1")); - for (SolrClient client: new SolrClient[] {LEADER, NONLEADERS.get(0), NONLEADERS.get(1)}) { + for (SolrClient client : new SolrClient[] {LEADER, NONLEADERS.get(0), NONLEADERS.get(1)}) { assertEquals("newtitle", client.getById(id).get("title_s")); assertEquals(2.0f, client.getById(id).get("inplace_updatable_float")); } commit(); - for (SolrClient client: new SolrClient[] {LEADER, NONLEADERS.get(0), NONLEADERS.get(1)}) { + for (SolrClient client : new SolrClient[] {LEADER, NONLEADERS.get(0), NONLEADERS.get(1)}) { assertEquals("newtitle", client.getById(id).get("title_s")); assertEquals(2.0f, client.getById(id).get("inplace_updatable_float")); } @@ -506,16 +591,16 @@ private void updatingDVsInAVeryOldSegment() throws Exception { log.info("updatingDVsInAVeryOldSegment: This test passed fine..."); } - /** * Test scenario: + * *
<ul> - * <li>Send a batch of documents to one node</li> - * <li>Batch consist of an update for document which is existed and an update for documents which is not existed</li> - * <li>Assumption which is made is that both updates will be applied: field for existed document will be updated, - * new document will be created for a non existed one</li> + * <li>Send a batch of documents to one node + * <li>Batch consist of an update for document which is existed and an update for documents + * which is not existed + * <li>Assumption which is made is that both updates will be applied: field for existed document + * will be updated, new document will be created for a non existed one * </ul>
- * */ private void updateExistingThenNonExistentDoc() throws Exception { clearIndex(); @@ -528,88 +613,113 @@ private void updateExistingThenNonExistentDoc() throws Exception { SolrInputDocument nonexistentDocUpdate = new SolrInputDocument(); nonexistentDocUpdate.setField("id", 2); nonexistentDocUpdate.setField("inplace_updatable_float", map("set", "50")); - + SolrInputDocument docs[] = new SolrInputDocument[] {existingDocUpdate, nonexistentDocUpdate}; SolrClient solrClient = clients.get(random().nextInt(clients.size())); add(solrClient, null, docs); commit(); - for (SolrClient client: new SolrClient[] {LEADER, NONLEADERS.get(0), NONLEADERS.get(1)}) { + for (SolrClient client : new SolrClient[] {LEADER, NONLEADERS.get(0), NONLEADERS.get(1)}) { for (SolrInputDocument expectDoc : docs) { String docId = expectDoc.getFieldValue("id").toString(); SolrDocument actualDoc = client.getById(docId); assertNotNull("expected to get doc by id:" + docId, actualDoc); - assertEquals("expected to update "+actualDoc, - 50.0f, actualDoc.get("inplace_updatable_float")); + assertEquals( + "expected to update " + actualDoc, 50.0f, actualDoc.get("inplace_updatable_float")); } } } /** - * Retries the specified 'req' against each SolrClient in "clients" until the expected number of - * results are returned, at which point the results are verified using assertDocIdsAndValuesInResults + * Retries the specified 'req' against each SolrClient in "clients" until the expected number of + * results are returned, at which point the results are verified using + * assertDocIdsAndValuesInResults * * @param debug used in log and assertion messages - * @param req the query to execut, should include rows & sort params such that the results can be compared to luceneDocids and valuesList - * @param luceneDocids a list of "[docid]" values to be tested against each doc in the req results (in order) + * @param req the query to execut, should include rows & sort params such that the results can + * be compared to luceneDocids and valuesList + * @param luceneDocids a list of "[docid]" values to be tested against each doc in the req results + * (in order) * @param fieldName used to get value from the doc to validate with valuesList - * @param valuesList a list of given fieldName values to be tested against each doc in results (in order) + * @param valuesList a list of given fieldName values to be tested against each doc in results (in + * order) */ - private void assertDocIdsAndValuesAgainstAllClients(final String debug, - final SolrParams req, - final List luceneDocids, - final String fieldName, - final List valuesList) throws Exception { + private void assertDocIdsAndValuesAgainstAllClients( + final String debug, + final SolrParams req, + final List luceneDocids, + final String fieldName, + final List valuesList) + throws Exception { assert luceneDocids.size() == valuesList.size(); final long numFoundExpected = luceneDocids.size(); - - CLIENT: for (SolrClient client : clients) { - final String clientDebug = client.toString() + (LEADER.equals(client) ? " (leader)" : " (not leader)"); - final String msg = "'"+debug+"' results against client: " + clientDebug; + + CLIENT: + for (SolrClient client : clients) { + final String clientDebug = + client.toString() + (LEADER.equals(client) ? 
" (leader)" : " (not leader)"); + final String msg = "'" + debug + "' results against client: " + clientDebug; SolrDocumentList results = null; // For each client, do a (sorted) sanity check query to confirm searcher has been re-opened // after our update -- if the numFound matches our expectations, then verify the inplace float // value and [docid] of each result doc against our expecations to ensure that the values were // updated properly w/o the doc being completley re-added internally. (ie: truly inplace) - RETRY: for (int attempt = 0; attempt <= NUM_RETRIES; attempt++) { + RETRY: + for (int attempt = 0; attempt <= NUM_RETRIES; attempt++) { log.info("Attempt #{} checking {}", attempt, msg); results = client.query(req).getResults(); if (numFoundExpected == results.getNumFound()) { break RETRY; } if (attempt == NUM_RETRIES) { - fail("Repeated retry for "+msg+"; Never got numFound="+numFoundExpected+"; results=> "+results); + fail( + "Repeated retry for " + + msg + + "; Never got numFound=" + + numFoundExpected + + "; results=> " + + results); } - log.info("numFound missmatch, searcher may not have re-opened yet. Will sleep an retry..."); - Thread.sleep(WAIT_TIME); + log.info( + "numFound missmatch, searcher may not have re-opened yet. Will sleep an retry..."); + Thread.sleep(WAIT_TIME); } - + assertDocIdsAndValuesInResults(msg, results, luceneDocids, fieldName, valuesList); } } - + /** - * Given a result list sorted by "id", asserts that the "[docid] and "inplace_updatable_float" values - * for each document match in order. + * Given a result list sorted by "id", asserts that the "[docid] and "inplace_updatable_float" + * values for each document match in order. * * @param msgPre used as a prefix for assertion messages - * @param results the sorted results of some query, such that all matches are included (ie: rows = numFound) - * @param luceneDocids a list of "[docid]" values to be tested against each doc in results (in order) + * @param results the sorted results of some query, such that all matches are included (ie: rows = + * numFound) + * @param luceneDocids a list of "[docid]" values to be tested against each doc in results (in + * order) * @param fieldName used to get value from the doc to validate with valuesList - * @param valuesList a list of given fieldName values to be tested against each doc in results (in order) + * @param valuesList a list of given fieldName values to be tested against each doc in results (in + * order) */ - private void assertDocIdsAndValuesInResults(final String msgPre, - final SolrDocumentList results, - final List luceneDocids, - final String fieldName, - final List valuesList) { + private void assertDocIdsAndValuesInResults( + final String msgPre, + final SolrDocumentList results, + final List luceneDocids, + final String fieldName, + final List valuesList) { assert luceneDocids.size() == valuesList.size(); - assertEquals(msgPre + ": rows param wasn't big enough, we need to compare all results matching the query", - results.getNumFound(), results.size()); - assertEquals(msgPre + ": didn't get a result for every known docid", - luceneDocids.size(), results.size()); - + assertEquals( + msgPre + + ": rows param wasn't big enough, we need to compare all results matching the query", + results.getNumFound(), + results.size()); + assertEquals( + msgPre + ": didn't get a result for every known docid", + luceneDocids.size(), + results.size()); + for (SolrDocument doc : results) { final int id = Integer.parseInt(doc.get("id").toString()); final Object val = 
doc.get(fieldName); @@ -618,8 +728,7 @@ private void assertDocIdsAndValuesInResults(final String msgPre, assertEquals(msgPre + " wrong [docid] for " + doc.toString(), luceneDocids.get(id), docid); } } - - + private void ensureRtgWorksWithPartialUpdatesTest() throws Exception { clearIndex(); commit(); @@ -631,35 +740,48 @@ private void ensureRtgWorksWithPartialUpdatesTest() throws Exception { currentVersion = buildRandomIndex(100).get(0); assertTrue(currentVersion > version); - // do an initial (non-inplace) update to ensure both the float & int fields we care about have (any) value - // that way all subsequent atomic updates will be inplace - currentVersion = addDocAndGetVersion("id", 100, - "inplace_updatable_float", map("set", r.nextFloat()), - "inplace_updatable_int", map("set", r.nextInt())); + // do an initial (non-inplace) update to ensure both the float & int fields we care about have + // (any) value that way all subsequent atomic updates will be inplace + currentVersion = + addDocAndGetVersion( + "id", 100, + "inplace_updatable_float", map("set", r.nextFloat()), + "inplace_updatable_int", map("set", r.nextInt())); LEADER.commit(); - + // get the internal docids of id=100 document from the three replicas List docids = getInternalDocIds("100"); // update doc, set - currentVersion = addDocAndGetVersion("id", 100, "inplace_updatable_float", map("set", inplace_updatable_float)); + currentVersion = + addDocAndGetVersion( + "id", 100, "inplace_updatable_float", map("set", inplace_updatable_float)); assertTrue(currentVersion > version); version = currentVersion; LEADER.commit(); - assertTrue("Earlier: "+docids+", now: "+getInternalDocIds("100"), docids.equals(getInternalDocIds("100"))); - - SolrDocument sdoc = LEADER.getById("100"); // RTG straight from the index + assertTrue( + "Earlier: " + docids + ", now: " + getInternalDocIds("100"), + docids.equals(getInternalDocIds("100"))); + + SolrDocument sdoc = LEADER.getById("100"); // RTG straight from the index assertEquals(sdoc.toString(), inplace_updatable_float, sdoc.get("inplace_updatable_float")); assertEquals(sdoc.toString(), title, sdoc.get("title_s")); assertEquals(sdoc.toString(), version, sdoc.get("_version_")); - if(r.nextBoolean()) { + if (r.nextBoolean()) { title = "newtitle100"; - currentVersion = addDocAndGetVersion("id", 100, "title_s", title, "inplace_updatable_float", inplace_updatable_float); // full indexing + currentVersion = + addDocAndGetVersion( + "id", + 100, + "title_s", + title, + "inplace_updatable_float", + inplace_updatable_float); // full indexing assertTrue(currentVersion > version); version = currentVersion; - sdoc = LEADER.getById("100"); // RTG from the tlog + sdoc = LEADER.getById("100"); // RTG from the tlog assertEquals(sdoc.toString(), inplace_updatable_float, sdoc.get("inplace_updatable_float")); assertEquals(sdoc.toString(), title, sdoc.get("title_s")); assertEquals(sdoc.toString(), version, sdoc.get("_version_")); @@ -674,8 +796,10 @@ private void ensureRtgWorksWithPartialUpdatesTest() throws Exception { assertTrue(currentVersion > version); version = currentVersion; LEADER.commit(); - assertTrue("Earlier: "+docids+", now: "+getInternalDocIds("100"), docids.equals(getInternalDocIds("100"))); - + assertTrue( + "Earlier: " + docids + ", now: " + getInternalDocIds("100"), + docids.equals(getInternalDocIds("100"))); + currentVersion = addDocAndGetVersion("id", 100, "inplace_updatable_int", map("set", "100")); assertTrue(currentVersion > version); version = currentVersion; @@ -686,38 +810,51 @@ private 
void ensureRtgWorksWithPartialUpdatesTest() throws Exception { version = currentVersion; // set operation with invalid value for field - SolrException e = expectThrows(SolrException.class, - () -> addDocAndGetVersion( "id", 100, "inplace_updatable_float", map("set", "NOT_NUMBER"))); + SolrException e = + expectThrows( + SolrException.class, + () -> + addDocAndGetVersion( + "id", 100, "inplace_updatable_float", map("set", "NOT_NUMBER"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); MatcherAssert.assertThat(e.getMessage(), containsString("For input string: \"NOT_NUMBER\"")); // inc operation with invalid inc value - e = expectThrows(SolrException.class, - () -> addDocAndGetVersion( "id", 100, "inplace_updatable_int", map("inc", "NOT_NUMBER"))); + e = + expectThrows( + SolrException.class, + () -> + addDocAndGetVersion("id", 100, "inplace_updatable_int", map("inc", "NOT_NUMBER"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); MatcherAssert.assertThat(e.getMessage(), containsString("For input string: \"NOT_NUMBER\"")); // RTG from tlog(s) for (SolrClient client : clients) { - final String clientDebug = client.toString() + (LEADER.equals(client) ? " (leader)" : " (not leader)"); + final String clientDebug = + client.toString() + (LEADER.equals(client) ? " (leader)" : " (not leader)"); sdoc = client.getById("100", params("distrib", "false")); - assertEquals(clientDebug + " => "+ sdoc, 100, sdoc.get("inplace_updatable_int")); - assertEquals(clientDebug + " => "+ sdoc, inplace_updatable_float, sdoc.get("inplace_updatable_float")); - assertEquals(clientDebug + " => "+ sdoc, title, sdoc.get("title_s")); - assertEquals(clientDebug + " => "+ sdoc, version, sdoc.get("_version_")); + assertEquals(clientDebug + " => " + sdoc, 100, sdoc.get("inplace_updatable_int")); + assertEquals( + clientDebug + " => " + sdoc, + inplace_updatable_float, + sdoc.get("inplace_updatable_float")); + assertEquals(clientDebug + " => " + sdoc, title, sdoc.get("title_s")); + assertEquals(clientDebug + " => " + sdoc, version, sdoc.get("_version_")); } - + // assert that the internal docid for id=100 document remains same, in each replica, as before LEADER.commit(); // can't get (real) [docid] from the tlogs, need to force a commit - assertTrue("Earlier: "+docids+", now: "+getInternalDocIds("100"), docids.equals(getInternalDocIds("100"))); + assertTrue( + "Earlier: " + docids + ", now: " + getInternalDocIds("100"), + docids.equals(getInternalDocIds("100"))); log.info("ensureRtgWorksWithPartialUpdatesTest: This test passed fine..."); } /** - * Returns the "[docid]" value(s) returned from a non-distrib RTG to each of the clients used - * in this test (in the same order as the clients list) + * Returns the "[docid]" value(s) returned from a non-distrib RTG to each of the clients used in + * this test (in the same order as the clients list) */ private List getInternalDocIds(String id) throws SolrServerException, IOException { List ret = new ArrayList<>(clients.size()); @@ -743,7 +880,7 @@ private void outOfOrderUpdatesIndividualReplicaTest() throws Exception { index("id", 0, "inplace_updatable_float", map("set", inplace_updatable_float)); LEADER.commit(); - SolrDocument sdoc = LEADER.getById("0"); // RTG straight from the index + SolrDocument sdoc = LEADER.getById("0"); // RTG straight from the index assertEquals(inplace_updatable_float, sdoc.get("inplace_updatable_float")); assertEquals("title0", sdoc.get("title_s")); long version0 = (long) sdoc.get("_version_"); @@ -751,9 +888,27 @@ 
private void outOfOrderUpdatesIndividualReplicaTest() throws Exception { // put replica out of sync float newinplace_updatable_float = 100; List updates = new ArrayList<>(); - updates.add(simulatedUpdateRequest(null, "id", 0, "title_s", "title0_new", "inplace_updatable_float", newinplace_updatable_float, "_version_", version0 + 1)); // full update - for (int i=1; i reorderedUpdates = new ArrayList<>(updates); Collections.shuffle(reorderedUpdates, r); List> updateResponses = new ArrayList<>(); for (UpdateRequest update : reorderedUpdates) { - AsyncUpdateWithRandomCommit task = new AsyncUpdateWithRandomCommit(update, NONLEADERS.get(0), random().nextLong()); + AsyncUpdateWithRandomCommit task = + new AsyncUpdateWithRandomCommit(update, NONLEADERS.get(0), random().nextLong()); updateResponses.add(threadpool.submit(task)); - // while we can't guarantee/trust what order the updates are executed in, since multiple threads - // are involved, but we're trying to bias the thread scheduling to run them in the order submitted + // while we can't guarantee/trust what order the updates are executed in, since multiple + // threads are involved, but we're trying to bias the thread scheduling to run them in the + // order submitted Thread.sleep(10); } - + threadpool.shutdown(); - assertTrue("Thread pool didn't terminate within 15 secs", threadpool.awaitTermination(15, TimeUnit.SECONDS)); + assertTrue( + "Thread pool didn't terminate within 15 secs", + threadpool.awaitTermination(15, TimeUnit.SECONDS)); // assert all requests were successful - for (Future resp: updateResponses) { + for (Future resp : updateResponses) { assertEquals(0, resp.get().getStatus()); } @@ -794,19 +954,28 @@ private void outOfOrderUpdatesIndividualReplicaTest() throws Exception { if (log.isInfoEnabled()) { log.info("Testing client: {}", ((HttpSolrClient) client).getBaseURL()); } - assertReplicaValue(client, 0, "inplace_updatable_float", (newinplace_updatable_float + (float)(updates.size() - 1)), - "inplace_updatable_float didn't match for replica at client: " + ((HttpSolrClient)client).getBaseURL()); - assertReplicaValue(client, 0, "title_s", "title0_new", - "Title didn't match for replica at client: " + ((HttpSolrClient)client).getBaseURL()); + assertReplicaValue( + client, + 0, + "inplace_updatable_float", + (newinplace_updatable_float + (float) (updates.size() - 1)), + "inplace_updatable_float didn't match for replica at client: " + + ((HttpSolrClient) client).getBaseURL()); + assertReplicaValue( + client, + 0, + "title_s", + "title0_new", + "Title didn't match for replica at client: " + ((HttpSolrClient) client).getBaseURL()); assertEquals(version0 + updates.size(), getReplicaValue(client, 0, "_version_")); } log.info("outOfOrderUpdatesIndividualReplicaTest: This test passed fine..."); } - + // The following should work: full update to doc 0, in-place update for doc 0, delete doc 0 private void reorderedDeletesTest() throws Exception { - + clearIndex(); commit(); @@ -817,7 +986,7 @@ private void reorderedDeletesTest() throws Exception { index("id", 0, "inplace_updatable_float", map("set", inplace_updatable_float)); LEADER.commit(); - SolrDocument sdoc = LEADER.getById("0"); // RTG straight from the index + SolrDocument sdoc = LEADER.getById("0"); // RTG straight from the index assertEquals(inplace_updatable_float, sdoc.get("inplace_updatable_float")); assertEquals("title0", sdoc.get("title_s")); long version0 = (long) sdoc.get("_version_"); @@ -825,8 +994,26 @@ private void reorderedDeletesTest() throws Exception { // put 
replica out of sync float newinplace_updatable_float = 100; List updates = new ArrayList<>(); - updates.add(simulatedUpdateRequest(null, "id", 0, "title_s", "title0_new", "inplace_updatable_float", newinplace_updatable_float, "_version_", version0 + 1)); // full update - updates.add(simulatedUpdateRequest(version0 + 1, "id", 0, "inplace_updatable_float", newinplace_updatable_float + 1, "_version_", version0 + 2)); // inplace_updatable_float=101 + updates.add( + simulatedUpdateRequest( + null, + "id", + 0, + "title_s", + "title0_new", + "inplace_updatable_float", + newinplace_updatable_float, + "_version_", + version0 + 1)); // full update + updates.add( + simulatedUpdateRequest( + version0 + 1, + "id", + 0, + "inplace_updatable_float", + newinplace_updatable_float + 1, + "_version_", + version0 + 2)); // inplace_updatable_float=101 updates.add(simulatedDeleteRequest(0, version0 + 3)); // order the updates correctly for NONLEADER 1 @@ -838,26 +1025,31 @@ private void reorderedDeletesTest() throws Exception { } // Reordering needs to happen using parallel threads - ExecutorService threadpool = - ExecutorUtil.newMDCAwareFixedThreadPool(updates.size() + 1, new SolrNamedThreadFactory(getTestName())); + ExecutorService threadpool = + ExecutorUtil.newMDCAwareFixedThreadPool( + updates.size() + 1, new SolrNamedThreadFactory(getTestName())); // re-order the updates for NONLEADER 0 List reorderedUpdates = new ArrayList<>(updates); Collections.shuffle(reorderedUpdates, r); List> updateResponses = new ArrayList<>(); for (UpdateRequest update : reorderedUpdates) { - AsyncUpdateWithRandomCommit task = new AsyncUpdateWithRandomCommit(update, NONLEADERS.get(0), random().nextLong()); + AsyncUpdateWithRandomCommit task = + new AsyncUpdateWithRandomCommit(update, NONLEADERS.get(0), random().nextLong()); updateResponses.add(threadpool.submit(task)); - // while we can't guarantee/trust what order the updates are executed in, since multiple threads - // are involved, but we're trying to bias the thread scheduling to run them in the order submitted + // while we can't guarantee/trust what order the updates are executed in, since multiple + // threads are involved, but we're trying to bias the thread scheduling to run them in the + // order submitted Thread.sleep(10); } - + threadpool.shutdown(); - assertTrue("Thread pool didn't terminate within 15 secs", threadpool.awaitTermination(15, TimeUnit.SECONDS)); + assertTrue( + "Thread pool didn't terminate within 15 secs", + threadpool.awaitTermination(15, TimeUnit.SECONDS)); // assert all requests were successful - for (Future resp: updateResponses) { + for (Future resp : updateResponses) { assertEquals(0, resp.get().getStatus()); } @@ -871,14 +1063,14 @@ private void reorderedDeletesTest() throws Exception { } /* Test for a situation when a document requiring in-place update cannot be "resurrected" - * when the original full indexed document has been deleted by an out of order DBQ. - * Expected behaviour in this case should be to throw the replica into LIR (since this will - * be rare). Here's an example of the situation: - ADD(id=x, val=5, ver=1) - UPD(id=x, val=10, ver = 2) - DBQ(q=val:10, v=4) - DV(id=x, val=5, ver=3) - */ + * when the original full indexed document has been deleted by an out of order DBQ. + * Expected behaviour in this case should be to throw the replica into LIR (since this will + * be rare). 
Here's an example of the situation: + ADD(id=x, val=5, ver=1) + UPD(id=x, val=10, ver = 2) + DBQ(q=val:10, v=4) + DV(id=x, val=5, ver=3) + */ private void reorderedDBQsResurrectionTest() throws Exception { if (onlyLeaderIndexes) { log.info("RTG with DBQs are not working in tlog replicas"); @@ -889,19 +1081,46 @@ private void reorderedDBQsResurrectionTest() throws Exception { buildRandomIndex(0); - SolrDocument sdoc = LEADER.getById("0"); // RTG straight from the index - //assertEquals(value, sdoc.get("inplace_updatable_float")); + SolrDocument sdoc = LEADER.getById("0"); // RTG straight from the index + // assertEquals(value, sdoc.get("inplace_updatable_float")); assertEquals("title0", sdoc.get("title_s")); long version0 = (long) sdoc.get("_version_"); String field = "inplace_updatable_int"; - + // put replica out of sync List updates = new ArrayList<>(); - updates.add(simulatedUpdateRequest(null, "id", 0, "title_s", "title0_new", field, 5, "_version_", version0 + 1)); // full update - updates.add(simulatedUpdateRequest(version0 + 1, "id", 0, field, 10, "_version_", version0 + 2)); // inplace_updatable_float=101 - updates.add(simulatedUpdateRequest(version0 + 2, "id", 0, field, 5, "_version_", version0 + 3)); // inplace_updatable_float=101 - updates.add(simulatedDeleteRequest(field+":10", version0 + 4)); // supposed to not delete anything + updates.add( + simulatedUpdateRequest( + null, + "id", + 0, + "title_s", + "title0_new", + field, + 5, + "_version_", + version0 + 1)); // full update + updates.add( + simulatedUpdateRequest( + version0 + 1, + "id", + 0, + field, + 10, + "_version_", + version0 + 2)); // inplace_updatable_float=101 + updates.add( + simulatedUpdateRequest( + version0 + 2, + "id", + 0, + field, + 5, + "_version_", + version0 + 3)); // inplace_updatable_float=101 + updates.add( + simulatedDeleteRequest(field + ":10", version0 + 4)); // supposed to not delete anything // order the updates correctly for NONLEADER 1 for (UpdateRequest update : updates) { @@ -912,8 +1131,9 @@ private void reorderedDBQsResurrectionTest() throws Exception { } // Reordering needs to happen using parallel threads - ExecutorService threadpool = - ExecutorUtil.newMDCAwareFixedThreadPool(updates.size() + 1, new SolrNamedThreadFactory(getTestName())); + ExecutorService threadpool = + ExecutorUtil.newMDCAwareFixedThreadPool( + updates.size() + 1, new SolrNamedThreadFactory(getTestName())); // re-order the last two updates for NONLEADER 0 List reorderedUpdates = new ArrayList<>(updates); Collections.swap(reorderedUpdates, 2, 3); @@ -922,20 +1142,25 @@ private void reorderedDBQsResurrectionTest() throws Exception { for (UpdateRequest update : reorderedUpdates) { // pretend as this update is coming from the other non-leader, so that // the resurrection can happen from there (instead of the leader) - update.setParam(DistributedUpdateProcessor.DISTRIB_FROM, ((HttpSolrClient)NONLEADERS.get(1)).getBaseURL()); - AsyncUpdateWithRandomCommit task = new AsyncUpdateWithRandomCommit(update, NONLEADERS.get(0), - random().nextLong()); + update.setParam( + DistributedUpdateProcessor.DISTRIB_FROM, + ((HttpSolrClient) NONLEADERS.get(1)).getBaseURL()); + AsyncUpdateWithRandomCommit task = + new AsyncUpdateWithRandomCommit(update, NONLEADERS.get(0), random().nextLong()); updateResponses.add(threadpool.submit(task)); - // while we can't guarantee/trust what order the updates are executed in, since multiple threads - // are involved, but we're trying to bias the thread scheduling to run them in the order submitted + // 
while we can't guarantee/trust what order the updates are executed in, since multiple + // threads are involved, but we're trying to bias the thread scheduling to run them in the + // order submitted Thread.sleep(10); } - + threadpool.shutdown(); - assertTrue("Thread pool didn't terminate within 15 secs", threadpool.awaitTermination(15, TimeUnit.SECONDS)); + assertTrue( + "Thread pool didn't terminate within 15 secs", + threadpool.awaitTermination(15, TimeUnit.SECONDS)); int successful = 0; - for (Future resp: updateResponses) { + for (Future resp : updateResponses) { try { UpdateResponse r = resp.get(); if (r.getStatus() == 0) { @@ -944,11 +1169,13 @@ private void reorderedDBQsResurrectionTest() throws Exception { } catch (Exception ex) { // reordered DBQ should trigger an error, thus throwing the replica into LIR. // the cause of the error is that the full document was deleted by mistake due to the - // out of order DBQ, and the in-place update that arrives after the DBQ (but was supposed to + // out of order DBQ, and the in-place update that arrives after the DBQ (but was supposed to // arrive before) cannot be applied, since the full document can't now be "resurrected". - if (!ex.getMessage().contains("Tried to fetch missing update" - + " from the leader, but missing wasn't present at leader.")) { + if (!ex.getMessage() + .contains( + "Tried to fetch missing update" + + " from the leader, but missing wasn't present at leader.")) { throw ex; } } @@ -960,163 +1187,218 @@ private void reorderedDBQsResurrectionTest() throws Exception { log.info("Non leader 0: {}", ((HttpSolrClient) NONLEADERS.get(0)).getBaseURL()); log.info("Non leader 1: {}", ((HttpSolrClient) NONLEADERS.get(1)).getBaseURL()); // nowarn } - + SolrDocument doc0 = NONLEADERS.get(0).getById(String.valueOf(0), params("distrib", "false")); SolrDocument doc1 = NONLEADERS.get(1).getById(String.valueOf(0), params("distrib", "false")); log.info("Doc in both replica 0: {}", doc0); log.info("Doc in both replica 1: {}", doc1); // assert both replicas have same effect - for (int i=0; i updates = new ArrayList<>(); - updates.add(regularUpdateRequest("id", 1, "title_s", "title1_new", "id_i", 1, "inplace_updatable_float", newinplace_updatable_float)); + updates.add( + regularUpdateRequest( + "id", + 1, + "title_s", + "title1_new", + "id_i", + 1, + "inplace_updatable_float", + newinplace_updatable_float)); updates.add(regularUpdateRequest("id", 1, "inplace_updatable_float", map("inc", 1))); updates.add(regularUpdateRequest("id", 1, "inplace_updatable_float", map("inc", 1))); // The next request to replica2 will be delayed (timeout is 5s) - shardToJetty.get(SHARD1).get(1).jetty.getDebugFilter().addDelay( - "Waiting for dependant update to timeout", 1, 6000); + shardToJetty + .get(SHARD1) + .get(1) + .jetty + .getDebugFilter() + .addDelay("Waiting for dependant update to timeout", 1, 6000); ExecutorService threadpool = - ExecutorUtil.newMDCAwareFixedThreadPool(updates.size() + 1, new SolrNamedThreadFactory(getTestName())); + ExecutorUtil.newMDCAwareFixedThreadPool( + updates.size() + 1, new SolrNamedThreadFactory(getTestName())); for (UpdateRequest update : updates) { - AsyncUpdateWithRandomCommit task = new AsyncUpdateWithRandomCommit(update, cloudClient, - random().nextLong()); + AsyncUpdateWithRandomCommit task = + new AsyncUpdateWithRandomCommit(update, cloudClient, random().nextLong()); threadpool.submit(task); - // while we can't guarantee/trust what order the updates are executed in, since multiple threads - // are involved, but 
we're trying to bias the thread scheduling to run them in the order submitted - Thread.sleep(100); + // while we can't guarantee/trust what order the updates are executed in, since multiple + // threads are involved, but we're trying to bias the thread scheduling to run them in the + // order submitted + Thread.sleep(100); } threadpool.shutdown(); - assertTrue("Thread pool didn't terminate within 15 secs", threadpool.awaitTermination(15, TimeUnit.SECONDS)); + assertTrue( + "Thread pool didn't terminate within 15 secs", + threadpool.awaitTermination(15, TimeUnit.SECONDS)); commit(); // TODO: Could try checking ZK for LIR flags to ensure LIR has not kicked in // Check every 10ms, 100 times, for a replica to go down (& assert that it doesn't) - for (int i=0; i<100; i++) { + for (int i = 0; i < 100; i++) { Thread.sleep(10); cloudClient.getZkStateReader().forceUpdateCollection(DEFAULT_COLLECTION); ClusterState state = cloudClient.getZkStateReader().getClusterState(); int numActiveReplicas = 0; - for (Replica rep: state.getCollection(DEFAULT_COLLECTION).getSlice(SHARD1).getReplicas()) - if (rep.getState().equals(Replica.State.ACTIVE)) - numActiveReplicas++; + for (Replica rep : state.getCollection(DEFAULT_COLLECTION).getSlice(SHARD1).getReplicas()) + if (rep.getState().equals(Replica.State.ACTIVE)) numActiveReplicas++; - assertEquals("The replica receiving reordered updates must not have gone down", 3, numActiveReplicas); + assertEquals( + "The replica receiving reordered updates must not have gone down", 3, numActiveReplicas); } - + for (SolrClient client : clients) { TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME); try { - timeout.waitFor("Timeout", () -> { - try { - return (float) getReplicaValue(client, 1, "inplace_updatable_float") == newinplace_updatable_float + 2.0f; - } catch (SolrServerException e) { - throw new RuntimeException(e); - } catch (IOException e) { - throw new RuntimeException(e); - } - }); + timeout.waitFor( + "Timeout", + () -> { + try { + return (float) getReplicaValue(client, 1, "inplace_updatable_float") + == newinplace_updatable_float + 2.0f; + } catch (SolrServerException e) { + throw new RuntimeException(e); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); } catch (TimeoutException e) { } } - + for (SolrClient client : clients) { if (log.isInfoEnabled()) { log.info("Testing client (Fetch missing test): {}", ((HttpSolrClient) client).getBaseURL()); - log.info("Version at {} is: {}" - , ((HttpSolrClient) client).getBaseURL(), getReplicaValue(client, 1, "_version_")); // nowarn + log.info( + "Version at {} is: {}", + ((HttpSolrClient) client).getBaseURL(), + getReplicaValue(client, 1, "_version_")); // nowarn } - assertReplicaValue(client, 1, "inplace_updatable_float", (newinplace_updatable_float + 2.0f), - "inplace_updatable_float didn't match for replica at client: " + ((HttpSolrClient) client).getBaseURL()); - assertReplicaValue(client, 1, "title_s", "title1_new", + assertReplicaValue( + client, + 1, + "inplace_updatable_float", + (newinplace_updatable_float + 2.0f), + "inplace_updatable_float didn't match for replica at client: " + + ((HttpSolrClient) client).getBaseURL()); + assertReplicaValue( + client, + 1, + "title_s", + "title1_new", "Title didn't match for replica at client: " + ((HttpSolrClient) client).getBaseURL()); } - + // Try another round of these updates, this time with a delete request at the end. 
- // This is to ensure that the fetch missing update from leader doesn't bomb out if the + // This is to ensure that the fetch missing update from leader doesn't bomb out if the // document has been deleted on the leader later on { clearIndex(); commit(); shardToJetty.get(SHARD1).get(1).jetty.getDebugFilter().unsetDelay(); - + updates.add(regularDeleteRequest(1)); - shardToJetty.get(SHARD1).get(1).jetty.getDebugFilter().addDelay("Waiting for dependant update to timeout", 1, 5999); // the first update - shardToJetty.get(SHARD1).get(1).jetty.getDebugFilter().addDelay("Waiting for dependant update to timeout", 4, 5998); // the delete update + shardToJetty + .get(SHARD1) + .get(1) + .jetty + .getDebugFilter() + .addDelay("Waiting for dependant update to timeout", 1, 5999); // the first update + shardToJetty + .get(SHARD1) + .get(1) + .jetty + .getDebugFilter() + .addDelay("Waiting for dependant update to timeout", 4, 5998); // the delete update threadpool = - ExecutorUtil.newMDCAwareFixedThreadPool(updates.size() + 1, new SolrNamedThreadFactory(getTestName())); + ExecutorUtil.newMDCAwareFixedThreadPool( + updates.size() + 1, new SolrNamedThreadFactory(getTestName())); for (UpdateRequest update : updates) { - AsyncUpdateWithRandomCommit task = new AsyncUpdateWithRandomCommit(update, cloudClient, - random().nextLong()); + AsyncUpdateWithRandomCommit task = + new AsyncUpdateWithRandomCommit(update, cloudClient, random().nextLong()); threadpool.submit(task); - - // while we can't guarantee/trust what order the updates are executed in, since multiple threads - // are involved, but we're trying to bias the thread scheduling to run them in the order submitted + + // while we can't guarantee/trust what order the updates are executed in, since multiple + // threads are involved, but we're trying to bias the thread scheduling to run them in the + // order submitted Thread.sleep(100); } threadpool.shutdown(); - assertTrue("Thread pool didn't terminate within 15 secs", threadpool.awaitTermination(15, TimeUnit.SECONDS)); + assertTrue( + "Thread pool didn't terminate within 15 secs", + threadpool.awaitTermination(15, TimeUnit.SECONDS)); commit(); - try (ZkShardTerms zkShardTerms = new ZkShardTerms(DEFAULT_COLLECTION, SHARD1, cloudClient.getZkStateReader().getZkClient())) { - for (int i=0; i<100; i++) { + try (ZkShardTerms zkShardTerms = + new ZkShardTerms( + DEFAULT_COLLECTION, SHARD1, cloudClient.getZkStateReader().getZkClient())) { + for (int i = 0; i < 100; i++) { Thread.sleep(10); cloudClient.getZkStateReader().forceUpdateCollection(DEFAULT_COLLECTION); ClusterState state = cloudClient.getZkStateReader().getClusterState(); int numActiveReplicas = 0; - for (Replica rep: state.getCollection(DEFAULT_COLLECTION).getSlice(SHARD1).getReplicas()) { + for (Replica rep : + state.getCollection(DEFAULT_COLLECTION).getSlice(SHARD1).getReplicas()) { assertTrue(zkShardTerms.canBecomeLeader(rep.getName())); - if (rep.getState().equals(Replica.State.ACTIVE)) - numActiveReplicas++; + if (rep.getState().equals(Replica.State.ACTIVE)) numActiveReplicas++; } - assertEquals("The replica receiving reordered updates must not have gone down", 3, numActiveReplicas); + assertEquals( + "The replica receiving reordered updates must not have gone down", + 3, + numActiveReplicas); } } - for (SolrClient client: new SolrClient[] {LEADER, NONLEADERS.get(0), - NONLEADERS.get(1)}) { // nonleader 0 re-ordered replica, nonleader 1 well-ordered replica + for (SolrClient client : + new SolrClient[] { + LEADER, NONLEADERS.get(0), 
NONLEADERS.get(1) + }) { // nonleader 0 re-ordered replica, nonleader 1 well-ordered replica SolrDocument doc = client.getById(String.valueOf(1), params("distrib", "false")); assertNull("This doc was supposed to have been deleted, but was: " + doc, doc); } - } log.info("delayedReorderingFetchesMissingUpdateFromLeaderTest: This test passed fine..."); } /** - * Use the schema API to verify that the specified expected Field exists with those exact attributes. + * Use the schema API to verify that the specified expected Field exists with those exact + * attributes. */ - public void checkExpectedSchemaField(Map expected) throws Exception { + public void checkExpectedSchemaField(Map expected) throws Exception { String fieldName = (String) expected.get("name"); assertNotNull("expected contains no name: " + expected, fieldName); FieldResponse rsp = new Field(fieldName).process(this.cloudClient); @@ -1131,7 +1413,7 @@ private class AsyncUpdateWithRandomCommit implements Callable { final Random rnd; int commitBound = onlyLeaderIndexes ? 50 : 3; - public AsyncUpdateWithRandomCommit (UpdateRequest update, SolrClient solrClient, long seed) { + public AsyncUpdateWithRandomCommit(UpdateRequest update, SolrClient solrClient, long seed) { this.update = update; this.solrClient = solrClient; this.rnd = new Random(seed); @@ -1139,29 +1421,30 @@ public AsyncUpdateWithRandomCommit (UpdateRequest update, SolrClient solrClient, @Override public UpdateResponse call() throws Exception { - UpdateResponse resp = update.process(solrClient); //solrClient.request(update); - if (rnd.nextInt(commitBound) == 0) - solrClient.commit(); + UpdateResponse resp = update.process(solrClient); // solrClient.request(update); + if (rnd.nextInt(commitBound) == 0) solrClient.commit(); return resp; } } - - Object getReplicaValue(SolrClient client, int doc, String field) throws SolrServerException, IOException { + + Object getReplicaValue(SolrClient client, int doc, String field) + throws SolrServerException, IOException { SolrDocument sdoc = client.getById(String.valueOf(doc), params("distrib", "false")); - return sdoc==null? null: sdoc.get(field); + return sdoc == null ? null : sdoc.get(field); } - void assertReplicaValue(SolrClient client, int doc, String field, Object expected, - String message) throws SolrServerException, IOException { + void assertReplicaValue(SolrClient client, int doc, String field, Object expected, String message) + throws SolrServerException, IOException { assertEquals(message, expected, getReplicaValue(client, doc, field)); } // This returns an UpdateRequest with the given fields that represent a document. // This request is constructed such that it is a simulation of a request coming from // a leader to a replica. - UpdateRequest simulatedUpdateRequest(Long prevVersion, Object... fields) throws SolrServerException, IOException { + UpdateRequest simulatedUpdateRequest(Long prevVersion, Object... fields) + throws SolrServerException, IOException { SolrInputDocument doc = sdoc(fields); - + // get baseUrl of the leader String baseUrl = getBaseUrl(doc.get("id").toString()); @@ -1176,34 +1459,37 @@ UpdateRequest simulatedUpdateRequest(Long prevVersion, Object... 
fields) throws return ur; } - UpdateRequest simulatedDeleteRequest(int id, long version) throws SolrServerException, IOException { - String baseUrl = getBaseUrl(""+id); + UpdateRequest simulatedDeleteRequest(int id, long version) + throws SolrServerException, IOException { + String baseUrl = getBaseUrl("" + id); UpdateRequest ur = new UpdateRequest(); if (random().nextBoolean() || onlyLeaderIndexes) { - ur.deleteById(""+id); + ur.deleteById("" + id); } else { - ur.deleteByQuery("id:"+id); + ur.deleteByQuery("id:" + id); } - ur.setParam("_version_", ""+version); + ur.setParam("_version_", "" + version); ur.setParam("update.distrib", "FROMLEADER"); ur.setParam("distrib.from", baseUrl); return ur; } - UpdateRequest simulatedDeleteRequest(String query, long version) throws SolrServerException, IOException { - String baseUrl = getBaseUrl((HttpSolrClient)LEADER); + UpdateRequest simulatedDeleteRequest(String query, long version) + throws SolrServerException, IOException { + String baseUrl = getBaseUrl((HttpSolrClient) LEADER); UpdateRequest ur = new UpdateRequest(); ur.deleteByQuery(query); - ur.setParam("_version_", ""+version); + ur.setParam("_version_", "" + version); ur.setParam("update.distrib", "FROMLEADER"); ur.setParam("distrib.from", baseUrl + DEFAULT_COLLECTION + "/"); return ur; } private String getBaseUrl(String id) { - DocCollection collection = cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION); + DocCollection collection = + cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION); Slice slice = collection.getRouter().getTargetSlice(id, null, null, null, collection); String baseUrl = slice.getLeader().getCoreUrl(); return baseUrl; @@ -1218,7 +1504,7 @@ UpdateRequest regularUpdateRequest(Object... fields) throws SolrServerException, UpdateRequest regularDeleteRequest(int id) throws SolrServerException, IOException { UpdateRequest ur = new UpdateRequest(); - ur.deleteById(""+id); + ur.deleteById("" + id); return ur; } @@ -1231,44 +1517,53 @@ UpdateRequest regularDeleteByQueryRequest(String q) throws SolrServerException, protected long addDocAndGetVersion(Object... fields) throws Exception { SolrInputDocument doc = new SolrInputDocument(); addFields(doc, fields); - + UpdateRequest ureq = new UpdateRequest(); ureq.setParam("versions", "true"); ureq.add(doc); UpdateResponse resp; - + // send updates to leader, to avoid SOLR-8733 resp = ureq.process(LEADER); - - long returnedVersion = Long.parseLong(((NamedList)resp.getResponse().get("adds")).getVal(0).toString()); - assertTrue("Due to SOLR-8733, sometimes returned version is 0. Let us assert that we have successfully" - + " worked around that problem here.", returnedVersion > 0); + + long returnedVersion = + Long.parseLong(((NamedList) resp.getResponse().get("adds")).getVal(0).toString()); + assertTrue( + "Due to SOLR-8733, sometimes returned version is 0. Let us assert that we have successfully" + + " worked around that problem here.", + returnedVersion > 0); return returnedVersion; } /** * Convinience method variant that never uses initFloat + * * @see #buildRandomIndex(Float,List) */ protected List buildRandomIndex(Integer... specialIds) throws Exception { return buildRandomIndex(null, Arrays.asList(specialIds)); } - - /** - * Helper method to build a randomized index with the fields needed for all test methods in this class. - * At a minimum, this index will contain 1 doc per "special" (non-negative) document id. 
These special documents will be added with the initFloat specified in the "inplace_updatable_float" field. + + /** + * Helper method to build a randomized index with the fields needed for all test methods in this + * class. At a minimum, this index will contain 1 doc per "special" (non-negative) document id. + * These special documents will be added with the initFloat specified in the + * "inplace_updatable_float" field. * - * A random number of documents (with negative ids) will be indexed in between each of the + *
+   * <p>
A random number of documents (with negative ids) will be indexed in between each of the * "special" documents, as well as before/after the first/last special document. * - * @param initFloat Value to use in the "inplace_updatable_float" for the special documents; will never be used if null + * @param initFloat Value to use in the "inplace_updatable_float" for the special documents; will + * never be used if null * @param specialIds The ids to use for the special documents, all values must be non-negative * @return the versions of each of the specials document returned when indexing it */ - protected List buildRandomIndex(Float initFloat, List specialIds) throws Exception { + protected List buildRandomIndex(Float initFloat, List specialIds) + throws Exception { int id = -1; // used for non special docs - final int numPreDocs = rarely() || onlyLeaderIndexes ? TestUtil.nextInt(random(),0,9) : atLeast(10); + final int numPreDocs = + rarely() || onlyLeaderIndexes ? TestUtil.nextInt(random(), 0, 9) : atLeast(10); for (int i = 1; i <= numPreDocs; i++) { addDocAndGetVersion("id", id, "title_s", "title" + id, "id_i", id); id--; @@ -1276,19 +1571,29 @@ protected List buildRandomIndex(Float initFloat, List specialIds) final List versions = new ArrayList<>(specialIds.size()); for (int special : specialIds) { if (null == initFloat) { - versions.add(addDocAndGetVersion("id", special, "title_s", "title" + special, "id_i", special)); + versions.add( + addDocAndGetVersion("id", special, "title_s", "title" + special, "id_i", special)); } else { - versions.add(addDocAndGetVersion("id", special, "title_s", "title" + special, "id_i", special, - "inplace_updatable_float", initFloat)); + versions.add( + addDocAndGetVersion( + "id", + special, + "title_s", + "title" + special, + "id_i", + special, + "inplace_updatable_float", + initFloat)); } - final int numPostDocs = rarely() || onlyLeaderIndexes ? TestUtil.nextInt(random(),0,2) : atLeast(10); + final int numPostDocs = + rarely() || onlyLeaderIndexes ? TestUtil.nextInt(random(), 0, 2) : atLeast(10); for (int i = 1; i <= numPostDocs; i++) { addDocAndGetVersion("id", id, "title_s", "title" + id, "id_i", id); id--; } } LEADER.commit(); - + assert specialIds.size() == versions.size(); return versions; } @@ -1307,57 +1612,73 @@ private void reorderedDBQsUsingUpdatedValueFromADroppedUpdate() throws Exception } clearIndex(); commit(); - + float inplace_updatable_float = 1F; buildRandomIndex(inplace_updatable_float, Collections.singletonList(1)); List updates = new ArrayList<>(); - updates.add(regularUpdateRequest("id", 1, "id_i", 1, "inplace_updatable_float", 12, "title_s", "mytitle")); - updates.add(regularUpdateRequest("id", 1, "inplace_updatable_float", map("inc", 1))); // delay indefinitely + updates.add( + regularUpdateRequest( + "id", 1, "id_i", 1, "inplace_updatable_float", 12, "title_s", "mytitle")); + updates.add( + regularUpdateRequest( + "id", 1, "inplace_updatable_float", map("inc", 1))); // delay indefinitely updates.add(regularUpdateRequest("id", 1, "inplace_updatable_float", map("inc", 1))); updates.add(regularDeleteByQueryRequest("inplace_updatable_float:14")); - // The second request will be delayed very very long, so that the next update actually gives up waiting for this - // and fetches a full update from the leader. 
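// (Hedged aside on the mechanism, assuming SOLR-5944 semantics: an in-place update
// distributed from the leader names the version it depends on, so a replica that receives
// the dependent update first waits for that prerequisite; when the wait times out, the
// replica gives up and fetches the whole document from the leader instead. The
// addDelay(...) below forces exactly that timeout on one replica.)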
- shardToJetty.get(SHARD1).get(1).jetty.getDebugFilter().addDelay( - "Waiting for dependant update to timeout", 2, 8000); + // The second request will be delayed very very long, so that the next update actually gives up + // waiting for this and fetches a full update from the leader. + shardToJetty + .get(SHARD1) + .get(1) + .jetty + .getDebugFilter() + .addDelay("Waiting for dependant update to timeout", 2, 8000); ExecutorService threadpool = - ExecutorUtil.newMDCAwareFixedThreadPool(updates.size() + 1, new SolrNamedThreadFactory(getTestName())); + ExecutorUtil.newMDCAwareFixedThreadPool( + updates.size() + 1, new SolrNamedThreadFactory(getTestName())); for (UpdateRequest update : updates) { - AsyncUpdateWithRandomCommit task = new AsyncUpdateWithRandomCommit(update, cloudClient, - random().nextLong()); + AsyncUpdateWithRandomCommit task = + new AsyncUpdateWithRandomCommit(update, cloudClient, random().nextLong()); threadpool.submit(task); - // while we can't guarantee/trust what order the updates are executed in, since multiple threads - // are involved, but we're trying to bias the thread scheduling to run them in the order submitted - Thread.sleep(100); + // while we can't guarantee/trust what order the updates are executed in, since multiple + // threads are involved, but we're trying to bias the thread scheduling to run them in the + // order submitted + Thread.sleep(100); } threadpool.shutdown(); - assertTrue("Thread pool didn't terminate within 12 secs", threadpool.awaitTermination(12, TimeUnit.SECONDS)); + assertTrue( + "Thread pool didn't terminate within 12 secs", + threadpool.awaitTermination(12, TimeUnit.SECONDS)); commit(); // TODO: Could try checking ZK for LIR flags to ensure LIR has not kicked in // Check every 10ms, 100 times, for a replica to go down (& assert that it doesn't) - for (int i=0; i<100; i++) { + for (int i = 0; i < 100; i++) { Thread.sleep(10); cloudClient.getZkStateReader().forceUpdateCollection(DEFAULT_COLLECTION); ClusterState state = cloudClient.getZkStateReader().getClusterState(); int numActiveReplicas = 0; - for (Replica rep: state.getCollection(DEFAULT_COLLECTION).getSlice(SHARD1).getReplicas()) - if (rep.getState().equals(Replica.State.ACTIVE)) - numActiveReplicas++; + for (Replica rep : state.getCollection(DEFAULT_COLLECTION).getSlice(SHARD1).getReplicas()) + if (rep.getState().equals(Replica.State.ACTIVE)) numActiveReplicas++; - assertEquals("The replica receiving reordered updates must not have gone down", 3, numActiveReplicas); + assertEquals( + "The replica receiving reordered updates must not have gone down", 3, numActiveReplicas); } for (SolrClient client : clients) { if (log.isInfoEnabled()) { - log.info("Testing client (testDBQUsingUpdatedFieldFromDroppedUpdate): {}", ((HttpSolrClient) client).getBaseURL()); - log.info("Version at {} is: {}", ((HttpSolrClient) client).getBaseURL(), + log.info( + "Testing client (testDBQUsingUpdatedFieldFromDroppedUpdate): {}", + ((HttpSolrClient) client).getBaseURL()); + log.info( + "Version at {} is: {}", + ((HttpSolrClient) client).getBaseURL(), getReplicaValue(client, 1, "_version_")); // nowarn } assertNull(client.getById("1", params("distrib", "false"))); @@ -1370,7 +1691,7 @@ private void reorderedDBQsUsingUpdatedValueFromADroppedUpdate() throws Exception public void clearIndex() { super.clearIndex(); try { - for (SolrClient client: new SolrClient[] {LEADER, NONLEADERS.get(0), NONLEADERS.get(1)}) { + for (SolrClient client : new SolrClient[] {LEADER, NONLEADERS.get(0), NONLEADERS.get(1)}) { if (client 
!= null) { client.request(simulatedDeleteRequest("*:*", -Long.MAX_VALUE)); client.commit(); diff --git a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesRequiredField.java b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesRequiredField.java index a7f4da340a0..b8911e837e3 100644 --- a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesRequiredField.java +++ b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesRequiredField.java @@ -17,13 +17,13 @@ package org.apache.solr.update; +import static org.apache.solr.update.TestInPlaceUpdatesStandalone.addAndAssertVersion; + import org.apache.solr.SolrTestCaseJ4; import org.junit.BeforeClass; import org.junit.Test; -import static org.apache.solr.update.TestInPlaceUpdatesStandalone.addAndAssertVersion; - -public class TestInPlaceUpdatesRequiredField extends SolrTestCaseJ4 { +public class TestInPlaceUpdatesRequiredField extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { @@ -32,17 +32,20 @@ public static void beforeClass() throws Exception { @Test public void testUpdateFromTlog() throws Exception { - long version1 = addAndGetVersion(sdoc("id", "1", "name", "first", "inplace_updatable_int", 1), null); + long version1 = + addAndGetVersion(sdoc("id", "1", "name", "first", "inplace_updatable_int", 1), null); assertU(commit("softCommit", "false")); assertQ(req("q", "*:*"), "//*[@numFound='1']"); // do an in place update that hits off the tlog version1 = addAndAssertVersion(version1, "id", "1", "inplace_updatable_int", map("inc", 1)); version1 = addAndAssertVersion(version1, "id", "1", "inplace_updatable_int", map("inc", 1)); - version1 = addAndAssertVersion(version1, "id", "1", "inplace_updatable_int", map("inc", 1)); // new value should be 4 + // new value should be 4 + version1 = addAndAssertVersion(version1, "id", "1", "inplace_updatable_int", map("inc", 1)); assertU(commit("softCommit", "false")); - assertQ(req("q", "id:1", "fl", "*,[docid]"), + assertQ( + req("q", "id:1", "fl", "*,[docid]"), "//result/doc[1]/int[@name='inplace_updatable_int'][.='4']", - "//result/doc[1]/long[@name='_version_'][.='"+version1+"']"); + "//result/doc[1]/long[@name='_version_'][.='" + version1 + "']"); } } diff --git a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesStandalone.java b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesStandalone.java index 1b777eb15de..a63a1655122 100644 --- a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesStandalone.java +++ b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesStandalone.java @@ -1,4 +1,3 @@ - /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. 
See the NOTICE file distributed with @@ -18,6 +17,9 @@ package org.apache.solr.update; +import static org.apache.solr.update.UpdateLogTest.buildAddUpdateCommand; +import static org.hamcrest.core.StringContains.containsString; + import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -28,7 +30,6 @@ import java.util.Map; import java.util.Random; import java.util.Set; - import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.NoMergePolicy; @@ -57,13 +58,7 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.apache.solr.update.UpdateLogTest.buildAddUpdateCommand; -import static org.hamcrest.core.StringContains.containsString; - - -/** - * Tests the in-place updates (docValues updates) for a standalone Solr instance. - */ +/** Tests the in-place updates (docValues updates) for a standalone Solr instance. */ public class TestInPlaceUpdatesStandalone extends SolrTestCaseJ4 { private static SolrClient client; @@ -85,30 +80,32 @@ public static void beforeClass() throws Exception { RefCounted iw = h.getCore().getSolrCoreState().getIndexWriter(h.getCore()); try { IndexWriter writer = iw.get(); - assertTrue("Actual merge policy is: " + writer.getConfig().getMergePolicy(), - writer.getConfig().getMergePolicy() instanceof NoMergePolicy); + assertTrue( + "Actual merge policy is: " + writer.getConfig().getMergePolicy(), + writer.getConfig().getMergePolicy() instanceof NoMergePolicy); } finally { iw.decref(); } // validate that the schema was not changed to an unexpected state IndexSchema schema = h.getCore().getLatestSchema(); - for (String fieldName : Arrays.asList("_version_", - "inplace_l_dvo", - "inplace_updatable_float", - "inplace_updatable_int", - "inplace_updatable_float_with_default", - "inplace_updatable_int_with_default")) { + for (String fieldName : + Arrays.asList( + "_version_", + "inplace_l_dvo", + "inplace_updatable_float", + "inplace_updatable_int", + "inplace_updatable_float_with_default", + "inplace_updatable_int_with_default")) { // these fields must only be using docValues to support inplace updates SchemaField field = schema.getField(fieldName); - assertTrue(field.toString(), - field.hasDocValues() && ! field.indexed() && ! 
field.stored()); + assertTrue(field.toString(), field.hasDocValues() && !field.indexed() && !field.stored()); } for (String fieldName : Arrays.asList("title_s", "regular_l", "stored_i")) { // these fields must support atomic updates, but not inplace updates (ie: stored) SchemaField field = schema.getField(fieldName); assertTrue(field.toString(), field.stored()); - } + } // Don't close this client, it would shutdown the CoreContainer client = new EmbeddedSolrServer(h.getCoreContainer(), h.coreName); @@ -127,50 +124,79 @@ public void deleteAllAndCommit() throws Exception { @Test public void testUpdateBadRequest() throws Exception { - final long version1 = addAndGetVersion(sdoc("id", "1", "title_s", "first", "inplace_updatable_float", 41), null); + final long version1 = + addAndGetVersion(sdoc("id", "1", "title_s", "first", "inplace_updatable_float", 41), null); assertU(commit()); // invalid value with set operation - SolrException e = expectThrows(SolrException.class, - () -> addAndAssertVersion(version1, "id", "1", "inplace_updatable_float", map("set", "NOT_NUMBER"))); + SolrException e = + expectThrows( + SolrException.class, + () -> + addAndAssertVersion( + version1, "id", "1", "inplace_updatable_float", map("set", "NOT_NUMBER"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); MatcherAssert.assertThat(e.getMessage(), containsString("For input string: \"NOT_NUMBER\"")); // invalid value with inc operation - e = expectThrows(SolrException.class, - () -> addAndAssertVersion(version1, "id", "1", "inplace_updatable_float", map("inc", "NOT_NUMBER"))); + e = + expectThrows( + SolrException.class, + () -> + addAndAssertVersion( + version1, "id", "1", "inplace_updatable_float", map("inc", "NOT_NUMBER"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); MatcherAssert.assertThat(e.getMessage(), containsString("For input string: \"NOT_NUMBER\"")); // inc op with null value - e = expectThrows(SolrException.class, - () -> addAndAssertVersion(version1, "id", "1", "inplace_updatable_float", map("inc", null))); + e = + expectThrows( + SolrException.class, + () -> + addAndAssertVersion( + version1, "id", "1", "inplace_updatable_float", map("inc", null))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); - MatcherAssert.assertThat(e.getMessage(), containsString("Invalid input 'null' for field inplace_updatable_float")); - - e = expectThrows(SolrException.class, - () -> addAndAssertVersion(version1, "id", "1", "inplace_updatable_float", - map("inc", new ArrayList<>(Collections.singletonList(123))))); + MatcherAssert.assertThat( + e.getMessage(), containsString("Invalid input 'null' for field inplace_updatable_float")); + + e = + expectThrows( + SolrException.class, + () -> + addAndAssertVersion( + version1, + "id", + "1", + "inplace_updatable_float", + map("inc", new ArrayList<>(Collections.singletonList(123))))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); - MatcherAssert.assertThat(e.getMessage(), containsString("Invalid input '[123]' for field inplace_updatable_float")); + MatcherAssert.assertThat( + e.getMessage(), containsString("Invalid input '[123]' for field inplace_updatable_float")); // regular atomic update should fail if user says they only want in-place atomic updates... 
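// A minimal client-side sketch of that rejection (hedged: assumes a reachable core at
// baseUrl and the "regular_l" stored field from this test's schema; illustrative only,
// imports assumed: java.util.Collections, org.apache.solr.client.solrj.SolrClient,
// org.apache.solr.client.solrj.impl.HttpSolrClient,
// org.apache.solr.client.solrj.request.UpdateRequest,
// org.apache.solr.common.SolrInputDocument, org.apache.solr.common.params.UpdateParams):
static void sketchRequireInPlaceRejection(String baseUrl) throws Exception {
  try (SolrClient c = new HttpSolrClient.Builder(baseUrl).build()) {
    SolrInputDocument d = new SolrInputDocument();
    d.setField("id", "1");
    // "inc" on a stored field is a regular atomic update, not an in-place one
    d.setField("regular_l", Collections.singletonMap("inc", 1));
    UpdateRequest r = new UpdateRequest();
    r.add(d);
    r.setParam(UpdateParams.REQUIRE_PARTIAL_DOC_UPDATES_INPLACE, "true");
    r.process(c); // expected: SolrException(BAD_REQUEST) "Unable to update doc in-place: 1"
  }
}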
- e = expectThrows(SolrException.class, - () -> addAndGetVersion(sdoc("id", "1", "regular_l", map("inc", 1)), - params(UpdateParams.REQUIRE_PARTIAL_DOC_UPDATES_INPLACE, "true"))); + e = + expectThrows( + SolrException.class, + () -> + addAndGetVersion( + sdoc("id", "1", "regular_l", map("inc", 1)), + params(UpdateParams.REQUIRE_PARTIAL_DOC_UPDATES_INPLACE, "true"))); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); MatcherAssert.assertThat(e.getMessage(), containsString("Unable to update doc in-place: 1")); } @Test public void testUpdatingDocValues() throws Exception { - long version1 = addAndGetVersion(sdoc("id", "1", "title_s", "first", "inplace_updatable_float", 41), null); - long version2 = addAndGetVersion(sdoc("id", "2", "title_s", "second", "inplace_updatable_float", 42), null); - long version3 = addAndGetVersion(sdoc("id", "3", "title_s", "third", "inplace_updatable_float", 43), null); + long version1 = + addAndGetVersion(sdoc("id", "1", "title_s", "first", "inplace_updatable_float", 41), null); + long version2 = + addAndGetVersion(sdoc("id", "2", "title_s", "second", "inplace_updatable_float", 42), null); + long version3 = + addAndGetVersion(sdoc("id", "3", "title_s", "third", "inplace_updatable_float", 43), null); assertU(commit("softCommit", "false")); assertQ(req("q", "*:*"), "//*[@numFound='3']"); - // the reason we're fetching these docids is to validate that the subsequent updates + // the reason we're fetching these docids is to validate that the subsequent updates // are done in place and don't cause the docids to change int docid1 = getDocId("1"); int docid2 = getDocId("2"); @@ -182,174 +208,223 @@ public void testUpdatingDocValues() throws Exception { version3 = addAndAssertVersion(version3, "id", "3", "inplace_updatable_float", map("set", 100)); assertU(commit("softCommit", "false")); - assertQ(req("q", "*:*", "sort", "id asc", "fl", "*,[docid]"), + assertQ( + req("q", "*:*", "sort", "id asc", "fl", "*,[docid]"), "//*[@numFound='3']", "//result/doc[1]/float[@name='inplace_updatable_float'][.='200.0']", "//result/doc[2]/float[@name='inplace_updatable_float'][.='300.0']", "//result/doc[3]/float[@name='inplace_updatable_float'][.='100.0']", - "//result/doc[1]/long[@name='_version_'][.='"+version1+"']", - "//result/doc[2]/long[@name='_version_'][.='"+version2+"']", - "//result/doc[3]/long[@name='_version_'][.='"+version3+"']", - "//result/doc[1]/int[@name='[docid]'][.='"+docid1+"']", - "//result/doc[2]/int[@name='[docid]'][.='"+docid2+"']", - "//result/doc[3]/int[@name='[docid]'][.='"+docid3+"']" - ); + "//result/doc[1]/long[@name='_version_'][.='" + version1 + "']", + "//result/doc[2]/long[@name='_version_'][.='" + version2 + "']", + "//result/doc[3]/long[@name='_version_'][.='" + version3 + "']", + "//result/doc[1]/int[@name='[docid]'][.='" + docid1 + "']", + "//result/doc[2]/int[@name='[docid]'][.='" + docid2 + "']", + "//result/doc[3]/int[@name='[docid]'][.='" + docid3 + "']"); // Check docValues are "inc"ed version1 = addAndAssertVersion(version1, "id", "1", "inplace_updatable_float", map("inc", 1)); version2 = addAndAssertVersion(version2, "id", "2", "inplace_updatable_float", map("inc", -2)); version3 = addAndAssertVersion(version3, "id", "3", "inplace_updatable_float", map("inc", 3)); assertU(commit("softCommit", "false")); - assertQ(req("q", "*:*", "sort", "id asc", "fl", "*,[docid]"), + assertQ( + req("q", "*:*", "sort", "id asc", "fl", "*,[docid]"), "//*[@numFound='3']", "//result/doc[1]/float[@name='inplace_updatable_float'][.='201.0']", 
"//result/doc[2]/float[@name='inplace_updatable_float'][.='298.0']", "//result/doc[3]/float[@name='inplace_updatable_float'][.='103.0']", - "//result/doc[1]/long[@name='_version_'][.='"+version1+"']", - "//result/doc[2]/long[@name='_version_'][.='"+version2+"']", - "//result/doc[3]/long[@name='_version_'][.='"+version3+"']", - "//result/doc[1]/int[@name='[docid]'][.='"+docid1+"']", - "//result/doc[2]/int[@name='[docid]'][.='"+docid2+"']", - "//result/doc[3]/int[@name='[docid]'][.='"+docid3+"']" - ); + "//result/doc[1]/long[@name='_version_'][.='" + version1 + "']", + "//result/doc[2]/long[@name='_version_'][.='" + version2 + "']", + "//result/doc[3]/long[@name='_version_'][.='" + version3 + "']", + "//result/doc[1]/int[@name='[docid]'][.='" + docid1 + "']", + "//result/doc[2]/int[@name='[docid]'][.='" + docid2 + "']", + "//result/doc[3]/int[@name='[docid]'][.='" + docid3 + "']"); // Check back to back "inc"s are working (off the transaction log) version1 = addAndAssertVersion(version1, "id", "1", "inplace_updatable_float", map("inc", 1)); - version1 = addAndAssertVersion(version1, "id", "1", "inplace_updatable_float", map("inc", 2)); // new value should be 204 + version1 = + addAndAssertVersion( + version1, + "id", + "1", + "inplace_updatable_float", + map("inc", 2)); // new value should be 204 assertU(commit("softCommit", "false")); - assertQ(req("q", "id:1", "fl", "*,[docid]"), + assertQ( + req("q", "id:1", "fl", "*,[docid]"), "//result/doc[1]/float[@name='inplace_updatable_float'][.='204.0']", - "//result/doc[1]/long[@name='_version_'][.='"+version1+"']", - "//result/doc[1]/int[@name='[docid]'][.='"+docid1+"']"); + "//result/doc[1]/long[@name='_version_'][.='" + version1 + "']", + "//result/doc[1]/int[@name='[docid]'][.='" + docid1 + "']"); - // Now let the document be atomically updated (non-inplace), ensure the old docvalue is part of new doc + // Now let the document be atomically updated (non-inplace), ensure the old docvalue is part of + // new doc version1 = addAndAssertVersion(version1, "id", "1", "title_s", map("set", "new first")); assertU(commit("softCommit", "false")); int newDocid1 = getDocId("1"); assertTrue(newDocid1 != docid1); docid1 = newDocid1; - assertQ(req("q", "id:1"), + assertQ( + req("q", "id:1"), "//result/doc[1]/float[@name='inplace_updatable_float'][.='204.0']", "//result/doc[1]/str[@name='title_s'][.='new first']", - "//result/doc[1]/long[@name='_version_'][.='"+version1+"']"); + "//result/doc[1]/long[@name='_version_'][.='" + version1 + "']"); // Check if atomic update with "inc" to a docValue works - version2 = addAndAssertVersion(version2, "id", "2", "title_s", map("set", "new second"), "inplace_updatable_float", map("inc", 2)); + version2 = + addAndAssertVersion( + version2, + "id", + "2", + "title_s", + map("set", "new second"), + "inplace_updatable_float", + map("inc", 2)); assertU(commit("softCommit", "false")); int newDocid2 = getDocId("2"); assertTrue(newDocid2 != docid2); docid2 = newDocid2; - assertQ(req("q", "id:2"), + assertQ( + req("q", "id:2"), "//result/doc[1]/float[@name='inplace_updatable_float'][.='300.0']", "//result/doc[1]/str[@name='title_s'][.='new second']", - "//result/doc[1]/long[@name='_version_'][.='"+version2+"']"); + "//result/doc[1]/long[@name='_version_'][.='" + version2 + "']"); // Check if docvalue "inc" update works for a newly created document, which is not yet committed // Case1: docvalue was supplied during add of new document - long version4 = addAndGetVersion(sdoc("id", "4", "title_s", "fourth", 
"inplace_updatable_float", "400"), params()); + long version4 = + addAndGetVersion( + sdoc("id", "4", "title_s", "fourth", "inplace_updatable_float", "400"), params()); version4 = addAndAssertVersion(version4, "id", "4", "inplace_updatable_float", map("inc", 1)); assertU(commit("softCommit", "false")); - assertQ(req("q", "id:4"), + assertQ( + req("q", "id:4"), "//result/doc[1]/float[@name='inplace_updatable_float'][.='401.0']", - "//result/doc[1]/long[@name='_version_'][.='"+version4+"']"); + "//result/doc[1]/long[@name='_version_'][.='" + version4 + "']"); // Check if docvalue "inc" update works for a newly created document, which is not yet committed // Case2: docvalue was not supplied during add of new document, should assume default long version5 = addAndGetVersion(sdoc("id", "5", "title_s", "fifth"), params()); version5 = addAndAssertVersion(version5, "id", "5", "inplace_updatable_float", map("inc", 1)); assertU(commit("softCommit", "false")); - assertQ(req("q", "id:5"), + assertQ( + req("q", "id:5"), "//result/doc[1]/float[@name='inplace_updatable_float'][.='1.0']", - "//result/doc[1]/long[@name='_version_'][.='"+version5+"']"); + "//result/doc[1]/long[@name='_version_'][.='" + version5 + "']"); // Check if docvalue "set" update works for a newly created document, which is not yet committed long version6 = addAndGetVersion(sdoc("id", "6", "title_s", "sixth"), params()); version6 = addAndAssertVersion(version6, "id", "6", "inplace_updatable_float", map("set", 600)); assertU(commit("softCommit", "false")); - assertQ(req("q", "id:6"), + assertQ( + req("q", "id:6"), "//result/doc[1]/float[@name='inplace_updatable_float'][.='600.0']", - "//result/doc[1]/long[@name='_version_'][.='"+version6+"']"); + "//result/doc[1]/long[@name='_version_'][.='" + version6 + "']"); // Check optimistic concurrency works - long v20 = addAndGetVersion(sdoc("id", "20", "title_s","first", "inplace_updatable_float", 100), params()); - SolrException exception = expectThrows(SolrException.class, () -> { - addAndGetVersion(sdoc("id","20", "_version_", -1, "inplace_updatable_float", map("inc", 1)), null); - }); + long v20 = + addAndGetVersion( + sdoc("id", "20", "title_s", "first", "inplace_updatable_float", 100), params()); + SolrException exception = + expectThrows( + SolrException.class, + () -> { + addAndGetVersion( + sdoc("id", "20", "_version_", -1, "inplace_updatable_float", map("inc", 1)), + null); + }); assertEquals(exception.toString(), SolrException.ErrorCode.CONFLICT.code, exception.code()); assertThat(exception.getMessage(), containsString("expected=-1")); - assertThat(exception.getMessage(), containsString("actual="+v20)); - + assertThat(exception.getMessage(), containsString("actual=" + v20)); long oldV20 = v20; - v20 = addAndAssertVersion(v20, "id","20", "_version_", v20, "inplace_updatable_float", map("inc", 1)); - exception = expectThrows(SolrException.class, () -> { - addAndGetVersion(sdoc("id","20", "_version_", oldV20, "inplace_updatable_float", map("inc", 1)), null); - }); + v20 = + addAndAssertVersion( + v20, "id", "20", "_version_", v20, "inplace_updatable_float", map("inc", 1)); + exception = + expectThrows( + SolrException.class, + () -> { + addAndGetVersion( + sdoc("id", "20", "_version_", oldV20, "inplace_updatable_float", map("inc", 1)), + null); + }); assertEquals(exception.toString(), SolrException.ErrorCode.CONFLICT.code, exception.code()); - assertThat(exception.getMessage(), containsString("expected="+oldV20)); - assertThat(exception.getMessage(), containsString("actual="+v20)); + 
assertThat(exception.getMessage(), containsString("expected=" + oldV20)); + assertThat(exception.getMessage(), containsString("actual=" + v20)); - v20 = addAndAssertVersion(v20, "id","20", "_version_", v20, "inplace_updatable_float", map("inc", 1)); + v20 = + addAndAssertVersion( + v20, "id", "20", "_version_", v20, "inplace_updatable_float", map("inc", 1)); // RTG before a commit - assertJQ(req("qt","/get", "id","20", "fl","id,inplace_updatable_float,_version_"), + assertJQ( + req("qt", "/get", "id", "20", "fl", "id,inplace_updatable_float,_version_"), "=={'doc':{'id':'20', 'inplace_updatable_float':" + 102.0 + ",'_version_':" + v20 + "}}"); assertU(commit("softCommit", "false")); - assertQ(req("q", "id:20"), + assertQ( + req("q", "id:20"), "//result/doc[1]/float[@name='inplace_updatable_float'][.='102.0']", - "//result/doc[1]/long[@name='_version_'][.='"+v20+"']"); + "//result/doc[1]/long[@name='_version_'][.='" + v20 + "']"); // Check if updated DVs can be used for search - assertQ(req("q", "inplace_updatable_float:102"), + assertQ( + req("q", "inplace_updatable_float:102"), "//result/doc[1]/str[@name='id'][.='20']", "//result/doc[1]/float[@name='inplace_updatable_float'][.='102.0']", - "//result/doc[1]/long[@name='_version_'][.='"+v20+"']"); + "//result/doc[1]/long[@name='_version_'][.='" + v20 + "']"); // Check if updated DVs can be used for sorting - assertQ(req("q", "*:*", "sort", "inplace_updatable_float asc"), + assertQ( + req("q", "*:*", "sort", "inplace_updatable_float asc"), "//result/doc[4]/str[@name='id'][.='1']", "//result/doc[4]/float[@name='inplace_updatable_float'][.='204.0']", - "//result/doc[5]/str[@name='id'][.='2']", "//result/doc[5]/float[@name='inplace_updatable_float'][.='300.0']", - "//result/doc[3]/str[@name='id'][.='3']", "//result/doc[3]/float[@name='inplace_updatable_float'][.='103.0']", - "//result/doc[6]/str[@name='id'][.='4']", "//result/doc[6]/float[@name='inplace_updatable_float'][.='401.0']", - "//result/doc[1]/str[@name='id'][.='5']", "//result/doc[1]/float[@name='inplace_updatable_float'][.='1.0']", - "//result/doc[7]/str[@name='id'][.='6']", "//result/doc[7]/float[@name='inplace_updatable_float'][.='600.0']", - "//result/doc[2]/str[@name='id'][.='20']", "//result/doc[2]/float[@name='inplace_updatable_float'][.='102.0']"); } public void testUserRequestedFailIfNotInPlace() throws Exception { - final SolrParams require_inplace = params(UpdateParams.REQUIRE_PARTIAL_DOC_UPDATES_INPLACE, "true"); + final SolrParams require_inplace = + params(UpdateParams.REQUIRE_PARTIAL_DOC_UPDATES_INPLACE, "true"); long v; - + // regular updates should be ok even if require_inplace params are used, - // that way true "adds" wil work even if require_inplace params are in in "/update" defaults or invariants... - long version1 = addAndGetVersion(sdoc("id", "1", "title_s", "first", "regular_l", 1, "inplace_updatable_float", 41), require_inplace); - long version2 = addAndGetVersion(sdoc("id", "2", "title_s", "second", "regular_l", 2, "inplace_updatable_float", 42), require_inplace); - long version3 = addAndGetVersion(sdoc("id", "3", "title_s", "third", "regular_l", 3, "inplace_updatable_float", 43), require_inplace); + // that way true "adds" wil work even if require_inplace params are in in "/update" defaults or + // invariants... 
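// (Aside: REQUIRE_PARTIAL_DOC_UPDATES_INPLACE only constrains partial (atomic) updates,
// so baking it into the /update handler's defaults or invariants in solrconfig.xml cannot
// break plain document adds like the three below -- they are never "partial" updates.)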
+ long version1 = + addAndGetVersion( + sdoc("id", "1", "title_s", "first", "regular_l", 1, "inplace_updatable_float", 41), + require_inplace); + long version2 = + addAndGetVersion( + sdoc("id", "2", "title_s", "second", "regular_l", 2, "inplace_updatable_float", 42), + require_inplace); + long version3 = + addAndGetVersion( + sdoc("id", "3", "title_s", "third", "regular_l", 3, "inplace_updatable_float", 43), + require_inplace); assertU(commit("softCommit", "false")); assertQ(req("q", "*:*"), "//*[@numFound='3']"); - // the reason we're fetching these docids is to validate that the subsequent updates + // the reason we're fetching these docids is to validate that the subsequent updates // are done in place and don't cause the docids to change final int docid1 = getDocId("1"); final int docid2 = getDocId("2"); final int docid3 = getDocId("3"); // this atomic update should be done in place... - v = addAndGetVersion(sdoc("id", "2", "inplace_updatable_float", map("inc", 2)), require_inplace); + v = + addAndGetVersion( + sdoc("id", "2", "inplace_updatable_float", map("inc", 2)), require_inplace); assertTrue(v > version2); version2 = v; @@ -359,48 +434,49 @@ public void testUserRequestedFailIfNotInPlace() throws Exception { version3 = v; assertU(commit("softCommit", "false")); - assertQ(req("q", "*:*", "sort", "id asc", "fl", "*,[docid]") - , "//*[@numFound='3']" - , "//result/doc[1]/long[@name='regular_l'][.='1']" - , "//result/doc[2]/long[@name='regular_l'][.='2']" - , "//result/doc[3]/long[@name='regular_l'][.='3']" - , "//result/doc[1]/float[@name='inplace_updatable_float'][.='41.0']" - , "//result/doc[2]/float[@name='inplace_updatable_float'][.='44.0']" - , "//result/doc[3]/float[@name='inplace_updatable_float'][.='46.0']" - , "//result/doc[1]/long[@name='_version_'][.='"+version1+"']" - , "//result/doc[2]/long[@name='_version_'][.='"+version2+"']" - , "//result/doc[3]/long[@name='_version_'][.='"+version3+"']" - , "//result/doc[1]/int[@name='[docid]'][.='"+docid1+"']" - , "//result/doc[2]/int[@name='[docid]'][.='"+docid2+"']" - , "//result/doc[3]/int[@name='[docid]'][.='"+docid3+"']" - ); - - // this is an atomic update, but it can't be done in-place, so it should fail w/o affecting index... - SolrException e = expectThrows(SolrException.class, - () -> addAndGetVersion(sdoc("id", "1", "regular_l", map("inc", 1)), - require_inplace)); + assertQ( + req("q", "*:*", "sort", "id asc", "fl", "*,[docid]"), + "//*[@numFound='3']", + "//result/doc[1]/long[@name='regular_l'][.='1']", + "//result/doc[2]/long[@name='regular_l'][.='2']", + "//result/doc[3]/long[@name='regular_l'][.='3']", + "//result/doc[1]/float[@name='inplace_updatable_float'][.='41.0']", + "//result/doc[2]/float[@name='inplace_updatable_float'][.='44.0']", + "//result/doc[3]/float[@name='inplace_updatable_float'][.='46.0']", + "//result/doc[1]/long[@name='_version_'][.='" + version1 + "']", + "//result/doc[2]/long[@name='_version_'][.='" + version2 + "']", + "//result/doc[3]/long[@name='_version_'][.='" + version3 + "']", + "//result/doc[1]/int[@name='[docid]'][.='" + docid1 + "']", + "//result/doc[2]/int[@name='[docid]'][.='" + docid2 + "']", + "//result/doc[3]/int[@name='[docid]'][.='" + docid3 + "']"); + + // this is an atomic update, but it can't be done in-place, so it should fail w/o affecting + // index... 
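// (Why this one cannot be in-place: regular_l is a stored field, and per the schema checks
// in beforeClass only docValues-only fields -- hasDocValues() && !indexed() && !stored() --
// qualify for in-place updates; an atomic "inc" on a stored field forces a full re-index.)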
+ SolrException e = + expectThrows( + SolrException.class, + () -> addAndGetVersion(sdoc("id", "1", "regular_l", map("inc", 1)), require_inplace)); assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code()); MatcherAssert.assertThat(e.getMessage(), containsString("Unable to update doc in-place: 1")); // data in solr should be unchanged after failed attempt at non-inplace atomic update... assertU(commit("softCommit", "false")); - assertQ(req("q", "*:*", "sort", "id asc", "fl", "*,[docid]") - , "//*[@numFound='3']" - , "//result/doc[1]/long[@name='regular_l'][.='1']" - , "//result/doc[2]/long[@name='regular_l'][.='2']" - , "//result/doc[3]/long[@name='regular_l'][.='3']" - , "//result/doc[1]/float[@name='inplace_updatable_float'][.='41.0']" - , "//result/doc[2]/float[@name='inplace_updatable_float'][.='44.0']" - , "//result/doc[3]/float[@name='inplace_updatable_float'][.='46.0']" - , "//result/doc[1]/long[@name='_version_'][.='"+version1+"']" - , "//result/doc[2]/long[@name='_version_'][.='"+version2+"']" - , "//result/doc[3]/long[@name='_version_'][.='"+version3+"']" - , "//result/doc[1]/int[@name='[docid]'][.='"+docid1+"']" - , "//result/doc[2]/int[@name='[docid]'][.='"+docid2+"']" - , "//result/doc[3]/int[@name='[docid]'][.='"+docid3+"']" - ); - - + assertQ( + req("q", "*:*", "sort", "id asc", "fl", "*,[docid]"), + "//*[@numFound='3']", + "//result/doc[1]/long[@name='regular_l'][.='1']", + "//result/doc[2]/long[@name='regular_l'][.='2']", + "//result/doc[3]/long[@name='regular_l'][.='3']", + "//result/doc[1]/float[@name='inplace_updatable_float'][.='41.0']", + "//result/doc[2]/float[@name='inplace_updatable_float'][.='44.0']", + "//result/doc[3]/float[@name='inplace_updatable_float'][.='46.0']", + "//result/doc[1]/long[@name='_version_'][.='" + version1 + "']", + "//result/doc[2]/long[@name='_version_'][.='" + version2 + "']", + "//result/doc[3]/long[@name='_version_'][.='" + version3 + "']", + "//result/doc[1]/int[@name='[docid]'][.='" + docid1 + "']", + "//result/doc[2]/int[@name='[docid]'][.='" + docid2 + "']", + "//result/doc[3]/int[@name='[docid]'][.='" + docid3 + "']"); + // the same atomic update w/o require_inplace params should proceed, and can modify the docid(s) // (but we don't assert that, since it the merge policy might kick in v = addAndGetVersion(sdoc("id", "1", "regular_l", map("inc", 100)), params()); @@ -408,40 +484,41 @@ public void testUserRequestedFailIfNotInPlace() throws Exception { version1 = v; assertU(commit("softCommit", "false")); - assertQ(req("q", "*:*", "sort", "id asc", "fl", "*") - , "//*[@numFound='3']" - , "//result/doc[1]/long[@name='regular_l'][.='101']" - , "//result/doc[2]/long[@name='regular_l'][.='2']" - , "//result/doc[3]/long[@name='regular_l'][.='3']" - , "//result/doc[1]/float[@name='inplace_updatable_float'][.='41.0']" - , "//result/doc[2]/float[@name='inplace_updatable_float'][.='44.0']" - , "//result/doc[3]/float[@name='inplace_updatable_float'][.='46.0']" - , "//result/doc[1]/long[@name='_version_'][.='"+version1+"']" - , "//result/doc[2]/long[@name='_version_'][.='"+version2+"']" - , "//result/doc[3]/long[@name='_version_'][.='"+version3+"']" - ); - - // a regular old re-indexing of a document should also succeed, even w/require_inplace, since it's not ant atomic update + assertQ( + req("q", "*:*", "sort", "id asc", "fl", "*"), + "//*[@numFound='3']", + "//result/doc[1]/long[@name='regular_l'][.='101']", + "//result/doc[2]/long[@name='regular_l'][.='2']", + "//result/doc[3]/long[@name='regular_l'][.='3']", + 
"//result/doc[1]/float[@name='inplace_updatable_float'][.='41.0']", + "//result/doc[2]/float[@name='inplace_updatable_float'][.='44.0']", + "//result/doc[3]/float[@name='inplace_updatable_float'][.='46.0']", + "//result/doc[1]/long[@name='_version_'][.='" + version1 + "']", + "//result/doc[2]/long[@name='_version_'][.='" + version2 + "']", + "//result/doc[3]/long[@name='_version_'][.='" + version3 + "']"); + + // a regular old re-indexing of a document should also succeed, even w/require_inplace, since + // it's not ant atomic update v = addAndGetVersion(sdoc("id", "1", "regular_l", "999"), require_inplace); assertTrue(v > version1); version1 = v; - - assertU(commit("softCommit", "false")); - assertQ(req("q", "*:*", "sort", "id asc", "fl", "*") - , "//*[@numFound='3']" - , "//result/doc[1]/long[@name='regular_l'][.='999']" - , "//result/doc[2]/long[@name='regular_l'][.='2']" - , "//result/doc[3]/long[@name='regular_l'][.='3']" - , "0=count(//result/doc[1]/float[@name='inplace_updatable_float'])" // not in new doc - , "//result/doc[2]/float[@name='inplace_updatable_float'][.='44.0']" - , "//result/doc[3]/float[@name='inplace_updatable_float'][.='46.0']" - , "//result/doc[1]/long[@name='_version_'][.='"+version1+"']" - , "//result/doc[2]/long[@name='_version_'][.='"+version2+"']" - , "//result/doc[3]/long[@name='_version_'][.='"+version3+"']" - ); + assertU(commit("softCommit", "false")); + assertQ( + req("q", "*:*", "sort", "id asc", "fl", "*"), + "//*[@numFound='3']", + "//result/doc[1]/long[@name='regular_l'][.='999']", + "//result/doc[2]/long[@name='regular_l'][.='2']", + "//result/doc[3]/long[@name='regular_l'][.='3']", + "0=count(//result/doc[1]/float[@name='inplace_updatable_float'])" // not in new doc + , + "//result/doc[2]/float[@name='inplace_updatable_float'][.='44.0']", + "//result/doc[3]/float[@name='inplace_updatable_float'][.='46.0']", + "//result/doc[1]/long[@name='_version_'][.='" + version1 + "']", + "//result/doc[2]/long[@name='_version_'][.='" + version2 + "']", + "//result/doc[3]/long[@name='_version_'][.='" + version3 + "']"); } - + @Test public void testUpdatingFieldNotPresentInDoc() throws Exception { long version1 = addAndGetVersion(sdoc("id", "1", "title_s", "first"), null); @@ -463,31 +540,31 @@ public void testUpdatingFieldNotPresentInDoc() throws Exception { version1 = addAndAssertVersion(version1, "id", "1", "inplace_updatable_int", map("set", 300)); assertU(commit("softCommit", "false")); - assertQ(req("q", "*:*", "sort", "id asc", "fl", "*,[docid]"), + assertQ( + req("q", "*:*", "sort", "id asc", "fl", "*,[docid]"), "//*[@numFound='3']", "//result/doc[1]/float[@name='inplace_updatable_float'][.='200.0']", "//result/doc[1]/int[@name='inplace_updatable_int'][.='300']", "//result/doc[2]/float[@name='inplace_updatable_float'][.='100.0']", "//result/doc[3]/float[@name='inplace_updatable_float'][.='300.0']", - "//result/doc[1]/long[@name='_version_'][.='"+version1+"']", - "//result/doc[2]/long[@name='_version_'][.='"+version2+"']", - "//result/doc[3]/long[@name='_version_'][.='"+version3+"']", - "//result/doc[1]/int[@name='[docid]'][.='"+docid1+"']", - "//result/doc[2]/int[@name='[docid]'][.='"+docid2+"']", - "//result/doc[3]/int[@name='[docid]'][.='"+docid3+"']" - ); + "//result/doc[1]/long[@name='_version_'][.='" + version1 + "']", + "//result/doc[2]/long[@name='_version_'][.='" + version2 + "']", + "//result/doc[3]/long[@name='_version_'][.='" + version3 + "']", + "//result/doc[1]/int[@name='[docid]'][.='" + docid1 + "']", + 
"//result/doc[2]/int[@name='[docid]'][.='" + docid2 + "']", + "//result/doc[3]/int[@name='[docid]'][.='" + docid3 + "']"); // adding new field which is not present in any docs but matches dynamic field rule // and satisfies inplace condition should be treated as inplace update version1 = addAndAssertVersion(version1, "id", "1", "inplace_updatable_i_dvo", map("set", 200)); assertU(commit("softCommit", "false")); - assertQ(req("q", "id:1", "sort", "id asc", "fl", "*,[docid]"), + assertQ( + req("q", "id:1", "sort", "id asc", "fl", "*,[docid]"), "//*[@numFound='1']", "//result/doc[1]/float[@name='inplace_updatable_float'][.='200.0']", "//result/doc[1]/int[@name='inplace_updatable_int'][.='300']", - "//result/doc[1]/int[@name='[docid]'][.='"+docid1+"']", - "//result/doc[1]/int[@name='inplace_updatable_i_dvo'][.='200']" - ); + "//result/doc[1]/int[@name='[docid]'][.='" + docid1 + "']", + "//result/doc[1]/int[@name='inplace_updatable_i_dvo'][.='200']"); // delete everything deleteAllAndCommit(); @@ -518,20 +595,20 @@ public void testUpdatingFieldNotPresentInDoc() throws Exception { assertU(commit("softCommit", "false")); // first child docs would be returned followed by parent doc - assertQ(req("q", "*:*", "fl", "*,[docid]"), + assertQ( + req("q", "*:*", "fl", "*,[docid]"), "//*[@numFound='3']", "//result/doc[3]/float[@name='inplace_updatable_float'][.='200.0']", "//result/doc[3]/int[@name='inplace_updatable_int'][.='300']", - "//result/doc[3]/int[@name='[docid]'][.='"+parentDocId+"']", - "//result/doc[1]/int[@name='[docid]'][.='"+childDocid1+"']", - "//result/doc[2]/int[@name='[docid]'][.='"+childDocid2+"']" - ); + "//result/doc[3]/int[@name='[docid]'][.='" + parentDocId + "']", + "//result/doc[1]/int[@name='[docid]'][.='" + childDocid1 + "']", + "//result/doc[2]/int[@name='[docid]'][.='" + childDocid2 + "']"); } - @Test public void testUpdateTwoDifferentFields() throws Exception { - long version1 = addAndGetVersion(sdoc("id", "1", "title_s", "first", "inplace_updatable_float", 42), null); + long version1 = + addAndGetVersion(sdoc("id", "1", "title_s", "first", "inplace_updatable_float", 42), null); assertU(commit("softCommit", "false")); assertQ(req("q", "*:*"), "//*[@numFound='1']"); @@ -544,45 +621,58 @@ public void testUpdateTwoDifferentFields() throws Exception { assertU(commit("softCommit", "false")); - assertQ(req("q", "*:*", "sort", "id asc", "fl", "*,[docid]"), + assertQ( + req("q", "*:*", "sort", "id asc", "fl", "*,[docid]"), "//*[@numFound='1']", "//result/doc[1]/float[@name='inplace_updatable_float'][.='200.0']", - "//result/doc[1]/long[@name='_version_'][.='"+version1+"']", - "//result/doc[1]/int[@name='[docid]'][.='"+docid1+"']" - ); + "//result/doc[1]/long[@name='_version_'][.='" + version1 + "']", + "//result/doc[1]/int[@name='[docid]'][.='" + docid1 + "']"); // two different update commands, updating each of the fields separately version1 = addAndAssertVersion(version1, "id", "1", "inplace_updatable_int", map("inc", 1)); version1 = addAndAssertVersion(version1, "id", "1", "inplace_updatable_float", map("inc", 1)); // same update command, updating both the fields together - version1 = addAndAssertVersion(version1, "id", "1", "inplace_updatable_int", map("inc", 1), - "inplace_updatable_float", map("inc", 1)); + version1 = + addAndAssertVersion( + version1, + "id", + "1", + "inplace_updatable_int", + map("inc", 1), + "inplace_updatable_float", + map("inc", 1)); if (random().nextBoolean()) { assertU(commit("softCommit", "false")); - assertQ(req("q", "*:*", "sort", "id asc", "fl", 
"*,[docid]"), + assertQ( + req("q", "*:*", "sort", "id asc", "fl", "*,[docid]"), "//*[@numFound='1']", "//result/doc[1]/float[@name='inplace_updatable_float'][.='202.0']", "//result/doc[1]/int[@name='inplace_updatable_int'][.='12']", - "//result/doc[1]/long[@name='_version_'][.='"+version1+"']", - "//result/doc[1]/int[@name='[docid]'][.='"+docid1+"']" - ); - } + "//result/doc[1]/long[@name='_version_'][.='" + version1 + "']", + "//result/doc[1]/int[@name='[docid]'][.='" + docid1 + "']"); + } // RTG - assertJQ(req("qt","/get", "id","1", "fl","id,inplace_updatable_float,inplace_updatable_int"), - "=={'doc':{'id':'1', 'inplace_updatable_float':" + 202.0 + ",'inplace_updatable_int':" + 12 + "}}"); - + assertJQ( + req("qt", "/get", "id", "1", "fl", "id,inplace_updatable_float,inplace_updatable_int"), + "=={'doc':{'id':'1', 'inplace_updatable_float':" + + 202.0 + + ",'inplace_updatable_int':" + + 12 + + "}}"); } @Test public void testUpdateWithValueNull() throws Exception { - long doc = addAndGetVersion(sdoc("id", "1", "title_s", "first", "inplace_updatable_float", 42), null); + long doc = + addAndGetVersion(sdoc("id", "1", "title_s", "first", "inplace_updatable_float", 42), null); assertU(commit("softCommit", "false")); assertQ(req("q", "*:*", "fq", "inplace_updatable_float:[* TO *]"), "//*[@numFound='1']"); // RTG before update - assertJQ(req("qt","/get", "id","1", "fl","id,inplace_updatable_float,title_s"), + assertJQ( + req("qt", "/get", "id", "1", "fl", "id,inplace_updatable_float,title_s"), "=={'doc':{'id':'1', 'inplace_updatable_float':" + 42.0 + ",'title_s':" + "first" + "}}"); // set the value to null @@ -592,17 +682,20 @@ public void testUpdateWithValueNull() throws Exception { // numProducts should be 0 assertQ(req("q", "*:*", "fq", "inplace_updatable_float:[* TO *]"), "//*[@numFound='0']"); // after update - assertJQ(req("qt","/get", "id","1", "fl","id,inplace_updatable_float,title_s"), + assertJQ( + req("qt", "/get", "id", "1", "fl", "id,inplace_updatable_float,title_s"), "=={'doc':{'id':'1','title_s':first}}"); } @Test public void testDVUpdatesWithDBQofUpdatedValue() throws Exception { - long version1 = addAndGetVersion(sdoc("id", "1", "title_s", "first", "inplace_updatable_float", "0"), null); + long version1 = + addAndGetVersion(sdoc("id", "1", "title_s", "first", "inplace_updatable_float", "0"), null); assertU(commit()); // in-place update - addAndAssertVersion(version1, "id", "1", "inplace_updatable_float", map("set", 100), "_version_", version1); + addAndAssertVersion( + version1, "id", "1", "inplace_updatable_float", map("set", 100), "_version_", version1); // DBQ where q=inplace_updatable_float:100 assertU(delQ("inplace_updatable_float:100")); @@ -623,36 +716,41 @@ public void testDVUpdatesWithDelete() throws Exception { if (postAddCommit) assertU(commit()); assertU(delById ? 
delI("1") : delQ("id:1")); if (postDelCommit) assertU(commit()); - version1 = addAndGetVersion(sdoc("id", "1", "inplace_updatable_float", map("set", 200)), params()); + version1 = + addAndGetVersion( + sdoc("id", "1", "inplace_updatable_float", map("set", 200)), params()); // assert current doc#1 doesn't have old value of "title_s" assertU(commit()); - assertQ(req("q", "title_s:first", "sort", "id asc", "fl", "*,[docid]"), - "//*[@numFound='0']"); + assertQ( + req("q", "title_s:first", "sort", "id asc", "fl", "*,[docid]"), "//*[@numFound='0']"); } } } - // Update to recently deleted (or non-existent) document with a "set" on updatable + // Update to recently deleted (or non-existent) document with a "set" on updatable // field should succeed, since it is executed internally as a full update // because AUDM.doInPlaceUpdateMerge() returns false - assertU(random().nextBoolean()? delI("1"): delQ("id:1")); + assertU(random().nextBoolean() ? delI("1") : delQ("id:1")); if (random().nextBoolean()) assertU(commit()); addAndAssertVersion(version1, "id", "1", "inplace_updatable_float", map("set", 200)); assertU(commit()); - assertQ(req("q", "id:1", "sort", "id asc", "fl", "*"), + assertQ( + req("q", "id:1", "sort", "id asc", "fl", "*"), "//*[@numFound='1']", "//result/doc[1]/float[@name='inplace_updatable_float'][.='200.0']"); - // Another "set" on the same field should be an in-place update + // Another "set" on the same field should be an in-place update int docid1 = getDocId("1"); addAndAssertVersion(version1, "id", "1", "inplace_updatable_float", map("set", 300)); assertU(commit()); - assertQ(req("q", "id:1", "fl", "*,[docid]"), + assertQ( + req("q", "id:1", "fl", "*,[docid]"), "//result/doc[1]/float[@name='inplace_updatable_float'][.='300.0']", - "//result/doc[1]/int[@name='[docid]'][.='"+docid1+"']"); + "//result/doc[1]/int[@name='[docid]'][.='" + docid1 + "']"); } - public static long addAndAssertVersion(long expectedCurrentVersion, Object... fields) throws Exception { + public static long addAndAssertVersion(long expectedCurrentVersion, Object... fields) + throws Exception { assert 0 < expectedCurrentVersion; long currentVersion = addAndGetVersion(sdoc(fields), null); assertTrue(currentVersion > expectedCurrentVersion); @@ -660,23 +758,29 @@ public static long addAndAssertVersion(long expectedCurrentVersion, Object... fi } /** - * Helper method to search for the specified (uniqueKey field) id using fl=[docid] + * Helper method to search for the specified (uniqueKey field) id using fl=[docid] * and return the internal lucene docid. 
*/ private int getDocId(String id) throws Exception { - SolrDocumentList results = client.query(params("q","id:" + id, "fl", "[docid]")).getResults(); + SolrDocumentList results = client.query(params("q", "id:" + id, "fl", "[docid]")).getResults(); assertEquals(1, results.getNumFound()); assertEquals(1, results.size()); Object docid = results.get(0).getFieldValue("[docid]"); assertTrue(docid instanceof Integer); - return ((Integer)docid); + return ((Integer) docid); } @Test public void testUpdateOfNonExistentDVsShouldNotFail() throws Exception { // schema sanity check: assert that the nonexistent_field_i_dvo doesn't exist already - FieldInfo fi = h.getCore().withSearcher(searcher -> - searcher.getSlowAtomicReader().getFieldInfos().fieldInfo("nonexistent_field_i_dvo")); + FieldInfo fi = + h.getCore() + .withSearcher( + searcher -> + searcher + .getSlowAtomicReader() + .getFieldInfos() + .fieldInfo("nonexistent_field_i_dvo")); assertNull(fi); // Partial update @@ -688,17 +792,23 @@ public void testUpdateOfNonExistentDVsShouldNotFail() throws Exception { assertU(commit()); - assertQ(req("q", "*:*"), "//*[@numFound='2']"); - assertQ(req("q", "nonexistent_field_i_dvo:42"), "//*[@numFound='1']"); - assertQ(req("q", "nonexistent_field_i_dvo:2"), "//*[@numFound='1']"); + assertQ(req("q", "*:*"), "//*[@numFound='2']"); + assertQ(req("q", "nonexistent_field_i_dvo:42"), "//*[@numFound='1']"); + assertQ(req("q", "nonexistent_field_i_dvo:2"), "//*[@numFound='1']"); } @Test public void testOnlyPartialUpdatesBetweenCommits() throws Exception { // Full updates - long version1 = addAndGetVersion(sdoc("id", "1", "title_s", "first", "val1_i_dvo", "1", "val2_l_dvo", "1"), params()); - long version2 = addAndGetVersion(sdoc("id", "2", "title_s", "second", "val1_i_dvo", "2", "val2_l_dvo", "2"), params()); - long version3 = addAndGetVersion(sdoc("id", "3", "title_s", "third", "val1_i_dvo", "3", "val2_l_dvo", "3"), params()); + long version1 = + addAndGetVersion( + sdoc("id", "1", "title_s", "first", "val1_i_dvo", "1", "val2_l_dvo", "1"), params()); + long version2 = + addAndGetVersion( + sdoc("id", "2", "title_s", "second", "val1_i_dvo", "2", "val2_l_dvo", "2"), params()); + long version3 = + addAndGetVersion( + sdoc("id", "3", "title_s", "third", "val1_i_dvo", "3", "val2_l_dvo", "3"), params()); assertU(commit("softCommit", "false")); assertQ(req("q", "*:*", "fl", "*,[docid]"), "//*[@numFound='3']"); @@ -708,7 +818,7 @@ public void testOnlyPartialUpdatesBetweenCommits() throws Exception { int docid3 = getDocId("3"); int numPartialUpdates = 1 + random().nextInt(5000); - for (int i=0; ivalField Long field (presumably that only uses docvalues) against an in memory model - * maintained in parallel (for the purpose of testing the correctness of in-place updates.. + * Executes a sequence of commands against Solr, while tracking the expected value of a specified + * valField Long field (presumably that only uses docvalues) against an in memory + * model maintained in parallel (for the purpose of testing the correctness of in-place updates.. + * + *
+   * <p>A few restrictions are placed on the {@link SolrInputDocument}s that can be included when
+   * using this method, in order to keep the in-memory model management simple:
    *
-   * <p>
-   * A few restrictions are placed on the {@link SolrInputDocument}s that can be included when using
-   * this method, in order to keep the in-memory model management simple:
-   * <ul>
-   *  <li><code>id</code> must be uniqueKey field</li>
-   *  <li><code>id</code> may have any FieldType, but all values must be parsable as Integers</li>
-   *  <li><code>valField</code> must be a single valued field</li>
-   *  <li>All values in the <code>valField</code> must either be {@link Number}s, or Maps containing
-   *      atomic updates ("inc" or "set") where the atomic value is a {@link Number}</li>
+   * <ul>
+   *   <li><code>id</code> must be uniqueKey field
+   *   <li><code>id</code> may have any FieldType, but all values must be parsable as Integers
+   *   <li><code>valField</code> must be a single valued field
+   *   <li>All values in the <code>valField</code> must either be {@link Number}s, or Maps
+   *       containing atomic updates ("inc" or "set") where the atomic value is a {@link Number}
    * </ul>
- * + * * @param valField the field to model - * @param commands A sequence of Commands which can either be SolrInputDocuments - * (regular or containing atomic update Maps) - * or one of the {@link TestInPlaceUpdatesStandalone#HARDCOMMIT} or {@link TestInPlaceUpdatesStandalone#SOFTCOMMIT} sentinal objects. + * @param commands A sequence of Commands which can either be SolrInputDocuments (regular or + * containing atomic update Maps) or one of the {@link + * TestInPlaceUpdatesStandalone#HARDCOMMIT} or {@link TestInPlaceUpdatesStandalone#SOFTCOMMIT} + * sentinal objects. */ @SuppressWarnings({"unchecked", "rawtypes"}) public void checkReplay(final String valField, Object... commands) throws Exception { - + HashMap model = new LinkedHashMap<>(); HashMap committedModel = new LinkedHashMap<>(); // by default, we only check the committed model after a commit // of if the number of total commands is relatively small. // - // (in theory, there's no reason to check the committed model unless we know there's been a commit - // but for smaller tests the overhead of doing so is tiny, so we might as well) + // (in theory, there's no reason to check the committed model unless we know there's been a + // commit but for smaller tests the overhead of doing so is tiny, so we might as well) // // if some test seed fails, and you want to force the committed model to be checked // after every command, just temporaribly force this variable to true... boolean checkCommittedModel = (commands.length < 50); - + for (Object cmd : commands) { if (cmd == SOFTCOMMIT) { assertU(commit("softCommit", "true")); @@ -1093,30 +1241,32 @@ public void checkReplay(final String valField, Object... commands) throws Except checkCommittedModel = true; } else { assertNotNull("null command in checkReplay", cmd); - assertTrue("cmd is neither sentinal (HARD|SOFT)COMMIT object, nor Solr doc: " + cmd.getClass(), - cmd instanceof SolrInputDocument); - + assertTrue( + "cmd is neither sentinal (HARD|SOFT)COMMIT object, nor Solr doc: " + cmd.getClass(), + cmd instanceof SolrInputDocument); + final SolrInputDocument sdoc = (SolrInputDocument) cmd; final int id = Integer.parseInt(sdoc.getFieldValue("id").toString()); - + final DocInfo previousInfo = model.get(id); final Long previousValue = (null == previousInfo) ? null : previousInfo.value; - + final long version = addAndGetVersion(sdoc, null); - + final Object val = sdoc.getFieldValue(valField); if (val instanceof Map) { // atomic update of the field we're modeling - - Map atomicUpdate = (Map) val; + + Map atomicUpdate = (Map) val; assertEquals(sdoc.toString(), 1, atomicUpdate.size()); if (atomicUpdate.containsKey("inc")) { - // Solr treats inc on a non-existing doc (or doc w/o existing value) as if existing value is 0 + // Solr treats inc on a non-existing doc (or doc w/o existing value) as if existing + // value is 0 final long base = (null == previousValue) ? 0L : previousValue; - model.put(id, new DocInfo(version, - base + ((Number)atomicUpdate.get("inc")).longValue())); + model.put( + id, new DocInfo(version, base + ((Number) atomicUpdate.get("inc")).longValue())); } else if (atomicUpdate.containsKey("set")) { - model.put(id, new DocInfo(version, ((Number)atomicUpdate.get("set")).longValue())); + model.put(id, new DocInfo(version, ((Number) atomicUpdate.get("set")).longValue())); } else { fail("wtf update is this? ... " + sdoc); } @@ -1129,28 +1279,29 @@ public void checkReplay(final String valField, Object... 
commands) throws Except // for now, assume it's atomic and we're going to keep our existing value... Long newValue = (null == previousInfo) ? null : previousInfo.value; for (SolrInputField field : sdoc) { - if (! ( "id".equals(field.getName()) || (field.getValue() instanceof Map)) ) { + if (!("id".equals(field.getName()) || (field.getValue() instanceof Map))) { // not an atomic update, newValue in model should be null newValue = null; break; } } model.put(id, new DocInfo(version, newValue)); - + } else { // regular replacement of the value in the field we're modeling - + assertTrue("Model field value is not a Number: " + val.getClass(), val instanceof Number); - model.put(id, new DocInfo(version, ((Number)val).longValue())); + model.put(id, new DocInfo(version, ((Number) val).longValue())); } } // after every op, check the model(s) - + // RTG to check the values for every id against the model for (Map.Entry entry : model.entrySet()) { final Long expected = entry.getValue().value; - assertEquals(expected, client.getById(String.valueOf(entry.getKey())).getFirstValue(valField)); + assertEquals( + expected, client.getById(String.valueOf(entry.getKey())).getFirstValue(valField)); } // search to check the values for every id in the committed model @@ -1162,17 +1313,19 @@ public void checkReplay(final String valField, Object... commands) throws Except Integer id = entry.getKey(); Long expected = entry.getValue().value; if (null != expected) { - xpaths[i] = "//result/doc[./str='"+id+"'][./long='"+expected+"']"; + xpaths[i] = "//result/doc[./str='" + id + "'][./long='" + expected + "']"; } else { - xpaths[i] = "//result/doc[./str='"+id+"'][not(./long)]"; - } + xpaths[i] = "//result/doc[./str='" + id + "'][not(./long)]"; + } i++; } - xpaths[i] = "//*[@numFound='"+numCommitedDocs+"']"; - assertQ(req("q", "*:*", - "fl", "id," + valField, - "rows", ""+numCommitedDocs), - xpaths); + xpaths[i] = "//*[@numFound='" + numCommitedDocs + "']"; + assertQ( + req( + "q", "*:*", + "fl", "id," + valField, + "rows", "" + numCommitedDocs), + xpaths); } } } @@ -1180,13 +1333,23 @@ public void checkReplay(final String valField, Object... 
commands) throws Except @Test public void testMixedInPlaceAndNonInPlaceAtomicUpdates() throws Exception { SolrDocument rtgDoc = null; - long version1 = addAndGetVersion(sdoc("id", "1", "inplace_updatable_float", "100", "stored_i", "100"), params()); - - version1 = addAndAssertVersion(version1, "id", "1", "inplace_updatable_float", map("inc", "1"), "stored_i", map("inc", "1")); + long version1 = + addAndGetVersion( + sdoc("id", "1", "inplace_updatable_float", "100", "stored_i", "100"), params()); + + version1 = + addAndAssertVersion( + version1, + "id", + "1", + "inplace_updatable_float", + map("inc", "1"), + "stored_i", + map("inc", "1")); rtgDoc = client.getById("1"); assertEquals(101, rtgDoc.getFieldValue("stored_i")); assertEquals(101.0f, rtgDoc.getFieldValue("inplace_updatable_float")); - + version1 = addAndAssertVersion(version1, "id", "1", "inplace_updatable_float", map("inc", "1")); rtgDoc = client.getById("1"); assertEquals(101, rtgDoc.getFieldValue("stored_i")); @@ -1198,12 +1361,12 @@ public void testMixedInPlaceAndNonInPlaceAtomicUpdates() throws Exception { assertEquals(102.0f, rtgDoc.getFieldValue("inplace_updatable_float")); assertU(commit("softCommit", "false")); - assertQ(req("q", "*:*", "sort", "id asc", "fl", "*"), - "//*[@numFound='1']", - "//result/doc[1]/float[@name='inplace_updatable_float'][.='102.0']", - "//result/doc[1]/int[@name='stored_i'][.='102']", - "//result/doc[1]/long[@name='_version_'][.='" + version1 + "']" - ); + assertQ( + req("q", "*:*", "sort", "id asc", "fl", "*"), + "//*[@numFound='1']", + "//result/doc[1]/float[@name='inplace_updatable_float'][.='102.0']", + "//result/doc[1]/int[@name='stored_i'][.='102']", + "//result/doc[1]/long[@name='_version_'][.='" + version1 + "']"); // recheck RTG after commit rtgDoc = client.getById("1"); @@ -1211,102 +1374,137 @@ public void testMixedInPlaceAndNonInPlaceAtomicUpdates() throws Exception { assertEquals(102.0f, rtgDoc.getFieldValue("inplace_updatable_float")); } - /** + /** * @see #callComputeInPlaceUpdatableFields - * @see AtomicUpdateDocumentMerger#computeInPlaceUpdatableFields + * @see AtomicUpdateDocumentMerger#computeInPlaceUpdatableFields */ @Test public void testComputeInPlaceUpdatableFields() throws Exception { Set inPlaceUpdatedFields = new HashSet(); // these asserts should hold true regardless of type, or wether the field has a default - List fieldsToCheck = Arrays.asList("inplace_updatable_float", - "inplace_updatable_int", - "inplace_updatable_float_with_default", - "inplace_updatable_int_with_default"); + List fieldsToCheck = + Arrays.asList( + "inplace_updatable_float", + "inplace_updatable_int", + "inplace_updatable_float_with_default", + "inplace_updatable_int_with_default"); Collections.shuffle(fieldsToCheck, random()); // ... 
and regardless of order checked for (String field : fieldsToCheck) { - // In-place updatable field updated before it exists SHOULD NOW BE in-place updated (since LUCENE-8316): - inPlaceUpdatedFields = callComputeInPlaceUpdatableFields(sdoc("id", "1", "_version_", 42L, - field, map("set", 10))); + // In-place updatable field updated before it exists SHOULD NOW BE in-place updated (since + // LUCENE-8316): + inPlaceUpdatedFields = + callComputeInPlaceUpdatableFields( + sdoc("id", "1", "_version_", 42L, field, map("set", 10))); assertTrue(field, inPlaceUpdatedFields.contains(field)); - + // In-place updatable field updated after it exists SHOULD BE in-place updated: addAndGetVersion(sdoc("id", "1", field, "0"), params()); // setting up the dv - inPlaceUpdatedFields = callComputeInPlaceUpdatableFields(sdoc("id", "1", "_version_", 42L, - field, map("set", 10))); + inPlaceUpdatedFields = + callComputeInPlaceUpdatableFields( + sdoc("id", "1", "_version_", 42L, field, map("set", 10))); assertTrue(field, inPlaceUpdatedFields.contains(field)); - inPlaceUpdatedFields = callComputeInPlaceUpdatableFields(sdoc("id", "1", "_version_", 42L, - field, map("inc", 10))); + inPlaceUpdatedFields = + callComputeInPlaceUpdatableFields( + sdoc("id", "1", "_version_", 42L, field, map("inc", 10))); assertTrue(field, inPlaceUpdatedFields.contains(field)); - final String altFieldWithDefault = field.contains("float") ? - "inplace_updatable_int_with_default" : "inplace_updatable_int_with_default"; - + final String altFieldWithDefault = + field.contains("float") + ? "inplace_updatable_int_with_default" + : "inplace_updatable_int_with_default"; + // Updating an in-place updatable field (with a default) for the first time. // DV for it should have been already created when first document was indexed (above), // since it has a default value - inPlaceUpdatedFields = callComputeInPlaceUpdatableFields(sdoc("id", "1", "_version_", 42L, - altFieldWithDefault, map("set", 10))); - assertTrue(field + " -> " + altFieldWithDefault, inPlaceUpdatedFields.contains(altFieldWithDefault)); - + inPlaceUpdatedFields = + callComputeInPlaceUpdatableFields( + sdoc("id", "1", "_version_", 42L, altFieldWithDefault, map("set", 10))); + assertTrue( + field + " -> " + altFieldWithDefault, inPlaceUpdatedFields.contains(altFieldWithDefault)); + deleteAllAndCommit(); } - + // Non in-place updates addAndGetVersion(sdoc("id", "1", "stored_i", "0"), params()); // setting up the dv - assertTrue("stored field updated", - callComputeInPlaceUpdatableFields(sdoc("id", "1", "_version_", 42L, - "stored_i", map("inc", 1))).isEmpty()); - - assertTrue("full document update", - callComputeInPlaceUpdatableFields(sdoc("id", "1", "_version_", 42L, - "inplace_updatable_int_with_default", "100")).isEmpty()); - - assertFalse("non existent dynamic dv field updated first time", - callComputeInPlaceUpdatableFields(sdoc("id", "1", "_version_", 42L, - "new_updatable_int_i_dvo", map("set", 10))).isEmpty()); - + assertTrue( + "stored field updated", + callComputeInPlaceUpdatableFields( + sdoc("id", "1", "_version_", 42L, "stored_i", map("inc", 1))) + .isEmpty()); + + assertTrue( + "full document update", + callComputeInPlaceUpdatableFields( + sdoc("id", "1", "_version_", 42L, "inplace_updatable_int_with_default", "100")) + .isEmpty()); + + assertFalse( + "non existent dynamic dv field updated first time", + callComputeInPlaceUpdatableFields( + sdoc("id", "1", "_version_", 42L, "new_updatable_int_i_dvo", map("set", 10))) + .isEmpty()); + // After adding a full document with the 
dynamic dv field, in-place update should work - addAndGetVersion(sdoc("id", "2", "new_updatable_int_i_dvo", "0"), params()); // setting up the dv + addAndGetVersion( + sdoc("id", "2", "new_updatable_int_i_dvo", "0"), params()); // setting up the dv if (random().nextBoolean()) { assertU(commit("softCommit", "false")); } - inPlaceUpdatedFields = callComputeInPlaceUpdatableFields(sdoc("id", "2", "_version_", 42L, - "new_updatable_int_i_dvo", map("set", 10))); + inPlaceUpdatedFields = + callComputeInPlaceUpdatableFields( + sdoc("id", "2", "_version_", 42L, "new_updatable_int_i_dvo", map("set", 10))); assertTrue(inPlaceUpdatedFields.contains("new_updatable_int_i_dvo")); // for copy fields, regardless of whether the source & target support inplace updates, // it won't be inplace if the DVs don't exist yet... - assertTrue("inplace fields should be empty when doc has no copyfield src values yet", - callComputeInPlaceUpdatableFields(sdoc("id", "1", "_version_", 42L, - "copyfield1_src__both_updatable", map("set", 1), - "copyfield2_src__only_src_updatable", map("set", 2))).isEmpty()); + assertTrue( + "inplace fields should be empty when doc has no copyfield src values yet", + callComputeInPlaceUpdatableFields( + sdoc( + "id", + "1", + "_version_", + 42L, + "copyfield1_src__both_updatable", + map("set", 1), + "copyfield2_src__only_src_updatable", + map("set", 2))) + .isEmpty()); // now add a doc that *does* have the src field for each copyfield... - addAndGetVersion(sdoc("id", "3", - "copyfield1_src__both_updatable", -13, - "copyfield2_src__only_src_updatable", -15), params()); + addAndGetVersion( + sdoc( + "id", "3", + "copyfield1_src__both_updatable", -13, + "copyfield2_src__only_src_updatable", -15), + params()); if (random().nextBoolean()) { assertU(commit("softCommit", "false")); } - - // If a supported dv field has a copyField target which is supported, it should be an in-place update - inPlaceUpdatedFields = callComputeInPlaceUpdatableFields(sdoc("id", "3", "_version_", 42L, - "copyfield1_src__both_updatable", map("set", 10))); + + // If a supported dv field has a copyField target which is supported, it should be an in-place + // update + inPlaceUpdatedFields = + callComputeInPlaceUpdatableFields( + sdoc("id", "3", "_version_", 42L, "copyfield1_src__both_updatable", map("set", 10))); assertTrue(inPlaceUpdatedFields.contains("copyfield1_src__both_updatable")); - // If a supported dv field has a copyField target which is not supported, it should not be an in-place update - inPlaceUpdatedFields = callComputeInPlaceUpdatableFields(sdoc("id", "3", "_version_", 42L, - "copyfield2_src__only_src_updatable", map("set", 10))); + // If a supported dv field has a copyField target which is not supported, it should not be an + // in-place update + inPlaceUpdatedFields = + callComputeInPlaceUpdatableFields( + sdoc( + "id", "3", "_version_", 42L, "copyfield2_src__only_src_updatable", map("set", 10))); assertTrue(inPlaceUpdatedFields.isEmpty()); } @Test /** - * Test the @see {@link AtomicUpdateDocumentMerger#doInPlaceUpdateMerge(AddUpdateCommand,Set)} - * method is working fine + * Test the @see {@link AtomicUpdateDocumentMerger#doInPlaceUpdateMerge(AddUpdateCommand,Set)} + * method is working fine */ public void testDoInPlaceUpdateMerge() throws Exception { long version1 = addAndGetVersion(sdoc("id", "1", "title_s", "first"), null); @@ -1320,10 +1518,13 @@ public void testDoInPlaceUpdateMerge() throws Exception { // Test the AUDM.doInPlaceUpdateMerge() method is working fine try (SolrQueryRequest req = req()) 
{ - AddUpdateCommand cmd = buildAddUpdateCommand(req, sdoc("id", "1", "_version_", 42L, - "inplace_updatable_float", map("inc", 10))); + AddUpdateCommand cmd = + buildAddUpdateCommand( + req, sdoc("id", "1", "_version_", 42L, "inplace_updatable_float", map("inc", 10))); AtomicUpdateDocumentMerger docMerger = new AtomicUpdateDocumentMerger(req); - assertTrue(docMerger.doInPlaceUpdateMerge(cmd, AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd))); + assertTrue( + docMerger.doInPlaceUpdateMerge( + cmd, AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd))); assertEquals(42L, cmd.getSolrInputDocument().getFieldValue("_version_")); assertEquals(42L, cmd.getSolrInputDocument().getFieldValue("_version_")); assertEquals(210f, cmd.getSolrInputDocument().getFieldValue("inplace_updatable_float")); @@ -1331,21 +1532,24 @@ public void testDoInPlaceUpdateMerge() throws Exception { assertFalse(cmd.getSolrInputDocument().containsKey("title_s")); assertEquals(version1, cmd.prevVersion); } - + // do a commit, and the same results should be repeated assertU(commit("softCommit", "false")); // Test the AUDM.doInPlaceUpdateMerge() method is working fine try (SolrQueryRequest req = req()) { - AddUpdateCommand cmd = buildAddUpdateCommand(req, sdoc("id", "1", "_version_", 42L, - "inplace_updatable_float", map("inc", 10))); + AddUpdateCommand cmd = + buildAddUpdateCommand( + req, sdoc("id", "1", "_version_", 42L, "inplace_updatable_float", map("inc", 10))); AtomicUpdateDocumentMerger docMerger = new AtomicUpdateDocumentMerger(req); - assertTrue(docMerger.doInPlaceUpdateMerge(cmd, AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd))); + assertTrue( + docMerger.doInPlaceUpdateMerge( + cmd, AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd))); assertEquals(42L, cmd.getSolrInputDocument().getFieldValue("_version_")); assertEquals(42L, cmd.getSolrInputDocument().getFieldValue("_version_")); assertEquals(210f, cmd.getSolrInputDocument().getFieldValue("inplace_updatable_float")); // in-place merged doc shouldn't have non-inplace fields from the index/tlog - assertFalse(cmd.getSolrInputDocument().containsKey("title_s")); + assertFalse(cmd.getSolrInputDocument().containsKey("title_s")); assertEquals(version1, cmd.prevVersion); } } @@ -1363,26 +1567,32 @@ public void testFailOnVersionConflicts() throws Exception { params.add("_version_", "-1"); SolrInputDocument doc = new SolrInputDocument("id", "1", "title_s", "first2"); SolrInputDocument doc2 = new SolrInputDocument("id", "2", "title_s", "second"); - SolrException ex = expectThrows(SolrException.class, "This should have failed", () -> updateJ(jsonAdd(doc, doc2), params)); + SolrException ex = + expectThrows( + SolrException.class, + "This should have failed", + () -> updateJ(jsonAdd(doc, doc2), params)); assertTrue(ex.getMessage().contains("version conflict for")); params.add(CommonParams.FAIL_ON_VERSION_CONFLICTS, "false"); - updateJ(jsonAdd(doc, doc2), params);//this should not throw any error + updateJ(jsonAdd(doc, doc2), params); // this should not throw any error assertU(commit()); assertQ(req("q", "title_s:second"), "//*[@numFound='1']"); - assertQ(req("q", "title_s:first1"), "//*[@numFound='1']");// but the old value exists - assertQ(req("q", "title_s:first2"), "//*[@numFound='0']");// and the new value does not reflect + assertQ(req("q", "title_s:first1"), "//*[@numFound='1']"); // but the old value exists + assertQ(req("q", "title_s:first2"), "//*[@numFound='0']"); // and the new value does not reflect } - /** - * Helper 
method that sets up a req/cmd to run {@link AtomicUpdateDocumentMerger#computeInPlaceUpdatableFields} - * on the specified solr input document. + /** + * Helper method that sets up a req/cmd to run {@link + * AtomicUpdateDocumentMerger#computeInPlaceUpdatableFields} on the specified solr input document. */ - private static Set callComputeInPlaceUpdatableFields(final SolrInputDocument sdoc) throws Exception { + private static Set callComputeInPlaceUpdatableFields(final SolrInputDocument sdoc) + throws Exception { try (SolrQueryRequest req = req()) { AddUpdateCommand cmd = new AddUpdateCommand(req); cmd.solrDoc = sdoc; assertTrue(cmd.solrDoc.containsKey(CommonParams.VERSION_FIELD)); - cmd.setVersion(Long.parseLong(cmd.solrDoc.getFieldValue(CommonParams.VERSION_FIELD).toString())); + cmd.setVersion( + Long.parseLong(cmd.solrDoc.getFieldValue(CommonParams.VERSION_FIELD).toString())); return AtomicUpdateDocumentMerger.computeInPlaceUpdatableFields(cmd); } } diff --git a/solr/core/src/test/org/apache/solr/update/TestIndexingPerformance.java b/solr/core/src/test/org/apache/solr/update/TestIndexingPerformance.java index 52230c1ce3b..704c2dcaefe 100644 --- a/solr/core/src/test/org/apache/solr/update/TestIndexingPerformance.java +++ b/solr/core/src/test/org/apache/solr/update/TestIndexingPerformance.java @@ -16,30 +16,31 @@ */ package org.apache.solr.update; -import org.apache.solr.common.SolrInputDocument; -import org.apache.solr.request.SolrQueryRequest; +import java.io.IOException; +import java.lang.invoke.MethodHandles; +import java.util.Arrays; import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.util.StrUtils; +import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.util.RTimer; import org.junit.AfterClass; import org.junit.BeforeClass; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.lang.invoke.MethodHandles; -import java.util.Arrays; - -/** Bypass the normal Solr pipeline and just text indexing performance - * starting at the update handler. The same document is indexed repeatedly. - * - * $ ant test -Dtestcase=TestIndexingPerformance -Dargs="-server -Diter=100000"; grep throughput build/test-results/*TestIndexingPerformance.xml +/** + * Bypass the normal Solr pipeline and just text indexing performance starting at the update + * handler. The same document is indexed repeatedly. + * + *

$ ant test -Dtestcase=TestIndexingPerformance -Dargs="-server -Diter=100000"; grep throughput + * build/test-results/*TestIndexingPerformance.xml */ public class TestIndexingPerformance extends SolrTestCaseJ4 { - + // TODO: fix this test to not require FSDirectory static String savedFactory; + @BeforeClass public static void beforeClass() throws Exception { savedFactory = System.getProperty("solr.DirectoryFactory"); @@ -47,6 +48,7 @@ public static void beforeClass() throws Exception { initCore("solrconfig_perf.xml", "schema12.xml"); } + @AfterClass public static void afterClass() { if (savedFactory == null) { @@ -57,61 +59,66 @@ public static void afterClass() { } private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - public void testIndexingPerf() throws IOException { - int iter=1000; + int iter = 1000; String iterS = System.getProperty("iter"); - if (iterS != null) iter=Integer.parseInt(iterS); - boolean overwrite = Boolean.parseBoolean(System.getProperty("overwrite","false")); + if (iterS != null) iter = Integer.parseInt(iterS); + boolean overwrite = Boolean.parseBoolean(System.getProperty("overwrite", "false")); String doc = System.getProperty("doc"); if (doc != null) { - StrUtils.splitSmart(doc,",",true); + StrUtils.splitSmart(doc, ",", true); } - SolrQueryRequest req = lrf.makeRequest(); UpdateHandler updateHandler = req.getCore().getUpdateHandler(); String field = "textgap"; - String[] fields = {field,"simple" - ,field,"test" - ,field,"how now brown cow" - ,field,"what's that?" - ,field,"radical!" - ,field,"what's all this about, anyway?" - ,field,"just how fast is this text indexing?" - }; - - - /*** String[] fields = { - "a_i","1" - ,"b_i","2" - ,"c_i","3" - ,"d_i","4" - ,"e_i","5" - ,"f_i","6" - ,"g_i","7" - ,"h_i","8" - ,"i_i","9" - ,"j_i","0" - ,"k_i","0" + field, + "simple", + field, + "test", + field, + "how now brown cow", + field, + "what's that?", + field, + "radical!", + field, + "what's all this about, anyway?", + field, + "just how fast is this text indexing?" }; - ***/ + + /*** + * String[] fields = { + * "a_i","1" + * ,"b_i","2" + * ,"c_i","3" + * ,"d_i","4" + * ,"e_i","5" + * ,"f_i","6" + * ,"g_i","7" + * ,"h_i","8" + * ,"i_i","9" + * ,"j_i","0" + * ,"k_i","0" + * }; + ***/ final RTimer timer = new RTimer(); AddUpdateCommand add = new AddUpdateCommand(req); add.overwrite = overwrite; - for (int i=0; iof / which params in the child / parent QParsers - * when a specific _nest_path_ is desired + * Randomized test to look for flaws in the documented approach for building "safe" values of the + * of / which params in the child / parent + * QParsers when a specific _nest_path_ is desired * * @see SOLR-14687 */ public void testRandomNestPathQueryFiltering() throws Exception { // First: build a bunch of complex randomly nested documents, with random "nest paths" - // re-use the same "path segments" at various levels of nested, so as to confuse things even more + // re-use the same "path segments" at various levels of nested, so as to confuse things even + // more final RandomNestedDocModel docs = new RandomNestedDocModel(); for (int i = 0; i < 50; i++) { final SolrInputDocument rootDoc = docs.buildRandomDoc(); @@ -243,67 +300,71 @@ public void testRandomNestPathQueryFiltering() throws Exception { // *:* w/ parent parser... 
    // starts at "root" parent_path and recurses until we get no (expected) results
-    assertTrue(// we expected at least one query for every "real" path,
-               // but there will be more because we'll try lots of sub-paths that have no docs
-               docs.numDocsDescendentFromPath.keySet().size()
-               < docs.recursiveCheckParentQueryOfAllChildren(Collections.emptyList()));
+    assertTrue( // we expected at least one query for every "real" path,
+        // but there will be more because we'll try lots of sub-paths that have no docs
+        docs.numDocsDescendentFromPath.keySet().size()
+            < docs.recursiveCheckParentQueryOfAllChildren(Collections.emptyList()));
    // sanity check: path that is guaranteed not to exist...
    assertEquals(1, docs.recursiveCheckParentQueryOfAllChildren(Arrays.asList("xxx", "yyy")));

    // *:* w/ child parser...
    // starts at "root" parent_path and recurses until we get no (expected) results
-    assertTrue(// we expected at least one query for every "real" path,
-               // but there will be more because we'll try lots of sub-paths that have no docs
-               docs.numDocsWithPathWithKids.keySet().size()
-               < docs.recursiveCheckChildQueryOfAllParents(Collections.emptyList()));
+    assertTrue( // we expected at least one query for every "real" path,
+        // but there will be more because we'll try lots of sub-paths that have no docs
+        docs.numDocsWithPathWithKids.keySet().size()
+            < docs.recursiveCheckChildQueryOfAllParents(Collections.emptyList()));
    // sanity check: path that is guaranteed not to exist...
    assertEquals(1, docs.recursiveCheckChildQueryOfAllParents(Arrays.asList("xxx", "yyy")));

    // querying against individual child ids w/ both parent & child parser...
    docs.checkParentAndChildQueriesOfEachDocument();
  }
-
  private static class RandomNestedDocModel {
    public static final List PATH_ELEMENTS = Arrays.asList("aa", "bb", "cc", "dd");
-    private final Map allDocs = new HashMap<>();
-
-    public final Map numDocsDescendentFromPath = new HashMap<>();
-    public final Map numDocsWithPathWithKids = new HashMap<>();
-
+    private final Map allDocs = new HashMap<>();
+
+    public final Map numDocsDescendentFromPath = new HashMap<>();
+    public final Map numDocsWithPathWithKids = new HashMap<>();
+
    private int idCounter = 0;

    public synchronized SolrInputDocument buildRandomDoc() {
      return buildRandomDoc(null, Collections.emptyList(), 15);
    }
+
    private static String joinPath(List test_path) {
      return "/" + String.join("/", test_path);
    }
-    private synchronized SolrInputDocument buildRandomDoc(SolrInputDocument parent,
-                                                          List test_path,
-                                                          int maxDepthAndBreadth) {
+
+    private synchronized SolrInputDocument buildRandomDoc(
+        SolrInputDocument parent, List test_path, int maxDepthAndBreadth) {
      final String path_string = joinPath(test_path);
      final String id = "" + (++idCounter);
      maxDepthAndBreadth--;
-      final SolrInputDocument doc = sdoc
-        ("id", id,
-         // may change, but we want it 0 even if we never add any
-         "num_direct_kids_s", "0",
-         // conceptually matches _nest_path_ but should be easier to make assertions about (no inline position #s)
-         "test_path_s", path_string);
+      final SolrInputDocument doc =
+          sdoc(
+              "id", id,
+              // may change, but we want it 0 even if we never add any
+              "num_direct_kids_s", "0",
+              // conceptually matches _nest_path_ but should be easier to make assertions about (no
+              // inline position #s)
+              "test_path_s", path_string);
      if (null != parent) {
        // order matters: if we add the Collection first, SolrInputDocument will try to reuse it
        doc.addField("ancestor_ids_ss", parent.getFieldValue("id"));
-      if (parent.containsKey("ancestor_ids_ss")) { // sigh: getFieldValues returns null, not empty collection
+      if (parent.containsKey(
+          "ancestor_ids_ss")) { // sigh: getFieldValues returns null, not empty collection
        doc.addField("ancestor_ids_ss", parent.getFieldValues("ancestor_ids_ss"));
      }
    }
-
+
    for (int i = 0; i < test_path.size(); i++) {
      // NOTE: '<' not '<=' .. we only include paths we are descendents of, not our full path...
      numDocsDescendentFromPath.merge(joinPath(test_path.subList(0, i)), 1, Math::addExact);
    }
-
+
    if (0 < maxDepthAndBreadth) {
      final int numDirectKids = TestUtil.nextInt(random(), 0, Math.min(4, maxDepthAndBreadth));
      doc.setField("num_direct_kids_s", "" + numDirectKids);
@@ -319,7 +380,8 @@ private synchronized SolrInputDocument buildRandomDoc(SolrInputDocument parent,
        doc.addField(kidType, kid);
        // order matters: if we add the Collection first, SolrInputDocument will try to reuse it
        doc.addField("descendent_ids_ss", kid.getFieldValue("id"));
-        if (kid.containsKey("descendent_ids_ss")) { // sigh: getFieldValues returns null, not empty collection
+        if (kid.containsKey(
+            "descendent_ids_ss")) { // sigh: getFieldValues returns null, not empty collection
          doc.addField("descendent_ids_ss", kid.getFieldValues("descendent_ids_ss"));
        }
      }
@@ -328,107 +390,136 @@ private synchronized SolrInputDocument buildRandomDoc(SolrInputDocument parent,
      return doc;
    }
-
-    /**
-     * Loops over the 'model' of every document we've indexed, asserting that
-     * parent/child queries wrapping an 'id:foo' using various paths
-     * match the expected ancestors/descendents
+    /**
+     * Loops over the 'model' of every document we've indexed, asserting that parent/child queries
+     * wrapping an 'id:foo' using various paths match the expected ancestors/descendents
     */
    public void checkParentAndChildQueriesOfEachDocument() {
      assertFalse("You didn't build any docs", allDocs.isEmpty());
-
+
      for (String doc_id : allDocs.keySet()) {
        final String doc_path = allDocs.get(doc_id).getFieldValue("test_path_s").toString();
-
-        if ( ! doc_path.equals("/") ) {
-
-          // doc_id -> descendentId must have at least one ancestor (since it's not a root level document)
+
+        if (!doc_path.equals("/")) {
+
+          // doc_id -> descendentId must have at least one ancestor (since it's not a root level
+          // document)
          final String descendentId = doc_id;
          assert allDocs.get(descendentId).containsKey("ancestor_ids_ss");
-          final List allAncestorIds = new ArrayList<>(allDocs.get(descendentId).getFieldValues("ancestor_ids_ss"));
-
+          final List allAncestorIds =
+              new ArrayList<>(allDocs.get(descendentId).getFieldValues("ancestor_ids_ss"));
+
          // pick a random ancestor to use in our testing...
-          final String ancestorId = allAncestorIds.get(random().nextInt(allAncestorIds.size())).toString();
-          final String ancestor_path = allDocs.get(ancestorId).getFieldValue("test_path_s").toString();
-
-          final Collection allOfAncestorsDescendentIds
-            = allDocs.get(ancestorId).getFieldValues("descendent_ids_ss");
-
-          assertTrue("Sanity check " + ancestorId + " ancestor of " + descendentId,
-                     allOfAncestorsDescendentIds.contains(descendentId));
-
-          // now we should be able to assert that a 'parent' query wrapped around a query for the descendentId
-          // using the ancestor_path should match exactly one doc: our ancestorId...
-          assertQ(req(parentQueryMaker(ancestor_path, "id:" + descendentId),
-                      "_trace_path_tested", ancestor_path,
-                      "fl", "id",
-                      "indent", "true")
-                  , "//result/@numFound=1"
-                  , "//doc/str[@name='id'][.='"+ancestorId+"']"
-                  );
-
-          // meanwhile, a 'child' query wrapped around a query for the ancestorId, using the ancestor_path,
-          // should match all of its descendents (for simplicity we'll check just the numFound and the
-          // 'descendentId' we started with)
-          assertQ(req(childQueryMaker(ancestor_path, "id:" + ancestorId),
-                      "_trace_path_tested", ancestor_path,
-                      "rows", "9999",
-                      "fl", "id",
-                      "indent", "true")
-                  , "//result/@numFound="+allOfAncestorsDescendentIds.size()
-                  , "//doc/str[@name='id'][.='"+descendentId+"']"
-                  );
-
+          final String ancestorId =
+              allAncestorIds.get(random().nextInt(allAncestorIds.size())).toString();
+          final String ancestor_path =
+              allDocs.get(ancestorId).getFieldValue("test_path_s").toString();
+
+          final Collection allOfAncestorsDescendentIds =
+              allDocs.get(ancestorId).getFieldValues("descendent_ids_ss");
+
+          assertTrue(
+              "Sanity check " + ancestorId + " ancestor of " + descendentId,
+              allOfAncestorsDescendentIds.contains(descendentId));
+
+          // now we should be able to assert that a 'parent' query wrapped around a query for the
+          // descendentId using the ancestor_path should match exactly one doc: our ancestorId...
+          assertQ(
+              req(
                  parentQueryMaker(ancestor_path, "id:" + descendentId),
                  "_trace_path_tested",
                  ancestor_path,
                  "fl",
                  "id",
                  "indent",
                  "true"),
              "//result/@numFound=1",
              "//doc/str[@name='id'][.='" + ancestorId + "']");

          // meanwhile, a 'child' query wrapped around a query for the ancestorId, using the
          // ancestor_path, should match all of its descendents (for simplicity we'll check just
          // the numFound and the 'descendentId' we started with)
          assertQ(
              req(
                  childQueryMaker(ancestor_path, "id:" + ancestorId),
                  "_trace_path_tested",
                  ancestor_path,
                  "rows",
                  "9999",
                  "fl",
                  "id",
                  "indent",
                  "true"),
              "//result/@numFound=" + allOfAncestorsDescendentIds.size(),
              "//doc/str[@name='id'][.='" + descendentId + "']");
        }
-
-        // regardless of whether doc_id has an ancestor or not, a 'parent' query with a path that isn't a
-        // prefix of the path of the (child) doc_id in the wrapped query should match 0 docs w/o failing
-        assertQ(req(parentQueryMaker("/xxx/yyy", "id:" + doc_id),
-                    "_trace_path_tested", "/xxx/yyy",
-                    "indent", "true")
-                , "//result/@numFound=0");
-
-        // likewise: a 'child' query wrapped around a query for our doc_id (regardless of whether it has
-        // any kids), using a path that doesn't start with the same prefix as doc_id, should match 0
-        // docs w/o failing
-        assertQ(req(childQueryMaker("/xxx/yyy", "id:" + doc_id),
-                    "_trace_path_tested", "/xxx/yyy",
-                    "indent", "true")
-                , "//result/@numFound=0");
-
-        // lastly: wrapping a child query around a query for our doc_id, using a path that "extends"
+
+        // regardless of whether doc_id has an ancestor or not, a 'parent' query with a path that
+        // isn't a prefix of the path of the (child) doc_id in the wrapped query should match 0 docs
+        // w/o failing
+        assertQ(
+            req(
+                parentQueryMaker("/xxx/yyy", "id:" + doc_id),
+                "_trace_path_tested",
+                "/xxx/yyy",
+                "indent",
+                "true"),
+            "//result/@numFound=0");
+
+        // likewise: a 'child' query wrapped around a query for our doc_id (regardless of whether it
+        // has any kids), using a path that doesn't start with the same prefix as doc_id, should
+        // match 0 docs w/o failing
+        assertQ(
+            req(
+                childQueryMaker("/xxx/yyy",
"id:" + doc_id), + "_trace_path_tested", + "/xxx/yyy", + "indent", + "true"), + "//result/@numFound=0"); + + // lastly: wrapping a child query around a query for our doc_id, using a path that "extends" // the doc_id's path should always get 0 results if that path doesn't match any actual kids // (regardless of wether doc_id has any children/descendents) - assertQ(req(childQueryMaker(doc_path + "/xxx/yyy", "id:" + doc_id), - "_trace_path_tested", doc_path + "/xxx/yyy", - "indent", "true") - , "//result/@numFound=0"); + assertQ( + req( + childQueryMaker(doc_path + "/xxx/yyy", "id:" + doc_id), + "_trace_path_tested", + doc_path + "/xxx/yyy", + "indent", + "true"), + "//result/@numFound=0"); } } - - /** - * recursively check path permutations using *:* inner query, asserting that the - * only docs matched have the expected path, and at least one kid (since this is the "parents" parser) + /** + * recursively check path permutations using *:* inner query, asserting that the + * only docs matched have the expected path, and at least one kid (since this is the "parents" + * parser) * - * (using *:* as our inner query keeps the validation simple and also helps stress out - * risk of matching incorrect docs if the 'which' param is wrong) + *

(using *:* as our inner query keeps the validation simple and also helps + * stress out risk of matching incorrect docs if the 'which' param is wrong) * * @return total number of queries checked (assuming no assertion failures) */ public int recursiveCheckParentQueryOfAllChildren(List parent_path) { final String p = joinPath(parent_path); final int expectedParents = numDocsWithPathWithKids.getOrDefault(p, 0); - assertQ(req(parentQueryMaker(p, "*:*"), - "rows", "9999", - "_trace_path_tested", p, - "fl", "test_path_s,num_direct_kids_s", - "indent", "true") - , "//result/@numFound="+expectedParents - , "count(//doc)="+expectedParents - , "count(//doc/str[@name='test_path_s'][.='"+p+"'])="+expectedParents - , "0=count(//doc/str[@name='num_direct_kids_s'][.='0'])" - ); + assertQ( + req( + parentQueryMaker(p, "*:*"), + "rows", + "9999", + "_trace_path_tested", + p, + "fl", + "test_path_s,num_direct_kids_s", + "indent", + "true"), + "//result/@numFound=" + expectedParents, + "count(//doc)=" + expectedParents, + "count(//doc/str[@name='test_path_s'][.='" + p + "'])=" + expectedParents, + "0=count(//doc/str[@name='num_direct_kids_s'][.='0'])"); int numChecked = 1; // no point in recursing on the current path if we already have no results found... @@ -441,64 +532,82 @@ public int recursiveCheckParentQueryOfAllChildren(List parent_path) { } return numChecked; } - - /** + + /** * This implements the "safe query based on parent path" rules we're sanity checking. * * @param parent_path the nest path of the parents to consider - * @param inner_child_query the specific children whose ancestors we are looking for, must be simple string *:* or id:foo + * @param inner_child_query the specific children whose ancestors we are looking for, must be + * simple string *:* or id:foo */ private SolrParams parentQueryMaker(String parent_path, String inner_child_query) { assertValidPathSytax(parent_path); final boolean verbose = random().nextBoolean(); - + if (parent_path.equals("/")) { if (verbose) { - return params("q", "{!parent which=$parent_filt v=$child_q}", - "parent_filt", "(*:* -_nest_path_:*)", - "child_q", "(+" + inner_child_query + " +_nest_path_:*)"); + return params( + "q", "{!parent which=$parent_filt v=$child_q}", + "parent_filt", "(*:* -_nest_path_:*)", + "child_q", "(+" + inner_child_query + " +_nest_path_:*)"); } else { - return params("q", "{!parent which='(*:* -_nest_path_:*)'}(+" + inner_child_query + " +_nest_path_:*)"); + return params( + "q", + "{!parent which='(*:* -_nest_path_:*)'}(+" + inner_child_query + " +_nest_path_:*)"); } } // else... if (verbose) { final String path = parent_path + "/"; - return params("q", "{!parent which=$parent_filt v=$child_q}", - "parent_filt", "(*:* -{!prefix f='_nest_path_' v='"+path+"'})", - "child_q", "(+" + inner_child_query + " +{!prefix f='_nest_path_' v='"+path+"'})"); + return params( + "q", "{!parent which=$parent_filt v=$child_q}", + "parent_filt", "(*:* -{!prefix f='_nest_path_' v='" + path + "'})", + "child_q", "(+" + inner_child_query + " +{!prefix f='_nest_path_' v='" + path + "'})"); } else { // '/' has to be escaped other wise it will be treated as a regex query... // (and of course '\' escaping is the java syntax as well, we have to double it) final String path = (parent_path + "/").replace("/", "\\/"); // ...and when used inside the 'which' param it has to be escaped *AGAIN* because of // the "quoted" localparam evaluation layer... 
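The comment above describes two layers of escaping before the non-verbose query is built below: each '/' in the nest path must be regex-escaped for the _nest_path_ field, then escaped once more inside the quoted 'which' localparam. A standalone sketch of those rules for non-root paths (the helper name is hypothetical, not part of the test):

```java
// Hypothetical helper distilling the escaping rules from the comments above,
// for non-root nest paths only. The single-escaped form goes into the wrapped
// child query; the double-escaped form goes inside the quoted 'which' param.
final class NestPathEscapingSketch {
  static String parentQuery(String parentPath, String innerChildQuery) {
    assert parentPath.startsWith("/") && !parentPath.equals("/");
    String once = (parentPath + "/").replace("/", "\\/"); // regex-escape each '/'
    String twice = once.replace("\\/", "\\\\/"); // escape again for the quoted localparam
    return "{!parent which='(*:* -_nest_path_:" + twice + "*)'}"
        + "(+" + innerChildQuery + " +_nest_path_:" + once + "*)";
  }
}
```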
- return params("q", "{!parent which='(*:* -_nest_path_:" + path.replace("\\/","\\\\/") + "*)'}" - + "(+" + inner_child_query + " +_nest_path_:" + path + "*)"); + return params( + "q", + "{!parent which='(*:* -_nest_path_:" + + path.replace("\\/", "\\\\/") + + "*)'}" + + "(+" + + inner_child_query + + " +_nest_path_:" + + path + + "*)"); } } - /** - * recursively check path permutations using *:* inner query, asserting that the + /** + * recursively check path permutations using *:* inner query, asserting that the * only docs matched have paths that include the specified path as a (strict) prefix * - * (using *:* as our inner query keeps the validation simple and also helps stress out - * risk of matching incorrect docs if the 'of' param is wrong) + *

(using *:* as our inner query keeps the validation simple and also helps + * stress out risk of matching incorrect docs if the 'of' param is wrong) * * @return total number of queries checked (assuming no assertion failures) */ public int recursiveCheckChildQueryOfAllParents(List parent_path) { final String p = joinPath(parent_path); final int expectedMatches = numDocsDescendentFromPath.getOrDefault(p, 0); - assertQ(req(childQueryMaker(p, "*:*"), - "rows", "9999", - "_trace_path_tested", p, - "fl", "test_path_s", - "indent", "true") - , "//result/@numFound="+expectedMatches - , "count(//doc)="+expectedMatches - , "count(//doc/str[@name='test_path_s'][starts-with(., '"+p+"')])="+expectedMatches - ); + assertQ( + req( + childQueryMaker(p, "*:*"), + "rows", + "9999", + "_trace_path_tested", + p, + "fl", + "test_path_s", + "indent", + "true"), + "//result/@numFound=" + expectedMatches, + "count(//doc)=" + expectedMatches, + "count(//doc/str[@name='test_path_s'][starts-with(., '" + p + "')])=" + expectedMatches); int numChecked = 1; // no point in recursing on the current path if we already have no results found... @@ -512,45 +621,59 @@ public int recursiveCheckChildQueryOfAllParents(List parent_path) { return numChecked; } - /** + /** * This implements the "safe query based on parent path" rules we're sanity checking. * * @param parent_path the nest path of the parents to consider - * @param inner_parent_query the specific parents whose descendents we are looking for, must be simple string *:* or id:foo + * @param inner_parent_query the specific parents whose descendents we are looking for, must be + * simple string *:* or id:foo */ private SolrParams childQueryMaker(String parent_path, String inner_parent_query) { assertValidPathSytax(parent_path); final boolean verbose = random().nextBoolean(); - + if (parent_path.equals("/")) { if (verbose) { - return params("q", "{!child of=$parent_filt v=$parent_q})", - "parent_filt", "(*:* -_nest_path_:*)", - "parent_q", "(+" + inner_parent_query + " -_nest_path_:*)"); + return params( + "q", "{!child of=$parent_filt v=$parent_q})", + "parent_filt", "(*:* -_nest_path_:*)", + "parent_q", "(+" + inner_parent_query + " -_nest_path_:*)"); } else { - return params("q", "{!child of='(*:* -_nest_path_:*)'}(+" + inner_parent_query + " -_nest_path_:*)"); + return params( + "q", + "{!child of='(*:* -_nest_path_:*)'}(+" + inner_parent_query + " -_nest_path_:*)"); } } // else... - + if (verbose) { - return params("q", "{!child of=$parent_filt v=$parent_q})", - "parent_filt", "(*:* -{!prefix f='_nest_path_' v='"+parent_path+"/'})", - "parent_q", "(+" + inner_parent_query + " +{!field f='_nest_path_' v='"+parent_path+"'})"); + return params( + "q", "{!child of=$parent_filt v=$parent_q})", + "parent_filt", "(*:* -{!prefix f='_nest_path_' v='" + parent_path + "/'})", + "parent_q", + "(+" + inner_parent_query + " +{!field f='_nest_path_' v='" + parent_path + "'})"); } else { // '/' has to be escaped other wise it will be treated as a regex query... // (and of course '\' escaping is the java syntax as well, we have to double it) final String exact_path = parent_path.replace("/", "\\/"); // ...and when used inside the 'which' param it has to be escaped *AGAIN* because of // the "quoted" localparam evaluation layer... 
- final String prefix_path = (parent_path + "/").replace("/","\\\\/"); - return params("q", "{!child of='(*:* -_nest_path_:"+prefix_path+"*)'}" - + "(+" + inner_parent_query + " +_nest_path_:" + exact_path + ")"); + final String prefix_path = (parent_path + "/").replace("/", "\\\\/"); + return params( + "q", + "{!child of='(*:* -_nest_path_:" + + prefix_path + + "*)'}" + + "(+" + + inner_parent_query + + " +_nest_path_:" + + exact_path + + ")"); } } private void assertValidPathSytax(String path) { assert path.startsWith("/"); - assert (1 == path.length()) ^ ! path.endsWith("/"); + assert (1 == path.length()) ^ !path.endsWith("/"); } } } diff --git a/solr/core/src/test/org/apache/solr/update/TestUpdate.java b/solr/core/src/test/org/apache/solr/update/TestUpdate.java index b7bae0f54b4..6882fa1002b 100644 --- a/solr/core/src/test/org/apache/solr/update/TestUpdate.java +++ b/solr/core/src/test/org/apache/solr/update/TestUpdate.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.util.concurrent.Callable; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; @@ -28,7 +27,7 @@ public class TestUpdate extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig-tlog.xml","schema15.xml"); + initCore("solrconfig-tlog.xml", "schema15.xml"); } @Test @@ -40,12 +39,11 @@ public void testUpdatableDocs() throws Exception { doUpdateTest(() -> null); // do with commits - doUpdateTest(() -> { - assertU(commit("softCommit","false")); - return null; - }); - - + doUpdateTest( + () -> { + assertU(commit("softCommit", "false")); + return null; + }); } public void doUpdateTest(Callable afterUpdate) throws Exception { @@ -54,176 +52,217 @@ public void doUpdateTest(Callable afterUpdate) throws Exception { long version; - version = addAndGetVersion(sdoc("id","1", "val_i",5, "copyfield_source","a"), null); + version = addAndGetVersion(sdoc("id", "1", "val_i", 5, "copyfield_source", "a"), null); afterUpdate.call(); - version = addAndGetVersion(sdoc("id","1", "val_is",map("add",10), "copyfield_source",map("add","b")), null); + version = + addAndGetVersion( + sdoc("id", "1", "val_is", map("add", 10), "copyfield_source", map("add", "b")), null); afterUpdate.call(); - version = addAndGetVersion(sdoc("id","1", "val_is",map("add",5)), null); + version = addAndGetVersion(sdoc("id", "1", "val_is", map("add", 5)), null); afterUpdate.call(); - assertJQ(req("qt","/get", "id","1", "fl","id,*_i,*_is,copyfield_*") - ,"=={'doc':{'id':'1', 'val_i':5, 'val_is':[10,5], 'copyfield_source':['a','b']}}" // real-time get should not return stored copyfield targets - ); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id,*_i,*_is,copyfield_*"), + "=={'doc':{'id':'1', 'val_i':5, 'val_is':[10,5], 'copyfield_source':['a','b']}}" // real-time get should not return stored copyfield targets + ); - version = addAndGetVersion(sdoc("id","1", "val_is",map("add",-1), "val_i",map("set",100)), null); + version = + addAndGetVersion(sdoc("id", "1", "val_is", map("add", -1), "val_i", map("set", 100)), null); afterUpdate.call(); - assertJQ(req("qt","/get", "id","1", "fl","id,*_i,*_is") - ,"=={'doc':{'id':'1', 'val_i':100, 'val_is':[10,5,-1]}}" - ); - + assertJQ( + req("qt", "/get", "id", "1", "fl", "id,*_i,*_is"), + "=={'doc':{'id':'1', 'val_i':100, 'val_is':[10,5,-1]}}"); // Do a search to get all stored fields back and make sure that the stored copyfield target only // has one copy of the source. 
This may not be supported forever! - assertU(commit("softCommit","true")); - assertJQ(req("q","*:*", "fl","id,*_i,*_is,copyfield_*") - ,"/response/docs/[0]=={'id':'1', 'val_i':100, 'val_is':[10,5,-1], 'copyfield_source':['a','b'], 'copyfield_dest_ss':['a','b']}" - ); - + assertU(commit("softCommit", "true")); + assertJQ( + req("q", "*:*", "fl", "id,*_i,*_is,copyfield_*"), + "/response/docs/[0]=={'id':'1', 'val_i':100, 'val_is':[10,5,-1], 'copyfield_source':['a','b'], 'copyfield_dest_ss':['a','b']}"); long version2; - SolrException se = expectThrows(SolrException.class, - () -> addAndGetVersion(sdoc("id","1", "val_is",map("add",-100), "_version_",2), null)); + SolrException se = + expectThrows( + SolrException.class, + () -> + addAndGetVersion( + sdoc("id", "1", "val_is", map("add", -100), "_version_", 2), null)); assertEquals(409, se.code()); // try bad version added as a request param - se = expectThrows(SolrException.class, - () -> addAndGetVersion(sdoc("id","1", "val_is",map("add",-100)), params("_version_","2"))); + se = + expectThrows( + SolrException.class, + () -> + addAndGetVersion( + sdoc("id", "1", "val_is", map("add", -100)), params("_version_", "2"))); assertEquals(409, se.code()); // try good version added as a field in the doc - version = addAndGetVersion(sdoc("id","1", "val_is",map("add",-100), "_version_",version), null); + version = + addAndGetVersion(sdoc("id", "1", "val_is", map("add", -100), "_version_", version), null); afterUpdate.call(); // try good version added as a request parameter - version = addAndGetVersion(sdoc("id","1", "val_is",map("add",-200)), params("_version_",Long.toString(version))); + version = + addAndGetVersion( + sdoc("id", "1", "val_is", map("add", -200)), + params("_version_", Long.toString(version))); afterUpdate.call(); - assertJQ(req("qt","/get", "id","1", "fl","id,*_i,*_is") - ,"=={'doc':{'id':'1', 'val_i':100, 'val_is':[10,5,-1,-100,-200]}}" - ); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id,*_i,*_is"), + "=={'doc':{'id':'1', 'val_i':100, 'val_is':[10,5,-1,-100,-200]}}"); // extra field should just be treated as a "set" - version = addAndGetVersion(sdoc("id","1", "val_is",map("add",-300), "val_i",2), null); + version = addAndGetVersion(sdoc("id", "1", "val_is", map("add", -300), "val_i", 2), null); afterUpdate.call(); - assertJQ(req("qt","/get", "id","1", "fl","id,*_i,*_is") - ,"=={'doc':{'id':'1', 'val_i':2, 'val_is':[10,5,-1,-100,-200,-300]}}" - ); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id,*_i,*_is"), + "=={'doc':{'id':'1', 'val_i':2, 'val_is':[10,5,-1,-100,-200,-300]}}"); // a null value should be treated as "remove" - version = addAndGetVersion(sdoc("id","1", "val_is",map("add",-400), "val_i",null), null); + version = addAndGetVersion(sdoc("id", "1", "val_is", map("add", -400), "val_i", null), null); afterUpdate.call(); - assertJQ(req("qt","/get", "id","1", "fl","id,*_i,*_is") - ,"=={'doc':{'id':'1', 'val_is':[10,5,-1,-100,-200,-300,-400]}}" - ); - + assertJQ( + req("qt", "/get", "id", "1", "fl", "id,*_i,*_is"), + "=={'doc':{'id':'1', 'val_is':[10,5,-1,-100,-200,-300,-400]}}"); version = deleteAndGetVersion("1", null); afterUpdate.call(); // test that updating a non-existing doc fails if we set _version_=1 - se = expectThrows(SolrException.class, - () -> addAndGetVersion(sdoc("id","1", "val_is",map("add",-101), "_version_","1"), null)); + se = + expectThrows( + SolrException.class, + () -> + addAndGetVersion( + sdoc("id", "1", "val_is", map("add", -101), "_version_", "1"), null)); assertEquals(409, se.code()); // test 
that by default we can update a non-existing doc - version = addAndGetVersion(sdoc("id","1", "val_i",102, "val_is",map("add",-102)), null); + version = addAndGetVersion(sdoc("id", "1", "val_i", 102, "val_is", map("add", -102)), null); afterUpdate.call(); - assertJQ(req("qt","/get", "id","1", "fl","id,val*") - ,"=={'doc':{'id':'1', 'val_i':102, 'val_is':[-102]}}" - ); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id,val*"), + "=={'doc':{'id':'1', 'val_i':102, 'val_is':[-102]}}"); - version = addAndGetVersion(sdoc("id","1", "val_i",5), null); + version = addAndGetVersion(sdoc("id", "1", "val_i", 5), null); afterUpdate.call(); - version = addAndGetVersion(sdoc("id","1", - "val_is",map("inc",1), - "val2_i",map("inc","1"), - "val2_f",map("inc",1), - "val2_d",map("inc","1.0"), - "val2_l",map("inc",1) - ), - null); + version = + addAndGetVersion( + sdoc( + "id", + "1", + "val_is", + map("inc", 1), + "val2_i", + map("inc", "1"), + "val2_f", + map("inc", 1), + "val2_d", + map("inc", "1.0"), + "val2_l", + map("inc", 1)), + null); afterUpdate.call(); - assertJQ(req("qt","/get", "id","1", "fl","id,val*") - ,"=={'doc':{'id':'1', 'val_i':5, 'val_is':[1], 'val2_i':1, 'val2_f':1.0, 'val2_d':1.0, 'val2_l':1}}" - ); - - version = addAndGetVersion(sdoc("id","1", - "val_is",map("inc","-5"), - "val2_i",map("inc",-5), - "val2_f",map("inc","-5.0"), - "val2_d",map("inc",-5), - "val2_l",map("inc","-5") - ), - null); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id,val*"), + "=={'doc':{'id':'1', 'val_i':5, 'val_is':[1], 'val2_i':1, 'val2_f':1.0, 'val2_d':1.0, 'val2_l':1}}"); + + version = + addAndGetVersion( + sdoc( + "id", + "1", + "val_is", + map("inc", "-5"), + "val2_i", + map("inc", -5), + "val2_f", + map("inc", "-5.0"), + "val2_d", + map("inc", -5), + "val2_l", + map("inc", "-5")), + null); afterUpdate.call(); - assertJQ(req("qt","/get", "id","1", "fl","id,val*") - ,"=={'doc':{'id':'1', 'val_i':5, 'val_is':[-4], 'val2_i':-4, 'val2_f':-4.0, 'val2_d':-4.0, 'val2_l':-4}}" - ); - - version = addAndGetVersion(sdoc("id","1", - "val_is",map("inc","2000000000"), - "val2_i",map("inc",-2000000000), - "val2_f",map("inc","1e+20"), - "val2_d",map("inc",-1.2345678901e+100), - "val2_l",map("inc","5000000000") - ), - null); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id,val*"), + "=={'doc':{'id':'1', 'val_i':5, 'val_is':[-4], 'val2_i':-4, 'val2_f':-4.0, 'val2_d':-4.0, 'val2_l':-4}}"); + + version = + addAndGetVersion( + sdoc( + "id", + "1", + "val_is", + map("inc", "2000000000"), + "val2_i", + map("inc", -2000000000), + "val2_f", + map("inc", "1e+20"), + "val2_d", + map("inc", -1.2345678901e+100), + "val2_l", + map("inc", "5000000000")), + null); afterUpdate.call(); - assertJQ(req("qt","/get", "id","1", "fl","id,val*") - ,"=={'doc':{'id':'1', 'val_i':5, 'val_is':[1999999996], 'val2_i':-2000000004, 'val2_f':1.0E20, 'val2_d':-1.2345678901e+100, 'val2_l':4999999996}}" - ); - + assertJQ( + req("qt", "/get", "id", "1", "fl", "id,val*"), + "=={'doc':{'id':'1', 'val_i':5, 'val_is':[1999999996], 'val2_i':-2000000004, 'val2_f':1.0E20, 'val2_d':-1.2345678901e+100, 'val2_l':4999999996}}"); // remove some fields - version = addAndGetVersion(sdoc( - "id", "1", - "val_is", map("set",null), - "val2_f", map("set",null) - ), - null); + version = + addAndGetVersion( + sdoc( + "id", "1", + "val_is", map("set", null), + "val2_f", map("set", null)), + null); afterUpdate.call(); - assertJQ(req("qt","/get", "id","1", "fl","id,val*") - ,"=={'doc':{'id':'1', 'val_i':5, 'val2_i':-2000000004, 'val2_d':-1.2345678901e+100, 
'val2_l':4999999996}}" - ); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id,val*"), + "=={'doc':{'id':'1', 'val_i':5, 'val2_i':-2000000004, 'val2_d':-1.2345678901e+100, 'val2_l':4999999996}}"); // test that updating a unique id results in failure. ignoreException("Invalid update of id field"); - se = expectThrows(SolrException.class, - () -> addAndGetVersion( - sdoc("id", map("set","1"), "val_is", map("inc","2000000000")), null) - ); + se = + expectThrows( + SolrException.class, + () -> + addAndGetVersion( + sdoc("id", map("set", "1"), "val_is", map("inc", "2000000000")), null)); resetExceptionIgnores(); assertEquals(400, se.code()); - assertTrue(se.getMessage().contains("Updating unique key, version or route field is not allowed")); + assertTrue( + se.getMessage().contains("Updating unique key, version or route field is not allowed")); afterUpdate.call(); - assertJQ(req("qt","/get", "id","1", "fl","id,val*") - ,"=={'doc':{'id':'1', 'val_i':5, 'val2_i':-2000000004, 'val2_d':-1.2345678901e+100, 'val2_l':4999999996}}" - ); - - // nothing should have changed - check with a normal query that we didn't create a duplicate - assertU(commit("softCommit","false")); - assertJQ(req("q","id:1", "fl","id") - ,"/response/numFound==1" - ); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id,val*"), + "=={'doc':{'id':'1', 'val_i':5, 'val2_i':-2000000004, 'val2_d':-1.2345678901e+100, 'val2_l':4999999996}}"); + // nothing should have changed - check with a normal query that we didn't create a duplicate + assertU(commit("softCommit", "false")); + assertJQ(req("q", "id:1", "fl", "id"), "/response/numFound==1"); } @Test // SOLR-8866 public void testUpdateLogThrowsForUnknownTypes() throws IOException { SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", "444"); - doc.addField("text", new Object());//Object shouldn't be serialized later... + doc.addField("text", new Object()); // Object shouldn't be serialized later... 
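The bare Object above is rejected because the update log can only serialize field values whose types it knows. A hedged pre-flight check in that spirit (the whitelist is illustrative and narrower than the codec's real type table; the guard itself is not part of Solr):

```java
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.SolrInputField;

// Illustrative guard only: the real constraint is enforced by the update log's
// serialization, whose supported-type table is wider than this whitelist.
final class TlogTypeCheckSketch {
  static void assertLikelySerializable(SolrInputDocument doc) {
    for (SolrInputField field : doc) {
      for (Object value : field) {
        boolean known =
            value instanceof String
                || value instanceof Number
                || value instanceof Boolean
                || value instanceof java.util.Date
                || value instanceof byte[];
        if (!known) {
          throw new IllegalArgumentException(
              "field '" + field.getName() + "' holds unserializable " + value.getClass());
        }
      }
    }
  }
}
```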
AddUpdateCommand cmd = new AddUpdateCommand(req()); cmd.solrDoc = doc; @@ -231,11 +270,10 @@ public void testUpdateLogThrowsForUnknownTypes() throws IOException { h.getCore().getUpdateHandler().addDoc(cmd); // should throw } catch (SolrException e) { if (e.getMessage().contains("serialize")) { - return;//passed test + return; // passed test } throw e; } fail(); } - } diff --git a/solr/core/src/test/org/apache/solr/update/TransactionLogTest.java b/solr/core/src/test/org/apache/solr/update/TransactionLogTest.java index dfe741d551a..9a477100fc8 100644 --- a/solr/core/src/test/org/apache/solr/update/TransactionLogTest.java +++ b/solr/core/src/test/org/apache/solr/update/TransactionLogTest.java @@ -22,7 +22,6 @@ import java.util.List; import java.util.Locale; import java.util.UUID; - import org.apache.solr.SolrTestCase; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.update.TransactionLog.LogReader; @@ -32,8 +31,9 @@ public class TransactionLogTest extends SolrTestCase { @Test public void testBigLastAddSize() { - String tlogFileName = String.format(Locale.ROOT, UpdateLog.LOG_FILENAME_PATTERN, UpdateLog.TLOG_NAME, - Long.MAX_VALUE); + String tlogFileName = + String.format( + Locale.ROOT, UpdateLog.LOG_FILENAME_PATTERN, UpdateLog.TLOG_NAME, Long.MAX_VALUE); Path path = createTempDir(); Path logFile = path.resolve(tlogFileName); try (TransactionLog transactionLog = new TransactionLog(logFile, null)) { @@ -46,8 +46,9 @@ public void testBigLastAddSize() { @Test public void testUUID() throws IOException, InterruptedException { - String tlogFileName = String.format(Locale.ROOT, UpdateLog.LOG_FILENAME_PATTERN, UpdateLog.TLOG_NAME, - Long.MAX_VALUE); + String tlogFileName = + String.format( + Locale.ROOT, UpdateLog.LOG_FILENAME_PATTERN, UpdateLog.TLOG_NAME, Long.MAX_VALUE); Path path = createTempDir(); Path logFile = path.resolve(tlogFileName); UUID uuid = UUID.randomUUID(); diff --git a/solr/core/src/test/org/apache/solr/update/UpdateLogTest.java b/solr/core/src/test/org/apache/solr/update/UpdateLogTest.java index b9d9e4cca3d..2da8dd332b2 100644 --- a/solr/core/src/test/org/apache/solr/update/UpdateLogTest.java +++ b/solr/core/src/test/org/apache/solr/update/UpdateLogTest.java @@ -16,8 +16,10 @@ */ package org.apache.solr.update; -import java.util.List; +import static org.apache.solr.common.params.CommonParams.VERSION_FIELD; +import static org.hamcrest.core.StringContains.containsString; +import java.util.List; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.util.BytesRef; import org.apache.solr.SolrTestCaseJ4; @@ -30,15 +32,11 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.apache.solr.common.params.CommonParams.VERSION_FIELD; -import static org.hamcrest.core.StringContains.containsString; - public class UpdateLogTest extends SolrTestCaseJ4 { /** BytesRef that can be re-used to lookup doc with id "1" */ private static final BytesRef DOC_1_INDEXED_ID = new BytesRef("1"); - static UpdateLog ulog = null; @BeforeClass @@ -62,30 +60,35 @@ public static void afterClass() { /** * @see org.apache.solr.update.UpdateLog#applyPartialUpdates */ - public void testApplyPartialUpdatesOnMultipleInPlaceUpdatesInSequence() { + public void testApplyPartialUpdatesOnMultipleInPlaceUpdatesInSequence() { // Add a full update, two in-place updates and verify applying partial updates is working - ulogAdd(ulog, null, sdoc("id", "1", "title_s", "title1", "val1_i_dvo", "1", "_version_", "100")); + ulogAdd( + ulog, null, sdoc("id", "1", 
"title_s", "title1", "val1_i_dvo", "1", "_version_", "100")); ulogAdd(ulog, 100L, sdoc("id", "1", "price", "1000", "val1_i_dvo", "2", "_version_", "101")); ulogAdd(ulog, 101L, sdoc("id", "1", "val1_i_dvo", "3", "_version_", "102")); Object partialUpdate = ulog.lookup(DOC_1_INDEXED_ID); - SolrDocument partialDoc = RealTimeGetComponent.toSolrDoc((SolrInputDocument)((List)partialUpdate).get(4), - h.getCore().getLatestSchema()); - long prevVersion = (Long)((List)partialUpdate).get(3); - long prevPointer = (Long)((List)partialUpdate).get(2); - - assertEquals(3L, ((NumericDocValuesField)partialDoc.getFieldValue("val1_i_dvo")).numericValue()); + SolrDocument partialDoc = + RealTimeGetComponent.toSolrDoc( + (SolrInputDocument) ((List) partialUpdate).get(4), h.getCore().getLatestSchema()); + long prevVersion = (Long) ((List) partialUpdate).get(3); + long prevPointer = (Long) ((List) partialUpdate).get(2); + + assertEquals( + 3L, ((NumericDocValuesField) partialDoc.getFieldValue("val1_i_dvo")).numericValue()); assertFalse(partialDoc.containsKey("title_s")); - long returnVal = ulog.applyPartialUpdates(DOC_1_INDEXED_ID, prevPointer, prevVersion, null, partialDoc); + long returnVal = + ulog.applyPartialUpdates(DOC_1_INDEXED_ID, prevPointer, prevVersion, null, partialDoc); assertEquals(0, returnVal); assertEquals(1000, Integer.parseInt(partialDoc.getFieldValue("price").toString())); - assertEquals(3L, ((NumericDocValuesField)partialDoc.getFieldValue("val1_i_dvo")).numericValue()); + assertEquals( + 3L, ((NumericDocValuesField) partialDoc.getFieldValue("val1_i_dvo")).numericValue()); assertEquals("title1", partialDoc.getFieldValue("title_s")); - // Add a full update, commit, then two in-place updates, and verify that applying partial updates is working (since - // the prevTlog and prevTlog2 are retained after a commit + // Add a full update, commit, then two in-place updates, and verify that applying partial + // updates is working (since the prevTlog and prevTlog2 are retained after a commit ulogCommit(ulog); if (random().nextBoolean()) { // sometimes also try a second commit ulogCommit(ulog); @@ -94,67 +97,89 @@ public void testApplyPartialUpdatesOnMultipleInPlaceUpdatesInSequence() { ulogAdd(ulog, 200L, sdoc("id", "1", "val1_i_dvo", "5", "_version_", "201")); partialUpdate = ulog.lookup(DOC_1_INDEXED_ID); - partialDoc = RealTimeGetComponent.toSolrDoc((SolrInputDocument)((List)partialUpdate).get(4), h.getCore().getLatestSchema()); - prevVersion = (Long)((List)partialUpdate).get(3); - prevPointer = (Long)((List)partialUpdate).get(2); - - assertEquals(5L, ((NumericDocValuesField)partialDoc.getFieldValue("val1_i_dvo")).numericValue()); + partialDoc = + RealTimeGetComponent.toSolrDoc( + (SolrInputDocument) ((List) partialUpdate).get(4), h.getCore().getLatestSchema()); + prevVersion = (Long) ((List) partialUpdate).get(3); + prevPointer = (Long) ((List) partialUpdate).get(2); + + assertEquals( + 5L, ((NumericDocValuesField) partialDoc.getFieldValue("val1_i_dvo")).numericValue()); assertFalse(partialDoc.containsKey("title_s")); - returnVal = ulog.applyPartialUpdates(DOC_1_INDEXED_ID, prevPointer, prevVersion, null, partialDoc); + returnVal = + ulog.applyPartialUpdates(DOC_1_INDEXED_ID, prevPointer, prevVersion, null, partialDoc); assertEquals(0, returnVal); assertEquals(2000, Integer.parseInt(partialDoc.getFieldValue("price").toString())); - assertEquals(5L, ((NumericDocValuesField)partialDoc.getFieldValue("val1_i_dvo")).numericValue()); + assertEquals( + 5L, ((NumericDocValuesField) 
partialDoc.getFieldValue("val1_i_dvo")).numericValue()); assertEquals("title1", partialDoc.getFieldValue("title_s")); } - + @Test - public void testApplyPartialUpdatesAfterMultipleCommits() { - ulogAdd(ulog, null, sdoc("id", "1", "title_s", "title1", "val1_i_dvo", "1", "_version_", "100")); + public void testApplyPartialUpdatesAfterMultipleCommits() { + ulogAdd( + ulog, null, sdoc("id", "1", "title_s", "title1", "val1_i_dvo", "1", "_version_", "100")); ulogAdd(ulog, 100L, sdoc("id", "1", "price", "1000", "val1_i_dvo", "2", "_version_", "101")); ulogAdd(ulog, 101L, sdoc("id", "1", "val1_i_dvo", "3", "_version_", "102")); - // Do 3 commits, then in-place update, and verify that applying partial updates can't find full doc - for (int i=0; i<3; i++) - ulogCommit(ulog); + // Do 3 commits, then in-place update, and verify that applying partial updates can't find full + // doc + for (int i = 0; i < 3; i++) ulogCommit(ulog); ulogAdd(ulog, 101L, sdoc("id", "1", "val1_i_dvo", "6", "_version_", "300")); Object partialUpdate = ulog.lookup(DOC_1_INDEXED_ID); - SolrDocument partialDoc = RealTimeGetComponent.toSolrDoc((SolrInputDocument)((List)partialUpdate).get(4), h.getCore().getLatestSchema()); - long prevVersion = (Long)((List)partialUpdate).get(3); - long prevPointer = (Long)((List)partialUpdate).get(2); - - assertEquals(6L, ((NumericDocValuesField)partialDoc.getFieldValue("val1_i_dvo")).numericValue()); + SolrDocument partialDoc = + RealTimeGetComponent.toSolrDoc( + (SolrInputDocument) ((List) partialUpdate).get(4), h.getCore().getLatestSchema()); + long prevVersion = (Long) ((List) partialUpdate).get(3); + long prevPointer = (Long) ((List) partialUpdate).get(2); + + assertEquals( + 6L, ((NumericDocValuesField) partialDoc.getFieldValue("val1_i_dvo")).numericValue()); assertFalse(partialDoc.containsKey("title_s")); - long returnVal = ulog.applyPartialUpdates(DOC_1_INDEXED_ID, prevPointer, prevVersion, null, partialDoc); + long returnVal = + ulog.applyPartialUpdates(DOC_1_INDEXED_ID, prevPointer, prevVersion, null, partialDoc); assertEquals(-1, returnVal); } @Test public void testApplyPartialUpdatesDependingOnNonAddShouldThrowException() { - ulogAdd(ulog, null, sdoc("id", "1", "title_s", "title1", "val1_i_dvo", "1", "_version_", "100")); + ulogAdd( + ulog, null, sdoc("id", "1", "title_s", "title1", "val1_i_dvo", "1", "_version_", "100")); ulogDelete(ulog, "1", 500L, false); // dbi ulogAdd(ulog, 500L, sdoc("id", "1", "val1_i_dvo", "2", "_version_", "501")); ulogAdd(ulog, 501L, sdoc("id", "1", "val1_i_dvo", "3", "_version_", "502")); Object partialUpdate = ulog.lookup(DOC_1_INDEXED_ID); - SolrDocument partialDoc = RealTimeGetComponent.toSolrDoc((SolrInputDocument)((List)partialUpdate).get(4), h.getCore().getLatestSchema()); - long prevVersion = (Long)((List)partialUpdate).get(3); - long prevPointer = (Long)((List)partialUpdate).get(2); - - assertEquals(3L, ((NumericDocValuesField)partialDoc.getFieldValue("val1_i_dvo")).numericValue()); - assertEquals(502L, ((NumericDocValuesField)partialDoc.getFieldValue("_version_")).numericValue()); + SolrDocument partialDoc = + RealTimeGetComponent.toSolrDoc( + (SolrInputDocument) ((List) partialUpdate).get(4), h.getCore().getLatestSchema()); + long prevVersion = (Long) ((List) partialUpdate).get(3); + long prevPointer = (Long) ((List) partialUpdate).get(2); + + assertEquals( + 3L, ((NumericDocValuesField) partialDoc.getFieldValue("val1_i_dvo")).numericValue()); + assertEquals( + 502L, ((NumericDocValuesField) partialDoc.getFieldValue("_version_")).numericValue()); 
assertFalse(partialDoc.containsKey("title_s")); // If an in-place update depends on a non-add (i.e. DBI), assert that an exception is thrown. - SolrException ex = expectThrows(SolrException.class, () -> { - long returnVal = ulog.applyPartialUpdates(DOC_1_INDEXED_ID, prevPointer, prevVersion, null, partialDoc); - fail("502 depends on 501, 501 depends on 500, but 500 is a" - + " DELETE. This should've generated an exception. returnVal is: "+returnVal); - }); + SolrException ex = + expectThrows( + SolrException.class, + () -> { + long returnVal = + ulog.applyPartialUpdates( + DOC_1_INDEXED_ID, prevPointer, prevVersion, null, partialDoc); + fail( + "502 depends on 501, 501 depends on 500, but 500 is a" + + " DELETE. This should've generated an exception. returnVal is: " + + returnVal); + }); assertEquals(ex.toString(), SolrException.ErrorCode.INVALID_STATE.code, ex.code()); assertThat(ex.getMessage(), containsString("should've been either ADD or UPDATE_INPLACE")); assertThat(ex.getMessage(), containsString("looking for id=1")); @@ -162,35 +187,38 @@ public void testApplyPartialUpdatesDependingOnNonAddShouldThrowException() { @Test public void testApplyPartialUpdatesWithDelete() throws Exception { - ulogAdd(ulog, null, sdoc("id", "1", "title_s", "title1", "val1_i_dvo", "1", "_version_", "100")); + ulogAdd( + ulog, null, sdoc("id", "1", "title_s", "title1", "val1_i_dvo", "1", "_version_", "100")); ulogAdd(ulog, 100L, sdoc("id", "1", "val1_i_dvo", "2", "_version_", "101")); // in-place update ulogAdd(ulog, 101L, sdoc("id", "1", "val1_i_dvo", "3", "_version_", "102")); // in-place update - + // sanity check that the update log has one document, and RTG returns the document assertEquals(1, ulog.map.size()); - assertJQ(req("qt","/get", "id","1") - , "=={'doc':{ 'id':'1', 'val1_i_dvo':3, '_version_':102, 'title_s':'title1', " - // fields with default values - + "'inplace_updatable_int_with_default':666, 'inplace_updatable_float_with_default':42.0}}"); - + assertJQ( + req("qt", "/get", "id", "1"), + "=={'doc':{ 'id':'1', 'val1_i_dvo':3, '_version_':102, 'title_s':'title1', " + // fields with default values + + "'inplace_updatable_int_with_default':666, 'inplace_updatable_float_with_default':42.0}}"); + boolean dbq = random().nextBoolean(); ulogDelete(ulog, "1", 200L, dbq); // delete id:1 document if (dbq) { - assertNull(ulog.lookup(DOC_1_INDEXED_ID)); // any DBQ clears out the ulog, so this document shouldn't exist + assertNull( + ulog.lookup( + DOC_1_INDEXED_ID)); // any DBQ clears out the ulog, so this document shouldn't exist assertEquals(0, ulog.map.size()); assertTrue(String.valueOf(ulog.prevMap), ulog.prevMap == null || ulog.prevMap.size() == 0); assertTrue(String.valueOf(ulog.prevMap2), ulog.prevMap2 == null || ulog.prevMap2.size() == 0); // verify that the document is deleted, by doing an RTG call - assertJQ(req("qt","/get", "id","1"), "=={'doc':null}"); + assertJQ(req("qt", "/get", "id", "1"), "=={'doc':null}"); } else { // dbi - List entry = ((List)ulog.lookup(DOC_1_INDEXED_ID)); - assertEquals(UpdateLog.DELETE, (int)entry.get(UpdateLog.FLAGS_IDX) & UpdateLog.OPERATION_MASK); + List entry = ((List) ulog.lookup(DOC_1_INDEXED_ID)); + assertEquals( + UpdateLog.DELETE, (int) entry.get(UpdateLog.FLAGS_IDX) & UpdateLog.OPERATION_MASK); } } - /** - * Simulate a commit on a given updateLog - */ + /** Simulate a commit on a given updateLog */ private static void ulogCommit(UpdateLog ulog) { try (SolrQueryRequest req = req()) { CommitUpdateCommand commitCmd = new CommitUpdateCommand(req, false); @@ 
-212,7 +240,7 @@ private static void ulogDelete(UpdateLog ulog, String id, long version, boolean DeleteUpdateCommand cmd = new DeleteUpdateCommand(req); cmd.setVersion(version); if (dbq) { - cmd.query = ("id:"+id); + cmd.query = ("id:" + id); ulog.deleteByQuery(cmd); } else { cmd.id = id; @@ -223,14 +251,13 @@ private static void ulogDelete(UpdateLog ulog, String id, long version, boolean /** * Simulate an add on a given updateLog. - * <p>
- * This method, when prevVersion is passed in (i.e. for in-place update), represents an - * AddUpdateCommand that has undergone the merge process and inc/set operations have now been - * converted into actual values that just need to be written. - * </p> - * <p> - * NOTE: For test simplicity, the Solr input document must include the _version_ field. - * </p> + * + * <p>This method, when prevVersion is passed in (i.e. for in-place update), represents an + * AddUpdateCommand that has undergone the merge process and inc/set operations have now been + * converted into actual values that just need to be written. + * + * <p>NOTE: For test simplicity, the Solr input document must include the _version_ + * field. * * @param ulog The UpdateLog to apply a delete against * @param prevVersion If non-null, then this AddUpdateCommand represents an in-place update. @@ -248,12 +275,14 @@ private static void ulogAdd(UpdateLog ulog, Long prevVersion, SolrInputDocument } /** - * Helper method to construct an AddUpdateCommand for a SolrInputDocument - * in the context of the specified SolrQueryRequest. * - * NOTE: For test simplicity, the Solr input document must include the _version_ field. - */ - public static AddUpdateCommand buildAddUpdateCommand(final SolrQueryRequest req, final SolrInputDocument sdoc) { + * <p>
NOTE: For test simplicity, the Solr input document must include the _version_ + * field. + */ + public static AddUpdateCommand buildAddUpdateCommand( + final SolrQueryRequest req, final SolrInputDocument sdoc) { AddUpdateCommand cmd = new AddUpdateCommand(req); cmd.solrDoc = sdoc; assertTrue("", cmd.solrDoc.containsKey(VERSION_FIELD)); diff --git a/solr/core/src/test/org/apache/solr/update/UpdateParamsTest.java b/solr/core/src/test/org/apache/solr/update/UpdateParamsTest.java index 114ff6ac6d4..d4c26b7b4dc 100644 --- a/solr/core/src/test/org/apache/solr/update/UpdateParamsTest.java +++ b/solr/core/src/test/org/apache/solr/update/UpdateParamsTest.java @@ -17,60 +17,59 @@ package org.apache.solr.update; import java.util.HashMap; - +import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.MapSolrParams; import org.apache.solr.common.params.UpdateParams; import org.apache.solr.core.SolrCore; import org.apache.solr.handler.UpdateRequestHandler; import org.apache.solr.request.SolrQueryRequestBase; import org.apache.solr.response.SolrQueryResponse; -import org.apache.solr.SolrTestCaseJ4; import org.junit.BeforeClass; - - public class UpdateParamsTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema.xml"); } - - /** - * Tests that only update.chain and not update.processor works (SOLR-2105) - */ + + /** Tests that only update.chain and not update.processor works (SOLR-2105) */ public void testUpdateProcessorParamDeprecationRemoved() throws Exception { SolrCore core = h.getCore(); - + UpdateRequestHandler handler = new UpdateRequestHandler(); - handler.init( null ); - - MapSolrParams params = new MapSolrParams( new HashMap() ); + handler.init(null); + + MapSolrParams params = new MapSolrParams(new HashMap()); params.getMap().put("update.processor", "nonexistant"); // Add a single document SolrQueryResponse rsp = new SolrQueryResponse(); - SolrQueryRequestBase req = new SolrQueryRequestBase( core, params ) {}; - + SolrQueryRequestBase req = new SolrQueryRequestBase(core, params) {}; + // First check that the old param behaves as it should try { handler.handleRequestBody(req, rsp); assertTrue("Old param update.processor should not have any effect anymore", true); } catch (Exception e) { - assertFalse("Got wrong exception while testing update.chain", e.getMessage().equals("unknown UpdateRequestProcessorChain: nonexistant")); + assertFalse( + "Got wrong exception while testing update.chain", + e.getMessage().equals("unknown UpdateRequestProcessorChain: nonexistant")); } - + // Then check that the new param behaves correctly params.getMap().remove("update.processor"); - params.getMap().put(UpdateParams.UPDATE_CHAIN, "nonexistant"); + params.getMap().put(UpdateParams.UPDATE_CHAIN, "nonexistant"); req.setParams(params); try { handler.handleRequestBody(req, rsp); - assertFalse("Faulty update.chain parameter not causing an error - i.e. it is not detected", true); + assertFalse( + "Faulty update.chain parameter not causing an error - i.e. 
it is not detected", true); } catch (Exception e) { - assertEquals("Got wrong exception while testing update.chain", e.getMessage(), "unknown UpdateRequestProcessorChain: nonexistant"); + assertEquals( + "Got wrong exception while testing update.chain", + e.getMessage(), + "unknown UpdateRequestProcessorChain: nonexistant"); } - } - } diff --git a/solr/core/src/test/org/apache/solr/update/VersionInfoTest.java b/solr/core/src/test/org/apache/solr/update/VersionInfoTest.java index de68a5073da..0881ba3c332 100644 --- a/solr/core/src/test/org/apache/solr/update/VersionInfoTest.java +++ b/solr/core/src/test/org/apache/solr/update/VersionInfoTest.java @@ -30,11 +30,12 @@ public class VersionInfoTest extends SolrTestCaseJ4 { public void testMaxIndexedVersionFromIndex() throws Exception { initCore("solrconfig-tlog.xml", "schema-version-indexed.xml"); try (SolrQueryRequest r = req()) { - SchemaField v = r.getCore().getUpdateHandler().getUpdateLog().getVersionInfo().getVersionField(); + SchemaField v = + r.getCore().getUpdateHandler().getUpdateLog().getVersionInfo().getVersionField(); assertNotNull(v); assertTrue(v.indexed()); assertFalse(v.hasDocValues()); - + testMaxVersionLogic(r); } finally { deleteCore(); @@ -43,13 +44,14 @@ public void testMaxIndexedVersionFromIndex() throws Exception { @Test public void testMaxDocValuesVersionFromIndex() throws Exception { - initCore("solrconfig-tlog.xml","schema-version-dv.xml"); + initCore("solrconfig-tlog.xml", "schema-version-dv.xml"); try (SolrQueryRequest r = req()) { - SchemaField v = r.getCore().getUpdateHandler().getUpdateLog().getVersionInfo().getVersionField(); + SchemaField v = + r.getCore().getUpdateHandler().getUpdateLog().getVersionInfo().getVersionField(); assertNotNull(v); assertFalse(v.indexed()); assertTrue(v.hasDocValues()); - + testMaxVersionLogic(r); } finally { deleteCore(); @@ -83,10 +85,10 @@ protected void testMaxVersionLogic(SolrQueryRequest req) throws Exception { assertNotNull(vInfoMax); assertEquals(maxVersionFromUlog, vInfoMax); } - + // max version from ulog (and index) should be exactly the same as our single committed doc Long version = vInfo.getVersionFromIndex(idBytes); - assertNotNull("version info should not be null for test doc: "+docId, version); + assertNotNull("version info should not be null for test doc: " + docId, version); assertEquals(maxVersionFromUlog, version); int bucketHash = Hash.murmurhash3_x86_32(idBytes.bytes, idBytes.offset, idBytes.length, 0); @@ -97,7 +99,7 @@ protected void testMaxVersionLogic(SolrQueryRequest req) throws Exception { docId = Integer.toString(2); idBytes = new BytesRef(docId); assertU(adoc("id", docId)); - + try (SolrQueryRequest newReq = req()) { // max version direct from the index should not be null, and should still match what ulog // previously reported (since new doc is un-committed) @@ -105,18 +107,22 @@ protected void testMaxVersionLogic(SolrQueryRequest req) throws Exception { assertNotNull(vInfoMax); assertEquals(maxVersionFromUlog, vInfoMax); } - + maxVersionFromUlog = ulog.getMaxVersionFromIndex(); assertNotNull(maxVersionFromUlog); - assertTrue("max version in ulog should have increased since our last committed doc: " + - version + " ?< " + maxVersionFromUlog, - version < maxVersionFromUlog.longValue()); + assertTrue( + "max version in ulog should have increased since our last committed doc: " + + version + + " ?< " + + maxVersionFromUlog, + version < maxVersionFromUlog.longValue()); version = vInfo.getVersionFromIndex(idBytes); - assertNull("version info should be null 
for uncommited test doc: "+docId, version); + assertNull("version info should be null for uncommited test doc: " + docId, version); Long versionFromTLog = ulog.lookupVersion(idBytes); - assertNotNull("version from tlog should be non-null for uncommited test doc: "+docId, versionFromTLog); + assertNotNull( + "version from tlog should be non-null for uncommited test doc: " + docId, versionFromTLog); // now commit that 2nd doc assertU(commit()); @@ -125,13 +131,18 @@ protected void testMaxVersionLogic(SolrQueryRequest req) throws Exception { Long vInfoMax = vInfo.getMaxVersionFromIndex(newReq.getSearcher()); assertEquals(versionFromTLog, vInfoMax); } - assertEquals("committing doc should not have changed version from ulog", - versionFromTLog, ulog.lookupVersion(idBytes)); + assertEquals( + "committing doc should not have changed version from ulog", + versionFromTLog, + ulog.lookupVersion(idBytes)); Long versionFromIndex = version = vInfo.getVersionFromIndex(idBytes); - assertNotNull("version from index should be non-null for commited test doc: "+docId, versionFromIndex); - assertEquals("version from tlog and version from index should be the same", - versionFromTLog, versionFromIndex); - + assertNotNull( + "version from index should be non-null for commited test doc: " + docId, versionFromIndex); + assertEquals( + "version from tlog and version from index should be the same", + versionFromTLog, + versionFromIndex); + bucketHash = Hash.murmurhash3_x86_32(idBytes.bytes, idBytes.offset, idBytes.length, 0); bucket = vInfo.bucket(bucketHash); assertEquals(bucket.highest, version.longValue()); @@ -140,8 +151,10 @@ protected void testMaxVersionLogic(SolrQueryRequest req) throws Exception { CoreContainer coreContainer = req.getCore().getCoreContainer(); coreContainer.reload(req.getCore().getName()); maxVersionFromUlog = ulog.getMaxVersionFromIndex(); - assertEquals("after reload, max version from ulog should be equal to version of last doc added", - maxVersionFromUlog, versionFromIndex); + assertEquals( + "after reload, max version from ulog should be equal to version of last doc added", + maxVersionFromUlog, + versionFromIndex); // one more doc after reload docId = Integer.toString(3); @@ -162,7 +175,7 @@ protected void testMaxVersionLogic(SolrQueryRequest req) throws Exception { assertEquals(maxVersionFromUlog, vInfoMax); } version = vInfo.getVersionFromIndex(idBytes); - assertNotNull("version info should not be null for test doc: "+docId, version); + assertNotNull("version info should not be null for test doc: " + docId, version); assertEquals(maxVersionFromUlog, version); bucketHash = Hash.murmurhash3_x86_32(idBytes.bytes, idBytes.offset, idBytes.length, 0); diff --git a/solr/core/src/test/org/apache/solr/update/processor/AbstractAtomicUpdatesMultivalueTestBase.java b/solr/core/src/test/org/apache/solr/update/processor/AbstractAtomicUpdatesMultivalueTestBase.java index 70f6908a4d6..a2ccab1fd88 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/AbstractAtomicUpdatesMultivalueTestBase.java +++ b/solr/core/src/test/org/apache/solr/update/processor/AbstractAtomicUpdatesMultivalueTestBase.java @@ -19,6 +19,7 @@ import static org.hamcrest.CoreMatchers.hasItems; import static org.hamcrest.CoreMatchers.not; +import com.google.common.collect.ImmutableMap; import java.io.IOException; import java.time.ZonedDateTime; import java.util.Arrays; @@ -29,7 +30,6 @@ import java.util.UUID; import java.util.function.Function; import java.util.stream.Collectors; - import 
org.apache.solr.EmbeddedSolrServerTestBase; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer; @@ -39,13 +39,11 @@ import org.junit.BeforeClass; import org.junit.Test; -import com.google.common.collect.ImmutableMap; - public abstract class AbstractAtomicUpdatesMultivalueTestBase extends EmbeddedSolrServerTestBase { @BeforeClass public static void beforeClass() throws Exception { - System.setProperty("enable.update.log","true"); + System.setProperty("enable.update.log", "true"); initCore("solrconfig.xml", "schema.xml"); } @@ -58,22 +56,31 @@ public void before() throws SolrServerException, IOException { @Override public synchronized EmbeddedSolrServer getSolrClient() { - return new EmbeddedSolrServer(h.getCoreContainer(), DEFAULT_CORE_NAME, getRequestWriterSupplier()); + return new EmbeddedSolrServer( + h.getCoreContainer(), DEFAULT_CORE_NAME, getRequestWriterSupplier()); } - private static void assertQR(final String fieldName, final String queryValue, final int numFound) { - assertQ(req("q", fieldName + ":" + queryValue, "indent", "true"), "//result[@numFound = '" + numFound + "']"); + private static void assertQR( + final String fieldName, final String queryValue, final int numFound) { + assertQ( + req("q", fieldName + ":" + queryValue, "indent", "true"), + "//result[@numFound = '" + numFound + "']"); } - private void runTestForField(final String fieldName, final Object[] values, final String[] queries, - final Optional> valueConverter) + private void runTestForField( + final String fieldName, + final Object[] values, + final String[] queries, + final Optional> valueConverter) throws SolrServerException, IOException { - final Function vc = valueConverter.orElse(o -> o); + final Function vc = valueConverter.orElse(o -> o); - getSolrClient().add(Arrays.asList( - sdoc("id", "20000", fieldName, Arrays.asList(values[0], values[1], values[2])), - sdoc("id", "20001", fieldName, Arrays.asList(values[1], values[2], values[3])))); + getSolrClient() + .add( + Arrays.asList( + sdoc("id", "20000", fieldName, Arrays.asList(values[0], values[1], values[2])), + sdoc("id", "20001", fieldName, Arrays.asList(values[1], values[2], values[3])))); getSolrClient().commit(true, true); if (queries != null) { @@ -85,15 +92,17 @@ private void runTestForField(final String fieldName, final Object[] values, fina Collection fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName); assertEquals(3, fieldValues.size()); - assertThat(fieldValues, hasItems(vc.apply(values[0]), vc.apply(values[1]), vc.apply(values[2]))); + assertThat( + fieldValues, hasItems(vc.apply(values[0]), vc.apply(values[1]), vc.apply(values[2]))); assertThat(fieldValues, not(hasItems(vc.apply(values[3])))); fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName); assertEquals(3, fieldValues.size()); - assertThat(fieldValues, hasItems(vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3]))); + assertThat( + fieldValues, hasItems(vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3]))); assertThat(fieldValues, not(hasItems(vc.apply(values[0])))); - getSolrClient().add(sdoc("id", "20000", fieldName, ImmutableMap.of("remove", - List.of(values[0])))); + getSolrClient() + .add(sdoc("id", "20000", fieldName, ImmutableMap.of("remove", List.of(values[0])))); getSolrClient().commit(true, true); if (queries != null) { @@ -109,11 +118,17 @@ private void runTestForField(final String fieldName, final Object[] values, fina assertThat(fieldValues, 
not(hasItems(vc.apply(values[0]), vc.apply(values[3])))); fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName); assertEquals(3, fieldValues.size()); - assertThat(fieldValues, hasItems(vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3]))); + assertThat( + fieldValues, hasItems(vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3]))); assertThat(fieldValues, not(hasItems(vc.apply(values[0])))); - getSolrClient().add(sdoc("id", "20001", fieldName, ImmutableMap.of("remove", - List.of(values[0], values[1], values[2])))); + getSolrClient() + .add( + sdoc( + "id", + "20001", + fieldName, + ImmutableMap.of("remove", List.of(values[0], values[1], values[2])))); getSolrClient().commit(true, true); if (queries != null) { @@ -130,12 +145,23 @@ private void runTestForField(final String fieldName, final Object[] values, fina fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName); assertEquals(1, fieldValues.size()); assertThat(fieldValues, hasItems(vc.apply(values[3]))); - assertThat(fieldValues, not(hasItems(vc.apply(values[0]), vc.apply(values[1]), vc.apply(values[2])))); - - getSolrClient().add(Arrays.asList(sdoc("id", "20000", fieldName, ImmutableMap.of("add", - List.of(values[0]), "remove", List.of(values[1], values[2]))), - sdoc("id", "20001", fieldName, - ImmutableMap.of("add", List.of(values[0]), "remove", List.of(values[3]))))); + assertThat( + fieldValues, not(hasItems(vc.apply(values[0]), vc.apply(values[1]), vc.apply(values[2])))); + + getSolrClient() + .add( + Arrays.asList( + sdoc( + "id", + "20000", + fieldName, + ImmutableMap.of( + "add", List.of(values[0]), "remove", List.of(values[1], values[2]))), + sdoc( + "id", + "20001", + fieldName, + ImmutableMap.of("add", List.of(values[0]), "remove", List.of(values[3]))))); getSolrClient().commit(true, true); if (queries != null) { @@ -148,16 +174,27 @@ private void runTestForField(final String fieldName, final Object[] values, fina fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName); assertEquals(1, fieldValues.size()); assertThat(fieldValues, hasItems(vc.apply(values[0]))); - assertThat(fieldValues, not(hasItems(vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3])))); + assertThat( + fieldValues, not(hasItems(vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3])))); fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName); assertEquals(1, fieldValues.size()); assertThat(fieldValues, hasItems(vc.apply(values[0]))); - assertThat(fieldValues, not(hasItems(vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3])))); - - getSolrClient().add(Arrays.asList(sdoc("id", "20000", fieldName, ImmutableMap.of("set", - List.of(values[0], values[1], values[2], values[3]))), sdoc("id", "20001", fieldName, - ImmutableMap.of("set", - List.of(values[0], values[1], values[2], values[3]))))); + assertThat( + fieldValues, not(hasItems(vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3])))); + + getSolrClient() + .add( + Arrays.asList( + sdoc( + "id", + "20000", + fieldName, + ImmutableMap.of("set", List.of(values[0], values[1], values[2], values[3]))), + sdoc( + "id", + "20001", + fieldName, + ImmutableMap.of("set", List.of(values[0], values[1], values[2], values[3]))))); getSolrClient().commit(true, true); if (queries != null) { @@ -169,16 +206,23 @@ private void runTestForField(final String fieldName, final Object[] values, fina fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName); assertEquals(4, fieldValues.size()); - 
assertThat(fieldValues, - hasItems(vc.apply(values[0]), vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3]))); + assertThat( + fieldValues, + hasItems( + vc.apply(values[0]), vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3]))); fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName); assertEquals(4, fieldValues.size()); - assertThat(fieldValues, - hasItems(vc.apply(values[0]), vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3]))); + assertThat( + fieldValues, + hasItems( + vc.apply(values[0]), vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3]))); } private String[] toStringArray(final Object[] values) { - return Arrays.stream(values).map(v -> v.toString()).collect(Collectors.toList()).toArray(new String[] {}); + return Arrays.stream(values) + .map(v -> v.toString()) + .collect(Collectors.toList()) + .toArray(new String[] {}); } private void runTestForFieldWithQuery(final String fieldName, final Object[] values) @@ -186,13 +230,17 @@ private void runTestForFieldWithQuery(final String fieldName, final Object[] val runTestForField(fieldName, values, toStringArray(values), Optional.empty()); } - private void runTestForFieldWithQuery(final String fieldName, final Object[] values, final String[] queries) + private void runTestForFieldWithQuery( + final String fieldName, final Object[] values, final String[] queries) throws SolrServerException, IOException { runTestForField(fieldName, values, queries, Optional.empty()); } - private void runTestForFieldWithQuery(final String fieldName, final Object[] values, final String[] queries, - final Function valueConverter) + private void runTestForFieldWithQuery( + final String fieldName, + final Object[] values, + final String[] queries, + final Function valueConverter) throws SolrServerException, IOException { runTestForField(fieldName, values, queries, Optional.of(valueConverter)); } @@ -204,7 +252,8 @@ private void runTestForFieldWithoutQuery(final String fieldName, final Object[] @Test public void testMultivalueBinaryField() throws SolrServerException, IOException { - runTestForFieldWithoutQuery("binaryRemove", + runTestForFieldWithoutQuery( + "binaryRemove", new byte[][] {new byte[] {0}, new byte[] {1}, new byte[] {2}, new byte[] {3}}); } @@ -213,9 +262,11 @@ public void testMultivalueBooleanField() throws SolrServerException, IOException final String fieldName = "booleanRemove"; - getSolrClient().add(Arrays.asList( - sdoc("id", "20000", fieldName, List.of(true, false)), - sdoc("id", "20001", fieldName, List.of(false, true)))); + getSolrClient() + .add( + Arrays.asList( + sdoc("id", "20000", fieldName, List.of(true, false)), + sdoc("id", "20001", fieldName, List.of(false, true)))); getSolrClient().commit(true, true); assertQR(fieldName, "true", 2); @@ -228,8 +279,7 @@ public void testMultivalueBooleanField() throws SolrServerException, IOException assertEquals(2, fieldValues.size()); assertThat(fieldValues, hasItems(true, false)); - getSolrClient().add(sdoc("id", "20000", fieldName, ImmutableMap.of("remove", - List.of(false)))); + getSolrClient().add(sdoc("id", "20000", fieldName, ImmutableMap.of("remove", List.of(false)))); getSolrClient().commit(true, true); assertQR(fieldName, "true", 2); @@ -242,8 +292,8 @@ public void testMultivalueBooleanField() throws SolrServerException, IOException assertEquals(2, fieldValues.size()); assertThat(fieldValues, hasItems(true, false)); - getSolrClient().add(sdoc("id", "20001", fieldName, ImmutableMap.of("remove", - List.of(true, false)))); + 
getSolrClient() + .add(sdoc("id", "20001", fieldName, ImmutableMap.of("remove", List.of(true, false)))); getSolrClient().commit(true, true); assertQR(fieldName, "true", 1); @@ -256,8 +306,10 @@ public void testMultivalueBooleanField() throws SolrServerException, IOException fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName); assertNull(fieldValues); - getSolrClient().add(Arrays.asList(sdoc("id", "20000", fieldName, ImmutableMap.of("add", - List.of(false, false))))); + getSolrClient() + .add( + Arrays.asList( + sdoc("id", "20000", fieldName, ImmutableMap.of("add", List.of(false, false))))); getSolrClient().commit(true, true); assertQR(fieldName, "true", 1); @@ -269,10 +321,11 @@ public void testMultivalueBooleanField() throws SolrServerException, IOException fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName); assertNull(fieldValues); - getSolrClient().add(Arrays.asList(sdoc("id", "20000", fieldName, ImmutableMap.of("set", - List.of(true, false))), sdoc("id", "20001", fieldName, - ImmutableMap.of("set", - List.of(false, true))))); + getSolrClient() + .add( + Arrays.asList( + sdoc("id", "20000", fieldName, ImmutableMap.of("set", List.of(true, false))), + sdoc("id", "20001", fieldName, ImmutableMap.of("set", List.of(false, true))))); getSolrClient().commit(true, true); assertQR(fieldName, "true", 2); @@ -303,7 +356,9 @@ public void testMultivalueDatePointField() throws SolrServerException, IOExcepti final String s4 = "2010-01-01T00:00:00Z"; final Date d4 = Date.from(ZonedDateTime.parse(s4).toInstant()); - runTestForFieldWithQuery("datePointRemove", new Date[] {d1, d2, d3, d4}, + runTestForFieldWithQuery( + "datePointRemove", + new Date[] {d1, d2, d3, d4}, new String[] {"\"" + s1 + "\"", "\"" + s2 + "\"", "\"" + s3 + "\"", "\"" + s4 + "\""}); } @@ -315,7 +370,9 @@ public void testMultivalueDateRangeField() throws SolrServerException, IOExcepti final String s3 = "2000-01-01T00:00:00Z"; final String s4 = "2010-01-01T00:00:00Z"; - runTestForFieldWithQuery("dateRangeRemove", new String[] {s1, s2, s3, s4}, + runTestForFieldWithQuery( + "dateRangeRemove", + new String[] {s1, s2, s3, s4}, new String[] {"\"" + s1 + "\"", "\"" + s2 + "\"", "\"" + s3 + "\"", "\"" + s4 + "\""}); } @@ -326,25 +383,30 @@ public void testMultivalueDoublePointField() throws SolrServerException, IOExcep @Test public void testMultivalueEnumField() throws SolrServerException, IOException { - runTestForFieldWithQuery("enumRemove_sev_enum", new Object[] {"Low", "Medium", "High", "Critical"}); + runTestForFieldWithQuery( + "enumRemove_sev_enum", new Object[] {"Low", "Medium", "High", "Critical"}); } @Test public void testMultivalueEnumFieldWithNumbers() throws SolrServerException, IOException { final Object[] values = new Object[] {"Low", "Medium", "High", 11}; - runTestForFieldWithQuery("enumRemove_sev_enum", values, toStringArray(values), o -> { - if (Integer.valueOf(11).equals(o)) { - return "Critical"; - } else { - return o; - } - }); + runTestForFieldWithQuery( + "enumRemove_sev_enum", + values, + toStringArray(values), + o -> { + if (Integer.valueOf(11).equals(o)) { + return "Critical"; + } else { + return o; + } + }); } @Test public void testMultivalueExternalFileField() throws SolrServerException, IOException { - runTestForFieldWithoutQuery("externalFileRemove", - new String[] {"file1.txt", "file2.txt", "file3.txt", "file4.txt"}); + runTestForFieldWithoutQuery( + "externalFileRemove", new String[] {"file1.txt", "file2.txt", "file3.txt", "file4.txt"}); } @Test @@ -354,7 +416,8 @@ 
public void testMultivalueFloatPointField() throws SolrServerException, IOExcept @Test public void testMultivalueICUCollationField() throws SolrServerException, IOException { - runTestForFieldWithQuery("icuCollationRemove", new String[] {"iuccf1", "icucf2", "icucf3", "icucf4"}); + runTestForFieldWithQuery( + "icuCollationRemove", new String[] {"iuccf1", "icucf2", "icucf3", "icucf4"}); } @Test @@ -364,16 +427,22 @@ public void testMultivalueIntPointField() throws SolrServerException, IOExceptio @Test public void testMultivalueLatLonPointSpatialField() throws SolrServerException, IOException { - runTestForFieldWithoutQuery("latLonPointSpatialRemove", - new String[] {"1.0,-1.0", "2.0,-2.0", "3.0,-3.0", "4.0,-4.0"}); + runTestForFieldWithoutQuery( + "latLonPointSpatialRemove", new String[] {"1.0,-1.0", "2.0,-2.0", "3.0,-3.0", "4.0,-4.0"}); } @Test public void testMultivalueLatLonField() throws SolrServerException, IOException { String[] values = {"1.0,-1.0", "2.0,-2.0", "3.0,-3.0", "4.0,-4.0"}; - String[] queries = Arrays.stream(values) - .map(v -> v.substring(v.indexOf(',') + 1) + " " + v.substring(0, v.indexOf(','))) // map "lat,lon" to "lon lat" - .map(v -> "\"Intersects(BUFFER(POINT(" + v + "),0.001))\"").toArray(String[]::new); + String[] queries = + Arrays.stream(values) + .map( + v -> + v.substring(v.indexOf(',') + 1) + + " " + + v.substring(0, v.indexOf(','))) // map "lat,lon" to "lon lat" + .map(v -> "\"Intersects(BUFFER(POINT(" + v + "),0.001))\"") + .toArray(String[]::new); runTestForFieldWithQuery("latLonRemove", values, queries); } @@ -393,8 +462,10 @@ public void testMultivalueRandomSortField() throws SolrServerException, IOExcept } @Test - public void testMultivalueSpatialRecursivePrefixTreeFieldType() throws SolrServerException, IOException { - runTestForFieldWithoutQuery("spatialRecursivePrefixTreeRemove", new String[] {"1,1", "2,2", "3,3", "4,4"}); + public void testMultivalueSpatialRecursivePrefixTreeFieldType() + throws SolrServerException, IOException { + runTestForFieldWithoutQuery( + "spatialRecursivePrefixTreeRemove", new String[] {"1,1", "2,2", "3,3", "4,4"}); } @Test @@ -404,9 +475,13 @@ public void testMultivalueStringField() throws SolrServerException, IOException @Test public void testMultivalueStringFieldUsingCharSequence() throws SolrServerException, IOException { - final ByteArrayUtf8CharSequence[] values = new ByteArrayUtf8CharSequence[] {new ByteArrayUtf8CharSequence("str1"), - new ByteArrayUtf8CharSequence("str2"), - new ByteArrayUtf8CharSequence("str3"), new ByteArrayUtf8CharSequence("str4")}; + final ByteArrayUtf8CharSequence[] values = + new ByteArrayUtf8CharSequence[] { + new ByteArrayUtf8CharSequence("str1"), + new ByteArrayUtf8CharSequence("str2"), + new ByteArrayUtf8CharSequence("str3"), + new ByteArrayUtf8CharSequence("str4") + }; runTestForFieldWithQuery("stringRemove", values, toStringArray(values), o -> o.toString()); } @@ -417,9 +492,13 @@ public void testMultivalueTextField() throws SolrServerException, IOException { @Test public void testMultivalueUUIDField() throws SolrServerException, IOException { - final String[] values = new String[] {UUID.randomUUID().toString(), UUID.randomUUID().toString(), - UUID.randomUUID().toString(), UUID.randomUUID().toString()}; + final String[] values = + new String[] { + UUID.randomUUID().toString(), + UUID.randomUUID().toString(), + UUID.randomUUID().toString(), + UUID.randomUUID().toString() + }; runTestForFieldWithQuery("uuidRemove", values); } - } diff --git 
a/solr/core/src/test/org/apache/solr/update/processor/AddSchemaFieldsUpdateProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/AddSchemaFieldsUpdateProcessorFactoryTest.java index a49e9fe9b77..33256f3e1f5 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/AddSchemaFieldsUpdateProcessorFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/AddSchemaFieldsUpdateProcessorFactoryTest.java @@ -24,7 +24,6 @@ import java.util.Collections; import java.util.Date; import java.util.Locale; - import org.apache.commons.io.FileUtils; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.params.ModifiableSolrParams; @@ -33,12 +32,12 @@ import org.junit.Before; /** - * Tests for the field mutating update processors - * that parse Dates, Longs, Doubles, and Booleans. + * Tests for the field mutating update processors that parse Dates, Longs, Doubles, and Booleans. */ public class AddSchemaFieldsUpdateProcessorFactoryTest extends UpdateProcessorTestBase { - private static final String SOLRCONFIG_XML = "solrconfig-add-schema-fields-update-processor-chains.xml"; - private static final String SCHEMA_XML = "schema-add-schema-fields-update-processor.xml"; + private static final String SOLRCONFIG_XML = + "solrconfig-add-schema-fields-update-processor-chains.xml"; + private static final String SCHEMA_XML = "schema-add-schema-fields-update-processor.xml"; private static File tmpSolrHome; private static File tmpConfDir; @@ -63,7 +62,7 @@ public void testEmptyValue() { IndexSchema schema = h.getCore().getLatestSchema(); final String fieldName = "newFieldABC"; assertNull(schema.getFieldOrNull(fieldName)); - //UpdateProcessorTestBase#doc doesn't deal with nulls + // UpdateProcessorTestBase#doc doesn't deal with nulls SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", "1"); doc.addField(fieldName, null); @@ -71,7 +70,9 @@ public void testEmptyValue() { SolrInputDocument finalDoc = doc; expectThrows(AssertionError.class, () -> processAdd("add-fields-no-run-processor", finalDoc)); - expectThrows(AssertionError.class, () -> processAdd("add-fields-no-run-processor", new SolrInputDocument(null , null))); + expectThrows( + AssertionError.class, + () -> processAdd("add-fields-no-run-processor", new SolrInputDocument(null, null))); } public void testSingleField() throws Exception { @@ -79,7 +80,8 @@ public void testSingleField() throws Exception { final String fieldName = "newfield1"; assertNull(schema.getFieldOrNull(fieldName)); Date date = Date.from(Instant.now()); - SolrInputDocument d = processAdd("add-fields-no-run-processor", doc(f("id", "1"), f(fieldName, date))); + SolrInputDocument d = + processAdd("add-fields-no-run-processor", doc(f("id", "1"), f(fieldName, date))); assertNotNull(d); schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull(fieldName)); @@ -97,7 +99,8 @@ public void testSingleFieldRoundTrip() throws Exception { assertNotNull(schema.getFieldOrNull(fieldName)); assertEquals("pfloats", schema.getFieldType(fieldName).getTypeName()); assertU(commit()); - assertQ(req("id:2"), "//arr[@name='" + fieldName + "']/float[.='" + floatValue.toString() + "']"); + assertQ( + req("id:2"), "//arr[@name='" + fieldName + "']/float[.='" + floatValue.toString() + "']"); } public void testSingleFieldMixedFieldTypesRoundTrip() throws Exception { @@ -105,17 +108,18 @@ public void testSingleFieldMixedFieldTypesRoundTrip() throws Exception { final String fieldName = "newfield3"; 
assertNull(schema.getFieldOrNull(fieldName)); Float fieldValue1 = -13258.0f; - Double fieldValue2 = 8.4828800808E10; - SolrInputDocument d = processAdd - ("add-fields", doc(f("id", "3"), f(fieldName, fieldValue1, fieldValue2))); + Double fieldValue2 = 8.4828800808E10; + SolrInputDocument d = + processAdd("add-fields", doc(f("id", "3"), f(fieldName, fieldValue1, fieldValue2))); assertNotNull(d); schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull(fieldName)); assertEquals("pdoubles", schema.getFieldType(fieldName).getTypeName()); assertU(commit()); - assertQ(req("id:3") - ,"//arr[@name='" + fieldName + "']/double[.='" + fieldValue1.toString() + "']" - ,"//arr[@name='" + fieldName + "']/double[.='" + fieldValue2.toString() + "']"); + assertQ( + req("id:3"), + "//arr[@name='" + fieldName + "']/double[.='" + fieldValue1.toString() + "']", + "//arr[@name='" + fieldName + "']/double[.='" + fieldValue2.toString() + "']"); } public void testSingleFieldDefaultFieldTypeRoundTrip() throws Exception { @@ -125,19 +129,21 @@ public void testSingleFieldDefaultFieldTypeRoundTrip() throws Exception { Float fieldValue1 = -13258.0f; Double fieldValue2 = 8.4828800808E10; String fieldValue3 = "blah blah"; - SolrInputDocument d = processAdd - ("add-fields", doc(f("id", "4"), f(fieldName, fieldValue1, fieldValue2, fieldValue3))); + SolrInputDocument d = + processAdd( + "add-fields", doc(f("id", "4"), f(fieldName, fieldValue1, fieldValue2, fieldValue3))); assertNotNull(d); schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull(fieldName)); assertEquals("text", schema.getFieldType(fieldName).getTypeName()); - assertEquals(0, schema.getCopyFieldProperties(true, Collections.singleton(fieldName), null).size()); + assertEquals( + 0, schema.getCopyFieldProperties(true, Collections.singleton(fieldName), null).size()); assertU(commit()); - assertQ(req("id:4") - ,"//arr[@name='" + fieldName + "']/str[.='" + fieldValue1.toString() + "']" - ,"//arr[@name='" + fieldName + "']/str[.='" + fieldValue2.toString() + "']" - ,"//arr[@name='" + fieldName + "']/str[.='" + fieldValue3.toString() + "']" - ); + assertQ( + req("id:4"), + "//arr[@name='" + fieldName + "']/str[.='" + fieldValue1.toString() + "']", + "//arr[@name='" + fieldName + "']/str[.='" + fieldValue2.toString() + "']", + "//arr[@name='" + fieldName + "']/str[.='" + fieldValue3.toString() + "']"); } public void testSingleFieldDefaultTypeMappingRoundTrip() throws Exception { @@ -147,19 +153,22 @@ public void testSingleFieldDefaultTypeMappingRoundTrip() throws Exception { Float fieldValue1 = -13258.0f; Double fieldValue2 = 8.4828800808E10; String fieldValue3 = "blah blah"; - SolrInputDocument d = processAdd - ("add-fields-default-mapping", doc(f("id", "4"), f(fieldName, fieldValue1, fieldValue2, fieldValue3))); + SolrInputDocument d = + processAdd( + "add-fields-default-mapping", + doc(f("id", "4"), f(fieldName, fieldValue1, fieldValue2, fieldValue3))); assertNotNull(d); schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull(fieldName)); assertEquals("text", schema.getFieldType(fieldName).getTypeName()); - assertEquals(1, schema.getCopyFieldProperties(true, Collections.singleton(fieldName), null).size()); + assertEquals( + 1, schema.getCopyFieldProperties(true, Collections.singleton(fieldName), null).size()); assertU(commit()); - assertQ(req("id:4") - ,"//arr[@name='" + fieldName + "']/str[.='" + fieldValue1.toString() + "']" - ,"//arr[@name='" + fieldName + "']/str[.='" + fieldValue2.toString() + "']" - 
,"//arr[@name='" + fieldName + "']/str[.='" + fieldValue3.toString() + "']" - ); + assertQ( + req("id:4"), + "//arr[@name='" + fieldName + "']/str[.='" + fieldValue1.toString() + "']", + "//arr[@name='" + fieldName + "']/str[.='" + fieldValue2.toString() + "']", + "//arr[@name='" + fieldName + "']/str[.='" + fieldValue3.toString() + "']"); } public void testMultipleFieldsRoundTrip() throws Exception { @@ -173,9 +182,13 @@ public void testMultipleFieldsRoundTrip() throws Exception { Long field1Value3 = 999L; Integer field2Value1 = 55123; Long field2Value2 = 1234567890123456789L; - SolrInputDocument d = processAdd - ("add-fields", doc(f("id", "5"), f(fieldName1, field1Value1, field1Value2, field1Value3), - f(fieldName2, field2Value1, field2Value2))); + SolrInputDocument d = + processAdd( + "add-fields", + doc( + f("id", "5"), + f(fieldName1, field1Value1, field1Value2, field1Value3), + f(fieldName2, field2Value1, field2Value2))); assertNotNull(d); schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull(fieldName1)); @@ -183,12 +196,13 @@ public void testMultipleFieldsRoundTrip() throws Exception { assertEquals("pdoubles", schema.getFieldType(fieldName1).getTypeName()); assertEquals("plongs", schema.getFieldType(fieldName2).getTypeName()); assertU(commit()); - assertQ(req("id:5") - ,"//arr[@name='" + fieldName1 + "']/double[.='" + field1Value1.toString() + "']" - ,"//arr[@name='" + fieldName1 + "']/double[.='" + field1Value2.toString() + "']" - ,"//arr[@name='" + fieldName1 + "']/double[.='" + field1Value3.doubleValue() + "']" - ,"//arr[@name='" + fieldName2 + "']/long[.='" + field2Value1.toString() + "']" - ,"//arr[@name='" + fieldName2 + "']/long[.='" + field2Value2.toString() + "']"); + assertQ( + req("id:5"), + "//arr[@name='" + fieldName1 + "']/double[.='" + field1Value1.toString() + "']", + "//arr[@name='" + fieldName1 + "']/double[.='" + field1Value2.toString() + "']", + "//arr[@name='" + fieldName1 + "']/double[.='" + field1Value3.doubleValue() + "']", + "//arr[@name='" + fieldName2 + "']/long[.='" + field2Value1.toString() + "']", + "//arr[@name='" + fieldName2 + "']/long[.='" + field2Value2.toString() + "']"); } public void testParseAndAddMultipleFieldsRoundTrip() throws Exception { @@ -201,9 +215,9 @@ public void testParseAndAddMultipleFieldsRoundTrip() throws Exception { assertNull(schema.getFieldOrNull(fieldName2)); assertNull(schema.getFieldOrNull(fieldName3)); assertNull(schema.getFieldOrNull(fieldName4)); - String field1String1 = "-13,258.0"; + String field1String1 = "-13,258.0"; Float field1Value1 = -13258.0f; - String field1String2 = "84,828,800,808.0"; + String field1String2 = "84,828,800,808.0"; Double field1Value2 = 8.4828800808E10; String field1String3 = "999"; Long field1Value3 = 999L; @@ -216,17 +230,23 @@ public void testParseAndAddMultipleFieldsRoundTrip() throws Exception { String field3String2 = "-5.28E-3"; Double field3Value2 = -5.28E-3; String field4String1 = "1999-04-17 17:42"; - DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm", Locale.ROOT).withZone(ZoneOffset.UTC); + DateTimeFormatter dateTimeFormatter = + DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm", Locale.ROOT).withZone(ZoneOffset.UTC); LocalDateTime dateTime = LocalDateTime.parse(field4String1, dateTimeFormatter); Date field4Value1 = Date.from(dateTime.atZone(ZoneOffset.UTC).toInstant()); - DateTimeFormatter dateTimeFormatter2 = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss", Locale.ROOT).withZone(ZoneOffset.UTC); + DateTimeFormatter 
dateTimeFormatter2 = + DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss", Locale.ROOT).withZone(ZoneOffset.UTC); String field4Value1String = dateTime.format(dateTimeFormatter2) + "Z"; - - SolrInputDocument d = processAdd - ("parse-and-add-fields", doc(f("id", "6"), f(fieldName1, field1String1, field1String2, field1String3), - f(fieldName2, field2String1, field2String2), - f(fieldName3, field3String1, field3String2), - f(fieldName4, field4String1))); + + SolrInputDocument d = + processAdd( + "parse-and-add-fields", + doc( + f("id", "6"), + f(fieldName1, field1String1, field1String2, field1String3), + f(fieldName2, field2String1, field2String2), + f(fieldName3, field3String1, field3String2), + f(fieldName4, field4String1))); assertNotNull(d); schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull(fieldName1)); @@ -238,21 +258,22 @@ public void testParseAndAddMultipleFieldsRoundTrip() throws Exception { assertEquals("text", schema.getFieldType(fieldName3).getTypeName()); assertEquals("pdates", schema.getFieldType(fieldName4).getTypeName()); assertU(commit()); - assertQ(req("id:6") - ,"//arr[@name='" + fieldName1 + "']/double[.='" + field1Value1.toString() + "']" - ,"//arr[@name='" + fieldName1 + "']/double[.='" + field1Value2.toString() + "']" - ,"//arr[@name='" + fieldName1 + "']/double[.='" + field1Value3.doubleValue() + "']" - ,"//arr[@name='" + fieldName2 + "']/long[.='" + field2Value1.toString() + "']" - ,"//arr[@name='" + fieldName2 + "']/long[.='" + field2Value2.toString() + "']" - ,"//arr[@name='" + fieldName3 + "']/str[.='" + field3String1 + "']" - ,"//arr[@name='" + fieldName3 + "']/str[.='" + field3String2 + "']" - ,"//arr[@name='" + fieldName4 + "']/date[.='" + field4Value1String + "']"); + assertQ( + req("id:6"), + "//arr[@name='" + fieldName1 + "']/double[.='" + field1Value1.toString() + "']", + "//arr[@name='" + fieldName1 + "']/double[.='" + field1Value2.toString() + "']", + "//arr[@name='" + fieldName1 + "']/double[.='" + field1Value3.doubleValue() + "']", + "//arr[@name='" + fieldName2 + "']/long[.='" + field2Value1.toString() + "']", + "//arr[@name='" + fieldName2 + "']/long[.='" + field2Value2.toString() + "']", + "//arr[@name='" + fieldName3 + "']/str[.='" + field3String1 + "']", + "//arr[@name='" + fieldName3 + "']/str[.='" + field3String2 + "']", + "//arr[@name='" + fieldName4 + "']/date[.='" + field4Value1String + "']"); } public void testStringWithCopyField() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); final String fieldName = "stringField"; - final String strFieldName = fieldName+"_str"; + final String strFieldName = fieldName + "_str"; assertNull(schema.getFieldOrNull(fieldName)); String content = "This is a text that should be copied to a string field but not be cutoff"; SolrInputDocument d = processAdd("add-fields", doc(f("id", "1"), f(fieldName, content))); @@ -261,48 +282,73 @@ public void testStringWithCopyField() throws Exception { assertNotNull(schema.getFieldOrNull(fieldName)); assertNotNull(schema.getFieldOrNull(strFieldName)); assertEquals("text", schema.getFieldType(fieldName).getTypeName()); - assertEquals(1, schema.getCopyFieldProperties(true, Collections.singleton(fieldName), Collections.singleton(strFieldName)).size()); + assertEquals( + 1, + schema + .getCopyFieldProperties( + true, Collections.singleton(fieldName), Collections.singleton(strFieldName)) + .size()); } public void testStringWithCopyFieldAndMaxChars() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); final String 
fieldName = "stringField"; - final String strFieldName = fieldName+"_str"; + final String strFieldName = fieldName + "_str"; assertNull(schema.getFieldOrNull(fieldName)); - String content = "This is a text that should be copied to a string field and cutoff at 10 characters"; - SolrInputDocument d = processAdd("add-fields-maxchars", doc(f("id", "1"), f(fieldName, content))); + String content = + "This is a text that should be copied to a string field and cutoff at 10 characters"; + SolrInputDocument d = + processAdd("add-fields-maxchars", doc(f("id", "1"), f(fieldName, content))); assertNotNull(d); - System.out.println("Document is "+d); + System.out.println("Document is " + d); schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull(fieldName)); assertNotNull(schema.getFieldOrNull(strFieldName)); assertEquals("text", schema.getFieldType(fieldName).getTypeName()); // We have three copyFields, one with maxChars 10 and two with maxChars 20 - assertEquals(3, schema.getCopyFieldProperties(true, Collections.singleton(fieldName), null).size()); - assertEquals("The configured maxChars cutoff does not exist on the copyField", 10, - schema.getCopyFieldProperties(true, Collections.singleton(fieldName), Collections.singleton(strFieldName)) - .get(0).get("maxChars")); - assertEquals("The configured maxChars cutoff does not exist on the copyField", 20, - schema.getCopyFieldProperties(true, Collections.singleton(fieldName), Collections.singleton(fieldName+"_t")) - .get(0).get("maxChars")); - assertEquals("The configured maxChars cutoff does not exist on the copyField", 20, - schema.getCopyFieldProperties(true, Collections.singleton(fieldName), Collections.singleton(fieldName+"2_t")) - .get(0).get("maxChars")); + assertEquals( + 3, schema.getCopyFieldProperties(true, Collections.singleton(fieldName), null).size()); + assertEquals( + "The configured maxChars cutoff does not exist on the copyField", + 10, + schema + .getCopyFieldProperties( + true, Collections.singleton(fieldName), Collections.singleton(strFieldName)) + .get(0) + .get("maxChars")); + assertEquals( + "The configured maxChars cutoff does not exist on the copyField", + 20, + schema + .getCopyFieldProperties( + true, Collections.singleton(fieldName), Collections.singleton(fieldName + "_t")) + .get(0) + .get("maxChars")); + assertEquals( + "The configured maxChars cutoff does not exist on the copyField", + 20, + schema + .getCopyFieldProperties( + true, Collections.singleton(fieldName), Collections.singleton(fieldName + "2_t")) + .get(0) + .get("maxChars")); } - + public void testCopyFieldByIndexing() throws Exception { - String content = "This is a text that should be copied to a string field and cutoff at 10 characters"; - SolrInputDocument d = processAdd("add-fields-default-mapping", doc(f("id", "1"), f("mynewfield", content))); + String content = + "This is a text that should be copied to a string field and cutoff at 10 characters"; + SolrInputDocument d = + processAdd("add-fields-default-mapping", doc(f("id", "1"), f("mynewfield", content))); assertU(commit()); ModifiableSolrParams params = new ModifiableSolrParams(); params.add("q", "*:*").add("facet", "true").add("facet.field", "mynewfield_str"); - assertQ(req(params) - , "*[count(//doc)=1]" - ,"//lst[@name='mynewfield_str']/int[@name='This is a '][.='1']" - ); + assertQ( + req(params), + "*[count(//doc)=1]", + "//lst[@name='mynewfield_str']/int[@name='This is a '][.='1']"); } - + @After private void deleteCoreAndTempSolrHomeDirectory() throws Exception { deleteCore(); 
diff --git a/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdateJavabinTest.java b/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdateJavabinTest.java index 758de5f37ab..13deb77f558 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdateJavabinTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdateJavabinTest.java @@ -17,6 +17,11 @@ package org.apache.solr.update.processor; +import java.time.Instant; +import java.util.Collection; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.client.solrj.request.UpdateRequest; @@ -30,18 +35,13 @@ import org.junit.BeforeClass; import org.junit.Test; -import java.time.Instant; -import java.util.Collection; -import java.util.Date; -import java.util.HashMap; -import java.util.Map; - /** * Tests Solr's atomic-update functionality using requests sent through SolrJ using wt=javabin * - * {@link AtomicUpdatesTest} covers some of the same functionality, but does so by making xml-based requests. Recent - * changes to Solr have made it possible for the same data sent with different formats to result in different NamedLists - * after unmarshalling, so the test duplication is now necessary. See SOLR-13331 for an example. + *
<p>
{@link AtomicUpdatesTest} covers some of the same functionality, but does so by making + * xml-based requests. Recent changes to Solr have made it possible for the same data sent with + * different formats to result in different NamedLists after unmarshalling, so the test duplication + * is now necessary. See SOLR-13331 for an example. */ public class AtomicUpdateJavabinTest extends SolrCloudTestCase { private static final String COMMITTED_DOC_ID = "1"; @@ -58,12 +58,9 @@ public class AtomicUpdateJavabinTest extends SolrCloudTestCase { private static final Date DATE_3 = Date.from(Instant.ofEpochSecond(1554243909)); private static final String DATE_3_STR = "2019-04-02T22:25:09Z"; - @BeforeClass public static void setupCluster() throws Exception { - configureCluster(1) - .addConfig("conf", configset("cloud-dynamic")) - .configure(); + configureCluster(1).addConfig("conf", configset("cloud-dynamic")).configure(); CollectionAdminRequest.createCollection(COLLECTION, "conf", NUM_SHARDS, NUM_REPLICAS) .process(cluster.getSolrClient()); @@ -75,44 +72,110 @@ public static void setupCluster() throws Exception { public void setUp() throws Exception { super.setUp(); - final SolrInputDocument committedDoc = sdoc( - "id", COMMITTED_DOC_ID, - "title_s", "title_1", "title_s", "title_2", - "tv_mv_text", "text_1", "tv_mv_text", "text_2", - "count_is", 1, "count_is", 2, - "count_md", 1.0, "count_md", 2.0, - "timestamps_mdt", DATE_1, "timestamps_mdt", DATE_2); - final SolrInputDocument committedStrDoc = sdoc( - "id", COMMITTED_DOC_STR_VALUES_ID, - "title_s", "title_1", "title_s", "title_2", - "tv_mv_text", "text_1", "tv_mv_text", "text_2", - "count_is", "1", "count_is", "2", - "count_md", "1.0", "count_md", "2.0", - "timestamps_mdt", DATE_1_STR, "timestamps_mdt", DATE_2_STR); - final UpdateRequest committedRequest = new UpdateRequest() - .add(committedDoc) - .add(committedStrDoc); + final SolrInputDocument committedDoc = + sdoc( + "id", + COMMITTED_DOC_ID, + "title_s", + "title_1", + "title_s", + "title_2", + "tv_mv_text", + "text_1", + "tv_mv_text", + "text_2", + "count_is", + 1, + "count_is", + 2, + "count_md", + 1.0, + "count_md", + 2.0, + "timestamps_mdt", + DATE_1, + "timestamps_mdt", + DATE_2); + final SolrInputDocument committedStrDoc = + sdoc( + "id", + COMMITTED_DOC_STR_VALUES_ID, + "title_s", + "title_1", + "title_s", + "title_2", + "tv_mv_text", + "text_1", + "tv_mv_text", + "text_2", + "count_is", + "1", + "count_is", + "2", + "count_md", + "1.0", + "count_md", + "2.0", + "timestamps_mdt", + DATE_1_STR, + "timestamps_mdt", + DATE_2_STR); + final UpdateRequest committedRequest = + new UpdateRequest().add(committedDoc).add(committedStrDoc); committedRequest.commit(cluster.getSolrClient(), COLLECTION); // Upload a copy of id:1 that's uncommitted to test how atomic-updates modify values in the tlog // See SOLR-14971 for an example of why this case needs tested separately - final SolrInputDocument uncommittedDoc = sdoc( - "id", UNCOMMITTED_DOC_ID, - "title_s", "title_1", "title_s", "title_2", - "tv_mv_text", "text_1", "tv_mv_text", "text_2", - "count_is", 1, "count_is", 2, - "count_md", 1.0, "count_md", 2.0, - "timestamps_mdt", DATE_1, "timestamps_mdt", DATE_2); - final SolrInputDocument uncommittedStrDoc = sdoc( - "id", UNCOMMITTED_DOC_STR_VALUES_ID, - "title_s", "title_1", "title_s", "title_2", - "tv_mv_text", "text_1", "tv_mv_text", "text_2", - "count_is", "1", "count_is", "2", - "count_md", "1.0", "count_md", "2.0", - "timestamps_mdt", DATE_1_STR, "timestamps_mdt", DATE_2_STR); - final 
UpdateRequest uncommittedRequest = new UpdateRequest() - .add(uncommittedDoc) - .add(uncommittedStrDoc); + final SolrInputDocument uncommittedDoc = + sdoc( + "id", + UNCOMMITTED_DOC_ID, + "title_s", + "title_1", + "title_s", + "title_2", + "tv_mv_text", + "text_1", + "tv_mv_text", + "text_2", + "count_is", + 1, + "count_is", + 2, + "count_md", + 1.0, + "count_md", + 2.0, + "timestamps_mdt", + DATE_1, + "timestamps_mdt", + DATE_2); + final SolrInputDocument uncommittedStrDoc = + sdoc( + "id", + UNCOMMITTED_DOC_STR_VALUES_ID, + "title_s", + "title_1", + "title_s", + "title_2", + "tv_mv_text", + "text_1", + "tv_mv_text", + "text_2", + "count_is", + "1", + "count_is", + "2", + "count_md", + "1.0", + "count_md", + "2.0", + "timestamps_mdt", + DATE_1_STR, + "timestamps_mdt", + DATE_2_STR); + final UpdateRequest uncommittedRequest = + new UpdateRequest().add(uncommittedDoc).add(uncommittedStrDoc); uncommittedRequest.process(cluster.getSolrClient(), COLLECTION); } @@ -329,7 +392,8 @@ public void testAtomicUpdateAddDistinctOfDuplicateValueOnDateField() throws Exce ensureFieldHasValues(UNCOMMITTED_DOC_STR_VALUES_ID, "timestamps_mdt", DATE_1, DATE_2); } - private void atomicRemoveValueFromField(String docId, String fieldName, Object value) throws Exception { + private void atomicRemoveValueFromField(String docId, String fieldName, Object value) + throws Exception { final SolrInputDocument doc = new SolrInputDocument(); doc.setField("id", docId); Map atomicUpdateRemoval = new HashMap<>(1); @@ -339,7 +403,8 @@ private void atomicRemoveValueFromField(String docId, String fieldName, Object v cluster.getSolrClient().add(COLLECTION, doc); } - private void atomicAddDistinctValueToField(String docId, String fieldName, Object value) throws Exception { + private void atomicAddDistinctValueToField(String docId, String fieldName, Object value) + throws Exception { final SolrInputDocument doc = new SolrInputDocument(); doc.setField("id", docId); Map atomicUpdateRemoval = new HashMap<>(1); @@ -349,7 +414,8 @@ private void atomicAddDistinctValueToField(String docId, String fieldName, Objec cluster.getSolrClient().add(COLLECTION, doc); } - private void ensureFieldHasValues(String identifyingDocId, String fieldName, Object... expectedValues) throws Exception { + private void ensureFieldHasValues( + String identifyingDocId, String fieldName, Object... 
expectedValues) throws Exception { final ModifiableSolrParams solrParams = new ModifiableSolrParams(); solrParams.set("id", identifyingDocId); QueryRequest request = new QueryRequest(solrParams); @@ -361,10 +427,17 @@ private void ensureFieldHasValues(String identifyingDocId, String fieldName, Obj assertTrue(rawResponse.get("doc") instanceof SolrDocument); final SolrDocument doc = (SolrDocument) rawResponse.get("doc"); final Collection valuesAfterUpdate = doc.getFieldValues(fieldName); - assertEquals("Expected field to have " + expectedValues.length + " values, but found " + valuesAfterUpdate.size(), - expectedValues.length, valuesAfterUpdate.size()); - for (Object expectedValue: expectedValues) { - assertTrue("Expected value [" + expectedValue + "] was not found in field", valuesAfterUpdate.contains(expectedValue)); + assertEquals( + "Expected field to have " + + expectedValues.length + + " values, but found " + + valuesAfterUpdate.size(), + expectedValues.length, + valuesAfterUpdate.size()); + for (Object expectedValue : expectedValues) { + assertTrue( + "Expected value [" + expectedValue + "] was not found in field", + valuesAfterUpdate.contains(expectedValue)); } } } diff --git a/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdateProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdateProcessorFactoryTest.java index 855732e7f9e..b3eeb964227 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdateProcessorFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdateProcessorFactoryTest.java @@ -21,7 +21,6 @@ import java.util.Arrays; import java.util.List; import java.util.StringJoiner; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; @@ -32,47 +31,46 @@ import org.apache.solr.update.AddUpdateCommand; import org.junit.BeforeClass; -/** - * test class for @see AtomicUpdateProcessorFactory - */ +/** test class for @see AtomicUpdateProcessorFactory */ public class AtomicUpdateProcessorFactoryTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - System.setProperty("enable.update.log","true"); + System.setProperty("enable.update.log", "true"); initCore("solrconfig.xml", "schema.xml"); } public void testWrongAtomicOpPassed() throws Exception { - ModifiableSolrParams params = new ModifiableSolrParams() - .add("processor", "Atomic") - .add("atomic.cat", "delete") - .add("commit", "true"); + ModifiableSolrParams params = + new ModifiableSolrParams() + .add("processor", "Atomic") + .add("atomic.cat", "delete") + .add("commit", "true"); try (SolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), params)) { AddUpdateCommand cmd = new AddUpdateCommand(req); AtomicUpdateProcessorFactory factory = new AtomicUpdateProcessorFactory(); factory.inform(h.getCore()); - factory.getInstance(cmd.getReq(), new SolrQueryResponse(), - null).processAdd(cmd); + factory.getInstance(cmd.getReq(), new SolrQueryResponse(), null).processAdd(cmd); } catch (SolrException e) { - assertEquals("Unexpected param(s) for AtomicUpdateProcessor, invalid atomic op passed: 'delete'", + assertEquals( + "Unexpected param(s) for AtomicUpdateProcessor, invalid atomic op passed: 'delete'", e.getMessage()); } } - public void testNoUniqueIdPassed() throws Exception { //TODO - ModifiableSolrParams params = new ModifiableSolrParams() - .add("processor", "atomic") - .add("atomic.cat", "add") - .add("commit", 
"true"); + public void testNoUniqueIdPassed() throws Exception { // TODO + ModifiableSolrParams params = + new ModifiableSolrParams() + .add("processor", "atomic") + .add("atomic.cat", "add") + .add("commit", "true"); try (SolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), params)) { AddUpdateCommand cmd = new AddUpdateCommand(req); cmd.solrDoc = new SolrInputDocument(); cmd.solrDoc.addField("title", 1); AtomicUpdateProcessorFactory factory = new AtomicUpdateProcessorFactory(); factory.inform(h.getCore()); - factory.getInstance(cmd.getReq(), new SolrQueryResponse(), - null).processAdd(cmd); + factory.getInstance(cmd.getReq(), new SolrQueryResponse(), null).processAdd(cmd); } catch (SolrException e) { assertEquals("Document passed with no unique field: 'id'", e.getMessage()); } @@ -80,14 +78,15 @@ public void testNoUniqueIdPassed() throws Exception { //TODO public void testBasics() throws Exception { - ModifiableSolrParams params = new ModifiableSolrParams() - .add("processor", "atomic") - .add("atomic.cat", "add") - .add("atomic.title", "set") - .add("atomic.count_i", "set") - .add("atomic.name_s", "set") - .add("atomic.multiDefault", "set") - .add("commit", "true"); + ModifiableSolrParams params = + new ModifiableSolrParams() + .add("processor", "atomic") + .add("atomic.cat", "add") + .add("atomic.title", "set") + .add("atomic.count_i", "set") + .add("atomic.name_s", "set") + .add("atomic.multiDefault", "set") + .add("commit", "true"); try (SolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), params)) { AddUpdateCommand cmd = new AddUpdateCommand(req); @@ -107,44 +106,35 @@ public void testBasics() throws Exception { assertU(commit()); - assertQ("Check the total number of docs", - req("q", "id:1") - , "//result[@numFound=1]"); - - assertQ("Check the total number of docs", - req("q", "cat:human") - , "//result[@numFound=1]"); - - assertQ("Check the total number of docs", - req("q", "title:Mr") - , "//result[@numFound=1]"); - - assertQ("Check the total number of docs", - req("q", "count_i:20") - , "//result[@numFound=1]"); - - assertQ("Check the total number of docs", - req("q", "name_s:Virat") - , "//result[@numFound=1]"); - - assertQ("Check the total number of docs", - req("q", "multiDefault:Delhi") - , "//result[@numFound=1]"); - - params = new ModifiableSolrParams() - .add("processor", "atomic") - .add("atomic.cat", "add-distinct") - .add("atomic.title", "set") - .add("atomic.count_i", "inc") - .add("atomic.name_s", "remove") - .add("atomic.multiDefault", "removeregex") - .add("commit", "true"); + assertQ("Check the total number of docs", req("q", "id:1"), "//result[@numFound=1]"); + + assertQ("Check the total number of docs", req("q", "cat:human"), "//result[@numFound=1]"); + + assertQ("Check the total number of docs", req("q", "title:Mr"), "//result[@numFound=1]"); + + assertQ("Check the total number of docs", req("q", "count_i:20"), "//result[@numFound=1]"); + + assertQ("Check the total number of docs", req("q", "name_s:Virat"), "//result[@numFound=1]"); + + assertQ( + "Check the total number of docs", req("q", "multiDefault:Delhi"), "//result[@numFound=1]"); + + params = + new ModifiableSolrParams() + .add("processor", "atomic") + .add("atomic.cat", "add-distinct") + .add("atomic.title", "set") + .add("atomic.count_i", "inc") + .add("atomic.name_s", "remove") + .add("atomic.multiDefault", "removeregex") + .add("commit", "true"); try (SolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), params)) { AddUpdateCommand cmd = new AddUpdateCommand(req); 
cmd.solrDoc = new SolrInputDocument(); cmd.solrDoc.addField("id", 1); - cmd.solrDoc.addField("cat", Arrays.asList(new String[]{"human", "human", "animal", "animal"})); + cmd.solrDoc.addField( + "cat", Arrays.asList(new String[] {"human", "human", "animal", "animal"})); cmd.solrDoc.addField("title", "Dr"); cmd.solrDoc.addField("count_i", 20); cmd.solrDoc.addField("name_s", "Virat"); @@ -157,113 +147,98 @@ public void testBasics() throws Exception { assertU(commit()); - assertQ("Check the total number of docs", - req("q", "id:1") - , "//result[@numFound=1]"); - - assertQ("Check the total number of docs", - req("q", "cat:human") - , "//result[@numFound=1]"); + assertQ("Check the total number of docs", req("q", "id:1"), "//result[@numFound=1]"); - assertQ("Check the total number of docs", - req("q", "cat:animal") - , "//result[@numFound=1]"); + assertQ("Check the total number of docs", req("q", "cat:human"), "//result[@numFound=1]"); - assertQ(req("q", "id:1", "indent", "true"), - "//doc/arr[@name='cat'][count(str)=2]"); + assertQ("Check the total number of docs", req("q", "cat:animal"), "//result[@numFound=1]"); - assertQ("Check the total number of docs", - req("q", "title:Mr") - , "//result[@numFound=0]"); + assertQ(req("q", "id:1", "indent", "true"), "//doc/arr[@name='cat'][count(str)=2]"); - assertQ("Check the total number of docs", - req("q", "title:Dr") - , "//result[@numFound=1]"); + assertQ("Check the total number of docs", req("q", "title:Mr"), "//result[@numFound=0]"); - assertQ("Check the total number of docs", - req("q", "count_i:20") - , "//result[@numFound=0]"); + assertQ("Check the total number of docs", req("q", "title:Dr"), "//result[@numFound=1]"); - assertQ("Check the total number of docs", - req("q", "count_i:40") - , "//result[@numFound=1]"); + assertQ("Check the total number of docs", req("q", "count_i:20"), "//result[@numFound=0]"); - assertQ("Check the total number of docs", - req("q", "name_s:Virat") - , "//result[@numFound=0]"); + assertQ("Check the total number of docs", req("q", "count_i:40"), "//result[@numFound=1]"); - assertQ("Check the total number of docs", - req("q", "multiDefault:Delhi") - , "//result[@numFound=0]"); + assertQ("Check the total number of docs", req("q", "name_s:Virat"), "//result[@numFound=0]"); + assertQ( + "Check the total number of docs", req("q", "multiDefault:Delhi"), "//result[@numFound=0]"); } public void testMultipleThreads() throws Exception { clearIndex(); String[] strings = new String[5]; - for (int i=0; i<5; i++) { + for (int i = 0; i < 5; i++) { strings[i] = generateRandomString(); } List threads = new ArrayList<>(100); - int finalCount = 0; //int_i + int finalCount = 0; // int_i AtomicUpdateProcessorFactory factory = new AtomicUpdateProcessorFactory(); factory.inform(h.getCore()); for (int i = 0; i < 10; i++) { int index = random().nextInt(5); - Thread t = new Thread() { - @Override - public void run() { - ModifiableSolrParams params = new ModifiableSolrParams() - .add("processor", "atomic") - .add("atomic.cat", "add") - .add("atomic.int_i", "inc") - .add("commit","true"); - try (SolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), params)) { - AddUpdateCommand cmd = new AddUpdateCommand(req); - cmd.solrDoc = new SolrInputDocument(); - cmd.solrDoc.addField("id", 10); //hardcoded id=10 - cmd.solrDoc.addField("cat", strings[index]); - cmd.solrDoc.addField("int_i", index); - SolrQueryResponse rsp = new SolrQueryResponse(); - factory.getInstance(cmd.getReq(), new SolrQueryResponse(), - 
createDistributedUpdateProcessor(cmd.getReq(), rsp, - createRunUpdateProcessor(cmd.getReq(), rsp, null))).processAdd(cmd); - } catch (IOException e) { - } - } - }; + Thread t = + new Thread() { + @Override + public void run() { + ModifiableSolrParams params = + new ModifiableSolrParams() + .add("processor", "atomic") + .add("atomic.cat", "add") + .add("atomic.int_i", "inc") + .add("commit", "true"); + try (SolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), params)) { + AddUpdateCommand cmd = new AddUpdateCommand(req); + cmd.solrDoc = new SolrInputDocument(); + cmd.solrDoc.addField("id", 10); // hardcoded id=10 + cmd.solrDoc.addField("cat", strings[index]); + cmd.solrDoc.addField("int_i", index); + SolrQueryResponse rsp = new SolrQueryResponse(); + factory + .getInstance( + cmd.getReq(), + new SolrQueryResponse(), + createDistributedUpdateProcessor( + cmd.getReq(), rsp, createRunUpdateProcessor(cmd.getReq(), rsp, null))) + .processAdd(cmd); + } catch (IOException e) { + } + } + }; threads.add(t); t.start(); - finalCount += index; //int_i + finalCount += index; // int_i } - for (Thread thread: threads) thread.join(); + for (Thread thread : threads) thread.join(); assertU(commit()); - assertQ("Check the total number of docs", - req("q", "id:10"), "//result[@numFound=1]"); - + assertQ("Check the total number of docs", req("q", "id:10"), "//result[@numFound=1]"); StringJoiner queryString = new StringJoiner(" "); - for(String string: strings) { + for (String string : strings) { queryString.add(string); } - assertQ("Check the total number of docs", - req("q", "cat:(" + queryString.toString() + ")") - , "//result[@numFound=1]"); - - assertQ("Check the total number of docs", - req("q", "int_i:" + finalCount) - , "//result[@numFound=1]"); + assertQ( + "Check the total number of docs", + req("q", "cat:(" + queryString.toString() + ")"), + "//result[@numFound=1]"); + assertQ( + "Check the total number of docs", req("q", "int_i:" + finalCount), "//result[@numFound=1]"); } - private UpdateRequestProcessor createRunUpdateProcessor(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) { + private UpdateRequestProcessor createRunUpdateProcessor( + SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) { return new RunUpdateProcessorFactory().getInstance(req, rsp, next); } @@ -276,5 +251,4 @@ private String generateRandomString() { } return sb.toString(); } - } diff --git a/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java b/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java index fcedc1f4ecc..79de49763f8 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java @@ -16,12 +16,13 @@ */ package org.apache.solr.update.processor; +import static org.hamcrest.core.StringContains.containsString; + +import com.google.common.collect.ImmutableMap; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.List; - -import com.google.common.collect.ImmutableMap; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; @@ -32,8 +33,6 @@ import org.junit.Ignore; import org.junit.Test; -import static org.hamcrest.core.StringContains.containsString; - public class AtomicUpdatesTest extends SolrTestCaseJ4 { @BeforeClass @@ -47,59 +46,60 @@ public void before() { h.update("*:*"); assertU(commit()); } - + @Test 
public void testRemove() throws Exception { SolrInputDocument doc; doc = new SolrInputDocument(); doc.setField("id", "1"); - doc.setField("cat", new String[]{"aaa", "bbb", "ccc", "ccc", "ddd"}); + doc.setField("cat", new String[] {"aaa", "bbb", "ccc", "ccc", "ddd"}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "2"); - doc.setField("cat", new String[]{"aaa", "bbb", "bbb", "ccc", "ddd"}); + doc.setField("cat", new String[] {"aaa", "bbb", "bbb", "ccc", "ddd"}); assertU(adoc(doc)); - doc = new SolrInputDocument(); doc.setField("id", "20"); - doc.setField("cat", new String[]{"aaa", "ccc", "ddd"}); + doc.setField("cat", new String[] {"aaa", "ccc", "ddd"}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "21"); - doc.setField("cat", new String[]{"aaa", "bbb", "ddd"}); + doc.setField("cat", new String[] {"aaa", "bbb", "ddd"}); assertU(adoc(doc)); - assertU(commit()); assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '3']"); assertQ(req("q", "cat:ccc", "indent", "true"), "//result[@numFound = '3']"); - doc = new SolrInputDocument(); doc.setField("id", "1"); List removeList = new ArrayList(); removeList.add("bbb"); removeList.add("ccc"); - doc.setField("cat", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "cat", ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '2']"); - assertQ(req("q", "cat:ccc", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence + assertQ( + req("q", "cat:ccc", "indent", "true"), + "//result[@numFound = '3']"); // remove only removed first occurrence doc = new SolrInputDocument(); doc.setField("id", "21"); removeList = new ArrayList(); removeList.add("bbb"); removeList.add("ccc"); - doc.setField("cat", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "cat", ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); @@ -108,7 +108,7 @@ public void testRemove() throws Exception { doc = new SolrInputDocument(); doc.setField("id", "1"); - doc.setField("cat", ImmutableMap.of("remove", "aaa")); //behavior when hitting Solr directly + doc.setField("cat", ImmutableMap.of("remove", "aaa")); // behavior when hitting Solr directly assertU(adoc(doc)); assertU(commit()); @@ -123,24 +123,23 @@ public void testRemoveInteger() throws Exception { doc = new SolrInputDocument(); doc.setField("id", "1001"); - doc.setField("intRemove", new String[]{"111", "222", "333", "333", "444"}); - + doc.setField("intRemove", new String[] {"111", "222", "333", "333", "444"}); + assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "1002"); - doc.setField("intRemove", new String[]{"111", "222", "222", "333", "444"}); + doc.setField("intRemove", new String[] {"111", "222", "222", "333", "444"}); assertU(adoc(doc)); - doc = new SolrInputDocument(); doc.setField("id", "1020"); - doc.setField("intRemove", new String[]{"111", "333", "444"}); + doc.setField("intRemove", new String[] {"111", "333", "444"}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "1021"); - doc.setField("intRemove", new String[]{"111", "222", "444"}); + 
doc.setField("intRemove", new String[] {"111", "222", "444"}); assertU(adoc(doc)); assertU(commit()); @@ -149,26 +148,31 @@ public void testRemoveInteger() throws Exception { assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '3']"); assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); - doc = new SolrInputDocument(); doc.setField("id", "1001"); List removeList = new ArrayList(); removeList.add(222L); removeList.add(333L); - doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "intRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']"); - assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence + assertQ( + req("q", "intRemove:333", "indent", "true"), + "//result[@numFound = '3']"); // remove only removed first occurrence doc = new SolrInputDocument(); doc.setField("id", "1021"); removeList = new ArrayList(); removeList.add(222L); removeList.add(333L); - doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "intRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); @@ -177,7 +181,8 @@ public void testRemoveInteger() throws Exception { doc = new SolrInputDocument(); doc.setField("id", "1001"); - doc.setField("intRemove", ImmutableMap.of("remove", 111)); //behavior when hitting Solr directly + doc.setField( + "intRemove", ImmutableMap.of("remove", 111)); // behavior when hitting Solr directly assertU(adoc(doc)); assertU(commit()); @@ -185,7 +190,8 @@ public void testRemoveInteger() throws Exception { assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:111", "indent", "true"), "//result[@numFound = '3']"); - // Test that mv int fields can have values removed prior to being committed to index (see SOLR-14971) + // Test that mv int fields can have values removed prior to being committed to index (see + // SOLR-14971) doc = new SolrInputDocument(); doc.setField("id", "4242"); doc.setField("values_is", new String[] {"111", "222", "333"}); @@ -202,31 +208,29 @@ public void testRemoveInteger() throws Exception { assertQ(req("q", "values_is:333", "indent", "true"), "//result[@numFound = '1']"); } - @Test public void testRemoveIntegerInDocSavedWithInteger() throws Exception { SolrInputDocument doc; doc = new SolrInputDocument(); doc.setField("id", "1001"); - doc.setField("intRemove", new Integer[]{111, 222, 333, 333, 444}); - + doc.setField("intRemove", new Integer[] {111, 222, 333, 333, 444}); + assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "1002"); - doc.setField("intRemove", new Integer[]{111, 222, 222, 333, 444}); + doc.setField("intRemove", new Integer[] {111, 222, 222, 333, 444}); assertU(adoc(doc)); - doc = new SolrInputDocument(); doc.setField("id", "1020"); - doc.setField("intRemove", new Integer[]{111, 333, 444}); + doc.setField("intRemove", new Integer[] {111, 333, 444}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "1021"); - doc.setField("intRemove", new Integer[]{111, 222, 444}); + 
doc.setField("intRemove", new Integer[] {111, 222, 444}); assertU(adoc(doc)); assertU(commit()); @@ -235,26 +239,31 @@ public void testRemoveIntegerInDocSavedWithInteger() throws Exception { assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '3']"); assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); - doc = new SolrInputDocument(); doc.setField("id", "1001"); List removeList = new ArrayList(); removeList.add(222L); removeList.add(333L); - doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "intRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']"); - assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence + assertQ( + req("q", "intRemove:333", "indent", "true"), + "//result[@numFound = '3']"); // remove only removed first occurrence doc = new SolrInputDocument(); doc.setField("id", "1021"); removeList = new ArrayList(); removeList.add(222L); removeList.add(333L); - doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "intRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); @@ -263,7 +272,8 @@ public void testRemoveIntegerInDocSavedWithInteger() throws Exception { doc = new SolrInputDocument(); doc.setField("id", "1001"); - doc.setField("intRemove", ImmutableMap.of("remove", 111)); //behavior when hitting Solr directly + doc.setField( + "intRemove", ImmutableMap.of("remove", 111)); // behavior when hitting Solr directly assertU(adoc(doc)); assertU(commit()); @@ -271,7 +281,8 @@ public void testRemoveIntegerInDocSavedWithInteger() throws Exception { assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:111", "indent", "true"), "//result[@numFound = '3']"); - // Test that mv int fields can have values removed prior to being committed to index (see SOLR-14971) + // Test that mv int fields can have values removed prior to being committed to index (see + // SOLR-14971) doc = new SolrInputDocument(); doc.setField("id", "4242"); doc.setField("values_is", new Integer[] {111, 222, 333}); @@ -294,22 +305,22 @@ public void testRemoveIntegerUsingStringType() throws Exception { doc = new SolrInputDocument(); doc.setField("id", "1001"); - doc.setField("intRemove", new String[]{"111", "222", "333", "333", "444"}); + doc.setField("intRemove", new String[] {"111", "222", "333", "333", "444"}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "1002"); - doc.setField("intRemove", new String[]{"111", "222", "222", "333", "444"}); + doc.setField("intRemove", new String[] {"111", "222", "222", "333", "444"}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "1020"); - doc.setField("intRemove", new String[]{"111", "333", "444"}); + doc.setField("intRemove", new String[] {"111", "333", "444"}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "1021"); - doc.setField("intRemove", new String[]{"111", "222", "444"}); + doc.setField("intRemove", new String[] {"111", "222", "444"}); assertU(adoc(doc)); 
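The "remove" tests in this file all pivot on the same request shape: the multivalued field is set to a map from the operation name to the value (or list of values) to drop, and, as the inline comments note, "remove" deletes only the first occurrence of each listed value, while "removeregex" later in the file drops every whole-value match. A self-contained sketch of the remove request (scaffolding is illustrative, not from the patch):

    import java.util.List;
    import java.util.Map;
    import org.apache.solr.common.SolrInputDocument;

    public class AtomicRemoveSketch {
      public static void main(String[] args) {
        // Atomic update for doc 1001, whose intRemove currently holds
        // {111, 222, 333, 333, 444}. Removing [222, 333] deletes one
        // occurrence of each, leaving {111, 333, 444} -- which is why the
        // tests still expect intRemove:333 to match after the update.
        SolrInputDocument doc = new SolrInputDocument();
        doc.setField("id", "1001");
        doc.setField("intRemove", Map.of("remove", List.of(222, 333)));
        System.out.println(doc);
      }
    }
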
assertU(commit()); @@ -317,26 +328,31 @@ public void testRemoveIntegerUsingStringType() throws Exception { assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '3']"); assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); - doc = new SolrInputDocument(); doc.setField("id", "1001"); List removeList = new ArrayList(); removeList.add("222"); removeList.add("333"); - doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "intRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']"); - assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence + assertQ( + req("q", "intRemove:333", "indent", "true"), + "//result[@numFound = '3']"); // remove only removed first occurrence doc = new SolrInputDocument(); doc.setField("id", "1021"); removeList = new ArrayList(); removeList.add("222"); removeList.add("333"); - doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "intRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); @@ -345,7 +361,8 @@ public void testRemoveIntegerUsingStringType() throws Exception { doc = new SolrInputDocument(); doc.setField("id", "1001"); - doc.setField("intRemove", ImmutableMap.of("remove", "111")); //behavior when hitting Solr directly + doc.setField( + "intRemove", ImmutableMap.of("remove", "111")); // behavior when hitting Solr directly assertU(adoc(doc)); assertU(commit()); @@ -360,22 +377,22 @@ public void testRemoveIntegerUsingLongType() throws Exception { doc = new SolrInputDocument(); doc.setField("id", "1001"); - doc.setField("intRemove", new Long[]{111L, 222L, 333L, 333L, 444L}); + doc.setField("intRemove", new Long[] {111L, 222L, 333L, 333L, 444L}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "1002"); - doc.setField("intRemove", new Long[]{111L, 222L, 222L, 333L, 444L}); + doc.setField("intRemove", new Long[] {111L, 222L, 222L, 333L, 444L}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "1020"); - doc.setField("intRemove", new Long[]{111L, 333L, 444L}); + doc.setField("intRemove", new Long[] {111L, 333L, 444L}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "1021"); - doc.setField("intRemove", new Long[]{111L, 222L, 444L}); + doc.setField("intRemove", new Long[] {111L, 222L, 444L}); assertU(adoc(doc)); assertU(commit()); @@ -389,20 +406,26 @@ public void testRemoveIntegerUsingLongType() throws Exception { List removeList = new ArrayList(); removeList.add(222L); removeList.add(333L); - doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "intRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']"); - assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence 
+ assertQ( + req("q", "intRemove:333", "indent", "true"), + "//result[@numFound = '3']"); // remove only removed first occurrence doc = new SolrInputDocument(); doc.setField("id", "1021"); removeList = new ArrayList(); removeList.add(222L); removeList.add(333L); - doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "intRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); @@ -411,7 +434,8 @@ public void testRemoveIntegerUsingLongType() throws Exception { doc = new SolrInputDocument(); doc.setField("id", "1001"); - doc.setField("intRemove", ImmutableMap.of("remove", 111L)); //behavior when hitting Solr directly + doc.setField( + "intRemove", ImmutableMap.of("remove", 111L)); // behavior when hitting Solr directly assertU(adoc(doc)); assertU(commit()); @@ -420,48 +444,47 @@ public void testRemoveIntegerUsingLongType() throws Exception { assertQ(req("q", "intRemove:111", "indent", "true"), "//result[@numFound = '3']"); } - @Test public void testRemoveIntegerUsingFloatType() throws Exception { SolrInputDocument doc; doc = new SolrInputDocument(); -// add with float in integer field -// doc.setField("id", "1001"); -// doc.setField("intRemove", new Float[]{111.10F, 222.20F, 333.30F, 333.30F, 444.40F}); -// assertU(adoc(doc)); -// -// doc = new SolrInputDocument(); -// doc.setField("id", "1002"); -// doc.setField("intRemove", new Float[]{111.10F, 222.20F, 222.20F, 333.30F, 444.40F}); -// assertU(adoc(doc)); -// -// doc = new SolrInputDocument(); -// doc.setField("id", "1020"); -// doc.setField("intRemove", new Float[]{111.10F, 333.30F, 444.40F}); -// assertU(adoc(doc)); -// -// doc = new SolrInputDocument(); -// doc.setField("id", "1021"); -// doc.setField("intRemove", new Float[]{111.10F, 222.20F, 444.40F}); + // add with float in integer field + // doc.setField("id", "1001"); + // doc.setField("intRemove", new Float[]{111.10F, 222.20F, 333.30F, 333.30F, 444.40F}); + // assertU(adoc(doc)); + // + // doc = new SolrInputDocument(); + // doc.setField("id", "1002"); + // doc.setField("intRemove", new Float[]{111.10F, 222.20F, 222.20F, 333.30F, 444.40F}); + // assertU(adoc(doc)); + // + // doc = new SolrInputDocument(); + // doc.setField("id", "1020"); + // doc.setField("intRemove", new Float[]{111.10F, 333.30F, 444.40F}); + // assertU(adoc(doc)); + // + // doc = new SolrInputDocument(); + // doc.setField("id", "1021"); + // doc.setField("intRemove", new Float[]{111.10F, 222.20F, 444.40F}); doc.setField("id", "1001"); - doc.setField("intRemove", new String[]{"111", "222", "333", "333", "444"}); + doc.setField("intRemove", new String[] {"111", "222", "333", "333", "444"}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "1002"); - doc.setField("intRemove", new String[]{"111", "222", "222", "333", "444"}); + doc.setField("intRemove", new String[] {"111", "222", "222", "333", "444"}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "1020"); - doc.setField("intRemove", new String[]{"111", "333", "444"}); + doc.setField("intRemove", new String[] {"111", "333", "444"}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "1021"); - doc.setField("intRemove", new String[]{"111", "222", "444"}); + doc.setField("intRemove", new String[] {"111", "222", "444"}); assertU(adoc(doc)); assertU(commit()); @@ -470,26 +493,31 @@ public void testRemoveIntegerUsingFloatType() throws Exception { assertQ(req("q", 
"intRemove:222", "indent", "true"), "//result[@numFound = '3']"); assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); - doc = new SolrInputDocument(); doc.setField("id", "1001"); List removeList = new ArrayList(); removeList.add(222.20F); removeList.add(333.30F); - doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "intRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']"); - assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence + assertQ( + req("q", "intRemove:333", "indent", "true"), + "//result[@numFound = '3']"); // remove only removed first occurrence doc = new SolrInputDocument(); doc.setField("id", "1021"); removeList = new ArrayList(); removeList.add(222.20F); removeList.add(333.30F); - doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "intRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); @@ -498,7 +526,8 @@ public void testRemoveIntegerUsingFloatType() throws Exception { doc = new SolrInputDocument(); doc.setField("id", "1001"); - doc.setField("intRemove", ImmutableMap.of("remove", 111L)); //behavior when hitting Solr directly + doc.setField( + "intRemove", ImmutableMap.of("remove", 111L)); // behavior when hitting Solr directly assertU(adoc(doc)); assertU(commit()); @@ -506,7 +535,6 @@ public void testRemoveIntegerUsingFloatType() throws Exception { assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:111", "indent", "true"), "//result[@numFound = '3']"); } - @Test public void testRemoveIntegerUsingDoubleType() throws Exception { @@ -514,22 +542,24 @@ public void testRemoveIntegerUsingDoubleType() throws Exception { doc = new SolrInputDocument(); doc.setField("id", "1001"); - doc.setField("intRemove", new String[]{"11111111", "22222222", "33333333", "33333333", "44444444"}); + doc.setField( + "intRemove", new String[] {"11111111", "22222222", "33333333", "33333333", "44444444"}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "1002"); - doc.setField("intRemove", new String[]{"11111111", "22222222", "22222222", "33333333", "44444444"}); + doc.setField( + "intRemove", new String[] {"11111111", "22222222", "22222222", "33333333", "44444444"}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "1020"); - doc.setField("intRemove", new String[]{"11111111", "33333333", "44444444"}); + doc.setField("intRemove", new String[] {"11111111", "33333333", "44444444"}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "1021"); - doc.setField("intRemove", new String[]{"11111111", "22222222", "44444444"}); + doc.setField("intRemove", new String[] {"11111111", "22222222", "44444444"}); assertU(adoc(doc)); assertU(commit()); @@ -538,26 +568,31 @@ public void testRemoveIntegerUsingDoubleType() throws Exception { assertQ(req("q", "intRemove:22222222", "indent", "true"), "//result[@numFound = '3']"); assertQ(req("q", "intRemove:33333333", "indent", "true"), "//result[@numFound = '3']"); - doc = new 
SolrInputDocument(); doc.setField("id", "1001"); List removeList = new ArrayList(); removeList.add(22222222D); removeList.add(33333333D); - doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "intRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:22222222", "indent", "true"), "//result[@numFound = '2']"); - assertQ(req("q", "intRemove:33333333", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence + assertQ( + req("q", "intRemove:33333333", "indent", "true"), + "//result[@numFound = '3']"); // remove only removed first occurrence doc = new SolrInputDocument(); doc.setField("id", "1021"); removeList = new ArrayList(); removeList.add(22222222D); removeList.add(33333333D); - doc.setField("intRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "intRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); @@ -566,7 +601,8 @@ public void testRemoveIntegerUsingDoubleType() throws Exception { doc = new SolrInputDocument(); doc.setField("id", "1001"); - doc.setField("intRemove", ImmutableMap.of("remove", 11111111D)); //behavior when hitting Solr directly + doc.setField( + "intRemove", ImmutableMap.of("remove", 11111111D)); // behavior when hitting Solr directly assertU(adoc(doc)); assertU(commit()); @@ -574,41 +610,66 @@ public void testRemoveIntegerUsingDoubleType() throws Exception { assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "intRemove:11111111", "indent", "true"), "//result[@numFound = '3']"); } - + @Test public void testRemoveDateUsingStringType() throws Exception { SolrInputDocument doc; doc = new SolrInputDocument(); doc.setField("id", "10001"); - doc.setField("dateRemove", new String[]{"2014-09-01T12:00:00Z", "2014-09-02T12:00:00Z", "2014-09-03T12:00:00Z", "2014-09-03T12:00:00Z", "2014-09-04T12:00:00Z"}); + doc.setField( + "dateRemove", + new String[] { + "2014-09-01T12:00:00Z", + "2014-09-02T12:00:00Z", + "2014-09-03T12:00:00Z", + "2014-09-03T12:00:00Z", + "2014-09-04T12:00:00Z" + }); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "10002"); - doc.setField("dateRemove", new String[]{"2014-09-01T12:00:00Z", "2014-09-02T12:00:00Z", "2014-09-02T12:00:00Z", "2014-09-03T12:00:00Z", "2014-09-04T12:00:00Z"}); + doc.setField( + "dateRemove", + new String[] { + "2014-09-01T12:00:00Z", + "2014-09-02T12:00:00Z", + "2014-09-02T12:00:00Z", + "2014-09-03T12:00:00Z", + "2014-09-04T12:00:00Z" + }); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "10020"); - doc.setField("dateRemove", new String[]{"2014-09-01T12:00:00Z", "2014-09-03T12:00:00Z", "2014-09-04T12:00:00Z"}); + doc.setField( + "dateRemove", + new String[] {"2014-09-01T12:00:00Z", "2014-09-03T12:00:00Z", "2014-09-04T12:00:00Z"}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "10021"); - doc.setField("dateRemove", new String[]{"2014-09-01T12:00:00Z", "2014-09-02T12:00:00Z", "2014-09-04T12:00:00Z"}); + doc.setField( + "dateRemove", + new String[] {"2014-09-01T12:00:00Z", "2014-09-02T12:00:00Z", "2014-09-04T12:00:00Z"}); assertU(adoc(doc)); assertU(commit()); - boolean isPointField = 
h.getCore().getLatestSchema().getField("dateRemove").getType().isPointField(); + boolean isPointField = + h.getCore().getLatestSchema().getField("dateRemove").getType().isPointField(); if (isPointField) { assertQ(req("q", "dateRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); } else { assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']"); } - assertQ(req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), "//result[@numFound = '3']"); - assertQ(req("q", "dateRemove:\"2014-09-03T12:00:00Z\"", "indent", "true"), "//result[@numFound = '3']"); + assertQ( + req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), + "//result[@numFound = '3']"); + assertQ( + req("q", "dateRemove:\"2014-09-03T12:00:00Z\"", "indent", "true"), + "//result[@numFound = '3']"); doc = new SolrInputDocument(); doc.setField("id", "10001"); @@ -616,7 +677,9 @@ public void testRemoveDateUsingStringType() throws Exception { removeList.add("2014-09-02T12:00:00Z"); removeList.add("2014-09-03T12:00:00Z"); - doc.setField("dateRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "dateRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); @@ -625,14 +688,18 @@ public void testRemoveDateUsingStringType() throws Exception { } else { assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']"); } - assertQ(req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), "//result[@numFound = '2']"); + assertQ( + req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), + "//result[@numFound = '2']"); doc = new SolrInputDocument(); doc.setField("id", "10021"); removeList = new ArrayList(); removeList.add("2014-09-02T12:00:00Z"); removeList.add("2014-09-03T12:00:00Z"); - doc.setField("dateRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "dateRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); @@ -641,11 +708,15 @@ public void testRemoveDateUsingStringType() throws Exception { } else { assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']"); } - assertQ(req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), "//result[@numFound = '1']"); + assertQ( + req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), + "//result[@numFound = '1']"); doc = new SolrInputDocument(); doc.setField("id", "10001"); - doc.setField("dateRemove", ImmutableMap.of("remove", "2014-09-01T12:00:00Z")); //behavior when hitting Solr directly + doc.setField( + "dateRemove", + ImmutableMap.of("remove", "2014-09-01T12:00:00Z")); // behavior when hitting Solr directly assertU(adoc(doc)); assertU(commit()); @@ -655,9 +726,11 @@ public void testRemoveDateUsingStringType() throws Exception { } else { assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']"); } - assertQ(req("q", "dateRemove:\"2014-09-01T12:00:00Z\"", "indent", "true"), "//result[@numFound = '3']"); + assertQ( + req("q", "dateRemove:\"2014-09-01T12:00:00Z\"", "indent", "true"), + "//result[@numFound = '3']"); } - + @Ignore("Remove Date is not supported in other formats than UTC") @Test public void testRemoveDateUsingDateType() throws Exception { @@ -666,37 +739,49 @@ public void testRemoveDateUsingDateType() throws Exception { doc = new SolrInputDocument(); doc.setField("id", 
"10001"); Date tempDate = DateMathParser.parseMath(null, "2014-02-01T12:00:00Z"); - doc.setField("dateRemove", new Date[]{DateMathParser.parseMath(null, "2014-02-01T12:00:00Z"), - DateMathParser.parseMath(null, "2014-07-02T12:00:00Z"), - DateMathParser.parseMath(null, "2014-02-03T12:00:00Z"), - DateMathParser.parseMath(null, "2014-02-03T12:00:00Z"), - DateMathParser.parseMath(null, "2014-02-04T12:00:00Z") + doc.setField( + "dateRemove", + new Date[] { + DateMathParser.parseMath(null, "2014-02-01T12:00:00Z"), + DateMathParser.parseMath(null, "2014-07-02T12:00:00Z"), + DateMathParser.parseMath(null, "2014-02-03T12:00:00Z"), + DateMathParser.parseMath(null, "2014-02-03T12:00:00Z"), + DateMathParser.parseMath(null, "2014-02-04T12:00:00Z") }); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "10002"); - doc.setField("dateRemove", new Date[]{DateMathParser.parseMath(null, "2014-02-01T12:00:00Z"), - DateMathParser.parseMath(null, "2014-07-02T12:00:00Z"), - DateMathParser.parseMath(null, "2014-02-02T12:00:00Z"), - DateMathParser.parseMath(null, "2014-02-03T12:00:00Z"), - DateMathParser.parseMath(null, "2014-02-04T12:00:00Z") + doc.setField( + "dateRemove", + new Date[] { + DateMathParser.parseMath(null, "2014-02-01T12:00:00Z"), + DateMathParser.parseMath(null, "2014-07-02T12:00:00Z"), + DateMathParser.parseMath(null, "2014-02-02T12:00:00Z"), + DateMathParser.parseMath(null, "2014-02-03T12:00:00Z"), + DateMathParser.parseMath(null, "2014-02-04T12:00:00Z") }); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "10020"); - doc.setField("dateRemove", new Date[]{DateMathParser.parseMath(null, "2014-02-01T12:00:00Z"), - DateMathParser.parseMath(null, "2014-02-03T12:00:00Z"), - DateMathParser.parseMath(null, "2014-02-04T12:00:00Z") + doc.setField( + "dateRemove", + new Date[] { + DateMathParser.parseMath(null, "2014-02-01T12:00:00Z"), + DateMathParser.parseMath(null, "2014-02-03T12:00:00Z"), + DateMathParser.parseMath(null, "2014-02-04T12:00:00Z") }); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "10021"); - doc.setField("dateRemove", new Date[]{DateMathParser.parseMath(null, "2014-02-01T12:00:00Z"), - DateMathParser.parseMath(null, "2014-02-02T12:00:00Z"), - DateMathParser.parseMath(null, "2014-02-04T12:00:00Z") + doc.setField( + "dateRemove", + new Date[] { + DateMathParser.parseMath(null, "2014-02-01T12:00:00Z"), + DateMathParser.parseMath(null, "2014-02-02T12:00:00Z"), + DateMathParser.parseMath(null, "2014-02-04T12:00:00Z") }); assertU(adoc(doc)); @@ -704,11 +789,15 @@ public void testRemoveDateUsingDateType() throws Exception { assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']"); String dateString = DateMathParser.parseMath(null, "2014-02-02T12:00:00Z").toString(); -// assertQ(req("q", "dateRemove:"+URLEncoder.encode(dateString, "UTF-8"), "indent", "true"), "//result[@numFound = '3']"); -// assertQ(req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), "//result[@numFound = '3']"); -// assertQ(req("q", "dateRemove:"+dateString, "indent", "true"), "//result[@numFound = '3']"); //Sun Feb 02 10:00:00 FNT 2014 - assertQ(req("q", "dateRemove:\"Sun Feb 02 10:00:00 FNT 2014\"", "indent", "true"), "//result[@numFound = '3']"); //Sun Feb 02 10:00:00 FNT 2014 - + // assertQ(req("q", "dateRemove:"+URLEncoder.encode(dateString, "UTF-8"), "indent", "true"), + // "//result[@numFound = '3']"); + // assertQ(req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), + // "//result[@numFound = '3']"); + 
// assertQ(req("q", "dateRemove:"+dateString, "indent", "true"), "//result[@numFound = + // '3']"); //Sun Feb 02 10:00:00 FNT 2014 + assertQ( + req("q", "dateRemove:\"Sun Feb 02 10:00:00 FNT 2014\"", "indent", "true"), + "//result[@numFound = '3']"); // Sun Feb 02 10:00:00 FNT 2014 doc = new SolrInputDocument(); doc.setField("id", "10001"); @@ -716,61 +805,77 @@ public void testRemoveDateUsingDateType() throws Exception { removeList.add(DateMathParser.parseMath(null, "2014-09-02T12:00:00Z")); removeList.add(DateMathParser.parseMath(null, "2014-09-03T12:00:00Z")); - doc.setField("dateRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "dateRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']"); - assertQ(req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), "//result[@numFound = '2']"); - assertQ(req("q", "dateRemove:\"2014-09-03T12:00:00Z\"", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence + assertQ( + req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), + "//result[@numFound = '2']"); + assertQ( + req("q", "dateRemove:\"2014-09-03T12:00:00Z\"", "indent", "true"), + "//result[@numFound = '3']"); // remove only removed first occurrence doc = new SolrInputDocument(); doc.setField("id", "10021"); removeList = new ArrayList(); removeList.add(DateMathParser.parseMath(null, "2014-09-02T12:00:00Z")); removeList.add(DateMathParser.parseMath(null, "2014-09-03T12:00:00Z")); - doc.setField("dateRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "dateRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']"); - assertQ(req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), "//result[@numFound = '1']"); + assertQ( + req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), + "//result[@numFound = '1']"); doc = new SolrInputDocument(); doc.setField("id", "10001"); - doc.setField("dateRemove", ImmutableMap.of("remove", DateMathParser.parseMath(null, "2014-09-01T12:00:00Z"))); //behavior when hitting Solr directly + doc.setField( + "dateRemove", + ImmutableMap.of( + "remove", + DateMathParser.parseMath( + null, "2014-09-01T12:00:00Z"))); // behavior when hitting Solr directly assertU(adoc(doc)); assertU(commit()); assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']"); - assertQ(req("q", "dateRemove:\"2014-09-01T12:00:00Z\"", "indent", "true"), "//result[@numFound = '3']"); + assertQ( + req("q", "dateRemove:\"2014-09-01T12:00:00Z\"", "indent", "true"), + "//result[@numFound = '3']"); } - + @Test public void testRemoveFloatUsingFloatType() throws Exception { SolrInputDocument doc; doc = new SolrInputDocument(); doc.setField("id", "10001"); - doc.setField("floatRemove", new Float[]{111.111F, 222.222F, 333.333F, 333.333F, 444.444F}); + doc.setField("floatRemove", new Float[] {111.111F, 222.222F, 333.333F, 333.333F, 444.444F}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "10002"); - doc.setField("floatRemove", new Float[]{111.111F, 222.222F, 222.222F, 333.333F, 444.444F}); + doc.setField("floatRemove", new Float[] {111.111F, 222.222F, 222.222F, 333.333F, 
444.444F}); assertU(adoc(doc)); - doc = new SolrInputDocument(); doc.setField("id", "10020"); - doc.setField("floatRemove", new Float[]{111.111F, 333.333F, 444.444F}); + doc.setField("floatRemove", new Float[] {111.111F, 333.333F, 444.444F}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "10021"); - doc.setField("floatRemove", new Float[]{111.111F, 222.222F, 444.444F}); + doc.setField("floatRemove", new Float[] {111.111F, 222.222F, 444.444F}); assertU(adoc(doc)); assertU(commit()); @@ -778,14 +883,15 @@ public void testRemoveFloatUsingFloatType() throws Exception { assertQ(req("q", "floatRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "floatRemove:\"222.222\"", "indent", "true"), "//result[@numFound = '3']"); - doc = new SolrInputDocument(); doc.setField("id", "10001"); List removeList = new ArrayList(); removeList.add(222.222F); removeList.add(333.333F); - doc.setField("floatRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "floatRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); @@ -797,7 +903,9 @@ public void testRemoveFloatUsingFloatType() throws Exception { removeList = new ArrayList(); removeList.add(222.222F); removeList.add(333.333F); - doc.setField("floatRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "floatRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); @@ -806,7 +914,8 @@ public void testRemoveFloatUsingFloatType() throws Exception { doc = new SolrInputDocument(); doc.setField("id", "10001"); - doc.setField("floatRemove", ImmutableMap.of("remove", "111.111")); //behavior when hitting Solr directly + doc.setField( + "floatRemove", ImmutableMap.of("remove", "111.111")); // behavior when hitting Solr directly assertU(adoc(doc)); assertU(commit()); @@ -814,31 +923,32 @@ public void testRemoveFloatUsingFloatType() throws Exception { assertQ(req("q", "floatRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "floatRemove:\"111.111\"", "indent", "true"), "//result[@numFound = '3']"); } - + @Test public void testRemoveFloatUsingStringType() throws Exception { SolrInputDocument doc; doc = new SolrInputDocument(); doc.setField("id", "10001"); - doc.setField("floatRemove", new String[]{"111.111", "222.222", "333.333", "333.333", "444.444"}); + doc.setField( + "floatRemove", new String[] {"111.111", "222.222", "333.333", "333.333", "444.444"}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "10002"); - doc.setField("floatRemove", new String[]{"111.111", "222.222", "222.222", "333.333", "444.444"}); + doc.setField( + "floatRemove", new String[] {"111.111", "222.222", "222.222", "333.333", "444.444"}); assertU(adoc(doc)); - doc = new SolrInputDocument(); doc.setField("id", "10020"); - doc.setField("floatRemove", new String[]{"111.111", "333.333", "444.444"}); + doc.setField("floatRemove", new String[] {"111.111", "333.333", "444.444"}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "10021"); - doc.setField("floatRemove", new String[]{"111.111", "222.222", "444.444"}); + doc.setField("floatRemove", new String[] {"111.111", "222.222", "444.444"}); assertU(adoc(doc)); assertU(commit()); @@ -847,27 +957,32 @@ public void testRemoveFloatUsingStringType() throws Exception { 
assertQ(req("q", "floatRemove:\"222.222\"", "indent", "true"), "//result[@numFound = '3']"); assertQ(req("q", "floatRemove:\"333.333\"", "indent", "true"), "//result[@numFound = '3']"); - doc = new SolrInputDocument(); doc.setField("id", "10001"); List removeList = new ArrayList(); removeList.add("222.222"); removeList.add("333.333"); - doc.setField("floatRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "floatRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); assertQ(req("q", "floatRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "floatRemove:\"222.222\"", "indent", "true"), "//result[@numFound = '2']"); - assertQ(req("q", "floatRemove:\"333.333\"", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence + assertQ( + req("q", "floatRemove:\"333.333\"", "indent", "true"), + "//result[@numFound = '3']"); // remove only removed first occurrence doc = new SolrInputDocument(); doc.setField("id", "10021"); removeList = new ArrayList(); removeList.add("222.222"); removeList.add("333.333"); - doc.setField("floatRemove", ImmutableMap.of("remove", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "floatRemove", + ImmutableMap.of("remove", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); @@ -876,7 +991,8 @@ public void testRemoveFloatUsingStringType() throws Exception { doc = new SolrInputDocument(); doc.setField("id", "10001"); - doc.setField("floatRemove", ImmutableMap.of("remove", "111.111")); //behavior when hitting Solr directly + doc.setField( + "floatRemove", ImmutableMap.of("remove", "111.111")); // behavior when hitting Solr directly assertU(adoc(doc)); assertU(commit()); @@ -891,52 +1007,53 @@ public void testRemoveregex() throws Exception { doc = new SolrInputDocument(); doc.setField("id", "1"); - doc.setField("cat", new String[]{"aaa", "bbb", "ccc", "ccc", "ddd"}); + doc.setField("cat", new String[] {"aaa", "bbb", "ccc", "ccc", "ddd"}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "2"); - doc.setField("cat", new String[]{"aaa", "bbb", "bbb", "ccc", "ddd"}); + doc.setField("cat", new String[] {"aaa", "bbb", "bbb", "ccc", "ddd"}); assertU(adoc(doc)); - doc = new SolrInputDocument(); doc.setField("id", "20"); - doc.setField("cat", new String[]{"aaa", "ccc", "ddd"}); + doc.setField("cat", new String[] {"aaa", "ccc", "ddd"}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "21"); - doc.setField("cat", new String[]{"aaa", "bbb", "ddd"}); + doc.setField("cat", new String[] {"aaa", "bbb", "ddd"}); assertU(adoc(doc)); - assertU(commit()); assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '3']"); assertQ(req("q", "cat:ccc", "indent", "true"), "//result[@numFound = '3']"); - doc = new SolrInputDocument(); doc.setField("id", "1"); List removeList = new ArrayList<>(); removeList.add(".b."); removeList.add("c+c"); - doc.setField("cat", ImmutableMap.of("removeregex", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "cat", ImmutableMap.of("removeregex", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '4']"); assertQ(req("q", "cat:bbb", "indent", "true"), 
"//result[@numFound = '2']"); - assertQ(req("q", "cat:ccc", "indent", "true"), "//result[@numFound = '2']"); // removeregex does remove all occurrences + assertQ( + req("q", "cat:ccc", "indent", "true"), + "//result[@numFound = '2']"); // removeregex does remove all occurrences doc = new SolrInputDocument(); doc.setField("id", "21"); removeList = new ArrayList<>(); removeList.add("bb*"); removeList.add("cc+"); - doc.setField("cat", ImmutableMap.of("removeregex", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "cat", ImmutableMap.of("removeregex", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); @@ -945,7 +1062,8 @@ public void testRemoveregex() throws Exception { doc = new SolrInputDocument(); doc.setField("id", "1"); - doc.setField("cat", ImmutableMap.of("removeregex", "a.a")); //behavior when hitting Solr directly + doc.setField( + "cat", ImmutableMap.of("removeregex", "a.a")); // behavior when hitting Solr directly assertU(adoc(doc)); assertU(commit()); @@ -960,47 +1078,52 @@ public void testRemoveregexMustMatchWholeValue() throws Exception { doc = new SolrInputDocument(); doc.setField("id", "1"); - doc.setField("cat", new String[]{"aaa", "bbb", "ccc", "ccc", "ddd"}); + doc.setField("cat", new String[] {"aaa", "bbb", "ccc", "ccc", "ddd"}); assertU(adoc(doc)); assertU(commit()); assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']"); assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '1']"); - doc = new SolrInputDocument(); doc.setField("id", "1"); List removeList = new ArrayList<>(); removeList.add("bb"); - doc.setField("cat", ImmutableMap.of("removeregex", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "cat", ImmutableMap.of("removeregex", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']"); - assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '1']"); // Was not removed - regex didn't match whole value + assertQ( + req("q", "cat:bbb", "indent", "true"), + "//result[@numFound = '1']"); // Was not removed - regex didn't match whole value doc = new SolrInputDocument(); doc.setField("id", "1"); removeList = new ArrayList<>(); removeList.add("bbb"); - doc.setField("cat", ImmutableMap.of("removeregex", removeList)); //behavior when hitting Solr through ZK + doc.setField( + "cat", ImmutableMap.of("removeregex", removeList)); // behavior when hitting Solr through ZK assertU(adoc(doc)); assertU(commit()); assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']"); - assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '0']"); // Was removed now - regex matches + assertQ( + req("q", "cat:bbb", "indent", "true"), + "//result[@numFound = '0']"); // Was removed now - regex matches } @Test public void testAdd() throws Exception { SolrInputDocument doc = new SolrInputDocument(); doc.setField("id", "3"); - doc.setField("cat", new String[]{"aaa", "ccc"}); + doc.setField("cat", new String[] {"aaa", "ccc"}); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "4"); - doc.setField("cat", new String[]{"aaa", "ccc"}); + doc.setField("cat", new String[] {"aaa", "ccc"}); assertU(adoc(doc)); assertU(commit()); @@ -1012,7 +1135,6 @@ public void testAdd() throws Exception { assertQ(req("q", "cat:*", "indent", "true", "fl", "id"), "//result[@numFound = '2']"); assertQ(req("q", "cat:bbb", "indent", 
"true"), "//result[@numFound = '0']"); - doc = new SolrInputDocument(); doc.setField("id", "3"); doc.setField("cat", ImmutableMap.of("add", "bbb")); @@ -1027,13 +1149,13 @@ public void testAdd() throws Exception { public void testAddDistinct() throws Exception { SolrInputDocument doc = new SolrInputDocument(); doc.setField("id", "3"); - doc.setField("cat", new String[]{"aaa", "ccc"}); + doc.setField("cat", new String[] {"aaa", "ccc"}); doc.setField("atomic_is", 10); assertU(adoc(doc)); doc = new SolrInputDocument(); doc.setField("id", "4"); - doc.setField("cat", new String[]{"aaa", "ccc"}); + doc.setField("cat", new String[] {"aaa", "ccc"}); assertU(adoc(doc)); assertU(commit()); @@ -1041,7 +1163,6 @@ public void testAddDistinct() throws Exception { assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '2']"); assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '0']"); - doc = new SolrInputDocument(); doc.setField("id", "3"); doc.setField("cat", ImmutableMap.of("add-distinct", "bbb")); @@ -1051,10 +1172,10 @@ public void testAddDistinct() throws Exception { assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '2']"); assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '1']"); - assertQ(req("q", "cat:bbb", "indent", "true"), + assertQ( + req("q", "cat:bbb", "indent", "true"), "//doc/arr[@name='cat'][count(str)=3]", - "//doc/arr[@name='atomic_is'][count(int)=1]" - ); + "//doc/arr[@name='atomic_is'][count(int)=1]"); doc = new SolrInputDocument(); doc.setField("id", "3"); @@ -1065,10 +1186,10 @@ public void testAddDistinct() throws Exception { assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '2']"); assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '1']"); - assertQ(req("q", "cat:bbb", "indent", "true"), - "//doc/arr[@name='cat'][count(str)=3]", //'bbb' already present will not be added again - "//doc/arr[@name='atomic_is'][count(int)=2]" - ); + assertQ( + req("q", "cat:bbb", "indent", "true"), + "//doc/arr[@name='cat'][count(str)=3]", // 'bbb' already present will not be added again + "//doc/arr[@name='atomic_is'][count(int)=2]"); doc = new SolrInputDocument(); doc.setField("id", "5"); @@ -1077,21 +1198,22 @@ public void testAddDistinct() throws Exception { assertU(commit()); assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '3']"); - assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '2']"); //'cat' field not present, do 'add' atomic operation + assertQ( + req("q", "cat:bbb", "indent", "true"), + "//result[@numFound = '2']"); // 'cat' field not present, do 'add' atomic operation } @Test public void testAddMultiple() throws Exception { SolrInputDocument doc = new SolrInputDocument(); doc.setField("id", "3"); - doc.setField("cat", new String[]{"aaa", "ccc"}); + doc.setField("cat", new String[] {"aaa", "ccc"}); assertU(adoc(doc)); assertU(commit()); assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']"); assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '0']"); - doc = new SolrInputDocument(); doc.setField("id", "3"); doc.setField("cat", ImmutableMap.of("add", "bbb")); @@ -1108,7 +1230,9 @@ public void testAddMultiple() throws Exception { assertU(commit()); assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']"); - assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '1']"); // Should now have 2 occurrences of bbb + assertQ( + req("q", "cat:bbb", "indent", "true"), + "//result[@numFound = 
 
     doc = new SolrInputDocument();
     doc.setField("id", "3");
@@ -1117,7 +1241,9 @@ public void testAddMultiple() throws Exception {
     assertU(commit());
 
     assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']");
-    assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '1']"); // remove only removed first occurrence
+    assertQ(
+        req("q", "cat:bbb", "indent", "true"),
+        "//result[@numFound = '1']"); // remove only removed first occurrence
 
     doc = new SolrInputDocument();
     doc.setField("id", "3");
@@ -1126,7 +1252,9 @@ public void testAddMultiple() throws Exception {
     assertU(commit());
 
     assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']");
-    assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '0']"); // remove now removed last occurrence
+    assertQ(
+        req("q", "cat:bbb", "indent", "true"),
+        "//result[@numFound = '0']"); // remove now removed last occurrence
   }
 
   @Test
@@ -1135,12 +1263,12 @@ public void testSet() throws Exception {
     doc = new SolrInputDocument();
     doc.setField("id", "5");
-    doc.setField("cat", new String[]{"aaa", "ccc"});
+    doc.setField("cat", new String[] {"aaa", "ccc"});
     assertU(adoc(doc));
 
     doc = new SolrInputDocument();
     doc.setField("id", "6");
-    doc.setField("cat", new String[]{"aaa", "ccc"});
+    doc.setField("cat", new String[] {"aaa", "ccc"});
     assertU(adoc(doc));
 
     assertU(commit());
@@ -1150,7 +1278,6 @@ public void testSet() throws Exception {
     assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '0']");
     assertQ(req("q", "cat:ccc", "indent", "true"), "//result[@numFound = '2']");
-
     doc = new SolrInputDocument();
     doc.setField("id", "5");
     doc.setField("cat", ImmutableMap.of("set", "bbb"));
@@ -1185,53 +1312,56 @@ public void testAtomicUpdatesOnDateFields() throws Exception {
     assertQ(req("q", "id:6"), "boolean(//result/doc/date[@name='simple_tdt1'])");
-
     for (String dateFieldName : dateFieldNames) {
       // none (this can fail with Invalid Date String exception)
       doc = new SolrInputDocument();
       doc.setField("id", "6");
-      doc.setField("other_i", ImmutableMap.of("set", "43")); // set the independent field to another value
+      doc.setField(
+          "other_i", ImmutableMap.of("set", "43")); // set the independent field to another value
       assertU(adoc(doc));
 
-      if (dateFieldName.endsWith("s")) {
+      if (dateFieldName.endsWith("s")) {
         // add
         doc = new SolrInputDocument();
         doc.setField("id", "6");
-        doc.setField("other_i", ImmutableMap.of("set", "43")); // set the independent field to another value
+        doc.setField(
+            "other_i", ImmutableMap.of("set", "43")); // set the independent field to another value
         doc.setField(dateFieldName, ImmutableMap.of("add", "1987-01-01T00:00:00Z"));
         assertU(adoc(doc));
 
         // remove
         doc = new SolrInputDocument();
         doc.setField("id", "6");
-        doc.setField("other_i", ImmutableMap.of("set", "43")); // set the independent field to another value
+        doc.setField(
+            "other_i", ImmutableMap.of("set", "43")); // set the independent field to another value
         doc.setField(dateFieldName, ImmutableMap.of("remove", "1987-01-01T00:00:00Z"));
         assertU(adoc(doc));
       } else {
         // set
         doc = new SolrInputDocument();
         doc.setField("id", "6");
-        doc.setField("other_i", ImmutableMap.of("set", "43")); // set the independent field to another value
+        doc.setField(
+            "other_i", ImmutableMap.of("set", "43")); // set the independent field to another value
        doc.setField(dateFieldName, ImmutableMap.of("set", "1987-01-01T00:00:00Z"));
        assertU(adoc(doc));
 
         // unset
         doc = new SolrInputDocument();
         doc.setField("id", "6");
-        doc.setField("other_i", ImmutableMap.of("set", "43")); // set the independent field to another value
+        doc.setField(
+            "other_i", ImmutableMap.of("set", "43")); // set the independent field to another value
         doc.setField(dateFieldName, map("set", null));
         assertU(adoc(doc));
       }
 
       assertU(commit());
 
-      if (dateFieldName.endsWith("s")) {
+      if (dateFieldName.endsWith("s")) {
         assertQ(req("q", "id:6"), "//result/doc[count(arr[@name='" + dateFieldName + "'])=1]");
         assertQ(req("q", "id:6"), "//result/doc/arr[@name='" + dateFieldName + "'][count(date)=1]");
       } else {
         assertQ(req("q", "id:6"), "//result/doc[count(date[@name='" + dateFieldName + "'])=0]");
       }
     }
-
   }
 
   @Test
@@ -1241,25 +1371,42 @@ public void testAtomicUpdatesOnNonStoredDocValues() throws Exception {
     assertU(adoc(sdoc("id", 4, "single_s_dvo", "abc", "single_i_dvo", 1)));
     assertU(commit());
 
-    assertU(adoc(sdoc("id", 2, "title", ImmutableMap.of("set", "newtitle2"),
-        "single_i_dvo", ImmutableMap.of("inc", 1))));
-    assertU(adoc(sdoc("id", 3, "title", ImmutableMap.of("set", "newtitle3"),
-        "single_d_dvo", ImmutableMap.of("inc", 1))));
+    assertU(
+        adoc(
+            sdoc(
+                "id",
+                2,
+                "title",
+                ImmutableMap.of("set", "newtitle2"),
+                "single_i_dvo",
+                ImmutableMap.of("inc", 1))));
+    assertU(
+        adoc(
+            sdoc(
+                "id",
+                3,
+                "title",
+                ImmutableMap.of("set", "newtitle3"),
+                "single_d_dvo",
+                ImmutableMap.of("inc", 1))));
     assertU(adoc(sdoc("id", 4, "single_i_dvo", ImmutableMap.of("inc", 1))));
     assertU(commit());
 
-    assertJQ(req("q", "id:2"),
+    assertJQ(
+        req("q", "id:2"),
         "/response/docs/[0]/id=='2'",
         "/response/docs/[0]/title/[0]=='newtitle2'",
         "/response/docs/[0]/single_i_dvo==101");
-    assertJQ(req("q", "id:3"),
+    assertJQ(
+        req("q", "id:3"),
         1e-4,
         "/response/docs/[0]/id=='3'",
         "/response/docs/[0]/title/[0]=='newtitle3'",
         "/response/docs/[0]/single_d_dvo==4.14");
-    assertJQ(req("q", "id:4"),
+    assertJQ(
+        req("q", "id:4"),
         1e-4,
         "/response/docs/[0]/id=='4'",
         "/response/docs/[0]/single_s_dvo=='abc'",
@@ -1268,7 +1415,8 @@ public void testAtomicUpdatesOnNonStoredDocValues() throws Exception {
     // test that non stored docvalues was carried forward for a non-docvalue update
     assertU(adoc(sdoc("id", 3, "title", ImmutableMap.of("set", "newertitle3"))));
     assertU(commit());
-    assertJQ(req("q", "id:3"),
+    assertJQ(
+        req("q", "id:3"),
         1e-4,
         "/response/docs/[0]/id=='3'",
         "/response/docs/[0]/title/[0]=='newertitle3'",
@@ -1277,20 +1425,31 @@ public void testAtomicUpdatesOnNonStoredDocValues() throws Exception {
 
   @Test
   public void testAtomicUpdatesOnNonStoredDocValuesMulti() throws Exception {
-    assertU(adoc(sdoc("id", 1, "title", "title1", "multi_ii_dvo", 100, "multi_ii_dvo", Integer.MAX_VALUE)));
+    assertU(
+        adoc(
+            sdoc(
+                "id",
+                1,
+                "title",
+                "title1",
+                "multi_ii_dvo",
+                100,
+                "multi_ii_dvo",
+                Integer.MAX_VALUE)));
     assertU(commit());
 
     assertU(adoc(sdoc("id", 1, "title", ImmutableMap.of("set", "newtitle1"))));
     assertU(commit());
 
     // test that non stored multivalued docvalues was carried forward for a non docvalues update
-    assertJQ(req("q", "id:1"),
+    assertJQ(
+        req("q", "id:1"),
         "/response/docs/[0]/id=='1'",
         "/response/docs/[0]/title/[0]=='newtitle1'",
         "/response/docs/[0]/multi_ii_dvo/[0]==100",
         "/response/docs/[0]/multi_ii_dvo/[1]==" + Integer.MAX_VALUE);
   }
-
+
   @Test
   public void testAtomicUpdatesOnNonStoredDocValuesCopyField() throws Exception {
     assertU(adoc(sdoc("id", 101, "title", "title2", "single_i_dvn", 100)));
@@ -1298,37 +1457,63 @@ public void testAtomicUpdatesOnNonStoredDocValuesCopyField() throws Exception {
     assertU(adoc(sdoc("id", 103, "single_s_dvn", "abc", "single_i_dvn", 1)));
     assertU(commit());
 
-    // Do each one twice... the first time it will be retrieved from the index, and the second time from the transaction log.
-    for (int i=0; i<2; i++) {
-      assertU(adoc(sdoc("id", 101, "title", ImmutableMap.of("set", "newtitle2"),
-          "single_i_dvn", ImmutableMap.of("inc", 1))));
-      assertU(adoc(sdoc("id", 102, "title", ImmutableMap.of("set", "newtitle3"),
-          "single_d_dvn", ImmutableMap.of("inc", 1))));
+    // Do each one twice... the first time it will be retrieved from the index, and the second time
+    // from the transaction log.
+    for (int i = 0; i < 2; i++) {
+      assertU(
+          adoc(
+              sdoc(
+                  "id",
+                  101,
+                  "title",
+                  ImmutableMap.of("set", "newtitle2"),
+                  "single_i_dvn",
+                  ImmutableMap.of("inc", 1))));
+      assertU(
+          adoc(
+              sdoc(
+                  "id",
+                  102,
+                  "title",
+                  ImmutableMap.of("set", "newtitle3"),
+                  "single_d_dvn",
+                  ImmutableMap.of("inc", 1))));
       assertU(adoc(sdoc("id", 103, "single_i_dvn", ImmutableMap.of("inc", 1))));
     }
     assertU(commit());
 
-    assertJQ(req("q", "id:101"),
+    assertJQ(
+        req("q", "id:101"),
         "/response/docs/[0]/id=='101'",
         "/response/docs/[0]/title/[0]=='newtitle2'",
         "/response/docs/[0]/single_i_dvn==102");
-    assertJQ(req("q", "id:102"),
+    assertJQ(
+        req("q", "id:102"),
         1e-4,
         "/response/docs/[0]/id=='102'",
         "/response/docs/[0]/title/[0]=='newtitle3'",
         "/response/docs/[0]/single_d_dvn==5.14");
-    assertJQ(req("q", "id:103"),
+    assertJQ(
+        req("q", "id:103"),
         "/response/docs/[0]/id=='103'",
         "/response/docs/[0]/single_s_dvn=='abc'",
         "/response/docs/[0]/single_i_dvn==3");
 
     // test that non stored docvalues was carried forward for a non-docvalue update
-    assertU(adoc(sdoc("id", 103, "single_s_dvn", ImmutableMap.of("set", "abcupdate"),
-        "single_i_dvn", ImmutableMap.of("set", 5))));
-    assertU(commit());
-    assertJQ(req("q", "id:103"),
+    assertU(
+        adoc(
+            sdoc(
+                "id",
+                103,
+                "single_s_dvn",
+                ImmutableMap.of("set", "abcupdate"),
+                "single_i_dvn",
+                ImmutableMap.of("set", 5))));
+    assertU(commit());
+    assertJQ(
+        req("q", "id:103"),
         "/response/docs/[0]/id=='103'",
         "/response/docs/[0]/single_s_dvn=='abcupdate'",
         "/response/docs/[0]/single_i_dvn==5");
@@ -1340,7 +1525,7 @@ public void testInvalidOperation() {
     doc = new SolrInputDocument();
     doc.setField("id", "7");
-    doc.setField("cat", new String[]{"aaa", "ccc"});
+    doc.setField("cat", new String[] {"aaa", "ccc"});
     assertU(adoc(doc));
     assertU(commit());
@@ -1350,7 +1535,7 @@ public void testInvalidOperation() {
     doc = new SolrInputDocument();
     doc.setField("id", "7");
     doc.setField("cat", ImmutableMap.of("whatever", "bbb"));
-    assertFailedU( adoc(doc));
+    assertFailedU(adoc(doc));
     assertU(commit());
 
     assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']");
@@ -1405,54 +1590,54 @@ public void testInvalidOperation() {
     SolrException e = expectThrows(SolrException.class, () -> assertU(adoc(invalidDoc)));
     assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code());
-    MatcherAssert.assertThat(e.getMessage(), containsString("'inc' is not supported on non-numeric field cat"));
+    MatcherAssert.assertThat(
+        e.getMessage(), containsString("'inc' is not supported on non-numeric field cat"));
   }
 
   public void testFieldsWithDefaultValuesWhenAtomicUpdatesAgainstTlog() {
     for (String fieldToUpdate : Arrays.asList("field_to_update_i1", "field_to_update_i_dvo")) {
       clearIndex();
-
+
       assertU(adoc(sdoc("id", "7", fieldToUpdate, "666")));
-      assertQ(fieldToUpdate + ": initial RTG"
-          , req("qt", "/get", "id", "7")
-          , "count(//doc)=1"
"//doc/str[@name='id'][.='7']" - , "//doc/int[@name='"+fieldToUpdate+"'][.='666']" - , "//doc/int[@name='intDefault'][.='42']" - , "//doc/int[@name='intDvoDefault'][.='42']" - , "//doc/long[@name='_version_']" - , "//doc/date[@name='timestamp']" - , "//doc/arr[@name='multiDefault']/str[.='muLti-Default']" - ); + assertQ( + fieldToUpdate + ": initial RTG", + req("qt", "/get", "id", "7"), + "count(//doc)=1", + "//doc/str[@name='id'][.='7']", + "//doc/int[@name='" + fieldToUpdate + "'][.='666']", + "//doc/int[@name='intDefault'][.='42']", + "//doc/int[@name='intDvoDefault'][.='42']", + "//doc/long[@name='_version_']", + "//doc/date[@name='timestamp']", + "//doc/arr[@name='multiDefault']/str[.='muLti-Default']"); // do atomic update assertU(adoc(sdoc("id", "7", fieldToUpdate, ImmutableMap.of("inc", -555)))); - assertQ(fieldToUpdate + ": RTG after atomic update" - , req("qt", "/get", "id", "7") - , "count(//doc)=1" - , "//doc/str[@name='id'][.='7']" - , "//doc/int[@name='"+fieldToUpdate+"'][.='111']" - , "//doc/int[@name='intDefault'][.='42']" - , "//doc/int[@name='intDvoDefault'][.='42']" - , "//doc/long[@name='_version_']" - , "//doc/date[@name='timestamp']" - , "//doc/arr[@name='multiDefault']/str[.='muLti-Default']" - ); + assertQ( + fieldToUpdate + ": RTG after atomic update", + req("qt", "/get", "id", "7"), + "count(//doc)=1", + "//doc/str[@name='id'][.='7']", + "//doc/int[@name='" + fieldToUpdate + "'][.='111']", + "//doc/int[@name='intDefault'][.='42']", + "//doc/int[@name='intDvoDefault'][.='42']", + "//doc/long[@name='_version_']", + "//doc/date[@name='timestamp']", + "//doc/arr[@name='multiDefault']/str[.='muLti-Default']"); assertU(commit()); - assertQ(fieldToUpdate + ": post commit RTG" - , req("qt", "/get", "id", "7") - , "count(//doc)=1" - , "//doc/str[@name='id'][.='7']" - , "//doc/int[@name='"+fieldToUpdate+"'][.='111']" - , "//doc/int[@name='intDefault'][.='42']" - , "//doc/int[@name='intDvoDefault'][.='42']" - , "//doc/long[@name='_version_']" - , "//doc/date[@name='timestamp']" - , "//doc/arr[@name='multiDefault']/str[.='muLti-Default']" - ); + assertQ( + fieldToUpdate + ": post commit RTG", + req("qt", "/get", "id", "7"), + "count(//doc)=1", + "//doc/str[@name='id'][.='7']", + "//doc/int[@name='" + fieldToUpdate + "'][.='111']", + "//doc/int[@name='intDefault'][.='42']", + "//doc/int[@name='intDvoDefault'][.='42']", + "//doc/long[@name='_version_']", + "//doc/date[@name='timestamp']", + "//doc/arr[@name='multiDefault']/str[.='muLti-Default']"); } - } public void testAtomicUpdateOfFieldsWithDefaultValue() { @@ -1462,85 +1647,88 @@ public void testAtomicUpdateOfFieldsWithDefaultValue() { // doc where we immediately attempt to inc the default value assertU(adoc(sdoc("id", "7", fieldToUpdate, ImmutableMap.of("inc", "666")))); - assertQ(fieldToUpdate + ": initial RTG#7" - , req("qt", "/get", "id", "7") - , "count(//doc)=1" - , "//doc/str[@name='id'][.='7']" - , "//doc/int[@name='"+fieldToUpdate+"'][.='708']" - // whichever field we did *NOT* update - , "//doc/int[@name!='"+fieldToUpdate+"'][.='42']" - , "//doc/long[@name='_version_']" - , "//doc/date[@name='timestamp']" - , "//doc/arr[@name='multiDefault']/str[.='muLti-Default']" - ); + assertQ( + fieldToUpdate + ": initial RTG#7", + req("qt", "/get", "id", "7"), + "count(//doc)=1", + "//doc/str[@name='id'][.='7']", + "//doc/int[@name='" + fieldToUpdate + "'][.='708']" + // whichever field we did *NOT* update + , + "//doc/int[@name!='" + fieldToUpdate + "'][.='42']", + "//doc/long[@name='_version_']", + 
"//doc/date[@name='timestamp']", + "//doc/arr[@name='multiDefault']/str[.='muLti-Default']"); // do atomic update assertU(adoc(sdoc("id", "7", fieldToUpdate, ImmutableMap.of("inc", -555)))); - assertQ(fieldToUpdate + ": RTG#7 after atomic update" - , req("qt", "/get", "id", "7") - , "count(//doc)=1" - , "//doc/str[@name='id'][.='7']" - , "//doc/int[@name='"+fieldToUpdate+"'][.='153']" - // whichever field we did *NOT* update - , "//doc/int[@name!='"+fieldToUpdate+"'][.='42']" - , "//doc/long[@name='_version_']" - , "//doc/date[@name='timestamp']" - , "//doc/arr[@name='multiDefault']/str[.='muLti-Default']" - ); + assertQ( + fieldToUpdate + ": RTG#7 after atomic update", + req("qt", "/get", "id", "7"), + "count(//doc)=1", + "//doc/str[@name='id'][.='7']", + "//doc/int[@name='" + fieldToUpdate + "'][.='153']" + // whichever field we did *NOT* update + , + "//doc/int[@name!='" + fieldToUpdate + "'][.='42']", + "//doc/long[@name='_version_']", + "//doc/date[@name='timestamp']", + "//doc/arr[@name='multiDefault']/str[.='muLti-Default']"); // diff doc where we check that we can overwrite the default value assertU(adoc(sdoc("id", "8", fieldToUpdate, ImmutableMap.of("set", "666")))); - assertQ(fieldToUpdate + ": initial RTG#8" - , req("qt", "/get", "id", "8") - , "count(//doc)=1" - , "//doc/str[@name='id'][.='8']" - , "//doc/int[@name='"+fieldToUpdate+"'][.='666']" - // whichever field we did *NOT* update - , "//doc/int[@name!='"+fieldToUpdate+"'][.='42']" - , "//doc/long[@name='_version_']" - , "//doc/date[@name='timestamp']" - , "//doc/arr[@name='multiDefault']/str[.='muLti-Default']" - ); + assertQ( + fieldToUpdate + ": initial RTG#8", + req("qt", "/get", "id", "8"), + "count(//doc)=1", + "//doc/str[@name='id'][.='8']", + "//doc/int[@name='" + fieldToUpdate + "'][.='666']" + // whichever field we did *NOT* update + , + "//doc/int[@name!='" + fieldToUpdate + "'][.='42']", + "//doc/long[@name='_version_']", + "//doc/date[@name='timestamp']", + "//doc/arr[@name='multiDefault']/str[.='muLti-Default']"); // do atomic update assertU(adoc(sdoc("id", "8", fieldToUpdate, ImmutableMap.of("inc", -555)))); - assertQ(fieldToUpdate + ": RTG after atomic update" - , req("qt", "/get", "id", "8") - , "count(//doc)=1" - , "//doc/str[@name='id'][.='8']" - , "//doc/int[@name='"+fieldToUpdate+"'][.='111']" - // whichever field we did *NOT* update - , "//doc/int[@name!='"+fieldToUpdate+"'][.='42']" - , "//doc/long[@name='_version_']" - , "//doc/date[@name='timestamp']" - , "//doc/arr[@name='multiDefault']/str[.='muLti-Default']" - ); - + assertQ( + fieldToUpdate + ": RTG after atomic update", + req("qt", "/get", "id", "8"), + "count(//doc)=1", + "//doc/str[@name='id'][.='8']", + "//doc/int[@name='" + fieldToUpdate + "'][.='111']" + // whichever field we did *NOT* update + , + "//doc/int[@name!='" + fieldToUpdate + "'][.='42']", + "//doc/long[@name='_version_']", + "//doc/date[@name='timestamp']", + "//doc/arr[@name='multiDefault']/str[.='muLti-Default']"); + assertU(commit()); - - assertQ(fieldToUpdate + ": doc7 post commit RTG" - , req("qt", "/get", "id", "7") - , "count(//doc)=1" - , "//doc/str[@name='id'][.='7']" - , "//doc/int[@name='"+fieldToUpdate+"'][.='153']" - // whichever field we did *NOT* update - , "//doc/int[@name!='"+fieldToUpdate+"'][.='42']" - , "//doc/long[@name='_version_']" - , "//doc/date[@name='timestamp']" - , "//doc/arr[@name='multiDefault']/str[.='muLti-Default']" - ); - assertQ(fieldToUpdate + ": doc8 post commit RTG" - , req("qt", "/get", "id", "8") - , "count(//doc)=1" - , 
"//doc/str[@name='id'][.='8']" - , "//doc/int[@name='"+fieldToUpdate+"'][.='111']" - // whichever field we did *NOT* update - , "//doc/int[@name!='"+fieldToUpdate+"'][.='42']" - , "//doc/long[@name='_version_']" - , "//doc/date[@name='timestamp']" - , "//doc/arr[@name='multiDefault']/str[.='muLti-Default']" - ); + + assertQ( + fieldToUpdate + ": doc7 post commit RTG", + req("qt", "/get", "id", "7"), + "count(//doc)=1", + "//doc/str[@name='id'][.='7']", + "//doc/int[@name='" + fieldToUpdate + "'][.='153']" + // whichever field we did *NOT* update + , + "//doc/int[@name!='" + fieldToUpdate + "'][.='42']", + "//doc/long[@name='_version_']", + "//doc/date[@name='timestamp']", + "//doc/arr[@name='multiDefault']/str[.='muLti-Default']"); + assertQ( + fieldToUpdate + ": doc8 post commit RTG", + req("qt", "/get", "id", "8"), + "count(//doc)=1", + "//doc/str[@name='id'][.='8']", + "//doc/int[@name='" + fieldToUpdate + "'][.='111']" + // whichever field we did *NOT* update + , + "//doc/int[@name!='" + fieldToUpdate + "'][.='42']", + "//doc/long[@name='_version_']", + "//doc/date[@name='timestamp']", + "//doc/arr[@name='multiDefault']/str[.='muLti-Default']"); } - } - - } diff --git a/solr/core/src/test/org/apache/solr/update/processor/CategoryRoutedAliasUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/CategoryRoutedAliasUpdateProcessorTest.java index 9e3d6b8af6f..5a20c6cdfbf 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/CategoryRoutedAliasUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/CategoryRoutedAliasUpdateProcessorTest.java @@ -23,7 +23,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; - import org.apache.lucene.util.IOUtils; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.CloudSolrClient; @@ -51,11 +50,7 @@ public class CategoryRoutedAliasUpdateProcessorTest extends RoutedAliasUpdatePro // use this for example categories private static final String[] SHIPS = { - "Constructor", - "Heart of Gold", - "Stunt Ship", - "B-ark", - "Bi$tromath" + "Constructor", "Heart of Gold", "Stunt Ship", "B-ark", "Bi$tromath" }; private static final String categoryField = "ship_name_en"; @@ -65,15 +60,15 @@ public class CategoryRoutedAliasUpdateProcessorTest extends RoutedAliasUpdatePro private static CloudSolrClient solrClient; private int numDocsDeletedOrFailed = 0; // uncomment to create pause for attaching profiler. -// static { -// JOptionPane.showMessageDialog(null,"Ready?"); -// } + // static { + // JOptionPane.showMessageDialog(null,"Ready?"); + // } @Before public void doBefore() throws Exception { configureCluster(1).configure(); solrClient = getCloudSolrClient(cluster); - //log this to help debug potential causes of problems + // log this to help debug potential causes of problems if (log.isInfoEnabled()) { log.info("SolrClient: {}", solrClient); log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); // nowarn @@ -105,41 +100,50 @@ public void testNonEnglish() throws Exception { // This should be changed in an enhancement (wherein the category is RFC-4648 url-safe encoded). // For now document it as an expected limitation. 
diff --git a/solr/core/src/test/org/apache/solr/update/processor/CategoryRoutedAliasUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/CategoryRoutedAliasUpdateProcessorTest.java
index 9e3d6b8af6f..5a20c6cdfbf 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/CategoryRoutedAliasUpdateProcessorTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/CategoryRoutedAliasUpdateProcessorTest.java
@@ -23,7 +23,6 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
-
 import org.apache.lucene.util.IOUtils;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
@@ -51,11 +50,7 @@ public class CategoryRoutedAliasUpdateProcessorTest extends RoutedAliasUpdatePro
 
   // use this for example categories
   private static final String[] SHIPS = {
-      "Constructor",
-      "Heart of Gold",
-      "Stunt Ship",
-      "B-ark",
-      "Bi$tromath"
+    "Constructor", "Heart of Gold", "Stunt Ship", "B-ark", "Bi$tromath"
   };
 
   private static final String categoryField = "ship_name_en";
@@ -65,15 +60,15 @@ public class CategoryRoutedAliasUpdateProcessorTest extends RoutedAliasUpdatePro
   private static CloudSolrClient solrClient;
   private int numDocsDeletedOrFailed = 0;
 
   // uncomment to create pause for attaching profiler.
-// static {
-//   JOptionPane.showMessageDialog(null,"Ready?");
-// }
+  // static {
+  //   JOptionPane.showMessageDialog(null,"Ready?");
+  // }
 
   @Before
   public void doBefore() throws Exception {
     configureCluster(1).configure();
     solrClient = getCloudSolrClient(cluster);
-    //log this to help debug potential causes of problems
+    // log this to help debug potential causes of problems
     if (log.isInfoEnabled()) {
       log.info("SolrClient: {}", solrClient);
       log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); // nowarn
@@ -105,41 +100,50 @@ public void testNonEnglish() throws Exception {
     // This should be changed in an enhancement (wherein the category is RFC-4648 url-safe encoded).
     // For now document it as an expected limitation.
-    String somethingInChinese = "中文的东西";  // 5 chars
-    String somethingInHebrew = "משהו בסינית";  // 11 chars
-    String somethingInThai = "บางอย่างในภาษาจีน";  // 17 chars
+    String somethingInChinese = "中文的东西"; // 5 chars
+    String somethingInHebrew = "משהו בסינית"; // 11 chars
+    String somethingInThai = "บางอย่างในภาษาจีน"; // 17 chars
     String somethingInArabic = "شيء في الصينية"; // 14 chars
-    String somethingInGreek = "κάτι κινεζικό";  // 13 chars
-    String somethingInGujarati = "િનીમાં કંઈક";  // 11 chars (same as hebrew)
+    String somethingInGreek = "κάτι κινεζικό"; // 13 chars
+    String somethingInGujarati = "િનીમાં કંઈક"; // 11 chars (same as hebrew)
 
     String ONE_ = "_";
     String TWO_ = "__";
     String THREE_ = "___";
-    String FOUR_  = "____";
-    String FIVE_  = "_____";
-
-    String collectionChinese  = getAlias() + "__CRA__" + FIVE_;
-    String collectionHebrew   = getAlias() + "__CRA__" + FIVE_ + FIVE_ + ONE_;
-    String collectionThai     = getAlias() + "__CRA__" + FIVE_ + FIVE_ + FIVE_ + TWO_;
-    String collectionArabic   = getAlias() + "__CRA__" + FIVE_ + FIVE_ + FOUR_;
-    String collectionGreek    = getAlias() + "__CRA__" + FIVE_ + FIVE_ + THREE_;
+    String FOUR_ = "____";
+    String FIVE_ = "_____";
+
+    String collectionChinese = getAlias() + "__CRA__" + FIVE_;
+    String collectionHebrew = getAlias() + "__CRA__" + FIVE_ + FIVE_ + ONE_;
+    String collectionThai = getAlias() + "__CRA__" + FIVE_ + FIVE_ + FIVE_ + TWO_;
+    String collectionArabic = getAlias() + "__CRA__" + FIVE_ + FIVE_ + FOUR_;
+    String collectionGreek = getAlias() + "__CRA__" + FIVE_ + FIVE_ + THREE_;
 
     // Note Gujarati not listed, because it duplicates hebrew.
     String configName = getSaferTestName();
     createConfigSet(configName);
 
-    List<String> retrievedConfigSetNames = new ConfigSetAdminRequest.List().process(solrClient).getConfigSets();
+    List<String> retrievedConfigSetNames =
+        new ConfigSetAdminRequest.List().process(solrClient).getConfigSets();
     List<String> expectedConfigSetNames = Arrays.asList("_default", configName);
 
-    // config sets leak between tests so we can't be any more specific than this on the next 2 asserts
-    assertTrue("We expect at least 2 configSets",
+    // config sets leak between tests so we can't be any more specific than this on the next 2
+    // asserts
+    assertTrue(
+        "We expect at least 2 configSets",
         retrievedConfigSetNames.size() >= expectedConfigSetNames.size());
-    assertTrue("ConfigNames should include :" + expectedConfigSetNames, retrievedConfigSetNames.containsAll(expectedConfigSetNames));
-
-    CollectionAdminRequest.createCategoryRoutedAlias(getAlias(), categoryField, 20,
-        CollectionAdminRequest.createCollection("_unused_", configName, 1, 1))
+    assertTrue(
+        "ConfigNames should include :" + expectedConfigSetNames,
+        retrievedConfigSetNames.containsAll(expectedConfigSetNames));
+
+    CollectionAdminRequest.createCategoryRoutedAlias(
+            getAlias(),
+            categoryField,
+            20,
+            CollectionAdminRequest.createCollection("_unused_", configName, 1, 1))
         .process(solrClient);
 
-    addDocsAndCommit(true,
+    addDocsAndCommit(
+        true,
         newDoc(somethingInChinese),
         newDoc(somethingInHebrew),
         newDoc(somethingInThai),
@@ -148,22 +152,26 @@ public void testNonEnglish() throws Exception {
         newDoc(somethingInArabic),
         newDoc(somethingInGujarati)); // Note Gujarati not listed, because it duplicates hebrew.
 
-    assertInvariants(collectionChinese, collectionHebrew, collectionThai, collectionArabic, collectionGreek);
+    assertInvariants(
+        collectionChinese, collectionHebrew, collectionThai, collectionArabic, collectionGreek);
 
     assertColHasDocCount(collectionChinese, 1);
     assertColHasDocCount(collectionHebrew, 2);
     assertColHasDocCount(collectionThai, 1);
     assertColHasDocCount(collectionArabic, 1);
     assertColHasDocCount(collectionGreek, 1);
-
   }
 
-  private void assertColHasDocCount(String collectionChinese, int expected) throws SolrServerException, IOException {
-    final QueryResponse colResponse = solrClient.query(collectionChinese, params(
-        "q", "*:*",
-        "rows", "0"));
+  private void assertColHasDocCount(String collectionChinese, int expected)
+      throws SolrServerException, IOException {
+    final QueryResponse colResponse =
+        solrClient.query(
+            collectionChinese,
+            params(
+                "q", "*:*",
+                "rows", "0"));
     long aliasNumFound = colResponse.getResults().getNumFound();
-    assertEquals(expected,aliasNumFound);
+    assertEquals(expected, aliasNumFound);
   }
 
   @Slow
@@ -172,7 +180,8 @@ public void test() throws Exception {
     String configName = getSaferTestName();
     createConfigSet(configName);
 
-    // Start with one collection manually created (and use higher numShards & replicas than we'll use for others)
+    // Start with one collection manually created (and use higher numShards & replicas than we'll
+    // use for others)
     // This tests we may pre-create the collection and it's acceptable.
     final String colVogon = getAlias() + "__CRA__" + SHIPS[0];
@@ -182,16 +191,24 @@ public void test() throws Exception {
     final String colArk = getAlias() + "__CRA__" + noDashes(SHIPS[3]);
     final String colBistro = getAlias() + "__CRA__" + noDollar(SHIPS[4]);
 
-    List<String> retrievedConfigSetNames = new ConfigSetAdminRequest.List().process(solrClient).getConfigSets();
+    List<String> retrievedConfigSetNames =
+        new ConfigSetAdminRequest.List().process(solrClient).getConfigSets();
     List<String> expectedConfigSetNames = Arrays.asList("_default", configName);
 
-    // config sets leak between tests so we can't be any more specific than this on the next 2 asserts
-    assertTrue("We expect at least 2 configSets",
+    // config sets leak between tests so we can't be any more specific than this on the next 2
+    // asserts
+    assertTrue(
+        "We expect at least 2 configSets",
        retrievedConfigSetNames.size() >= expectedConfigSetNames.size());
-    assertTrue("ConfigNames should include :" + expectedConfigSetNames, retrievedConfigSetNames.containsAll(expectedConfigSetNames));
-
-    CollectionAdminRequest.createCategoryRoutedAlias(getAlias(), categoryField, 20,
-        CollectionAdminRequest.createCollection("_unused_", configName, 1, 1))
+    assertTrue(
+        "ConfigNames should include :" + expectedConfigSetNames,
+        retrievedConfigSetNames.containsAll(expectedConfigSetNames));
+
+    CollectionAdminRequest.createCategoryRoutedAlias(
+            getAlias(),
+            categoryField,
+            20,
+            CollectionAdminRequest.createCollection("_unused_", configName, 1, 1))
         .process(solrClient);
 
     // now we index a document
@@ -199,17 +216,13 @@ public void test() throws Exception {
 
     String uninitialized = getAlias() + "__CRA__" + CategoryRoutedAlias.UNINITIALIZED;
 
-    // important to test that we don't try to delete the temp collection on the first document. If we did so
-    // we would be at risk of out of order execution of the deletion/creation which would leave a window
-    // of time where there were no collections in the alias. That would likely break all manner of other
-    // parts of solr.
+    // important to test that we don't try to delete the temp collection on the first document. If
+    // we did so we would be at risk of out of order execution of the deletion/creation which would
+    // leave a window of time where there were no collections in the alias. That would likely break
+    // all manner of other parts of solr.
     assertInvariants(colVogon, uninitialized);
 
-    addDocsAndCommit(true,
-        newDoc(SHIPS[1]),
-        newDoc(SHIPS[2]),
-        newDoc(SHIPS[3]),
-        newDoc(SHIPS[4]));
+    addDocsAndCommit(true, newDoc(SHIPS[1]), newDoc(SHIPS[2]), newDoc(SHIPS[3]), newDoc(SHIPS[4]));
 
     // NOW the temp collection should be gone!
     assertInvariants(colVogon, colHoG, colStunt, colArk, colBistro);
@@ -218,15 +231,16 @@ public void test() throws Exception {
     testFailedDocument(newDoc(null), "Route value is null");
     testFailedDocument(newDoc("foo__CRA__bar"), "7 character sequence __CRA__");
     testFailedDocument(newDoc("fóóCRAóóbar"), "7 character sequence __CRA__");
-
   }
 
   private String noSpaces(String ship) {
     return ship.replaceAll("\\s", "_");
   }
+
   private String noDashes(String ship) {
     return ship.replaceAll("-", "_");
   }
+
   private String noDollar(String ship) {
     return ship.replaceAll("\\$", "_");
   }
@@ -240,34 +254,43 @@ public void testMustMatch() throws Exception {
     final int maxCardinality = Integer.MAX_VALUE; // max cardinality for current test
 
-    // Start with one collection manually created (and use higher numShards & replicas than we'll use for others)
+    // Start with one collection manually created (and use higher numShards & replicas than we'll
+    // use for others)
     // This tests we may pre-create the collection and it's acceptable.
-    final String colVogon = getAlias() + "__CRA__" + noSpaces("HHS "+ SHIPS[0]) + "_solr";
+    final String colVogon = getAlias() + "__CRA__" + noSpaces("HHS " + SHIPS[0]) + "_solr";
 
     // we expect changes ensuring a legal collection name.
-    final String colHoG = getAlias() + "__CRA__" + noSpaces("HHS "+ SHIPS[1]) + "_solr";
+    final String colHoG = getAlias() + "__CRA__" + noSpaces("HHS " + SHIPS[1]) + "_solr";
 
-    List<String> retrievedConfigSetNames = new ConfigSetAdminRequest.List().process(solrClient).getConfigSets();
+    List<String> retrievedConfigSetNames =
+        new ConfigSetAdminRequest.List().process(solrClient).getConfigSets();
     List<String> expectedConfigSetNames = Arrays.asList("_default", configName);
 
-    // config sets leak between tests so we can't be any more specific than this on the next 2 asserts
-    assertTrue("We expect at least 2 configSets",
+    // config sets leak between tests so we can't be any more specific than this on the next 2
+    // asserts
+    assertTrue(
+        "We expect at least 2 configSets",
         retrievedConfigSetNames.size() >= expectedConfigSetNames.size());
-    assertTrue("ConfigNames should include :" + expectedConfigSetNames, retrievedConfigSetNames.containsAll(expectedConfigSetNames));
-
-    CollectionAdminRequest.createCategoryRoutedAlias(getAlias(), categoryField, maxCardinality,
-        CollectionAdminRequest.createCollection("_unused_", configName, 1, 1))
+    assertTrue(
+        "ConfigNames should include :" + expectedConfigSetNames,
+        retrievedConfigSetNames.containsAll(expectedConfigSetNames));
+
+    CollectionAdminRequest.createCategoryRoutedAlias(
+            getAlias(),
+            categoryField,
+            maxCardinality,
+            CollectionAdminRequest.createCollection("_unused_", configName, 1, 1))
         .setMustMatch(mustMatchRegex)
         .process(solrClient);
 
     // now we index a document
     addDocsAndCommit(true, newDoc("HHS " + SHIPS[0] + "_solr"));
-    //assertDocRoutedToCol(lastDocId, col23rd);
+    // assertDocRoutedToCol(lastDocId, col23rd);
 
     String uninitialized = getAlias() + "__CRA__" + CategoryRoutedAlias.UNINITIALIZED;
     assertInvariants(colVogon, uninitialized);
 
-    addDocsAndCommit(true, newDoc("HHS "+ SHIPS[1] + "_solr"));
+    addDocsAndCommit(true, newDoc("HHS " + SHIPS[1] + "_solr"));
 
     assertInvariants(colVogon, colHoG);
@@ -286,21 +309,33 @@ public void testInvalidMustMatch() throws Exception {
     final int maxCardinality = Integer.MAX_VALUE; // max cardinality for current test
 
-    List<String> retrievedConfigSetNames = new ConfigSetAdminRequest.List().process(solrClient).getConfigSets();
+    List<String> retrievedConfigSetNames =
+        new ConfigSetAdminRequest.List().process(solrClient).getConfigSets();
     List<String> expectedConfigSetNames = Arrays.asList("_default", configName);
 
-    // config sets leak between tests so we can't be any more specific than this on the next 2 asserts
-    assertTrue("We expect at least 2 configSets",
+    // config sets leak between tests so we can't be any more specific than this on the next 2
+    // asserts
+    assertTrue(
+        "We expect at least 2 configSets",
         retrievedConfigSetNames.size() >= expectedConfigSetNames.size());
-    assertTrue("ConfigNames should include :" + expectedConfigSetNames, retrievedConfigSetNames.containsAll(expectedConfigSetNames));
-
-    SolrException e = expectThrows(SolrException.class, () -> CollectionAdminRequest.createCategoryRoutedAlias(getAlias(), categoryField, maxCardinality,
-        CollectionAdminRequest.createCollection("_unused_", configName, 1, 1))
-        .setMustMatch(mustMatchRegex)
-        .process(solrClient)
-    );
-
-    assertTrue("Create Alias should fail since router.mustMatch must be a valid regular expression",
+    assertTrue(
+        "ConfigNames should include :" + expectedConfigSetNames,
+        retrievedConfigSetNames.containsAll(expectedConfigSetNames));
+
+    SolrException e =
+        expectThrows(
+            SolrException.class,
+            () ->
+                CollectionAdminRequest.createCategoryRoutedAlias(
+                        getAlias(),
+                        categoryField,
+                        maxCardinality,
+                        CollectionAdminRequest.createCollection("_unused_", configName, 1, 1))
+                    .setMustMatch(mustMatchRegex)
+                    .process(solrClient));
+
+    assertTrue(
+        "Create Alias should fail since router.mustMatch must be a valid regular expression",
         e.getMessage().contains("router.mustMatch must be a valid regular expression"));
   }
@@ -312,28 +347,37 @@ public void testMaxCardinality() throws Exception {
     final int maxCardinality = 2; // max cardinality for current test
 
-    // Start with one collection manually created (and use higher numShards & replicas than we'll use for others)
+    // Start with one collection manually created (and use higher numShards & replicas than we'll
+    // use for others)
     // This tests we may pre-create the collection and it's acceptable.
     final String colVogon = getAlias() + "__CRA__" + SHIPS[0];
 
     // we expect changes ensuring a legal collection name.
     final String colHoG = getAlias() + "__CRA__" + SHIPS[1].replaceAll("\\s", "_");
 
-    List<String> retrievedConfigSetNames = new ConfigSetAdminRequest.List().process(solrClient).getConfigSets();
+    List<String> retrievedConfigSetNames =
+        new ConfigSetAdminRequest.List().process(solrClient).getConfigSets();
     List<String> expectedConfigSetNames = Arrays.asList("_default", configName);
 
-    // config sets leak between tests so we can't be any more specific than this on the next 2 asserts
-    assertTrue("We expect at least 2 configSets",
+    // config sets leak between tests so we can't be any more specific than this on the next 2
+    // asserts
+    assertTrue(
+        "We expect at least 2 configSets",
         retrievedConfigSetNames.size() >= expectedConfigSetNames.size());
-    assertTrue("ConfigNames should include :" + expectedConfigSetNames, retrievedConfigSetNames.containsAll(expectedConfigSetNames));
-
-    CollectionAdminRequest.createCategoryRoutedAlias(getAlias(), categoryField, maxCardinality,
-        CollectionAdminRequest.createCollection("_unused_", configName, 1, 1))
+    assertTrue(
+        "ConfigNames should include :" + expectedConfigSetNames,
+        retrievedConfigSetNames.containsAll(expectedConfigSetNames));
+
+    CollectionAdminRequest.createCategoryRoutedAlias(
+            getAlias(),
+            categoryField,
+            maxCardinality,
+            CollectionAdminRequest.createCollection("_unused_", configName, 1, 1))
         .process(solrClient);
 
     // now we index a document
     addDocsAndCommit(true, newDoc(SHIPS[0]));
-    //assertDocRoutedToCol(lastDocId, col23rd);
+    // assertDocRoutedToCol(lastDocId, col23rd);
 
     String uninitialized = getAlias() + "__CRA__" + CategoryRoutedAlias.UNINITIALIZED;
     assertInvariants(colVogon, uninitialized);
@@ -347,10 +391,9 @@ public void testMaxCardinality() throws Exception {
     assertInvariants(colVogon, colHoG);
   }
 
-
   /**
-   * Test that the Update Processor Factory routes documents to leader shards and thus
-   * avoids the possibility of introducing an extra hop to find the leader.
+   * Test that the Update Processor Factory routes documents to leader shards and thus avoids the
+   * possibility of introducing an extra hop to find the leader.
    *
    * @throws Exception when it blows up unexpectedly :)
    */
@@ -365,14 +408,20 @@ public void testSliceRouting() throws Exception {
     // 4 of which are leaders, and 8 of which should fail this test.
     final int numShards = 1 + random().nextInt(4);
     final int numReplicas = 1 + random().nextInt(3);
-    CollectionAdminRequest.createCategoryRoutedAlias(getAlias(), categoryField, 20,
-        CollectionAdminRequest.createCollection("_unused_", configName, numShards, numReplicas))
+    CollectionAdminRequest.createCategoryRoutedAlias(
+            getAlias(),
+            categoryField,
+            20,
+            CollectionAdminRequest.createCollection("_unused_", configName, numShards, numReplicas))
         .process(solrClient);
 
     // cause some collections to be created
-    assertUpdateResponse(solrClient.add(getAlias(), new SolrInputDocument("id","1",categoryField, SHIPS[0])));
-    assertUpdateResponse(solrClient.add(getAlias(), new SolrInputDocument("id","2",categoryField, SHIPS[1])));
-    assertUpdateResponse(solrClient.add(getAlias(), new SolrInputDocument("id","3",categoryField, SHIPS[2])));
+    assertUpdateResponse(
+        solrClient.add(getAlias(), new SolrInputDocument("id", "1", categoryField, SHIPS[0])));
+    assertUpdateResponse(
+        solrClient.add(getAlias(), new SolrInputDocument("id", "2", categoryField, SHIPS[1])));
+    assertUpdateResponse(
+        solrClient.add(getAlias(), new SolrInputDocument("id", "3", categoryField, SHIPS[2])));
     assertUpdateResponse(solrClient.commit(getAlias()));
 
     // wait for all the collections to exist...
@@ -381,10 +430,11 @@ public void testSliceRouting() throws Exception {
     waitColAndAlias(getAlias(), "__CRA__", noSpaces(SHIPS[1]), numShards);
     waitColAndAlias(getAlias(), "__CRA__", noSpaces(SHIPS[2]), numShards);
 
-    // at this point we now have 3 collections with 4 shards each, and 3 replicas per shard for a total of
-    // 36 total replicas, 1/3 of which are leaders. We will add 3 docs and each has a 33% chance of hitting a
-    // leader randomly and not causing a failure if the code is broken, but as a whole this test will therefore only have
-    // about a 3.6% false positive rate (0.33^3). If that's not good enough, add more docs or more replicas per shard :).
+    // at this point we now have 3 collections with 4 shards each, and 3 replicas per shard for a
+    // total of 36 total replicas, 1/3 of which are leaders. We will add 3 docs and each has a 33%
+    // chance of hitting a leader randomly and not causing a failure if the code is broken, but as a
+    // whole this test will therefore only have about a 3.6% false positive rate (0.33^3). If that's
+    // not good enough, add more docs or more replicas per shard :).
 
     final String trackGroupName = getTrackUpdatesGroupName();
     final List<UpdateCommand> updateCommands;
@@ -392,20 +442,19 @@ public void testSliceRouting() throws Exception {
       TrackingUpdateProcessorFactory.startRecording(trackGroupName);
 
       ModifiableSolrParams params = params("post-processor", "tracking-" + trackGroupName);
-      List<SolrInputDocument> list = Arrays.asList(
-          sdoc("id", "4", categoryField, SHIPS[0]),
-          sdoc("id", "5", categoryField, SHIPS[1]),
-          sdoc("id", "6", categoryField, SHIPS[2]));
+      List<SolrInputDocument> list =
+          Arrays.asList(
+              sdoc("id", "4", categoryField, SHIPS[0]),
+              sdoc("id", "5", categoryField, SHIPS[1]),
+              sdoc("id", "6", categoryField, SHIPS[2]));
       Collections.shuffle(list, random()); // order should not matter here
-      assertUpdateResponse(add(getAlias(), list,
-          params));
+      assertUpdateResponse(add(getAlias(), list, params));
     } finally {
       updateCommands = TrackingUpdateProcessorFactory.stopRecording(trackGroupName);
     }
     assertRouting(numShards, updateCommands);
   }
 
-
   /*
    * We expect the following invariants:
    *    1.) to see all the supplied collections
   *    2.) 'see through' the collections to the alias
   *    3.)
   *        find as many docs as have been added but not deleted/failed
   */
  private void assertInvariants(String... expectedColls) throws IOException, SolrServerException {
-    final int expectNumFound = lastDocId - numDocsDeletedOrFailed; //lastDocId is effectively # generated docs
-
-    List<String> observedCols = new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(getAlias());
+    final int expectNumFound =
+        lastDocId - numDocsDeletedOrFailed; // lastDocId is effectively # generated docs
+
+    List<String> observedCols =
+        new CollectionAdminRequest.ListAliases()
+            .process(solrClient)
+            .getAliasesAsLists()
+            .get(getAlias());
     observedCols = new ArrayList<>(observedCols);
     observedCols.sort(String::compareTo); // don't really care about the order here.
     assert !observedCols.isEmpty();
     int numFoundViaCollections = 0;
     for (String col : observedCols) {
-      final QueryResponse colResponse = solrClient.query(col, params(
-          "q", "*:*",
-          "rows", "0"));
+      final QueryResponse colResponse =
+          solrClient.query(
+              col,
+              params(
+                  "q", "*:*",
+                  "rows", "0"));
       long numFound = colResponse.getResults().getNumFound();
       if (numFound > 0) {
         numFoundViaCollections += numFound;
       }
     }
-    final QueryResponse colResponse = solrClient.query(getAlias(), params(
-        "q", "*:*",
-        "rows", "0"));
+    final QueryResponse colResponse =
+        solrClient.query(
+            getAlias(),
+            params(
+                "q", "*:*",
+                "rows", "0"));
     long numFoundViaAlias = colResponse.getResults().getNumFound();
     List<String> expectedList = Arrays.asList(expectedColls);
     expectedList.sort(String::compareTo);
-    assertArrayEquals("Expected " + expectedColls.length + " collections, found " + observedCols.size() + ":\n" +
-        observedCols + " vs \n" + expectedList, expectedColls, observedCols.toArray());
-    assertEquals("Expected collections and alias to have same number of documents",
-        numFoundViaAlias, numFoundViaCollections);
-    assertEquals("Expected to find " + expectNumFound + " docs but found " + numFoundViaAlias,
-        expectNumFound, numFoundViaAlias);
+    assertArrayEquals(
+        "Expected "
+            + expectedColls.length
+            + " collections, found "
+            + observedCols.size()
+            + ":\n"
+            + observedCols
+            + " vs \n"
+            + expectedList,
+        expectedColls,
+        observedCols.toArray());
+    assertEquals(
+        "Expected collections and alias to have same number of documents",
+        numFoundViaAlias,
+        numFoundViaCollections);
+    assertEquals(
+        "Expected to find " + expectNumFound + " docs but found " + numFoundViaAlias,
+        expectNumFound,
+        numFoundViaAlias);
   }
 
   private SolrInputDocument newDoc(String routedValue) {
     if (routedValue != null) {
-      return sdoc("id", Integer.toString(++lastDocId),
-          categoryField, routedValue,
-          intField, "0"); // always 0
+      return sdoc(
+          "id",
+          Integer.toString(++lastDocId),
+          categoryField,
+          routedValue,
+          intField,
+          "0"); // always 0
     } else {
-      return sdoc("id", Integer.toString(++lastDocId),
-          intField, "0"); // always 0
+      return sdoc("id", Integer.toString(++lastDocId), intField, "0"); // always 0
     }
   }
@@ -465,27 +541,29 @@ public CloudSolrClient getSolrClient() {
     return solrClient;
   }
 
-
   public static class IncrementURPFactory extends FieldMutatingUpdateProcessorFactory {
 
     @Override
-    public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) {
-      return FieldValueMutatingUpdateProcessor.valueMutator(getSelector(), next,
-          (src) -> Integer.valueOf(src.toString()) + 1);
+    public UpdateRequestProcessor getInstance(
+        SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) {
+      return FieldValueMutatingUpdateProcessor.valueMutator(
+          getSelector(), next, (src) -> Integer.valueOf(src.toString()) + 1);
     }
   }
 
-  private void testFailedDocument(SolrInputDocument sdoc, String errorMsg) throws SolrServerException, IOException {
+  private void testFailedDocument(SolrInputDocument sdoc, String errorMsg)
+      throws SolrServerException, IOException {
     try {
       final UpdateResponse resp = solrClient.add(getAlias(), sdoc);
       // if we have a TolerantUpdateProcessor then we see it there)
       final Object errors = resp.getResponseHeader().get("errors"); // Tolerant URP
       assertNotNull(errors);
-      assertTrue("Expected to find " + errorMsg + " in errors: " + errors.toString(),errors.toString().contains(errorMsg));
+      assertTrue(
+          "Expected to find " + errorMsg + " in errors: " + errors.toString(),
+          errors.toString().contains(errorMsg));
     } catch (SolrException e) {
       assertTrue(e.getMessage().contains(errorMsg));
     }
     ++numDocsDeletedOrFailed;
   }
-
 }
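One number in the CategoryRoutedAliasUpdateProcessorTest hunks above deserves a gloss: the testSliceRouting comment derives its ~3.6% false-positive rate by compounding the per-document chance of randomly landing on a leader. A worked version of that arithmetic, assuming the comment's one-third-leaders scenario (the actual test randomizes shard and replica counts):

    public class SliceRoutingOdds {
      public static void main(String[] args) {
        double leaderFraction = 1.0 / 3.0; // 1/3 of replicas are leaders
        int docs = 3; // three independently routed documents
        // all three hit leaders by pure chance with probability (1/3)^3 ~= 0.037;
        // the comment rounds via 0.33^3 ~= 0.0359, i.e. about 3.6%
        System.out.printf("false positive rate = %.4f%n", Math.pow(leaderFraction, docs));
      }
    }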
diff --git a/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorFactoryTest.java
index de429cfa2e0..a68c41adad4 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorFactoryTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorFactoryTest.java
@@ -16,6 +16,9 @@
  */
 package org.apache.solr.update.processor;
 
+import static org.hamcrest.core.Is.is;
+import static org.mockito.Mockito.mock;
+
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.NamedList;
@@ -24,14 +27,10 @@
 import org.junit.Before;
 import org.junit.Test;
 
-import static org.hamcrest.core.Is.is;
-import static org.mockito.Mockito.mock;
-
-/**
- * Tests for {@link ClassificationUpdateProcessorFactory}
- */
+/** Tests for {@link ClassificationUpdateProcessorFactory} */
 public class ClassificationUpdateProcessorFactoryTest extends SolrTestCaseJ4 {
-  private ClassificationUpdateProcessorFactory cFactoryToTest = new ClassificationUpdateProcessorFactory();
+  private ClassificationUpdateProcessorFactory cFactoryToTest =
+      new ClassificationUpdateProcessorFactory();
   private NamedList args = new NamedList<>();
 
   @Before
@@ -48,14 +47,16 @@ public void initArgs() {
   @Test
   public void init_fullArgs_shouldInitFullClassificationParams() {
     cFactoryToTest.init(args);
-    ClassificationUpdateProcessorParams classificationParams = cFactoryToTest.getClassificationParams();
+    ClassificationUpdateProcessorParams classificationParams =
+        cFactoryToTest.getClassificationParams();
 
     String[] inputFieldNames = classificationParams.getInputFieldNames();
     assertEquals("inputField1", inputFieldNames[0]);
     assertEquals("inputField2", inputFieldNames[1]);
     assertEquals("classField1", classificationParams.getTrainingClassField());
     assertEquals("classFieldX", classificationParams.getPredictedClassField());
-    assertEquals(ClassificationUpdateProcessorFactory.Algorithm.BAYES, classificationParams.getAlgorithm());
+    assertEquals(
+        ClassificationUpdateProcessorFactory.Algorithm.BAYES, classificationParams.getAlgorithm());
     assertEquals(8, classificationParams.getMinDf());
     assertEquals(10, classificationParams.getMinTf());
     assertEquals(9, classificationParams.getK());
@@ -81,7 +82,8 @@ public void init_emptyPredictedClassField_shouldDefaultToTrainingClassField() {
 
     cFactoryToTest.init(args);
 
-    ClassificationUpdateProcessorParams classificationParams = cFactoryToTest.getClassificationParams();
+    ClassificationUpdateProcessorParams classificationParams =
+        cFactoryToTest.getClassificationParams();
     assertThat(classificationParams.getPredictedClassField(), is("classField1"));
   }
 
@@ -90,23 +92,29 @@ public void init_unsupportedAlgorithm_shouldThrowExceptionWithDetailedMessage()
     args.removeAll("algorithm");
     args.add("algorithm", "unsupported");
     SolrException e = assertThrows(SolrException.class, () -> cFactoryToTest.init(args));
-    assertEquals("Classification UpdateProcessor Algorithm: 'unsupported' not supported", e.getMessage());
+    assertEquals(
+        "Classification UpdateProcessor Algorithm: 'unsupported' not supported", e.getMessage());
   }
 
   @Test
   public void init_unsupportedFilterQuery_shouldThrowExceptionWithDetailedMessage() {
     assumeWorkingMockito();
-
+
     UpdateRequestProcessor mockProcessor = mock(UpdateRequestProcessor.class);
     SolrQueryRequest mockRequest = mock(SolrQueryRequest.class);
     SolrQueryResponse mockResponse = mock(SolrQueryResponse.class);
     args.add("knn.filterQuery", "not supported query");
     cFactoryToTest.init(args);
-    SolrException e = assertThrows(SolrException.class, () -> {
-      /* parsing failure happens because of the mocks, fine enough to check a proper exception propagation */
-      cFactoryToTest.getInstance(mockRequest, mockResponse, mockProcessor);
-    });
-    assertEquals("Classification UpdateProcessor Training Filter Query: 'not supported query' is not supported", e.getMessage());
+    SolrException e =
+        assertThrows(
+            SolrException.class,
+            () -> {
+              /* parsing failure happens because of the mocks, fine enough to check a proper exception propagation */
+              cFactoryToTest.getInstance(mockRequest, mockResponse, mockProcessor);
+            });
+    assertEquals(
+        "Classification UpdateProcessor Training Filter Query: 'not supported query' is not supported",
+        e.getMessage());
   }
 
   @Test
@@ -116,9 +124,11 @@ public void init_emptyArgs_shouldDefaultClassificationParams() {
     args.removeAll("knn.minDf");
     args.removeAll("knn.minTf");
     cFactoryToTest.init(args);
-    ClassificationUpdateProcessorParams classificationParams = cFactoryToTest.getClassificationParams();
+    ClassificationUpdateProcessorParams classificationParams =
+        cFactoryToTest.getClassificationParams();
 
-    assertEquals(ClassificationUpdateProcessorFactory.Algorithm.KNN, classificationParams.getAlgorithm());
+    assertEquals(
+        ClassificationUpdateProcessorFactory.Algorithm.KNN, classificationParams.getAlgorithm());
     assertEquals(1, classificationParams.getMinDf());
     assertEquals(1, classificationParams.getMinTf());
     assertEquals(10, classificationParams.getK());
diff --git a/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorIntegrationTest.java b/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorIntegrationTest.java
index 1d166812fac..4fc42b34ad9 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorIntegrationTest.java
@@ -16,8 +16,9 @@
  */
 package org.apache.solr.update.processor;
 
-import java.io.IOException;
+import static org.hamcrest.core.Is.is;
 
+import java.io.IOException;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.ScoreDoc;
@@ -31,8 +32,6 @@
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import static org.hamcrest.core.Is.is;
-
 /**
  * Tests for {@link ClassificationUpdateProcessor} and {@link
ClassificationUpdateProcessorFactory} */ @@ -47,7 +46,8 @@ public class ClassificationUpdateProcessorIntegrationTest extends SolrTestCaseJ4 private static final String CHAIN = "classification"; private static final String BROKEN_CHAIN_FILTER_QUERY = "classification-unsupported-filterQuery"; - private ClassificationUpdateProcessorFactory cFactoryToTest = new ClassificationUpdateProcessorFactory(); + private ClassificationUpdateProcessorFactory cFactoryToTest = + new ClassificationUpdateProcessorFactory(); @BeforeClass public static void beforeClass() throws Exception { @@ -67,104 +67,218 @@ public void setUp() throws Exception { public void classify_fullConfiguration_shouldAutoClassify() throws Exception { indexTrainingSet(); // To be classified, we index documents without a class and verify the expected one is returned - addDoc(adoc(ID, "22", - TITLE, "word4 word4 word4", - CONTENT, "word5 word5 ", - AUTHOR, "Name1 Surname1"), CHAIN); - addDoc(adoc(ID, "21", - TITLE, "word1 word1", - CONTENT, "word2 word2", - AUTHOR, "Name Surname"), CHAIN); + addDoc( + adoc( + ID, + "22", + TITLE, + "word4 word4 word4", + CONTENT, + "word5 word5 ", + AUTHOR, + "Name1 Surname1"), + CHAIN); + addDoc( + adoc(ID, "21", TITLE, "word1 word1", CONTENT, "word2 word2", AUTHOR, "Name Surname"), + CHAIN); addDoc(commit()); Document doc22 = getDoc("22"); - assertThat(doc22.get(CLASS),is("class2")); + assertThat(doc22.get(CLASS), is("class2")); Document doc21 = getDoc("21"); - assertThat(doc21.get(CLASS),is("class1")); + assertThat(doc21.get(CLASS), is("class1")); } @Test - public void classify_unsupportedFilterQueryConfiguration_shouldThrowExceptionWithDetailedMessage() throws Exception { + public void classify_unsupportedFilterQueryConfiguration_shouldThrowExceptionWithDetailedMessage() + throws Exception { indexTrainingSet(); - SolrException e = assertThrows(SolrException.class, () -> addDoc( - adoc(ID, "21", - TITLE, "word4 word4 word4", - CONTENT, "word5 word5 ", - AUTHOR, "Name1 Surname1"), BROKEN_CHAIN_FILTER_QUERY)); - assertEquals("Classification UpdateProcessor Training Filter Query: 'not valid ( lucene query' is not supported", e.getMessage()); + SolrException e = + assertThrows( + SolrException.class, + () -> + addDoc( + adoc( + ID, + "21", + TITLE, + "word4 word4 word4", + CONTENT, + "word5 word5 ", + AUTHOR, + "Name1 Surname1"), + BROKEN_CHAIN_FILTER_QUERY)); + assertEquals( + "Classification UpdateProcessor Training Filter Query: 'not valid ( lucene query' is not supported", + e.getMessage()); addDoc(commit()); } /** - * Index some example documents with a class manually assigned. - * This will be our trained model. + * Index some example documents with a class manually assigned. This will be our trained model.
* * @throws Exception If there is a low-level I/O error */ private void indexTrainingSet() throws Exception { - //class1 - addDoc(adoc(ID, "1", - TITLE, "word1 word1 word1", - CONTENT, "word2 word2 word2", - AUTHOR, "Name Surname", - CLASS, "class1"), CHAIN); - addDoc(adoc(ID, "2", - TITLE, "word1 word1", - CONTENT, "word2 word2", - AUTHOR, "Name Surname", - CLASS, "class1"), CHAIN); - addDoc(adoc(ID, "3", - TITLE, "word1 word1 word1", - CONTENT, "word2", - AUTHOR, "Name Surname", - CLASS, "class1"), CHAIN); - addDoc(adoc(ID, "4", - TITLE, "word1 word1 word1", - CONTENT, "word2 word2 word2", - AUTHOR, "Name Surname", - CLASS, "class1"), CHAIN); - //class2 - addDoc(adoc(ID, "5", - TITLE, "word4 word4 word4", - CONTENT, "word5 word5", - AUTHOR, "Name Surname", - CLASS, "class2"), CHAIN); - addDoc(adoc(ID, "6", - TITLE, "word4 word4", - CONTENT, "word5", - AUTHOR, "Name Surname", - CLASS, "class2"), CHAIN); - addDoc(adoc(ID, "7", - TITLE, "word4 word4 word4", - CONTENT, "word5 word5 word5", - AUTHOR, "Name Surname", - CLASS, "class2"), CHAIN); - addDoc(adoc(ID, "8", - TITLE, "word4", - CONTENT, "word5 word5 word5 word5", - AUTHOR, "Name Surname", - CLASS, "class2"), CHAIN); - //class3 - addDoc(adoc(ID, "9", - TITLE, "word4 word4 word4", - CONTENT, "word5 word5", - AUTHOR, "Name1 Surname1", - CLASS, "class3"), CHAIN); - addDoc(adoc(ID, "10", - TITLE, "word4 word4", - CONTENT, "word5", - AUTHOR, "Name1 Surname1", - CLASS, "class3"), CHAIN); - addDoc(adoc(ID, "11", - TITLE, "word4 word4 word4", - CONTENT, "word5 word5 word5", - AUTHOR, "Name1 Surname1", - CLASS, "class3"), CHAIN); - addDoc(adoc(ID, "12", - TITLE, "word4", - CONTENT, "word5 word5 word5 word5", - AUTHOR, "Name1 Surname1", - CLASS, "class3"), CHAIN); + // class1 + addDoc( + adoc( + ID, + "1", + TITLE, + "word1 word1 word1", + CONTENT, + "word2 word2 word2", + AUTHOR, + "Name Surname", + CLASS, + "class1"), + CHAIN); + addDoc( + adoc( + ID, + "2", + TITLE, + "word1 word1", + CONTENT, + "word2 word2", + AUTHOR, + "Name Surname", + CLASS, + "class1"), + CHAIN); + addDoc( + adoc( + ID, + "3", + TITLE, + "word1 word1 word1", + CONTENT, + "word2", + AUTHOR, + "Name Surname", + CLASS, + "class1"), + CHAIN); + addDoc( + adoc( + ID, + "4", + TITLE, + "word1 word1 word1", + CONTENT, + "word2 word2 word2", + AUTHOR, + "Name Surname", + CLASS, + "class1"), + CHAIN); + // class2 + addDoc( + adoc( + ID, + "5", + TITLE, + "word4 word4 word4", + CONTENT, + "word5 word5", + AUTHOR, + "Name Surname", + CLASS, + "class2"), + CHAIN); + addDoc( + adoc( + ID, + "6", + TITLE, + "word4 word4", + CONTENT, + "word5", + AUTHOR, + "Name Surname", + CLASS, + "class2"), + CHAIN); + addDoc( + adoc( + ID, + "7", + TITLE, + "word4 word4 word4", + CONTENT, + "word5 word5 word5", + AUTHOR, + "Name Surname", + CLASS, + "class2"), + CHAIN); + addDoc( + adoc( + ID, + "8", + TITLE, + "word4", + CONTENT, + "word5 word5 word5 word5", + AUTHOR, + "Name Surname", + CLASS, + "class2"), + CHAIN); + // class3 + addDoc( + adoc( + ID, + "9", + TITLE, + "word4 word4 word4", + CONTENT, + "word5 word5", + AUTHOR, + "Name1 Surname1", + CLASS, + "class3"), + CHAIN); + addDoc( + adoc( + ID, + "10", + TITLE, + "word4 word4", + CONTENT, + "word5", + AUTHOR, + "Name1 Surname1", + CLASS, + "class3"), + CHAIN); + addDoc( + adoc( + ID, + "11", + TITLE, + "word4 word4 word4", + CONTENT, + "word5 word5 word5", + AUTHOR, + "Name1 Surname1", + CLASS, + "class3"), + CHAIN); + addDoc( + adoc( + ID, + "12", + TITLE, + "word4", + CONTENT, + "word5 word5 word5 word5", + AUTHOR, + "Name1 Surname1", + 
CLASS, + "class3"), + CHAIN); addDoc(commit()); } diff --git a/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorTest.java index 19752bddb1f..d07fd966af9 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/ClassificationUpdateProcessorTest.java @@ -16,9 +16,11 @@ */ package org.apache.solr.update.processor; +import static org.hamcrest.core.Is.is; +import static org.mockito.Mockito.mock; + import java.io.IOException; import java.util.ArrayList; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.analysis.MockTokenizer; @@ -37,12 +39,7 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.hamcrest.core.Is.is; -import static org.mockito.Mockito.mock; - -/** - * Tests for {@link ClassificationUpdateProcessor} - */ +/** Tests for {@link ClassificationUpdateProcessor} */ public class ClassificationUpdateProcessorTest extends SolrTestCaseJ4 { /* field names are used in accordance with the solrconfig and schema supplied */ private static final String ID = "id"; @@ -88,267 +85,270 @@ public void tearDown() throws Exception { super.tearDown(); } - - - @Test - public void classificationMonoClass_predictedClassFieldSet_shouldAssignClassInPredictedClassField() throws Exception { - UpdateRequestProcessor mockProcessor=mock(UpdateRequestProcessor.class); + public void + classificationMonoClass_predictedClassFieldSet_shouldAssignClassInPredictedClassField() + throws Exception { + UpdateRequestProcessor mockProcessor = mock(UpdateRequestProcessor.class); prepareTrainedIndexMonoClass(); - AddUpdateCommand update=new AddUpdateCommand(req()); - SolrInputDocument unseenDocument1 = sdoc(ID, "10", - TITLE, "word4 word4 word4", - CONTENT, "word2 word2 ", - AUTHOR, "unseenAuthor"); - update.solrDoc=unseenDocument1; + AddUpdateCommand update = new AddUpdateCommand(req()); + SolrInputDocument unseenDocument1 = + sdoc(ID, "10", TITLE, "word4 word4 word4", CONTENT, "word2 word2 ", AUTHOR, "unseenAuthor"); + update.solrDoc = unseenDocument1; - ClassificationUpdateProcessorParams params = initParams(ClassificationUpdateProcessorFactory.Algorithm.KNN); + ClassificationUpdateProcessorParams params = + initParams(ClassificationUpdateProcessorFactory.Algorithm.KNN); params.setPredictedClassField(PREDICTED_CLASS); - updateProcessorToTest=new ClassificationUpdateProcessor(params,mockProcessor,reader,req().getSchema()); + updateProcessorToTest = + new ClassificationUpdateProcessor(params, mockProcessor, reader, req().getSchema()); updateProcessorToTest.processAdd(update); - assertThat(unseenDocument1.getFieldValue(PREDICTED_CLASS),is("class2")); + assertThat(unseenDocument1.getFieldValue(PREDICTED_CLASS), is("class2")); } @Test public void knnMonoClass_sampleParams_shouldAssignCorrectClass() throws Exception { - UpdateRequestProcessor mockProcessor=mock(UpdateRequestProcessor.class); + UpdateRequestProcessor mockProcessor = mock(UpdateRequestProcessor.class); prepareTrainedIndexMonoClass(); - AddUpdateCommand update=new AddUpdateCommand(req()); - SolrInputDocument unseenDocument1 = sdoc(ID, "10", - TITLE, "word4 word4 word4", - CONTENT, "word2 word2 ", - AUTHOR, "unseenAuthor"); - update.solrDoc=unseenDocument1; + AddUpdateCommand update = new AddUpdateCommand(req()); + SolrInputDocument unseenDocument1 = + 
sdoc(ID, "10", TITLE, "word4 word4 word4", CONTENT, "word2 word2 ", AUTHOR, "unseenAuthor"); + update.solrDoc = unseenDocument1; - ClassificationUpdateProcessorParams params = initParams(ClassificationUpdateProcessorFactory.Algorithm.KNN); + ClassificationUpdateProcessorParams params = + initParams(ClassificationUpdateProcessorFactory.Algorithm.KNN); - updateProcessorToTest=new ClassificationUpdateProcessor(params,mockProcessor,reader,req().getSchema()); + updateProcessorToTest = + new ClassificationUpdateProcessor(params, mockProcessor, reader, req().getSchema()); updateProcessorToTest.processAdd(update); - assertThat(unseenDocument1.getFieldValue(TRAINING_CLASS),is("class2")); + assertThat(unseenDocument1.getFieldValue(TRAINING_CLASS), is("class2")); } @Test public void knnMonoClass_boostFields_shouldAssignCorrectClass() throws Exception { - UpdateRequestProcessor mockProcessor=mock(UpdateRequestProcessor.class); + UpdateRequestProcessor mockProcessor = mock(UpdateRequestProcessor.class); prepareTrainedIndexMonoClass(); - AddUpdateCommand update=new AddUpdateCommand(req()); - SolrInputDocument unseenDocument1 = sdoc(ID, "10", - TITLE, "word4 word4 word4", - CONTENT, "word2 word2 ", - AUTHOR, "unseenAuthor"); - update.solrDoc=unseenDocument1; + AddUpdateCommand update = new AddUpdateCommand(req()); + SolrInputDocument unseenDocument1 = + sdoc(ID, "10", TITLE, "word4 word4 word4", CONTENT, "word2 word2 ", AUTHOR, "unseenAuthor"); + update.solrDoc = unseenDocument1; - ClassificationUpdateProcessorParams params = initParams(ClassificationUpdateProcessorFactory.Algorithm.KNN); - params.setInputFieldNames(new String[]{TITLE + "^1.5", CONTENT + "^0.5", AUTHOR + "^2.5"}); + ClassificationUpdateProcessorParams params = + initParams(ClassificationUpdateProcessorFactory.Algorithm.KNN); + params.setInputFieldNames(new String[] {TITLE + "^1.5", CONTENT + "^0.5", AUTHOR + "^2.5"}); - updateProcessorToTest=new ClassificationUpdateProcessor(params,mockProcessor,reader,req().getSchema()); + updateProcessorToTest = + new ClassificationUpdateProcessor(params, mockProcessor, reader, req().getSchema()); updateProcessorToTest.processAdd(update); - assertThat(unseenDocument1.getFieldValue(TRAINING_CLASS),is("class2")); + assertThat(unseenDocument1.getFieldValue(TRAINING_CLASS), is("class2")); } @Test public void bayesMonoClass_sampleParams_shouldAssignCorrectClass() throws Exception { - UpdateRequestProcessor mockProcessor=mock(UpdateRequestProcessor.class); + UpdateRequestProcessor mockProcessor = mock(UpdateRequestProcessor.class); prepareTrainedIndexMonoClass(); - AddUpdateCommand update=new AddUpdateCommand(req()); - SolrInputDocument unseenDocument1 = sdoc(ID, "10", - TITLE, "word4 word4 word4", - CONTENT, "word2 word2 ", - AUTHOR, "unseenAuthor"); - update.solrDoc=unseenDocument1; + AddUpdateCommand update = new AddUpdateCommand(req()); + SolrInputDocument unseenDocument1 = + sdoc(ID, "10", TITLE, "word4 word4 word4", CONTENT, "word2 word2 ", AUTHOR, "unseenAuthor"); + update.solrDoc = unseenDocument1; - ClassificationUpdateProcessorParams params= initParams(ClassificationUpdateProcessorFactory.Algorithm.BAYES); + ClassificationUpdateProcessorParams params = + initParams(ClassificationUpdateProcessorFactory.Algorithm.BAYES); - updateProcessorToTest=new ClassificationUpdateProcessor(params,mockProcessor,reader,req().getSchema()); + updateProcessorToTest = + new ClassificationUpdateProcessor(params, mockProcessor, reader, req().getSchema()); updateProcessorToTest.processAdd(update); - 
assertThat(unseenDocument1.getFieldValue(TRAINING_CLASS),is("class1")); + assertThat(unseenDocument1.getFieldValue(TRAINING_CLASS), is("class1")); } @Test public void knnMonoClass_contextQueryFiltered_shouldAssignCorrectClass() throws Exception { - UpdateRequestProcessor mockProcessor=mock(UpdateRequestProcessor.class); + UpdateRequestProcessor mockProcessor = mock(UpdateRequestProcessor.class); prepareTrainedIndexMonoClass(); - AddUpdateCommand update=new AddUpdateCommand(req()); - SolrInputDocument unseenDocument1 = sdoc(ID, "10", - TITLE, "word4 word4 word4", - CONTENT, "word2 word2 ", - AUTHOR, "a"); - update.solrDoc=unseenDocument1; + AddUpdateCommand update = new AddUpdateCommand(req()); + SolrInputDocument unseenDocument1 = + sdoc(ID, "10", TITLE, "word4 word4 word4", CONTENT, "word2 word2 ", AUTHOR, "a"); + update.solrDoc = unseenDocument1; - ClassificationUpdateProcessorParams params= initParams(ClassificationUpdateProcessorFactory.Algorithm.KNN); - Query class3DocsChunk=new TermQuery(new Term(TITLE,"word6")); + ClassificationUpdateProcessorParams params = + initParams(ClassificationUpdateProcessorFactory.Algorithm.KNN); + Query class3DocsChunk = new TermQuery(new Term(TITLE, "word6")); params.setTrainingFilterQuery(class3DocsChunk); - updateProcessorToTest=new ClassificationUpdateProcessor(params,mockProcessor,reader,req().getSchema()); + updateProcessorToTest = + new ClassificationUpdateProcessor(params, mockProcessor, reader, req().getSchema()); updateProcessorToTest.processAdd(update); - assertThat(unseenDocument1.getFieldValue(TRAINING_CLASS),is("class3")); + assertThat(unseenDocument1.getFieldValue(TRAINING_CLASS), is("class3")); } @Test public void bayesMonoClass_boostFields_shouldAssignCorrectClass() throws Exception { - UpdateRequestProcessor mockProcessor=mock(UpdateRequestProcessor.class); + UpdateRequestProcessor mockProcessor = mock(UpdateRequestProcessor.class); prepareTrainedIndexMonoClass(); - AddUpdateCommand update=new AddUpdateCommand(req()); - SolrInputDocument unseenDocument1 = sdoc(ID, "10", - TITLE, "word4 word4 word4", - CONTENT, "word2 word2 ", - AUTHOR, "unseenAuthor"); - update.solrDoc=unseenDocument1; + AddUpdateCommand update = new AddUpdateCommand(req()); + SolrInputDocument unseenDocument1 = + sdoc(ID, "10", TITLE, "word4 word4 word4", CONTENT, "word2 word2 ", AUTHOR, "unseenAuthor"); + update.solrDoc = unseenDocument1; - ClassificationUpdateProcessorParams params= initParams(ClassificationUpdateProcessorFactory.Algorithm.BAYES); - params.setInputFieldNames(new String[]{TITLE+"^1.5",CONTENT+"^0.5",AUTHOR+"^2.5"}); + ClassificationUpdateProcessorParams params = + initParams(ClassificationUpdateProcessorFactory.Algorithm.BAYES); + params.setInputFieldNames(new String[] {TITLE + "^1.5", CONTENT + "^0.5", AUTHOR + "^2.5"}); - updateProcessorToTest=new ClassificationUpdateProcessor(params,mockProcessor,reader,req().getSchema()); + updateProcessorToTest = + new ClassificationUpdateProcessor(params, mockProcessor, reader, req().getSchema()); updateProcessorToTest.processAdd(update); - assertThat(unseenDocument1.getFieldValue(TRAINING_CLASS),is("class2")); + assertThat(unseenDocument1.getFieldValue(TRAINING_CLASS), is("class2")); } @Test - public void knnClassification_maxOutputClassesGreaterThanAvailable_shouldAssignCorrectClass() throws Exception { - UpdateRequestProcessor mockProcessor=mock(UpdateRequestProcessor.class); + public void knnClassification_maxOutputClassesGreaterThanAvailable_shouldAssignCorrectClass() + throws Exception { + 
UpdateRequestProcessor mockProcessor = mock(UpdateRequestProcessor.class); prepareTrainedIndexMultiClass(); - AddUpdateCommand update=new AddUpdateCommand(req()); - SolrInputDocument unseenDocument1 = sdoc(ID, "10", - TITLE, "word1 word1 word1", - CONTENT, "word2 word2 ", - AUTHOR, "unseenAuthor"); - update.solrDoc=unseenDocument1; + AddUpdateCommand update = new AddUpdateCommand(req()); + SolrInputDocument unseenDocument1 = + sdoc(ID, "10", TITLE, "word1 word1 word1", CONTENT, "word2 word2 ", AUTHOR, "unseenAuthor"); + update.solrDoc = unseenDocument1; - ClassificationUpdateProcessorParams params= initParams(ClassificationUpdateProcessorFactory.Algorithm.KNN); + ClassificationUpdateProcessorParams params = + initParams(ClassificationUpdateProcessorFactory.Algorithm.KNN); params.setMaxPredictedClasses(100); - updateProcessorToTest=new ClassificationUpdateProcessor(params,mockProcessor,reader,req().getSchema()); + updateProcessorToTest = + new ClassificationUpdateProcessor(params, mockProcessor, reader, req().getSchema()); updateProcessorToTest.processAdd(update); @SuppressWarnings({"unchecked"}) - ArrayList<Object> assignedClasses = (ArrayList)unseenDocument1.getFieldValues(TRAINING_CLASS); - assertThat(assignedClasses.get(0),is("class2")); - assertThat(assignedClasses.get(1),is("class1")); + ArrayList<Object> assignedClasses = (ArrayList) unseenDocument1.getFieldValues(TRAINING_CLASS); + assertThat(assignedClasses.get(0), is("class2")); + assertThat(assignedClasses.get(1), is("class1")); } @Test public void knnMultiClass_maxOutputClasses2_shouldAssignMax2Classes() throws Exception { - UpdateRequestProcessor mockProcessor=mock(UpdateRequestProcessor.class); + UpdateRequestProcessor mockProcessor = mock(UpdateRequestProcessor.class); prepareTrainedIndexMultiClass(); - AddUpdateCommand update=new AddUpdateCommand(req()); - SolrInputDocument unseenDocument1 = sdoc(ID, "10", - TITLE, "word1 word1 word1", - CONTENT, "word2 word2 ", - AUTHOR, "unseenAuthor"); - update.solrDoc=unseenDocument1; + AddUpdateCommand update = new AddUpdateCommand(req()); + SolrInputDocument unseenDocument1 = + sdoc(ID, "10", TITLE, "word1 word1 word1", CONTENT, "word2 word2 ", AUTHOR, "unseenAuthor"); + update.solrDoc = unseenDocument1; - ClassificationUpdateProcessorParams params= initParams(ClassificationUpdateProcessorFactory.Algorithm.KNN); + ClassificationUpdateProcessorParams params = + initParams(ClassificationUpdateProcessorFactory.Algorithm.KNN); params.setMaxPredictedClasses(2); - updateProcessorToTest=new ClassificationUpdateProcessor(params,mockProcessor,reader,req().getSchema()); + updateProcessorToTest = + new ClassificationUpdateProcessor(params, mockProcessor, reader, req().getSchema()); updateProcessorToTest.processAdd(update); @SuppressWarnings({"unchecked"}) - ArrayList<Object> assignedClasses = (ArrayList)unseenDocument1.getFieldValues(TRAINING_CLASS); - assertThat(assignedClasses.size(),is(2)); - assertThat(assignedClasses.get(0),is("class2")); - assertThat(assignedClasses.get(1),is("class1")); + ArrayList<Object> assignedClasses = (ArrayList) unseenDocument1.getFieldValues(TRAINING_CLASS); + assertThat(assignedClasses.size(), is(2)); + assertThat(assignedClasses.get(0), is("class2")); + assertThat(assignedClasses.get(1), is("class1")); } @Test public void bayesMultiClass_maxOutputClasses2_shouldAssignMax2Classes() throws Exception { - UpdateRequestProcessor mockProcessor=mock(UpdateRequestProcessor.class); + UpdateRequestProcessor mockProcessor = mock(UpdateRequestProcessor.class); prepareTrainedIndexMultiClass(); -
AddUpdateCommand update=new AddUpdateCommand(req()); - SolrInputDocument unseenDocument1 = sdoc(ID, "10", - TITLE, "word1 word1 word1", - CONTENT, "word2 word2 ", - AUTHOR, "unseenAuthor"); - update.solrDoc=unseenDocument1; + AddUpdateCommand update = new AddUpdateCommand(req()); + SolrInputDocument unseenDocument1 = + sdoc(ID, "10", TITLE, "word1 word1 word1", CONTENT, "word2 word2 ", AUTHOR, "unseenAuthor"); + update.solrDoc = unseenDocument1; - ClassificationUpdateProcessorParams params= initParams(ClassificationUpdateProcessorFactory.Algorithm.BAYES); + ClassificationUpdateProcessorParams params = + initParams(ClassificationUpdateProcessorFactory.Algorithm.BAYES); params.setMaxPredictedClasses(2); - updateProcessorToTest=new ClassificationUpdateProcessor(params,mockProcessor,reader,req().getSchema()); + updateProcessorToTest = + new ClassificationUpdateProcessor(params, mockProcessor, reader, req().getSchema()); updateProcessorToTest.processAdd(update); @SuppressWarnings({"unchecked"}) - ArrayList<Object> assignedClasses = (ArrayList)unseenDocument1.getFieldValues(TRAINING_CLASS); - assertThat(assignedClasses.size(),is(2)); - assertThat(assignedClasses.get(0),is("class2")); - assertThat(assignedClasses.get(1),is("class1")); + ArrayList<Object> assignedClasses = (ArrayList) unseenDocument1.getFieldValues(TRAINING_CLASS); + assertThat(assignedClasses.size(), is(2)); + assertThat(assignedClasses.get(0), is("class2")); + assertThat(assignedClasses.get(1), is("class1")); } @Test - public void knnMultiClass_boostFieldsMaxOutputClasses2_shouldAssignMax2Classes() throws Exception { - UpdateRequestProcessor mockProcessor=mock(UpdateRequestProcessor.class); + public void knnMultiClass_boostFieldsMaxOutputClasses2_shouldAssignMax2Classes() + throws Exception { + UpdateRequestProcessor mockProcessor = mock(UpdateRequestProcessor.class); prepareTrainedIndexMultiClass(); - AddUpdateCommand update=new AddUpdateCommand(req()); - SolrInputDocument unseenDocument1 = sdoc(ID, "10", - TITLE, "word4 word4 word4", - CONTENT, "word2 word2 ", - AUTHOR, "unseenAuthor"); - update.solrDoc=unseenDocument1; + AddUpdateCommand update = new AddUpdateCommand(req()); + SolrInputDocument unseenDocument1 = + sdoc(ID, "10", TITLE, "word4 word4 word4", CONTENT, "word2 word2 ", AUTHOR, "unseenAuthor"); + update.solrDoc = unseenDocument1; - ClassificationUpdateProcessorParams params= initParams(ClassificationUpdateProcessorFactory.Algorithm.KNN); - params.setInputFieldNames(new String[]{TITLE+"^1.5",CONTENT+"^0.5",AUTHOR+"^2.5"}); + ClassificationUpdateProcessorParams params = + initParams(ClassificationUpdateProcessorFactory.Algorithm.KNN); + params.setInputFieldNames(new String[] {TITLE + "^1.5", CONTENT + "^0.5", AUTHOR + "^2.5"}); params.setMaxPredictedClasses(2); - updateProcessorToTest=new ClassificationUpdateProcessor(params,mockProcessor,reader,req().getSchema()); + updateProcessorToTest = + new ClassificationUpdateProcessor(params, mockProcessor, reader, req().getSchema()); updateProcessorToTest.processAdd(update); @SuppressWarnings({"unchecked"}) - ArrayList<Object> assignedClasses = (ArrayList)unseenDocument1.getFieldValues(TRAINING_CLASS); - assertThat(assignedClasses.size(),is(2)); - assertThat(assignedClasses.get(0),is("class4")); - assertThat(assignedClasses.get(1),is("class6")); + ArrayList<Object> assignedClasses = (ArrayList) unseenDocument1.getFieldValues(TRAINING_CLASS); + assertThat(assignedClasses.size(), is(2)); + assertThat(assignedClasses.get(0), is("class4")); + assertThat(assignedClasses.get(1), is("class6")); } @Test - public
void bayesMultiClass_boostFieldsMaxOutputClasses2_shouldAssignMax2Classes() throws Exception { - UpdateRequestProcessor mockProcessor=mock(UpdateRequestProcessor.class); + public void bayesMultiClass_boostFieldsMaxOutputClasses2_shouldAssignMax2Classes() + throws Exception { + UpdateRequestProcessor mockProcessor = mock(UpdateRequestProcessor.class); prepareTrainedIndexMultiClass(); - AddUpdateCommand update=new AddUpdateCommand(req()); - SolrInputDocument unseenDocument1 = sdoc(ID, "10", - TITLE, "word4 word4 word4", - CONTENT, "word2 word2 ", - AUTHOR, "unseenAuthor"); - update.solrDoc=unseenDocument1; + AddUpdateCommand update = new AddUpdateCommand(req()); + SolrInputDocument unseenDocument1 = + sdoc(ID, "10", TITLE, "word4 word4 word4", CONTENT, "word2 word2 ", AUTHOR, "unseenAuthor"); + update.solrDoc = unseenDocument1; - ClassificationUpdateProcessorParams params= initParams(ClassificationUpdateProcessorFactory.Algorithm.BAYES); - params.setInputFieldNames(new String[]{TITLE+"^1.5",CONTENT+"^0.5",AUTHOR+"^2.5"}); + ClassificationUpdateProcessorParams params = + initParams(ClassificationUpdateProcessorFactory.Algorithm.BAYES); + params.setInputFieldNames(new String[] {TITLE + "^1.5", CONTENT + "^0.5", AUTHOR + "^2.5"}); params.setMaxPredictedClasses(2); - updateProcessorToTest=new ClassificationUpdateProcessor(params,mockProcessor,reader,req().getSchema()); + updateProcessorToTest = + new ClassificationUpdateProcessor(params, mockProcessor, reader, req().getSchema()); updateProcessorToTest.processAdd(update); @SuppressWarnings({"unchecked"}) - ArrayList<Object> assignedClasses = (ArrayList)unseenDocument1.getFieldValues(TRAINING_CLASS); - assertThat(assignedClasses.size(),is(2)); - assertThat(assignedClasses.get(0),is("class4")); - assertThat(assignedClasses.get(1),is("class6")); + ArrayList<Object> assignedClasses = (ArrayList) unseenDocument1.getFieldValues(TRAINING_CLASS); + assertThat(assignedClasses.size(), is(2)); + assertThat(assignedClasses.get(0), is("class4")); + assertThat(assignedClasses.get(1), is("class6")); } - private ClassificationUpdateProcessorParams initParams(ClassificationUpdateProcessorFactory.Algorithm classificationAlgorithm) { - ClassificationUpdateProcessorParams params= new ClassificationUpdateProcessorParams(); - params.setInputFieldNames(new String[]{TITLE,CONTENT,AUTHOR}); + private ClassificationUpdateProcessorParams initParams( + ClassificationUpdateProcessorFactory.Algorithm classificationAlgorithm) { + ClassificationUpdateProcessorParams params = new ClassificationUpdateProcessorParams(); + params.setInputFieldNames(new String[] {TITLE, CONTENT, AUTHOR}); params.setTrainingClassField(TRAINING_CLASS); params.setPredictedClassField(TRAINING_CLASS); params.setMinTf(1); @@ -360,8 +360,7 @@ private ClassificationUpdateProcessorParams initParams(ClassificationUpdateProce } /** - * Index some example documents with a class manually assigned. - * This will be our trained model. + * Index some example documents with a class manually assigned. This will be our trained model.
* * @throws Exception If there is a low-level I/O error */ @@ -369,69 +368,129 @@ private void prepareTrainedIndexMonoClass() throws Exception { directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory); - //class1 - addDoc(writer, buildLuceneDocument(ID, "1", - TITLE, "word1 word1 word1", - CONTENT, "word2 word2 word2", - AUTHOR, "a", - TRAINING_CLASS, "class1")); - addDoc(writer, buildLuceneDocument(ID, "2", - TITLE, "word1 word1", - CONTENT, "word2 word2", - AUTHOR, "a", - TRAINING_CLASS, "class1")); - addDoc(writer, buildLuceneDocument(ID, "3", - TITLE, "word1 word1 word1", - CONTENT, "word2", - AUTHOR, "a", - TRAINING_CLASS, "class1")); - addDoc(writer, buildLuceneDocument(ID, "4", - TITLE, "word1 word1 word1", - CONTENT, "word2 word2 word2", - AUTHOR, "a", - TRAINING_CLASS, "class1")); - //class2 - addDoc(writer, buildLuceneDocument(ID, "5", - TITLE, "word4 word4 word4", - CONTENT, "word5 word5", - AUTHOR, "c", - TRAINING_CLASS, "class2")); - addDoc(writer, buildLuceneDocument(ID, "6", - TITLE, "word4 word4", - CONTENT, "word5", - AUTHOR, "c", - TRAINING_CLASS, "class2")); - addDoc(writer, buildLuceneDocument(ID, "7", - TITLE, "word4 word4 word4", - CONTENT, "word5 word5 word5", - AUTHOR, "c", - TRAINING_CLASS, "class2")); - addDoc(writer, buildLuceneDocument(ID, "8", - TITLE, "word4", - CONTENT, "word5 word5 word5 word5", - AUTHOR, "c", - TRAINING_CLASS, "class2")); - //class3 - addDoc(writer, buildLuceneDocument(ID, "9", - TITLE, "word6", - CONTENT, "word7", - AUTHOR, "a", - TRAINING_CLASS, "class3")); - addDoc(writer, buildLuceneDocument(ID, "10", - TITLE, "word6", - CONTENT, "word7", - AUTHOR, "a", - TRAINING_CLASS, "class3")); - addDoc(writer, buildLuceneDocument(ID, "11", - TITLE, "word6", - CONTENT, "word7", - AUTHOR, "a", - TRAINING_CLASS, "class3")); - addDoc(writer, buildLuceneDocument(ID, "12", - TITLE, "word6", - CONTENT, "word7", - AUTHOR, "a", - TRAINING_CLASS, "class3")); + // class1 + addDoc( + writer, + buildLuceneDocument( + ID, + "1", + TITLE, + "word1 word1 word1", + CONTENT, + "word2 word2 word2", + AUTHOR, + "a", + TRAINING_CLASS, + "class1")); + addDoc( + writer, + buildLuceneDocument( + ID, + "2", + TITLE, + "word1 word1", + CONTENT, + "word2 word2", + AUTHOR, + "a", + TRAINING_CLASS, + "class1")); + addDoc( + writer, + buildLuceneDocument( + ID, + "3", + TITLE, + "word1 word1 word1", + CONTENT, + "word2", + AUTHOR, + "a", + TRAINING_CLASS, + "class1")); + addDoc( + writer, + buildLuceneDocument( + ID, + "4", + TITLE, + "word1 word1 word1", + CONTENT, + "word2 word2 word2", + AUTHOR, + "a", + TRAINING_CLASS, + "class1")); + // class2 + addDoc( + writer, + buildLuceneDocument( + ID, + "5", + TITLE, + "word4 word4 word4", + CONTENT, + "word5 word5", + AUTHOR, + "c", + TRAINING_CLASS, + "class2")); + addDoc( + writer, + buildLuceneDocument( + ID, + "6", + TITLE, + "word4 word4", + CONTENT, + "word5", + AUTHOR, + "c", + TRAINING_CLASS, + "class2")); + addDoc( + writer, + buildLuceneDocument( + ID, + "7", + TITLE, + "word4 word4 word4", + CONTENT, + "word5 word5 word5", + AUTHOR, + "c", + TRAINING_CLASS, + "class2")); + addDoc( + writer, + buildLuceneDocument( + ID, + "8", + TITLE, + "word4", + CONTENT, + "word5 word5 word5 word5", + AUTHOR, + "c", + TRAINING_CLASS, + "class2")); + // class3 + addDoc( + writer, + buildLuceneDocument( + ID, "9", TITLE, "word6", CONTENT, "word7", AUTHOR, "a", TRAINING_CLASS, "class3")); + addDoc( + writer, + buildLuceneDocument( + ID, "10", TITLE, "word6", CONTENT, "word7", AUTHOR, "a", 
TRAINING_CLASS, "class3")); + addDoc( + writer, + buildLuceneDocument( + ID, "11", TITLE, "word6", CONTENT, "word7", AUTHOR, "a", TRAINING_CLASS, "class3")); + addDoc( + writer, + buildLuceneDocument( + ID, "12", TITLE, "word6", CONTENT, "word7", AUTHOR, "a", TRAINING_CLASS, "class3")); reader = writer.getReader(); writer.close(); @@ -442,64 +501,128 @@ private void prepareTrainedIndexMultiClass() throws Exception { directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory); - //class1 - addDoc(writer, buildLuceneDocument(ID, "1", - TITLE, "word1 word1 word1", - CONTENT, "word2 word2 word2", - AUTHOR, "Name Surname", - TRAINING_CLASS, "class1", - TRAINING_CLASS, "class2" - )); - addDoc(writer, buildLuceneDocument(ID, "2", - TITLE, "word1 word1", - CONTENT, "word2 word2", - AUTHOR, "Name Surname", - TRAINING_CLASS, "class3", - TRAINING_CLASS, "class2" - )); - addDoc(writer, buildLuceneDocument(ID, "3", - TITLE, "word1 word1 word1", - CONTENT, "word2", - AUTHOR, "Name Surname", - TRAINING_CLASS, "class1", - TRAINING_CLASS, "class2" - )); - addDoc(writer, buildLuceneDocument(ID, "4", - TITLE, "word1 word1 word1", - CONTENT, "word2 word2 word2", - AUTHOR, "Name Surname", - TRAINING_CLASS, "class1", - TRAINING_CLASS, "class2" - )); - //class2 - addDoc(writer, buildLuceneDocument(ID, "5", - TITLE, "word4 word4 word4", - CONTENT, "word5 word5", - AUTHOR, "Name1 Surname1", - TRAINING_CLASS, "class6", - TRAINING_CLASS, "class4" - )); - addDoc(writer, buildLuceneDocument(ID, "6", - TITLE, "word4 word4", - CONTENT, "word5", - AUTHOR, "Name1 Surname1", - TRAINING_CLASS, "class5", - TRAINING_CLASS, "class4" - )); - addDoc(writer, buildLuceneDocument(ID, "7", - TITLE, "word4 word4 word4", - CONTENT, "word5 word5 word5", - AUTHOR, "Name1 Surname1", - TRAINING_CLASS, "class6", - TRAINING_CLASS, "class4" - )); - addDoc(writer, buildLuceneDocument(ID, "8", - TITLE, "word4", - CONTENT, "word5 word5 word5 word5", - AUTHOR, "Name1 Surname1", - TRAINING_CLASS, "class6", - TRAINING_CLASS, "class4" - )); + // class1 + addDoc( + writer, + buildLuceneDocument( + ID, + "1", + TITLE, + "word1 word1 word1", + CONTENT, + "word2 word2 word2", + AUTHOR, + "Name Surname", + TRAINING_CLASS, + "class1", + TRAINING_CLASS, + "class2")); + addDoc( + writer, + buildLuceneDocument( + ID, + "2", + TITLE, + "word1 word1", + CONTENT, + "word2 word2", + AUTHOR, + "Name Surname", + TRAINING_CLASS, + "class3", + TRAINING_CLASS, + "class2")); + addDoc( + writer, + buildLuceneDocument( + ID, + "3", + TITLE, + "word1 word1 word1", + CONTENT, + "word2", + AUTHOR, + "Name Surname", + TRAINING_CLASS, + "class1", + TRAINING_CLASS, + "class2")); + addDoc( + writer, + buildLuceneDocument( + ID, + "4", + TITLE, + "word1 word1 word1", + CONTENT, + "word2 word2 word2", + AUTHOR, + "Name Surname", + TRAINING_CLASS, + "class1", + TRAINING_CLASS, + "class2")); + // class2 + addDoc( + writer, + buildLuceneDocument( + ID, + "5", + TITLE, + "word4 word4 word4", + CONTENT, + "word5 word5", + AUTHOR, + "Name1 Surname1", + TRAINING_CLASS, + "class6", + TRAINING_CLASS, + "class4")); + addDoc( + writer, + buildLuceneDocument( + ID, + "6", + TITLE, + "word4 word4", + CONTENT, + "word5", + AUTHOR, + "Name1 Surname1", + TRAINING_CLASS, + "class5", + TRAINING_CLASS, + "class4")); + addDoc( + writer, + buildLuceneDocument( + ID, + "7", + TITLE, + "word4 word4 word4", + CONTENT, + "word5 word5 word5", + AUTHOR, + "Name1 Surname1", + TRAINING_CLASS, + "class6", + TRAINING_CLASS, + "class4")); + addDoc( + writer, + 
buildLuceneDocument( + ID, + "8", + TITLE, + "word4", + CONTENT, + "word5 word5 word5 word5", + AUTHOR, + "Name1 Surname1", + TRAINING_CLASS, + "class6", + TRAINING_CLASS, + "class4")); reader = writer.getReader(); writer.close(); @@ -508,8 +631,10 @@ private void prepareTrainedIndexMultiClass() throws Exception { public static Document buildLuceneDocument(Object... fieldsAndValues) { Document luceneDoc = new Document(); - for (int i=0; i dest_s = doc.getFieldValues("dest_s"); @@ -57,11 +51,9 @@ public void testMultiClone() throws Exception { @Test public void testArrayClone() throws Exception { - SolrInputDocument doc = processAdd("clone-array", - doc(f("id", "1"), - f("source1_s", "foo"), - f("source2_s", "bar"))); - + SolrInputDocument doc = + processAdd("clone-array", doc(f("id", "1"), f("source1_s", "foo"), f("source2_s", "bar"))); + assertEquals("source1_s should have stringValue", "foo", doc.getFieldValue("source1_s")); assertEquals("source2_s should have stringValue", "bar", doc.getFieldValue("source2_s")); Collection<Object> dest_s = doc.getFieldValues("dest_s"); @@ -71,13 +63,17 @@ public void testSelectorClone() throws Exception { - SolrInputDocument doc = processAdd("clone-selector", - doc(f("id", "1"), - f("source0_s", "nope, not me"), - f("source1_s", "foo"), - f("source2_s", "bar"))); - - assertEquals("source0_s should have stringValue", "nope, not me", doc.getFieldValue("source0_s")); + SolrInputDocument doc = + processAdd( + "clone-selector", + doc( + f("id", "1"), + f("source0_s", "nope, not me"), + f("source1_s", "foo"), + f("source2_s", "bar"))); + + assertEquals( + "source0_s should have stringValue", "nope, not me", doc.getFieldValue("source0_s")); assertEquals("source1_s should have stringValue", "foo", doc.getFieldValue("source1_s")); assertEquals("source2_s should have stringValue", "bar", doc.getFieldValue("source2_s")); Collection<Object> dest_s = doc.getFieldValues("dest_s"); @@ -87,16 +83,19 @@ public void testSelectorClone() throws Exception { } public void testMultipleClones() throws Exception { - SolrInputDocument doc = processAdd("multiple-clones", - doc(f("id", "1"), - f("category", "test"), - f("authors", "author1", "author2"), - f("editors", "ed1", "ed2"), - f("bfriday_price", 4.00), - f("sale_price", 5.00), - f("list_price", 6.00), - f("features", "hill", "valley", "dune"))); - + SolrInputDocument doc = + processAdd( + "multiple-clones", + doc( + f("id", "1"), + f("category", "test"), + f("authors", "author1", "author2"), + f("editors", "ed1", "ed2"), + f("bfriday_price", 4.00), + f("sale_price", 5.00), + f("list_price", 6.00), + f("features", "hill", "valley", "dune"))); + // the original values should remain assertEquals("category should have a value", "test", doc.getFieldValue("category")); @@ -135,8 +134,9 @@ public void testMultipleClones() throws Exception { assertTrue(prices.contains(4.0)); assertFalse(prices.contains(6.0)); - // n.b. the field names below imply singularity but that would be achieved with a subsequent - // FirstFieldValueUpdateProcessorFactory (or similar custom class), and not in clone field itself + // n.b.
the field names below imply singularity but that would be achieved with a subsequent + // FirstFieldValueUpdateProcessorFactory (or similar custom class), and not in clone field + // itself Collection<Object> keyf = doc.getFieldValues("key_feature"); assertTrue(keyf.size() == 3); @@ -156,164 +156,175 @@ public void testCloneField() throws Exception { SolrInputDocument d; // regardless of chain, all of these checks should be equivalent - for (String chain : Arrays.asList("clone-single", "clone-single-regex", - "clone-multi", "clone-multi-regex", - "clone-array", "clone-array-regex", - "clone-selector", "clone-selector-regex")) { - + for (String chain : + Arrays.asList( + "clone-single", "clone-single-regex", + "clone-multi", "clone-multi-regex", + "clone-array", "clone-array-regex", + "clone-selector", "clone-selector-regex")) { + // simple clone - d = processAdd(chain, - doc(f("id", "1111"), - f("source0_s", "NOT COPIED"), - f("source1_s", "123456789", "", 42, "abcd"))); + d = + processAdd( + chain, + doc( + f("id", "1111"), + f("source0_s", "NOT COPIED"), + f("source1_s", "123456789", "", 42, "abcd"))); assertNotNull(chain, d); - assertEquals(chain, - Arrays.asList("123456789", "", 42, "abcd"), - d.getFieldValues("source1_s")); - assertEquals(chain, - Arrays.asList("123456789", "", 42, "abcd"), - d.getFieldValues("dest_s")); + assertEquals( + chain, Arrays.asList("123456789", "", 42, "abcd"), d.getFieldValues("source1_s")); + assertEquals(chain, Arrays.asList("123456789", "", 42, "abcd"), d.getFieldValues("dest_s")); // append to existing values, preserve boost - d = processAdd(chain, - doc(f("id", "1111"), - field("dest_s", "orig1", "orig2"), - f("source0_s", "NOT COPIED"), - f("source1_s", "123456789", "", 42, "abcd"))); + d = + processAdd( + chain, + doc( + f("id", "1111"), + field("dest_s", "orig1", "orig2"), + f("source0_s", "NOT COPIED"), + f("source1_s", "123456789", "", 42, "abcd"))); assertNotNull(chain, d); - assertEquals(chain, - Arrays.asList("123456789", "", 42, "abcd"), - d.getFieldValues("source1_s")); - assertEquals(chain, - Arrays.asList("orig1", "orig2", "123456789", "", 42, "abcd"), - d.getFieldValues("dest_s")); + assertEquals( + chain, Arrays.asList("123456789", "", 42, "abcd"), d.getFieldValues("source1_s")); + assertEquals( + chain, + Arrays.asList("orig1", "orig2", "123456789", "", 42, "abcd"), + d.getFieldValues("dest_s")); } // should be equivalent for any chain matching source1_s and source2_s (but not source0_s) - for (String chain : Arrays.asList("clone-multi", "clone-multi-regex", - "clone-array", "clone-array-regex", - "clone-selector", "clone-selector-regex")) { + for (String chain : + Arrays.asList( + "clone-multi", "clone-multi-regex", + "clone-array", "clone-array-regex", + "clone-selector", "clone-selector-regex")) { // simple clone - d = processAdd(chain, - doc(f("id", "1111"), - f("source0_s", "NOT COPIED"), - f("source1_s", "123456789", "", 42, "abcd"), - f("source2_s", "xxx", 999))); + d = + processAdd( + chain, + doc( + f("id", "1111"), + f("source0_s", "NOT COPIED"), + f("source1_s", "123456789", "", 42, "abcd"), + f("source2_s", "xxx", 999))); assertNotNull(chain, d); - assertEquals(chain, - Arrays.asList("123456789", "", 42, "abcd"),
d.getFieldValues("source1_s")); + assertEquals(chain, Arrays.asList("xxx", 999), d.getFieldValues("source2_s")); + assertEquals( + chain, + Arrays.asList("123456789", "", 42, "abcd", "xxx", 999), + d.getFieldValues("dest_s")); // append to existing values - d = processAdd(chain, - doc(f("id", "1111"), - field("dest_s", "orig1", "orig2"), - f("source0_s", "NOT COPIED"), - f("source1_s", "123456789", "", 42, "abcd"), - f("source2_s", "xxx", 999))); + d = + processAdd( + chain, + doc( + f("id", "1111"), + field("dest_s", "orig1", "orig2"), + f("source0_s", "NOT COPIED"), + f("source1_s", "123456789", "", 42, "abcd"), + f("source2_s", "xxx", 999))); assertNotNull(chain, d); - assertEquals(chain, - Arrays.asList("123456789", "", 42, "abcd"), - d.getFieldValues("source1_s")); - assertEquals(chain, - Arrays.asList("xxx", 999), - d.getFieldValues("source2_s")); - assertEquals(chain, - Arrays.asList("orig1", "orig2", - "123456789", "", 42, "abcd", - "xxx", 999), - d.getFieldValues("dest_s")); + assertEquals( + chain, Arrays.asList("123456789", "", 42, "abcd"), d.getFieldValues("source1_s")); + assertEquals(chain, Arrays.asList("xxx", 999), d.getFieldValues("source2_s")); + assertEquals( + chain, + Arrays.asList("orig1", "orig2", "123456789", "", 42, "abcd", "xxx", 999), + d.getFieldValues("dest_s")); } - + // any chain that copies source1_s to dest_s should be equivalent for these assertions - for (String chain : Arrays.asList("clone-simple-regex-syntax", - "clone-single", "clone-single-regex", - "clone-multi", "clone-multi-regex", - "clone-array", "clone-array-regex", - "clone-selector", "clone-selector-regex")) { + for (String chain : + Arrays.asList( + "clone-simple-regex-syntax", + "clone-single", + "clone-single-regex", + "clone-multi", + "clone-multi-regex", + "clone-array", + "clone-array-regex", + "clone-selector", + "clone-selector-regex")) { // simple clone - d = processAdd(chain, - doc(f("id", "1111"), - f("source1_s", "123456789", "", 42, "abcd"))); + d = processAdd(chain, doc(f("id", "1111"), f("source1_s", "123456789", "", 42, "abcd"))); assertNotNull(chain, d); - assertEquals(chain, - Arrays.asList("123456789", "", 42, "abcd"), - d.getFieldValues("source1_s")); - assertEquals(chain, - Arrays.asList("123456789", "", 42, "abcd"), - d.getFieldValues("dest_s")); + assertEquals( + chain, Arrays.asList("123456789", "", 42, "abcd"), d.getFieldValues("source1_s")); + assertEquals(chain, Arrays.asList("123456789", "", 42, "abcd"), d.getFieldValues("dest_s")); // append to existing values, preserve boost - d = processAdd(chain, - doc(f("id", "1111"), - field("dest_s", "orig1", "orig2"), - f("source1_s", "123456789", "", 42, "abcd"))); + d = + processAdd( + chain, + doc( + f("id", "1111"), + field("dest_s", "orig1", "orig2"), + f("source1_s", "123456789", "", 42, "abcd"))); assertNotNull(chain, d); - assertEquals(chain, - Arrays.asList("123456789", "", 42, "abcd"), - d.getFieldValues("source1_s")); - assertEquals(chain, - Arrays.asList("orig1", "orig2", "123456789", "", 42, "abcd"), - d.getFieldValues("dest_s")); + assertEquals( + chain, Arrays.asList("123456789", "", 42, "abcd"), d.getFieldValues("source1_s")); + assertEquals( + chain, + Arrays.asList("orig1", "orig2", "123456789", "", 42, "abcd"), + d.getFieldValues("dest_s")); } } public void testCloneFieldRegexReplaceAll() throws Exception { - SolrInputDocument d = processAdd("clone-regex-replaceall", - doc(f("id", "1111"), - f("foo_x2_s", "123456789", "", 42, "abcd"), - f("foo_x3_x7_s", "xyz"))); - + SolrInputDocument d = + processAdd( + 
"clone-regex-replaceall", + doc( + f("id", "1111"), + f("foo_x2_s", "123456789", "", 42, "abcd"), + f("foo_x3_x7_s", "xyz"))); + assertNotNull(d); - assertEquals(Arrays.asList("123456789", "", 42, "abcd"), - d.getFieldValues("foo_y2_s")); - assertEquals("xyz", - d.getFieldValue("foo_y3_y7_s")); + assertEquals(Arrays.asList("123456789", "", 42, "abcd"), d.getFieldValues("foo_y2_s")); + assertEquals("xyz", d.getFieldValue("foo_y3_y7_s")); } - + public void testCloneFieldExample() throws Exception { SolrInputDocument d; // test example from the javadocs - d = processAdd("multiple-clones", - doc(f("id", "1111"), - f("category", "misc"), - f("authors", "Isaac Asimov", "John Brunner"), - f("editors", "John W. Campbell"), - f("store1_price", 87), - f("store2_price", 78), - f("store3_price", (Object) null), - f("list_price", 1000), - f("features", "Pages!", "Binding!"), - f("feat_of_strengths", "Pullups"))); - + d = + processAdd( + "multiple-clones", + doc( + f("id", "1111"), + f("category", "misc"), + f("authors", "Isaac Asimov", "John Brunner"), + f("editors", "John W. Campbell"), + f("store1_price", 87), + f("store2_price", 78), + f("store3_price", (Object) null), + f("list_price", 1000), + f("features", "Pages!", "Binding!"), + f("feat_of_strengths", "Pullups"))); + assertNotNull(d); assertEquals("misc", d.getFieldValue("category")); assertEquals("misc", d.getFieldValue("category_s")); - assertEquals(Arrays.asList("Isaac Asimov", "John Brunner"), - d.getFieldValues("authors")); - assertEquals(Collections.singletonList("John W. Campbell"), - d.getFieldValues("editors")); - assertEquals(Arrays.asList("Isaac Asimov", "John Brunner", - "John W. Campbell"), - d.getFieldValues("contributors")); + assertEquals(Arrays.asList("Isaac Asimov", "John Brunner"), d.getFieldValues("authors")); + assertEquals(Collections.singletonList("John W. Campbell"), d.getFieldValues("editors")); + assertEquals( + Arrays.asList("Isaac Asimov", "John Brunner", "John W. 
Campbell"), + d.getFieldValues("contributors")); assertEquals(87, d.getFieldValue("store1_price")); assertEquals(78, d.getFieldValue("store2_price")); assertEquals(1000, d.getFieldValue("list_price")); - assertEquals(Arrays.asList(87, 78), - d.getFieldValues("all_prices")); - - assertEquals(Arrays.asList("Pages!", "Binding!"), - d.getFieldValues("key_feature")); + assertEquals(Arrays.asList(87, 78), d.getFieldValues("all_prices")); + + assertEquals(Arrays.asList("Pages!", "Binding!"), d.getFieldValues("key_feature")); assertEquals("Pullups", d.getFieldValue("key_feat_of_strength")); } @@ -322,49 +333,41 @@ public void testCloneCombinations() throws Exception { SolrInputDocument d; // maxChars - d = processAdd("clone-max-chars", - doc(f("id", "1111"), - f("field1", "text"))); + d = processAdd("clone-max-chars", doc(f("id", "1111"), f("field1", "text"))); assertNotNull(d); assertEquals("text", d.getFieldValue("field1")); assertEquals("tex", d.getFieldValue("toField")); // move - d = processAdd("clone-move", - doc(f("id", "1111"), - f("field1", "text"))); + d = processAdd("clone-move", doc(f("id", "1111"), f("field1", "text"))); assertNotNull(d); assertEquals("text", d.getFieldValue("toField")); assertFalse(d.containsKey("field1")); // replace - d = processAdd("clone-replace", - doc(f("id", "1111"), - f("toField", "IGNORED"), - f("field1", "text"))); + d = + processAdd( + "clone-replace", doc(f("id", "1111"), f("toField", "IGNORED"), f("field1", "text"))); assertNotNull(d); assertEquals("text", d.getFieldValue("field1")); assertEquals("text", d.getFieldValue("toField")); // append - d = processAdd("clone-append", - doc(f("id", "1111"), - f("toField", "aaa"), - f("field1", "bbb"), - f("field2", "ccc"))); + d = + processAdd( + "clone-append", + doc(f("id", "1111"), f("toField", "aaa"), f("field1", "bbb"), f("field2", "ccc"))); assertNotNull(d); assertEquals("bbb", d.getFieldValue("field1")); assertEquals("ccc", d.getFieldValue("field2")); assertEquals("aaa; bbb; ccc", d.getFieldValue("toField")); // first value - d = processAdd("clone-first", - doc(f("id", "1111"), - f("field0", "aaa"), - f("field1", "bbb"), - f("field2", "ccc"))); + d = + processAdd( + "clone-first", + doc(f("id", "1111"), f("field0", "aaa"), f("field1", "bbb"), f("field2", "ccc"))); assertNotNull(d); assertEquals("aaa", d.getFieldValue("toField")); } - } diff --git a/solr/core/src/test/org/apache/solr/update/processor/CustomUpdateRequestProcessor.java b/solr/core/src/test/org/apache/solr/update/processor/CustomUpdateRequestProcessor.java index 24d9a1273ab..5745fc2d3fc 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/CustomUpdateRequestProcessor.java +++ b/solr/core/src/test/org/apache/solr/update/processor/CustomUpdateRequestProcessor.java @@ -16,13 +16,9 @@ */ package org.apache.solr.update.processor; - -/** - * A passthrough processor that does nothing. - */ +/** A passthrough processor that does nothing. 
*/ public class CustomUpdateRequestProcessor extends UpdateRequestProcessor { - public CustomUpdateRequestProcessor( UpdateRequestProcessor next) { + public CustomUpdateRequestProcessor(UpdateRequestProcessor next) { super(next); } } - diff --git a/solr/core/src/test/org/apache/solr/update/processor/CustomUpdateRequestProcessorFactory.java b/solr/core/src/test/org/apache/solr/update/processor/CustomUpdateRequestProcessorFactory.java index e34c7340258..684a043fa8a 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/CustomUpdateRequestProcessorFactory.java +++ b/solr/core/src/test/org/apache/solr/update/processor/CustomUpdateRequestProcessorFactory.java @@ -20,23 +20,18 @@ import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.response.SolrQueryResponse; - -/** - * A custom class to do custom stuff - */ -public class CustomUpdateRequestProcessorFactory extends UpdateRequestProcessorFactory -{ +/** A custom class to do custom stuff */ +public class CustomUpdateRequestProcessorFactory extends UpdateRequestProcessorFactory { public NamedList<?> args = null; - + @Override - public void init(NamedList<?> args ) - { + public void init(NamedList<?> args) { this.args = args; } @Override - public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) { + public UpdateRequestProcessor getInstance( + SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) { return new CustomUpdateRequestProcessor(next); } } - diff --git a/solr/core/src/test/org/apache/solr/update/processor/DefaultValueUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/DefaultValueUpdateProcessorTest.java index ddf5646cf5c..045448717bb 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/DefaultValueUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/DefaultValueUpdateProcessorTest.java @@ -16,25 +16,20 @@ */ package org.apache.solr.update.processor; +import java.io.IOException; +import java.util.Arrays; import java.util.Date; import java.util.UUID; -import java.util.Arrays; -import java.io.IOException; - import org.apache.solr.SolrTestCaseJ4; - import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputField; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.core.SolrCore; - -import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.LocalSolrQueryRequest; +import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.request.SolrRequestInfo; import org.apache.solr.response.SolrQueryResponse; - import org.apache.solr.update.AddUpdateCommand; - import org.junit.BeforeClass; public class DefaultValueUpdateProcessorTest extends SolrTestCaseJ4 { @@ -50,47 +45,42 @@ public void testDefaults() throws Exception { Date now = new Date(); // get all defaults - d = processAdd("default-values", - doc(f("id", "1111"), - f("name", "Existing", "Values"))); - + d = processAdd("default-values", doc(f("id", "1111"), f("name", "Existing", "Values"))); + assertNotNull(d); - + assertEquals("X", d.getFieldValue("processor_default_s")); assertEquals(42, d.getFieldValue("processor_default_i")); assertNotNull(d.getFieldValue("uuid")); assertNotNull(UUID.fromString(d.getFieldValue("uuid").toString())); assertNotNull(d.getFieldValue("timestamp")); - assertTrue("timestamp not a date: " + - d.getFieldValue("timestamp").getClass(), - d.getFieldValue("timestamp") instanceof Date); -
assertEquals(Arrays.asList("Existing","Values"), - d.getFieldValues("name")); - + assertTrue( + "timestamp not a date: " + d.getFieldValue("timestamp").getClass(), + d.getFieldValue("timestamp") instanceof Date); + assertEquals(Arrays.asList("Existing", "Values"), d.getFieldValues("name")); + // defaults already specified - d = processAdd("default-values", - doc(f("id", "1111"), - f("timestamp", now), - f("uuid", "550e8400-e29b-41d4-a716-446655440000"), - f("processor_default_s", "I HAVE A VALUE"), - f("processor_default_i", 12345), - f("name", "Existing", "Values"))); - + d = + processAdd( + "default-values", + doc( + f("id", "1111"), + f("timestamp", now), + f("uuid", "550e8400-e29b-41d4-a716-446655440000"), + f("processor_default_s", "I HAVE A VALUE"), + f("processor_default_i", 12345), + f("name", "Existing", "Values"))); + assertNotNull(d); - + assertEquals("I HAVE A VALUE", d.getFieldValue("processor_default_s")); assertEquals(12345, d.getFieldValue("processor_default_i")); - assertEquals("550e8400-e29b-41d4-a716-446655440000", - d.getFieldValue("uuid")); + assertEquals("550e8400-e29b-41d4-a716-446655440000", d.getFieldValue("uuid")); assertEquals(now, d.getFieldValue("timestamp")); - assertEquals(Arrays.asList("Existing","Values"), - d.getFieldValues("name")); + assertEquals(Arrays.asList("Existing", "Values"), d.getFieldValues("name")); } - - /** - * Convenience method for building up SolrInputDocuments - */ + /** Convenience method for building up SolrInputDocuments */ SolrInputDocument doc(SolrInputField... fields) { SolrInputDocument d = new SolrInputDocument(); for (SolrInputField f : fields) { @@ -99,9 +89,7 @@ SolrInputDocument doc(SolrInputField... fields) { return d; } - /** - * Convenience method for building up SolrInputFields - */ + /** Convenience method for building up SolrInputFields */ SolrInputField field(String name, Object... values) { SolrInputField f = new SolrInputField(name); for (Object v : values) { @@ -110,22 +98,17 @@ SolrInputField field(String name, Object... values) { return f; } - /** - * Convenience method for building up SolrInputFields with default boost - */ + /** Convenience method for building up SolrInputFields with default boost */ SolrInputField f(String name, Object... 
values) { return field(name, values); } - /** - * Runs a document through the specified chain, and returns the final - * document used when the chain is completed (NOTE: some chains may - * modify the document in place + * Runs a document through the specified chain, and returns the final document used when the chain + * is completed (NOTE: some chains may modify the document in place) */ - SolrInputDocument processAdd(final String chain, - final SolrInputDocument docIn) - throws IOException { + SolrInputDocument processAdd(final String chain, final SolrInputDocument docIn) + throws IOException { SolrCore core = h.getCore(); UpdateRequestProcessorChain pc = core.getUpdateProcessingChain(chain); @@ -133,10 +116,9 @@ SolrInputDocument processAdd(final String chain, SolrQueryResponse rsp = new SolrQueryResponse(); - SolrQueryRequest req = new LocalSolrQueryRequest - (core, new ModifiableSolrParams()); + SolrQueryRequest req = new LocalSolrQueryRequest(core, new ModifiableSolrParams()); try { - SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req,rsp)); + SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp)); AddUpdateCommand cmd = new AddUpdateCommand(req); cmd.solrDoc = docIn; diff --git a/solr/core/src/test/org/apache/solr/update/processor/DimensionalRoutedAliasUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/DimensionalRoutedAliasUpdateProcessorTest.java index ef516de42ce..568dfccb611 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/DimensionalRoutedAliasUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/DimensionalRoutedAliasUpdateProcessorTest.java @@ -17,6 +17,9 @@ package org.apache.solr.update.processor; +import static org.apache.solr.client.solrj.request.CollectionAdminRequest.createCategoryRoutedAlias; +import static org.apache.solr.client.solrj.request.CollectionAdminRequest.createTimeRoutedAlias; + import java.io.IOException; import java.lang.invoke.MethodHandles; import java.time.Instant; @@ -24,7 +27,6 @@ import java.util.Date; import java.util.List; import java.util.stream.Collectors; - import org.apache.lucene.util.IOUtils; import org.apache.solr.client.solrj.RoutedAliasTypes; import org.apache.solr.client.solrj.SolrServerException; @@ -49,9 +51,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.client.solrj.request.CollectionAdminRequest.createCategoryRoutedAlias; -import static org.apache.solr.client.solrj.request.CollectionAdminRequest.createTimeRoutedAlias; - public class DimensionalRoutedAliasUpdateProcessorTest extends RoutedAliasUpdateProcessorTest { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -66,12 +65,11 @@ public class DimensionalRoutedAliasUpdateProcessorTest extends RoutedAliasUpdate private static final String timeField = "timestamp_dt"; private static final String catField = "cat_s"; - @Before public void doBefore() throws Exception { configureCluster(4).configure(); solrClient = getCloudSolrClient(cluster); - //log this to help debug potential causes of problems + // log this to help debug potential causes of problems if (log.isInfoEnabled()) { log.info("SolrClient: {}", solrClient); log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); // nowarn @@ -89,20 +87,27 @@ public static void finish() throws Exception { IOUtils.close(solrClient); solrClient = null; } + @Test public void testTimeCat() throws Exception { String configName = getSaferTestName();
createConfigSet(configName); - CreateTimeRoutedAlias TRA_Dim = createTimeRoutedAlias(getAlias(), "2019-07-01T00:00:00Z", "+1DAY", - getTimeField(), null); + CreateTimeRoutedAlias TRA_Dim = + createTimeRoutedAlias(getAlias(), "2019-07-01T00:00:00Z", "+1DAY", getTimeField(), null); CreateCategoryRoutedAlias CRA_Dim = createCategoryRoutedAlias(null, getCatField(), 20, null); - CollectionAdminRequest.DimensionalRoutedAlias dra = CollectionAdminRequest.createDimensionalRoutedAlias(getAlias(), - CollectionAdminRequest.createCollection("_unused_", configName, 2, 2), TRA_Dim, CRA_Dim); + CollectionAdminRequest.DimensionalRoutedAlias dra = + CollectionAdminRequest.createDimensionalRoutedAlias( + getAlias(), + CollectionAdminRequest.createCollection("_unused_", configName, 2, 2), + TRA_Dim, + CRA_Dim); SolrParams params = dra.getParams(); - assertEquals("Dimensional[TIME,CATEGORY]", params.get(CollectionAdminRequest.RoutedAliasAdminRequest.ROUTER_TYPE_NAME)); + assertEquals( + "Dimensional[TIME,CATEGORY]", + params.get(CollectionAdminRequest.RoutedAliasAdminRequest.ROUTER_TYPE_NAME)); System.out.println(params); assertEquals("20", params.get("router.1.maxCardinality")); assertEquals("2019-07-01T00:00:00Z", params.get("router.0.start")); @@ -118,37 +123,29 @@ public void testTimeCat() throws Exception { assertCatTimeInvariants( ap( firstCol, - // lower dimensions are fleshed out because we need to maintain the order of the TRA dim and - // not fail if we get an older document later + // lower dimensions are fleshed out because we need to maintain the order of the TRA dim + // and not fail if we get an older document later timeCatDraColFor("2019-07-01", "tabby"), - timeCatDraColFor("2019-07-02", "tabby") - ), - ap( - "tabby" - ) - ); + timeCatDraColFor("2019-07-02", "tabby")), + ap("tabby")); addDocsAndCommit(true, newDoc("calico", "2019-07-02T00:00:00Z")); - // initial col not removed because the 07-01 CRA has not yet gained a new category (sub-dimensions are independent) + // initial col not removed because the 07-01 CRA has not yet gained a new category + // (sub-dimensions are independent) assertCatTimeInvariants( ap( timeCatDraColFor("2019-07-01", "calico"), timeCatDraColFor("2019-07-02", "calico"), timeCatDraColFor("2019-07-01", "tabby"), - timeCatDraColFor("2019-07-02", "tabby") - ), - ap( - "tabby", - "calico" - ) - ); - - testFailedDocument("shorthair", "2017-10-23T00:00:00Z", "couldn't be routed" ); - testFailedDocument("shorthair", "2020-10-23T00:00:00Z", "too far in the future" ); - testFailedDocument(null, "2019-07-02T00:00:00Z", "Route value is null"); + timeCatDraColFor("2019-07-02", "tabby")), + ap("tabby", "calico")); + + testFailedDocument("shorthair", "2017-10-23T00:00:00Z", "couldn't be routed"); + testFailedDocument("shorthair", "2020-10-23T00:00:00Z", "too far in the future"); + testFailedDocument(null, "2019-07-02T00:00:00Z", "Route value is null"); testFailedDocument("foo__CRA__bar", "2019-07-02T00:00:00Z", "7 character sequence __CRA__"); - testFailedDocument("fóóCRAóóbar", "2019-07-02T00:00:00Z", "7 character sequence __CRA__"); + testFailedDocument("fóóCRAóóbar", "2019-07-02T00:00:00Z", "7 character sequence __CRA__"); // hopefully nothing changed assertCatTimeInvariants( @@ -156,21 +153,16 @@ public void testTimeCat() throws Exception { timeCatDraColFor("2019-07-01", "calico"), timeCatDraColFor("2019-07-02", "calico"), timeCatDraColFor("2019-07-01", "tabby"), - timeCatDraColFor("2019-07-02", "tabby") - ), - ap( - "tabby", - "calico" - ) - ); + timeCatDraColFor("2019-07-02", 
"tabby")), + ap("tabby", "calico")); // 4 docs no new collections - addDocsAndCommit(true, + addDocsAndCommit( + true, newDoc("calico", "2019-07-02T00:00:00Z"), newDoc("tabby", "2019-07-01T00:00:00Z"), newDoc("tabby", "2019-07-01T23:00:00Z"), - newDoc("calico", "2019-07-02T23:00:00Z") - ); + newDoc("calico", "2019-07-02T23:00:00Z")); // hopefully nothing changed assertCatTimeInvariants( @@ -178,21 +170,16 @@ public void testTimeCat() throws Exception { timeCatDraColFor("2019-07-01", "calico"), timeCatDraColFor("2019-07-02", "calico"), timeCatDraColFor("2019-07-01", "tabby"), - timeCatDraColFor("2019-07-02", "tabby") - ), - ap( - "tabby", - "calico" - ) - ); + timeCatDraColFor("2019-07-02", "tabby")), + ap("tabby", "calico")); // 4 docs 2 new collections, in random order and maybe not using the alias - addDocsAndCommit(false, + addDocsAndCommit( + false, newDoc("calico", "2019-07-04T00:00:00Z"), newDoc("tabby", "2019-07-01T00:00:00Z"), newDoc("tabby", "2019-07-01T23:00:00Z"), - newDoc("calico", "2019-07-03T23:00:00Z") - ); + newDoc("calico", "2019-07-03T23:00:00Z")); assertCatTimeInvariants( ap( @@ -201,24 +188,21 @@ public void testTimeCat() throws Exception { timeCatDraColFor("2019-07-03", "calico"), timeCatDraColFor("2019-07-04", "calico"), timeCatDraColFor("2019-07-01", "tabby"), - timeCatDraColFor("2019-07-02", "tabby") - ), - ap( - "tabby", - "calico" - ) - ); + timeCatDraColFor("2019-07-02", "tabby")), + ap("tabby", "calico")); // now test with async pre-create. CollectionAdminRequest.setAliasProperty(getAlias()) - .addProperty("router.0.preemptiveCreateMath", "30MINUTE").process(solrClient); + .addProperty("router.0.preemptiveCreateMath", "30MINUTE") + .process(solrClient); - addDocsAndCommit(true, + addDocsAndCommit( + true, newDoc("shorthair", "2019-07-02T23:40:00Z"), // create 2 sync 1 async - newDoc("calico", "2019-07-03T23:00:00Z") // does not create - ); + newDoc("calico", "2019-07-03T23:00:00Z") // does not create + ); - waitColAndAlias(getAlias(), "", TRA + "2019-07-03" + CRA + "shorthair", 2); + waitColAndAlias(getAlias(), "", TRA + "2019-07-03" + CRA + "shorthair", 2); assertCatTimeInvariants( ap( @@ -230,19 +214,13 @@ public void testTimeCat() throws Exception { timeCatDraColFor("2019-07-02", "shorthair"), timeCatDraColFor("2019-07-03", "shorthair"), timeCatDraColFor("2019-07-01", "tabby"), - timeCatDraColFor("2019-07-02", "tabby") - ), - ap( - "shorthair", - "tabby", - "calico" - ) - ); + timeCatDraColFor("2019-07-02", "tabby")), + ap("shorthair", "tabby", "calico")); - addDocsAndCommit(false, + addDocsAndCommit( + false, newDoc("shorthair", "2019-07-02T23:40:00Z"), // should be no change - newDoc("calico", "2019-07-03T23:00:00Z") - ); + newDoc("calico", "2019-07-03T23:00:00Z")); /* Here we need to be testing that something that should not be created (extra preemptive async collections) @@ -262,24 +240,20 @@ Here we need to be testing that something that should not be created (extra pree timeCatDraColFor("2019-07-02", "shorthair"), timeCatDraColFor("2019-07-03", "shorthair"), timeCatDraColFor("2019-07-01", "tabby"), - timeCatDraColFor("2019-07-02", "tabby") - ), - ap( - "shorthair", - "tabby", - "calico" - ) - ); + timeCatDraColFor("2019-07-02", "tabby")), + ap("shorthair", "tabby", "calico")); // now test with auto-delete. 
CollectionAdminRequest.setAliasProperty(getAlias()) - .addProperty("router.0.autoDeleteAge", "/DAY-5DAY").process(solrClient); + .addProperty("router.0.autoDeleteAge", "/DAY-5DAY") + .process(solrClient); // this one should not yet cause deletion - addDocsAndCommit(false, + addDocsAndCommit( + false, newDoc("shorthair", "2019-07-02T23:00:00Z"), // no effect expected - newDoc("calico", "2019-07-05T23:00:00Z") // create 1 - ); + newDoc("calico", "2019-07-05T23:00:00Z") // create 1 + ); assertCatTimeInvariants( ap( @@ -292,21 +266,16 @@ Here we need to be testing that something that should not be created (extra pree timeCatDraColFor("2019-07-02", "shorthair"), timeCatDraColFor("2019-07-03", "shorthair"), timeCatDraColFor("2019-07-01", "tabby"), - timeCatDraColFor("2019-07-02", "tabby") - ), - ap( - "shorthair", - "tabby", - "calico" - ) - ); + timeCatDraColFor("2019-07-02", "tabby")), + ap("shorthair", "tabby", "calico")); // have to only send to alias here since one of the collections will be deleted. - addDocsAndCommit(true, + addDocsAndCommit( + true, newDoc("shorthair", "2019-07-02T23:00:00Z"), // no effect expected - newDoc("calico", "2019-07-06T00:00:00Z") // create July 6, delete July 1 - ); - waitCoreCount(getAlias() + TRA + "2019-07-01" + CRA + "calico", 0); + newDoc("calico", "2019-07-06T00:00:00Z") // create July 6, delete July 1 + ); + waitCoreCount(getAlias() + TRA + "2019-07-01" + CRA + "calico", 0); assertCatTimeInvariants( ap( @@ -320,22 +289,18 @@ Here we need to be testing that something that should not be created (extra pree timeCatDraColFor("2019-07-02", "shorthair"), timeCatDraColFor("2019-07-03", "shorthair"), timeCatDraColFor("2019-07-01", "tabby"), - timeCatDraColFor("2019-07-02", "tabby") - ), - ap( - "shorthair", - "tabby", - "calico" - ) - ); + timeCatDraColFor("2019-07-02", "tabby")), + ap("shorthair", "tabby", "calico")); // verify that all the documents ended up in the right collections. 
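The verification loop that follows leans on the fact that shard names embed the collection name, which in turn embeds both route values. A sketch of the two-dimensional naming scheme mirrored by timeCatDraColFor further down; "__CRA__" is the 7-character sequence the failure cases above assert on, while "__TRA__" is assumed to be its time-dimension counterpart, and the alias name is a placeholder (the test uses getAlias()):

    class DraNameSketch {
      static final String TRA = "__TRA__"; // assumed time infix, by analogy with __CRA__
      static final String CRA = "__CRA__"; // asserted on by the failure cases above

      static String timeCatColFor(String alias, String timestamp, String category) {
        return alias + TRA + timestamp + CRA + category;
      }

      public static void main(String[] args) {
        // prints: myAlias__TRA__2019-07-01__CRA__tabby
        System.out.println(timeCatColFor("myAlias", "2019-07-01", "tabby"));
      }
    }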
- QueryResponse resp = solrClient.query(getAlias(), params( - "q", "*:*", - "rows", "100", - "fl","*,[shard]", - "sort", "id asc" - )); + QueryResponse resp = + solrClient.query( + getAlias(), + params( + "q", "*:*", + "rows", "100", + "fl", "*,[shard]", + "sort", "id asc")); SolrDocumentList results = resp.getResults(); assertEquals(18, results.getNumFound()); for (SolrDocument result : results) { @@ -348,21 +313,26 @@ Here we need to be testing that something that should not be created (extra pree } } - @Test public void testCatTime() throws Exception { String configName = getSaferTestName(); createConfigSet(configName); - CreateTimeRoutedAlias TRA_Dim = createTimeRoutedAlias(getAlias(), "2019-07-01T00:00:00Z", "+1DAY", - getTimeField(), null); + CreateTimeRoutedAlias TRA_Dim = + createTimeRoutedAlias(getAlias(), "2019-07-01T00:00:00Z", "+1DAY", getTimeField(), null); CreateCategoryRoutedAlias CRA_Dim = createCategoryRoutedAlias(null, getCatField(), 20, null); - CollectionAdminRequest.DimensionalRoutedAlias dra = CollectionAdminRequest.createDimensionalRoutedAlias(getAlias(), - CollectionAdminRequest.createCollection("_unused_", configName, 2, 2), CRA_Dim, TRA_Dim); + CollectionAdminRequest.DimensionalRoutedAlias dra = + CollectionAdminRequest.createDimensionalRoutedAlias( + getAlias(), + CollectionAdminRequest.createCollection("_unused_", configName, 2, 2), + CRA_Dim, + TRA_Dim); SolrParams params = dra.getParams(); - assertEquals("Dimensional[CATEGORY,TIME]", params.get(CollectionAdminRequest.RoutedAliasAdminRequest.ROUTER_TYPE_NAME)); + assertEquals( + "Dimensional[CATEGORY,TIME]", + params.get(CollectionAdminRequest.RoutedAliasAdminRequest.ROUTER_TYPE_NAME)); System.out.println(params); assertEquals("20", params.get("router.0.maxCardinality")); assertEquals("2019-07-01T00:00:00Z", params.get("router.1.start")); @@ -379,12 +349,8 @@ public void testCatTime() throws Exception { ap( firstCol, catTimeDraColFor("tabby", "2019-07-01"), - catTimeDraColFor("tabby", "2019-07-02") - ), - ap( - "tabby" - ) - ); + catTimeDraColFor("tabby", "2019-07-02")), + ap("tabby")); addDocsAndCommit(true, newDoc("calico", "2019-07-02T00:00:00Z")); @@ -394,19 +360,14 @@ public void testCatTime() throws Exception { catTimeDraColFor("calico", "2019-07-01"), catTimeDraColFor("calico", "2019-07-02"), catTimeDraColFor("tabby", "2019-07-01"), - catTimeDraColFor("tabby", "2019-07-02") - ), - ap( - "tabby", - "calico" - ) - ); - - testFailedDocument("shorthair", "2017-10-23T00:00:00Z", "couldn't be routed" ); - testFailedDocument("shorthair", "2020-10-23T00:00:00Z", "too far in the future" ); - testFailedDocument(null, "2019-07-02T00:00:00Z", "Route value is null"); + catTimeDraColFor("tabby", "2019-07-02")), + ap("tabby", "calico")); + + testFailedDocument("shorthair", "2017-10-23T00:00:00Z", "couldn't be routed"); + testFailedDocument("shorthair", "2020-10-23T00:00:00Z", "too far in the future"); + testFailedDocument(null, "2019-07-02T00:00:00Z", "Route value is null"); testFailedDocument("foo__CRA__bar", "2019-07-02T00:00:00Z", "7 character sequence __CRA__"); - testFailedDocument("fóóCRAóóbar", "2019-07-02T00:00:00Z", "7 character sequence __CRA__"); + testFailedDocument("fóóCRAóóbar", "2019-07-02T00:00:00Z", "7 character sequence __CRA__"); // hopefully nothing changed assertCatTimeInvariants( @@ -414,21 +375,16 @@ public void testCatTime() throws Exception { catTimeDraColFor("calico", "2019-07-01"), catTimeDraColFor("calico", "2019-07-02"), catTimeDraColFor("tabby", "2019-07-01"), - catTimeDraColFor("tabby", 
"2019-07-02") - ), - ap( - "tabby", - "calico" - ) - ); + catTimeDraColFor("tabby", "2019-07-02")), + ap("tabby", "calico")); // 4 docs no new collections - addDocsAndCommit(true, + addDocsAndCommit( + true, newDoc("calico", "2019-07-02T00:00:00Z"), newDoc("tabby", "2019-07-01T00:00:00Z"), newDoc("tabby", "2019-07-01T23:00:00Z"), - newDoc("calico", "2019-07-02T23:00:00Z") - ); + newDoc("calico", "2019-07-02T23:00:00Z")); // hopefully nothing changed assertCatTimeInvariants( @@ -436,21 +392,16 @@ public void testCatTime() throws Exception { catTimeDraColFor("calico", "2019-07-01"), catTimeDraColFor("calico", "2019-07-02"), catTimeDraColFor("tabby", "2019-07-01"), - catTimeDraColFor("tabby", "2019-07-02") - ), - ap( - "tabby", - "calico" - ) - ); + catTimeDraColFor("tabby", "2019-07-02")), + ap("tabby", "calico")); // 4 docs 2 new collections, in random order and maybe not using the alias - addDocsAndCommit(false, + addDocsAndCommit( + false, newDoc("calico", "2019-07-04T00:00:00Z"), newDoc("tabby", "2019-07-01T00:00:00Z"), newDoc("tabby", "2019-07-01T23:00:00Z"), - newDoc("calico", "2019-07-03T23:00:00Z") - ); + newDoc("calico", "2019-07-03T23:00:00Z")); assertCatTimeInvariants( ap( @@ -462,21 +413,19 @@ public void testCatTime() throws Exception { catTimeDraColFor("tabby", "2019-07-02") // tabby collections not filled in. No guarantee that time periods remain in sync // across categories. - ), - ap( - "tabby", - "calico" - ) - ); + ), + ap("tabby", "calico")); // now test with async pre-create. CollectionAdminRequest.setAliasProperty(getAlias()) - .addProperty("router.1.preemptiveCreateMath", "30MINUTE").process(solrClient); + .addProperty("router.1.preemptiveCreateMath", "30MINUTE") + .process(solrClient); - addDocsAndCommit(false, + addDocsAndCommit( + false, newDoc("shorthair", "2019-07-02T23:40:00Z"), // create 2 sync 1 async - newDoc("calico", "2019-07-03T23:00:00Z") // does not create - ); + newDoc("calico", "2019-07-03T23:00:00Z") // does not create + ); waitColAndAlias(getAlias(), "", CRA + "shorthair" + TRA + "2019-07-03", 2); @@ -490,19 +439,13 @@ public void testCatTime() throws Exception { catTimeDraColFor("shorthair", "2019-07-02"), catTimeDraColFor("shorthair", "2019-07-03"), catTimeDraColFor("tabby", "2019-07-01"), - catTimeDraColFor("tabby", "2019-07-02") - ), - ap( - "shorthair", - "tabby", - "calico" - ) - ); + catTimeDraColFor("tabby", "2019-07-02")), + ap("shorthair", "tabby", "calico")); - addDocsAndCommit(false, + addDocsAndCommit( + false, newDoc("shorthair", "2019-07-02T23:40:00Z"), // should be no change - newDoc("calico", "2019-07-03T23:00:00Z") - ); + newDoc("calico", "2019-07-03T23:00:00Z")); /* Here we need to be testing that something that should not be created (extra preemptive async collections) @@ -522,24 +465,20 @@ Here we need to be testing that something that should not be created (extra pree catTimeDraColFor("shorthair", "2019-07-02"), catTimeDraColFor("shorthair", "2019-07-03"), catTimeDraColFor("tabby", "2019-07-01"), - catTimeDraColFor("tabby", "2019-07-02") - ), - ap( - "shorthair", - "tabby", - "calico" - ) - ); + catTimeDraColFor("tabby", "2019-07-02")), + ap("shorthair", "tabby", "calico")); // now test with auto-delete. 
CollectionAdminRequest.setAliasProperty(getAlias()) - .addProperty("router.1.autoDeleteAge", "/DAY-5DAY").process(solrClient); + .addProperty("router.1.autoDeleteAge", "/DAY-5DAY") + .process(solrClient); // this one should not yet cause deletion - addDocsAndCommit(false, + addDocsAndCommit( + false, newDoc("shorthair", "2019-07-02T23:00:00Z"), // no effect expected - newDoc("calico", "2019-07-05T23:00:00Z") // create 1 - ); + newDoc("calico", "2019-07-05T23:00:00Z") // create 1 + ); assertCatTimeInvariants( ap( @@ -552,19 +491,14 @@ Here we need to be testing that something that should not be created (extra pree catTimeDraColFor("shorthair", "2019-07-02"), catTimeDraColFor("shorthair", "2019-07-03"), catTimeDraColFor("tabby", "2019-07-01"), - catTimeDraColFor("tabby", "2019-07-02") - ), - ap( - "shorthair", - "tabby", - "calico" - ) - ); + catTimeDraColFor("tabby", "2019-07-02")), + ap("shorthair", "tabby", "calico")); - addDocsAndCommit(true, + addDocsAndCommit( + true, newDoc("shorthair", "2019-07-02T23:00:00Z"), // no effect expected - newDoc("calico", "2019-07-06T00:00:00Z") // create July 6, delete July 1 - ); + newDoc("calico", "2019-07-06T00:00:00Z") // create July 6, delete July 1 + ); waitCoreCount(getAlias() + CRA + "calico" + TRA + "2019-07-01", 0); assertCatTimeInvariants( @@ -579,22 +513,18 @@ Here we need to be testing that something that should not be created (extra pree catTimeDraColFor("shorthair", "2019-07-02"), catTimeDraColFor("shorthair", "2019-07-03"), catTimeDraColFor("tabby", "2019-07-01"), - catTimeDraColFor("tabby", "2019-07-02") - ), - ap( - "shorthair", - "tabby", - "calico" - ) - ); + catTimeDraColFor("tabby", "2019-07-02")), + ap("shorthair", "tabby", "calico")); // verify that all the documents ended up in the right collections. - QueryResponse resp = solrClient.query(getAlias(), params( - "q", "*:*", - "rows", "100", - "fl","*,[shard]", - "sort", "id asc" - )); + QueryResponse resp = + solrClient.query( + getAlias(), + params( + "q", "*:*", + "rows", "100", + "fl", "*,[shard]", + "sort", "id asc")); SolrDocumentList results = resp.getResults(); assertEquals(18, results.getNumFound()); for (SolrDocument result : results) { @@ -605,7 +535,6 @@ Here we need to be testing that something that should not be created (extra pree assertTrue(shard.contains(cat)); assertTrue(shard.contains(day)); } - } public String catTimeDraColFor(String category, String timestamp) { @@ -613,29 +542,34 @@ public String catTimeDraColFor(String category, String timestamp) { } public String timeCatDraColFor(String timestamp, String category) { - return getAlias() + TRA + timestamp + CRA + category; + return getAlias() + TRA + timestamp + CRA + category; } /** * Test for invariant conditions when dealing with a DRA that is category X time. 
* * @param expectedCols the collections we expect to see - * @param categories the categories added thus far + * @param categories the categories added thus far */ - private void assertCatTimeInvariants(String[] expectedCols, String[] categories) throws Exception { - final int expectNumFound = lastDocId - numDocsDeletedOrFailed; //lastDocId is effectively # generated docs + private void assertCatTimeInvariants(String[] expectedCols, String[] categories) + throws Exception { + final int expectNumFound = + lastDocId - numDocsDeletedOrFailed; // lastDocId is effectively # generated docs int totalNumFound = 0; - final List<String> cols = new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(getSaferTestName()); + final List<String> cols = + new CollectionAdminRequest.ListAliases() + .process(solrClient) + .getAliasesAsLists() + .get(getSaferTestName()); assert !cols.isEmpty(); for (String category : categories) { - List<String> cats = cols.stream().filter(c -> c.contains(category)).collect(Collectors.toList()); + List<String> cats = + cols.stream().filter(c -> c.contains(category)).collect(Collectors.toList()); Object[] expectedColOrder = cats.stream().sorted(Collections.reverseOrder()).toArray(); Object[] actuals = cats.toArray(); - assertArrayEquals("expected reverse sorted", - expectedColOrder, - actuals); + assertArrayEquals("expected reverse sorted", expectedColOrder, actuals); Instant colEndInstant = null; // exclusive end @@ -648,31 +582,35 @@ private void assertCatTimeInvariants(String[] expectedCols, String[] categories) // special case for tests... all of which have no more than one TRA dimension // This won't work if we decide to write a test with 2 time dimensions. // (but that's an odd case so we'll wait) - int traIndex = colTmp.indexOf(TRA)+ TRA.length(); + int traIndex = colTmp.indexOf(TRA) + TRA.length(); while (colTmp.lastIndexOf("__") > traIndex) { - colTmp = colTmp.substring(0,colTmp.lastIndexOf("__")); + colTmp = colTmp.substring(0, colTmp.lastIndexOf("__")); } colStartInstant = TimeRoutedAlias.parseInstantFromCollectionName(getAlias(), colTmp); } - final QueryResponse colStatsResp = solrClient.query(col, params( - "q", "*:*", - "fq", catField + ":" + category, - "rows", "0", - "stats", "true", - "stats.field", getTimeField())); + final QueryResponse colStatsResp = + solrClient.query( + col, + params( + "q", "*:*", + "fq", catField + ":" + category, + "rows", "0", + "stats", "true", + "stats.field", getTimeField())); long numFound = colStatsResp.getResults().getNumFound(); if (numFound > 0) { totalNumFound += numFound; - final FieldStatsInfo timestampStats = colStatsResp.getFieldStatsInfo().get(getTimeField()); + final FieldStatsInfo timestampStats = + colStatsResp.getFieldStatsInfo().get(getTimeField()); assertTrue(colStartInstant.toEpochMilli() <= ((Date) timestampStats.getMin()).getTime()); if (colEndInstant != null) { assertTrue(colEndInstant.toEpochMilli() > ((Date) timestampStats.getMax()).getTime()); } } - colEndInstant = colStartInstant; // next older segment will max out at our current start time + colEndInstant = + colStartInstant; // next older segment will max out at our current start time } - } assertEquals(expectNumFound, totalNumFound); @@ -680,7 +618,8 @@ private void assertCatTimeInvariants(String[] expectedCols, String[] categories) assertEquals("COLS FOUND:" + cols, expectedCols.length, cols.size()); } - private void
testFailedDocument(String category, String timestamp, String errorMsg) + throws SolrServerException, IOException { try { final UpdateResponse resp = solrClient.add(getAlias(), newDoc(category, timestamp)); // if we have a TolerantUpdateProcessor then we see it there) @@ -688,7 +627,9 @@ private void testFailedDocument(String category, String timestamp, String errorM assertTrue(errors != null && errors.toString().contains(errorMsg)); } catch (SolrException e) { String message = e.getMessage(); - assertTrue("expected message to contain" + errorMsg + " but message was " + message , message.contains(errorMsg)); + assertTrue( + "expected message to contain" + errorMsg + " but message was " + message, + message.contains(errorMsg)); } numDocsDeletedOrFailed++; } @@ -698,13 +639,17 @@ private String[] ap(String... p) { return p; } - private SolrInputDocument newDoc(String category, String timestamp) { Instant instant = Instant.parse(timestamp); - return sdoc("id", Integer.toString(++lastDocId), - getTimeField(), instant.toString(), - getCatField(), category, - getIntField(), "0"); // always 0 + return sdoc( + "id", + Integer.toString(++lastDocId), + getTimeField(), + instant.toString(), + getCatField(), + category, + getIntField(), + "0"); // always 0 } private String getTimeField() { @@ -724,5 +669,4 @@ public String getAlias() { public CloudSolrClient getSolrClient() { return solrClient; } - } diff --git a/solr/core/src/test/org/apache/solr/update/processor/DistributedUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/DistributedUpdateProcessorTest.java index 09fe5216714..92183952e2f 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/DistributedUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/DistributedUpdateProcessorTest.java @@ -17,6 +17,10 @@ package org.apache.solr.update.processor; +import static org.hamcrest.CoreMatchers.is; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.Mockito.doReturn; + import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -25,7 +29,6 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.function.Function; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.params.ModifiableSolrParams; @@ -45,14 +48,9 @@ import org.mockito.junit.MockitoJUnit; import org.mockito.junit.MockitoRule; -import static org.hamcrest.CoreMatchers.is; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.Mockito.doReturn; - public class DistributedUpdateProcessorTest extends SolrTestCaseJ4 { - @Rule - public MockitoRule rule = MockitoJUnit.rule(); + @Rule public MockitoRule rule = MockitoJUnit.rule(); private static ExecutorService executor; @BeforeClass @@ -60,12 +58,14 @@ public static void beforeClass() throws Exception { assumeWorkingMockito(); executor = ExecutorUtil.newMDCAwareCachedThreadPool(getClassName()); System.setProperty("enable.update.log", "true"); - initCore("solr/collection1/conf/solrconfig.xml","solr/collection1/conf/schema-minimal-with-another-uniqkey.xml"); + initCore( + "solr/collection1/conf/solrconfig.xml", + "solr/collection1/conf/schema-minimal-with-another-uniqkey.xml"); } @AfterClass public static void AfterClass() { - if (null != executor) { // may not have inited due to lack of mockito + if (null != executor) { // may not have inited due to lack of mockito executor.shutdown(); } 
System.clearProperty("enable.update.log"); @@ -74,34 +74,35 @@ public static void AfterClass() { @Test public void testShouldBufferUpdateZk() throws IOException { SolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), new ModifiableSolrParams()); - try (DistributedUpdateProcessor processor = new DistributedUpdateProcessor( - req, null, null, null)) { + try (DistributedUpdateProcessor processor = + new DistributedUpdateProcessor(req, null, null, null)) { AddUpdateCommand cmd = new AddUpdateCommand(req); // applying buffer updates, isReplayOrPeerSync flag doesn't matter assertFalse(processor.shouldBufferUpdate(cmd, false, UpdateLog.State.APPLYING_BUFFERED)); assertFalse(processor.shouldBufferUpdate(cmd, true, UpdateLog.State.APPLYING_BUFFERED)); - + assertTrue(processor.shouldBufferUpdate(cmd, false, UpdateLog.State.BUFFERING)); // this is not an buffer updates and it depend on other updates cmd.prevVersion = 10; assertTrue(processor.shouldBufferUpdate(cmd, false, UpdateLog.State.APPLYING_BUFFERED)); } } - + @Test public void testVersionAdd() throws IOException { SolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), new ModifiableSolrParams()); int threads = 5; - Function<DistributedUpdateProcessor, Boolean> versionAddFunc = (DistributedUpdateProcessor process) -> { - try { - AddUpdateCommand cmd = new AddUpdateCommand(req); - cmd.solrDoc = new SolrInputDocument(); - cmd.solrDoc.setField("notid", "10"); - return process.versionAdd(cmd); - } catch (IOException e) { - throw new RuntimeException(e); - } - }; + Function<DistributedUpdateProcessor, Boolean> versionAddFunc = + (DistributedUpdateProcessor process) -> { + try { + AddUpdateCommand cmd = new AddUpdateCommand(req); + cmd.solrDoc = new SolrInputDocument(); + cmd.solrDoc.setField("notid", "10"); + return process.versionAdd(cmd); + } catch (IOException e) { + throw new RuntimeException(e); + } + }; int succeeded = runCommands(threads, 1000, req, versionAddFunc); // only one should succeed assertThat(succeeded, is(1)); @@ -116,15 +117,16 @@ public void testVersionDelete() throws IOException { SolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), new ModifiableSolrParams()); int threads = 5; - Function<DistributedUpdateProcessor, Boolean> versionDeleteFunc = (DistributedUpdateProcessor process) -> { - try { - DeleteUpdateCommand cmd = new DeleteUpdateCommand(req); - cmd.id = "1"; - return process.versionDelete(cmd); - } catch (IOException e) { - throw new RuntimeException(e); - } - }; + Function<DistributedUpdateProcessor, Boolean> versionDeleteFunc = + (DistributedUpdateProcessor process) -> { + try { + DeleteUpdateCommand cmd = new DeleteUpdateCommand(req); + cmd.id = "1"; + return process.versionDelete(cmd); + } catch (IOException e) { + throw new RuntimeException(e); + } + }; int succeeded = runCommands(threads, 1000, req, versionDeleteFunc); // only one should succeed @@ -134,46 +136,51 @@ public void testVersionDelete() throws IOException { // all should succeed assertThat(succeeded, is(threads)); } - + /** * @return how many requests succeeded */ - private int runCommands(int threads, int versionBucketLockTimeoutMs, SolrQueryRequest req, - Function<DistributedUpdateProcessor, Boolean> function) + private int runCommands( + int threads, + int versionBucketLockTimeoutMs, + SolrQueryRequest req, + Function<DistributedUpdateProcessor, Boolean> function) throws IOException { - try (DistributedUpdateProcessor processor = new DistributedUpdateProcessor( - req, null, null, null)) { + try (DistributedUpdateProcessor processor = + new DistributedUpdateProcessor(req, null, null, null)) { if (versionBucketLockTimeoutMs > 0) { // use TimedVersionBucket with versionBucketLockTimeoutMs VersionInfo vinfo = Mockito.spy(processor.vinfo);
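A note on the stubbing style just below: with a Mockito spy, when(spy.bucket(...)).thenReturn(...) would invoke the real bucket() while the stub is being set up, so the doReturn(...).when(spy).bucket(anyInt()) form is used instead. A self-contained illustration of the same pitfall on plain JDK types (not the Solr classes under test):

    import static org.mockito.ArgumentMatchers.anyInt;
    import static org.mockito.Mockito.doReturn;
    import static org.mockito.Mockito.spy;

    import java.util.ArrayList;
    import java.util.List;

    class SpyStubSketch {
      public static void main(String[] args) {
        List<String> list = spy(new ArrayList<>());
        // when(list.get(0)).thenReturn("stubbed") would call the real get(0) here
        // and throw IndexOutOfBoundsException on the empty list; doReturn does not.
        doReturn("stubbed").when(list).get(anyInt());
        System.out.println(list.get(0)); // prints "stubbed"
      }
    }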
processor.vinfo = vinfo; - doReturn(new TimedVersionBucket() { - /** - * simulate the case: it takes 5 seconds to add the doc - * - */ - @Override - protected boolean tryLock(int lockTimeoutMs) { - boolean locked = super.tryLock(versionBucketLockTimeoutMs); - if (locked) { - try { - Thread.sleep(5000); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - } - return locked; - } - }).when(vinfo).bucket(anyInt()); + doReturn( + new TimedVersionBucket() { + /** simulate the case: it takes 5 seconds to add the doc */ + @Override + protected boolean tryLock(int lockTimeoutMs) { + boolean locked = super.tryLock(versionBucketLockTimeoutMs); + if (locked) { + try { + Thread.sleep(5000); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + return locked; + } + }) + .when(vinfo) + .bucket(anyInt()); } CountDownLatch latch = new CountDownLatch(1); Collection<Future<Boolean>> futures = new ArrayList<>(); for (int t = 0; t < threads; ++t) { - futures.add(executor.submit(() -> { - latch.await(); - return function.apply(processor); - })); + futures.add( + executor.submit( + () -> { + latch.await(); + return function.apply(processor); + })); } latch.countDown(); diff --git a/solr/core/src/test/org/apache/solr/update/processor/DocExpirationUpdateProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/DocExpirationUpdateProcessorFactoryTest.java index 428a94f22c1..e0fc6537e8c 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/DocExpirationUpdateProcessorFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/DocExpirationUpdateProcessorFactoryTest.java @@ -16,23 +16,18 @@ */ package org.apache.solr.update.processor; +import java.util.Date; +import java.util.List; +import java.util.concurrent.TimeUnit; import org.apache.solr.common.SolrInputDocument; - -import org.apache.solr.update.UpdateCommand; import org.apache.solr.update.CommitUpdateCommand; import org.apache.solr.update.DeleteUpdateCommand; - +import org.apache.solr.update.UpdateCommand; import org.junit.BeforeClass; -import java.util.Date; -import java.util.List; -import java.util.concurrent.TimeUnit; - -/** - * Tests various configurations of DocExpirationUpdateProcessorFactory - */ +/** Tests various configurations of DocExpirationUpdateProcessorFactory */ public class DocExpirationUpdateProcessorFactoryTest extends UpdateProcessorTestBase { - + public static final String CONFIG_XML = "solrconfig-doc-expire-update-processor.xml"; public static final String SCHEMA_XML = "schema15.xml"; @@ -44,17 +39,21 @@ public static void beforeClass() throws Exception { public void testTTLDefaultsConversion() throws Exception { SolrInputDocument d = null; - d = processAdd("convert-ttl-defaults", - params("NOW","1394059630042"), - doc(f("id", "1111"), - f("_ttl_","+5MINUTES"))); + d = + processAdd( + "convert-ttl-defaults", + params("NOW", "1394059630042"), + doc(f("id", "1111"), f("_ttl_", "+5MINUTES"))); assertNotNull(d); assertEquals(new Date(1394059930042L), d.getFieldValue("_expire_at_tdt")); - d = processAdd("convert-ttl-defaults", - params("NOW","1394059630042", - "_ttl_","+5MINUTES"), - doc(f("id", "1111"))); + d = + processAdd( + "convert-ttl-defaults", + params( + "NOW", "1394059630042", + "_ttl_", "+5MINUTES"), + doc(f("id", "1111"))); assertNotNull(d); assertEquals(new Date(1394059930042L), d.getFieldValue("_expire_at_tdt")); @@ -63,39 +62,46 @@ public void testTTLDefaultsConversion() throws Exception { public void testTTLFieldConversion() throws Exception { final
String chain = "convert-ttl-field"; SolrInputDocument d = null; - d = processAdd(chain, - params("NOW","1394059630042"), - doc(f("id", "1111"), - f("_ttl_field_","+5MINUTES"))); + d = + processAdd( + chain, + params("NOW", "1394059630042"), + doc(f("id", "1111"), f("_ttl_field_", "+5MINUTES"))); assertNotNull(d); assertEquals(new Date(1394059930042L), d.getFieldValue("_expire_at_tdt")); - d = processAdd(chain, - params("NOW","1394059630042"), - doc(f("id", "2222"), - f("_ttl_field_","+27MINUTES"))); + d = + processAdd( + chain, + params("NOW", "1394059630042"), + doc(f("id", "2222"), f("_ttl_field_", "+27MINUTES"))); assertNotNull(d); assertEquals(new Date(1394061250042L), d.getFieldValue("_expire_at_tdt")); - d = processAdd(chain, - params("NOW","1394059630042"), - doc(f("id", "3333"), - f("_ttl_field_","+1YEAR"))); + d = + processAdd( + chain, + params("NOW", "1394059630042"), + doc(f("id", "3333"), f("_ttl_field_", "+1YEAR"))); assertNotNull(d); assertEquals(new Date(1425595630042L), d.getFieldValue("_expire_at_tdt")); - d = processAdd(chain, - params("NOW","1394059630042"), - doc(f("id", "1111"), - f("_ttl_field_","/DAY+1YEAR"))); + d = + processAdd( + chain, + params("NOW", "1394059630042"), + doc(f("id", "1111"), f("_ttl_field_", "/DAY+1YEAR"))); assertNotNull(d); assertEquals(new Date(1425513600000L), d.getFieldValue("_expire_at_tdt")); // default ttlParamName is disabled, this should not convert... - d = processAdd(chain, - params("NOW","1394059630042", - "_ttl_","+5MINUTES"), - doc(f("id", "1111"))); + d = + processAdd( + chain, + params( + "NOW", "1394059630042", + "_ttl_", "+5MINUTES"), + doc(f("id", "1111"))); assertNotNull(d); assertNull(d.getFieldValue("_expire_at_tdt")); } @@ -103,35 +109,42 @@ public void testTTLFieldConversion() throws Exception { public void testTTLParamConversion() throws Exception { final String chain = "convert-ttl-param"; SolrInputDocument d = null; - d = processAdd(chain, - params("NOW","1394059630042", - "_ttl_param_","+5MINUTES"), - doc(f("id", "1111"))); + d = + processAdd( + chain, + params( + "NOW", "1394059630042", + "_ttl_param_", "+5MINUTES"), + doc(f("id", "1111"))); assertNotNull(d); assertEquals(new Date(1394059930042L), d.getFieldValue("_expire_at_tdt")); - d = processAdd(chain, - params("NOW","1394059630042", - "_ttl_param_","+27MINUTES"), - doc(f("id", "2222"))); + d = + processAdd( + chain, + params( + "NOW", "1394059630042", + "_ttl_param_", "+27MINUTES"), + doc(f("id", "2222"))); assertNotNull(d); assertEquals(new Date(1394061250042L), d.getFieldValue("_expire_at_tdt")); // default ttlFieldName is disabled, param should be used... - d = processAdd(chain, - params("NOW","1394059630042", - "_ttl_param_","+5MINUTES"), - doc(f("id", "1111"), - f("_ttl_field_","+999MINUTES"))); + d = + processAdd( + chain, + params( + "NOW", "1394059630042", + "_ttl_param_", "+5MINUTES"), + doc(f("id", "1111"), f("_ttl_field_", "+999MINUTES"))); assertNotNull(d); assertEquals(new Date(1394059930042L), d.getFieldValue("_expire_at_tdt")); // default ttlFieldName is disabled, this should not convert... 
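Since several of these expected epoch values are easy to mis-read, here is a quick re-derivation of the date math against the fixed NOW used throughout these TTL tests (plain java.time arithmetic as an illustration, not Solr's DateMathParser; note that "+1YEAR" from this NOW crosses a non-leap February, hence 365 days):

    import java.time.Instant;
    import java.time.temporal.ChronoUnit;

    class TtlMathSketch {
      public static void main(String[] args) {
        Instant now = Instant.ofEpochMilli(1394059630042L); // the NOW param in these tests
        System.out.println(now.plus(5, ChronoUnit.MINUTES).toEpochMilli());  // 1394059930042
        System.out.println(now.plus(27, ChronoUnit.MINUTES).toEpochMilli()); // 1394061250042
        // "/DAY+1YEAR": round down to midnight UTC, then add the 365-day year
        Instant dayPlusYear = now.truncatedTo(ChronoUnit.DAYS).plus(365, ChronoUnit.DAYS);
        System.out.println(dayPlusYear.toEpochMilli()); // 1425513600000
      }
    }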
- d = processAdd(chain, - params("NOW","1394059630042"), - doc(f("id", "1111"), - f("_ttl_","/DAY+1YEAR"))); + d = + processAdd( + chain, params("NOW", "1394059630042"), doc(f("id", "1111"), f("_ttl_", "/DAY+1YEAR"))); assertNotNull(d); assertNull(d.getFieldValue("_expire_at_tdt")); } @@ -139,81 +152,81 @@ public void testTTLParamConversion() throws Exception { public void testTTLFieldConversionWithDefaultParam() throws Exception { final String chain = "convert-ttl-field-with-param-default"; SolrInputDocument d = null; - d = processAdd(chain, - params("NOW","1394059630042", - "_ttl_param_","+999MINUTES"), - doc(f("id", "1111"), - f("_ttl_field_","+5MINUTES"))); + d = + processAdd( + chain, + params( + "NOW", "1394059630042", + "_ttl_param_", "+999MINUTES"), + doc(f("id", "1111"), f("_ttl_field_", "+5MINUTES"))); assertNotNull(d); assertEquals(new Date(1394059930042L), d.getFieldValue("_expire_at_tdt")); - d = processAdd(chain, - params("NOW","1394059630042", - "_ttl_param_","+27MINUTES"), - doc(f("id", "2222"))); + d = + processAdd( + chain, + params( + "NOW", "1394059630042", + "_ttl_param_", "+27MINUTES"), + doc(f("id", "2222"))); assertNotNull(d); assertEquals(new Date(1394061250042L), d.getFieldValue("_expire_at_tdt")); - } public void testAutomaticDeletes() throws Exception { // get a handle on our recorder - UpdateRequestProcessorChain chain = - h.getCore().getUpdateProcessingChain("scheduled-delete"); + UpdateRequestProcessorChain chain = h.getCore().getUpdateProcessingChain("scheduled-delete"); assertNotNull(chain); List factories = chain.getProcessors(); - assertEquals("did number of processors configured in chain get changed?", - 5, factories.size()); - assertTrue("Expected [1] RecordingUpdateProcessorFactory: " + factories.get(1).getClass(), - factories.get(1) instanceof RecordingUpdateProcessorFactory); - RecordingUpdateProcessorFactory recorder = - (RecordingUpdateProcessorFactory) factories.get(1); + assertEquals("did number of processors configured in chain get changed?", 5, factories.size()); + assertTrue( + "Expected [1] RecordingUpdateProcessorFactory: " + factories.get(1).getClass(), + factories.get(1) instanceof RecordingUpdateProcessorFactory); + RecordingUpdateProcessorFactory recorder = (RecordingUpdateProcessorFactory) factories.get(1); // now start recording, and monitor for the expected commands try { recorder.startRecording(); - + // more then one iter to verify it's recurring final int numItersToCheck = 1 + RANDOM_MULTIPLIER; - - for (int i = 0; i < numItersToCheck; i++) { + + for (int i = 0; i < numItersToCheck; i++) { UpdateCommand tmp; - + // be generous in how long we wait, some jenkins machines are slooooow tmp = recorder.commandQueue.poll(30, TimeUnit.SECONDS); - + // we can be confident in the order because DocExpirationUpdateProcessorFactory - // uses the same request for both the delete & the commit -- and both - // RecordingUpdateProcessorFactory's getInstance & startRecording methods are - // synchronized. So it should not be possible to start recording in the + // uses the same request for both the delete & the commit -- and both + // RecordingUpdateProcessorFactory's getInstance & startRecording methods are + // synchronized. 
So it should not be possible to start recording in the // middle of the two commands - assertTrue("expected DeleteUpdateCommand: " + tmp.getClass(), - tmp instanceof DeleteUpdateCommand); - + assertTrue( + "expected DeleteUpdateCommand: " + tmp.getClass(), tmp instanceof DeleteUpdateCommand); + DeleteUpdateCommand delete = (DeleteUpdateCommand) tmp; assertFalse(delete.isDeleteById()); assertNotNull(delete.getQuery()); - assertTrue(delete.getQuery(), - delete.getQuery().startsWith("{!cache=false}eXpField_tdt:[* TO ")); - + assertTrue( + delete.getQuery(), delete.getQuery().startsWith("{!cache=false}eXpField_tdt:[* TO ")); + // commit should be immediately after the delete tmp = recorder.commandQueue.poll(5, TimeUnit.SECONDS); - assertTrue("expected CommitUpdateCommand: " + tmp.getClass(), - tmp instanceof CommitUpdateCommand); - + assertTrue( + "expected CommitUpdateCommand: " + tmp.getClass(), tmp instanceof CommitUpdateCommand); + CommitUpdateCommand commit = (CommitUpdateCommand) tmp; assertTrue(commit.softCommit); assertTrue(commit.openSearcher); - } + } } finally { recorder.stopRecording(); } } - - } diff --git a/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java index 1f48d71ef1b..0e0da7998b4 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java @@ -16,10 +16,9 @@ */ package org.apache.solr.update.processor; +import java.util.Arrays; import java.util.LinkedHashSet; import java.util.TreeSet; -import java.util.Arrays; - import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputField; @@ -27,9 +26,8 @@ import org.junit.BeforeClass; /** - * Tests the basics of configuring FieldMutatingUpdateProcessors - * (mainly via TrimFieldUpdateProcessor) and the logic of other various - * subclasses. + * Tests the basics of configuring FieldMutatingUpdateProcessors (mainly via + * TrimFieldUpdateProcessor) and the logic of other various subclasses. 
*/ public class FieldMutatingUpdateProcessorTest extends UpdateProcessorTestBase { @@ -43,120 +41,124 @@ public void testComprehensive() throws Exception { final String countMe = "how long is this string?"; final int count = countMe.length(); - processAdd("comprehensive", - doc(f("id", "1111"), - f("primary_author_s1", - "XXXX", "Adam", "Sam"), - f("all_authors_s1", - "XXXX", "Adam", "Sam"), - f("foo_is", countMe, 42), - f("first_foo_l", countMe, -34), - f("max_foo_l", countMe, -34), - f("min_foo_l", countMe, -34))); + processAdd( + "comprehensive", + doc( + f("id", "1111"), + f("primary_author_s1", "XXXX", "Adam", "Sam"), + f("all_authors_s1", "XXXX", "Adam", "Sam"), + f("foo_is", countMe, 42), + f("first_foo_l", countMe, -34), + f("max_foo_l", countMe, -34), + f("min_foo_l", countMe, -34))); assertU(commit()); - assertQ(req("id:1111") - ,"//str[@name='primary_author_s1'][.='XXXX']" - ,"//str[@name='all_authors_s1'][.='XXXX; Adam; Sam']" - ,"//arr[@name='foo_is']/int[1][.='"+count+"']" - ,"//arr[@name='foo_is']/int[2][.='42']" - ,"//long[@name='max_foo_l'][.='"+count+"']" - ,"//long[@name='first_foo_l'][.='"+count+"']" - ,"//long[@name='min_foo_l'][.='-34']" - ); + assertQ( + req("id:1111"), + "//str[@name='primary_author_s1'][.='XXXX']", + "//str[@name='all_authors_s1'][.='XXXX; Adam; Sam']", + "//arr[@name='foo_is']/int[1][.='" + count + "']", + "//arr[@name='foo_is']/int[2][.='42']", + "//long[@name='max_foo_l'][.='" + count + "']", + "//long[@name='first_foo_l'][.='" + count + "']", + "//long[@name='min_foo_l'][.='-34']"); } public void testTrimAll() throws Exception { SolrInputDocument d = null; - d = processAdd("trim-all", - doc(f("id", "1111"), - f("name", " Hoss ", new StringBuilder(" Man")), - f("foo_t", " some text ", "other Text\t"), - f("foo_d", 42), - field("foo_s", " string "))); + d = + processAdd( + "trim-all", + doc( + f("id", "1111"), + f("name", " Hoss ", new StringBuilder(" Man")), + f("foo_t", " some text ", "other Text\t"), + f("foo_d", 42), + field("foo_s", " string "))); assertNotNull(d); // simple stuff assertEquals("string", d.getFieldValue("foo_s")); - assertEquals(Arrays.asList("some text","other Text"), - d.getFieldValues("foo_t")); - assertEquals(Arrays.asList("Hoss","Man"), - d.getFieldValues("name")); + assertEquals(Arrays.asList("some text", "other Text"), d.getFieldValues("foo_t")); + assertEquals(Arrays.asList("Hoss", "Man"), d.getFieldValues("name")); // slightly more interesting - assertEquals("processor borked non string value", - 42, d.getFieldValue("foo_d")); + assertEquals("processor borked non string value", 42, d.getFieldValue("foo_d")); } public void testUniqValues() throws Exception { final String chain = "uniq-values"; SolrInputDocument d = null; - d = processAdd(chain, - doc(f("id", "1111"), - f("name", "Hoss", "Man", "Hoss"), - f("uniq_1_s", "Hoss", "Man", "Hoss"), - f("uniq_2_s", "Foo", "Hoss", "Man", "Hoss", "Bar"), - f("uniq_3_s", 5.0F, 23, "string", 5.0F))); - - assertNotNull(d); - - assertEquals(Arrays.asList("Hoss", "Man", "Hoss"), - d.getFieldValues("name")); - assertEquals(Arrays.asList("Hoss","Man"), - d.getFieldValues("uniq_1_s")); - assertEquals(Arrays.asList("Foo","Hoss","Man","Bar"), - d.getFieldValues("uniq_2_s")); - assertEquals(Arrays.asList(5.0F, 23, "string"), - d.getFieldValues("uniq_3_s")); + d = + processAdd( + chain, + doc( + f("id", "1111"), + f("name", "Hoss", "Man", "Hoss"), + f("uniq_1_s", "Hoss", "Man", "Hoss"), + f("uniq_2_s", "Foo", "Hoss", "Man", "Hoss", "Bar"), + f("uniq_3_s", 5.0F, 23, "string", 5.0F))); + + 
assertNotNull(d); + + assertEquals(Arrays.asList("Hoss", "Man", "Hoss"), d.getFieldValues("name")); + assertEquals(Arrays.asList("Hoss", "Man"), d.getFieldValues("uniq_1_s")); + assertEquals(Arrays.asList("Foo", "Hoss", "Man", "Bar"), d.getFieldValues("uniq_2_s")); + assertEquals(Arrays.asList(5.0F, 23, "string"), d.getFieldValues("uniq_3_s")); } public void testTrimFields() throws Exception { for (String chain : Arrays.asList("trim-fields", "trim-fields-arr")) { SolrInputDocument d = null; - d = processAdd(chain, - doc(f("id", "1111"), - f("name", " Hoss ", " Man"), - f("foo_t", " some text ", "other Text\t"), - f("foo_s", " string "))); - + d = + processAdd( + chain, + doc( + f("id", "1111"), + f("name", " Hoss ", " Man"), + f("foo_t", " some text ", "other Text\t"), + f("foo_s", " string "))); + assertNotNull(d); - + assertEquals(" string ", d.getFieldValue("foo_s")); - assertEquals(Arrays.asList("some text","other Text"), - d.getFieldValues("foo_t")); - assertEquals(Arrays.asList("Hoss","Man"), - d.getFieldValues("name")); + assertEquals(Arrays.asList("some text", "other Text"), d.getFieldValues("foo_t")); + assertEquals(Arrays.asList("Hoss", "Man"), d.getFieldValues("name")); } } public void testTrimField() throws Exception { SolrInputDocument d = null; - d = processAdd("trim-field", - doc(f("id", "1111"), - f("name", " Hoss ", " Man"), - f("foo_t", " some text ", "other Text\t"), - f("foo_s", " string "))); + d = + processAdd( + "trim-field", + doc( + f("id", "1111"), + f("name", " Hoss ", " Man"), + f("foo_t", " some text ", "other Text\t"), + f("foo_s", " string "))); assertNotNull(d); assertEquals(" string ", d.getFieldValue("foo_s")); - assertEquals(Arrays.asList("some text","other Text"), - d.getFieldValues("foo_t")); - assertEquals(Arrays.asList(" Hoss "," Man"), - d.getFieldValues("name")); + assertEquals(Arrays.asList("some text", "other Text"), d.getFieldValues("foo_t")); + assertEquals(Arrays.asList(" Hoss ", " Man"), d.getFieldValues("name")); } public void testTrimRegex() throws Exception { SolrInputDocument d = null; - d = processAdd("trim-field-regexes", - doc(f("id", "1111"), - f("foo_t", " string1 "), - f("foozat_s", " string2 "), - f("bar_t", " string3 "), - f("bar_s", " string4 "))); + d = + processAdd( + "trim-field-regexes", + doc( + f("id", "1111"), + f("foo_t", " string1 "), + f("foozat_s", " string2 "), + f("bar_t", " string3 "), + f("bar_s", " string4 "))); assertNotNull(d); @@ -164,18 +166,20 @@ public void testTrimRegex() throws Exception { assertEquals("string2", d.getFieldValue("foozat_s")); assertEquals(" string3 ", d.getFieldValue("bar_t")); assertEquals("string4", d.getFieldValue("bar_s")); - } public void testTrimTypes() throws Exception { SolrInputDocument d = null; - d = processAdd("trim-types", - doc(f("id", "1111"), - f("foo_sw", " string0 "), - f("name", " string1 "), - f("title", " string2 "), - f("bar_t", " string3 "), - f("bar_s", " string4 "))); + d = + processAdd( + "trim-types", + doc( + f("id", "1111"), + f("foo_sw", " string0 "), + f("name", " string1 "), + f("title", " string2 "), + f("bar_t", " string3 "), + f("bar_s", " string4 "))); assertNotNull(d); @@ -184,16 +188,18 @@ public void testTrimTypes() throws Exception { assertEquals("string2", d.getFieldValue("title")); assertEquals(" string3 ", d.getFieldValue("bar_t")); assertEquals(" string4 ", d.getFieldValue("bar_s")); - } public void testTrimClasses() throws Exception { SolrInputDocument d = null; - d = processAdd("trim-classes", - doc(f("id", "1111"), - f("foo_t", " string1 "), - 
f("foo_s", " string2 "), - f("bar_dt", " string3 "))); + d = + processAdd( + "trim-classes", + doc( + f("id", "1111"), + f("foo_t", " string1 "), + f("foo_s", " string2 "), + f("bar_dt", " string3 "))); assertNotNull(d); @@ -204,11 +210,14 @@ public void testTrimClasses() throws Exception { public void testTrimMultipleRules() throws Exception { SolrInputDocument d = null; - d = processAdd("trim-multi", - doc(f("id", "1111"), - f("foo_t", " string1 "), - f("foo_s", " string2 "), - f("bar_dt", " string3 "))); + d = + processAdd( + "trim-multi", + doc( + f("id", "1111"), + f("foo_t", " string1 "), + f("foo_s", " string2 "), + f("bar_dt", " string3 "))); assertNotNull(d); @@ -219,11 +228,14 @@ public void testTrimMultipleRules() throws Exception { public void testTrimExclusions() throws Exception { SolrInputDocument d = null; - d = processAdd("trim-most", - doc(f("id", "1111"), - f("foo_t", " string1 "), - f("foo_s", " string2 "), - f("bar_dt", " string3 "))); + d = + processAdd( + "trim-most", + doc( + f("id", "1111"), + f("foo_t", " string1 "), + f("foo_s", " string2 "), + f("bar_dt", " string3 "))); assertNotNull(d); @@ -231,12 +243,15 @@ public void testTrimExclusions() throws Exception { assertEquals("string2", d.getFieldValue("foo_s")); assertEquals("string3", d.getFieldValue("bar_dt")); - d = processAdd("trim-many", - doc(f("id", "1111"), - f("foo_t", " string1 "), - f("foo_s", " string2 "), - f("bar_dt", " string3 "), - f("bar_HOSS_s", " string4 "))); + d = + processAdd( + "trim-many", + doc( + f("id", "1111"), + f("foo_t", " string1 "), + f("foo_s", " string2 "), + f("bar_dt", " string3 "), + f("bar_HOSS_s", " string4 "))); assertNotNull(d); @@ -245,12 +260,15 @@ public void testTrimExclusions() throws Exception { assertEquals("string3", d.getFieldValue("bar_dt")); assertEquals(" string4 ", d.getFieldValue("bar_HOSS_s")); - d = processAdd("trim-few", - doc(f("id", "1111"), - f("foo_t", " string1 "), - f("foo_s", " string2 "), - f("bar_dt", " string3 "), - f("bar_HOSS_s", " string4 "))); + d = + processAdd( + "trim-few", + doc( + f("id", "1111"), + f("foo_t", " string1 "), + f("foo_s", " string2 "), + f("bar_dt", " string3 "), + f("bar_HOSS_s", " string4 "))); assertNotNull(d); @@ -259,12 +277,15 @@ public void testTrimExclusions() throws Exception { assertEquals(" string3 ", d.getFieldValue("bar_dt")); assertEquals(" string4 ", d.getFieldValue("bar_HOSS_s")); - d = processAdd("trim-some", - doc(f("id", "1111"), - f("foo_t", " string1 "), - f("foo_s", " string2 "), - f("bar_dt", " string3 "), - f("bar_HOSS_s", " string4 "))); + d = + processAdd( + "trim-some", + doc( + f("id", "1111"), + f("foo_t", " string1 "), + f("foo_s", " string2 "), + f("bar_dt", " string3 "), + f("bar_HOSS_s", " string4 "))); assertNotNull(d); @@ -276,146 +297,150 @@ public void testTrimExclusions() throws Exception { public void testRemoveBlanks() throws Exception { SolrInputDocument d = null; - d = processAdd("remove-all-blanks", - doc(f("id", "1111"), - f("foo_s", "string1", ""), - f("bar_dt", "string2", "", "string3"), - f("yak_t", ""), - f("foo_d", 42))); - - assertNotNull(d); - - assertEquals(Arrays.asList("string1"), - d.getFieldValues("foo_s")); - assertEquals(Arrays.asList("string2","string3"), - d.getFieldValues("bar_dt")); - assertFalse("shouldn't be any values for yak_t", - d.containsKey("yak_t")); - assertEquals("processor borked non string value", - 42, d.getFieldValue("foo_d")); - + d = + processAdd( + "remove-all-blanks", + doc( + f("id", "1111"), + f("foo_s", "string1", ""), + f("bar_dt", 
"string2", "", "string3"), + f("yak_t", ""), + f("foo_d", 42))); + + assertNotNull(d); + + assertEquals(Arrays.asList("string1"), d.getFieldValues("foo_s")); + assertEquals(Arrays.asList("string2", "string3"), d.getFieldValues("bar_dt")); + assertFalse("shouldn't be any values for yak_t", d.containsKey("yak_t")); + assertEquals("processor borked non string value", 42, d.getFieldValue("foo_d")); } public void testStrLength() throws Exception { SolrInputDocument d = null; - d = processAdd("length-none", - doc(f("id", "1111"), - f("foo_s", "string1", "string222"), - f("bar_dt", "string3"), - f("yak_t", ""), - f("foo_d", 42))); + d = + processAdd( + "length-none", + doc( + f("id", "1111"), + f("foo_s", "string1", "string222"), + f("bar_dt", "string3"), + f("yak_t", ""), + f("foo_d", 42))); assertNotNull(d); - assertEquals(Arrays.asList("string1","string222"), - d.getFieldValues("foo_s")); + assertEquals(Arrays.asList("string1", "string222"), d.getFieldValues("foo_s")); assertEquals("string3", d.getFieldValue("bar_dt")); assertEquals("", d.getFieldValue("yak_t")); - assertEquals("processor borked non string value", - 42, d.getFieldValue("foo_d")); - - d = processAdd("length-some", - doc(f("id", "1111"), - f("foo_s", "string1", "string222"), - f("bar_dt", "string3"), - f("yak_t", ""), - f("foo_d", 42))); + assertEquals("processor borked non string value", 42, d.getFieldValue("foo_d")); + + d = + processAdd( + "length-some", + doc( + f("id", "1111"), + f("foo_s", "string1", "string222"), + f("bar_dt", "string3"), + f("yak_t", ""), + f("foo_d", 42))); assertNotNull(d); - assertEquals(Arrays.asList(7, 9), - d.getFieldValues("foo_s")); + assertEquals(Arrays.asList(7, 9), d.getFieldValues("foo_s")); assertEquals("string3", d.getFieldValue("bar_dt")); assertEquals(0, d.getFieldValue("yak_t")); - assertEquals("processor borked non string value", - 42, d.getFieldValue("foo_d")); + assertEquals("processor borked non string value", 42, d.getFieldValue("foo_d")); } public void testRegexReplace() throws Exception { SolrInputDocument d = null; - d = processAdd("regex-replace", - doc(f("id", "doc1"), - f("content", "This is a text\t with a lot\n of whitespace"), - f("title", "This\ttitle has a lot of spaces"))); - - assertNotNull(d); - - assertEquals("ThisXisXaXtextXwithXaXlotXofXwhitespace", - d.getFieldValue("content")); - assertEquals("ThisXtitleXhasXaXlotXofXspaces", - d.getFieldValue("title")); + d = + processAdd( + "regex-replace", + doc( + f("id", "doc1"), + f("content", "This is a text\t with a lot\n of whitespace"), + f("title", "This\ttitle has a lot of spaces"))); + + assertNotNull(d); + + assertEquals("ThisXisXaXtextXwithXaXlotXofXwhitespace", d.getFieldValue("content")); + assertEquals("ThisXtitleXhasXaXlotXofXspaces", d.getFieldValue("title")); // literalReplacement = true - d = processAdd("regex-replace-literal-true", - doc(f("id", "doc2"), - f("content", "Let's try this one"), - f("title", "Let's try try this one"))); + d = + processAdd( + "regex-replace-literal-true", + doc( + f("id", "doc2"), + f("content", "Let's try this one"), + f("title", "Let's try try this one"))); assertNotNull(d); - assertEquals("Let's <$1> this one", - d.getFieldValue("content")); - assertEquals("Let's <$1> <$1> this one", - d.getFieldValue("title")); + assertEquals("Let's <$1> this one", d.getFieldValue("content")); + assertEquals("Let's <$1> <$1> this one", d.getFieldValue("title")); // literalReplacement is not specified, defaults to true - d = processAdd("regex-replace-literal-default-true", - doc(f("id", 
"doc3"), - f("content", "Let's try this one"), - f("title", "Let's try try this one"))); + d = + processAdd( + "regex-replace-literal-default-true", + doc( + f("id", "doc3"), + f("content", "Let's try this one"), + f("title", "Let's try try this one"))); assertNotNull(d); - assertEquals("Let's <$1> this one", - d.getFieldValue("content")); - assertEquals("Let's <$1> <$1> this one", - d.getFieldValue("title")); + assertEquals("Let's <$1> this one", d.getFieldValue("content")); + assertEquals("Let's <$1> <$1> this one", d.getFieldValue("title")); // if user passes literalReplacement as a string param instead of boolean - d = processAdd("regex-replace-literal-str-true", - doc(f("id", "doc4"), - f("content", "Let's try this one"), - f("title", "Let's try try this one"))); + d = + processAdd( + "regex-replace-literal-str-true", + doc( + f("id", "doc4"), + f("content", "Let's try this one"), + f("title", "Let's try try this one"))); assertNotNull(d); - assertEquals("Let's <$1> this one", - d.getFieldValue("content")); - assertEquals("Let's <$1> <$1> this one", - d.getFieldValue("title")); + assertEquals("Let's <$1> this one", d.getFieldValue("content")); + assertEquals("Let's <$1> <$1> this one", d.getFieldValue("title")); // This is with literalReplacement = false - d = processAdd("regex-replace-literal-false", - doc(f("id", "doc5"), - f("content", "Let's try this one"), - f("title", "Let's try try this one"))); + d = + processAdd( + "regex-replace-literal-false", + doc( + f("id", "doc5"), + f("content", "Let's try this one"), + f("title", "Let's try try this one"))); assertNotNull(d); - assertEquals("Let's this one", - d.getFieldValue("content")); - assertEquals("Let's this one", - d.getFieldValue("title")); - + assertEquals("Let's this one", d.getFieldValue("content")); + assertEquals("Let's this one", d.getFieldValue("title")); } - + public void testFirstValue() throws Exception { SolrInputDocument d = null; - d = processAdd("first-value", - doc(f("id", "1111"), - f("foo_s", "string1", "string222"), - f("bar_s", "string3"), - f("yak_t", "string4", "string5"))); + d = + processAdd( + "first-value", + doc( + f("id", "1111"), + f("foo_s", "string1", "string222"), + f("bar_s", "string3"), + f("yak_t", "string4", "string5"))); assertNotNull(d); - assertEquals(Arrays.asList("string1"), - d.getFieldValues("foo_s")); - assertEquals(Arrays.asList("string3"), - d.getFieldValues("bar_s")); - assertEquals(Arrays.asList("string4", "string5"), - d.getFieldValues("yak_t")); + assertEquals(Arrays.asList("string1"), d.getFieldValues("foo_s")); + assertEquals(Arrays.asList("string3"), d.getFieldValues("bar_s")); + assertEquals(Arrays.asList("string4", "string5"), d.getFieldValues("yak_t")); } public void testLastValue() throws Exception { @@ -423,20 +448,20 @@ public void testLastValue() throws Exception { // basics - d = processAdd("last-value", - doc(f("id", "1111"), - f("foo_s", "string1", "string222"), - f("bar_s", "string3"), - f("yak_t", "string4", "string5"))); + d = + processAdd( + "last-value", + doc( + f("id", "1111"), + f("foo_s", "string1", "string222"), + f("bar_s", "string3"), + f("yak_t", "string4", "string5"))); assertNotNull(d); - assertEquals(Arrays.asList("string222"), - d.getFieldValues("foo_s")); - assertEquals(Arrays.asList("string3"), - d.getFieldValues("bar_s")); - assertEquals(Arrays.asList("string4", "string5"), - d.getFieldValues("yak_t")); + assertEquals(Arrays.asList("string222"), d.getFieldValues("foo_s")); + assertEquals(Arrays.asList("string3"), d.getFieldValues("bar_s")); + 
assertEquals(Arrays.asList("string4", "string5"), d.getFieldValues("yak_t")); // test optimizations (and force test of defaults) @@ -445,251 +470,253 @@ public void testLastValue() throws Exception { // test something that's definitely a SortedSet special = new SolrInputField("foo_s"); - special.setValue(new TreeSet<> - (Arrays.asList("ggg", "first", "last", "hhh"))); - - d = processAdd("last-value", - doc(f("id", "1111"), - special)); + special.setValue(new TreeSet<>(Arrays.asList("ggg", "first", "last", "hhh"))); + + d = processAdd("last-value", doc(f("id", "1111"), special)); assertNotNull(d); assertEquals("last", d.getFieldValue("foo_s")); // test something that's definitely a List - + special = new SolrInputField("foo_s"); special.setValue(Arrays.asList("first", "ggg", "hhh", "last")); - - d = processAdd("last-value", - doc(f("id", "1111"), - special)); + + d = processAdd("last-value", doc(f("id", "1111"), special)); assertNotNull(d); assertEquals("last", d.getFieldValue("foo_s")); - // test something that is definitely not a List or SortedSet + // test something that is definitely not a List or SortedSet // (ie: get default behavior of Collection using iterator) special = new SolrInputField("foo_s"); - special.setValue(new LinkedHashSet<> - (Arrays.asList("first", "ggg", "hhh", "last"))); - - d = processAdd("last-value", - doc(f("id", "1111"), - special)); + special.setValue(new LinkedHashSet<>(Arrays.asList("first", "ggg", "hhh", "last"))); + + d = processAdd("last-value", doc(f("id", "1111"), special)); assertNotNull(d); assertEquals("last", d.getFieldValue("foo_s")); - - } public void testMinValue() throws Exception { SolrInputDocument d = null; - d = processAdd("min-value", - doc(f("id", "1111"), - f("foo_s", "zzz", "aaa", "bbb"), - f("foo_i", 42, 128, -3), - f("bar_s", "aaa"), - f("yak_t", "aaa", "bbb"))); + d = + processAdd( + "min-value", + doc( + f("id", "1111"), + f("foo_s", "zzz", "aaa", "bbb"), + f("foo_i", 42, 128, -3), + f("bar_s", "aaa"), + f("yak_t", "aaa", "bbb"))); assertNotNull(d); - assertEquals(Arrays.asList("aaa"), - d.getFieldValues("foo_s")); - assertEquals(Arrays.asList(-3), - d.getFieldValues("foo_i")); - assertEquals(Arrays.asList("aaa"), - d.getFieldValues("bar_s")); - assertEquals(Arrays.asList("aaa", "bbb"), - d.getFieldValues("yak_t")); - + assertEquals(Arrays.asList("aaa"), d.getFieldValues("foo_s")); + assertEquals(Arrays.asList(-3), d.getFieldValues("foo_i")); + assertEquals(Arrays.asList("aaa"), d.getFieldValues("bar_s")); + assertEquals(Arrays.asList("aaa", "bbb"), d.getFieldValues("yak_t")); + // failure when un-comparable SolrException error = null; try { ignoreException(".*Unable to mutate field.*"); - d = processAdd("min-value", - doc(f("id", "1111"), - f("foo_s", "zzz", 42, "bbb"), - f("bar_s", "aaa"), - f("yak_t", "aaa", "bbb"))); + d = + processAdd( + "min-value", + doc( + f("id", "1111"), + f("foo_s", "zzz", 42, "bbb"), + f("bar_s", "aaa"), + f("yak_t", "aaa", "bbb"))); } catch (SolrException e) { error = e; } finally { resetExceptionIgnores(); } assertNotNull("no error on un-comparable values", error); - assertTrue("error doesn't mention field name", - 0 <= error.getMessage().indexOf("foo_s")); + assertTrue("error doesn't mention field name", 0 <= error.getMessage().indexOf("foo_s")); } public void testMaxValue() throws Exception { SolrInputDocument d = null; - d = processAdd("max-value", - doc(f("id", "1111"), - f("foo_s", "zzz", "aaa", "bbb"), - f("foo_i", 42, 128, -3), - f("bar_s", "aaa"), - f("yak_t", "aaa", "bbb"))); + d = + 
processAdd( + "max-value", + doc( + f("id", "1111"), + f("foo_s", "zzz", "aaa", "bbb"), + f("foo_i", 42, 128, -3), + f("bar_s", "aaa"), + f("yak_t", "aaa", "bbb"))); assertNotNull(d); - assertEquals(Arrays.asList("zzz"), - d.getFieldValues("foo_s")); - assertEquals(Arrays.asList(128), - d.getFieldValues("foo_i")); - assertEquals(Arrays.asList("aaa"), - d.getFieldValues("bar_s")); - assertEquals(Arrays.asList("aaa", "bbb"), - d.getFieldValues("yak_t")); - + assertEquals(Arrays.asList("zzz"), d.getFieldValues("foo_s")); + assertEquals(Arrays.asList(128), d.getFieldValues("foo_i")); + assertEquals(Arrays.asList("aaa"), d.getFieldValues("bar_s")); + assertEquals(Arrays.asList("aaa", "bbb"), d.getFieldValues("yak_t")); + // failure when un-comparable SolrException error = null; try { ignoreException(".*Unable to mutate field.*"); - d = processAdd("min-value", - doc(f("id", "1111"), - f("foo_s", "zzz", 42, "bbb"), - f("bar_s", "aaa"), - f("yak_t", "aaa", "bbb"))); + d = + processAdd( + "min-value", + doc( + f("id", "1111"), + f("foo_s", "zzz", 42, "bbb"), + f("bar_s", "aaa"), + f("yak_t", "aaa", "bbb"))); } catch (SolrException e) { error = e; } finally { resetExceptionIgnores(); } assertNotNull("no error on un-comparable values", error); - assertTrue("error doesn't mention field name", - 0 <= error.getMessage().indexOf("foo_s")); + assertTrue("error doesn't mention field name", 0 <= error.getMessage().indexOf("foo_s")); } - public void testHtmlStrip() throws Exception { SolrInputDocument d = null; - d = processAdd("html-strip", - doc(f("id", "1111"), - f("html_s", "hi & bye", "aaa", "bbb"), - f("bar_s", "hi & bye"))); + d = + processAdd( + "html-strip", + doc( + f("id", "1111"), + f("html_s", "hi & bye", "aaa", "bbb"), + f("bar_s", "hi & bye"))); assertNotNull(d); - assertEquals(Arrays.asList("hi & bye", "aaa", "bbb"), - d.getFieldValues("html_s")); + assertEquals(Arrays.asList("hi & bye", "aaa", "bbb"), d.getFieldValues("html_s")); assertEquals("hi & bye", d.getFieldValue("bar_s")); - } public void testTruncate() throws Exception { SolrInputDocument d = null; - d = processAdd("truncate", - doc(f("id", "1111"), - f("trunc", "123456789", "", 42, "abcd"))); + d = processAdd("truncate", doc(f("id", "1111"), f("trunc", "123456789", "", 42, "abcd"))); assertNotNull(d); - assertEquals(Arrays.asList("12345", "", 42, "abcd"), - d.getFieldValues("trunc")); + assertEquals(Arrays.asList("12345", "", 42, "abcd"), d.getFieldValues("trunc")); } public void testIgnore() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); - assertNull("test expects 'foo_giberish' to not be a valid field, looks like schema was changed out from under us", - schema.getFieldTypeNoEx("foo_giberish")); - assertNull("test expects 'bar_giberish' to not be a valid field, looks like schema was changed out from under us", - schema.getFieldTypeNoEx("bar_giberish")); - assertNotNull("test expects 't_raw' to be a valid field, looks like schema was changed out from under us", - schema.getFieldTypeNoEx("t_raw")); - assertNotNull("test expects 'foo_s' to be a valid field, looks like schema was changed out from under us", - schema.getFieldTypeNoEx("foo_s")); - + assertNull( + "test expects 'foo_giberish' to not be a valid field, looks like schema was changed out from under us", + schema.getFieldTypeNoEx("foo_giberish")); + assertNull( + "test expects 'bar_giberish' to not be a valid field, looks like schema was changed out from under us", + schema.getFieldTypeNoEx("bar_giberish")); + assertNotNull( + "test expects 't_raw' to 
be a valid field, looks like schema was changed out from under us", + schema.getFieldTypeNoEx("t_raw")); + assertNotNull( + "test expects 'foo_s' to be a valid field, looks like schema was changed out from under us", + schema.getFieldTypeNoEx("foo_s")); + SolrInputDocument d = null; - - d = processAdd("ignore-not-in-schema", - doc(f("id", "1111"), - f("bar_giberish", "123456789", "", 42, "abcd"), - f("foo_giberish", "123456789", "", 42, "abcd"), - f("t_raw", "123456789", "", 42, "abcd"), - f("foo_s", "hoss"))); - + + d = + processAdd( + "ignore-not-in-schema", + doc( + f("id", "1111"), + f("bar_giberish", "123456789", "", 42, "abcd"), + f("foo_giberish", "123456789", "", 42, "abcd"), + f("t_raw", "123456789", "", 42, "abcd"), + f("foo_s", "hoss"))); + assertNotNull(d); assertFalse(d.containsKey("bar_giberish")); assertFalse(d.containsKey("foo_giberish")); - assertEquals(Arrays.asList("123456789", "", 42, "abcd"), - d.getFieldValues("t_raw")); + assertEquals(Arrays.asList("123456789", "", 42, "abcd"), d.getFieldValues("t_raw")); assertEquals("hoss", d.getFieldValue("foo_s")); - d = processAdd("ignore-some", - doc(f("id", "1111"), - f("foo_giberish", "123456789", "", 42, "abcd"), - f("bar_giberish", "123456789", "", 42, "abcd"), - f("t_raw", "123456789", "", 42, "abcd"), - f("foo_s", "hoss"))); + d = + processAdd( + "ignore-some", + doc( + f("id", "1111"), + f("foo_giberish", "123456789", "", 42, "abcd"), + f("bar_giberish", "123456789", "", 42, "abcd"), + f("t_raw", "123456789", "", 42, "abcd"), + f("foo_s", "hoss"))); assertNotNull(d); - assertEquals(Arrays.asList("123456789", "", 42, "abcd"), - d.getFieldValues("foo_giberish")); - assertEquals(Arrays.asList("123456789", "", 42, "abcd"), - d.getFieldValues("bar_giberish")); + assertEquals(Arrays.asList("123456789", "", 42, "abcd"), d.getFieldValues("foo_giberish")); + assertEquals(Arrays.asList("123456789", "", 42, "abcd"), d.getFieldValues("bar_giberish")); assertFalse(d.containsKey("t_raw")); assertEquals("hoss", d.getFieldValue("foo_s")); - d = processAdd("ignore-not-in-schema-explicit-selector", - doc(f("id", "1111"), - f("foo_giberish", "123456789", "", 42, "abcd"), - f("bar_giberish", "123456789", "", 42, "abcd"), - f("t_raw", "123456789", "", 42, "abcd"), - f("foo_s", "hoss"))); + d = + processAdd( + "ignore-not-in-schema-explicit-selector", + doc( + f("id", "1111"), + f("foo_giberish", "123456789", "", 42, "abcd"), + f("bar_giberish", "123456789", "", 42, "abcd"), + f("t_raw", "123456789", "", 42, "abcd"), + f("foo_s", "hoss"))); assertNotNull(d); assertFalse(d.containsKey("foo_giberish")); assertFalse(d.containsKey("bar_giberish")); - assertEquals(Arrays.asList("123456789", "", 42, "abcd"), - d.getFieldValues("t_raw")); + assertEquals(Arrays.asList("123456789", "", 42, "abcd"), d.getFieldValues("t_raw")); assertEquals("hoss", d.getFieldValue("foo_s")); - d = processAdd("ignore-not-in-schema-and-foo-name-prefix", - doc(f("id", "1111"), - f("foo_giberish", "123456789", "", 42, "abcd"), - f("bar_giberish", "123456789", "", 42, "abcd"), - f("t_raw", "123456789", "", 42, "abcd"), - f("foo_s", "hoss"))); + d = + processAdd( + "ignore-not-in-schema-and-foo-name-prefix", + doc( + f("id", "1111"), + f("foo_giberish", "123456789", "", 42, "abcd"), + f("bar_giberish", "123456789", "", 42, "abcd"), + f("t_raw", "123456789", "", 42, "abcd"), + f("foo_s", "hoss"))); assertNotNull(d); assertFalse(d.containsKey("foo_giberish")); - assertEquals(Arrays.asList("123456789", "", 42, "abcd"), - d.getFieldValues("bar_giberish")); - 
assertEquals(Arrays.asList("123456789", "", 42, "abcd"), - d.getFieldValues("t_raw")); + assertEquals(Arrays.asList("123456789", "", 42, "abcd"), d.getFieldValues("bar_giberish")); + assertEquals(Arrays.asList("123456789", "", 42, "abcd"), d.getFieldValues("t_raw")); assertEquals("hoss", d.getFieldValue("foo_s")); - d = processAdd("ignore-foo-name-prefix-except-not-schema", - doc(f("id", "1111"), - f("foo_giberish", "123456789", "", 42, "abcd"), - f("bar_giberish", "123456789", "", 42, "abcd"), - f("t_raw", "123456789", "", 42, "abcd"), - f("foo_s", "hoss"))); - assertNotNull(d); - assertEquals(Arrays.asList("123456789", "", 42, "abcd"), - d.getFieldValues("foo_giberish")); - assertEquals(Arrays.asList("123456789", "", 42, "abcd"), - d.getFieldValues("bar_giberish")); - assertEquals(Arrays.asList("123456789", "", 42, "abcd"), - d.getFieldValues("t_raw")); + d = + processAdd( + "ignore-foo-name-prefix-except-not-schema", + doc( + f("id", "1111"), + f("foo_giberish", "123456789", "", 42, "abcd"), + f("bar_giberish", "123456789", "", 42, "abcd"), + f("t_raw", "123456789", "", 42, "abcd"), + f("foo_s", "hoss"))); + assertNotNull(d); + assertEquals(Arrays.asList("123456789", "", 42, "abcd"), d.getFieldValues("foo_giberish")); + assertEquals(Arrays.asList("123456789", "", 42, "abcd"), d.getFieldValues("bar_giberish")); + assertEquals(Arrays.asList("123456789", "", 42, "abcd"), d.getFieldValues("t_raw")); assertFalse(d.containsKey("foo_s")); - d = processAdd("ignore-in-schema", - doc(f("id", "1111"), - f("foo_giberish", "123456789", "", 42, "abcd"), - f("bar_giberish", "123456789", "", 42, "abcd"), - f("t_raw", "123456789", "", 42, "abcd"), - f("foo_s", "hoss"))); + d = + processAdd( + "ignore-in-schema", + doc( + f("id", "1111"), + f("foo_giberish", "123456789", "", 42, "abcd"), + f("bar_giberish", "123456789", "", 42, "abcd"), + f("t_raw", "123456789", "", 42, "abcd"), + f("foo_s", "hoss"))); assertNotNull(d); assertTrue(d.containsKey("foo_giberish")); assertTrue(d.containsKey("bar_giberish")); @@ -697,137 +724,130 @@ public void testIgnore() throws Exception { assertFalse(d.containsKey("t_raw")); assertFalse(d.containsKey("foo_s")); - d = processAdd("ignore-not-in-schema-explicit-str-selector", - doc(f("id", "1111"), - f("foo_giberish", "123456789", "", 42, "abcd"), - f("bar_giberish", "123456789", "", 42, "abcd"), - f("t_raw", "123456789", "", 42, "abcd"), - f("foo_s", "hoss"))); + d = + processAdd( + "ignore-not-in-schema-explicit-str-selector", + doc( + f("id", "1111"), + f("foo_giberish", "123456789", "", 42, "abcd"), + f("bar_giberish", "123456789", "", 42, "abcd"), + f("t_raw", "123456789", "", 42, "abcd"), + f("foo_s", "hoss"))); assertNotNull(d); assertFalse(d.containsKey("foo_giberish")); assertFalse(d.containsKey("bar_giberish")); - assertEquals(Arrays.asList("123456789", "", 42, "abcd"), - d.getFieldValues("t_raw")); + assertEquals(Arrays.asList("123456789", "", 42, "abcd"), d.getFieldValues("t_raw")); assertEquals("hoss", d.getFieldValue("foo_s")); - d = processAdd("ignore-in-schema-str-selector", - doc(f("id", "1111"), - f("foo_giberish", "123456789", "", 42, "abcd"), - f("bar_giberish", "123456789", "", 42, "abcd"), - f("t_raw", "123456789", "", 42, "abcd"), - f("foo_s", "hoss"))); + d = + processAdd( + "ignore-in-schema-str-selector", + doc( + f("id", "1111"), + f("foo_giberish", "123456789", "", 42, "abcd"), + f("bar_giberish", "123456789", "", 42, "abcd"), + f("t_raw", "123456789", "", 42, "abcd"), + f("foo_s", "hoss"))); assertNotNull(d); 
assertTrue(d.containsKey("foo_giberish")); assertTrue(d.containsKey("bar_giberish")); assertFalse(d.containsKey("id")); assertFalse(d.containsKey("t_raw")); assertFalse(d.containsKey("foo_s")); - } - public void testCountValues() throws Exception { SolrInputDocument d = null; - // trivial - d = processAdd("count", - doc(f("id", "1111"), - f("count_field", "aaa", "bbb", "ccc"))); + // trivial + d = processAdd("count", doc(f("id", "1111"), f("count_field", "aaa", "bbb", "ccc"))); assertNotNull(d); assertEquals(3, d.getFieldValue("count_field")); - // edge case: no values to count, means no count + // edge case: no values to count, means no count // (use default if you want one) - d = processAdd("count", - doc(f("id", "1111"))); + d = processAdd("count", doc(f("id", "1111"))); assertNotNull(d); assertFalse(d.containsKey("count_field")); - // typical usecase: clone and count - d = processAdd("clone-then-count", - doc(f("id", "1111"), - f("category", "scifi", "war", "space"), - f("editors", "John W. Campbell"), - f("list_price", 1000))); - assertNotNull(d); - assertEquals(Arrays.asList("scifi", "war", "space"), - d.getFieldValues("category")); - assertEquals(3, - d.getFieldValue("category_count")); - assertEquals(Arrays.asList("John W. Campbell"), - d.getFieldValues("editors")); - assertEquals(1000,d.getFieldValue("list_price")); + // typical usecase: clone and count + d = + processAdd( + "clone-then-count", + doc( + f("id", "1111"), + f("category", "scifi", "war", "space"), + f("editors", "John W. Campbell"), + f("list_price", 1000))); + assertNotNull(d); + assertEquals(Arrays.asList("scifi", "war", "space"), d.getFieldValues("category")); + assertEquals(3, d.getFieldValue("category_count")); + assertEquals(Arrays.asList("John W. Campbell"), d.getFieldValues("editors")); + assertEquals(1000, d.getFieldValue("list_price")); // typical usecase: clone and count demonstrating default - d = processAdd("clone-then-count", - doc(f("id", "1111"), - f("editors", "Anonymous"), - f("list_price", 1000))); - assertNotNull(d); - assertEquals(0, - d.getFieldValue("category_count")); - assertEquals(Arrays.asList("Anonymous"), - d.getFieldValues("editors")); - assertEquals(1000,d.getFieldValue("list_price")); - - - - - } + d = + processAdd( + "clone-then-count", + doc(f("id", "1111"), f("editors", "Anonymous"), f("list_price", 1000))); + assertNotNull(d); + assertEquals(0, d.getFieldValue("category_count")); + assertEquals(Arrays.asList("Anonymous"), d.getFieldValues("editors")); + assertEquals(1000, d.getFieldValue("list_price")); + } public void testConcatDefaults() throws Exception { SolrInputDocument d = null; - d = processAdd("concat-defaults", - doc(f("id", "1111", "222"), - f("attr_foo", "string1", "string2"), - f("foo_s1", "string3", "string4"), - f("bar_dt", "string5", "string6"), - f("bar_HOSS_s", "string7", "string8"), - f("foo_d", 42))); + d = + processAdd( + "concat-defaults", + doc( + f("id", "1111", "222"), + f("attr_foo", "string1", "string2"), + f("foo_s1", "string3", "string4"), + f("bar_dt", "string5", "string6"), + f("bar_HOSS_s", "string7", "string8"), + f("foo_d", 42))); assertNotNull(d); assertEquals("1111, 222", d.getFieldValue("id")); - assertEquals(Arrays.asList("string1","string2"), - d.getFieldValues("attr_foo")); + assertEquals(Arrays.asList("string1", "string2"), d.getFieldValues("attr_foo")); assertEquals("string3, string4", d.getFieldValue("foo_s1")); - assertEquals(Arrays.asList("string5","string6"), - d.getFieldValues("bar_dt")); - 
assertEquals(Arrays.asList("string7","string8"), - d.getFieldValues("bar_HOSS_s")); - assertEquals("processor borked non string value", - 42, d.getFieldValue("foo_d")); - + assertEquals(Arrays.asList("string5", "string6"), d.getFieldValues("bar_dt")); + assertEquals(Arrays.asList("string7", "string8"), d.getFieldValues("bar_HOSS_s")); + assertEquals("processor borked non string value", 42, d.getFieldValue("foo_d")); } public void testConcatExplicit() throws Exception { doSimpleDelimTest("concat-field", ", "); } + public void testConcatExplicitWithDelim() throws Exception { doSimpleDelimTest("concat-type-delim", "; "); } - private void doSimpleDelimTest(final String chain, final String delim) - throws Exception { + + private void doSimpleDelimTest(final String chain, final String delim) throws Exception { SolrInputDocument d = null; - d = processAdd(chain, - doc(f("id", "1111"), - f("foo_t", "string1", "string2"), - f("foo_d", 42), - field("foo_s", "string3", "string4"))); + d = + processAdd( + chain, + doc( + f("id", "1111"), + f("foo_t", "string1", "string2"), + f("foo_d", 42), + field("foo_s", "string3", "string4"))); assertNotNull(d); - assertEquals(Arrays.asList("string1","string2"), - d.getFieldValues("foo_t")); + assertEquals(Arrays.asList("string1", "string2"), d.getFieldValues("foo_t")); assertEquals("string3" + delim + "string4", d.getFieldValue("foo_s")); // slightly more interesting - assertEquals("processor borked non string value", - 42, d.getFieldValue("foo_d")); + assertEquals("processor borked non string value", 42, d.getFieldValue("foo_d")); } - } diff --git a/solr/core/src/test/org/apache/solr/update/processor/IgnoreCommitOptimizeUpdateProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/IgnoreCommitOptimizeUpdateProcessorFactoryTest.java index 4b244886730..dfed0d8e089 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/IgnoreCommitOptimizeUpdateProcessorFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/IgnoreCommitOptimizeUpdateProcessorFactoryTest.java @@ -16,6 +16,7 @@ */ package org.apache.solr.update.processor; +import java.io.IOException; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.core.SolrCore; @@ -26,8 +27,6 @@ import org.apache.solr.update.CommitUpdateCommand; import org.junit.BeforeClass; -import java.io.IOException; - public class IgnoreCommitOptimizeUpdateProcessorFactoryTest extends SolrTestCaseJ4 { @BeforeClass @@ -38,25 +37,36 @@ public static void beforeClass() throws Exception { public void testIgnoreCommit() throws Exception { // verify that the processor returns an error if it receives a commit SolrQueryResponse rsp = processCommit("ignore-commit-from-client-403", false); - assertNotNull("Sending a commit should have resulted in an exception in the response", rsp.getException()); + assertNotNull( + "Sending a commit should have resulted in an exception in the response", + rsp.getException()); rsp = processCommit("ignore-commit-from-client-200", false); Exception shouldBeNull = rsp.getException(); - assertNull("Sending a commit should NOT have resulted in an exception in the response: "+shouldBeNull, shouldBeNull); + assertNull( + "Sending a commit should NOT have resulted in an exception in the response: " + + shouldBeNull, + shouldBeNull); rsp = processCommit("ignore-optimize-only-from-client-403", true); - assertNotNull("Sending an optimize should have resulted in an exception in the response", 
rsp.getException()); + assertNotNull( + "Sending an optimize should have resulted in an exception in the response", + rsp.getException()); // commit should happen if DistributedUpdateProcessor.COMMIT_END_POINT == true rsp = processCommit("ignore-commit-from-client-403", false, Boolean.TRUE); shouldBeNull = rsp.getException(); - assertNull("Sending a commit should NOT have resulted in an exception in the response: "+shouldBeNull, shouldBeNull); + assertNull( + "Sending a commit should NOT have resulted in an exception in the response: " + + shouldBeNull, + shouldBeNull); } SolrQueryResponse processCommit(final String chain, boolean optimize) throws IOException { return processCommit(chain, optimize, null); } - SolrQueryResponse processCommit(final String chain, boolean optimize, Boolean commitEndPoint) throws IOException { + SolrQueryResponse processCommit(final String chain, boolean optimize, Boolean commitEndPoint) + throws IOException { SolrCore core = h.getCore(); UpdateRequestProcessorChain pc = core.getUpdateProcessingChain(chain); assertNotNull("No Chain named: " + chain, pc); @@ -65,12 +75,12 @@ SolrQueryResponse processCommit(final String chain, boolean optimize, Boolean co SolrQueryRequest req = new LocalSolrQueryRequest(core, new ModifiableSolrParams()); if (commitEndPoint != null) { - ((ModifiableSolrParams)req.getParams()).set( - DistributedUpdateProcessor.COMMIT_END_POINT, commitEndPoint.booleanValue()); + ((ModifiableSolrParams) req.getParams()) + .set(DistributedUpdateProcessor.COMMIT_END_POINT, commitEndPoint.booleanValue()); } try { - SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req,rsp)); + SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp)); CommitUpdateCommand cmd = new CommitUpdateCommand(req, false); cmd.optimize = optimize; UpdateRequestProcessor processor = pc.createProcessor(req, rsp); diff --git a/solr/core/src/test/org/apache/solr/update/processor/IgnoreLargeDocumentProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/IgnoreLargeDocumentProcessorFactoryTest.java index f0845352481..c84da2a84e3 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/IgnoreLargeDocumentProcessorFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/IgnoreLargeDocumentProcessorFactoryTest.java @@ -17,6 +17,8 @@ package org.apache.solr.update.processor; +import static org.apache.solr.update.processor.IgnoreLargeDocumentProcessorFactory.ObjectSizeEstimator.estimate; + import java.io.IOException; import java.nio.charset.Charset; import java.util.ArrayList; @@ -24,7 +26,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; - import org.apache.solr.SolrTestCase; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; @@ -32,8 +33,6 @@ import org.apache.solr.update.AddUpdateCommand; import org.junit.Test; -import static org.apache.solr.update.processor.IgnoreLargeDocumentProcessorFactory.ObjectSizeEstimator.estimate; - public class IgnoreLargeDocumentProcessorFactoryTest extends SolrTestCase { @Test @@ -53,7 +52,6 @@ public void testProcessor() throws IOException { factory.init(args); UpdateRequestProcessor requestProcessor = factory.getInstance(null, null, null); requestProcessor.processAdd(getUpdate(1024)); - } public AddUpdateCommand getUpdate(int size) { @@ -87,14 +85,17 @@ public void testEstimateObjectSize() { for (Map.Entry entry : map.entrySet()) { document.addField(entry.getKey(), entry.getValue()); } - 
assertEquals(IgnoreLargeDocumentProcessorFactory.ObjectSizeEstimator.estimate(document), estimate(map)); + assertEquals( + IgnoreLargeDocumentProcessorFactory.ObjectSizeEstimator.estimate(document), estimate(map)); SolrInputDocument childDocument = new SolrInputDocument(); for (Map.Entry entry : map.entrySet()) { childDocument.addField(entry.getKey(), entry.getValue()); } document.addChildDocument(childDocument); - assertEquals(IgnoreLargeDocumentProcessorFactory.ObjectSizeEstimator.estimate(document), estimate(map) * 2); + assertEquals( + IgnoreLargeDocumentProcessorFactory.ObjectSizeEstimator.estimate(document), + estimate(map) * 2); } @Test @@ -116,12 +117,13 @@ public void testEstimateObjectSizeWithSingleChild() { assertEquals(estimate(mapWChild), 50); Map childMap = new HashMap<>(mapWChild); - SolrInputDocument document = new SolrInputDocument(); for (Map.Entry entry : mapWChild.entrySet()) { document.addField(entry.getKey(), entry.getValue()); } - assertEquals(IgnoreLargeDocumentProcessorFactory.ObjectSizeEstimator.estimate(document), estimate(mapWChild)); + assertEquals( + IgnoreLargeDocumentProcessorFactory.ObjectSizeEstimator.estimate(document), + estimate(mapWChild)); SolrInputDocument childDocument = new SolrInputDocument(); for (Map.Entry entry : mapWChild.entrySet()) { @@ -129,8 +131,12 @@ public void testEstimateObjectSizeWithSingleChild() { } document.addField(childDocKey, childDocument); mapWChild.put(childDocKey, childMap); - assertEquals(IgnoreLargeDocumentProcessorFactory.ObjectSizeEstimator.estimate(document), estimate(childMap) * 2 + estimate(childDocKey)); - assertEquals(IgnoreLargeDocumentProcessorFactory.ObjectSizeEstimator.estimate(document), estimate(mapWChild)); + assertEquals( + IgnoreLargeDocumentProcessorFactory.ObjectSizeEstimator.estimate(document), + estimate(childMap) * 2 + estimate(childDocKey)); + assertEquals( + IgnoreLargeDocumentProcessorFactory.ObjectSizeEstimator.estimate(document), + estimate(mapWChild)); } @Test @@ -152,26 +158,32 @@ public void testEstimateObjectSizeWithChildList() { assertEquals(estimate(mapWChild), 50); Map childMap = new HashMap<>(mapWChild); - SolrInputDocument document = new SolrInputDocument(); for (Map.Entry entry : mapWChild.entrySet()) { document.addField(entry.getKey(), entry.getValue()); } - assertEquals(IgnoreLargeDocumentProcessorFactory.ObjectSizeEstimator.estimate(document), estimate(mapWChild)); + assertEquals( + IgnoreLargeDocumentProcessorFactory.ObjectSizeEstimator.estimate(document), + estimate(mapWChild)); SolrInputDocument childDocument = new SolrInputDocument(); for (Map.Entry entry : mapWChild.entrySet()) { childDocument.addField(entry.getKey(), entry.getValue()); } - List childList = new ArrayList(){ - { - add(childDocument); - add(new SolrInputDocument(childDocument)); - } - }; + List childList = + new ArrayList() { + { + add(childDocument); + add(new SolrInputDocument(childDocument)); + } + }; document.addField(childDocKey, childList); mapWChild.put(childDocKey, childList); - assertEquals(IgnoreLargeDocumentProcessorFactory.ObjectSizeEstimator.estimate(document), estimate(mapWChild)); - assertEquals(IgnoreLargeDocumentProcessorFactory.ObjectSizeEstimator.estimate(document), estimate(childMap) * (childList.size() + 1) + estimate(childDocKey)); + assertEquals( + IgnoreLargeDocumentProcessorFactory.ObjectSizeEstimator.estimate(document), + estimate(mapWChild)); + assertEquals( + IgnoreLargeDocumentProcessorFactory.ObjectSizeEstimator.estimate(document), + estimate(childMap) * (childList.size() + 1) + 
estimate(childDocKey)); } } diff --git a/solr/core/src/test/org/apache/solr/update/processor/JavaBinAtomicUpdateMultivalueTest.java b/solr/core/src/test/org/apache/solr/update/processor/JavaBinAtomicUpdateMultivalueTest.java index 5f9889e04e5..8d4ae2cf362 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/JavaBinAtomicUpdateMultivalueTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/JavaBinAtomicUpdateMultivalueTest.java @@ -24,5 +24,4 @@ public class JavaBinAtomicUpdateMultivalueTest extends AbstractAtomicUpdatesMult RequestWriterSupplier getRequestWriterSupplier() { return RequestWriterSupplier.JavaBin; } - } diff --git a/solr/core/src/test/org/apache/solr/update/processor/NestedAtomicUpdateTest.java b/solr/core/src/test/org/apache/solr/update/processor/NestedAtomicUpdateTest.java index 65d6bde3da6..3f43a080cb0 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/NestedAtomicUpdateTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/NestedAtomicUpdateTest.java @@ -17,6 +17,9 @@ package org.apache.solr.update.processor; +import static org.apache.solr.handler.component.RealTimeGetComponent.Resolution.DOC; +import static org.apache.solr.handler.component.RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN; + import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -26,7 +29,6 @@ import java.util.Map; import java.util.stream.Collectors; import java.util.stream.IntStream; - import org.apache.lucene.util.BytesRef; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; @@ -38,12 +40,9 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.apache.solr.handler.component.RealTimeGetComponent.Resolution.DOC; -import static org.apache.solr.handler.component.RealTimeGetComponent.Resolution.ROOT_WITH_CHILDREN; - public class NestedAtomicUpdateTest extends SolrTestCaseJ4 { - private final static String VERSION = "_version_"; + private static final String VERSION = "_version_"; @BeforeClass public static void beforeTests() throws Exception { @@ -59,22 +58,40 @@ public void before() { @Test public void testMergeChildDoc() throws Exception { SolrInputDocument newChildDoc = sdoc("id", "3", "cat_ss", "child"); - SolrInputDocument addedDoc = sdoc("id", "1", - "cat_ss", Collections.singletonMap("add", "bbb"), - "child", Collections.singletonMap("add", sdocs(newChildDoc))); - - SolrInputDocument dummyBlock = sdoc("id", "1", - "cat_ss", new ArrayList<>(Arrays.asList("aaa", "ccc")), - "_root_", "1", "child", new ArrayList<>(sdocs(addedDoc))); + SolrInputDocument addedDoc = + sdoc( + "id", + "1", + "cat_ss", + Collections.singletonMap("add", "bbb"), + "child", + Collections.singletonMap("add", sdocs(newChildDoc))); + + SolrInputDocument dummyBlock = + sdoc( + "id", + "1", + "cat_ss", + new ArrayList<>(Arrays.asList("aaa", "ccc")), + "_root_", + "1", + "child", + new ArrayList<>(sdocs(addedDoc))); dummyBlock.removeField(VERSION); SolrInputDocument preMergeDoc = new SolrInputDocument(dummyBlock); AtomicUpdateDocumentMerger docMerger = new AtomicUpdateDocumentMerger(req()); docMerger.merge(addedDoc, dummyBlock); - assertEquals("merged document should have the same id", preMergeDoc.getFieldValue("id"), dummyBlock.getFieldValue("id")); + assertEquals( + "merged document should have the same id", + preMergeDoc.getFieldValue("id"), + dummyBlock.getFieldValue("id")); assertDocContainsSubset(preMergeDoc, dummyBlock); assertDocContainsSubset(addedDoc, dummyBlock); - final List children = 
dummyBlock.getFieldValues("child").stream().map(SolrInputDocument.class::cast).collect(Collectors.toList()); + final List children = + dummyBlock.getFieldValues("child").stream() + .map(SolrInputDocument.class::cast) + .collect(Collectors.toList()); assertDocContainsSubset(newChildDoc, children.get(1)); assertEquals(dummyBlock.getFieldValue("id"), dummyBlock.getFieldValue("id")); } @@ -82,90 +99,158 @@ public void testMergeChildDoc() throws Exception { @Test public void testMergeChildDocsWithSameId() throws Exception { SolrInputDocument existingChild = sdoc("id", "2", "cat_ss", "child"); - SolrInputDocument existingDoc = sdoc("id", "1", - "cat_ss", new ArrayList<>(Arrays.asList("aaa", "ccc")), - "_root_", "1", "child", new ArrayList<>(sdocs(existingChild))); + SolrInputDocument existingDoc = + sdoc( + "id", + "1", + "cat_ss", + new ArrayList<>(Arrays.asList("aaa", "ccc")), + "_root_", + "1", + "child", + new ArrayList<>(sdocs(existingChild))); SolrInputDocument updatedChildDoc = sdoc("id", "2", "cat_ss", "updated child"); - SolrInputDocument updateDoc = sdoc("id", "1", - "cat_ss", Collections.singletonMap("add", "bbb"), // add value to collection on parent - "child", Collections.singletonMap("add", sdocs(updatedChildDoc))); // child with same id and updated "cat_ss" field - + SolrInputDocument updateDoc = + sdoc( + "id", + "1", + "cat_ss", + Collections.singletonMap("add", "bbb"), // add value to collection on parent + "child", + Collections.singletonMap( + "add", sdocs(updatedChildDoc))); // child with same id and updated "cat_ss" field SolrInputDocument preMergeDoc = new SolrInputDocument(existingDoc); AtomicUpdateDocumentMerger docMerger = new AtomicUpdateDocumentMerger(req()); docMerger.merge(updateDoc, existingDoc); - assertEquals("merged document should have the same id", preMergeDoc.getFieldValue("id"), existingDoc.getFieldValue("id")); + assertEquals( + "merged document should have the same id", + preMergeDoc.getFieldValue("id"), + existingDoc.getFieldValue("id")); assertDocContainsSubset(preMergeDoc, existingDoc); assertDocContainsSubset(updateDoc, existingDoc); assertEquals(1, existingDoc.getFieldValues("child").size()); - assertDocContainsSubset(updatedChildDoc, ((SolrInputDocument) existingDoc.getFieldValues("child").toArray()[0])); + assertDocContainsSubset( + updatedChildDoc, ((SolrInputDocument) existingDoc.getFieldValues("child").toArray()[0])); } @Test public void testMergeChildDocsWithSameAndNestedSet() throws Exception { SolrInputDocument existingChild = sdoc("id", "2", "cat_ss", "child"); - SolrInputDocument existingDoc = sdoc("id", "1", - "cat_ss", new ArrayList<>(Arrays.asList("aaa", "ccc")), - "_root_", "1", "child", new ArrayList<>(sdocs(existingChild))); - - - SolrInputDocument updatedChildDoc = sdoc("id", "2", "cat_ss", Collections.singletonMap("set", "updated child")); - SolrInputDocument updateDoc = sdoc("id", "1", - "cat_ss", Collections.singletonMap("add", "bbb"), // add value to collection on parent - "child", Collections.singletonMap("add", sdocs(updatedChildDoc))); // child with same id and nested set on "cat_ss" field - + SolrInputDocument existingDoc = + sdoc( + "id", + "1", + "cat_ss", + new ArrayList<>(Arrays.asList("aaa", "ccc")), + "_root_", + "1", + "child", + new ArrayList<>(sdocs(existingChild))); + + SolrInputDocument updatedChildDoc = + sdoc("id", "2", "cat_ss", Collections.singletonMap("set", "updated child")); + SolrInputDocument updateDoc = + sdoc( + "id", + "1", + "cat_ss", + Collections.singletonMap("add", "bbb"), // add value to collection 
on parent + "child", + Collections.singletonMap( + "add", + sdocs(updatedChildDoc))); // child with same id and nested set on "cat_ss" field SolrInputDocument preMergeDoc = new SolrInputDocument(existingDoc); AtomicUpdateDocumentMerger docMerger = new AtomicUpdateDocumentMerger(req()); docMerger.merge(updateDoc, existingDoc); - assertEquals("merged document should have the same id", preMergeDoc.getFieldValue("id"), existingDoc.getFieldValue("id")); + assertEquals( + "merged document should have the same id", + preMergeDoc.getFieldValue("id"), + existingDoc.getFieldValue("id")); assertDocContainsSubset(preMergeDoc, existingDoc); assertDocContainsSubset(updateDoc, existingDoc); assertEquals(1, existingDoc.getFieldValues("child").size()); - assertDocContainsSubset(sdoc("id", "2", "cat_ss", "updated child"), ((SolrInputDocument) existingDoc.getFieldValues("child").toArray()[0])); + assertDocContainsSubset( + sdoc("id", "2", "cat_ss", "updated child"), + ((SolrInputDocument) existingDoc.getFieldValues("child").toArray()[0])); } @Test public void testMergeChildDocsWithMultipleChildDocs() throws Exception { SolrInputDocument existingChild = sdoc("id", "2", "cat_ss", "child"); SolrInputDocument nonMatchingExistingChild = sdoc("id", "3", "cat_ss", "other"); - SolrInputDocument existingDoc = sdoc("id", "1", - "cat_ss", new ArrayList<>(Arrays.asList("aaa", "ccc")), - "_root_", "1", "child", new ArrayList<>(sdocs(existingChild, nonMatchingExistingChild))); + SolrInputDocument existingDoc = + sdoc( + "id", + "1", + "cat_ss", + new ArrayList<>(Arrays.asList("aaa", "ccc")), + "_root_", + "1", + "child", + new ArrayList<>(sdocs(existingChild, nonMatchingExistingChild))); SolrInputDocument updatedChildDoc = sdoc("id", "2", "cat_ss", "updated child"); - SolrInputDocument updateDoc = sdoc("id", "1", - "cat_ss", Collections.singletonMap("add", "bbb"), // add value to collection on parent - "child", Collections.singletonMap("add", sdocs(updatedChildDoc))); // child with same id and updated "cat_ss" field - + SolrInputDocument updateDoc = + sdoc( + "id", + "1", + "cat_ss", + Collections.singletonMap("add", "bbb"), // add value to collection on parent + "child", + Collections.singletonMap( + "add", sdocs(updatedChildDoc))); // child with same id and updated "cat_ss" field SolrInputDocument preMergeDoc = new SolrInputDocument(existingDoc); AtomicUpdateDocumentMerger docMerger = new AtomicUpdateDocumentMerger(req()); docMerger.merge(updateDoc, existingDoc); - assertEquals("merged document should have the same id", preMergeDoc.getFieldValue("id"), existingDoc.getFieldValue("id")); + assertEquals( + "merged document should have the same id", + preMergeDoc.getFieldValue("id"), + existingDoc.getFieldValue("id")); assertDocContainsSubset(preMergeDoc, existingDoc); assertDocContainsSubset(updateDoc, existingDoc); assertEquals(2, existingDoc.getFieldValues("child").size()); - assertDocContainsSubset(updatedChildDoc, ((SolrInputDocument) existingDoc.getFieldValues("child").toArray()[0])); + assertDocContainsSubset( + updatedChildDoc, ((SolrInputDocument) existingDoc.getFieldValues("child").toArray()[0])); } @Test public void testAtomicUpdateNonExistingChildException() throws Exception { SolrInputDocument existingChild = sdoc("id", "2", "cat_ss", "child"); - SolrInputDocument existingDoc = sdoc("id", "1", - "cat_ss", new ArrayList<>(Arrays.asList("aaa", "ccc")), - "_root_", "1", "child_ss", new ArrayList<>(sdocs(existingChild))); - - SolrInputDocument updateDoc = sdoc("id", "1", - "child_ss", 
Collections.singletonMap("add", sdoc("id", "3", "cat_ss", Map.of("set", "child2")))); // an atomic update + SolrInputDocument existingDoc = + sdoc( + "id", + "1", + "cat_ss", + new ArrayList<>(Arrays.asList("aaa", "ccc")), + "_root_", + "1", + "child_ss", + new ArrayList<>(sdocs(existingChild))); + + SolrInputDocument updateDoc = + sdoc( + "id", + "1", + "child_ss", + Collections.singletonMap( + "add", sdoc("id", "3", "cat_ss", Map.of("set", "child2")))); // an atomic update AtomicUpdateDocumentMerger docMerger = new AtomicUpdateDocumentMerger(req()); - SolrException expected = expectThrows(SolrException.class, () -> { - docMerger.merge(updateDoc, existingDoc); - }); - assertTrue(expected.getMessage().equals("A nested atomic update can only update an existing nested document")); + SolrException expected = + expectThrows( + SolrException.class, + () -> { + docMerger.merge(updateDoc, existingDoc); + }); + assertTrue( + expected + .getMessage() + .equals("A nested atomic update can only update an existing nested document")); } @Test @@ -175,75 +260,101 @@ public void testBlockAtomicInplaceUpdates() throws Exception { assertU(commit()); - assertQ(req("q", "id:1", "fl", "*"), - "//*[@numFound='1']", - "//doc[1]/str[@name='id']=1" - ); + assertQ(req("q", "id:1", "fl", "*"), "//*[@numFound='1']", "//doc[1]/str[@name='id']=1"); - List docs = IntStream.range(10, 20).mapToObj(x -> sdoc("id", String.valueOf(x), "string_s", - "child", "inplace_updatable_int", "0")).collect(Collectors.toList()); + List docs = + IntStream.range(10, 20) + .mapToObj( + x -> + sdoc( + "id", String.valueOf(x), "string_s", "child", "inplace_updatable_int", "0")) + .collect(Collectors.toList()); doc = sdoc("id", "1", "_root_", "1", "children", Collections.singletonMap("add", docs)); addAndGetVersion(doc, null); assertU(commit()); + assertQ(req("q", "_root_:1", "fl", "*", "rows", "11"), "//*[@numFound='11']"); - assertQ(req("q", "_root_:1", "fl", "*", "rows", "11"), - "//*[@numFound='11']" - ); - - assertQ(req("q", "string_s:child", "fl", "*"), + assertQ( + req("q", "string_s:child", "fl", "*"), "//*[@numFound='10']", - "*[count(//str[@name='string_s'][.='child'])=10]" - ); + "*[count(//str[@name='string_s'][.='child'])=10]"); - for(int i = 10; i < 20; ++i) { - doc = sdoc("id", String.valueOf(i), "_root_", "1", "inplace_updatable_int", Collections.singletonMap("inc", "1")); + for (int i = 10; i < 20; ++i) { + doc = + sdoc( + "id", + String.valueOf(i), + "_root_", + "1", + "inplace_updatable_int", + Collections.singletonMap("inc", "1")); addAndGetVersion(doc, null); assertU(commit()); } - for(int i = 10; i < 20; ++i) { - doc = sdoc("id", String.valueOf(i), "_root_", "1", "inplace_updatable_int", Collections.singletonMap("inc", "1")); + for (int i = 10; i < 20; ++i) { + doc = + sdoc( + "id", + String.valueOf(i), + "_root_", + "1", + "inplace_updatable_int", + Collections.singletonMap("inc", "1")); addAndGetVersion(doc, null); assertU(commit()); } // ensure updates work when block has more than 10 children - for(int i = 10; i < 20; ++i) { - docs = IntStream.range(i * 10, (i * 10) + 5).mapToObj(x -> sdoc("id", String.valueOf(x), "string_s", "grandChild")).collect(Collectors.toList()); - doc = sdoc("id", String.valueOf(i), "_root_", "1", "grandChildren", Collections.singletonMap("add", docs)); + for (int i = 10; i < 20; ++i) { + docs = + IntStream.range(i * 10, (i * 10) + 5) + .mapToObj(x -> sdoc("id", String.valueOf(x), "string_s", "grandChild")) + .collect(Collectors.toList()); + doc = + sdoc( + "id", + String.valueOf(i), + 
"_root_", + "1", + "grandChildren", + Collections.singletonMap("add", docs)); addAndGetVersion(doc, null); assertU(commit()); } - for(int i =10; i < 20; ++i) { - doc = sdoc("id", String.valueOf(i), "_root_", "1", "inplace_updatable_int", Collections.singletonMap("inc", "1")); + for (int i = 10; i < 20; ++i) { + doc = + sdoc( + "id", + String.valueOf(i), + "_root_", + "1", + "inplace_updatable_int", + Collections.singletonMap("inc", "1")); addAndGetVersion(doc, null); assertU(commit()); } - assertQ(req("q", "-_root_:*", "fl", "*"), - "//*[@numFound='0']" - ); + assertQ(req("q", "-_root_:*", "fl", "*"), "//*[@numFound='0']"); - assertQ(req("q", "string_s:grandChild", "fl", "*", "rows", "50"), - "//*[@numFound='50']" - ); + assertQ(req("q", "string_s:grandChild", "fl", "*", "rows", "50"), "//*[@numFound='50']"); - assertQ(req("q", "string_s:child", "fl", "*"), + assertQ( + req("q", "string_s:child", "fl", "*"), "//*[@numFound='10']", "*[count(//str[@name='string_s'][.='child'])=10]"); - assertJQ(req("q", "id:1", "fl", "*,[child limit=-1]"), + assertJQ( + req("q", "id:1", "fl", "*,[child limit=-1]"), "/response/docs/[0]/id=='1'", "/response/docs/[0]/children/[0]/id=='10'", "/response/docs/[0]/children/[0]/inplace_updatable_int==3", "/response/docs/[0]/children/[0]/grandChildren/[0]/id=='100'", "/response/docs/[0]/children/[0]/grandChildren/[4]/id=='104'", - "/response/docs/[0]/children/[9]/id=='19'" - ); - + "/response/docs/[0]/children/[9]/id=='19'"); } @Test @@ -253,30 +364,30 @@ public void testBlockAtomicQuantities() throws Exception { assertU(commit()); - assertQ(req("q", "id:1", "fl", "*"), - "//*[@numFound='1']", - "//doc[1]/str[@name='id']=1" - ); + assertQ(req("q", "id:1", "fl", "*"), "//*[@numFound='1']", "//doc[1]/str[@name='id']=1"); - List docs = IntStream.range(10, 20).mapToObj(x -> sdoc("id", String.valueOf(x), "string_s", "child")).collect(Collectors.toList()); + List docs = + IntStream.range(10, 20) + .mapToObj(x -> sdoc("id", String.valueOf(x), "string_s", "child")) + .collect(Collectors.toList()); doc = sdoc("id", "1", "children", Collections.singletonMap("add", docs)); addAndGetVersion(doc, null); assertU(commit()); + assertQ(req("q", "_root_:1", "fl", "*", "rows", "11"), "//*[@numFound='11']"); - assertQ(req("q", "_root_:1", "fl", "*", "rows", "11"), - "//*[@numFound='11']" - ); - - assertQ(req("q", "string_s:child", "fl", "*"), + assertQ( + req("q", "string_s:child", "fl", "*"), "//*[@numFound='10']", - "*[count(//str[@name='string_s'][.='child'])=10]" - ); + "*[count(//str[@name='string_s'][.='child'])=10]"); // ensure updates work when block has more than 10 children - for(int i = 10; i < 20; ++i) { - docs = IntStream.range(i * 10, (i * 10) + 5).mapToObj(x -> sdoc("id", String.valueOf(x), "string_s", "grandChild")).collect(Collectors.toList()); + for (int i = 10; i < 20; ++i) { + docs = + IntStream.range(i * 10, (i * 10) + 5) + .mapToObj(x -> sdoc("id", String.valueOf(x), "string_s", "grandChild")) + .collect(Collectors.toList()); doc = sdoc( "id", @@ -289,11 +400,13 @@ public void testBlockAtomicQuantities() throws Exception { assertU(commit()); } - assertQ(req("q", "string_s:grandChild", "fl", "*", "rows", "50"), + assertQ( + req("q", "string_s:grandChild", "fl", "*", "rows", "50"), "//*[@numFound='50']", "*[count(//str[@name='string_s'][.='grandChild'])=50]"); - assertQ(req("q", "string_s:child", "fl", "*"), + assertQ( + req("q", "string_s:child", "fl", "*"), "//*[@numFound='10']", "*[count(//str[@name='string_s'][.='child'])=10]"); } @@ -305,32 +418,48 @@ public 
void testBlockAtomicStack() throws Exception { assertU(commit()); - assertJQ(req("q","id:1", "fl", "*, [child]"), + assertJQ( + req("q", "id:1", "fl", "*, [child]"), "/response/numFound==1", "/response/docs/[0]/child1/[0]/id=='2'", - "/response/docs/[0]/child1/[0]/child_s=='child'" - ); - - doc = sdoc("id", "1", "child1", Collections.singletonMap("add", sdocs(sdoc("id", "3", "child_s", "child")))); + "/response/docs/[0]/child1/[0]/child_s=='child'"); + + doc = + sdoc( + "id", + "1", + "child1", + Collections.singletonMap("add", sdocs(sdoc("id", "3", "child_s", "child")))); addAndGetVersion(doc, null); assertU(commit()); - assertJQ(req("q","id:1", "fl", "*, [child]"), + assertJQ( + req("q", "id:1", "fl", "*, [child]"), "/response/numFound==1", "/response/docs/[0]/child1/[0]/id=='2'", "/response/docs/[0]/child1/[0]/child_s=='child'", "/response/docs/[0]/child1/[1]/id=='3'", - "/response/docs/[0]/child1/[0]/child_s=='child'" - ); - - doc = sdoc("id", "2", "_root_", "1", - "grandChild", Collections.singletonMap("add", sdocs(sdoc("id", "4", "child_s", "grandChild"), sdoc("id", "5", "child_s", "grandChild")))); + "/response/docs/[0]/child1/[0]/child_s=='child'"); + + doc = + sdoc( + "id", + "2", + "_root_", + "1", + "grandChild", + Collections.singletonMap( + "add", + sdocs( + sdoc("id", "4", "child_s", "grandChild"), + sdoc("id", "5", "child_s", "grandChild")))); addAndGetVersion(doc, null); assertU(commit()); - assertJQ(req("q","id:1", "fl", "*, [child]", "sort", "id asc"), + assertJQ( + req("q", "id:1", "fl", "*, [child]", "sort", "id asc"), "/response/numFound==1", "/response/docs/[0]/id=='1'", "/response/docs/[0]/child1/[0]/id=='2'", @@ -338,16 +467,20 @@ public void testBlockAtomicStack() throws Exception { "/response/docs/[0]/child1/[1]/id=='3'", "/response/docs/[0]/child1/[1]/child_s=='child'", "/response/docs/[0]/child1/[0]/grandChild/[0]/id=='4'", - "/response/docs/[0]/child1/[0]/grandChild/[0]/child_s=='grandChild'" - ); - - doc = sdoc("id", "1", - "child2", Collections.singletonMap("add", sdocs(sdoc("id", "8", "child_s", "child")))); + "/response/docs/[0]/child1/[0]/grandChild/[0]/child_s=='grandChild'"); + + doc = + sdoc( + "id", + "1", + "child2", + Collections.singletonMap("add", sdocs(sdoc("id", "8", "child_s", "child")))); addAndGetVersion(doc, null); assertU(commit()); - assertJQ(req("q","id:1", "fl", "*, [child]", "sort", "id asc"), //fails + assertJQ( + req("q", "id:1", "fl", "*, [child]", "sort", "id asc"), // fails "/response/numFound==1", "/response/docs/[0]/id=='1'", "/response/docs/[0]/child1/[0]/id=='2'", @@ -357,17 +490,16 @@ public void testBlockAtomicStack() throws Exception { "/response/docs/[0]/child1/[0]/grandChild/[0]/id=='4'", "/response/docs/[0]/child1/[0]/grandChild/[0]/child_s=='grandChild'", "/response/docs/[0]/child2/[0]/id=='8'", - "/response/docs/[0]/child2/[0]/child_s=='child'" - ); + "/response/docs/[0]/child2/[0]/child_s=='child'"); - doc = sdoc("id", "1", - "new_s", Collections.singletonMap("add", "new string")); + doc = sdoc("id", "1", "new_s", Collections.singletonMap("add", "new string")); addAndGetVersion(doc, null); assertU(commit()); // ensure the whole block has been committed correctly to the index. 
- assertJQ(req("q","id:1", "fl", "*, [child]"), + assertJQ( + req("q", "id:1", "fl", "*, [child]"), "/response/numFound==1", "/response/docs/[0]/id=='1'", "/response/docs/[0]/child1/[0]/id=='2'", @@ -380,137 +512,157 @@ public void testBlockAtomicStack() throws Exception { "/response/docs/[0]/child1/[0]/grandChild/[1]/child_s=='grandChild'", "/response/docs/[0]/new_s=='new string'", "/response/docs/[0]/child2/[0]/id=='8'", - "/response/docs/[0]/child2/[0]/child_s=='child'" - ); - + "/response/docs/[0]/child2/[0]/child_s=='child'"); } @Test public void testBlockAtomicAdd() throws Exception { final SolrInputDocument sdoc2 = sdoc("id", "2", "cat_ss", "child"); - SolrInputDocument doc = sdoc("id", "1", - "cat_ss", new String[] {"aaa", "ccc"}, - "child1", sdoc2 - ); + SolrInputDocument doc = sdoc("id", "1", "cat_ss", new String[] {"aaa", "ccc"}, "child1", sdoc2); assertU(adoc(doc)); BytesRef rootDocId = new BytesRef("1"); SolrCore core = h.getCore(); - SolrInputDocument block = RealTimeGetComponent.getInputDocument(core, rootDocId, rootDocId, null, null, ROOT_WITH_CHILDREN); + SolrInputDocument block = + RealTimeGetComponent.getInputDocument( + core, rootDocId, rootDocId, null, null, ROOT_WITH_CHILDREN); // assert block doc has child docs assertTrue(block.containsKey("child1")); - assertJQ(req("q","id:1") - ,"/response/numFound==0" - ); + assertJQ(req("q", "id:1"), "/response/numFound==0"); // commit the changes assertU(commit()); BytesRef childDocId = new BytesRef("2"); - assertEquals(sdoc2.toString(), removeSpecialFields( - RealTimeGetComponent.getInputDocument(core, childDocId, rootDocId, null, null, DOC) - ).toString()); + assertEquals( + sdoc2.toString(), + removeSpecialFields( + RealTimeGetComponent.getInputDocument(core, childDocId, rootDocId, null, null, DOC)) + .toString()); - assertJQ(req("q","id:1") - ,"/response/numFound==1" - ); + assertJQ(req("q", "id:1"), "/response/numFound==1"); - doc = sdoc("id", "1", - "cat_ss", Collections.singletonMap("add", "bbb"), - "child2", Collections.singletonMap("add", sdoc("id", "3", "cat_ss", "child"))); + doc = + sdoc( + "id", + "1", + "cat_ss", + Collections.singletonMap("add", "bbb"), + "child2", + Collections.singletonMap("add", sdoc("id", "3", "cat_ss", "child"))); addAndGetVersion(doc, null); - - assertJQ(req("qt","/get", "id","1", "fl","id, cat_ss, child1, child2, [child]") - ,"=={\"doc\":{'id':\"1\"" + - ", cat_ss:[\"aaa\",\"ccc\",\"bbb\"], child2:[{\"id\":\"3\", \"cat_ss\": [\"child\"]}]," + - "child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}]" + - " }}" - ); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id, cat_ss, child1, child2, [child]"), + "=={\"doc\":{'id':\"1\"" + + ", cat_ss:[\"aaa\",\"ccc\",\"bbb\"], child2:[{\"id\":\"3\", \"cat_ss\": [\"child\"]}]," + + "child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}]" + + " }}"); assertU(commit()); - // a cut-n-paste of the first big query, but this time it will be retrieved from the index rather than the transaction log - // this requires ChildDocTransformer to get the whole block, since the document is retrieved using an index lookup - assertJQ(req("qt","/get", "id","1", "fl","id, cat_ss, child1, child2, [child]") - , "=={\"doc\":{'id':\"1\"" + - ", cat_ss:[\"aaa\",\"ccc\",\"bbb\"], child2:[{\"id\":\"3\", \"cat_ss\": [\"child\"]}]," + - "child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}]" + - " }}" - ); - - doc = sdoc("id", "2", - "_root_", "1", - "child3", Collections.singletonMap("add", sdoc("id", "4", "cat_ss", "grandChild"))); + // a cut-n-paste of the first big query, but this time it will be 
retrieved from the index + // rather than the transaction log + // this requires ChildDocTransformer to get the whole block, since the document is retrieved + // using an index lookup + assertJQ( + req("qt", "/get", "id", "1", "fl", "id, cat_ss, child1, child2, [child]"), + "=={\"doc\":{'id':\"1\"" + + ", cat_ss:[\"aaa\",\"ccc\",\"bbb\"], child2:[{\"id\":\"3\", \"cat_ss\": [\"child\"]}]," + + "child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}]" + + " }}"); + + doc = + sdoc( + "id", + "2", + "_root_", + "1", + "child3", + Collections.singletonMap("add", sdoc("id", "4", "cat_ss", "grandChild"))); addAndGetVersion(doc, null); - assertJQ(req("qt","/get", "id","1", "fl","id, cat_ss, child1, child2, child3, [child]") - ,"=={'doc':{'id':'1'" + - ", cat_ss:[\"aaa\",\"ccc\",\"bbb\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"], child3:[{\"id\":\"4\",\"cat_ss\":[\"grandChild\"]}]}]," + - "child2:[{\"id\":\"3\", \"cat_ss\": [\"child\"]}]" + - " }}" - ); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id, cat_ss, child1, child2, child3, [child]"), + "=={'doc':{'id':'1'" + + ", cat_ss:[\"aaa\",\"ccc\",\"bbb\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"], child3:[{\"id\":\"4\",\"cat_ss\":[\"grandChild\"]}]}]," + + "child2:[{\"id\":\"3\", \"cat_ss\": [\"child\"]}]" + + " }}"); - assertJQ(req("qt","/get", "id","2", "fl","id, cat_ss, child, child3, [child]") - ,"=={'doc':{\"id\":\"2\",\"cat_ss\":[\"child\"], child3:[{\"id\":\"4\",\"cat_ss\":[\"grandChild\"]}]}" + - " }}" - ); + assertJQ( + req("qt", "/get", "id", "2", "fl", "id, cat_ss, child, child3, [child]"), + "=={'doc':{\"id\":\"2\",\"cat_ss\":[\"child\"], child3:[{\"id\":\"4\",\"cat_ss\":[\"grandChild\"]}]}" + + " }}"); assertU(commit()); - //add greatGrandChild - doc = sdoc("id", "4", "_root_", "1", - "child4", Collections.singletonMap("add", sdoc("id", "5", "cat_ss", "greatGrandChild"))); + // add greatGrandChild + doc = + sdoc( + "id", + "4", + "_root_", + "1", + "child4", + Collections.singletonMap("add", sdoc("id", "5", "cat_ss", "greatGrandChild"))); addAndGetVersion(doc, null); - assertJQ(req("qt","/get", "id","1", "fl","id, cat_ss, child1, child2, child3, child4, [child]") - ,"=={'doc':{'id':'1'" + - ", cat_ss:[\"aaa\",\"ccc\",\"bbb\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"], child3:[{\"id\":\"4\",\"cat_ss\":[\"grandChild\"]," + - " child4:[{\"id\":\"5\",\"cat_ss\":[\"greatGrandChild\"]}]}]}], child2:[{\"id\":\"3\", \"cat_ss\": [\"child\"]}]" + - " }}" - ); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id, cat_ss, child1, child2, child3, child4, [child]"), + "=={'doc':{'id':'1'" + + ", cat_ss:[\"aaa\",\"ccc\",\"bbb\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"], child3:[{\"id\":\"4\",\"cat_ss\":[\"grandChild\"]," + + " child4:[{\"id\":\"5\",\"cat_ss\":[\"greatGrandChild\"]}]}]}], child2:[{\"id\":\"3\", \"cat_ss\": [\"child\"]}]" + + " }}"); - assertJQ(req("qt","/get", "id","4", "fl","id, cat_ss, child4, [child]") - ,"=={'doc':{\"id\":\"4\",\"cat_ss\":[\"grandChild\"], child4:[{\"id\":\"5\",\"cat_ss\":[\"greatGrandChild\"]}]}" + - " }}" - ); + assertJQ( + req("qt", "/get", "id", "4", "fl", "id, cat_ss, child4, [child]"), + "=={'doc':{\"id\":\"4\",\"cat_ss\":[\"grandChild\"], child4:[{\"id\":\"5\",\"cat_ss\":[\"greatGrandChild\"]}]}" + + " }}"); assertU(commit()); - //add another greatGrandChild - doc = sdoc("id", "4", "_root_", "1", - "child4", Collections.singletonMap("add", sdoc("id", "6", "cat_ss", "greatGrandChild"))); + // add another greatGrandChild + doc = + sdoc( + "id", + "4", + "_root_", + "1", + "child4", + 
Collections.singletonMap("add", sdoc("id", "6", "cat_ss", "greatGrandChild"))); addAndGetVersion(doc, null); assertU(commit()); - assertJQ(req("qt","/get", "id","4", "fl","id, cat_ss, child4, [child]") - ,"=={'doc':{\"id\":\"4\",\"cat_ss\":[\"grandChild\"], child4:[{\"id\":\"5\",\"cat_ss\":[\"greatGrandChild\"]}," + - "{\"id\":\"6\", \"cat_ss\":[\"greatGrandChild\"]}]}" + - " }}" - ); - - //add another child field name - doc = sdoc("id", "1", - "child5", Collections.singletonMap("add", sdocs(sdoc("id", "7", "cat_ss", "child"), - sdoc("id", "8", "cat_ss", "child") - )) - ); + assertJQ( + req("qt", "/get", "id", "4", "fl", "id, cat_ss, child4, [child]"), + "=={'doc':{\"id\":\"4\",\"cat_ss\":[\"grandChild\"], child4:[{\"id\":\"5\",\"cat_ss\":[\"greatGrandChild\"]}," + + "{\"id\":\"6\", \"cat_ss\":[\"greatGrandChild\"]}]}" + + " }}"); + + // add another child field name + doc = + sdoc( + "id", + "1", + "child5", + Collections.singletonMap( + "add", + sdocs(sdoc("id", "7", "cat_ss", "child"), sdoc("id", "8", "cat_ss", "child")))); addAndGetVersion(doc, null); assertU(commit()); - doc = sdoc("id", "1", - "new_s", Collections.singletonMap("add", "new string")); + doc = sdoc("id", "1", "new_s", Collections.singletonMap("add", "new string")); addAndGetVersion(doc, null); assertU(commit()); - // ensure the whole block has been committed correctly to the index. - assertJQ(req("q","id:1", "fl", "*, [child]"), + assertJQ( + req("q", "id:1", "fl", "*, [child]"), "/response/numFound==1", "/response/docs/[0]/id=='1'", "/response/docs/[0]/cat_ss/[0]==\"aaa\"", @@ -530,96 +682,114 @@ public void testBlockAtomicAdd() throws Exception { "/response/docs/[0]/child5/[0]/cat_ss/[0]=='child'", "/response/docs/[0]/child5/[1]/id=='8'", "/response/docs/[0]/child5/[1]/cat_ss/[0]=='child'", - "/response/docs/[0]/new_s=='new string'" - ); + "/response/docs/[0]/new_s=='new string'"); } @Test public void testBlockAtomicSet() throws Exception { SolrInputDocument sdoc2 = sdoc("id", "2", "cat_ss", "child"); - SolrInputDocument doc = sdoc("id", "1", - "cat_ss", new String[] {"aaa", "ccc"}, - "child1", Collections.singleton(sdoc2) - ); + SolrInputDocument doc = + sdoc( + "id", + "1", + "cat_ss", + new String[] {"aaa", "ccc"}, + "child1", + Collections.singleton(sdoc2)); assertU(adoc(doc)); BytesRef rootDocId = new BytesRef("1"); SolrCore core = h.getCore(); - SolrInputDocument block = RealTimeGetComponent.getInputDocument(core, rootDocId, rootDocId, null, null, ROOT_WITH_CHILDREN); + SolrInputDocument block = + RealTimeGetComponent.getInputDocument( + core, rootDocId, rootDocId, null, null, ROOT_WITH_CHILDREN); // assert block doc has child docs assertTrue(block.containsKey("child1")); - assertJQ(req("q","id:1") - ,"/response/numFound==0" - ); + assertJQ(req("q", "id:1"), "/response/numFound==0"); // commit the changes assertU(commit()); BytesRef childDocId = new BytesRef("2"); - assertEquals(sdoc2.toString(), removeSpecialFields( - RealTimeGetComponent.getInputDocument(core, childDocId, rootDocId, null, null, DOC) - ).toString()); + assertEquals( + sdoc2.toString(), + removeSpecialFields( + RealTimeGetComponent.getInputDocument(core, childDocId, rootDocId, null, null, DOC)) + .toString()); - assertJQ(req("q","id:1") - ,"/response/numFound==1" - ); + assertJQ(req("q", "id:1"), "/response/numFound==1"); - assertJQ(req("qt","/get", "id","1", "fl","id, cat_ss, child1, [child]") - ,"=={\"doc\":{'id':\"1\"" + - ", cat_ss:[\"aaa\",\"ccc\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}]" + - " }}" - ); + assertJQ( + req("qt", 
"/get", "id", "1", "fl", "id, cat_ss, child1, [child]"), + "=={\"doc\":{'id':\"1\"" + + ", cat_ss:[\"aaa\",\"ccc\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}]" + + " }}"); assertU(commit()); - assertJQ(req("qt","/get", "id","1", "fl","id, cat_ss, child1, [child]") - ,"=={\"doc\":{'id':\"1\"" + - ", cat_ss:[\"aaa\",\"ccc\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}]" + - " }}" - ); - - doc = sdoc("id", "1", "_root_", "1", - "cat_ss", Collections.singletonMap("set", Arrays.asList("aaa", "bbb")), - "child1", Collections.singletonMap("set", sdoc("id", "3", "cat_ss", "child"))); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id, cat_ss, child1, [child]"), + "=={\"doc\":{'id':\"1\"" + + ", cat_ss:[\"aaa\",\"ccc\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}]" + + " }}"); + + doc = + sdoc( + "id", + "1", + "_root_", + "1", + "cat_ss", + Collections.singletonMap("set", Arrays.asList("aaa", "bbb")), + "child1", + Collections.singletonMap("set", sdoc("id", "3", "cat_ss", "child"))); addAndGetVersion(doc, null); - - assertJQ(req("qt","/get", "id","1", "fl","id, cat_ss, child1, [child]") - ,"=={\"doc\":{'id':\"1\"" + - ", cat_ss:[\"aaa\",\"bbb\"], child1:{\"id\":\"3\",\"cat_ss\":[\"child\"]}" + - " }}" - ); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id, cat_ss, child1, [child]"), + "=={\"doc\":{'id':\"1\"" + + ", cat_ss:[\"aaa\",\"bbb\"], child1:{\"id\":\"3\",\"cat_ss\":[\"child\"]}" + + " }}"); assertU(commit()); - // a cut-n-paste of the first big query, but this time it will be retrieved from the index rather than the transaction log - // this requires ChildDocTransformer to get the whole block, since the document is retrieved using an index lookup - assertJQ(req("qt","/get", "id","1", "fl","id, cat_ss, child1, [child]") - ,"=={'doc':{'id':'1'" + - ", cat_ss:[\"aaa\",\"bbb\"], child1:{\"id\":\"3\",\"cat_ss\":[\"child\"]}" + - " }}" - ); - - doc = sdoc("id", "3", "_root_", "1", - "child2", Collections.singletonMap("set", sdoc("id", "4", "cat_ss", "child"))); + // a cut-n-paste of the first big query, but this time it will be retrieved from the index + // rather than the transaction log + // this requires ChildDocTransformer to get the whole block, since the document is retrieved + // using an index lookup + assertJQ( + req("qt", "/get", "id", "1", "fl", "id, cat_ss, child1, [child]"), + "=={'doc':{'id':'1'" + + ", cat_ss:[\"aaa\",\"bbb\"], child1:{\"id\":\"3\",\"cat_ss\":[\"child\"]}" + + " }}"); + + doc = + sdoc( + "id", + "3", + "_root_", + "1", + "child2", + Collections.singletonMap("set", sdoc("id", "4", "cat_ss", "child"))); addAndGetVersion(doc, null); - assertJQ(req("qt","/get", "id","1", "fl","id, cat_ss, child1, child2, [child]") - ,"=={'doc':{'id':'1'" + - ", cat_ss:[\"aaa\",\"bbb\"], child1:{\"id\":\"3\",\"cat_ss\":[\"child\"], child2:{\"id\":\"4\",\"cat_ss\":[\"child\"]}}" + - " }}" - ); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id, cat_ss, child1, child2, [child]"), + "=={'doc':{'id':'1'" + + ", cat_ss:[\"aaa\",\"bbb\"], child1:{\"id\":\"3\",\"cat_ss\":[\"child\"], child2:{\"id\":\"4\",\"cat_ss\":[\"child\"]}}" + + " }}"); - assertJQ(req("qt","/get", "id","3", "fl","id, cat_ss, child, child2, [child]") - ,"=={'doc':{\"id\":\"3\",\"cat_ss\":[\"child\"], child2:{\"id\":\"4\",\"cat_ss\":[\"child\"]}}" + - " }}" - ); + assertJQ( + req("qt", "/get", "id", "3", "fl", "id, cat_ss, child, child2, [child]"), + "=={'doc':{\"id\":\"3\",\"cat_ss\":[\"child\"], child2:{\"id\":\"4\",\"cat_ss\":[\"child\"]}}" + + " }}"); assertU(commit()); // ensure the whole block has 
been committed correctly to the index. - assertJQ(req("q","id:1", "fl", "*, [child]"), + assertJQ( + req("q", "id:1", "fl", "*, [child]"), "/response/numFound==1", "/response/docs/[0]/id=='1'", "/response/docs/[0]/cat_ss/[0]==\"aaa\"", @@ -627,123 +797,133 @@ public void testBlockAtomicSet() throws Exception { "/response/docs/[0]/child1/id=='3'", "/response/docs/[0]/child1/cat_ss/[0]=='child'", "/response/docs/[0]/child1/child2/id=='4'", - "/response/docs/[0]/child1/child2/cat_ss/[0]=='child'" - ); + "/response/docs/[0]/child1/child2/cat_ss/[0]=='child'"); } @Test public void testAtomicUpdateDeleteNoRootField() throws Exception { - SolrInputDocument doc = sdoc("id", "1", - "cat_ss", new String[]{"aaa", "bbb"}); + SolrInputDocument doc = sdoc("id", "1", "cat_ss", new String[] {"aaa", "bbb"}); assertU(adoc(doc)); - assertJQ(req("q", "id:1") - , "/response/numFound==0" - ); + assertJQ(req("q", "id:1"), "/response/numFound==0"); // commit the changes assertU(commit()); - assertJQ(req("q", "id:1"), + assertJQ( + req("q", "id:1"), "/response/numFound==1", "/response/docs/[0]/id=='1'", "/response/docs/[0]/cat_ss/[0]==\"aaa\"", - "/response/docs/[0]/cat_ss/[1]==\"bbb\"" - ); - - doc = sdoc("id", "1", - "child1", Collections.singletonMap("add", sdoc("id", "2", "cat_ss", "child"))); + "/response/docs/[0]/cat_ss/[1]==\"bbb\""); + + doc = + sdoc( + "id", + "1", + "child1", + Collections.singletonMap("add", sdoc("id", "2", "cat_ss", "child"))); addAndGetVersion(doc, null); // commit the changes assertU(commit()); - // assert that doc with id:1 was removed even though it did not have _root_:1 since it was not indexed with child documents. - assertJQ(req("q", "id:1", "fl", "*, [child]"), + // assert that doc with id:1 was removed even though it did not have _root_:1 since it was not + // indexed with child documents. 
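// [Illustrative aside, not part of this patch] The atomic child-"add" performed above,
// expressed as plain SolrJ. `client` is an assumed SolrClient bound to this core, and the
// field names mirror the test schema. An update addressed to the root document, as here,
// may omit _root_; one addressed to a child document must supply it, or Solr rejects the
// update (see testIncorrectlyUpdateChildDoc below).
SolrInputDocument child = new SolrInputDocument();
child.setField("id", "2");
child.setField("cat_ss", "child");
SolrInputDocument parent = new SolrInputDocument();
parent.setField("id", "1");
parent.setField("child1", Collections.singletonMap("add", child));
client.add(parent);
client.commit();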
+ assertJQ( + req("q", "id:1", "fl", "*, [child]"), "/response/numFound==1", "/response/docs/[0]/id=='1'", "/response/docs/[0]/cat_ss/[0]==\"aaa\"", "/response/docs/[0]/cat_ss/[1]==\"bbb\"", "/response/docs/[0]/child1/[0]/id==\"2\"", - "/response/docs/[0]/child1/[0]/cat_ss/[0]==\"child\"" - ); + "/response/docs/[0]/child1/[0]/cat_ss/[0]==\"child\""); } @Test public void testBlockAtomicRemove() throws Exception { SolrInputDocument sdoc2 = sdoc("id", "2", "cat_ss", "child"); - SolrInputDocument doc = sdoc("id", "1", - "cat_ss", new String[] {"aaa", "ccc"}, - "child1", sdocs(sdoc2, sdoc("id", "3", "cat_ss", "child")) - ); + SolrInputDocument doc = + sdoc( + "id", + "1", + "cat_ss", + new String[] {"aaa", "ccc"}, + "child1", + sdocs(sdoc2, sdoc("id", "3", "cat_ss", "child"))); assertU(adoc(doc)); BytesRef rootDocId = new BytesRef("1"); SolrCore core = h.getCore(); - SolrInputDocument block = RealTimeGetComponent.getInputDocument(core, rootDocId, rootDocId, null, null, ROOT_WITH_CHILDREN); + SolrInputDocument block = + RealTimeGetComponent.getInputDocument( + core, rootDocId, rootDocId, null, null, ROOT_WITH_CHILDREN); // assert block doc has child docs assertTrue(block.containsKey("child1")); - assertJQ(req("q","id:1") - ,"/response/numFound==0" - ); + assertJQ(req("q", "id:1"), "/response/numFound==0"); // commit the changes assertU(commit()); BytesRef childDocId = new BytesRef("2"); - assertEquals(sdoc2.toString(), removeSpecialFields( - RealTimeGetComponent.getInputDocument(core, childDocId, rootDocId, null, null, DOC) - ).toString()); + assertEquals( + sdoc2.toString(), + removeSpecialFields( + RealTimeGetComponent.getInputDocument(core, childDocId, rootDocId, null, null, DOC)) + .toString()); - assertJQ(req("q","id:1") - ,"/response/numFound==1" - ); + assertJQ(req("q", "id:1"), "/response/numFound==1"); - assertJQ(req("qt","/get", "id","1", "fl","id, cat_ss, child1, [child]") - ,"=={\"doc\":{'id':\"1\"" + - ", cat_ss:[\"aaa\",\"ccc\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}, {\"id\":\"3\",\"cat_ss\":[\"child\"]}]" + - " }}" - ); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id, cat_ss, child1, [child]"), + "=={\"doc\":{'id':\"1\"" + + ", cat_ss:[\"aaa\",\"ccc\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}, {\"id\":\"3\",\"cat_ss\":[\"child\"]}]" + + " }}"); assertU(commit()); - assertJQ(req("qt","/get", "id","1", "fl","id, cat_ss, child1, [child]") - ,"=={\"doc\":{'id':\"1\"" + - ", cat_ss:[\"aaa\",\"ccc\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}, {\"id\":\"3\",\"cat_ss\":[\"child\"]}]" + - " }}" - ); - - doc = sdoc("id", "1", - "child1", Collections.singletonMap("remove", sdoc("id", "3", "cat_ss", "child"))); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id, cat_ss, child1, [child]"), + "=={\"doc\":{'id':\"1\"" + + ", cat_ss:[\"aaa\",\"ccc\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}, {\"id\":\"3\",\"cat_ss\":[\"child\"]}]" + + " }}"); + + doc = + sdoc( + "id", + "1", + "child1", + Collections.singletonMap("remove", sdoc("id", "3", "cat_ss", "child"))); addAndGetVersion(doc, null); - - assertJQ(req("qt","/get", "id","1", "fl","id, cat_ss, child1, [child]") - ,"=={\"doc\":{'id':\"1\"" + - ", cat_ss:[\"aaa\",\"ccc\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}]" + - " }}" - ); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id, cat_ss, child1, [child]"), + "=={\"doc\":{'id':\"1\"" + + ", cat_ss:[\"aaa\",\"ccc\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}]" + + " }}"); assertU(commit()); - // a cut-n-paste of the first big query, but this time it will 
be retrieved from the index rather than the transaction log - // this requires ChildDocTransformer to get the whole block, since the document is retrieved using an index lookup - assertJQ(req("qt","/get", "id","1", "fl","id, cat_ss, child1, [child]") - ,"=={'doc':{'id':'1'" + - ", cat_ss:[\"aaa\",\"ccc\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}]" + - " }}" - ); + // a cut-n-paste of the first big query, but this time it will be retrieved from the index + // rather than the transaction log + // this requires ChildDocTransformer to get the whole block, since the document is retrieved + // using an index lookup + assertJQ( + req("qt", "/get", "id", "1", "fl", "id, cat_ss, child1, [child]"), + "=={'doc':{'id':'1'" + + ", cat_ss:[\"aaa\",\"ccc\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}]" + + " }}"); // ensure the whole block has been committed correctly to the index. - assertJQ(req("q","id:1", "fl", "*, [child]"), + assertJQ( + req("q", "id:1", "fl", "*, [child]"), "/response/numFound==1", "/response/docs/[0]/id=='1'", "/response/docs/[0]/cat_ss/[0]==\"aaa\"", "/response/docs/[0]/cat_ss/[1]==\"ccc\"", "/response/docs/[0]/child1/[0]/id=='2'", - "/response/docs/[0]/child1/[0]/cat_ss/[0]=='child'" - ); + "/response/docs/[0]/child1/[0]/cat_ss/[0]=='child'"); } @Test @@ -757,17 +937,27 @@ public void testBlockAtomicSetToEmpty() throws Exception { } private void testBlockAtomicSetToNullOrEmpty(boolean empty) throws Exception { - // latlon field is included to ensure reading from LatLonDocValuesField is working due to atomic update. + // latlon field is included to ensure reading from LatLonDocValuesField is working due to atomic + // update. // See SOLR-13966 for further details. SolrInputDocument sdoc2 = sdoc("id", "2", "cat_ss", "child"); - SolrInputDocument doc = sdoc("id", "1", "latlon", "0,0", - "cat_ss", new String[] {"aaa", "ccc"}, - "child1", sdocs(sdoc2, sdoc("id", "3", "cat_ss", "child"))); + SolrInputDocument doc = + sdoc( + "id", + "1", + "latlon", + "0,0", + "cat_ss", + new String[] {"aaa", "ccc"}, + "child1", + sdocs(sdoc2, sdoc("id", "3", "cat_ss", "child"))); assertU(adoc(doc)); BytesRef rootDocId = new BytesRef("1"); SolrCore core = h.getCore(); - SolrInputDocument block = RealTimeGetComponent.getInputDocument(core, rootDocId, rootDocId, null, null, ROOT_WITH_CHILDREN); + SolrInputDocument block = + RealTimeGetComponent.getInputDocument( + core, rootDocId, rootDocId, null, null, ROOT_WITH_CHILDREN); // assert block doc has child docs assertTrue(block.containsKey("child1")); @@ -777,38 +967,49 @@ private void testBlockAtomicSetToNullOrEmpty(boolean empty) throws Exception { assertU(commit()); BytesRef childDocId = new BytesRef("2"); - assertEquals(sdoc2.toString(), removeSpecialFields( - RealTimeGetComponent.getInputDocument(core, childDocId, rootDocId, null, null, DOC) - ).toString()); + assertEquals( + sdoc2.toString(), + removeSpecialFields( + RealTimeGetComponent.getInputDocument(core, childDocId, rootDocId, null, null, DOC)) + .toString()); assertJQ(req("q", "id:1"), "/response/numFound==1"); - assertJQ(req("qt", "/get", "id", "1", "fl", "id, latlon, cat_ss, child1, [child]"), - "=={\"doc\":{'id':\"1\", \"latlon\":\"0,0\"" + - ", cat_ss:[\"aaa\",\"ccc\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}, {\"id\":\"3\",\"cat_ss\":[\"child\"]}]}}"); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id, latlon, cat_ss, child1, [child]"), + "=={\"doc\":{'id':\"1\", \"latlon\":\"0,0\"" + + ", cat_ss:[\"aaa\",\"ccc\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}, 
{\"id\":\"3\",\"cat_ss\":[\"child\"]}]}}"); assertU(commit()); - assertJQ(req("qt", "/get", "id", "1", "fl", "id, latlon, cat_ss, child1, [child]"), - "=={\"doc\":{'id':\"1\", \"latlon\":\"0,0\"" + - ", cat_ss:[\"aaa\",\"ccc\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}, {\"id\":\"3\",\"cat_ss\":[\"child\"]}]}}"); + assertJQ( + req("qt", "/get", "id", "1", "fl", "id, latlon, cat_ss, child1, [child]"), + "=={\"doc\":{'id':\"1\", \"latlon\":\"0,0\"" + + ", cat_ss:[\"aaa\",\"ccc\"], child1:[{\"id\":\"2\",\"cat_ss\":[\"child\"]}, {\"id\":\"3\",\"cat_ss\":[\"child\"]}]}}"); - doc = sdoc("id", "1", "child1", Collections.singletonMap("set", empty ? new ArrayList<>() : null)); + doc = + sdoc( + "id", "1", "child1", Collections.singletonMap("set", empty ? new ArrayList<>() : null)); addAndGetVersion(doc, null); - assertJQ(req("qt", "/get", "id", "1", "fl", "id, latlon, cat_ss, child1, [child]"), + assertJQ( + req("qt", "/get", "id", "1", "fl", "id, latlon, cat_ss, child1, [child]"), "=={\"doc\":{'id':\"1\", \"latlon\":\"0,0\", cat_ss:[\"aaa\",\"ccc\"]}}"); assertU(commit()); - // a cut-n-paste of the first big query, but this time it will be retrieved from the index rather than the + // a cut-n-paste of the first big query, but this time it will be retrieved from the index + // rather than the // transaction log - // this requires ChildDocTransformer to get the whole block, since the document is retrieved using an index lookup - assertJQ(req("qt", "/get", "id", "1", "fl", "id, latlon, cat_ss, child1, [child]"), + // this requires ChildDocTransformer to get the whole block, since the document is retrieved + // using an index lookup + assertJQ( + req("qt", "/get", "id", "1", "fl", "id, latlon, cat_ss, child1, [child]"), "=={\"doc\":{'id':\"1\", \"latlon\":\"0,0\", cat_ss:[\"aaa\",\"ccc\"]}}"); // ensure the whole block has been committed correctly to the index. 
- assertJQ(req("q", "id:1", "fl", "*, [child]"), + assertJQ( + req("q", "id:1", "fl", "*, [child]"), "/response/numFound==1", "/response/docs/[0]/id=='1'", "/response/docs/[0]/latlon=='0,0'", @@ -817,24 +1018,31 @@ private void testBlockAtomicSetToNullOrEmpty(boolean empty) throws Exception { } public void testIncorrectlyUpdateChildDoc() throws Exception { - SolrInputDocument doc = sdoc("id", "1", - "child", sdoc("id", "2")); + SolrInputDocument doc = sdoc("id", "1", "child", sdoc("id", "2")); assertU(adoc(doc)); assertU(commit()); // did not add _root_ like we should have - SolrException e = expectThrows(SolrException.class, () -> { - addAndGetVersion( - sdoc("id", "2", "grandchild", Collections.singletonMap("set", sdoc("id", "3"))), null); - }); - assertTrue(e.toString(), e.getMessage().contains("Attempted an atomic/partial update to a " + - "child doc without indicating the _root_ somehow.")); + SolrException e = + expectThrows( + SolrException.class, + () -> { + addAndGetVersion( + sdoc("id", "2", "grandchild", Collections.singletonMap("set", sdoc("id", "3"))), + null); + }); + assertTrue( + e.toString(), + e.getMessage() + .contains( + "Attempted an atomic/partial update to a " + + "child doc without indicating the _root_ somehow.")); } private SolrInputDocument removeSpecialFields(SolrInputDocument doc) { final Iterator fieldIter = doc.iterator(); while (fieldIter.hasNext()) { - SolrInputField field = fieldIter.next(); + SolrInputField field = fieldIter.next(); if (field.getName().matches("^_.*_$")) { fieldIter.remove(); } @@ -843,15 +1051,17 @@ private SolrInputDocument removeSpecialFields(SolrInputDocument doc) { } @SuppressWarnings({"unchecked"}) - private static void assertDocContainsSubset(SolrInputDocument subsetDoc, SolrInputDocument fullDoc) { - for(SolrInputField field: subsetDoc) { + private static void assertDocContainsSubset( + SolrInputDocument subsetDoc, SolrInputDocument fullDoc) { + for (SolrInputField field : subsetDoc) { String fieldName = field.getName(); assertTrue("doc should contain field: " + fieldName, fullDoc.containsKey(fieldName)); Object fullValue = fullDoc.getField(fieldName).getValue(); - if(fullValue instanceof Collection) { + if (fullValue instanceof Collection) { ((Collection) fullValue).containsAll(field.getValues()); } else { - assertEquals("docs should have the same value for field: " + fieldName, field.getValue(), fullValue); + assertEquals( + "docs should have the same value for field: " + fieldName, field.getValue(), fullValue); } } } diff --git a/solr/core/src/test/org/apache/solr/update/processor/ParsingFieldUpdateProcessorsTest.java b/solr/core/src/test/org/apache/solr/update/processor/ParsingFieldUpdateProcessorsTest.java index 8b04101feb5..7661c48064d 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/ParsingFieldUpdateProcessorsTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/ParsingFieldUpdateProcessorsTest.java @@ -34,20 +34,19 @@ import java.util.Locale; import java.util.Map; import java.util.Set; - -import org.hamcrest.core.IsInstanceOf; - import org.apache.solr.common.SolrInputDocument; import org.apache.solr.schema.IndexSchema; +import org.hamcrest.core.IsInstanceOf; import org.junit.BeforeClass; + /** - * Tests for the field mutating update processors - * that parse Dates, Longs, Doubles, and Booleans. + * Tests for the field mutating update processors that parse Dates, Longs, Doubles, and Booleans. 
*/ public class ParsingFieldUpdateProcessorsTest extends UpdateProcessorTestBase { private static final double EPSILON = 1E-15; private static final DateTimeFormatter isoDateOptionalTimeFormatter = - DateTimeFormatter.ofPattern("yyyy-MM-dd['T'HH:mm[:ss[.SSS]]][z", Locale.ROOT).withZone(ZoneOffset.UTC); + DateTimeFormatter.ofPattern("yyyy-MM-dd['T'HH:mm[:ss[.SSS]]][z", Locale.ROOT) + .withZone(ZoneOffset.UTC); private static final IsInstanceOf IS_BOOLEAN = new IsInstanceOf(Boolean.class); private static final IsInstanceOf IS_STRING = new IsInstanceOf(String.class); @@ -56,7 +55,7 @@ public class ParsingFieldUpdateProcessorsTest extends UpdateProcessorTestBase { private static final IsInstanceOf IS_DOUBLE = new IsInstanceOf(Double.class); private static final IsInstanceOf IS_INTEGER = new IsInstanceOf(Integer.class); private static final IsInstanceOf IS_LONG = new IsInstanceOf(Long.class); - + @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig-parsing-update-processor-chains.xml", "schema12.xml"); @@ -86,66 +85,77 @@ public void testParseTrieDateRoundTrip() throws Exception { assertQ(req("id:39"), "//date[@name='date_tdt'][.='" + dateString + "']"); } - public void testParseDateFieldNotInSchema() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); assertNull(schema.getFieldOrNull("not_in_schema")); String dateString = "2010-11-12T13:14:15.168Z"; - SolrInputDocument d = processAdd("parse-date-no-run-processor", - doc(f("id", "18"), f("not_in_schema", dateString))); + SolrInputDocument d = + processAdd( + "parse-date-no-run-processor", doc(f("id", "18"), f("not_in_schema", dateString))); assertNotNull(d); assertThat(d.getFieldValue("not_in_schema"), IS_DATE); - assertEquals(Instant.parse(dateString), ((Date)d.getFieldValue("not_in_schema")).toInstant()); - - d = processAdd("parse-date-no-run-processor", - doc(f("id", "36"), f("not_in_schema", "not a date", dateString))); + assertEquals(Instant.parse(dateString), ((Date) d.getFieldValue("not_in_schema")).toInstant()); + + d = + processAdd( + "parse-date-no-run-processor", + doc(f("id", "36"), f("not_in_schema", "not a date", dateString))); assertNotNull(d); for (Object val : d.getFieldValues("not_in_schema")) { - // check that nothing was mutated, since not all field values are parseable as dates + // check that nothing was mutated, since not all field values are parseable as dates assertThat(val, IS_STRING); } - d = processAdd("parse-date-no-run-processor", - doc(f("id", "72"), f("not_in_schema", dateString, "not a date"))); + d = + processAdd( + "parse-date-no-run-processor", + doc(f("id", "72"), f("not_in_schema", dateString, "not a date"))); assertNotNull(d); for (Object val : d.getFieldValues("not_in_schema")) { - // check again that nothing was mutated, but with a valid date first this time + // check again that nothing was mutated, but with a valid date first this time assertThat(val, IS_STRING); } } - + public void testParseDateNonUTCdefaultTimeZoneRoundTrip() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull("date_dt")); // should match "*_dt" dynamic field - String dateStringNoTimeZone = "2010-11-12T13:14:15.168"; + String dateStringNoTimeZone = "2010-11-12T13:14:15.168"; String dateStringUTC = dateStringNoTimeZone + "Z"; // dateStringNoTimeZone interpreted as being in timeZone America/New_York, then printed as UTC String dateStringUSEasternTimeAsUTC = "2010-11-12T18:14:15.168Z"; - - SolrInputDocument d = processAdd - 
("parse-date-non-UTC-defaultTimeZone", doc(f("id", "99"), f("dateUTC_dt", dateStringUTC), - f("dateNoTimeZone_dt", dateStringNoTimeZone))); + + SolrInputDocument d = + processAdd( + "parse-date-non-UTC-defaultTimeZone", + doc( + f("id", "99"), + f("dateUTC_dt", dateStringUTC), + f("dateNoTimeZone_dt", dateStringNoTimeZone))); assertNotNull(d); assertThat(d.getFieldValue("dateUTC_dt"), IS_DATE); assertThat(d.getFieldValue("dateNoTimeZone_dt"), IS_DATE); assertU(commit()); - assertQ(req("id:99") - ,"//date[@name='dateUTC_dt'][.='" + dateStringUTC + "']" - ,"//date[@name='dateNoTimeZone_dt'][.='" + dateStringUSEasternTimeAsUTC + "']"); + assertQ( + req("id:99"), + "//date[@name='dateUTC_dt'][.='" + dateStringUTC + "']", + "//date[@name='dateNoTimeZone_dt'][.='" + dateStringUSEasternTimeAsUTC + "']"); } - + public void testParseDateExplicitNotInSchemaSelector() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); assertNull(schema.getFieldOrNull("not_in_schema")); String dateString = "2010-11-12T13:14:15.168Z"; - SolrInputDocument d = processAdd("parse-date-explicit-not-in-schema-selector-no-run-processor", - doc(f("id", "88"), f("not_in_schema", dateString))); + SolrInputDocument d = + processAdd( + "parse-date-explicit-not-in-schema-selector-no-run-processor", + doc(f("id", "88"), f("not_in_schema", dateString))); assertNotNull(d); assertThat(d.getFieldValue("not_in_schema"), IS_DATE); - assertEquals(Instant.parse(dateString), ((Date)d.getFieldValue("not_in_schema")).toInstant()); + assertEquals(Instant.parse(dateString), ((Date) d.getFieldValue("not_in_schema")).toInstant()); } public void testParseDateExplicitTypeClassSelector() throws Exception { @@ -154,58 +164,66 @@ public void testParseDateExplicitTypeClassSelector() throws Exception { String dateString = "2010-11-12T13:14:15.168Z"; SolrInputDocument d; if (schema.getField("date_dt").getType().isPointField()) { - d = processAdd("parse-date-explicit-typeclass-point-selector-no-run-processor", - doc(f("id", "77"), f("date_dt", dateString))); + d = + processAdd( + "parse-date-explicit-typeclass-point-selector-no-run-processor", + doc(f("id", "77"), f("date_dt", dateString))); } else { - d = processAdd("parse-date-explicit-typeclass-selector-no-run-processor", - doc(f("id", "77"), f("date_dt", dateString))); + d = + processAdd( + "parse-date-explicit-typeclass-selector-no-run-processor", + doc(f("id", "77"), f("date_dt", dateString))); } assertNotNull(d); assertThat(d.getFieldValue("date_dt"), IS_DATE); - assertEquals(Instant.parse(dateString), ((Date)d.getFieldValue("date_dt")).toInstant()); + assertEquals(Instant.parse(dateString), ((Date) d.getFieldValue("date_dt")).toInstant()); } public void testParseUSPacificDate() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); assertNull(schema.getFieldOrNull("not_in_schema")); - String dateString = "8/9/2010"; // Interpreted as 00:00 US Pacific Daylight Time = UTC+07:00 - SolrInputDocument d = processAdd("US-Pacific-parse-date-no-run-processor", - doc(f("id", "288"), f("not_in_schema", dateString))); + String dateString = "8/9/2010"; // Interpreted as 00:00 US Pacific Daylight Time = UTC+07:00 + SolrInputDocument d = + processAdd( + "US-Pacific-parse-date-no-run-processor", + doc(f("id", "288"), f("not_in_schema", dateString))); assertNotNull(d); assertThat(d.getFieldValue("not_in_schema"), IS_DATE); - assertEquals(Instant.parse("2010-08-09T07:00:00.000Z"), ((Date)d.getFieldValue("not_in_schema")).toInstant()); + assertEquals( + 
Instant.parse("2010-08-09T07:00:00.000Z"), + ((Date) d.getFieldValue("not_in_schema")).toInstant()); } - + public void testParseDateManyFormats() throws Exception { - String[] formatExamples = { - "2010-01-15T00:00:00.000Z", - "2010-01-15T00:00:00,000Z", - "2010-01-15T00:00:00.000", - "2010-01-15T00:00:00,000", - "2010-01-15T00:00:00Z", - "2010-01-15T00:00:00", - "2010-01-15T00:00Z", - "2010-01-15T00:00", - "2010-01-15 00:00:00.000Z", - "2010-01-15 00:00:00,000Z", - "2010-01-15 00:00:00.000", - "2010-01-15 00:00:00,000", - "2010-01-15 00:00:00Z", - "2010-01-15 00:00:00", - "2010-01-15 00:00Z", - "2010-01-15 00:00", - "2010-01-15 12:00 AM", - "2010-01-15 12:00AM", - "2010-01-15", - "Fri Jan 15 00:00:00 +0000 2010", - "Fri Jan 15 00:00:00 2010 +00:00", - "Fri Jan 15 00:00:00 2010", - "Fri, 15 Jan 2010 00:00:00 +00:00", - "Friday, 15-Jan-10 00:00:00 +00:00", - "Friday, January 15, 2010", - "January 15, 2010", - "Jan. 15, 2010" + String[] formatExamples = { + "2010-01-15T00:00:00.000Z", + "2010-01-15T00:00:00,000Z", + "2010-01-15T00:00:00.000", + "2010-01-15T00:00:00,000", + "2010-01-15T00:00:00Z", + "2010-01-15T00:00:00", + "2010-01-15T00:00Z", + "2010-01-15T00:00", + "2010-01-15 00:00:00.000Z", + "2010-01-15 00:00:00,000Z", + "2010-01-15 00:00:00.000", + "2010-01-15 00:00:00,000", + "2010-01-15 00:00:00Z", + "2010-01-15 00:00:00", + "2010-01-15 00:00Z", + "2010-01-15 00:00", + "2010-01-15 12:00 AM", + "2010-01-15 12:00AM", + "2010-01-15", + "Fri Jan 15 00:00:00 +0000 2010", + "Fri Jan 15 00:00:00 2010 +00:00", + "Fri Jan 15 00:00:00 2010", + "Fri, 15 Jan 2010 00:00:00 +00:00", + "Friday, 15-Jan-10 00:00:00 +00:00", + "Friday, January 15, 2010", + "January 15, 2010", + "Jan. 15, 2010" }; IndexSchema schema = h.getCore().getLatestSchema(); @@ -213,43 +231,53 @@ public void testParseDateManyFormats() throws Exception { Instant expectedInstant = Instant.parse(formatExamples[0]); - for (int i = 0 ; i < formatExamples.length ; ++i) { + for (int i = 0; i < formatExamples.length; ++i) { String dateString = formatExamples[i]; String id = "95" + i; - SolrInputDocument d = processAdd("parse-date-many-formats-no-run-processor", - doc(f("id", id), f("dateUTC_dt", dateString))); + SolrInputDocument d = + processAdd( + "parse-date-many-formats-no-run-processor", + doc(f("id", id), f("dateUTC_dt", dateString))); assertNotNull(d); - assertThat("index: " + i + " date '" + dateString + "' is not mutated to a Date", - d.getFieldValue("dateUTC_dt"), IS_DATE); - assertEquals("date '" + dateString + "' mismatched milliseconds", - expectedInstant, ((Date)d.getFieldValue("dateUTC_dt")).toInstant()); + assertThat( + "index: " + i + " date '" + dateString + "' is not mutated to a Date", + d.getFieldValue("dateUTC_dt"), + IS_DATE); + assertEquals( + "date '" + dateString + "' mismatched milliseconds", + expectedInstant, + ((Date) d.getFieldValue("dateUTC_dt")).toInstant()); } } - + public void testParseFrenchDate() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); assertNull(schema.getFieldOrNull("not_in_schema")); String frenchDateString = "le vendredi 15 janvier 2010"; String dateString = "2010-01-15T00:00:00.000Z"; - SolrInputDocument d = processAdd("parse-french-date-UTC-defaultTimeZone-no-run-processor", - doc(f("id", "88"), f("not_in_schema", frenchDateString))); + SolrInputDocument d = + processAdd( + "parse-french-date-UTC-defaultTimeZone-no-run-processor", + doc(f("id", "88"), f("not_in_schema", frenchDateString))); assertNotNull(d); assertThat(d.getFieldValue("not_in_schema"), IS_DATE); - 
assertEquals(Instant.parse(dateString), ((Date)d.getFieldValue("not_in_schema")).toInstant()); + assertEquals(Instant.parse(dateString), ((Date) d.getFieldValue("not_in_schema")).toInstant()); } - + public void testFailedParseMixedDate() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); assertNull(schema.getFieldOrNull("not_in_schema")); - Map mixed = new HashMap<>(); - String[] dateStrings = { "2020-05-13T18:47", "1989-12-14", "1682-07-22T18:33:00.000Z" }; + Map mixed = new HashMap<>(); + String[] dateStrings = {"2020-05-13T18:47", "1989-12-14", "1682-07-22T18:33:00.000Z"}; for (String dateString : dateStrings) { mixed.put(parse(isoDateOptionalTimeFormatter, dateString), dateString); } Double extraDouble = 29.554d; mixed.put(extraDouble, extraDouble); // Double-typed field value - SolrInputDocument d = processAdd("parse-date-no-run-processor", - doc(f("id", "7201"), f("not_in_schema", mixed.values()))); + SolrInputDocument d = + processAdd( + "parse-date-no-run-processor", + doc(f("id", "7201"), f("not_in_schema", mixed.values()))); assertNotNull(d); boolean foundDouble = false; for (Object o : d.getFieldValues("not_in_schema")) { @@ -271,38 +299,42 @@ public void testParseIntRoundTrip() throws Exception { int value = 1089883491; String intString1 = "1089883491"; String intString2 = "1,089,883,491"; - SolrInputDocument d = processAdd("parse-int", - doc(f("id", "113"), f("int1_i", intString1), f("int2_i", intString2))); + SolrInputDocument d = + processAdd( + "parse-int", doc(f("id", "113"), f("int1_i", intString1), f("int2_i", intString2))); assertNotNull(d); assertThat(d.getFieldValue("int1_i"), IS_INTEGER); - assertEquals(value, ((Integer)d.getFieldValue("int1_i")).intValue()); + assertEquals(value, ((Integer) d.getFieldValue("int1_i")).intValue()); assertThat(d.getFieldValue("int2_i"), IS_INTEGER); - assertEquals(value, ((Integer)d.getFieldValue("int2_i")).intValue()); + assertEquals(value, ((Integer) d.getFieldValue("int2_i")).intValue()); assertU(commit()); - assertQ(req("id:113") - ,"//int[@name='int1_i'][.='" + value + "']" - ,"//int[@name='int2_i'][.='" + value + "']"); + assertQ( + req("id:113"), + "//int[@name='int1_i'][.='" + value + "']", + "//int[@name='int2_i'][.='" + value + "']"); } public void testParseIntNonRootLocale() throws Exception { - final DecimalFormatSymbols ru_RU = DecimalFormatSymbols.getInstance(new Locale("ru","RU")); + final DecimalFormatSymbols ru_RU = DecimalFormatSymbols.getInstance(new Locale("ru", "RU")); final char groupChar = ru_RU.getGroupingSeparator(); - + int value = 1089883491; String intString1 = "1089883491"; - String intString2 = "1"+groupChar+"089"+groupChar+"883"+groupChar+"491"; - + String intString2 = "1" + groupChar + "089" + groupChar + "883" + groupChar + "491"; + IndexSchema schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull("int_i")); // should match dynamic field "*_i" assertNull(schema.getFieldOrNull("not_in_schema")); - SolrInputDocument d = processAdd("parse-int-russian-no-run-processor", - doc(f("id", "113"), f("int_i", intString1), f("not_in_schema", intString2))); + SolrInputDocument d = + processAdd( + "parse-int-russian-no-run-processor", + doc(f("id", "113"), f("int_i", intString1), f("not_in_schema", intString2))); assertNotNull(d); assertThat(d.getFieldValue("int_i"), IS_INTEGER); - assertEquals(value, ((Integer)d.getFieldValue("int_i")).intValue()); + assertEquals(value, ((Integer) d.getFieldValue("int_i")).intValue()); assertThat(d.getFieldValue("not_in_schema"), 
IS_INTEGER); - assertEquals(value, ((Integer)d.getFieldValue("not_in_schema")).intValue()); + assertEquals(value, ((Integer) d.getFieldValue("not_in_schema")).intValue()); } public void testParseTrieIntRoundTrip() throws Exception { @@ -312,46 +344,54 @@ public void testParseTrieIntRoundTrip() throws Exception { int value = 1089883491; String intString1 = "1089883491"; String intString2 = "1,089,883,491"; - SolrInputDocument d = processAdd("parse-int", - doc(f("id", "113"), f("int1_ti", intString1), f("int2_ti", intString2))); + SolrInputDocument d = + processAdd( + "parse-int", doc(f("id", "113"), f("int1_ti", intString1), f("int2_ti", intString2))); assertNotNull(d); assertThat(d.getFieldValue("int1_ti"), IS_INTEGER); - assertEquals(value, ((Integer)d.getFieldValue("int1_ti")).intValue()); + assertEquals(value, ((Integer) d.getFieldValue("int1_ti")).intValue()); assertThat(d.getFieldValue("int2_ti"), IS_INTEGER); - assertEquals(value, ((Integer)d.getFieldValue("int2_ti")).intValue()); + assertEquals(value, ((Integer) d.getFieldValue("int2_ti")).intValue()); assertU(commit()); - assertQ(req("id:113") - ,"//int[@name='int1_ti'][.='" + value + "']" - ,"//int[@name='int2_ti'][.='" + value + "']"); + assertQ( + req("id:113"), + "//int[@name='int1_ti'][.='" + value + "']", + "//int[@name='int2_ti'][.='" + value + "']"); } public void testIntOverflow() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); assertNull(schema.getFieldOrNull("not_in_schema1")); assertNull(schema.getFieldOrNull("not_in_schema2")); - long longValue1 = (long)Integer.MAX_VALUE + 100L; - long longValue2 = (long)Integer.MIN_VALUE - 100L; + long longValue1 = (long) Integer.MAX_VALUE + 100L; + long longValue2 = (long) Integer.MIN_VALUE - 100L; String longString1 = Long.toString(longValue1); String longString2 = Long.toString(longValue2); - SolrInputDocument d = processAdd("parse-int-no-run-processor", - doc(f("id", "282"), f("not_in_schema1", longString1), f("not_in_schema2", longString2))); + SolrInputDocument d = + processAdd( + "parse-int-no-run-processor", + doc( + f("id", "282"), + f("not_in_schema1", longString1), + f("not_in_schema2", longString2))); assertNotNull(d); assertThat(d.getFieldValue("not_in_schema1"), IS_STRING); assertThat(d.getFieldValue("not_in_schema2"), IS_STRING); } - + public void testFailedParseMixedInt() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); assertNull(schema.getFieldOrNull("not_in_schema")); - Map mixed = new HashMap<>(); + Map mixed = new HashMap<>(); Float floatVal = 294423.0f; mixed.put(85, "85"); mixed.put(floatVal, floatVal); // Float-typed field value mixed.put(-2894518, "-2,894,518"); mixed.put(1879472193, "1,879,472,193"); - SolrInputDocument d = processAdd("parse-int-no-run-processor", - doc(f("id", "7202"), f("not_in_schema", mixed.values()))); + SolrInputDocument d = + processAdd( + "parse-int-no-run-processor", doc(f("id", "7202"), f("not_in_schema", mixed.values()))); assertNotNull(d); boolean foundFloat = false; for (Object o : d.getFieldValues("not_in_schema")) { @@ -373,38 +413,43 @@ public void testParseLongRoundTrip() throws Exception { long value = 1089883491L; String longString1 = "1089883491"; String longString2 = "1,089,883,491"; - SolrInputDocument d = processAdd("parse-long", - doc(f("id", "113"), f("long1_l", longString1), f("long2_l", longString2))); + SolrInputDocument d = + processAdd( + "parse-long", + doc(f("id", "113"), f("long1_l", longString1), f("long2_l", longString2))); assertNotNull(d); 
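// [Illustrative aside, not part of this patch] Conceptually, the parse-long chain accepts
// grouped numerics such as "1,089,883,491" via a locale-aware parse; a minimal sketch with
// java.text.NumberFormat under Locale.ROOT (the processor's internals may differ).
// NumberFormat.parse throws java.text.ParseException, covered by these tests' `throws Exception`.
java.text.NumberFormat nf = java.text.NumberFormat.getInstance(Locale.ROOT);
long parsed = nf.parse("1,089,883,491").longValue(); // -> 1089883491L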
assertThat(d.getFieldValue("long1_l"), IS_LONG); assertEquals(value, ((Long) d.getFieldValue("long1_l")).longValue()); assertThat(d.getFieldValue("long2_l"), IS_LONG); - assertEquals(value, ((Long)d.getFieldValue("long2_l")).longValue()); - + assertEquals(value, ((Long) d.getFieldValue("long2_l")).longValue()); + assertU(commit()); - assertQ(req("id:113") - ,"//long[@name='long1_l'][.='" + value + "']" - ,"//long[@name='long2_l'][.='" + value + "']"); + assertQ( + req("id:113"), + "//long[@name='long1_l'][.='" + value + "']", + "//long[@name='long2_l'][.='" + value + "']"); } public void testParseLongNonRootLocale() throws Exception { - final DecimalFormatSymbols ru_RU = DecimalFormatSymbols.getInstance(new Locale("ru","RU")); + final DecimalFormatSymbols ru_RU = DecimalFormatSymbols.getInstance(new Locale("ru", "RU")); final char groupChar = ru_RU.getGroupingSeparator(); - + long value = 1089883491L; String longString1 = "1089883491"; - String longString2 = "1"+groupChar+"089"+groupChar+"883"+groupChar+"491"; - + String longString2 = "1" + groupChar + "089" + groupChar + "883" + groupChar + "491"; + IndexSchema schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull("long_l")); // should match dynamic field "*_l" assertNull(schema.getFieldOrNull("not_in_schema")); - SolrInputDocument d = processAdd("parse-long-russian-no-run-processor", - doc(f("id", "113"), f("long_l", longString1), f("not_in_schema", longString2))); + SolrInputDocument d = + processAdd( + "parse-long-russian-no-run-processor", + doc(f("id", "113"), f("long_l", longString1), f("not_in_schema", longString2))); assertNotNull(d); assertThat(d.getFieldValue("long_l"), IS_LONG); - assertEquals(value, ((Long)d.getFieldValue("long_l")).longValue()); + assertEquals(value, ((Long) d.getFieldValue("long_l")).longValue()); assertThat(d.getFieldValue("not_in_schema"), IS_LONG); - assertEquals(value, ((Long)d.getFieldValue("not_in_schema")).longValue()); + assertEquals(value, ((Long) d.getFieldValue("not_in_schema")).longValue()); } public void testParseTrieLongRoundTrip() throws Exception { @@ -414,31 +459,36 @@ public void testParseTrieLongRoundTrip() throws Exception { long value = 1089883491L; String longString1 = "1089883491"; String longString2 = "1,089,883,491"; - SolrInputDocument d = processAdd("parse-long", - doc(f("id", "113"), f("long1_tl", longString1), f("long2_tl", longString2))); + SolrInputDocument d = + processAdd( + "parse-long", + doc(f("id", "113"), f("long1_tl", longString1), f("long2_tl", longString2))); assertNotNull(d); assertThat(d.getFieldValue("long1_tl"), IS_LONG); - assertEquals(value, ((Long)d.getFieldValue("long1_tl")).longValue()); + assertEquals(value, ((Long) d.getFieldValue("long1_tl")).longValue()); assertThat(d.getFieldValue("long2_tl"), IS_LONG); - assertEquals(value, ((Long)d.getFieldValue("long2_tl")).longValue()); + assertEquals(value, ((Long) d.getFieldValue("long2_tl")).longValue()); assertU(commit()); - assertQ(req("id:113") - ,"//long[@name='long1_tl'][.='" + value + "']" - ,"//long[@name='long2_tl'][.='" + value + "']"); + assertQ( + req("id:113"), + "//long[@name='long1_tl'][.='" + value + "']", + "//long[@name='long2_tl'][.='" + value + "']"); } public void testFailedParseMixedLong() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); assertNull(schema.getFieldOrNull("not_in_schema")); - Map mixed = new HashMap<>(); + Map mixed = new HashMap<>(); Float floatVal = 294423.0f; mixed.put(85L, "85"); mixed.put(floatVal, floatVal); // Float-typed field 
value mixed.put(-2894518L, "-2,894,518"); mixed.put(1879472193L, "1,879,472,193"); - SolrInputDocument d = processAdd("parse-long-no-run-processor", - doc(f("id", "7204"), f("not_in_schema", mixed.values()))); + SolrInputDocument d = + processAdd( + "parse-long-no-run-processor", + doc(f("id", "7204"), f("not_in_schema", mixed.values()))); assertNotNull(d); boolean foundFloat = false; for (Object o : d.getFieldValues("not_in_schema")) { @@ -460,40 +510,44 @@ public void testParseFloatRoundTrip() throws Exception { float value = 10898.83491f; String floatString1 = "10898.83491"; String floatString2 = "10,898.83491"; - SolrInputDocument d = processAdd("parse-float", - doc(f("id", "128"), f("float1_f", floatString1), f("float2_f", floatString2))); + SolrInputDocument d = + processAdd( + "parse-float", + doc(f("id", "128"), f("float1_f", floatString1), f("float2_f", floatString2))); assertNotNull(d); assertThat(d.getFieldValue("float1_f"), IS_FLOAT); - assertEquals(value, (Float)d.getFieldValue("float1_f"), EPSILON); + assertEquals(value, (Float) d.getFieldValue("float1_f"), EPSILON); assertThat(d.getFieldValue("float2_f"), IS_FLOAT); - assertEquals(value, (Float)d.getFieldValue("float2_f"), EPSILON); + assertEquals(value, (Float) d.getFieldValue("float2_f"), EPSILON); assertU(commit()); - assertQ(req("id:128") - ,"//float[@name='float1_f'][.='" + value + "']" - ,"//float[@name='float2_f'][.='" + value + "']"); + assertQ( + req("id:128"), + "//float[@name='float1_f'][.='" + value + "']", + "//float[@name='float2_f'][.='" + value + "']"); } public void testParseFloatNonRootLocale() throws Exception { - final DecimalFormatSymbols fr_FR = DecimalFormatSymbols.getInstance(new Locale("fr","FR")); + final DecimalFormatSymbols fr_FR = DecimalFormatSymbols.getInstance(new Locale("fr", "FR")); final char groupChar = fr_FR.getGroupingSeparator(); final char decimalChar = fr_FR.getDecimalSeparator(); float value = 10898.83491F; - String floatString1 = "10898"+decimalChar+"83491"; - String floatString2 = "10"+groupChar+"898"+decimalChar+"83491"; - + String floatString1 = "10898" + decimalChar + "83491"; + String floatString2 = "10" + groupChar + "898" + decimalChar + "83491"; + IndexSchema schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull("float_f")); // should match dynamic field "*_f" assertNull(schema.getFieldOrNull("not_in_schema")); - SolrInputDocument d = processAdd("parse-float-french-no-run-processor", - doc(f("id", "140"), f("float_f", floatString1), - f("not_in_schema", floatString2))); + SolrInputDocument d = + processAdd( + "parse-float-french-no-run-processor", + doc(f("id", "140"), f("float_f", floatString1), f("not_in_schema", floatString2))); assertNotNull(d); assertThat(d.getFieldValue("float_f"), IS_FLOAT); - assertEquals(value, (Float)d.getFieldValue("float_f"), EPSILON); + assertEquals(value, (Float) d.getFieldValue("float_f"), EPSILON); assertThat(d.getFieldValue("not_in_schema"), IS_FLOAT); - assertEquals(value, (Float)d.getFieldValue("not_in_schema"), EPSILON); + assertEquals(value, (Float) d.getFieldValue("not_in_schema"), EPSILON); } public void testParseTrieFloatRoundTrip() throws Exception { @@ -503,30 +557,35 @@ public void testParseTrieFloatRoundTrip() throws Exception { float value = 10898.83491f; String floatString1 = "10898.83491"; String floatString2 = "10,898.83491"; - SolrInputDocument d = processAdd("parse-float", - doc(f("id", "728"), f("float1_tf", floatString1), f("float2_tf", floatString2))); + SolrInputDocument d = + processAdd( + 
"parse-float", + doc(f("id", "728"), f("float1_tf", floatString1), f("float2_tf", floatString2))); assertNotNull(d); assertThat(d.getFieldValue("float1_tf"), IS_FLOAT); - assertEquals(value, (Float)d.getFieldValue("float1_tf"), EPSILON); + assertEquals(value, (Float) d.getFieldValue("float1_tf"), EPSILON); assertThat(d.getFieldValue("float2_tf"), IS_FLOAT); - assertEquals(value, (Float)d.getFieldValue("float2_tf"), EPSILON); + assertEquals(value, (Float) d.getFieldValue("float2_tf"), EPSILON); assertU(commit()); - assertQ(req("id:728") - ,"//float[@name='float1_tf'][.='" + value + "']" - ,"//float[@name='float2_tf'][.='" + value + "']"); + assertQ( + req("id:728"), + "//float[@name='float1_tf'][.='" + value + "']", + "//float[@name='float2_tf'][.='" + value + "']"); } - + public void testMixedFloats() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull("float_tf")); // should match dynamic field "*_tf" - Map mixedFloats = new HashMap<>(); + Map mixedFloats = new HashMap<>(); mixedFloats.put(85.0f, "85"); mixedFloats.put(2894518.0f, "2,894,518"); mixedFloats.put(2.94423E-9f, 2.94423E-9f); // Float-typed field value mixedFloats.put(48794721.937f, "48,794,721.937"); - SolrInputDocument d = processAdd("parse-float-no-run-processor", - doc(f("id", "342"), f("float_tf", mixedFloats.values()))); + SolrInputDocument d = + processAdd( + "parse-float-no-run-processor", + doc(f("id", "342"), f("float_tf", mixedFloats.values()))); assertNotNull(d); for (Object o : d.getFieldValues("float_tf")) { assertThat(o, IS_FLOAT); @@ -538,14 +597,16 @@ public void testMixedFloats() throws Exception { public void testFailedParseMixedFloat() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); assertNull(schema.getFieldOrNull("not_in_schema")); - Map mixed = new HashMap<>(); + Map mixed = new HashMap<>(); Long longVal = 294423L; mixed.put(85L, "85"); mixed.put(longVal, longVal); // Float-typed field value mixed.put(-2894518L, "-2,894,518"); mixed.put(1879472193L, "1,879,472,193"); - SolrInputDocument d = processAdd("parse-float-no-run-processor", - doc(f("id", "7205"), f("not_in_schema", mixed.values()))); + SolrInputDocument d = + processAdd( + "parse-float-no-run-processor", + doc(f("id", "7205"), f("not_in_schema", mixed.values()))); assertNotNull(d); boolean foundLong = false; for (Object o : d.getFieldValues("not_in_schema")) { @@ -567,40 +628,44 @@ public void testParseDoubleRoundTrip() throws Exception { double value = 10898.83491; String doubleString1 = "10898.83491"; String doubleString2 = "10,898.83491"; - SolrInputDocument d = processAdd("parse-double", - doc(f("id", "128"), f("double1_d", doubleString1), f("double2_d", doubleString2))); + SolrInputDocument d = + processAdd( + "parse-double", + doc(f("id", "128"), f("double1_d", doubleString1), f("double2_d", doubleString2))); assertNotNull(d); assertThat(d.getFieldValue("double1_d"), IS_DOUBLE); - assertEquals(value, (Double)d.getFieldValue("double1_d"), EPSILON); + assertEquals(value, (Double) d.getFieldValue("double1_d"), EPSILON); assertThat(d.getFieldValue("double2_d"), IS_DOUBLE); - assertEquals(value, (Double)d.getFieldValue("double2_d"), EPSILON); + assertEquals(value, (Double) d.getFieldValue("double2_d"), EPSILON); assertU(commit()); - assertQ(req("id:128") - ,"//double[@name='double1_d'][.='" + value + "']" - ,"//double[@name='double2_d'][.='" + value + "']"); + assertQ( + req("id:128"), + "//double[@name='double1_d'][.='" + value + "']", + 
"//double[@name='double2_d'][.='" + value + "']"); } public void testParseDoubleNonRootLocale() throws Exception { - final DecimalFormatSymbols fr_FR = DecimalFormatSymbols.getInstance(new Locale("fr","FR")); + final DecimalFormatSymbols fr_FR = DecimalFormatSymbols.getInstance(new Locale("fr", "FR")); final char groupChar = fr_FR.getGroupingSeparator(); final char decimalChar = fr_FR.getDecimalSeparator(); double value = 10898.83491D; - String doubleString1 = "10898"+decimalChar+"83491"; - String doubleString2 = "10"+groupChar+"898"+decimalChar+"83491"; - + String doubleString1 = "10898" + decimalChar + "83491"; + String doubleString2 = "10" + groupChar + "898" + decimalChar + "83491"; + IndexSchema schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull("double_d")); // should match dynamic field "*_d" assertNull(schema.getFieldOrNull("not_in_schema")); - SolrInputDocument d = processAdd("parse-double-french-no-run-processor", - doc(f("id", "140"), f("double_d", doubleString1), - f("not_in_schema", doubleString2))); + SolrInputDocument d = + processAdd( + "parse-double-french-no-run-processor", + doc(f("id", "140"), f("double_d", doubleString1), f("not_in_schema", doubleString2))); assertNotNull(d); assertThat(d.getFieldValue("double_d"), IS_DOUBLE); - assertEquals(value, (Double)d.getFieldValue("double_d"), EPSILON); + assertEquals(value, (Double) d.getFieldValue("double_d"), EPSILON); assertThat(d.getFieldValue("not_in_schema"), IS_DOUBLE); - assertEquals(value, (Double)d.getFieldValue("not_in_schema"), EPSILON); + assertEquals(value, (Double) d.getFieldValue("not_in_schema"), EPSILON); } public void testParseTrieDoubleRoundTrip() throws Exception { @@ -610,31 +675,36 @@ public void testParseTrieDoubleRoundTrip() throws Exception { double value = 10898.83491; String doubleString1 = "10898.83491"; String doubleString2 = "10,898.83491"; - SolrInputDocument d = processAdd("parse-double", - doc(f("id", "728"), f("double1_td", doubleString1), f("double2_td", doubleString2))); + SolrInputDocument d = + processAdd( + "parse-double", + doc(f("id", "728"), f("double1_td", doubleString1), f("double2_td", doubleString2))); assertNotNull(d); assertThat(d.getFieldValue("double1_td"), IS_DOUBLE); - assertEquals(value, (Double)d.getFieldValue("double1_td"), EPSILON); + assertEquals(value, (Double) d.getFieldValue("double1_td"), EPSILON); assertThat(d.getFieldValue("double2_td"), IS_DOUBLE); - assertEquals(value, (Double)d.getFieldValue("double2_td"), EPSILON); + assertEquals(value, (Double) d.getFieldValue("double2_td"), EPSILON); assertU(commit()); - assertQ(req("id:728") - ,"//double[@name='double1_td'][.='" + value + "']" - ,"//double[@name='double2_td'][.='" + value + "']"); + assertQ( + req("id:728"), + "//double[@name='double1_td'][.='" + value + "']", + "//double[@name='double2_td'][.='" + value + "']"); } public void testFailedParseMixedDouble() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); assertNull(schema.getFieldOrNull("not_in_schema")); - Map mixed = new HashMap<>(); + Map mixed = new HashMap<>(); Long longVal = 294423L; mixed.put(85, "85.0"); mixed.put(longVal, longVal); // Float-typed field value mixed.put(-2894.518, "-2,894.518"); mixed.put(187947.2193, "187,947.2193"); - SolrInputDocument d = processAdd("parse-double-no-run-processor", - doc(f("id", "7206"), f("not_in_schema", mixed.values()))); + SolrInputDocument d = + processAdd( + "parse-double-no-run-processor", + doc(f("id", "7206"), f("not_in_schema", mixed.values()))); 
assertNotNull(d); boolean foundLong = false; for (Object o : d.getFieldValues("not_in_schema")) { @@ -655,8 +725,9 @@ public void testParseBooleanRoundTrip() throws Exception { assertNotNull(schema.getFieldOrNull("boolean2_b")); // should match dynamic field "*_b" boolean value1 = true; boolean value2 = false; - SolrInputDocument d = processAdd("parse-boolean", - doc(f("id", "141"), f("boolean1_b", value1), f("boolean2_b", value2))); + SolrInputDocument d = + processAdd( + "parse-boolean", doc(f("id", "141"), f("boolean1_b", value1), f("boolean2_b", value2))); assertNotNull(d); assertThat(d.getFieldValue("boolean1_b"), IS_BOOLEAN); assertEquals(value1, d.getFieldValue("boolean1_b")); @@ -664,11 +735,12 @@ public void testParseBooleanRoundTrip() throws Exception { assertEquals(value2, d.getFieldValue("boolean2_b")); assertU(commit()); - assertQ(req("id:141") - ,"//bool[@name='boolean1_b'][.='" + value1 + "']" - ,"//bool[@name='boolean2_b'][.='" + value2 + "']"); + assertQ( + req("id:141"), + "//bool[@name='boolean1_b'][.='" + value1 + "']", + "//bool[@name='boolean2_b'][.='" + value2 + "']"); } - + public void testParseAlternateValueBooleans() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull("boolean1_b")); // should match dynamic field "*_b" @@ -677,17 +749,19 @@ public void testParseAlternateValueBooleans() throws Exception { assertNotNull(schema.getFieldOrNull("boolean4_b")); // should match dynamic field "*_b" assertNotNull(schema.getFieldOrNull("boolean5_b")); // should match dynamic field "*_b" assertNull(schema.getFieldOrNull("not_in_schema")); - boolean[] values = { true, true, true, false, false, false }; - String[] stringValues = { "on", "yes", "True", "Off", "no", "FALSE" }; - String[] fieldNames = { "boolean1_b", "boolean2_b", "boolean3_b", "boolean4_b", "boolean5_b", "not_in_schema" }; + boolean[] values = {true, true, true, false, false, false}; + String[] stringValues = {"on", "yes", "True", "Off", "no", "FALSE"}; + String[] fieldNames = { + "boolean1_b", "boolean2_b", "boolean3_b", "boolean4_b", "boolean5_b", "not_in_schema" + }; SolrInputDocument d = doc(f("id", "55")); - for (int i = 0 ; i < values.length ; ++i) { + for (int i = 0; i < values.length; ++i) { d.addField(fieldNames[i], stringValues[i]); } d = processAdd("parse-boolean-alternate-values-no-run-processor", d); assertNotNull(d); - for (int i = 0 ; i < values.length ; ++i) { + for (int i = 0; i < values.length; ++i) { assertThat(d.getFieldValue(fieldNames[i]), IS_BOOLEAN); assertEquals(values[i], d.getFieldValue(fieldNames[i])); } @@ -697,31 +771,31 @@ public void testParseAlternateSingleValuesBooleans() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull("boolean1_b")); // should match dynamic field "*_b" assertNotNull(schema.getFieldOrNull("boolean2_b")); // should match dynamic field "*_b" - boolean[] values = { true, false }; - String[] stringValues = { "yup", "nope" }; - String[] fieldNames = { "boolean1_b", "boolean2_b" }; + boolean[] values = {true, false}; + String[] stringValues = {"yup", "nope"}; + String[] fieldNames = {"boolean1_b", "boolean2_b"}; SolrInputDocument d = doc(f("id", "59")); - for (int i = 0 ; i < values.length ; ++i) { + for (int i = 0; i < values.length; ++i) { d.addField(fieldNames[i], stringValues[i]); } d = processAdd("parse-boolean-alternate-single-values-no-run-processor", d); assertNotNull(d); - for (int i = 0 ; i < values.length ; ++i) { + for (int i = 0; i < 
values.length; ++i) { assertThat(d.getFieldValue(fieldNames[i]), IS_BOOLEAN); assertEquals(values[i], d.getFieldValue(fieldNames[i])); } // Standard boolean values should not be mutated, since they're not configured - stringValues = new String[] { "true", "false" }; + stringValues = new String[] {"true", "false"}; d = doc(f("id", "593")); - for (int i = 0 ; i < values.length ; ++i) { + for (int i = 0; i < values.length; ++i) { d.addField(fieldNames[i], stringValues[i]); } d = processAdd("parse-boolean-alternate-single-values-no-run-processor", d); assertNotNull(d); - for (int i = 0 ; i < values.length ; ++i) { + for (int i = 0; i < values.length; ++i) { assertThat(d.getFieldValue(fieldNames[i]), IS_STRING); } } @@ -729,14 +803,16 @@ public void testParseAlternateSingleValuesBooleans() throws Exception { public void testFailedParseMixedBoolean() throws Exception { IndexSchema schema = h.getCore().getLatestSchema(); assertNull(schema.getFieldOrNull("not_in_schema")); - Map mixed = new HashMap<>(); + Map mixed = new HashMap<>(); Long longVal = 294423L; mixed.put(true, "true"); mixed.put(longVal, longVal); // Float-typed field value mixed.put(false, "false"); mixed.put(true, "true"); - SolrInputDocument d = processAdd("parse-boolean-no-run-processor", - doc(f("id", "7207"), f("not_in_schema", mixed.values()))); + SolrInputDocument d = + processAdd( + "parse-boolean-no-run-processor", + doc(f("id", "7207"), f("not_in_schema", mixed.values()))); assertNotNull(d); boolean foundLong = false; for (Object o : d.getFieldValues("not_in_schema")) { @@ -757,8 +833,8 @@ public void testCascadingParsers() throws Exception { assertNull(schema.getFieldOrNull(fieldName)); SolrInputDocument d = null; String chain = "cascading-parsers-no-run-processor"; - - Map booleans = new HashMap<>(); + + Map booleans = new HashMap<>(); booleans.put(true, "truE"); booleans.put(false, "False"); d = processAdd(chain, doc(f("id", "341"), f(fieldName, booleans.values()))); @@ -769,7 +845,7 @@ public void testCascadingParsers() throws Exception { } assertTrue(booleans.isEmpty()); - Map ints = new HashMap<>(); + Map ints = new HashMap<>(); ints.put(2, "2"); ints.put(50928, "50928"); ints.put(86942008, "86,942,008"); @@ -781,7 +857,7 @@ public void testCascadingParsers() throws Exception { } assertTrue(ints.isEmpty()); - Map longs = new HashMap<>(); + Map longs = new HashMap<>(); longs.put(2L, "2"); longs.put(50928L, "50928"); longs.put(86942008987654L, "86,942,008,987,654"); @@ -792,10 +868,10 @@ public void testCascadingParsers() throws Exception { longs.remove(o); } assertTrue(longs.isEmpty()); - + /* // Disabling this test because unlike Integer/Long, Float parsing can perform - // rounding to make values fit. See + // rounding to make values fit. 
See Map floats = new HashMap(); floats.put(2.0, "2."); floats.put(509.28, "509.28"); @@ -808,7 +884,7 @@ public void testCascadingParsers() throws Exception { } */ - Map doubles = new HashMap<>(); + Map doubles = new HashMap<>(); doubles.put(2.0, "2."); doubles.put(509.28, "509.28"); doubles.put(86942.008, "86,942.008"); @@ -819,8 +895,8 @@ public void testCascadingParsers() throws Exception { longs.remove(o); } - Map dates = new HashMap<>(); - String[] dateStrings = { "2020-05-13T18:47", "1989-12-14", "1682-07-22T18:33:00.000Z" }; + Map dates = new HashMap<>(); + String[] dateStrings = {"2020-05-13T18:47", "1989-12-14", "1682-07-22T18:33:00.000Z"}; for (String dateString : dateStrings) { dates.put(parse(isoDateOptionalTimeFormatter, dateString), dateString); } @@ -831,8 +907,8 @@ public void testCascadingParsers() throws Exception { dates.remove(o); } assertTrue(dates.isEmpty()); - - Map mixedLongsAndDoubles = new LinkedHashMap<>(); // preserve order + + Map mixedLongsAndDoubles = new LinkedHashMap<>(); // preserve order mixedLongsAndDoubles.put(85.0, "85"); mixedLongsAndDoubles.put(2.94423E-9, "2.94423E-9"); mixedLongsAndDoubles.put(2894518.0, "2,894,518"); @@ -844,7 +920,7 @@ public void testCascadingParsers() throws Exception { mixedLongsAndDoubles.remove(o); } assertTrue(mixedLongsAndDoubles.isEmpty()); - + Set mixed = new HashSet<>(); mixed.add("true"); mixed.add("1682-07-22T18:33:00.000Z"); @@ -857,7 +933,7 @@ public void testCascadingParsers() throws Exception { assertThat(o, IS_STRING); } - Map mixedDoubles = new LinkedHashMap<>(); // preserve order + Map mixedDoubles = new LinkedHashMap<>(); // preserve order mixedDoubles.put(85.0, "85"); mixedDoubles.put(2.94423E-9, 2.94423E-9); // Double-typed field value mixedDoubles.put(2894518.0, "2,894,518"); @@ -870,7 +946,7 @@ public void testCascadingParsers() throws Exception { } assertTrue(mixedDoubles.isEmpty()); - Map mixedInts = new LinkedHashMap<>(); // preserve order + Map mixedInts = new LinkedHashMap<>(); // preserve order mixedInts.put(85, "85"); mixedInts.put(294423, 294423); // Integer-typed field value mixedInts.put(-2894518, "-2,894,518"); @@ -883,7 +959,7 @@ public void testCascadingParsers() throws Exception { } assertTrue(mixedInts.isEmpty()); - Map mixedLongs = new LinkedHashMap<>(); // preserve order + Map mixedLongs = new LinkedHashMap<>(); // preserve order mixedLongs.put(85L, "85"); mixedLongs.put(42944233L, 42944233L); // Long-typed field value mixedLongs.put(2894518L, "2,894,518"); @@ -896,7 +972,7 @@ public void testCascadingParsers() throws Exception { } assertTrue(mixedLongs.isEmpty()); - Map mixedBooleans = new LinkedHashMap<>(); // preserve order + Map mixedBooleans = new LinkedHashMap<>(); // preserve order mixedBooleans.put(true, "true"); mixedBooleans.put(false, false); // Boolean-typed field value mixedBooleans.put(false, "false"); @@ -909,8 +985,8 @@ public void testCascadingParsers() throws Exception { } assertTrue(mixedBooleans.isEmpty()); - Map mixedDates = new HashMap<>(); - dateStrings = new String[] { "2020-05-13T18:47", "1989-12-14", "1682-07-22T18:33:00.000Z" }; + Map mixedDates = new HashMap<>(); + dateStrings = new String[] {"2020-05-13T18:47", "1989-12-14", "1682-07-22T18:33:00.000Z"}; for (String dateString : dateStrings) { mixedDates.put(parse(isoDateOptionalTimeFormatter, dateString), dateString); } @@ -931,21 +1007,29 @@ public void testISO8601() throws IOException { // This test tries to mimic TestExtractionDateUtil#testISO8601 String[] dateStrings = { - "0001-01-01T01:01:01Z", 
"+12021-12-01T03:03:03Z", - "0000-04-04T04:04:04Z", "-0005-05-05T05:05:05Z", - "-2021-12-01T04:04:04Z", "-12021-12-01T02:02:02Z" + "0001-01-01T01:01:01Z", "+12021-12-01T03:03:03Z", + "0000-04-04T04:04:04Z", "-0005-05-05T05:05:05Z", + "-2021-12-01T04:04:04Z", "-12021-12-01T02:02:02Z" }; int id = 1; // ensure strings are parsed - for(String notInFormatDateString: dateStrings) { + for (String notInFormatDateString : dateStrings) { IndexSchema schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull("date_dt")); // should match "*_dt" dynamic field - SolrInputDocument d = processAdd("parse-date-patterns-default-config", doc(f("id", id), f("date_dt", notInFormatDateString))); + SolrInputDocument d = + processAdd( + "parse-date-patterns-default-config", + doc(f("id", id), f("date_dt", notInFormatDateString))); assertNotNull(d); - assertThat("Date string: " + notInFormatDateString + " was not parsed as a date", d.getFieldValue("date_dt"), IS_DATE); - assertEquals(notInFormatDateString, ((Date) d.getField("date_dt").getFirstValue()).toInstant().toString()); + assertThat( + "Date string: " + notInFormatDateString + " was not parsed as a date", + d.getFieldValue("date_dt"), + IS_DATE); + assertEquals( + notInFormatDateString, + ((Date) d.getField("date_dt").getFirstValue()).toInstant().toString()); assertU(commit()); assertQ(req("id:" + id), "//date[@name='date_dt'][.='" + notInFormatDateString + "']"); ++id; @@ -953,23 +1037,29 @@ public void testISO8601() throws IOException { // odd values are date strings, even values are expected strings String[] lenientDateStrings = { - "10995-12-31T23:59:59.990Z", "+10995-12-31T23:59:59.990Z", - "995-1-2T3:4:5Z", "0995-01-02T03:04:05Z", - "2021-01-01t03:04:05", "2021-01-01T03:04:05Z", - "2021-12-01 04:04:04", "2021-12-01T04:04:04Z" + "10995-12-31T23:59:59.990Z", "+10995-12-31T23:59:59.990Z", + "995-1-2T3:4:5Z", "0995-01-02T03:04:05Z", + "2021-01-01t03:04:05", "2021-01-01T03:04:05Z", + "2021-12-01 04:04:04", "2021-12-01T04:04:04Z" }; // ensure sure strings that should be parsed using lenient resolver are properly parsed - for(int i = 0; i < lenientDateStrings.length; ++i) { + for (int i = 0; i < lenientDateStrings.length; ++i) { String lenientDateString = lenientDateStrings[i]; String expectedString = lenientDateStrings[++i]; IndexSchema schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull("date_dt")); // should match "*_dt" dynamic field - SolrInputDocument d = processAdd("parse-date-patterns-default-config", doc(f("id", id), f("date_dt", lenientDateString))); + SolrInputDocument d = + processAdd( + "parse-date-patterns-default-config", + doc(f("id", id), f("date_dt", lenientDateString))); assertNotNull(d); - assertThat("Date string: " + lenientDateString + " was not parsed as a date", - d.getFieldValue("date_dt"), IS_DATE); - assertEquals(expectedString, ((Date) d.getField("date_dt").getFirstValue()).toInstant().toString()); + assertThat( + "Date string: " + lenientDateString + " was not parsed as a date", + d.getFieldValue("date_dt"), + IS_DATE); + assertEquals( + expectedString, ((Date) d.getField("date_dt").getFirstValue()).toInstant().toString()); ++id; } } @@ -980,32 +1070,51 @@ public void testAKSTZone() throws IOException { final String inputString = "Thu Nov 13 04:35:51 AKST 2008"; // asctime + timezone1 final long expectTs = 1226583351000L; - assertEquals(expectTs, + assertEquals( + expectTs, DateTimeFormatter.ofPattern(dateFormat, Locale.ENGLISH) - .withZone(ZoneId.of("UTC")).parse(inputString, 
Instant::from).toEpochMilli()); + .withZone(ZoneId.of("UTC")) + .parse(inputString, Instant::from) + .toEpochMilli()); // ensure english locale and root locale return the same date - assertEquals(expectTs + "", DateTimeFormatter.ofPattern(dateFormat, Locale.ENGLISH) - .withZone(ZoneId.of("UTC")).parse(inputString, Instant::from).toEpochMilli(), + assertEquals( + expectTs + "", + DateTimeFormatter.ofPattern(dateFormat, Locale.ENGLISH) + .withZone(ZoneId.of("UTC")) + .parse(inputString, Instant::from) + .toEpochMilli(), DateTimeFormatter.ofPattern(dateFormat, Locale.ROOT) - .withZone(ZoneId.of("UTC")).parse(inputString, Instant::from).toEpochMilli()); + .withZone(ZoneId.of("UTC")) + .parse(inputString, Instant::from) + .toEpochMilli()); - assertParsedDate(inputString, Date.from(Instant.ofEpochMilli(expectTs)), "parse-date-patterns-default-config"); + assertParsedDate( + inputString, + Date.from(Instant.ofEpochMilli(expectTs)), + "parse-date-patterns-default-config"); // A bug in Java 9 (not in 8) causes this to fail! (not fixed yet?!) // see https://bugs.openjdk.java.net/browse/JDK-8189784 if (System.getProperty("java.version").startsWith("1.8.")) { // with daylight savings time timezone - assertParsedDate("Fri Oct 7 05:14:15 AKDT 2005", Date.from(inst20051007131415()), "parse-date-patterns-default-config"); + assertParsedDate( + "Fri Oct 7 05:14:15 AKDT 2005", + Date.from(inst20051007131415()), + "parse-date-patterns-default-config"); } else { - System.err.println("Didn't test AKDT because only Java 1.8 does this right! This Java version is: " + System.getProperty("java.version")); + System.err.println( + "Didn't test AKDT because only Java 1.8 does this right! This Java version is: " + + System.getProperty("java.version")); } } public void testEDTZone() throws IOException { - //EDT is GMT-4 - assertParsedDate("Fri Oct 7 09:14:15 EDT 2005", // asctime + timezone - Date.from(inst20051007131415()), "parse-date-patterns-default-config"); + // EDT is GMT-4 + assertParsedDate( + "Fri Oct 7 09:14:15 EDT 2005", // asctime + timezone + Date.from(inst20051007131415()), + "parse-date-patterns-default-config"); } public void testNoTime() throws IOException { @@ -1015,34 +1124,55 @@ public void testNoTime() throws IOException { } public void testRfc1123() throws IOException { - assertParsedDate("Fri, 07 Oct 2005 13:14:15 GMT", Date.from(inst20051007131415()), "parse-date-patterns-default-config"); + assertParsedDate( + "Fri, 07 Oct 2005 13:14:15 GMT", + Date.from(inst20051007131415()), + "parse-date-patterns-default-config"); } public void testRfc1036() throws IOException { - assertParsedDate("Friday, 07-Oct-05 13:14:15 GMT", Date.from(inst20051007131415()), "parse-date-patterns-default-config"); + assertParsedDate( + "Friday, 07-Oct-05 13:14:15 GMT", + Date.from(inst20051007131415()), + "parse-date-patterns-default-config"); } public void testAsctime() throws Exception { - assertParsedDate("Fri Oct 7 13:14:15 2005" , Date.from(inst20051007131415()), "parse-date-patterns-default-config"); + assertParsedDate( + "Fri Oct 7 13:14:15 2005", + Date.from(inst20051007131415()), + "parse-date-patterns-default-config"); // also see testEDTZone } public void testAsctimeLeniency() throws Exception { // test double digit day - assertParsedDate("Fri Oct 07 13:14:15 2005" , Date.from(inst20051007131415()), "parse-date-patterns-default-config"); + assertParsedDate( + "Fri Oct 07 13:14:15 2005", + Date.from(inst20051007131415()), + "parse-date-patterns-default-config"); // test 2 spaces left of a single digit day - 
assertParsedDate("Fri Oct 7 13:14:15 2005" , Date.from(inst20051007131415()), "parse-date-patterns-default-config"); + assertParsedDate( + "Fri Oct 7 13:14:15 2005", + Date.from(inst20051007131415()), + "parse-date-patterns-default-config"); // longer day of week - assertParsedDate("Friday Oct 7 13:14:15 2005", Date.from(inst20051007131415()), "parse-date-patterns-default-config"); + assertParsedDate( + "Friday Oct 7 13:14:15 2005", + Date.from(inst20051007131415()), + "parse-date-patterns-default-config"); } public void testParseQuotedDate() throws IOException { // also using 2 digit day - assertParsedDate("'Fri, 14 Oct 2005 13:14:15 GMT'", - Date.from(instant(2005, 10, 14, 13, 14, 15)), "parse-date-patterns-default-config"); + assertParsedDate( + "'Fri, 14 Oct 2005 13:14:15 GMT'", + Date.from(instant(2005, 10, 14, 13, 14, 15)), + "parse-date-patterns-default-config"); } - private static Instant instant(final int year, final int month, final int day, int hour, int minute, int second) { + private static Instant instant( + final int year, final int month, final int day, int hour, int minute, int second) { return LocalDate.of(year, month, day).atTime(hour, minute, second).toInstant(ZoneOffset.UTC); } @@ -1050,19 +1180,28 @@ private Instant inst20051007131415() { return instant(2005, 10, 7, 13, 14, 15); } - private void assertParsedDate(String inputDateString, Date expectedDate, String chain) throws IOException { + private void assertParsedDate(String inputDateString, Date expectedDate, String chain) + throws IOException { IndexSchema schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull("date_dt")); // should match "*_dt" dynamic field SolrInputDocument d = processAdd(chain, doc(f("id", "1"), f("date_dt", inputDateString))); assertNotNull(d); - assertThat("Date string: " + inputDateString + " was not parsed as a date", - d.getFieldValue("date_dt"), IS_DATE); + assertThat( + "Date string: " + inputDateString + " was not parsed as a date", + d.getFieldValue("date_dt"), + IS_DATE); assertEquals(expectedDate, d.getField("date_dt").getFirstValue()); } private static Date parse(DateTimeFormatter dateTimeFormatter, String dateString) { - final TemporalAccessor temporalAccessor = dateTimeFormatter.parseBest(dateString, OffsetDateTime::from, - ZonedDateTime::from, LocalDateTime::from, LocalDate::from, Instant::from); + final TemporalAccessor temporalAccessor = + dateTimeFormatter.parseBest( + dateString, + OffsetDateTime::from, + ZonedDateTime::from, + LocalDateTime::from, + LocalDate::from, + Instant::from); return temporalToDate(temporalAccessor, dateTimeFormatter.getZone()); } diff --git a/solr/core/src/test/org/apache/solr/update/processor/PreAnalyzedUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/PreAnalyzedUpdateProcessorTest.java index 20ce997f38f..c18a3a85ecb 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/PreAnalyzedUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/PreAnalyzedUpdateProcessorTest.java @@ -22,23 +22,20 @@ import org.junit.Test; public class PreAnalyzedUpdateProcessorTest extends UpdateProcessorTestBase { - String[] simpleTitle = new String[] { - "not pre-analyzed", - "1 =string value=foo bar" - }; - String[] jsonTitle = new String[] { - "not pre-analyzed", - "{\"v\":\"1\",\"str\":\"string value\",\"tokens\":[{\"t\":\"foo\"},{\"t\":\"bar\"}]}", - }; - String[] simpleTeststop = new String[] { - "1 =this is a test.=one two three", - "1 =this is a test.=three four five" - }; - 
String[] jsonTeststop = new String[] { - "{\"v\":\"1\",\"str\":\"this is a test.\",\"tokens\":[{\"t\":\"one\"},{\"t\":\"two\"},{\"t\":\"three\"}]}", - "{\"v\":\"1\",\"str\":\"this is a test.\",\"tokens\":[{\"t\":\"three\"},{\"t\":\"four\"},{\"t\":\"five\"}]}", - }; - + String[] simpleTitle = new String[] {"not pre-analyzed", "1 =string value=foo bar"}; + String[] jsonTitle = + new String[] { + "not pre-analyzed", + "{\"v\":\"1\",\"str\":\"string value\",\"tokens\":[{\"t\":\"foo\"},{\"t\":\"bar\"}]}", + }; + String[] simpleTeststop = + new String[] {"1 =this is a test.=one two three", "1 =this is a test.=three four five"}; + String[] jsonTeststop = + new String[] { + "{\"v\":\"1\",\"str\":\"this is a test.\",\"tokens\":[{\"t\":\"one\"},{\"t\":\"two\"},{\"t\":\"three\"}]}", + "{\"v\":\"1\",\"str\":\"this is a test.\",\"tokens\":[{\"t\":\"three\"},{\"t\":\"four\"},{\"t\":\"five\"}]}", + }; + @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig-update-processor-chains.xml", "schema12.xml"); @@ -48,72 +45,78 @@ public static void beforeClass() throws Exception { public void testSimple() throws Exception { test("pre-analyzed-simple", simpleTitle, simpleTeststop); } - + @Test public void testJson() throws Exception { test("pre-analyzed-json", jsonTitle, jsonTeststop); } private void test(String chain, String[] title, String[] teststop) throws Exception { - SolrInputDocument doc = processAdd(chain, - doc(f("id", "1"), - f("title", title[0]), - f("teststop", teststop[0]), - f("nonexistent", "foobar"), - f("ssto", teststop[0]), - f("sind", teststop[0]))); + SolrInputDocument doc = + processAdd( + chain, + doc( + f("id", "1"), + f("title", title[0]), + f("teststop", teststop[0]), + f("nonexistent", "foobar"), + f("ssto", teststop[0]), + f("sind", teststop[0]))); assertEquals("title should be unchanged", title[0], doc.getFieldValue("title")); assertTrue("teststop should be a Field", doc.getFieldValue("teststop") instanceof Field); - Field f = (Field)doc.getFieldValue("teststop"); + Field f = (Field) doc.getFieldValue("teststop"); assertEquals("teststop should have stringValue", "this is a test.", f.stringValue()); assertNotNull("teststop should have tokensStreamValue", f.tokenStreamValue()); assertNull("nonexistent should be dropped", doc.getField("nonexistent")); // check how SchemaField type affects stored/indexed part processing - f = (Field)doc.getFieldValue("ssto"); + f = (Field) doc.getFieldValue("ssto"); assertNotNull("should have ssto", f); assertNotNull("should have stringValue", f.stringValue()); assertNull("should not have tokenStreamValue", f.tokenStreamValue()); - f = (Field)doc.getFieldValue("sind"); + f = (Field) doc.getFieldValue("sind"); assertNotNull("should have sind", f); assertNull("should not have stringValue: '" + f.stringValue() + "'", f.stringValue()); assertNotNull("should have tokenStreamValue", f.tokenStreamValue()); - - doc = processAdd(chain, - doc(f("id", "2"), - f("title", title[1]), - f("teststop", teststop[1]), - f("nonexistent", "foobar"), - f("ssto", teststop[1]), - f("sind", teststop[1]))); + + doc = + processAdd( + chain, + doc( + f("id", "2"), + f("title", title[1]), + f("teststop", teststop[1]), + f("nonexistent", "foobar"), + f("ssto", teststop[1]), + f("sind", teststop[1]))); assertTrue("title should be a Field", doc.getFieldValue("title") instanceof Field); assertTrue("teststop should be a Field", doc.getFieldValue("teststop") instanceof Field); - f = (Field)doc.getFieldValue("teststop"); + f = (Field) 
doc.getFieldValue("teststop"); assertEquals("teststop should have stringValue", "this is a test.", f.stringValue()); assertNotNull("teststop should have tokensStreamValue", f.tokenStreamValue()); assertNull("nonexistent should be dropped", doc.getField("nonexistent")); // check how SchemaField type affects stored/indexed part processing - f = (Field)doc.getFieldValue("ssto"); + f = (Field) doc.getFieldValue("ssto"); assertNotNull("should have ssto", f); assertNotNull("should have stringValue", f.stringValue()); assertNull("should not have tokenStreamValue", f.tokenStreamValue()); - f = (Field)doc.getFieldValue("sind"); + f = (Field) doc.getFieldValue("sind"); assertNotNull("should have sind", f); assertNull("should not have stringValue: '" + f.stringValue() + "'", f.stringValue()); assertNotNull("should have tokenStreamValue", f.tokenStreamValue()); - + assertU(commit()); - assertQ(req("teststop:\"one two three\"") - ,"//str[@name='id'][.='1']" - ,"//str[@name='teststop'][.='this is a test.']" - ); - assertQ(req("teststop:three") - ,"//*[@numFound='2']" - ,"//result/doc[1]/str[@name='id'][.='1']" - ,"//result/doc[1]/str[@name='title'][.='not pre-analyzed']" - ,"//result/doc[2]/str[@name='id'][.='2']" - ,"//result/doc[2]/arr[@name='title']/str[.='string value']" - ); + assertQ( + req("teststop:\"one two three\""), + "//str[@name='id'][.='1']", + "//str[@name='teststop'][.='this is a test.']"); + assertQ( + req("teststop:three"), + "//*[@numFound='2']", + "//result/doc[1]/str[@name='id'][.='1']", + "//result/doc[1]/str[@name='title'][.='not pre-analyzed']", + "//result/doc[2]/str[@name='id'][.='2']", + "//result/doc[2]/arr[@name='title']/str[.='string value']"); assertQ(req("ssto:three"), "//*[@numFound='0']"); assertQ(req("sind:three"), "//*[@numFound='2']"); - } + } } diff --git a/solr/core/src/test/org/apache/solr/update/processor/RecordingUpdateProcessorFactory.java b/solr/core/src/test/org/apache/solr/update/processor/RecordingUpdateProcessorFactory.java index 1a6f60d193a..8803345841f 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/RecordingUpdateProcessorFactory.java +++ b/solr/core/src/test/org/apache/solr/update/processor/RecordingUpdateProcessorFactory.java @@ -17,72 +17,70 @@ package org.apache.solr.update.processor; import java.io.IOException; - +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.response.SolrQueryResponse; -import org.apache.solr.update.UpdateCommand; import org.apache.solr.update.AddUpdateCommand; import org.apache.solr.update.CommitUpdateCommand; import org.apache.solr.update.DeleteUpdateCommand; import org.apache.solr.update.MergeIndexesCommand; import org.apache.solr.update.RollbackUpdateCommand; - -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.LinkedBlockingQueue; +import org.apache.solr.update.UpdateCommand; /** - * This Factory can optionally save references to the commands it receives in - * BlockingQueues that tests can poll from to observe that the expected commands - * are executed. By default, this factory does nothing except return the "next" - * processor from the chain unless it's told to {@link #startRecording()} + * This Factory can optionally save references to the commands it receives in BlockingQueues that + * tests can poll from to observe that the expected commands are executed. 
By default, this factory + * does nothing except return the "next" processor from the chain unless it's told to {@link + * #startRecording()} */ -public final class RecordingUpdateProcessorFactory - extends UpdateRequestProcessorFactory { +public final class RecordingUpdateProcessorFactory extends UpdateRequestProcessorFactory { private boolean recording = false; - /** The queue containing commands that were recorded + /** + * The queue containing commands that were recorded + * * @see #startRecording */ - public final BlockingQueue commandQueue - = new LinkedBlockingQueue(); + public final BlockingQueue commandQueue = new LinkedBlockingQueue(); - /** - * @see #stopRecording + /** + * @see #stopRecording * @see #commandQueue */ public synchronized void startRecording() { recording = true; } - /** @see #startRecording */ + /** + * @see #startRecording + */ public synchronized void stopRecording() { recording = false; } @Override @SuppressWarnings("resource") - public synchronized UpdateRequestProcessor getInstance(SolrQueryRequest req, - SolrQueryResponse rsp, - UpdateRequestProcessor next ) { + public synchronized UpdateRequestProcessor getInstance( + SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) { return recording ? new RecordingUpdateRequestProcessor(commandQueue, next) : next; } - private static final class RecordingUpdateRequestProcessor - extends UpdateRequestProcessor { + private static final class RecordingUpdateRequestProcessor extends UpdateRequestProcessor { private final BlockingQueue commandQueue; - public RecordingUpdateRequestProcessor(BlockingQueue commandQueue, - UpdateRequestProcessor next) { + public RecordingUpdateRequestProcessor( + BlockingQueue commandQueue, UpdateRequestProcessor next) { super(next); this.commandQueue = commandQueue; } private void record(UpdateCommand cmd) { - if (! 
commandQueue.offer(cmd) ) { - throw new RuntimeException - ("WTF: commandQueue should be unbounded but offer failed: " + cmd.toString()); + if (!commandQueue.offer(cmd)) { + throw new RuntimeException( + "WTF: commandQueue should be unbounded but offer failed: " + cmd.toString()); } } @@ -91,21 +89,25 @@ public void processAdd(AddUpdateCommand cmd) throws IOException { record(cmd); super.processAdd(cmd); } + @Override public void processDelete(DeleteUpdateCommand cmd) throws IOException { record(cmd); super.processDelete(cmd); } + @Override public void processMergeIndexes(MergeIndexesCommand cmd) throws IOException { record(cmd); super.processMergeIndexes(cmd); } + @Override public void processCommit(CommitUpdateCommand cmd) throws IOException { record(cmd); super.processCommit(cmd); } + @Override public void processRollback(RollbackUpdateCommand cmd) throws IOException { record(cmd); @@ -113,6 +115,3 @@ public void processRollback(RollbackUpdateCommand cmd) throws IOException { } } } - - - diff --git a/solr/core/src/test/org/apache/solr/update/processor/RegexBoostProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/RegexBoostProcessorTest.java index dac0ad0b9cd..2854f62ad4e 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/RegexBoostProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/RegexBoostProcessorTest.java @@ -41,7 +41,7 @@ public static void setUpBeforeClass() throws Exception { System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_ initCore("solrconfig.xml", "schema12.xml"); SolrCore core = h.getCore(); - _parser = new SolrRequestParsers( null ); + _parser = new SolrRequestParsers(null); SolrQueryResponse resp = null; parameters = new ModifiableSolrParams(); parameters.set(RegexpBoostProcessor.BOOST_FILENAME_PARAM, "regex-boost-processor-test.txt"); @@ -52,7 +52,7 @@ public static void setUpBeforeClass() throws Exception { factory.init(parameters.toNamedList()); reProcessor = (RegexpBoostProcessor) factory.getInstance(req, resp, null); } - + @AfterClass public static void tearDownAfterClass() throws Exception { // null static members for gc @@ -72,55 +72,54 @@ public void setUp() throws Exception { public void testNoBoost() throws Exception { document.addField("id", "doc1"); document.addField("url", "http://www.nomatch.no"); - + processAdd(document); - + assertEquals(1.0d, document.getFieldValue("urlboost")); } - + @Test public void testDeboostOld() throws Exception { document.addField("id", "doc1"); document.addField("url", "http://www.somedomain.no/old/test.html"); - + processAdd(document); - + assertEquals(0.1d, document.getFieldValue("urlboost")); // Test the other deboost rule document = new SolrInputDocument(); document.addField("id", "doc1"); document.addField("url", "http://www.somedomain.no/foo/index(1).html"); - + processAdd(document); - + assertEquals(0.5d, document.getFieldValue("urlboost")); -} - + } + @Test public void testBoostGood() throws Exception { document.addField("id", "doc1"); document.addField("url", "http://www.mydomain.no/fifty-percent-boost"); - + processAdd(document); - + assertEquals(1.5d, document.getFieldValue("urlboost")); } - + @Test public void testTwoRules() throws Exception { document.addField("id", "doc1"); document.addField("url", "http://www.mydomain.no/old/test.html"); - + processAdd(document); - + assertEquals(0.15d, document.getFieldValue("urlboost")); } - + private void processAdd(SolrInputDocument doc) throws Exception { AddUpdateCommand 
addCommand = new AddUpdateCommand(null); addCommand.solrDoc = doc; reProcessor.processAdd(addCommand); } - } diff --git a/solr/core/src/test/org/apache/solr/update/processor/RoutedAliasUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/RoutedAliasUpdateProcessorTest.java index 906e393c66c..14ef7e996e8 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/RoutedAliasUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/RoutedAliasUpdateProcessorTest.java @@ -17,6 +17,8 @@ package org.apache.solr.update.processor; +import static java.util.concurrent.TimeUnit.NANOSECONDS; + import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -29,7 +31,6 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.stream.Collectors; - import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.embedded.JettySolrRunner; @@ -52,22 +53,22 @@ import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.NamedList; +import org.apache.solr.common.util.SolrNamedThreadFactory; import org.apache.solr.core.CoreDescriptor; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.update.UpdateCommand; -import org.apache.solr.common.util.SolrNamedThreadFactory; import org.junit.Ignore; -import static java.util.concurrent.TimeUnit.NANOSECONDS; - -@org.apache.lucene.util.LuceneTestCase.AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-13696") -@Ignore // don't try too run abstract base class +@org.apache.lucene.util.LuceneTestCase.AwaitsFix( + bugUrl = "https://issues.apache.org/jira/browse/SOLR-13696") +@Ignore // don't try to run abstract base class public abstract class RoutedAliasUpdateProcessorTest extends SolrCloudTestCase { private static final String intField = "integer_i"; - void waitColAndAlias(String alias, String separator, final String suffix, int slices) throws InterruptedException { + void waitColAndAlias(String alias, String separator, final String suffix, int slices) + throws InterruptedException { // collection to exist String collection = alias + separator + suffix; waitCol(slices, collection); @@ -81,15 +82,19 @@ void waitColAndAlias(String alias, String separator, final String suffix, int sl } } try { - DocCollection confirmCollection = cluster.getSolrClient().getClusterStateProvider().getClusterState().getCollectionOrNull(collection); - assertNotNull("Unable to find collection we were waiting for after done waiting",confirmCollection); + DocCollection confirmCollection = + cluster + .getSolrClient() + .getClusterStateProvider() + .getClusterState() + .getCollectionOrNull(collection); + assertNotNull( + "Unable to find collection we were waiting for after done waiting", confirmCollection); } catch (IOException e) { fail("exception getting collection we were waiting for and have supposedly created already"); } } - - private boolean haveCollection(String alias, String collection) { // separated into separate lines to make it easier to track down an NPE that occurred once // 3000 runs if it shows up again... 
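For context on the abstract base above: @AwaitsFix and @Ignore keep test runners from executing it directly, and concrete subclasses wire in the routed alias and the client through the two abstract methods declared later in this file, getAlias() and getSolrClient(). A minimal sketch of such a subclass, with a hypothetical class name and alias name that are not taken from this patch:

    package org.apache.solr.update.processor;

    import org.apache.solr.client.solrj.impl.CloudSolrClient;

    public class HypotheticalRoutedAliasTest extends RoutedAliasUpdateProcessorTest {
      @Override
      public String getAlias() {
        // hypothetical alias name; a real subclass would create this routed alias in its setup
        return "myRoutedAlias";
      }

      @Override
      public CloudSolrClient getSolrClient() {
        // 'cluster' is the MiniSolrCloudCluster inherited from SolrCloudTestCase
        return cluster.getSolrClient();
      }
    }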
@@ -101,58 +106,86 @@ private boolean haveCollection(String alias, String collection) { return strings.contains(collection); } - /** @see TrackingUpdateProcessorFactory */ + /** + * @see TrackingUpdateProcessorFactory + */ String getTrackUpdatesGroupName() { return getSaferTestName(); } - void createConfigSet(String configName) throws SolrServerException, IOException, InterruptedException { + void createConfigSet(String configName) + throws SolrServerException, IOException, InterruptedException { // First create a configSet // Then we create a collection with the name of the eventual config. // We configure it, and ultimately delete the collection, leaving a modified config-set behind. // Later we create the "real" collections referencing this modified config-set. - assertEquals(0, new ConfigSetAdminRequest.Create() - .setConfigSetName(configName) - .setBaseConfigSetName("_default") - .process(getSolrClient()).getStatus()); + assertEquals( + 0, + new ConfigSetAdminRequest.Create() + .setConfigSetName(configName) + .setBaseConfigSetName("_default") + .process(getSolrClient()) + .getStatus()); CollectionAdminRequest.createCollection(configName, configName, 1, 1).process(getSolrClient()); // TODO: fix SOLR-13059, a case where this wait isn't working ~0.3% of the time without the sleep. - waitCol(1,configName); + waitCol(1, configName); Thread.sleep(500); // YUCK but works (beasts 2500x20 ok vs failing in ~500x20 every time) // manipulate the config... - checkNoError(getSolrClient().request(new V2Request.Builder("/collections/" + configName + "/config") - .withMethod(SolrRequest.METHOD.POST) - .withPayload("{" + - " 'set-user-property' : {'update.autoCreateFields':false}," + // no data driven - " 'add-updateprocessor' : {" + - " 'name':'tolerant', 'class':'solr.TolerantUpdateProcessorFactory'" + - " }," + - // See TrackingUpdateProcessorFactory javadocs for details... - " 'add-updateprocessor' : {" + - " 'name':'tracking-testSliceRouting', 'class':'solr.TrackingUpdateProcessorFactory', 'group':'" + getTrackUpdatesGroupName() + "'" + - " }," + - " 'add-updateprocessor' : {" + // for testing - " 'name':'inc', 'class':'" + IncrementURPFactory.class.getName() + "'," + - " 'fieldName':'" + getIntField() + "'" + - " }," + - "}").build())); + checkNoError( + getSolrClient() + .request( + new V2Request.Builder("/collections/" + configName + "/config") + .withMethod(SolrRequest.METHOD.POST) + .withPayload( + "{" + + " 'set-user-property' : {'update.autoCreateFields':false}," + + // no data driven + " 'add-updateprocessor' : {" + + " 'name':'tolerant', 'class':'solr.TolerantUpdateProcessorFactory'" + + " }," + + + // See TrackingUpdateProcessorFactory javadocs for details... + " 'add-updateprocessor' : {" + + " 'name':'tracking-testSliceRouting', 'class':'solr.TrackingUpdateProcessorFactory', 'group':'" + + getTrackUpdatesGroupName() + + "'" + + " }," + + " 'add-updateprocessor' : {" + + // for testing + " 'name':'inc', 'class':'" + + IncrementURPFactory.class.getName() + + "'," + + " 'fieldName':'" + + getIntField() + + "'" + + " }," + + "}") + .build())); // only sometimes test with "tolerant" URP: final String urpNames = "inc" + (random().nextBoolean() ? 
",tolerant" : ""); - checkNoError(getSolrClient().request(new V2Request.Builder("/collections/" + configName + "/config/params") - .withMethod(SolrRequest.METHOD.POST) - .withPayload("{" + - " 'set' : {" + - " '_UPDATE' : {'processor':'" + urpNames + "'}" + - " }" + - "}").build())); + checkNoError( + getSolrClient() + .request( + new V2Request.Builder("/collections/" + configName + "/config/params") + .withMethod(SolrRequest.METHOD.POST) + .withPayload( + "{" + + " 'set' : {" + + " '_UPDATE' : {'processor':'" + + urpNames + + "'}" + + " }" + + "}") + .build())); CollectionAdminRequest.deleteCollection(configName).process(getSolrClient()); assertTrue( - new ConfigSetAdminRequest.List().process(getSolrClient()).getConfigSets() - .contains(configName) - ); + new ConfigSetAdminRequest.List() + .process(getSolrClient()) + .getConfigSets() + .contains(configName)); } String getIntField() { @@ -167,10 +200,12 @@ void checkNoError(NamedList response) { @SuppressWarnings("WeakerAccess") Set getLeaderCoreNames(ClusterState clusterState) { - Set leaders = new TreeSet<>(); // sorted just to make it easier to read when debugging... + Set leaders = + new TreeSet<>(); // sorted just to make it easier to read when debugging... List jettySolrRunners = cluster.getJettySolrRunners(); for (JettySolrRunner jettySolrRunner : jettySolrRunners) { - List coreDescriptors = jettySolrRunner.getCoreContainer().getCoreDescriptors(); + List coreDescriptors = + jettySolrRunner.getCoreContainer().getCoreDescriptors(); for (CoreDescriptor core : coreDescriptors) { String nodeName = jettySolrRunner.getNodeName(); String collectionName = core.getCollectionName(); @@ -191,12 +226,22 @@ void assertRouting(int numShards, List updateCommands) throws IOE ClusterStateProvider clusterStateProvider = cloudSolrClient.getClusterStateProvider(); clusterStateProvider.connect(); Set leaders = getLeaderCoreNames(clusterStateProvider.getClusterState()); - assertEquals("should have " + 3 * numShards + " leaders, " + numShards + " per collection", 3 * numShards, leaders.size()); + assertEquals( + "should have " + 3 * numShards + " leaders, " + numShards + " per collection", + 3 * numShards, + leaders.size()); assertEquals(3, updateCommands.size()); for (UpdateCommand updateCommand : updateCommands) { - String node = (String) updateCommand.getReq().getContext().get(TrackingUpdateProcessorFactory.REQUEST_NODE); - assertTrue("Update was not routed to a leader (" + node + " not in list of leaders" + leaders, leaders.contains(node)); + String node = + (String) + updateCommand + .getReq() + .getContext() + .get(TrackingUpdateProcessorFactory.REQUEST_NODE); + assertTrue( + "Update was not routed to a leader (" + node + " not in list of leaders" + leaders, + leaders.contains(node)); } } } @@ -215,12 +260,18 @@ protected void waitCoreCount(String collection, int count) { for (CoreDescriptor coreDescriptor : coreDescriptors) { String collectionName = coreDescriptor.getCollectionName(); if (collection.equals(collectionName)) { - coreFooCount ++; + coreFooCount++; } } } if (NANOSECONDS.toSeconds(System.nanoTime() - start) > 60) { - fail("took over 60 seconds after collection creation to update aliases:"+collection + " core count=" + coreFooCount + " was looking for " + count); + fail( + "took over 60 seconds after collection creation to update aliases:" + + collection + + " core count=" + + coreFooCount + + " was looking for " + + count); } else { try { Thread.sleep(500); @@ -229,17 +280,18 @@ protected void waitCoreCount(String collection, int count) 
{ fail(e.getMessage()); } } - } while(coreFooCount != count); + } while (coreFooCount != count); } - public abstract String getAlias() ; - - public abstract CloudSolrClient getSolrClient() ; + public abstract String getAlias(); + public abstract CloudSolrClient getSolrClient(); @SuppressWarnings("WeakerAccess") void waitCol(int slices, String collection) { - waitForState("waiting for collections to be created", collection, + waitForState( + "waiting for collections to be created", + collection, (liveNodes, collectionState) -> { if (collectionState == null) { // per predicate javadoc, this is what we get if the collection doesn't exist at all. @@ -251,9 +303,12 @@ void waitCol(int slices, String collection) { }); } - /** Adds these documents and commits, returning when they are committed. - * We randomly go about this in different ways. */ - void addDocsAndCommit(boolean aliasOnly, SolrInputDocument... solrInputDocuments) throws Exception { + /** + * Adds these documents and commits, returning when they are committed. We randomly go about this + * in different ways. + */ + void addDocsAndCommit(boolean aliasOnly, SolrInputDocument... solrInputDocuments) + throws Exception { // we assume all docs will be added (none too old/new to cause exception) Collections.shuffle(Arrays.asList(solrInputDocuments), random()); @@ -262,7 +317,11 @@ void addDocsAndCommit(boolean aliasOnly, SolrInputDocument... solrInputDocuments List collections = new ArrayList<>(); collections.add(getAlias()); if (!aliasOnly) { - collections.addAll(new CollectionAdminRequest.ListAliases().process(getSolrClient()).getAliasesAsLists().get(getAlias())); + collections.addAll( + new CollectionAdminRequest.ListAliases() + .process(getSolrClient()) + .getAliasesAsLists() + .get(getAlias())); } int commitWithin = random().nextBoolean() ? -1 : 500; // if -1, we commit explicitly instead @@ -272,8 +331,9 @@ void addDocsAndCommit(boolean aliasOnly, SolrInputDocument... solrInputDocuments ExecutorService exec = null; try (CloudSolrClient solrClient = getCloudSolrClient(cluster)) { try { - exec = ExecutorUtil.newMDCAwareFixedThreadPool(1 + random().nextInt(2), - new SolrNamedThreadFactory(getSaferTestName())); + exec = + ExecutorUtil.newMDCAwareFixedThreadPool( + 1 + random().nextInt(2), new SolrNamedThreadFactory(getSaferTestName())); List> futures = new ArrayList<>(solrInputDocuments.length); for (SolrInputDocument solrInputDocument : solrInputDocuments) { String col = collections.get(random().nextInt(collections.size())); @@ -304,8 +364,9 @@ void addDocsAndCommit(boolean aliasOnly, SolrInputDocument... solrInputDocuments // check that it all got committed eventually String docsQ = "{!terms f=id}" - + Arrays.stream(solrInputDocuments).map(d -> d.getFieldValue("id").toString()) - .collect(Collectors.joining(",")); + + Arrays.stream(solrInputDocuments) + .map(d -> d.getFieldValue("id").toString()) + .collect(Collectors.joining(",")); int numDocs = queryNumDocs(docsQ); if (numDocs == solrInputDocuments.length) { System.err.println("Docs committed sooner than expected. Bug or slow test env?"); @@ -313,14 +374,14 @@ void addDocsAndCommit(boolean aliasOnly, SolrInputDocument... 
solrInputDocuments } // wait until it's committed Thread.sleep(commitWithin); - for (int idx = 0; idx < 100; ++idx) { // Loop for up to 10 seconds waiting for commit to catch up + // Loop for up to 10 seconds waiting for commit to catch up + for (int idx = 0; idx < 100; ++idx) { numDocs = queryNumDocs(docsQ); if (numDocs == solrInputDocuments.length) break; Thread.sleep(100); } - assertEquals("not committed. Bug or a slow test?", - solrInputDocuments.length, numDocs); + assertEquals("not committed. Bug or a slow test?", solrInputDocuments.length, numDocs); } } @@ -328,19 +389,22 @@ void assertUpdateResponse(UpdateResponse rsp) { // use of TolerantUpdateProcessor can cause non-thrown "errors" that we need to check for @SuppressWarnings({"rawtypes"}) List errors = (List) rsp.getResponseHeader().get("errors"); - assertTrue("Expected no errors: " + errors,errors == null || errors.isEmpty()); + assertTrue("Expected no errors: " + errors, errors == null || errors.isEmpty()); } private int queryNumDocs(String q) throws SolrServerException, IOException { - return (int) getSolrClient().query(getAlias(), params("q", q, "rows", "0")).getResults().getNumFound(); + return (int) + getSolrClient().query(getAlias(), params("q", q, "rows", "0")).getResults().getNumFound(); } /** Adds the docs to Solr via {@link #getSolrClient()} with the params */ @SuppressWarnings("SameParameterValue") - protected UpdateResponse add(String collection, Collection docs, SolrParams params) throws SolrServerException, IOException { + protected UpdateResponse add( + String collection, Collection docs, SolrParams params) + throws SolrServerException, IOException { UpdateRequest req = new UpdateRequest(); if (params != null) { - req.setParams(new ModifiableSolrParams(params));// copy because will be modified + req.setParams(new ModifiableSolrParams(params)); // copy because will be modified } req.add(docs); return req.process(getSolrClient(), collection); @@ -349,9 +413,10 @@ protected UpdateResponse add(String collection, Collection do public static class IncrementURPFactory extends FieldMutatingUpdateProcessorFactory { @Override - public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) { - return FieldValueMutatingUpdateProcessor.valueMutator( getSelector(), next, - (src) -> Integer.valueOf(src.toString()) + 1); + public UpdateRequestProcessor getInstance( + SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) { + return FieldValueMutatingUpdateProcessor.valueMutator( + getSelector(), next, (src) -> Integer.valueOf(src.toString()) + 1); } } } diff --git a/solr/core/src/test/org/apache/solr/update/processor/SignatureUpdateProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/SignatureUpdateProcessorFactoryTest.java index 54acd5f2b0f..398b9289ba8 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/SignatureUpdateProcessorFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/SignatureUpdateProcessorFactoryTest.java @@ -20,7 +20,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; - import org.apache.lucene.util.Constants; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.impl.BinaryRequestWriter; @@ -39,18 +38,15 @@ import org.junit.BeforeClass; import org.junit.Test; -/** - * - */ +/** */ public class SignatureUpdateProcessorFactoryTest extends SolrTestCaseJ4 { @BeforeClass public static void betterNotBeJ9() { - assumeFalse("FIXME: 
SOLR-5793: This test fails under IBM J9", - Constants.JAVA_VENDOR.startsWith("IBM")); + assumeFalse( + "FIXME: SOLR-5793: This test fails under IBM J9", Constants.JAVA_VENDOR.startsWith("IBM")); } - /** modified by tests as needed */ private String chain = "dedupe"; @@ -76,7 +72,7 @@ static void checkNumDocs(int n) { req.close(); } } - + @Test public void testDupeAllFieldsDetection() throws Exception { this.chain = "dedupe-allfields"; @@ -87,9 +83,9 @@ public void testDupeAllFieldsDetection() throws Exception { addDoc(adoc("v_t", "Hello Dude man!", "name", "name2'")); addDoc(commit()); - + checkNumDocs(3); - } + } @Test public void testDupeDetection() throws Exception { @@ -106,8 +102,7 @@ public void testDupeDetection() throws Exception { checkNumDocs(1); - addDoc(adoc("id", "3b", "v_t", "Hello Dude man!", "t_field", - "fake value galore")); + addDoc(adoc("id", "3b", "v_t", "Hello Dude man!", "t_field", "fake value galore")); addDoc(commit()); @@ -140,46 +135,46 @@ public void testMultiThreaded() throws Exception { threads = new Thread[7]; for (int i = 0; i < threads.length; i++) { - threads[i] = new Thread() { - - @Override - public void run() { - for (int i = 0; i < 30; i++) { - // h.update(adoc("id", Integer.toString(1+ i), "v_t", - // "Goodbye Dude girl!")); - try { - addDoc(adoc("id", Integer.toString(1 + i), "v_t", - "Goodbye Dude girl!")); - } catch (Exception e) { - throw new RuntimeException(e); + threads[i] = + new Thread() { + + @Override + public void run() { + for (int i = 0; i < 30; i++) { + // h.update(adoc("id", Integer.toString(1+ i), "v_t", + // "Goodbye Dude girl!")); + try { + addDoc(adoc("id", Integer.toString(1 + i), "v_t", "Goodbye Dude girl!")); + } catch (Exception e) { + throw new RuntimeException(e); + } + } } - } - } - }; + }; threads[i].setName("testThread-" + i); } threads2 = new Thread[3]; for (int i = 0; i < threads2.length; i++) { - threads2[i] = new Thread() { - - @Override - public void run() { - for (int i = 0; i < 10; i++) { - // h.update(adoc("id" , Integer.toString(1+ i + 10000), "v_t", - // "Goodbye Dude girl")); - // h.update(commit()); - try { - addDoc(adoc("id", Integer.toString(1 + i), "v_t", - "Goodbye Dude girl!")); - addDoc(commit()); - } catch (Exception e) { - throw new RuntimeException(e); + threads2[i] = + new Thread() { + + @Override + public void run() { + for (int i = 0; i < 10; i++) { + // h.update(adoc("id" , Integer.toString(1+ i + 10000), "v_t", + // "Goodbye Dude girl")); + // h.update(commit()); + try { + addDoc(adoc("id", Integer.toString(1 + i), "v_t", "Goodbye Dude girl!")); + addDoc(commit()); + } catch (Exception e) { + throw new RuntimeException(e); + } + } } - } - } - }; + }; threads2[i].setName("testThread2-" + i); } @@ -206,15 +201,13 @@ public void run() { checkNumDocs(1); } - /** - * a non-indexed signatureField is fine as long as overwriteDupes==false - */ + /** a non-indexed signatureField is fine as long as overwriteDupes==false */ @Test public void testNonIndexedSignatureField() throws Exception { this.chain = "stored_sig"; assertNotNull(h.getCore().getUpdateProcessingChain(this.chain)); - checkNumDocs(0); + checkNumDocs(0); addDoc(adoc("id", "2a", "v_t", "Hello Dude man!", "name", "ali babi'")); addDoc(adoc("id", "2b", "v_t", "Hello Dude man!", "name", "ali babi'")); @@ -237,22 +230,21 @@ public void testFailNonIndexedSigWithOverwriteDupes() throws Exception { } catch (Exception e) { exception_ok = true; } - assertTrue("Should have gotten an exception from inform(SolrCore)", - exception_ok); + assertTrue("Should 
have gotten an exception from inform(SolrCore)", exception_ok); } - + @Test @SuppressWarnings({"rawtypes"}) public void testNonStringFieldsValues() throws Exception { this.chain = "dedupe-allfields"; assertNotNull(h.getCore().getUpdateProcessingChain(this.chain)); - Map params = new HashMap<>(); + Map params = new HashMap<>(); MultiMapSolrParams mmparams = new MultiMapSolrParams(params); params.put(UpdateParams.UPDATE_CHAIN, new String[] {chain}); - + UpdateRequest ureq = new UpdateRequest(); - + { SolrInputDocument doc = new SolrInputDocument(); doc.addField("v_t", "same"); @@ -283,14 +275,14 @@ public void testNonStringFieldsValues() throws Exception { } docB.addField("ints_is", ints); - for (SolrInputDocument doc : new SolrInputDocument[] { docA, docB }) { + for (SolrInputDocument doc : new SolrInputDocument[] {docA, docB}) { doc.addField("v_t", "same"); doc.addField("weight", 3.0f); ureq.add(doc); } } { - // now add another doc with the same values as A & B above, + // now add another doc with the same values as A & B above, // but diff ints_is collection (diff order) SolrInputDocument doc = new SolrInputDocument(); doc.addField("v_t", "same"); @@ -300,20 +292,20 @@ public void testNonStringFieldsValues() throws Exception { } ureq.add(doc); } - LocalSolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), mmparams); try { - req.setContentStreams(Collections.singletonList(ContentStreamBase.create(new BinaryRequestWriter(), ureq))); + req.setContentStreams( + Collections.singletonList(ContentStreamBase.create(new BinaryRequestWriter(), ureq))); UpdateRequestHandler h = new UpdateRequestHandler(); h.init(new NamedList<>()); h.handleRequestBody(req, new SolrQueryResponse()); } finally { req.close(); } - + addDoc(commit()); - + checkNumDocs(4); } @@ -322,13 +314,14 @@ private static final class UnusualList extends ArrayList { public UnusualList(int size) { super(size); } + @Override public String toString() { return "UNUSUAL:" + super.toString(); } } - private void addDoc(String doc) throws Exception { + private void addDoc(String doc) throws Exception { addDoc(doc, chain); } } diff --git a/solr/core/src/test/org/apache/solr/update/processor/SkipExistingDocumentsProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/SkipExistingDocumentsProcessorFactoryTest.java index a7c2477b600..550edc12328 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/SkipExistingDocumentsProcessorFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/SkipExistingDocumentsProcessorFactoryTest.java @@ -22,9 +22,8 @@ import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; -import java.io.IOException; - import com.google.common.collect.ImmutableMap; +import java.io.IOException; import org.apache.lucene.util.BytesRef; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; @@ -42,9 +41,10 @@ public class SkipExistingDocumentsProcessorFactoryTest { private BytesRef docId = new BytesRef(); + @SuppressWarnings({"rawtypes"}) private SolrQueryRequest defaultRequest = new LocalSolrQueryRequest(null, new NamedList()); - + @BeforeClass public static void beforeClass() { SolrTestCaseJ4.assumeWorkingMockito(); @@ -52,7 +52,7 @@ public static void beforeClass() { // Tests for logic in the factory - @Test(expected=SolrException.class) + @Test(expected = SolrException.class) public void testExceptionIfSkipInsertParamNonBoolean() { SkipExistingDocumentsProcessorFactory factory = new 
SkipExistingDocumentsProcessorFactory(); NamedList initArgs = new NamedList<>(); @@ -60,7 +60,7 @@ public void testExceptionIfSkipInsertParamNonBoolean() { factory.init(initArgs); } - @Test(expected=SolrException.class) + @Test(expected = SolrException.class) public void testExceptionIfSkipUpdateParamNonBoolean() { SkipExistingDocumentsProcessorFactory factory = new SkipExistingDocumentsProcessorFactory(); NamedList initArgs = new NamedList<>(); @@ -68,7 +68,7 @@ public void testExceptionIfSkipUpdateParamNonBoolean() { factory.init(initArgs); } - @Test(expected=SolrException.class) + @Test(expected = SolrException.class) public void testExceptionIfNextProcessorIsNull() { SkipExistingDocumentsProcessorFactory factory = new SkipExistingDocumentsProcessorFactory(); NamedList initArgs = new NamedList<>(); @@ -77,7 +77,7 @@ public void testExceptionIfNextProcessorIsNull() { factory.getInstance(defaultRequest, new SolrQueryResponse(), null); } - @Test(expected=SolrException.class) + @Test(expected = SolrException.class) public void testExceptionIfNextProcessorNotDistributed() { SkipExistingDocumentsProcessorFactory factory = new SkipExistingDocumentsProcessorFactory(); NamedList initArgs = new NamedList<>(); @@ -115,7 +115,8 @@ public void testSkipInsertsAndUpdatesDefaultToTrueIfNotConfigured() { factory.init(initArgs); UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); - SkipExistingDocumentsUpdateProcessor processor = factory.getInstance(defaultRequest, new SolrQueryResponse(), next); + SkipExistingDocumentsUpdateProcessor processor = + factory.getInstance(defaultRequest, new SolrQueryResponse(), next); assertTrue("Expected skipInsertIfExists to be true", processor.isSkipInsertIfExists()); assertTrue("Expected skipUpdateIfMissing to be true", processor.isSkipUpdateIfMissing()); } @@ -128,7 +129,8 @@ public void testSkipInsertsFalseIfInInitArgs() { factory.init(initArgs); UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); - SkipExistingDocumentsUpdateProcessor processor = factory.getInstance(defaultRequest, new SolrQueryResponse(), next); + SkipExistingDocumentsUpdateProcessor processor = + factory.getInstance(defaultRequest, new SolrQueryResponse(), next); assertFalse("Expected skipInsertIfExists to be false", processor.isSkipInsertIfExists()); assertTrue("Expected skipUpdateIfMissing to be true", processor.isSkipUpdateIfMissing()); } @@ -141,7 +143,8 @@ public void testSkipUpdatesFalseIfInInitArgs() { factory.init(initArgs); UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); - SkipExistingDocumentsUpdateProcessor processor = factory.getInstance(defaultRequest, new SolrQueryResponse(), next); + SkipExistingDocumentsUpdateProcessor processor = + factory.getInstance(defaultRequest, new SolrQueryResponse(), next); assertTrue("Expected skipInsertIfExists to be true", processor.isSkipInsertIfExists()); assertFalse("Expected skipUpdateIfMissing to be false", processor.isSkipUpdateIfMissing()); } @@ -155,7 +158,8 @@ public void testSkipBothFalseIfInInitArgs() { factory.init(initArgs); UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); - SkipExistingDocumentsUpdateProcessor processor = factory.getInstance(defaultRequest, new SolrQueryResponse(), next); + SkipExistingDocumentsUpdateProcessor processor = + factory.getInstance(defaultRequest, new SolrQueryResponse(), next); assertFalse("Expected skipInsertIfExists to be false", processor.isSkipInsertIfExists()); assertFalse("Expected 
skipUpdateIfMissing to be false", processor.isSkipUpdateIfMissing()); } @@ -171,7 +175,8 @@ public void testSkipInsertsFalseIfInitArgsTrueButFalseStringInRequest() { SolrQueryRequest req = new LocalSolrQueryRequest(null, requestArgs); UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); - SkipExistingDocumentsUpdateProcessor processor = factory.getInstance(req, new SolrQueryResponse(), next); + SkipExistingDocumentsUpdateProcessor processor = + factory.getInstance(req, new SolrQueryResponse(), next); assertFalse("Expected skipInsertIfExists to be false", processor.isSkipInsertIfExists()); assertTrue("Expected skipUpdateIfMissing to be true", processor.isSkipUpdateIfMissing()); } @@ -187,7 +192,8 @@ public void testSkipUpdatesFalseIfInitArgsTrueButFalseBooleanInRequest() { SolrQueryRequest req = new LocalSolrQueryRequest(null, requestArgs); UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); - SkipExistingDocumentsUpdateProcessor processor = factory.getInstance(req, new SolrQueryResponse(), next); + SkipExistingDocumentsUpdateProcessor processor = + factory.getInstance(req, new SolrQueryResponse(), next); assertTrue("Expected skipInsertIfExists to be true", processor.isSkipInsertIfExists()); assertFalse("Expected skipUpdateIfMissing to be false", processor.isSkipUpdateIfMissing()); } @@ -204,19 +210,19 @@ public void testSkipUpdatesTrueIfInitArgsFalseButTrueStringInRequest() { SolrQueryRequest req = new LocalSolrQueryRequest(null, requestArgs); UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); - SkipExistingDocumentsUpdateProcessor processor = factory.getInstance(req, new SolrQueryResponse(), next); + SkipExistingDocumentsUpdateProcessor processor = + factory.getInstance(req, new SolrQueryResponse(), next); assertTrue("Expected skipInsertIfExists to be true", processor.isSkipInsertIfExists()); assertTrue("Expected skipUpdateIfMissing to be true", processor.isSkipUpdateIfMissing()); } - // Tests for logic in the processor @Test public void testSkippableInsertIsNotSkippedIfNotLeader() throws IOException { UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); - SkipExistingDocumentsUpdateProcessor processor - = Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, true, true)); + SkipExistingDocumentsUpdateProcessor processor = + Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, true, true)); AddUpdateCommand cmd = createInsertUpdateCmd(defaultRequest); doReturn(false).when(processor).isLeader(cmd); @@ -229,8 +235,8 @@ public void testSkippableInsertIsNotSkippedIfNotLeader() throws IOException { @Test public void testSkippableInsertIsNotSkippedIfSkipInsertsFalse() throws IOException { UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); - SkipExistingDocumentsUpdateProcessor processor - = Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, false, false)); + SkipExistingDocumentsUpdateProcessor processor = + Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, false, false)); AddUpdateCommand cmd = createInsertUpdateCmd(defaultRequest); doReturn(true).when(processor).isLeader(cmd); @@ -243,8 +249,8 @@ public void testSkippableInsertIsNotSkippedIfSkipInsertsFalse() throws IOExcepti @Test public void testSkippableInsertIsSkippedIfSkipInsertsTrue() throws IOException { UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); - 
SkipExistingDocumentsUpdateProcessor processor - = Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, true, false)); + SkipExistingDocumentsUpdateProcessor processor = + Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, true, false)); AddUpdateCommand cmd = createInsertUpdateCmd(defaultRequest); doReturn(true).when(processor).isLeader(cmd); @@ -257,8 +263,8 @@ public void testSkippableInsertIsSkippedIfSkipInsertsTrue() throws IOException { @Test public void testNonSkippableInsertIsNotSkippedIfSkipInsertsTrue() throws IOException { UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); - SkipExistingDocumentsUpdateProcessor processor - = Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, true, false)); + SkipExistingDocumentsUpdateProcessor processor = + Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, true, false)); AddUpdateCommand cmd = createInsertUpdateCmd(defaultRequest); doReturn(true).when(processor).isLeader(cmd); @@ -271,8 +277,8 @@ public void testNonSkippableInsertIsNotSkippedIfSkipInsertsTrue() throws IOExcep @Test public void testSkippableUpdateIsNotSkippedIfNotLeader() throws IOException { UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); - SkipExistingDocumentsUpdateProcessor processor - = Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, true, true)); + SkipExistingDocumentsUpdateProcessor processor = + Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, true, true)); AddUpdateCommand cmd = createAtomicUpdateCmd(defaultRequest); doReturn(false).when(processor).isLeader(cmd); @@ -285,8 +291,8 @@ public void testSkippableUpdateIsNotSkippedIfNotLeader() throws IOException { @Test public void testSkippableUpdateIsNotSkippedIfSkipUpdatesFalse() throws IOException { UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); - SkipExistingDocumentsUpdateProcessor processor - = Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, false, false)); + SkipExistingDocumentsUpdateProcessor processor = + Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, false, false)); AddUpdateCommand cmd = createAtomicUpdateCmd(defaultRequest); doReturn(true).when(processor).isLeader(cmd); @@ -299,8 +305,8 @@ public void testSkippableUpdateIsNotSkippedIfSkipUpdatesFalse() throws IOExcepti @Test public void testSkippableUpdateIsSkippedIfSkipUpdatesTrue() throws IOException { UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); - SkipExistingDocumentsUpdateProcessor processor - = Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, false, true)); + SkipExistingDocumentsUpdateProcessor processor = + Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, false, true)); AddUpdateCommand cmd = createAtomicUpdateCmd(defaultRequest); doReturn(true).when(processor).isLeader(cmd); @@ -313,8 +319,8 @@ public void testSkippableUpdateIsSkippedIfSkipUpdatesTrue() throws IOException { @Test public void testNonSkippableUpdateIsNotSkippedIfSkipUpdatesTrue() throws IOException { UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); - SkipExistingDocumentsUpdateProcessor processor - = Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, false, true)); + SkipExistingDocumentsUpdateProcessor processor = + Mockito.spy(new 
SkipExistingDocumentsUpdateProcessor(defaultRequest, next, false, true)); AddUpdateCommand cmd = createAtomicUpdateCmd(defaultRequest); doReturn(true).when(processor).isLeader(cmd); diff --git a/solr/core/src/test/org/apache/solr/update/processor/TemplateUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/TemplateUpdateProcessorTest.java index c8ae8b25641..878238bcd4a 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/TemplateUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/TemplateUpdateProcessorTest.java @@ -18,7 +18,6 @@ package org.apache.solr.update.processor; import java.lang.invoke.MethodHandles; - import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.client.solrj.response.QueryResponse; @@ -40,12 +39,9 @@ public class TemplateUpdateProcessorTest extends SolrCloudTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - @BeforeClass public static void setupCluster() throws Exception { - configureCluster(5) - .addConfig("conf1", configset("cloud-minimal")) - .configure(); + configureCluster(5).addConfig("conf1", configset("cloud-minimal")).configure(); } @After @@ -54,26 +50,25 @@ public void after() throws Exception { cluster.shutdown(); } - @org.junit.Rule - public ExpectedException expectedException = ExpectedException.none(); - + @org.junit.Rule public ExpectedException expectedException = ExpectedException.none(); public void testSimple() throws Exception { - ModifiableSolrParams params = new ModifiableSolrParams() - .add("processor", "template") - .add("template.field", "id:{firstName}_{lastName}") - .add("template.field", "another:{lastName}_{firstName}") - .add("template.field", "missing:{lastName}_{unKnown}"); - AddUpdateCommand cmd = new AddUpdateCommand(new LocalSolrQueryRequest(null, - params + ModifiableSolrParams params = + new ModifiableSolrParams() + .add("processor", "template") + .add("template.field", "id:{firstName}_{lastName}") + .add("template.field", "another:{lastName}_{firstName}") + .add("template.field", "missing:{lastName}_{unKnown}"); + AddUpdateCommand cmd = new AddUpdateCommand(new LocalSolrQueryRequest(null, params)); - )); cmd.solrDoc = new SolrInputDocument(); cmd.solrDoc.addField("firstName", "Tom"); cmd.solrDoc.addField("lastName", "Cruise"); - new TemplateUpdateProcessorFactory().getInstance(cmd.getReq(), new SolrQueryResponse(), null).processAdd(cmd); + new TemplateUpdateProcessorFactory() + .getInstance(cmd.getReq(), new SolrQueryResponse(), null) + .processAdd(cmd); assertEquals("Tom_Cruise", cmd.solrDoc.getFieldValue("id")); assertEquals("Cruise_Tom", cmd.solrDoc.getFieldValue("another")); assertEquals("Cruise_", cmd.solrDoc.getFieldValue("missing")); @@ -81,21 +76,26 @@ public void testSimple() throws Exception { SolrInputDocument solrDoc = new SolrInputDocument(); solrDoc.addField("id", "1"); - params = new ModifiableSolrParams() - .add("processor", "template") - .add("commit", "true") - .add("template.field", "x_s:key_{id}"); + params = + new ModifiableSolrParams() + .add("processor", "template") + .add("commit", "true") + .add("template.field", "x_s:key_{id}"); params.add("commit", "true"); UpdateRequest add = new UpdateRequest().add(solrDoc); add.setParams(params); - NamedList result = cluster.getSolrClient().request(CollectionAdminRequest.createCollection("c", "conf1", 1, 
1).setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE)); + NamedList result = + cluster + .getSolrClient() + .request( + CollectionAdminRequest.createCollection("c", "conf1", 1, 1) + .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE)); Utils.toJSONString(result.asMap(4)); - AbstractFullDistribZkTestBase.waitForCollection(cluster.getSolrClient().getZkStateReader(), "c",1); + AbstractFullDistribZkTestBase.waitForCollection( + cluster.getSolrClient().getZkStateReader(), "c", 1); cluster.getSolrClient().request(add, "c"); - QueryResponse rsp = cluster.getSolrClient().query("c", - new ModifiableSolrParams().add("q","id:1")); - assertEquals( "key_1", rsp.getResults().get(0).getFieldValue("x_s")); - - + QueryResponse rsp = + cluster.getSolrClient().query("c", new ModifiableSolrParams().add("q", "id:1")); + assertEquals("key_1", rsp.getResults().get(0).getFieldValue("x_s")); } } diff --git a/solr/core/src/test/org/apache/solr/update/processor/TestCloudDeduplication.java b/solr/core/src/test/org/apache/solr/update/processor/TestCloudDeduplication.java index f945ec79434..6b2745e09f3 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/TestCloudDeduplication.java +++ b/solr/core/src/test/org/apache/solr/update/processor/TestCloudDeduplication.java @@ -19,7 +19,7 @@ import java.util.ArrayList; import java.util.List; - +import org.apache.lucene.util.IOUtils; import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.request.CollectionAdminRequest; @@ -30,33 +30,31 @@ import org.apache.solr.client.solrj.response.json.BucketBasedJsonFacet; import org.apache.solr.client.solrj.response.json.BucketJsonFacet; import org.apache.solr.cloud.SolrCloudTestCase; - -import org.apache.lucene.util.IOUtils; - import org.junit.After; import org.junit.AfterClass; import org.junit.BeforeClass; /** - * Tests the ability to use {@link SignatureUpdateProcessorFactory} to generate uniqueKeys for "duplicate" documents - * in cloud mode. + * Tests the ability to use {@link SignatureUpdateProcessorFactory} to generate uniqueKeys for + * "duplicate" documents in cloud mode. */ public class TestCloudDeduplication extends SolrCloudTestCase { - public final static String COLLECTION = "dedup_col"; - + public static final String COLLECTION = "dedup_col"; + /** One client per node */ private static final List<SolrClient> NODE_CLIENTS = new ArrayList<>(7); - /** clients (including cloud client) for easy randomization and looping of collection level requests */ + /** + * clients (including cloud client) for easy randomization and looping of collection level + * requests + */ private static final List<SolrClient> CLIENTS = new ArrayList<>(7); - + @BeforeClass public static void setupCluster() throws Exception { final int numShards = usually() ? 2 : 1; final int numReplicas = usually() ?
2 : 1; - final int numNodes = 1 + (numShards * numReplicas); // at least one node w/o any replicas - configureCluster(numNodes) - .addConfig("conf", configset("dedup")) - .configure(); + final int numNodes = 1 + (numShards * numReplicas); // at least one node w/o any replicas + configureCluster(numNodes).addConfig("conf", configset("dedup")).configure(); CLIENTS.add(cluster.getSolrClient()); for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { @@ -64,16 +62,17 @@ public static void setupCluster() throws Exception { NODE_CLIENTS.add(c); CLIENTS.add(c); } - - assertEquals("failed to create collection", 0, - CollectionAdminRequest - .createCollection(COLLECTION, "conf", numShards, numReplicas) - .process(cluster.getSolrClient()).getStatus()); - cluster.waitForActiveCollection(COLLECTION, numShards, numShards * numReplicas); + assertEquals( + "failed to create collection", + 0, + CollectionAdminRequest.createCollection(COLLECTION, "conf", numShards, numReplicas) + .process(cluster.getSolrClient()) + .getStatus()); + cluster.waitForActiveCollection(COLLECTION, numShards, numShards * numReplicas); } - + @AfterClass private static void closeClients() throws Exception { try { @@ -86,16 +85,16 @@ private static void closeClients() throws Exception { @After public void clearCollection() throws Exception { - assertEquals("DBQ failed", 0, cluster.getSolrClient().deleteByQuery(COLLECTION, "*:*").getStatus()); + assertEquals( + "DBQ failed", 0, cluster.getSolrClient().deleteByQuery(COLLECTION, "*:*").getStatus()); assertEquals("commit failed", 0, cluster.getSolrClient().commit(COLLECTION).getStatus()); } - public void testRandomDocs() throws Exception { // index some random documents, using a mix-match of batches, to various SolrClients - - final int uniqueMod = atLeast(43); // the number of unique sig values expected + + final int uniqueMod = atLeast(43); // the number of unique sig values expected final int numBatches = atLeast(uniqueMod); // we'll add at least one doc per batch int docCounter = 0; for (int batchId = 0; batchId < numBatches; batchId++) { @@ -103,42 +102,44 @@ public void testRandomDocs() throws Exception { final int batchSize = atLeast(2); for (int i = 0; i < batchSize; i++) { docCounter++; - ureq.add(sdoc(// NOTE: No 'id' field, SignatureUpdateProcessor fills it in for us - "data_s", (docCounter % uniqueMod))); + ureq.add( + sdoc( // NOTE: No 'id' field, SignatureUpdateProcessor fills it in for us + "data_s", (docCounter % uniqueMod))); } assertEquals("add failed", 0, ureq.process(getRandClient(), COLLECTION).getStatus()); } assertEquals("commit failed", 0, getRandClient().commit(COLLECTION).getStatus()); - + assert docCounter > uniqueMod; - + // query our collection and confirm no duplicates on the signature field (using faceting) // Check every (node) for consistency... 
- final JsonQueryRequest req = new JsonQueryRequest() - .setQuery("*:*") - .setLimit(0) - .withFacet("data_facet", new TermsFacetMap("data_s").setLimit(uniqueMod + 1)); + final JsonQueryRequest req = + new JsonQueryRequest() + .setQuery("*:*") + .setLimit(0) + .withFacet("data_facet", new TermsFacetMap("data_s").setLimit(uniqueMod + 1)); for (SolrClient client : CLIENTS) { final QueryResponse rsp = req.process(client, COLLECTION); try { assertEquals(0, rsp.getStatus()); assertEquals(uniqueMod, rsp.getResults().getNumFound()); - - final BucketBasedJsonFacet facet = rsp.getJsonFacetingResponse().getBucketBasedFacets("data_facet"); + + final BucketBasedJsonFacet facet = + rsp.getJsonFacetingResponse().getBucketBasedFacets("data_facet"); assertEquals(uniqueMod, facet.getBuckets().size()); for (BucketJsonFacet bucket : facet.getBuckets()) { - assertEquals("Bucket " + bucket.getVal(), - 1, bucket.getCount()); + assertEquals("Bucket " + bucket.getVal(), 1, bucket.getCount()); } } catch (AssertionError e) { throw new AssertionError(rsp + " + " + client + " => " + e.getMessage(), e); } } } - - /** - * returns a random SolrClient -- either a CloudSolrClient, or an HttpSolrClient pointed - * at a node in our cluster. + + /** + * returns a random SolrClient -- either a CloudSolrClient, or an HttpSolrClient pointed at a node + * in our cluster. */ private static SolrClient getRandClient() { return CLIENTS.get(random().nextInt(CLIENTS.size())); diff --git a/solr/core/src/test/org/apache/solr/update/processor/TestDocBasedVersionConstraints.java b/solr/core/src/test/org/apache/solr/update/processor/TestDocBasedVersionConstraints.java index 5132a9b7f07..20bad34abd0 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/TestDocBasedVersionConstraints.java +++ b/solr/core/src/test/org/apache/solr/update/processor/TestDocBasedVersionConstraints.java @@ -16,30 +16,29 @@ */ package org.apache.solr.update.processor; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.hasItem; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.nullValue; + import java.util.ArrayList; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; - import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.NamedList; +import org.apache.solr.common.util.SolrNamedThreadFactory; import org.apache.solr.schema.IndexSchema; import org.apache.solr.schema.SchemaField; -import org.apache.solr.common.util.SolrNamedThreadFactory; import org.junit.Before; import org.junit.BeforeClass; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.hasItem; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.not; -import static org.hamcrest.CoreMatchers.nullValue; - public class TestDocBasedVersionConstraints extends SolrTestCaseJ4 { @BeforeClass @@ -53,7 +52,6 @@ public void before() throws Exception { assertU(commit()); } - public void testSimpleUpdates() throws Exception { // skip low version against committed data @@ -61,47 +59,45 @@ public void testSimpleUpdates() throws Exception { assertU(commit()); assertU(adoc("id", "aaa", "name", "a2", "my_version_l", "1002")); 
assertU(commit()); - assertU(adoc("id", "aaa", "name", "XX", "my_version_l", "1")); - assertJQ(req("qt","/get", "id","aaa", "fl","name") - , "=={'doc':{'name':'a2'}}"); + assertU(adoc("id", "aaa", "name", "XX", "my_version_l", "1")); + assertJQ(req("qt", "/get", "id", "aaa", "fl", "name"), "=={'doc':{'name':'a2'}}"); assertU(commit()); - assertJQ(req("q","+id:aaa"), "/response/numFound==1"); - assertJQ(req("q","+id:aaa +name:XX"), "/response/numFound==0"); - assertJQ(req("q","+id:aaa +name:a2"), "/response/numFound==1"); - assertJQ(req("qt","/get", "id","aaa", "fl","name") - , "=={'doc':{'name':'a2'}}"); + assertJQ(req("q", "+id:aaa"), "/response/numFound==1"); + assertJQ(req("q", "+id:aaa +name:XX"), "/response/numFound==0"); + assertJQ(req("q", "+id:aaa +name:a2"), "/response/numFound==1"); + assertJQ(req("qt", "/get", "id", "aaa", "fl", "name"), "=={'doc':{'name':'a2'}}"); // skip low version against uncommitted data from updateLog assertU(adoc("id", "aaa", "name", "a3", "my_version_l", "1003")); - assertU(adoc("id", "aaa", "name", "XX", "my_version_l", "7")); - assertJQ(req("qt","/get", "id","aaa", "fl","name") - , "=={'doc':{'name':'a3'}}"); + assertU(adoc("id", "aaa", "name", "XX", "my_version_l", "7")); + assertJQ(req("qt", "/get", "id", "aaa", "fl", "name"), "=={'doc':{'name':'a3'}}"); assertU(commit()); - assertJQ(req("q","+id:aaa"), "/response/numFound==1"); - assertJQ(req("q","+id:aaa +name:XX"), "/response/numFound==0"); - assertJQ(req("q","+id:aaa +name:a3"), "/response/numFound==1"); - assertJQ(req("qt","/get", "id","aaa", "fl","name") - , "=={'doc':{'name':'a3'}}"); + assertJQ(req("q", "+id:aaa"), "/response/numFound==1"); + assertJQ(req("q", "+id:aaa +name:XX"), "/response/numFound==0"); + assertJQ(req("q", "+id:aaa +name:a3"), "/response/numFound==1"); + assertJQ(req("qt", "/get", "id", "aaa", "fl", "name"), "=={'doc':{'name':'a3'}}"); // interleave updates to multiple docs using same versions for (long ver = 1010; ver < 1020; ver++) { for (String id : new String[] {"aaa", "bbb", "ccc", "ddd"}) { - assertU(adoc("id", id, "my_version_l", ""+ver)); + assertU(adoc("id", id, "my_version_l", "" + ver)); } } for (String id : new String[] {"aaa", "bbb", "ccc", "ddd"}) { assertU(adoc("id", id, "name", "XX", "my_version_l", "10")); - assertJQ(req("qt","/get", "id",id, "fl","my_version_l") - , "=={'doc':{'my_version_l':"+1019+"}}"); + assertJQ( + req("qt", "/get", "id", id, "fl", "my_version_l"), + "=={'doc':{'my_version_l':" + 1019 + "}}"); } assertU(commit()); - assertJQ(req("q","name:XX"), "/response/numFound==0"); + assertJQ(req("q", "name:XX"), "/response/numFound==0"); for (String id : new String[] {"aaa", "bbb", "ccc", "ddd"}) { - assertJQ(req("q","+id:"+id), "/response/numFound==1"); - assertJQ(req("q","+name:XX +id:"+id), "/response/numFound==0"); - assertJQ(req("q","+id:"+id + " +my_version_l:1019"), "/response/numFound==1"); - assertJQ(req("qt","/get", "id",id, "fl","my_version_l") - , "=={'doc':{'my_version_l':"+1019+"}}"); + assertJQ(req("q", "+id:" + id), "/response/numFound==1"); + assertJQ(req("q", "+name:XX +id:" + id), "/response/numFound==0"); + assertJQ(req("q", "+id:" + id + " +my_version_l:1019"), "/response/numFound==1"); + assertJQ( + req("qt", "/get", "id", id, "fl", "my_version_l"), + "=={'doc':{'my_version_l':" + 1019 + "}}"); } } @@ -112,317 +108,426 @@ public void testSimpleDeletes() throws Exception { assertU(commit()); assertU(adoc("id", "aaa", "name", "a2", "my_version_l", "1002")); assertU(commit()); - deleteAndGetVersion("aaa", - 
params("del_version", "7")); - assertJQ(req("qt","/get", "id","aaa", "fl","name") - , "=={'doc':{'name':'a2'}}"); + deleteAndGetVersion("aaa", params("del_version", "7")); + assertJQ(req("qt", "/get", "id", "aaa", "fl", "name"), "=={'doc':{'name':'a2'}}"); assertU(commit()); - assertJQ(req("q","+id:aaa"), "/response/numFound==1"); - assertJQ(req("q","+id:aaa +name:a2"), "/response/numFound==1"); - assertJQ(req("qt","/get", "id","aaa", "fl","name") - , "=={'doc':{'name':'a2'}}"); + assertJQ(req("q", "+id:aaa"), "/response/numFound==1"); + assertJQ(req("q", "+id:aaa +name:a2"), "/response/numFound==1"); + assertJQ(req("qt", "/get", "id", "aaa", "fl", "name"), "=={'doc':{'name':'a2'}}"); // skip low version delete against uncommitted doc from updateLog assertU(adoc("id", "aaa", "name", "a3", "my_version_l", "1003")); - deleteAndGetVersion("aaa", - params("del_version", "8")); - assertJQ(req("qt","/get", "id","aaa", "fl","name") - , "=={'doc':{'name':'a3'}}"); + deleteAndGetVersion("aaa", params("del_version", "8")); + assertJQ(req("qt", "/get", "id", "aaa", "fl", "name"), "=={'doc':{'name':'a3'}}"); assertU(commit()); - assertJQ(req("q","+id:aaa"), "/response/numFound==1"); - assertJQ(req("q","+id:aaa +name:a3"), "/response/numFound==1"); - assertJQ(req("qt","/get", "id","aaa", "fl","name") - , "=={'doc':{'name':'a3'}}"); + assertJQ(req("q", "+id:aaa"), "/response/numFound==1"); + assertJQ(req("q", "+id:aaa +name:a3"), "/response/numFound==1"); + assertJQ(req("qt", "/get", "id", "aaa", "fl", "name"), "=={'doc':{'name':'a3'}}"); // skip low version add against uncommitted "delete" from updateLog deleteAndGetVersion("aaa", params("del_version", "1010")); assertU(adoc("id", "aaa", "name", "XX", "my_version_l", "22")); - assertJQ(req("qt","/get", "id","aaa", "fl","my_version_l") - , "=={'doc':{'my_version_l':1010}}}"); + assertJQ( + req("qt", "/get", "id", "aaa", "fl", "my_version_l"), "=={'doc':{'my_version_l':1010}}}"); assertU(commit()); - assertJQ(req("q","+id:aaa"), "/response/numFound==1"); - assertJQ(req("q","+id:aaa +name:XX"), "/response/numFound==0"); - assertJQ(req("qt","/get", "id","aaa", "fl","my_version_l") - , "=={'doc':{'my_version_l':1010}}"); + assertJQ(req("q", "+id:aaa"), "/response/numFound==1"); + assertJQ(req("q", "+id:aaa +name:XX"), "/response/numFound==0"); + assertJQ( + req("qt", "/get", "id", "aaa", "fl", "my_version_l"), "=={'doc':{'my_version_l':1010}}"); // skip low version add against committed "delete" // (delete was already done & committed above) assertU(adoc("id", "aaa", "name", "XX", "my_version_l", "23")); - assertJQ(req("qt","/get", "id","aaa", "fl","my_version_l") - , "=={'doc':{'my_version_l':1010}}}"); + assertJQ( + req("qt", "/get", "id", "aaa", "fl", "my_version_l"), "=={'doc':{'my_version_l':1010}}}"); assertU(commit()); - assertJQ(req("q","+id:aaa"), "/response/numFound==1"); - assertJQ(req("q","+id:aaa +name:XX"), "/response/numFound==0"); - assertJQ(req("qt","/get", "id","aaa", "fl","my_version_l") - , "=={'doc':{'my_version_l':1010}}"); + assertJQ(req("q", "+id:aaa"), "/response/numFound==1"); + assertJQ(req("q", "+id:aaa +name:XX"), "/response/numFound==0"); + assertJQ( + req("qt", "/get", "id", "aaa", "fl", "my_version_l"), "=={'doc':{'my_version_l':1010}}"); } /** - * Sanity check that there are no hardcoded assumptions about the - * field type used that could byte us in the ass. + * Sanity check that there are no hardcoded assumptions about the field type used that could byte + * us in the ass. 
*/ public void testFloatVersionField() throws Exception { // skip low version add & low version delete against committed doc - updateJ(jsonAdd(sdoc("id", "aaa", "name", "a1", "my_version_f", "10.01")), - params("update.chain","external-version-float")); + updateJ( + jsonAdd(sdoc("id", "aaa", "name", "a1", "my_version_f", "10.01")), + params("update.chain", "external-version-float")); assertU(commit()); - updateJ(jsonAdd(sdoc("id", "aaa", "name", "XX", "my_version_f", "4.2")), - params("update.chain","external-version-float")); + updateJ( + jsonAdd(sdoc("id", "aaa", "name", "XX", "my_version_f", "4.2")), + params("update.chain", "external-version-float")); assertU(commit()); - assertJQ(req("qt","/get", "id","aaa", "fl","name") - , "=={'doc':{'name':'a1'}}"); - deleteAndGetVersion("aaa", params("del_version", "7", - "update.chain","external-version-float")); - assertJQ(req("qt","/get", "id","aaa", "fl","name") - , "=={'doc':{'name':'a1'}}"); + assertJQ(req("qt", "/get", "id", "aaa", "fl", "name"), "=={'doc':{'name':'a1'}}"); + deleteAndGetVersion( + "aaa", + params( + "del_version", "7", + "update.chain", "external-version-float")); + assertJQ(req("qt", "/get", "id", "aaa", "fl", "name"), "=={'doc':{'name':'a1'}}"); assertU(commit()); - + // skip low version delete against uncommitted doc from updateLog - updateJ(jsonAdd(sdoc("id", "aaa", "name", "a2", "my_version_f", "10.02")), - params("update.chain","external-version-float")); - deleteAndGetVersion("aaa", params("del_version", "8", - "update.chain","external-version-float")); - assertJQ(req("qt","/get", "id","aaa", "fl","name") - , "=={'doc':{'name':'a2'}}"); - assertU(commit()); - assertJQ(req("q","+id:aaa"), "/response/numFound==1"); - assertJQ(req("q","+id:aaa +name:a2"), "/response/numFound==1"); - assertJQ(req("qt","/get", "id","aaa", "fl","name") - , "=={'doc':{'name':'a2'}}"); + updateJ( + jsonAdd(sdoc("id", "aaa", "name", "a2", "my_version_f", "10.02")), + params("update.chain", "external-version-float")); + deleteAndGetVersion( + "aaa", + params( + "del_version", "8", + "update.chain", "external-version-float")); + assertJQ(req("qt", "/get", "id", "aaa", "fl", "name"), "=={'doc':{'name':'a2'}}"); + assertU(commit()); + assertJQ(req("q", "+id:aaa"), "/response/numFound==1"); + assertJQ(req("q", "+id:aaa +name:a2"), "/response/numFound==1"); + assertJQ(req("qt", "/get", "id", "aaa", "fl", "name"), "=={'doc':{'name':'a2'}}"); // skip low version add against uncommitted "delete" from updateLog - deleteAndGetVersion("aaa", params("del_version", "10.10", - "update.chain","external-version-float")); - updateJ(jsonAdd(sdoc("id", "aaa", "name", "XX", "my_version_f", "10.05")), - params("update.chain","external-version-float")); - assertJQ(req("qt","/get", "id","aaa", "fl","my_version_f") - , "=={'doc':{'my_version_f':10.10}}}"); - assertU(commit()); - assertJQ(req("q","+id:aaa"), "/response/numFound==1"); - assertJQ(req("q","+id:aaa +name:XX"), "/response/numFound==0"); - assertJQ(req("qt","/get", "id","aaa", "fl","my_version_f") - , "=={'doc':{'my_version_f':10.10}}"); + deleteAndGetVersion( + "aaa", + params( + "del_version", "10.10", + "update.chain", "external-version-float")); + updateJ( + jsonAdd(sdoc("id", "aaa", "name", "XX", "my_version_f", "10.05")), + params("update.chain", "external-version-float")); + assertJQ( + req("qt", "/get", "id", "aaa", "fl", "my_version_f"), "=={'doc':{'my_version_f':10.10}}}"); + assertU(commit()); + assertJQ(req("q", "+id:aaa"), "/response/numFound==1"); + assertJQ(req("q", "+id:aaa +name:XX"), 
"/response/numFound==0"); + assertJQ( + req("qt", "/get", "id", "aaa", "fl", "my_version_f"), "=={'doc':{'my_version_f':10.10}}"); // skip low version add against committed "delete" // (delete was already done & committed above) - updateJ(jsonAdd(sdoc("id", "aaa", "name", "XX", "my_version_f", "10.09")), - params("update.chain","external-version-float")); - assertJQ(req("qt","/get", "id","aaa", "fl","my_version_f") - , "=={'doc':{'my_version_f':10.10}}}"); - assertU(commit()); - assertJQ(req("q","+id:aaa"), "/response/numFound==1"); - assertJQ(req("q","+id:aaa +name:XX"), "/response/numFound==0"); - assertJQ(req("qt","/get", "id","aaa", "fl","my_version_f") - , "=={'doc':{'my_version_f':10.10}}"); + updateJ( + jsonAdd(sdoc("id", "aaa", "name", "XX", "my_version_f", "10.09")), + params("update.chain", "external-version-float")); + assertJQ( + req("qt", "/get", "id", "aaa", "fl", "my_version_f"), "=={'doc':{'my_version_f':10.10}}}"); + assertU(commit()); + assertJQ(req("q", "+id:aaa"), "/response/numFound==1"); + assertJQ(req("q", "+id:aaa +name:XX"), "/response/numFound==0"); + assertJQ( + req("qt", "/get", "id", "aaa", "fl", "my_version_f"), "=={'doc':{'my_version_f':10.10}}"); } public void testFailOnOldVersion() throws Exception { // fail low version add & low version delete against committed doc - updateJ(jsonAdd(sdoc("id", "aaa", "name", "a1", "my_version_l", "1001")), - params("update.chain","external-version-failhard")); - assertU(commit()); - - SolrException ex = expectThrows(SolrException.class, () -> { - updateJ(jsonAdd(sdoc("id", "aaa", "name", "XX", "my_version_l", "42")), - params("update.chain","external-version-failhard")); - }); + updateJ( + jsonAdd(sdoc("id", "aaa", "name", "a1", "my_version_l", "1001")), + params("update.chain", "external-version-failhard")); + assertU(commit()); + + SolrException ex = + expectThrows( + SolrException.class, + () -> { + updateJ( + jsonAdd(sdoc("id", "aaa", "name", "XX", "my_version_l", "42")), + params("update.chain", "external-version-failhard")); + }); assertEquals(409, ex.code()); assertU(commit()); - assertJQ(req("qt","/get", "id","aaa", "fl","name") - , "=={'doc':{'name':'a1'}}"); + assertJQ(req("qt", "/get", "id", "aaa", "fl", "name"), "=={'doc':{'name':'a1'}}"); - ex = expectThrows(SolrException.class, () -> { - deleteAndGetVersion("aaa", params("del_version", "7", - "update.chain","external-version-failhard")); - }); + ex = + expectThrows( + SolrException.class, + () -> { + deleteAndGetVersion( + "aaa", params("del_version", "7", "update.chain", "external-version-failhard")); + }); assertEquals(409, ex.code()); - assertJQ(req("qt","/get", "id","aaa", "fl","name") - , "=={'doc':{'name':'a1'}}"); + assertJQ(req("qt", "/get", "id", "aaa", "fl", "name"), "=={'doc':{'name':'a1'}}"); assertU(commit()); - + // fail low version delete against uncommitted doc from updateLog - updateJ(jsonAdd(sdoc("id", "aaa", "name", "a2", "my_version_l", "1002")), - params("update.chain","external-version-failhard")); - ex = expectThrows(SolrException.class, () -> { - deleteAndGetVersion("aaa", params("del_version", "8", - "update.chain","external-version-failhard")); - }); + updateJ( + jsonAdd(sdoc("id", "aaa", "name", "a2", "my_version_l", "1002")), + params("update.chain", "external-version-failhard")); + ex = + expectThrows( + SolrException.class, + () -> { + deleteAndGetVersion( + "aaa", params("del_version", "8", "update.chain", "external-version-failhard")); + }); assertEquals(409, ex.code()); - assertJQ(req("qt","/get", "id","aaa", "fl","name") - , 
"=={'doc':{'name':'a2'}}"); + assertJQ(req("qt", "/get", "id", "aaa", "fl", "name"), "=={'doc':{'name':'a2'}}"); assertU(commit()); - assertJQ(req("q","+id:aaa"), "/response/numFound==1"); - assertJQ(req("q","+id:aaa +name:a2"), "/response/numFound==1"); - assertJQ(req("qt","/get", "id","aaa", "fl","name") - , "=={'doc':{'name':'a2'}}"); + assertJQ(req("q", "+id:aaa"), "/response/numFound==1"); + assertJQ(req("q", "+id:aaa +name:a2"), "/response/numFound==1"); + assertJQ(req("qt", "/get", "id", "aaa", "fl", "name"), "=={'doc':{'name':'a2'}}"); // fail low version add against uncommitted "delete" from updateLog - deleteAndGetVersion("aaa", params("del_version", "1010", - "update.chain","external-version-failhard")); - ex = expectThrows(SolrException.class, () -> { - updateJ(jsonAdd(sdoc("id", "aaa", "name", "XX", "my_version_l", "1005")), - params("update.chain","external-version-failhard")); - }); + deleteAndGetVersion( + "aaa", + params( + "del_version", "1010", + "update.chain", "external-version-failhard")); + ex = + expectThrows( + SolrException.class, + () -> { + updateJ( + jsonAdd(sdoc("id", "aaa", "name", "XX", "my_version_l", "1005")), + params("update.chain", "external-version-failhard")); + }); assertEquals(409, ex.code()); - assertJQ(req("qt","/get", "id","aaa", "fl","my_version_l") - , "=={'doc':{'my_version_l':1010}}}"); + assertJQ( + req("qt", "/get", "id", "aaa", "fl", "my_version_l"), "=={'doc':{'my_version_l':1010}}}"); assertU(commit()); - assertJQ(req("q","+id:aaa"), "/response/numFound==1"); - assertJQ(req("q","+id:aaa +name:XX"), "/response/numFound==0"); - assertJQ(req("qt","/get", "id","aaa", "fl","my_version_l") - , "=={'doc':{'my_version_l':1010}}"); + assertJQ(req("q", "+id:aaa"), "/response/numFound==1"); + assertJQ(req("q", "+id:aaa +name:XX"), "/response/numFound==0"); + assertJQ( + req("qt", "/get", "id", "aaa", "fl", "my_version_l"), "=={'doc':{'my_version_l':1010}}"); // fail low version add against committed "delete" // (delete was already done & committed above) - ex = expectThrows(SolrException.class, () -> { - updateJ(jsonAdd(sdoc("id", "aaa", "name", "XX", "my_version_l", "1009")), - params("update.chain","external-version-failhard")); - }); + ex = + expectThrows( + SolrException.class, + () -> { + updateJ( + jsonAdd(sdoc("id", "aaa", "name", "XX", "my_version_l", "1009")), + params("update.chain", "external-version-failhard")); + }); assertEquals(409, ex.code()); - assertJQ(req("qt","/get", "id","aaa", "fl","my_version_l") - , "=={'doc':{'my_version_l':1010}}}"); + assertJQ( + req("qt", "/get", "id", "aaa", "fl", "my_version_l"), "=={'doc':{'my_version_l':1010}}}"); assertU(commit()); - assertJQ(req("q","+id:aaa"), "/response/numFound==1"); - assertJQ(req("q","+id:aaa +name:XX"), "/response/numFound==0"); - assertJQ(req("qt","/get", "id","aaa", "fl","my_version_l") - , "=={'doc':{'my_version_l':1010}}"); + assertJQ(req("q", "+id:aaa"), "/response/numFound==1"); + assertJQ(req("q", "+id:aaa +name:XX"), "/response/numFound==0"); + assertJQ( + req("qt", "/get", "id", "aaa", "fl", "my_version_l"), "=={'doc':{'my_version_l':1010}}"); } // Test multiple versions, that it has to be greater than my_version_l and my_version_f public void testMultipleVersions() throws Exception { - updateJ(jsonAdd(sdoc("id", "aaa", "name", "a1", "my_version_l", "1001", "my_version_f", "1.0")), - params("update.chain","external-version-failhard-multiple")); - assertU(commit()); - // All variations of additional versions should fail other than my_version_l greater or my_version_f 
greater. - SolrException ex = expectThrows(SolrException.class, () -> { - updateJ(jsonAdd(sdoc("id", "aaa", "name", "X1", "my_version_l", "1000", "my_version_f", "1.0")), - params("update.chain","external-version-failhard-multiple")); - }); + updateJ( + jsonAdd(sdoc("id", "aaa", "name", "a1", "my_version_l", "1001", "my_version_f", "1.0")), + params("update.chain", "external-version-failhard-multiple")); + assertU(commit()); + // All variations of additional versions should fail other than my_version_l greater or + // my_version_f greater. + SolrException ex = + expectThrows( + SolrException.class, + () -> { + updateJ( + jsonAdd( + sdoc( + "id", + "aaa", + "name", + "X1", + "my_version_l", + "1000", + "my_version_f", + "1.0")), + params("update.chain", "external-version-failhard-multiple")); + }); assertEquals(409, ex.code()); - ex = expectThrows(SolrException.class, () -> { - updateJ(jsonAdd(sdoc("id", "aaa", "name", "X2", "my_version_l", "1001", "my_version_f", "0.9")), - params("update.chain","external-version-failhard-multiple")); - }); + ex = + expectThrows( + SolrException.class, + () -> { + updateJ( + jsonAdd( + sdoc( + "id", + "aaa", + "name", + "X2", + "my_version_l", + "1001", + "my_version_f", + "0.9")), + params("update.chain", "external-version-failhard-multiple")); + }); assertEquals(409, ex.code()); // Also fails on the exact same version - ex = expectThrows(SolrException.class, () -> { - updateJ(jsonAdd(sdoc("id", "aaa", "name", "X3", "my_version_l", "1001", "my_version_f", "1.0")), - params("update.chain","external-version-failhard-multiple")); - }); + ex = + expectThrows( + SolrException.class, + () -> { + updateJ( + jsonAdd( + sdoc( + "id", + "aaa", + "name", + "X3", + "my_version_l", + "1001", + "my_version_f", + "1.0")), + params("update.chain", "external-version-failhard-multiple")); + }); assertEquals(409, ex.code()); - //Verify we are still unchanged + // Verify we are still unchanged assertU(commit()); - assertJQ(req("q","+id:aaa +name:a1"), "/response/numFound==1"); + assertJQ(req("q", "+id:aaa +name:a1"), "/response/numFound==1"); // update version 1 - updateJ(jsonAdd(sdoc("id", "aaa", "name", "Y1", "my_version_l", "2001", "my_version_f", "1.0")), - params("update.chain","external-version-failhard-multiple")); + updateJ( + jsonAdd(sdoc("id", "aaa", "name", "Y1", "my_version_l", "2001", "my_version_f", "1.0")), + params("update.chain", "external-version-failhard-multiple")); assertU(commit()); - assertJQ(req("q","+id:aaa +name:Y1"), "/response/numFound==1"); + assertJQ(req("q", "+id:aaa +name:Y1"), "/response/numFound==1"); // update version 2 - updateJ(jsonAdd(sdoc("id", "aaa", "name", "Y2", "my_version_l", "2001", "my_version_f", "2.0")), - params("update.chain","external-version-failhard-multiple")); + updateJ( + jsonAdd(sdoc("id", "aaa", "name", "Y2", "my_version_l", "2001", "my_version_f", "2.0")), + params("update.chain", "external-version-failhard-multiple")); assertU(commit()); - assertJQ(req("q","+id:aaa +name:Y2"), "/response/numFound==1"); + assertJQ(req("q", "+id:aaa +name:Y2"), "/response/numFound==1"); } public void testMultipleVersionDeletes() throws Exception { - updateJ(jsonAdd(sdoc("id", "aaa", "name", "a1", "my_version_l", "1001", "my_version_f", "1.0")), - params("update.chain","external-version-failhard-multiple")); - assertU(commit()); - - SolrException ex = expectThrows(SolrException.class, () -> { - deleteAndGetVersion("aaa", params("del_version", "1000", "del_version_2", "1.0", - "update.chain","external-version-failhard-multiple")); - }); 
+ updateJ( + jsonAdd(sdoc("id", "aaa", "name", "a1", "my_version_l", "1001", "my_version_f", "1.0")), + params("update.chain", "external-version-failhard-multiple")); + assertU(commit()); + + SolrException ex = + expectThrows( + SolrException.class, + () -> { + deleteAndGetVersion( + "aaa", + params( + "del_version", + "1000", + "del_version_2", + "1.0", + "update.chain", + "external-version-failhard-multiple")); + }); assertEquals(409, ex.code()); - ex = expectThrows(SolrException.class, () -> { - deleteAndGetVersion("aaa", params("del_version", "1001", "del_version_2", "0.9", - "update.chain","external-version-failhard-multiple")); - }); + ex = + expectThrows( + SolrException.class, + () -> { + deleteAndGetVersion( + "aaa", + params( + "del_version", + "1001", + "del_version_2", + "0.9", + "update.chain", + "external-version-failhard-multiple")); + }); assertEquals(409, ex.code()); // And just verify if we pass version 1, we still error if version 2 isn't found. ignoreException("Delete by ID must specify doc version param"); - ex = expectThrows(SolrException.class, () -> { - deleteAndGetVersion("aaa", params("del_version", "1001", - "update.chain","external-version-failhard-multiple")); - }); + ex = + expectThrows( + SolrException.class, + () -> { + deleteAndGetVersion( + "aaa", + params( + "del_version", "1001", "update.chain", "external-version-failhard-multiple")); + }); assertEquals(400, ex.code()); unIgnoreException("Delete by ID must specify doc version param"); - //Verify we are still unchanged + // Verify we are still unchanged assertU(commit()); - assertJQ(req("q","+id:aaa +name:a1"), "/response/numFound==1"); + assertJQ(req("q", "+id:aaa +name:a1"), "/response/numFound==1"); - //And let's verify the actual case. - deleteAndGetVersion("aaa", params("del_version", "1001", "del_version_2", "2.0", - "update.chain","external-version-failhard-multiple")); + // And let's verify the actual case. 
+ deleteAndGetVersion( + "aaa", + params( + "del_version", + "1001", + "del_version_2", + "2.0", + "update.chain", + "external-version-failhard-multiple")); assertU(commit()); - assertJQ(req("q","+id:aaa +name:a1"), "/response/numFound==0"); //Delete allowed + assertJQ(req("q", "+id:aaa +name:a1"), "/response/numFound==0"); // Delete allowed } - - /** - * Proof of concept test demonstrating how to manage and periodically cleanup - * the "logically" deleted documents + /** + * Proof of concept test demonstrating how to manage and periodically cleanup the "logically" + * deleted documents */ public void testManagingDeletes() throws Exception { // add some docs for (long ver = 1010; ver < 1020; ver++) { for (String id : new String[] {"aaa", "bbb", "ccc", "ddd"}) { - assertU(adoc("id", id, "name", "name_"+id, "my_version_l", ""+ver)); + assertU(adoc("id", id, "name", "name_" + id, "my_version_l", "" + ver)); } } assertU(adoc("id", "aaa", "name", "name_aaa", "my_version_l", "1030")); assertU(commit()); // sample queries - assertJQ(req("q","*:*", - "fq","live_b:true") - ,"/response/numFound==4"); - assertJQ(req("q","id:aaa", - "fq","live_b:true", - "fl","id,my_version_l") - ,"/response/numFound==1" - ,"/response/docs==[{'id':'aaa','my_version_l':1030}]}"); + assertJQ( + req( + "q", "*:*", + "fq", "live_b:true"), + "/response/numFound==4"); + assertJQ( + req( + "q", "id:aaa", + "fq", "live_b:true", + "fl", "id,my_version_l"), + "/response/numFound==1", + "/response/docs==[{'id':'aaa','my_version_l':1030}]}"); // logically delete - deleteAndGetVersion("aaa", - params("del_version", "1031")); + deleteAndGetVersion("aaa", params("del_version", "1031")); assertU(commit()); // sample queries - assertJQ(req("q","*:*", - "fq","live_b:true") - ,"/response/numFound==3"); - assertJQ(req("q","id:aaa", - "fq","live_b:true") - ,"/response/numFound==0"); + assertJQ( + req( + "q", "*:*", + "fq", "live_b:true"), + "/response/numFound==3"); + assertJQ( + req( + "q", "id:aaa", + "fq", "live_b:true"), + "/response/numFound==0"); // placeholder doc is still in the index though - assertJQ(req("q","id:aaa", - "fq","live_b:false", - "fq", "timestamp_tdt:[* TO *]", - "fl","id,live_b,my_version_l") - ,"/response/numFound==1" - ,"/response/docs==[{'id':'aaa','my_version_l':1031,'live_b':false}]}"); + assertJQ( + req( + "q", "id:aaa", + "fq", "live_b:false", + "fq", "timestamp_tdt:[* TO *]", + "fl", "id,live_b,my_version_l"), + "/response/numFound==1", + "/response/docs==[{'id':'aaa','my_version_l':1031,'live_b':false}]}"); // doc can't be re-added with a low version assertU(adoc("id", "aaa", "name", "XX", "my_version_l", "1025")); assertU(commit()); - assertJQ(req("q","id:aaa", - "fq","live_b:true") - ,"/response/numFound==0"); + assertJQ( + req( + "q", "id:aaa", + "fq", "live_b:true"), + "/response/numFound==0"); - // "dead" placeholder docs can be periodically cleaned up + // "dead" placeholder docs can be periodically cleaned up // ie: assertU(delQ("+live_b:false +timestamp_tdt:[* TO NOW/MINUTE-5MINUTE]")); // but to prevent the test from ebing time sensitive we'll just purge them all assertU(delQ("+live_b:false")); @@ -430,22 +535,25 @@ public void testManagingDeletes() throws Exception { // now doc can be re-added w/any version, no matter how low assertU(adoc("id", "aaa", "name", "aaa", "my_version_l", "7")); assertU(commit()); - assertJQ(req("q","id:aaa", - "fq","live_b:true", - "fl","id,live_b,my_version_l") - ,"/response/numFound==1" - ,"/response/docs==[{'id':'aaa','my_version_l':7,'live_b':true}]}"); - + 
assertJQ( + req( + "q", "id:aaa", + "fq", "live_b:true", + "fl", "id,live_b,my_version_l"), + "/response/numFound==1", + "/response/docs==[{'id':'aaa','my_version_l':7,'live_b':true}]}"); } - /** - * Constantly hammer the same doc with multiple concurrent threads and diff versions, - * confirm that the highest version wins. + /** + * Constantly hammer the same doc with multiple concurrent threads and diff versions, confirm that + * the highest version wins. */ public void testConcurrentAdds() throws Exception { final int NUM_DOCS = atLeast(50); final int MAX_CONCURENT = atLeast(10); - ExecutorService runner = ExecutorUtil.newMDCAwareFixedThreadPool(MAX_CONCURENT, new SolrNamedThreadFactory("TestDocBasedVersionConstraints")); + ExecutorService runner = + ExecutorUtil.newMDCAwareFixedThreadPool( + MAX_CONCURENT, new SolrNamedThreadFactory("TestDocBasedVersionConstraints")); // runner = Executors.newFixedThreadPool(1); // to test single threaded try { for (int id = 0; id < NUM_DOCS; id++) { @@ -455,28 +563,39 @@ public void testConcurrentAdds() throws Exception { final boolean winnerIsDeleted = (0 == TestUtil.nextInt(random(), 0, 4)); List<Callable<Object>> tasks = new ArrayList<>(numAdds); for (int variant = 0; variant < numAdds; variant++) { - final boolean iShouldWin = (variant==winner); - final long version = (iShouldWin ? winnerVersion - : TestUtil.nextInt(random(), 1, winnerVersion - 1)); + final boolean iShouldWin = (variant == winner); + final long version = + (iShouldWin ? winnerVersion : TestUtil.nextInt(random(), 1, winnerVersion - 1)); if ((iShouldWin && winnerIsDeleted) || (!iShouldWin && 0 == TestUtil.nextInt(random(), 0, 4))) { - tasks.add(delayedDelete(""+id, ""+version)); + tasks.add(delayedDelete("" + id, "" + version)); } else { - tasks.add(delayedAdd("id",""+id,"name","name"+id+"_"+variant, - "my_version_l", ""+ version)); + tasks.add( + delayedAdd( + "id", + "" + id, + "name", + "name" + id + "_" + variant, + "my_version_l", + "" + version)); } } runner.invokeAll(tasks); - final String expectedDoc = "{'id':'"+id+"','my_version_l':"+winnerVersion + - ( ! winnerIsDeleted ? ",'name':'name"+id+"_"+winner+"'}" : "}"); - - assertJQ(req("qt","/get", "id",""+id, "fl","id,name,my_version_l") - , "=={'doc':" + expectedDoc + "}"); + final String expectedDoc = + "{'id':'" + + id + + "','my_version_l':" + + winnerVersion + + (!winnerIsDeleted ?
",'name':'name" + id + "_" + winner + "'}" : "}"); + + assertJQ( + req("qt", "/get", "id", "" + id, "fl", "id,name,my_version_l"), + "=={'doc':" + expectedDoc + "}"); assertU(commit()); - assertJQ(req("q","id:"+id, - "fl","id,name,my_version_l") - ,"/response/numFound==1" - ,"/response/docs==["+expectedDoc+"]"); + assertJQ( + req("q", "id:" + id, "fl", "id,name,my_version_l"), + "/response/numFound==1", + "/response/docs==[" + expectedDoc + "]"); } } finally { ExecutorUtil.shutdownAndAwaitTermination(runner); @@ -487,87 +606,102 @@ public void testMissingVersionOnOldDocs() throws Exception { String version = "2"; // Write one doc with version, one doc without version using the "no version" chain - updateJ(json("[{\"id\": \"a\", \"name\": \"a1\", \"my_version_l\": " + version + "}]"), - params("update.chain", "no-external-version")); - updateJ(json("[{\"id\": \"b\", \"name\": \"b1\"}]"), params("update.chain", "no-external-version")); - assertU(commit()); - assertJQ(req("q","*:*"), "/response/numFound==2"); - assertJQ(req("q","id:a"), "/response/numFound==1"); - assertJQ(req("q","id:b"), "/response/numFound==1"); - - // Try updating both with a new version and using the enforced version chain, expect id=b to fail bc old - // doc is missing the version field + updateJ( + json("[{\"id\": \"a\", \"name\": \"a1\", \"my_version_l\": " + version + "}]"), + params("update.chain", "no-external-version")); + updateJ( + json("[{\"id\": \"b\", \"name\": \"b1\"}]"), params("update.chain", "no-external-version")); + assertU(commit()); + assertJQ(req("q", "*:*"), "/response/numFound==2"); + assertJQ(req("q", "id:a"), "/response/numFound==1"); + assertJQ(req("q", "id:b"), "/response/numFound==1"); + + // Try updating both with a new version and using the enforced version chain, expect id=b to + // fail bc old doc is missing the version field String newVersion = "3"; - updateJ(json("[{\"id\": \"a\", \"name\": \"a1\", \"my_version_l\": " + newVersion + "}]"), - params("update.chain", "external-version-constraint")); + updateJ( + json("[{\"id\": \"a\", \"name\": \"a1\", \"my_version_l\": " + newVersion + "}]"), + params("update.chain", "external-version-constraint")); ignoreException("Doc exists in index, but has null versionField: my_version_l"); - SolrException ex = expectThrows(SolrException.class, () -> { - updateJ(json("[{\"id\": \"b\", \"name\": \"b1\", \"my_version_l\": " + newVersion + "}]"), - params("update.chain", "external-version-constraint")); - }); + SolrException ex = + expectThrows( + SolrException.class, + () -> { + updateJ( + json("[{\"id\": \"b\", \"name\": \"b1\", \"my_version_l\": " + newVersion + "}]"), + params("update.chain", "external-version-constraint")); + }); assertEquals("Doc exists in index, but has null versionField: my_version_l", ex.getMessage()); unIgnoreException("Doc exists in index, but has null versionField: my_version_l"); assertU(commit()); - assertJQ(req("q","*:*"), "/response/numFound==2"); - assertJQ(req("qt","/get", "id", "a", "fl", "id,my_version_l"), "=={'doc':{'id':'a', 'my_version_l':3}}"); // version changed to 3 - assertJQ(req("qt","/get", "id", "b", "fl", "id,my_version_l"), "=={'doc':{'id':'b'}}"); // no version, because update failed + assertJQ(req("q", "*:*"), "/response/numFound==2"); + assertJQ( + req("qt", "/get", "id", "a", "fl", "id,my_version_l"), + "=={'doc':{'id':'a', 'my_version_l':3}}"); // version changed to 3 + assertJQ( + req("qt", "/get", "id", "b", "fl", "id,my_version_l"), + "=={'doc':{'id':'b'}}"); // no version, because update failed - 
// Try to update again using the external version enforcement, but allowing old docs to not have the version - // field. Expect id=a to fail because version is lower, expect id=b to succeed. + // Try to update again using the external version enforcement, but allowing old docs to not have + // the version field. Expect id=a to fail because version is lower, expect id=b to succeed. version = "1"; - updateJ(json("[{\"id\": \"a\", \"name\": \"a1\", \"my_version_l\": " + version + "}]"), - params("update.chain", "external-version-support-missing")); + updateJ( + json("[{\"id\": \"a\", \"name\": \"a1\", \"my_version_l\": " + version + "}]"), + params("update.chain", "external-version-support-missing")); System.out.println("send b"); - updateJ(json("[{\"id\": \"b\", \"name\": \"b1\", \"my_version_l\": " + version + "}]"), - params("update.chain", "external-version-support-missing")); - assertU(commit()); - assertJQ(req("q","*:*"), "/response/numFound==2"); - assertJQ(req("qt","/get", "id", "a", "fl", "id,my_version_l"), "=={'doc':{'id':'a', 'my_version_l':3}}"); - assertJQ(req("qt","/get", "id", "b", "fl", "id,my_version_l"), "=={'doc':{'id':'b', 'my_version_l':1}}"); + updateJ( + json("[{\"id\": \"b\", \"name\": \"b1\", \"my_version_l\": " + version + "}]"), + params("update.chain", "external-version-support-missing")); + assertU(commit()); + assertJQ(req("q", "*:*"), "/response/numFound==2"); + assertJQ( + req("qt", "/get", "id", "a", "fl", "id,my_version_l"), + "=={'doc':{'id':'a', 'my_version_l':3}}"); + assertJQ( + req("qt", "/get", "id", "b", "fl", "id,my_version_l"), + "=={'doc':{'id':'b', 'my_version_l':1}}"); } - + public void testTombstoneConfig() throws Exception { - assertJQ(req("q","*:*"),"/response/numFound==0"); - updateWithChain("tombstone-config", - "id", "b!doc1", - "my_version_l", "1"); - assertU(commit()); - assertJQ(req("q","*:*"),"/response/numFound==1"); - assertJQ(req("q","foo_b:true"),"/response/numFound==0"); - assertJQ(req("q","foo_i:1"),"/response/numFound==0"); - assertJQ(req("q","foo_l:1"),"/response/numFound==0"); - assertJQ(req("q","foo_f:1.5"),"/response/numFound==0"); - assertJQ(req("q","foo_s:bar"),"/response/numFound==0"); - assertJQ(req("q","foo_ss:bar1"),"/response/numFound==0"); - assertJQ(req("q","foo_ss:bar2"),"/response/numFound==0"); - - deleteAndGetVersion("b!doc1", - params("del_version", "2", "update.chain", - "tombstone-config")); - assertU(commit()); - - assertJQ(req("q","foo_b:true"),"/response/numFound==1"); - assertJQ(req("q","foo_i:1"),"/response/numFound==1"); - assertJQ(req("q","foo_l:1"),"/response/numFound==1"); - assertJQ(req("q","foo_f:1.5"),"/response/numFound==1"); - assertJQ(req("q","foo_s:bar"),"/response/numFound==1"); - assertJQ(req("q","foo_ss:bar1"),"/response/numFound==1"); - assertJQ(req("q","foo_ss:bar2"),"/response/numFound==1"); + assertJQ(req("q", "*:*"), "/response/numFound==0"); + updateWithChain("tombstone-config", "id", "b!doc1", "my_version_l", "1"); + assertU(commit()); + assertJQ(req("q", "*:*"), "/response/numFound==1"); + assertJQ(req("q", "foo_b:true"), "/response/numFound==0"); + assertJQ(req("q", "foo_i:1"), "/response/numFound==0"); + assertJQ(req("q", "foo_l:1"), "/response/numFound==0"); + assertJQ(req("q", "foo_f:1.5"), "/response/numFound==0"); + assertJQ(req("q", "foo_s:bar"), "/response/numFound==0"); + assertJQ(req("q", "foo_ss:bar1"), "/response/numFound==0"); + assertJQ(req("q", "foo_ss:bar2"), "/response/numFound==0"); + + deleteAndGetVersion("b!doc1", params("del_version", "2", "update.chain", 
"tombstone-config")); + assertU(commit()); + + assertJQ(req("q", "foo_b:true"), "/response/numFound==1"); + assertJQ(req("q", "foo_i:1"), "/response/numFound==1"); + assertJQ(req("q", "foo_l:1"), "/response/numFound==1"); + assertJQ(req("q", "foo_f:1.5"), "/response/numFound==1"); + assertJQ(req("q", "foo_s:bar"), "/response/numFound==1"); + assertJQ(req("q", "foo_ss:bar1"), "/response/numFound==1"); + assertJQ(req("q", "foo_ss:bar2"), "/response/numFound==1"); } - + public void testCanCreateTombstonesBasic() { - DocBasedVersionConstraintsProcessorFactory factory = new DocBasedVersionConstraintsProcessorFactory(); + DocBasedVersionConstraintsProcessorFactory factory = + new DocBasedVersionConstraintsProcessorFactory(); NamedList config = new NamedList<>(); config.add("versionField", "_version_"); factory.init(config); IndexSchema schema = h.getCore().getLatestSchema(); assertThat(factory.canCreateTombstoneDocument(schema), is(true)); } - + public void testCanCreateTombstonesMissingRequiredField() { - DocBasedVersionConstraintsProcessorFactory factory = new DocBasedVersionConstraintsProcessorFactory(); + DocBasedVersionConstraintsProcessorFactory factory = + new DocBasedVersionConstraintsProcessorFactory(); NamedList config = new NamedList<>(); config.add("versionField", "_version_"); factory.init(config); @@ -582,9 +716,10 @@ public void testCanCreateTombstonesMissingRequiredField() { schema.getRequiredFields().remove(sf); } } - + public void testCanCreateTombstonesRequiredFieldWithDefault() { - DocBasedVersionConstraintsProcessorFactory factory = new DocBasedVersionConstraintsProcessorFactory(); + DocBasedVersionConstraintsProcessorFactory factory = + new DocBasedVersionConstraintsProcessorFactory(); NamedList config = new NamedList<>(); config.add("versionField", "_version_"); factory.init(config); @@ -598,9 +733,10 @@ public void testCanCreateTombstonesRequiredFieldWithDefault() { schema.getRequiredFields().remove(sf2); } } - + public void testCanCreateTombstonesRequiredFieldInTombstoneConfig() { - DocBasedVersionConstraintsProcessorFactory factory = new DocBasedVersionConstraintsProcessorFactory(); + DocBasedVersionConstraintsProcessorFactory factory = + new DocBasedVersionConstraintsProcessorFactory(); NamedList config = new NamedList<>(); config.add("versionField", "_version_"); NamedList tombstoneConfig = new NamedList<>(); @@ -618,9 +754,10 @@ public void testCanCreateTombstonesRequiredFieldInTombstoneConfig() { schema.getRequiredFields().remove(sf); } } - + public void testCanCreateTombstonesVersionFieldRequired() { - DocBasedVersionConstraintsProcessorFactory factory = new DocBasedVersionConstraintsProcessorFactory(); + DocBasedVersionConstraintsProcessorFactory factory = + new DocBasedVersionConstraintsProcessorFactory(); NamedList config = new NamedList<>(); config.add("versionField", "_version_"); factory.init(config); @@ -635,9 +772,10 @@ public void testCanCreateTombstonesVersionFieldRequired() { schema.getRequiredFields().remove(versionField); } } - + public void testCanCreateTombstonesUniqueKeyFieldRequired() { - DocBasedVersionConstraintsProcessorFactory factory = new DocBasedVersionConstraintsProcessorFactory(); + DocBasedVersionConstraintsProcessorFactory factory = + new DocBasedVersionConstraintsProcessorFactory(); NamedList config = new NamedList<>(); config.add("versionField", "_version_"); factory.init(config); @@ -648,29 +786,31 @@ public void testCanCreateTombstonesUniqueKeyFieldRequired() { assertThat(schema.getRequiredFields(), 
hasItem(schema.getUniqueKeyField())); assertThat(factory.canCreateTombstoneDocument(schema), is(true)); } - - private void updateWithChain(String chain, String...fields) throws Exception { + + private void updateWithChain(String chain, String... fields) throws Exception { assert fields.length % 2 == 0; SolrInputDocument doc = new SolrInputDocument(fields); updateJ(jsonAdd(doc), params("update.chain", chain)); } - + private Callable<Object> delayedAdd(final String... fields) { - return Executors.callable(() -> { - // log.info("ADDING DOC: " + adoc(fields)); - assertU(adoc(fields)); - }); + return Executors.callable( + () -> { + // log.info("ADDING DOC: " + adoc(fields)); + assertU(adoc(fields)); + }); } + private Callable<Object> delayedDelete(final String id, final String externalVersion) { - return Executors.callable(() -> { - try { - // Why does this throw "Exception" ??? - // log.info("DELETING DOC: " + id + " v="+externalVersion); - deleteAndGetVersion(id, params("del_version", externalVersion)); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); + return Executors.callable( + () -> { + try { + // Why does this throw "Exception" ??? + // log.info("DELETING DOC: " + id + " v="+externalVersion); + deleteAndGetVersion(id, params("del_version", externalVersion)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); } - } diff --git a/solr/core/src/test/org/apache/solr/update/processor/TestPartialUpdateDeduplication.java b/solr/core/src/test/org/apache/solr/update/processor/TestPartialUpdateDeduplication.java index 8925da5f6de..500f2e33579 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/TestPartialUpdateDeduplication.java +++ b/solr/core/src/test/org/apache/solr/update/processor/TestPartialUpdateDeduplication.java @@ -17,14 +17,13 @@ package org.apache.solr.update.processor; import com.google.common.collect.Maps; +import java.util.Map; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.common.SolrInputDocument; import org.junit.BeforeClass; import org.junit.Test; -import java.util.Map; - public class TestPartialUpdateDeduplication extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { @@ -49,7 +48,8 @@ public void testPartialUpdates() throws Exception { } catch (Exception e) { exception_ok = true; } - assertTrue("Should have gotten an exception with partial update on signature generating field", + assertTrue( + "Should have gotten an exception with partial update on signature generating field", exception_ok); SignatureUpdateProcessorFactoryTest.checkNumDocs(0); diff --git a/solr/core/src/test/org/apache/solr/update/processor/TimeRoutedAliasUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/TimeRoutedAliasUpdateProcessorTest.java index 7ce4f249f67..3abc1b86389 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/TimeRoutedAliasUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/TimeRoutedAliasUpdateProcessorTest.java @@ -17,6 +17,12 @@ package org.apache.solr.update.processor; +import static org.apache.solr.client.solrj.RoutedAliasTypes.TIME; +import static org.apache.solr.cloud.api.collections.RoutedAlias.ROUTED_ALIAS_NAME_CORE_PROP; +import static org.apache.solr.cloud.api.collections.TimeRoutedAlias.ROUTER_START; +import static org.apache.solr.common.cloud.ZkStateReader.COLLECTIONS_ZKNODE; +import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROPS_ZKNODE; + import
java.io.IOException; import java.lang.invoke.MethodHandles; import java.time.Instant; @@ -34,7 +40,6 @@ import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; - import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.BaseHttpClusterStateProvider; @@ -68,12 +73,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.client.solrj.RoutedAliasTypes.TIME; -import static org.apache.solr.cloud.api.collections.RoutedAlias.ROUTED_ALIAS_NAME_CORE_PROP; -import static org.apache.solr.cloud.api.collections.TimeRoutedAlias.ROUTER_START; -import static org.apache.solr.common.cloud.ZkStateReader.COLLECTIONS_ZKNODE; -import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROPS_ZKNODE; - @LuceneTestCase.BadApple(bugUrl = "https://issues.apache.org/jira/browse/SOLR-13059") public class TimeRoutedAliasUpdateProcessorTest extends RoutedAliasUpdateProcessorTest { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -83,7 +82,7 @@ public class TimeRoutedAliasUpdateProcessorTest extends RoutedAliasUpdateProcess private static final String timeField = "timestamp_dt"; public static final String TRA = TIME.getSeparatorPrefix(); - private CloudSolrClient solrClient; + private CloudSolrClient solrClient; private int lastDocId = 0; private int numDocsDeletedOrFailed = 0; @@ -92,7 +91,7 @@ public class TimeRoutedAliasUpdateProcessorTest extends RoutedAliasUpdateProcess public void doBefore() throws Exception { configureCluster(4).configure(); solrClient = getCloudSolrClient(cluster); - //log this to help debug potential causes of problems + // log this to help debug potential causes of problems if (log.isInfoEnabled()) { log.info("SolrClient: {}", solrClient); log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); // nowarn @@ -110,13 +109,12 @@ public void doAfter() throws Exception { @Slow @Test @LogLevel("org.apache.solr.update.processor.TimeRoutedAlias=DEBUG;org.apache.solr.cloud=DEBUG") - // commented out on: 17-Feb-2019 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 14-Oct-2018 public void test() throws Exception { String configName = getSaferTestName(); createConfigSet(configName); - // Start with one collection manually created (and use higher numShards & replicas than we'll use for others) - // This tests we may pre-create the collection and it's acceptable. + // Start with one collection manually created (and use higher numShards & replicas than we'll + // use for others) This tests we may pre-create the collection and it's acceptable. 
final String col23rd = alias + TRA + "2017-10-23"; CollectionAdminRequest.createCollection(col23rd, configName, 2, 2) .withProperty(ROUTED_ALIAS_NAME_CORE_PROP, alias) @@ -124,22 +122,31 @@ public void test() throws Exception { cluster.waitForActiveCollection(col23rd, 2, 4); - List retrievedConfigSetNames = new ConfigSetAdminRequest.List().process(solrClient).getConfigSets(); + List retrievedConfigSetNames = + new ConfigSetAdminRequest.List().process(solrClient).getConfigSets(); List expectedConfigSetNames = Arrays.asList("_default", configName); - // config sets leak between tests so we can't be any more specific than this on the next 2 asserts - assertTrue("We expect at least 2 configSets", + // config sets leak between tests so we can't be any more specific than this on the next 2 + // asserts + assertTrue( + "We expect at least 2 configSets", retrievedConfigSetNames.size() >= expectedConfigSetNames.size()); - assertTrue("ConfigNames should include :" + expectedConfigSetNames, retrievedConfigSetNames.containsAll(expectedConfigSetNames)); - - CollectionAdminRequest.createTimeRoutedAlias(alias, "2017-10-23T00:00:00Z", "+1DAY", getTimeField(), - CollectionAdminRequest.createCollection("_unused_", configName, 1, 1)) + assertTrue( + "ConfigNames should include :" + expectedConfigSetNames, + retrievedConfigSetNames.containsAll(expectedConfigSetNames)); + + CollectionAdminRequest.createTimeRoutedAlias( + alias, + "2017-10-23T00:00:00Z", + "+1DAY", + getTimeField(), + CollectionAdminRequest.createCollection("_unused_", configName, 1, 1)) .process(solrClient); // now we index a document assertUpdateResponse(solrClient.add(alias, newDoc(Instant.parse("2017-10-23T00:00:00Z")))); solrClient.commit(alias); - //assertDocRoutedToCol(lastDocId, col23rd); + // assertDocRoutedToCol(lastDocId, col23rd); assertInvariants(col23rd); // a document that is too old @@ -148,28 +155,31 @@ public void test() throws Exception { // a document which is too far into the future testFailedDocument(Instant.now().plus(30, ChronoUnit.MINUTES), "too far in the future"); - // add another collection with the precise name we expect, but don't add to alias explicitly. When we add a document - // destined for this collection, Solr will see it already exists and add it to the alias. + // add another collection with the precise name we expect, but don't add to alias explicitly. + // When we add a document destined for this collection, Solr will see it already exists and add + // it to the alias. 
final String col24th = alias + TRA + "2017-10-24"; - CollectionAdminRequest.createCollection(col24th, configName, 1, 1) // more shards and replicas now + CollectionAdminRequest.createCollection( + col24th, configName, 1, 1) // more shards and replicas now .withProperty(ROUTED_ALIAS_NAME_CORE_PROP, alias) .process(solrClient); // index 3 documents in a random fashion - addDocsAndCommit(false, // send these to alias & collections + addDocsAndCommit( + false, // send these to alias & collections newDoc(Instant.parse("2017-10-23T00:00:00Z")), newDoc(Instant.parse("2017-10-24T01:00:00Z")), - newDoc(Instant.parse("2017-10-24T02:00:00Z")) - ); -// System.out.println(cluster.getRandomJetty(random()).getBaseUrl()); -// Thread.sleep(1000000); + newDoc(Instant.parse("2017-10-24T02:00:00Z"))); + // System.out.println(cluster.getRandomJetty(random()).getBaseUrl()); + // Thread.sleep(1000000); assertInvariants(col24th, col23rd); // assert that the IncrementURP has updated all '0' to '1' - final SolrDocumentList checkIncResults = solrClient.query(alias, params("q", "NOT " + getIntField() + ":1")).getResults(); + final SolrDocumentList checkIncResults = + solrClient.query(alias, params("q", "NOT " + getIntField() + ":1")).getResults(); assertEquals(checkIncResults.toString(), 0, checkIncResults.getNumFound()); - //delete a random document id; ensure we don't find it + // delete a random document id; ensure we don't find it int idToDelete = 1 + random().nextInt(lastDocId); if (idToDelete == 2 || idToDelete == 3) { // these didn't make it idToDelete = 4; @@ -181,7 +191,8 @@ public void test() throws Exception { // delete the Oct23rd (save memory)... // make sure we track that we are effectively deleting docs there - numDocsDeletedOrFailed += solrClient.query(col23rd, params("q", "*:*", "rows", "0")).getResults().getNumFound(); + numDocsDeletedOrFailed += + solrClient.query(col23rd, params("q", "*:*", "rows", "0")).getResults().getNumFound(); // remove from the alias CollectionAdminRequest.createAlias(alias, col24th).process(solrClient); // delete the collection @@ -189,45 +200,60 @@ public void test() throws Exception { // now we're going to add documents that will trigger more collections to be created // for 25th & 26th - addDocsAndCommit(false, // send these to alias & collections + addDocsAndCommit( + false, // send these to alias & collections newDoc(Instant.parse("2017-10-24T03:00:00Z")), newDoc(Instant.parse("2017-10-25T04:00:00Z")), newDoc(Instant.parse("2017-10-26T05:00:00Z")), - newDoc(Instant.parse("2017-10-26T06:00:00Z")) - ); + newDoc(Instant.parse("2017-10-26T06:00:00Z"))); assertInvariants(alias + TRA + "2017-10-26", alias + TRA + "2017-10-25", col24th); - // verify that collection properties are set when the collections are created. Note: first 2 collections in - // this test have a core property instead, of a collection property but that MUST continue to work as well - // for back compatibility's reasons. + // verify that collection properties are set when the collections are created. Note: first 2 + // collections in this test have a core property instead of a collection property, but that MUST + // continue to work as well for backwards-compatibility reasons. 
Thread.sleep(1000); - byte[] data = cluster.getZkClient() - .getData(COLLECTIONS_ZKNODE + "/" + alias + TRA + "2017-10-26" + "/" + COLLECTION_PROPS_ZKNODE,null, null, true); + byte[] data = + cluster + .getZkClient() + .getData( + COLLECTIONS_ZKNODE + + "/" + + alias + + TRA + + "2017-10-26" + + "/" + + COLLECTION_PROPS_ZKNODE, + null, + null, + true); assertNotNull(data); assertTrue(data.length > 0); @SuppressWarnings("unchecked") - Map props = (Map) Utils.fromJSON(data); + Map props = (Map) Utils.fromJSON(data); assertTrue(props.containsKey(ROUTED_ALIAS_NAME_CORE_PROP)); - assertEquals(alias,props.get(ROUTED_ALIAS_NAME_CORE_PROP)); + assertEquals(alias, props.get(ROUTED_ALIAS_NAME_CORE_PROP)); // update metadata to auto-delete oldest collections CollectionAdminRequest.setAliasProperty(alias) - .addProperty(TimeRoutedAlias.ROUTER_AUTO_DELETE_AGE, "-1DAY") // thus usually keep 2 collections of a day size + .addProperty( + TimeRoutedAlias.ROUTER_AUTO_DELETE_AGE, + "-1DAY") // thus usually keep 2 collections of a day size .process(solrClient); // add more docs, creating one new collection, but trigger ones prior to - int numDocsToBeAutoDeleted = queryNumDocs(getTimeField() +":[* TO \"2017-10-26T00:00:00Z\"}"); - addDocsAndCommit(true, // send these to alias only + int numDocsToBeAutoDeleted = queryNumDocs(getTimeField() + ":[* TO \"2017-10-26T00:00:00Z\"}"); + addDocsAndCommit( + true, // send these to alias only newDoc(Instant.parse("2017-10-26T07:00:00Z")), // existing newDoc(Instant.parse("2017-10-27T08:00:00Z")) // new - ); + ); numDocsDeletedOrFailed += numDocsToBeAutoDeleted; assertInvariants(alias + TRA + "2017-10-27", alias + TRA + "2017-10-26"); } /** - * Test that the Update Processor Factory routes documents to leader shards and thus - * avoids the possibility of introducing an extra hop to find the leader. + * Test that the Update Processor Factory routes documents to leader shards and thus avoids the + * possibility of introducing an extra hop to find the leader. * * @throws Exception when it blows up unexpectedly :) */ @@ -242,12 +268,18 @@ public void testSliceRouting() throws Exception { // 4 of which are leaders, and 8 of which should fail this test. final int numShards = 1 + random().nextInt(4); final int numReplicas = 1 + random().nextInt(3); - CollectionAdminRequest.createTimeRoutedAlias(alias, "2017-10-23T00:00:00Z", "+1DAY", getTimeField(), - CollectionAdminRequest.createCollection("_unused_", configName, numShards, numReplicas)) + CollectionAdminRequest.createTimeRoutedAlias( + alias, + "2017-10-23T00:00:00Z", + "+1DAY", + getTimeField(), + CollectionAdminRequest.createCollection("_unused_", configName, numShards, numReplicas)) .process(solrClient); // cause some collections to be created - assertUpdateResponse(solrClient.add(alias, new SolrInputDocument("id","1","timestamp_dt", "2017-10-25T00:00:00Z"))); + assertUpdateResponse( + solrClient.add( + alias, new SolrInputDocument("id", "1", "timestamp_dt", "2017-10-25T00:00:00Z"))); assertUpdateResponse(solrClient.commit(alias)); // wait for all the collections to exist... @@ -255,10 +287,11 @@ public void testSliceRouting() throws Exception { waitColAndAlias(alias, TRA, "2017-10-24", numShards); waitColAndAlias(alias, TRA, "2017-10-25", numShards); - // at this point we now have 3 collections with 4 shards each, and 3 replicas per shard for a total of - // 36 total replicas, 1/3 of which are leaders. 
We will add 3 docs and each has a 33% chance of hitting a - // leader randomly and not causing a failure if the code is broken, but as a whole this test will therefore only have - // about a 3.6% false positive rate (0.33^3). If that's not good enough, add more docs or more replicas per shard :). + // at this point we now have 3 collections with 4 shards each, and 3 replicas per shard for a + // total of 36 total replicas, 1/3 of which are leaders. We will add 3 docs and each has a 33% + // chance of hitting a leader randomly and not causing a failure if the code is broken, but as a + // whole this test will therefore only have about a 3.6% false positive rate (0.33^3). If that's + // not good enough, add more docs or more replicas per shard :). final String trackGroupName = getTrackUpdatesGroupName(); final List updateCommands; @@ -268,11 +301,14 @@ public void testSliceRouting() throws Exception { // cause some collections to be created ModifiableSolrParams params = params("post-processor", "tracking-" + trackGroupName); - assertUpdateResponse(add(alias, Arrays.asList( - sdoc("id", "2", "timestamp_dt", "2017-10-24T00:00:00Z"), - sdoc("id", "3", "timestamp_dt", "2017-10-25T00:00:00Z"), - sdoc("id", "4", "timestamp_dt", "2017-10-23T00:00:00Z")), - params)); + assertUpdateResponse( + add( + alias, + Arrays.asList( + sdoc("id", "2", "timestamp_dt", "2017-10-24T00:00:00Z"), + sdoc("id", "3", "timestamp_dt", "2017-10-25T00:00:00Z"), + sdoc("id", "4", "timestamp_dt", "2017-10-23T00:00:00Z")), + params)); } finally { updateCommands = TrackingUpdateProcessorFactory.stopRecording(trackGroupName); } @@ -286,16 +322,25 @@ public void testPreemptiveCreation() throws Exception { String configName = getSaferTestName(); createConfigSet(configName); - final int numShards = 1 ; - final int numReplicas = 1 ; - CollectionAdminRequest.createTimeRoutedAlias(alias, "2017-10-23T00:00:00Z", "+1DAY", getTimeField(), - CollectionAdminRequest.createCollection("_unused_", configName, numShards, numReplicas)) + final int numShards = 1; + final int numReplicas = 1; + CollectionAdminRequest.createTimeRoutedAlias( + alias, + "2017-10-23T00:00:00Z", + "+1DAY", + getTimeField(), + CollectionAdminRequest.createCollection("_unused_", configName, numShards, numReplicas)) .setPreemptiveCreateWindow("3HOUR") .process(solrClient); - // needed to verify that preemptive creation in one alias doesn't inhibit preemptive creation in another - CollectionAdminRequest.createTimeRoutedAlias(alias2, "2017-10-23T00:00:00Z", "+1DAY", getTimeField(), - CollectionAdminRequest.createCollection("_unused_", configName, numShards, numReplicas)) + // needed to verify that preemptive creation in one alias doesn't inhibit preemptive creation in + // another + CollectionAdminRequest.createTimeRoutedAlias( + alias2, + "2017-10-23T00:00:00Z", + "+1DAY", + getTimeField(), + CollectionAdminRequest.createCollection("_unused_", configName, numShards, numReplicas)) .setPreemptiveCreateWindow("3HOUR") .process(solrClient); @@ -307,34 +352,38 @@ public void testPreemptiveCreation() throws Exception { // Using threads to ensure that two TRA's are simultaneously preemptively creating and don't // interfere with each other - ExecutorService executorService = ExecutorUtil.newMDCAwareCachedThreadPool("TimeRoutedAliasProcessorTestx-testPreemptiveCreation"); + ExecutorService executorService = + ExecutorUtil.newMDCAwareCachedThreadPool( + "TimeRoutedAliasProcessorTestx-testPreemptiveCreation"); Exception[] threadExceptions = new Exception[2]; boolean[] 
threadStarted = new boolean[2]; boolean[] threadFinished = new boolean[2]; try { CountDownLatch starter = new CountDownLatch(1); - executorService.submit(() -> { - threadStarted[0] = true; - try { - starter.await(); - concurrentUpdates(params, alias); - } catch (Exception e) { - threadExceptions[0] = e; - } - threadFinished[0] = true; - }); - - executorService.submit(() -> { - threadStarted[1] = true; - try { - starter.await(); - concurrentUpdates(params, alias2); - } catch (Exception e) { - threadExceptions[1] = e; - } - threadFinished[1] = true; - }); + executorService.submit( + () -> { + threadStarted[0] = true; + try { + starter.await(); + concurrentUpdates(params, alias); + } catch (Exception e) { + threadExceptions[0] = e; + } + threadFinished[0] = true; + }); + + executorService.submit( + () -> { + threadStarted[1] = true; + try { + starter.await(); + concurrentUpdates(params, alias2); + } catch (Exception e) { + threadExceptions[1] = e; + } + threadFinished[1] = true; + }); starter.countDown(); } finally { ExecutorUtil.shutdownAndAwaitTermination(executorService); @@ -358,37 +407,50 @@ public void testPreemptiveCreation() throws Exception { waitColAndAlias(alias, TRA, "2017-10-26", numShards); waitColAndAlias(alias2, TRA, "2017-10-26", numShards); - // these next checks will be checking that a collection DID NOT get created asynchronously, there's - // no way to wait for something that should never exist to not exist... so all we can do is sleep - // a good while before checking + // these next checks will be checking that a collection DID NOT get created asynchronously, + // there's no way to wait for something that should never exist to not exist... so all we can do + // is sleep a good while before checking Thread.sleep(5000); // after this we can ignore alias2 checkPreemptiveCase1(alias); checkPreemptiveCase1(alias2); - // Some designs contemplated with close hooks were not properly restricted to the core and would have - // failed after other cores with other TRAs were stopped. Make sure that we don't fall into that trap in - // the future. The basic problem with a close hook solution is that one either winds up putting the - // executor on the TRAUP where it's duplicated/initiated for every request, or putting it at the class level - // in which case the hook will remove it for all TRA's which can pass a single TRA test nicely but is not safe - // where multiple TRA's might come and go. + // Some designs contemplated with close hooks were not properly restricted to the core and would + // have failed after other cores with other TRAs were stopped. Make sure that we don't fall into + // that trap in the future. The basic problem with a close hook solution is that one either + // winds up putting the executor on the TRAUP where it's duplicated/initiated for every request, + // or putting it at the class level in which case the hook will remove it for all TRA's which + // can pass a single TRA test nicely but is not safe where multiple TRA's might come and go. // // Start and stop some cores that have TRA's... 
2x2 used to ensure every jetty gets at least one - CollectionAdminRequest.createTimeRoutedAlias(getSaferTestName() + "foo", "2017-10-23T00:00:00Z", "+1DAY", getTimeField(), - CollectionAdminRequest.createCollection("_unused_", configName, 2, 2)) + CollectionAdminRequest.createTimeRoutedAlias( + getSaferTestName() + "foo", + "2017-10-23T00:00:00Z", + "+1DAY", + getTimeField(), + CollectionAdminRequest.createCollection("_unused_", configName, 2, 2)) .setPreemptiveCreateWindow("3HOUR") .process(solrClient); - waitColAndAlias(getSaferTestName() + "foo", TRA, "2017-10-23",2); - waitCoreCount(getSaferTestName() + "foo" + TRA + "2017-10-23", 4); // prove this works, for confidence in deletion checking below. - assertUpdateResponse(solrClient.add(getSaferTestName() + "foo", - sdoc("id","1","timestamp_dt", "2017-10-23T00:00:00Z") // no extra collections should be created - )); + waitColAndAlias(getSaferTestName() + "foo", TRA, "2017-10-23", 2); + waitCoreCount( + getSaferTestName() + "foo" + TRA + "2017-10-23", + 4); // prove this works, for confidence in deletion checking below. + assertUpdateResponse( + solrClient.add( + getSaferTestName() + "foo", + sdoc( + "id", + "1", + "timestamp_dt", + "2017-10-23T00:00:00Z") // no extra collections should be created + )); assertUpdateResponse(solrClient.commit(getSaferTestName() + "foo")); - List foo = solrClient.getClusterStateProvider().resolveAlias(getSaferTestName() + "foo"); + List foo = + solrClient.getClusterStateProvider().resolveAlias(getSaferTestName() + "foo"); CollectionAdminRequest.deleteAlias(getSaferTestName() + "foo").process(solrClient); @@ -397,40 +459,61 @@ public void testPreemptiveCreation() throws Exception { waitCoreCount(colName, 0); } - // if the design for terminating our executor is correct create/delete above will not cause failures below - // continue testing... + // if the design for terminating our executor is correct, create/delete above will not cause + // failures below; continue testing... 
- cols = new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); - assertEquals(4,cols.size()); // only one created in async case + cols = + new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); + assertEquals(4, cols.size()); // only one created in async case // now test with pre-create window longer than time slice, only one creation per request CollectionAdminRequest.setAliasProperty(alias) - .addProperty(TimeRoutedAlias.ROUTER_PREEMPTIVE_CREATE_MATH, "3DAY").process(solrClient); + .addProperty(TimeRoutedAlias.ROUTER_PREEMPTIVE_CREATE_MATH, "3DAY") + .process(solrClient); - assertUpdateResponse(add(alias, Arrays.asList( - sdoc("id", "7", "timestamp_dt", "2017-10-25T23:01:00Z"), // should cause preemptive creation of 10-27 now - sdoc("id", "71", "timestamp_dt", "2017-10-25T23:02:00Z")), // should not cause preemptive creation of 10-28 now - params)); + assertUpdateResponse( + add( + alias, + Arrays.asList( + sdoc( + "id", + "7", + "timestamp_dt", + "2017-10-25T23:01:00Z"), // should cause preemptive creation of 10-27 now + sdoc( + "id", + "71", + "timestamp_dt", + "2017-10-25T23:02:00Z")), // should not cause preemptive creation of 10-28 now + params)); assertUpdateResponse(solrClient.commit(alias)); waitColAndAlias(alias, TRA, "2017-10-27", numShards); - cols = new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); - assertEquals(5,cols.size()); // only one created in async case + cols = + new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); + assertEquals(5, cols.size()); // only one created in async case assertNumDocs("2017-10-23", 1, alias); assertNumDocs("2017-10-24", 1, alias); assertNumDocs("2017-10-25", 6, alias); assertNumDocs("2017-10-26", 0, alias); assertNumDocs("2017-10-27", 0, alias); - - assertUpdateResponse(add(alias, Collections.singletonList( - sdoc("id", "8", "timestamp_dt", "2017-10-25T23:01:00Z")), // should cause preemptive creation of 10-28 now - params)); + assertUpdateResponse( + add( + alias, + Collections.singletonList( + sdoc( + "id", + "8", + "timestamp_dt", + "2017-10-25T23:01:00Z")), // should cause preemptive creation of 10-28 now + params)); assertUpdateResponse(solrClient.commit(alias)); waitColAndAlias(alias, TRA, "2017-10-28", numShards); - cols = new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); - assertEquals(6,cols.size()); + cols = + new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); + assertEquals(6, cols.size()); assertNumDocs("2017-10-23", 1, alias); assertNumDocs("2017-10-24", 1, alias); assertNumDocs("2017-10-25", 7, alias); @@ -439,145 +522,242 @@ public void testPreemptiveCreation() throws Exception { assertNumDocs("2017-10-28", 0, alias); QueryResponse resp; - resp = solrClient.query(alias, params( - "q", "*:*", - "rows", "10")); + resp = + solrClient.query( + alias, + params( + "q", "*:*", + "rows", "10")); assertEquals(9, resp.getResults().getNumFound()); - assertUpdateResponse(add(alias, Arrays.asList( - sdoc("id", "9", "timestamp_dt", "2017-10-27T23:01:00Z"), // should cause preemptive creation - - // If these are not ignored properly this test will fail during cleanup with a message about router.name being - // required. This happens because the test finishes while overseer threads are still trying to invoke maintain - // after the @After method has deleted collections and emptied out the aliases.... 
this leaves the maintain - // command cloning alias properties Aliases.EMPTY and thus not getting a value from router.name - // (normally router.name == 'time') The check for non-blank router.name happens to be the first validation. - // There is a small chance this could slip through without a fail occasionally, but it was 100% with just one - // of these. - sdoc("id", "10", "timestamp_dt", "2017-10-28T23:01:00Z"), // should be ignored due to in progress creation - sdoc("id", "11", "timestamp_dt", "2017-10-28T23:02:00Z"), // should be ignored due to in progress creation - sdoc("id", "12", "timestamp_dt", "2017-10-28T23:03:00Z")), // should be ignored due to in progress creation - params)); + assertUpdateResponse( + add( + alias, + Arrays.asList( + sdoc( + "id", + "9", + "timestamp_dt", + "2017-10-27T23:01:00Z"), // should cause preemptive creation + + // If these are not ignored properly this test will fail during cleanup with a + // message about router.name being required. This happens because the test finishes + // while overseer threads are still trying to invoke maintain after the @After + // method has deleted collections and emptied out the aliases.... this leaves the + // maintain command cloning alias properties Aliases.EMPTY and thus not getting a + // value from router.name (normally router.name == 'time') The check for non-blank + // router.name happens to be the first validation. There is a small chance this + // could slip through without a fail occasionally, but it was 100% with just one of + // these. + sdoc( + "id", + "10", + "timestamp_dt", + "2017-10-28T23:01:00Z"), // should be ignored due to in progress creation + sdoc( + "id", + "11", + "timestamp_dt", + "2017-10-28T23:02:00Z"), // should be ignored due to in progress creation + sdoc( + "id", + "12", + "timestamp_dt", + "2017-10-28T23:03:00Z")), // should be ignored due to in progress creation + params)); assertUpdateResponse(solrClient.commit(alias)); waitColAndAlias(alias, TRA, "2017-10-29", numShards); - cols = new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); - assertEquals(7,cols.size()); + cols = + new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); + assertEquals(7, cols.size()); assertNumDocs("2017-10-23", 1, alias); assertNumDocs("2017-10-24", 1, alias); assertNumDocs("2017-10-25", 7, alias); assertNumDocs("2017-10-26", 0, alias); assertNumDocs("2017-10-27", 1, alias); - assertNumDocs("2017-10-28", 3, alias); // should get through even though preemptive creation ignored it. + assertNumDocs( + "2017-10-28", 3, alias); // should get through even though preemptive creation ignored it. assertNumDocs("2017-10-29", 0, alias); - resp = solrClient.query(alias, params( - "q", "*:*", - "rows", "0")); + resp = + solrClient.query( + alias, + params( + "q", "*:*", + "rows", "0")); assertEquals(13, resp.getResults().getNumFound()); // Sych creation with an interval longer than the time slice for the alias.. - assertUpdateResponse(add(alias, Collections.singletonList( - sdoc("id", "13", "timestamp_dt", "2017-10-30T23:03:00Z")), // lucky? - params)); + assertUpdateResponse( + add( + alias, + Collections.singletonList( + sdoc("id", "13", "timestamp_dt", "2017-10-30T23:03:00Z")), // lucky? 
+ params)); assertUpdateResponse(solrClient.commit(alias)); waitColAndAlias(alias, TRA, "2017-10-30", numShards); - // removed support for this case because it created a LOT of complexity for the benefit of attempting to - // (maybe) not pause again after already hitting a synchronous creation (but only if asynch gets it done first, - // otherwise we have a race... not enough benefit to justify the support/complexity. + // removed support for this case because it created a LOT of complexity for the benefit of + // attempting to (maybe) not pause again after already hitting a synchronous creation (but only + // if asynch gets it done first, otherwise we have a race... not enough benefit to justify the + // support/complexity. // // Now we just let the next doc take care of it... // - // waitColAndAlias(alias, TRA, "2017-10-31", numShards); // spooky! async case arising in middle of sync creation!! + // waitColAndAlias(alias, TRA, "2017-10-31", numShards); // spooky! async case arising in middle + // of sync creation!! - cols = new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); - assertEquals(8,cols.size()); + cols = + new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); + assertEquals(8, cols.size()); assertNumDocs("2017-10-23", 1, alias); assertNumDocs("2017-10-24", 1, alias); assertNumDocs("2017-10-25", 7, alias); assertNumDocs("2017-10-26", 0, alias); assertNumDocs("2017-10-27", 1, alias); - assertNumDocs("2017-10-28", 3, alias); // should get through even though preemptive creation ignored it. + assertNumDocs( + "2017-10-28", 3, alias); // should get through even though preemptive creation ignored it. assertNumDocs("2017-10-29", 0, alias); assertNumDocs("2017-10-30", 1, alias); - resp = solrClient.query(alias, params( - "q", "*:*", - "rows", "0")); + resp = + solrClient.query( + alias, + params( + "q", "*:*", + "rows", "0")); assertEquals(14, resp.getResults().getNumFound()); - assertUpdateResponse(add(alias, Collections.singletonList( - sdoc("id", "14", "timestamp_dt", "2017-10-30T23:01:00Z")), // should cause preemptive creation 10-31 - params)); + assertUpdateResponse( + add( + alias, + Collections.singletonList( + sdoc( + "id", + "14", + "timestamp_dt", + "2017-10-30T23:01:00Z")), // should cause preemptive creation 10-31 + params)); waitColAndAlias(alias, TRA, "2017-10-31", numShards); - assertUpdateResponse(add(alias, Collections.singletonList( - sdoc("id", "15", "timestamp_dt", "2017-10-30T23:01:00Z")), // should cause preemptive creation 11-01 - params)); + assertUpdateResponse( + add( + alias, + Collections.singletonList( + sdoc( + "id", + "15", + "timestamp_dt", + "2017-10-30T23:01:00Z")), // should cause preemptive creation 11-01 + params)); waitColAndAlias(alias, TRA, "2017-11-01", numShards); - assertUpdateResponse(add(alias, Collections.singletonList( - sdoc("id", "16", "timestamp_dt", "2017-10-30T23:01:00Z")), // should cause preemptive creation 11-02 - params)); + assertUpdateResponse( + add( + alias, + Collections.singletonList( + sdoc( + "id", + "16", + "timestamp_dt", + "2017-10-30T23:01:00Z")), // should cause preemptive creation 11-02 + params)); waitColAndAlias(alias, TRA, "2017-11-02", numShards); - assertUpdateResponse(add(alias, Collections.singletonList( - sdoc("id", "17", "timestamp_dt", "2017-10-30T23:01:00Z")), // should NOT cause preemptive creation 11-03 - params)); - - cols = new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); + 
assertUpdateResponse( + add( + alias, + Collections.singletonList( + sdoc( + "id", + "17", + "timestamp_dt", + "2017-10-30T23:01:00Z")), // should NOT cause preemptive creation 11-03 + params)); + + cols = + new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); assertFalse(cols.contains("myalias" + TRA + "2017-11-03")); - assertUpdateResponse(add(alias, Collections.singletonList( - sdoc("id", "18", "timestamp_dt", "2017-10-31T23:01:00Z")), // should cause preemptive creation 11-03 - params)); - waitColAndAlias(alias, TRA, "2017-11-03",numShards); - + assertUpdateResponse( + add( + alias, + Collections.singletonList( + sdoc( + "id", + "18", + "timestamp_dt", + "2017-10-31T23:01:00Z")), // should cause preemptive creation 11-03 + params)); + waitColAndAlias(alias, TRA, "2017-11-03", numShards); } - private void concurrentUpdates(ModifiableSolrParams params, String alias) throws SolrServerException, IOException { - // In this method we intentionally rely on timing of a race condition but the gap in collection creation time vs - // requesting the list of aliases and adding a single doc should be very large (1-2 seconds vs a few ms so we - // should always win the race) This is necessary because we are testing that we can guard against specific race - // conditions that happen while a collection is being created. To test this without timing sensitivity we would - // need a means to pass a semaphore to the server that it can use to delay collection creation + private void concurrentUpdates(ModifiableSolrParams params, String alias) + throws SolrServerException, IOException { + // In this method we intentionally rely on timing of a race condition but the gap in collection + // creation time vs requesting the list of aliases and adding a single doc should be very large + // (1-2 seconds vs a few ms so we should always win the race) This is necessary because we are + // testing that we can guard against specific race conditions that happen while a collection is + // being created. 
To test this without timing sensitivity we would need a means to pass a + // semaphore to the server that it can use to delay collection creation // - // This method must NOT gain any Thread.sleep() statements, nor should it gain any long running operations - assertUpdateResponse(add(alias, Arrays.asList( - sdoc("id", "2", "timestamp_dt", "2017-10-24T00:00:00Z"), - sdoc("id", "3", "timestamp_dt", "2017-10-25T00:00:00Z"), - sdoc("id", "4", "timestamp_dt", "2017-10-23T00:00:00Z"), - sdoc("id", "5", "timestamp_dt", "2017-10-25T23:00:00Z")), // should cause preemptive creation - params)); + // This method must NOT gain any Thread.sleep() statements, nor should it gain any long running + // operations + assertUpdateResponse( + add( + alias, + Arrays.asList( + sdoc("id", "2", "timestamp_dt", "2017-10-24T00:00:00Z"), + sdoc("id", "3", "timestamp_dt", "2017-10-25T00:00:00Z"), + sdoc("id", "4", "timestamp_dt", "2017-10-23T00:00:00Z"), + sdoc( + "id", + "5", + "timestamp_dt", + "2017-10-25T23:00:00Z")), // should cause preemptive creation + params)); assertUpdateResponse(solrClient.commit(alias)); List colsT1; - colsT1 = new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); + colsT1 = + new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); assertEquals(3, colsT1.size()); - assertTrue("Preemptive creation appears to not be asynchronous anymore", !colsT1.contains("myalias" + TRA + "2017-10-26")); + assertTrue( + "Preemptive creation appears to not be asynchronous anymore", + !colsT1.contains("myalias" + TRA + "2017-10-26")); assertNumDocs("2017-10-23", 1, alias); assertNumDocs("2017-10-24", 1, alias); assertNumDocs("2017-10-25", 3, alias); - // Here we quickly add another doc in a separate request, before the collection creation has completed. - // This has the potential to incorrectly cause preemptive collection creation to run twice and create a - // second collection. MaintainRoutedAliasCmd is meant to guard against this race condition by acquiring - // a lock on the collection name. - assertUpdateResponse(add(alias, Collections.singletonList( - sdoc("id", "6", "timestamp_dt", "2017-10-25T23:01:00Z")), // might cause duplicate preemptive creation - params)); + // Here we quickly add another doc in a separate request, before the collection creation has + // completed. This has the potential to incorrectly cause preemptive collection creation to run + // twice and create a second collection. MaintainRoutedAliasCmd is meant to guard against this + // race condition by acquiring a lock on the collection name. 
assertUpdateResponse( + add( + alias, + Collections.singletonList( + sdoc( + "id", + "6", + "timestamp_dt", + "2017-10-25T23:01:00Z")), // might cause duplicate preemptive creation + params)); assertUpdateResponse(solrClient.commit(alias)); } private void checkPreemptiveCase1(String alias) throws SolrServerException, IOException { List cols; - cols = new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); - assertTrue("Preemptive creation happened twice and created a collection " + - "further in the future than the configured time slice!",!cols.contains("myalias" + TRA + "2017-10-27")); - - validateCollectionCountAndAvailability(alias, 4, "Only 4 cols expected (premptive create happened" + - "twice among threads"); + cols = + new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); + assertTrue( + "Preemptive creation happened twice and created a collection " + + "further in the future than the configured time slice!", + !cols.contains("myalias" + TRA + "2017-10-27")); + + validateCollectionCountAndAvailability( + alias, 4, "Only 4 cols expected (preemptive create happened twice among threads)"); assertEquals(4, cols.size()); assertNumDocs("2017-10-23", 1, alias); assertNumDocs("2017-10-24", 1, alias); @@ -586,21 +766,30 @@ private void checkPreemptiveCase1(String alias) throws SolrServerException, IOEx } @SuppressWarnings("SameParameterValue") - private void addOneDocSynchCreation(int numShards, String alias) throws SolrServerException, IOException, InterruptedException { + private void addOneDocSynchCreation(int numShards, String alias) + throws SolrServerException, IOException, InterruptedException { // cause some collections to be created - assertUpdateResponse(solrClient.add(alias, - sdoc("id","1","timestamp_dt", "2017-10-25T00:00:00Z") - )); + assertUpdateResponse( + solrClient.add(alias, sdoc("id", "1", "timestamp_dt", "2017-10-25T00:00:00Z"))); assertUpdateResponse(solrClient.commit(alias)); // wait for all the collections to exist... - waitColAndAlias(alias, TRA, "2017-10-23", numShards); // This one should have already existed from the alias creation + waitColAndAlias( + alias, + TRA, + "2017-10-23", + numShards); // This one should have already existed from the alias creation waitColAndAlias(alias, TRA, "2017-10-24", numShards); // Create 1 - waitColAndAlias(alias, TRA, "2017-10-25", numShards); // Create 2nd synchronously (ensure this is not broken) + waitColAndAlias( + alias, + TRA, + "2017-10-25", + numShards); // Create 2nd synchronously (ensure this is not broken) // normal update, nothing special, no collection creation required. 
- List cols = new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); - assertEquals(3,cols.size()); + List cols = + new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); + assertEquals(3, cols.size()); assertNumDocs("2017-10-23", 0, alias); assertNumDocs("2017-10-24", 0, alias); @@ -609,15 +798,17 @@ private void addOneDocSynchCreation(int numShards, String alias) throws SolrServ validateCollectionCountAndAvailability(alias, 3, "was expecting 3 live collections"); } - private void validateCollectionCountAndAvailability(String alias, int expected, String message) throws SolrServerException, IOException { + private void validateCollectionCountAndAvailability(String alias, int expected, String message) + throws SolrServerException, IOException { List cols; - cols = new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); - assertEquals(message,expected,cols.size()); // only one created in async case + cols = + new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); + assertEquals(message, expected, cols.size()); // only one created in async case // make sure they all exist for (String col : cols) { try { - solrClient.query(col, params("q", "*:*","rows", "10")); + solrClient.query(col, params("q", "*:*", "rows", "10")); } catch (SolrException e) { e.printStackTrace(); fail("Unable to query " + col); @@ -625,15 +816,19 @@ private void validateCollectionCountAndAvailability(String alias, int expected, } } - private void assertNumDocs(final String datePart, int expected, String alias) throws SolrServerException, IOException { - QueryResponse resp = solrClient.query(alias + TRA + datePart, params( - "q", "*:*", - "rows", "10")); + private void assertNumDocs(final String datePart, int expected, String alias) + throws SolrServerException, IOException { + QueryResponse resp = + solrClient.query( + alias + TRA + datePart, + params( + "q", "*:*", + "rows", "10")); assertEquals(expected, resp.getResults().getNumFound()); } - - private void testFailedDocument(Instant timestamp, String errorMsg) throws SolrServerException, IOException { + private void testFailedDocument(Instant timestamp, String errorMsg) + throws SolrServerException, IOException { try { final UpdateResponse resp = solrClient.add(alias, newDoc(timestamp)); // if we have a TolerantUpdateProcessor then we see it there) @@ -645,7 +840,6 @@ private void testFailedDocument(Instant timestamp, String errorMsg) throws SolrS numDocsDeletedOrFailed++; } - @Override public String getAlias() { return alias; @@ -661,12 +855,15 @@ private int queryNumDocs(String q) throws SolrServerException, IOException { } private void assertInvariants(String... expectedColls) throws IOException, SolrServerException { - final int expectNumFound = lastDocId - numDocsDeletedOrFailed; //lastDocId is effectively # generated docs + final int expectNumFound = + lastDocId - numDocsDeletedOrFailed; // lastDocId is effectively # generated docs - final List cols = new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); + final List cols = + new CollectionAdminRequest.ListAliases().process(solrClient).getAliasesAsLists().get(alias); assert !cols.isEmpty(); - assertArrayEquals("expected reverse sorted", + assertArrayEquals( + "expected reverse sorted", cols.stream().sorted(Collections.reverseOrder()).toArray(), cols.toArray()); @@ -674,18 +871,21 @@ private void assertInvariants(String... 
expectedColls) throws IOException, SolrS Instant colEndInstant = null; // exclusive end for (String col : cols) { // ASSUMPTION: reverse sorted order final Instant colStartInstant = TimeRoutedAlias.parseInstantFromCollectionName(alias, col); - final QueryResponse colStatsResp = solrClient.query(col, params( - "q", "*:*", - "rows", "0", - "stats", "true", - "stats.field", getTimeField())); + final QueryResponse colStatsResp = + solrClient.query( + col, + params( + "q", "*:*", + "rows", "0", + "stats", "true", + "stats.field", getTimeField())); long numFound = colStatsResp.getResults().getNumFound(); if (numFound > 0) { totalNumFound += numFound; final FieldStatsInfo timestampStats = colStatsResp.getFieldStatsInfo().get(getTimeField()); - assertTrue(colStartInstant.toEpochMilli() <= ((Date)timestampStats.getMin()).getTime()); + assertTrue(colStartInstant.toEpochMilli() <= ((Date) timestampStats.getMin()).getTime()); if (colEndInstant != null) { - assertTrue(colEndInstant.toEpochMilli() > ((Date)timestampStats.getMax()).getTime()); + assertTrue(colEndInstant.toEpochMilli() > ((Date) timestampStats.getMax()).getTime()); } } @@ -696,9 +896,13 @@ private void assertInvariants(String... expectedColls) throws IOException, SolrS } private SolrInputDocument newDoc(Instant timestamp) { - return sdoc("id", Integer.toString(++lastDocId), - getTimeField(), timestamp.toString(), - getIntField(), "0"); // always 0 + return sdoc( + "id", + Integer.toString(++lastDocId), + getTimeField(), + timestamp.toString(), + getIntField(), + "0"); // always 0 } private String getTimeField() { @@ -707,14 +911,18 @@ private String getTimeField() { @Test public void testParse() { - assertEquals(Instant.parse("2017-10-02T03:04:05Z"), - TimeRoutedAlias.parseInstantFromCollectionName(alias, alias + TRA + "2017-10-02_03_04_05")); - assertEquals(Instant.parse("2017-10-02T03:04:00Z"), - TimeRoutedAlias.parseInstantFromCollectionName(alias, alias + TRA + "2017-10-02_03_04")); - assertEquals(Instant.parse("2017-10-02T03:00:00Z"), - TimeRoutedAlias.parseInstantFromCollectionName(alias, alias + TRA + "2017-10-02_03")); - assertEquals(Instant.parse("2017-10-02T00:00:00Z"), - TimeRoutedAlias.parseInstantFromCollectionName(alias, alias + TRA + "2017-10-02")); + assertEquals( + Instant.parse("2017-10-02T03:04:05Z"), + TimeRoutedAlias.parseInstantFromCollectionName(alias, alias + TRA + "2017-10-02_03_04_05")); + assertEquals( + Instant.parse("2017-10-02T03:04:00Z"), + TimeRoutedAlias.parseInstantFromCollectionName(alias, alias + TRA + "2017-10-02_03_04")); + assertEquals( + Instant.parse("2017-10-02T03:00:00Z"), + TimeRoutedAlias.parseInstantFromCollectionName(alias, alias + TRA + "2017-10-02_03")); + assertEquals( + Instant.parse("2017-10-02T00:00:00Z"), + TimeRoutedAlias.parseInstantFromCollectionName(alias, alias + TRA + "2017-10-02")); } @Test @@ -724,8 +932,9 @@ public void testMaxFutureMs() throws Exception { ZonedDateTime a = ZonedDateTime.now(ZoneId.from(ZoneOffset.UTC)); ZonedDateTime b = a.plusMonths(2); ZonedDateTime c = a.plusYears(1); - DateTimeFormatter SOLR_DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.ROOT).withZone(ZoneId.from(ZoneOffset.UTC)); - + DateTimeFormatter SOLR_DATE_TIME_FORMATTER = + DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.ROOT) + .withZone(ZoneId.from(ZoneOffset.UTC)); final String YYYY_MM_DD_a = SOLR_DATE_TIME_FORMATTER.format(a); final String YYYY_MM_DD_b = SOLR_DATE_TIME_FORMATTER.format(b); @@ -733,30 +942,42 @@ public void testMaxFutureMs() 
throws Exception { final int numShards = 1 + random().nextInt(4); final int numReplicas = 1 + random().nextInt(3); - CollectionAdminRequest.createTimeRoutedAlias(alias, YYYY_MM_DD_a, "+180DAY", getTimeField(), - CollectionAdminRequest.createCollection("_unused_", "_default", numShards, numReplicas)) + CollectionAdminRequest.createTimeRoutedAlias( + alias, + YYYY_MM_DD_a, + "+180DAY", + getTimeField(), + CollectionAdminRequest.createCollection("_unused_", "_default", numShards, numReplicas)) .setMaxFutureMs(maxFutureMs) .process(solrClient); final ModifiableSolrParams params = params(); - final String dayB = DateTimeFormatter.ISO_INSTANT.format(DateMathParser.parseMath(new Date(), YYYY_MM_DD_b).toInstant()); - assertUpdateResponse(add(alias, Arrays.asList( - sdoc("id", "2", "timestamp_dt", dayB)), - params)); + final String dayB = + DateTimeFormatter.ISO_INSTANT.format( + DateMathParser.parseMath(new Date(), YYYY_MM_DD_b).toInstant()); + assertUpdateResponse(add(alias, Arrays.asList(sdoc("id", "2", "timestamp_dt", dayB)), params)); - final String dayC = DateTimeFormatter.ISO_INSTANT.format(DateMathParser.parseMath(new Date(), YYYY_MM_DD_c).toInstant()); + final String dayC = + DateTimeFormatter.ISO_INSTANT.format( + DateMathParser.parseMath(new Date(), YYYY_MM_DD_c).toInstant()); try { - assertUpdateResponse(add(alias, Arrays.asList( - sdoc("id", "3", "timestamp_dt", dayC)), - params)); + assertUpdateResponse( + add(alias, Arrays.asList(sdoc("id", "3", "timestamp_dt", dayC)), params)); fail("expected update with " + dayC + " timestamp to fail"); } catch (Exception ex) { - assertTrue(ex.getMessage(), ex.getMessage().contains("The document's time routed key of " + dayC + " is too far in the future given router.maxFutureMs="+maxFutureMs)); + assertTrue( + ex.getMessage(), + ex.getMessage() + .contains( + "The document's time routed key of " + + dayC + + " is too far in the future given router.maxFutureMs=" + + maxFutureMs)); } } - @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-13943") + @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-13943") @Test public void testDateMathInStart() throws Exception { ClusterStateProvider clusterStateProvider = solrClient.getClusterStateProvider(); @@ -774,230 +995,285 @@ public void testDateMathInStart() throws Exception { // 4 of which are leaders, and 8 of which should fail this test. 
final int numShards = 1 + random().nextInt(4); final int numReplicas = 1 + random().nextInt(3); - CollectionAdminRequest.createTimeRoutedAlias(alias, "2019-09-14T03:00:00Z/DAY", "+1DAY", getTimeField(), - CollectionAdminRequest.createCollection("_unused_", configName, numShards, numReplicas)) + CollectionAdminRequest.createTimeRoutedAlias( + alias, + "2019-09-14T03:00:00Z/DAY", + "+1DAY", + getTimeField(), + CollectionAdminRequest.createCollection("_unused_", configName, numShards, numReplicas)) .process(solrClient); aliasUpdate.await(); if (BaseHttpClusterStateProvider.class.isAssignableFrom(aClass)) { - ((BaseHttpClusterStateProvider)clusterStateProvider).resolveAlias(getAlias(), true); + ((BaseHttpClusterStateProvider) clusterStateProvider).resolveAlias(getAlias(), true); } aliasUpdate = new CountDownLatch(1); monitorAlias(aliasUpdate); ModifiableSolrParams params = params(); - String nowDay = DateTimeFormatter.ISO_INSTANT.format(DateMathParser.parseMath(new Date(), "2019-09-14T01:00:00Z").toInstant()); - assertUpdateResponse(add(alias, Arrays.asList( - sdoc("id", "1", "timestamp_dt", nowDay)), // should not cause preemptive creation of 10-28 now - params)); - - // this process should have lead to the modification of the start time for the alias, converting it into - // a parsable date, removing the DateMath + String nowDay = + DateTimeFormatter.ISO_INSTANT.format( + DateMathParser.parseMath(new Date(), "2019-09-14T01:00:00Z").toInstant()); + assertUpdateResponse( + add( + alias, + Arrays.asList( + sdoc( + "id", + "1", + "timestamp_dt", + nowDay)), // should not cause preemptive creation of 10-28 now + params)); + + // this process should have led to the modification of the start time for the alias, converting + // it into a parsable date, removing the DateMath // what we test next happens in a separate thread, so we have to give it some time to happen aliasUpdate.await(); if (BaseHttpClusterStateProvider.class.isAssignableFrom(aClass)) { - ((BaseHttpClusterStateProvider)clusterStateProvider).resolveAlias(getAlias(), true); + ((BaseHttpClusterStateProvider) clusterStateProvider).resolveAlias(getAlias(), true); } - String hopeFullyModified = clusterStateProvider.getAliasProperties(getAlias()).get(ROUTER_START); + String hopeFullyModified = + clusterStateProvider.getAliasProperties(getAlias()).get(ROUTER_START); try { Instant.parse(hopeFullyModified); } catch (DateTimeParseException e) { - fail(ROUTER_START + " should not have any date math by this point and parse as an instant. Using "+ aClass +" Found:" + hopeFullyModified); + fail( + ROUTER_START + + " should not have any date math by this point and parse as an instant. Using " + + aClass + + " Found:" + + hopeFullyModified); } } - private void monitorAlias(CountDownLatch aliasUpdate) throws KeeperException, InterruptedException { + private void monitorAlias(CountDownLatch aliasUpdate) + throws KeeperException, InterruptedException { Stat stat = new Stat(); - zkClient().getData("/aliases.json", new Watcher() { - @Override - public void process(WatchedEvent watchedEvent) { - aliasUpdate.countDown(); - } - }, stat, true); + zkClient() + .getData( + "/aliases.json", + new Watcher() { + @Override + public void process(WatchedEvent watchedEvent) { + aliasUpdate.countDown(); + } + }, + stat, + true); } /** - * Need to ensure that the existing TRA's gracefully handle, old, new and mixtures thereof. TRA's with - * an autoDeleteAge setting will gracefully convert to the new format over time. 
+ * Need to ensure that the existing TRA's gracefully handle old, new, and mixtures thereof. TRA's + * with an autoDeleteAge setting will gracefully convert to the new format over time. */ @Test public void handleLegacyCollectionNames() throws Exception { manuallyConstructLegacyTRA(); - // OK we now have an alias with legacy names and 2 documents. Let's try to query it to ensure query time back compat + // OK we now have an alias with legacy names and 2 documents. Let's try to query it to ensure + // query time back compat - QueryResponse resp = solrClient.query(alias, params( - "q", "*:*", - "rows", "10" - )); + QueryResponse resp = + solrClient.query( + alias, + params( + "q", "*:*", + "rows", "10")); - assertEquals(2,resp.getResults().getNumFound()); + assertEquals(2, resp.getResults().getNumFound()); // verify that we can still add documents to it. - assertUpdateResponse(solrClient.add(alias, - sdoc("id","3","timestamp_dt", "2017-10-23T00:00:01Z") - )); + assertUpdateResponse( + solrClient.add(alias, sdoc("id", "3", "timestamp_dt", "2017-10-23T00:00:01Z"))); solrClient.commit(alias); - resp = solrClient.query(alias, params( - "q", "*:*", - "rows", "10" - )); - assertEquals(3,resp.getResults().getNumFound()); - + resp = + solrClient.query( + alias, + params( + "q", "*:*", + "rows", "10")); + assertEquals(3, resp.getResults().getNumFound()); // verify that it can create new collections - assertUpdateResponse(solrClient.add(alias, - sdoc("id","4","timestamp_dt", "2017-10-24T23:00:01Z") // preemptive - )); + assertUpdateResponse( + solrClient.add( + alias, sdoc("id", "4", "timestamp_dt", "2017-10-24T23:00:01Z") // preemptive + )); solrClient.commit(alias); - waitColAndAlias(alias, TRA, "2017-10-25",1); + waitColAndAlias(alias, TRA, "2017-10-25", 1); // verify that mixed old/new collections works for update/query - resp = solrClient.query(alias, params( - "q", "*:*", - "rows", "10" - )); - assertEquals(4,resp.getResults().getNumFound()); + resp = + solrClient.query( + alias, + params( + "q", "*:*", + "rows", "10")); + assertEquals(4, resp.getResults().getNumFound()); // verify that documents go to the right collections - assertUpdateResponse(solrClient.add(alias, - sdoc("id","5","timestamp_dt", "2017-10-25T12:00:01Z") // preemptive - )); + assertUpdateResponse( + solrClient.add( + alias, sdoc("id", "5", "timestamp_dt", "2017-10-25T12:00:01Z") // preemptive + )); solrClient.commit(alias); - resp = solrClient.query("myalias_2017-10-23", params( - "q", "*:*", - "rows", "10", - "sort", "id asc" - )); - assertEquals(2,resp.getResults().getNumFound()); + resp = + solrClient.query( + "myalias_2017-10-23", + params( + "q", "*:*", + "rows", "10", + "sort", "id asc")); + assertEquals(2, resp.getResults().getNumFound()); assertEquals(resp.getResults().get(0).getFirstValue("id"), "1"); assertEquals(resp.getResults().get(1).getFirstValue("id"), "3"); - resp = solrClient.query("myalias_2017-10-24", params( - "q", "*:*", - "rows", "10", - "sort", "id asc" - )); - assertEquals(2,resp.getResults().getNumFound()); + resp = + solrClient.query( + "myalias_2017-10-24", + params( + "q", "*:*", + "rows", "10", + "sort", "id asc")); + assertEquals(2, resp.getResults().getNumFound()); assertEquals(resp.getResults().get(0).getFirstValue("id"), "2"); assertEquals(resp.getResults().get(1).getFirstValue("id"), "4"); - resp = solrClient.query("myalias" + TRA + "2017-10-25", params( - "q", "*:*", - "rows", "10", - "sort", "id asc" - )); - assertEquals(1,resp.getResults().getNumFound()); + resp = + solrClient.query( 
"myalias" + TRA + "2017-10-25", + params( + "q", "*:*", + "rows", "10", + "sort", "id asc")); + assertEquals(1, resp.getResults().getNumFound()); assertEquals(resp.getResults().get(0).getFirstValue("id"), "5"); // verify that auto-delete will age out old collections checkCollectionCountIs(3); - assertUpdateResponse(solrClient.add(alias, - sdoc("id","6","timestamp_dt", "2017-10-26T12:00:01Z") // preemptive - )); - waitColAndAlias(alias, TRA,"2017-10-26",1); + assertUpdateResponse( + solrClient.add( + alias, sdoc("id", "6", "timestamp_dt", "2017-10-26T12:00:01Z") // preemptive + )); + waitColAndAlias(alias, TRA, "2017-10-26", 1); checkCollectionCountIs(3) - .containsAll(Arrays.asList( - "myalias_2017-10-24", - "myalias" + TRA + "2017-10-25", - "myalias" + TRA + "2017-10-26")); - - assertUpdateResponse(solrClient.add(alias, - sdoc("id","7","timestamp_dt", "2017-10-27T12:00:01Z") // preemptive - )); - waitColAndAlias(alias, TRA,"2017-10-27",1); - waitCoreCount("myalias_2017-10-23",0); + .containsAll( + Arrays.asList( + "myalias_2017-10-24", + "myalias" + TRA + "2017-10-25", + "myalias" + TRA + "2017-10-26")); + + assertUpdateResponse( + solrClient.add( + alias, sdoc("id", "7", "timestamp_dt", "2017-10-27T12:00:01Z") // preemptive + )); + waitColAndAlias(alias, TRA, "2017-10-27", 1); + waitCoreCount("myalias_2017-10-23", 0); checkCollectionCountIs(3) - .containsAll(Arrays.asList( - "myalias" + TRA + "2017-10-25", - "myalias" + TRA + "2017-10-26", - "myalias" + TRA + "2017-10-27")); + .containsAll( + Arrays.asList( + "myalias" + TRA + "2017-10-25", + "myalias" + TRA + "2017-10-26", + "myalias" + TRA + "2017-10-27")); // verify that auto-delete works on new collections. - assertUpdateResponse(solrClient.add(alias, - sdoc("id","8","timestamp_dt", "2017-10-28T12:00:01Z") // preemptive - )); - waitColAndAlias(alias, TRA,"2017-10-28",1); - waitCoreCount("myalias_2017-10-24",0); + assertUpdateResponse( + solrClient.add( + alias, sdoc("id", "8", "timestamp_dt", "2017-10-28T12:00:01Z") // preemptive + )); + waitColAndAlias(alias, TRA, "2017-10-28", 1); + waitCoreCount("myalias_2017-10-24", 0); checkCollectionCountIs(3) - .containsAll(Arrays.asList( - "myalias" + TRA + "2017-10-26", - "myalias" + TRA + "2017-10-27", - "myalias" + TRA + "2017-10-28")); - + .containsAll( + Arrays.asList( + "myalias" + TRA + "2017-10-26", + "myalias" + TRA + "2017-10-27", + "myalias" + TRA + "2017-10-28")); solrClient.commit(alias); - resp = solrClient.query(alias, params( - "q", "*:*", - "rows", "10" - )); - assertEquals(3,resp.getResults().getNumFound()); - + resp = + solrClient.query( + alias, + params( + "q", "*:*", + "rows", "10")); + assertEquals(3, resp.getResults().getNumFound()); } private List checkCollectionCountIs(int num) { ClusterStateProvider clusterStateProvider = solrClient.getClusterStateProvider(); List collections = clusterStateProvider.resolveAlias(alias); if (clusterStateProvider instanceof BaseHttpClusterStateProvider) { - collections = ((BaseHttpClusterStateProvider)clusterStateProvider).resolveAlias(alias,true); + collections = ((BaseHttpClusterStateProvider) clusterStateProvider).resolveAlias(alias, true); } -// System.out.println(); -// System.out.println(clusterStateProvider.getClass()); -// System.out.println(collections); -// System.out.println(); + // System.out.println(); + // System.out.println(clusterStateProvider.getClass()); + // System.out.println(collections); + // System.out.println(); assertEquals(num, collections.size()); // starting point return collections; } - // here we do things 
not to be emulated elsewhere to create a legacy condition and ensure that we can - // work with both old and new formats. + // here we do things not to be emulated elsewhere to create a legacy condition and ensure that we + // can work with both old and new formats. @SuppressWarnings({"unchecked", "rawtypes"}) private void manuallyConstructLegacyTRA() throws Exception { // first create a "modern" alias String configName = getSaferTestName(); createConfigSet(configName); - final int numShards = 1 ; - final int numReplicas = 1 ; - CollectionAdminRequest.createTimeRoutedAlias(alias, "2017-10-23T00:00:00Z", "+1DAY", getTimeField(), - CollectionAdminRequest.createCollection("_unused_", configName, numShards, numReplicas)) - .setPreemptiveCreateWindow("3HOUR").setAutoDeleteAge("/DAY-3DAYS") + final int numShards = 1; + final int numReplicas = 1; + CollectionAdminRequest.createTimeRoutedAlias( + alias, + "2017-10-23T00:00:00Z", + "+1DAY", + getTimeField(), + CollectionAdminRequest.createCollection("_unused_", configName, numShards, numReplicas)) + .setPreemptiveCreateWindow("3HOUR") + .setAutoDeleteAge("/DAY-3DAYS") .process(solrClient); // now create collections that look like the legacy (pre __TRA__) names... String legacy23 = alias + "_" + "2017-10-23"; - CollectionAdminRequest.createCollection(legacy23, configName, numShards,numReplicas).process(solrClient); + CollectionAdminRequest.createCollection(legacy23, configName, numShards, numReplicas) + .process(solrClient); String legacy24 = alias + "_" + "2017-10-24"; - CollectionAdminRequest.createCollection(legacy24, configName, numShards,numReplicas).process(solrClient); + CollectionAdminRequest.createCollection(legacy24, configName, numShards, numReplicas) + .process(solrClient); - waitCol(1,legacy23); - waitCol(1,legacy24); + waitCol(1, legacy23); + waitCol(1, legacy24); // put some data in the legacy collections: - assertUpdateResponse(solrClient.add(legacy23, - sdoc("id","1","timestamp_dt", "2017-10-23T00:00:01Z") - )); - assertUpdateResponse(solrClient.add(legacy24, - sdoc("id","2","timestamp_dt", "2017-10-24T00:00:01Z") - )); + assertUpdateResponse( + solrClient.add(legacy23, sdoc("id", "1", "timestamp_dt", "2017-10-23T00:00:01Z"))); + assertUpdateResponse( + solrClient.add(legacy24, sdoc("id", "2", "timestamp_dt", "2017-10-24T00:00:01Z"))); solrClient.commit(legacy23); solrClient.commit(legacy24); - QueryResponse resp = solrClient.query(legacy23, params( - "q", "*:*", - "rows", "10")); - assertEquals(1,resp.getResults().getNumFound()); - - resp = solrClient.query(legacy24, params( - "q", "*:*", - "rows", "10")); - assertEquals(1,resp.getResults().getNumFound()); + QueryResponse resp = + solrClient.query( + legacy23, + params( + "q", "*:*", + "rows", "10")); + assertEquals(1, resp.getResults().getNumFound()); + + resp = + solrClient.query( + legacy24, + params( + "q", "*:*", + "rows", "10")); + assertEquals(1, resp.getResults().getNumFound()); // now knock out the collection backing our alias ZkStateReader zkStateReader = cluster.getSolrClient().getZkStateReader(); @@ -1020,33 +1296,42 @@ private void manuallyConstructLegacyTRA() throws Exception { assertNotEquals(0, aliasMap.size()); Map colAliases = aliasMap.getOrDefault("collection", Collections.emptyMap()); - assertNotEquals(0,colAliases.size()); + assertNotEquals(0, colAliases.size()); String singleInitialCollection = (String) colAliases.get(alias); assertFalse(singleInitialCollection.contains(",")); // replace with our two new collections... in asc order! 
- colAliases.put(alias,String.join(",",legacy24,legacy23)); + colAliases.put(alias, String.join(",", legacy24, legacy23)); data = Utils.toJSON(aliasMap); - zkStateReader.getZkClient().setData("/aliases.json",data,true); + zkStateReader.getZkClient().setData("/aliases.json", data, true); zkStateReader.aliasesManager.update(); // make sure we've updated with the data we just sent aliases = zkStateReader.aliasesManager.getAliases(); - assertEquals(2,aliases.getCollectionAliasListMap().get(alias).size()); + assertEquals(2, aliases.getCollectionAliasListMap().get(alias).size()); CollectionAdminRequest.deleteCollection(singleInitialCollection).process(solrClient); - waitCoreCount(singleInitialCollection,0); + waitCoreCount(singleInitialCollection, 0); // now make the legacy collections part of the alias - CollectionAdminRequest.setCollectionProperty(legacy23,RoutedAlias.ROUTED_ALIAS_NAME_CORE_PROP, alias).process(solrClient); - CollectionAdminRequest.setCollectionProperty(legacy24,RoutedAlias.ROUTED_ALIAS_NAME_CORE_PROP, alias).process(solrClient); + CollectionAdminRequest.setCollectionProperty( + legacy23, RoutedAlias.ROUTED_ALIAS_NAME_CORE_PROP, alias) + .process(solrClient); + CollectionAdminRequest.setCollectionProperty( + legacy24, RoutedAlias.ROUTED_ALIAS_NAME_CORE_PROP, alias) + .process(solrClient); CollectionAdminRequest.reloadCollection(legacy23).process(solrClient); CollectionAdminRequest.reloadCollection(legacy24).process(solrClient); - cluster.getOpenOverseer().getCoreContainer().getZkController().getZkStateReader().aliasesManager.update(); + cluster + .getOpenOverseer() + .getCoreContainer() + .getZkController() + .getZkStateReader() + .aliasesManager + .update(); } - } diff --git a/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java index f51b94a9e98..6c679f1585a 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/TolerantUpdateProcessorTest.java @@ -16,7 +16,6 @@ */ package org.apache.solr.update.processor; -import javax.xml.xpath.XPathExpressionException; import java.io.IOException; import java.io.StringWriter; import java.lang.reflect.Method; @@ -26,7 +25,7 @@ import java.util.HashSet; import java.util.List; import java.util.Set; - +import javax.xml.xpath.XPathExpressionException; import org.apache.solr.client.solrj.util.ClientUtils; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; @@ -48,169 +47,192 @@ import org.xml.sax.SAXException; public class TolerantUpdateProcessorTest extends UpdateProcessorTestBase { - - /** - * List of valid + invalid documents - */ + + /** List of valid + invalid documents */ private static List docs = null; - /** - * IDs of the invalid documents in docs - */ + /** IDs of the invalid documents in docs */ private static String[] badIds = null; - + @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig-update-processor-chains.xml", "schema12.xml"); } - + @AfterClass public static void tearDownClass() { docs = null; badIds = null; } - + @Override public void setUp() throws Exception { super.setUp(); - //expected exception messages + // expected exception messages ignoreException("Error adding field"); ignoreException("Document is missing mandatory uniqueKey field"); if (docs == null) { docs = new ArrayList<>(20); badIds = new String[10]; - for(int i = 0; i < 
10;i++) { + for (int i = 0; i < 10; i++) { // a valid document - docs.add(doc(field("id", String.valueOf(2*i)), field("weight", i))); + docs.add(doc(field("id", String.valueOf(2 * i)), field("weight", i))); // ... and an invalid one - docs.add(doc(field("id", String.valueOf(2*i+1)), field("weight", "b"))); - badIds[i] = String.valueOf(2*i+1); + docs.add(doc(field("id", String.valueOf(2 * i + 1)), field("weight", "b"))); + badIds[i] = String.valueOf(2 * i + 1); } } - } - + @Override public void tearDown() throws Exception { resetExceptionIgnores(); assertU(delQ("*:*")); assertU(commit()); - assertQ(req("q","*:*") - ,"//result[@numFound='0']"); + assertQ(req("q", "*:*"), "//result[@numFound='0']"); super.tearDown(); } /** - * future proof TolerantUpdateProcessor against new default method impls being added to UpdateProcessor - * to ensure that every method involved in a processor chain life cycle is overridden with - * exception catching/tracking. + * future proof TolerantUpdateProcessor against new default method impls being added to + * UpdateProcessor to ensure that every method involved in a processor chain life cycle is + * overridden with exception catching/tracking. */ public void testReflection() { for (Method method : TolerantUpdateProcessor.class.getMethods()) { if (method.getDeclaringClass().equals(Object.class) || method.getName().equals("close")) { continue; } - assertEquals("base class(es) has changed, TolerantUpdateProcessor needs updated to ensure it " + - "overrides all solr update lifcycle methods with exception tracking: " + method.toString(), - TolerantUpdateProcessor.class, method.getDeclaringClass()); + assertEquals( + "base class(es) has changed, TolerantUpdateProcessor needs updated to ensure it " + + "overrides all solr update lifcycle methods with exception tracking: " + + method.toString(), + TolerantUpdateProcessor.class, + method.getDeclaringClass()); } } - - + @Test public void testValidAdds() throws IOException { SolrInputDocument validDoc = doc(field("id", "1"), field("text", "the quick brown fox")); add("tolerant-chain-max-errors-10", null, validDoc); - + validDoc = doc(field("id", "2"), field("text", "the quick brown fox")); add("tolerant-chain-max-errors-not-set", null, validDoc); - + assertU(commit()); - assertQ(req("q","*:*") - ,"//result[@numFound='2']"); - assertQ(req("q","id:1") - ,"//result[@numFound='1']"); - assertQ(req("q","id:2") - ,"//result[@numFound='1']"); + assertQ(req("q", "*:*"), "//result[@numFound='2']"); + assertQ(req("q", "id:1"), "//result[@numFound='1']"); + assertQ(req("q", "id:2"), "//result[@numFound='1']"); } - + @Test public void testInvalidAdds() throws IOException { - SolrInputDocument invalidDoc1 = doc(field("text", "the quick brown fox")); //no id + SolrInputDocument invalidDoc1 = doc(field("text", "the quick brown fox")); // no id // This doc should fail without being tolerant Exception e = expectThrows(Exception.class, () -> add("not-tolerant", null, invalidDoc1)); assertTrue(e.getMessage().contains("Document is missing mandatory uniqueKey field")); - assertAddsSucceedWithErrors("tolerant-chain-max-errors-10", Arrays.asList(new SolrInputDocument[]{invalidDoc1}), null, "(unknown)"); - - //a valid doc + assertAddsSucceedWithErrors( + "tolerant-chain-max-errors-10", + Arrays.asList(new SolrInputDocument[] {invalidDoc1}), + null, + "(unknown)"); + + // a valid doc SolrInputDocument validDoc1 = doc(field("id", "1"), field("text", "the quick brown fox")); // This batch should fail without being tolerant - e = 
expectThrows(Exception.class, () -> add("not-tolerant", null, - Arrays.asList(new SolrInputDocument[]{invalidDoc1, validDoc1}))); + e = + expectThrows( + Exception.class, + () -> + add( + "not-tolerant", + null, + Arrays.asList(new SolrInputDocument[] {invalidDoc1, validDoc1}))); assertTrue(e.getMessage().contains("Document is missing mandatory uniqueKey field")); - + assertU(commit()); - assertQ(req("q","id:1"),"//result[@numFound='0']"); - - - assertAddsSucceedWithErrors("tolerant-chain-max-errors-10", Arrays.asList(new SolrInputDocument[]{invalidDoc1, validDoc1}), null, "(unknown)"); + assertQ(req("q", "id:1"), "//result[@numFound='0']"); + + assertAddsSucceedWithErrors( + "tolerant-chain-max-errors-10", + Arrays.asList(new SolrInputDocument[] {invalidDoc1, validDoc1}), + null, + "(unknown)"); assertU(commit()); - - // verify that the good document made it in. - assertQ(req("q","id:1"),"//result[@numFound='1']"); - + + // verify that the good document made it in. + assertQ(req("q", "id:1"), "//result[@numFound='1']"); + SolrInputDocument invalidDoc2 = doc(field("id", "2"), field("weight", "aaa")); SolrInputDocument validDoc2 = doc(field("id", "3"), field("weight", "3")); // This batch should fail without being tolerant - e = expectThrows(Exception.class, () -> add("not-tolerant", null, - Arrays.asList(new SolrInputDocument[]{invalidDoc2, validDoc2}))); + e = + expectThrows( + Exception.class, + () -> + add( + "not-tolerant", + null, + Arrays.asList(new SolrInputDocument[] {invalidDoc2, validDoc2}))); assertTrue(e.getMessage().contains("Error adding field")); assertU(commit()); - assertQ(req("q","id:3"),"//result[@numFound='0']"); - - assertAddsSucceedWithErrors("tolerant-chain-max-errors-10", Arrays.asList(new SolrInputDocument[]{invalidDoc2, validDoc2}), null, "2"); + assertQ(req("q", "id:3"), "//result[@numFound='0']"); + + assertAddsSucceedWithErrors( + "tolerant-chain-max-errors-10", + Arrays.asList(new SolrInputDocument[] {invalidDoc2, validDoc2}), + null, + "2"); assertU(commit()); - + // The valid document was indexed - assertQ(req("q","id:3"),"//result[@numFound='1']"); - + assertQ(req("q", "id:3"), "//result[@numFound='1']"); + // The invalid document was NOT indexed - assertQ(req("q","id:2"),"//result[@numFound='0']"); - + assertQ(req("q", "id:2"), "//result[@numFound='0']"); } - + @Test public void testMaxErrorsDefault() throws IOException { - // by default the TolerantUpdateProcessor accepts all errors, so this batch should succeed with 10 errors. + // by default the TolerantUpdateProcessor accepts all errors, so this batch should succeed with + // 10 errors. 
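The comment closing the hunk above states the TolerantUpdateProcessor default: with no configured or requested limit, every failing document is tolerated and reported in the response header's "errors" list instead of failing the batch, and the tests below tighten that with a maxErrors request parameter. A minimal sketch of those parameters, using only the parameter name these tests exercise:

    import org.apache.solr.common.params.ModifiableSolrParams;

    public class MaxErrorsSketch {
      // maxErrors as exercised in these tests: unset (reported back as -1)
      // tolerates and reports all bad documents; "0" fails on the first one.
      static ModifiableSolrParams intolerantParams() {
        ModifiableSolrParams requestParams = new ModifiableSolrParams();
        requestParams.add("maxErrors", "0");
        return requestParams;
      }
    }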
assertAddsSucceedWithErrors("tolerant-chain-max-errors-not-set", docs, null, badIds); assertU(commit()); - assertQ(req("q","*:*"),"//result[@numFound='10']"); + assertQ(req("q", "*:*"), "//result[@numFound='10']"); } - + public void testMaxErrorsSucceed() throws IOException { ModifiableSolrParams requestParams = new ModifiableSolrParams(); requestParams.add("maxErrors", "10"); // still OK assertAddsSucceedWithErrors("tolerant-chain-max-errors-not-set", docs, requestParams, badIds); assertU(commit()); - assertQ(req("q","*:*"),"//result[@numFound='10']"); + assertQ(req("q", "*:*"), "//result[@numFound='10']"); } - + @Test public void testMaxErrorsThrowsException() throws IOException { ModifiableSolrParams requestParams = new ModifiableSolrParams(); requestParams.add("maxErrors", "5"); - SolrException e = expectThrows(SolrException.class, () -> - assertAddsSucceedWithErrors("tolerant-chain-max-errors-not-set", docs, requestParams, badIds)); - assertTrue(e.getMessage(), - e.getMessage().contains("ERROR: [doc=1] Error adding field 'weight'='b' msg=For input string: \"b\"")); - //the first good documents made it to the index + SolrException e = + expectThrows( + SolrException.class, + () -> + assertAddsSucceedWithErrors( + "tolerant-chain-max-errors-not-set", docs, requestParams, badIds)); + assertTrue( + e.getMessage(), + e.getMessage() + .contains( + "ERROR: [doc=1] Error adding field 'weight'='b' msg=For input string: \"b\"")); + // the first good documents made it to the index assertU(commit()); - assertQ(req("q","*:*"),"//result[@numFound='6']"); + assertQ(req("q", "*:*"), "//result[@numFound='6']"); } @Test @@ -220,121 +242,141 @@ public void testMaxErrorsInfinite() throws IOException { assertAddsSucceedWithErrors("tolerant-chain-max-errors-not-set", docs, null, badIds); assertU(commit()); - assertQ(req("q","*:*"),"//result[@numFound='10']"); + assertQ(req("q", "*:*"), "//result[@numFound='10']"); } - + @Test public void testMaxErrors0() throws IOException { - //make the TolerantUpdateProcessor intolerant + // make the TolerantUpdateProcessor intolerant List smallBatch = docs.subList(0, 2); ModifiableSolrParams requestParams = new ModifiableSolrParams(); requestParams.add("maxErrors", "0"); - SolrException e = expectThrows(SolrException.class, () -> - assertAddsSucceedWithErrors("tolerant-chain-max-errors-10", smallBatch, requestParams, "1")); - assertTrue(e.getMessage().contains("ERROR: [doc=1] Error adding field 'weight'='b' msg=For input string: \"b\"")); + SolrException e = + expectThrows( + SolrException.class, + () -> + assertAddsSucceedWithErrors( + "tolerant-chain-max-errors-10", smallBatch, requestParams, "1")); + assertTrue( + e.getMessage() + .contains( + "ERROR: [doc=1] Error adding field 'weight'='b' msg=For input string: \"b\"")); - //the first good documents made it to the index + // the first good documents made it to the index assertU(commit()); - assertQ(req("q","*:*"),"//result[@numFound='1']"); + assertQ(req("q", "*:*"), "//result[@numFound='1']"); } - + @Test public void testInvalidDelete() throws XPathExpressionException, SAXException { ignoreException("undefined field invalidfield"); - String response = update("tolerant-chain-max-errors-10", adoc("id", "1", "text", "the quick brown fox")); - assertNull(BaseTestHarness.validateXPath(response, - "//int[@name='status']=0", - "//arr[@name='errors']", - "count(//arr[@name='errors']/lst)=0")); - + String response = + update("tolerant-chain-max-errors-10", adoc("id", "1", "text", "the quick brown fox")); + assertNull( + 
BaseTestHarness.validateXPath( + response, + "//int[@name='status']=0", + "//arr[@name='errors']", + "count(//arr[@name='errors']/lst)=0")); + response = update("tolerant-chain-max-errors-10", delQ("invalidfield:1")); - assertNull(BaseTestHarness.validateXPath - (response, - "//int[@name='status']=0", - "count(//arr[@name='errors']/lst)=1", - "//arr[@name='errors']/lst/str[@name='type']/text()='DELQ'", - "//arr[@name='errors']/lst/str[@name='id']/text()='invalidfield:1'", - "//arr[@name='errors']/lst/str[@name='message']/text()='undefined field invalidfield'")); + assertNull( + BaseTestHarness.validateXPath( + response, + "//int[@name='status']=0", + "count(//arr[@name='errors']/lst)=1", + "//arr[@name='errors']/lst/str[@name='type']/text()='DELQ'", + "//arr[@name='errors']/lst/str[@name='id']/text()='invalidfield:1'", + "//arr[@name='errors']/lst/str[@name='message']/text()='undefined field invalidfield'")); } - + @Test public void testValidDelete() throws XPathExpressionException, SAXException { ignoreException("undefined field invalidfield"); - String response = update("tolerant-chain-max-errors-10", adoc("id", "1", "text", "the quick brown fox")); - assertNull(BaseTestHarness.validateXPath(response, - "//int[@name='status']=0", - "//arr[@name='errors']", - "count(//arr[@name='errors']/lst)=0")); + String response = + update("tolerant-chain-max-errors-10", adoc("id", "1", "text", "the quick brown fox")); + assertNull( + BaseTestHarness.validateXPath( + response, + "//int[@name='status']=0", + "//arr[@name='errors']", + "count(//arr[@name='errors']/lst)=0")); assertU(commit()); - assertQ(req("q","*:*") - ,"//result[@numFound='1']"); - + assertQ(req("q", "*:*"), "//result[@numFound='1']"); + response = update("tolerant-chain-max-errors-10", delQ("id:1")); - assertNull(BaseTestHarness.validateXPath(response, - "//int[@name='status']=0", - "//arr[@name='errors']", - "count(//arr[@name='errors']/lst)=0")); + assertNull( + BaseTestHarness.validateXPath( + response, + "//int[@name='status']=0", + "//arr[@name='errors']", + "count(//arr[@name='errors']/lst)=0")); assertU(commit()); - assertQ(req("q","*:*") - ,"//result[@numFound='0']"); + assertQ(req("q", "*:*"), "//result[@numFound='0']"); } - + @Test public void testResponse() throws SAXException, XPathExpressionException, IOException { - String response = update("tolerant-chain-max-errors-10", adoc("id", "1", "text", "the quick brown fox")); - assertNull(BaseTestHarness.validateXPath(response, - "//int[@name='status']=0", - "//arr[@name='errors']", - "count(//arr[@name='errors']/lst)=0")); + String response = + update("tolerant-chain-max-errors-10", adoc("id", "1", "text", "the quick brown fox")); + assertNull( + BaseTestHarness.validateXPath( + response, + "//int[@name='status']=0", + "//arr[@name='errors']", + "count(//arr[@name='errors']/lst)=0")); response = update("tolerant-chain-max-errors-10", adoc("text", "the quick brown fox")); - assertNull(BaseTestHarness.validateXPath(response, "//int[@name='status']=0", - "//int[@name='maxErrors']/text()='10'", - "count(//arr[@name='errors']/lst)=1", - "//arr[@name='errors']/lst/str[@name='id']/text()='(unknown)'", - "//arr[@name='errors']/lst/str[@name='message']/text()='Document is missing mandatory uniqueKey field: id'")); - + assertNull( + BaseTestHarness.validateXPath( + response, + "//int[@name='status']=0", + "//int[@name='maxErrors']/text()='10'", + "count(//arr[@name='errors']/lst)=1", + "//arr[@name='errors']/lst/str[@name='id']/text()='(unknown)'", + 
"//arr[@name='errors']/lst/str[@name='message']/text()='Document is missing mandatory uniqueKey field: id'")); + response = update("tolerant-chain-max-errors-10", adoc("text", "the quick brown fox")); StringWriter builder = new StringWriter(); builder.append(""); - for (SolrInputDocument doc:docs) { + for (SolrInputDocument doc : docs) { ClientUtils.writeXML(doc, builder); } builder.append(""); response = update("tolerant-chain-max-errors-10", builder.toString()); - assertNull(BaseTestHarness.validateXPath(response, "//int[@name='status']=0", - "//int[@name='maxErrors']/text()='10'", - "count(//arr[@name='errors']/lst)=10", - "not(//arr[@name='errors']/lst/str[@name='id']/text()='0')", - "//arr[@name='errors']/lst/str[@name='id']/text()='1'", - "not(//arr[@name='errors']/lst/str[@name='id']/text()='2')", - "//arr[@name='errors']/lst/str[@name='id']/text()='3'", - "not(//arr[@name='errors']/lst/str[@name='id']/text()='4')", - "//arr[@name='errors']/lst/str[@name='id']/text()='5'", - "not(//arr[@name='errors']/lst/str[@name='id']/text()='6')", - "//arr[@name='errors']/lst/str[@name='id']/text()='7'", - "not(//arr[@name='errors']/lst/str[@name='id']/text()='8')", - "//arr[@name='errors']/lst/str[@name='id']/text()='9'", - "not(//arr[@name='errors']/lst/str[@name='id']/text()='10')", - "//arr[@name='errors']/lst/str[@name='id']/text()='11'", - "not(//arr[@name='errors']/lst/str[@name='id']/text()='12')", - "//arr[@name='errors']/lst/str[@name='id']/text()='13'", - "not(//arr[@name='errors']/lst/str[@name='id']/text()='14')", - "//arr[@name='errors']/lst/str[@name='id']/text()='15'", - "not(//arr[@name='errors']/lst/str[@name='id']/text()='16')", - "//arr[@name='errors']/lst/str[@name='id']/text()='17'", - "not(//arr[@name='errors']/lst/str[@name='id']/text()='18')", - "//arr[@name='errors']/lst/str[@name='id']/text()='19'")); + assertNull( + BaseTestHarness.validateXPath( + response, + "//int[@name='status']=0", + "//int[@name='maxErrors']/text()='10'", + "count(//arr[@name='errors']/lst)=10", + "not(//arr[@name='errors']/lst/str[@name='id']/text()='0')", + "//arr[@name='errors']/lst/str[@name='id']/text()='1'", + "not(//arr[@name='errors']/lst/str[@name='id']/text()='2')", + "//arr[@name='errors']/lst/str[@name='id']/text()='3'", + "not(//arr[@name='errors']/lst/str[@name='id']/text()='4')", + "//arr[@name='errors']/lst/str[@name='id']/text()='5'", + "not(//arr[@name='errors']/lst/str[@name='id']/text()='6')", + "//arr[@name='errors']/lst/str[@name='id']/text()='7'", + "not(//arr[@name='errors']/lst/str[@name='id']/text()='8')", + "//arr[@name='errors']/lst/str[@name='id']/text()='9'", + "not(//arr[@name='errors']/lst/str[@name='id']/text()='10')", + "//arr[@name='errors']/lst/str[@name='id']/text()='11'", + "not(//arr[@name='errors']/lst/str[@name='id']/text()='12')", + "//arr[@name='errors']/lst/str[@name='id']/text()='13'", + "not(//arr[@name='errors']/lst/str[@name='id']/text()='14')", + "//arr[@name='errors']/lst/str[@name='id']/text()='15'", + "not(//arr[@name='errors']/lst/str[@name='id']/text()='16')", + "//arr[@name='errors']/lst/str[@name='id']/text()='17'", + "not(//arr[@name='errors']/lst/str[@name='id']/text()='18')", + "//arr[@name='errors']/lst/str[@name='id']/text()='19'")); // spot check response when effective maxErrors is unlimited response = update("tolerant-chain-max-errors-not-set", builder.toString()); assertNull(BaseTestHarness.validateXPath(response, "//int[@name='maxErrors']/text()='-1'")); - } - - public String update(String chain, String xml) { DirectSolrConnection 
connection = new DirectSolrConnection(h.getCore()); SolrRequestHandler handler = h.getCore().getRequestHandler("/update"); @@ -348,66 +390,70 @@ public String update(String chain, String xml) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e); } } - - private void assertAddsSucceedWithErrors(String chain, - final Collection docs, - SolrParams requestParams, - String... idsShouldFail) throws IOException { + + private void assertAddsSucceedWithErrors( + String chain, + final Collection docs, + SolrParams requestParams, + String... idsShouldFail) + throws IOException { SolrQueryResponse response = add(chain, requestParams, docs); - + @SuppressWarnings("unchecked") - List> errors = (List>) - response.getResponseHeader().get("errors"); + List> errors = + (List>) response.getResponseHeader().get("errors"); assertNotNull(errors); assertEquals("number of errors", idsShouldFail.length, errors.size()); - + Set addErrorIdsExpected = new HashSet(Arrays.asList(idsShouldFail)); for (SimpleOrderedMap err : errors) { assertEquals("this method only expects 'add' errors", "ADD", err.get("type")); - + String id = err.get("id"); assertNotNull("null err id", id); assertTrue("unexpected id", addErrorIdsExpected.contains(id)); - } } - - protected SolrQueryResponse add(final String chain, SolrParams requestParams, final SolrInputDocument doc) throws IOException { - return add(chain, requestParams, Arrays.asList(new SolrInputDocument[]{doc})); + + protected SolrQueryResponse add( + final String chain, SolrParams requestParams, final SolrInputDocument doc) + throws IOException { + return add(chain, requestParams, Arrays.asList(new SolrInputDocument[] {doc})); } - - protected SolrQueryResponse add(final String chain, SolrParams requestParams, final Collection docs) throws IOException { - + + protected SolrQueryResponse add( + final String chain, SolrParams requestParams, final Collection docs) + throws IOException { + SolrCore core = h.getCore(); UpdateRequestProcessorChain pc = core.getUpdateProcessingChain(chain); assertNotNull("No Chain named: " + chain, pc); - + SolrQueryResponse rsp = new SolrQueryResponse(); rsp.add("responseHeader", new SimpleOrderedMap()); - - if(requestParams == null) { + + if (requestParams == null) { requestParams = new ModifiableSolrParams(); } - + SolrQueryRequest req = new LocalSolrQueryRequest(core, requestParams); UpdateRequestProcessor processor = null; try { processor = pc.createProcessor(req, rsp); - for(SolrInputDocument doc:docs) { + for (SolrInputDocument doc : docs) { AddUpdateCommand cmd = new AddUpdateCommand(req); cmd.solrDoc = doc; processor.processAdd(cmd); } processor.finish(); - + } finally { IOUtils.closeQuietly(processor); req.close(); } return rsp; } - } diff --git a/solr/core/src/test/org/apache/solr/update/processor/TrackingUpdateProcessorFactory.java b/solr/core/src/test/org/apache/solr/update/processor/TrackingUpdateProcessorFactory.java index 0ac81a7d83f..38eb42390f4 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/TrackingUpdateProcessorFactory.java +++ b/solr/core/src/test/org/apache/solr/update/processor/TrackingUpdateProcessorFactory.java @@ -24,7 +24,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; - import org.apache.solr.cloud.MiniSolrCloudCluster; import org.apache.solr.cloud.SolrCloudTestCase; import org.apache.solr.common.util.NamedList; @@ -40,21 +39,20 @@ import org.slf4j.LoggerFactory; /** - * This Factory is similar to {@link RecordingUpdateProcessorFactory}, but with 
the goal of - * tracking requests across multiple collections/shards/replicas in a {@link SolrCloudTestCase}. - * It can optionally save references to the commands it receives inm a single global + * This Factory is similar to {@link RecordingUpdateProcessorFactory}, but with the goal of tracking + * requests across multiple collections/shards/replicas in a {@link SolrCloudTestCase}. It can + * optionally save references to the commands it receives inm a single global * Map<String,BlockingQueue> keys in the map are arbitrary, but the intention is that tests - * generate a key that is unique to that test, and configure the factory with the key as "group name" - * to avoid cross talk between tests. Tests can poll for requests from a group to observe that the expected - * commands are executed. By default, this factory does nothing except return the "next" - * processor from the chain unless it's told to {@link #startRecording(String)} in which case all factories - * with the same group will begin recording. + * generate a key that is unique to that test, and configure the factory with the key as "group + * name" to avoid cross talk between tests. Tests can poll for requests from a group to observe that + * the expected commands are executed. By default, this factory does nothing except return the + * "next" processor from the chain unless it's told to {@link #startRecording(String)} in which case + * all factories with the same group will begin recording. * - * This class is only for unit test purposes and should not be used in any production capacity. It presumes all nodes - * exist within the same JVM (i.e. {@link MiniSolrCloudCluster}). + *
<p>
This class is only for unit test purposes and should not be used in any production capacity. + * It presumes all nodes exist within the same JVM (i.e. {@link MiniSolrCloudCluster}). */ -public final class TrackingUpdateProcessorFactory - extends UpdateRequestProcessorFactory { +public final class TrackingUpdateProcessorFactory extends UpdateRequestProcessorFactory { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); public static final String REQUEST_COUNT = "TrackingUpdateProcessorRequestCount"; @@ -62,9 +60,10 @@ public final class TrackingUpdateProcessorFactory /** * The map of group queues containing commands that were recorded + * * @see #startRecording */ - private final static Map> groupToCommands = new ConcurrentHashMap<>(); + private static final Map> groupToCommands = new ConcurrentHashMap<>(); private String group = "default"; @@ -72,46 +71,47 @@ public static void startRecording(String group) { final List updateCommands = groupToCommands.get(group); assert updateCommands == null || updateCommands.isEmpty(); - List existing = groupToCommands.put(group, Collections.synchronizedList(new ArrayList<>())); + List existing = + groupToCommands.put(group, Collections.synchronizedList(new ArrayList<>())); assert existing == null : "Test cross-talk?"; } /** - * * @param group the name of the group to fetch * @return A cloned queue containing the same elements as the queue held in groupToCommands */ public static List stopRecording(String group) { List commands = groupToCommands.remove(group); - return Arrays.asList(commands.toArray(new UpdateCommand[0])); // safe copy. input list is synchronized + return Arrays.asList( + commands.toArray(new UpdateCommand[0])); // safe copy. input list is synchronized } @Override public void init(NamedList args) { - if (args != null && args.indexOf("group",0) >= 0) { + if (args != null && args.indexOf("group", 0) >= 0) { group = (String) args.get("group"); log.debug("Init URP, group '{}'", group); } else { - log.warn("TrackingUpdateProcessorFactory initialized without group configuration, using 'default' but this group is shared" + - "across the entire VM and guaranteed to have unpredictable behavior if used by more than one test"); + log.warn( + "TrackingUpdateProcessorFactory initialized without group configuration, using 'default' but this group is shared" + + "across the entire VM and guaranteed to have unpredictable behavior if used by more than one test"); } } @Override @SuppressWarnings("resource") - public synchronized UpdateRequestProcessor getInstance(SolrQueryRequest req, - SolrQueryResponse rsp, - UpdateRequestProcessor next ) { + public synchronized UpdateRequestProcessor getInstance( + SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) { final List commands = groupToCommands.get(group); return commands == null ? 
next : new RecordingUpdateRequestProcessor(commands, next); } - private static final class RecordingUpdateRequestProcessor - extends UpdateRequestProcessor { + private static final class RecordingUpdateRequestProcessor extends UpdateRequestProcessor { private final List groupCommands; - RecordingUpdateRequestProcessor(List groupCommands, UpdateRequestProcessor next) { + RecordingUpdateRequestProcessor( + List groupCommands, UpdateRequestProcessor next) { super(next); this.groupCommands = groupCommands; } @@ -129,21 +129,25 @@ public void processAdd(AddUpdateCommand cmd) throws IOException { record(cmd); super.processAdd(cmd); } + @Override public void processDelete(DeleteUpdateCommand cmd) throws IOException { record(cmd); super.processDelete(cmd); } + @Override public void processMergeIndexes(MergeIndexesCommand cmd) throws IOException { record(cmd); super.processMergeIndexes(cmd); } + @Override public void processCommit(CommitUpdateCommand cmd) throws IOException { record(cmd); super.processCommit(cmd); } + @Override public void processRollback(RollbackUpdateCommand cmd) throws IOException { record(cmd); @@ -151,6 +155,3 @@ public void processRollback(RollbackUpdateCommand cmd) throws IOException { } } } - - - diff --git a/solr/core/src/test/org/apache/solr/update/processor/URLClassifyProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/URLClassifyProcessorTest.java index f04118cad94..9b7db5c4ea0 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/URLClassifyProcessorTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/URLClassifyProcessorTest.java @@ -19,7 +19,6 @@ import java.io.IOException; import java.net.MalformedURLException; import java.net.URISyntaxException; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.update.AddUpdateCommand; @@ -27,15 +26,15 @@ import org.junit.Test; public class URLClassifyProcessorTest extends SolrTestCaseJ4 { - + private static URLClassifyProcessor classifyProcessor; - + @BeforeClass public static void initTest() { classifyProcessor = - (URLClassifyProcessor) new URLClassifyProcessorFactory().getInstance(null, null, null); + (URLClassifyProcessor) new URLClassifyProcessorFactory().getInstance(null, null, null); } - + @Test public void testProcessor() throws IOException { AddUpdateCommand addCommand = new AddUpdateCommand(null); @@ -44,61 +43,119 @@ public void testProcessor() throws IOException { document.addField("url", "http://www.example.com"); addCommand.solrDoc = document; classifyProcessor.processAdd(addCommand); - assertEquals("Confirm single valued field returned",1, document.getField("url_length").getValueCount()); - assertEquals("Confirm field populated",22, document.getField("url_length").getValue()); + assertEquals( + "Confirm single valued field returned", 1, document.getField("url_length").getValueCount()); + assertEquals("Confirm field populated", 22, document.getField("url_length").getValue()); } - + @Test public void testNormalizations() throws MalformedURLException, URISyntaxException { String url1 = "http://www.example.com/research/"; String url2 = "http://www.example.com/research/../research/"; - assertEquals(classifyProcessor.getNormalizedURL(url1), classifyProcessor.getNormalizedURL(url2)); + assertEquals( + classifyProcessor.getNormalizedURL(url1), classifyProcessor.getNormalizedURL(url2)); } - + @Test public void testLength() throws MalformedURLException, URISyntaxException { - assertEquals(22, 
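The TrackingUpdateProcessorFactory javadoc above describes a record-then-inspect protocol keyed by group name. A hedged sketch of that protocol from a test's point of view, assuming the chain under test contains this factory configured with the same group (the group name here is illustrative):

    import java.util.List;
    import org.apache.solr.update.UpdateCommand;
    import org.apache.solr.update.processor.TrackingUpdateProcessorFactory;

    public class TrackingSketch {
      static List<UpdateCommand> observeUpdates() {
        // all factories configured with this group begin recording
        TrackingUpdateProcessorFactory.startRecording("my-test-group");
        // ... issue adds/deletes/commits through the chain under test ...
        // stop recording and receive a safe copy of the recorded commands
        return TrackingUpdateProcessorFactory.stopRecording("my-test-group");
      }
    }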
classifyProcessor.length(classifyProcessor.getNormalizedURL("http://www.example.com"))); + assertEquals( + 22, classifyProcessor.length(classifyProcessor.getNormalizedURL("http://www.example.com"))); } - + @Test public void testLevels() throws MalformedURLException, URISyntaxException { - assertEquals(1, classifyProcessor.levels(classifyProcessor.getNormalizedURL("http://www.example.com/research/"))); - assertEquals(1, classifyProcessor.levels(classifyProcessor.getNormalizedURL("http://www.example.com/research/index.html"))); - assertEquals(1, classifyProcessor.levels(classifyProcessor.getNormalizedURL("http://www.example.com/research/../research/"))); - assertEquals(0, classifyProcessor.levels(classifyProcessor.getNormalizedURL("http://www.example.com/"))); - assertEquals(0, classifyProcessor.levels(classifyProcessor.getNormalizedURL("http://www.example.com/index.htm"))); - assertEquals(0, classifyProcessor.levels(classifyProcessor.getNormalizedURL("http://www.example.com"))); - assertEquals(0, classifyProcessor.levels(classifyProcessor.getNormalizedURL("https://www.example.com"))); - assertEquals(0, classifyProcessor.levels(classifyProcessor.getNormalizedURL("http://www.example.com////"))); + assertEquals( + 1, + classifyProcessor.levels( + classifyProcessor.getNormalizedURL("http://www.example.com/research/"))); + assertEquals( + 1, + classifyProcessor.levels( + classifyProcessor.getNormalizedURL("http://www.example.com/research/index.html"))); + assertEquals( + 1, + classifyProcessor.levels( + classifyProcessor.getNormalizedURL("http://www.example.com/research/../research/"))); + assertEquals( + 0, classifyProcessor.levels(classifyProcessor.getNormalizedURL("http://www.example.com/"))); + assertEquals( + 0, + classifyProcessor.levels( + classifyProcessor.getNormalizedURL("http://www.example.com/index.htm"))); + assertEquals( + 0, classifyProcessor.levels(classifyProcessor.getNormalizedURL("http://www.example.com"))); + assertEquals( + 0, classifyProcessor.levels(classifyProcessor.getNormalizedURL("https://www.example.com"))); + assertEquals( + 0, + classifyProcessor.levels(classifyProcessor.getNormalizedURL("http://www.example.com////"))); } - + @Test public void testLandingPage() throws MalformedURLException, URISyntaxException { - assertTrue(classifyProcessor.isLandingPage(classifyProcessor.getNormalizedURL("http://www.example.com/index.html"))); - assertTrue(classifyProcessor.isLandingPage(classifyProcessor.getNormalizedURL("http://www.example.com/index.htm"))); - assertTrue(classifyProcessor.isLandingPage(classifyProcessor.getNormalizedURL("http://www.example.com/welcome.html"))); - assertTrue(classifyProcessor.isLandingPage(classifyProcessor.getNormalizedURL("http://www.example.com/welcome.htm"))); - assertTrue(classifyProcessor.isLandingPage(classifyProcessor.getNormalizedURL("http://www.example.com/index.php"))); - assertTrue(classifyProcessor.isLandingPage(classifyProcessor.getNormalizedURL("http://www.example.com/index.asp"))); - assertTrue(classifyProcessor.isLandingPage(classifyProcessor.getNormalizedURL("http://www.example.com/research/"))); - assertTrue(classifyProcessor.isLandingPage(classifyProcessor.getNormalizedURL("https://www.example.com/research/"))); - assertTrue(classifyProcessor.isLandingPage(classifyProcessor.getNormalizedURL("http://www.example.com/"))); - assertFalse(classifyProcessor.isLandingPage(classifyProcessor.getNormalizedURL("http://www.example.com/intro.htm"))); + assertTrue( + classifyProcessor.isLandingPage( + 
classifyProcessor.getNormalizedURL("http://www.example.com/index.html"))); + assertTrue( + classifyProcessor.isLandingPage( + classifyProcessor.getNormalizedURL("http://www.example.com/index.htm"))); + assertTrue( + classifyProcessor.isLandingPage( + classifyProcessor.getNormalizedURL("http://www.example.com/welcome.html"))); + assertTrue( + classifyProcessor.isLandingPage( + classifyProcessor.getNormalizedURL("http://www.example.com/welcome.htm"))); + assertTrue( + classifyProcessor.isLandingPage( + classifyProcessor.getNormalizedURL("http://www.example.com/index.php"))); + assertTrue( + classifyProcessor.isLandingPage( + classifyProcessor.getNormalizedURL("http://www.example.com/index.asp"))); + assertTrue( + classifyProcessor.isLandingPage( + classifyProcessor.getNormalizedURL("http://www.example.com/research/"))); + assertTrue( + classifyProcessor.isLandingPage( + classifyProcessor.getNormalizedURL("https://www.example.com/research/"))); + assertTrue( + classifyProcessor.isLandingPage( + classifyProcessor.getNormalizedURL("http://www.example.com/"))); + assertFalse( + classifyProcessor.isLandingPage( + classifyProcessor.getNormalizedURL("http://www.example.com/intro.htm"))); } - + @Test public void testTopLevelPage() throws MalformedURLException, URISyntaxException { - assertTrue(classifyProcessor.isTopLevelPage(classifyProcessor.getNormalizedURL("http://www.example.com"))); - assertTrue(classifyProcessor.isTopLevelPage(classifyProcessor.getNormalizedURL("http://www.example.com/"))); - assertTrue(classifyProcessor.isTopLevelPage(classifyProcessor.getNormalizedURL("http://subdomain.example.com:1234/#anchor"))); - assertTrue(classifyProcessor.isTopLevelPage(classifyProcessor.getNormalizedURL("http://www.example.com/index.html"))); - - assertFalse(classifyProcessor.isTopLevelPage(classifyProcessor.getNormalizedURL("http://www.example.com/foo"))); - assertFalse(classifyProcessor.isTopLevelPage(classifyProcessor.getNormalizedURL("http://subdomain.example.com/?sorting=lastModified%253Adesc&tag=myTag&view=feed"))); + assertTrue( + classifyProcessor.isTopLevelPage( + classifyProcessor.getNormalizedURL("http://www.example.com"))); + assertTrue( + classifyProcessor.isTopLevelPage( + classifyProcessor.getNormalizedURL("http://www.example.com/"))); + assertTrue( + classifyProcessor.isTopLevelPage( + classifyProcessor.getNormalizedURL("http://subdomain.example.com:1234/#anchor"))); + assertTrue( + classifyProcessor.isTopLevelPage( + classifyProcessor.getNormalizedURL("http://www.example.com/index.html"))); + + assertFalse( + classifyProcessor.isTopLevelPage( + classifyProcessor.getNormalizedURL("http://www.example.com/foo"))); + assertFalse( + classifyProcessor.isTopLevelPage( + classifyProcessor.getNormalizedURL( + "http://subdomain.example.com/?sorting=lastModified%253Adesc&tag=myTag&view=feed"))); } - + @Test public void testCanonicalUrl() throws MalformedURLException, URISyntaxException { - assertEquals("http://www.example.com/", classifyProcessor.getCanonicalUrl(classifyProcessor.getNormalizedURL("http://www.example.com/index.html")).toString()); + assertEquals( + "http://www.example.com/", + classifyProcessor + .getCanonicalUrl( + classifyProcessor.getNormalizedURL("http://www.example.com/index.html")) + .toString()); } } diff --git a/solr/core/src/test/org/apache/solr/update/processor/UUIDUpdateProcessorFallbackTest.java b/solr/core/src/test/org/apache/solr/update/processor/UUIDUpdateProcessorFallbackTest.java index e57e0ef2a13..4e9b3355433 100644 --- 
a/solr/core/src/test/org/apache/solr/update/processor/UUIDUpdateProcessorFallbackTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/UUIDUpdateProcessorFallbackTest.java @@ -20,7 +20,6 @@ import java.util.Date; import java.util.List; import java.util.UUID; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputField; @@ -46,8 +45,8 @@ public static void beforeClass() throws Exception { public void testFallbackToUnique() throws Exception { // get all defaults - SolrInputDocument d = processAdd("default-values-fallback-to-unique", - doc(f("name", "Existing", "Values"))); + SolrInputDocument d = + processAdd("default-values-fallback-to-unique", doc(f("name", "Existing", "Values"))); assertNotNull(d); @@ -55,8 +54,10 @@ public void testFallbackToUnique() throws Exception { assertNotNull(UUID.fromString(d.getFieldValue("id").toString())); // get all defaults - d = processAdd("default-values-fallback-to-unique-automatically", - doc(f("name", "Existing", "Values"))); + d = + processAdd( + "default-values-fallback-to-unique-automatically", + doc(f("name", "Existing", "Values"))); assertNotNull(d); @@ -64,63 +65,71 @@ public void testFallbackToUnique() throws Exception { assertNotNull(UUID.fromString(d.getFieldValue("id").toString())); // defaults already specified - d = processAdd("default-values-fallback-to-unique", - doc(f("timestamp", now), - f("id", "550e8400-e29b-41d4-a716-446655440000"), - f("processor_default_s", "I HAVE A VALUE"), - f("processor_default_i", 12345), - f("name", "Existing", "Values"))); + d = + processAdd( + "default-values-fallback-to-unique", + doc( + f("timestamp", now), + f("id", "550e8400-e29b-41d4-a716-446655440000"), + f("processor_default_s", "I HAVE A VALUE"), + f("processor_default_i", 12345), + f("name", "Existing", "Values"))); assertNotNull(d); - assertEquals("550e8400-e29b-41d4-a716-446655440000", - d.getFieldValue("id")); + assertEquals("550e8400-e29b-41d4-a716-446655440000", d.getFieldValue("id")); // defaults already specified //both config and request param not passed. 
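Several hunks below select update processors at request time rather than by naming a preconfigured chain: a "processor" parameter lists factory prefixes, and per-processor options ride along, such as "uuid.fieldName" in testRequesTParams further down. A minimal sketch of those parameters (the field name comes from the test; the surrounding class is illustrative):

    import org.apache.solr.common.params.ModifiableSolrParams;

    public class RequestTimeUuidSketch {
      static ModifiableSolrParams uuidParams() {
        ModifiableSolrParams p = new ModifiableSolrParams();
        p.add("processor", "uuid");      // assemble a chain from named factories
        p.add("uuid.fieldName", "id_s"); // field the UUID processor should populate
        return p;
      }
    }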
- d = processAdd("default-values-fallback-to-unique-automatically", - doc(f("timestamp", now), - f("id", "550e8400-e29b-41d4-a716-446655440000"), - f("processor_default_s", "I HAVE A VALUE"), - f("processor_default_i", 121), - f("name", "Existing", "Values"))); + d = + processAdd( + "default-values-fallback-to-unique-automatically", + doc( + f("timestamp", now), + f("id", "550e8400-e29b-41d4-a716-446655440000"), + f("processor_default_s", "I HAVE A VALUE"), + f("processor_default_i", 121), + f("name", "Existing", "Values"))); assertNotNull(d); - assertEquals("550e8400-e29b-41d4-a716-446655440000", - d.getFieldValue("id")); + assertEquals("550e8400-e29b-41d4-a716-446655440000", d.getFieldValue("id")); assertEquals(121, d.getFieldValue("processor_default_i")); } public void testRequesTParams() throws Exception { - SolrInputDocument d = processAdd(null, - doc(f("name", "Existing", "Values"), f( "id","75765")), params("processor", "uuid", "uuid.fieldName", "id_s")); + SolrInputDocument d = + processAdd( + null, + doc(f("name", "Existing", "Values"), f("id", "75765")), + params("processor", "uuid", "uuid.fieldName", "id_s")); assertNotNull(d); assertNotNull(d.getFieldValue("id_s")); assertNotNull(UUID.fromString(d.getFieldValue("id_s").toString())); - - // defaults already specified - d = processAdd(null, - doc(f("timestamp", now), - f("id", "454435"), - f("id_s", "550e8400-e29b-41d4-a716-446655440000"), - f("processor_default_s", "I HAVE A VALUE"), - f("processor_default_i", 121), - f("name", "Existing", "Values")) - , params("processor", "uuid", "uuid.fieldName", "id_s")); + d = + processAdd( + null, + doc( + f("timestamp", now), + f("id", "454435"), + f("id_s", "550e8400-e29b-41d4-a716-446655440000"), + f("processor_default_s", "I HAVE A VALUE"), + f("processor_default_i", 121), + f("name", "Existing", "Values")), + params("processor", "uuid", "uuid.fieldName", "id_s")); assertNotNull(d); - assertEquals("550e8400-e29b-41d4-a716-446655440000", - d.getFieldValue("id_s")); + assertEquals("550e8400-e29b-41d4-a716-446655440000", d.getFieldValue("id_s")); assertEquals(121, d.getFieldValue("processor_default_i")); } public void testProcessorPrefixReqParam() throws Exception { - List processors = UpdateRequestProcessorChain.getReqProcessors("uuid", h.getCore()); + List processors = + UpdateRequestProcessorChain.getReqProcessors("uuid", h.getCore()); UpdateRequestProcessorFactory processorFactory = processors.get(0); assertTrue(processorFactory instanceof UUIDUpdateProcessorFactory); @@ -136,9 +145,7 @@ public void testProcessorPrefixReqParam() throws Exception { assertNotNull(cmd.solrDoc.get("id").getValue()); } - /** - * Convenience method for building up SolrInputDocuments - */ + /** Convenience method for building up SolrInputDocuments */ SolrInputDocument doc(SolrInputField... fields) { SolrInputDocument d = new SolrInputDocument(); for (SolrInputField f : fields) { @@ -147,9 +154,7 @@ SolrInputDocument doc(SolrInputField... fields) { return d; } - /** - * Convenience method for building up SolrInputFields - */ + /** Convenience method for building up SolrInputFields */ SolrInputField field(String name, float boost, Object... values) { SolrInputField f = new SolrInputField(name); for (Object v : values) { @@ -158,41 +163,33 @@ SolrInputField field(String name, float boost, Object... 
values) { return f; } - /** - * Convenience method for building up SolrInputFields with default boost - */ + /** Convenience method for building up SolrInputFields with default boost */ SolrInputField f(String name, Object... values) { return field(name, 1.0F, values); } - /** - * Runs a document through the specified chain, and returns the final - * document used when the chain is completed (NOTE: some chains may - * modify the document in place + * Runs a document through the specified chain, and returns the final document used when the chain + * is completed (NOTE: some chains may modify the document in place */ - - SolrInputDocument processAdd(final String chain, - final SolrInputDocument docIn) throws IOException { + SolrInputDocument processAdd(final String chain, final SolrInputDocument docIn) + throws IOException { return processAdd(chain, docIn, params()); } - SolrInputDocument processAdd(final String chain, - final SolrInputDocument docIn, SolrParams params) + SolrInputDocument processAdd(final String chain, final SolrInputDocument docIn, SolrParams params) throws IOException { SolrCore core = h.getCore(); - UpdateRequestProcessorChain pc = chain == null ? - core.getUpdateProcessorChain(params) : - core.getUpdateProcessingChain(chain); + UpdateRequestProcessorChain pc = + chain == null ? core.getUpdateProcessorChain(params) : core.getUpdateProcessingChain(chain); assertNotNull("No Chain named: " + chain, pc); SolrQueryResponse rsp = new SolrQueryResponse(); - SolrQueryRequest req = new LocalSolrQueryRequest - (core, params); + SolrQueryRequest req = new LocalSolrQueryRequest(core, params); try { - SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req,rsp)); + SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp)); AddUpdateCommand cmd = new AddUpdateCommand(req); cmd.solrDoc = docIn; diff --git a/solr/core/src/test/org/apache/solr/update/processor/UniqFieldsUpdateProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/UniqFieldsUpdateProcessorFactoryTest.java index 6156ec973b9..30fbcd5f300 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/UniqFieldsUpdateProcessorFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/UniqFieldsUpdateProcessorFactoryTest.java @@ -19,7 +19,6 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.Map; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.MultiMapSolrParams; import org.apache.solr.common.params.SolrParams; @@ -34,9 +33,7 @@ import org.junit.BeforeClass; import org.junit.Test; -/** - * - */ +/** */ public class UniqFieldsUpdateProcessorFactoryTest extends SolrTestCaseJ4 { @BeforeClass @@ -56,41 +53,55 @@ public void setUp() throws Exception { @Test public void testUniqFields() throws Exception { SolrCore core = h.getCore(); - UpdateRequestProcessorChain chained = core - .getUpdateProcessingChain("uniq-fields"); - UniqFieldsUpdateProcessorFactory factory = ((UniqFieldsUpdateProcessorFactory) chained.getProcessors().get(0)); + UpdateRequestProcessorChain chained = core.getUpdateProcessingChain("uniq-fields"); + UniqFieldsUpdateProcessorFactory factory = + ((UniqFieldsUpdateProcessorFactory) chained.getProcessors().get(0)); assertNotNull(chained); - addDoc(adoc("id", "1a", - "uniq", "value1", - "uniq", "value1", - "uniq", "value2")); - addDoc(adoc("id", "2a", - "uniq2", "value1", - "uniq2", "value2", - "uniq2", "value1", - "uniq2", "value3", - "uniq", "value1", - "uniq", "value1")); - addDoc(adoc("id", "1b", - "uniq3", "value1", 
- "uniq3", "value1")); - addDoc(adoc("id", "1c", - "nouniq", "value1", - "nouniq", "value1", - "nouniq", "value2")); - addDoc(adoc("id", "2c", - "nouniq", "value1", - "nouniq", "value1", - "nouniq", "value2", - "uniq2", "value1", - "uniq2", "value1")); + addDoc( + adoc( + "id", "1a", + "uniq", "value1", + "uniq", "value1", + "uniq", "value2")); + addDoc( + adoc( + "id", "2a", + "uniq2", "value1", + "uniq2", "value2", + "uniq2", "value1", + "uniq2", "value3", + "uniq", "value1", + "uniq", "value1")); + addDoc( + adoc( + "id", "1b", + "uniq3", "value1", + "uniq3", "value1")); + addDoc( + adoc( + "id", "1c", + "nouniq", "value1", + "nouniq", "value1", + "nouniq", "value2")); + addDoc( + adoc( + "id", "2c", + "nouniq", "value1", + "nouniq", "value1", + "nouniq", "value2", + "uniq2", "value1", + "uniq2", "value1")); assertU(commit()); - assertQ(req("id:1a"), "count(//*[@name='uniq']/*)=2", + assertQ( + req("id:1a"), + "count(//*[@name='uniq']/*)=2", "//arr[@name='uniq']/str[1][.='value1']", "//arr[@name='uniq']/str[2][.='value2']"); - assertQ(req("id:2a"), "count(//*[@name='uniq2']/*)=3", + assertQ( + req("id:2a"), + "count(//*[@name='uniq2']/*)=3", "//arr[@name='uniq2']/str[1][.='value1']", "//arr[@name='uniq2']/str[2][.='value2']", "//arr[@name='uniq2']/str[3][.='value3']"); @@ -99,16 +110,13 @@ public void testUniqFields() throws Exception { assertQ(req("id:1c"), "count(//*[@name='nouniq']/*)=3"); assertQ(req("id:2c"), "count(//*[@name='nouniq']/*)=3"); assertQ(req("id:2c"), "count(//*[@name='uniq2']/*)=1"); - } private void addDoc(String doc) throws Exception { Map params = new HashMap<>(); MultiMapSolrParams mmparams = new MultiMapSolrParams(params); - params.put(UpdateParams.UPDATE_CHAIN, new String[] { "uniq-fields" }); - SolrQueryRequestBase req = new SolrQueryRequestBase(h.getCore(), - (SolrParams) mmparams) { - }; + params.put(UpdateParams.UPDATE_CHAIN, new String[] {"uniq-fields"}); + SolrQueryRequestBase req = new SolrQueryRequestBase(h.getCore(), (SolrParams) mmparams) {}; UpdateRequestHandler handler = new UpdateRequestHandler(); handler.init(null); diff --git a/solr/core/src/test/org/apache/solr/update/processor/UpdateRequestProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/UpdateRequestProcessorFactoryTest.java index cbd69203b30..3f1f9d82e36 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/UpdateRequestProcessorFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/UpdateRequestProcessorFactoryTest.java @@ -16,11 +16,12 @@ */ package org.apache.solr.update.processor; +import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM; + import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Arrays; import java.util.List; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.core.SolrCore; @@ -29,11 +30,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM; - -/** - * - */ +/** */ public class UpdateRequestProcessorFactoryTest extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); @@ -44,125 +41,136 @@ public static void beforeClass() throws Exception { initCore("solrconfig-transformers.xml", "schema.xml"); } - public void testRequestTimeUrp(){ + public void testRequestTimeUrp() { SolrCore core = 
h.getCore(); - ModifiableSolrParams params = new ModifiableSolrParams() - .add("processor", "template") - .add("template.field", "id_t:{firstName}_{lastName}") - .add("template.field", "another_t:{lastName}_{firstName}") - .add("template.field", "missing_t:{lastName}_{unKnown}"); + ModifiableSolrParams params = + new ModifiableSolrParams() + .add("processor", "template") + .add("template.field", "id_t:{firstName}_{lastName}") + .add("template.field", "another_t:{lastName}_{firstName}") + .add("template.field", "missing_t:{lastName}_{unKnown}"); UpdateRequestProcessorChain chain = core.getUpdateProcessorChain(params); List l = chain.getProcessors(); assertTrue(l.get(0) instanceof TemplateUpdateProcessorFactory); - - } - - public void testConfiguration() throws Exception - { + + public void testConfiguration() throws Exception { SolrCore core = h.getCore(); // make sure it loaded the factories - UpdateRequestProcessorChain chained = core.getUpdateProcessingChain( "standard" ); - + UpdateRequestProcessorChain chained = core.getUpdateProcessingChain("standard"); + // Make sure it got 3 items (4 configured, 1 is enable=false) - assertEquals("wrong number of (enabled) factories in chain", - 3, chained.getProcessors().size() ); + assertEquals("wrong number of (enabled) factories in chain", 3, chained.getProcessors().size()); // first one should be log, and it should be configured properly UpdateRequestProcessorFactory first = chained.getProcessors().get(0); - assertEquals("wrong factory at front of chain", - LogUpdateProcessorFactory.class, first.getClass()); - LogUpdateProcessorFactory log = (LogUpdateProcessorFactory)first; - assertEquals("wrong config for LogUpdateProcessorFactory.maxNumToLog", - 100, log.maxNumToLog ); - assertEquals("wrong config for LogUpdateProcessorFactory.slowUpdateThresholdMillis", - 2000, log.slowUpdateThresholdMillis); - - - UpdateRequestProcessorChain custom = core.getUpdateProcessingChain( null ); - CustomUpdateRequestProcessorFactory link = (CustomUpdateRequestProcessorFactory) custom.getProcessors().get(0); - - assertEquals( custom, core.getUpdateProcessingChain( "" ) ); - assertEquals( custom, core.getUpdateProcessingChain( "custom" ) ); - + assertEquals( + "wrong factory at front of chain", LogUpdateProcessorFactory.class, first.getClass()); + LogUpdateProcessorFactory log = (LogUpdateProcessorFactory) first; + assertEquals("wrong config for LogUpdateProcessorFactory.maxNumToLog", 100, log.maxNumToLog); + assertEquals( + "wrong config for LogUpdateProcessorFactory.slowUpdateThresholdMillis", + 2000, + log.slowUpdateThresholdMillis); + + UpdateRequestProcessorChain custom = core.getUpdateProcessingChain(null); + CustomUpdateRequestProcessorFactory link = + (CustomUpdateRequestProcessorFactory) custom.getProcessors().get(0); + + assertEquals(custom, core.getUpdateProcessingChain("")); + assertEquals(custom, core.getUpdateProcessingChain("custom")); + // Make sure the NamedListArgs got through ok - assertEquals( "{name={n8=88, n9=99}}", link.args.toString() ); + assertEquals("{name={n8=88, n9=99}}", link.args.toString()); } public void testUpdateDistribChainSkipping() throws Exception { - // a key part of this test is verifying that LogUpdateProcessor is found in all chains because it - // is a @RunAlways processor -- but in order for that to work, we have to sanity check that the log - // level is at least "INFO" otherwise the factory won't even produce a processor and all our assertions - // are for nought. 
(see LogUpdateProcessorFactory.getInstance) + // a key part of this test is verifying that LogUpdateProcessor is found in all chains because + // it is a @RunAlways processor -- but in order for that to work, we have to sanity check that + // the log level is at least "INFO" otherwise the factory won't even produce a processor and all + // our assertions are for nought. (see LogUpdateProcessorFactory.getInstance) // - // TODO: maybe create a new mock Processor w/ @RunAlways annot if folks feel requiring INFO is evil. - assertTrue("Tests must be run with INFO level logging "+ - "otherwise LogUpdateProcessor isn't used and can't be tested.", log.isInfoEnabled()); - + // TODO: maybe create a new mock Processor w/ @RunAlways annot if folks feel requiring INFO is + // evil. + assertTrue( + "Tests must be run with INFO level logging " + + "otherwise LogUpdateProcessor isn't used and can't be tested.", + log.isInfoEnabled()); + final int EXPECTED_CHAIN_LENGTH = 5; SolrCore core = h.getCore(); - for (final String name : Arrays.asList("distrib-chain-explicit", - "distrib-chain-implicit", - "distrib-chain-noop")) { + for (final String name : + Arrays.asList("distrib-chain-explicit", "distrib-chain-implicit", "distrib-chain-noop")) { UpdateRequestProcessor proc; List procs; - + UpdateRequestProcessorChain chain = core.getUpdateProcessingChain(name); assertNotNull(name, chain); // either explicitly, or because of injection - assertEquals(name + " factory chain length: " + chain.toString(), EXPECTED_CHAIN_LENGTH, - chain.getProcessors().size()); + assertEquals( + name + " factory chain length: " + chain.toString(), + EXPECTED_CHAIN_LENGTH, + chain.getProcessors().size()); // test a basic (non distrib) chain proc = chain.createProcessor(req(), new SolrQueryResponse()); procs = procToList(proc); int expectedProcLen = EXPECTED_CHAIN_LENGTH; - if ("distrib-chain-noop".equals(name)) { // NoOpDistributingUpdateProcessorFactory produces no processor + if ("distrib-chain-noop" + .equals(name)) { // NoOpDistributingUpdateProcessorFactory produces no processor expectedProcLen--; } - if (procs.stream().anyMatch(p -> p.getClass().getSimpleName().equals("NestedUpdateProcessor"))) { + if (procs.stream() + .anyMatch(p -> p.getClass().getSimpleName().equals("NestedUpdateProcessor"))) { expectedProcLen++; // NestedUpdate sneaks in via RunUpdate's Factory. } assertEquals(name + " procs size: " + procs.toString(), expectedProcLen, procs.size()); - + // Custom comes first in all three of our chains - assertTrue(name + " first processor isn't a CustomUpdateRequestProcessor: " + procs.toString(), - ( // compare them both just because i'm going insane and the more checks the better - proc instanceof CustomUpdateRequestProcessor - && procs.get(0) instanceof CustomUpdateRequestProcessor)); + assertTrue( + name + " first processor isn't a CustomUpdateRequestProcessor: " + procs.toString(), + ( // compare them both just because i'm going insane and the more checks the better + proc instanceof CustomUpdateRequestProcessor + && procs.get(0) instanceof CustomUpdateRequestProcessor)); // Log should always come second in our chain. 
assertNotNull(name + " proc.next is null", proc.next); assertNotNull(name + " second proc is null", procs.get(1)); - assertTrue(name + " second proc isn't LogUpdateProcessor: " + procs.toString(), - ( // compare them both just because i'm going insane and the more checks the better - proc.next instanceof LogUpdateProcessorFactory.LogUpdateProcessor - && procs.get(1) instanceof LogUpdateProcessorFactory.LogUpdateProcessor)); + assertTrue( + name + " second proc isn't LogUpdateProcessor: " + procs.toString(), + ( // compare them both just because i'm going insane and the more checks the better + proc.next instanceof LogUpdateProcessorFactory.LogUpdateProcessor + && procs.get(1) instanceof LogUpdateProcessorFactory.LogUpdateProcessor)); // fetch the distributed version of this chain - proc = chain.createProcessor(req(DISTRIB_UPDATE_PARAM, "NONE"), // just some non-blank value - new SolrQueryResponse()); + proc = + chain.createProcessor( + req(DISTRIB_UPDATE_PARAM, "NONE"), // just some non-blank value + new SolrQueryResponse()); procs = procToList(proc); assertNotNull(name + " (distrib) chain produced null proc", proc); assertFalse(name + " (distrib) procs is empty", procs.isEmpty()); // for these 3 (distrib) chains, the first proc should always be LogUpdateProcessor - assertTrue(name + " (distrib) first proc should be LogUpdateProcessor because of @RunAlways: " - + procs.toString(), - ( // compare them both just because i'm going insane and the more checks the better - proc instanceof LogUpdateProcessorFactory.LogUpdateProcessor - && procs.get(0) instanceof LogUpdateProcessorFactory.LogUpdateProcessor)); + assertTrue( + name + + " (distrib) first proc should be LogUpdateProcessor because of @RunAlways: " + + procs.toString(), + ( // compare them both just because i'm going insane and the more checks the better + proc instanceof LogUpdateProcessorFactory.LogUpdateProcessor + && procs.get(0) instanceof LogUpdateProcessorFactory.LogUpdateProcessor)); // for these 3 (distrib) chains, the last proc should always be RunUpdateProcessor - assertTrue(name + " (distrib) last processor isn't a RunUpdateProcessor: " + procs.toString(), - procs.get(procs.size()-1) instanceof RunUpdateProcessorFactory.RunUpdateProcessor ); + assertTrue( + name + " (distrib) last processor isn't a RunUpdateProcessor: " + procs.toString(), + procs.get(procs.size() - 1) instanceof RunUpdateProcessorFactory.RunUpdateProcessor); // either 1 proc was droped in distrib mode, or 1 for the "implicit" chain @@ -173,18 +181,18 @@ public void testUpdateDistribChainSkipping() throws Exception { // -1 = distrib-chain-implicit: does RemoveBlank before distrib expectedProcLen--; } - if (procs.stream().anyMatch(p -> p.getClass().getSimpleName().equals("NestedUpdateProcessor"))) { + if (procs.stream() + .anyMatch(p -> p.getClass().getSimpleName().equals("NestedUpdateProcessor"))) { expectedProcLen++; // NestedUpdate sneaks in via RunUpdate's Factory. 
} - assertEquals(name + " (distrib) chain has wrong length: " + procs.toString(), - expectedProcLen, procs.size()); + assertEquals( + name + " (distrib) chain has wrong length: " + procs.toString(), + expectedProcLen, + procs.size()); } - } - /** - * walks the "next" values of the proc building up a List of the procs for easier testing - */ + /** walks the "next" values of the proc building up a List of the procs for easier testing */ public static List procToList(UpdateRequestProcessor proc) { List result = new ArrayList<>(7); while (null != proc) { @@ -194,4 +202,3 @@ public static List procToList(UpdateRequestProcessor pro return result; } } - diff --git a/solr/core/src/test/org/apache/solr/update/processor/XMLAtomicUpdateMultivalueTest.java b/solr/core/src/test/org/apache/solr/update/processor/XMLAtomicUpdateMultivalueTest.java index 1a5f62be155..cee43ad0ccd 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/XMLAtomicUpdateMultivalueTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/XMLAtomicUpdateMultivalueTest.java @@ -24,5 +24,4 @@ public class XMLAtomicUpdateMultivalueTest extends AbstractAtomicUpdatesMultival RequestWriterSupplier getRequestWriterSupplier() { return RequestWriterSupplier.XML; } - } diff --git a/solr/core/src/test/org/apache/solr/util/AuthToolTest.java b/solr/core/src/test/org/apache/solr/util/AuthToolTest.java index 81a79c594e8..5bec00db10a 100644 --- a/solr/core/src/test/org/apache/solr/util/AuthToolTest.java +++ b/solr/core/src/test/org/apache/solr/util/AuthToolTest.java @@ -17,9 +17,11 @@ package org.apache.solr.util; +import static org.apache.solr.util.SolrCLI.findTool; +import static org.apache.solr.util.SolrCLI.parseCmdLine; + import java.nio.file.Files; import java.nio.file.Path; - import org.apache.commons.cli.CommandLine; import org.apache.solr.cloud.SolrCloudTestCase; import org.junit.After; @@ -27,19 +29,15 @@ import org.junit.BeforeClass; import org.junit.Test; -import static org.apache.solr.util.SolrCLI.findTool; -import static org.apache.solr.util.SolrCLI.parseCmdLine; - -/** - * Unit test for SolrCLI's AuthTool - */ +/** Unit test for SolrCLI's AuthTool */ public class AuthToolTest extends SolrCloudTestCase { private Path dir; @BeforeClass public static void setupCluster() throws Exception { configureCluster(1) - .addConfig("config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .configure(); } @@ -61,12 +59,20 @@ public void tearDown() throws Exception { @Test public void testEnableAuth() throws Exception { Path solrIncludeFile = Files.createFile(dir.resolve("solrIncludeFile.txt")); - String[] args = {"auth", "enable", - "-zkHost", cluster.getZkClient().getZkServerAddress(), - "-authConfDir", dir.toAbsolutePath().toString(), - "-solrIncludeFile", solrIncludeFile.toAbsolutePath().toString(), - "-credentials", "solr:solr", - "-blockUnknown", "true"}; + String[] args = { + "auth", + "enable", + "-zkHost", + cluster.getZkClient().getZkServerAddress(), + "-authConfDir", + dir.toAbsolutePath().toString(), + "-solrIncludeFile", + solrIncludeFile.toAbsolutePath().toString(), + "-credentials", + "solr:solr", + "-blockUnknown", + "true" + }; assertEquals(0, runTool(args)); } diff --git a/solr/core/src/test/org/apache/solr/util/BitSetPerf.java b/solr/core/src/test/org/apache/solr/util/BitSetPerf.java index 60a73863256..887524cd49b 100644 --- a/solr/core/src/test/org/apache/solr/util/BitSetPerf.java 
+++ b/solr/core/src/test/org/apache/solr/util/BitSetPerf.java @@ -18,23 +18,20 @@ import java.util.BitSet; import java.util.Random; - import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.util.BitSetIterator; import org.apache.lucene.util.FixedBitSet; -/** Performance tester for FixedBitSet. - * Use -Xbatch for more predictable results, and run tests such that the duration - * is at least 10 seconds for better accuracy. Close browsers on your system (javascript - * or flash may be running and cause more erratic results). - * - * +/** + * Performance tester for FixedBitSet. Use -Xbatch for more predictable results, and run tests such + * that the duration is at least 10 seconds for better accuracy. Close browsers on your system + * (javascript or flash may be running and cause more erratic results). */ public class BitSetPerf { static Random rand = new Random(0); static void randomSets(int maxSize, int bitsToSet, BitSet target1, FixedBitSet target2) { - for (int i=0; i "); System.out.println(" impl => open for FixedBitSet"); } @@ -57,12 +54,12 @@ public static void main(String[] args) { int numBitsSet = Integer.parseInt(args[2]); String test = args[3]; int iter = Integer.parseInt(args[4]); - String impl = args.length>5 ? args[5].intern() : "bit"; + String impl = args.length > 5 ? args[5].intern() : "bit"; BitSet[] sets = new BitSet[numSets]; FixedBitSet[] osets = new FixedBitSet[numSets]; - for (int i=0; i=0; next=set.nextSetBit(next+1)) { + for (int next = set.nextSetBit(0); next >= 0; next = set.nextSetBit(next + 1)) { ret += next; } } @@ -166,19 +163,18 @@ public static void main(String[] args) { } } - if ("iterator".equals(test)) { - for (int it=0; it=0; next=iterator.nextDoc()) { + for (int next = iterator.nextDoc(); next >= 0; next = iterator.nextDoc()) { ret += next; } } else { final BitSet set = sets[i]; - for(int next=set.nextSetBit(0); next>=0; next=set.nextSetBit(next+1)) { + for (int next = set.nextSetBit(0); next >= 0; next = set.nextSetBit(next + 1)) { ret += next; } } @@ -186,10 +182,7 @@ public static void main(String[] args) { } } - System.out.println("ret="+ret); - System.out.println("TIME="+timer.getTime()); - + System.out.println("ret=" + ret); + System.out.println("TIME=" + timer.getTime()); } - - } diff --git a/solr/core/src/test/org/apache/solr/util/CircularListTest.java b/solr/core/src/test/org/apache/solr/util/CircularListTest.java index 46361befd61..7708282058a 100644 --- a/solr/core/src/test/org/apache/solr/util/CircularListTest.java +++ b/solr/core/src/test/org/apache/solr/util/CircularListTest.java @@ -17,28 +17,25 @@ package org.apache.solr.util; import java.io.IOException; - import org.apache.solr.SolrTestCase; import org.apache.solr.logging.CircularList; import org.junit.Test; -/** - * Test circular list - */ -public class CircularListTest extends SolrTestCase { +/** Test circular list */ +public class CircularListTest extends SolrTestCase { @Test public void testCircularList() throws IOException { CircularList list = new CircularList<>(10); - for(int i=0;i<10; i++) { + for (int i = 0; i < 10; i++) { list.add(i); } assertEquals("within list", Integer.valueOf(0), list.get(0)); - for(int i=10;i<20; i++) { + for (int i = 10; i < 20; i++) { list.add(i); assertEquals("within list", Integer.valueOf(i - 9), list.get(0)); } - + // now try the resize list.resize(5); assertEquals(Integer.valueOf(15), list.get(0)); diff --git a/solr/core/src/test/org/apache/solr/util/DateMathParserTest.java 
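For reference, the bit-iteration idiom this benchmark times can be reduced to a few lines; a sketch with java.util.BitSet (the FixedBitSet path walks ids the same way via a BitSetIterator):

    import java.util.BitSet;

    class NextSetBitSketch {
      public static void main(String[] args) {
        BitSet set = new BitSet(64);
        set.set(3);
        set.set(17);
        set.set(42);
        long sum = 0;
        // Visit each set bit in ascending order; nextSetBit returns -1 when exhausted.
        for (int i = set.nextSetBit(0); i >= 0; i = set.nextSetBit(i + 1)) {
          sum += i;
        }
        System.out.println("sum=" + sum); // 62
      }
    }
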
b/solr/core/src/test/org/apache/solr/util/DateMathParserTest.java index bc9aba772d9..2c249b44657 100644 --- a/solr/core/src/test/org/apache/solr/util/DateMathParserTest.java +++ b/solr/core/src/test/org/apache/solr/util/DateMathParserTest.java @@ -16,6 +16,8 @@ */ package org.apache.solr.util; +import static org.apache.solr.util.DateMathParser.UTC; + import java.text.ParseException; import java.time.Instant; import java.time.ZoneOffset; @@ -25,34 +27,29 @@ import java.util.Locale; import java.util.Map; import java.util.TimeZone; - import org.apache.solr.SolrTestCaseJ4; -import static org.apache.solr.util.DateMathParser.UTC; - -/** - * Tests that the functions in DateMathParser - */ +/** Tests that the functions in DateMathParser */ public class DateMathParserTest extends SolrTestCaseJ4 { /** - * A formatter for specifying every last nuance of a Date for easy - * reference in assertion statements + * A formatter for specifying every last nuance of a Date for easy reference in assertion + * statements */ private DateTimeFormatter fmt; - /** - * A parser for reading in explicit dates that are convenient to type - * in a test - */ + /** A parser for reading in explicit dates that are convenient to type in a test */ private DateTimeFormatter parser; - @SuppressWarnings("MisusedDayOfYear") // use a bunch of pattern symbols for more comprehensive testing + @SuppressWarnings( + "MisusedDayOfYear") // use a bunch of pattern symbols for more comprehensive testing public DateMathParserTest() { - fmt = DateTimeFormatter.ofPattern("G yyyyy MM ww W D dd F E a HH hh mm ss SSS z Z", Locale.ROOT) - .withZone(ZoneOffset.UTC); + fmt = + DateTimeFormatter.ofPattern("G yyyyy MM ww W D dd F E a HH hh mm ss SSS z Z", Locale.ROOT) + .withZone(ZoneOffset.UTC); - parser = DateTimeFormatter.ISO_LOCAL_DATE_TIME.withZone(ZoneOffset.UTC); // basically without the 'Z' + parser = + DateTimeFormatter.ISO_LOCAL_DATE_TIME.withZone(ZoneOffset.UTC); // basically without the 'Z' } /** MACRO: Round: parses s, rounds with u, fmts */ @@ -60,7 +57,7 @@ protected String r(String s, String u) throws Exception { Date dt = DateMathParser.parseMath(null, s + "Z/" + u); return fmt.format(dt.toInstant()); } - + /** MACRO: Add: parses s, adds v u, fmts */ protected String a(String s, int v, String u) throws Exception { char sign = v >= 0 ? 
'+' : '-'; @@ -75,31 +72,29 @@ protected String e(String s) throws Exception { protected void assertRound(String e, String i, String u) throws Exception { String ee = e(e); - String rr = r(i,u); + String rr = r(i, u); assertEquals(ee + " != " + rr + " round:" + i + ":" + u, ee, rr); } - protected void assertAdd(String e, String i, int v, String u) - throws Exception { - + protected void assertAdd(String e, String i, int v, String u) throws Exception { + String ee = e(e); - String aa = a(i,v,u); + String aa = a(i, v, u); assertEquals(ee + " != " + aa + " add:" + i + "+" + v + ":" + u, ee, aa); } - protected void assertMath(String e, DateMathParser p, String i) - throws Exception { - + protected void assertMath(String e, DateMathParser p, String i) throws Exception { + String ee = e(e); String aa = fmt.format(p.parseMath(i).toInstant()); - assertEquals(ee + " != " + aa + " math:" + - parser.format(p.getNow().toInstant()) + ":" + i, ee, aa); + assertEquals( + ee + " != " + aa + " math:" + parser.format(p.getNow().toInstant()) + ":" + i, ee, aa); } private void setNow(DateMathParser p, String text) { p.setNow(Date.from(parser.parse(text, Instant::from))); } - + public void testCalendarUnitsConsistency() throws Exception { String input = "1234-07-04T12:08:56.235"; for (String u : DateMathParser.CALENDAR_UNITS.keySet()) { @@ -115,34 +110,32 @@ public void testCalendarUnitsConsistency() throws Exception { } } } - + public void testRound() throws Exception { - + String input = "1234-07-04T12:08:56.235"; - + assertRound("1234-07-04T12:08:56.000", input, "SECOND"); assertRound("1234-07-04T12:08:00.000", input, "MINUTE"); assertRound("1234-07-04T12:00:00.000", input, "HOUR"); assertRound("1234-07-04T00:00:00.000", input, "DAY"); assertRound("1234-07-01T00:00:00.000", input, "MONTH"); assertRound("1234-01-01T00:00:00.000", input, "YEAR"); - } public void testAddZero() throws Exception { - + String input = "1234-07-04T12:08:56.235"; - + for (String u : DateMathParser.CALENDAR_UNITS.keySet()) { assertAdd(input, input, 0, u); } } - public void testAdd() throws Exception { - + String input = "1234-07-04T12:08:56.235"; - + assertAdd("1234-07-04T12:08:56.236", input, 1, "MILLISECOND"); assertAdd("1234-07-04T12:08:57.235", input, 1, "SECOND"); assertAdd("1234-07-04T12:09:56.235", input, 1, "MINUTE"); @@ -150,23 +143,22 @@ public void testAdd() throws Exception { assertAdd("1234-07-05T12:08:56.235", input, 1, "DAY"); assertAdd("1234-08-04T12:08:56.235", input, 1, "MONTH"); assertAdd("1235-07-04T12:08:56.235", input, 1, "YEAR"); - } - + public void testParseStatelessness() throws Exception { DateMathParser p = new DateMathParser(UTC); setNow(p, "1234-07-04T12:08:56.235"); String e = fmt.format(p.parseMath("").toInstant()); - + Date trash = p.parseMath("+7YEARS"); trash = p.parseMath("/MONTH"); trash = p.parseMath("-5DAYS+20MINUTES"); Thread.currentThread(); Thread.sleep(5); - - String a =fmt.format(p.parseMath("").toInstant()); + + String a = fmt.format(p.parseMath("").toInstant()); assertEquals("State of DateMathParser changed", e, a); } @@ -177,7 +169,7 @@ public void testParseMath() throws Exception { // No-Op assertMath("1234-07-04T12:08:56.235", p, ""); - + // simple round assertMath("1234-07-04T12:08:56.235", p, "/MILLIS"); // no change assertMath("1234-07-04T12:08:56.000", p, "/SECOND"); @@ -255,11 +247,13 @@ public void testParseMathTz() throws Exception { final String PLUS_TZS = "America/Los_Angeles"; final String NEG_TZS = "Europe/Paris"; - - assumeTrue("Test requires JVM to know about about TZ: " + 
PLUS_TZS, - TimeZoneUtils.KNOWN_TIMEZONE_IDS.contains(PLUS_TZS)); - assumeTrue("Test requires JVM to know about about TZ: " + NEG_TZS, - TimeZoneUtils.KNOWN_TIMEZONE_IDS.contains(NEG_TZS)); + + assumeTrue( + "Test requires JVM to know about about TZ: " + PLUS_TZS, + TimeZoneUtils.KNOWN_TIMEZONE_IDS.contains(PLUS_TZS)); + assumeTrue( + "Test requires JVM to know about about TZ: " + NEG_TZS, + TimeZoneUtils.KNOWN_TIMEZONE_IDS.contains(NEG_TZS)); // US, Positive Offset with DST @@ -299,15 +293,14 @@ public void testParseMathTz() throws Exception { assertMath("2000-12-31T23:00:00.000", p, "/YEAR"); // no DST in nov assertMath("2001-11-03T23:00:00.000", p, "+4MONTH/DAY"); + } - } - public void testParseMathExceptions() throws Exception { - + DateMathParser p = new DateMathParser(UTC); setNow(p, "1234-07-04T12:08:56.235"); - - Map badCommands = new HashMap<>(); + + Map badCommands = new HashMap<>(); badCommands.put("/", 1); badCommands.put("+", 1); badCommands.put("-", 1); @@ -321,17 +314,17 @@ public void testParseMathExceptions() throws Exception { for (String command : badCommands.keySet()) { ParseException e = expectThrows(ParseException.class, () -> p.parseMath(command)); - assertEquals("Wrong pos for: " + command + " => " + e.getMessage(), - badCommands.get(command).intValue(), e.getErrorOffset()); + assertEquals( + "Wrong pos for: " + command + " => " + e.getMessage(), + badCommands.get(command).intValue(), + e.getErrorOffset()); } - } /* PARSING / FORMATTING (without date math) Formerly in DateFieldTest. */ - public void testFormatter() { assertFormat("1995-12-31T23:59:59.999Z", 820454399999l); assertFormat("1995-12-31T23:59:59.990Z", 820454399990l); @@ -340,26 +333,26 @@ public void testFormatter() { // just after epoch assertFormat("1970-01-01T00:00:00.005Z", 5L); - assertFormat("1970-01-01T00:00:00Z", 0L); - assertFormat("1970-01-01T00:00:00.370Z", 370L); - assertFormat("1970-01-01T00:00:00.900Z", 900L); + assertFormat("1970-01-01T00:00:00Z", 0L); + assertFormat("1970-01-01T00:00:00.370Z", 370L); + assertFormat("1970-01-01T00:00:00.900Z", 900L); // well after epoch assertFormat("1999-12-31T23:59:59.005Z", 946684799005L); - assertFormat("1999-12-31T23:59:59Z", 946684799000L); - assertFormat("1999-12-31T23:59:59.370Z", 946684799370L); - assertFormat("1999-12-31T23:59:59.900Z", 946684799900L); + assertFormat("1999-12-31T23:59:59Z", 946684799000L); + assertFormat("1999-12-31T23:59:59.370Z", 946684799370L); + assertFormat("1999-12-31T23:59:59.900Z", 946684799900L); // waaaay after epoch ('+' is required for more than 4 digits in a year) assertFormat("+12345-12-31T23:59:59.005Z", 327434918399005L); - assertFormat("+12345-12-31T23:59:59Z", 327434918399000L); - assertFormat("+12345-12-31T23:59:59.370Z", 327434918399370L); - assertFormat("+12345-12-31T23:59:59.900Z", 327434918399900L); + assertFormat("+12345-12-31T23:59:59Z", 327434918399000L); + assertFormat("+12345-12-31T23:59:59.370Z", 327434918399370L); + assertFormat("+12345-12-31T23:59:59.900Z", 327434918399900L); // well before epoch - assertFormat("0299-12-31T23:59:59Z", -52700112001000L); + assertFormat("0299-12-31T23:59:59Z", -52700112001000L); assertFormat("0299-12-31T23:59:59.123Z", -52700112000877L); - assertFormat("0299-12-31T23:59:59.090Z", -52700112000910L); + assertFormat("0299-12-31T23:59:59.090Z", -52700112000910L); // BC (negative years) assertFormat("-12021-12-01T02:02:02Z", Instant.parse("-12021-12-01T02:02:02Z").toEpochMilli()); @@ -367,32 +360,31 @@ public void testFormatter() { private void assertFormat(final String 
expected, final long millis) { assertEquals(expected, Instant.ofEpochMilli(millis).toString()); // assert same as ISO_INSTANT - assertEquals(millis, DateMathParser.parseMath(null, expected).getTime()); // assert DMP has same result + assertEquals( + millis, DateMathParser.parseMath(null, expected).getTime()); // assert DMP has same result } - /** - * Using dates in the canonical format, verify that parsing+formatting - * is an identify function - */ + /** Using dates in the canonical format, verify that parsing+formatting is an identify function */ public void testRoundTrip() throws Exception { // NOTE: the 2nd arg is what the round trip result looks like (may be null if same as input) - assertParseFormatEquals("1995-12-31T23:59:59.999666Z", "1995-12-31T23:59:59.999Z"); // beyond millis is truncated - assertParseFormatEquals("1995-12-31T23:59:59.999Z", "1995-12-31T23:59:59.999Z"); - assertParseFormatEquals("1995-12-31T23:59:59.99Z", "1995-12-31T23:59:59.990Z"); - assertParseFormatEquals("1995-12-31T23:59:59.9Z", "1995-12-31T23:59:59.900Z"); - assertParseFormatEquals("1995-12-31T23:59:59Z", "1995-12-31T23:59:59Z"); + assertParseFormatEquals( + "1995-12-31T23:59:59.999666Z", "1995-12-31T23:59:59.999Z"); // beyond millis is truncated + assertParseFormatEquals("1995-12-31T23:59:59.999Z", "1995-12-31T23:59:59.999Z"); + assertParseFormatEquals("1995-12-31T23:59:59.99Z", "1995-12-31T23:59:59.990Z"); + assertParseFormatEquals("1995-12-31T23:59:59.9Z", "1995-12-31T23:59:59.900Z"); + assertParseFormatEquals("1995-12-31T23:59:59Z", "1995-12-31T23:59:59Z"); // here the input isn't in the canonical form, but we should be forgiving assertParseFormatEquals("1995-12-31T23:59:59.990Z", "1995-12-31T23:59:59.990Z"); assertParseFormatEquals("1995-12-31T23:59:59.900Z", "1995-12-31T23:59:59.900Z"); - assertParseFormatEquals("1995-12-31T23:59:59.90Z", "1995-12-31T23:59:59.900Z"); + assertParseFormatEquals("1995-12-31T23:59:59.90Z", "1995-12-31T23:59:59.900Z"); assertParseFormatEquals("1995-12-31T23:59:59.000Z", "1995-12-31T23:59:59Z"); - assertParseFormatEquals("1995-12-31T23:59:59.00Z", "1995-12-31T23:59:59Z"); - assertParseFormatEquals("1995-12-31T23:59:59.0Z", "1995-12-31T23:59:59Z"); + assertParseFormatEquals("1995-12-31T23:59:59.00Z", "1995-12-31T23:59:59Z"); + assertParseFormatEquals("1995-12-31T23:59:59.0Z", "1995-12-31T23:59:59Z"); // kind of kludgy, but we have other tests for the actual date math - //assertParseFormatEquals("NOW/DAY", p.parseMath("/DAY").toInstant().toString()); + // assertParseFormatEquals("NOW/DAY", p.parseMath("/DAY").toInstant().toString()); // as of Solr 1.3 assertParseFormatEquals("1995-12-31T23:59:59Z/DAY", "1995-12-31T00:00:00Z"); @@ -401,11 +393,12 @@ public void testRoundTrip() throws Exception { // typical dates, various precision (0,1,2,3 digits of millis) assertParseFormatEquals("1995-12-31T23:59:59.987Z", null); - assertParseFormatEquals("1995-12-31T23:59:59.98Z", "1995-12-31T23:59:59.980Z");//add 0 ms - assertParseFormatEquals("1995-12-31T23:59:59.9Z", "1995-12-31T23:59:59.900Z");//add 00 ms + assertParseFormatEquals("1995-12-31T23:59:59.98Z", "1995-12-31T23:59:59.980Z"); // add 0 ms + assertParseFormatEquals("1995-12-31T23:59:59.9Z", "1995-12-31T23:59:59.900Z"); // add 00 ms assertParseFormatEquals("1995-12-31T23:59:59Z", null); assertParseFormatEquals("1976-03-06T03:06:00Z", null); - assertParseFormatEquals("1995-12-31T23:59:59.987654Z", "1995-12-31T23:59:59.987Z");//truncate nanoseconds off + assertParseFormatEquals( + "1995-12-31T23:59:59.987654Z", 
"1995-12-31T23:59:59.987Z"); // truncate nanoseconds off // dates with atypical years assertParseFormatEquals("0001-01-01T01:01:01Z", null); @@ -421,7 +414,8 @@ public void testRoundTrip() throws Exception { public void testParseLenient() throws Exception { // dates that only parse thanks to lenient mode of DateTimeFormatter - assertParseFormatEquals("10995-12-31T23:59:59.990Z", "+10995-12-31T23:59:59.990Z"); // missing '+' 5 digit year + assertParseFormatEquals( + "10995-12-31T23:59:59.990Z", "+10995-12-31T23:59:59.990Z"); // missing '+' 5 digit year assertParseFormatEquals("995-1-2T3:4:5Z", "0995-01-02T03:04:05Z"); // wasn't 0 padded } @@ -434,4 +428,3 @@ private void assertParseFormatEquals(String inputStr, String expectedStr) { assertEquals("d:" + inputDate.getTime(), expectedStr, resultStr); } } - diff --git a/solr/core/src/test/org/apache/solr/util/DynamicMapsTest.java b/solr/core/src/test/org/apache/solr/util/DynamicMapsTest.java index cc20b993a0c..303ea14f5f3 100644 --- a/solr/core/src/test/org/apache/solr/util/DynamicMapsTest.java +++ b/solr/core/src/test/org/apache/solr/util/DynamicMapsTest.java @@ -20,7 +20,6 @@ import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; - import org.apache.solr.SolrTestCase; public class DynamicMapsTest extends SolrTestCase { @@ -35,7 +34,7 @@ public void testIntFloatMap() { map.put(key, val); } - for (Map.Entry entry: standard.entrySet()) { + for (Map.Entry entry : standard.entrySet()) { assertEquals(entry.getValue(), map.get(entry.getKey()), 0.0001); } AtomicInteger size = new AtomicInteger(0); @@ -56,8 +55,8 @@ public void testIntLongMap() { map.put(key, val); } - for (Map.Entry entry: standard.entrySet()) { - assertEquals((long)entry.getValue(), map.get(entry.getKey())); + for (Map.Entry entry : standard.entrySet()) { + assertEquals((long) entry.getValue(), map.get(entry.getKey())); } AtomicInteger size = new AtomicInteger(0); map.forEachValue(i -> size.incrementAndGet()); @@ -77,8 +76,8 @@ public void testIntIntMap() { map.put(key, val); } - for (Map.Entry entry: standard.entrySet()) { - assertEquals((int)entry.getValue(), map.get(entry.getKey())); + for (Map.Entry entry : standard.entrySet()) { + assertEquals((int) entry.getValue(), map.get(entry.getKey())); } AtomicInteger size = new AtomicInteger(0); map.forEachValue(i -> size.incrementAndGet()); diff --git a/solr/core/src/test/org/apache/solr/util/FileUtilsTest.java b/solr/core/src/test/org/apache/solr/util/FileUtilsTest.java index 95b37011a6d..118a2f501fe 100644 --- a/solr/core/src/test/org/apache/solr/util/FileUtilsTest.java +++ b/solr/core/src/test/org/apache/solr/util/FileUtilsTest.java @@ -18,7 +18,6 @@ import java.io.File; import java.io.IOException; - import org.apache.solr.SolrTestCase; public class FileUtilsTest extends SolrTestCase { @@ -26,7 +25,8 @@ public class FileUtilsTest extends SolrTestCase { public void testResolve() throws IOException { String cwd = new File(".").getAbsolutePath(); assertEquals(new File("conf/data"), FileUtils.resolvePath(new File("conf"), "data")); - assertEquals(new File(cwd+"/conf/data"), FileUtils.resolvePath(new File(cwd+"/conf"), "data")); - assertEquals(new File(cwd+"/data"), FileUtils.resolvePath(new File("conf"), cwd+"/data")); + assertEquals( + new File(cwd + "/conf/data"), FileUtils.resolvePath(new File(cwd + "/conf"), "data")); + assertEquals(new File(cwd + "/data"), FileUtils.resolvePath(new File("conf"), cwd + "/data")); } } diff --git a/solr/core/src/test/org/apache/solr/util/LongSetTest.java 
b/solr/core/src/test/org/apache/solr/util/LongSetTest.java index 44414c4d2ed..4d3183ccdea 100644 --- a/solr/core/src/test/org/apache/solr/util/LongSetTest.java +++ b/solr/core/src/test/org/apache/solr/util/LongSetTest.java @@ -17,7 +17,6 @@ package org.apache.solr.util; import java.util.HashSet; - import org.apache.solr.SolrTestCase; import org.junit.Test; @@ -90,5 +89,4 @@ public void testIterating() { assertEquals(0L, it.next()); assertFalse(it.hasNext()); } - } diff --git a/solr/core/src/test/org/apache/solr/util/ModuleUtilsTest.java b/solr/core/src/test/org/apache/solr/util/ModuleUtilsTest.java index d2c2db2f436..45120027c57 100644 --- a/solr/core/src/test/org/apache/solr/util/ModuleUtilsTest.java +++ b/solr/core/src/test/org/apache/solr/util/ModuleUtilsTest.java @@ -16,13 +16,12 @@ */ package org.apache.solr.util; -import junit.framework.TestCase; - import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Collections; import java.util.Set; +import junit.framework.TestCase; public class ModuleUtilsTest extends TestCase { private Path mockRootDir; @@ -46,8 +45,9 @@ public void testIsValidName() { } public void testGetModuleLibPath() { - assertEquals(mockRootDir.resolve("modules") - .resolve("mod1").resolve("lib"), ModuleUtils.getModuleLibPath(mockRootDir, "mod1")); + assertEquals( + mockRootDir.resolve("modules").resolve("mod1").resolve("lib"), + ModuleUtils.getModuleLibPath(mockRootDir, "mod1")); } public void testResolveFromSyspropOrEnv() { @@ -62,10 +62,14 @@ public void testListAvailableModules() { } public void testResolveModules() { - assertEquals(Set.of("foo", "bar", "baz", "mod1"), ModuleUtils.resolveModulesFromStringOrSyspropOrEnv("foo ,bar, baz,mod1")); + assertEquals( + Set.of("foo", "bar", "baz", "mod1"), + ModuleUtils.resolveModulesFromStringOrSyspropOrEnv("foo ,bar, baz,mod1")); assertEquals(Collections.emptySet(), ModuleUtils.resolveModulesFromStringOrSyspropOrEnv("")); System.setProperty("solr.modules", "foo ,bar, baz,mod1"); - assertEquals(Set.of("foo", "bar", "baz", "mod1"), ModuleUtils.resolveModulesFromStringOrSyspropOrEnv(null)); + assertEquals( + Set.of("foo", "bar", "baz", "mod1"), + ModuleUtils.resolveModulesFromStringOrSyspropOrEnv(null)); System.clearProperty("solr.modules"); } @@ -82,4 +86,4 @@ private Path setupMockInstallDir(Set modules) throws IOException { } return root; } -} \ No newline at end of file +} diff --git a/solr/core/src/test/org/apache/solr/util/OrderedExecutorTest.java b/solr/core/src/test/org/apache/solr/util/OrderedExecutorTest.java index f62301cdd2d..24bb7c3f3a2 100644 --- a/solr/core/src/test/org/apache/solr/util/OrderedExecutorTest.java +++ b/solr/core/src/test/org/apache/solr/util/OrderedExecutorTest.java @@ -18,23 +18,19 @@ package org.apache.solr.util; import java.lang.invoke.MethodHandles; - import java.util.HashMap; import java.util.Map; - import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ExecutorService; -import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeUnit; - +import java.util.concurrent.TimeoutException; import org.apache.solr.SolrTestCase; import org.apache.solr.common.util.ExecutorUtil; import org.junit.Test; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -43,7 +39,8 @@ public class OrderedExecutorTest extends SolrTestCase { @Test public 
void testExecutionInOrder() { - OrderedExecutor orderedExecutor = new OrderedExecutor(10, ExecutorUtil.newMDCAwareCachedThreadPool("executeInOrderTest")); + OrderedExecutor orderedExecutor = + new OrderedExecutor(10, ExecutorUtil.newMDCAwareCachedThreadPool("executeInOrderTest")); IntBox intBox = new IntBox(); for (int i = 0; i < 100; i++) { orderedExecutor.execute(1, () -> intBox.value++); @@ -54,37 +51,44 @@ public void testExecutionInOrder() { @Test public void testLockWhenQueueIsFull() { - final ExecutorService controlExecutor = ExecutorUtil.newMDCAwareCachedThreadPool("testLockWhenQueueIsFull_control"); - final OrderedExecutor orderedExecutor = new OrderedExecutor - (10, ExecutorUtil.newMDCAwareCachedThreadPool("testLockWhenQueueIsFull_test")); - + final ExecutorService controlExecutor = + ExecutorUtil.newMDCAwareCachedThreadPool("testLockWhenQueueIsFull_control"); + final OrderedExecutor orderedExecutor = + new OrderedExecutor( + 10, ExecutorUtil.newMDCAwareCachedThreadPool("testLockWhenQueueIsFull_test")); + try { // AAA and BBB events will both depend on the use of the same lockId final BlockingQueue events = new ArrayBlockingQueue<>(2); final Integer lockId = 1; - + // AAA enters executor first so it should execute first (even though it's waiting on latch) final CountDownLatch latchAAA = new CountDownLatch(1); - orderedExecutor.execute(lockId, () -> { - try { - if (latchAAA.await(120, TimeUnit.SECONDS)) { - events.add("AAA"); - } else { - events.add("AAA Timed Out"); + orderedExecutor.execute( + lockId, + () -> { + try { + if (latchAAA.await(120, TimeUnit.SECONDS)) { + events.add("AAA"); + } else { + events.add("AAA Timed Out"); + } + } catch (InterruptedException e) { + log.error("Interrupt in AAA worker", e); + Thread.currentThread().interrupt(); } - } catch (InterruptedException e) { - log.error("Interrupt in AAA worker", e); - Thread.currentThread().interrupt(); - } - }); + }); // BBB doesn't care about the latch, but because it uses the same lockId, it's blocked on AAA // so we execute it in a background thread... 
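The guarantee exercised here — the same lockId implies sequential execution, while distinct lockIds may run in parallel — can be sketched without the real OrderedExecutor; a simplified, illustrative version:

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    class KeyedSerialExecutorSketch {
      private final Map<Integer, ExecutorService> lanes = new ConcurrentHashMap<>();

      // Tasks sharing a lockId land on the same single-threaded lane and so run
      // in submission order; tasks with distinct lockIds proceed independently.
      void execute(Integer lockId, Runnable task) {
        lanes.computeIfAbsent(lockId, k -> Executors.newSingleThreadExecutor()).execute(task);
      }
    }
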
- controlExecutor.execute(() -> { - orderedExecutor.execute(lockId, () -> { - events.add("BBB"); - }); - }); - + controlExecutor.execute( + () -> { + orderedExecutor.execute( + lockId, + () -> { + events.add("BBB"); + }); + }); + // now if we release the latchAAA, AAA should be guaranteed to fire first, then BBB latchAAA.countDown(); try { @@ -104,10 +108,12 @@ public void testLockWhenQueueIsFull() { @Test public void testRunInParallel() { final int parallelism = atLeast(3); - - final ExecutorService controlExecutor = ExecutorUtil.newMDCAwareCachedThreadPool("testRunInParallel_control"); - final OrderedExecutor orderedExecutor = new OrderedExecutor - (parallelism, ExecutorUtil.newMDCAwareCachedThreadPool("testRunInParallel_test")); + + final ExecutorService controlExecutor = + ExecutorUtil.newMDCAwareCachedThreadPool("testRunInParallel_control"); + final OrderedExecutor orderedExecutor = + new OrderedExecutor( + parallelism, ExecutorUtil.newMDCAwareCachedThreadPool("testRunInParallel_test")); try { // distinct lockIds should be able to be used in parallel, up to the size of the executor, @@ -116,38 +122,43 @@ public void testRunInParallel() { final CyclicBarrier barrier = new CyclicBarrier(parallelism + 1); final CountDownLatch preBarrierLatch = new CountDownLatch(parallelism); final CountDownLatch postBarrierLatch = new CountDownLatch(parallelism); - + for (int i = 0; i < parallelism; i++) { final int lockId = i; - controlExecutor.execute(() -> { - orderedExecutor.execute(lockId, () -> { - try { - log.info("Worker #{} starting", lockId); - preBarrierLatch.countDown(); - barrier.await(120, TimeUnit.SECONDS); - postBarrierLatch.countDown(); - } catch (TimeoutException t) { - log.error("Timeout in worker# {} awaiting barrier", lockId, t); - } catch (BrokenBarrierException b) { - log.error("Broken Barrier in worker#{}", lockId, b); - } catch (InterruptedException e) { - log.error("Interrupt in worker#{} awaiting barrier", lockId, e); - Thread.currentThread().interrupt(); - } - }); - }); + controlExecutor.execute( + () -> { + orderedExecutor.execute( + lockId, + () -> { + try { + log.info("Worker #{} starting", lockId); + preBarrierLatch.countDown(); + barrier.await(120, TimeUnit.SECONDS); + postBarrierLatch.countDown(); + } catch (TimeoutException t) { + log.error("Timeout in worker# {} awaiting barrier", lockId, t); + } catch (BrokenBarrierException b) { + log.error("Broken Barrier in worker#{}", lockId, b); + } catch (InterruptedException e) { + log.error("Interrupt in worker#{} awaiting barrier", lockId, e); + Thread.currentThread().interrupt(); + } + }); + }); } if (log.isInfoEnabled()) { - log.info("main thread: about to wait on pre-barrier latch, barrier={}, post-barrier latch={}", - barrier.getNumberWaiting(), postBarrierLatch.getCount()); + log.info( + "main thread: about to wait on pre-barrier latch, barrier={}, post-barrier latch={}", + barrier.getNumberWaiting(), + postBarrierLatch.getCount()); } - + try { // this latch should have fully counted down by now // (or with a small await for thread scheduling but no other external action) - assertTrue("Timeout awaiting pre barrier latch", - preBarrierLatch.await(120, TimeUnit.SECONDS)); + assertTrue( + "Timeout awaiting pre barrier latch", preBarrierLatch.await(120, TimeUnit.SECONDS)); } catch (InterruptedException e) { log.error("Interrupt awaiting pre barrier latch", e); Thread.currentThread().interrupt(); @@ -155,10 +166,12 @@ public void testRunInParallel() { } if (log.isInfoEnabled()) { - log.info("main thread: pre-barrier latch
done, barrier={}, post-barrier latch={}", - barrier.getNumberWaiting(), postBarrierLatch.getCount()); + log.info( + "main thread: pre-barrier latch done, barrier={}, post-barrier latch={}", + barrier.getNumberWaiting(), + postBarrierLatch.getCount()); } - + // nothing should have counted down yet on the postBarrierLatch assertEquals(parallelism, postBarrierLatch.getCount()); @@ -168,14 +181,15 @@ public void testRunInParallel() { barrier.await(120, TimeUnit.SECONDS); if (log.isInfoEnabled()) { - log.info("main thread: barrier has released, post-barrier latch={}", + log.info( + "main thread: barrier has released, post-barrier latch={}", postBarrierLatch.getCount()); } - + // and now the post-barrier latch should release immediately // (or with a small await for thread scheduling but no other external action) - assertTrue("Timeout awaiting post barrier latch", - postBarrierLatch.await(120, TimeUnit.SECONDS)); + assertTrue( + "Timeout awaiting post barrier latch", postBarrierLatch.await(120, TimeUnit.SECONDS)); } catch (TimeoutException t) { log.error("Timeout awaiting barrier", t); fail("barrier timed out"); @@ -202,7 +216,8 @@ public void testStress() { base.put(i, i); run.put(i, i); } - OrderedExecutor orderedExecutor = new OrderedExecutor(10, ExecutorUtil.newMDCAwareCachedThreadPool("testStress")); + OrderedExecutor orderedExecutor = + new OrderedExecutor(10, ExecutorUtil.newMDCAwareCachedThreadPool("testStress")); for (int i = 0; i < 1000; i++) { int key = random().nextInt(N); base.put(key, base.get(key) + 1); diff --git a/solr/core/src/test/org/apache/solr/util/PrimUtilsTest.java b/solr/core/src/test/org/apache/solr/util/PrimUtilsTest.java index 87e24f9d9a1..5b0d7cfa93e 100644 --- a/solr/core/src/test/org/apache/solr/util/PrimUtilsTest.java +++ b/solr/core/src/test/org/apache/solr/util/PrimUtilsTest.java @@ -16,9 +16,8 @@ */ package org.apache.solr.util; -import org.apache.solr.SolrTestCase; - import java.util.Arrays; +import org.apache.solr.SolrTestCase; public class PrimUtilsTest extends SolrTestCase { @@ -28,23 +27,24 @@ public void testSort() { int[] a = new int[maxSize]; int[] b = new int[maxSize]; - PrimUtils.IntComparator comparator = new PrimUtils.IntComparator() { - @Override - public int compare(int a, int b) { - return b - a; // sort in reverse - } - }; + PrimUtils.IntComparator comparator = + new PrimUtils.IntComparator() { + @Override + public int compare(int a, int b) { + return b - a; // sort in reverse + } + }; - for (int iter=0; iter<100; iter++) { - int start = random().nextInt(maxSize+1); - int end = start==maxSize ? maxSize : start + random().nextInt(maxSize-start); - for (int i=start; iA MockPageFetcher is used to prevent real HTTP requests from being executed. + */ public class SimplePostToolTest extends SolrTestCaseJ4 { SimplePostTool t_file, t_file_auto, t_file_rec, t_web, t_test; PageFetcher pf; - + @Before public void initVariousPostTools() throws Exception { String[] args = {"-"}; - + // Add a dummy core/collection property so that the SimplePostTool - // doesn't fail fast. + // doesn't fail fast. 
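A small sketch of the configuration pattern used in this setup, where system properties (here "c" and "data", as in the test) select the tool's behavior; the sketch class itself is hypothetical:

    class SyspropConfigSketch {
      public static void main(String[] args) {
        System.setProperty("data", "web"); // how the test selects a mode
        // Read with defaults so an unset property does not fail fast.
        String mode = System.getProperty("data", "files");
        String collection = System.getProperty("c", "testcollection");
        System.out.println("mode=" + mode + ", collection=" + collection);
      }
    }
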
System.setProperty("c", "testcollection"); - + System.setProperty("data", "files"); t_file = SimplePostTool.parseArgsAndInit(args); @@ -63,7 +62,7 @@ public void initVariousPostTools() throws Exception { System.setProperty("recursive", "yes"); t_file_rec = SimplePostTool.parseArgsAndInit(args); - + System.setProperty("data", "web"); t_web = SimplePostTool.parseArgsAndInit(args); @@ -72,12 +71,12 @@ public void initVariousPostTools() throws Exception { t_test = SimplePostTool.parseArgsAndInit(args); pf = new MockPageFetcher(); - for (SimplePostTool mockable : new SimplePostTool[]{t_web, t_file_auto}) { + for (SimplePostTool mockable : new SimplePostTool[] {t_web, t_file_auto}) { mockable.pageFetcher = pf; mockable.mockMode = true; } } - + @Test public void testParseArgsAndInit() { assertEquals(false, t_file.auto); @@ -90,29 +89,42 @@ public void testParseArgsAndInit() { assertEquals(1, t_web.recursive); assertEquals(10, t_web.delay); - - assertEquals("http://user:password@localhost:5150/solr/update?param1=foo¶m2=bar",t_test.solrUrl.toExternalForm()); + + assertEquals( + "http://user:password@localhost:5150/solr/update?param1=foo¶m2=bar", + t_test.solrUrl.toExternalForm()); } - + @Test public void testNormalizeUrlEnding() { assertEquals("http://[ff01::114]", SimplePostTool.normalizeUrlEnding("http://[ff01::114]/")); - assertEquals("http://[ff01::114]", SimplePostTool.normalizeUrlEnding("http://[ff01::114]/#foo?bar=baz")); - assertEquals("http://[ff01::114]/index.html", SimplePostTool.normalizeUrlEnding("http://[ff01::114]/index.html#hello")); + assertEquals( + "http://[ff01::114]", SimplePostTool.normalizeUrlEnding("http://[ff01::114]/#foo?bar=baz")); + assertEquals( + "http://[ff01::114]/index.html", + SimplePostTool.normalizeUrlEnding("http://[ff01::114]/index.html#hello")); } - + @Test public void testComputeFullUrl() throws MalformedURLException { - assertEquals("http://[ff01::114]/index.html", t_web.computeFullUrl(new URL("http://[ff01::114]/"), "/index.html")); - assertEquals("http://[ff01::114]/index.html", t_web.computeFullUrl(new URL("http://[ff01::114]/foo/bar/"), "/index.html")); - assertEquals("http://[ff01::114]/fil.html", t_web.computeFullUrl(new URL("http://[ff01::114]/foo.htm?baz#hello"), "fil.html")); -// TODO: How to know what is the base if URL path ends with "foo"?? -// assertEquals("http://[ff01::114]/fil.html", t_web.computeFullUrl(new URL("http://[ff01::114]/foo?baz#hello"), "fil.html")); + assertEquals( + "http://[ff01::114]/index.html", + t_web.computeFullUrl(new URL("http://[ff01::114]/"), "/index.html")); + assertEquals( + "http://[ff01::114]/index.html", + t_web.computeFullUrl(new URL("http://[ff01::114]/foo/bar/"), "/index.html")); + assertEquals( + "http://[ff01::114]/fil.html", + t_web.computeFullUrl(new URL("http://[ff01::114]/foo.htm?baz#hello"), "fil.html")); + // TODO: How to know what is the base if URL path ends with "foo"?? 
+ // assertEquals("http://[ff01::114]/fil.html", t_web.computeFullUrl(new + // URL("http://[ff01::114]/foo?baz#hello"), "fil.html")); assertEquals(null, t_web.computeFullUrl(new URL("http://[ff01::114]/"), "fil.jpg")); - assertEquals(null, t_web.computeFullUrl(new URL("http://[ff01::114]/"), "mailto:hello@foo.bar")); + assertEquals( + null, t_web.computeFullUrl(new URL("http://[ff01::114]/"), "mailto:hello@foo.bar")); assertEquals(null, t_web.computeFullUrl(new URL("http://[ff01::114]/"), "ftp://server/file")); } - + @Test public void testTypeSupported() { assertTrue(t_web.typeSupported("application/pdf")); @@ -124,25 +136,30 @@ public void testTypeSupported() { assertFalse(t_web.typeSupported("application/pdf")); assertTrue(t_web.typeSupported("application/msword")); } - + @Test public void testIsOn() { assertTrue(SimplePostTool.isOn("true")); assertTrue(SimplePostTool.isOn("1")); assertFalse(SimplePostTool.isOn("off")); } - + @Test public void testAppendParam() { - assertEquals("http://[ff01::114]?foo=bar", SimplePostTool.appendParam("http://[ff01::114]", "foo=bar")); - assertEquals("http://[ff01::114]/?a=b&foo=bar", SimplePostTool.appendParam("http://[ff01::114]/?a=b", "foo=bar")); + assertEquals( + "http://[ff01::114]?foo=bar", SimplePostTool.appendParam("http://[ff01::114]", "foo=bar")); + assertEquals( + "http://[ff01::114]/?a=b&foo=bar", + SimplePostTool.appendParam("http://[ff01::114]/?a=b", "foo=bar")); } - + @Test public void testAppendUrlPath() throws MalformedURLException { - assertEquals(new URL("http://[ff01::114]/a?foo=bar"), SimplePostTool.appendUrlPath(new URL("http://[ff01::114]?foo=bar"), "/a")); + assertEquals( + new URL("http://[ff01::114]/a?foo=bar"), + SimplePostTool.appendUrlPath(new URL("http://[ff01::114]?foo=bar"), "/a")); } - + @Test public void testGuessType() { File f = new File("foo.doc"); @@ -168,38 +185,54 @@ public void testDoWebMode() { t_web.recursive = 5; int num = t_web.postWebPages(new String[] {"http://[ff01::114]/#removeme"}, 0, null); assertEquals(5, num); - + t_web.recursive = 1; num = t_web.postWebPages(new String[] {"http://[ff01::114]/"}, 0, null); assertEquals(3, num); - + // Without respecting robots.txt t_web.pageFetcher.robotsCache.put("[ff01::114]", Collections.emptyList()); t_web.recursive = 5; num = t_web.postWebPages(new String[] {"http://[ff01::114]/#removeme"}, 0, null); assertEquals(6, num); -} - + } + @Test public void testRobotsExclusion() throws MalformedURLException { assertFalse(t_web.pageFetcher.isDisallowedByRobots(new URL("http://[ff01::114]/"))); assertTrue(t_web.pageFetcher.isDisallowedByRobots(new URL("http://[ff01::114]/disallowed"))); - assertTrue("There should be two entries parsed from robots.txt", t_web.pageFetcher.robotsCache.get("[ff01::114]").size() == 2); + assertTrue( + "There should be two entries parsed from robots.txt", + t_web.pageFetcher.robotsCache.get("[ff01::114]").size() == 2); } static class MockPageFetcher extends PageFetcher { - HashMap htmlMap = new HashMap<>(); - HashMap> linkMap = new HashMap<>(); - + HashMap htmlMap = new HashMap<>(); + HashMap> linkMap = new HashMap<>(); + public MockPageFetcher() throws IOException, URISyntaxException { (new SimplePostTool()).super(); - htmlMap.put("http://[ff01::114]", "page1page2"); - htmlMap.put("http://[ff01::114]/index.html", "page1page2"); - htmlMap.put("http://[ff01::114]/page1", ""); - htmlMap.put("http://[ff01::114]/page1/foo", ""); - htmlMap.put("http://[ff01::114]/page1/foo/bar", ""); - htmlMap.put("http://[ff01::114]/page2", ""); - 
htmlMap.put("http://[ff01::114]/disallowed", ""); + htmlMap.put( + "http://[ff01::114]", + "page1page2"); + htmlMap.put( + "http://[ff01::114]/index.html", + "page1page2"); + htmlMap.put( + "http://[ff01::114]/page1", + ""); + htmlMap.put( + "http://[ff01::114]/page1/foo", + ""); + htmlMap.put( + "http://[ff01::114]/page1/foo/bar", + ""); + htmlMap.put( + "http://[ff01::114]/page2", + ""); + htmlMap.put( + "http://[ff01::114]/disallowed", + ""); Set s = new HashSet<>(); s.add(new URI("http://[ff01::114]/page1")); @@ -215,18 +248,21 @@ public MockPageFetcher() throws IOException, URISyntaxException { s = new HashSet<>(); s.add(new URI("http://[ff01::114]/disallowed")); linkMap.put("http://[ff01::114]/page2", s); - + // Simulate a robots.txt file with comments and a few disallows StringBuilder sb = new StringBuilder(); - sb.append("# Comments appear after the \"#\" symbol at the start of a line, or after a directive\n"); + sb.append( + "# Comments appear after the \"#\" symbol at the start of a line, or after a directive\n"); sb.append("User-agent: * # match all bots\n"); sb.append("Disallow: # This is void\n"); sb.append("Disallow: /disallow # Disallow this path\n"); sb.append("Disallow: /nonexistingpath # Disallow this path\n"); - this.robotsCache.put("[ff01::114]", super. - parseRobotsTxt(new ByteArrayInputStream(sb.toString().getBytes(StandardCharsets.UTF_8)))); + this.robotsCache.put( + "[ff01::114]", + super.parseRobotsTxt( + new ByteArrayInputStream(sb.toString().getBytes(StandardCharsets.UTF_8)))); } - + @Override public PageFetcherResult readPageFromUrl(URL u) { PageFetcherResult res = new PageFetcherResult(); @@ -236,16 +272,15 @@ public PageFetcherResult readPageFromUrl(URL u) { } res.httpStatus = 200; res.contentType = "text/html"; - res.content = ByteBuffer.wrap( htmlMap.get(u.toString()).getBytes(StandardCharsets.UTF_8)); + res.content = ByteBuffer.wrap(htmlMap.get(u.toString()).getBytes(StandardCharsets.UTF_8)); return res; } - + @Override public Set getLinksFromWebPage(URL u, InputStream is, String type, URL postUrl) { Set s = linkMap.get(SimplePostTool.normalizeUrlEnding(u.toString())); - if(s == null) - s = new HashSet<>(); + if (s == null) s = new HashSet<>(); return s; } } -} \ No newline at end of file +} diff --git a/solr/core/src/test/org/apache/solr/util/SolrCliUptimeTest.java b/solr/core/src/test/org/apache/solr/util/SolrCliUptimeTest.java index 7bcca20bd15..cb5312cf22f 100644 --- a/solr/core/src/test/org/apache/solr/util/SolrCliUptimeTest.java +++ b/solr/core/src/test/org/apache/solr/util/SolrCliUptimeTest.java @@ -16,21 +16,24 @@ */ package org.apache.solr.util; -import org.junit.Test; - import static org.junit.Assert.assertEquals; +import org.junit.Test; + public class SolrCliUptimeTest { - @Test - public void testUptime() { - assertEquals("?", SolrCLI.uptime(0)); - assertEquals("0 days, 0 hours, 0 minutes, 0 seconds", SolrCLI.uptime(1)); + @Test + public void testUptime() { + assertEquals("?", SolrCLI.uptime(0)); + assertEquals("0 days, 0 hours, 0 minutes, 0 seconds", SolrCLI.uptime(1)); - assertEquals("Should have rounded down", "0 days, 0 hours, 0 minutes, 0 seconds", SolrCLI.uptime(499)); - assertEquals("Should have rounded up", "0 days, 0 hours, 0 minutes, 1 seconds", SolrCLI.uptime(501)); + assertEquals( + "Should have rounded down", "0 days, 0 hours, 0 minutes, 0 seconds", SolrCLI.uptime(499)); + assertEquals( + "Should have rounded up", "0 days, 0 hours, 0 minutes, 1 seconds", SolrCLI.uptime(501)); - // Overflow - assertEquals("24 days, 20 hours, 31 
minutes, 24 seconds", SolrCLI.uptime(Integer.MAX_VALUE)); - assertEquals("106751991167 days, 7 hours, 12 minutes, 56 seconds", SolrCLI.uptime(Long.MAX_VALUE)); - } + // Overflow + assertEquals("24 days, 20 hours, 31 minutes, 24 seconds", SolrCLI.uptime(Integer.MAX_VALUE)); + assertEquals( + "106751991167 days, 7 hours, 12 minutes, 56 seconds", SolrCLI.uptime(Long.MAX_VALUE)); + } } diff --git a/solr/core/src/test/org/apache/solr/util/SolrLogPostToolTest.java b/solr/core/src/test/org/apache/solr/util/SolrLogPostToolTest.java index 19ff8bf9739..1cfeff22845 100644 --- a/solr/core/src/test/org/apache/solr/util/SolrLogPostToolTest.java +++ b/solr/core/src/test/org/apache/solr/util/SolrLogPostToolTest.java @@ -44,9 +44,10 @@ private String sometimesSolr9Format(String record) { } @Test - public void testQueryRecord() throws Exception{ - String record = sometimesSolr9Format( - "2019-12-09 15:05:11.931 INFO (qtp2103763750-21) [c:logs4 s:shard1 r:core_node2 x:logs4_shard1_replica_n1] o.a.s.c.S.Request [logs4_shard1_replica_n1] webapp=/solr path=/select params={q=*:*&_=1575835181759&shards.purpose=36&isShard=true&wt=javabin&distrib=false} hits=234868 status=0 QTime=8\n"); + public void testQueryRecord() throws Exception { + String record = + sometimesSolr9Format( + "2019-12-09 15:05:11.931 INFO (qtp2103763750-21) [c:logs4 s:shard1 r:core_node2 x:logs4_shard1_replica_n1] o.a.s.c.S.Request [logs4_shard1_replica_n1] webapp=/solr path=/select params={q=*:*&_=1575835181759&shards.purpose=36&isShard=true&wt=javabin&distrib=false} hits=234868 status=0 QTime=8\n"); List docs = readDocs(record); assertEquals(docs.size(), 1); SolrInputDocument doc = docs.get(0); @@ -92,13 +93,14 @@ public void testQueryRecord() throws Exception{ assertEquals("REFINE_FACETS", purposes[1].toString()); } - // Requests which have multiple copies of the same param should be parsed so that the first param value only is - // indexed, since the log schema expects many of these to be single-valued fields and will throw errors if multiple - // values are received. + // Requests which have multiple copies of the same param should be parsed so that the first param + // value only is indexed, since the log schema expects many of these to be single-valued fields + // and will throw errors if multiple values are received. 
@Test public void testRecordsFirstInstanceOfSingleValuedParams() throws Exception { - final String record = sometimesSolr9Format( - "2019-12-09 15:05:01.931 INFO (qtp2103763750-21) [c:logs4 s:shard1 r:core_node2 x:logs4_shard1_replica_n1] o.a.s.c.S.Request [logs4_shard1_replica_n1] webapp=/solr path=/select params={q=*:*&q=inStock:true&_=1575835181759&shards.purpose=36&isShard=true&wt=javabin&wt=xml&distrib=false} hits=234868 status=0 QTime=8\n"); + final String record = + sometimesSolr9Format( + "2019-12-09 15:05:01.931 INFO (qtp2103763750-21) [c:logs4 s:shard1 r:core_node2 x:logs4_shard1_replica_n1] o.a.s.c.S.Request [logs4_shard1_replica_n1] webapp=/solr path=/select params={q=*:*&q=inStock:true&_=1575835181759&shards.purpose=36&isShard=true&wt=javabin&wt=xml&distrib=false} hits=234868 status=0 QTime=8\n"); List docs = readDocs(record); assertEquals(docs.size(), 1); @@ -113,8 +115,9 @@ public void testRecordsFirstInstanceOfSingleValuedParams() throws Exception { @Test public void testRTGRecord() throws Exception { - final String record = sometimesSolr9Format( - "2020-03-19 20:00:30.845 INFO (qtp1635378213-20354) [c:logs4 s:shard8 r:core_node63 x:logs4_shard8_replica_n60] o.a.s.c.S.Request [logs4_shard8_replica_n60] webapp=/solr path=/get params={qt=/get&_stateVer_=logs4:104&ids=id1&ids=id2&ids=id3&wt=javabin&version=2} status=0 QTime=61"); + final String record = + sometimesSolr9Format( + "2020-03-19 20:00:30.845 INFO (qtp1635378213-20354) [c:logs4 s:shard8 r:core_node63 x:logs4_shard8_replica_n60] o.a.s.c.S.Request [logs4_shard8_replica_n60] webapp=/solr path=/get params={qt=/get&_stateVer_=logs4:104&ids=id1&ids=id2&ids=id3&wt=javabin&version=2} status=0 QTime=61"); List docs = readDocs(record); assertEquals(docs.size(), 1); @@ -134,10 +137,11 @@ public void testRTGRecord() throws Exception { } @Test - public void testUpdateRecords() throws Exception{ - String record = sometimesSolr9Format( - "2019-12-25 20:38:23.498 INFO (qtp2103763750-126) [c:logs3 s:shard1 r:core_node2 x:logs3_shard1_replica_n1] o.a.s.u.p.LogUpdateProcessorFactory [logs3_shard1_replica_n1] webapp=/solr path=/update params={commitWithin=1000&overwrite=true&wt=json&_=1577306114481}{deleteByQuery=*:* (-1653925534487281664)} 0 11\n" + - "2019-12-25 20:42:13.411 INFO (qtp2103763750-303) [c:logs5 s:shard1 r:core_node2 x:logs5_shard1_replica_n1] o.a.s.u.p.LogUpdateProcessorFactory [logs5_shard1_replica_n1] webapp=/solr path=/update params={commitWithin=1000&overwrite=true&wt=json&_=1577306114481}{delete=[03bbe975-728a-4df8-aa25-fe25049dc0ef (-1653925775577972736)]} 0 1\n"); + public void testUpdateRecords() throws Exception { + String record = + sometimesSolr9Format( + "2019-12-25 20:38:23.498 INFO (qtp2103763750-126) [c:logs3 s:shard1 r:core_node2 x:logs3_shard1_replica_n1] o.a.s.u.p.LogUpdateProcessorFactory [logs3_shard1_replica_n1] webapp=/solr path=/update params={commitWithin=1000&overwrite=true&wt=json&_=1577306114481}{deleteByQuery=*:* (-1653925534487281664)} 0 11\n" + + "2019-12-25 20:42:13.411 INFO (qtp2103763750-303) [c:logs5 s:shard1 r:core_node2 x:logs5_shard1_replica_n1] o.a.s.u.p.LogUpdateProcessorFactory [logs5_shard1_replica_n1] webapp=/solr path=/update params={commitWithin=1000&overwrite=true&wt=json&_=1577306114481}{delete=[03bbe975-728a-4df8-aa25-fe25049dc0ef (-1653925775577972736)]} 0 1\n"); List docs = readDocs(record); assertEquals(docs.size(), 2); SolrInputDocument doc = docs.get(0); @@ -154,7 +158,7 @@ public void testUpdateRecords() throws Exception{ SolrInputField date1 = 
doc1.getField("date_dt"); SolrInputField type1 = doc1.getField("type_s"); SolrInputField core1 = doc1.getField("core_s"); - SolrInputField collection1= doc1.getField("collection_s"); + SolrInputField collection1 = doc1.getField("collection_s"); assertEquals(date1.getValue(), "2019-12-25T20:42:13.411Z"); assertEquals(type1.getValue(), "delete"); assertEquals(collection1.getValue(), "logs5"); @@ -162,79 +166,80 @@ public void testUpdateRecords() throws Exception{ } @Test - public void testErrorRecord() throws Exception{ - String record = "2019-12-31 01:49:53.251 ERROR (qtp2103763750-240) [c:logs6 s:shard1 r:core_node2 x:logs6_shard1_replica_n1] o.a.s.h.RequestHandlerBase org.apache.solr.common.SolrException: org.apache.solr.search.SyntaxError: Cannot parse 'id:[* TO *': Encountered \"\" at line 1, column 10.\n" + - "Was expecting one of:\n" + - " \"]\" ...\n" + - " \"}\" ...\n" + - " \n" + - "\tat org.apache.solr.handler.component.QueryComponent.prepare(QueryComponent.java:218)\n" + - "\tat org.apache.solr.handler.component.SearchHandler.handleRequestBody(SearchHandler.java:302)\n" + - "\tat org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:197)\n" + - "\tat org.apache.solr.core.SolrCore.execute(SolrCore.java:2582)\n" + - "\tat org.apache.solr.servlet.HttpSolrCall.execute(HttpSolrCall.java:799)\n" + - "\tat org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:578)\n" + - "\tat org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:419)\n" + - "\tat org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:351)\n" + - "\tat org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1602)\n" + - "\tat org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:540)\n" + - "\tat org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:146)\n" + - "\tat org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:548)\n" + - "\tat org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)\n" + - "\tat org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:257)\n" + - "\tat org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1711)\n" + - "\tat org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)\n" + - "\tat org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1347)\n" + - "\tat org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)\n" + - "\tat org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:480)\n" + - "\tat org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1678)\n" + - "\tat org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)\n" + - "\tat org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1249)\n" + - "\tat org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)\n" + - "\tat org.eclipse.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:220)\n" + - "\tat org.eclipse.jetty.server.handler.HandlerCollection.handle(HandlerCollection.java:152)\n" + - "\tat org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)\n" + - "\tat org.eclipse.jetty.rewrite.handler.RewriteHandler.handle(RewriteHandler.java:335)\n" + - "\tat org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)\n" + - "\tat 
org.eclipse.jetty.server.Server.handle(Server.java:505)\n" + - "\tat org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:370)\n" + - "\tat org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:267)\n" + - "\tat org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:305)\n" + - "\tat org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:103)\n" + - "\tat org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:117)\n" + - "\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:333)\n" + - "\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:310)\n" + - "\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)\n" + - "\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:126)\n" + - "\tat org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:366)\n" + - "\tat org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:781)\n" + - "\tat org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:917)\n" + - "\tat java.base/java.lang.Thread.run(Thread.java:834)\n" + - "Caused by: org.apache.solr.search.SyntaxError: Cannot parse 'id:[* TO *': Encountered \"\" at line 1, column 10.\n" + - "Was expecting one of:\n" + - " \"]\" ...\n" + - " \"}\" ...\n" + - " \n" + - "\tat org.apache.solr.parser.SolrQueryParserBase.parse(SolrQueryParserBase.java:266)\n" + - "\tat org.apache.solr.search.LuceneQParser.parse(LuceneQParser.java:49)\n" + - "\tat org.apache.solr.search.QParser.getQuery(QParser.java:174)\n" + - "\tat org.apache.solr.handler.component.QueryComponent.prepare(QueryComponent.java:160)\n" + - "\t... 41 more\n" + - "Caused by: org.apache.solr.parser.ParseException: Encountered \"\" at line 1, column 10.\n" + - "Was expecting one of:\n" + - " \"]\" ...\n" + - " \"}\" ...\n" + - " \n" + - "\tat org.apache.solr.parser.QueryParser.generateParseException(QueryParser.java:885)\n" + - "\tat org.apache.solr.parser.QueryParser.jj_consume_token(QueryParser.java:767)\n" + - "\tat org.apache.solr.parser.QueryParser.Term(QueryParser.java:479)\n" + - "\tat org.apache.solr.parser.QueryParser.Clause(QueryParser.java:278)\n" + - "\tat org.apache.solr.parser.QueryParser.Query(QueryParser.java:162)\n" + - "\tat org.apache.solr.parser.QueryParser.TopLevelQuery(QueryParser.java:131)\n" + - "\tat org.apache.solr.parser.SolrQueryParserBase.parse(SolrQueryParserBase.java:262)\n" + - "\t... 
44 more\n" + - "\n"+ - "2019-12-09 15:05:01.931 INFO (qtp2103763750-21) [c:logs4 s:shard1 r:core_node2 x:logs4_shard1_replica_n1] o.a.s.c.S.Request [logs4_shard1_replica_n1] webapp=/solr path=/select params={q=*:*&_=1575835181759&isShard=true&wt=javabin&distrib=false} hits=234868 status=0 QTime=8\n"; + public void testErrorRecord() throws Exception { + String record = + "2019-12-31 01:49:53.251 ERROR (qtp2103763750-240) [c:logs6 s:shard1 r:core_node2 x:logs6_shard1_replica_n1] o.a.s.h.RequestHandlerBase org.apache.solr.common.SolrException: org.apache.solr.search.SyntaxError: Cannot parse 'id:[* TO *': Encountered \"\" at line 1, column 10.\n" + + "Was expecting one of:\n" + + " \"]\" ...\n" + + " \"}\" ...\n" + + " \n" + + "\tat org.apache.solr.handler.component.QueryComponent.prepare(QueryComponent.java:218)\n" + + "\tat org.apache.solr.handler.component.SearchHandler.handleRequestBody(SearchHandler.java:302)\n" + + "\tat org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:197)\n" + + "\tat org.apache.solr.core.SolrCore.execute(SolrCore.java:2582)\n" + + "\tat org.apache.solr.servlet.HttpSolrCall.execute(HttpSolrCall.java:799)\n" + + "\tat org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:578)\n" + + "\tat org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:419)\n" + + "\tat org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:351)\n" + + "\tat org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1602)\n" + + "\tat org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:540)\n" + + "\tat org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:146)\n" + + "\tat org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:548)\n" + + "\tat org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)\n" + + "\tat org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:257)\n" + + "\tat org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1711)\n" + + "\tat org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)\n" + + "\tat org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1347)\n" + + "\tat org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)\n" + + "\tat org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:480)\n" + + "\tat org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1678)\n" + + "\tat org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)\n" + + "\tat org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1249)\n" + + "\tat org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)\n" + + "\tat org.eclipse.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:220)\n" + + "\tat org.eclipse.jetty.server.handler.HandlerCollection.handle(HandlerCollection.java:152)\n" + + "\tat org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)\n" + + "\tat org.eclipse.jetty.rewrite.handler.RewriteHandler.handle(RewriteHandler.java:335)\n" + + "\tat org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)\n" + + "\tat org.eclipse.jetty.server.Server.handle(Server.java:505)\n" + + "\tat org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:370)\n" + + "\tat 
org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:267)\n" + + "\tat org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:305)\n" + + "\tat org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:103)\n" + + "\tat org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:117)\n" + + "\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:333)\n" + + "\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:310)\n" + + "\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)\n" + + "\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:126)\n" + + "\tat org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:366)\n" + + "\tat org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:781)\n" + + "\tat org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:917)\n" + + "\tat java.base/java.lang.Thread.run(Thread.java:834)\n" + + "Caused by: org.apache.solr.search.SyntaxError: Cannot parse 'id:[* TO *': Encountered \"\" at line 1, column 10.\n" + + "Was expecting one of:\n" + + " \"]\" ...\n" + + " \"}\" ...\n" + + " \n" + + "\tat org.apache.solr.parser.SolrQueryParserBase.parse(SolrQueryParserBase.java:266)\n" + + "\tat org.apache.solr.search.LuceneQParser.parse(LuceneQParser.java:49)\n" + + "\tat org.apache.solr.search.QParser.getQuery(QParser.java:174)\n" + + "\tat org.apache.solr.handler.component.QueryComponent.prepare(QueryComponent.java:160)\n" + + "\t... 41 more\n" + + "Caused by: org.apache.solr.parser.ParseException: Encountered \"\" at line 1, column 10.\n" + + "Was expecting one of:\n" + + " \"]\" ...\n" + + " \"}\" ...\n" + + " \n" + + "\tat org.apache.solr.parser.QueryParser.generateParseException(QueryParser.java:885)\n" + + "\tat org.apache.solr.parser.QueryParser.jj_consume_token(QueryParser.java:767)\n" + + "\tat org.apache.solr.parser.QueryParser.Term(QueryParser.java:479)\n" + + "\tat org.apache.solr.parser.QueryParser.Clause(QueryParser.java:278)\n" + + "\tat org.apache.solr.parser.QueryParser.Query(QueryParser.java:162)\n" + + "\tat org.apache.solr.parser.QueryParser.TopLevelQuery(QueryParser.java:131)\n" + + "\tat org.apache.solr.parser.SolrQueryParserBase.parse(SolrQueryParserBase.java:262)\n" + + "\t... 
44 more\n" + + "\n" + + "2019-12-09 15:05:01.931 INFO (qtp2103763750-21) [c:logs4 s:shard1 r:core_node2 x:logs4_shard1_replica_n1] o.a.s.c.S.Request [logs4_shard1_replica_n1] webapp=/solr path=/select params={q=*:*&_=1575835181759&isShard=true&wt=javabin&distrib=false} hits=234868 status=0 QTime=8\n"; List docs = readDocs(record); assertEquals(docs.size(), 2); SolrInputDocument doc = docs.get(0); @@ -247,12 +252,10 @@ public void testErrorRecord() throws Exception{ SolrInputField root = doc.getField("root_cause_t"); SolrInputField collection = doc.getField("collection_s"); - assertEquals(date.getValue(), "2019-12-31T01:49:53.251Z"); assertEquals(type.getValue(), "error"); assertEquals(collection.getValue(), "logs6"); - assertEquals(shard.getValue(), "shard1"); assertEquals(replica.getValue(), "core_node2"); assertEquals(core.getValue(), "logs6_shard1_replica_n1"); @@ -263,12 +266,12 @@ public void testErrorRecord() throws Exception{ SolrInputField type1 = doc1.getField("type_s"); assertEquals(date1.getValue(), "2019-12-09T15:05:01.931Z"); assertEquals(type1.getValue(), "query"); - } @Test - public void testCommit() throws Exception{ - String record = "2021-10-08 16:42:10.636 INFO (qtp1080476785-26) [c:collection1 s:shard1 r:core_node2 x:collection1_shard1_replica_n1] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard1_replica_n1] webapp=/solr path=/update params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=} 0 152"; + public void testCommit() throws Exception { + String record = + "2021-10-08 16:42:10.636 INFO (qtp1080476785-26) [c:collection1 s:shard1 r:core_node2 x:collection1_shard1_replica_n1] o.a.s.u.p.LogUpdateProcessorFactory [collection1_shard1_replica_n1] webapp=/solr path=/update params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=} 0 152"; List docs = readDocs(record); assertEquals(docs.size(), 1); SolrInputDocument doc = docs.get(0); @@ -286,11 +289,11 @@ public void testCommit() throws Exception{ assertEquals(collection.getValue(), "collection1"); } - @Test - public void testNewSearcher() throws Exception{ - String record = sometimesSolr9Format( -"2022-01-25 20:01:15.903 INFO (searcherExecutor-19-thread-1-processing-localhost:8983_solr test_shard1_replica_n1 test shard1 core_node2) [c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.SolrCore Registered new searcher autowarm time: 0 ms"); + public void testNewSearcher() throws Exception { + String record = + sometimesSolr9Format( + "2022-01-25 20:01:15.903 INFO (searcherExecutor-19-thread-1-processing-localhost:8983_solr test_shard1_replica_n1 test shard1 core_node2) [c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.SolrCore Registered new searcher autowarm time: 0 ms"); List docs = readDocs(record); assertEquals(docs.size(), 1); @@ -310,7 +313,8 @@ public void testNewSearcher() throws Exception{ // Ensure SolrLogPostTool parses _all_ log lines into searchable records @Test public void testOtherRecord() throws Exception { - final String record = "2020-06-11 11:59:08.386 INFO (main) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)"; + final String record = + "2020-06-11 11:59:08.386 INFO (main) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... 
(0) -> (2)"; final List docs = readDocs(record); assertEquals(docs.size(), 1); @@ -337,5 +341,4 @@ private List readDocs(String records) throws Exception { } return list; } - -} \ No newline at end of file +} diff --git a/solr/core/src/test/org/apache/solr/util/SolrPluginUtilsTest.java b/solr/core/src/test/org/apache/solr/util/SolrPluginUtilsTest.java index 98153418b13..6b917bba506 100644 --- a/solr/core/src/test/org/apache/solr/util/SolrPluginUtilsTest.java +++ b/solr/core/src/test/org/apache/solr/util/SolrPluginUtilsTest.java @@ -23,7 +23,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; - import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; @@ -40,90 +39,94 @@ import org.junit.BeforeClass; import org.junit.Test; -/** - * Tests that the functions in SolrPluginUtils work as advertised. - */ +/** Tests that the functions in SolrPluginUtils work as advertised. */ public class SolrPluginUtilsTest extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { - initCore("solrconfig.xml","schema.xml"); + initCore("solrconfig.xml", "schema.xml"); } @Test public void testPartialEscape() { - assertEquals("",pe("")); - assertEquals("foo",pe("foo")); - assertEquals("foo\\:bar",pe("foo:bar")); - assertEquals("+foo\\:bar",pe("+foo:bar")); - assertEquals("foo \\! bar",pe("foo ! bar")); - assertEquals("foo\\?",pe("foo?")); - assertEquals("foo \"bar\"",pe("foo \"bar\"")); - assertEquals("foo\\! \"bar\"",pe("foo! \"bar\"")); - + assertEquals("", pe("")); + assertEquals("foo", pe("foo")); + assertEquals("foo\\:bar", pe("foo:bar")); + assertEquals("+foo\\:bar", pe("+foo:bar")); + assertEquals("foo \\! bar", pe("foo ! bar")); + assertEquals("foo\\?", pe("foo?")); + assertEquals("foo \"bar\"", pe("foo \"bar\"")); + assertEquals("foo\\! \"bar\"", pe("foo! 
\"bar\"")); } @Test public void testStripUnbalancedQuotes() { - - assertEquals("",strip("")); - assertEquals("foo",strip("foo")); - assertEquals("foo \"bar\"",strip("foo \"bar\"")); - assertEquals("42",strip("42\"")); - assertEquals("\"how now brown cow?\"",strip("\"how now brown cow?\"")); - assertEquals("\"you go\" \"now!\"",strip("\"you go\" \"now!\"")); - + + assertEquals("", strip("")); + assertEquals("foo", strip("foo")); + assertEquals("foo \"bar\"", strip("foo \"bar\"")); + assertEquals("42", strip("42\"")); + assertEquals("\"how now brown cow?\"", strip("\"how now brown cow?\"")); + assertEquals("\"you go\" \"now!\"", strip("\"you go\" \"now!\"")); } @Test public void testStripIllegalOperators() { - assertEquals("",stripOp("")); - assertEquals("foo",stripOp("foo")); - assertEquals("foo -bar",stripOp("foo -bar")); - assertEquals("foo +bar",stripOp("foo +bar")); - assertEquals("foo + bar",stripOp("foo + bar")); - assertEquals("foo+ bar",stripOp("foo+ bar")); - assertEquals("foo+ bar",stripOp("foo+ bar")); - assertEquals("foo+",stripOp("foo+")); - assertEquals("foo bar",stripOp("foo bar -")); - assertEquals("foo bar ",stripOp("foo bar - + ++")); - assertEquals("foo bar",stripOp("foo --bar")); - assertEquals("foo bar ",stripOp("foo -------------------------------------------------------------------------------------------------------------------------bar --")); - assertEquals("foo bar ",stripOp("foo --bar -----------------------------------------------------------------------------------------------------------------------")); - + assertEquals("", stripOp("")); + assertEquals("foo", stripOp("foo")); + assertEquals("foo -bar", stripOp("foo -bar")); + assertEquals("foo +bar", stripOp("foo +bar")); + assertEquals("foo + bar", stripOp("foo + bar")); + assertEquals("foo+ bar", stripOp("foo+ bar")); + assertEquals("foo+ bar", stripOp("foo+ bar")); + assertEquals("foo+", stripOp("foo+")); + assertEquals("foo bar", stripOp("foo bar -")); + assertEquals("foo bar ", stripOp("foo bar - + ++")); + assertEquals("foo bar", stripOp("foo --bar")); + assertEquals( + "foo bar ", + stripOp( + "foo -------------------------------------------------------------------------------------------------------------------------bar --")); + assertEquals( + "foo bar ", + stripOp( + "foo --bar -----------------------------------------------------------------------------------------------------------------------")); } @Test public void testParseFieldBoosts() throws Exception { - Map e1 = new HashMap<>(); - e1.put("fieldOne",2.3f); - e1.put("fieldTwo",null); - e1.put("fieldThree",-0.4f); - - assertEquals("basic e1", e1, SolrPluginUtils.parseFieldBoosts - ("fieldOne^2.3 fieldTwo fieldThree^-0.4")); - assertEquals("spacey e1", e1, SolrPluginUtils.parseFieldBoosts - (" fieldOne^2.3 fieldTwo fieldThree^-0.4 ")); - assertEquals("really spacey e1", e1, SolrPluginUtils.parseFieldBoosts - (" \t fieldOne^2.3 \n fieldTwo fieldThree^-0.4 ")); - assertEquals("really spacey e1", e1, SolrPluginUtils.parseFieldBoosts - (new String[]{" \t fieldOne^2.3 \n", - " fieldTwo fieldThree^-0.4 ", - " "})); - - Map e2 = new HashMap<>(); - assertEquals("empty e2", e2, SolrPluginUtils.parseFieldBoosts - ("")); - assertEquals("spacey e2", e2, SolrPluginUtils.parseFieldBoosts - (" \t ")); + Map e1 = new HashMap<>(); + e1.put("fieldOne", 2.3f); + e1.put("fieldTwo", null); + e1.put("fieldThree", -0.4f); + + assertEquals( + "basic e1", e1, SolrPluginUtils.parseFieldBoosts("fieldOne^2.3 fieldTwo fieldThree^-0.4")); + assertEquals( + "spacey e1", + e1, 
+ SolrPluginUtils.parseFieldBoosts(" fieldOne^2.3 fieldTwo fieldThree^-0.4 ")); + assertEquals( + "really spacey e1", + e1, + SolrPluginUtils.parseFieldBoosts(" \t fieldOne^2.3 \n fieldTwo fieldThree^-0.4 ")); + assertEquals( + "really spacey e1", + e1, + SolrPluginUtils.parseFieldBoosts( + new String[] {" \t fieldOne^2.3 \n", " fieldTwo fieldThree^-0.4 ", " "})); + + Map e2 = new HashMap<>(); + assertEquals("empty e2", e2, SolrPluginUtils.parseFieldBoosts("")); + assertEquals("spacey e2", e2, SolrPluginUtils.parseFieldBoosts(" \t ")); } - @Test + @Test public void testDisjunctionMaxQueryParser() throws Exception { - + Query out; String t; @@ -131,121 +134,109 @@ public void testDisjunctionMaxQueryParser() throws Exception { QParser qparser = QParser.getParser("hi", "dismax", req); DisjunctionMaxQueryParser qp = - new SolrPluginUtils.DisjunctionMaxQueryParser(qparser, req.getParams().get("df")); + new SolrPluginUtils.DisjunctionMaxQueryParser(qparser, req.getParams().get("df")); - qp.addAlias("hoss", 0.01f, SolrPluginUtils.parseFieldBoosts - ("title^2.0 title_stemmed name^1.2 subject^0.5")); + qp.addAlias( + "hoss", + 0.01f, + SolrPluginUtils.parseFieldBoosts("title^2.0 title_stemmed name^1.2 subject^0.5")); qp.addAlias("test", 0.01f, SolrPluginUtils.parseFieldBoosts("text^2.0")); - qp.addAlias("unused", 1.0f, SolrPluginUtils.parseFieldBoosts - ("subject^0.5 sind^1.5")); - + qp.addAlias("unused", 1.0f, SolrPluginUtils.parseFieldBoosts("subject^0.5 sind^1.5")); /* first some sanity tests that don't use aliasing at all */ t = "XXXXXXXX"; out = qp.parse(t); - assertNotNull(t+" sanity test gave back null", out); - assertTrue(t+" sanity test isn't TermQuery: " + out.getClass(), - out instanceof TermQuery); - assertEquals(t+" sanity test is wrong field", - qp.getDefaultField(), - ((TermQuery)out).getTerm().field()); + assertNotNull(t + " sanity test gave back null", out); + assertTrue(t + " sanity test isn't TermQuery: " + out.getClass(), out instanceof TermQuery); + assertEquals( + t + " sanity test is wrong field", + qp.getDefaultField(), + ((TermQuery) out).getTerm().field()); t = "subject:XXXXXXXX"; out = qp.parse(t); - assertNotNull(t+" sanity test gave back null", out); - assertTrue(t+" sanity test isn't TermQuery: " + out.getClass(), - out instanceof TermQuery); - assertEquals(t+" sanity test is wrong field", "subject", - ((TermQuery)out).getTerm().field()); + assertNotNull(t + " sanity test gave back null", out); + assertTrue(t + " sanity test isn't TermQuery: " + out.getClass(), out instanceof TermQuery); + assertEquals(t + " sanity test is wrong field", "subject", ((TermQuery) out).getTerm().field()); /* field has untokenzied type, so this should be a term anyway */ t = "sind:\"simple phrase\""; out = qp.parse(t); - assertNotNull(t+" sanity test gave back null", out); - assertTrue(t+" sanity test isn't TermQuery: " + out.getClass(), - out instanceof TermQuery); - assertEquals(t+" sanity test is wrong field", "sind", - ((TermQuery)out).getTerm().field()); + assertNotNull(t + " sanity test gave back null", out); + assertTrue(t + " sanity test isn't TermQuery: " + out.getClass(), out instanceof TermQuery); + assertEquals(t + " sanity test is wrong field", "sind", ((TermQuery) out).getTerm().field()); t = "subject:\"simple phrase\""; out = qp.parse(t); - assertNotNull(t+" sanity test gave back null", out); - assertTrue(t+" sanity test isn't PhraseQuery: " + out.getClass(), - out instanceof PhraseQuery); - assertEquals(t+" sanity test is wrong field", "subject", - 
((PhraseQuery)out).getTerms()[0].field()); + assertNotNull(t + " sanity test gave back null", out); + assertTrue(t + " sanity test isn't PhraseQuery: " + out.getClass(), out instanceof PhraseQuery); + assertEquals( + t + " sanity test is wrong field", "subject", ((PhraseQuery) out).getTerms()[0].field()); - /* now some tests that use aliasing */ /* basic usage of single "term" */ t = "hoss:XXXXXXXX"; out = qp.parse(t); - assertNotNull(t+" was null", out); - assertTrue(t+" wasn't a DMQ:" + out.getClass(), - out instanceof DisjunctionMaxQuery); - assertEquals(t+" wrong number of clauses", 4, - countItems(((DisjunctionMaxQuery)out).iterator())); - + assertNotNull(t + " was null", out); + assertTrue(t + " wasn't a DMQ:" + out.getClass(), out instanceof DisjunctionMaxQuery); + assertEquals( + t + " wrong number of clauses", 4, countItems(((DisjunctionMaxQuery) out).iterator())); /* odd case, but should still work, DMQ of one clause */ t = "test:YYYYY"; out = qp.parse(t); - assertNotNull(t+" was null", out); - assertTrue(t+" wasn't a DMQ:" + out.getClass(), - out instanceof DisjunctionMaxQuery); - assertEquals(t+" wrong number of clauses", 1, - countItems(((DisjunctionMaxQuery)out).iterator())); - + assertNotNull(t + " was null", out); + assertTrue(t + " wasn't a DMQ:" + out.getClass(), out instanceof DisjunctionMaxQuery); + assertEquals( + t + " wrong number of clauses", 1, countItems(((DisjunctionMaxQuery) out).iterator())); + /* basic usage of multiple "terms" */ t = "hoss:XXXXXXXX test:YYYYY"; out = qp.parse(t); - assertNotNull(t+" was null", out); - assertTrue(t+" wasn't a boolean:" + out.getClass(), - out instanceof BooleanQuery); + assertNotNull(t + " was null", out); + assertTrue(t + " wasn't a boolean:" + out.getClass(), out instanceof BooleanQuery); { - BooleanQuery bq = (BooleanQuery)out; + BooleanQuery bq = (BooleanQuery) out; List clauses = new ArrayList<>(bq.clauses()); - assertEquals(t+" wrong number of clauses", 2, - clauses.size()); + assertEquals(t + " wrong number of clauses", 2, clauses.size()); Query sub = clauses.get(0).getQuery(); - assertTrue(t+" first wasn't a DMQ:" + sub.getClass(), - sub instanceof DisjunctionMaxQuery); - assertEquals(t+" first had wrong number of clauses", 4, - countItems(((DisjunctionMaxQuery)sub).iterator())); + assertTrue(t + " first wasn't a DMQ:" + sub.getClass(), sub instanceof DisjunctionMaxQuery); + assertEquals( + t + " first had wrong number of clauses", + 4, + countItems(((DisjunctionMaxQuery) sub).iterator())); sub = clauses.get(1).getQuery(); - assertTrue(t+" second wasn't a DMQ:" + sub.getClass(), - sub instanceof DisjunctionMaxQuery); - assertEquals(t+" second had wrong number of clauses", 1, - countItems(((DisjunctionMaxQuery)sub).iterator())); + assertTrue(t + " second wasn't a DMQ:" + sub.getClass(), sub instanceof DisjunctionMaxQuery); + assertEquals( + t + " second had wrong number of clauses", + 1, + countItems(((DisjunctionMaxQuery) sub).iterator())); } - + /* a phrase, and a term that is a stop word for some fields */ t = "hoss:\"XXXXXX YYYYY\" hoss:the"; out = qp.parse(t); - assertNotNull(t+" was null", out); - assertTrue(t+" wasn't a boolean:" + out.getClass(), - out instanceof BooleanQuery); + assertNotNull(t + " was null", out); + assertTrue(t + " wasn't a boolean:" + out.getClass(), out instanceof BooleanQuery); { - BooleanQuery bq = (BooleanQuery)out; + BooleanQuery bq = (BooleanQuery) out; List clauses = new ArrayList<>(bq.clauses()); - assertEquals(t+" wrong number of clauses", 2, - clauses.size()); + assertEquals(t + " 
wrong number of clauses", 2, clauses.size()); Query sub = clauses.get(0).getQuery(); - assertTrue(t+" first wasn't a DMQ:" + sub.getClass(), - sub instanceof DisjunctionMaxQuery); - assertEquals(t+" first had wrong number of clauses", 4, - countItems(((DisjunctionMaxQuery)sub).iterator())); + assertTrue(t + " first wasn't a DMQ:" + sub.getClass(), sub instanceof DisjunctionMaxQuery); + assertEquals( + t + " first had wrong number of clauses", + 4, + countItems(((DisjunctionMaxQuery) sub).iterator())); sub = clauses.get(1).getQuery(); - assertTrue(t+" second wasn't a DMQ:" + sub.getClass(), - sub instanceof DisjunctionMaxQuery); - assertEquals(t+" second had wrong number of clauses (stop words)", 2, - countItems(((DisjunctionMaxQuery)sub).iterator())); + assertTrue(t + " second wasn't a DMQ:" + sub.getClass(), sub instanceof DisjunctionMaxQuery); + assertEquals( + t + " second had wrong number of clauses (stop words)", + 2, + countItems(((DisjunctionMaxQuery) sub).iterator())); } - - - } private static int countItems(Iterator i) { @@ -257,7 +248,7 @@ private static int countItems(Iterator i) { return count; } - @Test + @Test public void testMinShouldMatchCalculator() { /* zero is zero is zero */ @@ -279,7 +270,7 @@ public void testMinShouldMatchCalculator() { assertEquals(1, calcMSM(4, "25%")); assertEquals(1, calcMSM(5, " 25% ")); assertEquals(2, calcMSM(10, "25%")); - + /* negative percentages with rounding */ assertEquals(3, calcMSM(3, " \n-25%\n ")); assertEquals(3, calcMSM(4, "-25%")); @@ -314,35 +305,34 @@ public void testMinShouldMatchCalculator() { assertEquals(97, calcMSM(100, "3<-25% 10<-3")); BooleanQuery.Builder q = new BooleanQuery.Builder(); - q.add(new TermQuery(new Term("a","b")), Occur.SHOULD); - q.add(new TermQuery(new Term("a","c")), Occur.SHOULD); - q.add(new TermQuery(new Term("a","d")), Occur.SHOULD); - q.add(new TermQuery(new Term("a","d")), Occur.SHOULD); + q.add(new TermQuery(new Term("a", "b")), Occur.SHOULD); + q.add(new TermQuery(new Term("a", "c")), Occur.SHOULD); + q.add(new TermQuery(new Term("a", "d")), Occur.SHOULD); + q.add(new TermQuery(new Term("a", "d")), Occur.SHOULD); SolrPluginUtils.setMinShouldMatch(q, "0"); assertEquals(0, q.build().getMinimumNumberShouldMatch()); - + SolrPluginUtils.setMinShouldMatch(q, "1"); assertEquals(1, q.build().getMinimumNumberShouldMatch()); - + SolrPluginUtils.setMinShouldMatch(q, "50%"); assertEquals(2, q.build().getMinimumNumberShouldMatch()); SolrPluginUtils.setMinShouldMatch(q, "99"); assertEquals(4, q.build().getMinimumNumberShouldMatch()); - q.add(new TermQuery(new Term("a","e")), Occur.MUST); - q.add(new TermQuery(new Term("a","f")), Occur.MUST); + q.add(new TermQuery(new Term("a", "e")), Occur.MUST); + q.add(new TermQuery(new Term("a", "f")), Occur.MUST); SolrPluginUtils.setMinShouldMatch(q, "50%"); assertEquals(2, q.build().getMinimumNumberShouldMatch()); - } @Test public void testMinShouldMatchBadQueries() { Exception e = expectThrows(SolrException.class, () -> calcMSM(2, "1<")); - assertEquals("Invalid 'mm' spec: '1<'. Expecting values before and after '<'" , e.getMessage()); + assertEquals("Invalid 'mm' spec: '1<'. Expecting values before and after '<'", e.getMessage()); e = expectThrows(SolrException.class, () -> calcMSM(2, "1 calcMSM(1, "x%")); @@ -351,21 +341,21 @@ public void testMinShouldMatchBadQueries() { assertEquals("Invalid 'mm' spec. Expecting an integer.", e.getMessage()); e = expectThrows(SolrException.class, () -> calcMSM(1, "x")); assertEquals("Invalid 'mm' spec. 
Expecting an integer.", e.getMessage()); - + e = expectThrows(SolrException.class, () -> calcMSM(10, "2<-25% 9 calcMSM(10, "2<-25% 9<")); - assertEquals("Invalid 'mm' spec: '9<'. Expecting values before and after '<'" , e.getMessage()); + assertEquals("Invalid 'mm' spec: '9<'. Expecting values before and after '<'", e.getMessage()); } @Test public void testMinShouldMatchAutoRelax() { /* The basics should not be affected by autoRelax */ BooleanQuery.Builder q = new BooleanQuery.Builder(); - q.add(new TermQuery(new Term("a","b")), Occur.SHOULD); - q.add(new TermQuery(new Term("a","c")), Occur.SHOULD); - q.add(new TermQuery(new Term("a","d")), Occur.SHOULD); - q.add(new TermQuery(new Term("a","d")), Occur.SHOULD); + q.add(new TermQuery(new Term("a", "b")), Occur.SHOULD); + q.add(new TermQuery(new Term("a", "c")), Occur.SHOULD); + q.add(new TermQuery(new Term("a", "d")), Occur.SHOULD); + q.add(new TermQuery(new Term("a", "d")), Occur.SHOULD); SolrPluginUtils.setMinShouldMatch(q, "0", true); assertEquals(0, q.build().getMinimumNumberShouldMatch()); @@ -379,26 +369,26 @@ public void testMinShouldMatchAutoRelax() { SolrPluginUtils.setMinShouldMatch(q, "99", true); assertEquals(4, q.build().getMinimumNumberShouldMatch()); - q.add(new TermQuery(new Term("a","e")), Occur.MUST); - q.add(new TermQuery(new Term("a","f")), Occur.MUST); + q.add(new TermQuery(new Term("a", "e")), Occur.MUST); + q.add(new TermQuery(new Term("a", "f")), Occur.MUST); SolrPluginUtils.setMinShouldMatch(q, "50%", true); assertEquals(2, q.build().getMinimumNumberShouldMatch()); /* Simulate stopwords through uneven disjuncts */ q = new BooleanQuery.Builder(); - q.add(new DisjunctionMaxQuery(Collections.singleton(new TermQuery(new Term("a","foo"))), 0.0f), Occur.SHOULD); - DisjunctionMaxQuery dmq = new DisjunctionMaxQuery( - Arrays.asList( - new TermQuery(new Term("a","foo")), - new TermQuery(new Term("b","foo"))), - 0f); + q.add( + new DisjunctionMaxQuery(Collections.singleton(new TermQuery(new Term("a", "foo"))), 0.0f), + Occur.SHOULD); + DisjunctionMaxQuery dmq = + new DisjunctionMaxQuery( + Arrays.asList(new TermQuery(new Term("a", "foo")), new TermQuery(new Term("b", "foo"))), + 0f); q.add(dmq, Occur.SHOULD); - dmq = new DisjunctionMaxQuery( - Arrays.asList( - new TermQuery(new Term("a","bar")), - new TermQuery(new Term("b","bar"))), - 0f); + dmq = + new DisjunctionMaxQuery( + Arrays.asList(new TermQuery(new Term("a", "bar")), new TermQuery(new Term("b", "bar"))), + 0f); q.add(dmq, Occur.SHOULD); // Without relax @@ -410,24 +400,27 @@ public void testMinShouldMatchAutoRelax() { assertEquals(2, q.build().getMinimumNumberShouldMatch()); // Still same result with a MUST clause extra - q.add(new TermQuery(new Term("a","must")), Occur.MUST); + q.add(new TermQuery(new Term("a", "must")), Occur.MUST); SolrPluginUtils.setMinShouldMatch(q, "100%", true); assertEquals(2, q.build().getMinimumNumberShouldMatch()); // Combination of dismax and non-dismax SHOULD clauses - q.add(new TermQuery(new Term("b","should")), Occur.SHOULD); + q.add(new TermQuery(new Term("b", "should")), Occur.SHOULD); SolrPluginUtils.setMinShouldMatch(q, "100%", true); assertEquals(3, q.build().getMinimumNumberShouldMatch()); } private static class InvokeSettersTestClass { private float aFloat = random().nextFloat(); + public float getAFloat() { return aFloat; } + public void setAFloat(float aFloat) { this.aFloat = aFloat; } + public void setAFloat(String aFloat) { this.aFloat = Float.parseFloat(aFloat); } @@ -442,7 +435,7 @@ public void testInvokeSetters() { 
public void implTestInvokeSetters(final Float theFloat, final Object theFloatObject) { final InvokeSettersTestClass bean = new InvokeSettersTestClass(); - final Map initArgs = new HashMap<>(); + final Map initArgs = new HashMap<>(); initArgs.put("aFloat", theFloatObject); SolrPluginUtils.invokeSetters(bean, initArgs.entrySet()); assertEquals(bean.getAFloat(), theFloat.floatValue(), 0.0); @@ -452,20 +445,19 @@ public void implTestInvokeSetters(final Float theFloat, final Object theFloatObj public String pe(CharSequence s) { return SolrPluginUtils.partialEscape(s).toString(); } - + /** macro */ public String strip(CharSequence s) { return SolrPluginUtils.stripUnbalancedQuotes(s).toString(); } - + /** macro */ public String stripOp(CharSequence s) { return SolrPluginUtils.stripIllegalOperators(s).toString(); } - + /** macro */ public int calcMSM(int clauses, String spec) { return SolrPluginUtils.calculateMinShouldMatch(clauses, spec); } } - diff --git a/solr/core/src/test/org/apache/solr/util/TestCircuitBreaker.java b/solr/core/src/test/org/apache/solr/util/TestCircuitBreaker.java index 9c48c8c348b..d377ba2a518 100644 --- a/solr/core/src/test/org/apache/solr/util/TestCircuitBreaker.java +++ b/solr/core/src/test/org/apache/solr/util/TestCircuitBreaker.java @@ -17,6 +17,9 @@ package org.apache.solr.util; +import static org.hamcrest.CoreMatchers.containsString; + +import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.HashMap; @@ -25,8 +28,6 @@ import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; - -import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.CommonParams; @@ -46,14 +47,11 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.hamcrest.CoreMatchers.containsString; - public class TestCircuitBreaker extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private final static int NUM_DOCS = 20; + private static final int NUM_DOCS = 20; - @Rule - public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); + @Rule public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule()); @BeforeClass public static void setUpClass() throws Exception { @@ -62,7 +60,7 @@ public static void setUpClass() throws Exception { System.setProperty("documentCache.enabled", "true"); initCore("solrconfig-memory-circuitbreaker.xml", "schema.xml"); - for (int i = 0 ; i < NUM_DOCS ; i ++) { + for (int i = 0; i < NUM_DOCS; i++) { assertU(adoc("name", "john smith", "id", "1")); assertU(adoc("name", "johathon smith", "id", "2")); assertU(adoc("name", "john percival smith", "id", "3")); @@ -70,7 +68,7 @@ public static void setUpClass() throws Exception { assertU(adoc("id", "2", "title", "this is another title.", "inStock_b1", "true")); assertU(adoc("id", "3", "title", "Mary had a little lamb.", "inStock_b1", "false")); - //commit inside the loop to get multiple segments to make search as realistic as possible + // commit inside the loop to get multiple segments to make search as realistic as possible assertU(commit()); } } @@ -93,17 +91,21 @@ public void testCBAlwaysTrips() { removeAllExistingCircuitBreakers(); - PluginInfo pluginInfo = 
h.getCore().getSolrConfig().getPluginInfo(CircuitBreakerManager.class.getName()); + PluginInfo pluginInfo = + h.getCore().getSolrConfig().getPluginInfo(CircuitBreakerManager.class.getName()); - CircuitBreaker.CircuitBreakerConfig circuitBreakerConfig = CircuitBreakerManager.buildCBConfig(pluginInfo); + CircuitBreaker.CircuitBreakerConfig circuitBreakerConfig = + CircuitBreakerManager.buildCBConfig(pluginInfo); CircuitBreaker circuitBreaker = new MockCircuitBreaker(circuitBreakerConfig); h.getCore().getCircuitBreakerManager().register(circuitBreaker); - expectThrows(SolrException.class, () -> { - h.query(req("name:\"john smith\"")); - }); + expectThrows( + SolrException.class, + () -> { + h.query(req("name:\"john smith\"")); + }); } public void testCBFakeMemoryPressure() { @@ -114,21 +116,25 @@ public void testCBFakeMemoryPressure() { removeAllExistingCircuitBreakers(); - PluginInfo pluginInfo = h.getCore().getSolrConfig().getPluginInfo(CircuitBreakerManager.class.getName()); + PluginInfo pluginInfo = + h.getCore().getSolrConfig().getPluginInfo(CircuitBreakerManager.class.getName()); - CircuitBreaker.CircuitBreakerConfig circuitBreakerConfig = CircuitBreakerManager.buildCBConfig(pluginInfo); + CircuitBreaker.CircuitBreakerConfig circuitBreakerConfig = + CircuitBreakerManager.buildCBConfig(pluginInfo); CircuitBreaker circuitBreaker = new FakeMemoryPressureCircuitBreaker(circuitBreakerConfig); h.getCore().getCircuitBreakerManager().register(circuitBreaker); - expectThrows(SolrException.class, () -> { - h.query(req("name:\"john smith\"")); - }); + expectThrows( + SolrException.class, + () -> { + h.query(req("name:\"john smith\"")); + }); } public void testBuildingMemoryPressure() { - ExecutorService executor = ExecutorUtil.newMDCAwareCachedThreadPool( - new SolrNamedThreadFactory("TestCircuitBreaker")); + ExecutorService executor = + ExecutorUtil.newMDCAwareCachedThreadPool(new SolrNamedThreadFactory("TestCircuitBreaker")); HashMap<String, String> args = new HashMap<String, String>(); args.put(QueryParsing.DEFTYPE, CircuitBreaker.NAME); @@ -139,31 +145,36 @@ public void testBuildingMemoryPressure() { try { removeAllExistingCircuitBreakers(); - PluginInfo pluginInfo = h.getCore().getSolrConfig().getPluginInfo(CircuitBreakerManager.class.getName()); + PluginInfo pluginInfo = + h.getCore().getSolrConfig().getPluginInfo(CircuitBreakerManager.class.getName()); - CircuitBreaker.CircuitBreakerConfig circuitBreakerConfig = CircuitBreakerManager.buildCBConfig(pluginInfo); - CircuitBreaker circuitBreaker = new BuildingUpMemoryPressureCircuitBreaker(circuitBreakerConfig); + CircuitBreaker.CircuitBreakerConfig circuitBreakerConfig = + CircuitBreakerManager.buildCBConfig(pluginInfo); + CircuitBreaker circuitBreaker = + new BuildingUpMemoryPressureCircuitBreaker(circuitBreakerConfig); h.getCore().getCircuitBreakerManager().register(circuitBreaker); List<Future<?>> futures = new ArrayList<>(); for (int i = 0; i < 5; i++) { - Future<?> future = executor.submit(() -> { - try { - h.query(req("name:\"john smith\"")); - } catch (SolrException e) { - assertThat(e.getMessage(), containsString("Circuit Breakers tripped")); - failureCount.incrementAndGet(); - } catch (Exception e) { - throw new RuntimeException(e.getMessage()); - } - }); + Future<?> future = + executor.submit( + () -> { + try { + h.query(req("name:\"john smith\"")); + } catch (SolrException e) { + assertThat(e.getMessage(), containsString("Circuit Breakers tripped")); + failureCount.incrementAndGet(); + } catch (Exception e) { + throw new RuntimeException(e.getMessage()); + } + }); 
futures.add(future); } - for (Future<?> future : futures) { + for (Future<?> future : futures) { try { future.get(); } catch (Exception e) { @@ -190,14 +201,16 @@ public void testBuildingMemoryPressure() { public void testFakeCPUCircuitBreaker() { AtomicInteger failureCount = new AtomicInteger(); - ExecutorService executor = ExecutorUtil.newMDCAwareCachedThreadPool( - new SolrNamedThreadFactory("TestCircuitBreaker")); + ExecutorService executor = + ExecutorUtil.newMDCAwareCachedThreadPool(new SolrNamedThreadFactory("TestCircuitBreaker")); try { removeAllExistingCircuitBreakers(); - PluginInfo pluginInfo = h.getCore().getSolrConfig().getPluginInfo(CircuitBreakerManager.class.getName()); + PluginInfo pluginInfo = + h.getCore().getSolrConfig().getPluginInfo(CircuitBreakerManager.class.getName()); - CircuitBreaker.CircuitBreakerConfig circuitBreakerConfig = CircuitBreakerManager.buildCBConfig(pluginInfo); + CircuitBreaker.CircuitBreakerConfig circuitBreakerConfig = + CircuitBreakerManager.buildCBConfig(pluginInfo); CircuitBreaker circuitBreaker = new FakeCPUCircuitBreaker(circuitBreakerConfig); h.getCore().getCircuitBreakerManager().register(circuitBreaker); @@ -205,21 +218,23 @@ public void testFakeCPUCircuitBreaker() { List<Future<?>> futures = new ArrayList<>(); for (int i = 0; i < 5; i++) { - Future<?> future = executor.submit(() -> { - try { - h.query(req("name:\"john smith\"")); - } catch (SolrException e) { - assertThat(e.getMessage(), containsString("Circuit Breakers tripped")); - failureCount.incrementAndGet(); - } catch (Exception e) { - throw new RuntimeException(e.getMessage()); - } - }); + Future<?> future = + executor.submit( + () -> { + try { + h.query(req("name:\"john smith\"")); + } catch (SolrException e) { + assertThat(e.getMessage(), containsString("Circuit Breakers tripped")); + failureCount.incrementAndGet(); + } catch (Exception e) { + throw new RuntimeException(e.getMessage()); + } + }); futures.add(future); } - for (Future<?> future : futures) { + for (Future<?> future : futures) { try { future.get(); } catch (Exception e) { @@ -235,7 +250,7 @@ public void testFakeCPUCircuitBreaker() { throw new RuntimeException(e.getMessage()); } - assertEquals("Number of failed queries is not correct",5, failureCount.get()); + assertEquals("Number of failed queries is not correct", 5, failureCount.get()); } finally { if (!executor.isShutdown()) { executor.shutdown(); @@ -244,7 +259,8 @@ public void testFakeCPUCircuitBreaker() { } public void testResponseWithCBTiming() { - assertQ(req("q", "*:*", CommonParams.DEBUG_QUERY, "true"), + assertQ( + req("q", "*:*", CommonParams.DEBUG_QUERY, "true"), "//str[@name='rawquerystring']='*:*'", "//str[@name='querystring']='*:*'", "//str[@name='parsedquery']='MatchAllDocsQuery(*:*)'", @@ -261,12 +277,12 @@ public void testResponseWithCBTiming() { "count(//lst[@name='prepare']/*)>0", "//lst[@name='prepare']/double[@name='time']", "count(//lst[@name='process']/*)>0", - "//lst[@name='process']/double[@name='time']" - ); + "//lst[@name='process']/double[@name='time']"); } private void removeAllExistingCircuitBreakers() { - List<CircuitBreaker> registeredCircuitBreakers = h.getCore().getCircuitBreakerManager().getRegisteredCircuitBreakers(); + List<CircuitBreaker> registeredCircuitBreakers = + h.getCore().getCircuitBreakerManager().getRegisteredCircuitBreakers(); registeredCircuitBreakers.clear(); } @@ -316,17 +332,20 @@ protected long calculateLiveMemoryUsage() { int localCount = count.getAndIncrement(); if (localCount >= 4) { - //TODO: To be removed + // TODO: To be removed if (log.isInfoEnabled()) { - String 
logMessage = "Blocking query from BuildingUpMemoryPressureCircuitBreaker for count " + localCount; + String logMessage = + "Blocking query from BuildingUpMemoryPressureCircuitBreaker for count " + localCount; log.info(logMessage); } return Long.MAX_VALUE; } - //TODO: To be removed + // TODO: To be removed if (log.isInfoEnabled()) { - String logMessage = "BuildingUpMemoryPressureCircuitBreaker: Returning unblocking value for count " + localCount; + String logMessage = + "BuildingUpMemoryPressureCircuitBreaker: Returning unblocking value for count " + + localCount; log.info(logMessage); } return Long.MIN_VALUE; // Random number guaranteed to not trip the circuit breaker diff --git a/solr/core/src/test/org/apache/solr/util/TestExportTool.java b/solr/core/src/test/org/apache/solr/util/TestExportTool.java index a69d733cb68..3d66b85bcf8 100644 --- a/solr/core/src/test/org/apache/solr/util/TestExportTool.java +++ b/solr/core/src/test/org/apache/solr/util/TestExportTool.java @@ -27,7 +27,6 @@ import java.util.HashMap; import java.util.Map; import java.util.function.Predicate; - import org.apache.lucene.util.TestUtil; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.SolrQuery; @@ -49,36 +48,37 @@ public class TestExportTool extends SolrCloudTestCase { public void testBasic() throws Exception { String COLLECTION_NAME = "globalLoaderColl"; - configureCluster(4) - .addConfig("conf", configset("cloud-dynamic")) - .configure(); + configureCluster(4).addConfig("conf", configset("cloud-dynamic")).configure(); try { - CollectionAdminRequest - .createCollection(COLLECTION_NAME, "conf", 2, 1) + CollectionAdminRequest.createCollection(COLLECTION_NAME, "conf", 2, 1) .process(cluster.getSolrClient()); cluster.waitForActiveCollection(COLLECTION_NAME, 2, 2); - String tmpFileLoc = new File(cluster.getBaseDir().toFile().getAbsolutePath() + - File.separator).getPath(); + String tmpFileLoc = + new File(cluster.getBaseDir().toFile().getAbsolutePath() + File.separator).getPath(); UpdateRequest ur = new UpdateRequest(); ur.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); int docCount = 1000; for (int i = 0; i < docCount; i++) { - ur.add("id", String.valueOf(i), - "desc_s", TestUtil.randomSimpleString(random(), 10, 50) , - "a_dt", "2019-09-30T05:58:03Z"); + ur.add( + "id", + String.valueOf(i), + "desc_s", + TestUtil.randomSimpleString(random(), 10, 50), + "a_dt", + "2019-09-30T05:58:03Z"); } cluster.getSolrClient().request(ur, COLLECTION_NAME); - QueryResponse qr = cluster.getSolrClient().query(COLLECTION_NAME, new SolrQuery("*:*").setRows(0)); + QueryResponse qr = + cluster.getSolrClient().query(COLLECTION_NAME, new SolrQuery("*:*").setRows(0)); assertEquals(docCount, qr.getResults().getNumFound()); String url = cluster.getRandomJetty(random()).getBaseUrl() + "/" + COLLECTION_NAME; - ExportTool.Info info = new ExportTool.MultiThreadedRunner(url); String absolutePath = tmpFileLoc + COLLECTION_NAME + random().nextInt(100000) + ".json"; info.setOutFormat(absolutePath, "jsonl"); @@ -95,7 +95,7 @@ public void testBasic() throws Exception { info.fields = "id,desc_s"; info.exportDocs(); - assertJsonDocsCount(info, 1000,null); + assertJsonDocsCount(info, 1000, null); info = new ExportTool.MultiThreadedRunner(url); absolutePath = tmpFileLoc + COLLECTION_NAME + random().nextInt(100000) + ".javabin"; @@ -116,28 +116,23 @@ public void testBasic() throws Exception { } finally { cluster.shutdown(); - } } @Nightly public void testVeryLargeCluster() throws Exception { String COLLECTION_NAME = 
"veryLargeColl"; - configureCluster(4) - .addConfig("conf", configset("cloud-minimal")) - .configure(); + configureCluster(4).addConfig("conf", configset("cloud-minimal")).configure(); try { - CollectionAdminRequest - .createCollection(COLLECTION_NAME, "conf", 8, 1) + CollectionAdminRequest.createCollection(COLLECTION_NAME, "conf", 8, 1) .process(cluster.getSolrClient()); cluster.waitForActiveCollection(COLLECTION_NAME, 8, 8); - String tmpFileLoc = new File(cluster.getBaseDir().toFile().getAbsolutePath() + - File.separator).getPath(); + String tmpFileLoc = + new File(cluster.getBaseDir().toFile().getAbsolutePath() + File.separator).getPath(); String url = cluster.getRandomJetty(random()).getBaseUrl() + "/" + COLLECTION_NAME; - int docCount = 0; for (int j = 0; j < 4; j++) { @@ -145,16 +140,22 @@ public void testVeryLargeCluster() throws Exception { UpdateRequest ur = new UpdateRequest(); ur.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); for (int i = 0; i < bsz; i++) { - ur.add("id", String.valueOf((j * bsz) + i), "desc_s", TestUtil.randomSimpleString(random(), 10, 50)); + ur.add( + "id", + String.valueOf((j * bsz) + i), + "desc_s", + TestUtil.randomSimpleString(random(), 10, 50)); } cluster.getSolrClient().request(ur, COLLECTION_NAME); docCount += bsz; } - QueryResponse qr = cluster.getSolrClient().query(COLLECTION_NAME, new SolrQuery("*:*").setRows(0)); + QueryResponse qr = + cluster.getSolrClient().query(COLLECTION_NAME, new SolrQuery("*:*").setRows(0)); assertEquals(docCount, qr.getResults().getNumFound()); - DocCollection coll = cluster.getSolrClient().getClusterStateProvider().getCollection(COLLECTION_NAME); + DocCollection coll = + cluster.getSolrClient().getClusterStateProvider().getCollection(COLLECTION_NAME); HashMap docCounts = new HashMap<>(); long totalDocsFromCores = 0; for (Slice slice : coll.getSlices()) { @@ -178,7 +179,8 @@ public void testVeryLargeCluster() throws Exception { info.exportDocs(); assertJavabinDocsCount(info, docCount); for (Map.Entry e : docCounts.entrySet()) { - assertEquals(e.getValue().longValue(), info.corehandlers.get(e.getKey()).receivedDocs.get()); + assertEquals( + e.getValue().longValue(), info.corehandlers.get(e.getKey()).receivedDocs.get()); } info = new ExportTool.MultiThreadedRunner(url); info.output = System.out; @@ -188,47 +190,55 @@ public void testVeryLargeCluster() throws Exception { info.setLimit("-1"); info.exportDocs(); long actual = ((ExportTool.JsonSink) info.sink).info.docsWritten.get(); - assertTrue("docs written :" + actual + "docs produced : " + info.docsWritten.get(), actual >= docCount); - assertJsonDocsCount(info, docCount,null); + assertTrue( + "docs written :" + actual + "docs produced : " + info.docsWritten.get(), + actual >= docCount); + assertJsonDocsCount(info, docCount, null); } finally { cluster.shutdown(); - } } - private void assertJavabinDocsCount(ExportTool.Info info, int expected) throws IOException { - assertTrue("" + info.docsWritten.get() + " expected " + expected, info.docsWritten.get() >= expected); + assertTrue( + "" + info.docsWritten.get() + " expected " + expected, info.docsWritten.get() >= expected); FileInputStream fis = new FileInputStream(info.out); try { - int[] count = new int[]{0}; + int[] count = new int[] {0}; FastInputStream in = FastInputStream.wrap(fis); new JavaBinUpdateRequestCodec() - .unmarshal(in, (document, req, commitWithin, override) -> { - assertEquals(2, document.size()); - count[0]++; - }); + .unmarshal( + in, + (document, req, commitWithin, override) -> { + 
assertEquals(2, document.size()); + count[0]++; + }); assertTrue(count[0] >= expected); } finally { fis.close(); } } - private void assertJsonDocsCount(ExportTool.Info info, int expected, Predicate> predicate) throws IOException { - assertTrue("" + info.docsWritten.get() + " expected " + expected, info.docsWritten.get() >= expected); + private void assertJsonDocsCount( + ExportTool.Info info, int expected, Predicate> predicate) + throws IOException { + assertTrue( + "" + info.docsWritten.get() + " expected " + expected, info.docsWritten.get() >= expected); JsonRecordReader jsonReader; Reader rdr; jsonReader = JsonRecordReader.getInst("/", Arrays.asList("$FQN:/**")); rdr = new InputStreamReader(new FileInputStream(info.out), StandardCharsets.UTF_8); try { - int[] count = new int[]{0}; - jsonReader.streamRecords(rdr, (record, path) -> { - if(predicate != null){ - assertTrue(predicate.test(record)); - } - count[0]++; - }); + int[] count = new int[] {0}; + jsonReader.streamRecords( + rdr, + (record, path) -> { + if (predicate != null) { + assertTrue(predicate.test(record)); + } + count[0]++; + }); assertTrue(count[0] >= expected); } finally { rdr.close(); diff --git a/solr/core/src/test/org/apache/solr/util/TestFastOutputStream.java b/solr/core/src/test/org/apache/solr/util/TestFastOutputStream.java index f7176209732..1122f6abe05 100644 --- a/solr/core/src/test/org/apache/solr/util/TestFastOutputStream.java +++ b/solr/core/src/test/org/apache/solr/util/TestFastOutputStream.java @@ -16,11 +16,10 @@ */ package org.apache.solr.util; +import java.util.Random; import org.apache.solr.SolrTestCase; import org.apache.solr.update.MemOutputStream; -import java.util.Random; - public class TestFastOutputStream extends SolrTestCase { Random rand; @@ -30,24 +29,23 @@ public void testRandomWrites() throws Exception { rand = random(); arr = new byte[20000]; - for (int i=0; i\n" + - "\n" + - "]>\n" + - "&internalTerm;&externalTerm;"); + // TODO: Fix the underlying EmptyEntityResolver to not replace external entities by nothing and + // instead throw exception: + Document doc = + SafeXMLParsing.parseUntrustedXML( + log, + "\n" + + "\n" + + "]>\n" + + "&internalTerm;&externalTerm;"); assertEquals("foobar", doc.getDocumentElement().getTextContent()); } - + InputStream getStringStream(String xml) { return new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)); } - + public void testConfig() throws Exception { - final ResourceLoader loader = new ResourceLoader() { - @Override - public InputStream openResource(String resource) throws IOException { - switch (resource) { - case "source1.xml": - return getStringStream("\n" + - "]>\n" + - "&externalTerm;"); - case "source2.xml": - return getStringStream("\n" + - "]>\n" + - "&externalTerm;"); - case "source3.xml": - return getStringStream("\n" + - " \n" + - ""); - case "include1.xml": - return getStringStream("Make XML Great Again!™"); - case "include2.xml": - return getStringStream("Make XML Great Again!™"); - } - throw new IOException("Resource not found: " + resource); - } + final ResourceLoader loader = + new ResourceLoader() { + @Override + public InputStream openResource(String resource) throws IOException { + switch (resource) { + case "source1.xml": + return getStringStream( + "\n" + + "]>\n" + + "&externalTerm;"); + case "source2.xml": + return getStringStream( + "\n" + + "]>\n" + + "&externalTerm;"); + case "source3.xml": + return getStringStream( + "\n" + + " \n" + + ""); + case "include1.xml": + return getStringStream("Make XML Great Again!™"); + case 
"include2.xml": + return getStringStream("Make XML Great Again!™"); + } + throw new IOException("Resource not found: " + resource); + } - @Override - public Class findClass(String cname, Class expectedType) { - throw new UnsupportedOperationException(); - } + @Override + public Class findClass(String cname, Class expectedType) { + throw new UnsupportedOperationException(); + } - @Override - public T newInstance(String cname, Class expectedType) { - throw new UnsupportedOperationException(); - } - - }; - - IOException ioe = expectThrows(IOException.class, () -> { - SafeXMLParsing.parseConfigXML(log, loader, "source1.xml"); - }); + @Override + public T newInstance(String cname, Class expectedType) { + throw new UnsupportedOperationException(); + } + }; + + IOException ioe = + expectThrows( + IOException.class, + () -> { + SafeXMLParsing.parseConfigXML(log, loader, "source1.xml"); + }); assertTrue(ioe.getMessage().contains("Cannot resolve absolute systemIDs")); - + Document doc = SafeXMLParsing.parseConfigXML(log, loader, "source2.xml"); assertEquals("Make XML Great Again!™", doc.getDocumentElement().getTextContent()); - + doc = SafeXMLParsing.parseConfigXML(log, loader, "source3.xml"); assertEquals("Make XML Great Again!™", doc.getDocumentElement().getTextContent().trim()); } - } diff --git a/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java b/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java index c15abeddb0f..cb81d752de4 100644 --- a/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java +++ b/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java @@ -34,7 +34,6 @@ import java.util.Arrays; import java.util.List; import java.util.Map; - import org.apache.commons.exec.DefaultExecutor; import org.apache.commons.exec.ExecuteResultHandler; import org.apache.lucene.util.LuceneTestCase; @@ -54,19 +53,17 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; - -/** - * Tests the SolrCLI.RunExampleTool implementation that supports bin/solr -e [example] - */ +/** Tests the SolrCLI.RunExampleTool implementation that supports bin/solr -e [example] */ @LuceneTestCase.Slow @SolrTestCaseJ4.SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776") public class TestSolrCLIRunExample extends SolrTestCaseJ4 { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - + @BeforeClass public static void beforeClass() throws IOException { - assumeFalse("FIXME: This test does not work with whitespace in CWD (https://issues.apache.org/jira/browse/SOLR-8877)", + assumeFalse( + "FIXME: This test does not work with whitespace in CWD (https://issues.apache.org/jira/browse/SOLR-8877)", Paths.get(".").toAbsolutePath().toString().contains(" ")); // to be true System.setProperty("solr.directoryFactory", "solr.NRTCachingDirectoryFactory"); @@ -76,10 +73,10 @@ public static void beforeClass() throws IOException { public static void cleanupDirectoryFactory() throws IOException { System.clearProperty("solr.directoryFactory"); } - + /** - * Overrides the call to exec bin/solr to start Solr nodes to start them using the Solr test-framework - * instead of the script, since the script depends on a full build. + * Overrides the call to exec bin/solr to start Solr nodes to start them using the Solr + * test-framework instead of the script, since the script depends on a full build. 
*/ private static class RunExampleExecutor extends DefaultExecutor implements Closeable { @@ -94,12 +91,16 @@ private static class RunExampleExecutor extends DefaultExecutor implements Close } /** - * Override the call to execute a command asynchronously to occur synchronously during a unit test. + * Override the call to execute a command asynchronously to occur synchronously during a unit + * test. */ @Override - public void execute(org.apache.commons.exec.CommandLine cmd, Map env, ExecuteResultHandler erh) throws IOException { + public void execute( + org.apache.commons.exec.CommandLine cmd, Map env, ExecuteResultHandler erh) + throws IOException { int code = execute(cmd); - if (code != 0) throw new RuntimeException("Failed to execute cmd: "+joinArgs(cmd.getArguments())); + if (code != 0) + throw new RuntimeException("Failed to execute cmd: " + joinArgs(cmd.getArguments())); } @Override @@ -111,30 +112,32 @@ public int execute(org.apache.commons.exec.CommandLine cmd) throws IOException { if (exe.endsWith("solr")) { String[] args = cmd.getArguments(); if ("start".equals(args[0])) { - if (!hasFlag("-cloud", args) && !hasFlag("-c", args)) - return startStandaloneSolr(args); + if (!hasFlag("-cloud", args) && !hasFlag("-c", args)) return startStandaloneSolr(args); String solrHomeDir = getArg("-s", args); int port = Integer.parseInt(getArg("-p", args)); - String solrxml = new String(Files.readAllBytes(Paths.get(solrHomeDir).resolve("solr.xml")), Charset.defaultCharset()); + String solrxml = + new String( + Files.readAllBytes(Paths.get(solrHomeDir).resolve("solr.xml")), + Charset.defaultCharset()); - JettyConfig jettyConfig = - JettyConfig.builder().setContext("/solr").setPort(port).build(); + JettyConfig jettyConfig = JettyConfig.builder().setContext("/solr").setPort(port).build(); try { if (solrCloudCluster == null) { Path logDir = createTempDir("solr_logs"); System.setProperty("solr.log.dir", logDir.toString()); System.setProperty("host", "localhost"); System.setProperty("jetty.port", String.valueOf(port)); - solrCloudCluster = - new MiniSolrCloudCluster(1, createTempDir(), solrxml, jettyConfig); + solrCloudCluster = new MiniSolrCloudCluster(1, createTempDir(), solrxml, jettyConfig); } else { - // another member of this cluster -- not supported yet, due to how MiniSolrCloudCluster works - throw new IllegalArgumentException("Only launching one SolrCloud node is supported by this test!"); + // another member of this cluster -- not supported yet, due to how + // MiniSolrCloudCluster works + throw new IllegalArgumentException( + "Only launching one SolrCloud node is supported by this test!"); } } catch (Exception e) { if (e instanceof RuntimeException) { - throw (RuntimeException)e; + throw (RuntimeException) e; } else { throw new RuntimeException(e); } @@ -152,13 +155,13 @@ public int execute(org.apache.commons.exec.CommandLine cmd) throws IOException { log.info("Stopped standalone Solr instance running on port {}", port); } catch (Exception e) { if (e instanceof RuntimeException) { - throw (RuntimeException)e; + throw (RuntimeException) e; } else { throw new RuntimeException(e); } } } else { - throw new IllegalArgumentException("No Solr is running on port "+port); + throw new IllegalArgumentException("No Solr is running on port " + port); } } else { if (solrCloudCluster != null) { @@ -167,7 +170,7 @@ public int execute(org.apache.commons.exec.CommandLine cmd) throws IOException { log.info("Stopped SolrCloud test cluster"); } catch (Exception e) { if (e instanceof RuntimeException) { - throw 
(RuntimeException)e; + throw (RuntimeException) e; } else { throw new RuntimeException(e); } @@ -192,11 +195,10 @@ public int execute(org.apache.commons.exec.CommandLine cmd) throws IOException { } protected String joinArgs(String[] args) { - if (args == null || args.length == 0) - return ""; + if (args == null || args.length == 0) return ""; StringBuilder sb = new StringBuilder(); - for (int a=0; a < args.length; a++) { + for (int a = 0; a < args.length; a++) { if (a > 0) sb.append(' '); sb.append(args[a]); } @@ -206,13 +208,16 @@ protected String joinArgs(String[] args) { protected int startStandaloneSolr(String[] args) { if (standaloneSolr != null) { - throw new IllegalStateException("Test is already running a standalone Solr instance "+ - standaloneSolr.getBaseUrl()+"! This indicates a bug in the unit test logic."); + throw new IllegalStateException( + "Test is already running a standalone Solr instance " + + standaloneSolr.getBaseUrl() + + "! This indicates a bug in the unit test logic."); } if (solrCloudCluster != null) { - throw new IllegalStateException("Test is already running a mini SolrCloud cluster! "+ - "This indicates a bug in the unit test logic."); + throw new IllegalStateException( + "Test is already running a mini SolrCloud cluster! " + + "This indicates a bug in the unit test logic."); } int port = Integer.parseInt(getArg("-p", args)); @@ -224,41 +229,45 @@ protected int startStandaloneSolr(String[] args) { System.setProperty("solr.log.dir", createTempDir("solr_logs").toString()); standaloneSolr = new JettySolrRunner(solrHomeDir.getAbsolutePath(), "/solr", port); - Thread bg = new Thread() { - public void run() { - try { - standaloneSolr.start(); - } catch (Exception e) { - if (e instanceof RuntimeException) { - throw (RuntimeException)e; - } else { - throw new RuntimeException(e); + Thread bg = + new Thread() { + public void run() { + try { + standaloneSolr.start(); + } catch (Exception e) { + if (e instanceof RuntimeException) { + throw (RuntimeException) e; + } else { + throw new RuntimeException(e); + } + } } - } - } - }; + }; bg.start(); return 0; } protected String getArg(String arg, String[] args) { - for (int a=0; a < args.length; a++) { + for (int a = 0; a < args.length; a++) { if (arg.equals(args[a])) { - if (a+1 >= args.length) - throw new IllegalArgumentException("Missing required value for the "+arg+" option!"); + if (a + 1 >= args.length) + throw new IllegalArgumentException( + "Missing required value for the " + arg + " option!"); return args[a + 1]; } } - throw new IllegalArgumentException("Missing required arg "+arg+ - " needed to execute command: "+commandsExecuted.get(commandsExecuted.size()-1)); + throw new IllegalArgumentException( + "Missing required arg " + + arg + + " needed to execute command: " + + commandsExecuted.get(commandsExecuted.size() - 1)); } protected boolean hasFlag(String flag, String[] args) { for (String arg : args) { - if (flag.equals(arg)) - return true; + if (flag.equals(arg)) return true; } return false; } @@ -294,14 +303,15 @@ public void tearDown() throws Exception { for (Closeable toClose : closeables) { try { toClose.close(); - } catch (Exception ignore) {} + } catch (Exception ignore) { + } } closeables.clear(); closeables = null; } } - @Test + @Test public void testTechproductsExample() throws Exception { testExample("techproducts"); } @@ -314,74 +324,89 @@ public void testSchemalessExample() throws Exception { protected void testExample(String exampleName) throws Exception { File solrHomeDir = new 
File(ExternalPaths.SERVER_HOME); if (!solrHomeDir.isDirectory()) - fail(solrHomeDir.getAbsolutePath()+" not found and is required to run this test!"); + fail(solrHomeDir.getAbsolutePath() + " not found and is required to run this test!"); Path tmpDir = createTempDir(); File solrExampleDir = tmpDir.toFile(); File solrServerDir = solrHomeDir.getParentFile(); - for (int pass = 0; pass<2; pass++){ + for (int pass = 0; pass < 2; pass++) { // need a port to start the example server on int bindPort = -1; try (ServerSocket socket = new ServerSocket(0)) { bindPort = socket.getLocalPort(); } - + log.info("Selected port {} to start {} example Solr instance on ...", bindPort, exampleName); - - String[] toolArgs = new String[] { - "-e", exampleName, - "-serverDir", solrServerDir.getAbsolutePath(), - "-exampleDir", solrExampleDir.getAbsolutePath(), - "-p", String.valueOf(bindPort) - }; - + + String[] toolArgs = + new String[] { + "-e", exampleName, + "-serverDir", solrServerDir.getAbsolutePath(), + "-exampleDir", solrExampleDir.getAbsolutePath(), + "-p", String.valueOf(bindPort) + }; + // capture tool output to stdout ByteArrayOutputStream baos = new ByteArrayOutputStream(); PrintStream stdoutSim = new PrintStream(baos, true, StandardCharsets.UTF_8.name()); - + RunExampleExecutor executor = new RunExampleExecutor(stdoutSim); closeables.add(executor); - + SolrCLI.RunExampleTool tool = new SolrCLI.RunExampleTool(executor, System.in, stdoutSim); try { - int status = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), toolArgs)); - + int status = + tool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(tool.getOptions()), toolArgs)); + if (status == -1) { // maybe it's the port, try again try (ServerSocket socket = new ServerSocket(0)) { bindPort = socket.getLocalPort(); } Thread.sleep(100); - status = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), toolArgs)); + status = + tool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(tool.getOptions()), toolArgs)); } - - assertEquals("it should be ok "+tool+" "+Arrays.toString(toolArgs),0, status); + + assertEquals("it should be ok " + tool + " " + Arrays.toString(toolArgs), 0, status); } catch (Exception e) { - log.error("RunExampleTool failed due to: {}; stdout from tool prior to failure: {}" - , e , baos.toString(StandardCharsets.UTF_8.name())); // nowarn + log.error( + "RunExampleTool failed due to: {}; stdout from tool prior to failure: {}", + e, + baos.toString(StandardCharsets.UTF_8.name())); // nowarn throw e; } - + String toolOutput = baos.toString(StandardCharsets.UTF_8.name()); - + // dump all the output written by the SolrCLI commands to stdout - //System.out.println("\n\n"+toolOutput+"\n\n"); - - File exampleSolrHomeDir = new File(solrExampleDir, exampleName+"/solr"); - assertTrue(exampleSolrHomeDir.getAbsolutePath() + " not found! run " + - exampleName + " example failed; output: " + toolOutput, + // System.out.println("\n\n"+toolOutput+"\n\n"); + + File exampleSolrHomeDir = new File(solrExampleDir, exampleName + "/solr"); + assertTrue( + exampleSolrHomeDir.getAbsolutePath() + + " not found! 
run " + + exampleName + + " example failed; output: " + + toolOutput, exampleSolrHomeDir.isDirectory()); - + if ("techproducts".equals(exampleName)) { - HttpSolrClient solrClient = getHttpSolrClient("http://localhost:" + bindPort + "/solr/" + exampleName); - try{ + HttpSolrClient solrClient = + getHttpSolrClient("http://localhost:" + bindPort + "/solr/" + exampleName); + try { SolrQuery query = new SolrQuery("*:*"); QueryResponse qr = solrClient.query(query); long numFound = qr.getResults().getNumFound(); if (numFound == 0) { // brief wait in case of timing issue in getting the new docs committed - log.warn("Going to wait for 1 second before re-trying query for techproduct example docs ..."); + log.warn( + "Going to wait for 1 second before re-trying query for techproduct example docs ..."); try { Thread.sleep(1000); } catch (InterruptedException ignore) { @@ -389,39 +414,46 @@ protected void testExample(String exampleName) throws Exception { } numFound = solrClient.query(query).getResults().getNumFound(); } - assertTrue("expected 32 docs in the " + exampleName + " example but found " + numFound + ", output: " + toolOutput, + assertTrue( + "expected 32 docs in the " + + exampleName + + " example but found " + + numFound + + ", output: " + + toolOutput, numFound == 32); - }finally{ + } finally { solrClient.close(); } } - + // stop the test instance executor.execute(org.apache.commons.exec.CommandLine.parse("bin/solr stop -p " + bindPort)); } } /** - * Tests the interactive SolrCloud example; we cannot test the non-interactive because we need control over - * the port and can only test with one node since the test relies on setting the host and jetty.port system - * properties, i.e. there is no test coverage for the -noprompt option. + * Tests the interactive SolrCloud example; we cannot test the non-interactive because we need + * control over the port and can only test with one node since the test relies on setting the host + * and jetty.port system properties, i.e. there is no test coverage for the -noprompt option. */ @Test public void testInteractiveSolrCloudExample() throws Exception { File solrHomeDir = new File(ExternalPaths.SERVER_HOME); if (!solrHomeDir.isDirectory()) - fail(solrHomeDir.getAbsolutePath()+" not found and is required to run this test!"); + fail(solrHomeDir.getAbsolutePath() + " not found and is required to run this test!"); Path tmpDir = createTempDir(); File solrExampleDir = tmpDir.toFile(); File solrServerDir = solrHomeDir.getParentFile(); - String[] toolArgs = new String[] { - "-example", "cloud", - "-serverDir", solrServerDir.getAbsolutePath(), - "-exampleDir", solrExampleDir.getAbsolutePath() - }; + String[] toolArgs = + new String[] { + "-example", "cloud", + "-serverDir", solrServerDir.getAbsolutePath(), + "-exampleDir", solrExampleDir.getAbsolutePath() + }; int bindPort = -1; try (ServerSocket socket = new ServerSocket(0)) { @@ -432,7 +464,7 @@ public void testInteractiveSolrCloudExample() throws Exception { // this test only supports launching one SolrCloud node due to how MiniSolrCloudCluster works // and the need for setting the host and port system properties ... 
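
A note on the port-selection idiom these tests use: binding a ServerSocket to port 0 asks the OS for any free ephemeral port, but the socket is closed again before Solr starts, so another process can grab the port in the gap. That race is why testExample() retries once when the tool returns -1. A minimal self-contained sketch of the idiom under that assumption; the FreePort/pick names are illustrative, not part of SolrCLI:

import java.io.IOException;
import java.net.ServerSocket;

final class FreePort {
  // Bind to port 0 so the OS assigns any free ephemeral port, then release it.
  // The port is only *probably* still free afterwards, which is why callers
  // (like the tests here) must tolerate a bind failure and pick again.
  static int pick() throws IOException {
    try (ServerSocket socket = new ServerSocket(0)) {
      return socket.getLocalPort();
    }
  }
}
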
- String userInput = "1\n"+bindPort+"\n"+collectionName+"\n2\n2\n_default\n"; + String userInput = "1\n" + bindPort + "\n" + collectionName + "\n2\n2\n_default\n"; // simulate user input from stdin InputStream userInputSim = new ByteArrayInputStream(userInput.getBytes(StandardCharsets.UTF_8)); @@ -446,21 +478,31 @@ public void testInteractiveSolrCloudExample() throws Exception { SolrCLI.RunExampleTool tool = new SolrCLI.RunExampleTool(executor, userInputSim, stdoutSim); try { - tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), toolArgs)); + tool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(tool.getOptions()), toolArgs)); } catch (Exception e) { - System.err.println("RunExampleTool failed due to: " + e + - "; stdout from tool prior to failure: " + baos.toString(StandardCharsets.UTF_8.name())); + System.err.println( + "RunExampleTool failed due to: " + + e + + "; stdout from tool prior to failure: " + + baos.toString(StandardCharsets.UTF_8.name())); throw e; } String toolOutput = baos.toString(StandardCharsets.UTF_8.name()); // verify Solr is running on the expected port and verify the collection exists - String solrUrl = "http://localhost:"+bindPort+"/solr"; - String collectionListUrl = solrUrl+"/admin/collections?action=list"; + String solrUrl = "http://localhost:" + bindPort + "/solr"; + String collectionListUrl = solrUrl + "/admin/collections?action=list"; if (!SolrCLI.safeCheckCollectionExists(collectionListUrl, collectionName)) { - fail("After running Solr cloud example, test collection '"+collectionName+ - "' not found in Solr at: "+solrUrl+"; tool output: "+toolOutput); + fail( + "After running Solr cloud example, test collection '" + + collectionName + + "' not found in Solr at: " + + solrUrl + + "; tool output: " + + toolOutput); } // index some docs - to verify all is good for both shards @@ -472,9 +514,9 @@ public void testInteractiveSolrCloudExample() throws Exception { cloudClient.setDefaultCollection(collectionName); int numDocs = 10; - for (int d=0; d < numDocs; d++) { + for (int d = 0; d < numDocs; d++) { SolrInputDocument doc = new SolrInputDocument(); - doc.setField("id", "doc"+d); + doc.setField("id", "doc" + d); doc.setField("str_s", "a"); cloudClient.add(doc); } @@ -482,30 +524,40 @@ public void testInteractiveSolrCloudExample() throws Exception { QueryResponse qr = cloudClient.query(new SolrQuery("str_s:a")); if (qr.getResults().getNumFound() != numDocs) { - fail("Expected "+numDocs+" to be found in the "+collectionName+ - " collection but only found "+qr.getResults().getNumFound()); + fail( + "Expected " + + numDocs + + " to be found in the " + + collectionName + + " collection but only found " + + qr.getResults().getNumFound()); } } finally { if (cloudClient != null) { try { cloudClient.close(); - } catch (Exception ignore){} + } catch (Exception ignore) { + } } } File node1SolrHome = new File(solrExampleDir, "cloud/node1/solr"); if (!node1SolrHome.isDirectory()) { - fail(node1SolrHome.getAbsolutePath() + " not found! run cloud example failed; tool output: " + toolOutput); + fail( + node1SolrHome.getAbsolutePath() + + " not found! 
run cloud example failed; tool output: " + + toolOutput); } // delete the collection SolrCLI.DeleteTool deleteTool = new SolrCLI.DeleteTool(stdoutSim); - String[] deleteArgs = new String[]{"-name", collectionName, "-solrUrl", solrUrl}; + String[] deleteArgs = new String[] {"-name", collectionName, "-solrUrl", solrUrl}; deleteTool.runTool( - SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(deleteTool.getOptions()), deleteArgs)); + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(deleteTool.getOptions()), deleteArgs)); // dump all the output written by the SolrCLI commands to stdout - //System.out.println(toolOutput); + // System.out.println(toolOutput); // stop the test instance executor.execute(org.apache.commons.exec.CommandLine.parse("bin/solr stop -p " + bindPort)); @@ -515,8 +567,8 @@ public void testInteractiveSolrCloudExample() throws Exception { public void testFailExecuteScript() throws Exception { File solrHomeDir = new File(ExternalPaths.SERVER_HOME); if (!solrHomeDir.isDirectory()) - fail(solrHomeDir.getAbsolutePath()+" not found and is required to run this test!"); - + fail(solrHomeDir.getAbsolutePath() + " not found and is required to run this test!"); + Path tmpDir = createTempDir(); File solrExampleDir = tmpDir.toFile(); File solrServerDir = solrHomeDir.getParentFile(); @@ -528,15 +580,18 @@ public void testFailExecuteScript() throws Exception { } File toExecute = new File(tmpDir.toString(), "failExecuteScript"); - assertTrue("Should have been able to create file '" + toExecute.getAbsolutePath() + "' ", toExecute.createNewFile()); - - String[] toolArgs = new String[] { - "-e", "techproducts", - "-serverDir", solrServerDir.getAbsolutePath(), - "-exampleDir", solrExampleDir.getAbsolutePath(), - "-p", String.valueOf(bindPort), - "-script", toExecute.getAbsolutePath().toString() - }; + assertTrue( + "Should have been able to create file '" + toExecute.getAbsolutePath() + "' ", + toExecute.createNewFile()); + + String[] toolArgs = + new String[] { + "-e", "techproducts", + "-serverDir", solrServerDir.getAbsolutePath(), + "-exampleDir", solrExampleDir.getAbsolutePath(), + "-p", String.valueOf(bindPort), + "-script", toExecute.getAbsolutePath().toString() + }; // capture tool output to stdout ByteArrayOutputStream baos = new ByteArrayOutputStream(); @@ -545,7 +600,10 @@ public void testFailExecuteScript() throws Exception { DefaultExecutor executor = new DefaultExecutor(); SolrCLI.RunExampleTool tool = new SolrCLI.RunExampleTool(executor, System.in, stdoutSim); - int code = tool.runTool(SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), toolArgs)); + int code = + tool.runTool( + SolrCLI.processCommandLineArgs( + SolrCLI.joinCommonAndToolOptions(tool.getOptions()), toolArgs)); assertTrue("Execution should have failed with return code 1", code == 1); } } diff --git a/solr/core/src/test/org/apache/solr/util/TestSolrJacksonAnnotation.java b/solr/core/src/test/org/apache/solr/util/TestSolrJacksonAnnotation.java index 41dbac5f3a6..317b60df709 100644 --- a/solr/core/src/test/org/apache/solr/util/TestSolrJacksonAnnotation.java +++ b/solr/core/src/test/org/apache/solr/util/TestSolrJacksonAnnotation.java @@ -17,10 +17,9 @@ package org.apache.solr.util; +import com.fasterxml.jackson.databind.ObjectMapper; import java.util.List; import java.util.Map; - -import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.solr.SolrTestCase; import org.apache.solr.common.annotation.JsonProperty; import 
org.apache.solr.common.util.JsonSchemaCreator; @@ -42,10 +41,10 @@ public void testSerDe() throws Exception { @SuppressWarnings("unchecked") Map m = (Map) Utils.fromJSONString(json); - assertEquals("v1", m.get("field")); - assertEquals("v2", m.get("friendlyName")); - assertEquals("1234", String.valueOf(m.get("friendlyIntFld"))); - assertEquals("5678", String.valueOf(m.get("friendlyLongFld"))); + assertEquals("v1", m.get("field")); + assertEquals("v2", m.get("friendlyName")); + assertEquals("1234", String.valueOf(m.get("friendlyIntFld"))); + assertEquals("5678", String.valueOf(m.get("friendlyLongFld"))); TestObj o1 = mapper.readValue(json, TestObj.class); assertEquals("v1", o1.field); @@ -54,11 +53,10 @@ public void testSerDe() throws Exception { assertEquals(5678L, o1.lfld); Map schema = JsonSchemaCreator.getSchema(TestObj.class); - assertEquals("string", Utils.getObjectByPath(schema,true,"/properties/friendlyName/type")); - assertEquals("integer", Utils.getObjectByPath(schema,true,"/properties/friendlyIntFld/type")); - assertEquals("long", Utils.getObjectByPath(schema,true,"/properties/friendlyLongFld/type")); - assertEquals("friendlyName", Utils.getObjectByPath(schema,true,"/required[0]")); - + assertEquals("string", Utils.getObjectByPath(schema, true, "/properties/friendlyName/type")); + assertEquals("integer", Utils.getObjectByPath(schema, true, "/properties/friendlyIntFld/type")); + assertEquals("long", Utils.getObjectByPath(schema, true, "/properties/friendlyLongFld/type")); + assertEquals("friendlyName", Utils.getObjectByPath(schema, true, "/required[0]")); JsonSchemaValidator validator = new JsonSchemaValidator(schema); List errs = validator.validateJson(m); @@ -75,16 +73,15 @@ public void testSerDe() throws Exception { assertTrue(errs.get(0).contains("Value is not valid")); } - - - public static class TestObj { - @JsonProperty() - public String field; - @JsonProperty(value = "friendlyName" ,required = true) + @JsonProperty() public String field; + + @JsonProperty(value = "friendlyName", required = true) public String f2; + @JsonProperty("friendlyIntFld") public int ifld; + @JsonProperty("friendlyLongFld") public long lfld; } diff --git a/solr/core/src/test/org/apache/solr/util/TestSolrVersion.java b/solr/core/src/test/org/apache/solr/util/TestSolrVersion.java index f7d67c38e29..6ea34613066 100644 --- a/solr/core/src/test/org/apache/solr/util/TestSolrVersion.java +++ b/solr/core/src/test/org/apache/solr/util/TestSolrVersion.java @@ -40,13 +40,12 @@ public void testLatestInitialized() { } public void testForwardsCompatibility() { - assertTrue(SolrVersion.valueOf("9.10.20").greaterThanOrEqualTo(SolrVersion.forIntegers(9, 0, 0))); + assertTrue( + SolrVersion.valueOf("9.10.20").greaterThanOrEqualTo(SolrVersion.forIntegers(9, 0, 0))); } public void testParseExceptions() { - expectThrows( - ParseException.class, - () -> SolrVersion.valueOf("SOLR_7_0_0")); + expectThrows(ParseException.class, () -> SolrVersion.valueOf("SOLR_7_0_0")); } public void testSatisfies() { @@ -59,7 +58,7 @@ public void testSatisfies() { } public void testSatisfiesParseFailure() { - assertThrows(SolrVersion.InvalidSemVerExpressionException.class, () -> - SOLR_9_0_1.satisfies(":")); + assertThrows( + SolrVersion.InvalidSemVerExpressionException.class, () -> SOLR_9_0_1.satisfies(":")); } } diff --git a/solr/core/src/test/org/apache/solr/util/TestSystemIdResolver.java b/solr/core/src/test/org/apache/solr/util/TestSystemIdResolver.java index a64f169f72a..eb5633acd56 100644 --- 
a/solr/core/src/test/org/apache/solr/util/TestSystemIdResolver.java +++ b/solr/core/src/test/org/apache/solr/util/TestSystemIdResolver.java @@ -20,7 +20,6 @@ import java.io.IOException; import java.nio.file.Path; import java.util.Arrays; - import org.apache.commons.io.IOUtils; import org.apache.lucene.util.ResourceLoader; import org.apache.solr.SolrTestCaseJ4; @@ -28,13 +27,15 @@ import org.xml.sax.InputSource; public class TestSystemIdResolver extends SolrTestCaseJ4 { - + public void tearDown() throws Exception { System.clearProperty(SolrResourceLoader.SOLR_ALLOW_UNSAFE_RESOURCELOADING_PARAM); super.tearDown(); } - private void assertEntityResolving(SystemIdResolver resolver, String expectedSystemId, String base, String systemId) throws Exception { + private void assertEntityResolving( + SystemIdResolver resolver, String expectedSystemId, String base, String systemId) + throws Exception { final InputSource is = resolver.resolveEntity(null, null, base, systemId); try { assertEquals("Resolved SystemId does not match", expectedSystemId, is.getSystemId()); @@ -42,65 +43,102 @@ private void assertEntityResolving(SystemIdResolver resolver, String expectedSys IOUtils.closeQuietly(is.getByteStream()); } } - + public void testResolving() throws Exception { final Path testHome = SolrTestCaseJ4.getFile("solr/collection1").getParentFile().toPath(); - final ResourceLoader loader = new SolrResourceLoader(testHome.resolve("collection1"), this.getClass().getClassLoader()); + final ResourceLoader loader = + new SolrResourceLoader(testHome.resolve("collection1"), this.getClass().getClassLoader()); final SystemIdResolver resolver = new SystemIdResolver(loader); - final String fileUri = new File(testHome+"/crazy-path-to-config.xml").toURI().toASCIIString(); - + final String fileUri = new File(testHome + "/crazy-path-to-config.xml").toURI().toASCIIString(); + assertEquals("solrres:/test.xml", SystemIdResolver.createSystemIdFromResourceName("test.xml")); - assertEquals("solrres://@/usr/local/etc/test.xml", SystemIdResolver.createSystemIdFromResourceName("/usr/local/etc/test.xml")); - assertEquals("solrres://@/test.xml", SystemIdResolver.createSystemIdFromResourceName(File.separatorChar+"test.xml")); - + assertEquals( + "solrres://@/usr/local/etc/test.xml", + SystemIdResolver.createSystemIdFromResourceName("/usr/local/etc/test.xml")); + assertEquals( + "solrres://@/test.xml", + SystemIdResolver.createSystemIdFromResourceName(File.separatorChar + "test.xml")); + // check relative URI resolving - assertEquals("solrres:/test.xml", resolver.resolveRelativeURI("solrres:/base.xml", "test.xml").toASCIIString()); - assertEquals("solrres://@/etc/test.xml", - resolver.resolveRelativeURI("solrres://@/usr/local/etc/base.xml", "../../../etc/test.xml").toASCIIString()); + assertEquals( + "solrres:/test.xml", + resolver.resolveRelativeURI("solrres:/base.xml", "test.xml").toASCIIString()); + assertEquals( + "solrres://@/etc/test.xml", + resolver + .resolveRelativeURI("solrres://@/usr/local/etc/base.xml", "../../../etc/test.xml") + .toASCIIString()); // special case: if relative URI starts with "/" convert to an absolute solrres://@/-URI - assertEquals("solrres://@/a/test.xml", resolver.resolveRelativeURI("solrres:/base.xml", "/a/test.xml").toASCIIString()); - // test, that resolving works if somebody uses an absolute file:-URI in a href attribute, it should be preserved - assertEquals(fileUri, resolver.resolveRelativeURI("solrres:/base.xml", fileUri).toASCIIString()); - assertEquals("solrres:/base.xml", 
resolver.resolveRelativeURI(fileUri, "solrres:/base.xml").toASCIIString()); - + assertEquals( + "solrres://@/a/test.xml", + resolver.resolveRelativeURI("solrres:/base.xml", "/a/test.xml").toASCIIString()); + // test, that resolving works if somebody uses an absolute file:-URI in a href attribute, it + // should be preserved + assertEquals( + fileUri, resolver.resolveRelativeURI("solrres:/base.xml", fileUri).toASCIIString()); + assertEquals( + "solrres:/base.xml", + resolver.resolveRelativeURI(fileUri, "solrres:/base.xml").toASCIIString()); + // do some real resolves to InputStreams with real existing files assertEntityResolving(resolver, "solrres:/schema.xml", "solrres:/solrconfig.xml", "schema.xml"); - assertEntityResolving(resolver, "solrres:/org/apache/solr/util/TestSystemIdResolver.class", - "solrres:/org/apache/solr/util/RTimer.class", "TestSystemIdResolver.class"); - assertEntityResolving(resolver, SystemIdResolver.createSystemIdFromResourceName(testHome+"/collection1/conf/schema.xml"), - SystemIdResolver.createSystemIdFromResourceName(testHome+"/collection1/conf/solrconfig.xml"), "schema.xml"); - + assertEntityResolving( + resolver, + "solrres:/org/apache/solr/util/TestSystemIdResolver.class", + "solrres:/org/apache/solr/util/RTimer.class", + "TestSystemIdResolver.class"); + assertEntityResolving( + resolver, + SystemIdResolver.createSystemIdFromResourceName(testHome + "/collection1/conf/schema.xml"), + SystemIdResolver.createSystemIdFromResourceName( + testHome + "/collection1/conf/solrconfig.xml"), + "schema.xml"); + // if somebody uses an absolute uri (e.g., file://) we should fail resolving: - IOException ioe = expectThrows(IOException.class, () -> { - resolver.resolveEntity(null, null, "solrres:/solrconfig.xml", fileUri); - }); + IOException ioe = + expectThrows( + IOException.class, + () -> { + resolver.resolveEntity(null, null, "solrres:/solrconfig.xml", fileUri); + }); assertTrue(ioe.getMessage().startsWith("Cannot resolve absolute")); - - ioe = expectThrows(IOException.class, () -> { - resolver.resolveEntity(null, null, "solrres:/solrconfig.xml", "http://lucene.apache.org/test.xml"); - }); + + ioe = + expectThrows( + IOException.class, + () -> { + resolver.resolveEntity( + null, null, "solrres:/solrconfig.xml", "http://lucene.apache.org/test.xml"); + }); assertTrue(ioe.getMessage().startsWith("Cannot resolve absolute")); - + // check that we can't escape with absolute file paths: for (String path : Arrays.asList("/etc/passwd", "/windows/notepad.exe")) { - ioe = expectThrows(IOException.class, () -> { - resolver.resolveEntity(null, null, "solrres:/solrconfig.xml", path); - }); - assertTrue(ioe.getMessage().startsWith("Can't find resource") - || ioe.getMessage().contains("access denied") - || ioe.getMessage().contains("is outside resource loader dir")); + ioe = + expectThrows( + IOException.class, + () -> { + resolver.resolveEntity(null, null, "solrres:/solrconfig.xml", path); + }); + assertTrue( + ioe.getMessage().startsWith("Can't find resource") + || ioe.getMessage().contains("access denied") + || ioe.getMessage().contains("is outside resource loader dir")); } } public void testUnsafeResolving() throws Exception { System.setProperty(SolrResourceLoader.SOLR_ALLOW_UNSAFE_RESOURCELOADING_PARAM, "true"); - + final Path testHome = SolrTestCaseJ4.getFile("solr/collection1").getParentFile().toPath(); - final ResourceLoader loader = new SolrResourceLoader(testHome.resolve("collection1"), this.getClass().getClassLoader()); + final ResourceLoader loader = + new 
SolrResourceLoader(testHome.resolve("collection1"), this.getClass().getClassLoader()); final SystemIdResolver resolver = new SystemIdResolver(loader); - - assertEntityResolving(resolver, SystemIdResolver.createSystemIdFromResourceName(testHome+"/crazy-path-to-schema.xml"), - SystemIdResolver.createSystemIdFromResourceName(testHome+"/crazy-path-to-config.xml"), "crazy-path-to-schema.xml"); - } + assertEntityResolving( + resolver, + SystemIdResolver.createSystemIdFromResourceName(testHome + "/crazy-path-to-schema.xml"), + SystemIdResolver.createSystemIdFromResourceName(testHome + "/crazy-path-to-config.xml"), + "crazy-path-to-schema.xml"); + } } diff --git a/solr/core/src/test/org/apache/solr/util/TestTestInjection.java b/solr/core/src/test/org/apache/solr/util/TestTestInjection.java index 089b6715658..c86a2344c71 100644 --- a/solr/core/src/test/org/apache/solr/util/TestTestInjection.java +++ b/solr/core/src/test/org/apache/solr/util/TestTestInjection.java @@ -17,45 +17,42 @@ package org.apache.solr.util; import java.util.Locale; - import org.apache.solr.SolrTestCase; import org.junit.AfterClass; import org.junit.BeforeClass; public class TestTestInjection extends SolrTestCase { - + @BeforeClass - public static void beforeClass() { - - } - + public static void beforeClass() {} + @AfterClass public static void cleanup() { TestInjection.reset(); } - + public void testBasics() { TestInjection.failReplicaRequests = "true:100"; Exception e = expectThrows(Exception.class, TestInjection::injectFailReplicaRequests); - assertFalse("Should not fail based on bad syntax", + assertFalse( + "Should not fail based on bad syntax", e.getMessage().toLowerCase(Locale.ENGLISH).contains("bad syntax")); - + TestInjection.failReplicaRequests = "true:00"; for (int i = 0; i < 100; i++) { // should never fail TestInjection.injectFailReplicaRequests(); - } } - + public void testBadSyntax() { testBadSyntax("true/10"); testBadSyntax("boo:100"); testBadSyntax("false:100f"); testBadSyntax("TRUE:0:"); } - + public void testGoodSyntax() { testGoodSyntax("true:10"); testGoodSyntax("true:100"); @@ -65,7 +62,6 @@ public void testGoodSyntax() { testGoodSyntax("TRUE:000"); testGoodSyntax("FALSE:50"); testGoodSyntax("FAlsE:99"); - } public void testBadSyntax(String syntax) { @@ -73,7 +69,7 @@ public void testBadSyntax(String syntax) { Exception e = expectThrows(Exception.class, TestInjection::injectFailReplicaRequests); assertTrue(e.getMessage().toLowerCase(Locale.ENGLISH).contains("bad syntax")); } - + public void testGoodSyntax(String syntax) { TestInjection.failReplicaRequests = syntax; diff --git a/solr/core/src/test/org/apache/solr/util/TestUtils.java b/solr/core/src/test/org/apache/solr/util/TestUtils.java index e5aa8b0b2dc..3d17bf046f2 100644 --- a/solr/core/src/test/org/apache/solr/util/TestUtils.java +++ b/solr/core/src/test/org/apache/solr/util/TestUtils.java @@ -16,6 +16,14 @@ */ package org.apache.solr.util; +import static java.nio.charset.StandardCharsets.UTF_8; +import static java.util.Arrays.asList; +import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_DEF; +import static org.apache.solr.common.cloud.ZkStateReader.NRT_REPLICAS; +import static org.apache.solr.common.cloud.ZkStateReader.NUM_SHARDS_PROP; +import static org.apache.solr.common.util.Utils.fromJSONString; + +import com.google.common.collect.ImmutableList; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.StringReader; @@ -24,8 +32,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; - 
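
The value:percent strings exercised in TestTestInjection above follow the fault-injection format that TestInjection parses: a case-insensitive boolean that arms the injection point, a colon, and an integer percentage giving the chance that any given call fails. A rough, simplified stand-in for such a knob (FaultKnob is a hypothetical class sketched from the good/bad syntax cases in the test, not Solr's actual TestInjection implementation):

import java.util.Locale;
import java.util.Random;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

final class FaultKnob {
  private static final Pattern SYNTAX = Pattern.compile("(true|false):(\\d+)");
  private static final Random RANDOM = new Random();

  // Throws to simulate a failure with the probability encoded in spec:
  // "true:100" always fails, "true:00" never does, and "FAlsE:99" is
  // valid syntax but disarmed; "true/10" or "TRUE:0:" are rejected.
  static void maybeFail(String spec) {
    Matcher m = SYNTAX.matcher(spec.toLowerCase(Locale.ENGLISH));
    if (!m.matches()) {
      throw new IllegalArgumentException("Bad syntax: " + spec);
    }
    if (Boolean.parseBoolean(m.group(1)) && RANDOM.nextInt(100) < Integer.parseInt(m.group(2))) {
      throw new RuntimeException("Injected failure (" + spec + ")");
    }
  }
}
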
-import com.google.common.collect.ImmutableList; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.MapWriter; import org.apache.solr.common.util.CommandOperation; @@ -38,35 +44,26 @@ import org.apache.solr.common.util.Utils; import org.junit.Assert; -import static java.nio.charset.StandardCharsets.UTF_8; -import static java.util.Arrays.asList; -import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_DEF; -import static org.apache.solr.common.cloud.ZkStateReader.NRT_REPLICAS; -import static org.apache.solr.common.cloud.ZkStateReader.NUM_SHARDS_PROP; -import static org.apache.solr.common.util.Utils.fromJSONString; - -/** - * - */ +/** */ public class TestUtils extends SolrTestCaseJ4 { - + public void testJoin() { - assertEquals("a|b|c", StrUtils.join(asList("a","b","c"), '|')); - assertEquals("a,b,c", StrUtils.join(asList("a","b","c"), ',')); - assertEquals("a\\,b,c", StrUtils.join(asList("a,b","c"), ',')); - assertEquals("a,b|c", StrUtils.join(asList("a,b","c"), '|')); + assertEquals("a|b|c", StrUtils.join(asList("a", "b", "c"), '|')); + assertEquals("a,b,c", StrUtils.join(asList("a", "b", "c"), ',')); + assertEquals("a\\,b,c", StrUtils.join(asList("a,b", "c"), ',')); + assertEquals("a,b|c", StrUtils.join(asList("a,b", "c"), '|')); - assertEquals("a\\\\b|c", StrUtils.join(asList("a\\b","c"), '|')); + assertEquals("a\\\\b|c", StrUtils.join(asList("a\\b", "c"), '|')); } public void testEscapeTextWithSeparator() { - assertEquals("a", StrUtils.escapeTextWithSeparator("a", '|')); - assertEquals("a", StrUtils.escapeTextWithSeparator("a", ',')); - - assertEquals("a\\|b", StrUtils.escapeTextWithSeparator("a|b", '|')); - assertEquals("a|b", StrUtils.escapeTextWithSeparator("a|b", ',')); - assertEquals("a,b", StrUtils.escapeTextWithSeparator("a,b", '|')); - assertEquals("a\\,b", StrUtils.escapeTextWithSeparator("a,b", ',')); + assertEquals("a", StrUtils.escapeTextWithSeparator("a", '|')); + assertEquals("a", StrUtils.escapeTextWithSeparator("a", ',')); + + assertEquals("a\\|b", StrUtils.escapeTextWithSeparator("a|b", '|')); + assertEquals("a|b", StrUtils.escapeTextWithSeparator("a|b", ',')); + assertEquals("a,b", StrUtils.escapeTextWithSeparator("a,b", '|')); + assertEquals("a\\,b", StrUtils.escapeTextWithSeparator("a,b", ',')); assertEquals("a\\\\b", StrUtils.escapeTextWithSeparator("a\\b", ',')); assertEquals("a\\\\\\,b", StrUtils.escapeTextWithSeparator("a\\,b", ',')); @@ -74,102 +71,101 @@ public void testEscapeTextWithSeparator() { public void testSplitEscaping() { List arr = StrUtils.splitSmart("\\r\\n:\\t\\f\\b", ":", true); - assertEquals(2,arr.size()); - assertEquals("\r\n",arr.get(0)); - assertEquals("\t\f\b",arr.get(1)); + assertEquals(2, arr.size()); + assertEquals("\r\n", arr.get(0)); + assertEquals("\t\f\b", arr.get(1)); arr = StrUtils.splitSmart("\\r\\n:\\t\\f\\b", ":", false); - assertEquals(2,arr.size()); - assertEquals("\\r\\n",arr.get(0)); - assertEquals("\\t\\f\\b",arr.get(1)); + assertEquals(2, arr.size()); + assertEquals("\\r\\n", arr.get(0)); + assertEquals("\\t\\f\\b", arr.get(1)); arr = StrUtils.splitWS("\\r\\n \\t\\f\\b", true); - assertEquals(2,arr.size()); - assertEquals("\r\n",arr.get(0)); - assertEquals("\t\f\b",arr.get(1)); + assertEquals(2, arr.size()); + assertEquals("\r\n", arr.get(0)); + assertEquals("\t\f\b", arr.get(1)); arr = StrUtils.splitWS("\\r\\n \\t\\f\\b", false); - assertEquals(2,arr.size()); - assertEquals("\\r\\n",arr.get(0)); - assertEquals("\\t\\f\\b",arr.get(1)); + assertEquals(2, arr.size()); + assertEquals("\\r\\n", 
arr.get(0)); + assertEquals("\\t\\f\\b", arr.get(1)); arr = StrUtils.splitSmart("\\:foo\\::\\:bar\\:", ":", true); - assertEquals(2,arr.size()); - assertEquals(":foo:",arr.get(0)); - assertEquals(":bar:",arr.get(1)); + assertEquals(2, arr.size()); + assertEquals(":foo:", arr.get(0)); + assertEquals(":bar:", arr.get(1)); arr = StrUtils.splitWS("\\ foo\\ \\ bar\\ ", true); - assertEquals(2,arr.size()); - assertEquals(" foo ",arr.get(0)); - assertEquals(" bar ",arr.get(1)); - + assertEquals(2, arr.size()); + assertEquals(" foo ", arr.get(0)); + assertEquals(" bar ", arr.get(1)); + arr = StrUtils.splitFileNames("/h/s,/h/\\,s,"); - assertEquals(2,arr.size()); - assertEquals("/h/s",arr.get(0)); - assertEquals("/h/,s",arr.get(1)); + assertEquals(2, arr.size()); + assertEquals("/h/s", arr.get(0)); + assertEquals("/h/,s", arr.get(1)); arr = StrUtils.splitFileNames("/h/s"); - assertEquals(1,arr.size()); - assertEquals("/h/s",arr.get(0)); + assertEquals(1, arr.size()); + assertEquals("/h/s", arr.get(0)); } - public void testNamedLists() - { + public void testNamedLists() { SimpleOrderedMap map = new SimpleOrderedMap<>(); - map.add( "test", 10 ); + map.add("test", 10); SimpleOrderedMap clone = map.clone(); - assertEquals( map.toString(), clone.toString() ); - assertEquals(Integer.valueOf(10), clone.get( "test" ) ); - - Map realMap = new HashMap<>(); - realMap.put( "one", 1 ); - realMap.put( "two", 2 ); - realMap.put( "three", 3 ); + assertEquals(map.toString(), clone.toString()); + assertEquals(Integer.valueOf(10), clone.get("test")); + + Map realMap = new HashMap<>(); + realMap.put("one", 1); + realMap.put("two", 2); + realMap.put("three", 3); map = new SimpleOrderedMap<>(); - map.addAll( realMap ); - assertEquals( 3, map.size() ); + map.addAll(realMap); + assertEquals(3, map.size()); map = new SimpleOrderedMap<>(); - map.add( "one", 1 ); - map.add( "two", 2 ); - map.add( "three", 3 ); - map.add( "one", 100 ); - map.add( null, null ); - - assertEquals( "one", map.getName(0) ); - map.setName( 0, "ONE" ); - assertEquals( "ONE", map.getName(0) ); - assertEquals(Integer.valueOf(100), map.get( "one", 1 ) ); - assertEquals( 4, map.indexOf( null, 1 ) ); - assertEquals( null, map.get( null, 1 ) ); + map.add("one", 1); + map.add("two", 2); + map.add("three", 3); + map.add("one", 100); + map.add(null, null); + + assertEquals("one", map.getName(0)); + map.setName(0, "ONE"); + assertEquals("ONE", map.getName(0)); + assertEquals(Integer.valueOf(100), map.get("one", 1)); + assertEquals(4, map.indexOf(null, 1)); + assertEquals(null, map.get(null, 1)); map = new SimpleOrderedMap<>(); - map.add( "one", 1 ); - map.add( "two", 2 ); + map.add("one", 1); + map.add("two", 2); Iterator> iter = map.iterator(); - while( iter.hasNext() ) { + while (iter.hasNext()) { Map.Entry v = iter.next(); v.toString(); // coverage - v.setValue( v.getValue()*10 ); + v.setValue(v.getValue() * 10); try { iter.remove(); - Assert.fail( "should be unsupported..." 
); - } catch( UnsupportedOperationException ignored) {} + Assert.fail("should be unsupported..."); + } catch (UnsupportedOperationException ignored) { + } } // the values should be bigger - assertEquals(Integer.valueOf(10), map.get( "one" ) ); - assertEquals(Integer.valueOf(20), map.get( "two" ) ); + assertEquals(Integer.valueOf(10), map.get("one")); + assertEquals(Integer.valueOf(20), map.get("two")); } - - public void testNumberUtils() - { + + public void testNumberUtils() { double number = 1.234; - String sortable = NumberUtils.double2sortableStr( number ); - assertEquals( number, NumberUtils.SortableStr2double(sortable), 0.001); - + String sortable = NumberUtils.double2sortableStr(number); + assertEquals(number, NumberUtils.SortableStr2double(sortable), 0.001); + long num = System.nanoTime(); - sortable = NumberUtils.long2sortableStr( num ); - assertEquals( num, NumberUtils.SortableStr2long(sortable, 0, sortable.length() ) ); - assertEquals( Long.toString(num), NumberUtils.SortableStr2long(sortable) ); + sortable = NumberUtils.long2sortableStr(num); + assertEquals(num, NumberUtils.SortableStr2long(sortable, 0, sortable.length())); + assertEquals(Long.toString(num), NumberUtils.SortableStr2long(sortable)); } public void testNoggitFlags() throws IOException { @@ -180,126 +176,154 @@ public void testNoggitFlags() throws IOException { assertEquals(2, commands.size()); for (CommandOperation command : commands) { assertEquals("a", command.name); - assertEquals( "v1" ,Utils.getObjectByPath(command.getDataMap(), true, "d[0]/k1"));command.getDataMap(); - assertEquals( "v2" ,Utils.getObjectByPath(command.getDataMap(), true, "d[1]/k2"));command.getDataMap(); + assertEquals("v1", Utils.getObjectByPath(command.getDataMap(), true, "d[0]/k1")); + command.getDataMap(); + assertEquals("v2", Utils.getObjectByPath(command.getDataMap(), true, "d[1]/k2")); + command.getDataMap(); } } public void testBinaryCommands() throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); try (final JavaBinCodec jbc = new JavaBinCodec()) { - jbc.marshal((MapWriter) ew -> { - ew.put("set-user", fromJSONString("{x:y}")); - ew.put("set-user", fromJSONString("{x:y,x1:y1}")); - ew.put("single", asList(fromJSONString("[{x:y,x1:y1},{x2:y2}]"), fromJSONString( "{x2:y2}"))); - ew.put("multi", asList(fromJSONString("{x:y,x1:y1}"), fromJSONString( "{x2:y2}"))); - }, baos); + jbc.marshal( + (MapWriter) + ew -> { + ew.put("set-user", fromJSONString("{x:y}")); + ew.put("set-user", fromJSONString("{x:y,x1:y1}")); + ew.put( + "single", + asList(fromJSONString("[{x:y,x1:y1},{x2:y2}]"), fromJSONString("{x2:y2}"))); + ew.put("multi", asList(fromJSONString("{x:y,x1:y1}"), fromJSONString("{x2:y2}"))); + }, + baos); } - ContentStream stream = new ContentStreamBase.ByteArrayStream(baos.toByteArray(),null, "application/javabin"); - List commands = CommandOperation.readCommands(Collections.singletonList(stream), new NamedList<>(), Collections.singleton("single")); + ContentStream stream = + new ContentStreamBase.ByteArrayStream(baos.toByteArray(), null, "application/javabin"); + List commands = + CommandOperation.readCommands( + Collections.singletonList(stream), new NamedList<>(), Collections.singleton("single")); assertEquals(5, commands.size()); } private void assertNoggitJsonValues(Map m) { - assertEquals( "c" ,Utils.getObjectByPath(m, true, "/a/b")); - assertEquals( "v1" ,Utils.getObjectByPath(m, true, "/a/d[0]/k1")); - assertEquals( "v2" ,Utils.getObjectByPath(m, true, "/a/d[1]/k2")); + assertEquals("c", 
Utils.getObjectByPath(m, true, "/a/b")); + assertEquals("v1", Utils.getObjectByPath(m, true, "/a/d[0]/k1")); + assertEquals("v2", Utils.getObjectByPath(m, true, "/a/d[1]/k2")); } - public void testSetObjectByPath(){ - String json = "{\n" + - " 'authorization':{\n" + - " 'class':'solr.RuleBasedAuthorizationPlugin',\n" + - " 'user-role':{\n" + - " 'solr':'admin',\n" + - " 'harry':'admin'},\n" + - " 'permissions':[{\n" + - " 'name':'security-edit',\n" + - " 'role':['admin']},\n" + - " {\n" + - " 'name':'x-update',\n" + - " 'collection':'x',\n" + - " 'path':'/update/*',\n" + - " 'role':'dev'}],\n" + - " '':{'v':4}}}"; - Map m = (Map) fromJSONString(json); - Utils.setObjectByPath(m,"authorization/permissions[1]/role","guest"); - Utils.setObjectByPath(m,"authorization/permissions[0]/role[-1]","dev"); - assertEquals("guest", Utils.getObjectByPath(m,true,"authorization/permissions[1]/role")); - assertEquals("dev", Utils.getObjectByPath(m,true,"authorization/permissions[0]/role[1]")); + public void testSetObjectByPath() { + String json = + "{\n" + + " 'authorization':{\n" + + " 'class':'solr.RuleBasedAuthorizationPlugin',\n" + + " 'user-role':{\n" + + " 'solr':'admin',\n" + + " 'harry':'admin'},\n" + + " 'permissions':[{\n" + + " 'name':'security-edit',\n" + + " 'role':['admin']},\n" + + " {\n" + + " 'name':'x-update',\n" + + " 'collection':'x',\n" + + " 'path':'/update/*',\n" + + " 'role':'dev'}],\n" + + " '':{'v':4}}}"; + Map m = (Map) fromJSONString(json); + Utils.setObjectByPath(m, "authorization/permissions[1]/role", "guest"); + Utils.setObjectByPath(m, "authorization/permissions[0]/role[-1]", "dev"); + assertEquals("guest", Utils.getObjectByPath(m, true, "authorization/permissions[1]/role")); + assertEquals("dev", Utils.getObjectByPath(m, true, "authorization/permissions[0]/role[1]")); } - public void testUtilsJSPath(){ - - String json = "{\n" + - " 'authorization':{\n" + - " 'class':'solr.RuleBasedAuthorizationPlugin',\n" + - " 'user-role':{\n" + - " 'solr':'admin',\n" + - " 'harry':'admin'},\n" + - " 'permissions':[{\n" + - " 'name':'security-edit',\n" + - " 'role':'admin'},\n" + - " {\n" + - " 'name':'x-update',\n" + - " 'collection':'x',\n" + - " 'path':'/update/*',\n" + - " 'role':'dev'}],\n" + - " '':{'v':4}}}"; + public void testUtilsJSPath() { + + String json = + "{\n" + + " 'authorization':{\n" + + " 'class':'solr.RuleBasedAuthorizationPlugin',\n" + + " 'user-role':{\n" + + " 'solr':'admin',\n" + + " 'harry':'admin'},\n" + + " 'permissions':[{\n" + + " 'name':'security-edit',\n" + + " 'role':'admin'},\n" + + " {\n" + + " 'name':'x-update',\n" + + " 'collection':'x',\n" + + " 'path':'/update/*',\n" + + " 'role':'dev'}],\n" + + " '':{'v':4}}}"; Map m = (Map) fromJSONString(json); - assertEquals("x-update", Utils.getObjectByPath(m,false, "authorization/permissions[1]/name")); - + assertEquals("x-update", Utils.getObjectByPath(m, false, "authorization/permissions[1]/name")); } - + @SuppressWarnings({"unchecked"}) - public void testMapWriterIdx(){ - String json = "{" + - " 'responseHeader':{" + - " 'status':0," + - " 'QTime':6752}," + - " 'success':{" + - " '127.0.0.1:56443_solr':{" + - " 'responseHeader':{" + - " 'status':0," + - " 'QTime':4276}," + - " 'core':'corestatus_test_shard2_replica_n5'}," + - " '127.0.0.1:56445_solr':{" + - " 'responseHeader':{" + - " 'status':0," + - " 'QTime':4271}," + - " 'core':'corestatus_test_shard1_replica_n1'}," + - " '127.0.0.1:56446_solr':{" + - " 'responseHeader':{" + - " 'status':0," + - " 'QTime':5015}," + - " 
'core':'corestatus_test_shard1_replica_n2'}," + - " '127.0.0.1:56444_solr':{" + - " 'responseHeader':{" + - " 'status':0," + - " 'QTime':5033}," + - " 'core':'corestatus_test_shard2_replica_n3'}}}"; + public void testMapWriterIdx() { + String json = + "{" + + " 'responseHeader':{" + + " 'status':0," + + " 'QTime':6752}," + + " 'success':{" + + " '127.0.0.1:56443_solr':{" + + " 'responseHeader':{" + + " 'status':0," + + " 'QTime':4276}," + + " 'core':'corestatus_test_shard2_replica_n5'}," + + " '127.0.0.1:56445_solr':{" + + " 'responseHeader':{" + + " 'status':0," + + " 'QTime':4271}," + + " 'core':'corestatus_test_shard1_replica_n1'}," + + " '127.0.0.1:56446_solr':{" + + " 'responseHeader':{" + + " 'status':0," + + " 'QTime':5015}," + + " 'core':'corestatus_test_shard1_replica_n2'}," + + " '127.0.0.1:56444_solr':{" + + " 'responseHeader':{" + + " 'status':0," + + " 'QTime':5033}," + + " 'core':'corestatus_test_shard2_replica_n3'}}}"; @SuppressWarnings({"rawtypes"}) Map m = (Map) fromJSONString(json); - assertEquals("127.0.0.1:56443_solr", Utils.getObjectByPath(m,false, "success[0]/key")); - assertEquals("corestatus_test_shard2_replica_n5", Utils.getObjectByPath(m, false,asList("success[0]", "value", "core") )); - assertEquals(4276L, Utils.getObjectByPath(m, false,asList("success[0]", "value", "responseHeader", "QTime") )); - - assertEquals("127.0.0.1:56444_solr", Utils.getObjectByPath(m,false, "success[3]/key")); - assertEquals("corestatus_test_shard2_replica_n3", Utils.getObjectByPath(m, false,asList("success[3]", "value", "core") )); - assertEquals(5033L, Utils.getObjectByPath(m, false,asList("success[3]", "value", "responseHeader", "QTime") )); + assertEquals("127.0.0.1:56443_solr", Utils.getObjectByPath(m, false, "success[0]/key")); + assertEquals( + "corestatus_test_shard2_replica_n5", + Utils.getObjectByPath(m, false, asList("success[0]", "value", "core"))); + assertEquals( + 4276L, + Utils.getObjectByPath(m, false, asList("success[0]", "value", "responseHeader", "QTime"))); + + assertEquals("127.0.0.1:56444_solr", Utils.getObjectByPath(m, false, "success[3]/key")); + assertEquals( + "corestatus_test_shard2_replica_n3", + Utils.getObjectByPath(m, false, asList("success[3]", "value", "core"))); + assertEquals( + 5033L, + Utils.getObjectByPath(m, false, asList("success[3]", "value", "responseHeader", "QTime"))); Map nodes = (Map) m.get("success"); - m.put("success", (MapWriter) ew -> nodes.forEach((o, o2) -> ew.putNoEx((String) o,o2))); - assertEquals("127.0.0.1:56443_solr", Utils.getObjectByPath(m,false, "success[0]/key")); - assertEquals("corestatus_test_shard2_replica_n5", Utils.getObjectByPath(m, false,asList("success[0]", "value", "core") )); - assertEquals(4276L, Utils.getObjectByPath(m, false,asList("success[0]", "value", "responseHeader", "QTime") )); - - assertEquals("127.0.0.1:56444_solr", Utils.getObjectByPath(m,false, "success[3]/key")); - assertEquals("corestatus_test_shard2_replica_n3", Utils.getObjectByPath(m, false,asList("success[3]", "value", "core") )); - assertEquals(5033L, Utils.getObjectByPath(m, false,asList("success[3]", "value", "responseHeader", "QTime") )); + m.put("success", (MapWriter) ew -> nodes.forEach((o, o2) -> ew.putNoEx((String) o, o2))); + assertEquals("127.0.0.1:56443_solr", Utils.getObjectByPath(m, false, "success[0]/key")); + assertEquals( + "corestatus_test_shard2_replica_n5", + Utils.getObjectByPath(m, false, asList("success[0]", "value", "core"))); + assertEquals( + 4276L, + Utils.getObjectByPath(m, false, asList("success[0]", "value", 
"responseHeader", "QTime"))); + + assertEquals("127.0.0.1:56444_solr", Utils.getObjectByPath(m, false, "success[3]/key")); + assertEquals( + "corestatus_test_shard2_replica_n3", + Utils.getObjectByPath(m, false, asList("success[3]", "value", "core"))); + assertEquals( + 5033L, + Utils.getObjectByPath(m, false, asList("success[3]", "value", "responseHeader", "QTime"))); final int[] count = {0}; NamedList nl = new NamedList<>(m); nl._forEachEntry("success", (o, o2) -> count[0]++); @@ -308,8 +332,13 @@ public void testMapWriterIdx(){ @SuppressWarnings({"unchecked"}) public void testMergeJson() { - Map sink = (Map) Utils.fromJSONString("{k2:v2, k1: {a:b, p:r, k21:{xx:yy}}}"); - assertTrue(Utils.mergeJson(sink, (Map) Utils.fromJSONString("k1:{a:c, e:f, p :null, k11:{a1:b1}, k21:{pp : qq}}"))); + Map sink = + (Map) Utils.fromJSONString("{k2:v2, k1: {a:b, p:r, k21:{xx:yy}}}"); + assertTrue( + Utils.mergeJson( + sink, + (Map) + Utils.fromJSONString("k1:{a:c, e:f, p :null, k11:{a1:b1}, k21:{pp : qq}}"))); assertEquals("v2", Utils.getObjectByPath(sink, true, "k2")); assertEquals("c", Utils.getObjectByPath(sink, true, "k1/a")); @@ -319,8 +348,14 @@ public void testMergeJson() { assertEquals("b1", Utils.getObjectByPath(sink, true, "k1/k11/a1")); sink = new HashMap<>(); - assertTrue(Utils.mergeJson(sink, (Map) Utils.fromJSONString("collectionDefaults:{numShards:3 , nrtReplicas:2}"))); - assertEquals(3L, Utils.getObjectByPath(sink, true, ImmutableList.of(COLLECTION_DEF, NUM_SHARDS_PROP))); - assertEquals(2L, Utils.getObjectByPath(sink, true, ImmutableList.of(COLLECTION_DEF, NRT_REPLICAS))); + assertTrue( + Utils.mergeJson( + sink, + (Map) + Utils.fromJSONString("collectionDefaults:{numShards:3 , nrtReplicas:2}"))); + assertEquals( + 3L, Utils.getObjectByPath(sink, true, ImmutableList.of(COLLECTION_DEF, NUM_SHARDS_PROP))); + assertEquals( + 2L, Utils.getObjectByPath(sink, true, ImmutableList.of(COLLECTION_DEF, NRT_REPLICAS))); } } diff --git a/solr/core/src/test/org/apache/solr/util/TimeZoneUtilsTest.java b/solr/core/src/test/org/apache/solr/util/TimeZoneUtilsTest.java index 37ea1cd4e03..4a97a6bad07 100644 --- a/solr/core/src/test/org/apache/solr/util/TimeZoneUtilsTest.java +++ b/solr/core/src/test/org/apache/solr/util/TimeZoneUtilsTest.java @@ -16,21 +16,19 @@ */ package org.apache.solr.util; -import org.apache.lucene.util.TestUtil; -import org.apache.solr.SolrTestCase; - -import java.util.Set; import java.util.HashSet; +import java.util.Locale; import java.util.Random; +import java.util.Set; import java.util.TimeZone; -import java.util.Locale; +import org.apache.lucene.util.TestUtil; +import org.apache.solr.SolrTestCase; public class TimeZoneUtilsTest extends SolrTestCase { - private static void assertSameRules(final String label, - final TimeZone expected, - final TimeZone actual) { - + private static void assertSameRules( + final String label, final TimeZone expected, final TimeZone actual) { + if (null == expected && null == actual) return; assertNotNull(label + ": expected is null", expected); @@ -38,9 +36,8 @@ private static void assertSameRules(final String label, final boolean same = expected.hasSameRules(actual); - assertTrue(label + ": " + expected.toString() + " [[NOT SAME RULES]] " + - actual.toString(), - same); + assertTrue( + label + ": " + expected.toString() + " [[NOT SAME RULES]] " + actual.toString(), same); } public void testValidIds() throws Exception { @@ -49,8 +46,9 @@ public void testValidIds() throws Exception { // brain dead: anything the JVM supports, should work for (String 
validId : TimeZone.getAvailableIDs()) { - assertTrue(validId + " not found in list of known ids", - TimeZoneUtils.KNOWN_TIMEZONE_IDS.contains(validId)); + assertTrue( + validId + " not found in list of known ids", + TimeZoneUtils.KNOWN_TIMEZONE_IDS.contains(validId)); final TimeZone expected = TimeZone.getTimeZone(validId); final TimeZone actual = TimeZoneUtils.getTimeZone(validId); @@ -58,57 +56,82 @@ public void testValidIds() throws Exception { idsTested.add(validId); } - - assertEquals("TimeZone.getAvailableIDs vs TimeZoneUtils.KNOWN_TIMEZONE_IDS", - TimeZoneUtils.KNOWN_TIMEZONE_IDS.size(), idsTested.size()); + + assertEquals( + "TimeZone.getAvailableIDs vs TimeZoneUtils.KNOWN_TIMEZONE_IDS", + TimeZoneUtils.KNOWN_TIMEZONE_IDS.size(), + idsTested.size()); } public void testCustom() throws Exception { - for (String input : new String[] {"GMT-00", "GMT+00", "GMT-0", "GMT+0", - "GMT+08","GMT+8", "GMT-08","GMT-8", - "GMT+0800","GMT+08:00", - "GMT-0800","GMT-08:00", - "GMT+23", "GMT+2300", - "GMT-23", "GMT-2300"}) { - assertSameRules(input, - TimeZone.getTimeZone(input), - TimeZoneUtils.getTimeZone(input)); + for (String input : + new String[] { + "GMT-00", + "GMT+00", + "GMT-0", + "GMT+0", + "GMT+08", + "GMT+8", + "GMT-08", + "GMT-8", + "GMT+0800", + "GMT+08:00", + "GMT-0800", + "GMT-08:00", + "GMT+23", + "GMT+2300", + "GMT-23", + "GMT-2300" + }) { + assertSameRules(input, TimeZone.getTimeZone(input), TimeZoneUtils.getTimeZone(input)); } } public void testStupidIKnowButIDontTrustTheJVM() throws Exception { - for (String input : new String[] {"GMT-00", "GMT+00", "GMT-0", "GMT+0", - "GMT+08","GMT+8", "GMT-08","GMT-8", - "GMT+0800","GMT+08:00", - "GMT-0800","GMT-08:00", - "GMT+23", "GMT+2300", - "GMT-23", "GMT-2300"}) { - assertSameRules(input, - TimeZone.getTimeZone(input), - TimeZone.getTimeZone(input)); + for (String input : + new String[] { + "GMT-00", + "GMT+00", + "GMT-0", + "GMT+0", + "GMT+08", + "GMT+8", + "GMT-08", + "GMT-8", + "GMT+0800", + "GMT+08:00", + "GMT-0800", + "GMT-08:00", + "GMT+23", + "GMT+2300", + "GMT-23", + "GMT-2300" + }) { + assertSameRules(input, TimeZone.getTimeZone(input), TimeZone.getTimeZone(input)); } } public void testInvalidInput() throws Exception { final String giberish = "giberish"; - assumeFalse("This test assumes that " + giberish + " is not a valid tz id", - TimeZoneUtils.KNOWN_TIMEZONE_IDS.contains(giberish)); + assumeFalse( + "This test assumes that " + giberish + " is not a valid tz id", + TimeZoneUtils.KNOWN_TIMEZONE_IDS.contains(giberish)); assertNull(giberish, TimeZoneUtils.getTimeZone(giberish)); - - for (String malformed : new String[] {"GMT+72", "GMT0800", - "GMT+2400" , "GMT+24:00", - "GMT+11-30" , "GMT+11:-30", - "GMT+0080" , "GMT+00:80"}) { + for (String malformed : + new String[] { + "GMT+72", "GMT0800", + "GMT+2400", "GMT+24:00", + "GMT+11-30", "GMT+11:-30", + "GMT+0080", "GMT+00:80" + }) { assertNull(malformed, TimeZoneUtils.getTimeZone(malformed)); } } - - public void testRandom() throws Exception { final String ONE_DIGIT = "%1d"; final String TWO_DIGIT = "%02d"; @@ -119,16 +142,14 @@ public void testRandom() throws Exception { int hour = TestUtil.nextInt(r, 0, 23); int min = TestUtil.nextInt(r, 0, 59); - String hours = String.format(Locale.ROOT, - (r.nextBoolean() ? ONE_DIGIT : TWO_DIGIT), - hour); + String hours = String.format(Locale.ROOT, (r.nextBoolean() ? ONE_DIGIT : TWO_DIGIT), hour); String mins = String.format(Locale.ROOT, TWO_DIGIT, min); - String input = "GMT" + (r.nextBoolean()?"+":"-") - + hours + (r.nextBoolean() ? 
"" : ((r.nextBoolean()?":":"") + mins)); - assertSameRules(input, - TimeZone.getTimeZone(input), - TimeZoneUtils.getTimeZone(input)); + String input = + "GMT" + + (r.nextBoolean() ? "+" : "-") + + hours + + (r.nextBoolean() ? "" : ((r.nextBoolean() ? ":" : "") + mins)); + assertSameRules(input, TimeZone.getTimeZone(input), TimeZoneUtils.getTimeZone(input)); } } } - diff --git a/solr/core/src/test/org/apache/solr/util/configuration/SSLConfigurationsTest.java b/solr/core/src/test/org/apache/solr/util/configuration/SSLConfigurationsTest.java index 25ecfd0d91c..27222f71d00 100644 --- a/solr/core/src/test/org/apache/solr/util/configuration/SSLConfigurationsTest.java +++ b/solr/core/src/test/org/apache/solr/util/configuration/SSLConfigurationsTest.java @@ -17,11 +17,13 @@ package org.apache.solr.util.configuration; +import static org.hamcrest.CoreMatchers.is; +import static org.junit.Assert.assertThat; + import java.io.IOException; import java.util.Arrays; import java.util.HashMap; import java.util.Map; - import org.apache.lucene.util.TestRuleRestoreSystemProperties; import org.apache.solr.util.configuration.providers.EnvSSLCredentialProvider; import org.apache.solr.util.configuration.providers.SysPropSSLCredentialProvider; @@ -30,9 +32,6 @@ import org.junit.Test; import org.junit.rules.TestRule; -import static org.hamcrest.CoreMatchers.is; -import static org.junit.Assert.assertThat; - public class SSLConfigurationsTest { private Map envs; private SSLConfigurations sut; @@ -41,17 +40,20 @@ public class SSLConfigurationsTest { public static final String SAMPLE_PW2 = "pw456"; public static final String SAMPLE_PW3 = "pw789"; public static final String KEY_STORE_PASSWORD = SSLConfigurations.SysProps.SSL_KEY_STORE_PASSWORD; - public static final String TRUST_STORE_PASSWORD = SSLConfigurations.SysProps.SSL_TRUST_STORE_PASSWORD; - public static final String CLIENT_KEY_STORE_PASSWORD = SSLConfigurations.SysProps.SSL_CLIENT_KEY_STORE_PASSWORD; - public static final String CLIENT_TRUST_STORE_PASSWORD = SSLConfigurations.SysProps.SSL_CLIENT_TRUST_STORE_PASSWORD; + public static final String TRUST_STORE_PASSWORD = + SSLConfigurations.SysProps.SSL_TRUST_STORE_PASSWORD; + public static final String CLIENT_KEY_STORE_PASSWORD = + SSLConfigurations.SysProps.SSL_CLIENT_KEY_STORE_PASSWORD; + public static final String CLIENT_TRUST_STORE_PASSWORD = + SSLConfigurations.SysProps.SSL_CLIENT_TRUST_STORE_PASSWORD; @Rule - public TestRule syspropRestore = new TestRuleRestoreSystemProperties( - SSLConfigurations.SysProps.SSL_KEY_STORE_PASSWORD, - SSLConfigurations.SysProps.SSL_TRUST_STORE_PASSWORD, - SSLConfigurations.SysProps.SSL_CLIENT_KEY_STORE_PASSWORD, - SSLConfigurations.SysProps.SSL_CLIENT_TRUST_STORE_PASSWORD - ); + public TestRule syspropRestore = + new TestRuleRestoreSystemProperties( + SSLConfigurations.SysProps.SSL_KEY_STORE_PASSWORD, + SSLConfigurations.SysProps.SSL_TRUST_STORE_PASSWORD, + SSLConfigurations.SysProps.SSL_CLIENT_KEY_STORE_PASSWORD, + SSLConfigurations.SysProps.SSL_CLIENT_TRUST_STORE_PASSWORD); @Before public void setUp() { @@ -61,7 +63,9 @@ public void setUp() { private SSLConfigurations createSut() { EnvSSLCredentialProvider envSSLCredentialProvider = new EnvSSLCredentialProvider(); envSSLCredentialProvider.setEnvVars(envs); - sut = new SSLConfigurations(Arrays.asList(envSSLCredentialProvider, new SysPropSSLCredentialProvider())); + sut = + new SSLConfigurations( + Arrays.asList(envSSLCredentialProvider, new SysPropSSLCredentialProvider())); return sut; } @@ -96,7 +100,6 @@ public void 
testSslConfigKeystorePwNotOverwrittenIfExists() { assertThat(System.getProperty(CLIENT_KEY_STORE_PASSWORD), is(SAMPLE_PW3)); // unchanged } - @Test public void testSslConfigTruststorePwFromKeystoreEnvVar() { envs.put(EnvSSLCredentialProvider.EnvVars.SOLR_SSL_TRUST_STORE_PASSWORD, SAMPLE_PW1); diff --git a/solr/core/src/test/org/apache/solr/util/configuration/SSLCredentialProviderFactoryTest.java b/solr/core/src/test/org/apache/solr/util/configuration/SSLCredentialProviderFactoryTest.java index a6a5b06ccc9..e00715570e7 100644 --- a/solr/core/src/test/org/apache/solr/util/configuration/SSLCredentialProviderFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/util/configuration/SSLCredentialProviderFactoryTest.java @@ -17,8 +17,11 @@ package org.apache.solr.util.configuration; -import java.util.List; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsInstanceOf.instanceOf; +import static org.junit.Assert.assertThat; +import java.util.List; import org.apache.lucene.util.TestRuleRestoreSystemProperties; import org.apache.solr.util.configuration.providers.EnvSSLCredentialProvider; import org.apache.solr.util.configuration.providers.SysPropSSLCredentialProvider; @@ -27,18 +30,12 @@ import org.junit.Test; import org.junit.rules.TestRule; -import static org.hamcrest.core.Is.is; -import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.junit.Assert.assertThat; - -/** - */ +/** */ public class SSLCredentialProviderFactoryTest { @Rule - public TestRule syspropRestore = new TestRuleRestoreSystemProperties( - SSLCredentialProviderFactory.PROVIDER_CHAIN_KEY - ); + public TestRule syspropRestore = + new TestRuleRestoreSystemProperties(SSLCredentialProviderFactory.PROVIDER_CHAIN_KEY); public static Matcher isA(Class type) { final Matcher typeMatcher = instanceOf(type); @@ -60,7 +57,8 @@ public void testGetProvidersOrder() { @Test public void testGetProvidersWithCustomProvider() { - SSLCredentialProviderFactory sut = getSut("sysprop;class://" + CustomSSLCredentialProvider.class.getName() + ";env"); + SSLCredentialProviderFactory sut = + getSut("sysprop;class://" + CustomSSLCredentialProvider.class.getName() + ";env"); List providers = sut.getProviders(); assertThat(providers.get(0), isA(SysPropSSLCredentialProvider.class)); assertThat(providers.get(1), isA(CustomSSLCredentialProvider.class)); @@ -87,11 +85,10 @@ private SSLCredentialProviderFactory getSut(String providerChain) { return new SSLCredentialProviderFactory(providerChain); } - static public class CustomSSLCredentialProvider implements SSLCredentialProvider { + public static class CustomSSLCredentialProvider implements SSLCredentialProvider { @Override public String getCredential(CredentialType type) { return null; } } - } diff --git a/solr/core/src/test/org/apache/solr/util/configuration/providers/EnvSSLCredentialProviderTest.java b/solr/core/src/test/org/apache/solr/util/configuration/providers/EnvSSLCredentialProviderTest.java index 2b85e7daf81..947c5e083e6 100644 --- a/solr/core/src/test/org/apache/solr/util/configuration/providers/EnvSSLCredentialProviderTest.java +++ b/solr/core/src/test/org/apache/solr/util/configuration/providers/EnvSSLCredentialProviderTest.java @@ -17,33 +17,32 @@ package org.apache.solr.util.configuration.providers; -import java.util.Map; +import static org.apache.solr.util.configuration.providers.AbstractSSLCredentialProvider.DEFAULT_CREDENTIAL_KEY_MAP; +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.assertThat; import 
com.google.common.collect.ImmutableMap; +import java.util.Map; import org.apache.solr.util.configuration.SSLCredentialProvider; import org.junit.Test; -import static org.apache.solr.util.configuration.providers.AbstractSSLCredentialProvider.DEFAULT_CREDENTIAL_KEY_MAP; -import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; - -/** - */ +/** */ public class EnvSSLCredentialProviderTest { @Test public void testGetCredentials() throws Exception { int cnt = 0; - Map envvars = ImmutableMap.of( - EnvSSLCredentialProvider.EnvVars.SOLR_SSL_KEY_STORE_PASSWORD, "pw" + ++cnt, - EnvSSLCredentialProvider.EnvVars.SOLR_SSL_TRUST_STORE_PASSWORD, "pw" + ++cnt, - EnvSSLCredentialProvider.EnvVars.SOLR_SSL_CLIENT_KEY_STORE_PASSWORD, "pw" + ++cnt, - EnvSSLCredentialProvider.EnvVars.SOLR_SSL_CLIENT_TRUST_STORE_PASSWORD, "pw" + ++cnt - ); + Map envvars = + ImmutableMap.of( + EnvSSLCredentialProvider.EnvVars.SOLR_SSL_KEY_STORE_PASSWORD, "pw" + ++cnt, + EnvSSLCredentialProvider.EnvVars.SOLR_SSL_TRUST_STORE_PASSWORD, "pw" + ++cnt, + EnvSSLCredentialProvider.EnvVars.SOLR_SSL_CLIENT_KEY_STORE_PASSWORD, "pw" + ++cnt, + EnvSSLCredentialProvider.EnvVars.SOLR_SSL_CLIENT_TRUST_STORE_PASSWORD, "pw" + ++cnt); EnvSSLCredentialProvider sut = new EnvSSLCredentialProvider(); sut.setEnvVars(envvars); cnt = 0; - for (Map.Entry set : DEFAULT_CREDENTIAL_KEY_MAP.entrySet()) { + for (Map.Entry set : + DEFAULT_CREDENTIAL_KEY_MAP.entrySet()) { String expectedpw = "pw" + ++cnt; assertThat(sut.getCredential(set.getKey()), is(expectedpw)); } diff --git a/solr/core/src/test/org/apache/solr/util/configuration/providers/SysPropSSLCredentialProviderTest.java b/solr/core/src/test/org/apache/solr/util/configuration/providers/SysPropSSLCredentialProviderTest.java index 4a5894d786b..7e60acf1c8a 100644 --- a/solr/core/src/test/org/apache/solr/util/configuration/providers/SysPropSSLCredentialProviderTest.java +++ b/solr/core/src/test/org/apache/solr/util/configuration/providers/SysPropSSLCredentialProviderTest.java @@ -17,8 +17,11 @@ package org.apache.solr.util.configuration.providers; -import java.util.Map; +import static org.apache.solr.util.configuration.providers.AbstractSSLCredentialProvider.DEFAULT_CREDENTIAL_KEY_MAP; +import static org.hamcrest.core.Is.is; +import static org.junit.Assert.assertThat; +import java.util.Map; import org.apache.lucene.util.TestRuleRestoreSystemProperties; import org.apache.solr.util.configuration.SSLConfigurations; import org.apache.solr.util.configuration.SSLCredentialProvider; @@ -26,34 +29,29 @@ import org.junit.Test; import org.junit.rules.TestRule; -import static org.apache.solr.util.configuration.providers.AbstractSSLCredentialProvider.DEFAULT_CREDENTIAL_KEY_MAP; -import static org.hamcrest.core.Is.is; -import static org.junit.Assert.assertThat; - -/** - */ +/** */ public class SysPropSSLCredentialProviderTest { @Rule - public TestRule syspropRestore = new TestRuleRestoreSystemProperties( - SSLConfigurations.SysProps.SSL_KEY_STORE_PASSWORD, - SSLConfigurations.SysProps.SSL_TRUST_STORE_PASSWORD, - SSLConfigurations.SysProps.SSL_CLIENT_KEY_STORE_PASSWORD, - SSLConfigurations.SysProps.SSL_CLIENT_TRUST_STORE_PASSWORD - ); + public TestRule syspropRestore = + new TestRuleRestoreSystemProperties( + SSLConfigurations.SysProps.SSL_KEY_STORE_PASSWORD, + SSLConfigurations.SysProps.SSL_TRUST_STORE_PASSWORD, + SSLConfigurations.SysProps.SSL_CLIENT_KEY_STORE_PASSWORD, + SSLConfigurations.SysProps.SSL_CLIENT_TRUST_STORE_PASSWORD); @Test public void testGetCredentials() throws Exception { int 
cnt = 0; SysPropSSLCredentialProvider sut = new SysPropSSLCredentialProvider(); - for (Map.Entry set : DEFAULT_CREDENTIAL_KEY_MAP.entrySet()) { + for (Map.Entry set : + DEFAULT_CREDENTIAL_KEY_MAP.entrySet()) { String pw = "pw" + ++cnt; System.setProperty(set.getValue(), pw); assertThat(sut.getCredential(set.getKey()), is(pw)); } } - @Test public void testGetCredentialsWithoutSetup() throws Exception { SysPropSSLCredentialProvider sut = new SysPropSSLCredentialProvider(); diff --git a/solr/core/src/test/org/apache/solr/util/hll/BigEndianAscendingWordDeserializerTest.java b/solr/core/src/test/org/apache/solr/util/hll/BigEndianAscendingWordDeserializerTest.java index d7a4feda1ac..21782a9f132 100644 --- a/solr/core/src/test/org/apache/solr/util/hll/BigEndianAscendingWordDeserializerTest.java +++ b/solr/core/src/test/org/apache/solr/util/hll/BigEndianAscendingWordDeserializerTest.java @@ -16,168 +16,177 @@ */ package org.apache.solr.util.hll; -import java.util.Random; - +import static com.carrotsearch.randomizedtesting.RandomizedTest.*; +import java.util.Random; import org.apache.solr.SolrTestCase; import org.junit.Test; -import static com.carrotsearch.randomizedtesting.RandomizedTest.*; - -/** - * Unit and smoke tests for {@link BigEndianAscendingWordDeserializer}. - */ +/** Unit and smoke tests for {@link BigEndianAscendingWordDeserializer}. */ public class BigEndianAscendingWordDeserializerTest extends SolrTestCase { - /** - * Error checking tests for constructor. - */ - @Test - public void constructorErrorTest() { - // word length too small - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - new BigEndianAscendingWordDeserializer(0/*wordLength, below minimum of 1*/, 0/*bytePadding, arbitrary*/, new byte[1]/*bytes, arbitrary, not used here*/); - }); - assertTrue(e.getMessage().contains("Word length must be")); - - // word length too large - e = expectThrows(IllegalArgumentException.class, () -> { - new BigEndianAscendingWordDeserializer(65/*wordLength, above maximum of 64*/, 0/*bytePadding, arbitrary*/, new byte[1]/*bytes, arbitrary, not used here*/); - }); - assertTrue(e.getMessage().contains("Word length must be")); - - // byte padding negative - e = expectThrows(IllegalArgumentException.class, () -> { - new BigEndianAscendingWordDeserializer(5/*wordLength, arbitrary*/, -1/*bytePadding, too small*/, new byte[1]/*bytes, arbitrary, not used here*/); - }); - assertTrue(e.getMessage().contains("Byte padding must be")); + /** Error checking tests for constructor. 
*/ + @Test + public void constructorErrorTest() { + // word length too small + IllegalArgumentException e = + expectThrows( + IllegalArgumentException.class, + () -> { + new BigEndianAscendingWordDeserializer( + 0 /*wordLength, below minimum of 1*/, + 0 /*bytePadding, arbitrary*/, + new byte[1] /*bytes, arbitrary, not used here*/); + }); + assertTrue(e.getMessage().contains("Word length must be")); + + // word length too large + e = + expectThrows( + IllegalArgumentException.class, + () -> { + new BigEndianAscendingWordDeserializer( + 65 /*wordLength, above maximum of 64*/, + 0 /*bytePadding, arbitrary*/, + new byte[1] /*bytes, arbitrary, not used here*/); + }); + assertTrue(e.getMessage().contains("Word length must be")); + + // byte padding negative + e = + expectThrows( + IllegalArgumentException.class, + () -> { + new BigEndianAscendingWordDeserializer( + 5 /*wordLength, arbitrary*/, + -1 /*bytePadding, too small*/, + new byte[1] /*bytes, arbitrary, not used here*/); + }); + assertTrue(e.getMessage().contains("Byte padding must be")); + } + + /** Smoke test using 64-bit short words and special word values. */ + @Test + public void smokeTest64BitWord() { + final BigEndianAscendingWordSerializer serializer = + new BigEndianAscendingWordSerializer( + 64 /*wordLength*/, 5 /*wordCount*/, 0 /*bytePadding, arbitrary*/); + + // Check that the sign bit is being preserved. + serializer.writeWord(-1L); + serializer.writeWord(-112894714L); + + // Check "special" values + serializer.writeWord(0L); + serializer.writeWord(Long.MAX_VALUE); + serializer.writeWord(Long.MIN_VALUE); + + final byte[] bytes = serializer.getBytes(); + + final BigEndianAscendingWordDeserializer deserializer = + new BigEndianAscendingWordDeserializer(64 /*wordLength*/, 0 /*bytePadding*/, bytes); + + assertEquals(deserializer.totalWordCount(), 5 /*wordCount*/); + + assertEquals(deserializer.readWord(), -1L); + assertEquals(deserializer.readWord(), -112894714L); + assertEquals(deserializer.readWord(), 0L); + assertEquals(deserializer.readWord(), Long.MAX_VALUE); + assertEquals(deserializer.readWord(), Long.MIN_VALUE); + } + + /** A smoke/fuzz test for ascending (from zero) word values. */ + @Test + public void ascendingSmokeTest() { + for (int wordLength = 5; wordLength < 65; wordLength++) { + runAscendingTest(wordLength, 3 /*bytePadding, arbitrary*/, 100000 /*wordCount, arbitrary*/); } + } - /** - * Smoke test using 64-bit short words and special word values. - */ - @Test - public void smokeTest64BitWord() { - final BigEndianAscendingWordSerializer serializer = - new BigEndianAscendingWordSerializer(64/*wordLength*/, - 5/*wordCount*/, - 0/*bytePadding, arbitrary*/); - - // Check that the sign bit is being preserved. - serializer.writeWord(-1L); - serializer.writeWord(-112894714L); - - // Check "special" values - serializer.writeWord(0L); - serializer.writeWord(Long.MAX_VALUE); - serializer.writeWord(Long.MIN_VALUE); - - final byte[] bytes = serializer.getBytes(); - - final BigEndianAscendingWordDeserializer deserializer = - new BigEndianAscendingWordDeserializer(64/*wordLength*/, 0/*bytePadding*/, bytes); - - assertEquals(deserializer.totalWordCount(), 5/*wordCount*/); - - assertEquals(deserializer.readWord(), -1L); - assertEquals(deserializer.readWord(), -112894714L); - assertEquals(deserializer.readWord(), 0L); - assertEquals(deserializer.readWord(), Long.MAX_VALUE); - assertEquals(deserializer.readWord(), Long.MIN_VALUE); + /** A smoke/fuzz test for random word values. 
*/ + @Test + public void randomSmokeTest() { + for (int wordLength = 5; wordLength < 65; wordLength++) { + runRandomTest(wordLength, 3 /*bytePadding, arbitrary*/, 100000 /*wordCount, arbitrary*/); + } + } + + // ------------------------------------------------------------------------ + /** + * Runs a test which serializes and deserializes random word values. + * + * @param wordLength the length of words to test + * @param bytePadding the number of bytes padding the byte array + * @param wordCount the number of word values to test + */ + private static void runRandomTest( + final int wordLength, final int bytePadding, final int wordCount) { + final long seed = randomLong(); + final Random random = new Random(seed); + final Random verificationRandom = new Random(seed); + + final long wordMask; + if (wordLength == 64) { + wordMask = ~0L; + } else { + wordMask = (1L << wordLength) - 1L; } - /** - * A smoke/fuzz test for ascending (from zero) word values. - */ - @Test - public void ascendingSmokeTest() { - for(int wordLength=5; wordLength<65; wordLength++) { - runAscendingTest(wordLength, 3/*bytePadding, arbitrary*/, 100000/*wordCount, arbitrary*/); - } + final BigEndianAscendingWordSerializer serializer = + new BigEndianAscendingWordSerializer( + wordLength /*wordLength, arbitrary*/, + wordCount, + bytePadding /*bytePadding, arbitrary*/); + + for (int i = 0; i < wordCount; i++) { + final long value = random.nextLong() & wordMask; + serializer.writeWord(value); } - /** - * A smoke/fuzz test for random word values. - */ - @Test - public void randomSmokeTest() { - for(int wordLength=5; wordLength<65; wordLength++) { - runRandomTest(wordLength, 3/*bytePadding, arbitrary*/, 100000/*wordCount, arbitrary*/); - } + final byte[] bytes = serializer.getBytes(); + + final BigEndianAscendingWordDeserializer deserializer = + new BigEndianAscendingWordDeserializer(wordLength, bytePadding, bytes); + + assertEquals(deserializer.totalWordCount(), wordCount); + for (int i = 0; i < wordCount; i++) { + assertEquals(deserializer.readWord(), (verificationRandom.nextLong() & wordMask)); + } + } + + /** + * Runs a test which serializes and deserializes ascending (from zero) word values. + * + * @param wordLength the length of words to test + * @param bytePadding the number of bytes padding the byte array + * @param wordCount the number of word values to test + */ + private static void runAscendingTest( + final int wordLength, final int bytePadding, final int wordCount) { + final long wordMask; + if (wordLength == 64) { + wordMask = ~0L; + } else { + wordMask = (1L << wordLength) - 1L; } - // ------------------------------------------------------------------------ - /** - * Runs a test which serializes and deserializes random word values. 
- * - * @param wordLength the length of words to test - * @param bytePadding the number of bytes padding the byte array - * @param wordCount the number of word values to test - */ - private static void runRandomTest(final int wordLength, final int bytePadding, final int wordCount) { - final long seed = randomLong(); - final Random random = new Random(seed); - final Random verificationRandom = new Random(seed); - - final long wordMask; - if(wordLength == 64) { - wordMask = ~0L; - } else { - wordMask = (1L << wordLength) - 1L; - } - - final BigEndianAscendingWordSerializer serializer = - new BigEndianAscendingWordSerializer(wordLength/*wordLength, arbitrary*/, - wordCount, - bytePadding/*bytePadding, arbitrary*/); - - for(int i=0; i { - new BigEndianAscendingWordSerializer(0/*wordLength, below minimum of 1*/, 1/*wordCount, arbitrary*/, 0/*bytePadding, arbitrary*/);; - }); - assertTrue(e.getMessage().contains("Word length must be")); - - // word length too large - e = expectThrows(IllegalArgumentException.class, () -> { - new BigEndianAscendingWordSerializer(65/*wordLength, above max of 64*/, 1/*wordCount, arbitrary*/, 0/*bytePadding, arbitrary*/); - }); - assertTrue(e.getMessage().contains("Word length must be")); - - // word count negative - e = expectThrows(IllegalArgumentException.class, () -> { - new BigEndianAscendingWordSerializer(5/*wordLength, arbitrary*/, -1/*wordCount, too small*/, 0/*bytePadding, arbitrary*/); - }); - assertTrue(e.getMessage().contains("Word count must be")); - - // byte padding negative - e = expectThrows(IllegalArgumentException.class, () -> { - new BigEndianAscendingWordSerializer(5/*wordLength, arbitrary*/, 1/*wordCount, arbitrary*/, -1/*bytePadding, too small*/); - }); - assertTrue(e.getMessage().contains("Byte padding must be")); + /** Error checking tests for constructor. */ + @Test + public void constructorErrorTest() { + // word length too small + IllegalArgumentException e = + expectThrows( + IllegalArgumentException.class, + () -> { + new BigEndianAscendingWordSerializer( + 0 /*wordLength, below minimum of 1*/, + 1 /*wordCount, arbitrary*/, + 0 /*bytePadding, arbitrary*/); + ; + }); + assertTrue(e.getMessage().contains("Word length must be")); + + // word length too large + e = + expectThrows( + IllegalArgumentException.class, + () -> { + new BigEndianAscendingWordSerializer( + 65 /*wordLength, above max of 64*/, + 1 /*wordCount, arbitrary*/, + 0 /*bytePadding, arbitrary*/); + }); + assertTrue(e.getMessage().contains("Word length must be")); + + // word count negative + e = + expectThrows( + IllegalArgumentException.class, + () -> { + new BigEndianAscendingWordSerializer( + 5 /*wordLength, arbitrary*/, + -1 /*wordCount, too small*/, + 0 /*bytePadding, arbitrary*/); + }); + assertTrue(e.getMessage().contains("Word count must be")); + + // byte padding negative + e = + expectThrows( + IllegalArgumentException.class, + () -> { + new BigEndianAscendingWordSerializer( + 5 /*wordLength, arbitrary*/, + 1 /*wordCount, arbitrary*/, + -1 /*bytePadding, too small*/); + }); + assertTrue(e.getMessage().contains("Byte padding must be")); + } + + /** + * Tests runtime exception thrown at premature call to {@link + * BigEndianAscendingWordSerializer#getBytes()}. 
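For orientation, the packing rule that the serializer/deserializer pairs in these tests pin down can be stated in a few lines. This is a minimal sketch under the big-endian ascending convention the tests assert, not the class under test; the pack helper and its bit-cursor approach are illustrative assumptions.

    // Write each word most-significant-bit first, immediately after
    // bytePadding zero bytes, with no per-word alignment (words may
    // straddle byte boundaries).
    static byte[] pack(int wordLength, int bytePadding, long[] words) {
      final int totalBits = wordLength * words.length;
      final byte[] out = new byte[bytePadding + ((totalBits + 7) / 8)];
      long bitPos = 8L * bytePadding; // cursor starts just past the padding
      for (final long word : words) {
        for (int b = wordLength - 1; b >= 0; b--) {
          if (((word >>> b) & 1L) == 1L) {
            out[(int) (bitPos >>> 3)] |= (byte) (0x80 >>> (bitPos & 7));
          }
          bitPos++;
        }
      }
      return out;
    }

Under this rule, the 5-bit words 9, 31, 1 flatten to 01001 11111 00001 plus zero bit-padding, i.e. bytes {79, -62}, which is exactly what smokeTestProbabilisticParams below expects.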
+ */ + @Test + public void earlyGetBytesTest() { + final BigEndianAscendingWordSerializer serializer = + new BigEndianAscendingWordSerializer( + 5 /*wordLength, arbitrary*/, 1 /*wordCount*/, 0 /*bytePadding, arbitrary*/); + + // getBytes without enough writeWord should throw + RuntimeException e = expectThrows(RuntimeException.class, serializer::getBytes); + assertTrue(e.getMessage().contains("Not all words")); + } + + /** */ + @Test + public void smokeTestExplicitParams() { + final int shortWordLength = 64 /*longs used in LongSetSlab*/; + + { // Should work on an empty sequence, with no padding. + final BigEndianAscendingWordSerializer serializer = + new BigEndianAscendingWordSerializer( + shortWordLength, 0 /*wordCount*/, 0 /*bytePadding, none*/); + + assert (Arrays.equals(serializer.getBytes(), new byte[0])); } - - /** - * Tests runtime exception thrown at premature call to {@link BigEndianAscendingWordSerializer#getBytes()}. - */ - @Test - public void earlyGetBytesTest() { - final BigEndianAscendingWordSerializer serializer = - new BigEndianAscendingWordSerializer(5/*wordLength, arbitrary*/, - 1/*wordCount*/, - 0/*bytePadding, arbitrary*/); - - // getBytes without enough writeWord should throw - RuntimeException e = expectThrows(RuntimeException.class, serializer::getBytes); - assertTrue(e.getMessage().contains("Not all words")); + { // Should work on a byte-divisible sequence, with no padding. + final BigEndianAscendingWordSerializer serializer = + new BigEndianAscendingWordSerializer( + shortWordLength, 2 /*wordCount*/, 0 /*bytePadding, none*/); + + serializer.writeWord(0xBAAAAAAAAAAAAAACL); + serializer.writeWord(0x8FFFFFFFFFFFFFF1L); + + // Bytes: + // ====== + // 0xBA 0xAA 0xAA 0xAA 0xAA 0xAA 0xAA 0xAC + // 0x8F 0xFF 0xFF 0xFF 0xFF 0xFF 0xFF 0xF1 + // + // -70 -86 ... -84 + // -113 -1 ... -15 + final byte[] bytes = serializer.getBytes(); + final byte[] expectedBytes = + new byte[] { + -70, -86, -86, -86, -86, -86, -86, -84, + -113, -1, -1, -1, -1, -1, -1, -15 + }; + assertTrue(Arrays.equals(bytes, expectedBytes)); } - - /** - */ - @Test - public void smokeTestExplicitParams() { - final int shortWordLength = 64/*longs used in LongSetSlab*/; - - {// Should work on an empty sequence, with no padding. - final BigEndianAscendingWordSerializer serializer = - new BigEndianAscendingWordSerializer(shortWordLength, - 0/*wordCount*/, - 0/*bytePadding, none*/); - - assert(Arrays.equals(serializer.getBytes(), new byte[0])); - } - {// Should work on a byte-divisible sequence, with no padding. - final BigEndianAscendingWordSerializer serializer = - new BigEndianAscendingWordSerializer(shortWordLength, - 2/*wordCount*/, - 0/*bytePadding, none*/); - - serializer.writeWord(0xBAAAAAAAAAAAAAACL); - serializer.writeWord(0x8FFFFFFFFFFFFFF1L); - - // Bytes: - // ====== - // 0xBA 0xAA 0xAA 0xAA 0xAA 0xAA 0xAA 0xAC - // 0x8F 0xFF 0xFF 0xFF 0xFF 0xFF 0xFF 0xF1 - // - // -70 -86 ... -84 - // -113 -1 ... -15 - final byte[] bytes = serializer.getBytes(); - final byte[] expectedBytes = new byte[] { -70, -86, -86, -86, -86, -86, -86, -84, - -113, -1, -1, -1, -1, -1, -1, -15 }; - assertTrue(Arrays.equals(bytes, expectedBytes)); - } - {// Should pad the array correctly. 
- final BigEndianAscendingWordSerializer serializer = - new BigEndianAscendingWordSerializer(shortWordLength, - 1/*wordCount*/, - 1/*bytePadding*/); - - serializer.writeWord(1); - // 1 byte leading padding | value 1 | trailing padding - // 0000 0000 | 0000 0000 0000 0000 0000 0000 0000 0000 0000 0000 0000 0000 0000 0000 0000 0001 - // 0x00 | 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x01 - final byte[] bytes = serializer.getBytes(); - final byte[] expectedBytes = new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 1 }; - assertTrue(Arrays.equals(bytes, expectedBytes)); - } + { // Should pad the array correctly. + final BigEndianAscendingWordSerializer serializer = + new BigEndianAscendingWordSerializer(shortWordLength, 1 /*wordCount*/, 1 /*bytePadding*/); + + serializer.writeWord(1); + // 1 byte leading padding | value 1 | trailing padding + // 0000 0000 | 0000 0000 0000 0000 0000 0000 0000 0000 0000 0000 0000 0000 0000 0000 0000 0001 + // 0x00 | 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x01 + final byte[] bytes = serializer.getBytes(); + final byte[] expectedBytes = new byte[] {0, 0, 0, 0, 0, 0, 0, 0, 1}; + assertTrue(Arrays.equals(bytes, expectedBytes)); } - - /** - * Smoke test for typical parameters used in practice. - */ - @Test - public void smokeTestProbabilisticParams() { - // XXX: revisit this - final int shortWordLength = 5; - {// Should work on an empty sequence, with no padding. - final BigEndianAscendingWordSerializer serializer = - new BigEndianAscendingWordSerializer(shortWordLength, - 0/*wordCount*/, - 0/*bytePadding, none*/); - - assert(Arrays.equals(serializer.getBytes(), new byte[0])); - } - {// Should work on a non-byte-divisible sequence, with no padding. - final BigEndianAscendingWordSerializer serializer = - new BigEndianAscendingWordSerializer(shortWordLength, - 3/*wordCount*/, - 0/*bytePadding, none*/); - - serializer.writeWord(9); - serializer.writeWord(31); - serializer.writeWord(1); - - // The values: - // ----------- - // 9 |31 |1 |padding - - // Corresponding bits: - // ------------------ - // 0100 1|111 11|00 001|0 - - // And the hex/decimal (remember Java bytes are signed): - // ----------------------------------------------------- - // 0100 1111 -> 0x4F -> 79 - // 1100 0010 -> 0xC2 -> -62 - - final byte[] bytes = serializer.getBytes(); - final byte[] expectedBytes = new byte[] { 79, -62 }; - assertTrue(Arrays.equals(bytes, expectedBytes)); - } - {// Should work on a byte-divisible sequence, with no padding. - final BigEndianAscendingWordSerializer serializer = - new BigEndianAscendingWordSerializer(shortWordLength, - 8/*wordCount*/, - 0/*bytePadding, none*/); - - for(int i=1; i<9; i++) { - serializer.writeWord(i); - } - - // Values: 1-8 - // Corresponding bits: - // ------------------ - // 00001 - // 00010 - // 00011 - // 00100 - // 00101 - // 00110 - // 00111 - // 01000 - - // And the hex: - // ------------ - // 0000 1000 => 0x08 => 8 - // 1000 0110 => 0x86 => -122 - // 0100 0010 => 0x62 => 66 - // 1001 1000 => 0x98 => -104 - // 1110 1000 => 0xE8 => -24 - - final byte[] bytes = serializer.getBytes(); - final byte[] expectedBytes = new byte[] { 8, -122, 66, -104, -24 }; - assertTrue(Arrays.equals(bytes, expectedBytes)); - } - {// Should pad the array correctly. 
- final BigEndianAscendingWordSerializer serializer = - new BigEndianAscendingWordSerializer(shortWordLength, - 1/*wordCount*/, - 1/*bytePadding*/); - - serializer.writeWord(1); - // 1 byte leading padding | value 1 | trailing padding - // 0000 0000 | 0000 1|000 - final byte[] bytes = serializer.getBytes(); - final byte[] expectedBytes = new byte[] { 0, 8 }; - assertTrue(Arrays.equals(bytes, expectedBytes)); - } + } + + /** Smoke test for typical parameters used in practice. */ + @Test + public void smokeTestProbabilisticParams() { + // XXX: revisit this + final int shortWordLength = 5; + { // Should work on an empty sequence, with no padding. + final BigEndianAscendingWordSerializer serializer = + new BigEndianAscendingWordSerializer( + shortWordLength, 0 /*wordCount*/, 0 /*bytePadding, none*/); + + assert (Arrays.equals(serializer.getBytes(), new byte[0])); } - - /** - * Smoke test for typical parameters used in practice. - */ - @Test - public void smokeTestSparseParams() { - // XXX: revisit - final int shortWordLength = 17; - {// Should work on an empty sequence, with no padding. - final BigEndianAscendingWordSerializer serializer = - new BigEndianAscendingWordSerializer(shortWordLength, - 0/*wordCount*/, - 0/*bytePadding, none*/); - - assert(Arrays.equals(serializer.getBytes(), new byte[0])); - } - {// Should work on a non-byte-divisible sequence, with no padding. - final BigEndianAscendingWordSerializer serializer = - new BigEndianAscendingWordSerializer(shortWordLength, - 3/*wordCount*/, - 0/*bytePadding, none*/); - - serializer.writeWord(9); - serializer.writeWord(42); - serializer.writeWord(75); - - // The values: - // ----------- - // 9 |42 |75 |padding - - // Corresponding bits: - // ------------------ - // 0000 0000 0000 0100 1|000 0000 0000 1010 10|00 0000 0000 1001 011|0 0000 - - // And the hex/decimal (remember Java bytes are signed): - // ----------------------------------------------------- - // 0000 0000 -> 0x00 -> 0 - // 0000 0100 -> 0x04 -> 4 - // 1000 0000 -> 0x80 -> -128 - // 0000 1010 -> 0x0A -> 10 - // 1000 0000 -> 0x80 -> -128 - // 0000 1001 -> 0x09 -> 9 - // 0110 0000 -> 0x60 -> 96 - - final byte[] bytes = serializer.getBytes(); - final byte[] expectedBytes = new byte[] { 0, 4, -128, 10, -128, 9, 96 }; - assertTrue(Arrays.equals(bytes, expectedBytes)); - } - {// Should work on a byte-divisible sequence, with no padding. 
- final BigEndianAscendingWordSerializer serializer = - new BigEndianAscendingWordSerializer(shortWordLength, - 8/*wordCount*/, - 0/*bytePadding, none*/); - - for(int i=1; i<9; i++) { - serializer.writeWord(i); - } - - // Values: 1-8 - // Corresponding bits: - // ------------------ - // 0000 0000 0000 0000 1 - // 000 0000 0000 0000 10 - // 00 0000 0000 0000 011 - // 0 0000 0000 0000 0100 - - // 0000 0000 0000 0010 1 - // 000 0000 0000 0001 10 - // 00 0000 0000 0000 111 - // 0 0000 0000 0000 1000 - - // And the hex: - // ------------ - // 0000 0000 -> 0x00 -> 0 - // 0000 0000 -> 0x00 -> 0 - // 1000 0000 -> 0x80 -> -128 - // 0000 0000 -> 0x00 -> 0 - // 1000 0000 -> 0x80 -> -128 - // 0000 0000 -> 0x00 -> 0 - // 0110 0000 -> 0x60 -> 96 - // 0000 0000 -> 0x00 -> 0 - // 0100 0000 -> 0x40 -> 64 - // 0000 0000 -> 0x00 -> 0 - // 0010 1000 -> 0x28 -> 40 - // 0000 0000 -> 0x00 -> 0 - // 0001 1000 -> 0x18 -> 24 - // 0000 0000 -> 0x00 -> 0 - // 0000 1110 -> 0x0D -> 14 - // 0000 0000 -> 0x00 -> 0 - // 0000 1000 -> 0x08 -> 8 - - final byte[] bytes = serializer.getBytes(); - final byte[] expectedBytes = new byte[] { 0, 0, -128, 0, -128, 0, 96, 0, 64, 0, 40, 0, 24, 0, 14, 0, 8 }; - assertTrue(Arrays.equals(bytes, expectedBytes)); - } - {// Should pad the array correctly. - final BigEndianAscendingWordSerializer serializer = - new BigEndianAscendingWordSerializer(shortWordLength, - 1/*wordCount*/, - 1/*bytePadding*/); - - serializer.writeWord(1); - // 1 byte leading padding | value 1 | trailing padding - // 0000 0000 | 0000 0000 0000 0000 1|000 0000 - // 0x00 0x00 0x00 0x80 - final byte[] bytes = serializer.getBytes(); - final byte[] expectedBytes = new byte[] { 0, 0, 0, -128 }; - assertTrue(Arrays.equals(bytes, expectedBytes)); - } + { // Should work on a non-byte-divisible sequence, with no padding. + final BigEndianAscendingWordSerializer serializer = + new BigEndianAscendingWordSerializer( + shortWordLength, 3 /*wordCount*/, 0 /*bytePadding, none*/); + + serializer.writeWord(9); + serializer.writeWord(31); + serializer.writeWord(1); + + // The values: + // ----------- + // 9 |31 |1 |padding + + // Corresponding bits: + // ------------------ + // 0100 1|111 11|00 001|0 + + // And the hex/decimal (remember Java bytes are signed): + // ----------------------------------------------------- + // 0100 1111 -> 0x4F -> 79 + // 1100 0010 -> 0xC2 -> -62 + + final byte[] bytes = serializer.getBytes(); + final byte[] expectedBytes = new byte[] {79, -62}; + assertTrue(Arrays.equals(bytes, expectedBytes)); + } + { // Should work on a byte-divisible sequence, with no padding. + final BigEndianAscendingWordSerializer serializer = + new BigEndianAscendingWordSerializer( + shortWordLength, 8 /*wordCount*/, 0 /*bytePadding, none*/); + + for (int i = 1; i < 9; i++) { + serializer.writeWord(i); + } + + // Values: 1-8 + // Corresponding bits: + // ------------------ + // 00001 + // 00010 + // 00011 + // 00100 + // 00101 + // 00110 + // 00111 + // 01000 + + // And the hex: + // ------------ + // 0000 1000 => 0x08 => 8 + // 1000 0110 => 0x86 => -122 + // 0100 0010 => 0x42 => 66 + // 1001 1000 => 0x98 => -104 + // 1110 1000 => 0xE8 => -24 + + final byte[] bytes = serializer.getBytes(); + final byte[] expectedBytes = new byte[] {8, -122, 66, -104, -24}; + assertTrue(Arrays.equals(bytes, expectedBytes)); + } + { // Should pad the array correctly.
+ final BigEndianAscendingWordSerializer serializer = + new BigEndianAscendingWordSerializer(shortWordLength, 1 /*wordCount*/, 1 /*bytePadding*/); + + serializer.writeWord(1); + // 1 byte leading padding | value 1 | trailing padding + // 0000 0000 | 0000 1|000 + final byte[] bytes = serializer.getBytes(); + final byte[] expectedBytes = new byte[] {0, 8}; + assertTrue(Arrays.equals(bytes, expectedBytes)); + } + } + + /** Smoke test for typical parameters used in practice. */ + @Test + public void smokeTestSparseParams() { + // XXX: revisit + final int shortWordLength = 17; + { // Should work on an empty sequence, with no padding. + final BigEndianAscendingWordSerializer serializer = + new BigEndianAscendingWordSerializer( + shortWordLength, 0 /*wordCount*/, 0 /*bytePadding, none*/); + + assert (Arrays.equals(serializer.getBytes(), new byte[0])); + } + { // Should work on a non-byte-divisible sequence, with no padding. + final BigEndianAscendingWordSerializer serializer = + new BigEndianAscendingWordSerializer( + shortWordLength, 3 /*wordCount*/, 0 /*bytePadding, none*/); + + serializer.writeWord(9); + serializer.writeWord(42); + serializer.writeWord(75); + + // The values: + // ----------- + // 9 |42 |75 |padding + + // Corresponding bits: + // ------------------ + // 0000 0000 0000 0100 1|000 0000 0000 1010 10|00 0000 0000 1001 011|0 0000 + + // And the hex/decimal (remember Java bytes are signed): + // ----------------------------------------------------- + // 0000 0000 -> 0x00 -> 0 + // 0000 0100 -> 0x04 -> 4 + // 1000 0000 -> 0x80 -> -128 + // 0000 1010 -> 0x0A -> 10 + // 1000 0000 -> 0x80 -> -128 + // 0000 1001 -> 0x09 -> 9 + // 0110 0000 -> 0x60 -> 96 + + final byte[] bytes = serializer.getBytes(); + final byte[] expectedBytes = new byte[] {0, 4, -128, 10, -128, 9, 96}; + assertTrue(Arrays.equals(bytes, expectedBytes)); + } + { // Should work on a byte-divisible sequence, with no padding. + final BigEndianAscendingWordSerializer serializer = + new BigEndianAscendingWordSerializer( + shortWordLength, 8 /*wordCount*/, 0 /*bytePadding, none*/); + + for (int i = 1; i < 9; i++) { + serializer.writeWord(i); + } + + // Values: 1-8 + // Corresponding bits: + // ------------------ + // 0000 0000 0000 0000 1 + // 000 0000 0000 0000 10 + // 00 0000 0000 0000 011 + // 0 0000 0000 0000 0100 + + // 0000 0000 0000 0010 1 + // 000 0000 0000 0001 10 + // 00 0000 0000 0000 111 + // 0 0000 0000 0000 1000 + + // And the hex: + // ------------ + // 0000 0000 -> 0x00 -> 0 + // 0000 0000 -> 0x00 -> 0 + // 1000 0000 -> 0x80 -> -128 + // 0000 0000 -> 0x00 -> 0 + // 1000 0000 -> 0x80 -> -128 + // 0000 0000 -> 0x00 -> 0 + // 0110 0000 -> 0x60 -> 96 + // 0000 0000 -> 0x00 -> 0 + // 0100 0000 -> 0x40 -> 64 + // 0000 0000 -> 0x00 -> 0 + // 0010 1000 -> 0x28 -> 40 + // 0000 0000 -> 0x00 -> 0 + // 0001 1000 -> 0x18 -> 24 + // 0000 0000 -> 0x00 -> 0 + // 0000 1110 -> 0x0E -> 14 + // 0000 0000 -> 0x00 -> 0 + // 0000 1000 -> 0x08 -> 8 + + final byte[] bytes = serializer.getBytes(); + final byte[] expectedBytes = + new byte[] {0, 0, -128, 0, -128, 0, 96, 0, 64, 0, 40, 0, 24, 0, 14, 0, 8}; + assertTrue(Arrays.equals(bytes, expectedBytes)); + } + { // Should pad the array correctly.
+ final BigEndianAscendingWordSerializer serializer = + new BigEndianAscendingWordSerializer(shortWordLength, 1 /*wordCount*/, 1 /*bytePadding*/); + + serializer.writeWord(1); + // 1 byte leading padding | value 1 | trailing padding + // 0000 0000 | 0000 0000 0000 0000 1|000 0000 + // 0x00 0x00 0x00 0x80 + final byte[] bytes = serializer.getBytes(); + final byte[] expectedBytes = new byte[] {0, 0, 0, -128}; + assertTrue(Arrays.equals(bytes, expectedBytes)); } + } } diff --git a/solr/core/src/test/org/apache/solr/util/hll/BitVectorTest.java b/solr/core/src/test/org/apache/solr/util/hll/BitVectorTest.java index 24d9366c3d1..832f1bc29cd 100644 --- a/solr/core/src/test/org/apache/solr/util/hll/BitVectorTest.java +++ b/solr/core/src/test/org/apache/solr/util/hll/BitVectorTest.java @@ -17,153 +17,146 @@ package org.apache.solr.util.hll; import java.util.Locale; - import org.apache.solr.SolrTestCase; import org.apache.solr.util.LongIterator; import org.junit.Test; -/** - * Unit tests for {@link BitVector}. - */ +/** Unit tests for {@link BitVector}. */ public class BitVectorTest extends SolrTestCase { - /** - * Tests {@link BitVector#getRegister(long)} and {@link BitVector#setRegister(long, long)}. - */ - @Test - public void getSetRegisterTest() { - { // locally scoped for sanity - // NOTE: registers are only 5bits wide - final BitVector vector1 = new BitVector(5/*width*/, 128/*count, 2^7*/); - final BitVector vector2 = new BitVector(5/*width*/, 128/*count, 2^7*/); - final BitVector vector3 = new BitVector(5/*width*/, 128/*count, 2^7*/); - final BitVector vector4 = new BitVector(5/*width*/, 128/*count, 2^7*/); - - for(int i=0; i<128/*2^7*/; i++) { - vector1.setRegister(i, 0x1F); - vector2.setRegister(i, (i & 0x1F)); - vector3.setRegister(i, ((127 - i) & 0x1F)); - vector4.setRegister(i, 0x15); - } - - for(int i=0; i<128/*2^7*/; i++) { - assertEquals(vector1.getRegister(i), 0x1F); - assertEquals(vector2.getRegister(i), (i & 0x1F)); - assertEquals(vector3.getRegister(i), ((127 - i) & 0x1F)); - assertEquals(vector4.getRegister(i), 0x15); - } - } + /** Tests {@link BitVector#getRegister(long)} and {@link BitVector#setRegister(long, long)}. 
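The BitVector accessors exercised below are fixed-width bit-field reads and writes over an array of 64-bit words. A sketch of the read path, assuming registers are packed least-significant-bit first within each word (the layout is an assumption for illustration; BitVector's actual encoding is internal to it):

    static long getRegister(long[] words, int width, long regnum) {
      final long bit = regnum * width;    // absolute bit offset of the register
      final int word = (int) (bit >>> 6); // containing 64-bit word
      final int off = (int) (bit & 63);   // offset within that word
      final long mask = (width == 64) ? ~0L : ((1L << width) - 1);
      long value = words[word] >>> off;
      if (off + width > 64) {             // register straddles two words
        value |= words[word + 1] << (64 - off);
      }
      return value & mask;
    }

The tests below use width 5 with 128 registers, so registers regularly straddle the 64-bit word boundary; that straddling case is the one the iterator tests are designed to probe.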
*/ + @Test + public void getSetRegisterTest() { + { // locally scoped for sanity + // NOTE: registers are only 5bits wide + final BitVector vector1 = new BitVector(5 /*width*/, 128 /*count, 2^7*/); + final BitVector vector2 = new BitVector(5 /*width*/, 128 /*count, 2^7*/); + final BitVector vector3 = new BitVector(5 /*width*/, 128 /*count, 2^7*/); + final BitVector vector4 = new BitVector(5 /*width*/, 128 /*count, 2^7*/); + + for (int i = 0; i < 128 /*2^7*/; i++) { + vector1.setRegister(i, 0x1F); + vector2.setRegister(i, (i & 0x1F)); + vector3.setRegister(i, ((127 - i) & 0x1F)); + vector4.setRegister(i, 0x15); + } + + for (int i = 0; i < 128 /*2^7*/; i++) { + assertEquals(vector1.getRegister(i), 0x1F); + assertEquals(vector2.getRegister(i), (i & 0x1F)); + assertEquals(vector3.getRegister(i), ((127 - i) & 0x1F)); + assertEquals(vector4.getRegister(i), 0x15); + } } - - // ======================================================================== - /** - * Tests {@link BitVector#registerIterator()} - */ - @Test - public void registerIteratorTest() { - { // scoped locally for sanity - // NOTE: registers are only 5bits wide - final BitVector vector1 = new BitVector(5/*width*/, 128/*count, 2^7*/); - final BitVector vector2 = new BitVector(5/*width*/, 128/*count, 2^7*/); - final BitVector vector3 = new BitVector(5/*width*/, 128/*count, 2^7*/); - final BitVector vector4 = new BitVector(5/*width*/, 128/*count, 2^7*/); - - for(int i=0; i<128/*2^7*/; i++) { - vector1.setRegister(i, 0x1F); - vector2.setRegister(i, (i & 0x1F)); - vector3.setRegister(i, ((127 - i) & 0x1F)); - vector4.setRegister(i, 0x15); - } - - final LongIterator registerIterator1 = vector1.registerIterator(); - final LongIterator registerIterator2 = vector2.registerIterator(); - final LongIterator registerIterator3 = vector3.registerIterator(); - final LongIterator registerIterator4 = vector4.registerIterator(); - for(int i=0; i<128/*2^7*/; i++) { - assertEquals(registerIterator1.hasNext(), true); - assertEquals(registerIterator2.hasNext(), true); - assertEquals(registerIterator3.hasNext(), true); - assertEquals(registerIterator4.hasNext(), true); - - assertEquals(registerIterator1.next(), 0x1F); - assertEquals(registerIterator2.next(), (i & 0x1F)); - assertEquals(registerIterator3.next(), ((127 - i) & 0x1F)); - assertEquals(registerIterator4.next(), 0x15); - } - assertEquals(registerIterator1.hasNext(), false/*no more*/); - assertEquals(registerIterator2.hasNext(), false/*no more*/); - assertEquals(registerIterator3.hasNext(), false/*no more*/); - assertEquals(registerIterator4.hasNext(), false/*no more*/); - } - - { // scoped locally for sanity - // Vectors that are shorter than one word - assertIterator(1, 12/* 1*12=12 bits, fewer than a single word */); - assertIterator(2, 12/* 2*12=24 bits, fewer than a single word */); - assertIterator(3, 12/* 3*12=36 bits, fewer than a single word */); - assertIterator(4, 12/* 4*12=48 bits, fewer than a single word */); - - // Vectors that don't fit exactly into longs - assertIterator(5, 16/* 5*16=80 bits */); - assertIterator(5, 32/* 5*32=160 bits */); - } - - // Iterate over vectors that are padded + } + + // ======================================================================== + /** Tests {@link BitVector#registerIterator()} */ + @Test + public void registerIteratorTest() { + { // scoped locally for sanity + // NOTE: registers are only 5bits wide + final BitVector vector1 = new BitVector(5 /*width*/, 128 /*count, 2^7*/); + final BitVector vector2 = new BitVector(5 /*width*/, 128 /*count, 
2^7*/); + final BitVector vector3 = new BitVector(5 /*width*/, 128 /*count, 2^7*/); + final BitVector vector4 = new BitVector(5 /*width*/, 128 /*count, 2^7*/); + + for (int i = 0; i < 128 /*2^7*/; i++) { + vector1.setRegister(i, 0x1F); + vector2.setRegister(i, (i & 0x1F)); + vector3.setRegister(i, ((127 - i) & 0x1F)); + vector4.setRegister(i, 0x15); + } + + final LongIterator registerIterator1 = vector1.registerIterator(); + final LongIterator registerIterator2 = vector2.registerIterator(); + final LongIterator registerIterator3 = vector3.registerIterator(); + final LongIterator registerIterator4 = vector4.registerIterator(); + for (int i = 0; i < 128 /*2^7*/; i++) { + assertEquals(registerIterator1.hasNext(), true); + assertEquals(registerIterator2.hasNext(), true); + assertEquals(registerIterator3.hasNext(), true); + assertEquals(registerIterator4.hasNext(), true); + + assertEquals(registerIterator1.next(), 0x1F); + assertEquals(registerIterator2.next(), (i & 0x1F)); + assertEquals(registerIterator3.next(), ((127 - i) & 0x1F)); + assertEquals(registerIterator4.next(), 0x15); + } + assertEquals(registerIterator1.hasNext(), false /*no more*/); + assertEquals(registerIterator2.hasNext(), false /*no more*/); + assertEquals(registerIterator3.hasNext(), false /*no more*/); + assertEquals(registerIterator4.hasNext(), false /*no more*/); } - private static void assertIterator(final int width, final int count) { - final BitVector vector = new BitVector(width, count); - final LongIterator iter = vector.registerIterator(); + { // scoped locally for sanity + // Vectors that are shorter than one word + assertIterator(1, 12 /* 1*12=12 bits, fewer than a single word */); + assertIterator(2, 12 /* 2*12=24 bits, fewer than a single word */); + assertIterator(3, 12 /* 3*12=36 bits, fewer than a single word */); + assertIterator(4, 12 /* 4*12=48 bits, fewer than a single word */); - for(int i=0; i canonical = new HashSet(); - final HLL hll = newHLL(explicitThreshold); - - for(int i=0;i explicitThreshold = 8*/, false/*sparseon*/, HLLType.EXPLICIT); - - for(int i=0;i<9/* > explicitThreshold */;i++){ - hll.addRaw(i); - } - assertEquals(hll.getType(), HLLType.FULL); - } + } + + // ------------------------------------------------------------------------ + /** Tests correctness against {@link java.util.HashSet}. */ + @Test + public void randomValuesTest() { + final int explicitThreshold = 4096; + final HashSet canonical = new HashSet(); + final HLL hll = newHLL(explicitThreshold); + + for (int i = 0; i < explicitThreshold; i++) { + long randomLong = randomLong(); + canonical.add(randomLong); + hll.addRaw(randomLong); } - - // ************************************************************************ - // assertion helpers - /** - * Asserts that values in both sets are exactly equal. - */ - private static void assertElementsEqual(final HLL hllA, final HLL hllB) { - final LongHashSet internalSetA = hllA.explicitStorage; - final LongHashSet internalSetB = hllB.explicitStorage; - - assertTrue(internalSetA.equals(internalSetB)); + final int canonicalCardinality = canonical.size(); + assertEquals(hll.cardinality(), canonicalCardinality); + } + + // ------------------------------------------------------------------------ + /** Tests promotion to {@link HLLType#SPARSE} and {@link HLLType#FULL}. 
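promotionTest below encodes the storage ladder: an EXPLICIT set of raw values is promoted once it outgrows its explicit threshold, to SPARSE when sparse storage is enabled and directly to FULL otherwise. A compact restatement of that policy (a sketch only; the helper name is hypothetical):

    static HLLType promoteFrom(int explicitSize, int explicitThreshold, boolean sparseOn) {
      if (explicitSize <= explicitThreshold) {
        return HLLType.EXPLICIT; // still within the exact-set budget
      }
      return sparseOn ? HLLType.SPARSE : HLLType.FULL;
    }

This matches the two scoped blocks below: 129 values against a threshold of 128 with sparse storage available yields SPARSE, while 9 values against an expthresh-derived threshold of 8 with sparse disabled yields FULL.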
*/ + @Test + public void promotionTest() { + { // locally scoped for sanity + final int explicitThreshold = 128; + final HLL hll = + new HLL( + 11 /*log2m, unused*/, + 5 /*regwidth, unused*/, + explicitThreshold, + 256 /*sparseThreshold*/, + HLLType.EXPLICIT); + + for (int i = 0; i < explicitThreshold + 1; i++) { + hll.addRaw(i); + } + assertEquals(hll.getType(), HLLType.SPARSE); } - - /** - * Builds a {@link HLLType#EXPLICIT} {@link HLL} instance with the specified - * explicit threshold. - * - * @param explicitThreshold explicit threshold to use for the constructed - * {@link HLL}. This must be greater than zero. - * @return a default-sized {@link HLLType#EXPLICIT} empty {@link HLL} instance. - * This will never be null. - */ - private static HLL newHLL(final int explicitThreshold) { - return new HLL(11/*log2m, unused*/, 5/*regwidth, unused*/, explicitThreshold, 256/*sparseThreshold, arbitrary, unused*/, HLLType.EXPLICIT); + { // locally scoped for sanity + final HLL hll = + new HLL( + 11 /*log2m, unused*/, + 5 /*regwidth, unused*/, + 4 /*expthresh => explicitThreshold = 8*/, + false /*sparseon*/, + HLLType.EXPLICIT); + + for (int i = 0; i < 9 /* > explicitThreshold */; i++) { + hll.addRaw(i); + } + assertEquals(hll.getType(), HLLType.FULL); } + } + + // ************************************************************************ + // assertion helpers + /** Asserts that values in both sets are exactly equal. */ + private static void assertElementsEqual(final HLL hllA, final HLL hllB) { + final LongHashSet internalSetA = hllA.explicitStorage; + final LongHashSet internalSetB = hllB.explicitStorage; + + assertTrue(internalSetA.equals(internalSetB)); + } + + /** + * Builds a {@link HLLType#EXPLICIT} {@link HLL} instance with the specified explicit threshold. + * + * @param explicitThreshold explicit threshold to use for the constructed {@link HLL}. This must + * be greater than zero. + * @return a default-sized {@link HLLType#EXPLICIT} empty {@link HLL} instance. This will never be + * null. + */ + private static HLL newHLL(final int explicitThreshold) { + return new HLL( + 11 /*log2m, unused*/, + 5 /*regwidth, unused*/, + explicitThreshold, + 256 /*sparseThreshold, arbitrary, unused*/, + HLLType.EXPLICIT); + } } diff --git a/solr/core/src/test/org/apache/solr/util/hll/FullHLLTest.java b/solr/core/src/test/org/apache/solr/util/hll/FullHLLTest.java index 0d2dd8e1988..521f32f47a9 100644 --- a/solr/core/src/test/org/apache/solr/util/hll/FullHLLTest.java +++ b/solr/core/src/test/org/apache/solr/util/hll/FullHLLTest.java @@ -20,322 +20,362 @@ import org.apache.solr.util.LongIterator; import org.junit.Test; -/** - * Tests {@link HLL} of type {@link HLLType#FULL}. - */ +/** Tests {@link HLL} of type {@link HLLType#FULL}. */ public class FullHLLTest extends SolrTestCase { - // TODO union test - /** - * Smoke test for {@link HLL#cardinality()} and the proper use of the - * small range correction. - */ - @Test - public void smallRangeSmokeTest() { - final int log2m = 11; - final int m = (1 << log2m); - final int regwidth = 5; - - // only one register set - { - final HLL hll = new HLL(log2m, regwidth, 128/*explicitThreshold, arbitrary, unused*/, 256/*sparseThreshold, arbitrary, unused*/, HLLType.FULL); - hll.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, 0/*ix*/, 1/*val*/)); - - final long cardinality = hll.cardinality(); - - // Trivially true that small correction conditions hold: one register - // set implies zeroes exist, and estimator trivially smaller than 5m/2. 
- // Small range correction: m * log(m/V) - final long expected = (long)Math.ceil(m * Math.log((double)m / (m - 1)/*# of zeroes*/)); - assertEquals(cardinality, expected); - } - - // all but one register set - { - final HLL hll = new HLL(log2m, regwidth, 128/*explicitThreshold, arbitrary, unused*/, 256/*sparseThreshold, arbitrary, unused*/, HLLType.FULL); - for(int i=0; i<(m - 1); i++) { - hll.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, i/*ix*/, 1/*val*/)); - } - - // Trivially true that small correction conditions hold: all but - // one register set implies a zero exists, and estimator trivially - // smaller than 5m/2 since it's alpha / ((m-1)/2) - final long cardinality = hll.cardinality(); - - // Small range correction: m * log(m/V) - final long expected = (long)Math.ceil(m * Math.log((double)m / 1/*# of zeroes*/)); - assertEquals(cardinality, expected); - } + // TODO union test + /** Smoke test for {@link HLL#cardinality()} and the proper use of the small range correction. */ + @Test + public void smallRangeSmokeTest() { + final int log2m = 11; + final int m = (1 << log2m); + final int regwidth = 5; + + // only one register set + { + final HLL hll = + new HLL( + log2m, + regwidth, + 128 /*explicitThreshold, arbitrary, unused*/, + 256 /*sparseThreshold, arbitrary, unused*/, + HLLType.FULL); + hll.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, 0 /*ix*/, 1 /*val*/)); + + final long cardinality = hll.cardinality(); + + // Trivially true that small correction conditions hold: one register + // set implies zeroes exist, and estimator trivially smaller than 5m/2. + // Small range correction: m * log(m/V) + final long expected = (long) Math.ceil(m * Math.log((double) m / (m - 1) /*# of zeroes*/)); + assertEquals(cardinality, expected); } - /** - * Smoke test for {@link HLL#cardinality()} and the proper use of the - * uncorrected estimator - */ - @Test - public void normalRangeSmokeTest() { - final int log2m = 11; - final int regwidth = 5; - // regwidth = 5, so hash space is - // log2m + (2^5 - 1 - 1), so L = log2m + 30 - final int l = log2m + 30; - final int m = (1 << log2m); - final HLL hll = new HLL(log2m, regwidth, 128/*explicitThreshold, arbitrary, unused*/, 256/*sparseThreshold, arbitrary, unused*/, HLLType.FULL); - - // all registers at 'medium' value - { - final int registerValue = 7/*chosen to ensure neither correction kicks in*/; - for(int i=0; i (5 * m /(double)2)); - - final long expected = (long)Math.ceil(estimator); - assertEquals(cardinality, expected); - } + // all but one register set + { + final HLL hll = + new HLL( + log2m, + regwidth, + 128 /*explicitThreshold, arbitrary, unused*/, + 256 /*sparseThreshold, arbitrary, unused*/, + HLLType.FULL); + for (int i = 0; i < (m - 1); i++) { + hll.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, i /*ix*/, 1 /*val*/)); + } + + // Trivially true that small correction conditions hold: all but + // one register set implies a zero exists, and estimator trivially + // smaller than 5m/2 since it's alpha / ((m-1)/2) + final long cardinality = hll.cardinality(); + + // Small range correction: m * log(m/V) + final long expected = (long) Math.ceil(m * Math.log((double) m / 1 /*# of zeroes*/)); + assertEquals(cardinality, expected); } - - /** - * Smoke test for {@link HLL#cardinality()} and the proper use of the large - * range correction. 
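Taken together, the small-, normal-, and large-range smoke tests pin down how the raw HyperLogLog estimate is post-processed. A sketch of that regime selection, using the formulas quoted in the surrounding comments (V is the number of zero-valued registers and twoToL is 2^L; the names and the standalone helper are illustrative, not HLL's internals):

    static double corrected(double rawEstimate, int m, int zeroRegisters, double twoToL) {
      if (rawEstimate <= 5.0 * m / 2.0 && zeroRegisters > 0) {
        return m * Math.log((double) m / zeroRegisters); // small range: m * log(m/V)
      }
      if (rawEstimate > twoToL / 30.0) {
        return -twoToL * Math.log(1.0 - rawEstimate / twoToL); // large range correction
      }
      return rawEstimate; // middle range: estimator used uncorrected
    }

Each smoke test drives every register to a single chosen value so that the raw estimate reduces to alphaMSquared(m) / (m / 2^value), making the selected branch, and therefore the expected cardinality, predictable in closed form.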
- */ - @Test - public void largeRangeSmokeTest() { - final int log2m = 12; - final int regwidth = 5; - // regwidth = 5, so hash space is - // log2m + (2^5 - 1 - 1), so L = log2m + 30 - final int l = log2m + 30; - final int m = (1 << log2m); - final HLL hll = new HLL(log2m, regwidth, 128/*explicitThreshold, arbitrary, unused*/, 256/*sparseThreshold, arbitrary, unused*/, HLLType.FULL); - - { - final int registerValue = 31/*chosen to ensure large correction kicks in*/; - for(int i=0; i Math.pow(2,l)/30); - - // Large range correction: -2^L * log(1 - E/2^L) - final long expected = (long)Math.ceil(-1.0 * Math.pow(2, l) * Math.log(1.0 - estimator/Math.pow(2, l))); - assertEquals(cardinality, expected); - } + } + + /** Smoke test for {@link HLL#cardinality()} and the proper use of the uncorrected estimator */ + @Test + public void normalRangeSmokeTest() { + final int log2m = 11; + final int regwidth = 5; + // regwidth = 5, so hash space is + // log2m + (2^5 - 1 - 1), so L = log2m + 30 + final int l = log2m + 30; + final int m = (1 << log2m); + final HLL hll = + new HLL( + log2m, + regwidth, + 128 /*explicitThreshold, arbitrary, unused*/, + 256 /*sparseThreshold, arbitrary, unused*/, + HLLType.FULL); + + // all registers at 'medium' value + { + final int registerValue = 7 /*chosen to ensure neither correction kicks in*/; + for (int i = 0; i < m; i++) { + hll.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, i, registerValue)); + } + + final long cardinality = hll.cardinality(); + + // Simplified estimator when all registers take same value: alpha / (m/2^val) + final double estimator = HLLUtil.alphaMSquared(m) / ((double) m / Math.pow(2, registerValue)); + + // Assert conditions for uncorrected range + assertTrue(estimator <= Math.pow(2, l) / 30); + assertTrue(estimator > (5 * m / (double) 2)); + + final long expected = (long) Math.ceil(estimator); + assertEquals(cardinality, expected); + } + } + + /** Smoke test for {@link HLL#cardinality()} and the proper use of the large range correction. */ + @Test + public void largeRangeSmokeTest() { + final int log2m = 12; + final int regwidth = 5; + // regwidth = 5, so hash space is + // log2m + (2^5 - 1 - 1), so L = log2m + 30 + final int l = log2m + 30; + final int m = (1 << log2m); + final HLL hll = + new HLL( + log2m, + regwidth, + 128 /*explicitThreshold, arbitrary, unused*/, + 256 /*sparseThreshold, arbitrary, unused*/, + HLLType.FULL); + + { + final int registerValue = 31 /*chosen to ensure large correction kicks in*/; + for (int i = 0; i < m; i++) { + hll.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, i, registerValue)); + } + + final long cardinality = hll.cardinality(); + + // Simplified estimator when all registers take same value: alpha / (m/2^val) + final double estimator = HLLUtil.alphaMSquared(m) / ((double) m / Math.pow(2, registerValue)); + + // Assert conditions for large range + + assertTrue(estimator > Math.pow(2, l) / 30); + + // Large range correction: -2^L * log(1 - E/2^L) + final long expected = + (long) Math.ceil(-1.0 * Math.pow(2, l) * Math.log(1.0 - estimator / Math.pow(2, l))); + assertEquals(cardinality, expected); + } + } + + // ======================================================================== + /** Tests the bounds on a register's value for a given raw input value. 
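registerValueTest below fixes the contract for decomposing a raw 64-bit hash: the low log2m bits select the register index j, and the register value is the 1-based position of the lowest set bit of the remaining bits, capped at the register's maximum. As a sketch (the helpers are illustrative, not HLL's internals):

    static int registerIndex(long raw, int log2m) {
      return (int) (raw & ((1L << log2m) - 1)); // 'j': the low log2m bits
    }

    static int registerValue(long raw, int log2m, int regwidth) {
      final long w = raw >>> log2m;
      final int rho = Long.numberOfTrailingZeros(w) + 1; // 65 when w == 0
      final int max = (1 << regwidth) - 1;
      return Math.min(rho, max); // mirrors BitVector's overflow clamp
    }

For example, with log2m = 4 and regwidth = 4, the input 0x0000000000010006L gives j = 6 and value min(12 + 1, 15) = 13, and 0x0000000000080009L gives j = 9 and the clamped value 15, matching the assertions below.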
*/ + @Test + public void registerValueTest() { + final int log2m = 4 /*small enough to make testing easy (addRaw() shifts by one byte)*/; + + // register width 4 (the minimum size) + { // scoped locally for sanity + final int regwidth = 4; + final HLL hll = + new HLL( + log2m, + regwidth, + 128 /*explicitThreshold, arbitrary, unused*/, + 256 /*sparseThreshold, arbitrary, unused*/, + HLLType.FULL); + final BitVector bitVector = hll.probabilisticStorage; + + // lower-bounds of the register + hll.addRaw(0x000000000000001L /*'j'=1*/); + assertEquals(bitVector.getRegister(1 /*'j'*/), 0); + + hll.addRaw(0x0000000000000012L /*'j'=2*/); + assertEquals(bitVector.getRegister(2 /*'j'*/), 1); + + hll.addRaw(0x0000000000000023L /*'j'=3*/); + assertEquals(bitVector.getRegister(3 /*'j'*/), 2); + + hll.addRaw(0x0000000000000044L /*'j'=4*/); + assertEquals(bitVector.getRegister(4 /*'j'*/), 3); + + hll.addRaw(0x0000000000000085L /*'j'=5*/); + assertEquals(bitVector.getRegister(5 /*'j'*/), 4); + + // upper-bounds of the register + // NOTE: bear in mind that BitVector itself does ensure that + // overflow of a register is prevented + hll.addRaw(0x0000000000010006L /*'j'=6*/); + assertEquals(bitVector.getRegister(6 /*'j'*/), 13); + + hll.addRaw(0x0000000000020007L /*'j'=7*/); + assertEquals(bitVector.getRegister(7 /*'j'*/), 14); + + hll.addRaw(0x0000000000040008L /*'j'=8*/); + assertEquals(bitVector.getRegister(8 /*'j'*/), 15); + + hll.addRaw(0x0000000000080009L /*'j'=9*/); + assertEquals(bitVector.getRegister(9 /*'j'*/), 15 /*overflow*/); + + // sanity checks to ensure that no other bits above the lowest-set + // bit matters + // NOTE: same as case 'j = 6' above + hll.addRaw(0x000000000003000AL /*'j'=10*/); + assertEquals(bitVector.getRegister(10 /*'j'*/), 13); + + hll.addRaw(0x000000000011000BL /*'j'=11*/); + assertEquals(bitVector.getRegister(11 /*'j'*/), 13); } - // ======================================================================== - /** - * Tests the bounds on a register's value for a given raw input value. 
- */ - @Test - public void registerValueTest() { - final int log2m = 4/*small enough to make testing easy (addRaw() shifts by one byte)*/; - - // register width 4 (the minimum size) - { // scoped locally for sanity - final int regwidth = 4; - final HLL hll = new HLL(log2m, regwidth, 128/*explicitThreshold, arbitrary, unused*/, 256/*sparseThreshold, arbitrary, unused*/, HLLType.FULL); - final BitVector bitVector = hll.probabilisticStorage; - - // lower-bounds of the register - hll.addRaw(0x000000000000001L/*'j'=1*/); - assertEquals(bitVector.getRegister(1/*'j'*/), 0); - - hll.addRaw(0x0000000000000012L/*'j'=2*/); - assertEquals(bitVector.getRegister(2/*'j'*/), 1); - - hll.addRaw(0x0000000000000023L/*'j'=3*/); - assertEquals(bitVector.getRegister(3/*'j'*/), 2); - - hll.addRaw(0x0000000000000044L/*'j'=4*/); - assertEquals(bitVector.getRegister(4/*'j'*/), 3); - - hll.addRaw(0x0000000000000085L/*'j'=5*/); - assertEquals(bitVector.getRegister(5/*'j'*/), 4); - - // upper-bounds of the register - // NOTE: bear in mind that BitVector itself does ensure that - // overflow of a register is prevented - hll.addRaw(0x0000000000010006L/*'j'=6*/); - assertEquals(bitVector.getRegister(6/*'j'*/), 13); - - hll.addRaw(0x0000000000020007L/*'j'=7*/); - assertEquals(bitVector.getRegister(7/*'j'*/), 14); - - hll.addRaw(0x0000000000040008L/*'j'=8*/); - assertEquals(bitVector.getRegister(8/*'j'*/), 15); - - hll.addRaw(0x0000000000080009L/*'j'=9*/); - assertEquals(bitVector.getRegister(9/*'j'*/), 15/*overflow*/); - - // sanity checks to ensure that no other bits above the lowest-set - // bit matters - // NOTE: same as case 'j = 6' above - hll.addRaw(0x000000000003000AL/*'j'=10*/); - assertEquals(bitVector.getRegister(10/*'j'*/), 13); - - hll.addRaw(0x000000000011000BL/*'j'=11*/); - assertEquals(bitVector.getRegister(11/*'j'*/), 13); - } - - // register width 5 - { // scoped locally for sanity - final int regwidth = 5; - final HLL hll = new HLL(log2m, regwidth, 128/*explicitThreshold, arbitrary, unused*/, 256/*sparseThreshold, arbitrary, unused*/, HLLType.FULL); - final BitVector bitVector = hll.probabilisticStorage; + // register width 5 + { // scoped locally for sanity + final int regwidth = 5; + final HLL hll = + new HLL( + log2m, + regwidth, + 128 /*explicitThreshold, arbitrary, unused*/, + 256 /*sparseThreshold, arbitrary, unused*/, + HLLType.FULL); + final BitVector bitVector = hll.probabilisticStorage; - // lower-bounds of the register - hll.addRaw(0x0000000000000001L/*'j'=1*/); - assertEquals(bitVector.getRegister(1/*'j'*/), 0); + // lower-bounds of the register + hll.addRaw(0x0000000000000001L /*'j'=1*/); + assertEquals(bitVector.getRegister(1 /*'j'*/), 0); - hll.addRaw(0x0000000000000012L/*'j'=2*/); - assertEquals(bitVector.getRegister(2/*'j'*/), 1); + hll.addRaw(0x0000000000000012L /*'j'=2*/); + assertEquals(bitVector.getRegister(2 /*'j'*/), 1); - hll.addRaw(0x0000000000000023L/*'j'=3*/); - assertEquals(bitVector.getRegister(3/*'j'*/), 2); + hll.addRaw(0x0000000000000023L /*'j'=3*/); + assertEquals(bitVector.getRegister(3 /*'j'*/), 2); - hll.addRaw(0x0000000000000044L/*'j'=4*/); - assertEquals(bitVector.getRegister(4/*'j'*/), 3); + hll.addRaw(0x0000000000000044L /*'j'=4*/); + assertEquals(bitVector.getRegister(4 /*'j'*/), 3); - hll.addRaw(0x0000000000000085L/*'j'=5*/); - assertEquals(bitVector.getRegister(5/*'j'*/), 4); + hll.addRaw(0x0000000000000085L /*'j'=5*/); + assertEquals(bitVector.getRegister(5 /*'j'*/), 4); - // upper-bounds of the register - // NOTE: bear in mind that BitVector itself does ensure 
that - // overflow of a register is prevented - hll.addRaw(0x0000000100000006L/*'j'=6*/); - assertEquals(bitVector.getRegister(6/*'j'*/), 29); + // upper-bounds of the register + // NOTE: bear in mind that BitVector itself does ensure that + // overflow of a register is prevented + hll.addRaw(0x0000000100000006L /*'j'=6*/); + assertEquals(bitVector.getRegister(6 /*'j'*/), 29); - hll.addRaw(0x0000000200000007L/*'j'=7*/); - assertEquals(bitVector.getRegister(7/*'j'*/), 30); + hll.addRaw(0x0000000200000007L /*'j'=7*/); + assertEquals(bitVector.getRegister(7 /*'j'*/), 30); - hll.addRaw(0x0000000400000008L/*'j'=8*/); - assertEquals(bitVector.getRegister(8/*'j'*/), 31); + hll.addRaw(0x0000000400000008L /*'j'=8*/); + assertEquals(bitVector.getRegister(8 /*'j'*/), 31); - hll.addRaw(0x0000000800000009L/*'j'=9*/); - assertEquals(bitVector.getRegister(9/*'j'*/), 31/*overflow*/); - } + hll.addRaw(0x0000000800000009L /*'j'=9*/); + assertEquals(bitVector.getRegister(9 /*'j'*/), 31 /*overflow*/); } - - // ======================================================================== - /** - * Tests {@link HLL#clear()}. - */ - @Test - public void clearTest() { - final int regwidth = 5; - final int log2m = 4/*16 registers per counter*/; - final int m = 1 << log2m; - - final HLL hll = new HLL(log2m, regwidth, 128/*explicitThreshold, arbitrary, unused*/, 256/*sparseThreshold, arbitrary, unused*/, HLLType.FULL); - final BitVector bitVector = hll.probabilisticStorage; - for(int i=0; i randoms = new ArrayList(randomCount); - for (int i=0; i randoms = new ArrayList(randomCount); - for (int i=0; i items) throws CloneNotSupportedException { - for(int regw=MINIMUM_REGWIDTH_PARAM; regw<=MAXIMUM_REGWIDTH_PARAM; regw++) { - for(int expthr=MINIMUM_EXPTHRESH_PARAM; expthr<=MAXIMUM_EXPTHRESH_PARAM; expthr++ ) { - for(final boolean sparse: new boolean[]{true, false}) { - for(int log2m=MINIMUM_LOG2M_PARAM; log2m<=maxLog2m; log2m++) { + private static void assertCardinality( + final HLLType hllType, final int maxLog2m, final Collection items) + throws CloneNotSupportedException { + for (int regw = MINIMUM_REGWIDTH_PARAM; regw <= MAXIMUM_REGWIDTH_PARAM; regw++) { + for (int expthr = MINIMUM_EXPTHRESH_PARAM; expthr <= MAXIMUM_EXPTHRESH_PARAM; expthr++) { + for (final boolean sparse : new boolean[] {true, false}) { + for (int log2m = MINIMUM_LOG2M_PARAM; log2m <= maxLog2m; log2m++) { assertCardinality(new HLL(log2m, regw, expthr, sparse, hllType), items); } } @@ -195,29 +189,30 @@ private static void assertCardinality(final HLLType hllType, } /** - * Adds all of the items to the specified hll, then does a round trip serialize/deserialize and confirms - * equality of several properties (including the byte serialization). Repeats process with a clone. + * Adds all of the items to the specified hll, then does a round trip serialize/deserialize and + * confirms equality of several properties (including the byte serialization). Repeats process + * with a clone. 
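
The register arithmetic exercised by registerValueTest further up follows directly from the raw-value layout these tests rely on: the low log2m bits of the value passed to addRaw() select a register, and the register value is one plus the position of the least-significant set bit of the remaining upper bits, clamped to the register maximum. A minimal standalone sketch of that decoding (class and method names here are illustrative, not part of this patch):

    public final class RawValueDecodeSketch {
      // Register index: the low log2m bits of the raw value.
      static int registerIndex(long rawValue, int log2m) {
        return (int) (rawValue & ((1L << log2m) - 1));
      }

      // Register value: 1 + index of the least-significant set bit of the
      // upper bits, clamped to 31 (the width-5 maximum the tests expect).
      static int registerValue(long rawValue, int log2m) {
        final long substream = rawValue >>> log2m;
        if (substream == 0L) {
          return 0; // p(0x0) is not covered by the paper; registers ignore it
        }
        return Math.min(1 + Long.numberOfTrailingZeros(substream), 31);
      }

      public static void main(String[] args) {
        final int log2m = 4; // matches registerValueTest
        // 0x0000000100000006L: register 6, value 29 (the width-5 case above)
        System.out.println(registerIndex(0x0000000100000006L, log2m)); // 6
        System.out.println(registerValue(0x0000000100000006L, log2m)); // 29
      }
    }
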
*/ private static void assertCardinality(HLL hll, final Collection items) - throws CloneNotSupportedException { - - for (final Long item: items) { + throws CloneNotSupportedException { + + for (final Long item : items) { hll.addRaw(item); } - + final long hllCardinality = hll.cardinality(); final HLLType hllType = hll.getType(); final byte[] hllBytes = hll.toBytes(); hll = null; // allow some GC - + HLL copy = HLL.fromBytes(hllBytes); assertEquals(copy.cardinality(), hllCardinality); assertEquals(copy.getType(), hllType); assertTrue(Arrays.equals(copy.toBytes(), hllBytes)); - + HLL clone = copy.clone(); copy = null; // allow some GC - + assertEquals(clone.cardinality(), hllCardinality); assertEquals(clone.getType(), hllType); assertTrue(Arrays.equals(clone.toBytes(), hllBytes)); diff --git a/solr/core/src/test/org/apache/solr/util/hll/HLLUtilTest.java b/solr/core/src/test/org/apache/solr/util/hll/HLLUtilTest.java index 0e849ae81d0..4cd692c3782 100644 --- a/solr/core/src/test/org/apache/solr/util/hll/HLLUtilTest.java +++ b/solr/core/src/test/org/apache/solr/util/hll/HLLUtilTest.java @@ -19,26 +19,27 @@ import org.apache.solr.SolrTestCase; import org.junit.Test; -/** - * Tests {@link HLLUtil} static methods. - */ +/** Tests {@link HLLUtil} static methods. */ public class HLLUtilTest extends SolrTestCase { - /** - * Tests that {@link HLLUtil#largeEstimatorCutoff(int, int)} is the same - * as a trivial implementation. - */ - @Test - public void largeEstimatorCutoffTest() { - for(int log2m=HLL.MINIMUM_LOG2M_PARAM; log2m<=HLL.MAXIMUM_LOG2M_PARAM; log2m++) { - for(int regWidth=HLL.MINIMUM_REGWIDTH_PARAM; regWidth<=HLL.MINIMUM_REGWIDTH_PARAM; regWidth++) { - final double cutoff = HLLUtil.largeEstimatorCutoff(log2m, regWidth); + /** + * Tests that {@link HLLUtil#largeEstimatorCutoff(int, int)} is the same as a trivial + * implementation. + */ + @Test + public void largeEstimatorCutoffTest() { + for (int log2m = HLL.MINIMUM_LOG2M_PARAM; log2m <= HLL.MAXIMUM_LOG2M_PARAM; log2m++) { + for (int regWidth = HLL.MINIMUM_REGWIDTH_PARAM; + regWidth <= HLL.MINIMUM_REGWIDTH_PARAM; + regWidth++) { + final double cutoff = HLLUtil.largeEstimatorCutoff(log2m, regWidth); - // See blog post (http://research.neustar.biz/2013/01/24/hyperloglog-googles-take-on-engineering-hll/) - // and original paper (Fig. 3) for information on 2^L and - // "large range correction" cutoff. - final double expected = Math.pow(2, Math.pow(2, regWidth) - 2 + log2m) / 30.0; - assertEquals(cutoff, expected, 0.0001); - } - } + // See blog post + // (http://research.neustar.biz/2013/01/24/hyperloglog-googles-take-on-engineering-hll/) + // and original paper (Fig. 3) for information on 2^L and + // "large range correction" cutoff. + final double expected = Math.pow(2, Math.pow(2, regWidth) - 2 + log2m) / 30.0; + assertEquals(cutoff, expected, 0.0001); + } } + } } diff --git a/solr/core/src/test/org/apache/solr/util/hll/IntegrationTestGenerator.java b/solr/core/src/test/org/apache/solr/util/hll/IntegrationTestGenerator.java index cc2641cffd5..d22194e2c7c 100644 --- a/solr/core/src/test/org/apache/solr/util/hll/IntegrationTestGenerator.java +++ b/solr/core/src/test/org/apache/solr/util/hll/IntegrationTestGenerator.java @@ -27,685 +27,681 @@ import java.util.Random; /** - * Generates test files for testing other implementations of HLL - * serialization/deserialization, namely the PostgreSQL implementation. + * Generates test files for testing other implementations of HLL serialization/deserialization, + * namely the PostgreSQL implementation. 
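
The "trivial implementation" that largeEstimatorCutoffTest compares against is the large-range-correction bound from the paper, 2^(2^regwidth - 2 + log2m) / 30. As a spot check at the parameters the generator below uses (log2m = 11, regwidth = 5): 2^(32 - 2 + 11) / 30 = 2^41 / 30 ≈ 7.33e10. A standalone sketch reproducing the arithmetic (assumed class name):

    public final class CutoffSpotCheck {
      public static void main(String[] args) {
        final int log2m = 11; // the generator's LOG2M
        final int regWidth = 5; // the generator's REGWIDTH
        // same expression largeEstimatorCutoffTest uses as its expected value
        final double cutoff = Math.pow(2, Math.pow(2, regWidth) - 2 + log2m) / 30.0;
        System.out.println(cutoff); // ~7.33e10
      }
    }
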
*/ public class IntegrationTestGenerator { - // ************************************************************************ - // directory to output the generated tests - private static final String OUTPUT_DIRECTORY = "/tmp/hll_test/"; - - // ------------------------------------------------------------------------ - // configurations for HLLs, should mirror settings in PostgreSQL impl. tests - private static final int REGWIDTH = 5; - private static final int LOG2M = 11; - // NOTE: This differs from the PostgreSQL impl. parameter 'expthresh'. This - // is a literal threshold to use in the promotion hierarchy, implying - // that both EXPLICIT representation should be used and it should - // NOT be automatically computed. This is done to ensure that the - // parameters of the test are very explicitly defined. - private static final int EXPLICIT_THRESHOLD = 256; - // NOTE: This is not the PostgreSQL impl. parameter 'sparseon'. 'sparseon' - // is assumed to be true and this is a literal register-count threshold - // to use in the promotion hierarchy. This is done to ensure that the - // parameters of the test are very explicitly defined. - private static final int SPARSE_THRESHOLD = 850; - - // ------------------------------------------------------------------------ - // computed constants - private static final int REGISTER_COUNT = (1 << LOG2M); - private static final int REGISTER_MAX_VALUE = (1 << REGWIDTH) - 1; - - // ======================================================================== - // Tests - /** - * Cumulatively adds random values to a FULL HLL through the small range - * correction, uncorrected range, and large range correction of the HLL's - * cardinality estimator. - * - * Format: cumulative add - * Tests: - * - FULL cardinality computation - */ - private static void fullCardinalityCorrectionTest(final ISchemaVersion schemaVersion) throws IOException { - final Writer output = openOutput(schemaVersion, "cardinality_correction", TestType.ADD); - - // the accumulator, starts empty - final HLL hll = newHLL(HLLType.FULL); - initLineAdd(output, hll, schemaVersion); - - // run through some values in the small range correction - for(int i=0; i<((1 << LOG2M) - 1); i++) { - final long rawValue = constructHLLValue(LOG2M, i, 1); - cumulativeAddLine(output, hll, rawValue, schemaVersion); - } - - // run up past some values in the uncorrected range - for(int i=0; i<(1 << LOG2M); i++) { - final long rawValue = constructHLLValue(LOG2M, i, 7); - cumulativeAddLine(output, hll, rawValue, schemaVersion); - } - - // run through some values in the large range correction - for(int i=0; i<(1 << LOG2M); i++) { - final long rawValue = constructHLLValue(LOG2M, i, 30); - cumulativeAddLine(output, hll, rawValue, schemaVersion); - } - - output.flush(); - output.close(); + // ************************************************************************ + // directory to output the generated tests + private static final String OUTPUT_DIRECTORY = "/tmp/hll_test/"; + + // ------------------------------------------------------------------------ + // configurations for HLLs, should mirror settings in PostgreSQL impl. tests + private static final int REGWIDTH = 5; + private static final int LOG2M = 11; + // NOTE: This differs from the PostgreSQL impl. parameter 'expthresh'. This + // is a literal threshold to use in the promotion hierarchy, implying + // that both EXPLICIT representation should be used and it should + // NOT be automatically computed. 
This is done to ensure that the + // parameters of the test are very explicitly defined. + private static final int EXPLICIT_THRESHOLD = 256; + // NOTE: This is not the PostgreSQL impl. parameter 'sparseon'. 'sparseon' + // is assumed to be true and this is a literal register-count threshold + // to use in the promotion hierarchy. This is done to ensure that the + // parameters of the test are very explicitly defined. + private static final int SPARSE_THRESHOLD = 850; + + // ------------------------------------------------------------------------ + // computed constants + private static final int REGISTER_COUNT = (1 << LOG2M); + private static final int REGISTER_MAX_VALUE = (1 << REGWIDTH) - 1; + + // ======================================================================== + // Tests + /** + * Cumulatively adds random values to a FULL HLL through the small range correction, uncorrected + * range, and large range correction of the HLL's cardinality estimator. + * + *
<p>
Format: cumulative add Tests: - FULL cardinality computation + */ + private static void fullCardinalityCorrectionTest(final ISchemaVersion schemaVersion) + throws IOException { + final Writer output = openOutput(schemaVersion, "cardinality_correction", TestType.ADD); + + // the accumulator, starts empty + final HLL hll = newHLL(HLLType.FULL); + initLineAdd(output, hll, schemaVersion); + + // run through some values in the small range correction + for (int i = 0; i < ((1 << LOG2M) - 1); i++) { + final long rawValue = constructHLLValue(LOG2M, i, 1); + cumulativeAddLine(output, hll, rawValue, schemaVersion); } - /** - * Cumulatively adds random values to an EMPTY HLL. - * - * Format: cumulative add - * Tests: - * - EMPTY, EXPLICIT, SPARSE, PROBABILSTIC addition - * - EMPTY to EXPLICIT promotion - * - EXPLICIT to SPARSE promotion - * - SPARSE to FULL promotion - */ - private static void globalStepTest(final ISchemaVersion schemaVersion) throws IOException { - final Writer output = openOutput(schemaVersion, "comprehensive_promotion", TestType.ADD); - - // the accumulator, starts empty - final HLL hll = newHLL(HLLType.EMPTY); - initLineAdd(output, hll, schemaVersion); - - for(int i=0; i<10000/*arbitrary*/; i++) { - cumulativeAddLine(output, hll, randomLong(), schemaVersion); - } - - output.flush(); - output.close(); + // run up past some values in the uncorrected range + for (int i = 0; i < (1 << LOG2M); i++) { + final long rawValue = constructHLLValue(LOG2M, i, 7); + cumulativeAddLine(output, hll, rawValue, schemaVersion); } - /** - * Cumulatively unions "underpopulated" FULL HLLs into the - * accumulator to verify the correct behavior from the PostgreSQL implementation. - * The PostgreSQL implementation's representations of probabilistic HLLs should - * depend exclusively on the chosen SPARSE-to-FULL cutoff. - * - * Format: cumulative union - * Tests: - * - EMPTY U "underpopulated" FULL => SPARSE - * - SPARSE U "underpopulated" FULL => SPARSE - * - SPARSE U "barely underpopulated" FULL => FULL - */ - private static void sparseFullRepresentationTest(final ISchemaVersion schemaVersion) throws IOException { - final Writer output = openOutput(schemaVersion, "sparse_full_representation", TestType.UNION); - - final HLL emptyHLL1 = newHLL(HLLType.EMPTY); - final HLL emptyHLL2 = newHLL(HLLType.EMPTY); - - cumulativeUnionLine(output, emptyHLL1, emptyHLL2, schemaVersion); - - // NOTE: In this test the sparseReference will be the "expected" value - // from the C representation, since it doesn't choose representation - // based on original encoding, but rather on the promotion rules - // and the declared type of the "receiving" field. - // It is the manually-constructed union result. 
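
The union cases below are easier to follow with the generator's geometry in hand. With LOG2M = 11 and REGWIDTH = 5 the counter has 2048 registers, each capped at value 31, and a FULL HLL counts as "underpopulated" while fewer than SPARSE_THRESHOLD (850) of those registers are set. A tiny sketch of the derived constants (assumed class name, mirroring REGISTER_COUNT and REGISTER_MAX_VALUE above):

    public final class GeometrySketch {
      public static void main(String[] args) {
        final int log2m = 11;
        final int regwidth = 5;
        System.out.println(1 << log2m);          // REGISTER_COUNT: 2048
        System.out.println((1 << regwidth) - 1); // REGISTER_MAX_VALUE: 31
        System.out.println(850 + 1);             // SPARSE_THRESHOLD + 1, just past the sparse cutoff
      }
    }
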
- - // "underpopulated" FULL U EMPTY => SPARSE - final HLL fullHLL = newHLL(HLLType.FULL); - fullHLL.addRaw(constructHLLValue(LOG2M, 0/*ix*/, 1/*val*/)); - - final HLL sparseHLL = newHLL(HLLType.SPARSE); - sparseHLL.addRaw(constructHLLValue(LOG2M, 0/*ix*/, 1/*val*/)); - - output.write(stringCardinality(fullHLL) + "," + toByteA(fullHLL, schemaVersion) + "," + stringCardinality(sparseHLL) + "," + toByteA(sparseHLL, schemaVersion) + "\n"); - output.flush(); - - // "underpopulated" FULL (small) U SPARSE (small) => SPARSE - final HLL fullHLL2 = newHLL(HLLType.FULL); - fullHLL2.addRaw(constructHLLValue(LOG2M, 1/*ix*/, 1/*val*/)); - - sparseHLL.addRaw(constructHLLValue(LOG2M, 1/*ix*/, 1/*val*/)); - - output.write(stringCardinality(fullHLL2) + "," + toByteA(fullHLL2, schemaVersion) + "," + stringCardinality(sparseHLL) + "," + toByteA(sparseHLL, schemaVersion) + "\n"); - output.flush(); - - // "underpopulated" FULL (just on edge) U SPARSE (small) => FULL - final HLL fullHLL3 = newHLL(HLLType.FULL); - for(int i=2; i<(SPARSE_THRESHOLD + 1); i++) { - fullHLL3.addRaw(constructHLLValue(LOG2M, i/*ix*/, 1/*val*/)); - sparseHLL.addRaw(constructHLLValue(LOG2M, i/*ix*/, 1/*val*/)); - } - - output.write(stringCardinality(fullHLL3) + "," + toByteA(fullHLL3, schemaVersion) + "," + stringCardinality(sparseHLL) + "," + toByteA(sparseHLL, schemaVersion) + "\n"); - output.flush(); + // run through some values in the large range correction + for (int i = 0; i < (1 << LOG2M); i++) { + final long rawValue = constructHLLValue(LOG2M, i, 30); + cumulativeAddLine(output, hll, rawValue, schemaVersion); } - /** - * Cumulatively sets successive registers to: - * - * (registerIndex % REGISTER_MAX_VALUE) + 1 - * - * by adding specifically constructed values to a SPARSE HLL. - * Does not induce promotion. - * - * Format: cumulative add - * Tests: - * - SPARSE addition (predictable) - */ - private static void sparseStepTest(final ISchemaVersion schemaVersion) throws IOException { - final Writer output = openOutput(schemaVersion, "sparse_step", TestType.ADD); - - // the accumulator, starts empty sparse probabilistic - final HLL hll = newHLL(HLLType.SPARSE); - initLineAdd(output, hll, schemaVersion); - - for(int i=0; iFormat: cumulative add Tests: - EMPTY, EXPLICIT, SPARSE, PROBABILSTIC addition - EMPTY to + * EXPLICIT promotion - EXPLICIT to SPARSE promotion - SPARSE to FULL promotion + */ + private static void globalStepTest(final ISchemaVersion schemaVersion) throws IOException { + final Writer output = openOutput(schemaVersion, "comprehensive_promotion", TestType.ADD); + + // the accumulator, starts empty + final HLL hll = newHLL(HLLType.EMPTY); + initLineAdd(output, hll, schemaVersion); + + for (int i = 0; i < 10000 /*arbitrary*/; i++) { + cumulativeAddLine(output, hll, randomLong(), schemaVersion); } - /** - * Cumulatively sets random registers of a SPARSE HLL to - * random values by adding random values. Does not induce promotion. 
- * - * Format: cumulative add - * Tests: - * - SPARSE addition (random) - */ - private static void sparseRandomTest(final ISchemaVersion schemaVersion) throws IOException { - final Writer output = openOutput(schemaVersion, "sparse_random", TestType.ADD); - - final Random random = new Random(randomLong()); - - // the accumulator, starts empty - final HLL hll = newHLL(HLLType.SPARSE); - initLineAdd(output, hll, schemaVersion); - - for(int i=0; iFormat: cumulative union Tests: - EMPTY U "underpopulated" FULL => SPARSE - SPARSE U + * "underpopulated" FULL => SPARSE - SPARSE U "barely underpopulated" FULL => FULL + */ + private static void sparseFullRepresentationTest(final ISchemaVersion schemaVersion) + throws IOException { + final Writer output = openOutput(schemaVersion, "sparse_full_representation", TestType.UNION); + + final HLL emptyHLL1 = newHLL(HLLType.EMPTY); + final HLL emptyHLL2 = newHLL(HLLType.EMPTY); + + cumulativeUnionLine(output, emptyHLL1, emptyHLL2, schemaVersion); + + // NOTE: In this test the sparseReference will be the "expected" value + // from the C representation, since it doesn't choose representation + // based on original encoding, but rather on the promotion rules + // and the declared type of the "receiving" field. + // It is the manually-constructed union result. + + // "underpopulated" FULL U EMPTY => SPARSE + final HLL fullHLL = newHLL(HLLType.FULL); + fullHLL.addRaw(constructHLLValue(LOG2M, 0 /*ix*/, 1 /*val*/)); + + final HLL sparseHLL = newHLL(HLLType.SPARSE); + sparseHLL.addRaw(constructHLLValue(LOG2M, 0 /*ix*/, 1 /*val*/)); + + output.write( + stringCardinality(fullHLL) + + "," + + toByteA(fullHLL, schemaVersion) + + "," + + stringCardinality(sparseHLL) + + "," + + toByteA(sparseHLL, schemaVersion) + + "\n"); + output.flush(); + + // "underpopulated" FULL (small) U SPARSE (small) => SPARSE + final HLL fullHLL2 = newHLL(HLLType.FULL); + fullHLL2.addRaw(constructHLLValue(LOG2M, 1 /*ix*/, 1 /*val*/)); + + sparseHLL.addRaw(constructHLLValue(LOG2M, 1 /*ix*/, 1 /*val*/)); + + output.write( + stringCardinality(fullHLL2) + + "," + + toByteA(fullHLL2, schemaVersion) + + "," + + stringCardinality(sparseHLL) + + "," + + toByteA(sparseHLL, schemaVersion) + + "\n"); + output.flush(); + + // "underpopulated" FULL (just on edge) U SPARSE (small) => FULL + final HLL fullHLL3 = newHLL(HLLType.FULL); + for (int i = 2; i < (SPARSE_THRESHOLD + 1); i++) { + fullHLL3.addRaw(constructHLLValue(LOG2M, i /*ix*/, 1 /*val*/)); + sparseHLL.addRaw(constructHLLValue(LOG2M, i /*ix*/, 1 /*val*/)); } - /** - * Cumulatively sets the first register (index 0) to value 2, the last - * register (index m-1) to value 2, and then sets registers with indices in - * the range 2 to (sparseCutoff + 2) to value 1 to trigger promotion. - * - * This tests for register alignment in the promotion from SPARSE - * to FULL. 
- * - * Format: cumulative add - * Tests: - * - SPARSE addition - * - SPARSE to FULL promotion - */ - private static void sparseEdgeTest(final ISchemaVersion schemaVersion) throws IOException { - final Writer output = openOutput(schemaVersion, "sparse_edge", TestType.ADD); - - // the accumulator, starts empty - final HLL hll = newHLL(HLLType.SPARSE); - initLineAdd(output, hll, schemaVersion); - - final long firstValue = constructHLLValue(LOG2M, 0, 2); - cumulativeAddLine(output, hll, firstValue, schemaVersion); - - final long lastValue = constructHLLValue(LOG2M, (1 << LOG2M) - 1, 2); - cumulativeAddLine(output, hll, lastValue, schemaVersion); - - for(int i=2; i<(SPARSE_THRESHOLD + 2); i++) { - final long middleValue = constructHLLValue(LOG2M, i, 1); - - cumulativeAddLine(output, hll, middleValue, schemaVersion); - } - - output.flush(); - output.close(); + output.write( + stringCardinality(fullHLL3) + + "," + + toByteA(fullHLL3, schemaVersion) + + "," + + stringCardinality(sparseHLL) + + "," + + toByteA(sparseHLL, schemaVersion) + + "\n"); + output.flush(); + } + + /** + * Cumulatively sets successive registers to: (registerIndex % REGISTER_MAX_VALUE) + 1 + * by adding specifically constructed values to a SPARSE HLL. Does not induce promotion. + * + *
<p>
Format: cumulative add Tests: - SPARSE addition (predictable) + */ + private static void sparseStepTest(final ISchemaVersion schemaVersion) throws IOException { + final Writer output = openOutput(schemaVersion, "sparse_step", TestType.ADD); + + // the accumulator, starts empty sparse probabilistic + final HLL hll = newHLL(HLLType.SPARSE); + initLineAdd(output, hll, schemaVersion); + + for (int i = 0; i < SPARSE_THRESHOLD; i++) { + final long rawValue = constructHLLValue(LOG2M, i, ((i % REGISTER_MAX_VALUE) + 1)); + cumulativeAddLine(output, hll, rawValue, schemaVersion); } - /** - * Unions an EMPTY accumulator with EXPLICIT HLLs, each containing a - * single random value. - * - * Format: cumulative union - * Tests: - * - EMPTY U EXPLICIT - * - EXPLICIT U EXPLICIT - * - EXPLICIT to SPARSE promotion - * - SPARSE U EXPLICIT - */ - private static void explicitPromotionTest(final ISchemaVersion schemaVersion) throws IOException { - final Writer output = openOutput(schemaVersion, "explicit_promotion", TestType.UNION); + output.flush(); + output.close(); + } - final Random random = new Random(randomLong()); + /** + * Cumulatively sets random registers of a SPARSE HLL to random values by adding random values. + * Does not induce promotion. + * + *
<p>
Format: cumulative add Tests: - SPARSE addition (random) + */ + private static void sparseRandomTest(final ISchemaVersion schemaVersion) throws IOException { + final Writer output = openOutput(schemaVersion, "sparse_random", TestType.ADD); - // the accumulator, starts empty - final HLL hll = newHLL(HLLType.EMPTY); - final HLL emptyHLL = newHLL(HLLType.EMPTY); - cumulativeUnionLine(output, hll, emptyHLL, schemaVersion); + final Random random = new Random(randomLong()); - for(int i=0; i<(EXPLICIT_THRESHOLD+500)/*should be greater than promotion cutoff*/; i++) { - // make an EXPLICIT set and populate with cardinality 1 - final HLL explicitHLL = newHLL(HLLType.EXPLICIT); - explicitHLL.addRaw(random.nextLong()); + // the accumulator, starts empty + final HLL hll = newHLL(HLLType.SPARSE); + initLineAdd(output, hll, schemaVersion); - cumulativeUnionLine(output, hll, explicitHLL, schemaVersion); - } + for (int i = 0; i < SPARSE_THRESHOLD; i++) { + final int registerIndex = random.nextInt(REGISTER_COUNT); + final int registerValue = random.nextInt(REGISTER_MAX_VALUE) + 1; + final long rawValue = constructHLLValue(LOG2M, registerIndex, registerValue); - output.flush(); - output.close(); + cumulativeAddLine(output, hll, rawValue, schemaVersion); } - /** - * Unions an EMPTY accumulator with SPARSE HLLs, each - * having one register set. - * - * Format: cumulative union - * Tests: - * - EMPTY U SPARSE - * - SPARSE U SPARSE - * - SPARSE promotion - * - SPARSE U FULL - */ - private static void sparseProbabilisticPromotionTest(final ISchemaVersion schemaVersion) throws IOException { - final Writer output = openOutput(schemaVersion, "sparse_promotion", TestType.UNION); + output.flush(); + output.close(); + } - final Random random = new Random(randomLong()); + /** + * Cumulatively sets the first register (index 0) to value 2, the last register (index m-1) to + * value 2, and then sets registers with indices in the range 2 to (sparseCutoff + 2) to value 1 + * to trigger promotion. + * + *
<p>
This tests for register alignment in the promotion from SPARSE to FULL. + * + *
<p>
Format: cumulative add Tests: - SPARSE addition - SPARSE to FULL promotion + */ + private static void sparseEdgeTest(final ISchemaVersion schemaVersion) throws IOException { + final Writer output = openOutput(schemaVersion, "sparse_edge", TestType.ADD); - // the accumulator, starts empty - final HLL hll = newHLL(HLLType.EMPTY); - final HLL emptyHLL = newHLL(HLLType.EMPTY); - cumulativeUnionLine(output, hll, emptyHLL, schemaVersion); + // the accumulator, starts empty + final HLL hll = newHLL(HLLType.SPARSE); + initLineAdd(output, hll, schemaVersion); + final long firstValue = constructHLLValue(LOG2M, 0, 2); + cumulativeAddLine(output, hll, firstValue, schemaVersion); - for(int i=0; i<(SPARSE_THRESHOLD + 1000)/*should be greater than promotion cutoff*/; i++) { - // make a SPARSE set and populate with cardinality 1 - final HLL sparseHLL = newHLL(HLLType.SPARSE); + final long lastValue = constructHLLValue(LOG2M, (1 << LOG2M) - 1, 2); + cumulativeAddLine(output, hll, lastValue, schemaVersion); - final int registerIndex = random.nextInt(REGISTER_COUNT); - final int registerValue = random.nextInt(REGISTER_MAX_VALUE) + 1; - final long rawValue = constructHLLValue(LOG2M, registerIndex, registerValue); - sparseHLL.addRaw(rawValue); + for (int i = 2; i < (SPARSE_THRESHOLD + 2); i++) { + final long middleValue = constructHLLValue(LOG2M, i, 1); - cumulativeUnionLine(output, hll, sparseHLL, schemaVersion); - } - - output.flush(); - output.close(); + cumulativeAddLine(output, hll, middleValue, schemaVersion); } - /** - * Unions an EMPTY accumulator with EXPLICIT HLLs, each having a single - * random value, twice in a row to verify that the set properties are - * satisfied. - * - * Format: cumulative union - * Tests: - * - EMPTY U EXPLICIT - * - EXPLICIT U EXPLICIT - */ - private static void explicitOverlapTest(final ISchemaVersion schemaVersion) throws IOException { - final Writer output = openOutput(schemaVersion, "explicit_explicit", TestType.UNION); - - final Random random = new Random(randomLong()); - - // the accumulator, starts empty - final HLL hll = newHLL(HLLType.EMPTY); - final HLL emptyHLL = newHLL(HLLType.EMPTY); - - cumulativeUnionLine(output, hll, emptyHLL, schemaVersion); - - for(int i=0; iFormat: cumulative union Tests: - EMPTY U EXPLICIT - EXPLICIT U EXPLICIT - EXPLICIT to + * SPARSE promotion - SPARSE U EXPLICIT + */ + private static void explicitPromotionTest(final ISchemaVersion schemaVersion) throws IOException { + final Writer output = openOutput(schemaVersion, "explicit_promotion", TestType.UNION); + + final Random random = new Random(randomLong()); + + // the accumulator, starts empty + final HLL hll = newHLL(HLLType.EMPTY); + final HLL emptyHLL = newHLL(HLLType.EMPTY); + cumulativeUnionLine(output, hll, emptyHLL, schemaVersion); + + for (int i = 0; + i < (EXPLICIT_THRESHOLD + 500) /*should be greater than promotion cutoff*/; + i++) { + // make an EXPLICIT set and populate with cardinality 1 + final HLL explicitHLL = newHLL(HLLType.EXPLICIT); + explicitHLL.addRaw(random.nextLong()); + + cumulativeUnionLine(output, hll, explicitHLL, schemaVersion); + } - output.flush(); - output.close(); + output.flush(); + output.close(); + } + + /** + * Unions an EMPTY accumulator with SPARSE HLLs, each having one register set. + * + *
<p>
Format: cumulative union Tests: - EMPTY U SPARSE - SPARSE U SPARSE - SPARSE promotion - + * SPARSE U FULL + */ + private static void sparseProbabilisticPromotionTest(final ISchemaVersion schemaVersion) + throws IOException { + final Writer output = openOutput(schemaVersion, "sparse_promotion", TestType.UNION); + + final Random random = new Random(randomLong()); + + // the accumulator, starts empty + final HLL hll = newHLL(HLLType.EMPTY); + final HLL emptyHLL = newHLL(HLLType.EMPTY); + cumulativeUnionLine(output, hll, emptyHLL, schemaVersion); + + for (int i = 0; + i < (SPARSE_THRESHOLD + 1000) /*should be greater than promotion cutoff*/; + i++) { + // make a SPARSE set and populate with cardinality 1 + final HLL sparseHLL = newHLL(HLLType.SPARSE); + + final int registerIndex = random.nextInt(REGISTER_COUNT); + final int registerValue = random.nextInt(REGISTER_MAX_VALUE) + 1; + final long rawValue = constructHLLValue(LOG2M, registerIndex, registerValue); + sparseHLL.addRaw(rawValue); + + cumulativeUnionLine(output, hll, sparseHLL, schemaVersion); } - /** - * Unions an EMPTY accumulator with SPARSE HLLs, each - * having a single register set, twice in a row to verify that the set - * properties are satisfied. - * - * Format: cumulative union - * Tests: - * - EMPTY U SPARSE - * - SPARSE U SPARSE - */ - private static void sparseProbabilisticOverlapTest(final ISchemaVersion schemaVersion) throws IOException { - final Writer output = openOutput(schemaVersion, "sparse_sparse", TestType.UNION); + output.flush(); + output.close(); + } - final Random random = new Random(randomLong()); + /** + * Unions an EMPTY accumulator with EXPLICIT HLLs, each having a single random value, twice in a + * row to verify that the set properties are satisfied. + * + *
<p>
Format: cumulative union Tests: - EMPTY U EXPLICIT - EXPLICIT U EXPLICIT + */ + private static void explicitOverlapTest(final ISchemaVersion schemaVersion) throws IOException { + final Writer output = openOutput(schemaVersion, "explicit_explicit", TestType.UNION); - // the accumulator, starts empty - final HLL hll = newHLL(HLLType.EMPTY); - final HLL emptyHLL = newHLL(HLLType.EMPTY); + final Random random = new Random(randomLong()); - cumulativeUnionLine(output, hll, emptyHLL, schemaVersion); + // the accumulator, starts empty + final HLL hll = newHLL(HLLType.EMPTY); + final HLL emptyHLL = newHLL(HLLType.EMPTY); - for(int i=0; iFormat: cumulative union Tests: - EMPTY U SPARSE - SPARSE U SPARSE + */ + private static void sparseProbabilisticOverlapTest(final ISchemaVersion schemaVersion) + throws IOException { + final Writer output = openOutput(schemaVersion, "sparse_sparse", TestType.UNION); + + final Random random = new Random(randomLong()); + + // the accumulator, starts empty + final HLL hll = newHLL(HLLType.EMPTY); + final HLL emptyHLL = newHLL(HLLType.EMPTY); + + cumulativeUnionLine(output, hll, emptyHLL, schemaVersion); + + for (int i = 0; i < SPARSE_THRESHOLD; i++) { + // make a SPARSE set and populate with cardinality 1 + final HLL sparseHLL = newHLL(HLLType.SPARSE); + final int registerIndex = random.nextInt(REGISTER_COUNT); + final int registerValue = random.nextInt(REGISTER_MAX_VALUE) + 1; + final long rawValue = constructHLLValue(LOG2M, registerIndex, registerValue); + sparseHLL.addRaw(rawValue); + + cumulativeUnionLine(output, hll, sparseHLL, schemaVersion); + } - output.flush(); - output.close(); + output.flush(); + output.close(); + } + + /** + * Unions an EMPTY accumulator with FULL HLLs, each having many registers set, twice in a row to + * verify that the set properties are satisfied. + * + *
<p>
Format: cumulative union Tests: - EMPTY U FULL - FULL U FULL + */ + private static void probabilisticUnionTest(final ISchemaVersion schemaVersion) + throws IOException { + final Writer output = openOutput(schemaVersion, "probabilistic_probabilistic", TestType.UNION); + + final Random random = new Random(randomLong()); + + // the accumulator, starts empty + final HLL hll = newHLL(HLLType.EMPTY); + final HLL emptyHLL = newHLL(HLLType.EMPTY); + cumulativeUnionLine(output, hll, emptyHLL, schemaVersion); + + for (int i = 0; i < 1000 /*number of rows to generate*/; i++) { + // make a FULL set and populate with + final HLL fullHLL = newHLL(HLLType.FULL); + final int elementCount = random.nextInt(10000 /*arbitrary maximum cardinality*/); + for (int j = 0; j < elementCount; j++) { + fullHLL.addRaw(random.nextLong()); + } + + cumulativeUnionLine(output, hll, fullHLL, schemaVersion); } - /** - * Unions an EMPTY accumulator with random HLLs. - * - * Format: cumulative union - * Tests: - * - hopefully all union possibilities - */ - private static void globalUnionTest(final ISchemaVersion schemaVersion) throws IOException { - final Writer output = openOutput(schemaVersion, "comprehensive", TestType.UNION); + output.flush(); + output.close(); + } - // the accumulator, starts empty - final HLL hll = newHLL(HLLType.EMPTY); - final HLL emptyHLL = newHLL(HLLType.EMPTY); + /** + * Unions an EMPTY accumulator with random HLLs. + * + *
<p>
Format: cumulative union Tests: - hopefully all union possibilities + */ + private static void globalUnionTest(final ISchemaVersion schemaVersion) throws IOException { + final Writer output = openOutput(schemaVersion, "comprehensive", TestType.UNION); - cumulativeUnionLine(output, hll, emptyHLL, schemaVersion); + // the accumulator, starts empty + final HLL hll = newHLL(HLLType.EMPTY); + final HLL emptyHLL = newHLL(HLLType.EMPTY); - for(int i=0; i<1000/*number of rows to generate*/; i++) { - final HLL randomHLL = generateRandomHLL(); - cumulativeUnionLine(output, hll, randomHLL, schemaVersion); - } + cumulativeUnionLine(output, hll, emptyHLL, schemaVersion); - output.flush(); - output.close(); + for (int i = 0; i < 1000 /*number of rows to generate*/; i++) { + final HLL randomHLL = generateRandomHLL(); + cumulativeUnionLine(output, hll, randomHLL, schemaVersion); } - // ======================================================================== - // Main - public static void fullSuite(final ISchemaVersion schemaVersion) throws IOException { - fullCardinalityCorrectionTest(schemaVersion); - globalUnionTest(schemaVersion); - globalStepTest(schemaVersion); - probabilisticUnionTest(schemaVersion); - explicitPromotionTest(schemaVersion); - explicitOverlapTest(schemaVersion); - sparseFullRepresentationTest(schemaVersion); - sparseStepTest(schemaVersion); - sparseRandomTest(schemaVersion); - sparseEdgeTest(schemaVersion); - sparseProbabilisticPromotionTest(schemaVersion); - sparseProbabilisticOverlapTest(schemaVersion); + output.flush(); + output.close(); + } + + // ======================================================================== + // Main + public static void fullSuite(final ISchemaVersion schemaVersion) throws IOException { + fullCardinalityCorrectionTest(schemaVersion); + globalUnionTest(schemaVersion); + globalStepTest(schemaVersion); + probabilisticUnionTest(schemaVersion); + explicitPromotionTest(schemaVersion); + explicitOverlapTest(schemaVersion); + sparseFullRepresentationTest(schemaVersion); + sparseStepTest(schemaVersion); + sparseRandomTest(schemaVersion); + sparseEdgeTest(schemaVersion); + sparseProbabilisticPromotionTest(schemaVersion); + sparseProbabilisticOverlapTest(schemaVersion); + } + + public static void main(String[] args) throws IOException { + fullSuite(SerializationUtil.VERSION_ONE); + } + + // ************************************************************************ + // Helpers + /** + * Shortcut for testing constructor, which uses the constants defined at the top of the file as + * default parameters. + * + * @return a new {@link HLL} of specified type, which uses the parameters ({@link #LOG2M}, {@link + * #REGWIDTH}, {@link #EXPLICIT_THRESHOLD}, and {@link #SPARSE_THRESHOLD}) specified above. + */ + private static HLL newHLL(final HLLType type) { + return new HLL(LOG2M, REGWIDTH, EXPLICIT_THRESHOLD, SPARSE_THRESHOLD, type); + } + + /** + * Returns the algorithm-specific cardinality of the specified {@link HLL} as a {@link String} + * appropriate for comparison with the algorithm-specific cardinality provided by the PostgreSQL + * implementation. + * + * @param hll the HLL whose algorithm-specific cardinality is to be printed. This cannot be + * null. + * @return the algorithm-specific cardinality of the instance as a PostgreSQL- compatible String. 
+ * This will never be null + */ + private static String stringCardinality(final HLL hll) { + switch (hll.getType()) { + case EMPTY: + return "0"; + case EXPLICIT: /*promotion has not yet occurred*/ + return Long.toString(hll.cardinality()); + case SPARSE: + return Double.toString(hll.sparseProbabilisticAlgorithmCardinality()); + case FULL: + return Double.toString(hll.fullProbabilisticAlgorithmCardinality()); + default: + throw new RuntimeException("Unknown HLL type " + hll.getType()); } - - public static void main(String[] args) throws IOException { - fullSuite(SerializationUtil.VERSION_ONE); + } + + /** + * Generates a random HLL and populates it with random values. + * + * @return the populated HLL. This will never be null. + */ + public static HLL generateRandomHLL() { + final int randomTypeInt = randomIntBetween(0, HLLType.values().length - 1); + final HLLType type; + switch (randomTypeInt) { + case 0: + type = HLLType.EMPTY; + break; + case 1: + type = HLLType.EXPLICIT; + break; + case 2: + type = HLLType.FULL; + break; + case 3: + type = HLLType.EMPTY; + break; + case 4: + type = HLLType.SPARSE; + break; + default: + throw new RuntimeException("Unassigned type int " + randomTypeInt); } - // ************************************************************************ - // Helpers - /** - * Shortcut for testing constructor, which uses the constants defined at - * the top of the file as default parameters. - * - * @return a new {@link HLL} of specified type, which uses the parameters - * ({@link #LOG2M}, {@link #REGWIDTH}, {@link #EXPLICIT_THRESHOLD}, - * and {@link #SPARSE_THRESHOLD}) specified above. - */ - private static HLL newHLL(final HLLType type) { - return new HLL(LOG2M, REGWIDTH, EXPLICIT_THRESHOLD, SPARSE_THRESHOLD, type); + final int cardinalityCap; + final int cardinalityBaseline; + + switch (type) { + case EMPTY: + return newHLL(HLLType.EMPTY); + case EXPLICIT: + cardinalityCap = EXPLICIT_THRESHOLD; + cardinalityBaseline = 1; + break; + case SPARSE: + cardinalityCap = SPARSE_THRESHOLD; + cardinalityBaseline = (EXPLICIT_THRESHOLD + 1); + break; + case FULL: + cardinalityCap = 100000; + cardinalityBaseline = (SPARSE_THRESHOLD * 10); + break; + default: + throw new RuntimeException("We should never be here."); } - /** - * Returns the algorithm-specific cardinality of the specified {@link HLL} - * as a {@link String} appropriate for comparison with the algorithm-specific - * cardinality provided by the PostgreSQL implementation. - * - * @param hll the HLL whose algorithm-specific cardinality is to be printed. - * This cannot be null. - * @return the algorithm-specific cardinality of the instance as a PostgreSQL- - * compatible String. This will never be null - */ - private static String stringCardinality(final HLL hll) { - switch(hll.getType()) { - case EMPTY: - return "0"; - case EXPLICIT:/*promotion has not yet occurred*/ - return Long.toString(hll.cardinality()); - case SPARSE: - return Double.toString(hll.sparseProbabilisticAlgorithmCardinality()); - case FULL: - return Double.toString(hll.fullProbabilisticAlgorithmCardinality()); - default: - throw new RuntimeException("Unknown HLL type " + hll.getType()); - } + final HLL hll = newHLL(HLLType.EMPTY); + for (int i = 0; i < cardinalityBaseline; i++) { + hll.addRaw(randomLong()); } - - /** - * Generates a random HLL and populates it with random values. - * - * @return the populated HLL. This will never be null. 
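
Two details of generateRandomHLL are easy to miss in the switch above: type indices 0 and 3 both map to EMPTY, and each non-EMPTY type is populated with a number of raw adds drawn between a per-type baseline and cap, so the resulting HLL plausibly lands in that representation. Summarized from the constants above:

    type      baseline                       cap
    --------  -----------------------------  ------------------------
    EXPLICIT  1                              EXPLICIT_THRESHOLD (256)
    SPARSE    EXPLICIT_THRESHOLD + 1 (257)   SPARSE_THRESHOLD (850)
    FULL      SPARSE_THRESHOLD * 10 (8500)   100000
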
- */ - public static HLL generateRandomHLL() { - final int randomTypeInt = randomIntBetween(0, HLLType.values().length - 1); - final HLLType type; - switch(randomTypeInt) { - case 0: - type = HLLType.EMPTY; - break; - case 1: - type = HLLType.EXPLICIT; - break; - case 2: - type = HLLType.FULL; - break; - case 3: - type = HLLType.EMPTY; - break; - case 4: - type = HLLType.SPARSE; - break; - default: - throw new RuntimeException("Unassigned type int " + randomTypeInt); - } - - final int cardinalityCap; - final int cardinalityBaseline; - - switch(type) { - case EMPTY: - return newHLL(HLLType.EMPTY); - case EXPLICIT: - cardinalityCap = EXPLICIT_THRESHOLD; - cardinalityBaseline = 1; - break; - case SPARSE: - cardinalityCap = SPARSE_THRESHOLD; - cardinalityBaseline = (EXPLICIT_THRESHOLD + 1); - break; - case FULL: - cardinalityCap = 100000; - cardinalityBaseline = (SPARSE_THRESHOLD*10); - break; - default: - throw new RuntimeException("We should never be here."); - } - - final HLL hll = newHLL(HLLType.EMPTY); - for(int i=0; inull. - * @param description Description string used to build the filename. - * This cannot be null. - * @param type {@link TestType type} of the test file to be written. - * This cannot be null. - * @return The opened {@link Writer writer}. This will never be null. - */ - private static Writer openOutput(final ISchemaVersion schemaVersion, final String description, final TestType type) throws IOException { - final String schemaVersionPrefix = "v"+ schemaVersion.schemaVersionNumber() + "_"; - final String header; - final String filename; - switch(type) { - case ADD: - header = "cardinality,raw_value,HLL\n"; - filename = schemaVersionPrefix + "cumulative_add_" + description + ".csv"; - break; - case UNION: - header = "cardinality,HLL,union_cardinality,union_HLL\n"; - filename = schemaVersionPrefix + "cumulative_union_" + description + ".csv"; - break; - default: - throw new RuntimeException("Unknown test type " + type); - } - - final Writer output = Files.newBufferedWriter( - Paths.get(OUTPUT_DIRECTORY, filename), StandardCharsets.UTF_8); - output.write(header); - output.flush(); - return output; - } - - /** - * Writes out a {@link TestType#ADD}-formatted test line. - * - * @param output The output {@link Writer writer}. This cannot be null. - * @param hll The "accumulator" HLL instance. This cannot be null. - * @param rawValue The raw value added to the HLL. - * @param schemaVersion the schema with which to serialize the HLLs. This cannot - * be null. - */ - private static void cumulativeAddLine(final Writer output, final HLL hll, final long rawValue, final ISchemaVersion schemaVersion) throws IOException { - hll.addRaw(rawValue); - final String accumulatorCardinality = stringCardinality(hll); - - output.write(accumulatorCardinality + "," + rawValue + "," + toByteA(hll, schemaVersion) + "\n"); - output.flush(); + for (int i = 0; i < randomInt(cardinalityCap - cardinalityBaseline); i++) { + hll.addRaw(randomLong()); } - /** - * Writes an initial line for a {@link TestType#ADD}-formatted test. - * - * @param output The output {@link Writer writer}. This cannot be null. - * @param hll The "accumulator" HLL instance. This cannot be null. - * @param schemaVersion the schema with which to serialize the HLLs. This cannot - * be null. 
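
Given openOutput's naming scheme above, a fullSuite run drops its CSVs into /tmp/hll_test/ as v<schemaVersion>_cumulative_<add|union>_<description>.csv. Assuming schemaVersionNumber() returns 1 for SerializationUtil.VERSION_ONE, the suite would produce names such as:

    v1_cumulative_add_cardinality_correction.csv
    v1_cumulative_add_comprehensive_promotion.csv
    v1_cumulative_union_comprehensive.csv
    v1_cumulative_union_sparse_full_representation.csv
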
- */ - private static void initLineAdd(final Writer output, final HLL hll, final ISchemaVersion schemaVersion) throws IOException { - output.write(0 + "," + 0 + "," + toByteA(hll, schemaVersion) + "\n"); - output.flush(); + return hll; + } + + /** + * Opens a {@link Writer} and writes out an appropriate CSV header. + * + * @param schemaVersion Schema version of the output. This cannot be null. + * @param description Description string used to build the filename. This cannot be null + * . + * @param type {@link TestType type} of the test file to be written. This cannot be null + * . + * @return The opened {@link Writer writer}. This will never be null. + */ + private static Writer openOutput( + final ISchemaVersion schemaVersion, final String description, final TestType type) + throws IOException { + final String schemaVersionPrefix = "v" + schemaVersion.schemaVersionNumber() + "_"; + final String header; + final String filename; + switch (type) { + case ADD: + header = "cardinality,raw_value,HLL\n"; + filename = schemaVersionPrefix + "cumulative_add_" + description + ".csv"; + break; + case UNION: + header = "cardinality,HLL,union_cardinality,union_HLL\n"; + filename = schemaVersionPrefix + "cumulative_union_" + description + ".csv"; + break; + default: + throw new RuntimeException("Unknown test type " + type); } + final Writer output = + Files.newBufferedWriter(Paths.get(OUTPUT_DIRECTORY, filename), StandardCharsets.UTF_8); + output.write(header); + output.flush(); + return output; + } + + /** + * Writes out a {@link TestType#ADD}-formatted test line. + * + * @param output The output {@link Writer writer}. This cannot be null. + * @param hll The "accumulator" HLL instance. This cannot be null. + * @param rawValue The raw value added to the HLL. + * @param schemaVersion the schema with which to serialize the HLLs. This cannot be null + * . + */ + private static void cumulativeAddLine( + final Writer output, final HLL hll, final long rawValue, final ISchemaVersion schemaVersion) + throws IOException { + hll.addRaw(rawValue); + final String accumulatorCardinality = stringCardinality(hll); + + output.write( + accumulatorCardinality + "," + rawValue + "," + toByteA(hll, schemaVersion) + "\n"); + output.flush(); + } + + /** + * Writes an initial line for a {@link TestType#ADD}-formatted test. + * + * @param output The output {@link Writer writer}. This cannot be null. + * @param hll The "accumulator" HLL instance. This cannot be null. + * @param schemaVersion the schema with which to serialize the HLLs. This cannot be null + * . + */ + private static void initLineAdd( + final Writer output, final HLL hll, final ISchemaVersion schemaVersion) throws IOException { + output.write(0 + "," + 0 + "," + toByteA(hll, schemaVersion) + "\n"); + output.flush(); + } + + /** + * Writes out a {@link TestType#UNION}-formatted test line. + * + * @param output The output {@link Writer writer}. This cannot be null. + * @param hll The "accumulator" HLL instance. This cannot be null. + * @param increment The "increment" HLL instance which will be unioned into the accumulator. This + * cannot be null. + * @param schemaVersion the schema with which to serialize the HLLs. This cannot be null + * . 
+ */ + private static void cumulativeUnionLine( + final Writer output, final HLL hll, final HLL increment, final ISchemaVersion schemaVersion) + throws IOException { + hll.union(increment); + + final String incrementCardinality = stringCardinality(increment); + final String accumulatorCardinality = stringCardinality(hll); + output.write( + incrementCardinality + + "," + + toByteA(increment, schemaVersion) + + "," + + accumulatorCardinality + + "," + + toByteA(hll, schemaVersion) + + "\n"); + output.flush(); + } + + /** + * Serializes a HLL to Postgres 9 'bytea' hex-format, for CSV ingest. + * + * @param hll the HLL to serialize. This cannot be null. + * @param schemaVersion the schema with which to serialize the HLLs. This cannot be null + * . + * @return a PostgreSQL 'bytea' string representing the HLL. + */ + private static String toByteA(final HLL hll, final ISchemaVersion schemaVersion) { + final byte[] bytes = hll.toBytes(schemaVersion); + return ("\\x" + NumberUtil.toHex(bytes, 0, bytes.length)); + } + + /** Indicates what kind of test output a test will generate. */ + private static enum TestType { /** - * Writes out a {@link TestType#UNION}-formatted test line. - * - * @param output The output {@link Writer writer}. This cannot be null. - * @param hll The "accumulator" HLL instance. This cannot be null. - * @param increment The "increment" HLL instance which will be unioned into - * the accumulator. This cannot be null. - * @param schemaVersion the schema with which to serialize the HLLs. This cannot - * be null. + * This type of test is characterized by values being added to an accumulator HLL whose + * serialized representation (after the value is added) is printed to each line along with the + * cardinality and added value. */ - private static void cumulativeUnionLine(final Writer output, final HLL hll, final HLL increment, final ISchemaVersion schemaVersion) throws IOException { - hll.union(increment); - - final String incrementCardinality = stringCardinality(increment); - final String accumulatorCardinality = stringCardinality(hll); - output.write(incrementCardinality + "," + toByteA(increment, schemaVersion) + "," + accumulatorCardinality + "," + toByteA(hll, schemaVersion) + "\n"); - output.flush(); - } - + ADD, /** - * Serializes a HLL to Postgres 9 'bytea' hex-format, for CSV ingest. - * - * @param hll the HLL to serialize. This cannot be null. - * @param schemaVersion the schema with which to serialize the HLLs. This cannot - * be null. - * @return a PostgreSQL 'bytea' string representing the HLL. + * This type of test is characterized by HLLs being unioned into an accumulator HLL whose + * serialized representation (after the HLL is union'd) is printed to each line along with the + * cardinalities and the serialized representation of the HLL union'd in. */ - private static String toByteA(final HLL hll, final ISchemaVersion schemaVersion) { - final byte[] bytes = hll.toBytes(schemaVersion); - return ("\\x" + NumberUtil.toHex(bytes, 0, bytes.length)); - } - - /** - * Indicates what kind of test output a test will generate. - */ - private static enum TestType { - /** - * This type of test is characterized by values being added to an - * accumulator HLL whose serialized representation (after the value is added) - * is printed to each line along with the cardinality and added value. 
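
Putting the output pieces together: each ADD row is "cardinality,raw_value,HLL" and each UNION row is "cardinality,HLL,union_cardinality,union_HLL", with every HLL column rendered by toByteA in PostgreSQL 'bytea' hex form. A standalone sketch of that encoding step (assumed names; deliberately avoids depending on NumberUtil):

    public final class ByteaSketch {
      // Same rendering as toByteA above: a "\x" prefix followed by two
      // uppercase hex digits per byte, suitable for 'bytea' CSV ingest.
      static String toBytea(byte[] bytes) {
        final StringBuilder sb = new StringBuilder("\\x");
        for (final byte b : bytes) {
          sb.append(String.format("%02X", b & 0xFF));
        }
        return sb.toString();
      }

      public static void main(String[] args) {
        System.out.println(toBytea(new byte[] {0x12, (byte) 0xAB})); // \x12AB
      }
    }
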
- */ - ADD, - /** - * This type of test is characterized by HLLs being unioned into an - * accumulator HLL whose serialized representation (after the HLL is - * union'd) is printed to each line along with the cardinalities and the - * serialized representation of the HLL union'd in. - */ - UNION; - } + UNION; + } } diff --git a/solr/core/src/test/org/apache/solr/util/hll/NumberUtilTest.java b/solr/core/src/test/org/apache/solr/util/hll/NumberUtilTest.java index 24cf7a08f10..7707857209c 100644 --- a/solr/core/src/test/org/apache/solr/util/hll/NumberUtilTest.java +++ b/solr/core/src/test/org/apache/solr/util/hll/NumberUtilTest.java @@ -16,102 +16,119 @@ */ package org.apache.solr.util.hll; -import org.junit.Test; import static org.junit.Assert.*; import java.util.Arrays; +import org.junit.Test; -/** - * Tests {@link NumberUtil} - */ +/** Tests {@link NumberUtil} */ public class NumberUtilTest { - final static byte[] ALL_PRINTABLE_ASCII_CHARS = new byte[] { ' ', '!', '"', '#', '$', '%', '&', '\'', '(', ')', '*', - '+', ',', '-', '.', '/', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', ':', ';', '<', '=', '>', '?', '@', 'A', - 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', - 'Y', 'Z', '[', '\\', ']', '^', '_', '`', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', - 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '{', '|', '}', '~', '', }; - - final static byte[] ALL_WORD_CHARAC_ASCII_CHARS = new byte[] { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', - 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', - 'Y', 'Z' }; - - final static byte[] ALL_NUMBER_CHARAC_ASCII_CHARS = new byte[] { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9'}; - - final static byte[] ALL_LETTER_ASCII_CHARS = new byte[] { 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', - 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z' }; - - final static String ALL_NUMBER_ASCII_CHARS_IN_HEX = "30313233343536373839"; - final static String ALL_MAJ_LETTERS_ASCII_CHARS_IN_HEX = "4142434445464748494A4B4C4D4E4F" + "505152535455565758595A"; - final static String ALL_LOW_LETTERS_ASCII_CHARS_IN_HEX = "6162636465666768696A6B6C6D6E6F" + "707172737475767778797A"; - - final static String ALL_PRINTABLE_ASCII_CHARS_IN_HEX = "202122232425262728292A2B2C2D2E2F" - + ALL_NUMBER_ASCII_CHARS_IN_HEX + "3A3B3C3D3E3F" + "40" + ALL_MAJ_LETTERS_ASCII_CHARS_IN_HEX + "5B5C5D5E5F" + "60" - + ALL_LOW_LETTERS_ASCII_CHARS_IN_HEX + "7B7C7D7E7F"; - - /** - * Test {@link NumberUtil#log2(double)}. 
- */ + static final byte[] ALL_PRINTABLE_ASCII_CHARS = + new byte[] { + ' ', '!', '"', '#', '$', '%', '&', '\'', '(', ')', '*', '+', ',', '-', '.', '/', '0', '1', + '2', '3', '4', '5', '6', '7', '8', '9', ':', ';', '<', '=', '>', '?', '@', 'A', 'B', 'C', + 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', + 'V', 'W', 'X', 'Y', 'Z', '[', '\\', ']', '^', '_', '`', 'a', 'b', 'c', 'd', 'e', 'f', 'g', + 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', + 'z', '{', '|', '}', '~', '', + }; + + static final byte[] ALL_WORD_CHARAC_ASCII_CHARS = + new byte[] { + '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', + 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z' + }; + + static final byte[] ALL_NUMBER_CHARAC_ASCII_CHARS = + new byte[] {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9'}; + + static final byte[] ALL_LETTER_ASCII_CHARS = + new byte[] { + 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', + 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z' + }; + + static final String ALL_NUMBER_ASCII_CHARS_IN_HEX = "30313233343536373839"; + static final String ALL_MAJ_LETTERS_ASCII_CHARS_IN_HEX = + "4142434445464748494A4B4C4D4E4F" + "505152535455565758595A"; + static final String ALL_LOW_LETTERS_ASCII_CHARS_IN_HEX = + "6162636465666768696A6B6C6D6E6F" + "707172737475767778797A"; + + static final String ALL_PRINTABLE_ASCII_CHARS_IN_HEX = + "202122232425262728292A2B2C2D2E2F" + + ALL_NUMBER_ASCII_CHARS_IN_HEX + + "3A3B3C3D3E3F" + + "40" + + ALL_MAJ_LETTERS_ASCII_CHARS_IN_HEX + + "5B5C5D5E5F" + + "60" + + ALL_LOW_LETTERS_ASCII_CHARS_IN_HEX + + "7B7C7D7E7F"; + + /** Test {@link NumberUtil#log2(double)}. 
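
The hex-string constants above are simply the ASCII bytes spelled out: '0' through '9' are 0x30 through 0x39, so "30313233343536373839" round-trips to the ten digit characters. A minimal check of that expectation (a sketch assumed to live in the same package as NumberUtil, using only the toHex/fromHex signatures this test already exercises):

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    public final class HexRoundTripSketch {
      public static void main(String[] args) {
        // '0'..'9' as ASCII bytes: 0x30..0x39
        final byte[] digits = "0123456789".getBytes(StandardCharsets.US_ASCII);
        final String hex = NumberUtil.toHex(digits, 0, digits.length);
        System.out.println(hex); // 30313233343536373839
        System.out.println(Arrays.equals(NumberUtil.fromHex(hex, 0, hex.length()), digits)); // true
      }
    }
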
*/ @Test public void testLog2() { final double log2Result = NumberUtil.log2(2d); assertTrue(log2Result == 1); } - /** - * Test {@link NumberUtil#toHex(byte[], int, int)} - */ + /** Test {@link NumberUtil#toHex(byte[], int, int)} */ @Test public void TestToHex() { - assertTrue(ALL_PRINTABLE_ASCII_CHARS_IN_HEX - .equals(NumberUtil.toHex(ALL_PRINTABLE_ASCII_CHARS, 0, ALL_PRINTABLE_ASCII_CHARS.length))); + assertTrue( + ALL_PRINTABLE_ASCII_CHARS_IN_HEX.equals( + NumberUtil.toHex(ALL_PRINTABLE_ASCII_CHARS, 0, ALL_PRINTABLE_ASCII_CHARS.length))); } - /** - * Test {@link NumberUtil#toHex(byte[], int, int)} - */ + /** Test {@link NumberUtil#toHex(byte[], int, int)} */ @Test public void TestToHexWithOffset() { - assertTrue(ALL_MAJ_LETTERS_ASCII_CHARS_IN_HEX - .equals(NumberUtil.toHex(ALL_WORD_CHARAC_ASCII_CHARS, 10, ALL_PRINTABLE_ASCII_CHARS.length))); + assertTrue( + ALL_MAJ_LETTERS_ASCII_CHARS_IN_HEX.equals( + NumberUtil.toHex(ALL_WORD_CHARAC_ASCII_CHARS, 10, ALL_PRINTABLE_ASCII_CHARS.length))); } - /** - * Test {@link NumberUtil#toHex(byte[], int, int)} - */ + /** Test {@link NumberUtil#toHex(byte[], int, int)} */ @Test public void TestToHexWithCountt() { - assertTrue(ALL_NUMBER_ASCII_CHARS_IN_HEX.equals(NumberUtil.toHex(ALL_WORD_CHARAC_ASCII_CHARS, 0, 10))); + assertTrue( + ALL_NUMBER_ASCII_CHARS_IN_HEX.equals(NumberUtil.toHex(ALL_WORD_CHARAC_ASCII_CHARS, 0, 10))); } - /** - * Test {@link NumberUtil#fromHex(String, int, int)} - */ + /** Test {@link NumberUtil#fromHex(String, int, int)} */ @Test public void TestFromHex() { assertTrue( - Arrays - .equals( - NumberUtil.fromHex(ALL_NUMBER_ASCII_CHARS_IN_HEX + ALL_MAJ_LETTERS_ASCII_CHARS_IN_HEX - + ALL_LOW_LETTERS_ASCII_CHARS_IN_HEX, 0, ALL_WORD_CHARAC_ASCII_CHARS.length * 2), - ALL_WORD_CHARAC_ASCII_CHARS)); + Arrays.equals( + NumberUtil.fromHex( + ALL_NUMBER_ASCII_CHARS_IN_HEX + + ALL_MAJ_LETTERS_ASCII_CHARS_IN_HEX + + ALL_LOW_LETTERS_ASCII_CHARS_IN_HEX, + 0, + ALL_WORD_CHARAC_ASCII_CHARS.length * 2), + ALL_WORD_CHARAC_ASCII_CHARS)); } - /** - * Test {@link NumberUtil#fromHex(String, int, int)} - */ + /** Test {@link NumberUtil#fromHex(String, int, int)} */ @Test public void TestFromHexWithOffset() { - assertTrue(Arrays.equals(NumberUtil.fromHex(ALL_NUMBER_ASCII_CHARS_IN_HEX + ALL_MAJ_LETTERS_ASCII_CHARS_IN_HEX, 20, - ALL_LETTER_ASCII_CHARS.length * 2), ALL_LETTER_ASCII_CHARS)); + assertTrue( + Arrays.equals( + NumberUtil.fromHex( + ALL_NUMBER_ASCII_CHARS_IN_HEX + ALL_MAJ_LETTERS_ASCII_CHARS_IN_HEX, + 20, + ALL_LETTER_ASCII_CHARS.length * 2), + ALL_LETTER_ASCII_CHARS)); } - /** - * Test {@link NumberUtil#fromHex(String, int, int)} - */ + /** Test {@link NumberUtil#fromHex(String, int, int)} */ @Test public void TestFromHexWithCount() { - assertTrue(Arrays.equals(NumberUtil.fromHex(ALL_NUMBER_ASCII_CHARS_IN_HEX + ALL_MAJ_LETTERS_ASCII_CHARS_IN_HEX, 0, - 20), ALL_NUMBER_CHARAC_ASCII_CHARS)); + assertTrue( + Arrays.equals( + NumberUtil.fromHex( + ALL_NUMBER_ASCII_CHARS_IN_HEX + ALL_MAJ_LETTERS_ASCII_CHARS_IN_HEX, 0, 20), + ALL_NUMBER_CHARAC_ASCII_CHARS)); } } diff --git a/solr/core/src/test/org/apache/solr/util/hll/ProbabilisticTestUtil.java b/solr/core/src/test/org/apache/solr/util/hll/ProbabilisticTestUtil.java index edfe5986063..3081a682b95 100644 --- a/solr/core/src/test/org/apache/solr/util/hll/ProbabilisticTestUtil.java +++ b/solr/core/src/test/org/apache/solr/util/hll/ProbabilisticTestUtil.java @@ -17,59 +17,56 @@ package org.apache.solr.util.hll; /** - * A collection of test utilities for constructing input values to HLLs and for - * 
computing their serialized size. + * A collection of test utilities for constructing input values to HLLs and for computing their + * serialized size. */ public class ProbabilisticTestUtil { - /** - * Constructs a value that when added raw to a HLL will set the register at - * registerIndex to registerValue. - * - * @param log2m the log-base-2 of the number of registers in the HLL - * @param registerIndex the index of the register to set - * @param registerValue the value to set the register to - * @return the value - */ - public static long constructHLLValue(final int log2m, final int registerIndex, final int registerValue) { - final long partition = registerIndex; - final long substreamValue = (1L << (registerValue - 1)); - return (substreamValue << log2m) | partition; - } - - /** - * Extracts the HLL register index from a raw value. - */ - public static short getRegisterIndex(final long rawValue, final int log2m) { - final long mBitsMask = (1 << log2m) - 1; - final short j = (short)(rawValue & mBitsMask); - return j; - } + /** + * Constructs a value that when added raw to a HLL will set the register at registerIndex + * to registerValue. + * + * @param log2m the log-base-2 of the number of registers in the HLL + * @param registerIndex the index of the register to set + * @param registerValue the value to set the register to + * @return the value + */ + public static long constructHLLValue( + final int log2m, final int registerIndex, final int registerValue) { + final long partition = registerIndex; + final long substreamValue = (1L << (registerValue - 1)); + return (substreamValue << log2m) | partition; + } - /** - * Extracts the HLL register value from a raw value. - */ - public static byte getRegisterValue(final long rawValue, final int log2m) { - final long substreamValue = (rawValue >>> log2m); - final byte p_w; + /** Extracts the HLL register index from a raw value. */ + public static short getRegisterIndex(final long rawValue, final int log2m) { + final long mBitsMask = (1 << log2m) - 1; + final short j = (short) (rawValue & mBitsMask); + return j; + } - if (substreamValue == 0L) { - // The paper does not cover p(0x0), so the special value 0 is used. - // 0 is the original initialization value of the registers, so by - // doing this the HLL simply ignores it. This is acceptable - // because the probability is 1/(2^(2^registerSizeInBits)). - p_w = 0; - } else { - p_w = (byte)Math.min(1 + BitUtil.leastSignificantBit(substreamValue), 31); - } + /** Extracts the HLL register value from a raw value. */ + public static byte getRegisterValue(final long rawValue, final int log2m) { + final long substreamValue = (rawValue >>> log2m); + final byte p_w; - return p_w; + if (substreamValue == 0L) { + // The paper does not cover p(0x0), so the special value 0 is used. + // 0 is the original initialization value of the registers, so by + // doing this the HLL simply ignores it. This is acceptable + // because the probability is 1/(2^(2^registerSizeInBits)). + p_w = 0; + } else { + p_w = (byte) Math.min(1 + BitUtil.leastSignificantBit(substreamValue), 31); } - /** - * @return the number of bytes required to pack registerCount - * registers of width shortWordLength. - */ - public static int getRequiredBytes(final int shortWordLength, final int registerCount) { - return (int)Math.ceil((registerCount * shortWordLength)/(float)8); - } + return p_w; + } + + /** + * @return the number of bytes required to pack registerCount registers of width + * shortWordLength. 
+ */ + public static int getRequiredBytes(final int shortWordLength, final int registerCount) { + return (int) Math.ceil((registerCount * shortWordLength) / (float) 8); + } } diff --git a/solr/core/src/test/org/apache/solr/util/hll/SparseHLLTest.java b/solr/core/src/test/org/apache/solr/util/hll/SparseHLLTest.java index a22038b0d09..ce65d484af8 100644 --- a/solr/core/src/test/org/apache/solr/util/hll/SparseHLLTest.java +++ b/solr/core/src/test/org/apache/solr/util/hll/SparseHLLTest.java @@ -16,437 +16,567 @@ */ package org.apache.solr.util.hll; -import org.apache.solr.SolrTestCase; -import org.junit.Test; - import com.carrotsearch.hppc.IntByteHashMap; import com.carrotsearch.hppc.cursors.IntByteCursor; import com.carrotsearch.randomizedtesting.RandomizedTest; +import org.apache.solr.SolrTestCase; +import org.junit.Test; -/** - * Tests {@link HLL} of type {@link HLLType#SPARSE}. - */ +/** Tests {@link HLL} of type {@link HLLType#SPARSE}. */ public class SparseHLLTest extends SolrTestCase { - private static final int log2m = 11; - - /** - * Tests {@link HLL#addRaw(long)}. - */ - @Test - public void addTest() { - { // insert an element with register value 1 (minimum set value) - final int registerIndex = 0; - final int registerValue = 1; - final long rawValue = ProbabilisticTestUtil.constructHLLValue(log2m, registerIndex, registerValue); - - final HLL hll = new HLL(log2m, 5/*regwidth*/, 128/*explicitThreshold, arbitrary, unused*/, 256/*sparseThreshold, arbitrary*/, HLLType.SPARSE); - hll.addRaw(rawValue); - - assertOneRegisterSet(hll, registerIndex, (byte)registerValue); - } - { // insert an element with register value 31 (maximum set value) - final int registerIndex = 0; - final int registerValue = 31; - final long rawValue = ProbabilisticTestUtil.constructHLLValue(log2m, registerIndex, registerValue); - - final HLL hll = new HLL(log2m, 5/*regwidth*/, 128/*explicitThreshold, arbitrary, unused*/, 256/*sparseThreshold, arbitrary*/, HLLType.SPARSE); - hll.addRaw(rawValue); - - assertOneRegisterSet(hll, registerIndex, (byte)registerValue); - } - { // insert an element that could overflow the register (past 31) - final int registerIndex = 0; - final int registerValue = 36; - final long rawValue = ProbabilisticTestUtil.constructHLLValue(log2m, registerIndex, registerValue); - - final HLL hll = new HLL(log2m, 5/*regwidth*/, 128/*explicitThreshold, arbitrary, unused*/, 256/*sparseThreshold, arbitrary*/, HLLType.SPARSE); - hll.addRaw(rawValue); - - assertOneRegisterSet(hll, (short)registerIndex, (byte)31/*register max*/); - } - { // insert duplicate elements, observe no change - final int registerIndex = 0; - final int registerValue = 1; - final long rawValue = ProbabilisticTestUtil.constructHLLValue(log2m, registerIndex, registerValue); - - final HLL hll = new HLL(log2m, 5/*regwidth*/, 128/*explicitThreshold, arbitrary, unused*/, 256/*sparseThreshold, arbitrary*/, HLLType.SPARSE); - hll.addRaw(rawValue); - hll.addRaw(rawValue); - - assertOneRegisterSet(hll, registerIndex, (byte)registerValue); - } - { // insert elements that increase a register's value - final int registerIndex = 0; - final int registerValue = 1; - final long rawValue = ProbabilisticTestUtil.constructHLLValue(log2m, registerIndex, registerValue); - - final HLL hll = new HLL(log2m, 5/*regwidth*/, 128/*explicitThreshold, arbitrary, unused*/, 256/*sparseThreshold, arbitrary*/, HLLType.SPARSE); - hll.addRaw(rawValue); - - final int registerValue2 = 2; - final long rawValue2 = ProbabilisticTestUtil.constructHLLValue(log2m, registerIndex, 
registerValue2); - hll.addRaw(rawValue2); - - assertOneRegisterSet(hll, registerIndex, (byte)registerValue2); - } - { // insert elements that have lower register values, observe no change - final int registerIndex = 0; - final int registerValue = 2; - final long rawValue = ProbabilisticTestUtil.constructHLLValue(log2m, registerIndex, registerValue); - - final HLL hll = new HLL(log2m, 5/*regwidth*/, 128/*explicitThreshold, arbitrary, unused*/, 256/*sparseThreshold, arbitrary*/, HLLType.SPARSE); - hll.addRaw(rawValue); - - final int registerValue2 = 1; - final long rawValue2 = ProbabilisticTestUtil.constructHLLValue(log2m, registerIndex, registerValue2); - hll.addRaw(rawValue2); - - assertOneRegisterSet(hll, registerIndex, (byte)registerValue); - } + private static final int log2m = 11; + + /** Tests {@link HLL#addRaw(long)}. */ + @Test + public void addTest() { + { // insert an element with register value 1 (minimum set value) + final int registerIndex = 0; + final int registerValue = 1; + final long rawValue = + ProbabilisticTestUtil.constructHLLValue(log2m, registerIndex, registerValue); + + final HLL hll = + new HLL( + log2m, + 5 /*regwidth*/, + 128 /*explicitThreshold, arbitrary, unused*/, + 256 /*sparseThreshold, arbitrary*/, + HLLType.SPARSE); + hll.addRaw(rawValue); + + assertOneRegisterSet(hll, registerIndex, (byte) registerValue); } - - /** - * Smoke test for {@link HLL#cardinality()} and the proper use of the small - * range correction. - */ - @Test - public void smallRangeSmokeTest() { - final int log2m = 11; - final int m = (1 << log2m); - final int regwidth = 5; - - // only one register set - { - final HLL hll = new HLL(log2m, regwidth, 128/*explicitThreshold, arbitrary, unused*/, 256/*sparseThreshold, arbitrary*/, HLLType.SPARSE); - hll.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, 0, 1)); - - final long cardinality = hll.cardinality(); - - // Trivially true that small correction conditions hold: one register - // set implies zeroes exist, and estimator trivially smaller than 5m/2. - // Small range correction: m * log(m/V) - final long expected = (long)Math.ceil(m * Math.log((double)m / (m - 1)/*# of zeroes*/)); - assertEquals(cardinality, expected); - } - - // all but one register set - { - final HLL hll = new HLL(log2m, regwidth, 128/*explicitThreshold, arbitrary, unused*/, 256/*sparseThreshold, arbitrary*/, HLLType.SPARSE); - for(int i=0; i<(m - 1); i++) { - hll.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, i, 1)); - } - - // Trivially true that small correction conditions hold: all but - // one register set implies a zero exists, and estimator trivially - // smaller than 5m/2 since it's alpha / ((m-1)/2) - final long cardinality = hll.cardinality(); - - // Small range correction: m * log(m/V) - final long expected = (long)Math.ceil(m * Math.log((double)m / 1/*# of zeroes*/)); - assertEquals(cardinality, expected); - } + { // insert an element with register value 31 (maximum set value) + final int registerIndex = 0; + final int registerValue = 31; + final long rawValue = + ProbabilisticTestUtil.constructHLLValue(log2m, registerIndex, registerValue); + + final HLL hll = + new HLL( + log2m, + 5 /*regwidth*/, + 128 /*explicitThreshold, arbitrary, unused*/, + 256 /*sparseThreshold, arbitrary*/, + HLLType.SPARSE); + hll.addRaw(rawValue); + + assertOneRegisterSet(hll, registerIndex, (byte) registerValue); } - - /** - * Smoke test for {@link HLL#cardinality()} and the proper use of the - * uncorrected estimator. 
- */ - @Test - public void normalRangeSmokeTest() { - final int log2m = 11; - final int m = (1 << log2m); - final int regwidth = 5; - // regwidth = 5, so hash space is - // log2m + (2^5 - 1 - 1), so L = log2m + 30 - final int l = log2m + 30; - - // all registers at 'medium' value - { - final HLL hll = new HLL(log2m, regwidth, 128/*explicitThreshold, arbitrary, unused*/, m/*sparseThreshold*/, HLLType.SPARSE); - - final int registerValue = 7/*chosen to ensure neither correction kicks in*/; - for(int i=0; i<m; i++) { - hll.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, i, registerValue)); - } - - final long cardinality = hll.cardinality(); - - // Simplified estimator when all registers take same value: alpha / (m/2^val) - final double estimator = HLLUtil.alphaMSquared(m)/((double)m/Math.pow(2, registerValue)); - - // Assert conditions for uncorrected range - assertTrue(estimator <= Math.pow(2, l)/30); - assertTrue(estimator > (5 * m /(double)2)); - - final long expected = (long)Math.ceil(estimator); - assertEquals(cardinality, expected); - } - } - - /** - * Smoke test for {@link HLL#cardinality()} and the proper use of the large - * range correction. - */ - @Test - public void largeRangeSmokeTest() { - final int log2m = 11; - final int m = (1 << log2m); - final int regwidth = 5; - // regwidth = 5, so hash space is - // log2m + (2^5 - 1 - 1), so L = log2m + 30 - final int l = log2m + 30; - - // all registers at large value - { - final HLL hll = new HLL(log2m, regwidth, 128/*explicitThreshold, arbitrary, unused*/, m/*sparseThreshold*/, HLLType.SPARSE); - - final int registerValue = 31/*chosen to ensure large correction kicks in*/; - for(int i=0; i<m; i++) { - hll.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, i, registerValue)); - } - - final long cardinality = hll.cardinality(); - - // Simplified estimator when all registers take same value: alpha / (m/2^val) - final double estimator = HLLUtil.alphaMSquared(m)/((double)m/Math.pow(2, registerValue)); - - // Assert conditions for large range - assertTrue(estimator > Math.pow(2, l)/30); - - // Large range correction: -2^32 * log(1 - E/2^32) - final long expected = (long)Math.ceil(-1.0 * Math.pow(2, l) * Math.log(1.0 - estimator/Math.pow(2, l))); - assertEquals(cardinality, expected); - } + { // insert an element that could overflow the register (past 31) + final int registerIndex = 0; + final int registerValue = 36; + final long rawValue = + ProbabilisticTestUtil.constructHLLValue(log2m, registerIndex, registerValue); + + final HLL hll = + new HLL( + log2m, + 5 /*regwidth*/, + 128 /*explicitThreshold, arbitrary, unused*/, + 256 /*sparseThreshold, arbitrary*/, + HLLType.SPARSE); + hll.addRaw(rawValue); + + assertOneRegisterSet(hll, (short) registerIndex, (byte) 31 /*register max*/); } - - /** - * Tests {@link HLL#union(HLL)}.
- */ - @Test - public void unionTest() { - final int log2m = 11/*arbitrary*/; - final int sparseThreshold = 256/*arbitrary*/; - - { // two empty multisets should union to an empty set - final HLL hllA = new HLL(log2m, 5/*regwidth*/, 128/*explicitThreshold, arbitrary, unused*/, sparseThreshold, HLLType.SPARSE); - final HLL hllB = new HLL(log2m, 5/*regwidth*/, 128/*explicitThreshold, arbitrary, unused*/, sparseThreshold, HLLType.SPARSE); - - hllA.union(hllB); - - assertEquals(hllA.getType(), HLLType.SPARSE/*unchanged*/); - assertEquals(hllA.cardinality(), 0L); - } - { // two disjoint multisets should union properly - final HLL hllA = new HLL(log2m, 5/*regwidth*/, 128/*explicitThreshold, arbitrary, unused*/, sparseThreshold, HLLType.SPARSE); - hllA.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, 1, 1)); - final HLL hllB = new HLL(log2m, 5/*regwidth*/, 128/*explicitThreshold, arbitrary, unused*/, sparseThreshold, HLLType.SPARSE); - hllB.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, 2, 1)); - - - hllA.union(hllB); - - assertEquals(hllA.getType(), HLLType.SPARSE/*unchanged*/); - assertEquals(hllA.cardinality(), 3L/*precomputed*/); - assertRegisterPresent(hllA, 1, (byte)1); - assertRegisterPresent(hllA, 2, (byte)1); - } - { // two exactly overlapping multisets should union properly - final HLL hllA = new HLL(log2m, 5/*regwidth*/, 128/*explicitThreshold, arbitrary, unused*/, sparseThreshold, HLLType.SPARSE); - hllA.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, 1, 10)); - final HLL hllB = new HLL(log2m, 5/*regwidth*/, 128/*explicitThreshold, arbitrary, unused*/, sparseThreshold, HLLType.SPARSE); - hllB.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, 1, 13)); - - hllA.union(hllB); - - assertEquals(hllA.getType(), HLLType.SPARSE/*unchanged*/); - assertEquals(hllA.cardinality(), 2L/*precomputed*/); - assertOneRegisterSet(hllA, 1, (byte)13/*max(10,13)*/); - } - { // overlapping multisets should union properly - final HLL hllA = new HLL(log2m, 5/*regwidth*/, 128/*explicitThreshold, arbitrary, unused*/, sparseThreshold, HLLType.SPARSE); - final HLL hllB = new HLL(log2m, 5/*regwidth*/, 128/*explicitThreshold, arbitrary, unused*/, sparseThreshold, HLLType.SPARSE); - // register index = 3 - final long rawValueA = ProbabilisticTestUtil.constructHLLValue(log2m, 3, 11); - - // register index = 4 - final long rawValueB = ProbabilisticTestUtil.constructHLLValue(log2m, 4, 13); - final long rawValueBPrime = ProbabilisticTestUtil.constructHLLValue(log2m, 4, 21); - - // register index = 5 - final long rawValueC = ProbabilisticTestUtil.constructHLLValue(log2m, 5, 14); - - hllA.addRaw(rawValueA); - hllA.addRaw(rawValueB); - - hllB.addRaw(rawValueBPrime); - hllB.addRaw(rawValueC); - - hllA.union(hllB); - // union should have three registers set, with partition B set to the - // max of the two registers - assertRegisterPresent(hllA, 3, (byte)11); - assertRegisterPresent(hllA, 4, (byte)21/*max(21,13)*/); - assertRegisterPresent(hllA, 5, (byte)14); - } - { // too-large unions should promote - final HLL hllA = new HLL(log2m, 5/*regwidth*/, 128/*explicitThreshold, arbitrary, unused*/, sparseThreshold, HLLType.SPARSE); - final HLL hllB = new HLL(log2m, 5/*regwidth*/, 128/*explicitThreshold, arbitrary, unused*/, sparseThreshold, HLLType.SPARSE); - - // fill up sets to maxCapacity - for(int i=0; i (5 * m / (double) 2)); + + final long expected = (long) Math.ceil(estimator); + assertEquals(cardinality, expected); } - - /** - * Asserts that only the specified register is set and has the specified 
value. - */ - private static void assertOneRegisterSet(final HLL hll, - final int registerIndex, - final byte registerValue) { - final IntByteHashMap sparseProbabilisticStorage = hll.sparseProbabilisticStorage; - assertEquals(sparseProbabilisticStorage.size(), 1); - assertEquals(sparseProbabilisticStorage.get(registerIndex), registerValue); + } + + /** Smoke test for {@link HLL#cardinality()} and the proper use of the large range correction. */ + @Test + public void largeRangeSmokeTest() { + final int log2m = 11; + final int m = (1 << log2m); + final int regwidth = 5; + // regwidth = 5, so hash space is + // log2m + (2^5 - 1 - 1), so L = log2m + 30 + final int l = log2m + 30; + + // all registers at large value + { + final HLL hll = + new HLL( + log2m, + regwidth, + 128 /*explicitThreshold, arbitrary, unused*/, + m /*sparseThreshold*/, + HLLType.SPARSE); + + final int registerValue = 31 /*chosen to ensure large correction kicks in*/; + for (int i = 0; i < m; i++) { + hll.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, i, registerValue)); + } + + final long cardinality = hll.cardinality(); + + // Simplified estimator when all registers take same value: alpha / (m/2^val) + final double estimator = HLLUtil.alphaMSquared(m) / ((double) m / Math.pow(2, registerValue)); + + // Assert conditions for large range + assertTrue(estimator > Math.pow(2, l) / 30); + + // Large range correction: -2^32 * log(1 - E/2^32) + final long expected = + (long) Math.ceil(-1.0 * Math.pow(2, l) * Math.log(1.0 - estimator / Math.pow(2, l))); + assertEquals(cardinality, expected); } - - /** - * Asserts that all registers in the two {@link HLL} instances are identical. - */ - private static void assertElementsEqual(final HLL hllA, final HLL hllB) { - final IntByteHashMap sparseProbabilisticStorageA = hllA.sparseProbabilisticStorage; - final IntByteHashMap sparseProbabilisticStorageB = hllB.sparseProbabilisticStorage; - assertEquals(sparseProbabilisticStorageA.size(), sparseProbabilisticStorageB.size()); - for (IntByteCursor c : sparseProbabilisticStorageA) { - assertEquals(sparseProbabilisticStorageA.get(c.key), - sparseProbabilisticStorageB.get(c.key)); + } + + /** Tests {@link HLL#union(HLL)}. 
*/ + @Test + public void unionTest() { + final int log2m = 11 /*arbitrary*/; + final int sparseThreshold = 256 /*arbitrary*/; + + { // two empty multisets should union to an empty set + final HLL hllA = + new HLL( + log2m, + 5 /*regwidth*/, + 128 /*explicitThreshold, arbitrary, unused*/, + sparseThreshold, + HLLType.SPARSE); + final HLL hllB = + new HLL( + log2m, + 5 /*regwidth*/, + 128 /*explicitThreshold, arbitrary, unused*/, + sparseThreshold, + HLLType.SPARSE); + + hllA.union(hllB); + + assertEquals(hllA.getType(), HLLType.SPARSE /*unchanged*/); + assertEquals(hllA.cardinality(), 0L); + } + { // two disjoint multisets should union properly + final HLL hllA = + new HLL( + log2m, + 5 /*regwidth*/, + 128 /*explicitThreshold, arbitrary, unused*/, + sparseThreshold, + HLLType.SPARSE); + hllA.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, 1, 1)); + final HLL hllB = + new HLL( + log2m, + 5 /*regwidth*/, + 128 /*explicitThreshold, arbitrary, unused*/, + sparseThreshold, + HLLType.SPARSE); + hllB.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, 2, 1)); + + hllA.union(hllB); + + assertEquals(hllA.getType(), HLLType.SPARSE /*unchanged*/); + assertEquals(hllA.cardinality(), 3L /*precomputed*/); + assertRegisterPresent(hllA, 1, (byte) 1); + assertRegisterPresent(hllA, 2, (byte) 1); + } + { // two exactly overlapping multisets should union properly + final HLL hllA = + new HLL( + log2m, + 5 /*regwidth*/, + 128 /*explicitThreshold, arbitrary, unused*/, + sparseThreshold, + HLLType.SPARSE); + hllA.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, 1, 10)); + final HLL hllB = + new HLL( + log2m, + 5 /*regwidth*/, + 128 /*explicitThreshold, arbitrary, unused*/, + sparseThreshold, + HLLType.SPARSE); + hllB.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, 1, 13)); + + hllA.union(hllB); + + assertEquals(hllA.getType(), HLLType.SPARSE /*unchanged*/); + assertEquals(hllA.cardinality(), 2L /*precomputed*/); + assertOneRegisterSet(hllA, 1, (byte) 13 /*max(10,13)*/); + } + { // overlapping multisets should union properly + final HLL hllA = + new HLL( + log2m, + 5 /*regwidth*/, + 128 /*explicitThreshold, arbitrary, unused*/, + sparseThreshold, + HLLType.SPARSE); + final HLL hllB = + new HLL( + log2m, + 5 /*regwidth*/, + 128 /*explicitThreshold, arbitrary, unused*/, + sparseThreshold, + HLLType.SPARSE); + // register index = 3 + final long rawValueA = ProbabilisticTestUtil.constructHLLValue(log2m, 3, 11); + + // register index = 4 + final long rawValueB = ProbabilisticTestUtil.constructHLLValue(log2m, 4, 13); + final long rawValueBPrime = ProbabilisticTestUtil.constructHLLValue(log2m, 4, 21); + + // register index = 5 + final long rawValueC = ProbabilisticTestUtil.constructHLLValue(log2m, 5, 14); + + hllA.addRaw(rawValueA); + hllA.addRaw(rawValueB); + + hllB.addRaw(rawValueBPrime); + hllB.addRaw(rawValueC); + + hllA.union(hllB); + // union should have three registers set, with partition B set to the + // max of the two registers + assertRegisterPresent(hllA, 3, (byte) 11); + assertRegisterPresent(hllA, 4, (byte) 21 /*max(21,13)*/); + assertRegisterPresent(hllA, 5, (byte) 14); + } + { // too-large unions should promote + final HLL hllA = + new HLL( + log2m, + 5 /*regwidth*/, + 128 /*explicitThreshold, arbitrary, unused*/, + sparseThreshold, + HLLType.SPARSE); + final HLL hllB = + new HLL( + log2m, + 5 /*regwidth*/, + 128 /*explicitThreshold, arbitrary, unused*/, + sparseThreshold, + HLLType.SPARSE); + + // fill up sets to maxCapacity + for (int i = 0; i < sparseThreshold; i++) { + 
hllA.addRaw(ProbabilisticTestUtil.constructHLLValue(log2m, i, 1)); + hllB.addRaw( + ProbabilisticTestUtil.constructHLLValue( + log2m, (i + sparseThreshold) /*non-overlapping*/, 1)); + } + + hllA.union(hllB); + + assertEquals(hllA.getType(), HLLType.FULL); + } + } + + /** Tests {@link HLL#clear()}. */ + @Test + public void clearTest() { + final HLL hll = + new HLL( + log2m, + 5 /*regwidth*/, + 128 /*explicitThreshold, arbitrary, unused*/, + 256 /*sparseThreshold, arbitrary, unused*/, + HLLType.SPARSE); + hll.addRaw(1L); + hll.clear(); + assertEquals(hll.cardinality(), 0L); + } + + /** Tests {@link HLL#toBytes(ISchemaVersion)} and {@link HLL#fromBytes(byte[])}. */ + @Test + public void toFromBytesTest() { + final int log2m = 11 /*arbitrary*/; + final int regwidth = 5 /*arbitrary*/; + final int sparseThreshold = 256 /*arbitrary*/; + final int shortWordLength = 16 /*log2m + regwidth = 11 + 5*/; + + final ISchemaVersion schemaVersion = SerializationUtil.DEFAULT_SCHEMA_VERSION; + final HLLType type = HLLType.SPARSE; + final int padding = schemaVersion.paddingBytes(type); + + { // Should work on an empty element + final HLL hll = + new HLL( + log2m, + regwidth, + 128 /*explicitThreshold, arbitrary, unused*/, + sparseThreshold, + HLLType.SPARSE); + final byte[] bytes = hll.toBytes(schemaVersion); + + // output should just be padding since no registers are used + assertEquals(bytes.length, padding); + + final HLL inHLL = HLL.fromBytes(bytes); + + // assert register values correct + assertElementsEqual(hll, inHLL); + } + { // Should work on a partially filled element + final HLL hll = + new HLL( + log2m, + regwidth, + 128 /*explicitThreshold, arbitrary, unused*/, + sparseThreshold, + HLLType.SPARSE); + + for (int i = 0; i < 3; i++) { + final long rawValue = ProbabilisticTestUtil.constructHLLValue(log2m, i, (i + 9)); + hll.addRaw(rawValue); + } + + final byte[] bytes = hll.toBytes(schemaVersion); + + assertEquals( + bytes.length, + padding + ProbabilisticTestUtil.getRequiredBytes(shortWordLength, 3 /*registerCount*/)); + + final HLL inHLL = HLL.fromBytes(bytes); + + // assert register values correct + assertElementsEqual(hll, inHLL); + } + { // Should work on a full set + final HLL hll = + new HLL( + log2m, + regwidth, + 128 /*explicitThreshold, arbitrary, unused*/, + sparseThreshold, + HLLType.SPARSE); + + for (int i = 0; i < sparseThreshold; i++) { + final long rawValue = ProbabilisticTestUtil.constructHLLValue(log2m, i, (i % 9) + 1); + hll.addRaw(rawValue); + } + + final byte[] bytes = hll.toBytes(schemaVersion); + + // 'short words' should be 12 bits + 5 bits = 17 bits long + assertEquals( + bytes.length, + padding + ProbabilisticTestUtil.getRequiredBytes(shortWordLength, sparseThreshold)); + + final HLL inHLL = HLL.fromBytes(bytes); + + // assert register values correct + assertElementsEqual(hll, inHLL); + } + } + + /** Smoke tests the multisets by adding random values. 
*/ + @Test + public void randomValuesTest() { + final int log2m = 11 /*arbitrary*/; + final int regwidth = 5 /*arbitrary*/; + final int sparseThreshold = 256 /*arbitrary*/; + + for (int run = 0; run < 100; run++) { + final HLL hll = + new HLL( + log2m, + regwidth, + 128 /*explicitThreshold, arbitrary, unused*/, + sparseThreshold, + HLLType.SPARSE); + + final IntByteHashMap map = new IntByteHashMap(); + + for (int i = 0; i < sparseThreshold; i++) { + final long rawValue = RandomizedTest.randomLong(); + + final short registerIndex = ProbabilisticTestUtil.getRegisterIndex(rawValue, log2m); + final byte registerValue = ProbabilisticTestUtil.getRegisterValue(rawValue, log2m); + if (map.get(registerIndex) < registerValue) { + map.put(registerIndex, registerValue); } + + hll.addRaw(rawValue); + } + + for (IntByteCursor c : map) { + final byte expectedRegisterValue = map.get(c.key); + assertRegisterPresent(hll, c.key, expectedRegisterValue); + } + } + } + + // ************************************************************************* + // assertion helpers + /** Asserts that the register at the specified index is set to the specified value. */ + private static void assertRegisterPresent( + final HLL hll, final int registerIndex, final int registerValue) { + final IntByteHashMap sparseProbabilisticStorage = hll.sparseProbabilisticStorage; + assertEquals(sparseProbabilisticStorage.get(registerIndex), registerValue); + } + + /** Asserts that only the specified register is set and has the specified value. */ + private static void assertOneRegisterSet( + final HLL hll, final int registerIndex, final byte registerValue) { + final IntByteHashMap sparseProbabilisticStorage = hll.sparseProbabilisticStorage; + assertEquals(sparseProbabilisticStorage.size(), 1); + assertEquals(sparseProbabilisticStorage.get(registerIndex), registerValue); + } + + /** Asserts that all registers in the two {@link HLL} instances are identical. 
*/ + private static void assertElementsEqual(final HLL hllA, final HLL hllB) { + final IntByteHashMap sparseProbabilisticStorageA = hllA.sparseProbabilisticStorage; + final IntByteHashMap sparseProbabilisticStorageB = hllB.sparseProbabilisticStorage; + assertEquals(sparseProbabilisticStorageA.size(), sparseProbabilisticStorageB.size()); + for (IntByteCursor c : sparseProbabilisticStorageA) { + assertEquals(sparseProbabilisticStorageA.get(c.key), sparseProbabilisticStorageB.get(c.key)); } + } } diff --git a/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java b/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java index 68854995dff..51dd2c6dc7f 100644 --- a/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java +++ b/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java @@ -17,11 +17,6 @@ package org.apache.solr.util.stats; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.TimeUnit; - import com.codahale.metrics.Counter; import com.codahale.metrics.Gauge; import com.codahale.metrics.Histogram; @@ -30,6 +25,10 @@ import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Snapshot; import com.codahale.metrics.Timer; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.TimeUnit; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.MapWriter; import org.apache.solr.common.util.NamedList; @@ -49,10 +48,17 @@ public void testSolrTimerGetSnapshot() { timer.update(random().nextInt(Integer.MAX_VALUE - 1) + 1, TimeUnit.NANOSECONDS); } // obtain timer metrics - Map<String,Object> map = new HashMap<>(); - MetricUtils.convertTimer("", timer, MetricUtils.ALL_PROPERTIES, false, false, ".", (k, v) -> { - ((MapWriter) v).toMap(map); - }); + Map<String, Object> map = new HashMap<>(); + MetricUtils.convertTimer( + "", + timer, + MetricUtils.ALL_PROPERTIES, + false, + false, + ".", + (k, v) -> { + ((MapWriter) v).toMap(map); + }); NamedList<Object> lst = new NamedList<>(map); // check that expected metrics were obtained assertEquals(14, lst.size()); @@ -102,69 +108,93 @@ public void testMetrics() throws Exception { Gauge<String> gauge = () -> "foobar"; registry.register("gauge", gauge); - Gauge<String> error = () -> {throw new InternalError("Memory Pool not found error");}; + Gauge<String> error = + () -> { + throw new InternalError("Memory Pool not found error"); + }; registry.register("memory.expected.error", error); - MetricsMap metricsMapWithMap = new MetricsMap((detailed, map) -> { - map.put("foo", "bar"); - }); + MetricsMap metricsMapWithMap = + new MetricsMap( + (detailed, map) -> { + map.put("foo", "bar"); + }); registry.register("mapWithMap", metricsMapWithMap); - MetricsMap metricsMap = new MetricsMap(map -> { - map.putNoEx("foo", "bar"); - }); + MetricsMap metricsMap = + new MetricsMap( + map -> { + map.putNoEx("foo", "bar"); + }); registry.register("map", metricsMap); - SolrMetricManager.GaugeWrapper<Map<String,Object>> gaugeWrapper = new SolrMetricManager.GaugeWrapper<>(metricsMap, "foo-tag"); + SolrMetricManager.GaugeWrapper<Map<String, Object>> gaugeWrapper = + new SolrMetricManager.GaugeWrapper<>(metricsMap, "foo-tag"); registry.register("wrappedGauge", gaugeWrapper); - MetricUtils.toMaps(registry, Collections.singletonList(MetricFilter.ALL), MetricFilter.ALL, - MetricUtils.ALL_PROPERTIES, false, false, false, false, (k, o) -> { - Map<String,Object> v = new HashMap<>(); - if (o != null) { - ((MapWriter) o).toMap(v); - } - if (k.startsWith("counter")) { - assertEquals(1L, v.get("count")); - } else if
(k.startsWith("gauge")) { - assertEquals("foobar", v.get("value")); - } else if (k.startsWith("timer")) { - assertEquals(1L, v.get("count")); - assertTrue(((Number)v.get("min_ms")).intValue() > 100); - } else if (k.startsWith("meter")) { - assertEquals(1L, v.get("count")); - } else if (k.startsWith("histogram")) { - assertEquals(1L, v.get("count")); - } else if (k.startsWith("aggregate1")) { - assertEquals(4, v.get("count")); - Map values = (Map)v.get("values"); - assertNotNull(values); - assertEquals(4, values.size()); - Map update = (Map)values.get("a"); - assertEquals(-10, update.get("value")); - assertEquals(1, update.get("updateCount")); - update = (Map)values.get("b"); - assertEquals(-2, update.get("value")); - assertEquals(2, update.get("updateCount")); - assertEquals(-10D, v.get("min")); - assertEquals(-2D, v.get("max")); - assertEquals(-5D, v.get("mean")); - } else if (k.startsWith("aggregate2")) { - // SOLR-14252: non-Number metric aggregations should return 0 rather than throwing NPE - assertEquals(2, v.get("count")); - assertEquals(0D, v.get("min")); - assertEquals(0D, v.get("max")); - assertEquals(0D, v.get("mean")); - } else if (k.startsWith("memory.expected.error")) { - assertTrue(v.isEmpty()); - } else if (k.startsWith("map") || k.startsWith("wrapped")) { - assertNotNull(v.toString(), v.get("value")); - assertTrue(v.toString(), v.get("value") instanceof Map); - assertEquals(v.toString(), "bar", ((Map) v.get("value")).get("foo")); - } - }); + MetricUtils.toMaps( + registry, + Collections.singletonList(MetricFilter.ALL), + MetricFilter.ALL, + MetricUtils.ALL_PROPERTIES, + false, + false, + false, + false, + (k, o) -> { + Map v = new HashMap<>(); + if (o != null) { + ((MapWriter) o).toMap(v); + } + if (k.startsWith("counter")) { + assertEquals(1L, v.get("count")); + } else if (k.startsWith("gauge")) { + assertEquals("foobar", v.get("value")); + } else if (k.startsWith("timer")) { + assertEquals(1L, v.get("count")); + assertTrue(((Number) v.get("min_ms")).intValue() > 100); + } else if (k.startsWith("meter")) { + assertEquals(1L, v.get("count")); + } else if (k.startsWith("histogram")) { + assertEquals(1L, v.get("count")); + } else if (k.startsWith("aggregate1")) { + assertEquals(4, v.get("count")); + Map values = (Map) v.get("values"); + assertNotNull(values); + assertEquals(4, values.size()); + Map update = (Map) values.get("a"); + assertEquals(-10, update.get("value")); + assertEquals(1, update.get("updateCount")); + update = (Map) values.get("b"); + assertEquals(-2, update.get("value")); + assertEquals(2, update.get("updateCount")); + assertEquals(-10D, v.get("min")); + assertEquals(-2D, v.get("max")); + assertEquals(-5D, v.get("mean")); + } else if (k.startsWith("aggregate2")) { + // SOLR-14252: non-Number metric aggregations should return 0 rather than throwing NPE + assertEquals(2, v.get("count")); + assertEquals(0D, v.get("min")); + assertEquals(0D, v.get("max")); + assertEquals(0D, v.get("mean")); + } else if (k.startsWith("memory.expected.error")) { + assertTrue(v.isEmpty()); + } else if (k.startsWith("map") || k.startsWith("wrapped")) { + assertNotNull(v.toString(), v.get("value")); + assertTrue(v.toString(), v.get("value") instanceof Map); + assertEquals(v.toString(), "bar", ((Map) v.get("value")).get("foo")); + } + }); // test compact format - MetricUtils.toMaps(registry, Collections.singletonList(MetricFilter.ALL), MetricFilter.ALL, - MetricUtils.ALL_PROPERTIES, false, false, true, false, (k, o) -> { + MetricUtils.toMaps( + registry, + 
Collections.singletonList(MetricFilter.ALL), + MetricFilter.ALL, + MetricUtils.ALL_PROPERTIES, + false, + false, + true, + false, + (k, o) -> { if (k.startsWith("counter")) { assertTrue(o instanceof Long); assertEquals(1L, o); @@ -176,7 +206,7 @@ public void testMetrics() throws Exception { Map v = new HashMap<>(); ((MapWriter) o).toMap(v); assertEquals(1L, v.get("count")); - assertTrue(((Number)v.get("min_ms")).intValue() > 100); + assertTrue(((Number) v.get("min_ms")).intValue() > 100); } else if (k.startsWith("meter")) { assertTrue(o instanceof MapWriter); Map v = new HashMap<>(); @@ -192,13 +222,13 @@ public void testMetrics() throws Exception { Map v = new HashMap<>(); ((MapWriter) o).toMap(v); assertEquals(4, v.get("count")); - Map values = (Map)v.get("values"); + Map values = (Map) v.get("values"); assertNotNull(values); assertEquals(4, values.size()); - Map update = (Map)values.get("a"); + Map update = (Map) values.get("a"); assertEquals(-10, update.get("value")); assertEquals(1, update.get("updateCount")); - update = (Map)values.get("b"); + update = (Map) values.get("b"); assertEquals(-2, update.get("value")); assertEquals(2, update.get("updateCount")); } else if (k.startsWith("aggregate2")) { @@ -206,13 +236,13 @@ public void testMetrics() throws Exception { Map v = new HashMap<>(); ((MapWriter) o).toMap(v); assertEquals(2, v.get("count")); - Map values = (Map)v.get("values"); + Map values = (Map) v.get("values"); assertNotNull(values); assertEquals(2, values.size()); - Map update = (Map)values.get("a"); + Map update = (Map) values.get("a"); assertEquals(false, update.get("value")); assertEquals(1, update.get("updateCount")); - update = (Map)values.get("b"); + update = (Map) values.get("b"); assertEquals(true, update.get("value")); assertEquals(1, update.get("updateCount")); } else if (k.startsWith("memory.expected.error")) { @@ -229,8 +259,5 @@ public void testMetrics() throws Exception { assertEquals(1L, v.get("count")); } }); - } - } - diff --git a/solr/core/src/test/org/apache/solr/util/tracing/TestDistributedTracing.java b/solr/core/src/test/org/apache/solr/util/tracing/TestDistributedTracing.java index 0e9a9a60d97..624c9d162ef 100644 --- a/solr/core/src/test/org/apache/solr/util/tracing/TestDistributedTracing.java +++ b/solr/core/src/test/org/apache/solr/util/tracing/TestDistributedTracing.java @@ -17,15 +17,14 @@ package org.apache.solr.util.tracing; +import io.opentracing.mock.MockSpan; +import io.opentracing.mock.MockTracer; +import io.opentracing.util.GlobalTracer; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.concurrent.TimeoutException; import java.util.stream.Collectors; - -import io.opentracing.mock.MockSpan; -import io.opentracing.mock.MockTracer; -import io.opentracing.util.GlobalTracer; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrServerException; @@ -55,10 +54,10 @@ public static void beforeTest() throws Exception { assertTrue(GlobalTracer.registerIfAbsent(tracer)); configureCluster(4) - .addConfig("config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) + .addConfig( + "config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf")) .configure(); - CollectionAdminRequest - .createCollection(COLLECTION, "config", 2, 2) + CollectionAdminRequest.createCollection(COLLECTION, "config", 2, 2) .setPerReplicaState(SolrCloudTestCase.USE_PER_REPLICA_STATE) 
.process(cluster.getSolrClient()); cluster.waitForActiveCollection(COLLECTION, 2, 4); @@ -70,7 +69,8 @@ public static void afterTest() { } @Test - public void test() throws IOException, SolrServerException, TimeoutException, InterruptedException { + public void test() + throws IOException, SolrServerException, TimeoutException, InterruptedException { // TODO it would be clearer if we could compare the complete Span tree between reality // and what we assert it looks like in a structured visual way. @@ -80,8 +80,7 @@ public void test() throws IOException, SolrServerException, TimeoutException, In // Indexing cloudClient.add(COLLECTION, sdoc("id", "1")); finishedSpans = getAndClearSpans(); - finishedSpans.removeIf(x -> - !x.tags().get("http.url").toString().endsWith("/update")); + finishedSpans.removeIf(x -> !x.tags().get("http.url").toString().endsWith("/update")); assertEquals(2, finishedSpans.size()); assertOneSpanIsChildOfAnother(finishedSpans); // core because cloudClient routes to core @@ -97,18 +96,20 @@ public void test() throws IOException, SolrServerException, TimeoutException, In // Searching cloudClient.query(COLLECTION, new SolrQuery("*:*")); finishedSpans = getAndClearSpans(); - finishedSpans.removeIf(x -> - !x.tags().get("http.url").toString().endsWith("/select")); + finishedSpans.removeIf(x -> !x.tags().get("http.url").toString().endsWith("/select")); // one from client to server, 2 for execute query, 2 for fetching documents assertEquals(5, finishedSpans.size()); assertEquals(1, finishedSpans.stream().filter(s -> s.parentId() == 0).count()); - long parentId = finishedSpans.stream() - .filter(s -> s.parentId() == 0) - .collect(Collectors.toList()) - .get(0).context().spanId(); - for (MockSpan span: finishedSpans) { + long parentId = + finishedSpans.stream() + .filter(s -> s.parentId() == 0) + .collect(Collectors.toList()) + .get(0) + .context() + .spanId(); + for (MockSpan span : finishedSpans) { if (span.parentId() != 0 && parentId != span.parentId()) { - fail("All spans must belong to single span, but:"+finishedSpans); + fail("All spans must belong to single span, but:" + finishedSpans); } } assertEquals("get:/{collection}/select", finishedSpans.get(0).operationName()); @@ -137,9 +138,7 @@ public void testV2Api() throws Exception { new V2Request.Builder("/c/" + COLLECTION) .withMethod(SolrRequest.METHOD.POST) - .withPayload("{\n" + - " \"reload\" : {}\n" + - "}") + .withPayload("{\n" + " \"reload\" : {}\n" + "}") .build() .process(cloudClient); finishedSpans = getAndClearSpans(); @@ -148,9 +147,7 @@ public void testV2Api() throws Exception { new V2Request.Builder("/c/" + COLLECTION + "/update/json") .withMethod(SolrRequest.METHOD.POST) - .withPayload("{\n" + - " \"id\" : \"9\"\n" + - "}") + .withPayload("{\n" + " \"id\" : \"9\"\n" + "}") .withParams(params("commit", "true")) .build() .process(cloudClient); @@ -158,15 +155,16 @@ public void testV2Api() throws Exception { assertEquals("post:/c/{collection}/update/json", finishedSpans.get(0).operationName()); assertDbInstanceColl(finishedSpans.get(0)); - final V2Response v2Response = new V2Request.Builder("/c/" + COLLECTION + "/select") - .withMethod(SolrRequest.METHOD.GET) - .withParams(params("q", "id:9")) - .build() - .process(cloudClient); + final V2Response v2Response = + new V2Request.Builder("/c/" + COLLECTION + "/select") + .withMethod(SolrRequest.METHOD.GET) + .withParams(params("q", "id:9")) + .build() + .process(cloudClient); finishedSpans = getAndClearSpans(); assertEquals("get:/c/{collection}/select", 
finishedSpans.get(0).operationName()); assertDbInstanceColl(finishedSpans.get(0)); - assertEquals(1, ((SolrDocumentList)v2Response.getResponse().get("response")).getNumFound()); + assertEquals(1, ((SolrDocumentList) v2Response.getResponse().get("response")).getNumFound()); } private void assertDbInstanceColl(MockSpan mockSpan) { diff --git a/solr/core/src/test/org/apache/solr/util/tracing/TestHttpServletCarrier.java b/solr/core/src/test/org/apache/solr/util/tracing/TestHttpServletCarrier.java index 50d88d83215..dfbb84312f9 100644 --- a/solr/core/src/test/org/apache/solr/util/tracing/TestHttpServletCarrier.java +++ b/solr/core/src/test/org/apache/solr/util/tracing/TestHttpServletCarrier.java @@ -17,23 +17,21 @@ package org.apache.solr.util.tracing; -import javax.servlet.http.HttpServletRequest; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import com.google.common.collect.HashMultimap; +import com.google.common.collect.Multimap; import java.util.Enumeration; import java.util.Iterator; import java.util.Map; - -import com.google.common.collect.HashMultimap; -import com.google.common.collect.Multimap; +import javax.servlet.http.HttpServletRequest; import org.apache.commons.collections4.IteratorUtils; import org.apache.solr.SolrTestCaseJ4; import org.junit.Test; import org.mockito.stubbing.Answer; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - public class TestHttpServletCarrier extends SolrTestCaseJ4 { @Test @@ -49,20 +47,21 @@ public void test() { headers.put("c", "a"); when(req.getHeaderNames()).thenReturn(IteratorUtils.asEnumeration(headers.keySet().iterator())); - when(req.getHeaders(anyString())).thenAnswer((Answer<Enumeration<String>>) inv -> { - String key = inv.getArgument(0); - return IteratorUtils.asEnumeration(headers.get(key).iterator()); - }); + when(req.getHeaders(anyString())) + .thenAnswer( + (Answer<Enumeration<String>>) + inv -> { + String key = inv.getArgument(0); + return IteratorUtils.asEnumeration(headers.get(key).iterator()); + }); HttpServletCarrier servletCarrier = new HttpServletCarrier(req); Iterator<Map.Entry<String, String>> it = servletCarrier.iterator(); Multimap<String, String> resultBack = HashMultimap.create(); - while(it.hasNext()) { + while (it.hasNext()) { Map.Entry<String, String> entry = it.next(); resultBack.put(entry.getKey(), entry.getValue()); } assertEquals(headers, resultBack); - - } }
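
For reference, the raw-value encoding that ProbabilisticTestUtil constructs and the sparse-HLL tests above decode can be exercised standalone. The sketch below is illustrative only: HllRawValueSketch is a hypothetical class, and Long.numberOfTrailingZeros stands in for the BitUtil.leastSignificantBit helper used in the test sources; the constants mirror the tests (log2m = 11, register values capped at 31).

// Minimal standalone sketch of the HLL raw-value encoding used by the tests above.
// Hypothetical class; JDK only. Long.numberOfTrailingZeros replaces BitUtil.leastSignificantBit.
public class HllRawValueSketch {
  static long constructHLLValue(int log2m, int registerIndex, int registerValue) {
    long substreamValue = 1L << (registerValue - 1); // lowest set bit encodes the register value
    return (substreamValue << log2m) | registerIndex; // low log2m bits carry the register index
  }

  static short getRegisterIndex(long rawValue, int log2m) {
    return (short) (rawValue & ((1 << log2m) - 1));
  }

  static byte getRegisterValue(long rawValue, int log2m) {
    long substreamValue = rawValue >>> log2m;
    if (substreamValue == 0L) {
      return 0; // registers default to 0, so a zero substream is ignored
    }
    return (byte) Math.min(1 + Long.numberOfTrailingZeros(substreamValue), 31);
  }

  public static void main(String[] args) {
    final int log2m = 11;
    for (int value = 1; value <= 31; value++) {
      long raw = constructHLLValue(log2m, 42, value);
      if (getRegisterIndex(raw, log2m) != 42 || getRegisterValue(raw, log2m) != value) {
        throw new AssertionError("round-trip failed at register value " + value);
      }
    }
    System.out.println("encode/decode round-trip OK for register values 1..31");
  }
}

The low log2m bits of a raw value select a register, and the position of the lowest set bit in the remaining bits, plus one, is the register value; that is why constructHLLValue shifts a single bit left by registerValue - 1 before prepending the index.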
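
Likewise, the expected values in the SparseHLLTest smoke tests reduce to the two correction formulas quoted in their comments. A minimal sketch under the same assumptions as the tests (log2m = 11, so m = 2048; regwidth = 5, so L = log2m + 30 = 41); the estimator fed to the large-range branch below is an illustrative value chosen only to satisfy the E > 2^L / 30 condition, not a number taken from the tests:

// Sketch of the small- and large-range cardinality corrections asserted in SparseHLLTest.
public class HllCorrectionSketch {
  // Small range correction: m * log(m / V), where V = number of zero-valued registers.
  static double smallRangeCorrection(int m, int zeroRegisters) {
    return m * Math.log((double) m / zeroRegisters);
  }

  // Large range correction: -2^L * log(1 - E / 2^L), applied when E > 2^L / 30.
  static double largeRangeCorrection(double estimator, int l) {
    double twoToL = Math.pow(2, l);
    return -1.0 * twoToL * Math.log(1.0 - estimator / twoToL);
  }

  public static void main(String[] args) {
    final int log2m = 11;
    final int m = 1 << log2m; // 2048 registers, as in the tests
    final int l = log2m + 30; // regwidth = 5, so L = 41

    // One register set, m - 1 registers still zero (smallRangeSmokeTest's first block):
    // the expected cardinality is ceil(m * log(m / (m - 1))).
    System.out.println("small-range expected: " + (long) Math.ceil(smallRangeCorrection(m, m - 1)));

    // An estimator just past the large-range threshold (illustrative, not from the tests).
    double estimator = Math.pow(2, l) / 10;
    System.out.println("large-range corrected: " + (long) Math.ceil(largeRangeCorrection(estimator, l)));
  }
}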